from .get_input import get_input
|
from monty.tempfile import ScratchDir
from monty.serialization import loadfn, dumpfn
from pymatgen.electronic_structure.core import OrbitalType
from mp_api.routes.electronic_structure.query_operators import (
ESSummaryDataQuery,
BSDataQuery,
DOSDataQuery,
ObjectQuery,
)
from mp_api.routes.electronic_structure.models.core import BSPathType, DOSProjectionType
from pymatgen.analysis.magnetism.analyzer import Ordering
from pymatgen.core.periodic_table import Element
def test_es_summary_query():
op = ESSummaryDataQuery()
assert op.query(
magnetic_ordering=Ordering.FiM, is_gap_direct=True, is_metal=False
) == {
"criteria": {
"magnetic_ordering": "FiM",
"is_gap_direct": True,
"is_metal": False,
}
}
with ScratchDir("."):
dumpfn(op, "temp.json")
new_op = loadfn("temp.json")
assert new_op.query(
magnetic_ordering=Ordering.FiM, is_gap_direct=True, is_metal=False
) == {
"criteria": {
"magnetic_ordering": "FiM",
"is_gap_direct": True,
"is_metal": False,
}
}
def test_bs_data_query():
op = BSDataQuery()
q = op.query(
path_type=BSPathType.setyawan_curtarolo,
band_gap_min=0,
band_gap_max=5,
efermi_min=0,
efermi_max=5,
magnetic_ordering=Ordering.FM,
is_gap_direct=True,
is_metal=False,
)
fields = [
"bandstructure.setyawan_curtarolo.band_gap",
"bandstructure.setyawan_curtarolo.efermi",
]
c = {field: {"$gte": 0, "$lte": 5} for field in fields}
assert q == {
"criteria": {
"bandstructure.setyawan_curtarolo.magnetic_ordering": "FM",
"bandstructure.setyawan_curtarolo.is_gap_direct": True,
"bandstructure.setyawan_curtarolo.is_metal": False,
**c,
}
}
with ScratchDir("."):
dumpfn(op, "temp.json")
new_op = loadfn("temp.json")
q = new_op.query(
path_type=BSPathType.setyawan_curtarolo,
band_gap_min=0,
band_gap_max=5,
efermi_min=0,
efermi_max=5,
magnetic_ordering=Ordering.FM,
is_gap_direct=True,
is_metal=False,
)
c = {field: {"$gte": 0, "$lte": 5} for field in fields}
assert q == {
"criteria": {
"bandstructure.setyawan_curtarolo.magnetic_ordering": "FM",
"bandstructure.setyawan_curtarolo.is_gap_direct": True,
"bandstructure.setyawan_curtarolo.is_metal": False,
**c,
}
}
def test_dos_data_query():
op = DOSDataQuery()
proj_types = [
DOSProjectionType.total,
DOSProjectionType.elemental,
DOSProjectionType.orbital,
]
for proj_type in proj_types:
q = op.query(
projection_type=proj_type,
spin="1",
element=Element.Si if proj_type != DOSProjectionType.total else None,
orbital=OrbitalType.s if proj_type != DOSProjectionType.total else None,
band_gap_min=0,
band_gap_max=5,
efermi_min=0,
efermi_max=5,
magnetic_ordering=Ordering.FM,
)
if proj_type == DOSProjectionType.total:
fields = [
"dos.total.1.band_gap",
"dos.total.1.efermi",
]
elif proj_type == DOSProjectionType.elemental:
fields = [
"dos.elemental.Si.s.1.band_gap",
"dos.elemental.Si.s.1.efermi",
]
elif proj_type == DOSProjectionType.orbital:
fields = [
"dos.orbital.s.1.band_gap",
"dos.orbital.s.1.efermi",
]
c = {field: {"$gte": 0, "$lte": 5} for field in fields}
assert q == {"criteria": {"dos.magnetic_ordering": "FM", **c}}
with ScratchDir("."):
dumpfn(op, "temp.json")
new_op = loadfn("temp.json")
q = new_op.query(
projection_type=proj_type,
spin="1",
element=Element.Si if proj_type != DOSProjectionType.total else None,
orbital=OrbitalType.s if proj_type != DOSProjectionType.total else None,
band_gap_min=0,
band_gap_max=5,
efermi_min=0,
efermi_max=5,
magnetic_ordering=Ordering.FM,
)
c = {field: {"$gte": 0, "$lte": 5} for field in fields}
assert q == {"criteria": {"dos.magnetic_ordering": "FM", **c}}
def test_object_query():
op = ObjectQuery()
assert op.query(task_id="mp-149") == {"criteria": {"task_id": "mp-149"}}
with ScratchDir("."):
dumpfn(op, "temp.json")
new_op = loadfn("temp.json")
assert new_op.query(task_id="mp-149") == {"criteria": {"task_id": "mp-149"}}
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2013 Paulius Maruška
from optparse import OptionParser
from sys import stdin, stdout
from pyhex import pyhex_write_stream
def opt_parser():
"""Creates OptionParser."""
parser = OptionParser(version="pyhexview 1.0")
parser.add_option("-i", "--input-file", dest="input_file", help="file to read input from")
parser.add_option("-o", "--output-file", dest="output_file", help="file to write output to")
return parser
def main():
"""Script logic."""
parser = opt_parser()
options, arguments = parser.parse_args()
input_stream = stdin
output_stream = stdout
if options.input_file is not None:
input_stream = open(options.input_file, "rb")
if options.output_file is not None:
output_stream = open(options.output_file, "w")
with input_stream, output_stream:
pyhex_write_stream(input_stream, output_stream)
if __name__ == "__main__":
main()
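# A hypothetical invocation (assuming this file is saved as pyhexview.py;
# the filenames are illustrative):
#     python pyhexview.py -i firmware.bin -o dump.txt
# With no options, the script filters stdin to stdout.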
|
"""
Stdin: N/A
Stdout: N/A
Author: Jey Han Lau
Modified By: Carolina Zheng
Date: Jul 16
"""
import argparse
import sys
import codecs
import random
import time
import os
import pdb
import cPickle
import tensorflow as tf
import numpy as np
import gensim.models as g
import tdlm_config as cf
from ltlm_data import SOS, EOS, PAD, process_dataset, get_batch_v2
from ltlm_util import Config
from util import init_embedding
from tdlm_model import TopicModel as TM
from tdlm_model import LanguageModel as LM
#parser arguments
desc = "trains neural topic language model on a document collection (experiment settings defined in cf.py)"
parser = argparse.ArgumentParser(description=desc)
args = parser.parse_args()
###########
#functions#
###########
def run_epoch(data, models, is_training, ltlm_cf, num_batches):
####unsupervised topic and language model training####
docs_segmented, doc_bows, doc_num_tokens = data
batch_idx = 0
prev_doc_idxs = None
prev_sequence_idxs = None
prev_running_bows = np.zeros((cf.batch_size, num_tm_words))
#set training and cost ops for topic and language model training
tm_cost_ops = (tf.no_op(), tf.no_op(), tf.no_op(), tf.no_op())
lm_cost_ops = (tf.no_op(), tf.no_op(), tf.no_op(), tf.no_op())
    if models[0] is not None:
        tm_cost_ops = (models[0].tm_cost, (models[0].tm_train_op if is_training else tf.no_op()), tf.no_op(), tf.no_op())
    if models[1] is not None:
        lm_cost_ops = (tf.no_op(), tf.no_op(), models[1].lm_cost, (models[1].lm_train_op if is_training else tf.no_op()))
start_time = time.time()
lm_costs, tm_costs, lm_words, tm_words = 0.0, 0.0, 0.0, 0.0
while True:
(
data,
bows,
target,
mask,
prev_doc_idxs,
prev_sequence_idxs,
is_last_sequence,
) = get_batch_v2(
ltlm_cf,
cf.batch_size,
docs_segmented,
doc_bows,
prev_doc_idxs,
prev_sequence_idxs,
prev_running_bows,
)
if data is None:
if num_batches is None:
num_batches = batch_idx
break
batch_idx += 1
# TODO: Initialize LM hidden states if saving
# Train LM
# data = (bsz x seq_len)
model = models[1]
# if batch_idx == 1:
# pdb.set_trace()
feed_dict = {model.x: data, model.y: target, model.lm_mask: mask}
if cf.topic_number > 0:
feed_dict.update({model.doc: bows, model.tag: None})
tm_cost, _, lm_cost, _ = sess.run(lm_cost_ops, feed_dict)
# Train TM
if models[0] is not None:
model = models[0]
# TODO: target should be different for TM
feed_dict = {model.y: target, model.tm_mask: mask, model.doc: bows, model.tag: None}
tm_cost, _, lm_cost, _ = sess.run(tm_cost_ops, feed_dict)
        if tm_cost is not None:
            tm_costs += tm_cost * cf.batch_size  # keep track of full batch loss (not per-example batch loss)
            tm_words += np.sum(mask)
        if lm_cost is not None:
            lm_costs += lm_cost * cf.batch_size
            lm_words += np.sum(mask)
#print progress
output_string = "%d/%d: tm ppl = %.3f; lm ppl = %.3f; word/sec = %.1f" % \
(batch_idx, num_batches if num_batches is not None else -1, np.exp(tm_costs/max(tm_words, 1.0)), np.exp(lm_costs/max(lm_words, 1.0)), \
float(tm_words + lm_words)/(time.time()-start_time))
print_progress(batch_idx, is_training, output_string)
if cf.verbose:
sys.stdout.write("\n")
return np.exp(lm_costs/max(lm_words, 1.0)), num_batches
def print_progress(bi, is_training, output_string):
if ((bi % 200) == 0) and cf.verbose:
if is_training:
sys.stdout.write("TRAIN ")
else:
sys.stdout.write("VALID ")
sys.stdout.write(output_string + "\r")
# sys.stdout.write(output_string + "\n")
sys.stdout.flush()
######
#main#
######
#set the seeds
random.seed(cf.seed)
np.random.seed(cf.seed)
#utf-8 output
sys.stdout = codecs.getwriter('utf-8')(sys.stdout)
#set topic vector size and load word embedding model if given
if cf.word_embedding_model:
print "Loading word embedding model..."
mword = g.KeyedVectors.load_word2vec_format(cf.word_embedding_model, binary=True)
cf.word_embedding_size = mword.vector_size
# #labels given for documents
train_labels, valid_labels, num_classes = None, None, 0
# #tags given for documents
train_tags, valid_tags, tagxid, tag_len = None, None, {} , 0
# Load data
if cf.stopwords is not None:
with open(cf.stopwords, "r") as f:
stopwords = set(f.read().splitlines())
else:
stopwords = {}
train_data, val_data, test_data, vocab, num_tm_words = process_dataset(
stopwords, cf.data_path, cf.lm_sent_len, cf.doc_len, reproduce_tdlm=True
)
# print some statistics of the data
print "Vocab size =", len(vocab)
config_attrs = {
"model_type": "TDLM",
"num_tm_words": num_tm_words,
"max_seqlen": cf.lm_sent_len,
"pad_idx": vocab[PAD],
"use_all_bows": False,
"eval_false": True,
"reset_hidden": True,
}
ltlm_cf = Config(**config_attrs)
#train model
with tf.Graph().as_default(), tf.Session() as sess:
tf.set_random_seed(cf.seed)
initializer = tf.contrib.layers.xavier_initializer()
with tf.variable_scope("model", reuse=None, initializer=initializer):
tm_train = TM(is_training=True, vocab_size=len(vocab), batch_size=cf.batch_size, \
num_steps=cf.tm_sent_len, num_classes=num_classes, config=cf) if cf.topic_number > 0 else None
lm_train = LM(is_training=True, vocab_size=len(vocab), batch_size=cf.batch_size, \
num_steps=cf.lm_sent_len, config=cf, reuse_conv_variables=True) \
if cf.rnn_hidden_size > 0 else None
with tf.variable_scope("model", reuse=True, initializer=initializer):
tm_valid = TM(is_training=False, vocab_size=len(vocab), batch_size=cf.batch_size, \
num_steps=cf.tm_sent_len, num_classes=num_classes, config=cf) if cf.topic_number > 0 else None
lm_valid = LM(is_training=False, vocab_size=len(vocab), batch_size=cf.batch_size, \
num_steps=cf.lm_sent_len, config=cf) if cf.rnn_hidden_size > 0 else None
tf.global_variables_initializer().run()
#initialise word embedding
if cf.word_embedding_model:
word_emb = init_embedding(mword, vocab.idxvocab)
if cf.rnn_hidden_size > 0:
sess.run(lm_train.lstm_word_embedding.assign(word_emb))
if cf.topic_number > 0:
sess.run(tm_train.conv_word_embedding.assign(word_emb))
#save model every epoch
if cf.save_model:
if not os.path.exists(os.path.join(cf.output_dir, cf.output_prefix)):
os.makedirs(os.path.join(cf.output_dir, cf.output_prefix))
#create saver object to save model
saver = tf.train.Saver()
#train model
prev_ppl = None
num_train_batches = None
num_val_batches = None
for i in xrange(cf.epoch_size):
print "\nEpoch =", i
#run a train epoch
_, num_train_batches = run_epoch(train_data, (tm_train, lm_train), True, ltlm_cf, num_train_batches)
#run a valid epoch
curr_ppl, num_val_batches = run_epoch(val_data, (tm_valid, lm_valid), False, ltlm_cf, num_val_batches)
if cf.save_model:
if (i < 5) or (prev_ppl == None) or (curr_ppl < prev_ppl):
saver.save(sess, os.path.join(cf.output_dir, cf.output_prefix, "model.ckpt"))
prev_ppl = curr_ppl
else:
saver.restore(sess, os.path.join(cf.output_dir, cf.output_prefix, "model.ckpt"))
print "\tNew valid performance > prev valid performance: restoring previous parameters..."
#print top-N words from topics
if cf.topic_number > 0:
print "\nTopics\n======"
topics, entropy = tm_train.get_topics(sess, topn=20)
for ti, t in enumerate(topics):
print "Topic", ti, "[", ("%.2f" % entropy[ti]), "] :", " ".join([ vocab[item] for item in t ])
#generate some random sentences
if cf.rnn_hidden_size > 0:
print "\nRandom Generated Sentences\n=========================="
with tf.variable_scope("model", reuse=True, initializer=initializer):
mgen = LM(is_training=False, vocab_size=len(vocab), batch_size=1, num_steps=1, config=cf, \
reuse_conv_variables=True)
for temp in [1.0, 0.75, 0.5]:
print "\nTemperature =", temp
for _ in xrange(10):
#select a random topic
if cf.topic_number > 0:
topic = random.randint(0, cf.topic_number-1)
print "\tTopic", topic, ":",
else:
topic = -1
print "\t",
s = mgen.generate_on_topic(sess, topic, vocab[SOS], temp, cf.lm_sent_len+10, \
vocab[EOS])
s = [ vocab.get_word(item) for item in s ]
print " ".join(s)
#save model vocab and configurations
if cf.save_model:
#vocabulary information
cPickle.dump((vocab, num_tm_words), \
open(os.path.join(cf.output_dir, cf.output_prefix, "vocab.pickle"), "w"))
#create a dictionary object for config
cf_dict = {}
for k,v in vars(cf).items():
if not k.startswith("__"):
cf_dict[k] = v
cPickle.dump(cf_dict, open(os.path.join(cf.output_dir, cf.output_prefix, "config.pickle"), "w"))
|
import discord
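# Monkeypatch: give common discord.py models an __int__ that returns the
# object's snowflake ID, so int(obj) can be used wherever an ID is expected.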
_int = lambda self: self.id
discord.AppInfo.__int__ = _int
discord.Attachment.__int__ = _int
discord.AuditLogEntry.__int__ = _int
discord.emoji._EmojiTag.__int__ = _int
discord.mixins.Hashable.__int__ = _int
discord.Member.__int__ = _int
discord.Message.__int__ = _int
discord.Reaction.__int__ = _int
discord.Team.__int__ = _int
discord.Webhook.__int__ = _int
|
from dataclasses import dataclass
from markdown import markdown
@dataclass
class dc:
"""Example data class with documentation"""
#: Comment before which can be
#: multiple lines
field0: float = 0
field1: float = 1 #: Comment, one line only
field2: float = 2
"""Comment afterwards one or more
lines
"""
class YoDude:
"""
A class with the hello method.
:param intro: intro portion
"""
intro: str = " YoDude " #: The intro for the hello
def __init__(self, intro: str = None):
if intro is not None:
self.intro = intro
self.intro = self.intro.strip()
def hello(self, person: str = "Nobody") -> None:
"""Say hello to person
:param person: Say hello to the person
"""
print(markdown(f"{self.intro.strip()} {person.strip()}"))
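# A minimal usage sketch (illustrative, not part of the original module):
if __name__ == "__main__":
    YoDude("**Yo**").hello("World")  # prints '<p><strong>Yo</strong> World</p>'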
|
from src.functions.objectivefn import ObjectiveFn
import math
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm
class RastriginFn(ObjectiveFn):
def __init__(self, dims):
super(RastriginFn, self).__init__(dims)
self.domain = (-5.12, 5.12)
def get_minima(self):
minima_coords = [0 for i in range(self.dims)] # Function value is 0 here.
minima = tuple(minima_coords)
return self.evaluate(minima)
def evaluate(self, params):
A = 10
f = 0
try:
if len(params) != self.dims:
                raise Exception('number of parameters passed is not the same as the number of expected dimensions')
for param in params:
f = f + A + (param ** 2 - (A * math.cos(2 * math.pi * param)))
return f
except Exception as error:
print('Exception raised in rastrigin_fn.eval_fn: ', repr(error))
def eval_vectors(self, *X, **kwargs):
A = kwargs.get('A', 10)
return A + sum([(x ** 2 - A * np.cos(2 * math.pi * x)) for x in X])
def graph_fn(self):
A = 10
X = np.linspace(-5.12, 5.12, 200)
Y = np.linspace(-5.12, 5.12, 200)
X, Y = np.meshgrid(X, Y)
Z = self.eval_vectors(X, Y, A=10)
fig = plt.figure()
        ax = fig.add_subplot(projection='3d')  # fig.gca(projection=...) was removed in newer Matplotlib
ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=cm.plasma, linewidth=0, antialiased=False)
plt.savefig('rastrigin.png')
def contour_plot(self, save_file_name, points):
A = 10
# X = np.linspace(-5.12, 5.12, 200)
# Y = np.linspace(-5.12, 5.12, 200)
X = np.linspace(self.domain[0], self.domain[1], 200)
Y = np.linspace(self.domain[0], self.domain[1], 200)
X, Y = np.meshgrid(X, Y)
Z = self.eval_vectors(X, Y, A=10)
plt.contour(X, Y, Z)
for point in points:
# print(point)
plt.scatter(point[0], point[1], marker='X', color='r')
plt.savefig(save_file_name, dpi=300, bbox_inches='tight')
plt.close()
def is_defined_only_for_2d(self):
return False
def name(self):
return "rastrigin"
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('stages', '0017_add_login_field_for_student'),
]
operations = [
migrations.CreateModel(
name='StudentFile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('student', models.ForeignKey(on_delete=models.deletion.CASCADE, to='stages.Student')),
('fichier', models.FileField(upload_to='etudiants')),
('titre', models.CharField(max_length=200, verbose_name='Titre')),
],
),
migrations.AddField(
model_name='student',
name='mc_comment',
field=models.TextField(blank=True, verbose_name='Commentaires'),
),
]
|
from SearchAlgorithms import BuscaLargura
from SearchAlgorithms import BuscaProfundidade
from SearchAlgorithms import BuscaProfundidadeIterativa
from SearchAlgorithms import BuscaCustoUniforme
from SearchAlgorithms import BuscaGananciosa
from SearchAlgorithms import AEstrela
from PlusOneTwo import PlusOneTwo
from datetime import datetime
def test_largura():
state = PlusOneTwo(1, '', 10)
algorithm = BuscaLargura()
inicio = datetime.now()
result = algorithm.search(state)
fim = datetime.now()
print(fim - inicio)
assert result.show_path() == " ; 2 ; 2 ; 2 ; 2 ; 1"
def test_profundidade():
state = PlusOneTwo(1, '', 10)
algorithm = BuscaProfundidade()
inicio = datetime.now()
result = algorithm.search(state, 50)
fim = datetime.now()
print(fim - inicio)
assert result.show_path() == " ; 1 ; 1 ; 1 ; 1 ; 1 ; 1 ; 1 ; 1 ; 1"
def test_BPI():
state = PlusOneTwo(1, '', 10)
inicio = datetime.now()
algorithm = BuscaProfundidadeIterativa()
fim = datetime.now()
print(fim - inicio)
result = algorithm.search(state)
assert result.show_path() == " ; 1 ; 2 ; 2 ; 2 ; 2"
def test_custoUniforme():
state = PlusOneTwo(1, '', 10)
inicio = datetime.now()
algorithm = BuscaCustoUniforme()
fim = datetime.now()
print(fim - inicio)
result = algorithm.search(state)
assert result.show_path() == " ; 1 ; 2 ; 2 ; 2 ; 2"
def test_largura_bigger():
state = PlusOneTwo(1, '', 40)
algorithm = BuscaLargura()
inicio = datetime.now()
result = algorithm.search(state)
fim = datetime.now()
print(fim - inicio)
|
# https://leetcode.com/problems/ambiguous-coordinates/
#
# algorithms
# Medium (43.27%)
# Total Accepted: 5,726
# Total Submissions: 13,232
# beats 91.11% of python submissions
class Solution(object):
def ambiguousCoordinates(self, S):
"""
:type S: str
:rtype: List[str]
"""
S = S[1:-1]
length = len(S)
res = set()
for i in xrange(1, length):
left = self.add_decimal_point(S[:i])
right = self.add_decimal_point(S[i:])
            for m in left:
                for n in right:
                    res.add('(' + m + ', ' + n + ')')  # res is a set; duplicates are ignored
return list(res)
def add_decimal_point(self, s):
length = len(s)
if length == 1:
return [s]
if int(s) == 0:
return []
if s[-1] == '0':
if s[0] == '0':
return []
return [s]
if s[0] == '0':
return ['0.' + s[1:]]
res = [s]
for i in xrange(1, length):
res.append(s[:i] + '.' + s[i:])
return res
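# A worked example (results come from a set, so ordering may vary):
#     Solution().ambiguousCoordinates("(123)")
#     => ['(1, 23)', '(1, 2.3)', '(12, 3)', '(1.2, 3)']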
|
"""Unit tests for mailmerage."""
|
# This module is automatically generated by autogen.sh. DO NOT EDIT.
from . import _ICONS
class _Vrt_Light_SVG(_ICONS):
_type = "VRT_Light_SVG"
_icon_dir = "../resources/icons/VRT_Icons/VRT_svg/VRT_light_svg"
class LaserScanner1LLight(_Vrt_Light_SVG):
_icon = "zweidlaserscanner1.svg"
class LaserScannerLight(_Vrt_Light_SVG):
_icon = "zweidlaserscanner2.svg"
class LaserScanner5Light(_Vrt_Light_SVG):
_icon = "dreidlaserscanner1.svg"
class LaserScanner4Light(_Vrt_Light_SVG):
_icon = "dreidlaserscanner2.svg"
class AegisDatacellCellularDataloggerLight(_Vrt_Light_SVG):
_icon = "aegisdatacellcellulardatalogger2.svg"
class AllenBradleyControlLogixPlcLight(_Vrt_Light_SVG):
_icon = "allenbradleycontrollogixplc.svg"
class AllenBradleyFlexIo1Light(_Vrt_Light_SVG):
_icon = "allenbradleyflexio1.svg"
class AllenBradleyFlexIo2Light(_Vrt_Light_SVG):
_icon = "allenbradleyflexio2.svg"
class ApplianceLight(_Vrt_Light_SVG):
_icon = "appliance2.svg"
class AtmSwitchLight(_Vrt_Light_SVG):
_icon = "atmswitch1.svg"
class AtmSwitch2Light(_Vrt_Light_SVG):
_icon = "atmswitch2.svg"
class AuthenticationServer2Light(_Vrt_Light_SVG):
_icon = "authenticationserver2.svg"
class BranchFeederMonitorLight(_Vrt_Light_SVG):
_icon = "branchfeedermonitor.svg"
class CctvCamera2Light(_Vrt_Light_SVG):
_icon = "cctvcamera2.svg"
class CctvCamera4Light(_Vrt_Light_SVG):
_icon = "cctvcamera4.svg"
class CetMultiCircuitPowerMonitorMcpmLight(_Vrt_Light_SVG):
_icon = "cetmulticircuitpowermonitormcpm.svg"
class CommunicationsServerLight(_Vrt_Light_SVG):
_icon = "communicationsserver.svg"
class CompactPlc2Light(_Vrt_Light_SVG):
_icon = "compactplc2.svg"
class CoronisWavenisMeshMoteLight(_Vrt_Light_SVG):
_icon = "coroniswavenismeshmote.svg"
class CoronisWavenisMeshBaseStationLight(_Vrt_Light_SVG):
_icon = "coroniswavenismeshbasestation.svg"
class CurrentTransformerStripLight(_Vrt_Light_SVG):
_icon = "currenttransformerstrip.svg"
class DataAcquisitionScadaServer2Light(_Vrt_Light_SVG):
_icon = "dataacquisitionLightscadaserver2.svg"
class DataLoggerRtu2Light(_Vrt_Light_SVG):
_icon = "dataloggerrtu2.svg"
class DatabaseServerLight(_Vrt_Light_SVG):
_icon = "databaseserver2.svg"
class DesktopLight(_Vrt_Light_SVG):
_icon = "desktop2.svg"
class Desktop2Light(_Vrt_Light_SVG):
_icon = "desktop4.svg"
class DialUpModemLight(_Vrt_Light_SVG):
_icon = "dialupmodem2.svg"
class DirectoryServerLight(_Vrt_Light_SVG):
_icon = "directoryserver2.svg"
class DirectoryServer3Light(_Vrt_Light_SVG):
_icon = "directoryserver.svg"
class DocumentScanner2Light(_Vrt_Light_SVG):
_icon = "documentscanner2.svg"
class DocumentScanner4Light(_Vrt_Light_SVG):
_icon = "documentscanner4.svg"
class DocumentScanner6Light(_Vrt_Light_SVG):
_icon = "documentscanner6.svg"
class DocumentScanner8Light(_Vrt_Light_SVG):
_icon = "documentscanner8.svg"
class EndUsers2Light(_Vrt_Light_SVG):
_icon = "endusers2.svg"
class EnergyUtilityMeterLight(_Vrt_Light_SVG):
_icon = "energyutilitymeter.svg"
class Facsimile2Light(_Vrt_Light_SVG):
_icon = "facsimile2.svg"
class Facsimile4Light(_Vrt_Light_SVG):
_icon = "facsimile4.svg"
class FibreOpticBreakOutTray2Light(_Vrt_Light_SVG):
_icon = "FibreOpticBreakoutTray2.svg"
class FileServer2Light(_Vrt_Light_SVG):
_icon = "fileserver2.svg"
class Firewall1Light(_Vrt_Light_SVG):
_icon = "firewall1.svg"
class Firewall2Light(_Vrt_Light_SVG):
_icon = "firewall2.svg"
class FlatPanelDisplay2Light(_Vrt_Light_SVG):
_icon = "flatpaneldisplay2.svg"
class GasMeterLight(_Vrt_Light_SVG):
_icon = "gasmeter.svg"
class GenericBlackBox2Light(_Vrt_Light_SVG):
_icon = "genericblackbox2.svg"
class GenericPlcDcsController2Light(_Vrt_Light_SVG):
_icon = "genericplcdcscontroller2.svg"
class Hub1Light(_Vrt_Light_SVG):
_icon = "hub1.svg"
class Hub2Light(_Vrt_Light_SVG):
_icon = "hub2.svg"
class IndustrialBarcodeScanner2Light(_Vrt_Light_SVG):
_icon = "industrialbarcodescanner2.svg"
class IndustrialBarcodeScanner4Light(_Vrt_Light_SVG):
_icon = "industrialbarcodescanner4.svg"
class IndustrialCellularModem2Light(_Vrt_Light_SVG):
_icon = "industrialcellularmodem2.svg"
class IndustrialCellularModem4Light(_Vrt_Light_SVG):
_icon = "industrialcellularmodem4.svg"
class IndustrialEthernetToSerialConverter2Light(_Vrt_Light_SVG):
_icon = "industrialethernettoserialconverter2.svg"
class IndustrialFibreToEthernetConverter2Light(_Vrt_Light_SVG):
_icon = "industrialfibretoethernetconverter2.svg"
class IndustrialPc2Light(_Vrt_Light_SVG):
_icon = "industrialpc2.svg"
class IndustrialPc4Light(_Vrt_Light_SVG):
_icon = "industrialpc4.svg"
class IndustrialSwitch2Light(_Vrt_Light_SVG):
_icon = "industrialswitch2.svg"
class InkjetPrinter2Light(_Vrt_Light_SVG):
_icon = "inkjetprinter2.svg"
class InkjetPrinter4Light(_Vrt_Light_SVG):
_icon = "inkjetprinter4.svg"
class Laptop2Light(_Vrt_Light_SVG):
_icon = "laptop2.svg"
class Laptop4Light(_Vrt_Light_SVG):
_icon = "laptop4.svg"
class LargeHmiPanel2Light(_Vrt_Light_SVG):
_icon = "largehmipanel2.svg"
class LaserPrinter2Light(_Vrt_Light_SVG):
_icon = "laserprinter2.svg"
class LaserPrinter4Light(_Vrt_Light_SVG):
_icon = "laserprinter4.svg"
class LeadAcidBattery2Light(_Vrt_Light_SVG):
_icon = "leadacidbattery2.svg"
class MailServer2Light(_Vrt_Light_SVG):
_icon = "mailserver2.svg"
class MicrowaveSatelliteBase2Light(_Vrt_Light_SVG):
_icon = "microwavesatellitebase2.svg"
class MultiRoleServer2Light(_Vrt_Light_SVG):
_icon = "multiroleserver2.svg"
class NetworkAttachedStorage2Light(_Vrt_Light_SVG):
_icon = "networkattachedstorage2.svg"
class Projector2Light(_Vrt_Light_SVG):
_icon = "projector2.svg"
class RackServer2Light(_Vrt_Light_SVG):
_icon = "rackserver2.svg"
class RackmountSwitch2Light(_Vrt_Light_SVG):
_icon = "rackmountswitch2.svg"
class RoleEmblemAuthenticationLight(_Vrt_Light_SVG):
_icon = "roleemblemauthentication.svg"
class RoleEmblemDataAcquisitionScadaLight(_Vrt_Light_SVG):
_icon = "roleemblemdataacquisitionLightscada.svg"
class RoleEmblemDatabaseLight(_Vrt_Light_SVG):
_icon = "roleemblemdatabase.svg"
class RoleEmblemDirectoryLight(_Vrt_Light_SVG):
_icon = "roleemblemdirectory.svg"
class RoleEmblemFileLight(_Vrt_Light_SVG):
_icon = "roleemblemfile.svg"
class RoleEmblemMailLight(_Vrt_Light_SVG):
_icon = "roleemblemmail.svg"
class RoleEmblemVideoLight(_Vrt_Light_SVG):
_icon = "roleemblemvideo.svg"
class RoleEmblemVirtualisationObjectLight(_Vrt_Light_SVG):
_icon = "roleemblemvirtualisationobject.svg"
class RoleEmblemVirtualisationLight(_Vrt_Light_SVG):
_icon = "roleemblemvirtualisation.svg"
class RoleEmblemWebLight(_Vrt_Light_SVG):
_icon = "roleemblemweb.svg"
class Router2Light(_Vrt_Light_SVG):
_icon = "router2.svg"
class RouterFirewallLight(_Vrt_Light_SVG):
_icon = "routerfirewall.svg"
class RouterLight(_Vrt_Light_SVG):
_icon = "router.svg"
class SinglePhaseEnergyMeter2Light(_Vrt_Light_SVG):
_icon = "singlephaseenergymeter2.svg"
class SinglePhaseEnergyMeterDinLight(_Vrt_Light_SVG):
_icon = "singlephaseenergymeterdin.svg"
class SmallHmiPanel2Light(_Vrt_Light_SVG):
_icon = "smallhmipanel2.svg"
class SmallTouchPanel2Light(_Vrt_Light_SVG):
_icon = "smalltouchpanel2.svg"
class Smartphone2Light(_Vrt_Light_SVG):
_icon = "smartphone2.svg"
class Smartphone4Light(_Vrt_Light_SVG):
_icon = "smartphone4.svg"
class SolarPvPanel1Light(_Vrt_Light_SVG):
_icon = "solarpvpanel1.svg"
class SolarPvPanel2Light(_Vrt_Light_SVG):
_icon = "solarpvpanel2.svg"
class Switch1Light(_Vrt_Light_SVG):
_icon = "switch1.svg"
class Switch2Light(_Vrt_Light_SVG):
_icon = "switch2.svg"
class Tablet2Light(_Vrt_Light_SVG):
_icon = "tablet2.svg"
class Tablet4Light(_Vrt_Light_SVG):
_icon = "tablet4.svg"
class Telephone2Light(_Vrt_Light_SVG):
_icon = "telephone2.svg"
class Telephone4Light(_Vrt_Light_SVG):
_icon = "telephone4.svg"
class ThinClient2Light(_Vrt_Light_SVG):
_icon = "thinclient2.svg"
class ThinClient4Light(_Vrt_Light_SVG):
_icon = "thinclient4.svg"
class ThreePhaseEnergyMeter2Light(_Vrt_Light_SVG):
_icon = "threephaseenergymeter2.svg"
class ThreePhaseEnergyMeterDinLight(_Vrt_Light_SVG):
_icon = "threephaseenergymeterdin.svg"
class ThreePhaseMultiFunctionMeter2Light(_Vrt_Light_SVG):
_icon = "threephasemultifunctionmeter2.svg"
class ThreePhasePowerQualityAnalyser2Light(_Vrt_Light_SVG):
_icon = "threephasepowerqualityanalyser2.svg"
class TowerServer2Light(_Vrt_Light_SVG):
_icon = "towerserver2.svg"
class UnifiedCommunicationsServer2Light(_Vrt_Light_SVG):
_icon = "unifiedcommunicationsserver2.svg"
class UninterruptiblePowerSupplyUps2Light(_Vrt_Light_SVG):
_icon = "uninterruptiblepowersupplyLightups2.svg"
class VideoServer2Light(_Vrt_Light_SVG):
_icon = "videoserver2.svg"
class VirtualisationObjectServer2Light(_Vrt_Light_SVG):
_icon = "virtualisationobjectserver2.svg"
class VirtualisationServer2Light(_Vrt_Light_SVG):
_icon = "virtualisationserver2.svg"
class VpnConcentrator1Light(_Vrt_Light_SVG):
_icon = "vpnconcentrator1.svg"
class VpnConcentrator2Light(_Vrt_Light_SVG):
_icon = "vpnconcentrator2.svg"
class WagesHubASeries2Light(_Vrt_Light_SVG):
_icon = "wageshubaseries2.svg"
class WaterThermalMeterLight(_Vrt_Light_SVG):
_icon = "waterthermalmeter.svg"
class WebServer2Light(_Vrt_Light_SVG):
_icon = "webserver2.svg"
class WiredSegmentInternetLight(_Vrt_Light_SVG):
_icon = "wiredsegmentLightinternet.svg"
class WiredSegmentSubnetLight(_Vrt_Light_SVG):
_icon = "wiredsegmentLightsubnet.svg"
class WirelessAccessPoint1Light(_Vrt_Light_SVG):
_icon = "wirelessaccesspoint1.svg"
class WirelessAccessPoint2Light(_Vrt_Light_SVG):
_icon = "wirelessaccesspoint2.svg"
class WirelessBase2Light(_Vrt_Light_SVG):
_icon = "wirelessbase2.svg"
class WirelessRouterAccessPoint2Light(_Vrt_Light_SVG):
_icon = "wirelessrouteraccesspoint2.svg"
class WirelessRouterAccessPoint4Light(_Vrt_Light_SVG):
_icon = "wirelessrouteraccesspoint4.svg"
class WirelessRouterFirewallLight(_Vrt_Light_SVG):
_icon = "wirelessrouterfirewall.svg"
class WirelessRouterLight(_Vrt_Light_SVG):
_icon = "wirelessrouter.svg"
class WorkgroupSwitch3Light(_Vrt_Light_SVG):
_icon = "workgroupswitch2.svg"
# Aliases
|
# -*- coding: utf-8 -*-
import json
import unittest
from hypothesis import assume
from hypothesis import given
from hypothesis import strategies as st
import pytest
from cashflow.utils import serialize as serialize_utils
class CustomJSONEncoderTestCase(unittest.TestCase):
@given(st.dictionaries(keys=st.text(), values=st.text()))
def test_encoder_with_text(self, data):
assert serialize_utils.json_dumps(data) == json.dumps(data)
@given(st.dictionaries(keys=st.integers(), values=st.integers()))
def test_encode_with_integers(self, data):
assert serialize_utils.json_dumps(data) == json.dumps(data)
@given(st.dictionaries(keys=st.text(), values=st.uuids()))
def test_encode_with_uuids(self, data):
assume(data != {})
with pytest.raises(TypeError):
json.dumps(data)
serialize_utils.json_dumps(data)
    @given(st.dictionaries(keys=st.text(), values=st.datetimes()))
def test_encode_with_datetimes(self, data):
assume(data != {})
with pytest.raises(TypeError):
json.dumps(data)
serialize_utils.json_dumps(data)
|
# the import section
import random
current_recommendations = []
api_key = "3f44093c7132e8d90dfece35961ffafa"
def get_results(arr):
    """Pop and return six distinct random elements from arr (mutates arr).

    Note: the loop runs until six distinct values have been collected, so it
    can spin for a long time if arr holds few distinct values.
    """
    result = []
    while len(result) <= 5:
        index = random.randint(0, len(arr) - 1)
        if arr[index] not in result:
            result.append(arr.pop(index))
    return result
str = "What in god's name am i doing with my god's name am i doing with what am i life"
str_list = str.split()
print(str_list)
result = get_results(str_list)
print(result)
print(str_list)
|
"""Cmput404Project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic.base import TemplateView
from accounts import views
from django.contrib.auth.decorators import login_required
from rest_framework.routers import DefaultRouter
from rest_framework.documentation import include_docs_urls
from rest_framework_swagger.views import get_swagger_view
router = DefaultRouter()
router.register('users', views.UserViewSet)
schema_view = get_swagger_view(title='API Docs')
urlpatterns = [
# the basic homepage
path('', views.home, name='home'),
# admin paths
path('admin/', admin.site.urls),
    path('accounts/', include('accounts.urls', namespace='signup')),  # requests under accounts/ are routed to accounts.urls
    path('accounts/', include('django.contrib.auth.urls')),
    # Api docs
    re_path(r'docs/?$', schema_view),
    # Friend Requests
    re_path(r'friendrequest/?$', views.FriendRequest.as_view()),
    # unfollow
    re_path(r'unfollowrequest/(?P<pk>[0-9]+)?$', views.UnFollow.as_view(), name="unfollowrequest"),
    # all public
    # TODO: this will change the path used for making posts.
    # re_path(r'posts/?$', views.PublicPosts.as_view()),
    path('posts/', views.PublicPosts.as_view()),
    path('posts', views.PublicPosts.as_view()),
    # handle get/post for author posting
    # path('author/posts', views.AuthorPosts.as_view()),
    path('author/posts/', views.AuthorPosts.as_view(), name='make_post'),
    path('author/posts', views.AuthorPosts.as_view(), name='make_post'),
    # author endpoints
    path('author/<str:author_id>/', views.AuthorProfile.as_view()),
    # post endpoints
    path('posts/<str:post_id>', views.PostById.as_view(), name='show_post'),
    path('posts/<str:post_id>/', views.PostById.as_view(), name='show_post'),
    # comment endpoints
    path('posts/<str:post_id>/comment', views.Comments.as_view()),
    path('posts/<str:post_id>', views.PostById.as_view()),
]
|
from hyperopt import hp
from sklearn.ensemble import AdaBoostRegressor as _AdaBoostRegressor
from script.sklearn_like_toolkit.warpper.base.BaseWrapperReg import BaseWrapperReg
from script.sklearn_like_toolkit.warpper.base.MixIn import MetaBaseWrapperRegWithABC
class skAdaBoostReg(_AdaBoostRegressor, BaseWrapperReg, metaclass=MetaBaseWrapperRegWithABC):
def __init__(self, base_estimator=None, n_estimators=50, learning_rate=1., loss='linear', random_state=None):
n_estimators = int(n_estimators)
_AdaBoostRegressor.__init__(self, base_estimator, n_estimators, learning_rate, loss, random_state)
BaseWrapperReg.__init__(self)
HyperOpt_space = {
'n_estimators': hp.qloguniform('n_estimators', 2, 6, 1),
'learning_rate': hp.loguniform('learning_rate', -8, 1),
'loss': hp.choice('loss', ['linear', 'square', 'exponential']),
}
tuning_grid = {
'base_estimator': None,
'n_estimators': 50,
'learning_rate': 1.,
'loss': 'linear',
'random_state': None,
}
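# A minimal usage sketch (assumes BaseWrapperReg adds no required constructor
# arguments; the data names are illustrative):
#     reg = skAdaBoostReg(n_estimators=100)
#     reg.fit(X_train, y_train)
#     y_pred = reg.predict(X_test)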
|
import numpy as np
import copy
from typing import List
'''
Check that inputs adhere to canonical form
'''
def is_canonical(c: List[float], A: List[List[float]], b: List[float]) -> bool:
n = len(c)
p = len(A)
# Check all inputs are of non-zero length
if (n == 0): return False
if (p == 0): return False
# Check that A is pxn matrix
for x in A:
if (type(x) != list): return False
if (len(x) != n): return False
# Check that length of b matches num rows in A
if (p != len(b)): return False
# Check that all elements in b are non-negative
for y in b:
if (y < 0): return False
return True
'''
Function to define an LP problem as a tableau
c is the 1xn vector of coefficients in the objective function
A is the pxn list where each inner list is a vector of coefficients
for the respective constraint equation
b is the 1xp list of right-hand side values for the constraints
'''
def to_tableau(c: List[float], A: List[List[float]], b: List[float]) -> np.ndarray:
n = len(c)
p = len(A)
# Define the lower rows of tableau
A_sub = np.array(A, dtype=float)
slack_vars = np.eye(p, dtype=float)
b_sub = np.reshape(np.array(b, dtype=float), (p,1))
lower_rows = np.concatenate((np.zeros((p,1)),A_sub, slack_vars, b_sub), axis=1)
# Define top row which corresponds to objective function
c_cop = copy.copy(c)
c_cop.insert(0, 1.0)
t_sub = np.array(c_cop, dtype=float)
top_row = np.concatenate((t_sub, np.zeros((p+1))), axis=0)
return np.insert(lower_rows, 0, top_row, axis=0)
class LP:
def __init__(self, c: List[float], A: List[List[float]], b: List[float]):
if (not is_canonical(c,A,b)):
raise ValueError("Inputs do not adhere to canonical form")
else:
self.tab = to_tableau(c,A,b)
self.pcol = None
self.prow = None
    # Decode the current tableau into a feasible solution.
    # Solution returned is an array of the form:
    # [ x_1, ... , x_n, s_1, ... , s_p, optimal_function_value ]
def decode(self):
n = self.tab.shape[1]-2
solution = [None]*n
for i in range(1,self.tab.shape[1]-1):
col = self.tab[:,i]
# Check if col is a basic var
if (np.count_nonzero(col==0) == self.tab.shape[0]-1):
# Calculate value accordingly
idx = np.nonzero(col)
value = self.tab[idx,-1]/col[idx]
solution[i-1] = value[0][0]
else:
solution[i-1] = 0
solution.append(self.tab[0,-1])
return solution
# Choose the pivot column w.r.t. current tableau state
def getPivCol(self):
        # Most negative reduced cost (Dantzig's rule); argmin breaks ties by first occurrence
entv = np.argmin(self.tab[0][1:-1])+1
self.pcol = entv
# Conduct ratio test to choose pivot row
def getPivRow(self):
        if self.pcol is None:
            raise ValueError("pcol attribute must be set to obtain prow")
tgt_col = self.tab[:,self.pcol]
rhs_col = self.tab[:,-1]
non_zero_idx = np.nonzero(tgt_col>0)[0]
non_zero_vals = tgt_col[non_zero_idx]
rhs_vals = rhs_col[non_zero_idx]
ratios = np.divide(rhs_vals, non_zero_vals)
self.prow = non_zero_idx[np.argmin(ratios)]
# Execute a tableau pivot
def pivot(self):
        if self.prow is None:
            raise ValueError("prow and pcol attributes must be set to execute pivot")
piv_val = self.tab[self.prow, self.pcol]
multipliers = np.divide(self.tab[:,self.pcol]*-1., piv_val)
multipliers[self.prow] = 1.0/piv_val
init_transf = [self.tab[self.prow,:]*mult for mult in multipliers]
pivoted_tab = np.add(init_transf, self.tab)
pivoted_tab[self.prow] = init_transf[self.prow]
self.tab = pivoted_tab
# Iteratively pivot until optimal solution found
def optimize(self, max_iter: int = 10000):
counter = 1
while (counter < max_iter):
if (np.count_nonzero(self.tab[0]<0) == 0):
break
self.getPivCol()
self.getPivRow()
self.pivot()
counter += 1
if (counter < max_iter):
print("Optimal solution found --- Simplex Algorithm successfully terminated")
else:
print("Max iterations reached")
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright(c) 2015 Nippon Telegraph and Telephone Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This file defines the RecoveryControllerStarter class.
"""
import threading
import sys
import datetime
import ConfigParser
import traceback
import json
import masakari_worker as worker
import masakari_config as config
import masakari_util as util
import os
from eventlet import greenthread
parentdir = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.path.pardir))
# rootdir = os.path.abspath(os.path.join(parentdir, os.path.pardir))
# project root directory needs to be added at the list head rather than the tail
# because this file, named 'masakari', conflicts with the directory name
if parentdir not in sys.path:
sys.path = [parentdir] + sys.path
import db.api as dbapi
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
log_process_begin_and_end = util.LogProcessBeginAndEnd(LOG)
VM_LIST = "vm_list"
class RecoveryControllerStarter(object):
"""
RecoveryControllerStarter class:
This class executes startup processing of VM Recover execution thread.
"""
def __init__(self, config_object):
"""
Constructor:
This constructor creates RecoveryControllerWorker object.
"""
self.rc_config = config_object
self.rc_worker = worker.RecoveryControllerWorker(config_object)
self.rc_util = util.RecoveryControllerUtil()
self.rc_util_db = util.RecoveryControllerUtilDb(config_object)
self.rc_util_api = util.RecoveryControllerUtilApi(config_object)
@log_process_begin_and_end.output_log
def _compare_timestamp(self, timestamp_1, timestamp_2):
delta = timestamp_1 - timestamp_2
return long(delta.total_seconds())
@log_process_begin_and_end.output_log
def _create_vm_list_db_for_failed_instance(self,
session,
notification_id,
notification_uuid):
try:
conf_recover_starter_dic = self.rc_config.get_value(
'recover_starter')
interval_to_be_retry = conf_recover_starter_dic.get(
"interval_to_be_retry")
max_retry_cnt = conf_recover_starter_dic.get("max_retry_cnt")
msg = "Do get_one_vm_list_by_uuid_create_at_last."
LOG.info(msg)
result = dbapi.get_one_vm_list_by_uuid_create_at_last(
session, notification_uuid)
msg = "Succeeded in get_one_vm_list_by_uuid_create_at_last. " \
+ "Return_value = " + str(result)
LOG.info(msg)
primary_id = None
# row_cnt is always 0 or 1
if not result:
primary_id = self.rc_util_db.insert_vm_list_db(
session, notification_id, notification_uuid, 0)
return primary_id
else:
result_progress = result.progress
result_create_at = result.create_at
result_retry_cnt = result.retry_cnt
delta = self._compare_timestamp(
datetime.datetime.now(), result_create_at)
if result_progress == 2 and \
delta <= long(interval_to_be_retry):
if result_retry_cnt < long(max_retry_cnt):
primary_id = self.rc_util_db.insert_vm_list_db(
session,
notification_id,
notification_uuid,
result_retry_cnt + 1)
return primary_id
else:
# Not insert vm_list db.
msg = "Do not insert a record" \
+ " into vm_list db because retry_cnt about " \
+ notification_uuid \
+ " is over " \
+ max_retry_cnt \
+ " times."
LOG.warning(msg)
return None
elif result_progress == 2 and \
delta > long(interval_to_be_retry):
primary_id = self.rc_util_db.insert_vm_list_db(
session, notification_id, notification_uuid, 0)
return primary_id
else:
# Not insert vm_list db.
msg = "Do not insert a record " \
+ "into vm_list db because progress of " \
+ notification_uuid \
+ " is " \
+ str(result_progress)
LOG.warning(msg)
return None
except KeyError:
error_type, error_value, traceback_ = sys.exc_info()
tb_list = traceback.format_tb(traceback_)
LOG.error(error_type)
LOG.error(error_value)
for tb in tb_list:
LOG.error(tb)
raise KeyError
@log_process_begin_and_end.output_log
def _create_vm_list_db_for_failed_host(self, session,
notification_id,
notification_uuid):
try:
conf_recover_starter_dic = self.rc_config.get_value(
'recover_starter')
interval_to_be_retry = conf_recover_starter_dic.get(
"interval_to_be_retry")
max_retry_cnt = conf_recover_starter_dic.get("max_retry_cnt")
msg = "Do get_one_vm_list_by_uuid_and_progress_create_at_last."
LOG.info(msg)
result = dbapi.get_one_vm_list_by_uuid_and_progress_create_at_last(
session,
notification_uuid)
msg = "Succeeded in " \
+ "get_one_vm_list_by_uuid_and_progress_create_at_last. " \
+ "Return_value = " + str(result)
LOG.info(msg)
primary_id = None
if not result:
primary_id = self.rc_util_db.insert_vm_list_db(
session, notification_id, notification_uuid, 0)
return primary_id
else:
msg = "Do not insert a record into vm_list db " \
"because there are same uuid records that " \
"progress is 0 or 1."
LOG.warning(msg)
return None
except KeyError:
error_type, error_value, traceback_ = sys.exc_info()
tb_list = traceback.format_tb(traceback_)
LOG.error(error_type)
LOG.error(error_value)
for tb in tb_list:
LOG.error(tb)
raise KeyError
def add_failed_instance(self, notification_id,
notification_uuid, retry_mode):
"""
VM recover start thread :
This thread starts the VM recover execution thread.
:param notification_id: The notification ID included in the
notification
:param notification_uuid: The recovery target VM UUID of which are
included in the notification
:param retry_mode: Set True in the re-processing time of call,
Set the False in the normal processing time of call
"""
try:
self.rc_config.set_request_context()
db_engine = dbapi.get_engine(self.rc_config)
session = dbapi.get_session(db_engine)
# Get primary id of vm_list
primary_id = self._create_vm_list_db_for_failed_instance(
session, notification_id, notification_uuid)
# update record in notification_list
self.rc_util_db.update_notification_list_db(
session, 'progress', 2, notification_id)
# create semaphore (Multiplicity = 1)
sem_recovery_instance = threading.Semaphore(1)
# create and start thread
if primary_id:
if retry_mode is True:
# Skip recovery_instance.
# Will delegate to handle_pending_instances
msg = "RETRY MODE. Skip recovery_instance thread" \
+ " vm_uuide=" + notification_uuid \
+ " notification_id=" + notification_id
LOG.info(msg)
else:
msg = "Run thread rc_worker.recovery_instance." \
+ " notification_uuid=" + notification_uuid \
+ " primary_id=" + str(primary_id)
LOG.info(msg)
thread_name = self.rc_util.make_thread_name(
VM_LIST, primary_id)
threading.Thread(target=self.rc_worker.recovery_instance,
name=thread_name,
args=(notification_uuid, primary_id,
sem_recovery_instance)).start()
return
except KeyError:
error_type, error_value, traceback_ = sys.exc_info()
tb_list = traceback.format_tb(traceback_)
LOG.error(error_type)
LOG.error(error_value)
for tb in tb_list:
LOG.error(tb)
return
except:
error_type, error_value, traceback_ = sys.exc_info()
tb_list = traceback.format_tb(traceback_)
LOG.error(error_type)
LOG.error(error_value)
for tb in tb_list:
LOG.error(tb)
return
def add_failed_host(self,
notification_id,
notification_hostname,
notification_cluster_port,
retry_mode):
"""
Node recover start thread :
This thread starts the VM recover execution thread,
only the number of existing vm in the recovery target node.
:param notification_id: The notification ID included in the
notification
:param notification_hostname: The host name of the failure node that
is included in the notification
"""
try:
self.rc_config.set_request_context()
db_engine = dbapi.get_engine(self.rc_config)
session = dbapi.get_session(db_engine)
conf_dict = self.rc_config.get_value('recover_starter')
recovery_max_retry_cnt = conf_dict.get('recovery_max_retry_cnt')
recovery_retry_interval = conf_dict.get('recovery_retry_interval')
vm_list = self.rc_util_api.fetch_servers_on_hypervisor(
notification_hostname)
# Count vm_list
if len(vm_list) == 0:
msg = "There is no instance in " + notification_hostname + "."
LOG.info(msg)
# update record in notification_list
self.rc_util_db.update_notification_list_db(
session, 'progress', 2, notification_id)
return
else:
msg = "Do get_all_notification_list_by_id_for_update."
LOG.info(msg)
result = dbapi.get_all_notification_list_by_id_for_update(
session, notification_id)
msg = "Succeeded in " \
+ "get_all_notification_list_by_id_for_update. " \
+ "Return_value = " + str(result)
LOG.info(msg)
recover_to = result.pop().recover_to
if retry_mode is False:
msg = "Do get_all_reserve_list_by_hostname_not_deleted."
LOG.info(msg)
cnt = dbapi.get_all_reserve_list_by_hostname_not_deleted(
session,
recover_to)
msg = "Succeeded in " \
+ "get_all_reserve_list_by_hostname_not_deleted. " \
+ "Return_value = " + str(cnt)
LOG.info(msg)
if not cnt:
msg = "Do " \
+ "get_one_reserve_list_by_cluster_port_for_update."
LOG.info(msg)
cnt = dbapi.\
get_one_reserve_list_by_cluster_port_for_update(
session,
notification_cluster_port,
notification_hostname
)
msg = "Succeeded in " \
+ "get_one_reserve_list_by_cluster_port_for_update. " \
+ "Return_value = " + str(cnt)
LOG.info(msg)
if not cnt:
msg = "The reserve node not exist in " \
"reserve_list DB, " \
"so do not recover instances."
LOG.warning(msg)
                            self.rc_util_db.update_notification_list_db(
                                session, 'progress', 3, notification_id)
return
result = cnt.pop()
recover_to = result.hostname
update_at = datetime.datetime.now()
msg = "Do " \
+ "update_notification_list_by_notification_id_recover_to."
LOG.info(msg)
dbapi.update_notification_list_by_notification_id_recover_to(
session,
notification_id,
update_at,
recover_to
)
msg = "Succeeded in " \
+ "update_notification_list_by_notification_id_recover_to."
LOG.info(msg)
delete_at = datetime.datetime.now()
msg = "Do update_reserve_list_by_hostname_as_deleted."
LOG.info(msg)
dbapi.update_reserve_list_by_hostname_as_deleted(
session, recover_to, delete_at)
msg = "Succeeded in " \
+ "update_reserve_list_by_hostname_as_deleted."
LOG.info(msg)
# create semaphore (Multiplicity is get from config.)
conf_dict = self.rc_config.get_value('recover_starter')
sem_recovery_instance = threading.Semaphore(
int(conf_dict.get('semaphore_multiplicity')))
incomplete_list = []
for i in range(0, int(recovery_max_retry_cnt)):
incomplete_list = []
for vm_uuid in vm_list:
primary_id = self._create_vm_list_db_for_failed_host(
session, notification_id, vm_uuid)
if primary_id:
if retry_mode is True:
                            # Skip recovery_instance thread. Will delegate to
                            # handle_pending_instances.
                            msg = "RETRY MODE. Skip recovery_instance thread" \
                                + " vm_uuid=" + vm_uuid \
                                + " notification_id=" + notification_id
LOG.info(msg)
else:
msg = "Run thread rc_worker.recovery_instance." \
+ " vm_uuid=" + vm_uuid \
+ " primary_id=" + str(primary_id)
LOG.info(msg)
thread_name = self.rc_util.make_thread_name(
VM_LIST, primary_id)
threading.Thread(
target=self.rc_worker.recovery_instance,
name=thread_name,
args=(vm_uuid, primary_id,
sem_recovery_instance)).start()
else:
if retry_mode is True:
continue
else:
incomplete_list.append(vm_uuid)
if incomplete_list:
vm_list = incomplete_list
greenthread.sleep(int(recovery_retry_interval))
else:
break
for vm_uuid in incomplete_list:
primary_id = self.rc_util_db.insert_vm_list_db(
session, notification_id, vm_uuid, 0)
                # Start recovery_instance threads for instances that remain incomplete.
msg = "Run thread rc_worker.recovery_instance." \
+ " vm_uuid=" + vm_uuid \
+ " primary_id=" + str(primary_id)
LOG.info(msg)
thread_name = self.rc_util.make_thread_name(
VM_LIST, primary_id)
threading.Thread(
target=self.rc_worker.recovery_instance,
name=thread_name,
args=(vm_uuid, primary_id,
sem_recovery_instance)).start()
# update record in notification_list
self.rc_util_db.update_notification_list_db(
session, 'progress', 2, notification_id)
return
except KeyError:
error_type, error_value, traceback_ = sys.exc_info()
tb_list = traceback.format_tb(traceback_)
LOG.error(error_type)
LOG.error(error_value)
for tb in tb_list:
LOG.error(tb)
return
except:
error_type, error_value, traceback_ = sys.exc_info()
tb_list = traceback.format_tb(traceback_)
LOG.error(error_type)
LOG.error(error_value)
for tb in tb_list:
LOG.error(tb)
return
@log_process_begin_and_end.output_log
def _update_old_records_vm_list(self, session):
conf_dict = self.rc_config.get_value('recover_starter')
notification_expiration_sec = int(conf_dict.get(
'notification_expiration_sec'))
now = datetime.datetime.now()
border_time = now - \
datetime.timedelta(seconds=notification_expiration_sec)
border_time_str = border_time.strftime('%Y-%m-%d %H:%M:%S')
msg = "Do get_old_records_vm_list."
LOG.info(msg)
result = dbapi.get_old_records_vm_list(
session,
border_time_str,
border_time_str
)
msg = "Succeeded in get_old_records_vm_list. " \
+ "Return_value = " + str(result)
LOG.info(msg)
if result:
msg = 'Old and incomplete records will be skipped.'
LOG.info(msg)
# Set progress = 4 for old record
for row in result:
update_val = {'progress': 4,
'update_at': datetime.datetime.now(),
'delete_at': datetime.datetime.now()
}
msg = "Do update_vm_list_by_id_dict."
LOG.info(msg)
dbapi.update_vm_list_by_id_dict(session, row.id, update_val)
msg = "Succeeded in update_vm_list_by_id_dict."
LOG.info(msg)
@log_process_begin_and_end.output_log
def _find_reprocessing_records_vm_list(self, session):
return_value = []
msg = "Do get_all_vm_list_by_progress."
LOG.info(msg)
result = dbapi.get_all_vm_list_by_progress(session)
msg = "Succeeded in get_all_vm_list_by_progress. " \
+ "Return_value = " + str(result)
LOG.info(msg)
# UUID to see one by one, and look for the re-processing target record
for row in result:
msg = "Do get_vm_list_by_uuid_and_progress_sorted."
LOG.info(msg)
result2 = dbapi.get_vm_list_by_uuid_and_progress_sorted(
session,
row.uuid
)
msg = "Succeeded in get_vm_list_by_uuid_and_progress_sorted. " \
+ "Return_value = " + str(result2)
LOG.info(msg)
row_cnt = 0
for row2 in result2:
# First row is the re-processing target
if row_cnt == 0:
return_value.append(row2)
# Update progress that is not the re-processing target
else:
update_val = {'progress': 4,
'update_at': datetime.datetime.now(),
'delete_at': datetime.datetime.now()
}
msg = "Do update_vm_list_by_id_dict."
LOG.info(msg)
dbapi.update_vm_list_by_id_dict(
session,
row2.id,
update_val
)
msg = "Succeeded in update_vm_list_by_id_dict."
LOG.info(msg)
row_cnt += 1
return return_value
def handle_pending_instances(self):
"""
method description.
recovery-controller I do the recovery
of outstanding recovery VM at startup.
"""
try:
self.rc_config.set_request_context()
db_engine = dbapi.get_engine(self.rc_config)
session = dbapi.get_session(db_engine)
self._update_old_records_vm_list(session)
result = self._find_reprocessing_records_vm_list(session)
# [recover_starter]section
recover_starter_dic = self.rc_config.get_value("recover_starter")
semaphore_multiplicity = recover_starter_dic.get(
"semaphore_multiplicity")
# Set multiplicity by semaphore_multiplicity
sem = threading.Semaphore(int(semaphore_multiplicity))
# Execute vm_recovery_worker
if len(result) > 0:
# Execute the required number
for row in result:
vm_uuid = row.uuid
primary_id = row.id
msg = "Run thread rc_worker.recovery_instance." \
+ " vm_uuid=" + vm_uuid \
+ " primary_id=" + str(primary_id)
LOG.info(msg)
thread_name = self.rc_util.make_thread_name(
VM_LIST, primary_id)
threading.Thread(
target=self.rc_worker.recovery_instance,
name=thread_name,
args=(vm_uuid, primary_id, sem)).start()
# Imperfect_recover
else:
return
return
except KeyError:
error_type, error_value, traceback_ = sys.exc_info()
tb_list = traceback.format_tb(traceback_)
LOG.error(error_type)
LOG.error(error_value)
for tb in tb_list:
LOG.error(tb)
return
except:
error_type, error_value, traceback_ = sys.exc_info()
tb_list = traceback.format_tb(traceback_)
LOG.error(error_type)
LOG.error(error_value)
for tb in tb_list:
LOG.error(tb)
return
|
import unittest
from mnts.filters import MNTSFilterGraph
from mnts.filters.intensity import *
from mnts.filters.geom import *
from pathlib import Path
test_yaml =\
"""
SpatialNorm:
out_spacing: [0.5, 0.5, 0]
HuangThresholding:
closing_kernel_size: 10
_ext:
upstream: 0
is_exit: True
N4ITKBiasFieldCorrection:
_ext:
upstream: [0, 1]
NyulNormalizer:
_ext:
upstream: [2, 1]
is_exit: True
"""
def create_graph() -> MNTSFilterGraph:
r"""Create the normalization graph"""
G = MNTSFilterGraph()
# Add filter nodes to the graph.
G.add_node(SpatialNorm(out_spacing=[0.4492, 0.4492, 4]))
G.add_node(HuangThresholding(closing_kernel_size=10), 0, is_exit=True) # Use mask to better match the histograms
G.add_node(N4ITKBiasFieldCorrection(), [0, 1])
G.add_node(NyulNormalizer(), [2, 1], is_exit=True)
return G
class TestGraph(unittest.TestCase):
def test_graph_print(self):
G = create_graph()
self.assertIsInstance(str(G), str)
def test_graph_from_yaml(self):
# Create file from str
with open('_test_graph.yaml', 'w') as f:
f.write(test_yaml)
G = MNTSFilterGraph.CreateGraphFromYAML('_test_graph.yaml')
        self.assertIsInstance(G, MNTSFilterGraph)
print(G)
Path('default.log').unlink()
Path('_test_graph.yaml').unlink()
if __name__ == '__main__':
unittest.main()
|
r"""Incompressible Hyperelasticity
This example solves the governing equations describing the mechanical
response of a nearly incompressible elastomer using a mixed formulation.
The elastomer, assumed to be made up of a Neo-Hookean solid, occupies the domain
:math:`\Omega` in the undeformed configuration, with the internal stored energy
given by
.. math::
\int_\Omega\Psi(\mathbf{F})d\mathbf{X};\qquad\Psi(\mathbf{F}) = \mu/2 (I_1 - 3) - \mu \ln(J) + \lambda/2(J-1)^2
where :math:`I_1 = \mathbf{F}:\mathbf{F} = tr(\mathbf{F}^T\mathbf{F})` and
:math:`J = \text{det}(\mathbf{F})` and :math:`\mathbf{F} = \mathbf{I} +
\nabla\mathbf{u}` is the deformation gradient tensor. The standard variational
formulation in the absence of body force and surface traction can be written as
.. math::
\min_{u\in\mathcal{K}} \int_\Omega \Psi(\mathbf{F})d\mathbf{X}
where :math:`\mathcal{K}` is a set of kinematically admissible fields that
satisfy the Dirichlet boundary condition(s). However, this becomes ill-posed
when :math:`\lambda/\mu\rightarrow +\infty`. In order to circumvent this issue,
we consider a mixed variational formulation, namely
.. math::
\min_{\mathbf{u}\in\mathcal{K}}\max_{p}\int_\Omega \Psi^*(\mathbf{F}, p)d\mathbf{X}
where
.. math::
\Psi^*(\mathbf{F}, p) = p (J-J^*) + \mu/2(I_1-3) - \mu\ln(J^*) + \lambda/2(J^*-1)^2
and :math:`J^* = (\lambda + p + \sqrt{(\lambda + p)^2 + 4\lambda\mu})/(2\lambda)`.
The numerical solution to the above problem requires choosing stable finite
element spaces for the displacement (:math:`\mathbf{u}`) and pressure
(:math:`p`). The corresponding weak form is given by
find :math:`(\mathbf{u},p)\in V_1 \times V_2` such that
.. math::
\mathcal{F}_1 = \int_\Omega \left( \mu\mathbf{F} + pJ\mathbf{F}^{-T} \right) : \nabla\mathbf{v}d\mathbf{X} = 0
and
.. math::
\mathcal{F}_2 = \int_\Omega \frac{\partial\Psi^*}{\partial p}q\ d\mathbf{X} = 0
for all :math:`(\mathbf{v},q)\in V_1 \times V_2` and
.. math::
V_1 = \left\{\mathbf{u}\in (H^1(\Omega))^3 : \mathbf{u}\in\mathcal{K} \right\}
and
.. math::
V_2 = \left\{ p : p\in L^2(\Omega) \right\}
Here, inspired by its counterpart in fluid mechanics, we choose the lowest
order Taylor-Hood element (:math:`\mathbb{P}_2-\mathbb{P}_1`) which satisfies the
Babuska-Brezzi condition. For more details on the derivation, see
http://pamies.cee.illinois.edu/Publications_files/IJNME_2015.pdf#page=4&zoom=100,312,414
The weak forms above result in a system of nonlinear algebraic equations for
the degrees of freedom , and therefore needs to be solved using a nonlinear
solver. In the example below, we linearize :math:`\mathcal{F}_1` and
:math:`\mathcal{F}_2` and setup solve for incremental displacement and pressure
dofs.
The following demonstrates uniaxial tension in one direction, with the lateral
edges left free. The geometry is a homogeneous unit cube made up
of a Neo-Hookean solid with :math:`\lambda/\mu = 1000`. For this loading and
geometry, in the limit of :math:`\lambda/\mu\rightarrow +\infty`, the
deformation gradient would be given by :math:`\mathbf{F} =
\text{diag}(\ell,1/\sqrt{\ell},1/\sqrt{\ell})` and the pressure field admits a closed
form solution :math:`p=-\mu/\ell` where :math:`\ell` is the applied stretch.
"""
from numpy import (einsum, linalg as nla, zeros,
zeros_like, concatenate, split as npsplit,
hstack, abs as npabs, arange, sqrt)
from scipy.sparse import bmat
from skfem.helpers import grad, transpose, det, inv
from skfem import *
mu, lmbda = 1., 1.e3
def F1(w):
u = w["disp"]
p = w["press"]
F = zeros_like(grad(u))
for i in range(3):
F[i, i] += 1.
F += grad(u)
J = det(F)
Finv = inv(F)
return p * J * transpose(Finv) + mu * F
def F2(w):
u = w["disp"]
p = w["press"].value
F = zeros_like(grad(u))
for i in range(3):
F[i, i] += 1.
F += grad(u)
J = det(F)
Js = .5 * (lmbda + p + 2. * sqrt(lmbda * mu + .25 * (lmbda + p) ** 2)) / lmbda
dJsdp = ((.25 * lmbda + .25 * p + .5 * sqrt(lmbda * mu + .25 * (lmbda + p) ** 2))
/ (lmbda * sqrt(lmbda * mu + .25 * (lmbda + p) ** 2)))
return J - (Js + (p + mu / Js - lmbda * (Js - 1)) * dJsdp)
def A11(w):
u = w["disp"]
p = w["press"]
F = zeros_like(grad(u))
eye = zeros_like(grad(u))
for i in range(3):
F[i, i] += 1.
eye[i, i] += 1.
F += grad(u)
J = det(F)
Finv= inv(F)
L = (p * J * einsum("lk...,ji...->ijkl...", Finv, Finv)
- p * J * einsum("jk...,li...->ijkl...", Finv, Finv)
+ mu * einsum("ik...,jl...->ijkl...", eye, eye))
return L
def A12(w):
u = w["disp"]
F = zeros_like(grad(u))
for i in range(3):
F[i, i] += 1.
F += grad(u)
J = det(F)
Finv = inv(F)
return J * transpose(Finv)
def A22(w):
u = w["disp"]
p = w["press"].value
Js = .5 * (lmbda + p + 2. * sqrt(lmbda * mu + .25 * (lmbda + p) ** 2)) / lmbda
dJsdp = ((.25 * lmbda + .25 * p + .5 * sqrt(lmbda * mu + .25 * (lmbda + p) ** 2))
/ (lmbda * sqrt(lmbda * mu + .25 * (lmbda + p) ** 2)))
d2Jdp2 = .25 * mu / (lmbda * mu + .25 * (lmbda + p) ** 2) ** (3/2)
L = (-2. * dJsdp - p * d2Jdp2 + mu / Js ** 2 * dJsdp ** 2 - mu / Js * d2Jdp2
+ lmbda * (Js - 1.) * d2Jdp2 + lmbda * dJsdp ** 2)
return L
mesh = MeshTet().refined(2)
uelem = ElementVectorH1(ElementTetP2())
pelem = ElementTetP1()
elems = {
"u": uelem,
"p": pelem
}
basis = {
field: InteriorBasis(mesh, e, intorder=2)
for field, e in elems.items()
}
du = zeros(basis["u"].N)
dp = zeros(basis["p"].N)
stretch_ = 1.
ddofs = [
basis["u"].find_dofs(
{"left": mesh.facets_satisfying(lambda x: x[0] < 1.e-6)},
skip=["u^2", "u^3"]
),
basis["u"].find_dofs(
{"bottom": mesh.facets_satisfying(lambda x: x[1] < 1.e-6)},
skip=["u^1", "u^3"]
),
basis["u"].find_dofs(
{"back": mesh.facets_satisfying(lambda x: x[2] < 1.e-6)},
skip=["u^1", "u^2"]
),
basis["u"].find_dofs(
{"front": mesh.facets_satisfying(lambda x: npabs(x[2] - 1.) < 1e-6)},
skip=["u^1", "u^2"]
)
]
dofs = {}
for dof in ddofs:
dofs.update(dof)
du[dofs["left"].all()] = 0.
du[dofs["bottom"].all()] = 0.
du[dofs["back"].all()] = 0.
du[dofs["front"].all()] = stretch_
I = hstack((
basis["u"].complement_dofs(dofs),
basis["u"].N + arange(basis["p"].N)
))
@LinearForm
def a1(v, w):
return einsum("ij...,ij...", F1(w), grad(v))
@LinearForm
def a2(v, w):
return F2(w) * v
@BilinearForm
def b11(u, v, w):
return einsum("ijkl...,ij...,kl...", A11(w), grad(u), grad(v))
@BilinearForm
def b12(u, v, w):
return einsum("ij...,ij...", A12(w), grad(v)) * u
@BilinearForm
def b22(u, v, w):
return A22(w) * u * v
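# Newton loop (a sketch of what follows): at each iterate, assemble the
# tangent blocks K11, K12, K22 and the residual forms a1, a2, condense out
# the Dirichlet dofs, solve for the displacement/pressure increments, and
# stop once both increment norms fall below tolerance.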
for itr in range(12):
uv = basis["u"].interpolate(du)
pv = basis["p"].interpolate(dp)
K11 = asm(b11, basis["u"], basis["u"], disp=uv, press=pv)
K12 = asm(b12, basis["p"], basis["u"], disp=uv, press=pv)
K22 = asm(b22, basis["p"], basis["p"], disp=uv, press=pv)
f = concatenate((
asm(a1, basis["u"], disp=uv, press=pv),
asm(a2, basis["p"], disp=uv, press=pv)
))
K = bmat(
[[K11, K12],
[K12.T, K22]], "csr"
)
uvp = solve(*condense(K, -f, I=I), use_umfpack=True)
delu, delp = npsplit(uvp, [du.shape[0]])
du += delu
dp += delp
normu = nla.norm(delu)
normp = nla.norm(delp)
print(f"{itr+1}, norm_du: {normu}, norm_dp: {normp}")
if normu < 1.e-8 and normp < 1.e-8:
break
if __name__ == "__main__":
mesh.save(
"example36_results.xdmf",
{"u": du[basis["u"].nodal_dofs].T, "p": dp[basis["p"].nodal_dofs[0]]},
)
|
import torch.nn as nn
import math
class Embeddings(nn.Module):
def __init__(self, d_model, vocab_size, padding_idx):
super(Embeddings, self).__init__()
self.lut = nn.Embedding(vocab_size, d_model, padding_idx=padding_idx)
self.d_model = d_model
def forward(self, x):
# in "Attention Is All You Need" embeddings are multiplied my sqrt(d_model)
return self.lut(x) * math.sqrt(self.d_model)
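# Usage sketch (hypothetical sizes, assumes `import torch`; not part of the module):
#   emb = Embeddings(d_model=512, vocab_size=10000, padding_idx=0)
#   tokens = torch.zeros(2, 7, dtype=torch.long)   # (batch, seq_len)
#   out = emb(tokens)                              # (2, 7, 512), scaled by sqrt(512)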
|
# Copyright 2020 Kapil Thangavelu
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
from .common import BaseTest
class SARTest(BaseTest):
def test_query(self):
factory = self.replay_flight_data('test_sar_query_app')
p = self.load_policy({
'name': 'test-sar',
'resource': 'aws.serverless-app'},
session_factory=factory)
resources = p.run()
self.assertEqual(len(resources), 1)
self.assertEqual(resources[0]['Name'], 'GitterArchive')
def test_cross_account(self):
factory = self.replay_flight_data('test_sar_cross_account')
p = self.load_policy({
'name': 'test-sar',
'resource': 'aws.serverless-app',
'filters': [{
'type': 'cross-account',
'whitelist_orgids': ['o-4adkskbcff']
}]},
session_factory=factory)
resources = p.run()
self.assertEqual(len(resources), 1)
self.maxDiff = None
self.assertEqual(
resources[0]['CrossAccountViolations'], [
{'Actions': ['serverlessrepo:Deploy'],
'Effect': 'Allow',
'Principal': {'AWS': ['112233445566']},
'StatementId': 'b364d84f-62d2-411c-9787-3636b2b1975c'}
])
|
#!/usr/bin/env python3
# Copyright (c) 2020 Nick Downing
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# put utils into path
# temporary until we have proper Python packaging
import os.path
import sys
dirname = os.path.dirname(__file__)
sys.path.append(os.path.join(dirname, '..'))
import math
import mpmath
import numpy
import utils.yaml_io
from utils.poly_fixed import poly_fixed
from utils.to_fixed import to_fixed
from utils.to_hex import to_hex
EXIT_SUCCESS = 0
EXIT_FAILURE = 1
# independent variable in 16:16 fixed point
MIRED_EXP = -16
# results in 2:30 fixed point
RGB_EXP = -30
mpmath.mp.prec = 106
#numpy.set_printoptions(threshold = numpy.inf)
if len(sys.argv) < 3:
print(f'usage: {sys.argv[0]:s} mired_to_rgb_fit_in.yml device')
sys.exit(EXIT_FAILURE)
mired_to_rgb_fit_in = sys.argv[1]
device = sys.argv[2]
mired_to_rgb_fit = utils.yaml_io._import(
utils.yaml_io.read_file(mired_to_rgb_fit_in)
)
a = mired_to_rgb_fit['a']
b_red = mired_to_rgb_fit['b_red']
b_green = mired_to_rgb_fit['b_green']
b_blue = mired_to_rgb_fit['b_blue']
c_blue = mired_to_rgb_fit['c_blue']
d = mired_to_rgb_fit['d']
p_red_ab = mired_to_rgb_fit['p_red_ab']
#p_red_bd = mired_to_rgb_fit['p_red_bd']
p_green_ab = mired_to_rgb_fit['p_green_ab']
p_green_bd = mired_to_rgb_fit['p_green_bd']
#p_blue_ab = mired_to_rgb_fit['p_blue_ab']
p_blue_bc = mired_to_rgb_fit['p_blue_bc']
#p_blue_cd = mired_to_rgb_fit['p_blue_cd']
p_red_ab, p_red_ab_shr, _ = poly_fixed(
p_red_ab,
a,
b_red,
MIRED_EXP,
31,
RGB_EXP
)
p_green_ab, p_green_ab_shr, _ = poly_fixed(
p_green_ab,
a,
b_green,
MIRED_EXP,
31,
RGB_EXP
)
p_green_bd, p_green_bd_shr, _ = poly_fixed(
p_green_bd,
b_green,
d,
MIRED_EXP,
31,
RGB_EXP
)
p_blue_bc, p_blue_bc_shr, _ = poly_fixed(
p_blue_bc,
b_blue,
c_blue,
MIRED_EXP,
31,
RGB_EXP
)
sys.stdout.write(
sys.stdin.read().format(
device = device,
b_red = to_fixed(b_red, MIRED_EXP),
b_green = to_fixed(b_green, MIRED_EXP),
b_blue = to_fixed(b_blue, MIRED_EXP),
c_blue = to_fixed(c_blue, MIRED_EXP),
p_red_ab = ', '.join([f'{to_hex(p_red_ab[i]):s}LL' for i in range(p_red_ab.shape[0])]),
p_green_ab = ', '.join([f'{to_hex(p_green_ab[i]):s}LL' for i in range(p_green_ab.shape[0])]),
p_green_bd = ', '.join([f'{to_hex(p_green_bd[i]):s}LL' for i in range(p_green_bd.shape[0])]),
p_blue_bc = ', '.join([f'{to_hex(p_blue_bc[i]):s}LL' for i in range(p_blue_bc.shape[0])]),
p_red_ab_shr = ', '.join([str(p_red_ab_shr[i]) for i in range(p_red_ab_shr.shape[0])]),
p_green_ab_shr = ', '.join([str(p_green_ab_shr[i]) for i in range(p_green_ab_shr.shape[0])]),
p_green_bd_shr = ', '.join([str(p_green_bd_shr[i]) for i in range(p_green_bd_shr.shape[0])]),
p_blue_bc_shr = ', '.join([str(p_blue_bc_shr[i]) for i in range(p_blue_bc_shr.shape[0])])
)
)
|
import json
import time
import requests
SERVER_URL = 'http://localhost:5000'
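# Experiment driver: each round POSTs to /experiment/start, polls
# /experiment/status until it reports 'complete', then snapshots /state
# into a per-round JSON result file via get_and_save_data().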
def get_and_save_data(num_agents, env_name, round, update_freq, DQL_TYPE:str='ALL'):
print('Saving Experiment Data for Agents:{}, env:{}, round: {}, tau:{}'.format(num_agents, env_name, round, update_freq))
    r = requests.get(SERVER_URL + '/state')
state = r.json()
# write to file
with open('result-tau-{}-update-{}-env-{}-agents-{}-round-{}.json'.format(update_freq, DQL_TYPE, env_name, num_agents, round), 'w') as f:
json.dump(state, f, indent=4)
return
def experiment(num_agents, env_name, update_frequency, dql_type):
requests.get(SERVER_URL+"/state/clear")
requests.get(SERVER_URL+"/q/clear")
for i in range(10):
print('Start Experiment round: {}'.format(i))
r = requests.post(SERVER_URL+"/experiment/start", json={"num_agents": num_agents,
"env_name": env_name,
"update_freq": update_frequency})
time.sleep(3)
# is_complete = False
while True:
r = requests.get(SERVER_URL+"/experiment/status")
value = r.json()
if value.get('complete', False):
break
else:
time.sleep(1)
get_and_save_data(num_agents, env_name, i, update_frequency, dql_type)
requests.get(SERVER_URL+"/state/clear")
requests.get(SERVER_URL+"/q/clear")
return
def set_dql_type(dql_type:str='ALL'):
if dql_type == 'Partial':
r = requests.post(SERVER_URL+"/DQL/all", json={"DQL_ALL": False})
else:
r = requests.post(SERVER_URL+"/DQL/all", json={"DQL_ALL": True})
print(r.json())
return
if __name__ == '__main__':
set_dql_type('ALL')
experiment(1, 'Taxi-v2', 10, 'ALL')
experiment(2, 'Taxi-v2', 10, 'ALL')
experiment(4, 'Taxi-v2', 10, 'ALL')
experiment(8, 'Taxi-v2', 10, 'ALL')
set_dql_type('Partial')
experiment(1, 'Taxi-v2', 10, 'Partial')
experiment(2, 'Taxi-v2', 10, 'Partial')
experiment(4, 'Taxi-v2', 10, 'Partial')
experiment(8, 'Taxi-v2', 10, 'Partial')
set_dql_type('ALL')
experiment(1, 'Taxi-v2', 50, 'ALL')
experiment(2, 'Taxi-v2', 50, 'ALL')
experiment(4, 'Taxi-v2', 50, 'ALL')
experiment(8, 'Taxi-v2', 50, 'ALL')
experiment(1, 'Taxi-v2', 100, 'ALL')
experiment(2, 'Taxi-v2', 100, 'ALL')
experiment(4, 'Taxi-v2', 100, 'ALL')
experiment(8, 'Taxi-v2', 100, 'ALL')
set_dql_type('Partial')
experiment(1, 'Taxi-v2', 50, 'Partial')
experiment(2, 'Taxi-v2', 50, 'Partial')
experiment(4, 'Taxi-v2', 50, 'Partial')
experiment(8, 'Taxi-v2', 50, 'Partial')
experiment(1, 'Taxi-v2', 100, 'Partial')
experiment(2, 'Taxi-v2', 100, 'Partial')
experiment(4, 'Taxi-v2', 100, 'Partial')
experiment(8, 'Taxi-v2', 100, 'Partial')
# CartPole with adjusted Tau
# experiment(1, 'CartPole-v1', 10)
# experiment(2, 'CartPole-v1', 10)
# experiment(4, 'CartPole-v1', 10)
# experiment(8, 'CartPole-v1', 10)
set_dql_type('ALL')
experiment(1, 'CartPole-v1', 50, 'ALL')
experiment(2, 'CartPole-v1', 50, 'ALL')
experiment(4, 'CartPole-v1', 50, 'ALL')
experiment(8, 'CartPole-v1', 50, 'ALL')
experiment(1, 'CartPole-v1', 100, 'ALL')
experiment(2, 'CartPole-v1', 100, 'ALL')
experiment(4, 'CartPole-v1', 100, 'ALL')
experiment(8, 'CartPole-v1', 100, 'ALL')
set_dql_type('Partial')
experiment(1, 'CartPole-v1', 50, 'Partial')
experiment(2, 'CartPole-v1', 50, 'Partial')
experiment(4, 'CartPole-v1', 50, 'Partial')
experiment(8, 'CartPole-v1', 50, 'Partial')
experiment(1, 'CartPole-v1', 100, 'Partial')
experiment(2, 'CartPole-v1', 100, 'Partial')
experiment(4, 'CartPole-v1', 100, 'Partial')
experiment(8, 'CartPole-v1', 100, 'Partial')
|
#!/usr/bin/env python
import os, subprocess
outfile = "DBS2012D_cache.txt"
sdbs = 'dbs search --query="find run,lumi where dataset =\'/MinimumBias/Run2012D-v1/RAW\' and run = RUNNUMBER order by lumi"'
srunlist = "208686 208551 208541 208540 208538 208535 208509 208487 208486 208429 208428 208427 208407 208406 208402 208397 208395 208394 208393 208392 208391 208390 208357 208353 208352 208351 208341 208339 208307 208304 208300 207924 207922 207921 207920 207905 207898 207897 207889 207887 207886 207885 207884 207883 207882 207875 207813 207790 207789 207779 207714 207518 207517 207515 207492 207491 207490 207488 207487 207477 207469 207468 207454 207398 207397 207372 207371 207328 207320 207316 207299 207279 207273 207269 207233 207231 207222 207221 207220 207219 207217 207214 207100 207099 206940 206939 206906 206901 206897 206869 206868 206867 206866 206859 206745 206744 206605 206598 206596 206595 206594 206575 206574 206573 206572 206550 206542 206539 206513 206512 206484 206478 206477 206476 206466 206448 206446 206401 206391 206390 206389 206331 206304 206303 206302 206297 206258 206257 206246 206245 206243 206210 206208 206207 206199 206188 206187 206102 206098 206088 206066 205921 205908 205834 205833 205826 205781 205777 205774 205718 205694 205690 205683 205667 205666 205627 205620 205618 205617 205614 205605 205600 205599 205598 205595 205526 205519 205515 205344 205339 205311 205310 205303 205238 205236 205233 205217 205193 205158 205111 205086 204601 204600 204599 204577 204576 204567 204566 204565 204564 204563 204555 204554 204553 204552 204551 204545 204544 204541 204511 204506 204250 204238 204114 204113 204101 204100 203994 203992 203991 203987 203986 203985 203981 203980 203912 203909 203894 203853 203835 203834 203833 203832 203830 203780 203778 203777"
#mylist = []
mylist = srunlist.split(" ")
"""
Main Script
"""
if __name__ == "__main__":
myfile = open(outfile, "w")
for run in mylist:
if run.isdigit():
thisrunlumi = run + " "
thisProcess = sdbs.replace('RUNNUMBER', run)
            print(thisProcess)
if os.path.exists("tempFile.stdout") is True: os.remove("tempFile.stdout")
outFile = "tempFile.stdout"
outptr = open(outFile, "w")
if os.path.exists("tempFile.stderr") is True: os.remove("tempFile.stderr")
errFile = "tempFile.stderr"
errptr = open(errFile, "w")
retval = subprocess.call(thisProcess, shell=True, stdout=outptr, stderr=errptr)
errptr.close()
outptr.close()
            if retval != 0:
                errptr = open(errFile, "r")
                errData = errptr.read()
                errptr.close()
                raise Exception("Error executing command: " + thisProcess + "\n" + errData)
            else:
                print(" Succeeded! ")
outptr = open(outFile, "r")
outData = outptr.readlines()
outptr.close()
for line in outData:
if run in line:
if len(line.split()) > 1:
thisrunlumi += line.split()[1] + " "
myfile.write(thisrunlumi + '\n')
myfile.close()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Nov 4 20:25:09 2018
@author: Kazuki
"""
import numpy as np
import pandas as pd
#import os
#from multiprocessing import cpu_count, Pool
#import utils
def multi_weighted_logloss(y_true, y_pred, myweight=None, based_true=True):
"""
@author olivier https://www.kaggle.com/ogrellier
multi logloss for PLAsTiCC challenge
"""
# class_weights taken from Giba's topic : https://www.kaggle.com/titericz
# https://www.kaggle.com/c/PLAsTiCC-2018/discussion/67194
# with Kyle Boone's post https://www.kaggle.com/kyleboone
classes = [6, 15, 16, 42, 52, 53, 62, 64, 65, 67, 88, 90, 92, 95]
class_weight = {6: 1, 15: 2, 16: 1, 42: 1, 52: 1, 53: 1, 62: 1, 64: 2, 65: 1, 67: 1, 88: 1, 90: 1, 92: 1, 95: 1}
if y_true.shape[1] > 14:
classes.append(99)
class_weight[99] = 2
if myweight is None:
myweight = np.ones(y_true.shape[1])
y_p = y_pred * myweight
# normalize
y_p /= y_p.sum(1)[:,None]
    # Normalize rows and clip y_preds to [1e-15, 1 - 1e-15] to avoid log(0)
    y_p = np.clip(a=y_p, a_min=1e-15, a_max=1 - 1e-15)
# Transform to log
y_p_log = np.log(y_p)
    # Get the log for ones; .values is used to drop the index of DataFrames
    # Exclude class 99 for now: since there is no class 99 in the training set,
    # it gets special treatment
y_log_ones = np.sum(y_true * y_p_log, axis=0)
# Get the number of positives for each class
    if based_true:
nb_pos = y_true.sum(axis=0).astype(float)
else:
nb_pos = pd.DataFrame(y_pred).sum(axis=0).astype(float)
# Weight average and divide by the number of positives
class_arr = np.array([class_weight[k] for k in sorted(class_weight.keys())])
y_w = y_log_ones * class_arr / nb_pos
loss = - np.nansum(y_w) / np.sum(class_arr)
return loss
def calc_gradient(f, X):
"""
calc_gradient
偏微分を行う関数
関数fを変数xの各要素で偏微分した結果をベクトルにした勾配を返す
@params
f: 対象となる関数
X: 関数fの引数のベクトル(numpy.array)
@return
gradient: 勾配(numpy.array)
"""
h = 1e-4
gradient = np.zeros_like(X)
# 各変数についての偏微分を計算する
for i in range(X.size):
store_X = X[:]
# f(x+h)
X[i] += h
f_x_plus_h = f(X)
X = store_X[:]
# f(x-h)
X[i] -= h
f_x_minus_h = f(X)
# 偏微分
gradient[i] = (f_x_plus_h - f_x_minus_h) / (2 * h)
return gradient
def gradient_descent(f, X, learning_rate, max_iter, is_print=True, verbose_eval=100):
"""
gradient_descent
最急降下法を行う関数
@params
f: 対象となる関数
X: 関数fの引数のベクトル(numpy.array)
learning_rate: 学習率
max_iter: 繰り返し回数
@return
X: 関数の出力を最小にする(であろう)引数(numpy.array)
"""
sw_break = False
score_bk = 9999
for i in range(max_iter):
X -= (learning_rate * calc_gradient(f, X))
score = f(X)
if score_bk <= score:
sw_break = True
break
score_bk = score
if is_print and i%verbose_eval==0:
print("[{:3d}] X = {}, f(X) = {:.7f}".format(i, X, score))
if is_print and sw_break:
print("[{:3d}] X = {}, f(X) = {:.7f}".format(i, X, score))
return X
def get_weight(y_true, y_pred, weight=None, eta=1, nround=100,
is_print=True, verbose_eval=50, based_true=True):
M = y_true.shape[1]
if weight is None:
weight = np.ones(M)
    f = lambda w: multi_weighted_logloss(y_true, y_pred, w, based_true=based_true)
    weight = gradient_descent(f, weight, learning_rate=eta, max_iter=nround,
                              is_print=is_print, verbose_eval=verbose_eval)
    return weight
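# Usage sketch (hypothetical arrays; y_true one-hot, y_pred row-normalized,
# columns in the class order used by multi_weighted_logloss):
#   w = get_weight(y_true, y_pred, eta=1, nround=100)
#   calibrated = y_pred * w
#   calibrated /= calibrated.sum(1)[:, None]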
|
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = 'x5!1t^*mmzo!i0uii)wm_dh7v_+891r_(n=(m_p(cbp5x^ckla'
DEBUG = True
ALLOWED_HOSTS = []
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'debug_toolbar',
'fix_me',
]
INTERNAL_IPS = ['127.0.0.1']
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware',
]
ROOT_URLCONF = 'bad_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'bad_project.wsgi.application'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib'
'.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib'
'.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib'
'.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib'
'.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
|
# -*- coding: utf-8 -*-
import os
import time
from collections import OrderedDict
import json
import pypytorch as t
def extract_modules(o):
    named_modules = OrderedDict()
    for key in o.__dict__:
        if key in ['weight', 'bias']:
            continue
        value = o.__dict__[key]
        if isinstance(value, Module):
            named_modules[key] = value
    return named_modules
class Module(object):
def __init__(self):
self._name = self.__class__.__name__
self._modules = []
self._named_modules = OrderedDict()
self._parameters = []
self._named_parameters = OrderedDict()
self.training = True
@property
def modules(self):
assert hasattr(self, '_modules'), 'should call super(Class, self).__init__() in __init__'
# self._modules = self._modules if self._modules else tuple(extract_modules(self))
self._named_modules = self._named_modules if self._named_modules else extract_modules(self)
self._modules = list(self._named_modules.values())
return self._modules
    def named_modules(self):
        assert hasattr(self, '_named_modules'), 'should call super(Class, self).__init__() in __init__'
        self.modules  # property access populates _named_modules
        return self._named_modules
def prepare_modules_for_train(self):
self.training = True
self.prepare_modules()
def prepare_modules_for_eval(self):
self.training = False
self.prepare_modules()
def prepare_modules(self):
self.modules
def eval(self):
self.prepare_modules_for_eval()
for module in self.modules:
module.eval()
def train(self):
self.prepare_modules_for_train()
for module in self.modules:
module.train()
def forward(self, *args, **kwargs):
raise NotImplementedError
    def __call__(self, *args, **kwargs):
        assert hasattr(self, '_modules'), 'module must inherit pypytorch.nn.module.Module'
        self.modules  # refresh the module registry before dispatching
        return self.forward(*args, **kwargs)
@property
def name(self):
return self._name
def parameters(self):
self.modules
if self._parameters:
return self._parameters
if hasattr(self, 'weight') and getattr(self, 'weight') is not None:
self._parameters.append(getattr(self, 'weight'))
if hasattr(self, 'bias') and getattr(self, 'bias') is not None:
self._parameters.append(getattr(self, 'bias'))
for module in self._modules:
self._parameters.extend(module.parameters())
return self._parameters
def zero_grad(self):
for param in self.parameters():
param.zero_grad()
def save(self, epoch, loss, max_item=5, root='checkpoints/'):
assert max_item > 0, 'max_item must be gt 0'
model_dir = os.path.join(root, self.name)
current_time = time.strftime('%Y-%m-%d_%H-%M-%S')
if not os.path.exists(model_dir):
os.makedirs(model_dir)
weight_path = os.path.join(model_dir, current_time + '_' + str(epoch) + '.pth')
checkpoints_path = os.path.join(model_dir, 'checkpoints.json')
if not os.path.exists(checkpoints_path):
with open(checkpoints_path, 'w') as f:
f.write('{}')
with open(checkpoints_path, 'r') as fr:
data = json.load(fr)
        if len(data) >= max_item:
data = sorted(data.items(), key=lambda x: x[1])
name = data.pop()[0]
full_name = os.path.join(model_dir, name)
os.remove(full_name)
data = dict(data)
with open(checkpoints_path, 'w') as fw:
json.dump(data, fw, indent=4)
with open(checkpoints_path, 'r') as f:
data = json.load(f)
data = dict(sorted(data.items(), key=lambda x: x[1]))
t.save(self, weight_path)
# print(loss.data.tolist())
data[current_time + '_' + str(epoch) + '.pth'] = loss.data.tolist()
with open(checkpoints_path, 'w') as f:
json.dump(data, f, indent=4)
def _description(self, num_space=0):
self.modules
indentation = ' ' * 2
space = ' ' * num_space
s = self._name + '(\n'
for key, value in self.named_modules().items():
value_str = str(value) if not value.modules else value._description(num_space=num_space * 2 if num_space else 2)
s += space + indentation + '(' + key + '): ' + value_str + '\n'
s += space + ')'
return s
def __str__(self):
return self._description()
def __repr__(self):
return str(self)
class Sequential(Module):
def __init__(self, *modules):
super(Sequential, self).__init__()
assert len(modules) != 0, "At least need one module"
for i, mod in enumerate(modules):
setattr(self, str(i), mod)
def forward(self, x):
out = x
for mod in self.modules:
out = mod(out)
return out
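# Usage sketch (assumes pypytorch provides Linear/ReLU layers analogous to
# torch.nn; the layer names here are illustrative, not confirmed by this module):
#   net = Sequential(Linear(4, 8), ReLU(), Linear(8, 2))
#   print(net)                # nested description via _description/__str__
#   params = net.parameters()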
|
from pylibpd import *
from threading import Thread
import pyaudio
import time
class AudioEngine(Thread):
def __init__(self):
Thread.__init__(self)
self.daemon = True
self.running = True
def run(self):
p = pyaudio.PyAudio()
ch = 1
sr = 44100
tpb = 6
bs = libpd_blocksize()
stream = p.open(format=pyaudio.paInt16,
channels=ch,
rate=sr,
input=True,
output=True,
frames_per_buffer=bs * tpb)
m = PdManager(ch, ch, sr, 1)
libpd_open_patch('two_ops1poly.pd', '.') # './audioEngine' '.'
data = stream.read(bs)
while self.running:
outp = m.process(data)
stream.write(outp.tobytes())
stream.close()
p.terminate()
libpd_release()
def stop_running(self):
self.running = False
def note_on(self, note):
libpd_noteon(1, note, 64)
def note_off(self, note):
libpd_noteon(1, note, 0)
    def send_float(self, float_to_send):
        libpd_float('spam', float_to_send)
    def send_symbol(self, symbol_to_send):
        libpd_symbol('spam', symbol_to_send)
#For testing purposes:
# a = AudioEngine()
# a.start()
# while(1):
#
# print("s")
# # a.note_on()
# libpd_noteon(0, 60, 64)
# time.sleep(1)
# # a.note_off()
# libpd_noteon(0, 60, 0)
# time.sleep(1)
#
# libpd_noteon(0, 60, 0)
|
import pandas as pd
import gc
def load_diff_file(diff_processed_name):
if diff_processed_name.split('.')[1] == 'parquet':
diff_file = pd.read_parquet('path to load diff code file')
else:
diff_file = pd.read_pickle('path to load diff code file')
return diff_file
def create_ultimate_true_link(true_link_name, diff_processed_name, ultimate_true_link_name, save_type):
true_file = pd.read_parquet('path to load true link without diff code')
diff_file = load_diff_file(diff_processed_name)
diff_file = diff_file[['source', 'repo', 'hash', 'changed_files', 'processDiffCode']]
    ultimate_true_link = pd.merge(left=true_file, right=diff_file, how='left', on=['source', 'repo', 'hash'])
print('number of diffs in true links: ', ultimate_true_link[ultimate_true_link['changed_files'].notnull() | ultimate_true_link['processDiffCode'].notnull()].shape)
if save_type == 'parquet':
ultimate_true_link.to_parquet('path to save true link with diff code')
else:
ultimate_true_link.to_pickle('path to save true link with diff code')
def create_ultimate_false_link(false_link_name, diff_processed_name, ultimate_false_link_name, save_type):
false_file = pd.read_parquet('path to load false link without diff code')
diff_file = load_diff_file(diff_processed_name)
diff_file = diff_file[['source', 'repo', 'hash', 'changed_files', 'processDiffCode']]
    ultimate_false_link = pd.merge(left=false_file, right=diff_file, how='left', on=['source', 'repo', 'hash'])
del false_file, diff_file
gc.collect()
print('number of diffs in false links: ', ultimate_false_link[ultimate_false_link['changed_files'].notnull() | ultimate_false_link['processDiffCode'].notnull()].shape)
if ultimate_false_link_name == 'ambari':
ultimate_false_link = ultimate_false_link.sample(n = 50*35589) # 50 times bigger than its true link
if save_type == 'parquet':
ultimate_false_link.to_parquet('path to save false link with diff code')
else:
ultimate_false_link.to_pickle('path to save false link with diff code')
def create_link():
create_ultimate_true_link('netbeans_true_link.parquet', 'netbeans.pickle', 'netbeans', 'parquet')
create_ultimate_false_link('netbeans_false_link.parquet', 'netbeans.pickle', 'netbeans', 'parquet')
print('netbeans done')
create_ultimate_true_link('calcite_true_link.parquet', 'calcite.pickle', 'calcite', 'parquet')
create_ultimate_false_link('calcite_false_link.parquet', 'calcite.pickle', 'calcite', 'parquet')
print('calcite done')
create_ultimate_true_link('beam_true_link.parquet', 'beam.pickle', 'beam', 'parquet')
create_ultimate_false_link('beam_false_link.parquet', 'beam.pickle', 'beam', 'pickle')
print('beam done')
create_ultimate_true_link('flink_true_link.parquet', 'flink.pickle', 'flink', 'parquet')
create_ultimate_false_link('flink_false_link.parquet', 'flink.pickle', 'flink', 'pickle')
print('flink done')
create_ultimate_true_link('airflow_true_link.parquet', 'airflow.parquet', 'airflow', 'parquet')
create_ultimate_false_link('airflow_false_link.parquet', 'airflow.parquet', 'airflow', 'parquet')
print('airflow done')
create_ultimate_true_link('cassandra_true_link.parquet', 'cassandra.parquet', 'cassandra', 'parquet')
create_ultimate_false_link('cassandra_false_link.parquet', 'cassandra.parquet', 'cassandra', 'parquet')
print('cassandra done')
create_ultimate_true_link('freemarker_true_link.parquet', 'freemarker.parquet', 'freemarker', 'parquet')
create_ultimate_false_link('freemarker_false_link.parquet', 'freemarker.parquet', 'freemarker', 'parquet')
print('freemarker done')
create_ultimate_true_link('groovy_true_link.parquet', 'groovy.parquet', 'groovy', 'parquet')
create_ultimate_false_link('groovy_false_link.parquet', 'groovy.parquet', 'groovy', 'parquet')
print('groovy done')
create_ultimate_true_link('ambari_true_link.parquet', 'ambari.parquet', 'ambari', 'parquet')
create_ultimate_false_link('ambari_false_link.parquet', 'ambari.parquet', 'ambari', 'pickle')
print('ambari done')
create_ultimate_true_link('arrow_true_link.parquet', 'arrow.parquet', 'arrow', 'parquet')
create_ultimate_false_link('arrow_false_link.parquet', 'arrow.parquet', 'arrow', 'pickle')
print('arrow done')
create_ultimate_true_link('isis_true_link.parquet', 'isis.parquet', 'isis', 'parquet')
create_ultimate_false_link('isis_false_link.parquet', 'isis.parquet', 'isis', 'parquet')
print('isis done')
create_ultimate_true_link('ignite_true_link.parquet', 'ignite.parquet', 'ignite', 'parquet')
create_ultimate_false_link('ignite_false_link.parquet', 'ignite.parquet', 'ignite', 'parquet')
print('ignite done')
create_link()
|
from __future__ import annotations
import pandas as pd
from pyspark.sql import DataFrame, SparkSession
from sparkypandy._column import Columny
class DataFramy(DataFrame): # type: ignore
@classmethod
def from_spark(cls, df_spark: DataFrame) -> DataFramy:
# noinspection PyProtectedMember
return cls(jdf=df_spark._jdf, sql_ctx=df_spark.sql_ctx)
@classmethod
def from_pandas(cls, spark_session: SparkSession, df_pandas: pd.DataFrame) -> DataFramy:
df_spark = spark_session.createDataFrame(df_pandas)
return cls.from_spark(df_spark)
def to_pandas(self) -> pd.DataFrame:
"""PEP8-compliant alias to toPandas()"""
# noinspection PyTypeChecker
return super().toPandas()
def __getitem__(self, item: str) -> Columny:
if not isinstance(item, str):
raise TypeError(f"Expected a string key, not {item}")
col = super().__getitem__(item=item)
return Columny.from_spark(col=col, df_sparky=self)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-01-06 23:55
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('tests', '0010_auto_20161211_2116'),
]
operations = [
migrations.AlterModelOptions(
name='customflatmenuitem',
options={'ordering': ('sort_order',)},
),
migrations.AlterModelOptions(
name='custommainmenuitem',
options={'ordering': ('sort_order',)},
),
migrations.AlterModelOptions(
name='flatmenucustommenuitem',
options={'ordering': ('sort_order',)},
),
migrations.AlterModelOptions(
name='mainmenucustommenuitem',
options={'ordering': ('sort_order',)},
),
]
|
"""Menu module."""
from brainstorm.utils.colors import menu as menu_color
from PyInquirer import prompt
class MainMenu(object):
"""Menu class. State buttons."""
def __init__(self, answer):
"""Init menu.
Args:
answer: Dict menu
"""
self._play = False
self._stats = False
self._exit = False
button = answer.get("menu")
if button == "play":
self._play = True
elif button == "stats":
self._stats = True
elif button == "exit":
self._exit = True
@property
def play(self):
"""Get play.
Returns:
bool: Play
"""
return self._play
@property
def stats(self):
"""Get stats.
Returns:
bool: Stats
"""
return self._stats
@property
def exit(self):
"""Get stats.
Returns:
bool: Exit
"""
return self._exit
def get_menu():
"""Get menu.
Returns:
class: MainMenu
"""
menu = [
{
"type": "list",
"name": "menu",
"message": "Menu",
"choices": ["Play", "Stats", "Exit"],
"filter": lambda game: game.lower(),
},
]
return MainMenu(
prompt(
menu,
style=menu_color,
),
)
|
from __future__ import print_function
import os
def unpickle(file):
    try:
        import cPickle as pickle  # Python 2
    except ImportError:
        import pickle  # Python 3
    with open(file, 'rb') as fo:
        return pickle.load(fo)
data = unpickle(os.path.join(os.path.expanduser('~/TCDTIMIT/database_binaryViseme/Lipspkr1.pkl')))
print(data.keys())
print(data)
thisN = data['data'].shape[0]
print("This dataset contains ", thisN, " images")
|
# remove & return
m = dict(key='value', key2='value')
v = m.pop('key', 'value')  # remove key and return its value, or the default if missing
print(m, v, sep='\n')
v = m.pop('key2') # remove dict key and return value
print(m, v, sep='\n')
# remove last item
# *note that before Python 3.7 this removes an arbitrary item
m = dict(a='v', b='v')
v = m.popitem() # remove and return last entry
print(m, v, sep='\n')
# clear dict
m.clear()
print(m)
# files
file = open('file.txt', 'w') # write option
file.write('test')
file.write('ln2')
file.close()
print(open('file.txt', 'r').read())
file = open('file.txt', 'a') # append option
file.write('test')
file.write('ln2')
file.close()
print(open('file.txt', 'r').read())
print(open('file.txt', 'r').read()) # read
for line in open('file.txt', 'r'): # print by line
print(line)
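# the idiomatic alternative to manual open()/close(): a context manager
# closes the file automatically, even if an error is raised mid-block
with open('file.txt', 'w') as f:
    f.write('test')
with open('file.txt') as f:
    print(f.read())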
|
# uncompyle6 version 3.3.5
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.7.3 (default, Apr 24 2019, 15:29:51) [MSC v.1915 64 bit (AMD64)]
# Embedded file name: c:\Jenkins\live\output\win_64_static\Release\python-bundle\MIDI Remote Scripts\ableton\v2\control_surface\components\device_parameters.py
# Compiled at: 2019-05-15 03:17:58
from __future__ import absolute_import, print_function, unicode_literals
from itertools import chain, izip_longest, repeat
from ableton.v2.base import listens, listens_group
from ableton.v2.control_surface import Component, ParameterProvider
from ableton.v2.control_surface.control import ControlList, MappedSensitivitySettingControl
from ableton.v2.control_surface.elements import DisplayDataSource
class DeviceParameterComponent(Component):
controls = ControlList(MappedSensitivitySettingControl, 8)
def __init__(self, parameter_provider=None, *a, **k):
super(DeviceParameterComponent, self).__init__(*a, **k)
self.parameter_provider = parameter_provider
def _get_parameter_provider(self):
return self._parameter_provider
def _set_parameter_provider(self, provider):
self._parameter_provider = provider or ParameterProvider()
self._on_parameters_changed.subject = self._parameter_provider
self._update_parameters()
self._on_parameter_provider_changed(provider)
parameter_provider = property(_get_parameter_provider, _set_parameter_provider)
def set_parameter_controls(self, encoders):
self.controls.set_control_element(encoders)
self._connect_parameters()
def _connect_parameters(self):
parameters = self._parameter_provider.parameters[:self.controls.control_count]
for control, parameter_info in map(None, self.controls, parameters):
parameter = parameter_info.parameter if parameter_info else None
control.mapped_parameter = parameter
if parameter:
control.update_sensitivities(parameter_info.default_encoder_sensitivity, parameter_info.fine_grain_encoder_sensitivity)
return
def _update_parameters(self):
if self.is_enabled():
self._connect_parameters()
@listens(b'parameters')
def _on_parameters_changed(self):
self._update_parameters()
def _on_parameter_provider_changed(self, provider):
pass
def update(self):
super(DeviceParameterComponent, self).update()
self._update_parameters()
class DisplayingDeviceParameterComponent(DeviceParameterComponent):
def __init__(self, *a, **k):
self._parameter_name_data_sources = map(DisplayDataSource, ('', '', '', '',
'', '', '', ''))
self._parameter_value_data_sources = map(DisplayDataSource, ('', '', '', '',
'', '', '',
''))
super(DisplayingDeviceParameterComponent, self).__init__(*a, **k)
@property
def parameters(self):
return map(lambda p: p and p.parameter, self._parameter_provider.parameters)
@property
def parameter_names(self):
return map(lambda p: p and p.name or b'', self.parameters)
def set_name_display_line(self, line):
self._set_display_line(line, self._parameter_name_data_sources)
def set_value_display_line(self, line):
self._set_display_line(line, self._parameter_value_data_sources)
def _set_display_line(self, line, sources):
if line:
line.set_num_segments(len(sources))
for segment in xrange(len(sources)):
line.segment(segment).set_data_source(sources[segment])
def clear_display(self):
for source in chain(self._parameter_name_data_sources, self._parameter_value_data_sources):
source.set_display_string(b'')
def _update_parameters(self):
super(DisplayingDeviceParameterComponent, self)._update_parameters()
if self.is_enabled():
parameters = self.parameters
self._on_parameter_name_changed.replace_subjects(parameters)
self._on_parameter_value_changed.replace_subjects(parameters)
self._update_parameter_names()
self._update_parameter_values()
@listens_group(b'name')
def _on_parameter_name_changed(self, parameter):
self._update_parameter_names()
@listens_group(b'value')
def _on_parameter_value_changed(self, parameter):
self._update_parameter_values()
def _update_parameter_names(self):
if self.is_enabled():
params = zip(chain(self.parameter_provider.parameters, repeat(None)), self._parameter_name_data_sources)
for info, name_data_source in params:
name = self.info_to_name(info)
name_data_source.set_display_string(name or b'')
return
def _update_parameter_values(self):
if self.is_enabled():
for parameter, data_source in izip_longest(self.parameters, self._parameter_value_data_sources):
value_string = self.parameter_to_string(parameter)
if data_source:
data_source.set_display_string(value_string)
def info_to_name(self, info):
parameter = info and info.parameter
return info and info.name or b''
def parameter_to_string(self, parameter):
        if parameter is None:
return b''
else:
return unicode(parameter)
def parameter_to_value(self, parameter):
return parameter.value
|
# Copyright 2021 The Feast Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from datetime import datetime
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
import psycopg2
import pytz
from pydantic import PositiveInt, StrictStr
from pydantic.schema import Literal
from feast import Entity, FeatureTable
from feast.feature_view import FeatureView
from feast.infra.key_encoding_utils import serialize_entity_key
from feast.infra.online_stores.online_store import OnlineStore
from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto
from feast.protos.feast.types.Value_pb2 import Value as ValueProto
from feast.repo_config import FeastConfigBaseModel, RepoConfig
class HologresOnlineStoreConfig(FeastConfigBaseModel):
    """ Online store config for the Hologres-backed store """
    type: Literal[
        "hologres", "feast.infra.online_stores.hologres.HologresOnlineStore"
    ] = "hologres"
    """ Online store type selector """
    host: StrictStr = None
    """ host of the hologres db """
    port: PositiveInt = 1
    """ port of the hologres db """
    dbname: StrictStr = None
    """ db name of the hologres db """
    user: StrictStr = None
    """ user name for the hologres db """
    password: StrictStr = None
    """ password for the hologres db """
class HologresOnlineStore(OnlineStore):
"""
OnlineStore is an object used for all interaction between Feast and the service used for offline storage of
features.
"""
_conn: Optional[psycopg2.extensions.connection] = None
@staticmethod
def _get_db_connect_param(config: RepoConfig) -> str:
assert (
config.online_store.type == "hologres"
or config.online_store.type.endswith("HologresOnlineStore")
)
host = config.online_store.host
port = config.online_store.port
dbname = config.online_store.dbname
user = config.online_store.user
password = config.online_store.password
return host, port, dbname, user, password
def _get_conn(self, config: RepoConfig):
if not self._conn:
host, port, dbname, user, password = self._get_db_connect_param(config)
self._conn = psycopg2.connect(
host=host, port=port, dbname=dbname, user=user, password=password
)
return self._conn
def online_write_batch(
self,
config: RepoConfig,
table: Union[FeatureTable, FeatureView],
data: List[
Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]]
],
progress: Optional[Callable[[int], Any]],
) -> None:
conn = self._get_conn(config)
project = config.project
with conn:
cur = conn.cursor()
for entity_key, values, timestamp, created_ts in data:
entity_key_bin = serialize_entity_key(entity_key)
timestamp = _to_naive_utc(timestamp)
if created_ts is not None:
created_ts = _to_naive_utc(created_ts)
for feature_name, val in values.items():
                    # A single upsert (INSERT ... ON CONFLICT DO UPDATE) below
                    # handles both the first write and subsequent updates of a
                    # feature row, so no separate UPDATE is needed.
cur.execute(
f"""INSERT INTO {_table_id(project, table)}
(entity_key, feature_name, value, event_ts, created_ts)
VALUES (%s, %s, %s, %s, %s)
ON CONFLICT(entity_key, feature_name) DO UPDATE SET
(entity_key, feature_name, value, event_ts, created_ts) = ROW(excluded.*);
""",
(
entity_key_bin,
feature_name,
val.SerializeToString(),
timestamp,
created_ts,
),
)
if progress:
progress(1)
def online_read(
self,
config: RepoConfig,
table: Union[FeatureTable, FeatureView],
entity_keys: List[EntityKeyProto],
requested_features: Optional[List[str]] = None,
) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]:
conn = self._get_conn(config)
cur = conn.cursor()
result: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = []
project = config.project
for entity_key in entity_keys:
entity_key_bin = serialize_entity_key(entity_key)
cur.execute(
f"SELECT feature_name, value, event_ts FROM {_table_id(project, table)} WHERE entity_key = %%s",
(entity_key_bin,),
)
res = {}
res_ts = None
for feature_name, val_bin, ts in cur.fetchall():
val = ValueProto()
val.ParseFromString(val_bin)
res[feature_name] = val
res_ts = ts
if not res:
result.append((None, None))
else:
result.append((res_ts, res))
return result
def update(
self,
config: RepoConfig,
tables_to_delete: Sequence[Union[FeatureTable, FeatureView]],
tables_to_keep: Sequence[Union[FeatureTable, FeatureView]],
entities_to_delete: Sequence[Entity],
entities_to_keep: Sequence[Entity],
partial: bool,
):
conn = self._get_conn(config)
cur = conn.cursor()
project = config.project
# create feast schema
cur.execute("CREATE SCHEMA IF NOT EXISTS feast")
for table in tables_to_keep:
sql = f"CREATE TABLE IF NOT EXISTS {_table_id(project, table)} (entity_key BYTEA, feature_name TEXT, value BYTEA, event_ts timestamp, created_ts timestamp, PRIMARY KEY(entity_key, feature_name))"
cur.execute(sql)
conn.commit()
for table in tables_to_delete:
cur.execute(f"DROP TABLE IF EXISTS {_table_id(project, table)}")
conn.commit()
def teardown(
self,
config: RepoConfig,
tables: Sequence[Union[FeatureTable, FeatureView]],
entities: Sequence[Entity],
):
conn = self._get_conn(config)
conn.close()
def _table_id(project: str, table: Union[FeatureTable, FeatureView]) -> str:
return f"feast.{project}_{table.name}"
def _to_naive_utc(ts: datetime):
if ts.tzinfo is None:
return ts
else:
return ts.astimezone(pytz.utc).replace(tzinfo=None)
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
Created on 19/01/07 05:00:15
@author: Changzhi Sun
"""
|
"""
eval_cmd.py
Provides command line interface to evaluate networks on isa
Copyright (C) 2020, Akhilan Boopathy <akhilan@mit.edu>
Sijia Liu <Sijia.Liu@ibm.com>
Gaoyuan Zhang <Gaoyuan.Zhang@ibm.com>
Cynthia Liu <cynliu98@mit.edu>
Pin-Yu Chen <Pin-Yu.Chen@ibm.com>
Shiyu Chang <Shiyu.Chang@ibm.com>
Luca Daniel <luca@mit.edu>
"""
import os
import tensorflow as tf
import funcs
import logging
flags = tf.flags
FLAGS = flags.FLAGS
## Required parameters
flags.DEFINE_enum(
"dataset", None, ['mnist', 'cifar', 'tinyimagenet', 'restimagenet'],
"dataset")
flags.DEFINE_enum(
"num_class", None, ['one_class', 'two_class', 'all_class'],
"num_class")
flags.DEFINE_integer(
"num_image", None,
"num_image")
flags.DEFINE_enum(
"norm", None, ['l1', 'l2'],
"norm")
flags.DEFINE_enum(
"network_type", None, ['small', 'pool', 'wresnet'],
"network_type")
flags.DEFINE_bool(
    "rep_l", None,
    "rep_l")
flags.DEFINE_enum("inp_method", None, ['cam', 'cam++', 'ig'], "inp_method")
flags.DEFINE_string("network", None, "network")
flags.DEFINE_string("output_dir", None, "output_dir")
flags.DEFINE_list("penalties", None, "penalties")
flags.DEFINE_string("task", None, "task")
# ["isa_cam", "tradeoff_cam", "tradeoff_ig", "tradeoff_repr", "isa_ig", "attack_and_display"]
def main(_):
dataset = FLAGS.dataset
num_class = FLAGS.num_class
norm = FLAGS.norm
inp_method = FLAGS.inp_method
rep_l = FLAGS.rep_l
network_type = FLAGS.network_type
network = FLAGS.network
penalties = [float(i) for i in FLAGS.penalties]
num_image = FLAGS.num_image
kappa = 0.1
task = FLAGS.task
tf.logging.info(f"----------task: {task}")
tf.logging.info(f"----------dataset: {dataset}")
tf.logging.info(f"----------inp_method: {inp_method}")
tf.logging.info(f"----------num_class: {num_class}")
tf.logging.info(f"----------norm: {norm}")
output_file = os.path.join(FLAGS.output_dir, f"{norm}_{num_class}_{task}_{dataset}_{inp_method}.tsv")
with tf.Session() as sess:
result = getattr(funcs, task)(network, sess, network_type, penalties, dataset, num_class, norm, inp_method,
rep_l, kappa, num_image, output_file)
print(result)
def setup_env():
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '0'
tf.logging.set_verbosity(tf.logging.INFO)
loggers = [logging.getLogger(name) for name in logging.root.manager.loggerDict]
for logger in loggers:
logger.propagate = False
if __name__ == "__main__":
flags.mark_flag_as_required("dataset")
flags.mark_flag_as_required("num_class")
flags.mark_flag_as_required("num_image")
flags.mark_flag_as_required("norm")
flags.mark_flag_as_required("inp_method")
flags.mark_flag_as_required("rep_l")
flags.mark_flag_as_required("network_type")
flags.mark_flag_as_required("network")
flags.mark_flag_as_required("penalties")
flags.mark_flag_as_required("task")
flags.mark_flag_as_required("output_dir")
tf.gfile.MakeDirs(FLAGS.output_dir)
setup_env()
tf.app.run()
|
import pyautogui, pygetwindow, time, random
from twilio.rest import Client
time.sleep(0)
screen = pygetwindow.getWindowsWithTitle('Old School RuneScape')[0]
center = [296, 220]
def main():
message = True
client = Client("AC57db9e7ab173796dd3376ae134fb4d5e","f870aceb106c4505d5c7de3d58d893dc")
time.sleep(1)
move('up',2)
time.sleep(0.4)
can = True
while can:
item = pyautogui.locateCenterOnScreen("dish.png")
if(not item):
can = False
break
pyautogui.click(item)
item = pyautogui.locateCenterOnScreen("dough.png")
if(not item):
can = False
break
pyautogui.click(item)
item = pyautogui.locateCenterOnScreen("close.png")
pyautogui.click(item)
time.sleep(0.5)
item = pyautogui.locateCenterOnScreen("dough.png")
pyautogui.click(item)
item = pyautogui.locateCenterOnScreen("dish.png")
pyautogui.click(item)
time.sleep(0.7)
pyautogui.press('space')
time.sleep(16.5)
move('up',2)
time.sleep(0.3)
item = pyautogui.locateCenterOnScreen("empty.png")
pyautogui.click(item)
if message:
client.messages.create(to="+12242219324",
from_="+16362491215",
body="Bot has finished!")
def reset():
moveMouse([594, 53])
pyautogui.click()
def move(direction, spaces):
    vel = [0, 0]
    if direction == "up": vel[1] = -25
    if direction == "down": vel[1] = 25
    if direction == "left": vel[0] = -25
    if direction == "right": vel[0] = 25
newRatio = (center[0] + (vel[0] * spaces),center[1] + (vel[1] * spaces))
#pyautogui.click(screen.left + (screen.width/newRatio[0]),screen.top+(screen.height/newRatio[1]))
pyautogui.moveTo(screen.left + newRatio[0],screen.top+newRatio[1])
pyautogui.mouseDown()
time.sleep(random.randint(4,9)/10)
pyautogui.mouseUp()
main()
|
import unittest
class CircularBuffer:
def __init__(self):
self.list = [0]
self.position = 0
def insert(self, value):
self.position += 1
self.list.insert(self.position, value)
def as_list(self):
return self.list
def step(self, steps):
self.position = self._offset_position(steps)
def value_at(self, offset):
return self.list[self._offset_position(offset)]
def _offset_position(self, offset):
return (self.position + offset) % self._buffer_length()
def _buffer_length(self):
return len(self.list)
class TestCircularBuffer(unittest.TestCase):
def setUp(self):
self.buffer = CircularBuffer()
def test_initial_configuration(self):
self.assertListEqual([0], self.buffer.as_list())
def test_inserts_value_after_position(self):
self.buffer.insert(3)
self.assertListEqual([0, 3], self.buffer.as_list())
self.buffer.insert(-9)
self.assertListEqual([0, 3, -9], self.buffer.as_list())
def test_initial_position(self):
self.assertEqual(0, self.buffer.position)
self.buffer.insert(3)
self.assertEqual(1, self.buffer.position)
self.buffer.insert(-9)
self.assertEqual(2, self.buffer.position)
def test_step_forward_after_insert(self):
self.buffer.insert(3)
self.assertEqual(1, self.buffer.position)
self.buffer.insert(-1)
self.assertEqual(2, self.buffer.position)
self.buffer.insert(12)
self.assertEqual(3, self.buffer.position)
def test_step_forward(self):
self.buffer.step(1)
self.assertEqual(0, self.buffer.position)
self.buffer.step(2)
self.assertEqual(0, self.buffer.position)
self.buffer.insert(3)
self.buffer.insert(-1)
self.buffer.insert(12)
self.assertEqual(3, self.buffer.position)
self.buffer.step(1)
self.assertEqual(0, self.buffer.position)
self.buffer.step(2)
self.assertEqual(2, self.buffer.position)
self.buffer.step(3)
self.assertEqual(1, self.buffer.position)
def test_value_at_position(self):
self.buffer.insert(3)
self.buffer.insert(-1)
self.buffer.insert(12)
self.assertEqual(12, self.buffer.value_at(0))
self.assertEqual(0, self.buffer.value_at(1))
self.assertEqual(3, self.buffer.value_at(2))
self.assertEqual(-1, self.buffer.value_at(3))
class SpinStorm:
def __init__(self, buffer=None):
self.buffer = buffer or CircularBuffer()
self.next_value = 1
def walk(self, repetitions, steps):
for _ in range(repetitions):
self.step(steps)
def step(self, steps):
self.buffer.step(steps)
self.buffer.insert(self.next_value)
self.next_value += 1
class TestSpinStorm(unittest.TestCase):
def setUp(self):
self.buffer = CircularBuffer()
self.storm = SpinStorm(self.buffer)
def test_initial_state(self):
self.assertListEqual([0], self.buffer.as_list())
self.assertEqual(self.buffer, self.storm.buffer)
def test_single_step_example(self):
self.storm.step(3)
self.assertListEqual([0, 1], self.buffer.as_list())
self.assertEqual(1, self.buffer.position)
self.storm.step(3)
self.assertListEqual([0, 2, 1], self.buffer.as_list())
self.assertEqual(1, self.buffer.position)
self.storm.step(3)
self.storm.step(3)
self.storm.step(3)
self.storm.step(3)
self.assertListEqual([0, 5, 2, 4, 3, 6, 1], self.buffer.as_list())
self.assertEqual(5, self.buffer.position)
self.storm.step(3)
self.storm.step(3)
self.storm.step(3)
self.assertListEqual([0, 9, 5, 7, 2, 4, 3, 8, 6, 1], self.buffer.as_list())
self.assertEqual(1, self.buffer.position)
def test_many_steps_example(self):
self.storm.walk(9, 3)
self.assertListEqual([0, 9, 5, 7, 2, 4, 3, 8, 6, 1], self.buffer.as_list())
self.assertEqual(1, self.buffer.position)
self.storm.walk(2017 - 9, 3)
self.assertEqual(2017, self.buffer.value_at(0))
self.assertEqual(638, self.buffer.value_at(1))
def test_many_steps_mine(self):
self.storm.walk(2017, 304)
self.assertEqual(2017, self.buffer.value_at(0))
self.assertEqual(1173, self.buffer.value_at(1))
@unittest.skip("Wayyyy too long")
def test_angry_steps_mine(self):
iterations = 50000000
self.storm.walk(iterations, 304)
self.assertEqual(iterations, self.buffer.value_at(0))
self.assertEqual(1173, self.buffer.as_list()[1])
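# CheatBuffer exploits the structure of the puzzle: value 0 never moves from
# index 0, so only the most recent value written at position 1 matters. By
# tracking just the position and length, each insert becomes O(1) instead of
# an O(n) list insertion.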
class CheatBuffer(CircularBuffer):
def __init__(self):
self.position = 0
self.length = 1
self.value_at_position_one = None
def insert(self, value):
self.position += 1
self.length += 1
if self.position == 1:
self.value_at_position_one = value
def value_at_one(self):
return self.value_at_position_one
def _buffer_length(self):
return self.length
class TestCheatBufferStorm(unittest.TestCase):
def setUp(self):
self.buffer = CheatBuffer()
self.storm = SpinStorm(self.buffer)
def test_initial_value(self):
self.assertEqual(None, self.buffer.value_at_one())
def test_value_after_steps_example(self):
self.storm.step(3)
self.assertEqual(1, self.buffer.value_at_one())
self.storm.step(3)
self.assertEqual(2, self.buffer.value_at_one())
self.storm.step(3)
self.storm.step(3)
self.assertEqual(2, self.buffer.value_at_one())
self.storm.step(3)
self.assertEqual(5, self.buffer.value_at_one())
self.storm.step(3)
self.storm.step(3)
self.storm.step(3)
self.assertEqual(5, self.buffer.value_at_one())
self.storm.step(3)
self.assertEqual(9, self.buffer.value_at_one())
@unittest.skip("Still too long but reasonable")
def test_angry_steps_mine(self):
iterations = 50000000
self.storm.walk(iterations, 304)
self.assertEqual(1930815, self.buffer.value_at_one())
|
from allink_apps.blog.sitemaps.sitemaps import BlogSitemap # noqa
|
# -*- python -*-
# -*- coding: utf-8 -*-
#
# michael a.g. aïvázis <michael.aivazis@para-sim.com>
#
# (c) 2013-2018 parasim inc
# (c) 2010-2018 california institute of technology
# all rights reserved
#
# get the package
import altar
# the protocol
class Distribution(altar.protocol, family="altar.distributions"):
"""
The protocol that all AlTar probability distributions must satisfy
"""
# requirements
# user configurable state
parameters = altar.properties.int()
parameters.doc = "the number of model parameters that i take care of"
offset = altar.properties.int(default=0)
offset.doc = "the starting point of my parameters in the overall model state"
# configuration
@altar.provides
def initialize(self, **kwds):
"""
Initialize with the given random number generator
"""
# model support
@altar.provides
def initializeSample(self, theta):
"""
Fill my portion of {theta} with initial random values from my distribution.
"""
@altar.provides
def priorLikelihood(self, theta, prior):
"""
Fill my portion of {prior} with the likelihoods of the samples in {theta}
"""
@altar.provides
def verify(self, theta, mask):
"""
Check whether my portion of the samples in {theta} are consistent with my constraints, and
update {mask}, a vector with zeroes for valid samples and non-zero for invalid ones
"""
# wrappers over the interface of the underlying support
@altar.provides
def sample(self):
"""
Sample the distribution using a random number generator
"""
@altar.provides
def density(self, x):
"""
Compute the probability density of the distribution at {x}
"""
@altar.provides
def vector(self, vector):
"""
Fill {vector} with random values
"""
@altar.provides
def matrix(self, matrix):
"""
Fill {matrix} with random values
"""
# framework hooks
@classmethod
def pyre_default(cls):
"""
Supply a default implementation
"""
# use the uniform distribution
from .Uniform import Uniform as default
# and return it
return default
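# Usage sketch (illustrative, not part of the protocol itself): concrete
# distributions such as Uniform declare themselves as components implementing
# this protocol and supply the methods above; when a model asks for a
# Distribution without naming one, pyre_default hands back Uniform.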
# end of file
|
import math
from functools import reduce
from internal import utility as utility
from internal.terrain.cloud import Cloud
class Weather:
def __init__(self, cells, parent):
self.cells = cells
self.water_cells = []
self.clouds = []
self.cells_x = utility.S_WIDTH // utility.CELL_SIZE
self.cells_y = utility.S_HEIGHT // utility.CELL_SIZE
self.setup(parent)
def setup(self, parent):
self.wind_vectors = self.calculate_wind_vectors()
self.setup_clouds()
self.water_cells = self.get_water_cells()
def get_water_cells(self):
res = []
for row in self.cells:
for cell in row:
if cell.terrain.is_water():
res.append(cell)
return res
def setup_clouds(self):
for x, row in enumerate(self.cells):
self.clouds.append([])
for y, cell in enumerate(row):
self.clouds[-1].append([])
def handle_evaporation(self):
for i, cell in enumerate(self.water_cells):
# utility.show_bar(i, self.water_cells, message='Handling evaporation: ')
amount = cell.get_evaporation() * 4
if amount <= 0:
continue
if len(self.clouds[cell.x][cell.y]) > 0:
self.clouds[cell.x][cell.y][0].water += amount
else:
self.clouds[cell.x][cell.y].append(Cloud(cell.x, cell.y, amount))
def handle_clouds(self):
for x, row in enumerate(self.clouds):
for y, cell in enumerate(row):
                for cloud in cell[:]:  # iterate over a copy; clouds may be removed below
cloud.processed = False
cloud.age += 1
if not self.cells[x][y].terrain.is_water():
self.cells[x][y].terrain.moisture += cloud.precipitate()
if cloud.water <= 0:
cell.remove(cloud)
def calculate_wind_vectors(self):
wind_vectors = []
for x, row in enumerate(self.cells):
wind_vectors.append([])
for y, cell in enumerate(row):
dx, dy = 0.0, 0.0
for neighbor in cell.neighbors():
cdx, cdy = cell.x - neighbor.x, cell.y - neighbor.y
cdx = cdx * (cell.get_temperature() - neighbor.get_temperature()) / cell.get_temperature()
cdy = cdy * (cell.get_temperature() - neighbor.get_temperature()) / cell.get_temperature()
dx += cdx
dy += cdy
                mag = math.sqrt(dx ** 2 + dy ** 2)
                if mag > 0:  # guard: uniform neighbor temperatures yield a zero vector
                    dx, dy = dx / mag * 5, dy / mag * 5
dx += 1.5 # Wind goes west to east
wind_vectors[-1].append((dx, dy))
return wind_vectors
def handle_wind(self):
for x, row in enumerate(self.clouds):
for y, cell in enumerate(row):
                for cloud in cell[:]:  # iterate over a copy; clouds are moved out of this cell below
if not cloud.processed:
cell.remove(cloud)
cloud.processed = True
dx, dy = self.wind_vectors[x][y]
cloud.x += dx
cloud.y += dy
if cloud.x >= self.cells_x:
cloud.x = 0
elif cloud.x < 0:
cloud.x = self.cells_x - 1
if cloud.y >= self.cells_y:
cloud.y = 0
elif cloud.y < 0:
cloud.y = self.cells_y - 1
self.clouds[int(cloud.x)][int(cloud.y)].append(cloud)
def step(self):
self.handle_wind()
self.handle_clouds()
self.handle_evaporation()
def normalize_moistures(self):
print('Normalizing moistures.')
moistures = reduce(lambda a, b: a + b,
map(lambda row: list(map(lambda cell: cell.terrain.moisture, row)), self.cells))
        max_amount = max(moistures)
        if max_amount == 0:
            return
for row in self.cells:
for cell in row:
cell.terrain.moisture /= max_amount
def run(self, steps):
for step in range(steps + 1):
utility.show_bar(step, steps + 1, message='Generating rainfall patterns: ', number_limit=True)
self.step()
        data = reduce(lambda a, b: a + b, map(lambda row: list(map(lambda cell: cell.terrain.moisture, row)), self.cells))
        max_amount = max(data)
        if max_amount != 0:
            data = list(map(lambda i: i / max_amount, data))
        return data  # flattened, normalized per-cell moisture values
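# Usage sketch (illustrative; `cells` and `parent` come from the surrounding
# terrain generator): w = Weather(cells, parent); w.run(50) advances the
# wind/cloud/evaporation cycle, and w.normalize_moistures() rescales each
# cell's moisture into [0, 1].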
|
import re
from .parser import _next_significant, _to_token_iterator
def parse_nth(input):
"""Parse `<An+B> <https://drafts.csswg.org/css-syntax-3/#anb>`_,
as found in `:nth-child()
<https://drafts.csswg.org/selectors/#nth-child-pseudo>`_
and related Selector pseudo-classes.
Although tinycss2 does not include a full Selector parser,
this bit of syntax is included as it is particularly tricky to define
on top of a CSS tokenizer.
:type input: :obj:`str` or :term:`iterable`
:param input: A string or an iterable of :term:`component values`.
:returns:
A ``(a, b)`` tuple of integers, or :obj:`None` if the input is invalid.
"""
tokens = _to_token_iterator(input, skip_comments=True)
token = _next_significant(tokens)
if token is None:
return
token_type = token.type
if token_type == 'number' and token.is_integer:
return parse_end(tokens, 0, token.int_value)
elif token_type == 'dimension' and token.is_integer:
unit = token.lower_unit
if unit == 'n':
return parse_b(tokens, token.int_value)
elif unit == 'n-':
return parse_signless_b(tokens, token.int_value, -1)
else:
match = N_DASH_DIGITS_RE.match(unit)
if match:
return parse_end(tokens, token.int_value, int(match.group(1)))
elif token_type == 'ident':
ident = token.lower_value
if ident == 'even':
return parse_end(tokens, 2, 0)
elif ident == 'odd':
return parse_end(tokens, 2, 1)
elif ident == 'n':
return parse_b(tokens, 1)
elif ident == '-n':
return parse_b(tokens, -1)
elif ident == 'n-':
return parse_signless_b(tokens, 1, -1)
elif ident == '-n-':
return parse_signless_b(tokens, -1, -1)
elif ident[0] == '-':
match = N_DASH_DIGITS_RE.match(ident[1:])
if match:
return parse_end(tokens, -1, int(match.group(1)))
else:
match = N_DASH_DIGITS_RE.match(ident)
if match:
return parse_end(tokens, 1, int(match.group(1)))
elif token == '+':
token = next(tokens) # Whitespace after an initial '+' is invalid.
if token.type == 'ident':
ident = token.lower_value
if ident == 'n':
return parse_b(tokens, 1)
elif ident == 'n-':
return parse_signless_b(tokens, 1, -1)
else:
match = N_DASH_DIGITS_RE.match(ident)
if match:
return parse_end(tokens, 1, int(match.group(1)))
def parse_b(tokens, a):
token = _next_significant(tokens)
if token is None:
return (a, 0)
elif token == '+':
return parse_signless_b(tokens, a, 1)
elif token == '-':
return parse_signless_b(tokens, a, -1)
elif (token.type == 'number' and token.is_integer and
token.representation[0] in '-+'):
return parse_end(tokens, a, token.int_value)
def parse_signless_b(tokens, a, b_sign):
token = _next_significant(tokens)
if (token.type == 'number' and token.is_integer and
not token.representation[0] in '-+'):
return parse_end(tokens, a, b_sign * token.int_value)
def parse_end(tokens, a, b):
if _next_significant(tokens) is None:
return (a, b)
N_DASH_DIGITS_RE = re.compile('^n(-[0-9]+)$')
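# A few illustrative inputs and results (sketch):
#   parse_nth('2n+1')  -> (2, 1)
#   parse_nth('even')  -> (2, 0)
#   parse_nth('odd')   -> (2, 1)
#   parse_nth('foo')   -> None (invalid <An+B>)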
|
from BeautifulSoup import BeautifulSoup, Comment
def sanitize_html(value):
valid_tags = 'p i strong b u a h1 h2 h3 pre br img'.split()
valid_attrs = 'href src'.split()
soup = BeautifulSoup(value)
for comment in soup.findAll(
text=lambda text: isinstance(text, Comment)):
comment.extract()
for tag in soup.findAll(True):
if tag.name not in valid_tags:
tag.hidden = True
tag.attrs = [(attr, val) for attr, val in tag.attrs
if attr in valid_attrs]
return soup.renderContents().decode('utf8').replace('javascript:', '')
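# Illustrative behavior (sketch): attributes outside the whitelist are dropped
# and disallowed tags are hidden while their text content is kept, e.g.
#   sanitize_html('<p class="x">hi <b>there</b></p>')  ->  '<p>hi <b>there</b></p>'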
|
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
from django.contrib.gis.db.backends.postgis.base import DatabaseWrapper as _DatabaseWrapper
from django.db.backends.creation import NO_DB_ALIAS
from .operations import SubQueryPostGISOperations
class DatabaseWrapper(_DatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
if kwargs.get('alias', '') != NO_DB_ALIAS:
self.ops = SubQueryPostGISOperations(self)
|
# pylint: disable=unused-argument
from datetime import datetime
from json import loads as load
from logging import getLogger
from typing import Any, Callable, Coroutine, Generator, List, Optional, Tuple
from fastapi import APIRouter, Body, Depends, Request, Security, status
from fastapi.security import HTTPBasic, SecurityScopes
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import Session
import fideslib.oauth.api.endpoints as endpoints
import fideslib.oauth.jwt as jwt
from fideslib.oauth.api.exceptions import (
AuthenticationException,
AuthorizationException,
ClientWriteFailedException,
ClientNotFoundException,
ExpiredTokenException,
)
from fideslib.oauth.api.models import (
AccessToken,
ClientCreatedResponse,
OAuth2ClientCredentialsRequestForm,
)
from fideslib.oauth.api.utils import (
extract_payload,
is_token_expired,
oauth2_scheme,
validate_scopes,
)
from fideslib.oauth.database.client_detail_model import ClientDetail
from fideslib.oauth.scopes import (
CLIENT_CREATE,
CLIENT_DELETE,
CLIENT_READ,
CLIENT_UPDATE,
SCOPE_READ,
SCOPES,
)
EIGHT_DAYS = 60 * 24 * 8 # Expressed in minutes
logger = getLogger(__name__)
class OAuthRouter(APIRouter):
"""
A FastAPI `APIRouter` that includes all endpoints necessary to
implement a complete OAuth scheme.
"""
def __init__(
self,
app_encryption_key: str,
db: Callable[[], Generator[Session, None, None]],
oauth_root_client_id: str,
oauth_root_client_secret_hash: Tuple[str, bytes],
*,
encoding: str = "utf-8",
oauth_access_token_expire_min: int = EIGHT_DAYS,
oauth_client_id_bytelength: int = 16,
oauth_client_secret_bytelength: int = 16,
prefix: str = endpoints.OAUTH_PREFIX,
tags: Optional[List[str]] = None,
) -> None:
if tags is None:
tags = ["OAuth"]
self.access_token_expire_min = oauth_access_token_expire_min
self.client_id_bytelength = oauth_client_id_bytelength
self.client_secret_bytelength = oauth_client_secret_bytelength
self.db_func = db
self.encoding = encoding
self.encryption_key = app_encryption_key
self.root_client_id = oauth_root_client_id
self.root_client_secret_hash = oauth_root_client_secret_hash
super().__init__(prefix=prefix, tags=tags)
self.add_api_route(
endpoints.TOKEN,
self._acquire_access_token(),
methods=["POST"],
response_model=AccessToken,
summary="Retrieve an access token",
)
self.add_api_route(
endpoints.CLIENT,
self._create_client(),
dependencies=[
Security(self._verify_oauth_client(), scopes=[CLIENT_CREATE]),
],
methods=["POST"],
response_model=ClientCreatedResponse,
status_code=status.HTTP_201_CREATED,
summary="Create a new client",
)
self.add_api_route(
endpoints.CLIENT_BY_ID,
self._delete_client(),
dependencies=[
Security(self._verify_oauth_client(), scopes=[CLIENT_DELETE]),
],
methods=["DELETE"],
response_model=None, # Explicitly defined, to prevent overwriting
status_code=status.HTTP_204_NO_CONTENT,
summary="Delete a client",
)
self.add_api_route(
endpoints.CLIENT_SCOPE,
self._get_client_scopes(),
dependencies=[Security(self._verify_oauth_client(), scopes=[CLIENT_READ])],
methods=["GET"],
response_model=List[str],
summary="Retrieve the current scopes for a client",
)
self.add_api_route(
endpoints.CLIENT_SCOPE,
self._set_client_scopes(),
dependencies=[
Security(self._verify_oauth_client(), scopes=[CLIENT_UPDATE]),
],
methods=["PUT"],
summary="Overwrite the scopes for an existing client",
)
self.add_api_route(
endpoints.SCOPE,
self.read_scopes,
dependencies=[Security(self._verify_oauth_client(), scopes=[SCOPE_READ])],
methods=["GET"],
response_model=List[str],
summary="Retrieve all available scopes",
)
def _acquire_access_token(
self,
*args: Any,
**kwargs: Any,
) -> Callable[..., Coroutine[Any, Any, AccessToken]]:
"""
Given a set of credentials, returns an access token.
"""
async def acquire_access_token(
request: Request,
*,
db: Session = Depends(self.db_func),
form_data: OAuth2ClientCredentialsRequestForm = Depends(),
) -> AccessToken:
basic_credentials = await HTTPBasic(auto_error=False)(request)
if form_data.client_id and form_data.client_secret:
client_id = form_data.client_id
client_secret = form_data.client_secret
elif basic_credentials:
client_id = basic_credentials.username
client_secret = basic_credentials.password
else:
raise AuthenticationException()
client_detail = ClientDetail.get(
db,
client_id=client_id,
encoding=self.encoding,
root_client_id=self.root_client_id,
root_client_secret_hash=self.root_client_secret_hash,
)
if client_detail is None:
raise AuthenticationException()
if not client_detail.credentials_valid(client_secret, self.encoding):
raise AuthenticationException()
logger.info("Creating access token for client with ID '%s'", client_id)
return AccessToken(
access_token=client_detail.create_access_code_jwe(
self.encryption_key,
self.encoding,
)
)
return acquire_access_token
def _create_client(
self,
*args: Any,
**kwargs: Any,
) -> Callable[..., ClientCreatedResponse]:
"""
Creates a new client and returns the credentials.
"""
def create_client(
*,
db: Session = Depends(self.db_func),
scopes: List[str] = Body([]),
) -> ClientCreatedResponse:
validate_scopes(scopes)
try:
client, secret = ClientDetail.create_client_and_secret(
self.client_id_bytelength,
self.client_secret_bytelength,
db,
self.encoding,
scopes,
)
logger.info("Created new client with ID '%s'", client.id)
except SQLAlchemyError as e:
logger.error("Failed to create client", exc_info=True, stack_info=True)
raise ClientWriteFailedException() from e
return ClientCreatedResponse(client_id=client.id, client_secret=secret)
return create_client
def _delete_client(
self,
*args: Any,
**kwargs: Any,
) -> Callable[..., None]:
"""
Deletes the client associated with the client_id.
"""
def delete_client(
*,
client_id: str,
db: Session = Depends(self.db_func),
) -> None:
client = ClientDetail.get(
db,
client_id=client_id,
encoding=self.encoding,
root_client_id=self.root_client_id,
root_client_secret_hash=self.root_client_secret_hash,
)
if client is not None:
logger.info("Deleting client with ID '%s'", client_id)
client.delete(db)
return delete_client
def _get_client_scopes(
self,
*args: Any,
**kwargs: Any,
) -> Callable[..., List[str]]:
"""
Returns the list of scopes associated with a client.
"""
def get_client_scopes(
*,
client_id: str,
db: Session = Depends(self.db_func),
) -> List[str]:
logger.info(
"Fetching current permissions for client with ID '%s'", client_id
)
client = ClientDetail.get(
db,
client_id=client_id,
encoding=self.encoding,
root_client_id=self.root_client_id,
root_client_secret_hash=self.root_client_secret_hash,
)
return client.scopes if client is not None else []
return get_client_scopes
@staticmethod
def read_scopes(*args: Any, **kwargs: Any) -> List[str]:
"""
Returns a list of all scopes available for assignment.
"""
SCOPES.sort()
return SCOPES
def _set_client_scopes(
self,
*args: Any,
**kwargs: Any,
) -> Callable[..., None]:
"""
Overwrites the client's scopes with those provided.
"""
def set_client_scopes(
*,
client_id: str,
scopes: List[str],
db: Session = Depends(self.db_func),
) -> None:
client = ClientDetail.get(
db,
client_id=client_id,
encoding=self.encoding,
root_client_id=self.root_client_id,
root_client_secret_hash=self.root_client_secret_hash,
)
if not client:
raise ClientNotFoundException(client_id)
validate_scopes(scopes)
try:
client.update(db, data={"scopes": scopes})
scopes.sort()
logger.info(
"Updated permissions for client with ID '%s' to: [%s]",
client_id,
", ".join(scopes),
)
except SQLAlchemyError as e:
logger.error(
"Failed to update client permissions",
exc_info=True,
stack_info=True,
)
raise ClientWriteFailedException() from e
return set_client_scopes
def _verify_oauth_client(
self,
*args: Any,
**kwargs: Any,
) -> Callable[..., Coroutine[Any, Any, ClientDetail]]:
"""
Verifies that the access token provided in the authorization header
contains the necessary scopes specified by the caller.
"""
async def verify_oauth_client(
*,
authorization: str = Security(oauth2_scheme),
db: Session = Depends(self.db_func),
security_scopes: SecurityScopes,
) -> ClientDetail:
if authorization is None:
raise AuthenticationException()
token_data = load(extract_payload(authorization, self.encryption_key))
issued_at = token_data.get(jwt.JWE_ISSUED_AT, None)
if not issued_at:
raise AuthorizationException()
if is_token_expired(
datetime.fromisoformat(issued_at),
self.access_token_expire_min,
):
raise ExpiredTokenException()
assigned_scopes = token_data[jwt.JWE_PAYLOAD_SCOPES]
if not set(security_scopes.scopes).issubset(assigned_scopes):
raise AuthorizationException()
client_id = token_data.get(jwt.JWE_PAYLOAD_CLIENT_ID)
if not client_id:
raise AuthorizationException()
client = ClientDetail.get(
db,
client_id=client_id,
encoding=self.encoding,
root_client_id=self.root_client_id,
root_client_secret_hash=self.root_client_secret_hash,
)
if not client:
raise AuthorizationException()
if not set(assigned_scopes).issubset(set(client.scopes)):
# If the scopes on the token are not a subset of the scopes available
# to the associated oauth client, this token is not valid
raise AuthorizationException()
return client
return verify_oauth_client
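# Minimal wiring sketch (illustrative only; `app`, `get_db_session`, the key,
# and the root credentials are hypothetical stand-ins, not part of fideslib):
#
#   router = OAuthRouter(
#       app_encryption_key="0123456789abcdef0123456789abcdef",
#       db=get_db_session,  # callable yielding a SQLAlchemy Session
#       oauth_root_client_id="root_client_id",
#       oauth_root_client_secret_hash=("hashed_secret", b"salt"),
#   )
#   app.include_router(router)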
|
from src.data_preparation import prepare_data_gb
from src.data_preparation import prepare_data_lr
from src.data_preparation import prepare_test_data_lr
from src.data_processing import process_data_gb
from src.data_processing import process_data_lr
from src.data_processing import test_lr
from src.data_visualiazation import visualize
import os.path
def main():
# visualize(data)
# X, y = prepare_data_gb('radiant_win')
# process_data_gb(X, y)
if not os.path.isfile('./models/lr.joblib'):
X, y = prepare_data_lr('radiant_win')
process_data_lr(X, y)
X, data_x = prepare_test_data_lr()
test_lr(X, data_x)
if __name__ == '__main__':
    main()
|
"""This file simulates power spectra in which two parameters are varied together."""
import numpy as np
from fooof import FOOOFGroup
from fooof.sim import *
import sys
sys.path.append('../bratios')
from settings import *
from paths import DATA_PATHS as dp
from sims import *
###################################################################################################
###################################################################################################
def main():
# Aperiodic Periodic
gen_interacting_aper_per("EXP", "lowCF", dp.make_file_path(dp.sims_interacting, 'EXP_lowCF'))
gen_interacting_aper_per("EXP", "highCF", dp.make_file_path(dp.sims_interacting, 'EXP_highCF'))
gen_interacting_aper_per("EXP", "lowPW", dp.make_file_path(dp.sims_interacting, 'EXP_lowPW'))
gen_interacting_aper_per("EXP", "highPW", dp.make_file_path(dp.sims_interacting, 'EXP_highPW'))
gen_interacting_aper_per("EXP", "lowBW", dp.make_file_path(dp.sims_interacting, 'EXP_lowBW'))
gen_interacting_aper_per("EXP", "highBW", dp.make_file_path(dp.sims_interacting, 'EXP_highBW'))
# Periodic Periodic
gen_interacting_per_per("lowCF", "highCF", dp.make_file_path(dp.sims_interacting, 'lowCF_highCF'))
gen_interacting_per_per("lowCF", "lowPW", dp.make_file_path(dp.sims_interacting, 'lowCF_lowPW'))
gen_interacting_per_per("lowCF", "highPW", dp.make_file_path(dp.sims_interacting, 'lowCF_highPW'))
gen_interacting_per_per("lowCF", "lowBW", dp.make_file_path(dp.sims_interacting, 'lowCF_lowBW'))
gen_interacting_per_per("lowCF", "highBW", dp.make_file_path(dp.sims_interacting, 'lowCF_highBW'))
gen_interacting_per_per("highCF", "lowPW", dp.make_file_path(dp.sims_interacting, 'highCF_lowPW'))
gen_interacting_per_per("highCF", "highPW", dp.make_file_path(dp.sims_interacting, 'highCF_highPW'))
gen_interacting_per_per("highCF", "lowBW", dp.make_file_path(dp.sims_interacting, 'highCF_lowBW'))
gen_interacting_per_per("highCF", "highBW", dp.make_file_path(dp.sims_interacting, 'highCF_highBW'))
gen_interacting_per_per("lowPW", "highPW", dp.make_file_path(dp.sims_interacting, 'lowPW_highPW'))
gen_interacting_per_per("lowPW", "lowBW", dp.make_file_path(dp.sims_interacting, 'lowPW_lowBW'))
gen_interacting_per_per("lowPW", "highBW", dp.make_file_path(dp.sims_interacting, 'lowPW_highBW'))
gen_interacting_per_per("highPW", "lowBW", dp.make_file_path(dp.sims_interacting, 'highPW_lowBW'))
gen_interacting_per_per("highPW", "highBW", dp.make_file_path(dp.sims_interacting, 'highPW_highBW'))
gen_interacting_per_per("lowBW", "highBW", dp.make_file_path(dp.sims_interacting, 'lowBW_highBW'))
if __name__ == "__main__":
main()
|
# Copyright 2020 Graphcore Ltd.
import unittest
import os
import sys
import subprocess
import pytest
from tempfile import TemporaryDirectory
def run_resnet(**kwargs):
with TemporaryDirectory() as tempdir:
kwargs['--log-dir'] = tempdir
cmd = ["python" + str(sys.version_info[0]),
'resnet_synthetic_benchmark.py']
# Flatten kwargs and convert to strings
args = [str(item) for sublist in kwargs.items() for item in sublist if item != '']
cmd.extend(args)
out = subprocess.check_output(cmd, cwd=os.path.dirname(__file__)).decode("utf-8")
print(out)
return out
class TestPopARTResNetSyntheticBenchmarks(unittest.TestCase):
"""Tests for ResNet popART synthetic benchmarks"""
# Resnet inference
@pytest.mark.ipus(1)
def test_resnet_20_inference_batch_size_32(self):
out = run_resnet(**{'--size': "20",
'--batch-size': 32,
'--norm-type': 'BATCH',
'--shortcut-type': 'B',
'--use-data': ''})
@pytest.mark.ipus(1)
def test_resnet_18_inference_batch_size_1(self):
out = run_resnet(**{'--size': "18",
'--batch-size': 1,
'--norm-type': 'GROUP'})
@pytest.mark.ipus(1)
def test_resnet_18_inference_batch_size_16(self):
out = run_resnet(**{'--size': "18",
'--batch-size': 16,
'--norm-type': 'BATCH'})
@pytest.mark.ipus(1)
def test_resnet_50_inference_batch_size_8(self):
out = run_resnet(**{'--size': "50",
'--batch-size': 8,
'--norm-type': 'NONE'})
@pytest.mark.ipus(4)
def test_resnet_50_inference_batch_size_8_pipelined_4ipus(self):
out = run_resnet(**{'--size': "50",
'--batch-size': 8,
'--norm-type': 'NONE',
'--shards': 4,
'--pipeline': ''})
# ResNet training
@pytest.mark.ipus(1)
def test_resnet_18_train_batch_size_4(self):
out = run_resnet(**{'--size': "18",
'--batch-size': 4,
'--norm-type': 'GROUP',
'--mode': 'train'})
@pytest.mark.ipus(2)
def test_resnet_18_train_batch_size_4_pipelined_2ipus(self):
out = run_resnet(**{'--size': "18",
'--batch-size': 4,
'--norm-type': 'GROUP',
'--mode': 'train',
'--shards': 2,
'--pipeline': ''})
@pytest.mark.ipus(1)
def test_resnet_50_train_batch_size_1(self):
out = run_resnet(**{'--size': "50",
'--batch-size': 1,
'--norm-type': 'GROUP',
'--mode': 'train'})
@pytest.mark.ipus(2)
def test_resnet_50_train_sharded(self):
out = run_resnet(**{'--size': "50",
'--batch-size': 2,
'--norm-type': 'GROUP',
'--mode': 'train',
'--shards': 2})
@pytest.mark.ipus(1)
    def test_resnet_20_train_batch_size_16(self):
out = run_resnet(**{'--size': "20",
'--batch-size': 16,
'--norm-type': 'BATCH',
'--mode': 'train'})
|
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from io import StringIO
import socket
import mock
import six
import testtools
from tempest.lib.common import ssh
from tempest.lib import exceptions
from tempest.tests import base
import tempest.tests.utils as utils
class TestSshClient(base.TestCase):
SELECT_POLLIN = 1
@mock.patch('paramiko.RSAKey.from_private_key')
@mock.patch('six.StringIO')
def test_pkey_calls_paramiko_RSAKey(self, cs_mock, rsa_mock):
cs_mock.return_value = mock.sentinel.csio
pkey = 'mykey'
ssh.Client('localhost', 'root', pkey=pkey)
rsa_mock.assert_called_once_with(mock.sentinel.csio)
cs_mock.assert_called_once_with('mykey')
rsa_mock.reset_mock()
cs_mock.reset_mock()
pkey = mock.sentinel.pkey
# Shouldn't call out to load a file from RSAKey, since
# a sentinel isn't a basestring...
ssh.Client('localhost', 'root', pkey=pkey)
self.assertEqual(0, rsa_mock.call_count)
self.assertEqual(0, cs_mock.call_count)
def _set_ssh_connection_mocks(self):
client_mock = mock.MagicMock()
client_mock.connect.return_value = True
return (self.patch('paramiko.SSHClient'),
self.patch('paramiko.AutoAddPolicy'),
client_mock)
def test_get_ssh_connection(self):
c_mock, aa_mock, client_mock = self._set_ssh_connection_mocks()
s_mock = self.patch('time.sleep')
c_mock.return_value = client_mock
aa_mock.return_value = mock.sentinel.aa
# Test normal case for successful connection on first try
client = ssh.Client('localhost', 'root', timeout=2)
client._get_ssh_connection(sleep=1)
aa_mock.assert_called_once_with()
client_mock.set_missing_host_key_policy.assert_called_once_with(
mock.sentinel.aa)
expected_connect = [mock.call(
'localhost',
port=22,
username='root',
pkey=None,
key_filename=None,
look_for_keys=False,
timeout=10.0,
password=None
)]
self.assertEqual(expected_connect, client_mock.connect.mock_calls)
self.assertEqual(0, s_mock.call_count)
@mock.patch('time.sleep')
    def test_get_ssh_connection_two_attempts(self, sleep_mock):
c_mock, aa_mock, client_mock = self._set_ssh_connection_mocks()
c_mock.return_value = client_mock
client_mock.connect.side_effect = [
socket.error,
mock.MagicMock()
]
client = ssh.Client('localhost', 'root', timeout=1)
client._get_ssh_connection(sleep=1)
        # We expect a single sleep of 2 seconds: the base sleep is 1 and the backoff adds 1
sleep_mock.assert_called_once_with(2)
self.assertEqual(2, client_mock.connect.call_count)
def test_get_ssh_connection_timeout(self):
c_mock, aa_mock, client_mock = self._set_ssh_connection_mocks()
timeout = 2
time_mock = self.patch('time.time')
time_mock.side_effect = utils.generate_timeout_series(timeout + 1)
c_mock.return_value = client_mock
client_mock.connect.side_effect = [
socket.error,
socket.error,
socket.error,
]
client = ssh.Client('localhost', 'root', timeout=timeout)
        # We need to mock LOG here because LOG.info() calls time.time()
        # in order to prepend a timestamp.
with mock.patch.object(ssh, 'LOG'):
self.assertRaises(exceptions.SSHTimeout,
client._get_ssh_connection)
# time.time() should be called twice, first to start the timer
# and then to compute the timedelta
self.assertEqual(2, time_mock.call_count)
@mock.patch('select.POLLIN', SELECT_POLLIN, create=True)
def test_timeout_in_exec_command(self):
chan_mock, poll_mock, _ = self._set_mocks_for_select([0, 0, 0], True)
# Test for a timeout condition immediately raised
client = ssh.Client('localhost', 'root', timeout=2)
with testtools.ExpectedException(exceptions.TimeoutException):
client.exec_command("test")
chan_mock.fileno.assert_called_once_with()
chan_mock.exec_command.assert_called_once_with("test")
chan_mock.shutdown_write.assert_called_once_with()
poll_mock.register.assert_called_once_with(
chan_mock, self.SELECT_POLLIN)
poll_mock.poll.assert_called_once_with(10)
@mock.patch('select.POLLIN', SELECT_POLLIN, create=True)
def test_exec_command(self):
chan_mock, poll_mock, select_mock = (
self._set_mocks_for_select([[1, 0, 0]], True))
chan_mock.recv_exit_status.return_value = 0
chan_mock.recv.return_value = b''
chan_mock.recv_stderr.return_value = b''
client = ssh.Client('localhost', 'root', timeout=2)
client.exec_command("test")
chan_mock.fileno.assert_called_once_with()
chan_mock.exec_command.assert_called_once_with("test")
chan_mock.shutdown_write.assert_called_once_with()
select_mock.assert_called_once_with()
poll_mock.register.assert_called_once_with(
chan_mock, self.SELECT_POLLIN)
poll_mock.poll.assert_called_once_with(10)
chan_mock.recv_ready.assert_called_once_with()
chan_mock.recv.assert_called_once_with(1024)
chan_mock.recv_stderr_ready.assert_called_once_with()
chan_mock.recv_stderr.assert_called_once_with(1024)
chan_mock.recv_exit_status.assert_called_once_with()
def _set_mocks_for_select(self, poll_data, ito_value=False):
gsc_mock = self.patch('tempest.lib.common.ssh.Client.'
'_get_ssh_connection')
ito_mock = self.patch('tempest.lib.common.ssh.Client._is_timed_out')
csp_mock = self.patch(
'tempest.lib.common.ssh.Client._can_system_poll')
csp_mock.return_value = True
select_mock = self.patch('select.poll', create=True)
client_mock = mock.MagicMock()
tran_mock = mock.MagicMock()
chan_mock = mock.MagicMock()
poll_mock = mock.MagicMock()
select_mock.return_value = poll_mock
gsc_mock.return_value = client_mock
ito_mock.return_value = ito_value
client_mock.get_transport.return_value = tran_mock
tran_mock.open_session().__enter__.return_value = chan_mock
if isinstance(poll_data[0], list):
poll_mock.poll.side_effect = poll_data
else:
poll_mock.poll.return_value = poll_data
return chan_mock, poll_mock, select_mock
_utf8_string = six.unichr(1071)
_utf8_bytes = _utf8_string.encode("utf-8")
@mock.patch('select.POLLIN', SELECT_POLLIN, create=True)
def test_exec_good_command_output(self):
chan_mock, poll_mock, _ = self._set_mocks_for_select([1, 0, 0])
closed_prop = mock.PropertyMock(return_value=True)
type(chan_mock).closed = closed_prop
chan_mock.recv_exit_status.return_value = 0
chan_mock.recv.side_effect = [self._utf8_bytes[0:1],
self._utf8_bytes[1:], b'R', b'']
chan_mock.recv_stderr.return_value = b''
client = ssh.Client('localhost', 'root', timeout=2)
out_data = client.exec_command("test")
self.assertEqual(self._utf8_string + 'R', out_data)
@mock.patch('select.POLLIN', SELECT_POLLIN, create=True)
def test_exec_bad_command_output(self):
chan_mock, poll_mock, _ = self._set_mocks_for_select([1, 0, 0])
closed_prop = mock.PropertyMock(return_value=True)
type(chan_mock).closed = closed_prop
chan_mock.recv_exit_status.return_value = 1
chan_mock.recv.return_value = b''
chan_mock.recv_stderr.side_effect = [b'R', self._utf8_bytes[0:1],
self._utf8_bytes[1:], b'']
client = ssh.Client('localhost', 'root', timeout=2)
exc = self.assertRaises(exceptions.SSHExecCommandFailed,
client.exec_command, "test")
self.assertIn('R' + self._utf8_string, six.text_type(exc))
def test_exec_command_no_select(self):
gsc_mock = self.patch('tempest.lib.common.ssh.Client.'
'_get_ssh_connection')
csp_mock = self.patch(
'tempest.lib.common.ssh.Client._can_system_poll')
csp_mock.return_value = False
select_mock = self.patch('select.poll', create=True)
client_mock = mock.MagicMock()
tran_mock = mock.MagicMock()
chan_mock = mock.MagicMock()
# Test for proper reading of STDOUT and STDERROR
gsc_mock.return_value = client_mock
client_mock.get_transport.return_value = tran_mock
tran_mock.open_session().__enter__.return_value = chan_mock
chan_mock.recv_exit_status.return_value = 0
std_out_mock = mock.MagicMock(StringIO)
std_err_mock = mock.MagicMock(StringIO)
chan_mock.makefile.return_value = std_out_mock
chan_mock.makefile_stderr.return_value = std_err_mock
client = ssh.Client('localhost', 'root', timeout=2)
client.exec_command("test")
chan_mock.makefile.assert_called_once_with('rb', 1024)
chan_mock.makefile_stderr.assert_called_once_with('rb', 1024)
std_out_mock.read.assert_called_once_with()
std_err_mock.read.assert_called_once_with()
self.assertFalse(select_mock.called)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2020 Confluent Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import re
from base64 import b64decode
from collections import defaultdict
import pytest
import requests_mock
from requests_mock import create_response
from confluent_kafka.schema_registry.schema_registry_client import \
SchemaRegistryClient
work_dir = os.path.dirname(os.path.realpath(__file__))
@pytest.fixture()
def mock_schema_registry():
return MockSchemaRegistryClient
class MockSchemaRegistryClient(SchemaRegistryClient):
"""
Schema Registry mock.
The MockSchemaRegistry client uses special uri paths to invoke specific
behavior such as coercing an error. They are listed in the table below.
The paths are formed using special keywords referred to as triggers.
Triggers are used to inform the MockSchemaRegistry how to behave when
    receiving a request. For instance the `notfound` trigger word when placed
    in the subject field of the path will return an HTTP status code of 404 and
    the appropriate Schema Registry error (40401, Subject not found).
    Whenever a response includes content from the request body, the same data
    from the request is echoed back.
For example the following request will return 123:
    DELETE /subjects/mysubject/versions/123
    or
    SchemaRegistryClient.delete_version("mysubject", 123)
All response items which can't be fulfilled with the contents of the request
are populated with constants. Which may be referenced when validating the
response.
- SCHEMA_ID = 47
- VERSION = 3
- VERSIONS = [1, 2, 3, 4]
- SCHEMA = 'basic_schema.avsc'
- SUBJECTS = ['subject1', 'subject2'].
Trigger keywords may also be used in the body of the requests. At this time
the only endpoint which supports this is /config which will return an
`Invalid compatibility level` error.
To coerce Authentication errors configure credentials to
not match MockSchemaRegistryClient.USERINFO.
Request paths to trigger exceptions:
+--------+----------------------------------+-------+------------------------------+
| Method | Request Path | Code | Description |
+========+==================================+=======+==============================+
| GET | /schemas/ids/404 | 40403 | Schema not found |
+--------+----------------------------------+-------+------------------------------+
| GET | /subjects/notfound/versions | 40401 | Subject not found |
+--------+----------------------------------+-------+------------------------------+
| GET | /subjects/notfound/versions/[0-9]| 40401 | Subject not found |
+--------+----------------------------------+-------+------------------------------+
| GET | /subjects/notfound/versions/404 | 40402 | Version not found |
+--------+----------------------------------+-------+------------------------------+
| GET | /subjects/notfound/versions/422 | 42202 | Invalid version |
+--------+----------------------------------+-------+------------------------------+
| DELETE | /subjects/notfound | 40401 | Subject not found |
+--------+----------------------------------+-------+------------------------------+
| POST | /subjects/conflict/versions | 409* | Incompatible Schema |
+--------+----------------------------------+-------+------------------------------+
| POST | /subjects/invalid/versions | 42201 | Invalid Schema |
+--------+----------------------------------+-------+------------------------------+
| POST | /subjects/notfound | 40401 | Subject not found |
+--------+----------------------------------+-------+------------------------------+
| POST | /subjects/schemanotfound | 40403 | Schema not found |
+--------+----------------------------------+-------+------------------------------+
| DELETE | /subjects/notfound | 40401 | Subject not found |
+--------+----------------------------------+-------+------------------------------+
| DELETE | /subjects/notfound/versions/[0-9]| 40401 | Subject not found |
+--------+----------------------------------+-------+------------------------------+
| DELETE | /subjects/notfound/versions/404 | 40402 | Version not found |
+--------+----------------------------------+-------+------------------------------+
| DELETE | /subjects/notfound/versions/422 | 42202 | Invalid version |
+--------+----------------------------------+-------+------------------------------+
| GET | /config/notconfig | 40401 | Subject not found |
+--------+----------------------------------+-------+------------------------------+
| PUT | /config** | 42203 | Invalid compatibility level |
+--------+----------------------------------+-------+------------------------------+
* POST /subjects/{}/versions does not follow the documented API error.
** PUT /config reacts to a trigger in the body: - {"compatibility": "FULL"}
    When evaluating special paths with overlapping trigger words, the rightmost
    keyword takes precedence; e.g. Version not found is returned for the path
    /subjects/notfound/versions/404.
    The config endpoint has a special compatibility level "INVALID". This should
    be used to verify the handling of invalid compatibility settings.
"""
# request paths
schemas = re.compile("/schemas/ids/([0-9]*)$")
subjects = re.compile("/subjects/?(.*)$")
subject_versions = re.compile("/subjects/(.*)/versions/?(.*)$")
compatibility = re.compile("/config/?(.*)$")
# constants
SCHEMA_ID = 47
VERSION = 3
VERSIONS = [1, 2, 3, 4]
SCHEMA = 'basic_schema.avsc'
SUBJECTS = ['subject1', 'subject2']
USERINFO = 'mock_user:mock_password'
# Counts requests handled per path by HTTP method
# {HTTP method: { path : count}}
counter = {'DELETE': defaultdict(int),
'GET': defaultdict(int),
'POST': defaultdict(int),
'PUT': defaultdict(int)}
def __init__(self, conf):
super(MockSchemaRegistryClient, self).__init__(conf)
adapter = requests_mock.Adapter()
adapter.register_uri('GET', self.compatibility,
json=self.get_compatibility_callback)
adapter.register_uri('PUT', self.compatibility,
json=self.put_compatibility_callback)
adapter.register_uri('GET', self.schemas,
json=self.get_schemas_callback)
adapter.register_uri('DELETE', self.subjects,
json=self.delete_subject_callback)
adapter.register_uri('GET', self.subjects,
json=self.get_subject_callback)
adapter.register_uri('POST', self.subjects,
json=self.post_subject_callback)
adapter.register_uri('GET', self.subject_versions,
json=self.get_subject_version_callback)
adapter.register_uri('DELETE', self.subject_versions,
json=self.delete_subject_version_callback)
adapter.register_uri('POST', self.subject_versions,
json=self.post_subject_version_callback)
adapter.add_matcher(self._auth_matcher)
self._rest_client.session.mount('http://', adapter)
@classmethod
def _auth_matcher(cls, request):
headers = request._request.headers
authinfo = headers.get('Authorization', None)
# Pass request to downstream matchers
if authinfo is None:
return None
# We only support the BASIC scheme today
scheme, userinfo = authinfo.split(" ")
        if b64decode(userinfo).decode('utf-8') == cls.USERINFO:
return None
unauthorized = {'error_code': 401,
'message': "401 Unauthorized"}
return create_response(request=request,
status_code=401,
json=unauthorized)
@staticmethod
def _load_avsc(name):
with open(os.path.join(work_dir, '..', 'integration', 'schema_registry',
'data', name)) as fd:
return fd.read()
def get_compatibility_callback(self, request, context):
self.counter['GET'][request.path] += 1
path_match = re.match(self.compatibility, request.path)
subject = path_match.group(1)
if subject == "notfound":
context.status_code = 404
return {'error_code': 40401,
'message': "Subject not found"}
context.status_code = 200
return {'compatibilityLevel': 'FULL'}
def put_compatibility_callback(self, request, context):
self.counter['PUT'][request.path] += 1
level = request.json().get('compatibility')
if level == "INVALID":
context.status_code = 422
return {'error_code': 42203,
'message': "Invalid compatibility level"}
context.status_code = 200
return request.json()
def delete_subject_callback(self, request, context):
self.counter['DELETE'][request.path] += 1
path_match = re.match(self.subjects, request.path)
subject = path_match.group(1)
if subject == "notfound":
context.status_code = 404
return {'error_code': 40401,
'message': "Subject not found"}
context.status_code = 200
return self.VERSIONS
def get_subject_callback(self, request, context):
self.counter['GET'][request.path] += 1
context.status_code = 200
return self.SUBJECTS
def post_subject_callback(self, request, context):
self.counter['POST'][request.path] += 1
path_match = re.match(self.subjects, request.path)
subject = path_match.group(1)
if subject == 'notfound':
context.status_code = 404
return {'error_code': 40401,
'message': "Subject not found"}
if subject == 'schemanotfound':
context.status_code = 404
return {'error_code': 40403,
'message': "Schema not found"}
context.status_code = 200
return {'subject': subject,
"id": self.SCHEMA_ID,
"version": self.VERSION,
"schema": request.json()['schema']}
def get_schemas_callback(self, request, context):
self.counter['GET'][request.path] += 1
path_match = re.match(self.schemas, request.path)
schema_id = path_match.group(1)
if int(schema_id) == 404:
context.status_code = 404
return {'error_code': 40403,
'message': "Schema not found"}
context.status_code = 200
return {'schema': self._load_avsc(self.SCHEMA)}
def get_subject_version_callback(self, request, context):
self.counter['GET'][request.path] += 1
path_match = re.match(self.subject_versions, request.path)
subject = path_match.group(1)
version = path_match.group(2)
if int(version) == 404:
context.status_code = 404
return {'error_code': 40402,
'message': "Version not found"}
if int(version) == 422:
context.status_code = 422
return {'error_code': 42202,
'message': "Invalid version"}
if subject == 'notfound':
context.status_code = 404
return {'error_code': 40401,
'message': "Subject not found"}
context.status_code = 200
return {'subject': subject,
'id': self.SCHEMA_ID,
'version': int(version),
'schema': self._load_avsc(self.SCHEMA)}
def delete_subject_version_callback(self, request, context):
self.counter['DELETE'][request.path] += 1
path_match = re.match(self.subject_versions, request.path)
subject = path_match.group(1)
version = path_match.group(2)
if int(version) == 404:
context.status_code = 404
return {"error_code": 40402,
"message": "Version not found"}
if int(version) == 422:
context.status_code = 422
return {"error_code": 42202,
"message": "Invalid version"}
if subject == "notfound":
context.status_code = 404
return {"error_code": 40401,
"message": "Subject not found"}
context.status_code = 200
return int(version)
def post_subject_version_callback(self, request, context):
self.counter['POST'][request.path] += 1
path_match = re.match(self.subject_versions, request.path)
subject = path_match.group(1)
if subject == "conflict":
context.status_code = 409
# oddly the Schema Registry does not send a proper error for this.
return "Incompatible Schema"
if subject == "invalid":
context.status_code = 422
return {'error_code': 42201,
'message': "Invalid Schema"}
else:
context.status_code = 200
return {'id': self.SCHEMA_ID}
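# Usage sketch (illustrative; the URL is an arbitrary stand-in): construct the
# mock exactly like the real client, then drive it with trigger subjects.
#   sr = MockSchemaRegistryClient({'url': 'http://SchemaRegistry:65534'})
#   sr.get_subjects()              # -> ['subject1', 'subject2']
#   sr.delete_subject('notfound')  # surfaces the 40401 Subject not found error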
@pytest.fixture("package")
def load_avsc():
def get_handle(name):
with open(os.path.join(work_dir, '..', 'integration', 'schema_registry',
'data', name)) as fd:
return fd.read()
return get_handle
|
from src.trending_strategies.aSPET_trending_tracks_strategy import (
TrendingTracksStrategyaSPET,
)
from src.trending_strategies.ePWJD_trending_playlists_strategy import (
TrendingPlaylistsStrategyePWJD,
)
from src.trending_strategies.ePWJD_trending_tracks_strategy import (
TrendingTracksStrategyePWJD,
)
from src.trending_strategies.ePWJD_underground_trending_tracks_strategy import (
UndergroundTrendingTracksStrategyePWJD,
)
from src.trending_strategies.trending_type_and_version import (
TrendingType,
TrendingVersion,
)
DEFAULT_TRENDING_VERSIONS = {
TrendingType.TRACKS: TrendingVersion.ePWJD,
TrendingType.UNDERGROUND_TRACKS: TrendingVersion.ePWJD,
TrendingType.PLAYLISTS: TrendingVersion.ePWJD,
}
class TrendingStrategyFactory:
def __init__(self):
self.strategies = {
TrendingType.TRACKS: {
TrendingVersion.ePWJD: TrendingTracksStrategyePWJD(),
TrendingVersion.aSPET: TrendingTracksStrategyaSPET(),
},
TrendingType.UNDERGROUND_TRACKS: {
TrendingVersion.ePWJD: UndergroundTrendingTracksStrategyePWJD()
},
TrendingType.PLAYLISTS: {
TrendingVersion.ePWJD: TrendingPlaylistsStrategyePWJD()
},
}
def get_strategy(self, trending_type, version=None):
if not version:
version = DEFAULT_TRENDING_VERSIONS[trending_type]
return self.strategies[trending_type][version]
def get_versions_for_type(self, trending_type):
return self.strategies[trending_type]
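# Usage sketch (illustrative):
#   factory = TrendingStrategyFactory()
#   factory.get_strategy(TrendingType.TRACKS)  # default -> TrendingTracksStrategyePWJD
#   factory.get_strategy(TrendingType.TRACKS, TrendingVersion.aSPET)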
|
""" utility functions used by openmdao gui
"""
import sys
import os
import os.path
from os.path import isfile, isdir, exists, join, getsize, split
import webbrowser
import json
from distutils.spawn import find_executable
from openmdao.util.fileutil import find_files
def ensure_dir(d):
''' create directory if it doesn't exist
'''
if not isdir(d):
os.makedirs(d)
def print_list(list):
''' print the contents of a list
'''
for item in list:
print item
def print_dict(dict):
''' print the contents of a dictionary
'''
for item in dict.items():
key, value = item
print str(key) + ' = ' + str(value)
def print_json(data):
''' pretty print json data
'''
print json.dumps(json.loads(str(data)), indent=2)
def makenode(doc, path):
''' Return a document node contains a directory tree for the path.
modified version of:
http://code.activestate.com/recipes/305313-xml-directory-tree/
'''
node = doc.createElement('dir')
node.setAttribute('name', path)
for f in os.listdir(path):
fullname = join(path, f)
if isdir(fullname):
elem = makenode(doc, fullname)
else:
elem = doc.createElement('file')
elem.setAttribute('name', f)
node.appendChild(elem)
return node
def filedict(path):
''' create a nested dictionary for a file structure with
names relative to the starting directory.
'''
rootlen = len(path)
dirs = { path: {} }
for filename in find_files(path, showdirs=True):
dirname, basename = split(filename)
if isdir(filename):
dirs[filename] = {}
dirs[dirname][filename[rootlen:]] = dirs[filename]
else:
try:
dirs[dirname][filename[rootlen:]] = getsize(filename)
except OSError as err:
# during a mercurial commit we got an error during
# getsize() of a lock file that was no longer there,
# so check file existence here and only raise an exception
# if the file still exists.
if exists(filename):
raise
return dirs[path]
def unique_shortnames(names):
"""Return a dict containing full name vs. short name where short name
is still unique within the given list. Each entry in the initial list
of dotted names is assumed to be unique.
"""
looking = set(names)
dct = dict([(n, n.split('.')) for n in names])
level = 1
while looking:
shorts = dict([(n, '.'.join(dct[n][len(dct[n]) - level:len(dct[n])])) for n in looking])
shortcounts = dict([(s, 0) for n, s in shorts.items()])
for n, shrt in shorts.items():
shortcounts[shrt] += 1
for n, shrt in shorts.items():
if shortcounts[shrt] == 1:
looking.remove(n)
dct[n] = shorts[n]
level += 1
return dct
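# For example: unique_shortnames(['pkg.a.Comp', 'pkg.b.Comp']) yields
# {'pkg.a.Comp': 'a.Comp', 'pkg.b.Comp': 'b.Comp'}; a single trailing
# component ('Comp') is ambiguous, so the search extends one level deeper.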
def packagedict(types):
''' create a nested dict for a package structure
'''
dct = {}
namedict = unique_shortnames([t[0] for t in types])
for typ, meta in types:
m = meta.copy()
m['modpath'] = typ
dct[namedict[typ]] = m
return dct
def get_executable_path(executable_names):
'''Look for an executable given a list of the possible names
'''
path = None
for name in executable_names:
path = find_executable(name)
if path:
break
return path
def launch_browser(port, preferred_browser=None):
''' launch web browser on specified port
try to use preferred browser if specified, fall back to default
(chrome will launch in "app mode")
'''
url = 'http://localhost:' + str(port)
print 'Opening URL in browser: ' + url + ' (pid=' + str(os.getpid()) + ')'
# webbrowser doesn't know about chrome, so try to find it
if preferred_browser and preferred_browser.lower() == 'chrome':
if sys.platform == 'win32':
# Windows7
USERPROFILE = os.getenv("USERPROFILE")
if USERPROFILE:
                CHROMEPATH = USERPROFILE + r'\AppData\Local\Google\Chrome\Application\chrome.exe'
if os.path.isfile(CHROMEPATH):
preferred_browser = CHROMEPATH.replace('\\', '\\\\') + ' --app=%s &'
elif sys.platform == 'darwin':
# Mac OSX
CHROMEPATH = '/Applications/Google Chrome.app/Contents/MacOS/Google Chrome'
if os.path.isfile(CHROMEPATH):
CHROMEPATH = CHROMEPATH.replace('Google Chrome', 'Google\ Chrome')
preferred_browser = 'open -a ' + CHROMEPATH + ' %s'
elif sys.platform == 'linux2':
# Linux
CHROMEPATH = get_executable_path(["google-chrome", "chrome", "chromium-browser"])
if CHROMEPATH and os.path.isfile(CHROMEPATH):
preferred_browser = CHROMEPATH + ' --app=%s &'
# try to get preferred browser, fall back to default
if preferred_browser:
try:
browser = webbrowser.get(preferred_browser)
except:
print "Couldn't get preferred browser (" + preferred_browser + "), using default..."
browser = webbrowser.get()
else:
browser = webbrowser.get()
# open new browser window (may open in a tab depending on user preferences, etc.)
if browser:
browser.open(url, 1, True)
print "Opened in", browser.name
else:
print "Couldn't launch browser: " + str(browser)
|
import datetime
def max_range(minimum, maximum):
    # Build the list [minimum, maximum) one element at a time; the bounded
    # condition also avoids an infinite loop when maximum <= minimum
    maxlist = []
    i = minimum
    while i < maximum:
        maxlist.append(i)
        i = i + 1
    return maxlist
start = datetime.datetime.now()
print "x|y"
print "==="
x = max_range(1, 10)
print x
"""
while True:
x = x + 1
if x == 3:
continue
y = x * x + 3
table = "{}|{}".format(x, y)
print table
if x == 10:
break
time_taken = datetime.datetime.now() - start
"""
|
from os import path
from setuptools import setup, find_packages
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, "README.md"), encoding="utf-8") as f:
long_description = f.read()
setup(
author="Luke Campagnola",
author_email="lukec@alleninstitute.org",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Other Environment",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python",
"Topic :: Scientific/Engineering",
"Topic :: Software Development :: Libraries :: Python Modules",
],
description="Functions and modular user interface tools for analysis of patch clamp experiment data.",
    extras_require={
"ACQ4": ["acq4"],
"jit": ["numba"],
"MIES": ["h5py"],
"test": ["pytest"],
"ui": ["pyqtgraph"],
},
install_requires=["lmfit", "numpy", "scipy"],
keywords="neuroscience analysis neurodata without borders nwb",
license="MIT",
long_description=long_description,
long_description_content_type="text/markdown",
name="neuroanalysis",
packages=find_packages(),
url="https://github.com/AllenInstitute/neuroanalysis",
version="0.0.1",
)
|
"""
Configuration of the system.
Paths, parameters and related values.
"""
import os
# Directory of data files
DATA_PATH = "./data/"
# Users data
users = os.path.join(DATA_PATH, "users.csv")
# Movies data
movies = os.path.join(DATA_PATH, "movie-titles.csv")
# Movies tags
movies_tags = os.path.join(DATA_PATH, "movie-tags.csv")
# Ratings of the movies by the users
ratings = os.path.join(DATA_PATH, "ratings.csv")
# Serialized object to avoid reloading and re-parsing the csv files
serialized = os.path.join(DATA_PATH, "data.pickle")
# Serialized object for avoiding recalculating similarities between movies
similarities = os.path.join(DATA_PATH, "similarities.pickle")
# Matrix from SVD
matrix = os.path.join(DATA_PATH, "svd.pickle")
|
import unittest
from streamlink.plugins.oldlivestream import OldLivestream
class TestPluginOldLivestream(unittest.TestCase):
def test_can_handle_url(self):
# should match
self.assertTrue(OldLivestream.can_handle_url("https://cdn.livestream.com/embed/channel"))
self.assertTrue(OldLivestream.can_handle_url("https://original.livestream.com/embed/channel"))
self.assertTrue(OldLivestream.can_handle_url("https://original.livestream.com/channel"))
# shouldn't match
self.assertFalse(OldLivestream.can_handle_url("https://cdn.livestream.com"))
self.assertFalse(OldLivestream.can_handle_url("https://original.livestream.com"))
# other plugin livestream.py
self.assertFalse(OldLivestream.can_handle_url("https://livestream.com"))
self.assertFalse(OldLivestream.can_handle_url("https://www.livestream.com"))
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import zipfile
import logging
import tempfile
from eplist import utils
from eplist.episode import Episode
from eplist.settings import Settings
try:
from bs4 import BeautifulSoup as Soup
except ImportError:
try:
from BeautifulSoup import BeautifulStoneSoup as Soup
except ImportError:
logging.critical("Error: BeautifulSoup was not found, unable to parse AniDB")
priority = 1
def poll(title):
api_key = Settings.tvdb_key
if not api_key:
logging.warn("The TvDB Api key was not found, unable to poll the TvDB")
return utils.show_not_found
try:
Soup
except NameError:
return utils.show_not_found
cleanTitle = utils.quote_plus(title)
#1) First we need to find the series ID
seriesIdLoc = "http://www.thetvdb.com/api/GetSeries.php?seriesname={0}".format(cleanTitle)
seriesFileDesc = utils.get_url_descriptor(seriesIdLoc)
if seriesFileDesc is None:
return utils.show_not_found
seriesIdXml = Soup(seriesFileDesc.content, convertEntities=Soup.HTML_ENTITIES)
seriesIds = seriesIdXml.findAll('series')
if not seriesIds:
return utils.show_not_found
## TODO: Handle the series conflicts in a sane manner
if len(seriesIds) > 1:
logging.warn("Conflict with series title ID on TVdB")
for seriesName in seriesIdXml.findAll('seriesname'):
logging.info("Alternate series: {}".format(seriesName.getText()))
seriesID = seriesIds[0].seriesid.getString()
seriesIdXml.close()
#2) Get base info zip file
infoLoc = "http://www.thetvdb.com/api/{0}/series/{1}/all/en.zip".format(api_key, seriesID)
infoFileDesc = utils.get_url_descriptor(infoLoc)
if infoFileDesc is None:
return utils.show_not_found
tempZip = tempfile.TemporaryFile(suffix='.zip')
tempZip.seek(0)
tempZip.write(infoFileDesc.content)
with zipfile.ZipFile(tempZip) as z:
if 'en.xml' not in z.namelist():
logging.error("English episode list was not found")
return utils.show_not_found
with z.open('en.xml') as d:
soup = Soup(d, convertEntities=Soup.HTML_ENTITIES)
#3) Now we have the xml data in the soup variable, just populate the list
count = 1
eps = []
for data in soup.findAll('episode'):
name = data.episodename.getText()
season = int(data.seasonnumber.getText())
num = int(data.episodenumber.getText())
type_ = 'Episode'
if name == "":
logging.info("The name pulled from TvDB appears to be empty")
continue
if 'commentary' in name.lower():
continue
        if season < 0:
type_ = "OVA"
count = num
season = 1
eps.append(Episode(name=name, number=num, season=season,
count=count, type=type_))
count += 1
soup.close()
tempZip.close()
return eps
|
from django.contrib.auth.models import BaseUserManager, AbstractBaseUser
from django.db import models
from django.shortcuts import reverse
class CustomUserManager(BaseUserManager):
def create_user(self, email, username, password):
if not email:
raise ValueError('Email is required')
user = self.model(
email=CustomUserManager.normalize_email(email),
username=username,
)
user.set_password(password)
user.save(using=self._db)
return user
class Reviewer(AbstractBaseUser):
objects = CustomUserManager()
email = models.EmailField(
max_length=100,
unique=True,
)
username = models.CharField(
max_length=30,
unique=True,
)
USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = ['username']
def get_absolute_url(self):
return reverse('reviewers')
def reviews(self):
return self.review_set.filter(status='OPEN')
|
#!/usr/bin/python3
class Color:
white = (255, 255, 255)
black = (0, 0, 0)
red = (255, 0, 0)
green = (0, 255, 0)
blue = (0, 0, 255)
medium_blue = (0, 0, 205)
light_blue = (173, 216, 230)
yellow = (247, 251, 0)
pink = (255, 20, 147)
class Screen:
width = 640
height = 640
class Font:
space_font = "assets/space_age.ttf"
|
try:
import requests
import json
except ModuleNotFoundError:
print('Modules not found. Please run pip install -r requirements.txt to install the required modules.')
exit()
def weather_info(data):
max_temp = data['daily'][1]['temp']['max']
min_temp = data['daily'][1]['temp']['min']
current_temp = data['current']['temp']
weather = data['daily'][1]['weather'][0]['description']
    print(f'maximum temperature: {max_temp} C')
    print(f'minimum temperature: {min_temp} C')
    print(f'current temperature: {current_temp} C')
    print(f'today\'s condition: {weather}')
def api_data():
# goto https://openweathermap.org/api/one-call-api for reference
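    # NOTE: the {lat}, {lon}, {part} and {YOUR API KEY} placeholders below must
    # be substituted (e.g. via str.format) before the request will succeed.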
url = 'https://api.openweathermap.org/data/2.5/onecall?lat={lat}&lon={lon}&exclude={part}&appid={YOUR API KEY}'
res = requests.get(url)
data = res.text
parsed = dict(json.loads(data))
return parsed
if __name__ == '__main__':
print('Today\'s weather forecast:\n')
weather_info(api_data())
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Libpcap(AutotoolsPackage):
"""libpcap is a portable library in C/C++ for packet capture."""
homepage = "https://www.tcpdump.org/"
list_url = "http://www.tcpdump.org/release/"
url = "http://www.tcpdump.org/release/libpcap-1.8.1.tar.gz"
version('1.10.0', sha256='8d12b42623eeefee872f123bd0dc85d535b00df4d42e865f993c40f7bfc92b1e')
version('1.9.1', sha256='635237637c5b619bcceba91900666b64d56ecb7be63f298f601ec786ce087094')
version('1.8.1', sha256='673dbc69fdc3f5a86fb5759ab19899039a8e5e6c631749e48dcd9c6f0c83541e')
depends_on('flex', type='build')
depends_on('bison', type='build')
|
import sys
import torch
import torch.nn as nn
from torchdiffeq import odeint
from .utils import FCsoftplus, FCtanh, Linear, CosSin
from typing import Tuple, Union
class NN(nn.Module):
def __init__(
self,
G,
dof_ndim: int = 1,
hidden_size: int = 256,
num_layers: int = 2,
angular_dims: Tuple = tuple(),
wgrad: bool = True,
**kwargs
):
super().__init__(**kwargs)
if wgrad:
print("NN ignores wgrad")
self.q_ndim = dof_ndim
# We parameterize angular dims in terms of cos(theta), sin(theta)
chs = [2 * self.q_ndim + len(angular_dims)] + num_layers * [hidden_size]
layers = [CosSin(self.q_ndim, angular_dims, only_q=False)] + \
[FCtanh(chs[i], chs[i + 1], zero_bias=False, orthogonal_init=True)
for i in range(num_layers)] + \
[Linear(chs[-1], 2 * self.q_ndim, zero_bias=False, orthogonal_init=True)]
self.net = nn.Sequential(*layers)
print("NN currently assumes time independent ODE")
self.nfe = 0
self.angular_dims = angular_dims
def forward(self, t, z):
""" Computes a batch of `NxD` time derivatives of the state `z` at time `t`
Args:
t: Scalar Tensor of the current time
z: N x 2D Tensor of the N different states in D dimensions
Returns: N x 2D Tensor of the time derivatives
"""
assert (z.ndim == 2)
assert z.size(-1) == 2 * self.q_ndim
self.nfe += 1
return self.net(z)
def _integrate(self, dynamics, z0, ts, tol=1e-4, method="rk4"):
""" Integrates an initial state forward in time according to the learned dynamics
Args:
            z0: (bs x 2 x D) sized
                Tensor representing the initial state; bs is the batch size
ts: a length T Tensor representing the time points to evaluate at
tol: integrator tolerance
Returns: a bs x T x 2 x D sized Tensor
"""
assert (z0.ndim == 3) and (ts.ndim == 1)
bs = z0.shape[0]
self.nfe = 0
zt = odeint(dynamics, z0.reshape(bs, -1), ts, rtol=tol, method=method)
zt = zt.permute(1, 0, 2) # T x N x D -> N x T x D
# self._acc_magn = self.acc_magn(zt)
return zt.reshape(bs, len(ts), *z0.shape[1:])
def integrate(self, z0, ts, tol=1e-4, method="rk4"):
""" Integrates an initial state forward in time according to the learned dynamics
Args:
            z0: (bs x 2 x D) sized
                Tensor representing the initial state; bs is the batch size
ts: a length T Tensor representing the time points to evaluate at
tol: integrator tolerance
Returns: a bs x T x 2 x D sized Tensor
"""
return self._integrate(lambda t,z: self.forward(t,z), z0, ts, tol, method)
def acc_magn(self, zt):
dz_dt = self.forward(torch.zeros(1)[0], zt.reshape(-1, zt.shape[-1]))
magnitude = dz_dt.chunk(2, dim=-1)[1].pow(2).mean()
return magnitude
# def log_data(self,logger,step,name):
# logger.add_scalars('info',
# {'acc_magn': self._acc_magn.cpu().data.numpy()},
# step)
class mNN(nn.Module):
def __init__(
self,
G,
dof_ndim: int = 1,
hidden_size: int = 256,
num_layers: int = 2,
angular_dims: Tuple = tuple(),
wgrad: bool = True,
**kwargs
):
super().__init__(**kwargs)
if wgrad:
print("NN ignores wgrad")
self.q_ndim = dof_ndim
        self.cossin = CosSin(3 * self.q_ndim, angular_dims, only_q=False)  # NOTE: unused; the net below builds its own CosSin layer
# We parameterize angular dims in terms of cos(theta), sin(theta)
chs = [3 * self.q_ndim + len(angular_dims)] + num_layers * [hidden_size]
layers = [CosSin(2 * self.q_ndim, angular_dims, only_q=False)] + \
[FCtanh(chs[i], chs[i + 1], zero_bias=False, orthogonal_init=True)
for i in range(num_layers)] + \
[Linear(chs[-1], 2 * self.q_ndim, zero_bias=False, orthogonal_init=True)]
self.net = nn.Sequential(*layers)
# wrap = lambda: nn.Sequential(*layers)
# self.swag_model = SWAG(wrap)
print("NN currently assumes time independent ODE")
self.nfe = 0
self.angular_dims = angular_dims
def forward(self, t, z):
""" Computes a batch of `NxD` time derivatives of the state `z` at time `t`
Args:
t: Scalar Tensor of the current time
z: N x 2D Tensor of the N different states in D dimensions
Returns: N x 2D Tensor of the time derivatives
"""
z, m = z
assert (t.ndim == 0) and (z.ndim == 2)
assert z.size(-1) == 2 * self.q_ndim
self.nfe += 1
zm = torch.cat([z, m], dim=1)
dz = self.net(zm)
# if self.training:
# dz[:,:self.q_ndim] = dz[:,:self.q_ndim] + 0.01 * torch.randn_like(dz[:,:self.q_ndim])
dm = torch.zeros_like(m)
return dz, dm
def integrate(self, z0, m, ts, tol=1e-4, method="rk4"):
""" Integrates an initial state forward in time according to the learned dynamics
Args:
            z0: (bs x 2 x D) sized
                Tensor representing the initial state; bs is the batch size
ts: a length T Tensor representing the time points to evaluate at
tol: integrator tolerance
Returns: a bs x T x 2 x D sized Tensor
"""
assert (z0.ndim == 3) and (ts.ndim == 1)
bs = z0.shape[0]
self.nfe = 0
zt, _ = odeint(self, (z0.reshape(bs, -1), m), ts, rtol=tol, method=method)
zt = zt.permute(1, 0, 2) # T x N x D -> N x T x D
return zt.reshape(bs, len(ts), *z0.shape[1:])
class ControlNN(nn.Module):
def __init__(
self,
control_policy,
G,
dof_ndim: int = 1,
hidden_size: int = 256,
num_layers: int = 2,
angular_dims: Tuple = tuple(),
wgrad: bool = True,
**kwargs
):
super().__init__(**kwargs)
if wgrad:
print("NN ignores wgrad")
self.q_ndim = dof_ndim
# We parameterize angular dims in terms of cos(theta), sin(theta)
chs = [2 * self.q_ndim + len(angular_dims)] + num_layers * [hidden_size]
layers = [CosSin(self.q_ndim, angular_dims, only_q=False)] + \
[FCtanh(chs[i], chs[i + 1], zero_bias=False, orthogonal_init=True)
for i in range(num_layers)] + \
[Linear(chs[-1], 2 * self.q_ndim, zero_bias=False, orthogonal_init=True)]
self.net = nn.Sequential(*layers)
chs = [1] + num_layers * [hidden_size]
#[CosSin(self.q_ndim, angular_dims, only_q=False)]
layers = [FCtanh(chs[i], chs[i + 1], zero_bias=False, orthogonal_init=True)
for i in range(num_layers)] + \
[Linear(chs[-1], 2 * self.q_ndim, zero_bias=False, orthogonal_init=True)]
self.control_net = nn.Sequential(*layers)
print("NN currently assumes time independent ODE")
self.nfe = 0
self.angular_dims = angular_dims
self.control_policy = control_policy
def forward(self, t, z):
""" Computes a batch of `NxD` time derivatives of the state `z` at time `t`
Args:
t: Scalar Tensor of the current time
z: N x 2D Tensor of the N different states in D dimensions
Returns: N x 2D Tensor of the time derivatives
"""
assert (t.ndim == 0) and (z.ndim == 2)
assert z.size(-1) == 2 * self.q_ndim
self.nfe += 1
u = self.control_policy(t, z).detach()
# dynamics = self.net(torch.cat([z, u], axis=-1))
dynamics = self.net(z) + self.control_net(u)
# print(dynamics)
return dynamics
def _integrate(self, dynamics, z0, ts, tol=1e-4, method="rk4"):
""" Integrates an initial state forward in time according to the learned dynamics
Args:
            z0: (bs x 2 x D) sized
                Tensor representing the initial state; bs is the batch size
ts: a length T Tensor representing the time points to evaluate at
tol: integrator tolerance
Returns: a bs x T x 2 x D sized Tensor
"""
assert (z0.ndim == 3) and (ts.ndim == 1)
bs = z0.shape[0]
self.nfe = 0
zt = odeint(dynamics, z0.reshape(bs, -1), ts, rtol=tol, method=method)
zt = zt.permute(1, 0, 2) # T x N x D -> N x T x D
return zt.reshape(bs, len(ts), *z0.shape[1:])
def integrate(self, z0, ts, tol=1e-4, method="rk4"):
""" Integrates an initial state forward in time according to the learned dynamics
Args:
            z0: (bs x 2 x D) sized
                Tensor representing the initial state; bs is the batch size
ts: a length T Tensor representing the time points to evaluate at
tol: integrator tolerance
Returns: a bs x T x 2 x D sized Tensor
"""
return self._integrate(lambda t,z: self.forward(t,z), z0, ts, tol, method)
    def integrate_swag(self, z0, ts, tol=1e-4, method="rk4"):
        # NOTE: this and the two methods below rely on a `self.swag_model`
        # attribute that __init__ never creates (cf. the commented-out SWAG
        # wrapper in mNN above); they raise AttributeError unless it is
        # restored.
        return self._integrate(lambda t, z: self.swag_model(z), z0, ts, tol, method)
def collect_model(self):
self.swag_model.collect_model(self.net)
def sample(self):
self.swag_model.sample()
class DeltaNN(NN):
def integrate(self, z0, ts, tol=0.0,method=None):
""" Integrates an initial state forward in time according to the learned
dynamics using Euler's method with predicted time derivatives
Args:
            z0: (bs x 2 x D) sized
                Tensor representing the initial state; bs is the batch size
ts: a length T Tensor representing the time points to evaluate at
Returns: a bs x T x 2 x D sized Tensor
"""
assert (z0.ndim == 3) and (ts.ndim == 1)
bs = z0.shape[0]
dts = ts[1:] - ts[:-1]
zts = [z0.reshape(bs, -1)]
for dt in dts:
zts.append(zts[-1] + dt * self(ts[0], zts[-1]))
return torch.stack(zts, dim=1).reshape(bs, len(ts), *z0.shape[1:])
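# Usage sketch (illustrative, not part of the original module; assumes the
# relative imports above resolve and dof_ndim = D = 2):
#   model = NN(G=None, dof_ndim=2)
#   z0 = torch.randn(8, 2, 2)          # bs x 2 x D initial states
#   ts = torch.linspace(0.0, 1.0, 10)  # T evaluation times
#   zt = model.integrate(z0, ts)       # -> bs x T x 2 x D trajectory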
|
# Copyright 2013 OpenStack Foundation
# Copyright (C) 2013 Yahoo! Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
import hashlib
import json
import logging
import os
import sys
import traceback
import uuid
import fixtures
from keystoneclient import exceptions as ks_exc
from keystoneclient import fixture as ks_fixture
import mock
import requests
from requests_mock.contrib import fixture as rm_fixture
import six
from bileanclient.common import utils
from bileanclient import exc
from bileanclient import shell as openstack_shell
from bileanclient.tests.unit import utils as testutils
DEFAULT_BILEAN_URL = 'http://127.0.0.1:5000/'
DEFAULT_USERNAME = 'username'
DEFAULT_PASSWORD = 'password'
DEFAULT_TENANT_ID = 'tenant_id'
DEFAULT_TENANT_NAME = 'tenant_name'
DEFAULT_PROJECT_ID = '0123456789'
DEFAULT_USER_DOMAIN_NAME = 'user_domain_name'
DEFAULT_UNVERSIONED_AUTH_URL = 'http://127.0.0.1:5000/'
DEFAULT_V2_AUTH_URL = '%sv2.0' % DEFAULT_UNVERSIONED_AUTH_URL
DEFAULT_V3_AUTH_URL = '%sv3' % DEFAULT_UNVERSIONED_AUTH_URL
DEFAULT_AUTH_TOKEN = 'ba5501a434914768824374764adb8014'
TEST_SERVICE_URL = 'http://127.0.0.1:5000/'
FAKE_V2_ENV = {'OS_USERNAME': DEFAULT_USERNAME,
'OS_PASSWORD': DEFAULT_PASSWORD,
'OS_TENANT_NAME': DEFAULT_TENANT_NAME,
'OS_AUTH_URL': DEFAULT_V2_AUTH_URL,
'OS_IMAGE_URL': DEFAULT_BILEAN_URL}
FAKE_V3_ENV = {'OS_USERNAME': DEFAULT_USERNAME,
'OS_PASSWORD': DEFAULT_PASSWORD,
'OS_PROJECT_ID': DEFAULT_PROJECT_ID,
'OS_USER_DOMAIN_NAME': DEFAULT_USER_DOMAIN_NAME,
'OS_AUTH_URL': DEFAULT_V3_AUTH_URL,
'OS_IMAGE_URL': DEFAULT_BILEAN_URL}
TOKEN_ID = uuid.uuid4().hex
V2_TOKEN = ks_fixture.V2Token(token_id=TOKEN_ID)
V2_TOKEN.set_scope()
_s = V2_TOKEN.add_service('billing', name='bilean')
_s.add_endpoint(DEFAULT_BILEAN_URL)
V3_TOKEN = ks_fixture.V3Token()
V3_TOKEN.set_project_scope()
_s = V3_TOKEN.add_service('billing', name='bilean')
_s.add_standard_endpoints(public=DEFAULT_BILEAN_URL)
class ShellTest(testutils.TestCase):
# auth environment to use
auth_env = FAKE_V2_ENV.copy()
# expected auth plugin to invoke
token_url = DEFAULT_V2_AUTH_URL + '/tokens'
# Patch os.environ to avoid required auth info
def make_env(self, exclude=None):
env = dict((k, v) for k, v in self.auth_env.items() if k != exclude)
self.useFixture(fixtures.MonkeyPatch('os.environ', env))
def setUp(self):
super(ShellTest, self).setUp()
global _old_env
_old_env, os.environ = os.environ, self.auth_env
self.requests = self.useFixture(rm_fixture.Fixture())
json_list = ks_fixture.DiscoveryList(DEFAULT_UNVERSIONED_AUTH_URL)
self.requests.get(DEFAULT_BILEAN_URL, json=json_list, status_code=300)
json_v2 = {'version': ks_fixture.V2Discovery(DEFAULT_V2_AUTH_URL)}
self.requests.get(DEFAULT_V2_AUTH_URL, json=json_v2)
json_v3 = {'version': ks_fixture.V3Discovery(DEFAULT_V3_AUTH_URL)}
self.requests.get(DEFAULT_V3_AUTH_URL, json=json_v3)
self.v2_auth = self.requests.post(DEFAULT_V2_AUTH_URL + '/tokens',
json=V2_TOKEN)
headers = {'X-Subject-Token': TOKEN_ID}
self.v3_auth = self.requests.post(DEFAULT_V3_AUTH_URL + '/auth/tokens',
headers=headers,
json=V3_TOKEN)
global shell, _shell, assert_called, assert_called_anytime
_shell = openstack_shell.BileanShell()
shell = lambda cmd: _shell.main(cmd.split())
def tearDown(self):
super(ShellTest, self).tearDown()
global _old_env
os.environ = _old_env
def shell(self, argstr, exitcodes=(0,)):
orig = sys.stdout
orig_stderr = sys.stderr
try:
sys.stdout = six.StringIO()
sys.stderr = six.StringIO()
_shell = openstack_shell.BileanShell()
_shell.main(argstr.split())
except SystemExit:
exc_type, exc_value, exc_traceback = sys.exc_info()
self.assertIn(exc_value.code, exitcodes)
finally:
stdout = sys.stdout.getvalue()
sys.stdout.close()
sys.stdout = orig
stderr = sys.stderr.getvalue()
sys.stderr.close()
sys.stderr = orig_stderr
return (stdout, stderr)
def test_help_unknown_command(self):
shell = openstack_shell.BileanShell()
argstr = 'help foofoo'
self.assertRaises(exc.CommandError, shell.main, argstr.split())
@mock.patch('sys.stdout', six.StringIO())
@mock.patch('sys.stderr', six.StringIO())
@mock.patch('sys.argv', ['bilean', 'help', 'foofoo'])
def test_no_stacktrace_when_debug_disabled(self):
with mock.patch.object(traceback, 'print_exc') as mock_print_exc:
try:
openstack_shell.main()
except SystemExit:
pass
self.assertFalse(mock_print_exc.called)
@mock.patch('sys.stdout', six.StringIO())
@mock.patch('sys.stderr', six.StringIO())
@mock.patch('sys.argv', ['bilean', 'help', 'foofoo'])
def test_stacktrace_when_debug_enabled_by_env(self):
old_environment = os.environ.copy()
os.environ = {'BILEANCLIENT_DEBUG': '1'}
try:
with mock.patch.object(traceback, 'print_exc') as mock_print_exc:
try:
openstack_shell.main()
except SystemExit:
pass
self.assertTrue(mock_print_exc.called)
finally:
os.environ = old_environment
@mock.patch('sys.stdout', six.StringIO())
@mock.patch('sys.stderr', six.StringIO())
@mock.patch('sys.argv', ['bilean', '--debug', 'help', 'foofoo'])
def test_stacktrace_when_debug_enabled(self):
with mock.patch.object(traceback, 'print_exc') as mock_print_exc:
try:
openstack_shell.main()
except SystemExit:
pass
self.assertTrue(mock_print_exc.called)
def test_help(self):
shell = openstack_shell.BileanShell()
argstr = 'help'
with mock.patch.object(shell, '_get_keystone_session') as et_mock:
actual = shell.main(argstr.split())
self.assertEqual(0, actual)
self.assertFalse(et_mock.called)
def test_blank_call(self):
shell = openstack_shell.BileanShell()
with mock.patch.object(shell, '_get_keystone_session') as et_mock:
actual = shell.main('')
self.assertEqual(0, actual)
self.assertFalse(et_mock.called)
def test_help_on_subcommand_error(self):
self.assertRaises(exc.CommandError, shell, 'help bad')
def test_get_base_parser(self):
test_shell = openstack_shell.BileanShell()
actual_parser = test_shell.get_base_parser()
description = 'Command-line interface to the OpenStack Bilean API.'
expected = argparse.ArgumentParser(
prog='bilean', usage=None,
description=description,
conflict_handler='error',
add_help=False,
formatter_class=openstack_shell.HelpFormatter,)
self.assertEqual(str(expected), str(actual_parser))
@mock.patch.object(openstack_shell.BileanShell,
'_get_versioned_client')
def test_cert_and_key_args_interchangeable(self,
mock_versioned_client):
# make sure --os-cert and --os-key are passed correctly
args = ('--os-cert mycert '
'--os-key mykey user-list')
shell(args)
assert mock_versioned_client.called
((api_version, args), kwargs) = mock_versioned_client.call_args
self.assertEqual('mycert', args.os_cert)
self.assertEqual('mykey', args.os_key)
# make sure we get the same thing with --cert-file and --key-file
args = ('--cert-file mycertfile '
'--key-file mykeyfile user-list')
bilean_shell = openstack_shell.BileanShell()
bilean_shell.main(args.split())
assert mock_versioned_client.called
((api_version, args), kwargs) = mock_versioned_client.call_args
self.assertEqual('mycertfile', args.os_cert)
self.assertEqual('mykeyfile', args.os_key)
@mock.patch('bileanclient.v1.client.Client')
def test_no_auth_with_token_and_bilean_url(self, mock_client):
# test no authentication is required if both token and endpoint url
# are specified
args = ('--os-auth-token mytoken '
'--os-bilean-url http://host:1234/v1 user-list')
bilean_shell = openstack_shell.BileanShell()
bilean_shell.main(args.split())
assert mock_client.called
(args, kwargs) = mock_client.call_args
self.assertEqual('mytoken', kwargs['token'])
self.assertEqual('http://host:1234', args[0])
def _assert_auth_plugin_args(self):
# make sure our auth plugin is invoked with the correct args
self.assertFalse(self.v3_auth.called)
body = json.loads(self.v2_auth.last_request.body)
self.assertEqual(self.auth_env['OS_TENANT_NAME'],
body['auth']['tenantName'])
self.assertEqual(self.auth_env['OS_USERNAME'],
body['auth']['passwordCredentials']['username'])
self.assertEqual(self.auth_env['OS_PASSWORD'],
body['auth']['passwordCredentials']['password'])
@mock.patch('bileanclient.v1.client.Client')
def test_auth_plugin_invocation(self, v1_client):
args = 'user-list'
bilean_shell = openstack_shell.BileanShell()
bilean_shell.main(args.split())
self.assertEqual(0, self.v2_auth.call_count)
@mock.patch('bileanclient.v1.client.Client')
def test_auth_plugin_invocation_with_unversioned_auth_url(
self, v1_client):
args = ('--os-auth-url %s user-list' % DEFAULT_UNVERSIONED_AUTH_URL)
bilean_shell = openstack_shell.BileanShell()
bilean_shell.main(args.split())
@mock.patch('bileanclient.Client')
def test_endpoint_token_no_auth_req(self, mock_client):
def verify_input(version=None, endpoint=None, *args, **kwargs):
self.assertIn('token', kwargs)
self.assertEqual(TOKEN_ID, kwargs['token'])
self.assertEqual(DEFAULT_BILEAN_URL, endpoint)
return mock.MagicMock()
mock_client.side_effect = verify_input
bilean_shell = openstack_shell.BileanShell()
args = ['--os-auth-token', TOKEN_ID,
'--os-bilean-url', DEFAULT_BILEAN_URL,
'user-list']
bilean_shell.main(args)
self.assertEqual(1, mock_client.call_count)
@mock.patch('sys.stdin', side_effect=mock.MagicMock)
@mock.patch('getpass.getpass', side_effect=EOFError)
@mock.patch('bileanclient.v1.client.Client')
def test_password_prompted_ctrlD(self, v1_client,
mock_getpass, mock_stdin):
cli = mock.MagicMock()
v1_client.return_value = cli
cli.http_client.get.return_value = (None, {'versions': []})
bilean_shell = openstack_shell.BileanShell()
self.make_env(exclude='OS_PASSWORD')
        # We should get a CommandError because we mock Ctrl-D.
self.assertRaises(exc.CommandError, bilean_shell.main, ['user-list'])
# Make sure we are actually prompted.
mock_getpass.assert_called_with('OS Password: ')
@mock.patch(
'bileanclient.shell.BileanShell._get_keystone_session')
def test_no_auth_with_proj_name(self, session):
with mock.patch('bileanclient.v1.client.Client'):
args = ('--os-project-name myname '
'--os-project-domain-name mydomain '
'--os-project-domain-id myid user-list')
bilean_shell = openstack_shell.BileanShell()
bilean_shell.main(args.split())
((args), kwargs) = session.call_args
self.assertEqual('myname', kwargs['project_name'])
self.assertEqual('mydomain', kwargs['project_domain_name'])
self.assertEqual('myid', kwargs['project_domain_id'])
@mock.patch.object(openstack_shell.BileanShell, 'main')
def test_shell_keyboard_interrupt(self, mock_bilean_shell):
# Ensure that exit code is 130 for KeyboardInterrupt
try:
mock_bilean_shell.side_effect = KeyboardInterrupt()
openstack_shell.main()
except SystemExit as ex:
self.assertEqual(130, ex.code)
@mock.patch('bileanclient.common.utils.exit', side_effect=utils.exit)
def test_shell_illegal_version(self, mock_exit):
# Only int versions are allowed on cli
shell = openstack_shell.BileanShell()
argstr = '--os-bilean-api-version 1.1 user-list'
try:
shell.main(argstr.split())
except SystemExit as ex:
self.assertEqual(1, ex.code)
msg = ("Invalid API version parameter. "
"Supported values are %s" % openstack_shell.SUPPORTED_VERSIONS)
mock_exit.assert_called_with(msg=msg)
@mock.patch('bileanclient.common.utils.exit', side_effect=utils.exit)
def test_shell_unsupported_version(self, mock_exit):
# Test an integer version which is not supported (-1)
shell = openstack_shell.BileanShell()
argstr = '--os-bilean-api-version -1 user-list'
try:
shell.main(argstr.split())
except SystemExit as ex:
self.assertEqual(1, ex.code)
msg = ("Invalid API version parameter. "
"Supported values are %s" % openstack_shell.SUPPORTED_VERSIONS)
mock_exit.assert_called_with(msg=msg)
@mock.patch.object(openstack_shell.BileanShell,
'get_subcommand_parser')
    def test_shell_import_error_with_message(self, mock_parser):
msg = 'Unable to import module xxx'
mock_parser.side_effect = ImportError('%s' % msg)
shell = openstack_shell.BileanShell()
argstr = '--os-bilean-api-version 1 user-list'
try:
shell.main(argstr.split())
self.fail('No import error returned')
except ImportError as e:
self.assertEqual(msg, str(e))
@mock.patch.object(openstack_shell.BileanShell,
'get_subcommand_parser')
def test_shell_import_error_default_message(self, mock_parser):
mock_parser.side_effect = ImportError
shell = openstack_shell.BileanShell()
argstr = '--os-bilean-api-version 1 user-list'
try:
shell.main(argstr.split())
self.fail('No import error returned')
except ImportError as e:
msg = 'Unable to import module. Re-run with --debug for more info.'
self.assertEqual(msg, str(e))
@mock.patch('bileanclient.v1.client.Client')
def test_auth_plugin_invocation_without_username(self, v1_client):
self.make_env(exclude='OS_USERNAME')
args = '--os-bilean-api-version 1 user-list'
bilean_shell = openstack_shell.BileanShell()
self.assertRaises(exc.CommandError, bilean_shell.main, args.split())
@mock.patch('bileanclient.v1.client.Client')
def test_auth_plugin_invocation_without_auth_url(self, v1_client):
self.make_env(exclude='OS_AUTH_URL')
args = '--os-bilean-api-version 1 user-list'
bilean_shell = openstack_shell.BileanShell()
self.assertRaises(exc.CommandError, bilean_shell.main, args.split())
@mock.patch('bileanclient.v1.client.Client')
def test_auth_plugin_invocation_without_tenant(self, v1_client):
if 'OS_TENANT_NAME' in os.environ:
self.make_env(exclude='OS_TENANT_NAME')
if 'OS_PROJECT_ID' in os.environ:
self.make_env(exclude='OS_PROJECT_ID')
args = '--os-bilean-api-version 1 user-list'
bilean_shell = openstack_shell.BileanShell()
self.assertRaises(exc.CommandError, bilean_shell.main, args.split())
@mock.patch('sys.argv', ['bilean'])
@mock.patch('sys.stdout', six.StringIO())
@mock.patch('sys.stderr', six.StringIO())
def test_main_noargs(self):
# Ensure that main works with no command-line arguments
try:
openstack_shell.main()
except SystemExit:
self.fail('Unexpected SystemExit')
# We expect the normal usage as a result
expected = ['Command-line interface to the OpenStack Bilean API',
'user-list',
'rule-list']
for output in expected:
self.assertIn(output,
sys.stdout.getvalue())
@mock.patch('bileanclient.v1.client.Client')
@mock.patch('bileanclient.v1.shell.do_user_list')
@mock.patch('bileanclient.shell.logging.basicConfig')
def test_setup_debug(self, conf, func, v1_client):
cli = mock.MagicMock()
v1_client.return_value = cli
cli.http_client.get.return_value = (None, {'versions': []})
args = '--debug user-list'
bilean_shell = openstack_shell.BileanShell()
bilean_shell.main(args.split())
bilean_logger = logging.getLogger('bileanclient')
self.assertEqual(bilean_logger.getEffectiveLevel(), logging.DEBUG)
conf.assert_called_with(level=logging.DEBUG)
class ShellTestWithKeystoneV3Auth(ShellTest):
# auth environment to use
auth_env = FAKE_V3_ENV.copy()
token_url = DEFAULT_V3_AUTH_URL + '/auth/tokens'
def _assert_auth_plugin_args(self):
self.assertFalse(self.v2_auth.called)
body = json.loads(self.v3_auth.last_request.body)
user = body['auth']['identity']['password']['user']
self.assertEqual(self.auth_env['OS_USERNAME'], user['name'])
self.assertEqual(self.auth_env['OS_PASSWORD'], user['password'])
self.assertEqual(self.auth_env['OS_USER_DOMAIN_NAME'],
user['domain']['name'])
self.assertEqual(self.auth_env['OS_PROJECT_ID'],
body['auth']['scope']['project']['id'])
@mock.patch('bileanclient.v1.client.Client')
def test_auth_plugin_invocation(self, v1_client):
args = '--os-bilean-api-version 1 user-list'
bilean_shell = openstack_shell.BileanShell()
bilean_shell.main(args.split())
self.assertEqual(0, self.v3_auth.call_count)
@mock.patch('keystoneclient.discover.Discover',
side_effect=ks_exc.ClientException())
def test_api_discovery_failed_with_unversioned_auth_url(self,
discover):
args = ('--os-bilean-api-version 1 --os-auth-url %s user-list'
% DEFAULT_UNVERSIONED_AUTH_URL)
bilean_shell = openstack_shell.BileanShell()
self.assertRaises(exc.CommandError, bilean_shell.main, args.split())
def test_bash_completion(self):
stdout, stderr = self.shell('--os-bilean-api-version 1 bash_completion')
# just check we have some output
required = [
'--value',
'rule-create',
'help',
'--limit']
for r in required:
self.assertIn(r, stdout.split())
avoided = [
'bash_completion',
'bash-completion']
for r in avoided:
self.assertNotIn(r, stdout.split())
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Lightlab documentation build configuration file, created by
# sphinx-quickstart on Sat Mar 25 20:35:35 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import better_apidoc
# -- Generate API documentation ------------------------------------------------
def run_apidoc(_):
"""Generage API documentation"""
better_apidoc.main(
['better-apidoc', '-t', './_templates', '--force', '--no-toc',
'--separate', '-o', './API', '../lightlab'])
def run_testdoc(_):
better_apidoc.main(
['better-apidoc', '-t', './_templates', '--force', '--no-toc',
'--separate', '-o', './TestAPI', '../tests'])
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.intersphinx',
'sphinx.ext.inheritance_diagram',
'sphinx.ext.coverage',
'sphinx.ext.napoleon',
'sphinx.ext.todo',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinxcontrib.bibtex',
'nbsphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_suffix = ['.rst', '.md']
# source_suffix = '.rst'
source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
}
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Lightwave Laboratory Instrumentation'
copyright = '(c) 2018 Lightwave Lab, Princeton University'
author = 'Alex Tait, Thomas Ferreira de Lima'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
with open("../version.py") as f:
code = compile(f.read(), "version.py", 'exec')
version_dict = {}
exec(code, {}, version_dict)
# The short X.Y version.
version = version_dict['version']
# The full version, including alpha/beta/rc tags.
release = version_dict['release']
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', '**.ipynb_checkpoints']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for Autodocumentation ----------------------------------------------
autodoc_member_order = 'bysource'
autoclass_content = 'both'
add_module_names = False
napoleon_google_docstring = True
napoleon_use_param = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# html_theme = 'alabaster' # The default
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'Lightlab'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
doc_title = 'Lightlab Documentation'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Lightlab.tex', doc_title,
author, 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'lightlab', doc_title,
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Lightlab', doc_title,
author, 'Lightlab', project,
'Miscellaneous'),
]
# -- Options for IPyNb output -------------------------------------------
nbsphinx_timeout = 60
nbsphinx_allow_errors = True
nbsphinx_execute = 'never'
# -- Mock C libraries to save autodoc -------------------------------------------
import sys
from unittest.mock import MagicMock
class Mock(MagicMock):
@classmethod
def __getattr__(cls, name):
return MagicMock()
MOCK_MODULES = ['numpy',
'matplotlib',
'matplotlib.pyplot',
'matplotlib.figure',
'matplotlib.backends',
'matplotlib.backends.backend_pdf',
'scipy',
'scipy.optimize',
'scipy.io',
'pandas',
'IPython',
'IPython.lib',
'IPython.lib.lexers',
'matplotlib.cm',
'jsonpickle',
'jsonpickle.ext',
'jsonpickle.ext.numpy']
sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'python': ('https://docs.python.org/', None)}
def setup(app):
app.connect('builder-inited', run_apidoc)
app.connect('builder-inited', run_testdoc)
|
import pathlib
import sanic.request
import sanic.response
from sanic import Sanic
# isort: off
# These two lines are to ensure that the version of `sanic_openapi3e` your app uses is from this checkout.
import sys
sys.path.insert(0, str(pathlib.Path(__file__).absolute().parent.parent))
from sanic_openapi3e import doc, openapi_blueprint, swagger_blueprint
# isort: on
app = Sanic(name=__file__, strict_slashes=True)
app.config.SHOW_OPENAPI_EXCLUDED = True
app.config.API_TITLE = __file__
app.config.API_DESCRIPTION = f"""This app was configured with `app.config.SHOW_OPENAPI_EXCLUDED = True` so you can:\n
\n
* In the black swagger top-nav above, manually change the value of
`/openapi/spec.json` to `/openapi/spec.all.json`;
* click "Explore"
* Now see the excluded routes in the UI.
It is not recommended to set this flag for production builds, but it can be helpful during development.
"""
app.blueprint(openapi_blueprint)
app.blueprint(swagger_blueprint)
@app.get("/test_id/<an_id:int>")
@doc.parameter(
name="an_id", description="An ID", required=True, _in="path", schema=doc.Schema.Integer,
)
@doc.tag("Tag 1", description="A tag desc")
def get_id_19(request, an_id: int):
d = locals()
del d["request"] # not JSON serializable
return sanic.response.json(d)
@app.get("/22/test_id/<an_id:int>")
@doc.exclude()
@doc.tag("Tag excluded", description="You shouldn'd usually see this...")
@doc.parameter(
name="an_id", description="An ID", required=True, _in="path", schema=doc.Schema.Integer,
)
def get_id_29(request, an_id: int):
d = locals()
del d["request"] # not JSON serializable
return sanic.response.json(d)
example_port = 8002
@app.listener("after_server_start")
async def notify_server_started(app: sanic.app.Sanic, __):
print("\n\n************* sanic-openapi3e ********************************")
print(f"* See your openapi swagger on http://127.0.0.1:{example_port}/swagger/ *")
print("************* sanic-openapi3e ********************************\n\n")
app.go_fast(port=example_port)
|
from flask import json, Response as BaseResponse
from flask.testing import FlaskClient
from werkzeug.utils import cached_property
class Response(BaseResponse):
@cached_property
def json(self):
return json.loads(self.data)
class TestClient(FlaskClient):
def open(self, *args, **kwargs):
if 'json' in kwargs:
kwargs['data'] = json.dumps(kwargs.pop('json'))
kwargs['content_type'] = 'application/json'
return super(TestClient, self).open(*args, **kwargs)
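# Usage sketch (assumes `app` is the Flask application under test):
#   app.test_client_class = TestClient
#   app.response_class = Response
#   client = app.test_client()
#   resp = client.post('/login', json={'email': 'user@example.com'})
#   resp.json  # parsed body via the cached_property above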
def populate_datastore(userflow):
users = [
('vgavro@gmail.com', 'Victor Gavro', 'password', ['admin'], True),
]
for u in users:
# password = userflow.encrypt_password(u[2])
user = userflow.datastore.create_user(email=u[0], name=u[1], is_active=u[4])
user.set_password(u[2])
user.generate_auth_id()
userflow.datastore.put(user)
userflow.datastore.commit()
|
# Generated by Django 3.1.2 on 2020-11-02 16:51
from django.db import migrations
import jobs.models
class Migration(migrations.Migration):
dependencies = [
('jobs', '0008_company_headquarters'),
]
operations = [
migrations.AlterField(
model_name='company',
name='logo',
field=jobs.models.CloudinaryField(blank=True, max_length=255, null=True),
),
]
|
import json
import subprocess
from .log import get_logger
from .config.exceptions import ConfigurationError
logger = get_logger()
def trivy(image_name, path='trivy', options=''):
"""Scan the vulnerabilities of docker image with Trivy
Args:
image_name (str): Docker image name
path (str): The path of trivy command.
If path is not specified, we suppose trivy is installed in $PATH.
options (str): Trivy command options
    Returns:
        list: dict objects containing the scan results, or None if the
            scan command fails to run
    Raises:
        ConfigurationError: if the path argument is invalid
        json.JSONDecodeError: if trivy's output is not valid JSON
"""
    # `-f json` is always added to the command below, so strip it from any
    # user-supplied options to avoid passing it twice
    options = options.replace('-f json', '')
cmd = f'{path} -f json {options} {image_name}'.split()
try:
proc = subprocess.run(cmd, check=True,
capture_output=True,
timeout=600)
except FileNotFoundError:
raise ConfigurationError(
f'{repr(path)} is invalid as trivy command path. '
'Please set the correct path that trivy command is stored')
except (subprocess.CalledProcessError,
subprocess.TimeoutExpired) as err:
logger.error(
f'Failed to scan {image_name} with Trivy. '
f'Execution command: {" ".join(err.cmd)}')
return None
    return json.loads(proc.stdout)
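# Usage sketch (illustrative image name and options; requires the trivy CLI
# to be installed):
#   results = trivy('alpine:3.12', options='--severity HIGH,CRITICAL')
#   if results is not None:
#       for target in results:
#           print(target.get('Target'))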
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the Windows Timeline SQLite database plugin."""
import unittest
from plaso.lib import definitions
from plaso.parsers.sqlite_plugins import windows_timeline
from tests.parsers.sqlite_plugins import test_lib
class WindowsTimelineTest(test_lib.SQLitePluginTestCase):
"""Tests for the Windows Timeline plugin."""
def testProcess(self):
"""Tests the Process function on a Windows Timeline SQLite database."""
plugin = windows_timeline.WindowsTimelinePlugin()
storage_writer = self._ParseDatabaseFileWithPlugin(
['windows_timeline_ActivitiesCache.db'], plugin)
self.assertEqual(112, storage_writer.number_of_events)
events = list(storage_writer.GetEvents())
expected_event_values = {
'active_duration_seconds': 9,
'data_type': 'windows:timeline:user_engaged',
'package_identifier': 'c:\\python34\\python.exe',
'reporting_app': 'ShellActivityMonitor',
'timestamp': '2018-08-03 11:29:00.000000',
'timestamp_desc': definitions.TIME_DESCRIPTION_START}
self.CheckEventValues(storage_writer, events[0], expected_event_values)
expected_event_values = {
'active_duration_seconds': 11,
'data_type': 'windows:timeline:user_engaged',
'package_identifier': (
'c:\\users\\demouser\\appdata\\local\\programs\\python\\'
'python37-32\\python.exe'),
'reporting_app': 'ShellActivityMonitor',
'timestamp': '2018-07-27 11:58:55.000000',
'timestamp_desc': definitions.TIME_DESCRIPTION_START}
self.CheckEventValues(storage_writer, events[2], expected_event_values)
expected_event_values = {
'application_display_name': 'OneDrive',
'data_type': 'windows:timeline:generic',
'description': '',
'package_identifier': 'Microsoft.SkyDrive.Desktop',
'timestamp': '2018-07-25 12:04:48.000000',
'timestamp_desc': definitions.TIME_DESCRIPTION_START}
self.CheckEventValues(storage_writer, events[80], expected_event_values)
expected_event_values = {
'application_display_name': 'Notepad',
'data_type': 'windows:timeline:generic',
'description': 'C:\\Users\\demouser\\Desktop\\SCHEMA.txt',
'package_identifier': (
'{1AC14E77-02E7-4E5D-B744-2EB1AE5198B7}\\notepad.exe'),
'timestamp': '2018-07-27 12:36:09.000000',
'timestamp_desc': definitions.TIME_DESCRIPTION_START}
self.CheckEventValues(storage_writer, events[96], expected_event_values)
if __name__ == '__main__':
unittest.main()
|
"""Proof of concept for a federated digital data exchange."""
from mahiru.__version__ import __version__
|
#@+leo-ver=5-thin
#@+node:ekr.20210202110241.1: * @file leoclient.py
"""
An example client for leoserver.py, based on work by Félix Malboeuf. Used by permission.
"""
import asyncio
import json
import time
import websockets
from leo.core import leoGlobals as g
from leo.core import leoserver
wsHost = "localhost"
wsPort = 32125
tag = 'client'
timeout = 0.1
times_d = {} # Keys are n, values are time sent.
tot_response_time = 0.0
n_known_response_times = 0
n_unknown_response_times = 0
#@+others
#@+node:ekr.20210219105145.1: ** function: _dump_outline
def _dump_outline(c): # pragma: no cover
"""Dump the outline."""
tag = '_dump_outline'
print(f"{tag}: {c.shortFileName()}...\n")
for p in c.all_positions():
level_s = ' ' * 2 * p.level()
print(f"{level_s}{p.childIndex():2} {p.v.gnx} {p.h}")
print('')
#@+node:ekr.20210206075253.1: ** function: _get_action_list
def _get_action_list():
"""
Return all callable public methods of the server.
In effect, these are unit tests.
"""
import inspect
import os
server = leoserver.LeoServer()
# file_name = "xyzzy.leo"
file_name = g.os_path_finalize_join(g.app.loadDir, '..', 'test', 'test.leo')
assert os.path.exists(file_name), repr(file_name)
log = False
exclude_names = [
# Dangerous at present.
'delete_node', 'cut_node', 'save_file',
# Require plugins.
'click_button', 'get_buttons', 'remove_button',
# Not ready yet.
'set_selection',
]
head = [
("get_sign_on", {}),
# ("apply_config", {"config": {"whatever": True}}),
("error", {}),
# ("bad_server_command", {}),
("open_file", {"filename": file_name, "log": log}),
]
head_names = [name for (name, package) in head]
tail = [
# ("get_body_length", {}), # All responses now contain len(p.b).
("find_all", {"find_text": "def"}),
("get_ua", {"log": log}),
("get_parent", {"log": log}),
("get_children", {"log": log}),
("set_body", {"body": "new body"}),
("set_headline", {"headline": "new headline"}),
("execute-leo-command", {"leo-command-name": "contract-all"}),
("insert_node", {"headline": "inserted headline"}),
("contract_node", {}),
("close_file", {"filename": file_name}),
("get_all_leo_commands", {}),
("get_all_server_commands", {}),
("shut_down", {}),
]
tail_names = [name for (name, package) in tail]
# Add all remaining methods to the middle.
tests = inspect.getmembers(server, inspect.ismethod)
test_names = sorted([name for (name, value) in tests if not name.startswith('_')])
middle = [(z, {}) for z in test_names
if z not in head_names + tail_names + exclude_names]
middle_names = [name for (name, package) in middle]
all_tests = head + middle + tail
if 0:
g.printObj(middle_names, tag='middle_names')
all_names = sorted([name for (name, package) in all_tests])
g.printObj(all_names, tag='all_names')
return all_tests
#@+node:ekr.20210206093130.1: ** function: _show_response
def _show_response(n, d, trace, verbose):
global n_known_response_times
global n_unknown_response_times
global times_d
global tot_response_time
# Calculate response time.
t1 = times_d.get(n)
t2 = time.perf_counter()
if t1 is None or n is None:
response_time_s = '???'
n_unknown_response_times += 1
else:
response_time = t2 - t1
tot_response_time += response_time
n_known_response_times += 1
response_time_s = f"{response_time:3.2}"
if not trace and not verbose:
return
action = d.get('action')
if not verbose:
if "async" in d:
print(f"{tag}: async: {d.get('s')}")
else:
print(f"{tag}: got: {n} {action}")
return
if action == 'open_file':
g.printObj(d,
tag=f"{tag}: got: open-file response time: {response_time_s}")
elif action == 'get_all_commands':
commands = d.get('commands')
print(f"{tag}: got: get_all_commands {len(commands)}")
else:
print(f"{tag}: got: {d}")
#@+node:ekr.20210205144500.1: ** function: client_main_loop
n_async_responses = 0
n_known_response_times = 0
n_unknown_response_times = 0
async def client_main_loop(timeout):
global n_async_responses
trace = True
verbose = False
uri = f"ws://{wsHost}:{wsPort}"
action_list = _get_action_list()
async with websockets.connect(uri) as websocket: # pylint: disable=no-member
if trace and verbose:
print(f"{tag}: asyncInterval.timeout: {timeout}")
# Await the startup package.
json_s = g.toUnicode(await websocket.recv())
d = json.loads(json_s)
if trace and verbose:
print(f"startup package: {d}")
n = 0
while True:
n += 1
try:
                times_d[n] = time.perf_counter()
await asyncio.sleep(timeout)
# Get the next package. The last action is shut_down.
try:
action, package = action_list[n-1]
except IndexError:
break
request_package = {
"id": n,
"action": action,
"package": package,
}
if trace and verbose:
print(f"{tag}: send: id: {n} package: {request_package}")
# Send the next request.
request = json.dumps(request_package, separators=(',', ':'))
await websocket.send(request)
# Wait for response to request n.
inner_n = 0
while True:
inner_n += 1
assert inner_n < 50 # Arbitrary.
try:
json_s = None
json_s = g.toUnicode(await websocket.recv())
d = json.loads(json_s)
except Exception:
if json_s is not None:
g.trace('json_s', json_s)
g.print_exception()
break
_show_response(n, d, trace, verbose)
# This loop invariant guarantees we receive messages in order.
is_async = "async" in d
action2, n2 = d.get("action"), d.get("id")
assert is_async or (action, n) == (action2, n2), (action, n, d)
if is_async:
n_async_responses += 1
else:
break
except websockets.exceptions.ConnectionClosedError as e:
print(f"{tag}: connection closed: {e}")
break
except websockets.exceptions.ConnectionClosed:
print(f"{tag}: connection closed normally")
break
except Exception as e:
print('')
print(f"{tag}: internal client error {e}")
print(f"{tag}: request_package: {request_package}")
g.print_exception()
print('')
print(f"Asynchronous responses: {n_async_responses}")
print(f"Unknown response times: {n_unknown_response_times}")
print(f" Known response times: {n_known_response_times}")
print(f" Average response_time: {(tot_response_time/n_known_response_times):3.2} sec.")
# About 0.1, regardless of tracing.
#@+node:ekr.20210205141432.1: ** function: main
def main():
loop = asyncio.get_event_loop()
try:
loop.run_until_complete(client_main_loop(timeout))
except KeyboardInterrupt:
# This terminates the server abnormally.
print(f"{tag}: Keyboard interrupt")
#@-others
if __name__ == '__main__':
main()
#@-leo
|
import adv_test
import adv
def module():
return Xander
class Xander(adv.Adv):
comment = 'c2+fs & stella show + RR'
conf = {
"mod_a3": ('fs', 'passive', 0.50) ,
}
a = 1
if a==1:
conf["mod_wp"] = [('s','passive',0.25),
('crit','chance',0.06,'hp70') ]
conf["mod_wp2"] = [('fs','passive',0.40),
('crit','damage',0.13) ]
if a==2:
conf["mod_wp"] = [('s','passive',0.25),
('crit','chance',0.06,'hp70') ]
conf["mod_wp2"] = [('crit','chance',0.09,'hit15'),
('crit','damage',0.15) ]
if __name__ == '__main__':
conf = {}
conf['acl'] = """
`s1,fsc
`s2,fsc
`fs, seq=2 and cancel
"""
adv_test.test(module(), conf, verbose=0, mass=0)
    exit()  # NOTE: the adv_test calls below are unreachable while this early exit remains
module().conf['mod_wp'] = [('fs','passive',0.3),('s','passive',0.15)]
adv_test.test(module(), conf, verbose=0)
module().conf['mod_wp'] = [('s','passive',0.25)]
adv_test.test(module(), conf, verbose=0)
|
from tqdm import tqdm
import inspect
import torch
from kits19cnn.inference import remove_3D_connected_components, BasePredictor
class General3DPredictor(BasePredictor):
"""
    Inference with a single model on every file generated by `test_loader`.
    Predictions are saved in `out_dir`.
    Predictions are made on the resized volumes; post-processing only
    removes small 3D connected components.
"""
def __init__(self, out_dir, model, test_loader,
pseudo_3D=True, pred_3D_params={"do_mirroring": True}):
"""
        Args:
out_dir (str): path to the output directory to store predictions
model (torch.nn.Module): class with the `predict_3D` method for
predicting a single patient volume.
test_loader: Iterable instance for generating data
(pref. torch DataLoader)
must have the __len__ arg.
pred_3D_params (dict): kwargs for `model.predict_3D`
pseudo_3D (bool): whether or not to have pseudo 3D inputs
"""
super().__init__(out_dir=out_dir, model=model, test_loader=test_loader)
assert inspect.ismethod(model.predict_3D), \
"model must have the method `predict_3D`"
if pseudo_3D:
assert inspect.ismethod(model.predict_3D_pseudo3D_2Dconv), \
"model must have the method `predict_3D_pseudo3D_2Dconv`"
self.pseudo_3D = pseudo_3D
self.pred_3D_params = pred_3D_params
def run_3D_predictions(self, min_size=5000):
"""
Runs predictions on the dataset (specified in test_loader)
"""
cases = self.test_loader.dataset.im_ids
assert len(cases) == len(self.test_loader)
for (test_batch, case) in tqdm(zip(self.test_loader, cases), total=len(cases)):
test_x = torch.squeeze(test_batch[0], dim=0)
if self.pseudo_3D:
pred, _, act, _ = self.model.predict_3D_pseudo3D_2Dconv(test_x,
**self.pred_3D_params)
else:
pred, _, act, _ = self.model.predict_3D(test_x,
**self.pred_3D_params)
assert len(pred.shape) == 3
assert len(act.shape) == 4
pred = remove_3D_connected_components(pred, min_size=min_size)
self.save_pred(pred, act, case)
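# Usage sketch (illustrative; `model` must expose `predict_3D` or
# `predict_3D_pseudo3D_2Dconv`, and `test_loader.dataset` must define
# `im_ids`):
#   predictor = General3DPredictor("preds/", model, test_loader, pseudo_3D=False)
#   predictor.run_3D_predictions(min_size=5000)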
|
import logging as log
# import DbCon
from common import ReadConfig as config
config = config.getConfig()
log.basicConfig(filename=config.get("logging","fileName"), level=log.DEBUG,
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
log.debug("This is ")
def get_log():
return log
def info(message):
log.info(msg=message)
def debug(message):
log.debug(msg=message)
def exception(exp, msg=None):
    if msg is not None:
        exp = "Message is: " + msg + "\nStacktrace is: " + str(exp)
log.error(exp, exc_info=True)
|
def ficha(jogador="<unknown>", gols=0):
    print(f'Player {jogador} scored {gols} goal(s) in the championship.')
n = input("Player name: ")
g = input("Number of goals: ")
if g.isnumeric():
g = int(g)
else:
g = 0
if n.strip() == '':
ficha(gols=g)
else:
ficha(n,g)
|
from deepnlpf.notifications.email import Email
email = Email()
email.send()
|
# @Title: 不同路径 II (Unique Paths II)
# @Author: 18015528893
# @Date: 2021-02-23 22:15:54
# @Runtime: 48 ms
# @Memory: 15 MB
from typing import List
class Solution:
def uniquePathsWithObstacles(self, obstacleGrid: List[List[int]]) -> int:
memo = dict()
def dp(i, j):
if i < 0:
return 0
if j < 0:
return 0
if i == 0 and j == 0:
if obstacleGrid[0][0] == 1:
return 0
else:
return 1
if (i, j) in memo:
return memo[(i, j)]
res = 0
if obstacleGrid[i-1][j] == 0:
res += dp(i-1, j)
if obstacleGrid[i][j-1] == 0:
res += dp(i, j-1)
memo[(i, j)] = res
return res
if obstacleGrid[len(obstacleGrid)-1][len(obstacleGrid[0])-1] == 1:
return 0
else:
return dp(len(obstacleGrid)-1, len(obstacleGrid[0])-1)
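# Example (LeetCode's sample grid, with one obstacle in the centre):
#   grid = [[0, 0, 0], [0, 1, 0], [0, 0, 0]]
#   Solution().uniquePathsWithObstacles(grid)  # -> 2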
|
import os
import json
import requests
import logging
import concurrent.futures
from script.models import Station, Train, TrainDetail, StopInfo, StationDetail, PassTrain
# Connect to MongoDB
from mongoengine import connect
from script import config
connect(config.db_name)
# 1. Download & Parse station list from 12306
def get_station_list():
    logging.info('Fetching station list...')
Station.drop_collection()
try:
url = 'https://kyfw.12306.cn/otn/resources/js/framework/station_name.js'
resp = requests.get(url)
resp_str = resp.text
data = resp_str[resp_str.find("'") + 1: resp_str.rfind("'")]
# print(data)
station_list = data.split('@')
for station in station_list:
if station.find('|') > -1:
details = station.split('|')
point = Station()
point.py_code = '@' + details[0]
point.name = details[1]
point.tel_code = details[2]
point.pinyin = details[3]
point.initial = details[4]
point.identity = details[5]
# save
point.save()
except Exception as exc:
print(exc)
        logging.info('Aborting due to exception')
return
    logging.info('Fetched %s stations' % Station.objects().count())
# 2. Download & Parse train list from 12306
def get_train_list():
    logging.info('Fetching train list...')
Train.drop_collection()
try:
url = 'https://kyfw.12306.cn/otn/resources/js/query/train_list.js'
resp = requests.get(url)
resp_str = resp.text
data = resp_str[resp_str.find('{') + 1: resp_str.rfind('}')]
json_data = json.loads('{' + data + '}')
for day, day_value in json_data.items():
print(day)
if len(day_value) > 0:
for category, trains in day_value.items():
print('category: ' + category)
for train in trains:
code = train['train_no']
detail_text = train['station_train_code']
name = detail_text[:detail_text.find('(')]
# start
start_station_name = detail_text[detail_text.find('(') + 1: detail_text.find('-')]
                        # Just store the name as a string; the commented-out code below originally intended to store a referenced field
# results = Station.objects(name=station_name)
# if results.count() > 1:
# print(station_name)
# elif results.count() == 1:
# tr.start = results[0]
# else:
# tr.start = Station(name=station_name)
# end
end_station_name = detail_text[detail_text.find('-') + 1: detail_text.find(')')]
                        # Just store the name as a string; the commented-out code below originally intended to store a referenced field
# results = Station.objects(name=station_name)
# if results.count() > 1:
# print(station_name)
# elif results.count() == 1:
# tr.end = results[0]
# else:
# tr.start = Station(name=station_name)
# check exist
results = Train.objects(category=category, code=code, name=name, start=start_station_name, end=end_station_name)
if results.count() >= 1:
                            print('%s already exists, skipping' % name)
else:
tr = Train()
tr.category = category
tr.code = code
tr.name = name
tr.start = start_station_name
tr.end = end_station_name
tr.save()
except Exception as exc:
print(exc)
        logging.info('Aborting due to exception')
return
    logging.info('Fetched %s trains' % Train.objects().count())
# 3. Download all train detail
def get_train_detail_list():
# clear collection
TrainDetail.drop_collection()
with concurrent.futures.ThreadPoolExecutor() as executor:
for result in Train.objects():
if result.category == 'G':
executor.submit(get_train_detail, result.code, result.start, result.end, '2018-01-22')
# 3.1 Download one train detail
def get_train_detail(train_no, from_station, to_station, depart_date):
from_station_code = Station.objects(name=from_station).first().tel_code
to_station_telecode = Station.objects(name=to_station).first().tel_code
try:
# url = 'https://kyfw.12306.cn/otn/czxx/queryByTrainNo'
# params = {
# 'train_no': train_no,
# 'from_station_telecode': from_station_code,
# 'to_station_telecode': to_station_telecode,
# 'depart_date': depart_date,
# }
# resp = requests.get(url, params=params)
url = 'https://kyfw.12306.cn/otn/czxx/queryByTrainNo?train_no={}&from_station_telecode={}&to_station_telecode={}&depart_date={}'.format(
train_no, from_station_code, to_station_telecode, depart_date)
resp = requests.get(url)
real_data = resp.json().get('data').get('data')
if len(real_data) > 0:
train_detail = TrainDetail()
for item in real_data:
if 'start_station_name' in item.keys():
train_detail.station_train_code = item.get('station_train_code', '')
train_detail.start_station_name = item.get('start_station_name', '')
train_detail.end_station_name = item.get('end_station_name', '')
train_detail.service_type = item.get('service_type', '')
train_detail.train_class_name = item.get('train_class_name', '')
train_detail.save()
stop = StopInfo()
stop.isEnabled = item.get('isEnabled', True)
stop.station_no = int(item.get('station_no', '0'))
stop.station_name = item.get('station_name', '')
stop.arrive_time = item.get('arrive_time', '')
stopover_time_text = item.get('stopover_time', '----')
if '----' == stopover_time_text:
stop.stopover_time = 0
else:
stop.stopover_time = int(stopover_time_text.replace('分钟', ''))
stop.start_time = item.get('start_time', '')
train_detail.stop_info_list.append(stop)
train_detail.save()
            logging.info('Saved %s: %s-%s (%s stops)' % (train_detail.station_train_code, train_detail.start_station_name, \
                train_detail.end_station_name, len(train_detail.stop_info_list)))
except Exception as exc:
print(exc)
return
# Below is a sample response from https://kyfw.12306.cn/otn/czxx/queryByTrainNo
'''
{
'status': True,
'validateMessages': {
},
'messages': [
],
'validateMessagesShowId': '_validatorMessage',
'httpstatus': 200,
'data': {
'data': [
{
'end_station_name': '延吉西',
'service_type': '2',
'train_class_name': '动车',
'start_time': '05: 47',
'isEnabled': True,
'station_name': '长春',
'start_station_name': '长春',
'station_no': '01',
'arrive_time': '----',
'station_train_code': 'C1001',
'stopover_time': '----'
},
{
'isEnabled': True,
'start_time': '06: 29',
'station_name': '吉林',
'station_no': '02',
'stopover_time': '2分钟',
'arrive_time': '06: 27'
},
{
'isEnabled': True,
'start_time': '07: 25',
'station_name': '敦化',
'station_no': '03',
'stopover_time': '2分钟',
'arrive_time': '07: 23'
},
{
'isEnabled': True,
'start_time': '08: 04',
'station_name': '延吉西',
'station_no': '04',
'stopover_time': '----',
'arrive_time': '08: 04'
}
]
}
}
'''
# 4. Merge station stop info
def merge_station_stop_info():
    # Drop the target collection before rebuilding it (dropping TrainDetail
    # here would erase the very data this step reads from)
    StationDetail.drop_collection()
    # Multithreading caused duplicate writes here, so merge sequentially
# with concurrent.futures.ThreadPoolExecutor() as executor:
# for train_detail in TrainDetail.objects():
# executor.submit(merge_train_stop_info, train_detail)
for train_detail in TrainDetail.objects():
merge_train_stop_info(train_detail)
def merge_train_stop_info(train_detail):
try:
for stop_info in train_detail.stop_info_list:
station_name = stop_info.station_name
if exist_station_detail(station_name):
# exist
station_detail = StationDetail.objects(station_name=station_name).first()
else:
# new
station_detail = StationDetail()
station_detail.station_name=station_name
pass_train = PassTrain()
        # Train info
pass_train.station_train_code = train_detail.station_train_code
pass_train.start_station_name = train_detail.start_station_name
pass_train.end_station_name = train_detail.end_station_name
pass_train.service_type = train_detail.service_type
pass_train.train_class_name = train_detail.train_class_name
        # This train's stop info at this station
pass_train.isEnabled = stop_info.isEnabled
pass_train.station_no = stop_info.station_no
pass_train.station_name = stop_info.station_name
pass_train.arrive_time = stop_info.arrive_time
pass_train.stopover_time = stop_info.stopover_time
pass_train.start_time = stop_info.start_time
        logging.info('Station %s (train %s)' % (pass_train.station_name, pass_train.station_train_code))
station_detail.pass_train_list.append(pass_train)
station_detail.save()
station_detail.pass_train_num = len(station_detail.pass_train_list)
station_detail.save()
except Exception as exc:
print(exc)
return
# Check whether a StationDetail record already exists
def exist_station_detail(station_name):
count = StationDetail.objects(station_name=station_name).count()
if count > 1:
        logging.info('Unexpected duplicate records for %s' % station_name)
return True
elif count == 1:
return True
else:
return False
# Number of stations directly reachable from the top-10 stations by passing trains
def top_10_gaotie_direct():
    # Number of high-speed rail stations
    logging.info('Total high-speed rail stations nationwide: %s' % StationDetail.objects.count())
    # Top 10 stations by number of passing trains
results = StationDetail.objects.order_by('-pass_train_num')[:10]
top_ten_list = list()
for result in results:
top_ten_list.append(result.station_name)
logging.info('%s %s' % (result.station_name, result.pass_train_num))
# Directly reachable stations
for station_name in top_ten_list:
reach_station_list = get_direct_station(station_name)
logging.info('From %s, %s stations are directly reachable' % (station_name, len(reach_station_list)))
logging.info(reach_station_list)
def get_direct_station(station_name):
reach_station_list = list()
result = StationDetail.objects(station_name=station_name).first()
for pass_train in result.pass_train_list:
station_train_code = pass_train.station_train_code
train_detail = TrainDetail.objects(station_train_code=station_train_code).first()
for stop_info in train_detail.stop_info_list:
if stop_info.station_name not in reach_station_list:
reach_station_list.append(stop_info.station_name)
return reach_station_list
def reach_station_indirect():
start = '南京南'
reach_station_indirect_list = list()
# Directly reachable stations
reach_station_list = get_direct_station(start)
reach_station_indirect_list.extend(reach_station_list)
# Stations reachable with one transfer
for station in reach_station_list:
temp = get_direct_station(station)
reach_station_indirect_list = list(set(reach_station_indirect_list).union(set(temp)))
logging.info('From %s, %s stations are reachable with one transfer' % (start, len(reach_station_indirect_list)))
def find_transfer_plan():
start = '南京南'
end = '黄山北'
find_path(start, end)
# Find transfer paths between two stations
def find_path(start, end):
try:
result_from = StationDetail.objects(station_name=start).first()
# Query the destination once, instead of once per departing train
result_to = StationDetail.objects(station_name=end).first()
for pass_train_from in result_from.pass_train_list:
for pass_train_to in result_to.pass_train_list:
find_common_station(pass_train_from, pass_train_to)
except Exception as exc:
logging.info(exc)
return list()
# Check whether two trains share a common station
def find_common_station(pass_train_from, pass_train_to):
train_from = TrainDetail.objects(station_train_code=pass_train_from.station_train_code).first()
stop_list_from = train_from.stop_list()
train_to = TrainDetail.objects(station_train_code=pass_train_to.station_train_code).first()
stop_list_to = train_to.stop_list()
# union XOR symmetric-difference reduces to the plain intersection
transfer_list = list(set(stop_list_from) & set(stop_list_to))
if len(transfer_list) > 0:
for transfer in transfer_list:
# logging.info(transfer)
if get_arrive_time(train_from, transfer) < get_start_time(train_to, transfer):
logging.info('Take %s, transfer at %s to %s' % (train_from.str(), transfer, train_to.str()))
def get_arrive_time(train, station):
for stop in train.stop_info_list:
if stop.station_name == station:
return stop.arrive_time
def get_start_time(train, station):
for stop in train.stop_info_list:
if stop.station_name == station:
return stop.start_time
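# Sanity-check note (not part of the original script): times are zero-padded
# 'HH: MM' strings, so the string comparison in find_common_station matches
# chronological order, e.g. '06: 27' < '06: 29' < '07: 25'.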
def main():
# 1. Download & Parse station list from 12306
get_station_list()
# 2. Download & Parse train list from 12306
get_train_list()
# 3. Download all train detail with url
get_train_detail_list()
# 4. Merge station stop info
merge_station_stop_info()
# Stations directly reachable from the stations with the most passing trains
top_10_gaotie_direct()
# Stations reachable indirectly
reach_station_indirect()
# Transfer plans
find_transfer_plan()
if __name__ == '__main__':
# file
log_file_name = os.path.join('logs', 'Parse12306.log')
logging.basicConfig(level=logging.DEBUG, format='%(message)s', filename=log_file_name, filemode='w')
# console
console = logging.StreamHandler()
console.setLevel(logging.INFO)
formatter = logging.Formatter('%(message)s')
console.setFormatter(formatter)
logging.getLogger('').addHandler(console)
main()
|
import getpass
import subprocess
import threading
from optimization.pyroServerManagement import PyroServerManagement
from utils_intern.messageLogger import MessageLogger
"""
Created by Gustavo Aragón on 14.03.2018
"""
import configparser
import os
import signal
import sys
import shutil
# import swagger_server.__main__ as webserver
import time
import swagger_server.wsgi as webserver
from IO.ZMQClient import ForwarderDevice
from config.configUpdater import ConfigUpdater
"""
Get the address of the data.dat
"""
def startOfw(options):
# code to start a daemon
init = 0
def parseArgs():
mandatoryArgs = 0
def main():
global OPTIONS
logger, redisDB = setup()
logger.debug("###################################")
logger.info("OFW started")
logger.debug("###################################")
logger.debug("Starting name server and dispatch server")
#threading.Thread(target=PyroServerManagement.start_name_servers, args=(redisDB,)).start()
#threading.Thread(target=PyroServerManagement.start_pryo_mip_servers, args=(redisDB, 5,)).start()
logger.info("Starting webserver")
webserver.main()
# while True:
# results=opt.start()
# print(results)
# time.sleep(5)
def signal_handler(sig, frame):
print('You pressed Ctrl+C!')
if zmqForwarder:
print("stopping zmq forwarder")
zmqForwarder.Stop()
sys.exit(0)
zmqForwarder = None
# Register the signal to the handler
signal.signal(signal.SIGTERM, signal_handler)
def setup():
# Bind to the module-level forwarder so signal_handler can stop it
global zmqForwarder
signal.signal(signal.SIGINT, signal_handler)
config_path = "/usr/src/app/optimization/resources/ConfigFile.properties"
config_path_default = "/usr/src/app/config/ConfigFile.properties"
ConfigUpdater.copy_config(config_path_default, config_path)
# Creating an object of the configuration file (standard values)
config = configparser.RawConfigParser()
config.read(config_path)
log_level = config.get("IO", "log.level", fallback="DEBUG")
logger = MessageLogger.set_and_get_logger_parent(id="", level=log_level)
redisDB = clear_redis(logger)
copy_models()
copy_env_variables()
#logger.debug("env = "+str(os.environ))
zmqHost = config.get("IO", "zmq.host")
pubPort = config.get("IO", "zmq.pub.port")
subPort = config.get("IO", "zmq.sub.port")
zmqForwarder = ForwarderDevice(zmqHost, pubPort, subPort)
zmqForwarder.start()
return logger, redisDB
def copy_models():
models_path = "/usr/src/app/optimization/resources/models"
if os.path.exists(models_path):
for file in os.listdir(models_path):
file_path = os.path.join(models_path, file)
if os.path.isfile(file_path) and ".py" in file:
shutil.copyfile(file_path, os.path.join("/usr/src/app/optimization/models", file))
def clear_redis(logger):
logger.info("reset redis")
from IO.redisDB import RedisDB
redisDB = RedisDB()
redisDB.reset()
redisDB.set("time", time.time())
return redisDB
def copy_env_variables():
with open("/usr/src/app/utils_intern/env_var.txt", "r") as f:
rows = f.readlines()
for row in rows:
if len(row) > 0:
row = row.replace("\n","")
s = row.split("=")
if len(s) == 1:
s.append("")
os.environ[s[0]] = str(s[1])
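# Illustrative env_var.txt format this parser expects (names/values are made up):
#   REDIS_HOST=redis
#   LOG_LEVEL=DEBUG
#   EMPTY_FLAG=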
if __name__ == "__main__":
# execute only if run as a script
main()
|
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
import torch
import torch.nn as nn
import torch.nn.functional as F
class RNNEncoder(nn.Module):
"""RNN encoder."""
def __init__(
self, cell_type='lstm', feat_dim=3, hidden_size=128, num_layers=1,
dropout_fc=0.1, dropout_rnn=0.0, bidirectional=False,**kwargs):
super(RNNEncoder, self).__init__()
self.cell_type = cell_type.lower()
self.feat_dim = feat_dim
self.num_layers = num_layers
self.dropout_fc = dropout_fc
self.dropout_rnn = dropout_rnn
self.bidirectional = bidirectional
self.hidden_size = hidden_size
input_size = feat_dim
if self.cell_type == 'lstm':
self.rnn = LSTM(input_size=input_size, hidden_size=hidden_size,
num_layers=num_layers, bidirectional=bidirectional,
dropout=self.dropout_rnn)
elif self.cell_type == 'gru':
self.rnn = GRU(input_size=input_size, hidden_size=hidden_size,
num_layers=num_layers, bidirectional=bidirectional,
dropout=self.dropout_rnn)
else:
self.rnn = RNN(input_size=input_size, hidden_size=hidden_size,
num_layers=num_layers, bidirectional=bidirectional,
dropout=self.dropout_rnn)
self.output_units = hidden_size
if bidirectional:
self.output_units *= 2
def forward(self, src_seq):
x = src_seq
# B x T x C -> T x B x C
x = x.transpose(0, 1)
encoder_out, h_t = self.rnn(x)
if self.cell_type == 'lstm':
final_hiddens, final_cells = h_t
else:
final_hiddens, final_cells = h_t, None
if self.dropout_fc>0:
encoder_out = F.dropout(encoder_out, p=self.dropout_fc, training=self.training)
if self.bidirectional:
batch_size = src_seq.size(0)
def combine_bidir(outs):
out = outs.view(self.num_layers, 2, batch_size, -1).transpose(1, 2).contiguous()
return out.view(self.num_layers, batch_size, -1)
final_hiddens = combine_bidir(final_hiddens)
if self.cell_type == 'lstm':
final_cells = combine_bidir(final_cells)
# T x B x C -> B x T x C
encoder_out = encoder_out.transpose(0, 1)
final_hiddens = final_hiddens.transpose(0, 1)
if self.cell_type == 'lstm':
final_cells = final_cells.transpose(0, 1)
return encoder_out, (final_hiddens, final_cells)
class AttentionLayer(nn.Module):
def __init__(self, input_embed_dim, source_embed_dim, output_embed_dim, bias=False):
super(AttentionLayer, self).__init__()
self.input_proj = Linear(input_embed_dim, source_embed_dim, bias=bias)
self.output_proj = Linear(input_embed_dim + source_embed_dim, output_embed_dim, bias=bias)
def forward(self, input, source_hids, encoder_padding_mask=None):
# input: bsz x input_embed_dim
# source_hids: srclen x bsz x output_embed_dim
# x: bsz x output_embed_dim
x = self.input_proj(input)
# compute attention
attn_scores = (source_hids * x.unsqueeze(0)).sum(dim=2)
# don't attend over padding
if encoder_padding_mask is not None:
attn_scores = attn_scores.float().masked_fill_(
encoder_padding_mask,
float('-inf')
).type_as(attn_scores) # FP16 support: cast to float and back
attn_scores = F.softmax(attn_scores, dim=0) # srclen x bsz
# sum weighted sources
x = (attn_scores.unsqueeze(2) * source_hids).sum(dim=0)
out = torch.cat((x, input), dim=1)
x = torch.tanh(self.output_proj(out))  # F.tanh is deprecated; use torch.tanh
return x, attn_scores
class RNNDecoder(nn.Module):
"""RNN decoder."""
def __init__(
self, cell_type='lstm', feat_dim=3, hidden_size=128, num_layers=1,
dropout_fc=0.1, dropout_rnn=0.0, encoder_output_units=128, max_seq_len=10,
attention=True, traj_attn_intent_dim=0,**kwargs):
super(RNNDecoder, self).__init__()
self.cell_type = cell_type.lower()
self.dropout_fc = dropout_fc
self.dropout_rnn = dropout_rnn
self.hidden_size = hidden_size
self.encoder_output_units = encoder_output_units
self.num_layers = num_layers
self.max_seq_len = max_seq_len
self.feat_dim = feat_dim
self.traj_attn_intent_dim =traj_attn_intent_dim
input_size = feat_dim
if encoder_output_units != hidden_size:
self.encoder_hidden_proj = Linear(encoder_output_units, hidden_size)
if self.cell_type == 'lstm':
self.encoder_cell_proj = Linear(encoder_output_units, hidden_size)
else:
self.encoder_cell_proj = None
else:
self.encoder_hidden_proj = self.encoder_cell_proj = None
if self.cell_type == 'lstm':
self.cell = LSTM
elif self.cell_type == 'gru':
self.cell = GRU
else:
self.cell = RNN
self.rnn = self.cell(input_size=input_size,
hidden_size=hidden_size, bidirectional=False,
dropout=self.dropout_rnn,
num_layers=num_layers)
if attention:
self.attention = AttentionLayer(hidden_size, encoder_output_units, hidden_size, bias=False)
else:
self.attention = None
self.output_projection = Linear(hidden_size, feat_dim)
if traj_attn_intent_dim>0:
self.traj_attn_fc = Linear(hidden_size, traj_attn_intent_dim)
def forward(self, encoder_out_list, start_decode=None, encoder_mask=None):
x = start_decode.unsqueeze(1)
bsz = x.size(0)
# get outputs from encoder
encoder_outs, (encoder_hiddens, encoder_cells) = encoder_out_list
# B x T x C -> T x B x C
encoder_outs = encoder_outs.transpose(0, 1)
encoder_hiddens = encoder_hiddens.transpose(0, 1)
if encoder_mask is not None:
encoder_mask = encoder_mask.transpose(0, 1)
prev_hiddens = [encoder_hiddens[i] for i in range(self.num_layers)]
if self.cell_type == 'lstm':
encoder_cells = encoder_cells.transpose(0, 1)
prev_cells = [encoder_cells[i] for i in range(self.num_layers)]
x = x.transpose(0, 1)
srclen = encoder_outs.size(0)
# initialize previous states
if self.encoder_hidden_proj is not None:
prev_hiddens = [self.encoder_hidden_proj(x) for x in prev_hiddens]
prev_hiddens = torch.stack(prev_hiddens, dim=0)
if self.encoder_cell_proj is not None:
prev_cells = [self.encoder_cell_proj(x) for x in prev_cells]
if self.cell_type == 'lstm':
prev_cells = torch.stack(prev_cells, dim=0)
attn_scores = x.new_zeros(srclen, self.max_seq_len, bsz)
inp = x
outs = []
hidden_outs=[]
for j in range(self.max_seq_len):
if self.cell_type == 'lstm':
output, (prev_hiddens, prev_cells) = self.rnn(inp, (prev_hiddens, prev_cells))
else:
output, prev_hiddens = self.rnn(inp, prev_hiddens)
output = output.view(bsz, -1)
# apply attention using the last layer's hidden state
if self.attention is not None:
out, attn_scores[:, j, :] = self.attention(output, encoder_outs, encoder_mask)
else:
out = output
if self.dropout_fc>0:
out = F.dropout(out, p=self.dropout_fc, training=self.training)
hid_out = out
if self.traj_attn_intent_dim > 0:
hid_out= self.traj_attn_fc(hid_out)
hid_out = F.selu(hid_out)
hidden_outs.append(hid_out)
out = self.output_projection(out)
# save final output
outs.append(out)
inp = out.unsqueeze(0)
# collect outputs across time steps
x = torch.cat(outs, dim=0).view(self.max_seq_len, bsz, self.feat_dim)
hidden_outs = torch.cat(hidden_outs, dim=0).view(self.max_seq_len, bsz, -1)
# T x B x C -> B x T x C
x = x.transpose(1, 0)
hidden_outs=hidden_outs.transpose(1, 0)
# srclen x tgtlen x bsz -> bsz x tgtlen x srclen
attn_scores = attn_scores.transpose(0, 2)
# return decoded trajectories and the per-step hidden features
return x, hidden_outs
def LSTM(input_size, hidden_size, **kwargs):
m = nn.LSTM(input_size, hidden_size, **kwargs)
for name, param in m.named_parameters():
if 'weight' in name or 'bias' in name:
param.data.uniform_(-0.1, 0.1)
return m
def GRU(input_size, hidden_size, **kwargs):
m = nn.GRU(input_size, hidden_size, **kwargs)
for name, param in m.named_parameters():
if 'weight' in name or 'bias' in name:
param.data.uniform_(-0.1, 0.1)
return m
def RNN(input_size, hidden_size, **kwargs):
m = nn.RNN(input_size, hidden_size, **kwargs)
for name, param in m.named_parameters():
if 'weight' in name or 'bias' in name:
param.data.uniform_(-0.1, 0.1)
return m
def Linear(in_features, out_features, bias=True):
"""Linear layer (input: N x T x C)"""
m = nn.Linear(in_features, out_features, bias=bias)
m.weight.data.uniform_(-0.1, 0.1)
if bias:
m.bias.data.uniform_(-0.1, 0.1)
return m
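# Minimal smoke test (illustrative, not part of the original module): wire the
# encoder into the decoder on dummy trajectories, following the B x T x C
# convention used in the comments above.
if __name__ == '__main__':
    enc = RNNEncoder(cell_type='lstm', feat_dim=3, hidden_size=128)
    dec = RNNDecoder(cell_type='lstm', feat_dim=3, hidden_size=128,
                     encoder_output_units=128, max_seq_len=10)
    src = torch.randn(4, 15, 3)   # 4 trajectories, 15 time steps, 3 features
    encoder_out = enc(src)
    start = torch.zeros(4, 3)     # first decoder input for each batch element
    traj, hidden = dec(encoder_out, start_decode=start)
    print(traj.shape)             # expected: torch.Size([4, 10, 3])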
|
from heart_rate_monitoring import heart_rate_insta
def test_instantaneous_HR_indices():
import numpy as np
# A sine with period Fs = 80 samples over 8000 samples completes exactly 100
# cycles, so 100 beats should be detected.
Fs = 80
sample = 8000
x = np.arange(sample)
y = np.array(np.sin(2 * np.pi * x / Fs))
heart_rate_length = heart_rate_insta(y)
assert heart_rate_length == 100
|
from __future__ import annotations
from typing import List, Optional, Any, Tuple, Dict, Union, cast, Set, TYPE_CHECKING
from PIL.Image import Image
from lxml.etree import ElementBase as Element
from inspect import signature
from deprecated import deprecated
from ocrd import Workspace, OcrdFile, OcrdExif
from ocrd_utils import MIMETYPE_PAGE, getLogger, pushd_popd, VERSION as OCRD_VERSION
from ocrd_models.ocrd_page import PcGtsType, PageType, MetadataType
from ocrd_models.constants import NAMESPACES
if TYPE_CHECKING:
from ocrd_browser.model import Document
IMAGE_FROM_PAGE_FILENAME_SUPPORT = 'filename' in signature(Workspace.image_from_page).parameters
class Page:
def __init__(self, document: Document, id_: str, file_group: str):
self.document = document
self._id = id_
self.file_group = file_group
self._pc_gts: Optional[PcGtsType] = None
self._images: Optional[List[Image]] = None
self._image_files: Optional[List[OcrdFile]] = None
self._page_file: Optional[OcrdFile] = None
@property
def images(self) -> List[Image]:
if self._images is None:
self._images = [self.document.resolve_image(f) for f in self.image_files]
return self._images
@property
def image_files(self) -> List[OcrdFile]:
if self._image_files is None:
self._image_files = self.get_files("//image/.*")
return self._image_files
def get_files(self, mimetype: str = MIMETYPE_PAGE) -> List[OcrdFile]:
return self.document.files_for_page_id(self.id, self.file_group, mimetype=mimetype)
@property
def page_file(self) -> OcrdFile:
if self._page_file is None:
page_files = self.get_files(MIMETYPE_PAGE)
if page_files:
self._page_file = page_files[0]
return self._page_file
@property # type: ignore[misc]
@deprecated(reason="Makes no sense anymore, what is **the** file of a page? Use get_files() or page_file / image_files instead")
def file(self) -> Optional[OcrdFile]:
"""
TODO: Makes no sense anymore, what is **the** file of a page?
The whole Page class needs to be split in "PageProxy" and PageXmlPage maybe
@return: Optional[OcrdFile]
"""
if self.page_file:
return self.page_file
elif self.image_files:
return next(iter(self.image_files))
else:
# noinspection PyTypeChecker
any_files = self.get_files(mimetype=None)
if any_files:
return next(iter(any_files))
return None
@property
def pc_gts(self) -> PcGtsType:
if self._pc_gts is None:
if self.page_file:
self._pc_gts = self.document.page_for_file(self.page_file)
else:
image_files = self.image_files
if len(image_files) > 0:
self._pc_gts = self.document.page_for_file(image_files[0])
return self._pc_gts
def get_image(self, feature_selector: Union[str, Set[str]] = '', feature_filter: Union[str, Set[str]] = '', filename: str = '') -> Tuple[Image, Dict[str, Any], OcrdExif]:
log = getLogger('ocrd_browser.model.page.Page.get_image')
ws: Workspace = self.document.workspace
kwargs = {
'transparency': True,
'feature_selector': feature_selector if isinstance(feature_selector, str) else ','.join(sorted(feature_selector)),
'feature_filter': feature_filter if isinstance(feature_filter, str) else ','.join(sorted(feature_filter))
}
if filename:
if IMAGE_FROM_PAGE_FILENAME_SUPPORT:
kwargs['filename'] = filename
else:
raise RuntimeError('Parameter filename not supported in ocrd version {}, at least 2.33.0 needed'.format(OCRD_VERSION))
try:
with pushd_popd(ws.directory):
page_image, page_coords, page_image_info = ws.image_from_page(self.page, self.id, **kwargs)
except Exception as e:
log.exception(e)
page_image, page_coords, page_image_info = None, None, None
return page_image, page_coords, page_image_info
@property
def id(self) -> str:
return self._id
@property
def page(self) -> PageType:
return self.pc_gts.get_Page()
@property
def meta(self) -> MetadataType:
return self.pc_gts.get_Metadata()
def xpath(self, xpath: str) -> List[Element]:
page_namespace = {'page': ns for ns in self.xml_root.nsmap.values() if ns.startswith('http://schema.primaresearch.org/PAGE/gts/pagecontent/')}
return cast(List[Element], self.xml_root.xpath(xpath, namespaces=dict(NAMESPACES, **page_namespace)))
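# Illustrative usage (region type from the PAGE schema):
#   text_regions = page.xpath('//page:TextRegion')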
@property
def xml_root(self) -> Element:
if self.pc_gts.gds_elementtree_node_ is None:
from ocrd_models.ocrd_page_generateds import parsexmlstring_
from io import StringIO
sio = StringIO()
self.pc_gts.export(
outfile=sio,
level=0,
name_='PcGts',
namespaceprefix_='pc:',
namespacedef_='xmlns:pc="%s" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="%s %s/pagecontent.xsd"' % (
NAMESPACES['page'],
NAMESPACES['page'],
NAMESPACES['page']
))
self.pc_gts.gds_elementtree_node_ = parsexmlstring_(sio.getvalue()) # pylint: disable=undefined-variable
return self.pc_gts.gds_elementtree_node_
|
# ################################################ MODULE INFO ###################################################
# Author: Jamie Caesar
# Email: jcaesar@presidio.com
#
# The module contains all the classes and functions necessary to represent a SecureCRT session (or simulate one)
#
#
#
# ################################################ IMPORTS ###################################################
import os
import sys
import logging
import time
import datetime
import re
from abc import ABCMeta, abstractmethod
# ############################################# MESSAGEBOX CONSTANTS ################################################
#
# These are used for MessageBox creation.
#
# Button parameter options. These can be OR'd ( | ) together to combine one from each category, and the final value
# passed in as the option to the MessageBox.
#
# Options to display icons
ICON_STOP = 16 # display the ERROR/STOP icon.
ICON_QUESTION = 32 # display the '?' icon
ICON_WARN = 48 # display a '!' icon.
ICON_INFO = 64 # displays "info" icon.
#
# Options to choose what types of buttons are available
BUTTON_OK = 0 # OK button only
BUTTON_CANCEL = 1 # OK and Cancel buttons
BUTTON_ABORTRETRYIGNORE = 2 # Abort, Retry, and Ignore buttons
BUTTON_YESNOCANCEL = 3 # Yes, No, and Cancel buttons
BUTTON_YESNO = 4 # Yes and No buttons
BUTTON_RETRYCANCEL = 5 # Retry and Cancel buttons
#
# Options for which button is default
DEFBUTTON1 = 0 # First button is default
DEFBUTTON2 = 256 # Second button is default
DEFBUTTON3 = 512 # Third button is default
#
#
# Possible MessageBox() return values
IDOK = 1 # OK button clicked
IDCANCEL = 2 # Cancel button clicked
IDABORT = 3 # Abort button clicked
IDRETRY = 4 # Retry button clicked
IDIGNORE = 5 # Ignore button clicked
IDYES = 6 # Yes button clicked
IDNO = 7 # No button clicked
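# Example: combine one option from each category, e.g. a question dialog with
# Yes/No buttons where "No" is the default (used later in validate_path):
#   options = ICON_QUESTION | BUTTON_YESNO | DEFBUTTON2   # 32 | 4 | 256 = 292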
# ################################################ CLASSES ###################################################
class ConnectError(Exception):
def __init__(self, message):
super(ConnectError, self).__init__(message)
class DeviceInteractionError(Exception):
def __init__(self, message):
super(DeviceInteractionError, self).__init__(message)
class OSDetectError(Exception):
def __init__(self, message):
super(OSDetectError, self).__init__(message)
class Session:
__metaclass__ = ABCMeta
def __init__(self, script_path, settings_importer):
self.script_dir, self.script_name = os.path.split(script_path)
self.settings_importer = settings_importer
self.os = None
self.prompt = None
self.hostname = None
self.logger = logging
self.settings = settings_importer.get_settings_dict()
if self.settings['debug']:
save_path = os.path.realpath(self.settings['save path'])
self.debug_dir = os.path.join(save_path, "debugs")
log_file = os.path.join(self.debug_dir, self.script_name.replace(".py", "-debug.txt"))
self.validate_path(log_file)
self.logger = logging.getLogger("securecrt")
self.logger.propagate = False
self.logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(message)s', datefmt='%m/%d/%Y %I:%M:%S%p')
fh = logging.FileHandler(log_file, mode='w')
fh.setFormatter(formatter)
self.logger.addHandler(fh)
self.logger.debug("<INIT> Starting Logging. Running Python version: {0}".format(sys.version))
def validate_path(self, path):
"""
Verify that the directory of the supplied file exists, prompting to create it if necessary.
:param path: The path to validate
:return:
"""
self.logger.debug("<VALIDATE_PATH> Starting validation of path: {0}".format(path))
# Get the directory portion of the path
base_dir = os.path.dirname(path)
self.logger.debug("<VALIDATE_PATH> Base directory is {0}".format(base_dir))
# Verify that base_path is valid absolute path, or else error and exit.
if not os.path.isabs(base_dir):
error_str = 'Directory is invalid. Please correct\n' \
'the path in the script settings.\n' \
'Dir: {0}'.format(base_dir)
self.message_box(error_str, "Path Error", ICON_STOP)
self.end()
sys.exit()
# Check if directory exists. If not, prompt to create it.
if not os.path.exists(os.path.normpath(base_dir)):
message_str = "The path: '{0}' does not exist. Do you want to create it?.".format(base_dir)
result = self.message_box(message_str, "Create Directory?", ICON_QUESTION | BUTTON_YESNO | DEFBUTTON2)
if result == IDYES:
os.makedirs(base_dir)
else:
self.message_box("Output directory does not exist. Exiting.", "Invalid Path", ICON_STOP)
self.end()
sys.exit()
def create_output_filename(self, desc, ext=".txt", include_date=True):
"""
Generates a filename based on information from the connected device
:param desc: <str> Custom description to put in the filename.
:param ext: Default extension is ".txt", but another extension can be supplied.
:param include_date: A boolean to specify whether the date string should be included in the filename.
:return:
"""
self.logger.debug("<CREATE_FILENAME> Starting creation of filename with desc: {0}, ext: {1}, include_date: {2}"
.format(desc, ext, include_date))
self.logger.debug("<CREATE_FILENAME> Original Save Path: {0}".format(self.settings['save path']))
if not os.path.isabs(self.settings['save path']):
save_path = os.path.join(self.script_dir, self.settings['save path'])
save_path = os.path.realpath(save_path)
else:
save_path = os.path.realpath(self.settings['save path'])
self.logger.debug("<CREATE_FILENAME> Real Save Path: {0}".format(save_path))
# Expand any environment variables and "~" in the path
save_path = os.path.expandvars(os.path.expanduser(save_path))
self.logger.debug("<CREATE_FILENAME> Expanded Save Path: {0}".format(save_path))
# Remove reserved filename characters from filename
clean_desc = desc.replace("/", "-")
clean_desc = clean_desc.replace(".", "-")
clean_desc = clean_desc.replace(":", "-")
clean_desc = clean_desc.replace("\\", "")
clean_desc = clean_desc.replace("| ", "")
# Just in case the trailing space from the above replacement was missing.
clean_desc = clean_desc.replace("|", "")
if include_date:
# Get the current date in the format supplied in date_format
now = datetime.datetime.now()
my_date = now.strftime(self.settings['date format'])
self.logger.debug("<CREATE_FILENAME> Created Date String: {0}".format(my_date))
file_bits = [self.hostname, clean_desc, my_date]
else:
file_bits = [self.hostname, desc]
self.logger.debug("<CREATE_FILENAME> Using {0} to create filename".format(file_bits))
# Create Filename based on hostname and date format string.
filename = '-'.join(file_bits)
filename = filename + ext
file_path = os.path.normpath(os.path.join(save_path, filename))
self.logger.debug("<CREATE_FILENAME> Final Filename: {0}".format(file_path))
return file_path
@abstractmethod
def connect(self, host, username, password=None):
pass
@abstractmethod
def disconnect(self):
pass
@abstractmethod
def is_connected(self):
pass
@abstractmethod
def end(self):
pass
@abstractmethod
def message_box(self, message, title="", options=0):
pass
@abstractmethod
def prompt_window(self, message, title="", hide_input=False):
pass
@abstractmethod
def file_open_dialog(self, title, open_type, file_filter):
pass
@abstractmethod
def get_command_output(self, command):
pass
@abstractmethod
def write_output_to_file(self, command, filename):
pass
@abstractmethod
def create_new_saved_session(self, session_name, ip, protocol="SSH2", folder="_imports"):
pass
@abstractmethod
def send_config_commands(self, command_list, output_filename=None):
pass
@abstractmethod
def save(self):
pass
class CRTSession(Session):
def __init__(self, crt, settings_importer):
self.crt = crt
super(CRTSession, self).__init__(crt.ScriptFullName, settings_importer)
self.logger.debug("<INIT> Starting creation of CRTSession object")
# Set up SecureCRT tab for interaction with the scripts
self.tab = self.crt.GetScriptTab().Screen
if not self.is_connected():
self.logger.debug("<INIT> Session not connected prior to creating object. Skipping device setup.")
else:
self.logger.debug("<INIT> Session already connected. Setting up device session.")
self.__start()
def __start(self):
"""
Performs initial setup of the session by detecting the prompt, hostname and network OS of the connected device.
:return: None
"""
# Set Tab parameters to allow correct sending/receiving of data via SecureCRT
self.tab.Synchronous = True
self.tab.IgnoreEscape = True
self.logger.debug("<START> Set Syncronous and IgnoreEscape")
# Get prompt (and thus hostname) from device
self.prompt = self.__get_prompt()
self.logger.debug("<START> Set Prompt: {0}".format(self.prompt))
self.hostname = self.prompt[:-1]
self.logger.debug("<START> Set Hostname: {0}".format(self.hostname))
# Detect the OS of the device, because outputs will differ per OS
self.os = self.__get_network_os()
self.logger.debug("<START> Discovered OS: {0}".format(self.os))
# Get terminal length and width, so we can revert back after changing them.
self.term_len, self.term_width = self.__get_term_info()
self.logger.debug("<START> Discovered Term Len: {0}, Term Width: {1}".format(self.term_len, self.term_width))
# If modify_term setting is True, then prevent "--More--" prompt (length) and wrapping of lines (width)
if self.settings['modify term']:
self.logger.debug("<START> Modify Term setting is set. Sending commands to adjust terminal")
if self.os == "IOS" or self.os == "NXOS":
# Send term length command and wait for prompt to return
if self.term_len:
self.tab.Send('term length 0\n')
self.tab.WaitForString(self.prompt)
elif self.os == "ASA":
if self.term_len:
self.tab.Send('terminal pager 0\r\n')
self.tab.WaitForString(self.prompt)
# Send term width command and wait for prompt to return (depending on platform)
if self.os == "IOS":
if self.term_len:
self.tab.Send('term width 0\n')
self.tab.WaitForString(self.prompt)
elif self.os == "NXOS":
if self.term_len:
self.tab.Send('term width 511\n')
self.tab.WaitForString(self.prompt)
# Added due to Nexus echoing twice if system hangs and hasn't printed the prompt yet.
# Seems like maybe the previous WaitFor prompt isn't always working correctly. Something to look into.
time.sleep(0.1)
def __get_prompt(self):
"""
Returns the prompt of the device logged into.
"""
self.logger.debug("<GET PROMPT> Attempting to discover device prompt.")
# Send two line feeds to the device so the device will re-display the prompt line
self.tab.Send("\r\n\r\n")
# Waits for first linefeed to be echoed back to us
wait_result = self.tab.WaitForString("\n", 5)
if wait_result == 1:
# Capture the text until we receive the next line feed
prompt = self.tab.ReadString("\n", 5)
self.logger.debug("<GET PROMPT> Prompt Discovered:'{}'".format(repr(prompt)))
# Remove any trailing control characters from what we captured
prompt = prompt.strip()
self.logger.debug("<GET PROMPT> Cleaned Prompt:'{}'".format(prompt))
# Check for non-enable mode (prompt ends with ">" instead of "#")
if prompt[-1] == ">":
self.end()
raise DeviceInteractionError("Not in enable mode. Cannot continue.")
# If our prompt shows in a config mode -- there is a ) before # -- e.g. Router(config)#
if prompt[-2] == ")":
self.end()
raise DeviceInteractionError("Device already in config mode.")
elif prompt[-1] != "#":
self.end()
raise DeviceInteractionError("Unable to capture prompt.")
else:
return prompt
else:
# If WaitForString timed out, return None to signal failure
return None
def __get_network_os(self):
"""
Discovers OS type so that scripts can use them when necessary (e.g. commands vary by version)
"""
send_cmd = "show version | i Cisco"
raw_version = self.__get_output(send_cmd)
self.logger.debug("<GET OS> Version String: {0}".format(raw_version))
if "IOS XE" in raw_version:
version = "IOS"
elif "Cisco IOS Software" in raw_version or "Cisco Internetwork Operating System" in raw_version:
version = "IOS"
elif "Cisco Nexus Operating System" in raw_version:
version = "NXOS"
elif "Adaptive Security Appliance" in raw_version:
version = "ASA"
else:
self.logger.debug("<GET OS> Error detecting OS. Raising Exception.")
raise OSDetectError("Unknown or Unsupported device OS.")
return version
def __get_term_info(self):
"""
Returns the current terminal length and width, by capturing the output from the relevant commands.
:return: A 2-tuple containing the terminal length and the terminal width
"""
re_num_exp = r'\d+'
re_num = re.compile(re_num_exp)
if self.os == "IOS" or self.os == "NXOS":
result = self.__get_output("show terminal | i Length")
term_info = result.split(',')
re_length = re_num.search(term_info[0])
if re_length:
length = re_length.group(0)
else:
length = None
re_width = re_num.search(term_info[1])
if re_width:
width = re_width.group(0)
else:
width = None
return length, width
elif self.os == "ASA":
pager = self.__get_output("show pager")
re_length = re_num.search(pager)
if re_length:
length = re_length.group(0)
else:
length = None
term_info = self.__get_output("show terminal")
re_width = re_num.search(term_info[1])
if re_width:
width = re_width.group(0)
else:
width = None
return length, width
else:
return None, None
def __get_output(self, command):
"""
A function that issues a command to the current session and returns the output as a string variable.
*** NOTE *** This is private because it should only be used when it is guaranteed that the output will be
small (less than 1000 lines), or else SecureCRT can crash.
:param command: Command string that should be sent to the device
:return result: Variable holding the result of issuing the above command.
"""
# Send command
self.tab.Send(command.strip() + '\n')
# Ignore the echo of the command we typed
self.tab.WaitForString(command)
# Capture the output until we get our prompt back and write it to the file
result = self.tab.ReadString(self.prompt)
return result.strip('\r\n')
def connect(self, host, username, password=None):
#TODO: Handle manual telnet login process
self.logger.debug("<CONNECT> Attempting Connection to: {}@{}".format(username, host))
if not password:
password = self.prompt_window("Enter the password for {}@{}.".format(username, host), "Password",
hide_input=True)
ssh2_string = "/SSH2 /ACCEPTHOSTKEYS /L {} /PASSWORD {} {}".format(username, password, host)
if not self.is_connected():
try:
self.logger.debug("<CONNECT> Sending '/SSH2 /ACCEPTHOSTKEYS /L {} /PASSWORD <removed> {}' to SecureCRT."
.format(username, host))
self.crt.Session.Connect(ssh2_string)
except:
error = self.crt.GetLastErrorMessage()
self.logger.debug("<CONNECT> Error connecting SSH2 to {}: {}".format(host, error))
try:
ssh1_string = "/SSH1 /ACCEPTHOSTKEYS /L {} /PASSWORD {} {}".format(username, password, host)
self.logger.debug("<CONNECT> Sending '/SSH1 /ACCEPTHOSTKEYS /L {} /PASSWORD <removed> {}' to SecureCRT."
.format(username, host))
self.crt.Session.Connect(ssh1_string)
except:
error = self.crt.GetLastErrorMessage()
self.logger.debug("<CONNECT> Error connecting SSH1 to {}: {}".format(host, error))
raise ConnectError(error)
# Once connected, we want to make sure the banner message has finished printing before trying to do
# anything else. We'll do this by sending a small string (followed by backspaces to erase it), which
# will be printed after the final CLI prompt prints. We will wait until we see the end of the prompt
# (# or >) followed by our unique string
# Assume test string is sent before banner and prompt finishing printing. We should wait for our
# string to be echoed after a # or > symbol. Timeout if it takes too long (timeout_seconds).
test_string = "!\b"
timeout_seconds = 2
self.tab.Send(test_string)
result = self.tab.WaitForStrings(["# {}".format(test_string),
"#{}".format(test_string),
">{}".format(test_string)], timeout_seconds)
self.logger.debug("<CONNECT> Prompt result = {}".format(result))
# If the above check timed out, either everything printed before we sent our string, or it hasn't been
# long enough. A few times a second, send our string (and backspace) until we finally capture it before
# proceeding.
while result == 0:
test_string = "!\b"
timeout_seconds = 0.2
self.tab.Send(test_string)
result = self.tab.WaitForString(test_string, timeout_seconds)
self.logger.debug("<CONNECT> Prompt result = {}".format(result))
# Continue with setting up the session with the device
self.__start()
else:
self.logger.debug("<CONNECT> Session already connected. Please disconnect before trying again.")
raise ConnectError("SecureCRT is already connected.")
def disconnect(self):
self.logger.debug("<DISCONNECT> Sending 'exit' command.")
self.tab.Send("exit\n")
self.tab.WaitForString("exit")
time.sleep(0.25)
attempts = 0
while self.is_connected() and attempts < 10:
self.logger.debug("<DISCONNECT> Not disconnected. Attempting ungraceful disconnect.")
self.crt.Session.Disconnect()
time.sleep(0.1)
attempts += 1
if attempts >= 10:
raise ConnectError("Unable to disconnect from session.")
def is_connected(self):
session_connected = self.crt.Session.Connected
if session_connected == 1:
self.logger.debug("<IS_CONNECTED> Checking Connected Status. Got: {} (True)".format(session_connected))
return True
else:
self.logger.debug("<IS_CONNECTED> Checking Connected Status. Got: {} (False)".format(session_connected))
return False
def end(self):
"""
End the session by returning the device's terminal parameters back to normal.
:return:
"""
# If the 'tab' and 'prompt' options aren't in the session structure, then we aren't actually connected to a
# device when this is called, and there is nothing to do.
self.logger.debug("<END> Ending Session")
if self.crt:
if self.tab:
if self.prompt:
if self.settings['modify term']:
self.logger.debug("<END> Modify Term setting is set. Sending commands to return terminal "
"to normal.")
if self.os == "IOS" or self.os == "NXOS":
if self.term_len:
# Set term length back to saved values
self.tab.Send('term length {0}\n'.format(self.term_len))
self.tab.WaitForString(self.prompt)
if self.term_width:
# Set term width back to saved values
self.tab.Send('term width {0}\n'.format(self.term_width))
self.tab.WaitForString(self.prompt)
elif self.os == "ASA":
self.tab.Send("terminal pager {0}\n".format(self.term_len))
self.prompt = None
self.logger.debug("<END> Deleting learned Prompt.")
self.hostname = None
self.logger.debug("<END> Deleting learned Hostname.")
# Clear the discovered OS
self.os = None
self.logger.debug("<END> Deleting Discovered OS.")
self.tab.Synchronous = False
self.tab.IgnoreEscape = False
self.logger.debug("<END> Unset Syncronous and IgnoreEscape")
def message_box(self, message, title="", options=0):
"""
Prints a message in a pop-up message box and captures the response (which button was clicked). See the
"MessageBox Constants" section at the top of this file to control how the message box will look (which
buttons are available).
:param message: <string> The message to print to the screen
:param title: <string> Title for the message box
:param options: <Integer> (See MessageBox Constants at the top of this file)
:return:
"""
self.logger.debug("<MESSAGE_BOX> Creating MessageBox with: \nTitle: {0}\nMessage: {1}\nOptions: {2}"
.format(title, message, options))
return self.crt.Dialog.MessageBox(message, title, options)
def prompt_window(self, message, title="", hide_input=False):
self.logger.debug("<PROMPT> Creating Prompt with message: '{0}'".format(message))
result = self.crt.Dialog.Prompt(message, title, "", hide_input)
self.logger.debug("<PROMPT> Captures prompt results: '{0}'".format(result))
return result
def file_open_dialog(self, title, open_type, file_filter):
result_filename = ""
self.logger.debug("<FILE_OPEN> Creating File Open Dialog with title: '{0}'".format(title))
result_filename = self.crt.Dialog.FileOpenDialog(title, open_type, result_filename, file_filter)
return result_filename
def write_output_to_file(self, command, filename):
"""
Send the supplied command to the session and writes the output to a file.
This function was written to write output line by line, because storing a large output in a variable
causes SecureCRT to bog down until it freezes. A good example is a large "show tech" output.
This method can handle output of any length.
:param command: The command to be sent to the device
:param filename: The filename for saving the output
"""
self.logger.debug("<WRITE_FILE> Call to write_output_to_file with command: {0}, filename: {1}"
.format(command, filename))
self.validate_path(filename)
self.logger.debug("<WRITE_FILE> Using filename: {0}".format(filename))
# RegEx to match the whitespace and backspace commands after --More-- prompt
exp_more = r' [\b]+[ ]+[\b]+(?P<line>.*)'
re_more = re.compile(exp_more)
# The 3 different types of lines we want to match (MatchIndex) and treat differently
if self.os == "IOS" or self.os == "NXOS":
matches = ["\r\n", '--More--', self.prompt]
elif self.os == "ASA":
matches = ["\r\n", '<--- More --->', self.prompt]
else:
matches = ["\r\n", '--More--', self.prompt]
# Write the output to the specified file
try:
# Need the 'b' in mode 'wb', or else Windows systems add extra blank lines.
with open(filename, 'wb') as newfile:
self.tab.Send(command + "\n")
# Ignore the echo of the command we typed (including linefeed)
self.tab.WaitForString(command.strip(), 30)
# Loop to capture every line of the command. If we get CRLF (first entry in our "endings" list), then
# write that line to the file. If we get our prompt back (which won't have CRLF), break the loop b/c we
# found the end of the output.
while True:
nextline = self.tab.ReadString(matches, 30)
# If the match was the 1st index in the endings list -> \r\n
if self.tab.MatchIndex == 1:
# Strip newlines from front and back of line.
nextline = nextline.strip('\r\n')
# If there is something left, write it.
if nextline != "":
# Check for backspace and spaces after --More-- prompt and strip them out if needed.
regex = re_more.match(nextline)
if regex:
nextline = regex.group('line')
# Strip line endings from line. Also re-encode line as ASCII
# and ignore the character if it can't be done (rare error on
# Nexus)
newfile.write(nextline.strip('\r\n').encode('ascii', 'ignore') + "\r\n")
self.logger.debug("<WRITE_FILE> Writing Line: {0}".format(nextline.strip('\r\n')
.encode('ascii', 'ignore')))
elif self.tab.MatchIndex == 2:
# If we get a --More-- send a space character
self.tab.Send(" ")
elif self.tab.MatchIndex == 3:
# We got our prompt, so break the loop
break
else:
raise DeviceInteractionError("Timeout trying to capture output")
except IOError, err:
error_str = "IO Error for:\n{0}\n\n{1}".format(filename, err)
self.message_box(error_str, "IO Error", ICON_STOP)
def get_command_output(self, command):
"""
Captures the output from the provided command and saves the results in a variable.
** NOTE ** Assigning the output directly to a variable causes problems with SecureCRT for long outputs. It
will gradually get slower and slower until the program freezes and crashes. The workaround is to
save the output directly to a file (line by line), and then read it back into a variable. This is the
procedure that this method uses.
:param command: Command string that should be sent to the device
:return result: Variable holding the result of issuing the above command.
"""
self.logger.debug("<GET OUTPUT> Running get_command_output with input '{0}'".format(command))
# Create a temporary filename
temp_filename = self.create_output_filename("{0}-temp".format(command))
self.logger.debug("<GET OUTPUT> Temp Filename".format(temp_filename))
self.write_output_to_file(command, temp_filename)
with open(temp_filename, 'r') as temp_file:
result = temp_file.read()
if self.settings['debug']:
filename = os.path.split(temp_filename)[1]
new_filename = os.path.join(self.debug_dir, filename)
self.logger.debug("<GET OUTPUT> Moving temp file to {0}".format(new_filename))
os.rename(temp_filename, new_filename)
else:
self.logger.debug("<GET OUTPUT> Deleting {0}".format(temp_filename))
os.remove(temp_filename)
self.logger.debug("<GET OUTPUT> Returning results of size {0}".format(sys.getsizeof(result)))
return result
def create_new_saved_session(self, session_name, ip, protocol="SSH2", folder="_imports"):
now = datetime.datetime.now()
creation_date = now.strftime("%A, %B %d %Y at %H:%M:%S")
# Create a session from the configured default values.
new_session = self.crt.OpenSessionConfiguration("Default")
# Set options based on the passed parameters
new_session.SetOption("Protocol Name", protocol)
new_session.SetOption("Hostname", ip)
desc = ["Created on {} by script:".format(creation_date), self.crt.ScriptFullName]
new_session.SetOption("Description", desc)
session_path = os.path.join(folder, session_name)
# Save session based on passed folder and session name.
self.logger.debug("<CREATE_SESSION> Creating new session '{0}'".format(session_path))
new_session.Save(session_path)
def send_config_commands(self, command_list, output_filename=None):
"""
This method accepts a list of strings, where each string is a command to be sent to the device. This method
will send "conf t", plus all the commands and finally and "end" to the device and write the results to a file.
NOTE: This method is new and does not have any error checking for how the remote device handles the commands
you are trying to send. USE IT AT YOUR OWN RISK.
:param command_list: A list of strings, where each string is a command to be sent. This should NOT include
'config t' or 'end'. This is added automatically.
:return:
"""
self.logger.debug("<SEND_CMDS> Preparing to write commands to device.")
self.logger.debug("<SEND_CMDS> Received: {}".format(str(command_list)))
# Build text commands to send to device, and book-end with "conf t" and "end"
config_results = ""
command_list.insert(0,"configure terminal")
success = True
for command in command_list:
self.tab.Send("{}\n".format(command))
output = self.tab.ReadString(")#", 3)
if output:
config_results += "{})#".format(output)
else:
error = "Did not receive expected prompt after issuing command: {}".format(command)
self.logger.debug("<SEND_CMDS> {}".format(error))
raise DeviceInteractionError("{}".format(error))
self.tab.Send("end\n")
output = self.tab.ReadString(self.prompt, 2)
config_results += "{}{}".format(output, self.prompt)
with open(output_filename, 'w') as output_file:
self.logger.debug("<SEND_CMDS> Writing config session output to: {}".format(output_filename))
output_file.write(config_results.replace("\r", ""))
def save(self):
save_string = "copy running-config startup-config\n\n"
self.logger.debug("<SAVE> Saving configuration on remote device.")
self.tab.Send(save_string)
save_results = self.tab.ReadString(self.prompt)
self.logger.debug("<SAVE> Save results: {}".format(save_results))
class DirectSession(Session):
def __init__(self, full_script_path, settings_importer):
super(DirectSession, self).__init__(full_script_path, settings_importer)
self.logger.debug("<INIT> Building Direct Session Object")
self.prompt = "DebugHost#"
self.hostname = "DebugHost"
valid_response = ["yes", "no"]
response = ""
while response.lower() not in valid_response:
response = raw_input("Is this device already connected?({}): ".format(str(valid_response)))
if response.lower() == "yes":
self.logger.debug("<INIT> Assuming session is already connected")
self._connected = True
valid_os = ["IOS", "NXOS", "ASA"]
response = ""
while response not in valid_os:
response = raw_input("Select OS ({0}): ".format(str(valid_os)))
self.logger.debug("<INIT> Setting OS to {0}".format(response))
self.os = response
else:
self.logger.debug("<INIT> Assuming session is NOT already connected")
self._connected = False
def connect(self, host, username, password=None):
print "Pretending to log into device {} with username {}.".format(host, username)
valid_os = ["IOS", "NXOS", "ASA"]
response = ""
while response not in valid_os:
response = raw_input("Select OS ({0}): ".format(str(valid_os)))
self.logger.debug("<INIT> Setting OS to {0}".format(response))
self.os = response
self._connected = True
def disconnect(self):
print "Prentending to disconnect from device {}.".format(self.hostname)
self._connected = False
def is_connected(self):
return self._connected
def end(self):
pass
def message_box(self, message, title="", options=0):
"""
Simulates the MessageBox from SecureCRT, but in the command line/console window.
:param message: <string> The message to print to the screen
:param title: <string> Title for the message box
:param options: <Integer> (See MessageBox Constants at the top of this file)
:return:
"""
def get_button_layout(option):
# These numbers signify default buttons and icons shown. We don't care about these when using console.
numbers = [512, 256, 64, 48, 32, 16]
for number in numbers:
if option >= number:
option -= number
return option
def get_response_code(text):
responses = {"OK": IDOK, "Cancel": IDCANCEL, "Yes": IDYES, "No": IDNO, "Retry": IDRETRY, "Abort": IDABORT,
"Ignore": IDIGNORE}
return responses[text]
self.logger.debug("<MESSAGEBOX> Creating Message Box, with Title: {0}, Message: {1}, and Options: {2}".format(title, message,
options))
# Extract the button-layout parameter from the options field
layout = get_button_layout(options)
self.logger.debug("<MESSAGEBOX> Layout Value is: {0}".format(layout))
# A mapping of each integer value and which buttons are shown in a MessageBox, so we can prompt for the
# same values from the console
buttons = {BUTTON_OK: ["OK"], BUTTON_CANCEL: ["OK", "Cancel"],
BUTTON_ABORTRETRYIGNORE: ["Abort", "Retry", "Ignore"],
BUTTON_YESNOCANCEL: ["Yes", "No", "Cancel"], BUTTON_YESNO: ["Yes", "No"],
BUTTON_RETRYCANCEL: ["Retry", "Cancel"]}
print "{0}: {1}".format(message, title)
response = ""
while response not in buttons[layout]:
response = raw_input("Choose from {0}: ".format(buttons[layout]))
self.logger.debug("<MESSAGEBOX> Received: {0}".format(response))
code = get_response_code(response)
self.logger.debug("<MESSAGEBOX> Returning Response Code: {0}".format(code))
return code
def prompt_window(self, message, title="", hide_input=False):
self.logger.debug("<PROMPT> Creating Prompt with message: '{0}'".format(message))
result = raw_input("{0}: ".format(message))
self.logger.debug("<PROMPT> Captures prompt results: '{0}'".format(result))
return result
def file_open_dialog(self, title, open_type, file_filter):
result_filename = raw_input("{0}, {1} (type {2}): ".format(open_type, title, file_filter))
return result_filename
def write_output_to_file(self, command, filename):
"""
Imitates the write_output_to_file method from the CRTSession object for debugging purposes. It prompts for
an input file path to open and then writes its contents out the way SecureCRT would.
:param command: <str> The command that gives the output we want to write to a file
:param filename: <str> Output filename to write the output
"""
input_file = ""
while not os.path.isfile(input_file):
input_file = raw_input("Path to file with output from '{0}' ('q' to quit): ".format(command))
if input_file == 'q':
exit(0)
elif not os.path.isfile(input_file):
print "Invalid File, please try again..."
with open(input_file, 'r') as infile:  # 'infile' avoids shadowing the builtin 'input'
input_data = infile.readlines()
self.logger.debug("<WRITE OUTPUT> Call to write_output_to_file with command: {0}, filename: {1}".format(command, filename))
self.validate_path(filename)
self.logger.debug("<WRITE OUTPUT> Using filename: {0}".format(filename))
# Write the output to the specified file
try:
# Need the 'b' in mode 'wb', or else Windows systems add extra blank lines.
with open(filename, 'wb') as newfile:
for line in input_data:
newfile.write(line.strip('\r\n').encode('ascii', 'ignore') + "\r\n")
self.logger.debug("<WRITE OUTPUT> Writing Line: {0}".format(line.strip('\r\n').encode('ascii', 'ignore')))
except IOError, err:
error_str = "IO Error for:\n{0}\n\n{1}".format(filename, err)
self.message_box(error_str, "IO Error", ICON_STOP)
def get_command_output(self, command):
"""
Simulates capturing the output from the provided command and saves the results in a variable, for debugging
purposes.
:param command: Command string that should be sent to the device
:return result: Variable holding the result of issuing the above command.
"""
self.logger.debug("<GET OUTPUT> Running get_command_output with input {0}".format(command))
# Create a temporary filename
temp_filename = self.create_output_filename("{0}-temp".format(command))
self.logger.debug("<GET OUTPUT> Temp Filename".format(temp_filename))
self.write_output_to_file(command, temp_filename)
with open(temp_filename, 'r') as temp_file:
result = temp_file.read()
if self.settings['debug']:
filename = os.path.split(temp_filename)[1]
new_filename = os.path.join(self.debug_dir, filename)
self.logger.debug("<GET OUTPUT> Moving temp file to {0}".format(new_filename))
os.rename(temp_filename, new_filename)
else:
self.logger.debug("<GET OUTPUT> Deleting {0}".format(temp_filename))
os.remove(temp_filename)
self.logger.debug("<GET OUTPUT> Returning results of size {0}".format(sys.getsizeof(result)))
return result
def create_new_saved_session(self, session_name, ip, protocol="SSH2", folder="_imports"):
now = datetime.datetime.now()
creation_date = now.strftime("%A, %B %d %Y at %H:%M:%S")
session_path = os.path.join(folder, session_name)
desc = ["Created on {0} by script:".format(creation_date), os.path.join(self.script_dir, self.script_name)]
print "Simulated saving session '{0}'\n IP: {1}, protocol: {2}\n Description: {3}".format(session_path, ip,
protocol, str(desc))
def send_config_commands(self, command_list, output_filename=None):
self.logger.debug("<SEND CONFIG> Preparing to write commands to device.")
self.logger.debug("<SEND CONFIG> Received: {}".format(str(command_list)))
command_string = ""
command_string += "configure terminal\n"
for command in command_list:
command_string += "{}\n".format(command.strip())
command_string += "end\n"
self.logger.debug("<SEND CONFIG> Final command list:\n {}".format(command_string))
output_filename = self.create_output_filename("CONFIG_RESULT")
config_results = command_string
with open(output_filename, 'w') as output_file:
self.logger.debug("<SEND CONFIG> Writing output to: {}".format(output_filename))
output_file.write("{}{}".format(self.prompt, config_results))
def save(self):
save_string = "copy running-config startup-config"
self.logger.debug("<SAVE> Simulating Saving configuration on remote device.")
print "Saved config."
|
linelist = open('Day 04.input').readlines()
draws = linelist[0].strip().split(',')
b_side = 5
boards = []
# Each board is a dict mapping number -> (row, col), paired with a marker list
# counting hits per row (first b_side slots) and per column (last b_side slots).
for i in range(0, int(len(linelist[1:]) / (b_side + 1))):
board = {}
b_solved = [0] * (b_side * 2)
for row in range(0, b_side):
# Boards start at line 2 and are separated by one blank line
line = linelist[(b_side + 1) * i + row + 2].strip()
nums = line.strip().split()
for col in range(0, b_side):
board[nums[col]] = (row, col)
boards.append([board, b_solved])
winner = False
c = 0
while not winner:
draw = draws[c]
for b in boards:
if draw in b[0]:
row, col = b[0][draw]
b[1][row] += 1
b[1][b_side + col] += 1
b[0].pop(draw, None)
# A full row or column reaches b_side (5) hits: bingo
if 5 in b[1]:
print(b[1])
# Unmarked numbers are exactly the keys left in the dict
s = 0
for k in b[0].keys():
s += int(k)
print(s * int(draw))
winner = True
break
c += 1
|
def define_pipeline():
pass
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import os
from xml.etree import ElementTree
from compat import ConfigParser
from macdown_utils import ROOT_DIR, XCODEBUILD, XLIFF_URL, execute
TX_CONFIG_FILE = os.path.join(ROOT_DIR, '.tx', 'config')
logger = logging.getLogger()
ElementTree.register_namespace('', XLIFF_URL)
def pull_translations(parser):
xliff_dirpath = os.path.abspath(os.path.join(
__file__, '..', '..',
parser.get('macdown.macdownxliff', 'file_filter'),
'..',
))
if os.path.exists(xliff_dirpath):
for fn in os.listdir(xliff_dirpath):
os.remove(os.path.join(xliff_dirpath, fn))
logger.info('Connecting...')
os.system('tx pull -a')
def parse_tx_config():
parser = ConfigParser()
with open(TX_CONFIG_FILE) as f:
parser.read_file(f)
return parser
def fix_translation_codes(parser):
# Get language code mapping (Transifex, Xcode).
def parse_lang_pair(s):
f, t = (c.strip() for c in s.split(':'))
return f.replace('_', '-'), t
code_map = dict(
parse_lang_pair(keymap)
for keymap in parser.get('main', 'lang_map').split(',')
)
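# Illustrative .tx/config excerpt this mapping parses (codes are examples):
#   [main]
#   lang_map = zh_CN: zh-Hans, zh_TW: zh-Hant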
# Get the file pattern.
xliff_dirpath = os.path.dirname(
parser.get('macdown.macdownxliff', 'file_filter'),
)
for fn in os.listdir(xliff_dirpath):
if os.path.splitext(fn)[-1] != '.xliff':
continue
xliff_filepath = os.path.join(xliff_dirpath, fn)
logger.info('Fixing {}'.format(xliff_filepath))
tree = ElementTree.parse(xliff_filepath)
# Fix language codes.
for node in tree.iterfind('xliff:file', {'xliff': XLIFF_URL}):
try:
new_code = code_map[node.get('target-language')]
except KeyError:
pass
else:
node.set('target-language', new_code)
tree.write(
xliff_filepath,
encoding='UTF-8', xml_declaration=True, method='xml',
)
def import_translations(parser):
source_lang = parser.get('macdown.macdownxliff', 'source_lang')
xliff_dirpath = os.path.dirname(
parser.get('macdown.macdownxliff', 'file_filter'),
)
for fn in os.listdir(xliff_dirpath):
stem, ext = os.path.splitext(fn)
if ext != '.xliff' or stem == source_lang:
continue
logger.info('Importing {}'.format(fn))
execute(
XCODEBUILD, '-importLocalizations',
'-localizationPath', os.path.join(xliff_dirpath, fn),
)
def main():
logging.basicConfig(level=logging.INFO)
parser = parse_tx_config()
pull_translations(parser)
fix_translation_codes(parser)
import_translations(parser)
if __name__ == '__main__':
main()
|
"""
Specialization of einops for jittor.
Unfortunately, jittor's jit scripting mechanism isn't strong enough,
and to have scripting supported at least for layers,
a number of changes are required, and this layer helps.
Importantly, the whole lib is designed so that you can't use it
"""
from typing import Dict, List
import jittor as jt
from ..einops_my.einops import TransformRecipe, _reconstruct_from_shape_uncached
class JittorJitBackend:
"""
Completely static backend that mimics part of normal backend functionality
but restricted to jittor stuff only
"""
@staticmethod
def reduce(x: jt.jittor_core.Var, operation: str, reduced_axes: List[int]):
if operation == 'min':
return x.min(dims=reduced_axes)
elif operation == 'max':
return x.max(dims=reduced_axes)
elif operation == 'sum':
return x.sum(dims=reduced_axes)
elif operation == 'mean':
return x.mean(dims=reduced_axes)
elif operation == 'prod':
for i in list(sorted(reduced_axes))[::-1]:
x = x.prod(dim=i)
return x
else:
            raise NotImplementedError('Unknown reduction: {}'.format(operation))
@staticmethod
def transpose(x, axes: List[int]):
return x.permute(axes)
@staticmethod
def stack_on_zeroth_dimension(tensors: List[jt.jittor_core.Var]):
return jt.stack(tensors)
@staticmethod
def tile(x, repeats: List[int]):
return x.repeat(repeats)
@staticmethod
def add_axes(x, n_axes: int, pos2len: Dict[int, int]):
repeats = [1] * n_axes
for axis_position, axis_length in pos2len.items():
x = jt.unsqueeze(x, axis_position)
repeats[axis_position] = axis_length
return JittorJitBackend.tile(x, repeats)
@staticmethod
def is_float_type(x):
return x.dtype in ["float16", "float32", "float64"]
@staticmethod
def shape(x):
return x.shape
@staticmethod
def reshape(x, shape: List[int]):
return x.reshape(shape)
# mirrors einops.einops._apply_recipe
def apply_for_scriptable_jittor(recipe: TransformRecipe, tensor: jt.jittor_core.Var, reduction_type: str) -> jt.jittor_core.Var:
backend = JittorJitBackend
init_shapes, reduced_axes, axes_reordering, added_axes, final_shapes = \
_reconstruct_from_shape_uncached(recipe, backend.shape(tensor))
tensor = backend.reshape(tensor, init_shapes)
if len(reduced_axes) > 0:
tensor = backend.reduce(tensor, operation=reduction_type, reduced_axes=reduced_axes)
tensor = backend.transpose(tensor, axes_reordering)
if len(added_axes) > 0:
tensor = backend.add_axes(tensor, n_axes=len(axes_reordering) + len(added_axes), pos2len=added_axes)
return backend.reshape(tensor, final_shapes)
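# A minimal sketch (not part of the original module) exercising the static
# backend directly; it assumes jittor and numpy are installed and that
# Var.sum/.permute accept the signatures used above.
if __name__ == '__main__':
    import numpy as np
    x = jt.array(np.arange(6.0).reshape(2, 3))
    # Sum out axis 0: shape (2, 3) -> (3,)
    print(JittorJitBackend.reduce(x, 'sum', reduced_axes=[0]))
    # Swap the two axes: shape (2, 3) -> (3, 2)
    print(JittorJitBackend.transpose(x, [1, 0]).shape)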
|
# -*- coding: UTF-8 -*-
from __future__ import unicode_literals
from tutorials.catalog.models import Product
from north.dbutils import babel_values
def P(en, de, fr):
    return Product(**babel_values('name', en=en, de=de, fr=fr))
def objects():
yield P("Chair","Stuhl","Chaise")
yield P("Table","Tisch","Table")
yield P("Monitor","Bildschirm","Écran")
yield P("Mouse","Maus","Souris")
yield P("Keyboard","Tastatur","Clavier")
yield P("Consultation","Beratung","Consultation")
|
# coding: utf-8
"""
MIT License
Copyright (c) 2019 Claude SIMON (https://q37.info/s/rmnmqd49)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import atlastk
import math, colorsys, random
_S_UP = True
_S_DOWN = False
def _round(value):
    return int(math.floor(value + 0.5))
class Tortoise:
    def _reset(self):
        self._posx = 0
        self._posy = 0
        self._angle = 0
        self._color = (0, 0, 0)
        self._autoDrawCount = 0
        self._state = _S_UP
        self._path = atlastk.createHTML()
    def __init__(self, dom, id):
        self._id = id  # id of the SVG element.
        self._dom = dom
        self._autoDraw = 0 if dom is None else 1
        self._reset()
    def _draw(self, dom=None):
        if dom is None:
            dom = self._dom
        dom.appendLayout(self._id, self._path)
        self._path = atlastk.createHTML()
        self._autoDrawCount = 0
        dom.flush()
    def _clear(self, dom):
        if dom is None:
            dom = self._dom
        self._reset()
        dom.setLayout(self._id, atlastk.createHTML())
    def _push(self, x1, y1, x2, y2):
        self._path.pushTag("path")
        self._path.putAttribute(
            "stroke", "rgb({}, {}, {})".format(*self._color))
        self._path.putAttribute("stroke-width", "1")
        self._path.putAttribute("fill", "none")
        self._path.putAttribute('d', "M {} {} L {} {}".format(int(x1), int(y1), int(x2), int(y2)))
        self._path.popTag()
        if self._autoDraw:
            self._autoDrawCount += 1
            if self._autoDrawCount >= self._autoDraw:
                self._draw()
    def getAngle(self):
        return math.degrees(self._angle)
    def up(self):
        self._state = _S_UP
    def down(self):
        self._state = _S_DOWN
    def setAutoDraw(self, value):
        self._autoDraw = value
    def setPosition(self, x, y):
        self._posx = x
        self._posy = y
    def forward(self, distance):
        posx = self._posx + distance * math.sin(self._angle)
        posy = _round(self._posy) - distance * math.cos(self._angle)
        if self._state == _S_DOWN:
            self._push(self._posx, self._posy, posx, posy)
        self._posx = posx
        self._posy = posy
    def setColorRGB(self, r, g, b):  # 0 to 255
        self._color = (r, g, b)
    def setColorHSL(self, h, s, l):  # h: 0-360, s & l: 0-100 (%)
        # Keep the '.0' suffixes: under Python 2, plain integer division
        # would otherwise truncate the ratios to 0.
        self._color = tuple(_round(255 * x)
            for x in colorsys.hls_to_rgb(h / 360.0, l / 100.0, s / 100.0))
    def right(self, angle):
        self._angle = math.radians((math.degrees(self._angle) + angle) % 360)
    def left(self, angle):
        self.right(360 - angle)
    def draw(self, dom=None):
        self._draw(dom)
    def clear(self, dom=None):
        self._clear(dom)
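# A minimal sketch (not part of the original) of how a Tortoise might be
# driven from an atlastk callback; "Board" is a hypothetical id of an SVG
# element in the page layout.
def _demo_square(dom):
    turtle = Tortoise(dom, "Board")
    turtle.down()
    for _ in range(4):  # trace a square
        turtle.forward(50)
        turtle.right(90)
    turtle.draw()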
|
import os
import subprocess
import tkinter
filename_noext = input(']>> ')
# Strip a trailing .txt if the user typed one, then rebuild both names.
filename_noext = filename_noext.replace('.txt', '')
filename_text = filename_noext + '.txt'
filename_ghost = filename_noext + '_render.ps'
filea = open(filename_text, 'r')
lines = filea.read()
lines_split = lines.split('\n')
lines_height = len(lines_split)
lines_width = len(lines_split[0])
print('%d x %d' % (lines_width, lines_height))
lines_height *= 17
lines_width *= 7
filea.close()
t = tkinter.Tk()
c = tkinter.Canvas(t, width=lines_width, height=lines_height)
c.pack()
c.create_text(0, 0, text=lines, anchor="nw", font=("Courier New", 12))
print('Writing Postscript')
c.postscript(file=filename_ghost, width=lines_width, height=lines_height)
t.destroy()
print('Writing PNG')
subprocess.Popen('PNGCREATOR.bat', shell=True, cwd=os.getcwd())
|
# coding: utf-8
class CutWords:
    def __init__(self):
        dict_path = './disease.txt'
        self.word_dict, self.max_wordlen = self.load_words(dict_path)
    # Load the dictionary: one word per line; also track the longest entry.
    def load_words(self, dict_path):
        words = list()
        max_len = 0
        for line in open(dict_path):
            wd = line.strip()
            if not wd:
                continue
            if len(wd) > max_len:
                max_len = len(wd)
            words.append(wd)
        return words, max_len
    # Maximum forward matching.
    def max_forward_cut(self, sent):
        # 1. Take the next m characters of the sentence (left to right) as the
        #    candidate, where m is the length of the longest dictionary entry.
        # 2. Look the candidate up in the dictionary; on a hit, cut it off as
        #    one word, otherwise shrink it by one character and retry.
        cutlist = []
        index = 0
        while index < len(sent):
            matched = False
            for i in range(self.max_wordlen, 0, -1):
                cand_word = sent[index: index + i]
                if cand_word in self.word_dict:
                    cutlist.append(cand_word)
                    matched = True
                    break
            # No dictionary entry matched: cut off a single character.
            if not matched:
                i = 1
                cutlist.append(sent[index])
            index += i
        return cutlist
    # Maximum backward matching.
    def max_backward_cut(self, sent):
        # Same idea as forward matching, but candidates of length i are taken
        # from the right end of the remaining sentence.
        cutlist = []
        index = len(sent)
        while index > 0:
            matched = False
            for i in range(self.max_wordlen, 0, -1):
                cand_word = sent[max(index - i, 0): index]
                # On a dictionary hit, cut the candidate off as one word.
                if cand_word in self.word_dict:
                    cutlist.append(cand_word)
                    matched = True
                    break
            if matched:
                index -= i
            # No dictionary entry matched: cut off a single character.
            else:
                cutlist.append(sent[index - 1])
                index -= 1
        return cutlist[::-1]
    # Bidirectional maximum matching.
    def max_biward_cut(self, sent):
        # Compare the forward and backward segmentations and pick one:
        # 1. If the word counts differ, take the segmentation with fewer words.
        # 2. If the counts are equal: identical results mean no ambiguity, so
        #    either works; otherwise take the one with fewer single-character
        #    words.
        forward_cutlist = self.max_forward_cut(sent)
        backward_cutlist = self.max_backward_cut(sent)
        count_forward = len(forward_cutlist)
        count_backward = len(backward_cutlist)
        def compute_single(word_list):
            return sum(1 for word in word_list if len(word) == 1)
        if count_forward == count_backward:
            if compute_single(forward_cutlist) > compute_single(backward_cutlist):
                return backward_cutlist
            else:
                return forward_cutlist
        elif count_backward > count_forward:
            return forward_cutlist
        else:
            return backward_cutlist
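# A minimal usage sketch (not part of the original); it assumes ./disease.txt
# exists with one dictionary word per line.
if __name__ == '__main__':
    cutter = CutWords()
    print(cutter.max_biward_cut('...'))  # replace '...' with a real sentence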
|
# Generated by Django 3.2 on 2021-04-23 09:19
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('JWTAuth', '0001_initial'),
('forum', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='forum',
name='total_answer',
),
migrations.AlterField(
model_name='answer',
name='upvote',
field=models.ManyToManyField(blank=True, to='JWTAuth.Employee'),
),
migrations.AlterField(
model_name='forum',
name='answer',
field=models.ManyToManyField(blank=True, to='forum.Answer'),
),
migrations.AlterField(
model_name='forum',
name='createdAt',
field=models.DateTimeField(default=datetime.datetime(2021, 4, 23, 16, 19, 12, 765243)),
),
migrations.AlterField(
model_name='forum',
name='topic',
field=models.ManyToManyField(blank=True, to='forum.Topic'),
),
migrations.AlterField(
model_name='forum',
name='upvote',
field=models.ManyToManyField(blank=True, to='JWTAuth.Employee'),
),
]
|
import random
TEXT = "this does not align with our core values"
new_text = ""
def choice(*args, **kwargs):
return random.choice(*args, **kwargs)
def rand(val1, val2):
return random.choice([val1, val2])
def upper(string):
return string.upper()
def lower(string):
return string.lower()
def upper_lower(string):
return rand(upper, lower)(string)
def update_text(string):
global new_text
new_text = new_text + string
def main():
for i in TEXT:
update_text(upper_lower(i))
print(new_text)
if __name__.upper() == '__MAIN__':
main()
|
class Solution:
def searchInsert(self, nums, target):
if len(nums) == 0:
return 0
for idx in range(len(nums)):
if nums[idx] >= target:
return idx
return len(nums)
example = [1, 2]
t = 2
sln = Solution()
print(sln.searchInsert(example, t))
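# The linear scan above is O(n); a binary-search variant (a sketch, not part
# of the original) gives the same answers in O(log n) via the standard
# library's bisect module.
import bisect
class SolutionBisect:
    def searchInsert(self, nums, target):
        # bisect_left returns the first index at which target could be
        # inserted while keeping nums sorted -- exactly the contract above.
        return bisect.bisect_left(nums, target)
print(SolutionBisect().searchInsert(example, t))  # also prints 1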
|
"""
State actions on classical states
Commonly, in order to recover a state you need to compute the action
of Pauli operators on classical basis states.
In this module we provide infrastructure to do this for Pauli Operators from
pyquil.
Given
"""
from functools import reduce
from scipy.sparse import csc_matrix
import numpy as np
from pyquil.paulis import PauliTerm, sX, sZ, sY
def compute_action(classical_state, pauli_operator, num_qubits):
    """
    Compute the action of a Pauli operator on a classical basis state
    The classical state is enumerated so that position 0 of the bit list is
    qubit 0. This is how one usually reads off classical data from the QVM,
    not how the QVM stores computational basis states.
    :param classical_state: binary representation of a state (a list of bits
        with qubit 0 at position 0) or a non-negative integer; an integer is
        expanded with np.binary_repr, so its most significant bit lands at
        position 0
    :param pauli_operator: PauliTerm whose action is computed
    :param num_qubits: number of qubits in the state
    :return: new classical state and the coefficient it picked up.
    """
if not isinstance(pauli_operator, PauliTerm):
raise TypeError("pauli_operator must be a PauliTerm")
if not isinstance(classical_state, (list, int)):
raise TypeError("classical state must be a list or an integer")
if isinstance(classical_state, int):
if classical_state < 0:
raise TypeError("classical_state must be a positive integer")
classical_state = list(map(int, np.binary_repr(classical_state,
width=num_qubits)))
if len(classical_state) != num_qubits:
raise TypeError("classical state not long enough")
# iterate through tensor elements of pauli_operator
new_classical_state = classical_state.copy()
coefficient = 1
for qidx, telem in pauli_operator:
if telem == 'X':
new_classical_state[qidx] = new_classical_state[qidx] ^ 1
elif telem == 'Y':
new_classical_state[qidx] = new_classical_state[qidx] ^ 1
# set coeff
if new_classical_state[qidx] == 0:
coefficient *= -1j
else:
coefficient *= 1j
elif telem == 'Z':
# set coeff
if new_classical_state[qidx] == 1:
coefficient *= -1
return new_classical_state, coefficient
def state_family_generator(state, pauli_operator):
"""
Generate a new state by applying the pauli_operator to each computational bit-string
This is accomplished in a sparse format where a sparse vector is returned
after the action is accumulate in a new list of data and indices
:param csc_matrix state: wavefunction represented as a column sparse matrix
:param PauliTerm pauli_operator: action to apply to the state
:return: new state
:rtype: csc_matrix
"""
if not isinstance(state, csc_matrix):
raise TypeError("we only take csc_matrix")
num_qubits = int(np.log2(state.shape[0]))
new_coeffs = []
new_indices = []
# iterate through non-zero
rindices, cindices = state.nonzero()
for ridx, cidx in zip(rindices, cindices):
# this is so gross looking
bitstring = [int(x) for x in np.binary_repr(ridx, width=num_qubits)][::-1]
new_ket, new_coefficient = compute_action(bitstring, pauli_operator, num_qubits)
new_indices.append(int("".join([str(x) for x in new_ket[::-1]]), 2))
new_coeffs.append(state[ridx, cidx] * new_coefficient * pauli_operator.coefficient)
return csc_matrix((new_coeffs, (new_indices, [0] * len(new_indices))),
shape=(2 ** num_qubits, 1), dtype=complex)
def project_stabilized_state(stabilizer_list, num_qubits=None,
classical_state=None):
"""
Project out the state stabilized by the stabilizer matrix
    |psi> = (1/2^n) * Product_{i=0}^{n-1} [1 + G_i] |vac>
:param List stabilizer_list: set of PauliTerms that are the stabilizers
:param num_qubits: integer number of qubits
:param classical_state: Default None. Defaults to |+>^{\otimes n}
:return: state projected by stabilizers
"""
if num_qubits is None:
num_qubits = len(stabilizer_list)
if classical_state is None:
indptr = np.array([0] * (2 ** num_qubits))
indices = np.arange(2 ** num_qubits)
data = np.ones((2 ** num_qubits)) / np.sqrt((2 ** num_qubits))
else:
if not isinstance(classical_state, list):
raise TypeError("I only accept lists as the classical state")
if len(classical_state) != num_qubits:
raise TypeError("Classical state does not match the number of qubits")
# convert into an integer
ket_idx = int("".join([str(x) for x in classical_state[::-1]]), 2)
indptr = np.array([0])
indices = np.array([ket_idx])
data = np.array([1.])
state = csc_matrix((data, (indices, indptr)), shape=(2 ** num_qubits, 1),
dtype=complex)
for generator in stabilizer_list:
# (I + G(i)) / 2
state += state_family_generator(state, generator)
state /= 2
normalization = (state.conj().T.dot(state)).toarray()
state /= np.sqrt(float(normalization.real))
return state
def pauli_stabilizer_to_binary_stabilizer(stabilizer_list):
"""
Convert a list of stabilizers represented as PauliTerms to a binary tableau form
:param List stabilizer_list: list of stabilizers where each element is a PauliTerm
:return: return an integer matrix representing the stabilizers where each row is a
stabilizer. The size of the matrix is n x (2 * n) where n is the maximum
qubit index.
"""
if not all([isinstance(x, PauliTerm) for x in stabilizer_list]):
raise TypeError("At least one element of stabilizer_list is not a PauliTerm")
max_qubit = max([max(x.get_qubits()) for x in stabilizer_list]) + 1
stabilizer_tableau = np.zeros((len(stabilizer_list), 2 * max_qubit + 1), dtype=int)
for row_idx, term in enumerate(stabilizer_list):
for i, pterm in term: # iterator for each tensor-product element of the Pauli operator
if pterm == 'X':
stabilizer_tableau[row_idx, i] = 1
elif pterm == 'Z':
stabilizer_tableau[row_idx, i + max_qubit] = 1
elif pterm == 'Y':
stabilizer_tableau[row_idx, i] = 1
stabilizer_tableau[row_idx, i + max_qubit] = 1
else:
# term is identity
pass
if not (np.isclose(term.coefficient, -1) or np.isclose(term.coefficient, 1)):
raise ValueError("stabilizers must have a +/- coefficient")
if int(np.sign(term.coefficient.real)) == 1:
stabilizer_tableau[row_idx, -1] = 0
elif int(np.sign(term.coefficient.real)) == -1:
stabilizer_tableau[row_idx, -1] = 1
else:
raise TypeError('unrecognized on pauli term of stabilizer')
return stabilizer_tableau
def binary_stabilizer_to_pauli_stabilizer(stabilizer_tableau):
"""
Convert a stabilizer tableau to a list of PauliTerms
:param stabilizer_tableau: Stabilizer tableau to turn into pauli terms
:return: a list of PauliTerms representing the tableau
    :rtype: List of PauliTerms
"""
stabilizer_list = []
num_qubits = (stabilizer_tableau.shape[1] - 1) // 2
for nn in range(stabilizer_tableau.shape[0]): # iterate through the rows
stabilizer_element = []
for ii in range(num_qubits):
if stabilizer_tableau[nn, ii] == 1 and stabilizer_tableau[nn, ii + num_qubits] == 0:
stabilizer_element.append(sX(ii))
elif stabilizer_tableau[nn, ii] == 0 and stabilizer_tableau[nn, ii + num_qubits] == 1:
stabilizer_element.append(sZ(ii))
elif stabilizer_tableau[nn, ii] == 1 and stabilizer_tableau[nn, ii + num_qubits] == 1:
stabilizer_element.append(sY(ii))
stabilizer_term = reduce(lambda x, y: x * y, stabilizer_element) * ((-1) ** stabilizer_tableau[nn, -1])
stabilizer_list.append(stabilizer_term)
return stabilizer_list
def symplectic_inner_product(vector1, vector2):
"""
Operators commute if the symplectic inner product of their binary form is zero
Operators anticommute if symplectic inner product of their binary form is one
:param vector1: binary form of operator with no sign info
:param vector2: binary form of a pauli operator with no sign info
:return: 0, 1
"""
if vector1.shape != vector2.shape:
raise ValueError("vectors must be the same size.")
# TODO: add a check for binary or integer linear arrays
hadamard_product = np.multiply(vector1, vector2)
return reduce(lambda x, y: x ^ y, hadamard_product)
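# A minimal usage sketch (not part of the original; assumes pyquil, numpy and
# scipy are installed as imported above).
if __name__ == '__main__':
    # X on qubit 0 flips that bit and picks up no phase:
    print(compute_action([0, 1], sX(0), num_qubits=2))  # -> ([1, 1], 1)
    # Round-trip a stabilizer set through the binary tableau form:
    tableau = pauli_stabilizer_to_binary_stabilizer([sZ(0) * sZ(1), sX(0) * sX(1)])
    print(binary_stabilizer_to_pauli_stabilizer(tableau))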
|
import json
from django.contrib.gis.db import models
class Geo(models.Model):
STATE_TYPE, COUNTY_TYPE, TRACT_TYPE, METRO_TYPE, MICRO_TYPE = range(1, 6)
METDIV_TYPE, = range(6, 7)
TYPES = [(STATE_TYPE, 'State'), (COUNTY_TYPE, 'County'),
(TRACT_TYPE, 'Census Tract'), (METRO_TYPE, 'Metropolitan'),
(MICRO_TYPE, 'Micropolitan'),
(METDIV_TYPE, 'Metropolitan Division')]
geoid = models.CharField(max_length=20, primary_key=True)
geo_type = models.PositiveIntegerField(choices=TYPES, db_index=True)
name = models.CharField(max_length=50)
state = models.CharField(max_length=2, null=True)
county = models.CharField(max_length=3, null=True)
tract = models.CharField(max_length=6, null=True)
csa = models.CharField(max_length=3, null=True,
help_text='Combined Statistical Area')
cbsa = models.CharField(max_length=5, null=True,
help_text='Core Based Statistical Area')
metdiv = models.CharField(max_length=5, null=True,
help_text='Metro Division')
geom = models.MultiPolygonField(srid=4269)
minlat = models.FloatField()
maxlat = models.FloatField()
minlon = models.FloatField()
maxlon = models.FloatField()
centlat = models.FloatField()
centlon = models.FloatField()
objects = models.GeoManager()
class Meta:
index_together = [("geo_type", "minlat", "minlon"),
("geo_type", "minlat", "maxlon"),
("geo_type", "maxlat", "minlon"),
("geo_type", "maxlat", "maxlon"),
("geo_type", "centlat", "centlon"),
("geo_type", "cbsa")]
def tract_centroids_as_geojson(self):
"""Convert this model into a geojson string"""
geojson = {'type': 'Feature',
'properties': {
'geoid': self.geoid,
'geoType': self.geo_type,
'state': self.state,
'county': self.county,
'cbsa': self.cbsa,
'centlat': self.centlat,
'centlon': self.centlon}}
geojson = json.dumps(geojson)
return geojson
def tract_shape_as_geojson(self):
"""Convert this model into a geojson string"""
geojson = {'type': 'Feature',
'geometry': '$_$', # placeholder
'properties': {
'geoid': self.geoid,
'geoType': self.geo_type,
'state': self.state,
'county': self.county,
'cbsa': self.cbsa,
'centlat': self.centlat,
'centlon': self.centlon}}
geojson = json.dumps(geojson)
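        # Splice the raw GeoJSON geometry string in afterwards; running it
        # through json.dumps above would have escaped it as a quoted string.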
return geojson.replace(
'"$_$"',
self.geom.simplify(preserve_topology=True).geojson)
|
#
# PySNMP MIB module FC-MGMT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/FC-MGMT-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 16:50:13 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ConstraintsUnion, ValueRangeConstraint, ValueSizeConstraint, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "ValueRangeConstraint", "ValueSizeConstraint", "SingleValueConstraint")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
ModuleCompliance, NotificationGroup, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup")
Counter32, TimeTicks, ObjectIdentity, ModuleIdentity, MibIdentifier, iso, Counter64, Bits, Integer32, IpAddress, transmission, MibScalar, MibTable, MibTableRow, MibTableColumn, Unsigned32, NotificationType, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "TimeTicks", "ObjectIdentity", "ModuleIdentity", "MibIdentifier", "iso", "Counter64", "Bits", "Integer32", "IpAddress", "transmission", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Unsigned32", "NotificationType", "Gauge32")
TextualConvention, DisplayString, TruthValue = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString", "TruthValue")
fcMgmtMIB = ModuleIdentity((1, 3, 6, 1, 2, 1, 10, 56))
fcMgmtMIB.setRevisions(('2005-04-26 00:00',))
if mibBuilder.loadTexts: fcMgmtMIB.setLastUpdated('200504260000Z')
if mibBuilder.loadTexts: fcMgmtMIB.setOrganization('IETF IPS (IP-Storage) Working Group')
fcmgmtObjects = MibIdentifier((1, 3, 6, 1, 2, 1, 10, 56, 1))
fcmgmtNotifications = MibIdentifier((1, 3, 6, 1, 2, 1, 10, 56, 2))
fcmgmtNotifPrefix = MibIdentifier((1, 3, 6, 1, 2, 1, 10, 56, 2, 0))
fcmgmtConformance = MibIdentifier((1, 3, 6, 1, 2, 1, 10, 56, 3))
class FcNameIdOrZero(TextualConvention, OctetString):
status = 'current'
subtypeSpec = OctetString.subtypeSpec + ConstraintsUnion(ValueSizeConstraint(0, 0), ValueSizeConstraint(8, 8), ValueSizeConstraint(16, 16), )
class FcAddressIdOrZero(TextualConvention, OctetString):
status = 'current'
subtypeSpec = OctetString.subtypeSpec + ConstraintsUnion(ValueSizeConstraint(0, 0), ValueSizeConstraint(3, 3), )
class FcDomainIdOrZero(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 239)
class FcPortType(TextualConvention, Unsigned32):
reference = 'The IANA-maintained registry for Fibre Channel port types (http://www.iana.org/).'
status = 'current'
class FcClasses(TextualConvention, Bits):
reference = 'Classes of service are described in FC-FS Section 13.'
status = 'current'
namedValues = NamedValues(("classF", 0), ("class1", 1), ("class2", 2), ("class3", 3), ("class4", 4), ("class5", 5), ("class6", 6))
class FcBbCredit(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 32767)
class FcBbCreditModel(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("regular", 1), ("alternate", 2))
class FcDataFieldSize(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(128, 2112)
class FcUnitFunctions(TextualConvention, Bits):
status = 'current'
namedValues = NamedValues(("other", 0), ("hub", 1), ("switch", 2), ("bridge", 3), ("gateway", 4), ("host", 5), ("storageSubsys", 6), ("storageAccessDev", 7), ("nas", 8), ("wdmux", 9), ("storageDevice", 10))
fcmInstanceTable = MibTable((1, 3, 6, 1, 2, 1, 10, 56, 1, 1), )
if mibBuilder.loadTexts: fcmInstanceTable.setStatus('current')
fcmInstanceEntry = MibTableRow((1, 3, 6, 1, 2, 1, 10, 56, 1, 1, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"))
if mibBuilder.loadTexts: fcmInstanceEntry.setStatus('current')
fcmInstanceIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: fcmInstanceIndex.setStatus('current')
fcmInstanceWwn = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 1, 1, 2), FcNameIdOrZero()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmInstanceWwn.setStatus('current')
fcmInstanceFunctions = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 1, 1, 3), FcUnitFunctions()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmInstanceFunctions.setStatus('current')
fcmInstancePhysicalIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmInstancePhysicalIndex.setStatus('current')
fcmInstanceSoftwareIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmInstanceSoftwareIndex.setStatus('current')
fcmInstanceStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("unknown", 1), ("ok", 2), ("warning", 3), ("failed", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmInstanceStatus.setStatus('current')
fcmInstanceTextName = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 1, 1, 7), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 79))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fcmInstanceTextName.setStatus('current')
fcmInstanceDescr = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 1, 1, 8), SnmpAdminString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fcmInstanceDescr.setStatus('current')
fcmInstanceFabricId = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 1, 1, 9), FcNameIdOrZero()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmInstanceFabricId.setStatus('current')
fcmSwitchTable = MibTable((1, 3, 6, 1, 2, 1, 10, 56, 1, 2), )
if mibBuilder.loadTexts: fcmSwitchTable.setStatus('current')
fcmSwitchEntry = MibTableRow((1, 3, 6, 1, 2, 1, 10, 56, 1, 2, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "FC-MGMT-MIB", "fcmSwitchIndex"))
if mibBuilder.loadTexts: fcmSwitchEntry.setStatus('current')
fcmSwitchIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 2, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: fcmSwitchIndex.setStatus('current')
fcmSwitchDomainId = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 2, 1, 2), FcDomainIdOrZero()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fcmSwitchDomainId.setStatus('current')
fcmSwitchPrincipal = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 2, 1, 3), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmSwitchPrincipal.setStatus('current')
fcmSwitchWWN = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 2, 1, 4), FcNameIdOrZero()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmSwitchWWN.setStatus('current')
fcmPortTable = MibTable((1, 3, 6, 1, 2, 1, 10, 56, 1, 3), )
if mibBuilder.loadTexts: fcmPortTable.setStatus('current')
fcmPortEntry = MibTableRow((1, 3, 6, 1, 2, 1, 10, 56, 1, 3, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: fcmPortEntry.setStatus('current')
fcmPortInstanceIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 3, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortInstanceIndex.setStatus('current')
fcmPortWwn = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 3, 1, 2), FcNameIdOrZero()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortWwn.setStatus('current')
fcmPortNodeWwn = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 3, 1, 3), FcNameIdOrZero()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortNodeWwn.setStatus('current')
fcmPortAdminType = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 3, 1, 4), FcPortType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fcmPortAdminType.setStatus('current')
fcmPortOperType = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 3, 1, 5), FcPortType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortOperType.setStatus('current')
fcmPortFcCapClass = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 3, 1, 6), FcClasses()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortFcCapClass.setStatus('current')
fcmPortFcOperClass = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 3, 1, 7), FcClasses()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortFcOperClass.setStatus('current')
fcmPortTransmitterType = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 3, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("unknown", 1), ("other", 2), ("shortwave850nm", 3), ("longwave1550nm", 4), ("longwave1310nm", 5), ("electrical", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortTransmitterType.setStatus('current')
fcmPortConnectorType = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 3, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("unknown", 1), ("other", 2), ("gbic", 3), ("embedded", 4), ("glm", 5), ("gbicSerialId", 6), ("gbicNoSerialId", 7), ("sfpSerialId", 8), ("sfpNoSerialId", 9)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortConnectorType.setStatus('current')
fcmPortSerialNumber = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 3, 1, 10), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortSerialNumber.setStatus('current')
fcmPortPhysicalNumber = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 3, 1, 11), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortPhysicalNumber.setStatus('current')
fcmPortAdminSpeed = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 3, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("auto", 1), ("eighthGbs", 2), ("quarterGbs", 3), ("halfGbs", 4), ("oneGbs", 5), ("twoGbs", 6), ("fourGbs", 7), ("tenGbs", 8)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fcmPortAdminSpeed.setStatus('current')
fcmPortCapProtocols = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 3, 1, 13), Bits().clone(namedValues=NamedValues(("unknown", 0), ("loop", 1), ("fabric", 2), ("scsi", 3), ("tcpIp", 4), ("vi", 5), ("ficon", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortCapProtocols.setStatus('current')
fcmPortOperProtocols = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 3, 1, 14), Bits().clone(namedValues=NamedValues(("unknown", 0), ("loop", 1), ("fabric", 2), ("scsi", 3), ("tcpIp", 4), ("vi", 5), ("ficon", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortOperProtocols.setStatus('current')
fcmPortStatsTable = MibTable((1, 3, 6, 1, 2, 1, 10, 56, 1, 4), )
if mibBuilder.loadTexts: fcmPortStatsTable.setStatus('current')
fcmPortStatsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1), )
fcmPortEntry.registerAugmentions(("FC-MGMT-MIB", "fcmPortStatsEntry"))
fcmPortStatsEntry.setIndexNames(*fcmPortEntry.getIndexNames())
if mibBuilder.loadTexts: fcmPortStatsEntry.setStatus('current')
fcmPortBBCreditZeros = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 1), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortBBCreditZeros.setStatus('current')
fcmPortFullInputBuffers = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortFullInputBuffers.setStatus('current')
fcmPortClass2RxFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClass2RxFrames.setStatus('current')
fcmPortClass2RxOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClass2RxOctets.setStatus('current')
fcmPortClass2TxFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClass2TxFrames.setStatus('current')
fcmPortClass2TxOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClass2TxOctets.setStatus('current')
fcmPortClass2Discards = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClass2Discards.setStatus('current')
fcmPortClass2RxFbsyFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClass2RxFbsyFrames.setStatus('current')
fcmPortClass2RxPbsyFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClass2RxPbsyFrames.setStatus('current')
fcmPortClass2RxFrjtFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClass2RxFrjtFrames.setStatus('current')
fcmPortClass2RxPrjtFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClass2RxPrjtFrames.setStatus('current')
fcmPortClass2TxFbsyFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 12), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClass2TxFbsyFrames.setStatus('current')
fcmPortClass2TxPbsyFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 13), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClass2TxPbsyFrames.setStatus('current')
fcmPortClass2TxFrjtFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 14), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClass2TxFrjtFrames.setStatus('current')
fcmPortClass2TxPrjtFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 15), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClass2TxPrjtFrames.setStatus('current')
fcmPortClass3RxFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 16), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClass3RxFrames.setStatus('current')
fcmPortClass3RxOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 17), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClass3RxOctets.setStatus('current')
fcmPortClass3TxFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 18), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClass3TxFrames.setStatus('current')
fcmPortClass3TxOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 19), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClass3TxOctets.setStatus('current')
fcmPortClass3Discards = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 20), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClass3Discards.setStatus('current')
fcmPortClassFRxFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClassFRxFrames.setStatus('current')
fcmPortClassFRxOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 22), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClassFRxOctets.setStatus('current')
fcmPortClassFTxFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 23), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClassFTxFrames.setStatus('current')
fcmPortClassFTxOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 24), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClassFTxOctets.setStatus('current')
fcmPortClassFDiscards = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 4, 1, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortClassFDiscards.setStatus('current')
fcmPortLcStatsTable = MibTable((1, 3, 6, 1, 2, 1, 10, 56, 1, 5), )
if mibBuilder.loadTexts: fcmPortLcStatsTable.setStatus('current')
fcmPortLcStatsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1), )
fcmPortEntry.registerAugmentions(("FC-MGMT-MIB", "fcmPortLcStatsEntry"))
fcmPortLcStatsEntry.setIndexNames(*fcmPortEntry.getIndexNames())
if mibBuilder.loadTexts: fcmPortLcStatsEntry.setStatus('current')
fcmPortLcBBCreditZeros = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLcBBCreditZeros.setStatus('current')
fcmPortLcFullInputBuffers = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLcFullInputBuffers.setStatus('current')
fcmPortLcClass2RxFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLcClass2RxFrames.setStatus('current')
fcmPortLcClass2RxOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLcClass2RxOctets.setStatus('current')
fcmPortLcClass2TxFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLcClass2TxFrames.setStatus('current')
fcmPortLcClass2TxOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLcClass2TxOctets.setStatus('current')
fcmPortLcClass2Discards = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLcClass2Discards.setStatus('current')
fcmPortLcClass2RxFbsyFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLcClass2RxFbsyFrames.setStatus('current')
fcmPortLcClass2RxPbsyFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLcClass2RxPbsyFrames.setStatus('current')
fcmPortLcClass2RxFrjtFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLcClass2RxFrjtFrames.setStatus('current')
fcmPortLcClass2RxPrjtFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLcClass2RxPrjtFrames.setStatus('current')
fcmPortLcClass2TxFbsyFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLcClass2TxFbsyFrames.setStatus('current')
fcmPortLcClass2TxPbsyFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLcClass2TxPbsyFrames.setStatus('current')
fcmPortLcClass2TxFrjtFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLcClass2TxFrjtFrames.setStatus('current')
fcmPortLcClass2TxPrjtFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLcClass2TxPrjtFrames.setStatus('current')
fcmPortLcClass3RxFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLcClass3RxFrames.setStatus('current')
fcmPortLcClass3RxOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLcClass3RxOctets.setStatus('current')
fcmPortLcClass3TxFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLcClass3TxFrames.setStatus('current')
fcmPortLcClass3TxOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLcClass3TxOctets.setStatus('current')
fcmPortLcClass3Discards = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 5, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLcClass3Discards.setStatus('current')
fcmPortErrorsTable = MibTable((1, 3, 6, 1, 2, 1, 10, 56, 1, 6), )
if mibBuilder.loadTexts: fcmPortErrorsTable.setStatus('current')
fcmPortErrorsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 10, 56, 1, 6, 1), )
fcmPortEntry.registerAugmentions(("FC-MGMT-MIB", "fcmPortErrorsEntry"))
fcmPortErrorsEntry.setIndexNames(*fcmPortEntry.getIndexNames())
if mibBuilder.loadTexts: fcmPortErrorsEntry.setStatus('current')
fcmPortRxLinkResets = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 6, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortRxLinkResets.setStatus('current')
fcmPortTxLinkResets = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 6, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortTxLinkResets.setStatus('current')
fcmPortLinkResets = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 6, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLinkResets.setStatus('current')
fcmPortRxOfflineSequences = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 6, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortRxOfflineSequences.setStatus('current')
fcmPortTxOfflineSequences = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 6, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortTxOfflineSequences.setStatus('current')
fcmPortLinkFailures = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 6, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLinkFailures.setStatus('current')
fcmPortLossofSynchs = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 6, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLossofSynchs.setStatus('current')
fcmPortLossofSignals = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 6, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortLossofSignals.setStatus('current')
fcmPortPrimSeqProtocolErrors = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 6, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortPrimSeqProtocolErrors.setStatus('current')
fcmPortInvalidTxWords = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 6, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortInvalidTxWords.setStatus('current')
fcmPortInvalidCRCs = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 6, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortInvalidCRCs.setStatus('current')
fcmPortInvalidOrderedSets = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 6, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortInvalidOrderedSets.setStatus('current')
fcmPortFrameTooLongs = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 6, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortFrameTooLongs.setStatus('current')
fcmPortTruncatedFrames = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 6, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortTruncatedFrames.setStatus('current')
fcmPortAddressErrors = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 6, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortAddressErrors.setStatus('current')
fcmPortDelimiterErrors = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 6, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortDelimiterErrors.setStatus('current')
fcmPortEncodingDisparityErrors = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 6, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortEncodingDisparityErrors.setStatus('current')
fcmPortOtherErrors = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 6, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmPortOtherErrors.setStatus('current')
fcmFxPortTable = MibTable((1, 3, 6, 1, 2, 1, 10, 56, 1, 7), )
if mibBuilder.loadTexts: fcmFxPortTable.setStatus('current')
fcmFxPortEntry = MibTableRow((1, 3, 6, 1, 2, 1, 10, 56, 1, 7, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: fcmFxPortEntry.setStatus('current')
fcmFxPortRatov = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 7, 1, 1), Unsigned32()).setUnits('milliseconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFxPortRatov.setStatus('current')
fcmFxPortEdtov = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 7, 1, 2), Unsigned32()).setUnits('milliseconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFxPortEdtov.setStatus('current')
fcmFxPortRttov = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 7, 1, 3), Unsigned32()).setUnits('milliseconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFxPortRttov.setStatus('current')
fcmFxPortHoldTime = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 7, 1, 4), Unsigned32()).setUnits('microseconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFxPortHoldTime.setStatus('current')
fcmFxPortCapBbCreditMax = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 7, 1, 5), FcBbCredit()).setUnits('buffers').setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFxPortCapBbCreditMax.setStatus('current')
fcmFxPortCapBbCreditMin = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 7, 1, 6), FcBbCredit()).setUnits('buffers').setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFxPortCapBbCreditMin.setStatus('current')
fcmFxPortCapDataFieldSizeMax = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 7, 1, 7), FcDataFieldSize()).setUnits('bytes').setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFxPortCapDataFieldSizeMax.setStatus('current')
fcmFxPortCapDataFieldSizeMin = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 7, 1, 8), FcDataFieldSize()).setUnits('bytes').setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFxPortCapDataFieldSizeMin.setStatus('current')
fcmFxPortCapClass2SeqDeliv = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 7, 1, 9), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFxPortCapClass2SeqDeliv.setStatus('current')
fcmFxPortCapClass3SeqDeliv = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 7, 1, 10), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFxPortCapClass3SeqDeliv.setStatus('current')
fcmFxPortCapHoldTimeMax = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 7, 1, 11), Unsigned32()).setUnits('microseconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFxPortCapHoldTimeMax.setStatus('current')
fcmFxPortCapHoldTimeMin = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 7, 1, 12), Unsigned32()).setUnits('microseconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFxPortCapHoldTimeMin.setStatus('current')
fcmISPortTable = MibTable((1, 3, 6, 1, 2, 1, 10, 56, 1, 8), )
if mibBuilder.loadTexts: fcmISPortTable.setStatus('current')
fcmISPortEntry = MibTableRow((1, 3, 6, 1, 2, 1, 10, 56, 1, 8, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: fcmISPortEntry.setStatus('current')
fcmISPortClassFCredit = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 8, 1, 1), FcBbCredit()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: fcmISPortClassFCredit.setStatus('current')
fcmISPortClassFDataFieldSize = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 8, 1, 2), FcDataFieldSize()).setUnits('bytes').setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmISPortClassFDataFieldSize.setStatus('current')
fcmFLoginTable = MibTable((1, 3, 6, 1, 2, 1, 10, 56, 1, 9), )
if mibBuilder.loadTexts: fcmFLoginTable.setStatus('current')
fcmFLoginEntry = MibTableRow((1, 3, 6, 1, 2, 1, 10, 56, 1, 9, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "FC-MGMT-MIB", "fcmFLoginNxPortIndex"))
if mibBuilder.loadTexts: fcmFLoginEntry.setStatus('current')
fcmFLoginNxPortIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 9, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: fcmFLoginNxPortIndex.setStatus('current')
fcmFLoginPortWwn = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 9, 1, 2), FcNameIdOrZero()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFLoginPortWwn.setStatus('current')
fcmFLoginNodeWwn = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 9, 1, 3), FcNameIdOrZero()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFLoginNodeWwn.setStatus('current')
fcmFLoginBbCreditModel = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 9, 1, 4), FcBbCreditModel()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFLoginBbCreditModel.setStatus('current')
fcmFLoginBbCredit = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 9, 1, 5), FcBbCredit()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFLoginBbCredit.setStatus('current')
fcmFLoginClassesAgreed = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 9, 1, 6), FcClasses()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFLoginClassesAgreed.setStatus('current')
fcmFLoginClass2SeqDelivAgreed = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 9, 1, 7), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFLoginClass2SeqDelivAgreed.setStatus('current')
fcmFLoginClass2DataFieldSize = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 9, 1, 8), FcDataFieldSize()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFLoginClass2DataFieldSize.setStatus('current')
fcmFLoginClass3SeqDelivAgreed = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 9, 1, 9), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFLoginClass3SeqDelivAgreed.setStatus('current')
fcmFLoginClass3DataFieldSize = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 9, 1, 10), FcDataFieldSize()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmFLoginClass3DataFieldSize.setStatus('current')
fcmLinkTable = MibTable((1, 3, 6, 1, 2, 1, 10, 56, 1, 10), )
if mibBuilder.loadTexts: fcmLinkTable.setStatus('current')
fcmLinkEntry = MibTableRow((1, 3, 6, 1, 2, 1, 10, 56, 1, 10, 1), ).setIndexNames((0, "FC-MGMT-MIB", "fcmInstanceIndex"), (0, "FC-MGMT-MIB", "fcmLinkIndex"))
if mibBuilder.loadTexts: fcmLinkEntry.setStatus('current')
fcmLinkIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 10, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: fcmLinkIndex.setStatus('current')
fcmLinkEnd1NodeWwn = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 10, 1, 2), FcNameIdOrZero()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmLinkEnd1NodeWwn.setStatus('current')
fcmLinkEnd1PhysPortNumber = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 10, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmLinkEnd1PhysPortNumber.setStatus('current')
fcmLinkEnd1PortWwn = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 10, 1, 4), FcNameIdOrZero()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmLinkEnd1PortWwn.setStatus('current')
fcmLinkEnd2NodeWwn = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 10, 1, 5), FcNameIdOrZero()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmLinkEnd2NodeWwn.setStatus('current')
fcmLinkEnd2PhysPortNumber = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 10, 1, 6), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmLinkEnd2PhysPortNumber.setStatus('current')
fcmLinkEnd2PortWwn = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 10, 1, 7), FcNameIdOrZero()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmLinkEnd2PortWwn.setStatus('current')
fcmLinkEnd2AgentAddress = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 10, 1, 8), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmLinkEnd2AgentAddress.setStatus('current')
fcmLinkEnd2PortType = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 10, 1, 9), FcPortType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmLinkEnd2PortType.setStatus('current')
fcmLinkEnd2UnitType = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 10, 1, 10), FcUnitFunctions()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmLinkEnd2UnitType.setStatus('current')
fcmLinkEnd2FcAddressId = MibTableColumn((1, 3, 6, 1, 2, 1, 10, 56, 1, 10, 1, 11), FcAddressIdOrZero()).setMaxAccess("readonly")
if mibBuilder.loadTexts: fcmLinkEnd2FcAddressId.setStatus('current')
fcmgmtCompliances = MibIdentifier((1, 3, 6, 1, 2, 1, 10, 56, 3, 1))
fcmgmtGroups = MibIdentifier((1, 3, 6, 1, 2, 1, 10, 56, 3, 2))
fcmgmtCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 10, 56, 3, 1, 1)).setObjects(("FC-MGMT-MIB", "fcmInstanceBasicGroup"), ("FC-MGMT-MIB", "fcmPortBasicGroup"), ("FC-MGMT-MIB", "fcmPortErrorsGroup"), ("FC-MGMT-MIB", "fcmPortStatsGroup"), ("FC-MGMT-MIB", "fcmPortClass23StatsGroup"), ("FC-MGMT-MIB", "fcmPortClassFStatsGroup"), ("FC-MGMT-MIB", "fcmPortLcStatsGroup"), ("FC-MGMT-MIB", "fcmSwitchBasicGroup"), ("FC-MGMT-MIB", "fcmSwitchPortGroup"), ("FC-MGMT-MIB", "fcmSwitchLoginGroup"), ("FC-MGMT-MIB", "fcmLinkBasicGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
fcmgmtCompliance = fcmgmtCompliance.setStatus('current')
fcmInstanceBasicGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 10, 56, 3, 2, 1)).setObjects(("FC-MGMT-MIB", "fcmInstanceWwn"), ("FC-MGMT-MIB", "fcmInstanceFunctions"), ("FC-MGMT-MIB", "fcmInstancePhysicalIndex"), ("FC-MGMT-MIB", "fcmInstanceSoftwareIndex"), ("FC-MGMT-MIB", "fcmInstanceStatus"), ("FC-MGMT-MIB", "fcmInstanceTextName"), ("FC-MGMT-MIB", "fcmInstanceDescr"), ("FC-MGMT-MIB", "fcmInstanceFabricId"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
fcmInstanceBasicGroup = fcmInstanceBasicGroup.setStatus('current')
fcmSwitchBasicGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 10, 56, 3, 2, 2)).setObjects(("FC-MGMT-MIB", "fcmSwitchDomainId"), ("FC-MGMT-MIB", "fcmSwitchPrincipal"), ("FC-MGMT-MIB", "fcmSwitchWWN"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
fcmSwitchBasicGroup = fcmSwitchBasicGroup.setStatus('current')
fcmPortBasicGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 10, 56, 3, 2, 3)).setObjects(("FC-MGMT-MIB", "fcmPortInstanceIndex"), ("FC-MGMT-MIB", "fcmPortWwn"), ("FC-MGMT-MIB", "fcmPortNodeWwn"), ("FC-MGMT-MIB", "fcmPortAdminType"), ("FC-MGMT-MIB", "fcmPortOperType"), ("FC-MGMT-MIB", "fcmPortFcCapClass"), ("FC-MGMT-MIB", "fcmPortFcOperClass"), ("FC-MGMT-MIB", "fcmPortTransmitterType"), ("FC-MGMT-MIB", "fcmPortConnectorType"), ("FC-MGMT-MIB", "fcmPortSerialNumber"), ("FC-MGMT-MIB", "fcmPortPhysicalNumber"), ("FC-MGMT-MIB", "fcmPortAdminSpeed"), ("FC-MGMT-MIB", "fcmPortCapProtocols"), ("FC-MGMT-MIB", "fcmPortOperProtocols"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
fcmPortBasicGroup = fcmPortBasicGroup.setStatus('current')
fcmPortStatsGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 10, 56, 3, 2, 4)).setObjects(("FC-MGMT-MIB", "fcmPortBBCreditZeros"), ("FC-MGMT-MIB", "fcmPortFullInputBuffers"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
fcmPortStatsGroup = fcmPortStatsGroup.setStatus('current')
fcmPortClass23StatsGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 10, 56, 3, 2, 5)).setObjects(("FC-MGMT-MIB", "fcmPortClass2RxFrames"), ("FC-MGMT-MIB", "fcmPortClass2RxOctets"), ("FC-MGMT-MIB", "fcmPortClass2TxFrames"), ("FC-MGMT-MIB", "fcmPortClass2TxOctets"), ("FC-MGMT-MIB", "fcmPortClass2Discards"), ("FC-MGMT-MIB", "fcmPortClass2RxFbsyFrames"), ("FC-MGMT-MIB", "fcmPortClass2RxPbsyFrames"), ("FC-MGMT-MIB", "fcmPortClass2RxFrjtFrames"), ("FC-MGMT-MIB", "fcmPortClass2RxPrjtFrames"), ("FC-MGMT-MIB", "fcmPortClass2TxFbsyFrames"), ("FC-MGMT-MIB", "fcmPortClass2TxPbsyFrames"), ("FC-MGMT-MIB", "fcmPortClass2TxFrjtFrames"), ("FC-MGMT-MIB", "fcmPortClass2TxPrjtFrames"), ("FC-MGMT-MIB", "fcmPortClass3RxFrames"), ("FC-MGMT-MIB", "fcmPortClass3RxOctets"), ("FC-MGMT-MIB", "fcmPortClass3TxFrames"), ("FC-MGMT-MIB", "fcmPortClass3TxOctets"), ("FC-MGMT-MIB", "fcmPortClass3Discards"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
fcmPortClass23StatsGroup = fcmPortClass23StatsGroup.setStatus('current')
fcmPortClassFStatsGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 10, 56, 3, 2, 6)).setObjects(("FC-MGMT-MIB", "fcmPortClassFRxFrames"), ("FC-MGMT-MIB", "fcmPortClassFRxOctets"), ("FC-MGMT-MIB", "fcmPortClassFTxFrames"), ("FC-MGMT-MIB", "fcmPortClassFTxOctets"), ("FC-MGMT-MIB", "fcmPortClassFDiscards"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
fcmPortClassFStatsGroup = fcmPortClassFStatsGroup.setStatus('current')
fcmPortLcStatsGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 10, 56, 3, 2, 7)).setObjects(("FC-MGMT-MIB", "fcmPortLcBBCreditZeros"), ("FC-MGMT-MIB", "fcmPortLcFullInputBuffers"), ("FC-MGMT-MIB", "fcmPortLcClass2RxFrames"), ("FC-MGMT-MIB", "fcmPortLcClass2RxOctets"), ("FC-MGMT-MIB", "fcmPortLcClass2TxFrames"), ("FC-MGMT-MIB", "fcmPortLcClass2TxOctets"), ("FC-MGMT-MIB", "fcmPortLcClass2Discards"), ("FC-MGMT-MIB", "fcmPortLcClass3Discards"), ("FC-MGMT-MIB", "fcmPortLcClass3RxFrames"), ("FC-MGMT-MIB", "fcmPortLcClass3RxOctets"), ("FC-MGMT-MIB", "fcmPortLcClass3TxFrames"), ("FC-MGMT-MIB", "fcmPortLcClass3TxOctets"), ("FC-MGMT-MIB", "fcmPortLcClass2RxFbsyFrames"), ("FC-MGMT-MIB", "fcmPortLcClass2RxPbsyFrames"), ("FC-MGMT-MIB", "fcmPortLcClass2RxFrjtFrames"), ("FC-MGMT-MIB", "fcmPortLcClass2RxPrjtFrames"), ("FC-MGMT-MIB", "fcmPortLcClass2TxFbsyFrames"), ("FC-MGMT-MIB", "fcmPortLcClass2TxPbsyFrames"), ("FC-MGMT-MIB", "fcmPortLcClass2TxFrjtFrames"), ("FC-MGMT-MIB", "fcmPortLcClass2TxPrjtFrames"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
fcmPortLcStatsGroup = fcmPortLcStatsGroup.setStatus('current')
fcmPortErrorsGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 10, 56, 3, 2, 8)).setObjects(("FC-MGMT-MIB", "fcmPortRxLinkResets"), ("FC-MGMT-MIB", "fcmPortTxLinkResets"), ("FC-MGMT-MIB", "fcmPortLinkResets"), ("FC-MGMT-MIB", "fcmPortRxOfflineSequences"), ("FC-MGMT-MIB", "fcmPortTxOfflineSequences"), ("FC-MGMT-MIB", "fcmPortLinkFailures"), ("FC-MGMT-MIB", "fcmPortLossofSynchs"), ("FC-MGMT-MIB", "fcmPortLossofSignals"), ("FC-MGMT-MIB", "fcmPortPrimSeqProtocolErrors"), ("FC-MGMT-MIB", "fcmPortInvalidTxWords"), ("FC-MGMT-MIB", "fcmPortInvalidCRCs"), ("FC-MGMT-MIB", "fcmPortInvalidOrderedSets"), ("FC-MGMT-MIB", "fcmPortFrameTooLongs"), ("FC-MGMT-MIB", "fcmPortTruncatedFrames"), ("FC-MGMT-MIB", "fcmPortAddressErrors"), ("FC-MGMT-MIB", "fcmPortDelimiterErrors"), ("FC-MGMT-MIB", "fcmPortEncodingDisparityErrors"), ("FC-MGMT-MIB", "fcmPortOtherErrors"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
fcmPortErrorsGroup = fcmPortErrorsGroup.setStatus('current')
fcmSwitchPortGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 10, 56, 3, 2, 9)).setObjects(("FC-MGMT-MIB", "fcmFxPortRatov"), ("FC-MGMT-MIB", "fcmFxPortEdtov"), ("FC-MGMT-MIB", "fcmFxPortRttov"), ("FC-MGMT-MIB", "fcmFxPortHoldTime"), ("FC-MGMT-MIB", "fcmFxPortCapBbCreditMax"), ("FC-MGMT-MIB", "fcmFxPortCapBbCreditMin"), ("FC-MGMT-MIB", "fcmFxPortCapDataFieldSizeMax"), ("FC-MGMT-MIB", "fcmFxPortCapDataFieldSizeMin"), ("FC-MGMT-MIB", "fcmFxPortCapClass2SeqDeliv"), ("FC-MGMT-MIB", "fcmFxPortCapClass3SeqDeliv"), ("FC-MGMT-MIB", "fcmFxPortCapHoldTimeMax"), ("FC-MGMT-MIB", "fcmFxPortCapHoldTimeMin"), ("FC-MGMT-MIB", "fcmISPortClassFCredit"), ("FC-MGMT-MIB", "fcmISPortClassFDataFieldSize"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
fcmSwitchPortGroup = fcmSwitchPortGroup.setStatus('current')
fcmSwitchLoginGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 10, 56, 3, 2, 10)).setObjects(("FC-MGMT-MIB", "fcmFLoginPortWwn"), ("FC-MGMT-MIB", "fcmFLoginNodeWwn"), ("FC-MGMT-MIB", "fcmFLoginBbCreditModel"), ("FC-MGMT-MIB", "fcmFLoginBbCredit"), ("FC-MGMT-MIB", "fcmFLoginClassesAgreed"), ("FC-MGMT-MIB", "fcmFLoginClass2SeqDelivAgreed"), ("FC-MGMT-MIB", "fcmFLoginClass2DataFieldSize"), ("FC-MGMT-MIB", "fcmFLoginClass3SeqDelivAgreed"), ("FC-MGMT-MIB", "fcmFLoginClass3DataFieldSize"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
fcmSwitchLoginGroup = fcmSwitchLoginGroup.setStatus('current')
fcmLinkBasicGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 10, 56, 3, 2, 11)).setObjects(("FC-MGMT-MIB", "fcmLinkEnd1NodeWwn"), ("FC-MGMT-MIB", "fcmLinkEnd1PhysPortNumber"), ("FC-MGMT-MIB", "fcmLinkEnd1PortWwn"), ("FC-MGMT-MIB", "fcmLinkEnd2NodeWwn"), ("FC-MGMT-MIB", "fcmLinkEnd2PhysPortNumber"), ("FC-MGMT-MIB", "fcmLinkEnd2PortWwn"), ("FC-MGMT-MIB", "fcmLinkEnd2AgentAddress"), ("FC-MGMT-MIB", "fcmLinkEnd2PortType"), ("FC-MGMT-MIB", "fcmLinkEnd2UnitType"), ("FC-MGMT-MIB", "fcmLinkEnd2FcAddressId"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
fcmLinkBasicGroup = fcmLinkBasicGroup.setStatus('current')
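# The ObjectGroup definitions above form the FC-MGMT-MIB conformance groups
# (all registered under the (1, 3, 6, 1, 2, 1, 10, 56, 3, 2) subtree exported
# below as fcmgmtGroups). The exportSymbols() call that follows publishes every
# managed-object symbol so that other pysnmp MIB modules can IMPORT them by name.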
mibBuilder.exportSymbols("FC-MGMT-MIB", fcmPortClass2TxFrames=fcmPortClass2TxFrames, fcmPortFrameTooLongs=fcmPortFrameTooLongs, fcmPortLcClass3RxFrames=fcmPortLcClass3RxFrames, fcmFLoginTable=fcmFLoginTable, fcmPortPrimSeqProtocolErrors=fcmPortPrimSeqProtocolErrors, fcmInstanceDescr=fcmInstanceDescr, FcAddressIdOrZero=FcAddressIdOrZero, fcmPortInstanceIndex=fcmPortInstanceIndex, fcmFxPortCapHoldTimeMin=fcmFxPortCapHoldTimeMin, FcDataFieldSize=FcDataFieldSize, fcmISPortTable=fcmISPortTable, fcmPortTxLinkResets=fcmPortTxLinkResets, fcmISPortClassFCredit=fcmISPortClassFCredit, fcmFxPortTable=fcmFxPortTable, fcmLinkBasicGroup=fcmLinkBasicGroup, fcmPortLcClass3TxOctets=fcmPortLcClass3TxOctets, fcmPortErrorsTable=fcmPortErrorsTable, fcmSwitchWWN=fcmSwitchWWN, fcmInstanceBasicGroup=fcmInstanceBasicGroup, FcBbCreditModel=FcBbCreditModel, fcmgmtNotifPrefix=fcmgmtNotifPrefix, fcmPortLossofSynchs=fcmPortLossofSynchs, fcmPortClass2TxOctets=fcmPortClass2TxOctets, fcmInstanceStatus=fcmInstanceStatus, fcmgmtNotifications=fcmgmtNotifications, fcmPortLcClass2RxPbsyFrames=fcmPortLcClass2RxPbsyFrames, fcmPortAdminType=fcmPortAdminType, fcmFLoginNodeWwn=fcmFLoginNodeWwn, fcmFxPortEdtov=fcmFxPortEdtov, fcmPortLcClass2RxFbsyFrames=fcmPortLcClass2RxFbsyFrames, fcmPortLcClass2TxFbsyFrames=fcmPortLcClass2TxFbsyFrames, fcmFxPortCapDataFieldSizeMin=fcmFxPortCapDataFieldSizeMin, fcmInstanceTable=fcmInstanceTable, fcmPortClass2RxPrjtFrames=fcmPortClass2RxPrjtFrames, fcmFLoginClass2DataFieldSize=fcmFLoginClass2DataFieldSize, fcmLinkEnd1NodeWwn=fcmLinkEnd1NodeWwn, fcmPortDelimiterErrors=fcmPortDelimiterErrors, fcmPortLcStatsGroup=fcmPortLcStatsGroup, fcmSwitchPortGroup=fcmSwitchPortGroup, fcmgmtCompliances=fcmgmtCompliances, fcmPortClass2RxFbsyFrames=fcmPortClass2RxFbsyFrames, fcmPortClassFTxFrames=fcmPortClassFTxFrames, fcmPortClass3TxFrames=fcmPortClass3TxFrames, fcmInstancePhysicalIndex=fcmInstancePhysicalIndex, fcmPortInvalidCRCs=fcmPortInvalidCRCs, fcmPortClass2TxFbsyFrames=fcmPortClass2TxFbsyFrames, fcmgmtCompliance=fcmgmtCompliance, fcmPortClass23StatsGroup=fcmPortClass23StatsGroup, fcmSwitchIndex=fcmSwitchIndex, fcmPortOtherErrors=fcmPortOtherErrors, fcmgmtGroups=fcmgmtGroups, fcmSwitchDomainId=fcmSwitchDomainId, fcmPortClass3RxOctets=fcmPortClass3RxOctets, fcmPortLcBBCreditZeros=fcmPortLcBBCreditZeros, fcmPortNodeWwn=fcmPortNodeWwn, fcmPortClass2RxPbsyFrames=fcmPortClass2RxPbsyFrames, fcmPortLcClass3TxFrames=fcmPortLcClass3TxFrames, fcmFLoginClass3DataFieldSize=fcmFLoginClass3DataFieldSize, fcmLinkEnd1PhysPortNumber=fcmLinkEnd1PhysPortNumber, fcmSwitchLoginGroup=fcmSwitchLoginGroup, FcNameIdOrZero=FcNameIdOrZero, fcmPortLcClass2TxFrjtFrames=fcmPortLcClass2TxFrjtFrames, fcmPortLcClass3RxOctets=fcmPortLcClass3RxOctets, fcmFxPortCapBbCreditMax=fcmFxPortCapBbCreditMax, fcmPortFcOperClass=fcmPortFcOperClass, fcmPortLcClass2RxFrjtFrames=fcmPortLcClass2RxFrjtFrames, fcmPortTable=fcmPortTable, fcmPortLcClass2RxOctets=fcmPortLcClass2RxOctets, fcmFxPortRttov=fcmFxPortRttov, fcmPortClass2Discards=fcmPortClass2Discards, fcmSwitchPrincipal=fcmSwitchPrincipal, fcmFLoginPortWwn=fcmFLoginPortWwn, fcmSwitchEntry=fcmSwitchEntry, fcmPortClass2TxPbsyFrames=fcmPortClass2TxPbsyFrames, fcmInstanceFunctions=fcmInstanceFunctions, fcmFxPortRatov=fcmFxPortRatov, fcmPortClass2TxFrjtFrames=fcmPortClass2TxFrjtFrames, fcmLinkEnd2AgentAddress=fcmLinkEnd2AgentAddress, fcmPortRxOfflineSequences=fcmPortRxOfflineSequences, fcmLinkTable=fcmLinkTable, fcmPortClass3RxFrames=fcmPortClass3RxFrames, 
fcmFxPortCapDataFieldSizeMax=fcmFxPortCapDataFieldSizeMax, fcmInstanceFabricId=fcmInstanceFabricId, fcmFxPortCapClass3SeqDeliv=fcmFxPortCapClass3SeqDeliv, fcmPortErrorsGroup=fcmPortErrorsGroup, fcmPortStatsTable=fcmPortStatsTable, fcmPortLinkFailures=fcmPortLinkFailures, fcmLinkEnd2FcAddressId=fcmLinkEnd2FcAddressId, fcmPortLcStatsEntry=fcmPortLcStatsEntry, fcmISPortEntry=fcmISPortEntry, fcmPortLcFullInputBuffers=fcmPortLcFullInputBuffers, fcmgmtObjects=fcmgmtObjects, fcmPortEncodingDisparityErrors=fcmPortEncodingDisparityErrors, fcmLinkEnd2UnitType=fcmLinkEnd2UnitType, fcmPortInvalidTxWords=fcmPortInvalidTxWords, fcmPortOperType=fcmPortOperType, fcmLinkEnd1PortWwn=fcmLinkEnd1PortWwn, fcmLinkIndex=fcmLinkIndex, fcmLinkEnd2PortWwn=fcmLinkEnd2PortWwn, fcmPortBBCreditZeros=fcmPortBBCreditZeros, FcUnitFunctions=FcUnitFunctions, fcmPortTruncatedFrames=fcmPortTruncatedFrames, fcmLinkEnd2NodeWwn=fcmLinkEnd2NodeWwn, fcmFxPortCapBbCreditMin=fcmFxPortCapBbCreditMin, fcmInstanceSoftwareIndex=fcmInstanceSoftwareIndex, fcmPortInvalidOrderedSets=fcmPortInvalidOrderedSets, fcmInstanceIndex=fcmInstanceIndex, fcmLinkEntry=fcmLinkEntry, fcmPortClass3TxOctets=fcmPortClass3TxOctets, fcmFxPortEntry=fcmFxPortEntry, fcmInstanceWwn=fcmInstanceWwn, fcmISPortClassFDataFieldSize=fcmISPortClassFDataFieldSize, FcDomainIdOrZero=FcDomainIdOrZero, fcmPortClassFDiscards=fcmPortClassFDiscards, fcmPortSerialNumber=fcmPortSerialNumber, fcmFLoginEntry=fcmFLoginEntry, fcmPortLcClass2TxFrames=fcmPortLcClass2TxFrames, fcmSwitchTable=fcmSwitchTable, fcmPortFcCapClass=fcmPortFcCapClass, fcmPortClass2RxFrjtFrames=fcmPortClass2RxFrjtFrames, fcmPortStatsEntry=fcmPortStatsEntry, fcmPortStatsGroup=fcmPortStatsGroup, fcmgmtConformance=fcmgmtConformance, fcmPortClass2RxFrames=fcmPortClass2RxFrames, fcmPortLcClass2Discards=fcmPortLcClass2Discards, fcmPortEntry=fcmPortEntry, fcmPortClass3Discards=fcmPortClass3Discards, fcmFLoginClassesAgreed=fcmFLoginClassesAgreed, fcmPortAddressErrors=fcmPortAddressErrors, fcmPortLcClass2TxOctets=fcmPortLcClass2TxOctets, fcmPortTransmitterType=fcmPortTransmitterType, PYSNMP_MODULE_ID=fcMgmtMIB, fcmPortClassFRxFrames=fcmPortClassFRxFrames, fcmPortLcStatsTable=fcmPortLcStatsTable, fcmPortConnectorType=fcmPortConnectorType, fcmFLoginBbCreditModel=fcmFLoginBbCreditModel, fcmLinkEnd2PhysPortNumber=fcmLinkEnd2PhysPortNumber, fcmPortClass2RxOctets=fcmPortClass2RxOctets, fcmPortClassFTxOctets=fcmPortClassFTxOctets, fcmPortLcClass3Discards=fcmPortLcClass3Discards, fcmPortLossofSignals=fcmPortLossofSignals, fcmFLoginClass2SeqDelivAgreed=fcmFLoginClass2SeqDelivAgreed, fcmPortLcClass2RxPrjtFrames=fcmPortLcClass2RxPrjtFrames, fcmFLoginClass3SeqDelivAgreed=fcmFLoginClass3SeqDelivAgreed, fcmSwitchBasicGroup=fcmSwitchBasicGroup, fcmFLoginNxPortIndex=fcmFLoginNxPortIndex, fcmPortCapProtocols=fcmPortCapProtocols, fcmPortLinkResets=fcmPortLinkResets, fcmPortAdminSpeed=fcmPortAdminSpeed, fcmPortWwn=fcmPortWwn, fcMgmtMIB=fcMgmtMIB, fcmFxPortHoldTime=fcmFxPortHoldTime, FcBbCredit=FcBbCredit, FcPortType=FcPortType, fcmPortTxOfflineSequences=fcmPortTxOfflineSequences, fcmFLoginBbCredit=fcmFLoginBbCredit, fcmFxPortCapClass2SeqDeliv=fcmFxPortCapClass2SeqDeliv, fcmPortBasicGroup=fcmPortBasicGroup, fcmPortOperProtocols=fcmPortOperProtocols, fcmPortLcClass2RxFrames=fcmPortLcClass2RxFrames, fcmFxPortCapHoldTimeMax=fcmFxPortCapHoldTimeMax, fcmPortLcClass2TxPrjtFrames=fcmPortLcClass2TxPrjtFrames, fcmLinkEnd2PortType=fcmLinkEnd2PortType, fcmInstanceEntry=fcmInstanceEntry, fcmInstanceTextName=fcmInstanceTextName, 
fcmPortClassFStatsGroup=fcmPortClassFStatsGroup, fcmPortFullInputBuffers=fcmPortFullInputBuffers, fcmPortRxLinkResets=fcmPortRxLinkResets, fcmPortClassFRxOctets=fcmPortClassFRxOctets, fcmPortClass2TxPrjtFrames=fcmPortClass2TxPrjtFrames, fcmPortPhysicalNumber=fcmPortPhysicalNumber, FcClasses=FcClasses, fcmPortErrorsEntry=fcmPortErrorsEntry, fcmPortLcClass2TxPbsyFrames=fcmPortLcClass2TxPbsyFrames)
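# Usage sketch (kept as comments so it is not executed when pysnmp loads this
# module). Assumptions: this generated file is saved as FC-MGMT-MIB.py in the
# current directory of a *separate* script, with pysnmp installed; the module
# and symbol names come from the exportSymbols() call above.
#
#     from pysnmp.smi import builder, view, rfc1902
#
#     mibBuilder = builder.MibBuilder()
#     mibBuilder.addMibSources(builder.DirMibSource('.'))  # where FC-MGMT-MIB.py lives
#     mibBuilder.loadModules('FC-MGMT-MIB')                # executes the definitions above
#     mibView = view.MibViewController(mibBuilder)
#
#     # Resolve an exported descriptor to its numeric OID:
#     obj = rfc1902.ObjectIdentity('FC-MGMT-MIB', 'fcmPortWwn').resolveWithMib(mibView)
#     print(obj.getOid())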