import json
from pyex.api import PyexAPIBuilder, Exchange
if __name__ == "__main__":
with open('api.json', 'r', encoding='utf-8') as f:
        obj = json.load(f)
api_key = obj['api_key']
secret_key = obj['secret_key']
passphrase = obj['passphrase']
api = PyexAPIBuilder().api_key(api_key) \
.secret_key(secret_key) \
.passphrase(passphrase) \
.build(Exchange.OKEX)
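# The script above assumes an api.json file next to it, shaped roughly like
# this (placeholder values, not real credentials):
#
# {
#     "api_key": "your-api-key",
#     "secret_key": "your-secret-key",
#     "passphrase": "your-passphrase"
# }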
|
import unittest
from roadsearch.tests.abstract_test import AbstractTest
from roadsearch.generators.mutators.mutations import ListMutator, ValueAlterationMutator
from roadsearch.generators.exploiters.exploiters import FirstVariableExploiter
from roadsearch.generators.representations.kappa_generator import KappaGenerator
class KappaTest(AbstractTest):
def test_all_mutations(self):
generator = KappaGenerator(20)
mutator = ListMutator(generator)
kappas = generator.generate()
for name, func in mutator.get_all():
mutant = func(kappas)
func(mutant)
def test_all_tuple_mutations(self):
generator = KappaGenerator(20)
mutator = ValueAlterationMutator()
kappas = generator.generate()
for name, func in mutator.get_all():
mutant = func(kappas)
func(mutant)
def test_exploiters_all_exploiters(self):
generator = KappaGenerator(20)
exploiter = FirstVariableExploiter()
kappas = generator.generate()
for name, func in exploiter.get_all():
mutant = func(kappas)
func(mutant)
def test_random_replacement(self):
length = 11
generator = KappaGenerator(length)
mutator = ListMutator(generator)
example = generator.generate()
self.size_does_not_change(length, example, mutator.random_replacement, is_tuple=True)
def test_random_alteration(self):
length = 11
generator = KappaGenerator(length)
mutator = ValueAlterationMutator()
example = generator.generate()
self.size_does_not_change(length, example, mutator.random_alteration, is_tuple=True)
def test_randomly_remove(self):
length = 15
generator = KappaGenerator(length)
mutator = ListMutator(generator)
example = generator.generate()
self.size_decreases_until_minimum_allowed(length=length,
min_length=mutator.min_length,
example=example,
function=mutator.randomly_remove_kappas,
is_tuple=True)
def test_randomly_remove_front(self):
length = 15
generator = KappaGenerator(length)
mutator = ListMutator(generator)
example = generator.generate()
self.size_decreases_until_minimum_allowed(length=length,
min_length=mutator.min_length,
example=example,
function=mutator.remove_from_front,
is_tuple=True)
def test_randomly_remove_tail(self):
length = 15
generator = KappaGenerator(length)
mutator = ListMutator(generator)
example = generator.generate()
self.size_decreases_until_minimum_allowed(length=length,
min_length=mutator.min_length,
example=example,
function=mutator.remove_from_tail,
is_tuple=True)
if __name__ == '__main__':
unittest.main()
|
from datetime import datetime
from bson.objectid import ObjectId
from werkzeug.exceptions import BadRequest, NotFound, Conflict
from bdc_oauth.users.business import UsersBusiness
from bdc_oauth.utils.base_mongo import mongo
class ClientsBusiness():
@classmethod
def init_infos(cls):
return {
"model": mongo.db.clients
}
@classmethod
def get_all(cls):
model = cls.init_infos()['model']
clients = model.find({
"$or": [
{ "expired_at": {"$gt": datetime.now()} },
{ "expired_at": None }
]
})
return list(clients)
@classmethod
def get_by_id(cls, id):
model = cls.init_infos()['model']
try:
client = model.find_one({
"_id": ObjectId(id),
"$or": [
{ "expired_at": {"$gt": datetime.now()} },
{ "expired_at": None }
]
})
return client
except Exception:
raise NotFound("Client not Found!")
@classmethod
def get_by_name(cls, name):
model = cls.init_infos()['model']
try:
client = model.find_one({
"client_name": name,
"$or": [
{ "expired_at": {"$gt": datetime.now()} },
{ "expired_at": None }
]
})
return client
except Exception:
raise NotFound("Client not Found!")
@classmethod
def list_by_userid(cls, user_id):
model = cls.init_infos()['model']
clients = model.find({
"user_id": ObjectId(user_id),
"$or": [
{ "expired_at": {"$gt": datetime.now()} },
{ "expired_at": None }
]
})
return clients
@classmethod
def create(cls, user_id, client_infos):
model = cls.init_infos()['model']
user = UsersBusiness.get_by_id(user_id)
if not user:
            raise NotFound('User not found!')
"""
check if client name is already registered
"""
client = model.find_one({
"client_name": client_infos['client_name'],
"$or": [
{ "expired_at": {"$gt": datetime.now()} },
{ "expired_at": None }
]
})
if client:
raise Conflict('A client with this name already exists')
"""
create client credentials
"""
client_infos['user_id'] = user['_id']
client_infos['created_at'] = datetime.now()
client_infos['expired_at'] = client_infos.get('expired_at', None)
"""
save in mongodb
"""
try:
model.insert_one(client_infos)
return client_infos
except Exception:
return False
@classmethod
def update(cls, id, client_infos):
model = cls.init_infos()['model']
"""
checks whether the user exists
"""
client = cls.get_by_id(id)
if not client:
            raise NotFound('Client not found!')
"""
update in mongodb
"""
try:
model.update_one({"_id": ObjectId(id)}, {"$set": client_infos})
return True
except Exception:
return False
@classmethod
def delete(cls, id):
model = cls.init_infos()['model']
"""
checks whether the user exists
"""
client = model.find_one({ "_id": ObjectId(id) })
if not client:
            raise NotFound('Client not found!')
# TODO: remove client of users
"""
delete in mongodb
"""
try:
model.delete_one({"_id": ObjectId(id)})
return True
except Exception:
return False
@classmethod
def update_date_expiration(cls, id, action, date):
model = cls.init_infos()['model']
"""
checks whether the user exists
"""
client = model.find_one({ "_id": ObjectId(id) })
if not client:
            raise NotFound('Client not found!')
client['expired_at'] = datetime.now()
if action == 'enable':
if date and datetime.strptime(date, '%Y-%m-%d') <= datetime.now():
                raise BadRequest('Expiration date must be later than the current date')
else:
client['expired_at'] = datetime.strptime(date, '%Y-%m-%d') if date else None
"""
update in mongodb
"""
try:
model.update_one({"_id": ObjectId(id)}, {"$set": client})
return True
except Exception:
return False
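    # Sketch of a possible refactor (not part of the original module): the
    # "client has not expired" $or clause repeats in every query above, so it
    # could be built once by a helper and spliced into each filter.
    @staticmethod
    def _active_client_filter():
        return {
            "$or": [
                {"expired_at": {"$gt": datetime.now()}},
                {"expired_at": None}
            ]
        }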
|
from django.contrib.auth.models import AbstractUser
class Profile(AbstractUser):
def __str__(self):
return self.get_full_name() or self.username
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Dict, List, Optional
ATTRIBUTES = "attributes"
def metadata_to_dict(
values: List[str],
ignore_columns: List[int],
ignored_values: Optional[List[str]],
keys: List[str]
) -> Dict[str, str]:
"""
Converts two parallel lists of values into a dictionary of key value pairs.
This function is ultimately a more complex dict(zip(keys,values)) function.
Ideally, len(values) == len(keys). Only iterates through min(len(values), len(keys)) to avoid any overruns.
Mismatched sizes could mean invalid data, but due to the relatively lax rules of CSV, could also be valid data.
Will use ignore_columns to skip over columns in both lists that correspond with that integer. No key or value will
be written for these columns.
    Will also use any value provided in ignored_values to skip adding any value that matches. This allows us to avoid
    adding usually skipped data such as NULL or N/A or "" as metadata attributes.
:param List[str] values: The data values will be the values of the resulting dictionary. List can be empty, but
cannot be None.
:param List[int] ignore_columns: A list of column indices to be skipped over. List can be empty, but cannot be None
:param List[str] ignored_values: A list of invalid values to avoid if they exist in the row list. List can be
empty, but cannot be None.
:param List[str] keys: A list of the keys to be used as the key in the resulting dictionary. These are most often
from the headers of a CsvFile or the attribute name in a JSON file. List can be empty, but cannot be None.
:return: An attribute dictionary
:rtype: dict[str, str]
"""
actual_ignored_values = [] if ignored_values is None else ignored_values
min_length = min(len(values), len(keys))
metadata = {}
for i in range(0, min_length):
header = keys[i]
value = values[i]
if i not in ignore_columns and value not in actual_ignored_values:
metadata[header] = value
return metadata
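
if __name__ == "__main__":
    # Illustrative call (hypothetical data, mirroring the docstring): column 1
    # is skipped via ignore_columns, and "N/A" is dropped as an ignored value.
    example = metadata_to_dict(
        values=["Rover", "42", "dog", "N/A"],
        ignore_columns=[1],
        ignored_values=["N/A", "NULL", ""],
        keys=["name", "id", "species", "notes"],
    )
    assert example == {"name": "Rover", "species": "dog"}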
|
# coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from jobbing.models.base_model_ import Model
from jobbing import util
class Municipality(Model):
    def __init__(self,
                 id_municipality: int = None,
                 municipality_name: str = None,
                 id_state_code: int = None):  # noqa: E501
self.swagger_types = {
'id_municipality': int,
'municipality_name': str,
'id_state_code': int
}
self.attribute_map = {
'id_municipality': 'id_municipality',
'municipality_name': 'municipality_name',
'id_state_code': 'id_state_code'
}
self._id_municipality = id_municipality
self._municipality_name = municipality_name
self._id_state_code = id_state_code
@classmethod
def from_dict(cls, dikt) -> 'Municipality':
return util.deserialize_model(dikt, cls)
@property
def id_municipality(self) -> int:
return self._id_municipality
@id_municipality.setter
def id_municipality(self, param):
if param is None:
raise ValueError("Invalid value for `id_municipality`, must not be `None`") # noqa: E501
self._id_municipality = param
@property
def municipality_name(self) -> str:
return self._municipality_name
@municipality_name.setter
def municipality_name(self, param):
if param is None:
raise ValueError("Invalid value for `municipality_name`, must not be `None`") # noqa: E501
self._municipality_name = param
@property
def id_state_code(self) -> int:
return self._id_state_code
@id_state_code.setter
def id_state_code(self, param):
if param is None:
raise ValueError("Invalid value for `id_state_code`, must not be `None`") # noqa: E501
self._id_state_code = param
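
if __name__ == "__main__":
    # Hedged usage sketch: build a Municipality from a dict. The keys follow
    # attribute_map above; the sample values are made up, and
    # util.deserialize_model is assumed to behave as in the other generated
    # models of this package.
    m = Municipality.from_dict({
        'id_municipality': 1,
        'municipality_name': 'Guadalajara',
        'id_state_code': 14,
    })
    print(m.municipality_name)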
|
from robobrowser import RoboBrowser
def is_ascii(s):
return all(ord(c) < 128 for c in s)
def remove_html_markup(s):
tag = False
quote = False
out = ""
for c in s:
if c == '<' and not quote:
tag = True
elif c == '>' and not quote:
tag = False
elif (c == '"' or c == "'") and tag:
quote = not quote
elif not tag:
out = out + c
return out
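# Quick sanity check of remove_html_markup (illustrative input): tags are
# stripped, and quoted angle brackets inside attributes are not treated as tags.
assert remove_html_markup('<a href="x">link</a>') == 'link'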
browser = RoboBrowser(history=True)
browser.open('http://www.emopuddle.com/')
boards = browser.select('td.col_c_forum a')
boards = boards[6:]
boards = boards[:-1]
del boards[3]
#print(boards)
for board in boards:
browser.follow_link(board)
while True:
nextPage = browser.select('li.next a')
posts = browser.select('a[class="topic_title"]')
#posts = browser.select('td.col_f_content a')
for post in posts:
#print(post)
#print(post['title'])
if is_ascii(post['title']):
#print(post)
browser.follow_link(post)
while True:
nextPostPage = browser.select('li.next a')
text = browser.select('div.entry-content')
#print(nextPage)
for tag in text:
if (is_ascii(tag.text.strip())):
print(remove_html_markup(tag.text.strip()))
else:
continue
if not nextPostPage:
break
browser.follow_link(nextPostPage[0])
if not nextPage:
break
browser.follow_link(nextPage[0])
|
# Gregary C. Zweigle, 2020
import fifo
import numpy as np
import pyaudio
import time
class AudioInOut:
    def __init__(self, rate, chunk, downsample_ratio, fifo_length):
#self.last_time = time.time()
#self.get_last_time = time.time()
self.fifo = fifo.Fifo(fifo_length)
self.width = 2
self.channels = 2
self.rate = rate
self.chunk = chunk
        self.downsample_ratio = downsample_ratio
print("Initialized the PyAudio class.")
def start_audio_driver(self):
self.fifo.clear()
self.pa = pyaudio.PyAudio()
self.stream = self.pa.open(
format = self.pa.get_format_from_width(self.width),
channels = self.channels,
rate = self.rate,
input = True,
output = True,
frames_per_buffer = self.chunk,
stream_callback = self.callback)
self.stream.start_stream()
print("Started the PyAudio driver.")
def end_audio_driver(self):
max_wait_tries = 0
        while self.stream.is_active() and max_wait_tries < 50:
max_wait_tries += 1
time.sleep(0.1)
if max_wait_tries == 50:
print('DEBUG - Stopping the driver timed out.')
self.stream.stop_stream()
self.stream.close()
self.pa.terminate()
print("Ended the PyAudio driver.")
def get_data_from_audio_driver(self):
audio_left_list = []
audio_right_list = []
for i in range(self.fifo.get_fifo_length()):
data, valid = self.fifo.get()
if not valid:
break
else:
for k in range(0, 2*self.chunk, 2*self.downsample_ratio):
audio_left_list.append(data[k])
audio_right_list.append(data[k+1])
audio_left = np.asarray(audio_left_list).astype(float)
audio_right = np.asarray(audio_right_list).astype(float)
#print("Got {0} data: delta_t = {1}".format(len(audio_left_list),
#time.time() - self.get_last_time))
#self.get_last_time = time.time()
return audio_left, audio_right
def callback(self, in_data, frame_count, time_info, status):
        audio_data = np.frombuffer(in_data, dtype=np.int16)  # np.fromstring is deprecated
        if audio_data.shape[0] != 2*self.chunk:  # Fix annoying startup transient.
            audio_data = np.zeros((2*self.chunk,), dtype=np.int16)
self.fifo.put(audio_data)
        # Output silence: hand the driver a fresh int16 zero buffer.
        audio_data = np.zeros((2*self.chunk,), dtype=np.int16)
#print("\t\t\t\t\t\tPut {0} data: delta_t = {1}".format(audio_data.size,
#time.time() - self.last_time))
#self.last_time = time.time()
return audio_data, pyaudio.paContinue
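
# Hedged usage sketch (assumes the local fifo module provides the Fifo class
# used above): capture for a second, then pull the downsampled channels.
if __name__ == "__main__":
    audio = AudioInOut(rate=44100, chunk=1024, downsample_ratio=4, fifo_length=32)
    audio.start_audio_driver()
    time.sleep(1.0)  # let the callback fill the FIFO
    left, right = audio.get_data_from_audio_driver()
    print("captured {0} left / {1} right samples".format(left.size, right.size))
    audio.end_audio_driver()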
|
from django import test as django_test
from django.contrib.auth import get_user_model
from django.urls import reverse
from final_project.accounts.helpers import UserAndProfileData
from final_project.accounts.models import Profile
from final_project.main.models import Post
from final_project.main.tests_main.views.tests_posts import ValidPostData
UserModel = get_user_model()
class DeletePostViewTests(ValidPostData, UserAndProfileData, django_test.TestCase):
EXPECTED_TEMPLATE = 'posts/delete_post.html'
SECOND_VALID_USER_CREDENTIALS = {
'email': 'wronguser@abv.bg',
'password': '1234',
}
def __create_user(self, **credentials):
return UserModel.objects.create_user(**credentials)
def __create_valid_user_and_profile(self):
user = self.__create_user(**self.VALID_USER_CREDENTIALS)
profile = Profile.objects.create(
**self.VALID_PROFILE_DATA,
user=user,
)
return (user, profile)
def __create_post_view(self, user):
return Post.objects.create(**self.VALID_POST_DATA, creator=user)
def __get_response_for_profile(self, post):
return self.client.get(reverse('delete post', kwargs={'pk': post.pk}))
# CHECK IF VIEW LOADS CORRECT TEMPLATE
def test_view_renders_correct_template(self):
user, profile = self.__create_valid_user_and_profile()
self.client.login(**self.VALID_USER_CREDENTIALS)
post = self.__create_post_view(user)
response = self.__get_response_for_profile(post)
self.assertTemplateUsed(response, self.EXPECTED_TEMPLATE)
# CHECK IF VIEW IS ACCESSED ONLY BY LOGGED-IN USER
def test_when_opening_with_logged_in_user__expect_200(self):
user, profile = self.__create_valid_user_and_profile()
self.client.login(**self.VALID_USER_CREDENTIALS)
post = self.__create_post_view(user)
response = self.__get_response_for_profile(post)
self.assertEqual(200, response.status_code)
# CHECK IF VIEW DELETES POST WITH CORRECT USER
def test_deleting_post_with_correct_user__expect_success(self):
user, profile = self.__create_valid_user_and_profile()
self.client.login(**self.VALID_USER_CREDENTIALS)
post = self.__create_post_view(user)
self.client.post(
reverse('delete post', kwargs={'pk': post.pk}),
data={},
)
deleted_post = Post.objects.first()
self.assertIsNone(deleted_post)
# CHECK IF VIEW DELETES POST WITH WRONG USER
    def test_deleting_post_with_wrong_user__expect_post_not_deleted(self):
user, profile = self.__create_valid_user_and_profile()
post = self.__create_post_view(user)
wrong_user = self.__create_user(**self.SECOND_VALID_USER_CREDENTIALS)
self.client.login(**self.SECOND_VALID_USER_CREDENTIALS)
response = self.client.post(
reverse('delete post', kwargs={'pk': post.pk}),
data={},
)
not_deleted_post = Post.objects.first()
error_message = b'You must be the creator to delete the post!'
self.assertEqual(error_message, response.content)
self.assertIsNotNone(not_deleted_post)
|
from randomplushmiku import myfunctions
def test_lib():
assert myfunctions.multiplemikus(2) == 2
|
from __future__ import annotations
from typing import Optional, TYPE_CHECKING, Union
from pyspark.sql.types import StructType, DataType
from spark_auto_mapper_fhir.fhir_types.date_time import FhirDateTime
from spark_auto_mapper_fhir.fhir_types.list import FhirList
from spark_auto_mapper_fhir.fhir_types.string import FhirString
from spark_auto_mapper_fhir.extensions.extension_base import ExtensionBase
from spark_auto_mapper_fhir.base_types.fhir_complex_type_base import FhirComplexTypeBase
from spark_fhir_schemas.r4.complex_types.annotation import AnnotationSchema
if TYPE_CHECKING:
pass
# id_ (string)
# extension (Extension)
# authorReference (Reference)
from spark_auto_mapper_fhir.complex_types.reference import Reference
# Imports for References for authorReference
from spark_auto_mapper_fhir.resources.practitioner import Practitioner
from spark_auto_mapper_fhir.resources.patient import Patient
from spark_auto_mapper_fhir.resources.related_person import RelatedPerson
from spark_auto_mapper_fhir.resources.organization import Organization
# authorString (string)
# time (dateTime)
# text (markdown)
from spark_auto_mapper_fhir.fhir_types.markdown import FhirMarkdown
# This file is auto-generated by generate_classes so do not edit manually
# noinspection PyPep8Naming
class Annotation(FhirComplexTypeBase):
"""
Annotation
fhir-base.xsd
A text note which also contains information about who made the statement and when.
If the element is present, it must have a value for at least one of the defined elements, an @id referenced from the Narrative, or extensions
"""
# noinspection PyPep8Naming
def __init__(
self,
*,
id_: Optional[FhirString] = None,
extension: Optional[FhirList[ExtensionBase]] = None,
authorReference: Optional[
Reference[Union[Practitioner, Patient, RelatedPerson, Organization]]
] = None,
authorString: Optional[FhirString] = None,
time: Optional[FhirDateTime] = None,
text: FhirMarkdown,
) -> None:
"""
A text note which also contains information about who made the statement and
when.
If the element is present, it must have a value for at least one of the
defined elements, an @id referenced from the Narrative, or extensions
:param id_: None
:param extension: May be used to represent additional information that is not part of the basic
definition of the element. To make the use of extensions safe and manageable,
there is a strict set of governance applied to the definition and use of
extensions. Though any implementer can define an extension, there is a set of
requirements that SHALL be met as part of the definition of the extension.
:param authorReference: None
:param authorString: None
:param time: Indicates when this particular annotation was made.
:param text: The text of the annotation in markdown format.
"""
super().__init__(
id_=id_,
extension=extension,
authorReference=authorReference,
authorString=authorString,
time=time,
text=text,
)
def get_schema(
self, include_extension: bool
) -> Optional[Union[StructType, DataType]]:
return AnnotationSchema.get_schema(include_extension=include_extension)
|
import nicehash
# For testing purposes use api-test.nicehash.com. Register here: https://test.nicehash.com
# When ready, switch to the production values below to run your script on the production environment.
# How to create the key and secret, and where to get the organisation id, please check:
# Production - https://www.nicehash.com
host = 'https://api2.nicehash.com'
organisation_id = 'Enter your organisation id'
key = 'Enter your api key'
secret = 'Enter your secret for api key'
# Test - https://test.nicehash.com
# host = 'https://api-test.nicehash.com'
# organisation_id = '286fcf65-d44e-4cdf-81f2-4790c0cbed04'
# key = '6b957253-bcb9-4b83-b431-4f28ab783a6f'
# secret = 'ac09da0c-0b41-49ba-be6f-4698f9c184a67c6a834f-5bfe-5389-ba6f-d9ada9a86c03'
############################################
# PUBLIC FUNCTIONS
# Create public api object
public_api = nicehash.public_api(host, True)
# Get all algorithms
algorithms = public_api.get_algorithms()
print(algorithms)
# Get all markets
markets = public_api.get_markets()
print(markets)
# Get all currencies
currencies = public_api.get_currencies()
print(currencies)
# Get current global stats
global_stats_current = public_api.get_current_global_stats()
print(global_stats_current)
# Get global stats for 24h
global_stats_24h = public_api.get_global_stats_24()
print(global_stats_24h)
# Get active orders
global_active_orders = public_api.get_active_orders()
print(global_active_orders)
# Buy info
buy_info = public_api.buy_info()
print(buy_info)
# Get multialgo info
multialgo_info = public_api.get_multialgo_info()
print(multialgo_info)
############################################
# PRIVATE FUNCTIONS
# Create private api object
private_api = nicehash.private_api(host, organisation_id, key, secret, True)
# Get balance for all currencies
my_accounts = private_api.get_accounts()
print(my_accounts)
# Get balance for BTC address
my_btc_account = private_api.get_accounts_for_currency(currencies['currencies'][0]['symbol'])
print(my_btc_account)
# Get my active hashpower orders
my_top_active_x16r_eu_orders = private_api.get_my_active_orders('X16R', 'EU', 10)
print(my_top_active_x16r_eu_orders)
# Create pool
new_pool = private_api.create_pool('My best pool', 'X16R', 'the.best.pool.com', 3333, 'mybestcoinaddress', 'x')
print(new_pool)
# Get pools
pools_on_first_page = private_api.get_my_pools(0, 10)
print(pools_on_first_page)
# Create hashpower order
new_order = private_api.create_hashpower_order('EU', 'STANDARD', 'X16R', 0.123, 0, 0.005, pools_on_first_page['list'][0]['id'], algorithms)
print(new_order)
# Refill hashpower order
refilled_order = private_api.refill_hashpower_order(new_order['id'], 0.005)
print(refilled_order)
# Order hashpower set price
set_price_order = private_api.set_price_hashpower_order(new_order['id'], 0.234, 'X16R', algorithms)
print(set_price_order)
# Order hashpower set limit
set_limit_order = private_api.set_limit_hashpower_order(new_order['id'], 2.12, 'X16R', algorithms)
print(set_limit_order)
# Order hashpower set price and limit
set_price_and_limit_order = private_api.set_price_and_limit_hashpower_order(new_order['id'], 0.235, 1.2, 'X16R', algorithms)
print(set_price_and_limit_order)
# Remove hashpower order
delete_hp_order = private_api.cancel_hashpower_order(new_order['id'])
print(delete_hp_order)
# Delete pool
delete_pool_result = private_api.delete_pool(new_pool['id'])
print(delete_pool_result)
############################################
# EXCHANGE
# Get exchange market info
exchange_info = public_api.get_exchange_markets_info()
print(exchange_info)
# Get trades for first market
trades = public_api.get_exchange_trades(exchange_info['symbols'][0]['symbol'])
print (trades)
# Get candlesticks
candlesticks = public_api.get_candlesticks(exchange_info['symbols'][0]['symbol'], 1561896404, 1567080464, 60)
print (candlesticks)
# Get exchange orderbook
exchange_orderbook = public_api.get_exchange_orderbook(exchange_info['symbols'][0]['symbol'], 10)
print (exchange_orderbook)
# Get my exchange orders
my_exchange_orders = private_api.get_my_exchange_orders(exchange_info['symbols'][0]['symbol'])
print (my_exchange_orders)
# Get my exchange trades
my_exchange_trades = private_api.get_my_exchange_trades(exchange_info['symbols'][0]['symbol'])
print (my_exchange_trades)
# Create sell limit exchange order
new_sell_limit_order = private_api.create_exchange_limit_order(exchange_info['symbols'][0]['symbol'], 'sell', 10, 0.1)
print (new_sell_limit_order)
# Create buy limit exchange order
new_buy_limit_order = private_api.create_exchange_limit_order(exchange_info['symbols'][0]['symbol'], 'buy', 0.1, 0.1)
print (new_buy_limit_order)
# Create sell market order
new_sell_market_order = private_api.create_exchange_sell_market_order(exchange_info['symbols'][0]['symbol'], 0.1)
print(new_sell_market_order)
# Create buy market order
new_buy_market_order = private_api.create_exchange_buy_market_order(exchange_info['symbols'][0]['symbol'], 0.1)
print(new_buy_market_order)
# Cancel exchange order
cancelled_order = private_api.cancel_exchange_order(exchange_info['symbols'][0]['symbol'], my_exchange_orders[0]['orderId'])
print(cancelled_order)
|
#!/usr/bin/python
"""
based on
PyAudio + PyQtGraph Spectrum Analyzer
Author:@sbarratt
Date Created: August 8, 2015
and Spectrum Analyzer with STFT see Yumi's blog
https://fairyonice.github.io/implement-the-spectrogram-from-scratch-in-python.html
as modified by waszee Oct 12, 2020
this version is using sounddevice instead of pyaudio
"""
import sys
import numpy as np
import pyqtgraph as pg
from pyqtgraph.Qt import QtGui, QtCore
import matplotlib.pyplot as plt
import queue
import sounddevice as sd
q = queue.Queue()
# Audio Format (check Audio MIDI Setup if on Mac)
FORMAT = np.int16
RATE = 44100
CHANNELS = 1
# Set plot range [-URANGE, URANGE]; default upper bound is Nyquist/2
URANGE = 12000
if not URANGE:
    URANGE = RATE / 2
LRANGE = 200
if not LRANGE:
    LRANGE = 0
TRACK = 1024    # FFT window length for the spectrogram
OVERLAP = 400
COLLECTSEC = 30
# expect 44100 data points per sec, so 30 sec is about 1.2 MB
# input block is used for the realtime pyqtgraph
INPUT_BLOCK_TIME = 0.1
INPUT_FRAMES_PER_BLOCK = int(RATE*INPUT_BLOCK_TIME) #for pyQTgraph
#print("block:",INPUT_FRAMES_PER_BLOCK)
# Which Channel if stereo? (L or R)
LR = "l"
class SpectrumAnalyzer():
def __init__(self):
self.sdinit_stream()
self.initUI()
def sdinit_stream(self):
        self.sdstream = sd.InputStream(samplerate=RATE, channels=1,
                                       blocksize=INPUT_FRAMES_PER_BLOCK,
                                       callback=self.sdcallback, dtype=np.int16)
self.sdstream.start()
def sdcallback(self,indata, frames, time, status):
"""This is called (from a separate thread) for each audio block."""
if status:
print(status)
        ndata = indata.copy()  # the input buffer is reused by sounddevice, so copy before queueing
        q.put(ndata)
    def sdreadData(self):
        try:
            data = q.get()
        except queue.Empty:
            return None
        return data
def initUI(self):
self.app = QtGui.QApplication([])
        self.app.setQuitOnLastWindowClosed(True)
self.mainWindow = QtGui.QMainWindow()
self.mainWindow.setWindowTitle("Spectrum Analyzer")
self.mainWindow.resize(800,300)
self.centralWid = QtGui.QWidget()
self.mainWindow.setCentralWidget(self.centralWid)
self.lay = QtGui.QVBoxLayout()
self.centralWid.setLayout(self.lay)
self.specWid = pg.PlotWidget(name="spectrum")
self.specItem = self.specWid.getPlotItem()
self.specItem.setMouseEnabled(y=False)
self.specItem.setYRange(0,5000)
self.specItem.setXRange(-URANGE,URANGE, padding=0)
self.specAxis = self.specItem.getAxis("bottom")
self.specAxis.setLabel("Frequency [Hz]")
self.lay.addWidget(self.specWid)
self.mainWindow.show()
self.app.aboutToQuit.connect(self.close)
def close(self):
self.sdstream.stop()
sys.exit()
def get_spectrum(self, data):
T = 1.0/RATE
N = data.shape[0]
f = np.fft.fftfreq(N,T)
f = np.fft.fftshift(f)
w = np.blackman(N)
Pxx = np.fft.fft(data*w)
Pxx = np.fft.fftshift(Pxx)
Pxx = 2/N*np.abs(Pxx)
return f, Pxx
def create_spectrogram(self,ts,NFFT,noverlap = None):
'''
ts: original time series
NFFT: The number of data points used in each block for the DFT.
Fs: the number of points sampled per second, so called sample_rate
noverlap: The number of points of overlap between blocks. The default
value is NFFT/2.
'''
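        # Worked example of the windowing arithmetic above (illustrative
        # numbers): with len(ts) = 10, NFFT = 4, noverlap = 2, the candidate
        # starts are [0, 2, 4, 6, 8]; keeping only start + NFFT < len(ts)
        # leaves [0, 2, 4], i.e. three half-overlapping windows.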
if noverlap is None:
noverlap = NFFT/2
noverlap = int(noverlap)
starts = np.arange(0,len(ts),NFFT-noverlap,dtype=int)
# remove any window with less than NFFT sample size
starts = starts[starts + NFFT < len(ts)]
xns = []
for start in starts:
# short term discrete fourier transform
#ts_window = get_xns(ts[start:start + NFFT])
f, Pxx = self.get_spectrum(ts[start:start + NFFT])
#xns.append(ts_window)
#stack the new readings in upper half array and transpose to horizontal
N=len(Pxx)
Pxx = 2/N*np.abs(Pxx[N//2:N-1])
xns.append(Pxx)
specX = np.array(xns).T
# rescale the absolute value of the spectrogram as rescaling is standard
spec = 20*np.log10(specX)
assert spec.shape[1] == len(starts)
return(starts,spec)
def plot_spectrogram(self,spec,ks,sample_rate, L, starts,tslen, mappable = None):
plt.figure(figsize=(7.5,3))
rlow = int(L*LRANGE/sample_rate)
rhigh = int(L*URANGE/sample_rate)
specshow = spec[rlow:rhigh,]
plt_spec = plt.imshow(specshow,origin='lower', cmap="twilight_r")
## create ylim
Nyticks = 10
V=int(specshow.shape[0])
ks = np.linspace(0,V,Nyticks)
ksHz = self.get_Hz_scale_vec(ks,sample_rate,V*2)
plt.yticks(ks,ksHz)
plt.ylabel("Frequency (Hz)")
## create xlim
Nxticks = 10
ts_spec = np.linspace(0,spec.shape[1],Nxticks)
total_ts_sec=int(tslen/RATE)
ts_spec_sec = ["{:4.2f}".format(i) for i in np.linspace(0,total_ts_sec*starts[-1]/tslen,Nxticks)]
plt.xticks(ts_spec,ts_spec_sec)
plt.xlabel("Time (sec)")
plt.title("Spectrogram L={} Spectrogram.shape={}".format(L,spec.shape))
#plt.colorbar(mappable,use_gridspec=True)
plt.show()
return(plt_spec)
def get_Hz_scale_vec(self,ks,sample_rate,Npoints):
maxrange=sample_rate/2
freq_Hz = ks*sample_rate/Npoints*(URANGE-LRANGE)/maxrange+LRANGE
freq_Hz = [int(i) for i in freq_Hz ]
return(freq_Hz )
# def alt_spectrogram(self,ts,sample_rate):
# dt = 1/sample_rate
# t = np.arange(0.0, COLLECTSEC, dt)
# NFFT = TRACK # the length of the windowing segments
# Fs = int(1.0 / dt) # the sampling frequency
# fig, (ax1, ax2) = plt.subplots(nrows=2)
# ax1.plot(t, ts)
# Pxx, freqs, bins, im = ax2.specgram(ts, NFFT=NFFT, Fs=Fs, noverlap=OVERLAP)
# return
def mainLoop(self):
ts=[]
#with self.stream:
while 1:
# Sometimes Input overflowed because of mouse events, ignore this
while (len(ts)<(RATE*COLLECTSEC)):
try:
#data = self.readData()
sddata=self.sdreadData()
data=sddata.reshape(-1)
#print(data)
except IOError:
continue
except Exception as e:
print("Exception:",e)
self.close()
try:
f, Pxx = self.get_spectrum(data)
self.specItem.plot(x=f,y=Pxx, clear=True)
QtGui.QApplication.processEvents()
ts=np.concatenate((ts,data))
except Exception as e:
print("exception = ",e)
print(len(data))
print(len(Pxx)," Pxx shape= ",Pxx.shape)
print("ts len",len(ts))
self.close()
break
            L = TRACK
            noverlap = OVERLAP
            Nxlim = 10
            sample_rate = RATE
            ks = np.linspace(0, len(Pxx), Nxlim)
starts, spec = self.create_spectrogram(ts,L,noverlap = noverlap )
tslen=len(ts)
self.plot_spectrogram(spec,ks,sample_rate,L, starts,tslen)
#self.alt_spectrogram(ts,sample_rate)
self.close()
if __name__ == '__main__':
sa = SpectrumAnalyzer()
sa.mainLoop()
sa.close()
|
from karel.kareldefinitions import *
class Karel():
def __init__(self, world):
self._world = world
self._avenue, self._street = self._world.karel_starting_location
self._direction = self._world.karel_starting_direction
self._num_beepers = self._world.karel_starting_beeper_count
@property
def avenue(self):
return self._avenue
@avenue.setter
def avenue(self, val):
self._avenue = val
@property
def street(self):
return self._street
@street.setter
def street(self, val):
self._street = val
@property
def direction(self):
return self._direction
@direction.setter
def direction(self, val):
self._direction = val
@property
def num_beepers(self):
return self._num_beepers
@num_beepers.setter
def num_beepers(self, val):
self._num_beepers = val
def reset_state(self):
self._avenue, self._street = self._world.karel_starting_location
self._direction = self._world.karel_starting_direction
self._num_beepers = self._world.karel_starting_beeper_count
def move(self):
if not self.front_is_clear():
raise KarelException(self._avenue, self._street, self._direction,
"Karel attempted to move, but its front was blocked.")
delta_avenue, delta_street = DIRECTION_DELTA_MAP[self._direction]
self._avenue += delta_avenue
self._street += delta_street
def turn_left(self):
self._direction = NEXT_DIRECTION_MAP[self._direction]
def put_beeper(self):
if self._num_beepers == 0:
raise KarelException(self._avenue, self._street, self._direction,
"Karel attempted to put a beeper, but it had none left in its bag.")
self._num_beepers -= 1
self._world.add_beeper(self._avenue, self._street)
def pick_beeper(self):
if not self.on_beeper():
raise KarelException(self._avenue, self._street, self._direction,
"Karel attempted to pick up a beeper, but there were none on the current corner.")
self._num_beepers += 1
self._world.remove_beeper(self._avenue, self._street)
def front_is_clear(self):
return self.direction_is_clear(self._direction)
def direction_is_clear(self, direction):
delta_avenue, delta_street = DIRECTION_DELTA_MAP[direction]
next_avenue = self._avenue + delta_avenue
next_street = self._street + delta_street
opposite_direction = NEXT_DIRECTION_MAP[NEXT_DIRECTION_MAP[direction]]
# front is not clear if we are about to go out of bounds
if not self._world.in_bounds(next_avenue, next_street):
return False
# front is not clear if wall exists in same direction of where we're currently facing
if self._world.wall_exists(self._avenue, self._street, direction):
return False
# must also check for alternate possible representation of wall
if self._world.wall_exists(next_avenue, next_street, opposite_direction):
return False
# If all previous conditions checked out, then the front is clear
return True
def front_is_blocked(self):
return not self.front_is_clear()
def left_is_clear(self):
return self.direction_is_clear(NEXT_DIRECTION_MAP[self._direction])
def left_is_blocked(self):
return not self.left_is_clear()
def right_is_clear(self):
return self.direction_is_clear(NEXT_DIRECTION_MAP_RIGHT[self._direction])
def right_is_blocked(self):
return not self.right_is_clear()
def on_beeper(self):
return self._world.beepers[(self.avenue, self.street)] != 0
def beepers_in_bag(self):
return self._num_beepers > 0
def facing_north(self):
return self.direction == Direction.NORTH
def facing_east(self):
return self.direction == Direction.EAST
def facing_west(self):
return self.direction == Direction.WEST
def facing_south(self):
return self.direction == Direction.SOUTH
def paint_corner(self, color):
if color not in COLOR_MAP.values():
raise KarelException(self._avenue, self._street, self._direction,
f"Karel attempted to paint the corner with color {color}, which is not valid.")
self._world.paint_corner(self.avenue, self.street, color)
def corner_color_is(self, color):
return self._world.corner_color(self.avenue, self.street) == color
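
# Hedged sketch (not part of the class): Karel has no built-in turn_right,
# but one can be composed from three left turns using only the public API.
def turn_right(karel):
    for _ in range(3):
        karel.turn_left()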
|
'''
@Author: Zhang Ruihan
@Date: 2019-10-28 01:01:52
@LastEditors : Zhang Ruihan
@LastEditTime : 2020-01-07 01:33:49
@Description: file content
'''
import numpy as np
import os
import torch
from torch.utils.data import TensorDataset,DataLoader
class ModelWrapper:
def __init__(self,model,batch_size = 128):
self.model = model
self.batch_size = batch_size
def get_feature(self,x,layer_name):
'''
get feature map from a given layer
'''
pass
def feature_predict(self,feature,layer_name = None):
'''
prediction from given feature maps
'''
pass
    def predict(self,x):
        '''
        prediction from given input
        '''
        pass
class PytorchModelWrapper(ModelWrapper):
def __init__(self,
model,
                 layer_dict = None,
predict_target = None,
input_channel_first = False, # True if input image is channel first
model_channel_first = True, #True if model use channel first
#switch_channel = None, #"f_to_l" or "l_to_f" if switch channel is required from loader to model
numpy_out = True,
input_size = [3,224,224], #model's input size
batch_size=128):#target: (layer_name,unit_nums)
super().__init__(model,batch_size)
        self.layer_dict = dict(layer_dict) if layer_dict else {}
        self.layer_dict.update(dict(model.named_children()))
self.predict_target = predict_target
self.input_channel = 'f' if input_channel_first else 'l'
self.model_channel = 'f' if model_channel_first else 'l'
self.numpy_out = numpy_out
self.input_size = list(input_size)
self.non_negative = False
self.CUDA = torch.cuda.is_available()
def _to_tensor(self,x):
if type(x) == np.ndarray:
x = torch.from_numpy(x)
x = torch.clone(x)
if x.ndim == 3:
x = x.unsqueeze(0)
return x
def _switch_channel_f_to_l(self,x): #transform from channel first to channel last
if x.ndim == 3:
x = x.permute(1,2,0)
if x.ndim == 4:
x = x.permute(0,2,3,1)
return x
def _switch_channel_l_to_f(self,x): #transform from channel last to channel first
if x.ndim == 3:
x = x.permute(2,0,1)
if x.ndim == 4:
x = x.permute(0,3,1,2)
return x
def _switch_channel(self,x,layer_in='input',layer_out='output',to_model=True):
c_from = None
c_to = None
if to_model:
c_from = self.input_channel if layer_in == 'input' else 'l'
c_to = self.model_channel
else:
c_from = self.model_channel
c_to = 'l'
#print (x.shape,c_from,c_to,layer_in,layer_out,to_model)
if c_from == 'f' and c_to == 'l':
x = self._switch_channel_f_to_l(x)
if c_from == 'l' and c_to == 'f':
x = self._switch_channel_l_to_f(x)
return x
def _fun(self,x,layer_in = "input",layer_out = "output"):
        # tensor in (CPU) -> tensor out (CPU)
        x = x.float()
        # data_in must exist for hook_in even when layer_in != "input"
        data_in = x.clone()
        if self.CUDA:
            data_in = data_in.cuda()
data_out = []
handles = []
def hook_in(m,i,o):
return data_in
def hook_out(m,i,o):
data_out.append(o)
if layer_in == "input":
nx = x
else:
handles.append(self.layer_dict[layer_in].register_forward_hook(hook_in))
nx = torch.zeros([x.size()[0]]+self.input_size)
if not layer_out == "output":
handles.append(self.layer_dict[layer_out].register_forward_hook(hook_out))
if self.CUDA:
nx = nx.cuda()
with torch.no_grad():
ny = self.model(nx)
#print(data_out)
if layer_out == "output":
data_out = ny
else:
data_out = data_out[0]
data_out = data_out.cpu()
for handle in handles:
handle.remove()
if self.non_negative:
data_out = torch.relu(data_out)
return data_out
def _batch_fn(self,x,layer_in = "input",layer_out = "output"):
#numpy in numpy out
if type(x) == torch.Tensor or type(x) == np.ndarray:
x = self._to_tensor(x)
dataset = TensorDataset(x)
x = DataLoader(dataset,batch_size=self.batch_size)
out = []
for nx in x:
nx = nx[0]
nx = self._switch_channel(nx,layer_in=layer_in,layer_out=layer_out,to_model=True)
out.append(self._fun(nx,layer_in,layer_out))
res = torch.cat(out,0)
res = self._switch_channel(res,layer_in=layer_in,layer_out=layer_out,to_model=False)
if self.numpy_out:
res = res.detach().numpy()
return res
def set_predict_target(self,predict_target):
self.predict_target = predict_target
def get_feature(self,x,layer_name):
if layer_name not in self.layer_dict:
print ("Target layer not exists")
return None
out = self._batch_fn(x,layer_out = layer_name)
return out
def feature_predict(self,feature,layer_name = None):
if layer_name not in self.layer_dict:
print ("Target layer not exists")
return None
out = self._batch_fn(feature,layer_in = layer_name)
if self.predict_target is not None:
out = out[:,self.predict_target]
return out
def predict(self,x):
out = self._batch_fn(x)
if self.predict_target is not None:
out = out[:,self.predict_target]
return out
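
if __name__ == "__main__":
    # Hedged usage sketch (assumes torchvision is installed; the layer name
    # "layer4" comes from torchvision's resnet18, everything else from this
    # module). The wrapper returns channel-last numpy arrays by default.
    from torchvision.models import resnet18
    net = resnet18().eval()
    wrapper = PytorchModelWrapper(net, input_channel_first=True,
                                  input_size=[3, 224, 224], batch_size=8)
    x = np.random.rand(4, 3, 224, 224).astype(np.float32)  # channel-first batch
    feats = wrapper.get_feature(x, "layer4")  # channel-last feature maps
    preds = wrapper.predict(x)                # (4, 1000) logits
    print(feats.shape, preds.shape)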
|
##############################################################################
#
# Unit tests for beamsplitter operations
# Convention: The beamsplitter operation transforms
# \hat{a} -> t \hat{a} + r \hat{b}
# \hat{b} -> - r^* \hat{a} + t^* \hat{b}
# where \hat{a}, \hat{b} are the photon creation operators of the two modes
# Equivalently, we have t:=\cos(\theta) (t assumed real) and r:=\exp{i\phi}\sin(\theta)
#
##############################################################################
import unittest
import os, sys
sys.path.append(os.getcwd())
import numpy as np
from scipy.special import factorial
from defaults import BaseTest, FockBaseTest
phase_alphas = np.linspace(0, 2 * np.pi, 3, endpoint=False) + np.pi / 13
t_values = np.linspace(0., 1., 3)
phase_r = np.linspace(0, 2 * np.pi, 3, endpoint=False)
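# Sanity-check sketch of the convention in the header comment (nothing beyond
# numpy assumed): with t = cos(theta) real and r = exp(i*phi)*sin(theta), the
# mode-transformation matrix [[t, r], [-conj(r), conj(t)]] is unitary.
_theta, _phi = 0.3, 0.7
_t = np.cos(_theta)
_r = np.exp(1j * _phi) * np.sin(_theta)
_U = np.array([[_t, _r], [-np.conj(_r), np.conj(_t)]])
assert np.allclose(_U.conj().T @ _U, np.eye(2))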
###################################################################
class BasicTests(BaseTest):
"""Basic implementation-independent tests."""
num_subsystems = 2
def setUp(self):
super().setUp()
self.mag_alphas = np.linspace(0., self.args.alpha, 3)
def test_vacuum_beamsplitter(self):
"""Tests beamsplitter operation in some limiting cases where the output
should be the vacuum in both modes."""
for t in t_values:
for r_phi in phase_r:
r = np.exp(1j * r_phi) * np.sqrt(1. - np.abs(t) ** 2)
self.circuit.reset(pure=self.kwargs['pure'])
self.circuit.beamsplitter(t, r, 0, 1)
                self.assertAllTrue(self.circuit.is_vacuum(self.tol), msg="Test failed for t={}, r_phi={}.".format(t, r_phi))
def test_coherent_vacuum_interfered(self):
"""Tests if a range of beamsplitter output states (formed from a coherent state interfering with vacuum)
have the correct fidelity with the expected coherent states outputs.
|\psi_in> = |\alpha>|0> --> |t \alpha>|r \alpha> = |\psi_out>
and for each output mode,
|\gamma> = exp(-0.5 |\gamma|^2) \sum_n \gamma^n / \sqrt{n!} |n>"""
phase_alpha = np.pi / 5
for mag_alpha in self.mag_alphas[1:]:
alpha = mag_alpha * np.exp(1j * phase_alpha)
for t in t_values:
for r_phi in phase_r:
r = np.exp(1j * r_phi) * np.sqrt(1. - np.abs(t) ** 2)
self.circuit.reset(pure=self.kwargs['pure'])
self.circuit.displacement(alpha, 0)
self.circuit.beamsplitter(t, r, 0, 1)
alpha_outA = t * alpha
alpha_outB = r * alpha
state = self.circuit.state()
fidel = state.fidelity_coherent([alpha_outA, alpha_outB])
                    self.assertAllAlmostEqual(fidel, 1, delta=self.tol, msg="Test failed for t={}, r_phi={}.".format(t, r_phi))
class FockBasisTests(FockBaseTest):
"""Tests for simulators that use Fock basis."""
num_subsystems = 2
def setUp(self):
super().setUp()
self.mag_alphas = np.linspace(0., self.args.alpha, 3)
def test_normalized_beamsplitter_output(self):
"""Tests if a range of beamsplitter outputs states are normalized."""
alpha = self.args.alpha * np.exp(1j * np.pi / 3)
for t in t_values:
for r_phi in phase_r:
r = np.exp(1j * r_phi) * np.sqrt(1. - np.abs(t) ** 2)
self.circuit.reset(pure=self.kwargs['pure'])
self.circuit.displacement(alpha, 1)
self.circuit.beamsplitter(t, r, 0, 1)
state = self.circuit.state()
tr = state.trace()
                self.assertAllAlmostEqual(tr, 1, delta=self.tol, msg="Test failed for t={}, r_phi={}.".format(t, r_phi))
def test_coherent_vacuum_interfered_fock_elements(self):
"""Tests if a range of beamsplitter output states (formed from a coherent state interfering with vacuum)
have the correct Fock basis elements.
|\psi_in> = |\alpha>|0> --> |t \alpha>|r \alpha> = |\psi_out>
and for each output mode,
|\gamma> = exp(-0.5 |\gamma|^2) \sum_n \gamma^n / \sqrt{n!} |n>"""
phase_alpha = np.pi / 5
for mag_alpha in self.mag_alphas[1:]:
alpha = mag_alpha * np.exp(1j * phase_alpha)
for t in t_values:
for r_phi in phase_r:
r = np.exp(1j * r_phi) * np.sqrt(1. - np.abs(t) ** 2)
self.circuit.reset(pure=self.kwargs['pure'])
self.circuit.displacement(alpha, 0)
self.circuit.beamsplitter(t, r, 0, 1)
state = self.circuit.state()
if state.is_pure:
numer_state = state.ket()
else:
numer_state = state.dm()
alpha_outA = t * alpha
alpha_outB = r * alpha
ref_stateA = np.array([np.exp(-0.5 * np.abs(alpha_outA) ** 2) * alpha_outA ** n / np.sqrt(factorial(n)) for n in range(self.D)])
ref_stateB = np.array([np.exp(-0.5 * np.abs(alpha_outB) ** 2) * alpha_outB ** n / np.sqrt(factorial(n)) for n in range(self.D)])
ref_state = np.einsum('i,j->ij',ref_stateA, ref_stateB)
if not self.kwargs['pure']:
ref_state = np.einsum('i,j,k,l->ijkl', ref_stateA, np.conj(ref_stateA), ref_stateB, np.conj(ref_stateB))
                    self.assertAllAlmostEqual(numer_state, ref_state, delta=self.tol, msg="Test failed for t={}, r_phi={}.".format(t, r_phi))
if __name__=="__main__":
# run the tests in this file
suite = unittest.TestSuite()
for t in (BasicTests, FockBasisTests):
ttt = unittest.TestLoader().loadTestsFromTestCase(t)
suite.addTests(ttt)
unittest.TextTestRunner().run(suite)
|
#
# PySNMP MIB module A3Com-IPXpolicy-r3-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/A3COM-IPXPOLICY-R3-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:08:05 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ConstraintsIntersection, SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ConstraintsIntersection", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint")
MacAddress, = mibBuilder.importSymbols("RFC1286-MIB", "MacAddress")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Counter64, MibIdentifier, iso, TimeTicks, ModuleIdentity, Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, enterprises, ObjectIdentity, Unsigned32, IpAddress, Gauge32, NotificationType, Bits, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "MibIdentifier", "iso", "TimeTicks", "ModuleIdentity", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "enterprises", "ObjectIdentity", "Unsigned32", "IpAddress", "Gauge32", "NotificationType", "Bits", "Counter32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
a3Com = MibIdentifier((1, 3, 6, 1, 4, 1, 43))
brouterMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 2))
a3ComIPXpol = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 2, 11))
a3ipxPolNbr = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 2, 11, 5))
class RowStatus(Integer32):
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))
namedValues = NamedValues(("active", 1), ("notInService", 2), ("notReady", 3), ("createAndGo", 4), ("createAndWait", 5), ("destroy", 6))
class IPXNET(OctetString):
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(4, 4)
fixedLength = 4
a3ipxPolControlTable = MibTable((1, 3, 6, 1, 4, 1, 43, 2, 11, 1), )
if mibBuilder.loadTexts: a3ipxPolControlTable.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolControlTable.setDescription('This table enables and disables the use of policies, policy attributes and the use of trusted neighbors and their attributes.')
a3ipxPolControlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 2, 11, 1, 1), ).setIndexNames((0, "A3Com-IPXpolicy-r3-MIB", "a3ipxPCPort"))
if mibBuilder.loadTexts: a3ipxPolControlEntry.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolControlEntry.setDescription('A single entry in the Policy Control Table')
a3ipxPCPort = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3ipxPCPort.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPCPort.setDescription('The port to which this Policy Control entry applies.')
a3ipxPCRteAdvCtl = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPCRteAdvCtl.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPCRteAdvCtl.setDescription("The object enables or disables the use of the 'advertisable' entries in the Route Policy Table.")
a3ipxPCRteAdvType = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normal", 1), ("inverse", 2))).clone('normal')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPCRteAdvType.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPCRteAdvType.setDescription("The object specifies whether only those routes listed as 'advertisable' in the Route Policy Table are advertised from this port (normal), or if only those routes that are not listed as 'advertisable' in the Route Policy Table are advertised (inverse).")
a3ipxPCRteRcvCtl = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPCRteRcvCtl.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPCRteRcvCtl.setDescription("The object enables or disables the use of the 'receivable' entries in the Route Policy Table.")
a3ipxPCRteRcvType = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normal", 1), ("inverse", 2))).clone('normal')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPCRteRcvType.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPCRteRcvType.setDescription("The object specifies whether only those routes listed as 'receivable' in the Route Policy Table are accepted from this port (normal), or if only those routes that are not listed as 'receivable' in the Route Policy Table are accepted (inverse).")
a3ipxPCSvcAdvCtl = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPCSvcAdvCtl.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPCSvcAdvCtl.setDescription("The object enables or disables the use of the 'advertisable' entries in the Service Policy Table.")
a3ipxPCSvcAdvType = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normal", 1), ("inverse", 2))).clone('normal')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPCSvcAdvType.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPCSvcAdvType.setDescription("The object specifies whether only those routes listed as 'advertisable' in the Service Policy Table are advertised from this port (normal), or if only those routes that are not listed as 'advertisable' in the Service Policy Table are advertised (inverse).")
a3ipxPCSvcRcvCtl = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPCSvcRcvCtl.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPCSvcRcvCtl.setDescription("The object enables or disables the use of the 'receivable' entries in the Service Policy Table.")
a3ipxPCSvcRcvType = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normal", 1), ("inverse", 2))).clone('normal')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPCSvcRcvType.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPCSvcRcvType.setDescription("The object specifies whether only those routes listed as 'receivable' in the Service Policy Table are accepted from this port (normal), or if only those routes that are not listed as 'receivable' in the Service Policy Table are accepted (inverse).")
a3ipxPCNbrAdvCtl = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPCNbrAdvCtl.setStatus('deprecated')
if mibBuilder.loadTexts: a3ipxPCNbrAdvCtl.setDescription('This object enables or disables the use of the entries in the Neighbor Policy Table that apply to neighbors to which routing and service reachability information is advertised. In other words, this object enables or disables the use of those entries in the Neighbor Policy Table for which a3ipxPolNbrType = advertise(2) or both(3). NOTE: this object is not supported by NETBuilders running sw version 8.0 and greater. Its functionality has been replaced by a3ipxRipPolNbrCtlTable and a3ipxSapPolNbrCtlTable.')
a3ipxPCNbrRcvCtl = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPCNbrRcvCtl.setStatus('deprecated')
if mibBuilder.loadTexts: a3ipxPCNbrRcvCtl.setDescription('The object enables or disables the use of the entries in the Neighbor Policy Table that apply to neighbors from which routing and service reachability information is allowed to be received. In other words, this object enables or disables the use of those entries in the Neighbor Policy Table for which a3ipxPolNbrType = receive(2) or both(3). NOTE: this object is not supported by NETBuilders running sw version 8.0 and greater. Its functionality has been replaced by a3ipxRipPolNbrCtlTable and a3ipxSapPolNbrCtlTable.')
a3ipxPCNbrRcvType = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normal", 1), ("inverse", 2))).clone('normal')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPCNbrRcvType.setStatus('deprecated')
if mibBuilder.loadTexts: a3ipxPCNbrRcvType.setDescription('If this object is set to normal(1), then route and service reachability information is accepted only from those neighbors listed in the Neighbor Policy Table for which a3ipxPolNbrType = receive(2) or both(3). If this object is set to inverse(2), then route and service reachability information is accepted from any neighbor except those neighbors listed in the Neighbor Policy Table for which a3ipxPolNbrType = receive(2) or both(3). NOTE: this object is not supported by NETBuilders running sw version 8.0 and greater. Its functionality has been replaced by a3ipxRipPolNbrCtlTable and a3ipxSapPolNbrCtlTable.')
a3ipxPCPolicyOverride = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("policyOverride", 1), ("noPolicyOverride", 2))).clone('noPolicyOverride')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPCPolicyOverride.setStatus('deprecated')
if mibBuilder.loadTexts: a3ipxPCPolicyOverride.setDescription('Policy Override is used to override the configured policies when the router issues responses to specific RIP or SAP requests. The configured policies are always applied for regular RIP or SAP updates. NOTE: this object is no longer supported by NETBuilders running sw version 8.0 and greater. Its functionality has been replaced by a3ipxPCRipPolOverride and a3ipxPCSapPolOverride.')
a3ipxPCSvrRplyCtl = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("bestSvrReply", 1), ("noBestSvrReply", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPCSvrRplyCtl.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPCSvrRplyCtl.setDescription("This controls whether or not the router is permitted to respond to 'get nearest server' requests. If this object has the value bestSvrReply(1), then this router is permitted to respond to these requests. If this object has the value noBestSvrReply(2), then this router is not permitted to respond to these requests.")
a3ipxPCRipPolOverride = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("policyOverride", 1), ("noPolicyOverride", 2))).clone('noPolicyOverride')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPCRipPolOverride.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPCRipPolOverride.setDescription('RIP Policy Override is used to override the configured RIP policies when the router issues responses to specific RIP requests. The configured policies are always applied for regular RIP updates.')
a3ipxPCSapPolOverride = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("policyOverride", 1), ("noPolicyOverride", 2))).clone('noPolicyOverride')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPCSapPolOverride.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPCSapPolOverride.setDescription('SAP Policy Override is used to override the configured policies when the router issues responses to specific SAP requests. The configured policies are always applied for regular SAP updates.')
a3ipxPolRteTable = MibTable((1, 3, 6, 1, 4, 1, 43, 2, 11, 2), )
if mibBuilder.loadTexts: a3ipxPolRteTable.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolRteTable.setDescription("The IPX Route Policy Table controls the acceptance of route reachability information from neighbors and the advertisement of route reachability information to neighbors. If a3ipxPCRteRcvType is configured as normal, a route is accepted if the network number for the route is in the list of 'receivable' networks specified in this table. If a3ipxPCRteRcvType is configured as inverse, a route is not accepted if the network number for the route is in the list of 'receivable' networks specified in this table. If a3ipxPCRteAdvType is configured as normal, a route is advertised if the network number for the route is in the list of 'advertisable' networks specified in this table. If a3ipxPCRteAdvType is configured as inverse, a route is not advertised if the network number for the route is in the list of 'advertisable' networks specified in this table. Note, since this table allows ranges of routes to be specified, all the entries should be examined when determining if a specific route is affected by these policies. It is not sufficient to search for an instance of a3ipxPolRteNet1 that matches the route in question.")
a3ipxPolRteEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 2, 11, 2, 1), ).setIndexNames((0, "A3Com-IPXpolicy-r3-MIB", "a3ipxPolRtePort"), (0, "A3Com-IPXpolicy-r3-MIB", "a3ipxPolRteNet1"), (0, "A3Com-IPXpolicy-r3-MIB", "a3ipxPolRteNet2"))
if mibBuilder.loadTexts: a3ipxPolRteEntry.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolRteEntry.setDescription('A single entry in the Route Policy table')
a3ipxPolRtePort = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3ipxPolRtePort.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolRtePort.setDescription('The IPX port to which this Receive Route Policy entry applies.')
a3ipxPolRteType = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("advertise", 1), ("receive", 2), ("both", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPolRteType.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolRteType.setDescription('This object specifies whether this entry applies to receiving routes, advertising routes, or both.')
a3ipxPolRteNet1 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 2, 1, 3), IPXNET()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3ipxPolRteNet1.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolRteNet1.setDescription('The IPX Network Number for which route information is accepted or rejected for this port. The value 0xfffffffe is used to represent the default route.')
a3ipxPolRteNet2 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 2, 1, 4), IPXNET()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3ipxPolRteNet2.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolRteNet2.setDescription('When the value of this object is a valid IPX network and different from a3ipxPolRteNet1, then this table entry specifies a range of IPX networks, with the value of a3ipxPolRteNet1 identifying the lower end of the range. If the value of this object is the same as a3ipxPolRteNet1, then this table entry specifies a single IPX network identified by the value of a3ipxPolRteNet1.')
a3ipxPolRteStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 2, 1, 5), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPolRteStatus.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolRteStatus.setDescription('This object is used to add and delete entries in this table. See the notes describing RowStatus at the beginning of this MIB.')
a3ipxPolSvcTable = MibTable((1, 3, 6, 1, 4, 1, 43, 2, 11, 3), )
if mibBuilder.loadTexts: a3ipxPolSvcTable.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolSvcTable.setDescription("The IPX Policy Services Table controls the acceptance of service reachability information from neighbors and the advertisement of that information to neighbors. If a3ipxPCSvcRcvType is configured as normal, a service is accepted if its port number, server name, and service type is in the list of tuples specified as 'receivable' by this table. If a3ipxPCSvcRcvType is configured as inverse, a service is not accepted if its port number, server name, and service type is in the list of tuples specified as 'receivable' by this table. If a3ipxPCSvcAdvType is configured as normal, a service is advertised if its network port, server name, and service type is in the list of tuples specified as 'advertisable' by this table. If a3ipxPCSvcAdvType is configured as inverse, a service is not advertised if its port number, server name, and service type is in the list of tuples specified as 'advertisable' by this table.")
a3ipxPolSvcEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 2, 11, 3, 1), ).setIndexNames((0, "A3Com-IPXpolicy-r3-MIB", "a3ipxPolSvcPort"), (0, "A3Com-IPXpolicy-r3-MIB", "a3ipxPolSvcSvrName"), (0, "A3Com-IPXpolicy-r3-MIB", "a3ipxPolSvcType"))
if mibBuilder.loadTexts: a3ipxPolSvcEntry.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolSvcEntry.setDescription('A single entry in the Receive Services Policy table.')
a3ipxPolSvcPort = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3ipxPolSvcPort.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolSvcPort.setDescription('The IPX port to which this Receive Svc Pol entry applies.')
a3ipxPolSvcEntryType = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("advertise", 1), ("receive", 2), ("both", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPolSvcEntryType.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolSvcEntryType.setDescription('This object specifies whether this entry applies to receiving services, advertising services, or both.')
a3ipxPolSvcSvrName = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 3, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 15))).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3ipxPolSvcSvrName.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolSvcSvrName.setDescription('This object contains the name of a server. A single wildcard (*) is allowed at the end of the name.')
a3ipxPolSvcType = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 3, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(2, 2)).setFixedLength(2)).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3ipxPolSvcType.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolSvcType.setDescription("The type of service for which information is accepted or rejected by this port. The value 'FFFF' acts as a wildcard for all service types.")
a3ipxPolSvcStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 3, 1, 5), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPolSvcStatus.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolSvcStatus.setDescription('This object is used to add and delete entries in this table. See the notes describing RowStatus at the beginning of this MIB.')
a3ipxPolNbrTable = MibTable((1, 3, 6, 1, 4, 1, 43, 2, 11, 4), )
if mibBuilder.loadTexts: a3ipxPolNbrTable.setStatus('deprecated')
if mibBuilder.loadTexts: a3ipxPolNbrTable.setDescription('This table defines a set of neighbors to which the route and service reachability information is advertised. This table also defines a set of neighbors from which route and service reachability information is accepted. NOTE: this table is not supported by NETBuilders running sw version 8.0 and greater. The functionality provided by this table has been replaced by a3ipxPolRipNbrTable and a3ipxPolSapNbrTable.')
a3ipxPolNbrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 2, 11, 4, 1), ).setIndexNames((0, "A3Com-IPXpolicy-r3-MIB", "a3ipxPolNbrPort"), (0, "A3Com-IPXpolicy-r3-MIB", "a3ipxPolNbrNet"), (0, "A3Com-IPXpolicy-r3-MIB", "a3ipxPolNbrAddress"))
if mibBuilder.loadTexts: a3ipxPolNbrEntry.setStatus('deprecated')
if mibBuilder.loadTexts: a3ipxPolNbrEntry.setDescription('A single entry in the Policy Neighbor Table')
a3ipxPolNbrPort = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3ipxPolNbrPort.setStatus('deprecated')
if mibBuilder.loadTexts: a3ipxPolNbrPort.setDescription('The IPX port over which the Trusted Neighbor identified by this table entry can be reached.')
a3ipxPolNbrType = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 4, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("advertise", 1), ("receive", 2), ("both", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPolNbrType.setStatus('deprecated')
if mibBuilder.loadTexts: a3ipxPolNbrType.setDescription('This object specifies whether this entry applies to receiving information, advertising information, or both.')
a3ipxPolNbrNet = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 4, 1, 3), IPXNET()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3ipxPolNbrNet.setStatus('deprecated')
if mibBuilder.loadTexts: a3ipxPolNbrNet.setDescription('The IPX Network number identifying the network to which the neighbor identified by this table entry is attached.')
a3ipxPolNbrAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 4, 1, 4), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3ipxPolNbrAddress.setStatus('deprecated')
if mibBuilder.loadTexts: a3ipxPolNbrAddress.setDescription('The Mac Address of the Neighbor to whom route and service reachability information is to be advertised and from which route and service reachability information is accepted.')
a3ipxPolNbrStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 4, 1, 5), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPolNbrStatus.setStatus('deprecated')
if mibBuilder.loadTexts: a3ipxPolNbrStatus.setDescription('This object is used to add and delete entries in this table. See the notes describing RowStatus at the beginning of this MIB.')
a3ipxPolRipNbrCtlTable = MibTable((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 1), )
if mibBuilder.loadTexts: a3ipxPolRipNbrCtlTable.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolRipNbrCtlTable.setDescription('This table enables or disables the use of the entries in a3ipxPolRipNbrTable. If the use of the entries in that table is enabled, this table also controls how they are interpreted. In one case, RIP updates are accepted only from those IPX Neighbors listed in a3ipxPolRipNbrTable. In the other case, RIP updates are accepted only from those IPX Neighbors not listed in a3ipxPolRipNbrTable.')
a3ipxPolRipNbrCtlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 1, 1), ).setIndexNames((0, "A3Com-IPXpolicy-r3-MIB", "a3ipxPolRipNbrCtlPort"))
if mibBuilder.loadTexts: a3ipxPolRipNbrCtlEntry.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolRipNbrCtlEntry.setDescription('Each entry in this table applies to a separate NETBuilder port and controls those entries in a3ipxPolRipNbrTable that correspond to the same port.')
a3ipxPolRipNbrCtlPort = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3ipxPolRipNbrCtlPort.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolRipNbrCtlPort.setDescription('The NETBuilder port to which this RIP Neighbor Control entry applies.')
a3ipxPolRipNbrCtlAdvCtl = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPolRipNbrCtlAdvCtl.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolRipNbrCtlAdvCtl.setDescription('This object enables or disables the use of the entries in the RIP Neighbor Policy Table (a3ipxPolRipNbrTable) that apply to neighbors to which routing information is advertised. In other words, this object enables or disables the use of those entries in the Neighbor Policy Table for which a3ipxPolRipNbrType = advertise(1) or both(3).')
a3ipxPolRipNbrCtlRcvCtl = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPolRipNbrCtlRcvCtl.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolRipNbrCtlRcvCtl.setDescription('This object enables or disables the use of the entries in the Neighbor Policy Table that apply to neighbors from which routing information is allowed to be received. In other words, this object enables or disables the use of those entries in the Neighbor Policy Table for which a3ipxPolRipNbrType = receive(2) or both(3).')
a3ipxPolRipNbrCtlRcvType = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normal", 1), ("inverse", 2))).clone('normal')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPolRipNbrCtlRcvType.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolRipNbrCtlRcvType.setDescription('If this object is set to normal(1), then route information is accepted only from those neighbors listed in the Neighbor Policy Table for which a3ipxPolRipNbrType = receive(2) or both(3). If this object is set to inverse(2), then route information is accepted from any neighbor except those neighbors listed in the Neighbor Policy Table for which a3ipxPolRipNbrType = receive(2) or both(3).')
a3ipxPolRipNbrTable = MibTable((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 2), )
if mibBuilder.loadTexts: a3ipxPolRipNbrTable.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolRipNbrTable.setDescription('This table defines a set of neighbors to which the route information is advertised. This table also defines a set of neighbors from which route information is accepted.')
a3ipxPolRipNbrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 2, 1), ).setIndexNames((0, "A3Com-IPXpolicy-r3-MIB", "a3ipxPolRipNbrPort"), (0, "A3Com-IPXpolicy-r3-MIB", "a3ipxPolRipNbrNet"), (0, "A3Com-IPXpolicy-r3-MIB", "a3ipxPolRipNbrAddress"))
if mibBuilder.loadTexts: a3ipxPolRipNbrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolRipNbrEntry.setDescription('A single entry in the Policy Neighbor Table')
a3ipxPolRipNbrPort = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3ipxPolRipNbrPort.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolRipNbrPort.setDescription('The IPX port over which the Trusted Neighbor identified by this table entry can be reached.')
a3ipxPolRipNbrType = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("advertise", 1), ("receive", 2), ("both", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPolRipNbrType.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolRipNbrType.setDescription('This object specifies whether this entry applies to receiving information, advertising information, or both.')
a3ipxPolRipNbrNet = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 2, 1, 3), IPXNET()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3ipxPolRipNbrNet.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolRipNbrNet.setDescription('The IPX Network number identifying the network to which the neighbor identified by this table entry is attached.')
a3ipxPolRipNbrAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 2, 1, 4), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3ipxPolRipNbrAddress.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolRipNbrAddress.setDescription('The Mac Address of the Neighbor to whom route information is to be advertised and from which route information is accepted.')
a3ipxPolRipNbrStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 2, 1, 5), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPolRipNbrStatus.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolRipNbrStatus.setDescription('This object is used to add and delete entries in this table. See the notes describing RowStatus at the beginning of this MIB.')
a3ipxPolSapNbrCtlTable = MibTable((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 3), )
if mibBuilder.loadTexts: a3ipxPolSapNbrCtlTable.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolSapNbrCtlTable.setDescription('This table enables or disables the use of the entries in a3ipxPolSapNbrTable. If the use of the entries in that table is enabled, this table also controls how they are interpreted. In one case, SAP updates are accepted only from those IPX Neighbors listed in a3ipxPolSapNbrTable. In the other case, SAP updates are accepted only from those IPX Neighbors not listed in a3ipxPolSapNbrTable.')
a3ipxPolSapNbrCtlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 3, 1), ).setIndexNames((0, "A3Com-IPXpolicy-r3-MIB", "a3ipxPolSapNbrCtlPort"))
if mibBuilder.loadTexts: a3ipxPolSapNbrCtlEntry.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolSapNbrCtlEntry.setDescription('Each entry in this table applies to a separate NETBuilder port and controls those entries in a3ipxPolSapNbrTable that correspond to the same port.')
a3ipxPolSapNbrCtlPort = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3ipxPolSapNbrCtlPort.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolSapNbrCtlPort.setDescription('The NETBuilder port to which this SAP Neighbor Control entry applies.')
a3ipxPolSapNbrCtlAdvCtl = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPolSapNbrCtlAdvCtl.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolSapNbrCtlAdvCtl.setDescription('This object enables or disables the use of the entries in the SAP Neighbor Policy Table (a3ipxPolSapNbrTable) that apply to neighbors to which service reachability information is advertised. In other words, this object enables or disables the use of those entries in the Neighbor Policy Table for which a3ipxPolSapNbrType = advertise(1) or both(3).')
a3ipxPolSapNbrCtlRcvCtl = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 3, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPolSapNbrCtlRcvCtl.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolSapNbrCtlRcvCtl.setDescription('This object enables or disables the use of the entries in the Neighbor Policy Table that apply to neighbors from which service reachability information is allowed to be received. In other words, this object enables or disables the use of those entries in the Neighbor Policy Table for which a3ipxPolSapNbrType = receive(2) or both(3).')
a3ipxPolSapNbrCtlRcvType = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normal", 1), ("inverse", 2))).clone('normal')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPolSapNbrCtlRcvType.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolSapNbrCtlRcvType.setDescription('If this object is set to normal(1), then service reachability information is accepted only from those neighbors listed in the Neighbor Policy Table for which a3ipxPolSapNbrType = receive(2) or both(3). If this object is set to inverse(2), then service reachability information is accepted from any neighbor except those neighbors listed in the Neighbor Policy Table for which a3ipxPolSapNbrType = receive(2) or both(3).')
a3ipxPolSapNbrTable = MibTable((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 4), )
if mibBuilder.loadTexts: a3ipxPolSapNbrTable.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolSapNbrTable.setDescription('This table defines a set of neighbors to which the service reachability information is advertised. This table also defines a set of neighbors from which service reachability information is accepted.')
a3ipxPolSapNbrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 4, 1), ).setIndexNames((0, "A3Com-IPXpolicy-r3-MIB", "a3ipxPolSapNbrPort"), (0, "A3Com-IPXpolicy-r3-MIB", "a3ipxPolSapNbrNet"), (0, "A3Com-IPXpolicy-r3-MIB", "a3ipxPolSapNbrAddress"))
if mibBuilder.loadTexts: a3ipxPolSapNbrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolSapNbrEntry.setDescription('A single entry in the Policy Neighbor Table')
a3ipxPolSapNbrPort = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3ipxPolSapNbrPort.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolSapNbrPort.setDescription('The IPX port over which the Trusted Neighbor identified by this table entry can be reached.')
a3ipxPolSapNbrType = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 4, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("advertise", 1), ("receive", 2), ("both", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPolSapNbrType.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolSapNbrType.setDescription('This object specifies whether this entry applies to receiving information, advertising information, or both.')
a3ipxPolSapNbrNet = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 4, 1, 3), IPXNET()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3ipxPolSapNbrNet.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolSapNbrNet.setDescription('The IPX Network number identifying the network to which the neighbor identified by this table entry is attached.')
a3ipxPolSapNbrAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 4, 1, 4), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: a3ipxPolSapNbrAddress.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolSapNbrAddress.setDescription('The Mac Address of the Neighbor to whom service reachability information is to be advertised and from which service reachability information is accepted.')
a3ipxPolSapNbrStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 2, 11, 5, 4, 1, 5), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: a3ipxPolSapNbrStatus.setStatus('mandatory')
if mibBuilder.loadTexts: a3ipxPolSapNbrStatus.setDescription('This object is used to add and delete entries in this table. See the notes describing RowStatus at the beginning of this MIB.')
mibBuilder.exportSymbols("A3Com-IPXpolicy-r3-MIB", a3ipxPolSvcType=a3ipxPolSvcType, a3ipxPCNbrRcvType=a3ipxPCNbrRcvType, a3ipxPolNbrAddress=a3ipxPolNbrAddress, a3ipxPCSapPolOverride=a3ipxPCSapPolOverride, a3ipxPolRipNbrAddress=a3ipxPolRipNbrAddress, brouterMIB=brouterMIB, a3ipxPolSvcPort=a3ipxPolSvcPort, a3ipxPolSvcEntryType=a3ipxPolSvcEntryType, a3ipxPolRipNbrCtlEntry=a3ipxPolRipNbrCtlEntry, a3ipxPolSapNbrStatus=a3ipxPolSapNbrStatus, a3ipxPolSapNbrCtlRcvType=a3ipxPolSapNbrCtlRcvType, a3ipxPolRteStatus=a3ipxPolRteStatus, a3ipxPolSapNbrAddress=a3ipxPolSapNbrAddress, a3ipxPolSapNbrType=a3ipxPolSapNbrType, a3ipxPolRteNet1=a3ipxPolRteNet1, a3ipxPolSapNbrTable=a3ipxPolSapNbrTable, a3ipxPolRipNbrCtlRcvCtl=a3ipxPolRipNbrCtlRcvCtl, a3ipxPolRteType=a3ipxPolRteType, a3ipxPolSapNbrCtlPort=a3ipxPolSapNbrCtlPort, a3ipxPolSapNbrEntry=a3ipxPolSapNbrEntry, a3ipxPCPolicyOverride=a3ipxPCPolicyOverride, a3ComIPXpol=a3ComIPXpol, a3ipxPCSvrRplyCtl=a3ipxPCSvrRplyCtl, a3ipxPolSvcEntry=a3ipxPolSvcEntry, a3ipxPolSvcSvrName=a3ipxPolSvcSvrName, a3ipxPolNbrTable=a3ipxPolNbrTable, a3ipxPCNbrAdvCtl=a3ipxPCNbrAdvCtl, a3ipxPolNbrStatus=a3ipxPolNbrStatus, a3Com=a3Com, a3ipxPolRipNbrCtlAdvCtl=a3ipxPolRipNbrCtlAdvCtl, a3ipxPolNbrPort=a3ipxPolNbrPort, a3ipxPolSvcTable=a3ipxPolSvcTable, a3ipxPolRipNbrCtlPort=a3ipxPolRipNbrCtlPort, a3ipxPolRipNbrNet=a3ipxPolRipNbrNet, a3ipxPolRipNbrCtlRcvType=a3ipxPolRipNbrCtlRcvType, a3ipxPolRipNbrCtlTable=a3ipxPolRipNbrCtlTable, a3ipxPolSapNbrCtlAdvCtl=a3ipxPolSapNbrCtlAdvCtl, a3ipxPolNbrEntry=a3ipxPolNbrEntry, a3ipxPolNbrType=a3ipxPolNbrType, a3ipxPolRteEntry=a3ipxPolRteEntry, a3ipxPCSvcAdvCtl=a3ipxPCSvcAdvCtl, a3ipxPolSapNbrCtlRcvCtl=a3ipxPolSapNbrCtlRcvCtl, IPXNET=IPXNET, a3ipxPolNbrNet=a3ipxPolNbrNet, a3ipxPolRipNbrStatus=a3ipxPolRipNbrStatus, a3ipxPolControlTable=a3ipxPolControlTable, a3ipxPolSapNbrNet=a3ipxPolSapNbrNet, a3ipxPolRteNet2=a3ipxPolRteNet2, a3ipxPolRipNbrType=a3ipxPolRipNbrType, a3ipxPolNbr=a3ipxPolNbr, a3ipxPCSvcAdvType=a3ipxPCSvcAdvType, a3ipxPCRteRcvType=a3ipxPCRteRcvType, a3ipxPolRipNbrEntry=a3ipxPolRipNbrEntry, a3ipxPolRtePort=a3ipxPolRtePort, a3ipxPolRipNbrTable=a3ipxPolRipNbrTable, a3ipxPolSapNbrPort=a3ipxPolSapNbrPort, a3ipxPolSvcStatus=a3ipxPolSvcStatus, a3ipxPCRipPolOverride=a3ipxPCRipPolOverride, a3ipxPCRteAdvCtl=a3ipxPCRteAdvCtl, a3ipxPCRteRcvCtl=a3ipxPCRteRcvCtl, a3ipxPolSapNbrCtlTable=a3ipxPolSapNbrCtlTable, a3ipxPCSvcRcvCtl=a3ipxPCSvcRcvCtl, a3ipxPolRteTable=a3ipxPolRteTable, RowStatus=RowStatus, a3ipxPolControlEntry=a3ipxPolControlEntry, a3ipxPCRteAdvType=a3ipxPCRteAdvType, a3ipxPCPort=a3ipxPCPort, a3ipxPCSvcRcvType=a3ipxPCSvcRcvType, a3ipxPolRipNbrPort=a3ipxPolRipNbrPort, a3ipxPCNbrRcvCtl=a3ipxPCNbrRcvCtl, a3ipxPolSapNbrCtlEntry=a3ipxPolSapNbrCtlEntry)
|
import copy
from data_holder import State, Move, Robot, Action, Location
from simulation_data import get_sample
from utils import positive_list_difference
#SAMPLES DIAGNOSIS MOLECULES LABORATORY Start area
#SAMPLES 0 3 3 3 2
#DIAGNOSIS 3 0 3 4 2
#MOLECULES 3 3 0 3 2
#LABORATORY 3 4 3 0 2
#Start area 2 2 2 2 0
movement_matrix = [[0,3,3,3,2],
[3,0,3,4,2],
[3,3,0,3,2],
[3,4,3,0,2],
[2,2,2,2,0]]
def simulate_action(state: State, my_action: Move, enemy_action: Move):
""" Returns new game state after both actions are performed
:param state:
:param my_action:
:param enemy_action:
"""
simulate_player(state, state.robot_a, my_action)
simulate_player(state, state.robot_b, enemy_action)
def simulate_player(state: State, player: Robot, move: Move):
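    """Applies a single robot's move (movement, sample pickup, molecule
    collection, lab delivery, or diagnosis), then ticks down the robot's
    travel ETA and awards any newly completed science projects."""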
if player.eta == 0:
if move.action == Action.GOTO:
player.eta = movement_matrix[player.target.value][move.arg.value]
player.target = move.arg
elif move.action == Action.CONNECT:
if player.target == Location.SAMPLES and move.arg in [1,2,3]:
sample = get_sample(move.arg)
sample.carried_by = player.id
state.add_sample(sample)
elif player.target == Location.MOLECULES and move.arg in [1,2,3,4,5]:
if state.available_molecules[move.arg] <= 0:
return
state.available_molecules[move.arg] -= 1
player.storage[move.arg] += 1
elif player.target == Location.LABORATORY:
samples = list(filter(lambda s: s.id == move.arg, player.samples))
if len(samples) == 0:
return
sample = samples[0]
# difference = positive_list_difference(player.storage, sample.cost)
# if sum(difference) > 0:
# raise "Molecules not available for " + move.arg
player.storage = list(map(int.__sub__, player.storage, sample.cost))
player.score += sample.health
player.expertise[sample.exp.value] += 1
state.remove_sample(sample)
elif player.target == Location.DIAGNOSIS:
player_samples = list(filter(lambda s: s.id == move.arg, player.samples))
cloud_samples = list(filter(lambda s: s.id == move.arg, state.cloud_samples))
if len(player_samples) == 1:
sample = player_samples[0]
if sample.diagnosed:
state.remove_sample(sample)
sample.carried_by = -1
state.add_sample(sample)
else:
sample.cost = sample.cost_tmp
elif len(cloud_samples) == 1:
sample = cloud_samples[0]
if len(player.samples) < 3:
state.remove_sample(sample)
sample.carried_by = player.id
state.add_sample(sample)
if len(player_samples) == 0 and len(cloud_samples) == 0:
return
player.eta = max(0, player.eta - 1)
for project in state.projects:
if not project.completed:
difference = positive_list_difference(project.req_expertise, player.expertise)
if sum(difference) == 0:
player.score += 50
project.completed = True
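# Usage sketch (hypothetical; the exact State/Move constructors live in
# data_holder and are assumed here):
#
#     next_state = copy.deepcopy(current_state)
#     simulate_action(next_state,
#                     Move(Action.GOTO, Location.MOLECULES),
#                     Move(Action.CONNECT, 2))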
|
#!/usr/bin/env python
"""
_ListOpenByName_
MySQL implementation of Fileset.ListOpenByName
"""
__all__ = []
from WMCore.Database.DBFormatter import DBFormatter
class ListOpenByName(DBFormatter):
sql = "SELECT name FROM wmbs_fileset WHERE open = 1 AND name LIKE :name"
def format(self, results):
"""
_format_
Take the array of rows that were returned by the query and format that
into a single list of open fileset names.
"""
results = DBFormatter.format(self, results)
openFilesetNames = []
for result in results:
openFilesetNames.append(str(result[0]))
return openFilesetNames
def execute(self, name, conn = None, transaction = False):
result = self.dbi.processData(self.sql, binds = {"name": name},
conn = conn, transaction = transaction)
return self.format(result)
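# Usage sketch (an assumption; follows the usual WMCore DAOFactory pattern):
#
#     daoFactory = DAOFactory(package="WMCore.WMBS", logger=logger,
#                             dbinterface=myThread.dbi)
#     listOpen = daoFactory(classname="Fileset.ListOpenByName")
#     openFilesetNames = listOpen.execute(name="Run2012%")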
|
#!/usr/bin/env python
# Copyright (C) 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import argparse
import os.path
import json
import google.oauth2.credentials
#import RPi.GPIO as GPIO
from google.assistant.library import Assistant
from google.assistant.library.event import EventType
from google.assistant.library.file_helpers import existing_file
from pubnub.callbacks import SubscribeCallback
from pubnub.enums import PNStatusCategory
from pubnub.pnconfiguration import PNConfiguration
from pubnub.pubnub import PubNub
#GPIO.setmode(GPIO.BCM)
#GPIO.setup(25, GPIO.OUT)
global pubnub
#Pubnub Communication
def my_publish_callback(envelope, status):
# Check whether request successfully completed or not
if not status.is_error():
pass # Message successfully published to specified channel.
else:
pass # Handle message publish error. Check 'category' property to find out possible issue
# because of which request did fail.
# Request can be resent using: [status retry];
class MySubscribeCallback(SubscribeCallback):
def presence(self, pubnub, presence):
pass # handle incoming presence data
def status(self, pubnub, status):
if status.category == PNStatusCategory.PNUnexpectedDisconnectCategory:
pass # This event happens when radio / connectivity is lost
elif status.category == PNStatusCategory.PNConnectedCategory:
# Connect event. You can do stuff like publish, and know you'll get it.
# Or just use the connected event to confirm you are subscribed for
# UI / internal notifications, etc
pubnub.publish().channel("magicmirror").message("hello from python!!").async(my_publish_callback)
elif status.category == PNStatusCategory.PNReconnectedCategory:
pass
# Happens as part of our regular operation. This event happens when
# radio / connectivity is lost, then regained.
elif status.category == PNStatusCategory.PNDecryptionErrorCategory:
pass
# Handle message decryption error. Probably client configured to
# encrypt messages and on live data feed it received plain text.
def message(self, pubnub, message):
        print(message.message)
# pass # Handle new message stored in message.message
def init_pubnub():
global pubnub
pnconfig = PNConfiguration()
pnconfig.subscribe_key = 'sub-c-fc4f4ea8-e4cb-11e7-ab5b-be68b02b0975'
pnconfig.publish_key = 'pub-c-b7e0c189-0969-4105-857a-9fdf1466e1ee'
pubnub = PubNub(pnconfig)
pubnub.add_listener(MySubscribeCallback())
pubnub.subscribe().channels('magicmirror').execute()
    print('pubnub subscription completed')
#googleassistant events processing
def process_event(event):
"""Pretty prints events.
Prints all events that occur with two spaces between each new
conversation and a single space between turns of a conversation.
Args:
event(event.Event): The current event to process.
"""
if event.type == EventType.ON_CONVERSATION_TURN_STARTED:
pubnub.publish().channel("magicmirror").message("ON_CONVERSATION_TURN_STARTED").async(my_publish_callback)
print()
#GPIO.output(25,True)
if (event.type == EventType.ON_CONVERSATION_TURN_FINISHED and
event.args and not event.args['with_follow_on_turn']):
pubnub.publish().channel("magicmirror").message("ON_CONVERSATION_TURN_FINISHED").async(my_publish_callback)
print()
#GPIO.output(25,False)
if event.type == EventType.ON_RECOGNIZING_SPEECH_FINISHED:
pubnub.publish().channel("magicmirror").message("ON_RECOGNIZING_SPEECH_FINISHED : "+event.args['text']).async(my_publish_callback)
print()
def init_googleAssistant():
parser = argparse.ArgumentParser(
formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('--credentials', type=existing_file,
metavar='OAUTH2_CREDENTIALS_FILE',
default=os.path.join(
os.path.expanduser('/home/pi/.config'),
'google-oauthlib-tool',
'credentials.json'
),
help='Path to store and read OAuth2 credentials')
args = parser.parse_args()
with open(args.credentials, 'r') as f:
credentials = google.oauth2.credentials.Credentials(token=None,
**json.load(f))
with Assistant(credentials) as assistant:
for event in assistant.start():
process_event(event)
def main():
init_pubnub()
init_googleAssistant()
if __name__ == '__main__':
main()
|
from urllib.parse import quote_plus
from typing import Optional
from re import search
from ..http import Http
from ..constants import AUTHORIZE
from ..models import PreAuthResponse, UserLoginResponse
from ..errors import InvalidCredentials, TwoFactorAccount, MsMcAuthException
__all__ = ("Xbox",)
class Xbox:
""""Xbox requests handler.
Attributes
----------
http : Optional[:class:`Http`]
Http client.
"""
def __init__(self, http: Optional[Http] = None):
self.http = http
async def get_pre_auth(self) -> PreAuthResponse:
_, text = await self.http.request(AUTHORIZE, allow_redirects=True)
sft_tag = search(r"sFTTag:'(.*?)'", text).group(1)
flow_token = search(r"value=\"(.*?)\"", sft_tag).group(1)
post_url = search(r"urlPost:'(.+?(?=\'))", text).group(1)
        if flow_token is None or post_url is None:
raise MsMcAuthException("Couldn't extract sFTTag and urlPost")
return PreAuthResponse(flow_token=flow_token, post_url=post_url)
async def xbox_login(self, email: str, password: str, pre_auth: PreAuthResponse) -> UserLoginResponse:
"""Check user credentials.
Parameters
----------
email : :class:`str`
Email to log into.
password : :class:`str`
Password of the email to log into.
pre_auth : :class:`PreAuthResponse`
Pre auth response.
Returns
-------
user : :class:`UserLoginResponse`
User login response.
"""
data = f"login={self.encode(email)}&loginfmt={self.encode(email)}" \
f"&passwd={self.encode(password)}&PPFT={self.encode(pre_auth.flow_token)}"
res, text = await self.http.request(
pre_auth.post_url,
"POST", data=data,
headers={"Content-Type": "application/x-www-form-urlencoded"},
allow_redirects=True
)
if "access_token" not in str(res.real_url) or str(res.real_url) == pre_auth.post_url:
if "Sign in to" in str(text):
raise InvalidCredentials("Provided credentials was invalid.")
elif "Help us protect your account" in str(text):
raise TwoFactorAccount("2FA is enabled but not supported yet.")
else:
raise MsMcAuthException(f"Something went wrong. Status Code: {res.status}")
data = str(res.real_url).split("#")[1].split("&")
return UserLoginResponse(
refresh_token=data[4].split("=")[1],
access_token=data[0].split("=")[1],
expires_in=int(data[2].split("=")[1]),
logged_in=True
)
def encode(self, data: str) -> str:
"""Encodes data."""
return quote_plus(data)
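# Usage sketch (hypothetical; requires a configured Http client and a real
# Microsoft account without 2FA):
#
#     xbox = Xbox(http=Http())
#     pre_auth = await xbox.get_pre_auth()
#     user = await xbox.xbox_login("user@example.com", "hunter2", pre_auth)
#     print(user.access_token, user.expires_in)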
|
# Copyright (c) 2015 Jonathan M. Lange <jml@mumak.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyrsistent import field, PClass
class _State(PClass):
"""
An action that, given an initial state, returns a value and a new state.
"""
_function = field()
@classmethod
def new(cls, function):
"""
Create a new stateful action.
:param function: a callable that takes a single parameter representing
the initial state and returns a tuple of ``(a, new_state)``, where
``a`` is the result of the action and ``new_state`` is the new
state.
:return: A stateful action.
"""
return cls(_function=function)
def run(self, state):
"""
Run this action given initial state ``state``.
:return: A tuple of ``(a, s)``, where ``a`` is the value of the action,
and ``s`` is the new state.
"""
return self._function(state)
def map(self, function):
return map_state(self, function)
def bind(self, function):
return bind(self, function)
def then(self, new_state):
return then(self, new_state)
def pure(value):
"""
Create a stateful action that does not use the state at all.
:return: A ``State`` that when run will return ``value`` as-is and the
state unchanged.
"""
return _State.new(lambda s: (value, s))
def run(state, initial):
"""
Run the stateful action ``state`` given initial state ``initial``.
Equivalent to ``state.run(initial)``.
:return: A tuple of ``(a, s)``, where ``a`` is the value of the action,
and ``s`` is the new state.
"""
return state.run(initial)
def evaluate(state, initial):
"""
Evaluate ``state`` given initial state ``initial``.
:return: The value of the action.
"""
return run(state, initial)[0]
def execute(state, initial):
"""
Execute ``state`` given initial state ``initial``.
:return: The new state.
"""
return run(state, initial)[1]
def map_state(state, function):
"""
Map the value of ``state`` with ``function``, without reference to the
actual state.
"""
def _new_operation(s):
a, s2 = run(state, s)
return function(a), s2
return state.new(_new_operation)
def bind(state, function):
"""
Bind ``function`` to the stateful action ``state``.
``function`` must expect a single parameter. It will be called with
the value of this stateful action.
:param state: A stateful action that returns a value of type ``A`` and
state ``S``.
:param function: A function that expects a single parameter of type ``A``
and returns a ``State`` wrapper for state ``S`` and value of
type ``B``.
:return: A State wrapper for a state ``S`` and value of type ``B``.
"""
def _new_operation(s):
a, s2 = run(state, s)
return function(a).run(s2)
return state.new(_new_operation)
# XXX: Rather than making up my own terminology, I should probably borrow from
# Rust, which at least has words for these things.
def then(state1, state2):
"""
    Like ``bind``, but instead of a function that returns a stateful action,
just bind a new stateful action.
Equivalent to bind(state1, lambda _: state2)
"""
return bind(state1, lambda _: state2)
def put(new_state):
"""
A stateful action that replaces the current state with ``new_state``.
"""
return _State.new(lambda _: (None, new_state))
"""
A stateful action that retrieves the current state.
"""
get = _State.new(lambda s: (s, s))
def modify(function):
"""
A stateful action that updates the state to the result of applying
``function`` to the current state.
:param function: A unary callable.
"""
    return get.map(function).bind(put)
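# Worked example (a sketch added for illustration, not part of the original
# module): thread a counter through a small program built from the
# combinators above.
if __name__ == '__main__':
    increment = modify(lambda s: s + 1)
    program = then(increment, then(increment, get))
    assert run(program, 0) == (2, 2)      # value and state after two bumps
    assert evaluate(program, 0) == 2      # just the value
    assert execute(put(10), 0) == 10      # just the state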
|
from nndl.optim import sgd
import numpy as np
import matplotlib.pyplot as plt
class TwoLayerNet(object):
"""
A two-layer fully-connected neural network. The net has an input dimension of
N, a hidden layer dimension of H, and performs classification over C classes.
We train the network with a softmax loss function and L2 regularization on the
weight matrices. The network uses a ReLU nonlinearity after the first fully
connected layer.
In other words, the network has the following architecture:
input - fully connected layer - ReLU - fully connected layer - softmax
The outputs of the second fully-connected layer are the scores for each class.
"""
def __init__(self, input_size, hidden_size, output_size, std=1e-4):
"""
Initialize the model. Weights are initialized to small random values and
biases are initialized to zero. Weights and biases are stored in the
variable self.params, which is a dictionary with the following keys:
W1: First layer weights; has shape (H, D)
b1: First layer biases; has shape (H,)
W2: Second layer weights; has shape (C, H)
b2: Second layer biases; has shape (C,)
Inputs:
- input_size: The dimension D of the input data.
- hidden_size: The number of neurons H in the hidden layer.
- output_size: The number of classes C.
"""
self.params = {}
self.params['W1'] = std * np.random.randn(hidden_size, input_size)
self.params['b1'] = np.zeros(hidden_size)
self.params['W2'] = std * np.random.randn(output_size, hidden_size)
self.params['b2'] = np.zeros(output_size)
def loss(self, X, y=None, reg=0.0):
"""
Compute the loss and gradients for a two layer fully connected neural
network.
Inputs:
- X: Input data of shape (N, D). Each X[i] is a training sample.
- y: Vector of training labels. y[i] is the label for X[i], and each y[i] is
an integer in the range 0 <= y[i] < C. This parameter is optional; if it
is not passed then we only return scores, and if it is passed then we
instead return the loss and gradients.
- reg: Regularization strength.
Returns:
If y is None, return a matrix scores of shape (N, C) where scores[i, c] is
the score for class c on input X[i].
If y is not None, instead return a tuple of:
- loss: Loss (data loss and regularization loss) for this batch of training
samples.
- grads: Dictionary mapping parameter names to gradients of those parameters
with respect to the loss function; has the same keys as self.params.
"""
# Unpack variables from the params dictionary
W1, b1 = self.params['W1'], self.params['b1']
W2, b2 = self.params['W2'], self.params['b2']
N, D = X.shape
# Compute the forward pass
scores = None
# ================================================================ #
# YOUR CODE HERE:
# Calculate the output scores of the neural network. The result
# should be (N, C). As stated in the description for this class,
# there should not be a ReLU layer after the second FC layer.
# The output of the second FC layer is the output scores. Do not
# use a for loop in your implementation.
# ================================================================ #
r = W1@X.T+b1.reshape(len(b1),1)
h1 = (r>0)*r
scores = W2@h1+b2.reshape(len(b2),1)
scores = scores.T
# ================================================================ #
# END YOUR CODE HERE
# ================================================================ #
# If the targets are not given then jump out, we're done
if y is None:
return scores
# Compute the loss
loss = None
# ================================================================ #
# YOUR CODE HERE:
# Calculate the loss of the neural network. This includes the
# softmax loss and the L2 regularization for W1 and W2. Store the
    # total loss in the variable loss. Multiply the regularization
# loss by 0.5 (in addition to the factor reg).
# ================================================================ #
# scores is num_examples by num_classes
C = scores.shape[1]
N = scores.shape[0]
escores = np.exp(scores)
loss = np.ones((1,N))@(np.log(escores@np.ones((C,))) - scores[np.arange(N),y])/N + reg*0.5*(np.linalg.norm(W1)**2 + np.linalg.norm(W2)**2)
# ================================================================ #
# END YOUR CODE HERE
# ================================================================ #
grads = {}
# ================================================================ #
# YOUR CODE HERE:
# Implement the backward pass. Compute the derivatives of the
# weights and the biases. Store the results in the grads
# dictionary. e.g., grads['W1'] should store the gradient for
# W1, and be of the same size as W1.
# ================================================================ #
scaling = escores.T/(escores@np.ones((C,)))
scaling[y,np.arange(N)] -= 1
dLdCE = scaling@np.ones((N,))/N
#print(dLdCE.shape)
grads['b2'] = dLdCE
grads['W2'] = scaling@h1.T/N +reg*W2
dLdh1 = W2.T@dLdCE
dLdr = W2.T@scaling*(r>0)@np.ones((N,))/N
grads['b1'] = dLdr
grads['W1'] = W2.T@scaling*(r>0)@X/N +reg*W1
# ================================================================ #
# END YOUR CODE HERE
# ================================================================ #
return loss, grads
def train(self, X, y, X_val, y_val,
learning_rate=1e-3, learning_rate_decay=0.95,
reg=1e-5, num_iters=100,
batch_size=200, verbose=False):
"""
Train this neural network using stochastic gradient descent.
Inputs:
- X: A numpy array of shape (N, D) giving training data.
    - y: A numpy array of shape (N,) giving training labels; y[i] = c means that
X[i] has label c, where 0 <= c < C.
- X_val: A numpy array of shape (N_val, D) giving validation data.
- y_val: A numpy array of shape (N_val,) giving validation labels.
- learning_rate: Scalar giving learning rate for optimization.
- learning_rate_decay: Scalar giving factor used to decay the learning rate
after each epoch.
- reg: Scalar giving regularization strength.
- num_iters: Number of steps to take when optimizing.
- batch_size: Number of training examples to use per step.
- verbose: boolean; if true print progress during optimization.
"""
num_train = X.shape[0]
iterations_per_epoch = max(num_train / batch_size, 1)
# Use SGD to optimize the parameters in self.model
loss_history = []
train_acc_history = []
val_acc_history = []
for it in np.arange(num_iters):
X_batch = None
y_batch = None
# ================================================================ #
# YOUR CODE HERE:
# Create a minibatch by sampling batch_size samples randomly.
# ================================================================ #
indexes = (np.random.rand(batch_size,1)*num_train).astype(int)
X_batch = X[indexes,:].reshape(batch_size,X.shape[1])
y_batch = y[indexes].reshape(batch_size,)
# ================================================================ #
# END YOUR CODE HERE
# ================================================================ #
# Compute loss and gradients using the current minibatch
loss, grads = self.loss(X_batch, y=y_batch, reg=reg)
loss_history.append(loss)
# ================================================================ #
# YOUR CODE HERE:
# Perform a gradient descent step using the minibatch to update
# all parameters (i.e., W1, W2, b1, and b2).
# ================================================================ #
config={}
config['learning_rate'] = learning_rate
self.params['W1'],config = sgd(self.params['W1'],grads['W1'],config)
self.params['b1'],config = sgd(self.params['b1'],grads['b1'],config)
self.params['W2'],config = sgd(self.params['W2'],grads['W2'],config)
self.params['b2'],config = sgd(self.params['b2'],grads['b2'],config)
# ================================================================ #
# END YOUR CODE HERE
# ================================================================ #
if verbose and it % 100 == 0:
print('iteration {} / {}: loss {}'.format(it, num_iters, loss))
# Every epoch, check train and val accuracy and decay learning rate.
if it % iterations_per_epoch == 0:
# Check accuracy
train_acc = (self.predict(X_batch) == y_batch).mean()
val_acc = (self.predict(X_val) == y_val).mean()
train_acc_history.append(train_acc)
val_acc_history.append(val_acc)
# Decay learning rate
learning_rate *= learning_rate_decay
return {
'loss_history': loss_history,
'train_acc_history': train_acc_history,
'val_acc_history': val_acc_history,
}
def predict(self, X):
"""
Use the trained weights of this two-layer network to predict labels for
data points. For each data point we predict scores for each of the C
classes, and assign each data point to the class with the highest score.
Inputs:
- X: A numpy array of shape (N, D) giving N D-dimensional data points to
classify.
Returns:
- y_pred: A numpy array of shape (N,) giving predicted labels for each of
the elements of X. For all i, y_pred[i] = c means that X[i] is predicted
to have class c, where 0 <= c < C.
"""
y_pred = None
# ================================================================ #
# YOUR CODE HERE:
# Predict the class given the input data.
# ================================================================ #
W1, b1 = self.params['W1'], self.params['b1']
W2, b2 = self.params['W2'], self.params['b2']
N, D = X.shape
r = W1@X.T+b1.reshape(len(b1),1)
h1 = (r>0)*r
scores = W2@h1+b2.reshape(len(b2),1)
scores = scores.T
y_pred = np.argmax(scores, axis=1)
# ================================================================ #
# END YOUR CODE HERE
# ================================================================ #
return y_pred
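# Smoke test (an illustrative sketch, not part of the assignment scaffold;
# assumes the nndl package imported above is available): run one
# forward/backward pass on random data and sanity-check gradient shapes.
if __name__ == '__main__':
    np.random.seed(0)
    net = TwoLayerNet(input_size=4, hidden_size=10, output_size=3)
    X = np.random.randn(5, 4)
    y = np.random.randint(3, size=5)
    loss, grads = net.loss(X, y=y, reg=0.1)
    for name, param in net.params.items():
        # every gradient should match its parameter's shape
        assert grads[name].shape == param.shape
    assert net.predict(X).shape == (5,)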
|
from pykechain.enums import Category, Multiplicity
from pykechain.exceptions import NotFoundError, MultipleFoundError, IllegalArgumentError
from pykechain.models import PartSet, Part
from pykechain.utils import find
from tests.classes import TestBetamax
class TestPartRetrieve(TestBetamax):
# 1.8
def test_get_instances_of_a_model(self):
wheel_model = self.project.model('Wheel')
wheel_instances = wheel_model.instances()
self.assertIsInstance(wheel_instances, PartSet)
for wheel_instance in wheel_instances:
self.assertEqual(wheel_instance.category, Category.INSTANCE)
self.assertEqual(wheel_instance.model().id, wheel_model.id)
def test_get_instances_of_an_instances_raises_notfound(self):
wheel_instance = self.project.part('Rear Wheel', category=Category.INSTANCE)
with self.assertRaises(NotFoundError):
wheel_instance.instances()
def test_get_single_instance_of_a_model(self):
bike_model = self.project.model('Bike')
bike_instance = bike_model.instance()
self.assertEqual(bike_instance.category, Category.INSTANCE)
def test_get_single_instance_of_a_multiplicity_model_raises_multiplefounderror(self):
wheel_model = self.project.model('Wheel')
with self.assertRaises(MultipleFoundError):
wheel_model.instance()
# test added in 1.12.7
def test_get_single_instance_of_a_model_without_instances_raises_notfounderror(self):
catalog = self.project.model(name__startswith='Catalog')
model_without_instances = self.project.create_model(parent=catalog, name='model_without_instances',
multiplicity=Multiplicity.ZERO_ONE)
with self.assertRaises(NotFoundError):
model_without_instances.instance()
# tearDown
model_without_instances.delete()
# test added in 2.1, changed in 3.2
def test_get_parts_with_descendants_tree(self):
# setUp
root = self.project.part(name='Product')
root.populate_descendants()
# testing
self.assertIsInstance(root._cached_children, list)
self.assertEqual(1, len(root._cached_children), msg='Number of instances has changed, expected 1')
# follow-up
bike_part = find(root._cached_children, lambda d: d.name == 'Bike')
self.assertIsNotNone(bike_part._cached_children)
self.assertEqual(7, len(bike_part._cached_children), msg='Number of child instances has changed, expected 7')
# test added in 2.1, changed in 3.2
def test_get_models_with_descendants_tree(self):
# setUp
root = self.project.model(name='Product')
root.populate_descendants()
# testing
self.assertIsInstance(root._cached_children, list)
self.assertEqual(1, len(root._cached_children), msg='Number of models has changed, expected 1')
# follow-up
bike_model = find(root._cached_children, lambda d: d.name == 'Bike')
self.assertIsNotNone(bike_model._cached_children)
self.assertEqual(5, len(bike_model._cached_children), msg='Number of child models has changed, expected 5')
# test added in 3.0
def test_retrieve_parts_with_refs(self):
# setup
front_fork_ref = 'front-fork'
front_fork_name = 'Front Fork'
front_fork_part = self.project.part(ref=front_fork_ref)
front_fork_model = self.project.model(ref=front_fork_ref)
# testing
self.assertIsInstance(front_fork_part, Part)
self.assertEqual(front_fork_name, front_fork_part.name)
self.assertEqual(Category.INSTANCE, front_fork_part.category)
self.assertIsInstance(front_fork_model, Part)
self.assertEqual(front_fork_name, front_fork_model.name)
self.assertEqual(Category.MODEL, front_fork_model.category)
def test_child(self):
root = self.project.model(name='Product')
bike = root.child(name='Bike')
self.assertIsInstance(bike, Part)
self.assertEqual(bike.parent_id, root.id)
bike_via__call__ = root('Bike')
self.assertEqual(bike, bike_via__call__)
def test_child_caching(self):
root = self.project.model(name='Product')
self.assertIsNone(root._cached_children, msg='No cached children yet')
root.children()
self.assertTrue(root._cached_children, msg='Children should be cached')
bike = root.child(name='Bike')
self.assertTrue(root._cached_children, msg='Cache was used and should still be intact')
bike_again = root.child(pk=bike.id)
self.assertEqual(bike, bike_again, msg='Bike should be retrieved from cache, based on ID')
still_the_bike = root.child(name=bike.name)
self.assertEqual(bike, still_the_bike, msg='Bike should be retrieved from cache, based on name')
root._cached_children = None
more_bike = root.child(pk=bike.id)
self.assertEqual(bike, more_bike, msg='Cache should be cleared, bike has to be retrieved anew.')
def test_child_invalid(self):
root = self.project.model(name='Product')
with self.assertRaises(IllegalArgumentError):
root.child()
second_bike_model = root.add_model(name='Bike')
with self.assertRaises(MultipleFoundError):
root.child(name='Bike')
second_bike_model.delete()
with self.assertRaises(NotFoundError):
root.child(name="It's only a model")
def test_all_children(self):
root = self.project.model(name='Product')
all_children = root.all_children()
self.assertIsInstance(all_children, list)
self.assertEqual(6, len(all_children), msg='Number of models has changed, expected 6')
def test_child_after_construction(self):
"""Test retrieval of child after creating the model via another Part object of the same KE-chain Part."""
bike_for_adding = self.project.model(name='Bike')
bike_for_getting = self.project.model(name='Bike')
current_children = bike_for_getting.children()
self.assertEqual(5, len(current_children))
child = bike_for_adding.add_model(name='__Temp child')
try:
retrieved_child = bike_for_getting.child('__Temp child')
self.assertEqual(child, retrieved_child)
except NotFoundError as e:
raise e
finally:
# tearDown
child.delete()
|
import re
base_url = 'https://sede.administracionespublicas.gob.es'
# base_url = 'http://127.0.0.1:5000' # for local simulation
no_cita_message = 'En este momento no hay citas disponibles.'
error_503_message = 'ERROR [503]'
nie_pattern = re.compile(r'^[XYZ]\d{7,8}[A-Z]$')
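# The patterns below scrape the booking flow's HTML: hidden_params_pattern
# captures the UUID-named hidden <input> fields carried between form steps,
# offices_pattern the <option> entries of the office dropdown, and
# cita_pattern / cita_code_pattern the confirmed appointment slot and its
# justificante code.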
hidden_params_pattern = re.compile(
r'><input type="hidden" name="([\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12})" value(?:="([\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12})")?/><input type="hidden" name="([\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12})" value(?:="([\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12})")?/>')
offices_pattern = re.compile(r'<option value="(\d+)"\s?>(.+)</option>')
cita_pattern = re.compile(
r'<span>CITA</span>\s*(\d+)[\s\S]+?Día</span>: <span>(\d{2}/\d{2}/\d{4})</span> <br(?: /)?> <span>Hora</span>: <span>(\d{2}:\d{2})</span> <br')
cita_code_pattern = re.compile(r'<span id="justificanteFinal" class="mf-carousel--item-title">([\dA-Z]+?)</span>')
browser_headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.54 Safari/537.36',
'Cache-Control': 'no-cache',
# 'Accept-Language': 'en-US,en;q=0.9,es-ES;q=0.8,es;q=0.7,ca-ES;q=0.6,ca;q=0.5',
# 'Accept-Encoding': 'gzip, deflate',
# 'Referer': f'{base_url}/icpplus/index.html',
# 'DNT': '1',
# 'sec-ch-ua': '"Google Chrome";v="95", "Chromium";v="95", ";Not A Brand";v="99"',
# 'sec-ch-ua-mobile': '?0',
# 'sec-ch-ua-platform': '"Windows"',
# 'Upgrade-Insecure-Requests': '1',
# 'Sec-Fetch-Site': 'same-origin',
# 'Sec-Fetch-Mode': 'navigate',
# 'Sec-Fetch-User': '?1',
# 'Sec-Fetch-Dest': 'document',
}
countries = {
'AFGANISTAN': 401,
'ALBANIA': 102,
'ALEMANIA': 103,
'ANDORRA': 133,
'ANGOLA': 301,
'ANGUILLA': 258,
'ANTIGUA Y BARBUDA': 255,
'ANTILLAS NL.': 200,
'APATRIDA': 600,
'ARABIA SAUDI': 403,
'ARGELIA': 304,
'ARGENTINA': 202,
'ARMENIA': 142,
'ARUBA': 257,
'AUSTRALIA': 500,
'AUSTRIA': 104,
'AZERBAYAN': 143,
'BAHAMAS': 203,
'BAHREIN': 405,
'BANGLADESH': 432,
'BARBADOS': 205,
'BELGICA': 105,
'BELICE': 207,
'BENIN': 302,
'BHUTAN': 407,
'BIELORRUSIA O BELARUS': 144,
'BOLIVIA': 204,
'BOSNIA-HERZEGOVINA': 156,
'BOTSWANA': 305,
'BRASIL': 206,
'BRUNEI DARUSSALAM': 409,
'BULGARIA': 134,
'BURKINA FASO': 303,
'BURUNDI': 321,
'CABO VERDE': 315,
'CAMBOYA': 402,
'CAMERUN': 308,
'CANADA': 208,
'CENTROAFRICA REPUBLICA': 310,
'CHAD': 372,
'CHILE': 210,
'CHINA': 406,
'CHIPRE': 107,
'COLOMBIA': 212,
'COMORES': 311,
'CONGO BRAZZAVILLE': 312,
'COREA, REP. POP. DEMOC.': 460,
'COREA, REPUBLICA': 410,
'COSTA DE MARFIL': 314,
'COSTA RICA': 214,
'CROACIA': 140,
'CUBA': 216,
'DINAMARCA': 108,
'DJIBOUTI': 317,
'DOMINICA': 217,
'DOMINICANA REPUBLICA': 218,
'ECUADOR': 222,
'EEUU': 224,
'EGIPTO': 300,
'EL SALVADOR': 220,
'EL VATICANO': 136,
'EMIRATOS ARABES UNIDOS': 429,
'ERITREA': 384,
'ESLOVAQUIA': 158,
'ESLOVENIA': 141,
'ESPAÑA': 109,
'ESTONIA': 137,
'ETIOPIA': 318,
'FIDJI': 550,
'FILIPINAS': 411,
'FINLANDIA': 110,
'FRANCIA': 111,
'GABON': 320,
'GAMBIA': 323,
'GEORGIA': 145,
'GHANA': 322,
'GRANADA REPUBLICA': 229,
'GRECIA': 113,
'GUATEMALA': 228,
'GUAYANA': 225,
'GUINEA ECUATORIAL': 324,
'GUINEA REPUBLICA': 325,
'GUINEA-BISSAU': 328,
'HAITI': 230,
'HOLANDA': 123,
'HONDURAS': 232,
'HUNGRIA': 114,
'INDIA': 412,
'INDONESIA': 414,
'IRAK': 413,
'IRAN': 415,
'IRLANDA': 115,
'ISLANDIA': 116,
'ISLAS MARSCHALL': 520,
'ISRAEL': 417,
'ITALIA': 117,
'JAMAICA': 233,
'JAPON': 416,
'JORDANIA': 419,
'KAZAJSTAN': 146,
'KENIA': 336,
'KIRGUISTAN': 147,
'KIRIBATI': 501,
'KUWAIT': 421,
'LAOS': 418,
'LAS MALDIVAS': 436,
'LESOTHO': 337,
'LETONIA': 138,
'LIBANO': 423,
'LIBERIA': 342,
'LIBIA': 344,
'LIECHTENSTEIN': 118,
'LITUANIA': 139,
'LUXEMBURGO': 119,
'MACAO': 463,
'MACEDONIA': 159,
'MADAGASCAR': 354,
'MALASIA': 425,
'MALASIA - GRAN BRETAÑA': 900,
'MALAWI': 346,
'MALI': 347,
'MALTA': 120,
'MARRUECOS': 348,
'MAURICIO': 349,
'MAURITANIA': 350,
'MEJICO': 234,
'MICRONESIA': 525,
'MOLDAVIA': 148,
'MONACO': 121,
'MONGOLIA': 427,
'MONTENEGRO': 160,
'MOZAMBIQUE': 351,
'MYANMAR': 400,
'NAMIBIA': 353,
'NAURU': 541,
'NEPAL': 420,
'NICARAGUA': 236,
'NIGER': 360,
'NIGERIA': 352,
'NORUEGA': 122,
'NUEVA ZELANDA': 540,
'OMAN': 444,
'PAKISTAN': 424,
'PANAMA': 238,
'PAPUA NUEVA GUINEA': 542,
'PARAGUAY': 240,
'PERU': 242,
'POLONIA': 124,
'PORTUGAL': 125,
'PUERTO RICO': 244,
'QATAR': 431,
'REINO UNIDO': 112,
'REP. DEMOCRATICA DEL CONGO (EX-ZAIRE)': 380,
'REPUBLICA CHECA': 157,
'REUNION-COMO': 355,
'RUANDA': 306,
'RUMANIA': 127,
'RUSIA': 149,
'SALOMON': 551,
'SAMOA OCCIDENTAL': 552,
'SAN CRISTOBAL Y NEVIS': 256,
'SAN MARINO': 135,
'SAN VICENTE': 254,
'SANTA LUCIA': 253,
'SANTO TOME Y PRINCIPE': 361,
'SEICHELLES': 363,
'SENEGAL': 362,
'SENEGAMBIA': 366,
'SERBIA': 155,
'SIERRA LEONA': 364,
'SINGAPUR': 426,
'SIRIA': 433,
'SOMALIA': 365,
'SRI LANKA': 404,
'SUDAFRICA': 367,
'SUDAN': 368,
'SUECIA': 128,
'SUIZA': 129,
'SURINAM': 250,
'SWAZILANDIA': 371,
'TADJIKISTAN': 154,
'TAIWAN': 408,
'TANZANIA': 370,
'THAILANDIA': 428,
'TIMOR ORIENTAL': 465,
'TOGO': 374,
'TONGA': 554,
'TRINIDAD Y TOBAGO': 245,
'TUNEZ': 378,
'TURKMENIA': 151,
'TURQUIA': 130,
'TUVALU': 560,
'UCRANIA': 152,
'UGANDA': 358,
'URUGUAY': 246,
'UZBEKISTAN': 153,
'VANUATU': 565,
'VENEZUELA': 248,
'VIETNAM': 430,
'YEMEN': 434,
'ZAMBIA': 382,
'ZIMBABWE': 357,
}
tramites = {
'POLICIA - RECOGIDA DE TARJETA DE IDENTIDAD DE EXTRANJERO (TIE)': 4036,
'POLICIA-AUTORIZACIÓN DE REGRESO': 20,
'POLICIA-TOMA DE HUELLAS (EXPEDICIÓN DE TARJETA) Y RENOVACIÓN DE TARJETA DE LARGA DURACIÓN': 4010,
'POLICIA-CARTA DE INVITACIÓN': 4037,
'POLICIA-CERTIFICADO DE REGISTRO DE CIUDADANO DE LA U.E.': 4038,
'POLICIA-CERTIFICADOS (DE RESIDENCIA, DE NO RESIDENCIA Y DE CONCORDANCIA)': 4049,
'POLICIA- EXPEDICIÓN/RENOVACIÓN DE DOCUMENTOS DE SOLICITANTES DE ASILO': 4067,
'POLICIA- SOLICITUD ASILO': 4078,
'POLICIA-CERTIFICADOS Y ASIGNACION NIE (NO COMUNITARIOS)': 4079,
'POLICIA - TÍTULOS DE VIAJE': 4092,
'POLICÍA-EXP.TARJETA ASOCIADA AL ACUERDO DE RETIRADA CIUDADANOS BRITÁNICOS Y SUS FAMILIARES (BREXIT)': 4094,
'POLICIA-CERTIFICADOS Y ASIGNACION NIE': 4096,
}
office_codes = {
'CNP CARTAS DE INVITACION, CALLE GUADALAJARA , 1': 48,
'CNP COMISARIA BADALONA, AVDA. DELS VENTS, 9': 18,
'CNP COMISARIA CASTELLDEFELS, PLAÇA DE L`ESPERANTO, 4': 19,
'CNP COMISARIA CERDANYOLA DEL VALLES, VERGE DE LES FEIXES, 4': 20,
'CNP COMISARIA CORNELLA DE LLOBREGAT, AV. SANT ILDEFONS, S/N': 21,
'CNP COMISARIA EL PRAT DE LLOBREGAT, CENTRE, 4': 23,
'CNP COMISARIA GRANOLLERS, RICOMA, 65': 28,
'CNP COMISARIA IGUALADA, PRAT DE LA RIBA, 13': 26,
'CNP COMISARIA LHOSPITALET DE LLOBREGAT, Rbla. Just Oliveres, 43': 17,
'CNP COMISARIA MANRESA, SOLER I MARCH, 5': 38,
'CNP COMISARIA MATARO, AV. GATASSA, 15': 27,
'CNP COMISARIA MONTCADA I REIXAC, MAJOR, 38': 31,
'CNP COMISARIA RIPOLLET, TAMARIT, 78': 32,
'CNP COMISARIA RUBI, TERRASSA, 16': 29,
'CNP COMISARIA SABADELL, BATLLEVELL, 115': 30,
'CNP COMISARIA SANT ADRIA DEL BESOS, AV. JOAN XXIII, 2': 33,
'CNP COMISARIA SANT BOI DE LLOBREGAT, RIERA BASTÉ, 43': 24,
'CNP COMISARIA SANT CUGAT DEL VALLES, VALLES, 1': 34,
'CNP COMISARIA SANT FELIU DE LLOBREGAT, CARRERETES, 9': 22,
'CNP COMISARIA SANTA COLOMA DE GRAMENET, IRLANDA, 67': 35,
'CNP COMISARIA TERRASSA, BALDRICH, 13': 36,
'CNP COMISARIA VIC, BISBE MORGADES, 4': 37,
'CNP COMISARIA VILADECANS, AVDA. BALLESTER, 2': 25,
'CNP COMISARIA VILAFRANCA DEL PENEDES, Avinguda Ronda del Mar, 109': 46,
'CNP COMISARIA VILANOVA I LA GELTRU, VAPOR, 19': 39,
'CNP MALLORCA GRANADOS, MALLORCA, 213': 14,
'CNP PSJ PLANTA BAJA, PASSEIG SANT JOAN, 189': 43,
'CNP RAMBLA GUIPUSCOA 74, RAMBLA GUIPUSCOA, 74': 16,
'OUE BCN-C/MURCIA, 42, MURCIA, 42': 5,
'PASSEIG DE SANT JOAN, PASSEIG DE SANT JOAN, 189': 6,
}
class CountryNotFoundError(Exception):
pass
class OfficeNotFoundError(Exception):
pass
class TramiteNotFoundError(Exception):
pass
class UnsupportedTramiteError(Exception):
pass
class FailedAttempt(Exception):
pass
class FailedAttemptAtOffice(Exception):
pass
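# Illustrative self-check (not part of the original script); the NIE values
# below are made-up placeholders used only to exercise nie_pattern.
if __name__ == '__main__':
    assert nie_pattern.match('X1234567L') is not None
    assert nie_pattern.match('12345678A') is None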
|
import os
from cqlengine import connection
from cqlengine.management import create_keyspace
def setup_package():
try:
CASSANDRA_VERSION = int(os.environ["CASSANDRA_VERSION"])
    except (KeyError, ValueError):
        print("CASSANDRA_VERSION must be set as an environment variable. "
              "One of (12, 20, 21)")
        raise
if os.environ.get('CASSANDRA_TEST_HOST'):
CASSANDRA_TEST_HOST = os.environ['CASSANDRA_TEST_HOST']
else:
CASSANDRA_TEST_HOST = 'localhost'
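# Cassandra 1.2 speaks only native protocol v1; 2.0 and later support v2.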
if CASSANDRA_VERSION < 20:
protocol_version = 1
else:
protocol_version = 2
connection.setup([CASSANDRA_TEST_HOST],
protocol_version=protocol_version,
default_keyspace='cqlengine_test')
create_keyspace("cqlengine_test", replication_factor=1, strategy_class="SimpleStrategy")
|
from flask_httpauth import HTTPBasicAuth
from flask import g, request
from datetime import datetime
import pytz
from app.ext.database import db
# Tables
from app.models.tables import User as UserTable
# Integration
from app.integration.user import User
auth_api = HTTPBasicAuth()
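# Two flows below: a 'token' header authenticates any endpoint, while raw
# username/password credentials are accepted only on the /v1/token endpoints
# that issue tokens.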
@auth_api.verify_password
def verify_password(username, password):
    if request.headers.get('token'):
        _user = UserTable.verify_auth_token(request.headers.get('token'))
        if _user:
            g.current_user = _user
            g.token_used = True
            # Date and time of this access
            datetimeNow = datetime.now(pytz.timezone("America/Sao_Paulo"))
            user = User()
            if user.check_first_seen(g.current_user.id):
                # Record the first access
                user.first_seen(g.current_user.id, datetimeNow, db)
            # Update the last access
            user.last_seen(g.current_user.id, datetimeNow, db)
            return True
        # Invalid token: do not authenticate.
        g.current_user = None
        return False
elif request.path == '/v1/token/' or request.path == '/v1/token':
if username == '':
return False
_user = UserTable.query.filter_by(username=username).first()
if _user:
g.current_user = _user
g.token_used = False
            # Date and time of this access
            datetimeNow = datetime.now(pytz.timezone("America/Sao_Paulo"))
            # Update the last access
            User().last_seen(g.current_user.id, datetimeNow, db)
return _user.verify_password(password)
print(request.path)
return False
@auth_api.error_handler
def auth_error():
return 'Usuario e senha invalidos'
def init_app(app):
    # flask_httpauth's HTTPBasicAuth binds per-route via decorators and needs
    # no app registration; this hook only fits the project's init_app pattern.
    pass
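# Usage sketch (illustrative; assumes a Flask `app` object and a User model
# with a `username` attribute):
# from flask import jsonify
# @app.route('/v1/me')
# @auth_api.login_required
# def me():
#     return jsonify(username=g.current_user.username)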
|
import pytest
from io import BytesIO
from pathlib import Path
import can_decoder
from tests.LoadDBCPathType import LoadDBCPathType
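# Each test below loads the same DBC through four input types (str path,
# pathlib.Path, open file handle, in-memory BytesIO) and asserts the same
# decoded schema, so the loader is exercised over every supported source.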
@pytest.mark.env("canmatrix")
class TestLoadDBC(object):
@pytest.mark.parametrize(
("input_type",),
[
(LoadDBCPathType.LoadDBCPathType_STR, ),
(LoadDBCPathType.LoadDBCPathType_PATH, ),
(LoadDBCPathType.LoadDBCPathType_FILE, ),
(LoadDBCPathType.LoadDBCPathType_MEMORY, )
]
)
def test_load_j1939_dbc(self, input_type: LoadDBCPathType):
dbc_base_path = Path(__file__).parent.parent / "examples" / "CSS-Electronics-SAE-J1939-DEMO.dbc"
if input_type == LoadDBCPathType.LoadDBCPathType_STR:
result = can_decoder.load_dbc(str(dbc_base_path))
elif input_type == LoadDBCPathType.LoadDBCPathType_PATH:
result = can_decoder.load_dbc(dbc_base_path)
elif input_type == LoadDBCPathType.LoadDBCPathType_FILE:
with open(dbc_base_path, "rb") as handle:
result = can_decoder.load_dbc(handle)
elif input_type == LoadDBCPathType.LoadDBCPathType_MEMORY:
with open(dbc_base_path, "rb") as handle:
raw = handle.read()
with BytesIO(raw) as handle:
result = can_decoder.load_dbc(handle)
# Ensure that a DB is loaded.
assert result is not None, "Expected a DB to be loaded"
# Ensure the protocol is set to J1939.
assert result.protocol == "J1939", "Expected protocol to be J1939"
# Ensure the correct signals are present.
frame_ids = [0x8CF004FE, 0x98FEF1FE]
assert len(result.frames) == len(frame_ids)
frame_eec1 = result.frames[0x8CF004FE] # type: can_decoder.Frame
assert frame_eec1.id == 0x8CF004FE
assert frame_eec1.multiplexer is None
assert frame_eec1.size == 8
assert len(frame_eec1.signals) == 1
signal_engine_speed = frame_eec1.signals[0]
assert signal_engine_speed.is_float is False
assert signal_engine_speed.is_little_endian is True
assert signal_engine_speed.is_multiplexer is False
assert signal_engine_speed.is_signed is False
assert signal_engine_speed.name == "EngineSpeed"
assert signal_engine_speed.factor == 0.125
assert signal_engine_speed.offset == 0
assert signal_engine_speed.size == 16
assert signal_engine_speed.start_bit == 24
frame_ccv1 = result.frames[0x98FEF1FE] # type: can_decoder.Frame
assert frame_ccv1.id == 0x98FEF1FE
assert frame_ccv1.multiplexer is None
assert frame_ccv1.size == 8
assert len(frame_ccv1.signals) == 1
signal_vehicle_speed = frame_ccv1.signals[0]
assert signal_vehicle_speed.is_float is False
assert signal_vehicle_speed.is_little_endian is True
assert signal_vehicle_speed.is_multiplexer is False
assert signal_vehicle_speed.is_signed is False
assert signal_vehicle_speed.name == "WheelBasedVehicleSpeed"
assert signal_vehicle_speed.factor == 0.00390625
assert signal_vehicle_speed.offset == 0
assert signal_vehicle_speed.size == 16
assert signal_vehicle_speed.start_bit == 8
return
@pytest.mark.parametrize(
("input_type",),
[
(LoadDBCPathType.LoadDBCPathType_STR,),
(LoadDBCPathType.LoadDBCPathType_PATH,),
(LoadDBCPathType.LoadDBCPathType_FILE,),
(LoadDBCPathType.LoadDBCPathType_MEMORY,)
]
)
def test_load_j1939_dbc_custom_attribute(self, input_type: LoadDBCPathType):
dbc_base_path = Path(__file__).parent.parent / "examples" / "CSS-Electronics-SAE-J1939-DEMO.dbc"
kwargs = {
"use_custom_attribute": "SPN"
}
if input_type == LoadDBCPathType.LoadDBCPathType_STR:
result = can_decoder.load_dbc(str(dbc_base_path), **kwargs)
elif input_type == LoadDBCPathType.LoadDBCPathType_PATH:
result = can_decoder.load_dbc(dbc_base_path, **kwargs)
elif input_type == LoadDBCPathType.LoadDBCPathType_FILE:
with open(dbc_base_path, "rb") as handle:
result = can_decoder.load_dbc(handle, **kwargs)
elif input_type == LoadDBCPathType.LoadDBCPathType_MEMORY:
with open(dbc_base_path, "rb") as handle:
raw = handle.read()
with BytesIO(raw) as handle:
result = can_decoder.load_dbc(handle, **kwargs)
# Ensure that a DB is loaded.
assert result is not None, "Expected a DB to be loaded"
# Ensure the protocol is set to J1939.
assert result.protocol == "J1939", "Expected protocol to be J1939"
# Ensure the correct signals are present.
frame_ids = [0x8CF004FE, 0x98FEF1FE]
assert len(result.frames) == len(frame_ids)
frame_eec1 = result.frames[0x8CF004FE] # type: can_decoder.Frame
assert frame_eec1.id == 0x8CF004FE
assert frame_eec1.multiplexer is None
assert frame_eec1.size == 8
assert len(frame_eec1.signals) == 1
signal_engine_speed = frame_eec1.signals[0]
assert signal_engine_speed.is_float is False
assert signal_engine_speed.is_little_endian is True
assert signal_engine_speed.is_multiplexer is False
assert signal_engine_speed.is_signed is False
assert signal_engine_speed.name == "190"
assert signal_engine_speed.factor == 0.125
assert signal_engine_speed.offset == 0
assert signal_engine_speed.size == 16
assert signal_engine_speed.start_bit == 24
frame_ccv1 = result.frames[0x98FEF1FE] # type: can_decoder.Frame
assert frame_ccv1.id == 0x98FEF1FE
assert frame_ccv1.multiplexer is None
assert frame_ccv1.size == 8
assert len(frame_ccv1.signals) == 1
signal_vehicle_speed = frame_ccv1.signals[0]
assert signal_vehicle_speed.is_float is False
assert signal_vehicle_speed.is_little_endian is True
assert signal_vehicle_speed.is_multiplexer is False
assert signal_vehicle_speed.is_signed is False
assert signal_vehicle_speed.name == "84"
assert signal_vehicle_speed.factor == 0.00390625
assert signal_vehicle_speed.offset == 0
assert signal_vehicle_speed.size == 16
assert signal_vehicle_speed.start_bit == 8
return
@pytest.mark.parametrize(
("input_type",),
[(LoadDBCPathType.LoadDBCPathType_STR,), (LoadDBCPathType.LoadDBCPathType_PATH,),
(LoadDBCPathType.LoadDBCPathType_FILE,), (LoadDBCPathType.LoadDBCPathType_MEMORY,)]
)
def test_load_obd2_dbc(self, input_type: LoadDBCPathType):
dbc_base_path = Path(__file__).parent.parent / "examples" / "CSS-Electronics-OBD2-v1.3.dbc"
if input_type == LoadDBCPathType.LoadDBCPathType_STR:
result = can_decoder.load_dbc(str(dbc_base_path))
elif input_type == LoadDBCPathType.LoadDBCPathType_PATH:
result = can_decoder.load_dbc(dbc_base_path)
elif input_type == LoadDBCPathType.LoadDBCPathType_FILE:
with open(dbc_base_path, "rb") as handle:
result = can_decoder.load_dbc(handle)
elif input_type == LoadDBCPathType.LoadDBCPathType_MEMORY:
with open(dbc_base_path, "rb") as handle:
raw = handle.read()
with BytesIO(raw) as handle:
result = can_decoder.load_dbc(handle)
# Ensure that a DB is loaded.
assert result is not None, "Expected a DB to be loaded"
        # Ensure the protocol is set to OBD2.
        assert result.protocol == "OBD2", "Expected protocol to be OBD2"
# Ensure the correct signals are present.
assert len(result.frames) == 1
assert 0x7E8 in result.frames.keys()
frame = result.frames[0x7E8] # type: can_decoder.Frame
assert frame.id == 0x7E8
assert frame.multiplexer is not None
assert frame.size == 8
assert len(frame.signals) == 3
signal_service = frame.signals[0]
assert signal_service.name == "service"
assert signal_service.is_multiplexer is True
assert signal_service.size == 4
assert signal_service.start_bit == 12
assert len(signal_service.signals) == 2
signal_service_1_list = signal_service.signals[1]
assert len(signal_service_1_list) == 1
signal_service_1 = signal_service_1_list[0]
assert signal_service_1.name == "ParameterID_Service01"
assert signal_service_1.is_multiplexer is True
assert signal_service_1.size == 8
assert signal_service_1.start_bit == 16
assert len(signal_service_1.signals) == 114
signal_service_freeze_dtc_list = signal_service_1.signals[2]
assert len(signal_service_freeze_dtc_list) == 1
signal_service_freeze_dtc = signal_service_freeze_dtc_list[0]
assert signal_service_freeze_dtc.name == "S1_PID_02_FreezeDTC"
assert signal_service_freeze_dtc.factor == 1
assert signal_service_freeze_dtc.is_float is False
assert signal_service_freeze_dtc.is_little_endian is False
assert signal_service_freeze_dtc.is_multiplexer is False
assert signal_service_freeze_dtc.is_signed is False
assert signal_service_freeze_dtc.offset == 0
assert signal_service_freeze_dtc.size == 16
assert signal_service_freeze_dtc.start_bit == 24
assert len(signal_service_freeze_dtc.signals) == 0
signal_response = frame.signals[1]
assert signal_response.name == "response"
assert signal_response.is_float is False
assert signal_response.is_little_endian is False
assert signal_response.is_multiplexer is False
assert signal_response.is_signed is False
assert signal_response.size == 4
assert signal_response.start_bit == 8
assert len(signal_response.signals) == 0
signal_length = frame.signals[2]
assert signal_length.name == "length"
assert signal_length.is_float is False
assert signal_length.is_little_endian is False
assert signal_length.is_multiplexer is False
assert signal_length.is_signed is False
assert signal_length.size == 8
assert signal_length.start_bit == 0
assert len(signal_length.signals) == 0
return
    def test_load_with_invalid_object(self):
        with pytest.raises(OSError):
            can_decoder.load_dbc(45)
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: proto/core/node/domain/service/request_answer_response.proto
"""Generated protocol buffer code."""
# third party
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
# syft absolute
from syft.proto.core.common import (
common_object_pb2 as proto_dot_core_dot_common_dot_common__object__pb2,
)
from syft.proto.core.io import address_pb2 as proto_dot_core_dot_io_dot_address__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b'\n<proto/core/node/domain/service/request_answer_response.proto\x12\x1dsyft.core.node.domain.service\x1a%proto/core/common/common_object.proto\x1a\x1bproto/core/io/address.proto"z\n\x15RequestAnswerResponse\x12\x0e\n\x06status\x18\x01 \x01(\x05\x12)\n\nrequest_id\x18\x02 \x01(\x0b\x32\x15.syft.core.common.UID\x12&\n\x07\x61\x64\x64ress\x18\x03 \x01(\x0b\x32\x15.syft.core.io.Addressb\x06proto3'
)
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(
DESCRIPTOR, "proto.core.node.domain.service.request_answer_response_pb2", globals()
)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_REQUESTANSWERRESPONSE._serialized_start = 163
_REQUESTANSWERRESPONSE._serialized_end = 285
# @@protoc_insertion_point(module_scope)
|
import asyncio
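# Thin indirection so callers obtain the event loop from one place. Note that
# asyncio.get_event_loop() is deprecated when no loop is running
# (Python 3.10+), so asyncio.new_event_loop() may be preferable there.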
def get_loop():
return asyncio.get_event_loop()
|
import json
import gzip
import pickle
def read_lines(path):
with open(path) as f:
for line in f:
yield line
def read_json(path):
    with open(path) as f:
        return json.load(f)
def write_json(obj, path):
    with open(path, 'w') as f:
        json.dump(obj, f)
def read_jsonl(path, load=False, start=0, stop=None):
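    # Streams records i with start <= i < stop; load=True returns a list instead of a generator.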
def read_jsonl_gen(path):
with open(path) as f:
for i, line in enumerate(f):
if (stop is not None) and (i >= stop):
break
if i >= start:
yield json.loads(line)
data = read_jsonl_gen(path)
if load:
data = list(data)
return data
def read_jsonl_gz(path):
    with gzip.open(path) as f:
        for line in f:
            yield json.loads(line)
def write_jsonl(items, path, batch_size=100, override=True):
if override:
with open(path, 'w'):
pass
batch = []
for i, x in enumerate(items):
if i > 0 and i % batch_size == 0:
with open(path, 'a') as f:
output = '\n'.join(batch) + '\n'
f.write(output)
batch = []
raw = json.dumps(x)
batch.append(raw)
if batch:
with open(path, 'a') as f:
output = '\n'.join(batch) + '\n'
f.write(output)
def load_pkl(path):
with open(path, 'rb') as f:
obj = pickle.load(f)
return obj
def dump_pkl(obj, path):
with open(path, 'wb') as f:
pickle.dump(obj, f)
def args_to_summarize_settings(args):
args = vars(args)
settings = {}
for k in ['len_type', 'max_len',
'min_sent_tokens', 'max_sent_tokens',
'in_titles', 'out_titles']:
settings[k] = args[k]
return settings
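# Illustrative round-trip ('/tmp/demo.jsonl' is a hypothetical path):
# batched JSON-lines write followed by a materialized read.
if __name__ == '__main__':
    write_jsonl(({'i': i} for i in range(5)), '/tmp/demo.jsonl')
    print(read_jsonl('/tmp/demo.jsonl', load=True))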
|
#
# PySNMP MIB module BAY-STACK-IPV6-MLD-SNOOPING-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/BAY-STACK-IPV6-MLD-SNOOPING-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:35:27 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsUnion")
InterfaceIndexOrZero, InterfaceIndex = mibBuilder.importSymbols("IF-MIB", "InterfaceIndexOrZero", "InterfaceIndex")
InetAddressIPv6, = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressIPv6")
PortList, = mibBuilder.importSymbols("Q-BRIDGE-MIB", "PortList")
PortSet, = mibBuilder.importSymbols("RAPID-CITY", "PortSet")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Counter64, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, Bits, iso, Integer32, TimeTicks, Gauge32, MibIdentifier, ModuleIdentity, Counter32, Unsigned32, ObjectIdentity, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "Bits", "iso", "Integer32", "TimeTicks", "Gauge32", "MibIdentifier", "ModuleIdentity", "Counter32", "Unsigned32", "ObjectIdentity", "NotificationType")
DisplayString, TruthValue, RowStatus, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TruthValue", "RowStatus", "TextualConvention")
bayStackMibs, = mibBuilder.importSymbols("SYNOPTICS-ROOT-MIB", "bayStackMibs")
bayStackIpv6MldSnoopingMib = ModuleIdentity((1, 3, 6, 1, 4, 1, 45, 5, 44))
bayStackIpv6MldSnoopingMib.setRevisions(('2015-05-29 00:00', '2015-01-22 00:00', '2014-10-23 00:00', '2014-08-11 00:00', '2014-01-16 00:00', '2013-01-22 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: bayStackIpv6MldSnoopingMib.setRevisionsDescriptions(('Ver 6: Corrected MIB compiling errors.', 'Ver 5: Inverted the syntaxes for bsIpv6MldSnoopingProxyCacheType and bsIpv6MldSnoopingProxyCacheMode objects', 'Ver 4: Added bsIpv6MldSnoopingInterfaceFlushPorts object, bsIpv6MldSnoopingFlushPorts scalar object', 'Ver 3: Added bsIpv6MldSnoopingProxyCacheTable, bsIpv6MldSnoopingInterfaceFlush object, bsIpv6MldSnoopingFlush scalar object; Updated bsIpv6MldSnoopingInterfaceOperationalVersion, bsIpv6MldSnoopingInterfaceSendQuery, bsIpv6MldSnoopingInterfaceProxy objects descriptions', 'Ver 2: Added bsIpv6MldSnoopingInterfaceOperationalVersion, bsIpv6MldSnoopingInterfaceSendQuery, bsIpv6MldSnoopingInterfaceProxy', 'Ver 1: Initial version.',))
if mibBuilder.loadTexts: bayStackIpv6MldSnoopingMib.setLastUpdated('201505290000Z')
if mibBuilder.loadTexts: bayStackIpv6MldSnoopingMib.setOrganization('Avaya')
if mibBuilder.loadTexts: bayStackIpv6MldSnoopingMib.setContactInfo('avaya.com')
if mibBuilder.loadTexts: bayStackIpv6MldSnoopingMib.setDescription('This MIB module is used for IPv6 MLD Snooping configuration.')
bsIpv6MldSnoopingNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 5, 44, 0))
bsIpv6MldSnoopingObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 5, 44, 1))
bsIpv6MldSnoopingScalars = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 5, 44, 2))
bsIpv6MldSnoopingInterfaceTable = MibTable((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1), )
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceTable.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceTable.setDescription('The (conceptual) table listing IPv6 MLD Snooping interfaces.')
bsIpv6MldSnoopingInterfaceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1), ).setIndexNames((0, "BAY-STACK-IPV6-MLD-SNOOPING-MIB", "bsIpv6MldSnoopingInterfaceIfIndex"))
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceEntry.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceEntry.setDescription('An entry (conceptual row) representing an IPv6 MLD Snooping interface.')
bsIpv6MldSnoopingInterfaceIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceIfIndex.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceIfIndex.setDescription('The internetwork-layer interface value of the interface for which IPv6 MLD Snooping is enabled.')
bsIpv6MldSnoopingInterfaceQueryInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 2), Unsigned32().clone(125)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceQueryInterval.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceQueryInterval.setDescription('The frequency at which IPv6 MLD Snooping Host-Query packets are transmitted on this interface.')
bsIpv6MldSnoopingInterfaceStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceStatus.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceStatus.setDescription('Row status for create/delete.')
bsIpv6MldSnoopingInterfaceVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 4), Unsigned32().clone(1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceVersion.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceVersion.setDescription('The version of IPv6 MLD Snooping which is running on this interface. This object is a place holder to allow for new versions of MLD to be introduced. Version 1 of MLD is defined in RFC 2710.')
bsIpv6MldSnoopingInterfaceQuerier = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 5), InetAddressIPv6().subtype(subtypeSpec=ValueSizeConstraint(16, 16)).setFixedLength(16)).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceQuerier.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceQuerier.setDescription('The address of the IPv6 MLD Snooping Querier on the IPv6 subnet to which this interface is attached.')
bsIpv6MldSnoopingInterfaceQueryMaxResponseDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 6), Unsigned32().clone(10)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceQueryMaxResponseDelay.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceQueryMaxResponseDelay.setDescription('The maximum query response time advertised in IPv6 MLD Snooping queries on this interface.')
bsIpv6MldSnoopingInterfaceJoins = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceJoins.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceJoins.setDescription('The number of times a group membership has been added on this interface; that is, the number of times an entry for this interface has been added to the Cache Table. This object gives an indication of the amount of MLD activity over time.')
bsIpv6MldSnoopingInterfaceGroups = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceGroups.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceGroups.setDescription('The current number of entries for this interface in the Cache Table.')
bsIpv6MldSnoopingInterfaceRobustness = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 9), Unsigned32().clone(2)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceRobustness.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceRobustness.setDescription('The Robustness Variable allows tuning for the expected packet loss on a subnet. If a subnet is expected to be lossy, the Robustness Variable may be increased. IPv6 MLD Snooping is robust to (Robustness Variable-1) packet losses. The discussion of the Robustness Variable is in Section 7.1 of RFC 2710.')
bsIpv6MldSnoopingInterfaceLastListenQueryIntvl = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 10), Unsigned32().clone(1)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceLastListenQueryIntvl.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceLastListenQueryIntvl.setDescription('The Last Member Query Interval is the Max Response Delay inserted into Group-Specific Queries sent in response to Leave Group messages, and is also the amount of time between Group-Specific Query messages. This value may be tuned to modify the leave latency of the network. A reduced value results in reduced time to detect the loss of the last member of a group.')
bsIpv6MldSnoopingInterfaceProxyIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 11), InterfaceIndexOrZero()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceProxyIfIndex.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceProxyIfIndex.setDescription('Some devices implement a form of MLD proxying whereby memberships learned on the interface represented by this row, cause MLD Multicast Listener Reports to be sent on the internetwork-layer interface identified by this object. Such a device would implement mldRouterMIBGroup only on its router interfaces (those interfaces with non-zero mldInterfaceProxyIfIndex). Typically, the value of this object is 0, indicating that no proxying is being done.')
bsIpv6MldSnoopingInterfaceQuerierUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 12), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceQuerierUpTime.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceQuerierUpTime.setDescription('The time since mldInterfaceQuerier was last changed.')
bsIpv6MldSnoopingInterfaceQuerierExpiryTime = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 13), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceQuerierExpiryTime.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceQuerierExpiryTime.setDescription('The time remaining before the Other Querier Present Timer expires. If the local system is the querier, the value of this object is zero.')
bsIpv6MldSnoopingInterfaceEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 14), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceEnabled.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceEnabled.setDescription('This object controls whether IPv6 MLD Snooping is enabled on this interface.')
bsIpv6MldSnoopingInterfaceIgmpMRouterPorts = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 15), PortSet()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceIgmpMRouterPorts.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceIgmpMRouterPorts.setDescription('The set of ports in this interface that provide connectivity to an IPv6 Multicast router.')
bsIpv6MldSnoopingInterfaceIgmpActiveMRouterPorts = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 16), PortSet()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceIgmpActiveMRouterPorts.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceIgmpActiveMRouterPorts.setDescription('The set of active ports in this interface that provide connectivity to an IPv6 Multicast router.')
bsIpv6MldSnoopingInterfaceIgmpMRouterExpiration = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 17), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceIgmpMRouterExpiration.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceIgmpMRouterExpiration.setDescription('Multicast querier router aging time out.')
bsIpv6MldSnoopingInterfaceOperationalVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 18), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceOperationalVersion.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceOperationalVersion.setDescription('The operational version of IPv6 MLD Snooping which is running on this interface at the moment.')
bsIpv6MldSnoopingInterfaceSendQuery = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 19), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceSendQuery.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceSendQuery.setDescription('This object controls whether IPv6 MLD Send-Query is enabled on this interface.')
bsIpv6MldSnoopingInterfaceProxy = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 20), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceProxy.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceProxy.setDescription('This object controls whether IPv6 MLD Proxy is enabled on this interface.')
bsIpv6MldSnoopingInterfaceFlush = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("noAction", 1), ("groups", 2), ("mrouters", 3), ("all", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceFlush.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceFlush.setDescription('This object is used to remove MLD members from this interface. noAction(1) value is returned at read. all(4) value is used to flush groups and mrouters.')
bsIpv6MldSnoopingInterfaceFlushPorts = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 1, 1, 22), PortSet()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceFlushPorts.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingInterfaceFlushPorts.setDescription('The set of ports in this interface that are going to be flushed. An empty port set is returned at read.')
bsIpv6MldSnoopingCacheTable = MibTable((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 2), )
if mibBuilder.loadTexts: bsIpv6MldSnoopingCacheTable.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingCacheTable.setDescription('The (conceptual) table listing the IPv6 multicast groups for which there are members on a particular interface.')
bsIpv6MldSnoopingCacheEntry = MibTableRow((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 2, 1), ).setIndexNames((0, "BAY-STACK-IPV6-MLD-SNOOPING-MIB", "bsIpv6MldSnoopingCacheAddress"), (0, "BAY-STACK-IPV6-MLD-SNOOPING-MIB", "bsIpv6MldSnoopingCacheIfIndex"))
if mibBuilder.loadTexts: bsIpv6MldSnoopingCacheEntry.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingCacheEntry.setDescription('An entry (conceptual row) in the bsIpv6MldSnoopingCacheTable.')
bsIpv6MldSnoopingCacheAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 2, 1, 1), InetAddressIPv6().subtype(subtypeSpec=ValueSizeConstraint(16, 16)).setFixedLength(16))
if mibBuilder.loadTexts: bsIpv6MldSnoopingCacheAddress.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingCacheAddress.setDescription('The IPv6 multicast group address for which this entry contains information.')
bsIpv6MldSnoopingCacheIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 2, 1, 2), InterfaceIndex())
if mibBuilder.loadTexts: bsIpv6MldSnoopingCacheIfIndex.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingCacheIfIndex.setDescription('The internetwork-layer interface for which this entry contains information for an IPv6 multicast group address.')
bsIpv6MldSnoopingCacheSelf = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 2, 1, 3), TruthValue().clone('true')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: bsIpv6MldSnoopingCacheSelf.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingCacheSelf.setDescription('An indication of whether the local system is a member of this group address on this interface.')
bsIpv6MldSnoopingCacheLastReporter = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 2, 1, 4), InetAddressIPv6().subtype(subtypeSpec=ValueSizeConstraint(16, 16)).setFixedLength(16)).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsIpv6MldSnoopingCacheLastReporter.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingCacheLastReporter.setDescription('The IPv6 address of the source of the last membership report received for this IPv6 Multicast group address on this interface. If no membership report has been received, this object has the value 0::0.')
bsIpv6MldSnoopingCacheUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 2, 1, 5), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsIpv6MldSnoopingCacheUpTime.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingCacheUpTime.setDescription('The time elapsed since this entry was created.')
bsIpv6MldSnoopingCacheExpiryTime = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 2, 1, 6), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsIpv6MldSnoopingCacheExpiryTime.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingCacheExpiryTime.setDescription('The minimum amount of time remaining before this entry will be aged out. A value of 0 indicates that the entry is only present because bsIpv6MldSnoopingCacheSelf is true and that if the router left the group, this entry would be aged out immediately. Note that some implementations may process Membership Reports from the local system in the same way as reports from other hosts, so a value of 0 is not required.')
bsIpv6MldSnoopingCacheStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 2, 1, 7), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: bsIpv6MldSnoopingCacheStatus.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingCacheStatus.setDescription('The status of this row, by which new entries may be created, or existing entries deleted from this table.')
bsIpv6MldSnoopingCacheType = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 2, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("dynamic", 2), ("static", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: bsIpv6MldSnoopingCacheType.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingCacheType.setDescription('The type of this entry.')
bsIpv6MldSnoopingIgmpGroupTable = MibTable((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 3), )
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupTable.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupTable.setDescription('The (conceptual) table listing IPv6 MLD Snooping IGMP groups.')
bsIpv6MldSnoopingIgmpGroupEntry = MibTableRow((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 3, 1), ).setIndexNames((0, "BAY-STACK-IPV6-MLD-SNOOPING-MIB", "bsIpv6MldSnoopingIgmpGroupIpv6Address"), (0, "BAY-STACK-IPV6-MLD-SNOOPING-MIB", "bsIpv6MldSnoopingIgmpGroupMembers"), (0, "BAY-STACK-IPV6-MLD-SNOOPING-MIB", "bsIpv6MldSnoopingIgmpGroupSourceAddress"), (0, "BAY-STACK-IPV6-MLD-SNOOPING-MIB", "bsIpv6MldSnoopingIgmpGroupIfIndex"))
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupEntry.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupEntry.setDescription('An entry (conceptual row) representing an IPv6 MLD Snooping IGMP group.')
bsIpv6MldSnoopingIgmpGroupIpv6Address = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 3, 1, 1), InetAddressIPv6().subtype(subtypeSpec=ValueSizeConstraint(16, 16)).setFixedLength(16))
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupIpv6Address.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupIpv6Address.setDescription('Multicast group Address (Class D) that others want to join. A group address can be the same for many incoming ports.')
bsIpv6MldSnoopingIgmpGroupMembers = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 3, 1, 2), InetAddressIPv6().subtype(subtypeSpec=ValueSizeConstraint(16, 16)).setFixedLength(16))
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupMembers.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupMembers.setDescription('IPv6 Address of a source that has sent group report wishing to join this group.')
bsIpv6MldSnoopingIgmpGroupSourceAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 3, 1, 3), InetAddressIPv6().subtype(subtypeSpec=ValueSizeConstraint(16, 16)).setFixedLength(16))
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupSourceAddress.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupSourceAddress.setDescription('IPv6 Address of the source.')
bsIpv6MldSnoopingIgmpGroupIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 3, 1, 4), InterfaceIndex())
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupIfIndex.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupIfIndex.setDescription('An unique value to identify a physical interface or a logical interface (VLAN), which has received Group reports from various sources.')
bsIpv6MldSnoopingIgmpGroupInPort = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 3, 1, 5), PortList()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupInPort.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupInPort.setDescription('Value to identify physical interfaces or logical interfaces (VLANs), which has received Group reports from various sources.')
bsIpv6MldSnoopingIgmpGroupExpiration = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 3, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupExpiration.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupExpiration.setDescription('Time left before the group report expired on this port. Only one of this variable port. This variable is updated upon receiving a group report.')
bsIpv6MldSnoopingIgmpGroupUserId = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 3, 1, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupUserId.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupUserId.setDescription('User-id sending this group.')
bsIpv6MldSnoopingIgmpGroupType = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 3, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("dynamic", 2), ("static", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupType.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupType.setDescription('The type of this entry.')
bsIpv6MldSnoopingIgmpGroupMode = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 3, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("received", 1), ("include", 2), ("exclude", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupMode.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupMode.setDescription('Address mode.')
bsIpv6MldSnoopingIgmpGroupVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 3, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("version1", 1), ("version2", 2))).clone('version1')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupVersion.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingIgmpGroupVersion.setDescription('Group version.')
bsIpv6MldSnoopingProxyCacheTable = MibTable((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 4), )
if mibBuilder.loadTexts: bsIpv6MldSnoopingProxyCacheTable.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingProxyCacheTable.setDescription('The (conceptual) table listing the IPv6 multicast groups for which the switch is registered in order to receive the multicast traffic.')
bsIpv6MldSnoopingProxyCacheEntry = MibTableRow((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 4, 1), ).setIndexNames((0, "BAY-STACK-IPV6-MLD-SNOOPING-MIB", "bsIpv6MldSnoopingProxyCacheIfIndex"), (0, "BAY-STACK-IPV6-MLD-SNOOPING-MIB", "bsIpv6MldSnoopingProxyCacheGroupAddress"), (0, "BAY-STACK-IPV6-MLD-SNOOPING-MIB", "bsIpv6MldSnoopingProxyCacheSourceAddress"))
if mibBuilder.loadTexts: bsIpv6MldSnoopingProxyCacheEntry.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingProxyCacheEntry.setDescription('An entry (conceptual row) in the bsIpv6MldSnoopingProxyCacheTable.')
bsIpv6MldSnoopingProxyCacheIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 4, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: bsIpv6MldSnoopingProxyCacheIfIndex.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingProxyCacheIfIndex.setDescription('An unique value to identify a logical interface (VLAN) which is registered as MLD host for receiving multicast traffic')
bsIpv6MldSnoopingProxyCacheGroupAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 4, 1, 2), InetAddressIPv6().subtype(subtypeSpec=ValueSizeConstraint(16, 16)).setFixedLength(16))
if mibBuilder.loadTexts: bsIpv6MldSnoopingProxyCacheGroupAddress.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingProxyCacheGroupAddress.setDescription('The IPv6 destination address of the multicast traffic that the interface is registered for receiving it.')
bsIpv6MldSnoopingProxyCacheSourceAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 4, 1, 3), InetAddressIPv6().subtype(subtypeSpec=ValueSizeConstraint(16, 16)).setFixedLength(16))
if mibBuilder.loadTexts: bsIpv6MldSnoopingProxyCacheSourceAddress.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingProxyCacheSourceAddress.setDescription('The IPv6 source address of the multicast traffic that the interface is registered for receiving it.')
bsIpv6MldSnoopingProxyCacheVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 4, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("version1", 1), ("version2", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsIpv6MldSnoopingProxyCacheVersion.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingProxyCacheVersion.setDescription('Interface proxy version.')
bsIpv6MldSnoopingProxyCacheType = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 4, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("static", 1), ("dynamic", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsIpv6MldSnoopingProxyCacheType.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingProxyCacheType.setDescription('The entry registration type (static or dynamic).')
bsIpv6MldSnoopingProxyCacheMode = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 44, 1, 4, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("version1", 1), ("include", 2), ("exclude", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bsIpv6MldSnoopingProxyCacheMode.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingProxyCacheMode.setDescription('Proxy mode for MLDv2 entries. version1(1) value is returned for MLDv1 entries')
bsIpv6MldSnoopingFlush = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 44, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("noAction", 1), ("groups", 2), ("mrouters", 3), ("all", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsIpv6MldSnoopingFlush.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingFlush.setDescription('This object is used to remove MLD members from all the interfaces. noAction(1) value is returned at read. all(4) value is used to flush groups and mrouters.')
bsIpv6MldSnoopingFlushPorts = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 44, 2, 2), PortSet()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bsIpv6MldSnoopingFlushPorts.setStatus('current')
if mibBuilder.loadTexts: bsIpv6MldSnoopingFlushPorts.setDescription('The set of ports from all interfaces that are going to be flushed. An empty port set is returned at read.')
mibBuilder.exportSymbols("BAY-STACK-IPV6-MLD-SNOOPING-MIB", bsIpv6MldSnoopingObjects=bsIpv6MldSnoopingObjects, bsIpv6MldSnoopingInterfaceSendQuery=bsIpv6MldSnoopingInterfaceSendQuery, bsIpv6MldSnoopingProxyCacheTable=bsIpv6MldSnoopingProxyCacheTable, bsIpv6MldSnoopingProxyCacheEntry=bsIpv6MldSnoopingProxyCacheEntry, bsIpv6MldSnoopingIgmpGroupIpv6Address=bsIpv6MldSnoopingIgmpGroupIpv6Address, bsIpv6MldSnoopingProxyCacheMode=bsIpv6MldSnoopingProxyCacheMode, bsIpv6MldSnoopingIgmpGroupInPort=bsIpv6MldSnoopingIgmpGroupInPort, bsIpv6MldSnoopingInterfaceIgmpMRouterExpiration=bsIpv6MldSnoopingInterfaceIgmpMRouterExpiration, bsIpv6MldSnoopingInterfaceFlushPorts=bsIpv6MldSnoopingInterfaceFlushPorts, bsIpv6MldSnoopingCacheTable=bsIpv6MldSnoopingCacheTable, bsIpv6MldSnoopingInterfaceVersion=bsIpv6MldSnoopingInterfaceVersion, bsIpv6MldSnoopingInterfaceEnabled=bsIpv6MldSnoopingInterfaceEnabled, bayStackIpv6MldSnoopingMib=bayStackIpv6MldSnoopingMib, bsIpv6MldSnoopingFlushPorts=bsIpv6MldSnoopingFlushPorts, bsIpv6MldSnoopingIgmpGroupEntry=bsIpv6MldSnoopingIgmpGroupEntry, bsIpv6MldSnoopingCacheIfIndex=bsIpv6MldSnoopingCacheIfIndex, bsIpv6MldSnoopingIgmpGroupMembers=bsIpv6MldSnoopingIgmpGroupMembers, bsIpv6MldSnoopingCacheLastReporter=bsIpv6MldSnoopingCacheLastReporter, bsIpv6MldSnoopingIgmpGroupVersion=bsIpv6MldSnoopingIgmpGroupVersion, bsIpv6MldSnoopingInterfaceQuerier=bsIpv6MldSnoopingInterfaceQuerier, bsIpv6MldSnoopingInterfaceQuerierUpTime=bsIpv6MldSnoopingInterfaceQuerierUpTime, bsIpv6MldSnoopingInterfaceQueryMaxResponseDelay=bsIpv6MldSnoopingInterfaceQueryMaxResponseDelay, bsIpv6MldSnoopingCacheType=bsIpv6MldSnoopingCacheType, bsIpv6MldSnoopingInterfaceOperationalVersion=bsIpv6MldSnoopingInterfaceOperationalVersion, bsIpv6MldSnoopingInterfaceProxy=bsIpv6MldSnoopingInterfaceProxy, bsIpv6MldSnoopingInterfaceQueryInterval=bsIpv6MldSnoopingInterfaceQueryInterval, bsIpv6MldSnoopingInterfaceIgmpMRouterPorts=bsIpv6MldSnoopingInterfaceIgmpMRouterPorts, bsIpv6MldSnoopingCacheEntry=bsIpv6MldSnoopingCacheEntry, bsIpv6MldSnoopingIgmpGroupTable=bsIpv6MldSnoopingIgmpGroupTable, bsIpv6MldSnoopingNotifications=bsIpv6MldSnoopingNotifications, bsIpv6MldSnoopingIgmpGroupExpiration=bsIpv6MldSnoopingIgmpGroupExpiration, bsIpv6MldSnoopingCacheAddress=bsIpv6MldSnoopingCacheAddress, bsIpv6MldSnoopingCacheExpiryTime=bsIpv6MldSnoopingCacheExpiryTime, bsIpv6MldSnoopingIgmpGroupType=bsIpv6MldSnoopingIgmpGroupType, bsIpv6MldSnoopingInterfaceJoins=bsIpv6MldSnoopingInterfaceJoins, bsIpv6MldSnoopingInterfaceStatus=bsIpv6MldSnoopingInterfaceStatus, bsIpv6MldSnoopingInterfaceGroups=bsIpv6MldSnoopingInterfaceGroups, bsIpv6MldSnoopingProxyCacheSourceAddress=bsIpv6MldSnoopingProxyCacheSourceAddress, bsIpv6MldSnoopingInterfaceTable=bsIpv6MldSnoopingInterfaceTable, bsIpv6MldSnoopingIgmpGroupSourceAddress=bsIpv6MldSnoopingIgmpGroupSourceAddress, bsIpv6MldSnoopingIgmpGroupMode=bsIpv6MldSnoopingIgmpGroupMode, bsIpv6MldSnoopingInterfaceEntry=bsIpv6MldSnoopingInterfaceEntry, bsIpv6MldSnoopingInterfaceFlush=bsIpv6MldSnoopingInterfaceFlush, bsIpv6MldSnoopingCacheStatus=bsIpv6MldSnoopingCacheStatus, bsIpv6MldSnoopingCacheUpTime=bsIpv6MldSnoopingCacheUpTime, bsIpv6MldSnoopingIgmpGroupUserId=bsIpv6MldSnoopingIgmpGroupUserId, bsIpv6MldSnoopingInterfaceLastListenQueryIntvl=bsIpv6MldSnoopingInterfaceLastListenQueryIntvl, bsIpv6MldSnoopingProxyCacheGroupAddress=bsIpv6MldSnoopingProxyCacheGroupAddress, bsIpv6MldSnoopingInterfaceRobustness=bsIpv6MldSnoopingInterfaceRobustness, bsIpv6MldSnoopingInterfaceProxyIfIndex=bsIpv6MldSnoopingInterfaceProxyIfIndex, 
bsIpv6MldSnoopingInterfaceIgmpActiveMRouterPorts=bsIpv6MldSnoopingInterfaceIgmpActiveMRouterPorts, bsIpv6MldSnoopingProxyCacheVersion=bsIpv6MldSnoopingProxyCacheVersion, bsIpv6MldSnoopingFlush=bsIpv6MldSnoopingFlush, bsIpv6MldSnoopingInterfaceIfIndex=bsIpv6MldSnoopingInterfaceIfIndex, bsIpv6MldSnoopingInterfaceQuerierExpiryTime=bsIpv6MldSnoopingInterfaceQuerierExpiryTime, bsIpv6MldSnoopingScalars=bsIpv6MldSnoopingScalars, bsIpv6MldSnoopingProxyCacheIfIndex=bsIpv6MldSnoopingProxyCacheIfIndex, bsIpv6MldSnoopingIgmpGroupIfIndex=bsIpv6MldSnoopingIgmpGroupIfIndex, bsIpv6MldSnoopingProxyCacheType=bsIpv6MldSnoopingProxyCacheType, PYSNMP_MODULE_ID=bayStackIpv6MldSnoopingMib, bsIpv6MldSnoopingCacheSelf=bsIpv6MldSnoopingCacheSelf)
|
# define: forward propagation code (Python version)
# date: 2022.1.19.
# Resource: 밑바닥부터 시작하는 인공지능 (Saito Goki, 2017)
import numpy as np
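# Minimal forward-only Sigmoid/Affine layers, reconstructed here (an
# assumption based on the book's conventions) so this file runs standalone;
# each layer exposes .params, which TwoLayerNet gathers below.
class Sigmoid:
    def __init__(self):
        self.params = []
    def forward(self, x):
        return 1 / (1 + np.exp(-x))
class Affine:
    def __init__(self, W, b):
        self.params = [W, b]
    def forward(self, x):
        W, b = self.params
        return np.matmul(x, W) + b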
class TwoLayerNet:
def __init__(self, input_size, hidden_size, output_size):
I, H, O = input_size, hidden_size, output_size
        # initialize weights and biases
W1 = np.random.randn(I, H)
b1 = np.random.randn(H)
W2 = np.random.randn(H, O)
b2 = np.random.randn(O)
        # generate layers
self.layers = [
Affine(W1, b1),
Sigmoid(),
Affine(W2, b2)
]
        # gather all parameters into a single list
self.params = []
for layer in self.layers:
self.params += layer.params
def predict(self, x):
for layer in self.layers:
x = layer.forward(x)
return x
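# Quick smoke test (illustrative shapes only): a random batch through the net.
if __name__ == '__main__':
    model = TwoLayerNet(input_size=2, hidden_size=4, output_size=3)
    x = np.random.randn(10, 2)
    print(model.predict(x).shape)  # -> (10, 3)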
|
import board
import busio
import time
import usb_hid
from adafruit_hid.keycode import Keycode
from adafruit_hid.keyboard import Keyboard
from adafruit_hid.keyboard_layout_jp import KeyboardLayoutJP
from adafruit_hid.keyboard_layout_us import KeyboardLayoutUS
import digitalio
import config
import displayio
from adafruit_st7789 import ST7789
from adafruit_display_text import label
from adafruit_bitmap_font import bitmap_font
import supervisor
# Circuit configuration
PIN_UP = board.GP2
PIN_DOWN = board.GP18
PIN_LEFT = board.GP16
PIN_RIGHT = board.GP20
PIN_CENTER = board.GP3
PIN_A = board.GP15
PIN_B = board.GP17
PIN_X = board.GP19
PIN_Y = board.GP21
PIN_SCK_DISPLAY = board.GP10
PIN_SDA_DISPLAY = board.GP11
PIN_CHIPSELECT_DISPLAY = board.GP9
PIN_DATACOMMAND_DISPLAY = board.GP8
PIN_RESET_DISPLAY = board.GP12
DISPLAY_WIDTH = 240
DISPLAY_HEIGHT = 240
LOOP_WAIT = 0.05  # main loop period in seconds
LOCK_TIME = 2 / LOOP_WAIT  # hold CENTER for 2 s (in loop ticks) to re-lock
# COLOR CONFIG
TITLE_COLOR = 0xFFFFFF
TEXT_COLOR = 0xEEEEEE
BG_LOGO = 0x0000CD
BG_LOCK = 0x696969
BG_UNLOCK = 0x0000CD
BG_CENTER = 0x2F4F4F
BG_UP = 0x8B0000
BG_DOWN = 0x4B0082
BG_LEFT = 0xC71585
BG_RIGHT = 0x6A5ACD
# Auto-reload off:
# avoids GP10 being left unreleased when auto-reload restarts the code.
supervisor.disable_autoreload()
# ready keyboard
keyboard = Keyboard(usb_hid.devices)
layout = None
# ready buttons
J_UP = digitalio.DigitalInOut(PIN_UP)
J_UP.direction = digitalio.Direction.INPUT
J_UP.pull = digitalio.Pull.UP
J_DOWN = digitalio.DigitalInOut(PIN_DOWN)
J_DOWN.direction = digitalio.Direction.INPUT
J_DOWN.pull = digitalio.Pull.UP
J_LEFT = digitalio.DigitalInOut(PIN_LEFT)
J_LEFT.direction = digitalio.Direction.INPUT
J_LEFT.pull = digitalio.Pull.UP
J_RIGHT = digitalio.DigitalInOut(PIN_RIGHT)
J_RIGHT.direction = digitalio.Direction.INPUT
J_RIGHT.pull = digitalio.Pull.UP
J_CENTER = digitalio.DigitalInOut(PIN_CENTER)
J_CENTER.direction = digitalio.Direction.INPUT
J_CENTER.pull = digitalio.Pull.UP
btnA = digitalio.DigitalInOut(PIN_A)
btnA.direction = digitalio.Direction.INPUT
btnA.pull = digitalio.Pull.UP
btnB = digitalio.DigitalInOut(PIN_B)
btnB.direction = digitalio.Direction.INPUT
btnB.pull = digitalio.Pull.UP
btnX = digitalio.DigitalInOut(PIN_X)
btnX.direction = digitalio.Direction.INPUT
btnX.pull = digitalio.Pull.UP
btnY = digitalio.DigitalInOut(PIN_Y)
btnY.direction = digitalio.Direction.INPUT
btnY.pull = digitalio.Pull.UP
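# All buttons are wired active-low with internal pull-ups: pressed reads False.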
# ready display
spi = busio.SPI(clock=PIN_SCK_DISPLAY, MOSI=PIN_SDA_DISPLAY)
while not spi.try_lock():
pass
spi.configure(baudrate=24000000) # Configure SPI for 24MHz
spi.unlock()
displayio.release_displays()
display_bus = displayio.FourWire(spi, command=PIN_DATACOMMAND_DISPLAY, chip_select=PIN_CHIPSELECT_DISPLAY, reset=PIN_RESET_DISPLAY)
display = ST7789(display_bus, width=DISPLAY_WIDTH, height=DISPLAY_HEIGHT, rowstart=80, rotation=270)
# Make the display context
passSplash = None # use for lock screen
# load font
font_file = "Junction-regular-24.pcf"
font = bitmap_font.load_font(font_file)
def bgFill(color):
color_bitmap = displayio.Bitmap(DISPLAY_WIDTH, DISPLAY_HEIGHT, 1)
color_palette = displayio.Palette(1)
color_palette[0] = color
bg_sprite = displayio.TileGrid(color_bitmap,pixel_shader=color_palette,x=0, y=0)
return bg_sprite
def textGrp(_scale, _x, _y, _text, _color):
text_group = displayio.Group(scale=_scale, x=_x, y=_y)
text_group.append(label.Label(font, text=_text, color=_color))
return text_group
def logo():
splash = displayio.Group()
display.show(splash)
splash.append(bgFill(BG_LOGO))
splash.append(textGrp(1,5,70,"Welcome to",TITLE_COLOR))
splash.append(textGrp(1,5,120,"Custom Key",TITLE_COLOR))
splash.append(textGrp(1,5,170,"Version 3",TITLE_COLOR))
time.sleep(3)
def textShow(title,text1,text2,text3,text4,bgColor, titlecolor,color):
splash = displayio.Group()
display.show(splash)
splash.append(bgFill(bgColor))
splash.append(textGrp(1,5,30,title,titlecolor))
splash.append(textGrp(1,5,80,text1,color))
splash.append(textGrp(1,5,120,text2,color))
splash.append(textGrp(1,5,160,text3,color))
splash.append(textGrp(1,5,200,text4,color))
def isPressed(currentVal, preVal):
    # Falling edge on an active-low input: was released (high), now pressed (low).
    return preVal and not currentVal
def lockLoop():
if config.uselock == False or len(config.lockpin) == 0:
setLayout()
return
pin = []
passText()
time.sleep(0.1)
preBtnValCENTER = J_CENTER.value
preBtnValA = btnA.value
preBtnValB = btnB.value
preBtnValX = btnX.value
preBtnValY = btnY.value
while True:
stateCENTER = isPressed(J_CENTER.value, preBtnValCENTER)
stateA = isPressed(btnA.value, preBtnValA)
stateB = isPressed(btnB.value, preBtnValB)
stateX = isPressed(btnX.value, preBtnValX)
stateY = isPressed(btnY.value, preBtnValY)
preBtnValCENTER = J_CENTER.value
preBtnValA = btnA.value
preBtnValB = btnB.value
preBtnValX = btnX.value
preBtnValY = btnY.value
if stateCENTER:
if checkPin(pin):
setLayout()
return
pin = []
passText()
elif stateA:
pin.append("A")
passTextAdd(len(pin), "*")
elif stateB:
pin.append("B")
passTextAdd(len(pin), "*")
elif stateX:
pin.append("X")
passTextAdd(len(pin), "*")
elif stateY:
pin.append("Y")
passTextAdd(len(pin), "*")
else:
pass
time.sleep(LOOP_WAIT)
def passText():
global passSplash
passSplash = displayio.Group()
display.show(passSplash)
passSplash.append(bgFill(BG_LOCK))
passSplash.append(textGrp(1,5,30,"Locked",TITLE_COLOR))
passSplash.append(textGrp(1,5,80,"Enter the Pin.",TEXT_COLOR))
passSplash.append(textGrp(1,5,140,">> ",TEXT_COLOR))
def passTextAdd(times, subtext):
    passSplash.append(textGrp(1, 40 + (20 * times), 140, subtext, TEXT_COLOR))
def checkPin(pin):
if len(pin) != len(config.lockpin):
return False
for i in range(len(config.lockpin)):
if pin[i] != config.lockpin[i]:
return False
return True
def setLayout():
global layout
if config.layoutType == "jp":
layout = KeyboardLayoutJP(keyboard)
else:
layout = KeyboardLayoutUS(keyboard)
textShow("Unlocked!", "You got it.","Welcome to",
"Custom Key 3",
" ( ^_^)b", BG_UNLOCK , TITLE_COLOR, TEXT_COLOR)
time.sleep(2.5)
def getKeyCodes(idx, btnIdx):
return config.keymap[idx]["data"][btnIdx]["value"]
def keysend(strVal):
global layout
layout.write(strVal)
time.sleep(0.1)
def titleShow(sidx):
sidxStr = str(sidx)
title = "[UNKNOWN]"
bgC = 0x000000
if sidx == 0:
title = "[CENTER]"
bgC = BG_CENTER
elif sidx == 1:
title = "[UP]"
bgC = BG_UP
elif sidx == 2:
title = "[DOWN]"
bgC = BG_DOWN
elif sidx == 3:
title = "[LEFT]"
bgC = BG_LEFT
elif sidx == 4:
title = "[RIGHT]"
bgC = BG_RIGHT
textShow(title
,"1. " + config.keymap[sidx]["data"][0]["label"]
,"2. " + config.keymap[sidx]["data"][1]["label"]
,"3. " + config.keymap[sidx]["data"][2]["label"]
,"4. " + config.keymap[sidx]["data"][3]["label"]
, bgC ,TITLE_COLOR, TEXT_COLOR)
def mainLoop():
lockLoop()
lockTimeCount = 0
macroIdx = 0
titleShow(macroIdx)
preBtnValUP = J_UP.value
preBtnValDOWN = J_DOWN.value
preBtnValLEFT = J_LEFT.value
preBtnValRIGHT = J_RIGHT.value
preBtnValCENTER = J_CENTER.value
preBtnValA = btnA.value
preBtnValB = btnB.value
preBtnValX = btnX.value
preBtnValY = btnY.value
while True:
stateUP = isPressed(J_UP.value, preBtnValUP)
stateDOWN = isPressed(J_DOWN.value, preBtnValDOWN)
stateLEFT = isPressed(J_LEFT.value, preBtnValLEFT)
stateRIGHT = isPressed(J_RIGHT.value, preBtnValRIGHT)
stateCENTER = isPressed(J_CENTER.value, preBtnValCENTER)
stateA = isPressed(btnA.value, preBtnValA)
stateB = isPressed(btnB.value, preBtnValB)
stateX = isPressed(btnX.value, preBtnValX)
stateY = isPressed(btnY.value, preBtnValY)
preBtnValUP = J_UP.value
preBtnValDOWN = J_DOWN.value
preBtnValLEFT = J_LEFT.value
preBtnValRIGHT = J_RIGHT.value
preBtnValCENTER = J_CENTER.value
preBtnValA = btnA.value
preBtnValB = btnB.value
preBtnValX = btnX.value
preBtnValY = btnY.value
        if not J_CENTER.value:
lockTimeCount = lockTimeCount + 1
if lockTimeCount > LOCK_TIME:
lockTimeCount = 0
lockLoop()
macroIdx = 0
titleShow(macroIdx)
if stateCENTER:
macroIdx = 0
titleShow(macroIdx)
lockTimeCount = 0
elif stateUP:
macroIdx = 1
titleShow(macroIdx)
elif stateDOWN:
macroIdx = 2
titleShow(macroIdx)
elif stateLEFT:
macroIdx = 3
titleShow(macroIdx)
elif stateRIGHT:
macroIdx = 4
titleShow(macroIdx)
elif stateA:
keysend(getKeyCodes(macroIdx, 0))
elif stateB:
keysend(getKeyCodes(macroIdx, 1))
elif stateX:
keysend(getKeyCodes(macroIdx, 2))
elif stateY:
keysend(getKeyCodes(macroIdx, 3))
time.sleep(LOOP_WAIT)
logo()
mainLoop()
|
import sys
sys.path.append('..')
from persistence.bot_reader import BotReader
from persistence.server_writer import ServerWriter
from server.cli import CliServer
sys.path.append('..')
def start_cli():
bot = BotReader('../bots/default.json').load()
server = CliServer(bot)
ServerWriter(server).write('../servers/cli.json')
server.start()
if __name__ == '__main__':
start_cli()
|
# Knight On Chess Board
# https://www.interviewbit.com/problems/knight-on-chess-board/
#
# Given a source point and a destination point on a chess board, determine whether a knight can move from the source to the destination.
#
# [Figure: Knight's movements on a chess board]
#
# The figure details the knight's possible movements (8 in total). Note that a knight cannot move off the board.
#
# If the destination is reachable, return the minimum number of steps the knight needs.
# If the knight cannot move from the source point to the destination point, return -1.
#
# Input:
#
# N, M, x1, y1, x2, y2
# where N and M are size of chess board
# x1, y1 coordinates of source point
# x2, y2 coordinates of destination point
#
# Output:
#
# return Minimum moves or -1
#
# Example
#
# Input : 8 8 1 1 8 8
# Output : 6
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
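# Approach: breadth-first search from the source. Every knight move has unit
# cost, so the first time the destination is reached, visited[i][j] + 1 is the
# minimum number of moves. Time and space complexity are O(N * M).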
class Solution:
steps = ((2, 1), (2, -1), (-2, 1), (-2, -1), (1, 2), (1, -2), (-1, 2), (-1, -2))
def _knight(self, N, M, x1, y1, x2, y2):
from collections import deque
if x1 == x2 and y1 == y2:
return 0
visited = [[-1] * M for _ in range(N)]
queue = deque()
queue.append((x1, y1))
visited[x1][y1] = 0
while queue:
i, j = queue.popleft()
for step in Solution.steps:
tx, ty = step
if i + tx == x2 and j + ty == y2:
return visited[i][j] + 1
if 0 <= i + tx < N and 0 <= j + ty < M and visited[i + tx][j + ty] < 0:
visited[i + tx][j + ty] = visited[i][j] + 1
queue.append((i + tx, j + ty))
return -1
# @param A : integer
# @param B : integer
# @param C : integer
# @param D : integer
# @param E : integer
# @param F : integer
# @return an integer
def knight(self, A, B, C, D, E, F):
return self._knight(A, B, C - 1, D - 1, E - 1, F - 1)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
if __name__ == "__main__":
s = Solution()
print(s.knight(8, 8, 1, 1, 8, 8))
|
from graphql import GraphQLField, GraphQLFieldMap, GraphQLList, GraphQLNonNull, GraphQLObjectType, GraphQLSchema
from sqlalchemy.ext.declarative import DeclarativeMeta
from .args import (
make_args,
make_delete_args,
make_insert_args,
make_insert_one_args,
make_pk_args,
make_update_args,
make_update_by_pk_args,
)
from .helpers import get_table
from .names import (
get_model_delete_by_pk_name,
get_model_delete_name,
get_model_insert_object_name,
get_model_insert_one_object_name,
get_model_pk_field_name,
get_model_update_by_pk_name,
get_model_update_name,
get_table_name,
)
from .objects import build_mutation_response_type, build_object_type
from .resolvers import (
make_delete_by_pk_resolver,
make_delete_resolver,
make_insert_one_resolver,
make_insert_resolver,
make_object_resolver,
make_pk_resolver,
make_update_by_pk_resolver,
make_update_resolver,
)
from .types import Inputs, Objects
def build_queries(model: DeclarativeMeta, objects: Objects, queries: GraphQLFieldMap, inputs: Inputs) -> None:
object_type = build_object_type(model, objects)
objects[object_type.name] = object_type
queries[object_type.name] = GraphQLField(
GraphQLNonNull(GraphQLList(GraphQLNonNull(object_type))),
args=make_args(model, inputs=inputs),
resolve=make_object_resolver(model),
)
if get_table(model).primary_key:
pk_field_name = get_model_pk_field_name(model)
queries[pk_field_name] = GraphQLField(object_type, args=make_pk_args(model), resolve=make_pk_resolver(model))
def build_mutations(model: DeclarativeMeta, objects: Objects, mutations: GraphQLFieldMap, inputs: Inputs) -> None:
mutation_response_type = build_mutation_response_type(model, objects)
object_type = objects[get_table_name(model)]
insert_type_name = get_model_insert_object_name(model)
mutations[insert_type_name] = GraphQLField(
mutation_response_type, args=make_insert_args(model, inputs), resolve=make_insert_resolver(model)
)
insert_one_type_name = get_model_insert_one_object_name(model)
mutations[insert_one_type_name] = GraphQLField(
object_type, args=make_insert_one_args(model, inputs), resolve=make_insert_one_resolver(model)
)
delete_type_name = get_model_delete_name(model)
mutations[delete_type_name] = GraphQLField(
mutation_response_type, args=make_delete_args(model, inputs), resolve=make_delete_resolver(model)
)
update_type_name = get_model_update_name(model)
mutations[update_type_name] = GraphQLField(
mutation_response_type, args=make_update_args(model, inputs), resolve=make_update_resolver(model)
)
if get_table(model).primary_key:
delete_by_pk_type_name = get_model_delete_by_pk_name(model)
mutations[delete_by_pk_type_name] = GraphQLField(
object_type, args=make_pk_args(model), resolve=make_delete_by_pk_resolver(model)
)
update_by_pk_type_name = get_model_update_by_pk_name(model)
mutations[update_by_pk_type_name] = GraphQLField(
object_type, args=make_update_by_pk_args(model, inputs), resolve=make_update_by_pk_resolver(model)
)
def build_schema(base: DeclarativeMeta, enable_subscription: bool = False) -> GraphQLSchema:
"""
Args:
base:
enable_subscription:
Returns: :class:`graphql:graphql.type.GraphQLSchema`
"""
queries: GraphQLFieldMap = {}
mutations: GraphQLFieldMap = {}
objects: Objects = {}
inputs: Inputs = {}
for model in base.__subclasses__():
build_queries(model, objects, queries, inputs)
build_mutations(model, objects, mutations, inputs)
return GraphQLSchema(
GraphQLObjectType("Query", queries),
GraphQLObjectType("Mutation", mutations),
GraphQLObjectType("Subscription", {}) if enable_subscription else None,
)
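# Usage sketch (illustrative only; the Author model below is hypothetical and
# not part of this package):
#
#     from sqlalchemy import Column, Integer, String
#     from sqlalchemy.ext.declarative import declarative_base
#
#     Base = declarative_base()
#
#     class Author(Base):
#         __tablename__ = "authors"
#         id = Column(Integer, primary_key=True)
#         name = Column(String)
#
#     schema = build_schema(Base)  # exposes queries and mutations for Author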
|
"""
CEASIOMpy: Conceptual Aircraft Design Software
Developed by CFS ENGINEERING, 1015 Lausanne, Switzerland
Module to export Aeromap (or other data?) to CSV
Python version: >=3.6
| Author: Aidan Jungo
| Creation: 2021-04-07
| Last modification: 2021-04-08
TODO:
* export of other data...
*
"""
#==============================================================================
# IMPORTS
#==============================================================================
import os
import sys
import math
import numpy
import matplotlib
import ceasiompy.utils.ceasiompyfunctions as ceaf
import ceasiompy.utils.cpacsfunctions as cpsf
import ceasiompy.utils.apmfunctions as apmf
import ceasiompy.utils.su2functions as su2f
import ceasiompy.utils.moduleinterfaces as mi
from ceasiompy.utils.ceasiomlogger import get_logger
log = get_logger(__file__.split('.')[0])
MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
MODULE_NAME = os.path.basename(os.getcwd())
#==============================================================================
# CLASSES
#==============================================================================
#==============================================================================
# FUNCTIONS
#==============================================================================
def export_aeromaps(cpacs_path, cpacs_out_path):
tixi = cpsf.open_tixi(cpacs_path)
wkdir = ceaf.get_wkdir_or_create_new(tixi)
aeromap_to_export_xpath = '/cpacs/toolspecific/CEASIOMpy/export/aeroMapToExport'
    aeromap_uid_list = cpsf.get_string_vector(tixi, aeromap_to_export_xpath)
for aeromap_uid in aeromap_uid_list:
csv_dir_path = os.path.join(wkdir,'CSVresults')
if not os.path.isdir(csv_dir_path):
os.mkdir(csv_dir_path)
csv_path = os.path.join(csv_dir_path,aeromap_uid+'.csv')
apmf.aeromap_to_csv(tixi, aeromap_uid, csv_path)
cpsf.close_tixi(tixi,cpacs_out_path)
#==============================================================================
# MAIN
#==============================================================================
if __name__ == '__main__':
log.info('----- Start of ' + MODULE_NAME + ' -----')
cpacs_path = mi.get_toolinput_file_path(MODULE_NAME)
cpacs_out_path = mi.get_tooloutput_file_path(MODULE_NAME)
    # Call the function which checks that the inputs are well defined
mi.check_cpacs_input_requirements(cpacs_path)
export_aeromaps(cpacs_path, cpacs_out_path)
log.info('----- End of ' + MODULE_NAME + ' -----')
|
#!/usr/bin/python
#vim:fileencoding=utf-8
# Reference: "Trying face recognition with Python + OpenCV"
# http://qiita.com/wwacky/items/98d8be2844fa1b778323
import cv2, sys
# Load the image
img = cv2.imread("/home/pi/face.jpg")
# Convert to grayscale for processing
gimg = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# Load the face-detection cascade data
classifier = "/usr/share/opencv/haarcascades/haarcascade_frontalface_default.xml"
cascade = cv2.CascadeClassifier(classifier)
# Detect
face = cascade.detectMultiScale(gimg, 1.1, 1, cv2.CASCADE_FIND_BIGGEST_OBJECT)
if len(face) == 0: sys.exit(1)  # exit if detection failed
# Detection result
r = face[0]
print("Top-left coordinates:", r[0:2])
print("Width, height:", r[2:4])
# Draw a box around the face and write the result to a file
cv2.rectangle(img, tuple(r[0:2]), tuple(r[0:2] + r[2:4]), (0, 255, 255), 4)
cv2.imwrite("out.jpg", img)
|
# Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
documents = client.sync \
.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.documents \
.list()
for document in documents:
print(document.unique_name)
print(document.data)
|
from . import consts
from .app import start_web_server
if __name__ == "__main__":
start_web_server(port=consts.WEBSERVER_PORT, host=consts.WEBSERVER_HOST)
|
# Tutorial: https://www.youtube.com/watch?v=YQboCnlOb6Y
# 1. A program that sends email
# 2. Prepare the message object
import email.message
msg = email.message.EmailMessage()
msg["From"] = "georgiawang5332@gmail.com"  # sender
# msg["To"] = "nunumary5798@gmail.com"  # valid recipient
msg["To"] = "wleejan982@hotmail.com"  # valid recipient
msg["Subject"] = "Hello, Yueyue"
# 3. Send plain-text content
# msg.set_content("TEST try it out")
# 4. Send richer (HTML) content
msg.add_alternative("<h3>Coupon</h3>Spend 500, get 100 off. Limited-time offer, buy now!", subtype="html")
# 5. Connect to the SMTP server, authenticate as the sender, and send the mail
import smtplib
server = smtplib.SMTP_SSL("smtp.gmail.com", 465)  # search online for the gmail (yahoo/hotmail) SMTP server; each provider has its own server and port, e.g. 465
# This establishes a connection to the gmail mail server
server.login("georgiawang5332@gmail.com", "uusnymglbxkyyqfy")
# ("account", "app-generated password"): Security => sign in to Google => App passwords => enter mailbox password => select app (Other) => select device (Other - custom name) => generate => save it, the password is shown only once
server.send_message(msg)  # send the mail
server.close()  # close the connection
# Run: C:\Users\wleej\PycharmProjects\meatFoodManager\home>python send-email.py
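# Optional refactor sketch (illustrative): smtplib.SMTP_SSL works as a context
# manager (Python 3.3+), which closes the connection automatically, and the app
# password can come from an environment variable (GMAIL_APP_PASSWORD here is a
# hypothetical name) instead of being hard-coded:
#
#     import os, smtplib
#     with smtplib.SMTP_SSL("smtp.gmail.com", 465) as server:
#         server.login("georgiawang5332@gmail.com", os.environ["GMAIL_APP_PASSWORD"])
#         server.send_message(msg)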
|
import typing
from d3m.metadata import hyperparams, base as metadata_module, params
from d3m.primitive_interfaces import base, transformer, unsupervised_learning
from d3m import container, utils
import os
import numpy as np
__all__ = ('GRASTA',)
Inputs = container.ndarray
Outputs = container.ndarray
class GRASTAHyperparams(hyperparams.Hyperparams):
rank = hyperparams.Bounded[int](lower=1,
upper=None,
default=2,
semantic_types=['https://metadata.datadrivendiscovery.org/types/TuningParameter'],
description="Rank of learned low-rank matrix")
subsampling = hyperparams.Bounded[float](lower=0.01,
upper=1,
default=1,
semantic_types=[
'https://metadata.datadrivendiscovery.org/types/TuningParameter'],
description="Matrix sub-sampling parameter")
admm_max_iter = hyperparams.Bounded[int](lower=1,
upper=None,
default=20,
semantic_types=[
'https://metadata.datadrivendiscovery.org/types/TuningParameter'],
description="Maximum ADMM iterations")
admm_min_iter = hyperparams.Bounded[int](lower=1,
upper=None,
default=1,
semantic_types=[
'https://metadata.datadrivendiscovery.org/types/TuningParameter'],
description="Minimum ADMM iterations")
admm_rho = hyperparams.Bounded[float](lower=1, upper=None, default=1.8, semantic_types=[
'https://metadata.datadrivendiscovery.org/types/TuningParameter'], description="ADMM rho parameter")
max_level = hyperparams.Bounded[int](lower=1,
upper=None,
default=20,
semantic_types=[
'https://metadata.datadrivendiscovery.org/types/TuningParameter'],
description="Maximum level in multi-level adaptive step size")
max_mu = hyperparams.Bounded[int](lower=1,
upper=None,
default=15,
semantic_types=[
'https://metadata.datadrivendiscovery.org/types/TuningParameter'],
description="Maximum mu in multi-level adaptive step size")
min_mu = hyperparams.Bounded[int](lower=1,
upper=None,
default=1,
semantic_types=[
'https://metadata.datadrivendiscovery.org/types/TuningParameter'],
description="Minimum mu in multi-level adaptive step size")
constant_step = hyperparams.Bounded[float](lower=0, upper=None, default=0, semantic_types=[
'https://metadata.datadrivendiscovery.org/types/TuningParameter'],
description="Make nonzero for contant step size instead of multi-level adaptive step")
max_train_cycles = hyperparams.Bounded[int](lower=1, upper=None, default=10, semantic_types=[
'https://metadata.datadrivendiscovery.org/types/TuningParameter'],
description="Number of times to cycle over training data")
### GRASTA OPTIONS CLASS
class _OPTIONS(object):
def __init__(self, rank, subsampling=1, admm_max_itr=20, admm_min_itr=20, max_level=20, max_mu=15, min_mu=1,
constant_step=0):
self.admm_max_itr = admm_max_itr
self.admm_min_itr = admm_min_itr
self.rank = rank
self.subsampling = subsampling
self.max_level = max_level
self.max_mu = max_mu
self.min_mu = min_mu
self.constant_step = constant_step
class _STATUS(object):
def __init__(self, last_mu, last_w, last_gamma, level=0, step_scale=0, train=0, step = np.pi/3):
self.last_mu = last_mu
self.level = level
self.step_scale = step_scale
self.last_w = last_w
self.last_gamma = last_gamma
self.train = train
self.step = step
class _OPTS(object):
def __init__(self, max_iter=20, rho=1.8, tol=1e-8):
self.max_iter = max_iter
self.rho = rho
self.tol = tol
class GRASTAParams(params.Params):
OPTIONS: _OPTIONS
STATUS: _STATUS
OPTS: _OPTS
U: np.ndarray
## GRASTA class
#
# uses GRASTA to perform online dimensionality reduction of (possibly) sub-sampled data
class GRASTA(unsupervised_learning.UnsupervisedLearnerPrimitiveBase[Inputs, Outputs, GRASTAParams, GRASTAHyperparams]):
"""
Uses GRASTA to perform online dimensionality reduction of (possibly) sub-sampled data
"""
metadata = metadata_module.PrimitiveMetadata({
'id': 'fe20ef05-7eaf-428b-934f-4de0b8011ed2',
'version': "0.0.5",
'name': 'GRASTA',
'description': """Performs online, unsupervised dimensionality reduction by computing robust PCA on the Grassmannian manifold.""",
'keywords': ['unsupervised learning', 'dimensionality reduction', 'robust PCA', 'low-rank', 'online',
'streaming', 'Grassmannian manifold', 'subspace tracking', 'matrix completion',
'video surveillance'],
'source': {
'name': 'Michigan',
'contact': 'mailto:davjoh@umich.edu',
'uris': [
# link to file and repo
'https://github.com/dvdmjohnson/d3m_michigan_primitives/blob/master/spider/unsupervised_learning/GRASTA/GRASTA.py',
'https://github.com/dvdmjohnson/d3m_michigan_primitives'],
            'citation': """@inproceedings{he2012grasta, title={Incremental Gradient on the Grassmannian for Online Foreground and Background Separation in Subsampled Video}, author={He, Jun and Balzano, Laura and Szlam, Arthur}, booktitle={Computer Vision and Pattern Recognition (CVPR), 2012 IEEE Conference On}, pages={1568--1575}, year={2012}, organization={IEEE}}"""
},
'installation': [
{'type': metadata_module.PrimitiveInstallationType.PIP,
'package_uri': 'git+https://github.com/dvdmjohnson/d3m_michigan_primitives.git@{git_commit}#egg=spider'.format(
git_commit=utils.current_git_commit(os.path.dirname(__file__)))
},
{'type': metadata_module.PrimitiveInstallationType.UBUNTU,
'package': 'ffmpeg',
'version': '7:2.8.11-0ubuntu0.16.04.1'}],
'python_path': 'd3m.primitives.data_compression.grasta.Umich',
'hyperparams_to_tune': ['rank', 'admm_rho', 'max_level', 'max_mu', 'min_mu', 'constant_step',
'max_train_cycles'],
'algorithm_types': [
metadata_module.PrimitiveAlgorithmType.ROBUST_PRINCIPAL_COMPONENT_ANALYSIS],
'primitive_family': metadata_module.PrimitiveFamily.DATA_COMPRESSION
})
# GRASTA class constructor and instantiation
def __init__(self, *, hyperparams: GRASTAHyperparams, random_seed: int = 0,
docker_containers: typing.Dict[str, base.DockerContainer] = None) -> None:
super().__init__(hyperparams=hyperparams, random_seed=random_seed, docker_containers=docker_containers)
self._rank = hyperparams['rank']
self._subsampling = hyperparams['subsampling']
self._admm_max_iter = hyperparams['admm_max_iter']
self._admm_min_iter = hyperparams['admm_min_iter']
self._admm_rho = hyperparams['admm_rho']
self._max_level = hyperparams['max_level']
self._max_mu = hyperparams['max_mu']
self._min_mu = hyperparams['min_mu']
self._constant_step = hyperparams['constant_step']
self._max_train_cycles = hyperparams['max_train_cycles']
self._X: Inputs = None
self._U = None
self._random_state = np.random.RandomState(random_seed)
#Instantiate GRASTA status and admm control params
self._admm_OPTS = _OPTS()
def set_training_data(self, *, inputs: Inputs) -> None:
self._X = inputs
self._dim = inputs.shape[1] # row vector data
self._training_size = inputs.shape[0]
# GRASTA fit function: learns low-rank subspace from training data
def fit(self, *, timeout: float = None, iterations: int = None) -> base.CallResult[None]:
# Internal function to generate low-rank random matrix
def generateLRMatrix(d, r):
rando_mat = self._random_state.randn(d, d)
Q, R = np.linalg.qr(rando_mat)
U = Q[:, 0:r]
return U
assert self._X is not None, "No training data provided."
assert self._X.ndim == 2, "Data is not in the right shape."
        assert self._rank <= self._X.shape[1], "Rank cannot exceed the ambient dimension."
assert self._min_mu < self._max_mu, "Min mu cannot be greater than max mu"
assert self._admm_min_iter <= self._admm_max_iter, "Min admm iterations cannot exceed max admm iterations"
_X = self._X.T # Get the training data
# Begin training
# Instantiate a random low-rank subspace
d = self._dim
r = self._rank
U = generateLRMatrix(d, r)
# Set the training control params
self._grastaOPTIONS = _OPTIONS(self._rank, self._subsampling, self._admm_max_iter,
self._admm_min_iter,
self._max_level, self._max_mu, self._min_mu, self._constant_step)
U = self._train_grasta(_X, U)
self._U = U.copy() # update global variable
return base.CallResult(None)
# GRASTA training internal function
def _train_grasta(self, X, U):
max_cycles = self._max_train_cycles
train_size = self._training_size
self._grastaSTATUS = _STATUS(last_mu=self._min_mu, last_w=np.zeros(self._rank),
last_gamma=np.zeros(self._dim), train=1)
for i in range(0, max_cycles):
perm = self._random_state.choice(train_size, train_size, replace=False) # randomly permute training data
for j in range(0, train_size):
_x = X[:, perm[j]]
if(self._grastaOPTIONS.subsampling < 1):
_xidx = self._random_state.choice(self._dim, int(np.ceil(self._grastaOPTIONS.subsampling * self._dim)),replace=False)
else:
_xidx = np.where(~np.isnan(_x))[0]
U, w, s, STATUS_new, admm_OPTS_new = self._grasta_stream(U, _x, _xidx)
# Update subspace and control variables
self._grastaSTATUS = STATUS_new
self._admm_OPTS = admm_OPTS_new
return U
def continue_fit(self, *, timeout: float = None, iterations: int = None) -> base.CallResult[None]:
# Get the vector input, and the subspace
_X = self._X.T # Get the data
d, numVectors = _X.shape
U = self._U.copy()
# Set the proper subsampling for streaming
self._grastaOPTIONS.subsampling = self._subsampling
for i in range(0, numVectors):
_x = _X[:, i]
if(self._grastaOPTIONS.subsampling < 1):
_xidx = self._random_state.choice(self._dim, int(np.ceil(self._grastaOPTIONS.subsampling * self._dim)),replace=False)
else:
_xidx = np.where(~np.isnan(_x))[0]
# Call GRASTA iteration
U, w, s, STATUS_new, admm_OPTS_new = self._grasta_stream(U, _x, _xidx)
# print("iteration: " + np.str(i) + ' Step: ' + np.str(STATUS_new.step))
# Update subspace and control variables
self._grastaSTATUS = STATUS_new
self._admm_OPTS = admm_OPTS_new
self._U = U.copy()
return base.CallResult(None)
def produce(self, *, inputs: Inputs, timeout: float = None, iterations: int = None) -> base.CallResult[Outputs]:
_X = inputs.T
d, numVectors = _X.shape
Uhat = self._U
self._grastaOPTIONS.subsampling = self._subsampling
Lhat = np.zeros(_X.shape)
for i in range(0, numVectors):
_x = _X[:, i]
if(self._grastaOPTIONS.subsampling < 1):
_xidx = self._random_state.choice(self._dim, int(np.ceil(self._grastaOPTIONS.subsampling * self._dim)),replace=False)
else:
_xidx = np.where(~np.isnan(_x))[0]
U, w, s, STATUS_new, admm_OPTS = self._grasta_stream(Uhat, _x, _xidx)
Lhat[:, i] = U @ w
return base.CallResult(container.ndarray(Lhat.T, generate_metadata=True))
def produce_subspace(self, *, inputs: Inputs, timeout: float = None, iterations: int = None) -> base.CallResult[
Outputs]:
X = inputs
U = self._U.copy()
return base.CallResult(container.ndarray(U, generate_metadata=True))
def produce_sparse(self, *, inputs: Inputs, timeout: float = None, iterations: int = None) -> base.CallResult[
Outputs]:
Lhat = self.produce(inputs=inputs).value
Shat = inputs - Lhat
return base.CallResult(container.ndarray(Shat, generate_metadata=True))
### MAIN GRASTA UPDATE FUNCTION
def _grasta_stream(self, Uhat, x, xIdx):
### ELEMENTWISE SOFT THRESHOLDING FUNCTION
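        # shrinkage(a, kappa) is elementwise soft thresholding:
        # sign(a) * max(|a| - kappa, 0), the proximal operator of the scaled
        # l1 norm; it is what makes the recovered outlier vector sparse.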
def shrinkage(a, kappa):
y = np.maximum(0, a - kappa) - np.maximum(0, -a - kappa)
return y
### SIGMOID FUNCTION
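        # sigmoid() maps its input into (FMIN, FMAX) = (-1, 1), passing through
        # 0 at x = 0. It is applied to the negated gradient inner product, so mu
        # shrinks (allowing larger steps) when consecutive gradients agree and
        # grows (smaller steps) when they disagree.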
def sigmoid(x):
FMIN = -1
FMAX = 1
omega = 0.1
y = FMIN + (FMAX - FMIN) / (1 - (FMAX / FMIN) * np.exp(-x / omega))
return y
### ADMM FUNCTION
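        # admm() solves the per-sample subproblem
        #     min_{w, s} ||s||_1   subject to   Uomega @ w + s = xOmega
        # via an ADMM loop: w is the subspace weight vector, s the sparse
        # outlier estimate, and y the dual variable (see the GRASTA paper).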
def admm(Uomega, xOmega, ADMM_OPTS):
tol = ADMM_OPTS.tol
y = np.zeros(xOmega.size)
s = np.zeros(xOmega.size)
rho = ADMM_OPTS.rho
mu = rho
converged = False
itrs = 0
pinv_U = np.linalg.pinv(Uomega)
max_itrs = ADMM_OPTS.max_iter
while not converged and itrs <= max_itrs:
w = pinv_U @ (xOmega - s + y / mu)
s = shrinkage(xOmega - Uomega @ w + y / mu, 1 / mu)
h = xOmega - Uomega @ w - s
y = y + mu * h
h_norm = np.linalg.norm(h, 2)
itrs += 1
if (h_norm < tol):
converged = True
else:
mu = mu * rho
return w, s, y, h
### Multi-level Adaptive Step Size Calculation Function
def calculate_mla_step(grastaSTATUS, grastaOPTIONS, admm_OPTS, gamma, w, sG):
level_factor = 2
MAX_MU = grastaOPTIONS.max_mu
MIN_MU = grastaOPTIONS.min_mu
MAX_LEVEL = grastaOPTIONS.max_level
ITER_MAX = grastaOPTIONS.admm_max_itr
MIN_ITER = grastaOPTIONS.admm_min_itr
last_w = grastaSTATUS.last_w
last_gamma = grastaSTATUS.last_gamma
last_mu = grastaSTATUS.last_mu
level = grastaSTATUS.level
step_scale = grastaSTATUS.step_scale
DEFAULT_MU_HIGH = (MAX_MU - 1) / 2
DEFAULT_MU_LOW = MIN_MU + 2
# 1. Determine step-scale from 1st observation
if step_scale == 0:
step_scale = 0.5 * np.pi * (1 + MIN_MU) / sG
# 2. Inner product of previous and current gradients
grad_ip = np.trace((last_gamma.T @ gamma) * np.multiply.outer(last_w, w))
# Avoid too large of inner products
normalization = np.linalg.norm(np.multiply.outer(last_gamma, last_w.T), 'fro') * np.linalg.norm(
np.multiply.outer(gamma, w.T), 'fro')
if normalization == 0:
grad_ip_normalization = 0
else:
grad_ip_normalization = grad_ip / normalization
# 3. Take step by sigmoid rule. If gradients in same direction, take a larger step; o.w. small step
mu = max(last_mu + sigmoid(-grad_ip_normalization), MIN_MU)
if grastaOPTIONS.constant_step > 0:
step = grastaOPTIONS.constant_step
else:
step = step_scale * level_factor ** (-level) * sG / (1 + mu)
if step >= np.pi / 3:
step = np.pi / 3
bShrUpd = 0
MAX_ITER = ITER_MAX
if mu <= MIN_MU:
if level > 1:
bShrUpd = 1
level = level - 1
mu = DEFAULT_MU_LOW
elif mu > MAX_MU:
if level < MAX_LEVEL:
bShrUpd = 1
level = level + 1
mu = DEFAULT_MU_HIGH
else:
mu = MAX_MU
if bShrUpd:
if level >= 0 and level < 4:
MAX_ITER = grastaOPTIONS.admm_min_itr
elif level >= 4 and level < 7:
MAX_ITER = min(MIN_ITER * 2, ITER_MAX)
elif level >= 7 and level < 10:
MAX_ITER = min(MIN_ITER * 4, ITER_MAX)
elif level >= 10 and level < 14:
MAX_ITER = min(MIN_ITER * 8, ITER_MAX)
else:
MAX_ITER = ITER_MAX
STATUS_new = _STATUS(mu, w, gamma, level, step_scale, step=step)
ADMM_OPTS_new = _OPTS(MAX_ITER, admm_OPTS.rho, admm_OPTS.tol)
return step, STATUS_new, ADMM_OPTS_new
### Main GRASTA update
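        # The step below is GRASTA's rank-one update along a Grassmannian
        # geodesic:
        #     U <- U + ((cos(t) - 1) * U @ w / ||w|| + sin(t) * gamma / ||gamma||) @ (w / ||w||).T
        # with step length t chosen by the multi-level adaptive rule above.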
xOmega = x[xIdx]
Uomega = Uhat[xIdx, :]
w_hat, s_hat, y_hat, h = admm(Uomega, xOmega, self._admm_OPTS)
gamma1 = y_hat + (xOmega - Uomega @ w_hat - s_hat)
gamma2 = Uomega.T @ gamma1
gamma = np.zeros(self._dim)
gamma[xIdx] = gamma1
gamma = gamma - Uhat @ gamma2
w_norm = np.linalg.norm(w_hat)
gamma_norm = np.linalg.norm(gamma, 2)
sG = gamma_norm * w_norm
t, STATUS_new, admm_OPTS_new = calculate_mla_step(self._grastaSTATUS, self._grastaOPTIONS, self._admm_OPTS,
gamma, w_hat, sG)
step = np.multiply.outer([((np.cos(t) - 1) * (Uhat @ w_hat) / w_norm) + (np.sin(t) * gamma / gamma_norm)],
(w_hat / w_norm))
Unew = Uhat + np.squeeze(step)
return Unew, w_hat, s_hat, STATUS_new, admm_OPTS_new
def get_params(self) -> GRASTAParams:
return GRASTAParams(OPTIONS=self._grastaOPTIONS, STATUS=self._grastaSTATUS, OPTS=self._admm_OPTS, U=self._U)
def set_params(self, *, params: GRASTAParams) -> None:
self._grastaOPTIONS = params['OPTIONS']
self._grastaSTATUS = params['STATUS']
self._admm_OPTS = params['OPTS']
self._U = params['U']
def __getstate__(self) -> dict:
return {
'constructor': {
'hyperparams': self.hyperparams,
'random_seed': self.random_seed,
'docker_containers': self.docker_containers,
},
'params': self.get_params(),
'random_state': self._random_state,
}
def __setstate__(self, state: dict) -> None:
self.__init__(**state['constructor']) # type: ignore
self.set_params(params=state['params'])
self._random_state = state['random_state']
# placeholder for now, just calls base version.
@classmethod
def can_accept(cls, *, method_name: str, arguments: typing.Dict[str, typing.Union[metadata_module.Metadata, type]],
hyperparams: GRASTAHyperparams) -> typing.Optional[metadata_module.DataMetadata]:
return super().can_accept(method_name=method_name, arguments=arguments, hyperparams=hyperparams)
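# Usage sketch (illustrative, not an official example; assumes the d3m runtime
# and this package are installed):
#
#     hp = GRASTAHyperparams.defaults()
#     grasta = GRASTA(hyperparams=hp)
#     X = container.ndarray(np.random.randn(100, 50), generate_metadata=True)
#     grasta.set_training_data(inputs=X)
#     grasta.fit()
#     Lhat = grasta.produce(inputs=X).value  # low-rank part; X - Lhat is the sparse part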
|
import scrapy
from urllib.parse import urlparse, parse_qs, urljoin
from ..items import Spot
class PlayAdvisorSpider(scrapy.Spider):
name = "play_advisor"
start_urls = [
"https://playadvisor.co/zoeken/?_sft_speelplektype=sport-fitness&_sf_s=&_sft_land=nederland",
]
def parse(self, response):
for spot in response.css("article"):
item = Spot()
item["id"] = spot.css("article::attr(id)").re_first(r"post-(\d*)")
item["label"] = spot.css("article header.entry-header h2.entry-title a::text").get()
item["sports"] = spot.xpath("@class").re(r"speelplektype-(\S*)")
# Get additional details
spot_detail_url = spot.css("article header.entry-header h2.entry-title a::attr(href)").get()
request = scrapy.Request(spot_detail_url, callback=self.parse_spot_details,)
request.meta["item"] = item
yield request
# Paginate over search results
next_page = response.css("nav .nav-links a.next::attr(href)").get()
if next_page is not None:
yield response.follow(next_page, self.parse)
def parse_spot_details(self, response):
item = response.meta["item"]
# Add lat and lng
# REF: href="https://maps.google.com?daddr=51.9419762,5.8667076"
google_maps_url = response.css("div#speelplek-location > a::attr(href)").get()
parsed_google_maps_url = urlparse(google_maps_url)
parsed_query_string = parse_qs(parsed_google_maps_url.query)
daddr = parsed_query_string["daddr"][0]
lat, lng = daddr.split(",")
item["lat"] = lat
item["lng"] = lng
# Add images
item["images"] = list()
main_image_url = response.css("div.post-thumb img::attr(src)").get()
item["images"].append(main_image_url)
gallery_images_urls = response.css("div.gallery-image img::attr(src)").getall()
item["images"].extend(gallery_images_urls)
# Add spot address
item["attributes"] = list()
address = response.css("div#speelplek-location p::text").get() or ""
city = response.css("div#speelplek-location p a::text").get() or ""
item["attributes"].append({"attribute_name": "formatted_address", "value": f"{address} {city}"})
# REF: https://playadvisor.co/speelplek/outdoor-fitness-toestellen/skatebaan-in-burgemeester-t-veldpark/?_sft_speelplektype=sport-fitness&_sf_s&_sft_land=nederland
item["attributes"].append(
{"attribute_name": "url", "value": urljoin(response.url, urlparse(response.url).path),}
)
yield item
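# To run this spider from a Scrapy project root (standard Scrapy CLI):
#     scrapy crawl play_advisor -o spots.json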
|
"""
The mainwindow module.
"""
import os
import shutil
from multiprocessing import Process
from PyQt4.QtCore import (Qt, QDir, QFile, QFileInfo, QIODevice,
QPoint, QSize, QTextStream, QUrl)
from PyQt4.QtGui import (qApp, QAction, QCheckBox, QDesktopWidget, QDialog,
QDockWidget, QFileDialog, QIcon, QLabel, QLineEdit, QMainWindow, QMenuBar,
QMessageBox, QKeySequence, QPrintDialog, QPrinter, QStatusBar, QSplitter,
QTabWidget, QTextCursor, QTextDocument, QToolBar, QTreeWidgetItem,
QTreeWidgetItemIterator, QVBoxLayout, QWidget)
from PyQt4.QtWebKit import QWebPage
from whoosh.index import create_in, open_dir
from whoosh.qparser import QueryParser, RegexPlugin
import mikidown.mikidown_rc
from .config import __appname__, __version__
from .mikibook import NotebookListDialog
from .mikitree import MikiTree, TocTree
from .mikiedit import MikiEdit
from .mikiview import MikiView
from .mikisearch import MikiSearch
from .attachment import AttachmentView
from .highlighter import MikiHighlighter
from .utils import LineEditDialog, ViewedNoteIcon, parseHeaders, parseTitle
import logging
from autologging import logged, traced, TracedMethods
_logger = logging.getLogger(__name__)
@logged
class MikiWindow(QMainWindow):
def __init__(self, settings, parent=None):
super(MikiWindow, self).__init__(parent)
self.setObjectName("mikiWindow")
self.settings = settings
self.notePath = settings.notePath
################ Setup core components ################
self.notesTree = MikiTree(self)
self.notesTree.setObjectName("notesTree")
self.initTree(self.notePath, self.notesTree)
self.notesTree.sortItems(0, Qt.AscendingOrder)
self.ix = None
self.setupWhoosh()
self.viewedList = QToolBar(self.tr('Recently Viewed'), self)
self.viewedList.setIconSize(QSize(16, 16))
self.viewedList.setToolButtonStyle(Qt.ToolButtonTextBesideIcon)
self.viewedListActions = []
self.noteSplitter = QSplitter(Qt.Horizontal)
self.dockIndex = QDockWidget("Index")
self.dockSearch = QDockWidget("Search")
self.searchEdit = QLineEdit()
self.searchView = MikiSearch(self)
self.searchTab = QWidget()
self.dockToc = QDockWidget("TOC")
self.tocTree = TocTree()
self.dockAttachment = QDockWidget("Attachment")
self.attachmentView = AttachmentView(self)
self.notesEdit = MikiEdit(self)
self.notesEdit.setObjectName("notesEdit")
MikiHighlighter(self.notesEdit)
self.notesView = MikiView(self)
self.findBar = QToolBar(self.tr('Find'), self)
self.findBar.setFixedHeight(30)
self.findEdit = QLineEdit(self.findBar)
self.checkBox = QCheckBox(self.tr('Match case'), self.findBar)
self.statusBar = QStatusBar(self)
self.statusLabel = QLabel(self)
self.altPressed = False
################ Setup actions ################
self.actions = dict()
self.setupActions()
################ Setup mainwindow ################
self.setupMainWindow()
# show changelogs after upgrade mikidown
if self.settings.version < __version__:
self.changelogHelp()
self.settings.qsettings.setValue("version", __version__)
def setupActions(self):
# Global Actions
actTabIndex = self.act(self.tr('Switch to Index Tab'),
lambda: self.raiseDock(self.dockIndex), 'Ctrl+Shift+I')
actTabSearch = self.act(self.tr('Switch to Search Tab'),
lambda: self.raiseDock(self.dockSearch), 'Ctrl+Shift+F')
self.addAction(actTabIndex)
self.addAction(actTabSearch)
################ Menu Actions ################
# actions in menuFile
actionNewPage = self.act(self.tr('&New Page...'),
self.notesTree.newPage, QKeySequence.New)
self.actions.update(newPage=actionNewPage)
actionNewSubpage = self.act(self.tr('New Sub&page...'),
self.notesTree.newSubpage, 'Ctrl+Shift+N')
self.actions.update(newSubpage=actionNewSubpage)
actionImportPage = self.act(self.tr('&Import Page...'), self.importPage)
self.actions.update(importPage=actionImportPage)
actionOpenNotebook = self.act(self.tr('&Open Notebook...'),
self.openNotebook, QKeySequence.Open)
self.actions.update(openNotebook=actionOpenNotebook)
actionReIndex = self.act(self.tr('Re-index'), self.reIndex)
self.actions.update(reIndex=actionReIndex)
actionSave = self.act(self.tr('&Save'),
self.saveCurrentNote, QKeySequence.Save)
actionSave.setEnabled(False)
self.actions.update(save=actionSave)
actionSaveAs = self.act(self.tr('Save &As...'),
self.saveNoteAs, QKeySequence.SaveAs)
self.actions.update(saveAs=actionSaveAs)
actionHtml = self.act(self.tr('to &HTML'), self.notesEdit.saveAsHtml)
self.actions.update(html=actionHtml)
actionPrint = self.act(self.tr('&Print'),
self.printNote, QKeySequence.Print)
self.actions.update(print_=actionPrint)
actionRenamePage = self.act(self.tr('&Rename Page...'),
self.notesTree.renamePage, 'F2')
self.actions.update(renamePage=actionRenamePage)
actionDelPage = self.act(self.tr('&Delete Page'),
self.notesTree.delPageWrapper, QKeySequence.Delete)
self.actions.update(delPage=actionDelPage)
actionQuit = self.act(self.tr('&Quit'), self.close, QKeySequence.Quit)
actionQuit.setMenuRole(QAction.QuitRole)
self.actions.update(quit=actionQuit)
# actions in menuEdit
actionUndo = self.act(self.tr('&Undo'),
lambda: self.notesEdit.undo(), QKeySequence.Undo)
actionUndo.setEnabled(False)
self.notesEdit.undoAvailable.connect(actionUndo.setEnabled)
self.actions.update(undo=actionUndo)
actionRedo = self.act(self.tr('&Redo'),
lambda: self.notesEdit.redo(), QKeySequence.Redo)
actionRedo.setEnabled(False)
self.notesEdit.redoAvailable.connect(actionRedo.setEnabled)
self.actions.update(redo=actionRedo)
actionFindText = self.act(self.tr('&Find Text'),
self.findBar.setVisible, QKeySequence.Find, True)
self.actions.update(findText=actionFindText)
actionFind = self.act(self.tr('Next'),
self.findText, QKeySequence.FindNext)
self.actions.update(find=actionFind)
actionFindPrev = self.act(self.tr('Previous'),
lambda: self.findText(back=True), QKeySequence.FindPrevious)
self.actions.update(findPrev=actionFindPrev)
actionSortLines = self.act(self.tr('&Sort Lines'), self.sortLines)
self.actions.update(sortLines=actionSortLines)
actionInsertImage = self.act(self.tr('&Insert Attachment'),
self.notesEdit.insertAttachmentWrapper, 'Ctrl+I')
actionInsertImage.setEnabled(False)
self.actions.update(insertImage=actionInsertImage)
# actions in menuView
actionEdit = self.act(self.tr('Edit'), self.edit, 'Ctrl+E',
True, QIcon(':/icons/edit.svg'), 'Edit mode (Ctrl+E)')
self.actions.update(edit=actionEdit)
actionSplit = self.act(self.tr('Split'), self.liveView, 'Ctrl+R',
True, QIcon(':/icons/split.svg'), 'Split mode (Ctrl+R)')
self.actions.update(split=actionSplit)
actionFlipEditAndView = self.act(self.tr('Flip Edit and View'),
self.flipEditAndView)
actionFlipEditAndView.setEnabled(False)
self.actions.update(flipEditAndView=actionFlipEditAndView)
#actionLeftAndRight = self.act(
# self.tr('Split into Left and Right'), trig=self.leftAndRight)
#actionUpAndDown = self.act(
# self.tr('Split into Up and Down'), trig=self.upAndDown)
# self.actionLeftAndRight.setEnabled(False)
# self.actionUpAndDown.setEnabled(False)
# actions in menuHelp
actionReadme = self.act(self.tr('README'), self.readmeHelp)
self.actions.update(readme=actionReadme)
actionChangelog = self.act(self.tr('Changelog'), self.changelogHelp)
self.actions.update(changelog=actionChangelog)
actionAboutQt = self.act(self.tr('About Qt'), qApp.aboutQt)
self.actions.update(aboutQt=actionAboutQt)
def setupMainWindow(self):
self.resize(800, 600)
screen = QDesktopWidget().screenGeometry()
size = self.geometry()
self.move((
screen.width()-size.width())/2, (screen.height()-size.height())/2)
self.setWindowTitle(
'{} - {}'.format(self.settings.notebookName, __appname__))
self.viewedList.setFixedHeight(25)
self.noteSplitter.addWidget(self.notesEdit)
self.noteSplitter.addWidget(self.notesView)
mainSplitter = QSplitter(Qt.Vertical)
mainSplitter.setChildrenCollapsible(False)
mainSplitter.addWidget(self.viewedList)
mainSplitter.addWidget(self.noteSplitter)
mainSplitter.addWidget(self.findBar)
self.setCentralWidget(mainSplitter)
self.searchEdit.returnPressed.connect(self.searchNote)
searchLayout = QVBoxLayout()
searchLayout.addWidget(self.searchEdit)
searchLayout.addWidget(self.searchView)
self.searchTab.setLayout(searchLayout)
self.tocTree.header().close()
self.dockIndex.setObjectName("Index")
self.dockIndex.setWidget(self.notesTree)
self.dockSearch.setObjectName("Search")
self.dockSearch.setWidget(self.searchTab)
self.dockToc.setObjectName("TOC")
self.dockToc.setWidget(self.tocTree)
self.dockAttachment.setObjectName("Attachment")
self.dockAttachment.setWidget(self.attachmentView)
self.setDockOptions(QMainWindow.VerticalTabs)
self.addDockWidget(Qt.LeftDockWidgetArea, self.dockIndex)
self.addDockWidget(Qt.LeftDockWidgetArea, self.dockSearch)
self.addDockWidget(Qt.LeftDockWidgetArea, self.dockToc)
self.addDockWidget(Qt.LeftDockWidgetArea, self.dockAttachment)
self.tabifyDockWidget(self.dockIndex, self.dockSearch)
self.tabifyDockWidget(self.dockSearch, self.dockToc)
self.tabifyDockWidget(self.dockToc, self.dockAttachment)
self.setTabPosition(Qt.LeftDockWidgetArea, QTabWidget.North)
self.dockIndex.raise_() # Put dockIndex on top of the tab stack
menuBar = QMenuBar(self)
self.setMenuBar(menuBar)
menuFile = menuBar.addMenu(self.tr('&File'))
menuEdit = menuBar.addMenu(self.tr('&Edit'))
menuView = menuBar.addMenu(self.tr('&View'))
menuHelp = menuBar.addMenu(self.tr('&Help'))
# menuFile
menuFile.addAction(self.actions['newPage'])
menuFile.addAction(self.actions['newSubpage'])
menuFile.addAction(self.actions['importPage'])
menuFile.addAction(self.actions['openNotebook'])
menuFile.addAction(self.actions['reIndex'])
menuFile.addSeparator()
menuFile.addAction(self.actions['save'])
menuFile.addAction(self.actions['saveAs'])
menuFile.addAction(self.actions['print_'])
menuExport = menuFile.addMenu(self.tr('&Export'))
menuExport.addAction(self.actions['html'])
menuFile.addSeparator()
menuFile.addAction(self.actions['renamePage'])
menuFile.addAction(self.actions['delPage'])
menuFile.addSeparator()
menuFile.addAction(self.actions['quit'])
# menuEdit
menuEdit.addAction(self.actions['undo'])
menuEdit.addAction(self.actions['redo'])
menuEdit.addAction(self.actions['findText'])
menuEdit.addSeparator()
menuEdit.addAction(self.actions['sortLines'])
menuEdit.addAction(self.actions['insertImage'])
# menuView
menuView.addAction(self.actions['edit'])
menuView.addAction(self.actions['split'])
menuView.addAction(self.actions['flipEditAndView'])
menuShowHide = menuView.addMenu(self.tr('Show/Hide'))
menuShowHide.addAction(self.dockIndex.toggleViewAction())
menuShowHide.addAction(self.dockSearch.toggleViewAction())
menuShowHide.addAction(self.dockToc.toggleViewAction())
menuShowHide.addAction(self.dockAttachment.toggleViewAction())
#menuMode = menuView.addMenu(self.tr('Mode'))
#menuMode.addAction(self.actionLeftAndRight)
#menuMode.addAction(self.actionUpAndDown)
# menuHelp
menuHelp.addAction(self.actions['readme'])
menuHelp.addAction(self.actions['changelog'])
menuHelp.addAction(self.actions['aboutQt'])
toolBar = QToolBar(self.tr("toolbar"), self)
toolBar.setObjectName("toolbar") # needed in saveState()
toolBar.setIconSize(QSize(16, 16))
toolBar.setToolButtonStyle(Qt.ToolButtonTextBesideIcon)
self.addToolBar(Qt.TopToolBarArea, toolBar)
toolBar.addAction(self.actions['edit'])
toolBar.addAction(self.actions['split'])
self.findEdit.returnPressed.connect(self.findText)
self.findBar.addWidget(self.findEdit)
self.findBar.addWidget(self.checkBox)
self.findBar.addAction(self.actions['findPrev'])
self.findBar.addAction(self.actions['find'])
self.findBar.setVisible(False)
self.findBar.visibilityChanged.connect(self.findBarVisibilityChanged)
self.setStatusBar(self.statusBar)
self.statusBar.addWidget(self.statusLabel, 1)
self.notesTree.currentItemChanged.connect(
self.currentItemChangedWrapper)
self.tocTree.itemClicked.connect(self.tocNavigate)
self.notesEdit.textChanged.connect(self.noteEditted)
self.notesEdit.document(
).modificationChanged.connect(self.modificationChanged)
self.updateRecentViewedNotes()
notes = self.settings.recentViewedNotes()
if len(notes) != 0:
item = self.notesTree.pageToItem(notes[0])
self.notesTree.setCurrentItem(item)
def setupWhoosh(self):
# Initialize whoosh index, make sure notePath/.indexdir exists
indexdir = self.settings.indexdir
try:
self.ix = open_dir(indexdir)
except:
QDir().mkpath(indexdir)
self.ix = create_in(indexdir, self.settings.schema)
        # Fork a process to update the index, which keeps the UI responsive.
p = Process(target=self.whoosh_index, args=())
p.start()
def restore(self):
""" Restore saved geometry and state.
Set the status of side panels in View Menu correspondently.
"""
if self.settings.geometry:
self.restoreGeometry(self.settings.geometry)
if self.settings.windowstate:
self.restoreState(self.settings.windowstate)
def initTree(self, notePath, parent):
        ''' If foo.md, foo.mkd and foo.markdown all exist,
            only one item is shown in notesTree.
        '''
if not QDir(notePath).exists():
return
notebookDir = QDir(notePath)
notesList = notebookDir.entryInfoList(['*.md', '*.mkd', '*.markdown'],
QDir.NoFilter,
QDir.Name|QDir.IgnoreCase)
nl = [note.completeBaseName() for note in notesList]
noduplicate = list(set(nl))
for name in noduplicate:
item = QTreeWidgetItem(parent, [name])
path = notePath + '/' + name
self.initTree(path, item)
def updateToc(self):
''' TOC is updated in `updateView`
tocTree fields: [hdrText, hdrPosition, hdrAnchor]
'''
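        # Walk the parsed headers and nest each TOC item according to how its
        # level compares with the previous header's level.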
root = self.notesTree.currentPage()
self.tocTree.clear()
item = QTreeWidgetItem(self.tocTree, [root, '0'])
curLevel = 0
for (level, h, p, a) in parseHeaders(self.notesEdit.toPlainText()):
val = [h, str(p), a]
if level == curLevel:
item = QTreeWidgetItem(item.parent(), val)
elif level < curLevel:
item = QTreeWidgetItem(item.parent().parent(), val)
curLevel = level
else:
item = QTreeWidgetItem(item, val)
curLevel = level
self.tocTree.expandAll()
def updateAttachmentView(self):
# Update attachmentView to show corresponding attachments.
item = self.notesTree.currentItem()
attachmentdir = self.notesTree.itemToAttachmentDir(item)
self.__logger.debug("UpdateAttatchementView : %s", attachmentdir)
index = self.attachmentView.model.index(attachmentdir)
self.attachmentView.setRootIndex(index)
def openFile(self, filename):
        fh = QFile(filename)
        if not fh.open(QIODevice.ReadOnly):
            QMessageBox.warning(self, 'Read Error',
                                'Failed to open %s: %s' % (filename, fh.errorString()))
            return
        noteBody = QTextStream(fh).readAll()
        fh.close()
self.notesEdit.setPlainText(noteBody)
self.notesView.scrollPosition = QPoint(0, 0)
# self.actionSave.setEnabled(False)
self.notesEdit.document().setModified(False)
self.notesView.updateView()
self.setCurrentNote()
self.updateRecentViewedNotes()
#self.statusLabel.setText(noteFullName)
def currentItemChangedWrapper(self, current, previous):
if current is None:
return
#if previous != None and self.notesTree.pageExists(previous):
prev = self.notesTree.itemToPage(previous)
if self.notesTree.pageExists(prev):
self.saveNote(previous)
currentFile = self.notesTree.itemToFile(current)
self.openFile(currentFile)
# Update attachmentView to show corresponding attachments.
attachmentdir = self.notesTree.itemToAttachmentDir(current)
self.attachmentView.model.setRootPath(attachmentdir)
self.__logger.debug("currentItemChangedWrapper: %s", attachmentdir)
index = self.attachmentView.model.index(attachmentdir)
if index.row() == -1:
index = self.attachmentView.model.index(self.settings.attachmentPath)
self.attachmentView.model.setFilter(QDir.Files)
self.attachmentView.setRootIndex(index)
def tocNavigate(self, current):
''' works for notesEdit now '''
if current is None:
return
pos = int(current.text(1))
link = "file://" + self.notePath + "/#" + current.text(2)
        # Moving the cursor to END first ensures the header
        # is positioned at the top of the visible area.
self.notesEdit.moveCursor(QTextCursor.End)
cur = self.notesEdit.textCursor()
cur.setPosition(pos, QTextCursor.MoveAnchor)
self.notesEdit.setTextCursor(cur)
self.notesView.load(QUrl(link))
def switchNote(self, num):
if num < len(self.viewedListActions):
self.viewedListActions[num].trigger()
def saveCurrentNote(self):
item = self.notesTree.currentItem()
self.saveNote(item)
def saveNote(self, item):
if self.notesEdit.document().isModified():
self.notesEdit.document().setModified(False)
else:
return
self.notesEdit.save(item)
def saveNoteAs(self):
self.saveCurrentNote()
fileName = QFileDialog.getSaveFileName(self, self.tr('Save as'), '',
'(*.md *.mkd *.markdown);;'+self.tr('All files(*)'))
if fileName == '':
return
if not QFileInfo(fileName).suffix():
fileName += '.md'
fh = QFile(fileName)
fh.open(QIODevice.WriteOnly)
savestream = QTextStream(fh)
savestream << self.notesEdit.toPlainText()
fh.close()
def printNote(self):
printer = QPrinter(QPrinter.HighResolution)
printer.setCreator(__appname__ + ' ' + __version__)
printer.setDocName(self.notesTree.currentItem().text(0))
        printdialog = QPrintDialog(printer, self)
        if printdialog.exec_() == QDialog.Accepted:
            self.notesView.print_(printer)
def noteEditted(self):
""" Continuously get fired while editing"""
self.updateToc()
self.notesView.updateLiveView()
def modificationChanged(self, changed):
""" Fired one time: modified or not """
self.actions['save'].setEnabled(changed)
name = self.notesTree.currentPage()
self.statusBar.clearMessage()
if changed:
self.statusLabel.setText(name + '*')
else:
self.statusLabel.setText(name)
def importPage(self):
filename = QFileDialog.getOpenFileName(
self, self.tr('Import file'), '',
'(*.md *.mkd *.markdown *.txt);;'+self.tr('All files(*)'))
if filename == '':
return
self.importPageCore(filename)
def importPageCore(self, filename):
fh = QFile(filename)
fh.open(QIODevice.ReadOnly)
fileBody = QTextStream(fh).readAll()
fh.close()
page = QFileInfo(filename).completeBaseName()
fh = QFile(self.notesTree.pageToFile(page))
if fh.exists():
QMessageBox.warning(self, 'Import Error',
'Page already exists: %s' % page)
dialog = LineEditDialog(self.notePath, self)
if dialog.exec_():
page = dialog.editor.text()
fh.close()
fh = QFile(self.notesTree.pageToFile(page))
else:
return
fh.open(QIODevice.WriteOnly)
savestream = QTextStream(fh)
savestream << fileBody
fh.close()
item = QTreeWidgetItem(self.notesTree, [page])
self.notesTree.sortItems(0, Qt.AscendingOrder)
self.notesTree.setCurrentItem(item)
def openNotebook(self):
dialog = NotebookListDialog(self)
if dialog.exec_():
pass
def reIndex(self):
""" Whoosh index breaks for unknown reasons (sometimes) """
shutil.rmtree(self.settings.indexdir)
self.setupWhoosh()
def act(self, name, trig, shortcut=None, checkable=False,
icon=None, tooltip=None):
""" A wrapper to several QAction methods """
if icon:
action = QAction(icon, name, self)
else:
action = QAction(name, self)
if shortcut:
action.setShortcut(QKeySequence(shortcut))
action.setCheckable(checkable)
if tooltip:
action.setToolTip(tooltip)
action.triggered.connect(trig)
return action
def edit(self, viewmode):
""" Switch between EDIT and VIEW mode. """
if self.actions['split'].isChecked():
self.actions['split'].setChecked(False)
self.notesView.setVisible(not viewmode)
self.notesEdit.setVisible(viewmode)
# Gives the keyboard input focus to notesEdit/notesView.
# Without this, keyboard input may change note text even when
# notesEdit is invisible.
if viewmode:
self.notesEdit.setFocus()
else:
self.notesView.setFocus()
self.saveCurrentNote()
self.actions['insertImage'].setEnabled(viewmode)
#self.actionLeftAndRight.setEnabled(True)
#self.actionUpAndDown.setEnabled(True)
# Render the note text as it is.
self.notesView.updateView()
def liveView(self, viewmode):
""" Switch between VIEW and LIVE VIEW mode. """
self.actions['split'].setChecked(viewmode)
sizes = self.noteSplitter.sizes()
if self.actions['edit'].isChecked():
self.actions['edit'].setChecked(False)
self.notesView.setVisible(viewmode)
splitSize = [sizes[0]*0.45, sizes[0]*0.55]
else:
self.notesEdit.setVisible(viewmode)
splitSize = [sizes[1]*0.45, sizes[1]*0.55]
# setFocus for the same reason as in edit(self, viewmode)
if viewmode:
self.notesEdit.setFocus()
else:
self.notesView.setFocus()
self.actions['flipEditAndView'].setEnabled(viewmode)
#self.actionUpAndDown.setEnabled(viewmode)
self.actions['insertImage'].setEnabled(viewmode)
self.noteSplitter.setSizes(splitSize)
self.saveCurrentNote()
# Render the note text as it is.
self.notesView.updateView()
def findBarVisibilityChanged(self, visible):
self.actions['findText'].setChecked(visible)
if visible:
self.findEdit.setFocus(Qt.ShortcutFocusReason)
def findText(self, back=False):
flags = 0
if back:
flags = QTextDocument.FindBackward
if self.checkBox.isChecked():
flags = flags | QTextDocument.FindCaseSensitively
text = self.findEdit.text()
if not self.findMain(text, flags):
if text in self.notesEdit.toPlainText():
cursor = self.notesEdit.textCursor()
if back:
cursor.movePosition(QTextCursor.End)
else:
cursor.movePosition(QTextCursor.Start)
self.notesEdit.setTextCursor(cursor)
self.findMain(text, flags)
# self.notesView.findText(text, flags)
def findMain(self, text, flags):
viewFlags = QWebPage.FindFlags(
flags) | QWebPage.FindWrapsAroundDocument
if flags:
self.notesView.findText(text, viewFlags)
return self.notesEdit.find(text, flags)
else:
self.notesView.findText(text)
return self.notesEdit.find(text)
def sortLines(self):
''' sort selected lines
Currently, have to select whole lines. (ToFix)
TODO: second sort reverse the order
'''
cursor = self.notesEdit.textCursor()
text = cursor.selectedText()
lines = text.split('\u2029') # '\u2029' is the line break
sortedLines = sorted(lines)
self.notesEdit.insertPlainText('\n'.join(sortedLines))
def notesEditInFocus(self, e):
if e.gotFocus:
self.actions['insertImage'].setEnabled(True)
# if e.lostFocus:
# self.actionInsertImage.setEnabled(False)
# QWidget.focusInEvent(self,f)
def searchNote(self):
""" Sorting criteria: "title > path > content"
Search matches are organized into html source.
"""
pattern = self.searchEdit.text()
if not pattern:
return
results = []
with self.ix.searcher() as searcher:
matches = []
for f in ["title", "path", "content"]:
queryp = QueryParser(f, self.ix.schema)
queryp.add_plugin(RegexPlugin())
# r"pattern" is the desired regex term format
query = queryp.parse('r"' + pattern + '"')
ms = searcher.search(query, limit=None) # default limit is 10!
for m in ms:
if not m in matches:
matches.append(m)
for r in matches:
title = r['title']
path = r['path']
term = r.highlights("content")
results.append([title, path, term])
html = """
<style>
body { font-size: 14px; }
.path { font-size: 12px; color: #009933; }
</style>
"""
for title, path, hi in results:
html += ("<p><a href='" + path + "'>" + title +
"</a><br/><span class='path'>" +
path + "</span><br/>" + hi + "</p>")
self.searchView.setHtml(html)
def whoosh_index(self):
it = QTreeWidgetItemIterator(
self.notesTree, QTreeWidgetItemIterator.All)
writer = self.ix.writer()
while it.value():
treeItem = it.value()
name = self.notesTree.itemToPage(treeItem)
path = os.path.join(self.notesTree.pageToFile(name))
print(path)
fileobj = open(path, 'r')
content = fileobj.read()
fileobj.close()
writer.add_document(
path=name, title=parseTitle(content, name), content=content)
it += 1
writer.commit()
def listItemChanged(self, row):
if row != -1:
item = self.searchList.currentItem().data(Qt.UserRole)
self.notesTree.setCurrentItem(item)
flags = QWebPage.HighlightAllOccurrences
self.notesView.findText(self.searchEdit.text(), flags)
def setCurrentNote(self):
item = self.notesTree.currentItem()
name = self.notesTree.itemToPage(item)
# Current note is inserted to head of list.
notes = self.settings.recentViewedNotes()
for f in notes:
if f == name:
notes.remove(f)
notes.insert(0, name)
# TODO: move this NUM to configuration
if len(notes) > 20:
del notes[20:]
self.settings.updateRecentViewedNotes(notes)
def updateRecentViewedNotes(self):
""" Switching notes will trigger this.
When Alt pressed, show note number.
"""
self.viewedList.clear()
self.viewedListActions = []
        # Keep only the notes that still exist.
viewedNotes = self.settings.recentViewedNotes()
existedNotes = []
i = 0
for f in viewedNotes:
if self.notesTree.pageExists(f):
existedNotes.append(f)
names = f.split('/')
if self.altPressed and i in range(1, 10):
action = self.act(names[-1], self.openFunction(f),
'Alt+'+str(i), True, ViewedNoteIcon(i), 'Alt+'+str(i))
else:
action = self.act(names[-1], self.openFunction(f),
None, True)
self.viewedListActions.append(action)
i += 1
if not self.altPressed:
self.settings.updateRecentViewedNotes(existedNotes)
for action in self.viewedListActions:
self.viewedList.addAction(action)
if len(self.viewedListActions):
self.viewedListActions[0].setChecked(True)
def openFunction(self, name):
item = self.notesTree.pageToItem(name)
return lambda: self.notesTree.setCurrentItem(item)
def raiseDock(self, widget):
if not widget.isVisible():
widget.show()
if widget == self.dockSearch:
self.searchEdit.setFocus()
widget.raise_()
def flipEditAndView(self):
index = self.noteSplitter.indexOf(self.notesEdit)
if index == 0:
self.noteSplitter.insertWidget(1, self.notesEdit)
else:
self.noteSplitter.insertWidget(0, self.notesEdit)
def leftAndRight(self):
self.liveView(True)
self.noteSplitter.setOrientation(Qt.Horizontal)
#self.actionLeftAndRight.setEnabled(False)
#self.actionUpAndDown.setEnabled(True)
def upAndDown(self):
self.liveView(True)
self.noteSplitter.setOrientation(Qt.Vertical)
#self.actionUpAndDown.setEnabled(False)
#self.actionLeftAndRight.setEnabled(True)
def readmeHelp(self):
readmeFile = '/usr/share/mikidown/README.mkd'
if not os.path.exists(readmeFile):
readmeFile = os.path.join(
os.path.dirname(os.path.dirname(__file__)), 'README.mkd')
self.importPageCore(readmeFile)
def changelogHelp(self):
changeLog = "/usr/share/mikidown/Changelog.md"
if not os.path.exists(changeLog):
changeLog = os.path.join(
os.path.dirname(os.path.dirname(__file__)), 'Changelog.md')
self.importPageCore(changeLog)
def keyPressEvent(self, event):
""" When Alt pressed, note number will be shown in viewedList. """
if event.key() == Qt.Key_Alt:
self.altPressed = True
self.updateRecentViewedNotes()
else:
QMainWindow.keyPressEvent(self, event)
def keyReleaseEvent(self, event):
if event.key() == Qt.Key_Alt:
self.altPressed = False
self.updateRecentViewedNotes()
        else:
            QMainWindow.keyReleaseEvent(self, event)
def closeEvent(self, event):
"""
saveGeometry: Saves the current geometry and state for
top-level widgets
saveState: Restores the state of this mainwindow's toolbars
and dockwidgets
"""
self.saveCurrentNote()
self.settings.saveGeometry(self.saveGeometry())
self.settings.saveWindowState(self.saveState())
event.accept()
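# --- Hedged usage sketch (not part of the original app) ---
# A minimal, standalone illustration of the Whoosh pattern used by
# whoosh_index() and searchNote() above: add documents with title/path/content
# fields, then query with RegexPlugin's r"pattern" syntax. The schema fields
# and the r"..." query format mirror the methods above; the temp directory and
# sample document are illustrative assumptions.
def _whoosh_search_demo():
    import tempfile
    from whoosh.fields import Schema, TEXT, ID
    from whoosh.index import create_in
    from whoosh.qparser import QueryParser, RegexPlugin

    schema = Schema(path=ID(stored=True, unique=True),
                    title=TEXT(stored=True), content=TEXT(stored=True))
    ix = create_in(tempfile.mkdtemp(), schema)
    writer = ix.writer()
    writer.add_document(path='notes/todo', title='todo',
                        content='fix the search dialog')
    writer.commit()
    with ix.searcher() as searcher:
        parser = QueryParser('content', ix.schema)
        parser.add_plugin(RegexPlugin())
        query = parser.parse('r"fix.*"')
        for hit in searcher.search(query, limit=None):
            print(hit['title'], hit['path'], hit.highlights('content'))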
|
import os
import tempfile
import urllib.error
import zeit.cms.testing
import zope.app.appsetup.product
class HealthCheckTest(zeit.cms.testing.ZeitCmsBrowserTestCase):
check = 'http://localhost/++skin++vivi/@@health-check'
def setUp(self):
super().setUp()
self.browser = zeit.cms.testing.Browser(self.layer['wsgi_app'])
def test_should_normally_have_status_200(self):
b = self.browser
b.open(self.check)
self.assertEqual('200 Ok', b.headers['status'])
self.assertEqual('OK', b.contents)
def test_should_fail_if_stopfile_exists(self):
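        # Reserve a unique filename, but make sure the file itself does not exist yet.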
handle, filename = tempfile.mkstemp()
os.close(handle)
os.unlink(filename)
config = zope.app.appsetup.product.getProductConfiguration('zeit.cms')
config['stopfile'] = filename
b = self.browser
with self.assertNothingRaised():
b.open(self.check)
open(filename, 'w').close()
with self.assertRaises(urllib.error.HTTPError) as info:
b.open(self.check)
self.assertEqual(500, info.exception.status)
        self.assertEqual(
            'fail: stopfile %s present' % filename,
            info.exception.read().decode('utf-8'))
|
default_app_config = "backend.reviews.apps.ReviewsConfig"
|
#!/usr/bin/python3
# author: Charlotte Bunne
# imports
import jax
import jax.numpy as jnp
import numpy as np
import optax
# internal imports
from jkonet.utils.helper import count_parameters
from jkonet.utils.optim import global_norm, penalize_weights_icnn
from jkonet.models import fixpoint_loop
from jkonet.models.loss import sinkhorn_loss
def get_step_fn(optimize_psi_fn, psi, optimizer_psi, teacher_forcing=True,
cumulative=False, parallel=False, epsilon=0.1,
loss='sinkhorn', train=True):
"""Create a one-step training and evaluation function of Energy."""
def loss_fn_energy(params_energy, rng_psi, batch, t):
# initialize psi model and optimizer
params_psi = psi.init(
rng_psi, jnp.ones(batch[t].shape[1]))['params']
opt_state_psi = optimizer_psi.init(params_psi)
# solve jko step
_, predicted, loss_psi = optimize_psi_fn(
params_energy, params_psi, opt_state_psi, batch[t])
# compute sinkhorn distance between prediction and data
if loss == 'sinkhorn':
loss_energy = sinkhorn_loss(predicted, batch[t + 1], epsilon,
div=True)
elif loss == 'wasserstein':
loss_energy = sinkhorn_loss(predicted, batch[t + 1], epsilon,
div=False)
else:
raise NotImplementedError
return loss_energy, (loss_psi, predicted)
def loss_fn_energy_cum(params_energy, rng_psi, batch):
# iterate through time steps
def _through_time(batch, t):
# initialize psi model and optimizer
params_psi = psi.init(
rng_psi, jnp.ones(batch[t].shape[1]))['params']
opt_state_psi = optimizer_psi.init(params_psi)
# solve jko step
_, predicted, loss_psi = optimize_psi_fn(
params_energy, params_psi, opt_state_psi, batch[t])
# compute sinkhorn distance between prediction and data
if loss == 'sinkhorn':
loss_energy = sinkhorn_loss(predicted, batch[t + 1], epsilon,
div=True)
elif loss == 'wasserstein':
loss_energy = sinkhorn_loss(predicted, batch[t + 1], epsilon,
div=False)
else:
raise NotImplementedError
            # without teacher forcing, replace the next observation with the prediction
            batch = jax.lax.cond(
                teacher_forcing, lambda x: x,
                lambda x: x.at[t + 1].set(predicted), batch)
return batch, (loss_energy, loss_psi, predicted)
_, (loss_energy, loss_psi, predicted) = jax.lax.scan(
_through_time, batch, jnp.arange(len(batch) - 1))
return jnp.sum(loss_energy), (loss_energy, loss_psi, predicted)
@jax.jit
def step_fn_cum(inputs, batch):
"""Running one step of training or evaluation with cumulative loss."""
rng_psi, state_energy = inputs
# adjust dimensions
if parallel:
rng_psi = jnp.squeeze(rng_psi)
# define gradient function
grad_fn_energy = jax.value_and_grad(
loss_fn_energy_cum, argnums=0, has_aux=True)
if train:
# compute gradient
(loss_energy, (_, loss_psi, _)
), grad_energy = grad_fn_energy(
state_energy.params, rng_psi, batch)
if parallel:
grad_energy = jax.lax.pmean(grad_energy, axis_name="batch")
# apply gradient to energy optimizer
state_energy = state_energy.apply_gradients(grads=grad_energy)
# compute gradient norm
grad_norm = global_norm(grad_energy)
return (rng_psi, state_energy), (loss_energy, loss_psi, grad_norm)
else:
(loss_energy, (_, _, predicted)), _ = grad_fn_energy(
state_energy.params, rng_psi, batch)
return loss_energy, predicted
@jax.jit
def step_fn(inputs, batch):
"""Running one step of training or evaluation."""
rng_psi, state_energy = inputs
# adjust dimensions
if parallel:
rng_psi = jnp.squeeze(rng_psi)
# define gradient function
grad_fn_energy = jax.value_and_grad(
loss_fn_energy, argnums=0, has_aux=True)
if train:
# iterate through time steps
def _through_time(inputs, t):
state_energy, batch = inputs
# compute gradient
(loss_energy, (loss_psi, predicted)
), grad_energy = grad_fn_energy(state_energy.params,
rng_psi, batch, t)
if parallel:
grad_energy = jax.lax.pmean(grad_energy, axis_name="batch")
# apply gradient to energy optimizer
state_energy = state_energy.apply_gradients(grads=grad_energy)
# compute gradient norm
grad_norm = global_norm(grad_energy)
                # without teacher forcing, replace the next observation with the prediction
                batch = jax.lax.cond(
                    teacher_forcing, lambda x: x,
                    lambda x: x.at[t + 1].set(predicted), batch)
return ((state_energy, batch),
(loss_energy, loss_psi, grad_norm))
# iterate through time steps
(state_energy, _), (
loss_energy, loss_psi, grad_norm) = jax.lax.scan(
_through_time, (state_energy, batch),
jnp.arange(len(batch) - 1))
loss_energy = jnp.sum(loss_energy)
return (rng_psi, state_energy), (loss_energy, loss_psi, grad_norm)
else:
# iterate through time steps
def _through_time(inputs, t):
state_energy, batch = inputs
(loss_energy, (loss_psi, predicted)), _ = grad_fn_energy(
state_energy.params, rng_psi, batch, t)
                # without teacher forcing, replace the next observation with the prediction
                batch = jax.lax.cond(
                    teacher_forcing, lambda x: x,
                    lambda x: x.at[t + 1].set(predicted), batch)
return ((state_energy, batch),
(loss_energy, loss_psi, predicted))
# iterate through time steps
(_, _), (loss_energy, loss_psi, predicted) = jax.lax.scan(
_through_time, (state_energy, batch),
jnp.arange(len(batch) - 1))
loss_energy = jnp.sum(loss_energy)
# do not update state
return loss_energy, predicted
if cumulative:
return step_fn_cum
else:
return step_fn
def get_optimize_psi_fn(optimizer_psi, psi, energy, tau=1.0, n_iter=100,
min_iter=50, max_iter=200, inner_iter=10,
threshold=1e-5, beta=1.0, pos_weights=True, cvx_reg=.0,
fploop=False):
"""Create a training function of Psi."""
def loss_fn_psi(params_psi, params_energy, data):
grad_psi_data = jax.vmap(lambda x: jax.grad(
psi.apply, argnums=1)({'params': params_psi}, x))(data)
predicted = cvx_reg * data + grad_psi_data
# jko objective
loss_e = energy.apply(
{'params': params_energy}, predicted)
loss_p = jnp.sum(jnp.square(predicted - data))
loss = loss_e + 1 / tau * loss_p
# add penalty to negative icnn weights in relaxed setting
if not pos_weights:
penalty = penalize_weights_icnn(params_psi)
loss += beta * penalty
return loss, grad_psi_data
@jax.jit
def step_fn_fpl(params_energy, params_psi, opt_state_psi, data):
def cond_fn(iteration, constants, state):
"""Condition function for optimization of convex potential Psi.
"""
_, _ = constants
_, _, _, _, grad = state
            norm = sum(jax.tree_util.tree_leaves(
                jax.tree_util.tree_map(jnp.linalg.norm, grad)))
norm /= count_parameters(grad)
return jnp.logical_or(iteration == 0,
jnp.logical_and(jnp.isfinite(norm),
norm > threshold))
def body_fn(iteration, constants, state, compute_error):
"""Body loop for gradient update of convex potential Psi.
"""
params_energy, data = constants
params_psi, opt_state_psi, loss_psi, predicted, _ = state
(loss_jko, predicted), grad_psi = jax.value_and_grad(
loss_fn_psi, argnums=0, has_aux=True)(
params_psi, params_energy, data)
# apply optimizer update
updates, opt_state_psi = optimizer_psi.update(
grad_psi, opt_state_psi)
params_psi = optax.apply_updates(params_psi, updates)
            loss_psi = loss_psi.at[iteration // inner_iter].set(loss_jko)
return params_psi, opt_state_psi, loss_psi, predicted, grad_psi
# create empty vectors for losses and predictions
loss_psi = jnp.full(
(np.ceil(max_iter / inner_iter).astype(int)), 0., dtype=float)
predicted = jnp.zeros_like(data, dtype=float)
# define states and constants
state = params_psi, opt_state_psi, loss_psi, predicted, params_psi
constants = params_energy, data
        # iteratively optimize psi
params_psi, _, loss_psi, predicted, _ = fixpoint_loop.fixpoint_iter(
cond_fn, body_fn, min_iter, max_iter, inner_iter, constants, state)
return params_psi, predicted, loss_psi
@jax.jit
def step_fn(params_energy, params_psi, opt_state_psi, data):
# iteratively optimize psi
def apply_psi_update(state_psi, i):
params_psi, opt_state_psi = state_psi
# compute gradient of jko step
(loss_psi, predicted), grad_psi = jax.value_and_grad(
loss_fn_psi, argnums=0, has_aux=True)(
params_psi, params_energy, data)
# apply optimizer update
updates, opt_state_psi = optimizer_psi.update(
grad_psi, opt_state_psi)
params_psi = optax.apply_updates(params_psi, updates)
return (params_psi, opt_state_psi), (loss_psi, predicted)
(params_psi, _), (loss_psi, predicted) = jax.lax.scan(
apply_psi_update, (params_psi, opt_state_psi), jnp.arange(n_iter))
return params_psi, predicted[-1], loss_psi
if fploop:
return step_fn_fpl
else:
return step_fn
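# --- Hedged illustration (toy example, not part of the library) ---
# loss_fn_psi above implements one JKO proximal step: minimise
#   E(T(x)) + (1 / tau) * ||T(x) - x||^2
# over the transported points T(x). Below is a minimal JAX-only toy with a
# quadratic stand-in for the energy and a plain additive shift instead of
# grad(psi); all names here are illustrative assumptions.
def _jko_toy_step(x, tau=1.0, lr=0.1, n_iter=100):
    """Gradient descent on a toy JKO objective; returns the transported points."""
    energy = lambda z: jnp.sum(z ** 2)  # stand-in for energy.apply

    def objective(shift):
        predicted = x + shift  # stand-in for grad(psi) evaluated at x
        return energy(predicted) + jnp.sum(jnp.square(predicted - x)) / tau

    shift = jnp.zeros_like(x)
    for _ in range(n_iter):
        shift = shift - lr * jax.grad(objective)(shift)
    return x + shift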
|
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
from taggit.managers import TaggableManager
from django.urls import reverse
# Create your models here.
class PublishedManager(models.Manager):
def get_queryset(self):
        return super().get_queryset().filter(status='published')
class Post(models.Model):
""" This Blog Post Model """
STATUS_CHOICES = (
('draft', 'Draft'),
('published', 'Published'),
)
title = models.CharField(max_length=250)
slug = models.SlugField(max_length=250,
unique_for_date='publish')
author = models.ForeignKey(User,
on_delete=models.CASCADE,
related_name='blog_posts')
body = models.TextField()
    publish = models.DateTimeField(default=timezone.now)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
    status = models.CharField(max_length=10,
                              choices=STATUS_CHOICES,
                              default='draft')
objects = models.Manager() # the default manager like Post.objects.all()
published = PublishedManager() # customized manager
tags = TaggableManager()
class Meta:
        ordering = ('-publish',) # newest posts first
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse('blog:post_detail',
args=[self.publish.year,
self.publish.month,
self.publish.day, self.slug])
# Comment Model
class Comment(models.Model):
post = models.ForeignKey(Post,
on_delete=models.CASCADE,
related_name='comments')
name = models.CharField(max_length=80)
email = models.EmailField()
body = models.TextField()
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
active = models.BooleanField(default=True)
class Meta:
ordering = ('created',)
def __str__(self):
        return f'Comment by {self.name} on {self.post}'
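# --- Hedged usage sketch (e.g. from a shell or view; not used by the app) ---
# PublishedManager narrows the default queryset to status='published', so both
# managers can be used side by side; the tag name below is illustrative.
def _example_queries():
    all_posts = Post.objects.all()  # default manager: every status
    live_posts = Post.published.filter(tags__name='django')  # custom manager
    first_url = live_posts[0].get_absolute_url() if live_posts else None
    return all_posts, live_posts, first_url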
|
from typing import Dict, List, Optional, Tuple, Union
import numpy as np
import pandas as pd
import scipy.linalg
import scipy.sparse
import scipy.sparse.linalg
from sklearn.base import BaseEstimator
from sklearn.utils.validation import check_is_fitted
from datafold.dynfold.base import TransformType, TSCTransformerMixin
from datafold.pcfold import PCManifold, TSCDataFrame
from datafold.pcfold.eigsolver import compute_kernel_eigenpairs
from datafold.pcfold.kernels import GaussianKernel, PCManifoldKernel
from datafold.utils.general import mat_dot_diagmat
def sort_eigensystem(eigenvalues, eigenvectors):
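    """Sort eigenpairs by descending absolute eigenvalue (DataFrame or array eigenvectors)."""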
idx = np.argsort(np.abs(eigenvalues))[::-1]
sorted_eigenvalues = eigenvalues[idx]
if isinstance(eigenvectors, pd.DataFrame):
sorted_eigenvectors = eigenvectors.iloc[:, idx]
else:
sorted_eigenvectors = eigenvectors[:, idx]
return sorted_eigenvalues, sorted_eigenvectors
class JsfDataset:
"""`JsfDataset` does the slicing of multimodal data. This is needed, as `.fit`,
`.transform`, and `.fit_transform` of `JointlySmoothFunctions` accept a single
data array `X`. Thus, the multimodal data is passed in as a single array and is
then separated inside the methods.
Parameters
----------
name
The name of the dataset.
columns
The columns that correspond to the dataset.
kernel
The (optional) kernel for the dataset.
result_scaling
The (optional) result scaling for the parameter optimization.
dist_kwargs
Keyword arguments passed to the internal distance matrix computation. See
:py:meth:`datafold.pcfold.distance.compute_distance_matrix` for parameter
arguments.
"""
def __init__(
self,
name: Optional[str] = None,
columns: Optional[slice] = None,
kernel: Optional[PCManifoldKernel] = None,
result_scaling: float = 1.0,
**dist_kwargs,
):
self.name = name
self.columns = columns
self.kernel = kernel
self.result_scaling = result_scaling
self.dist_kwargs = dist_kwargs
def extract_from(self, X: TransformType) -> Union[TSCDataFrame, PCManifold]:
if self.columns:
if isinstance(X, pd.DataFrame) or isinstance(X, TSCDataFrame):
data = X.iloc[:, self.columns]
else:
data = X[:, self.columns]
else:
data = X
if isinstance(data, TSCDataFrame):
if self.kernel is None:
self.kernel = GaussianKernel()
data = TSCDataFrame(data, kernel=self.kernel, dist_kwargs=self.dist_kwargs)
elif isinstance(data, (np.ndarray, pd.DataFrame)):
data = PCManifold(
data=data, kernel=self.kernel, dist_kwargs=self.dist_kwargs
)
if self.kernel is None:
data.optimize_parameters(
inplace=True, result_scaling=self.result_scaling
)
return data
class _ColumnSplitter:
"""Uses a `JsfDataset` list to split up a single data array X into a `PCManifold` list.
Parameters
----------
datasets
The `JsfDataset`s used to split up the array X.
"""
def __init__(self, datasets: Optional[List[JsfDataset]] = None):
self.datasets = datasets
def split(self, X: TransformType, y=None) -> List[Union[TSCDataFrame, PCManifold]]:
if not self.datasets:
dataset = JsfDataset()
return [dataset.extract_from(X)]
X_split: List[Union[TSCDataFrame, PCManifold]] = []
for dataset in self.datasets:
X_split.append(dataset.extract_from(X))
return X_split
class JointlySmoothFunctions(TSCTransformerMixin, BaseEstimator):
"""Calculate smooth functions on multimodal data/observations.
Parameters
----------
datasets
The :py:class:`JsfDataset`s used to split up the multimodal data.
n_kernel_eigenvectors
The number of eigenvectors to compute from the kernel matrices.
n_jointly_smooth_functions
The number of jointly smooth functions to compute from the eigenvectors of the
kernel matrices.
kernel_eigenvalue_cut_off
        The kernel eigenvectors with an eigenvalue smaller than or equal to
``kernel_eigenvalue_cut_off`` will not be included in the calculation of the
jointly smooth functions.
eigenvector_tolerance
The relative accuracy for eigenvalues, i.e. the stopping criterion. A value of
0 implies machine precision.
Attributes
----------
observations_: List[PCManifold]
The :py:class:`PCManifolds` containing the separated observations with the
specified, corresponding :py:class:`PCManifoldKernel`.
    kernel_matrices_: List[scipy.sparse.csr_matrix]
The computed kernel matrices.
_cdist_kwargs_: List[Dict]
The cdist_kwargs returned during the kernel calculation. This is required for the
out-of-sample extension.
kernel_eigenvectors_: List[scipy.sparse.csr_matrix]
The kernel eigenvectors used to calculate the jointly smooth functions.
    kernel_eigenvalues_: List[scipy.sparse.csr_matrix]
The kernel eigenvalues used to calculate the out-of-sample extension.
_jointly_smooth_functions_: np.ndarray
The calculated jointly smooth functions of shape
`(n_samples, n_jointly_smooth_functions)`.
_eigenvalues_: np.ndarray
        The eigenvalues of the jointly smooth functions of shape
        `(n_jointly_smooth_functions,)`
References
----------
:cite:`TODO enter paper reference`
"""
def __init__(
self,
datasets: Optional[List[JsfDataset]] = None,
n_kernel_eigenvectors: int = 100,
n_jointly_smooth_functions: int = 10,
kernel_eigenvalue_cut_off: float = 0,
eigenvector_tolerance: float = 1e-6,
) -> None:
self.n_kernel_eigenvectors = n_kernel_eigenvectors
self.n_jointly_smooth_functions = n_jointly_smooth_functions
self.datasets = datasets
self.kernel_eigenvalue_cut_off = kernel_eigenvalue_cut_off
self.eigenvector_tolerance = eigenvector_tolerance
self.ending_points_: List[int]
self.observations_: List[Union[TSCDataFrame, PCManifold]]
self.kernel_matrices_: List[scipy.sparse.csr_matrix]
self._cdist_kwargs_: List[Dict]
self.kernel_eigenvectors_: List[scipy.sparse.csr_matrix]
self.kernel_eigenvalues_: List[scipy.sparse.csr_matrix]
self._jointly_smooth_functions_: np.ndarray
self._eigenvalues_: np.ndarray
@property
def jointly_smooth_functions(self) -> TransformType:
return self._jointly_smooth_functions_
@property
def eigenvalues(self) -> np.ndarray:
return self._eigenvalues_
def _calculate_kernel_matrices(self):
self._cdist_kwargs_ = []
self.kernel_matrices_ = []
for observation in self.observations_:
kernel_output = observation.compute_kernel_matrix()
kernel_matrix, cdist_kwargs, _ = PCManifoldKernel.read_kernel_output(
kernel_output
)
self._cdist_kwargs_.append(cdist_kwargs)
sparse_kernel_matrix = scipy.sparse.csr_matrix(
kernel_matrix, dtype=np.float64
)
self.kernel_matrices_.append(sparse_kernel_matrix)
def _calculate_kernel_eigensystem(self):
self.kernel_eigenvectors_ = []
self.kernel_eigenvalues_ = []
for i, kernel_matrix in enumerate(self.kernel_matrices_):
            is_symmetric = np.all(kernel_matrix.A == kernel_matrix.T.A)
            ones_row = np.ones(kernel_matrix.shape[0])
            ones_col = np.ones(kernel_matrix.shape[1])
            is_stochastic = np.all(kernel_matrix @ ones_col == ones_row)
kernel_eigenvalues, kernel_eigenvectors = compute_kernel_eigenpairs(
kernel_matrix,
n_eigenpairs=self.n_kernel_eigenvectors,
is_symmetric=is_symmetric,
is_stochastic=is_stochastic,
)
if isinstance(kernel_matrix, TSCDataFrame):
index_from = kernel_matrix
elif (
isinstance(self.observations_[i], TSCDataFrame)
and kernel_matrix.shape[0] == self.observations_[i].shape[0]
):
index_from = self.observations_[i]
else:
index_from = None
if index_from is not None:
kernel_eigenvectors = TSCDataFrame.from_same_indices_as(
index_from,
kernel_eigenvectors,
except_columns=[
f"kev{i}" for i in range(self.n_kernel_eigenvectors)
],
)
kernel_eigenvalues, kernel_eigenvectors = sort_eigensystem(
kernel_eigenvalues, kernel_eigenvectors
)
if isinstance(kernel_eigenvectors, TSCDataFrame):
kernel_eigenvectors = kernel_eigenvectors.iloc[
:, kernel_eigenvalues > self.kernel_eigenvalue_cut_off
]
else:
kernel_eigenvectors = kernel_eigenvectors[
:, kernel_eigenvalues > self.kernel_eigenvalue_cut_off
]
kernel_eigenvalues = kernel_eigenvalues[
kernel_eigenvalues > self.kernel_eigenvalue_cut_off
]
self.kernel_eigenvectors_.append(kernel_eigenvectors)
self.kernel_eigenvalues_.append(kernel_eigenvalues)
def _calculate_jointly_smooth_functions(self) -> Tuple[np.ndarray, np.ndarray]:
eigenvectors_matrix = scipy.sparse.csr_matrix(
np.column_stack([eigenvector for eigenvector in self.kernel_eigenvectors_])
)
tsc_flag = isinstance(self.kernel_eigenvectors_[0], TSCDataFrame)
if tsc_flag:
index_from = self.kernel_eigenvectors_[0]
else:
index_from = None
rng = np.random.default_rng(seed=1)
if len(self.kernel_eigenvectors_) == 2:
ev0 = self.kernel_eigenvectors_[0]
ev1 = self.kernel_eigenvectors_[1]
n_jointly_smooth_functions = min(
[self.n_jointly_smooth_functions, ev0.shape[1] - 1, ev1.shape[1] - 1]
)
if tsc_flag:
evs = ev0.to_numpy().T @ ev1.to_numpy()
else:
evs = ev0.T @ ev1
min_ev_shape = min(evs.shape)
v0 = rng.normal(loc=0, scale=1 / min_ev_shape, size=min_ev_shape)
Q, eigenvalues, R_t = scipy.sparse.linalg.svds(
evs,
k=n_jointly_smooth_functions,
which="LM",
tol=self.eigenvector_tolerance,
v0=v0,
)
            center = np.vstack(
                [np.column_stack([Q, Q]), np.column_stack([R_t.T, -R_t.T])]
            )
right = np.diag(
np.power(np.concatenate([1 + eigenvalues, 1 - eigenvalues]), -1 / 2)
)
jointly_smooth_functions = (
1 / np.sqrt(2) * eigenvectors_matrix @ center @ right
)[:, :n_jointly_smooth_functions]
else:
n_jointly_smooth_functions = min(
[self.n_jointly_smooth_functions, eigenvectors_matrix.shape[1]]
)
min_ev_shape = min(eigenvectors_matrix.shape)
v0 = rng.normal(loc=0, scale=1 / min_ev_shape, size=min_ev_shape)
jointly_smooth_functions, eigenvalues, _ = scipy.sparse.linalg.svds(
eigenvectors_matrix,
k=n_jointly_smooth_functions,
which="LM",
tol=self.eigenvector_tolerance,
v0=v0,
)
if index_from is not None:
jointly_smooth_functions = TSCDataFrame.from_same_indices_as(
index_from,
jointly_smooth_functions,
except_columns=[f"jsf{i}" for i in range(n_jointly_smooth_functions)],
)
eigenvalues, jointly_smooth_functions = sort_eigensystem(
eigenvalues, jointly_smooth_functions
)
return jointly_smooth_functions, eigenvalues
def nystrom(self, new_indexed_observations: Dict[int, TransformType]):
"""Embed out-of-sample points with Nyström.
(see transform of dmap for Nyström documentation)
Parameters
----------
        new_indexed_observations: Dict[int, Union[TSCDataFrame, pandas.DataFrame, numpy.ndarray]]
A dict containing out-of-sample points for (not necessarily all) observations.
The keys are the indexes of the observations. The values are the observations
of shape `(n_samples, *n_features_of_observation*)`.
Returns
-------
TSCDataFrame, pandas.DataFrame, numpy.ndarray
same type as the values of shape `(n_samples, n_jointly_smooth_functions)`.
"""
eigenvectors = []
alphas = []
for index, new_observation in new_indexed_observations.items():
kernel_eigenvectors = self.kernel_eigenvectors_[index]
if isinstance(kernel_eigenvectors, TSCDataFrame):
kernel_eigenvectors = kernel_eigenvectors.to_numpy()
if isinstance(self._jointly_smooth_functions_, TSCDataFrame):
alpha = (
kernel_eigenvectors.T @ self._jointly_smooth_functions_.to_numpy()
)
else:
alpha = kernel_eigenvectors.T @ self._jointly_smooth_functions_
alphas.append(alpha)
observation = self.observations_[index]
kernel_output = observation.compute_kernel_matrix(
new_observation, **self._cdist_kwargs_[index]
)
kernel_matrix, _, _ = PCManifoldKernel.read_kernel_output(
kernel_output=kernel_output
)
approx_eigenvectors = kernel_matrix @ mat_dot_diagmat(
kernel_eigenvectors,
np.reciprocal(self.kernel_eigenvalues_[index]),
)
if isinstance(kernel_matrix, TSCDataFrame):
index_from: Optional[TSCDataFrame] = kernel_matrix
elif (
isinstance(new_observation, TSCDataFrame)
and kernel_matrix.shape[0] == new_observation.shape[0]
):
index_from = new_observation
else:
index_from = None
if index_from is not None:
approx_eigenvectors = TSCDataFrame.from_same_indices_as(
index_from,
approx_eigenvectors,
except_columns=[
f"aev{i}"
for i in range(self.kernel_eigenvectors_[index].shape[1])
],
)
eigenvectors.append(approx_eigenvectors)
f_m_star = 0.0
for i in range(len(alphas)):
f_m_star += eigenvectors[i] @ alphas[i]
f_m_star /= len(alphas)
return f_m_star
def fit(self, X: TransformType, y=None, **fit_params) -> "JointlySmoothFunctions":
"""Compute the jointly smooth functions.
Parameters
----------
X: TSCDataFrame, pandas.Dataframe, numpy.ndarray
Training data of shape `(n_samples, n_features)`
y: None
ignored
**fit_params: Dict[str, object]
ignored
Returns
-------
JointlySmoothFunctions
self
"""
X = self._validate_datafold_data(
X=X,
array_kwargs=dict(
ensure_min_samples=max(2, self.n_kernel_eigenvectors + 1)
),
tsc_kwargs=dict(ensure_min_samples=max(2, self.n_kernel_eigenvectors + 1)),
)
self._setup_feature_attrs_fit(
X=X,
features_out=[f"jsf{i}" for i in range(self.n_jointly_smooth_functions)],
)
column_splitter = _ColumnSplitter(self.datasets)
self.observations_ = column_splitter.split(X)
self._calculate_kernel_matrices()
self._calculate_kernel_eigensystem()
(
self._jointly_smooth_functions_,
self._eigenvalues_,
) = self._calculate_jointly_smooth_functions()
return self
def transform(self, X: TransformType) -> TransformType:
"""Embed out-of-sample points with the Nyström extension.
(see transform of dmap for Nyström documentation)
Parameters
----------
X: TSCDataFrame, pandas.DataFrame, numpy.ndarray
Data points of shape `(n_samples, n_features)` to be embedded.
Returns
-------
TSCDataFrame, pandas.DataFrame, numpy.ndarray
same type as `X` of shape `(n_samples, n_jointly_smooth_functions)`
"""
check_is_fitted(
self,
(
"observations_",
"kernel_matrices_",
"_cdist_kwargs_",
"kernel_eigenvectors_",
"kernel_eigenvalues_",
"_jointly_smooth_functions_",
"_eigenvalues_",
),
)
X = self._validate_datafold_data(
X=X,
array_kwargs=dict(ensure_min_samples=1),
tsc_kwargs=dict(ensure_min_samples=1),
)
if X.shape[1] != self.n_features_in_:
            raise ValueError(
                "X must have the same number of features as the data with which fit "
                "was called. To transform fewer observations, call nystrom instead."
            )
self._validate_feature_input(X, direction="transform")
column_splitter = _ColumnSplitter(self.datasets)
new_observations = column_splitter.split(X)
indices = list(range(len(self.observations_)))
indexed_observations = dict(zip(indices, new_observations))
f_m_star = self.nystrom(indexed_observations)
return f_m_star
def fit_transform(self, X: TransformType, y=None, **fit_params) -> TransformType:
"""Compute jointly smooth functions and return them.
Parameters
----------
X: TSCDataFrame, pandas.DataFrame, numpy.ndarray
Training data of shape `(n_samples, n_features)`
y: None
ignored
**fit_params: Dict[str, object]
            See the `fit` method for additional parameters.
Returns
-------
TSCDataFrame, pandas.DataFrame, numpy.ndarray
same type as `X` of shape `(n_samples, n_jointly_smooth_functions)`
"""
X = self._validate_datafold_data(
X,
array_kwargs=dict(ensure_min_samples=max(2, self.n_kernel_eigenvectors)),
tsc_kwargs=dict(ensure_min_samples=max(2, self.n_kernel_eigenvectors)),
)
self.fit(X=X, y=y, **fit_params)
return self._jointly_smooth_functions_
def score_(self, X, y):
"""Compute a score for hyperparameter optimization.
Returns
-------
float
The sum of the truncated energies.
"""
return self.calculate_truncated_energies().sum()
def calculate_truncated_energies(self) -> np.ndarray:
"""Compute the truncated energy for each kernel eigenvector.
Returns
-------
np.ndarray
The truncated energies of shape `(n_observations, n_jointly_smooth_functions)`.
"""
truncated_energies = []
for kernel_eigenvector in self.kernel_eigenvectors_:
truncated_energy = (
np.linalg.norm(
kernel_eigenvector.T @ self.jointly_smooth_functions, axis=0
)
** 2
)
truncated_energies.append(truncated_energy)
return np.array(truncated_energies)
def calculate_E0(self) -> float:
"""Compute a threshold for the eigenvalues of the jointly smooth functions.
Returns
-------
float
The E0 threshold value from :cite:`TODO enter paper reference`
"""
noisy = self.kernel_eigenvectors_[-1].copy()
np.random.shuffle(noisy)
kernel_eigenvectors = self.kernel_eigenvectors_[:-1]
kernel_eigenvectors.append(noisy)
eigenvectors_matrix = scipy.sparse.csr_matrix(
np.column_stack([eigenvector for eigenvector in kernel_eigenvectors])
)
if len(kernel_eigenvectors) == 2:
ev0 = kernel_eigenvectors[0]
ev1 = kernel_eigenvectors[1]
_, Gamma, _ = scipy.sparse.linalg.svds(
ev0.T @ ev1, k=self.n_jointly_smooth_functions, which="LM"
)
else:
_, Gamma, _ = scipy.sparse.linalg.svds(
eigenvectors_matrix, k=self.n_jointly_smooth_functions, which="LM"
)
Gamma.sort()
gamma2 = Gamma[-2]
E0 = (1 + gamma2) / 2
return E0
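# --- Hedged usage sketch (synthetic data; names below are illustrative) ---
# Two modalities observing a shared variable: columns 0-1 and 2-3 of X.
# Each JsfDataset selects one modality via a slice; fit_transform returns the
# jointly smooth functions of shape (n_samples, n_jointly_smooth_functions).
def _jsf_demo(n_samples=200):
    rng = np.random.default_rng(42)
    shared = rng.uniform(0, 1, size=(n_samples, 1))
    X = np.column_stack([
        shared, rng.normal(size=(n_samples, 1)),                       # modality 0
        np.sin(2 * np.pi * shared), rng.normal(size=(n_samples, 1)),   # modality 1
    ])
    datasets = [JsfDataset('obs0', slice(0, 2)), JsfDataset('obs1', slice(2, 4))]
    jsf = JointlySmoothFunctions(datasets, n_kernel_eigenvectors=20,
                                 n_jointly_smooth_functions=5)
    return jsf.fit_transform(X)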
|
import webob
from prestans import exception
from prestans.http import STATUS
from prestans.parser import AttributeFilter
from prestans import serializer
from prestans.types import Array
from prestans.types import BinaryResponse
from prestans.types import DataCollection
from prestans.types import Model
class Response(webob.Response):
"""
Response is the writable HTTP response. It inherits and leverages
from webob.Response to do the heavy lifting of HTTP Responses. It adds to
webob.Response prestans customisations.
Overrides content_type property to use prestans' serializers with the set body
"""
def __init__(self, charset, logger, serializers, default_serializer):
super(Response, self).__init__()
self._logger = logger
self._serializers = serializers
self._default_serializer = default_serializer
self._selected_serializer = None
self._template = None
self._app_iter = []
self._minify = False
self._attribute_filter = None
self._template = None
self._charset = charset
#:
        #: The IETF has dropped the X- prefix for custom headers
#: http://stackoverflow.com/q/3561381
#: http://tools.ietf.org/html/draft-saintandre-xdash-00
#:
from prestans import __version__ as version
if not isinstance(version, str):
version = version.encode("latin1")
self.headers.add('Prestans-Version', version)
@property
def minify(self):
return self._minify
@minify.setter
def minify(self, value):
self._minify = value
@property
def logger(self):
return self._logger
@property
def supported_mime_types(self):
return [serializer.content_type() for serializer in self._serializers]
@property
def supported_mime_types_str(self):
        return ','.join(str(mime_type) for mime_type in self.supported_mime_types)
@property
def selected_serializer(self):
return self._selected_serializer
@property
def default_serializer(self):
return self._default_serializer
def _set_serializer_by_mime_type(self, mime_type):
"""
:param mime_type:
:return:
used by content_type_set to set get a reference to the appropriate serializer
"""
# ignore if binary response
if isinstance(self._app_iter, BinaryResponse):
self.logger.info("ignoring setting serializer for binary response")
return
for available_serializer in self._serializers:
if available_serializer.content_type() == mime_type:
self._selected_serializer = available_serializer
self.logger.info("set serializer for mime type: %s" % mime_type)
return
self.logger.info("could not find serializer for mime type: %s" % mime_type)
raise exception.UnsupportedVocabularyError(mime_type, self.supported_mime_types_str)
@property
def template(self):
"""
is an instance of prestans.types.DataType; mostly a subclass of prestans.types.Model
"""
return self._template
@template.setter
def template(self, value):
if value is not None and (not isinstance(value, DataCollection) and
not isinstance(value, BinaryResponse)):
raise TypeError("template in response must be of type prestans.types.DataCollection or subclass")
self._template = value
#:
#: Attribute filter setup
#:
@property
def attribute_filter(self):
return self._attribute_filter
@attribute_filter.setter
def attribute_filter(self, value):
if value is not None and not isinstance(value, AttributeFilter):
msg = "attribute_filter in response must be of type prestans.types.AttributeFilter"
raise TypeError(msg)
self._attribute_filter = value
def _content_type__get(self):
"""
Get/set the Content-Type header (or None), *without* the
charset or any parameters.
If you include parameters (or ``;`` at all) when setting the
content_type, any existing parameters will be deleted;
otherwise they will be preserved.
"""
header = self.headers.get('Content-Type')
if not header:
return None
return header.split(';', 1)[0]
def _content_type__set(self, value):
# skip for responses that have no body
if self.status_code in [STATUS.NO_CONTENT, STATUS.PERMANENT_REDIRECT, STATUS.TEMPORARY_REDIRECT]:
self.logger.info("attempt to set Content-Type to %s being ignored due to empty response" % value)
self._content_type__del()
else:
self._set_serializer_by_mime_type(value)
if ';' not in value:
header = self.headers.get('Content-Type', '')
if ';' in header:
params = header.split(';', 1)[1]
value += ';' + params
self.headers['Content-Type'] = value
self.logger.info("Content-Type set to: %s" % value)
def _content_type__del(self):
self.headers.pop('Content-Type', None)
# content_type; overrides webob.Response line 606
content_type = property(
_content_type__get,
_content_type__set,
_content_type__del,
doc=_content_type__get.__doc__
)
# body; overrides webob.Response line 324
@property
def body(self):
"""
Overridden response does not support md5, text or json properties. _app_iter
is set using rules defined by prestans.
body getter will return the validated prestans model.
webob does the heavy lifting with headers.
"""
#: If template is null; return an empty iterable
if self.template is None:
return []
return self._app_iter
@body.setter
def body(self, value):
        #: If no response template is set, we have to assume it's NO_CONTENT,
        #: hence do not allow setting the body
if self.template is None:
raise AssertionError("response_template is None; handler can't return a response")
#: value should be a subclass prestans.types.DataCollection
if not isinstance(value, DataCollection) and \
not isinstance(value, BinaryResponse):
msg = "%s is not a prestans.types.DataCollection or prestans.types.BinaryResponse subclass" % (
value.__class__.__name__
)
raise TypeError(msg)
#: Ensure that it matches the return type template
if not value.__class__ == self.template.__class__:
msg = "body must of be type %s, given %s" % (
self.template.__class__.__name__,
value.__class__.__name__
)
raise TypeError(msg)
#: If it's an array then ensure that element_template matches up
if isinstance(self.template, Array) and \
not isinstance(value.element_template, self.template.element_template.__class__):
msg = "array elements must of be type %s, given %s" % (
self.template.element_template.__class__.__name__,
value.element_template.__class__.__name__
)
raise TypeError(msg)
#: _app_iter assigned to value
        #: we need to serialize the contents before we know the length;
        #: defer setting the content_length property to the getter
self._app_iter = value
# body = property(_body__get, _body__set, _body__set)
def register_serializers(self, serializers):
"""
Adds extra serializers; generally registered during the handler lifecycle
"""
for new_serializer in serializers:
if not isinstance(new_serializer, serializer.Base):
msg = "registered serializer %s.%s does not inherit from prestans.serializer.Serializer" % (
new_serializer.__module__,
new_serializer.__class__.__name__
)
raise TypeError(msg)
self._serializers = self._serializers + serializers
def __call__(self, environ, start_response):
"""
Overridden WSGI application interface
"""
# prestans equivalent of webob.Response line 1022
if self.template is None or self.status_code == STATUS.NO_CONTENT:
self.content_type = None
start_response(self.status, self.headerlist)
if self.template is not None:
self.logger.warn("handler returns No Content but has a response_template; set template to None")
return []
# ensure what we are able to serialize is serializable
if not isinstance(self._app_iter, DataCollection) and \
not isinstance(self._app_iter, BinaryResponse):
            if isinstance(self._app_iter, list):
                app_iter_type = "list"
            else:
                app_iter_type = self._app_iter.__class__.__name__
msg = "handler returns content of type %s; not a prestans.types.DataCollection subclass" % (
app_iter_type
)
raise TypeError(msg)
if isinstance(self._app_iter, DataCollection):
#: See if attribute filter is completely invisible
if self.attribute_filter is not None:
#: Warning to say nothing is visible
if not self.attribute_filter.are_any_attributes_visible():
self.logger.warn("attribute_filter has all the attributes turned \
off, handler will return an empty response")
#: Warning to say none of the fields match
model_attribute_filter = None
if isinstance(self._app_iter, Array):
model_attribute_filter = AttributeFilter. \
from_model(self._app_iter.element_template)
elif isinstance(self._app_iter, Model):
model_attribute_filter = AttributeFilter. \
from_model(self._app_iter)
if model_attribute_filter is not None:
try:
model_attribute_filter.conforms_to_template_filter(self.attribute_filter)
except exception.AttributeFilterDiffers as exp:
exp.request = self.request
self.logger.warn("%s" % exp)
# body should be of type DataCollection try; attempt calling
# as_serializable with available attribute_filter
serializable_body = self._app_iter.as_serializable(self.attribute_filter.as_immutable(), self.minify)
#: attempt serializing via registered serializer
stringified_body = self._selected_serializer.dumps(serializable_body)
# if not isinstance(stringified_body, str):
# msg = "%s dumps must return a python str not %s" % (
# self._selected_serializer.__class__.__name__,
# stringified_body.__class__.__name__
# )
# raise TypeError(msg)
#: set content_length
self.content_length = len(stringified_body)
start_response(self.status, self.headerlist)
return [stringified_body.encode("utf-8")]
elif isinstance(self._app_iter, BinaryResponse):
if self._app_iter.content_length == 0 or \
self._app_iter.mime_type is None or \
self._app_iter.file_name is None:
msg = "Failed to write binary response with content_length %i; mime_type %s; file_name %s" % (
self._app_iter.content_length,
self._app_iter.mime_type,
self._app_iter.file_name
)
self.logger.warn(msg)
self.status = STATUS.INTERNAL_SERVER_ERROR
self.content_type = "text/plain"
return []
# set the content type
self.content_type = self._app_iter.mime_type
#: Add content disposition header
if self._app_iter.as_attachment:
attachment = "attachment; filename=\"%s\"" % self._app_iter.file_name
if not isinstance(attachment, str):
attachment = attachment.encode("latin1")
self.headers.add("Content-Disposition", attachment)
else:
inline = "inline; filename=\"%s\"" % self._app_iter.file_name
if not isinstance(inline, str):
inline = inline.encode("latin1")
self.headers.add("Content-Disposition", inline)
#: Write out response
self.content_length = self._app_iter.content_length
start_response(self.status, self.headerlist)
return [self._app_iter.contents]
else:
raise AssertionError("prestans failed to write a binary or textual response")
def __str__(self):
#: Overridden so webob's __str__ skips serializing the body
        return super(Response, self).__str__(skip_body=True)
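# --- Hedged usage sketch (wiring normally done by the prestans framework) ---
# Setting content_type triggers _set_serializer_by_mime_type, which selects a
# registered serializer or raises UnsupportedVocabularyError; the body setter
# then validates the handler's return value against the response template.
# The serializer and model names below are assumptions for illustration:
#
#   response = Response(charset='utf-8', logger=logger,
#                       serializers=[serializer.JSON()],
#                       default_serializer=serializer.JSON())
#   response.content_type = 'application/json'  # picks the JSON serializer
#   response.template = MyModel()               # a prestans.types.Model subclass
#   response.body = MyModel(name='example')     # validated against the template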
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making BK-BASE 蓝鲸基础平台 available.
Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
BK-BASE 蓝鲸基础平台 is licensed under the MIT License.
License for BK-BASE 蓝鲸基础平台:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import pytest
from meta.basic import asset, entity
from meta.tests.conftest import FakeRequest
@pytest.mark.django_db(transaction=True)
@pytest.mark.usefixtures("patch_meta_sync", "patch_auth_check")
def test_post_entity_complex_search():
post_data = {"statement": "select 1;", "backend_type": "mysql"}
request = FakeRequest(data=post_data)
view = entity.ComplexSearchView()
post_ret = view.post(request)
# print(post_ret.data)
assert isinstance(post_ret.data, (list, dict))
assert post_ret.status_code == 200
@pytest.mark.django_db(transaction=True)
@pytest.mark.usefixtures("patch_meta_sync", "patch_auth_check")
def test_post_entity_lineage():
query_params = {
"type_name": "ResultTable",
"qualified_name": "test",
"direction": "INPUT",
"depth": 3,
"backend_type": "dgraph",
"extra_retrieve": '{"erp": "erp_statement"}',
}
request = FakeRequest(query_params=query_params)
view = entity.LineageView()
get_ret = view.get(request)
# print(get_ret.data)
assert isinstance(get_ret.data, (list, dict))
assert get_ret.status_code == 200
@pytest.mark.django_db(transaction=True)
@pytest.mark.usefixtures("patch_meta_sync", "patch_auth_check")
def test_post_asset_query_via_erp():
post_data = {"retrieve_args": '{"erp": "erp_statement"}', "backend_type": "dgraph", "version": 2}
request = FakeRequest(data=post_data)
view = asset.QueryViaERPView()
post_ret = view.post(request=request)
# print(post_ret.data)
assert isinstance(post_ret.data, (list, dict))
assert post_ret.status_code == 200
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from djangocms_link_manager import __version__
INSTALL_REQUIRES = [
'django>=1.8.0',
'django-cms>=3.0',
'phonenumberslite>=7.4,<8.0',
'attrs',
]
# https://pypi.python.org/pypi?%3Aaction=list_classifiers
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
setup(
name='djangocms-link-manager',
version=__version__,
description='An extensible means of checking for broken links in virtually any django CMS plugin.',
author='Divio',
author_email='info@divio.com',
url='https://github.com/divio/djangocms-link-manager/',
packages=find_packages(),
install_requires=INSTALL_REQUIRES,
license='LICENSE.txt',
platforms=['OS Independent'],
classifiers=CLASSIFIERS,
long_description=open('README.rst').read(),
include_package_data=True,
zip_safe=False,
)
|
# (C) Copyright 2019 Hewlett Packard Enterprise Development LP.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# __author__ = "@netwookie"
# __credits__ = ["Rick Kauffman"]
# __license__ = "Apache2.0"
# __version__ = "1.0.0"
# __maintainer__ = "Rick Kauffman"
# __email__ = "rick.a.kauffman@hpe.com"
from flask import Blueprint, render_template, request, redirect, session, url_for, abort
import os
# from werkzeug import secure_filename
from mongoengine import Q
import json
import requests
from database.system import System
from database.creds import Creds
from database.alerts import Alerts
from utilities.get_creds import get
from utilities.save_creds import save
from utilities.save_system import save_system
from utilities.get_system import get_system
from utilities.save_alerts import save_alerts
from utilities.get_alerts import get_alerts
from utilities.populate import servers
from utilities.populate import disks
from utilities.populate import sanmanagers
from database.server_hardware import Server_Hardware
from database.disk_hardware import Disk_Hardware
from database.san_managers import San_Managers
import time
from collections import OrderedDict
from hpOneView.oneview_client import OneViewClient
from qumulo.rest_client import RestClient  # needed by timestamps() below
requests.packages.urllib3.disable_warnings()
main_app = Blueprint('main_app', __name__)
@main_app.route('/main', methods=('GET', 'POST'))
@main_app.route('/', methods=('GET', 'POST'))
@main_app.route('/index', methods=('GET', 'POST'))
def main():
''' Display login screen
'''
# Clear credential database on new session.
Creds.objects().delete()
return render_template('main/login.html')
@main_app.route('/help', methods=('GET', 'POST'))
def help():
return render_template('main/help.html')
@main_app.route('/main_load', methods=('GET', 'POST'))
def main_load():
'''
read creds
'''
    # If this is a POST it is from the login screen: capture creds and save
    if request.method == 'POST':
        # Get creds from login
ipaddress = request.form['ipaddress'].encode('utf-8')
user = request.form['user'].encode('utf-8')
password = request.form['password'].encode('utf-8')
# Save the record
try:
savecreds=save(ipaddress,user,password)
except:
error="ERR001 - Failed to save login credentials"
return render_template('main/dberror.html', error=error)
# Returning to the main page if HTTP GET pull creds from DB
creds=get()
authx = {
"ip" : creds[0],
"credentials" : {
"userName" : creds[1],
"password" : creds[2]
}
}
# Create client connector
try:
client = OneViewClient(authx)
except:
error="ERR00X - Wrong host, user or password, please try again!"
return render_template('main/dberror.html', error=error)
# Get system information
ov = client.appliance_node_information.get_version()
#
uuid=ov['uuid'].encode('utf-8')
family=ov['family'].encode('utf-8')
serno=ov['serialNumber'].encode('utf-8')
model=ov['modelNumber'].encode('utf-8')
software=ov['softwareVersion'].encode('utf-8')
build=ov['build'].encode('utf-8')
# Save the system to mongo
try:
savesys=save_system(uuid,family,serno,model,software,build)
except:
error="ERR002 - Failed to save system information to mongo"
return render_template('main/dberror.html', error=error)
# Clear switches database on new session.
Alerts.objects().delete()
# Get alerts
out_alerts = []
ov = client.alerts.get_all()
for alert in ov:
severity=alert['severity'].encode('utf-8')
description=alert['description'].encode('utf-8')
modified=alert['modified'].encode('utf-8')
# Save the alerts to mongo
try:
savealert=save_alerts(severity,description,modified)
except:
error="ERR003 - Failed to save alarm information to mongo"
return render_template('main/dberror.html', error=error)
out = [severity,description,modified]
out_alerts.append(out)
pad='104.55.322'
# Populate the rest of the mongo collections
#------------------------------------------------SERVERS------------------
# Get and Save Server Hardware
ov_servers = client.server_hardware.get_all()
try:
load_servers = servers(ov_servers)
except:
error="ERR004 - Failed to save server hardware information to mongo"
return render_template('main/dberror.html', error=error)
#-----------------------------------------------DISKS-----------------
# Get and Save D3940 Hardware
ov_disks = client.drive_enclosures.get_all()
try:
load_disks = disks(ov_disks)
except:
error="ERR005 - Failed to save disk hardware information to mongo"
return render_template('main/dberror.html', error=error)
#-----------------------------------------------SAN MANAGERS---------------
# Get and Save San Managers
ov_san_managers = client.san_managers.get_all()
# Clear San Managers database on new session.
San_Managers.objects().delete()
count = 0
for sm in ov_san_managers:
status=sm['status'].encode('utf-8')
display=sm['connectionInfo'][count]['displayName'].encode('utf-8')
name=sm['connectionInfo'][count]['name'].encode('utf-8')
ipaddress=sm['connectionInfo'][count]['value'].encode('utf-8')
description=sm['description'].encode('utf-8')
state=sm['state'].encode('utf-8')
refresh=sm['refreshState'].encode('utf-8')
        inside = str(sm['isInternal'])
        # Build database entry for the san manager
manager = San_Managers(status=sm['status'].encode('utf-8'),
display=sm['connectionInfo'][count]['displayName'].encode('utf-8'),
name=sm['connectionInfo'][count]['name'].encode('utf-8'),
ipaddress=sm['connectionInfo'][count]['value'].encode('utf-8'),
description=sm['description'].encode('utf-8'),
state=sm['state'].encode('utf-8'),
refresh=sm['refreshState'].encode('utf-8'),
inside=inside)
# out=[status,display,name,ipaddress,description,state,refresh,inside]
try:
manager.save()
except:
error="SUB-SUB routine- ERR00777 - Failed to save san manager"
return render_template('main/dberror.html', error=error)
'''
try:
load_san_manager = sanmanagers(ov_sanmanagers)
except:
error="ERR005 - Failed to save san manager information to mongo"
return render_template('main/dberror.html', error=error)
'''
#-----------------------------------------------RENDER-----------------
return render_template('main/index.html', uuid=uuid,
family=family,
serno=serno,
model=model,
software=software,
build=build,
out_alerts=out_alerts,
pad=pad)
@main_app.route('/main_select', methods=('GET', 'POST'))
def main_select():
'''
return to the main screen
'''
# get system information
system = System.objects()
for s in system:
uuid = s.uuid.encode('utf8')
family=s.family.encode('utf8')
serno=s.serno.encode('utf8')
model=s.model.encode('utf8')
software=s.software.encode('utf8')
build=s.build.encode('utf8')
# Get alerts
alerts = Alerts.objects()
out_alerts = []
for alert in alerts:
out = [alert.severity,alert.description,alert.modified]
out_alerts.append(out)
pad='104.55.322'
return render_template('main/index.html', uuid=uuid,
family=family,
serno=serno,
model=model,
software=software,
build=build,
out_alerts=out_alerts,
pad=pad)
@main_app.route('/diskhardware', methods=('GET', 'POST'))
def diskhardware():
'''
Display table of the disk hardware
'''
disks = Disk_Hardware.objects()
out_disks = []
for disk in disks:
out = [disk.cage,
disk.serialNumber,
disk.model,
disk.rotationalRpms,
disk.drivePaths,
disk.firmwareVersion,
disk.capacity,
disk.temperature,
disk.blockSize,
disk.deviceInterface,
disk.status,
disk.driveMedia,
disk.authentic,
]
out_disks.append(out)
return render_template('main/diskhardware.html', out_disks=out_disks)
@main_app.route('/serverhardware', methods=('GET', 'POST'))
def serverhardware():
'''
Display table of the server hardware
'''
servers = Server_Hardware.objects()
out_servers = []
for server in servers:
out = [server.model,
server.serno,
server.smodel,
server.proctype,
server.name,
server.sname,
server.mem,
server.firmver,
server.romv]
out_servers.append(out)
return render_template('main/serverhardware.html', out_servers=out_servers)
@main_app.route('/sanmanagers', methods=('GET', 'POST'))
def sanmanagers():
'''
Display table of the server hardware
'''
sanmanagers = San_Managers.objects()
out_managers = []
for man in sanmanagers:
out = [man.status,
man.display,
man.name,
man.ipaddress,
man.description,
man.state,
man.refresh,
man.inside]
out_managers.append(out)
return render_template('main/sanmanagers.html', out_managers=out_managers)
@main_app.route('/maps', methods=('GET', 'POST'))
def maps():
'''
Display Maps
'''
return render_template('main/maps.html')
@main_app.route('/users', methods=('GET', 'POST'))
def users():
'''
Manage Users
'''
return render_template('main/manage-users.html')
@main_app.route('/preferences', methods=('GET', 'POST'))
def preferences():
'''
Manage Preferences
'''
return render_template('main/preferences.html')
@main_app.route('/logout', methods=('GET', 'POST'))
def logout():
'''
Logout of system
'''
return render_template('main/logout.html')
@main_app.route('/timestamps', methods=('GET', 'POST'))
def timestamps():
'''
Get Qumulo Timestamps
'''
    # Get user information
creds = Creds.objects.first()
user = creds.user
password = creds.password
ipaddress= creds.ipaddress
columns = ["iops.read.rate", "iops.write.rate",
"throughput.read.rate", "throughput.write.rate",
"reclaim.deferred.rate", "reclaim.snapshot.rate"]
#
feed = []
rc = RestClient(ipaddress,8000)
rc.login(user,password)
#
begin_time = int(time.time()) - 60 * 60 * 24
results = rc.analytics.time_series_get(begin_time = begin_time)
data = {}
#
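    # Pivot the per-series results into rows keyed by timestamp:
    # data[ts] holds one value per metric in `columns`, in the same order.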
for i in range(0,len(results[0]['times'])-1):
ts = results[0]['times'][i]
data[ts] = [None] * len(columns)
for series in results:
if series['id'] not in columns:
continue
for i in range(0,len(series['values'])):
ts = series['times'][i]
data[ts][columns.index(series['id'])] = series['values'][i]
    for ts, values in data.items():
        # skip rows where every metric is exactly zero
        if all(v == 0.0 for v in values):
            continue
        feed.append([ts] + values)
return render_template('main/index.sm.html', feed=feed)
|
__all__ = ('ResultGatheringFuture',)
import reprlib
from ...utils.trace import format_callback
from ..exceptions import InvalidStateError
from .future import FUTURE_STATE_FINISHED, FUTURE_STATE_PENDING, Future
class ResultGatheringFuture(Future):
"""
    A Future subclass which becomes done only after its result has been set a given
    amount of times with ``.set_result`` or ``.set_result_if_pending``, or when an
    exception is set with ``.set_exception`` or ``.set_exception_if_pending``.
Attributes
----------
_blocking : `bool`
Whether the future is already being awaited, so it blocks the respective coroutine.
_callbacks : `list` of `callable`
The callbacks of the future, which are queued up on the respective event loop to be called, when the future is
finished. These callback should accept `1` parameter, the future itself.
Note, if the future is already done, then the newly added callbacks are queued up instantly on the respective
event loop to be called.
_exception : `None`, `BaseException`
        The exception set to the future as its result. Defaults to `None`.
_loop : ``EventThread``
The loop to what the created future is bound.
_result : `list` of `Any`
The results of the future.
    _state : `int`
The state of the future.
Can be set as one of the following:
+---------------------------+-----------+
| Respective name | Value |
+===========================+===========+
| FUTURE_STATE_PENDING | `0` |
+---------------------------+-----------+
| FUTURE_STATE_CANCELLED | `1` |
+---------------------------+-----------+
| FUTURE_STATE_FINISHED | `2` |
+---------------------------+-----------+
| FUTURE_STATE_RETRIEVED | `3` |
+---------------------------+-----------+
Note, that states are checked by memory address and not by equality. Also ``FUTURE_STATE_RETRIEVED`` is used
only if `__debug__` is set as `True`.
_count : `int`
        The amount of times the future's result needs to be set before it becomes done.
"""
__slots__ = ('_count',)
def __new__(cls, loop, count):
"""
Creates a new ``ResultGatheringFuture`` object bound to the given `loop`, which will be marked as done, only if `count`
results are set to it with ``.set_result``, or with ``.set_result_if_pending``.
Parameters
----------
loop : ``EventThread``
The loop to what the created future will be bound to.
count : `int`
            The amount of times the future's result needs to be set before it becomes done.
"""
self = object.__new__(cls)
self._loop = loop
self._count = count
self._state = FUTURE_STATE_PENDING
self._result = []
self._exception = None
self._callbacks = []
self._blocking = False
return self
def __repr__(self):
"""Returns the gatherer's representation."""
repr_parts = ['<', self.__class__.__name__, ' ']
state = self._state
        repr_parts.append(str(state))
if state >= FUTURE_STATE_FINISHED:
exception = self._exception
if exception is None:
results = self._result
for index, result in enumerate(results):
                    repr_parts.append(', result[')
repr_parts.append(repr(index))
repr_parts.append(']=')
repr_parts.append(reprlib.repr(result))
repr_parts.append(', needed=')
repr_parts.append(str(self._count - len(results)))
else:
repr_parts.append(', exception=')
repr_parts.append(repr(exception))
callbacks = self._callbacks
limit = len(callbacks)
if limit:
repr_parts.append(', callbacks=[')
index = 0
while True:
callback = callbacks[index]
repr_parts.append(format_callback(callback))
index += 1
if index == limit:
break
repr_parts.append(', ')
continue
repr_parts.append(']')
repr_parts.append('>')
return ''.join(repr_parts)
def set_result(self, result):
"""
Sets the future result, and if it waits for no more results, marks it as done as well.
Parameters
----------
result : `Any`
The object to set as result.
Raises
------
InvalidStateError
If the future is already done.
"""
if self._state != FUTURE_STATE_PENDING:
raise InvalidStateError(self, 'set_result')
results = self._result
results.append(result)
if self._count != len(results):
return
self._state = FUTURE_STATE_FINISHED
self._loop._schedule_callbacks(self)
def set_result_if_pending(self, result):
"""
        Sets the future's result and, if it waits for no more results, marks it as done as well. Unlike
        ``.set_result``, this method will not raise ``InvalidStateError`` if the future is already done.
Parameters
----------
result : `Any`
The object to set as result.
Returns
        -------
set_result : `int` (`0`, `1`, `2`)
If the future is already done, returns `0`. If the future's result was successfully set, returns `1`,
meanwhile if the future was marked as done as well, returns `2`.
"""
if self._state != FUTURE_STATE_PENDING:
return 0
results = self._result
results.append(result)
if self._count != len(results):
return 1
self._state = FUTURE_STATE_FINISHED
self._loop._schedule_callbacks(self)
return 2
def clear(self):
"""
Clears the future making it reusable.
"""
self._state = FUTURE_STATE_PENDING
self._exception = None
self._result.clear()
self.cancel_handles()
self._blocking = False
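# Hedged usage sketch (assumes an ``EventThread`` instance named `loop` from
# the surrounding library; the awaiting coroutine is hypothetical):
#
#     future = ResultGatheringFuture(loop, 3)
#     future.set_result('a')
#     future.set_result('b')
#     future.set_result('c')   # the third result marks the future as done
#     results = await future   # -> ['a', 'b', 'c']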
|
from collections import defaultdict
import unicodedata
import re
ca_fr = "Montréal, über, 12.89, Mère, Françoise, noël, 889"
yo_0 = "ọjọ́ìbí 18 Oṣù Keje 1918 jẹ́ Ààrẹ Gúúsù Áfríkà"
yo_1 = "Kí ó tó di ààrẹ"
def strip_accents(string):
return ''.join(c for c in unicodedata.normalize('NFD', string)
if unicodedata.category(c) != 'Mn')
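# Example: strip_accents("Montréal") == "Montreal" (NFD splits é into e plus a
# combining acute accent, category 'Mn', which is then dropped).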
def convert_to_NFC(filename, outfilename):
    with open(filename) as f:
        text = unicodedata.normalize('NFC', f.read())
    with open(outfilename, 'w') as f:
        f.write(text)
def strip_accents_from_file(filename, outfilename):
    with open(filename) as f:
        text = unicodedata.normalize('NFC', f.read())
    with open(outfilename, 'w') as f:
        f.write(strip_accents(text))
def getFileStats(filename):
print("\nFilename: " + filename)
lines = tuple(open(filename, 'r'))
num_utts = len(lines)
text = ''.join(c for c in unicodedata.normalize('NFC', open(filename).read()))
    words = re.findall(r'\w+', text)
num_words = len(words)
num_chars = len(re.findall(r'\S', text))
unique_chars = set(text)
num_uniq_chars = len(unique_chars)
print(sorted(unique_chars))
print("# utts : " + str(num_utts))
print("# chars : " + str(num_chars))
print("# uniq chars: " + str(num_uniq_chars))
# unaccented word stats
unaccented_words = 0
for word in words:
if word == strip_accents(word):
unaccented_words += 1
print("# total words: " + str(num_words))
print("# unaccented words : " + str(unaccented_words))
# ambiguous word stats
ambiguity_map = defaultdict(set)
for word in words:
no_accents = strip_accents(word)
ambiguity_map[no_accents].add(word)
    ambiguous_words = 0
    ambiguity_counts = defaultdict(int)  # ambiguity-set size -> number of words
    # fill ambiguity counts
    for word in ambiguity_map:
        num_variants = len(ambiguity_map[word])
        if num_variants > 1:
            ambiguous_words += 1
            ambiguity_counts[num_variants] += 1
    # print ambiguity map entries with 3-9 diacritized variants
    for word in ambiguity_map:
        num_variants = len(ambiguity_map[word])
        if 3 <= num_variants <= 9:
            print("# " + str(num_variants) + ": " + str(ambiguity_map[word]))
    print("# unique ambiguous words : " + str(ambiguous_words))
    print("# total unique non-diacritized words : " + str(len(ambiguity_map)))
    unique_all_words = set(words)
    print("# total unique words : " + str(len(unique_all_words)))
    for num_variants in range(2, 10):
        print("# ambiguous " + str(num_variants) + " words : "
              + str(ambiguity_counts[num_variants]))
# For yoruba blog (and probably bibeli)
def split_out_corpus_on_symbol(filename, outfilename, symbol=','):
    """Split long, heavily punctuated lines into shorter utterances.
    Lines with at least `min_words_to_split` words are cut at `symbol`
    wherever both sides of the cut keep at least `min_words_in_utt` words.
    """
    lines = tuple(open(filename, 'r'))
    min_words_to_split = 10
min_words_in_utt = 5
with open(outfilename, 'w') as f:
# split out heavily comma'd text :((
for line in lines:
if symbol in line:
num_words = len(line.split())
num_commas = line.count(symbol)
curr_comma_position = line.index(symbol)
num_words_ahead_of_curr_comma = len(line[0:curr_comma_position].split())
curr_line = line
while num_commas > 0:
if num_words < min_words_to_split:
# print(curr_line.strip())
f.write(curr_line)
break
if num_words >= min_words_to_split:
if num_words_ahead_of_curr_comma >= min_words_in_utt and \
len((curr_line)[curr_comma_position:].split()) >= min_words_in_utt:
f.write((curr_line)[0:curr_comma_position] + "\n")
# update vars
curr_line = curr_line[curr_comma_position +1:]
num_words = len(curr_line.split())
num_commas = num_commas - 1
if num_commas > 0:
curr_comma_position = curr_line.index(symbol)
num_words_ahead_of_curr_comma = len(curr_line[0:curr_comma_position].split())
else:
f.write(curr_line)
else:
# ignore too short comma (+= vs = on current comma position)
num_commas = num_commas - 1
if num_commas > 0: # for say 3 commas
curr_comma_position += curr_line[curr_comma_position +1:].index(symbol) + 1
num_words_ahead_of_curr_comma = len(curr_line[0:curr_comma_position].split())
else:
f.write(curr_line)
else:
f.write(curr_line)
else:
f.write(line)
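# Hedged example of the split behavior above: with the defaults, a 12-word line
# with a comma after the 6th word satisfies both thresholds and is written out
# as two 6-word lines, while a 12-word line with a comma after the 2nd word is
# written unchanged because the left side is too short.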
if __name__ == "__main__":
# test
print(ca_fr, ": " ,strip_accents(ca_fr))
print(yo_0, ": " , strip_accents(yo_0))
print(yo_1, ": " ,strip_accents(yo_1))
# getFileStats('data/LagosNWUspeech_corpus/all_transcripts.txt')
# getFileStats('data/theyorubablog_corpus/theyorubablog_dot_com.txt')
# getFileStats('data/BibeliYoruba_corpus/bibeli_ede_yoruba.txt')
# getFileStats('data/BibeliYoruba_corpus/bibeli_ede_yoruba.txt')
getFileStats('seq2seq/nmt_data/yoruba_diacritics/train/tgt-train.txt')
getFileStats('seq2seq/nmt_data/yoruba_diacritics/test/tgt-test.txt')
getFileStats('seq2seq/nmt_data/yoruba_diacritics/train/src-train.txt')
getFileStats('seq2seq/nmt_data/yoruba_diacritics/test/src-test.txt')
#
# split_out_corpus_on_symbol('data/theyorubablog_corpus/theyorubablog_dot_com.txt')
# strip accents
# strip_accents_from_file('yorubaspeechcorpus/all_transcripts.txt', 'yorubaspeechcorpus/all_transcripts_no_diacritics.txt')
# strip_accents_from_file('corpus/theyorubablog_dot_com.txt', 'corpus/theyorubablog_dot_com_no_diacritics.txt')
strip_accents_from_file('/Users/iorife/github/yoruba-text/first_words.txt', '/Users/iorife/github/yoruba-text/first_words_ascii.txt')
# convert from NFD to NFC
# convert_to_NFC('data/LagosNWUspeech_corpus/all_transcripts.txt', 'data/LagosNWUspeech_corpus/all_transcripts_NFC.txt')
# convert_to_NFC('data/theyorubablog_corpus/theyorubablog_dot_com.txt', 'data/theyorubablog_corpus/theyorubablog_dot_com_NFC.txt')
# convert_to_NFC('data/BibeliYoruba_corpus/bibeli_ede_yoruba.txt', 'data/BibeliYoruba_corpus/bibeli_ede_yoruba_NFC.txt')
# convert_to_NFC('data/theyorubablog_corpus/theyorubablog_dot_com_JARA.txt', 'data/theyorubablog_corpus/theyorubablog_dot_com_JARA_NFC.txt')
|
import argparse
import json
import os
from datetime import datetime
from annotation_predictor.util.groundtruth_reader import GroundTruthReader
from settings import annotation_predictor_metadata_dir
evaluation_record = {}
def extract_gt(path_to_images: str, path_to_gt: str):
"""
Extracts ground truth data for a given set of images and saves it in a json-file
Args:
path_to_images: test images
path_to_gt: Ground-Truth-Data which contains data for images as a subset
"""
gt_json = {}
timestamp = datetime.now().strftime('%Y_%m_%d_%H%M%S')
gt_reader = GroundTruthReader(path_to_gt)
for image in os.listdir(path_to_images):
image_id = os.path.splitext(image)[0]
ground_truth = gt_reader.get_ground_truth_annotation(image_id)
gt_json[image_id] = ground_truth
with open(os.path.join(annotation_predictor_metadata_dir, timestamp + '.json'), 'w') as f:
json.dump(gt_json, f)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Extract ground truth data for a given set of images')
parser.add_argument('path_to_gt', type=str, metavar='path_to_gt',
help='path to groundtruth')
parser.add_argument('path_to_images', type=str, metavar='path_to_images',
help='path to images')
args = parser.parse_args()
extract_gt(args.path_to_images, args.path_to_gt)
|
import numpy as np
class NeuralNetwork:
def __init__(self, layers, alpha=0.1):
"""
The constructor of the Neural Network
-----------------------------------
Each neural network consists of input nodes, at least 1 hidden layer and an output layer
        :param layers: list of integers representing the architecture of the NN, e.g. [2, 2, 1] - 2 input nodes, 1 hidden
        layer with 2 nodes and 1 output layer with 1 node
:param alpha: learning rate
"""
self.W = []
self.layers = layers
self.alpha = alpha
# Weight matrix initialization for each of the layers
for i in np.arange(0, len(layers) - 2):
# we initialized the weight matrix with random sample values of the normal distribution
# the Weight matrix will be MxN such that we can connect each of the nodes of the current layer
# to each of the nodes to the next layer
# If layers[i] = 2 and layers[i+1] = 2 -> W will be a matrix 2x2.
# We also add one to the number of the current layer (layer[i]) and 1 to the next (layer[i+1]) to
# account for the bias, ultimately W = 3x3 matrix
w = np.random.randn(layers[i] + 1, layers[i + 1] + 1)
# weight matrix normalization
self.W.append(w / np.sqrt(layers[i]))
        # This accounts for the special case of the last two layers in the network
# layers[-2] - second to last layer needs only bias in the input but not in the output
w = np.random.randn(layers[-2] + 1, layers[-1])
self.W.append(w / np.sqrt(layers[-2]))
def __repr__(self):
return "Neural network: {}".format("-".join(str(l) for l in self.layers))
def sigmoid(self, x):
"""
Sigmoid activation function
:param x: input weighted vector
:return: activated value
"""
return 1.0 / (1 + np.exp(-x))
def sigmoid_deriv(self, x):
"""
Used in the backpropagation phase of the neural networking using the backpropagating algorithm
:param x:
:return:
"""
return x * (1 - x)
def fit(self, X, y, epochs=1000, displayUpdate=100):
"""
Trains the model with the specified dataset and corresponding target labels
:param X: dataset
:param y: target labels
:param epochs: # of epoch for training
:param displayUpdate: parameter to adjust the update information on console
"""
# Adding extra column to each of the datapoints for the bias trick
# to be treated as a trainable parameter within the weight matrix
X = np.c_[X, np.ones((X.shape[0]))]
# loop over each epoch
for epoch in np.arange(0, epochs):
for(x, target) in zip(X, y):
# loop over each data point and train the network
self.fit_partial(x, target)
if epoch == 0 or (epoch + 1) % displayUpdate == 0:
loss = self.calculate_loss(X, y)
print("[INFO] epoch={}, loss={:.7f}".format((epoch + 1), loss))
def fit_partial(self, x, y):
"""
Partially fitting our model
:param x: data point from the dataset
:param y: corresponding target label
"""
        # List of outputs of the activations from each layer
        # the first activation is the input itself (data point)
A = [np.atleast_2d(x)]
# Feedforward pass
        # we pass the data point through each of the layers in the network
# and each activation is then passed to the next layer in the network, dotted with the corresponding
# weight matrix of the layer
# Each activation output is stored in the A list
for layer in np.arange(0, len(self.W)):
net = A[layer].dot(self.W[layer])
out = self.sigmoid(net)
A.append(out)
# Backpropagation phase
# --------------------
        # Compute the difference between the prediction (the final output in the list of activations (A))
# and the actual target label
error = A[-1] - y
# We start with initializing a list D, which contains the deltas for the chain rule
# The first element is the error times the derivative of the output of the last layer
D = [error * self.sigmoid_deriv(A[-1])]
        # We then start iterating over each layer backwards, applying the chain rule
        # We ignore the last two layers since they are already taken care of (the first element in the D list)
for layer in np.arange(len(A) - 2, 0, -1):
# The delta for the current layer is computed by dotting the delta of the previous layer with
# the weight matrix of the current layer, which is then multiplied with the derivative of the
# activation function for the activations of the current layer
delta = D[-1].dot(self.W[layer].T)
delta = delta * self.sigmoid_deriv(A[layer])
D.append(delta)
        # reverse the deltas, because of the reversed loop
D = D[::-1]
# update the weight matrices for each layer
for layer in np.arange(0, len(self.W)):
self.W[layer] += -self.alpha * A[layer].T.dot(D[layer])
def predict(self, X, addBias=True):
"""
Predicts the class label for the given test vector
:param X: the test vector
        :param addBias: whether to add an extra column for the bias
:return: prediction
"""
p = np.atleast_2d(X)
if addBias:
p = np.c_[p, np.ones((p.shape[0]))]
for layer in np.arange(0, len(self.W)):
p = self.sigmoid(np.dot(p, self.W[layer]))
return p
def calculate_loss(self, X, targets):
"""
Calculates the loss of the whole dataset, used in each epoch to visualize the improvement over time
:param X: dataset
:param targets: class labels
:return: loss
"""
targets = np.atleast_2d(targets)
predictions = self.predict(X, addBias=False)
loss = 0.5 * np.sum((predictions - targets) ** 2)
return loss
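# Hedged usage sketch (the XOR data below is illustrative, not part of the
# original file): train the network above and print its predictions.
if __name__ == "__main__":
    X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
    y = np.array([[0], [1], [1], [0]])
    nn = NeuralNetwork([2, 2, 1], alpha=0.5)
    nn.fit(X, y, epochs=20000, displayUpdate=1000)
    for (x, target) in zip(X, y):
        pred = nn.predict(x)[0][0]
        print("data={}, ground-truth={}, pred={:.4f}".format(x, target[0], pred))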
|
Import("env")
import time, os
def before_upload(source, target, env):
print "before_upload: resetting GPIO18 for Alamode"
os.system("sudo gpio export 18 out")
os.system("sudo gpio write 18 0")
time.sleep(0.1)
os.system("sudo gpio write 18 1")
os.system("sudo gpio unexport 18")
env.AddPreAction("upload", before_upload)
|
import construct
import numpy as np
def read_tec_str(byte_list):
if not len(byte_list) == 4:
return {'Correct' : False}
check = construct.Int32ul.parse(byte_list)
if not check == 0:
return {'Correct':True, 'str': chr(byte_list[0]), 'End':False}
return {'Correct':True, 'str': '','End':True}
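# Example of the 4-byte character encoding handled above: b'A\x00\x00\x00'
# parses to {'Correct': True, 'str': 'A', 'End': False}, while four zero
# bytes signal the end of the string.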
def construct_qword(byte_list):
if len(byte_list) < 8:
return {'Correct':False}
qword=0
uni_chars=''
tec_str = ''
first = read_tec_str(byte_list[0:4])
second = read_tec_str(byte_list[4:8])
if first['Correct']:
tec_str=tec_str+first['str']
if second['Correct']:
tec_str=tec_str+second['str']
for i in range(8):
        shiftval = (7 - i) * 8
qword=qword + (byte_list[i] << shiftval)
uni_chars=uni_chars+str(chr(byte_list[i]))
lei32=construct.Int32sl.parse(byte_list)
return {'Correct':True, 'qword':qword,'I32ul':lei32,
'uni_chars':uni_chars, 'tec_str':tec_str}
def read_magic_number(byte_list):
if len(byte_list) < 8:
return {'Correct':False}
magic_num = construct_qword(byte_list[0:8])
return magic_num
def get_title(byte_list, offset=0):
title = ''
title_end = False
counter = 0
next_rel_byte = 0
while not title_end:
first_rel_byte = counter * 8
next_rel_byte = (counter + 1) * 8
first = read_tec_str(byte_list[first_rel_byte:first_rel_byte+4])
second = read_tec_str(byte_list[first_rel_byte+4:first_rel_byte + 8])
if not first['Correct']:
return {'Correct':False}
if not second['Correct']:
return {'Correct':False}
if first['End']:
title_end = True
next_rel_byte = first_rel_byte+4
continue
title = title + first['str']
if second['End']:
title_end = True
next_rel_byte = first_rel_byte+8
continue
title = title + second['str']
counter = counter+1
return {'Correct':True,'title':title,'next_byte':next_rel_byte}
def read_var_names(byte_list, num_vars):
var_names = list()
next_byte=0
for i in range(num_vars):
qword = get_title(byte_list[next_byte:])
if not qword['Correct']:
return {'Correct':False}
var_names.append(qword['title'])
next_byte = next_byte + qword['next_byte']
return var_names, next_byte
def parse_zone(byte_list, num_vars):
FeZone = lambda x: x>0
zone={}
zone_name = get_title(byte_list)
if zone_name['Correct']==False:
return {'Correct':False}
zone['ZoneName'] = zone_name['title']
byte_start = zone_name['next_byte']
byte_end = zone_name['next_byte']+4
zone['ParentZone']= construct.Int32ul.parse(
byte_list[byte_start:byte_end])
byte_start = byte_start+4
byte_end = byte_end + 4
zone['StrandID'] = construct.Int32ul.parse(
byte_list[byte_start:byte_end])
byte_start = byte_end
byte_end = byte_end + 8
zone['SolutionTime'] = construct.Float64l.parse(
byte_list[byte_start:byte_end])
byte_start = byte_end
byte_end = byte_end + 4
zone['NotUsed'] = construct.Int32ul.parse(
byte_list[byte_start:byte_end])
byte_start = byte_start + 4
byte_end = byte_end + 4
zone['ZoneType'] = construct.Int32ul.parse(
byte_list[byte_start:byte_end])
byte_start = byte_start + 4
byte_end = byte_end + 4
zone['VarLoc'] = construct.Int32ul.parse(
byte_list[byte_start:byte_end])
if zone['VarLoc'] == 1:
byte_start = byte_start + 4
byte_end = byte_end + 4
varLocs=[]
for i in range(num_vars):
byte_start = byte_start + i*4
byte_end = byte_end + i*4
varLocs.append(
construct.Int32ul.parse(
byte_list[byte_start:byte_end])
)
byte_start = byte_start + 4
byte_end = byte_end + 4
zone['RawFaceNeighbors'] = construct.Int32ul.parse(
byte_list[byte_start:byte_end])
byte_start = byte_start + 4
byte_end = byte_end + 4
zone['UserdefinedFaceNeighbors'] = construct.Int32ul.parse(
byte_list[byte_start:byte_end])
if FeZone(zone['ZoneType']):
byte_start = byte_start + 4
byte_end = byte_end + 4
zone['RawFaceNeighbors'] = construct.Int32ul.parse(
byte_list[byte_start:byte_end])
if not FeZone(zone['ZoneType']):
byte_start = byte_start + 4
byte_end = byte_end + 4
zone['Imax'] = construct.Int32ul.parse(
byte_list[byte_start:byte_end])
byte_start = byte_start + 4
byte_end = byte_end + 4
zone['Jmax'] = construct.Int32ul.parse(
byte_list[byte_start:byte_end])
byte_start = byte_start + 4
byte_end = byte_end + 4
zone['Kmax'] = construct.Int32ul.parse(
byte_list[byte_start:byte_end])
byte_start = byte_start + 4
byte_end = byte_end + 4
zone['AuxdataNamePair'] = construct.Int32ul.parse(
byte_list[byte_start:byte_end])
return zone
def find_zones(byte_list, eo_header):
counter = 0
end_of_header = False
    zone_markers = list()
while not end_of_header:
first_byte = counter * 4
if first_byte >= eo_header:
end_of_header = True
continue
next_byte = (counter + 1) * 4
zone_marker = construct.Float32l.parse(byte_list[first_byte:next_byte])
if zone_marker == 299.0:
print('Zone Found')
print(first_byte)
            zone_markers.append(first_byte)
counter = counter + 1
    return zone_markers
def find_end_of_header(byte_list):
end_of_header_found = False
counter = 0
while not end_of_header_found:
first_byte = counter * 4
eo_of_header_byte = first_byte +4
eof_value = construct.Float32l.parse(byte_list[first_byte:eo_of_header_byte])
if eof_value == 357.0:
end_of_header_found = True
counter = counter +1
return eo_of_header_byte
def read_header(byte_list):
file_type_name=['FULL','GRID','SOLUTION']
magic_num = read_magic_number(byte_list[0:8])
if not magic_num['Correct']:
return {'Correct':False}
byte_order = construct.Int16sl.parse(byte_list[8:12])
file_type = construct.Int16sl.parse(byte_list[12:16])
title=''
title_res = get_title(byte_list[16:])
if title_res['Correct']:
title=title_res['title']
num_vars = construct.Int32sl.parse( byte_list[
title_res['next_byte']+16:
(title_res['next_byte']+20)])
start=title_res['next_byte']+20
var_names, next_byte = read_var_names(byte_list[start:],
num_vars)
start = start + next_byte
end_of_header = find_end_of_header(byte_list[start:])
end_of_header_abs = end_of_header + start
zone_markers= find_zones(byte_list[start:], end_of_header)
zones=list()
for zone in zone_markers:
        zones.append(parse_zone(byte_list[start + zone + 4:], num_vars))
# Now find and read zones
#zones = find_zones(byte_list[next_byte+start:])
return {'Correct': True,
'magic_num' : magic_num,
'ByteOrder' : byte_order,
'FileType' : file_type_name[file_type],
'Title':title,
'NumVars':num_vars,
'VarNames':var_names,
'EofHeader': end_of_header_abs,
'ZoneMarkers': zone_markers,
'Zones': zones}
def find_zones_data(byte_list, num_zones, offset):
count_zones=0
counter = 0
all_zones_found = False
    zone_markers = list()
while not all_zones_found:
first_byte = counter * 4
if count_zones == num_zones:
all_zones_found = True
continue
next_byte = (counter + 1) * 4
zone_marker = construct.Float32l.parse(byte_list[first_byte:next_byte])
if zone_marker == 299.0:
count_zones = count_zones + 1
            zone_markers.append(first_byte + offset)
counter = counter + 1
    return zone_markers
def read_zones(byte_list, zone_markers, header, binary_file):
var_names = header['VarNames']
var_dict = {}
zone_vars = list()
start_byte = 0
zone_counter = 0
zones_list=[]
for zone in zone_markers:
zone_data={}
start_byte = zone + 4
var_dict = {}
for name in var_names:
end_byte = start_byte + 4
var_dict[name] = construct.Int32ul.parse(byte_list[start_byte:end_byte])
start_byte = end_byte
zone_data['VarDict'] = var_dict
zone_data['PassiveVars'] = construct.Int32ul.parse(byte_list[start_byte:start_byte + 4])
if zone_data['PassiveVars'] != 0:
passive_var_dict={}
for name in var_names:
end_byte = start_byte + 4
passive_var_dict[name] = construct.Int32ul.parse(byte_list[start_byte:end_byte])
start_byte = end_byte
zone_data['PassiveVarDict'] = passive_var_dict
zone_data['VarSharing'] = construct.Int32ul.parse(byte_list[start_byte:start_byte + 4])
if zone_data['VarSharing'] != 0:
share_var_dict={}
for name in var_names:
end_byte = start_byte + 4
share_var_dict[name] = construct.Int32ul.parse(byte_list[start_byte:end_byte])
start_byte = end_byte
zone_data['ShareVarDict'] = share_var_dict
zone_data['ConnSharing'] = construct.Int32ul.parse(byte_list[start_byte:start_byte + 4])
start_byte=start_byte+4
non_passive_non_shared = list()
if zone_data['VarSharing'] !=0:
for name in var_names:
if zone_data['ShareVarDict'][name] == 0:
non_passive_non_shared.append(name)
else:
for name in var_names:
non_passive_non_shared.append(name)
if zone_data['PassiveVars'] !=0:
for name in var_names:
if zone_data['PassiveVarDict'][name] != 0:
if name in non_passive_non_shared:
non_passive_non_shared.remove(name)
min_val = {}
max_val = {}
start_byte=start_byte+4+4
for var_with_min_max in non_passive_non_shared:
end_byte = start_byte + 8
min_val[var_with_min_max] = construct.Float64l.parse(byte_list[start_byte:end_byte])
start_byte = end_byte
end_byte = start_byte + 8
max_val[var_with_min_max] = construct.Float64l.parse(byte_list[start_byte:end_byte])
start_byte = end_byte
print('start_data_list')
print(start_byte)
zone_data['Min_Vals'] = min_val
zone_data['Max_Vals'] = max_val
Imax = header['Zones'][zone_counter]['Imax']
Jmax = header['Zones'][zone_counter]['Jmax']
Kmax = header['Zones'][zone_counter]['Kmax']
print('Imax in read Zone')
print(Imax)
binary_file.seek(0)
print('NumValuesPerVariable')
print(Imax * Jmax * Kmax)
for name in var_names:
print('StartByte')
print(start_byte)
data = np.frombuffer(byte_list, dtype='float32',
count=Imax * Jmax * Kmax,
offset=start_byte)
start_byte = start_byte + 4 * Imax * Jmax * Kmax
zone_data[name] = data
#var_data=list()
#for I in range(0, Imax):
# for J in range(0, Jmax):
# for K in range(0, Kmax):
# end_byte = start_byte + 4
#print(byte_list[start_byte:end_byte])
#print(construct.Float32l.parse(byte_list[start_byte:end_byte]))
# var_data.append( construct.Float32b.parse(byte_list[start_byte:end_byte]))
# start_byte = end_byte
# for J in range(0, Jmax):
# end_byte = start_byte + 4
# #print(construct.Float32l.parse(byte_list[start_byte:end_byte]))
# var_data.append(construct.Float32b.parse(byte_list[start_byte:end_byte]))
# start_byte = end_byte
# for K in range(0, Kmax):
# end_byte = start_byte + 4
# #print(construct.Float32l.parse(byte_list[start_byte:end_byte]))
# var_data.append(construct.Float32b.parse(byte_list[start_byte:end_byte]))
# start_byte = end_byte
zones_list.append(zone_data)
print('start_data_list')
print(start_byte)
zone_counter = zone_counter + 1
return zones_list
def read_data(byte_list, header, binary_file):
eo_header = header['EofHeader']
num_zones = len(header['ZoneMarkers'])
zone_markers = find_zones_data(byte_list[eo_header:], num_zones, eo_header)
zones_list = read_zones(byte_list, zone_markers, header, binary_file)
print('len_byte_list')
print(len(byte_list))
return {'ZoneMarkers':zone_markers,
'Zones':zones_list}
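# Hedged usage sketch (the file name is hypothetical): read a Tecplot binary
# file with the helpers above.
if __name__ == '__main__':
    with open('example.plt', 'rb') as binary_file:
        byte_list = binary_file.read()
        header = read_header(byte_list)
        if header['Correct']:
            print(header['Title'], header['VarNames'])
            data = read_data(byte_list, header, binary_file)
            print(data['ZoneMarkers'])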
|
"""Generic GeoJSON feed."""
import logging
from datetime import datetime
from typing import Dict, List, Optional, Tuple
from aio_geojson_client.feed import GeoJsonFeed
from aiohttp import ClientSession
from geojson import FeatureCollection
from .feed_entry import GenericFeedEntry
_LOGGER = logging.getLogger(__name__)
class GenericFeed(GeoJsonFeed[GenericFeedEntry]):
"""Generic GeoJSON feed."""
def __init__(
self,
websession: ClientSession,
home_coordinates: Tuple[float, float],
url: str,
        filter_radius: Optional[float] = None,
):
"""Initialise this service."""
super().__init__(websession, home_coordinates, url, filter_radius=filter_radius)
def __repr__(self):
"""Return string representation of this feed."""
return "<{}(home={}, url={}, radius={})>".format(
self.__class__.__name__,
self._home_coordinates,
self._url,
self._filter_radius,
)
def _new_entry(
self, home_coordinates: Tuple[float, float], feature, global_data: Dict
) -> GenericFeedEntry:
"""Generate a new entry."""
return GenericFeedEntry(home_coordinates, feature)
def _extract_last_timestamp(
self, feed_entries: List[GenericFeedEntry]
) -> Optional[datetime]:
"""Determine latest (newest) entry from the filtered feed."""
if feed_entries:
dates = sorted(
filter(None, [entry.publication_date for entry in feed_entries]),
reverse=True,
)
if dates:
return dates[0]
return None
def _extract_from_feed(self, feed: FeatureCollection) -> Optional[Dict]:
"""Extract global metadata from feed."""
return None
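# Hedged usage sketch (URL and coordinates are illustrative): fetch the feed
# once and inspect the returned entries. `update()` is inherited from
# GeoJsonFeed and is assumed to return a (status, entries) tuple.
#
#     async def main():
#         async with ClientSession() as websession:
#             feed = GenericFeed(websession, (-33.0, 151.0),
#                                "https://example.com/feed.json",
#                                filter_radius=50.0)
#             status, entries = await feed.update()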
|
import pytest
from opera.error import ParseError
from opera.parser.tosca.integer import Integer
from opera.parser.yaml.node import Node
class TestValidate:
def test_with_int_data(self):
Integer.validate(Node(1234))
@pytest.mark.parametrize("data", ["4", (), (1, 2, 3), [], ["a", "b"], {}])
    def test_with_non_int_data(self, data):
with pytest.raises(ParseError):
Integer.validate(Node(data))
|
from discord import Embed
from discord.ext import commands
from time import time
from collections import namedtuple
from src.internal.bot import Bot
from src.internal.context import Context
TimedResult = namedtuple('TimedResult', ["time", "rv"])
class Topics(commands.Cog):
"""Get topics for discussion."""
def __init__(self, bot: Bot):
self.bot = bot
@staticmethod
async def timed_coro(coro):
ts = time()
rs = await coro
return TimedResult(round((time() - ts) * 1000, 2), rs)
@commands.command(name="ping")
@commands.is_owner()
async def ping(self, ctx: Context):
"""Beefy ass ping command."""
msend = await self.timed_coro(ctx.send("Pinging..."))
medit = await self.timed_coro(msend.rv.edit(content="Editing..."))
msdel = await self.timed_coro(msend.rv.delete())
embed = Embed(
title="Ping",
colour=0x87CEEB,
timestamp=ctx.message.created_at,
)
embed.add_field(name="WS Latency", value=f"{round(self.bot.latency * 1000, 2)}ms", inline=False)
embed.add_field(name="API Send", value=f"{msend.time}ms", inline=True)
embed.add_field(name="API Edit", value=f"{medit.time}ms", inline=True)
embed.add_field(name="API Delete", value=f"{msdel.time}ms", inline=True)
await ctx.reply(embed=embed)
def setup(bot: Bot):
bot.add_cog(Topics(bot))
|
"""
statistics
---------
Calculate trading statistics
"""
# Use future imports for python 3.0 forward compatibility
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
# Other imports
import pandas as pd
import numpy as np
import operator
import math
import copy
from datetime import datetime
from dateutil.relativedelta import relativedelta
from numpy.lib.stride_tricks import as_strided
#####################################################################
# CONSTANTS
TRADING_DAYS_PER_YEAR = 252
TRADING_DAYS_PER_MONTH = 20
TRADING_DAYS_PER_WEEK = 5
#####################################################################
# HELPER FUNCTIONS
def _difference_in_years(start, end):
""" calculate the number of years between two dates """
diff = end - start
diff_in_years = (diff.days + diff.seconds/86400)/365.2425
return diff_in_years
def _get_trade_bars(ts, tlog, op):
    """ count the bars of `ts` spanned by each trade whose pl_cash matches `op` """
    bars = []
    for i in range(len(tlog.index)):
        if op(tlog['pl_cash'][i], 0):
            entry_date = tlog['entry_date'][i]
            exit_date = tlog['exit_date'][i]
            bars.append(len(ts[entry_date:exit_date].index))
    return bars
def _get_exit_tlog(tlog):
return tlog[(tlog['s_type'] == 'EXIT_ALL') | \
(tlog['s_type'] == 'EXIT_LONG') | \
(tlog['s_type'] == 'EXIT_SHORT')]
#####################################################################
# OVERALL RESULTS
def beginning_balance(capital):
return capital
def ending_balance(dbal):
return dbal[['total']].iat[-1,-1]
def total_net_profit(dbal,capital):
return dbal[['total']].iat[-1,-1]- capital
def gross_profit(tlog):
return tlog[tlog['PnL'] > 0].sum()['PnL']
def gross_loss(tlog):
return tlog[tlog['PnL'] < 0].sum()['PnL']
def profit_and_loss_ratio(tlog):
if gross_profit(tlog) == 0: return 0
if gross_loss(tlog) == 0: return 'Never Lose! This is your OnePiece!'
return gross_profit(tlog) / gross_loss(tlog) * -1
def return_on_initial_capital(dbal, capital):
return total_net_profit(dbal,capital) / capital * 100
def _cagr(B, A, n):
""" calculate compound annual growth rate """
return (math.pow(B / A, 1 / n) - 1) * 100
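# Example: doubling the balance over two years gives
# _cagr(200, 100, 2) == (2 ** 0.5 - 1) * 100, i.e. about 41.42% per year.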
def annual_return_rate(end_balance, capital, start, end):
B = end_balance
A = capital
n = _difference_in_years(start, end)
return _cagr(B, A, n)
def trading_period(start, end):
diff = relativedelta(end, start)
return '{} years {} months {} days'.format(diff.years, diff.months,
diff.days)
def pct_time_in_market(dbal):
df = copy.deepcopy(dbal)
    df.drop(df.columns[-5:], axis=1, inplace=True)
df = df.T
df = pd.DataFrame(df.sum()).replace(0,float('nan'))
df.dropna(inplace=True)
in_market = float(len(df.index))
total_market = float(len(dbal.index))
return '%0.2f%%' % (in_market / total_market *100)
#####################################################################
# SUMS
def total_num_trades(tlog):
return len(tlog.index)
def total_EXIT_trades(ori_tlog):
ori_tlog = _get_exit_tlog(ori_tlog)
return len(ori_tlog.index)
def num_winning_trades(tlog):
return (tlog['PnL'] > 0).sum()
def num_losing_trades(tlog):
return (tlog['PnL'] < 0).sum()
def num_even_trades(tlog):
return (tlog['PnL'] == 0).sum()
def pct_profitable_trades(tlog,ori_tlog):
if total_EXIT_trades(ori_tlog) == 0: return 0
return '%0.2f%%' % (float(num_winning_trades(tlog)) / \
float(total_num_trades(tlog)) * 100)
#####################################################################
# CASH PROFITS AND LOSSES
def avg_profit_per_trade(tlog,ori_tlog,dbal,capital):
if total_EXIT_trades(ori_tlog) == 0: return 0
return float(total_net_profit(dbal,capital)) / total_num_trades(tlog)
def avg_profit_per_winning_trade(tlog):
if num_winning_trades(tlog) == 0: return 0
return float(gross_profit(tlog)) / num_winning_trades(tlog)
def avg_loss_per_losing_trade(tlog):
if num_losing_trades(tlog) == 0: return 0
return float(gross_loss(tlog)) / num_losing_trades(tlog)
def ratio_avg_profit_win_loss(tlog):
if avg_profit_per_winning_trade(tlog) == 0: return 0
if avg_loss_per_losing_trade(tlog) == 0: return 1000
return (avg_profit_per_winning_trade(tlog) /
avg_loss_per_losing_trade(tlog) * -1)
def largest_profit_winning_trade(tlog):
if num_winning_trades(tlog) == 0: return 0
return tlog[tlog['PnL'] > 0].max()['PnL']
def largest_loss_losing_trade(tlog):
if num_losing_trades(tlog) == 0: return 0
return tlog[tlog['PnL'] < 0].min()['PnL']
#####################################################################
# POINTS
def num_winning_points(tlog):
if num_winning_trades(tlog) == 0: return 0
points = tlog['PnL']/tlog['qty']
return points[points > 0].sum()
def num_losing_points(tlog):
if num_losing_trades(tlog) == 0: return 0
points = tlog['PnL']/tlog['qty']
return points[points < 0].sum()
def total_net_points(tlog):
return num_winning_points(tlog) + num_losing_points(tlog)
def avg_points(tlog,ori_tlog):
if total_EXIT_trades(ori_tlog) == 0: return 0
points = tlog['PnL']/tlog['qty']
return points.sum() / len(tlog.index)
def largest_points_winning_trade(tlog):
if num_winning_trades(tlog) == 0: return 0
points = tlog['PnL']/tlog['qty']
return points.max()
def largest_points_losing_trade(tlog):
if num_losing_trades(tlog) == 0: return 0
points = tlog['PnL']/tlog['qty']
return points.min()
def avg_pct_gain_points_per_trade(tlog,ori_tlog):
if total_EXIT_trades(ori_tlog) == 0: return 0
points = tlog['PnL']/tlog['qty']
pct = np.average(points) / np.average(_get_exit_tlog(ori_tlog)['price'])
return '%0.2f%%' % (pct * 100)
def largest_pct_winning_point(tlog):
if num_winning_trades(tlog) == 0: return 0
points = tlog['PnL']/tlog['qty']
return '%0.2f%%' % (points.max() * 100)
def largest_pct_losing_point(tlog):
if num_losing_trades(tlog) == 0: return 0
points = tlog['PnL']/tlog['qty']
return '%0.2f%%' % (points.min() * 100)
#####################################################################
# STREAKS
def _subsequence(s, c):
"""
Takes as parameter list like object s and returns the length of the longest
subsequence of s constituted only by consecutive character 'c's.
Example: If the string passed as parameter is "001000111100", and c is '0',
then the longest subsequence of only '0's has length 3.
"""
    count = 0   # length of the current run of `c`s
    maxlen = 0  # longest run seen so far
    for bit in s:
        if bit == c:            # the run continues
            count = count + 1
            if count > maxlen:  # if necessary, update the maximum
                maxlen = count
        else:                   # the run is broken
            count = 0
    return maxlen
def max_consecutive_winning_periods(tlog,dbal):
if num_winning_trades(tlog) == 0: return 0
return _subsequence(dbal['returns'] > 0, True)
def max_consecutive_losing_periods(tlog,dbal):
if num_losing_trades(tlog) == 0: return 0
return _subsequence(dbal['returns'] > 0, False)
def periods_winning_trades(tlog,dbal):
if num_winning_trades(tlog) == 0: return 0
dbal = dbal['returns']
return len(dbal[dbal > 0])
def periods_losing_trades(tlog,dbal):
if num_losing_trades(tlog) == 0: return 0
dbal = dbal['returns']
return len(dbal[dbal < 0])
#####################################################################
# DRAWDOWN AND RUNUP
def max_closed_out_drawdown(close):
""" only compare each point to the previous running peak O(N) """
running_max = close.expanding().max()
cur_dd = (close - running_max) / running_max * 100
dd_max = min(0, cur_dd.min())
idx = cur_dd.idxmin()
    dd = pd.Series(dtype=object)
dd['max'] = dd_max
dd['peak'] = running_max[idx]
dd['trough'] = close[idx]
dd['start_date'] = close[close == dd['peak']].index[0].strftime('%Y-%m-%d')
dd['end_date'] = idx.strftime('%Y-%m-%d')
close = close[close.index > idx]
rd_mask = close > dd['peak']
if rd_mask.any():
dd['recovery_date'] = \
close[rd_mask].index[0].strftime('%Y-%m-%d')
else:
dd['recovery_date'] = 'Not Recovered Yet'
return dd
def max_intra_day_drawdown(high, low):
""" only compare each point to the previous running peak O(N) """
running_max = high.expanding().max()
cur_dd = (low - running_max) / running_max * 100
dd_max = min(0, cur_dd.min())
idx = cur_dd.idxmin()
    dd = pd.Series(dtype=object)
dd['max'] = dd_max
dd['peak'] = running_max[idx]
dd['trough'] = low[idx]
dd['start_date'] = high[high == dd['peak']].index[0].strftime('%Y-%m-%d')
dd['end_date'] = idx.strftime('%Y-%m-%d')
high = high[high.index > idx]
rd_mask = high > dd['peak']
    if rd_mask.any():
        dd['recovery_date'] = \
            high[rd_mask].index[0].strftime('%Y-%m-%d')
    else:
        dd['recovery_date'] = 'Not Recovered Yet'
    return dd
def _windowed_view(x, window_size):
"""Create a 2d windowed view of a 1d array.
`x` must be a 1d numpy array.
`numpy.lib.stride_tricks.as_strided` is used to create the view.
The data is not copied.
Example:
>>> x = np.array([1, 2, 3, 4, 5, 6])
>>> _windowed_view(x, 3)
array([[1, 2, 3],
[2, 3, 4],
[3, 4, 5],
[4, 5, 6]])
"""
y = as_strided(x, shape=(x.size - window_size + 1, window_size),
strides=(x.strides[0], x.strides[0]))
return y
def rolling_max_dd(ser, period, min_periods=1):
"""Compute the rolling maximum drawdown of `ser`.
`ser` must be a Series.
`min_periods` should satisfy `1 <= min_periods <= window_size`.
    Returns a Series aligned with `ser`'s index.
    """
"""
window_size = period + 1
x = ser.values
if min_periods < window_size:
pad = np.empty(window_size - min_periods)
pad.fill(x[0])
x = np.concatenate((pad, x))
y = _windowed_view(x, window_size)
running_max_y = np.maximum.accumulate(y, axis=1)
dd = (y - running_max_y) / running_max_y * 100
rmdd = dd.min(axis=1)
return pd.Series(data=rmdd, index=ser.index, name=ser.name)
def rolling_max_ru(ser, period, min_periods=1):
"""Compute the rolling maximum runup of `ser`.
`ser` must be a Series.
`min_periods` should satisfy `1 <= min_periods <= window_size`.
    Returns a Series aligned with `ser`'s index.
    """
"""
window_size = period + 1
x = ser.values
if min_periods < window_size:
pad = np.empty(window_size - min_periods)
pad.fill(x[0])
x = np.concatenate((pad, x))
y = _windowed_view(x, window_size)
running_min_y = np.minimum.accumulate(y, axis=1)
ru = (y - running_min_y) / running_min_y * 100
rmru = ru.max(axis=1)
return pd.Series(data=rmru, index=ser.index, name=ser.name)
#####################################################################
# PERCENT CHANGE - used to compute several statistics
def pct_change(close, period):
diff = (close.shift(-period) - close) / close * 100
diff.dropna(inplace=True)
return diff
#####################################################################
# RATIOS
def sharpe_ratio(rets, risk_free=0.00, period=TRADING_DAYS_PER_YEAR):
"""
summary Returns the daily Sharpe ratio of the returns.
param rets: 1d numpy array or fund list of daily returns (centered on 0)
param risk_free: risk free returns, default is 0%
return Sharpe Ratio, computed off daily returns
"""
dev = np.std(rets, axis=0)
mean = np.mean(rets, axis=0)
sharpe = (mean*period - risk_free) / (dev * np.sqrt(period))
return sharpe
def sortino_ratio(rets, risk_free=0.00, period=TRADING_DAYS_PER_YEAR):
"""
summary Returns the daily Sortino ratio of the returns.
param rets: 1d numpy array or fund list of daily returns (centered on 0)
param risk_free: risk free return, default is 0%
return Sortino Ratio, computed off daily returns
"""
mean = np.mean(rets, axis=0)
negative_rets = rets[rets < 0]
dev = np.std(negative_rets, axis=0)
sortino = (mean*period - risk_free) / (dev * np.sqrt(period))
return sortino
#####################################################################
# STATS - this is the primary call used to generate the results
def stats(tlog,ori_tlog, dbal, start, end, capital):
"""
Compute trading stats
Parameters
----------
ts : Dataframe
Time series of security prices (date, high, low, close, volume,
adj_close)
tlog : Dataframe
Trade log (entry_date, entry_price, long_short, qty,
exit_date, exit_price, pl_points, pl_cash, cumul_total)
dbal : Dataframe
Daily Balance (date, high, low, close)
start : datetime
date of first buy
end : datetime
date of last sell
capital : float
starting capital
Examples
--------
Returns
-------
stats : Series of stats
"""
    stats = pd.Series(dtype=object)
# OVERALL RESULTS
stats['start'] = start.strftime('%Y-%m-%d')
stats['end'] = end.strftime('%Y-%m-%d')
stats['beginning_balance'] = beginning_balance(capital)
stats['ending_balance'] = ending_balance(dbal)
stats['total_net_profit'] = total_net_profit(dbal,capital)
stats['gross_profit'] = gross_profit(tlog)
stats['gross_loss'] = gross_loss(tlog)
stats['P/L'] = profit_and_loss_ratio(tlog)
    stats['return_on_initial_capital'] = \
        return_on_initial_capital(dbal, capital)
cagr = annual_return_rate(ending_balance(dbal), capital, start, end)
stats['annual_return_rate'] = cagr
stats['trading_period'] = trading_period(start, end)
stats['pct_time_in_market'] = pct_time_in_market(dbal)
# SUMS
stats['total_num_trades'] = total_num_trades(tlog)
stats['total_EXIT_trades'] = total_EXIT_trades(ori_tlog)
stats['num_winning_trades'] = num_winning_trades(tlog)
stats['num_losing_trades'] = num_losing_trades(tlog)
stats['num_even_trades'] = num_even_trades(tlog)
stats['pct_profitable_trades'] = pct_profitable_trades(tlog,ori_tlog)
# CASH PROFITS AND LOSSES
stats['avg_profit_per_trade'] = avg_profit_per_trade(tlog,ori_tlog,dbal,capital)
stats['avg_profit_per_winning_trade'] = avg_profit_per_winning_trade(tlog)
stats['avg_loss_per_losing_trade'] = avg_loss_per_losing_trade(tlog)
stats['ratio_avg_profit_win_loss'] = ratio_avg_profit_win_loss(tlog)
stats['largest_profit_winning_trade'] = largest_profit_winning_trade(tlog)
stats['largest_loss_losing_trade'] = largest_loss_losing_trade(tlog)
# POINTS
stats['num_winning_points'] = num_winning_points(tlog)
stats['num_losing_points'] = num_losing_points(tlog)
stats['total_net_points'] = total_net_points(tlog)
stats['avg_points'] = avg_points(tlog,ori_tlog)
stats['largest_points_winning_trade'] = largest_points_winning_trade(tlog)
stats['largest_points_losing_trade'] = largest_points_losing_trade(tlog)
stats['avg_pct_gain_per_trade'] = avg_pct_gain_points_per_trade(tlog,ori_tlog)
stats['largest_pct_winning_trade'] = largest_pct_winning_point(tlog)
stats['largest_pct_losing_trade'] = largest_pct_losing_point(tlog)
# STREAKS
stats['max_consecutive_winning_periods'] = \
max_consecutive_winning_periods(tlog,dbal)
stats['max_consecutive_losing_periods'] = \
max_consecutive_losing_periods(tlog,dbal)
stats['periods_winning_trades'] = \
periods_winning_trades(tlog,dbal)
stats['periods_losing_trades'] = periods_losing_trades(tlog,dbal)
#
# # DRAWDOWN
# dd = max_closed_out_drawdown(dbal['close'])
# stats['max_closed_out_drawdown'] = dd['max']
# stats['max_closed_out_drawdown_start_date'] = dd['start_date']
# stats['max_closed_out_drawdown_end_date'] = dd['end_date']
# stats['max_closed_out_drawdown_recovery_date'] = dd['recovery_date']
# stats['drawdown_recovery'] = _difference_in_years(
# datetime.strptime(dd['start_date'], '%Y-%m-%d'),
# datetime.strptime(dd['end_date'], '%Y-%m-%d')) *-1
# stats['drawdown_annualized_return'] = dd['max'] / cagr
# dd = max_intra_day_drawdown(dbal['high'], dbal['low'])
# stats['max_intra_day_drawdown'] = dd['max']
# dd = rolling_max_dd(dbal['close'], TRADING_DAYS_PER_YEAR)
# stats['avg_yearly_closed_out_drawdown'] = np.average(dd)
# stats['max_yearly_closed_out_drawdown'] = min(dd)
# dd = rolling_max_dd(dbal['close'], TRADING_DAYS_PER_MONTH)
# stats['avg_monthly_closed_out_drawdown'] = np.average(dd)
# stats['max_monthly_closed_out_drawdown'] = min(dd)
# dd = rolling_max_dd(dbal['close'], TRADING_DAYS_PER_WEEK)
# stats['avg_weekly_closed_out_drawdown'] = np.average(dd)
# stats['max_weekly_closed_out_drawdown'] = min(dd)
#
# # RUNUP
# ru = rolling_max_ru(dbal['close'], TRADING_DAYS_PER_YEAR)
# stats['avg_yearly_closed_out_runup'] = np.average(ru)
# stats['max_yearly_closed_out_runup'] = ru.max()
# ru = rolling_max_ru(dbal['close'], TRADING_DAYS_PER_MONTH)
# stats['avg_monthly_closed_out_runup'] = np.average(ru)
# stats['max_monthly_closed_out_runup'] = max(ru)
# ru = rolling_max_ru(dbal['close'], TRADING_DAYS_PER_WEEK)
# stats['avg_weekly_closed_out_runup'] = np.average(ru)
# stats['max_weekly_closed_out_runup'] = max(ru)
#
# # PERCENT CHANGE
# pc = pct_change(dbal['close'], TRADING_DAYS_PER_YEAR)
# stats['pct_profitable_years'] = (pc > 0).sum() / len(pc) * 100
# stats['best_year'] = pc.max()
# stats['worst_year'] = pc.min()
# stats['avg_year'] = np.average(pc)
# stats['annual_std'] = pc.std()
# pc = pct_change(dbal['close'], TRADING_DAYS_PER_MONTH)
# stats['pct_profitable_months'] = (pc > 0).sum() / len(pc) * 100
# stats['best_month'] = pc.max()
# stats['worst_month'] = pc.min()
# stats['avg_month'] = np.average(pc)
# stats['monthly_std'] = pc.std()
# pc = pct_change(dbal['close'], TRADING_DAYS_PER_WEEK)
# stats['pct_profitable_weeks'] = (pc > 0).sum() / len(pc) * 100
# stats['best_week'] = pc.max()
# stats['worst_week'] = pc.min()
# stats['avg_week'] = np.average(pc)
# stats['weekly_std'] = pc.std()
#
# # RATIOS
# stats['sharpe_ratio'] = sharpe_ratio(dbal['close'].pct_change())
# stats['sortino_ratio'] = sortino_ratio(dbal['close'].pct_change())
return stats
#####################################################################
# SUMMARY - stats() must be called before calling any of these functions
def summary(stats, *metrics):
""" Returns stats summary in a DataFrame.
stats() must be called before calling this function """
index = []
columns = ['strategy']
data = []
# add metrics
for metric in metrics:
index.append(metric)
data.append(stats[metric])
df = pd.DataFrame(data, columns=columns, index=index)
return df
def summary2(stats, benchmark_stats, *metrics):
""" Returns stats with benchmark summary in a DataFrame.
stats() must be called before calling this function """
index = []
columns = ['strategy', 'benchmark']
data = []
# add metrics
for metric in metrics:
index.append(metric)
data.append((stats[metric], benchmark_stats[metric]))
df = pd.DataFrame(data, columns=columns, index=index)
return df
def summary3(stats, benchmark_stats, *extras):
""" Returns stats with benchmark summary in a DataFrame.
stats() must be called before calling this function
*extras: extra metrics """
index = ['annual_return_rate',
'max_closed_out_drawdown',
'drawdown_annualized_return',
'pct_profitable_months',
'best_month',
'worst_month',
'sharpe_ratio',
'sortino_ratio']
columns = ['strategy', 'benchmark']
data = [(stats['annual_return_rate'],
benchmark_stats['annual_return_rate']),
(stats['max_closed_out_drawdown'],
benchmark_stats['max_closed_out_drawdown']),
(stats['drawdown_annualized_return'],
benchmark_stats['drawdown_annualized_return']),
(stats['pct_profitable_months'],
benchmark_stats['pct_profitable_months']),
(stats['best_month'],
benchmark_stats['best_month']),
(stats['worst_month'],
benchmark_stats['worst_month']),
(stats['sharpe_ratio'],
benchmark_stats['sharpe_ratio']),
(stats['sortino_ratio'],
benchmark_stats['sortino_ratio'])]
# add extra metrics
for extra in extras:
index.append(extra)
data.append((stats[extra], benchmark_stats[extra]))
df = pd.DataFrame(data, columns=columns, index=index)
return df
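# Hedged demo on synthetic data (the returns series below is made up, not part
# of the original module): exercise the ratio and drawdown helpers.
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    daily_rets = rng.normal(0.0005, 0.01, TRADING_DAYS_PER_YEAR)
    dates = pd.date_range('2020-01-01', periods=TRADING_DAYS_PER_YEAR, freq='B')
    close = pd.Series(100 * (1 + daily_rets).cumprod(), index=dates)
    print('sharpe_ratio :', sharpe_ratio(daily_rets))
    print('sortino_ratio:', sortino_ratio(daily_rets))
    print(max_closed_out_drawdown(close))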
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from absl import app
from absl.testing import absltest
from grr_response_server import blob_store_test_mixin
from grr_response_server import db_test_mixin
from grr_response_server.databases import mem
from grr.test_lib import test_lib
class MemoryDBTest(db_test_mixin.DatabaseTestMixin,
blob_store_test_mixin.BlobStoreTestMixin, absltest.TestCase):
def CreateDatabase(self):
return mem.InMemoryDB(), None
def CreateBlobStore(self):
return self.CreateDatabase()
def main(args):
test_lib.main(args)
if __name__ == "__main__":
app.run(main)
|
#!/usr/bin/env python3
"""
Paxos application class is part of a thesis work about distributed systems.
Two main threads are created, one with a UDP socket and one with a TCP socket:
UDP socket - Command exchange - ADHOC commands sent in the ether
TCP socket - Data exchange
The membership is controlled by the network application.
This application uses pickle to serialize data, so it shouldn't be used outside
the academic world since pickle has security flaws.
"""
__author__ = "Bruno Chianca Ferreira"
__license__ = "MIT"
__version__ = "0.3"
__maintainer__ = "Bruno Chianca Ferreira"
__email__ = "brunobcf@gmail.com"
import socket, random, sys, json, traceback, zlib, fcntl, time, threading, pickle, asyncio, os
from apscheduler.schedulers.background import BackgroundScheduler
from classes.network import network_sockets
from classes import prompt, tools
from struct import pack
from struct import unpack
class App():
def __init__(self, Node, tag, time_scale, second, tag_number):
'Initializes the properties of the Node object'
#### Genesis Common
random.seed(tag)
self.Node = Node
self.tag = tag
self.tag_number = tag_number
self.debug = False
self.multiplier = time_scale
self.scheduler = BackgroundScheduler()
### Default options that are replaced by settings file
self.bcast_group = '10.0.0.255' #broadcast ip address
self.port = 56555 # TCP/UDP port used by paxos to talk to acceptors and learners
self.client_port = 56444 # Client entry
self.max_packet = 65500 #max packet size to listen
self.proposal_timeout = 5000
self.quorum_len = 10 #initial value, should be made by topology update?
### Application variables
        self.max_log = 100000 #what is more practical? Create a log rotation or just use DEQUE?
self.job_queue = []
self.job_hist = []
self.consensus_log = [None] * self.max_log
self.leader = ''
self.quorum = []
self.max_round = 0
self.sequence = 0
self.state = "ENABLED" #current state
self.seek_head = -1
self.request = []
##################### Constructor actions #########################
self._setup()
self.udp_interface = network_sockets.UdpInterface(self._packet_handler, debug=False, port=self.port, interface='')
self.tcp_interface = network_sockets.TcpInterface(self._packet_handler, debug=False, port=self.port, interface='')
self.scheduler.add_job(self._check_quorum, 'interval', seconds=1, id='quorum')
self.scheduler.add_job(self._broadcast_leader, 'interval', seconds=1, id='leader')
#self.scheduler.add_job(self._find_gaps, 'interval', seconds=1, id='gap')
############# Public methods ########################
def start(self):
self.Node.Tracer.add_status_trace("Time" + ";" + "State" + ';'+ 'Role' + ';' + 'Chosen Value' +';' + 'Local Sequence #' +';' + 'Last promise' +';' + 'Current Quorum' + ';' + 'Set Quorum')
self.udp_interface.start()
self.tcp_interface.start()
self.scheduler.start()
tools.printxy(2, 20 , "L: ---NONE--- ")
self.election_round()
self._auto_job()
def rsm_start(self):
self.Node.Tracer.add_status_trace("Time" + ";" + "State" + ';'+ 'Role' + ';' + 'Chosen Value' +';' + 'Local Sequence #' +';' + 'Last promise' +';' + 'Current Quorum' + ';' + 'Set Quorum')
self.udp_interface.start()
self.tcp_interface.start()
self.scheduler.start()
tools.printxy(2, 20 , "L: ---NONE--- ")
def shutdown(self):
self.tcp_interface.send(self.myip, "bye".encode(), 255)
self.udp_interface.send(self.myip, "bye".encode(), 255)
self.udp_interface.shutdown()
self.tcp_interface.shutdown()
self.scheduler.shutdown()
def local(self):
pass
def last_state(self):
last = 0
for index in range(0, self.max_log):
if self.consensus_log[index] == None:
return self.consensus_log[last]
else:
last = index
def toggleDebug(self):
self.debug = not self.debug
if (self.debug):
print("Multipaxos -> Debug mode set to on")
elif (not self.debug):
print("Multipaxos -> Debug mode set to off")
def disable(self):
if self.state == 'ENABLED':
self.set_state('DISABLED')
self.scheduler.shutdown()
#self.tcp_interface.send(self.myip, "bye".encode(), 255)
#self.udp_interface.send(self.myip, "bye".encode(), 255)
self.udp_interface.shutdown()
self.tcp_interface.shutdown()
else:
print("Node not enabled. Skiping...")
def enable(self):
if self.state == 'DISABLED':
self.set_role('FOLLOWER')
self.set_state('ENABLED')
self.udp_interface = network_sockets.UdpInterface(self._packet_handler, debug=False, port=self.port, interface='')
self.tcp_interface = network_sockets.TcpInterface(self._packet_handler, debug=False, port=self.port, interface='')
self.udp_interface.start()
self.tcp_interface.start()
self.scheduler = BackgroundScheduler()
self.scheduler.add_job(self._check_quorum, 'interval', seconds=1, id='quorum')
self.scheduler.add_job(self._broadcast_leader, 'interval', seconds=1, id='leader')
self.scheduler.start()
else:
print("Node not disabled. Skiping...")
def get_leader(self):
return self.leader
def election_round(self):
self.set_role('LEADER')
self.propose(['LEADER',self.tag], leader_round=True)
self.set_role('FOLLOWER')
def propose(self,proposal, leader_round=False):
log = -1
if self.get_role() == 'LEADER':
log = self._propose(proposal, leader_round)
elif self.get_role() != 'LEADER' and leader_round:
log = self._propose(proposal, leader_round)
else:
print("not leader")
print(self.get_role())
return log
def get_role(self):
return self.Node.role
def set_role(self, role):
self.Node.role = role
self.Node.Tracer.add_app_trace('PAXOS->' + self.Node.fulltag + ' Set as ' + self.Node.role)
def get_state(self):
return self.state
def set_state(self, state):
self.state = state
self.Node.Tracer.add_app_trace('PAXOS->' + self.Node.fulltag + ' Stage changed to ' + self.state)
def get_seek_head(self):
return self.seek_head
def increment_seek_head(self, value):
self.seek_head += value
def set_seek_head(self, value):
self.seek_head = value
def sync(self, rounds):
print("syncing")
self.set_role('LEADER')
for i in range(rounds):
self.propose('')
self.set_role('SERVER')
def leader_failed(self):
self.leader = ''
tools.printxy(2, 20 , "L: ---NONE--- ")
############# Private methods #######################
def _setup(self):
self.myip = self.Node.Network.myip
        with open("./classes/apps/multipaxos/settings.json", "r") as settings_file:
            settings = json.loads(settings_file.read())
self.port = settings['controlPort']
self.max_packet = settings['maxPacket']
self.network = settings['network']
self.bcast_group = settings['network'] + "255"
self.proposal_timeout = settings['proposalTimeout']
self.quorum_len = settings['quorumLen']
self.Node.role = 'SERVER'
def _auto_job(self):
'Loads batch jobs from files. File must correspond to node name'
try:
jobs_file = open("./classes/apps/multipaxos/job_" + self.Node.fulltag + ".json","r").read()
jobs_batch = json.loads(jobs_file)
loop = asyncio.get_event_loop()
for job in jobs_batch["jobs"]:
loop.create_task(self._auto_job_add(job['start'],job['type'],job['value']))
loop.run_forever()
loop.close()
except:
#print("No jobs batch for me")
pass
async def _auto_job_add(self, delay, jobtype, value):
'Adds batch jobs to the scheduler'
await asyncio.sleep(delay * self.Node.multiplier)
self._add_job(jobtype, value)
def _add_job(self, jobtype='propose', value=None):
'Adds manual jobs'
if jobtype == 'propose':
try:
self._propose(value)
except:
traceback.print_exc()
elif jobtype == 'leader':
try:
self.set_role('LEADER')
except:
traceback.print_exc()
elif jobtype == 'disable':
try:
self.disable()
except:
traceback.print_exc()
elif jobtype == 'enable':
try:
self.enable()
except:
traceback.print_exc()
def _broadcast_leader(self):
if self.Node.role == 'LEADER':
if len(self.Node.FaultDetector.get_running()) < (len(self.Node.FaultDetector.get_suspect()) + len(self.Node.FaultDetector.get_faulty())):
self.set_role('FOLLOWER')
return
#There are more faulty than running, so it is better not to have a leader
#if leader, let others know
self.leader = self.Node.Network.myip
tools.printxy(2, 20 , "L: "+ str(self.leader) + " ")
            leaderPack = [5, self.seek_head]
            leader_msg = json.dumps(leaderPack).encode()
            self.Node.Bus.emmit(['RSM', 'BCAST_LEADER', self.tag])
            for node in self.quorum:
                msg_id = self._create_id()
                self.Node.Tracer.add_trace(hex(msg_id)+';'+'SEND' + ';' + 'LEADER' + ';' + str(sys.getsizeof(leader_msg)) + ';' + str(node))
                self.worker = threading.Thread(target=self.udp_interface.send, args=(node, leader_msg, msg_id))
                self.worker.start()
def _find_empty(self):
for entry in range(0,self.max_log):
if self.consensus_log[entry] == None:
return entry
def _find_gaps(self):
_request = []
for i in range(len(self.consensus_log)-2):
try:
if self.consensus_log[i] == None:
_request.append(i)
if self.consensus_log[i+1] != None:
self.request = _request
return
except IndexError:
pass
except:
traceback.print_exc()
def _create_id(self):
return zlib.crc32((str(int(time.time()*1000))+ str(self.tag) + str(random.randint(0,10000))).encode())
def _generate_proposal_number(self):
self.max_round += 1
return (self.max_round << 16) + self.tag_number
def _extract_max_round(self, proposal_number):
        this_round = (proposal_number & 0xFFFF0000) >> 16
        server = proposal_number & 0xFFFF
        return (this_round, server)
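    # A minimal standalone sketch of the proposal-number packing used above
    # (illustration only, not part of the protocol code): the round lives in
    # the upper 16 bits and the server tag in the lower 16 bits, so comparing
    # proposal numbers orders first by round, then by server tag.
    #
    #   n = (3 << 16) + 7                                   # round 3, tag 7
    #   assert ((n & 0xFFFF0000) >> 16, n & 0xFFFF) == (3, 7)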
def _increment_proposal(self):
self.sequence += 1
def _status_tracer(self):
self.Node.Tracer.add_status_trace(str(int(time.time()*1000)) + ';' + self.state + ';' + self.Node.role + ';' + str(self.consensus) + ';' + str(self.sequence) + ';' + str(self.last_promise_sent) +';' + str(len(self.quorum)) + ';' + str(self.quorum_len))
def _move_to_history(self, id):
pass
    def _encode(self, obj):
        data = pickle.dumps(obj)
        size = len(data)
        return data, size
def _check_quorum(self):
'Creating my quorum based in the Network class, which is an implementation of a membership protocol and needs to be enhanced'
self.quorum = []
for node in self.Node.Membership.get_servers():
if node[0] != self.myip:
self.quorum.append(node[0])
def _packet_handler(self, payload, sender_ip, connection):
'Callback function for receiving packets'
if (sender_ip == self.myip):
return
try:
payload = pickle.loads(payload)
except:
pass
        if self.debug: print(sender_ip)
#print(payload)
#magic_word = payload[0]
#if magic_word != "genesis":
# return
msg_id = payload[0]
encoded_payload = payload[1]
try:
payload = pickle.loads(encoded_payload)
except:
payload = json.loads(encoded_payload.decode())
pdu = payload[0]
        if pdu == 1: # got a PROPOSAL packet
            self._handle_prop(payload, sender_ip)
            self.Node.Tracer.add_trace(msg_id+';'+'RECV' + ';' + 'PROPOSAL' + ';' + str(sys.getsizeof(encoded_payload)) + ';' + str(sender_ip))
        elif pdu == 2: # got a PROMISE packet
            self._handle_prom(payload, sender_ip)
            self.Node.Tracer.add_trace(msg_id+';'+'RECV' + ';' + 'PROMISE' + ';' + str(sys.getsizeof(encoded_payload)) + ';' + str(sender_ip))
        elif pdu == 3: # got an ACCEPT packet
            self._handle_accept(payload, sender_ip)
            self.Node.Tracer.add_trace(msg_id+';'+'RECV' + ';' + 'ACCEPT' + ';' + str(sys.getsizeof(encoded_payload)) + ';' + str(sender_ip))
        elif pdu == 4: # got an ACCEPTED packet
            self._handle_accepted(payload, sender_ip)
            self.Node.Tracer.add_trace(msg_id+';'+'RECV' + ';' + 'ACCEPTED' + ';' + str(sys.getsizeof(encoded_payload)) + ';' + str(sender_ip))
        elif pdu == 5: # got a LEADER packet
            self._handle_leader(payload, sender_ip)
            self.Node.Tracer.add_trace(msg_id+';'+'RECV' + ';' + 'LEADER' + ';' + str(sys.getsizeof(encoded_payload)) + ';' + str(sender_ip))
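    # Wire-format sketch (inferred from _sender_udp/_sender_tcp below;
    # illustrative only): each packet is pickled as
    #   ["genesis", hex(msg_id), inner_bytes]
    # where inner_bytes decodes to [pdu, ...] with pdu 1=PROPOSAL, 2=PROMISE,
    # 3=ACCEPT, 4=ACCEPTED and 5=LEADER.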
def _handle_prop(self, payload, sender_ip):
seq = payload[1]
time = payload[2]
log = payload[3]
#print(payload)
if self.debug: print("Received a proposal with seq #" + str(seq))
self.Node.Tracer.add_app_trace('PAXOS->' + 'Received proposal ' + str(seq) + ' for slot ' + str(log) + ' from: '+ str(sender_ip))
#find if log is empty
if self.consensus_log[log] == None:
if self.debug: print("Handling because slot is empty")
self.job_hist.append(['PROPOSAL', sender_ip, seq, time,'', '', log])
promise = self._createpromise(seq, None , log)
self.consensus_log[log] = [seq, ""]
msg_id = self._create_id()
self.tcp_interface.send(sender_ip, promise, msg_id)
self.Node.Tracer.add_app_trace('PAXOS->' + 'Sending promise ' + str(seq) + ' for slot ' + str(log) + ' with value ' + str(self.consensus_log[log][1]) + ' to: '+ str(sender_ip))
self.Node.Tracer.add_trace(hex(msg_id)+';'+'SENT' + ';' + 'PROMISE' + ';' + str(sys.getsizeof(promise)) + ';' + str(sender_ip))
else:
if seq > self.consensus_log[log][0]:
if self.debug: print("Handling because seq is higher than the one we primsed before for this slot")
self.job_hist.append(['PROPOSAL', sender_ip, seq, time,'', ''])
#creating promise with old value
promise = self._createpromise(seq, [self.consensus_log[log][1], 0] , log)
(self.max_round,_) = self._extract_max_round(seq)
msg_id = self._create_id()
self.tcp_interface.send(sender_ip, promise, msg_id)
self.consensus_log[log][0] = seq
self.Node.Tracer.add_app_trace('PAXOS->' + 'Sending promise ' + str(seq) + ' for slot ' + str(log) + ' with value ' + str(self.consensus_log[log][1]) + ' to: '+ str(sender_ip))
self.Node.Tracer.add_trace(hex(msg_id)+';'+'SENT' + ';' + 'PROMISE' + ';' + str(sys.getsizeof(promise)) + ';' + str(sender_ip))
            else: #maybe instead of a denial, we could simply not participate
                #the slot was already taken with a higher proposal number; tell
                #the sender, so it does not keep actively waiting
                if self.debug: print("Not handling because seq is lower than the one we promised before for this slot")
promise = self._createpromise(seq, [self.consensus_log[log][0], -1],log)
msg_id = self._create_id()
self.tcp_interface.send(sender_ip, promise, msg_id)
self.Node.Tracer.add_app_trace('PAXOS->' + 'Sending denial ' + str(seq) + ' for slot ' + str(log) + ' with value ' + str(-1) + ' to: '+ str(sender_ip))
self.Node.Tracer.add_trace(hex(msg_id)+';'+'SENT' + ';' + 'PROMISE' + ';' + str(sys.getsizeof(promise)) + ';' + str(sender_ip))
def _handle_prom(self, payload, sender_ip):
lock = threading.Lock()
seq = payload[1]
consensus = payload[2]
log = payload[3]
if consensus == None:
if self.debug: print("Received promise "+ str(seq) + "/" + str(log) + " from:" + str(sender_ip))
self.Node.Tracer.add_app_trace('PAXOS->' + 'Received promise ' + str(seq) + ' for slot ' + str(log) + ' with value ' + str(consensus) + ' from: '+ str(sender_ip))
lock.acquire()
for job in self.job_queue:
if (job[1] == seq) and (job[6] == log):
if job[4] == 'ONGOING' and len(job[5]) <= self.quorum_len / 2:
job[5].append(sender_ip)
lock.release()
elif str(consensus[1]) == "-1":
#todo: can we skip this step?
#this was denied cause was more updated. Lets skip to its round
if self.debug: print("This proposal "+ str(seq) + "/" + str(log) + " was denied. There was a more recent proposal")
self.Node.Tracer.add_app_trace('PAXOS->' + 'Received denial ' + str(seq) + ' for slot ' + str(log) + ' with value ' + str(consensus) + ' from: '+ str(sender_ip))
#checking If I'm the one proposing. Would be better to create a unique ID
#self.Node.role = 'LEADER'
(self.max_round,_) = self._extract_max_round(seq)
self.consensus_log[log] = None
lock.acquire()
for job in self.job_queue:
if (job[1] == seq) and (job[6] == log):
job[4] = 'REJECTED'
lock.release()
else:
if self.debug: print("This proposal "+ str(seq) + "/" + str(log) + "was skiped due to previews consensus. Keeping same value")
self.Node.Tracer.add_app_trace('PAXOS->' + 'Received promise ' + str(payload[1]) + ' with value ' + str(payload[2]) + ' from: '+ str(sender_ip))
#self.Node.role = 'LEADER'
self.consensus_log[log][1] = consensus[0]
lock.acquire()
for job in self.job_queue:
if (job[1] == seq) and (job[6] == log):
job[4] = 'CONSENSUS'
if job[4] == 'ONGOING' or job[4] == 'CONSENSUS':
job[5].append(sender_ip)
lock.release()
def _handle_accept(self, payload, sender_ip):
if self.debug: print(payload)
lock = threading.Lock()
log = payload[3]
value = payload[2]
seq = payload[1]
if self.debug: print("Received ACCEPT "+ str(seq) + "/" + str(log) + "/" + str(value) + " from:" + str(sender_ip))
self.Node.Tracer.add_app_trace('PAXOS->' + 'Received ACCEPT! proposal ' + str(payload[1]) + ' with value ' + str(payload[2]) + ' from: '+ str(sender_ip))
        #handle the ACCEPT even if we never promised for this slot
if self.consensus_log[log] == None:
self.consensus_log[log] = [0, ""]
if (seq >= self.consensus_log[log][0]):
lock.acquire()
#for job in self.job_hist:
# if (seq == job[1]) and (job[6] == log):#job2?
# job[4] = payload[2]
# job[5] = 'ACCEPTED'
lock.release()
self.consensus_log[log][1] = value
(self.max_round,_) = self._extract_max_round(seq)
#self.consensus = [payload[1], payload[2]]
#self.sequence = payload[1]
#TODO : Come up with something for sync
#if log == self.get_seek_head() + 1:
#we are in sync
self.set_seek_head(log)
try:
self.Node.Bus.emmit(['RSM', 'COMMIT', value])
except:
if self.debug: print("Multipaxos-handleaccept->Could not write to BUS")
#elif self.get_seek_head() >= log:
#pass
#elif log > self.get_seek_head() + 1:
#looks like we are falling behind. Call sync
#self.sync(log - self.get_seek_head())
#sync is not working well
# pass
self._createAccepted(payload[1], sender_ip, log)
def _handle_accepted(self, payload, sender_ip):
lock = threading.Lock()
seq = payload[1]
value = payload[2]
log = payload[3]
if self.debug: print("Received ACCEPTED "+ str(seq) + "/" + str(log) + "/" + str(value) + " from:" + str(sender_ip))
self.Node.Tracer.add_app_trace('PAXOS->' + 'Got accepted for proposal '+ str(payload[1]) + ' with value: ' + str(payload[2]))
lock.acquire()
for job in self.job_queue:
if (seq == job[1]) and (job[6] == log):
self.consensus_log[log][1] = value
#TODO : Come up with something for sync
#if log == self.get_seek_head() + 1:
#we are in sync
# self.increment_seek_head(1)
try:
self.Node.Bus.emmit(['RSM', 'COMMIT', value])
except:
if self.debug: print("Multipaxos-handleaccept->Could not write to BUS")
#traceback.print_exc()
self.set_seek_head(log)
#self.consensus = payload[2]
job[4] = 'FINALIZED'
self.job_hist.append(job)
self.job_queue.remove(job)
lock.release()
def _handle_leader(self, payload, sender_ip):
#if self.debug: print("Got leader")
self.Node.Tracer.add_app_trace('PAXOS->' + 'Got leader keepalive from: ' + str(sender_ip))
self.leader = sender_ip
self.Node.FaultDetector.set_leader(sender_ip)
tools.printxy(2, 20 , "L: "+ str(self.leader) + " ")
leader_seek_head = payload[1]
request = []
if leader_seek_head > self.seek_head:
pass
#I am behind
#for i in range(self.seek_head+1, leader_seek_head):
# request.append(i)
#request from leader missing positions
def _propose(self, proposal, leader_round=False):
tlock = threading.Lock()
tlock.acquire()
self.sequence = self._generate_proposal_number()
self.current_log_position = 0
tlock.release()
#log = 0
#if not leader_round:
self.current_log_position = self._find_empty()
log = self.current_log_position
self.job_queue.append(['PROPOSAL', self.sequence, int(time.time()*1000), proposal, 'ONGOING', [], log])
seq = self.sequence
self.consensus_log[log] = [seq, ""]
propPack = [1 , self.sequence, int(time.time()*1000), log]
prop = pickle.dumps(propPack)
self.Node.Tracer.add_app_trace('PAXOS->' + 'Proposal '+ str(seq) + ' for position ' + str(log) + ' with value: ' + str(proposal)+ ' is being sent to: '+ str(self.quorum))
if self.debug: print("Proposing:" + str(self.sequence) + ' for pos ' + str(log) + ' with value: ' + str(proposal))
for node in self.quorum:
msg_id = self._create_id()
self.Node.Tracer.add_trace(hex(msg_id)+';'+'SENT' + ';' + 'PROPOSAL' + ';' + str(sys.getsizeof(prop)) + ';' + str(node))
self.worker = threading.Thread(target=self.tcp_interface.send, args=(node,prop, msg_id))
self.worker.start()
propTd = threading.Thread(target=self._propose_thread, args=(self.quorum, seq, int(time.time()*1000), log))
propTd.start()
return log
def _propose_thread(self, quorum, seq, start, current_log_position):
lock = True
tlock = threading.Lock()
while lock:
if int(time.time()*1000) - start > self.proposal_timeout:
lock = False
if self.debug: print("Proposal rejected in stage 1")
tlock.acquire()
for job in self.job_queue:
if job[1] == seq:
job[4] = 'REJECTED'
self.Node.Tracer.add_app_trace('PAXOS->' + 'Proposal '+ str(seq) + ' was rejected in stage 1')
#print(self.current_log_position)
self.consensus_log[self.current_log_position][1] = 'NOP'
response = [self.current_log_position, -1]
self.job_hist.append(job)
self.job_queue.remove(job)
tlock.release()
else:
tlock.acquire()
for job in self.job_queue:
if (job[1] == seq) and (job[6] == current_log_position):
voters = len(job[5])
#if float(voters) >= (len(quorum) * 2) / 3: #variable quorum
if float(voters) > float((self.quorum_len * 1.0) / 2): #fixed quorum
if job[4] == 'ONGOING':
job[4] = 'PROMISED'
if self.debug: print("Enougth quorum for stage 2")
self.Node.Tracer.add_app_trace('PAXOS->' + 'Proposal '+ str(seq) + ' was accepted in stage 1 by:' + str(voters) + ' voters.' )
lock = False
self._createAccept(seq,current_log_position)
tlock.release()
time.sleep(0.01)
def _createpromise(self, seq, value, log):
promise = [2, seq, value, log]
prom = pickle.dumps(promise)
return prom
def _createAccept(self, seq, log):
if self.debug: print("createAccept -> " + str(seq) + "/" + str(log))
for job in self.job_queue:
if (job[1] == seq) and (job[6] == log):
if job[4] == 'PROMISED':
if self.debug: print("Sending ACCEPT PROMISED "+ str(seq) + "/" + str(log))
accPack = [3, seq, job[3], log]
accept = pickle.dumps(accPack)
                    #Open question: send to the whole quorum, or only to the nodes that sent promises?
#for acceptor in job[5]:
for acceptor in self.quorum:
msg_id = self._create_id()
self.tcp_interface.send(acceptor, accept, msg_id)
self.Node.Tracer.add_trace(hex(msg_id)+';'+'SENT' + ';' + 'ACCEPT' + ';' + str(sys.getsizeof(accept)) + ';' + str(acceptor))
self.Node.Tracer.add_app_trace('PAXOS->' + 'Sent ACCEPT to:'+ str(acceptor))
elif job[4] == 'CONSENSUS':
if self.debug: print("Sending ACCEPT CONSENSUS "+ str(seq) + "/" + str(log))
accPack = [3, seq, self.consensus_log[log][1],log]
accept = pickle.dumps(accPack)
                    #Open question: send to the whole quorum, or only to the nodes that sent promises?
#for acceptor in job[5]:
for acceptor in self.quorum:
msg_id = self._create_id()
self.tcp_interface.send(acceptor, accept, msg_id)
self.Node.Tracer.add_trace(hex(msg_id)+';'+'SENT' + ';' + 'ACCEPT' + ';' + str(sys.getsizeof(accept)) + ';' + str(acceptor))
self.Node.Tracer.add_app_trace('PAXOS->' + 'Sent ACCEPT to:'+ str(acceptor))
else:
if self.debug: print("Not sending ACCEPT "+ str(seq) + "/" + str(log))
def _createAccepted(self, seq, sender, log):
acceptedPack = [4, seq, self.consensus_log[log][1], log]
accepted = pickle.dumps(acceptedPack)
msg_id = self._create_id()
self.tcp_interface.send(sender, accepted, msg_id)
self.Node.Tracer.add_trace(hex(msg_id)+';'+'SENT' + ';' + 'ACCEPTED' + ';' + str(sys.getsizeof(accepted)) + ';' + str(sender))
self.Node.Tracer.add_app_trace('PAXOS->' + 'Sending ACCEPT! proposal ' + str(seq) + ' with value ' + str(self.consensus_log[log][1]) + ' packet to: '+ str(sender))
def _get_ip(self,iface = 'eth0'):
'This should be in routing layer'
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sockfd = sock.fileno()
SIOCGIFADDR = 0x8915
ifreq = pack('16sH14s', iface.encode('utf-8'), socket.AF_INET, b'\x00'*14)
try:
res = fcntl.ioctl(sockfd, SIOCGIFADDR, ifreq)
except:
traceback.print_exc()
return None
ip = unpack('16sH2x4s8x', res)[2]
return socket.inet_ntoa(ip)
def _sender_udp(self, destination, bytes_to_send, msg_id):
bytes_to_send = pickle.dumps(["genesis",hex(msg_id), bytes_to_send])
addrinfo = socket.getaddrinfo(destination, None)[1]
sender_socket = socket.socket(addrinfo[0], socket.SOCK_DGRAM)
sender_socket.sendto(bytes_to_send, (destination, self.port))
sender_socket.close()
def _sender_tcp(self, destination, bytes_to_send, msg_id):
try:
bytes_to_send = pickle.dumps(["genesis", hex(msg_id), bytes_to_send])
addrinfo = socket.getaddrinfo(destination, None)[1]
sender_socket = socket.socket(addrinfo[0], socket.SOCK_STREAM)
sender_socket.settimeout(5)
sender_socket.connect((destination, self.port))
sender_socket.sendall(bytes_to_send)
sender_socket.close()
except:
if self.debug: print("Could not send data to: " + str(destination))
def _prompt(self, command):
if (len(command))>=2:
if command[1] == 'help':
self._printhelp()
elif command[1] == 'propose':
if self.Node.role != 'LEADER':
prompt.print_error('Try this from a node set as leader')
else:
try:
proposal = command[2]
self._propose(proposal)
except:
traceback.print_exc()
prompt.print_alert('Command error')
self._printhelp()
elif command[1] == 'leader':
prompt.print_alert('Setting this node as a leader')
self.set_role('LEADER')
elif command[1] == 'server':
prompt.print_alert('Setting this node as a server')
self.set_role('SERVER')
elif command[1] == 'disable':
if (self.debug): prompt.print_alert('Disabling node')
self.disable()
elif command[1] == 'enable':
if (self.debug): prompt.print_alert('Enabling node')
self.enable()
elif command[1] == 'hist':
self._print_hist()
elif command[1] == 'info':
self.printinfo()
elif command[1] == 'debug':
self.debug = not self.debug
elif command[1] == 'quorum':
self._print_quorum()
elif command[1] == 'log':
self._print_log()
elif command[1] == 'queue':
self._print_queue()
elif command[1] == 'request':
self._print_request()
else:
print("Invalid Option")
self._printhelp()
elif (len(command))==1:
self._printhelp()
def _printhelp(self):
'Prints general information about the application'
print()
print("Options for Multi-Paxos")
print()
print("help - Print this help message")
print("quorum - Show current quorum")
print("queue - Show current job queue")
print("hist - Show current job history")
print("log - Print current consensus log")
print("leader - Set current node as a leader")
print("server - Set current node as a server")
print("disable - Set current node as disabled")
print("enable - Set current node as enabled")
print("propose [value] - Propose a new value to quorum")
print()
def printinfo(self):
'Prints general information about the application'
print()
print("Application stats (MultiPaxos)")
print("State: \t\t" + self.state)
print("Role: \t\t" + self.Node.role)
print("Paxos leader: \t" + self.leader)
print()
def _print_queue(self):
print('Local job queue:')
print()
for job in self.job_queue:
print(job)
def _print_hist(self):
print('Local history:')
print()
for job in self.job_hist:
print(job)
def _print_quorum(self):
print('Current Quorum:')
print()
for node in self.quorum:
print(node)
def _print_log(self):
print('Current seek head:' + str(self.get_seek_head()))
print('Current log:')
print()
for entry in range(0,self.max_log):
#for entry in self.consensus_log:
if self.consensus_log[entry] != None:
seq, ser = self._extract_max_round(self.consensus_log[entry][0])
print(str(entry) + "-Seq:" + str(seq) + " Ser:" + str(ser) + "-->" + str(self.consensus_log[entry]))
def _print_request(self):
print('Current seek head:' + str(self.get_seek_head()))
print('Current gap: ' + str(self.request))
print()
|
from __future__ import division, print_function, absolute_import
import numpy as np
from matplotlib import pyplot as plt
def overlay_args(**kwargs):
    """ convenience function for populating overlay kwargs """
    args = {}
    args['vmin'] = kwargs.pop('vmin', None)
    args['vmax'] = kwargs.pop('vmax', None)
    args['cmap'] = kwargs.pop('cmap', plt.cm.hot)
    args['maskval'] = kwargs.pop('maskval', 0)
    args['add_colorbar'] = kwargs.pop('add_colorbar', False)
    args['colorbar_kwargs'] = kwargs.pop('colorbar_kwargs', {'shrink': 0.9})
    args['alpha'] = kwargs.pop('alpha', 1.0)
    args['alpha_image'] = kwargs.pop('alpha_image', None)
    args['ax'] = kwargs.pop('ax', None)
    return args
def masked_overlay(overlay_image, ax=None, cmap=plt.cm.hot,
add_colorbar=False, colorbar_kwargs={'shrink': 0.9},
vmin=None, vmax=None, alpha=1.0, alpha_image=None,
maskval=0, call_imshow=True):
""" overlay another volume via alpha transparency onto the existing volume
plotted on axis ax.
Parameters
----------
overlay_image : np.ndarray
volume to use for the overlay
ax : matplotlib.axes.Axes, optional
axis to add the overlay to. plt.gca() if unspecified
cmap : matplotlib.colors.Colormap
colormap for the overlay
    add_colorbar : bool, optional
        determine if a colorbar should be added to the axis
colorbar_kwargs : dict, optional
additional arguments to pass on to the colorbar
vmin : float, optional
minimum value for imshow. alpha = 0 anywhere `overlay_image` < `vmin`
vmax : float, optional
maximum value for imshow
alpha_image : np.ndarray, optional
if provided, use this as the transparency channel for the overlay
alpha : float, optional
transparency of the overlay is equal to alpha, unless `alpha_image` is
provided instead
    maskval : float, optional
        anywhere `overlay_image` == `maskval`, alpha = 0
call_imshow : bool, optional
if False, just return the argument dictionary for imshow rather than
calling it directly
"""
if ax is None:
ax = plt.gca()
if vmin is None:
vmin = overlay_image.min()
if vmax is None:
vmax = overlay_image.max()
    if alpha_image is None:
        if maskval is not None:
            alpha_mask = (overlay_image == maskval)
        else:
            alpha_mask = np.zeros(overlay_image.shape, dtype=bool)
    else:
        if alpha_image.max() > 1 or alpha_image.min() < 0:
            raise ValueError("alpha_image must lie in range [0, 1]")
        alpha_mask = np.ones(alpha_image.shape, dtype=bool)
    # also hide anything below vmin
    alpha_mask = alpha_mask | (overlay_image < vmin)
image = (np.clip(overlay_image, vmin, vmax) - vmin) / (vmax - vmin)
image_RGBA = cmap(image) # convert to RGBA
if alpha_mask is not None:
if alpha_image is None:
image_RGBA[..., -1][alpha_mask] = 0 # set
image_RGBA[..., -1] *= alpha
else:
image_RGBA[..., -1] = image_RGBA[..., -1] * alpha_image
if call_imshow:
im = ax.imshow(image_RGBA, cmap=cmap, vmin=vmin, vmax=vmax)
if add_colorbar:
plt.colorbar(im, ax=ax, **colorbar_kwargs)
ax.axis('off')
ax.axis('image')
else:
return (dict(X=image_RGBA, cmap=cmap, vmin=vmin, vmax=vmax),
dict(ax=ax, colorbar_kwargs=colorbar_kwargs))
return im
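# A minimal usage sketch (illustrative; assumes a base image already plotted
# on `ax` and an `overlay` array with zeros where nothing should show):
#
#   fig, ax = plt.subplots()
#   ax.imshow(base_image, cmap=plt.cm.gray)
#   masked_overlay(overlay, ax=ax, maskval=0, alpha=0.7, add_colorbar=True)
#   plt.show()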
|
# This code is heavily based on the code from MLPerf
# https://github.com/mlperf/reference/tree/master/translation/tensorflow/transformer
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import tensorflow as tf
from six.moves import range
from open_seq2seq.parts.transformer import utils, attention_layer, \
ffn_layer, beam_search
from open_seq2seq.parts.transformer.common import PrePostProcessingWrapper, \
LayerNormalization, Transformer_BatchNorm
from .decoder import Decoder
class TransformerDecoder(Decoder):
@staticmethod
def get_required_params():
"""Static method with description of required parameters.
Returns:
dict:
Dictionary containing all the parameters that **have to** be
included into the ``params`` parameter of the
class :meth:`__init__` method.
"""
return dict(Decoder.get_required_params(), **{
'EOS_ID': int,
'layer_postprocess_dropout': float,
'num_hidden_layers': int,
'hidden_size': int,
'num_heads': int,
'attention_dropout': float,
'relu_dropout': float,
'filter_size': int,
'batch_size': int,
'tgt_vocab_size': int,
'beam_size': int,
'alpha': float,
'extra_decode_length': int,
'num_features': int,
})
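  # A sketch of a ``params`` dict satisfying the required keys above (the
  # values are illustrative, not tuned settings):
  #
  #   params = {
  #       'EOS_ID': 1, 'layer_postprocess_dropout': 0.1,
  #       'num_hidden_layers': 6, 'hidden_size': 512, 'num_heads': 8,
  #       'attention_dropout': 0.1, 'relu_dropout': 0.1, 'filter_size': 2048,
  #       'batch_size': 32, 'tgt_vocab_size': 32000, 'beam_size': 4,
  #       'alpha': 0.6, 'extra_decode_length': 50, 'num_features': 80,
  #   }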
@staticmethod
def get_optional_params():
"""Static method with description of optional parameters.
Returns:
dict:
Dictionary containing all the parameters that **can** be
included into the ``params`` parameter of the
class :meth:`__init__` method.
"""
return dict(Decoder.get_optional_params(), **{
'regularizer': None, # any valid TensorFlow regularizer
'regularizer_params': dict,
'initializer': None, # any valid TensorFlow initializer
'initializer_params': dict,
'GO_SYMBOL': int,
'PAD_SYMBOL': int,
'END_SYMBOL': int,
'norm_params': dict,
})
def _cast_types(self, input_dict):
return input_dict
def __init__(self, params, model,
name="transformer_decoder", mode='train'):
super(TransformerDecoder, self).__init__(params, model, name, mode)
self.embedding_softmax_layer = None
self.output_normalization = None
self._mode = mode
self.layers = []
self.batch_size = self.params.get("batch_size")
self.num_features = self.params.get("num_features")
    # In the original Transformer paper, embeddings are shared between the
    # encoder and the decoder, and the final projection is transpose(E_weights);
    # we currently only support this behaviour.
self.params['shared_embed'] = True
self.norm_params = self.params.get("norm_params", {"type": "layernorm_L2" })
    self.regularizer = self.params.get("regularizer", None)
    if self.regularizer is not None:
      self.regularizer_params = params.get("regularizer_params", {'scale': 0.0})
      self.regularizer = self.regularizer(self.regularizer_params['scale']) \
        if self.regularizer_params['scale'] > 0.0 else None
#print("reg", self.regularizer)
def _decode(self, input_dict):
if 'target_tensors' in input_dict:
targets = input_dict['target_tensors'][0]
else:
targets = None
encoder_outputs = input_dict['encoder_output']['outputs']
inputs_attention_bias = (
input_dict['encoder_output']['inputs_attention_bias']
)
self.embedding_softmax_layer = (
input_dict['encoder_output']['embedding_softmax_layer']
)
with tf.name_scope("decode"):
training = (self.mode == "train")
# prepare decoder layers
if len(self.layers) == 0:
for _ in range(self.params["num_hidden_layers"]):
self_attention_layer = attention_layer.SelfAttention(
hidden_size=self.params["hidden_size"],
num_heads=self.params["num_heads"],
attention_dropout=self.params["attention_dropout"],
train=training,
regularizer=self.regularizer,
batch_size=self.batch_size,
num_features=self.num_features
)
enc_dec_attention_layer = attention_layer.Attention(
hidden_size=self.params["hidden_size"],
num_heads=self.params["num_heads"],
attention_dropout=self.params["attention_dropout"],
train=training,
regularizer=self.regularizer,
batch_size=self.batch_size,
num_features=self.num_features
)
feed_forward_network = ffn_layer.FeedFowardNetwork(
hidden_size=self.params["hidden_size"],
filter_size=self.params["filter_size"],
relu_dropout=self.params["relu_dropout"],
train=training,
num_features=self.num_features,
batch_size=self.batch_size,
regularizer=self.regularizer
)
self.layers.append([
PrePostProcessingWrapper(self_attention_layer, self.params,
training),
PrePostProcessingWrapper(enc_dec_attention_layer, self.params,
training),
PrePostProcessingWrapper(feed_forward_network, self.params,
training)
])
print("Decoder:", self.norm_params["type"], self.mode)
if self.norm_params["type"] == "batch_norm":
self.output_normalization = Transformer_BatchNorm(
training=training,
params=self.norm_params)
else:
self.output_normalization = LayerNormalization(
hidden_size=self.params["hidden_size"],
params=self.norm_params)
if targets is None:
return self.predict(encoder_outputs, inputs_attention_bias)
else:
logits, outputs = self.decode_pass(targets, encoder_outputs,
inputs_attention_bias)
CSI="\x1B["
print(CSI+"32;40m" + "open_seq2seq/decoders/transformer_decoder.py 160" + CSI + "0m")
print('outputs')
print(outputs)
return {"logits": logits,
#"outputs": outputs,
"outputs": [tf.argmax(logits, axis=-1)],
"final_state": None,
"final_sequence_lengths": None}
def _call(self, decoder_inputs, encoder_outputs, decoder_self_attention_bias,
attention_bias, cache=None):
for n, layer in enumerate(self.layers):
self_attention_layer = layer[0]
enc_dec_attention_layer = layer[1]
feed_forward_network = layer[2]
# Run inputs through the sublayers.
layer_name = "layer_%d" % n
layer_cache = cache[layer_name] if cache is not None else None
with tf.variable_scope(layer_name):
with tf.variable_scope("self_attention"):
# TODO: Figure out why this is needed
# decoder_self_attention_bias = tf.cast(x=decoder_self_attention_bias,
# dtype=decoder_inputs.dtype)
decoder_inputs = self_attention_layer(
decoder_inputs, decoder_self_attention_bias, cache=layer_cache,
)
with tf.variable_scope("encdec_attention"):
decoder_inputs = enc_dec_attention_layer(
decoder_inputs, encoder_outputs, attention_bias,
)
with tf.variable_scope("ffn"):
decoder_inputs = feed_forward_network(decoder_inputs)
return self.output_normalization(decoder_inputs)
def decode_pass(self, targets, encoder_outputs, inputs_attention_bias):
"""Generate logits for each value in the target sequence.
Args:
targets: target values for the output sequence.
int tensor with shape [batch_size, target_length]
encoder_outputs: continuous representation of input sequence.
float tensor with shape [batch_size, input_length, hidden_size]
inputs_attention_bias: float tensor with shape [batch_size, 1, 1, input_length]
Returns:
float32 tensor with shape [batch_size, target_length, vocab_size]
"""
# Prepare inputs to decoder layers by shifting targets, adding positional
# encoding and applying dropout.
decoder_inputs = self.embedding_softmax_layer(targets)
with tf.name_scope("shift_targets"):
# Shift targets to the right, and remove the last element
decoder_inputs = tf.pad(
decoder_inputs, [[0, 0], [1, 0], [0, 0]],
)[:, :-1, :]
with tf.name_scope("add_pos_encoding"):
length = tf.shape(decoder_inputs)[1]
# decoder_inputs += utils.get_position_encoding(
# length, self.params["hidden_size"])
decoder_inputs += tf.cast(
utils.get_position_encoding(length, self.params["hidden_size"]),
dtype=self.params['dtype'],
)
if self.mode == "train":
decoder_inputs = tf.nn.dropout(decoder_inputs,
keep_prob = 1 - self.params["layer_postprocess_dropout"] )
# Run values
    decoder_self_attention_bias = utils.get_decoder_self_attention_bias(
        length,
        dtype=tf.float32,  # dtype=self._params["dtype"]
    )
# do decode
outputs = self._call(
decoder_inputs=decoder_inputs,
encoder_outputs=encoder_outputs,
decoder_self_attention_bias=decoder_self_attention_bias,
attention_bias=inputs_attention_bias,
)
logits = self.embedding_softmax_layer.linear(outputs)
return logits, outputs
def _get_symbols_to_logits_fn(self, max_decode_length):
"""Returns a decoding function that calculates logits of the next tokens."""
timing_signal = utils.get_position_encoding(
max_decode_length + 1, self.params["hidden_size"],
)
decoder_self_attention_bias = utils.get_decoder_self_attention_bias(
max_decode_length, dtype = tf.float32
# dtype=self._params["dtype"]
)
def symbols_to_logits_fn(ids, i, cache):
"""Generate logits for next potential IDs.
Args:
ids: Current decoded sequences.
int tensor with shape [batch_size * beam_size, i + 1]
i: Loop index
cache: dictionary of values storing the encoder output, encoder-decoder
attention bias, and previous decoder attention values.
Returns:
Tuple of
(logits with shape [batch_size * beam_size, vocab_size],
updated cache values)
"""
# Set decoder input to the last generated IDs
decoder_input = ids[:, -1:]
# Preprocess decoder input by getting embeddings and adding timing signal.
decoder_input = self.embedding_softmax_layer(decoder_input)
decoder_input += tf.cast(x=timing_signal[i:i + 1],
dtype=decoder_input.dtype)
self_attention_bias = decoder_self_attention_bias[:, :, i:i + 1, :i + 1]
decoder_outputs = self._call(
decoder_input, cache.get("encoder_outputs"), self_attention_bias,
cache.get("encoder_decoder_attention_bias"), cache,
)
logits = self.embedding_softmax_layer.linear(decoder_outputs)
logits = tf.squeeze(logits, axis=[1])
return tf.cast(logits, tf.float32), cache
return symbols_to_logits_fn
def predict(self, encoder_outputs, encoder_decoder_attention_bias):
"""Return predicted sequence."""
#batch_size = tf.shape(encoder_outputs)[0]
batch_size = self.batch_size
input_length = tf.shape(encoder_outputs)[1]
max_decode_length = input_length + self.params["extra_decode_length"]
symbols_to_logits_fn = self._get_symbols_to_logits_fn(max_decode_length)
# Create initial set of IDs that will be passed into symbols_to_logits_fn.
initial_ids = tf.zeros([batch_size], dtype=tf.int32)
# Create cache storing decoder attention values for each layer.
cache = {
"layer_%d" % layer: {
"k": tf.zeros([batch_size, 0,
self.params["hidden_size"]],
dtype=encoder_outputs.dtype),
"v": tf.zeros([batch_size, 0,
self.params["hidden_size"]],
dtype=encoder_outputs.dtype),
} for layer in range(self.params["num_hidden_layers"])
}
# Add encoder output and attention bias to the cache.
cache["encoder_outputs"] = encoder_outputs
cache["encoder_decoder_attention_bias"] = encoder_decoder_attention_bias
# Use beam search to find the top beam_size sequences and scores.
decoded_ids, scores = beam_search.sequence_beam_search(
symbols_to_logits_fn=symbols_to_logits_fn,
initial_ids=initial_ids,
initial_cache=cache,
vocab_size=self.params["tgt_vocab_size"],
beam_size=self.params["beam_size"],
alpha=self.params["alpha"],
max_decode_length=max_decode_length,
eos_id=self.params["EOS_ID"],
)
# Get the top sequence for each batch element
top_decoded_ids = decoded_ids[:, 0, 1:]
# this isn't particularly efficient
logits = self.decode_pass(top_decoded_ids, encoder_outputs,
encoder_decoder_attention_bias)
return {"logits": logits,
"outputs": [top_decoded_ids],
"final_state": None,
"final_sequence_lengths": None}
|
from typing import Type
from os.path import join
from importlib import import_module
from restiro import Parser, DocumentationRoot
from restiro.helpers import generate_pot
from restiro.generators import BaseGenerator
class Documentor:
def __init__(self, title: str, source_dir: str, base_uri: str=None,
generator_type: str='markdown'):
self.title = title
self.source_dir = source_dir
self.base_uri = base_uri
self.generator_type = generator_type
def initiate_docs_root(self, locale=None):
parsed_resources = Parser.load_from_path(self.source_dir)
docs_root = DocumentationRoot(
title=self.title,
base_uri=self.base_uri,
locale=locale
)
docs_root.resources.update(parsed_resources)
docs_root.load_resource_examples()
return docs_root
@property
def generator(self) -> Type[BaseGenerator]:
try:
module_name = (
'restiro.generators.%s' % self.generator_type
if self.generator_type in ('json', 'markdown') else
'restiro_%s' % self.generator_type
)
mod = import_module(module_name)
for cname in dir(mod):
if not cname.endswith('Generator') or cname == 'BaseGenerator':
continue
return getattr(mod, cname)
        except ImportError:
            raise ValueError('Generator not detected: %s' % self.generator_type)
def generate(self, output_dir: str, locales_dir=None, locale=None):
docs_root = self.initiate_docs_root(locale)
if locale:
docs_root.translate_all(locales_dir, locale)
self.generator(
docs_root=docs_root,
destination_dir=output_dir
).generate()
def generate_gettext(self, gettext_dir):
docs_root = self.initiate_docs_root()
pot_file = join(gettext_dir, 'restiro.pot')
with open(pot_file, 'w+') as f:
f.write(generate_pot(docs_root.extract_translations()))
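# A minimal usage sketch (illustrative; the title, paths and base URI are
# assumptions, not values from this project):
#
#   doc = Documentor(title='My API', source_dir='./docs/source',
#                    base_uri='http://localhost/api', generator_type='markdown')
#   doc.generate(output_dir='./docs/build')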
|
# Simple list of site URLs
# Import external modules
import logging
import os
import webapp2
# Import local modules
import configuration
import httpServer
import user
class SiteList( webapp2.RequestHandler ):
def get(self):
templateValues = { }
httpServer.outputTemplate( 'siteList.html', templateValues, self.response )
# Route URLs to page generators
app = webapp2.WSGIApplication([
('/', SiteList),
])
|
# Copyright (c) OpenMMLab. All rights reserved.
import os.path as osp
from mmcv.utils import TORCH_VERSION, digit_version
from ...dist_utils import master_only
from ..hook import HOOKS
from .base import LoggerHook
import matplotlib.pyplot as plt
from matplotlib import gridspec
import numpy as np
def log_images_to_tensorboard(imgs_list, writer, iter_num):
print('Logging images to tensorboard.')
src_input_img = imgs_list[0]
src_feature = imgs_list[1]
src_pred = imgs_list[2]
src_label = imgs_list[3]
if len(imgs_list) == 8:
trg_input_img = imgs_list[4]
trg_feature = imgs_list[5]
trg_pred = imgs_list[6]
trg_label = imgs_list[7]
else:
trg_input_img = None
trg_feature = None
trg_pred = None
trg_label = None
fig = plt.figure(figsize=(16, 10))
nrow = 2
ncol = 4
gs = gridspec.GridSpec(nrow, ncol,
wspace=0.0, hspace=0.5,
top=1.-0.5/(nrow+1), bottom=0.5/(nrow+1),
left=0.5/(ncol+1), right=1-0.5/(ncol+1))
ax1 = fig.add_subplot(gs[0,0])
if src_input_img is not None:
im1 = ax1.imshow(src_input_img)
ax1.set_yticklabels([])
ax1.set_xticklabels([])
ax1.axis('off')
ax2 = fig.add_subplot(gs[0,1])
if src_feature is not None and isinstance(src_feature, dict):
data = src_feature['src_tsne_result']
gt_label = src_feature['src_tsne_label']
colors_per_class = src_feature['palette']
class_names = src_feature['class_names']
x_min, x_max = np.min(data, 0), np.max(data, 0)
data = (data - x_min) / (x_max - x_min)
tx = data[:, 0]
ty = data[:, 1]
# for every class, we'll add a scatter plot separately
for idx in range(len(colors_per_class)):
# find the samples of the current class in the data
indices = [i for i, l in enumerate(gt_label) if l == idx]
# extract the coordinates of the points of this class only
current_tx = np.take(tx, indices)
current_ty = np.take(ty, indices)
# convert the class color to matplotlib format
color = np.array(colors_per_class[idx], dtype=float) / 255.
color = np.array(color).reshape(1, -1)
# add a scatter plot with the corresponding color and label
ax2.scatter(current_tx, current_ty, c=color, label=class_names[idx])
# ax2.scatter(current_tx, current_ty, c=color)
# ax2.legend(loc='best')
ax2.set_yticklabels([])
ax2.set_xticklabels([])
ax2.axis('off')
ax3 = fig.add_subplot(gs[0,2])
if src_pred is not None:
# ax3.title.set_text('Prediction image: ' + str(src_pred.shape))
im3 = ax3.imshow(src_pred)
ax3.set_yticklabels([])
ax3.set_xticklabels([])
ax3.axis('off')
ax4 = fig.add_subplot(gs[0,3])
if src_label is not None:
# ax4.title.set_text('Label image: ' + str(src_label.shape))
im4 = ax4.imshow(src_label)
ax4.set_yticklabels([])
ax4.set_xticklabels([])
ax4.axis('off')
ax5 = fig.add_subplot(gs[1,0])
if trg_input_img is not None:
im5 = ax5.imshow(trg_input_img)
ax5.set_yticklabels([])
ax5.set_xticklabels([])
ax5.axis('off')
ax6 = fig.add_subplot(gs[1,1])
if trg_feature is not None and isinstance(trg_feature, dict):
data = trg_feature['trg_tsne_result']
gt_label = trg_feature['trg_tsne_label']
colors_per_class = trg_feature['palette']
class_names = trg_feature['class_names']
x_min, x_max = np.min(data, 0), np.max(data, 0)
data = (data - x_min) / (x_max - x_min)
tx = data[:, 0]
ty = data[:, 1]
# for every class, we'll add a scatter plot separately
for idx in range(len(colors_per_class)):
# find the samples of the current class in the data
indices = [i for i, l in enumerate(gt_label) if l == idx]
# extract the coordinates of the points of this class only
current_tx = np.take(tx, indices)
current_ty = np.take(ty, indices)
# convert the class color to matplotlib format
color = np.array(colors_per_class[idx], dtype=float) / 255.
color = np.array(color).reshape(1, -1)
# add a scatter plot with the corresponding color and label
ax6.scatter(current_tx, current_ty, c=color, label=class_names[idx])
# ax6.scatter(current_tx, current_ty, c=color)
# # build a legend using the labels we set previously
# ax6.legend(loc='best')
ax6.set_yticklabels([])
ax6.set_xticklabels([])
ax6.axis('off')
ax7 = fig.add_subplot(gs[1,2])
if trg_pred is not None:
im7 = ax7.imshow(trg_pred)
ax7.set_yticklabels([])
ax7.set_xticklabels([])
ax7.axis('off')
ax8 = fig.add_subplot(gs[1,3])
if trg_label is not None:
im8 = ax8.imshow(trg_label)
ax8.set_yticklabels([])
ax8.set_xticklabels([])
ax8.axis('off')
is_show_imgs = False
if is_show_imgs:
plt.show()
writer.add_figure('figs'+str(iter_num), fig)
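# A sketch of the ``imgs_list`` layout expected above (inferred from the
# unpacking at the top of the function; illustrative only):
#
#   imgs_list = [src_input_img, src_feature, src_pred, src_label]
#   # or, with a target domain:
#   imgs_list = [src_input_img, src_feature, src_pred, src_label,
#                trg_input_img, trg_feature, trg_pred, trg_label]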
@HOOKS.register_module()
class TensorboardLoggerHook(LoggerHook):
"""Class to log metrics to Tensorboard.
Args:
log_dir (string): Save directory location. Default: None. If default
values are used, directory location is ``runner.work_dir``/tf_logs.
        interval (int): Logging interval (every k iterations). Default: 10.
ignore_last (bool): Ignore the log of last iterations in each epoch
if less than `interval`. Default: True.
reset_flag (bool): Whether to clear the output buffer after logging.
Default: False.
by_epoch (bool): Whether EpochBasedRunner is used. Default: True.
"""
def __init__(self,
log_dir=None,
interval=10,
ignore_last=True,
reset_flag=False,
by_epoch=True):
super(TensorboardLoggerHook, self).__init__(interval, ignore_last,
reset_flag, by_epoch)
self.log_dir = log_dir
@master_only
def before_run(self, runner):
super(TensorboardLoggerHook, self).before_run(runner)
if (TORCH_VERSION == 'parrots'
or digit_version(TORCH_VERSION) < digit_version('1.1')):
try:
from tensorboardX import SummaryWriter
except ImportError:
raise ImportError('Please install tensorboardX to use '
'TensorboardLoggerHook.')
else:
try:
from torch.utils.tensorboard import SummaryWriter
except ImportError:
raise ImportError(
'Please run "pip install future tensorboard" to install '
'the dependencies to use torch.utils.tensorboard '
'(applicable to PyTorch 1.1 or higher)')
if self.log_dir is None:
self.log_dir = osp.join(runner.work_dir, 'tf_logs')
self.writer = SummaryWriter(self.log_dir)
@master_only
def log(self, runner):
tags = self.get_loggable_tags(runner, allow_text=True)
iter_num = self.get_iter(runner)
print('In tensorboard, number of iteration: ', iter_num)
for tag, val in tags.items():
if isinstance(val, str):
self.writer.add_text(tag, val, iter_num)
else:
self.writer.add_scalar(tag, val, iter_num)
        is_log_imgs = True
        if is_log_imgs:
            # Image logging could be throttled here, e.g. with
            # `if iter_num % img_log_iter_interval == 0:`
            # Images in imgs are ready to display
            imgs = self.get_loggable_imgs(runner)
            # Log images to tensorboard
            log_images_to_tensorboard(imgs, self.writer, iter_num)
@master_only
def after_run(self, runner):
self.writer.close()
|
# encoding: utf-8
"""
Shader and ShaderProgram wrapper classes for vertex and fragment shaders used
in Interactive Data Visualization
"""
import contextlib
import ctypes
import os
from collections import OrderedDict
import traitlets
import yaml
from OpenGL import GL
from yt.units.yt_array import YTQuantity
from yt.utilities.exceptions import (
YTInvalidShaderType,
YTUnknownUniformKind,
YTUnknownUniformSize,
)
from .opengl_support import GLValue, num_to_const
_NULL_SOURCES = {
"geometry": r"""
#version 330 core
layout ( points ) in;
layout ( points ) out;
void main() {
gl_Position = gl_in[0].gl_Position;
EmitVertex();
}
""",
"vertex": r"""
#version 330 core
// Input vertex data, different for all executions of this shader.
in vec3 vertexPosition_modelspace;
// Output data ; will be interpolated for each fragment.
out vec2 UV;
void main()
{
gl_Position = vec4(vertexPosition_modelspace, 1.0);
UV = (vertexPosition_modelspace.xy+vec2(1.0,1.0))/2.0;
}
""",
"fragment": r"""
#version 330 core
out vec4 color;
void main() {
color = vec4(gl_FragCoord.x, gl_FragCoord.y, gl_FragCoord.z, 1.0);
return;
}
""",
}
class ShaderProgram:
"""
Wrapper class that compiles and links vertex and fragment shaders
into a shader program.
Parameters
----------
vertex_shader : string
or :class:`yt.visualization.volume_rendering.shader_objects.VertexShader`
The vertex shader used in the Interactive Data Visualization pipeline.
fragment_shader : string
or :class:`yt.visualization.volume_rendering.shader_objects.FragmentShader`
The fragment shader used in the Interactive Data Visualization pipeline.
geometry_shader : string
or :class:`yt_idv.shader_objects.GeometryShader`
The geometry shader used in the pipeline; optional.
"""
def __init__(self, vertex_shader=None, fragment_shader=None, geometry_shader=None):
# Don't allow just one. Either neither or both.
if vertex_shader is None and fragment_shader is None:
pass
elif None not in (vertex_shader, fragment_shader):
# Geometry is optional
self.link(vertex_shader, fragment_shader, geometry_shader)
else:
raise RuntimeError
self._uniform_funcs = OrderedDict()
def link(self, vertex_shader, fragment_shader, geometry_shader=None):
        # We allow an optional geometry shader, but not tessellation (yet?)
self.program = GL.glCreateProgram()
if not isinstance(vertex_shader, Shader):
vertex_shader = Shader(source=vertex_shader)
if not isinstance(fragment_shader, Shader):
fragment_shader = Shader(source=fragment_shader)
if geometry_shader is not None and not isinstance(geometry_shader, Shader):
geometry_shader = Shader(source=geometry_shader)
self.vertex_shader = vertex_shader
self.fragment_shader = fragment_shader
self.geometry_shader = geometry_shader
GL.glAttachShader(self.program, vertex_shader.shader)
GL.glAttachShader(self.program, fragment_shader.shader)
if geometry_shader is not None:
GL.glAttachShader(self.program, geometry_shader.shader)
GL.glLinkProgram(self.program)
result = GL.glGetProgramiv(self.program, GL.GL_LINK_STATUS)
if not result:
raise RuntimeError(GL.glGetProgramInfoLog(self.program))
vertex_shader.delete_shader()
fragment_shader.delete_shader()
if geometry_shader is not None:
geometry_shader.delete_shader()
self.introspect()
def introspect(self):
if self.program is None:
raise RuntimeError
# First get all of the uniforms
self.uniforms = {}
self.attributes = {}
if not bool(GL.glGetProgramInterfaceiv):
return
n_uniforms = GL.glGetProgramInterfaceiv(
self.program, GL.GL_UNIFORM, GL.GL_ACTIVE_RESOURCES
)
for i in range(n_uniforms):
name, size, gl_type = GL.glGetActiveUniform(self.program, i)
gl_type = num_to_const[gl_type]
self.uniforms[name.decode("utf-8")] = (size, gl_type)
n_attrib = GL.glGetProgramInterfaceiv(
self.program, GL.GL_PROGRAM_INPUT, GL.GL_ACTIVE_RESOURCES
)
length = ctypes.pointer(ctypes.c_int())
size = ctypes.pointer(ctypes.c_int())
gl_type = ctypes.pointer(ctypes.c_int())
name = ctypes.create_string_buffer(256)
for i in range(n_attrib):
GL.glGetActiveAttrib(self.program, i, 256, length, size, gl_type, name)
gl_const = num_to_const[gl_type[0]]
self.attributes[name[: length[0]].decode("utf-8")] = (size[0], gl_const)
def delete_program(self):
if self.program is not None:
GL.glDeleteProgram(self.program)
self.program = None
def _guess_uniform_func(self, value):
# We make a best-effort guess.
# This does NOT work with arrays of uniforms.
# First look at the dtype kind. Fortunately, this falls into either
# 'f' or 'i', which matches nicely with OpenGL.
# Note that in some implementations, it seems there is also a 'd' type,
# but we will not be using that here.
if isinstance(value, int):
return GL.glUniform1i
elif isinstance(value, (YTQuantity, float)):
return GL.glUniform1f
else:
kind = value.dtype.kind
if kind not in "if":
raise YTUnknownUniformKind(kind)
if value.ndim == 0:
return {"f": GL.glUniform1f, "i": GL.glUniform1i}[kind]
elif value.ndim == 1:
if value.size > 4:
raise YTUnknownUniformSize(value.size)
func = self._set_scalar_uniform(kind, value.size)
elif value.ndim == 2:
if value.shape[0] != value.shape[1]:
raise YTUnknownUniformSize(value.shape)
func = self._set_matrix_uniform(kind, value.shape)
else:
raise YTUnknownUniformSize(value.shape)
return func
def _set_scalar_uniform(self, kind, size_spec):
gl_func = getattr(GL, f"glUniform{size_spec}{kind}v")
def _func(location, value):
return gl_func(location, 1, value)
return _func
def _set_matrix_uniform(self, kind, size_spec):
assert size_spec[0] == size_spec[1]
gl_func = getattr(GL, f"glUniformMatrix{size_spec[0]}{kind}v")
def _func(location, value):
return gl_func(location, 1, GL.GL_TRUE, value)
return _func
def _set_uniform(self, name, value):
# We need to figure out how to pass it in.
if name not in self._uniform_funcs:
self._uniform_funcs[name] = self._guess_uniform_func(value)
loc = GL.glGetUniformLocation(self.program, name)
if loc < 0:
return -1
return self._uniform_funcs[name](loc, value)
@contextlib.contextmanager
def enable(self):
GL.glUseProgram(self.program)
self.vertex_shader.setup_blend()
self.fragment_shader.setup_blend()
yield self
GL.glUseProgram(0)
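# A minimal usage sketch (illustrative; assumes an active OpenGL context and
# valid GLSL sources in `vertex_src` and `fragment_src`):
#
#   program = ShaderProgram(vertex_src, fragment_src)
#   with program.enable() as p:
#       p._set_uniform("alpha", 0.5)
#       # ... issue draw calls here ...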
class Shader(traitlets.HasTraits):
"""
Creates a shader from source
Parameters
----------
source : str
This can either be a string containing a full source of a shader,
an absolute path to a source file or a filename of a shader
residing in the ./shaders/ directory.
"""
_shader = None
source = traitlets.Any()
shader_name = traitlets.CUnicode()
info = traitlets.CUnicode()
shader_type = traitlets.CaselessStrEnum(("vertex", "fragment", "geometry"))
blend_func = traitlets.Tuple(
GLValue(), GLValue(), default_value=("src alpha", "dst alpha")
)
blend_equation = GLValue("func add")
depth_test = GLValue("always")
use_separate_blend = traitlets.Bool(False)
blend_equation_separate = traitlets.Tuple(
GLValue(), GLValue(), default_value=("none", "none")
)
blend_func_separate = traitlets.Tuple(
GLValue(),
GLValue(),
GLValue(),
GLValue(),
default_value=("none", "none", "none", "none"),
)
def _get_source(self, source):
if ";" in source:
# This is probably safe, right? Enh, probably.
return source
# What this does is concatenate multiple (if available) source files.
# This gets around GLSL's composition issues, which means we can have
# functions that get called at each step in a ray tracing process, for
# instance, that can still share ray tracing code between multiple
# files.
if not isinstance(source, (tuple, list)):
source = (source,)
source = ("header.inc.glsl", "known_uniforms.inc.glsl",) + tuple(source)
full_source = []
        for fn in source:
            if os.path.isfile(fn):
                sh_directory = ""
            else:
                sh_directory = os.path.join(os.path.dirname(__file__), "shaders")
                fn = os.path.join(sh_directory, fn)
            if not os.path.isfile(fn):
                raise YTInvalidShaderType(fn)
            with open(fn, "r") as src_file:
                full_source.append(src_file.read())
return "\n\n".join(full_source)
def _enable_null_shader(self):
source = _NULL_SOURCES[self.shader_type]
self.compile(source=source)
def compile(self, source=None, parameters=None):
if source is None:
source = self.source
if source is None:
raise RuntimeError
if parameters is not None:
raise NotImplementedError
source = self._get_source(source)
shader_type_enum = getattr(GL, f"GL_{self.shader_type.upper()}_SHADER")
shader = GL.glCreateShader(shader_type_enum)
# We could do templating here if we wanted.
self.shader_source = source
GL.glShaderSource(shader, source)
GL.glCompileShader(shader)
result = GL.glGetShaderiv(shader, GL.GL_COMPILE_STATUS)
        if not result:
raise RuntimeError(GL.glGetShaderInfoLog(shader))
self._shader = shader
def setup_blend(self):
GL.glEnable(GL.GL_BLEND)
if self.use_separate_blend:
GL.glBlendEquationSeparate(*self.blend_equation_separate)
GL.glBlendFuncSeparate(*self.blend_func_separate)
else:
GL.glBlendEquation(self.blend_equation)
GL.glBlendFunc(*self.blend_func)
GL.glEnable(GL.GL_DEPTH_TEST)
GL.glDepthFunc(self.depth_test)
@property
def shader(self):
if self._shader is None:
try:
self.compile()
except RuntimeError as exc:
print(exc)
for line_num, line in enumerate(self.shader_source.split("\n")):
print(f"{line_num + 1:05}: {line}")
self._enable_null_shader()
return self._shader
def delete_shader(self):
if None not in (self._shader, GL.glDeleteShader):
GL.glDeleteShader(self._shader)
self._shader = None
def __del__(self):
# This is not guaranteed to be called
self.delete_shader()
class ShaderTrait(traitlets.TraitType):
default_value = None
info_text = "A shader (vertex, fragment or geometry)"
def validate(self, obj, value):
if isinstance(value, str):
try:
shader_type = self.metadata.get("shader_type", "vertex")
shader_info = known_shaders[shader_type][value]
shader_info.setdefault("shader_type", shader_type)
shader_info["use_separate_blend"] = bool(
"blend_func_separate" in shader_info
)
shader_info.setdefault("shader_name", value)
shader = Shader(**shader_info)
return shader
except KeyError:
self.error(obj, value)
elif isinstance(value, Shader):
return value
self.error(obj, value)
known_shaders = {}
component_shaders = {}
default_shader_combos = {}
# We'll load our shaders here from shaderlist.yaml
_shlist_fn = os.path.join(os.path.dirname(__file__), "shaders", "shaderlist.yaml")
if os.path.exists(_shlist_fn):
with open(_shlist_fn, "r") as f:
shader_info = yaml.load(f, yaml.SafeLoader)
known_shaders.update(shader_info["shader_definitions"])
component_shaders.update(shader_info["component_shaders"])
default_shader_combos.update(
{_: component_shaders[_].pop("default_value") for _ in component_shaders}
)
|
from cemc.mcmc import Montecarlo, TooFewElementsError
from itertools import combinations_with_replacement, combinations
import numpy as np
import time
class ChemicalPotentialROI(object):
    """
    Class that identifies interesting chemical potentials to study.
    The algorithm estimates the internal energy on a grid of compositions,
    locates the composition with the lowest formation energy, and then solves
    for the chemical potentials at which each set of reference phases has the
    same energy as that structure.
    """
    def __init__(self, atoms, conc_step=0.1, temperature=100, symbols=None):
        self.atoms = atoms
        self.conc_step = conc_step
        self.temperature = temperature
        self.symbols = symbols if symbols is not None else []
self.status_every_sec = 30
def _log(self, msg):
"""
Logging
"""
print(msg)
def _estimate_internal_energy(self, conc, sweeps=2):
"""
Estimates the internal energy of one structure
"""
try:
self.atoms._calc.set_composition(conc)
mc = Montecarlo(self.atoms, self.temperature)
mc.runMC(mode="fixed", steps=sweeps*len(self.atoms), equil=False)
energy = mc.get_thermodynamic()["energy"]
        except TooFewElementsError:
            # The composition cannot be realized; fall back to a fixed,
            # artificially high energy so it is never selected.
            energy = 1.0
        return energy
def _find_energies(self, sweeps=2):
"""
Estimate the energy at all compositions
"""
singlets = self.atoms._calc.get_singlets()
if len(singlets) != len(self.symbols)-1:
msg = "The number of symbols does not match the number of basis singlets\n"
msg += "Number of symbols: {}\n".format(len(self.symbols))
msg += "Number of singlet terms: {}\n".format(len(singlets))
msg += "It should be one more symbol compared to the number of singlets"
raise ValueError(msg)
n_concs = len(singlets)
template_conc = np.linspace(0.0, 1.0, int(1.0/self.conc_step))
result = []
now = time.time()
counter = 0
n_comb = len(template_conc)**len(singlets)
for comp in combinations_with_replacement(template_conc, n_concs):
counter += 1
if np.sum(comp) > 1.0:
continue
if time.time()-now > self.status_every_sec:
self._log("Running composition {} of {}".format(counter,n_comb))
now = time.time()
conc_dict = {symb:value for symb,value in zip(self.symbols[1:],comp)}
conc_dict[self.symbols[0]] = 1.0-np.sum(comp)
energy = self._estimate_internal_energy(conc_dict, sweeps=sweeps)/len(self.atoms)
res = {}
res["conc"] = conc_dict
res["energy"] = energy
cf = self.atoms._calc.get_cf()
singl = {key:value for key,value in cf.items() if key.startswith("c1")}
res["singlets"] = singl
result.append(res)
return result
def chemical_potential_roi(self, sweeps=2):
"""
Finds the chemical potential that makes all structures have the same
energy as the structure with lowest enthalpy of formation
"""
ref_energies = {}
singlets = {}
for ref_symb in self.symbols:
comp = {key:0.0 for key in self.symbols}
comp[ref_symb] = 1.0
self.atoms._calc.set_composition(comp)
ref_energies[ref_symb] = self.atoms._calc.calculate(self.atoms, ["energy"], [])/len(self.atoms)
singl = self.atoms._calc.get_cf()
singl = {key:value for key,value in singl.items() if key.startswith("c1")}
singlets[ref_symb] = singl
energies = self._find_energies(sweeps=sweeps)
e_form = np.zeros(len(energies))
for i,entry in enumerate(energies):
e_form[i] = entry["energy"]
for symb in self.symbols:
e_form[i] -= entry["conc"][symb]*ref_energies[symb]
min_e_form = np.argmin(e_form)
lowest_energy_form = energies[min_e_form]
chemical_potentials = []
N = len(self.symbols)-1
A = np.zeros((N,N))
rhs = np.zeros(N)
key_indx = {key:i for i,key in enumerate(singlets[self.symbols[0]].keys())}
mu_roi = []
for comb in combinations(self.symbols,len(self.symbols)-1):
row = 0
for symb in comb:
for key2,indx2 in key_indx.items():
A[row,indx2] = singlets[symb][key2]-lowest_energy_form["singlets"][key2]
rhs[row] = ref_energies[symb]-lowest_energy_form["energy"]
row += 1
used_pseudo_inverse = False
try:
mu = np.linalg.solve(A,rhs)
except np.linalg.LinAlgError:
inv = np.linalg.pinv(A)
mu = inv.dot(rhs)
used_pseudo_inverse = True
mu_dict = {}
for key,indx in key_indx.items():
mu_dict[key] = mu[indx]
mu_dict["symbs"] = comb
mu_dict["pseudo_inv"] = used_pseudo_inverse
mu_roi.append(mu_dict)
return mu_roi
@staticmethod
def list_match(list1, list2):
"""
Checks if the elements in two lists match
:param list1: First entry
:param list2: Second entry
"""
if len(list1) != len(list2):
return False
for entry in list1:
if entry not in list2:
return False
return True
@staticmethod
def suggest_mu(mu_roi=None, N=10, extend_fraction=0.1, elements=None):
"""
This function suggests mu that can be used for exploring the
parameter space based on the region of interest found by
the function chemical_potential_roi
"""
suggested_sampling_lines = []
names = [key for key in mu_roi[0].keys() if key.startswith("c1")]
if elements is None:
phase_combinations = combinations(mu_roi,2)
else:
if len(elements) != 2:
raise ValueError("You have to specify start and end points!")
phase_combinations = [[]]
for entry in mu_roi:
if ChemicalPotentialROI.list_match(entry["symbs"], elements[0]) or \
ChemicalPotentialROI.list_match(entry["symbs"], elements[1]):
phase_combinations[0].append(entry)
for phase_comb in phase_combinations:
mu_array1 = np.array([phase_comb[0][key] for key in names])
mu_array2 = np.array([phase_comb[1][key] for key in names])
suggested_sampling_lines.append(ChemicalPotentialROI.linear_sampling(mu_array1, mu_array2, N, extend_fraction=extend_fraction))
return suggested_sampling_lines, names
@staticmethod
def linear_sampling(start, stop, N, extend_fraction=0.1):
"""
Construct one array with linear sampling from start to stop
"""
        diff = stop-start
mu = np.zeros((N,len(diff)))
start = start-extend_fraction*diff
stop = stop+extend_fraction*diff
# Update the difference
diff = stop-start
step = diff/(N-1)
for i in range(N):
mu[i,:] = start + step*i
return mu
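# A minimal usage sketch (illustrative; `bc_atoms` is an assumed atoms object
# with an attached cluster-expansion calculator):
#
#   roi = ChemicalPotentialROI(bc_atoms, conc_step=0.2, temperature=300,
#                              symbols=["Al", "Mg"])
#   mu_roi = roi.chemical_potential_roi(sweeps=2)
#   lines, names = ChemicalPotentialROI.suggest_mu(mu_roi=mu_roi, N=10)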
|
import re
from . import Encoder
class DelimitedNumberEncoder(Encoder):
"""
This encoder can en-/decode numbers with delimiters.
While the prefix and suffix will be restored upon decoding, the delimiters will be removed.
Prefix and suffix can contain arbitrary characters. Delimiters are non-word characters only.
    If the number contains exactly one `.` or `,`, it is treated as a float.
    Otherwise it is treated as an int.
>>> enc = DelimitedNumberEncoder()
>>> enc.decode(*enc.encode('12,18 €'))
'12,18 €'
>>> v, ctx = enc.encode('12,18 €')
>>> enc.decode(v + 2.1, ctx)
'14,28 €'
"""
delimited_number_regex = re.compile(r"^(\D*?)(-?((\d+)(\W*?))+)(\D*)$")
def encode(self, value):
match = DelimitedNumberEncoder.delimited_number_regex.match(value)
if not match:
raise ValueError("Invalid input value: does not match pattern for delimited numbers")
prefix = match.group(1)
suffix = match.group(6)
delimited_number = match.group(2)
# We define a number as float if `.` or `,` occurs exactly once.
float_delimiter = "." if delimited_number.count(".") == 1 else "," if delimited_number.count(",") == 1 else None
# Replace float delimiter `,` by `.` if necessary.
if float_delimiter == ",":
delimited_number = delimited_number.replace(".", "").replace(",", ".")
# Remove all delimiters, parse as float.
sign = "-" if delimited_number.startswith("-") else ""
number_s = sign + "".join(re.findall(r"[\d.]+", delimited_number))
precision = len(number_s) - number_s.index(".") - 1 if float_delimiter is not None else 0
number = float(number_s)
ctx = dict(prefix=prefix, suffix=suffix, float_delimiter=float_delimiter, precision=precision)
return number, ctx
def decode(self, value, ctx):
try:
prefix, suffix, float_delimiter, precision = ctx["prefix"], ctx["suffix"], ctx["float_delimiter"], ctx["precision"]
except (TypeError, KeyError):
raise ValueError("Invalid context")
        # Format with the recorded precision; precision 0 renders an int-like string.
        number = f"{value:.{precision}f}"
if float_delimiter == ",":
number = number.replace(".", ",")
return prefix + number + suffix
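# Minimal self-check sketch (added; not part of the original module). Run via
# "python -m <package>.<module>" so the relative Encoder import resolves.
if __name__ == "__main__":
    import doctest
    doctest.testmod()
    enc = DelimitedNumberEncoder()
    # Two commas -> treated as an int; delimiters are dropped on decode.
    v, ctx = enc.encode("1,234,567 items")
    print(v)                   # 1234567.0
    print(enc.decode(v, ctx))  # 1234567 items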
|
import requests
def get_dl(url):
    """Fetch a URL and return the response, or 0 if the request did not succeed."""
    with requests.Session() as session:
        download = session.get(url)
    if download.status_code == 200:
        return download
    return 0
def get_new_dls_(path_pods,sub_folder,new_titles):
success = []
failure = []
#iterate through each episode
for ea in new_titles:
title = ea[0]
episode = ea[1]
links = ea[2]
        dl = ''
        #if multiple candidate links are associated with the download
        if isinstance(links, list):
            #fetch every candidate link; get_dl returns 0 on failure
            candidates_dl = [get_dl(link) for link in links]
            #keep only successful downloads so indices stay aligned with sizes
            valid_dl = [each_dl for each_dl in candidates_dl if each_dl != 0]
            lengths_dl = [len(each_dl.content) for each_dl in valid_dl]
            #the file to keep is the largest successful download, if any
            if lengths_dl:
                dl = valid_dl[lengths_dl.index(max(lengths_dl))]
            else:
                dl = 0
        else:
            dl = get_dl(links)
        #save the file (dl is 0 when every candidate failed)
        save_file = False
        if dl != 0:
            file_name = path_pods + sub_folder + '//' + title + '.mp3'
            with open(file_name,'wb') as f:
                f.write(dl.content)
            save_file = True
            with open(path_pods + sub_folder + '//pod_log.txt','a') as f:
                f.write(title + '\n')
        #record whether the download succeeded or failed
        if save_file:
            success.append(ea)
            print('Successful download: {}'.format(len(success)))
        else:
            failure.append(ea)
            print('Failure download: {}'.format(len(failure)))
return success,failure
if __name__ == '__main__':
pass
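    # Hypothetical usage sketch (paths, folder name, and episode tuples are
    # placeholders, not from the original script):
    # new_titles = [('episode-01', 1, ['https://example.com/a.mp3',
    #                                  'https://example.com/b.mp3'])]
    # success, failure = get_new_dls_('/podcasts/', 'my_show', new_titles)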
|
#-------------------------------------------------------------------------------
# Name: Module 1
# Purpose:
#
# Author: zhx
#
# Created: 17/05/2016
# Copyright: (c) zhx 2016
# Licence: <your licence>
#-------------------------------------------------------------------------------
import openpyxl
def main():
cctv_data = openpyxl.load_workbook('train.xlsx')
cctv_new = openpyxl.Workbook()
new_sheet = cctv_new.active
sheet = cctv_data["Sheet1"]
cnt = 2
    for r in range(2, 4748):
        print(r)
        min_min = sheet.cell(row=r, column=9).value
        #skip rows whose minimum value is missing or the 99999 sentinel
        if min_min is None or min_min == 99999:
            continue
        #copy columns 1-18 unchanged, casting the value in column 4 to int
        for c in range(1, 19):
            value = sheet.cell(row=r, column=c).value
            if c == 4:
                value = int(value)
            new_sheet.cell(row=cnt, column=c).value = value
        cnt += 1
cctv_new.save("trainnew.xlsx")
if __name__ == '__main__':
    main()
|
#
# PySNMP MIB module UPPHONEDOTCOM-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/UPPHONEDOTCOM-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:21:31 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ValueSizeConstraint, ConstraintsIntersection, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueSizeConstraint", "ConstraintsIntersection", "SingleValueConstraint", "ValueRangeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Bits, TimeTicks, Counter32, MibIdentifier, iso, IpAddress, NotificationType, Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, enterprises, Counter64, ModuleIdentity, ObjectIdentity, NotificationType, Unsigned32, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "TimeTicks", "Counter32", "MibIdentifier", "iso", "IpAddress", "NotificationType", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "enterprises", "Counter64", "ModuleIdentity", "ObjectIdentity", "NotificationType", "Unsigned32", "Gauge32")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
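# Note (added): pysmi-generated MIB modules like this one are executed by
# pysnmp's MibBuilder (e.g. mibBuilder.loadModules('UPPHONEDOTCOM-MIB')),
# which supplies the mibBuilder object used below; the file is not meant to
# be imported directly.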
phoneDotCom = MibIdentifier((1, 3, 6, 1, 4, 1, 1900))
systems = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4))
upiInit = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 1))
upiInitDescriptionTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 1, 1), )
if mibBuilder.loadTexts: upiInitDescriptionTable.setStatus('mandatory')
upiInitDescriptionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 1, 1, 1), ).setIndexNames((0, "UPPHONEDOTCOM-MIB", "upidInitIpAddr"), (0, "UPPHONEDOTCOM-MIB", "upidInitProcessId"))
if mibBuilder.loadTexts: upiInitDescriptionEntry.setStatus('mandatory')
upidInitIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 1, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upidInitIpAddr.setStatus('mandatory')
upidInitProcessId = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upidInitProcessId.setStatus('mandatory')
upidInitVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 1, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upidInitVersion.setStatus('mandatory')
upidInitProcessType = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 1, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upidInitProcessType.setStatus('mandatory')
upidInitHostName = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 1, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upidInitHostName.setStatus('mandatory')
upidInitStartupTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 1, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upidInitStartupTime.setStatus('mandatory')
upiInitStats = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2))
upiInitChildProcessTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 1), )
if mibBuilder.loadTexts: upiInitChildProcessTable.setStatus('mandatory')
upiInitChildProcessEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 1, 1), ).setIndexNames((0, "UPPHONEDOTCOM-MIB", "upipInitIpAddr"), (0, "UPPHONEDOTCOM-MIB", "upipInitProcessId"), (0, "UPPHONEDOTCOM-MIB", "upipChildProcessId"))
if mibBuilder.loadTexts: upiInitChildProcessEntry.setStatus('mandatory')
upipInitIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 1, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upipInitIpAddr.setStatus('mandatory')
upipInitProcessType = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upipInitProcessType.setStatus('mandatory')
upipInitProcessId = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upipInitProcessId.setStatus('mandatory')
upipChildProcessId = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upipChildProcessId.setStatus('mandatory')
upipChildProcessType = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 1, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upipChildProcessType.setStatus('mandatory')
upipChildProcessIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 1, 1, 6), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upipChildProcessIpAddr.setStatus('mandatory')
upipChildProcessHostName = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 1, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upipChildProcessHostName.setStatus('mandatory')
upipChildProcessExePath = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 1, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upipChildProcessExePath.setStatus('mandatory')
upipChildProcessExeArgs = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 1, 1, 9), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upipChildProcessExeArgs.setStatus('mandatory')
upipChildProcessState = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 1, 1, 10), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upipChildProcessState.setStatus('mandatory')
upipChildProcessStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 1, 1, 11), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upipChildProcessStatus.setStatus('mandatory')
upipChildProcessStartTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 1, 1, 12), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upipChildProcessStartTime.setStatus('mandatory')
upipChildProcessStopTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 1, 1, 13), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upipChildProcessStopTime.setStatus('mandatory')
upiInitChildProcessStatsTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 2), )
if mibBuilder.loadTexts: upiInitChildProcessStatsTable.setStatus('mandatory')
upiInitChildProcessStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 2, 1), ).setIndexNames((0, "UPPHONEDOTCOM-MIB", "upipsInitIpAddr"), (0, "UPPHONEDOTCOM-MIB", "upipsInitProcessId"), (0, "UPPHONEDOTCOM-MIB", "upipsChildProcessType"))
if mibBuilder.loadTexts: upiInitChildProcessStatsEntry.setStatus('mandatory')
upipsInitIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 2, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upipsInitIpAddr.setStatus('mandatory')
upipsInitProcessId = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upipsInitProcessId.setStatus('mandatory')
upipsChildProcessType = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 2, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upipsChildProcessType.setStatus('mandatory')
upipsInitProcessType = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 2, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upipsInitProcessType.setStatus('mandatory')
upipsChildProcessesStarted = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 2, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upipsChildProcessesStarted.setStatus('mandatory')
upipsChildProcessesDied = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 2, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upipsChildProcessesDied.setStatus('mandatory')
upipsChildProcessesRunning = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 1, 2, 2, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upipsChildProcessesRunning.setStatus('mandatory')
upiInitTrapInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 1, 20))
upitTrapInfo = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 1, 20, 1), DisplayString())
if mibBuilder.loadTexts: upitTrapInfo.setStatus('optional')
upitChildProcessHostName = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 1, 20, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upitChildProcessHostName.setStatus('mandatory')
upitChildProcessType = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 1, 20, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upitChildProcessType.setStatus('mandatory')
upitChildProcessId = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 1, 20, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upitChildProcessId.setStatus('mandatory')
upiChildProcessStart = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 1) + (0,300)).setObjects(("UPPHONEDOTCOM-MIB", "upidInitHostName"), ("UPPHONEDOTCOM-MIB", "upidInitProcessType"), ("UPPHONEDOTCOM-MIB", "upidInitProcessId"), ("UPPHONEDOTCOM-MIB", "upitChildProcessHostName"), ("UPPHONEDOTCOM-MIB", "upitChildProcessType"), ("UPPHONEDOTCOM-MIB", "upitChildProcessId"), ("UPPHONEDOTCOM-MIB", "upitTrapInfo"))
upiChildProcessShutdown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 1) + (0,301)).setObjects(("UPPHONEDOTCOM-MIB", "upidInitHostName"), ("UPPHONEDOTCOM-MIB", "upidInitProcessType"), ("UPPHONEDOTCOM-MIB", "upidInitProcessId"), ("UPPHONEDOTCOM-MIB", "upitChildProcessHostName"), ("UPPHONEDOTCOM-MIB", "upitChildProcessType"), ("UPPHONEDOTCOM-MIB", "upitChildProcessId"), ("UPPHONEDOTCOM-MIB", "upitTrapInfo"))
upiInitFailToStart = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 1) + (0,302)).setObjects(("UPPHONEDOTCOM-MIB", "upidInitHostName"), ("UPPHONEDOTCOM-MIB", "upidInitProcessType"), ("UPPHONEDOTCOM-MIB", "upidInitProcessId"), ("UPPHONEDOTCOM-MIB", "upitTrapInfo"))
upiInitShutdown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 1) + (0,303)).setObjects(("UPPHONEDOTCOM-MIB", "upidInitHostName"), ("UPPHONEDOTCOM-MIB", "upidInitProcessType"), ("UPPHONEDOTCOM-MIB", "upidInitProcessId"), ("UPPHONEDOTCOM-MIB", "upitTrapInfo"))
upiAllChildProcessesStop = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 1) + (0,304)).setObjects(("UPPHONEDOTCOM-MIB", "upidInitHostName"), ("UPPHONEDOTCOM-MIB", "upidInitProcessType"), ("UPPHONEDOTCOM-MIB", "upidInitProcessId"), ("UPPHONEDOTCOM-MIB", "upitTrapInfo"))
upiAllChildProcessesRestart = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 1) + (0,305)).setObjects(("UPPHONEDOTCOM-MIB", "upidInitHostName"), ("UPPHONEDOTCOM-MIB", "upidInitProcessType"), ("UPPHONEDOTCOM-MIB", "upidInitProcessId"), ("UPPHONEDOTCOM-MIB", "upitTrapInfo"))
upiDatabaseConnectionDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 1) + (0,306)).setObjects(("UPPHONEDOTCOM-MIB", "upidInitHostName"), ("UPPHONEDOTCOM-MIB", "upidInitProcessType"), ("UPPHONEDOTCOM-MIB", "upidInitProcessId"), ("UPPHONEDOTCOM-MIB", "upitTrapInfo"))
upiDatabaseConnectionUp = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 1) + (0,307)).setObjects(("UPPHONEDOTCOM-MIB", "upidInitHostName"), ("UPPHONEDOTCOM-MIB", "upidInitProcessType"), ("UPPHONEDOTCOM-MIB", "upidInitProcessId"), ("UPPHONEDOTCOM-MIB", "upitTrapInfo"))
upiChildProcessFailToStart = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 1) + (0,308)).setObjects(("UPPHONEDOTCOM-MIB", "upidInitHostName"), ("UPPHONEDOTCOM-MIB", "upidInitProcessType"), ("UPPHONEDOTCOM-MIB", "upidInitProcessId"), ("UPPHONEDOTCOM-MIB", "upitChildProcessType"), ("UPPHONEDOTCOM-MIB", "upitTrapInfo"))
upiNoChildProcess = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 1) + (0,309)).setObjects(("UPPHONEDOTCOM-MIB", "upidInitHostName"), ("UPPHONEDOTCOM-MIB", "upidInitProcessType"), ("UPPHONEDOTCOM-MIB", "upidInitProcessId"), ("UPPHONEDOTCOM-MIB", "upitTrapInfo"))
upiChildProcessesBelowMinimum = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 1) + (0,310)).setObjects(("UPPHONEDOTCOM-MIB", "upidInitHostName"), ("UPPHONEDOTCOM-MIB", "upidInitProcessType"), ("UPPHONEDOTCOM-MIB", "upidInitProcessId"), ("UPPHONEDOTCOM-MIB", "upitChildProcessType"), ("UPPHONEDOTCOM-MIB", "upitTrapInfo"))
upLink = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2))
upLinkProcesses = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1))
uplDispatcher = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 1))
uplDispatcherDescription = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 1, 1))
upldHostName = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 1, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upldHostName.setStatus('mandatory')
upldProcessId = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upldProcessId.setStatus('mandatory')
upldPortNumber = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upldPortNumber.setStatus('mandatory')
upldStartUpTime = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 1, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upldStartUpTime.setStatus('mandatory')
upldState = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("regular", 1), ("redirect", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: upldState.setStatus('mandatory')
uplDispatcherStats = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 1, 2))
upldRequestsReceived = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 1, 2, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upldRequestsReceived.setStatus('mandatory')
upldRequestsDropped = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 1, 2, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upldRequestsDropped.setStatus('mandatory')
upldUplAgentsLoaded = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 1, 2, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upldUplAgentsLoaded.setStatus('mandatory')
upldUplAgentsDisconnected = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 1, 2, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upldUplAgentsDisconnected.setStatus('mandatory')
upldSubscribersLoaded = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 1, 2, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upldSubscribersLoaded.setStatus('mandatory')
upldKeyExchanges = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 1, 2, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upldKeyExchanges.setStatus('mandatory')
uplDispatcherTrapInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 1, 20))
upldTrapInfo = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 1, 20, 1), DisplayString())
if mibBuilder.loadTexts: upldTrapInfo.setStatus('optional')
upldUplAgentId = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 1, 20, 2), Integer32())
if mibBuilder.loadTexts: upldUplAgentId.setStatus('optional')
upldStartup = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,100)).setObjects(("UPPHONEDOTCOM-MIB", "upldHostName"), ("UPPHONEDOTCOM-MIB", "upldProcessId"), ("UPPHONEDOTCOM-MIB", "upldTrapInfo"))
upldShutdown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,101)).setObjects(("UPPHONEDOTCOM-MIB", "upldHostName"), ("UPPHONEDOTCOM-MIB", "upldProcessId"), ("UPPHONEDOTCOM-MIB", "upldTrapInfo"))
upldInvalidConfig = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,102)).setObjects(("UPPHONEDOTCOM-MIB", "upldHostName"), ("UPPHONEDOTCOM-MIB", "upldProcessId"), ("UPPHONEDOTCOM-MIB", "upldTrapInfo"))
upldUplAgentConnectionDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,103)).setObjects(("UPPHONEDOTCOM-MIB", "upldHostName"), ("UPPHONEDOTCOM-MIB", "upldProcessId"), ("UPPHONEDOTCOM-MIB", "upldUplAgentId"), ("UPPHONEDOTCOM-MIB", "upldTrapInfo"))
upldDatabaseConnectionDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,104)).setObjects(("UPPHONEDOTCOM-MIB", "upldHostName"), ("UPPHONEDOTCOM-MIB", "upldProcessId"), ("UPPHONEDOTCOM-MIB", "upldTrapInfo"))
upldOutOfResouce = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,105)).setObjects(("UPPHONEDOTCOM-MIB", "upldHostName"), ("UPPHONEDOTCOM-MIB", "upldProcessId"), ("UPPHONEDOTCOM-MIB", "upldTrapInfo"))
upldUplAgentConnectionUp = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,106)).setObjects(("UPPHONEDOTCOM-MIB", "upldHostName"), ("UPPHONEDOTCOM-MIB", "upldProcessId"), ("UPPHONEDOTCOM-MIB", "upldUplAgentId"), ("UPPHONEDOTCOM-MIB", "upldTrapInfo"))
upldDatabaseConnectionUp = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,107)).setObjects(("UPPHONEDOTCOM-MIB", "upldHostName"), ("UPPHONEDOTCOM-MIB", "upldProcessId"), ("UPPHONEDOTCOM-MIB", "upldTrapInfo"))
uplAgent = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2))
uplAgentDescriptionTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 1), )
if mibBuilder.loadTexts: uplAgentDescriptionTable.setStatus('mandatory')
uplAgentDescriptionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 1, 1), ).setIndexNames((0, "UPPHONEDOTCOM-MIB", "uplaAgentIdentifier"))
if mibBuilder.loadTexts: uplAgentDescriptionEntry.setStatus('mandatory')
uplaAgentIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaAgentIdentifier.setStatus('mandatory')
uplaHostName = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaHostName.setStatus('mandatory')
uplaProcessId = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaProcessId.setStatus('mandatory')
uplaStartUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 1, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaStartUpTime.setStatus('mandatory')
uplAgentProxyStats = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2))
uplAgentWebAccessStatsTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 1), )
if mibBuilder.loadTexts: uplAgentWebAccessStatsTable.setStatus('mandatory')
uplAgentWebAccessStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 1, 1), ).setIndexNames((0, "UPPHONEDOTCOM-MIB", "uplawsAgentIdentifier"))
if mibBuilder.loadTexts: uplAgentWebAccessStatsEntry.setStatus('mandatory')
uplawsAgentIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplawsAgentIdentifier.setStatus('mandatory')
uplaHttpRequestsStarted = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 1, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaHttpRequestsStarted.setStatus('mandatory')
uplaHttpRequestsSucceeded = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaHttpRequestsSucceeded.setStatus('mandatory')
uplaHttpMeanResponseTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaHttpMeanResponseTime.setStatus('mandatory')
uplaHttpDeviationOfResponseTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaHttpDeviationOfResponseTime.setStatus('mandatory')
uplaHttpsRequestsStarted = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 1, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaHttpsRequestsStarted.setStatus('mandatory')
uplaHttpsRequestsSucceeded = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaHttpsRequestsSucceeded.setStatus('mandatory')
uplaHttpsMeanResponseTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaHttpsMeanResponseTime.setStatus('mandatory')
uplaHttpsDeviationOfResponseTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaHttpsDeviationOfResponseTime.setStatus('mandatory')
uplAgentErrorStatsSummaryTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 2), )
if mibBuilder.loadTexts: uplAgentErrorStatsSummaryTable.setStatus('mandatory')
uplAgentErrorStatsSummaryEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 2, 1), ).setIndexNames((0, "UPPHONEDOTCOM-MIB", "uplaesAgentIdentifier"))
if mibBuilder.loadTexts: uplAgentErrorStatsSummaryEntry.setStatus('mandatory')
uplaesAgentIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaesAgentIdentifier.setStatus('mandatory')
uplaTotalErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 2, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaTotalErrors.setStatus('mandatory')
uplaSilentErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 2, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaSilentErrors.setStatus('mandatory')
uplaDeviceErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 2, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaDeviceErrors.setStatus('mandatory')
uplaKeyErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 2, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaKeyErrors.setStatus('mandatory')
uplaSessionErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 2, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaSessionErrors.setStatus('mandatory')
uplaTransactionErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 2, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaTransactionErrors.setStatus('mandatory')
uplaOtherErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 2, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaOtherErrors.setStatus('mandatory')
uplAgentErrorStatsDetailTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 3), )
if mibBuilder.loadTexts: uplAgentErrorStatsDetailTable.setStatus('mandatory')
uplAgentErrorStatsDetailEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 3, 1), ).setIndexNames((0, "UPPHONEDOTCOM-MIB", "uplaedAgentIdentifier"), (0, "UPPHONEDOTCOM-MIB", "uplaErrorCode"))
if mibBuilder.loadTexts: uplAgentErrorStatsDetailEntry.setStatus('mandatory')
uplaedAgentIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaedAgentIdentifier.setStatus('mandatory')
uplaErrorCode = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 3, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaErrorCode.setStatus('mandatory')
uplaErrorName = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 3, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaErrorName.setStatus('mandatory')
uplaErrorSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 3, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaErrorSeverity.setStatus('optional')
uplaErrorClass = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 3, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("silent", 1), ("device", 2), ("key", 3), ("session", 4), ("transaction", 5), ("other", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaErrorClass.setStatus('mandatory')
uplaErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 2, 3, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaErrorCount.setStatus('mandatory')
uplHdtpStats = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3))
uplAgentSessionStatsTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 1), )
if mibBuilder.loadTexts: uplAgentSessionStatsTable.setStatus('mandatory')
uplAgentSessionStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 1, 1), ).setIndexNames((0, "UPPHONEDOTCOM-MIB", "uplassAgentIdentifier"))
if mibBuilder.loadTexts: uplAgentSessionStatsEntry.setStatus('mandatory')
uplassAgentIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplassAgentIdentifier.setStatus('mandatory')
uplaActiveSessions = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 1, 1, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaActiveSessions.setStatus('mandatory')
uplaEncryptedSessions = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 1, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaEncryptedSessions.setStatus('mandatory')
uplaProtoSessions = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 1, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaProtoSessions.setStatus('mandatory')
uplaSessionsStarted = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaSessionsStarted.setStatus('mandatory')
uplaSessionsSucceeded = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 1, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaSessionsSucceeded.setStatus('mandatory')
uplaKeyExchanges = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaKeyExchanges.setStatus('mandatory')
uplAgentAirLinkStatsTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 2), )
if mibBuilder.loadTexts: uplAgentAirLinkStatsTable.setStatus('deprecated')
uplAgentAirLinkStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 2, 1), ).setIndexNames((0, "UPPHONEDOTCOM-MIB", "uplaasAgentIdentifier"))
if mibBuilder.loadTexts: uplAgentAirLinkStatsEntry.setStatus('deprecated')
uplaasAgentIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaasAgentIdentifier.setStatus('deprecated')
uplaRequestsReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 2, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaRequestsReceived.setStatus('deprecated')
uplaRequestsDropped = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 2, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaRequestsDropped.setStatus('deprecated')
uplaRequestsDuplicated = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 2, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaRequestsDuplicated.setStatus('deprecated')
uplaRequestsNotValid = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 2, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaRequestsNotValid.setStatus('deprecated')
uplaRepliesDelivered = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 2, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaRepliesDelivered.setStatus('deprecated')
uplaRepliesTimedOut = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 2, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaRepliesTimedOut.setStatus('deprecated')
uplAgentTransactionStatsTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 3), )
if mibBuilder.loadTexts: uplAgentTransactionStatsTable.setStatus('mandatory')
uplAgentTransactionStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 3, 1), ).setIndexNames((0, "UPPHONEDOTCOM-MIB", "uplatsAgentIdentifier"))
if mibBuilder.loadTexts: uplAgentTransactionStatsEntry.setStatus('mandatory')
uplatsAgentIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplatsAgentIdentifier.setStatus('mandatory')
uplaTransactionsActive = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 3, 1, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaTransactionsActive.setStatus('mandatory')
uplaTransactionsStarted = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 3, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaTransactionsStarted.setStatus('mandatory')
uplaTransactionsSucceeded = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 3, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaTransactionsSucceeded.setStatus('mandatory')
uplaMeanTransactionLife = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 3, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaMeanTransactionLife.setStatus('mandatory')
uplaDeviationOfTransactionLife = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 3, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaDeviationOfTransactionLife.setStatus('mandatory')
uplaMeanResponseTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 3, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaMeanResponseTime.setStatus('mandatory')
uplaDeviationOfResponseTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 3, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaDeviationOfResponseTime.setStatus('mandatory')
uplaMeanRetriesPerThousandTxn = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 3, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaMeanRetriesPerThousandTxn.setStatus('mandatory')
uplaDeviationOfRetriesPTTxn = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 3, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaDeviationOfRetriesPTTxn.setStatus('mandatory')
uplAgentLimitedResourceTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 6), )
if mibBuilder.loadTexts: uplAgentLimitedResourceTable.setStatus('deprecated')
uplAgentLimitedResourceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 3, 6, 1), )
if mibBuilder.loadTexts: uplAgentLimitedResourceEntry.setStatus('deprecated')
uplaWapStats = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4))
uplAgentWapWSPSessionStatsTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 1), )
if mibBuilder.loadTexts: uplAgentWapWSPSessionStatsTable.setStatus('mandatory')
uplAgentWapSessionStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 1, 1), ).setIndexNames((0, "UPPHONEDOTCOM-MIB", "uplawssAgentIdentifier"))
if mibBuilder.loadTexts: uplAgentWapSessionStatsEntry.setStatus('mandatory')
uplawssAgentIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplawssAgentIdentifier.setStatus('mandatory')
uplaActiveWapSessions = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 1, 1, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaActiveWapSessions.setStatus('mandatory')
uplaWapSessionsStarted = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaWapSessionsStarted.setStatus('mandatory')
uplAgentWapWTPTransactionStatsTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 2), )
if mibBuilder.loadTexts: uplAgentWapWTPTransactionStatsTable.setStatus('mandatory')
uplAgentWapTransactionStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 2, 1), ).setIndexNames((0, "UPPHONEDOTCOM-MIB", "uplawtsAgentIdentifier"))
if mibBuilder.loadTexts: uplAgentWapTransactionStatsEntry.setStatus('mandatory')
uplawtsAgentIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplawtsAgentIdentifier.setStatus('mandatory')
uplaWapInvokeTpdus = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 2, 1, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaWapInvokeTpdus.setStatus('mandatory')
uplaWapResultTpdus = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 2, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaWapResultTpdus.setStatus('mandatory')
uplaWapAbortTransaction = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 2, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaWapAbortTransaction.setStatus('mandatory')
uplAgentWapErrorStatsSummaryTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 3), )
if mibBuilder.loadTexts: uplAgentWapErrorStatsSummaryTable.setStatus('mandatory')
uplAgentWapErrorStatsSummaryEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 3, 1), ).setIndexNames((0, "UPPHONEDOTCOM-MIB", "uplawesAgentIdentifier"))
if mibBuilder.loadTexts: uplAgentWapErrorStatsSummaryEntry.setStatus('mandatory')
uplawesAgentIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplawesAgentIdentifier.setStatus('mandatory')
uplaTotalWapErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 3, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaTotalWapErrors.setStatus('mandatory')
uplaOtherWapErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 3, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaOtherWapErrors.setStatus('mandatory')
uplaSessionWapErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 3, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaSessionWapErrors.setStatus('mandatory')
uplaTransactionWapErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 3, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaTransactionWapErrors.setStatus('mandatory')
uplAgentWapErrorStatsDetailTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 4), )
if mibBuilder.loadTexts: uplAgentWapErrorStatsDetailTable.setStatus('mandatory')
uplAgentWapErrorStatsDetailEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 4, 1), ).setIndexNames((0, "UPPHONEDOTCOM-MIB", "uplaweAgentIdentifier"), (0, "UPPHONEDOTCOM-MIB", "uplaWapErrorCode"))
if mibBuilder.loadTexts: uplAgentWapErrorStatsDetailEntry.setStatus('mandatory')
uplaweAgentIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaweAgentIdentifier.setStatus('mandatory')
uplaWapErrorCode = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 4, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaWapErrorCode.setStatus('mandatory')
uplaWapErrorName = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 4, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaWapErrorName.setStatus('mandatory')
uplaWapErrorSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 4, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaWapErrorSeverity.setStatus('optional')
uplaWapErrorClass = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 4, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("session", 2), ("txn", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaWapErrorClass.setStatus('mandatory')
uplaWapErrorCount = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 4, 4, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplaWapErrorCount.setStatus('mandatory')
uplaStackServiceStats = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 5))
uplAgentStackServiceTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 5, 1), )
if mibBuilder.loadTexts: uplAgentStackServiceTable.setStatus('mandatory')
uplAgentStackServiceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 5, 1, 1), ).setIndexNames((0, "UPPHONEDOTCOM-MIB", "uplasstAgentIdentifier"), (0, "UPPHONEDOTCOM-MIB", "uplAgentStackServiceIdentifier"))
if mibBuilder.loadTexts: uplAgentStackServiceEntry.setStatus('mandatory')
uplasstAgentIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 5, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplasstAgentIdentifier.setStatus('mandatory')
uplAgentStackServiceIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 5, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplAgentStackServiceIdentifier.setStatus('mandatory')
uplAgentStackServiceAppProtoName = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 5, 1, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplAgentStackServiceAppProtoName.setStatus('mandatory')
uplAgentStackServiceName = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 5, 1, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplAgentStackServiceName.setStatus('mandatory')
uplAgentStackServiceLoaded = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 5, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("notloaded", 1), ("loaded", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplAgentStackServiceLoaded.setStatus('mandatory')
uplAgentStackServiceAdaptorThreads = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 5, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplAgentStackServiceAdaptorThreads.setStatus('mandatory')
uplAgentStackServiceWDPPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 5, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplAgentStackServiceWDPPortNumber.setStatus('mandatory')
uplAgentTrapInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 20))
uplaTrapInfo = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 2, 20, 1), DisplayString())
if mibBuilder.loadTexts: uplaTrapInfo.setStatus('optional')
uplaStartup = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,200)).setObjects(("UPPHONEDOTCOM-MIB", "uplaAgentIdentifier"), ("UPPHONEDOTCOM-MIB", "uplaHostName"), ("UPPHONEDOTCOM-MIB", "uplaProcessId"), ("UPPHONEDOTCOM-MIB", "uplaTrapInfo"))
uplaShutdown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,201)).setObjects(("UPPHONEDOTCOM-MIB", "uplaAgentIdentifier"), ("UPPHONEDOTCOM-MIB", "uplaHostName"), ("UPPHONEDOTCOM-MIB", "uplaProcessId"), ("UPPHONEDOTCOM-MIB", "uplaTrapInfo"))
uplaDatabaseConnectionDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,202)).setObjects(("UPPHONEDOTCOM-MIB", "uplaAgentIdentifier"), ("UPPHONEDOTCOM-MIB", "uplaHostName"), ("UPPHONEDOTCOM-MIB", "uplaProcessId"), ("UPPHONEDOTCOM-MIB", "uplaTrapInfo"))
uplaFaxMgrConnectionDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,203)).setObjects(("UPPHONEDOTCOM-MIB", "uplaAgentIdentifier"), ("UPPHONEDOTCOM-MIB", "uplaHostName"), ("UPPHONEDOTCOM-MIB", "uplaProcessId"), ("UPPHONEDOTCOM-MIB", "uplaTrapInfo"))
uplaMessengerConnectionDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,204)).setObjects(("UPPHONEDOTCOM-MIB", "uplaAgentIdentifier"), ("UPPHONEDOTCOM-MIB", "uplaHostName"), ("UPPHONEDOTCOM-MIB", "uplaProcessId"), ("UPPHONEDOTCOM-MIB", "uplaTrapInfo"))
uplaInvalidConfig = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,205)).setObjects(("UPPHONEDOTCOM-MIB", "uplaAgentIdentifier"), ("UPPHONEDOTCOM-MIB", "uplaHostName"), ("UPPHONEDOTCOM-MIB", "uplaProcessId"), ("UPPHONEDOTCOM-MIB", "uplaTrapInfo"))
uplaInternalFatalErrors = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,206)).setObjects(("UPPHONEDOTCOM-MIB", "uplaAgentIdentifier"), ("UPPHONEDOTCOM-MIB", "uplaHostName"), ("UPPHONEDOTCOM-MIB", "uplaProcessId"), ("UPPHONEDOTCOM-MIB", "uplaTrapInfo"))
uplaOutOfResource = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,207)).setObjects(("UPPHONEDOTCOM-MIB", "uplaAgentIdentifier"), ("UPPHONEDOTCOM-MIB", "uplaHostName"), ("UPPHONEDOTCOM-MIB", "uplaProcessId"), ("UPPHONEDOTCOM-MIB", "uplaTrapInfo"))
uplaDatabaseConnectionUp = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,208)).setObjects(("UPPHONEDOTCOM-MIB", "uplaAgentIdentifier"), ("UPPHONEDOTCOM-MIB", "uplaHostName"), ("UPPHONEDOTCOM-MIB", "uplaProcessId"), ("UPPHONEDOTCOM-MIB", "uplaTrapInfo"))
uplNbRouter = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 3))
uplNbRouterDescriptionTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 3, 1), )
if mibBuilder.loadTexts: uplNbRouterDescriptionTable.setStatus('mandatory')
uplNbRouterDescriptionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 3, 1, 1), ).setIndexNames((0, "UPPHONEDOTCOM-MIB", "uplrdIpAddress"), (0, "UPPHONEDOTCOM-MIB", "uplrdProcessId"))
if mibBuilder.loadTexts: uplNbRouterDescriptionEntry.setStatus('mandatory')
uplrdIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 3, 1, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplrdIpAddress.setStatus('mandatory')
uplrdProcessId = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 3, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplrdProcessId.setStatus('mandatory')
uplrdHostName = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 3, 1, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplrdHostName.setStatus('mandatory')
uplrdPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 3, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplrdPortNumber.setStatus('mandatory')
uplrdStartUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 3, 1, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplrdStartUpTime.setStatus('mandatory')
uplrHdtpStats = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 3, 2))
uplNbRouterAirlinkTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 3, 2, 1), )
if mibBuilder.loadTexts: uplNbRouterAirlinkTable.setStatus('deprecated')
uplNbRouterAirlinkEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 3, 2, 1, 1), )
if mibBuilder.loadTexts: uplNbRouterAirlinkEntry.setStatus('deprecated')
uplNbRouterAirlinkStatsTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 3, 2, 2), )
if mibBuilder.loadTexts: uplNbRouterAirlinkStatsTable.setStatus('deprecated')
uplNbRouterAirlinkStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 3, 2, 2, 1), )
if mibBuilder.loadTexts: uplNbRouterAirlinkStatsEntry.setStatus('deprecated')
uplrStackServiceStats = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 3, 3))
uplNbRouterTrapInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 3, 20))
uplrTrapInfo = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 3, 20, 1), DisplayString())
if mibBuilder.loadTexts: uplrTrapInfo.setStatus('optional')
uplrClientIpAddress = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 3, 20, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplrClientIpAddress.setStatus('mandatory')
uplrClientHostName = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 3, 20, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplrClientHostName.setStatus('mandatory')
uplrClientProcessId = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 3, 20, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplrClientProcessId.setStatus('mandatory')
uplrStartup = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,500)).setObjects(("UPPHONEDOTCOM-MIB", "uplrdHostName"), ("UPPHONEDOTCOM-MIB", "uplrdProcessId"), ("UPPHONEDOTCOM-MIB", "uplrTrapInfo"))
uplrShutdown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,501)).setObjects(("UPPHONEDOTCOM-MIB", "uplrdHostName"), ("UPPHONEDOTCOM-MIB", "uplrdProcessId"), ("UPPHONEDOTCOM-MIB", "uplrTrapInfo"))
uplrDatabaseConnectionDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,502)).setObjects(("UPPHONEDOTCOM-MIB", "uplrdHostName"), ("UPPHONEDOTCOM-MIB", "uplrdProcessId"), ("UPPHONEDOTCOM-MIB", "uplrTrapInfo"))
uplrDatabaseConnectionUp = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,503)).setObjects(("UPPHONEDOTCOM-MIB", "uplrdHostName"), ("UPPHONEDOTCOM-MIB", "uplrdProcessId"), ("UPPHONEDOTCOM-MIB", "uplrTrapInfo"))
uplrInvalidConfig = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,504)).setObjects(("UPPHONEDOTCOM-MIB", "uplrdHostName"), ("UPPHONEDOTCOM-MIB", "uplrdProcessId"), ("UPPHONEDOTCOM-MIB", "uplrTrapInfo"))
uplrInternalError = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,505)).setObjects(("UPPHONEDOTCOM-MIB", "uplrdHostName"), ("UPPHONEDOTCOM-MIB", "uplrdProcessId"), ("UPPHONEDOTCOM-MIB", "uplrTrapInfo"))
uplrSMSCConnectionDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,506)).setObjects(("UPPHONEDOTCOM-MIB", "uplrdHostName"), ("UPPHONEDOTCOM-MIB", "uplrdProcessId"), ("UPPHONEDOTCOM-MIB", "uplrTrapInfo"))
uplrSMSCConnectionUp = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,507)).setObjects(("UPPHONEDOTCOM-MIB", "uplrdHostName"), ("UPPHONEDOTCOM-MIB", "uplrdProcessId"), ("UPPHONEDOTCOM-MIB", "uplrTrapInfo"))
uplrClientConnectionDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,508)).setObjects(("UPPHONEDOTCOM-MIB", "uplrdHostName"), ("UPPHONEDOTCOM-MIB", "uplrdProcessId"), ("UPPHONEDOTCOM-MIB", "uplrTrapInfo"))
uplrClientConnectionUp = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,509)).setObjects(("UPPHONEDOTCOM-MIB", "uplrdHostName"), ("UPPHONEDOTCOM-MIB", "uplrdProcessId"), ("UPPHONEDOTCOM-MIB", "uplrTrapInfo"))
uplrNbRouterConnectionDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,510)).setObjects(("UPPHONEDOTCOM-MIB", "uplrClientIpAddress"), ("UPPHONEDOTCOM-MIB", "uplrClientHostName"), ("UPPHONEDOTCOM-MIB", "uplrClientProcessId"), ("UPPHONEDOTCOM-MIB", "uplrTrapInfo"))
uplrNbRouterConnectionUp = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,511)).setObjects(("UPPHONEDOTCOM-MIB", "uplrClientIpAddress"), ("UPPHONEDOTCOM-MIB", "uplrClientHostName"), ("UPPHONEDOTCOM-MIB", "uplrClientProcessId"), ("UPPHONEDOTCOM-MIB", "uplrTrapInfo"))
uplrProtocolError = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,512)).setObjects(("UPPHONEDOTCOM-MIB", "uplrClientIpAddress"), ("UPPHONEDOTCOM-MIB", "uplrClientHostName"), ("UPPHONEDOTCOM-MIB", "uplrClientProcessId"), ("UPPHONEDOTCOM-MIB", "uplrTrapInfo"))
uplMessenger = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4))
uplMessengerDescriptionTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 1), )
if mibBuilder.loadTexts: uplMessengerDescriptionTable.setStatus('mandatory')
uplMessengerDescriptionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 1, 1), ).setIndexNames((0, "UPPHONEDOTCOM-MIB", "uplmdIpAddress"), (0, "UPPHONEDOTCOM-MIB", "uplmdProcessId"))
if mibBuilder.loadTexts: uplMessengerDescriptionEntry.setStatus('mandatory')
uplmdIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 1, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmdIpAddress.setStatus('mandatory')
uplmdProcessId = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmdProcessId.setStatus('mandatory')
uplmdHostName = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 1, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmdHostName.setStatus('mandatory')
uplmdMsgServerPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmdMsgServerPortNumber.setStatus('mandatory')
uplmdPublicHTTPPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmdPublicHTTPPortNumber.setStatus('mandatory')
uplmdPublicHTTPSPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmdPublicHTTPSPortNumber.setStatus('mandatory')
uplmdPrivateHTTPPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmdPrivateHTTPPortNumber.setStatus('mandatory')
uplmdStartupTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 1, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmdStartupTime.setStatus('mandatory')
uplmHdtpStats = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 2))
uplMessengerAirlinkTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 2, 1), )
if mibBuilder.loadTexts: uplMessengerAirlinkTable.setStatus('deprecated')
uplMessengerAirlinkEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 2, 1, 1), )
if mibBuilder.loadTexts: uplMessengerAirlinkEntry.setStatus('deprecated')
uplMessengerAirlinkStatsTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 2, 2), )
if mibBuilder.loadTexts: uplMessengerAirlinkStatsTable.setStatus('deprecated')
uplMessengerAirlinkStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 2, 2, 1), )
if mibBuilder.loadTexts: uplMessengerAirlinkStatsEntry.setStatus('deprecated')
uplmStackServiceStats = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 3))
uplMessengerNtfnStatsTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 4), )
if mibBuilder.loadTexts: uplMessengerNtfnStatsTable.setStatus('mandatory')
uplMessengerNtfnStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 4, 1), ).setIndexNames((0, "UPPHONEDOTCOM-MIB", "uplmnsIpAddress"), (0, "UPPHONEDOTCOM-MIB", "uplmnsProcessId"))
if mibBuilder.loadTexts: uplMessengerNtfnStatsEntry.setStatus('mandatory')
uplmnsIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 4, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmnsIpAddress.setStatus('mandatory')
uplmnsProcessId = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 4, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmnsProcessId.setStatus('mandatory')
uplmnsPublicHTTPReqReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 4, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmnsPublicHTTPReqReceived.setStatus('mandatory')
uplmnsPrivateHTTPReqReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 4, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmnsPrivateHTTPReqReceived.setStatus('mandatory')
uplmnsPublicHTTPSReqReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 4, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmnsPublicHTTPSReqReceived.setStatus('mandatory')
uplmnsPublicHTTPReqProcessed = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 4, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmnsPublicHTTPReqProcessed.setStatus('mandatory')
uplmnsPrivateHTTPReqProcessed = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 4, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmnsPrivateHTTPReqProcessed.setStatus('mandatory')
uplmnsPublicHTTPSReqProcessed = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 4, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmnsPublicHTTPSReqProcessed.setStatus('mandatory')
uplmnsAvgNtfnsAddedPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 4, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmnsAvgNtfnsAddedPerSec.setStatus('mandatory')
uplmnsAvgNtfnsDeliveredPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 4, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmnsAvgNtfnsDeliveredPerSec.setStatus('mandatory')
uplmnsAvgNtfnsExpiredPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 4, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmnsAvgNtfnsExpiredPerSec.setStatus('mandatory')
uplmnsAvgNtfnsMarkedUnDelvrPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 4, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmnsAvgNtfnsMarkedUnDelvrPerSec.setStatus('mandatory')
uplMessengerNtfnCacheTable = MibTable((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 5), )
if mibBuilder.loadTexts: uplMessengerNtfnCacheTable.setStatus('mandatory')
uplMessengerNtfnCacheEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 5, 1), ).setIndexNames((0, "UPPHONEDOTCOM-MIB", "uplmncIpAddress"), (0, "UPPHONEDOTCOM-MIB", "uplmncProcessId"))
if mibBuilder.loadTexts: uplMessengerNtfnCacheEntry.setStatus('mandatory')
uplmncIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 5, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmncIpAddress.setStatus('mandatory')
uplmncProcessId = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 5, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmncProcessId.setStatus('mandatory')
uplmncTotalNumOfPendingNtfns = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 5, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmncTotalNumOfPendingNtfns.setStatus('mandatory')
uplmncAvgNumOfPendingNtfnsPerSub = MibTableColumn((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 5, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: uplmncAvgNumOfPendingNtfnsPerSub.setStatus('mandatory')
uplMessengerTrapInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 20))
uplmTrapInfo = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 2, 1, 4, 20, 1), DisplayString())
if mibBuilder.loadTexts: uplmTrapInfo.setStatus('optional')
uplmStartup = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,600)).setObjects(("UPPHONEDOTCOM-MIB", "uplmdIpAddress"), ("UPPHONEDOTCOM-MIB", "uplmdHostName"), ("UPPHONEDOTCOM-MIB", "uplmdProcessId"), ("UPPHONEDOTCOM-MIB", "uplmTrapInfo"))
uplmShutdown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,601)).setObjects(("UPPHONEDOTCOM-MIB", "uplmdIpAddress"), ("UPPHONEDOTCOM-MIB", "uplmdHostName"), ("UPPHONEDOTCOM-MIB", "uplmdProcessId"), ("UPPHONEDOTCOM-MIB", "uplmTrapInfo"))
uplmDatabaseConnectionDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,602)).setObjects(("UPPHONEDOTCOM-MIB", "uplmdIpAddress"), ("UPPHONEDOTCOM-MIB", "uplmdHostName"), ("UPPHONEDOTCOM-MIB", "uplmdProcessId"), ("UPPHONEDOTCOM-MIB", "uplmTrapInfo"))
uplmDatabaseConnectionUp = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,603)).setObjects(("UPPHONEDOTCOM-MIB", "uplmdIpAddress"), ("UPPHONEDOTCOM-MIB", "uplmdHostName"), ("UPPHONEDOTCOM-MIB", "uplmdProcessId"), ("UPPHONEDOTCOM-MIB", "uplmTrapInfo"))
uplmInvalidConfig = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,604)).setObjects(("UPPHONEDOTCOM-MIB", "uplmdIpAddress"), ("UPPHONEDOTCOM-MIB", "uplmdHostName"), ("UPPHONEDOTCOM-MIB", "uplmdProcessId"), ("UPPHONEDOTCOM-MIB", "uplmTrapInfo"))
uplmInternalErrors = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,605)).setObjects(("UPPHONEDOTCOM-MIB", "uplmdIpAddress"), ("UPPHONEDOTCOM-MIB", "uplmdHostName"), ("UPPHONEDOTCOM-MIB", "uplmdProcessId"), ("UPPHONEDOTCOM-MIB", "uplmTrapInfo"))
uplmAgentConnectionDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,606)).setObjects(("UPPHONEDOTCOM-MIB", "uplmdIpAddress"), ("UPPHONEDOTCOM-MIB", "uplmdHostName"), ("UPPHONEDOTCOM-MIB", "uplmdProcessId"), ("UPPHONEDOTCOM-MIB", "uplmTrapInfo"))
uplmPublicHTTPServiceStarted = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,607)).setObjects(("UPPHONEDOTCOM-MIB", "uplmdIpAddress"), ("UPPHONEDOTCOM-MIB", "uplmdHostName"), ("UPPHONEDOTCOM-MIB", "uplmdProcessId"), ("UPPHONEDOTCOM-MIB", "uplmTrapInfo"))
uplmPublicHTTPServiceDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,608)).setObjects(("UPPHONEDOTCOM-MIB", "uplmdIpAddress"), ("UPPHONEDOTCOM-MIB", "uplmdHostName"), ("UPPHONEDOTCOM-MIB", "uplmdProcessId"), ("UPPHONEDOTCOM-MIB", "uplmTrapInfo"))
uplmPrivateHTTPServiceDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,609)).setObjects(("UPPHONEDOTCOM-MIB", "uplmdIpAddress"), ("UPPHONEDOTCOM-MIB", "uplmdHostName"), ("UPPHONEDOTCOM-MIB", "uplmdProcessId"), ("UPPHONEDOTCOM-MIB", "uplmTrapInfo"))
uplmPublicHTTPSServiceDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 2) + (0,610)).setObjects(("UPPHONEDOTCOM-MIB", "uplmdIpAddress"), ("UPPHONEDOTCOM-MIB", "uplmdHostName"), ("UPPHONEDOTCOM-MIB", "uplmdProcessId"), ("UPPHONEDOTCOM-MIB", "uplmTrapInfo"))
upLinkConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 2))
upLinkStaticInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 2, 2, 1))
upAdmin = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 3))
upAdminTrapInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 4, 3, 20))
upsTrapInfo = MibScalar((1, 3, 6, 1, 4, 1, 1900, 4, 3, 20, 1), DisplayString())
if mibBuilder.loadTexts: upsTrapInfo.setStatus('optional')
upsProxyServiceDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,400)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsProxyServiceSlow = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,401)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsPushServiceDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,402)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsBookmarksServiceDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,403)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsBookmarksServiceSlow = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,404)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsHomePageServiceDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,405)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsUPWebServiceDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,406)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsUPWebServiceSlow = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,407)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsUPAdminServiceDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,408)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsUPMailServiceDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,409)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsUPMailServiceSlow = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,410)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsUPPimServiceDown = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,411)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsUPPimServiceSlow = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,412)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsHomePageServiceSlow = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,413)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsProxyServiceUp = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,414)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsProxyServiceNormal = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,415)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsPushServiceUp = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,416)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsBookmarksServiceUp = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,417)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsBookmarksServiceNormal = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,418)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsHomePageServiceUp = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,419)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsUPWebServiceUp = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,420)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsUPWebServiceNormal = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,421)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsUPAdminServiceUp = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,422)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsUPMailServiceUp = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,423)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsUPMailServiceNormal = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,424)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsUPPimServiceUp = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,425)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
upsUPPimServiceNormal = NotificationType((1, 3, 6, 1, 4, 1, 1900, 4, 3) + (0,426)).setObjects(("UPPHONEDOTCOM-MIB", "upsTrapInfo"))
services = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 5))
upMail = MibIdentifier((1, 3, 6, 1, 4, 1, 1900, 5, 1))
mibBuilder.exportSymbols("UPPHONEDOTCOM-MIB", uplAgentLimitedResourceEntry=uplAgentLimitedResourceEntry, uplaHttpsRequestsSucceeded=uplaHttpsRequestsSucceeded, uplMessengerAirlinkStatsEntry=uplMessengerAirlinkStatsEntry, uplaTransactionsSucceeded=uplaTransactionsSucceeded, uplAgentDescriptionEntry=uplAgentDescriptionEntry, uplAgent=uplAgent, uplAgentErrorStatsDetailTable=uplAgentErrorStatsDetailTable, uplrHdtpStats=uplrHdtpStats, upldHostName=upldHostName, upitChildProcessType=upitChildProcessType, upipChildProcessHostName=upipChildProcessHostName, uplmdPrivateHTTPPortNumber=uplmdPrivateHTTPPortNumber, uplmPublicHTTPSServiceDown=uplmPublicHTTPSServiceDown, uplawesAgentIdentifier=uplawesAgentIdentifier, uplaSessionsSucceeded=uplaSessionsSucceeded, uplaStartUpTime=uplaStartUpTime, upsUPPimServiceDown=upsUPPimServiceDown, uplNbRouterAirlinkStatsEntry=uplNbRouterAirlinkStatsEntry, services=services, uplrSMSCConnectionDown=uplrSMSCConnectionDown, uplAgentWapErrorStatsSummaryEntry=uplAgentWapErrorStatsSummaryEntry, uplrClientHostName=uplrClientHostName, uplNbRouterDescriptionEntry=uplNbRouterDescriptionEntry, uplmnsPrivateHTTPReqProcessed=uplmnsPrivateHTTPReqProcessed, upidInitProcessType=upidInitProcessType, uplrNbRouterConnectionUp=uplrNbRouterConnectionUp, uplaKeyExchanges=uplaKeyExchanges, uplAgentSessionStatsEntry=uplAgentSessionStatsEntry, upidInitProcessId=upidInitProcessId, uplmInvalidConfig=uplmInvalidConfig, uplaDeviationOfTransactionLife=uplaDeviationOfTransactionLife, upidInitIpAddr=upidInitIpAddr, uplmdPublicHTTPSPortNumber=uplmdPublicHTTPSPortNumber, upsPushServiceUp=upsPushServiceUp, uplasstAgentIdentifier=uplasstAgentIdentifier, uplaMessengerConnectionDown=uplaMessengerConnectionDown, upipsChildProcessesDied=upipsChildProcessesDied, upiAllChildProcessesRestart=upiAllChildProcessesRestart, upiInitDescriptionEntry=upiInitDescriptionEntry, upldUplAgentsDisconnected=upldUplAgentsDisconnected, upMail=upMail, uplNbRouterAirlinkEntry=uplNbRouterAirlinkEntry, uplMessengerNtfnCacheEntry=uplMessengerNtfnCacheEntry, uplaErrorSeverity=uplaErrorSeverity, uplaShutdown=uplaShutdown, uplaAgentIdentifier=uplaAgentIdentifier, uplrDatabaseConnectionUp=uplrDatabaseConnectionUp, uplaHttpRequestsSucceeded=uplaHttpRequestsSucceeded, uplrdStartUpTime=uplrdStartUpTime, upiInitChildProcessEntry=upiInitChildProcessEntry, uplAgentWapErrorStatsDetailTable=uplAgentWapErrorStatsDetailTable, upldStartup=upldStartup, uplAgentErrorStatsSummaryEntry=uplAgentErrorStatsSummaryEntry, upipChildProcessId=upipChildProcessId, uplAgentStackServiceAdaptorThreads=uplAgentStackServiceAdaptorThreads, uplaDeviationOfResponseTime=uplaDeviationOfResponseTime, upldRequestsDropped=upldRequestsDropped, uplaHttpRequestsStarted=uplaHttpRequestsStarted, uplrdProcessId=uplrdProcessId, uplrSMSCConnectionUp=uplrSMSCConnectionUp, uplaRequestsDropped=uplaRequestsDropped, uplmnsPublicHTTPSReqProcessed=uplmnsPublicHTTPSReqProcessed, uplmncAvgNumOfPendingNtfnsPerSub=uplmncAvgNumOfPendingNtfnsPerSub, uplaTotalErrors=uplaTotalErrors, uplmPrivateHTTPServiceDown=uplmPrivateHTTPServiceDown, upipChildProcessExeArgs=upipChildProcessExeArgs, upiInitChildProcessStatsTable=upiInitChildProcessStatsTable, uplrInvalidConfig=uplrInvalidConfig, uplAgentStackServiceWDPPortNumber=uplAgentStackServiceWDPPortNumber, upsUPAdminServiceDown=upsUPAdminServiceDown, upsBookmarksServiceUp=upsBookmarksServiceUp, upldDatabaseConnectionUp=upldDatabaseConnectionUp, uplaMeanResponseTime=uplaMeanResponseTime, uplAgentTransactionStatsEntry=uplAgentTransactionStatsEntry, 
upsProxyServiceUp=upsProxyServiceUp, uplaErrorClass=uplaErrorClass, uplaStackServiceStats=uplaStackServiceStats, uplassAgentIdentifier=uplassAgentIdentifier, uplrClientProcessId=uplrClientProcessId, uplaFaxMgrConnectionDown=uplaFaxMgrConnectionDown, uplmdHostName=uplmdHostName, uplmncTotalNumOfPendingNtfns=uplmncTotalNumOfPendingNtfns, uplDispatcher=uplDispatcher, upsTrapInfo=upsTrapInfo, uplaProtoSessions=uplaProtoSessions, uplaTransactionErrors=uplaTransactionErrors, uplaWapInvokeTpdus=uplaWapInvokeTpdus, upiChildProcessStart=upiChildProcessStart, upldInvalidConfig=upldInvalidConfig, uplMessengerTrapInfo=uplMessengerTrapInfo, upipInitProcessId=upipInitProcessId, upldDatabaseConnectionDown=upldDatabaseConnectionDown, upldPortNumber=upldPortNumber, uplaErrorCode=uplaErrorCode, uplaesAgentIdentifier=uplaesAgentIdentifier, upsBookmarksServiceNormal=upsBookmarksServiceNormal, uplaMeanTransactionLife=uplaMeanTransactionLife, upsHomePageServiceUp=upsHomePageServiceUp, uplaTransactionsStarted=uplaTransactionsStarted, uplrDatabaseConnectionDown=uplrDatabaseConnectionDown, uplaWapSessionsStarted=uplaWapSessionsStarted, uplMessengerAirlinkStatsTable=uplMessengerAirlinkStatsTable, upldSubscribersLoaded=upldSubscribersLoaded, uplaedAgentIdentifier=uplaedAgentIdentifier, upsProxyServiceDown=upsProxyServiceDown, uplaRequestsDuplicated=uplaRequestsDuplicated, upiNoChildProcess=upiNoChildProcess, uplMessengerDescriptionTable=uplMessengerDescriptionTable, uplaWapErrorCode=uplaWapErrorCode, upLinkStaticInfo=upLinkStaticInfo, uplaEncryptedSessions=uplaEncryptedSessions, upiInitFailToStart=upiInitFailToStart, uplAgentWapWTPTransactionStatsTable=uplAgentWapWTPTransactionStatsTable, uplrClientConnectionDown=uplrClientConnectionDown, uplaSilentErrors=uplaSilentErrors, uplaWapErrorClass=uplaWapErrorClass, uplmStartup=uplmStartup, uplAgentWapSessionStatsEntry=uplAgentWapSessionStatsEntry, upsUPPimServiceNormal=upsUPPimServiceNormal, uplmPublicHTTPServiceDown=uplmPublicHTTPServiceDown, uplAgentErrorStatsSummaryTable=uplAgentErrorStatsSummaryTable, upsProxyServiceNormal=upsProxyServiceNormal, upipChildProcessStatus=upipChildProcessStatus, uplmdStartupTime=uplmdStartupTime, uplAgentErrorStatsDetailEntry=uplAgentErrorStatsDetailEntry, upitTrapInfo=upitTrapInfo, uplAgentStackServiceEntry=uplAgentStackServiceEntry, uplmnsProcessId=uplmnsProcessId, uplMessengerAirlinkEntry=uplMessengerAirlinkEntry, uplAgentStackServiceTable=uplAgentStackServiceTable, upsUPWebServiceNormal=upsUPWebServiceNormal, uplAgentStackServiceIdentifier=uplAgentStackServiceIdentifier, uplaInvalidConfig=uplaInvalidConfig, uplAgentWapWSPSessionStatsTable=uplAgentWapWSPSessionStatsTable, uplaProcessId=uplaProcessId, uplmdPublicHTTPPortNumber=uplmdPublicHTTPPortNumber, uplmnsAvgNtfnsAddedPerSec=uplmnsAvgNtfnsAddedPerSec, uplmdMsgServerPortNumber=uplmdMsgServerPortNumber, uplaActiveWapSessions=uplaActiveWapSessions, uplAgentTrapInfo=uplAgentTrapInfo, uplrStackServiceStats=uplrStackServiceStats, uplmdIpAddress=uplmdIpAddress, upLinkConfig=upLinkConfig, upsPushServiceDown=upsPushServiceDown, uplawsAgentIdentifier=uplawsAgentIdentifier, uplAgentWapErrorStatsSummaryTable=uplAgentWapErrorStatsSummaryTable, uplMessengerAirlinkTable=uplMessengerAirlinkTable, upipChildProcessType=upipChildProcessType, uplaMeanRetriesPerThousandTxn=uplaMeanRetriesPerThousandTxn, upiAllChildProcessesStop=upiAllChildProcessesStop, uplaWapResultTpdus=uplaWapResultTpdus, uplaTrapInfo=uplaTrapInfo, upipsInitIpAddr=upipsInitIpAddr, upsHomePageServiceSlow=upsHomePageServiceSlow, 
uplaStartup=uplaStartup, upiInitTrapInfo=upiInitTrapInfo, uplaTotalWapErrors=uplaTotalWapErrors, upLinkProcesses=upLinkProcesses, upipsChildProcessesStarted=upipsChildProcessesStarted, uplrClientConnectionUp=uplrClientConnectionUp, uplmncProcessId=uplmncProcessId, upiChildProcessFailToStart=upiChildProcessFailToStart, uplmInternalErrors=uplmInternalErrors, uplrdPortNumber=uplrdPortNumber, upidInitVersion=upidInitVersion, uplaWapAbortTransaction=uplaWapAbortTransaction, uplmnsPublicHTTPReqReceived=uplmnsPublicHTTPReqReceived, uplMessengerNtfnStatsEntry=uplMessengerNtfnStatsEntry, upipChildProcessStopTime=upipChildProcessStopTime, upipChildProcessExePath=upipChildProcessExePath, uplaInternalFatalErrors=uplaInternalFatalErrors, uplmDatabaseConnectionDown=uplmDatabaseConnectionDown, uplaRequestsNotValid=uplaRequestsNotValid, upsBookmarksServiceDown=upsBookmarksServiceDown, uplrdIpAddress=uplrdIpAddress, upsUPAdminServiceUp=upsUPAdminServiceUp, uplmHdtpStats=uplmHdtpStats, uplAgentStackServiceAppProtoName=uplAgentStackServiceAppProtoName, uplmdProcessId=uplmdProcessId, uplawtsAgentIdentifier=uplawtsAgentIdentifier, upitChildProcessHostName=upitChildProcessHostName, uplAgentWebAccessStatsEntry=uplAgentWebAccessStatsEntry, upldTrapInfo=upldTrapInfo, uplaHttpsMeanResponseTime=uplaHttpsMeanResponseTime, uplrProtocolError=uplrProtocolError, uplmPublicHTTPServiceStarted=uplmPublicHTTPServiceStarted, upldProcessId=upldProcessId, upipChildProcessIpAddr=upipChildProcessIpAddr, uplMessengerNtfnCacheTable=uplMessengerNtfnCacheTable, uplaHttpsDeviationOfResponseTime=uplaHttpsDeviationOfResponseTime, upldUplAgentConnectionUp=upldUplAgentConnectionUp, upipChildProcessState=upipChildProcessState, uplmnsPublicHTTPReqProcessed=uplmnsPublicHTTPReqProcessed, uplNbRouter=uplNbRouter, upiChildProcessShutdown=upiChildProcessShutdown, uplaErrorName=uplaErrorName, upsBookmarksServiceSlow=upsBookmarksServiceSlow, upsProxyServiceSlow=upsProxyServiceSlow, uplAgentSessionStatsTable=uplAgentSessionStatsTable, upsUPPimServiceSlow=upsUPPimServiceSlow, upipsInitProcessId=upipsInitProcessId, uplaRepliesTimedOut=uplaRepliesTimedOut, uplaActiveSessions=uplaActiveSessions, upiInit=upiInit, upiInitStats=upiInitStats, uplaWapErrorName=uplaWapErrorName, uplrStartup=uplrStartup, upsUPWebServiceUp=upsUPWebServiceUp, uplaDeviationOfRetriesPTTxn=uplaDeviationOfRetriesPTTxn, uplaSessionWapErrors=uplaSessionWapErrors, upsUPMailServiceNormal=upsUPMailServiceNormal, phoneDotCom=phoneDotCom, uplDispatcherDescription=uplDispatcherDescription, upLink=upLink, uplAgentProxyStats=uplAgentProxyStats, upitChildProcessId=upitChildProcessId, uplaOtherWapErrors=uplaOtherWapErrors, uplrInternalError=uplrInternalError, uplaKeyErrors=uplaKeyErrors, uplmDatabaseConnectionUp=uplmDatabaseConnectionUp, systems=systems, uplaErrorCount=uplaErrorCount, upAdmin=upAdmin, uplaWapErrorSeverity=uplaWapErrorSeverity, uplaRepliesDelivered=uplaRepliesDelivered, uplaRequestsReceived=uplaRequestsReceived, upipsInitProcessType=upipsInitProcessType, uplaOutOfResource=uplaOutOfResource, uplmnsAvgNtfnsExpiredPerSec=uplmnsAvgNtfnsExpiredPerSec, upiInitChildProcessTable=upiInitChildProcessTable, upldUplAgentConnectionDown=upldUplAgentConnectionDown, upiDatabaseConnectionUp=upiDatabaseConnectionUp, uplmncIpAddress=uplmncIpAddress, uplrTrapInfo=uplrTrapInfo, uplaSessionErrors=uplaSessionErrors, upsUPWebServiceSlow=upsUPWebServiceSlow, uplaweAgentIdentifier=uplaweAgentIdentifier, uplAgentWebAccessStatsTable=uplAgentWebAccessStatsTable, 
uplaHttpMeanResponseTime=uplaHttpMeanResponseTime, uplrdHostName=uplrdHostName, uplaDatabaseConnectionDown=uplaDatabaseConnectionDown, uplaTransactionWapErrors=uplaTransactionWapErrors, uplmTrapInfo=uplmTrapInfo, upldUplAgentId=upldUplAgentId, uplmnsAvgNtfnsDeliveredPerSec=uplmnsAvgNtfnsDeliveredPerSec, uplaDatabaseConnectionUp=uplaDatabaseConnectionUp, upsHomePageServiceDown=upsHomePageServiceDown, uplaOtherErrors=uplaOtherErrors, uplaWapErrorCount=uplaWapErrorCount, uplaasAgentIdentifier=uplaasAgentIdentifier, upldState=upldState, upipInitProcessType=upipInitProcessType, uplawssAgentIdentifier=uplawssAgentIdentifier, uplAgentStackServiceLoaded=uplAgentStackServiceLoaded)
mibBuilder.exportSymbols("UPPHONEDOTCOM-MIB", uplmAgentConnectionDown=uplmAgentConnectionDown, upsUPMailServiceUp=upsUPMailServiceUp, uplHdtpStats=uplHdtpStats, upldOutOfResouce=upldOutOfResouce, uplaHostName=uplaHostName, uplmnsPublicHTTPSReqReceived=uplmnsPublicHTTPSReqReceived, uplAgentTransactionStatsTable=uplAgentTransactionStatsTable, upipChildProcessStartTime=upipChildProcessStartTime, uplMessengerNtfnStatsTable=uplMessengerNtfnStatsTable, upsUPMailServiceDown=upsUPMailServiceDown, uplAgentAirLinkStatsTable=uplAgentAirLinkStatsTable, uplatsAgentIdentifier=uplatsAgentIdentifier, uplAgentAirLinkStatsEntry=uplAgentAirLinkStatsEntry, upipInitIpAddr=upipInitIpAddr, upldUplAgentsLoaded=upldUplAgentsLoaded, uplmnsPrivateHTTPReqReceived=uplmnsPrivateHTTPReqReceived, uplaTransactionsActive=uplaTransactionsActive, uplaDeviceErrors=uplaDeviceErrors, upidInitStartupTime=upidInitStartupTime, upAdminTrapInfo=upAdminTrapInfo, upiInitChildProcessStatsEntry=upiInitChildProcessStatsEntry, upsUPPimServiceUp=upsUPPimServiceUp, uplaHttpsRequestsStarted=uplaHttpsRequestsStarted, uplaHttpDeviationOfResponseTime=uplaHttpDeviationOfResponseTime, uplrShutdown=uplrShutdown, uplaWapStats=uplaWapStats, uplmnsIpAddress=uplmnsIpAddress, upiChildProcessesBelowMinimum=upiChildProcessesBelowMinimum, uplrNbRouterConnectionDown=uplrNbRouterConnectionDown, uplmShutdown=uplmShutdown, uplNbRouterTrapInfo=uplNbRouterTrapInfo, uplaSessionsStarted=uplaSessionsStarted, upiInitShutdown=upiInitShutdown, upipsChildProcessesRunning=upipsChildProcessesRunning, uplmnsAvgNtfnsMarkedUnDelvrPerSec=uplmnsAvgNtfnsMarkedUnDelvrPerSec, uplAgentWapErrorStatsDetailEntry=uplAgentWapErrorStatsDetailEntry, uplAgentWapTransactionStatsEntry=uplAgentWapTransactionStatsEntry, uplNbRouterAirlinkStatsTable=uplNbRouterAirlinkStatsTable, uplNbRouterDescriptionTable=uplNbRouterDescriptionTable, upsUPWebServiceDown=upsUPWebServiceDown, uplAgentLimitedResourceTable=uplAgentLimitedResourceTable, uplNbRouterAirlinkTable=uplNbRouterAirlinkTable, upldShutdown=upldShutdown, uplAgentStackServiceName=uplAgentStackServiceName, upiDatabaseConnectionDown=upiDatabaseConnectionDown, upidInitHostName=upidInitHostName, upldStartUpTime=upldStartUpTime, uplDispatcherStats=uplDispatcherStats, upldRequestsReceived=upldRequestsReceived, upldKeyExchanges=upldKeyExchanges, uplMessenger=uplMessenger, upsUPMailServiceSlow=upsUPMailServiceSlow, uplmStackServiceStats=uplmStackServiceStats, upipsChildProcessType=upipsChildProcessType, uplrClientIpAddress=uplrClientIpAddress, upiInitDescriptionTable=upiInitDescriptionTable, uplDispatcherTrapInfo=uplDispatcherTrapInfo, uplMessengerDescriptionEntry=uplMessengerDescriptionEntry, uplAgentDescriptionTable=uplAgentDescriptionTable)
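# A minimal loading sketch (assumptions: this generated module is saved as
# UPPHONEDOTCOM-MIB.py in the current directory, which pysnmp's MibBuilder can
# pick up as a MIB source; shown as comments because this file only runs when
# executed by the MIB loader itself):
#
#   from pysnmp.smi import builder
#   mb = builder.MibBuilder()
#   mb.addMibSources(builder.DirMibSource('.'))
#   mb.loadModules('UPPHONEDOTCOM-MIB')
#   (uplmdStartupTime,) = mb.importSymbols('UPPHONEDOTCOM-MIB', 'uplmdStartupTime')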
|
# encoding: utf-8
from __future__ import division, print_function, unicode_literals
###########################################################################################################
#
#
# Palette Plugin
#
# Read the docs:
# https://github.com/schriftgestalt/GlyphsSDK/tree/master/Python%20Templates/Palette
#
#
###########################################################################################################
import objc
from GlyphsApp.plugins import *
from AppKit import NSStackView, NSButton, NSBundle, NSOnState, NSOffState, NSMiniControlSize, NSSmallControlSize, NSSwitchButton, NSFont, NSMakeRect, NSUserDefaults
import traceback
NSStackViewGravityLeading = 1
NSLayoutConstraintOrientationVertical = 1
NSLayoutConstraintOrientationHorizontal = 0
# print("Reporter Toggler 2018-01-29")
ControlSize = NSMiniControlSize # NSSmallControlSize
class ReporterToggler (PalettePlugin):
stackView = objc.IBOutlet()
view = objc.IBOutlet()
@objc.python_method
def settings(self):
try:
#NSBundle.loadNibNamed_owner_('View', self)
self.name = 'Reporters'
width = 160
self.reporterArray = list(Glyphs.reporters)
self.reporterArray = sorted(self.reporterArray, key=lambda reporter: reporter.title())
self.checkboxes = []
for i, reporter in enumerate(self.reporterArray): # Glyphs.activeReporters
# print(reporter.classCode())
frame = NSMakeRect(0, 0, 18, 18)
checkBox = NSButton.alloc().initWithFrame_(frame)
checkBox.setTitle_(reporter.title())
checkBox.setButtonType_(NSSwitchButton)
checkBox.setTarget_(self)
checkBox.setAction_(self.toggle_)
if reporter in Glyphs.activeReporters:
isActive = NSOnState
else:
isActive = NSOffState
checkBox.setState_(isActive)
checkBox.setControlSize_(ControlSize)
font = NSFont.systemFontOfSize_(NSFont.systemFontSizeForControlSize_(ControlSize))
checkBox.setFont_(font)
self.checkboxes.append(checkBox)
self.dialog = NSStackView.stackViewWithViews_(self.checkboxes)
self.dialog.setOrientation_(1)
self.dialog.setAlignment_(1)
self.dialog.setSpacing_(5)
self.dialog.setEdgeInsets_((2, 8, 8, 1))
self.dialog.setClippingResistancePriority_forOrientation_(250, NSLayoutConstraintOrientationHorizontal)
self.dialog.setViews_inGravity_(self.checkboxes, NSStackViewGravityLeading)
self.dialog.setNeedsLayout_(True)
#self.dialog = self.view
except:
print(traceback.format_exc())
@objc.python_method
def start(self):
		# Adding a callback for when the visibility of a reporter changes
NSUserDefaults.standardUserDefaults().addObserver_forKeyPath_options_context_(self, "visibleReporters", 0, None)
def observeValueForKeyPath_ofObject_change_context_(self, keyPath, aObject, change, context):
self.update(self)
def toggle_(self, sender=None):
try:
thisReporter = sender.title()
for i, reporter in enumerate(self.reporterArray):
if reporter.title() == thisReporter:
if sender.state() == NSOffState:
Glyphs.deactivateReporter(reporter)
else:
Glyphs.activateReporter(reporter)
except:
print(traceback.format_exc())
@objc.python_method
def update(self, sender=None):
try:
for i, reporter in enumerate(self.reporterArray): # Glyphs.activeReporters
if reporter in Glyphs.activeReporters:
isActive = NSOnState
else:
isActive = NSOffState
self.checkboxes[i].setState_(isActive)
except:
print(traceback.format_exc())
@objc.python_method
def __del__(self):
# Delete callbacks when the window is closed, otherwise it'll crash :(
NSUserDefaults.standardUserDefaults().removeObserver_forKeyPath_(self, "visibleReporters")
def setSortID_(self, id):
pass
def sortID(self):
return 0
|
from twitchio.ext import commands
import socket
import time
from dotenv import dotenv_values
class Bot(commands.Bot):
config = dotenv_values(".env")
def __init__(self):
# Initialise our Bot with our access token, prefix and a list of channels to join on boot...
        # Make sure the required settings are present
if 'OAUTH_TOKEN' not in self.config.keys():
raise ValueError('OAUTH_TOKEN is not set')
if 'CHANNEL_NAME' not in self.config.keys():
raise ValueError('CHANNEL_NAME is not set')
token = self.config.get("OAUTH_TOKEN")
channel = self.config.get("CHANNEL_NAME")
super().__init__(token, prefix="!", initial_channels=[channel])
    async def sendCommand(self, content):
        # Open a fresh connection per command and close it once the send completes
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            s.connect(("localhost", 6000))
            print('Twitch pressed a button!', content)
            content += '\r\n' # important for the parser on the switch side
            s.sendall(content.encode())
async def event_ready(self):
# Notify us when everything is ready!
# We are logged in and ready to chat and use commands...
print(f'Logged in as | {self.nick}')
print('Connected to Nintendo Switch')
    # uppercase button commands
@commands.command()
async def A(self, ctx: commands.Context):
# Clicks the A Button
# twitch command = !A
await self.sendCommand("click A")
@commands.command()
async def B(self, ctx: commands.Context):
# Clicks the B Button
# twitch command = !B
await self.sendCommand("click B")
@commands.command()
async def X(self, ctx: commands.Context):
# Clicks the X Button
# twitch command = !X
await self.sendCommand("click X")
@commands.command()
async def Y(self, ctx: commands.Context):
# Clicks the Y Button
# twitch command = !Y
await self.sendCommand("click Y")
@commands.command()
async def ZR(self, ctx: commands.Context):
# Clicks the ZR Button
# twitch command = !ZR
await self.sendCommand("click ZR")
@commands.command()
async def ZL(self, ctx: commands.Context):
# Clicks the ZL Button
# twitch command = !ZL
await self.sendCommand("click ZL")
@commands.command()
async def L(self, ctx: commands.Context):
# Clicks the L Button
# twitch command = !L
await self.sendCommand("click L")
@commands.command()
async def R(self, ctx: commands.Context):
# Clicks the R Button
# twitch command = !R
await self.sendCommand("click R")
@commands.command()
async def DLEFT(self, ctx: commands.Context):
        # Clicks DPAD LEFT
# twitch command = !DLEFT
await self.sendCommand("click DLEFT")
@commands.command()
async def DRIGHT(self, ctx: commands.Context):
# Clicks the DPAD RIGHT
        # twitch command = !DRIGHT
await self.sendCommand("click DRIGHT")
@commands.command()
async def DDOWN(self, ctx: commands.Context):
# Clicks DPAD DOWN
        # twitch command = !DDOWN
await self.sendCommand("click DDOWN")
@commands.command()
async def DUP(self, ctx: commands.Context):
# Clicks DPAD UP
        # twitch command = !DUP
await self.sendCommand("click DUP")
@commands.command()
async def CAPTURE(self, ctx: commands.Context):
# Captures a screenshot
        # twitch command = !CAPTURE
await self.sendCommand("click CAPTURE")
@commands.command()
async def PLUS(self, ctx: commands.Context):
# Clicks Plus
# twitch command = !PLUS
await self.sendCommand("click PLUS")
@commands.command()
async def MINUS(self, ctx: commands.Context):
# Clicks MINUS
# twitch command = !MINUS
await self.sendCommand("click MINUS")
# Left Joystick
@commands.command()
async def up(self, ctx: commands.Context):
# Presses UP on the left joystick
        # twitch command = !up
await self.sendCommand("setStick LEFT 0x0000 0x7FFF")
time.sleep(1)
await self.sendCommand("setStick LEFT 0x0000 0x0000")
@commands.command()
async def left(self, ctx: commands.Context):
# Presses LEFT on the left joystick
# twitch command = !left
await self.sendCommand("setStick LEFT -0x8000 0x0000")
time.sleep(1)
await self.sendCommand("setStick LEFT 0x0000 0x0000")
@commands.command()
async def down(self, ctx: commands.Context):
# Presses DOWN on the left joystick
# twitch command = !down
await self.sendCommand("setStick LEFT 0x0000 -0x8000")
time.sleep(1)
await self.sendCommand("setStick LEFT 0x0000 0x0000")
@commands.command()
async def right(self, ctx: commands.Context):
# Presses RIGHT on the left joystick
# twitch command = !right
await self.sendCommand("setStick LEFT 0x7FFF 0x0000")
time.sleep(1)
await self.sendCommand("setStick LEFT 0x0000 0x0000")
    # lowercase button commands
@commands.command()
async def a(self, ctx: commands.Context):
# Clicks the A Button
# twitch command = !a
await self.sendCommand("click A")
@commands.command()
async def b(self, ctx: commands.Context):
# Clicks the B Button
# twitch command = !b
await self.sendCommand("click B")
@commands.command()
async def x(self, ctx: commands.Context):
# Clicks the X Button
# twitch command = !x
await self.sendCommand("click X")
@commands.command()
async def y(self, ctx: commands.Context):
# Clicks the Y Button
# twitch command = !y
await self.sendCommand("click Y")
@commands.command()
async def zr(self, ctx: commands.Context):
# Clicks the ZR Button
# twitch command = !zr
await self.sendCommand("click ZR")
@commands.command()
async def capture(self, ctx: commands.Context):
# Captures a screenshot
# twitch command = !capture
await self.sendCommand("click CAPTURE")
@commands.command()
async def plus(self, ctx: commands.Context):
# Clicks Plus
# twitch command = !plus
await self.sendCommand("click PLUS")
@commands.command()
async def minus(self, ctx: commands.Context):
# Clicks MINUS
# twitch command = !minus
await self.sendCommand("click MINUS")
@commands.command()
async def zl(self, ctx: commands.Context):
# Clicks the ZL Button
# twitch command = !zl
await self.sendCommand("click ZL")
@commands.command()
async def dleft(self, ctx: commands.Context):
        # Clicks DPAD LEFT
# twitch command = !dleft
await self.sendCommand("click DLEFT")
@commands.command()
async def dright(self, ctx: commands.Context):
# Clicks the DPAD RIGHT
# twitch command = !dright
await self.sendCommand("click DRIGHT")
@commands.command()
async def ddown(self, ctx: commands.Context):
# Clicks DPAD DOWN
# twitch command = !ddown
await self.sendCommand("click DDOWN")
@commands.command()
async def dup(self, ctx: commands.Context):
# Clicks DPAD UP
# twitch command = !dup
await self.sendCommand("click DUP")
async def run_command(self, command: str, ctx: commands.Context):
# Programmatically execute Twitch commands
#
# command: twitch command string
# ctx: command context
return await self.commands[command](ctx)
async def command_parser(self, message):
# message: the message object passed from the event_message event
#
# We want to allow command chaining. So, we want to parse valid
# commands separated by spaces.
        message_content = message.content
        if not message_content or message_content[0] != "!": # If the message doesn't start with !, ignore it
            return
        # Strip each part so "!a !b" yields 'a' and 'b', not 'a ' (which would never match)
        message_parts = [part.strip() for part in message_content.split("!")]
message_context = await self.get_context(message)
if len(message_parts) > 2: # If there are more than 2 parts, we have a command chain
for part in message_parts:
if len(part) > 0:
if part in self.commands:
await self.run_command(part, message_context)
time.sleep(1)
else: # If there are only 2 parts, we have a single command ({before}!{after} where {after}
# is the command. {before} is always empty because we enforce a first character of !)
if message_parts[1] in self.commands:
await self.run_command(message_parts[1], message_context)
async def event_command_error(self, context: commands.Context, error):
# Handle command errors
# Set DEBUG in .env to True to see errors
#
# context: the error context
# error: the error object
if "DEBUG" in self.config and self.config["DEBUG"] == "True":
print(error)
return
async def event_message(self, message):
# Handles messages sent in the chat
# message: the message object passed from the event_message event
# If the bot is the sender, ignore it
if message.echo:
return
await self.command_parser(message)
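# The Switch side is not included here; the sketch below is a hypothetical
# stand-in listener (standard library only) that accepts the same
# '\r\n'-terminated commands sendCommand() emits, handy for testing the bot
# locally without hardware. Run it as a separate process before starting the bot.
def debug_listener(host="localhost", port=6000):
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server.bind((host, port))
    server.listen(1)
    while True:
        # Each bot command arrives on its own short-lived connection
        conn, _ = server.accept()
        with conn:
            data = conn.recv(1024).decode()
            for command in data.split("\r\n"):
                if command:
                    print("received:", command)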
bot = Bot()
bot.run()
# bot.run() is blocking and will stop execution of any below code here until stopped or closed.
|
"""
========================
Custom Figure subclasses
========================
You can pass a `.Figure` subclass to `.pyplot.figure` if you want to change
the default behavior of the figure.
This example defines a `.Figure` subclass ``WatermarkFigure`` that accepts an
additional parameter ``watermark`` to display a custom watermark text. The
figure is created using the ``FigureClass`` parameter of `.pyplot.figure`.
The additional ``watermark`` parameter is passed on to the subclass
constructor.
"""
import matplotlib.pyplot as plt
from matplotlib.figure import Figure
import numpy as np
class WatermarkFigure(Figure):
"""A figure with a text watermark."""
def __init__(self, *args, watermark=None, **kwargs):
super().__init__(*args, **kwargs)
if watermark is not None:
bbox = dict(boxstyle='square', lw=3, ec='gray',
fc=(0.9, 0.9, .9, .5), alpha=0.5)
self.text(0.5, 0.5, watermark,
ha='center', va='center', rotation=30,
fontsize=40, color='gray', alpha=0.5, bbox=bbox)
x = np.linspace(-3, 3, 201)
y = np.tanh(x) + 0.1 * np.cos(5 * x)
plt.figure(FigureClass=WatermarkFigure, watermark='draft')
plt.plot(x, y)
#############################################################################
#
# ------------
#
# References
# """"""""""
#
# The use of the following functions, methods, classes and modules is shown
# in this example:
import matplotlib
matplotlib.pyplot.figure
matplotlib.figure.Figure
matplotlib.figure.Figure.text
|
#!/usr/bin/env python
# encoding: utf-8
"""
@version: v1.0
@author: william wei
@license: Apache Licence
@contact: weixiaole@baidu.com
@file: config.py
@time: 15/01/2018 11:27 AM
"""
import toml
import os
from core import container
APP_PATH = os.path.split(os.path.split(os.path.realpath(__file__))[0])[0]
__config = {}
for p, d, file_lists in os.walk(os.path.join(APP_PATH, 'config')):
    for f in file_lists:
        if f.endswith('.toml'):  # ignore non-TOML files in the config directory
            __config[f.split('.toml')[0]] = toml.load(os.path.join(p, f))
def get(key_str, def_val=None):
cur = __config
try:
for key in key_str.split('.'):
cur = cur[key]
return cur
except Exception as e:
container.resolve('logger').notice("config key [%s] not found" % key_str)
return def_val
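# Usage sketch (hypothetical keys; assumes a config/database.toml defining
# host = "127.0.0.1"): get() walks the dotted path through the per-file dicts
# and falls back to def_val, logging a notice, when any segment is missing.
def _demo_get():
    print(get('database.host', 'localhost'))   # -> '127.0.0.1'
    print(get('database.missing_key', 5432))   # -> 5432, after a logged notice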
|
import unittest
import json
from sqlalchemy import Table, Column, Integer, String, MetaData
from sqlalchemy import create_engine
# from dbsync import DBSync
from dbsync.stores.rdbms import DatabaseStore
from dbsync.stores.local import LocalStore
from dbsync.syncers.pool import ThreadPoolSyncer
from datetime import date, datetime
class DBSyncTestCase(unittest.TestCase):
def test_dbsync(self):
#DBSync().serializer().syncer().validator().start()
engine = create_engine('oracle://vbaread:vbaread@10.1.253.15:1521/orcl', echo=True)
rdbms = DatabaseStore(engine)
local_file = LocalStore('data')
ThreadPoolSyncer(rdbms, local_file).sync()
def test_dump(self):
engine = create_engine('oracle://vbaread:vbaread@10.1.253.15:1521/orcl', echo=True)
res = engine.connect().execute("select * from vba.student_homework_month")
        for row in res:
            # print each row; a generator body (yield) would keep unittest from executing this test
            print(row)
            # print json.dumps(collections.OrderedDict((key.lower(), row[key]) for key in row.keys()), cls=DatetimeJSONEncoder)
class DatetimeJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime):
return obj.strftime('%Y-%m-%d %H:%M:%S')
elif isinstance(obj, date):
return obj.strftime('%Y-%m-%d')
else:
return json.JSONEncoder.default(self, obj)
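# A small usage sketch (hypothetical payload) of the encoder above:
def _demo_encoder():
    payload = {'created': datetime(2018, 1, 15, 11, 27), 'due': date(2018, 2, 1)}
    # -> {"created": "2018-01-15 11:27:00", "due": "2018-02-01"}
    print(json.dumps(payload, cls=DatetimeJSONEncoder))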
if __name__ == '__main__':
unittest.main()
|
"""Setup file for tensorbayes
For easy installation and uninstallation, do the following.
MANUAL INSTALL:
python setup.py install --record files.txt
UNINSTALL:
cat files.txt | xargs rm -r
"""
from setuptools import setup, find_packages
import os
setup(
name="tensorbayes",
version="0.4.0",
author="Rui Shu",
author_email="ruishu@stanford.edu",
url="http://www.github.com/RuiShu/tensorbayes",
download_url="https://github.com/RuiShu/tensorbayes/archive/0.4.0.tar.gz",
license="MIT",
description="Deep Variational Inference in TensorFlow",
install_requires = ['numpy'],
extras_require={
'notebook': ['jupyter']
},
packages=find_packages()
)
|
# -*- coding: utf-8 -*-
import hmac
import hashlib
from rest_framework import permissions
from rest_framework import exceptions
from framework import sentry
from website import settings
class RequestComesFromMailgun(permissions.BasePermission):
"""Verify that request comes from Mailgun.
Adapted here from conferences/message.py
    Signature comparison as recommended by the Mailgun docs:
https://documentation.mailgun.com/en/latest/user_manual.html#webhooks
"""
def has_permission(self, request, view):
if request.method != 'POST':
raise exceptions.MethodNotAllowed(method=request.method)
data = request.data
if not data:
raise exceptions.ParseError('Request body is empty')
if not settings.MAILGUN_API_KEY:
return False
signature = hmac.new(
key=settings.MAILGUN_API_KEY,
msg='{}{}'.format(
data['timestamp'],
data['token'],
),
digestmod=hashlib.sha256,
).hexdigest()
if 'signature' not in data:
error_message = 'Signature required in request body'
sentry.log_message(error_message)
raise exceptions.ParseError(error_message)
if not hmac.compare_digest(unicode(signature), unicode(data['signature'])):
raise exceptions.ParseError('Invalid signature')
return True
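# A minimal sketch (hypothetical key/timestamp/token values) of building a
# request body that passes the verification above, useful in tests:
def make_signed_body(api_key, timestamp, token):
    # Mirror the server-side computation: HMAC-SHA256 over timestamp + token
    signature = hmac.new(
        key=api_key,
        msg='{}{}'.format(timestamp, token),
        digestmod=hashlib.sha256,
    ).hexdigest()
    return {'timestamp': timestamp, 'token': token, 'signature': signature}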
|
from django.shortcuts import render
from django.http import HttpResponse
from .models import Item
# Create your views here.
def index(request):
return HttpResponse(', '.join([item.name for item in Item.objects.order_by('name')]))
|
from .rsmaker import RunstatMaker
|
type_dict = dict()
tmp_var = []
err_flag = 0
bottom_flag = 0
def is_var(var_name):
if len(var_name)==0:
return False
if var_name[0]!='`':
return False
x=var_name[1:]
tmp_len=len(x)
if tmp_len==0:
return False
else:
if tmp_len==1:
return x.isalpha()
else:
if x[0].isalpha()==False:
return False
else:
for i in x[1:]:
if i.isdigit()==False and i.isalpha()==False:
return False
return True
def is_pri(x):
return (x in ["str","int","real"])
def is_arglist(x):
if x=="()":
return True
tmp_len=len(x)
if tmp_len<2:
return False
if x[0]!='(' or x[tmp_len-1]!=')':
return False
tmp_x=x[1:tmp_len-1]
tmp_x_list=tmp_x.split(',')
for item in tmp_x_list:
if check_type(item)==4:
return False
return True
def split_arg(input_list):
result=[]
x=input_list[1:-1]
if x=="":
return result
find_start_index=0
while 1:
tmp_index=x.find(',',find_start_index)
if tmp_index!=-1:
tmp_data=x[:tmp_index]
if check_type(tmp_data)!=4:
result.append(tmp_data)
x=x[tmp_index+1:]
find_start_index=0
else:
find_start_index=tmp_index+1
if find_start_index>len(x)-1:
break
else:
continue
else:
result.append(x)
break
return result
def is_func(func_name):
tmp_len=len(func_name)
if tmp_len<=2:
return False
if "->" not in func_name:
return False
flag_index=func_name.rindex("->")
l_p=func_name[:flag_index]
if flag_index+2>tmp_len-1:
return False
r_p=func_name[flag_index+2:]
if is_arglist(l_p)==False:
return False
if check_type(r_p)==4:
return False
return True
def is_list(list_name):
tmp_len=len(list_name)
if tmp_len<=2:
return False
else:
x=list_name[1:-1]
if is_pri(x) or is_var(x):
return True
return False
# check_type return codes: 0 = var, 1 = pri, 2 = func, 3 = list, 4 = err
def check_type(x):
if is_pri(x):
return 1
if is_func(x):
return 2
if is_var(x):
return 0
if is_list(x):
return 3
return 4
def has_one_flag(x):
cnt=0
for i in x:
if i == '^':
cnt+=1
    return cnt == 1
def check_var(s):
if s[0].isalpha()==False:
return False
else:
for i in range(1,len(s),1):
if s[i].isalpha() == False and s[i].isdigit()==False:
return False
return True
def check_one_space(s):
s=s.replace('(',' ')
s=s.replace(')',' ')
s=s.lstrip().rstrip()
for x in s:
if x==' ':
return False
s_len=len(s)
if s_len<2:
return False
else:
if s[0]=='`':
return check_var(s[1:])
else:
return (s in ["int","real","str"])
def check_space(s):
s=s.replace('^',',')
s=s.replace('->',',')
s=s.replace('[',' ')
s=s.replace(']',' ')
split_result=s.split(',')
tmp_data=[]
for x in split_result:
if x.replace(" ", "")!="()":
tmp_data.append(x)
for x in tmp_data:
if check_one_space(x)==False:
return False
return True
def check_legal(x):
if has_one_flag(x)==False or check_space(x)==False:
return False
x = x.replace(" ","")
left_p,right_p=x.split('^')[0],x.split('^')[1]
if check_type(left_p)==4 or check_type(right_p)==4:
return False
return True
class my_parse_tree:
def __init__(self,x):
self.child_list=[]
self.return_type=None
self.value=None
self.my_type=None
self.forward_link=None
if x=="":
self.my_type=1
self.value=''
if check_type(x)==1:
self.my_type=1
self.value=x
elif check_type(x)==0:
self.my_type=0
self.value=x
elif check_type(x)==3:
self.my_type=3
self.value=my_parse_tree(x[1:-1])
else:
self.my_type=2
flag_index=x.rindex("->")
self.l_p=x[:flag_index]
self.r_p=x[flag_index+2:]
tmp_data=split_arg(self.l_p)
for item in tmp_data:
self.child_list.append(my_parse_tree(item))
self.return_type=my_parse_tree(self.r_p)
def tree_print(self):
if self.my_type==1 or self.my_type==0:
print(self.value,end="")
if self.my_type==3:
print('[',end="")
self.value.tree_print()
print(']',end="")
if self.my_type==2:
print('(',end='')
tmp_len=len(self.child_list)
if tmp_len>0:
for i in range(tmp_len-1):
self.child_list[i].tree_print()
print(',',end='')
self.child_list[tmp_len-1].tree_print()
print(')->',end='')
self.return_type.tree_print()
def tree_print_unify(self):
if self.my_type==1:
print(self.value,end="")
if self.my_type==0:
if self.value in type_dict:
print(type_dict[self.value],end="")
else:
print(self.value,end="")
if self.my_type==3:
print('[',end="")
self.value.tree_print()
print(']',end="")
if self.my_type==2:
print('(',end='')
tmp_len=len(self.child_list)
if tmp_len>0:
for i in range(tmp_len-1):
self.child_list[i].tree_print_unify()
print(',',end='')
self.child_list[tmp_len-1].tree_print_unify()
print(')->',end='')
self.return_type.tree_print_unify()
def check_recursive():
global bottom_flag
for x in type_dict:
if type_dict[x]==x:
bottom_flag=1
def my_parse(x):
x = x.replace(" ","")
left_p,right_p=x.split('^')[0],x.split('^')[1]
left_tree=my_parse_tree(left_p)
right_tree=my_parse_tree(right_p)
unify_tree(left_tree, right_tree)
dict_revised()
    check_recursive()
if bottom_flag==1:
print('BOTTOM',end="")
else:
left_tree.tree_print_unify()
def check_bottom(a,b):
global bottom_flag
if a.my_type==2 and b.my_type==2:
if len(a.child_list)!=len(b.child_list):
bottom_flag=1
if a.my_type==1:
if b.my_type==1:
if b.value!=a.value:
bottom_flag=1
else:
if b.my_type==2 or b.my_type==3:
bottom_flag=1
if a.my_type==0:
if a.value in type_dict:
tmp_tree=my_parse_tree(type_dict[a.value])
check_bottom(tmp_tree, b)
check_bottom(b, tmp_tree)
def unify_tree(a,b):
check_bottom(a, b)
check_bottom(b, a)
if bottom_flag!=1:
if a.my_type==0 :
if b.my_type!=3:
if a.value!=b.value:
type_dict[a.value]=b.value
else:
type_dict[a.value]='['+b.value.value+']'
elif b.my_type==0:
if a.my_type!=3:
if a.value!=b.value:
type_dict[b.value]=a.value
else:
type_dict[b.value]='['+a.value.value+']'
else:
tmp_len=len(a.child_list)
for i in range(tmp_len):
unify_tree(a.child_list[i], b.child_list[i])
if a.return_type!=None:
unify_tree(a.return_type, b.return_type)
def revise_item(x,y):
    global err_flag  # without this, err_flag=1 below would only create a local
    if is_pri(y):
pass
elif is_var(y):
if y in tmp_var:
err_flag=1
return False
else:
tmp_var.append(x)
if y in type_dict:
revise_item(y, type_dict[y])
type_dict[x]=type_dict[y]
tmp_var.pop()
elif is_list(y):
y_tmp=y[1:-1]
if y_tmp in type_dict:
revise_item(y_tmp, type_dict[y_tmp])
type_dict[x]='['+type_dict[y_tmp]+']'
else:
pass
else:
pass
return True
def dict_revised():
for x in type_dict:
y=type_dict[x]
revise_item(x,y)
if __name__ == "__main__":
while(1):
try:
x=input()
if x == "QUIT":
break
if check_legal(x):
my_parse(x)
print()
if bottom_flag==1:
break
else:
print("ERR")
break
except EOFError:
print("ERR")
break
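# Example session (behavior derived from the rules above):
#   input : `a ^ int
#   output: int
#   input : QUIT      (terminates the loop)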
|
# Copyright 2022 Zilliz. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Code inspired by https://pytorch.org/vision/stable/_modules/torchvision/models/mobilenetv2.html
# https://pytorch.org/vision/stable/_modules/torchvision/models/mobilenetv3.html
#
import os
try:
from fvcore.common.config import CfgNode as CN
except ModuleNotFoundError:
os.system('pip install fvcore')
from fvcore.common.config import CfgNode as CN
_C = CN()
_C.MODEL = CN()
###################
####UniFormerS8 ###
###################
_C.MODEL.UniFormerS8 = CN()
_C.MODEL.UniFormerS8.depth = (3, 4, 8, 3)
_C.MODEL.UniFormerS8.num_classes = 400
_C.MODEL.UniFormerS8.img_size = 256
_C.MODEL.UniFormerS8.in_chans = 3
_C.MODEL.UniFormerS8.embed_dim = (64, 128, 320, 512)
_C.MODEL.UniFormerS8.head_dim = 64
_C.MODEL.UniFormerS8.mlp_ratio = 4
_C.MODEL.UniFormerS8.qkv_bias = True
_C.MODEL.UniFormerS8.qk_scale = None
_C.MODEL.UniFormerS8.representation_size = None
_C.MODEL.UniFormerS8.drop_rate = 0
_C.MODEL.UniFormerS8.attn_drop_rate = 0
_C.MODEL.UniFormerS8.drop_path_rate = 0.1
_C.MODEL.UniFormerS8.split = False
_C.MODEL.UniFormerS8.std = False
_C.MODEL.UniFormerS8.use_checkpoint = True
_C.MODEL.UniFormerS8.checkpoint_num = (0, 0, 0, 0)
_C.MODEL.UniFormerS8.pretrain_name = 'uniformer_small_k400_8x8'
###################
####UniFormerS16 ###
###################
_C.MODEL.UniFormerS16 = CN()
_C.MODEL.UniFormerS16.depth = (3, 4, 8, 3)
_C.MODEL.UniFormerS16.num_classes = 400
_C.MODEL.UniFormerS16.img_size = 256
_C.MODEL.UniFormerS16.in_chans = 3
_C.MODEL.UniFormerS16.embed_dim = (64, 128, 320, 512)
_C.MODEL.UniFormerS16.head_dim = 64
_C.MODEL.UniFormerS16.mlp_ratio = 4
_C.MODEL.UniFormerS16.qkv_bias = True
_C.MODEL.UniFormerS16.qk_scale = None
_C.MODEL.UniFormerS16.representation_size = None
_C.MODEL.UniFormerS16.drop_rate = 0
_C.MODEL.UniFormerS16.attn_drop_rate = 0
_C.MODEL.UniFormerS16.drop_path_rate = 0.1
_C.MODEL.UniFormerS16.split = False
_C.MODEL.UniFormerS16.std = False
_C.MODEL.UniFormerS16.use_checkpoint = True
_C.MODEL.UniFormerS16.checkpoint_num = (0, 0, 0, 0)
_C.MODEL.UniFormerS16.pretrain_name = 'uniformer_small_k400_16x4'
###################
####UniFormerB8 ###
###################
_C.MODEL.UniFormerB8 = CN()
_C.MODEL.UniFormerB8.depth = (5, 8, 20, 7)
_C.MODEL.UniFormerB8.num_classes = 400
_C.MODEL.UniFormerB8.img_size = 256
_C.MODEL.UniFormerB8.in_chans = 3
_C.MODEL.UniFormerB8.embed_dim = (64, 128, 320, 512)
_C.MODEL.UniFormerB8.head_dim = 64
_C.MODEL.UniFormerB8.mlp_ratio = 4
_C.MODEL.UniFormerB8.qkv_bias = True
_C.MODEL.UniFormerB8.qk_scale = None
_C.MODEL.UniFormerB8.representation_size = None
_C.MODEL.UniFormerB8.drop_rate = 0
_C.MODEL.UniFormerB8.attn_drop_rate = 0
_C.MODEL.UniFormerB8.drop_path_rate = 0.1
_C.MODEL.UniFormerB8.split = False
_C.MODEL.UniFormerB8.std = False
_C.MODEL.UniFormerB8.use_checkpoint = True
_C.MODEL.UniFormerB8.checkpoint_num = (0, 0, 0, 0)
_C.MODEL.UniFormerB8.pretrain_name = 'uniformer_base_k400_8x8'
###################
####UniFormerB16###
###################
_C.MODEL.UniFormerB16 = CN()
_C.MODEL.UniFormerB16.depth = (5, 8, 20, 7)
_C.MODEL.UniFormerB16.num_classes = 400
_C.MODEL.UniFormerB16.img_size = 256
_C.MODEL.UniFormerB16.in_chans = 3
_C.MODEL.UniFormerB16.embed_dim = (64, 128, 320, 512)
_C.MODEL.UniFormerB16.head_dim = 64
_C.MODEL.UniFormerB16.mlp_ratio = 4
_C.MODEL.UniFormerB16.qkv_bias = True
_C.MODEL.UniFormerB16.qk_scale = None
_C.MODEL.UniFormerB16.representation_size = None
_C.MODEL.UniFormerB16.drop_rate = 0
_C.MODEL.UniFormerB16.attn_drop_rate = 0
_C.MODEL.UniFormerB16.drop_path_rate = 0.1
_C.MODEL.UniFormerB16.split = False
_C.MODEL.UniFormerB16.std = False
_C.MODEL.UniFormerB16.use_checkpoint = True
_C.MODEL.UniFormerB16.checkpoint_num = (0, 0, 0, 0)
_C.MODEL.UniFormerB16.pretrain_name = 'uniformer_base_k400_16x4'
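# A minimal usage sketch: fvcore's CfgNode is read via attribute access, and
# clone() returns a deep copy so the defaults above stay untouched. get_cfg()
# is a hypothetical helper, not part of the original module:
def get_cfg():
    # e.g. get_cfg().MODEL.UniFormerS8.depth == (3, 4, 8, 3)
    return _C.clone()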
|
// driver function
// to edit
|
import unittest
from unittest.mock import Mock, patch
from pywoo import Api
from pywoo.models.orders_notes import OrderNote
from .tools import mock_request
class TestOrderNote(unittest.TestCase):
@patch('pywoo.pywoo.requests.api.request', side_effect=mock_request)
def test_api_post(self, func):
api = Api('', 'fake_consumer_key', 'fake_consumer_secret')
obj = api.create_order_note(order_id=97)
assert type(obj) == OrderNote
@patch('pywoo.pywoo.requests.api.request', side_effect=mock_request)
def test_api_get(self, func):
api = Api('', 'fake_consumer_key', 'fake_consumer_secret')
obj = api.get_order_notes(order_id='97')
assert all(type(x) == OrderNote for x in obj)
obj = api.get_order_notes(order_id='97', id='108')
assert type(obj) == OrderNote and obj.id == 108
@patch('pywoo.pywoo.requests.api.request', side_effect=mock_request)
def test_api_delete(self, func):
api = Api('', 'fake_consumer_key', 'fake_consumer_secret')
obj = api.delete_order_note(order_id='97', id='108')
assert type(obj) == OrderNote and obj.id == 108
@patch('pywoo.pywoo.requests.api.request', side_effect=mock_request)
def test_classmethod_post(self, func):
api = Api('', 'fake_consumer_key', 'fake_consumer_secret')
obj = OrderNote.create_order_note(api, order_id='97')
assert type(obj) == OrderNote
@patch('pywoo.pywoo.requests.api.request', side_effect=mock_request)
def test_classmethod_get(self, func):
api = Api('', 'fake_consumer_key', 'fake_consumer_secret')
obj = OrderNote.get_order_notes(api, order_id='97')
assert all(type(x) == OrderNote for x in obj)
obj = OrderNote.get_order_notes(api, order_id='97', id='108')
assert type(obj) == OrderNote and obj.id == 108
@patch('pywoo.pywoo.requests.api.request', side_effect=mock_request)
def test_classmethod_delete(self, func):
api = Api('', 'fake_consumer_key', 'fake_consumer_secret')
obj = OrderNote.delete_order_note(api, order_id='97', id='108')
assert type(obj) == OrderNote and obj.id == 108
@patch('pywoo.pywoo.requests.api.request', side_effect=mock_request)
def test_object_delete(self, func):
api = Api('', 'fake_consumer_key', 'fake_consumer_secret')
obj = api.get_order_notes(order_id='97', id='108')
assert type(obj) == OrderNote and obj.id == 108
obj = obj.delete()
assert type(obj) == OrderNote and obj.id == 108
@patch('pywoo.pywoo.requests.api.request', side_effect=mock_request)
def test_object_refresh(self, func):
api = Api('', 'fake_consumer_key', 'fake_consumer_secret')
obj = api.get_order_notes(order_id='97', id='108')
assert type(obj) == OrderNote and obj.id == 108
obj.refresh()
assert type(obj) == OrderNote and obj.id == 108
|
#################################################################
# collisionWindow.py
# Written by Yi-Hong Lin, yihhongl@andrew.cmu.edu, 2004
#################################################################
# Import Tkinter, Pmw, and the floater code from this directory tree.
from direct.tkwidgets.AppShell import *
from direct.showbase.TkGlobal import *
from tkSimpleDialog import askfloat
import string
import math
import types
from direct.task import Task
FRAMES = 0
SECONDS = 1
#####################################################################################
# BlendAnimPanel(AppShell)
# This panel allows the user to blend two animations
# that have already been loaded for this actor.
# The user can play and manipulate the blended animation
# just like in the animation panel, and can also save it.
#####################################################################################
class BlendAnimPanel(AppShell):
# Override class variables
appname = 'Blend Anim Panel'
frameWidth = 575
frameHeight = 450
usecommandarea = 0
usestatusarea = 0
index = 0
dragMode = False
blendRatio = 0
rateList= ['1/24.0', '0.1', '0.5', '1.0', '2.0', '5.0' , '10.0']
enableBlend = False
currentBlendName = None
def __init__(self, aNode = None, blendDict={}, parent = None, **kw):
INITOPT = Pmw.INITOPT
self.id = 'BlendAnimPanel '+ aNode.getName()
self.appname = self.id
self.actorNode = aNode
self.blendDict = blendDict.copy()
if len(blendDict)>0:
self.blendList = blendDict.keys()
else:
self.blendList = []
optiondefs = (
('title', self.appname, None),
('actor', aNode, None),
('animList', [], None),
('blendAnimList', self.blendList, None),
)
self.defineoptions(kw, optiondefs)
self.id = 'Blend AnimPanel '+ aNode.getName()
self.nodeName = aNode.getName()
# Initialize the superclass
AppShell.__init__(self)
# Execute option callbacks
self.initialiseoptions(BlendAnimPanel)
self.currTime = 0.0
self.animNameA = None
self.animNameB = None
self.parent.resizable(False,False) ## Disable the ability to resize for this Window.
def createInterface(self):
# Handle to the toplevels interior
interior = self.interior()
self.menuBar.destroy()
# show the actor's name
actorFrame = Frame(interior)
name_label = Label(actorFrame, text= self.nodeName,font=('MSSansSerif', 14),
relief = SUNKEN, borderwidth=3)
name_label.pack(side = TOP, expand = False)
actorFrame.pack(side = TOP, expand = False, fill = X)
        # Create a frame listing any pre-blended animations, with Save, Remove, and Rename buttons.
group = Pmw.Group(interior, tag_pyclass=None)
actorFrame = group.interior()
group.pack(side = TOP, expand = False, fill = X)
Label(actorFrame, text= "Blended:", font=('MSSansSerif', 10)).pack(side=LEFT)
self.blendAnimEntry = self.createcomponent(
'Blended Animation', (), None,
Pmw.ComboBox, (actorFrame,),
labelpos = W, entry_width = 20, selectioncommand = self.setBlendAnim,
scrolledlist_items = self['blendAnimList'])
self.blendAnimEntry.pack(side=LEFT)
Label(actorFrame, text= " ", font=('MSSansSerif', 10)).pack(side=LEFT)
button = Button(actorFrame, text="Save", font=('MSSansSerif', 10),width = 12,
command = self.saveButtonPushed).pack(side=LEFT)
button = Button(actorFrame, text="Remove", font=('MSSansSerif', 10),width = 12,
command = self.removeButtonPushed).pack(side=LEFT)
button = Button(actorFrame, text="Rename", font=('MSSansSerif', 10),width = 12,
command = self.renameButtonPushed).pack(side=LEFT)
actorFrame.pack(side = TOP, expand = False, fill = X)
        # Create a frame to hold all the animation settings
group = Pmw.Group(interior, tag_pyclass=None)
actorFrame = group.interior()
group.pack(side = TOP, expand = False, fill = X)
Label(actorFrame, text= "Animation A:", font=('MSSansSerif', 10)).pack(side=LEFT)
self['animList'] = self['actor'].getAnimNames()
self.AnimEntryA = self.createcomponent(
'AnimationMenuA', (), None,
Pmw.ComboBox, (actorFrame,),
labelpos = W, entry_width = 20, entry_state = DISABLED,
selectioncommand = lambda name, a = 'a' : self.setAnimation(name, AB=a),
scrolledlist_items = self['animList'])
self.AnimEntryA.pack(side=LEFT)
Label(actorFrame, text= " ", font=('MSSansSerif', 10)).pack(side=LEFT,)
Label(actorFrame, text= "Animation B:", font=('MSSansSerif', 10)).pack(side=LEFT)
self['animList'] = self['actor'].getAnimNames()
self.AnimEntryB = self.createcomponent(
'AnimationMenuB', (), None,
Pmw.ComboBox, (actorFrame,),
labelpos = W, entry_width = 20, entry_state = DISABLED,
selectioncommand = lambda name, a = 'b' : self.setAnimation(name, AB=a),
scrolledlist_items = self['animList'])
self.AnimEntryB.pack(side=LEFT)
actorFrame.pack(side = TOP, expand = False, fill = X)
### Blend Enable checkbox
actorFrame = Frame(interior, relief = SUNKEN, bd = 1)
Label(actorFrame, text= "Enable Blending:", font=('MSSansSerif', 10)).pack(side=LEFT,)
self.blendVar = IntVar()
self.blendVar.set(0)
self.blendButton = self.createcomponent(
'blendButton', (), None,
Checkbutton, (actorFrame,),
variable = self.blendVar,
command = self.toggleBlend)
self.blendButton.pack(side=LEFT)
actorFrame.pack(side = TOP, expand = False, fill = X)
## Ratio control
actorFrame = Frame(interior)
frameFrame = Frame(actorFrame, relief = SUNKEN, bd = 1)
minRatioLabel = self.createcomponent(
'minRatioLabel', (), 'sLabel',
Label, (frameFrame,),
text = 0.00)
minRatioLabel.pack(side = LEFT)
self.ratioControl = self.createcomponent(
'ratio', (), None,
Scale, (frameFrame,),
from_ = 0.0, to = 1.0, resolution = 0.01,
command = self.setRatio, length = 500,
orient = HORIZONTAL, showvalue = 1)
self.ratioControl.pack(side = LEFT, expand = 1)
self.ratioControl.set(1.0)
self.maxRatioLabel = self.createcomponent(
'maxRatioLabel', (), 'sLabel',
Label, (frameFrame,),
text = 1.00)
self.maxRatioLabel.pack(side = LEFT)
frameFrame.pack(side = LEFT, expand = 1, fill = X)
actorFrame.pack(side = TOP, expand = True, fill = X)
###################################################################################
###################################################################################
actorFrame = Frame(interior)
Label(actorFrame, text= "Play Rate:", font=('MSSansSerif', 10)).pack(side=LEFT)
self.playRateEntry = self.createcomponent(
'playRateMenu', (), None,
Pmw.ComboBox, (actorFrame,),
labelpos = W, entry_width = 20, selectioncommand = self.setPlayRate,
scrolledlist_items = self.rateList)
self.playRateEntry.pack(side=LEFT)
self.playRateEntry.selectitem('1.0')
### Loop checkbox
Label(actorFrame, text= " ", font=('MSSansSerif', 10)).pack(side=LEFT,)
Label(actorFrame, text= "Loop:", font=('MSSansSerif', 10)).pack(side=LEFT,)
self.loopVar = IntVar()
self.loopVar.set(0)
self.loopButton = self.createcomponent(
'loopButton', (), None,
Checkbutton, (actorFrame,),
variable = self.loopVar)
self.loopButton.pack(side=LEFT)
actorFrame.pack(side = TOP, expand = True, fill = X)
### Display Frames/Seconds
actorFrame = Frame(interior)
Label(actorFrame, text= "Frame/Second:", font=('MSSansSerif', 10)).pack(side=LEFT)
self.unitsVar = IntVar()
self.unitsVar.set(FRAMES)
self.displayButton = self.createcomponent(
'displayButton', (), None,
Checkbutton, (actorFrame,),
command = self.updateDisplay,
variable = self.unitsVar)
self.displayButton.pack(side=LEFT)
actorFrame.pack(side = TOP, expand = True, fill = X)
## scale control
actorFrame = Frame(interior)
frameFrame = Frame(actorFrame, relief = SUNKEN, bd = 1)
self.minLabel = self.createcomponent(
'minLabel', (), 'sLabel',
Label, (frameFrame,),
text = 0)
self.minLabel.pack(side = LEFT)
self.frameControl = self.createcomponent(
'scale', (), None,
Scale, (frameFrame,),
from_ = 0, to = 24, resolution = 1.0,
command = self.goTo, length = 500,
orient = HORIZONTAL, showvalue = 1)
self.frameControl.pack(side = LEFT, expand = 1)
self.frameControl.bind('<Button-1>', self.onPress)
self.frameControl.bind('<ButtonRelease-1>', self.onRelease)
self.maxLabel = self.createcomponent(
'maxLabel', (), 'sLabel',
Label, (frameFrame,),
text = 24)
self.maxLabel.pack(side = LEFT)
frameFrame.pack(side = LEFT, expand = 1, fill = X)
actorFrame.pack(side = TOP, expand = True, fill = X)
        ## button control
actorFrame = Frame(interior)
ButtomFrame = Frame(actorFrame, relief = SUNKEN, bd = 1,borderwidth=5)
self.toStartButton = self.createcomponent(
'toStart', (), None,
Button, (ButtomFrame,),
text = '<<',
width = 8,
command = self.resetAllToZero)
self.toStartButton.pack(side = LEFT, expand = 1, fill = X)
self.playButton = self.createcomponent(
'playButton', (), None,
Button, (ButtomFrame,),
text = 'Play', width = 8,
command = self.play)
self.playButton.pack(side = LEFT, expand = 1, fill = X)
self.stopButton = self.createcomponent(
'stopButton', (), None,
Button, (ButtomFrame,),
text = 'Stop', width = 8, state=DISABLED,
command = self.stop)
self.stopButton.pack(side = LEFT, expand = 1, fill = X)
self.toEndButton = self.createcomponent(
'toEnd', (), None,
Button, (ButtomFrame,),
text = '>>',
width = 8,
command = self.resetAllToEnd)
self.toEndButton.pack(side = LEFT, expand = 1, fill = X)
ButtomFrame.pack(side = TOP, expand = True, fill = X)
actorFrame.pack(expand = 1, fill = BOTH)
def updateList(self):
#################################################################
# updateList(self)
# This will reset the list of all animations that this actor has
# to the animation entry A and B.
#################################################################
self['animList'] = self['actor'].getAnimNames()
animL = self['actor'].getAnimNames()
self.AnimEntryA.setlist(animL)
self.AnimEntryB.setlist(animL)
def play(self):
#################################################################
# play(self)
        # It works much like the play function in the Animation Panel.
        # The only difference is that we set two poses here.
        # When blending animations via pose(), the two poses do not
        # have to be set simultaneously.
#################################################################
self.animNameA = self.AnimEntryA.get()
self.animNameB = self.AnimEntryB.get()
if (self.animNameA in self['animList'])and(self.animNameB in self['animList']):
self.playButton.config(state=DISABLED)
self.lastT = globalClock.getFrameTime()
taskMgr.add(self.playTask, self.id + '_UpdateTask')
self.stopButton.config(state=NORMAL)
else:
            print '----Illegal Animation name!!', self.animNameA + ', ' + self.animNameB
return
def playTask(self, task):
#################################################################
# playTask(self, task)
# see play(self)
#################################################################
fLoop = self.loopVar.get()
currT = globalClock.getFrameTime()
deltaT = currT - self.lastT
self.lastT = currT
if self.dragMode:
return Task.cont
self.currTime = self.currTime + deltaT
if (self.currTime > self.maxSeconds):
if fLoop:
self.currTime = self.currTime%self.duration
self.gotoT(self.currTime)
else:
self.currTime = 0.0
self.gotoT(0.0)
self.playButton.config(state=NORMAL)
self.stopButton.config(state=DISABLED)
return Task.done
else:
self.gotoT(self.currTime)
return Task.cont
def stop(self):
#################################################################
# stop(self)
# see play(self)
#################################################################
taskMgr.remove(self.id + '_UpdateTask')
self.playButton.config(state=NORMAL)
self.stopButton.config(state=DISABLED)
return
def setAnimation(self, animation, AB = 'a'):
#################################################################
# setAnimation(self, animation, AB = 'a')
# see play(self)
#################################################################
if AB == 'a':
if self.animNameA != None:
self['actor'].setControlEffect(self.animNameA, 1.0, 'modelRoot','lodRoot')
self.animNameA = self.AnimEntryA.get()
else:
if self.animNameB != None:
self['actor'].setControlEffect(self.animNameB, 1.0, 'modelRoot','lodRoot')
self.animNameB = self.AnimEntryB.get()
self.currTime = 0.0
self.frameControl.set(0)
self.updateDisplay()
self.setRatio(self.blendRatio)
return
def setPlayRate(self,rate):
#################################################################
# setPlayRate(self,rate)
# see play(self)
#################################################################
self.animNameA = self.AnimEntryA.get()
if self.animNameA in self['animList']:
self['actor'].setPlayRate(eval(rate), self.animNameA)
self.updateDisplay()
if self.animNameB in self['animList']:
self['actor'].setPlayRate(eval(rate), self.animNameB)
self.updateDisplay()
return
def updateDisplay(self):
#################################################################
# updateDisplay(self)
# see play(self)
#################################################################
if not (self.animNameA in self['animList']):
return
self.fps = self['actor'].getFrameRate(self.animNameA)
self.duration = self['actor'].getDuration(self.animNameA)
self.maxFrame = self['actor'].getNumFrames(self.animNameA) - 1
if not (self.animNameB in self['animList']):
return
if self.duration > self['actor'].getDuration(self.animNameB):
self.duration = self['actor'].getDuration(self.animNameB)
if self.maxFrame > self['actor'].getNumFrames(self.animNameB) - 1:
self.maxFrame = self['actor'].getNumFrames(self.animNameB) - 1
self.maxSeconds = self.duration
if self.unitsVar.get() == FRAMES:
fromFrame = 0
toFrame = self.maxFrame
self.minLabel['text'] = fromFrame
self.maxLabel['text'] = toFrame
self.frameControl.configure(from_ = fromFrame,
to = toFrame,
resolution = 1.0)
else:
self.minLabel['text'] = '0.0'
self.maxLabel['text'] = "%.2f" % self.duration
self.frameControl.configure(from_ = 0.0,
to = self.duration,
resolution = 0.01)
def gotoT(self,time):
#################################################################
# gotoT(self,time)
# see play(self)
#################################################################
if self.unitsVar.get() == FRAMES:
self.frameControl.set(time * self.fps)
else:
self.frameControl.set(time)
return
def goTo(self,frame):
#################################################################
# goTo(self,frame)
# see play(self)
#################################################################
if (self.animNameA in self['animList'])and(self.animNameB in self['animList']):
# Convert scale value to float
frame = string.atof(frame)
# Now convert t to seconds for offset calculations
if self.unitsVar.get() == FRAMES:
frame = frame / self.fps
if self.dragMode:
self.currTime = frame
self['actor'].pose(self.animNameA,
min(self.maxFrame, int(frame * self.fps)))
self['actor'].pose(self.animNameB,
min(self.maxFrame, int(frame * self.fps)))
return
def onRelease(self,frame):
#################################################################
# onRelease(self,frame)
# see play(self)
#################################################################
self.dragMode = False
return
def onPress(self,frame):
#################################################################
# onPress(self,frame)
# see play(self)
#################################################################
self.dragMode = True
return
def resetAllToZero(self):
#################################################################
# resetAllToZero(self)
# see play(self)
#################################################################
self.currTime = 0.0
self.gotoT(0)
return
def resetAllToEnd(self):
#################################################################
# resetAllToEnd(self)
# see play(self)
#################################################################
self.currTime = self.maxSeconds
self.gotoT(self.duration)
return
def toggleBlend(self):
#################################################################
# toggleBlend(self)
        # This function enables the blending option for the actor
        # and calls setRatio to mix the blended animations at the
        # current ratio.
        #
        # This blending setting is not kept when you close the window!
#
#################################################################
if self.blendVar.get():
self.enableBlend = True
self['actor'].enableBlend()
self.setRatio(self.blendRatio)
else:
self.enableBlend = False
self['actor'].disableBlend()
return
def setRatio(self, ratio):
#################################################################
# setRatio(self, ratio)
        # Callback function.
        # Called each time the user drags the blend ratio slider on
        # the panel. It sets the blending ratio for both animations
        # (via "setControlEffect").
#################################################################
self.blendRatio = float(ratio)
if self.enableBlend:
if self.animNameA in self['animList']:
self['actor'].setControlEffect(self.animNameA, self.blendRatio, 'modelRoot','lodRoot')
if self.animNameB in self['animList']:
self['actor'].setControlEffect(self.animNameB, 1-self.blendRatio, 'modelRoot','lodRoot')
return
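    #################################################################
    # Informal note: with blending enabled, Panda3D weights the two
    # poses by the control effects set above, so the displayed pose
    # is roughly ratio * poseA + (1 - ratio) * poseB.
    #################################################################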
def setBlendAnim(self, name):
#################################################################
# setBlendAnim(self, name)
        # This function is called each time the user selects an
        # existing blended animation from the comboBox on the panel.
        # It resets every variable on the panel to what it should be.
        # For example, when the user chooses blended anim "R,"
        # which was blended from anims "a" and "b" with ratio "c,"
        # this function sets Animation A to "a", Animation B to "b",
        # and moves the ratio slider to position "c".
#################################################################
if self.blendDict.has_key(name):
self.currentBlendName = name
animA = self.blendDict[name][0]
animB = self.blendDict[name][1]
ratio = self.blendDict[name][2]
self.AnimEntryA.selectitem(animA)
self.AnimEntryB.selectitem(animB)
self.setAnimation(animA, AB = 'a')
self.setAnimation(animB, AB = 'b')
self.ratioControl.set(ratio)
return
def setBlendAnimList(self, dict, select=False):
#################################################################
# setBlendAnimList(self, dict, select=False)
        # This function is called when we need to reset the dropdown
        # list of "Blend Anim."
        # The "select" option is mainly used when we remove a blended
        # animation from the actor. When set to True, the function will
        # not only reset the list but also automatically select the item
        # at the top of the list, if the list is not empty.
#################################################################
self.blendDict.clear()
del self.blendDict
self.blendDict = dict.copy()
if len(self.blendDict)>0:
self.blendList = self.blendDict.keys()
else:
self.blendList = []
self.blendAnimEntry.setlist(self.blendList)
if select:
if len(self.blendList)>0:
self.blendAnimEntry.selectitem(self.blendList[0])
self.setBlendAnim(self.blendList[0])
self.currentBlendName = self.blendList[0]
else:
self.blendAnimEntry.clear()
self.currentBlendName = None
return
def saveButtonPushed(self):
#################################################################
# saveButtonPushed(self)
        # This function is called when the user clicks the "Save" button.
        # It collects all data on the panel and sends it with a message
        # to sceneEditor to save the current blended animation
        # into the dataHolder.
#################################################################
name = self.blendAnimEntry.get()
if name=='':
Pmw.MessageDialog(None, title='Caution!',
message_text = 'You have to give the blending animation a name first!',
iconpos='s',
defaultbutton = 'Close'
)
return
elif (not(self.animNameA in self['animList']))or(not(self.animNameB in self['animList'])):
Pmw.MessageDialog(None, title='Caution!',
                              message_text = 'The animations you have selected do not exist!',
iconpos='s',
defaultbutton = 'Close'
)
return
else:
messenger.send('BAW_saveBlendAnim', [self['actor'].getName(),
name,
self.animNameA,
self.animNameB,
self.blendRatio])
self.currentBlendName = name
return
def removeButtonPushed(self):
#################################################################
# removeButtonPushed(self)
        # Removes the currently selected blended animation from the actor.
        # This sends a message to sceneEditor to delete the data inside
        # the dataHolder and then resets the list here.
#################################################################
name = self.blendAnimEntry.get()
messenger.send('BAW_removeBlendAnim', [self['actor'].getName(),name])
return
def renameButtonPushed(self):
#################################################################
# renameButtonPushed(self)
        # This function is called when the user clicks the "Rename" button.
        # It collects all data on the panel and sends it out with a
        # message to sceneEditor to rename and re-save all settings for
        # the current animation.
#################################################################
oName = self.currentBlendName
name = self.blendAnimEntry.get()
if self.currentBlendName == None:
Pmw.MessageDialog(None, title='Caution!',
message_text = "You haven't select any blended animation!!",
iconpos='s',
defaultbutton = 'Close'
)
return
elif name=='':
Pmw.MessageDialog(None, title='Caution!',
message_text = 'You have to give the blending animation a name first!',
iconpos='s',
defaultbutton = 'Close'
)
return
elif (not(self.animNameA in self['animList']))or(not(self.animNameB in self['animList'])):
Pmw.MessageDialog(None, title='Caution!',
                              message_text = 'The animations you have selected do not exist!',
iconpos='s',
defaultbutton = 'Close'
)
return
else:
messenger.send('BAW_renameBlendAnim', [self['actor'].getName(),
name,
oName,
self.animNameA,
self.animNameB,
self.blendRatio]
)
self.currentBlendName = name
return
def onDestroy(self, event):
#################################################################
# onDestroy(self, event)
        # This function is called when the user closes the window.
        # Here we stop all tasks we have started and disable the
        # actor's blend setting.
        # If we didn't disable the blend option, playing an animation
        # via the animation panel afterwards would raise errors.
#################################################################
if taskMgr.hasTaskNamed(self.id + '_UpdateTask'):
taskMgr.remove(self.id + '_UpdateTask')
messenger.send('BAW_close',[self.nodeName])
self.actorNode.setControlEffect(self.animNameA, 1.0, 'modelRoot','lodRoot')
self.actorNode.setControlEffect(self.animNameB, 1.0, 'modelRoot','lodRoot')
self.actorNode.disableBlend()
'''
        If you have opened anything else, clean it up here!
'''
pass
|
# -*- encoding: utf-8 -*-
import logging
import time
from django.apps.config import AppConfig
from django.db import utils as django_db_utils
from django.db.backends.base import base as django_db_base
from django.dispatch import Signal
from typing import Union, Tuple, Callable, List # noqa. flake8 #118
_log = logging.getLogger(__name__)
default_app_config = 'django_dbconn_retry.DjangoIntegration'
pre_reconnect = Signal(providing_args=["dbwrapper"])
post_reconnect = Signal(providing_args=["dbwrapper", "retry_count"])
_operror_types = () # type: Union[Tuple[type], Tuple]
_operror_types += (django_db_utils.OperationalError,)
try:
import psycopg2
except ImportError:
pass
else:
_operror_types += (psycopg2.OperationalError,)
try:
import sqlite3
except ImportError:
pass
else:
_operror_types += (sqlite3.OperationalError,)
try:
import MySQLdb
except ImportError:
pass
else:
_operror_types += (MySQLdb.OperationalError,)
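# Any DB-API 2.0 compliant driver module exposes an OperationalError, so
# further backends could be registered the same way (cx_Oracle is shown
# only as an illustration, not a driver this module claims to support):
# try:
#     import cx_Oracle
# except ImportError:
#     pass
# else:
#     _operror_types += (cx_Oracle.OperationalError,)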
def monkeypatch_django() -> None:
def ensure_connection_with_retries(self: django_db_base.BaseDatabaseWrapper) -> None:
if self.connection is not None and hasattr(self.connection, 'closed') and self.connection.closed:
_log.debug("failed connection detected")
self.connection = None
if self.connection is None and not hasattr(self, '_in_connecting'):
with self.wrap_database_errors:
try:
self._in_connecting = True
self.connect()
except Exception as e:
if isinstance(e, _operror_types):
if not hasattr(self, "_connection_retries"):
self._connection_retries = 0
if self._connection_retries >= self.settings_dict.get("DBCONN_RETRY_MAX_RETRIES", 0):
_log.error("Reconnecting to the database didn't help %s", str(e))
del self._in_connecting
post_reconnect.send(self.__class__, dbwrapper=self,
retry_count=self._connection_retries)
raise
else:
if self._connection_retries > 0:
retry_delay = self.settings_dict.get("DBCONN_RETRY_SUBSEQUENT_RETRY_DELAY", None)
if retry_delay:
time.sleep(retry_delay)
_log.info("Database connection failed. Refreshing...")
# mark the retry
self._connection_retries += 1
# ensure that we retry the connection. Sometimes .closed isn't set correctly.
self.connection = None
del self._in_connecting
retry_count = self._connection_retries
# give libraries like 12factor-vault the chance to update the credentials
pre_reconnect.send(self.__class__, dbwrapper=self)
self.ensure_connection()
post_reconnect.send(self.__class__, dbwrapper=self, retry_count=retry_count)
else:
_log.debug("Database connection failed, but not due to a known error for dbconn_retry %s",
str(e))
del self._in_connecting
raise
else:
# connection successful, reset the flag
self._connection_retries = 0
del self._in_connecting
_log.debug("django_dbconn_retry: monkeypatching BaseDatabaseWrapper")
django_db_base.BaseDatabaseWrapper.ensure_connection = ensure_connection_with_retries
class DjangoIntegration(AppConfig):
name = "django_dbconn_retry"
def ready(self) -> None:
monkeypatch_django()
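# A minimal sketch of hooking the signals defined above (the receiver and
# fetch_current_password are hypothetical; the signals, their arguments and
# the DBCONN_RETRY_* settings keys all come from this module):
#
#   from django_dbconn_retry import pre_reconnect
#
#   def refresh_credentials(sender, dbwrapper, **kwargs):
#       # e.g. re-read short-lived credentials before Django reconnects
#       dbwrapper.settings_dict['PASSWORD'] = fetch_current_password()
#
#   pre_reconnect.connect(refresh_credentials)
#
# Retry behavior is read per database from settings_dict, e.g. in
# settings.DATABASES['default']:
#   'DBCONN_RETRY_MAX_RETRIES': 3,
#   'DBCONN_RETRY_SUBSEQUENT_RETRY_DELAY': 1.0,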
|
"""Identify files to be moved to their final destination directories"""
import logging
import re
import shutil
from pathlib import Path
from osa.configs import options
from osa.configs.config import cfg
from osa.paths import destination_dir
from osa.utils.logging import myLogger
from osa.veto import set_closed_sequence
__all__ = [
"register_files",
"register_run_concept_files",
"register_found_pattern",
"register_non_existing_file"
]
log = myLogger(logging.getLogger(__name__))
def register_files(run_str, analysis_dir, prefix, suffix, output_dir) -> None:
"""
    Move files into their final data directory destination and register
    them in the DB (to be implemented).
Parameters
----------
run_str: str
Run number
analysis_dir: pathlib.Path
analysis directory
    prefix: str
        prefix of the data file
    suffix: str
        suffix of the data file
    output_dir: pathlib.Path
        final data directory
"""
file_list = analysis_dir.rglob(f"{prefix}*{run_str}*{suffix}")
for input_file in file_list:
output_file = output_dir / input_file.name
if not output_file.exists():
log.debug(f"Moving file {input_file} to {output_dir}")
shutil.move(input_file, output_file)
# Keep DL1 and muons symlink in running_analysis
create_symlinks(input_file, output_file, prefix, suffix)
def create_symlinks(input_file, output_file, prefix, suffix):
"""
Keep DL1 and muons symlink in running_analysis for possible future re-use.
DL1 symlink is also kept in the DL1ab subdirectory to be able to process
up to DL2 later on.
"""
analysis_dir = Path(options.directory)
dl1ab_dir = analysis_dir / options.dl1_prod_id
if prefix == "dl1_LST-1" and suffix == ".h5":
dl1_filepath_analysis_dir = analysis_dir / input_file.name
dl1_filepath_dl1_dir = dl1ab_dir / input_file.name
        # Remove the original pre-DL1ab-stage DL1 files and keep only symlinks
if (
dl1_filepath_analysis_dir.is_file()
and not dl1_filepath_analysis_dir.is_symlink()
):
dl1_filepath_analysis_dir.unlink()
if not dl1_filepath_analysis_dir.is_symlink():
dl1_filepath_analysis_dir.symlink_to(output_file.resolve())
# Also set the symlink in the DL1ab subdirectory
if not dl1_filepath_dl1_dir.is_symlink():
dl1_filepath_dl1_dir.symlink_to(output_file.resolve())
if prefix == "muons_LST-1" and suffix == ".fits":
input_file.symlink_to(output_file.resolve())
def register_run_concept_files(run_string, concept):
"""
Prepare files to be moved to final destination directories
from the running_analysis original directory.
Parameters
----------
run_string: str
concept: str
"""
initial_dir = Path(options.directory)
if concept == "DL2":
initial_dir = initial_dir / options.dl2_prod_id
elif concept in ["DL1AB", "DATACHECK"]:
initial_dir = initial_dir / options.dl1_prod_id
output_dir = destination_dir(concept, create_dir=False)
data_level = cfg.get("PATTERN", concept + "TYPE")
prefix = cfg.get("PATTERN", concept + "PREFIX")
suffix = cfg.get("PATTERN", concept + "SUFFIX")
log.debug(f"Registering {data_level} file for {prefix}*{run_string}*{suffix}")
if concept in [
"DL1AB", "DATACHECK", "PEDESTAL", "CALIB", "TIMECALIB", "MUON", "DL2"
]:
register_files(run_string, initial_dir, prefix, suffix, output_dir)
else:
log.warning(f"Concept {concept} not known")
def register_found_pattern(
file_path: Path,
seq_list: list,
concept: str,
destination_path: Path
):
"""
Parameters
----------
file_path: pathlib.Path
seq_list: list
concept: str
destination_path: pathlib.Path
"""
new_dst = destination_path / file_path.name
log.debug(f"New file path {new_dst}")
if not options.simulate:
if new_dst.exists():
log.debug("Destination file already exists")
else:
log.debug(f"Destination file {new_dst} does not exists")
register_non_existing_file(file_path, concept, seq_list)
# Return filepath already registered to be deleted from the set of all files
return file_path
def register_non_existing_file(file_path, concept, seq_list):
"""
Parameters
----------
file_path: pathlib.Path
concept: str
seq_list: list
"""
for sequence in seq_list:
if sequence.type == "DATA":
run_str_found = re.search(sequence.run_str, str(file_path))
if run_str_found is not None:
log.debug(f"Registering file {run_str_found}")
register_run_concept_files(sequence.run_str, concept)
if options.seqtoclose is None and not file_path.exists():
log.debug("File does not exists")
elif sequence.type in ["PEDCALIB", "DRS4"]:
calib_run_str_found = re.search(str(sequence.run), str(file_path))
drs4_run_str_found = re.search(str(sequence.previousrun), str(file_path))
if calib_run_str_found is not None:
log.debug(f"Registering file {calib_run_str_found}")
register_run_concept_files(str(sequence.run), concept)
if options.seqtoclose is None and not file_path.exists():
log.debug("File does not exists")
if drs4_run_str_found is not None:
log.debug(f"Registering file {drs4_run_str_found}")
register_run_concept_files(str(sequence.previousrun), concept)
if options.seqtoclose is None and not file_path.exists():
log.debug("File does not exists")
set_closed_sequence(sequence)
|
import ast
import os
import sys
from pymake3 import report
from pymake3.cli import info
from pymake3.core import makeconf
# Make configuration specified on command-line.
conf = None
# Indicates whether colors should be disabled when printing to stdout.
disable_color = False
# Whether warnings should be disabled.
disable_warnings = False
def option_conf(value):
global conf
conf = makeconf.from_dict(ast.literal_eval(value))
def option_version(value):
from . import __version__
println("pymake3 v{}", __version__)
|
from django.conf.urls import include, url
from django_viewset import URLView
from .backend import BaseBackend
from .views.auth import LoginView, LogoutView
from .views.index import IndexView
class SiteInlineBackends(object):
def __init__(self, site_backend):
self.site_backend = site_backend
def __getitem__(self, key):
try:
return self.site_backend.find(id=key, registry='inline')
except ValueError as e:
raise KeyError(e.args[0])
class SiteBackend(BaseBackend):
login = URLView(r'^login/$', LoginView)
logout = URLView(r'^logout/$', LogoutView)
index = URLView(r'^(?:(?P<site>[0-9]+)/(?P<language>[a-zA-Z_-]+)/)?$', IndexView)
app_name = 'django_backend'
@property
def inline_backends(self):
return SiteInlineBackends(self)
def get_urlname_prefix(self):
return None
def get_children_urls(self):
base_urls = super(SiteBackend, self).get_children_urls()
        # Nest all children under the site/language URL prefix.
return [
url(r'^(?P<site>[0-9]+)/(?P<language>[a-zA-Z_-]+)/', include(base_urls))
]
|
import numpy as np
import tikreg.utils as tikutils
def test_determinant_normalizer():
mat = np.random.randn(100,100)
mat = np.dot(mat.T, mat)
det = np.linalg.det(mat)
det_norm = det**(1.0/100.0)
ndet = np.linalg.det(mat / det_norm)
pdet = np.linalg.det(mat/tikutils.determinant_normalizer(mat))
assert np.allclose(tikutils.determinant_normalizer(mat), det_norm)
assert np.allclose(ndet, 1.0)
assert np.allclose(pdet, 1.0)
def test_fast_indexing():
D = np.random.randn(1000, 1000)
rows = np.random.randint(0, 1000, (400))
cols = np.random.randint(0, 1000, (400))
a = tikutils.fast_indexing(D, rows, cols)
b = D[rows, :][:, cols]
assert np.allclose(a, b)
a = tikutils.fast_indexing(D, rows)
b = D[rows, :]
assert np.allclose(a, b)
a = tikutils.fast_indexing(D.T, cols).T
b = D[:, cols]
assert np.allclose(a, b)
rows = np.random.randint(0, 1000, (127))
cols = np.random.randint(0, 1000, (151))
a = tikutils.fast_indexing(D, rows, cols)
b = D[rows, :][:, cols]
assert np.allclose(a, b)
def test_generators(N=100, testpct=0.2, nchunks=5, nfolds=5):
ntest = int(N*(1./nfolds))
ntrain = N - ntest
alltrn = []
folds = tikutils.generate_trnval_folds(N, 'cv', testpct=testpct,
nfolds=nfolds, nchunks=nchunks)
for idx, (trn, val) in enumerate(list(folds)):
# none of the trn is in the val
assert np.in1d(trn, val).sum() == 0
assert np.in1d(val, trn).sum() == 0
assert len(np.unique(np.r_[val, trn])) == N
assert ntrain + nchunks >= len(trn) >= ntrain - nchunks
ntest = int(N*testpct)
ntrain = int(np.ceil(N - ntest))
remainder = np.mod(ntrain, nchunks)
nfolds = 10
folds = tikutils.generate_trnval_folds(N, 'nbb', nfolds=nfolds,
testpct=testpct, nchunks=nchunks)
for idx, (trn, val) in enumerate(list(folds)):
# none of the trn is in the val
assert np.in1d(trn, val).sum() == 0
assert np.in1d(val, trn).sum() == 0
assert (len(trn) == ntrain - remainder) or (len(trn) == ntrain - nchunks)
assert idx+1 == nfolds
nfolds = 100
folds = tikutils.generate_trnval_folds(N, 'mbb', nfolds=nfolds,
testpct=testpct, nchunks=nchunks)
for idx, (trn, val) in enumerate(list(folds)):
# none of the trn is in the val
assert np.in1d(trn, val).sum() == 0
assert np.in1d(val, trn).sum() == 0
assert len(trn) == (ntrain - remainder) or (len(trn) == ntrain - nchunks)
assert idx+1 == nfolds
def test_noise_ceiling_correction():
# Based on Schoppe, et al. (2016)
# Author's Sample MATLAB code
# https://github.com/OSchoppe/CCnorm/blob/master/calc_CCnorm.m
from scipy.stats import zscore
signal = np.random.randn(50)
repeats = np.asarray([signal + np.random.randn(len(signal))*1. for t in range(10)])
nreps, ntpts = repeats.shape
repeats = zscore(repeats, 1)
ymean = np.mean(repeats,0) # mean time-course
yhat = zscore(ymean + np.random.randn(len(ymean))*0.5)
Vy = np.var(ymean)
Vyhat = 1.
Cyyhat = np.cov(ymean, yhat)[0,1]
mcov = ((ymean - ymean.mean(0))*yhat).sum(0)/(ntpts - 1) # sample covariance
assert np.allclose(Cyyhat, mcov)
top = np.var(np.sum(repeats,0)) - np.sum(np.var(repeats, 1))
SP = top/(nreps*(nreps-1)) # THIS IS EXPLAINABLE VARIANCE
# same as
top2 = (nreps**2)*np.var(np.mean(repeats,0)) - nreps
SP2 = top2/(nreps*(nreps -1))
assert np.allclose(top, top2)
assert np.allclose(SP, SP2)
# same as
top3 = nreps*np.var(np.mean(repeats,0)) - 1
SP3 = top3/(nreps-1)
assert np.allclose(top2/nreps, top3)
assert np.allclose(SP2, SP3)
# same as
ev = np.var(np.mean(repeats,0)) # same as R2 := SSreg/SStot
    ev = ev - ((1 - ev) / float(repeats.shape[0] - 1))  # adjusted
assert np.allclose(ev, SP)
# same as (1 - residual variance)
assert np.allclose(tikutils.explainable_variance(repeats[...,None],
dozscore=False, ncorrection=True),
SP)
assert np.allclose(tikutils.explainable_variance(repeats[...,None],
dozscore=True, ncorrection=True),
SP)
# measures
CCabs = Cyyhat/np.sqrt(Vy*Vyhat)
CCnorm = Cyyhat/np.sqrt(SP*Vyhat)
CCmax = np.sqrt(SP/Vy)
corrected = CCabs/CCmax
eqn27 = Cyyhat/np.sqrt(SP) # eqn 27 from Schoppe, et al. (2016) paper
res = tikutils.noise_ceiling_correction(repeats, yhat, dozscore=True)
assert np.allclose(eqn27, res)
assert np.allclose(corrected, res)
assert np.allclose(CCnorm, res)
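    # Summary of the Schoppe et al. (2016) quantities exercised above:
    #   CCabs  = Cov(y, yhat) / sqrt(Var(y) * Var(yhat))
    #   CCmax  = sqrt(SP / Var(y))
    #   CCnorm = CCabs / CCmax = Cov(y, yhat) / sqrt(SP * Var(yhat))
    # where SP is the explainable (signal) variance across repeats.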
def test_make_trials():
"""smoke test"""
values = [[1.0, 3.0, 4.0],
[44.0, 33.0, 2.0]]
losses = [0.3, -0.2]
hpo_trials = tikutils.hyperopt_make_trials(values, losses)
parameter_names = ['X{}'.format(i) for i in range(3)]
vals = [{pn: [v] for pn, v in zip(parameter_names, val)} for val in values]
assert len(hpo_trials.trials) == len(values)
for trl, val, loss in zip(hpo_trials.trials, vals, losses):
assert trl['result']['loss'] == loss
assert trl['misc']['vals'] == val
return hpo_trials
def test_correlation():
A = np.random.randn(10, 3)
B = np.random.randn(10, 5)
cc = tikutils.cross_correlation(A, B)
mat = np.zeros((A.shape[-1], B.shape[-1]))
for adx in range(A.shape[-1]):
for bdx in range(B.shape[-1]):
corr = np.corrcoef(A[:,adx], B[:, bdx])[0,1]
mat[adx, bdx] = corr
assert np.allclose(mat, cc)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url
from haystack.views import search_view_factory
from machina.core.app import Application
from machina.core.loading import get_class
class SearchApp(Application):
name = 'forum_search'
search_view = get_class('forum_search.views', 'FacetedSearchView')
search_form = get_class('forum_search.forms', 'SearchForm')
def get_urls(self):
return [
url(r'^$', search_view_factory(
view_class=self.search_view,
form_class=self.search_form),
name='search'),
]
application = SearchApp()
|
#! /usr/bin/env python
import sys
import json
import csv
import re
import requests
import getpass
import argparse
import collections
def get_data(url, js_path):
user = input("User name for {0}: ".format(url))
passw = getpass.getpass("Password: ")
with open(js_path) as js_file:
js = js_file.read()
### Format JavaScript ###
# Replace newlines and remove tabs:
js = js.replace("\n", " ")
js = js.replace("\t", "")
# Remove JS comments:
js = re.sub("(\/\*)[^\*]+\*\/", "", js).strip()
### Request data with POST ###
post_data = json.dumps({"map": js})
header = {"content-type": "application/json"}
response = requests.post(url, headers=header, auth=(user, passw),
data=post_data, stream=True)
return response
def write_json(resp, out_file):
for line in resp.iter_lines(decode_unicode=True):
out_file.write(line)
def write_simple_json(resp, out_file):
def _simplify():
obj = resp.json()
for row in obj["rows"]:
key = row["key"]
value = row["value"]
yield json.dumps({key: value})
for line in _simplify():
out_file.write("{}\n".format(line))
def flatten(l):
for val in l.values() if isinstance(l, dict) else l:
if isinstance(val, (tuple, list)):
for sub in flatten(val):
yield sub
elif isinstance(val, dict):
for sub in flatten(val.values()):
yield sub
else:
yield val
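# Example (informal; assumes Python 3.7+ dict ordering):
#   list(flatten({"a": [1, [2, 3]], "b": {"c": 4}}))  ->  [1, 2, 3, 4]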
def write_csv_gen(resp, out_file):
writer = csv.writer(out_file, delimiter=",")
# writer.writerow(header)
# Parse using for loop:
# header = None
# for line in resp.iter_lines(decode_unicode=True):
# if re.match("\{\"id", line):
# obj = json.loads(line.rstrip("\r\n,"))
# if not header:
# header = ["key"]
# header.extend([key for key in obj["value"].keys()])
# writer.writerow(header)
# row = [obj["key"]]
# row.extend([val for val in obj["value"].values()])
# writer.writerow(row)
# Parse using generator function:
def _csv_gen():
p = re.compile(r"\{\"id")
for line in resp.iter_lines(decode_unicode=True):
if p.match(line):
obj = json.loads(line.rstrip("\r\n,"))
row = [obj["key"]]
vals = obj["value"]
row.extend(flatten(vals))
yield row
writer.writerows(_csv_gen())
# CSV formatter for reads per barcode data:
def write_custom1(resp, out_file):
# Data object is of format:
# Total flowcells: data["total_rows"]
# Offset: data["offset"]
# Flowcell data: data["rows"]
# Where data["rows"] is a list and if fc = data["rows"][n]:
# Database hash key: fc["id"]
# Flowcell/run ID: fc["key"]
# Lane data: fc["value"]
# Where fc["value"] is a list and if n is the lane index then
# fc["value"][n-1] is of format {barcode_sequence : million_reads}
# Write comma-separated output with format RUN_ID,LANE,BARCODE,READS:
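    # An illustrative row (values hypothetical):
    #   {"id": "abc123", "key": "FC42", "value": {"1": {"ACGTACGT": 12.5}}}
    # would produce the output line: FC42,1,ACGTACGT,12.5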
writer = csv.writer(out_file, delimiter=",")
writer.writerow(["RUN_ID", "LANE", "BARCODE", "READS"])
p = re.compile(r"\{\"id")
for line in resp.iter_lines(decode_unicode=True):
if p.match(line):
fc = json.loads(line.rstrip("\r\n,"))
writer.writerows([[fc["key"], i, bc, reads]
for i, lane in fc["value"].items()
for bc, reads in lane.items()])
# Tab-separated formatter for date data:
def write_custom2(resp, out_file):
header = ["project","app","facility","sample_type","prep",
"n_samples","rc_fail","prep_fail","lanes","sequencer","open_date",
"close_date","queue_date","samples_date","sequenced_date","deliver_date"
,"prep_date","qc_date","rc_date","order_date"]
obj = resp.json()
s = "\t".join(header)
out_file.write("{}\n".format(s))
for e in obj["rows"]:
row = []
vals = e["value"]
for x in header:
            if x in vals and vals[x] is not None and vals[x] != "undefined":
row.append(vals[x])
else:
row.append("")
s = "\t".join(row)
out_file.write("{}\n".format(s))
# CSV formatter for fragment size data:
def write_custom3(resp, out_file):
writer = csv.writer(out_file, delimiter=",")
header = ["proj", "app" ,"open", "lib", "prep", "sample", "size", "nm"]
obj = resp.json()
writer.writerow(header)
for e in obj["rows"]:
vals = e["value"]
for k,v in vals["samples"].items():
row = [vals[x] for x in header[:-3]] + [k, v["size"], v["nm"]]
writer.writerow(row)
# CSV formatter for flowcell data:
def write_custom4(resp, out_file):
writer = csv.writer(out_file, delimiter=",")
header = ["date", "flowcell", "sequencer", "lane", "barcode", "project", "sample" ,"reads"]
writer.writerow(header)
p = re.compile(r"\{\"id")
for line in resp.iter_lines(decode_unicode=True):
if p.match(line):
obj = json.loads(line.rstrip("\r\n,"))
vals = obj["value"]
pre = [vals[x] for x in header[:3]]
for lane, barcodes in vals["lanes"].items():
for bc, data in barcodes.items():
writer.writerow(pre + [lane, bc] + [data[x] for x in header[-3:]])
if __name__ == "__main__":
# Parse command-line arguments:
parser = argparse.ArgumentParser(description="Query a CouchDB database")
parser.add_argument("jsfile", help="File containing JavaScript map function")
parser.add_argument("--out", help="File to write response to (default: stdout")
parser.add_argument("--url", help="Database URL", default="http://tools-dev.scilifelab.se:5984/projects/_temp_view")
parser.add_argument("--csv", help="Convert response to CSV", action="store_true")
parser.add_argument("-s", "--simplify", help="Omit database id's", action="store_true")
args = parser.parse_args()
if args.csv:
write_func = write_csv_gen
else:
if args.simplify:
write_func = write_simple_json
else:
write_func = write_json
    # write_func = write_custom2  # debug override; uncomment to force the date formatter
resp = get_data(args.url, args.jsfile)
if args.out:
with open(args.out, "w") as out_file:
write_func(resp, out_file)
else:
write_func(resp, sys.stdout)
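# Example invocation (the script name is illustrative; the URL is the
# default defined above):
#   python couchdb_query.py map.js --csv --out rows.csv \
#       --url http://tools-dev.scilifelab.se:5984/projects/_temp_view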
|
# coding: utf-8
import sys
import pandas as pd
import olap.xmla.xmla as xmla
from StringIO import StringIO
def xmlamembers2list(itrbl):
result = []
for member in itrbl:
if isinstance(member, list):
label = u''
member_it = iter(member)
s = [None]
while member_it:
try:
low_member = next(member_it)
if isinstance(low_member, list):
s.append(member_it)
member_it = iter(low_member)
else:
label += u'{} '.format(low_member.Caption)
except StopIteration:
member_it = s.pop()
label = label[:-1]
else:
label = member.Caption
result.append(label)
return result
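# Example (informal): a nested axis tuple whose members carry Captions
# ['2020', 'Q1'] is flattened to the single label '2020 Q1'.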
url = sys.argv[1]
user = sys.argv[2]
passw = sys.argv[3]
catalog = sys.argv[4]
mdx_str = sys.argv[5]
p = xmla.XMLAProvider()
c = None
try:
    c = p.connect(location=url, username=user, password=passw)
    mdx_res = c.Execute(mdx_str, Catalog=catalog)
except Exception as e:
    # Fail loudly instead of leaving mdx_res undefined for the code below.
    sys.exit('MDX query failed: {0}'.format(e))
finally:
    if c:
        try:
            c.EndSession()
        except Exception:
            pass
mdx_cols = xmlamembers2list(mdx_res.getAxisTuple(axis=0))
mdx_rows = xmlamembers2list(mdx_res.getAxisTuple(axis=1))
mdx_data = [[x.FmtValue if hasattr(x, 'FmtValue') else '0' for x in cell] for cell in mdx_res.getSlice()]
mdx_df = pd.DataFrame(mdx_data,
columns=mdx_cols, index=pd.Index(mdx_rows, name='ID'))
mdx_csv_str = StringIO()
mdx_df.to_csv(mdx_csv_str)
print(mdx_csv_str.getvalue())
|
import os
import random
import shutil
import subprocess
base_dir = '/Users/ng98/Desktop/avalanche_test/'
core50_dir = '/Users/ng98/.avalanche/data/core50/'
image_dir = core50_dir + 'core50_128x128'
head = '<!DOCTYPE html><html><head><title>CoRe50</title></head><body>'
tail ='</body></html>'
def print_table_NI_DI_cat_task_id_by_session(f, show_only_first_object_of_the_class=False):
    f.write('<h3>{} from each category/class</h3>'.format('Only first object type' if show_only_first_object_of_the_class else 'All object types'))
f.write('<table border="1">\n')
f.write('<tr>')
for o in range (51):
if o == 0:
f.write('<td></td>')
q, mod = divmod(o, 5)
if mod == 1:
f.write('<td>{}</td>'.format('class ' + str(q)))
else:
continue
f.write('</tr>')
for s in range(1, 12):
f.write('<tr>\n<td>task {}</td>'.format(s))
for o in range(1, 51):
q, mod = divmod(o, 5)
if mod == 1:
f.write('<td>')
            if (show_only_first_object_of_the_class and mod == 1) or (not show_only_first_object_of_the_class):
ss = str(s)
sss = str(s).zfill(2)
img_number = str(random.randrange(1, 300)).zfill(3)
file_name = 'C_' + sss + '_' + str(o).zfill(2) + '_' + img_number + '.png'
src = image_dir + '/s' + ss + '/o' + str(o) + '/' + file_name
dest = os.path.join(html_dir_path, file_name)
if os.path.exists(src):
shutil.copyfile(src, dest)
f.write('<img src="' + file_name + '">')
else:
# fd.write('{}, {}, {}'.format(o, mod, q))
pass
if mod == 0:
f.write('</td>')
f.write('</tr>')
    f.write('</table>\n')
def add_task_run_dic_key(task_info, task_id=None, run=None, key=None):
if task_id in task_info:
if run in task_info[task_id]:
if key in task_info[task_id][run]:
pass
else:
task_info[task_id][run].update({key: []})
else:
task_info[task_id].update({run: {key: []}})
else:
task_info.update({task_id: {run: {key: []}}})
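# Example of the nested structure this builds (values illustrative):
#   info = {}
#   add_task_run_dic_key(info, task_id='01', run='0', key='sessions')
#   # info == {'01': {'0': {'sessions': []}}}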
def join_values_using_key(task_info, key=None):
for task_id in sorted(task_info.keys()):
for run in sorted(task_info[task_id].keys()):
old_key_idx_items = ''
for key_idx in task_info[task_id][run][key]:
key_idx_items = ''.join([str(x)+',' for x in key_idx])
if old_key_idx_items != key_idx_items:
old_key_idx_items += key_idx_items
task_info[task_id][run][key] = old_key_idx_items
def get_range(s):
ll = s.split(',')
ll = ll[0: len(ll): len(ll) - 2]
return ll[0] + ' - ' + ll[1]
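# Example: the comma-joined strings built by join_values_using_key end with
# a trailing comma, and get_range reduces them to their end points:
#   get_range('1,2,3,')  ->  '1 - 3'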
def original_NI(f, scenario=None, print_objects=True):
scenario_dir = os.path.join(core50_dir, 'batches_filelists', scenario)
task_info = {}
for run in os.scandir(path=scenario_dir):
if run.is_dir():
# print(run.name)
r = run.name.replace('run', '')
for f in os.scandir(path=os.path.join(scenario_dir, run.name)):
if f.is_file():
# train_batch_03_filelist.txt | test_filelist.txt
if f.name == 'test_filelist.txt':
t_id = '-1'
else:
t_id = f.name.replace('train_batch_', '').replace('_filelist.txt', '')
# print('==', f.name)
# s11/o1/C_11_01_000.png 0
sessions = []
command = subprocess.Popen(
"awk -F '/' '{print $1}' " + os.path.join(scenario_dir, run.name,
f.name) + " | sed 's/s//g' | sort | uniq",
shell=True, stdout=subprocess.PIPE)
for line in command.stdout.readlines():
sessions.append(int(line.decode("utf-8").replace('\n', '')))
objects = []
command = subprocess.Popen(
"awk -F '/' '{print $2}' " + os.path.join(scenario_dir, run.name,
f.name) + " | sed 's/o//g' | sort | uniq",
shell=True, stdout=subprocess.PIPE)
for line in command.stdout.readlines():
objects.append(int(line.decode("utf-8").replace('\n', '')))
classes = []
command = subprocess.Popen(
"awk -F ' ' '{print $2}' " + os.path.join(scenario_dir, run.name,
f.name) + " | sed 's/o//g' | sort | uniq",
shell=True, stdout=subprocess.PIPE)
for line in command.stdout.readlines():
classes.append(int(line.decode("utf-8").replace('\n', '')))
sessions.sort()
objects.sort()
classes.sort()
add_task_run_dic_key(task_info, task_id=t_id, run=r, key='sessions')
add_task_run_dic_key(task_info, task_id=t_id, run=r, key='objects')
add_task_run_dic_key(task_info, task_id=t_id, run=r, key='classes')
task_info[t_id][r]['sessions'].append(sessions)
task_info[t_id][r]['objects'].append(objects)
task_info[t_id][r]['classes'].append(classes)
# print('S: ', sessions)
# print('O: ', objects)
# print(task_info['-1'])
join_values_using_key(task_info, key='sessions')
join_values_using_key(task_info, key='objects')
join_values_using_key(task_info, key='classes')
    f.write('<h3>{}</h3>'.format(scenario))
    f.write('<table border="1">\n')
    f.write('<tr><td>task</td><td>run</td><td>session</td>{}<td>classes</td></tr>\n'.
            format('<td>objects</td>' if print_objects else ''))
    for t in sorted(task_info.keys()):
        for r in sorted(task_info[t].keys()):
            f.write('<tr>')
            f.write('<td>{}</td>'.format(t))
            f.write('<td>{}</td>'.format(r))
            f.write('<td>{}</td>'.format(task_info[t][r]['sessions']))
            if print_objects:
                f.write('<td>{}</td>'.format(get_range(task_info[t][r]['objects'])))
            f.write('<td>{}</td>'.format(get_range(task_info[t][r]['classes'])))
            f.write('</tr>\n')
    f.write('</table>\n')
html_dir_path = os.path.join(base_dir, 'Core50')
if os.path.isdir(html_dir_path):
shutil.rmtree(html_dir_path)
os.mkdir(html_dir_path)
fd = open(os.path.join(html_dir_path, 'corRe50.html'), 'w')
fd.write(head)
print_table_NI_DI_cat_task_id_by_session(fd, show_only_first_object_of_the_class=True)
print_table_NI_DI_cat_task_id_by_session(fd, show_only_first_object_of_the_class=False)
original_NI(fd, scenario='NI_cum')
original_NI(fd, scenario='NI_inc')
original_NI(fd, scenario='NI_inc_cat')
fd.write(tail)
fd.close()
|