content stringlengths 5 1.05M |
|---|
"""
update date: 2021/09/24
Author: Xu Chenchen
content: 1. download_pdf()中,在GET请求前确保url没有转义字符
update date: 2021/08/05
Author: Xu Chenchen
content: 1. 修改了主函数的示例,导入version2的xpath筛选模块
2. 下载的pdf文件可以自主确定文件路径(新增功能)
"""
import requests
from urllib.robotparser import RobotFileParser
import time
from urllib.parse import urlparse
from filename import get_valid_filename
def doi_parser(doi, start_url, useSSL=True):
    """Build the sci-hub URL for a DOI.

    :param doi: document DOI, e.g. '10.1000/xyz'
    :param start_url: sci-hub domain, e.g. 'sci-hub.do'
    :param useSSL: use https when True, plain http otherwise
    :return: full URL string '<scheme>://<domain>/<doi>'
    """
    scheme = 'https' if useSSL else 'http'
    return '{}://{}/{}'.format(scheme, start_url, doi)
def get_robot_parser(robot_url):
    """Fetch and parse the robots.txt found at *robot_url*.

    :param robot_url: absolute url of a robots.txt file
    :return: a RobotFileParser that has already read the file
    """
    parser = RobotFileParser()
    parser.set_url(robot_url)
    parser.read()  # performs the network fetch
    return parser
def wait(url, delay=3, domains=None):
    """Sleep until the interval between two downloads of the same
    domain reaches *delay* seconds.

    :param url: url about to be requested; only its domain is used
    :param delay: minimum interval in seconds between two hits on the
                  same domain; a value <= 0 disables waiting
    :param domains: dict mapping domain -> timestamp of the last
                    access; it is updated in place, so callers should
                    pass the same dict across calls to get throttling
    """
    # BUG FIX: the parameter used a mutable default argument
    # ('domains={}'), a single dict silently shared by every call that
    # omitted it. Use the None sentinel instead.
    if domains is None:
        domains = {}
    domain = urlparse(url).netloc  # get the domain
    last_accessed = domains.get(domain)  # the time last accessed
    if delay > 0 and last_accessed is not None:
        sleep_secs = delay - (time.time() - last_accessed)
        if sleep_secs > 0:
            time.sleep(sleep_secs)
    domains[domain] = time.time()
def download(url, headers, proxies=None, num_retries=2):
    """GET *url* and return the response body as text.

    Returns None on a request exception or an HTTP error (>= 400);
    retries recursively on 5xx responses while *num_retries* remains.
    Certificate verification is disabled (verify=False), matching the
    rest of this crawler.
    """
    print('Downloading: ', url)
    try:
        response = requests.get(url, headers=headers, proxies=proxies, verify=False)
        text = response.text
        if response.status_code >= 400:
            print('Download error: ', response.text)
            text = None
            # server-side errors are worth retrying; client errors are not
            if num_retries and 500 <= response.status_code < 600:
                return download(url, headers, proxies, num_retries - 1)
    except requests.exceptions.RequestException as err:
        print('Download error', err)
        return None
    return text
def download_pdf(result, headers, dir, proxies=None, num_retries=2, doi=None):
    """
    Download one PDF described by *result* and save it under *dir*.

    :param result: dict with 'onclick' (the download url) and 'title'
    :param headers: HTTP headers for the GET request
    :param dir: directory the pdf file is written into
    :param proxies: optional proxies passed through to requests
    :param num_retries: retries left for 5xx responses
    :param doi: used as the file name when the title is (nearly) empty
    :return: True on success, False otherwise
    """
    url = result['onclick']
    components = urlparse(url)
    if len(components.scheme) == 0:  # add the scheme when the url has none
        url = 'https:{}'.format(url)
    # strip escape backslashes from the url (it may arrive as https:\/\/...)
    url = url.replace('\\', '')
    print('File downloading: ', url)
    try:
        resp = requests.get(url, headers=headers, proxies=proxies, verify=False)
        if resp.status_code >= 400:
            print('File download error: ', resp.status_code)
            if num_retries and 500 <= resp.status_code < 600:
                # BUG FIX: previously retried via download() with the wrong
                # arguments; retry the pdf download itself instead.
                return download_pdf(result, headers, dir, proxies, num_retries - 1, doi)
            # BUG FIX: previously fell through and wrote the error response
            # body to disk, then reported success.
            return False
        if len(result['title']) < 5:  # fall back to the doi when the title is empty
            filename = get_valid_filename(doi) + '.pdf'
        else:
            filename = get_valid_filename(result['title']) + '.pdf'
        path = '/'.join([dir, filename])
        print(path)
        # ok, let's write it to file
        with open(path, 'wb') as fp:
            fp.write(resp.content)
    except requests.exceptions.RequestException as e:
        print('File download error', e)
        return False
    return True
def sci_hub_crawler(doi_list, dir, robot_url=None, user_agent='sheng', proxies=None,num_retries=2,
                    delay=3, start_url='sci-hub.do', useSSL=True, get_link=None, nolimit=False, cache=None):
    """
    Given a list of document DOIs, crawl the corresponding pdf files.
    :param doi_list: list of DOIs
    :param robot_url: url of robots.txt on sci-hub
    :param user_agent: user agent; do not set it to 'Twitterbot'
    :param dir: directory the pdf files are saved into (created automatically if missing)
    :param proxies: proxies for requests
    :param num_retries: number of download retries
    :param delay: minimum interval between two downloads of the same domain
    :param start_url: sci-hub home domain
    :param useSSL: whether to enable SSL; when True the scheme is 'https'
    :param get_link: function object that extracts the download link,
                     called as get_link(html) where html is the fetched page text;
                     implementations live in scraping_using_%s.py % (bs4, lxml, regex)
    :param nolimit: do not be limited by robots.txt if True
    :param cache: a cache object; this code uses it exactly like a dict
    :return: None; prints a success summary at the end
    """
    headers = {'User-Agent': user_agent}
    HTTP = 'https' if useSSL else 'http'
    if not get_link:
        # without a scraper there is nothing we can do
        print('Crawl failed, no get_link method.')
        return None
    if not robot_url:
        robot_url = HTTP + '://{}/robots.txt'.format(start_url)
    # print(robot_url)
    try:
        rp = get_robot_parser(robot_url)
    except Exception as e:
        # robots.txt being unreachable is not fatal; rp=None disables the check
        rp = None
        print('get_robot_parser() error: ', e)
    domains={} # save the timestamp of accessed domains
    download_succ_cnt: int = 0 # the number of pdfs that're successfully downloaded
    # create dir if it does not exist yet
    import os
    try:
        if not os.path.exists(dir):
            os.makedirs(dir)
    except Exception as e:
        print('directory making error: ', e)
    for doi in doi_list:
        url = doi_parser(doi, start_url, useSSL)
        # NOTE(review): cache[url] assumes the cache returns a falsy value
        # (not raise KeyError) on a miss — confirm against the cache class.
        if cache and cache[url]:
            print('already downloaded: ', cache[url])
            download_succ_cnt += 1
            continue
        # NOTE(review): 'and' binds tighter than 'or', so nolimit=True
        # bypasses the robots.txt check entirely (this looks intended).
        if rp and rp.can_fetch(user_agent, url) or nolimit:
            wait(url, delay, domains)
            html = download(url, headers, proxies, num_retries)
            result = get_link(html)
            if result and download_pdf(result, headers, dir, proxies, num_retries, doi):
                if cache:
                    cache[url] = 'https:{}'.format(result['onclick']) # cache
                download_succ_cnt += 1
        else:
            print('Blocked by robots.txt: ', url)
    print('%d of total %d pdf success' % (download_succ_cnt, len(doi_list)))
if __name__ == '__main__':
    from scraping_using_lxml import get_link_xpath
    # The triple-quoted string below is a disabled usage example of
    # sci_hub_crawler(); it is kept verbatim for reference.
    """
    dois = ['10.1109/TCIAIG.2017.2755699', # HTTP协议头重复
    '10.3390/s20205967', # 标题为空
    '10.1016/j.apergo.2020.103286' # 没毛病
    ]
    get_link = get_link_xpath
    print('use %s as get_link_method.' % get_link.__name__)
    dir ='./documents' # pdf存储路径
    sci_hub_crawler(dois, dir, get_link = get_link, user_agent='sheng', nolimit=True)
    print('Done.')
    """
    # Ad-hoc smoke test: fetch one pdf url whose slashes arrive escaped.
    url = 'https:\/\/sci.bban.top\/pdf\/10.1145\/3132847.3132909.pdf?download=true'
    components = urlparse(url)
    if len(components.scheme) == 0: # add the scheme when the url has none
        url = 'https:{}'.format(url)
    url = url.replace('\\', '')  # strip the escape backslashes before the GET
    print('File downloading: ', url)
    try:
        headers = {'User-Agent': 'sheng'}
        resp = requests.get(url, headers=headers, proxies=None, verify=False)
    except Exception as e:
        print('error occurred: ', e)
|
#!/usr/bin/env python3
import csv
import logging
import argparse
from pathlib import Path
from fast_forward.ranking import Ranking
from fast_forward.index import InMemoryIndex, Mode
from fast_forward.encoder import TransformerQueryEncoder, TCTColBERTQueryEncoder
LOGGER = logging.getLogger(__name__)
def main():
    """CLI entry point: interpolate sparse retrieval scores with dense
    (Fast-Forward index) scores and write one TREC-style run file per
    interpolation weight alpha into the target directory.
    """
    ap = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    ap.add_argument("INDEX", help="Fast-Forward index file.")
    ap.add_argument(
        "MODE", choices=["maxp", "avep", "firstp", "passage"], help="Retrieval mode."
    )
    ap.add_argument("ENCODER", help="Pre-trained transformer encoder.")
    ap.add_argument(
        "SPARSE_SCORES",
        help="TREC runfile containing the scores of the sparse retriever.",
    )
    ap.add_argument("QUERY_FILE", help="Queries (tsv).")
    ap.add_argument(
        "--cutoff", type=int, help="Maximum number of sparse documents per query."
    )
    ap.add_argument(
        "--cutoff_result",
        type=int,
        default=1000,
        help="Maximum number of documents per query in the final ranking.",
    )
    ap.add_argument(
        "--alpha",
        type=float,
        nargs="+",
        default=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0],
        help="Interpolation weight.",
    )
    ap.add_argument(
        "--early_stopping", action="store_true", help="Use approximated early stopping."
    )
    ap.add_argument("--target", default="out", help="Output directory.")
    args = ap.parse_args()
    logging.basicConfig(level=logging.INFO)
    LOGGER.info(f"reading {args.SPARSE_SCORES}")
    sparse_scores = Ranking.from_file(Path(args.SPARSE_SCORES))
    if args.cutoff is not None:
        # optionally truncate the sparse run before re-ranking
        sparse_scores.cut(args.cutoff)
    LOGGER.info(f"reading {args.QUERY_FILE}")
    # assumes each tsv row is exactly (query_id, query_text) — TODO confirm
    with open(args.QUERY_FILE, encoding="utf-8", newline="") as fp:
        queries = {q_id: q for q_id, q in csv.reader(fp, delimiter="\t")}
    # TCT-ColBERT checkpoints need their dedicated query encoder
    if "tct_colbert" in args.ENCODER:
        encoder = TCTColBERTQueryEncoder(args.ENCODER)
    else:
        encoder = TransformerQueryEncoder(args.ENCODER)
    # map the CLI mode name to the index aggregation mode
    mode = {
        "maxp": Mode.MAXP,
        "avep": Mode.AVEP,
        "firstp": Mode.FIRSTP,
        "passage": Mode.PASSAGE,
    }[args.MODE]
    LOGGER.info(f"reading {args.INDEX}")
    index = InMemoryIndex.from_disk(Path(args.INDEX), encoder, mode=mode)
    # result maps each alpha to its interpolated ranking
    result = index.get_scores(
        sparse_scores, queries, args.alpha, args.cutoff_result, args.early_stopping
    )
    for alpha, ranking in result.items():
        name = f"interpolation-{alpha}"
        if args.early_stopping:
            name += "-es"
        ranking.name = name
        # NOTE(review): assumes args.target already exists — Path.save may
        # fail otherwise; confirm Ranking.save creates parent directories.
        target = Path(args.target) / f"{name}.tsv"
        LOGGER.info(f"writing {target}")
        ranking.save(target)
if __name__ == "__main__":
    main()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 16 11:40:46 2019
@author: msr
"""
import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
import glob
# Collect all recording csv files; the newest (last after sort) is held out
# as the test file, the rest are concatenated into one training frame.
files_snoring = list(glob.glob("snoring_test_data/*.csv"))
files_snoring.sort()
test_file_snoring = files_snoring[-1]
files_snoring.pop(-1)
df_snoring = pd.DataFrame()
for file in files_snoring:
    df_open = pd.read_csv(file, header=None)
    df_snoring = pd.concat([df_snoring, df_open], axis=0)
    del df_open
df_snoring = df_snoring.reset_index(drop=True)
df_snoring.columns = ['obs']
# Samples per second, assuming each recording covers 4*4*60 seconds in
# total — TODO confirm this matches the capture setup.
obs_per_sec = round(df_snoring.shape[0]/(4*4*60))
mean = df_snoring.obs.mean()
median = df_snoring.obs.median()
# Feature engineering: deviations from the mean plus assorted rolling
# statistics over the raw signal.
df_snoring['obs_mean'] = mean
df_snoring['diff_mean'] = mean - df_snoring['obs']
df_snoring['delta_mean'] = (df_snoring['diff_mean'] / mean)
df_snoring['delta_mean_rolling_100ms'] = df_snoring['delta_mean'].rolling(window=obs_per_sec//10).mean()
df_snoring['delta_mean_rolling_200ms'] = df_snoring['delta_mean'].rolling(window=obs_per_sec//5).mean()
df_snoring['delta_mean_rolling_std_100ms'] = df_snoring['delta_mean'].rolling(window=25).std()
df_snoring['roll_mean_obs_10'] = df_snoring['obs'].rolling(window=10).mean()
df_snoring['diff_1'] = df_snoring['obs'].diff(1)
df_snoring['rolling_std_2'] = df_snoring['obs'].rolling(window=2).std()
df_snoring['rolling_std_2_rolling_mean_10'] = df_snoring['rolling_std_2'].rolling(window=10).mean()
df_snoring['diff_5_abs'] = abs(df_snoring['obs'].diff(5))
df_snoring['diff_5_abs_centred'] = abs(df_snoring['diff_5_abs'] - df_snoring['diff_5_abs'].mean())
df_snoring['diff_5_abs_rolling_mean'] = df_snoring['diff_5_abs'].rolling(window=10).sum()
df_snoring['autocorr_10'] = df_snoring['obs'].rolling(window=10).apply(lambda x: x.autocorr(), raw=False)
df_snoring['autocorr_5'] = df_snoring['obs'].rolling(window=5).apply(lambda x: x.autocorr(), raw=False)
df_snoring['autocorr_5_plus_diff'] = df_snoring['autocorr_5'] / df_snoring['diff_5_abs']
df_snoring['diff_mean_rolling'] = df_snoring['diff_mean'].rolling(window=obs_per_sec//10).mean()
# Hand-picked windows used for eyeballing the signal (labels from listening).
aa = df_snoring.iloc[600:800] # Snoring
bb = df_snoring.iloc[:500] # No snoring
cc = df_snoring.iloc[1650:2000] # Snoring
dd = df_snoring.iloc[54200:54600] # Snoring
ee = df_snoring.iloc[52900:53300] # No snoring
ff = df_snoring.iloc[42000:42400]
gg = df_snoring.iloc[42600: 43000]
bk = df_snoring.copy()
# Label each sample: 1 when the mean of rolling_std_2 over the next 250
# samples exceeds 0.75.
# NOTE(review): iterrows() over every sample with a per-row .loc slice is
# O(n*250) and very slow; a forward rolling mean would vectorize this.
df_snoring['snoring'] = 0
for index, row in df_snoring.iterrows():
    df_snoring.loc[index, 'snoring'] = np.where(
            df_snoring.loc[index:index+249, 'rolling_std_2'].mean() > 0.75, 1, 0)
import statistics
from statistics import StatisticsError
# Collapse per-sample labels into one verdict per second via the mode;
# StatisticsError (no unique mode, Python < 3.8) defaults to True.
snoring_detected = []
for sec in range(0, df_snoring.shape[0], obs_per_sec):
    try:
        verdict = bool(np.where(statistics.mode(df_snoring.iloc[sec:sec+obs_per_sec]['snoring']) == 1, True, False))
    except StatisticsError:
        verdict = True
    snoring_detected.append(verdict)
# Repeat the exact same feature engineering and labeling on the held-out
# test file (duplicated code; could be factored into a function).
df_snoring_test = pd.read_csv(test_file_snoring, header=None)
df_snoring_test = df_snoring_test.reset_index(drop=True)
df_snoring_test.columns = ['obs']
obs_per_sec = round(df_snoring_test.shape[0]/(4*4*60))
mean = df_snoring_test.obs.mean()
median = df_snoring_test.obs.median()
df_snoring_test['obs_mean'] = mean
df_snoring_test['diff_mean'] = mean - df_snoring_test['obs']
df_snoring_test['delta_mean'] = (df_snoring_test['diff_mean'] / mean)
df_snoring_test['delta_mean_rolling_100ms'] = df_snoring_test['delta_mean'].rolling(window=obs_per_sec//10).mean()
df_snoring_test['delta_mean_rolling_200ms'] = df_snoring_test['delta_mean'].rolling(window=obs_per_sec//5).mean()
df_snoring_test['delta_mean_rolling_std_100ms'] = df_snoring_test['delta_mean'].rolling(window=25).std()
df_snoring_test['roll_mean_obs_10'] = df_snoring_test['obs'].rolling(window=10).mean()
df_snoring_test['diff_1'] = df_snoring_test['obs'].diff(1)
df_snoring_test['rolling_std_2'] = df_snoring_test['obs'].rolling(window=2).std()
df_snoring_test['rolling_std_2_rolling_mean_10'] = df_snoring_test['rolling_std_2'].rolling(window=10).mean()
df_snoring_test['diff_5_abs'] = abs(df_snoring_test['obs'].diff(5))
df_snoring_test['diff_5_abs_centred'] = abs(df_snoring_test['diff_5_abs'] - df_snoring_test['diff_5_abs'].mean())
df_snoring_test['diff_5_abs_rolling_mean'] = df_snoring_test['diff_5_abs'].rolling(window=10).sum()
df_snoring_test['autocorr_10'] = df_snoring_test['obs'].rolling(window=10).apply(lambda x: x.autocorr(), raw=False)
df_snoring_test['autocorr_5'] = df_snoring_test['obs'].rolling(window=5).apply(lambda x: x.autocorr(), raw=False)
df_snoring_test['autocorr_5_plus_diff'] = df_snoring_test['autocorr_5'] / df_snoring_test['diff_5_abs']
df_snoring_test['diff_mean_rolling'] = df_snoring_test['diff_mean'].rolling(window=obs_per_sec//10).mean()
df_snoring_test['snoring'] = 0
for index, row in df_snoring_test.iterrows():
    df_snoring_test.loc[index, 'snoring'] = np.where(
            df_snoring_test.loc[index:index+249, 'rolling_std_2'].mean() > 0.75, 1, 0)
import statistics
from statistics import StatisticsError
# Same per-window verdicts as above, but keyed by sample range and with a
# fixed 25-sample step instead of obs_per_sec.
# NOTE(review): this rebinds snoring_detected from a list to a Series,
# discarding the training-set verdicts computed earlier — confirm intended.
snoring_detected = pd.Series()
timestep = 25
for sec in range(0, df_snoring_test.shape[0], timestep):
    try:
        verdict = bool(np.where(statistics.mode(df_snoring_test.iloc[sec:sec+timestep]['snoring']) == 1, True, False))
    except StatisticsError:
        verdict = True
    snoring_detected[str(sec)+":"+str(sec+timestep)] = verdict
# ---------------------------------------------------------------------------
# Scratch / exploratory section. Several lines below reference names that are
# never defined in this file and will raise at runtime; flagged inline.
# ---------------------------------------------------------------------------
# NOTE(review): 'autoreg_std' is not a column created above — this raises
# AttributeError as written.
dd.autoreg_std.mean()
def autocorr(x):
    # Full autocorrelation via numpy, keeping only non-negative lags.
    result = np.correlate(x, x, mode='full')
    return result[result.size//2:]
result = autocorr(aa.obs)
# NOTE(review): 'autoreg_std_rolling_mean_abs' is never created — broken.
np.polyfit(cc.index, cc.autoreg_std_rolling_mean_abs, 1)
from scipy import stats
# NOTE(review): scipy.stats.signaltonoise was removed in scipy 1.0 — broken
# on modern scipy.
stats.signaltonoise(aa)
import statsmodels.api as sm
decomposition = sm.tsa.filters.filtertools.recursive_filter(aa.obs)
decomposition = sm.tsa.seasonal_decompose(aa.obs, model='additive')
from matplotlib.pyplot import figure
figure(num=None, figsize=(20, 6))
# NOTE(review): 'df1' is undefined in this file — broken.
plt.plot(df1.iloc[:5000]['obs'])
from scipy.fftpack import fft
sample_rate = 250
N = 960 * sample_rate
time = np.linspace(0, 2, N)
y = fft(cc.obs)
# NOTE(review): 'freq_data' is undefined, and np.int was removed in
# numpy 1.24 — broken.
y = 2/N * np.abs (freq_data [0:np.int (N/2)])
cc['fft'] = y
# NOTE(review): 'scipy' itself is never imported (only submodules) — broken.
f, t, Sxx = scipy.signal.spectrogram(cc.obs, 1)
plt.pcolormesh(t, f, Sxx)
plt.ylabel('Frequency [Hz]')
plt.xlabel('Time [sec]')
plt.show()
# NOTE(review): plotly.plotly was moved to the separate chart_studio
# package — this import fails on modern plotly.
import plotly.plotly as py
import plotly.graph_objs as go
# Single-sided FFT amplitude spectrum of the 'cc' snoring window.
amplitude = np.absolute(np.fft.fft(cc.obs))[1:]
amplitude = amplitude[0:(int(len(amplitude)/2))]
frequency = np.linspace(0,10000, len(amplitude))
plt.plot(frequency, amplitude)
trace = go.Scatter(x = frequency, y = amplitude)
data = [trace]
layout = go.Layout(title="Frequency vs Amplitude after FFT",
                   xaxis=dict(title='Frequency'),
                   yaxis=dict(title='Amplitude'))
fig = go.Figure(data=data, layout=layout)
py.iplot(fig)
|
import json
import time
from rest_framework import status
import api
from api.models import House, Note
__author__ = 'schien'
from rest_framework.test import APITestCase, APIRequestFactory, force_authenticate
from django.core.urlresolvers import reverse
from api.tests.constants import house_serializer_id_field
from TestMixins import OAuthTestCaseMixin
class NotesAPITests(OAuthTestCaseMixin, APITestCase):
    """
    API for submitting Notes
    """
    fixtures = ['test_data.json']
    def test_bulk_notes(self):
        """POSTing two notes for different houses creates both."""
        user = self.setCredentialsForAnyUser()
        url = reverse('app_note')
        timestamp = 1378073890220
        data = [{house_serializer_id_field: 1, 'text': 'Test Text', 'timestamp': timestamp},
                {house_serializer_id_field: 3, 'text': 'Test Text 3', 'timestamp': timestamp + 1}]
        response = self.client.post(url, data, format='json')
        self.assertTrue(response.status_code == status.HTTP_201_CREATED)
        self.assertTrue(len(user.notes.all()) == 2)
    def test_no_duplicate_bulk_notes(self):
        """Two identical notes in one bulk POST must be de-duplicated."""
        user = self.setCredentialsForAnyUser()
        url = reverse('app_note')
        timestamp = 1378073890220
        data = [{house_serializer_id_field: 1, 'text': 'Test Text', 'timestamp': timestamp},
                {house_serializer_id_field: 1, 'text': 'Test Text', 'timestamp': timestamp}]
        response = self.client.post(url, data, format='json')
        self.assertTrue(response.status_code == status.HTTP_201_CREATED)
        self.assertTrue(len(user.notes.all()) == 1)
    def test_update_note(self):
        """A second POST for the same houses updates the existing notes
        in place instead of creating new ones."""
        user = self.setCredentialsForAnyUser()
        url = reverse('app_note')
        house_1_id = 1
        house_3_id = 3
        timestamp = 1378073890220
        data = [{house_serializer_id_field: house_1_id, 'text': 'Test Text', 'timestamp': timestamp},
                {house_serializer_id_field: house_3_id, 'text': 'Test Text 3', 'timestamp': timestamp + 1}]
        response = self.client.post(url, data, format='json')
        self.assertTrue(len(user.notes.all()) == 2)
        house_1_updated_text = 'Updated text'
        house_3_updated_text = 'update 2'
        data = [{house_serializer_id_field: house_1_id, 'text': house_1_updated_text, 'timestamp': timestamp + 2},
                {house_serializer_id_field: house_3_id, 'text': house_3_updated_text, 'timestamp': timestamp + 3}]
        response = self.client.post(url, data, format='json')
        self.assertTrue(response.status_code == status.HTTP_201_CREATED)
        self.assertTrue(len(user.notes.all()) == 2)
        self.assertTrue(House.objects.get(pk=house_1_id).note.all()[0].text == house_1_updated_text)
    def getNote(self, user, scan):
        """Helper: GET a note detail as *user* and return the rendered response."""
        factory = APIRequestFactory()
        request = factory.get(reverse('note_detail', args=[scan]))
        force_authenticate(request, user=user)
        view = api.views.BulkNoteList.as_view()
        response = view(request)
        response.render()
        return response
    def createNote(self, user, text, house=None):
        """Helper: persist and return a Note owned by *user*."""
        note = Note(text=text, user=user, timestamp=time.time(), house=house)
        note.save()
        return note
    def test_get_note(self):
        """A created note can be fetched back and carries the same text."""
        user = self.setCredentialsForAnyUser()
        self.assertTrue(len(user.notes.all()) == 0)
        noteText = "0001"
        house = House.objects.all()[0]
        note = self.createNote(user, noteText, house)
        response = self.getNote(user, note.pk)
        self.assertEqual(response.status_code, 200)
        self.assertTrue(json.loads(response.content)[0]['text'] == noteText)
        self.assertTrue(len(user.notes.all()) == 1)
# BSD LICENSE
#
# Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
import os
import sys
import inspect
import re
from settings import LOG_NAME_SEP, FOLDERS
from utils import RED
"""
DTS logger module with several log level. DTS framwork and TestSuite log
will saved into different log files.
"""
# When True, console handlers show DEBUG-level output (see set_verbose()).
verbose = False
# Custom log levels, attached directly onto the logging module so the rest
# of the framework can reference them as logging.<NAME>. Each is offset from
# a standard level so relative ordering with INFO/DEBUG/WARNING is kept.
logging.DTS_DUT_CMD = logging.INFO + 1
logging.DTS_DUT_OUTPUT = logging.DEBUG + 1
logging.DTS_DUT_RESULT = logging.WARNING + 1
logging.DTS_TESTER_CMD = logging.INFO + 2
logging.DTS_TESTER_OUTPUT = logging.DEBUG + 2
logging.DTS_TESTER_RESULT = logging.WARNING + 2
logging.SUITE_DUT_CMD = logging.INFO + 3
logging.SUITE_DUT_OUTPUT = logging.DEBUG + 3
logging.SUITE_TESTER_CMD = logging.INFO + 4
logging.SUITE_TESTER_OUTPUT = logging.DEBUG + 4
logging.DTS_IXIA_CMD = logging.INFO + 5
logging.DTS_IXIA_OUTPUT = logging.DEBUG + 5
logging.DTS_VIRTDUT_CMD = logging.INFO + 6
logging.DTS_VIRTDUT_OUTPUT = logging.DEBUG + 6
# Register human-readable names for the custom levels.
logging.addLevelName(logging.DTS_DUT_CMD, 'DTS_DUT_CMD')
logging.addLevelName(logging.DTS_DUT_OUTPUT, 'DTS_DUT_OUTPUT')
logging.addLevelName(logging.DTS_DUT_RESULT, 'DTS_DUT_RESULT')
logging.addLevelName(logging.DTS_TESTER_CMD, 'DTS_TESTER_CMD')
logging.addLevelName(logging.DTS_TESTER_OUTPUT, 'DTS_TESTER_OUTPUT')
logging.addLevelName(logging.DTS_TESTER_RESULT, 'DTS_TESTER_RESULT')
logging.addLevelName(logging.DTS_IXIA_CMD, 'DTS_IXIA_CMD')
logging.addLevelName(logging.DTS_IXIA_OUTPUT, 'DTS_IXIA_OUTPUT')
logging.addLevelName(logging.DTS_VIRTDUT_CMD, 'VIRTDUT_CMD')
logging.addLevelName(logging.DTS_VIRTDUT_OUTPUT, 'VIRTDUT_OUTPUT')
logging.addLevelName(logging.SUITE_DUT_CMD, 'SUITE_DUT_CMD')
logging.addLevelName(logging.SUITE_DUT_OUTPUT, 'SUITE_DUT_OUTPUT')
logging.addLevelName(logging.SUITE_TESTER_CMD, 'SUITE_TESTER_CMD')
logging.addLevelName(logging.SUITE_TESTER_OUTPUT, 'SUITE_TESTER_OUTPUT')
# NOTE(review): the two IXIA registrations below duplicate the ones above;
# harmless (addLevelName simply overwrites) but they could be removed.
logging.addLevelName(logging.DTS_IXIA_CMD, 'DTS_IXIA_CMD')
logging.addLevelName(logging.DTS_IXIA_OUTPUT, 'DTS_IXIA_OUTPUT')
# Formats for the file handler (message_fmt) and the colorized console
# handler (stream_fmt); '%(color)s' is injected by ColorHandler.format().
message_fmt = '%(asctime)s %(levelname)20s: %(message)s'
date_fmt = '%d/%m/%Y %H:%M:%S'
RESET_COLOR = '\033[0m'
stream_fmt = '%(color)s%(levelname)20s: %(message)s' + RESET_COLOR
# Global log directory override; None means "use <cwd>/../<Output folder>".
log_dir = None
def set_verbose():
    """Enable verbose mode: console handlers created afterwards will show
    DEBUG-level logs (see DTSLOG.__log_handler)."""
    global verbose
    verbose = True
def add_salt(salt, msg):
    """Prefix *msg* with '[salt] ' when a non-empty salt is set;
    otherwise return *msg* unchanged."""
    if salt:
        return '[%s] ' % salt + str(msg)
    return msg
class BaseLoggerAdapter(logging.LoggerAdapter):
    """
    Upper layer of original logging module.

    Adds one convenience method per custom DTS/SUITE level; each simply
    delegates to ``self.log`` with the matching level constant defined on
    the ``logging`` module at import time.
    """
    def dts_dut_cmd(self, msg, *args, **kwargs):
        self.log(logging.DTS_DUT_CMD, msg, *args, **kwargs)
    def dts_dut_output(self, msg, *args, **kwargs):
        self.log(logging.DTS_DUT_OUTPUT, msg, *args, **kwargs)
    def dts_dut_result(self, msg, *args, **kwargs):
        self.log(logging.DTS_DUT_RESULT, msg, *args, **kwargs)
    def dts_tester_cmd(self, msg, *args, **kwargs):
        self.log(logging.DTS_TESTER_CMD, msg, *args, **kwargs)
    def dts_tester_output(self, msg, *args, **kwargs):
        # BUG FIX: this previously logged at DTS_TESTER_CMD level, so tester
        # output was recorded with the wrong (CMD) level name.
        self.log(logging.DTS_TESTER_OUTPUT, msg, *args, **kwargs)
    def dts_tester_result(self, msg, *args, **kwargs):
        self.log(logging.DTS_TESTER_RESULT, msg, *args, **kwargs)
    def suite_dut_cmd(self, msg, *args, **kwargs):
        self.log(logging.SUITE_DUT_CMD, msg, *args, **kwargs)
    def suite_dut_output(self, msg, *args, **kwargs):
        self.log(logging.SUITE_DUT_OUTPUT, msg, *args, **kwargs)
    def suite_tester_cmd(self, msg, *args, **kwargs):
        self.log(logging.SUITE_TESTER_CMD, msg, *args, **kwargs)
    def suite_tester_output(self, msg, *args, **kwargs):
        self.log(logging.SUITE_TESTER_OUTPUT, msg, *args, **kwargs)
    def dts_ixia_cmd(self, msg, *args, **kwargs):
        self.log(logging.DTS_IXIA_CMD, msg, *args, **kwargs)
    def dts_ixia_output(self, msg, *args, **kwargs):
        self.log(logging.DTS_IXIA_OUTPUT, msg, *args, **kwargs)
    def dts_virtdut_cmd(self, msg, *args, **kwargs):
        self.log(logging.DTS_VIRTDUT_CMD, msg, *args, **kwargs)
    def dts_virtdut_output(self, msg, *args, **kwargs):
        self.log(logging.DTS_VIRTDUT_OUTPUT, msg, *args, **kwargs)
class ColorHandler(logging.StreamHandler):
    """
    Color of DTS log format.

    Injects a 'color' attribute into every record so that stream_fmt's
    '%(color)s' placeholder picks the ANSI escape matching the level.
    """
    # Map of level -> ANSI color escape; empty string means the terminal's
    # default (SYSTEM) color. Requires the custom logging.DTS_*/SUITE_*
    # levels to be defined before this class body is executed.
    LEVEL_COLORS = {
        logging.DEBUG: '', # SYSTEM
        logging.DTS_DUT_OUTPUT: '\033[00;37m', # WHITE
        logging.DTS_TESTER_OUTPUT: '\033[00;37m', # WHITE
        logging.SUITE_DUT_OUTPUT: '\033[00;37m', # WHITE
        logging.SUITE_TESTER_OUTPUT: '\033[00;37m', # WHITE
        logging.INFO: '\033[00;36m', # CYAN
        logging.DTS_DUT_CMD: '', # SYSTEM
        logging.DTS_TESTER_CMD: '', # SYSTEM
        logging.SUITE_DUT_CMD: '', # SYSTEM
        logging.SUITE_TESTER_CMD: '', # SYSTEM
        logging.DTS_IXIA_CMD: '', # SYSTEM
        logging.DTS_IXIA_OUTPUT: '', # SYSTEM
        logging.DTS_VIRTDUT_CMD: '', # SYSTEM
        logging.DTS_VIRTDUT_OUTPUT: '', # SYSTEM
        logging.WARN: '\033[01;33m', # BOLD YELLOW
        logging.DTS_DUT_RESULT: '\033[01;34m', # BOLD BLUE
        logging.DTS_TESTER_RESULT: '\033[01;34m', # BOLD BLUE
        logging.ERROR: '\033[01;31m', # BOLD RED
        logging.CRITICAL: '\033[01;31m', # BOLD RED
    }
    def format(self, record):
        # NOTE(review): a record logged at a level missing from LEVEL_COLORS
        # raises KeyError here; all framework levels are covered above.
        record.__dict__['color'] = self.LEVEL_COLORS[record.levelno]
        return logging.StreamHandler.format(self, record)
class DTSLOG(BaseLoggerAdapter):
    """
    DTS log class for framework and testsuite.

    Wraps a standard logger with a file handler (always DEBUG) and a
    colorized console handler (INFO, or DEBUG in verbose mode). The
    info/debug/warn level attributes are remapped per CRB by
    config_execution()/config_suite() so that the same .info()/.debug()
    calls land on the matching custom DTS_*/SUITE_* levels.
    """
    def __init__(self, logger, crb="suite"):
        """
        :param logger: the underlying logging.Logger to wrap
        :param crb: which board this logger speaks for ('dut', 'tester',
                    'ixia', 'virtdut' or the default 'suite')
        """
        global log_dir
        # Derive the adapter name from the *caller's* source file name.
        filename = inspect.stack()[1][1][:-3]
        self.name = filename.split('/')[-1]
        # Default level mapping; reconfigured later per CRB.
        self.error_lvl = logging.ERROR
        self.warn_lvl = logging.WARNING
        self.info_lvl = logging.INFO
        self.debug_lvl = logging.DEBUG
        if log_dir is None:
            self.log_path = os.getcwd() + "/../" + FOLDERS['Output']
        else:
            self.log_path = log_dir # log dir should contain tag/crb global value and mod in dts
        self.dts_log = "dts.log"
        self.logger = logger
        # Logger itself passes everything; the handlers do the filtering.
        self.logger.setLevel(logging.DEBUG)
        self.crb = crb
        super(DTSLOG, self).__init__(self.logger, dict(crb=self.crb))
        self.salt = ''
        self.fh = None
        self.ch = None
        # add default log file
        fh = logging.FileHandler(self.log_path + "/" + self.dts_log)
        ch = ColorHandler()
        self.__log_handler(fh, ch)
    def __log_handler(self, fh, ch):
        """
        Config stream handler and file handler.

        Installs the new pair, then removes the previously installed pair
        (if any) so exactly one file + one console handler stay active.
        """
        fh.setFormatter(logging.Formatter(message_fmt, date_fmt))
        ch.setFormatter(logging.Formatter(stream_fmt, date_fmt))
        fh.setLevel(logging.DEBUG)  # file hander default level
        global verbose
        if verbose is True:
            ch.setLevel(logging.DEBUG)
        else:
            ch.setLevel(logging.INFO)  # console handler default level
        self.logger.addHandler(fh)
        self.logger.addHandler(ch)
        if self.fh is not None:
            self.logger.removeHandler(self.fh)
        if self.ch is not None:
            self.logger.removeHandler(self.ch)
        self.fh = fh
        self.ch = ch
    def warning(self, message):
        """
        DTS warnning level log function.
        """
        message = add_salt(self.salt, message)
        self.logger.log(self.warn_lvl, message)
    def info(self, message):
        """
        DTS information level log function.
        """
        message = add_salt(self.salt, message)
        self.logger.log(self.info_lvl, message)
    def error(self, message):
        """
        DTS error level log function.
        """
        message = add_salt(self.salt, message)
        self.logger.log(self.error_lvl, message)
    def debug(self, message):
        """
        DTS debug level log function.
        """
        message = add_salt(self.salt, message)
        self.logger.log(self.debug_lvl, message)
    def set_logfile_path(self, path):
        """
        Configure the log file path.

        Takes effect on the next config_execution()/config_suite() call;
        the already-open file handler is not moved.
        """
        self.log_path = path
    def set_stream_level(self, lvl):
        """
        Configure the stream level, logger level >= stream level will be
        output on the screen.
        """
        self.ch.setLevel(lvl)
    def set_logfile_level(self, lvl):
        """
        Configure the file handler level, logger level >= logfile level will
        be saved into log file.
        """
        self.fh.setLevel(lvl)
    def config_execution(self, crb):
        """
        Reconfigure stream&logfile level and reset info,debug,warn level.

        :param crb: string starting with 'dut'/'tester'/'ixia'/'virtdut';
                    anything else restores the standard logging levels.
        """
        log_file = self.log_path + '/' + self.dts_log
        fh = logging.FileHandler(log_file)
        ch = ColorHandler()
        self.__log_handler(fh, ch)
        def set_salt(crb, start_flag):
            # e.g. crb='dut_NAME' with LOG_NAME_SEP='_' -> salt='NAME'
            if LOG_NAME_SEP in crb:
                old = '%s%s' % (start_flag, LOG_NAME_SEP)
                if not self.salt:
                    self.salt = crb.replace(old, '', 1)
        if crb.startswith('dut'):
            self.info_lvl = logging.DTS_DUT_CMD
            self.debug_lvl = logging.DTS_DUT_OUTPUT
            self.warn_lvl = logging.DTS_DUT_RESULT
            set_salt(crb, 'dut')
        elif crb.startswith('tester'):
            self.info_lvl = logging.DTS_TESTER_CMD
            self.debug_lvl = logging.DTS_TESTER_OUTPUT
            self.warn_lvl = logging.DTS_TESTER_RESULT
            set_salt(crb, 'tester')
        elif crb.startswith('ixia'):
            self.info_lvl = logging.DTS_IXIA_CMD
            self.debug_lvl = logging.DTS_IXIA_OUTPUT
            set_salt(crb, 'ixia')
        elif crb.startswith('virtdut'):
            self.info_lvl = logging.DTS_VIRTDUT_CMD
            self.debug_lvl = logging.DTS_VIRTDUT_OUTPUT
            set_salt(crb, 'virtdut')
        else:
            self.error_lvl = logging.ERROR
            self.warn_lvl = logging.WARNING
            self.info_lvl = logging.INFO
            self.debug_lvl = logging.DEBUG
    def config_suite(self, suitename, crb=None):
        """
        Reconfigure stream&logfile level and reset info,debug level.

        Switches the file handler to '<suitename>.log' and remaps the
        info/debug levels to the suite-specific custom levels.
        """
        log_file = self.log_path + '/' + suitename + '.log'
        fh = logging.FileHandler(log_file)
        ch = ColorHandler()
        # exit first
        self.logger_exit()
        # then add handler
        self.__log_handler(fh, ch)
        if crb == 'dut':
            self.info_lvl = logging.SUITE_DUT_CMD
            self.debug_lvl = logging.SUITE_DUT_OUTPUT
        elif crb == 'tester':
            self.info_lvl = logging.SUITE_TESTER_CMD
            self.debug_lvl = logging.SUITE_TESTER_OUTPUT
        elif crb == 'ixia':
            self.info_lvl = logging.DTS_IXIA_CMD
            self.debug_lvl = logging.DTS_IXIA_OUTPUT
        elif crb == 'virtdut':
            self.info_lvl = logging.DTS_VIRTDUT_CMD
            self.debug_lvl = logging.DTS_VIRTDUT_OUTPUT
    def logger_exit(self):
        """
        Remove stream handler and logfile handler.
        """
        if self.fh is not None:
            self.logger.removeHandler(self.fh)
        if self.ch is not None:
            self.logger.removeHandler(self.ch)
def getLogger(name, crb="suite"):
    """
    Return a DTSLOG adapter wrapping ``logging.getLogger(name)`` that is
    configured for the given CRB.
    """
    return DTSLOG(logging.getLogger(name), crb)
# Regex patterns LogParser uses to find suite/case boundaries in dts.log.
_TESTSUITE_NAME_FORMAT_PATTERN = r'TEST SUITE : (.*)'
_TESTSUITE_ENDED_FORMAT_PATTERN = r'TEST SUITE ENDED: (.*)'
_TESTCASE_NAME_FORMAT_PATTERN = r'Test Case (.*) Begin'
_TESTCASE_RESULT_FORMAT_PATTERN = r'Test Case (.*) Result (.*):'
class LogParser(object):
"""
Module for parsing saved log file, will implement later.
"""
def __init__(self, log_path):
self.log_path = log_path
try:
self.log_handler = open(self.log_path, 'r')
except:
print RED("Failed to logfile %s" % log_path)
return None
self.suite_pattern = re.compile(_TESTSUITE_NAME_FORMAT_PATTERN)
self.end_pattern = re.compile(_TESTSUITE_ENDED_FORMAT_PATTERN)
self.case_pattern = re.compile(_TESTCASE_NAME_FORMAT_PATTERN)
self.result_pattern = re.compile(_TESTCASE_RESULT_FORMAT_PATTERN)
self.loglist = self.parse_logfile()
self.log_handler.close()
def locate_suite(self, suite_name=None):
begin = 0
end = len(self.loglist)
for line in self.loglist:
m = self.suite_pattern.match(line.values()[0])
if m:
if suite_name is None:
begin = self.loglist.index(line)
elif suite_name == m.group(1):
begin = self.loglist.index(line)
for line in self.loglist[begin:]:
m = self.end_pattern.match(line.values()[0])
if m:
if suite_name is None:
end = self.loglist.index(line)
elif suite_name == m.group(1):
end = self.loglist.index(line)
return self.loglist[begin:end + 1]
def locate_case(self, case_name=None):
begin = 0
end = len(self.loglist)
for line in self.loglist:
# only handle case log
m = self.case_pattern.match(line.values()[0])
if m:
# not determine case will start from begining
if case_name is None:
begin = self.loglist.index(line)
# start from the determined case
elif case_name == m.group(1):
begin = self.loglist.index(line)
for line in self.loglist[begin:]:
m = self.result_pattern.match(line.values()[0])
if m:
# not determine case will stop to the end
if case_name is None:
end = self.loglist.index(line)
# stop to the determined case
elif case_name == m.group(1):
end = self.loglist.index(line)
return self.loglist[begin:end + 1]
def __dict_log(self, lvl_name, msg):
tmp = {}
if lvl_name is not '':
tmp[lvl_name] = msg
return tmp
def parse_logfile(self):
loglist = []
out_type = 'DTS_DUT_OUTPUT'
for line in self.log_handler:
tmp = {}
line = line.replace('\n', '')
line = line.replace('^M', '')
m = re.match("(\d{2}/\d{2}/\d{4}) (\d{2}:\d{2}:\d{2}) (.{20}): (.*)", line)
if m:
lvl_name = m.group(3).strip()
tmp = self.__dict_log(lvl_name, m.group(4))
if "OUTPUT" in lvl_name:
out_type = lvl_name
else:
tmp[out_type] = line
loglist.append(tmp)
return loglist
|
from typing import List
import pandas
from pathlib import Path
from xfeat import SelectCategorical, ConcatCombination, LabelEncoder, Pipeline, SelectNumerical
def read_df(root_dir:str, path:str, categorical_cols:List[str], exclude_cols:List[str]=None):
    """Read a csv and label-encode its categorical columns.

    :param root_dir: directory containing the csv file
    :param path: csv file name (joined onto root_dir)
    :param categorical_cols: columns to cast to str and label-encode
    :param exclude_cols: categorical columns to leave out of the encoding;
                         defaults to ["imp_time"]
    :return: DataFrame with the numerical columns followed by the
             label-encoded categorical columns
    """
    # BUG FIX: the default was the mutable list ["imp_time"], shared between
    # calls; use the None sentinel instead (same effective default).
    if exclude_cols is None:
        exclude_cols = ["imp_time"]
    df = pandas.read_csv(Path(root_dir, path))
    df = df.astype({c: str for c in categorical_cols})
    encoder = Pipeline([
        SelectCategorical(exclude_cols=exclude_cols),
        LabelEncoder(output_suffix=""),
    ])
    df_encoded = pandas.concat([SelectNumerical().fit_transform(df), encoder.fit_transform(df)], axis=1)
    return df_encoded
# coding: utf-8
from __future__ import absolute_import, division, print_function
import logging
class Message(object):
    """Container for one incoming Slack outgoing-webhook message.

    NOTE: this is Python 2 code (see the 'except Exception, e' syntax).
    """
    # Class-level defaults; parse() overwrites them on each instance.
    token = ""
    team_id = ""
    channel_id = ""
    channel_name = ""
    timestamp = 0
    user_id = ""
    user_name = ""
    # raw text
    text = ""
    # parsed text
    # NOTE(review): 'args = []' is a mutable class attribute shared by all
    # instances until parse() rebinds it per instance.
    args = []
    command = ""
    trigger_word = ""
    @classmethod
    def parse(cls, request):
        """Build a Message from a webhook request's form parameters.

        Missing parameters are logged and ignored, so a partially filled
        Message may be returned.
        """
        params = request.form
        msg = cls()
        try:
            msg.team_id = params["team_id"]
            msg.channel_id = params["channel_id"]
            msg.channel_name = params["channel_name"]
            msg.timestamp = params["timestamp"]
            msg.user_id = params["user_id"]
            msg.user_name = params["user_name"]
            msg.text = params["text"]
            msg.trigger_word = params["trigger_word"]
            msg.args = msg.text.split()
            logging.debug(msg.args)
            # args[0] is the trigger word itself; the command follows it
            if len(msg.args) >= 2:
                msg.command = msg.args[1]
        except Exception, e:
            logging.error(e)
        return msg
    def __str__(self):
        res = self.__class__.__name__
        res += "@{0.token}[channel={0.channel_name}, user={0.user_name}, text={0.text}]".format(self)
        return res
|
class Solution(object):
    def findDuplicate(self, nums):
        """Find the duplicated number in nums (n+1 numbers drawn from 1..n).

        Binary-searches the value range: if the count of entries <= mid is
        greater than mid, the duplicate lies in [i, mid] (pigeonhole).
        O(n log n) time, O(1) extra space, does not modify nums.

        :type nums: List[int]
        :rtype: int
        """
        def countLess(nums, x):
            # Number of entries less than or equal to x.
            ret = 0
            for num in nums:
                if num <= x:
                    ret += 1
            return ret

        length = len(nums)
        n = length - 1
        i, j = 1, n
        while i < j:
            # Floor division: "/" yields a float on Python 3 and would make
            # mid a non-integer; "//" is correct on both Python 2 and 3.
            mid = (i + j) // 2
            x = countLess(nums, mid)
            if x <= mid:
                i = mid + 1
            else:
                j = mid
        return i
def main():
    """Exercise Solution.findDuplicate on a few sample inputs."""
    s = Solution()
    # print() calls run on both Python 2 and 3; the statement form is 2-only.
    print(s.findDuplicate([1, 3, 4, 2, 2]))
    print(s.findDuplicate([1, 3, 3, 4, 2]))
    print(s.findDuplicate([1, 3, 5, 4, 2, 5]))

if __name__ == "__main__":
    main()
|
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
    def reorderList(self, head):
        """Reorder L0->L1->...->Ln into L0->Ln->L1->Ln-1->... in place.

        :type head: ListNode
        :rtype: void Do not return anything, modify head in-place instead.
        """
        if head is None or head.next is None:
            return
        # Slow/fast pointers: when fast runs off the list, slow marks the
        # last node that stays in the first half.
        slow = fast = head
        while True:
            slow = slow.next
            fast = fast.next.next
            if fast is None or fast.next is None:
                break
        # Collect the second half onto a stack and detach it.
        tail_nodes = []
        cursor = slow.next
        while cursor is not None:
            tail_nodes.append(cursor)
            cursor = cursor.next
        slow.next = None
        # Weave the stacked (reversed) second half into the first half.
        cursor = head
        while tail_nodes:
            picked = tail_nodes.pop()
            picked.next = cursor.next
            cursor.next = picked
            cursor = picked.next
|
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 10 07:46:43 2018
@author: jhodges
"""
import h5py
import numpy as np
import glob
import skimage.measure as skme
import matplotlib.pyplot as plt
def writeConstH5(name,elevImg,canopyImg,canopyHeightImg,canopyBaseHeightImg,fuelImg):
    """Write the time-invariant terrain/fuel rasters to a gzip'd HDF5 file."""
    layers = (('elevation', elevImg),
              ('canopyCover', canopyImg),
              ('canopyHeight', canopyHeightImg),
              ('canopyBaseHeight', canopyBaseHeightImg),
              ('fuelModel', fuelImg))
    hf = h5py.File(name, 'w')
    for key, img in layers:
        hf.create_dataset(key, data=img, compression="gzip", compression_opts=9)
    hf.close()
def writeSpecH5(specName,pointData,inputBurnmap,outputBurnmap,constsName):
    """Write one simulation case (point data, burn maps, consts reference) to HDF5."""
    hf = h5py.File(specName, 'w')
    for key, arr in (('pointData', pointData),
                     ('inputBurnmap', inputBurnmap),
                     ('outputBurnmap', outputBurnmap)):
        hf.create_dataset(key, data=arr, compression="gzip", compression_opts=9)
    # Store the companion constants file name as a variable-length byte string.
    hf.create_dataset('constsName', data=bytes(constsName, 'utf-8'),
                      dtype=h5py.special_dtype(vlen=bytes))
    hf.close()
def readSpecH5(specName):
    """Load one simulation case plus its shared constants file.

    :param specName: path to a ``run_*.h5`` spec file.
    :return: ``(data, outputBurnmap, constsName)`` where ``data`` is an
        (H, W, 13) array stacking burn map, terrain, weather and fuel layers.
    """
    hf = h5py.File(specName,'r')
    pointData = hf.get('pointData')
    # np.float was removed in NumPy 1.24; the builtin float is equivalent.
    inputBurnmap = np.array(hf.get('inputBurnmap'),dtype=float)
    outputBurnmap = np.array(hf.get('outputBurnmap'),dtype=float)
    # Dataset.value was removed in h5py 3.0; [()] reads a scalar on all versions.
    constsName = hf.get('constsName')[()].decode('utf-8')
    [windX,windY,lhm,lwm,m1h,m10h,m100h] = pointData
    hf.close()
    # Constants live in a sibling file one directory up from the run files.
    hf = h5py.File(specName.split('run_')[0]+"..//"+constsName,'r')
    elev = np.array(hf.get('elevation'),dtype=float)
    canopyCover = np.array(hf.get('canopyCover'),dtype=float)
    canopyHeight = np.array(hf.get('canopyHeight'),dtype=float)
    canopyBaseHeight = np.array(hf.get('canopyBaseHeight'),dtype=float)
    fuelModel = np.array(hf.get('fuelModel'),dtype=float)
    data = np.zeros((elev.shape[0],elev.shape[1],13))
    data[:,:,0] = inputBurnmap
    data[:,:,1] = elev
    data[:,:,2] = windX
    data[:,:,3] = windY
    data[:,:,4] = lhm
    data[:,:,5] = lwm
    data[:,:,6] = m1h
    data[:,:,7] = m10h
    data[:,:,8] = m100h
    data[:,:,9] = canopyCover
    data[:,:,10] = canopyHeight
    data[:,:,11] = canopyBaseHeight
    data[:,:,12] = fuelModel
    # Fix a resource leak: the constants file was never closed.
    hf.close()
    return data, outputBurnmap, constsName
def downsampleData(data,outputBurnmap):
    """Median-downsample each channel (and the output map) by 33x33 blocks to 50x50."""
    reduced = np.zeros((50, 50, 13))
    for channel in range(data.shape[2]):
        reduced[:, :, channel] = skme.block_reduce(
            data[:, :, channel], (33, 33), np.median)[:50, :50]
    outputBurnmap = skme.block_reduce(outputBurnmap, (33, 33), np.median)[:50, :50]
    return reduced, outputBurnmap
def extractConsts(data):
    """Split the stacked data cube back into its time-invariant 2-D layers."""
    # Channel layout: 1=elevation, 9=canopy cover, 10=canopy height,
    # 11=canopy base height, 12=fuel model.
    layers = [data[:, :, idx] for idx in (1, 9, 10, 11, 12)]
    return tuple(layers)
def extractPointData(data):
    """Read the spatially-uniform scalars (wind, moisture) from pixel (0, 0)."""
    # Channels 2..8 hold windX, windY, lhm, lwm, m1h, m10h, m100h in order.
    return [data[0, 0, channel] for channel in range(2, 9)]
if __name__ == "__main__":
    # Downsample every high-resolution run in the training set and write
    # the reduced constants/spec files into the lowres_2 subdirectory.
    inDir = "E:\\projects\\wildfire-research\\farsite\\results\\train\\"
    outDir = inDir+"lowres_2\\"
    for run_path in glob.glob(inDir+"run*.h5"):
        data, outputBurnmap, constsName = readSpecH5(run_path)
        data, outputBurnmap = downsampleData(data, outputBurnmap)
        elev, canopy, canopyHeight, canopyBaseHeight, fuel = extractConsts(data)
        pointData = extractPointData(data)
        inputBurnmap = data[:, :, 0]
        specName = outDir + run_path.split('\\')[-1]
        writeConstH5(outDir+constsName, elev, canopy, canopyHeight, canopyBaseHeight, fuel)
        writeSpecH5(specName, pointData, inputBurnmap, outputBurnmap, constsName)
import os

print("path = ", os.getcwd())

# Collect the .zip archives currently in the working directory.
files = []
for entry in os.listdir():
    if '.zip' in entry:
        files.append(entry)

count = len(files) - 1
if files:
    # Prefix the most recent archive with its sequence number; zfill keeps
    # single-digit prefixes two characters wide ("07-"), and str(count)
    # already has two or more digits once count >= 10.
    prefix = str(count).zfill(2) if count < 10 else str(count)
    # BUG FIX: the original mutated files[-1] before calling os.rename()
    # in the count >= 10 branch, so the rename source path did not exist.
    # os.path.join also replaces the hard-coded Windows "\\" separator.
    src = os.path.join(os.getcwd(), files[-1])
    dst = os.path.join(os.getcwd(), prefix + '-' + files[-1])
    os.rename(src, dst)

# Re-list so the printed names reflect the rename. The original appended to
# the existing list, printing every pre-rename name twice.
files = [entry for entry in os.listdir() if '.zip' in entry]
print("zip list :",)
print(*files, sep=", ")
import click
from OpenSSL import crypto
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.serialization import Encoding, PrivateFormat, NoEncryption
from consul_kube.lib.color import debug, info, color_assert
from consul_kube.lib.kube import ConsulApiClient
from consul_kube.lib.x509 import compare_certs, save_cert, save_key
def generate_ecdsa_key() -> crypto.PKey:
    """Create a fresh P-256 ECDSA private key as a pyOpenSSL PKey."""
    debug('Generating EC params')
    private_key = ec.generate_private_key(ec.SECP256R1(), default_backend())
    # Round-trip through PEM to convert the `cryptography` key object into
    # the pyOpenSSL representation the rest of the code expects.
    pem_bytes = private_key.private_bytes(encoding=Encoding.PEM,
                                          format=PrivateFormat.TraditionalOpenSSL,
                                          encryption_algorithm=NoEncryption())
    return crypto.load_privatekey(crypto.FILETYPE_PEM, pem_bytes)
def generate_ca_root(serial_number: int, trust_domain: str, public_key: crypto.PKey) -> crypto.X509:
    """Build a self-signed CA certificate for the given Consul trust domain.

    The certificate is X.509 v3, valid for ~10 years, carries the SPIFFE
    trust-domain URI as a subjectAltName, and is signed with the supplied
    key (a PKey that, despite the parameter name, must hold private material
    for the final sign() call to succeed).
    """
    debug('Generating self-signed CA certificate')
    cert = crypto.X509()
    cert.set_version(2)  # value 2 encodes X.509 version 3
    cert.set_serial_number(serial_number)
    cert.set_pubkey(public_key)
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(10 * 365 * 86400)  # ~10 years, expressed in seconds
    setattr(cert.get_subject(), 'CN', f'Consul CA {serial_number}')
    cert.set_issuer(cert.get_subject())  # self-signed: issuer == subject
    spiffe_uri = f'URI:spiffe://{trust_domain}'.encode('utf-8')
    cert.add_extensions([
        crypto.X509Extension(b'basicConstraints', True, b'CA:TRUE'),
        crypto.X509Extension(b'keyUsage', True, b'digitalSignature, cRLSign, keyCertSign'),
        crypto.X509Extension(b'subjectAltName', False, spiffe_uri)
    ])
    # NOTE(review): these two are added in separate calls, presumably because
    # the authorityKeyIdentifier keyid is derived from the subjectKeyIdentifier
    # attached just before — confirm before merging the calls.
    cert.add_extensions([crypto.X509Extension(b'subjectKeyIdentifier', False, b'hash', subject=cert)])
    cert.add_extensions([crypto.X509Extension(b'authorityKeyIdentifier', False, b'keyid:always,issuer', issuer=cert)])
    # noinspection PyTypeChecker
    cert.sign(public_key, "sha256")
    return cert
# noinspection PyUnusedLocal
# noinspection PyUnusedLocal
def rotate_command(ctx: click.Context) -> None:  # pylint: disable=W0613
    """Generate a new CA key/cert and install it as Consul's active root."""
    debug('Looking up existing CA serial number')
    api = ConsulApiClient()
    current_root, ca_info = api.active_ca_root_cert
    serial = current_root.get_serial_number()
    info(f'Current CA serial number is {serial}')
    # Build the replacement root with the next serial number and persist
    # both artifacts locally before touching the cluster.
    new_key = generate_ecdsa_key()
    new_cert = generate_ca_root(serial + 1, ca_info['TrustDomain'], new_key)
    save_cert('new_root.crt', [new_cert])
    save_key('new_root.key', new_key)
    debug('Sending new CA cert to Consul')
    body, status, _ = api.update_config(new_key, new_cert)
    color_assert(status == 200, f'Unexpected HTTP return code from server: {status}({body})',
                 'Consul responded with 200 OK')
    debug('Confirming new CA cert with Consul')
    active_root, _ = api.active_ca_root_cert
    color_assert(compare_certs(active_root, new_cert),
                 'Cert returned by Consul does not match what we just uploaded',
                 'Cert returned by Consul matches our new cert')
    info(f'New root CA with serial {active_root.get_serial_number()} is now live')
|
"""Tools for managing evaluation contexts. """
from sympy.utilities.iterables import dict_merge
from sympy.polys.polyutils import PicklableWithSlots
# Names an evaluation context is allowed to carry.
__known_options__ = set(['frac', 'gens', 'wrt', 'sort', 'order', 'domain',
    'modulus', 'gaussian', 'extension', 'field', 'greedy', 'symmetric'])

__global_options__ = []

__template__ = """\
def %(option)s(_%(option)s):
    return Context(%(option)s=_%(option)s)
"""

# Generate one helper per known option, e.g. frac(x) -> Context(frac=x).
# The exec() call form runs on both Python 2 and 3; the bare statement
# form used previously is Python-2-only syntax.
for option in __known_options__:
    exec(__template__ % { 'option': option })
class Context(PicklableWithSlots):
    """Container of evaluation options, exposing each known option as an attribute."""

    __slots__ = ['__options__']

    def __init__(self, dict=None, **options):
        # NOTE: the parameter name shadows the builtin, but it is part of the
        # public keyword interface and is kept for backward compatibility.
        if dict is not None:
            self.__options__ = dict_merge(dict, options)
        else:
            self.__options__ = options

    def __getattribute__(self, name):
        # Known options resolve through __options__ (missing ones yield None);
        # everything else falls back to normal attribute lookup.
        if name in __known_options__:
            try:
                return object.__getattribute__(self, '__options__')[name]
            except KeyError:
                return None
        else:
            return object.__getattribute__(self, name)

    def __str__(self):
        # dict.items() works on Python 2 and 3; iteritems() is 2-only and
        # raises AttributeError on Python 3.
        return 'Context(%s)' % ', '.join(
            [ '%s=%r' % (key, value) for key, value in self.__options__.items() ])

    def __and__(self, other):
        # Merge two contexts; the right-hand side wins on conflicts.
        if isinstance(other, Context):
            return Context(**dict_merge(self.__options__, other.__options__))
        else:
            raise TypeError("a context manager expected, got %s" % other)

    def __enter__(self):
        raise NotImplementedError('global context')

    def __exit__(self, exc_type, exc_val, exc_tb):
        raise NotImplementedError('global context')
def register_context(func):
    """Attach func to Context as a method that merges in the context's options."""
    def wrapper(self, *args, **kwargs):
        merged = dict_merge(self.__options__, kwargs)
        return func(*args, **merged)
    # Preserve the wrapped function's identity for introspection.
    wrapper.__doc__ = func.__doc__
    wrapper.__name__ = func.__name__
    setattr(Context, func.__name__, wrapper)
    return func
|
from django.apps import AppConfig
from django.conf import settings
import tensorflow as tf
import os
class LstmConfig(AppConfig):
    """Django app config for the 'lstm' app.

    Builds a Conv1D + stacked bidirectional-LSTM regression model and loads
    pretrained weights from STATICFILES_DIR/model.h5 when Django imports the
    app registry (i.e. at process start-up).
    """
    name = 'lstm'
    # Input: variable-length sequences with 6 features per timestep; the
    # Lambda layers scale inputs down by 10x and outputs back up by 10x.
    model = tf.keras.models.Sequential([
        tf.keras.layers.Lambda(lambda x: x * 0.1,
                            input_shape=[None,6]),
        tf.keras.layers.Conv1D(filters=32, kernel_size=5,
                            strides=1, padding="causal",
                            activation="relu"),
        # NOTE(review): input_shape=[None] on this inner LSTM looks
        # ineffective (shape is inferred from the previous layer) — confirm.
        tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(64,activation='relu',return_sequences=True,input_shape=[None])),
        tf.keras.layers.Dropout(0.3),
        tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(64,activation='relu', return_sequences=True)),
        tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(32,activation='relu')),
        tf.keras.layers.Dropout(0.1),
        tf.keras.layers.Dense(16,activation='relu'),
        tf.keras.layers.Dense(1),
        tf.keras.layers.Lambda(lambda x: x * 10)
    ])
    optimizer = tf.keras.optimizers.Adam(learning_rate=0.00034)
    model.compile(loss=tf.keras.losses.Huber(), optimizer=optimizer,metrics=[tf.keras.metrics.RootMeanSquaredError()])
    model_h5 = os.path.join(settings.STATICFILES_DIR, 'model.h5')
    # load weights into new model
    model.load_weights(model_h5)
|
# Byte-order marker characters; the values match the byte-order prefix
# characters used by the `struct` module format strings.
BIG_ENDIAN = ">"
LITTLE_ENDIAN = "<"
|
"""
msort is the library backing the mediasort.py command line tool
"""
__version__ = "2.0"
class MSortError(Exception):
    """Base exception for errors raised by the msort library."""
|
#!/usr/bin/python
import sys
from os.path import join,exists,dirname
import random
import numpy as np
from numpy.random import randint, choice
from sklearn.datasets import load_svmlight_file
from torch.autograd import Function, Variable
import torch.nn as nn
import torch.optim as optim
import torch
from torch import FloatTensor
from uda_common import read_feature_groups, read_feature_lookup
# the concepts here come from: https://github.com/fungtion/DANN/blob/master/models/model.py
class ReverseLayerF(Function):
    """Identity in the forward pass; negates and scales gradients on the way back.

    Used for adversarial domain adaptation (DANN-style): layers before it
    receive the reversed gradient of the loss computed after it.
    """

    @staticmethod
    def forward(ctx, x, alpha):
        # Remember the reversal strength for backward().
        ctx.alpha = alpha
        return x.view_as(x)

    @staticmethod
    def backward(ctx, grad_output):
        # Flip the gradient's sign and scale by alpha; alpha itself
        # receives no gradient (second return value is None).
        return grad_output.neg() * ctx.alpha, None
# Instead of this, may be able to just regularize by forcing off-diagonal to zero
# didn't work bc/ of memory issues
class StraightThroughLayer(nn.Module):
    """Element-wise learnable gate: multiplies the input by a weight vector."""

    def __init__(self, input_features):
        super(StraightThroughLayer, self).__init__()
        # One learnable weight per input feature, randomly initialized.
        self.vector = nn.Parameter(torch.randn(1, input_features))

    def forward(self, input_data):
        # Hadamard product; the (1, F) vector broadcasts over the batch.
        return torch.mul(input_data, self.vector)
class PivotLearnerModel(nn.Module):
    """DANN-style model: a shared feature layer feeding a task head and an
    adversarial (gradient-reversed) domain head."""

    def __init__(self, input_features):
        super(PivotLearnerModel, self).__init__()
        feature_count = input_features
        # Feature extractor: element-wise gate followed by ReLU.
        # (Sub-module names are kept so state_dict keys stay stable.)
        self.feature = nn.Sequential()
        self.feature.add_module('input_layer', StraightThroughLayer(input_features))
        self.feature.add_module('relu', nn.ReLU(True))
        # Binary task head: linear + sigmoid.
        self.task_classifier = nn.Sequential()
        self.task_classifier.add_module('task_binary', nn.Linear(feature_count, 1))
        self.task_classifier.add_module('task_sigmoid', nn.Sigmoid())
        # Binary domain head: bias-free linear + sigmoid.
        self.domain_classifier = nn.Sequential()
        self.domain_classifier.add_module('domain_classifier', nn.Linear(feature_count, 1, bias=False))
        self.domain_classifier.add_module('domain_sigmoid', nn.Sigmoid())

    def forward(self, input_data, alpha):
        """Return (task_prediction, domain_prediction).

        alpha scales the gradient reversal applied to the features before
        the domain head; the task head sees the features unreversed.
        """
        feature = self.feature(input_data)
        task_prediction = self.task_classifier(feature)
        reversed_feature = ReverseLayerF.apply(feature, alpha)
        domain_prediction = self.domain_classifier(reversed_feature)
        return task_prediction, domain_prediction
def main(args):
    """Train a DANN-style pivot learner with an adversarial domain classifier.

    args[0] is an svmlight data file; optional args[1] selects direction —
    pass "true"/"1"/"yes" (case-insensitive) for backward. Trains for a
    fixed number of epochs, logging task P/R/F1 and domain accuracy each
    epoch, then prints the 1000 highest-weighted (pivot) features.
    """
    if len(args) < 1:
        sys.stderr.write("Required arguments: <data file> [backward True|False]\n")
        sys.exit(-1)
    device = 'cpu'
    if torch.cuda.is_available():
        device = 'cuda'
    if len(args) > 1:
        # BUG FIX: the original used bool(args[1]), but bool() of any
        # non-empty string — including "False" — is True. Parse the text.
        backward = args[1].strip().lower() in ('true', '1', 'yes')
        print("Direction is backward based on args=%s" % (args[1]))
    else:
        backward = False
        print("Direction is forward by default")
    # Hyper-parameters:
    domain_weight = 1.0   # weight of the adversarial domain loss
    reg_weight = 0.1      # weight of the L1 sparsity term
    epochs = 1000
    batch_size = 50
    sys.stderr.write("Reading source data from %s\n" % (args[0]))
    all_X, all_y = load_svmlight_file(args[0])
    # y is 1,2 by default, map to 0,1 for sigmoid training
    all_y -= 1  # 0/1
    num_instances, num_feats = all_X.shape
    domain_map = read_feature_groups(join(dirname(args[0]), 'reduced-feature-groups.txt'))
    domain_inds = domain_map['Domain']
    feature_map = read_feature_lookup(join(dirname(args[0]), 'reduced-features-lookup.txt'))
    direction = 1 if backward else 0
    sys.stderr.write("using domain %s as source, %s as target\n" %
                     (feature_map[domain_inds[direction]],feature_map[domain_inds[1-direction]]))
    # Select source rows and blank both domain-indicator features so the
    # classifiers cannot trivially read the domain from the input.
    source_instance_inds = np.where(all_X[:,domain_inds[direction]].toarray() > 0)[0]
    X_source = all_X[source_instance_inds,:]
    X_source[:, domain_inds[direction]] = 0
    X_source[:, domain_inds[1-direction]] = 0
    y_source = all_y[source_instance_inds]
    num_train_instances = int(X_source.shape[0] * 0.8)
    X_task_train = X_source[:num_train_instances,:]
    y_task_train = y_source[:num_train_instances]
    X_task_valid = X_source[num_train_instances:, :]
    y_task_valid = y_source[num_train_instances:]
    # Same selection/blanking for the (unlabeled) target domain.
    target_instance_inds = np.where(all_X[:,domain_inds[1-direction]].toarray() > 0)[0]
    X_target = all_X[target_instance_inds,:]
    X_target[:, domain_inds[direction]] = 0
    X_target[:, domain_inds[1-direction]] = 0
    num_target_train = int(X_target.shape[0] * 0.8)
    X_target_train = X_target[:num_target_train,:]
    X_target_valid = X_target[num_target_train:, :]
    num_target_instances = X_target_train.shape[0]
    model = PivotLearnerModel(num_feats).to(device)
    task_loss_fn = nn.BCELoss()
    domain_loss_fn = nn.BCELoss()
    l1_loss = nn.L1Loss()
    optimizer = optim.Adam(model.parameters())
    try:
        # Only the StraightThroughLayer configuration exposes this vector;
        # narrowed from a bare except to the error that absence raises.
        weights = model.feature.input_layer.vector
        print("Before training:")
        print("Min (abs) weight: %f" % (torch.abs(weights).min()))
        print("Max (abs) weight: %f" % (torch.abs(weights).max()))
        print("Ave weight: %f" % (torch.abs(weights).mean()))
        num_zeros = (weights.data==0).sum()
        near_zeros = (torch.abs(weights.data)<0.000001).sum()
        print("Zeros=%d, near-zeros=%d" % (num_zeros, near_zeros))
    except AttributeError:
        pass
    # Main training loop
    for epoch in range(epochs):
        epoch_loss = 0
        model.train()
        # Do a training epoch:
        for batch in range( 1+ ( num_train_instances // batch_size ) ):
            model.zero_grad()
            start_ind = batch * batch_size
            if start_ind >= num_train_instances:
                # Happens when num_train_instances divides evenly by batch_size.
                break
            end_ind = num_train_instances if start_ind + batch_size >= num_train_instances else start_ind+batch_size
            this_batch_size = end_ind - start_ind
            # Ramp the gradient-reversal strength from 0 toward 1 over training.
            ave_ind = start_ind + this_batch_size // 2
            p = float(ave_ind + epoch * num_train_instances*2) / (epochs * num_train_instances*2)
            alpha = 2. / (1. + np.exp(-10 * p)) - 1
            source_batch = FloatTensor(X_task_train[start_ind:end_ind,:].toarray()).to(device)  # read input
            # NOTE(review): this builds labels of shape (1, 1, batch) while the
            # task head outputs (batch, 1) — relies on BCELoss broadcasting;
            # confirm against the torch version in use.
            source_task_labels = torch.unsqueeze(FloatTensor([y_task_train[start_ind:end_ind],]).to(device), 1)  # read task labels
            source_domain_labels = torch.zeros(this_batch_size,1, device=device)  # source domain label is 0
            # Task loss and (gradient-reversed) domain loss on the source batch:
            task_out, task_domain_out = model.forward(source_batch, alpha)
            task_loss = task_loss_fn(task_out, source_task_labels)
            domain_loss = domain_loss_fn(task_domain_out, source_domain_labels)
            try:
                weights = model.feature.input_layer.vector
                reg_term = l1_loss(weights, torch.zeros_like(weights, device=device))
            except AttributeError:
                # No sparse input layer in this configuration: skip L1 term.
                reg_term = 0
            # Randomly select a matching number of target instances:
            target_inds = choice(num_target_instances, this_batch_size, replace=False)
            target_batch = FloatTensor(X_target_train[target_inds,:].toarray()).to(device)  # read input
            target_domain_labels = torch.ones(this_batch_size, 1, device=device)  # target domain label is 1
            # Domain loss only for the (unlabeled) target instances:
            _, target_domain_out = model.forward(target_batch, alpha)
            target_domain_loss = domain_loss_fn(target_domain_out, target_domain_labels)
            # Combined objective: task + domain-adversarial + L1 sparsity.
            total_loss = (task_loss +
                          domain_weight * (domain_loss + target_domain_loss) +
                          reg_weight * reg_term)
            epoch_loss += total_loss
            total_loss.backward()
            optimizer.step()
        # End of epoch: evaluate domain accuracy, task P/R/F1 and weight
        # sparsity on the held-out splits (alpha=0 disables the reversal).
        source_eval_X = X_task_valid
        source_eval_y = y_task_valid
        source_task_out, source_domain_out = model.forward( FloatTensor(source_eval_X.toarray()).to(device), alpha=0.)
        # source domain is 0, count up predictions where 1 - prediction = 1
        source_domain_preds = np.round(source_domain_out.cpu().data.numpy())
        source_predicted_count = np.sum(1 - source_domain_preds)
        source_domain_acc = source_predicted_count / len(source_eval_y)
        target_eval_X = X_target_valid
        _, target_domain_out = model.forward( FloatTensor(target_eval_X.toarray()).to(device), alpha=0.)
        target_domain_preds = np.round(target_domain_out.cpu().data.numpy())
        target_predicted_count = np.sum(target_domain_preds)
        domain_acc = (source_predicted_count + target_predicted_count) / (source_eval_X.shape[0] + target_eval_X.shape[0])
        source_y_pred = np.round(source_task_out.cpu().data.numpy()[:,0])
        # predictions of 1 are the positive class: tps are where prediction and gold are 1
        tps = np.sum(source_y_pred * source_eval_y)
        true_preds = source_y_pred.sum()
        true_labels = source_eval_y.sum()
        recall = tps / true_labels
        prec = 1 if tps == 0 else tps / true_preds
        f1 = 2 * recall * prec / (recall+prec)
        try:
            weights = model.feature.input_layer.vector
            num_zeros = (weights.data==0).sum()
            near_zeros = (torch.abs(weights.data)<0.000001).sum()
            print("Min (abs) weight: %f" % (torch.abs(weights).min()))
            print("Max (abs) weight: %f" % (torch.abs(weights).max()))
            print("Ave weight: %f" % (torch.abs(weights).mean()))
        except AttributeError:
            num_zeros = near_zeros = -1
        print("[Source] Epoch %d: loss=%f\tzeros=%d\tnear_zeros=%d\tnum_insts=%d\tdom_acc=%f\tP=%f\tR=%f\tF=%f" % (epoch, epoch_loss, num_zeros, near_zeros, len(source_eval_y), domain_acc, prec, recall, f1))
    # After training: report the 1000 features with the largest absolute weights.
    weights = model.feature.input_layer.vector
    ranked_inds = torch.sort(torch.abs(weights))[1]
    pivots = ranked_inds[0,-1000:]
    pivot_list = pivots.cpu().data.numpy().tolist()
    for pivot in pivot_list:
        print('%d : %s' % (pivot, feature_map[pivot]))

if __name__ == '__main__':
    main(sys.argv[1:])
|
# -*- coding: utf-8 -*-
# flake8: noqa
# Tags set to None or False will be ignored
# Tags set to True will be used as-is
# Tags set to a string will be replaced by that string
# Tags set to a list will be replaced with multiple tags
from __future__ import unicode_literals
TAGS_MAP = {u'1:2000': None,
u'1:5000': None,
u'25000': None,
u'ACQUA, DERIVAZIONE IDRICA DI SOCCORSO, SIAT, SUAP, RNDT': u'Derivazione idrica di soccorso',
u'ACQUA, DERIVAZIONE IDRICA, RESTITUZIONE IDRICA, SIAT, SUAP, RNDT': u'Restituzione idrica',
u'Alvei': True,
u'BIC': None,
u'Biotopi comunali': True,
u'Biotopi locali': True,
u'Biotopi provinciali': True,
u'Cadastral parcels': None,
u'Comuni': True,
u'Corine Land Cover': None,
u'Direttiva Uccelli': None,
u'Distribuzione della popolazione \xe2\x80\x94 demografia': None,
u'Distribuzione della popolazione \u2014 demografia': None,
u'Edifici': True,
u'Elevazione': True,
u'Geologia': True,
u'Ghiacciai': True,
u'Idrografia': True,
u'Impianti agricoli e di acquacoltura': None,
u'Mineral resources': None,
u'Natura2000': None,
u'Nomi geografici': True,
u'Orto immagini': True,
u'PUP': None,
u'PUP, RNDT': None,
u'PUP, RNDT,': None,
u'PUP; RNDT': None,
u'PUP, RNDT, TRASPARENZA': [u'Trasparenza'],
u'Parcelle catastali': 'Particelle catastali',
u'Particelle catastali': True,
u'RNDT': None,
u'RNDT, attenzione identificatore pupvia': None,
u'RNDT, TRASPARENZA': [u'Trasparenza'],
u'Regioni marine': None,
u'Reti di trasporto': None,
u'Riserve Naturali provinciali': None,
u'Riserve locali': None,
u'SIAT': None,
u'SIAT, Grotte, Catasto grotte, Rilievi grotte, Cavit\xc3\xa0, Servizio Geologico, RNDT': u'Grotte',
u'SIAT, Grotte, Catasto grotte, Rilievi grotte, Cavit\xe0, Servizio Geologico, RNDT': None,
u'SIAT, Grotte, Rilievi grotte, Cavit\xc3\xa0, Servizio Geologico, RNDT': u'Grotte',
u'SIAT, Grotte, Rilievi grotte, Cavit\xe0, Servizio Geologico, RNDT': None,
u'SIAT, RNDT': None,
u'SIAT, RNDT, Corine Land Cover, Copertura del suolo': u'Copertura del suolo',
u'SIAT, RNDT, PGUAP': None,
u'SIAT, RNDT, cave dismesse, vecchie cave': [u'Cave', u'Cave dismesse'],
u'SIAT, RNDT, cave, piano cave': u'Cave',
u'SIAT, RNDT, miniere dismesse, sotterraneo': u'Miniere dismesse',
u'SIAT, RNDT, miniere, acque minerali, acque termali, concessioni': None,
u'SIAT, RNDT, pozzi, sorgenti, acque minerali': [u'Pozzi',
u'Sorgenti',
u'Acque minerali'],
u'SIAT, RNDT, viafer': None,
u'SIAT, RNDT, TRASPARENZA': [u'Trasparenza'],
u'SIAT, Terremoti, Stazioni sismiche, Eventi sismici, Servizio Geologico, RNDT': u'Terremoti',
u'SIAT, TRASPARENZA': [u'Trasparenza'],
u'SIAT, ghiacciai, RNDT': None,
u'SIAT, valanghe, RNDT': None,
u'SIAT,SUAP, RNDT, ACQUA, DERIVAZIONE IDRICA': [u'Acqua',
u'Derivazione idrica'],
u'SIC': None,
u'Sedi': True,
u'Servizi di pubblica utilit\xc3\xa0 e servizi amministrativi': None,
u'Servizi di pubblica utilit\xe0 e servizi amministrativi': None,
u'Siti': True,
u'Siti protetti': True,
u'TRASPARENZA': True,
u'Trasparenza': True,
u'UDF': None,
u'UNESCO': True,
u'Unit\xe0 amministrative': None,
u'ZPS': None,
u'ZSC, Zone Speciali di Conservazione, TRASPARENZA': [u'Trasparenza'],
u'accessi aree sciabili': None,
u'acqua di rifiuto urbana': ['Acqua'],
u'acque sotterranee': ['Acqua'],
u'acque superficiali': ['Acqua'],
u'aereoporti': 'Aeroporti',
u'aeroporti': 'Aeroporti',
u'agglomerato': None,
u'agricol': 'Agricoltura',
u'alberi monumentali': None,
u'alta capacit\xc3\xa0': None,
u'alta capacit\xe0': None,
u'altimetria': None,
u'altitudine': None,
u'alvei': True,
u'ambiti': None,
u'archeolog': 'Archeologia',
u'aree a bosco': True,
u'aree a pascolo': True,
u'aree agricole': True,
u'aree espansione': True,
u'aree estrattive': True,
u'aree insediative produttive servizi': True,
u'aree produttive': True,
u'aree protette': True,
u'aree rispetto': True,
u'aree rocciose': True,
u'aree sciabili': True,
u'aree servizi': True,
u'aree, RNDT': None,
u'asta fluviale': True,
u'attrezzature': None,
u'bacini': True,
u'bacini, RNDT': 'Bacini',
u'beni ambientali': True,
u'beni architettonici': True,
u'beni architettonici ed artistici': True,
u'beni artistici': True,
u'beni culturali': True,
u'big city': None,
u'biotopi': None,
u'bosc': 'Boschi',
u'boschi': True,
u'boundaries': None,
u'canali': True,
u'carico generato': None,
u'carta del paesaggio': True,
u'carta tecnica': True,
u'cartografia': True,
u'catasto': True,
u'cave': True,
u'ceduo': None,
u'censimento': None,
u'centrali': None,
u'centrali idroelettriche': None,
u'centri abitati': None,
u'centri commerciali': None,
u'centri innovazione': None,
u'centri storici': None,
u'centri storici aggregati': None,
u'centri turistici': None,
u'ciclabili': u'Piste ciclabili',
u'collegamenti funzionali': None,
u'commercio': None,
u'comprensori': None,
u'comun': None,
u'comune amministrativo': None,
u'comuni': None,
u'comuni catastali': None,
u'comuni amministrativi': None,
u'comunit\xe0': None,
u'comunit\xe0 di valle': True,
u'condotte': None,
u'confine': None,
u'conoidi alluvionali': None,
u'conservazione ambientale': None,
u'corine land cover': None,
u'corsi artificiali': True,
u'corsi naturali': True,
u'curve di livello': u'Curve di livello',
u'curve livello': u'Curve di livello',
u'depuratori': True,
u'dipartimento foreste': None,
u'direttiva europea HABITAT': None,
u'direttiva europea abitat': None,
u'dolomiti': True,
u'economy': u'Economia',
u'edifici pubblici': True,
u'elementi geologici': True,
u'elementi geomorfologici': True,
u'elementi rappresentativi': True,
u'elevation': None,
u'environment': u'Ambiente',
u'faglie': True,
u'farming': None,
u'farzione': None,
u'ferrovia': 'Ferrovie',
u'ferrovie': 'Ferrovie',
u'ferrovie di progetto': ['Ferrovie'],
u'fiume': 'Fiumi',
u'fiumi': True,
u'foreste': True,
u'foreste demaniali': ['Foreste'],
u'fosse': True,
u'fronti di pregio': None,
u'fustaia': True,
u'gallerie': True,
u'gas': True,
u'geologia': True,
u'geoscientificInformation': None,
u'habitat': None,
u'idrografia': True,
u'idrografici': 'Idrografia',
u'imageryBaseMapsEarthCover': None,
u'impianti a fune': None,
u'impianti di depurazione': None,
u'impianti di risalita': None,
u'impianti pubblici': None,
u'impianto di depurazione': None,
u'improduttivo': None,
u'incendi': None,
u'indice di funzionalit\xc3\xa0 fluviale': None,
u'indice di funzionalit\xe0 fluviale': None,
u'infrastrutture': None,
u'inlandWaters': None,
u'inquadramenti': True,
u'inquadramenti di base': True,
u'inquadramento': True,
u'inquinamento': True,
u'insediamenti': True,
u'interporti': True,
u'invariante': None,
u'invarianti': None,
u'istat': None,
u'laghi': None,
u'lago': None,
u'limite provinciale': None,
u'limiti': None,
u'limiti espansione abitati': None,
u'linee alta tensione': None,
u'linee di livellazione': None,
u'linee elettriche': None,
u'livello': None,
u'localit\xe0': None,
u'location': None,
u'mappe catastali': True,
u"monitoraggio dell'acqua": None,
u"monitoraggio qualita' acqua": None,
u'nazionali': None,
u'oceans': None,
u'orografia': None,
u'paesaggi di particolare pregio': None,
u'paesaggio': None,
u'paleoalvei': None,
u'parchi naturali': None,
u'pascoli': None,
u'pascolo': None,
u'patrimonio delle dolomiti': None,
u'pericolosit\xc3\xa0': None,
u'pericolosit\xe0': None,
u'pertinenza': None,
u'piano cave': True,
u'piste': None,
u'piste sci': True,
u'planningCadastre': None,
u'presenze': None,
u'principale': None,
u'principali': None,
u'punti quotati': None,
u'pup': None,
u'pupagri': None,
u'pupasc': None,
u'pupbos': None,
u'quadro unione': None,
u"qualit\xc3\xa0 dell'acqua": None,
u"qualit\xe0 dell'acqua": None,
u'rete caposaldi': None,
u'rete natura 2000': None,
u'reti': None,
u'reticolo chilometrico': None,
u'rio': None,
u'riserve naturali': u'Riserve naturali',
u'rocce': None,
u'rupi': None,
u'rupi boscate': None,
u'scritte': None,
u'servizi': None,
u'sezioni censimento': None,
u'siat': None,
u'sistema di collettamento': None,
u'sistemi complessi di paesaggio': None,
u'siti archeolog': u'Siti archeologici',
u'siti di interesse comunitario': None,
u'society': None,
u'specchi': None,
u'statistica': None,
u'statistiche': None,
u'stazioni forestali': u'Stazioni forestali',
u'strade': None,
u'structure': None,
u'toponimi': u'Toponimi',
u'toponomastica': None,
u'toponomastica 1:25000': None,
u'toponomastica1:50000': None,
u'torrente': None,
u'transportation': u'Trasporti',
u'tutela': None,
u'tutela ambientale': u'Tutela ambientale',
u'tutele paesistiche': u'Tutela paesistica',
u'uffici distrettuali forestali': None,
u'uso del suolo': u'Uso del suolo',
u'uso del suolo CORINE': u'Uso del suolo',
u'uso del suolo corine': u'Uso del suolo',
u'uso suolo': u'Uso del suolo',
u'uso suolo reale': u'Uso del suolo',
u'utilitiesCommunication': None,
u'vegetazione': True,
u'viabilit\xe0': True,
u'viabilit\xe0 di progetto': True,
u'viabilit\xe0 esistente': True,
u'viabilit\xe0 ferroviaria': True,
u'viabilit\xe0 forestale': True,
u'zone di protezione speciale': True,
u'ZSC, Zone Speciali di Conservazione': u'Zone speciali di conservazione',
u'zone speciali conservazione, ZSC': u'Zone speciali di conservazione'}
|
import heapq  # NOTE(review): unused after removing the stray heappop call; kept since other parts of the file are not visible


def combination_sum(candidates, target):
    """Return every combination of *candidates* (each value reusable any
    number of times) that sums exactly to *target*.

    Classic backtracking ("Combination Sum"). Results are lists of candidate
    values in non-decreasing index order.
    """
    res = []
    resList = []

    def backtracking(start, rest):
        if rest == 0:
            # found a full combination; stop descending this branch
            res.append(resList[:])
            return
        for i in range(start, len(candidates)):
            if candidates[i] <= rest:
                resList.append(candidates[i])
                # pass i (not i + 1) so the same candidate may be reused
                backtracking(i, rest - candidates[i])
                resList.pop()

    backtracking(0, target)
    return res


if __name__ == '__main__':
    # Bug fix: the original called heapq.heappop() with no arguments,
    # which raises TypeError before the search even starts.
    print(combination_sum([2, 3, 6, 7], 7))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2017-18 Richard Hull and contributors
# See LICENSE.rst for details.
# PYTHON_ARGCOMPLETE_OK
"""
Capture photo with picamera and display it on a screen.
Requires picamera to be installed.
"""
import io
import sys
import time
from PIL import Image
from luma.core.interface.serial import i2c
from luma.oled.device import ssd1306, ssd1325, ssd1331, sh1106
try:
import picamera
except ImportError:
print("The picamera library is not installed. Install it using 'sudo -H pip install picamera'.")
sys.exit()
def main():
    """Capture one photo with the Pi camera and show it on the OLED display.

    NOTE(review): relies on the module-level global ``device`` being created
    in the ``__main__`` block before this function runs — confirm call order.
    """
    cameraResolution = (1024, 768)
    displayTime = 5  # seconds to keep the captured photo on screen
    # create the in-memory stream
    stream = io.BytesIO()
    with picamera.PiCamera() as camera:
        # set camera resolution
        camera.resolution = cameraResolution
        print("Starting camera preview...")
        camera.start_preview()
        # give the sensor time to settle exposure / white balance
        time.sleep(2)
        print("Capturing photo...")
        # resize at capture time so the JPEG already matches the display size
        camera.capture(stream, format='jpeg', resize=device.size)
        print("Stopping camera preview...")
        # NOTE(review): close() also ends the preview, but is redundant inside
        # the ``with`` block — the context manager closes the camera on exit.
        camera.close()
    # "rewind" the stream to the beginning so we can read its content
    stream.seek(0)
    print("Displaying photo for {0} seconds...".format(displayTime))
    # open photo
    photo = Image.open(stream)
    # display on screen for a few seconds
    device.display(photo.convert(device.mode))
    time.sleep(displayTime)
    print("Done.")
if __name__ == "__main__":
    try:
        # SSD1306 OLED on I2C bus 0, default I2C address 0x3C.
        serial = i2c(port=0, address=0x3C)
        device = ssd1306(serial, rotate=0)
        main()
    except KeyboardInterrupt:
        # allow a clean Ctrl-C exit without a traceback
        pass
|
from __future__ import annotations
from dials_algorithms_background_ext import * # noqa: F403; lgtm
__all__ = ("RadialAverage", "set_shoebox_background_value") # noqa: F405
|
from yann.modules.conv.utils import get_same_padding
def test_get_same_padding():
    """'same' padding for an odd kernel of size k is (k - 1) // 2."""
    for kernel_size, expected in ((3, 1), (5, 2), (7, 3)):
        assert get_same_padding(kernel_size) == expected
|
"""Colors"""
white_ambiance_luminaires = {
'concentrate': 233,
'default': 367,
'energize': 156,
'reading': 346,
'relax': 447,
}
"""light recipes"""
|
# Grid maps keyed by size. Cell legend (matches Gym FrozenLake conventions —
# TODO confirm): S = start, F = frozen (safe), H = hole, G = goal.
MAPS = {
    '30x30':['SFFFFFFFFFFFFFFFFFFFFFFFFFFFFF',
             'FFFFFFFFFFFFFFFFFFFFHFFFFFFFFF',
             'FFFFFFFHFFFHFFFFFHFHFFFFFFFFFF',
             'FFFFFFFFFFFFFHFFFFFFFFFFFFFFFF',
             'FFFFFFFFFFHFFFFFHFFFFFFFHFFFGF',
             'FFFFFFHFFFFFFFFFFFFFHFFFFFFFFF',
             'FFFFFFFFFFFFHFFFFFFFFFFFFFFFFF',
             'FFFFFFFFFFFFFFFFFHFFFFHFFFFFFF',
             'FFFFFFFFFHFFFFHFFFFFFFFFFFFFFF',
             'FFFFFFFHFFFFFFFFFFHFFFFFFFFFFF',
             'FFFFFFFFFFFFFFFFFFFFFFHFFFFFFF',
             'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFF',
             'FFFFFFFFFFFFHFFFFFFHFFFFFFFFFF',
             'FFFFFFFFFFFFFFFHFFFFFFFFFFFFFF',
             'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFF',
             'FFFFFFFFFFFFFFFFFFFFHFFFFFFFFF',
             'FFFFFFFFFFFFHFFFFFFFFFFFFFFFFF',
             'FFFFFFFFFFFFFFFFHFFFFFHFFFFFFF',
             'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFF',
             'FFFFFHFFFFFFFFFFFFFFFFFFFFFFFF',
             'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFF',
             'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFF',
             'FFFFFFFFFFHFFFFFFFFFFFFFFFFFFF',
             'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFF',
             'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFF',
             'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFF',
             'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFF',
             'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFF',
             'FFFFFFFFFFFFFFFFFFFHFFFFFFFFFF',
             'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFF'],
    # 4x4 is the classic default FrozenLake layout.
    "4x4": [
        "SFFF",
        "FHFH",
        "FFFH",
        "HFFG"
    ],
    "8x8": [
        "SFFFFFFF",
        "FFFFFFFF",
        "FFFHFFHF",
        "FFFFFFFF",
        "FFFFFFFF",
        "FHHFFFFF",
        "FHFFHFHF",
        "FFFHFFFG"
    ],
}
|
#!/usr/bin/python3
from frc971.control_loops.python import control_loop
from y2020.control_loops.python import flywheel
import numpy
import sys
import gflags
import glog
FLAGS = gflags.FLAGS

gflags.DEFINE_bool('plot', False, 'If true, plot the loop response.')

# Inertia for a single 4" diameter, 1" wide neopreme wheel.
J_wheel = 0.000319
# Gear ratio between wheels (speed up!)
G_per_wheel = 1.2
# Gear ratio to the final wheel.
G = (30.0 / 40.0) * numpy.power(G_per_wheel, 3.0)
# Overall flywheel inertia.
# Each upstream wheel's inertia is reflected through the per-wheel gear
# ratio: divide by ratio^2 per stage, hence the -2/-4/-6 powers.
J = J_wheel * (
    1.0 + numpy.power(G_per_wheel, -2.0) + numpy.power(G_per_wheel, -4.0) + numpy.power(G_per_wheel, -6.0))

# The position and velocity are measured for the final wheel.
kAccelerator = flywheel.FlywheelParams(
    name='Accelerator',
    motor=control_loop.Falcon(),
    G=G,
    J=J * 1.3,  # NOTE(review): 1.3 appears to be a safety/fudge factor on inertia — confirm
    q_pos=0.01,
    q_vel=40.0,
    q_voltage=1.0,
    r_pos=0.03,
    controller_poles=[.89])
def main(argv):
    """Plot the flywheel spin-up when --plot is set; otherwise emit the
    generated .h/.cc loop files named on the command line."""
    if FLAGS.plot:
        goal = numpy.matrix([[0.0], [500.0], [0.0]])
        flywheel.PlotSpinup(kAccelerator, goal=goal, iterations=400)
        return 0

    glog.debug("J is %f" % J)

    if len(argv) == 5:
        flywheel.WriteFlywheel(
            kAccelerator, argv[1:3], argv[3:5],
            ['y2020', 'control_loops', 'superstructure', 'accelerator'])
    else:
        glog.fatal('Expected .h file name and .cc file name')
if __name__ == '__main__':
    # gflags parses and strips its own flags, returning the remaining argv.
    argv = FLAGS(sys.argv)
    glog.init()
    sys.exit(main(argv))
|
#!/usr/bin/env python
import os
import sys
import h5py
import glob
import numpy as np
import pandas as pd
def main():
    """run synergy calculations <- separate bc need R and such

    Ported from Python 2: the original used print *statements*, which are
    syntax errors under Python 3 (the shebang is a bare ``python``).
    Also narrowed a bare ``except:`` and removed unused counters.
    """
    # inputs
    #out_dir = sys.argv[1]
    grammar_summary_file = "/mnt/lab_data3/dskim89/ggr/nn/2019-03-12.freeze/dmim.shuffle.complete/grammars.annotated.manual_filt.merged.final/grammars_summary.txt" # sys.argv[2]
    SYNERGY_MAIN_DIR = "./synergy"
    #SYNERGY_MAIN_DIR = "./sims.synergy"
    out_dir = "./synergy_calcs.endog"
    os.system("mkdir -p {}".format(out_dir))

    # get synergy files
    grammars_df = pd.read_table(grammar_summary_file)
    synergy_files = []
    for grammar_idx in range(grammars_df.shape[0]):
        grammar = grammars_df.iloc[grammar_idx]["filename"]
        grammar_prefix = os.path.basename(grammar).split(".gml")[0]

        # get the synergy file
        #synergy_file = glob.glob("sims.synergy/{}/ggr.synergy.h5".format(grammar_prefix))
        synergy_file = glob.glob("{}/{}/ggr.synergy.h5".format(SYNERGY_MAIN_DIR, grammar_prefix))
        if len(synergy_file) != 1:
            print("MORE THAN ONE FILE:", grammar)
        else:
            synergy_file = synergy_file[0]
            synergy_files.append(synergy_file)

        # check that it actually exists and is complete
        try:
            with h5py.File(synergy_file, "r") as hf:
                pwm_names = hf["logits.motif_mut"].attrs["pwm_names"]
        except Exception:
            # narrowed from a bare except so Ctrl-C still propagates
            print("synergy file not readable/does not exist!")

    # debug
    print("total synergy files: {}".format(len(synergy_files)))

    # other convenience set ups
    plots_dir = "{}/plots".format(out_dir)
    os.system("mkdir -p {}".format(plots_dir))
    grammars_dir = "{}/grammars.synergy_only".format(out_dir)
    os.system("mkdir -p {}".format(grammars_dir))

    # go through synergy files
    group_summary_file = "{}/interactions.summary.txt".format(out_dir)
    print(group_summary_file)
    header_str = "pwm1\tpwm2\tnum_examples\tsig\tbest_task_index\tactual\texpected\tdiff\tpval\tcategory"
    write_header = "echo '{}' > {}".format(header_str, group_summary_file)
    print(write_header)
    os.system(write_header)
    for synergy_file in synergy_files:
        # set up dir/prefix (NOTE: deliberately shadows the top-level out_dir,
        # as in the original)
        out_dir = "{}/calc_synergy".format(os.path.dirname(synergy_file))
        prefix = os.path.dirname(synergy_file).split("/")[-1]

        # check whether 2 or 3 and setup calc string
        with h5py.File(synergy_file, "r") as hf:
            num_motifs = int(np.log2(hf["sequence.motif_mut.string"].shape[1]))
        if num_motifs == 2:
            calculations = "11"
        elif num_motifs == 3:
            calculations = "110 101 011"
        else:
            # previously fell through with `calculations` unbound (NameError)
            raise ValueError("unsupported motif count: {}".format(num_motifs))

        # calculate
        calc_synergy = (
            "calculate_mutagenesis_effects.py "
            "--synergy_file {} "
            "--calculations {} "
            "--interpretation_indices 0 1 2 3 4 5 6 9 10 12 "
            "-o {} --refine --prefix {}").format(
                synergy_file,
                calculations,
                out_dir,
                prefix)
        print(calc_synergy)
        os.system(calc_synergy) # comment this out since file exists already...

        # extract the calculation results into a summary file
        if num_motifs == 2:
            summary_file = "{}/{}.interactions.txt".format(out_dir, prefix)
            get_summary = "cat {} | awk 'NR>1{{ print }}' >> {}".format(
                summary_file, group_summary_file)
            print(get_summary)
            os.system(get_summary)

        # cp plots out to separate folder for easy download
        copy_plots = "cp {}/*pdf {}".format(out_dir, plots_dir)
        #os.system(copy_plots)

        # and copy grammars to separate folder for easy annotation
        copy_grammar = "cp {}/*gml {}".format(out_dir, grammars_dir)
        #os.system(copy_grammar)

    # TODO plot the expected vs observed results
    return

main()
|
import cv2 as cv
from package.color_histogram import color_histogram as ch

# while running please ensure this example is in same directory as package
# Load a sample image, compute its regional color histogram with
# 8 bins per channel, and print the resulting feature vector.
img = cv.imread('images/lines.jpg')
histogram = ch.ColorHistogram((8, 8, 8))
print(histogram.Regional(img))
|
# Copyright 2021 Hakan Kjellerstrand hakank@gmail.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Photo problem in OR-tools CP-SAT Solver.
Problem statement from Mozart/Oz tutorial:
http://www.mozart-oz.org/home/doc/fdt/node37.html#section.reified.photo
'''
Betty, Chris, Donald, Fred, Gary, Mary, and Paul want to align in one
row for taking a photo. Some of them have preferences next to whom
they want to stand:
1. Betty wants to stand next to Gary and Mary.
2. Chris wants to stand next to Betty and Gary.
3. Fred wants to stand next to Mary and Donald.
4. Paul wants to stand next to Fred and Donald.
Obviously, it is impossible to satisfy all preferences. Can you find
an alignment that maximizes the number of satisfied preferences?
'''
Oz solution:
6 # alignment(betty:5 chris:6 donald:1 fred:3 gary:7 mary:4 paul:2)
[5, 6, 1, 3, 7, 4, 2]
This is a port of my old CP model photo_problem.py
This model was created by Hakan Kjellerstrand (hakank@gmail.com)
Also see my other OR-tools models: http://www.hakank.org/or_tools/
"""
from __future__ import print_function
from ortools.sat.python import cp_model as cp
import math, sys
from cp_sat_utils import flatten, ListPrinter
def main(opt = 0):
    """Solve the photo preference problem with CP-SAT.

    With opt == 0, maximize the number of satisfied next-to preferences and
    print one optimal alignment. With opt > 0, enumerate all alignments that
    satisfy exactly ``opt`` preferences.
    """
    model = cp.CpModel()

    #
    # data
    #
    persons = ["Betty", "Chris", "Donald", "Fred", "Gary", "Mary", "Paul"]
    n = len(persons)
    # preferences[i][j] == 1 means person i wants to stand next to person j
    preferences = [
        # 0 1 2 3 4 5 6
        # B C D F G M P
        [0, 0, 0, 0, 1, 1, 0],  # Betty 0
        [1, 0, 0, 0, 1, 0, 0],  # Chris 1
        [0, 0, 0, 0, 0, 0, 0],  # Donald 2
        [0, 0, 1, 0, 0, 1, 0],  # Fred 3
        [0, 0, 0, 0, 0, 0, 0],  # Gary 4
        [0, 0, 0, 0, 0, 0, 0],  # Mary 5
        [0, 0, 1, 1, 0, 0, 0]  # Paul 6
    ]

    if opt == 0:
        print("""Preferences:
1. Betty wants to stand next to Gary and Mary.
2. Chris wants to stand next to Betty and Gary.
3. Fred wants to stand next to Mary and Donald.
4. Paul wants to stand next to Fred and Donald.
""")

    #
    # declare variables
    #
    # positions[i] is the 0-based slot of person i in the row
    positions = [model.NewIntVar(0, n - 1, "positions[%i]" % i) for i in range(n)]

    # successful preferences
    # z = model.NewIntVar(0, n * n, "z")
    z = model.NewIntVar(0, sum(flatten(preferences)), "z")

    #
    # constraints
    #
    model.AddAllDifferent(positions)

    # calculate all the successful preferences
    # b = model.NewBoolVar("") for i in range(n) for j in range(n)]
    bb = []
    for i in range(n):
        for j in range(n):
            if preferences[i][j] == 1:
                # b reifies |positions[i] - positions[j]| == 1 (adjacency).
                b = model.NewBoolVar("b")
                p = model.NewIntVar(-n,n,"p")
                model.Add(p == positions[i] - positions[j])
                d = model.NewIntVar(-n,n, "d")
                # This don't work:
                # model.AddAbsEquality(d,p).OnlyEnforce(b)
                # AddAbsEquality cannot be enforced conditionally, so d is
                # always |p| and b only half-reifies d == 1; maximizing z
                # pushes b to true whenever adjacency actually holds.
                model.AddAbsEquality(d,p)
                model.Add(d == 1).OnlyEnforceIf(b)
                bb.append(b)

    model.Add(z == sum(bb))

    #
    # Symmetry breaking (from the Oz page):
    # Fred is somewhere left of Betty
    model.Add(positions[3] < positions[0])

    # objective
    if opt == 0:
        model.Maximize(z)
    else:
        # fix z and enumerate all solutions with that many satisfied wishes
        model.Add(z == opt)

    #
    # search and result
    #
    solver = cp.CpSolver()
    if opt == 0:
        status = solver.Solve(model)
    else:
        solution_printer = ListPrinter(positions)
        status = solver.SearchForAllSolutions(model,solution_printer)

    print("status:", solver.StatusName(status))
    if opt == 0 and (status == cp.OPTIMAL or status == cp.FEASIBLE):
        print("z:", solver.Value(z))
        p = [solver.Value(positions[i]) for i in range(n)]
        print("p:",p)
        # print the row ordered by slot, not by person index
        print(" ".join(
            [persons[j] for i in range(n) for j in range(n) if p[j] == i]))
        print("Successful preferences:")
        for i in range(n):
            for j in range(n):
                if preferences[i][j] == 1 and abs(p[i] - p[j]) == 1:
                    print("\t", persons[i], persons[j])
        print()

    print("NumConflicts:", solver.NumConflicts())
    print("NumBranches:", solver.NumBranches())
    print("WallTime:", solver.WallTime())
    print()

    return solver.Value(z)
if __name__ == "__main__":
print("Get max number of matchings")
z = main(opt = 0)
print("Get all optimal solutions with z =", z)
main(z)
|
# Read five integers, then report where the maximum and minimum occur.
lista = []
for c in range(0, 5):
    lista.append(int(input(f'Digite um número para a posição {c+1}: ')))

# Perf fix: compute max/min once instead of re-scanning the list on
# every loop iteration (and twice inside the f-strings).
maior = max(lista)
menor = min(lista)

print(f'O maior número da lista foi o {maior} e apareceu nas posições ', end='')
for pos, valores in enumerate(lista):
    if valores == maior:
        print(f'{pos+1}...', end=' ')
print(f'\nO menor número da lista foi o {menor} e apareceu nas posições ', end='')
for pos, valores in enumerate(lista):
    if valores == menor:
        print(f'{pos+1}...', end=' ')
|
#
# Copyright (c) 2013 Markus Eliasson, http://www.quarterapp.com/
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import smtplib
import logging
import sys
import tornado.template
from tornado.options import options
# Plain-text activation email sent on signup; rendered by tornado's template
# engine with the values supplied in send_signup_email() below.
signup_email_template = tornado.template.Template("""From: Quarterapp <{{ email_from }}>
To: {{ email_to }}
Subject: {{ subject }}
Welcome to Quarterapp in order to complete your free registration you need to activate
your account.
Follow this link to complete the activation: {{ code_link }}
If the above link does not work, go to {{ link }} and enter the code manually.
Activation code: {{ code }}
Cheers!
Quarterapp team
""")

# Plain-text password-reset email; values supplied in send_reset_email().
reset_email_template = tornado.template.Template("""From: Quarterapp <{{ email_from }}>
To: {{ email_to }}
Subject: {{ subject }}
Someone - hopefully you - reported that you forgot your password to Quarterapp.
Follow this link to assign a new password to your account: {{ code_link }}
If the above link does not work, go to {{ link }} and enter the code manually.
Reset code: {{ code }}
If you did not ask to reset your password, please ignore this mail.
Cheers!
Quarterapp team
""")
def send_mail(username, message):
    """
    Send a raw message to the given address via the configured SMTP relay.

    Uses STARTTLS and the configured credentials.
    @param username The recipient email address
    @param message The full message text (headers + body)
    @return True if mail could be sent, else False
    """
    server = None
    try:
        server = smtplib.SMTP(options.mail_host)
        server.ehlo()
        server.starttls()
        server.ehlo()  # re-identify over the now-encrypted channel
        server.login(options.mail_user, options.mail_password)
        server.sendmail(options.mail_sender, username, message)
        return True
    except Exception:
        logging.warning("Could not send email: %s", sys.exc_info())
        logging.warning(message)
        # Bug fix: this previously returned True on failure, making callers
        # (and their docstrings: "else False") believe the mail was sent.
        return False
    finally:
        # Bug fix: the SMTP connection was never closed.
        if server is not None:
            try:
                server.quit()
            except Exception:
                pass
def send_signup_email(username, code):
    """
    Sends the activation email to the given address with the given activation code.
    @param username The username to send the email to
    @param code The activation code
    @return True if mail could be sent, else False
    """
    try:
        message = signup_email_template.generate(subject = "Welcome to quarterapp",
            email_from = options.mail_sender,
            email_to = username,
            code_link = "http://" + options.base_url + "/activate/" + code,
            link = "http://" + options.base_url + "/activate",
            code = code)
        return send_mail(username, message)
    except Exception:
        # Narrowed from a bare except: a bare except also swallows
        # SystemExit/KeyboardInterrupt.
        logging.warning("Could not send signup email: %s", sys.exc_info())
        return False
def send_reset_email(username, code):
    """
    Sends the password reset email to the given address with the given reset code.
    @param username The username to send the email to
    @param code The activation code
    @return True if mail could be sent, else False
    """
    try:
        message = reset_email_template.generate(subject = "Reset your password",
            email_from = options.mail_sender,
            email_to = username,
            code_link = "http://" + options.base_url + "/reset/" + code,
            link = "http://" + options.base_url + "/reset",
            code = code)
        return send_mail(username, message)
    except Exception:
        # Narrowed from a bare except: a bare except also swallows
        # SystemExit/KeyboardInterrupt.
        logging.warning("Could not send reset email: %s", sys.exc_info())
        return False
|
"""DNS Authenticator for Domeneshop DNS."""
import logging
import re
import zope.interface
from domeneshop.client import Client as DomeneshopClient, DomeneshopError
from certbot import errors
from certbot import interfaces
from certbot.plugins import dns_common
logger = logging.getLogger(__name__)
HELP_URL = "https://api.domeneshop.no/docs"
@zope.interface.implementer(interfaces.IAuthenticator)
@zope.interface.provider(interfaces.IPluginFactory)
class Authenticator(dns_common.DNSAuthenticator):
    """DNS Authenticator for Domeneshop
    This Authenticator uses the Domeneshop API to fulfill a dns-01 challenge.
    """

    description = "Obtain certificates using a DNS TXT record (if you are using Domeneshop for DNS)."
    ttl = 60  # TTL (seconds) for the TXT challenge records we create

    def __init__(self, *args, **kwargs):
        super(Authenticator, self).__init__(*args, **kwargs)
        self.credentials = None  # populated by _setup_credentials()

    @classmethod
    def add_parser_arguments(cls, add):  # pylint: disable=arguments-differ
        """Register CLI arguments for this plugin."""
        # 120s default propagation delay: give Domeneshop's nameservers time
        # to serve the new TXT record before the ACME server validates it.
        super(Authenticator, cls).add_parser_arguments(
            add, default_propagation_seconds=120
        )
        add(
            "credentials",
            help="Domeneshop credentials INI file.",
            default="/etc/letsencrypt/domeneshop.ini",
        )

    def more_info(self):
        """One-line human-readable description shown by certbot."""
        return (
            "This plugin configures a DNS TXT record to respond to a dns-01 challenge using "
            + "the Domeneshop API."
        )

    def _setup_credentials(self):
        """Load and validate the credentials INI file (token + secret)."""
        self.credentials = self._configure_credentials(
            "credentials",
            "Domeneshop credentials INI file",
            {
                "client-token": "Client token for Domeneshop API, see {0}".format(
                    HELP_URL
                ),
                "client-secret": "Client secret for Domeneshop API, see {0}".format(
                    HELP_URL
                ),
            },
        )

    def _get_domeneshop_client(self):
        """Build an API client from the configured credentials."""
        return DomeneshopClient(
            token=self.credentials.conf("client-token"),
            secret=self.credentials.conf("client-secret"),
        )

    def _domain_id_from_guesses(
        self, provider_domains, domain_guesses, original_domain
    ):
        """Return (id, name) of the account domain matching one of the guesses.

        Raises PluginError when no registered domain matches.
        """
        for provider_domain in provider_domains:
            if provider_domain["domain"] in domain_guesses:
                return (provider_domain["id"], provider_domain["domain"])
        raise errors.PluginError(
            "Failed to find domain {0} (Does your account have have access to this domain?)".format(
                original_domain
            )
        )

    def _perform(
        self, domain, validation_name, validation
    ):  # pylint: disable=arguments-differ
        """Create the TXT challenge record via the Domeneshop API."""
        client = self._get_domeneshop_client()
        provider_domains = client.get_domains()
        domain_guesses = dns_common.base_domain_name_guesses(domain)

        # Determine the domain ID and registered domain from the provider
        domain_id, registered_domain = self._domain_id_from_guesses(
            provider_domains, domain_guesses, domain
        )

        # The Domeneshop API requires only the subdomain part of the domain:
        host = re.sub(r"{0}$".format(registered_domain), "", validation_name)
        host = re.sub(r"\.$", "", host)
        try:
            client.create_record(
                domain_id,
                {"type": "TXT", "ttl": self.ttl, "host": host, "data": validation},
            )
        except DomeneshopError as e:
            logger.error(
                "Encountered DomeneshopError during communication with API: %s", e
            )
            raise errors.PluginError(
                "Encountered DomeneshopError during communication with API: {0}".format(
                    e
                )
            )

    def _cleanup(
        self, domain, validation_name, validation
    ):  # pylint: disable=arguments-differ
        """Delete the TXT challenge record. Best-effort: failures are logged."""
        client = self._get_domeneshop_client()
        provider_domains = client.get_domains()
        domain_guesses = dns_common.base_domain_name_guesses(domain)

        # Determine the domain ID and registered domain from the provider
        try:
            domain_id, registered_domain = self._domain_id_from_guesses(
                provider_domains, domain_guesses, domain
            )
        except errors.PluginError as e:
            logger.warning(
                "Error occurred while determining domain ID for deletion: %s", e
            )
            return

        # The Domeneshop API requires the subdomain part of the domain:
        host = re.sub(r"{0}$".format(registered_domain), "", validation_name)
        host = re.sub(r"\.$", "", host)

        find_record = {"type": "TXT", "host": host, "data": validation}
        provider_records = client.get_records(domain_id)
        for record in provider_records:
            # dict-items superset test: record matches if it contains all of
            # find_record's key/value pairs (record also has id, ttl, ...).
            if record.items() >= find_record.items():
                try:
                    client.delete_record(domain_id, record["id"])
                except DomeneshopError as e:
                    logger.warning("Error occurred while deleting DNS record: %s", e)
|
# -*- coding: utf-8 -*-
# Copyright (C) Canux CHENG <canuxcheng@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Servers Inventory."""
from fabric.api import env
import textwrap
# Inventory
#
# Role name -> list of hostnames. OS roles start empty and are filled from
# the group roles below; the same host may appear under several roles.
_roles = {
    # Operating systems
    "debian": [],
    "ubuntu": [],
    "redhat": [],
    # Groups
    "central": [
        "canuxcheng.com",
    ],
    "satellites": [
        "canuxcheng.com",
    ],
    "workers": [
        "canuxcheng.com",
    ],
    "omnibus": [
        "canuxcheng.com",
    ],
}

# Operating Systems
#
# Ubuntu
_roles["ubuntu"].extend(_roles["central"])
_roles["ubuntu"].extend(_roles["satellites"])
_roles["ubuntu"].extend(_roles["workers"])
# Debian (keep compat with old scripts)
_roles["debian"].extend(_roles["ubuntu"])
# RedHat
_roles["redhat"].extend(_roles["omnibus"])

# Load inventory in shared fabric env
# NOTE(review): extend() can introduce duplicate hostnames within a role;
# confirm whether fabric de-duplicates hosts per run.
env.roledefs.update(_roles)
#------------------------------------------------------------------------------
# Show some very very very useful information in DEBUG ;-)
# Ported from Python 2: print statements and dict.viewitems() do not exist
# in Python 3 (viewitems() was removed; items() is a view in py3).
if env.get('DEBUG'):
    print("Available server's roles: (specify with -R)\n")
    # width of the longest role name, for column alignment
    role_string_length = len(max(env.roledefs.keys(), key=len))
    jitter = 3
    role_string_format = '\033[1;33m{0:%d}:' \
                         '\033[0m {1:80}' % role_string_length
    for role, servers in sorted(env.roledefs.items()):
        print(role_string_format.format(
            role, textwrap.fill(
                ", ".join(servers),
                width=80 - role_string_length - jitter,
                subsequent_indent=" " * (role_string_length + jitter))))
    # py2 `print "\n",` printed a newline without the trailing one
    print("\n", end='')
|
#!/usr/bin/env python
"""
Convolutional Neural Network based on the 6-layer deep model proposed by
Barkan et al. [1]_
"""
import tensorflow as tf
from tensorflow.keras import layers
from spiegelib.estimator.tf_estimator_base import TFEstimatorBase
class Conv6(TFEstimatorBase):
    """
    Six-layer convolutional model (Barkan et al. architecture).

    :param input_shape: Shape of matrix that will be passed to model input
    :type input_shape: tuple
    :param num_outputs: Number of outputs the model has
    :type num_outputs: int
    :param kwargs: optional keyword arguments to pass to
        :class:`spiegelib.estimator.TFEstimatorBase`
    """

    def __init__(self, input_shape, num_outputs, **kwargs):
        """Constructor"""
        super().__init__(input_shape, num_outputs, **kwargs)

    def build_model(self):
        """
        Construct 6-layer CNN Model
        """
        # (filters, kernel_size, strides) for each conv layer, in order.
        conv_specs = [
            (32,  (3, 3), (2, 2)),
            (71,  (3, 3), (2, 2)),
            (128, (3, 4), (2, 3)),
            (128, (3, 3), (2, 2)),
            (128, (3, 3), (2, 2)),
            (128, (3, 3), (1, 2)),
        ]

        self.model = tf.keras.Sequential()
        for index, (filters, kernel, strides) in enumerate(conv_specs):
            layer_kwargs = dict(strides=strides, dilation_rate=(1, 1),
                                activation='relu')
            if index == 0:
                # only the first layer declares the model's input shape
                layer_kwargs['input_shape'] = self.input_shape
            self.model.add(layers.Conv2D(filters, kernel, **layer_kwargs))

        self.model.add(layers.Dropout(0.20))
        self.model.add(layers.Flatten())
        self.model.add(layers.Dense(self.num_outputs))

        self.model.compile(
            optimizer=tf.optimizers.Adam(),
            loss=TFEstimatorBase.rms_error,
            metrics=['accuracy']
        )
|
# Ask for a number and print its multiplication table from 1 to 10.
x = int(input('Qual tabuada quer saber? '))
n = 1
while n <= 10:
    print(f'{x} x {n} = {x*n}')
    n += 1
|
from __future__ import annotations
from ..typecheck import *
from ..import core
from ..import ui
from ..import dap
from ..terminal import Terminal, Line
from ..autocomplete import Autocomplete
from .variable import VariableComponent
from .tabbed_panel import Panel
from .import css
import re
import webbrowser
import sublime
url_matching_regex = re.compile(r"((http|ftp|https)://([\w_-]+(?:(?:\.[\w_-]+)+))([\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?)") # from https://stackoverflow.com/questions/6038061/regular-expression-to-find-urls-within-a-string
default_file_regex = re.compile("(.*):([0-9]+):([0-9]+): error: (.*)")
_css_for_type = {
"console": css.label,
"stderr": css.label_redish,
"stdout": css.label,
"debugger.error": css.label_redish_secondary,
"debugger.info": css.label_secondary,
"debugger.output": css.label_secondary,
"terminal.output": css.label_secondary,
"terminal.error": css.label_redish,
}
class LineSourceView (ui.span):
    """Right-aligned, clickable "name@line" label for a console line's source."""

    def __init__(self, name: str, line: int|None, text_width: int, on_clicked_source: Callable[[], None]):
        super().__init__()
        self.on_clicked_source = on_clicked_source
        self.name = name
        self.line = line
        self.text_width = text_width

    def render(self) -> ui.span.Children:
        # include the line number only when one is known
        label = "{}@{}".format(self.name, self.line) if self.line else self.name
        label = label.rjust(self.text_width)
        return [
            ui.click(self.on_clicked_source)[
                ui.text(label, css=css.label_secondary)
            ]
        ]
class LineView (ui.div):
    """Renders one console line, wrapping long text across rows and placing a
    clickable source label at the right edge of the first visual row."""

    def __init__(self, line: Line, max_line_length: int, on_clicked_source: Callable[[dap.SourceLocation], None]) -> None:
        super().__init__()
        self.line = line
        # pick a css style by output category (stderr, stdout, debugger.*, ...)
        self.css = _css_for_type.get(line.type, css.label_secondary)
        self.max_line_length = max_line_length
        self.on_clicked_source = on_clicked_source
        self.clicked_menu = None  # in-flight click menu, if any

    def get(self) -> ui.div.Children:
        # variable lines are delegated entirely to VariableComponent
        if self.line.variable:
            source = self.line.source
            component = VariableComponent(self.line.variable, source=self.line.source, on_clicked_source=self.on_clicked_source)
            return [component]

        span_lines = [] #type: list[ui.div]
        spans = [] #type: list[ui.span]
        max_line_length = self.max_line_length
        leftover_line_length = max_line_length

        # if we have a name/line put it to the right of the first line
        source = None
        if self.line.source:
            source = self.line.source.name
            # reserve at least the length of the label and a space before it to render the source button
            leftover_line_length -= len(source)
            leftover_line_length -= 1

        def add_source_if_needed():
            # only the first produced row gets the source label (it is rendered
            # last after the reverse() below)
            if not span_lines and source:
                def on_clicked_source():
                    self.on_clicked_source(self.line.source)

                source_text = source.rjust(leftover_line_length + len(source) + 1)
                spans.append(ui.click(on_clicked_source)[
                    ui.text(source_text, css=css.label_secondary)
                ])

        span_offset = 0
        line_text = self.line.line
        while span_offset < len(line_text) and max_line_length > 0:
            # current visual row is full: flush it and start a fresh one
            if leftover_line_length <= 0:
                add_source_if_needed()
                span_lines.append(ui.div(height=css.row_height)[spans])
                spans = []
                leftover_line_length = max_line_length

            text = line_text[span_offset:span_offset + leftover_line_length]
            span_offset += len(text)
            # bind text via default arg so each span's click copies its own chunk
            spans.append(ui.click(lambda text=text: self.click(text))[
                ui.text(text, css=self.css)
            ])
            leftover_line_length -= len(text)

        add_source_if_needed()
        span_lines.append(ui.div(height=css.row_height)[spans])

        if len(span_lines) == 1:
            return span_lines
        # rows were built first-to-last; the terminal renders bottom-up
        span_lines.reverse()
        return span_lines

    @core.schedule
    async def click(self, text: str):
        """Show a menu of actions for the clicked text (Copy / Open url /
        Navigate); clicking again while the menu is open runs the top item."""
        values = [
            ui.InputListItem(lambda: sublime.set_clipboard(text), "Copy"),
        ]
        for match in url_matching_regex.findall(text):
            values.insert(0, ui.InputListItem(lambda match=match: webbrowser.open_new_tab(match[0]), "Open"))
        if self.line.source:
            values.insert(0, ui.InputListItem(lambda: self.on_clicked_source(self.line.source), "Navigate"))

        if self.clicked_menu:
            # second click while the menu is open: run the default action
            values[0].run()
            self.clicked_menu.cancel()
            return

        values[0].text += "\t Click again to select"
        self.clicked_menu = ui.InputList(values, text).run()
        await self.clicked_menu
        self.clicked_menu = None
class TerminalView (Panel):
    """Tabbed panel that renders a Terminal's scrollback (bottom-up) plus an
    optional input row, re-rendering whenever the terminal updates."""

    def __init__(self, terminal: Terminal, on_clicked_source: Callable[[dap.SourceLocation], None]) -> None:
        super().__init__(terminal.name())
        self.terminal = terminal
        self.terminal.on_updated.add(self._on_updated_terminal)
        self.start_line = 0  # scroll offset from the bottom, in terminal lines
        self.on_clicked_source = on_clicked_source

    def _on_updated_terminal(self):
        # mark the panel for re-render on the next layout pass
        self.dirty()

    def on_input(self):
        """Prompt for a line of input and forward it to the terminal."""
        label = self.terminal.writeable_prompt()
        def run(value: str):
            if not value: return
            self.terminal.write(value)
            # re-open the prompt so the user can keep typing lines
            self.on_input()

        ui.InputText(run, label, enable_when_active=Autocomplete.for_window(sublime.active_window())).run()

    def on_toggle_input_mode(self):
        # flip between escaped and raw input handling
        self.terminal.escape_input = not self.terminal.escape_input
        self.dirty()

    def on_clear(self) -> None:
        self.terminal.clear()

    def render(self) -> list[ui.div]:
        assert self.layout
        lines = []
        height = 0
        # NOTE(review): int(...) - 2.0 yields a float; comparisons still work
        # but the intent looks like an integer row budget — confirm.
        max_height = int((self.layout.height() - css.header_height)/css.row_height) - 2.0
        count = len(self.terminal.lines)  # NOTE(review): unused
        start = 0
        width = self.width(self.layout) - self.css.padding_width
        max_line_length = int(width)
        start = int(self.start_line)
        # lines at/below the scroll offset, newest first
        for line in self.terminal.lines[::-1][start:]:
            for l in LineView(line, max_line_length, self.on_clicked_source).get():
                height += 1
                if height > max_height:
                    break
                lines.append(l)
        # lines above the scroll offset (rendered before the block above)
        for line in self.terminal.lines[::-1][0:start][::-1]:
            for l in reversed(LineView(line, max_line_length, self.on_clicked_source).get()):
                height += 1
                if height > max_height:
                    break
                lines.insert(0, l)
        lines.reverse()

        # if height > max_height:
        # 	self.start_line = min()

        if self.terminal.writeable():
            input_line = []
            if self.terminal.can_escape_input():
                if self.terminal.escape_input:
                    text = 'esc'
                else:
                    text = 'line'

                mode_toggle = ui.click(self.on_toggle_input_mode)[
                    ui.text(text, css=css.button_secondary),
                ]
                input_line.append(mode_toggle)

            label = self.terminal.writeable_prompt()
            input_line.append(
                ui.click(self.on_input)[
                    ui.icon(ui.Images.shared.right),
                    ui.text(label, css=css.label_secondary),
                ]
            )
            lines.append(ui.div(height=css.row_height)[input_line])

        return lines
|
# Read two integers and print all basic arithmetic results between them.
a = int(input('Digite um valor: '))
b = int(input('Digite outro número: '))

# All results, in the order the message expects them.
resultados = (
    a + b,    # soma
    a - b,    # subtração
    a * b,    # multiplicação
    a / b,    # divisão real
    a // b,   # divisão inteira
    a % b,    # resto da divisão
    a ** b,   # exponenciação
)
print('A soma entre {} e {} equivale a {}, \n Equanto a subtração tem o valor de {}, \n A multiplicação de {}, \n A divisão de {:.3f}, sendo a divisão inteira {} e o resto de divisão {}, \n E a exponenciação {}.'.format(a, b, *resultados))
'''
nome = input('Digite seu nome: ')
print('Prazer em te conhecer, {:=^20}!'.format(nome))
'''
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import base64
import datetime
import logging
import json
import mock
import pytest
import time
import tensorflow as tf
from handlers import run_log_analyzer
from handlers import schedule_log_analyzer
from handlers import _prepare_log_analyzer_request_body
# Default fixtures for the tests below; all of them point at the
# mlops-dev sandbox project and its drift-monitor workspace.
DEFAULT_TEMPLATE_PATH = 'gs://mlops-dev-workspace/dataflow-templates/log_analyzer.json'
DEFAULT_PROJECT_ID = 'mlops-dev-env'
DEFAULT_LOG_TABLE = 'mlops-dev-env.data_validation.covertype_logs_tf'
DEFAULT_MODEL = 'covertype_tf'
DEFAULT_VERSION = 'v3'
DEFAULT_START_TIME = '2020-06-03T16:00:00'
DEFAULT_END_TIME = '2020-06-03T21:00:00'
DEFAULT_OUTPUT_LOCATION = 'gs://mlops-dev-workspace/drift_monitor/output/tf/tests'
DEFAULT_SCHEMA_LOCATION = 'gs://mlops-dev-workspace/drift_monitor/schema/schema.pbtxt'
DEFAULT_BASELINE_STATS_LOCATION = 'gs://mlops-dev-workspace/drift_monitor/baseline_stats/stats.pbtxt'
DEFAULT_SERVICE_ACCOUNT = 'drift_monitor@mlops-dev-env.iam.gserviceaccount.com'
DEFAULT_REGION = 'us-central1'
DEFAULT_TASK_QUEUE = 'drift_monitor-runs'
DEFAULT_TIME_WINDOW = '60m'
def test_prepare_log_analyzer_request_body():
    """Build a log-analyzer request body from the default settings and print it."""
    job_name = '{}-{}'.format('data-drift-detector', time.strftime("%Y%m%d-%H%M%S"))
    body = _prepare_log_analyzer_request_body(
        job_name=job_name,
        template_path=DEFAULT_TEMPLATE_PATH,
        model=DEFAULT_MODEL,
        version=DEFAULT_VERSION,
        log_table=DEFAULT_LOG_TABLE,
        start_time=DEFAULT_START_TIME,
        end_time=DEFAULT_END_TIME,
        output_location='{}/{}'.format(DEFAULT_OUTPUT_LOCATION, 'testing_body'),
        schema_location=DEFAULT_SCHEMA_LOCATION,
        baseline_stats_location=DEFAULT_BASELINE_STATS_LOCATION,
        time_window=DEFAULT_TIME_WINDOW
    )
    print(body)
def test_run_log_analyzer():
    """Launch the log-analyzer template with the default settings and print the response."""
    response = run_log_analyzer(
        project_id=DEFAULT_PROJECT_ID,
        region=DEFAULT_REGION,
        template_path=DEFAULT_TEMPLATE_PATH,
        model=DEFAULT_MODEL,
        version=DEFAULT_VERSION,
        log_table=DEFAULT_LOG_TABLE,
        start_time=datetime.datetime.fromisoformat(DEFAULT_START_TIME),
        end_time=datetime.datetime.fromisoformat(DEFAULT_END_TIME),
        output_location=DEFAULT_OUTPUT_LOCATION,
        schema_location=DEFAULT_SCHEMA_LOCATION,
        baseline_stats_location=DEFAULT_BASELINE_STATS_LOCATION,
        time_window=DEFAULT_TIME_WINDOW
    )
    print(response)
def test_schedule_log_analyzer():
    """Schedule a log-analyzer run ~30s in the future and print the response."""
    schedule_time = datetime.datetime.now() + datetime.timedelta(seconds=30)
    response = schedule_log_analyzer(
        task_queue=DEFAULT_TASK_QUEUE,
        service_account=DEFAULT_SERVICE_ACCOUNT,
        schedule_time=schedule_time,
        project_id=DEFAULT_PROJECT_ID,
        region=DEFAULT_REGION,
        template_path=DEFAULT_TEMPLATE_PATH,
        model=DEFAULT_MODEL,
        version=DEFAULT_VERSION,
        log_table=DEFAULT_LOG_TABLE,
        start_time=datetime.datetime.fromisoformat(DEFAULT_START_TIME),
        end_time=datetime.datetime.fromisoformat(DEFAULT_END_TIME),
        output_location=DEFAULT_OUTPUT_LOCATION,
        schema_location=DEFAULT_SCHEMA_LOCATION,
        baseline_stats_location=DEFAULT_BASELINE_STATS_LOCATION,
        time_window=DEFAULT_TIME_WINDOW
    )
    print(response)
|
# Copyright (c) 2017-2021, Mudita Sp. z.o.o. All rights reserved.
# For licensing, see https://github.com/mudita/MuditaOS/LICENSE.md
import pytest
from harness.interface.defs import status
@pytest.mark.rt1051
@pytest.mark.service_desktop_test
@pytest.mark.usefixtures("phone_unlocked")
def test_battery_file(harness):
    """Verify the battery fuel-gauge config file exists on the device filesystem."""
    body = {"command": "checkFile", "fileName": "/sys/user/batteryFuelGaugeConfig.cfg"}
    ret = harness.endpoint_request("filesystem", "post", body)
    assert ret["status"] == status["OK"]
    # `is True` instead of `== True` (E712): the endpoint reports a JSON
    # boolean, and equality would also accept truthy non-bool values.
    assert ret["body"]["fileExists"] is True
|
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
# from fairseq import utils
# from fairseq.modules import (
# LearnedPositionalEmbedding, MultiheadAttention,
# SinusoidalPositionalEmbedding,
# )
from ncc.modules.roberta.learned_positional_embedding import LearnedPositionalEmbedding
from ncc.modules.attention.multihead_attention import MultiheadAttention
from ncc.modules.roberta.sinusoidal_positional_embedding import SinusoidalPositionalEmbedding
from ncc.modules.seq2seq.ncc_incremental_decoder import NccIncrementalDecoder
from ncc.modules.code2vec.ncc_encoder import NccEncoder
from ncc.models.ncc_model import NccEncoderDecoderModel
from ncc.models import register_model
from ncc.utils import utils
# from . import (
# NccIncrementalDecoder, NccEncoder, NccModel,
# register_model, register_model_architecture,
# )
# @register_model('hi_transformer_summarization')
# class HiTransformerSummarizationModel(NccModel):
def get_sent_end_repr(src_emb, sent_ends):
    """Gather the embedding at each sentence-end token position.

    :param src_emb: token embeddings of shape (bsz, seqlen, dim).
    :param sent_ends: integer positions of shape (bsz, nsent), one index per
        sentence end within each sequence.
    :return: tensor of shape (bsz, nsent, dim) with the gathered embeddings.
    """
    bsz, nsent = sent_ends.size()
    assert bsz == src_emb.size(0)
    seqlen = src_emb.size(1)
    # Per-row offsets into the flattened (bsz * seqlen) token axis.
    # torch.arange keeps the offsets in exact integer arithmetic; the original
    # torch.linspace produced floats, which can lose precision for large
    # bsz * seqlen before being cast back to the index dtype.
    offset = (torch.arange(bsz) * seqlen).type(sent_ends.type())
    sent_ends_abs = sent_ends + offset.view(-1, 1)
    sent_ends_repr = src_emb.contiguous().view(bsz * seqlen, -1)[sent_ends_abs]
    sent_ends_repr = sent_ends_repr.view(bsz, nsent, -1)
    return sent_ends_repr
@register_model('hi_transformer_summarization')
class HiTransformerSummarizationModel(NccEncoderDecoderModel):
    """Hierarchical transformer for summarization.

    Pairs a two-level ``TransformerEncoder`` (token-level layers followed by
    document-level layers over sentence-end representations) with a standard
    ``TransformerDecoder``.
    """
    def __init__(self, encoder, decoder):
        super().__init__(encoder, decoder)
    # @staticmethod
    # def add_args(parser):
    #     """Add model-specific arguments to the parser."""
    #     parser.add_argument('--dropout', type=float, metavar='D',
    #                         help='dropout probability')
    #     parser.add_argument('--attention-dropout', type=float, metavar='D',
    #                         help='dropout probability for attention weights')
    #     parser.add_argument('--relu-dropout', type=float, metavar='D',
    #                         help='dropout probability after ReLU in FFN')
    #     parser.add_argument('--encoder-embed-path', type=str, metavar='STR',
    #                         help='path to pre-trained encoder embedding')
    #     parser.add_argument('--encoder-embed-dim', type=int, metavar='N',
    #                         help='encoder embedding dimension')
    #     parser.add_argument('--encoder-ffn-embed-dim', type=int, metavar='N',
    #                         help='encoder embedding dimension for FFN')
    #     parser.add_argument('--encoder-layers', type=int, metavar='N',
    #                         help='num encoder layers')
    #     parser.add_argument('--encoder-attention-heads', type=int, metavar='N',
    #                         help='num encoder attention heads')
    #     parser.add_argument('--encoder-normalize-before', default=False, action='store_true',
    #                         help='apply layernorm before each encoder block')
    #     parser.add_argument('--encoder-learned-pos', default=False, action='store_true',
    #                         help='use learned positional embeddings in the encoder')
    #     parser.add_argument('--decoder-embed-path', type=str, metavar='STR',
    #                         help='path to pre-trained decoder embedding')
    #     parser.add_argument('--decoder-embed-dim', type=int, metavar='N',
    #                         help='decoder embedding dimension')
    #     parser.add_argument('--decoder-ffn-embed-dim', type=int, metavar='N',
    #                         help='decoder embedding dimension for FFN')
    #     parser.add_argument('--decoder-layers', type=int, metavar='N',
    #                         help='num decoder layers')
    #     parser.add_argument('--decoder-attention-heads', type=int, metavar='N',
    #                         help='num decoder attention heads')
    #     parser.add_argument('--decoder-learned-pos', default=False, action='store_true',
    #                         help='use learned positional embeddings in the decoder')
    #     parser.add_argument('--decoder-normalize-before', default=False, action='store_true',
    #                         help='apply layernorm before each decoder block')
    #     parser.add_argument('--share-decoder-input-output-embed', default=False, action='store_true',
    #                         help='share decoder input and output embeddings')
    #     parser.add_argument('--share-all-embeddings', default=False, action='store_true',
    #                         help='share encoder, decoder and output embeddings'
    #                              ' (requires shared dictionary and embed dim)')
    @classmethod
    def build_model(cls, args, config, task):
        """Build a new model instance."""
        # make sure that all args are properly defaulted (in case there are any new ones)
        # base_architecture(args)
        src_dict, tgt_dict = task.source_dictionary, task.target_dictionary
        def build_embedding(dictionary, embed_dim, path=None):
            # Token embedding for `dictionary`, optionally initialized from a
            # pre-trained embedding file at `path`.
            num_embeddings = len(dictionary)
            padding_idx = dictionary.pad()
            emb = Embedding(num_embeddings, embed_dim, padding_idx)
            # if provided, load from preloaded dictionaries
            if path:
                embed_dict = utils.parse_embedding(path)
                utils.load_embedding(embed_dict, dictionary, emb)
            return emb
        if args['model']['share_all_embeddings']:
            # Sharing all embeddings requires identical vocabularies and dims
            # on both sides; decoder in/out embeddings are then forcibly tied.
            if src_dict != tgt_dict:
                raise RuntimeError('--share-all-embeddings requires a joined dictionary')
            if args['model']['encoder_embed_dim'] != args['model']['decoder_embed_dim']:
                raise RuntimeError(
                    '--share-all-embeddings requires --encoder-embed-dim to match --decoder-embed-dim')
            if args['model']['decoder_embed_path'] and (
                    args['model']['decoder_embed_path'] != args['model']['encoder_embed_path']):
                raise RuntimeError('--share-all-embeddings not compatible with --decoder-embed-path')
            encoder_embed_tokens = build_embedding(
                src_dict, args['model']['encoder_embed_dim'], args['model']['encoder_embed_path']
            )
            decoder_embed_tokens = encoder_embed_tokens
            args['model']['share_decoder_input_output_embed'] = True
        else:
            encoder_embed_tokens = build_embedding(
                src_dict, args['model']['encoder_embed_dim'], args['model']['encoder_embed_path']
            )
            decoder_embed_tokens = build_embedding(
                tgt_dict, args['model']['decoder_embed_dim'], args['model']['decoder_embed_path']
            )
        encoder = TransformerEncoder(args, src_dict, encoder_embed_tokens)
        decoder = TransformerDecoder(args, tgt_dict, decoder_embed_tokens)
        return HiTransformerSummarizationModel(encoder, decoder)
    # def forward(self, src_tokens, doc_pad_mask, doc_pos_tok, masked_sent_positions, prev_output_tokens):
    #     encoder_out = self.encoder(src_tokens, doc_pad_mask, doc_pos_tok)
    #     decoder_out = self.decoder(encoder_out, masked_sent_positions, prev_output_tokens)
    #     return decoder_out
    def forward(self, src_tokens, src_sent_ends, doc_pad_mask, doc_pos_tok, prev_output_tokens):
        """Encode the document, then decode conditioned on the encoder output."""
        encoder_out = self.encoder(src_tokens, src_sent_ends, doc_pad_mask, doc_pos_tok)
        decoder_out = self.decoder(prev_output_tokens, encoder_out)
        return decoder_out
class TransformerEncoder(NccEncoder):
    """Hierarchical transformer encoder: token-level layers followed by
    document-level layers over sentence-end representations."""
    def __init__(self, args, dictionary, embed_tokens, left_pad=False):
        """
        :param args: nested config dict; options are read from ``args['model']``.
        :param dictionary: source-side vocabulary.
        :param embed_tokens: token embedding module (supplies dim/padding idx).
        :param left_pad: whether source sequences are left-padded.
        """
        super().__init__(dictionary)
        self.dropout = args['model']['dropout']
        embed_dim = embed_tokens.embedding_dim
        self.padding_idx = embed_tokens.padding_idx
        self.embed_tokens = embed_tokens
        # Scale token embeddings by sqrt(dim) before adding positions.
        self.embed_scale = math.sqrt(embed_dim)
        # Token-level positional embeddings (max 1024 positions, hard-coded).
        self.embed_positions = PositionalEmbedding(
            1024, embed_dim, self.padding_idx,
            left_pad=left_pad,
            learned=args['model']['encoder_learned_pos'],
        )
        # Token-level transformer layers.
        self.layers = nn.ModuleList([])
        self.layers.extend([
            TransformerEncoderLayer(args)
            for i in range(args['model']['encoder_layers'])
        ])
        # Sentence-level positional embeddings for the document encoder.
        self.sent_embed_positions = PositionalEmbedding(
            1024, embed_dim, self.padding_idx,
            left_pad=False,
            learned=args['model']['encoder_learned_pos'],
        )
        # Document-level transformer layers over sentence representations
        # (same layer count as the token level).
        self.doc_layers = nn.ModuleList([])
        self.doc_layers.extend([
            TransformerEncoderLayer(args)
            for i in range(args['model']['encoder_layers'])
        ])
    def forward(self, src_tokens, src_sent_ends, doc_pad_mask, doc_pos_tok):
        """Encode tokens, then run document-level layers over the
        representations gathered at each sentence end.

        :return: dict with 'encoder_out' (n_sent x bsz x C) and
            'encoder_padding_mask' (bsz x n_sent).
        """
        bsz, seqlen = src_tokens.size()
        # src_tokens = src_tokens.view(bsz * n_sent, seqlen)
        # embed tokens and positions
        x = self.embed_scale * self.embed_tokens(src_tokens)
        x += self.embed_positions(src_tokens)
        x = F.dropout(x, p=self.dropout, training=self.training)
        # B x T x C -> T x B x C
        x = x.transpose(0, 1)
        # compute padding mask
        encoder_padding_mask = src_tokens.eq(self.padding_idx)
        if not encoder_padding_mask.any():
            encoder_padding_mask = None
        # encoder layers
        for layer in self.layers:
            x = layer(x, encoder_padding_mask)
        doc_pos = self.sent_embed_positions(doc_pos_tok)
        # sent_repr = x[-1].view(bsz, n_sent, -1)
        # Back to B x T x C so sentence-end positions can be gathered.
        x = x.transpose(0, 1)
        sent_repr = get_sent_end_repr(x, src_sent_ends)
        # print('sent_repr', sent_repr.size())
        sent_repr = sent_repr + doc_pos
        # print('sent_repr after', sent_repr.size())
        # n_sent x bsz x C
        sent_repr = sent_repr.transpose(0, 1)
        for doc_layer in self.doc_layers:
            sent_repr = doc_layer(sent_repr, doc_pad_mask)
        return {
            'encoder_out': sent_repr, # n_sent x bsz x C
            'encoder_padding_mask': doc_pad_mask, # bsz x n_sent
        }
    def reorder_encoder_out(self, encoder_out_dict, new_order):
        # Reorder along the batch axis (dim 1 for the T-first output, dim 0
        # for the mask); used by beam search.
        if encoder_out_dict['encoder_out'] is not None:
            encoder_out_dict['encoder_out'] = \
                encoder_out_dict['encoder_out'].index_select(1, new_order)
        if encoder_out_dict['encoder_padding_mask'] is not None:
            encoder_out_dict['encoder_padding_mask'] = \
                encoder_out_dict['encoder_padding_mask'].index_select(0, new_order)
        return encoder_out_dict
    def max_positions(self):
        """Maximum input length supported by the encoder."""
        return self.embed_positions.max_positions()
    def upgrade_state_dict(self, state_dict):
        # Sinusoidal embeddings are recomputed rather than stored: drop stale
        # weights and make sure the dtype/device placeholder tensor exists.
        if isinstance(self.embed_positions, SinusoidalPositionalEmbedding):
            if 'encoder.embed_positions.weights' in state_dict:
                del state_dict['encoder.embed_positions.weights']
            if 'encoder.embed_positions._float_tensor' not in state_dict:
                state_dict['encoder.embed_positions._float_tensor'] = torch.FloatTensor()
        return state_dict
class TransformerDecoder(NccIncrementalDecoder):
    """Transformer decoder."""
    def __init__(self, args, dictionary, embed_tokens, left_pad=False):
        """
        :param args: nested config dict; options are read from ``args['model']``.
        :param dictionary: target-side vocabulary.
        :param embed_tokens: token embedding module.
        :param left_pad: whether target sequences are left-padded.
        """
        super().__init__(dictionary)
        self.dropout = args['model']['dropout']
        self.share_input_output_embed = args['model']['share_decoder_input_output_embed']
        embed_dim = embed_tokens.embedding_dim
        padding_idx = embed_tokens.padding_idx
        self.embed_tokens = embed_tokens
        self.embed_scale = math.sqrt(embed_dim)
        # Positional embeddings (max 1024 positions, hard-coded).
        self.embed_positions = PositionalEmbedding(
            1024, embed_dim, padding_idx,
            left_pad=left_pad,
            learned=args['model']['decoder_learned_pos'],
        )
        self.layers = nn.ModuleList([])
        self.layers.extend([
            TransformerDecoderLayer(args)
            for i in range(args['model']['decoder_layers'])
        ])
        if not self.share_input_output_embed:
            # Separate output projection when input/output embeddings are
            # not tied; initialized N(0, embed_dim ** -0.5).
            self.embed_out = nn.Parameter(torch.Tensor(len(dictionary), embed_dim))
            nn.init.normal_(self.embed_out, mean=0, std=embed_dim ** -0.5)
    def forward(self, prev_output_tokens, encoder_out, incremental_state=None):
        """Compute vocabulary logits for the previous output tokens.

        :return: ``(logits B x T x V, attn)`` where ``attn`` comes from the
            last decoder layer. NOTE(review): with zero decoder layers `attn`
            would be unbound here — assumes decoder_layers >= 1.
        """
        # embed positions
        positions = self.embed_positions(
            prev_output_tokens,
            incremental_state=incremental_state,
        )
        if incremental_state is not None:
            # Incremental decoding: only feed the newest time step through.
            prev_output_tokens = prev_output_tokens[:, -1:]
            positions = positions[:, -1:]
        # embed tokens and positions
        x = self.embed_scale * self.embed_tokens(prev_output_tokens)
        x += positions
        x = F.dropout(x, p=self.dropout, training=self.training)
        # B x T x C -> T x B x C
        x = x.transpose(0, 1)
        # decoder layers
        for layer in self.layers:
            x, attn = layer(
                x,
                encoder_out['encoder_out'],
                encoder_out['encoder_padding_mask'],
                incremental_state,
            )
        # T x B x C -> B x T x C
        x = x.transpose(0, 1)
        # project back to size of vocabulary
        if self.share_input_output_embed:
            x = F.linear(x, self.embed_tokens.weight)
        else:
            x = F.linear(x, self.embed_out)
        return x, attn
    def max_positions(self):
        """Maximum output length supported by the decoder."""
        return self.embed_positions.max_positions()
    def upgrade_state_dict(self, state_dict):
        # Sinusoidal embeddings are recomputed rather than stored: drop stale
        # weights and make sure the placeholder tensor key exists.
        if isinstance(self.embed_positions, SinusoidalPositionalEmbedding):
            if 'decoder.embed_positions.weights' in state_dict:
                del state_dict['decoder.embed_positions.weights']
            if 'decoder.embed_positions._float_tensor' not in state_dict:
                state_dict['decoder.embed_positions._float_tensor'] = torch.FloatTensor()
        return state_dict
class TransformerDecoder_(NccIncrementalDecoder):
    """Minimal decoder variant: dropout plus a linear projection of the
    encoder output onto the vocabulary."""
    def __init__(self, args, dictionary):
        super().__init__(dictionary)
        self.dropout = args['model']['dropout']
        self.out_proj = Linear(args['model']['decoder_embed_dim'], len(dictionary))
    def forward(self, encoder_out, incremental_state=None):
        hidden = F.dropout(
            encoder_out['encoder_out'], p=self.dropout, training=self.training
        )
        logits = self.out_proj(hidden)
        logits = logits.transpose(0, 1)
        # Mirror the (output, attention) contract of the full decoder.
        return logits, logits
    def max_positions(self):
        """Maximum output length supported by the decoder."""
        return 1024
    def upgrade_state_dict(self, state_dict):
        """No positional embeddings in this variant, so nothing to migrate."""
        return state_dict
class TransformerEncoderLayer(nn.Module):
    """Encoder layer block.
    In the original paper each operation (multi-head attention or FFN) is
    postprocessed with: dropout -> add residual -> layernorm.
    In the tensor2tensor code they suggest that learning is more robust when
    preprocessing each layer with layernorm and postprocessing with:
    dropout -> add residual.
    We default to the approach in the paper, but the tensor2tensor approach can
    be enabled by setting `normalize_before=True`.
    """
    def __init__(self, args):
        """:param args: nested config dict; options are read from ``args['model']``."""
        super().__init__()
        self.embed_dim = args['model']['encoder_embed_dim']
        # self.self_attn = MultiheadAttention(
        #     self.embed_dim, args['model']['encoder_attention_heads'],
        #     dropout=args['model']['attention_dropout'],
        # )
        # TODO: to be verified
        self.self_attn = MultiheadAttention(
            self.embed_dim,
            args['model']['encoder_attention_heads'],
            dropout=args['model']['attention_dropout'],
            # add_bias_kv=add_bias_kv,
            # add_zero_attn=add_zero_attn,
            self_attention=True
        )
        self.dropout = args['model']['dropout']
        self.relu_dropout = args['model']['relu_dropout']
        self.normalize_before = args['model']['encoder_normalize_before']
        # Position-wise feed-forward network.
        self.fc1 = Linear(self.embed_dim, args['model']['encoder_ffn_embed_dim'])
        self.fc2 = Linear(args['model']['encoder_ffn_embed_dim'], self.embed_dim)
        # Two norms: index 0 for self-attention, index 1 for the FFN.
        self.layer_norms = nn.ModuleList([LayerNorm(self.embed_dim) for i in range(2)])
    def forward(self, x, encoder_padding_mask):
        """Self-attention then FFN, each wrapped in dropout, a residual
        connection and (pre- or post-) layer norm.

        :param x: input of shape T x B x C.
        :param encoder_padding_mask: mask of padded positions, or None.
        """
        residual = x
        x = self.maybe_layer_norm(0, x, before=True)
        x, _ = self.self_attn(query=x, key=x, value=x, key_padding_mask=encoder_padding_mask)
        x = F.dropout(x, p=self.dropout, training=self.training)
        x = residual + x
        x = self.maybe_layer_norm(0, x, after=True)
        residual = x
        x = self.maybe_layer_norm(1, x, before=True)
        x = F.relu(self.fc1(x))
        x = F.dropout(x, p=self.relu_dropout, training=self.training)
        x = self.fc2(x)
        x = F.dropout(x, p=self.dropout, training=self.training)
        x = residual + x
        x = self.maybe_layer_norm(1, x, after=True)
        return x
    def maybe_layer_norm(self, i, x, before=False, after=False):
        # Apply layer norm `i` either before or after the sublayer depending
        # on normalize_before: `after ^ normalize_before` is true exactly for
        # (after & post-norm) or (before & pre-norm).
        assert before ^ after
        if after ^ self.normalize_before:
            return self.layer_norms[i](x)
        else:
            return x
class TransformerDecoderLayer(nn.Module):
    """Decoder layer block."""
    def __init__(self, args):
        """:param args: nested config dict; options are read from ``args['model']``."""
        super().__init__()
        self.embed_dim = args['model']['decoder_embed_dim']
        # Self-attention over previously generated tokens.
        # NOTE(review): built without self_attention=True (unlike the encoder
        # layer) and the mask_future_timesteps kwarg below is commented out —
        # confirm causal masking is enforced inside MultiheadAttention.
        self.self_attn = MultiheadAttention(
            self.embed_dim, args['model']['decoder_attention_heads'],
            dropout=args['model']['attention_dropout'],
        )
        self.dropout = args['model']['dropout']
        self.relu_dropout = args['model']['relu_dropout']
        self.normalize_before = args['model']['decoder_normalize_before']
        # Encoder-decoder (cross) attention.
        self.encoder_attn = MultiheadAttention(
            self.embed_dim, args['model']['decoder_attention_heads'],
            dropout=args['model']['attention_dropout'],
        )
        # Position-wise feed-forward network.
        self.fc1 = Linear(self.embed_dim, args['model']['decoder_ffn_embed_dim'])
        self.fc2 = Linear(args['model']['decoder_ffn_embed_dim'], self.embed_dim)
        # Three norms: 0 self-attn, 1 cross-attn, 2 FFN.
        self.layer_norms = nn.ModuleList([LayerNorm(self.embed_dim) for i in range(3)])
    def forward(self, x, encoder_out, encoder_padding_mask, incremental_state):
        """Self-attention, cross-attention and FFN sublayers, each wrapped in
        dropout, a residual connection and (pre- or post-) layer norm.

        :param x: input of shape T x B x C.
        :return: ``(output T x B x C, cross-attention weights)``.
        """
        residual = x
        x = self.maybe_layer_norm(0, x, before=True)
        x, _ = self.self_attn(
            query=x,
            key=x,
            value=x,
            # mask_future_timesteps=True,
            incremental_state=incremental_state,
            need_weights=False,
        )
        x = F.dropout(x, p=self.dropout, training=self.training)
        x = residual + x
        x = self.maybe_layer_norm(0, x, after=True)
        residual = x
        x = self.maybe_layer_norm(1, x, before=True)
        x, attn = self.encoder_attn(
            query=x,
            key=encoder_out,
            value=encoder_out,
            key_padding_mask=encoder_padding_mask,
            incremental_state=incremental_state,
            static_kv=True,
        )
        x = F.dropout(x, p=self.dropout, training=self.training)
        x = residual + x
        x = self.maybe_layer_norm(1, x, after=True)
        residual = x
        x = self.maybe_layer_norm(2, x, before=True)
        x = F.relu(self.fc1(x))
        x = F.dropout(x, p=self.relu_dropout, training=self.training)
        x = self.fc2(x)
        x = F.dropout(x, p=self.dropout, training=self.training)
        x = residual + x
        x = self.maybe_layer_norm(2, x, after=True)
        return x, attn
    def maybe_layer_norm(self, i, x, before=False, after=False):
        # Apply layer norm `i` before or after the sublayer depending on
        # normalize_before; `after ^ normalize_before` selects the active case.
        assert before ^ after
        if after ^ self.normalize_before:
            return self.layer_norms[i](x)
        else:
            return x
def Embedding(num_embeddings, embedding_dim, padding_idx):
    """Create an ``nn.Embedding`` with weights drawn from N(0, dim ** -0.5)."""
    layer = nn.Embedding(num_embeddings, embedding_dim, padding_idx=padding_idx)
    nn.init.normal_(layer.weight, mean=0, std=embedding_dim ** -0.5)
    return layer
def LayerNorm(embedding_dim):
    """Plain ``nn.LayerNorm`` over the last ``embedding_dim`` features."""
    return nn.LayerNorm(embedding_dim)
def Linear(in_features, out_features, bias=True):
    """``nn.Linear`` with Xavier-uniform weights and a zero-initialized bias.

    Fixes: the original unconditionally called ``nn.init.constant_(m.bias, 0.)``,
    which raises when ``bias=False`` because ``m.bias`` is ``None``.
    """
    m = nn.Linear(in_features, out_features, bias)
    nn.init.xavier_uniform_(m.weight)
    if bias:
        nn.init.constant_(m.bias, 0.)
    return m
def PositionalEmbedding(num_embeddings, embedding_dim, padding_idx, left_pad, learned=False):
    """Build a learned or (default) sinusoidal positional embedding."""
    if not learned:
        return SinusoidalPositionalEmbedding(embedding_dim, padding_idx, left_pad, num_embeddings)
    emb = LearnedPositionalEmbedding(num_embeddings, embedding_dim, padding_idx, left_pad)
    nn.init.normal_(emb.weight, mean=0, std=embedding_dim ** -0.5)
    nn.init.constant_(emb.weight[padding_idx], 0)
    return emb
|
import asyncio
import logging
import os
import signal
import socket
import time
from typing import Optional, TYPE_CHECKING
from urllib.parse import urlsplit
from addict import Dict
from pyrogram.types import Message
from hikcamerabot.common.video.tasks.ffprobe_context import GetFfprobeContextTask
from hikcamerabot.common.video.tasks.thumbnail import MakeThumbnailTask
from hikcamerabot.constants import (
FFMPEG_CAM_VIDEO_SRC,
FFMPEG_CMD_HLS_VIDEO_GIF,
FFMPEG_CMD_VIDEO_GIF,
FFMPEG_SRS_HLS_VIDEO_SRC,
FFMPEG_SRS_RTMP_VIDEO_SRC,
RTSP_TRANSPORT_TPL,
SRS_LIVESTREAM_NAME_TPL,
)
from hikcamerabot.enums import Event, VideoGifType
from hikcamerabot.event_engine.events.outbound import (
SendTextOutboundEvent,
VideoOutboundEvent,
)
from hikcamerabot.event_engine.queue import get_result_queue
from hikcamerabot.utils.task import wrap
from hikcamerabot.utils.utils import format_ts, gen_random_str, bold
if TYPE_CHECKING:
from hikcamerabot.camera import HikvisionCam
class RecordVideoGifTask:
    """Record a short mp4 clip from a camera with ffmpeg and publish the
    result to the outbound result queue (or an error text event on failure).
    """
    # Grace period (seconds) on top of the configured record time before the
    # ffmpeg subprocess is considered hung and gets killed.
    _PROCESS_TIMEOUT = 30
    # Output filename template per video type: cam id, timestamp, random suffix.
    _VIDEO_FILENAME_MAP: dict[VideoGifType, str] = {
        VideoGifType.ON_ALERT: '{0}-alert-{1}-{2}.mp4',
        VideoGifType.ON_DEMAND: '{0}-{1}-{2}.mp4',
    }
    # Outbound event type emitted for each video type.
    _VIDEO_TYPE_TO_EVENT: dict[VideoGifType, Event] = {
        VideoGifType.ON_ALERT: Event.ALERT_VIDEO,
        VideoGifType.ON_DEMAND: Event.RECORD_VIDEOGIF,
    }
    # Timestamp format embedded into generated filenames.
    FILENAME_TIME_FORMAT = '%Y-%b-%d--%H-%M-%S'
    def __init__(
        self,
        rewind: bool,
        cam: 'HikvisionCam',
        video_type: VideoGifType,
        context: Message = None,
    ):
        """
        :param rewind: extend the record time by the configured rewind time
            and record from the SRS HLS rewind source.
        :param cam: camera to record from.
        :param video_type: on-alert or on-demand recording.
        :param context: originating Telegram message, if any.
        """
        self._log = logging.getLogger(self.__class__.__name__)
        self._cam = cam
        self._video_type = video_type
        self._rewind = rewind
        # Per-type video-gif settings from the camera config.
        self._gif_conf: Dict = self._cam.conf.video_gif[self._video_type.value]
        self._is_srs_enabled: bool = self._cam.conf.livestream.srs.enabled
        self._tmp_storage_path: str = self._gif_conf.tmp_storage
        self._filename = self._get_filename()
        self._file_path: str = os.path.join(self._tmp_storage_path, self._filename)
        self._thumb_path: str = os.path.join(
            self._tmp_storage_path, f'{self._filename}.jpg'
        )
        self._thumb_created = False
        self._rec_time: int = self._gif_conf.record_time
        if self._rewind:
            self._rec_time += self._gif_conf.rewind_time
        self._message = context
        self._event = self._VIDEO_TYPE_TO_EVENT[self._video_type]
        self._result_queue = get_result_queue()
        # Awaitable wrapper around os.killpg.
        self._killpg = wrap(os.killpg)
        self._ffmpeg_cmd = self._build_ffmpeg_cmd()
        # Filled in by _get_probe_ctx() after a successful recording.
        self._duration: Optional[int] = None
        self._width: Optional[int] = None
        self._height: Optional[int] = None
        self._probe_ctx: Optional[dict] = None
    async def run(self) -> None:
        """Record the clip while (for on-demand requests) confirming to the user."""
        await asyncio.gather(self._record(), self._send_confirmation_message())
    async def _record(self) -> None:
        """Run ffmpeg, validate the output file and publish the result
        (video event on success, error text event on failure)."""
        self._log.debug(
            'Recording %s video gif from %s: %s',
            self._video_type.value,
            self._cam.conf.description,
            self._ffmpeg_cmd,
        )
        await self._start_ffmpeg_subprocess()
        is_validated = await self._validate_file()
        if not is_validated:
            err_msg = f'Failed to record {self._file_path} on {self._cam.description}'
            self._log.error(err_msg)
            await self._result_queue.put(
                SendTextOutboundEvent(
                    event=Event.SEND_TEXT,
                    text=f'{err_msg}.\nEvent type: {self._event.value}\nCheck logs.',
                    message=self._message,
                )
            )
        if is_validated:
            await asyncio.gather(self._get_probe_ctx(), self._make_thumbnail_frame())
            await self._send_result()
    async def _make_thumbnail_frame(self) -> None:
        """Create a preview thumbnail; failure is non-fatal (video is sent
        without a thumbnail)."""
        # TODO: Refactor duplicate code. Move to mixin.
        if not await MakeThumbnailTask(self._thumb_path, self._file_path).run():
            self._log.error(
                'Error during making thumbnail context of %s', self._file_path
            )
            return
        self._thumb_created = True
    async def _get_probe_ctx(self) -> None:
        """Read duration/width/height of the recorded file via ffprobe."""
        # TODO: Refactor duplicate code. Move to mixin.
        self._probe_ctx = await GetFfprobeContextTask(self._file_path).run()
        if not self._probe_ctx:
            return
        video_streams = [
            stream
            for stream in self._probe_ctx['streams']
            if stream['codec_type'] == 'video'
        ]
        self._duration = int(float(self._probe_ctx['format']['duration']))
        self._height = video_streams[0]['height']
        self._width = video_streams[0]['width']
    def _post_err_cleanup(self):
        """Delete video file and thumb if they exist after exception."""
        for file_path in (self._file_path, self._thumb_path):
            try:
                os.remove(file_path)
            except Exception as err:
                self._log.warning('File path %s not deleted: %s', file_path, err)
    async def _start_ffmpeg_subprocess(self) -> None:
        """Run the ffmpeg command, killing its process group on timeout."""
        # NOTE(review): the timeout is based on the on-alert record_time
        # (conf.alert.video_gif) regardless of self._video_type and does not
        # include the rewind extension added to self._rec_time, so a long
        # on-demand/rewind recording may be killed early — confirm intended.
        proc_timeout = (
            self._cam.conf.alert.video_gif.record_time + self._PROCESS_TIMEOUT
        )
        proc = await asyncio.create_subprocess_shell(self._ffmpeg_cmd)
        try:
            await asyncio.wait_for(proc.wait(), timeout=proc_timeout)
        except asyncio.TimeoutError:
            self._log.error(
                'Failed to record %s: FFMPEG process ran longer than '
                'expected and was killed',
                self._file_path,
            )
            await self._killpg(os.getpgid(proc.pid), signal.SIGINT)
            self._post_err_cleanup()
    async def _validate_file(self) -> bool:
        """Validate recorded file existence and size."""
        try:
            is_empty = os.path.getsize(self._file_path) == 0
        except FileNotFoundError:
            self._log.error(
                'Failed to validate %s: File does not exist', self._file_path
            )
            return False
        except Exception:
            self._log.exception('Failed to validate %s', self._file_path)
            return False
        if is_empty:
            self._log.error('Failed to validate %s: File is empty', self._file_path)
            self._post_err_cleanup()
        return not is_empty
    async def _send_result(self) -> None:
        """Publish the recorded video with its probe metadata to the result queue."""
        await self._result_queue.put(
            VideoOutboundEvent(
                event=self._event,
                video_path=self._file_path,
                video_duration=self._duration or 0,
                video_height=self._height or 0,
                video_width=self._width or 0,
                thumb_path=self._thumb_path if self._thumb_created else None,
                cam=self._cam,
                message=self._message,
            )
        )
    async def _send_confirmation_message(self) -> None:
        """Tell the requesting user a recording started (on-demand only)."""
        if self._video_type is VideoGifType.ON_DEMAND:
            text = f'Recording video gif for {self._rec_time} seconds'
            await self._result_queue.put(
                SendTextOutboundEvent(
                    event=Event.SEND_TEXT,
                    message=self._message,
                    text=bold(text),
                )
            )
    def _get_filename(self) -> str:
        """Build a unique filename from cam id, timestamp and a random suffix."""
        return self._VIDEO_FILENAME_MAP[self._video_type].format(
            self._cam.id,
            format_ts(time.time(), time_format=self.FILENAME_TIME_FORMAT),
            gen_random_str(),
        )
    def _build_ffmpeg_cmd(self) -> str:
        """Assemble the ffmpeg shell command for the configured video source:
        SRS HLS (rewind), SRS RTMP (no rewind), or the camera's RTSP stream
        when SRS is disabled."""
        if self._is_srs_enabled:
            livestream_name = SRS_LIVESTREAM_NAME_TPL.format(
                channel=self._gif_conf.channel, cam_id=self._cam.id
            )
            if self._rewind:
                video_source = FFMPEG_SRS_HLS_VIDEO_SRC.format(
                    ip_address=socket.gethostbyname('hikvision_srs_server'),
                    livestream_name=livestream_name,
                )
                # Rewind recordings use the dedicated HLS command template.
                return FFMPEG_CMD_HLS_VIDEO_GIF.format(
                    video_source=video_source,
                    rec_time=self._rec_time,
                    loglevel=self._gif_conf.loglevel,
                    filepath=self._file_path,
                )
            else:
                video_source = FFMPEG_SRS_RTMP_VIDEO_SRC.format(
                    livestream_name=livestream_name
                )
        else:
            video_source = FFMPEG_CAM_VIDEO_SRC.format(
                user=self._cam.conf.api.auth.user,
                pw=self._cam.conf.api.auth.password,
                host=urlsplit(self._cam.conf.api.host).netloc,
                rtsp_port=self._cam.conf.rtsp_port,
                channel=self._gif_conf.channel,
            )
        return FFMPEG_CMD_VIDEO_GIF.format(
            rtsp_transport=RTSP_TRANSPORT_TPL.format(
                rtsp_transport_type=self._gif_conf.rtsp_transport_type
            )
            if not self._is_srs_enabled
            else '',
            video_source=video_source,
            rec_time=self._rec_time,
            loglevel=self._gif_conf.loglevel,
            filepath=self._file_path,
        )
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2022, CloudBlue
# All rights reserved.
import copy
from typing import Dict, List, Tuple, Sequence
from connect.client import R, ConnectClient
from connect.client.models import ResourceSet
from google.cloud import channel
from google.cloud.channel_v1 import Offer, PriceByResource, PricePhase, PriceTier, Price
from google.oauth2 import service_account
from reports.collection import Collection
from reports.utils import parameter_value
# Column headers of the generated report; in generate() the values yielded by
# _process_line() are paired positionally with these headers.
HEADERS = (
    'Environment', 'Vendor Name', 'Vendor ID', 'Marketplace Name', 'Marketplace ID', 'Product Name',
    'Product Display Name', 'Product Description', 'Sku Name', 'Sku Display Name',
    'Sku Description', 'Offer Name', 'Offer Display Name', 'Offer Description', 'Payment Plan',
    'Payment Type', 'Payment Cycle Duration', 'Payment Cycle Period Type', 'Minimum Max Units',
    'Maximum Max Units', 'Minimum Num Units', 'Maximum Num Units', 'Allowed Regions',
    'Allowed Customer Types', 'Resource Type', 'Base Price', 'Discount', 'Effective Price'
)
# Prefixes of the Connect configuration parameter ids holding the Google
# credentials; the environment name ('test'/'prod') is appended at runtime.
GOOGLE_PARAMETERS = ['json_credentials_', 'provider_email_', 'provider_cloud_channel_id_']
# OAuth scope requested for the Google API — presumably consumed by
# service_account credentials elsewhere in this module; verify against callers.
SCOPES = ["https://www.googleapis.com/auth/apps.order"]
# Connect product ids this report supports (Google products only).
PRODUCTS = ['PRD-861-570-450', 'PRD-550-104-278']
def generate(
    client=None,
    input_data=None,
    progress_callback=None,
    renderer_type='xlsx',
    extra_context_callback=None,
):
    """Report entry point: yield one row (or dict, for JSON) per Google offer.

    :param client: Connect client used for all API requests.
    :param input_data: report parameters; 'mkp' (marketplace filter) and
        'env' ('test'/'prod') are read here.
    :param progress_callback: callable(progress, total) invoked after each row.
    :param renderer_type: 'csv' additionally yields a header row; 'json'
        yields dicts keyed by snake_cased header names; anything else yields
        plain row sequences.
    :param extra_context_callback: unused here; part of the report interface.
    """
    marketplace_param = input_data.get('mkp', {})
    env = input_data.get('env', 'test')
    products = _get_product_list(client)
    credential_set_list = _get_credentials_set(
        client, products, marketplace_param, env
    )
    def __generate_offer_iterator():
        # Factory so Collection can lazily (re)create the offer iterator,
        # e.g. when computing `total` and when iterating the rows.
        return _set_offer_list(credential_set_list)
    offer_list = Collection(__generate_offer_iterator)
    progress = 0
    total = offer_list.total
    if renderer_type == 'csv':
        # CSV needs an explicit header row; it counts as one progress unit.
        yield HEADERS
        progress += 1
        total += 1
        progress_callback(progress, total)
    for offer in offer_list:
        if renderer_type == 'json':
            # Keys are the snake_cased column headers, values the row cells.
            yield {
                HEADERS[idx].replace(' ', '_').lower(): value
                for idx, value in enumerate(_process_line(offer))
            }
        else:
            yield _process_line(offer)
        progress += 1
        progress_callback(progress, total)
    return 0
def _get_product_list(client: ConnectClient) -> ResourceSet:
    """ Fetch the products this report runs against.

    Only the Google products listed in PRODUCTS are queried, since the
    report works exclusively with Google offers.
    :param client: Connect Client needed to make requests
    :return Result set of product query
    """
    rql = R()
    rql &= R().id.oneof(PRODUCTS)
    return client.collection('products').filter(rql).all()
def _get_credentials_set(
    client: ConnectClient,
    products: ResourceSet,
    marketplace_param: Dict,
    env: str
) -> List[Dict]:
    """Collect the Google credential sets configured in Connect.

    For every product, the configuration collection is queried for the
    known Google parameter ids (``GOOGLE_PARAMETERS`` suffixed with the
    selected environment), optionally restricted to the chosen
    marketplaces, and the results are grouped per marketplace/product.

    :param client: Connect client used to issue the requests
    :param products: filtered products the report runs against
    :param marketplace_param: marketplace selection from the report input
    :param env: selected environment ('test' or 'prod')
    :return: list with the data needed to obtain each offer list
    """
    collected = []
    for product in products:
        query = R()
        if marketplace_param.get('all') is False:
            # Restrict to the explicitly selected marketplaces.
            query &= R().marketplace.id.oneof(
                marketplace_param.get('choices', [])
            )
        query &= R().parameter.id.oneof(
            [param + env for param in GOOGLE_PARAMETERS]
        )
        # Ordering by marketplace id lets the parser group params per marketplace.
        configurations = client.collection('products').resource(product.get('id')).collection(
            'configurations'
        ).filter(query).order_by('marketplace.id').all()
        collected.extend(
            _parse_credentials_set_by_marketplace_and_product(product, configurations, env)
        )
    return collected
def _parse_credentials_set_by_marketplace_and_product(
    product: Dict,
    configuration_params: ResourceSet,
    env: str
) -> List[Dict]:
    """ With the complete list of parameters for all the marketplaces in the current environment
    it is needed to group them by marketplace, and in this way has the complete credentials set
    and information that will be needed to add to the list.

    Relies on ``configuration_params`` being ordered by marketplace id (the
    caller queries with ``order_by('marketplace.id')``): a change of
    marketplace id closes the previous credentials set.

    :param product: current product information stored in Connect
    :param configuration_params: result of the sent query to obtain the needed configuration params
    :param env: current environment
    :return: list of completed credential dictionaries (incomplete sets are dropped)
    """
    marketplace_data = []
    # Template for one credentials set; deep-copied per marketplace.
    base_data = {
        'environment': env,
        'vendor_name': product.get('owner', {}).get('name', ''),
        'vendor_id': product.get('owner', {}).get('id', ''),
        'marketplace_name': '',
        'marketplace_id': '',
        'json_credentials': '',
        'provider_email': '',
        'provider_cloud_channel_id': ''
    }
    current_marketplace = ''
    data = None
    for param in configuration_params:
        if param.get('marketplace', {}).get('id', '') != current_marketplace:
            # New marketplace: flush the previous set (if complete) and start a new one.
            _append_credentials_data(data, marketplace_data)
            current_marketplace = param.get('marketplace', {}).get('id', '')
            data = copy.deepcopy(base_data)
            data['marketplace_id'] = param.get('marketplace', {}).get('id', '')
            data['marketplace_name'] = param.get('marketplace', {}).get('name', '')
        # Strip the environment suffix to recover the base parameter key.
        param_id = param.get('parameter', {}).get('id', '').replace("_" + env, "")
        data[param_id] = parameter_value(param, "")
    # Flush the last accumulated credentials set.
    _append_credentials_data(data, marketplace_data)
    return marketplace_data
def _append_credentials_data(data: Dict, marketplace_data: List[Dict]):
""" Function to check if the current credentials set has all the values completed and attach
it if all needed information is present
:param data: last completed data wit the credentials of one marketplace and environment
:param marketplace_data: List of credentials
"""
if (
data
and data.get('json_credentials')
and data.get('provider_email')
and data.get('provider_cloud_channel_id')
):
marketplace_data.append(data)
def _set_offer_list(credential_set_list: List[Dict]):
    """ Obtain all the offers that will be shown in the report.

    For each configured credentials set, authenticate against the Google
    Cloud Channel API impersonating the reseller admin user, and yield
    every offer of the account together with the Connect context
    (environment, vendor and marketplace) it belongs to.

    :param credential_set_list: Complete credentials set list
    """
    for google_credentials in credential_set_list:
        json_key_file = google_credentials.get('json_credentials', '')
        reseller_admin_user = google_credentials.get('provider_email', '')
        account_id = google_credentials.get('provider_cloud_channel_id', '')
        # Build delegated credentials acting on behalf of the reseller admin.
        credentials = service_account.Credentials.from_service_account_info(
            json_key_file,
            scopes=SCOPES
        )
        credentials_delegated = credentials.with_subject(reseller_admin_user)
        client = channel.CloudChannelServiceClient(credentials=credentials_delegated)
        request = channel.ListOffersRequest(parent="accounts/" + account_id)
        offer_list = client.list_offers(request)
        for offer in offer_list:
            # Attach the Connect context so each row can be rendered standalone.
            offer_data = {
                'environment': google_credentials.get('environment', ''),
                'vendor_name': google_credentials.get('vendor_name', ''),
                'vendor_id': google_credentials.get('vendor_id', ''),
                'marketplace_name': google_credentials.get('marketplace_name', ''),
                'marketplace_id': google_credentials.get('marketplace_id', ''),
                'offer': offer,
            }
            yield offer_data
def _process_line(offer_data: Dict):
    """ Process each line data to write in the report.

    Flattens a Google offer plus its Connect context into a tuple of
    values matching ``HEADERS`` (same order, same length).

    :param offer_data: information related with the offer
    """
    offer = offer_data.get('offer')
    param_definition = obtain_param_definition(offer)
    max_units_params = param_definition.get('max_units', {})
    num_units_params = param_definition.get('num_units', {})
    customer_types = [c_type.name for c_type in
                      offer.constraints.customer_constraints.allowed_customer_types]
    base_price, discount, effective_price = parse_price_data(
        obtain_prices_by_resources(offer.price_by_resources)
    )
    # NOTE(review): the ``!= 0`` checks below treat proto default/unspecified
    # enum and duration values as "not set" and render them as '-'.
    return (
        offer_data.get('environment', ''),
        offer_data.get('vendor_name', ''),
        offer_data.get('vendor_id', ''),
        offer_data.get('marketplace_name', ''),
        offer_data.get('marketplace_id', ''),
        offer.sku.product.name,
        offer.sku.product.marketing_info.display_name,
        offer.sku.product.marketing_info.description,
        offer.sku.name,
        offer.sku.marketing_info.display_name,
        offer.sku.marketing_info.description,
        offer.name,
        offer.marketing_info.display_name,
        offer.marketing_info.description,
        offer.plan.payment_plan.name,
        offer.plan.payment_type.name if offer.plan.payment_type != 0 else '-',
        offer.plan.payment_cycle.duration if offer.plan.payment_cycle.duration != 0 else "-",
        offer.plan.payment_cycle.period_type.name if offer.plan.payment_cycle.period_type != 0
        else "-",
        max_units_params.get("min_value", "-"),
        max_units_params.get("max_value", "-"),
        num_units_params.get("min_value", "-"),
        num_units_params.get("max_value", "-"),
        ','.join(map(str, offer.constraints.customer_constraints.allowed_regions)),
        ', '.join(map(str, customer_types)),
        ','.join([resource.resource_type.name for resource in offer.price_by_resources]),
        base_price,
        discount,
        effective_price
    )
def obtain_param_definition(offer: Offer) -> Dict:
    """Index the offer's parameter definitions by name.

    :param offer: Offer object
    :return: mapping of definition name -> {'min_value', 'max_value'}
    """
    return {
        definition.name: {
            'min_value': definition.min_value.int64_value,
            'max_value': definition.max_value.int64_value,
        }
        for definition in offer.parameter_definitions
    }
def obtain_prices_by_resources(price_by_resources: Sequence[PriceByResource]):
    """Turn Google priceByResource data into a legible price structure.

    Produces three parallel maps (base price, discount, effective price),
    keyed by a human-readable divider name built from resource, phase and
    tier information.

    :param price_by_resources: Google Price By Resource sequence
    :return: dict with 'base_price', 'discount' and 'effective_price' maps
    """
    prices_data = {category: {} for category in ('base_price', 'discount', 'effective_price')}
    multi_resource = len(price_by_resources) > 1
    for resource in price_by_resources:
        _obtain_prices_by_phase(
            prices_data,
            resource.price_phases,
            resource.resource_type.name,
            multi_resource
        )
        if resource.price:
            # A flat (non-phased) price is keyed by the resource type alone.
            process_prices(prices_data, resource.resource_type.name + " ->", resource.price)
    return prices_data
def _obtain_prices_by_phase(
    prices_data: Dict,
    price_phases: Sequence[PricePhase],
    resource_type_name: str,
    multi_resource: bool
):
    """Fold each price phase of a resource into ``prices_data``.

    Every phase gets a readable name; tiered prices are delegated to
    ``get_price_tiers`` and plain phase prices to ``process_prices``.

    :param prices_data: target dictionary for the generated price entries
    :param price_phases: sequence of price phases (may be empty)
    :param resource_type_name: name of the resource being processed
    :param multi_resource: whether names are prefixed with the resource type
    """
    for phase in price_phases:
        phase_name = _generate_phase_name(phase, resource_type_name, multi_resource)
        get_price_tiers(prices_data, phase, resource_type_name, phase_name)
        if phase.price:
            process_prices(prices_data, phase_name, phase.price)
def _generate_phase_name(phase: PricePhase, resource_type: str, multi_resource: bool):
""" The phase name has the format:
if the phase has an interval of period (firstPeriod and lastPeriod): 1 - 12 MONTH
if only has the initial value of the period (firstPeriod): >= 13 MONTH
In case of prices with multiple resources, it is needed to add the resource type name
before the phase name
"""
phase_price_name = ">= " + str(phase.first_period)
if phase.last_period != 0:
phase_price_name = str(phase.first_period) + " - " + str(phase.last_period)
phase_price_name += " " + phase.period_type.name
if multi_resource:
phase_price_name = resource_type + " -> " + phase_price_name
return phase_price_name
def get_price_tiers(
    prices_data: Dict,
    price_phase: PricePhase,
    resource_type_name: str,
    phase_price_name: str
):
    """ Get all the PriceTier data that comes in the current phase, when the price is also
    divided by interval of resources, and set the price information into the dict.

    Bug fix: the tier-qualified name is now built in a per-tier local
    variable.  Previously ``phase_price_name`` itself was reassigned inside
    the loop, so with several tiers the bracketed names accumulated
    ("phase [t1] [t2]" instead of "phase [t2]").

    :param prices_data: the dictionary where the information is being stored
    :param price_phase: the current phase to process
    :param resource_type_name: the name of the resource that is being processed
    :param phase_price_name: the current phase name
    """
    for price_tier in price_phase.price_tiers:
        tier_price_name = generate_tier_name(price_tier, resource_type_name)
        tier_divider = phase_price_name + " [" + tier_price_name + "]"
        process_prices(prices_data, tier_divider, price_tier.price)
def generate_tier_name(price_tier: PriceTier, resource_type_name: str):
    """Build the display name for a price tier.

    Formats: "1 - 10 SEAT" for a bounded resource interval (firstResource
    and lastResource set) or ">= 11 SEAT" when only firstResource is set.

    :param price_tier: PriceTier object with the information of the level
    :param resource_type_name: the name of the processing resource type
    :return: the helping name of the current price tier
    """
    first = price_tier.first_resource
    last = price_tier.last_resource
    if last != 0:
        name = str(first) + " - " + str(last)
    else:
        name = ">= " + str(first)
    return name + " " + resource_type_name
def process_prices(prices_data: Dict, name_divider: str, price: Price):
    """Store one price block under ``name_divider`` in ``prices_data``.

    When the price carries an external price URI, that URI is recorded for
    all three categories; otherwise the numeric base price, discount
    percentage and effective price are formatted and stored.

    :param prices_data: dictionary that is going to be updated
    :param name_divider: the generated name that identifies the current price
    :param price: complete price data of the current block
    """
    if price.external_price_uri == '':
        base = price.base_price
        effective = price.effective_price
        prices_data['base_price'][name_divider] = "{} {}".format(base.units, base.currency_code)
        prices_data['discount'][name_divider] = "{}%".format(round(price.discount * 100, 2))
        prices_data['effective_price'][name_divider] = "{} {}".format(
            effective.units, effective.currency_code
        )
    else:
        for category in ('base_price', 'discount', 'effective_price'):
            prices_data[category][name_divider] = price.external_price_uri
def parse_price_data(prices: Dict) -> Tuple:
    """Return the price information using the defined string format.

    :param prices: generated structure with the complete information of the prices
    :return: (base_price, discount, effective_price) as strings
    """
    return tuple(
        join_price_data(prices[category])
        for category in ('base_price', 'discount', 'effective_price')
    )
def join_price_data(price_data: Dict) -> str:
    """Merge one price category's entries into a pipe-separated string.

    When every entry shares the same value, only that single value is
    emitted; otherwise each entry is rendered as "<name> <value>".

    :param price_data: the dictionary of one of the price categories
    :return: the generated string
    """
    distinct_values = set(price_data.values())
    if len(distinct_values) == 1:
        parts = list(distinct_values)
    else:
        parts = [name + " " + value for name, value in price_data.items()]
    return ' | '.join(parts)
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2017 Malcolm Ramsay <malramsay64@gmail.com>
#
# Distributed under terms of the MIT license.
#
# pylint: disable=unused-argument, too-many-public-methods, too-many-instance-attributes
"""Create an interactive view of a configuration.
This is a visualisation of a trajectory, allowing interaction with both the current
frame, zooming in at particular points, and additionally exploring the time dimension of
a trajectory.
This is intended to be a simple and easy to use interface, where the visualisation is
nice and clear, and interaction is intuitive. To this end, while there are tool-tips available,
providing the information on the position, orientation and further details of particles,
they are off by default.
While the particles are trimer in shape, for reasons of performance they are rendered as
3 circles. The performance gain is from using the WebGL for the rendering of the
circles, while rendering of arbitrary shapes is not supported.
"""
import logging
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional
import gsd.hoomd
import numpy as np
from bokeh.layouts import column, row
from bokeh.models import (
Button,
ColorBar,
ColumnDataSource,
Div,
FixedTicker,
LinearColorMapper,
RadioButtonGroup,
Select,
Slider,
Toggle,
)
from bokeh.plotting import figure
from hsluv import hpluv_to_hex
from ..frame import HoomdFrame
from ..molecules import Trimer
from ..order import create_ml_ordering, create_neigh_ordering, create_orient_ordering
from ..util import parse_directory
from .configuration import DARK_COLOURS, frame2data, plot_circles
logger = logging.getLogger(__name__)
gsdlogger = logging.getLogger("gsd")
gsdlogger.setLevel("WARN")
class TrimerFigure:
    """Interactive Bokeh view of a trimer trajectory.

    Builds the file-selection, classification and media-control widgets,
    loads frames from ``dump*.gsd`` trajectory files found under a
    directory, and renders the particles as circles on a WebGL figure.
    """

    # Display name -> ordering/classification function used to colour
    # particles; ``None`` means no classification. ML models passed to
    # ``__init__`` are appended to this mapping (note: class-level dict,
    # shared between instances).
    order_functions: Dict[str, Any] = {
        "None": None,
        "Orient": create_orient_ordering(threshold=0.75),
        "Num Neighs": create_neigh_ordering(neighbours=6),
    }
    # Pixel width shared by all control widgets.
    controls_width = 400
    # Lazily populated state; filled in as the interface is initialised.
    _frame = None
    plot = None
    _temperatures = None
    _pressures = None
    _crystals = None
    _iter_index = None
    _callback = None

    def __init__(self, doc, directory: Path = None, models=None) -> None:
        """Set up data sources and widgets on the Bokeh document *doc*.

        :param doc: Bokeh Document the interface is attached to
        :param directory: directory scanned for ``dump*.gsd`` files
            (defaults to the current working directory)
        :param models: optional list/tuple of ML model files; each is added
            to ``order_functions`` keyed by its file stem
        :raises ValueError: if ``models`` is neither a list nor a tuple
        """
        self._doc = doc
        self._trajectory = [None]
        if directory is None:
            directory = Path.cwd()
        self._source = ColumnDataSource(
            {"x": [], "y": [], "orientation": [], "colour": [], "radius": []}
        )
        if models is not None:
            if not isinstance(models, (list, tuple)):
                raise ValueError("The argument models has to have type list or tuple")
            logger.debug("Found additional models: %s", models)
            for model in models:
                model = Path(model)
                self.order_functions[model.stem] = create_ml_ordering(model)
        self.directory = directory
        self.initialise_directory()
        self._filename_div = Div(text="", width=self.controls_width)
        self.initialise_trajectory_interface()
        self.update_current_trajectory(None, None, None)
        self._playing = False
        # Initialise user interface
        self.initialise_media_interface()
        self.initialise_doc()

    def initialise_directory(self) -> None:
        """Scan the directory and build the cascading selection widgets
        (pressure -> temperature -> crystal -> iteration index)."""
        self.variable_selection = parse_directory(self.directory, glob="dump*.gsd")
        logger.debug("Pressures present: %s", self.variable_selection.keys())
        self._pressures = sorted(list(self.variable_selection.keys()))
        self._pressure_button = RadioButtonGroup(
            name="Pressure ",
            labels=self._pressures,
            active=0,
            width=self.controls_width,
        )
        self._pressure_button.on_change("active", self.update_temperature_button)
        pressure = self._pressures[self._pressure_button.active]
        self._temperatures = sorted(list(self.variable_selection[pressure].keys()))
        self._temperature_button = Select(
            name="Temperature",
            options=self._temperatures,
            value=self._temperatures[0],
            width=self.controls_width,
        )
        self._temperature_button.on_change("value", self.update_crystal_button)
        temperature = self._temperature_button.value
        self._crystals = sorted(
            list(self.variable_selection[pressure][temperature].keys())
        )
        self._crystal_button = RadioButtonGroup(
            name="Crystal", labels=self._crystals, active=0, width=self.controls_width
        )
        self._crystal_button.on_change("active", self.update_index_button)
        crystal = self._crystals[self._crystal_button.active]
        self._iter_index = sorted(
            list(self.variable_selection[pressure][temperature][crystal].keys())
        )
        self._iter_index_button = Select(
            name="Iteration Index",
            options=self._iter_index,
            value=self._iter_index[0],
            width=self.controls_width,
        )
        self._iter_index_button.on_change("value", self.update_current_trajectory)

    @property
    def pressure(self) -> Optional[str]:
        """Currently selected pressure label, or None before initialisation."""
        if self._pressures is None:
            return None
        return self._pressures[self._pressure_button.active]

    @property
    def temperature(self) -> Optional[str]:
        """Currently selected temperature label, or None before initialisation."""
        if self._temperatures is None:
            return None
        return self._temperature_button.value

    @property
    def crystal(self) -> Optional[str]:
        """Currently selected crystal label, or None before initialisation."""
        logger.debug(
            "Current crystal %s from %s", self._crystal_button.active, self._crystals
        )
        if self._crystals is None:
            return None
        return self._crystals[self._crystal_button.active]

    @property
    def iter_index(self) -> Optional[str]:
        """Currently selected iteration-index label."""
        logger.debug(
            "Current index %s from %s", self._iter_index_button.value, self._iter_index
        )
        return self._iter_index_button.value

    def update_temperature_button(self, attr, old, new):
        """Refresh temperature options after a pressure change; cascades down."""
        self._temperatures = sorted(list(self.variable_selection[self.pressure].keys()))
        self._temperature_button.options = self._temperatures
        self._temperature_button.value = self._temperatures[0]
        self.update_crystal_button(None, None, None)

    def update_crystal_button(self, attr, old, new):
        """Refresh crystal options after a temperature change; cascades down."""
        self._crystals = sorted(
            list(self.variable_selection[self.pressure][self.temperature].keys())
        )
        self._crystal_button.labels = self._crystals
        self._crystal_button.active = 0
        self.update_index_button(None, None, None)

    def update_index_button(self, attr, old, new):
        """Refresh iteration-index options and reload the trajectory."""
        self._iter_index = sorted(
            list(
                self.variable_selection[self.pressure][self.temperature][
                    self.crystal
                ].keys()
            )
        )
        self._iter_index_button.options = self._iter_index
        self._iter_index_button.value = self._iter_index[0]
        self.update_current_trajectory(None, None, None)

    def create_files_interface(self) -> None:
        """Assemble and return the file-selection column.

        NOTE(review): returns the column layout despite the ``-> None``
        annotation — confirm and fix the annotation.
        """
        directory_name = Div(
            text=f"<b>Current Directory:</b><br/>{self.directory}",
            width=self.controls_width,
        )
        self._filename_div = Div(text="", width=self.controls_width)
        current_file = self.get_selected_file()
        if current_file is not None:
            self._filename_div.text = f"<b>Current File:</b><br/>{current_file.name}"
        file_selection = column(
            directory_name,
            self._filename_div,
            Div(text="<b>Pressure:</b>"),
            self._pressure_button,
            Div(text="<b>Temperature:</b>"),
            self._temperature_button,
            Div(text="<b>Crystal Structure:</b>"),
            self._crystal_button,
            Div(text="<b>Iteration Index:</b>"),
            self._iter_index_button,
        )
        return file_selection

    def get_selected_file(self) -> Optional[Path]:
        """Path of the trajectory file matching the current widget state."""
        if self.pressure is None:
            return None
        if self.temperature is None:
            return None
        return self.variable_selection[self.pressure][self.temperature][self.crystal][
            self.iter_index
        ]

    def update_frame(self, attr, old, new) -> None:
        """Load the frame at the current slider index and refresh the plot."""
        self._frame = HoomdFrame(self._trajectory[self.index])
        self.update_data(None, None, None)

    def radio_update_frame(self, attr) -> None:
        """Adapter for widget callbacks that only pass the attribute name."""
        self.update_frame(attr, None, None)

    @property
    def index(self) -> int:
        """Current trajectory index; 0 before the slider exists."""
        try:
            return self._trajectory_slider.value
        except AttributeError:
            return 0

    def initialise_trajectory_interface(self) -> None:
        """Create the classification-algorithm radio buttons."""
        logger.debug("Loading Models: %s", self.order_functions.keys())
        self._order_parameter = RadioButtonGroup(
            name="Classification algorithm:",
            labels=list(self.order_functions.keys()),
            active=0,
            width=self.controls_width,
        )
        self._order_parameter.on_click(self.radio_update_frame)

    def create_trajectory_interface(self) -> None:
        """Assemble and return the classification-algorithm column.

        NOTE(review): returns the column layout despite the ``-> None``
        annotation — confirm and fix the annotation.
        """
        return column(
            Div(text="<b>Classification Algorithm:<b>"),
            self._order_parameter,
            Div(text="<hr/>", width=self.controls_width, height=10),
            height=120,
        )

    def update_current_trajectory(self, attr, old, new) -> None:
        """Open the currently selected gsd file and reset slider/filename."""
        if self.get_selected_file() is not None:
            logger.debug("Opening %s", self.get_selected_file())
            self._trajectory = gsd.hoomd.open(str(self.get_selected_file()), "rb")
            num_frames = len(self._trajectory)
            try:
                # Clamp the slider when the new trajectory is shorter.
                if self._trajectory_slider.value > num_frames:
                    self._trajectory_slider.value = num_frames - 1
                self._trajectory_slider.end = len(self._trajectory) - 1
            except AttributeError:
                # Slider not created yet (first call happens in __init__).
                pass
            self.update_frame(attr, old, new)
            current_file = self.get_selected_file()
            if current_file is not None:
                self._filename_div.text = (
                    f"<b>Current File:</b><br/>{current_file.name}"
                )
            else:
                self._filename_div.text = f"<b>Current File:</b><br/>None"

    def initialise_media_interface(self) -> None:
        """Create the playback widgets (slider, play/pause, prev/next, step size)."""
        self._trajectory_slider = Slider(
            title="Trajectory Index",
            value=0,
            start=0,
            end=max(len(self._trajectory), 1),
            step=1,
            width=self.controls_width,
        )
        self._trajectory_slider.on_change("value", self.update_frame)
        self._play_pause = Toggle(
            name="Play/Pause", label="Play/Pause", width=int(self.controls_width / 3)
        )
        self._play_pause.on_click(self._play_pause_toggle)
        self._nextFrame = Button(label="Next", width=int(self.controls_width / 3))
        self._nextFrame.on_click(self._incr_index)
        self._prevFrame = Button(label="Previous", width=int(self.controls_width / 3))
        self._prevFrame.on_click(self._decr_index)
        self._increment_size = Slider(
            title="Increment Size",
            value=10,
            start=1,
            end=100,
            step=1,
            width=self.controls_width,
        )

    def _incr_index(self) -> None:
        """Advance the slider by the increment size, clamped at the end."""
        if self._trajectory_slider.value < self._trajectory_slider.end:
            self._trajectory_slider.value = min(
                self._trajectory_slider.value + self._increment_size.value,
                self._trajectory_slider.end,
            )

    def _decr_index(self) -> None:
        """Step the slider back by the increment size, clamped at the start."""
        if self._trajectory_slider.value > self._trajectory_slider.start:
            self._trajectory_slider.value = max(
                self._trajectory_slider.value - self._increment_size.value,
                self._trajectory_slider.start,
            )

    def create_media_interface(self):
        """Assemble and return the media-controls column."""
        # return widgetbox([prevFrame, play_pause, nextFrame, increment_size], width=300)
        return column(
            Div(text="<b>Media Controls:</b>"),
            self._trajectory_slider,
            row(
                [self._prevFrame, self._play_pause, self._nextFrame],
                width=int(self.controls_width),
            ),
            self._increment_size,
        )

    # When using webgl as the backend the save option doesn't work for some reason.
    def _update_source(self, data):
        """Push new particle data into the plot's ColumnDataSource."""
        logger.debug("Data Keys: %s", data.keys())
        self._source.data = data

    def get_order_function(self) -> Optional[Callable]:
        """Ordering function matching the active classification radio button."""
        return self.order_functions[
            list(self.order_functions.keys())[self._order_parameter.active]
        ]

    def update_data(self, attr, old, new):
        """Re-render the current frame: title, colouring and particle data."""
        if self.plot and self._frame is not None:
            self.plot.title.text = f"Timestep {self._frame.timestep:,}"
        if self._frame is not None:
            data = frame2data(
                self._frame, order_function=self.get_order_function(), molecule=Trimer()
            )
            self._update_source(data)

    def update_data_attr(self, attr):
        """Adapter for widget callbacks that only pass the attribute name."""
        self.update_data(attr, None, None)

    def _play_pause_toggle(self, attr):
        """Start or stop the periodic frame-advance callback (100 ms period)."""
        if self._playing:
            self._doc.remove_periodic_callback(self._callback)
            self._playing = False
        else:
            self._callback = self._doc.add_periodic_callback(self._incr_index, 100)
            self._playing = True

    @staticmethod
    def create_legend():
        """Build a small stand-alone figure holding the two colour bars
        (orientation in [-π, π] and crystal/liquid classification)."""
        cm_orient = LinearColorMapper(palette=DARK_COLOURS, low=-np.pi, high=np.pi)
        cm_class = LinearColorMapper(
            palette=[hpluv_to_hex((0, 0, 60)), hpluv_to_hex((0, 0, 80))], low=0, high=2
        )
        plot = figure(width=200, height=250)
        plot.toolbar_location = None
        plot.border_fill_color = "#FFFFFF"
        plot.outline_line_alpha = 0
        cb_orient = ColorBar(
            title="Orientation",
            major_label_text_font_size="10pt",
            title_text_font_style="bold",
            color_mapper=cm_orient,
            orientation="horizontal",
            ticker=FixedTicker(ticks=[-np.pi, 0, np.pi]),
            major_label_overrides={-np.pi: "-π", 0: "0", np.pi: "π"},
            width=100,
            major_tick_line_color=None,
            location=(0, 120),
        )
        cb_class = ColorBar(
            color_mapper=cm_class,
            title="Classification",
            major_label_text_font_size="10pt",
            title_text_font_style="bold",
            orientation="vertical",
            ticker=FixedTicker(ticks=[0.5, 1.5]),
            major_label_overrides={0.5: "Crystal", 1.5: "Liquid"},
            label_standoff=15,
            major_tick_line_color=None,
            width=20,
            height=80,
            location=(0, 0),
        )
        plot.add_layout(cb_orient)
        plot.add_layout(cb_class)
        return plot

    def initialise_doc(self):
        """Create the main WebGL figure and attach the particle glyphs."""
        self.plot = figure(
            width=920,
            height=800,
            aspect_scale=1,
            match_aspect=True,
            title=f"Timestep {0:.5g}",
            output_backend="webgl",
            active_scroll="wheel_zoom",
        )
        self.plot.xgrid.grid_line_color = None
        self.plot.ygrid.grid_line_color = None
        self.plot.x_range.start = -30
        self.plot.x_range.end = 30
        self.plot.y_range.start = -30
        self.plot.y_range.end = 30
        plot_circles(self.plot, self._source)

    def create_doc(self):
        """Lay out controls, main plot and legend on the Bokeh document."""
        self.update_data(None, None, None)
        controls = column(
            [
                self.create_files_interface(),
                self.create_trajectory_interface(),
                self.create_media_interface(),
            ],
            width=int(self.controls_width * 1.1),
        )
        self._doc.add_root(row(controls, self.plot, self.create_legend()))
        self._doc.title = "Configurations"
def make_document(doc, directory: Path = None, models: Optional[List[Path]] = None):
    """Build the interactive trajectory view on the given Bokeh document.

    :param doc: Bokeh Document to populate
    :param directory: directory scanned for trajectory files
    :param models: optional ML model files to add as ordering functions
    """
    if models is None:
        models = []
    viewer = TrimerFigure(doc, directory=directory, models=models)
    viewer.create_doc()
|
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# @author jsbxyyx
# @since 1.0
from seata.exception.NotSupportYetException import NotSupportYetException
from seata.exception.ShouldNeverHappenException import ShouldNeverHappenException
from seata.sqlparser.SQLType import SQLType
from seata.sqlparser.mysql.MySQLDeleteSQLRecognizer import MySQLDeleteSQLRecognizer
from seata.sqlparser.mysql.MySQLInsertSQLRecognizer import MySQLInsertRecognizer
from seata.sqlparser.mysql.MySQLSelectSQLRecognizer import MySQLSelectSQLRecognizer
from seata.sqlparser.mysql.MySQLUpdateSQLRecognizer import MySQLUpdateSQLRecognizer
from am.mysql_base import MySqlBase, MysqlStatementVisitor, InsertStatement, \
DeleteStatement, UpdateStatement, SelectStatement
from seata.sqlparser.util.JdbcConstants import JdbcConstants
class SQLVisitorFactory(object):
    """Factory turning raw SQL text into seata SQL recognizer objects."""

    @classmethod
    def get(cls, target_sql, db_type):
        """Parse ``target_sql`` and return the single matching recognizer.

        :param target_sql: the SQL text to parse
        :param db_type: database type; only MySQL is supported
        :return: one recognizer, or None when no statement is recognized
        :raises NotSupportYetException: non-MySQL db or multiple statements
        :raises ShouldNeverHappenException: the parser produced no statements
        """
        if db_type != JdbcConstants.MYSQL:
            raise NotSupportYetException()
        stmts = MySqlBase.parserSQLStatement(target_sql)
        if stmts is None or len(stmts) <= 0:
            raise ShouldNeverHappenException()
        sql_recognizers = cls.get_sql_recognizer(target_sql, stmts)
        if sql_recognizers is None or len(sql_recognizers) <= 0:
            return None
        if len(sql_recognizers) > 1:
            raise NotSupportYetException()
        return sql_recognizers[0]

    @classmethod
    def get_sql_recognizer(cls, target_sql, stmts):
        """Build one recognizer per supported DML statement in ``stmts``.

        Bug fix: ``init()`` was previously called after the isinstance
        chain for every statement, so an unrecognized statement type either
        re-initialized the previous recognizer or raised NameError on the
        first iteration. Unrecognized statements are now skipped cleanly.

        :param target_sql: original SQL text, attached to each recognizer
        :param stmts: parsed statements from MySqlBase.parserSQLStatement
        :return: list of initialized recognizers
        """
        sql_recognizers = []
        for dml_stmt in stmts:
            if dml_stmt is None:
                continue
            stmt = MysqlStatementVisitor().visit(dml_stmt)
            sql_recognizer = None
            if isinstance(stmt, InsertStatement):
                sql_recognizer = MySQLInsertRecognizer()
                # INSERT IGNORE gets its own SQL type.
                sql_recognizer.sql_type = SQLType.INSERT_IGNORE if stmt.ignore else SQLType.INSERT
            elif isinstance(stmt, DeleteStatement):
                sql_recognizer = MySQLDeleteSQLRecognizer()
                sql_recognizer.sql_type = SQLType.DELETE
            elif isinstance(stmt, UpdateStatement):
                sql_recognizer = MySQLUpdateSQLRecognizer()
                sql_recognizer.sql_type = SQLType.UPDATE
            elif isinstance(stmt, SelectStatement):
                sql_recognizer = MySQLSelectSQLRecognizer()
                # SELECT ... FOR UPDATE is distinguished by the lock clause.
                sql_recognizer.sql_type = (
                    SQLType.SELECT_FOR_UPDATE if stmt.lock is not None else SQLType.SELECT
                )
            if sql_recognizer is None:
                # Not a supported DML statement: skip it.
                continue
            sql_recognizer.stmt = stmt
            sql_recognizer.original_sql = target_sql
            sql_recognizer.init()
            sql_recognizers.append(sql_recognizer)
        return sql_recognizers
|
from django.core.management.base import BaseCommand
from ...actions import plans
class Command(BaseCommand):
    """Django management command that pushes the local plan definitions to Stripe."""
    help = "Make sure your Stripe account has the plans"
    def handle(self, *args, **options):
        # Delegate to the actions layer, which performs the actual sync.
        plans.sync_plans()
|
from oscar.apps.partner import config
class PartnerConfig(config.PartnerConfig):
    """Test-site app config reusing Oscar's partner app under the tests._site path."""
    name = 'tests._site.apps.partner'
|
import argparse, logging, subprocess, time, multiprocessing, sys
from pathlib import Path
from unet_test import run_segmentation
if __name__=="__main__":
    # Initialize the logger
    logging.basicConfig(format='%(asctime)s - %(name)-8s - %(levelname)-8s - %(message)s',
                        datefmt='%d-%b-%y %H:%M:%S')
    logger = logging.getLogger("main")
    logger.setLevel(logging.INFO)

    ''' Argument parsing '''
    logger.info("Parsing arguments...")
    parser = argparse.ArgumentParser(prog='main', description='WIPP plugin to test UNet model from UFreiburg')

    # Input arguments
    parser.add_argument('--weightsfilename', dest='weightsfilename', type=str,
                        help='Weights file name for testing.', required=True)
    parser.add_argument('--inpDir', dest='inpDir', type=str,
                        help='Input image collection to be processed by this plugin.', required=True)
    parser.add_argument('--filePattern', dest='filePattern', type=str,
                        help='Filename pattern to filter data.', required=True)
    parser.add_argument('--pixelsize', dest='pixelsize', type=str,
                        help='Input image pixel size in micrometers.', required=False)
    parser.add_argument('--weights', dest='weights', type=str,
                        help='Weights file path for testing.', required=True)
    # Output arguments
    parser.add_argument('--outDir', dest='outDir', type=str,
                        help='Output collection', required=True)

    # Parse the arguments
    args = parser.parse_args()
    weightsfilename = args.weightsfilename
    logger.info('weightsfilename = {}'.format(weightsfilename))
    inpDir = args.inpDir
    if Path(inpDir).joinpath('images').is_dir():
        # Bug fix: switch to the images subfolder when present. Previously the
        # computed path was stored in an unused variable and never applied, so
        # the collection root was always used.
        inpDir = str(Path(inpDir).joinpath('images').absolute())
    logger.info('inpDir = {}'.format(inpDir))
    filePattern = args.filePattern
    logger.info('filePattern = {}'.format(filePattern))
    pixelsize = args.pixelsize
    logger.info('pixelsize = {}'.format(pixelsize))
    weights = args.weights
    logger.info('weights = {}'.format(weights))
    outDir = args.outDir
    logger.info('outDir = {}'.format(outDir))

    # Run the segmentation on the resolved input directory.
    run_segmentation(inpDir, filePattern, pixelsize, weights, weightsfilename, outDir)
    logger.info('Inference completed.')
"""
Fall 2017 CSc 690
File: controller.py
Author: Steve Pedersen & Andrew Lesondak
System: OS X
Date: 12/13/2017
Usage: python3 spotify_infosuite.py
Dependencies: model, musikki, playback, reviews, view, requests, urllib, unidecode, pyqt5
Description: Controller class. Used to generate window frames and handle events, such as key presses, mouse clicks.
It also handles calculations needed to display elements to the window correctly.
"""
import model
import view
import playback
import musikki
import flickr
from flickr import flickr_thread
from reviews import reviews
import json
import sys
import threading
import requests
import urllib
import ssl
import os
import sys
import shutil
import unidecode
import string
from threading import Thread
from time import sleep
from urllib.request import urlopen
from bs4 import BeautifulSoup
from PyQt5.QtWidgets import QApplication, QWidget, QLabel, QPushButton, QAction, QLineEdit
from PyQt5.QtMultimedia import QSoundEffect
from PyQt5.QtCore import *
from PyQt5 import QtNetwork, QtCore
from PyQt5.QtGui import *
from PyQt5 import QtGui
class Controller(QWidget):
"""Handles all logic to build frames and dispatch content to window.
Args:
app (object) -- QApplication
use_default (bool) -- Use default window size or not
"""
    def __init__(self, app, use_default=True):
        """Size the window, build the multi-frame view and initialise every frame.

        :param app: the QApplication instance
        :param use_default: scale to the default (1080p-based) window size
        """
        super().__init__()
        self.app = app
        self.determine_window_size(use_default)
        # Build the main view: Multi-Frame Window
        self.multi_frame_window = view.MultiFrameWindow(
            self.window_x,
            self.window_y,
            self.window_w,
            self.window_h,
            "Spotify Info Suite", # window title
            "multi_frame_window" # object name
        )
        self.multi_frame_window.show()
        # Each frame owns one content area of the suite.
        self.init_playback_frame()
        self.init_bio_frame()
        self.init_news_frame()
        self.init_review_frame()
        self.init_images_frame()
        self.init_lyrics_frame()
        self.init_social_frame()
def determine_window_size(self, use_default_size):
"""Window scales to a 1080 screen resolution by default, but will revert to your
own screen resolution if the app window ends up being bigger than your screen
or if use_default_size is set to False
Args:
use_default_size (bool) -- Use default window size or not
"""
screen_resolution = self.app.desktop().screenGeometry()
self.screen_width = screen_resolution.width()
self.screen_height = screen_resolution.height()
# minimum window dimensions
min_w, min_h = 1440, 900
# default window dimensions
def_w, def_h = 1920, 1080
window_fits = False
while not window_fits:
if not use_default_size:
w = self.screen_width
h = self.screen_height
else:
w = def_w
h = def_h
space_w = w / 4
space_h = h / 4
self.window_w = w - space_w
self.window_h = h - space_h
self.window_x = space_w / 4
self.window_y = space_h / 2
if not use_default_size:
window_fits = True
elif self.window_w <= min_w and self.window_h <= min_h:
window_fits = True
else:
def_w = min_w
def_h = min_h
def init_bio_frame(self):
"""
Initialize Bio frame and make the initial async request to Musikki for the Bio.
"""
x = 0
y = self.window_h * 0.1
w = self.window_w / 3
h = self.window_h*3/4 - y
self.bio_frame = model.Frame(
self, self.multi_frame_window, x,y, w,h, "bio_frame"
)
self.bio_frame.set_display_title("Bio", 10, 5)
self.bio_expando_btn = self.bio_frame.create_expando_button()
self.bio_expando_btn.clicked.connect(self.expand_bio)
self.multi_frame_window.add_frame_bio(self.bio_frame)
self.bio_nam = QtNetwork.QNetworkAccessManager()
self.bio_nam.finished.connect(self.search_bio_handler)
self.musikki_artist = musikki.search(self.current_artist)
if not self.musikki_artist.is_found:
# try again with formatted string
formatted_artist = self.format_unicode_alpha(self.current_artist)
self.musikki_artist = musikki.search(formatted_artist)
if self.musikki_artist.is_found:
self.musikki_artist.get_full_bio(self.bio_nam)
else:
self.bio_frame.set_display_text('No results for current artist.', 10, 45)
def init_news_frame(self):
"""
Initialize News frame and make the initial async request to Musikki.
"""
x = 0
y = self.window_h*3 / 4
w = self.window_w / 3
h = self.window_h / 4
self.news_frame = model.Frame(
self, self.multi_frame_window, x,y, w,h, "news_frame"
)
self.news_frame.set_display_title("News", 10, 5)
self.multi_frame_window.add_frame(self.news_frame)
self.news_nam = QtNetwork.QNetworkAccessManager()
self.news_nam.finished.connect(self.news_handler)
if self.musikki_artist.is_found:
self.musikki_artist.get_news(self.news_nam)
def init_playback_frame(self):
"""
Initialize Playback Frame, make the connection to Spotify and create playback listener thread.
"""
self.spotify = self.open_spotify()
self.update_current_playing()
self.playback_title_x = 10
self.playback_title_y = 5
x = 0
y = 0
w = self.window_w / 3
h = self.window_h * 0.1
self.playback_frame = model.Frame(self, self.multi_frame_window, x,y, w,h, 'playback_frame')
self.playback_frame.set_display_title(
self.get_current_playing(), self.playback_title_x, self.playback_title_y
)
self.playback_frame.create_playback_buttons()
self.playback_frame.get_playback_prev_button().clicked.connect(self.prev)
self.playback_frame.get_playback_play_button().clicked.connect(self.play_pause)
self.playback_frame.get_playback_next_button().clicked.connect(self.next)
self.multi_frame_window.add_frame(self.playback_frame)
# spawn a playback listener to keep InfoSuite in sync with Spotify
self.listener = Listener(self.current_playing, self.spotify)
self.listener.song_change.connect(self.update_playback_display)
self.listener.run()
def init_lyrics_frame(self):
"""
Initialize Lyrics frame and make the initial async request to Genius.
"""
x = self.window_w / 3
y = 0
w = self.window_w / 3
h = self.window_h * 0.75
self.lyrics_frame = model.Frame(
self, self.multi_frame_window, x,y, w,h, "lyrics_frame"
)
self.lyrics_frame.set_display_title("Lyrics", 10, 5)
self.lyrics_expando_btn = self.lyrics_frame.create_expando_button()
self.lyrics_expando_btn.clicked.connect(self.expand_lyrics)
self.multi_frame_window.add_frame(self.lyrics_frame)
self.lyrics_nam = QtNetwork.QNetworkAccessManager()
self.lyrics_nam.finished.connect(self.lyrics_handler)
self.get_lyrics()
def init_review_frame(self):
"""
Initialize Review (Pitchfork) frame and make the initial async request to Pitchfork & Metacritic.
"""
x = self.window_w * 2 / 3
y = self.window_h / 2
w = self.window_w / 3
h = self.window_h * 0.37
title_x = 10
title_y = 5
self.review_frame = model.Frame(
self, self.multi_frame_window, x,y, w,h, 'review_frame'
)
self.review_frame.set_display_title('Reviews', title_x, title_y)
self.review_expando_btn = self.review_frame.create_expando_button()
self.review_expando_btn.clicked.connect(self.expand_review)
self.multi_frame_window.add_frame(self.review_frame)
self.init_metacritic_frame()
self.get_pitchfork_review()
self.get_metacritic_review()
def init_metacritic_frame(self):
"""
Initialize Metacritic frame.
"""
x = self.window_w * 2/3
y = self.window_h/2 + self.window_h*0.37
w = self.window_w / 3
h = self.window_h * 0.13
self.metacritic_frame = model.Frame(
self, self.multi_frame_window, x,y, w,h, 'metacritic_frame'
)
self.multi_frame_window.add_frame(self.metacritic_frame)
def init_images_frame(self):
"""
Initialize Images frame and make the initial async requests to Musikki and Flickr.
"""
x = self.window_w * 2 / 3
y = 0
w = self.window_w / 3
h = self.window_h / 2
title_x = 10
title_y = 5
self.images_frame = model.Frame(
self, self.multi_frame_window, x,y, w,h, 'images_frame'
)
self.images_frame.set_display_title('Images', title_x, title_y)
self.multi_frame_window.add_frame(self.images_frame)
self.images_nam = QtNetwork.QNetworkAccessManager()
self.images_nam.finished.connect(self.musikki_images_handler)
self.get_images()
def init_social_frame(self):
"""
Initialize Social frame and make the initial async requests to Musikki.
"""
x = self.window_w / 3
y = self.window_h * 0.75
w = self.window_w / 3
h = self.window_h * 0.25
self.social_frame = model.Frame(
self, self.multi_frame_window, x,y, w,h, "social_frame"
)
self.social_frame.set_display_title("Social", 10, 5)
self.multi_frame_window.add_frame(self.social_frame)
self.social_nam = QtNetwork.QNetworkAccessManager()
self.social_nam.finished.connect(self.social_handler)
if self.musikki_artist.is_found:
self.musikki_artist.get_social_media_twitter(self.social_nam)
else:
self.social_frame.set_display_text('No results for current artist.', 10, 45)
def get_images(self):
"""Spawn a thread to request images from Flickr.
Thread will signal to update_images_frame() handler with the downloaded images.
"""
if self.musikki_artist.is_found:
self.musikki_artist.get_full_images(self.images_nam)
requester = flickr_thread.Requester()
requester.flickr_reciever.connect(self.update_images_frame)
requester.get_images(self.current_artist)
def get_pitchfork_review(self):
"""Spawn a thread to fetch a review for current album from Pitchfork.com.
Thread will signal to update_review_frame() handler with the downloaded review.
"""
requester = reviews.Requester()
requester.pitchfork_receiver.connect(self.update_review_frame)
artist, album = self.format_unicode_alpha([self.current_artist, self.current_album])
requester.get_pitchfork_review(artist, album)
def get_metacritic_review(self):
"""Spawn a thread to fetch a review for current album from a Metacritic API.
Thread will signal to update_review_frame() handler with the downloaded review.
"""
requester = reviews.Requester()
requester.metacritic_receiver.connect(self.update_review_frame)
requester.get_metacritic_review(self.current_artist, self.current_album)
def update_everything(self):
"""
Fetch new info for all frames.
"""
self.update_current_playing()
self.playback_frame.set_display_title(
self.current_playing, self.playback_title_x, self.playback_title_y
)
self.update_artist_info(update_playback=False)
self.update_album_info(update_playback=False)
self.update_song_info(update_playback=False)
def update_artist_info(self, update_playback=True):
"""
Fetch new info for the following frames, which are dependent on artist:
Bio, News, Social Media, Images
"""
if update_playback:
self.update_current_playing()
self.playback_frame.set_display_title(self.current_playing, 10, 10)
self.musikki_artist = musikki.search(self.get_current_artist())
self.musikki_artist.get_full_bio(self.bio_nam)
self.musikki_artist.get_news(self.news_nam)
self.musikki_artist.get_social_media_twitter(self.social_nam)
self.images_frame.clear_images_list()
self.get_images()
def update_song_info(self, update_playback=True):
"""
Fetch new info for the following frames, which are dependent on song:
Lyrics
"""
if update_playback:
self.update_current_playing()
self.playback_frame.set_display_title(self.current_playing, 10, 10)
self.get_lyrics()
def update_album_info(self, update_playback=True):
"""
Fetch new info for the following frames, which are dependent on album:
Reviews: Pitchfork, Metacritic
"""
if update_playback:
self.update_current_playing()
self.playback_frame.set_display_title(self.current_playing, 10, 10)
self.get_pitchfork_review()
self.get_metacritic_review()
def update_current_playing(self):
"""
Update formatted playback, artist, song and album strings from Spotify.
"""
self.current_playing = self.get_current_playing()
self.current_artist = self.get_current_artist()
self.current_song = self.get_current_song()
self.current_album = self.get_current_album()
print('='*60, '\n\n-----Now Playing-----')
print('Artist:\t', self.current_artist)
print('Song:\t', self.current_song)
print('Album:\t', self.current_album, '\n')
def get_lyrics(self, url=''):
"""Make an async request to Genius.com for lyrics.
Args:
url (str) -- Either the url we know or the one returned in a 301 response.
"""
artist, song = self.format_unicode_alpha([self.current_artist, self.current_song])
print('Searching lyrics for: ', artist, ' - ', song)
if url == '':
url = "https://genius.com/%s-%s-lyrics" % (artist.replace(' ', '-'), song.replace(' ', '-'))
req = QtNetwork.QNetworkRequest(QtCore.QUrl(url))
self.lyrics_nam.get(req)
def set_lyrics(self, url='', lyrics_exist=True):
"""Make synchronous lyrics request, then set text in the lyrics frame.
Args:
url (str) -- URL to request lyrics if not using default URL
lyrics_exist (bool) -- Don't make request for lyrics if you know they don't exist.
"""
error = "Error: Could not find lyrics."
proxy = urllib.request.getproxies()
# remove punctuation and convert to English alphabet
artist, song = self.format_unicode_alpha([self.current_artist, self.current_song])
if lyrics_exist:
try:
if url == '':
url = "https://genius.com/%s-%s-lyrics"%(artist.replace(' ', '-'),song.replace(' ', '-'))
lyricspage = requests.get(url, proxies=proxy)
soup = BeautifulSoup(lyricspage.text, 'html.parser')
lyrics = soup.text.split(' Lyrics')[3].split('More on Genius')[0]
if artist.lower().replace(" ", "") not in soup.text.lower().replace(" ", ""):
lyrics = error
self.lyrics_frame.set_results(True)
except Exception:
lyrics = error
else:
lyrics = error
# set those lyrics on the frame
self.lyrics_frame.set_display_text(lyrics, 10, 45, 'lyrics_text')
def format_unicode_alpha(self, strings):
"""Removes punctuation and replaces non-English alphabet chars with closest equivalent.
Args:
strings (list:str) -- A list of strings or single string to be formatted
"""
formatted_strings = []
is_list = True
if isinstance(strings, str):
is_list = False
strings = [strings]
for s in strings:
s = unidecode.unidecode(s)
s = s.translate(str.maketrans('','',string.punctuation))
formatted_strings.append(s)
return (formatted_strings if is_list else formatted_strings[0])
def update_review_frame(self, review):
"""Reviews Handler.
Args:
review (str:object) -- Either Pitchfork formatted string or a metacritic.Review object
Review object consists of the following:
artist album date critic_rating critic_count user_rating user_count img_url
"""
# Pitchfork frame
if isinstance(review, str):
self.review_frame.set_results(True)
self.review_frame.set_display_text(review)
# Metacritic frame
elif isinstance(review, object):
default_image = QPixmap(os.path.dirname(__file__)+'/info-icon.png')
if not review.has_review:
self.metacritic_frame.default_metacritic_content(default_image)
else:
try:
album_image = urllib.request.urlopen(review.img_url).read()
except:
album_image = default_image
review.pixmap = album_image
self.metacritic_frame.add_metacritic_content(review)
print('\n-----Metacritic Results-----')
print(review.artist, ' - ', review.album)
print('Critic Score:\t', review.critic_rating, '\t(',review.critic_count,' reviews)')
print('User Score:\t', review.user_rating, '\t(',review.user_count,' reviews)\n')
def update_images_frame(self, images):
"""Images handler.
Args:
images (list) -- List of QPixmaps
"""
if len(images) > 0:
# add image scrolling buttons
self.images_frame.create_image_buttons()
self.images_frame.get_image_next_button().clicked.connect(self.next_image_handler)
self.images_frame.get_image_prev_button().clicked.connect(self.prev_image_handler)
self.images_frame.get_image_next_button().show()
self.images_frame.get_image_prev_button().show()
# add the flickr images
self.images_frame.add_flickr_artist_images(images)
def lyrics_handler(self, reply):
"""Lyrics handler.
Args:
reply (object) -- QNetworkReply
"""
er = reply.error()
if er == QtNetwork.QNetworkReply.NoError:
if reply.rawHeader(QByteArray(b'Status')) == '301 Moved Permanently':
qbyteurl = reply.rawHeader(QByteArray(b'Location'))
url = ''
for q in qbyteurl:
url += q
# parse the html for lyrics
self.set_lyrics(url)
elif reply.rawHeader(QByteArray(b'Status') != '200 OK'):
print('response not a 301 or 200. it is: ', reply.rawHeader(QByteArray(b'Status')))
else:
self.set_lyrics(url='', lyrics_exist=False)
def news_handler(self, reply):
"""News handler.
Args:
reply (object) -- QNetworkReply
"""
default_img = os.path.dirname(__file__) + '/info-icon.png'
results = {}
er = reply.error()
if er == QtNetwork.QNetworkReply.NoError:
response = reply.readAll()
document = QJsonDocument()
error = QJsonParseError()
document = document.fromJson(response, error)
json_resp = document.object()
if len(json_resp['summary'].toObject()['errors'].toArray()) == 0 \
and json_resp['summary'].toObject()['result_count'].toInt() > 0:
counter = 0
resultlist = []
for r in json_resp['results'].toArray():
if counter < 1:
r = r.toObject()
results['author'], name = [], ''
if r['author_info'] != '':
try:
if r['author_info'].toObject()['name'] != '':
name = r['author_info'].toObject()['name'].toString()
except:
name = ''
results['author'] = name
results['source'], avatar, title = [],'',''
if r['source'] != '':
if r['source'].toObject()['title'] != '':
results['src_title'] = r['source'].toObject()['title'].toString()
if r['source'].toObject()['avatar'].toString() != '':
avatar = r['source'].toObject()['avatar'].toString()
results['source'].extend([avatar, title])
results['date'], year, month, day = '','','',''
if r['publish_date'] != '':
try:
if str(r['publish_date'].toObject()['year'].toInt()) != '':
year = str(r['publish_date'].toObject()['year'].toInt())
if str(r['publish_date'].toObject()['month'].toInt()) != '':
month = str(r['publish_date'].toObject()['month'].toInt())
if str(r['publish_date'].toObject()['day'].toInt()) != '':
day = str(r['publish_date'].toObject()['day'].toInt())
except:
year, month, day = '0000', '00', '00'
results['date'] = year +'-'+ month +'-'+ day
results['mkid'] = ''
if str(r['mkid'].toInt()) != '':
results['mkid'] = str(r['mkid'].toInt())
results['title'] = ''
if r['title'].toString() != '':
results['title'] = r['title'].toString()
results['newsurl'] = ''
if r['url'].toString() != '':
results['newsurl'] = r['url'].toString()
results['summary'] = ''
if r['summary'].toString() != '':
results['summary'] = r['summary'].toString()
results['imgurl'] = ''
if r['image'].toString() != '':
results['imgurl'] = r['image'].toString()
try:
url = results['imgurl']
r = requests.get(url, stream=True)
filename = os.path.dirname(__file__)+'/images/'+results['title']+'.jpg'
with open(filename, 'wb') as fd:
for chunk in r.iter_content(chunk_size=128):
fd.write(chunk)
results['newsicon'] = QPixmap(filename)
except BaseException as e:
print(e)
results['newsicon'] = QPixmap(default_img)
else:
break
resultlist.append(results)
counter += 1
# end for
results['found'] = True
try:
results['newsicon'] = results['newsicon'] if results['newsicon'] else QPixmap(default_img)
except:
results['newsicon'] = QPixmap(default_img)
self.news_frame.add_news(results)
#end if
else:
print('No news found')
results['found'] = False
results['message'] = 'No news for this artist.'
self.news_frame.add_news(results, QPixmap(default_img))
else:
print('No news found')
results['found'] = False
results['message'] = 'No news for this artist.'
self.news_frame.add_news(results, QPixmap(default_img))
def search_bio_handler(self, reply):
"""Biography handler.
Args:
reply (object) -- QNetworkReply
"""
er = reply.error()
if er == QtNetwork.QNetworkReply.NoError:
response = reply.readAll()
document = QJsonDocument()
error = QJsonParseError()
document = document.fromJson(response, error)
json_resp = document.object()
bio = ''
for f in json_resp['full'].toArray():
f = f.toObject()
paragraph = ''
for i, t in enumerate(f['text'].toArray()):
t = t.toString()
paragraph += f['title'].toString()+'\n\n'+t.rstrip() if i==0 else (' '+t.rstrip())
bio += paragraph + '\n\n'
self.bio_frame.set_results(True)
self.bio_frame.set_display_text(bio, 10, 45, 'bio_text')
else:
self.bio_frame.set_display_text('No artist bio found.', 10, 45)
def musikki_images_handler(self, reply):
"""Musikki images handler.
Args:
reply (object) -- QNetworkReply
"""
urls, pixmaps, widths, heights = [], [], [], []
er = reply.error()
notfound_count = 0
if er == QtNetwork.QNetworkReply.NoError:
response = reply.readAll()
document = QJsonDocument()
error = QJsonParseError()
document = document.fromJson(response, error)
json_resp = document.object()
if len(json_resp['results'].toArray()) > 0:
f = json_resp['results'].toArray()
thumb = f[0].toObject()['thumbnails'].toArray()[0].toObject()
thumb_url = thumb['url'].toString()
thumb_width = thumb['width'].toInt()
thumb_height = thumb['height'].toInt()
try:
context = ssl._create_unverified_context()
data = urlopen(thumb_url, context=context).read()
pixmap = QPixmap()
pixmap.loadFromData(data)
pixmaps.append(pixmap)
except:
notfound_count += 1
urls.append(thumb_url)
widths.append(thumb_width)
heights.append(thumb_height)
if notfound_count > 0:
print(notfound_count, " 404 responses in image handler")
if len(pixmaps) > 0:
# load the biggest image as the first and only pixmap
biggest = 0
for i, p in enumerate(pixmaps):
if p.width() > biggest:
biggest = i
pixmaps[0] = pixmaps[biggest]
widths[0] = widths[biggest]
heights[0] = heights[biggest]
self.images_frame.add_musikki_artist_images(pixmaps, widths, heights)
def social_handler(self, reply):
"""Social handler.
Args:
reply (object) -- QNetworkReply
"""
er = reply.error()
if er == QtNetwork.QNetworkReply.NoError:
response = reply.readAll()
document = QJsonDocument()
error = QJsonParseError()
document = document.fromJson(response, error)
json_resp = document.object()
found = True
try:
service_name = json_resp['service_name'].toString()
except:
found = False
service_name = ''
try:
year = json_resp['timeline_posts'].toArray()[0].toObject()['date'].toObject()['year'].toInt()
month = json_resp['timeline_posts'].toArray()[0].toObject()['date'].toObject()['month'].toInt()
day = json_resp['timeline_posts'].toArray()[0].toObject()['date'].toObject()['day'].toInt()
except:
year, month, day = 0000, 00, 00
date = str(month) + '/' + str(day) + '/' + str(year)
try:
content = json_resp['timeline_posts'].toArray()[0].toObject()['content'].toString()
except:
content = ''
social_text = date + ' - via ' + service_name + '\n\n' + content
if found:
self.social_frame.set_display_text(social_text, 10, 45, 'social_text')
self.musikki_artist.twitter_search = False
elif not found:
self.social_frame.set_display_text('No social media found.', 10, 45)
self.musikki_artist.facebook_search = False
elif self.musikki_artist.facebook_search == False:
self.musikki_artist.get_social_media_facebook(self.social_nam)
else:
self.social_frame.set_display_text('No social media found.', 10, 45)
self.musikki_artist.facebook_search = False
def next_image_handler(self):
self.images_frame.next_image()
def prev_image_handler(self):
self.images_frame.prev_image()
def update_playback_display(self):
"""
Playback handler.
"""
if self.current_playing != self.get_current_playing():
if (self.current_artist == self.get_current_artist() and
self.current_song != self.get_current_song()):
if self.current_album != self.get_current_album():
print('Album change...')
self.update_album_info(update_playback=True)
self.update_song_info(update_playback=False)
else:
print('Song change...')
self.update_song_info(update_playback=True)
else:
print('Artist and song change...')
self.update_everything()
elif (self.current_artist == self.get_current_artist() and
self.current_album != self.get_current_album()):
print('Album changed but song & artist did not...')
self.update_album_info(update_playback=True)
self.update_song_info(update_playback=False)
def expand_bio(self):
if self.bio_frame.has_results():
self.build_popup(self.bio_frame)
else:
print('No bio results, so no bio popup')
def expand_lyrics(self):
if self.lyrics_frame.has_results():
self.build_popup(self.lyrics_frame)
else:
print('No lyrics results, so no lyrics popup')
def expand_review(self):
if self.review_frame.has_results():
self.build_popup(self.review_frame)
else:
print('No review results, so no review popup')
def build_popup(self, source_frame):
"""Build a SingleFrameWindow popup window.
Args:
source_frame (object) -- model.Frame is the content for the popup
"""
offset = 50
self.popup_window = view.SingleFrameWindow(self.screen_width, self.screen_height)
self.popup_window.init_popup(
self.window_x-offset, self.window_y-offset, source_frame.display_title, 'single_frame_window'
)
source_frame.create_popup(self.popup_window)
self.popup_window.add_frame(source_frame)
self.popup_window.show()
def open_spotify(self):
spotify = playback.Playback()
return spotify
def play_pause(self):
self.spotify.play_pause()
def next(self):
self.spotify.next()
def prev(self):
self.spotify.prev()
def pause(self):
self.spotify.pause()
def get_current_artist(self):
return self.spotify.get_current_artist()
def get_current_song(self):
return self.spotify.get_current_song()
def get_current_album(self):
return self.spotify.get_current_album()
def get_current_playing(self):
return self.get_current_artist() + ' - ' + self.get_current_song()
class Listener(QThread):
    """Listener object that can run playback synchronization threads.

    Emits the ``song_change`` signal whenever Spotify's currently playing
    track differs from the last track seen.

    Args:
        stored_song (str) -- formatted string (Artist - Song Title)
        spotify (object) -- playback.Playback object which connects and talks to Spotify
    """
    # Qt signal fired on every detected track change.
    song_change = pyqtSignal()

    def __init__(self, stored_song, spotify):
        super().__init__()
        self.stored_song = stored_song.rstrip()
        self.spotify = spotify
        # start a synchronization thread that will close when app does.
        # daemon=True in the constructor replaces the deprecated
        # Thread.setDaemon() call (deprecated since Python 3.10).
        self.playback_sync_thread = Thread(target=self.sync_playback, daemon=True)

    def run(self):
        """Start the background synchronization thread."""
        self.playback_sync_thread.start()

    def sync_playback(self):
        """
        Every 1 second, check the stored_song against what Spotify is currently playing.
        Emits song_change and updates stored_song when they differ.
        """
        while True:
            if self.stored_song != self.spotify.get_current_playing().rstrip():
                self.song_change.emit()
                self.stored_song = self.spotify.get_current_playing().rstrip()
            sleep(1)
|
from .const import (
ATTR_ACCELERATION,
ATTR_ALARM,
ATTR_BATTERY,
ATTR_CARBON_MONOXIDE,
ATTR_CODE_CHANGED,
ATTR_CODE_LENGTH,
ATTR_COLOR_MODE,
ATTR_COLOR_NAME,
ATTR_COLOR_TEMP,
ATTR_CONTACT,
ATTR_CURRENT,
ATTR_DEVICE_ID,
ATTR_DOOR,
ATTR_DOUBLE_TAPPED,
ATTR_ENERGY,
ATTR_ENERGY_SOURCE,
ATTR_ENTRY_DELAY,
ATTR_EXIT_DELAY,
ATTR_HELD,
ATTR_HUE,
ATTR_HUMIDITY,
ATTR_ILLUMINANCE,
ATTR_LAST_CODE_NAME,
ATTR_LEVEL,
ATTR_LOCK,
ATTR_LOCK_CODES,
ATTR_MAX_CODES,
ATTR_MOTION,
ATTR_NAME,
ATTR_NUM_BUTTONS,
ATTR_POSITION,
ATTR_POWER,
ATTR_POWER_SOURCE,
ATTR_PRESENCE,
ATTR_PRESSURE,
ATTR_PUSHED,
ATTR_SATURATION,
ATTR_SECURITY_KEYPAD,
ATTR_SMOKE,
ATTR_SPEED,
ATTR_SWITCH,
ATTR_TEMPERATURE,
ATTR_UV,
ATTR_VALUE,
ATTR_VOLTAGE,
ATTR_WATER,
ATTR_WINDOW_SHADE,
CAP_ALARM,
CAP_COLOR_CONTROL,
CAP_COLOR_MODE,
CAP_COLOR_TEMP,
CAP_CONTACT_SENSOR,
CAP_DOOR_CONTROL,
CAP_DOUBLE_TAPABLE_BUTTON,
CAP_ENERGY_METER,
CAP_ENERGY_SOURCE,
CAP_FAN_CONTROL,
CAP_GARAGE_DOOR_CONTROL,
CAP_HOLDABLE_BUTTON,
CAP_ILLUMINANCE_MEASUREMENT,
CAP_LIGHT,
CAP_LOCK,
CAP_LOCK_CODES,
CAP_MOTION_SENSOR,
CAP_MUSIC_PLAYER,
CAP_POWER_METER,
CAP_POWER_SOURCE,
CAP_PRESENCE_SENSOR,
CAP_PRESSURE_MEASUREMENT,
CAP_PUSHABLE_BUTTON,
CAP_RELATIVE_HUMIDITY_MEASUREMENT,
CAP_SECURITY_KEYPAD,
CAP_SWITCH,
CAP_SWITCH_LEVEL,
CAP_TEMPERATURE_MEASUREMENT,
CAP_THERMOSTAT,
CAP_WINDOW_SHADE,
CMD_ARM_AWAY,
CMD_ARM_HOME,
CMD_ARM_NIGHT,
CMD_AUTO,
CMD_AWAY,
CMD_BOTH,
CMD_CLOSE,
CMD_COOL,
CMD_CYCLE_SPEED,
CMD_DELETE_CODE,
CMD_DISARM,
CMD_ECO,
CMD_EMERGENCY_HEAT,
CMD_FAN_AUTO,
CMD_FAN_CIRCULATE,
CMD_FAN_ON,
CMD_FLASH,
CMD_GET_CODES,
CMD_HEAT,
CMD_LOCK,
CMD_OFF,
CMD_ON,
CMD_OPEN,
CMD_PRESENT,
CMD_SET_CODE,
CMD_SET_CODE_LENGTH,
CMD_SET_COLOR,
CMD_SET_COLOR_TEMP,
CMD_SET_COOLING_SETPOINT,
CMD_SET_ENTRY_DELAY,
CMD_SET_EXIT_DELAY,
CMD_SET_FAN_MODE,
CMD_SET_HEATING_SETPOINT,
CMD_SET_HUE,
CMD_SET_LEVEL,
CMD_SET_POSITION,
CMD_SET_PRESENCE,
CMD_SET_SAT,
CMD_SET_SPEED,
CMD_SET_THERMOSTAT_MODE,
CMD_SIREN,
CMD_STROBE,
CMD_UNLOCK,
COLOR_MODE_CT,
COLOR_MODE_RGB,
DEFAULT_FAN_SPEEDS,
HSM_ARM_ALL,
HSM_ARM_AWAY,
HSM_ARM_HOME,
HSM_ARM_NIGHT,
HSM_ARM_RULES,
HSM_CANCEL_ALERTS,
HSM_DISARM,
HSM_DISARM_ALL,
HSM_DISARM_RULES,
HSM_STATUS_ALL_DISARMED,
HSM_STATUS_ARMED_AWAY,
HSM_STATUS_ARMED_HOME,
HSM_STATUS_ARMED_NIGHT,
HSM_STATUS_ARMING_AWAY,
HSM_STATUS_ARMING_HOME,
HSM_STATUS_ARMING_NIGHT,
HSM_STATUS_DISARMED,
ID_HSM_STATUS,
ID_MODE,
STATE_ARMED_AWAY,
STATE_ARMED_HOME,
STATE_ARMED_NIGHT,
STATE_CLOSED,
STATE_CLOSING,
STATE_DISARMED,
STATE_LOCKED,
STATE_LOW,
STATE_OFF,
STATE_ON,
STATE_OPEN,
STATE_OPENING,
STATE_PARTIALLY_OPEN,
STATE_UNKNOWN,
STATE_UNLOCKED,
STATE_UNLOCKED_WITH_TIMEOUT,
)
from .error import ConnectionError, InvalidConfig, InvalidToken, RequestError
from .hub import Hub
from .types import Attribute, Device, Event
# Package version string.
__version__ = "0.5.7"
# Explicit public API: every constant re-exported from .const, plus the
# error classes, Hub, and the Attribute/Device/Event types imported above.
__all__ = [
    "ATTR_ACCELERATION",
    "ATTR_ALARM",
    "ATTR_BATTERY",
    "ATTR_CARBON_MONOXIDE",
    "ATTR_CODE_CHANGED",
    "ATTR_CODE_LENGTH",
    "ATTR_COLOR_MODE",
    "ATTR_COLOR_NAME",
    "ATTR_COLOR_TEMP",
    "ATTR_CONTACT",
    "ATTR_CURRENT",
    "ATTR_DEVICE_ID",
    "ATTR_DOOR",
    "ATTR_DOUBLE_TAPPED",
    "ATTR_ENERGY",
    "ATTR_ENERGY_SOURCE",
    "ATTR_ENTRY_DELAY",
    "ATTR_EXIT_DELAY",
    "ATTR_HELD",
    "ATTR_HUE",
    "ATTR_HUMIDITY",
    "ATTR_ILLUMINANCE",
    "ATTR_LAST_CODE_NAME",
    "ATTR_LEVEL",
    "ATTR_LOCK",
    "ATTR_LOCK_CODES",
    "ATTR_MAX_CODES",
    "ATTR_MOTION",
    "ATTR_NAME",
    "ATTR_NUM_BUTTONS",
    "ATTR_POSITION",
    "ATTR_POWER",
    "ATTR_POWER_SOURCE",
    "ATTR_PRESENCE",
    "ATTR_PRESSURE",
    "ATTR_PUSHED",
    "ATTR_SATURATION",
    "ATTR_SECURITY_KEYPAD",
    "ATTR_SMOKE",
    "ATTR_SPEED",
    "ATTR_SWITCH",
    "ATTR_TEMPERATURE",
    "ATTR_UV",
    "ATTR_VALUE",
    "ATTR_VOLTAGE",
    "ATTR_WATER",
    "ATTR_WINDOW_SHADE",
    "Attribute",
    "CAP_ALARM",
    "CAP_COLOR_CONTROL",
    "CAP_COLOR_MODE",
    "CAP_COLOR_TEMP",
    "CAP_CONTACT_SENSOR",
    "CAP_DOOR_CONTROL",
    "CAP_DOUBLE_TAPABLE_BUTTON",
    "CAP_ENERGY_METER",
    "CAP_ENERGY_SOURCE",
    "CAP_FAN_CONTROL",
    "CAP_GARAGE_DOOR_CONTROL",
    "CAP_HOLDABLE_BUTTON",
    "CAP_ILLUMINANCE_MEASUREMENT",
    "CAP_LIGHT",
    "CAP_LOCK",
    "CAP_LOCK_CODES",
    "CAP_MOTION_SENSOR",
    "CAP_MUSIC_PLAYER",
    "CAP_POWER_METER",
    "CAP_POWER_SOURCE",
    "CAP_PRESENCE_SENSOR",
    "CAP_PRESSURE_MEASUREMENT",
    "CAP_PUSHABLE_BUTTON",
    "CAP_RELATIVE_HUMIDITY_MEASUREMENT",
    "CAP_SECURITY_KEYPAD",
    "CAP_SWITCH",
    "CAP_SWITCH_LEVEL",
    "CAP_TEMPERATURE_MEASUREMENT",
    "CAP_THERMOSTAT",
    "CAP_WINDOW_SHADE",
    "CMD_ARM_AWAY",
    "CMD_ARM_HOME",
    "CMD_ARM_NIGHT",
    "CMD_AUTO",
    "CMD_AWAY",
    "CMD_BOTH",
    "CMD_CLOSE",
    "CMD_COOL",
    "CMD_CYCLE_SPEED",
    "CMD_DELETE_CODE",
    "CMD_DISARM",
    "CMD_ECO",
    "CMD_EMERGENCY_HEAT",
    "CMD_FAN_AUTO",
    "CMD_FAN_CIRCULATE",
    "CMD_FAN_ON",
    "CMD_FLASH",
    "CMD_GET_CODES",
    "CMD_HEAT",
    "CMD_LOCK",
    "CMD_OFF",
    "CMD_ON",
    "CMD_OPEN",
    "CMD_PRESENT",
    "CMD_SET_CODE",
    "CMD_SET_CODE_LENGTH",
    "CMD_SET_COLOR",
    "CMD_SET_COLOR_TEMP",
    "CMD_SET_COOLING_SETPOINT",
    "CMD_SET_ENTRY_DELAY",
    "CMD_SET_EXIT_DELAY",
    "CMD_SET_FAN_MODE",
    "CMD_SET_HEATING_SETPOINT",
    "CMD_SET_HUE",
    "CMD_SET_LEVEL",
    "CMD_SET_POSITION",
    "CMD_SET_PRESENCE",
    "CMD_SET_SAT",
    "CMD_SET_SPEED",
    "CMD_SET_THERMOSTAT_MODE",
    "CMD_SIREN",
    "CMD_STROBE",
    "CMD_UNLOCK",
    "COLOR_MODE_CT",
    "COLOR_MODE_RGB",
    "ConnectionError",
    "DEFAULT_FAN_SPEEDS",
    "Device",
    "Event",
    "HSM_ARM_ALL",
    "HSM_ARM_AWAY",
    "HSM_ARM_HOME",
    "HSM_ARM_NIGHT",
    "HSM_ARM_RULES",
    "HSM_CANCEL_ALERTS",
    "HSM_DISARM",
    "HSM_DISARM_ALL",
    "HSM_DISARM_RULES",
    "HSM_STATUS_ALL_DISARMED",
    "HSM_STATUS_ARMED_AWAY",
    "HSM_STATUS_ARMED_HOME",
    "HSM_STATUS_ARMED_NIGHT",
    "HSM_STATUS_ARMING_AWAY",
    "HSM_STATUS_ARMING_HOME",
    "HSM_STATUS_ARMING_NIGHT",
    "HSM_STATUS_DISARMED",
    "Hub",
    "ID_HSM_STATUS",
    "ID_MODE",
    "InvalidConfig",
    "InvalidToken",
    "RequestError",
    "STATE_ARMED_AWAY",
    "STATE_ARMED_HOME",
    "STATE_ARMED_NIGHT",
    "STATE_CLOSED",
    "STATE_CLOSING",
    "STATE_DISARMED",
    "STATE_LOCKED",
    "STATE_LOW",
    "STATE_OFF",
    "STATE_ON",
    "STATE_OPEN",
    "STATE_OPENING",
    "STATE_PARTIALLY_OPEN",
    "STATE_UNKNOWN",
    "STATE_UNLOCKED",
    "STATE_UNLOCKED_WITH_TIMEOUT",
]
|
"""Themes for plotnine."""
import plotnine as p9
import endktheme.colors
import endktheme.style
def theme_energinet() -> p9.themes.theme:
    """Return the minimal Energinet plotnine theme.

    White backgrounds, black axis lines, no panel grid and no axis ticks,
    with text rendered in the Energinet house font.
    """
    house_font = endktheme.style.font_family()
    return p9.theme(
        text=p9.element_text(family=house_font),
        axis_line=p9.element_line(color="black"),
        plot_background=p9.element_blank(),
        panel_background=p9.element_rect(fill="white"),
        legend_background=p9.element_rect(fill="white"),
        legend_key=p9.element_blank(),
        panel_grid=p9.element_blank(),
        axis_ticks=p9.element_blank(),
    )
def scale_color_energinet(**kwargs) -> p9.scale_color_manual:
    """Return a manual color scale built from the Energinet Excel palette."""
    palette = endktheme.colors.excel()
    return p9.scale_color_manual(values=palette, **kwargs)
def scale_fill_energinet(**kwargs) -> p9.scale_fill_manual:
    """Return a manual fill scale built from the Energinet Excel palette."""
    palette = endktheme.colors.excel()
    return p9.scale_fill_manual(values=palette, **kwargs)
def scale_fill_gradient_energinet(
    low: int = 0, high: int = 2, **kwargs
) -> p9.scale_fill_gradient:
    """
    Create a two-point fill gradient.
    Parameters:
        low (int): Index of low color.
        high (int): Index of high color.
    """
    palette = endktheme.colors.excel()
    low_color, high_color = palette[low], palette[high]
    return p9.scale_fill_gradient(low=low_color, high=high_color, **kwargs)
def scale_color_gradient_energinet(
    low: int = 0, high: int = 2, **kwargs
) -> p9.scale_color_gradient:
    """
    Create a two-point color gradient.
    Parameters:
        low (int): Index of low color.
        high (int): Index of high color.
    """
    palette = endktheme.colors.excel()
    low_color, high_color = palette[low], palette[high]
    return p9.scale_color_gradient(low=low_color, high=high_color, **kwargs)
def scale_fill_gradient2_energinet(
    low: int = 0, mid: int = 1, high: int = 2, **kwargs
) -> p9.scale_fill_gradient2:
    """
    Create a three-point fill gradient.
    Parameters:
        low (int): Index of low color.
        mid (int): Index of middle color.
        high (int): Index of high color.
    """
    palette = endktheme.colors.excel()
    return p9.scale_fill_gradient2(
        low=palette[low],
        mid=palette[mid],
        high=palette[high],
        **kwargs,
    )
def scale_color_gradient2_energinet(
    low: int = 0, mid: int = 1, high: int = 2, **kwargs
) -> p9.scale_color_gradient2:
    """
    Create a three-point color gradient.
    Parameters:
        low (int): Index of low color.
        mid (int): Index of middle color.
        high (int): Index of high color.
    """
    palette = endktheme.colors.excel()
    return p9.scale_color_gradient2(
        low=palette[low],
        mid=palette[mid],
        high=palette[high],
        **kwargs,
    )
|
"""
test_Plant_FinanceSE_gradients.py
Created by Katherine Dykes on 2014-01-07.
Copyright (c) NREL. All rights reserved.
"""
import unittest
import numpy as np
from commonse.utilities import check_gradient_unit_test
from plant_financese.basic_finance.basic_finance import fin_cst_component, fin_cst_assembly
from plant_financese.nrel_csm_fin.nrel_csm_fin import fin_csm_component, fin_csm_assembly
# Basic Finance Tests
class Test_fin_cst_assembly(unittest.TestCase):
    """Regression test for the basic-finance assembly COE calculation."""

    def setUp(self):
        self.fin = fin_cst_assembly()
        # 50-turbine plant; per-turbine cost derived from a total machine cost.
        self.fin.turbine_cost = 6087803.555 / 50
        self.fin.turbine_number = 50
        # Annual OPEX = preventative maintenance + corrective maintenance + land lease.
        self.fin.avg_annual_opex = 401819.023 + 91048.387 + 22225.395
        self.fin.bos_costs = 7668775.3
        self.fin.net_aep = 15756299.843

    def test_functionality(self):
        self.fin.run()
        self.assertEqual(round(self.fin.coe, 4), 0.1307)
class Test_fin_cst_component(unittest.TestCase):
    """Regression tests for the basic-finance component: COE value and gradients."""

    def setUp(self):
        self.fin = fin_cst_component()
        # Same 50-turbine plant inputs as the assembly-level test.
        self.fin.turbine_cost = 6087803.555 / 50
        self.fin.turbine_number = 50
        # Annual OPEX = preventative maintenance + corrective maintenance + land lease.
        self.fin.avg_annual_opex = 401819.023 + 91048.387 + 22225.395
        self.fin.bos_costs = 7668775.3
        self.fin.net_aep = 15756299.843

    def test_functionality(self):
        self.fin.run()
        self.assertEqual(round(self.fin.coe, 4), 0.1307)

    def test_gradient(self):
        check_gradient_unit_test(self, self.fin)
# NREL CSM Finance Tests
class Test_fin_csm_assembly(unittest.TestCase):
    """Regression test for the NREL CSM finance assembly: COE and LCOE."""

    def setUp(self):
        self.fin = fin_csm_assembly()
        self.fin.turbine_cost = 6087803.555 / 50
        self.fin.turbine_number = 50
        # Annual OPEX = preventative + corrective + lease components.
        self.fin.avg_annual_opex = 401819.023 + 91048.387 + 22225.395
        self.fin.bos_costs = 7668775.3
        self.fin.net_aep = 15756299.843

    def test_functionality(self):
        self.fin.run()
        self.assertEqual(round(self.fin.coe, 4), 0.1307)
        self.assertEqual(round(self.fin.lcoe, 4), 0.1231)
class Test_fin_csm_component(unittest.TestCase):
    """Regression tests for the NREL CSM finance component: values and gradients."""

    def setUp(self):
        self.fin = fin_csm_component()
        self.fin.turbine_cost = 6087803.555 / 50
        self.fin.turbine_number = 50
        # Annual OPEX = preventative + corrective + lease components.
        self.fin.avg_annual_opex = 401819.023 + 91048.387 + 22225.395
        self.fin.bos_costs = 7668775.3
        self.fin.net_aep = 15756299.843

    def test_functionality(self):
        self.fin.run()
        self.assertEqual(round(self.fin.coe, 4), 0.1307)
        self.assertEqual(round(self.fin.lcoe, 4), 0.1231)

    def test_gradient(self):
        check_gradient_unit_test(self, self.fin)
# Allow running this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()
|
#-*- encoding:utf-8 -*-
__author__ = 'Nobody'
str = raw_input()
len = len(str)
str = list(str)
for i in range(0, len):
j = i+1
for k in range(j, len):
if(str[i] == str[k]):
str[k] = "\0"
print "".join(str)
|
# DEFAULT
import tkinter
from tkinter import messagebox
class Frame:
    """Main window: prompts for a name and shows a greeting dialog."""

    def __init__(self):
        # Create the main window.
        self.root = tkinter.Tk()
        # BUG FIX: `title` is a method on Tk -- the original assigned to it
        # (self.root.title = "..."), which silently replaced the bound
        # method and left the window showing the default "tk" title.
        self.root.title("Greet the User!!!")
        self.root.geometry("425x100")
        self.root.resizable(0, 0)
        # Prompt label.
        self.label1 = tkinter.Label(self.root, text='Enter your name:')
        self.label1.place(x=20, y=20, in_=self.root)
        # Single-line text entry for the name.
        self.textbox1 = tkinter.Text(self.root, width=32, height=1)
        self.textbox1.place(x=120, y=20, in_=self.root)
        # Button that triggers the greeting.
        self.button1 = tkinter.Button(self.root, text="Click Me!", command=self.onclick_button1)
        self.button1.place(x=200, y=55, in_=self.root)

    def mainloop(self):
        """Run the Tk event loop (blocks until the window is closed)."""
        self.root.mainloop()

    def onclick_button1(self):
        """Show a message box greeting the entered name."""
        # "end-1c" drops the trailing newline the Text widget always appends.
        txt = self.textbox1.get("1.0", "end-1c")
        txt1 = txt + ", Welcome to Python"
        messagebox.showinfo("Hello Message", txt1)
class FrameApplication:
    """Thin launcher: constructs the Frame window and runs its event loop."""

    def __init__(self):
        window = Frame()
        window.mainloop()
# Script entry point: launch the GUI when run directly.
# Fix: removed the stray trailing "|" which made this line a syntax error.
if __name__ == "__main__":
    FrameApplication()
import flask
from flask import Blueprint, session, redirect, url_for, escape, request, abort, g
import flask_login;
from flask.ext.cors import cross_origin
from flask.ext.login import login_required, current_user
import urllib2
import lxml.html
from . import database
import urlparse
import json
from marrow_config import config
import dateutil.parser
bone_blueprint = Blueprint('bone', __name__)

# User-Agent attached to outgoing requests made through the installed opener.
useragent = [
    ('User-agent',
     'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36')]

# Install a single opener carrying both the HTTPS and HTTP handlers.
# The original looped over the handler classes, building and installing a
# fresh opener per iteration, so only the last-installed opener survived.
opener = urllib2.build_opener(urllib2.HTTPSHandler(), urllib2.HTTPHandler())
opener.addheaders = useragent
urllib2.install_opener(opener)
import functools
def as_json(f):
    """Decorator: serialize the wrapped view's return value as JSON.

    The result is packaged into a Flask-style ``(body, status, headers)``
    tuple with a 200 status and a JSON content type.
    """
    @functools.wraps(f)
    def _inner(*args, **kwargs):
        payload = f(*args, **kwargs)
        body = json.dumps(payload)
        return (body, 200, {'Content-Type': 'application/json'})
    return _inner
@bone_blueprint.route('/link/<linkid>', methods=['GET','POST','DELETE'])
@login_required
def delete_link(linkid):
    """Fetch (GET), share (POST) or delete (DELETE) a single link by id.

    Returns a JSON body: a link dict for GET, or a boolean success flag
    for POST/DELETE. POST and DELETE only act when a session username
    exists; otherwise they return false.
    """
    db = database.get_db()
    linkid = int(linkid)
    result = ''
    with db.cursor() as cur:
        if request.method == 'GET':
            cur.execute('SELECT id,url,title,posted FROM links WHERE id=%s', (linkid,))
            nid,url,title,posted = cur.fetchone()
            result = dict(id=nid,url=url,title=title,posted=posted.isoformat())
        elif request.method == 'POST':
            result = False
            if 'username' in session:
                # subscribe_link records the share for the current user.
                cur.execute('SELECT subscribe_link(%s,%s)', (current_user.id,linkid))
                result = cur.fetchone()[0]
        elif request.method == 'DELETE':
            result = False
            if 'username' in session:
                cur.execute('SELECT delete_link(%s,%s)', (current_user.id,linkid))
                result = cur.fetchone()[0]
        # NOTE(review): commit also runs for plain GETs -- harmless, but
        # presumably only needed for POST/DELETE; confirm before changing.
        db.commit()
    return json.dumps(result)
def clean_url(url):
    """Normalize a user-supplied URL, defaulting the scheme to http.

    A bare "example.com/page" parses with an empty netloc and the whole
    string in ``path``. Fix: split only the host off the front of the
    path instead of swapping the entire path into the netloc (the
    original produced invalid netlocs such as "example.com/page").
    """
    scheme, netloc, path, params, query, fragment = urlparse.urlparse(url, 'http')
    if path and not netloc:
        netloc, sep, rest = path.partition('/')
        path = sep + rest
    return urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
def get_siteinfo(url):
    """Resolve *url* through the configured title getter.

    Returns whatever ``config.titlegetter.get_title`` returns; callers
    unpack the result as ``(title, url)``.
    """
    getter = config.titlegetter
    return getter.get_title(url)
@bone_blueprint.route('/vote/total')
@login_required
@as_json
def vote_link_total():
    """Return the total vote count for the link given by ``?url=...``."""
    target = request.args['url'].encode('utf-8')
    db = database.get_db()
    result = dict(success=False, votes=None)
    with db.cursor() as cur:
        cur.callproc('total_votes', (target,))
        total, = cur.fetchone()
    if total is not None:
        result['success'] = True
        result['votes'] = total
    return result
def vote_link_common(vote):
    """Record *vote* (-1, 0 or 1) by the current user on the posted URL.

    Expects a JSON body with a ``url`` key. Returns a JSON response tuple
    with ``success``, ``votes`` (new total) and ``myVote`` (user's vote).
    """
    obj = request.get_json()
    url = obj['url'].encode('utf-8')
    db = database.get_db()
    # Fix: seed the same key the success path fills in. The original
    # initialised 'myvote' but wrote 'myVote', leaving both keys (one
    # always null) in the JSON reply. Leftover debug prints removed.
    result = dict(success=False, votes=None, myVote=None)
    with db.cursor() as cur:
        cur.callproc('vote_link', (url, current_user.id, vote))
        dbresult = cur.fetchone()
        if dbresult is not None:
            myvote, total = dbresult
            result['success'] = True
            result['myVote'] = myvote
            result['votes'] = total
            db.commit()
        else:
            db.rollback()
    return (json.dumps(result), 200, {'Content-Type': 'application/json'})
@bone_blueprint.route('/vote/zero', methods=['POST'])
@login_required
def vote_link_zero():
    """Clear the current user's vote on the posted URL."""
    return vote_link_common(0)
@bone_blueprint.route('/vote/down', methods=['POST'])
@login_required
def vote_link_down():
    """Record a downvote (-1) by the current user on the posted URL."""
    return vote_link_common(-1)
@bone_blueprint.route('/vote/up', methods=['POST'])
@login_required
def vote_link_up():
    """Record an upvote (+1) by the current user on the posted URL."""
    return vote_link_common(1)
@bone_blueprint.route('/add', methods=['POST'])
@bone_blueprint.route('/submit', methods=['POST'])
@cross_origin(allow_headers='Content-Type')
def submit_link():
    """Submit a new link, authenticated by session or by access key.

    JSON body: ``url`` and ``title``, plus optional ``username``/``ak``
    for extension clients. Returns {"success": bool, "id": new link id}.
    """
    result = dict(success=False, id={});
    username = None
    db = database.get_db()
    obj = request.get_json()
    if 'username' in obj:
        _username = obj['username']
        _username = _username.lower().strip()
        # Access-key authentication for non-browser (extension) clients.
        if 'ak' in obj and database.check_ak(db, _username, obj['ak']):
            username = _username
        else:
            abort(401);
            return
    elif 'username' in session:
        username = session['username'] # Note that we need to figure out a better way to do the extension
                                       # auth before we can change this to Flask-Login
    if username is not None:
        url, title = obj['url'],obj['title']
        url = clean_url(url)
        title, url = get_siteinfo(url) # this makes sure that the url is the site's preferred URL
        # TODO: this might need sanity checks . . . like make sure same site?
        with db.cursor() as cur:
            cur.callproc('put_link', (username, url, title))
            ## This returns (link_id, user_id)
            res = cur.fetchone()
            if cur.rowcount > 0:
                db.commit()
                result['success'] = True
                result['id'] = res[0]
            else:
                db.rollback()
    return json.dumps(result), 200, {'Content-Type':'application/json'}
@bone_blueprint.route('', methods=['GET'])
@login_required
def default_data():
    """Return the logged-in user's own feed (same payload as /u/<username>).

    Fix: removed leftover Python 2 debug print statements.
    """
    return data(current_user.id)
@bone_blueprint.route('/u/<username>', methods=['GET'])
def data(username):
    """Return the public feed ("bone") for *username* as JSON.

    Each entry carries id, url, title, ISO-formatted posted time, vote
    total and whether the requesting user has already shared the link.
    """
    sectionTitle = username
    result = {'marrow':[], 'sectionTitle': sectionTitle}
    with database.get_db().cursor() as cur:
        # Anonymous viewers still get the feed; has_user_shared() is then
        # evaluated against the literal 'anonymous' user.
        cur_username = 'anonymous'
        if current_user.is_authenticated:
            cur_username = current_user.id
        cur.execute("SELECT url, title, posted, linkid, votes, has_user_shared(%s, url) from get_bone(%s);",
                    (cur_username, username,))
        result['marrow'] = [
            dict(id=linkid, url=url,title=title,posted=posted.isoformat(),votes=votes,shared=shared)
            for url,title,posted,linkid,votes,shared
            in cur.fetchall()
        ]
    return json.dumps(result)
# TODO: rethink variable names here
@bone_blueprint.route('/unsubscribe', methods=['POST'])
@login_required
def unsubscribe():
    """Remove the current user's subscription to the user named in the body.

    JSON body: {"from": <username>}. Returns a JSON boolean success flag.
    """
    payload = request.get_json()
    done = False
    if 'username' in session:
        subscriber = current_user.id
        target = payload['from']
        db = database.get_db()
        with db.cursor() as cur:
            cur.callproc('unsubscribe', (subscriber, target))
        db.commit()
        done = True
    return json.dumps(done)
@bone_blueprint.route('/subscribe', methods=['POST'])
@login_required
def subscribe():
    """Subscribe the current user to the user named in the body.

    JSON body: {"to": <username>}. Returns a JSON boolean success flag.
    """
    payload = request.get_json()
    done = False
    if 'username' in session:
        subscriber = current_user.id
        target = payload['to']
        db = database.get_db()
        with db.cursor() as cur:
            cur.callproc('subscribe', (subscriber, target))
        db.commit()
        done = True
    return json.dumps(done)
@bone_blueprint.route('/subscriptions', defaults={'before':None, 'count': None})
@bone_blueprint.route('/subscriptions/<before>', defaults={'count': None})
@bone_blueprint.route('/subscriptions/count/<int:count>', defaults={'before': None})
@login_required
@cross_origin(allow_headers='Content-Type')
def subscriptions(before, count):
    """Return the current user's subscription feed as JSON.

    ``before`` (ISO timestamp) pages backwards; ``count`` caps the page
    size (default 50, capped at 200). Auth is either the session user or
    a username plus a one-time access key in the query string.
    """
    result = {'marrow':[], 'sectionTitle': 'Subscriptions'}
    username = None
    db = database.get_db()
    with db: # Start transaction to make sure that the ak really is deleted.
             # Otherwise, a malicious attacker could sniff the ak and reuse
             # it.
        if 'username' in request.args:
            username = request.args['username']
            # Both args are required; check_ak consumes the one-time key.
            if 'ak' not in request.args: username = None
            elif not database.check_ak(db, username, request.args['ak']): username = None
        if username is None and 'username' in session:
            username = current_user.id
        if username is not None:
            with db.cursor() as cur:
                if count is None or count > 200: count = 50 # 50 results or up to 200 results
                args = (username,count)
                if before is not None:
                    before = dateutil.parser.parse(before)
                    args = args + (before,)
                cur.callproc("get_bones", args)
                result['marrow'] = [
                    dict(id=id,poster=poster, url=url,title=title,posted=posted.isoformat(), votes=votes,
                         myVote=myvote, shared=shared)
                    for id,url,title,posted,poster,votes,myvote,shared
                    in cur.fetchall()
                ]
    return (json.dumps(result), 200, {'Content-Type': 'application/json'})
import random
@bone_blueprint.route('/random')
@login_required
def random():
    """Redirect to a random user's feed.

    NOTE(review): this view shadows the module-level ``import random``
    (which is otherwise unused here -- the shuffling happens in SQL via
    ``ORDER BY random()``). Renaming it would change the Flask endpoint,
    so the shadowing is left as-is.
    """
    db = database.get_db()
    with db.cursor() as cur:
        if 'username' in session:
            # Exclude the requesting user and, if given, the feed they
            # were just looking at (?last=...).
            exclude = [current_user.id]
            if 'last' in request.args:
                exclude.append(request.args['last'])
            cur.execute(
                'SELECT name FROM users WHERE name NOT IN %s ORDER BY random() LIMIT 1',
                (tuple(exclude),)
            )
        else:
            cur.execute('SELECT name FROM users ORDER BY random() LIMIT 1')
        username = cur.fetchone()[0]
    return redirect(url_for('bone.data', username=username))
|
# -*- coding: utf-8 -*-
from qcloudsdkcore.request import Request
class CreateLVBChannelRequest(Request):
    """Request object for the Qcloud Live ``CreateLVBChannel`` API call.

    Each get_*/set_* pair reads from / writes to the underlying request
    parameter dict (via ``get_params``/``add_param``) for one API field.
    """

    def __init__(self):
        # Service 'live', CLI version, action name and API endpoint host.
        super(CreateLVBChannelRequest, self).__init__(
            'live', 'qcloudcliV1', 'CreateLVBChannel', 'live.api.qcloud.com')

    def get_channelDescribe(self):
        return self.get_params().get('channelDescribe')

    def set_channelDescribe(self, channelDescribe):
        self.add_param('channelDescribe', channelDescribe)

    def get_channelName(self):
        return self.get_params().get('channelName')

    def set_channelName(self, channelName):
        self.add_param('channelName', channelName)

    def get_outputRate(self):
        return self.get_params().get('outputRate')

    def set_outputRate(self, outputRate):
        self.add_param('outputRate', outputRate)

    def get_outputSourceType(self):
        return self.get_params().get('outputSourceType')

    def set_outputSourceType(self, outputSourceType):
        self.add_param('outputSourceType', outputSourceType)

    def get_playerPassword(self):
        return self.get_params().get('playerPassword')

    def set_playerPassword(self, playerPassword):
        self.add_param('playerPassword', playerPassword)

    def get_sourceList(self):
        return self.get_params().get('sourceList')

    def set_sourceList(self, sourceList):
        self.add_param('sourceList', sourceList)

    def get_watermarkId(self):
        return self.get_params().get('watermarkId')

    def set_watermarkId(self, watermarkId):
        self.add_param('watermarkId', watermarkId)
|
from __future__ import division, print_function, absolute_import
__author__ = 'Alex Rogozhnikov'
# Dirty hack so things are importable in tests without installing:
# prepend the parent directory (relative to the working directory) to
# sys.path so the in-tree package resolves ahead of any installed copy.
import sys
sys.path.insert(0, '..')
|
from datetime import datetime
from typing import Any
from typing import Callable
from typing import Dict
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import Type
import warnings
from optuna import TrialPruned
from optuna._imports import try_import
from optuna.distributions import BaseDistribution
from optuna.distributions import CategoricalChoiceType
from optuna.storages import InMemoryStorage
from optuna.storages import RDBStorage
from optuna.study import Study
from optuna.trial import BaseTrial
from optuna.trial import Trial
# Defer the optional ChainerMN import: _imports records a failure so that
# later usage can raise a helpful error instead of failing at import time.
with try_import() as _imports:
    from chainermn.communicators.communicator_base import CommunicatorBase  # NOQA
class _ChainerMNObjectiveFunc(object):
    """A wrapper of an objective function to incorporate Optuna with ChainerMN.

    Note that this class is not supposed to be used by library users.

    Args:
        func:
            A callable that implements objective function.
        comm:
            A `ChainerMN communicator <https://docs.chainer.org/en/stable/chainermn/reference/
            index.html#communicators>`_.
    """

    def __init__(
        self,
        func: Callable[["ChainerMNTrial", "CommunicatorBase"], float],
        comm: "CommunicatorBase",
    ) -> None:
        self.comm = comm
        self.objective = func

    def __call__(self, trial: Trial) -> float:
        # Tell the non-rank0 workers waiting in ChainerMNStudy.optimize()
        # that another trial is starting, then evaluate on rank 0.
        self.comm.mpi_comm.bcast(True)
        return self.objective(ChainerMNTrial(trial, self.comm), self.comm)
class ChainerMNStudy(object):
    """A wrapper of :class:`~optuna.study.Study` to incorporate Optuna with ChainerMN.

    .. seealso::
        :class:`~optuna.integration.chainermn.ChainerMNStudy` provides the same interface as
        :class:`~optuna.study.Study`. Please refer to :class:`optuna.study.Study` for further
        details.

    See `the example <https://github.com/optuna/optuna/blob/master/
    examples/pruning/chainermn_integration.py>`__
    if you want to optimize an objective function that trains neural network
    written with ChainerMN.

    Args:
        study:
            A :class:`~optuna.study.Study` object.
        comm:
            A `ChainerMN communicator <https://docs.chainer.org/en/stable/chainermn/reference/
            index.html#communicators>`_.
    """

    def __init__(self, study: Study, comm: "CommunicatorBase") -> None:
        _imports.check()
        # In-memory storage cannot be shared across MPI workers.
        if isinstance(study._storage, InMemoryStorage):
            raise ValueError("ChainerMN integration is not available with InMemoryStorage.")
        if isinstance(study._storage, RDBStorage):
            if study._storage.engine.dialect.name == "sqlite":
                warnings.warn(
                    "SQLite may cause synchronization problems when used with "
                    "ChainerMN integration. Please use other DBs like PostgreSQL."
                )
        # All workers must be operating on the same study.
        study_names = comm.mpi_comm.allgather(study.study_name)
        if len(set(study_names)) != 1:
            raise ValueError("Please make sure an identical study name is shared among workers.")
        # Bypass this class's own __setattr__ (which forwards to the
        # delegate) when initializing the wrapper's own attributes.
        super(ChainerMNStudy, self).__setattr__("delegate", study)
        super(ChainerMNStudy, self).__setattr__("comm", comm)

    def optimize(
        self,
        func: Callable[["ChainerMNTrial", "CommunicatorBase"], float],
        n_trials: Optional[int] = None,
        timeout: Optional[float] = None,
        catch: Tuple[Type[Exception], ...] = (),
    ) -> None:
        """Optimize an objective function.

        This method provides the same interface as :func:`optuna.study.Study.optimize` except
        the absence of ``n_jobs`` argument.
        """
        if self.comm.rank == 0:
            # Rank 0 drives optimization; _ChainerMNObjectiveFunc
            # broadcasts True before each trial so other ranks join in.
            func_mn = _ChainerMNObjectiveFunc(func, self.comm)
            try:
                self.delegate.optimize(func_mn, n_trials=n_trials, timeout=timeout, catch=catch)
            finally:
                # Always signal the waiting workers that optimization is over.
                self.comm.mpi_comm.bcast(False)
        else:
            # Non-rank0 workers: each broadcast True means "run one more
            # trial"; a broadcast False terminates the loop.
            has_next_trial = self.comm.mpi_comm.bcast(None)
            while True:
                if not has_next_trial:
                    break
                try:
                    func(ChainerMNTrial(None, self.comm), self.comm)
                    # We assume that if a node raises an exception,
                    # all other nodes will do the same.
                    #
                    # The responsibility to handle acceptable exceptions (i.e., `TrialPruned` and
                    # `catch`) is in the rank-0 node, so other nodes simply ignore them.
                except TrialPruned:
                    pass
                except catch:
                    pass
                finally:
                    has_next_trial = self.comm.mpi_comm.bcast(None)

    def __getattr__(self, attr_name: str) -> Any:
        # Forward any unknown attribute access to the wrapped Study.
        return getattr(self.delegate, attr_name)

    def __setattr__(self, attr_name: str, value: Any) -> None:
        # Forward attribute assignment to the wrapped Study.
        setattr(self.delegate, attr_name, value)
class ChainerMNTrial(BaseTrial):
    """A wrapper of :class:`~optuna.trial.Trial` to incorporate Optuna with ChainerMN.

    .. seealso::
        :class:`~optuna.integration.chainermn.ChainerMNTrial` provides the same interface as
        :class:`~optuna.trial.Trial`. Please refer to :class:`optuna.trial.Trial` for further
        details.

    Every suggest/report/attribute access is performed on rank 0 only and
    the result is broadcast to the other workers via ``_call_with_mpi``,
    so all ranks observe identical values.

    Args:
        trial:
            A :class:`~optuna.trial.Trial` object if the caller is rank0 worker,
            :obj:`None` otherwise.
        comm:
            A `ChainerMN communicator <https://docs.chainer.org/en/stable/chainermn/reference/
            index.html#communicators>`_.
    """

    def __init__(self, trial: Optional[Trial], comm: "CommunicatorBase") -> None:
        # delegate is None on every rank except 0 (see class docstring).
        self.delegate = trial
        self.comm = comm

    def suggest_float(
        self,
        name: str,
        low: float,
        high: float,
        *,
        step: Optional[float] = None,
        log: bool = False
    ) -> float:
        def func() -> float:
            assert self.delegate is not None
            return self.delegate.suggest_float(name, low, high, log=log, step=step)
        return self._call_with_mpi(func)

    def suggest_uniform(self, name: str, low: float, high: float) -> float:
        def func() -> float:
            assert self.delegate is not None
            return self.delegate.suggest_uniform(name, low, high)
        return self._call_with_mpi(func)

    def suggest_loguniform(self, name: str, low: float, high: float) -> float:
        def func() -> float:
            assert self.delegate is not None
            return self.delegate.suggest_loguniform(name, low, high)
        return self._call_with_mpi(func)

    def suggest_discrete_uniform(self, name: str, low: float, high: float, q: float) -> float:
        def func() -> float:
            assert self.delegate is not None
            return self.delegate.suggest_discrete_uniform(name, low, high, q)
        return self._call_with_mpi(func)

    def suggest_int(self, name: str, low: int, high: int, step: int = 1, log: bool = False) -> int:
        def func() -> int:
            assert self.delegate is not None
            return self.delegate.suggest_int(name, low, high, step=step, log=log)
        return self._call_with_mpi(func)

    def suggest_categorical(self, name: str, choices: Sequence[CategoricalChoiceType]) -> Any:
        def func() -> CategoricalChoiceType:
            assert self.delegate is not None
            return self.delegate.suggest_categorical(name, choices)
        return self._call_with_mpi(func)

    def report(self, value: float, step: int) -> None:
        # Only rank 0 writes to storage; the barrier keeps ranks in step.
        if self.comm.rank == 0:
            assert self.delegate is not None
            self.delegate.report(value, step)
        self.comm.mpi_comm.barrier()

    def should_prune(self) -> bool:
        def func() -> bool:
            assert self.delegate is not None
            return self.delegate.should_prune()
        return self._call_with_mpi(func)

    def set_user_attr(self, key: str, value: Any) -> None:
        # Only rank 0 writes to storage; the barrier keeps ranks in step.
        if self.comm.rank == 0:
            assert self.delegate is not None
            self.delegate.set_user_attr(key, value)
        self.comm.mpi_comm.barrier()

    def set_system_attr(self, key: str, value: Any) -> None:
        # Only rank 0 writes to storage; the barrier keeps ranks in step.
        if self.comm.rank == 0:
            assert self.delegate is not None
            self.delegate.set_system_attr(key, value)
        self.comm.mpi_comm.barrier()

    @property
    def number(self) -> int:
        def func() -> int:
            assert self.delegate is not None
            return self.delegate.number
        return self._call_with_mpi(func)

    @property
    def _trial_id(self) -> int:
        def func() -> int:
            assert self.delegate is not None
            return self.delegate._trial_id
        return self._call_with_mpi(func)

    @property
    def params(self) -> Dict[str, Any]:
        def func() -> Dict[str, Any]:
            assert self.delegate is not None
            return self.delegate.params
        return self._call_with_mpi(func)

    @property
    def distributions(self) -> Dict[str, BaseDistribution]:
        def func() -> Dict[str, BaseDistribution]:
            assert self.delegate is not None
            return self.delegate.distributions
        return self._call_with_mpi(func)

    @property
    def user_attrs(self) -> Dict[str, Any]:
        def func() -> Dict[str, Any]:
            assert self.delegate is not None
            return self.delegate.user_attrs
        return self._call_with_mpi(func)

    @property
    def system_attrs(self) -> Dict[str, Any]:
        def func() -> Dict[str, Any]:
            assert self.delegate is not None
            return self.delegate.system_attrs
        return self._call_with_mpi(func)

    @property
    def datetime_start(self) -> Optional[datetime]:
        def func() -> Optional[datetime]:
            assert self.delegate is not None
            return self.delegate.datetime_start
        return self._call_with_mpi(func)

    def _call_with_mpi(self, func: Callable) -> Any:
        # Rank 0 executes func() and broadcasts either the result or the
        # raised exception; other ranks receive it and re-raise exceptions
        # locally so every rank fails in the same way.
        if self.comm.rank == 0:
            try:
                result = func()
                self.comm.mpi_comm.bcast(result)
                return result
            except Exception as e:
                self.comm.mpi_comm.bcast(e)
                raise
        else:
            result = self.comm.mpi_comm.bcast(None)
            if isinstance(result, Exception):
                raise result
            return result
|
from .appwindow import AppWindow
|
##########################################################################
#
# Copyright (c) 2020 Tom Cowland. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of Tom Cowland nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferUI
import GafferAstro
import IECore
import imath
import functools
import weakref
Gaffer.Metadata.registerNode(
GafferAstro.MultiMonoImageReader,
"description",
"""
""",
plugs = {
"fileName" : [
"description",
"""
""",
"plugValueWidget:type", "GafferUI.FileSystemPathPlugValueWidget",
"path:leaf", True,
"path:bookmarks", "xisf fits",
"fileSystemPath:extensions", "xisf fits",
"fileSystemPath:extensionsLabel", "Show only image files",
"fileSystemPath:includeSequences", False,
"layout:index", 0
],
"resize" : [
"layout:index", 1,
"layout:divider", True
]
}
)
def __addRow( weakNode, name = None, filenameToken = None, extension = None ) :
    """Add a row to the node's spreadsheet, optionally pre-filling the name,
    filename-token and extension cells. Cells given as None are disabled so
    the default-row value applies instead."""
    # The node is held weakly by the menu command; it may have been deleted.
    if weakNode() is None :
        return
    rowsPlug = weakNode()["rows"]
    # Group the row creation and all cell edits into one undoable step.
    with Gaffer.UndoScope( rowsPlug.ancestor( Gaffer.ScriptNode ) ) :
        row = rowsPlug.addRow()
        if name :
            row["name"].setValue( name )
        if filenameToken :
            row["cells"]["filenameToken"]["value"].setValue( filenameToken )
        else :
            row["cells"]["filenameToken"].enabledPlug().setValue( False )
        if extension :
            row["cells"]["extension"]["value"].setValue( extension )
        else :
            row["cells"]["extension"].enabledPlug().setValue( False )
def __existingRows( rowsPlug ) :
    """Return ( token, extension ) pairs for every non-default row, falling
    back to the default row's values wherever a cell is disabled."""
    defaultCells = rowsPlug.defaultRow()["cells"]
    defaultToken = defaultCells["filenameToken"]["value"].getValue()
    defaultExtension = defaultCells["extension"]["value"].getValue()
    result = []
    for row in rowsPlug.children() :
        if row.isSame( rowsPlug.defaultRow() ) :
            continue
        cells = row["cells"]
        token = cells["filenameToken"]["value"].getValue() if cells["filenameToken"].enabledPlug().getValue() else defaultToken
        # NOTE(review): row["name"] here is the name *plug*, not its string
        # value -- getValue() may have been intended; verify before changing.
        token = token or row["name"]
        extension = cells["extension"]["value"].getValue() if cells["extension"].enabledPlug().getValue() else defaultExtension
        result.append( ( token, extension ) )
    return result
def __addRowButtonMenuDefinition( menuDefinition, widget ) :
    """Populate the spreadsheet add-row button menu with an "Empty" entry
    plus one entry per file matching the node's fileName template."""
    node = widget.getPlug().node()
    # Only extend the menu for MultiMonoImageReader spreadsheets.
    if not isinstance( node, GafferAstro.MultiMonoImageReader ) :
        return
    weakNode = weakref.ref( node )
    menuDefinition.append( "/Empty", { "command" : functools.partial( __addRow, weakNode ) } )
    # Evaluate the template and defaults in the editor's context.
    with widget.ancestor( GafferUI.NodeEditor ).getContext() :
        template = node["fileName"].getValue()
        defaultExtension = node["rows"].defaultRow()["cells"]["extension"]["value"].getValue()
        existingRows = __existingRows( node["rows"] )
    if not template :
        return
    baseDir, _ = GafferAstro.FileAlgo.splitPathTemplate( template )
    matches = GafferAstro.FileAlgo.filesMatchingTemplate( template )
    if not matches :
        return
    menuDefinition.append( "/EmptyDivider", { "divider" : True, "label" : "File Matches" } )
    for m in matches :
        token = m[1].get( "token", None )
        extension = m[1].get( "extension", None )
        # Menu label: the matched path relative to the template's base dir.
        menuDefinition.append( "/%s" % ( m[0][len(baseDir):] ), {
            "command" : functools.partial(
                __addRow, weakNode,
                token,
                token,
                # Only override the extension cell when it differs from the default.
                extension if extension != defaultExtension else None
            ),
            # Grey out entries whose ( token, extension ) row already exists.
            "active" : ( token, extension ) not in existingRows
        } )
# Hook the spreadsheet "add row" button so it offers rows for files
# matching the reader's fileName template (non-scoped: lives for the session).
GafferUI.SpreadsheetUI.addRowButtonMenuSignal().connect( __addRowButtonMenuDefinition, scoped = False )
|
"""PyPoll: tally a poll CSV and report each candidate's vote share and the winner.

Reads PyPoll/election_data.csv (columns: Voter ID, County, Candidate),
prints the results and writes the same report to PyPoll/pypoll.txt.

Fix: the report consistently spelled the candidate "Khan" as "Kahn"
(variable ``kahn_percent`` and the printed/written labels), even though
the ballots are matched against the string "Khan".
"""
# Step#1: OS module for portable file paths
import os
# Step#2: module for reading the CSV
import csv

# Step#6: running totals
total_votes = 0
khan_votes = 0
correy_votes = 0
li_votes = 0
otooley_votes = 0

pypoll = os.path.join("PyPoll/election_data.csv")
print(pypoll)

# Open the CSV
with open(pypoll) as csvfile:
    # Step#3: reader with an explicit delimiter
    csvreader = csv.reader(csvfile, delimiter=',')
    print(csvreader)
    # Step#4: skip the header row
    csv_header = next(csvreader)
    print(f"CSV Header: {csv_header}")
    # Step#5: process each data row
    for row in csvreader:
        print(row)
        # Step#7: total ballots cast
        total_votes += 1
        # Step#8: per-candidate tallies. NOTE: the trailing else assumes
        # every remaining ballot is for O'Tooley -- any unexpected
        # candidate name would be counted as his.
        if row[2] == "Khan":
            khan_votes += 1
        elif row[2] == "Correy":
            correy_votes += 1
        elif row[2] == "Li":
            li_votes += 1
        else:
            otooley_votes += 1

# Step#9: vote shares
khan_percent = khan_votes / total_votes
correy_percent = correy_votes / total_votes
li_percent = li_votes / total_votes
otooley_percent = otooley_votes / total_votes

# Step#10: winner by popular vote
winner = max(khan_votes, correy_votes, li_votes, otooley_votes)
if winner == khan_votes:
    winner_name = "Khan"
elif winner == correy_votes:
    winner_name = "Correy"
elif winner == li_votes:
    winner_name = "Li"
else:
    winner_name = "O'Tooley"

# Step#11: print the analysis
print("Election Results")
print("---------------------------")
print(f"Total Votes: {total_votes}")
print("---------------------------")
print(f"Khan: {khan_percent:.3%}({khan_votes})")
print(f"Correy: {correy_percent:.3%}({correy_votes})")
print(f"Li: {li_percent:.3%}({li_votes})")
print(f"O'Tooley: {otooley_percent:.3%}({otooley_votes})")
print("---------------------------")
print(f"Winner: {winner_name}")
print("---------------------------")

# Final Step#12: write the same report to a text file
output_file = os.path.join("PyPoll/pypoll.txt")
with open(output_file, 'w') as txtfile:
    txtfile.write("Election Results\n")
    txtfile.write("---------------------------\n")
    txtfile.write(f"Total Votes: {total_votes}\n")
    txtfile.write("---------------------------\n")
    txtfile.write(f"Khan: {khan_percent:.3%}({khan_votes})\n")
    txtfile.write(f"Correy: {correy_percent:.3%}({correy_votes})\n")
    txtfile.write(f"Li: {li_percent:.3%}({li_votes})\n")
    txtfile.write(f"O'Tooley: {otooley_percent:.3%}({otooley_votes})\n")
    txtfile.write("---------------------------\n")
    txtfile.write(f"Winner: {winner_name}\n")
    txtfile.write("---------------------------\n")
|
""" Cisco_IOS_XR_sysadmin_asic_errors_ael
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class AsicErrors(Entity):
"""
.. attribute:: device_name (key)
**type**\: str
.. attribute:: instance
**type**\: list of :py:class:`Instance <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance>`
.. attribute:: show_all_instances
**type**\: :py:class:`ShowAllInstances <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances>`
"""
_prefix = 'ael'
_revision = '2017-07-05'
def __init__(self):
super(AsicErrors, self).__init__()
self._top_entity = None
self.yang_name = "asic-errors"
self.yang_parent_name = "Cisco-IOS-XR-sysadmin-asic-errors-ael"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = ['device_name']
self._child_container_classes = OrderedDict([("show-all-instances", ("show_all_instances", AsicErrors.ShowAllInstances))])
self._child_list_classes = OrderedDict([("instance", ("instance", AsicErrors.Instance))])
self._leafs = OrderedDict([
('device_name', YLeaf(YType.str, 'device-name')),
])
self.device_name = None
self.show_all_instances = AsicErrors.ShowAllInstances()
self.show_all_instances.parent = self
self._children_name_map["show_all_instances"] = "show-all-instances"
self._children_yang_names.add("show-all-instances")
self.instance = YList(self)
self._segment_path = lambda: "Cisco-IOS-XR-sysadmin-asic-errors-ael:asic-errors" + "[device-name='" + str(self.device_name) + "']"
def __setattr__(self, name, value):
self._perform_setattr(AsicErrors, ['device_name'], name, value)
class Instance(Entity):
"""
.. attribute:: instance_num (key)
**type**\: int
**range:** 0..4294967295
.. attribute:: sbe
**type**\: :py:class:`Sbe <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Sbe>`
.. attribute:: mbe
**type**\: :py:class:`Mbe <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Mbe>`
.. attribute:: parity
**type**\: :py:class:`Parity <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Parity>`
.. attribute:: generic
**type**\: :py:class:`Generic <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Generic>`
.. attribute:: crc
**type**\: :py:class:`Crc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Crc>`
.. attribute:: reset
**type**\: :py:class:`Reset <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Reset>`
.. attribute:: barrier
**type**\: :py:class:`Barrier <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Barrier>`
.. attribute:: unexpected
**type**\: :py:class:`Unexpected <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Unexpected>`
.. attribute:: link
**type**\: :py:class:`Link <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Link>`
.. attribute:: oor_thresh
**type**\: :py:class:`OorThresh <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.OorThresh>`
.. attribute:: bp
**type**\: :py:class:`Bp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Bp>`
.. attribute:: io
**type**\: :py:class:`Io <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Io>`
.. attribute:: ucode
**type**\: :py:class:`Ucode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Ucode>`
.. attribute:: config
**type**\: :py:class:`Config <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Config>`
.. attribute:: indirect
**type**\: :py:class:`Indirect <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Indirect>`
.. attribute:: nonerr
**type**\: :py:class:`Nonerr <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Nonerr>`
.. attribute:: summary
**type**\: :py:class:`Summary <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Summary>`
.. attribute:: all
**type**\: :py:class:`All <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.All>`
"""
_prefix = 'ael'
_revision = '2017-07-05'
def __init__(self):
    """Register the instance-num key leaf and one child container per
    error category of this ASIC instance."""
    super(AsicErrors.Instance, self).__init__()
    # YANG identity: list "instance" under "asic-errors", keyed by instance-num.
    self.yang_name = "instance"
    self.yang_parent_name = "asic-errors"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['instance_num']
    # Registration map consumed by the ydk runtime: one entry per
    # error-category child container (yang-name -> (attribute, class)).
    self._child_container_classes = OrderedDict([("sbe", ("sbe", AsicErrors.Instance.Sbe)), ("mbe", ("mbe", AsicErrors.Instance.Mbe)), ("parity", ("parity", AsicErrors.Instance.Parity)), ("generic", ("generic", AsicErrors.Instance.Generic)), ("crc", ("crc", AsicErrors.Instance.Crc)), ("reset", ("reset", AsicErrors.Instance.Reset)), ("barrier", ("barrier", AsicErrors.Instance.Barrier)), ("unexpected", ("unexpected", AsicErrors.Instance.Unexpected)), ("link", ("link", AsicErrors.Instance.Link)), ("oor-thresh", ("oor_thresh", AsicErrors.Instance.OorThresh)), ("bp", ("bp", AsicErrors.Instance.Bp)), ("io", ("io", AsicErrors.Instance.Io)), ("ucode", ("ucode", AsicErrors.Instance.Ucode)), ("config", ("config", AsicErrors.Instance.Config)), ("indirect", ("indirect", AsicErrors.Instance.Indirect)), ("nonerr", ("nonerr", AsicErrors.Instance.Nonerr)), ("summary", ("summary", AsicErrors.Instance.Summary)), ("all", ("all", AsicErrors.Instance.All))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict([
        ('instance_num', YLeaf(YType.uint32, 'instance-num')),
    ])
    self.instance_num = None
    # Instantiate every child container and register it under both its
    # python attribute name and its yang name (pattern repeats below).
    self.sbe = AsicErrors.Instance.Sbe()
    self.sbe.parent = self
    self._children_name_map["sbe"] = "sbe"
    self._children_yang_names.add("sbe")
    self.mbe = AsicErrors.Instance.Mbe()
    self.mbe.parent = self
    self._children_name_map["mbe"] = "mbe"
    self._children_yang_names.add("mbe")
    self.parity = AsicErrors.Instance.Parity()
    self.parity.parent = self
    self._children_name_map["parity"] = "parity"
    self._children_yang_names.add("parity")
    self.generic = AsicErrors.Instance.Generic()
    self.generic.parent = self
    self._children_name_map["generic"] = "generic"
    self._children_yang_names.add("generic")
    self.crc = AsicErrors.Instance.Crc()
    self.crc.parent = self
    self._children_name_map["crc"] = "crc"
    self._children_yang_names.add("crc")
    self.reset = AsicErrors.Instance.Reset()
    self.reset.parent = self
    self._children_name_map["reset"] = "reset"
    self._children_yang_names.add("reset")
    self.barrier = AsicErrors.Instance.Barrier()
    self.barrier.parent = self
    self._children_name_map["barrier"] = "barrier"
    self._children_yang_names.add("barrier")
    self.unexpected = AsicErrors.Instance.Unexpected()
    self.unexpected.parent = self
    self._children_name_map["unexpected"] = "unexpected"
    self._children_yang_names.add("unexpected")
    self.link = AsicErrors.Instance.Link()
    self.link.parent = self
    self._children_name_map["link"] = "link"
    self._children_yang_names.add("link")
    # oor-thresh is the one child whose python name differs from its yang name.
    self.oor_thresh = AsicErrors.Instance.OorThresh()
    self.oor_thresh.parent = self
    self._children_name_map["oor_thresh"] = "oor-thresh"
    self._children_yang_names.add("oor-thresh")
    self.bp = AsicErrors.Instance.Bp()
    self.bp.parent = self
    self._children_name_map["bp"] = "bp"
    self._children_yang_names.add("bp")
    self.io = AsicErrors.Instance.Io()
    self.io.parent = self
    self._children_name_map["io"] = "io"
    self._children_yang_names.add("io")
    self.ucode = AsicErrors.Instance.Ucode()
    self.ucode.parent = self
    self._children_name_map["ucode"] = "ucode"
    self._children_yang_names.add("ucode")
    self.config = AsicErrors.Instance.Config()
    self.config.parent = self
    self._children_name_map["config"] = "config"
    self._children_yang_names.add("config")
    self.indirect = AsicErrors.Instance.Indirect()
    self.indirect.parent = self
    self._children_name_map["indirect"] = "indirect"
    self._children_yang_names.add("indirect")
    self.nonerr = AsicErrors.Instance.Nonerr()
    self.nonerr.parent = self
    self._children_name_map["nonerr"] = "nonerr"
    self._children_yang_names.add("nonerr")
    self.summary = AsicErrors.Instance.Summary()
    self.summary.parent = self
    self._children_name_map["summary"] = "summary"
    self._children_yang_names.add("summary")
    self.all = AsicErrors.Instance.All()
    self.all.parent = self
    self._children_name_map["all"] = "all"
    self._children_yang_names.add("all")
    # Address this list entry by its instance-num key.
    self._segment_path = lambda: "instance" + "[instance-num='" + str(self.instance_num) + "']"
def __setattr__(self, name, value):
    # Route attribute writes through the ydk Entity bookkeeping;
    # 'instance_num' is the list key.
    self._perform_setattr(AsicErrors.Instance, ['instance_num'], name, value)
class Sbe(Entity):
    """
    Container for the "sbe"\-category error logs of one ASIC instance.

    .. attribute:: location
    **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Sbe.Location>`
    """
    _prefix = 'ael'
    _revision = '2017-07-05'
    def __init__(self):
        super(AsicErrors.Instance.Sbe, self).__init__()
        # YANG identity: container "sbe" under list "instance".
        self.yang_name = "sbe"
        self.yang_parent_name = "instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []  # plain container: no list keys
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.Instance.Sbe.Location))])
        self._leafs = OrderedDict()  # no leaf nodes at this level
        self.location = YList(self)  # one entry per reporting location
        self._segment_path = lambda: "sbe"
    def __setattr__(self, name, value):
        # Route writes through the ydk Entity bookkeeping (no key leafs here).
        self._perform_setattr(AsicErrors.Instance.Sbe, [], name, value)
    class Location(Entity):
        """
        Log entries for one location, keyed by location\-name.

        .. attribute:: location_name  (key)
        **type**\: str
        **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?
        .. attribute:: log_lst
        **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Sbe.Location.LogLst>`
        """
        _prefix = 'ael'
        _revision = '2017-07-05'
        def __init__(self):
            super(AsicErrors.Instance.Sbe.Location, self).__init__()
            self.yang_name = "location"
            self.yang_parent_name = "sbe"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.Instance.Sbe.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None
            self.log_lst = YList(self)
            # Address this list entry by its location-name key.
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"
        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.Instance.Sbe.Location, ['location_name'], name, value)
        class LogLst(Entity):
            """
            One log line.

            .. attribute:: log_line
            **type**\: str
            """
            _prefix = 'ael'
            _revision = '2017-07-05'
            def __init__(self):
                super(AsicErrors.Instance.Sbe.Location.LogLst, self).__init__()
                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"
            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.Instance.Sbe.Location.LogLst, ['log_line'], name, value)
class Mbe(Entity):
    """
    Container for the "mbe"\-category error logs of one ASIC instance.

    .. attribute:: location
    **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Mbe.Location>`
    """
    _prefix = 'ael'
    _revision = '2017-07-05'
    def __init__(self):
        super(AsicErrors.Instance.Mbe, self).__init__()
        # YANG identity: container "mbe" under list "instance".
        self.yang_name = "mbe"
        self.yang_parent_name = "instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []  # plain container: no list keys
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.Instance.Mbe.Location))])
        self._leafs = OrderedDict()  # no leaf nodes at this level
        self.location = YList(self)  # one entry per reporting location
        self._segment_path = lambda: "mbe"
    def __setattr__(self, name, value):
        # Route writes through the ydk Entity bookkeeping (no key leafs here).
        self._perform_setattr(AsicErrors.Instance.Mbe, [], name, value)
    class Location(Entity):
        """
        Log entries for one location, keyed by location\-name.

        .. attribute:: location_name  (key)
        **type**\: str
        **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?
        .. attribute:: log_lst
        **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Mbe.Location.LogLst>`
        """
        _prefix = 'ael'
        _revision = '2017-07-05'
        def __init__(self):
            super(AsicErrors.Instance.Mbe.Location, self).__init__()
            self.yang_name = "location"
            self.yang_parent_name = "mbe"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.Instance.Mbe.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None
            self.log_lst = YList(self)
            # Address this list entry by its location-name key.
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"
        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.Instance.Mbe.Location, ['location_name'], name, value)
        class LogLst(Entity):
            """
            One log line.

            .. attribute:: log_line
            **type**\: str
            """
            _prefix = 'ael'
            _revision = '2017-07-05'
            def __init__(self):
                super(AsicErrors.Instance.Mbe.Location.LogLst, self).__init__()
                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"
            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.Instance.Mbe.Location.LogLst, ['log_line'], name, value)
class Parity(Entity):
    """
    Container for the "parity"\-category error logs of one ASIC instance.

    .. attribute:: location
    **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Parity.Location>`
    """
    _prefix = 'ael'
    _revision = '2017-07-05'
    def __init__(self):
        super(AsicErrors.Instance.Parity, self).__init__()
        # YANG identity: container "parity" under list "instance".
        self.yang_name = "parity"
        self.yang_parent_name = "instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []  # plain container: no list keys
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.Instance.Parity.Location))])
        self._leafs = OrderedDict()  # no leaf nodes at this level
        self.location = YList(self)  # one entry per reporting location
        self._segment_path = lambda: "parity"
    def __setattr__(self, name, value):
        # Route writes through the ydk Entity bookkeeping (no key leafs here).
        self._perform_setattr(AsicErrors.Instance.Parity, [], name, value)
    class Location(Entity):
        """
        Log entries for one location, keyed by location\-name.

        .. attribute:: location_name  (key)
        **type**\: str
        **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?
        .. attribute:: log_lst
        **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Parity.Location.LogLst>`
        """
        _prefix = 'ael'
        _revision = '2017-07-05'
        def __init__(self):
            super(AsicErrors.Instance.Parity.Location, self).__init__()
            self.yang_name = "location"
            self.yang_parent_name = "parity"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.Instance.Parity.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None
            self.log_lst = YList(self)
            # Address this list entry by its location-name key.
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"
        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.Instance.Parity.Location, ['location_name'], name, value)
        class LogLst(Entity):
            """
            One log line.

            .. attribute:: log_line
            **type**\: str
            """
            _prefix = 'ael'
            _revision = '2017-07-05'
            def __init__(self):
                super(AsicErrors.Instance.Parity.Location.LogLst, self).__init__()
                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"
            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.Instance.Parity.Location.LogLst, ['log_line'], name, value)
class Generic(Entity):
    """
    Container for the "generic"\-category error logs of one ASIC instance.

    .. attribute:: location
    **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Generic.Location>`
    """
    _prefix = 'ael'
    _revision = '2017-07-05'
    def __init__(self):
        super(AsicErrors.Instance.Generic, self).__init__()
        # YANG identity: container "generic" under list "instance".
        self.yang_name = "generic"
        self.yang_parent_name = "instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []  # plain container: no list keys
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.Instance.Generic.Location))])
        self._leafs = OrderedDict()  # no leaf nodes at this level
        self.location = YList(self)  # one entry per reporting location
        self._segment_path = lambda: "generic"
    def __setattr__(self, name, value):
        # Route writes through the ydk Entity bookkeeping (no key leafs here).
        self._perform_setattr(AsicErrors.Instance.Generic, [], name, value)
    class Location(Entity):
        """
        Log entries for one location, keyed by location\-name.

        .. attribute:: location_name  (key)
        **type**\: str
        **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?
        .. attribute:: log_lst
        **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Generic.Location.LogLst>`
        """
        _prefix = 'ael'
        _revision = '2017-07-05'
        def __init__(self):
            super(AsicErrors.Instance.Generic.Location, self).__init__()
            self.yang_name = "location"
            self.yang_parent_name = "generic"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.Instance.Generic.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None
            self.log_lst = YList(self)
            # Address this list entry by its location-name key.
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"
        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.Instance.Generic.Location, ['location_name'], name, value)
        class LogLst(Entity):
            """
            One log line.

            .. attribute:: log_line
            **type**\: str
            """
            _prefix = 'ael'
            _revision = '2017-07-05'
            def __init__(self):
                super(AsicErrors.Instance.Generic.Location.LogLst, self).__init__()
                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"
            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.Instance.Generic.Location.LogLst, ['log_line'], name, value)
class Crc(Entity):
    """
    Container for the "crc"\-category error logs of one ASIC instance.

    .. attribute:: location
    **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Crc.Location>`
    """
    _prefix = 'ael'
    _revision = '2017-07-05'
    def __init__(self):
        super(AsicErrors.Instance.Crc, self).__init__()
        # YANG identity: container "crc" under list "instance".
        self.yang_name = "crc"
        self.yang_parent_name = "instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []  # plain container: no list keys
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.Instance.Crc.Location))])
        self._leafs = OrderedDict()  # no leaf nodes at this level
        self.location = YList(self)  # one entry per reporting location
        self._segment_path = lambda: "crc"
    def __setattr__(self, name, value):
        # Route writes through the ydk Entity bookkeeping (no key leafs here).
        self._perform_setattr(AsicErrors.Instance.Crc, [], name, value)
    class Location(Entity):
        """
        Log entries for one location, keyed by location\-name.

        .. attribute:: location_name  (key)
        **type**\: str
        **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?
        .. attribute:: log_lst
        **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Crc.Location.LogLst>`
        """
        _prefix = 'ael'
        _revision = '2017-07-05'
        def __init__(self):
            super(AsicErrors.Instance.Crc.Location, self).__init__()
            self.yang_name = "location"
            self.yang_parent_name = "crc"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.Instance.Crc.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None
            self.log_lst = YList(self)
            # Address this list entry by its location-name key.
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"
        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.Instance.Crc.Location, ['location_name'], name, value)
        class LogLst(Entity):
            """
            One log line.

            .. attribute:: log_line
            **type**\: str
            """
            _prefix = 'ael'
            _revision = '2017-07-05'
            def __init__(self):
                super(AsicErrors.Instance.Crc.Location.LogLst, self).__init__()
                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"
            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.Instance.Crc.Location.LogLst, ['log_line'], name, value)
class Reset(Entity):
    """
    Container for the "reset"\-category error logs of one ASIC instance.

    .. attribute:: location
    **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Reset.Location>`
    """
    _prefix = 'ael'
    _revision = '2017-07-05'
    def __init__(self):
        super(AsicErrors.Instance.Reset, self).__init__()
        # YANG identity: container "reset" under list "instance".
        self.yang_name = "reset"
        self.yang_parent_name = "instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []  # plain container: no list keys
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.Instance.Reset.Location))])
        self._leafs = OrderedDict()  # no leaf nodes at this level
        self.location = YList(self)  # one entry per reporting location
        self._segment_path = lambda: "reset"
    def __setattr__(self, name, value):
        # Route writes through the ydk Entity bookkeeping (no key leafs here).
        self._perform_setattr(AsicErrors.Instance.Reset, [], name, value)
    class Location(Entity):
        """
        Log entries for one location, keyed by location\-name.

        .. attribute:: location_name  (key)
        **type**\: str
        **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?
        .. attribute:: log_lst
        **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Reset.Location.LogLst>`
        """
        _prefix = 'ael'
        _revision = '2017-07-05'
        def __init__(self):
            super(AsicErrors.Instance.Reset.Location, self).__init__()
            self.yang_name = "location"
            self.yang_parent_name = "reset"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.Instance.Reset.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None
            self.log_lst = YList(self)
            # Address this list entry by its location-name key.
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"
        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.Instance.Reset.Location, ['location_name'], name, value)
        class LogLst(Entity):
            """
            One log line.

            .. attribute:: log_line
            **type**\: str
            """
            _prefix = 'ael'
            _revision = '2017-07-05'
            def __init__(self):
                super(AsicErrors.Instance.Reset.Location.LogLst, self).__init__()
                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"
            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.Instance.Reset.Location.LogLst, ['log_line'], name, value)
class Barrier(Entity):
    """
    Container for the "barrier"\-category error logs of one ASIC instance.

    .. attribute:: location
    **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Barrier.Location>`
    """
    _prefix = 'ael'
    _revision = '2017-07-05'
    def __init__(self):
        super(AsicErrors.Instance.Barrier, self).__init__()
        # YANG identity: container "barrier" under list "instance".
        self.yang_name = "barrier"
        self.yang_parent_name = "instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []  # plain container: no list keys
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.Instance.Barrier.Location))])
        self._leafs = OrderedDict()  # no leaf nodes at this level
        self.location = YList(self)  # one entry per reporting location
        self._segment_path = lambda: "barrier"
    def __setattr__(self, name, value):
        # Route writes through the ydk Entity bookkeeping (no key leafs here).
        self._perform_setattr(AsicErrors.Instance.Barrier, [], name, value)
    class Location(Entity):
        """
        Log entries for one location, keyed by location\-name.

        .. attribute:: location_name  (key)
        **type**\: str
        **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?
        .. attribute:: log_lst
        **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Barrier.Location.LogLst>`
        """
        _prefix = 'ael'
        _revision = '2017-07-05'
        def __init__(self):
            super(AsicErrors.Instance.Barrier.Location, self).__init__()
            self.yang_name = "location"
            self.yang_parent_name = "barrier"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.Instance.Barrier.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None
            self.log_lst = YList(self)
            # Address this list entry by its location-name key.
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"
        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.Instance.Barrier.Location, ['location_name'], name, value)
        class LogLst(Entity):
            """
            One log line.

            .. attribute:: log_line
            **type**\: str
            """
            _prefix = 'ael'
            _revision = '2017-07-05'
            def __init__(self):
                super(AsicErrors.Instance.Barrier.Location.LogLst, self).__init__()
                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"
            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.Instance.Barrier.Location.LogLst, ['log_line'], name, value)
class Unexpected(Entity):
    """
    Container for the "unexpected"\-category error logs of one ASIC instance.

    .. attribute:: location
    **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Unexpected.Location>`
    """
    _prefix = 'ael'
    _revision = '2017-07-05'
    def __init__(self):
        super(AsicErrors.Instance.Unexpected, self).__init__()
        # YANG identity: container "unexpected" under list "instance".
        self.yang_name = "unexpected"
        self.yang_parent_name = "instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []  # plain container: no list keys
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.Instance.Unexpected.Location))])
        self._leafs = OrderedDict()  # no leaf nodes at this level
        self.location = YList(self)  # one entry per reporting location
        self._segment_path = lambda: "unexpected"
    def __setattr__(self, name, value):
        # Route writes through the ydk Entity bookkeeping (no key leafs here).
        self._perform_setattr(AsicErrors.Instance.Unexpected, [], name, value)
    class Location(Entity):
        """
        Log entries for one location, keyed by location\-name.

        .. attribute:: location_name  (key)
        **type**\: str
        **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?
        .. attribute:: log_lst
        **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Unexpected.Location.LogLst>`
        """
        _prefix = 'ael'
        _revision = '2017-07-05'
        def __init__(self):
            super(AsicErrors.Instance.Unexpected.Location, self).__init__()
            self.yang_name = "location"
            self.yang_parent_name = "unexpected"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.Instance.Unexpected.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None
            self.log_lst = YList(self)
            # Address this list entry by its location-name key.
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"
        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.Instance.Unexpected.Location, ['location_name'], name, value)
        class LogLst(Entity):
            """
            One log line.

            .. attribute:: log_line
            **type**\: str
            """
            _prefix = 'ael'
            _revision = '2017-07-05'
            def __init__(self):
                super(AsicErrors.Instance.Unexpected.Location.LogLst, self).__init__()
                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"
            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.Instance.Unexpected.Location.LogLst, ['log_line'], name, value)
class Link(Entity):
    """
    Container for the "link"\-category error logs of one ASIC instance.

    .. attribute:: location
    **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Link.Location>`
    """
    _prefix = 'ael'
    _revision = '2017-07-05'
    def __init__(self):
        super(AsicErrors.Instance.Link, self).__init__()
        # YANG identity: container "link" under list "instance".
        self.yang_name = "link"
        self.yang_parent_name = "instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []  # plain container: no list keys
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.Instance.Link.Location))])
        self._leafs = OrderedDict()  # no leaf nodes at this level
        self.location = YList(self)  # one entry per reporting location
        self._segment_path = lambda: "link"
    def __setattr__(self, name, value):
        # Route writes through the ydk Entity bookkeeping (no key leafs here).
        self._perform_setattr(AsicErrors.Instance.Link, [], name, value)
    class Location(Entity):
        """
        Log entries for one location, keyed by location\-name.

        .. attribute:: location_name  (key)
        **type**\: str
        **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?
        .. attribute:: log_lst
        **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Link.Location.LogLst>`
        """
        _prefix = 'ael'
        _revision = '2017-07-05'
        def __init__(self):
            super(AsicErrors.Instance.Link.Location, self).__init__()
            self.yang_name = "location"
            self.yang_parent_name = "link"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.Instance.Link.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None
            self.log_lst = YList(self)
            # Address this list entry by its location-name key.
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"
        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.Instance.Link.Location, ['location_name'], name, value)
        class LogLst(Entity):
            """
            One log line.

            .. attribute:: log_line
            **type**\: str
            """
            _prefix = 'ael'
            _revision = '2017-07-05'
            def __init__(self):
                super(AsicErrors.Instance.Link.Location.LogLst, self).__init__()
                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"
            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.Instance.Link.Location.LogLst, ['log_line'], name, value)
class OorThresh(Entity):
    """
    Container for the "oor\-thresh"\-category error logs of one ASIC instance.

    .. attribute:: location
    **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.OorThresh.Location>`
    """
    _prefix = 'ael'
    _revision = '2017-07-05'
    def __init__(self):
        super(AsicErrors.Instance.OorThresh, self).__init__()
        # YANG identity: container "oor-thresh" under list "instance".
        self.yang_name = "oor-thresh"
        self.yang_parent_name = "instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []  # plain container: no list keys
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.Instance.OorThresh.Location))])
        self._leafs = OrderedDict()  # no leaf nodes at this level
        self.location = YList(self)  # one entry per reporting location
        self._segment_path = lambda: "oor-thresh"
    def __setattr__(self, name, value):
        # Route writes through the ydk Entity bookkeeping (no key leafs here).
        self._perform_setattr(AsicErrors.Instance.OorThresh, [], name, value)
    class Location(Entity):
        """
        Log entries for one location, keyed by location\-name.

        .. attribute:: location_name  (key)
        **type**\: str
        **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?
        .. attribute:: log_lst
        **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.OorThresh.Location.LogLst>`
        """
        _prefix = 'ael'
        _revision = '2017-07-05'
        def __init__(self):
            super(AsicErrors.Instance.OorThresh.Location, self).__init__()
            self.yang_name = "location"
            self.yang_parent_name = "oor-thresh"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.Instance.OorThresh.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None
            self.log_lst = YList(self)
            # Address this list entry by its location-name key.
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"
        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.Instance.OorThresh.Location, ['location_name'], name, value)
        class LogLst(Entity):
            """
            One log line.

            .. attribute:: log_line
            **type**\: str
            """
            _prefix = 'ael'
            _revision = '2017-07-05'
            def __init__(self):
                super(AsicErrors.Instance.OorThresh.Location.LogLst, self).__init__()
                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"
            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.Instance.OorThresh.Location.LogLst, ['log_line'], name, value)
class Bp(Entity):
    """
    YANG container "bp" under "instance"; holds per-location ASIC
    error logs.

    .. attribute:: location

        **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Bp.Location>`
    """

    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        # Populate YDK metadata; this container has no leaves of its own,
        # only the child "location" list.
        super(AsicErrors.Instance.Bp, self).__init__()

        self.yang_name = "bp"
        self.yang_parent_name = "instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a YANG list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # Child YANG list "location" is exposed as the `location` attribute.
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.Instance.Bp.Location))])
        self._leafs = OrderedDict()
        self.location = YList(self)
        self._segment_path = lambda: "bp"

    def __setattr__(self, name, value):
        # Route writes through YDK's managed-setattr machinery (no leafs here).
        self._perform_setattr(AsicErrors.Instance.Bp, [], name, value)

    class Location(Entity):
        """
        List entry "location", keyed by `location_name`.

        .. attribute:: location_name  (key)

            **type**\: str
            **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?
        .. attribute:: log_lst

            **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Bp.Location.LogLst>`
        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.Instance.Bp.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "bp"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']  # YANG list key
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.Instance.Bp.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None
            self.log_lst = YList(self)
            # Segment path embeds the key value for XPath-style addressing.
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.Instance.Bp.Location, ['location_name'], name, value)

        class LogLst(Entity):
            """
            One "log-lst" entry (a single log line).

            .. attribute:: log_line

                **type**\: str
            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.Instance.Bp.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []  # keyless list entry
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.Instance.Bp.Location.LogLst, ['log_line'], name, value)
class Io(Entity):
    """
    YANG container "io" under "instance"; holds per-location ASIC
    error logs.

    .. attribute:: location

        **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Io.Location>`
    """

    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        # Populate YDK metadata; this container has no leaves of its own,
        # only the child "location" list.
        super(AsicErrors.Instance.Io, self).__init__()

        self.yang_name = "io"
        self.yang_parent_name = "instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a YANG list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # Child YANG list "location" is exposed as the `location` attribute.
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.Instance.Io.Location))])
        self._leafs = OrderedDict()
        self.location = YList(self)
        self._segment_path = lambda: "io"

    def __setattr__(self, name, value):
        # Route writes through YDK's managed-setattr machinery (no leafs here).
        self._perform_setattr(AsicErrors.Instance.Io, [], name, value)

    class Location(Entity):
        """
        List entry "location", keyed by `location_name`.

        .. attribute:: location_name  (key)

            **type**\: str
            **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?
        .. attribute:: log_lst

            **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Io.Location.LogLst>`
        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.Instance.Io.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "io"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']  # YANG list key
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.Instance.Io.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None
            self.log_lst = YList(self)
            # Segment path embeds the key value for XPath-style addressing.
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.Instance.Io.Location, ['location_name'], name, value)

        class LogLst(Entity):
            """
            One "log-lst" entry (a single log line).

            .. attribute:: log_line

                **type**\: str
            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.Instance.Io.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []  # keyless list entry
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.Instance.Io.Location.LogLst, ['log_line'], name, value)
class Ucode(Entity):
    """
    YANG container "ucode" under "instance"; holds per-location ASIC
    error logs.

    .. attribute:: location

        **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Ucode.Location>`
    """

    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        # Populate YDK metadata; this container has no leaves of its own,
        # only the child "location" list.
        super(AsicErrors.Instance.Ucode, self).__init__()

        self.yang_name = "ucode"
        self.yang_parent_name = "instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a YANG list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # Child YANG list "location" is exposed as the `location` attribute.
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.Instance.Ucode.Location))])
        self._leafs = OrderedDict()
        self.location = YList(self)
        self._segment_path = lambda: "ucode"

    def __setattr__(self, name, value):
        # Route writes through YDK's managed-setattr machinery (no leafs here).
        self._perform_setattr(AsicErrors.Instance.Ucode, [], name, value)

    class Location(Entity):
        """
        List entry "location", keyed by `location_name`.

        .. attribute:: location_name  (key)

            **type**\: str
            **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?
        .. attribute:: log_lst

            **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Ucode.Location.LogLst>`
        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.Instance.Ucode.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "ucode"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']  # YANG list key
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.Instance.Ucode.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None
            self.log_lst = YList(self)
            # Segment path embeds the key value for XPath-style addressing.
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.Instance.Ucode.Location, ['location_name'], name, value)

        class LogLst(Entity):
            """
            One "log-lst" entry (a single log line).

            .. attribute:: log_line

                **type**\: str
            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.Instance.Ucode.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []  # keyless list entry
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.Instance.Ucode.Location.LogLst, ['log_line'], name, value)
class Config(Entity):
    """
    YANG container "config" under "instance"; holds per-location ASIC
    error logs.

    .. attribute:: location

        **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Config.Location>`
    """

    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        # Populate YDK metadata; this container has no leaves of its own,
        # only the child "location" list.
        super(AsicErrors.Instance.Config, self).__init__()

        self.yang_name = "config"
        self.yang_parent_name = "instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a YANG list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # Child YANG list "location" is exposed as the `location` attribute.
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.Instance.Config.Location))])
        self._leafs = OrderedDict()
        self.location = YList(self)
        self._segment_path = lambda: "config"

    def __setattr__(self, name, value):
        # Route writes through YDK's managed-setattr machinery (no leafs here).
        self._perform_setattr(AsicErrors.Instance.Config, [], name, value)

    class Location(Entity):
        """
        List entry "location", keyed by `location_name`.

        .. attribute:: location_name  (key)

            **type**\: str
            **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?
        .. attribute:: log_lst

            **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Config.Location.LogLst>`
        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.Instance.Config.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "config"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']  # YANG list key
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.Instance.Config.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None
            self.log_lst = YList(self)
            # Segment path embeds the key value for XPath-style addressing.
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.Instance.Config.Location, ['location_name'], name, value)

        class LogLst(Entity):
            """
            One "log-lst" entry (a single log line).

            .. attribute:: log_line

                **type**\: str
            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.Instance.Config.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []  # keyless list entry
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.Instance.Config.Location.LogLst, ['log_line'], name, value)
class Indirect(Entity):
    """
    YANG container "indirect" under "instance"; holds per-location
    ASIC error logs.

    .. attribute:: location

        **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Indirect.Location>`
    """

    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        # Populate YDK metadata; this container has no leaves of its own,
        # only the child "location" list.
        super(AsicErrors.Instance.Indirect, self).__init__()

        self.yang_name = "indirect"
        self.yang_parent_name = "instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a YANG list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # Child YANG list "location" is exposed as the `location` attribute.
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.Instance.Indirect.Location))])
        self._leafs = OrderedDict()
        self.location = YList(self)
        self._segment_path = lambda: "indirect"

    def __setattr__(self, name, value):
        # Route writes through YDK's managed-setattr machinery (no leafs here).
        self._perform_setattr(AsicErrors.Instance.Indirect, [], name, value)

    class Location(Entity):
        """
        List entry "location", keyed by `location_name`.

        .. attribute:: location_name  (key)

            **type**\: str
            **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?
        .. attribute:: log_lst

            **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Indirect.Location.LogLst>`
        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.Instance.Indirect.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "indirect"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']  # YANG list key
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.Instance.Indirect.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None
            self.log_lst = YList(self)
            # Segment path embeds the key value for XPath-style addressing.
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.Instance.Indirect.Location, ['location_name'], name, value)

        class LogLst(Entity):
            """
            One "log-lst" entry (a single log line).

            .. attribute:: log_line

                **type**\: str
            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.Instance.Indirect.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []  # keyless list entry
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.Instance.Indirect.Location.LogLst, ['log_line'], name, value)
class Nonerr(Entity):
    """
    YANG container "nonerr" under "instance"; holds per-location ASIC
    error logs.

    .. attribute:: location

        **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Nonerr.Location>`
    """

    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        # Populate YDK metadata; this container has no leaves of its own,
        # only the child "location" list.
        super(AsicErrors.Instance.Nonerr, self).__init__()

        self.yang_name = "nonerr"
        self.yang_parent_name = "instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a YANG list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # Child YANG list "location" is exposed as the `location` attribute.
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.Instance.Nonerr.Location))])
        self._leafs = OrderedDict()
        self.location = YList(self)
        self._segment_path = lambda: "nonerr"

    def __setattr__(self, name, value):
        # Route writes through YDK's managed-setattr machinery (no leafs here).
        self._perform_setattr(AsicErrors.Instance.Nonerr, [], name, value)

    class Location(Entity):
        """
        List entry "location", keyed by `location_name`.

        .. attribute:: location_name  (key)

            **type**\: str
            **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?
        .. attribute:: log_lst

            **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Nonerr.Location.LogLst>`
        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.Instance.Nonerr.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "nonerr"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']  # YANG list key
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.Instance.Nonerr.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None
            self.log_lst = YList(self)
            # Segment path embeds the key value for XPath-style addressing.
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.Instance.Nonerr.Location, ['location_name'], name, value)

        class LogLst(Entity):
            """
            One "log-lst" entry (a single log line).

            .. attribute:: log_line

                **type**\: str
            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.Instance.Nonerr.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []  # keyless list entry
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.Instance.Nonerr.Location.LogLst, ['log_line'], name, value)
class Summary(Entity):
    """
    YANG container "summary" under "instance"; holds per-location
    ASIC error logs.

    .. attribute:: location

        **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Summary.Location>`
    """

    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        # Populate YDK metadata; this container has no leaves of its own,
        # only the child "location" list.
        super(AsicErrors.Instance.Summary, self).__init__()

        self.yang_name = "summary"
        self.yang_parent_name = "instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a YANG list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # Child YANG list "location" is exposed as the `location` attribute.
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.Instance.Summary.Location))])
        self._leafs = OrderedDict()
        self.location = YList(self)
        self._segment_path = lambda: "summary"

    def __setattr__(self, name, value):
        # Route writes through YDK's managed-setattr machinery (no leafs here).
        self._perform_setattr(AsicErrors.Instance.Summary, [], name, value)

    class Location(Entity):
        """
        List entry "location", keyed by `location_name`.

        .. attribute:: location_name  (key)

            **type**\: str
            **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?
        .. attribute:: log_lst

            **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.Summary.Location.LogLst>`
        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.Instance.Summary.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "summary"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']  # YANG list key
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.Instance.Summary.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None
            self.log_lst = YList(self)
            # Segment path embeds the key value for XPath-style addressing.
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.Instance.Summary.Location, ['location_name'], name, value)

        class LogLst(Entity):
            """
            One "log-lst" entry (a single log line).

            .. attribute:: log_line

                **type**\: str
            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.Instance.Summary.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []  # keyless list entry
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.Instance.Summary.Location.LogLst, ['log_line'], name, value)
class All(Entity):
    """
    YANG container "all" under "instance".  Unlike the sibling error
    categories, it also carries a "history" child container in
    addition to the per-location log list.

    .. attribute:: history

        **type**\: :py:class:`History <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.All.History>`
    .. attribute:: location

        **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.All.Location>`
    """

    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        super(AsicErrors.Instance.All, self).__init__()

        self.yang_name = "all"
        self.yang_parent_name = "instance"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a YANG list
        self.ylist_key_names = []
        # One child container ("history") and one child list ("location").
        self._child_container_classes = OrderedDict([("history", ("history", AsicErrors.Instance.All.History))])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.Instance.All.Location))])
        self._leafs = OrderedDict()
        # Instantiate the "history" child and register it with YDK's
        # python-name -> yang-name maps.
        self.history = AsicErrors.Instance.All.History()
        self.history.parent = self
        self._children_name_map["history"] = "history"
        self._children_yang_names.add("history")
        self.location = YList(self)
        self._segment_path = lambda: "all"

    def __setattr__(self, name, value):
        # Route writes through YDK's managed-setattr machinery (no leafs here).
        self._perform_setattr(AsicErrors.Instance.All, [], name, value)

    class History(Entity):
        """
        YANG container "history" under "all"; holds per-location ASIC
        error logs.

        .. attribute:: location

            **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.All.History.Location>`
        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.Instance.All.History, self).__init__()

            self.yang_name = "history"
            self.yang_parent_name = "all"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.Instance.All.History.Location))])
            self._leafs = OrderedDict()
            self.location = YList(self)
            self._segment_path = lambda: "history"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.Instance.All.History, [], name, value)

        class Location(Entity):
            """
            List entry "location", keyed by `location_name`.

            .. attribute:: location_name  (key)

                **type**\: str
                **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?
            .. attribute:: log_lst

                **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.All.History.Location.LogLst>`
            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.Instance.All.History.Location, self).__init__()

                self.yang_name = "location"
                self.yang_parent_name = "history"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = ['location_name']  # YANG list key
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.Instance.All.History.Location.LogLst))])
                self._leafs = OrderedDict([
                    ('location_name', YLeaf(YType.str, 'location-name')),
                ])
                self.location_name = None
                self.log_lst = YList(self)
                # Segment path embeds the key value for XPath-style addressing.
                self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.Instance.All.History.Location, ['location_name'], name, value)

            class LogLst(Entity):
                """
                One "log-lst" entry (a single log line).

                .. attribute:: log_line

                    **type**\: str
                """

                _prefix = 'ael'
                _revision = '2017-07-05'

                def __init__(self):
                    super(AsicErrors.Instance.All.History.Location.LogLst, self).__init__()

                    self.yang_name = "log-lst"
                    self.yang_parent_name = "location"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []  # keyless list entry
                    self._child_container_classes = OrderedDict([])
                    self._child_list_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('log_line', YLeaf(YType.str, 'log-line')),
                    ])
                    self.log_line = None
                    self._segment_path = lambda: "log-lst"

                def __setattr__(self, name, value):
                    self._perform_setattr(AsicErrors.Instance.All.History.Location.LogLst, ['log_line'], name, value)

    class Location(Entity):
        """
        List entry "location" directly under "all", keyed by `location_name`.

        .. attribute:: location_name  (key)

            **type**\: str
            **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?
        .. attribute:: log_lst

            **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.Instance.All.Location.LogLst>`
        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.Instance.All.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "all"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']  # YANG list key
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.Instance.All.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None
            self.log_lst = YList(self)
            # Segment path embeds the key value for XPath-style addressing.
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.Instance.All.Location, ['location_name'], name, value)

        class LogLst(Entity):
            """
            One "log-lst" entry (a single log line).

            .. attribute:: log_line

                **type**\: str
            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.Instance.All.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []  # keyless list entry
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.Instance.All.Location.LogLst, ['log_line'], name, value)
class ShowAllInstances(Entity):
"""
.. attribute:: sbe
**type**\: :py:class:`Sbe <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Sbe>`
.. attribute:: mbe
**type**\: :py:class:`Mbe <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Mbe>`
.. attribute:: parity
**type**\: :py:class:`Parity <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Parity>`
.. attribute:: generic
**type**\: :py:class:`Generic <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Generic>`
.. attribute:: crc
**type**\: :py:class:`Crc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Crc>`
.. attribute:: reset
**type**\: :py:class:`Reset <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Reset>`
.. attribute:: barrier
**type**\: :py:class:`Barrier <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Barrier>`
.. attribute:: unexpected
**type**\: :py:class:`Unexpected <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Unexpected>`
.. attribute:: link
**type**\: :py:class:`Link <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Link>`
.. attribute:: oor_thresh
**type**\: :py:class:`OorThresh <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.OorThresh>`
.. attribute:: bp
**type**\: :py:class:`Bp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Bp>`
.. attribute:: io
**type**\: :py:class:`Io <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Io>`
.. attribute:: ucode
**type**\: :py:class:`Ucode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Ucode>`
.. attribute:: config
**type**\: :py:class:`Config <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Config>`
.. attribute:: indirect
**type**\: :py:class:`Indirect <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Indirect>`
.. attribute:: nonerr
**type**\: :py:class:`Nonerr <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Nonerr>`
.. attribute:: summary
**type**\: :py:class:`Summary <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Summary>`
.. attribute:: all
**type**\: :py:class:`All <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.All>`
"""
_prefix = 'ael'
_revision = '2017-07-05'
def __init__(self):
    """Instantiate every error-category child container and register it.

    Each child follows the same four-step pattern: construct it, parent
    it to self, record its python-name -> yang-name mapping, and add its
    YANG name to the known-children set.
    """
    super(AsicErrors.ShowAllInstances, self).__init__()

    self.yang_name = "show-all-instances"
    self.yang_parent_name = "asic-errors"
    self.is_top_level_class = False
    self.has_list_ancestor = True  # an ancestor node is a YANG list
    self.ylist_key_names = []
    # All children are containers (one per error category); no child lists
    # and no leaves at this level.
    self._child_container_classes = OrderedDict([("sbe", ("sbe", AsicErrors.ShowAllInstances.Sbe)), ("mbe", ("mbe", AsicErrors.ShowAllInstances.Mbe)), ("parity", ("parity", AsicErrors.ShowAllInstances.Parity)), ("generic", ("generic", AsicErrors.ShowAllInstances.Generic)), ("crc", ("crc", AsicErrors.ShowAllInstances.Crc)), ("reset", ("reset", AsicErrors.ShowAllInstances.Reset)), ("barrier", ("barrier", AsicErrors.ShowAllInstances.Barrier)), ("unexpected", ("unexpected", AsicErrors.ShowAllInstances.Unexpected)), ("link", ("link", AsicErrors.ShowAllInstances.Link)), ("oor-thresh", ("oor_thresh", AsicErrors.ShowAllInstances.OorThresh)), ("bp", ("bp", AsicErrors.ShowAllInstances.Bp)), ("io", ("io", AsicErrors.ShowAllInstances.Io)), ("ucode", ("ucode", AsicErrors.ShowAllInstances.Ucode)), ("config", ("config", AsicErrors.ShowAllInstances.Config)), ("indirect", ("indirect", AsicErrors.ShowAllInstances.Indirect)), ("nonerr", ("nonerr", AsicErrors.ShowAllInstances.Nonerr)), ("summary", ("summary", AsicErrors.ShowAllInstances.Summary)), ("all", ("all", AsicErrors.ShowAllInstances.All))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict()
    self.sbe = AsicErrors.ShowAllInstances.Sbe()
    self.sbe.parent = self
    self._children_name_map["sbe"] = "sbe"
    self._children_yang_names.add("sbe")
    self.mbe = AsicErrors.ShowAllInstances.Mbe()
    self.mbe.parent = self
    self._children_name_map["mbe"] = "mbe"
    self._children_yang_names.add("mbe")
    self.parity = AsicErrors.ShowAllInstances.Parity()
    self.parity.parent = self
    self._children_name_map["parity"] = "parity"
    self._children_yang_names.add("parity")
    self.generic = AsicErrors.ShowAllInstances.Generic()
    self.generic.parent = self
    self._children_name_map["generic"] = "generic"
    self._children_yang_names.add("generic")
    self.crc = AsicErrors.ShowAllInstances.Crc()
    self.crc.parent = self
    self._children_name_map["crc"] = "crc"
    self._children_yang_names.add("crc")
    self.reset = AsicErrors.ShowAllInstances.Reset()
    self.reset.parent = self
    self._children_name_map["reset"] = "reset"
    self._children_yang_names.add("reset")
    self.barrier = AsicErrors.ShowAllInstances.Barrier()
    self.barrier.parent = self
    self._children_name_map["barrier"] = "barrier"
    self._children_yang_names.add("barrier")
    self.unexpected = AsicErrors.ShowAllInstances.Unexpected()
    self.unexpected.parent = self
    self._children_name_map["unexpected"] = "unexpected"
    self._children_yang_names.add("unexpected")
    self.link = AsicErrors.ShowAllInstances.Link()
    self.link.parent = self
    self._children_name_map["link"] = "link"
    self._children_yang_names.add("link")
    # Note: python name "oor_thresh" maps to YANG name "oor-thresh".
    self.oor_thresh = AsicErrors.ShowAllInstances.OorThresh()
    self.oor_thresh.parent = self
    self._children_name_map["oor_thresh"] = "oor-thresh"
    self._children_yang_names.add("oor-thresh")
    self.bp = AsicErrors.ShowAllInstances.Bp()
    self.bp.parent = self
    self._children_name_map["bp"] = "bp"
    self._children_yang_names.add("bp")
    self.io = AsicErrors.ShowAllInstances.Io()
    self.io.parent = self
    self._children_name_map["io"] = "io"
    self._children_yang_names.add("io")
    self.ucode = AsicErrors.ShowAllInstances.Ucode()
    self.ucode.parent = self
    self._children_name_map["ucode"] = "ucode"
    self._children_yang_names.add("ucode")
    self.config = AsicErrors.ShowAllInstances.Config()
    self.config.parent = self
    self._children_name_map["config"] = "config"
    self._children_yang_names.add("config")
    self.indirect = AsicErrors.ShowAllInstances.Indirect()
    self.indirect.parent = self
    self._children_name_map["indirect"] = "indirect"
    self._children_yang_names.add("indirect")
    self.nonerr = AsicErrors.ShowAllInstances.Nonerr()
    self.nonerr.parent = self
    self._children_name_map["nonerr"] = "nonerr"
    self._children_yang_names.add("nonerr")
    self.summary = AsicErrors.ShowAllInstances.Summary()
    self.summary.parent = self
    self._children_name_map["summary"] = "summary"
    self._children_yang_names.add("summary")
    self.all = AsicErrors.ShowAllInstances.All()
    self.all.parent = self
    self._children_name_map["all"] = "all"
    self._children_yang_names.add("all")
    self._segment_path = lambda: "show-all-instances"
class Sbe(Entity):
    """
    ASIC error logs of the ``sbe`` category (category name taken from the
    YANG node; presumably single-bit errors — TODO confirm against the model),
    grouped per location.

    .. attribute:: location

    	Per\-location error log entries
    	**type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Sbe.Location>`

    """

    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        super(AsicErrors.ShowAllInstances.Sbe, self).__init__()

        # NOTE: every assignment below is intercepted by the __setattr__
        # override and handed to Entity._perform_setattr, so the generated
        # statement order is part of the initialization contract.
        self.yang_name = "sbe"
        self.yang_parent_name = "show-all-instances"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.ShowAllInstances.Sbe.Location))])
        self._leafs = OrderedDict()  # no leaf nodes on this container

        self.location = YList(self)  # YANG list "location"
        # relative XPath segment of this container
        self._segment_path = lambda: "sbe"

    def __setattr__(self, name, value):
        # empty key list: this container has no leaf attributes of its own
        self._perform_setattr(AsicErrors.ShowAllInstances.Sbe, [], name, value)


    class Location(Entity):
        """
        One per\-location set of log entries, keyed by location\-name.

        .. attribute:: location_name  (key)

        	**type**\: str

        	**pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?

        .. attribute:: log_lst

        	**type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Sbe.Location.LogLst>`

        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.ShowAllInstances.Sbe.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "sbe"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.ShowAllInstances.Sbe.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None  # list key; filled in by the caller

            self.log_lst = YList(self)
            # key value is embedded in the path predicate at access time
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.ShowAllInstances.Sbe.Location, ['location_name'], name, value)


        class LogLst(Entity):
            """
            A single log line.

            .. attribute:: log_line

            	**type**\: str

            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.ShowAllInstances.Sbe.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.ShowAllInstances.Sbe.Location.LogLst, ['log_line'], name, value)
class Mbe(Entity):
    """
    ASIC error logs of the ``mbe`` category (category name taken from the
    YANG node; presumably multi-bit errors — TODO confirm against the model),
    grouped per location.

    .. attribute:: location

    	Per\-location error log entries
    	**type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Mbe.Location>`

    """

    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        super(AsicErrors.ShowAllInstances.Mbe, self).__init__()

        # NOTE: every assignment below is intercepted by the __setattr__
        # override and handed to Entity._perform_setattr, so the generated
        # statement order is part of the initialization contract.
        self.yang_name = "mbe"
        self.yang_parent_name = "show-all-instances"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.ShowAllInstances.Mbe.Location))])
        self._leafs = OrderedDict()  # no leaf nodes on this container

        self.location = YList(self)  # YANG list "location"
        # relative XPath segment of this container
        self._segment_path = lambda: "mbe"

    def __setattr__(self, name, value):
        # empty key list: this container has no leaf attributes of its own
        self._perform_setattr(AsicErrors.ShowAllInstances.Mbe, [], name, value)


    class Location(Entity):
        """
        One per\-location set of log entries, keyed by location\-name.

        .. attribute:: location_name  (key)

        	**type**\: str

        	**pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?

        .. attribute:: log_lst

        	**type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Mbe.Location.LogLst>`

        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.ShowAllInstances.Mbe.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "mbe"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.ShowAllInstances.Mbe.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None  # list key; filled in by the caller

            self.log_lst = YList(self)
            # key value is embedded in the path predicate at access time
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.ShowAllInstances.Mbe.Location, ['location_name'], name, value)


        class LogLst(Entity):
            """
            A single log line.

            .. attribute:: log_line

            	**type**\: str

            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.ShowAllInstances.Mbe.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.ShowAllInstances.Mbe.Location.LogLst, ['log_line'], name, value)
class Parity(Entity):
    """
    ASIC error logs of the ``parity`` category, grouped per location.

    .. attribute:: location

    	Per\-location error log entries
    	**type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Parity.Location>`

    """

    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        super(AsicErrors.ShowAllInstances.Parity, self).__init__()

        # NOTE: every assignment below is intercepted by the __setattr__
        # override and handed to Entity._perform_setattr, so the generated
        # statement order is part of the initialization contract.
        self.yang_name = "parity"
        self.yang_parent_name = "show-all-instances"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.ShowAllInstances.Parity.Location))])
        self._leafs = OrderedDict()  # no leaf nodes on this container

        self.location = YList(self)  # YANG list "location"
        # relative XPath segment of this container
        self._segment_path = lambda: "parity"

    def __setattr__(self, name, value):
        # empty key list: this container has no leaf attributes of its own
        self._perform_setattr(AsicErrors.ShowAllInstances.Parity, [], name, value)


    class Location(Entity):
        """
        One per\-location set of log entries, keyed by location\-name.

        .. attribute:: location_name  (key)

        	**type**\: str

        	**pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?

        .. attribute:: log_lst

        	**type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Parity.Location.LogLst>`

        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.ShowAllInstances.Parity.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "parity"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.ShowAllInstances.Parity.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None  # list key; filled in by the caller

            self.log_lst = YList(self)
            # key value is embedded in the path predicate at access time
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.ShowAllInstances.Parity.Location, ['location_name'], name, value)


        class LogLst(Entity):
            """
            A single log line.

            .. attribute:: log_line

            	**type**\: str

            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.ShowAllInstances.Parity.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.ShowAllInstances.Parity.Location.LogLst, ['log_line'], name, value)
class Generic(Entity):
    """
    ASIC error logs of the ``generic`` category, grouped per location.

    .. attribute:: location

    	Per\-location error log entries
    	**type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Generic.Location>`

    """

    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        super(AsicErrors.ShowAllInstances.Generic, self).__init__()

        # NOTE: every assignment below is intercepted by the __setattr__
        # override and handed to Entity._perform_setattr, so the generated
        # statement order is part of the initialization contract.
        self.yang_name = "generic"
        self.yang_parent_name = "show-all-instances"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.ShowAllInstances.Generic.Location))])
        self._leafs = OrderedDict()  # no leaf nodes on this container

        self.location = YList(self)  # YANG list "location"
        # relative XPath segment of this container
        self._segment_path = lambda: "generic"

    def __setattr__(self, name, value):
        # empty key list: this container has no leaf attributes of its own
        self._perform_setattr(AsicErrors.ShowAllInstances.Generic, [], name, value)


    class Location(Entity):
        """
        One per\-location set of log entries, keyed by location\-name.

        .. attribute:: location_name  (key)

        	**type**\: str

        	**pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?

        .. attribute:: log_lst

        	**type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Generic.Location.LogLst>`

        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.ShowAllInstances.Generic.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "generic"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.ShowAllInstances.Generic.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None  # list key; filled in by the caller

            self.log_lst = YList(self)
            # key value is embedded in the path predicate at access time
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.ShowAllInstances.Generic.Location, ['location_name'], name, value)


        class LogLst(Entity):
            """
            A single log line.

            .. attribute:: log_line

            	**type**\: str

            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.ShowAllInstances.Generic.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.ShowAllInstances.Generic.Location.LogLst, ['log_line'], name, value)
class Crc(Entity):
    """
    ASIC error logs of the ``crc`` category, grouped per location.

    .. attribute:: location

    	Per\-location error log entries
    	**type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Crc.Location>`

    """

    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        super(AsicErrors.ShowAllInstances.Crc, self).__init__()

        # NOTE: every assignment below is intercepted by the __setattr__
        # override and handed to Entity._perform_setattr, so the generated
        # statement order is part of the initialization contract.
        self.yang_name = "crc"
        self.yang_parent_name = "show-all-instances"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.ShowAllInstances.Crc.Location))])
        self._leafs = OrderedDict()  # no leaf nodes on this container

        self.location = YList(self)  # YANG list "location"
        # relative XPath segment of this container
        self._segment_path = lambda: "crc"

    def __setattr__(self, name, value):
        # empty key list: this container has no leaf attributes of its own
        self._perform_setattr(AsicErrors.ShowAllInstances.Crc, [], name, value)


    class Location(Entity):
        """
        One per\-location set of log entries, keyed by location\-name.

        .. attribute:: location_name  (key)

        	**type**\: str

        	**pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?

        .. attribute:: log_lst

        	**type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Crc.Location.LogLst>`

        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.ShowAllInstances.Crc.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "crc"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.ShowAllInstances.Crc.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None  # list key; filled in by the caller

            self.log_lst = YList(self)
            # key value is embedded in the path predicate at access time
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.ShowAllInstances.Crc.Location, ['location_name'], name, value)


        class LogLst(Entity):
            """
            A single log line.

            .. attribute:: log_line

            	**type**\: str

            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.ShowAllInstances.Crc.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.ShowAllInstances.Crc.Location.LogLst, ['log_line'], name, value)
class Reset(Entity):
    """
    ASIC error logs of the ``reset`` category, grouped per location.

    .. attribute:: location

    	Per\-location error log entries
    	**type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Reset.Location>`

    """

    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        super(AsicErrors.ShowAllInstances.Reset, self).__init__()

        # NOTE: every assignment below is intercepted by the __setattr__
        # override and handed to Entity._perform_setattr, so the generated
        # statement order is part of the initialization contract.
        self.yang_name = "reset"
        self.yang_parent_name = "show-all-instances"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.ShowAllInstances.Reset.Location))])
        self._leafs = OrderedDict()  # no leaf nodes on this container

        self.location = YList(self)  # YANG list "location"
        # relative XPath segment of this container
        self._segment_path = lambda: "reset"

    def __setattr__(self, name, value):
        # empty key list: this container has no leaf attributes of its own
        self._perform_setattr(AsicErrors.ShowAllInstances.Reset, [], name, value)


    class Location(Entity):
        """
        One per\-location set of log entries, keyed by location\-name.

        .. attribute:: location_name  (key)

        	**type**\: str

        	**pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?

        .. attribute:: log_lst

        	**type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Reset.Location.LogLst>`

        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.ShowAllInstances.Reset.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "reset"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.ShowAllInstances.Reset.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None  # list key; filled in by the caller

            self.log_lst = YList(self)
            # key value is embedded in the path predicate at access time
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.ShowAllInstances.Reset.Location, ['location_name'], name, value)


        class LogLst(Entity):
            """
            A single log line.

            .. attribute:: log_line

            	**type**\: str

            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.ShowAllInstances.Reset.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.ShowAllInstances.Reset.Location.LogLst, ['log_line'], name, value)
class Barrier(Entity):
    """
    ASIC error logs of the ``barrier`` category, grouped per location.

    .. attribute:: location

    	Per\-location error log entries
    	**type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Barrier.Location>`

    """

    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        super(AsicErrors.ShowAllInstances.Barrier, self).__init__()

        # NOTE: every assignment below is intercepted by the __setattr__
        # override and handed to Entity._perform_setattr, so the generated
        # statement order is part of the initialization contract.
        self.yang_name = "barrier"
        self.yang_parent_name = "show-all-instances"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.ShowAllInstances.Barrier.Location))])
        self._leafs = OrderedDict()  # no leaf nodes on this container

        self.location = YList(self)  # YANG list "location"
        # relative XPath segment of this container
        self._segment_path = lambda: "barrier"

    def __setattr__(self, name, value):
        # empty key list: this container has no leaf attributes of its own
        self._perform_setattr(AsicErrors.ShowAllInstances.Barrier, [], name, value)


    class Location(Entity):
        """
        One per\-location set of log entries, keyed by location\-name.

        .. attribute:: location_name  (key)

        	**type**\: str

        	**pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?

        .. attribute:: log_lst

        	**type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Barrier.Location.LogLst>`

        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.ShowAllInstances.Barrier.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "barrier"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.ShowAllInstances.Barrier.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None  # list key; filled in by the caller

            self.log_lst = YList(self)
            # key value is embedded in the path predicate at access time
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.ShowAllInstances.Barrier.Location, ['location_name'], name, value)


        class LogLst(Entity):
            """
            A single log line.

            .. attribute:: log_line

            	**type**\: str

            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.ShowAllInstances.Barrier.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.ShowAllInstances.Barrier.Location.LogLst, ['log_line'], name, value)
class Unexpected(Entity):
    """
    ASIC error logs of the ``unexpected`` category, grouped per location.

    .. attribute:: location

    	Per\-location error log entries
    	**type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Unexpected.Location>`

    """

    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        super(AsicErrors.ShowAllInstances.Unexpected, self).__init__()

        # NOTE: every assignment below is intercepted by the __setattr__
        # override and handed to Entity._perform_setattr, so the generated
        # statement order is part of the initialization contract.
        self.yang_name = "unexpected"
        self.yang_parent_name = "show-all-instances"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.ShowAllInstances.Unexpected.Location))])
        self._leafs = OrderedDict()  # no leaf nodes on this container

        self.location = YList(self)  # YANG list "location"
        # relative XPath segment of this container
        self._segment_path = lambda: "unexpected"

    def __setattr__(self, name, value):
        # empty key list: this container has no leaf attributes of its own
        self._perform_setattr(AsicErrors.ShowAllInstances.Unexpected, [], name, value)


    class Location(Entity):
        """
        One per\-location set of log entries, keyed by location\-name.

        .. attribute:: location_name  (key)

        	**type**\: str

        	**pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?

        .. attribute:: log_lst

        	**type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Unexpected.Location.LogLst>`

        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.ShowAllInstances.Unexpected.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "unexpected"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.ShowAllInstances.Unexpected.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None  # list key; filled in by the caller

            self.log_lst = YList(self)
            # key value is embedded in the path predicate at access time
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.ShowAllInstances.Unexpected.Location, ['location_name'], name, value)


        class LogLst(Entity):
            """
            A single log line.

            .. attribute:: log_line

            	**type**\: str

            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.ShowAllInstances.Unexpected.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.ShowAllInstances.Unexpected.Location.LogLst, ['log_line'], name, value)
class Link(Entity):
    """
    ASIC error logs of the ``link`` category, grouped per location.

    .. attribute:: location

    	Per\-location error log entries
    	**type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Link.Location>`

    """

    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        super(AsicErrors.ShowAllInstances.Link, self).__init__()

        # NOTE: every assignment below is intercepted by the __setattr__
        # override and handed to Entity._perform_setattr, so the generated
        # statement order is part of the initialization contract.
        self.yang_name = "link"
        self.yang_parent_name = "show-all-instances"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.ShowAllInstances.Link.Location))])
        self._leafs = OrderedDict()  # no leaf nodes on this container

        self.location = YList(self)  # YANG list "location"
        # relative XPath segment of this container
        self._segment_path = lambda: "link"

    def __setattr__(self, name, value):
        # empty key list: this container has no leaf attributes of its own
        self._perform_setattr(AsicErrors.ShowAllInstances.Link, [], name, value)


    class Location(Entity):
        """
        One per\-location set of log entries, keyed by location\-name.

        .. attribute:: location_name  (key)

        	**type**\: str

        	**pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?

        .. attribute:: log_lst

        	**type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Link.Location.LogLst>`

        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.ShowAllInstances.Link.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "link"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.ShowAllInstances.Link.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None  # list key; filled in by the caller

            self.log_lst = YList(self)
            # key value is embedded in the path predicate at access time
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.ShowAllInstances.Link.Location, ['location_name'], name, value)


        class LogLst(Entity):
            """
            A single log line.

            .. attribute:: log_line

            	**type**\: str

            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.ShowAllInstances.Link.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.ShowAllInstances.Link.Location.LogLst, ['log_line'], name, value)
class OorThresh(Entity):
    """
    ASIC error logs of the ``oor-thresh`` category (category name taken from
    the YANG node; presumably an out-of-resource threshold — TODO confirm
    against the model), grouped per location.

    .. attribute:: location

    	Per\-location error log entries
    	**type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.OorThresh.Location>`

    """

    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        super(AsicErrors.ShowAllInstances.OorThresh, self).__init__()

        # NOTE: every assignment below is intercepted by the __setattr__
        # override and handed to Entity._perform_setattr, so the generated
        # statement order is part of the initialization contract.
        self.yang_name = "oor-thresh"
        self.yang_parent_name = "show-all-instances"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.ShowAllInstances.OorThresh.Location))])
        self._leafs = OrderedDict()  # no leaf nodes on this container

        self.location = YList(self)  # YANG list "location"
        # relative XPath segment of this container
        self._segment_path = lambda: "oor-thresh"

    def __setattr__(self, name, value):
        # empty key list: this container has no leaf attributes of its own
        self._perform_setattr(AsicErrors.ShowAllInstances.OorThresh, [], name, value)


    class Location(Entity):
        """
        One per\-location set of log entries, keyed by location\-name.

        .. attribute:: location_name  (key)

        	**type**\: str

        	**pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?

        .. attribute:: log_lst

        	**type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.OorThresh.Location.LogLst>`

        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.ShowAllInstances.OorThresh.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "oor-thresh"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.ShowAllInstances.OorThresh.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None  # list key; filled in by the caller

            self.log_lst = YList(self)
            # key value is embedded in the path predicate at access time
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.ShowAllInstances.OorThresh.Location, ['location_name'], name, value)


        class LogLst(Entity):
            """
            A single log line.

            .. attribute:: log_line

            	**type**\: str

            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.ShowAllInstances.OorThresh.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.ShowAllInstances.OorThresh.Location.LogLst, ['log_line'], name, value)
class Bp(Entity):
    """
    ASIC error logs of the ``bp`` category (category name taken from the
    YANG node; semantics not visible from this file), grouped per location.

    .. attribute:: location

    	Per\-location error log entries
    	**type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Bp.Location>`

    """

    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        super(AsicErrors.ShowAllInstances.Bp, self).__init__()

        # NOTE: every assignment below is intercepted by the __setattr__
        # override and handed to Entity._perform_setattr, so the generated
        # statement order is part of the initialization contract.
        self.yang_name = "bp"
        self.yang_parent_name = "show-all-instances"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.ShowAllInstances.Bp.Location))])
        self._leafs = OrderedDict()  # no leaf nodes on this container

        self.location = YList(self)  # YANG list "location"
        # relative XPath segment of this container
        self._segment_path = lambda: "bp"

    def __setattr__(self, name, value):
        # empty key list: this container has no leaf attributes of its own
        self._perform_setattr(AsicErrors.ShowAllInstances.Bp, [], name, value)


    class Location(Entity):
        """
        One per\-location set of log entries, keyed by location\-name.

        .. attribute:: location_name  (key)

        	**type**\: str

        	**pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?

        .. attribute:: log_lst

        	**type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Bp.Location.LogLst>`

        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.ShowAllInstances.Bp.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "bp"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.ShowAllInstances.Bp.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None  # list key; filled in by the caller

            self.log_lst = YList(self)
            # key value is embedded in the path predicate at access time
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.ShowAllInstances.Bp.Location, ['location_name'], name, value)


        class LogLst(Entity):
            """
            A single log line.

            .. attribute:: log_line

            	**type**\: str

            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.ShowAllInstances.Bp.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.ShowAllInstances.Bp.Location.LogLst, ['log_line'], name, value)
class Io(Entity):
    """
    ASIC error logs of the ``io`` category, grouped per location.

    .. attribute:: location

    	Per\-location error log entries
    	**type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Io.Location>`

    """

    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        super(AsicErrors.ShowAllInstances.Io, self).__init__()

        # NOTE: every assignment below is intercepted by the __setattr__
        # override and handed to Entity._perform_setattr, so the generated
        # statement order is part of the initialization contract.
        self.yang_name = "io"
        self.yang_parent_name = "show-all-instances"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.ShowAllInstances.Io.Location))])
        self._leafs = OrderedDict()  # no leaf nodes on this container

        self.location = YList(self)  # YANG list "location"
        # relative XPath segment of this container
        self._segment_path = lambda: "io"

    def __setattr__(self, name, value):
        # empty key list: this container has no leaf attributes of its own
        self._perform_setattr(AsicErrors.ShowAllInstances.Io, [], name, value)


    class Location(Entity):
        """
        One per\-location set of log entries, keyed by location\-name.

        .. attribute:: location_name  (key)

        	**type**\: str

        	**pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?

        .. attribute:: log_lst

        	**type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Io.Location.LogLst>`

        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.ShowAllInstances.Io.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "io"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.ShowAllInstances.Io.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None  # list key; filled in by the caller

            self.log_lst = YList(self)
            # key value is embedded in the path predicate at access time
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.ShowAllInstances.Io.Location, ['location_name'], name, value)


        class LogLst(Entity):
            """
            A single log line.

            .. attribute:: log_line

            	**type**\: str

            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.ShowAllInstances.Io.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.ShowAllInstances.Io.Location.LogLst, ['log_line'], name, value)
class Ucode(Entity):
    """
    Generated YDK model binding for the "ucode" container.

    .. attribute:: location

    **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Ucode.Location>`

    """

    # YANG module metadata consumed by the YDK runtime.
    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        super(AsicErrors.ShowAllInstances.Ucode, self).__init__()

        self.yang_name = "ucode"
        self.yang_parent_name = "show-all-instances"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.ShowAllInstances.Ucode.Location))])
        self._leafs = OrderedDict()

        # keyed list of per-location entries
        self.location = YList(self)
        self._segment_path = lambda: "ucode"

    def __setattr__(self, name, value):
        # All attribute writes are funnelled through the YDK framework,
        # which checks them against the metadata set up in __init__.
        self._perform_setattr(AsicErrors.ShowAllInstances.Ucode, [], name, value)


    class Location(Entity):
        """
        .. attribute:: location_name  (key)

        **type**\: str

        **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?

        .. attribute:: log_lst

        **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Ucode.Location.LogLst>`

        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.ShowAllInstances.Ucode.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "ucode"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.ShowAllInstances.Ucode.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None

            self.log_lst = YList(self)
            # the list key is embedded in the XPath segment
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.ShowAllInstances.Ucode.Location, ['location_name'], name, value)


        class LogLst(Entity):
            """
            .. attribute:: log_line

            **type**\: str

            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.ShowAllInstances.Ucode.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.ShowAllInstances.Ucode.Location.LogLst, ['log_line'], name, value)
class Config(Entity):
    """
    Generated YDK model binding for the "config" container.

    .. attribute:: location

    **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Config.Location>`

    """

    # YANG module metadata consumed by the YDK runtime.
    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        super(AsicErrors.ShowAllInstances.Config, self).__init__()

        self.yang_name = "config"
        self.yang_parent_name = "show-all-instances"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.ShowAllInstances.Config.Location))])
        self._leafs = OrderedDict()

        # keyed list of per-location entries
        self.location = YList(self)
        self._segment_path = lambda: "config"

    def __setattr__(self, name, value):
        # All attribute writes are funnelled through the YDK framework,
        # which checks them against the metadata set up in __init__.
        self._perform_setattr(AsicErrors.ShowAllInstances.Config, [], name, value)


    class Location(Entity):
        """
        .. attribute:: location_name  (key)

        **type**\: str

        **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?

        .. attribute:: log_lst

        **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Config.Location.LogLst>`

        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.ShowAllInstances.Config.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "config"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.ShowAllInstances.Config.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None

            self.log_lst = YList(self)
            # the list key is embedded in the XPath segment
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.ShowAllInstances.Config.Location, ['location_name'], name, value)


        class LogLst(Entity):
            """
            .. attribute:: log_line

            **type**\: str

            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.ShowAllInstances.Config.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.ShowAllInstances.Config.Location.LogLst, ['log_line'], name, value)
class Indirect(Entity):
    """
    Generated YDK model binding for the "indirect" container.

    .. attribute:: location

    **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Indirect.Location>`

    """

    # YANG module metadata consumed by the YDK runtime.
    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        super(AsicErrors.ShowAllInstances.Indirect, self).__init__()

        self.yang_name = "indirect"
        self.yang_parent_name = "show-all-instances"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.ShowAllInstances.Indirect.Location))])
        self._leafs = OrderedDict()

        # keyed list of per-location entries
        self.location = YList(self)
        self._segment_path = lambda: "indirect"

    def __setattr__(self, name, value):
        # All attribute writes are funnelled through the YDK framework,
        # which checks them against the metadata set up in __init__.
        self._perform_setattr(AsicErrors.ShowAllInstances.Indirect, [], name, value)


    class Location(Entity):
        """
        .. attribute:: location_name  (key)

        **type**\: str

        **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?

        .. attribute:: log_lst

        **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Indirect.Location.LogLst>`

        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.ShowAllInstances.Indirect.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "indirect"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.ShowAllInstances.Indirect.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None

            self.log_lst = YList(self)
            # the list key is embedded in the XPath segment
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.ShowAllInstances.Indirect.Location, ['location_name'], name, value)


        class LogLst(Entity):
            """
            .. attribute:: log_line

            **type**\: str

            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.ShowAllInstances.Indirect.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.ShowAllInstances.Indirect.Location.LogLst, ['log_line'], name, value)
class Nonerr(Entity):
    """
    Generated YDK model binding for the "nonerr" container.

    .. attribute:: location

    **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Nonerr.Location>`

    """

    # YANG module metadata consumed by the YDK runtime.
    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        super(AsicErrors.ShowAllInstances.Nonerr, self).__init__()

        self.yang_name = "nonerr"
        self.yang_parent_name = "show-all-instances"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.ShowAllInstances.Nonerr.Location))])
        self._leafs = OrderedDict()

        # keyed list of per-location entries
        self.location = YList(self)
        self._segment_path = lambda: "nonerr"

    def __setattr__(self, name, value):
        # All attribute writes are funnelled through the YDK framework,
        # which checks them against the metadata set up in __init__.
        self._perform_setattr(AsicErrors.ShowAllInstances.Nonerr, [], name, value)


    class Location(Entity):
        """
        .. attribute:: location_name  (key)

        **type**\: str

        **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?

        .. attribute:: log_lst

        **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Nonerr.Location.LogLst>`

        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.ShowAllInstances.Nonerr.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "nonerr"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.ShowAllInstances.Nonerr.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None

            self.log_lst = YList(self)
            # the list key is embedded in the XPath segment
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.ShowAllInstances.Nonerr.Location, ['location_name'], name, value)


        class LogLst(Entity):
            """
            .. attribute:: log_line

            **type**\: str

            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.ShowAllInstances.Nonerr.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.ShowAllInstances.Nonerr.Location.LogLst, ['log_line'], name, value)
class Summary(Entity):
    """
    Generated YDK model binding for the "summary" container.

    .. attribute:: location

    **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Summary.Location>`

    """

    # YANG module metadata consumed by the YDK runtime.
    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        super(AsicErrors.ShowAllInstances.Summary, self).__init__()

        self.yang_name = "summary"
        self.yang_parent_name = "show-all-instances"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.ShowAllInstances.Summary.Location))])
        self._leafs = OrderedDict()

        # keyed list of per-location entries
        self.location = YList(self)
        self._segment_path = lambda: "summary"

    def __setattr__(self, name, value):
        # All attribute writes are funnelled through the YDK framework,
        # which checks them against the metadata set up in __init__.
        self._perform_setattr(AsicErrors.ShowAllInstances.Summary, [], name, value)


    class Location(Entity):
        """
        .. attribute:: location_name  (key)

        **type**\: str

        **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?

        .. attribute:: log_lst

        **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.Summary.Location.LogLst>`

        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.ShowAllInstances.Summary.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "summary"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.ShowAllInstances.Summary.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None

            self.log_lst = YList(self)
            # the list key is embedded in the XPath segment
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.ShowAllInstances.Summary.Location, ['location_name'], name, value)


        class LogLst(Entity):
            """
            .. attribute:: log_line

            **type**\: str

            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.ShowAllInstances.Summary.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.ShowAllInstances.Summary.Location.LogLst, ['log_line'], name, value)
class All(Entity):
    """
    Generated YDK model binding for the "all" container.

    .. attribute:: location

    **type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.All.Location>`

    """

    # YANG module metadata consumed by the YDK runtime.
    _prefix = 'ael'
    _revision = '2017-07-05'

    def __init__(self):
        super(AsicErrors.ShowAllInstances.All, self).__init__()

        self.yang_name = "all"
        self.yang_parent_name = "show-all-instances"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([("location", ("location", AsicErrors.ShowAllInstances.All.Location))])
        self._leafs = OrderedDict()

        # keyed list of per-location entries
        self.location = YList(self)
        self._segment_path = lambda: "all"

    def __setattr__(self, name, value):
        # All attribute writes are funnelled through the YDK framework,
        # which checks them against the metadata set up in __init__.
        self._perform_setattr(AsicErrors.ShowAllInstances.All, [], name, value)


    class Location(Entity):
        """
        .. attribute:: location_name  (key)

        **type**\: str

        **pattern:** ((([fF][0\-3])/(([a\-zA\-Z]){2}\\d{1,2}))\|((0?[0\-9]\|1[1\-5])/((([a\-zA\-Z]){2,3})?\\d{1,2})))(/[cC][pP][uU]0)?

        .. attribute:: log_lst

        **type**\: list of :py:class:`LogLst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_asic_errors_ael.AsicErrors.ShowAllInstances.All.Location.LogLst>`

        """

        _prefix = 'ael'
        _revision = '2017-07-05'

        def __init__(self):
            super(AsicErrors.ShowAllInstances.All.Location, self).__init__()

            self.yang_name = "location"
            self.yang_parent_name = "all"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = ['location_name']
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("log-lst", ("log_lst", AsicErrors.ShowAllInstances.All.Location.LogLst))])
            self._leafs = OrderedDict([
                ('location_name', YLeaf(YType.str, 'location-name')),
            ])
            self.location_name = None

            self.log_lst = YList(self)
            # the list key is embedded in the XPath segment
            self._segment_path = lambda: "location" + "[location-name='" + str(self.location_name) + "']"

        def __setattr__(self, name, value):
            self._perform_setattr(AsicErrors.ShowAllInstances.All.Location, ['location_name'], name, value)


        class LogLst(Entity):
            """
            .. attribute:: log_line

            **type**\: str

            """

            _prefix = 'ael'
            _revision = '2017-07-05'

            def __init__(self):
                super(AsicErrors.ShowAllInstances.All.Location.LogLst, self).__init__()

                self.yang_name = "log-lst"
                self.yang_parent_name = "location"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('log_line', YLeaf(YType.str, 'log-line')),
                ])
                self.log_line = None
                self._segment_path = lambda: "log-lst"

            def __setattr__(self, name, value):
                self._perform_setattr(AsicErrors.ShowAllInstances.All.Location.LogLst, ['log_line'], name, value)
def clone_ptr(self):
    """Create a fresh AsicErrors top-level entity, cache it on self, and return it."""
    self._top_entity = AsicErrors()
    return self._top_entity
|
import psi4
import numpy
# NOTE(review): this looks like a run template — NUMTHREADS, CCCACHELEVEL and
# the #FLAGXYZ line inside the geometry string appear to be placeholders that
# a driver substitutes before execution; confirm upstream.
psi4.core.set_output_file('ccsdrun.out', False)
psi4.set_memory('24 GB')
mol = psi4.geometry("""
units ang
symmetry c1
#FLAGXYZ
""")
# Freeze centre of mass and orientation of the input geometry.
mol.fix_com(True)
mol.fix_orientation(True)
psi4.core.set_num_threads(NUMTHREADS)
psi4.set_options({'basis': 'cc-pvdz'})
psi4.set_options({'maxiter': 500})
psi4.set_options({'cachelevel': CCCACHELEVEL})
psi4.set_options({'freeze_core': 'true'})
psi4.set_options({'reference': 'rhf'})

# --- HF calculation --- #
E, wf = psi4.energy('scf', molecule=mol, return_wfn=True)
# Save HF-level arrays (density, Fock, MO coefficients, core Hamiltonian,
# orbital energies) as .npy files.
numpy.save("D-HF", wf.Da().to_array(False, True))
numpy.save("F-HF", wf.Fa().to_array(False, True))
numpy.save("C-HF", wf.Ca().to_array(False, True))
numpy.save("H-HF", wf.H().to_array(False, True))
numpy.save("e-HF", wf.epsilon_a().to_array(False, True))

# CCSD with Mulliken-charge properties, started from the HF wavefunction.
props, ccwf = psi4.properties('CCSD', molecule=mol, properties=['MULLIKEN_CHARGES'], return_wfn=True, ref_wfn=wf)
# Save the CCSD density matrix.
numpy.save("D-CCSD", ccwf.Da().to_array(False, True))
|
import re
import datetime
import jwt
from flask import request
from main import app
from main import (
maximum_age,
minimum_age,
)
from models import User, db
def validate_phonenumber_and_email_in_db(email, phonenumber):
    """Report whether *email* / *phonenumber* are already registered.

    Returns 0 (falsy) when neither exists, otherwise a ';'-joined string of
    'email-in-db' / 'phonenumber-in-db' markers.
    """
    taken = []
    if db.session.query(User).filter_by(email=email).count():
        taken.append('email-in-db')
    if db.session.query(User).filter_by(phonenumber=phonenumber).count():
        taken.append('phonenumber-in-db')
    return ';'.join(taken) if taken else 0
def validate_email(email):
    """Validate *email* syntax.

    Returns 0 (falsy) when valid, the string 'email' (truthy) when invalid,
    matching this module's invalid-field naming convention.
    """
    # Bug fix: the original used re.search with only \b boundaries, so any
    # string merely *containing* an address ("user@host.com <junk>") passed.
    # re.fullmatch anchors the pattern to the whole input.
    if re.fullmatch(r'[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}', email):
        return 0
    return 'email'
def validate_phonenumber(phonenumber):
    """Validate a phone number: optional (DD) area code, 5-digit prefix,
    optional '-' or space separator, 4-digit suffix.

    Returns 0 (falsy) when valid, 'phonenumber' (truthy) when invalid.
    """
    # Bug fixes vs. the original:
    #  * the pattern was unanchored (re.search), so any string merely
    #    containing a number passed validation;
    #  * the first alternative was written '(\)|)' — a second *closing*
    #    paren — so '(11) 91234-5678' only matched by skipping the '('.
    if re.fullmatch(r'\(?\d{2}\)?\s?\d{5}[-\s]?\d{4}', phonenumber):
        return 0
    return 'phonenumber'
def validate_date(date):
    """Validate a 'dd-mm-yyyy' birthday string and check that it falls
    strictly between the configured maximum_age and minimum_age bounds
    (each a (day, month, year) tuple imported from main).

    Returns 0 (falsy) when valid, 'date' (truthy) when invalid.
    """
    parts = date.strip().split('-')
    # Bug fix: require exactly day-month-year — the original silently
    # ignored extra '-'-separated fields (e.g. '1-2-2000-9' parsed as
    # 1-2-2000 and could validate).
    if len(parts) != 3:
        return 'date'
    try:
        parsed = datetime.datetime(day=int(parts[0]),
                                   month=int(parts[1]),
                                   year=int(parts[2]))
    except ValueError:
        return 'date'
    oldest = datetime.datetime(day=maximum_age[0],
                               month=maximum_age[1],
                               year=maximum_age[2])
    youngest = datetime.datetime(day=minimum_age[0],
                                 month=minimum_age[1],
                                 year=minimum_age[2])
    if not (oldest < parsed < youngest):
        return 'date'
    return 0
def validate_username(username):
    """A username is valid when it is non-empty and not purely numeric.

    Returns 0 (falsy) when valid, 'username' (truthy) when invalid.
    """
    is_valid = bool(username) and not username.isdigit()
    return 0 if is_valid else 'username'
def validate_registration(json_user):
    """Validate a registration payload dict.

    Returns 0 when every field passes, a one-element list naming the first
    missing key, or a list of invalid-field identifiers otherwise.
    """
    required = ['username', 'birthday', 'phonenumber', 'email', 'password']
    for key in required:
        if key not in json_user:
            return [f'This key is missing {key}']

    # normalise surrounding whitespace on every supplied value
    for key in json_user:
        json_user[key] = json_user[key].strip()

    invalid_fields = []
    # Run the field validators in the module's canonical order; each returns
    # 0 on success or a truthy problem string.
    checks = (
        lambda: validate_email(json_user['email']),
        lambda: validate_phonenumber(json_user['phonenumber']),
        lambda: validate_phonenumber_and_email_in_db(json_user['email'],
                                                     json_user['phonenumber']),
        lambda: validate_date(json_user['birthday']),
        lambda: validate_username(json_user['username']),
    )
    for check in checks:
        answer = check()
        if answer:
            # the db check can report two ';'-joined problems at once;
            # splitting is a no-op for the single-name answers
            invalid_fields.extend(answer.split(';'))

    return invalid_fields if invalid_fields else 0
|
import pytest
from brownie import ETH_ADDRESS, chain
from brownie_tokens import MintableForkToken
# Every test in this module needs the synth fixtures installed first.
pytestmark = pytest.mark.usefixtures("add_synths")


@pytest.mark.parametrize("btc_idx", range(2))
def test_btc_to_eth(
    Settler, alice, bob, swap, sBTC, sETH, curve_sbtc, curve_seth, btc_idx
):
    # Mint one of the sBTC-pool coins to alice, approve the swap contract,
    # then send alice's entire ETH balance to bob so the final balance
    # check only sees swap proceeds.
    initial = MintableForkToken(curve_sbtc.coins(btc_idx))
    amount = 5 * 10 ** initial.decimals()
    initial._mint_for_testing(alice, amount)
    initial.approve(swap, 2 ** 256 - 1, {"from": alice})
    alice.transfer(bob, alice.balance())

    tx = swap.swap_into_synth(initial, sETH, amount, 0, {"from": alice})
    token_id = tx.events["Transfer"][-1]["token_id"]

    # advance the chain 600s before settling out (presumably the synth
    # settlement window — confirm against the swap contract)
    chain.mine(timedelta=600)

    amount = swap.token_info(token_id)["underlying_balance"]
    swap.swap_from_synth(token_id, ETH_ADDRESS, amount, 0, {"from": alice})
    # the settler contract address is the token id
    settler = Settler.at(hex(token_id))

    # no token residue left on the swap contract, the settler, or alice
    for coin in (initial, sETH, sBTC):
        assert coin.balanceOf(swap) == 0
        assert coin.balanceOf(settler) == 0
        assert coin.balanceOf(alice) == 0

    assert swap.balance() == 0
    assert settler.balance() == 0
    assert alice.balance() > 0

    assert swap.balanceOf(alice) == 0
@pytest.mark.parametrize("btc_idx", range(2))
def test_eth_to_btc(Settler, alice, swap, sBTC, sETH, curve_sbtc, curve_seth, btc_idx):
    # Swap alice's entire ETH balance into sBTC, then settle out into one
    # of the sBTC-pool coins.
    final = MintableForkToken(curve_sbtc.coins(btc_idx))
    tx = swap.swap_into_synth(
        ETH_ADDRESS, sBTC, alice.balance(), 0, {"from": alice, "value": alice.balance()}
    )
    token_id = tx.events["Transfer"][-1]["token_id"]

    # advance the chain 600s before settling out (presumably the synth
    # settlement window — confirm against the swap contract)
    chain.mine(timedelta=600)

    amount = swap.token_info(token_id)["underlying_balance"]
    swap.swap_from_synth(token_id, final, amount, 0, {"from": alice})
    # the settler contract address is the token id
    settler = Settler.at(hex(token_id))

    # no synth residue anywhere, and alice ends holding only `final`
    for coin in (sETH, sBTC):
        assert coin.balanceOf(swap) == 0
        assert coin.balanceOf(settler) == 0
        assert coin.balanceOf(alice) == 0

    assert swap.balance() == 0
    assert settler.balance() == 0
    assert alice.balance() == 0

    assert final.balanceOf(swap) == 0
    assert final.balanceOf(settler) == 0
    assert final.balanceOf(alice) > 0

    assert swap.balanceOf(alice) == 0
|
"""Constants for the Salus iT600 smart devices."""
# Temperature units
TEMP_CELSIUS = "°C"
# Supported features
SUPPORT_TARGET_TEMPERATURE = 1
SUPPORT_PRESET_MODE = 16
# HVAC modes
HVAC_MODE_OFF = "off"
HVAC_MODE_HEAT = "heat"
# HVAC states
CURRENT_HVAC_OFF = "off"
CURRENT_HVAC_HEAT = "heating"
CURRENT_HVAC_IDLE = "idle"
# Supported presets
PRESET_FOLLOW_SCHEDULE = "Follow Schedule"
PRESET_PERMANENT_HOLD = "Permanent Hold"
PRESET_OFF = "Off"
|
import dash
import dash_core_components as dcc
import dash_html_components as html
#import plotly.graph_objects as go
#from numpy import random
from graphFromText.graphByEquals import graphByEquals2, graphByEqualsFxn1, graphByEquals3, graphByEquals4
from graphFromText.graphEquations import graphEquations, graphEquationsWithFxn
# This is a dash application created by Pascal Nespeca on 9/9/2020
# created in order to help visualize systems of equations. Its initial
# intent is to facilitate tracking dependencies of Ordinary Differential
# Equations (ODE). The Python packages Plotly and Dash are used to publish the
# parsed equations.
#
# Portions of this code are borrowed from Jiahui Hwang's project
# https://github.com/jhwang1992/network-visualization
# Most notably, it borrows from the idea that the visualization
# of a graph data structure consists of nodes drawn as a scatter plot
# in plotly and the edges of the graph are drawn as lines.
#
# Notable differences are that the NetworkX library is not used
# and there is a rudimentary parser in the graphFromText folder.
# The graphByEquals2 function creates a graph from the equations
# entered by the user. This needs further development.
my_app = dash.Dash(__name__, suppress_callback_exceptions=True)
# Expose the underlying Flask server: gunicorn is pointed at this "app".
app = my_app.server

# Default (light) theme palette used when the layouts below are built.
colors = {'background': '#FFFFFF', 'text': '#111111'}

# Top-level shell: dcc.Location tracks the URL, and the display_page
# callback fills 'page-content' with the matching page layout.
my_app.layout = html.Div(style={'backgroundColor': colors['background']},
                         children=[
                             dcc.Location(id='url', refresh=False),
                             html.Div(id='page-content')
                         ])

# myText is global in scope, which can be problematic. This is actually
# allowed by dash.plotly.com
# see page https://dash.plotly.com/sharing-data-between-callbacks
myText = 'y = 3*x+u\nz = 4*x + 8*y + 9*v\nw = z+y^2+v\n'

# Markdown shown on the landing page (backslashes escape '*' for markdown).
HomeMD='''
From the following equations:
y = 3\*x+u
z = 4\*x + 8\*y + 9\*v
w = z+y^2+v
Comes the visualization:
'''

# Landing page: links to the tool and help pages plus a rendered example.
index_page = html.Div(id='index-page',
                      style={'backgroundColor': colors['background']},
                      children=[
                          dcc.Link(id='P1-link', children='Use Equation Symbol Visualization Tool',
                                   href='/page-1', style={'color': colors['text']}),
                          html.Br(),
                          dcc.Link(id='P2-link', children='How to use Equation Symbol Visualization',
                                   href='/page-2', style={'color': colors['text']}),
                          html.Hr(),
                          dcc.Markdown(id='Home-Eq1', children=HomeMD, style={'color': colors['text']}),
                          html.Br(),
                          html.Img(src=my_app.get_asset_url('graph1-white.png'))
                      ])
# Page 1: the interactive tool — equation textarea, options, rendered graph.
page_1_layout = html.Div(id='page1', style={'backgroundColor': colors['background']},
    children=[
        ############## Links elsewhere ####################################
        dcc.Link(id='page1-link1', children='How to use Equation Symbol Visualization', href='/page-2',
                 style={'color': colors['text']}),
        html.Br(),
        dcc.Link(id='page1-link2', children='Go back to home', href='/',
                 style={'color': colors['text']}),
        html.Hr(),
        html.H1(id='page1-H1', children='Equation Symbol Visualization',
                style={'color': colors['text']}),
        html.H2(id='page1-H2', children="Write out some equations in the text box below",
                style={'color': colors['text']}),
        dcc.Textarea(id='page-1-input', value=myText,
                     style={'width': '100%', 'height': 200, 'color': colors['text'],
                            'backgroundColor': colors['background']}),
        html.Br(),
        html.Button(id='submit-val', n_clicks=0, children='Submit'),
        html.Hr(),
        ############## Options ############################################
        html.Button(id='hide_or_show_opts-val', n_clicks=0, children='Show Options'),
        html.Br(),
        # Hidden by default; toggled by the page_1_hide_or_show_options callback.
        html.Section(id='page1-Options', title="Options", hidden=True,
                     style={'color': colors['text'], 'backgroundColor': colors['background']},
                     children=[
                         html.H4(id='page1-H4', children="Options",
                                 style={'color': colors['text'],
                                        'backgroundColor': colors['background']}),
                         #html.Br(),
                         ############## Light or Dark Theme Selector #######################
                         dcc.RadioItems(id='light-dark-theme',
                                        options=[{'label': i, 'value': i} for i in ['Light', 'Dark']],
                                        value='Light',
                                        labelStyle={'display': 'inline-block'},
                                        style={'color': colors['text'], 'backgroundColor': colors['background']}),
                         #html.Br(),
                         ############## Symbol-to-function mapping (added 1-4-2020) ########
                         dcc.RadioItems(id='fxn-option',
                                        options=[{'label': i, 'value': i} for i in ['Map symbols to functions',
                                                                                   'Do not map symbols to functions']],
                                        value='Do not map symbols to functions',
                                        labelStyle={'display': 'inline-block'},
                                        style={'color': colors['text'], 'backgroundColor': colors['background']})],
                     ),
        html.Hr(),
        ############## Graph of the graph here ############################
        dcc.Graph(id='page-1-graph', figure=graphEquations(myText)),
        ############## Display Graph in JSON / Python dict ################
        html.Hr(),
        html.Div(id='page-1-content',
                 style={'whiteSpace': 'pre-line', 'color': colors['text']})
    ])
#[dash.dependencies.Input('page-1-input', 'value')])
@my_app.callback(dash.dependencies.Output('page-1-content', 'children'),
                 [dash.dependencies.Input('submit-val', 'n_clicks'),
                  dash.dependencies.Input('fxn-option', 'value')],
                 [dash.dependencies.State('page-1-input', 'value')])
def page_1_text(n_clicks, fxnOptionSelection, input_value):
    """Render the parsed graph (or the parse error) as text on page 1.

    Triggered by the Submit button or a change of the symbol/function
    mapping option; the equations are read from the textarea state.
    """
    # Bug fix: the original unconditionally called graphByEquals2() and then
    # immediately re-parsed in both branches, doubling the parse work; it
    # also assigned a dead local `myText` that was never read.
    if fxnOptionSelection == 'Do not map symbols to functions':
        graphOfLeft, err = graphByEquals2(input_value)
    else:
        graphOfLeft, err = graphByEqualsFxn1(input_value)
    if err is None:
        return 'The graph in python dictionary / JSON form: {}'.format('\n' + str(graphOfLeft))
    else:
        return 'There was an error: {}'.format('\n' + str(err.args[0]))
'''[dash.dependencies.Input('page-1-input', 'value'),
dash.dependencies.Input('light-dark-theme', 'value')]'''
@my_app.callback(dash.dependencies.Output('page-1-graph', 'figure'),
                 [dash.dependencies.Input('submit-val', 'n_clicks'),
                  dash.dependencies.Input('light-dark-theme', 'value'),
                  dash.dependencies.Input('fxn-option', 'value')],
                 [dash.dependencies.State('page-1-input', 'value')])
def update_graph(n_clicks, lightOrDarkSelection, fxnOptionSelection, input_value):
    """Rebuild the equation graph figure and recolor it for the chosen theme."""
    equations = input_value
    if fxnOptionSelection == 'Do not map symbols to functions':
        myFig = graphEquations(equations)
    else:
        myFig = graphEquationsWithFxn(equations)

    ############### Graph Color Update ###############################
    if lightOrDarkSelection == 'Light':
        palette = {'background': '#FFFFFF', 'text': '#111111'}
    else:
        palette = {'background': '#111111', 'text': '#7FDBFF'}

    myFig['layout']['plot_bgcolor'] = palette['background']
    myFig['layout']['paper_bgcolor'] = palette['background']
    myFig['layout']['font_color'] = palette['text']
    # edge arrows are drawn as annotations; recolor each one
    for annotation in myFig['layout']['annotations']:
        annotation['arrowcolor'] = palette['text']
    return myFig
# Markdown body for the help page (page 2), rendered via dcc.Markdown.
how_to_markdown_text = '''
### Description
Equation Symbol Visualization is a [__dash app__](http://dash.plotly.com) created to create a visual representation of
information flow for systems of equations. It is presumed that the leftmost symbol on the left hand side of the equation
represents assignment.
### Usage
Simply navigate from home to the "Equation Symbol Visualization".
Enter equations with one symbol on the left hand side for assignment and one or more symbols on the right hand side
For example, if the following were entered in the text box:
y = 3\*x+u
z = 4\*x + 8\*y + 9\*v
w = z+y^2+v
v = -y+x
The following graph data structure as a python dictionary / JSON representation would be produced:
{'y': \['x', 'u'], 'z': \['x', 'y', 'v'], 'w': \['z', 'y', 'v'], 'v': \['y', 'x']}
Also, a visualization of the equations will be produced below the textbox of the equations which uses the dash and
plotly packages. Where the visualization would show the following:
* x and u would point to y
* x, y and v would point to z
* z, y and v would point to w
* y and x would point to v
### Notes
* Symbols such as '9bears' are considered illegal since the first character contains a number. However, the symbol
'bears9' is perfectly acceptable.
* There is an options section at the bottom of the web tool that allows a user to select a light or dark theme.
* There is also a new option to map symbols using functional notation, e.g. y = f(x).
* In the current implementation, note that in the event that more than one symbol appears on the left hand side,
it will be treated as if the leftmost symbol were used for assignment and the other symbol(s) were moved to the
right hand side. For example:
>y = 3\*x+u
>x+v = 8\*y + u
>z = v + y
>
>Will produce the following graph data structure of symbols:
>{'y': \['x', 'u'], 'x': \['y', 'u', 'v'], 'z': \['v', 'y']}
>
>Since both x and v are found in _x+v_ on the left hand side, x is leftmost symbol, v is moved over to the right
>hand side.
>
>In this case, the visualization would show the following:
>* x and u would point to y
>* y, u and v would point to x
>* v and y would point to z
### Source Code
* Source code can be found at [__Pascal's Git Hub repo__](https://github.com/Pascal1755/Equation-Symbol-Visualization)
### Future Improvements
Desired improvements would be as follows:
* Special visual representation for reflexive expressions used in coding statements, like x = x + 1
* More options for multiple assignments on the left hand side
* Support for inequality statements, i.e. <=, >=
* A user input to help identify unnecessary variables or unnecessary equations
### Usefulness
Utilizing the graphical representation of a system of equations can be helpful from the standpoint of identifying
relations between symbols, sub-systems of equations and identifying unnecessary equations.
'''
# Page 2: static help/usage page rendered from how_to_markdown_text.
page_2_layout = html.Div(style={'backgroundColor': colors['background']},
                         children=[
                             dcc.Link(id='page2-link1', children='Use Equation Symbol Visualization Tool',
                                      href='/page-1', style={'color': colors['text']}),
                             html.Br(),
                             dcc.Link(id='page2-link2', children='Go back to home', href='/',
                                      style={'color': colors['text']}),
                             html.Hr(),
                             html.H1(id='page2-H1', children='How to use Equation Symbol Visualization',
                                     style={'color': colors['text']}),
                             dcc.Markdown(id='page2-MD', children=how_to_markdown_text,
                                          style={'color': colors['text']})
                         ])
# Router: swap 'page-content' whenever the URL pathname changes.
@my_app.callback(dash.dependencies.Output('page-content', 'children'),
                 [dash.dependencies.Input('url', 'pathname')])
def display_page(pathname):
    """Return the layout for *pathname*; unknown paths fall back to the
    index page (a 404 "URL not found" page could be returned instead)."""
    routes = {
        '/page-1': page_1_layout,
        '/page-2': page_2_layout,
    }
    return routes.get(pathname, index_page)
@my_app.callback([dash.dependencies.Output('page1-H1','style'),
                  dash.dependencies.Output('page1-H2','style'),
                  dash.dependencies.Output('page1-H4','style'),
                  dash.dependencies.Output('page-1-input','style'),
                  dash.dependencies.Output('page-1-content','style'),
                  dash.dependencies.Output('page1','style'),
                  dash.dependencies.Output('page1-link1','style'),
                  dash.dependencies.Output('page1-link2','style'),
                  dash.dependencies.Output('light-dark-theme','style'),
                  dash.dependencies.Output('fxn-option','style')],
                 [dash.dependencies.Input('light-dark-theme','value')])
def lightDarkSelector(lightOrDarkSelection):
    """Apply the selected light/dark palette to all themed components.

    Also publishes the palette through the module-level ``colors`` so
    layouts built afterwards pick up the same theme.
    """
    global colors
    palette = ({'background': '#FFFFFF', 'text': '#111111'}
               if lightOrDarkSelection == 'Light'
               else {'background': '#111111', 'text': '#7FDBFF'})
    colors = palette
    themed = {'backgroundColor': palette['background'],
              'color': palette['text']}
    styles = [themed] * 10
    # The free-text input (output index 3) needs explicit sizing on top of
    # the theme colours.
    styles[3] = {'width': '100%', 'height': 200,
                 'color': palette['text'],
                 'backgroundColor': palette['background']}
    return styles
@my_app.callback([dash.dependencies.Output('page1-Options', 'hidden'),
                  dash.dependencies.Output('hide_or_show_opts-val','children')],
                 [dash.dependencies.Input('hide_or_show_opts-val', 'n_clicks')])
def page_1_hide_or_show_options(n_clicks):
    """Toggle the options panel; the button label names the NEXT action.

    Bug fix: Dash invokes callbacks with ``n_clicks=None`` before the first
    click (unless the component declares ``n_clicks=0``), and ``None % 2``
    raised a TypeError.  None is now treated as zero clicks, i.e. the panel
    starts hidden with the label 'Show Options'.
    """
    clicks = n_clicks or 0
    if clicks % 2 != 0:
        return [False, 'Hide Options']
    return [True, 'Show Options']
if __name__ == '__main__':
    # Launch the Dash development server; debug=True enables hot reload and
    # the in-browser error console (not suitable for production serving).
    my_app.run_server(debug=True)
import re
from object_detection.utils import config_util
import tensorflow as tf
from functools import reduce
def isfloat(value):
    """Return True if *value* can be converted to a float.

    Args:
        value: Input value of any type.

    Returns:
        bool: True if ``float(value)`` succeeds, False otherwise.
    """
    try:
        float(value)
        return True
    except (TypeError, ValueError):
        # Only conversion failures mean "not a float".  The previous bare
        # ``except:`` also swallowed KeyboardInterrupt/SystemExit.
        return False
def config_to_dict(pipeline_config_path):
    """Convert a pipeline_config_file from object_detection training into a
    plain dictionary.

    Args:
        pipeline_config_path: Path to the pipeline_config_file (.config)

    Returns:
        config_dict: Dictionary mirroring the config file, accessed with
            subscripts like config_dict['train_config']
    """
    # ``with`` guarantees the handle is closed (it used to be leaked).
    with open(pipeline_config_path, "r") as f:
        config_str = f.read()
    # Make some preprocessing to access and convert easier
    all_elems = config_str.split('\n')
    # delete all characters after '#' on each line (strip comments)
    all_elems_no_comments = [elem if '#' not in elem else elem[:elem.index('#')]
                             for elem in all_elems]
    # separate the elements by blanks; raw string so ``\s``/``\t`` are regex
    # character classes instead of (invalid) string escapes
    all_elems_no_blanks = re.split(r'\s|\t', str(reduce(lambda x, y: x + ' ' + y, all_elems_no_comments, '')))
    # add the surrounding {} for the whole dict
    all_real_elems = ['{']
    all_real_elems.extend(list(filter(lambda x: x != '' and x != '\n', all_elems_no_blanks)))
    all_real_elems.append('}')
    # True once a key was emitted, so the following value gets a ',' appended
    # to separate it from other key-value pairs
    start_attr = False
    # True for key-subdict pairs, where no ',' must follow (a '{' comes next)
    start_subdict = False
    depth = 0
    # last_attr[depth] = (key, offset into dict_str): used to merge repeated
    # keys into a list (otherwise later occurrences would be lost)
    last_attr = dict()
    # opened_list[depth] = True while a '[' for repeated keys is still open
    opened_list = dict()
    dict_str = ''
    # Each key and each value is one list element; '', '{' and '}' fillers
    # appear in between.
    for idx, elem in enumerate(all_real_elems):
        # ignore the elems without keys/values or which are quoted
        if '"' in elem or elem == '' or isfloat(elem):
            dict_str += elem
        elif '{' in elem:
            # going one step deeper
            depth += 1
            start_subdict = False
            start_attr = False
            dict_str += elem
        elif '}' in elem:
            # Going one step up: the last key at the previous depth no longer
            # matters.  Without this reset the same key name occurring later
            # in a completely different context would wrongly be merged into
            # a list with the earlier occurrence.
            if depth in last_attr:
                del last_attr[depth]
            if depth in opened_list:
                # a repeated-key list may still be open; close it
                if opened_list[depth]:
                    dict_str += "],"
                del opened_list[depth]
            depth -= 1
            dict_str += elem + ','
        # translate protobuf booleans into Python syntax
        elif elem == 'false':
            dict_str += 'False'
        elif elem == 'true':
            dict_str += 'True'
        # An element ending in ':' is a key.  Some keys lack the ':' but are
        # followed by a '{' in the next element.
        # BUGFIX: the lookahead previously used ``all_real_elems.index(elem)``,
        # which returns the FIRST occurrence of a repeated token and therefore
        # inspected the wrong neighbour; ``enumerate`` gives the real position.
        elif elem[-1] == ':' or (idx + 1 < len(all_real_elems)
                                 and '{' in all_real_elems[idx + 1]):
            if elem[-1] != ':':
                elem += ':'
                start_subdict = True
            # test if this key repeats at the current depth
            if depth in last_attr and last_attr[depth][0] == elem[:-1]:
                # second occurrence and the list is not yet started with '['
                if depth not in opened_list or not opened_list[depth]:
                    # go back into the string and insert '[' to open the list
                    rest_dict = dict_str[last_attr[depth][1]:]
                    rest_dict_key = rest_dict.split(':')[0]
                    rest_values = rest_dict[len(rest_dict_key)+1:]
                    dict_str = dict_str[:last_attr[depth][1]]
                    dict_str += '"' + elem[:-1] + '":[' + rest_values
                    # remember that a list is open at this depth
                    opened_list[depth] = True
                else:
                    # third (or later) occurrence: the list is already open,
                    # only the value has to be appended
                    pass
            else:
                # a different key: close any list that is still open here
                if depth in opened_list and opened_list[depth]:
                    dict_str += "],"
                    opened_list[depth] = False
                last_attr[depth] = (elem[:-1], len(dict_str))
                dict_str += '"' + elem[:-1] + '":'
            # every key (also a repeated one) awaits its value, so the value
            # gets its separating ',' below
            start_attr = True
        else:
            # bare string value (unquoted enum token etc.)
            dict_str += '"' + elem + '"'
        # append ',' after a finished key-value pair to separate pairs
        if not start_subdict and start_attr and elem[-1] != ':':
            start_attr = False
            dict_str += ','
    # The trailing ',' makes ``eval`` return a 1-tuple holding the dict.
    # NOTE(review): ``eval`` executes file content -- only use this on
    # trusted config files.
    config_dict = eval(dict_str)
    return config_dict[0]
def recursive_dict_to_str(sub_dict, depth=0):
    """Convert a dictionary recursively to a pretty config-file string.

    Args:
        sub_dict: (Sub-)dictionary which should be converted to a string.
        depth: Nesting depth of the current dictionary (controls indentation).

    Returns:
        dict_str: Dictionary string formatted for a .config file.
    """
    def _is_number(value):
        # local numeric test (mirrors isfloat) so the function stands alone
        try:
            float(value)
            return True
        except (TypeError, ValueError):
            return False

    # plain value: just render it
    if type(sub_dict) != dict:
        return str(sub_dict)
    dict_str = ''
    for key in sub_dict.keys():
        # Repeated config keys are stored as lists by config_to_dict(); the
        # protobuf text format represents them by repeating the key line.
        values = sub_dict[key] if type(sub_dict[key]) == list else [sub_dict[key]]
        for value in values:
            indent = ' ' * 2 * depth
            # bool must be tested before the numeric check (floats accept
            # bools); the text format needs lowercase true/false, not
            # Python's str(True) == 'True'
            if type(value) == bool:
                dict_str += indent + key + ': ' + str(value).lower() + '\n'
            elif _is_number(value):
                dict_str += indent + key + ': ' + str(value) + '\n'
            elif type(value) == str:
                dict_str += indent + key + ': "' + value + '"\n'
            else:
                # nested message: recurse with one extra indentation level
                dict_str += indent + key + ' {\n'
                dict_str += recursive_dict_to_str(value, depth + 1)
                dict_str += indent + '}\n'
    return dict_str
def dict_to_config(config_dict, new_pipeline_path):
    """Convert dictionary to a string and write the result readable (and pretty) to a pipeline_config_file.
    Args:
        config_dict: Dictionary which should be written to the config file
        new_pipeline_path: Path of the new pipeline_config_file where the dict will be written to
    Returns:
        dict_str: The string which is written to the pipeline_config_file"""
    dict_str = recursive_dict_to_str(config_dict)
    # NOTE(review): the file is opened in binary mode ("wb") but a ``str``
    # is written; confirm the installed tf.gfile version accepts this, or
    # switch to mode "w" / encode the string explicitly.
    with tf.gfile.Open(new_pipeline_path, "wb") as f:
        f.write(dict_str)
    return dict_str
|
# Configuration template for the twitter daemon.  The credential and
# connection fields are intentionally empty; fill them in (or load them
# from a secrets store) before running the daemon.
config = {
    # Twitter OAuth 1.0a application credentials
    "twitter_consumer_key" : "",
    "twitter_consumer_secret" : "",
    # OAuth access token of the acting user
    "twitter_oauth_token" : "",
    "twitter_oauth_token_secret" : "",
    # database credentials and PostgreSQL connection string
    "db_username" : "",
    "db_password" : "",
    "pg_connection_string" : "",
    # TCP port the daemon's control socket listens on
    "server_socket_port" : 9001,
    # daemon bookkeeping: pid file and log destinations
    "pidfile" : "/data/log/twitter-daemon.pid",
    "debug_log" : "/data/log/twitter-daemon-out.log",
    "error_log" : "/data/log/twitter-daemon-err.log",
}
|
from __future__ import print_function, with_statement
from _utils import githuboptparse, githublogin, get_filtered_repos
def has_webhook (hooks, url):
    """Return True if any hook in *hooks* is configured with *url*.

    The comparison is case-insensitive over the whole URL.
    """
    target = url.lower()
    return any(hook.config['url'].lower() == target for hook in hooks)
if __name__ == "__main__":
parser = githuboptparse()
parser.add_option('-w', '--webhook', action="store", dest="webhook",
help="URL for web hook")
options, additional_args = parser.parse_args()
ghcon = githublogin(options)
if ghcon is None:
print("No login details specified")
parser.parse_args(['-h'])
if options.webhook:
hook_config = {
"url": options.webhook,
"content_type": "json"
}
hook_events = ['pull_request', 'pull_request_review', 'pull_request_review_comment']
else:
print("You need to provide Slack webhook")
parser.parse_args(['-h'])
for repo in get_filtered_repos(ghcon, options):
if not has_webhook(repo.get_hooks(), options.webhook):
print("Adding webhook to {name}".format(name=repo.name))
repo.create_hook("web", hook_config, hook_events, active=True)
|
import math
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from ..loss import acc
from ..post_processing import BeamSearch
class PAN_PP_RecHead(nn.Module):
    """Recognition head of PAN++: crops per-instance features from the
    shared feature map, encodes each crop into a holistic feature and
    decodes it into a character sequence (greedy or beam search)."""
    def __init__(self,
                 input_dim,
                 hidden_dim,
                 voc,
                 char2id,
                 id2char,
                 beam_size=1,
                 feature_size=(8, 32)):
        # voc: vocabulary (len(voc) sets the classifier output size);
        # char2id / id2char: mappings between characters and class ids;
        # feature_size: fixed (height, width) every text crop is resized to;
        # beam_size <= 1 selects greedy decoding in forward().
        super(PAN_PP_RecHead, self).__init__()
        self.char2id = char2id
        self.id2char = id2char
        self.beam_size = beam_size
        self.conv = nn.Conv2d(input_dim,
                              hidden_dim,
                              kernel_size=3,
                              stride=1,
                              padding=1)
        self.bn = nn.BatchNorm2d(hidden_dim)
        self.relu = nn.ReLU(inplace=True)
        self.feature_size = feature_size
        self.encoder = Encoder(hidden_dim, voc, char2id, id2char)
        self.decoder = Decoder(hidden_dim, hidden_dim, 2, voc, char2id,
                               id2char)
        # He-style init for convolutions, constant init for batch norms
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
    def _upsample(self, x, output_size):
        # NOTE(review): F.upsample is deprecated in favour of F.interpolate;
        # kept as-is to match the installed torch version.
        return F.upsample(x, size=output_size, mode='bilinear')
    def extract_feature(self,
                        f,
                        output_size,
                        instance,
                        bboxes,
                        gt_words=None,
                        word_masks=None,
                        unique_labels=None):
        """Crop one fixed-size feature patch per labelled text instance.

        Returns (x_crops, words): stacked per-instance feature crops and the
        matching ground-truth word tensors (words is None at inference).
        Returns (None, None) when no usable instance is found.
        """
        x = self.conv(f)
        x = self.relu(self.bn(x))
        x = self._upsample(x, output_size)
        x_crops = []
        if gt_words is not None:
            words = []
        batch_size, _, H, W = x.size()
        # grow the boxes by pad_scale pixels; during training an additional
        # random jitter in [-pad_scale, pad_scale] is applied per coordinate
        pad_scale = 1
        pad = x.new_tensor([-1, -1, 1, 1], dtype=torch.long) * pad_scale
        if self.training:
            offset = x.new_tensor(np.random.randint(-pad_scale, pad_scale + 1,
                                                    bboxes.size()),
                                  dtype=torch.long)
            pad = pad + offset
        bboxes = bboxes + pad
        # boxes are (t, l, b, r): row coords clamped to [0, H], cols to [0, W]
        bboxes[:, :, (0, 2)] = bboxes[:, :, (0, 2)].clamp(0, H)
        bboxes[:, :, (1, 3)] = bboxes[:, :, (1, 3)].clamp(0, W)
        for i in range(x.size(0)):
            instance_ = instance[i:i + 1]
            if unique_labels is None:
                unique_labels_, _ = torch.unique(instance_,
                                                 sorted=True,
                                                 return_inverse=True)
            else:
                unique_labels_ = unique_labels[i]
            x_ = x[i]
            if gt_words is not None:
                gt_words_ = gt_words[i]
            if word_masks is not None:
                word_masks_ = word_masks[i]
            bboxes_ = bboxes[i]
            for label in unique_labels_:
                # label 0 is background
                if label == 0:
                    continue
                # skip instances masked out as "don't care"
                if word_masks is not None and word_masks_[label] == 0:
                    continue
                t, l, b, r = bboxes_[label]
                mask = (instance_[:, t:b, l:r] == label).float()
                # slightly dilate the instance mask (3x3 max pool)
                mask = F.max_pool2d(mask.unsqueeze(0),
                                    kernel_size=(3, 3),
                                    stride=1,
                                    padding=1)[0]
                if torch.sum(mask) == 0:
                    continue
                x_crop = x_[:, t:b, l:r] * mask
                _, h, w = x_crop.size()
                # rotate clearly vertical instances to horizontal
                if h > w * 1.5:
                    x_crop = x_crop.transpose(1, 2)
                x_crop = F.interpolate(x_crop.unsqueeze(0),
                                       self.feature_size,
                                       mode='bilinear')
                x_crops.append(x_crop)
                if gt_words is not None:
                    words.append(gt_words_[label])
        if len(x_crops) == 0:
            return None, None
        x_crops = torch.cat(x_crops)
        if gt_words is not None:
            words = torch.stack(words)
        else:
            words = None
        return x_crops, words
    def loss(self, input, target, reduce=True):
        """Masked sequence cross-entropy plus recognition accuracy.

        input: (N, L, D) logits; target: (N, L) character ids.  Positions
        whose target equals the PAD id are excluded from the loss.
        """
        EPS = 1e-6
        N, L, D = input.size()
        mask = target != self.char2id['PAD']
        input = input.contiguous().view(-1, D)
        target = target.contiguous().view(-1)
        # NOTE(review): ``reduce=False`` is deprecated in recent torch;
        # ``reduction='none'`` is the modern spelling -- confirm version.
        loss_rec = F.cross_entropy(input, target, reduce=False)
        loss_rec = loss_rec.view(N, L)
        # average over the non-PAD positions of each sequence
        loss_rec = torch.sum(loss_rec * mask.float(),
                             dim=1) / (torch.sum(mask.float(), dim=1) + EPS)
        acc_rec = acc(torch.argmax(input, dim=1).view(N, L),
                      target.view(N, L),
                      mask,
                      reduce=False)
        if reduce:
            loss_rec = torch.mean(loss_rec)  # [valid]
            acc_rec = torch.mean(acc_rec)
        losses = {'loss_rec': loss_rec, 'acc_rec': acc_rec}
        return losses
    def forward(self, x, target=None):
        """Teacher-forced decoding during training; greedy decoding or beam
        search at inference time, depending on ``beam_size``."""
        holistic_feature = self.encoder(x)
        if self.training:
            return self.decoder(x, holistic_feature, target)
        else:
            if self.beam_size <= 1:
                return self.decoder.forward_test(x, holistic_feature)
            else:
                return self.decoder.beam_search(x,
                                                holistic_feature,
                                                beam_size=self.beam_size)
class Encoder(nn.Module):
    """Encode a feature map into one holistic feature vector by attending
    over all spatial positions with a learned start-token query."""
    def __init__(self, hidden_dim, voc, char2id, id2char):
        super(Encoder, self).__init__()
        self.hidden_dim = hidden_dim
        self.vocab_size = len(voc)
        # the EOS embedding doubles as the start-of-sequence query
        self.START_TOKEN = char2id['EOS']
        self.emb = nn.Embedding(self.vocab_size, self.hidden_dim)
        self.att = MultiHeadAttentionLayer(self.hidden_dim, 8)
    def forward(self, x):
        n, c, height, width = x.size()
        # flatten the spatial grid: (N, C, H, W) -> (N, H*W, C)
        keys = x.view(n, c, height * width).permute(0, 2, 1)
        start_ids = x.new_full((n, ), self.START_TOKEN, dtype=torch.long)
        query = self.emb(start_ids)
        # attend the start-token query over every spatial position
        feature, _ = self.att(query, keys, keys)
        return feature
class Decoder(nn.Module):
    """Attention LSTM decoder that turns per-instance features plus a
    holistic feature into a character sequence."""
    def __init__(self, featrue_dim, hidden_dim, num_layers, voc, char2id,
                 id2char):
        # 'featrue_dim' [sic] kept to avoid breaking existing callers.
        super(Decoder, self).__init__()
        self.featrue_dim = featrue_dim
        self.hidden_dim = hidden_dim
        self.num_layers = num_layers
        self.vocab_size = len(voc)
        # EOS doubles as the start token; PAD marks unused positions
        self.START_TOKEN = char2id['EOS']
        self.END_TOKEN = char2id['EOS']
        self.NULL_TOKEN = char2id['PAD']
        self.id2char = id2char
        # stack of LSTM cells driven step by step
        self.lstm_u = nn.ModuleList()
        for i in range(self.num_layers):
            self.lstm_u.append(nn.LSTMCell(self.hidden_dim, self.hidden_dim))
        self.emb = nn.Embedding(self.vocab_size, self.hidden_dim)
        self.att = MultiHeadAttentionLayer(self.hidden_dim, 8)
        # classifier sees [attended context, top LSTM hidden state]
        self.cls = nn.Linear(self.hidden_dim + self.featrue_dim,
                             self.vocab_size)
    def forward(self, x, holistic_feature, target):
        """Teacher-forced decoding; returns (N, max_seq_len, vocab) logits.

        Step 0 feeds the holistic feature (its output slot is discarded),
        step 1 feeds the start token, later steps feed the previous
        ground-truth character.
        """
        batch_size, feature_dim, H, W = x.size()
        # flatten the spatial grid: (N, C, H, W) -> (N, H*W, C)
        x_flatten = x.view(batch_size, feature_dim, H * W).permute(0, 2, 1)
        max_seq_len = target.size(1)
        # per-layer (h, c) state, zero-initialised
        h = []
        for i in range(self.num_layers):
            h.append((x.new_zeros((x.size(0), self.hidden_dim),
                                  dtype=torch.float32),
                      x.new_zeros((x.size(0), self.hidden_dim),
                                  dtype=torch.float32)))
        out = x.new_zeros((x.size(0), max_seq_len + 1, self.vocab_size),
                          dtype=torch.float32)
        for t in range(max_seq_len + 1):
            if t == 0:
                # prime the LSTM with the holistic feature
                xt = holistic_feature
            elif t == 1:
                it = x.new_full((batch_size, ),
                                self.START_TOKEN,
                                dtype=torch.long)
                xt = self.emb(it)
            else:
                # teacher forcing: feed the previous ground-truth char
                it = target[:, t - 2]
                xt = self.emb(it)
            for i in range(self.num_layers):
                if i == 0:
                    inp = xt
                else:
                    inp = h[i - 1][0]
                h[i] = self.lstm_u[i](inp, h[i])
            ht = h[-1][0]
            # attend the hidden state over all spatial positions
            out_t, _ = self.att(ht, x_flatten, x_flatten)
            out_t = torch.cat((out_t, ht), dim=1)
            out_t = self.cls(out_t)
            out[:, t, :] = out_t
        # drop the priming step's output slot
        return out[:, 1:, :]
    def to_words(self, seqs, seq_scores=None):
        """Convert id sequences to strings, stopping at EOS and skipping
        PAD/UNK; optionally return a per-word mean character score."""
        EPS = 1e-6
        words = []
        word_scores = None
        if seq_scores is not None:
            word_scores = []
        for i in range(len(seqs)):
            word = ''
            word_score = 0
            for j, char_id in enumerate(seqs[i]):
                char_id = int(char_id)
                if char_id == self.END_TOKEN:
                    break
                if self.id2char[char_id] in ['PAD', 'UNK']:
                    continue
                word += self.id2char[char_id]
                if seq_scores is not None:
                    word_score += seq_scores[i, j]
            words.append(word)
            if seq_scores is not None:
                # mean score per kept character (EPS guards empty words)
                word_scores.append(word_score / (len(word) + EPS))
        return words, word_scores
    def forward_test(self, x, holistic_feature):
        """Greedy decoding (max 32 characters); returns (words, scores)."""
        batch_size, feature_dim, H, W = x.size()
        x_flatten = x.view(batch_size, feature_dim, H * W).permute(0, 2, 1)
        # stacked (layer, h/c, batch, hidden) LSTM state
        h = x.new_zeros(self.num_layers, 2, batch_size, self.hidden_dim)
        max_seq_len = 32
        seq = x.new_full((batch_size, max_seq_len + 1),
                         self.START_TOKEN,
                         dtype=torch.long)
        seq_score = x.new_zeros((batch_size, max_seq_len + 1),
                                dtype=torch.float32)
        # 1 while a sequence is still running (has not emitted EOS)
        end = x.new_ones((batch_size, ), dtype=torch.uint8)
        for t in range(max_seq_len + 1):
            if t == 0:
                # prime with the holistic feature; no symbol is emitted
                xt = holistic_feature
            else:
                it = seq[:, t - 1]
                xt = self.emb(it)
            for i in range(self.num_layers):
                if i == 0:
                    inp = xt
                else:
                    inp = h[i - 1, 0]
                h[i, 0], h[i, 1] = self.lstm_u[i](inp, (h[i, 0], h[i, 1]))
            ht = h[-1, 0]
            if t == 0:
                continue
            out_t, _ = self.att(ht, x_flatten, x_flatten)
            out_t = torch.cat((out_t, ht), dim=1)
            score = torch.softmax(self.cls(out_t), dim=1)
            # greedy: take the arg-max character at each step
            score, idx = torch.max(score, dim=1)
            seq[:, t] = idx
            seq_score[:, t] = score
            end = end & (idx != self.START_TOKEN)
            # stop early once every sequence has emitted EOS
            if torch.sum(end) == 0:
                break
        words, word_scores = self.to_words(seq[:, 1:], seq_score[:, 1:])
        return words, word_scores
    def beam_search(self, x, holistic_feature, beam_size=2):
        """Beam-search decoding (used when beam_size > 1).

        NOTE(review): ``self.att(x, ht)`` below passes only two positional
        arguments to MultiHeadAttentionLayer.forward(q, k, v) and uses the
        (tuple) result as a single tensor, unlike forward_test which calls
        ``self.att(ht, x_flatten, x_flatten)`` -- this path looks broken or
        written against a different attention API.  Confirm before enabling
        beam_size > 1.
        """
        batch_size, c, h, w = x.size()
        # replicate the feature map once per beam
        x_beam = x.repeat(1, beam_size, 1, 1).view(-1, c, h, w)
        def decode_step(inputs, h, k):
            # one LSTM/attention step; returns top-k (words, scores, state)
            if len(inputs.shape) == 1:
                inputs = self.emb(inputs)
            for i in range(self.num_layers):
                if i == 0:
                    xt = inputs
                else:
                    xt = h[i - 1, 0]
                h[i, 0], h[i, 1] = self.lstm_u[i](xt, (h[i, 0], h[i, 1]))
            ht = h[-1, 0]
            if ht.size(0) == batch_size:
                out_t = self.att(x, ht)
            else:
                out_t = self.att(x_beam, ht)
            out_t = torch.cat((out_t, ht), -1)
            out_t = torch.softmax(self.cls(out_t), dim=1)
            scores, words = torch.topk(out_t, k, dim=1, sorted=True)
            return words, scores, h
        bs = BeamSearch(decode_step, self.END_TOKEN, beam_size, 32)
        x0 = holistic_feature
        h = x.new_zeros(self.num_layers, 2, batch_size, self.hidden_dim)
        # priming step: feed the holistic feature, keep only the state
        words, scores, h = decode_step(x0, h, 1)
        init_inputs = x.new_full((batch_size, ),
                                 self.START_TOKEN,
                                 dtype=torch.long)
        seqs, seq_scores = bs.beam_search(init_inputs, h)
        words, _ = self.to_words(seqs)
        return words, seq_scores
class MultiHeadAttentionLayer(nn.Module):
    """Multi-head scaled dot-product attention with pre-LayerNorm applied
    to the query only.

    The output is reshaped to (batch, hidden_dim), i.e. the layer assumes
    a single query vector per batch element.
    """
    def __init__(self, hidden_dim, n_heads, dropout=0.1):
        super().__init__()
        assert hidden_dim % n_heads == 0
        self.hidden_dim = hidden_dim
        self.n_heads = n_heads
        self.head_dim = hidden_dim // n_heads
        self.fc_q = nn.Linear(hidden_dim, hidden_dim)
        self.fc_k = nn.Linear(hidden_dim, hidden_dim)
        self.fc_v = nn.Linear(hidden_dim, hidden_dim)
        self.fc_o = nn.Linear(hidden_dim, hidden_dim)
        self.dropout = nn.Dropout(dropout)
        self.layer_norm = nn.LayerNorm(hidden_dim, eps=1e-6)
        self.scale = math.sqrt(self.head_dim)
    def _split_heads(self, t, batch):
        # (batch, len, hidden) -> (batch, heads, len, head_dim)
        return t.view(batch, -1, self.n_heads,
                      self.head_dim).permute(0, 2, 1, 3)
    def forward(self, q, k, v, mask=None):
        batch = q.size(0)
        # pre-norm on the query only, then project q/k/v and split heads
        queries = self._split_heads(self.fc_q(self.layer_norm(q)), batch)
        keys = self._split_heads(self.fc_k(k), batch)
        values = self._split_heads(self.fc_v(v), batch)
        # scaled dot-product scores: (batch, heads, q_len, k_len)
        scores = torch.matmul(queries / self.scale, keys.permute(0, 1, 3, 2))
        if mask is not None:
            scores = scores.masked_fill(mask == 0, -1e10)
        weights = torch.softmax(scores, dim=-1)
        # dropout on the attention weights, then aggregate the values
        context = torch.matmul(self.dropout(weights), values)
        context = context.permute(0, 2, 1, 3).contiguous()
        context = context.view(batch, self.hidden_dim)
        out = self.dropout(self.fc_o(context))
        return out, weights
|
#
# PySNMP MIB module BTI7800-CONDITIONS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/BTI7800-CONDITIONS-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:24:44 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, SingleValueConstraint, ConstraintsUnion, ValueRangeConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ConstraintsUnion", "ValueRangeConstraint", "ValueSizeConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Counter64, Gauge32, enterprises, Unsigned32, ModuleIdentity, ObjectIdentity, Counter32, NotificationType, Integer32, iso, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, IpAddress, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "Gauge32", "enterprises", "Unsigned32", "ModuleIdentity", "ObjectIdentity", "Counter32", "NotificationType", "Integer32", "iso", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "IpAddress", "TimeTicks")
TextualConvention, RowStatus, DisplayString, TruthValue, DateAndTime = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "RowStatus", "DisplayString", "TruthValue", "DateAndTime")
# NOTE(review): this module is auto-generated by pysmi from the
# BTI7800-CONDITIONS-MIB ASN.1 source (see the header comment above).
# Do not hand-edit -- regenerate from the MIB instead.  ``mibBuilder`` is
# injected by the pysnmp MIB loader at import time.
bTI7800_CONDITIONS_MIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 18070, 2, 9, 1)).setLabel("bTI7800-CONDITIONS-MIB")
bTI7800_CONDITIONS_MIB.setRevisions(('2013-02-19 00:00',))
if mibBuilder.loadTexts: bTI7800_CONDITIONS_MIB.setLastUpdated('201302190000Z')
if mibBuilder.loadTexts: bTI7800_CONDITIONS_MIB.setOrganization('@ORGANIZATION')
# Textual conventions used by the conditions table
class ConfdString(TextualConvention, OctetString):
    status = 'current'
    displayHint = '1t'
class String(TextualConvention, OctetString):
    status = 'current'
    displayHint = '1t'
class Severity(TextualConvention, Integer32):
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))
    namedValues = NamedValues(("critical", 1), ("major", 2), ("minor", 3), ("not-alarmed", 4), ("not-reported", 5))
class ConditionCode(TextualConvention, Integer32):
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135))
    namedValues = NamedValues(("eqptMiss", 1), ("eqptUnkn", 2), ("eqptMism", 3), ("eqptFail", 4), ("eqptDgrd", 5), ("eqptComm", 6), ("upgr", 7), ("lpbk", 8), ("los", 9), ("lof", 10), ("loSync", 11), ("lola", 12), ("lom", 13), ("tim", 14), ("sd", 15), ("bdi", 16), ("pyldMism", 17), ("odtgMism", 18), ("ais-l", 19), ("ms-ais", 20), ("otu-ais", 21), ("odu-ais", 22), ("lck", 23), ("oci", 24), ("highBer", 25), ("lf", 26), ("rf", 27), ("rdi-l", 28), ("ms-rdi", 29), ("oprHighTh", 30), ("oprLowTh", 31), ("optHighTh", 32), ("optLowTh", 33), ("laserTempHighTh", 34), ("laserTempLowTh", 35), ("laserFail", 36), ("cfgUnsupp", 37), ("cfgFail", 38), ("lolightRx", 39), ("lolightTx", 40), ("feim", 41), ("feci", 42), ("contComS", 43), ("contComE", 44), ("loSpecRx", 45), ("tLossRxHt", 46), ("iaocp", 47), ("iaocm", 48), ("iaocb", 49), ("apsd", 50), ("pmi", 51), ("uneqO", 52), ("aisO", 53), ("posRx", 54), ("posTx", 55), ("obros", 56), ("chnDfc", 57), ("replUnitDegrade", 58), ("cnxMea", 59), ("cnxVldTmout", 60), ("posRxHigh", 61), ("posRxLow", 62), ("oprHighFail", 63), ("obrHt", 64), ("apr", 65), ("modTempHighTh", 66), ("modTempLowTh", 67), ("modTempShutdown", 68), ("envTempHighTh", 69), ("envTempLowTh", 70), ("envTempFail", 71), ("envVoltHighTh", 72), ("envVoltLowTh", 73), ("envVoltFail", 74), ("scmNmiDown", 75), ("scmNoNmConn", 76), ("eqptLatchOpen", 77), ("powerAbsent", 78), ("fanSpeedLowTh", 79), ("nonCoLocatedController", 80), ("preFecBerTh", 81), ("firmUpgrdReqd", 82), ("otuBerTh", 83), ("oduBerTh", 84), ("pcsBerTh", 85), ("berTh-s", 86), ("berTh-l", 87), ("rs-berTh", 88), ("ms-berTh", 89), ("oneCableDisconnected", 90), ("envCurrentHighTh", 91), ("envCurrentLowTh", 92), ("prbs", 93), ("forced", 94), ("lockout", 95), ("tLossRxLt", 96), ("omsBdi", 97), ("ochAis", 98), ("ochRdi", 99), ("ochUeq", 100), ("ochOci", 101), ("defRDICCM", 102), ("defMACStatus", 103), ("defRemoteCCM", 104), ("defErrorCCM", 105), ("defXconCCM", 106), ("defBfdDown", 107), ("lf-tx", 108), ("apsData", 109), ("omsAis", 
110), ("isisDbOvrld", 111), ("isisXDown", 112), ("isisAdjDown", 113), ("isisAdjRejected", 114), ("rsvpAdjDown", 115), ("diskHighUsage", 116), ("memHighUsage", 117), ("invUnknown", 118), ("airfilterAbsense", 119), ("tx-msais", 120), ("tx-msrdi", 121), ("tx-aisl", 122), ("tx-rdil", 123), ("tx-rf", 124), ("tx-oduAis", 125), ("tx-oduLck", 126), ("tx-oduOci", 127), ("tx-opucsf", 128), ("firmUpgrdInProg", 129), ("firmUpgrdFail", 130), ("partitionFault", 131), ("oom", 132), ("lolck", 133), ("inventoryUnsupp", 134), ("eqptBrownout", 135))
# Active-conditions table, indexed by (entity name, condition code)
conditionsTable = MibTable((1, 3, 6, 1, 4, 1, 18070, 2, 9, 1, 1), )
if mibBuilder.loadTexts: conditionsTable.setStatus('current')
conditionsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 18070, 2, 9, 1, 1, 1), ).setIndexNames((0, "BTI7800-CONDITIONS-MIB", "conditionsEntityName"), (0, "BTI7800-CONDITIONS-MIB", "conditionsCode"))
if mibBuilder.loadTexts: conditionsEntry.setStatus('current')
conditionsEntityName = MibTableColumn((1, 3, 6, 1, 4, 1, 18070, 2, 9, 1, 1, 1, 1), String())
if mibBuilder.loadTexts: conditionsEntityName.setStatus('current')
conditionsCode = MibTableColumn((1, 3, 6, 1, 4, 1, 18070, 2, 9, 1, 1, 1, 2), ConditionCode())
if mibBuilder.loadTexts: conditionsCode.setStatus('current')
conditionsReportType = MibTableColumn((1, 3, 6, 1, 4, 1, 18070, 2, 9, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("non-alarmed", 1), ("alarmed", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: conditionsReportType.setStatus('current')
conditionsTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 18070, 2, 9, 1, 1, 1, 4), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: conditionsTimeStamp.setStatus('current')
conditionsSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 18070, 2, 9, 1, 1, 1, 5), Severity()).setMaxAccess("readonly")
if mibBuilder.loadTexts: conditionsSeverity.setStatus('current')
conditionsServiceAffecting = MibTableColumn((1, 3, 6, 1, 4, 1, 18070, 2, 9, 1, 1, 1, 6), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: conditionsServiceAffecting.setStatus('current')
conditionsDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 18070, 2, 9, 1, 1, 1, 7), String()).setMaxAccess("readonly")
if mibBuilder.loadTexts: conditionsDescription.setStatus('current')
mibBuilder.exportSymbols("BTI7800-CONDITIONS-MIB", conditionsServiceAffecting=conditionsServiceAffecting, conditionsTable=conditionsTable, conditionsReportType=conditionsReportType, ConfdString=ConfdString, conditionsSeverity=conditionsSeverity, String=String, ConditionCode=ConditionCode, conditionsDescription=conditionsDescription, conditionsCode=conditionsCode, Severity=Severity, conditionsEntityName=conditionsEntityName, PYSNMP_MODULE_ID=bTI7800_CONDITIONS_MIB, conditionsEntry=conditionsEntry, conditionsTimeStamp=conditionsTimeStamp, bTI7800_CONDITIONS_MIB=bTI7800_CONDITIONS_MIB)
|
# from .iter_counter import IterationCounter
# from .visualizer import Visualizer
# from .metric_tracker import MetricTracker
from .util import *
# from .html import HTML
# from .pca import PCA
|
from os import walk
import h5py
import numpy as np
import spotipy
import json
from config.Database import Base
from config.Database import engine
from config.Database import Session
from models.Music import Music
from kohonen.kohonen import run
def main():
    """Load all stored music feature vectors and train the Kohonen map."""
    # create the database schema (no-op when the tables already exist)
    Base.metadata.create_all(engine)
    session = Session()
    try:
        musics = session.query(Music).all()
        # one 12-dimensional feature vector per track
        data = np.array([music.toObject()['valor'] for music in musics])
        data = data.reshape(len(musics), 12)
        run(data, musics)
        session.commit()
    finally:
        # always release the session, even if the query or training fails
        # (the original leaked the session on any exception)
        session.close()
if __name__ == "__main__":
main()
|
from django.urls import path
from .views import (
order_success,
order_progress,
OrderListView,
order_detail,
)
# URL namespace: reverse these as 'order:success', 'order:detail', etc.
app_name = 'order'
urlpatterns = [
    # customer-facing checkout outcome pages
    path('order/success/', order_success, name="success"),
    path('order/progress/', order_progress, name="progress"),
    path('order/detail/<str:order_id>/', order_detail, name="detail"),
    # list of currently active orders
    path('active/orders/', OrderListView.as_view(), name="list"),
]
|
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
import numpy
import unittest
from pyscf import gto
from pyscf import scf
mol = gto.M(
verbose = 5,
output = '/dev/null',
atom = '''
O 0 0 0
H 0 -0.757 0.587
H 0 0.757 0.587''',
basis = 'cc-pvdz',
)
class KnowValues(unittest.TestCase):
    """Regression tests pinning total energies for sfX2C-1e and X2C-1e."""
    def test_sfx2c1e(self):
        # spin-free X2C-1e on top of RHF, with contracted and uncontracted
        # basis used when building the X matrix
        myx2c = scf.x2c.sfx2c1e(scf.RHF(mol))
        myx2c.with_x2c.xuncontract = False
        e = myx2c.kernel()
        self.assertAlmostEqual(e, -76.081765429967618, 9)
        myx2c.with_x2c.xuncontract = True
        e = myx2c.kernel()
        self.assertAlmostEqual(e, -76.075429077955874, 9)
    def test_sfx2c1e_cart(self):
        # same as above but with Cartesian GTOs
        pmol = mol.copy()
        pmol.cart = True
        myx2c = scf.x2c.sfx2c1e(scf.RHF(pmol))
        myx2c.with_x2c.xuncontract = False
        e = myx2c.kernel()
        self.assertAlmostEqual(e, -76.081452837461342, 9)
    def test_x2c1e(self):
        # full (spin-dependent) X2C-1e with UHF
        myx2c = scf.x2c.UHF(mol)
        myx2c.with_x2c.xuncontract = False
        e = myx2c.kernel()
        self.assertAlmostEqual(e, -76.08176796102066, 9)
        myx2c.with_x2c.xuncontract = True
        e = myx2c.kernel()
        self.assertAlmostEqual(e, -76.075431226329414, 9)
if __name__ == "__main__":
print("Full Tests for x2c")
unittest.main()
|
#encoding=utf-8
from math import sin, pi
import time
import serial
import struct
import numpy as np
import h5py
import string
import binascii
# Default device address of the hand on the serial bus.
# NOTE(review): ``global`` at module level is a no-op; the statement could
# be removed without changing behaviour.
global hand_id
hand_id = 1
# Split a 16-bit value into its low and high byte
def data2bytes(data):
    """Return *data* as [low byte, high byte] (little-endian order).

    A value of -1 is the protocol's "keep current setting" marker and
    maps to [0xFF, 0xFF].
    """
    if data == -1:
        return [0xff, 0xff]
    return [data & 0xff, (data >> 8) & 0xff]
# Convert a byte value to a single-byte bytes object
def num2str(num):
    """Return the low byte of *num* as a 1-byte ``bytes`` object.

    Example: num2str(0x05) -> b'\\x05'.

    The previous implementation built a hex string, sliced it and shadowed
    the ``str`` builtin; for values above 0xFF the slicing produced the
    wrong byte.  Masking with 0xFF keeps the documented behaviour for the
    0-255 range used by the frame builder and is well-defined beyond it.
    """
    return bytes([num & 0xff])
# Frame checksum: low byte of the sum of the payload bytes
def checknum(data,leng):
    """Return the checksum: sum of data[2:leng] truncated to 8 bits.

    The first two bytes (the 0xEB 0x90 frame header) are excluded.
    """
    return sum(data[2:leng]) & 0xff
class InspireHand:
    def __init__(self,
                 portName='/dev/ttyUSB0',
                 baudRate=115200,
                 hand_id=1):
        # Open the serial link to the hand controller.
        self.ser = serial.Serial(portName,baudRate)
        # NOTE(review): isOpen() only queries the port state; its return
        # value is discarded here.
        self.ser.isOpen()
        # Device address used in every command frame.
        self.hand_id = hand_id
#设置驱动器位置
def setpos(self,pos1,pos2,pos3,pos4,pos5,pos6):
if pos1 <-1 or pos1 >2000:
print('数据超出正确范围:-1-2000')
return
if pos2 <-1 or pos2 >2000:
print('数据超出正确范围:-1-2000')
return
if pos3 <-1 or pos3 >2000:
print('数据超出正确范围:-1-2000')
return
if pos4 <-1 or pos4 >2000:
print('数据超出正确范围:-1-2000')
return
if pos5 <-1 or pos5 >2000:
print('数据超出正确范围:-1-2000')
return
if pos6 <-1 or pos6 >2000:
print('数据超出正确范围:-1-2000')
return
datanum = 0x0F
b = [0]*(datanum + 5)
#包头
b[0] = 0xEB
b[1] = 0x90
#hand_id号
b[2] = self.hand_id
#数据个数
b[3] = datanum
#写操作
b[4] = 0x12
#地址
b[5] = 0xC2
b[6] = 0x05
#数据
b[7] = data2bytes(pos1)[0]
b[8] = data2bytes(pos1)[1]
b[9] = data2bytes(pos2)[0]
b[10] = data2bytes(pos2)[1]
b[11] = data2bytes(pos3)[0]
b[12] = data2bytes(pos3)[1]
b[13] = data2bytes(pos4)[0]
b[14] = data2bytes(pos4)[1]
b[15] = data2bytes(pos5)[0]
b[16] = data2bytes(pos5)[1]
b[17] = data2bytes(pos6)[0]
b[18] = data2bytes(pos6)[1]
#校验和
b[19] = checknum(b,datanum+4)
#向串口发送数据
putdata = b''
for i in range(1,datanum+6):
putdata = putdata + num2str(b[i-1])
self.ser.write(putdata)
#print('发送的数据:',putdata)
# print('发送的数据:')
# for i in range(1,datanum+6):
# print(hex(putdata[i-1]))
# getdata= self.ser.read(9)
# #print('返回的数据:',getdata)
# print('返回的数据:')
# for i in range(1,10):
# print(hex(getdata[i-1]))
#设置角度
def setangle(self,angle1,angle2,angle3,angle4,angle5,angle6):
if angle1 <-1 or angle1 >1000:
print('数据超出正确范围:-1-1000')
return
if angle2 <-1 or angle2 >1000:
print('数据超出正确范围:-1-1000')
return
if angle3 <-1 or angle3 >1000:
print('数据超出正确范围:-1-1000')
return
if angle4 <-1 or angle4 >1000:
print('数据超出正确范围:-1-1000')
return
if angle5 <-1 or angle5 >1000:
print('数据超出正确范围:-1-1000')
return
if angle6 <-1 or angle6 >1000:
print('数据超出正确范围:-1-1000')
return
datanum = 0x0F
b = [0]*(datanum + 5)
#包头
b[0] = 0xEB
b[1] = 0x90
#hand_id号
b[2] = self.hand_id
#数据个数
b[3] = datanum
#写操作
b[4] = 0x12
#地址
b[5] = 0xCE
b[6] = 0x05
#数据
b[7] = data2bytes(angle1)[0]
b[8] = data2bytes(angle1)[1]
b[9] = data2bytes(angle2)[0]
b[10] = data2bytes(angle2)[1]
b[11] = data2bytes(angle3)[0]
b[12] = data2bytes(angle3)[1]
b[13] = data2bytes(angle4)[0]
b[14] = data2bytes(angle4)[1]
b[15] = data2bytes(angle5)[0]
b[16] = data2bytes(angle5)[1]
b[17] = data2bytes(angle6)[0]
b[18] = data2bytes(angle6)[1]
#校验和
b[19] = checknum(b,datanum+4)
#向串口发送数据
putdata = b''
# for i in range(1,datanum+6):
# putdata = putdata + num2str(b[i-1])
# self.ser.write(putdata)
# print('发送的数据:')
# for i in range(1,datanum+6):
# print(hex(putdata[i-1]))
# getdata= self.ser.read(9)
# print('返回的数据:')
# for i in range(1,10):
# print(hex(getdata[i-1]))
#设置力控阈值
def setpower(self,power1,power2,power3,power4,power5,power6):
if power1 <0 or power1 >1000:
print('数据超出正确范围:0-1000')
return
if power2 <0 or power2 >1000:
print('数据超出正确范围:0-1000')
return
if power3 <0 or power3 >1000:
print('数据超出正确范围:0-1000')
return
if power4 <0 or power4 >1000:
print('数据超出正确范围:0-1000')
return
if power5 <0 or power5 >1000:
print('数据超出正确范围:0-1000')
return
if power6 <0 or power6 >1000:
print('数据超出正确范围:0-1000')
return
datanum = 0x0F
b = [0]*(datanum + 5)
#包头
b[0] = 0xEB
b[1] = 0x90
#hand_id号
b[2] = self.hand_id
#数据个数
b[3] = datanum
#写操作
b[4] = 0x12
#地址
b[5] = 0xDA
b[6] = 0x05
#数据
b[7] = data2bytes(power1)[0]
b[8] = data2bytes(power1)[1]
b[9] = data2bytes(power2)[0]
b[10] = data2bytes(power2)[1]
b[11] = data2bytes(power3)[0]
b[12] = data2bytes(power3)[1]
b[13] = data2bytes(power4)[0]
b[14] = data2bytes(power4)[1]
b[15] = data2bytes(power5)[0]
b[16] = data2bytes(power5)[1]
b[17] = data2bytes(power6)[0]
b[18] = data2bytes(power6)[1]
#校验和
b[19] = checknum(b,datanum+4)
#向串口发送数据
putdata = b''
for i in range(1,datanum+6):
putdata = putdata + num2str(b[i-1])
self.ser.write(putdata)
print('发送的数据:')
for i in range(1,datanum+6):
print(hex(putdata[i-1]))
getdata= self.ser.read(9)
print('返回的数据:')
for i in range(1,10):
print(hex(getdata[i-1]))
#设置速度
def setspeed(self,speed1,speed2,speed3,speed4,speed5,speed6):
if speed1 <0 or speed1 >1000:
print('数据超出正确范围:0-1000')
return
if speed2 <0 or speed2 >1000:
print('数据超出正确范围:0-1000')
return
if speed3 <0 or speed3 >1000:
print('数据超出正确范围:0-1000')
return
if speed4 <0 or speed4 >1000:
print('数据超出正确范围:0-1000')
return
if speed5 <0 or speed5 >1000:
print('数据超出正确范围:0-1000')
return
if speed6 <0 or speed6 >1000:
print('数据超出正确范围:0-1000')
return
datanum = 0x0F
b = [0]*(datanum + 5)
#包头
b[0] = 0xEB
b[1] = 0x90
#hand_id号
b[2] = self.hand_id
#数据个数
b[3] = datanum
#写操作
b[4] = 0x12
#地址
b[5] = 0xF2
b[6] = 0x05
#数据
b[7] = data2bytes(speed1)[0]
b[8] = data2bytes(speed1)[1]
b[9] = data2bytes(speed2)[0]
b[10] = data2bytes(speed2)[1]
b[11] = data2bytes(speed3)[0]
b[12] = data2bytes(speed3)[1]
b[13] = data2bytes(speed4)[0]
b[14] = data2bytes(speed4)[1]
b[15] = data2bytes(speed5)[0]
b[16] = data2bytes(speed5)[1]
b[17] = data2bytes(speed6)[0]
b[18] = data2bytes(speed6)[1]
#校验和
b[19] = checknum(b,datanum+4)
#向串口发送数据
putdata = b''
for i in range(1,datanum+6):
putdata = putdata + num2str(b[i-1])
self.ser.write(putdata)
# print('发送的数据:')
# for i in range(1,datanum+6):
# print(hex(putdata[i-1]))
# getdata= self.ser.read(9)
# print('返回的数据:')
# for i in range(1,10):
# print(hex(getdata[i-1]))
#读取驱动器实际的位置值
def get_setpos(self):
datanum = 0x04
b = [0]*(datanum + 5)
#包头
b[0] = 0xEB
b[1] = 0x90
#hand_id号
b[2] = self.hand_id
#数据个数
b[3] = datanum
#读操作
b[4] = 0x11
#地址
b[5] = 0xC2
b[6] = 0x05
#读取寄存器的长度
b[7] = 0x0C
#校验和
b[8] = checknum(b,datanum+4)
#向串口发送数据
putdata = b''
for i in range(1,datanum+6):
putdata = putdata + num2str(b[i-1])
self.ser.write(putdata)
#print('发送的数据:',putdata)
print('发送的数据:')
for i in range(1,datanum+6):
print(hex(putdata[i-1]))
getdata= self.ser.read(20)
print('返回的数据:')
for i in range(1,21):
print(hex(getdata[i-1]))
setpos = [1000]*6
for i in range(1,7):
if getdata[i*2+5]== 0xff and getdata[i*2+6]== 0xff:
setpos[i-1] = -1
else:
setpos[i-1] = getdata[i*2+5] + (getdata[i*2+6]<<8)
return setpos
#读取设置角度
def get_setangle(self):
datanum = 0x04
b = [0]*(datanum + 5)
#包头
b[0] = 0xEB
b[1] = 0x90
#hand_id号
b[2] = self.hand_id
#数据个数
b[3] = datanum
#读操作
b[4] = 0x11
#地址
b[5] = 0xCE
b[6] = 0x05
#读取寄存器的长度
b[7] = 0x0C
#校验和
b[8] = checknum(b,datanum+4)
#向串口发送数据
putdata = b''
for i in range(1,datanum+6):
putdata = putdata + num2str(b[i-1])
self.ser.write(putdata)
print('发送的数据:')
for i in range(1,datanum+6):
print(hex(putdata[i-1]))
getdata= self.ser.read(20)
print('返回的数据:')
for i in range(1,21):
print(hex(getdata[i-1]))
setangle = [0]*6
for i in range(1,7):
if getdata[i*2+5]== 0xff and getdata[i*2+6]== 0xff:
setangle[i-1] = -1
else:
setangle[i-1] = getdata[i*2+5] + (getdata[i*2+6]<<8)
return setangle
#读取驱动器设置的力控阈值
def get_setpower(self):
datanum = 0x04
b = [0]*(datanum + 5)
#包头
b[0] = 0xEB
b[1] = 0x90
#hand_id号
b[2] = self.hand_id
#数据个数
b[3] = datanum
#读操作
b[4] = 0x11
#地址
b[5] = 0xDA
b[6] = 0x05
#读取寄存器的长度
b[7] = 0x0C
#校验和
b[8] = checknum(b,datanum+4)
#向串口发送数据
putdata = b''
for i in range(1,datanum+6):
putdata = putdata + num2str(b[i-1])
self.ser.write(putdata)
print('发送的数据:')
for i in range(1,datanum+6):
print(hex(putdata[i-1]))
getdata= self.ser.read(20)
print('返回的数据:')
for i in range(1,21):
print(hex(getdata[i-1]))
setpower = [0]*6
for i in range(1,7):
if getdata[i*2+5]== 0xff and getdata[i*2+6]== 0xff:
setpower[i-1] = -1
else:
setpower[i-1] = getdata[i*2+5] + (getdata[i*2+6]<<8)
return setpower
#读取驱动器实际的位置值
def get_actpos(self):
datanum = 0x04
b = [0]*(datanum + 5)
#包头
b[0] = 0xEB
b[1] = 0x90
#hand_id号
b[2] = self.hand_id
#数据个数
b[3] = datanum
#读操作
b[4] = 0x11
#地址
b[5] = 0xFE
b[6] = 0x05
#读取寄存器的长度
b[7] = 0x0C
#校验和
b[8] = checknum(b,datanum+4)
#向串口发送数据
putdata = b''
for i in range(1,datanum+6):
putdata = putdata + num2str(b[i-1])
self.ser.write(putdata)
print('发送的数据:')
for i in range(1,datanum+6):
print(hex(putdata[i-1]))
getdata= self.ser.read(20)
print('返回的数据:')
for i in range(1,21):
print(hex(getdata[i-1]))
actpos = [0]*6
for i in range(1,7):
if getdata[i*2+5]== 0xff and getdata[i*2+6]== 0xff:
actpos[i-1] = -1
else:
actpos[i-1] = getdata[i*2+5] + (getdata[i*2+6]<<8)
return actpos
#读取实际的角度值
def get_actangle(self):
datanum = 0x04
b = [0]*(datanum + 5)
#包头
b[0] = 0xEB
b[1] = 0x90
#hand_id号
b[2] = self.hand_id
#数据个数
b[3] = datanum
#读操作
b[4] = 0x11
#地址
b[5] = 0x0A
b[6] = 0x06
#读取寄存器的长度
b[7] = 0x0C
#校验和
b[8] = checknum(b,datanum+4)
#向串口发送数据
putdata = b''
for i in range(1,datanum+6):
putdata = putdata + num2str(b[i-1])
self.ser.write(putdata)
print('发送的数据:')
for i in range(1,datanum+6):
print(hex(putdata[i-1]))
getdata= self.ser.read(20)
print('返回的数据:')
for i in range(1,21):
print(hex(getdata[i-1]))
actangle = [0]*6
for i in range(1,7):
if getdata[i*2+5]== 0xff and getdata[i*2+6]== 0xff:
actangle[i-1] = -1
else:
actangle[i-1] = getdata[i*2+5] + (getdata[i*2+6]<<8)
return actangle
#读取实际的受力
def get_actforce(self):
datanum = 0x04
b = [0]*(datanum + 5)
#包头
b[0] = 0xEB
b[1] = 0x90
#hand_id号
b[2] = self.hand_id
#数据个数
b[3] = datanum
#读操作
b[4] = 0x11
#地址
b[5] = 0x2E
b[6] = 0x06
#读取寄存器的长度
b[7] = 0x0C
#校验和
b[8] = checknum(b,datanum+4)
#向串口发送数据
putdata = b''
for i in range(1,datanum+6):
putdata = putdata + num2str(b[i-1])
self.ser.write(putdata)
print('发送的数据:')
for i in range(1,datanum+6):
print(hex(putdata[i-1]))
getdata= self.ser.read(20)
print('返回的数据:')
for i in range(1,21):
print(hex(getdata[i-1]))
actforce = [0]*6
for i in range(1,7):
if getdata[i*2+5]== 0xff and getdata[i*2+6]== 0xff:
actforce[i-1] = -1
else:
actforce[i-1] = getdata[i*2+5] + (getdata[i*2+6]<<8)
return actforce
#读取电流
def get_current(self):
datanum = 0x04
b = [0]*(datanum + 5)
#包头
b[0] = 0xEB
b[1] = 0x90
#hand_id号
b[2] = self.hand_id
#数据个数
b[3] = datanum
#读操作
b[4] = 0x11
#地址
b[5] = 0x3A
b[6] = 0x06
#读取寄存器的长度
b[7] = 0x0C
#校验和
b[8] = checknum(b,datanum+4)
#向串口发送数据
putdata = b''
for i in range(1,datanum+6):
putdata = putdata + num2str(b[i-1])
self.ser.write(putdata)
print('发送的数据:')
for i in range(1,datanum+6):
print(hex(putdata[i-1]))
getdata= self.ser.read(20)
print('返回的数据:')
for i in range(1,21):
print(hex(getdata[i-1]))
current = [0]*6
for i in range(1,7):
if getdata[i*2+5]== 0xff and getdata[i*2+6]== 0xff:
current[i-1] = -1
else:
current[i-1] = getdata[i*2+5] + (getdata[i*2+6]<<8)
return current
#读取故障信息
def get_error(self):
datanum = 0x04
b = [0]*(datanum + 5)
#包头
b[0] = 0xEB
b[1] = 0x90
#hand_id号
b[2] = self.hand_id
#数据个数
b[3] = datanum
#读操作
b[4] = 0x11
#地址
b[5] = 0x46
b[6] = 0x06
#读取寄存器的长度
b[7] = 0x06
#校验和
b[8] = checknum(b,datanum+4)
#向串口发送数据
putdata = b''
for i in range(1,datanum+6):
putdata = putdata + num2str(b[i-1])
self.ser.write(putdata)
print('发送的数据:')
for i in range(1,datanum+6):
print(hex(putdata[i-1]))
getdata= self.ser.read(14)
print('返回的数据:')
for i in range(1,15):
print(hex(getdata[i-1]))
error = [0]*6
for i in range(1,7):
error[i-1] = getdata[i+6]
return error
#读取状态信息
def get_status(self):
datanum = 0x04
b = [0]*(datanum + 5)
#包头
b[0] = 0xEB
b[1] = 0x90
#hand_id号
b[2] = self.hand_id
#数据个数
b[3] = datanum
#读操作
b[4] = 0x11
#地址
b[5] = 0x4C
b[6] = 0x06
#读取寄存器的长度
b[7] = 0x06
#校验和
b[8] = checknum(b,datanum+4)
#向串口发送数据
putdata = b''
for i in range(1,datanum+6):
putdata = putdata + num2str(b[i-1])
self.ser.write(putdata)
print('发送的数据:')
for i in range(1,datanum+6):
print(hex(putdata[i-1]))
getdata= self.ser.read(14)
print('返回的数据:')
for i in range(1,15):
print(hex(getdata[i-1]))
status = [0]*6
for i in range(1,7):
status[i-1] = getdata[i+6]
return status
#读取温度信息
def get_temp(self):
datanum = 0x04
b = [0]*(datanum + 5)
#包头
b[0] = 0xEB
b[1] = 0x90
#hand_id号
b[2] = self.hand_id
#数据个数
b[3] = datanum
#读操作
b[4] = 0x11
#地址
b[5] = 0x52
b[6] = 0x06
#读取寄存器的长度
b[7] = 0x06
#校验和
b[8] = checknum(b,datanum+4)
#向串口发送数据
putdata = b''
for i in range(1,datanum+6):
putdata = putdata + num2str(b[i-1])
self.ser.write(putdata)
print('发送的数据:')
for i in range(1,datanum+6):
print(hex(putdata[i-1]))
getdata= self.ser.read(14)
print('返回的数据:')
for i in range(1,15):
print(hex(getdata[i-1]))
temp = [0]*6
for i in range(1,7):
temp[i-1] = getdata[i+6]
return temp
#清除错误
def set_clear_error(self):
datanum = 0x04
b = [0]*(datanum + 5)
#包头
b[0] = 0xEB
b[1] = 0x90
#hand_id号
b[2] = self.hand_id
#数据个数
b[3] = datanum
#写操作
b[4] = 0x12
#地址
b[5] = 0xEC
b[6] = 0x03
#数据
b[7] = 0x01
#校验和
b[8] = checknum(b,datanum+4)
#向串口发送数据
putdata = b''
for i in range(1,datanum+6):
putdata = putdata + num2str(b[i-1])
self.ser.write(putdata)
print('发送的数据:')
for i in range(1,datanum+6):
print(hex(putdata[i-1]))
getdata= self.ser.read(9)
print('返回的数据:')
for i in range(1,10):
print(hex(getdata[i-1]))
#保存参数到FLASH
def set_save_flash(self):
datanum = 0x04
b = [0]*(datanum + 5)
#包头
b[0] = 0xEB
b[1] = 0x90
#hand_id号
b[2] = self.hand_id
#数据个数
b[3] = datanum
#写操作
b[4] = 0x12
#地址
b[5] = 0xED
b[6] = 0x03
#数据
b[7] = 0x01
#校验和
b[8] = checknum(b,datanum+4)
#向串口发送数据
putdata = b''
for i in range(1,datanum+6):
putdata = putdata + num2str(b[i-1])
self.ser.write(putdata)
print('发送的数据:')
for i in range(1,datanum+6):
print(hex(putdata[i-1]))
getdata= self.ser.read(18)
print('返回的数据:')
for i in range(1,19):
print(hex(getdata[i-1]))
#力传感器校准
def gesture_force_clb(self):
datanum = 0x04
b = [0]*(datanum + 5)
#包头
b[0] = 0xEB
b[1] = 0x90
#hand_id号
b[2] = self.hand_id
#数据个数
b[3] = datanum
#写操作
b[4] = 0x12
#地址
b[5] = 0xF1
b[6] = 0x03
#数据
b[7] = 0x01
#校验和
b[8] = checknum(b,datanum+4)
#向串口发送数据
putdata = b''
for i in range(1,datanum+6):
putdata = putdata + num2str(b[i-1])
self.ser.write(putdata)
print('发送的数据:')
for i in range(1,datanum+6):
print(hex(putdata[i-1]))
getdata= self.ser.read(18)
print('返回的数据:')
for i in range(1,19):
print(hex(getdata[i-1]))
#设置上电速度
def setdefaultspeed(self,speed1,speed2,speed3,speed4,speed5,speed6):
if speed1 <0 or speed1 >1000:
print('数据超出正确范围:0-1000')
return
if speed2 <0 or speed2 >1000:
return
if speed3 <0 or speed3 >1000:
return
if speed4 <0 or speed4 >1000:
return
if speed5 <0 or speed5 >1000:
return
if speed6 <0 or speed6 >1000:
return
datanum = 0x0F
b = [0]*(datanum + 5)
#包头
b[0] = 0xEB
b[1] = 0x90
#hand_id号
b[2] = self.hand_id
#数据个数
b[3] = datanum
#写操作
b[4] = 0x12
#地址
b[5] = 0x08
b[6] = 0x04
#数据
b[7] = data2bytes(speed1)[0]
b[8] = data2bytes(speed1)[1]
b[9] = data2bytes(speed2)[0]
b[10] = data2bytes(speed2)[1]
b[11] = data2bytes(speed3)[0]
b[12] = data2bytes(speed3)[1]
b[13] = data2bytes(speed4)[0]
b[14] = data2bytes(speed4)[1]
b[15] = data2bytes(speed5)[0]
b[16] = data2bytes(speed5)[1]
b[17] = data2bytes(speed6)[0]
b[18] = data2bytes(speed6)[1]
#校验和
b[19] = checknum(b,datanum+4)
#向串口发送数据
putdata = b''
for i in range(1,datanum+6):
putdata = putdata + num2str(b[i-1])
self.ser.write(putdata)
print('发送的数据:')
for i in range(1,datanum+6):
print(hex(putdata[i-1]))
getdata= self.ser.read(9)
print('返回的数据:')
for i in range(1,10):
print(hex(getdata[i-1]))
#设置上电力控阈值
def setdefaultpower(self,power1,power2,power3,power4,power5,power6):
if power1 <0 or power1 >1000:
print('数据超出正确范围:0-1000')
return
if power2 <0 or power2 >1000:
return
if power3 <0 or power3 >1000:
return
if power4 <0 or power4 >1000:
return
if power5 <0 or power5 >1000:
return
if power6 <0 or power6 >1000:
return
datanum = 0x0F
b = [0]*(datanum + 5)
#包头
b[0] = 0xEB
b[1] = 0x90
#hand_id号
b[2] = self.hand_id
#数据个数
b[3] = datanum
#写操作
b[4] = 0x12
#地址
b[5] = 0x14
b[6] = 0x04
#数据
b[7] = data2bytes(power1)[0]
b[8] = data2bytes(power1)[1]
b[9] = data2bytes(power2)[0]
b[10] = data2bytes(power2)[1]
b[11] = data2bytes(power3)[0]
b[12] = data2bytes(power3)[1]
b[13] = data2bytes(power4)[0]
b[14] = data2bytes(power4)[1]
b[15] = data2bytes(power5)[0]
b[16] = data2bytes(power5)[1]
b[17] = data2bytes(power6)[0]
b[18] = data2bytes(power6)[1]
#校验和
b[19] = checknum(b,datanum+4)
#向串口发送数据
putdata = b''
for i in range(1,datanum+6):
putdata = putdata + num2str(b[i-1])
self.ser.write(putdata)
print('发送的数据:')
for i in range(1,datanum+6):
print(hex(putdata[i-1]))
getdata= self.ser.read(9)
print('返回的数据:')
for i in range(1,10):
print(hex(getdata[i-1]))
def convert_optim_ang_exec_ang(optim_angles):
    """Convert optimized glove joint angles to Inspire-hand commands.

    Parameters
    ----------
    optim_angles : ndarray of shape (ndata, 12)
        Optimized joint angles in radians (per path point).

    Returns
    -------
    ndarray of shape (ndata, 6), dtype int32, values in 0..2000, with column
    order pinky, ring, middle, index, thumb roll, thumb rotation — or None
    when any converted command falls outside 0..2000.
    """
    ndata = optim_angles.shape[0]
    elec_signal = np.zeros((ndata, 6), dtype="int32")
    # calibration ranges: [angle at elec 0, angle at elec 2000]
    optim_fourfin_range = [0, -1.6]
    optim_thumbroll_range = [0.1, 0.0]
    optim_thumbrot_range = [-1.0, 0.3]  # effectively fixed with the S14 glove (this axis is not measured)
    max_elec = 2000
    min_elec = 0

    def _scale(angle, rng):
        # Linearly map `angle` from `rng` onto [min_elec, max_elec].
        return round((angle - rng[0]) / (rng[1] - rng[0]) * (max_elec - min_elec))

    for i in range(ndata):
        # four fingers (Inspire electrical order is pinky->ring->middle->index)
        elec_signal[i, 3] = _scale(optim_angles[i, 0], optim_fourfin_range)
        elec_signal[i, 2] = _scale(optim_angles[i, 2], optim_fourfin_range)
        elec_signal[i, 1] = _scale(optim_angles[i, 4], optim_fourfin_range)
        elec_signal[i, 0] = _scale(optim_angles[i, 6], optim_fourfin_range)
        # thumb roll
        elec_signal[i, 4] = _scale(optim_angles[i, 9], optim_thumbroll_range)
        # thumb rotation (fixed when using S14-generated data)
        elec_signal[i, 5] = _scale(optim_angles[i, 8], optim_thumbrot_range)
        # range check
        for j in range(6):
            if elec_signal[i, j] < 0 or elec_signal[i, j] > 2000:
                # BUGFIX: the original message swapped the joint index and the
                # path-point index and printed an unrelated optimizer column
                # instead of the offending electrical value.
                print("Error: Joint angle J{}={} of path point {} is out of bounds!".format(j, elec_signal[i, j], i))
                return None
    return elec_signal
if __name__ == "__main__":
    ### Read joint angles from file
    # h5py, time and np are imported earlier in this file (outside this view).
    h5_fname = '../h5_data/dmp_optimize_results.h5'
    group_name = 'baozhu_1'
    f = h5py.File(h5_fname,'r')
    # Res 1
    # l_glove_angles = f[group_name+'/l_glove_angle'][:]
    # r_glove_angles = f[group_name+'/r_glove_angle'][:]
    # Res 2
    # l_glove_angles = f[group_name+'/l_glove_angle_1'][:]
    # r_glove_angles = f[group_name+'/r_glove_angle_1'][:]
    # Res 3
    l_glove_angles = f[group_name+'/l_glove_angle_2'][:]
    r_glove_angles = f[group_name+'/r_glove_angle_2'][:]
    ### Convert optimized joint angles to meet execution requirements
    # Each result is an (ndata, 6) int32 array of 0..2000 commands, or None
    # if any converted value is out of bounds.
    l_glove_angles_elec = convert_optim_ang_exec_ang(l_glove_angles)
    r_glove_angles_elec = convert_optim_ang_exec_ang(r_glove_angles)
    ### Configure hand controllers
    # NOTE(review): assumes the left hand is on ttyUSB1 and the right on
    # ttyUSB0 — confirm the physical wiring before running.
    left_hand_controller = InspireHand('/dev/ttyUSB1',115200)
    right_hand_controller = InspireHand('/dev/ttyUSB0',115200)
    # go to initial state
    print(">>>> Go to initial state")
    left_hand_controller.setpos(0, 0, 0, 0, 0, 0)
    right_hand_controller.setpos(0, 0, 0, 0, 0, 0)
    time.sleep(1.0)
    # Deliberate breakpoint: execution pauses here until the operator
    # continues from the pdb prompt.
    import pdb
    pdb.set_trace()
    ### Left Hand Controller execution
    # set speed (when set as 1000, from min to max takes 800 ms)
    # left_hand_controller.setspeed(1000,1000,1000,1000,1000,1000) # useless???
    l_pos_list = l_glove_angles_elec
    # = np.array([[0, 0, 0, 0, 0, 0],
    #             [2000,2000,2000,2000,0,0],
    #             [2000,2000,2000,2000,0,2000],
    #             [2000,2000,2000,2000,500,2000]], dtype='int32')
    # Stream every path point to the left hand at ~20 Hz.
    for i in range(l_pos_list.shape[0]):
        print("Point {}".format(str(i+1)))
        left_hand_controller.setpos(l_pos_list[i, 0], l_pos_list[i, 1], l_pos_list[i, 2],
                                    l_pos_list[i, 3], l_pos_list[i, 4], l_pos_list[i, 5])
        time.sleep(0.05)
    ### Right Hand Controller execution
    # set speed (when set as 1000, from min to max takes 800 ms)
    # left_hand_controller.setspeed(1000,1000,1000,1000,1000,1000) # useless???
    r_pos_list = r_glove_angles_elec
    # = np.array([[0, 0, 0, 0, 0, 0],
    #             [2000,2000,2000,2000,0,0],
    #             [2000,2000,2000,2000,0,2000],
    #             [2000,2000,2000,2000,500,2000]], dtype='int32')
    # Stream every path point to the right hand at ~20 Hz.
    for i in range(r_pos_list.shape[0]):
        print("Point {}".format(str(i+1)))
        right_hand_controller.setpos(r_pos_list[i, 0], r_pos_list[i, 1], r_pos_list[i, 2],
                                     r_pos_list[i, 3], r_pos_list[i, 4], r_pos_list[i, 5])
        time.sleep(0.05)
    ### Back to open palm
    print(">>>> Back to initial state")
    time.sleep(1)
    left_hand_controller.setpos(0,0,0,0,0,0)#(2000,2000,2000,2000,2000,2000)#(0,0,0,0,0,0)
    right_hand_controller.setpos(0,0,0,0,0,0)
|
from collections import defaultdict
from heapq import heappop, heappush
from sqlalchemy import Boolean, ForeignKey, Integer
from sqlalchemy.orm import aliased, backref, relationship
from sqlalchemy.schema import UniqueConstraint
from eNMS.database import db
from eNMS.models.base import AbstractBase
from eNMS.forms import ServiceForm
from eNMS.fields import BooleanField, HiddenField, InstanceField, SelectField
from eNMS.models.automation import Service
from eNMS.runner import Runner
from eNMS.variables import vs
class Workflow(Service):
    """Service that orchestrates a graph of member services connected by
    success/failure edges between the shared "Start" and "End" services."""

    __tablename__ = "workflow"
    pretty_name = "Workflow"
    parent_type = "service"
    id = db.Column(Integer, ForeignKey("service.id"), primary_key=True)
    category = db.Column(db.SmallString)
    close_connection = db.Column(Boolean, default=False)
    labels = db.Column(db.Dict, info={"log_change": False})
    services = relationship(
        "Service", secondary=db.service_workflow_table, back_populates="workflows"
    )
    edges = relationship(
        "WorkflowEdge", back_populates="workflow", cascade="all, delete-orphan"
    )
    superworkflow_id = db.Column(
        Integer, ForeignKey("workflow.id", ondelete="SET NULL")
    )
    superworkflow = relationship(
        "Workflow", remote_side=[id], foreign_keys="Workflow.superworkflow_id"
    )
    __mapper_args__ = {"polymorphic_identity": "workflow"}

    def __init__(self, **kwargs):
        """Create the workflow, pre-attaching the shared Start/End services.

        Migration imports skip this step (and the End-position default)
        because the imported data already contains them.
        """
        migration_import = kwargs.get("migration_import", False)
        if not migration_import:
            start = db.fetch("service", scoped_name="Start", rbac=None)
            end = db.fetch("service", scoped_name="End", rbac=None)
            self.services.extend([start, end])
        super().__init__(**kwargs)
        if not migration_import and self.name not in end.positions:
            end.positions[self.name] = (500, 0)

    def delete(self):
        """Cascade-delete every non-shared member service."""
        for service in self.services:
            if not service.shared:
                db.delete_instance(service)

    def set_name(self, name=None):
        """Rename the workflow and propagate the rename to non-shared child
        services (names and stored canvas positions) and to edge names."""
        old_name = self.name
        super().set_name(name)
        for service in self.services:
            if not service.shared:
                service.set_name()
                if old_name in service.positions:
                    service.positions[self.name] = service.positions[old_name]
        for edge in self.edges:
            # BUGFIX: str.replace returns a new string; the original discarded
            # the result, so edge names were never actually updated.
            edge.name = edge.name.replace(old_name, self.name)

    def duplicate(self, workflow=None, clone=None):
        """Deep-copy this workflow: shared services are re-linked, non-shared
        services are duplicated, and all edges are recreated between the
        cloned endpoints. Returns the clone."""
        if not clone:
            clone = super().duplicate(workflow)
        clone.labels = self.labels
        clone_services = {}
        db.session.commit()
        for service in self.services:
            if service.shared:
                service_clone = service
                if service not in clone.services:
                    clone.services.append(service)
            else:
                service_clone = service.duplicate(clone)
            # carry over per-workflow canvas position and skip flag
            service_clone.positions[clone.name] = service.positions.get(
                self.name, (0, 0)
            )
            service_clone.skip[clone.name] = service.skip.get(self.name, False)
            clone_services[service.id] = service_clone
        db.session.commit()
        for edge in self.edges:
            clone.edges.append(
                db.factory(
                    "workflow_edge",
                    rbac=None,
                    **{
                        "workflow": clone.id,
                        "subtype": edge.subtype,
                        "source": clone_services[edge.source.id].id,
                        "destination": clone_services[edge.destination.id].id,
                    },
                )
            )
        db.session.commit()
        return clone

    @property
    def deep_services(self):
        """This workflow plus all member services, recursing into subworkflows."""
        services = [
            service.deep_services if service.type == "workflow" else [service]
            for service in self.services
        ]
        return [self] + sum(services, [])

    @property
    def deep_edges(self):
        """All edges of this workflow and of every nested subworkflow."""
        return sum([w.edges for w in self.deep_services if w.type == "workflow"], [])

    def job(self, run, device=None):
        """Run the workflow graph as a priority-ordered traversal.

        Services are processed from a heap keyed on 1/priority; successors
        are enqueued along success/failure edges. When `device` is set or the
        run method tracks per-service targets, device sets are propagated
        edge by edge. Returns the payload plus a success flag (and a
        per-device summary in tracking mode).
        """
        number_of_runs = defaultdict(int)
        start = db.fetch("service", scoped_name="Start")
        end = db.fetch("service", scoped_name="End")
        services, targets = [], defaultdict(set)
        start_targets = [device] if device else run.target_devices
        for service_id in run.start_services or [start.id]:
            service = db.fetch("service", id=service_id)
            targets[service.name] |= {device.name for device in start_targets}
            heappush(services, (1 / service.priority, service))
        visited, restart_run = set(), run.restart_run
        tracking_bfs = run.run_method == "per_service_with_workflow_targets"
        while services:
            if run.stop:
                return {"payload": run.payload, "success": False, "result": "Aborted"}
            _, service = heappop(services)
            if number_of_runs[service.name] >= service.maximum_runs:
                continue
            number_of_runs[service.name] += 1
            visited.add(service)
            if service in (start, end) or service.skip.get(self.name, False):
                # Start/End and skipped services are not executed; their
                # outcome is synthesized from the configured skip value.
                success = service.skip_value == "success"
                results = {"result": "skipped", "success": success}
                if tracking_bfs or device:
                    results["summary"] = {
                        "success": targets[service.name],
                        "failure": [],
                    }
            else:
                kwargs = {
                    "service": run.placeholder
                    if service.scoped_name == "Placeholder"
                    else service,
                    "workflow": self,
                    "restart_run": restart_run,
                    "parent": run,
                    "parent_runtime": run.parent_runtime,
                    "workflow_run_method": run.run_method,
                }
                if tracking_bfs or device:
                    kwargs["target_devices"] = [
                        db.fetch("device", name=name) for name in targets[service.name]
                    ]
                if run.parent_device:
                    kwargs["parent_device"] = run.parent_device
                results = Runner(run, payload=run.payload, **kwargs).results
            if not results:
                continue
            status = "success" if results["success"] else "failure"
            summary = results.get("summary", {})
            if not tracking_bfs and not device:
                run.write_state(f"progress/service/{status}", 1, "increment")
            for edge_type in ("success", "failure"):
                # Outside tracking mode only the edge matching the outcome is
                # followed; in tracking mode an edge is followed only when it
                # carries at least one device.
                if not tracking_bfs and edge_type != status:
                    continue
                if (tracking_bfs or device) and not summary[edge_type]:
                    continue
                for successor, edge in service.neighbors(
                    self, "destination", edge_type
                ):
                    if tracking_bfs or device:
                        targets[successor.name] |= set(summary[edge_type])
                    heappush(services, (1 / successor.priority, successor))
                    if tracking_bfs or device:
                        run.write_state(
                            f"edges/{edge.id}", len(summary[edge_type]), "increment"
                        )
                    else:
                        run.write_state(f"edges/{edge.id}", "DONE")
        if tracking_bfs or device:
            # Devices that never reached End are reported as failures.
            failed = list(targets[start.name] - targets[end.name])
            summary = {"success": list(targets[end.name]), "failure": failed}
            results = {
                "payload": run.payload,
                "success": not failed,
                "summary": summary,
            }
        else:
            results = {"payload": run.payload, "success": end in visited}
        run.restart_run = restart_run
        return results
class WorkflowForm(ServiceForm):
    """Edit form for Workflow services, extending the generic service form."""

    # Discriminator used to route the submitted form to this model.
    form_type = HiddenField(default="workflow")
    category = SelectField(
        "Category",
        choices=vs.dualize(vs.properties["property_list"]["workflow"]["category"]),
        validate_choice=False,
        default="Other",
    )
    close_connection = BooleanField(default=False)
    # How member services are scheduled across the target devices.
    run_method = SelectField(
        "Run Method",
        choices=(
            ("per_device", "Run the workflow device by device"),
            (
                "per_service_with_workflow_targets",
                "Run the workflow service by service using workflow targets",
            ),
            (
                "per_service_with_service_targets",
                "Run the workflow service by service using service targets",
            ),
        ),
    )
    # Optional enclosing workflow; restricted to workflows containing the
    # shared Placeholder service.
    superworkflow = InstanceField(
        "Superworkflow",
        constraints={"children": ["[Shared] Placeholder"], "children_filter": "union"},
    )
class WorkflowEdge(AbstractBase):
    """Directed edge between two services inside a workflow.

    The (subtype, source, destination, workflow) tuple is unique, so a given
    pair of services can be linked at most once per subtype and workflow.
    """

    __tablename__ = type = class_type = "workflow_edge"
    id = db.Column(Integer, primary_key=True)
    name = db.Column(db.SmallString, unique=True)
    label = db.Column(db.SmallString)
    color = db.Column(db.SmallString)
    # "success" or "failure" — which outcome of the source follows this edge.
    subtype = db.Column(db.SmallString)
    source_id = db.Column(Integer, ForeignKey("service.id"))
    source = relationship(
        "Service",
        primaryjoin="Service.id == WorkflowEdge.source_id",
        backref=backref("destinations", cascade="all, delete-orphan"),
        foreign_keys="WorkflowEdge.source_id",
    )
    destination_id = db.Column(Integer, ForeignKey("service.id"))
    destination = relationship(
        "Service",
        primaryjoin="Service.id == WorkflowEdge.destination_id",
        backref=backref("sources", cascade="all, delete-orphan"),
        foreign_keys="WorkflowEdge.destination_id",
    )
    workflow_id = db.Column(Integer, ForeignKey("workflow.id"))
    workflow = relationship(
        "Workflow", back_populates="edges", foreign_keys="WorkflowEdge.workflow_id"
    )
    __table_args__ = (
        UniqueConstraint(subtype, source_id, destination_id, workflow_id),
    )

    def __init__(self, **kwargs):
        """Derive the label and color from the edge subtype before the
        generic initialization."""
        self.label = kwargs["subtype"]
        self.color = "green" if kwargs["subtype"] == "success" else "red"
        super().__init__(**kwargs)

    def update(self, **kwargs):
        """Apply the update, then recompute the (possibly time-stamped) name."""
        super().update(**kwargs)
        self.set_name(kwargs.get("name"))

    @classmethod
    def rbac_filter(cls, query, mode, user):
        """Restrict `query` for edit mode to edges whose workflow the user can
        access through pool-based RBAC or direct ownership.

        NOTE(review): the second statement extends the already-filtered query
        and unions in the ownership branch — presumably intentional; confirm
        against the service-side rbac_filter implementations.
        """
        if mode == "edit":
            originals_alias = aliased(vs.models["service"])
            pool_alias = aliased(vs.models["pool"])
            user_alias = aliased(vs.models["user"])
            query = (
                query.join(cls.workflow)
                .join(vs.models["pool"], vs.models["service"].pools)
                .join(vs.models["access"], vs.models["pool"].access)
                .join(pool_alias, vs.models["access"].user_pools)
                .join(user_alias, pool_alias.users)
                .filter(vs.models["access"].access_type.contains(mode))
                .filter(user_alias.name == user.name)
            )
            query = (
                query.join(cls.workflow)
                .join(originals_alias, vs.models["service"].originals)
                .filter(~originals_alias.owners_access.contains(mode))
                .union(
                    query.join(vs.models["user"], originals_alias.owners).filter(
                        vs.models["user"].name == user.name
                    )
                )
            )
        return query

    def set_name(self, name=None):
        """Set the given name, or default to "[<workflow>] <timestamp>"."""
        self.name = name or f"[{self.workflow}] {vs.get_time()}"
|
#!/usr/bin/env python3
"""This is the main WordRoom script.
It contains all of the UI views and actions.
"""
# coding: utf-8
import os.path
import builtins
import json
import webbrowser
from urllib.parse import urlparse, unquote
import ui
import dialogs
import console
# import appex
from jinja2 import Environment, FileSystemLoader
from vocabulary import Vocabulary
import define
from config import VOCABULARY_FILE, CONFIG_FILE, HTML_DIR, UI_DIR
__author__ = 'John Jackson'
__copyright__ = 'Copyright 2018 John Jackson'
__license__ = 'MIT'
__version__ = '1.1.1'
__maintainer__ = "John Jackson"
__email__ = "jbpjackson@icloud.com"
# ---- Functions & button actions
# When convenient, button actions are set in the UI designer and defined here.
# Some button actions are more useful when set and defined inside their view
# classes.
def load_word_view(word=''):
    """Open a WordView for *word*.

    In the regular size class the word opens in the right column; in the
    compact size class it is pushed onto the navigation stack instead.
    """
    if container.horizontal_size_class() == AdaptiveView.REGULAR:
        target = word_view
        target.load_word(word)
    else:
        target = compact_word_view
        target.load_word(word)
        container.nav_column.push_view(target)
    container.open_words = [word]
def action_random(sender):
    """Button action: show a HUD notice, then open a random word."""
    dialogs.hud_alert('Random word opened.')
    load_word_view(vocab.random_word())
def export_notes_format(word, notes):
    """Return *word* and *notes* joined by a blank line, for exporting.

    This might need more sophisticated markup.
    """
    return '{}\n\n{}'.format(word, notes)
def action_share_multiple(sender):
    """Open the iOS share dialog to export selected words and notes."""
    table = sender.superview.superview['table']
    exported = []
    for section, row in table.selected_rows:
        cell = vocab.tableview_cell_for_row(table, section, row)
        word = cell.text_label.text
        exported.append(export_notes_format(word, vocab.get_notes(word)))
    dialogs.share_text('\n\n----\n\n'.join(exported))
def action_export(sender):
    """Open the iOS share dialog to send the vocabulary data file.

    The vocabulary is re-saved (with indentation) before being handed to
    the system "Open In" dialog.
    """
    vocab.save_json_file(indent=1)
    console.open_in(VOCABULARY_FILE)
@ui.in_background  # otherwise the alert is blocked
def action_import(sender):
    """Import a new vocabulary file.

    This selects a file from the iOS file picker and replaces the current
    vocabulary file with it. A corrupt JSON file aborts the import with an
    error HUD and leaves the in-memory vocabulary as whatever
    `load_json_file` left behind.
    """
    # NOTE(review): assumes console.alert returns a truthy button index when
    # "Okay" is pressed — confirm against the Pythonista console API.
    choice = console.alert('This will override your current data',
                           button1='Okay')
    if choice:
        f = dialogs.pick_document(types=['public.text'])
        try:
            if f is not None:  # None means the picker was cancelled
                vocab.load_json_file(f)
                vocab.save_json_file()
        except json.JSONDecodeError:
            dialogs.hud_alert('Invalid JSON file.', icon='error')
            return
        if f is not None:
            dialogs.hud_alert('Import was successful.')
            lookup_view['table'].reload()
def action_cancel(sender):
    """Cancel the search. Used by the "cancel" button."""
    search = sender.superview['search_field']
    search.text = ''
    # Re-run the (now empty) filter so the full list is shown again,
    # then dismiss the keyboard.
    search.delegate.textfield_did_change(search)
    search.end_editing()
def action_switch_search(sender):
    """Switch between searching words and searching full-text notes.

    Full-text mode is enabled for any non-zero segment index.
    """
    vocab.fulltext_toggle = bool(sender.selected_index)
    sender.superview['table'].reload()
def action_change_key(sender=None):
    """Input the WordNik API key with a dialog box.

    Loads the existing config (tolerating a missing or corrupt file),
    pre-fills the dialog with the current key, and on confirmation writes
    the config back and re-checks the key.
    """
    try:
        with open(CONFIG_FILE, 'r') as file:
            config = json.load(file)
    except (FileNotFoundError, json.JSONDecodeError):
        config = {}  # start fresh if the config is missing or unreadable
    d = dialogs.text_dialog(title='WordNik.com API Key',
                            text=config.get('wordnik_api_key') or '')
    if d is not None:  # None means the dialog was cancelled
        config['wordnik_api_key'] = d
        with open(CONFIG_FILE, 'w') as file:
            json.dump(config, file)
        define.check_wordnik_key()
def action_about(sender):
    """Open the "About" view as a modal sheet."""
    about_view.present('sheet', hide_close_button=True)
# ---- The view classes
class LookupView(ui.View):
    """This is the view for the main word list and search interface.

    In compact mode, this view is the "home" view. In regular mode, it's the
    left column.
    """

    def did_load(self):
        """Wire up delegates, button actions, and the nav-bar buttons."""
        self['table'].data_source = vocab
        self['table'].delegate = TableViewDelegate()
        self['search_field'].delegate = SearchDelegate()
        self['editbar']['delete'].action = self.action_delete
        self['toolbar']['edit'].action = self.start_editing
        self['editbar']['done'].action = self.end_editing
        about_img = ui.Image.named('iob:ios7_help_outline_24')
        about_button = ui.ButtonItem(image=about_img, action=action_about)
        self.right_button_items = [about_button]
        close_img = ui.Image.named('iob:close_round_24')
        close_button = ui.ButtonItem(image=close_img, action=self.action_close)
        self.left_button_items = [close_button]

    def action_close(self, sender):
        """Close the main view."""
        container.close()

    def start_editing(self, sender):
        """Set the table for editing and swap the toolbar for the editbar."""
        self['table'].set_editing(True, True)
        self['toolbar'].hidden = True
        self['editbar'].hidden = False
        self['editbar'].frame = self['toolbar'].frame

    def end_editing(self, sender):
        """End the table editing, reset the edit buttons, restore the toolbar."""
        self['table'].set_editing(False, True)
        self['editbar']['share'].enabled = False
        self['editbar']['delete'].enabled = False
        self['toolbar'].hidden = False
        self['editbar'].hidden = True

    def action_delete(self, sender):
        """Delete the selected rows (and clear the word pane if it showed one)."""
        rows = self['table'].selected_rows
        words = vocab.delete_multiple(rows)
        # `tableview.delete_rows` uses backwards tuples. This fixes it.
        # https://forum.omz-software.com/topic/2733/delete-rows-in-tableview/6
        self['table'].delete_rows([(x[1], x[0]) for x in rows])
        for word in words:
            if container.content_column['word'].text == word:
                container.content_column.clear()
        console.hud_alert('Deleted %s word(s).' % len(rows))
class WordView(ui.View):
    """This is the view for displaying notes and definitions.
    In compact mode, it's displayed in LookupView's NavigationView. In regular
    mode, it's displayed on the right column.
    """
    def did_load(self):
        """Initialize the delegates, buttons and the "blank" placeholder."""
        self['webcontainer']['html_definition'].delegate = WebDelegate()
        self['textview'].delegate = TextViewDelegate()
        self['segmentedcontrol1'].action = self.action_switch_modes
        self['webcontainer']['open_safari'].action = self.action_open_in_safari
        share_img = ui.Image.named('iob:ios7_upload_outline_32')
        share_button = ui.ButtonItem(image=share_img, action=self.action_share,
                                     enabled=False)
        lookup_img = ui.Image.named('iob:ios7_search_32')
        lookup_button = ui.ButtonItem(image=lookup_img,
                                      action=self.action_search)
        self.right_button_items = [share_button, lookup_button]
        # the "blank" view covers everything until a word is loaded
        self.add_subview(load_view('blank'))
        self['blank'].background_color = 'white'
        self['blank'].flex = 'WH'
        self['blank'].frame = self.frame
    def load_word(self, word: str, force=False):
        """Open a word."""
        # skip the reload when the word is already shown, unless forced
        if self['word'].text == word and not force:
            return
        self['blank'].hidden = True
        self.right_button_items[0].enabled = True  # enable "share"
        self['word'].text = word
        self['textview'].text = vocab.get_notes(word)
        # words with notes open on the notes tab (0), others on definitions (1)
        if self['textview'].text:
            self['segmentedcontrol1'].selected_index = 0
        else:
            self['segmentedcontrol1'].selected_index = 1
        # show a loading page while load_definition runs in the background
        loading = jinja2env.get_template('loading.html').render()
        self['webcontainer']['html_definition'].load_html(loading)
        self.switch_modes()
        self.load_definition(word)
        self.select_word()
    def select_word(self):
        """Select the current word on the table."""
        # section 0 holds words with notes, section 1 the rest
        if self['textview'].text:
            section = 0
        else:
            section = 1
        words = vocab.list_words(section)
        # an active search query shifts the sections down by one
        if vocab.query:
            section += 1
        if self['word'].text in words:
            row = words.index(self['word'].text)
            if lookup_view['table'].selected_rows != [(section, row)]:
                lookup_view['table'].selected_rows = [(section, row)]
    def clear(self):
        """Clear the word data and display a placeholder "blank" view."""
        self['blank'].hidden = False
        self['word'].text = ''
        self['textview'].text = ''
        self['webcontainer']['html_definition'].load_html('')
        self.right_button_items[0].enabled = False  # nothing to share
        container.open_words = []
    @ui.in_background
    def load_definition(self, word: str):
        """Fetch the definition of a word and render its HTML template."""
        template = jinja2env.get_template('definition.html')
        d = define.define(word)
        html = template.render(**d)
        self['webcontainer']['html_definition'].load_html(html)
        if d['definitions'] and not vocab.get_notes(word):
            # only save the word to history if there are definitions for it
            row = vocab.set_word(word)
            if row:
                lookup_view['table'].insert_rows([row])
    def action_share(self, sender):
        """Open the iOS share dialog to export a word or its notes."""
        options = ['Share Word', 'Share Word & Notes']
        d = dialogs.list_dialog(items=options, title='Share Word')
        word = self['word'].text
        if d == options[0]:
            text = word
        elif d == options[1]:
            text = export_notes_format(word, self['textview'].text)
        else:  # no option was selected
            return
        dialogs.share_text(text)
    def action_search(self, sender):
        """Open the search box on LookupView."""
        if container.horizontal_size_class() == AdaptiveView.COMPACT:
            # in compact mode, pop back to the word list first
            for word in container.open_words:
                container.nav_column.pop_view()
        lookup_view['search_field'].begin_editing()
    def action_open_in_safari(self, sender):
        """Open a given word in WordNik."""
        word = self['word'].text
        webbrowser.get('safari').open('https://wordnik.com/words/' + word)
    def action_switch_modes(self, sender):
        """Switch modes. This is a wrapper for WordView.switch_modes()."""
        self.switch_modes()
    def switch_modes(self, animate=True):
        """Switch between viewing the notes and the definitions."""
        def switch_webview():
            # dismiss the keyboard before revealing the definition page
            self['textview'].end_editing()
            self['webcontainer'].alpha = 1.0
            self['textview'].alpha = 0.0
        def switch_textview():
            self['webcontainer'].alpha = 0.0
            self['textview'].alpha = 1.0
        # index 0 = notes (textview), index 1 = definitions (webview)
        animations = (switch_textview, switch_webview)
        index = self['segmentedcontrol1'].selected_index
        if animate:
            ui.animate(animations[index])
        else:
            animations[index]()
class AboutView(ui.View):
    """This is the view for the "about" screen."""
    def did_load(self):
        """Initialize the buttons and HTML data."""
        html = jinja2env.get_template('about.html')
        self['webview1'].load_html(html.render())
        self['webview1'].delegate = WebDelegate()
        # render the WordNik badge in its original colors (no template tint)
        mode = ui.RENDERING_MODE_ORIGINAL
        img_path = os.path.join(UI_DIR, 'wordnik_badge_a1.png')
        img = ui.Image.named(img_path).with_rendering_mode(mode)
        def action_wordnik(sender):
            # tapping the badge opens the WordNik site
            webbrowser.get('safari').open('https://wordnik.com/')
        def action_close(sender):
            self.close()
        self['wn_logo'].image = img
        self['wn_logo'].action = action_wordnik
        self['wn_logo'].title = ''
        done_button = ui.ButtonItem(title='Done', action=action_close)
        self.right_button_items = [done_button]
class AdaptiveView(ui.View):
    """This view renders one or two columns depending on display size.
    This acts as a wrapper for two main views. It adapts to layout changes,
    such as putting an app in split-screen, and it rearranges the two views
    accordingly. It has two main modes: regular and compact. Regular is the
    "iPad" view. Compact is the "iPhone" view. (Although compact can be shown
    in split-screen on iPad.)
    """
    # values match UIKit's UIUserInterfaceSizeClass (compact=1, regular=2),
    # as returned by horizontal_size_class() below
    COMPACT = 1
    REGULAR = 2
    def __init__(self, nav_column, content_column):
        """Initialize the view with the two view columns."""
        # Putting content_column inside a NavigationView is a hack to make its
        # title bar visible. We never invoke the NavigationView methods.
        nav_column = ui.NavigationView(nav_column)
        self.add_subview(nav_column)
        self.add_subview(ui.NavigationView(content_column))
        self.content_column = content_column
        self.nav_column = nav_column
        # open_words will probably always just have one item, but it's
        # technically possible to have more than one open.
        self.open_words = []
        # last_layout tracks the current mode so layout() only rebuilds on change
        self.last_layout = None
        # background color is used as a border between the columns.
        self.background_color = 'lightgrey'
    def layout(self):
        """Call when the layout changes."""
        new_layout = self.horizontal_size_class()
        if new_layout == self.REGULAR and self.last_layout != self.REGULAR:
            self.set_regular()
        if new_layout == self.COMPACT and self.last_layout != self.COMPACT:
            self.set_compact()
    def horizontal_size_class(self):
        """Return regular or compact size class."""
        return self.objc_instance.traitCollection().horizontalSizeClass()
    def set_compact(self):
        """Render the view in compact mode.
        This collapses open content into the left column's NavigationView.
        """
        nav, content = self.subviews
        # the nav column takes over the whole area; the content column hides
        nav.x = self.x
        nav.width = self.width
        nav.height = self.height
        nav.flex = 'WH'
        content.hidden = True
        # re-open any words as pushed views on the navigation stack
        for word in self.open_words:
            compact_word_view.load_word(word)
            nav.push_view(compact_word_view, False)
        self.last_layout = self.COMPACT
    def set_regular(self):
        """Render the view in regular, two-column mode."""
        nav, content = self.subviews
        # fixed-width left column; content fills the rest (1px border gap
        # shows the lightgrey background as a divider)
        nav.width = 320
        nav.height = self.height
        nav.flex = 'H'
        nav.x = self.x
        content.hidden = False
        content.flex = 'WHR'
        content.x = nav.width + 1
        content.width = self.width - nav.width - 1
        content.height = self.height
        if self.last_layout == self.COMPACT:
            # move words pushed in compact mode back into the right column
            for word in self.open_words:
                nav.pop_view(False)
                self.content_column.load_word(word)
        self.last_layout = self.REGULAR
# ---- View Delegates
class TableViewDelegate:
    """The delegate class to handle the vocabulary table."""
    def tableview_did_select(self, tableview, section, row):
        """Call when the user selects a table row.
        For some reason, setting the `action` attribute in the UI designer
        passes an empty ui.ListDataSource as the sender. This method fixes it.
        """
        # selecting any row dismisses the search keyboard
        tableview.superview['search_field'].end_editing()
        if tableview.editing:
            # in edit mode, selection only enables the edit-bar actions
            tableview.superview['editbar']['delete'].enabled = True
            tableview.superview['editbar']['share'].enabled = True
        else:
            item = vocab.tableview_cell_for_row(tableview,
                                                section, row)
            load_word_view(item.text_label.text)
    def tableview_did_deselect(self, tableview, section, row):
        """Call when the user deselects a table row."""
        # once nothing is selected, the edit-bar actions are meaningless
        if not tableview.selected_rows and tableview.editing:
            tableview.superview['editbar']['delete'].enabled = False
            tableview.superview['editbar']['share'].enabled = False
class WebDelegate:
    """This is the delegate class for the WebViews."""
    def webview_should_start_load(self, webview, url, nav_type):
        """Call when the user taps a link.
        Links to suggested words will load in a fresh WordView.
        Links to external sites will load in Safari.
        There's one special rule for changing the API key.
        """
        if nav_type == 'link_clicked':
            parsed_url = urlparse(url)
            # wordroom:// URLs are internal commands embedded in the HTML
            if parsed_url.scheme == 'wordroom':
                # the WordView is two superviews up (webview -> webcontainer -> view)
                wv = webview.superview.superview
                if parsed_url.netloc == 'word':
                    # wordroom://word/<word> opens that (URL-quoted) word
                    wv.load_word(unquote(parsed_url.path[1:]))
                elif parsed_url.netloc == '-change_key':
                    # This is one special condition for when define.define()
                    # returns a message asking to change an API key.
                    action_change_key()
                    wv.load_word(wv['word'].text, True)
                else:
                    print('unknown url:', parsed_url)
                return False
            else:
                # any other clicked link is external: hand it to Safari
                webbrowser.get('safari').open(url)
                return False
        else:
            # non-click navigations (e.g. our own load_html) proceed normally
            return True
class TextViewDelegate:
    """This is the delegate class for the TextViews."""
    def textview_did_end_editing(self, textview):
        """Save text when user finishes editing."""
        word = textview.superview['word'].text
        notes = textview.text
        # NOTE(review): set_word/del_dup_word appear to return a table row
        # when saving moves the word between sections -- insert the new row,
        # then drop the stale duplicate. Confirm against Vocabulary.
        row = vocab.set_word(word, notes)
        if row:
            lookup_view['table'].insert_rows([row])
        row = vocab.del_dup_word(word, notes)
        if row:
            lookup_view['table'].delete_rows([row])
class SearchDelegate:
    """The delegate class for the search TextFields."""
    def __init__(self):
        """Init the class."""
        self.is_editing = False  # used to show/hide the "Cancel" button
    def textfield_did_change(self, textfield):
        """Search the vocabulary as the user types."""
        vocab.set_query(textfield.text)
        if textfield.text.find('#') != -1:
            # Typing a #hashtag automatically activates fulltext search
            textfield.superview['segmentedcontrol1'].selected_index = 1
            action_switch_search(textfield.superview['segmentedcontrol1'])
        if not self.is_editing:
            # This is called just to activate the animation.
            self.textfield_did_end_editing(textfield)
        textfield.superview['table'].reload()
    def textfield_should_return(self, textfield):
        """Search the vocabulary."""
        # Return opens the typed word directly
        if textfield.text:
            load_word_view(textfield.text.strip())
        textfield.end_editing()
        return True
    def textfield_did_begin_editing(self, textfield):
        """Animate the "Cancel" button."""
        self.is_editing = True
        view = textfield.superview
        cancel = view['cancel']
        def animation():
            # shrink the field and slide the Cancel button into view
            textfield.width -= cancel.width + 6
            cancel.x = view.width - cancel.width - 6
        # only animate when starting from an empty field
        if not textfield.text:
            ui.animate(animation)
            cancel.enabled = True
    def textfield_did_end_editing(self, textfield):
        """Animate the "Cancel" button."""
        self.is_editing = False
        view = textfield.superview
        cancel = view['cancel']
        def animation():
            # restore the field width and slide the button off-screen
            textfield.width = view.width - 12
            cancel.x = view.width + 6
        if not textfield.text:
            ui.animate(animation)
            cancel.enabled = False
def load_view(view_name: str):
    """Load and return the view named *view_name* from the UI directory."""
    path = os.path.join(UI_DIR, view_name)
    return ui.load_view(path)
if __name__ == '__main__':
    # This `builtins` trick fixes a problem where launching the script from
    # the home screen can cause multiple instances to run at once.
    # https://forum.omz-software.com/topic/4097/home-screen-alias-is-script-already-running/
    try:
        # reuse the live objects stashed by a previous run, if any
        (vocab, jinja2env, lookup_view, word_view,
         compact_word_view, about_view, container) = builtins.wordroom
    except (AttributeError, ValueError):
        # first launch (attribute missing) or a stale/short tuple
        container = None
    if isinstance(container, ui.View) and container.on_screen:
        pass  # reuse the original globals
    else:  # initialize new globals
        vocab = Vocabulary(data_file=VOCABULARY_FILE)
        jinja2env = Environment(loader=FileSystemLoader(HTML_DIR))
        lookup_view = load_view('lookup')
        word_view = load_view('word')
        # a second copy of the word view is used for the compact layout stack
        compact_word_view = load_view('word')
        about_view = load_view('about')
        container = AdaptiveView(lookup_view, word_view)
        container.name = 'WordRoom'
        container.present('fullscreen', hide_title_bar=True)
    # stash everything so a relaunch can find the running instance
    builtins.wordroom = (vocab, jinja2env, lookup_view, word_view,
                         compact_word_view, about_view, container)
    # if appex.is_running_extension():
    #     load_word_view(appex.get_text())
|
#!/usr/bin/env python3
import dash_core_components as dcc
import dash_html_components as html
from .data.config import Config
from .layout_providing import LayoutProviding
from .data.data_provider import DataProvider, RepresentableData
from .data.data_constants import DataConstants
from .data.config import Config
import plotly.graph_objects as go
from typing import List
import plotly.express as px
class CodeMetricsLayoutProvider(LayoutProviding):
    """Builds the Dash components for the "Source Code Metrics" section.

    Depending on the flags under ``config.render_metrics['code_metrics']``
    it renders up to three graphs: total LOC growth, main-repository LOC
    growth and the number of dependencies over time.
    """

    def __init__(self, data_provider: DataProvider, config: Config):
        """Keep references to the data source and render configuration."""
        self.data_provider = data_provider
        self.config = config

    def create_components(self) -> list:
        """Return the section header plus every enabled metric graph.

        Returns an empty list when all three code metrics are disabled,
        so the whole section (including the header) is omitted.
        """
        loc_data, deps_data = self.data_provider.fetch_code_metrics()
        flags = self.config.render_metrics['code_metrics']
        should_render = (flags['total_loc'] or flags['main_repo_loc']
                         or flags['n_of_dependencies'])
        if not should_render:
            return []
        header = [html.H2(children='Source Code Metrics')]
        return (header
                + self.__total_loc(loc_data=loc_data)
                + self.__repo_loc(loc_data=loc_data)
                + self.__deps(deps_data=deps_data))

    def __total_loc(self, loc_data: RepresentableData) -> list:
        """Graph of total LOC growth, or [] when disabled.

        BUG FIX: the original gated this graph on the 'main_repo_loc'
        flag (and __repo_loc on 'total_loc') -- the two keys were swapped.
        """
        if not self.config.render_metrics['code_metrics']['total_loc']:
            return []
        return [
            dcc.Graph(
                id='TOTAL-LOC',
                figure=self.__total_loc_figure(
                    title='Total LOC (Lines of Code) growth',
                    rep_data=loc_data)
            )
        ]

    def __repo_loc(self, loc_data: RepresentableData) -> list:
        """Graph of the main repository's LOC growth, or [] when disabled.

        BUG FIX: now gated on 'main_repo_loc' (was 'total_loc'; swapped
        with __total_loc).
        """
        if not self.config.render_metrics['code_metrics']['main_repo_loc']:
            return []
        return [
            dcc.Graph(
                id='REPO-LOC',
                figure=self.__main_repo_loc_figure(
                    title=f'{self.config.repo_name} LOC (Lines of Code) growth',
                    rep_data=loc_data)
            )
        ]

    def __deps(self, deps_data: RepresentableData) -> list:
        """Graph of dependency counts over time, or [] when disabled."""
        if not self.config.render_metrics['code_metrics']['n_of_dependencies']:
            return []
        return [
            dcc.Graph(
                id='DEPS',
                figure=self.__n_of_dependencies_figure(
                    title='Number of dependencies over time',
                    rep_data=deps_data)
            )
        ]

    @staticmethod
    def __stacked_trace(x, data, legend, key, color) -> go.Scatter:
        """One stacked-area trace for the series stored under ``key``."""
        return go.Scatter(
            x=x,
            y=data[key],
            hoverinfo='x+y',
            mode='lines',
            name=legend[key],
            line_color=color,
            stackgroup='one'
        )

    def __total_loc_figure(self, title: str, rep_data: RepresentableData) -> go.Figure:
        """Stacked area chart of dependency and repository LOC over time."""
        data, legend = rep_data
        x = data[DataConstants.date().key]
        traces = [
            self.__stacked_trace(x, data, legend,
                                 DataConstants.deps_test_loc().key, 'darkslateblue'),
            self.__stacked_trace(x, data, legend,
                                 DataConstants.deps_prod_loc().key, 'darkblue'),
            self.__stacked_trace(x, data, legend,
                                 DataConstants.repo_test_loc().key, 'firebrick'),
            self.__stacked_trace(x, data, legend,
                                 DataConstants.repo_prod_loc().key, 'red'),
        ]
        return self.__build_filled_area_chart(
            title=title, traces=traces,
            yaxis_title=DataConstants.kloc().description)

    def __main_repo_loc_figure(self, title: str, rep_data: RepresentableData) -> go.Figure:
        """Stacked repo LOC chart with a dashed duplicated-LOC overlay."""
        data, legend = rep_data
        x = data[DataConstants.date().key]
        dupl_key = DataConstants.repo_dupl_loc().key
        traces = [
            self.__stacked_trace(x, data, legend,
                                 DataConstants.repo_test_loc().key, 'firebrick'),
            self.__stacked_trace(x, data, legend,
                                 DataConstants.repo_prod_loc().key, 'red'),
            # duplicated LOC is drawn as a dashed overlay, not stacked
            go.Scatter(
                x=x,
                y=data[dupl_key],
                name=legend[dupl_key],
                line=dict(color='black', width=5, dash='dash')
            ),
        ]
        return self.__build_filled_area_chart(
            title=title, traces=traces,
            yaxis_title=DataConstants.kloc().description)

    def __n_of_dependencies_figure(self, title: str, rep_data: RepresentableData) -> go.Figure:
        """Stacked area chart of external and internal dependency counts.

        FIX: the return annotation was ``px.line``; the method builds and
        returns a ``go.Figure``.
        """
        data, legend = rep_data
        x = data[DataConstants.date().key]
        traces = [
            self.__stacked_trace(x, data, legend,
                                 DataConstants.external_deps().key, 'lightseagreen'),
            self.__stacked_trace(x, data, legend,
                                 DataConstants.internal_deps().key, 'lightsalmon'),
        ]
        return self.__build_filled_area_chart(
            title=title, traces=traces,
            yaxis_title=DataConstants.n_of_deps().description)

    def __build_filled_area_chart(self, title: str, traces: List[go.Scatter], yaxis_title: str) -> go.Figure:
        """Assemble the given traces into a titled, axis-labelled figure."""
        fig = go.Figure()
        for trace in traces:
            fig.add_trace(trace)
        fig.update_layout(
            title=title,
            xaxis_title=DataConstants.date().description,
            yaxis_title=yaxis_title
        )
        return fig
from perceptron import perceptron
class cluster():
    """A small network of perceptrons: one input node feeding three
    intermediate outputs and a final output node.
    """

    def __init__(self):
        self.inputNode = perceptron.perceptron()
        self.output1 = perceptron.perceptron()
        self.output2 = perceptron.perceptron()
        self.output3 = perceptron.perceptron()
        self.finalOutput = perceptron.perceptron()

    def runCluster(self, x):
        """Feed the input `x` through the input node and build the vector
        for the next layer.

        NOTE(review): the body stops after constructing `secondaryInput`
        and never uses output1..3 or finalOutput -- the implementation
        looks unfinished; confirm the intended forward pass.
        """
        # BUG FIX: the original referenced `np` without importing numpy
        # anywhere in the module, which raised NameError at runtime.
        import numpy as np
        primaryOutput = self.inputNode.runPerceptron(x)
        secondaryInput = np.array([primaryOutput, 0, 0])
|
from io import BytesIO
import matplotlib.pyplot as plt
from model import *
def get_main_image():
    """Render a scatter plot of ASF losses over time for all records.

    Returns an in-memory image buffer (BytesIO) rewound to position 0,
    ready to be streamed to a client.
    """
    loss_time = [get_loss_times(r) for r in data]
    losses = [get_loss(r) for r in data]
    # NOTE: the original also computed a `damages` list that was never
    # used; it has been removed.
    plt.clf()
    plt.scatter(loss_time, losses, alpha=0.5)
    plt.xlabel('time')
    plt.ylabel('loss caused by ASF')  # typo fix: was 'losss'
    img = BytesIO()
    plt.savefig(img)
    img.seek(0)
    return img
def get_region_image(region):
    """Render a scatter plot of ASF losses over time for one region.

    `region` is matched against the first element of `get_place(record)`.
    Returns an in-memory image buffer (BytesIO) rewound to position 0.
    """
    # filter once instead of re-evaluating get_place() per series
    records = [r for r in data if get_place(r)[0] == region]
    loss_time = [get_loss_times(r) for r in records]
    losses = [get_loss(r) for r in records]
    # NOTE: the original also computed an unused `damages` list; removed.
    plt.clf()
    plt.scatter(loss_time, losses, alpha=0.5)
    plt.xlabel('time')
    plt.ylabel('loss caused by ASF')  # typo fix: was 'losss'
    img = BytesIO()
    plt.savefig(img)
    img.seek(0)
    return img
|
from __future__ import absolute_import, division
import numpy as np
import pandas as pd
import pickle
import tables
import csv
import os
import re
import csv
def total_exp(cell_lines, path_to_networks):
    """Print, for each cell line, its experiment count and how many
    experiments have CARNIVAL results.

    cell_lines - iterable of cell-line directory names
    path_to_networks - root directory containing one folder per cell line
    (a trailing separator is no longer required: paths are joined with
    os.path.join instead of string concatenation)
    """
    for cl in cell_lines:
        cl_dir = os.path.join(path_to_networks, cl)
        experiments = os.listdir(cl_dir)  # hoisted: was listed twice
        nets = 0
        for exp in experiments:
            # an experiment counts as processed when CARNIVAL results exist
            if os.path.isdir(os.path.join(cl_dir, exp, 'Results_CARNIVAL')):
                nets += 1
        print(f'Cell line {cl} has a total of {len(experiments)} experiments and {nets} CARNIVAL processed experiments.')
def count_models(files):
    """Count the filenames that start with the letter 'i'.

    The original used re.search('\\A[i].*', f) with a non-raw string
    (an invalid-escape warning waiting to become an error); a plain
    startswith() check is equivalent and clearer.
    """
    return sum(1 for f in files if f.startswith('i'))
def node_bond_mat(reader):
    """
    Takes the tsv file and outputs the connectivity matrices.

    Returns (node1, node2, bonds, max_atoms): integer source/target node
    lists, the bond signs, and the largest node index assigned.
    """
    src_names = []
    dst_names = []
    signs = []
    for row in reader:
        # perturbation pseudo-nodes are excluded from the graph
        if (row['Node1'] != 'Perturbation') and (row['Node2'] != 'Perturbation'):
            src_names.append(row['Node1'])
            dst_names.append(row['Node2'])
            signs.append(int(row['Sign']))
    node_dict = {}
    for idx, node in enumerate(np.unique(src_names + dst_names)):
        node_dict[node] = idx
        # NOTE(review): as in the original, this ends up as the highest
        # index, not the node count -- confirm the off-by-one with callers.
        max_atoms = idx
    node1 = [node_dict[name] for name in src_names]
    node2 = [node_dict[name] for name in dst_names]
    return node1, node2, signs, max_atoms
def connectivity_mat(reader, max_atoms, max_degree, num_bond_features):
    """
    Returns the edges and bonds matrices of the input signaling network.

    edge_mat[a] lists the neighbours of atom a (padded with -1);
    bond_mat[a] holds the corresponding bond signs (padded with 0).
    """
    edge_mat = np.full((max_atoms, max_degree), fill_value=-1)
    bond_mat = np.full((max_atoms, max_degree), fill_value=0)
    src, dst, signs, _ = node_bond_mat(reader)

    def _attach(a, neighbour, sign):
        # record `neighbour` and its sign in atom a's first free slot
        for slot in range(max_degree):
            if edge_mat[a][slot] == -1:
                edge_mat[a][slot] = neighbour
                bond_mat[a][slot] = sign
                break

    # every edge is stored in both directions
    for a, sign, b in zip(src, signs, dst):
        _attach(a, b, sign)
        _attach(b, a, sign)
    return edge_mat, bond_mat
def node_attributes(prot_dict):
    """Placeholder: per-node attribute extraction is not implemented yet."""
    pass
def gather(self, dim, index):
    """
    Gathers values along an axis specified by ``dim``.
    For a 3-D tensor the output is specified by:
    out[i][j][k] = input[index[i][j][k]][j][k]  # if dim == 0
    out[i][j][k] = input[i][index[i][j][k]][k]  # if dim == 1
    out[i][j][k] = input[i][j][index[i][j][k]]  # if dim == 2

    Parameters
    ----------
    dim:
        The axis along which to index
    index:
        A tensor of indices of elements to gather

    Returns
    -------
    Output Tensor
    """
    # all axes except `dim` must agree between data and index
    index_cross = index.shape[:dim] + index.shape[dim + 1:]
    data_cross = self.shape[:dim] + self.shape[dim + 1:]
    if index_cross != data_cross:
        raise ValueError("Except for dimension " + str(dim) +
                         ", all dimensions of index and self should be the same size")
    if index.dtype != np.dtype('int_'):
        raise TypeError("The values of index must be integers")
    # bring `dim` to the front so np.choose can select along it
    data_front = np.swapaxes(self, 0, dim)
    index_front = np.swapaxes(index, 0, dim)
    picked = np.choose(index_front, data_front)
    return np.swapaxes(picked, 0, dim)
def temporal_padding(x, padding=(1, 1), padvalue=0):
    """Pad the middle (second) dimension of a 3D array.

    Arguments:
        x: array or variable to pad.
        padding: (before, after) counts of entries to add along axis 1.
        padvalue: constant value used for the new entries.
    Returns:
        The padded 3D array.
    """
    assert len(padding) == 2
    before, after = padding
    spec = [[0, 0], [before, after], [0, 0]]
    return np.pad(x, spec, mode='constant', constant_values=padvalue)
def neighbour_lookup(atoms, edges, maskvalue=0, include_self=False):
    ''' Looks up the features of an all atoms neighbours, for a batch of molecules.
    # Arguments:
        atoms (K.tensor): of shape (batch_n, max_atoms, num_atom_features)
        edges (K.tensor): of shape (batch_n, max_atoms, max_degree) with neighbour
            indices and -1 as padding value
        maskvalue (numerical): the maskingvalue that should be used for empty atoms
            or atoms that have no neighbours (does not affect the input maskvalue
            which should always be -1!)
        include_self (bool): if True, the featurevector of each atom will be added
            to the list feature vectors of its neighbours
    # Returns:
        neigbour_features (K.tensor): of shape (batch_n, max_atoms(+1), max_degree,
            num_atom_features) depending on the value of include_self
    # Todo:
        - make this function compatible with Tensorflow, it should be quite trivial
            because there is an equivalent of `T.arange` in tensorflow.
    '''
    # The lookup masking trick: We add 1 to all indices, converting the
    # masking value of -1 to a valid 0 index.
    masked_edges = edges + 1
    # We then add a padding vector at index 0 by padding to the left of the
    # lookup matrix with the value that the new mask should get
    masked_atoms = temporal_padding(atoms, (1,0), padvalue=maskvalue)
    # Import dimensions
    atoms_shape = masked_atoms.shape
    batch_n = atoms_shape[0]
    lookup_size = atoms_shape[1]
    num_atom_features = atoms_shape[2]
    edges_shape = masked_edges.shape
    max_atoms = edges_shape[1]
    max_degree = edges_shape[2]
    # create broadcastable offset
    offset_shape = (batch_n, 1, 1)
    offset = np.reshape(np.arange(start=0, stop=batch_n, dtype='int32'), offset_shape)
    offset *= lookup_size
    # apply offset to account for the fact that after reshape, all individual
    # batch_n indices will be combined into a single big index
    flattened_atoms = np.reshape(masked_atoms, (-1, num_atom_features))
    flattened_edges = np.reshape(masked_edges + offset, (batch_n, -1))
    # Gather flattened
    flattened_result = np.take(flattened_atoms, flattened_edges, axis=0)
    # Unflatten result
    output_shape = (batch_n, max_atoms, max_degree, num_atom_features)
    output = np.reshape(flattened_result, output_shape)
    if include_self:
        # prepend each atom's own feature vector along the degree axis
        return np.concatenate([np.expand_dims(atoms, axis=2), output], axis=2)
    return output
def mask_atoms_by_degree(atoms, edges, bonds=None):
    """Summed neighbour features and per-atom degrees.

    Returns ``(summed_features, atom_degrees)`` where ``summed_features``
    is the per-atom sum of neighbour features (with the summed bond
    features concatenated when ``bonds`` is given) and ``atom_degrees``
    counts each atom's non-padding (-1) neighbours.
    """
    # degree = number of edge slots that are not the -1 padding value
    atom_degrees = np.sum(
        np.ndarray.astype(np.not_equal(edges, -1), dtype='float32'),
        axis=-1, keepdims=True)
    # gather every atom's neighbour features, then reduce over the degree axis
    neighbour_features = neighbour_lookup(atoms, edges, include_self=False)
    summed_features = np.sum(neighbour_features, axis=-2)
    if bonds is not None:
        # append the per-atom bond-feature sums to the atom-feature sums
        summed_features = np.concatenate(
            [summed_features, np.sum(bonds, axis=-2)], axis=-1)
    return summed_features, atom_degrees
import pymem
import re
# Patch two anti-tamper checks in a running csgo.exe by locating each
# check via a byte signature and flipping its conditional-jump opcode.
pm = pymem.Pymem('csgo.exe')
# bypass NtOpenFile hook in csgo.exe
csgo = pymem.process.module_from_name(pm.process_handle,
                                      'csgo.exe')
# scan the whole module image for the signature of the guarded check
csgoModule = pm.read_bytes(csgo.lpBaseOfDll, csgo.SizeOfImage)
address = csgo.lpBaseOfDll + re.search(rb'.\x1A\xF6\x45\x0C\x20',
                                       csgoModule).start()
# toggle 0x74 <-> 0xEB (x86 JE <-> short JMP), so running the script
# again reverts the patch
pm.write_uchar(address, 0xEB if pm.read_uchar(address) == 0x74 else 0x74)
# bypass thread creation detection in DllMain of client.dll
client = pymem.process.module_from_name(pm.process_handle,
                                        'client.dll')
clientModule = pm.read_bytes(client.lpBaseOfDll, client.SizeOfImage)
address = client.lpBaseOfDll + re.search(rb'.\x69\x6A\x00\x6A\x04',
                                         clientModule).start()
pm.write_uchar(address, 0xEB if pm.read_uchar(address) == 0x74 else 0x74)
pm.close_process()
|
"""Find the longest run of text from the first file that also occurs in
the second file, then print it one word per line.

Usage: script.py FIRST_FILE SECOND_FILE
"""
import sys
import re

file1 = sys.argv[1]
file2 = sys.argv[2]

# FIX: the original leaked both file handles (bare open().read())
with open(file1, 'r') as f:
    FIRST = f.read()
with open(file2, 'r') as f:
    SECOND = f.read()

s1 = FIRST.split(' ')
s2 = SECOND
longest = ""
i = 0
for x in s1:
    # FIX: the words come from plain text, so regex metacharacters in
    # them must be matched literally (the original passed raw words as
    # patterns, which crashes or mismatches on '(', '*', etc.)
    if re.search(re.escape(x), s2):
        s = x
        # grow the candidate window while it still occurs in SECOND
        while re.search(re.escape(s), s2):
            if len(s) > len(longest):
                longest = s
            if i + len(s) == len(FIRST):
                break
            s = FIRST[i:i + len(s) + 9]
    i += 9
# FIX: Python-2 print statement converted to the py3 function form
print(longest.strip(' ').replace(' ', '\n'))
|
# Jay Williams 2017
# This module is included as part of https://github.com/codingJWilliams/jw_utils
# Liscenced according to ../../LISCENCE
# Contact codingJWilliams on github with any queries
import jw_utils
def is_prime(n):
    """Return True when n is a prime number, False otherwise.

    Values below 2 (0, 1 and negatives) are not prime. BUG FIX: the
    original returned True for negative n because its loop never ran.
    Uses trial division up to sqrt(n) instead of the original O(n) scan.
    """
    if n < 2:
        return False
    d = 2
    while d * d <= n:
        if n % d == 0:
            return False
        d += 1
    return True
def next_prime(n):
    """Return the smallest prime strictly greater than n.

    # https://en.wikipedia.org/wiki/Bertrand%27s_postulate
    # There is always a prime number between n and 2n (for n > 1),
    # so the scan below is bounded.

    BUG FIX: the original unconditionally skipped even candidates, so it
    could never return 2 -- next_prime(1) fell through to the error
    string instead of returning 2.
    """
    for x in range(n + 1, 2 * n + 2):
        if is_prime(x):
            return x
    return "Oh. Well you've proved bertrand wrong. GG"
def primes_between(a, b):
    """Return the list of primes p with a <= p < b.

    Walks from a using next_prime(); a itself is included when prime.
    (The original duplicated the next_prime() call in both branches of
    an if/else and used `+= [x]` instead of append.)
    """
    primesList = []
    currentPrime = a
    while currentPrime < b:
        if is_prime(currentPrime):
            primesList.append(currentPrime)
        currentPrime = next_prime(currentPrime)
    return primesList
"""
A python wrapper for NAMD 2.9
"""
#=============================================================================================
# IMPORTS
#=============================================================================================
import os, sys, gzip
try:
    from AlGDock import findPath
    from AlGDock import search_paths
    if 'namd' not in search_paths.keys():
        search_paths['namd'] = [None]
except ImportError:  # AlGDock is optional; fall back to a local implementation
    def findPath(locations):
        """
        Parses a list of locations, returning the first file that exists.
        If none exist, then None is returned.
        """
        import os.path
        for location in locations:
            if location is not None and os.path.exists(location):
                return os.path.abspath(location)
        if not locations == [None]:
            # FIX: Python-2 print statements converted to function calls
            # (valid on both Python 2 and 3)
            print('File not found!')
            print('Searched:')
            print(locations)
        return None

# Define search paths for external programs
# Defined for David's IIT MacBook Pro, DSCR cluster, and CCB cluster
# NOTE(review): this unconditionally overwrites any search_paths imported
# from AlGDock above -- confirm whether that shadowing is intended.
search_paths = {
    'namd': [
        '/Users/dminh/Applications/NAMD_2.10/namd2',
        '/share/apps/namd/2.9/Linux-x86_64-g++/namd2', None
    ]
}
class GRID:
    def __init__(self, forceFN, LJR_FN, LJA_FN, ELE_FN, lambdaVal=1.0):
        """
        Class to define the grid portion of a NAMD configuration file.
        forceFN - a pdb file with the following in the columns
        X - partial charges
        Y - Lennard-Jones repulsive interaction multiplier
        Z - Lennard-Jones attractive interaction multiplier
        O - anything
        B - ones
        LJR_FN - a dx file with the Lennard-Jones repulsive potential grid
        LJA_FN - a dx file with the Lennard-Jones attractive potential grid
        ELE_FN - a dx file with the electrostatic potential grid
        lambdaVal - the value of lambda
        """
        self.set_scale(lambdaVal)
        # mgridforce blocks for the two Lennard-Jones grids (repulsive LJR
        # and attractive LJA); both read their multipliers from forceFN
        self.LJ_conf = '''
mgridforcefile LJR {%s}
mgridforcecol LJR B # Ones
mgridforcechargecol LJR Y # Lennard-Jones repulsive interactions
mgridforcepotfile LJR {%s}
mgridforcecont1 LJR no
mgridforcecont2 LJR no
mgridforcecont3 LJR no
mgridforcelite LJR yes
mgridforcefile LJA {%s}
mgridforcecol LJA B # Ones
mgridforcechargecol LJA Z # Lennard-Jones attractive interactions
mgridforcepotfile LJA {%s}
mgridforcecont1 LJA no
mgridforcecont2 LJA no
mgridforcecont3 LJA no
mgridforcelite LJA yes
''' % (forceFN, LJR_FN, forceFN, LJA_FN)
        # mgridforce block for the electrostatic grid, using the charges
        # stored in forceFN's X column
        self.ELE_conf = '''
mgridforcefile ELE {%s}
mgridforcecol ELE B # Ones
mgridforcechargecol ELE X # Charges
mgridforcepotfile ELE {%s}
mgridforcecont1 ELE no
mgridforcecont2 ELE no
mgridforcecont3 ELE no
mgridforcelite ELE yes
''' % (forceFN, ELE_FN)
    def set_scale(self, lambdaVal=1.0):
        """
        Based on lambdaVal, sets class variables scale_LJ and scale_ELE
        """
        # lambda in (0, 0.5) ramps LJ from 0 to 1 with ELE off;
        # lambda in [0.5, 1] keeps LJ at 1 and ramps ELE from 0 to 1
        self.lambdaVal = lambdaVal
        if self.lambdaVal == 0:
            self.scale_LJ = 0
            self.scale_ELE = 0
        elif self.lambdaVal < 0.5:
            self.scale_LJ = self.lambdaVal / 0.5
            self.scale_ELE = 0
        else:
            self.scale_LJ = 1
            self.scale_ELE = (self.lambdaVal - 0.5) / 0.5
    def _scale_conf(self, type, scale):
        """
        Returns a string that describes grid force scaling
        """
        # the same factor is applied to all three scaling components
        return 'mgridforcescale %3s %8.6e %8.6e %8.6e\n' % (type, scale,
                                                            scale, scale)
    def script(self):
        """
        Returns the grid force portion of a NAMD configuration script, with scaling according to lambdaVal.
        """
        conf = ''
        if self.lambdaVal > 0:
            conf = conf + 'mgridforce on\n' + self.LJ_conf
            conf = conf + self._scale_conf('LJR', self.scale_LJ) + self._scale_conf(
                'LJA', self.scale_LJ)
        # the electrostatic grid only turns on in the second half of lambda
        if self.lambdaVal > 0.5:
            conf = conf + self.ELE_conf + self._scale_conf('ELE', self.scale_ELE)
        return conf
    def script_LJ(self):
        """
        Returns a fully scaled Lennard-Jones grid force portion of a NAMD configuration script
        """
        return 'mgridforce on\n' + self.LJ_conf + self._scale_conf(
            'LJR', 1) + self._scale_conf('LJA', 1)
    def script_ELE(self):
        """
        Returns a fully scaled electrostatic grid force portion of a NAMD configuration script
        """
        return 'mgridforce on\n' + self.ELE_conf + self._scale_conf(
            'ELE', 1)
class COLVARS_BINDING_SITE_SPHERE:
    def __init__(self,
                 radius,
                 atomFN,
                 atomsCol='B',
                 atomsColValue=1.0,
                 center=[0.0, 0.0, 0.0],
                 centerFN=None,
                 colvarsFN='sphere.colvars'):
        """
        Class containing collective variables script and configuration file
        options for a spherical binding site.

        radius - upper wall of the distance-from-center restraint
        atomFN - pdb file whose `atomsCol` column flags the restrained atoms
        atomsColValue - value in `atomsCol` that marks a restrained atom
        center - sphere center coordinates (overridden by centerFN if given)
        centerFN - optional file with space-separated center coordinates
        colvarsFN - where write() stores the colvars script

        NOTE: the mutable default for `center` is kept for interface
        compatibility; it is never mutated here.
        """
        self.radius = radius
        self.center = center
        self.colvarsFN = colvarsFN
        # FIX: `== None` comparison replaced with `is not None`, and the
        # file handle is now closed via a context manager (it leaked before)
        if centerFN is not None:
            with open(centerFN, 'r') as centerF:
                center = [float(item) for item in centerF.read().strip().split(' ')]
            self.center = center
        self.colvars_script = '''
colvar {
name DistanceFromSite
upperWall %.2f
upperWallConstant 10.0
distance {
group1 {
atomsFile {%s}
atomsCol {%s}
atomsColValue {%.2f}
}
group2 {
dummyAtom (%8.6f, %8.6f, %8.6f)
}
}
}
''' % (self.radius, atomFN, atomsCol, atomsColValue, self.center[0],
       self.center[1], self.center[2])
        self.conf = '\ncolvars on\ncolvarsConfig {%s}\n' % colvarsFN

    def write(self):
        """Write the colvars script to colvarsFN unless it already exists."""
        if not os.path.exists(self.colvarsFN):
            with open(self.colvarsFN, 'w') as colvarsF:
                colvarsF.write(self.colvars_script)
class COLVARS_BINDING_SITE_CYLINDER:
    def __init__(self,
                 radius,
                 minZ,
                 maxZ,
                 atomFN,
                 atomsCol='B',
                 atomsColValue=1.0,
                 axis=[0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
                 axisFN=None,
                 colvarsFN='cylinder.colvars'):
        """
        Class containing collective variables script and configuration file
        options for a cylindrical binding site.

        radius - upper wall on the radial (XY) distance from the axis
        minZ, maxZ - lower/upper walls along the axis direction
        atomFN - pdb file whose `atomsCol` column flags the restrained atoms
        atomsColValue - value in `atomsCol` that marks a restrained atom
        axis - six floats: reference point (first three) and axis
               direction (last three); overridden by axisFN if given
        axisFN - optional file with six space-separated floats
        colvarsFN - where write() stores the colvars script

        NOTE: the mutable default for `axis` is kept for interface
        compatibility; it is never mutated here.
        """
        self.radius = radius
        self.minZ = minZ
        self.maxZ = maxZ
        self.colvarsFN = colvarsFN
        # FIX: `== None` comparison replaced with `is not None`, and the
        # file handle is now closed via a context manager (it leaked before)
        if axisFN is not None:
            with open(axisFN, 'r') as axisF:
                axis = [float(item) for item in axisF.read().strip().split(' ')]
        self.axis = axis
        groups = '''
main {
atomsFile {%s}
atomsCol %s
atomsColValue %d
}
ref {
dummyAtom (%.6f, %.6f, %.6f)
}
axis (%.6f, %.6f, %.6f)
''' % (atomFN, atomsCol, atomsColValue, axis[0], axis[1], axis[2], axis[3],
       axis[4], axis[5])
        self.colvars_script = '''
colvar {
name DistXY
upperWall %.2f
upperWallConstant 10.0
distanceXY { %s }
}
colvar {
name DistZ
lowerWall %.2f
upperWall %.2f
lowerWallConstant 10.0
upperWallConstant 10.0
distanceZ { %s }
}
''' % (self.radius, groups, self.minZ, self.maxZ, groups)
        self.conf = '\ncolvars on\ncolvarsConfig {%s}\n' % colvarsFN

    def write(self):
        """Write the colvars script to colvarsFN unless it already exists."""
        if not os.path.exists(self.colvarsFN):
            with open(self.colvarsFN, 'w') as colvarsF:
                colvarsF.write(self.colvars_script)
class NAMD:
    # Thin driver around a NAMD 2.9 binary: writes .namd configuration
    # scripts, launches the executable with subprocess, parses ENERGY lines
    # from its stdout, and retries failed runs with new seeds / smaller
    # time steps.  Python 2 syntax (print statements) throughout.
    def __init__(self,
                 namd_command='namd2',
                 NPROCS=1,
                 prmtop=None,
                 inpcrd=None,
                 bincoordinates=None,
                 binvelocities=None,
                 xsc=None,
                 fixed=None,
                 solvent='Gas',
                 useCutoff=True,
                 grid=None,
                 colvars=None,
                 alchemical=None,
                 seed=None,
                 finishBy=None,
                 debug=False):
        """
        Wrapper for NAMD 2.9
        Required arguments:
          namd_command - the location of NAMD 2.9 [Default is 'namd2']
          NPROCS - the number of processors [Default is 1]
          prmtop - AMBER parameters and topology for the system
          inpcrd - AMBER coordinates for the system
        Optional arguments:
          bincoordinates - binary coordinates to restart a NAMD run [Default is None]
          binvelocities - binary velocities to restart a NAMD run [Default is None]
          xsc - extended system file [Default is None]
          fixed - pdb file with fixed atoms labeled with 1 in the occupancy column
          solvent - either 'TIP3P', 'GBSA', 'GB', or 'Gas' [Default is Gas]
            TIP3P uses a shorter cutoff than the others
            GBSA and GB use Generalized Born solvation
            GBSA also includes a surface area term
          useCutoff - use a cutoff for nonbonded terms
          grid - GRID class, or None [Default is None]
          colvars - class containing conf and colvars objects, or None [Default is None]
          alchemical - ALCHEMICAL class (Doesn't do anything yet)
          seed - a random number seed to start NAMD simulation [Default is None]
          finishBy - the time (in seconds) any NAMD instance should be finished by [Default is none, meaning that there is no time limit.]
          debug - does not remove any files [Default = False]
        """
        self.namd_command = namd_command
        self.NPROCS = NPROCS
        self.prmtop = prmtop
        self.inpcrd = inpcrd
        self.bincoordinates = bincoordinates
        self.binvelocities = binvelocities
        self.xsc = xsc
        self.fixed = fixed
        self.solvent = solvent
        self.useCutoff = useCutoff
        self.grid = grid
        self.colvars = colvars
        self.alchemical = alchemical
        self.seed = seed
        self.finishBy = finishBy
        self.debug = debug
        if self.prmtop == None:
            raise Exception("AMBER prmtop file required")
        if self.inpcrd == None:
            raise Exception("AMBER inpcrd file required")
        # Find NAMD
        # NOTE(review): findPath and search_paths are defined elsewhere in
        # this module (not visible here) -- confirm they are in scope.
        self.namd_command = findPath([self.namd_command] + search_paths['namd'])
        if self.namd_command == None:
            raise Exception("NAMD not found!")

    def load_pkl_gz(self, FN):
        # Load a gzipped pickle file, returning None if it is missing,
        # empty, or unreadable.
        import os, gzip, pickle
        if os.path.isfile(FN) and os.path.getsize(FN) > 0:
            F = gzip.open(FN, 'r')
            try:
                data = pickle.load(F)
            except:
                # Corrupt or truncated archive: report and give up.
                print ' error loading ' + FN
                F.close()
                return None
            F.close()
            return data
        else:
            return None

    def _write_pkl_gz(self, FN, data):
        # Serialize `data` into a gzipped pickle file.
        import os, gzip, pickle
        F = gzip.open(FN, 'w')
        pickle.dump(data, F)
        F.close()
        print " wrote to " + FN

    def _removeFile(self, filename):
        """
        If the file exists and we are not debugging, remove it
        """
        if os.path.exists(filename) and (not self.debug):
            os.remove(filename)

    def _removeFiles(self, searchString, forceRemove=False):
        # Remove every file matching the glob pattern, unless debugging
        # (forceRemove overrides the debug flag).
        if (not self.debug) or forceRemove:
            import glob
            list = glob.glob(searchString)  # NOTE(review): shadows the builtin `list`
            for FN in list:
                os.remove(FN)

    def _writeConfiguration(self,
                            outputname,
                            temperature,
                            integrator_script,
                            output_script,
                            execution_script,
                            grid_script=''):
        """
        Writes a NAMD configuration script based on class variables
        """
        conf = '''# Variables
set outputname %s
set temperature %d
# Input files
amber on
parmfile {%s}
ambercoor {%s}
''' % (outputname, temperature, self.prmtop, self.inpcrd)
        # Optional restart inputs.
        if not (self.bincoordinates == None):
            conf = conf + 'bincoordinates {%s}\n' % self.bincoordinates
        if not (self.binvelocities == None):
            conf = conf + 'binvelocities {%s}\n' % self.binvelocities
        if not (self.xsc == None):
            conf = conf + 'extendedSystem {%s}\n' % self.xsc
        conf = conf + '''
# Force field parameters
exclude scaled1-4
1-4scaling 0.833333 # =1/1.2, default for AMBER. NAMD default is 1.0
scnb 2 # This is AMBER and NAMD default
switching on
'''
        # Nonbonded cutoffs: effectively infinite, short (explicit water),
        # or long (everything else).
        if not self.useCutoff:
            conf = conf + 'cutoff 999.0\nswitchdist 999.0\npairlistdist 999.0\n\n'
        elif self.solvent == 'TIP3P':
            conf = conf + 'cutoff 10.0\nswitchdist 9.0\npairlistdist 11.0\n\n'
        else:
            conf = conf + 'cutoff 16.0\nswitchdist 15.0\npairlistdist 18.0\n\n'
        # Implicit solvent: Generalized Born, optionally with a surface-area term.
        if self.solvent == 'GB' or self.solvent == 'GBSA':
            conf = conf + 'GBIS on\nionConcentration 0.0\n'
            if self.solvent == 'GBSA':
                conf = conf + 'sasa on\nsurfaceTension 0.006\n'
        conf = conf + grid_script
        if self.colvars is not None:
            conf = conf + self.colvars.conf
        if self.fixed is not None:
            conf = conf + '''
# Do not calculate fixed-atom energies
fixedAtoms on
fixedAtomsFile ''' + self.fixed + '''
fixedAtomsCol O
'''
        # Only assign an initial temperature when velocities are not restarted.
        if self.binvelocities is None:
            tLine = 'temperature $temperature'
        else:
            tLine = ''
        conf = conf + '''
# Temperature control
%s
langevin on
langevinDamping 1
langevinTemp $temperature
langevinHydrogen off ;# Don't couple bath to hydrogens
''' % tLine
        if not (self.seed == None):
            conf = conf + 'seed %d\n' % self.seed
        conf = conf + integrator_script
        conf = conf + '''
# Output parameters
outputName $outputname
binaryoutput yes
''' + output_script
        conf = conf + execution_script
        confF = open(outputname + '.namd', 'w')
        confF.write(conf)
        confF.close()

    def _execute(self,
                 outputname,
                 temperature,
                 integrator_script,
                 output_script,
                 execution_script,
                 grid_script='',
                 energyFields=[12],
                 write_energy_pkl_gz=False,
                 keepScript=False,
                 keepOutput=False,
                 keepCoor=False,
                 prmtop=None,
                 inpcrd=None,
                 bincoordinates=None,
                 binvelocities=None,
                 xsc=None,
                 solvent=None,
                 grid=None,
                 colvars=None,
                 alchemical=None,
                 seed=None,
                 finishBy=None,
                 totalSteps=None,
                 debug=None,
                 retry=True):
        """
        Executes a NAMD instance, returning the energies.
        Required arguments:
          outputname - prefix for NAMD output
          temperture - simulation temperature
          integrator_script - part of the NAMD configuration file that defines the integration. [Default: '']
          output_script - part of the NAMD configuration file that dictates output. [Default: '']
          execution_script - part of the NAMD configuration file that determines how the simulation is executed. [Default: '']
        Optional Arguments:
          grid_script - part of the NAMD configuration file that defines the interaction grids. [Default: '']
          energyFields - a list of fields to keep from the ENERGY output lines [Default: [12], which is the total potential energy]
          write_energy_pkl_gz - writes the energies into outputname.pkl.gz
          finishBy - the time, in seconds, by which the MD simulation should be complete. If it is defined and totalSteps is defined, NAMD will abort if the projected simulation length (for totalSteps) is longer than the allotted time. [Default is none, meaning that there is no time limit.]
          totalSteps - the total number of simulation steps. Only relevant if finishBy is defined.
          keepScript - keeps the NAMD configuration script
          keepOutput - keeps the NAMD output file
          keepCoor - keeps the final coordinate set from the simulation
        All optional arguments in the initialization function.
        """
        # Parse the arguments: any argument given here overrides the
        # corresponding instance attribute.
        if not (prmtop == None):
            self.prmtop = prmtop
        if not (inpcrd == None):
            self.inpcrd = inpcrd
        if not (bincoordinates == None):
            self.bincoordinates = bincoordinates
        if not (binvelocities == None):
            self.binvelocities = binvelocities
        if not (xsc == None):
            self.xsc = xsc
        if not (solvent == None):
            self.solvent = solvent
        if not (grid == None):
            self.grid = grid
        if not (colvars == None):
            self.colvars = colvars
        if not (alchemical == None):
            self.alchemical = alchemical
        if not (seed == None):
            self.seed = seed
        if not (finishBy == None):
            self.finishBy = finishBy
        if not (debug == None):
            self.debug = debug
        del prmtop, inpcrd, bincoordinates, binvelocities, xsc
        del solvent, seed, finishBy, debug
        # Check to make sure remaining arguments make sense
        if execution_script == None:
            raise Exception("Execution script required")
        # Run inside the output directory so NAMD writes its files there.
        original_dir = os.getcwd()
        execution_dir = os.path.dirname(outputname)
        if execution_dir != '':
            os.chdir(execution_dir)
            outputname = os.path.basename(outputname)
        # Checks that the process isn't already complete
        if os.path.exists(outputname + '.coor'):
            return []
        # Writes the configuration file
        self._writeConfiguration(outputname, temperature, integrator_script,
                                 output_script, execution_script, grid_script)

        # Gets the original parameters in the input scripts
        def getParm(keyword, config):
            # Return the numeric value that follows `keyword` in a NAMD
            # script fragment (0.0 if the keyword is absent); anything after
            # a newline or a ';' comment is stripped first.
            ind = config.find(keyword)
            if not (ind == -1):
                val = config[ind + len(keyword):]
                if val.find('\n') > -1:
                    val = val[:val.find('\n')]
                if val.find(';') > -1:
                    val = val[:val.find(';')]
                return float(val)
            return 0.0

        original = {}
        original['timestep'] = getParm('timestep', integrator_script)
        original['dcdfreq'] = getParm('dcdfreq', output_script)
        original['outputEnergies'] = getParm('outputEnergies', output_script)
        original['run'] = getParm('run', execution_script)
        ##############################
        # Executes the NAMD instance #
        ##############################
        # Restarts with a new random number seed if necessary
        attempts = 0
        noRunError = True
        while (not os.path.exists(outputname + '.coor')) and noRunError:
            energies = []
            if (not self.finishBy == None) and (not totalSteps == None):
                import time
                startTime = time.time()
                elapsedTimeSteps = 0
                timePerStep = 0
            # Start NAMD
            import subprocess
            proc = subprocess.Popen(
                [self.namd_command, '+p',
                 '%d' % self.NPROCS, outputname + '.namd'],
                stdout=subprocess.PIPE)
            outF = open(outputname + '.out', 'w')
            # Mirror NAMD stdout to the .out file while parsing it live.
            for line in iter(proc.stdout.readline, ''):
                outF.write(line)
                # Store energy output
                if line.find('ENERGY:') == 0:
                    ENERGY = line[9:].split()
                    energyLine = []
                    for energyField in energyFields:
                        energyLine.append(float(ENERGY[energyField]))
                    energies.append(energyLine)
                # End NAMD if there is an error
                if line.find('ERROR:') > -1:
                    if line.find('velocity') > -1:
                        # Velocity errors are recoverable; retried below.
                        print 'Atoms moving too fast'
                        break
                    else:
                        print line
                        noRunError = False
                        raise Exception('Error in NAMD')
                    # NOTE(review): unreachable -- both branches above either
                    # break or raise before reaching this cleanup.
                    try:
                        proc.kill()  # After python 2.6
                    except AttributeError:
                        print 'NAMD not killed.'
                # If there is a time limit, end NAMD is there is insufficient time to complete the instance
                if (not self.finishBy == None) and (not totalSteps == None):
                    # if (line.find('ENERGY:')==0) or (line.find('Benchmark time')>-1):
                    # if (line.find('ENERGY:')==0):
                    #   elapsedTimeSteps = int(line[7:15])
                    #   print '%d / %d steps completed'%(elapsedTimeSteps,totalSteps)
                    #   if elapsedTimeSteps > 0:
                    #     timePerStep = (time.time()-startTime)/elapsedTimeSteps
                    #     print 'Observed rate of %.7f s/step'%timePerStep
                    # elif line.find('Benchmark time')>-1:
                    if line.find('Benchmark time') > -1:
                        timePerStep = float(line[line.find('CPUs') +
                                                 4:line.find('s/step')])
                        print 'Benchmark rate of %.7f s/step' % timePerStep
                        projectedCompletion = (totalSteps - elapsedTimeSteps) * timePerStep
                        remainingTime = self.finishBy - time.time()
                        print 'Projected completion in %3.2f s. %3.2f s remaining.' % (
                            projectedCompletion, remainingTime)
                        if projectedCompletion > remainingTime:
                            print 'Insufficient time remaining for cycle.'
                            outF.write('Insufficient time remaining for cycle.\n')
                            noRunError = False
                            try:
                                proc.kill()  # After python 2.6. Will raise error otherwise.
                            except:
                                print 'NAMD not terminated normally.'
                            sys.exit()
            if noRunError:
                proc.wait()  # Let NAMD finish
            outF.close()
            # Restart NAMD if there was only a velocity error
            if (not os.path.exists(outputname + '.coor')) and noRunError:
                if not retry:
                    raise Exception('Error in NAMD')
                    # NOTE(review): unreachable after the raise above.
                    break
                attempts = attempts + 1
                if attempts <= 5:
                    # First five retries: same scripts, new random seed.
                    retry_string = 'Retrying'
                    if not (self.seed == None):
                        self.seed = self.seed + 1
                        self._writeConfiguration(outputname, temperature,
                                                 integrator_script, output_script,
                                                 execution_script, grid_script)
                        retry_string = retry_string + ' with a random number seed of %d' % self.seed
                    print retry_string
                elif attempts <= 10:
                    # Next five retries: progressively smaller time step,
                    # with output frequencies and step count scaled so the
                    # simulated time stays the same.
                    retry_string = 'Retrying'
                    attempts_ts = attempts - 5
                    new_integrator_script = integrator_script.replace(
                        'timestep',
                        'timestep %.4f ;#' % (original['timestep'] / attempts_ts))
                    new_output_script = output_script.replace(
                        'dcdfreq',
                        'dcdfreq %d ;#' % (original['dcdfreq'] * attempts_ts)).replace(
                            'outputEnergies',
                            # NOTE(review): this uses original['dcdfreq'];
                            # original['outputEnergies'] was probably intended.
                            'outputEnergies %d ;#' % (original['dcdfreq'] * attempts_ts))
                    new_execution_script = execution_script.replace(
                        'run', 'run %d ;#' % (original['run'] * attempts_ts))
                    if not (self.seed == None):
                        self.seed = self.seed + 1
                        retry_string = retry_string + ' with a random number seed of %d' % self.seed
                    self._writeConfiguration(outputname, temperature,
                                             new_integrator_script, new_output_script,
                                             new_execution_script)
                    retry_string = retry_string + ' and a time step of %.4f' % (
                        original['timestep'] / attempts_ts)
                    print retry_string
                else:
                    print 'Too many attempts!'
                    noRunError = False
        # Clean up
        if not keepScript:
            self._removeFile(outputname + '.namd')
        # Even if keepOutput=False, keep the output if there are run errors
        # NOTE(review): the condition below *removes* the output when there
        # was a run error (not noRunError), which contradicts the comment
        # above -- confirm intent.
        if (not keepOutput) or (not noRunError):
            self._removeFile(outputname + '.out')
        if not keepCoor:
            self._removeFile(outputname + '.coor')
        self._removeFile(outputname + '.vel')
        self._removeFile(outputname + '.xsc')
        self._removeFile(outputname + '.fep')
        self._removeFile(outputname + '.colvars.traj')
        self._removeFile(outputname + '.colvars.state')
        self._removeFile(outputname + '.colvars.state.old')
        # Return to original directory
        if execution_dir != '':
            os.chdir(original_dir)
        if noRunError:
            if write_energy_pkl_gz:
                self._write_pkl_gz(outputname + '.pkl.gz', energies)
            return energies
        else:
            return None

    def mintherm(self, outputname, margin=0.0, keepScript=False):
        """
        Minimizes a configuration and thermalizes it to 300 K.
        """
        integrator_script = '''
# Integrator parameters
rigidBonds none
timestep 1.0
nonbondedFreq 2
fullElectFrequency 4
stepspercycle 20
'''
        output_script = '''
outputEnergies 1000 ;# 1 ps
margin {0}
'''.format(margin)
        # Minimize, then heat from 10 K up to the target in 10 K increments.
        execution_script = '''
minimize 1000
for { set curTemp 10 } { $curTemp <= $temperature } { incr curTemp 10 } {
reinitvels $curTemp
langevinTemp $curTemp
run 1000
}
'''
        energies = self._execute(outputname,
                                 300.0,
                                 integrator_script,
                                 output_script,
                                 execution_script,
                                 keepScript=keepScript,
                                 keepCoor=True)
        return energies

    def simulate(self, outputname, temperature=300.0, steps=10000000,
                 margin=0.0,
                 energyFields=[12],
                 keepScript=False, keepCoor=False, write_energy_pkl_gz=False):
        """
        Runs an MD simulation.
        """
        integrator_script = '''
# Integrator parameters
rigidBonds none
timestep 1.0
nonbondedFreq 2
fullElectFrequency 4
stepspercycle 20
'''
        output_script = '''
dcdfreq 1000 ;# 1 ps
outputEnergies 1000 ;# 1 ps
margin {0}
'''.format(margin)
        execution_script = '''
reinitvels $temperature
run %d
''' % steps
        energies = self._execute(outputname,
                                 temperature,
                                 integrator_script,
                                 output_script,
                                 execution_script,
                                 energyFields=energyFields,
                                 keepScript=keepScript,
                                 keepCoor=keepCoor,
                                 write_energy_pkl_gz=write_energy_pkl_gz)
        return energies

    def _energy_scripts(self, dcdname, stride=1, test=False):
        """
        Scripts for calculating energies
        """
        integrator_script = '''
# Integrator parameters
rigidBonds none
nonbondedFreq 1
fullElectFrequency 1
stepspercycle 1
'''
        output_script = '''
outputEnergies 1
'''
        import os
        if os.path.isfile(dcdname):
            # Replay the trajectory, evaluating an energy every `stride` frames
            # (the Tcl '%' combined with Python's %d yields '% <stride>').
            execution_script = '''
set ts 0
coorfile open dcd {''' + dcdname + '''}
while { ![coorfile read] } {
if { [expr $ts %''' + '''%d == 0] } {
firstTimestep $ts
run 0
}
incr ts 1
}
coorfile close
''' % stride
        else:
            # No trajectory: evaluate a single energy for the input coordinates.
            execution_script = '''run 0'''
        return (integrator_script, output_script, execution_script)

    def energies_PE(self, outputname, dcdname=None, energyFields=[12],
                    stride=1, keepScript=False, write_energy_pkl_gz=True, test=False):
        """
        Calculates potential energies in a dcd file.
        outputname - the prefix for the resulting energy file
        dcdname - the dcd file file read [Default - outputname.dcd]
        energyFields - the NAMD energy fields to keep [Default 12, total potential energy]
        """
        if dcdname == None:
            dcdname = outputname + '.dcd'
        (integrator_script, output_script,
         execution_script) = self._energy_scripts(dcdname,
                                                  stride=stride,
                                                  test=test)
        # Reuse cached energies if a previous run already wrote them.
        if (os.path.exists('%s.pkl.gz' % outputname)):
            energies = self.load_pkl_gz('%s.pkl.gz' % outputname)
        else:
            energies = self._execute(outputname,
                                     0.0,
                                     integrator_script,
                                     output_script,
                                     execution_script,
                                     energyFields=energyFields,
                                     write_energy_pkl_gz=write_energy_pkl_gz,
                                     keepScript=keepScript,
                                     retry=False)
        return energies

    def energies_LJ_ELE_INT(self, outputname, dcdname=None, keepScript=False):
        """
        Calculates Lennard-Jones and electrostatic interaction energies with a grid, and ligand internal energy
        """
        if (self.grid == None):
            raise Exception('Function requires grid')
        if dcdname == None:
            dcdname = outputname + '.dcd'
        (integrator_script, output_script,
         execution_script) = self._energy_scripts(dcdname)
        grid_script_LJ = self.grid.script_LJ()
        grid_script_ELE = self.grid.script_ELE()
        # LJ pass: MISC (field 8) is the grid energy, POTENTIAL (field 12)
        # includes the ligand internal energy.
        if (os.path.exists('%s.LJ.pkl.gz' % outputname)):
            energies = self.load_pkl_gz('%s.LJ.pkl.gz' % outputname)
        else:
            energies = self._execute(
                outputname + '.LJ',
                0.0,
                integrator_script,
                output_script,
                execution_script,
                grid_script_LJ,
                energyFields=[8, 12],  # MISC and POTENTIAL energy fields
                write_energy_pkl_gz=True,
                keepScript=keepScript,
                retry=False)
        E_LJ = [energy[0] for energy in energies]
        # Internal energy = total potential minus the grid contribution.
        E_INT = [energy[1] - energy[0] for energy in energies]
        # ELE pass: only the grid (MISC) energy is needed.
        if (os.path.exists('%s.ELE.pkl.gz' % outputname)):
            energies = self.load_pkl_gz('%s.ELE.pkl.gz' % outputname)
        else:
            energies = self._execute(
                outputname + '.ELE',
                0.0,
                integrator_script,
                output_script,
                execution_script,
                grid_script_ELE,
                energyFields=[8],  # MISC field has grid energy
                write_energy_pkl_gz=True,
                keepScript=keepScript,
                retry=False)
        E_ELE = [energy[0] for energy in energies]
        return [E_LJ, E_ELE, E_INT]
|
# #!/usr/bin/env python
import json
import requests
import pprint
import time

# REST endpoints of the three local microservices: the UDP load generator,
# the RRH-to-BBU resource mapper, and the OpenStack client wrapper.
LG_API = 'http://127.0.0.1:5001/load-generator'
RM_API = 'http://127.0.0.1:5002/resource-mapper'
OC_API = 'http://127.0.0.1:5003/openstack-client'

# Global registries populated while the script runs:
rrhs = []  # created RRH load objects plus the BBU names they map to
bbus = []  # BBU instances whose fixed IP address has been discovered
def requestGet(url):
    """GET `url` and return the decoded JSON body.

    Raises ValueError when the service does not answer with HTTP 200.
    """
    r = requests.get(url)
    if r.status_code != 200:
        # BUG FIX: str() is required -- concatenating the int status code to a
        # str raised TypeError in the original, masking the real error.
        raise ValueError('Unexpected status code: ' + str(r.status_code))
    return r.json()
def requestPost(url, payload=None):
    """POST `payload` (form-encoded, may be None) to `url` and return the JSON body.

    Raises ValueError when the service does not answer with HTTP 200.
    """
    r = requests.post(url, data=payload)
    if r.status_code != 200:
        # BUG FIX: str() is required -- concatenating the int status code to a
        # str raised TypeError in the original, masking the real error.
        raise ValueError('Unexpected status code: ' + str(r.status_code))
    return r.json()
def pollBBUObject(bbu, retryCount=0):
    """Poll OpenStack until the instance for `bbu` exposes its fixed 10.0.x.x IP.

    On success appends {'name', 'ip'} to the global `bbus` registry and
    returns True. Retries up to 10 times with a 10 second pause between
    attempts; returns False on timeout.
    """
    if retryCount > 10:
        return False
    builtInstances = requestGet(OC_API + '/hypervisor/' + bbu['zone'] + '/instances')
    for instance in builtInstances:
        if instance['name'] == bbu['name']:
            for address in instance['addresses']:
                if address['type'] == 'fixed' and address['addr'].startswith('10.0'):
                    bbus.append({
                        'name': bbu['name'],
                        'ip': address['addr']
                    })
                    return True
    # BUG FIX: str() is required -- concatenating the int retry counter to a
    # str raised TypeError in the original.
    print('BBU instance is not ready yet, retry number: ' + str(retryCount))
    time.sleep(10)
    return pollBBUObject(bbu, retryCount + 1)
def getBBUIP(name):
    """Return the discovered IP for the BBU called `name` (None if unknown)."""
    matches = (entry['ip'] for entry in bbus if entry['name'] == name)
    return next(matches, None)
# read configuration file
with open('configuration.json') as f:
    configuration = json.load(f)

# create load in waiting state
for rrh in configuration['rrhs']:
    rrhObject = requestPost(LG_API + '/rrh/create?rate=' + str(rrh['rate']))
    if rrhObject['id'] is not None:
        requestPost(LG_API + '/rrh/' + str(rrhObject['id']) + '/add-connection?amount=' + str(rrh['connections']))
        rrhs.append({
            'id': rrhObject['id'],
            'bbus': rrh['bbus']
        })

# create openstack instances
for bbu in configuration['bbus']:
    requestPost(OC_API + '/instance?name=' + bbu['name'] + '&zone=' + bbu['zone'])
    # poll until BBU ip address could be obtained
    if pollBBUObject(bbu):
        print('BBU instance is being built and IP is obtained')
    else:
        print('Cannot obtain BBU instance IP before timeout')
        exit(1)

# create openflow mapping rules
for rrh in rrhs:
    bbuIps = [getBBUIP(bbuName) for bbuName in rrh['bbus']]
    # BUG FIX: the resource-mapper endpoint constant is RM_API; the original
    # referenced an undefined name RM_IP and crashed with NameError here.
    mapping = requestPost(RM_API + '/mapping?format=ip&rrh=' + str(rrh['id']) + '&bbus=' + (','.join(bbuIps)))
    print('mapping created for rrh#' + str(rrh['id']))
    print(json.dumps(mapping, indent=2))

# start sending UDP load
print('all BBU instances are ready and mapping is complete, starting to send load')
time.sleep(5)
for rrh in rrhs:
    # BUG FIX: rrhs holds dicts, so address each entry's 'id' explicitly --
    # the original formatted the whole dict into the URL.
    requestPost(LG_API + '/rrh/' + str(rrh['id']) + '/start')
    print('RRH#' + str(rrh['id']) + ' started sending its load')
print('system is ready')
exit(0)
|
import config
import requests
from app.helpers import imagesource, response, dbhelper
from app.resources import limiter, images_blueprint
from flask import make_response, jsonify, request, stream_with_context, Response
@images_blueprint.route("/view/<int:sid>")
@limiter.limit(lambda : config.view_images_limit)
def view(sid):
    # Proxy an external image through this server so the client does not
    # need to supply source-specific request headers itself.
    if (sid == imagesource.PIXIV):
        # Pixiv's image CDN requires the app-api referer; without it the CDN
        # answers 403.
        req = requests.get(request.args["url"], headers={"Referer": "https://app-api.pixiv.net/"})
        return Response(req.content, content_type=req.headers['content-type'])
    # NOTE(review): any other `sid` falls through and returns None (an HTTP
    # 500 in Flask) -- confirm whether additional sources were intended.
# 098 - Faça um programa que tenha uma função chamada contador(), que receba três parâmetros: inicio, fim e passo.
#Seu programa tem que realizar três contagens através da função criada:
from time import sleep
def contagem(ini, fim, pas):
    """Print the integers from `ini` to `fim` (inclusive), stepping by `pas`.

    Counts down automatically when `ini` is greater than `fim`; the
    direction is decided by ini/fim, so the sign of `pas` is ignored.
    A step of 0 (which would make range() raise ValueError) is treated as 1.
    Pauses half a second between numbers and ends with a newline.
    """
    pas = abs(pas)
    if pas == 0:
        # BUG FIX: range() raises ValueError on a zero step; count one by one.
        pas = 1
    if ini <= fim:
        for c in range(ini, fim + 1, pas):
            print(f'{c}', end=' ')
            sleep(0.5)
        print()
    else:
        for c in range(ini, fim - 1, -pas):
            print(f'{c}', end=' ')
            sleep(0.5)
        print()
print('-='*17)
print('Contagem de 1 até 10 de 1 em 1')
# BUG FIX: contagem() already counts up to `fim` inclusive, so the announced
# 1-to-10 count must pass fim=10 -- the original passed 11 and printed 1..11.
contagem(1, 10, 1)
print('Contagem de 10 até 0 de 2 em 2')
contagem(10, 0, 2)
# Interactive third count with user-supplied bounds and step.
ini = int(input('Digite o início da sequencia: '))
fim = int(input('Digite o fim da sequencia: '))
pas = int(input('Digite de quanto em quanto a sequencia deve ir: '))
contagem(ini, fim, pas)
|
# -*- coding: utf-8 -*-
# Author : 怕你呀
# Time : 2021/5/14
# File : main_page
# IDE : PyCharm
from time import sleep
from selenium import webdriver
from selenium.webdriver.common.by import By
from page.base_page import BasePage
class MainPage(BasePage):
    """Page object for the Baidu main (search) page."""

    # Locators: the search button, the query input box, and the link of the
    # first search result.  ('frist' typo in the original locator name fixed;
    # the attributes are name-mangled privates, so no external callers break.)
    __by_of_search = (By.XPATH, '//*[@id="su"]')
    __by_of_input = (By.XPATH, '//*[@id="kw"]')
    __by_of_first = (By.XPATH, '//*[@class="t"]/a')

    def search(self, key=None):
        """Send `key` to the element addressed by the search-button locator.

        NOTE(review): the original sends keys to the *button* locator
        (__by_of_search); confirm whether __by_of_input was intended.
        """
        self.send(self.__by_of_search, val=key)

    def get_att(self):
        """Return the 'value' attribute of the search button."""
        # find_element(By.ID, ...) works on Selenium 3 and 4; the
        # find_element_by_id helper used originally was removed in Selenium 4.
        return self.driver.find_element(By.ID, "su").get_attribute('value')

    def get_handles(self):
        """Search for 'ssl', open the first result, switch back to the
        original window, and return all window handles."""
        self.send(self.__by_of_input, val='ssl')
        self.click(self.__by_of_search)
        self.click(self.__by_of_first)
        handles = self.driver.window_handles
        self.driver.switch_to.window(handles[0])
        sleep(4)
        return handles
|
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the admin page."""
from core.controllers import editor
from core.controllers import pages
from core.domain import config_domain
from core.tests import test_utils
import feconf
# Test fixtures: a user holding both the moderator and admin roles, and a
# distinctive forum URL used to detect config changes on the /about page.
BOTH_MODERATOR_AND_ADMIN_EMAIL = 'moderator.and.admin@example.com'
BOTH_MODERATOR_AND_ADMIN_USERNAME = 'moderatorandadm1n'
SITE_FORUM_URL = 'siteforum.url'


class AdminIntegrationTest(test_utils.GenericTestBase):
    """Server integration tests for operations on the admin page."""

    def setUp(self):
        """Complete the signup process for self.ADMIN_EMAIL."""
        super(AdminIntegrationTest, self).setUp()
        self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
        self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)

    def test_admin_page(self):
        """Test that the admin page shows the expected sections."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        response = self.testapp.get('/admin')
        self.assertEqual(response.status_int, 200)
        response.mustcontain(
            'Performance Counters',
            'Total processing time for all JSON responses',
            'Configuration',
            'Reload a single exploration',
            'three_balls')
        self.logout()

    def test_admin_page_rights(self):
        """Test access rights to the admin page."""
        # Logged-out users are redirected away from /admin.
        response = self.testapp.get('/admin')
        self.assertEqual(response.status_int, 302)
        # Login as a non-admin.
        self.login(self.EDITOR_EMAIL)
        response = self.testapp.get('/admin', expect_errors=True)
        self.assertEqual(response.status_int, 401)
        self.logout()
        # Login as an admin.
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        response = self.testapp.get('/admin')
        self.assertEqual(response.status_int, 200)
        self.logout()

    def test_change_configuration_property(self):
        """Test that configuration properties can be changed."""
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        response = self.testapp.get('/admin')
        csrf_token = self.get_csrf_token_from_response(response)
        # Before saving, the property reports its default value.
        response_dict = self.get_json('/adminhandler')
        response_config_properties = response_dict['config_properties']
        self.assertDictContainsSubset({
            'value': editor.MODERATOR_REQUEST_FORUM_URL_DEFAULT_VALUE,
        }, response_config_properties[editor.MODERATOR_REQUEST_FORUM_URL.name])
        payload = {
            'action': 'save_config_properties',
            'new_config_property_values': {
                editor.MODERATOR_REQUEST_FORUM_URL.name: (
                    self.UNICODE_TEST_STRING),
            }
        }
        self.post_json('/adminhandler', payload, csrf_token)
        # After saving, the handler reports the new value.
        response_dict = self.get_json('/adminhandler')
        response_config_properties = response_dict['config_properties']
        self.assertDictContainsSubset({
            'value': self.UNICODE_TEST_STRING,
        }, response_config_properties[editor.MODERATOR_REQUEST_FORUM_URL.name])
        self.logout()

    def test_change_about_page_config_property(self):
        """Test that the correct variables show up on the about page."""
        # Navigate to the about page. The site name is not set.
        response = self.testapp.get('/about')
        self.assertIn('https://site/forum/url', response.body)
        self.assertNotIn(SITE_FORUM_URL, response.body)
        self.login(self.ADMIN_EMAIL, is_super_admin=True)
        response = self.testapp.get('/admin')
        csrf_token = self.get_csrf_token_from_response(response)
        self.post_json('/adminhandler', {
            'action': 'save_config_properties',
            'new_config_property_values': {
                pages.SITE_FORUM_URL.name: SITE_FORUM_URL
            }
        }, csrf_token)
        self.logout()
        # Navigate to the splash page. The site name is set.
        response = self.testapp.get('/about')
        self.assertNotIn('https://site/forum/url', response.body)
        self.assertIn(SITE_FORUM_URL, response.body)

    def test_change_rights(self):
        """Test that the correct role indicators show up on app pages."""
        self.signup(self.MODERATOR_EMAIL, self.MODERATOR_USERNAME)
        self.signup('superadmin@example.com', 'superadm1n')
        self.signup(
            BOTH_MODERATOR_AND_ADMIN_EMAIL, BOTH_MODERATOR_AND_ADMIN_USERNAME)
        # Navigate to any page. The role is not set.
        self.testapp.get('/gallery').mustcontain(no=['/moderator', '/admin'])
        # Log in as a superadmin. This gives access to /admin.
        self.login('superadmin@example.com', is_super_admin=True)
        self.testapp.get('/gallery').mustcontain('/admin', no=['/moderator'])
        # Add a moderator, an admin, and a person with both roles, then log
        # out.
        response = self.testapp.get('/admin')
        csrf_token = self.get_csrf_token_from_response(response)
        self.post_json('/adminhandler', {
            'action': 'save_config_properties',
            'new_config_property_values': {
                config_domain.ADMIN_USERNAMES.name: [
                    self.ADMIN_USERNAME,
                    BOTH_MODERATOR_AND_ADMIN_USERNAME],
                config_domain.MODERATOR_USERNAMES.name: [
                    self.MODERATOR_USERNAME,
                    BOTH_MODERATOR_AND_ADMIN_USERNAME],
            }
        }, csrf_token)
        self.logout()
        # Log in as a moderator.
        self.login(self.MODERATOR_EMAIL)
        self.testapp.get(feconf.GALLERY_URL).mustcontain(
            '/moderator', no=['/admin'])
        self.logout()
        # Log in as an admin.
        self.login(self.ADMIN_EMAIL)
        self.testapp.get(feconf.GALLERY_URL).mustcontain(
            '/moderator', no=['/admin'])
        self.logout()
        # Log in as a both-moderator-and-admin.
        # Only '(Admin)' is shown in the navbar.
        self.login(BOTH_MODERATOR_AND_ADMIN_EMAIL)
        self.assertEqual(self.testapp.get('/').status_int, 302)
        self.testapp.get(feconf.GALLERY_URL).mustcontain(
            '/moderator', no=['/admin'])
        self.logout()
|
import numpy as np
def magnitude(x):
    """Return the Euclidean (L2) norm of the vector *x*."""
    vec = np.array(x)
    return np.linalg.norm(vec)
def distance(p1, p2):
    """Return the Euclidean distance between positions *p1* and *p2*."""
    # Inline the norm computation rather than delegating to magnitude().
    displacement = np.array(p2) - np.array(p1)
    return np.linalg.norm(displacement)
def unit_vector(x):
    """Return *x* scaled to unit length."""
    arr = np.array(x)
    # Dividing by the norm normalizes the vector (no zero-length guard,
    # matching the original behavior).
    return arr / np.linalg.norm(arr)
def random_sphere(center, radius, size, seed=123):
    """Create a list of random points in a sphere.

    Keyword arguments:
    center -- center of sphere
    radius -- radius of sphere
    size -- number of points to create
    seed -- random state (default 123)
    """
    # Legacy global seeding is kept so a given seed reproduces the same points.
    np.random.seed(seed)
    cx, cy, cz = center
    # Draw in the same order as before so the random stream (and therefore
    # the output) is unchanged for a given seed: azimuth, cos(polar), radius.
    phi = np.random.uniform(0, 2*np.pi, size)
    costheta = np.random.uniform(-1, 1, size)
    u = np.random.uniform(0, 1, size)
    theta = np.arccos( costheta )
    # The cube root makes the radial density uniform in volume.
    r = radius * (u**(1/3))
    xs = (r * np.sin(theta) * np.cos(phi)) + cx
    ys = (r * np.sin(theta) * np.sin(phi)) + cy
    zs = (r * np.cos(theta)) + cz
    return list(zip(xs, ys, zs))
def random_ec_sphere(center, r1, r2, size, seed=123):
    """Create a list of random points between two concentric spheres.

    Keyword arguments:
    center -- center of spheres
    r1 -- radius of the smaller sphere
    r2 -- radius of the bigger sphere
    size -- number of points to create
    seed -- random state (default 123)
    """
    np.random.seed(seed)
    # Oversample: only ~(1 - (r1/r2)^3) of a uniform ball lies outside the
    # inner sphere, and the factor of 2 gives headroom so that at least
    # `size` points survive the rejection step.
    inc_size = int(2*size / (1 - (r1/r2)**3))
    x0, y0, z0 = center
    ls = random_sphere(center, r2, inc_size, seed)
    x = np.array([i[0] for i in ls])
    y = np.array([i[1] for i in ls])
    z = np.array([i[2] for i in ls])
    # BUG FIX: measure the distance from the *center* of the spheres, not
    # from the origin -- the original kept points inside the inner sphere
    # whenever the center was away from the origin (x0/y0/z0 were unpacked
    # but never used, which confirms the intent).
    cnd = (x - x0)**2 + (y - y0)**2 + (z - z0)**2 > r1**2
    xs = x[cnd]
    ys = y[cnd]
    zs = z[cnd]
    return list(zip(xs, ys, zs))[:size]
def somigliana(phi):
    """
    Somigliana equation
    phi: latitude in degrees
    Returns g: gravitational acceleration (m/s2) on the reference ellipsoid.
    """
    # Work with sin^2(latitude in radians); both correction terms use it.
    sin_sq = np.sin(phi * (np.pi/180)) ** 2
    numerator = 1 + 0.00193185265245827352087 * sin_sq
    denominator = np.sqrt(1 - 0.006694379990141316996137 * sin_sq)
    return 9.780325335903891718546 * (numerator / denominator)
def welmec(phi, h):
    """
    WELMEC formula
    phi: latitude in degrees
    h: height in meters above sea level
    Returns g: gravitational acceleration (m/s2).
    """
    lat = phi * (np.pi/180)
    # Latitude-dependent correction factor applied to standard gravity,
    # then a free-air reduction proportional to the height.
    latitude_factor = 1 + 0.0053024*(np.sin(lat)**2) - 0.0000058*(np.sin(2*lat)**2)
    return latitude_factor * 9.780318 - 0.000003085 * h
|
import random
import time
from pathmap.tree import Tree
class Timer:
    """Context manager that prints the elapsed wall-clock time on exit."""

    def __init__(self):
        # The clock starts as soon as the object is created.
        self.start = time.time()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        elapsed = time.time() - self.start
        print('The function took {time} seconds to complete'.format(time=elapsed))
# ========== Fixtures ============
def get_file_fixture():
    """Read the benchmark table-of-contents file and return a flat list of paths."""
    collected = []
    with open('tests/test_files/toc_benchmark.txt', 'r') as input_data:
        # Each line holds comma-separated paths; flatten them all.
        for row in input_data:
            collected += row.strip().split(',')
    return collected
def main():
    """Benchmark Tree construction and a single path lookup."""
    toc = ','.join(get_file_fixture())
    tree = Tree()
    print('Benchmark Tree:construct_tree')
    with Timer():
        tree.construct_tree(toc)
    print('Benchmark Tree::lookup')
    with Timer():
        path = tree.lookup('c:/projects/media-server/source/calldetailrecords/esncdr/esncdr.cpp')
    print(path)


if __name__ == '__main__':
    main()
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
from __future__ import absolute_import
from allura.tests import TestController
app = None  # module-level placeholder (kept for compatibility)


class TestDispatch(TestController):
    """Functional tests for Allura's URL dispatch behavior."""

    validate_skip = True

    def test_dispatch(self):
        """Known routes return their payload; unknown routes 404."""
        resp = self.app.get('/dispatch/foo/')
        assert resp.text == 'index foo', resp
        resp = self.app.get('/dispatch/foo/bar')
        assert resp.text == "default(foo)(('bar',))", resp
        self.app.get('/not_found', status=404)
        self.app.get('/dispatch/', status=404)
        # self.app.get('/hello/foo/bar', status=404)
|
from abc import abstractmethod
from .statement_interface import StatementInterface
class QueryInterface(StatementInterface):
    """Abstract interface for statements that can be viewed as an
    expression and compiled into a query.

    Concrete subclasses must implement both methods; the forward-referenced
    return types ('ExpressionInterface', 'Query') are declared elsewhere in
    the package.
    """
    @abstractmethod
    def as_expression(self) -> 'ExpressionInterface':
        """Return this query wrapped as an expression."""
        raise NotImplementedError('as_expression must be implemented')
    @abstractmethod
    def compile(self) -> 'Query':
        """Compile this statement into an executable Query object."""
        raise NotImplementedError('compile must be implemented')
|
# Copyright (c) 2017 Hanson Robotics, Ltd.
import os
import time
import datetime as dt
import logging
import rospy
import emopy
from cv_bridge import CvBridge, CvBridgeError
from std_msgs.msg import String
from sensor_msgs.msg import Image
logger = logging.getLogger('hr.emotion_recognizer')
class EmotionRecognizer(object):
    """ROS node body: subscribes to camera frames, runs emotion recognition
    on every 5th frame, republishes an annotated image, and appends each
    recognized emotion to a timestamped CSV log.
    """
    def __init__(self):
        # recognize() is invoked for every frame published on camera/image_raw.
        self.sub = rospy.Subscriber('camera/image_raw', Image, self.recognize)
        self.pub = rospy.Publisher('emotion_image', Image, queue_size=1)
        # Frame counter used to throttle recognition to 1 in 5 frames.
        self.count = 0
        self.bridge = CvBridge()
        # CSV log path; presumably consumed by the chatbot — TODO confirm.
        self.emotion_file = os.path.expanduser('~/.hr/chatbot/data/emotion.csv')
    def republish(self, image):
        # Pass-through publish of an unmodified image message.
        self.pub.publish(image)
    def write(self, emotion):
        """Append '<YYYYmmddHHMMSS>,<emotion>' to the CSV log, creating the
        log directory on first use."""
        dirname = os.path.dirname(self.emotion_file)
        if not os.path.isdir(dirname):
            os.makedirs(dirname)
        with open(self.emotion_file, 'a') as f:
            now = dt.datetime.now()
            f.write('{},{}\n'.format(dt.datetime.strftime(now, '%Y%m%d%H%M%S'), emotion))
    def recognize(self, msg):
        """Image callback: analyze every 5th frame; republish the other
        frames untouched so the output stream stays continuous."""
        self.count += 1
        if self.count % 5 != 0:
            self.pub.publish(msg)
            return
        try:
            frame = self.bridge.imgmsg_to_cv2(msg, "bgr8")
            # NOTE(review): the inline comment below describes flat 4-tuples,
            # but the area lambda indexes rect[0]/rect[1] as (x, y) pairs,
            # implying ((left, top), (right, bottom)) — confirm emopy's
            # actual return format.
            faces = emopy.get_faces(frame) # faces: [(left, top, right, bottom), (...)]
            if not faces:
                # No face detected: the frame is dropped, not republished —
                # confirm this is intentional (the throttled path republishes).
                return
            biggest_face = max(faces, key=lambda rect: (rect[1][0]-rect[0][0])*(rect[1][1]-rect[0][1]))
            emotions = emopy.recognize(frame, [biggest_face])
            frame = emopy.overlay(frame, [biggest_face], emotions)
            emotion = emotions[0]
            if emotion is not None:
                self.write(emotion)
            ros_frame = self.bridge.cv2_to_imgmsg(frame, "bgr8")
            self.pub.publish(ros_frame)
        except CvBridgeError as ex:
            logger.error(ex)
if __name__ == '__main__':
    # Start the node and process camera frames until shutdown.
    rospy.init_node('emotion')
    EmotionRecognizer()
    # rospy.spin() blocks until the node is shut down, so the previous
    # `while not rospy.is_shutdown(): rospy.spin()` wrapper was redundant.
    rospy.spin()
|
import RPi.GPIO as GPIO
import time
pin_1 = 18
pin_2 = 17
def initGPIO():
    """Configure both LED pins as outputs using BCM pin numbering."""
    GPIO.setmode(GPIO.BCM)
    GPIO.setwarnings(False)
    for pin in (pin_1, pin_2):
        GPIO.setup(pin, GPIO.OUT)
def ledOn_1():
    """Drive LED 1's pin high and report it.

    The Python-2 print statement was converted to a print() call, matching
    turnLed() below and keeping the file valid under Python 3.
    """
    GPIO.output(pin_1, GPIO.HIGH)
    print("LED 1 on")
def ledOff_1():
    """Drive LED 1's pin low and report it.

    The Python-2 print statement was converted to a print() call, matching
    turnLed() below and keeping the file valid under Python 3.
    """
    GPIO.output(pin_1, GPIO.LOW)
    print("LED 1 off")
def ledOn_2():
    """Drive LED 2's pin high and report it.

    The Python-2 print statement was converted to a print() call, matching
    turnLed() below and keeping the file valid under Python 3.
    """
    GPIO.output(pin_2, GPIO.HIGH)
    print("LED 2 on")
def ledOff_2():
    """Drive LED 2's pin low and report it.

    The Python-2 print statement was converted to a print() call, matching
    turnLed() below and keeping the file valid under Python 3.
    """
    GPIO.output(pin_2, GPIO.LOW)
    print("LED 2 off")
def turnOnLed(led):
    """Drive the given BCM pin high and report it.

    :param led: BCM pin number of the LED.
    The Python-2 print statement was converted to a print() call, matching
    turnLed() below and keeping the file valid under Python 3.
    """
    GPIO.output(led, GPIO.HIGH)
    print("LED " + str(led) + " on")
def turnOffLed(led):
    """Drive the given BCM pin low and report it.

    :param led: BCM pin number of the LED.
    The Python-2 print statement was converted to a print() call, matching
    turnLed() below and keeping the file valid under Python 3.
    """
    GPIO.output(led, GPIO.LOW)
    print("LED " + str(led) + " off")
def turnLed(led, message):
    """Switch the LED on pin *led* according to *message* ('on' or 'off');
    any other message is reported as unrecognized."""
    actions = {'on': turnOnLed, 'off': turnOffLed}
    action = actions.get(message)
    if action is not None:
        action(led)
    else:
        print("Unrecognized message")
#!/usr/bin/python3
"""
Doorstop PyQt GUI
"""
import sys
from PyQt5.QtCore import Qt, QSize
from PyQt5.QtWidgets import QMainWindow, QApplication, QAction, QWidget, QFileDialog
from PyQt5.QtWidgets import QSplitter, QVBoxLayout, QTreeWidget, QTreeWidgetItem, QTableWidget
from PyQt5.QtWidgets import QTabWidget, QStackedWidget, QFrame
from PyQt5.QtGui import QIcon
from doorstop.core import builder
class DocTreeItem(QTreeWidgetItem):
    """Tree item that keeps a reference to the doorstop document it shows."""
    def __init__(self, doc):
        super().__init__()
        self.doc = doc  # doorstop document backing this tree row
class DocTree(QTreeWidget):
    """Tree widget listing doorstop documents, nested by parent prefix."""

    def __init__(self):
        super().__init__()
        self._docList = []
        self.header().hide() # we don't need the header
        #self.setHeaderLabels(['Requirement Tree'])

    def addItem(self, doc):
        """Insert *doc*: as a root if it has no parent, else under its parent."""
        if doc.parent is None:
            self._addRootItem(doc)
        else:
            self._addChildItem(self._findParent(doc), doc)

    def _addRootItem(self, doc):
        item = DocTreeItem(doc)
        item.setText(0, doc.prefix)
        self.addTopLevelItem(item)
        self._docList.append(item)

    def _addChildItem(self, parent, doc):
        if parent is None:
            raise ValueError('Can\'t find document parent for {}'.format(doc.prefix))
        item = DocTreeItem(doc)
        item.setText(0, doc.prefix)
        parent.addChild(item)
        parent.setExpanded(True) # TODO: make this optional?
        self._docList.append(item)

    def _findParent(self, child):
        """Return the item whose document prefix equals *child*'s parent, or None."""
        return next((item for item in self._docList
                     if item.doc.prefix == child.parent), None)
class ReqTree(QTreeWidget):
    """Tree widget showing one document's items nested by their doorstop
    level (e.g. 1.2.3).

    Assumes level.value is a sequence of ints and that parent items are
    added before their children — TODO confirm against doorstop's API.
    """
    def __init__(self, doc=None):
        super().__init__()
        self.header().hide() # we don't need the header
        if doc:
            self.loadDoc(doc)
    def loadDoc(self, doc):
        """Add every item of *doc* to the tree."""
        for item in doc.items:
            self.addItem(item.level, item.text)
    def addItem(self, level, text):
        """Insert one item at the position encoded by *level*."""
        # strip off trailing zero (a trailing .0 presumably marks a heading)
        index = level.value[:-1] if level.value[-1] == 0 else level.value
        if not index:
            raise ValueError('Invalid item index: empty')
        # Single-component levels (and x.0 leftovers) are top-level items.
        if (len(index) == 1) or (len(index) == 2 and index[1] == 0):
            self._addRootItem(index[0], text)
        else:
            # Last component positions the item under the parent located
            # by the preceding components.
            finalIndex = index[-1]
            subIndex = index[:-1]
            self._addChildItem(self._findParent(subIndex), finalIndex, str(level), text)
    def _addRootItem(self, index, text):
        # Levels are 1-based; Qt insert positions are 0-based.
        rootItem = QTreeWidgetItem()
        rootItem.setText(0, str(index) + ' ' + text)
        self.insertTopLevelItem(index-1, rootItem)
    def _addChildItem(self, parent, finalIndex, index, text):
        childItem = QTreeWidgetItem()
        childItem.setText(0, str(index) + ' ' + text)
        parent.insertChild(finalIndex-1, childItem)
    def _findParent(self, subIndex):
        """Walk the tree along the 1-based components of *subIndex* and
        return the parent item; raises ValueError if the path is broken."""
        if not subIndex:
            raise ValueError('Invalid item index: empty')
        parent = self.topLevelItem(subIndex[0]-1)
        if not parent:
            # NOTE(review): only logs and falls through; the ValueError
            # below fires afterwards — consider raising here directly.
            print('root not found ' + str(subIndex[0]-1))
        while (parent and subIndex[1:]):
            parent = parent.child(subIndex[1]-1)
            subIndex = subIndex[1:]
        if not parent:
            raise ValueError('Invalid item index: parent not found')
        return parent
class DocTable(QTableWidget):
    """Table view of a document's items (row population is still a stub)."""
    def __init__(self, doc=None):
        super().__init__()
        if doc:
            self.loadDoc(doc)
    def loadDoc(self, doc):
        """Size the table to the document and add each item.

        Assumes len(doc) equals the number of items — TODO confirm.
        """
        #self.setColumnCount(
        self.setRowCount(len(doc))
        for item in doc.items:
            self._addItem(item)
    def _addItem(self, item):
        # TODO: populate a row for *item* (not yet implemented).
        pass
class TreeStack(QStackedWidget):
    """Stack of ReqTree widgets, one per document, keyed by prefix."""

    def __init__(self):
        super().__init__()
        self._docList = {}
        # Blank placeholder shown before any document is loaded.
        placeholder = QTreeWidget()
        self.addWidget(placeholder)

    def addDoc(self, doc):
        """Create a requirement tree for *doc* and register it by prefix."""
        tree = ReqTree(doc)
        self._docList[doc.prefix] = tree
        self.addWidget(tree)

    def makeDocActive(self, doc):
        """Bring *doc*'s requirement tree to the front."""
        self.setCurrentWidget(self._docList[doc.prefix])
class DocStack(QStackedWidget):
    """Stack of DocTable widgets, one per document, keyed by prefix."""

    def __init__(self):
        super().__init__()
        self._docList = {}
        # Blank placeholder shown before any document is loaded.
        placeholder = QTableWidget()
        self.addWidget(placeholder)

    def addDoc(self, doc):
        """Create a table view for *doc* and register it by prefix."""
        table = DocTable(doc)
        self._docList[doc.prefix] = table
        self.addWidget(table)

    def makeDocActive(self, doc):
        """Bring *doc*'s table view to the front."""
        self.setCurrentWidget(self._docList[doc.prefix])
class MainWindow(QMainWindow):
    """Top-level Doorstop window: menu bar plus a splitter layout holding
    the document tree, the per-document requirement trees, and a stacked
    table view for the selected document."""
    def __init__(self, app):
        super().__init__()
        self._startUI(app)
        # Doorstop document tree; set when a project is opened.
        self._tree = None
    def _startUI(self, app):
        """Build the menu bar, child widgets and splitters, then show."""
        # Create the menubar
        openAction = QAction('&Open Project', self)
        openAction.setShortcut('Ctrl+O')
        openAction.setStatusTip('Open Project')
        openAction.triggered.connect(self._openProject)
        saveAction = QAction('&Save', self)
        saveAction.setShortcut('Ctrl+S')
        saveAction.setStatusTip('Save Requirements')
        exportAction = QAction('&Export', self)
        exportAction.setShortcut('Ctrl+E')
        exportAction.setStatusTip('Export Requirements')
        quitAction = QAction('&Quit', self)
        quitAction.setShortcut('Ctrl+Q')
        quitAction.setStatusTip('Quit')
        quitAction.triggered.connect(app.quit)
        # NOTE(review): saveAction, exportAction and aboutAction have no
        # triggered handlers yet — selecting them does nothing.
        aboutAction = QAction('About', self)
        menuBar = self.menuBar()
        fileMenu = menuBar.addMenu('&File')
        fileMenu.addAction(openAction)
        fileMenu.addAction(saveAction)
        fileMenu.addAction(exportAction)
        fileMenu.addSeparator()
        fileMenu.addAction(quitAction)
        helpMenu = menuBar.addMenu('Help')
        helpMenu.addAction(aboutAction)
        # Parent container to hold everything in the main window;
        # hidden until a project is opened (see _openProject).
        self.mainContainer = QWidget()
        self.mainContainer.hide()
        self.setCentralWidget(self.mainContainer)
        # Document tree view widget
        self._docTree = DocTree()
        docTreeWindow = QWidget()
        docTreeLayout = QVBoxLayout()
        docTreeLayout.addWidget(self._docTree)
        docTreeWindow.setLayout(docTreeLayout)
        # Activating a document switches both stacks to that document.
        self._docTree.itemActivated.connect(self._docSelected)
        # Requirement tree view widget
        self._reqStack = TreeStack()
        reqTreeWindow = QWidget()
        reqTreeLayout = QVBoxLayout()
        reqTreeLayout.addWidget(self._reqStack)
        reqTreeWindow.setLayout(reqTreeLayout)
        # splitter for the tree views (documents above, requirements below)
        treeSplit = QSplitter()
        treeSplit.setOrientation(Qt.Vertical)
        treeSplit.addWidget(docTreeWindow)
        treeSplit.addWidget(reqTreeWindow)
        treeSplit.setStretchFactor(0,1)
        treeSplit.setStretchFactor(1,9)
        #treeSplit.setSizes([1,9])
        """
        # Tab widget to hold the tree views
        self._tabs = QTabWidget()
        self._tabs.addTab(docTreeWindow, 'Documents')
        self._tabs.addTab(reqTreeWindow, 'Requirements')
        """
        # Requirement table view widget
        self._reqView = DocStack()
        mainSplit = QSplitter()
        #mainSplit.addWidget(self._tabs)
        mainSplit.addWidget(treeSplit)
        mainSplit.addWidget(self._reqView)
        mainSplit.setStretchFactor(0,1)
        mainSplit.setStretchFactor(1,3)
        #treeSplit.setSizes([1,3])
        layout = QVBoxLayout()
        layout.addWidget(mainSplit)
        self.mainContainer.setLayout(layout)
        # Center the window at 3/4 of the screen size.
        sg = app.desktop().screenGeometry()
        wmax = sg.width()
        hmax = sg.height()
        # NOTE(review): '/' yields floats in Python 3 and PyQt5's
        # setGeometry expects ints — confirm this doesn't raise on newer
        # PyQt5 releases ('//' would be safer).
        self.setGeometry(wmax/8, hmax/8, (wmax*3)/4, (hmax*3)/4)
        self.setWindowTitle('Doorstop')
        self.statusBar().showMessage('No Requirements Loaded')
        self.show()
    def _openProject(self):
        """Prompt for a project directory and load its document tree."""
        dialog = QFileDialog(self)
        dialog.setFileMode(QFileDialog.DirectoryOnly)
        dialog.setOptions(QFileDialog.ShowDirsOnly)
        if dialog.exec():
            # TODO: this returns a list... should give user an error if they select multiple
            # NOTE(review): 'dir' shadows the builtin — consider renaming.
            dir = dialog.selectedFiles()[0]
            self._tree = builder.build(root=dir)
            # Register every document with all three views.
            for doc in self._tree:
                self._docTree.addItem(doc)
                self._reqStack.addDoc(doc)
                self._reqView.addDoc(doc)
            self.mainContainer.show()
    def _docSelected(self, item, col):
        """Show the activated document in both stacked views."""
        self._reqStack.makeDocActive(item.doc)
        self._reqView.makeDocActive(item.doc)
if __name__ == '__main__':
    # Launch the Doorstop GUI and run the Qt event loop until exit.
    app = QApplication(sys.argv)
    w = MainWindow(app)
    sys.exit(app.exec_())
|
# 4x9 matrix of scores in [0, 1] with float("NaN") marking missing
# entries. Presumably a test fixture/expected-output table — the producer
# and consumer are not visible in this file (TODO confirm).
[
    [
        float("NaN"),
        0.55170308,
        0.47929564,
        float("NaN"),
        0.55170308,
        float("NaN"),
        0.57323057,
        1.0,
        float("NaN"),
    ],
    [
        float("NaN"),
        0.66380801,
        1.0,
        float("NaN"),
        0.63761809,
        float("NaN"),
        0.66380801,
        float("NaN"),
        float("NaN"),
    ],
    [
        float("NaN"),
        0.85116275,
        0.23356075,
        float("NaN"),
        0.93480694,
        float("NaN"),
        0.23356075,
        1.0,
        float("NaN"),
    ],
    [
        float("NaN"),
        0.65398008,
        0.80997282,
        float("NaN"),
        0.65398008,
        float("NaN"),
        0.81518393,
        1.0,
        float("NaN"),
    ],
]
|
from rest_framework import serializers
from ..models import Prediction, Equipment
class PredictionSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for Prediction: resolves equipment symbols to model
    instances and enforces one prediction per user/parity/day."""
    # Equipment is submitted by symbol string; the field-level validators
    # below replace the raw string with the Equipment instance.
    base_equipment = serializers.CharField()
    target_equipment = serializers.CharField()
    def _get_equipment_or_raise(self, symbol):
        """Return the Equipment matching *symbol*, or raise a validation error."""
        eq = Equipment.objects.filter(symbol=symbol).first()
        if not eq:
            raise serializers.ValidationError('Equipment not found')
        return eq
    # DRF calls validate_<field>(self, value) per field; aliasing both to
    # the shared lookup converts each symbol into an Equipment instance.
    validate_base_equipment = _get_equipment_or_raise
    validate_target_equipment = _get_equipment_or_raise
    def validate(self, data):
        """Attach the requesting user and reject duplicate same-day predictions."""
        request = self.context['request']
        data['user'] = request.user
        # Evaluate the 'date' field's default callable — presumably
        # "today"; verify against the Prediction model.
        date_default = Prediction._meta.get_field('date').default()
        if Prediction.objects.filter(**data, date=date_default).exists():
            raise serializers.ValidationError('You have already made a prediction today '
                                              'for the given parity.')
        return data
    class Meta:
        model = Prediction
        fields = ["url", "id", "user", "base_equipment", "target_equipment", "direction"]
        read_only_fields = ["user"]
|
#!/usr/bin/env python3
# coding: utf-8
from .checkpoint_update_base_test import CheckpointUpdateBaseTest
class CheckpointUpdateCompressTest(CheckpointUpdateBaseTest):
    """Compression-enabled variants of the concurrent update/eviction
    checkpoint tests: every call passes True as the first argument.

    NOTE(review): the semantics of the positional arguments are defined by
    concurrent_update_eviction_base in the base class (not visible here) —
    confirm before relying on any interpretation of the flags.
    """
    def test_compress_concurrent_update_eviction_single_checkpoint(self):
        self.concurrent_update_eviction_base(True, False, False, 0)
    def test_compress_concurrent_update_eviction_first_checkpoint(self):
        self.concurrent_update_eviction_base(True, False, False, 3)
    def test_compress_concurrent_update_eviction_middle_checkpoint(self):
        self.concurrent_update_eviction_base(True, True, False, 1)
    def test_compress_concurrent_update_eviction_many_checkpoints(self):
        self.concurrent_update_eviction_base(True, True, True, 5, False)
    def test_compress_concurrent_update_eviction_many_update_checkpoints(self):
        self.concurrent_update_eviction_base(True, True, True, 5)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.