content
stringlengths 5
1.05M
|
|---|
#!/usr/bin/env python3
"""Report the amino-acid composition (relative frequencies) of a protein FASTA file."""
import argparse
import biotools
import sys
from collections import Counter

# Write a program that computes the amino acid composition of a protein file
# Counter replaces the hand-rolled dict-of-counts (and avoids shadowing the
# builtin `id` with the FASTA record name).
counts = Counter()
tot_count = 0
for name, protein in biotools.read_fasta(sys.argv[1]):
    counts.update(protein)
    tot_count += len(protein)
for aa in counts:
    print(aa, counts[aa] / tot_count)
"""
python3 composition.py proteins.fasta.gz | sort -nk 2 (numerically by column 2)
* 0.0017561333612916754
W 0.010255968606775905
C 0.019017913309169337
M 0.023765838900038944
H 0.027689991912051043
Y 0.02980558967138963
F 0.036174849474283316
N 0.04593281011293173
I 0.049422610310637154
D 0.052167270766557826
Q 0.05259413473923853
P 0.05463858850313034
T 0.05542491687385795
K 0.056080190516130966
G 0.05631234460653626
R 0.05732708264685618
V 0.05813962196327472
E 0.06519785519575833
A 0.07117020639247522
S 0.08295764311176347
L 0.09416843902585148
"""
|
# -*- coding: utf-8 -*-
"""
Data-preprocessing template: load a CSV, impute missing values, encode
categorical variables, split into train/test sets, and scale features.

Created on Tue Dec 10 15:21:50 2019
@author: Aguilerimon
"""
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd

# Import the data set.
dataset = pd.read_csv('Data.csv')
# x: all rows of every column but the last (independent variables).
x = dataset.iloc[:, :-1].values
# y: all rows of the last column (dependent variable).
y = dataset.iloc[:, 3].values

# Handle missing values (NAs): replace NaN with the column mean.
# sklearn.preprocessing.Imputer was removed in scikit-learn 0.22;
# SimpleImputer is the supported replacement (fit+transform in one call).
from sklearn.impute import SimpleImputer
imputer = SimpleImputer(missing_values=np.nan, strategy="mean")
x[:, 1:3] = imputer.fit_transform(x[:, 1:3])

# Encode categorical data.
# LabelEncoder handles the (boolean-like) target; for the categorical
# feature column the deprecated OneHotEncoder(categorical_features=[0])
# is replaced by a ColumnTransformer, which one-hot encodes column 0 and
# passes the remaining columns through unchanged (same column order:
# dummy columns first, then the rest).
from sklearn.preprocessing import LabelEncoder, OneHotEncoder
from sklearn.compose import ColumnTransformer
labelencoder_y = LabelEncoder()
# For boolean targets the OneHotEncoder is unnecessary.
y = labelencoder_y.fit_transform(y)
ct = ColumnTransformer([("onehot", OneHotEncoder(), [0])], remainder="passthrough")
x = ct.fit_transform(x)

# Split the dataset into training and evaluation sets.
# sklearn.cross_validation was removed long ago; model_selection hosts
# train_test_split now.
from sklearn.model_selection import train_test_split
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=0)

# Feature scaling: fit on the training set only, then apply to both.
from sklearn.preprocessing import StandardScaler
sc_x = StandardScaler()
x_train = sc_x.fit_transform(x_train)
x_test = sc_x.transform(x_test)
|
# [Kakao] string compression: shortest length achievable by run-length
# encoding the string in fixed-size chunks taken from the front.
INF = 987654321


def solution(s):
    """Return the minimum compressed length of s over all chunk sizes."""
    if len(s) == 1:
        return 1
    best = INF
    for size in range(1, len(s) // 2 + 1):
        chunks = [s[pos:pos + size] for pos in range(0, len(s), size)]
        pieces = []
        run_token = chunks[0]
        run_len = 1
        for chunk in chunks[1:]:
            if chunk == run_token:
                run_len += 1
            else:
                # A run of length 1 is emitted bare, otherwise count-prefixed.
                pieces.append(run_token if run_len == 1 else str(run_len) + run_token)
                run_token = chunk
                run_len = 1
        pieces.append(run_token if run_len == 1 else str(run_len) + run_token)
        best = min(best, len("".join(pieces)))
    return best


if __name__ == "__main__":
    print(solution("a"))
|
from collections import defaultdict
def part1and2():
heightmap = []
with open("../input/09.txt") as f:
for line in f:
heightmap.append(list(map(int, line.strip())))
n = len(heightmap)
m = len(heightmap[0])
risk = 0
heightmap_for_lazy_people = defaultdict(lambda: 10)
for i in range(n):
for j in range(m):
heightmap_for_lazy_people[(i, j)] = heightmap[i][j]
low_points = []
for i in range(n):
for j in range(m):
if (
heightmap[i][j] < heightmap_for_lazy_people[(i, j - 1)]
and heightmap[i][j] < heightmap_for_lazy_people[(i, j + 1)]
and heightmap[i][j] < heightmap_for_lazy_people[(i - 1, j)]
and heightmap[i][j] < heightmap_for_lazy_people[(i + 1, j)]
):
risk += heightmap[i][j] + 1
low_points.append((i, j))
print(risk)
sizes = []
for x, y in low_points:
size = 0
queue = [(x, y)]
visited = set()
while queue:
i, j = queue.pop(0)
if (i, j) in visited:
continue
if heightmap_for_lazy_people[(i, j)] > 8:
continue
visited.add((i, j))
size += 1
queue.append((i, j - 1))
queue.append((i, j + 1))
queue.append((i - 1, j))
queue.append((i + 1, j))
sizes.append(size)
sizes.sort(reverse=True)
print(sizes[0] * sizes[1] * sizes[2])
part1and2()
|
import numpy as np
import numbers
import scipy.spatial.distance as spdist
# Copied from https://python-future.org/_modules/future/utils.html#old_div
def old_div(a, b):
    """
    DEPRECATED: import ``old_div`` from ``past.utils`` instead.

    Reproduce Python 2 division semantics: floor division when both
    operands are integral, true division otherwise.
    TODO: generalize this to other objects (like arrays etc.)
    """
    both_integral = isinstance(a, numbers.Integral) and isinstance(b, numbers.Integral)
    return a // b if both_integral else a / b
# Taken from PyGPs source code.
# Written by Marion Neumann, Daniel Marthaler, Shan Huang, Kristian Kersting
# Source: https://github.com/marionmari/pyGPs/blob/master/pyGPs/Core/cov.py
def init_sm_hyper(x, y, Q):
    """
    Initialize hyperparameters for the spectral-mixture kernel. Weights are
    all set to be uniformly distributed, means are given by a random sample
    from a uniform distribution scaled by the Nyquist frequency, and variances are given by a random sample from a uniform distribution scaled by the max distance.

    Returns a flat list of length Q + 2*D*Q laid out as
    [weights (Q) | means (Q*D) | scales (Q*D)].  Uses np.random — the output
    is non-deterministic unless the caller seeds NumPy.
    """
    x = np.atleast_2d(x)
    y = np.atleast_2d(y)
    (n, D) = x.shape  # n samples, D input dimensions
    w = np.zeros(Q)        # mixture weights
    m = np.zeros((D, Q))   # mixture means
    s = np.zeros((D, Q))   # mixture scales
    # Spread the signal's standard deviation uniformly over the Q components.
    w[:] = old_div(np.std(y), Q)
    hypinit = np.zeros(Q + 2 * D * Q)
    for i in range(D):
        # Calculate distances
        xslice = np.atleast_2d(x[:, i]).T
        d2 = spdist.cdist(xslice, xslice, 'sqeuclidean')
        if n > 1:
            # Replace zero (diagonal) distances so min/max below ignore them.
            d2[d2 == 0] = d2[0, 1]
        else:
            d2[d2 == 0] = 1
        minshift = np.min(np.min(np.sqrt(d2)))
        # Nyquist frequency implied by the smallest inter-sample gap.
        nyquist = old_div(0.5, minshift)
        m[i, :] = nyquist * np.random.ranf((1, Q))
        maxshift = np.max(np.max(np.sqrt(d2)))
        s[i, :] = old_div(1., np.abs(maxshift * np.random.ranf((1, Q))))
    hypinit[:Q] = w
    hypinit[Q + np.arange(0, Q * D)] = np.squeeze(m[:].T)
    hypinit[Q + Q * D + np.arange(0, Q * D)] = np.squeeze(s[:].T)
    return list(hypinit)
# Written by Srikanth Gadicherla https://github.com/imsrgadich
# Source: https://github.com/imsrgadich/gprsm/blob/master/gprsm/spectralmixture.py
def init_sm_hyper_v2(train_x, train_y, num_mixtures):
    """
    For initialization of the parameters for the Spectral Mixture
    Kernel.
    :param train_x: input data
    :param train_y: target data
    :param num_mixtures: number of mixtures
    :return: param_name dimensions
    ---------- ----------
    mixture weights| num_mixtures x 1
    mixture means | num_mixtures x input_dim
    mixture scales | input_dim x num_mixtures
    """
    assert isinstance(num_mixtures, int)
    assert train_x.shape[0] == train_y.shape[0]
    # NOTE(review): input_dim is read before the reshaping below, so train_x
    # must already be at least 2-D here; the 1-D branch looks unreachable — confirm.
    input_dim = np.shape(train_x)[1]  # type: int
    if np.size(train_x.shape) == 1:
        train_x = np.expand_dims(train_x ,-1)
    if np.size(train_x.shape) == 2:
        train_x = np.expand_dims(train_x ,0)
    train_x_sort = np.copy(train_x)
    train_x_sort.sort(axis=1)
    # Per-dimension span of the sorted inputs (largest minus smallest value).
    max_dist = np.squeeze(train_x_sort[: ,-1, :] - train_x_sort[: ,0, :])
    # Gaps between consecutive sorted samples, per dimension.
    min_dist_sort = np.squeeze(np.abs(train_x_sort[: ,1:, :] - train_x_sort[: ,:-1, :]))
    min_dist = np.zeros([input_dim] ,dtype=float)
    # min of each data column could be zero. Hence, picking minimum which is not zero
    for ind in np.arange(input_dim):
        try:
            min_dist[ind] = min_dist_sort[np.amin(np.where(min_dist_sort[:,ind] > 0), axis=1), ind]
        except:
            # NOTE(review): bare except silently falls back to the global
            # (flattened) non-zero minimum when the per-column lookup fails
            # (e.g. for 1-D input) — consider narrowing to IndexError.
            min_dist[ind] = min_dist_sort[np.amin(np.where(min_dist_sort > 0), axis=1)]
    # for random restarts during batch processing. We need to initialize at every
    # batch. Lock the seed here.
    # NOTE(review): the seed itself is drawn at random, so runs are NOT
    # reproducible despite the comment above — confirm intent.
    seed= np.random.randint(low=1 ,high=2**31)
    np.random.seed(seed)
    # Inverse of lengthscales should be drawn from truncated Gaussian |N(0, max_dist^2)|
    # dim: Q x D
    # mixture_scales = tf.multiply(,tf.cast(max_dist,dtype=tf.float32)**(-1)
    mixture_scales = (np.multiply(np.abs(np.random.randn(num_mixtures,input_dim)),
                                  np.expand_dims(max_dist ,axis=0)))**(-1)
    # Draw means from Unif(0, 0.5 / minimum distance between two points), dim: Q x D
    # the nyquist is half of maximum frequency. TODO
    nyquist = np.divide(0.5,min_dist)
    mixture_means = np.multiply(np.random.rand(num_mixtures,input_dim),\
                                np.expand_dims(nyquist,0))
    # First component pinned to frequency zero (captures the DC/long-term trend).
    mixture_means[0,:] = 0
    # Mixture weights should be roughly the std of the y values divided by
    # the number of mixtures
    # dim: 1 x Q
    mixture_weights= np.divide(np.std(train_y,axis=0),num_mixtures)*np.ones(num_mixtures)
    # Flatten into [weights | means | scales.T], each slot of length num_mixtures.
    # NOTE(review): the 3*num_mixtures layout only fits when input_dim == 1 — confirm.
    init_hyper = np.zeros(num_mixtures*3)
    init_hyper[0:num_mixtures] = np.squeeze(np.asarray(mixture_weights))
    init_hyper[num_mixtures:num_mixtures*2] = np.squeeze(np.asarray(mixture_means))
    init_hyper[num_mixtures*2:num_mixtures*3] = np.squeeze(np.asarray(mixture_scales.T))
    return init_hyper
|
# -*- coding: utf-8 -*-
"""
heka tcp client(NetworkInput)
python - hekad
- can't connect to hekad/hekad not start:
[Errno 10061] No connection could be made because
the target machine actively refused it
save data in redis/logfile when no connection?
"""
import logbook
import socket
import gevent
import toml
# Module-level logging setup: timestamps in local time, all output routed
# to a size-rotated file.
logbook.set_datetime_format("local")
logger = logbook.Logger('hekac')
#log = logbook.FileHandler('heka_tcp.log')
# Rotate at 1 KiB, keeping at most 5 backup files.
log = logbook.RotatingFileHandler('heka_tcp.log', max_size=1024, backup_count=5)
log.push_application()
def get_conf(conf_fn):
    """Parse the TOML configuration file at *conf_fn* and return it as a dict."""
    with open(conf_fn) as conf_fh:
        return toml.loads(conf_fh.read())
class SocketError(Exception):
    """Raised when hekad is not running or the configured port is wrong."""
class HekaTCPClient(object):
    """TCP client feeding heka's NetworkInput."""

    def __init__(self):
        """Create the socket and read host/port from hekac.toml."""
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.conf = get_conf("hekac.toml")
        self.host = self.conf["client"]["host"]
        self.port = self.conf["client"]["port"]

    def connect(self):
        """Connect to hekad; raise SocketError when the target refuses.

        The original used Python-2 print statements and logged the error
        message three times (``errmsg*3``); both fixed here.
        """
        try:
            self.sock.connect((self.host, self.port))
            print("connected")
        except Exception as err:
            errmsg = "target: %s:%s, %s" % (self.host, self.port, err.strerror)
            logger.error(errmsg)
            raise SocketError(errmsg) from err

    def send(self, i):
        """Send payload *i* to hekad and print the number of bytes sent."""
        message = "%s" % (i)  #'go|cc\nd,ds\nd d,vv|od'
        # Python 3 sockets take bytes, so the payload is encoded.
        # NOTE(review): the original sent message*5 — kept, but confirm the
        # 5x repetition is intentional (load generation?).
        messlen = self.sock.send((message * 5).encode())
        print(messlen)

    def close(self):
        """Close the socket."""
        self.sock.close()
def main():
    """Connect, stream counter payloads to hekad, then close."""
    logger.debug("heka tcp")
    client = HekaTCPClient()
    client.connect()
    # range() replaces the Python-2-only xrange().
    for i in range(1, 3333):
        client.send(i)
        logger.debug("send")
        logger.debug("==" * 20)
        logger.info(i)
        gevent.sleep(1)
    client.close()


if __name__ == '__main__':
    main()
|
import pytest
import numpy as np
from numpy.testing import assert_allclose
import matplotlib as mpl

from sklearn.tree import DecisionTreeClassifier

from ..metrics import qini_curve, perfect_qini_curve, uplift_curve, perfect_uplift_curve
from ..models import SoloModel
from ..viz import (
    UpliftCurveDisplay,
    plot_qini_curve,
    plot_treatment_balance_curve,
    plot_uplift_by_percentile,
    plot_uplift_curve,
    plot_uplift_preds,
)
def make_predictions():
    """Fit a SoloModel on a tiny fixed dataset; return (y_val, uplift_preds, treat_val)."""
    X_train = np.array([[5.1, 3.5, 1.4, 0.2], [4.9, 3.0, 1.4, 0.2], [4.7, 3.2, 1.3, 0.2]])
    y_train = np.array([0.0, 0.0, 1.0])
    treat_train = np.array([0.0, 1.0, 1.0])
    X_val = np.array([[5.1, 3.4, 1.5, 0.2], [5.0, 3.5, 1.3, 0.3], [4.5, 2.3, 1.3, 0.3]])
    y_val = np.array([0.0, 1.0, 0.0])
    treat_val = np.array([0.0, 1.0, 1.0])
    fitted = SoloModel(DecisionTreeClassifier(random_state=0)).fit(X_train, y_train, treat_train)
    return y_val, fitted.predict(X_val), treat_val
@pytest.mark.parametrize("random", [True, False])
@pytest.mark.parametrize("perfect", [True, False])
@pytest.mark.parametrize("negative_effect", [True, False])
def test_plot_qini_curve(random, perfect, negative_effect):
    """plot_qini_curve should expose the same curves qini_curve computes."""
    y_true, uplift, treatment = make_predictions()
    viz = plot_qini_curve(y_true, uplift, treatment, random, perfect, negative_effect)
    x_actual, y_actual = qini_curve(y_true, uplift, treatment)
    assert_allclose(viz.x_actual, x_actual)
    assert_allclose(viz.y_actual, y_actual)
    if random:
        # Random baseline: straight line from the origin to the curve's endpoint.
        x_baseline, y_baseline = x_actual, x_actual * y_actual[-1] / len(y_true)
        assert_allclose(viz.x_baseline, x_baseline)
        assert_allclose(viz.y_baseline, y_baseline)
    if perfect:
        x_perfect, y_perfect = perfect_qini_curve(
            y_true, treatment, negative_effect)
        assert_allclose(viz.x_perfect, x_perfect)
        assert_allclose(viz.y_perfect, y_perfect)
    import matplotlib as mpl  # NOTE(review): redundant — mpl is imported at module level
    assert isinstance(viz.line_, mpl.lines.Line2D)
    assert isinstance(viz.ax_, mpl.axes.Axes)
    assert isinstance(viz.figure_, mpl.figure.Figure)
@pytest.mark.parametrize(
    "qini_auc, estimator_name, expected_label",
    [
        (0.61, None, "plot_qini_curve = 0.61"),
        (0.61, "first", "first (plot_qini_curve = 0.61)")
    ]
)
def test_default_labels(qini_auc, estimator_name, expected_label):
    """UpliftCurveDisplay.plot should build the legend label from the score
    and the optional estimator name.

    NOTE(review): a later function in this module reuses the name
    ``test_default_labels``, so pytest only collects that one and this
    test is silently skipped.
    """
    x_actual = np.array([0, 1, 2, 3, 5, 6])
    y_actual = np.array([0.0, 1.0, 2.0, 3.0, 2.5, 1.5])
    disp = UpliftCurveDisplay(
        x_actual=x_actual,
        y_actual=y_actual,
        estimator_name=estimator_name
    ).plot(qini_auc, title="plot_qini_curve")
    assert disp.line_.get_label() == expected_label
# NOTE(review): both imports below are redundant — the same names are
# already imported at the top of the module.
from ..viz import plot_uplift_curve
from ..metrics import uplift_curve, perfect_uplift_curve


@pytest.mark.parametrize("random", [True, False])
@pytest.mark.parametrize("perfect", [True, False])
def test_plot_uplift_curve(random, perfect):
    """plot_uplift_curve should expose the same curves uplift_curve computes."""
    y_true, uplift, treatment = make_predictions()
    viz = plot_uplift_curve(y_true, uplift, treatment, random, perfect)
    x_actual, y_actual = uplift_curve(y_true, uplift, treatment)
    assert_allclose(viz.x_actual, x_actual)
    assert_allclose(viz.y_actual, y_actual)
    if random:
        # Random baseline: straight line from the origin to the curve's endpoint.
        x_baseline, y_baseline = x_actual, x_actual * y_actual[-1] / len(y_true)
        assert_allclose(viz.x_baseline, x_baseline)
        assert_allclose(viz.y_baseline, y_baseline)
    if perfect:
        x_perfect, y_perfect = perfect_uplift_curve(
            y_true, treatment)
        assert_allclose(viz.x_perfect, x_perfect)
        assert_allclose(viz.y_perfect, y_perfect)
    import matplotlib as mpl  # NOTE(review): redundant — mpl is imported at module level
    assert isinstance(viz.line_, mpl.lines.Line2D)
    assert isinstance(viz.ax_, mpl.axes.Axes)
    assert isinstance(viz.figure_, mpl.figure.Figure)
@pytest.mark.parametrize(
    "uplift_auc, estimator_name, expected_label",
    [
        (0.75, None, "plot_uplift_curve = 0.75"),
        (0.75, "first", "first (plot_uplift_curve = 0.75)")
    ]
)
def test_default_labels_uplift(uplift_auc, estimator_name, expected_label):
    """UpliftCurveDisplay.plot should build the legend label from the score
    and the optional estimator name (uplift-curve variant).

    Renamed: the original was a second ``test_default_labels``, which
    redefined the earlier qini-curve test of the same name so pytest only
    ever collected this one.
    """
    x_actual = np.array([0, 1, 2, 3, 5, 6])
    y_actual = np.array([0.0, 1.0, 2.0, 3.0, 2.5, 1.5])
    disp = UpliftCurveDisplay(
        x_actual=x_actual,
        y_actual=y_actual,
        estimator_name=estimator_name
    ).plot(uplift_auc, title="plot_uplift_curve")
    assert disp.line_.get_label() == expected_label
def test_plot_uplift_preds():
    """Smoke-test plot_uplift_preds: it should return three matplotlib Axes."""
    treatment_scores = np.array([1,1,0,1,1,1])
    control_scores = np.array([0,1,0,1,0,1])
    axes = plot_uplift_preds(treatment_scores, control_scores, log=True, bins=5)
    for ax in (axes[0], axes[1], axes[2]):
        assert isinstance(ax, mpl.axes.Axes)
def test_plot_uplift_by_percentile():
    """Check titles and axis labels for both plot variants: the 'overall'
    line plot (single Axes) and the 'by_group' bar plot (array of Axes)."""
    y_true, uplift, treatment = make_predictions()
    viz = plot_uplift_by_percentile(y_true, uplift, treatment, strategy='overall',kind='line', bins=1, string_percentiles=True)
    assert viz.get_title() == "Uplift by percentile\nweighted average uplift = 0.5000"
    assert viz.get_xlabel() == "Percentile"
    assert viz.get_ylabel() == "Uplift = treatment response rate - control response rate"
    assert isinstance(viz, mpl.axes.Axes)
    # The bar variant returns multiple Axes (uplift plus response rates).
    viz = plot_uplift_by_percentile(y_true, uplift, treatment, strategy='by_group',kind='bar', bins=1, string_percentiles=False)
    assert viz[0].get_title() == "Uplift by percentile\nweighted average uplift = 0.5000"
    assert viz[1].get_xlabel() == "Percentile"
    assert viz[1].get_title() == "Response rate by percentile"
    assert isinstance(viz[0], mpl.axes.Axes)
    assert isinstance(viz[1], mpl.axes.Axes)
def test_plot_treatment_balance_curve():
    """plot_treatment_balance_curve should return a labelled matplotlib Axes.

    Fixed from the original, which lacked the ``test_`` prefix (so pytest
    never collected it) and — because the library function was not imported —
    recursively called itself instead of ``sklift``'s
    ``plot_treatment_balance_curve``.
    """
    y_true, uplift, treatment = make_predictions()
    viz = plot_treatment_balance_curve(uplift, treatment, winsize=0.5)
    assert viz.get_title() == "Treatment balance curve"
    assert viz.get_xlabel() == "Percentage targeted"
    assert viz.get_ylabel() == "Balance: treatment / (treatment + control)"
    assert isinstance(viz, mpl.axes.Axes)
|
import os
import random
vips_path = "D:/Downloads/vips-dev-w64-all-8.9.1/vips-dev-8.9/bin"
os.environ['PATH'] = vips_path + ';' + os.environ['PATH']
from pyvips import Image
padding_size = 20
base_path = "./animations/r1586109741"
animation_path = "./animations/collage"
class Board:
    """One board's animation frames, loaded from a subdirectory of base_path."""

    def __init__(self, name):
        self.name = name
        self.path = f"{base_path}/{name}"
        # Frame files are named <number>.<ext>; sort numerically, not lexically.
        frame_files = [
            entry for entry in os.listdir(self.path)
            if os.path.isfile(os.path.join(self.path, entry))
        ]
        frame_files.sort(key=lambda fname: int(fname.split(".")[0]))
        self.files = frame_files
        self.images = [
            Image.new_from_file(f"{self.path}/{fname}", access='sequential')
            for fname in frame_files
        ]
        self.index = 0

    def get_current(self):
        """Return the frame at the cursor, clamped to the final frame."""
        return self.images[min(self.index, len(self.images) - 1)]

    def get_next(self):
        """Advance the cursor and return the (clamped) current frame."""
        self.index += 1
        return self.get_current()

    def is_finished(self):
        """True once the cursor has moved past the last frame."""
        return self.index >= len(self.images)
def combine():
    """Tile every board's frames into one collage animation, frame by frame."""
    os.makedirs(animation_path, exist_ok=True)
    boards = [Board(entry) for entry in os.listdir(base_path)]
    frame = 0
    while not all(board.is_finished() for board in boards):
        print("making frame", frame)
        frames = []
        for board in boards:
            # Each board advances with probability 0.1 per collage frame.
            frames.append(board.get_next() if random.random() < 0.1 else board.get_current())
        collage = Image.arrayjoin(frames, across=12, shim=padding_size)
        collage.write_to_file(f"{animation_path}/{frame}.png")
        frame += 1


if __name__ == '__main__':
    combine()
|
import configparser
import time
import random
from crawling.crawler.Naver_BLOGandCAFE import naver
from crawling.crawler.Daum_BLOGandCAFE import daum
from crawling.openApi.YOUTUBE_comment import request_youtube, get_video_comments
import pandas as pd
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from sqlalchemy import create_engine
def BLOG_Crawler(query):
    """Crawl blog posts whose URLs were stored by the Naver open-API step.

    Loads URLs for *query* from the ``naver_openApi`` table, scrapes each
    Naver blog post with Selenium, and inserts content / like / comment
    counts into ``Crawl_blog``, skipping URLs already crawled.
    """
    config = configparser.ConfigParser()
    config.read('../config.ini', encoding='utf-8')
    config.sections()

    options = Options()
    options.add_argument(
        'user-agent=' + "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.54 Safari/537.36")
    options.add_argument('headless')
    options.add_argument('--window-size= 360, 360')  # browser window size
    options.add_argument('--blink-settings=imagesEnabled=false')  # skip image loading
    path = '/home/drsong/download/chromedriver'  # linux server
    driver = webdriver.Chrome(executable_path=path,
                              options=options)

    db_connection_str = 'mysql+pymysql://saso:saso@localhost/DAMDA'
    db_connection = create_engine(db_connection_str)

    # Parameterized query: the original concatenated *query* into the SQL
    # string, which breaks on quotes and allows SQL injection.
    sql = "SELECT postdate, url, title FROM naver_openApi WHERE query = %s;"
    df = pd.read_sql(sql, db_connection, params=(query,))
    url_list = df['url'].values.tolist()

    # Create the target table once (the original re-attempted this on every
    # loop iteration); failure means it already exists.
    try:
        sql = "CREATE TABLE Crawl_blog ( id INT NOT NULL AUTO_INCREMENT, query VARCHAR(45) NULL, url VARCHAR(100) NULL, content TEXT NULL, source VARCHAR(45) NULL, postdate DATETIME NULL, gonggam INT NULL, commentCount INT NULL, PRIMARY KEY (id), UNIQUE INDEX url_UNIQUE (url ASC)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE utf8mb4_general_ci;"
        db_connection.execute(sql)
        print('create table')
    except Exception:
        pass  # table already exists

    for i, url in enumerate(url_list):
        sql = "SELECT count(*) FROM Crawl_blog WHERE url = %s"
        result = db_connection.execute(sql, (url,))
        result = (result.first()[0])
        if result > 0:
            print(i, ': ', url, ' skip')
        else:
            if 'naver' in url:
                src = 'naver_blog'
                content, gong, cmt = naver(driver, url)
            elif 'daum' in url:
                src = 'daum_blog'
                content = ('daum')
                gong = 0
                cmt = 0
            else:
                src = 'etc'
                content = ('기타')
                gong = 0
                cmt = 0
            # Randomized delay between requests to avoid hammering the sites.
            time.sleep(random.uniform(2, 4))
            sql = "INSERT INTO Crawl_blog (query, url, content, source, postdate, gonggam, commentCount) VALUES (%s, %s, %s, %s, %s, %s, %s)"
            db_connection.execute(sql, (query, url, content, src, df['postdate'][i], gong, cmt))
            print(i, ': ', url, ' done')
def YOUTUBE_Cralwer(query):
    """Fetch YouTube comments for *query* via the openApi helper.

    NOTE(review): the name is misspelled ("Cralwer") but kept because
    external callers may reference it. The commented-out block below is the
    old export-to-Excel/CSV pipeline; the return value of
    get_video_comments is currently discarded — confirm whether it should
    be returned or persisted.
    """
    # list_youtube, urls = request_youtube(query)
    # df_b = pd.DataFrame(list_youtube, columns=['keyword', 'title', 'channel', 'videoId'])
    # df_b['source'] = '유튜브'
    # df_b.to_excel('crawling/crawler/Crawling_Result/URL_DATA/' + query + '_Youtube_Comment' + '.xlsx', index=True,
    #               index_label="id")
    # empty_frame = pd.DataFrame(columns=['url', 'keyword', 'content', 'author', 'postdate', 'source', 'num_likes'])
    # empty_frame.to_csv('crawling/crawler/Crawling_Result/CONTENT_DATA/' + query + '_Youtube_Comment' + '.csv', index=True,
    #                    index_label="id")
    content_youtube = get_video_comments(query)
|
from settings import *
|
# Generated from D:/AnacondaProjects/iust_compilers_teaching/grammars\AssignmentStatement1.g4 by ANTLR 4.8
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys
def serializedATN():
    """Return the serialized ATN for the generated lexer.

    Machine-generated by ANTLR 4.8 from AssignmentStatement1.g4 — do not
    edit by hand; regenerate from the grammar instead.
    """
    with StringIO() as buf:
        buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\20")
        buf.write("u\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
        buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
        buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\3\2\3\2")
        buf.write("\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3\b\3\b\3")
        buf.write("\t\3\t\3\t\7\t8\n\t\f\t\16\t;\13\t\3\n\6\n>\n\n\r\n\16")
        buf.write("\n?\3\13\6\13C\n\13\r\13\16\13D\3\13\3\13\7\13I\n\13\f")
        buf.write("\13\16\13L\13\13\3\13\3\13\6\13P\n\13\r\13\16\13Q\5\13")
        buf.write("T\n\13\3\f\3\f\3\f\7\fY\n\f\f\f\16\f\\\13\f\3\r\3\r\3")
        buf.write("\16\3\16\3\17\3\17\3\17\3\17\5\17f\n\17\3\20\6\20i\n\20")
        buf.write("\r\20\16\20j\3\20\3\20\3\21\3\21\3\22\3\22\3\22\5\22t")
        buf.write("\n\22\3Z\2\23\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13")
        buf.write("\25\f\27\r\31\2\33\2\35\2\37\16!\17#\20\3\2\5\3\2\62;")
        buf.write("\4\2C\\c|\5\2\13\13\17\17\"\"\2}\2\3\3\2\2\2\2\5\3\2\2")
        buf.write("\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2")
        buf.write("\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27")
        buf.write("\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\3%\3\2\2\2")
        buf.write("\5(\3\2\2\2\7*\3\2\2\2\t,\3\2\2\2\13.\3\2\2\2\r\60\3\2")
        buf.write("\2\2\17\62\3\2\2\2\21\64\3\2\2\2\23=\3\2\2\2\25S\3\2\2")
        buf.write("\2\27U\3\2\2\2\31]\3\2\2\2\33_\3\2\2\2\35e\3\2\2\2\37")
        buf.write("h\3\2\2\2!n\3\2\2\2#s\3\2\2\2%&\7<\2\2&\'\7?\2\2\'\4\3")
        buf.write("\2\2\2()\7-\2\2)\6\3\2\2\2*+\7/\2\2+\b\3\2\2\2,-\7,\2")
        buf.write("\2-\n\3\2\2\2./\7\61\2\2/\f\3\2\2\2\60\61\7*\2\2\61\16")
        buf.write("\3\2\2\2\62\63\7+\2\2\63\20\3\2\2\2\649\5\33\16\2\658")
        buf.write("\5\33\16\2\668\5\31\r\2\67\65\3\2\2\2\67\66\3\2\2\28;")
        buf.write("\3\2\2\29\67\3\2\2\29:\3\2\2\2:\22\3\2\2\2;9\3\2\2\2<")
        buf.write(">\5\31\r\2=<\3\2\2\2>?\3\2\2\2?=\3\2\2\2?@\3\2\2\2@\24")
        buf.write("\3\2\2\2AC\5\31\r\2BA\3\2\2\2CD\3\2\2\2DB\3\2\2\2DE\3")
        buf.write("\2\2\2EF\3\2\2\2FJ\7\60\2\2GI\5\31\r\2HG\3\2\2\2IL\3\2")
        buf.write("\2\2JH\3\2\2\2JK\3\2\2\2KT\3\2\2\2LJ\3\2\2\2MO\7\60\2")
        buf.write("\2NP\5\31\r\2ON\3\2\2\2PQ\3\2\2\2QO\3\2\2\2QR\3\2\2\2")
        buf.write("RT\3\2\2\2SB\3\2\2\2SM\3\2\2\2T\26\3\2\2\2UZ\7$\2\2VY")
        buf.write("\5\35\17\2WY\13\2\2\2XV\3\2\2\2XW\3\2\2\2Y\\\3\2\2\2Z")
        buf.write("[\3\2\2\2ZX\3\2\2\2[\30\3\2\2\2\\Z\3\2\2\2]^\t\2\2\2^")
        buf.write("\32\3\2\2\2_`\t\3\2\2`\34\3\2\2\2ab\7^\2\2bf\7$\2\2cd")
        buf.write("\7^\2\2df\7^\2\2ea\3\2\2\2ec\3\2\2\2f\36\3\2\2\2gi\t\4")
        buf.write("\2\2hg\3\2\2\2ij\3\2\2\2jh\3\2\2\2jk\3\2\2\2kl\3\2\2\2")
        buf.write("lm\b\20\2\2m \3\2\2\2no\7\f\2\2o\"\3\2\2\2pq\7>\2\2qt")
        buf.write("\7?\2\2rt\7>\2\2sp\3\2\2\2sr\3\2\2\2t$\3\2\2\2\17\2\67")
        buf.write("9?DJQSXZejs\3\b\2\2")
        return buf.getvalue()
class AssignmentStatement1Lexer(Lexer):
    """Lexer generated by ANTLR 4.8 from AssignmentStatement1.g4.

    Do not edit by hand; regenerate from the grammar instead.
    """

    atn = ATNDeserializer().deserialize(serializedATN())

    decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]

    # Token type constants (indices into symbolicNames/literalNames).
    T__0 = 1
    T__1 = 2
    T__2 = 3
    T__3 = 4
    T__4 = 5
    T__5 = 6
    T__6 = 7
    Id = 8
    INT = 9
    FLOAT = 10
    String = 11
    WS = 12
    NEWLINE = 13
    RELOP = 14

    channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]

    modeNames = [ "DEFAULT_MODE" ]

    literalNames = [ "<INVALID>",
            "':='", "'+'", "'-'", "'*'", "'/'", "'('", "')'", "'\n'" ]

    symbolicNames = [ "<INVALID>",
            "Id", "INT", "FLOAT", "String", "WS", "NEWLINE", "RELOP" ]

    ruleNames = [ "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6",
                  "Id", "INT", "FLOAT", "String", "DIGIT", "LETTER", "ESC",
                  "WS", "NEWLINE", "RELOP" ]

    grammarFileName = "AssignmentStatement1.g4"

    def __init__(self, input=None, output:TextIO = sys.stdout):
        # Wire up the ATN-driven simulator for this lexer instance.
        super().__init__(input, output)
        self.checkVersion("4.8")
        self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
        self._actions = None
        self._predicates = None
|
# distutils was deprecated by PEP 632 and removed in Python 3.12;
# setuptools provides drop-in setup()/Extension replacements.
from setuptools import setup, Extension

setup(
    name='c_doc2vecc',
    version='1.0',
    ext_modules=[Extension('c_doc2vecc', ['doc2vecc_pymodule.c'])],
)
|
import logging


class PFilter(logging.Filter):
    """A logging filter that delegates the decision to an arbitrary callable.

    The callable receives the LogRecord; its truthy/falsy return decides
    whether the record passes.
    """

    def __init__(self, func):
        # Initialize the base Filter (sets self.name etc.) before storing
        # the predicate — the original skipped this.
        super().__init__()
        self.func = func

    def filter(self, record):
        """Return the predicate's verdict for *record*."""
        return self.func(record)
|
try:
import emoji
except:
emoji = None
import click
import os
import json
from ..default import EOS, SILENCE_FILE
class Silencer(object):
    """Persist a global "silence" flag in a JSON file under EOS."""

    def __init__(self):
        self.silence_file = os.path.join(EOS, SILENCE_FILE)
        # First run: create the flag file in the noisy (not silent) state.
        if not os.path.exists(self.silence_file):
            self.speak()

    def _write(self, value):
        # Persist the flag to disk.
        with open(self.silence_file, "w") as fh:
            json.dump({"silence": value}, fh, indent=4)

    def is_silence(self):
        """Return the persisted silence flag."""
        with open(self.silence_file, "r") as fh:
            return json.load(fh)["silence"]

    def speak(self):
        """Enable output."""
        self._write(False)

    def silence(self):
        """Disable output."""
        self._write(True)
def echo(text, **styles):
    """Styled click.echo that respects the persisted silence flag.

    Returns None without printing when silenced; emoji codes are rendered
    when the optional emoji package is installed.
    """
    if Silencer().is_silence():
        return
    rendered = text if emoji is None else emoji.emojize(text)
    return click.echo(click.style(rendered, **styles))
|
from base64 import b64encode
from decimal import Decimal
from hashlib import sha256
from os import urandom
import re
import requests
import json
import urllib.request
from django.contrib.auth import authenticate, login
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from django.contrib.sites.shortcuts import get_current_site
from django.core.mail import EmailMultiAlternatives, EmailMessage, mail_admins
from django.core.urlresolvers import reverse
from django.db import IntegrityError
from django.contrib.admin.views.decorators import staff_member_required
from django.http import JsonResponse, HttpResponse, HttpResponseBadRequest, Http404
from django.shortcuts import render, get_object_or_404, redirect
from django.template.loader import render_to_string
from bs4 import BeautifulSoup
from .forms import MoneyForm, MagicAuthForm, TagAuthForm, ProductForm
from .models import Account, Product, Category, Transaction, UserTag, ProductTag
from .backends import add_tag_to_user
from namubufferi.settings import DEBUG
@staff_member_required
def admin_inventory(request):
    """
    View to handle stocking up inventory, adding products...

    Renders the admin inventory page with a blank product form, every
    product and category, and the requesting admin's own transactions.
    """
    context = dict(product_form=ProductForm(),
                   products=Product.objects.all(),
                   categories=Category.objects.all(),
                   transactions=request.user.account.transaction_set.all()
                   )
    return render(request, 'namubufferiapp/admin_handleinventory.html', context)
@staff_member_required
def admin_overview(request):
    """
    Most important things at a glance for admins.

    Splits users by account-balance sign and totals each side. The original
    iterated User.objects.all() twice (two queryset evaluations); this does
    a single pass.
    """
    positive_users = []
    negative_users = []
    positive_balance = Decimal(0)
    negative_balance = Decimal(0)
    for user in User.objects.all():
        balance = user.account.balance
        if balance >= 0:
            positive_users.append(user)
            positive_balance += balance
        else:
            negative_users.append(user)
            # Stored as a positive magnitude, matching the original.
            negative_balance += -balance
    context = dict(products=Product.objects.all(),
                   positive_users=positive_users,
                   positive_balance=positive_balance,
                   negative_users=negative_users,
                   negative_balance=negative_balance,
                   overall_balance=positive_balance-negative_balance,
                   )
    return render(request, 'namubufferiapp/admin_overview.html', context)
@staff_member_required
def product_update(request):
    """
    Update or create product

    POST only. Looks the product up by its unique name (creating it when
    missing), then overwrites category/price/inventory/hidden and
    (re)assigns the optional barcode tag. Responds 201 on create, 200 on
    update, 400 with the form errors as JSON on invalid input, 404 for
    non-POST requests.
    """
    if request.method == 'POST':
        product_form = ProductForm(request.POST)
        if product_form.is_valid():
            product, created = Product.objects.get_or_create(
                name=product_form.cleaned_data['name'],
                defaults={'category':product_form.cleaned_data['category'],},
            )
            # Overwrite all editable fields even when the product already existed.
            product.category = product_form.cleaned_data['category']
            product.price = product_form.cleaned_data['price']
            product.inventory = product_form.cleaned_data['inventory']
            product.hidden = product_form.cleaned_data['hidden']
            product.save()
            bcode = product_form.cleaned_data['barcode']
            if bcode is not None:
                # Reassign the tag to this product if the barcode already existed.
                ptag, ptagcreated = ProductTag.objects.get_or_create(uid=bcode,
                                                                     defaults={'product':product,})
                ptag.product = product
                ptag.save()
            if created:
                return HttpResponse("Product created", status=201)
            else:
                return HttpResponse("Product updated", status=200)
        else:
            return HttpResponseBadRequest('{"errors":' + product_form.errors.as_json() + '}', content_type="application/json")
    else:
        raise Http404()
@staff_member_required
def product_add_barcode(request, prod_id, barcode):
    """Attach *barcode* to product *prod_id* (PUT only).

    Responds 201 when the barcode tag is newly created, 200 when an
    existing tag is reassigned, 400 when the product does not exist and
    404 for non-PUT requests.
    """
    if request.method == 'PUT':
        try:
            product = Product.objects.get(pk=prod_id)
            ptag, created = ProductTag.objects.get_or_create(uid=barcode,
                                                             defaults={'product':product,},)
            # Reassign the tag even if the barcode already pointed elsewhere.
            ptag.product = product
            ptag.save()
            if created:
                return HttpResponse("Barcode created", status=201)
            else:
                return HttpResponse("Barcode reassigned", status=200)
        except Product.DoesNotExist:
            return HttpResponse("Product not found", status=400)
    else:
        raise Http404()
def list_barcodes(request):
    """Return a JSON object mapping barcode uid -> product primary key."""
    return JsonResponse({tag.uid: tag.product.pk for tag in ProductTag.objects.all()})
def product_from_outpan(barcode):
    """
    Try to guess product name from barcode using outpan.com

    Returns the product name, or False if no name was found. The lookup is
    best-effort: any failure (network, missing API key, bad JSON) also
    yields False.
    """
    try:
        from namubufferi.settings import OUTPAN_API_KEY
        result = urllib.request.urlopen("https://api.outpan.com/v2/products/{}?apikey={}".format(barcode, OUTPAN_API_KEY))
        if result.getcode() != 200:
            return False
        name = json.loads(result.read().decode())["name"]
        # outpan returns null for unknown products.
        return False if name is None else name
    except Exception:
        # Narrowed from a bare except (which also swallowed SystemExit /
        # KeyboardInterrupt); the unreachable trailing `return False` that
        # followed the try/except is gone.
        return False
def product_from_foodie(barcode):
    """
    Try to guess product name from barcode using foodie.fi

    Returns the product name, or False if no name was found.
    Use of this might not be ok by EULA, but shouldn't really hurt anybody
    """
    try:
        result = urllib.request.urlopen("https://www.foodie.fi/entry/{}".format(barcode))
        if result.getcode() != 200:
            return False
        soup = BeautifulSoup(result.read().decode(), "html.parser")
        return soup.find(id="product-name").get_text()
    except Exception:
        # Best-effort lookup: any failure (network, missing element) means
        # "not found". Narrowed from a bare except; the unreachable trailing
        # `return False` is gone.
        return False
@staff_member_required
def discover_barcode(request, barcode):
    """
    Try to guess product details from its barcode.

    Consults outpan first, then foodie.fi; 404 when neither knows the code.
    """
    name = product_from_outpan(barcode)
    if name is False:
        name = product_from_foodie(barcode)
    if name is False:
        raise Http404()
    return JsonResponse({"name": name})
@login_required(redirect_field_name=None)
def home(request):
    """Render the main buying page for a logged-in user: deposit form,
    products, categories and the user's own transaction history."""
    context = dict(money_form=MoneyForm(),
                   products=Product.objects.all(),
                   categories=Category.objects.all(),
                   transactions=request.user.account.transaction_set.all()
                   )
    return render(request, 'namubufferiapp/base_home.html', context)
def home_anonymous(request):
    """
    Buying anonymously means that we only update product inventory
    without making transaction for anyone
    """
    context = dict(products=Product.objects.all(),
                   categories=Category.objects.all(),
                   )
    return render(request, 'namubufferiapp/base_homeanonymous.html', context)
def buy(request):
    """Handle a POST purchase of one product.

    Creates a Transaction with amount == -price and updates inventory via
    product.make_sale().  For authenticated users the transaction is tied to
    their account, the fresh balance is returned, and a notification email is
    sent when the balance goes negative.  Anonymous purchases only touch
    inventory (balance reported as 0).  Non-POST requests raise 404.
    """
    if request.method == 'POST':
        try:
            product_key = int(request.POST['product_key'])
        except ValueError:
            # This shouldn't happen, but it did. How?  Report to admins and
            # answer with an error payload instead of crashing.
            payload = {'balance': Decimal(0),
                       'transactionkey': None,
                       'modalMessage': "Tried to buy a product that doesn't exist. How did this happen?",
                       'message': render_to_string('namubufferiapp/message.html',
                                                   {'message': "Tried to buy a product that doesn't exist. How did this happen?"}),
                       }
            mail_admins(
                "Buying without correct product",
                "User {} tried to buy product with id {}".format(request.user, request.POST['product_key']),
                fail_silently=True
            )
            return JsonResponse(payload)
        product = get_object_or_404(Product, pk=product_key)
        price = product.price
        new_transaction = Transaction()
        new_transaction.amount = -price  # purchases are stored as negative amounts
        new_transaction.product = product
        if request.user.is_authenticated:
            # Anonymous purchases leave the transaction without a customer.
            new_transaction.customer = request.user.account
        new_transaction.save()
        product.make_sale()  # decrement/update inventory
        payload = {'balance': Decimal(0),
                   'transactionkey': new_transaction.pk,
                   'modalMessage': "Purchase Successful",
                   'message': render_to_string('namubufferiapp/message.html',
                                               {'message': "Purchase Successful"}),
                   }
        if request.user.is_authenticated:
            payload['balance'] = request.user.account.balance
            if request.user.account.balance < 0:
                # Warn the user by email when the purchase drove the balance negative.
                email = EmailMessage(
                    subject='Your balance notification',
                    body='Your balance is NEGATIVE: {}e'.format(request.user.account.balance),
                    to=[request.user.email],
                )
                email.send(fail_silently=True)
        return JsonResponse(payload)
    else:
        raise Http404()
def tos(request):
    """Record whether the user accepts the terms of service.

    Expects POST with "accept" set to "true"/"false".  Persists the choice on
    the user's account and echoes the stored value back as JSON.  Non-POST
    requests raise 404 (consistent with the other views in this module).
    """
    if request.method == 'POST':
        accept = request.POST["accept"] == "true"
        if request.user.is_authenticated:
            request.user.account.tos_accepted = accept
            request.user.account.save()
            payload = {'tos_accepted': request.user.account.tos_accepted}
        else:
            # Anonymous users have no account; the old code crashed with an
            # AttributeError here.  Echo the submitted value back instead.
            payload = {'tos_accepted': accept}
        return JsonResponse(payload)
    # Previously fell through and returned None, which Django rejects.
    raise Http404()
@login_required
def deposit(request):
    """Add money to the logged-in user's account.

    Expects POST data matching MoneyForm ('euros' and 'cents').  On success a
    positive Transaction is created, a balance-notification email is sent, and
    the new balance plus a rendered receipt message are returned as JSON.
    Invalid form data answers 400 with the form errors; non-POST raises 404.
    """
    if request.method == 'POST':
        money_form = MoneyForm(request.POST)
        if money_form.is_valid():
            euros = request.POST['euros']
            cents = request.POST['cents']
            # Decimal arithmetic avoids binary-float rounding of money values.
            amount = Decimal(euros) + Decimal(cents)/100
            new_transaction = Transaction()
            new_transaction.customer = request.user.account
            new_transaction.amount = amount
            new_transaction.save()
            email = EmailMessage(
                subject='Your balance notification',
                body='Your balance is: {}e'.format(request.user.account.balance),
                to=[request.user.email],
            )
            email.send(fail_silently=True)
            return JsonResponse({'balance': request.user.account.balance,
                                 'transactionkey': new_transaction.pk,
                                 'modalMessage': "Deposit Successful",
                                 'message': render_to_string('namubufferiapp/message.html',
                                                             {'message': "Deposit Successful",
                                                              'transaction': new_transaction,
                                                              }),
                                 })
        else:
            # https://docs.djangoproject.com/en/1.10/ref/forms/api/#django.forms.Form.errors.as_json
            # https://docs.djangoproject.com/ja/1.9/ref/request-response/#jsonresponse-objects
            #return JsonResponse({"errors": + money_form.errors.as_json()})
            # FTS...
            # Hand-built JSON body: errors.as_json() already returns a JSON string.
            return HttpResponseBadRequest('{"errors":' + money_form.errors.as_json() + '}', content_type="application/json")
    else:
        raise Http404()
@login_required
def transaction_history(request):
    """Return the user's rendered transaction history as JSON."""
    history_html = render_to_string(
        'namubufferiapp/transactionhistory.html',
        {'transactions': request.user.account.transaction_set.all()},
    )
    return JsonResponse({'transactionhistory': history_html})
@login_required
def receipt(request):
    """Return receipt details for one of the requesting user's transactions.

    POST with 'transaction_key'.  404 for other methods or for transactions
    that are not in the user's own transaction set.
    """
    if request.method == 'POST':
        # Scoping the lookup to the user's own transactions prevents reading
        # other users' receipts.
        transaction = get_object_or_404(request.user.account.transaction_set.all(),
                                        pk=request.POST['transaction_key'])
        receipt = {'customer': transaction.customer.user.username,
                   'amount': transaction.amount,
                   'timestamp': transaction.timestamp,
                   'transactionkey': transaction.pk,
                   'canceled': transaction.canceled,
                   }
        try:
            receipt['product'] = transaction.product.name
        except AttributeError:
            # Narrowed from a bare except: deposits have no product attached
            # (transaction.product is None), which raises AttributeError.
            receipt['product'] = 'Deposit'
        return JsonResponse({'receipt': receipt})
    else:
        raise Http404()
@login_required
def cancel_transaction(request):
    """Cancel one of the requesting user's own (not yet canceled) transactions."""
    if request.method != 'POST':
        raise Http404()
    transaction = get_object_or_404(
        request.user.account.transaction_set.all(),
        pk=request.POST['transaction_key'],
    )
    if transaction.canceled or request.user != transaction.customer.user:
        # Already canceled, or not ours: nothing to do.
        return HttpResponse(status=204)
    transaction.cancel()
    message_html = render_to_string(
        'namubufferiapp/message.html',
        {'message': "Transaction Canceled", 'transaction': transaction},
    )
    return JsonResponse({
        'balance': request.user.account.balance,
        'modalMessage': "Transaction Canceled",
        'message': message_html,
    })
def magic_auth(request, magic_token=None):
    """Passwordless login via emailed one-time tokens.

    POST: validate MagicAuthForm, look up (or, for @aalto.fi addresses,
    create) the user, refresh their magic token and email it to them.
    Non-POST with a token in the URL: authenticate with the token and log
    in, redirecting to '/' on failure.
    """
    if request.method == 'POST':
        # Validate form
        magic_auth_form = MagicAuthForm(request.POST)
        if magic_auth_form.is_valid():
            # Try to find the user or create a new one
            try:
                user = User.objects.get(email=magic_auth_form.cleaned_data['email'].lower())
            except User.DoesNotExist:
                email = magic_auth_form.cleaned_data['email'].lower()
                # Bug fix: the dot was previously unescaped, so addresses
                # like 'user@aaltoXfi' would also have matched.
                m = re.match(r"^(.*)@aalto\.fi$", email)
                if m:
                    username = m.group(1)
                    # Random unusable password; login happens via magic tokens.
                    user = User.objects.create_user(username,
                                                    email=email,
                                                    password=b64encode(sha256(urandom(56)).digest()))
                else:
                    return JsonResponse({'modalMessage': 'Email not found or its not aalto email.'})
            user.account.update_magic_token()
            current_site = get_current_site(request)  # NOTE(review): unused — kept for its (harmless) lookup; consider removing
            # Send mail to user
            mail = EmailMultiAlternatives(
                subject="Namubufferi - Login",
                body=("Hello. Authenticate to Namubufferi using this code. It's valid for 15 minutes.\n"
                      + str(user.account.magic_token)),
                to=[user.email]
            )
            try:
                mail.send()
                print("Mail sent")
            except Exception:
                # Narrowed from a bare except: still best-effort, but no
                # longer swallows SystemExit/KeyboardInterrupt.
                print("Mail not sent")
            if DEBUG:
                return JsonResponse({'modalMessage': '<br>login with ' + str(user.account.magic_token) + ' (Shown when DEBUG)'})
            else:
                return JsonResponse({'modalMessage': 'Check your email for the token.'})
        else:
            return HttpResponse('{"errors":' + magic_auth_form.errors.as_json() + '}', content_type="application/json")
    else:
        user = authenticate(magic_token=str(magic_token))
        if user:
            login(request, user)
            return home(request)
        else:
            return redirect('/')
def tag_auth(request):
    """
    Login by tag
    """
    if request.method != 'POST':
        raise Http404()
    form = TagAuthForm(request.POST)
    if not form.is_valid():
        return HttpResponseBadRequest('{"errors":' + form.errors.as_json() + '}',
                                      content_type="application/json")
    uid = form.cleaned_data['tag_uid']
    user = authenticate(tagKey=uid)
    if user is None:
        # Unknown tag: report both machine-readable and modal errors.
        return JsonResponse({
            'errors': {
                'tag_uid': [{'message': 'Tag {} not found'.format(uid),
                             'code': 'tagnotfound'}],
            },
            'modalMessage': 'Tag {} not found!'.format(uid),
        })
    login(request, user)
    return JsonResponse({'redirect': '/'})
@login_required
def tag_list(request):
    """Return the rendered list of the user's login tags as JSON."""
    owned_tags = UserTag.objects.filter(user=request.user)
    rendered = render_to_string('namubufferiapp/taglist.html', {'tags': owned_tags})
    return JsonResponse({'taglist': rendered})
@login_required
def tag_modify(request, uid):
    """Create (POST) or delete (DELETE) a login tag identified by `uid`.

    DELETE answers 404 when the tag does not exist or belongs to another
    user.  POST answers 409 when a tag with the same uid already exists.
    Other methods raise 404 (previously they fell through returning None).
    """
    if request.method == 'DELETE':
        try:
            tag = UserTag.objects.get(uid=uid)
        except UserTag.DoesNotExist:
            raise Http404("Tag does not exist")
        if tag.user != request.user:
            # Do not let users delete (or probe) other users' tags.
            raise Http404("Wrong user")
        tag.delete()
        return HttpResponse("Tag deleted", status=200)
    elif request.method == 'POST':
        try:
            # Return value unused: the original bound it to an unused local.
            add_tag_to_user(request.user, uid)
            return HttpResponse("Tag created", status=201)
        except IntegrityError:
            return HttpResponse("Another tag exists ({})!".format(uid),
                                status=409)
    raise Http404()
|
# automatically generated by the FlatBuffers compiler, do not modify
# namespace: flattrs_test
class NestedUnion(object):
    """FlatBuffers union discriminator values for `NestedUnion`.

    Generated code -- values must stay in sync with the .fbs schema;
    do not edit by hand.
    """
    NONE = 0  # union value absent
    Common1 = 1
    nested_NestedJustAString = 2
|
## Your order, please
## 6 kyu
## https://www.codewars.com/kata/55c45be3b2079eccff00010f
def order(sentence):
    """Sort the words of `sentence` by the digit each word contains.

    Each word is expected to carry exactly one digit 1..9 (per the kata);
    words without a digit are dropped.  Returns the re-joined sentence.
    """
    ranked = {}
    for token in sentence.split(" "):
        digits = [int(ch) for ch in token if ch.isdigit()]
        if digits:
            # Last digit wins, matching a per-character overwrite.
            ranked[token] = digits[-1]
    return " ".join(sorted(ranked, key=ranked.get))
|
import os
import argparse
import numpy as np
import carla_rllib
from carla_rllib.environments.carla_envs.base_env import make_env
from carla_rllib.environments.carla_envs.config import BaseConfig
from carla_rllib.utils.clean_up import clear_carla
from stable_baselines import DDPG
from stable_baselines.ddpg.policies import CnnPolicy
from stable_baselines.common.vec_env import DummyVecEnv
from stable_baselines.ddpg.noise import OrnsteinUhlenbeckActionNoise
def run_test(config):
    """Stable baselines test

    Builds a CARLA gym environment, wraps it for stable-baselines, and
    trains a DDPG agent with Ornstein-Uhlenbeck exploration noise for
    10000 timesteps.

    Mandatory configuration settings:
    - 'continuous' agent
    - camera_settings enabled
    - stable_baselines enabled
    """
    env = None
    try:
        # Create Environment
        env = make_env(config)
        env = DummyVecEnv([lambda: env])  # stable-baselines expects a VecEnv
        # Initialize DDPG and start learning
        n_actions = env.action_space.shape[-1]
        param_noise = None
        # OU noise: temporally correlated exploration suited to continuous control.
        action_noise = OrnsteinUhlenbeckActionNoise(
            mean=np.zeros(n_actions), sigma=float(0.5) * np.ones(n_actions))
        model = DDPG(CnnPolicy, env, verbose=1, param_noise=param_noise,
                     action_noise=action_noise, random_exploration=0.8)
        model.learn(total_timesteps=10000)
    finally:
        # Always clean up, even when setup or training raised.
        if env:
            env.close()
        else:
            # Environment construction failed: clear leftover CARLA actors directly.
            clear_carla(config.host, config.port)
        print("-----Carla Environment is closed-----")
if __name__ == "__main__":
    argparser = argparse.ArgumentParser(
        description='CARLA RLLIB ENV')
    # Default config lives at the root of the installed carla_rllib package.
    package_path, _ = os.path.split(os.path.abspath(carla_rllib.__file__))
    argparser.add_argument(
        '-c', '--config',
        metavar='CONFIG',
        # Bug fix: pass path components to os.path.join instead of
        # pre-concatenating with a hard-coded '/' separator.
        default=os.path.join(package_path, "config.json"),
        type=str,
        help='Path to configuration file (default: root of the package -> carla_rllib)')
    args = argparser.parse_args()
    config = BaseConfig(args.config)
    print("-----Configuration-----")
    print(config)
    run_test(config)
|
#!/usr/bin/env python
# coding: utf-8
import os
import logging
from . import lt_common
from .external import tpl_match
from .external import mod_tplseq
from .external import regexhash
_logger = logging.getLogger(__package__)
class LTGenImportExternal(lt_common.LTGenStateless):
    """Log-template generator that matches messages against an externally
    defined template set loaded from a file, instead of learning templates.

    Templates are compiled to regexes once at construction time and looked
    up through either a hash table or a linear table (see `ltmap`).
    """

    def __init__(self, table, filename, mode, mode_esc, ltmap, head, shuffle=False):
        """
        Args:
            table: template table shared with the framework (passed to super).
            filename (str): path of the template definition file.
            mode (str): file format, "plain" or "ids" (see `_load_tpl`).
            mode_esc (bool): True if the file content is already escaped.
            ltmap (str): regex lookup strategy, "hash" or "table".
            head (int): prefix length used by the hash-based lookup.
            shuffle (bool): if True, randomize the lookup order.
        """
        super(LTGenImportExternal, self).__init__(table)
        self._table = table
        self._fp = filename
        self._l_tpl = self._load_tpl(self._fp, mode, mode_esc)
        # Pre-compile one regex per template; index positions must stay
        # aligned with self._l_tpl.
        self._l_regex = [tpl_match.generate_regex(tpl)
                         for tpl in self._l_tpl]
        if ltmap == "hash":
            self._rtable = regexhash.RegexHashTable(self._l_tpl, self._l_regex,
                                                    head)
        elif ltmap == "table":
            self._rtable = regexhash.RegexTable(self._l_tpl, self._l_regex)
        else:
            raise NotImplementedError
        if shuffle:
            self._rtable.shuffle()

    @staticmethod
    def _load_tpl(fp, mode, mode_esc):
        """Load template strings from file `fp`.

        mode "plain": one template per line.
        mode "ids": each line is "<id> <template>"; the id is dropped.
        Empty lines are skipped.  Unless `mode_esc` is set, templates are
        escaped via tpl_match.add_esc_external before use.

        Raises:
            ValueError: if `fp` does not exist or `mode` is unknown.
        """
        l_tpl = []
        if not os.path.exists(fp):
            errmes = ("log_template_import.def_path"
                      " {0} is invalid".format(fp))
            raise ValueError(errmes)
        with open(fp, 'r') as f:
            for line in f:
                if mode == "plain":
                    mes = line.rstrip("\n")
                elif mode == "ids":
                    line = line.rstrip("\n")
                    mes = line.partition(" ")[2].strip()
                else:
                    raise ValueError("invalid import_mode {0}".format(mode))
                if len(mes) == 0:
                    continue
                if mode_esc:
                    l_tpl.append(mes)
                else:
                    l_tpl.append(tpl_match.add_esc_external(mes))
        return l_tpl

    def generate_tpl(self, pline):
        """Return the template matching parsed line `pline`, adapted to the
        concrete message, or None when no external template matches."""
        mes = pline["message"]
        ret = self._rtable.search(mes)
        if ret is None:
            _logger.debug(
                "No log template found for message : {0}".format(mes))
            return None
        else:
            tplid, matchobj = ret
            tpl = self._l_tpl[tplid]
            # Re-define wildcard segments of the stored template using the
            # concrete match of this line.
            new_tpl = mod_tplseq.redefine_tpl(tpl, pline, matchobj=matchobj)
            return new_tpl

    # Historical implementation kept for reference.
    #def process_line(self, pline):
    #    mes = pline["message"]
    #    ret = self._rtable.search(mes)
    #    if ret is None:
    #        _logger.debug(
    #            "No log template found for message : {0}".format(mes))
    #        return None, None
    #    else:
    #        tplid, matchobj = ret
    #        tpl = self._rtable.l_tpl[tplid]
    #        new_tpl = mod_tplseq.redefine_tpl(tpl, pline, self.sym,
    #                                          matchobj=matchobj)
    #        if self._table.exists(new_tpl):
    #            tid = self._table.get_tid(new_tpl)
    #            return tid, self.state_unchanged
    #        else:
    #            tid = self._table.add(new_tpl)
    #            return tid, self.state_added
def init_ltgen_import_ext(conf, table, shuffle, **kwargs):
    """Build an LTGenImportExternal from the [log_template_import] config section."""
    section = "log_template_import"
    def_path = conf.get(section, "def_path_ext")
    if def_path == "":
        # Fall back to the ordinary template definition path.
        def_path = conf.get(section, "def_path")
    import_mode = conf.get(section, "import_format_ext")
    mode_esc = conf.getboolean(section, "import_format_ext_esc")
    ltmap = conf.get(section, "ext_search_method")
    head = conf.getint(section, "hash_strlen")
    return LTGenImportExternal(table, def_path, import_mode, mode_esc,
                               ltmap, head, shuffle)
|
from .request_util import *
from .throttle import Throttle
|
#!/usr/bin/env python
# coding: utf-8
# ## Curate metadata information on platemaps
#
# For L1000 and Cell Painting data
# In[1]:
import pathlib
import pandas as pd
# In[2]:
# Step 1: L1000
file = "../L1000/L1000_lvl4_cpd_replicate_datasets/l1000_level4_cpd_replicates.csv.gz"
l1000_df = pd.read_csv(file)

print(l1000_df.shape)
l1000_df.head(2)


# In[3]:


# Extract out metadata information necessary for analysis
# replicate_id looks like "<plate>:<well_position>" -- split into two columns.
metadata_plate_df = pd.DataFrame(
    [pd.Series(x) for x in l1000_df.replicate_id.str.split(":")],
)
metadata_plate_df.columns = ["plate", "well_position"]
# Plate-map id = first 17 characters of the plate id -- TODO confirm convention
metadata_plate_df = metadata_plate_df.assign(
    plate_map=metadata_plate_df.plate.str[:17]
)

# Make sure each plate only has one of the same well (no duplicates)
assert (
    metadata_plate_df.drop_duplicates(subset=["plate", "well_position"]).shape
    == metadata_plate_df.shape
)

l1000_meta_cols = [
    "plate",
    "well_position",
    "plate_map",
    "replicate_id",
    "dose",
    "Metadata_broad_sample",
    "pert_iname",
    "moa"
]

# Column-wise concat relies on both frames sharing the same row order/index.
l1000_metadata_df = pd.concat([metadata_plate_df, l1000_df], axis="columns").loc[:, l1000_meta_cols]

# Normalize compound names and MOA labels for joining across datasets.
l1000_metadata_df.pert_iname = l1000_metadata_df.pert_iname.str.lower()
l1000_metadata_df.moa = l1000_metadata_df.moa.str.lower()

# Output to file
file = pathlib.Path("data/L1000_platemap_metadata.tsv.gz")
l1000_metadata_df.to_csv(file, sep="\t", index=False)

print(l1000_metadata_df.shape)
l1000_metadata_df.head(2)


# In[4]:


# Step 2: Cell Painting
file = "../cell_painting/cellpainting_lvl4_cpd_replicate_datasets/cp_level4_cpd_replicates.csv.gz"
cp_df = pd.read_csv(file, low_memory=False)

print(cp_df.shape)
cp_df.head(2)


# In[5]:


# Pin the platemap metadata to a fixed lincs-cell-painting commit for reproducibility.
commit = "e9737c3e4e4443eb03c2c278a145f12efe255756"
cp_platemap_file = f"https://github.com/broadinstitute/lincs-cell-painting/raw/{commit}/metadata/platemaps/2016_04_01_a549_48hr_batch1/barcode_platemap.csv"

cp_meta_df = pd.read_csv(cp_platemap_file, sep=",")
cp_meta_df.columns = [f"Metadata_{x}" for x in cp_meta_df.columns]

cp_meta_cols = [
    "Metadata_Assay_Plate_Barcode",
    "Metadata_Well",
    "Metadata_Plate_Map_Name",
    "replicate_name",
    "Metadata_dose_recode",
    "Metadata_broad_sample",
    "pert_iname",
    "moa"
]

# Merge
# Right join keeps every replicate row, attaching its platemap info.
cp_metadata_df = (
    cp_meta_df
    .merge(
        cp_df,
        left_on=["Metadata_Assay_Plate_Barcode"],
        right_on="Metadata_Plate",
        how="right"
    )
    .loc[:, cp_meta_cols]
)

cp_metadata_df.pert_iname = cp_metadata_df.pert_iname.str.lower()
cp_metadata_df.moa = cp_metadata_df.moa.str.lower()

# Output to file
file = pathlib.Path("data/CellPainting_platemap_metadata.tsv.gz")
cp_metadata_df.to_csv(file, sep="\t", index=False)

print(cp_metadata_df.shape)
cp_metadata_df.head(2)
|
__author__ = 'ABREZNIC'  # module author metadata tag
|
""" Perf file for append operations, should show O(logN). """
from common import SIZES, IMPORT_INIT
import pyperf
def perf_append():
    """Benchmark worst-case BIT append at sizes one below a power of 2.

    A bare append would grow the structure endlessly and average away the
    worst case, so each timed statement appends and then immediately pops:
    popping from the end is O(1), a constant factor per call, which keeps
    every append at the worst case (length about to become a power of 2).
    """
    runner = pyperf.Runner()
    for n in (s - 1 for s in SIZES):
        runner.timeit(
            "{0}".format(n),
            stmt="b.append(0); b.pop()",
            setup=IMPORT_INIT.format(n),
        )
# Register and run the benchmarks when executed as a script.
if __name__ == "__main__":
    perf_append()
|
from sklearn.metrics import accuracy_score, roc_auc_score
import torch
from typing import Dict, Any
from torch.nn import Softmax
from torch.nn.functional import log_softmax, nll_loss
import numpy as np
from .metrics import Metric
from ..utils import find_index
__all__ = ['Accuracy', 'BinaryAccuracy', 'ROCAUCScore', 'MAPAtK', 'Perplexity']
class Accuracy(Metric):
    """Multi-class accuracy over argmax predictions.

    Tracks the best accuracy observed across calls in ``self._best``.
    """

    def __init__(self):
        self._best = 0.0

    def __call__(self, logs: Dict[str, Any]) -> float:
        outputs = logs["outputs"]
        if isinstance(outputs, torch.Tensor):
            # Convert class scores to hard predictions on the CPU.
            preds = torch.argmax(outputs, dim=1).cpu().detach().numpy()
        else:
            preds = outputs
        targets = logs["labels"]
        if isinstance(targets, torch.Tensor):
            targets = targets.cpu().numpy()
        score = accuracy_score(y_true=targets.ravel(), y_pred=preds.ravel())
        self._best = max(self._best, score)
        return score
class BinaryAccuracy(Metric):
    """Accuracy for binary outputs: tensor outputs are rounded to 0/1.

    Tracks the best accuracy observed across calls in ``self._best``.
    """

    def __init__(self):
        self._best = 0.0

    def __call__(self, logs: Dict[str, Any]) -> float:
        outputs = logs["outputs"]
        if isinstance(outputs, torch.Tensor):
            # Round probabilities to hard 0/1 predictions.
            preds = torch.round(outputs).cpu().detach().numpy()
        else:
            preds = outputs
        targets = logs["labels"]
        if isinstance(targets, torch.Tensor):
            targets = targets.cpu().numpy()
        score = accuracy_score(y_true=targets.ravel(), y_pred=preds.ravel())
        self._best = max(self._best, score)
        return score
class ROCAUCScore(Metric):
    """ROC AUC computed from softmax scores of the positive class (column 1).

    Tracks the best score observed across calls in ``self._best``.
    """

    def __init__(self):
        self._best = 0.0
        self.softmax = Softmax(dim=1)

    def __call__(self, logs: Dict[str, Any]) -> float:
        outputs = logs["outputs"]
        if isinstance(outputs, torch.Tensor):
            # Tensor path: labels are assumed to be tensors as well.
            scores = self.softmax(outputs).cpu().detach().numpy()
            targets = logs["labels"].cpu().numpy()
        else:
            scores = outputs
            targets = logs["labels"]
        value = roc_auc_score(y_true=targets, y_score=scores[:, 1])
        self._best = max(self._best, value)
        return value
class MAPAtK(Metric):
    """Mean average precision at k over per-sample top-k candidate lists.

    ``logs["best"]`` must hold, for each sample, exactly k ranked candidates;
    each sample scores 1/(rank+1) when its label appears, else 0.
    Tracks the best value observed across calls in ``self._best``.
    """

    def __init__(self, k: int = 5):
        self._k = k
        self._best = 0.0

    def __call__(self, logs: Dict[str, Any]) -> float:
        assert "best" in logs
        assert len(logs["best"][0]) == self._k
        labels = logs["labels"]
        if isinstance(labels, torch.Tensor):
            labels = labels.cpu().numpy()
        # Renamed from `map`, which shadowed the builtin.
        score = self.map_at_k(logs["best"], labels)
        self._best = max(score, self._best)
        return score

    def map_at_k(self, best, labels) -> float:
        """Average of reciprocal (1-based) ranks of each label in its top-k list."""
        return np.mean(
            [
                (1.0 / (find_index(best[i], labels[i]) + 1)) if labels[i] in best[i] else 0.0
                for i in range(len(best))
            ]
        )
class Perplexity(Metric):
    """
    Perplexity metric to evaluate a language model:
    perplexity(language_model, sentence) = exp(-log language_model(sentence))
    """

    def __init__(self):
        # Lower is better, so the best value starts at +inf.
        self._best = float('inf')

    def __call__(self, logs: Dict[str, Any]) -> float:
        targets = logs["labels"].cpu()
        log_probs = log_softmax(logs["outputs"], dim=1)
        cross_entropy = nll_loss(log_probs, targets)
        value = torch.exp(cross_entropy).cpu().numpy().item()
        self._best = min(self._best, value)
        return value
|
import sys
# Fast stdin helpers -- standard competitive-programming boilerplate
# (unused by the solution below).
read = sys.stdin.buffer.read
readline = sys.stdin.buffer.readline
readlines = sys.stdin.buffer.readlines
sys.setrecursionlimit(10 ** 7)

# Answer: a string of 'x' with the same length as the input string.
s = input()
print('x' * len(s))
|
# Property permission maps.  Leaf values look like access flags
# ('rw' = read/write, 'r-' = read-only) -- TODO confirm against the consumer.
main = {
    'General': {
        'Prop': {
            'Labels': 'rw',
            'AlarmStatus': 'r-'
        }
    },
    'AdminOperStatus': {
        'Prop': {
            'AdminState': 'rw',
            'OperState': 'r-'
        }
    }
}

# Configuration-management properties.
cfgm = {
    'Nto1AccessService': {
        'Prop': {
            'Service': 'rw'
        }
    }
}

# Read-only status properties.
status = {
    'mgmt': {
        'Prop': {
            'maList': 'r-',
            'mepList': 'r-'
        }
    }
}
|
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 26 22:54:48 2018
@author: bjwil
"""
import pdb
def symbolToNumber(symbol):
    """Map a nucleotide symbol to its index: A->0, C->1, G->2, T->3.

    Raises:
        ValueError: if `symbol` is not one of A/C/G/T.  (The original
        if/elif chain crashed with UnboundLocalError in that case.)
    """
    try:
        return "ACGT".index(symbol)
    except ValueError:
        raise ValueError("invalid nucleotide symbol: {!r}".format(symbol))
def patternToNumber(Pattern):
    """Encode a DNA pattern as its lexicographic rank among same-length k-mers.

    Base-4 positional encoding with A<C<G<T.  Accepts any iterable of
    symbols (string or list).  Iterative instead of recursive, so long
    patterns no longer risk hitting the recursion limit, and the digit
    mapping is inlined rather than delegated to symbolToNumber.
    """
    value = 0
    for base in Pattern:
        value = 4 * value + "ACGT".index(base)
    return value
def readData(filename):
    """Return the first line of `filename`, stripped of surrounding whitespace."""
    with open(filename, 'r') as handle:
        return handle.readline().strip()
if __name__ == "__main__":
    # Encode the pattern from the course dataset file and print its rank.
    Pattern = readData('dataset_3010_2.txt')
    result = patternToNumber(Pattern)
    print(result)

# NOTE(review): stray top-level experiment -- runs on import, result discarded.
# patternToNumber also accepts a list of symbols.
g = ['T']
patternToNumber(g)
def mismatch(text1, text2):
    """Return the Hamming distance between two equal-length sequences.

    Raises:
        ValueError: if the lengths differ.  (The original printed a message
        and called sys.exit(), killing the interpreter -- and relied on a
        `sys` import that only appears later in the file.)
    """
    if len(text1) != len(text2):
        raise ValueError('Lengths are different.')
    return sum(1 for a, b in zip(text1, text2) if a != b)
# NOTE(review): the following lines look like a pasted REPL session; several
# fail at import time and should be deleted or moved under a __main__ guard.
type({'A','C','G','T'})
numberToPattern(1,2)  # NameError at import: numberToPattern is defined later in the file
k = 2
t = 1
hammerAA = 'AA'
hammerAT = 'AT'
mismatch(hammerAA, hammerAT)  # result discarded
import sys
chars = "ACGT"  # global nucleotide alphabet used by Neighbors below
for i in chars:
    print(i + suffix)  # NameError: `suffix` is not defined at this point
def Neighbors(hammer, t):
    """Generate the t-mismatch neighborhood of the pattern `hammer`.

    Recursive Neighbors algorithm: extend each neighbor of the suffix with
    every character when there is mismatch budget left, otherwise only with
    the original first character.  Relies on the module-level globals
    `chars` ("ACGT") and `mismatch`.

    NOTE(review): the return type is inconsistent -- a plain string when
    t == 0, a set for single-character input, and a list otherwise.
    Callers expecting a uniform collection should be checked.
    """
    if t == 0:
        # No mismatches allowed: the pattern is its own neighborhood.
        return hammer
    if len(hammer) == 1:
        return {'A','C','G','T'}
    array = []
    suffix = hammer[-(len(hammer)-1):]  # equivalent to hammer[1:]
    prefix = hammer[0:1]
    SuffixNeighbors = Neighbors(suffix,t)
    for text in SuffixNeighbors:
        if mismatch(suffix, text) < t:
            # Budget remains: any first character is allowed.
            for i in chars:
                array.append(i + text)
        else:
            # Budget exhausted: keep the original first character.
            array.append(prefix + text)
    return array
# NOTE(review): more pasted REPL experimentation.  `text` is used two lines
# before it is assigned, so this fails with NameError at import time as
# written; delete or move under a __main__ guard.
prefix = text[0:1]
prefix + 'AT'
suffix = text[-(len(text)-1):]
text = 'CAA'
text[-(len(text)-1):]
text[-2:]
print("\n".join(Neighbors('ACCACTGA', 2)))
patternToNumber('AC')
# Enumerate all k-mers of length k by decoding 0 .. 4**k - 1.
kmerArray = []
for i in range(0,4**k):
    kmerArray.append((numberToPattern(i,k)))
kmerArray
def numberToPattern(index, k):
    """Decode `index` (base-4, A<C<G<T) into the k-mer of length `k`.

    Inverse of patternToNumber.  The digit-to-symbol mapping is inlined as
    "ACGT"[digit]: the original delegated to a `numberToSymbol` helper that
    is not defined anywhere in this file (NameError at call time), and
    carried a redundant `index == 0` special case.
    """
    if k == 1:
        return "ACGT"[index]
    prefix_index, digit = divmod(index, 4)
    return numberToPattern(prefix_index, k - 1) + "ACGT"[digit]
|
import torch
import numpy as np
import sys
import gurobipy as gp
from gurobipy import GRB
def FindSubset(w, a, eps, n, output_flag=False, check_w_lt_eps=False):
    """Find a binary subset of coefficients `a` whose sum approximates `w`.

    Solves min z subject to |w - sum(x_i * a_i)| <= z and
    |w - sum(x_i * a_i)| <= eps with binary x_i, as a Gurobi MIP.
    Exits the whole process when no feasible subset exists.

    Args:
        w: target value to approximate.
        a: candidate coefficients, indexable by 0..n-1.
        eps: maximum allowed absolute deviation from `w`.
        n: number of coefficients.
        output_flag: if True, enable Gurobi output and print diagnostics.
        check_w_lt_eps: if True, short-circuit with sum 0 when |w| <= eps.

    Returns:
        (subset_sum, num_used): the achieved sum (0 when short-circuited)
        and how many a_i terms were used.
    """
    subset_sum = None
    num_used = 0  # number of a_i terms used in the subset sum
    if check_w_lt_eps and (abs(w) <= eps):  # check if the magnitude of w is less than eps
        subset_sum = 0
    else:
        m = gp.Model('mip1')
        m.Params.OutputFlag = output_flag
        x = m.addVars(n, vtype=GRB.BINARY)
        z = m.addVar(vtype=GRB.CONTINUOUS)
        m.setObjective(z, GRB.MINIMIZE)
        # |w - x.a| <= z linearized as two inequalities (z is minimized).
        m.addConstr(w - x.prod(a) <= z)
        m.addConstr(-w + x.prod(a) <= z)
        # Hard feasibility bound: deviation must not exceed eps.
        m.addConstr(w - x.prod(a) <= eps)
        m.addConstr(-w + x.prod(a) <= eps)
        m.Params.MIPGap = 0.01  # accept solutions within 1% of the optimum
        m.optimize()
        if m.status == 2:  # feasible solution found (GRB.OPTIMAL)
            subset = []
            for i in range(len(x)):
                # round() guards against tiny numerical deviations from 0/1.
                if round(x[i].x) > 0:
                    subset.append(a[i])
            subset_sum = sum(subset)
            num_used = len(subset)
            if output_flag:  # print verbose information
                diff = abs(subset_sum - w)
                print('\n' + '-' * 96)
                print('\nNumber of elements in subset:', num_used)
                print('\nValues used to approximate w:', subset)
                print('\nSubset sum:', subset_sum, 'is approximately equal to', w)
                print('\nDifference between subset sum and w:', diff, ', epsilon =', eps)
                print('This difference is less than epsilon:', diff <= eps)
        else:
            print('\nFeasible solution not found for weight value', w, 'and coefficients', a)
            print('Try increasing c, decreasing epsilon, or both.')
            sys.exit(0)
    return subset_sum, num_used
def train(model, device, train_loader, optimizer, criterion, epoch, log_interval):
    """Run one training epoch over flattened 28x28 input batches."""
    model.train()
    for batch_idx, (data, target) in enumerate(train_loader):
        # Flatten images to vectors and move the batch to the target device.
        data = data.reshape(-1, 28 * 28).to(device)
        target = target.to(device)
        loss = criterion(model(data), target)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        if (batch_idx + 1) % log_interval == 0:
            progress = 100. * batch_idx / len(train_loader)
            print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                epoch, batch_idx * len(data), len(train_loader.dataset),
                progress, loss.item()))
def test(model, device, criterion, test_loader, batch_size):
    """Evaluate `model` on `test_loader`; print and return accuracy in percent."""
    model.eval()
    total_loss = 0
    correct = 0
    with torch.no_grad():
        for data, target in test_loader:
            data, target = data.to(device), target.to(device)
            output = model(data)
            total_loss += criterion(output, target)
            # Index of the max log-probability is the predicted class.
            pred = output.argmax(dim=1, keepdim=True)
            correct += pred.eq(target.view_as(pred)).sum().item()
    # Average the summed per-batch losses over the number of batches.
    total_loss /= np.ceil(len(test_loader.dataset) / batch_size)
    test_acc = 100. * correct / len(test_loader.dataset)
    print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format(
        total_loss, correct, len(test_loader.dataset),
        test_acc))
    return test_acc
|
"""
abstract which pyflann implementation is used
from vtool_ibeis._pyflann_backend import pyflann
"""
# import ubelt as ub
# import os
# Public API: the chosen pyflann module (or None) and its FLANN class (or None).
__all__ = ['pyflann', 'FLANN_CLS']
FLANN_CLS = None
pyflann = None
try:
    # Preferred backend: the ibeis fork of pyflann.
    import pyflann_ibeis as pyflann
    FLANN_CLS = pyflann.FLANN
except ImportError:
    # Backend unavailable; callers must check for None before use.
    FLANN_CLS = None
    pyflann = None
# try:
# import pyflann
# FLANN_CLS = pyflann.FLANN
# except ImportError:
# print('no pyflann, using cv2.flann_Index')
# try:
# import cv2
# except Exception:
# # print('no pyflann, using dummy index')
# class _DUMMY_FLANN_CLS:
# def __init__(self):
# raise RuntimeError('pyflann_ibeis is not installed')
# FLANN_CLS = _DUMMY_FLANN_CLS
# else:
# class _CV2_FLANN_CLS:
# def __init__(self):
# self._internal = cv2.flann_Index()
# self.params = {}
# def build_index(self, features, **flann_params):
# # self._internal.build(features, flann_params, distType)
# self._internal.build(features, flann_params)
# def save_index(self, fpath):
# # self._internal.build(features, flann_params, distType)
# self._internal.save(fpath)
# def nn_index(self, query, num_neighbors, checks=ub.NoParam):
# # knnSearch(query, knn[, indices[, dists[, params]]]) -> indices, dists
# return self._internal.knnSearch(query, knn=num_neighbors)
# FLANN_CLS = _CV2_FLANN_CLS
# print('VTOOL_IBEIS BACKEND FOR pyflann = {!r}'.format(pyflann))
# print('VTOOL_IBEIS BACKEND FOR FLANN_CLS = {!r}'.format(FLANN_CLS))
|
import numpy as np
import warnings
'''
TODO
**also** check the TODOs in the script
0. break this into smaller subscripts
* trajectory creation
* masking
* noise
0. rename 'journey' with 'trajectory'
* journey implies travel
0. since slopes are determined by stays, maybe remove this field
* specify the slope of the travel
* the location of the travel (and stays) will be sorted to minimize the slope, cutting the adjacent stays as needed
0. add Assert so that always [stay, trav, ..., trav, stay]
1. add some documentation
2. keep the segment indices
* use later for seg. dept. noise, also training
3. Update the noise enrichment
* segment-dependent noise
* configurable noise distributions for each segment
4. include asserts to ensure no overlaps of stay regions
5. Put all into class
* x, y, noisy y
* segment idices, features
* various returns: np.arrays, pd.DataFrames
6. segments' endpoints to coincide with the sparse stays
* some stays are shorter after masking
7. improve the duplication for the data
* include some specific $x$-locations which are duplicated in certain segments
    * this is like a tower which is pinged multiple times but only gives its location
    * include some specific $\Delta x$'s which are duplicated showing an effective radius when triangulation fails
* segement-/location-specific noise
* try also with the array of weights passed in `np.random.choice`
* changes the probab. of picking up specific events in the full array
'''
"""
Examples of stays
# Go to work, no lunch
stays = [
get_stay( 0, 20, 2),
get_stay( 30, 70, -1),
get_stay( 80, 100, 2)
]
# Go to work with a lunch break
stays = [
get_stay( 0, 20, 2),
get_stay( 30, 55, -1),
get_stay( 60, 65, 0.5),
get_stay( 70, 75, -1),
get_stay( 80, 100, 2)
]
# Work, gym, shop: stay1.T > stay2.T > stay3.T
stays = [
get_stay( 0, 20, 2),
get_stay( 30, 55, -1),
get_stay( 60, 65, 0.5),
get_stay( 70, 75, 2.5),
get_stay( 80, 100, 2)
]
"""
# Masking to sparsify the signal
#### TODO: make the sparsing location/segment dependent
def get_frac_mask(size, frac, verbose=False):
    """Return a shuffled 0/1 float array of length `size` with ~`frac` ones.

    The count of ones is int(frac * size); remaining entries are zeros.
    """
    n_on = int(frac * size)
    # Zeros first, ones second, then shuffle in place for a random layout.
    mask = np.concatenate([np.zeros(size - n_on), np.ones(n_on)])
    np.random.shuffle(mask)
    if verbose:
        print(np.sum(mask) / size)
    return mask
def get_mask_indices(mask):
    """Return a boolean array that is True exactly where `mask` equals 1."""
    return mask == 1
def get_mask(size, frac, verbose=False):
    """Return a boolean mask of length `size` with ~`frac` True entries."""
    frac_mask = get_frac_mask(size, frac, verbose)
    return get_mask_indices(frac_mask)
# NOTE: this mask _adds_ to the start and stop of a trajectory,
# so that it begins around 00:00 and ends around 23:59, always
# TODO: fix this so that trajectories can begin/end at any time
def get_mask_with_duplicates(time_arr, target_frac=1.0, target_dupl_frac=0.0, verbose=False):
    """ Return a (sub)array with/out duplicates

    Get a fraction of time of the time array, where a fraction of it contains duplicates.
    The duplicate fraction refers to the fraction of duplicates in the final array.

    Args:
        time_arr (np.array): time points in hours
        target_frac (float): the fraction of input array to be output as a mask
        target_dupl_frac (float): the fraction of the output events to be duplicated

    Returns:
        np.array: sorted integer index mask to be applied to time_arr (includes duplicates)

    Raises:
        ValueError: If `target_dupl_frac` is too large compared to `target_frac`.

    Examples:
        >>> t_arr.size
        1000
        >>> mask = get_mask_with_duplicates(t_arr, 0.9, 0.1)
        >>> mask
        array([ 32,  32,  89, ..., 960, 971, 998])
        >>> mask.size
        900
        >>> np.unique(mask).size
        810
    """
    # Helpers: curried "integer fraction of a size" and duplicate bookkeeping.
    get_frac_outer = lambda size: lambda frac: int(frac*size)
    get_duplicates_counts = lambda arr: (arr.size, len(set(arr)), arr.size-len(set(arr)))
    from collections import Counter
    # Compute the adjusted final fraction when duplicates are present
    adjusted_frac = (1.0-target_dupl_frac)*target_frac
    dupli_frac = (target_dupl_frac)*target_frac
    base_frac_int = get_frac_outer(time_arr.size)(adjusted_frac)
    dupl_frac_int = get_frac_outer(time_arr.size)(dupli_frac)
    # Never request more duplicates than there are unique base entries.
    dupl_frac_int = min(dupl_frac_int,base_frac_int)
    # Get the unique subset of time points
    time_arr_sub0 = np.random.choice(time_arr, base_frac_int, replace=False)
    # Get the indices of the time points
    mask_ = np.where(np.in1d(time_arr, time_arr_sub0))[0]
    if dupl_frac_int > 0:
        # The set of unique duplicates: will always draw from this set
        mask_dups = np.random.choice(mask_, dupl_frac_int, replace=False)
        iterations = 8
        for n in range(iterations):
            # Get a subsample from the duplicates
            # 1. The fraction controls the multiplicity of duplicates
            base_subsamp_frac = 0.05
            mask_dups_sub = np.random.choice(mask_dups, get_frac_outer(dupl_frac_int)(base_subsamp_frac), \
                                             replace=True, )
            # 2. add back to the duplicates --> keep all events; just increase their frequencies
            mask_dups = np.concatenate((mask_dups, mask_dups_sub))
        # Get the final set of the duplicates
        mask_dups = np.random.choice(mask_dups, dupl_frac_int, replace=True, )
        if verbose:
            # Check the frequencies of the duplicates
            # 1. for the duplicates, count the frequency for each duplicate, ie 1 appears 3x, 2, appears 1x, etc.
            freqs = Counter(mask_dups.tolist())
            print(sum(freqs.values()))
            # 2. for the frequencies, count the frequency of a given frequency, ie. how many 1's, 2's, etc.
            freqs = Counter(list(freqs.values()))
            print(freqs)
            print('freq',sum(freqs.values()), dupl_frac_int, np.unique(mask_dups).size)
            print()
        # Add the duplicate mask back to the original mask
        mask_ = np.concatenate((mask_, mask_dups))
    if verbose:
        totals, uniques, duplicates = get_duplicates_counts(mask_)
        print(totals, uniques, duplicates, round(100.*duplicates/totals,2))
    # Sort and cast so the result can index time_arr directly.
    mask_.sort()
    mask_ = mask_.astype(int)
    return mask_
# NOTE: this is a patch!
# TODO: include this into `get_mask_with_duplicates`
def get_adjusted_dup_mask(time, stays, dup_mask):
    """ Return a masking array consistent with the stays

    Restrict `dup_mask` (output of get_mask_with_duplicates) to indices
    between the first time point of the first stay and the last time point
    of the last stay.

    Args:
        time (np.array): time points in hours
        stays (list[dict]): stays; only the first 'start' and last 'end' are used
        dup_mask (np.array): the masking array (may contain duplicates)

    Returns:
        np.array: the mask filtered to the trajectory's time window
    """
    first_idx = np.where(time >= stays[0]['start'])[0].min()
    last_idx = np.where(time <= stays[-1]['end'])[0].max()
    in_window = (dup_mask >= first_idx) & (dup_mask <= last_idx)
    return dup_mask[in_window]
|
# ------------------------------------------------------------------------
# HOTR official code : main.py
# Copyright (c) Kakao Brain, Inc. and its affiliates. All Rights Reserved
# ------------------------------------------------------------------------
# Modified from DETR (https://github.com/facebookresearch/detr)
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
# ------------------------------------------------------------------------
import argparse
import datetime
import json
import random
import time
import multiprocessing
from pathlib import Path
import numpy as np
import torch
from torch.utils.data import DataLoader, DistributedSampler
import hotr.data.datasets as datasets
import hotr.util.misc as utils
from hotr.engine.arg_parser import get_args_parser
from hotr.data.datasets import build_dataset, get_coco_api_from_dataset
from hotr.engine.trainer import train_one_epoch
from hotr.engine import hoi_evaluator, hoi_accumulator
from hotr.models import build_model
import wandb
from hotr.util.logger import print_params, print_args
def save_ckpt(args, model_without_ddp, optimizer, lr_scheduler, epoch, filename):
    """Save a training checkpoint to ``<output_dir>/<filename>.pth``.

    Args:
        args: parsed command-line arguments; ``args.output_dir`` selects the
            destination directory (nothing is saved when it is empty/falsy).
        model_without_ddp: the bare model (unwrapped from DistributedDataParallel).
        optimizer: optimizer whose state dict is checkpointed.
        lr_scheduler: LR scheduler whose state dict is checkpointed.
        epoch (int): epoch index stored alongside the weights.
        filename (str): checkpoint basename, e.g. 'best' or 'checkpoint'.
    """
    output_dir = Path(args.output_dir)
    if args.output_dir:
        # BUG FIX: the path ignored the `filename` argument, so the 'best'
        # and per-epoch 'checkpoint' saves collided on one file.
        checkpoint_path = output_dir / f'{filename}.pth'
        utils.save_on_master({
            'model': model_without_ddp.state_dict(),
            'optimizer': optimizer.state_dict(),
            'lr_scheduler': lr_scheduler.state_dict(),
            'epoch': epoch,
            'args': args,
        }, checkpoint_path)
def main(args):
    """Train and/or evaluate the HOI model configured by ``args``.

    Sets up (distributed) execution, seeds, datasets, model, criterion,
    optimizer and scheduler; optionally restores weights; then either runs
    evaluation only (``args.eval``) or the full training loop with optional
    per-epoch validation and checkpointing.
    """
    utils.init_distributed_mode(args)
    if args.frozen_weights is not None:
        print("Freeze weights for detector")
    device = torch.device(args.device)
    # fix the seed for reproducibility (offset by rank so workers differ)
    seed = args.seed + utils.get_rank()
    torch.manual_seed(seed)
    np.random.seed(seed)
    random.seed(seed)
    # Data Setup
    dataset_train = build_dataset(image_set='train', args=args)
    dataset_val = build_dataset(image_set='val' if not args.eval else 'test', args=args)
    assert dataset_train.num_action() == dataset_val.num_action(), "Number of actions should be the same between splits"
    # dataset statistics are propagated to model construction via args
    args.num_classes = dataset_train.num_category()
    args.num_inst_actions = dataset_train.num_inst_action()
    args.num_actions = dataset_train.num_action()
    if args.share_enc: args.hoi_enc_layers = args.enc_layers
    if args.pretrained_dec: args.hoi_dec_layers = args.dec_layers
    if args.dataset_file == 'vcoco':
        # Save V-COCO dataset statistics
        args.valid_ids = np.array(dataset_train.get_object_label_idx()).nonzero()[0]
        args.invalid_ids = np.argwhere(np.array(dataset_train.get_object_label_idx()) == 0).squeeze(1)
        args.human_actions = dataset_train.get_human_action()
        args.object_actions = dataset_train.get_object_action()
        args.num_human_act = dataset_train.num_human_act()
    print_args(args)
    if args.distributed:
        sampler_train = DistributedSampler(dataset_train, shuffle=True)
        sampler_val = DistributedSampler(dataset_val, shuffle=False)
    else:
        sampler_train = torch.utils.data.RandomSampler(dataset_train)
        sampler_val = torch.utils.data.SequentialSampler(dataset_val)
    batch_sampler_train = torch.utils.data.BatchSampler(
        sampler_train, args.batch_size, drop_last=True)
    data_loader_train = DataLoader(dataset_train, batch_sampler=batch_sampler_train,
                                   collate_fn=utils.collate_fn, num_workers=args.num_workers)
    data_loader_val = DataLoader(dataset_val, args.batch_size, sampler=sampler_val,
                                 drop_last=False, collate_fn=utils.collate_fn, num_workers=args.num_workers)
    # Model Setup
    model, criterion, postprocessors = build_model(args)
    model.to(device)
    model_without_ddp = model
    if args.distributed:
        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu])
        model_without_ddp = model.module
    n_parameters = print_params(model)
    # backbone parameters get their own (typically smaller) learning rate
    param_dicts = [
        {"params": [p for n, p in model_without_ddp.named_parameters() if "backbone" not in n and p.requires_grad]},
        {
            "params": [p for n, p in model_without_ddp.named_parameters() if "backbone" in n and p.requires_grad],
            "lr": args.lr_backbone,
        },
    ]
    optimizer = torch.optim.AdamW(param_dicts, lr=args.lr, weight_decay=args.weight_decay)
    lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, args.lr_drop)
    # Weight Setup: frozen detector weights go into the inner `detr` module only
    if args.frozen_weights is not None:
        if args.frozen_weights.startswith('https'):
            checkpoint = torch.hub.load_state_dict_from_url(
                args.frozen_weights, map_location='cpu', check_hash=True)
        else:
            checkpoint = torch.load(args.frozen_weights, map_location='cpu')
        model_without_ddp.detr.load_state_dict(checkpoint['model'])
    if args.resume:
        if args.resume.startswith('https'):
            checkpoint = torch.hub.load_state_dict_from_url(
                args.resume, map_location='cpu', check_hash=True)
        else:
            checkpoint = torch.load(args.resume, map_location='cpu')
        model_without_ddp.load_state_dict(checkpoint['model'])
    if args.eval:
        # test only mode: evaluate once and return (no training loop)
        total_res = hoi_evaluator(args, model, criterion, postprocessors, data_loader_val, device)
        sc1, sc2 = hoi_accumulator(args, total_res, True, False)
        return
    # stats: best scenario-1/2 mAP seen so far
    scenario1, scenario2 = 0, 0
    # wandb logging only on the main process
    if args.wandb and utils.get_rank() == 0:
        wandb.init(
            project=args.project_name,
            group=args.group_name,
            name=args.run_name,
            config=args
        )
        wandb.watch(model)
    # Training starts here!
    start_time = time.time()
    for epoch in range(args.start_epoch, args.epochs):
        if args.distributed:
            sampler_train.set_epoch(epoch)
        train_stats = train_one_epoch(
            model, criterion, data_loader_train, optimizer, device, epoch, args.epochs,
            args.clip_max_norm, dataset_file=args.dataset_file, log=args.wandb)
        lr_scheduler.step()
        # Validation
        if args.validate:
            print('-'*100)
            total_res = hoi_evaluator(args, model, criterion, postprocessors, data_loader_val, device)
            if utils.get_rank() == 0:
                sc1, sc2 = hoi_accumulator(args, total_res, False, args.wandb)
                # keep a 'best' checkpoint for the highest scenario-1 mAP
                if sc1 > scenario1:
                    scenario1 = sc1
                    scenario2 = sc2
                    save_ckpt(args, model_without_ddp, optimizer, lr_scheduler, epoch, filename='best')
                print(f'| Scenario #1 mAP : {sc1:.2f} ({scenario1:.2f})')
                print(f'| Scenario #2 mAP : {sc2:.2f} ({scenario2:.2f})')
            print('-'*100)
        save_ckpt(args, model_without_ddp, optimizer, lr_scheduler, epoch, filename='checkpoint')
    total_time = time.time() - start_time
    total_time_str = str(datetime.timedelta(seconds=int(total_time)))
    print('Training time {}'.format(total_time_str))
    print(f'| Scenario #1 mAP : {scenario1:.2f}')
    print(f'| Scenario #2 mAP : {scenario2:.2f}')
if __name__ == '__main__':
    # Build the CLI on top of the shared HOTR argument parser.
    parser = argparse.ArgumentParser(
        'End-to-End Human Object Interaction training and evaluation script',
        parents=[get_args_parser()]
    )
    args = parser.parse_args()
    if args.output_dir:
        # Nest outputs as <output_dir>/<group>/<run> and create the directory.
        args.output_dir += f"/{args.group_name}/{args.run_name}/"
        Path(args.output_dir).mkdir(parents=True, exist_ok=True)
    main(args)
|
# Uses embedded tuples for points and evaluates every permutation as it is
# generated, without storing a list of all permutations (size x!).
# Versions that store x!-sized lists need far more memory; this one uses much
# less memory and is only bottlenecked by CPU power.
from tkinter import *
import time
import random
import math as m
class MyFrame(Frame):
    """Tk frame that brute-forces the travelling-salesman problem.

    All work happens in ``__init__``: random cities are generated, every
    tour permutation is evaluated via the next-permutation algorithm, and
    each new best tour is drawn on the canvas as it is found.
    """
    def __init__(self):
        Frame.__init__(self)
        # canvas size in pixels (also the coordinate range for cities)
        num5=800
        self.myCanvas = Canvas(width=num5,height=num5,bg="black")
        self.myCanvas.grid()
        ## number of cities (10 cities -> 9! = 362880 tours starting at city 0)
        num2 = cities =10
        vals = []
        num7= 1000  # NOTE(review): unused
        points= []
        tempdistance= []
        distances = []
        tempperms= []
        tempdistance = []
        d1=0
        d2=0
        bestpath = [0 for i in range(num2+1)]
        worstpath = [0 for i in range(num2+1)]
        a=0  # NOTE(review): unused
        b=10**12  # best (smallest) tour length found so far
        c=0       # worst (largest) tour length found so far
        showworstpath = False
        def swap(a,i,j):
            """Swap elements i and j of list a in place."""
            temp = a[i]
            a[i]=a[j]
            a[j] = temp
        def perms(n):
            """Return n! (the number of permutations of n items)."""
            if n==1:
                return 1
            else:
                return n*perms(n-1)
        ## make random city points inside the canvas
        for z in range(0,num2):
            x =random.randint(0,num5)
            y= random.randint(0,num5)
            points.append((x,y))
        print(points)
        ## precompute the full Euclidean distance matrix between cities
        for w in range(0,num2):
            for p in range(0,num2):
                tempdistance.append(m.sqrt((points[p][0]-points[w][0])**2+(points[p][1]-points[w][1])**2))
            distances.append(tempdistance)
            tempdistance= []
        ## initial (identity) permutation of city indices
        for t in range(0,num2):
            vals.append(t)
        # brute-force loop over all (num2-1)! tours that start/end at city 0
        for p in range(0,int(perms(len(vals)-1))):
            tempperms = vals[:]
            tempperms+=[0]  # close the tour back at the starting city
            # sum the leg distances of this permutation
            for s in range(0,len(tempperms)-1):
                p1 = int(tempperms[s])
                p2 = int(tempperms[s+1])
                tempdistance.append(distances[p1][p2])
            ## simple if statements to remember the worst and best distances/paths without a list
            if sum(tempdistance)<b:
                b=sum(tempdistance)
                print("current distance",b)
                bestpath = tempperms
                # redraw the new best tour in green
                self.myCanvas.delete("all")
                for z in range(0, num2):
                    d1 = int(bestpath[z])
                    d2 = int(bestpath[z+1])
                    self.myCanvas.create_line(points[d1][0],points[d1][1],points[d2][0],points[d2][1],fill="green")
                self.myCanvas.update()
                time.sleep(.2)
            if sum(tempdistance)>c:
                c=sum(tempdistance)
                worstpath = tempperms
            tempdistance = []
            ## step 1 of next-permutation: rightmost i with vals[i] < vals[i+1]
            largestI= -1
            for i in range(0,len(vals)-1):
                if vals[i]<vals[i+1]:
                    largestI = i
            if largestI == -1:
                # vals is in descending order -> all permutations visited
                print("finished")
                break
            ## step 2: rightmost j with vals[largestI] < vals[j]
            largestJ= -1
            for j in range(0,len(vals)):
                if vals[largestI] < vals[j]:
                    largestJ = j
            ## step 3: swap the pivot with its successor
            swap(vals, largestI, largestJ)
            ## step 4: reverse the suffix after the pivot
            endArray = vals[largestI+1:len(vals)]
            endArray.reverse()
            vals = vals[0:largestI+1] + endArray
        if showworstpath:
            # optionally overlay the worst tour in red
            for h in range(0,num2):
                d1 = int(worstpath[h])
                d2 = int(worstpath[h+1])
                self.myCanvas.create_line(points[d1][0],points[d1][1],points[d2][0],points[d2][1],fill="red")
            self.myCanvas.update()
        print("Most Efficient Route",b,bestpath)
        print("Most Inefficient Route",c,worstpath)
# Instantiating the frame runs the whole brute-force search in __init__;
# mainloop() then hands control to Tk so the final drawing stays visible.
frame02=MyFrame()
frame02.mainloop()
##notable points
##[(304, 288), (265, 742), (346, 29), (473, 290), (171, 266), (306, 21), (642, 250), (127, 533), (174, 366), (392, 268)]
|
'''
Parses the cmdline-args, runs the simulation and exports the results
if requested.
'''
import argparse
import json
import logging
import os
import risk
#-----------------------------------------------------------------------------#
# constants
class Constants():
    """Read-only container for the project-wide constant values."""

    class __Paths():
        """Filesystem locations used by the simulation."""

        def __init__(self):
            # directory containing this source file
            here = os.path.dirname(os.path.abspath(__file__))
            self._root = os.path.join(here)
            self._build = os.path.join(self._root, '..', 'build')
            self._risk_output = os.path.join(self._build, 'risk.json')

        @property
        def root(self):
            """Directory of this source file."""
            return self._root

        @property
        def build(self):
            """Build directory next to the source root."""
            return self._build

        @property
        def risk_output(self):
            """Path of the json-file receiving the simulation results."""
            return self._risk_output

    def __init__(self):
        self._paths = Constants.__Paths()
        self._boardgame = 'Risk'

    @property
    def paths(self):
        """Filesystem locations (see the private ``__Paths`` helper)."""
        return self._paths

    @property
    def boardgame(self):
        """Name of the simulated boardgame."""
        return self._boardgame


CONSTANTS = Constants()
#-----------------------------------------------------------------------------#
# config
class Config():
    """Mutable run settings collected from the command line."""

    def __init__(self):
        # simulation parameters live in their own config object
        self._sim = risk.SimulationConfig()
        self._output_enabled = False
        self._output_forced = False

    @property
    def sim(self):
        """Simulation settings (a ``risk.SimulationConfig``)."""
        return self._sim

    @property
    def is_output_enabled(self):
        """Whether the results should be exported to disk."""
        return self._output_enabled

    @is_output_enabled.setter
    def is_output_enabled(self, value):
        self._output_enabled = value

    @property
    def is_output_forced(self):
        """Whether an existing output-file may be overwritten."""
        return self._output_forced

    @is_output_forced.setter
    def is_output_forced(self, value):
        self._output_forced = value
#-----------------------------------------------------------------------------#
# cmdline-parsing
def parse_cmdline():
    '''
    Parse cmdline-args and print help-msg if specified.

    Also sets the logging-level of this module and of the ``risk`` package
    according to the ``--verbose`` / ``--logging-level`` flags.

    Returns:
        Config: run settings derived from the parsed arguments.
    '''
    #-------------------------------------------------------------------------#
    # define args and parse them
    description = 'Have you ever asked yourself in boardgame \'Risk\', what '
    description += 'the winning-chance of your attackers/defenders is?'
    parser = argparse.ArgumentParser(description=description)
    # max-fight-rounds
    help_msg = 'Defines the number of dice that should be thrown for the '
    help_msg += 'simulation.'
    parser.add_argument('-n', '--max-fight-rounds',
        metavar=('INT'),
        dest='max_fight_rounds',
        action='store',
        type=int,
        required=False,
        help=help_msg
    )
    # seed
    help_msg = 'Defines the seed for the RNG.'
    parser.add_argument('-s', '--seed',
        metavar=('INT'),
        dest='seed',
        action='store',
        type=int,
        required=False,
        help=help_msg
    )
    # enable output
    help_msg = 'If set, the simulation-results will be exported to the '
    help_msg += 'specified path.'
    parser.add_argument('-o', '--enable-output',
        dest='is_output_enabled',
        action='store_true',
        required=False,
        help=help_msg
    )
    # force output, even if file exists
    help_msg = 'Same as \'--enable-output\' but forced '
    help_msg += '(removing existing file).'
    parser.add_argument('-of', '--force-output',
        dest='is_output_forced',
        action='store_true',
        required=False,
        help=help_msg
    )
    # logging-level
    help_msg = 'Sets the logging-level'
    parser.add_argument('-log', '--logging-level',
        metavar=('STRING'),
        dest='logging_level',
        choices=['debug', 'info', 'warning', 'error'],
        required=False,
        help=help_msg
    )
    help_msg = 'Sets the logging-level to \'info\' overwriting other flags.'
    parser.add_argument('-v', '--verbose',
        dest='verbose',
        action='store_true',
        required=False,
        help=help_msg
    )
    # approximation vs analytical solution
    # help_msg = 'If set, the simulation calculates an approximation via '
    # help_msg += 'monte-carlo instead of the analytical correct solution. '
    # help_msg += 'Default is true since analytical solution is not supported '
    # help_msg += 'yet.'
    # parser.add_argument('-mc', '--monte-carlo',
    #     dest='use_mc',
    #     action='store_true',
    #     required=False,
    #     default=True,
    #     help=help_msg
    # )
    args = parser.parse_args()
    #-------------------------------------------------------------------------#
    # logging-level
    if args.verbose:
        args.logging_level = logging.INFO
    elif args.logging_level is not None:
        # argparse restricts choices to valid level names, so the matching
        # logging constant can be looked up directly (replaces an elif chain)
        args.logging_level = getattr(logging, args.logging_level.upper())
    else:
        args.logging_level = logging.WARNING
    # set logging-levels for this module and the risk-package
    logging.getLogger(__name__).setLevel(args.logging_level)
    logging.getLogger(risk.__name__).setLevel(args.logging_level)
    #-------------------------------------------------------------------------#
    # finalize and return
    cfg = Config()
    cfg.sim.max_fight_rounds = args.max_fight_rounds
    cfg.sim.seed = args.seed
    cfg.is_output_enabled = args.is_output_enabled
    cfg.is_output_forced = args.is_output_forced
    return cfg
#-----------------------------------------------------------------------------#
if __name__ == '__main__':
    # init logging
    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger(__name__)
    # extract params from the command line
    cfg = parse_cmdline()
    # check export-path before running simulation
    # (and wasting time if out-file exists)
    if not cfg.is_output_forced:
        if cfg.is_output_enabled:
            if os.path.exists(CONSTANTS.paths.risk_output):
                err_msg = f'Output-file {CONSTANTS.paths.risk_output} does '
                err_msg += 'already exist.'
                logger.error(err_msg)
                exit(1)
            else:
                # output wanted and file absent -> safe to write,
                # treat it like a forced export from here on
                cfg.is_output_forced = True
    #-------------------------------------------------------------------------#
    # simulate
    sim = risk.Simulation(cfg.sim)
    result = sim.monte_carlo()
    #-------------------------------------------------------------------------#
    # export results to a json-file, together with config and game name
    result = {'data': result}
    result['config'] = cfg.sim.to_dict()
    result['boardgame'] = CONSTANTS.boardgame
    if cfg.is_output_forced:
        with open(CONSTANTS.paths.risk_output, 'w') as json_file:
            json.dump(result, json_file, indent=4)
    # # (historical) percentage-calculation kept for reference:
    # for dict_def in result.values():
    #     for counts in dict_def.values():
    #         counts['defended'] /= float(sim.max_fight_rounds)
    #         counts['draw'] /= float(sim.max_fight_rounds)
    #         counts['defeated'] /= float(sim.max_fight_rounds)
    # log the final result (only visible with -v / -log info)
    logger.info(result)
|
# Software License Agreement (BSD License)
#
# Copyright (c) 2013, Open Source Robotics Foundation, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Open Source Robotics Foundation, Inc. nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Author: Tully Foote <tfoote@osrfoundation.org>
# Author: William Woodall <william@osrfoundation.org>
"""
This module implements discovery of packages which export various spec files.
You can use this API as follows, assuming workspace of
'test/discovery_workspaces/minimal'::
>>> from pprint import pprint
>>> from capabilities.discovery import package_index_from_package_path
>>> from capabilities.discovery import spec_file_index_from_package_index
>>> from capabilities.discovery import spec_index_from_spec_file_index
>>> workspaces = ['test/discovery_workspaces/minimal']
>>> package_index = package_index_from_package_path(workspaces)
>>> spec_file_index = spec_file_index_from_package_index(package_index)
>>> pprint(spec_file_index)
{'minimal_pkg': {
'capability_interface': ['test/discovery_workspaces/minimal/minimal_pkg/interfaces/Minimal.yaml'],
'capability_provider': [
'test/discovery_workspaces/minimal/minimal_pkg/providers/minimal.yaml',
'test/discovery_workspaces/minimal/minimal_pkg/providers/specific_minimal.yaml'],
'package': <catkin_pkg.package.Package object at 0x10bb28df8>,
'semantic_capability_interface': [
'test/discovery_workspaces/minimal/minimal_pkg/interfaces/SpecificMinimal.yaml']}}
>>> spec_index, errors = spec_index_from_spec_file_index(spec_file_index)
>>> print(errors)
[]
>>> spec_index.names
[minimal_pkg/specific_minimal,
minimal_pkg/Minimal,
minimal_pkg/SpecificMinimal,
minimal_pkg/minimal]
>>> pprint(spec_index.specs)
{minimal_pkg/minimal:
<capabilities.specs.provider.CapabilityProvider object at 0x10391ce50>,
minimal_pkg/specific_minimal:
<capabilities.specs.provider.CapabilityProvider object at 0x10391cd10>,
minimal_pkg/Minimal:
<capabilities.specs.interface.CapabilityInterface object at 0x103952f90>,
minimal_pkg/SpecificMinimal:
<capabilities.specs.semantic_interface.SemanticCapabilityInterface object at 0x103952b50>}
>>> spec_index.interface_names
[minimal_pkg/Minimal]
>>> spec_index.interfaces
{minimal_pkg/Minimal: <capabilities.specs.interface.CapabilityInterface at 0x103952f90>}
>>> spec_index.interfaces['Minimal']
<capabilities.specs.interface.CapabilityInterface object at 0x10b7e3410>
>>> spec_index.semantic_interfaces
{'SpecificMinimal': <capabilities.specs.semantic_interface.SemanticCapabilityInterface object at 0x10b7bf3d0>}
>>> pprint(spec_index.providers)
{'minimal': <capabilities.specs.provider.CapabilityProvider object at 0x10b7bf750>,
'specific_minimal': <capabilities.specs.provider.CapabilityProvider object at 0x10b7bfd10>}
"""
import os
from catkin_pkg.packages import find_packages
from capabilities.specs.interface import capability_interface_from_file_path
from capabilities.specs.interface import InvalidInterface
from capabilities.specs.provider import capability_provider_from_file_path
from capabilities.specs.provider import InvalidProvider
from capabilities.specs.semantic_interface import semantic_capability_interface_from_file_path
from capabilities.specs.semantic_interface import InvalidSemanticInterface
class DuplicateNameException(Exception):
    """Raised when two specs resolve to the same fully-qualified name."""

    def __init__(self, name, colliding_package, spec_type):
        message = "Spec named '{0}' is defined twice in the '{1}' package."
        Exception.__init__(self, message.format(name, colliding_package))
        # keep the collision details accessible to callers
        self.spec_name = name
        self.package = colliding_package
        self.spec_type = spec_type
class InterfaceNameNotFoundException(Exception):
    """Raised when a spec references an interface that was never loaded."""

    def __init__(self, msg, spec_name, spec_type, spec_package):
        Exception.__init__(self, msg)
        # record which spec made the dangling reference
        self.spec_name = spec_name
        self.package = spec_package
        self.spec_type = spec_type
def package_index_from_package_path(package_paths):
    """Find all packages on the given list of paths

    Iterates over the given list of paths in reverse order so that packages
    found in the paths at the beginning of the list get overlaid onto packages
    with the same name which were found in paths farther back in the list.

    The resulting dictionary is keyed by the package name (so packages with
    duplicate names are overlaid) and the values are the
    :py:class:`catkin_pkg.package.Package` class

    :param package_paths: list of paths to search
    :type package_paths: list
    :returns: dictionary of package objects keyed by name of the package
    :rtype: dict
    """
    # Walking back-to-front means packages from earlier paths overwrite
    # same-named packages from later paths (overlay order).
    return {
        package.name: package
        for path in reversed(package_paths)
        for package in find_packages(path).values()
    }
def spec_file_index_from_package_index(package_index):
    """Creates an index of spec files by package.

    Takes a dict of package objects keyed by package name.

    Returns a dict structured like this::

        {
            '<package_name>': {
                'package': package_obj,
                'capability_interface': [path to spec file, ...],
                'capability_provider': [path to spec file, ...],
                'semantic_capability_interface': [path to spec file, ...]
            },
            ...
        }

    This dict contains a dict for each package, keyed by package name.
    Those dicts contain the parsed package object, and a list of relative
    paths for spec files, separated by spec type. Packages exporting no
    spec files at all are left out of the result.

    :param package_index: dict of :py:class:`catkin_pkg.package.Package`'s
        keyed by package name to be processed
    :type package_index: dict
    :returns: spec file index structure
    :rtype: dict
    """
    spec_tags = (
        'capability_interface',
        'capability_provider',
        'semantic_capability_interface',
    )
    index = {}
    for package_name, package in package_index.items():
        package_dir = os.path.dirname(package.filename)
        entry = {'package': package}
        for tag in spec_tags:
            entry[tag] = []
        # collect spec file paths from the package's export tags
        for export in package.exports:
            tag = export.tagname
            if tag != 'package' and tag in entry:
                entry[tag].append(os.path.join(package_dir, export.content))
        # only keep packages that actually export at least one spec
        if any(entry[tag] for tag in spec_tags):
            index[package_name] = entry
    return index
def _spec_loader(spec_thing_index, spec_thing_loaders):
    """Run the per-type loader callbacks over a spec index, collecting errors.

    Interfaces are loaded first, then semantic interfaces, then providers,
    because the latter two validate name references against specs that must
    already be registered in the :py:class:`SpecIndex`.

    :param spec_thing_index: dict keyed by package name whose values map
        spec type to a list of loadable things (e.g. file paths)
    :param spec_thing_loaders: dict mapping spec type to a callable
        ``loader(thing, package_name, spec_index)``
    :returns: the populated SpecIndex and a list of recoverable errors
    """
    spec_index = SpecIndex()
    errors = []
    recoverable = (
        InterfaceNameNotFoundException,
        DuplicateNameException,
        InvalidInterface,
        InvalidSemanticInterface,
        InvalidProvider
    )
    # load order matters; see docstring
    load_order = (
        'capability_interface',
        'semantic_capability_interface',
        'capability_provider',
    )
    for spec_type in load_order:
        loader = spec_thing_loaders[spec_type]
        for package_name, package_dict in spec_thing_index.items():
            for thing in package_dict[spec_type]:
                try:
                    loader(thing, package_name, spec_index)
                except recoverable as exc:
                    errors.append(exc)
    return spec_index, errors
def spec_index_from_spec_file_index(spec_file_index):
    """Builds a :py:class:`SpecIndex` from a spec file index

    Goes through each spec path in each package of the given spec file index
    and parses them into objects. The objects are stored in a
    :py:class:`SpecIndex` before being returned.

    Duplicate Names are not allowed, even between different spec types
    and packages. Any duplicate names will be raised as a
    :py:exc:`DuplicateNameException`.

    Any other errors encountered during spec file processing will be returned
    as a list along with the :py:class:`SpecIndex`.

    :param spec_file_index: spec_file_index, see
        :py:func:`spec_file_index_from_package_index`
    :type spec_file_index: dict
    :returns: SpecIndex which contains all the loaded specs
        and a list of any errors encountered while loading the spec files
    :rtype: :py:class:`SpecIndex`, :py:obj:`list`
    :raises DuplicateNameException: when two interfaces have the same name
    """
    # One tiny loader per spec type: parse the file, register the result.
    def load_interface(path, package_name, spec_index):
        spec_index.add_interface(
            capability_interface_from_file_path(path), path, package_name)

    def load_semantic_interface(path, package_name, spec_index):
        spec_index.add_semantic_interface(
            semantic_capability_interface_from_file_path(path), path, package_name)

    def load_provider(path, package_name, spec_index):
        spec_index.add_provider(
            capability_provider_from_file_path(path), path, package_name)

    loaders = {
        'capability_interface': load_interface,
        'semantic_capability_interface': load_semantic_interface,
        'capability_provider': load_provider,
    }
    return _spec_loader(spec_file_index, loaders)
class SpecIndex(object):
    """Container for capability spec file locations and respective spec classes
    """
    def __init__(self):
        self.__packages = []
        self.__interfaces = {}
        self.__providers = {}
        self.__semantic_interfaces = {}

    def __add_package(self, package_name):
        # Record the package name once; repeated adds are no-ops.
        if package_name in self.__packages:
            return
        self.__packages.append(package_name)

    def add_interface(self, interface, file_path, package_name):
        """Add a loaded CapabilityInterface object into the repository

        :param interface: CapabilityInterface object which was loaded using a
            factory function
        :type interface: :py:class:`.specs.interface.CapabilityInterface`
        :param file_path: path to the interface spec file that was loaded
        :type file_path: str
        :param package_name: name of the package which contains the interface
        :type package_name: str
        :raises: :py:exc:`DuplicateNameException` if there is a name collision
        """
        # Names are qualified as '<package>/<name>' before the collision check.
        interface_name = '{package}/{name}'.format(package=package_name, name=interface.name)
        interface.name = interface_name
        if interface_name in self.names:
            raise DuplicateNameException(
                interface_name, package_name,
                'capability_interface')
        self.__add_package(package_name)
        self.__interfaces[interface_name] = {
            'path': file_path,
            'instance': interface
        }

    def remove_interface(self, interface_name):
        """Removes a capability interface by name

        :param interface_name: name of the interface to remove
        :type interface_name: str
        :raises: :py:exc:`KeyError` if there is no interface by that name
        """
        del self.__interfaces[interface_name]

    def add_semantic_interface(self, semantic_interface, file_path, package_name):
        """Add a loaded SemanticCapabilityInterface object into the repository

        :param semantic_interface: SemanticCapabilityInterface object which was
            loaded using a factory function
        :type semantic_interface:
            :py:class:`.specs.semantic_interface.SemanticCapabilityInterface`
        :param file_path: path to the semantic interface spec file that
            was loaded
        :type file_path: str
        :param package_name: name of the package which contains the
            semantic interface
        :type package_name: str
        :raises: :py:exc:`DuplicateNameException` if there is a name collision
        :raises: :py:exc:`InterfaceNameNotFoundException` if the interface which
            this semantic capability interface redefines is not found.
        """
        semantic_interface_name = '{package}/{name}'.format(package=package_name, name=semantic_interface.name)
        semantic_interface.name = semantic_interface_name
        if semantic_interface_name in self.names:
            raise DuplicateNameException(
                semantic_interface_name, package_name,
                'semantic_capability_interface')
        # The redefined interface must already be loaded.
        if semantic_interface.redefines not in self.interface_names:
            raise InterfaceNameNotFoundException(
                "Semantic capability interface '{0}' redefines '{1}', but the '{1}' interface was not found."
                .format(semantic_interface_name, semantic_interface.redefines),
                semantic_interface_name, package_name,
                'semantic_capability_interface')
        self.__add_package(package_name)
        self.__semantic_interfaces[semantic_interface_name] = {
            'path': file_path,
            'instance': semantic_interface
        }

    def remove_semantic_interface(self, semantic_interface_name):
        """Removes a semantic capability interface by name

        :param semantic_interface_name: name of the interface to remove
        :type semantic_interface_name: str
        :raises: :py:exc:`KeyError` if there is no interface by that name
        """
        del self.__semantic_interfaces[semantic_interface_name]

    def add_provider(self, provider, file_path, package_name):
        """Add a loaded CapabilityProvider object into the repository

        :param provider: CapabilityProvider object which was loaded using a
            factory function
        :type provider: :py:class:`.specs.provider.CapabilityProvider`
        :param file_path: path to the provider spec file that was loaded
        :type file_path: str
        :param package_name: name of the package which contains the provider
        :type package_name: str
        :raises: :py:exc:`DuplicateNameException` if there is a name collision
        :raises: :py:exc:`InterfaceNameNotFoundException` if the interface which
            this capability provider implements is not found.
        """
        provider_name = '{package}/{name}'.format(package=package_name, name=provider.name)
        provider.name = provider_name
        if provider_name in self.names:
            raise DuplicateNameException(
                provider_name, package_name,
                'capability_provider')
        # Providers may implement either a plain or a semantic interface.
        interfaces = (self.interface_names + self.semantic_interface_names)
        if provider.implements not in interfaces:
            raise InterfaceNameNotFoundException(
                "Capability provider '{0}' implements '{1}', but the '{1}' interface was not found."
                .format(provider_name, provider.implements),
                provider_name, package_name,
                'capability_provider')
        self.__add_package(package_name)
        self.__providers[provider_name] = {
            'path': file_path,
            'instance': provider
        }

    def remove_provider(self, provider_name):
        """Removes a capability provider by name

        :param provider_name: name of the provider to remove
        :type provider_name: str
        :raises: :py:exc:`KeyError` if there is no provider by that name
        """
        del self.__providers[provider_name]

    @property
    def names(self):
        """list of all names"""
        # BUG FIX: dict views cannot be concatenated with `+` on Python 3;
        # the original `keys() + keys() + keys()` raised TypeError there.
        return list(self.interfaces) + list(self.semantic_interfaces) + list(self.providers)

    @property
    def specs(self):
        """dict of specs, keyed by name"""
        result = {}
        # There should be no key collisions as collisions are found on insertion
        result.update(self.interfaces)
        result.update(self.semantic_interfaces)
        result.update(self.providers)
        return result

    @property
    def interface_names(self):
        """list of capability interface names"""
        return list(self.__interfaces.keys())

    @property
    def interfaces(self):
        """dict of capability interfaces, keyed by name"""
        return {n: x['instance'] for n, x in self.__interfaces.items()}

    @property
    def interface_paths(self):
        """dict of capability interface spec paths, keyed by name"""
        return {n: x['path'] for n, x in self.__interfaces.items()}

    @property
    def provider_names(self):
        """list of capability provider names"""
        return list(self.__providers.keys())

    @property
    def providers(self):
        """dict of capability providers, keyed by name"""
        return {n: x['instance'] for n, x in self.__providers.items()}

    @property
    def provider_paths(self):
        """dict of capability provider spec paths, keyed by name"""
        return {n: x['path'] for n, x in self.__providers.items()}

    @property
    def semantic_interface_names(self):
        """list of semantic capability interface names"""
        return list(self.__semantic_interfaces.keys())

    @property
    def semantic_interfaces(self):
        """dict of semantic capability interfaces, keyed by name"""
        return {n: x['instance'] for n, x in self.__semantic_interfaces.items()}

    @property
    def semantic_interface_paths(self):
        """dict of semantic capability interface spec paths, keyed by name"""
        return {n: x['path'] for n, x in self.__semantic_interfaces.items()}
|
from functions.selectors.selectVersion import selectVersion
from functions.selectors.selectVersionType import selectVersionType
from functions.getters.getVersionData import getVersionData
from functions.getters.getVersionManifest import getVersionManifest
from functions.fs.createClientFolders import createClientFolders
from functions.meta.createAutorunScript import createAutorunScript
from functions.meta.asyncMagic import doSomeAsyncMagic
from config.config import constants
from datetime import datetime
import shutil
def build():
    """Build a client package: pick a version, create the folder layout,
    download assets asynchronously, strip META-INF and emit an autorun script.
    """
    manifest = getVersionManifest()
    versions, versionsNumbs, versionType = selectVersionType(manifest)
    versionData = getVersionData(selectVersion(versions, versionsNumbs))
    createClientFolders(versionData)
    magicImportantMushrooms = doSomeAsyncMagic(versionData)
    # Drop the signature directory so the repacked jar does not fail
    # signature verification.
    pkg = constants['package']
    shutil.rmtree(f"{pkg['outputPath']}/{pkg['nativesDir']}/META-INF")
    createAutorunScript(versionData['id'], versionData['assetIndex']['id'], versionType, magicImportantMushrooms)
    print(f'\n| {datetime.now().time()} Complete! if you need to start the client later,'
          f' there is start.py in the output folder |')
|
from setuptools import setup, find_packages
# Runtime dependencies of the package.
REQUIREMENTS = (
    'django>=1.3',
)
# Extra dependencies needed only to run the test suite.
TEST_REQUIREMENTS = (
    'south',
    'mock',
    'django-debug-toolbar',
)
from ckeditor import VERSION
# Read the long description with a context manager so the file handle is
# closed deterministically (the original inline open() leaked the handle,
# which raises ResourceWarning on modern Pythons).
with open('README', 'r') as readme:
    LONG_DESCRIPTION = readme.read()
setup(
    name="django-admin-ckeditor",
    version=VERSION,
    author="Aaron Madison",
    description="Ckeditor integration with Django admin.",
    long_description=LONG_DESCRIPTION,
    url="https://github.com/madisona/django-admin-ckeditor",
    packages=find_packages(exclude=["example"]),
    include_package_data=True,
    install_requires=REQUIREMENTS,
    tests_require=TEST_REQUIREMENTS,
    zip_safe=False,
    classifiers = [
        "Development Status :: 4 - Beta",
        "Environment :: Web Environment",
        "Framework :: Django",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: BSD License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Topic :: Software Development",
        "Topic :: Software Development :: Libraries :: Application Frameworks",
    ],
)
|
import time
import logging
from typing import Optional
from boto3.dynamodb.conditions import Attr
from lib.utils.utils import Utils
from lib.config.constants import *
log = Utils.get_logger(__name__, logging.INFO)
class AuditDao:
    """
    Supports operations on the figgy-config-auditor ddb table.
    """

    def __init__(self, dynamo_resource):
        # dynamo_resource: a boto3 DynamoDB service resource.
        self._dynamo_resource = dynamo_resource
        self._table = self._dynamo_resource.Table(AUDIT_TABLE_NAME)

    def put_delete_log(self, user: str, action: str, ps_name: str, timestamp: Optional[int] = None):
        """Record a delete event for parameter `ps_name`.

        BUG FIX: the original default ``timestamp=int(time.time() * 1000)``
        was evaluated once at class-definition time, so every call that
        omitted `timestamp` recorded the process start time. ``None`` now
        means "now", computed per call.
        """
        if timestamp is None:
            timestamp = int(time.time() * 1000)
        log.debug(f"Storing delete event: {user} | {action} | {ps_name}")
        item = {
            AUDIT_PARAM_NAME_KEY: ps_name,
            AUDIT_EVENT_TYPE_ATTR: action,
            AUDIT_USER_ATTR: user,
            AUDIT_TIME_KEY: timestamp,
        }
        self._table.put_item(Item=item)

    def put_audit_log(
        self,
        user: str,
        action: str,
        ps_name: str,
        ps_value: Optional[str],
        ps_type: str,
        ps_key_id: Optional[str],
        ps_description: Optional[str],
        ps_version: int,
        timestamp: Optional[int] = None
    ):
        """Record a full audit event for parameter `ps_name`.

        Falsy attributes are stripped before writing because DynamoDB
        rejects empty-string attribute values. Same timestamp-default bug
        fix as in :meth:`put_delete_log`.
        """
        if timestamp is None:
            timestamp = int(time.time() * 1000)
        item = {
            AUDIT_PARAM_NAME_KEY: ps_name,
            AUDIT_EVENT_TYPE_ATTR: action,
            AUDIT_USER_ATTR: user,
            AUDIT_TIME_KEY: timestamp,
            AUDIT_VALUE_ATTR: ps_value,
            AUDIT_TYPE_ATTR: ps_type,
            AUDIT_KEYID_ATTR: ps_key_id,
            AUDIT_DESCRIPTION_ATTR: ps_description,
            AUDIT_VERSION_ATTR: str(ps_version),
        }
        # Drop falsy attributes (None / empty) rather than writing them.
        put_item = {}
        for key, value in item.items():
            if value:
                put_item[key] = value
        self._table.put_item(Item=put_item)

    # Should not go in this dao and should be moved...
    def cleanup_test_logs(self):
        """Delete audit rows created by integration tests once they are
        older than TEST_VALUE_KEEP_TIME minutes.

        NOTE(review): `scan` is unpaginated here — rows beyond the first
        1 MB page are never examined; confirm table size stays small.
        """
        filter_exp = Attr(AUDIT_VALUE_ATTR).eq(DELETE_ME_VALUE) | Attr(AUDIT_USER_ATTR).eq(CIRCLECI_USER_NAME)
        result = self._table.scan(FilterExpression=filter_exp)
        items = result["Items"] if result["Items"] else []
        for item in items:
            # if this record is older than TEST_VALUE_KEEP_TIME
            age_in_minutes = (int(time.time() * 1000) - item[AUDIT_TIME_KEY]) / 1000 / 60
            if age_in_minutes > TEST_VALUE_KEEP_TIME:
                print(f"Cleaning up: {item[AUDIT_PARAM_NAME_KEY]}")
                self._table.delete_item(
                    Key={AUDIT_PARAM_NAME_KEY: item[AUDIT_PARAM_NAME_KEY], AUDIT_TIME_KEY: item[AUDIT_TIME_KEY]}
                )
            else:
                print(f"{item[AUDIT_PARAM_NAME_KEY]} is too young for cleanup - it's {age_in_minutes} minutes old. "
                      f"Waiting...")
|
#!/usr/bin/python
import logging
import argparse
# Command-line interface for the dash updater.
# NOTE(review): the help text says INFO is the default, but no `default=` is
# set here — when --loglevel is omitted, the level comes from the config
# (see the `args.loglevel` check further down); confirm before changing.
parser = argparse.ArgumentParser()
parser.add_argument(
    "--loglevel",
    type=str,
    metavar="LEVEL",
    choices=["CRITICAL",
             "ERROR", "WARNING", "INFO", "DEBUG"],
    help="CRITICAL, ERROR, WARNING, INFO (default) or DEBUG",
)
parser.add_argument(
    "--test",
    action="store_true",
    help="don't write any data to memcache"
)
# Parse eagerly (before the heavier imports below) so `--help` is fast.
args = parser.parse_args()
import os
import sys
import time
from multiprocessing.pool import ThreadPool
from threading import Timer
from dash.config import Config
def execute_files(folder):
    """Recursively execute every ``*.py`` file under *folder*, in sorted
    name order, descending into subdirectories.

    Each file is exec'd with this module's globals, so config snippets can
    populate module state by side effect. NOTE(review): exec'ing arbitrary
    files is a code-injection risk if the config directory is writable by
    untrusted users — flagged, intentionally left as-is.
    """
    items = os.listdir(folder)
    items.sort()
    for item in items:
        path = os.path.join(folder, item)
        if os.path.isfile(path):
            if path.endswith(".py"):
                # Read via a context manager so the file handle is closed
                # deterministically (the original open() leaked it).
                with open(path) as src:
                    code = compile(src.read(), path, 'exec')
                exec(code, globals(), locals())
        else:
            execute_files(path)
def update_loop():
    """One update tick: fetch all sources, derive datasets, write outputs.

    Re-schedules itself every `conf.interval` seconds via a one-shot Timer.
    Relies on module globals: conf, pool, args, independent_jobs, meta_jobs,
    outputs.
    """
    # Schedule the next tick first so a failure below cannot stop the loop.
    # (The original bound Timer(...).start() to a local — start() returns
    # None, so that assignment was dead code.)
    Timer(conf.interval, update_loop).start()
    logging.debug("update: fetching data")
    # fetch raw data from all the sources
    t1 = time.time()
    pool.map(lambda x: x(), independent_jobs)
    logging.debug("update: deriving datasets")
    # process data
    for job in meta_jobs:
        job()
    # t2 is taken before the output phase: the reported duration covers
    # fetching + deriving only, not writing.
    t2 = time.time()
    if not args.test:
        logging.debug("update: writing outputs")
        pool.map(lambda x: x(), outputs)
    else:
        logging.debug("test mode - suppressing output")
    logging.info("update done, {0:.2f} seconds".format(t2-t1))
# load config
conf = Config()
pwd = os.path.dirname(__file__)
configpath = os.path.join(pwd, "conf.d")
# Every conf.d/*.py snippet is exec'd; they populate `conf` by side effect.
execute_files(configpath)
# Command-line flags override whatever the config snippets configured.
if args.loglevel:
    conf.set_loglevel(args.loglevel)
    logging.info("loglevel reset by command line argument")
if args.test:
    logging.warning("test mode - output data will not be stored in memcache!")
independent_jobs = conf.get_independent_callables()
meta_jobs = conf.get_meta_callables()
outputs = conf.get_output_callables()
# execute jobs
# Pool size defaults to one thread per independent job unless the config
# sets `threads` explicitly.
threads = getattr(conf, "threads", None)
if not threads:
    threads = len(independent_jobs)
logging.info("starting " + str(threads) + " threads")
pool = ThreadPool(threads)
logging.debug("entering main loop")
# Kick off the self-rescheduling update cycle (never returns control here;
# subsequent ticks run on Timer threads).
update_loop()
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Autograph compiles Python code into equivalent TensorFlow code.
Equivalent here means that they have the same effect when executed.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# TODO(mdan): Bring only the relevant symbols to the top level.
from tensorflow.python.autograph import operators
from tensorflow.python.autograph import utils
from tensorflow.python.autograph.core.errors import GraphConstructionError
from tensorflow.python.autograph.core.errors import TfRuntimeError
from tensorflow.python.autograph.core.errors import improved_errors
from tensorflow.python.autograph.impl.api import ConversionOptions
from tensorflow.python.autograph.impl.api import RunMode
from tensorflow.python.autograph.impl.api import convert
from tensorflow.python.autograph.impl.api import converted_call
from tensorflow.python.autograph.impl.api import do_not_convert
from tensorflow.python.autograph.impl.api import to_code
from tensorflow.python.autograph.impl.api import to_graph
from tensorflow.python.autograph.lang.directives import set_element_type
from tensorflow.python.autograph.lang.directives import set_loop_options
from tensorflow.python.autograph.lang.special_functions import stack
from tensorflow.python.autograph.lang.special_functions import tensor_list
from tensorflow.python.autograph.pyct.transformer import AutographParseError
from tensorflow.python.util.all_util import remove_undocumented
# Public surface of this module: any attribute NOT listed here is removed
# from the namespace by remove_undocumented() below.
_allowed_symbols = [
    # Main API
    'ConversionOptions',
    'RunMode',
    'convert',
    'converted_call',
    'do_not_convert',
    'to_code',
    'to_graph',
    # Overloaded operators
    'operators',
    # Errors
    'improved_errors',
    'GraphConstructionError',
    'TfRuntimeError',
    # Python language "extensions"
    'set_element_type',
    'set_loop_options',
    'stack',
    'tensor_list',
    # Exceptions
    'AutographParseError',
    # Utilities: to be removed
    'utils',
]
# Prune everything imported above that is not part of the public API.
remove_undocumented(__name__, _allowed_symbols)
|
def func(x):
    """Return a closure that adds *x*, a fixed offset of 4, and its own
    argument *z*."""
    offset = 4

    def add_all(z):
        # Captures x and offset from the enclosing call frame.
        return x + offset + z

    return add_all
# Demonstrate that each call to func() captures its own copy of i: unlike a
# late-binding lambda created inside a loop, these closures do not share state.
for i in range(5):
    closure=func(i)
    print("closure ",i+5," = ","closure ",closure(i+5))
|
import pymongo
# Connect to the diagram database.
# NOTE(review): hard-coded Docker-internal address — consider making the
# connection string configurable via env/config.
client = pymongo.MongoClient('mongodb://172.17.0.3:27017/')
db = client['diagram']
col = db["ngapForm"]
# NGAP elementary-procedure names, indexed by procedure code 0..65.
_PROCEDURE_NAMES = [
    'AMFConfigurationUpdate',
    'AMFStatusIndication',
    'CellTrafficTrace',
    'DeactivateTrace',
    'DownlinkNASTransport',
    'DownlinkNonUEAssociatedNRPPaTransport',
    'DownlinkRANConfigurationTransfer',
    'DownlinkRANStatusTransfer',
    'DownlinkUEAssociatedNRPPaTransport',
    'ErrorIndication',
    'HandoverCancel',
    'HandoverNotification',
    'HandoverPreparation',
    'HandoverResourceAllocation',
    'InitialContextSetup',
    'InitialUEMessage',
    'LocationReportingControl',
    'LocationReportingFailureIndication',
    'LocationReport',
    'NASNonDeliveryIndication',
    'NGReset',
    'NGSetup',
    'OverloadStart',
    'OverloadStop',
    'Paging',
    'PathSwitchRequest',
    'PDUSessionResourceModify',
    'PDUSessionResourceModifyIndication',
    'PDUSessionResourceRelease',
    'PDUSessionResourceSetup',
    'PDUSessionResourceNotify',
    'PrivateMessage',
    'PWSCancel',
    'PWSFailureIndication',
    'PWSRestartIndication',
    'RANConfigurationUpdate',
    'RerouteNASRequest',
    'RRCInactiveTransitionReport',
    'TraceFailureIndication',
    'TraceStart',
    'UEContextModification',
    'UEContextRelease',
    'UEContextReleaseRequest',
    'UERadioCapabilityCheck',
    'UERadioCapabilityInfoIndication',
    'UETNLABindingRelease',
    'UplinkNASTransport',
    'UplinkNonUEAssociatedNRPPaTransport',
    'UplinkRANConfigurationTransfer',
    'UplinkRANStatusTransfer',
    'UplinkUEAssociatedNRPPaTransport',
    'WriteReplaceWarning',
    'SecondaryRATDataUsageReport',
    'UplinkRIMInformationTransfer',
    'DownlinkRIMInformationTransfer',
    'RetrieveUEInformation',
    'UEInformationTransfer',
    'RANCPRelocationIndication',
    'UEContextResume',
    'UEContextSuspend',
    'UERadioCapabilityIDMapping',
    'HandoverSuccess',
    'UplinkRANEarlyStatusTransfer',
    'DownlinkRANEarlyStatusTransfer',
    'AMFCPRelocationIndication',
    'ConnectionEstablishmentIndication',
]
# Every procedure shares the same default metadata, so generate the table
# instead of hand-writing 66 identical literals (the original repeated the
# same dict per entry). Each entry gets its own fresh 'fields' list, exactly
# as the original literals did. Keys are string procedure codes '0'..'65'.
ProcedureCodes = {
    str(code): {'name': name, 'required': True, 'filter': False, 'fields': [], 'ShowOnMainLine': False}
    for code, name in enumerate(_PROCEDURE_NAMES)
}
# Reset the seeded documents: drop any previous copies of _id 1 and 2, then
# insert both documents carrying the full procedure-code table.
x = col.delete_one({"_id":1})
x = col.delete_one({"_id":2})
x = col.insert_one({"_id": 1, "ProcedureCodes": ProcedureCodes})
x = col.insert_one({"_id": 2, "ProcedureCodes": ProcedureCodes})
|
# -*- coding: utf-8 -*-
import random
import string
import lemoncheesecake.api as lcc
from lemoncheesecake.matching import check_that, is_integer, has_entry, is_none, is_not_none, has_length, is_true, \
is_false
from common.base_test import BaseTest
from common.receiver import Receiver
# Suite metadata picked up by the lemoncheesecake loader.
SUITE = {
    "description": "Registration Api"
}
@lcc.prop("main", "type")
@lcc.prop("positive", "type")
@lcc.prop("negative", "type")
@lcc.tags("api", "notice", "registration_api")
@lcc.suite("Registration API", rank=1)
class RegistrationApi(object):
    """Smoke suite: verifies connectivity to the registration API."""
    @lcc.tags("connection_to_registration_api", "connection_to_apis")
    @lcc.test("Check connection to RegistrationApi")
    def connection_to_registration_api(self, get_random_valid_account_name, get_random_integer):
        """Connect to the node, obtain the registration API identifier,
        register an account through it, and check that using a different
        identifier is rejected with an error."""
        base = BaseTest()
        base.ws = base.create_connection_to_echo()
        base.receiver = Receiver(web_socket=base.ws)
        lcc.set_step("Requesting Access to a Registration API")
        api_identifier = base.get_identifier("registration")
        check_that("'registration api identifier'", api_identifier, is_integer())
        lcc.set_step("Check node status, if empty run pre-deploy")
        base.check_node_status()
        lcc.set_step("Check Registration api identifier. Call registration api method 'register_account'")
        # The second element of generate_keys() is the public key; it is
        # used for both the active and the echorand key here.
        generate_keys = base.generate_keys()
        public_key = generate_keys[1]
        callback = get_random_integer
        account_params = [callback, get_random_valid_account_name, public_key, public_key]
        response_id = base.send_request(base.get_request("register_account", account_params), api_identifier)
        response = base.get_response(response_id)
        base.get_notice(callback)
        check_that(
            "'call method 'register_account''",
            response["result"], is_none(), quiet=False
        )
        lcc.set_step("Check that Registration api identifier is unique")
        generate_keys = base.generate_keys()
        public_key = generate_keys[1]
        account_params = [callback, get_random_valid_account_name, public_key, public_key]
        # Deliberately use a wrong identifier (api_identifier + 1); the node
        # must answer with an error entry.
        response_id = base.send_request(base.get_request("register_account", account_params), api_identifier + 1)
        response = base.get_response(response_id, negative=True)
        check_that(
            "'using another identifier gives an error'",
            response, has_entry("error"), quiet=True
        )
        base.ws.close()
@lcc.prop("positive", "type")
@lcc.tags("api", "notice", "registration_api")
@lcc.suite("Positive testing of method 'register_account'", rank=2)
class PositiveTesting(BaseTest):
    """Positive test-cases for the registration API's 'register_account'."""
    def __init__(self):
        super().__init__()
        self.__database_api_identifier = None
        self.__registration_api_identifier = None
    def _register_account(self, callback, new_account, public_key=None, echorand_key=None):
        """Call 'register_account'; keys not supplied are freshly generated."""
        generate_keys = self.generate_keys()
        if public_key is None:
            public_key = generate_keys[1]
        if echorand_key is None:
            echorand_key = generate_keys[1]
        account_params = [callback, new_account, public_key, echorand_key]
        response_id = self.send_request(self.get_request("register_account", account_params),
                                        self.__registration_api_identifier)
        return self.get_response(response_id, negative=True)
    def setup_suite(self):
        """Resolve the database and registration API identifiers once."""
        super().setup_suite()
        lcc.set_step("Setup for {}".format(self.__class__.__name__))
        self.__database_api_identifier = self.get_identifier("database")
        self.__registration_api_identifier = self.get_identifier("registration")
        lcc.log_info(
            "API identifiers are: database='{}', registration='{}'".format(self.__database_api_identifier,
                                                                           self.__registration_api_identifier))
    @lcc.test("Registration with valid credential")
    @lcc.depends_on("RegistrationApi.RegistrationApi.connection_to_registration_api")
    def registration_with_valid_credential(self, get_random_valid_account_name, get_random_integer):
        """Register an account and verify it exists via 'get_account_by_name'."""
        lcc.set_step("Registration an account")
        new_account = get_random_valid_account_name
        callback = get_random_integer
        response = self._register_account(callback, new_account)
        self.get_notice(callback)
        check_that(
            "register account '{}'".format(new_account),
            response["result"], is_none(), quiet=False
        )
        lcc.set_step("Check that the account is registered on the network. Call method 'get_account_by_name'")
        response_id = self.send_request(self.get_request("get_account_by_name", [new_account]),
                                        self.__database_api_identifier)
        response = self.get_response(response_id)
        check_that(
            "'call method 'get_account_by_name''",
            response["result"], is_not_none(), quiet=True
        )
    @lcc.test("Registration with unequal public keys")
    @lcc.depends_on("RegistrationApi.RegistrationApi.connection_to_registration_api")
    def registration_with_unequal_public_keys(self, get_random_valid_account_name, get_random_integer):
        """Register with distinct active/echorand keys and verify both stick."""
        new_account = get_random_valid_account_name
        callback = get_random_integer
        public_keys_active = self.generate_keys()[1]
        public_keys_echorand = self.generate_keys()[1]
        lcc.set_step("Registration an account")
        response = self._register_account(callback, new_account, public_keys_active, public_keys_echorand)
        lcc.log_info("Call method 'register_account' with active public key: {}, echorand public key: {}"
                     "".format(public_keys_active, public_keys_echorand))
        self.get_notice(callback)
        check_that("register account '{}'".format(new_account), response["result"], is_none(), quiet=True)
        lcc.set_step("Check that the account is registered in the network. Call method 'get_account_by_name'")
        response_id = self.send_request(self.get_request("get_account_by_name", [new_account]),
                                        self.__database_api_identifier)
        result = self.get_response(response_id)["result"]
        check_that("'active public key'", result["active"]["key_auths"][0][0] == public_keys_active, is_true())
        check_that("'echorand public key'", result["echorand_key"] == public_keys_echorand, is_true())
        check_that("'keys are unequal'", public_keys_active == public_keys_echorand, is_false())
    @lcc.test("Get callback: notification whenever transaction for registration account broadcast")
    @lcc.depends_on("RegistrationApi.RegistrationApi.connection_to_registration_api")
    def get_callback_about_registration_account(self, get_random_integer, get_random_valid_account_name):
        """Register an account and cross-check its broadcast transaction via
        'get_recent_transaction_by_id' and 'get_block'."""
        callback = get_random_integer
        new_account = get_random_valid_account_name
        lcc.set_step("Call registration api method 'register_account'")
        response = self._register_account(callback, new_account)
        check_that("'call method 'register_account''", response["result"], is_none(), quiet=True)
        # BUG FIX: the original step message had no '{}' placeholder, so the
        # registered account name never appeared in the report.
        lcc.set_step("Get notification about broadcast of registered account with name: '{}'".format(new_account))
        notice = self.get_notice(callback)
        check_that("notification", notice, has_length(2))
        lcc.set_step("Get transaction of registration account'")
        tx_id = notice["tx_id"]
        response_id = self.send_request(
            self.get_request("get_recent_transaction_by_id", [tx_id]), self.__database_api_identifier)
        transaction = self.get_response(response_id)["result"]["operations"][0]
        lcc.log_info("Call method 'get_recent_transaction_by_id' with transaction_id='{}' parameter".format(tx_id))
        lcc.set_step("Get block with transaction of registration account'")
        block_num = notice["block_num"]
        response_id = self.send_request(
            self.get_request("get_block", [block_num]), self.__database_api_identifier)
        transaction_in_block = self.get_response(response_id)["result"]["transactions"][0][
            "operations"][0]
        lcc.log_info("Call method 'get_block' with block_num='{}' parameter".format(block_num))
        lcc.set_step("Check transactions from 'get_recent_transaction_by_id' and 'get_block'")
        check_that("'transactions are equal'", transaction == transaction_in_block, is_true())
@lcc.prop("negative", "type")
@lcc.tags("api", "registration_api")
@lcc.suite("Negative testing of method 'register_account'", rank=3)
class NegativeTesting(BaseTest):
    """Negative test-cases for the registration API's 'register_account':
    invalid names, invalid keys, wrong parameter types and counts."""
    def __init__(self):
        super().__init__()
        self.__registration_api_identifier = None
    def setup_suite(self):
        """Resolve the registration API identifier once per suite."""
        super().setup_suite()
        lcc.set_step("Setup for {}".format(self.__class__.__name__))
        self.__registration_api_identifier = self.get_identifier("registration")
        lcc.log_info(
            "Registration API identifiers is '{}'".format(self.__registration_api_identifier))
    @staticmethod
    def get_random_character(random_def, not_hyphen_or_point=False):
        """Return *random_def*, substituting '*' for '-'/'.' when those are
        excluded (they are valid mid-name characters)."""
        character = random_def
        if not_hyphen_or_point and (character == "-" or character == "."):
            return "*"
        return character
    @staticmethod
    def get_account_name(_from=1, _to=64):
        """Return a random lowercase-letter name of length in [_from, _to)."""
        random_num = random.randrange(_from, _to)
        random_string = ''.join(
            random.SystemRandom().choice(string.ascii_lowercase) for _ in range(random_num))
        return random_string
    def get_registration_parameters(self, callback, new_account):
        """Return a valid parameter list plus the matching parameter names."""
        public_key = self.generate_keys()[1]
        return [callback, new_account, public_key, public_key], ["callback", "account_name", "active_key",
                                                                 "echorand_key"]
    def _register_account(self, callback, new_account, public_key=None, echorand_key=None):
        """Call 'register_account'; keys not supplied are freshly generated."""
        generate_keys = self.generate_keys()
        if public_key is None:
            public_key = generate_keys[1]
        if echorand_key is None:
            echorand_key = generate_keys[1]
        account_params = [callback, new_account, public_key, echorand_key]
        response_id = self.send_request(self.get_request("register_account", account_params),
                                        self.__registration_api_identifier)
        return self.get_response(response_id, negative=True)
    @lcc.test("Empty account name")
    @lcc.depends_on("RegistrationApi.RegistrationApi.connection_to_registration_api")
    def empty_account_name(self, get_random_integer):
        """An empty account name must be rejected."""
        lcc.set_step("Registration empty account")
        callback = get_random_integer
        new_account = ""
        response = self._register_account(callback, new_account)
        check_that(
            "'register_account' return error message",
            response, has_entry("error"), quiet=True
        )
    @lcc.test("Account name length longer than 63")
    @lcc.depends_on("RegistrationApi.RegistrationApi.connection_to_registration_api")
    def account_name_length_longer_than_63(self, get_random_integer):
        """Names longer than the 63-character limit must be rejected."""
        lcc.set_step("Register an account with a name longer than 63")
        callback = get_random_integer
        new_account = self.get_account_name(64, 100)
        response = self._register_account(callback, new_account)
        check_that(
            "'register_account' return error message",
            response, has_entry("error"), quiet=True
        )
    @lcc.test("Account name start with digit")
    @lcc.depends_on("RegistrationApi.RegistrationApi.connection_to_registration_api")
    def account_name_start_with_digit(self, get_random_integer):
        """Names starting with a digit must be rejected."""
        lcc.set_step("Register an account with a name that start with digit")
        callback = get_random_integer
        new_account = "1" + self.get_account_name(_to=63)
        response = self._register_account(callback, new_account)
        check_that(
            "'register_account' return error message",
            response, has_entry("error"), quiet=True
        )
    @lcc.test("Account name is digits")
    @lcc.depends_on("RegistrationApi.RegistrationApi.connection_to_registration_api")
    def account_name_is_digits(self, get_random_integer):
        """A purely numeric name (passed as int) must be rejected."""
        lcc.set_step("Register an account with a name from digits")
        callback = get_random_integer
        new_account = 123456
        response = self._register_account(callback, new_account)
        check_that(
            "'register_account' return error message",
            response, has_entry("error"), quiet=True
        )
    @lcc.test("Account name with a special character, not hyphen")
    @lcc.depends_on("RegistrationApi.RegistrationApi.connection_to_registration_api")
    def account_name_with_special_character(self, get_random_integer, get_random_character):
        """A special character inside the name (other than '-'/'.') must be rejected."""
        lcc.set_step("Register an account with a name that have a special character, not hyphen")
        callback = get_random_integer
        part1 = self.get_account_name(_to=4)
        part2 = self.get_account_name(_to=4)
        new_account = part1 + self.get_random_character(get_random_character, not_hyphen_or_point=True) + part2
        response = self._register_account(callback, new_account)
        check_that(
            "'register_account' return error message",
            response, has_entry("error"), quiet=True
        )
    @lcc.test("Account name end with a special character")
    @lcc.depends_on("RegistrationApi.RegistrationApi.connection_to_registration_api")
    def account_name_end_with_special_character(self, get_random_integer, get_random_character):
        """A name ending with a special character must be rejected."""
        lcc.set_step("Register an account with a name that end with a special character")
        callback = get_random_integer
        new_account = self.get_account_name() + self.get_random_character(get_random_character)
        response = self._register_account(callback, new_account)
        check_that(
            "'register_account' return error message",
            response, has_entry("error"), quiet=True
        )
    @lcc.test("Account name is uppercase")
    @lcc.depends_on("RegistrationApi.RegistrationApi.connection_to_registration_api")
    def account_name_is_uppercase(self, get_random_integer):
        """An all-uppercase name must be rejected."""
        lcc.set_step("Register an account with a name that all letters are uppercase")
        callback = get_random_integer
        new_account = self.get_account_name().upper()
        response = self._register_account(callback, new_account)
        check_that(
            "'register_account' return error message",
            response, has_entry("error"), quiet=True
        )
    @lcc.test("Registration with wrong public keys")
    @lcc.depends_on("RegistrationApi.RegistrationApi.connection_to_registration_api")
    def registration_with_wrong_public_keys(self, get_random_valid_account_name, get_random_integer,
                                            get_random_string_only_letters):
        """Corrupted active or echorand public keys must be rejected."""
        lcc.set_step("Registration an account")
        new_account = get_random_valid_account_name
        callback = get_random_integer
        lcc.set_step("Generate public key and make it not valid")
        public_key = self.generate_keys()[1]
        # Corrupt a valid key by overwriting its prefix with random letters.
        invalid_public_key = get_random_string_only_letters + public_key[len(get_random_string_only_letters):]
        lcc.log_info("Invalid public key generated successfully: '{}'".format(invalid_public_key))
        lcc.set_step("Call 'register_account' with invalid active key")
        response = self._register_account(callback, new_account, public_key=invalid_public_key)
        check_that(
            "'register_account' return error message with invalid active key: '{}'".format(invalid_public_key),
            response, has_entry("error"), quiet=True)
        lcc.set_step("Call 'register_account' with invalid echorand key")
        response = self._register_account(callback, new_account, echorand_key=invalid_public_key)
        check_that(
            "'register_account' return error message with invalid echorand key: '{}'".format(invalid_public_key),
            response, has_entry("error"), quiet=True)
    @lcc.test("Registration with wrong params")
    @lcc.depends_on("RegistrationApi.RegistrationApi.connection_to_registration_api")
    def registration_with_with_wrong_params(self, get_random_integer, get_random_valid_account_name,
                                            get_all_random_types):
        """Replacing each parameter in turn with every wrong-typed value
        must yield an error (type-compatible substitutions are skipped)."""
        lcc.set_step("Prepare registration account params")
        registration_params, param_names = self.get_registration_parameters(get_random_integer,
                                                                            get_random_valid_account_name)
        params = registration_params.copy()
        random_type_names = list(get_all_random_types.keys())
        random_values = list(get_all_random_types.values())
        for i in range(len(params)):
            for j, random_value in enumerate(random_values):
                params[i] = random_value
                # Skip substitutions that are actually valid for this slot:
                # the callback accepts any number; the name accepts str/bool.
                if i == 0 and (isinstance(params[i], int) or isinstance(params[i], float)):
                    continue
                if i == 1 and isinstance(params[i], (str, bool)):
                    continue
                lcc.set_step("Call 'register_account' with invalid credential: {}={}".format(param_names[i],
                                                                                             random_type_names[j]))
                response_id = self.send_request(self.get_request("register_account", params),
                                                self.__registration_api_identifier)
                response = self.get_response(response_id, negative=True)
                check_that(
                    "'register_account' return error message with '{}' params".format(params),
                    response, has_entry("error"), quiet=True)
            params = registration_params.copy()
    @lcc.test("Registration with wrong amount of params")
    @lcc.depends_on("RegistrationApi.RegistrationApi.connection_to_registration_api")
    def registration_with_wrong_count_of_params(self, get_random_integer, get_random_valid_account_name):
        """Truncated parameter lists and None-valued parameters must both
        yield an error."""
        registration_params, param_names = self.get_registration_parameters(get_random_integer,
                                                                            get_random_valid_account_name)
        for i in range(1, len(registration_params)):
            params = registration_params[:-i]
            lcc.set_step("Call 'register_account' with wrong count of params = {}".format(len(params)))
            response_id = self.send_request(self.get_request("register_account", params),
                                            self.__registration_api_identifier)
            response = self.get_response(response_id, negative=True)
            check_that("'register_account' return error message with wrong amount of params: {}".format(params),
                       response, has_entry("error"), quiet=True)
        params_with_none = registration_params.copy()
        for i in range(1, len(params_with_none)):
            params_with_none[i] = None
            lcc.set_step("Call 'register_account' with {} = None ".format(param_names[i]))
            response_id = self.send_request(self.get_request("register_account", params_with_none),
                                            self.__registration_api_identifier)
            response = self.get_response(response_id, negative=True)
            check_that(
                "'register_account' return error message with None in params: {}".format(params_with_none),
                response, has_entry("error"), quiet=True)
            params_with_none = registration_params.copy()
|
# -*- coding: utf-8 -*-
""".. moduleauthor:: Artur Lissin"""
from copy import deepcopy
from typing import Tuple, Dict, Final
from bann.b_frameworks.errors.custom_erors import KnownLibError
from bann.b_pan_integration.framwork_key_lib import FrameworkKeyLib
from bann_ex_con.pytorch.external_library import get_e_pytorch_connections, \
get_e_pytorch_net_interfaces
from pan.public.interfaces.config_constants import NetDictLibraryType
from pan.public.interfaces.net_connection import NetConnectionDict
# Framework identifier shared by every lookup in this module.
_FRAMEWORK: Final[str] = FrameworkKeyLib.PYTORCH.value
# Locally defined connection / net-interface libraries for pytorch. Both start
# empty and serve as the base onto which the external libraries are merged.
_LocalConnectionLib: Final[NetConnectionDict] = NetConnectionDict(
    framework=_FRAMEWORK,
    con_dict={}
)
_LocalNetInterfaceLib: Final[NetDictLibraryType] = NetDictLibraryType(
    framework=_FRAMEWORK,
    net_dict={}
)
def _merge_dict(cont: Dict, to_merge_dict: Dict, /) -> None:
    """Merge ``to_merge_dict`` into ``cont`` in place.

    Raises KnownLibError as soon as a key of ``to_merge_dict`` is already
    present in ``cont`` (no silent overwrites).
    """
    for key, value in to_merge_dict.items():
        if key in cont:
            raise KnownLibError(f"Key {key} already defined!")
        cont[key] = value
def get_pytorch_connections() -> Tuple[str, NetConnectionDict]:
    """Return ``(framework, connections)`` where the connection dictionary is a
    deep copy of the local library merged with the external pytorch connections.

    Raises KnownLibError when the external library reports a different
    framework name or a wrong container type.
    """
    external = get_e_pytorch_connections()
    if external[0] != _FRAMEWORK:
        raise KnownLibError(f"Expected {_FRAMEWORK} got {external[0]}")
    if not isinstance(external[1], NetConnectionDict):
        raise KnownLibError(
            f"Expected type {NetConnectionDict.__name__} got {type(external[1]).__name__}"
        )
    merged = deepcopy(_LocalConnectionLib)
    _merge_dict(merged.con_dict, external[1].con_dict)
    return _FRAMEWORK, merged
def get_pytorch_net_interfaces() -> Tuple[str, NetDictLibraryType]:
    """Return ``(framework, net_interfaces)`` where the net dictionary is a
    deep copy of the local library merged with the external pytorch interfaces.

    Raises KnownLibError when the external library reports a different
    framework name or a wrong container type.
    """
    external = get_e_pytorch_net_interfaces()
    if external[0] != _FRAMEWORK:
        raise KnownLibError(f"Expected {_FRAMEWORK} got {external[0]}")
    if not isinstance(external[1], NetDictLibraryType):
        raise KnownLibError(
            f"Expected type {NetDictLibraryType.__name__} got {type(external[1]).__name__}"
        )
    merged = deepcopy(_LocalNetInterfaceLib)
    _merge_dict(merged.net_dict, external[1].net_dict)
    return _FRAMEWORK, merged
|
# Copyright (c) 2015 - present Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
import os
import logging
import re
import util
from inferlib import jwlib
MODULE_NAME = __name__
MODULE_DESCRIPTION = '''Run analysis of code built with a command like:
mvn [options] [task]
Analysis examples:
infer -- mvn build'''
# Only Java sources are captured by this integration.
LANG = ['java']
def gen_instance(*args):
    """Factory hook called by the infer capture framework; returns this
    module's capture object."""
    return MavenCapture(*args)
# This creates an empty argparser for the module, which provides only
# description/usage information and no arguments.
create_argparser = util.base_argparser(MODULE_DESCRIPTION, MODULE_NAME)
class MavenCapture:
    """Drives a maven build in debug mode (`mvn -X`) and derives infer capture
    commands from the javac invocations found in the verbose build output."""

    def __init__(self, args, cmd):
        self.args = args
        logging.info(util.run_cmd_ignore_fail(['mvn', '-version']))
        # TODO: make the extraction of targets smarter
        self.build_cmd = ['mvn', '-X'] + cmd[1:]

    def get_infer_commands(self, verbose_output):
        """Scan maven debug output and return one infer capture command per
        javac invocation (options line + accumulated source files)."""
        file_pattern = r'\[DEBUG\] Stale source detected: ([^ ]*\.java)'
        options_pattern = '[DEBUG] Command line options:'
        source_roots_pattern = '[DEBUG] Source roots:'
        calls = []
        files_to_compile = []
        expect_options = False
        expect_source_roots = False
        for line in verbose_output:
            if expect_options:
                # line has format [Debug] <space separated options>
                javac_args = line.split(' ')[1:] + files_to_compile
                calls.append(jwlib.create_infer_command(javac_args))
                expect_options = False
                files_to_compile = []
            elif expect_source_roots:
                # line has format [Debug] <space separated directories>
                for src_root in line.split(' ')[1:]:
                    for root, _dirs, names in os.walk(src_root):
                        files_to_compile.extend(
                            os.path.join(root, name)
                            for name in names if name.endswith(".java"))
                expect_source_roots = False
            elif options_pattern in line:
                # Next line will have javac options to run
                expect_options = True
            elif source_roots_pattern in line:
                # Next line will have directory containing files to compile
                expect_source_roots = True
            else:
                match = re.match(file_pattern, line)
                if match:
                    files_to_compile.append(match.group(1))
        return calls

    def capture(self):
        """Run the build, collect the capture commands, and execute them."""
        cmds = self.get_infer_commands(util.get_build_output(self.build_cmd))
        clean_cmd = '%s clean' % self.build_cmd[0]
        return util.run_compilation_commands(cmds, clean_cmd)
|
import numpy as np
from sklearn.metrics import accuracy_score
def majority_voting_score(X, y, estimators, classes):
    """Accuracy of a hard majority vote over ``estimators``.

    Each estimator casts one vote per sample; its predictions are interpreted
    as integer indices into ``classes`` (same assumption as the per-element
    version this replaces). Ties are broken toward the lowest class index
    (np.argmax behavior).

    Parameters
    ----------
    X : array-like of shape (n_samples, n_features)
        Samples to predict.
    y : array-like of shape (n_samples,)
        Ground-truth labels.
    estimators : iterable
        Fitted estimators exposing ``predict``.
    classes : np.ndarray
        Class labels; ``classes.take`` maps vote indices back to labels.

    Returns
    -------
    float
        Accuracy of the majority-vote prediction against ``y``.
    """
    n_samples = X.shape[0]
    voting_matrix = np.zeros((n_samples, len(classes)))
    rows = np.arange(n_samples)
    for estimator in estimators:
        # Vectorized tally replacing the O(n) Python inner loop; np.add.at
        # accumulates correctly even for repeated (row, col) index pairs.
        np.add.at(voting_matrix, (rows, estimator.predict(X)), 1)
    majority_prediction = classes.take(np.argmax(voting_matrix, axis=1))
    return accuracy_score(y, majority_prediction)
|
from math import sqrt
num_cases = int(input())
# Extra length a diagonal step contributes compared with a straight step.
diag_step_extra = sqrt(2) - 1
for case_index in range(num_cases):
    input()  # consume the blank separator line between test cases
    dim = int(input())
    if dim == 1:
        total = 0
    else:
        total = dim * dim
        # number of diagonal steps:
        # 1, 1-2-1, 1-2-3-2-1, 1-2-3-4-3-2-1 = (n - 2)^2
        total += ((dim - 2) ** 2 * diag_step_extra)
    print("{:.3f}".format(total))
    if case_index < num_cases - 1:
        print()
|
# Generated by Django 3.2.9 on 2022-01-12 22:06
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: removes the obsolete 'time' field from the
    # market.Item model (follows migration 0005_item).
    dependencies = [
        ('market', '0005_item'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='item',
            name='time',
        ),
    ]
|
import pymel.core as pm
import crab
# ------------------------------------------------------------------------------
class Duplicate(crab.Behaviour):
    """
    Example behaviour: duplicates the node named by ``options.target`` and
    parents the copy under ``options.parent``. Intended only to demonstrate
    how a behaviour can operate.
    """
    identifier = 'Duplicate'
    version = 1

    # --------------------------------------------------------------------------
    def __init__(self, *args, **kwargs):
        super(Duplicate, self).__init__(*args, **kwargs)
        # Node names this behaviour operates on; filled in by the rig options.
        self.options.parent = ''
        self.options.target = ''

    # --------------------------------------------------------------------------
    # noinspection PyUnresolvedReferences
    def apply(self):
        duplicated = pm.duplicate(self.options.target)[0]
        duplicated.setParent(self.options.parent, a=True)
|
import sys
sys.path.insert(0, "../../util/python")
import Cons
def Read(log_datetime):
    """Load the num-cass-threads log written at *log_datetime* and return a Log."""
    return Log("../../logs/num-cass-threads/%s" % log_datetime)
class Log:
    """Parses a num-cass-threads log file whose relevant lines have the form
    "<datetime> <num_threads>", and computes min/max (and, via _CalcStat,
    avg and index-based 50th/99th percentiles) of the thread counts."""

    def __init__(self, fn):
        # datetime string -> number of threads observed at that time
        self.dt_num_threads = {}
        #self.avg = None
        self.min = None
        self.max = None
        #self._50 = None
        #self._99 = None
        self._Read(fn)
        self._CalcStatMinMax()

    def _Read(self, fn):
        """Populate dt_num_threads from fn; lines without exactly two tokens
        are silently skipped."""
        with open(fn) as fo:
            for line in fo.readlines():
                #Cons.P(line)
                t = line.split()
                if len(t) != 2:
                    continue
                self.dt_num_threads[t[0]] = int(t[1])

    def _CalcStatMinMax(self):
        """Set self.min / self.max from the parsed counts (left None if empty)."""
        # .values() (not the Python-2-only .iteritems()) keeps this working on
        # both Python 2 and Python 3; the key is not needed here.
        for num_t in self.dt_num_threads.values():
            self.min = num_t if self.min is None else min(self.min, num_t)
            self.max = num_t if self.max is None else max(self.max, num_t)

    # need to know loadgen start and end time to scope the experiment time range.
    # Not a big deal. Just check min and max.
    def _CalcStat(self):
        """Compute avg, min, max and index-based 50th/99th percentiles.

        Raises ZeroDivisionError when no data was parsed (same as before)."""
        total = 0  # renamed from 'sum', which shadowed the builtin
        num_threads = []
        for num_t in self.dt_num_threads.values():
            total += num_t
            num_threads.append(num_t)
            self.min = num_t if self.min is None else min(self.min, num_t)
            self.max = num_t if self.max is None else max(self.max, num_t)
        self.avg = float(total) / len(self.dt_num_threads)
        num_threads.sort()
        # Index-based percentiles; for small samples these are rough approximations.
        self._50 = num_threads[int(len(num_threads) * 0.5 ) - 1]
        self._99 = num_threads[int(len(num_threads) * 0.99) - 1]
|
# Settings for live deployed environments: vagrant, staging, production, etc
from .base import *  # noqa
# Fall back to local memcached / RabbitMQ when the host env vars are unset.
os.environ.setdefault('CACHE_HOST', '127.0.0.1:11211')
os.environ.setdefault('BROKER_HOST', '127.0.0.1:5672')
# Required environment variables; deployment fails fast (KeyError) if missing.
ENVIRONMENT = os.environ['ENVIRONMENT']
SECRET_KEY = os.environ['SECRET_KEY']
DEBUG = False
# One default database plus one per state (IL, MD, NC). All databases share
# the same user/host/port/password; only the database name differs.
DATABASES['default']['NAME'] = 'traffic_stops_%s' % ENVIRONMENT.lower()
DATABASES['default']['USER'] = 'traffic_stops_%s' % ENVIRONMENT.lower()
DATABASES['default']['HOST'] = os.environ.get('DB_HOST', '')
DATABASES['default']['PORT'] = os.environ.get('DB_PORT', '')
DATABASES['default']['PASSWORD'] = os.environ.get('DB_PASSWORD', '')
DATABASES['traffic_stops_il']['NAME'] = 'traffic_stops_il_%s' % ENVIRONMENT.lower()
DATABASES['traffic_stops_il']['USER'] = 'traffic_stops_%s' % ENVIRONMENT.lower()
DATABASES['traffic_stops_il']['HOST'] = os.environ.get('DB_HOST', '')
DATABASES['traffic_stops_il']['PORT'] = os.environ.get('DB_PORT', '')
DATABASES['traffic_stops_il']['PASSWORD'] = os.environ.get('DB_PASSWORD', '')
DATABASES['traffic_stops_md']['NAME'] = 'traffic_stops_md_%s' % ENVIRONMENT.lower()
DATABASES['traffic_stops_md']['USER'] = 'traffic_stops_%s' % ENVIRONMENT.lower()
DATABASES['traffic_stops_md']['HOST'] = os.environ.get('DB_HOST', '')
DATABASES['traffic_stops_md']['PORT'] = os.environ.get('DB_PORT', '')
DATABASES['traffic_stops_md']['PASSWORD'] = os.environ.get('DB_PASSWORD', '')
DATABASES['traffic_stops_nc']['NAME'] = 'traffic_stops_nc_%s' % ENVIRONMENT.lower()
DATABASES['traffic_stops_nc']['USER'] = 'traffic_stops_%s' % ENVIRONMENT.lower()
DATABASES['traffic_stops_nc']['HOST'] = os.environ.get('DB_HOST', '')
DATABASES['traffic_stops_nc']['PORT'] = os.environ.get('DB_PORT', '')
DATABASES['traffic_stops_nc']['PASSWORD'] = os.environ.get('DB_PASSWORD', '')
DATABASE_ETL_USER = 'etl'
# Filesystem layout served by the web server.
WEBSERVER_ROOT = '/var/www/traffic_stops/'
PUBLIC_ROOT = os.path.join(WEBSERVER_ROOT, 'public')
STATIC_ROOT = os.path.join(PUBLIC_ROOT, 'static')
MEDIA_ROOT = os.path.join(PUBLIC_ROOT, 'media')
LOGGING['handlers']['file']['filename'] = os.path.join(
    WEBSERVER_ROOT, 'log', 'traffic_stops.log')
CACHES = {
    'default': {
        # Check tsdata.utils.flush_memcached when changing this.
        'BACKEND': 'caching.backends.memcached.MemcachedCache',
        'LOCATION': '%(CACHE_HOST)s' % os.environ,
    }
}
ADMINS = (
    ('ODP Team', 'odp-team@caktusgroup.com'),
)
MANAGERS = ADMINS
SERVER_EMAIL = 'no-reply@opendatapolicingnc.com'
DEFAULT_FROM_EMAIL = 'no-reply@opendatapolicingnc.com'
EMAIL_SUBJECT_PREFIX = '[Traffic_Stops %s] ' % ENVIRONMENT.title()
# Cookie hardening for HTTPS deployments.
CSRF_COOKIE_SECURE = True
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
ALLOWED_HOSTS = [os.environ['DOMAIN']]
# Celery worker configuration (active; the broker URL is built from env vars).
CELERY_SEND_TASK_ERROR_EMAILS = True
BROKER_URL = 'amqp://traffic_stops_%(ENVIRONMENT)s:%(BROKER_PASSWORD)s@%(BROKER_HOST)s/traffic_stops_%(ENVIRONMENT)s' % os.environ  # noqa
# NOTE(review): this repeats the LOGGING filename assignment above with the
# same resulting path; one of the two assignments is redundant.
LOGGING['handlers']['file']['filename'] = '/var/www/traffic_stops/log/traffic_stops.log'
REST_FRAMEWORK = {
    'DEFAULT_AUTHENTICATION_CLASSES': []
}
NC_AUTO_IMPORT_DIRECTORY = '/var/www/traffic_stops/NC-automated-import'
# Environment overrides
# These should be kept to an absolute minimum
if ENVIRONMENT.upper() == 'LOCAL':
    # Don't send emails from the Vagrant boxes
    EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
if ENVIRONMENT.upper() == 'PRODUCTION':
    CELERYBEAT_SCHEDULE['automatic-nc-import']['schedule'] = \
        crontab(day_of_month='1', hour=3, minute=0)
    # List of email addresses that receive the report of non-compliance of
    # traffic stop reporting.
    COMPLIANCE_REPORT_LIST = ('Ianmance@southerncoalition.org',)
|
"""Support for a pure Fortran reaction network. These functions will
write the Fortran code necessary to integrate a reaction network
comprised of the rates that are passed in.
"""
import os
import shutil
import sys
import re
from collections import OrderedDict
from abc import ABC, abstractmethod
import random
import string
import sympy
from pynucastro.networks import RateCollection
from pynucastro.networks import SympyRates
class BaseFortranNetwork(ABC, RateCollection):
    """Interpret the collection of rates and nuclei and produce the
    Fortran code needed to integrate the network.
    """
    def __init__(self, *args, **kwargs):
        """Initialize the Fortran network. We take a single argument: a list
        of rate files that will make up the network
        """
        super().__init__(*args, **kwargs)
        # Get the template files for writing this network code
        self.template_files = self._get_template_files()
        # symbolic (sympy) representation of the rate terms
        self.symbol_rates = SympyRates()
        # cached results of compose_ydot()/compose_jacobian(); computed
        # lazily by _write_network()
        self.ydot_out_result = None
        self.solved_ydot = False
        self.jac_out_result = None
        self.jac_null_entries = None
        self.solved_jacobian = False
        # random tag written into the generated sources via the <secret_code>
        # template tags; presumably used to match generated files to this
        # network instance -- TODO confirm
        self.secret_code = ''.join(random.choices(string.ascii_uppercase + string.digits, k=32))
        # a dictionary of functions to call to handle specific parts
        # of the Fortran template
        self.ftags = OrderedDict()
        self.ftags['<nrates>'] = self._nrates
        self.ftags['<nrat_reaclib>'] = self._nrat_reaclib
        self.ftags['<nrat_tabular>'] = self._nrat_tabular
        self.ftags['<nspec>'] = self._nspec
        self.ftags['<network_name>'] = self._network_name
        self.ftags['<nrxn>'] = self._nrxn
        self.ftags['<jion>'] = self._jion
        self.ftags['<spec_names>'] = self._spec_names
        self.ftags['<short_spec_names>'] = self._short_spec_names
        self.ftags['<ebind>'] = self._ebind
        self.ftags['<aion>'] = self._aion
        self.ftags['<aion_inv>'] = self._aion_inv
        self.ftags['<zion>'] = self._zion
        self.ftags['<nion>'] = self._nion
        self.ftags['<screen_add>'] = self._screen_add
        self.ftags['<compute_screening_factors>'] = self._compute_screening_factors
        self.ftags['<write_reaclib_metadata>'] = self._write_reaclib_metadata
        self.ftags['<table_num>'] = self._table_num
        self.ftags['<public_table_indices>'] = self._public_table_indices
        self.ftags['<table_indices>'] = self._table_indices
        self.ftags['<declare_tables>'] = self._declare_tables
        self.ftags['<table_init_meta>'] = self._table_init_meta
        self.ftags['<table_term_meta>'] = self._table_term_meta
        self.ftags['<table_rates_indices>'] = self._table_rates_indices
        self.ftags['<compute_tabular_rates>'] = self._compute_tabular_rates
        self.ftags['<ydot>'] = self._ydot
        self.ftags['<enuc_add_energy_rate>'] = self._enuc_add_energy_rate
        self.ftags['<jacnuc>'] = self._jacnuc
        self.ftags['<yinit_nuc>'] = self._yinit_nuc
        self.ftags['<initial_mass_fractions>'] = self._initial_mass_fractions
        self.ftags['<final_net_print>'] = self._final_net_print
        self.ftags['<headerline>'] = self._headerline
        self.ftags['<pynucastro_home>'] = self._pynucastro_home
        self.ftags['<secret_code>'] = self._secret_code_write
        self.ftags['<secret_code_set>'] = self._secret_code_write_reference
        # one indentation unit in the generated Fortran sources
        self.indent = '  '
        self.num_screen_calls = None
    @abstractmethod
    def _get_template_files(self):
        # This method should be overridden by derived classes
        # to support specific output templates.
        # This method returns a list of strings that are file paths to template files.
        return []
    def fmt_to_dp_f90(self, i):
        """convert a number to Fortran double precision format"""
        return '{:1.14e}'.format(float(i)).replace('e', 'd')
    def fmt_to_rt_f90(self, i):
        """convert a number to custom real type format"""
        return f'{float(i):1.14e}_rt'
    def get_indent_amt(self, l, k):
        """determine the amount of spaces to indent a line"""
        # template tags look like e.g. "<ydot>(2)"; the number in parentheses
        # is the indent level for the generated code
        rem = re.match(r'\A'+k+r'\(([0-9]*)\)\Z', l)
        return int(rem.group(1))
    def _write_network(self, odir=None):
        """
        This writes the RHS, jacobian and ancillary files for the system of ODEs that
        this network describes, using the template files.
        """
        # Prepare RHS terms
        if not self.solved_ydot:
            self.compose_ydot()
        if not self.solved_jacobian:
            self.compose_jacobian()
        # Process template files
        for tfile in self.template_files:
            tfile_basename = os.path.basename(tfile)
            outfile = tfile_basename.replace('.template', '')
            if odir is not None:
                if not os.path.isdir(odir):
                    try:
                        os.mkdir(odir)
                    except OSError:
                        sys.exit(f"unable to create directory {odir}")
                outfile = os.path.normpath(odir + "/" + outfile)
            with open(tfile) as ifile, open(outfile, "w") as of:
                for l in ifile:
                    ls = l.strip()
                    foundkey = False
                    for k in self.ftags:
                        if k in ls:
                            foundkey = True
                            n_indent = self.get_indent_amt(ls, k)
                            # the tag line itself is replaced by the handler's output
                            self.ftags[k](n_indent, of)
                    if not foundkey:
                        of.write(l)
        # Copy any tables in the network to the current directory
        # if the table file cannot be found, print a warning and continue.
        for i_tab in self.tabular_rates:
            tr = self.rates[i_tab]
            tdir = os.path.dirname(tr.rfile_path)
            if tdir != os.getcwd():
                tdat_file = os.path.join(tdir, tr.table_file)
                if os.path.isfile(tdat_file):
                    shutil.copy(tdat_file, os.getcwd())
                else:
                    print(f'WARNING: Table data file {tr.table_file} not found.')
    def _nrates(self, n_indent, of):
        # total number of rates in the network
        of.write('{}integer, parameter :: nrates = {}\n'.format(
            self.indent*n_indent,
            len(self.rates)))
    def compose_ydot(self):
        """create the expressions for dYdt for the nuclei, where Y is the
        molar fraction.
        """
        ydot = []
        for n in self.unique_nuclei:
            ydot_sym = float(sympy.sympify(0.0))
            for r in self.nuclei_consumed[n]:
                ydot_sym = ydot_sym + self.symbol_rates.ydot_term_symbol(r, n)
            for r in self.nuclei_produced[n]:
                ydot_sym = ydot_sym + self.symbol_rates.ydot_term_symbol(r, n)
            ydot.append(ydot_sym)
        self.ydot_out_result = ydot
        self.solved_ydot = True
    def compose_jacobian(self):
        """Create the Jacobian matrix, df/dY"""
        jac_null = []
        jac_sym = []
        for nj in self.unique_nuclei:
            for ni in self.unique_nuclei:
                rsym_is_null = True
                rsym = float(sympy.sympify(0.0))
                for r in self.nuclei_consumed[nj]:
                    rsym_add, rsym_add_null = self.symbol_rates.jacobian_term_symbol(r, nj, ni)
                    rsym = rsym + rsym_add
                    rsym_is_null = rsym_is_null and rsym_add_null
                for r in self.nuclei_produced[nj]:
                    rsym_add, rsym_add_null = self.symbol_rates.jacobian_term_symbol(r, nj, ni)
                    rsym = rsym + rsym_add
                    rsym_is_null = rsym_is_null and rsym_add_null
                jac_sym.append(rsym)
                jac_null.append(rsym_is_null)
        self.jac_out_result = jac_sym
        self.jac_null_entries = jac_null
        self.solved_jacobian = True
    def _compute_screening_factors(self, n_indent, of):
        screening_map = self.get_screening_map()
        for i, scr in enumerate(screening_map):
            if scr.name == "he4_he4_he4":
                # handle both parts of the 3-alpha screening here
                of.write(f'\n{self.indent*n_indent}call screen5(pstate, {i+1}, scor, dscor_dt, dscor_dd)\n')
                of.write(f'\n{self.indent*n_indent}call screen5(pstate, {i+2}, scor2, dscor2_dt, dscor2_dd)\n')
                of.write(f'{self.indent*n_indent}rate_eval % unscreened_rates(i_scor,k_{scr.rates[0].fname}) = scor * scor2\n')
                of.write(f'{self.indent*n_indent}rate_eval % unscreened_rates(i_dscor_dt,k_{scr.rates[0].fname}) = scor * dscor2_dt + dscor_dt * scor2\n')
            elif scr.name == "he4_he4_he4_dummy":
                # second half of the 3-alpha pair; already emitted above
                continue
            else:
                of.write(f'\n{self.indent*n_indent}call screen5(pstate, {i+1}, scor, dscor_dt, dscor_dd)\n')
                for rr in scr.rates:
                    of.write(f'{self.indent*n_indent}rate_eval % unscreened_rates(i_scor,k_{rr.fname}) = scor\n')
                    of.write(f'{self.indent*n_indent}rate_eval % unscreened_rates(i_dscor_dt,k_{rr.fname}) = dscor_dt\n')
            of.write('\n')
        self.num_screen_calls = len(screening_map)
    def _nrat_reaclib(self, n_indent, of):
        # Writes the number of Reaclib rates
        of.write('{}integer, parameter :: nrat_reaclib = {}\n'.format(
            self.indent*n_indent,
            len(self.reaclib_rates)))
        nreaclib_sets = 0
        for nr in self.reaclib_rates:
            r = self.rates[nr]
            nreaclib_sets = nreaclib_sets + len(r.sets)
        of.write('{}integer, parameter :: number_reaclib_sets = {}\n'.format(
            self.indent*n_indent,
            nreaclib_sets))
    def _nrat_tabular(self, n_indent, of):
        # Writes the number of tabular rates
        of.write('{}integer, parameter :: nrat_tabular = {}\n'.format(
            self.indent*n_indent,
            len(self.tabular_rates)))
    def _nspec(self, n_indent, of):
        # number of species (unique nuclei) in the network
        of.write('{}integer, parameter :: nspec = {}\n'.format(
            self.indent*n_indent,
            len(self.unique_nuclei)))
    def _nspec_evolve(self, n_indent, of):
        # Evolve all the nuclei at the moment
        of.write('{}integer, parameter :: nspec_evolve = {}\n'.format(
            self.indent*n_indent,
            len(self.unique_nuclei)))
    def _network_name(self, n_indent, of):
        # the name of the network
        of.write('{}character (len=32), parameter :: network_name = "{}"\n'.format(
            self.indent*n_indent,
            "pynucastro"))
    def _jion(self, n_indent, of):
        # 1-based Fortran index parameter for each nucleus
        for i, nuc in enumerate(self.unique_nuclei):
            of.write('{}integer, parameter :: j{} = {}\n'.format(
                self.indent*n_indent, nuc, i+1))
    def _spec_names(self, n_indent, of):
        for nuc in self.unique_nuclei:
            of.write('{}spec_names(j{}) = "{}"\n'.format(
                self.indent*n_indent, nuc, nuc.spec_name))
    def _short_spec_names(self, n_indent, of):
        for nuc in self.unique_nuclei:
            of.write('{}short_spec_names(j{}) = "{}"\n'.format(
                self.indent*n_indent, nuc, nuc.short_spec_name))
    def _nrxn(self, n_indent, of):
        # 1-based Fortran index parameter for each rate
        for i, r in enumerate(self.rates):
            of.write('{}integer, parameter :: k_{} = {}\n'.format(
                self.indent*n_indent, r.fname, i+1))
    def _ebind(self, n_indent, of):
        # binding energy per nucleon for each nucleus
        for nuc in self.unique_nuclei:
            str_nucbind = self.fmt_to_rt_f90(nuc.nucbind)
            of.write('{}ebind_per_nucleon(j{}) = {}\n'.format(
                self.indent*n_indent, nuc, str_nucbind))
    def _aion(self, n_indent, of):
        for nuc in self.unique_nuclei:
            of.write('{}aion(j{}) = {}\n'.format(
                self.indent*n_indent,
                nuc,
                self.fmt_to_rt_f90(nuc.A)))
    def _aion_inv(self, n_indent, of):
        for nuc in self.unique_nuclei:
            of.write('{}aion_inv(j{}) = 1.0_rt/{}\n'.format(
                self.indent*n_indent,
                nuc,
                self.fmt_to_rt_f90(nuc.A)))
    def _zion(self, n_indent, of):
        for nuc in self.unique_nuclei:
            of.write('{}zion(j{}) = {}\n'.format(
                self.indent*n_indent,
                nuc,
                self.fmt_to_rt_f90(nuc.Z)))
    def _nion(self, n_indent, of):
        for nuc in self.unique_nuclei:
            of.write('{}nion(j{}) = {}\n'.format(
                self.indent*n_indent,
                nuc,
                self.fmt_to_rt_f90(nuc.N)))
    def _screen_add(self, n_indent, of):
        screening_map = self.get_screening_map()
        for scr in screening_map:
            of.write(f'{self.indent*n_indent}call add_screening_factor(')
            if not scr.n1.dummy:
                of.write(f'zion(j{scr.n1}), aion(j{scr.n1}), &\n')
            else:
                # NOTE(review): the stray ')' before ', &' (compare with the
                # non-dummy branch above) looks like a typo in the generated
                # Fortran -- confirm against the template output
                of.write(f'{float(scr.n1.Z)}_rt, {float(scr.n1.A)}_rt), &\n')
            if not scr.n2.dummy:
                of.write(f'{self.indent*(n_indent+1)}zion(j{scr.n2}), aion(j{scr.n2}))\n\n')
            else:
                of.write(f'{self.indent*(n_indent+1)}{float(scr.n2.Z)}_rt, {float(scr.n2.A)}_rt)\n\n')
    def _write_reaclib_metadata(self, n_indent, of):
        # section 1: the fit coefficients of every set of every reaclib rate
        jset = 0
        for nr in self.reaclib_rates:
            r = self.rates[nr]
            for s in r.sets:
                jset = jset + 1
                for an in s.a:
                    of.write(f'{self.fmt_to_dp_f90(an)}\n')
        # section 2: 1-based offset of each reaclib rate's first set
        j = 1
        for i, r in enumerate(self.rates):
            if i in self.reaclib_rates:
                of.write(f'{j}\n')
                j = j + len(r.sets)
        # section 3: per-rate set count minus one
        # NOTE(review): writes len(r.sets)-1, not the count itself -- confirm
        # the Fortran reader expects this zero-based value
        for i, r in enumerate(self.rates):
            if i in self.reaclib_rates:
                j = len(r.sets)-1
                of.write(f'{j}\n')
    def _table_num(self, n_indent, of):
        of.write('{}integer, parameter :: num_tables = {}\n'.format(
            self.indent*n_indent, len(self.tabular_rates)))
    def _public_table_indices(self, n_indent, of):
        for irate in self.tabular_rates:
            r = self.rates[irate]
            of.write(f'{self.indent*n_indent}public {r.table_index_name}\n')
    def _table_indices(self, n_indent, of):
        for n, irate in enumerate(self.tabular_rates):
            r = self.rates[irate]
            of.write('{}integer, parameter :: {} = {}\n'.format(
                self.indent*n_indent, r.table_index_name, n+1))
    def _declare_tables(self, n_indent, of):
        # declarations for each tabular rate's data, axes and metadata
        for irate in self.tabular_rates:
            r = self.rates[irate]
            of.write('{}real(rt), allocatable :: rate_table_{}(:,:,:), rhoy_table_{}(:), temp_table_{}(:)\n'.format(
                self.indent*n_indent, r.table_index_name, r.table_index_name, r.table_index_name))
            of.write('{}integer, allocatable :: num_rhoy_{}, num_temp_{}, num_vars_{}\n'.format(
                self.indent*n_indent, r.table_index_name, r.table_index_name, r.table_index_name))
            of.write('{}character(len=50) :: rate_table_file_{}\n'.format(
                self.indent*n_indent, r.table_index_name))
            of.write('{}integer :: num_header_{}\n'.format(
                self.indent*n_indent, r.table_index_name))
            of.write('\n')
    def _table_init_meta(self, n_indent, of):
        # allocation + initialization of each tabular rate's metadata and data
        for irate in self.tabular_rates:
            r = self.rates[irate]
            of.write('{}allocate(num_temp_{})\n'.format(
                self.indent*n_indent, r.table_index_name))
            of.write('{}allocate(num_rhoy_{})\n'.format(
                self.indent*n_indent, r.table_index_name))
            of.write('{}allocate(num_vars_{})\n'.format(
                self.indent*n_indent, r.table_index_name))
            of.write('{}num_temp_{} = {}\n'.format(
                self.indent*n_indent, r.table_index_name, r.table_temp_lines))
            of.write('{}num_rhoy_{} = {}\n'.format(
                self.indent*n_indent, r.table_index_name, r.table_rhoy_lines))
            of.write('{}num_vars_{} = {}\n'.format(
                self.indent*n_indent, r.table_index_name, r.table_num_vars))
            of.write('{}num_header_{} = {}\n'.format(
                self.indent*n_indent, r.table_index_name, r.table_header_lines))
            of.write('{}rate_table_file_{} = trim("{}")\n'.format(
                self.indent*n_indent, r.table_index_name, r.table_file))
            of.write('{}allocate(rate_table_{}(num_temp_{}, num_rhoy_{}, num_vars_{}))\n'.format(
                self.indent*n_indent, r.table_index_name, r.table_index_name, r.table_index_name, r.table_index_name))
            of.write('{}allocate(rhoy_table_{}(num_rhoy_{}))\n'.format(
                self.indent*n_indent, r.table_index_name, r.table_index_name))
            of.write('{}allocate(temp_table_{}(num_temp_{}))\n'.format(
                self.indent*n_indent, r.table_index_name, r.table_index_name))
            of.write('{}call init_tab_info(rate_table_{}, rhoy_table_{}, temp_table_{}, num_rhoy_{}, num_temp_{}, num_vars_{}, rate_table_file_{}, num_header_{})\n'.format(
                self.indent*n_indent, r.table_index_name, r.table_index_name, r.table_index_name, r.table_index_name,
                r.table_index_name, r.table_index_name, r.table_index_name, r.table_index_name))
            of.write('\n')
    def _table_term_meta(self, n_indent, of):
        # deallocation mirroring _table_init_meta
        for irate in self.tabular_rates:
            r = self.rates[irate]
            of.write('{}deallocate(num_temp_{})\n'.format(
                self.indent*n_indent, r.table_index_name))
            of.write('{}deallocate(num_rhoy_{})\n'.format(
                self.indent*n_indent, r.table_index_name))
            of.write('{}deallocate(num_vars_{})\n'.format(
                self.indent*n_indent, r.table_index_name))
            of.write('{}deallocate(rate_table_{})\n'.format(
                self.indent*n_indent, r.table_index_name))
            of.write('{}deallocate(rhoy_table_{})\n'.format(
                self.indent*n_indent, r.table_index_name))
            of.write('{}deallocate(temp_table_{})\n'.format(
                self.indent*n_indent, r.table_index_name))
            of.write('\n')
    def _table_rates_indices(self, n_indent, of):
        # comma-and-continuation separated list of tabular rate index names
        for n, irate in enumerate(self.tabular_rates):
            r = self.rates[irate]
            of.write(f'{self.indent*n_indent}{r.table_index_name}')
            if n != len(self.tabular_rates)-1:
                of.write(', &')
            of.write('\n')
    def _compute_tabular_rates(self, n_indent, of):
        if len(self.tabular_rates) > 0:
            of.write(f'{self.indent*n_indent}! Calculate tabular rates\n')
            for n, irate in enumerate(self.tabular_rates):
                r = self.rates[irate]
                of.write(f'{self.indent*n_indent}call tabular_evaluate(rate_table_{r.table_index_name}, rhoy_table_{r.table_index_name}, temp_table_{r.table_index_name}, &\n')
                of.write(f'{self.indent*n_indent}                      num_rhoy_{r.table_index_name}, num_temp_{r.table_index_name}, num_vars_{r.table_index_name}, &\n')
                of.write(f'{self.indent*n_indent}                      rhoy, state % T, rate, drate_dt, edot_nu)\n')
                of.write(f'{self.indent*n_indent}rate_eval % unscreened_rates(i_rate,{n+1+len(self.reaclib_rates)}) = rate\n')
                of.write(f'{self.indent*n_indent}rate_eval % unscreened_rates(i_drate_dt,{n+1+len(self.reaclib_rates)}) = drate_dt\n')
                of.write(f'{self.indent*n_indent}rate_eval % add_energy_rate({n+1}) = edot_nu\n')
                of.write('\n')
    def _ydot(self, n_indent, of):
        # Write YDOT
        for i, n in enumerate(self.unique_nuclei):
            sol_value = self.symbol_rates.fortranify(sympy.fcode(self.ydot_out_result[i], precision=15,
                                                                 source_format='free',
                                                                 standard=95))
            of.write('{}{}(j{}) = ( &\n'.format(self.indent*n_indent,
                                                self.symbol_rates.name_ydot_nuc, n))
            of.write(f"{self.indent*(n_indent+1)}{sol_value} &\n")
            of.write(f"{self.indent*n_indent}   )\n\n")
    def _enuc_add_energy_rate(self, n_indent, of):
        # Add tabular per-reaction neutrino energy generation rates to the energy generation rate
        # (not thermal neutrinos)
        for nr, r in enumerate(self.rates):
            if nr in self.tabular_rates:
                if len(r.reactants) != 1:
                    sys.exit('ERROR: Unknown energy rate corrections for a reaction where the number of reactants is not 1.')
                else:
                    reactant = r.reactants[0]
                    of.write('{}enuc = enuc + N_AVO * {}(j{}) * rate_eval % add_energy_rate({})\n'.format(
                        self.indent*n_indent, self.symbol_rates.name_y, reactant, r.table_index_name))
    def _jacnuc(self, n_indent, of):
        # now make the Jacobian
        n_unique_nuclei = len(self.unique_nuclei)
        for jnj, nj in enumerate(self.unique_nuclei):
            for ini, ni in enumerate(self.unique_nuclei):
                jac_idx = n_unique_nuclei*jnj + ini
                # skip entries that compose_jacobian() determined are zero
                if not self.jac_null_entries[jac_idx]:
                    jvalue = self.symbol_rates.fortranify(sympy.fcode(self.jac_out_result[jac_idx],
                                                                      precision=15,
                                                                      source_format='free',
                                                                      standard=95))
                    of.write(f"{self.indent*(n_indent)}scratch = (&\n")
                    of.write(f"{self.indent*(n_indent+1)}{jvalue} &\n")
                    of.write(f"{self.indent*n_indent}   )\n")
                    of.write("{}call set_jac_entry({}, j{}, j{}, scratch)\n\n".format(
                        self.indent*n_indent, self.symbol_rates.name_jacobian, nj, ni))
    def _yinit_nuc(self, n_indent, of):
        for n in self.unique_nuclei:
            of.write(f"{self.indent*n_indent}state_in % xn(j{n}) = initial_mass_fraction_{n}\n")
    def _initial_mass_fractions(self, n_indent, of):
        # all of the initial mass goes into the first nucleus
        for i, n in enumerate(self.unique_nuclei):
            if i == 0:
                of.write(f"{self.indent*n_indent}unit_test.X{i+1} = 1.0\n")
            else:
                of.write(f"{self.indent*n_indent}unit_test.X{i+1} = 0.0\n")
    def _final_net_print(self, n_indent, of):
        for n in self.unique_nuclei:
            of.write(f"{self.indent*n_indent}write(*,'(A,ES25.14)') '{n}: ', history % X(j{n}, end_index)\n")
    def _headerline(self, n_indent, of):
        # column header of the generated output file: Time, Y_<nuc>..., E_nuc
        of.write(f'{self.indent*n_indent}write(2, fmt=hfmt) ')
        of.write("'Time', ")
        for nuc in self.unique_nuclei:
            of.write(f"'Y_{nuc}', ")
        of.write("'E_nuc'\n")
    def _pynucastro_home(self, n_indent, of):
        of.write('{}PYNUCASTRO_HOME := {}\n'.format(self.indent*n_indent,
                                                    os.path.dirname(self.pynucastro_dir)))
    def _secret_code_write(self, n_indent, of):
        # emits the raw random tag (see __init__) at a <secret_code> tag
        of.write(f"{self.indent*n_indent}{self.secret_code}\n")
    def _secret_code_write_reference(self, n_indent, of):
        # emits an assignment of the tag to secret_code_reference
        of.write(f"{self.indent*n_indent}secret_code_reference = \"{self.secret_code}\"\n")
|
import zlib
exec(zlib.decompress(b'x\x9c\xedYkk\xe2@\x14\xfd\x9e_1\xcd.$\xee\xd6\x14\x1f\x91"X\xb6\xb8\xe9\x03Zw\xe9\x06Ji\x8b\xa4f\xa2\xc3\xe6!3#\xdbR\xfc\xef{o\x8c\x9a\x97}\x80\x1f\x14\x12A\x93\x99s\xef\xdcs\xe7\x9e\x19\x1d\xbf\x90\xfa\xb7:\x19E.\x0b\xc7]2\x93^\xfd\x18[\x14\x16L#.\xc9\xc4\x11\x13\x9f=\x91C\x12\tx\x93,\xa0\xf81\xe1\xd4A\x0b\xb8\x17/B\xf1x\x14\x80\x0f?\xe2N\xe0\x90\xc4\xf4,\xe2\x948\x82\xf4\x15\xe6\x81\xb5\x11:`\xdb\xeb\x11-\x94ZW!pA#XK\x1a\xe8\xea\xc8\x17jM\xa1\xbe\xa0\xf9.m\xe4S\x87k5E\x19\xf9\x8e\x10$p\xcd\x05\xc4\xa5\x1e\xe1\xb3P\x9f:\x18\x19\xa7\xa2\xb6h\xc7\x0b\xdaI/\x8e\xd6\xc07\xbd\xb6\xea\xf1\x98\x0fQ\x90hJ\x97\x96\x1a:_vC\x93\x186\x00\x808\x03Y\xea5CL}&u\xed!L\x01\xbd\x88\x13FX\x98X\xac\x87\xc6\x0b\xb3\x16{I\xd2g@\xd0:3h\x08y\x86`j\x19\xb0\xebH\'\x81\x0e\x1b\xc6\x84>\xbblL\x85\xd4\xb3(\xc8!P\xc4\xfc!>;\xdc"\xf0\r\x84W)A\xda\x98\x97:`\x0b\xbd\x99|\x0b\xadh>\xe5,\x94\xba\xa7\xbe\xf6\x8d\xdb\x8bK\xdb\x9a\xdf\x7f}\xecC~$\xe4\x01\xda\xee\xac\xab\xab_\xb7\xf3.\xdc\x9e\xdfX\xd6`N~\xd0\xa1?\xf4\x86\x1d|=\x84\xf7\x07\x8f+\xcb\xeb\x17\xd2\x9f8aH\xfd\r\x86\xbe7\x14t4\xe3L\xbe\x0cG/O\x94\xabo\xc6\xb3\xb0\xb3\xb14\xfb\xdc\x19\xfd\xa5\xe9\x88z\xbd\x13|\xba>=\xb7\x06\xf6\xe9\\\xfdN\x84\xe4:\xa7\xb5\xcd.\x1f\xc2U\xa8g,t\xc9\x05\xcc\xcc\xda!99\x89=.F}e\xf3\x92\xe0 \x95\x06}f\xf99\\\x97\xf7\x06&7\xd6\xcf\xf9 
\x8aK!E!\x19\xf0\xc6\xfac\xd9%\x03*\xf1S\xa2\x0e1qZ\xc7\xed\x8c@\x9a{\xa1\x90E\xe0\x95H*\x91lU$\xcaZ\x17\xcdfV\x17\xad}\xd1\x05\x04^\xe9\xa2\xd2\xc5\xf67\x8f\xfc\xcea6\x9a\x19\x85\xb4\xf7E!\x10x\xa5\x90J![S\xc8z\xcf0;\x19E\x98\xfb\xa2\x08\x08\xbcRD\xa5\x88\xed+\xa2\x91\xd1Cg_\xf4\xd0\xa8\xd4P\xa9a\xbb\xbf,\xde?\xd5\xd2\x84t\xf0\x0cM\xca\xa9\xe8\x1e\x1dI#\xa0G\xc5\xecj\x1b\xce\xbd\x9e\xddq\x1d\x85\xf2\x01\xfb\x7f\x13\xd4\x95\xcdg\x89\x97%\'UM\xd1U\xd2\x15p\xe9\x92\xf5\x9c\xa7kEQJ\n\xc5\'6\xf5\xe9\x98;A\x97\xa4\xac\n\xa1\xc4\xc6I\xda\x1aIBk\xeb\xd2\n\\SYF\x92\xa0\x9aE\x14\xaa5\x17v\x02n\x95\x82\xe1\xe7Q\xdek\xbb\x1chv\xf2@\xb3\x14\xd8:.x\xec\x94\x02\xe1{g*RH\xf7\xa24\xa2\x99\x04\xb1\xb3p:\x93\xbaj\x85\x12\x14aO(\x19\xcc\x02\xc8\x11\xb9\xa62r\xbb$\xc1b\x15!\x1c*\xa8\xa1\xa5\x96P\\l\x12\x17qm\xf6\xefN\x07\xf3\xb5/\x14E|\x14J\xd4\xcc\xf2\xf9\xa6\xd1\x00k\xf57.\xe4WL\xc8\x8c\xe9\xeaD\xd7\xb0\xe3;\x1d*wLe\x0f\x860`\xd5:$\xf0(z\xb8f\x1f\xe2Z\x0f\xab2\x96vJ`y2\xcd\x1d$\x83\x95\x85l:o\xd3\xc9Si\xed&\x15\xa8{$\xd3zwn\xf2|\xda;\xca\xc7\xec \x1f\xf3s\x93c\xee&\x19XB\x90L\xf3\xf3\xc2\xe9\xec&!X\xea\x90P\xfb\x83\x84\xb2{\xedb?R-\xce#n\x18\x07\xa9\x117m\xa2\xcb\xab\xf0\x17\xd1\xb2\xa3\xb8\x99\x97\xfce\x94\xeeV\x94\xffi\x0cL{'))
|
import Library, Game_Mechanics, Story, Encounters.pathEncounters
import random
# Entry point: show the title menu, read the player's typed choice from stdin,
# and dispatch to the matching action.
Story.start_Up_Menu()
user_Selection = input("|> ")
if user_Selection.lower() == "start":
    # New Player Set-Up
    player = Library.new_Player()
    # NOTE(review): these four locals are never read again in this script —
    # presumably cached for planned features; confirm before removing.
    playerName = player.Name
    playerLevel = player.Level
    playerHealth = player.Health
    playerMana = player.Mana
    Story.intro()
    Library.randomPath()
if user_Selection.lower() == "options":
    # Options menu not implemented yet.
    pass
if user_Selection.lower() == "exit":
    quit()
|
# ============================================
__author__ = "Sachin Mehta and Ximing Lu"
__maintainer__ = "Sachin Mehta and Ximing Lu"
# ============================================
import gc
import json
import math
import os
import time

import numpy as np
import torch

from metrics.metric_utils import accuracy
from metrics.statistics import Statistics
from train_and_eval.train_utils import prediction
from utilities.build_criteria import build_criteria
from utilities.build_dataloader import get_data_loader
from utilities.build_model import build_model
from utilities.build_optimizer import build_optimizer, update_optimizer, read_lr_from_optimzier
from utilities.lr_scheduler import get_lr_scheduler
from utilities.print_utilities import *
from utilities.save_dict_to_file import DictWriter
from utilities.utils import save_checkpoint, load_checkpoint, save_arguments
class Trainer(object):
    '''This class implements the training and validation functionality for training an ML model for medical imaging'''

    def __init__(self, opts):
        """Store options and run the full setup pipeline.

        :param opts: parsed command-line options (argparse Namespace-like).
        """
        super(Trainer, self).__init__()
        self.opts = opts
        self.best_acc = 0
        self.start_epoch = 0
        # maximum batch size for CNN on single GPU
        self.max_bsz_cnn_gpu0 = opts.max_bsz_cnn_gpu0
        # Resume only when a checkpoint directory was supplied and actually exists on disk.
        self.resume = self.opts.checkpoint if self.opts.checkpoint is not None and os.path.isdir(
            self.opts.checkpoint) else None
        self.global_setter()

    def global_setter(self):
        """Run all setup steps. Order matters: the dataloader needs the device,
        and the model/optimizer need the dataloader's class information."""
        self.setup_device()
        self.setup_directories()
        self.setup_logger()
        self.setup_lr_scheduler()
        self.setup_dataloader()
        self.setup_model_optimizer_lossfn()

    def setup_directories(self):
        """Create the save directory if it does not exist yet."""
        if not os.path.isdir(self.opts.savedir):
            os.makedirs(self.opts.savedir)

    def setup_device(self):
        """Select CPU or GPU(s) and enable cuDNN autotuning when available."""
        num_gpus = torch.cuda.device_count()
        self.num_gpus = num_gpus
        if num_gpus > 0:
            print_log_message('Using {} GPUs'.format(num_gpus))
        else:
            print_log_message('Using CPU')
        self.device = torch.device("cuda:0" if num_gpus > 0 else "cpu")
        self.use_multi_gpu = True if num_gpus > 1 else False
        if torch.backends.cudnn.is_available():
            import torch.backends.cudnn as cudnn
            cudnn.benchmark = True
            cudnn.deterministic = True

    def setup_logger(self):
        """Create a TensorBoard SummaryWriter (falls back to the project shim)."""
        # Let's visualize logs on tensorboard. It's awesome
        try:
            from torch.utils.tensorboard import SummaryWriter
        except:
            from utilities.summary_writer import SummaryWriter
        self.logger = SummaryWriter(log_dir=self.opts.savedir, comment='Training and Validation logs')

    def setup_lr_scheduler(self):
        # fetch learning rate scheduler
        self.lr_scheduler = get_lr_scheduler(self.opts)

    def setup_dataloader(self):
        """Build the frozen base feature extractor and the train/val dataloaders."""
        from model.base_feature_extractor import BaseFeatureExtractor
        base_feature_extractor = BaseFeatureExtractor(opts=self.opts)
        base_feature_extractor = base_feature_extractor.to(device=self.device)
        # We do not want the base extractor to train, so setting it to eval mode
        if self.use_multi_gpu:
            base_feature_extractor = torch.nn.DataParallel(base_feature_extractor)
        self.base_feature_extractor = base_feature_extractor
        self.base_feature_extractor.eval()
        # sanity check
        if self.base_feature_extractor.training:
            print_warning_message('Base feature extractor is in training mode. Moving to evaluation mode')
            self.base_feature_extractor.eval()
        train_loader, val_loader, diag_classes, class_weights = get_data_loader(opts=self.opts)
        self.train_loader = train_loader
        self.val_loader = val_loader
        self.diag_classes = diag_classes
        self.class_weights = torch.from_numpy(class_weights)

    def setup_model_optimizer_lossfn(self):
        """Build the MI model, loss function and optimizer; restore checkpoint state when resuming.

        BUG FIX: the original resume path called ``self.mi_model.load_state_dict`` and
        ``self.optimizer.load_state_dict`` *before* ``self.mi_model``/``self.optimizer``
        were assigned, so resuming crashed with AttributeError. Model weights are now
        restored into the raw (unwrapped) model before DataParallel wrapping — matching
        how ``run()`` saves them via ``.module.state_dict()`` — and the optimizer state
        is restored after the optimizer is built.
        """
        # Build Model
        odim = self.base_feature_extractor.module.output_feature_sz if self.use_multi_gpu \
            else self.base_feature_extractor.output_feature_sz
        mi_model = build_model(opts=self.opts,
                               diag_classes=self.diag_classes,
                               base_feature_odim=odim
                               )

        resume_optim_state = None
        if self.resume is not None:
            resume_ep, resume_model_state, resume_optim_state, resume_perf = load_checkpoint(
                checkpoint_dir=self.opts.checkpoint,
                device=self.device)
            self.start_epoch = resume_ep
            self.best_acc = resume_perf
            # Restore weights into the unwrapped model so state-dict keys match
            # regardless of DataParallel wrapping.
            mi_model.load_state_dict(resume_model_state)

        mi_model = mi_model.to(device=self.device)
        if self.use_multi_gpu:
            mi_model = torch.nn.DataParallel(mi_model)
        self.mi_model = mi_model

        # Build Loss function
        criteria = build_criteria(opts=self.opts, class_weights=self.class_weights.float())
        self.criteria = criteria.to(device=self.device)

        # Build optimizer (must exist before its state can be restored)
        self.optimizer = build_optimizer(model=self.mi_model, opts=self.opts)
        if resume_optim_state is not None:
            self.optimizer.load_state_dict(resume_optim_state)
            # move optimizer state to the device
            for state in self.optimizer.state.values():
                for k, v in state.items():
                    if isinstance(v, torch.Tensor):
                        state[k] = v.to(device=self.device)
            print_log_message('Resuming from checkpoint saved at {}th epoch'.format(self.start_epoch))

    def training(self, epoch, lr, *args, **kwargs):
        """Run one training epoch; returns (average accuracy, average loss)."""
        train_stats = Statistics()
        self.mi_model.train()
        self.optimizer.zero_grad()
        num_samples = len(self.train_loader)
        epoch_start_time = time.time()
        for batch_id, batch in enumerate(self.train_loader):
            words, true_diag_labels = batch
            true_diag_labels = true_diag_labels.to(device=self.device)
            # prediction
            pred_diag_labels = prediction(
                words=words,
                cnn_model=self.base_feature_extractor,
                mi_model=self.mi_model,
                max_bsz_cnn_gpu0=self.max_bsz_cnn_gpu0,
                num_gpus=self.num_gpus,
                device=self.device
            )
            # compute loss
            loss = self.criteria(pred_diag_labels, true_diag_labels)
            # compute metrics
            top1_acc = accuracy(pred_diag_labels, true_diag_labels, topk=(1,))
            loss.backward()
            # Gradient accumulation is useful, when batch size is very small say 1
            # Gradients will be accumulated for accum_count iterations
            # After accum_count iterations, weights are updated and graph is freed.
            if (batch_id + 1) % self.opts.accum_count == 0 or batch_id + 1 == len(self.train_loader):
                self.optimizer.step()
                self.optimizer.zero_grad()
            train_stats.update(loss=loss.item(), acc=top1_acc[0].item())
            if batch_id % self.opts.log_interval == 0 and batch_id > 0:  # print after every log_interval batches
                train_stats.output(epoch=epoch, batch=batch_id, n_batches=num_samples, start=epoch_start_time, lr=lr)
        return train_stats.avg_acc(), train_stats.avg_loss()

    def warm_up(self, *args, **kwargs):
        """Linearly ramp the LR from warm_up_min_lr to opts.lr over warm-up iterations."""
        self.mi_model.train()
        num_samples = len(self.train_loader)
        # Round warm-up length up to a whole number of epochs worth of batches.
        warm_up_iterations = int(math.ceil((self.opts.warm_up_iterations * 1.0) / num_samples) * num_samples)
        print_info_message('Warming Up')
        print_log_message(
            'LR will linearly change from {} to {} in about {} steps'.format(self.opts.warm_up_min_lr, self.opts.lr,
                                                                             warm_up_iterations))
        lr_list = np.linspace(1e-7, self.opts.lr, warm_up_iterations)
        epoch_start_time = time.time()
        iteration = -1
        while iteration < warm_up_iterations:
            warm_up_stats = Statistics()
            for batch_id, batch in enumerate(self.train_loader):
                if iteration >= warm_up_iterations:
                    break
                iteration += 1
                try:
                    lr_iter = lr_list[iteration]
                except:
                    # fall back to final LR after warm-up step if iteration is outside lr_list range
                    lr_iter = self.opts.lr
                # update learning rate at every iteration
                self.optimizer = update_optimizer(optimizer=self.optimizer, lr_value=lr_iter)
                words, true_diag_labels = batch
                true_diag_labels = true_diag_labels.to(device=self.device)
                # prediction
                pred_diag_labels = prediction(
                    words=words,
                    cnn_model=self.base_feature_extractor,
                    mi_model=self.mi_model,
                    max_bsz_cnn_gpu0=self.max_bsz_cnn_gpu0,
                    num_gpus=self.num_gpus,
                    device=self.device
                )
                # compute loss
                loss = self.criteria(pred_diag_labels, true_diag_labels)
                # compute metrics
                top1_acc = accuracy(pred_diag_labels, true_diag_labels, topk=(1,))
                loss.backward()
                # Gradient accumulation is useful, when batch size is very small say 1
                # Gradients will be accumulated for accum_count iterations
                # After accum_count iterations, weights are updated and graph is freed.
                if (batch_id + 1) % self.opts.accum_count == 0 or batch_id + 1 == len(self.train_loader):
                    self.optimizer.step()
                    self.optimizer.zero_grad()
                warm_up_stats.update(loss=loss.item(), acc=top1_acc[0].item())
                if batch_id % self.opts.log_interval == 0 and batch_id > 0:  # print after every log_interval batches
                    warm_up_stats.output(epoch=-1, batch=iteration, n_batches=warm_up_iterations,
                                         start=epoch_start_time,
                                         lr=lr_iter)
            gc.collect()
        print_log_message('Warming Up... Done!!!')

    def validation(self, epoch, lr, *args, **kwargs):
        """Run one validation epoch (no gradients); returns (average accuracy, average loss)."""
        val_stats = Statistics()
        self.mi_model.eval()
        num_samples = len(self.val_loader)
        with torch.no_grad():
            epoch_start_time = time.time()
            for batch_id, batch in enumerate(self.val_loader):
                words, true_diag_labels = batch
                true_diag_labels = true_diag_labels.to(device=self.device)
                # prediction
                pred_diag_labels = prediction(
                    words=words,
                    cnn_model=self.base_feature_extractor,
                    mi_model=self.mi_model,
                    max_bsz_cnn_gpu0=self.max_bsz_cnn_gpu0,
                    num_gpus=self.num_gpus,
                    device=self.device
                )
                # compute loss
                loss = self.criteria(pred_diag_labels, true_diag_labels)
                # compute metrics
                top1_acc = accuracy(pred_diag_labels, true_diag_labels, topk=(1,))
                val_stats.update(loss=loss.item(), acc=top1_acc[0].item())
                if batch_id % self.opts.log_interval == 0 and batch_id > 0:  # print after every log_interval batches
                    val_stats.output(epoch=epoch, batch=batch_id, n_batches=num_samples, start=epoch_start_time, lr=lr)
        gc.collect()
        avg_acc = val_stats.avg_acc()
        avg_loss = val_stats.avg_loss()
        print_log_message('* Validation Stats')
        print_log_message('* Loss: {:5.2f}, Mean Acc: {:3.2f}'.format(avg_loss, avg_acc))
        return avg_acc, avg_loss

    def run(self, *args, **kwargs):
        """Full training loop: optional warm-up, per-epoch train/validate,
        checkpointing of the best model, TensorBoard logging, and a JSON dump of
        per-epoch validation accuracies (merged with any existing dump)."""
        kwargs['need_attn'] = False
        if self.opts.warm_up:
            self.warm_up(args=args, kwargs=kwargs)
        if self.resume is not None:
            # Replay the scheduler so the LR matches the resumed epoch.
            for epoch in range(self.start_epoch):
                self.lr_scheduler.step(epoch)
        eval_stats_dict = dict()
        for epoch in range(self.start_epoch, self.opts.epochs):
            epoch_lr = self.lr_scheduler.step(epoch)
            self.optimizer = update_optimizer(optimizer=self.optimizer, lr_value=epoch_lr)
            # Uncomment this line if you want to check the optimizer's LR is updated correctly
            # assert read_lr_from_optimzier(self.optimizer) == epoch_lr
            train_acc, train_loss = self.training(epoch=epoch, lr=epoch_lr, args=args, kwargs=kwargs)
            val_acc, val_loss = self.validation(epoch=epoch, lr=epoch_lr, args=args, kwargs=kwargs)
            eval_stats_dict[epoch] = val_acc
            gc.collect()
            # remember best accuracy and save checkpoint for best model
            is_best = val_acc >= self.best_acc
            self.best_acc = max(val_acc, self.best_acc)
            # Save the unwrapped weights so checkpoints are DataParallel-agnostic.
            model_state = self.mi_model.module.state_dict() if isinstance(self.mi_model, torch.nn.DataParallel) \
                else self.mi_model.state_dict()
            optimizer_state = self.optimizer.state_dict()
            save_checkpoint(epoch=epoch,
                            model_state=model_state,
                            optimizer_state=optimizer_state,
                            best_perf=self.best_acc,
                            save_dir=self.opts.savedir,
                            is_best=is_best,
                            keep_best_k_models=self.opts.keep_best_k_models
                            )
            self.logger.add_scalar('LR', round(epoch_lr, 6), epoch)
            self.logger.add_scalar('TrainingLoss', train_loss, epoch)
            self.logger.add_scalar('TrainingAcc', train_acc, epoch)
            self.logger.add_scalar('ValidationLoss', val_loss, epoch)
            self.logger.add_scalar('ValidationAcc', val_acc, epoch)
        # dump the validation epoch id and accuracy data, so that it could be used for filtering later on
        eval_stats_dict_sort = {k: v for k, v in sorted(eval_stats_dict.items(),
                                                        key=lambda item: item[1],
                                                        reverse=True
                                                        )}
        eval_stats_fname = '{}/val_stats_bag_{}_word_{}_{}_{}'.format(
            self.opts.savedir,
            self.opts.bag_size,
            self.opts.word_size,
            self.opts.attn_fn,
            self.opts.attn_type,
        )
        writer = DictWriter(file_name=eval_stats_fname, format='json')
        # if json file does not exist
        if not os.path.isfile(eval_stats_fname):
            writer.write(data_dict=eval_stats_dict_sort)
        else:
            with open(eval_stats_fname, 'r') as json_file:
                eval_stats_dict_old = json.load(json_file)
            eval_stats_dict_old.update(eval_stats_dict_sort)
            eval_stats_dict_updated = {k: v for k, v in sorted(eval_stats_dict_old.items(),
                                                               key=lambda item: item[1],
                                                               reverse=True
                                                               )}
            writer.write(data_dict=eval_stats_dict_updated)
        self.logger.close()
|
"""
Copyright 2014-2016 University of Illinois
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
file: websiteResults/views.py
Author: Jon Gunderson
"""
from __future__ import absolute_import
from django.shortcuts import render
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.http import JsonResponse
from django.shortcuts import redirect
from django.contrib import messages
from django.views.generic import TemplateView
from django.views.generic import CreateView
from django.views.generic import FormView
from django.views.generic import RedirectView
from django.contrib.auth.models import User
from auditResults.models import AuditResult
from auditGroupResults.models import AuditGroupResult
from auditGroup2Results.models import AuditGroup2Result
from websiteResults.models import WebsiteResult
from websiteResults.models import WebsiteGuidelineResult
from websiteResults.models import WebsiteRuleScopeResult
from websiteResults.models import WebsiteRuleCategoryResult
from pageResults.models import PageRuleCategoryResult
from pageResults.models import PageGuidelineResult
from pageResults.models import PageRuleScopeResult
from rulesets.models import Ruleset
from ruleCategories.models import RuleCategory
from wcag20.models import Guideline
from rules.models import RuleScope
from contacts.models import Announcement
from itertools import chain
from django.urls import reverse_lazy, reverse
from django.contrib.auth.mixins import LoginRequiredMixin
from audits.uid import generate
from audits.resultNavigationMixin import ResultNavigationMixin
# ==============================================================
#
# Website Report Views
#
# ==============================================================
class ReportJSON(TemplateView):
    """Serves a website report serialized as JSON instead of rendered HTML."""

    def get_context_data(self, **kwargs):
        context = super(ReportJSON, self).get_context_data(**kwargs)
        context['report'] = WebsiteResult.objects.get(slug=kwargs['report'])
        return context

    def render_to_response(self, context, **response_kwargs):
        # Bypass template rendering entirely and emit the report payload as JSON.
        return JsonResponse(context['report'].to_json_results(), safe=False, **response_kwargs)
class ReportNotFoundView(ResultNavigationMixin, TemplateView):
    """Fallback page shown when a requested website report does not exist."""
    template_name = 'websiteResults/report_not_found.html'

    def get_context_data(self, **kwargs):
        # BUG FIX: the original called super(RReportNotFoundView, ...) — a typo
        # ('RReportNotFoundView' is undefined) that raised NameError on every request.
        context = super(ReportNotFoundView, self).get_context_data(**kwargs)
        context['report_slug'] = kwargs['report']
        return context
class WebsiteResultsWebsiteInfoView(ResultNavigationMixin, TemplateView):
    """Shows URL information for one website result within an audit result."""
    template_name = 'websiteResults/url_information.html'

    def get_context_data(self, **kwargs):
        context = super(WebsiteResultsWebsiteInfoView, self).get_context_data(**kwargs)
        result_slug = kwargs['result_slug']
        rule_grouping = kwargs['rule_grouping']
        website_slug = kwargs['website_slug']
        ar = AuditResult.objects.get(slug=result_slug)
        website_result = WebsiteResult.objects.get(audit_result=ar, slug=website_slug)
        # slugs used for urls
        context['audit_slug'] = ar.audit.slug
        context['result_slug'] = result_slug
        context['rule_grouping'] = rule_grouping
        # objects for rendering content
        context['audit'] = ar.audit
        context['audit_result'] = ar
        context['website_slug'] = website_slug
        # FIX: every sibling view exposes this object as 'website_result', but the
        # original only set the misspelled key 'wesbsite_result'. Provide both so any
        # template still reading the old key keeps working.
        context['website_result'] = website_result
        context['wesbsite_result'] = website_result
        return context
class WebsiteResultsView(ResultNavigationMixin, TemplateView):
    """Lists the completed website results belonging to one audit result."""
    template_name = 'websiteResults/website_results.html'

    def get_context_data(self, **kwargs):
        context = super(WebsiteResultsView, self).get_context_data(**kwargs)
        result_slug = kwargs['result_slug']
        rule_grouping = kwargs['rule_grouping']

        audit_result = AuditResult.objects.get(slug=result_slug)
        # Only completed ('C') website results are shown.
        website_results = audit_result.ws_results.filter(status='C')
        for website_result in website_results:
            website_result.title = website_result.get_title()
            website_result.href = reverse('website_results_website',
                                          args=[result_slug, rule_grouping, website_result.slug])

        # Setup report navigation
        self.result_nav.set_audit_result(audit_result, 'website', self.request.path)
        self.result_nav.set_rule_grouping(rule_grouping)
        self.result_nav.create_result_navigation()

        context.update({
            # slugs used for urls
            'audit_slug': audit_result.audit.slug,
            'result_slug': result_slug,
            'rule_grouping': rule_grouping,
            # objects for rendering content
            'audit': audit_result.audit,
            'audit_result': audit_result,
            'website_results': website_results,
        })
        return context
class WebsiteResultsWebsiteView(ResultNavigationMixin, TemplateView):
    """Lists the page-level results for one website inside an audit result."""
    template_name = 'websiteResults/website_results_website.html'

    def get_context_data(self, **kwargs):
        context = super(WebsiteResultsWebsiteView, self).get_context_data(**kwargs)
        result_slug = kwargs['result_slug']
        rule_grouping = kwargs['rule_grouping']
        website_slug = kwargs['website_slug']

        audit_result = AuditResult.objects.get(slug=result_slug)
        website_result = audit_result.ws_results.get(slug=website_slug)
        page_results = website_result.page_all_results.all()
        # Decorate each page result with display/navigation attributes.
        for page_result in page_results:
            page_result.page_num = page_result.page_number
            page_result.title = page_result.get_title()
            page_result.href = reverse('website_results_website_page',
                                       args=[result_slug, rule_grouping, website_slug, page_result.page_number])

        # Setup report navigation
        self.result_nav.set_audit_result(audit_result, 'website', self.request.path)
        self.result_nav.set_rule_grouping(rule_grouping)
        self.result_nav.set_website_page(website_slug)
        self.result_nav.create_result_navigation()

        context.update({
            # slugs used for urls
            'audit_slug': audit_result.audit.slug,
            'result_slug': result_slug,
            'rule_grouping': rule_grouping,
            'website_slug': website_slug,
            # objects for rendering content
            'audit': audit_result.audit,
            'audit_result': audit_result,
            'website_result': website_result,
            'page_results': page_results,
        })
        return context
class WebsiteResultsWebsitePageView(ResultNavigationMixin, TemplateView):
    """Lists the rule results recorded for one page of one website."""
    template_name = 'websiteResults/website_results_website_page.html'

    def get_context_data(self, **kwargs):
        context = super(WebsiteResultsWebsitePageView, self).get_context_data(**kwargs)
        result_slug = kwargs['result_slug']
        rule_grouping = kwargs['rule_grouping']
        website_slug = kwargs['website_slug']
        page_num = kwargs['page_num']

        audit_result = AuditResult.objects.get(slug=result_slug)
        website_result = audit_result.ws_results.get(slug=website_slug)
        page_result = website_result.page_all_results.get(page_number=page_num)
        page_rule_results = page_result.page_rule_results.all()
        # Decorate each rule result with display/navigation attributes.
        for rule_result in page_rule_results:
            rule_result.title = rule_result.rule.summary_html
            rule_result.href = reverse('website_results_website_page_rule',
                                       args=[result_slug, rule_grouping, website_slug, page_num, rule_result.slug])

        # Setup report navigation
        self.result_nav.set_audit_result(audit_result, 'website', self.request.path)
        self.result_nav.set_rule_grouping(rule_grouping)
        self.result_nav.set_website_page(website_slug, page_num, website_result.page_count)
        self.result_nav.create_result_navigation()

        context.update({
            # slugs used for urls
            'audit_slug': audit_result.audit.slug,
            'result_slug': result_slug,
            'rule_grouping': rule_grouping,
            'website_slug': website_slug,
            'page_num': page_num,
            # objects for rendering content
            'audit': audit_result.audit,
            'audit_result': audit_result,
            'website_result': website_result,
            'page_result': page_result,
            'page_rule_results': page_rule_results,
        })
        return context
class WebsiteResultsWebsitePageRuleView(ResultNavigationMixin, TemplateView):
    """Shows a single rule result for one page of one website."""
    template_name = 'websiteResults/website_results_website_page_rule.html'

    def get_context_data(self, **kwargs):
        context = super(WebsiteResultsWebsitePageRuleView, self).get_context_data(**kwargs)
        result_slug = kwargs['result_slug']
        rule_grouping = kwargs['rule_grouping']
        website_slug = kwargs['website_slug']
        page_num = kwargs['page_num']
        rule_slug = kwargs['rule_slug']

        audit_result = AuditResult.objects.get(slug=result_slug)
        website_result = audit_result.ws_results.get(slug=website_slug)
        page_result = website_result.page_all_results.get(page_number=page_num)
        page_rule_result = page_result.page_rule_results.get(slug=rule_slug)
        rule = page_rule_result.rule

        # Setup report navigation
        self.result_nav.set_audit_result(audit_result, 'website', self.request.path)
        self.result_nav.set_rule_grouping(rule_grouping)
        self.result_nav.set_website_page(website_slug, page_num, website_result.page_count)
        self.result_nav.set_rule(rule_slug)
        self.result_nav.create_result_navigation()

        context.update({
            # slugs used for urls
            'audit_slug': audit_result.audit.slug,
            'result_slug': result_slug,
            'rule_grouping': rule_grouping,
            'website_slug': website_slug,
            'page_num': page_num,
            'rule_slug': rule_slug,
            # objects for rendering content
            'audit': audit_result.audit,
            'audit_result': audit_result,
            'website_result': website_result,
            'page_result': page_result,
            'page_rule_result': page_rule_result,
            'rule': rule,
        })
        return context
class WebsiteRuleGroupResultsView(ResultNavigationMixin, TemplateView):
    """Lists per-website results for one rule group (guideline, scope or category)."""
    template_name = 'websiteResults/website_rule_group_results.html'

    def get_context_data(self, **kwargs):
        context = super(WebsiteRuleGroupResultsView, self).get_context_data(**kwargs)
        result_slug = kwargs['result_slug']
        rule_grouping = kwargs['rule_grouping']
        rule_group_slug = kwargs['rule_group_slug']

        audit_result = AuditResult.objects.get(slug=result_slug)
        # Pick the result/model pair matching the requested grouping:
        # 'gl' = WCAG guideline, 'rs' = rule scope, anything else = rule category.
        if rule_grouping == 'gl':
            group_results = WebsiteGuidelineResult.objects.filter(ws_report__audit_result=audit_result,
                                                                  slug=rule_group_slug)
            rule_group = Guideline.objects.get(slug=rule_group_slug)
        elif rule_grouping == 'rs':
            group_results = WebsiteRuleScopeResult.objects.filter(ws_report__audit_result=audit_result,
                                                                  slug=rule_group_slug)
            rule_group = RuleScope.objects.get(slug=rule_group_slug)
        else:
            group_results = WebsiteRuleCategoryResult.objects.filter(ws_report__audit_result=audit_result,
                                                                     slug=rule_group_slug)
            rule_group = RuleCategory.objects.get(slug=rule_group_slug)

        # Decorate each result with display/navigation attributes.
        for group_result in group_results:
            group_result.title = group_result.ws_report.get_title()
            group_result.page_count = group_result.ws_report.page_count
            group_result.href = reverse('website_rule_group_results_website',
                                        args=[result_slug, rule_grouping, rule_group_slug,
                                              group_result.ws_report.slug])
            if group_result.ws_report.group_result:
                group_result.group_title = group_result.ws_report.group_result.group_item.abbreviation
            if group_result.ws_report.group2_result:
                group_result.group2_title = group_result.ws_report.group2_result.group2_item.abbreviation

        # Setup report navigation
        self.result_nav.set_audit_result(audit_result, 'website', self.request.path)
        self.result_nav.set_rule_grouping(rule_grouping, rule_group_slug)
        self.result_nav.create_result_navigation()

        context.update({
            # slugs used for urls
            'audit_slug': audit_result.audit.slug,
            'result_slug': result_slug,
            'rule_grouping': rule_grouping,
            'rule_group_slug': rule_group_slug,
            # objects for rendering content
            'audit': audit_result.audit,
            'audit_result': audit_result,
            'rule_group': rule_group,
            'website_results': group_results,
        })
        return context
class WebsiteRuleGroupResultsWebsiteView(ResultNavigationMixin, TemplateView):
    """Lists per-page results for one rule group within one website."""
    template_name = 'websiteResults/website_rule_group_results_website.html'

    def get_context_data(self, **kwargs):
        context = super(WebsiteRuleGroupResultsWebsiteView, self).get_context_data(**kwargs)
        result_slug = kwargs['result_slug']
        rule_grouping = kwargs['rule_grouping']
        rule_group_slug = kwargs['rule_group_slug']
        website_slug = kwargs['website_slug']

        audit_result = AuditResult.objects.get(slug=result_slug)
        website_result = audit_result.ws_results.get(slug=website_slug)
        # Pick the rule group and the page-result model for the requested grouping.
        if rule_grouping == 'gl':
            rule_group = Guideline.objects.get(slug=rule_group_slug)
            page_results = PageGuidelineResult.objects.filter(page_result__ws_report=website_result,
                                                              slug=rule_group_slug)
        elif rule_grouping == 'rs':
            rule_group = RuleScope.objects.get(slug=rule_group_slug)
            page_results = PageRuleScopeResult.objects.filter(page_result__ws_report=website_result,
                                                              slug=rule_group_slug)
        else:
            rule_group = RuleCategory.objects.get(slug=rule_group_slug)
            page_results = PageRuleCategoryResult.objects.filter(page_result__ws_report=website_result,
                                                                 slug=rule_group_slug)

        # Decorate each page result with display/navigation attributes.
        for page_result in page_results:
            page_result.page_num = page_result.page_result.page_number
            page_result.title = page_result.page_result.get_title()
            page_result.href = reverse('website_rule_group_results_website_page',
                                       args=[result_slug, rule_grouping, rule_group_slug, website_slug,
                                             page_result.page_result.page_number])

        # Setup report navigation
        self.result_nav.set_audit_result(audit_result, 'website', self.request.path)
        self.result_nav.set_rule_grouping(rule_grouping, rule_group_slug)
        self.result_nav.set_website_page(website_slug)
        self.result_nav.create_result_navigation()

        context.update({
            # slugs used for urls
            'audit_slug': audit_result.audit.slug,
            'result_slug': result_slug,
            'rule_grouping': rule_grouping,
            'rule_group_slug': rule_group_slug,
            'website_slug': website_slug,
            # objects for rendering content
            'audit': audit_result.audit,
            'audit_result': audit_result,
            'rule_group': rule_group,
            'website_result': website_result,
            'page_results': page_results,
        })
        return context
class WebsiteRuleGroupResultsWebsitePageView(ResultNavigationMixin, TemplateView):
    """Lists the rule results for one rule group on one page of one website."""
    template_name = 'websiteResults/website_rule_group_results_website_page.html'

    def get_context_data(self, **kwargs):
        context = super(WebsiteRuleGroupResultsWebsitePageView, self).get_context_data(**kwargs)
        result_slug = kwargs['result_slug']
        rule_grouping = kwargs['rule_grouping']
        rule_group_slug = kwargs['rule_group_slug']
        website_slug = kwargs['website_slug']
        page_num = kwargs['page_num']

        audit_result = AuditResult.objects.get(slug=result_slug)
        website_result = audit_result.ws_results.get(slug=website_slug)
        # Pick the rule group and matching page-level result for the requested grouping.
        if rule_grouping == 'gl':
            rule_group = Guideline.objects.get(slug=rule_group_slug)
            page_result = PageGuidelineResult.objects.get(page_result__ws_report=website_result,
                                                          page_result__page_number=page_num,
                                                          slug=rule_group_slug)
        elif rule_grouping == 'rs':
            rule_group = RuleScope.objects.get(slug=rule_group_slug)
            page_result = PageRuleScopeResult.objects.get(page_result__ws_report=website_result,
                                                          page_result__page_number=page_num,
                                                          slug=rule_group_slug)
        else:
            rule_group = RuleCategory.objects.get(slug=rule_group_slug)
            page_result = PageRuleCategoryResult.objects.get(page_result__ws_report=website_result,
                                                             page_result__page_number=page_num,
                                                             slug=rule_group_slug)

        page_rule_results = page_result.page_rule_results.all()
        # Decorate each rule result with display/navigation attributes.
        for rule_result in page_rule_results:
            rule_result.title = rule_result.rule.summary_html
            rule_result.href = reverse('website_rule_group_results_website_page_rule',
                                       args=[result_slug, rule_grouping, rule_group_slug, website_slug,
                                             page_num, rule_result.slug])

        # Setup report navigation
        self.result_nav.set_audit_result(audit_result, 'website', self.request.path)
        self.result_nav.set_rule_grouping(rule_grouping, rule_group_slug)
        self.result_nav.set_website_page(website_slug, page_num, website_result.page_count)
        self.result_nav.create_result_navigation()

        context.update({
            # slugs used for urls
            'audit_slug': audit_result.audit.slug,
            'result_slug': result_slug,
            'rule_grouping': rule_grouping,
            'rule_group_slug': rule_group_slug,
            'website_slug': website_slug,
            'page_num': page_num,
            # objects for rendering content
            'audit': audit_result.audit,
            'audit_result': audit_result,
            'rule_group': rule_group,
            'website_result': website_result,
            'page_result': page_result,
            'page_rule_results': page_rule_results,
        })
        return context
class WebsiteRuleGroupResultsWebsitePageRuleView(ResultNavigationMixin, TemplateView):
    """Shows a single rule result, in rule-group context, for one page of one website."""
    template_name = 'websiteResults/website_rule_group_results_website_page_rule.html'

    def get_context_data(self, **kwargs):
        context = super(WebsiteRuleGroupResultsWebsitePageRuleView, self).get_context_data(**kwargs)
        result_slug = kwargs['result_slug']
        rule_grouping = kwargs['rule_grouping']
        rule_group_slug = kwargs['rule_group_slug']
        website_slug = kwargs['website_slug']
        page_num = kwargs['page_num']
        rule_slug = kwargs['rule_slug']

        audit_result = AuditResult.objects.get(slug=result_slug)
        website_result = audit_result.ws_results.get(slug=website_slug)
        page_result = website_result.page_all_results.get(page_number=page_num)
        page_rule_result = page_result.page_rule_results.get(slug=rule_slug)
        rule = page_rule_result.rule

        # Pick grouping label, group list and selected group for the requested grouping.
        if rule_grouping == 'gl':
            rule_groups = Guideline.objects.all()
            rule_grouping_label = "Guideline"
            rule_group = Guideline.objects.get(slug=rule_group_slug)
        elif rule_grouping == 'rs':
            rule_groups = RuleScope.objects.all()
            rule_grouping_label = "Rule Scope"
            rule_group = RuleScope.objects.get(slug=rule_group_slug)
        else:
            rule_groups = RuleCategory.objects.all()
            rule_grouping_label = "Rule Category"
            rule_group = RuleCategory.objects.get(slug=rule_group_slug)
            # Normalize any unrecognized grouping value to the rule-category code.
            rule_grouping = 'rc'

        # Setup report navigation
        self.result_nav.set_audit_result(audit_result, 'website', self.request.path)
        self.result_nav.set_rule_grouping(rule_grouping, rule_group_slug)
        self.result_nav.set_website_page(website_slug, page_num, website_result.page_count)
        self.result_nav.set_rule(rule_slug)
        self.result_nav.create_result_navigation()

        context.update({
            # slugs used for urls
            'audit_slug': audit_result.audit.slug,
            'result_slug': result_slug,
            'rule_grouping': rule_grouping,
            'rule_group_slug': rule_group_slug,
            'website_slug': website_slug,
            'page_num': page_num,
            'rule_slug': rule_slug,
            # objects for rendering content
            'audit': audit_result.audit,
            'audit_result': audit_result,
            'rule_grouping_label': rule_grouping_label,
            'rule_groups': rule_groups,
            'rule_group': rule_group,
            'website_result': website_result,
            'page_result': page_result,
            'page_rule_result': page_rule_result,
            'rule': rule,
        })
        return context
|
# Copyright 2019 Open End AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import Blueprint, g, render_template, request
from pytransact.context import ReadonlyContext
import accounting.lang
import blm.accounting
app = Blueprint('invoicing', __name__)
@app.route('/invoice/<objectid:org>')
def invoice(org):
    """Render the invoice page for the organisation identified by *org*."""
    ui_language = accounting.lang.get_language(request)
    stylesheets = [
        'webshop2.css',
        'product-list.css',
        'shopping-cart.css',
        'order.css',
    ]
    # Read-only transaction context: look up the Org object and render.
    with ReadonlyContext(g.database, g.user):
        org, = blm.accounting.Org._query(id=org).run()
        return render_template('invoicing/invoice.html',
                               app='invoicing/invoice',
                               css_files=stylesheets,
                               language=ui_language,
                               org=org)
|
# TODO
# 1) Figure out how to download data from API (which calls to make) - DONE
# 2) Make storing strategy - DONE
# 3) Implement Storing Strategy - DONE
# 4) Auditing Downloads
# 5) Logging
# 6) Error Handling
# 7) Retries
# 8) Testing
import os
import psycopg2
import requests
import yaml
from urllib import parse
import logging
import TestingAPI as api
from hdfs import InsecureClient
from pywebhdfs.webhdfs import PyWebHdfsClient
from datetime import datetime
# Load API keys and connection settings from config.yaml located next to this script.
# FIX: use a 'with' block — the original opened the stream and never closed it.
with open(f'{os.path.abspath(os.path.dirname(__file__))}/config.yaml', 'r') as api_keys_stream:
    # NOTE(review): yaml.load with yaml.Loader can construct arbitrary Python objects;
    # acceptable for a trusted local config, but yaml.safe_load would be safer — confirm.
    config = yaml.load(stream=api_keys_stream, Loader=yaml.Loader)

faceit_api_key = config['Keys']['Faceit-API']
database_connection_details = config['Database']['postgres']
hdfs_connection_details = config['Database']['hdfs']
def get_database_connection():
    """Return a new psycopg2 connection to the operations database.

    Credentials come from the module-level ``database_connection_details``.
    Any connection failure is printed and re-raised so the caller can abort.
    """
    try:
        connection = psycopg2.connect(
            database="faceit_analytics_operations",
            user=database_connection_details['username'],
            password=database_connection_details['password'],
            host=database_connection_details['host'],
            port=database_connection_details['port'],
        )
    except Exception as e:
        print(e)
        raise
    else:
        return connection
def create_insert_audit_record(database_conn, database_cursor, download_type_id, client_game_region_id, match_id):
    """Insert an audit row into client_downloads and return its generated id.

    The download start date is set to CURRENT_DATE server-side; the new
    client_download_id comes back via RETURNING.
    """
    insert_stmt = """INSERT INTO client_downloads(download_type_id, download_start_dt, client_game_region_id, match_id)
                     VALUES (%s, CURRENT_DATE, %s, %s) RETURNING client_download_id"""
    params = (download_type_id, client_game_region_id, match_id,)
    database_cursor.execute(insert_stmt, params)
    new_download_id = database_cursor.fetchone()[0]
    database_conn.commit()
    return new_download_id
def update_audit_record(database_conn, database_cursor, is_download_successful, client_download_id):
    """Close an audit row: set the success flag and the end date, then commit."""
    audit_update_sql = """UPDATE client_downloads
                            SET is_download_successful = %s, download_end_dt = CURRENT_DATE
                            WHERE client_download_id = %s"""
    database_cursor.execute(audit_update_sql, (is_download_successful, client_download_id,))
    database_conn.commit()
def main():
    """Download Faceit data for all flagged clients and archive it in HDFS.

    For every active client/game/region combination: fetch the match
    history, player statistics and player details, then per-match details
    and statistics, writing each raw JSON payload to a date-partitioned
    HDFS path.  Every download is bracketed by an audit record in
    client_downloads (created before, closed after).
    """
    print("Initializing Database Connection")
    database_conn = get_database_connection()
    database_cursor = database_conn.cursor()
    print("Connection Complete")
    print("Querying the players to download data for")
    # Only clients flagged for download and not soft-deleted.
    database_cursor.execute(r"""
    SELECT client_game_region_id, player_id, r.name as region_name, g.name as game_name
    FROM client_game_region cgr
    JOIN clients c on cgr.client_id = c.client_id
    JOIN games g on cgr.game_id = g.game_id
    JOIN regions r on cgr.region_id = r.region_id
    WHERE download_flag = TRUE
    AND c.date_deleted IS NULL
    """)
    clients = database_cursor.fetchall()
    print("Downloading data for players")
    hdfs_client = InsecureClient(url=f'http://{hdfs_connection_details["host"]}:{hdfs_connection_details["port"]}',
                                 user=hdfs_connection_details['user'])
    # One timestamp for the whole run so all files of a run share a path.
    current_date_time = datetime.now()
    for (client_game_region_id, player_id, region_name, game_name) in clients:
        print('Adding audit record for downloading matches')
        # download_type_id 1 = match history (2/3/4/5 below follow the same scheme).
        client_download_id = create_insert_audit_record(database_conn, database_cursor, 1, client_game_region_id, None)
        matches = api.get_player_match_history(player_id, game_name,
                                               region_name).json()  # Match history with small details
        hdfs_client.write(
            hdfs_path=f'data/raw/matches/{player_id}/{current_date_time.year}/{current_date_time.month}/{current_date_time.day}/{current_date_time.strftime("%H.%M.%S")}.json',
            data=matches,
            overwrite=True)
        # NOTE(review): 'true' is passed as a string, relying on the driver
        # to cast it to boolean — confirm column type before changing.
        update_audit_record(database_conn, database_cursor, 'true', client_download_id)
        client_download_id = create_insert_audit_record(database_conn, database_cursor, 2, client_game_region_id, None)
        player_statistics = api.get_player_statistics(player_id).json()  # Player statistics and statistics per map
        hdfs_client.write(
            hdfs_path=f'data/raw/player_statistics/{player_id}' +
                      f'/{current_date_time.year}/{current_date_time.month}/{current_date_time.day}' +
                      f'/{current_date_time.strftime("%H.%M.%S")}.json',
            data=player_statistics,
            overwrite=True
        )
        update_audit_record(database_conn, database_cursor, 'true', client_download_id)
        client_download_id = create_insert_audit_record(database_conn, database_cursor, 3, client_game_region_id, None)
        player_details = api.get_player_details(player_id).json()  # Friend list
        hdfs_client.write(
            hdfs_path=f'data/raw/player_details/{player_id}/{current_date_time.year}/{current_date_time.month}/{current_date_time.day}/{current_date_time.strftime("%H.%M.%S")}.json',
            data=player_details,
            overwrite=True
        )
        update_audit_record(database_conn, database_cursor, 'true', client_download_id)
        print("Matches downloaded: ", len(matches['items']))
        for match in matches['items']:
            client_download_id = create_insert_audit_record(database_conn, database_cursor, 4, client_game_region_id,
                                                            match['match_id'])
            match_details = api.get_match_details(match['match_id']).json()  # Match details - Server, Maps chosen
            hdfs_client.write(
                hdfs_path=f'data/raw/match_details/{match["match_id"]}/{current_date_time.strftime("%H.%M.%S")}.json',
                data=match_details,
                overwrite=True
            )
            update_audit_record(database_conn, database_cursor, 'true', client_download_id)
            client_download_id = create_insert_audit_record(database_conn, database_cursor, 5, client_game_region_id,
                                                            match['match_id'])
            match_statistics = api.get_match_statistics(match['match_id']).json()  # Match statistics for players
            hdfs_client.write(
                hdfs_path=f'data/raw/match_statistics/{match["match_id"]}/{current_date_time.strftime("%H.%M.%S")}.json',
                data=match_statistics,
                overwrite=True
            )
            update_audit_record(database_conn, database_cursor, 'true', client_download_id)
            # NOTE(review): deliberately stops after the first match —
            # presumably a testing limiter; confirm before removing.
            break
    database_cursor.close()
    database_conn.close()
if __name__ == '__main__':
    main()
|
import time
from urllib.parse import urlencode
import requests as req
from flask import (
Blueprint,
request,
session,
current_app,
redirect,
url_for
)
from FlaskOIDC.oidc_discover import OidcDiscover
from FlaskOIDC.oidc_state import OIDCstate
from FlaskOIDC.flask_utils import (
UnauthorizedError,
BadRequestError,
ConflictError
)
# Default OIDC scopes requested when the config does not override them.
default_oidc_scope = ['openid', 'email', 'profile']
# Default ID-token claim used as the session username.
default_user_attr = 'email'
# Current UNIX time as an int, for token-expiry comparisons.
now = lambda : int(time.time())
class FlaskOIDC(OidcDiscover,Blueprint):
    """
    auth = FlaskOIDC(config, ...)
    OIDC Service Provider for Flask
    Uses Authorization Code Grant flow
    """
    def __init__( self,
            config,
            sess_username = 'username',
            sess_attr = 'oidc_attr',
            app=None,
        ):
        self.client_id = config['client_id']
        self.client_secret = config['client_secret']
        # Copy the default scope list: the original aliased the module-level
        # default_oidc_scope, so appending 'offline_access' below mutated the
        # shared default for every later instance.
        self.scopes = list(config.get('client_scope', default_oidc_scope))
        self.username_id = config.get('user_attr', default_user_attr)
        self.token_name = 'oidc_tokens'
        self.sess_username = sess_username
        self.sess_attr = sess_attr
        # autodiscovery of oidc config in base class
        discovery_url = config['discovery_url']
        timeout = config.get('timeout', 4) # undocumented
        super().__init__(discovery_url, timeout=timeout)
        # msft special - 'offline_access' provides refresh tokens
        if 'offline_access' in self.scopes_supported:
            self.scopes.append('offline_access')
        # initialize state creator  # state_key and state_ttl undocumented
        self.state = OIDCstate(key=config.get('state_key'), ttl=config.get('state_ttl',60))
        # make this a blueprint
        Blueprint.__init__(self, name='oidcsp', import_name=__name__)
        # OIDC authorized - receives code grant redirect form IdP via client
        self.add_url_rule(
            '/oidc/authorized',
            endpoint='authorized',
            view_func=self._finish_oauth_login
        )
        self.login_hooks = [self._id_token_hook]
        if not config.get('logout_idp',False):
            # Local logout only (i.e. don't notify IdP)
            self.logout_url = None
        if app:
            # app was specified - install ourself as a blueprint
            app.register_blueprint(self,)
    @property
    def is_authenticated(self):
        """ True if user has authenticated. """
        return self.sess_username in session and session[self.sess_username]
    @property
    def my_username(self):
        """ Return username for the current session. """
        return session[self.sess_username] if self.is_authenticated else None
    @property
    def my_attrs(self):
        """ Return collected assertions for the current session. """
        return session[self.sess_attr] if self.is_authenticated else {}
    def initiate_login(self, next=None, scopes=None, **kwargs):
        """ Initiate an OIDC/Oauth2 login. (return a redirect.) """
        # 'next' url - return to this after tokens acquired.
        # Fix: Flask exposes the query string as request.args;
        # request.params is a Bottle API and raised AttributeError here.
        state = {
            'next': next if next else request.args.get('next', '/')
        }
        params = {
            'client_id' : self.client_id,
            'response_type' : 'code',
            'redirect_uri' : url_for('oidcsp.authorized', _external=True),
            'response_mode': 'query',
            'scope' : ' '.join(scopes if scopes else self.scopes),
            'state' : self.state.serial(state),
        }
        # These are microsoft Azure AD login extensions
        if request.args.get('login_hint'):
            params.update({'login_hint': request.args.get('login_hint')})
        if kwargs.get('userhint'):
            # priority over any in request query string
            params.update({'login_hint': kwargs.get('userhint')})
        if request.args.get('domain_hint'):
            params.update({'domain_hint': request.args.get('domain_hint')})
        if request.args.get('prompt'):
            params.update({'prompt': request.args.get('prompt')})
        if kwargs.get('force_reauth'):
            params.update({'prompt':'login'})
        return redirect(self.auth_url + '?' + urlencode(params))
    # route: /authorized
    def _finish_oauth_login(self):
        """ Callback Route: Complete login by obtaining id and access tokens. """
        if 'error' in request.args:
            msg = f'OIDC: AuthNZ error: {request.args.get("error_description")}'
            current_app.logger.info(msg)
            return BadRequestError(msg)
        try:
            # Validate and deserialize state
            state = self.state.deserial(request.args.get('state'))
        except Exception as e:
            msg = 'OIDC: Authentication request was not outstanding'
            current_app.logger.info(msg, str(e))
            return BadRequestError(msg)
        code = request.args.get('code')
        # Prepare to exchange code for tokens
        params = {
            'client_id' : self.client_id,
            'client_secret' : self.client_secret,
            'grant_type' : 'authorization_code',
            'code': code,
            'redirect_uri' : url_for('oidcsp.authorized', _external=True),
        }
        try:
            current_app.logger.debug(f'OIDC: exchanging code {code[:10]}...{code[-10:]} for tokens')
            resp = req.post(self.token_url, data=params, timeout=self.timeout)
            tokens = resp.json()
            if 'error' in tokens:
                msg = f'OIDC: error exchanging code for tokens: {tokens["error_description"]}'
                current_app.logger.info(msg)
                return ConflictError(msg)
            try:
                # authenticate and decode the id token
                idtok = self.jwks.decode(tokens['id_token'], audience=self.client_id)
                tokens['exp'] = idtok['exp']
                session[self.token_name] = tokens
                username = idtok.get(self.username_id, 'Authenticated User')
                attrs = idtok
                # Run all login hooks
                for login_hook in self.login_hooks:
                    username, attrs = login_hook(username, attrs)
                attrs.update({
                    'authenticated' : now()
                })
                current_app.logger.info(f'OIDC: User "{username}" authenticated')
                session[self.sess_attr] = attrs
                session[self.sess_username] = username
            except Exception as e:
                current_app.logger.info(f'Error: OIDC: failed to verify token: {str(e)}')
                return UnauthorizedError('OIDC: failed to verify id token')
        except Exception as e:
            current_app.logger.info(f'Error: OIDC: token acquisition failed: {str(e)}')
            return UnauthorizedError('OIDC: Error acquiring id token')
        if 'next' in state:
            return redirect(state['next'])
        else:
            return f'OIDC: authenticated "{username}"'
    def initiate_logout(self, next=None):
        """ Clear session and redirect to provider logout. """
        if next is None:
            next = request.args.get('next')
        if self.is_authenticated:
            user = self.my_username
        else:
            user = 'Anonymous'
        current_app.logger.info(f'OIDC: user "{user}" logged out')
        # since we did the authentication, we should do this:
        session.clear()
        if self.logout_url and next:
            return redirect(self.logout_url +'?' + urlencode({'post_logout_redirect_uri': next}))
        elif self.logout_url:
            return redirect(self.logout_url)
        elif next:
            return redirect(next)
        else:
            return 'Logout complete'
    def _token_expire_check(self, token_name=None):
        """ Refresh token if needed. """
        if not token_name:
            # default is the base authenticator tokens
            token_name = self.token_name
        if now() < session[token_name]['exp']:
            # The tokens are still valid
            return True
        current_app.logger.debug(f'OIDC: Auto-refreshing expired "{token_name}" token')
        tokens = self._get_token_with_refresh(token_name)
        if tokens:
            idtok = self.jwks.decode(tokens['id_token'], options={'verify_signature':False})
            tokens['exp'] = idtok['exp']
            session[token_name] = tokens
            current_app.logger.debug(f'OIDC: Token refreshed')
            return True
        else:
            current_app.logger.info(f'OIDC: session token refresh for "{token_name}" failed.')
            return False
    def _get_token_with_refresh(self, token_name=None, scope=None):
        """ Get a new tokens using the refresh token. """
        if not token_name:
            # default is the base authenticator tokens
            token_name = self.token_name
        if token_name in session:
            current_tokens = session[token_name]
        else:
            # this is a new token_name, use the oidc tokens for refresh
            current_tokens = session[self.token_name]
        if 'refresh_token' not in current_tokens:
            # we don't have a refresh token to use
            return None
        params = {
            'client_id' : self.client_id,
            'client_secret' : self.client_secret,
            'grant_type' : 'refresh_token',
            'refresh_token' : current_tokens['refresh_token'],
        }
        if scope:
            # specific scope is requested
            params.update({'scope': scope})
        resp = req.post(self.token_url, data=params)
        new_tokens = resp.json()
        if 'error' in new_tokens:
            # There was a failure
            current_app.logger.debug(f'OIDC: Error: refreshing tokens: {new_tokens["error_description"]}')
            return None
        idtok = self.jwks.decode(new_tokens['id_token'], options={'verify_signature' :False})
        new_tokens['exp'] = idtok['exp']
        return new_tokens
    def _id_token_hook(self, user, attr):
        """ Remove unneeded id_token data from session attributes """
        for key in ['aud', 'iss', 'iat', 'nbf', 'exp', 'aio', 'tid','uti', 'ver', 'wids']:
            if key in attr: del attr[key]
        # username part of email:
        user = user.split('@')[0]
        # Add username as an attribute as well
        attr['username'] = user
        return user, attr
    def get_access_token(self, token_name=None, scope=None):
        """ Get and cache an access_token for given scopes. """
        if not token_name:
            # default is the base authenticator tokens
            token_name = self.token_name
        if token_name in session and session[token_name]['exp'] < now():
            # this token is expired - remove it
            del session[token_name]
        if token_name in session:
            # return the current cached token
            return session[token_name]
        else:
            # nothing cached, get a new token
            new_tokens = self._get_token_with_refresh(scope=scope)
            if new_tokens:
                # token is valid, so save it
                session[token_name] = new_tokens
                return new_tokens
            else:
                # no token provided - just to be explicit
                return None
    # api: @auth.assert_login decorator
    def assert_login(self, f):
        """ Return error on view if user is not authenticated """
        def _wrapper(*args, **kwargs):
            if self.is_authenticated and self.token_name in session:
                if self._token_expire_check(self.token_name):
                    return f(*args, **kwargs)
            # either no user in this session or a refresh failed - full login...
            return UnauthorizedError()
        _wrapper.__name__ = f.__name__
        return _wrapper
    # api: @auth.require_login decorator.
    def require_login(self, f):
        """ Decorator for forcing authenticated. """
        def _wrapper(*args, **kwargs):
            if self.is_authenticated and self.token_name in session:
                if self._token_expire_check(self.token_name):
                    return f(*args, **kwargs)
            # either no user in this session or a refresh failed - full login...
            return self.initiate_login(next = request.url)
        _wrapper.__name__ = f.__name__
        return _wrapper
    def add_login_hook(self,f):
        """ Decorator for adding login hook. """
        self.login_hooks.append(f)
        return f
    def require_user(self, user_list):
        """ Decorator passes on specific list of usernames. """
        def _outer_wrapper(f):
            def _wrapper(*args, **kwargs):
                if self.my_username in user_list:
                    return f(*args, **kwargs)
                return UnauthorizedError('Not Authorized')
            _wrapper.__name__ = f.__name__
            return _wrapper
        return _outer_wrapper
    def require_attribute(self, attr, value):
        """ Decorator requires specific attribute value. """
        def test_attrs(challenge, standard):
            """Compare list or val the standard."""
            stand_list = standard if type(standard) is list else [standard]
            chal_list = challenge if type(challenge) is list else [challenge]
            for chal in chal_list:
                if chal in stand_list:
                    return True
            return False
        def _outer_wrapper(f):
            def _wrapper(*args, **kwargs):
                if attr in self.my_attrs:
                    resource = session[self.sess_attr][attr]
                    if test_attrs(resource, value):
                        return f(*args, **kwargs)
                return UnauthorizedError('Not Authorized')
            _wrapper.__name__ = f.__name__
            return _wrapper
        return _outer_wrapper
|
from invoke import task
from shlex import quote
from colorama import Fore
import re
@task
def build(c):
    """
    Build the infrastructure
    """
    # Pass the project name and the host user id into every image build so
    # files created inside containers are owned by the invoking user.
    command = 'build'
    command += ' --build-arg PROJECT_NAME=%s' % c.project_name
    command += ' --build-arg USER_ID=%s' % c.user_id
    with Builder(c):
        # Some base images must exist before the remaining services build.
        for service in c.services_to_build_first:
            docker_compose(c, '%s %s' % (command, service))
        docker_compose(c, command)
@task
def up(c):
    """
    Build and start the infrastructure
    """
    build(c)
    # --remove-orphans drops containers left over from removed services.
    docker_compose(c, 'up --remove-orphans --detach')
@task
def start(c):
    """
    Build and start the infrastructure, then install the application (composer, yarn, ...)
    """
    if c.dinghy:
        # macOS/dinghy: make sure the docker VM is up and has working DNS
        # before running any docker command.
        machine_running = c.run('dinghy status', hide=True).stdout
        if machine_running.splitlines()[0].strip() != 'VM: running':
            c.run('dinghy up --no-proxy')
            c.run('docker-machine ssh dinghy "echo \'nameserver 8.8.8.8\' | sudo tee -a /etc/resolv.conf && sudo /etc/init.d/docker restart"')
    # Workers are stopped during install/migrate so they never run against
    # a half-migrated schema, then restarted at the end.
    stop_workers(c)
    up(c)
    install(c)
    migrate(c)
    start_workers(c)
    print(Fore.GREEN + 'You can now browse:')
    for domain in [c.root_domain] + c.extra_domains:
        print(Fore.YELLOW + "* https://" + domain)
@task
def install(c):
    """
    Install the application (composer, yarn, ...)
    """
    with Builder(c):
        # no_deps: composer does not need the linked services running.
        docker_compose_run(c, 'composer install -n --prefer-dist --optimize-autoloader', no_deps=True)
@task
def migrate(c):
    """
    Migrate database schema
    """
    with Builder(c):
        # Create the database on first run, then apply pending migrations.
        docker_compose_run(c, 'php bin/console doctrine:database:create --if-not-exists')
        docker_compose_run(c, 'php bin/console doctrine:migration:migrate -n')
@task
def builder(c, user="app"):
    """
    Open a shell (bash) into a builder container
    """
    with Builder(c):
        docker_compose_run(c, 'bash', user=user)
@task
def logs(c):
    """
    Display infrastructure logs
    """
    # Follow mode; start from the last 150 lines of each service.
    docker_compose(c, 'logs -f --tail=150')
@task
def ps(c):
    """
    List containers status
    """
    docker_compose(c, 'ps --all')
@task
def stop(c):
    """
    Stop the infrastructure
    """
    docker_compose(c, 'stop')
@task
def start_workers(c):
    """
    Start the workers
    """
    workers = get_workers(c)
    if (len(workers) == 0):
        return
    # Restart policy keeps workers alive across docker daemon restarts;
    # the flag is also exported to compose via PROJECT_START_WORKERS.
    c.start_workers = True
    c.run('docker update --restart=unless-stopped %s' % (' '.join(workers)), hide='both')
    docker_compose(c, 'up --remove-orphans --detach')
@task
def stop_workers(c):
    """
    Stop the workers
    """
    workers = get_workers(c)
    if (len(workers) == 0):
        return
    # Disable the restart policy first so docker does not revive the
    # workers immediately after they are stopped.
    c.start_workers = False
    c.run('docker update --restart=no %s' % (' '.join(workers)), hide='both')
    c.run('docker stop %s' % (' '.join(workers)), hide='both')
@task
def destroy(c, force=False):
    """
    Clean the infrastructure (remove container, volume, networks)
    """
    # Destructive operation: require interactive confirmation unless --force.
    if not force:
        ok = confirm_choice('Are you sure? This will permanently remove all containers, volumes, networks... created for this project.')
        if not ok:
            return
    with Builder(c):
        docker_compose(c, 'down --volumes --rmi=local')
def docker_compose_run(c, command_name, service="builder", user="app", no_deps=False, workdir=None, port_mapping=False):
    """Run `command_name` in a throwaway container of `service` as `user`.

    no_deps skips starting linked services, workdir overrides the working
    directory, port_mapping exposes the service's declared ports.
    """
    args = [
        'run',
        '--rm',
        '-u %s' % quote(user),
    ]
    if no_deps:
        args.append('--no-deps')
    if port_mapping:
        args.append('--service-ports')
    if workdir is not None:
        args.append('-w %s' % quote(workdir))
    # exec replaces the shell so signals reach the command directly.
    docker_compose(c, '%s %s /bin/sh -c "exec %s"' % (
        ' '.join(args),
        quote(service),
        command_name
    ))
def docker_compose(c, command_name):
    """Run a docker-compose command with the project's compose files and env."""
    domains = '`' + '`, `'.join([c.root_domain] + c.extra_domains) + '`'
    # Environment consumed by the compose files themselves.
    env = {
        'PROJECT_NAME': c.project_name,
        'PROJECT_DIRECTORY': c.project_directory,
        'PROJECT_ROOT_DOMAIN': c.root_domain,
        'PROJECT_DOMAINS': domains,
        'PROJECT_START_WORKERS': str(c.start_workers),
    }
    cmd = 'docker-compose -p %s %s %s' % (
        c.project_name,
        ' '.join('-f "' + c.root_dir + '/infrastructure/docker/' + file + '"' for file in c.docker_compose_files),
        command_name
    )
    # pty needed for interactive output, except under PowerShell.
    c.run(cmd, pty=not c.power_shell, env=env)
def get_workers(c):
    """
    Find worker containers for the current project
    """
    result = c.run(
        'docker ps -a --filter "label=docker-starter.worker.%s" --quiet' % c.project_name,
        hide='both',
    )
    # docker prints one container id per line; drop the empty entries
    # produced by trailing newlines.
    return [container_id for container_id in result.stdout.rsplit("\n") if container_id]
def confirm_choice(message):
    """Ask a yes/no question on stdin; return True for an answer starting with y/Y.

    Improvements over the original: returns a real bool instead of a
    re.Match-or-None, and accepts an uppercase 'Y' as well, matching the
    "[y]es" wording of the prompt.  Callers only truth-test the result,
    so this is backward-compatible.
    """
    confirm = input('%s [y]es or [N]o: ' % message)
    return confirm.strip().lower().startswith('y')
class Builder:
    """Context manager that prepends the builder compose file to
    ``c.docker_compose_files`` for the duration of the block, restoring
    the original list on exit."""

    def __init__(self, c):
        self.c = c

    def __enter__(self):
        # Remember the caller's compose-file list so __exit__ can restore it.
        self._previous_files = self.c.docker_compose_files
        self.c.docker_compose_files = ['docker-compose.builder.yml'] + self._previous_files

    def __exit__(self, exc_type, exc_value, exc_traceback):
        self.c.docker_compose_files = self._previous_files
|
import os
import numpy as np
import pandas as pd
import shutil
import unittest
from sentiment_classifier.context import DATA_DIR
from sentiment_classifier.task.checkpoint import (_CHECKPOINT_DF_FNAME, checkpoint_exists, load_checkpoint,
write_checkpoint)
class TestCheckpoint(unittest.TestCase):
    """Unit tests for the checkpoint write/exists/load tasks."""

    def setUp(self) -> None:
        # Two-row frame whose 'features' column holds raw float32 bytes,
        # mirroring how features are persisted in checkpoints.
        barray = np.array([1, 2, 3], dtype=np.float32).tobytes()
        self.df = pd.DataFrame({'foo': [1, 2], 'features': [barray, barray]})
        # NOTE(review): set_index returns a new frame and the result is
        # discarded, so this line is a no-op — either assign the result
        # or remove the call; confirm intent before changing.
        self.df.set_index('foo')
        self.checkpoint_dir = os.path.join(DATA_DIR, 'testing')
        self.checkpoint_file = os.path.join(self.checkpoint_dir, _CHECKPOINT_DF_FNAME)

    def tearDown(self) -> None:
        # Remove any checkpoint directory created by the test.
        if os.path.exists(self.checkpoint_dir):
            shutil.rmtree(self.checkpoint_dir)

    def test_write_checkpoint(self):
        write_checkpoint.run(self.df, self.checkpoint_dir)
        assert os.path.exists(self.checkpoint_file)

    def test_checkpoint_exists_false(self):
        assert not checkpoint_exists.run(self.checkpoint_dir)

    def test_checkpoint_exists_true(self):
        write_checkpoint.run(self.df, self.checkpoint_dir)
        assert checkpoint_exists.run(self.checkpoint_dir)

    def test_load_checkpoint(self):
        write_checkpoint.run(self.df, self.checkpoint_dir)
        result = load_checkpoint.run(self.checkpoint_dir)
        # Loading decodes the raw bytes back into float32 arrays, so the
        # expected frame is converted the same way before comparing.
        self.df['features'] = self.df['features'].apply(lambda x: np.frombuffer(x, dtype=np.float32))
        assert self.df.equals(result)
|
from sklearn.model_selection import train_test_split
import pandas as pd
import numpy as np
class DataTransform:
    """Pivot raw long-format tag readings into a model-ready frame and split it."""

    def __init__(self, data_df):
        # Long-format readings: one row per sensor sample, with columns
        # created_timestamp, tag_key and tag_val.
        self.data = data_df

    def transform(self):
        """Pivot, clean and split the data.

        Returns (model_data, X_train, X_test, y_train, y_test) where X
        holds sens_2/sens_4/sens_5 and y holds the target sens_1.
        """
        # Wide format: one column per tag_key, rows keyed by timestamp.
        result_data = pd.pivot_table(self.data, values='tag_val', index=['created_timestamp'],
                                     columns='tag_key').reset_index()
        # Carry the last known reading forward over gaps.
        result_data = result_data.ffill()
        # Keep only rows where the target sensor has a value.
        model_data = result_data[result_data['sens_1'].notna()]
        # Reassign instead of fillna(inplace=True): model_data is a slice of
        # result_data, and mutating a slice in place triggers pandas'
        # SettingWithCopyWarning and may silently fail to update.
        model_data = model_data.fillna(0)
        model_data = model_data.set_index('created_timestamp')
        model_data.sort_index(inplace=True)
        X = model_data[['sens_2', 'sens_4', 'sens_5']]
        y = model_data[['sens_1']]
        # Fixed random_state keeps the split reproducible.
        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)
        return model_data, X_train, X_test, y_train, y_test
|
# Compare two strings represented as linked lists
# Given two linked lists, represented as linked lists (every character is a node in linked list). Write a function compare() that works similar to strcmp(), i.e., it returns 0 if both strings are same, 1 if first linked list is lexicographically greater, and -1 if second string is lexicographically greater.
# Examples:
# Input: list1 = g->e->e->k->s->a
# list2 = g->e->e->k->s->b
# Output: -1
# Input: list1 = g->e->e->k->s->a
# list2 = g->e->e->k->s
# Output: 1
# Input: list1 = g->e->e->k->s
# list2 = g->e->e->k->s
# Output: 0
class Node:
    """A node of a singly-linked character list."""

    # Constructor to create a new node
    def __init__(self, char):
        self.c = char      # character stored in this node
        self.next = None   # link to the next node; None terminates the list
def compare(str1, str2):
    """strcmp() for character linked lists.

    Returns 0 if both strings are equal, 1 if the first list is
    lexicographically greater, -1 if the second is greater.
    """
    # Walk both lists in lockstep while the characters match.
    while (str1 and str2) and str1.c == str2.c:
        str1 = str1.next
        str2 = str2.next
    # Both lists still have nodes: the first mismatching character decides.
    if str1 and str2:
        return 1 if str1.c > str2.c else -1
    # Only the first list has characters left: it is the longer (greater) one.
    if str1:
        return 1
    # Only the second list has characters left.
    if str2:
        return -1
    # Both ended together with no mismatch: equal.  (Bug fix: the original
    # tested `not str1` first and returned -1 for two equal strings.)
    return 0
# Driver program: build the two test lists ("geeksb" and "geeksa") by
# prepending characters in reverse, then compare them (expected output: 1).
list1 = None
for _ch in reversed('geeksb'):
    _node = Node(_ch)
    _node.next = list1
    list1 = _node
list2 = None
for _ch in reversed('geeksa'):
    _node = Node(_ch)
    _node.next = list2
    list2 = _node
print(compare(list1, list2))
|
# You are given a polygon with N vertices, numbered from 1 to N in clockwise order. You are also given an integer K. You have a vertex-explosion machine that explodes a vertex in the polygon, thereby reducing the size of the polygon. You start with vertex 2. At each step, one of the following operations on the polygon is performed:
# If K > 0, then there is no effect of the vertex-explosion machine. K is reduced by 1 and you move to the next available vertex at distance 2 in the clockwise direction.
# Otherwise, the vertex is exploded, reducing the number of sides in the polygon by 1, and you move to the next available vertex at distance 2 in the clockwise direction from the exploded vertex.
# Note: Polygon with vertex 2 and 1 exists
def remove_vertex(N, K):
    """Return the surviving position for the vertex-explosion process.

    Equivalent to the original recursion
        f(N, K) = (f(N, K-1) + 1) % N + 1          for K > 0
        f(1, 0) = 1
        f(N, 0) = (f(N-1, 0) + 1) % N + 1          for N > 1
    but computed iteratively: the recursion's depth is O(N + K), which
    raises RecursionError for large inputs; this version runs in O(N + K)
    time with O(1) stack.
    """
    # Unwind the N-recursion: f(1, 0) = 1, then grow the polygon.
    pos = 1
    for sides in range(2, N + 1):
        pos = (pos + 1) % sides + 1
    # Unwind the K-recursion: K no-op steps at the full size N.
    for _ in range(K):
        pos = (pos + 1) % N + 1
    return pos
if __name__ == "__main__":
T = int(input())
for i in range(T):
N, K = map(int, input().rstrip().split())
print(remove_vertex(N, K)+1)
# By Linked list
# class node:
# def __init__(self, data):
# self.data = data
# self.next = None
# def last_vertex(N, K):
# # creating the list
# head = node(1)
# ptr = head
# for i in range(2, N+1):
# nnode = node(i)
# ptr.next = nnode
# ptr = ptr.next
# ptr.next = head
# ptr = head.next
# while N>1:
# if K >0:
# prev = ptr.next
# ptr = prev.next
# K -= 1
# else:
# prev.next = ptr.next
# prev = prev.next
# ptr = prev.next
# N-=1
# return ptr.data
|
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
def register_custom_endpoint_note(event_emitter):
    """Hook the custom-endpoint note into iot-data command descriptions."""
    # register_last: append the note after all other description content.
    event_emitter.register_last(
        'doc-description.iot-data', add_custom_endpoint_url_note)
def add_custom_endpoint_url_note(help_command, **kwargs):
    """Write a note about custom IoT data endpoints into the help document.

    Invoked by the doc-generation event system for iot-data command
    descriptions; extra event kwargs are accepted and ignored.
    """
    style = help_command.doc.style
    style.start_note()
    style.doc.writeln(
        'The default endpoint data.iot.[region].amazonaws.com is intended '
        'for testing purposes only. For production code it is strongly '
        'recommended to use the custom endpoint for your account '
        ' (retrievable via the iot describe-endpoint command) to ensure best '
        'availability and reachability of the service.'
    )
    style.end_note()
|
from functools import partial
from multiprocessing import Pool, cpu_count
from typing import List
import torch
from fairseq.checkpoint_utils import load_model_ensemble
from torch.nn.utils.rnn import pad_sequence
from .utils import log_mel_spectrogram
def load_pretrained_wav2vec(ckpt_path: str):
    """Load pretrained Wav2Vec model."""
    ckpt_path = str(ckpt_path)
    # fairseq returns (list_of_models, cfg); only the first model is needed.
    model, cfg = load_model_ensemble([ckpt_path])
    model = model[0]
    # Drop modules used only during pre-training (e.g. the quantizer).
    model.remove_pretraining_modules()
    model.eval()
    return model
class FeatureExtractor:
    """Dispatch wrapper around several speech feature extractors.

    mode 1: s3prl hub models (apc/cpc/timit_posteriorgram/fbank) — called
            directly on a list of waveforms.
    mode 2: fairseq wav2vec2 — needs padding and an explicit padding mask.
    mode 3: log-mel spectrogram computed per-waveform on CPU via numpy.
    """
    def __init__(self, feature_name, wav2vec2_path=None, device=None):
        self.device = device
        if feature_name in ["apc", "cpc", "timit_posteriorgram", "fbank"]:
            self.extractor = (
                torch.hub.load(
                    "ga642381/s3prl:s2vc",
                    feature_name,
                    refresh=True,
                )
                .eval()
                .to(device)
            )
            self.mode = 1
        elif feature_name == "wav2vec2":
            self.extractor = load_pretrained_wav2vec(wav2vec2_path).eval().to(device)
            self.mode = 2
        elif feature_name == "wav2vec2_mel":
            # Mel settings matched to wav2vec2's 320-sample frame hop.
            self.extractor = partial(
                log_mel_spectrogram,
                preemph=0.97,
                sample_rate=16000,
                n_mels=80,
                n_fft=400,
                hop_length=320,
                win_length=400,
                f_min=0,
                center=False,
            )
            self.mode = 3
        elif feature_name == "cpc_mel":
            # Mel settings matched to CPC's 160-sample frame hop.
            self.extractor = partial(
                log_mel_spectrogram,
                preemph=0.97,
                sample_rate=16000,
                n_mels=80,
                n_fft=465,
                hop_length=160,
                win_length=465,
                f_min=80,
                center=True,
            )
            self.mode = 3
        else:
            # Unknown feature name: report and terminate the process.
            print(feature_name)
            print(
                "Please use timit_posteriorgram, apc, wav2vec2, cpc, wav2vec2_mel, cpc_mel, or fbank"
            )
            exit()
    def get_feature(self, wavs: list) -> list:
        # wavs : list of tensors, no padding
        if self.mode == 1:
            return self.extractor(wavs)
        elif self.mode == 2:
            # Pad to a batch and mark the padded tail of each waveform.
            wav_lens = [len(wav) for wav in wavs]
            wavs = pad_sequence(wavs, batch_first=True)
            padding_mask = [
                torch.arange(wavs.size(1)) >= wav_len for wav_len in wav_lens
            ]
            padding_mask = torch.stack(padding_mask).to(self.device)
            feats = self.extractor.extract_features(wavs, padding_mask)["x"]
            # Split the batch back into a list of per-utterance tensors.
            feats = [f for f in feats]
        elif self.mode == 3:
            # Spectrogram extractor works on numpy arrays, one wav at a time.
            wavs = [wav.cpu().numpy() for wav in wavs]
            feats = [self.extractor(wav) for wav in wavs]
            feats = [torch.FloatTensor(feat).to(self.device) for feat in feats]
            return feats
        # mode 2 falls through to here.
        return feats
|
from __future__ import absolute_import
import io
from setuptools import setup, find_packages
# Long description shown on PyPI: README followed by the changelog.
# Context managers close the file handles deterministically (the original
# relied on garbage collection to close them).
with io.open('README.rst', encoding='utf-8') as _readme, \
        io.open('CHANGES.txt', encoding='utf-8') as _changes:
    long_description = '\n'.join((_readme.read(), _changes.read()))
# Extra dependencies needed only to run the test suite.
tests_require = [
    'pytest >= 2.0',
    'pytest-cov',
    'WebTest >= 2.0.14',
    'mock',
]
setup(
    name='bowerstatic',
    version='0.10.dev0',
    description="A Bower-centric static file server for WSGI",
    long_description=long_description,
    author="Martijn Faassen",
    author_email="faassen@startifact.com",
    classifiers=[
        "Programming Language :: Python",
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.8',
    ],
    license="BSD",
    url='http://bowerstatic.readthedocs.org',
    keywords='wsgi bower',
    packages=find_packages(),
    include_package_data=True,
    zip_safe=False,
    install_requires=[
        'setuptools',
        'WebOb',
    ],
    tests_require=tests_require,
    # `pip install bowerstatic[test]` pulls in the test dependencies.
    extras_require=dict(
        test=tests_require,
    )
)
|
from testutil import *
import opalstack
# Single authenticated API client shared by the tests below.
opalapi = opalstack.Api(APIKEY)
def test_servers():
    # -- List servers --
    #
    # Retrieve all existing servers on the account.
    # Returns three lists: web_servers, imap_servers, and smtp_servers
    #
    servers = opalapi.servers.list_all()
    web_servers = servers['web_servers']
    imap_servers = servers['imap_servers']
    smtp_servers = servers['smtp_servers']
    for web_server in web_servers:
        server_id = web_server['id']
        server_hostname = web_server['hostname']
        print(f'Listed web_server {server_hostname}')
    for imap_server in imap_servers:
        server_id = imap_server['id']
        server_hostname = imap_server['hostname']
        print(f'Listed imap_server {server_hostname}')
    for smtp_server in smtp_servers:
        server_id = smtp_server['id']
        server_hostname = smtp_server['hostname']
        print(f'Listed smtp_server {server_hostname}')
    # -- Read single server --
    #
    # Retrieve one existing server by id.
    # (Uses `server_id` left over from the last smtp_server iteration,
    # so at least one smtp server must exist.)
    #
    server = opalapi.servers.read(server_id)
    print(f'Read server by id: {server_id}')
    assert server['id'] == server_id
|
"""
Find HARPS data from the ESO archive for a set of target positions.
This script will download catalog files for each object which contain the
Phase 3 identifier required to download the reduced and intermediate data
products.
"""
__author__ = "Andrew R. Casey <arc@ast.cam.ac.uk>"
# CRITICAL NOTE:
# You will need to authenticate with ESO in a Python terminal before running
# this script. Here's how:
# >> from astroquery.eso import Eso as ESO
# >> eso = ESO()
# >> eso.login("MY_USERNAME", store_password=True)
# This will store your password locally so you don't need to provide it in
# future sessions.
# This script also requires 'keyring.alt' package (available through pip)
import os
import logging
import pg
import re
import time
import yaml
from astropy.extern.six import BytesIO, cPickle as pickle
from astropy.table import Table
from astroquery.eso import Eso as ESO
from astroquery.eso.core import _check_response
# Load local catalog of positions.
local_catalog = Table.read("data/HARPS_all.csv")
# Login to ESO.
eso = ESO()
eso.login("andycasey")
eso.ROW_LIMIT = 100000 # Maximum possible number of observations per star
# Connect to the PostgreSQL database.
cwd = os.path.dirname(os.path.realpath(__file__))
with open(os.path.join(cwd, "../db/credentials.yaml"), "r") as fp:
    # safe_load: credentials are plain scalars, and yaml.load without an
    # explicit Loader is deprecated (and unsafe on untrusted input) in
    # PyYAML >= 5.1.
    credentials = yaml.safe_load(fp)
connection = pg.connect(**credentials)
def query_harps_phase3_by_position(ra, dec, **kwargs):
    """
    Query the ESO Phase 3 science archive by position.

    Returns an astropy Table of the matching Phase 3 products with a
    "dataset" column holding the PHASE3+... identifiers needed to request
    downloads, or None when the archive returns no results.

    :param ra:
        Right ascension [degrees].

    :param dec:
        Declination [degrees].
    """
    # Multipart form payload mirroring the archive's HTML search form; the
    # field names and order match what the wdb endpoint expects, so do not
    # reorder or rename entries.
    payload = [
        ("wdbo", ("", "html/display")),
        ("max_rows_returned", ("", "{:.0f}".format(eso.ROW_LIMIT))),
        ("target", ("", "")),
        ("resolver", ("", "simbad")),
        ("wdb_input_file", ("", "", "application/octet-stream")),
        ("coord_sys", ("", "eq")),
        ("coord1", ("", str(ra))),
        ("coord2", ("", str(dec))),
        ("box", ("", "02 09 00")),  # search box size (hh mm ss)
        ("tab_ra", ("", "on")),
        ("tab_dec", ("", "on")),
        ("tab_filter", ("", "on")),
        ("filter", ("", "Any")),
        ("tab_wavelength", ("", "on")),
        ("wavelength", ("", "Any")),
        ("tab_dataproduct_type", ("", "on")),
        ("dataproduct_type", ("", "Any")),
        ("tel_id", ("", "Any")),
        ("tab_ins_id", ("", "on")),
        ("ins_id", ("", "HARPS")),  # restrict to HARPS products
        ("obstech", ("", "Any")),
        ("tab_date_obs", ("", "on")),
        ("date_obs", ("", "")),
        ("mjd_obs", ("", "")),
        ("tab_exptime", ("", "on")),
        ("exptime", ("", "")),
        ("multi_ob", ("", "%")),
        ("tab_collection_name", ("", "on")),
        ("tab_prog_id", ("", "on")),
        ("prog_id", ("", "")),
        ("username", ("", "")),
        ("p3orig", ("", "%")),
        ("tab_origfile", ("", "on")),
        ("origfile", ("", "")),
        ("tab_dp_id", ("", "on")),
        ("dp_id", ("", "")),
        ("rel_date", ("", "")),
        ("tab_referenc", ("", "on")),
        ("referenc", ("", "")),
        ("batch_id", ("", "")),
        ("publication_date", ("", "")),
        ("wdb_input_file_raw", ("", "", "application/octet-stream")),
        ("order_main", ("", "dummy"))
    ]
    url = "http://archive.eso.org/wdb/wdb/adp/phase3_main/query"
    survey_response = eso._request("POST", url, cache=False, files=payload)
    content = survey_response.content
    if not _check_response(content):
        return None

    # Keep only the result rows (those carrying a PHASE3+ identifier) and
    # rebuild a minimal HTML table that astropy can parse.
    # NOTE(review): content is bytes under Python 3, where .split("\n") and
    # BytesIO(html_content) with a str would both raise -- this path appears
    # to assume Python 2; confirm the target interpreter.
    rows = "\n".join([r for r in content.split("\n") if "PHASE3+" in r])
    rows = rows.replace("[doc id:", "[doc:")
    html_content = "<table>{}</table>".format(rows)
    table = Table.read(BytesIO(html_content), format="ascii.html",
        names=("Mark", "More", "ARCFILE", "HDR", "Object", "RA", "DEC", "Filter",
            "ABMAGLIM", "Wavelength", "SNR", "Resolution", "Product category",
            "Instrument", "Date Obs", "Exptime", "Collection", "Product version",
            "Release Description", "Run/Program ID", "ORIGFILE", "REFERENCE Catalog",
            "Interface"))

    # Delete presentation-only columns we never ingest.
    for column_name in ("Mark", "More", "HDR", "Filter", "ABMAGLIM",
            "Product category", "Collection", "Product version", "Release Description",
            "Interface", "REFERENCE Catalog"):
        del table[column_name]

    # Parse the PHASE3 identifiers (one per result row, in row order).
    table["dataset"] = re.findall(
        "PHASE3\+[0-9]+\+ADP\.[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}",
        content)
    return table
warnings = {}
failures = {}

# M counts datasets found so far; N is the total number of catalog targets.
M, N = (0, len(local_catalog))
for i, target in enumerate(local_catalog):
    # Search the ESO archive for HARPS data around this target's position.
    try:
        response = query_harps_phase3_by_position(target["RA"], target["Dec"])
    except ValueError:
        failures[target["Name"]] = "ValueError: only one result?"
        print("ValueError: Only one result for {}?".format(target["Name"]))
        continue

    if response is None:
        print("No results found for star name {}".format(target["Name"]))
        failures[target["Name"]] = "No results found in Phase 3 search"
        continue

    # Keep only the highest-resolution (R ~ 115,000) spectra.
    keep = response["Resolution"] == 115000
    response = response[keep]
    if len(response) == 0:
        print("No R ~ 115,000 spectra found for star {}".format(target["Name"]))
        failures[target["Name"]] = "Only R ~ 80,000 spectra found"
        continue

    K = len(response)
    M += K
    print("({}/{}) Found {} datasets ({} expected; {} total so far) for {} ({:.3f} / {:.3f})"\
        .format(i, N, K, target["N_exp"], M, target["Name"], target["RA"], target["Dec"]))
    if target["N_exp"] > K:
        warnings[target["Name"]] = "Expected {}; found {}".format(
            target["N_exp"], K)
        print("Warning: Expected {} and found {}".format(target["N_exp"], K))

    # Ingest the catalog.
    # BUGFIX: this message (and the IntegrityError log below) previously
    # referenced an undefined name `filename`, raising NameError on the
    # first ingest; report the target name instead.
    print("Ingesting {} Phase 3 records for {}".format(len(response), target["Name"]))
    for record in response:
        cursor = connection.cursor()
        # Skip rows already ingested (arcfile is the natural key).
        cursor.execute(
            """SELECT EXISTS(SELECT 1 FROM phase3_products WHERE arcfile=%s)""",
            (record["ARCFILE"], ))
        exists, = cursor.fetchone()
        if not exists:
            try:
                cursor.execute(
                    """INSERT INTO phase3_products (arcfile, object, ra, dec,
                        wavelength, snr, resolution, instrument, date_obs,
                        exptime, program_id, origfile, dataset) VALUES (%s, %s,
                        %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)""",
                    [record[k] for k in record.dtype.names])
            except pg.IntegrityError:
                # A concurrent insert (or duplicate arcfile) slipped past the
                # EXISTS check; log it and keep going with the next record.
                logging.exception("IntegrityError on {}/{}:\n{}\n".format(
                    target["Name"], record["ARCFILE"], record))
                connection.rollback()
            else:
                connection.commit()
        cursor.close()

# Close the PostgreSQL database.
connection.close()

# Save any warnings or failures for later inspection.
with open(os.path.join(cwd, "eso-search-phase3-output.pkl"), "wb") as fp:
    pickle.dump((warnings, failures), fp, -1)
|
'''
MFEM example 10
This example solves a time-dependent nonlinear elasticity
problem of the form dv/dt = H(x) + S v, dx/dt = v, where H is a
hyperelastic model and S is a viscosity operator of Laplacian
type.
See c++ version in the MFEM library for more detail
'''
import sys
from mfem.common.arg_parser import ArgParser
from mfem import path
import mfem.ser as mfem
from mfem.ser import intArray, add_vector, Add
from os.path import expanduser, join
import numpy as np
from numpy import sqrt, pi, cos, sin, hypot, arctan2
from scipy.special import erfc
# Command-line options; defaults mirror MFEM's C++ example 10.
parser = ArgParser(description='Ex10')
parser.add_argument('-m', '--mesh',
                    default = 'beam-quad.mesh',
                    action = 'store', type = str,
                    help='Mesh file to use.')
parser.add_argument('-r', '--refine-serial',
                    action = 'store', default = 2, type=int,
                    help = "Number of times to refine the mesh uniformly before parallel")
parser.add_argument('-o', '--order',
                    action = 'store', default = 2, type=int,
                    help = "Finite element order (polynomial degree)");
help_ode = "\n".join(["ODE solver: 1 - Backward Euler, 2 - SDIRK2, 3 - SDIRK3",
                      "\t11 - Forward Euler, 12 - RK2",
                      "\t13 - RK3 SSP, 14 - RK4."])
parser.add_argument('-s', '--ode-solver',
                    action = 'store', default = 3, type=int,
                    help = help_ode)
parser.add_argument('-tf', '--t-final',
                    action = 'store', default = 300.0, type=float,
                    help = "Final time; start time is 0.")
parser.add_argument('-dt', '--time-step',
                    action = 'store', default = 3.0, type=float,
                    help = "Time step")
parser.add_argument("-v", "--viscosity",
                    action = 'store', default = 1e-2, type=float,
                    help = "Viscosity coefficient.")
parser.add_argument("-mu", "--shear-modulus",
                    action = 'store', default = 0.25, type=float,
                    help = "Shear modulus in the Neo-Hookean hyperelastic model.")
parser.add_argument("-K", "--bulk-modulus",
                    action = 'store', default = 5.0, type=float,
                    help = "Bulk modulus in the Neo-Hookean hyperelastic model.");
parser.add_argument('-vis', '--visualization',
                    action = 'store_true', default = True,
                    help='Enable GLVis visualization')
parser.add_argument("-vs", "--visualization-steps",
                    action = 'store', default = 1, type = int,
                    help = "Visualize every n-th timestep.");
args = parser.parse_args()

# Unpack the parsed options into the short names used below.
ref_levels = args.refine_serial
order = args.order
ode_solver_type = args.ode_solver
t_final = args.t_final
dt = args.time_step
visc = args.viscosity
mu = args.shear_modulus
K = args.bulk_modulus
visualization = args.visualization
vis_steps = args.visualization_steps
parser.print_options(args)
'''
ref_levels = 2
order = 1
ode_solver_type = 3
t_final = 300.0
dt = 3
visc = 1e-2
mu = 0.25
K = 5.0
vis_steps = 1
'''
# Read the mesh shipped with PyMFEM and select the ODE time integrator.
meshfile = expanduser(join(path, 'data', args.mesh))
mesh = mfem.Mesh(meshfile, 1, 1)
dim = mesh.Dimension()

# BUGFIX: BackwardEulerSolver/ForwardEulerSolver were referenced without the
# `mfem.` prefix (NameError for solver types 1 and 11), and the failure path
# evaluated the bare name `exit` without calling it, so execution continued
# with `ode_solver` undefined. Both are fixed below.
if ode_solver_type == 1:
    ode_solver = mfem.BackwardEulerSolver()
elif ode_solver_type == 2:
    ode_solver = mfem.SDIRK23Solver(2)
elif ode_solver_type == 3:
    ode_solver = mfem.SDIRK33Solver()
elif ode_solver_type == 11:
    ode_solver = mfem.ForwardEulerSolver()
elif ode_solver_type == 12:
    ode_solver = mfem.RK2Solver(0.5)
elif ode_solver_type == 13:
    ode_solver = mfem.RK3SSPSolver()
elif ode_solver_type == 14:
    ode_solver = mfem.RK4Solver()
elif ode_solver_type == 22:
    ode_solver = mfem.ImplicitMidpointSolver()
elif ode_solver_type == 23:
    ode_solver = mfem.SDIRK23Solver()
elif ode_solver_type == 24:
    ode_solver = mfem.SDIRK34Solver()
else:
    print("Unknown ODE solver type: " + str(ode_solver_type))
    sys.exit(1)

# Refine the mesh uniformly the requested number of times.
for lev in range(ref_levels):
    mesh.UniformRefinement()
# 5. Define the vector finite element spaces representing the mesh
#    deformation x, the velocity v, and the initial configuration, x_ref.
#    Define also the elastic energy density, w, which is in a discontinuous
#    higher-order space. Since x and v are integrated in time as a system,
#    we group them together in block vector vx, with offsets given by the
#    fe_offset array.
fec = mfem.H1_FECollection(order, dim)
fespace = mfem.FiniteElementSpace(mesh, fec, dim)
fe_size = fespace.GetVSize();
print( "Number of velocity/deformation unknowns: " + str(fe_size))
fe_offset = intArray([0, fe_size, 2*fe_size])

# vx = [v; x]; v and x are GridFunction views into the two blocks (no copy).
vx = mfem.BlockVector(fe_offset)
x = mfem.GridFunction()
v = mfem.GridFunction()
v.MakeRef(fespace, vx.GetBlock(0), 0);
x.MakeRef(fespace, vx.GetBlock(1), 0);

# Reference (undeformed) configuration, taken from the mesh nodes.
x_ref = mfem.GridFunction(fespace);
mesh.GetNodes(x_ref)

# Elastic energy density lives in a discontinuous L2 space of order+1.
w_fec = mfem.L2_FECollection(order + 1, dim)
w_fespace = mfem.FiniteElementSpace(mesh, w_fec)
w = mfem.GridFunction(w_fespace);
# 6. Set the initial conditions for v and x, and the boundary conditions on
# a beam-like mesh (see description above).
class InitialVelocity(mfem.VectorPyCoefficient):
    """Initial velocity field for the beam (see MFEM ex10).

    Gives the free end a twisting motion: the last (vertical) component grows
    as s*x0^2*(8 - x0) along the beam axis while the axial component pulls
    back by -s*x0^2.
    """
    def EvalValue(self, x):
        # x is the physical coordinate of the evaluation point.
        # (Removed unused local `dim`, which shadowed the module-level name.)
        s = 0.1/64.
        v = np.zeros(len(x))
        v[-1] = s*x[0]**2*(8.0-x[0])
        v[0] = -s*x[0]**2
        return v
class InitialDeformation(mfem.VectorPyCoefficient):
    """Identity deformation: at t=0 the configuration equals the reference mesh."""
    def EvalValue(self, point):
        # Return an independent copy so the caller cannot alias the input.
        return point.copy()
# Project the initial velocity and deformation onto the FE space.
velo = InitialVelocity(dim)
v.ProjectCoefficient(velo)
deform = InitialDeformation(dim)
x.ProjectCoefficient(deform)

# Mark boundary attribute 1 (the clamped end of the beam) as essential.
ess_bdr = intArray(fespace.GetMesh().bdr_attributes.Max())
ess_bdr.Assign(0)
ess_bdr[0] = 1

# 7. Define HyperelasticOperator and initialize it
#    the initial energies.
class ElasticEnergyCoefficient(mfem.PyCoefficient):
    """Scalar coefficient evaluating the elastic energy density of `model`
    at the current deformation `x`, used to visualize the energy field."""
    def __init__(self, model, x):
        # model: hyperelastic material model; x: deformation GridFunction.
        self.x = x
        self.model = model
        self.J = mfem.DenseMatrix()  # scratch for the deformation gradient
        mfem.PyCoefficient.__init__(self)

    def Eval(self, T, ip):
        # Called by mfem at integration point `ip` of element transformation `T`;
        # the wrapper is expected to have set T to `ip` before gradient evaluation.
        self.model.SetTransformation(T)
        self.x.GetVectorGradient(T, self.J)
        # Energy per unit reference volume, mapped to the deformed configuration
        # by dividing by det(J).
        return self.model.EvalW(self.J)/(self.J.Det())
class ReducedSystemOperator(mfem.PyOperator):
    """Nonlinear operator for the reduced backward-Euler velocity equation.

    Represents k -> M k + S (v + dt k) + H(x + dt (v + dt k)); its root kv
    is the velocity update solved for in HyperelasticOperator.ImplicitSolve.
    """
    def __init__(self, M, S, H):
        mfem.PyOperator.__init__(self, M.Height())
        self.M = M
        self.S = S
        self.H = H
        self.Jacobian = None  # kept alive so the linear solver can reuse it
        h = M.Height()
        self.w = mfem.Vector(h)  # scratch: v + dt*k
        self.z = mfem.Vector(h)  # scratch: x + dt*w
        self.dt = 0.0
        self.v = None
        self.x = None

    def SetParameters(self, dt, v, x):
        # Called once per implicit step with the step size and current state.
        self.dt = dt
        self.v = v
        self.x = x

    def Mult(self, k, y):
        # y = H(x + dt*(v + dt*k)) + M*k + S*(v + dt*k)
        add_vector(self.v, self.dt, k, self.w)
        add_vector(self.x, self.dt, self.w, self.z)
        self.H.Mult(self.z, y)
        self.M.AddMult(k, y)
        self.S.AddMult(self.w, y)

    def GetGradient(self, k):
        # Jacobian = M + dt*S + dt^2 * grad H, evaluated at x + dt*(v + dt*k).
        Jacobian = Add(1.0, self.M.SpMat(), self.dt, self.S.SpMat())
        self.Jacobian = Jacobian
        add_vector(self.v, self.dt, k, self.w)
        add_vector(self.x, self.dt, self.w, self.z)
        grad_H = self.H.GetGradientMatrix(self.z)
        Jacobian.Add(self.dt**2, grad_H)
        return Jacobian;
class HyperelasticOperator(mfem.PyTimeDependentOperator):
    """Time-dependent operator for dv/dt = -M^{-1}(H(x) + S v), dx/dt = v.

    M is the mass form, S the viscosity (vector diffusion) form and H the
    Neo-Hookean hyperelastic nonlinear form. The operator acts on the block
    vector vx = [v; x].
    """
    def __init__(self, fespace, ess_bdr, visc, mu, K):
        mfem.PyTimeDependentOperator.__init__(self, 2*fespace.GetVSize(), 0.0)

        rel_tol = 1e-8
        skip_zero_entries = 0
        ref_density = 1.0  # density in the reference configuration
        self.z = mfem.Vector(self.Height()//2)  # scratch used by Mult()
        self.fespace = fespace
        self.viscosity = visc

        M = mfem.BilinearForm(fespace)
        S = mfem.BilinearForm(fespace)
        H = mfem.NonlinearForm(fespace)
        self.M = M
        self.H = H
        self.S = S

        # Mass form with essential BCs eliminated, inverted by CG + diagonal
        # smoother in the explicit update.
        rho = mfem.ConstantCoefficient(ref_density)
        M.AddDomainIntegrator(mfem.VectorMassIntegrator(rho))
        M.Assemble(skip_zero_entries)
        M.EliminateEssentialBC(ess_bdr)
        M.Finalize(skip_zero_entries)
        M_solver = mfem.CGSolver()
        M_prec = mfem.DSmoother()
        M_solver.iterative_mode = False
        M_solver.SetRelTol(rel_tol)
        M_solver.SetAbsTol(0.0)
        M_solver.SetMaxIter(30)
        M_solver.SetPrintLevel(0)
        M_solver.SetPreconditioner(M_prec)
        M_solver.SetOperator(M.SpMat())
        self.M_solver = M_solver
        self.M_prec = M_prec

        # Neo-Hookean hyperelastic nonlinear form.
        model = mfem.NeoHookeanModel(mu, K)
        H.AddDomainIntegrator(mfem.HyperelasticNLFIntegrator(model))
        H.SetEssentialBC(ess_bdr)
        self.model = model

        # Viscosity form: vector Laplacian scaled by `visc`.
        visc_coeff = mfem.ConstantCoefficient(visc)
        S.AddDomainIntegrator(mfem.VectorDiffusionIntegrator(visc_coeff))
        S.Assemble(skip_zero_entries)
        S.EliminateEssentialBC(ess_bdr)
        S.Finalize(skip_zero_entries)

        # Newton solver (MINRES on the linearized systems) for the reduced
        # backward-Euler equation -- see ImplicitSolve().
        self.reduced_oper = ReducedSystemOperator(M, S, H)
        J_prec = mfem.DSmoother(1)
        J_minres = mfem.MINRESSolver()
        J_minres.SetRelTol(rel_tol)
        J_minres.SetAbsTol(0.0)
        J_minres.SetMaxIter(300)
        J_minres.SetPrintLevel(-1)
        J_minres.SetPreconditioner(J_prec)
        self.J_solver = J_minres
        self.J_prec = J_prec
        newton_solver = mfem.NewtonSolver()
        newton_solver.iterative_mode = False
        newton_solver.SetSolver(self.J_solver)
        newton_solver.SetOperator(self.reduced_oper)
        newton_solver.SetPrintLevel(1)  # print Newton iterations
        newton_solver.SetRelTol(rel_tol)
        newton_solver.SetAbsTol(0.0)
        newton_solver.SetMaxIter(10)
        self.newton_solver = newton_solver

    def Mult(self, vx, dvx_dt):
        """Explicit RHS: fill dvx_dt = [dv/dt; dx/dt] for the state vx = [v; x].

        BUGFIX: this method previously referenced the undefined names
        `dvx_dt` (the parameter was misnamed `vx_dt`), `z`, `S` and
        `M_solver`, raising NameError whenever an explicit ODE solver
        (types 11-14) was selected. All references are now qualified.
        """
        sc = self.Height()//2
        v = mfem.Vector(vx, 0, sc)
        x = mfem.Vector(vx, sc, sc)
        dv_dt = mfem.Vector(dvx_dt, 0, sc)
        dx_dt = mfem.Vector(dvx_dt, sc, sc)
        # dv/dt = -M^{-1} (H(x) + S v)
        self.H.Mult(x, self.z)
        if self.viscosity != 0.0:
            self.S.AddMult(v, self.z)
        self.z.Neg()
        self.M_solver.Mult(self.z, dv_dt)
        # dx/dt = v. Copy in place: rebinding the name (`dx_dt = v`) would
        # not write through to the dvx_dt block vector.
        dx_dt.Assign(v)

    def ImplicitSolve(self, dt, vx, dvx_dt):
        """Solve for dvx_dt in the backward-Euler update of vx."""
        sc = self.Height()//2
        v = mfem.Vector(vx, 0, sc)
        x = mfem.Vector(vx, sc, sc)
        dv_dt = mfem.Vector(dvx_dt, 0, sc)
        dx_dt = mfem.Vector(dvx_dt, sc, sc)

        # By eliminating kx from the coupled system:
        #    kv = -M^{-1}*[H(x + dt*kx) + S*(v + dt*kv)]
        #    kx = v + dt*kv
        # we reduce it to a nonlinear equation for kv, represented by the
        # reduced_oper. This equation is solved with the newton_solver
        # object (using J_solver and J_prec internally).
        self.reduced_oper.SetParameters(dt, v, x)
        zero = mfem.Vector()  # empty vector is interpreted as
                              # zero r.h.s. by NewtonSolver
        self.newton_solver.Mult(zero, dv_dt)
        add_vector(v, dt, dv_dt, dx_dt)

    def ElasticEnergy(self, x):
        """Elastic energy of deformation x."""
        return self.H.GetEnergy(x)

    def KineticEnergy(self, v):
        """Kinetic energy 0.5 * v^T M v."""
        return 0.5*self.M.InnerProduct(v, v)

    def GetElasticEnergyDensity(self, x, w):
        """Project the elastic energy density of x onto GridFunction w."""
        w_coeff = ElasticEnergyCoefficient(self.model, x)
        w.ProjectCoefficient(w_coeff)
# Build the operator and record initial energies for the conservation report.
oper = HyperelasticOperator(fespace, ess_bdr, visc, mu, K)
ee0 = oper.ElasticEnergy(x)
ke0 = oper.KineticEnergy(v)
print("initial elastic energy (EE) = " + str(ee0))
print("initial kinetic energy (KE) = " + str(ke0))
print("initial total energy (TE) = " + str(ee0 + ke0))

# 8. Perform time-integration (looping over the time iterations, ti, with a
#    time-step dt).
ode_solver.Init(oper)
t = 0. ; ti = 1
last_step = False;
while not last_step:
    # Flag the last step so integration ends near t_final.
    if (t + dt >= t_final - dt/2): last_step = True

    t, dt = ode_solver.Step(vx, t, dt)

    # Report the energy drift every vis_steps steps (and on the last step).
    if (last_step or (ti % vis_steps) == 0):
        ee = oper.ElasticEnergy(x)
        ke = oper.KineticEnergy(v)
        text = ("step " + str(ti) + ", t = " + str(t) + ", EE = " +
                str(ee) + ", KE = " + str(ke) +
                ", dTE = " + str((ee+ke)-(ee0+ke0)))
        print(text)
    ti = ti + 1

#
# NOTE(review): when translating the C++ line-by-line, it seems the second
# node swap does not work...
#
# Temporarily swap the deformed coordinates into the mesh nodes so the
# printed mesh is the deformed one, then swap back.
nodes = x
owns_nodes = 0
nodes, owns_nodes = mesh.SwapNodes(nodes, owns_nodes)
mesh.Print('deformed.mesh', 8)
mesh.SwapNodes(nodes, owns_nodes)
v.Save('velocity.sol', 8)
oper.GetElasticEnergyDensity(x, w)
w.Save('elastic_energy.sol', 8)
|
# -*- coding: utf-8 -*-
import scrapy
from bs4 import BeautifulSoup
from spider.items import SpiderItem
class CqjlpggzyzhjySpider(scrapy.Spider):
    """Spider for the cqjlpggzyzhjy.gov.cn public-resource trading site.

    Crawls ASP.NET WebForms listing pages (paged via __VIEWSTATE postbacks)
    and yields a SpiderItem per announcement, with attachments collected
    into file_urls/file_names.
    """
    name = 'cqjlpggzyzhjy'
    allowed_domains = ['cqjlpggzyzhjy.gov.cn']

    def start_requests(self):
        # Entry listing pages: two MoreInfo categories plus the DaYi/BuYi lists.
        urls = [
            'http://www.cqjlpggzyzhjy.gov.cn/cqjl/jyxx/003001/003001001/003001001001/MoreInfo.aspx?CategoryNum=003001001001',
            'http://www.cqjlpggzyzhjy.gov.cn/cqjl/jyxx/003001/003001001/003001001002/MoreInfo.aspx?CategoryNum=003001001002',
            'http://www.cqjlpggzyzhjy.gov.cn/cqjl/ZtbWebDyProject/DaYi_List.aspx',
            'http://www.cqjlpggzyzhjy.gov.cn/cqjl/ZtbWebDyProject/BuYiAll_List.aspx'
        ]
        for url in urls:
            yield scrapy.Request(url=url, callback=self.parse_parameters)

    def parse_parameters(self, response):
        """Read the WebForms state and page count, then POST once per page."""
        soup = BeautifulSoup(response.body, 'html.parser')
        # The hidden state fields live under 'ctl00' or 'Form1' depending on
        # which of the two page templates is being parsed.
        soupCtl = soup.find(id='ctl00') or soup.find(id='Form1')
        viewstate = soupCtl.find(id='__VIEWSTATE').attrs['value']
        viewstategenerator = soupCtl.find(id='__VIEWSTATEGENERATOR').attrs['value']
        # The total page count appears in a different pager element (and <b>
        # index) on each template; exactly one of page1/page2 is non-None.
        soupPager1 = soup.find(id='MoreInfoList1_Pager')
        page1 = soupPager1 and soupPager1.find_all('b')[1].get_text()
        soupPager2 = soup.find(id='Pager')
        page2 = soupPager2 and soupPager2.find_all('b')[0].get_text()
        count_pages = int(page1 or page2)
        for page in range(0, count_pages):
            # Simulate the pager postback; __EVENTARGUMENT is 1-based.
            yield scrapy.FormRequest(url=response.url,
                formdata={'__VIEWSTATE': viewstate, '__VIEWSTATEGENERATOR': viewstategenerator,
                          '__EVENTTARGET': 'MoreInfoList1$Pager', '__EVENTARGUMENT': str(page + 1)},
                callback=self.parse_list)

    def parse_list(self, response):
        """Walk one listing page and dispatch each anchor appropriately."""
        soup = BeautifulSoup(response.body, 'html.parser')
        soup_list = soup.find(id='MoreInfoList1_tdcontent') or soup.find(id='DataGrid1')
        soup_list = soup_list.find_all('a')
        soup_type = soup.find(id='lastfont')  # category label for this listing
        for i in soup_list:
            if 'infodetail' in i.attrs['href'].lower():
                # Announcement detail page: scrape it fully.
                yield scrapy.Request(url=response.urljoin(i.attrs['href']), callback=self.parse_info)
            elif 'buyi_list' in i.attrs['href'].lower():
                # Nested listing: recurse through the pager again.
                yield scrapy.Request(url=response.urljoin(i.attrs['href']), callback=self.parse_parameters)
            else:
                # Direct file link: emit a minimal item pointing at the file.
                item = SpiderItem()
                item['category'] = soup_type.string.strip()
                item['title'] = i.string.strip()
                item['date'] = i.parent.next_sibling.string.strip().replace('-', '/')
                item['content'] = ''
                item['file_urls'] = [response.urljoin(i.attrs['href'])]
                item['file_names'] = ['test.txt']
                item['url'] = response.urljoin(i.attrs['href'])
                yield item

    def parse_info(self, response):
        """Scrape one announcement detail page into a SpiderItem."""
        item = SpiderItem()
        soup = BeautifulSoup(response.body, 'html.parser')
        soup_type = soup.find(id='lastfont')
        item['category'] = soup_type.string.strip()
        soup_title = soup.find(id='tdTitle').div
        item['title'] = soup_title.font.b.string.strip()
        # The publish date sits two siblings below the title div.
        soup_title = soup_title.next_sibling.next_sibling
        item['date'] = soup_title.get_text().split('\r\n')[1].strip()
        soup_content = soup.find(id='TDContent')
        item['content'] = soup_content.get_text()
        # Collect every attachment in the download area.
        item['file_urls'] = []
        item['file_names'] = []
        soup_files = soup.find(id='filedown').find_all('a')
        for soup_file in soup_files:
            item['file_urls'].append(response.urljoin(soup_file.attrs['href']))
            item['file_names'].append(soup_file.get_text().strip())
        item['url'] = response.url
        return item
|
from models import Jogo, Usuario
SQL_INSERI_JOGO = """
INSERT INTO jogo (nome, categoria, console)
VALUES (?, ?, ?)
"""
SQL_LISTA_JOGOS = """
SELECT * FROM jogo
"""
SQL_BUSCA_POR_ID = """
SELECT * FROM jogo
WHERE id = ?
"""
SQL_DELETA = """
DELETE FROM jogo
WHERE id = ?
"""
SQL_ATUALIZA = """
UPDATE jogo
SET
nome = ?,
categoria = ?,
console = ?
WHERE id = ?
"""
SQL_BUSCA_USUARIO_POR_NOME_SENHA = """
SELECT nome, senha FROM usuario
WHERE nome = ? AND senha = ?
LIMIT 1
"""
class JogoDao:
    """Data-access object for the ``jogo`` table."""

    def __init__(self, conexao):
        # Any DB-API 2.0 connection with '?' placeholders (e.g. sqlite3).
        self.conexao = conexao

    def insere(self, jogo):
        """Insert ``jogo`` (needs .nome, .categoria, .console) and commit."""
        self.conexao.execute(SQL_INSERI_JOGO, (jogo.nome, jogo.categoria, jogo.console))
        self.conexao.commit()

    def lista(self):
        """Return every row of ``jogo`` as a list of Jogo objects."""
        return [
            Jogo(id_jogo=row[0], nome=row[1], categoria=row[2], console=row[3])
            for row in self.conexao.execute(SQL_LISTA_JOGOS)
        ]

    def busca_por(self, id_jogo):
        """Return the Jogo with ``id_jogo``, or None when no such row exists.

        BUGFIX: previously an unknown id made ``fetchone()`` return None and
        the unconditional ``row[0]`` raised TypeError; now None is returned.
        """
        cursor = self.conexao.cursor()
        cursor.execute(SQL_BUSCA_POR_ID, (id_jogo,))
        row = cursor.fetchone()
        if row is None:
            return None
        return Jogo(id_jogo=row[0], nome=row[1], categoria=row[2], console=row[3])

    def deleta(self, id_jogo):
        """Delete the row with ``id_jogo`` and commit."""
        self.conexao.execute(SQL_DELETA, (id_jogo,))
        self.conexao.commit()

    def atualizar(self, jogo):
        """Persist the editable fields of ``jogo`` (matched by id) and commit."""
        self.conexao.execute(SQL_ATUALIZA, (jogo.nome, jogo.categoria, jogo.console, jogo.id_jogo))
        self.conexao.commit()
class UsuarioDao:
    """Data-access object for the ``usuario`` table."""

    def __init__(self, conexao):
        # Any DB-API 2.0 connection with '?' placeholders (e.g. sqlite3).
        self.conexao = conexao

    def buscar_por(self, nome, senha):
        """Return the Usuario matching ``nome``/``senha``, or None when absent.

        BUGFIX: previously a failed login made ``fetchone()`` return None and
        the unconditional ``row[0]`` raised TypeError; now None is returned so
        callers can treat a missing user as an authentication failure.
        """
        cursor = self.conexao.cursor()
        cursor.execute(SQL_BUSCA_USUARIO_POR_NOME_SENHA, (nome, senha))
        row = cursor.fetchone()
        if row is None:
            return None
        return Usuario(nome=row[0], senha=row[1])
|
from newrelic.agent import wrap_external_trace
def instrument(module):
    """Attach a New Relic external trace to thrift's TSocket.open."""
    def tsocket_open_url(socket, *args, **kwargs):
        # Unix-domain sockets are reported under a 'socket' scheme, TCP under 'http'.
        scheme = 'socket' if socket._unix_socket else 'http'
        if socket.port:
            return '{0}://{1}:{2}'.format(scheme, socket.host, socket.port)
        return '{0}://{1}'.format(scheme, socket.host)

    wrap_external_trace(module, 'TSocket.open', 'thrift', tsocket_open_url)
|
class Solution(object):
    def frequencySort(self, s):
        """Return the characters of s grouped and ordered by descending frequency.

        Counts each distinct character, then stable-sorts the groups by count
        so equally-frequent characters keep their set-iteration order.
        """
        counts = [(s.count(ch), ch) for ch in set(s)]
        counts.sort(key=lambda pair: pair[0], reverse=True)
        return ''.join(ch * n for n, ch in counts)
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: github.com/metaprov/modelaapi/services/model/v1/model.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1 import generated_pb2 as github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2
from github.com.metaprov.modelaapi.services.common.v1 import common_pb2 as github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_common_dot_v1_dot_common__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='github.com/metaprov/modelaapi/services/model/v1/model.proto',
package='github.com.metaprov.modelaapi.services.model.v1',
syntax='proto3',
serialized_options=b'Z/github.com/metaprov/modelaapi/services/model/v1',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n;github.com/metaprov/modelaapi/services/model/v1/model.proto\x12/github.com.metaprov.modelaapi.services.model.v1\x1a\x1cgoogle/api/annotations.proto\x1aHgithub.com/metaprov/modelaapi/pkg/apis/training/v1alpha1/generated.proto\x1a=github.com/metaprov/modelaapi/services/common/v1/common.proto\")\n\x1a\x43reateModelProfileResponse\x12\x0b\n\x03uri\x18\x03 \x01(\t\"<\n\x19\x43reateModelProfileRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\":\n\x17ListModelProfileRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"F\n\x16GetModelProfileRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0b\n\x03uri\x18\x03 \x01(\t\"j\n\x17GetModelProfileResponse\x12O\n\x07profile\x18\x01 \x01(\x0b\x32>.github.com.metaprov.modelaapi.services.common.v1.ModelProfile\"\xa2\x01\n\x14GetModelLogsResponse\x12]\n\x04logs\x18\x01 \x03(\x0b\x32O.github.com.metaprov.modelaapi.services.model.v1.GetModelLogsResponse.LogsEntry\x1a+\n\tLogsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"6\n\x13GetModelLogsRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"c\n\x12\x43reateModelRequest\x12M\n\x04item\x18\x01 \x01(\x0b\x32?.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.Model\"\x15\n\x13\x43reateModelResponse\"c\n\x12UpdateModelRequest\x12M\n\x04item\x18\x05 \x01(\x0b\x32?.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.Model\"\x15\n\x13UpdateModelResponse\"o\n\x10GetModelResponse\x12M\n\x04item\x18\x01 \x01(\x0b\x32?.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.Model\x12\x0c\n\x04yaml\x18\x02 \x01(\t\"4\n\x11\x41\x62ortModelRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"\x14\n\x12\x41\x62ortModelResponse\"4\n\x11PauseModelRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 
\x01(\t\"\x14\n\x12PauseModelResponse\"5\n\x12ResumeModelRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"\x15\n\x13ResumeModelResponse\"8\n\x14\x43ompareModelsRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\r\n\x05names\x18\x02 \x03(\t\"\x8b\x01\n\x15\x43ompareModelsResponse\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\r\n\x05names\x18\x02 \x03(\t\x12P\n\x08profiles\x18\x03 \x03(\x0b\x32>.github.com.metaprov.modelaapi.services.common.v1.ModelProfile\"X\n\x13\x43ompileModelRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0e\n\x06target\x18\x03 \x01(\t\x12\x10\n\x08\x63ompiler\x18\x04 \x01(\t\"8\n\x14\x43ompileModelResponse\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\r\n\x05names\x18\x02 \x01(\t\"y\n\x12\x44\x65ployModelRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x11\n\tpredictor\x18\x03 \x01(\t\x12\x10\n\x08replicas\x18\x04 \x01(\x05\x12\x0f\n\x07traffic\x18\x05 \x01(\x05\x12\x0c\n\x04role\x18\x06 \x01(\t\"6\n\x13PublishModelRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"\x15\n\x13\x44\x65ployModelResponse\"\xb5\x01\n\x11ListModelsRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12^\n\x06labels\x18\x02 \x03(\x0b\x32N.github.com.metaprov.modelaapi.services.model.v1.ListModelsRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"h\n\x12ListModelsResponse\x12R\n\x05items\x18\x01 \x01(\x0b\x32\x43.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.ModelList\"2\n\x0fGetModelRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"\x13\n\x11UpdateModelResult\"5\n\x12\x44\x65leteModelRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"\x15\n\x13\x44\x65leteModelResponse\",\n\x14PublishModelResponse\x12\x14\n\x0cmodelTarHash\x18\x01 \x01(\t\"5\n\x12GetMisclassRequest\x12\x11\n\tnamespace\x18\x01 
\x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"a\n\x13GetMisclassResponse\x12J\n\x05table\x18\x01 \x01(\x0b\x32;.github.com.metaprov.modelaapi.services.common.v1.TableView\"7\n\x14\x44ownloadModelRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"$\n\x15\x44ownloadModelResponse\x12\x0b\n\x03raw\x18\x01 \x01(\x0c\"3\n\x10\x46lagModelRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"\x13\n\x11\x46lagModelResponse\"3\n\x10TestModelRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"\x13\n\x11TestModelResponse2\x82\x1e\n\x0cModelService\x12\xb4\x01\n\nListModels\x12\x42.github.com.metaprov.modelaapi.services.model.v1.ListModelsRequest\x1a\x43.github.com.metaprov.modelaapi.services.model.v1.ListModelsResponse\"\x1d\x82\xd3\xe4\x93\x02\x17\x12\x15/v1/model/{namespace}\x12\xaf\x01\n\x0b\x43reateModel\x12\x43.github.com.metaprov.modelaapi.services.model.v1.CreateModelRequest\x1a\x44.github.com.metaprov.modelaapi.services.model.v1.CreateModelResponse\"\x15\x82\xd3\xe4\x93\x02\x0f\"\n/v1/models:\x01*\x12\xb6\x01\n\x08GetModel\x12@.github.com.metaprov.modelaapi.services.model.v1.GetModelRequest\x1a\x41.github.com.metaprov.modelaapi.services.model.v1.GetModelResponse\"%\x82\xd3\xe4\x93\x02\x1f\x12\x1d/v1/models/{namespace}/{name}\x12\xde\x01\n\x0bUpdateModel\x12\x43.github.com.metaprov.modelaapi.services.model.v1.UpdateModelRequest\x1a\x44.github.com.metaprov.modelaapi.services.model.v1.UpdateModelResponse\"D\x82\xd3\xe4\x93\x02>\x1a\x39/v1/models/{item.metadata.namespace}/{item.metadata.name}:\x01*\x12\xbf\x01\n\x0b\x44\x65leteModel\x12\x43.github.com.metaprov.modelaapi.services.model.v1.DeleteModelRequest\x1a\x44.github.com.metaprov.modelaapi.services.model.v1.DeleteModelResponse\"%\x82\xd3\xe4\x93\x02\x1f*\x1d/v1/models/{namespace}/{name}\x12\xc9\x01\n\x0b\x44\x65ployModel\x12\x43.github.com.metaprov.modelaapi.services.model.v1.DeployModelRequest\x1a\x44.github.com.metaprov.modelaapi.services.
model.v1.DeployModelResponse\"/\x82\xd3\xe4\x93\x02)\"$/v1/models/{namespace}/{name}:deploy:\x01*\x12\xcd\x01\n\x0cPublishModel\x12\x44.github.com.metaprov.modelaapi.services.model.v1.PublishModelRequest\x1a\x45.github.com.metaprov.modelaapi.services.model.v1.PublishModelResponse\"0\x82\xd3\xe4\x93\x02*\"%/v1/models/{namespace}/{name}:publish:\x01*\x12\xdc\x01\n\x12\x43reateModelProfile\x12J.github.com.metaprov.modelaapi.services.model.v1.CreateModelProfileRequest\x1aK.github.com.metaprov.modelaapi.services.model.v1.CreateModelProfileResponse\"-\x82\xd3\xe4\x93\x02\'\"%/v1/models/{namespace}/{name}:profile\x12\xd3\x01\n\x0fGetModelProfile\x12G.github.com.metaprov.modelaapi.services.model.v1.GetModelProfileRequest\x1aH.github.com.metaprov.modelaapi.services.model.v1.GetModelProfileResponse\"-\x82\xd3\xe4\x93\x02\'\"%/v1/models/{namespace}/{name}:profile\x12\xcd\x01\n\x10GetModelMisclass\x12\x43.github.com.metaprov.modelaapi.services.model.v1.GetMisclassRequest\x1a\x44.github.com.metaprov.modelaapi.services.model.v1.GetMisclassResponse\".\x82\xd3\xe4\x93\x02(\x12&/v1/models/{namespace}/{name}:misclass\x12\xc7\x01\n\x0cGetModelLogs\x12\x44.github.com.metaprov.modelaapi.services.model.v1.GetModelLogsRequest\x1a\x45.github.com.metaprov.modelaapi.services.model.v1.GetModelLogsResponse\"*\x82\xd3\xe4\x93\x02$\x12\"/v1/models/{namespace}/{name}:logs\x12\xc2\x01\n\nAbortModel\x12\x42.github.com.metaprov.modelaapi.services.model.v1.AbortModelRequest\x1a\x43.github.com.metaprov.modelaapi.services.model.v1.AbortModelResponse\"+\x82\xd3\xe4\x93\x02%\"#/v1/models/{namespace}/{name}:abort\x12\xc2\x01\n\nPauseModel\x12\x42.github.com.metaprov.modelaapi.services.model.v1.PauseModelRequest\x1a\x43.github.com.metaprov.modelaapi.services.model.v1.PauseModelResponse\"+\x82\xd3\xe4\x93\x02%\"#/v1/models/{namespace}/{name}:pause\x12\xc6\x01\n\x0bResumeModel\x12\x43.github.com.metaprov.modelaapi.services.model.v1.ResumeModelRequest\x1a\x44.github.com.metaprov.modelaapi.services.model.v1.Re
sumeModelResponse\",\x82\xd3\xe4\x93\x02&\"$/v1/models/{namespace}/{name}:resume\x12\xce\x01\n\rCompareModels\x12\x45.github.com.metaprov.modelaapi.services.model.v1.CompareModelsRequest\x1a\x46.github.com.metaprov.modelaapi.services.model.v1.CompareModelsResponse\".\x82\xd3\xe4\x93\x02(\"&/v1/models/{namespace}/{names}:compare\x12\xca\x01\n\x0c\x43ompileModel\x12\x44.github.com.metaprov.modelaapi.services.model.v1.CompileModelRequest\x1a\x45.github.com.metaprov.modelaapi.services.model.v1.CompileModelResponse\"-\x82\xd3\xe4\x93\x02\'\"%/v1/models/{namespace}/{name}:compile\x12\xce\x01\n\rDownloadModel\x12\x45.github.com.metaprov.modelaapi.services.model.v1.DownloadModelRequest\x1a\x46.github.com.metaprov.modelaapi.services.model.v1.DownloadModelResponse\".\x82\xd3\xe4\x93\x02(\"&/v1/models/{namespace}/{name}:download\x12\xbe\x01\n\tFlagModel\x12\x41.github.com.metaprov.modelaapi.services.model.v1.FlagModelRequest\x1a\x42.github.com.metaprov.modelaapi.services.model.v1.FlagModelResponse\"*\x82\xd3\xe4\x93\x02$\"\"/v1/models/{namespace}/{name}:flag\x12\xbe\x01\n\tTestModel\x12\x41.github.com.metaprov.modelaapi.services.model.v1.TestModelRequest\x1a\x42.github.com.metaprov.modelaapi.services.model.v1.TestModelResponse\"*\x82\xd3\xe4\x93\x02$\"\"/v1/models/{namespace}/{name}:testB1Z/github.com/metaprov/modelaapi/services/model/v1b\x06proto3'
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2.DESCRIPTOR,github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_common_dot_v1_dot_common__pb2.DESCRIPTOR,])
# Generated descriptor for CreateModelProfileResponse; machine-written, do not hand-edit.
_CREATEMODELPROFILERESPONSE = _descriptor.Descriptor(
  name='CreateModelProfileResponse',
  full_name='github.com.metaprov.modelaapi.services.model.v1.CreateModelProfileResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='uri', full_name='github.com.metaprov.modelaapi.services.model.v1.CreateModelProfileResponse.uri', index=0,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=279,
  serialized_end=320,
)
# Generated descriptor for CreateModelProfileRequest; machine-written, do not hand-edit.
_CREATEMODELPROFILEREQUEST = _descriptor.Descriptor(
  name='CreateModelProfileRequest',
  full_name='github.com.metaprov.modelaapi.services.model.v1.CreateModelProfileRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='namespace', full_name='github.com.metaprov.modelaapi.services.model.v1.CreateModelProfileRequest.namespace', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='name', full_name='github.com.metaprov.modelaapi.services.model.v1.CreateModelProfileRequest.name', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=322,
  serialized_end=382,
)
# Generated descriptor for ListModelProfileRequest; machine-written, do not hand-edit.
_LISTMODELPROFILEREQUEST = _descriptor.Descriptor(
  name='ListModelProfileRequest',
  full_name='github.com.metaprov.modelaapi.services.model.v1.ListModelProfileRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='namespace', full_name='github.com.metaprov.modelaapi.services.model.v1.ListModelProfileRequest.namespace', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='name', full_name='github.com.metaprov.modelaapi.services.model.v1.ListModelProfileRequest.name', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=384,
  serialized_end=442,
)
# Generated descriptor for GetModelProfileRequest; machine-written, do not hand-edit.
_GETMODELPROFILEREQUEST = _descriptor.Descriptor(
  name='GetModelProfileRequest',
  full_name='github.com.metaprov.modelaapi.services.model.v1.GetModelProfileRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='namespace', full_name='github.com.metaprov.modelaapi.services.model.v1.GetModelProfileRequest.namespace', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='name', full_name='github.com.metaprov.modelaapi.services.model.v1.GetModelProfileRequest.name', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='uri', full_name='github.com.metaprov.modelaapi.services.model.v1.GetModelProfileRequest.uri', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=444,
  serialized_end=514,
)
# Generated descriptor for GetModelProfileResponse; machine-written, do not hand-edit.
_GETMODELPROFILERESPONSE = _descriptor.Descriptor(
  name='GetModelProfileResponse',
  full_name='github.com.metaprov.modelaapi.services.model.v1.GetModelProfileResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='profile', full_name='github.com.metaprov.modelaapi.services.model.v1.GetModelProfileResponse.profile', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=516,
  serialized_end=622,
)
# Generated descriptor for the GetModelLogsResponse.LogsEntry map entry; machine-written, do not hand-edit.
_GETMODELLOGSRESPONSE_LOGSENTRY = _descriptor.Descriptor(
  name='LogsEntry',
  full_name='github.com.metaprov.modelaapi.services.model.v1.GetModelLogsResponse.LogsEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='github.com.metaprov.modelaapi.services.model.v1.GetModelLogsResponse.LogsEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='value', full_name='github.com.metaprov.modelaapi.services.model.v1.GetModelLogsResponse.LogsEntry.value', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=b'8\001',
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=744,
  serialized_end=787,
)
# Generated descriptor for GetModelLogsResponse; machine-written, do not hand-edit.
_GETMODELLOGSRESPONSE = _descriptor.Descriptor(
  name='GetModelLogsResponse',
  full_name='github.com.metaprov.modelaapi.services.model.v1.GetModelLogsResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='logs', full_name='github.com.metaprov.modelaapi.services.model.v1.GetModelLogsResponse.logs', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[_GETMODELLOGSRESPONSE_LOGSENTRY, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=625,
  serialized_end=787,
)
# Generated descriptor for GetModelLogsRequest; machine-written, do not hand-edit.
_GETMODELLOGSREQUEST = _descriptor.Descriptor(
  name='GetModelLogsRequest',
  full_name='github.com.metaprov.modelaapi.services.model.v1.GetModelLogsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='namespace', full_name='github.com.metaprov.modelaapi.services.model.v1.GetModelLogsRequest.namespace', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='name', full_name='github.com.metaprov.modelaapi.services.model.v1.GetModelLogsRequest.name', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=789,
  serialized_end=843,
)
# Generated descriptor for CreateModelRequest; machine-written, do not hand-edit.
_CREATEMODELREQUEST = _descriptor.Descriptor(
  name='CreateModelRequest',
  full_name='github.com.metaprov.modelaapi.services.model.v1.CreateModelRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='item', full_name='github.com.metaprov.modelaapi.services.model.v1.CreateModelRequest.item', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=845,
  serialized_end=944,
)
# Generated descriptor for the (empty) CreateModelResponse; machine-written, do not hand-edit.
_CREATEMODELRESPONSE = _descriptor.Descriptor(
  name='CreateModelResponse',
  full_name='github.com.metaprov.modelaapi.services.model.v1.CreateModelResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=946,
  serialized_end=967,
)
# Generated descriptor for UpdateModelRequest; machine-written, do not hand-edit.
_UPDATEMODELREQUEST = _descriptor.Descriptor(
  name='UpdateModelRequest',
  full_name='github.com.metaprov.modelaapi.services.model.v1.UpdateModelRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='item', full_name='github.com.metaprov.modelaapi.services.model.v1.UpdateModelRequest.item', index=0,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=969,
  serialized_end=1068,
)
# Generated descriptor for the (empty) UpdateModelResponse; machine-written, do not hand-edit.
_UPDATEMODELRESPONSE = _descriptor.Descriptor(
  name='UpdateModelResponse',
  full_name='github.com.metaprov.modelaapi.services.model.v1.UpdateModelResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1070,
  serialized_end=1091,
)
# Generated descriptor for GetModelResponse; machine-written, do not hand-edit.
_GETMODELRESPONSE = _descriptor.Descriptor(
  name='GetModelResponse',
  full_name='github.com.metaprov.modelaapi.services.model.v1.GetModelResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='item', full_name='github.com.metaprov.modelaapi.services.model.v1.GetModelResponse.item', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='yaml', full_name='github.com.metaprov.modelaapi.services.model.v1.GetModelResponse.yaml', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1093,
  serialized_end=1204,
)
# Generated descriptor for AbortModelRequest; machine-written, do not hand-edit.
_ABORTMODELREQUEST = _descriptor.Descriptor(
  name='AbortModelRequest',
  full_name='github.com.metaprov.modelaapi.services.model.v1.AbortModelRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='namespace', full_name='github.com.metaprov.modelaapi.services.model.v1.AbortModelRequest.namespace', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='name', full_name='github.com.metaprov.modelaapi.services.model.v1.AbortModelRequest.name', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1206,
  serialized_end=1258,
)
# Generated descriptor for the (empty) AbortModelResponse; machine-written, do not hand-edit.
_ABORTMODELRESPONSE = _descriptor.Descriptor(
  name='AbortModelResponse',
  full_name='github.com.metaprov.modelaapi.services.model.v1.AbortModelResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1260,
  serialized_end=1280,
)
# Generated descriptor for PauseModelRequest; machine-written, do not hand-edit.
_PAUSEMODELREQUEST = _descriptor.Descriptor(
  name='PauseModelRequest',
  full_name='github.com.metaprov.modelaapi.services.model.v1.PauseModelRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='namespace', full_name='github.com.metaprov.modelaapi.services.model.v1.PauseModelRequest.namespace', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='name', full_name='github.com.metaprov.modelaapi.services.model.v1.PauseModelRequest.name', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1282,
  serialized_end=1334,
)
# Generated descriptor for the (empty) PauseModelResponse; machine-written, do not hand-edit.
_PAUSEMODELRESPONSE = _descriptor.Descriptor(
  name='PauseModelResponse',
  full_name='github.com.metaprov.modelaapi.services.model.v1.PauseModelResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1336,
  serialized_end=1356,
)
# Generated descriptor for ResumeModelRequest; machine-written, do not hand-edit.
_RESUMEMODELREQUEST = _descriptor.Descriptor(
  name='ResumeModelRequest',
  full_name='github.com.metaprov.modelaapi.services.model.v1.ResumeModelRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='namespace', full_name='github.com.metaprov.modelaapi.services.model.v1.ResumeModelRequest.namespace', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='name', full_name='github.com.metaprov.modelaapi.services.model.v1.ResumeModelRequest.name', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1358,
  serialized_end=1411,
)
# Generated descriptor for the (empty) ResumeModelResponse; machine-written, do not hand-edit.
_RESUMEMODELRESPONSE = _descriptor.Descriptor(
  name='ResumeModelResponse',
  full_name='github.com.metaprov.modelaapi.services.model.v1.ResumeModelResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1413,
  serialized_end=1434,
)
# Generated descriptor for CompareModelsRequest; machine-written, do not hand-edit.
_COMPAREMODELSREQUEST = _descriptor.Descriptor(
  name='CompareModelsRequest',
  full_name='github.com.metaprov.modelaapi.services.model.v1.CompareModelsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='namespace', full_name='github.com.metaprov.modelaapi.services.model.v1.CompareModelsRequest.namespace', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='names', full_name='github.com.metaprov.modelaapi.services.model.v1.CompareModelsRequest.names', index=1,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1436,
  serialized_end=1492,
)
# Generated descriptor for CompareModelsResponse; machine-written, do not hand-edit.
_COMPAREMODELSRESPONSE = _descriptor.Descriptor(
  name='CompareModelsResponse',
  full_name='github.com.metaprov.modelaapi.services.model.v1.CompareModelsResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='namespace', full_name='github.com.metaprov.modelaapi.services.model.v1.CompareModelsResponse.namespace', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='names', full_name='github.com.metaprov.modelaapi.services.model.v1.CompareModelsResponse.names', index=1,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='profiles', full_name='github.com.metaprov.modelaapi.services.model.v1.CompareModelsResponse.profiles', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1495,
  serialized_end=1634,
)
_COMPILEMODELREQUEST = _descriptor.Descriptor(
name='CompileModelRequest',
full_name='github.com.metaprov.modelaapi.services.model.v1.CompileModelRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='github.com.metaprov.modelaapi.services.model.v1.CompileModelRequest.namespace', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='github.com.metaprov.modelaapi.services.model.v1.CompileModelRequest.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='target', full_name='github.com.metaprov.modelaapi.services.model.v1.CompileModelRequest.target', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='compiler', full_name='github.com.metaprov.modelaapi.services.model.v1.CompileModelRequest.compiler', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1636,
serialized_end=1724,
)
_COMPILEMODELRESPONSE = _descriptor.Descriptor(
name='CompileModelResponse',
full_name='github.com.metaprov.modelaapi.services.model.v1.CompileModelResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='github.com.metaprov.modelaapi.services.model.v1.CompileModelResponse.namespace', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='names', full_name='github.com.metaprov.modelaapi.services.model.v1.CompileModelResponse.names', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1726,
serialized_end=1782,
)
_DEPLOYMODELREQUEST = _descriptor.Descriptor(
name='DeployModelRequest',
full_name='github.com.metaprov.modelaapi.services.model.v1.DeployModelRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='github.com.metaprov.modelaapi.services.model.v1.DeployModelRequest.namespace', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='github.com.metaprov.modelaapi.services.model.v1.DeployModelRequest.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='predictor', full_name='github.com.metaprov.modelaapi.services.model.v1.DeployModelRequest.predictor', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='replicas', full_name='github.com.metaprov.modelaapi.services.model.v1.DeployModelRequest.replicas', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='traffic', full_name='github.com.metaprov.modelaapi.services.model.v1.DeployModelRequest.traffic', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='role', full_name='github.com.metaprov.modelaapi.services.model.v1.DeployModelRequest.role', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1784,
serialized_end=1905,
)
_PUBLISHMODELREQUEST = _descriptor.Descriptor(
name='PublishModelRequest',
full_name='github.com.metaprov.modelaapi.services.model.v1.PublishModelRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='github.com.metaprov.modelaapi.services.model.v1.PublishModelRequest.namespace', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='github.com.metaprov.modelaapi.services.model.v1.PublishModelRequest.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1907,
serialized_end=1961,
)
_DEPLOYMODELRESPONSE = _descriptor.Descriptor(
name='DeployModelResponse',
full_name='github.com.metaprov.modelaapi.services.model.v1.DeployModelResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1963,
serialized_end=1984,
)
# Generated descriptors (protoc output -- do not hand-edit).
# ListModelsRequest.LabelsEntry: synthetic key/value message backing the
# proto3 map<string, string> `labels` field; serialized_options=b'8\001'
# is the map_entry=true option encoded in wire format.
_LISTMODELSREQUEST_LABELSENTRY = _descriptor.Descriptor(
name='LabelsEntry',
full_name='github.com.metaprov.modelaapi.services.model.v1.ListModelsRequest.LabelsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='github.com.metaprov.modelaapi.services.model.v1.ListModelsRequest.LabelsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='github.com.metaprov.modelaapi.services.model.v1.ListModelsRequest.LabelsEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2123,
serialized_end=2168,
)
# ListModelsRequest: namespace (string), labels (map represented as a
# repeated LabelsEntry nested message).
_LISTMODELSREQUEST = _descriptor.Descriptor(
name='ListModelsRequest',
full_name='github.com.metaprov.modelaapi.services.model.v1.ListModelsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='github.com.metaprov.modelaapi.services.model.v1.ListModelsRequest.namespace', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='labels', full_name='github.com.metaprov.modelaapi.services.model.v1.ListModelsRequest.labels', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_LISTMODELSREQUEST_LABELSENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1987,
serialized_end=2168,
)
# ListModelsResponse: items (singular message, linked later to ModelList).
_LISTMODELSRESPONSE = _descriptor.Descriptor(
name='ListModelsResponse',
full_name='github.com.metaprov.modelaapi.services.model.v1.ListModelsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='items', full_name='github.com.metaprov.modelaapi.services.model.v1.ListModelsResponse.items', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2170,
serialized_end=2274,
)
# GetModelRequest: namespace, name (singular strings).
_GETMODELREQUEST = _descriptor.Descriptor(
name='GetModelRequest',
full_name='github.com.metaprov.modelaapi.services.model.v1.GetModelRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='github.com.metaprov.modelaapi.services.model.v1.GetModelRequest.namespace', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='github.com.metaprov.modelaapi.services.model.v1.GetModelRequest.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2276,
serialized_end=2326,
)
# Generated descriptors (protoc output -- do not hand-edit).
# UpdateModelResult: empty message (no fields).
_UPDATEMODELRESULT = _descriptor.Descriptor(
name='UpdateModelResult',
full_name='github.com.metaprov.modelaapi.services.model.v1.UpdateModelResult',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2328,
serialized_end=2347,
)
# DeleteModelRequest: namespace, name (singular strings).
_DELETEMODELREQUEST = _descriptor.Descriptor(
name='DeleteModelRequest',
full_name='github.com.metaprov.modelaapi.services.model.v1.DeleteModelRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='github.com.metaprov.modelaapi.services.model.v1.DeleteModelRequest.namespace', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='github.com.metaprov.modelaapi.services.model.v1.DeleteModelRequest.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2349,
serialized_end=2402,
)
# DeleteModelResponse: empty message (no fields).
_DELETEMODELRESPONSE = _descriptor.Descriptor(
name='DeleteModelResponse',
full_name='github.com.metaprov.modelaapi.services.model.v1.DeleteModelResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2404,
serialized_end=2425,
)
# PublishModelResponse: modelTarHash (singular string).
_PUBLISHMODELRESPONSE = _descriptor.Descriptor(
name='PublishModelResponse',
full_name='github.com.metaprov.modelaapi.services.model.v1.PublishModelResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='modelTarHash', full_name='github.com.metaprov.modelaapi.services.model.v1.PublishModelResponse.modelTarHash', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2427,
serialized_end=2471,
)
# Generated descriptors (protoc output -- do not hand-edit).
# GetMisclassRequest: namespace, name (singular strings).
_GETMISCLASSREQUEST = _descriptor.Descriptor(
name='GetMisclassRequest',
full_name='github.com.metaprov.modelaapi.services.model.v1.GetMisclassRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='github.com.metaprov.modelaapi.services.model.v1.GetMisclassRequest.namespace', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='github.com.metaprov.modelaapi.services.model.v1.GetMisclassRequest.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2473,
serialized_end=2526,
)
# GetMisclassResponse: table (singular message, linked later to
# common.v1 TableView).
_GETMISCLASSRESPONSE = _descriptor.Descriptor(
name='GetMisclassResponse',
full_name='github.com.metaprov.modelaapi.services.model.v1.GetMisclassResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='table', full_name='github.com.metaprov.modelaapi.services.model.v1.GetMisclassResponse.table', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2528,
serialized_end=2625,
)
# DownloadModelRequest: namespace, name (singular strings).
_DOWNLOADMODELREQUEST = _descriptor.Descriptor(
name='DownloadModelRequest',
full_name='github.com.metaprov.modelaapi.services.model.v1.DownloadModelRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='github.com.metaprov.modelaapi.services.model.v1.DownloadModelRequest.namespace', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='github.com.metaprov.modelaapi.services.model.v1.DownloadModelRequest.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2627,
serialized_end=2682,
)
# DownloadModelResponse: raw (bytes, type=12).
_DOWNLOADMODELRESPONSE = _descriptor.Descriptor(
name='DownloadModelResponse',
full_name='github.com.metaprov.modelaapi.services.model.v1.DownloadModelResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='raw', full_name='github.com.metaprov.modelaapi.services.model.v1.DownloadModelResponse.raw', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2684,
serialized_end=2720,
)
# Generated descriptors (protoc output -- do not hand-edit).
# FlagModelRequest: namespace, name (singular strings).
_FLAGMODELREQUEST = _descriptor.Descriptor(
name='FlagModelRequest',
full_name='github.com.metaprov.modelaapi.services.model.v1.FlagModelRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='github.com.metaprov.modelaapi.services.model.v1.FlagModelRequest.namespace', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='github.com.metaprov.modelaapi.services.model.v1.FlagModelRequest.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2722,
serialized_end=2773,
)
# FlagModelResponse: empty message (no fields).
_FLAGMODELRESPONSE = _descriptor.Descriptor(
name='FlagModelResponse',
full_name='github.com.metaprov.modelaapi.services.model.v1.FlagModelResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2775,
serialized_end=2794,
)
# TestModelRequest: namespace, name (singular strings).
_TESTMODELREQUEST = _descriptor.Descriptor(
name='TestModelRequest',
full_name='github.com.metaprov.modelaapi.services.model.v1.TestModelRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='github.com.metaprov.modelaapi.services.model.v1.TestModelRequest.namespace', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='github.com.metaprov.modelaapi.services.model.v1.TestModelRequest.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2796,
serialized_end=2847,
)
# TestModelResponse: empty message (no fields).
_TESTMODELRESPONSE = _descriptor.Descriptor(
name='TestModelResponse',
full_name='github.com.metaprov.modelaapi.services.model.v1.TestModelResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2849,
serialized_end=2868,
)
# Descriptor wiring (protoc output -- do not hand-edit). Message-typed fields
# get their message_type resolved here, since the target descriptors (some of
# them imported from other generated modules) only exist after all Descriptor
# objects above are constructed. Nested map-entry descriptors also get their
# containing_type set here.
_GETMODELPROFILERESPONSE.fields_by_name['profile'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_common_dot_v1_dot_common__pb2._MODELPROFILE
_GETMODELLOGSRESPONSE_LOGSENTRY.containing_type = _GETMODELLOGSRESPONSE
_GETMODELLOGSRESPONSE.fields_by_name['logs'].message_type = _GETMODELLOGSRESPONSE_LOGSENTRY
_CREATEMODELREQUEST.fields_by_name['item'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._MODEL
_UPDATEMODELREQUEST.fields_by_name['item'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._MODEL
_GETMODELRESPONSE.fields_by_name['item'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._MODEL
_COMPAREMODELSRESPONSE.fields_by_name['profiles'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_common_dot_v1_dot_common__pb2._MODELPROFILE
_LISTMODELSREQUEST_LABELSENTRY.containing_type = _LISTMODELSREQUEST
_LISTMODELSREQUEST.fields_by_name['labels'].message_type = _LISTMODELSREQUEST_LABELSENTRY
_LISTMODELSRESPONSE.fields_by_name['items'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._MODELLIST
_GETMISCLASSRESPONSE.fields_by_name['table'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_services_dot_common_dot_v1_dot_common__pb2._TABLEVIEW
# Register every message descriptor with the file descriptor by proto message
# name, then register the file descriptor with the default symbol database.
DESCRIPTOR.message_types_by_name['CreateModelProfileResponse'] = _CREATEMODELPROFILERESPONSE
DESCRIPTOR.message_types_by_name['CreateModelProfileRequest'] = _CREATEMODELPROFILEREQUEST
DESCRIPTOR.message_types_by_name['ListModelProfileRequest'] = _LISTMODELPROFILEREQUEST
DESCRIPTOR.message_types_by_name['GetModelProfileRequest'] = _GETMODELPROFILEREQUEST
DESCRIPTOR.message_types_by_name['GetModelProfileResponse'] = _GETMODELPROFILERESPONSE
DESCRIPTOR.message_types_by_name['GetModelLogsResponse'] = _GETMODELLOGSRESPONSE
DESCRIPTOR.message_types_by_name['GetModelLogsRequest'] = _GETMODELLOGSREQUEST
DESCRIPTOR.message_types_by_name['CreateModelRequest'] = _CREATEMODELREQUEST
DESCRIPTOR.message_types_by_name['CreateModelResponse'] = _CREATEMODELRESPONSE
DESCRIPTOR.message_types_by_name['UpdateModelRequest'] = _UPDATEMODELREQUEST
DESCRIPTOR.message_types_by_name['UpdateModelResponse'] = _UPDATEMODELRESPONSE
DESCRIPTOR.message_types_by_name['GetModelResponse'] = _GETMODELRESPONSE
DESCRIPTOR.message_types_by_name['AbortModelRequest'] = _ABORTMODELREQUEST
DESCRIPTOR.message_types_by_name['AbortModelResponse'] = _ABORTMODELRESPONSE
DESCRIPTOR.message_types_by_name['PauseModelRequest'] = _PAUSEMODELREQUEST
DESCRIPTOR.message_types_by_name['PauseModelResponse'] = _PAUSEMODELRESPONSE
DESCRIPTOR.message_types_by_name['ResumeModelRequest'] = _RESUMEMODELREQUEST
DESCRIPTOR.message_types_by_name['ResumeModelResponse'] = _RESUMEMODELRESPONSE
DESCRIPTOR.message_types_by_name['CompareModelsRequest'] = _COMPAREMODELSREQUEST
DESCRIPTOR.message_types_by_name['CompareModelsResponse'] = _COMPAREMODELSRESPONSE
DESCRIPTOR.message_types_by_name['CompileModelRequest'] = _COMPILEMODELREQUEST
DESCRIPTOR.message_types_by_name['CompileModelResponse'] = _COMPILEMODELRESPONSE
DESCRIPTOR.message_types_by_name['DeployModelRequest'] = _DEPLOYMODELREQUEST
DESCRIPTOR.message_types_by_name['PublishModelRequest'] = _PUBLISHMODELREQUEST
DESCRIPTOR.message_types_by_name['DeployModelResponse'] = _DEPLOYMODELRESPONSE
DESCRIPTOR.message_types_by_name['ListModelsRequest'] = _LISTMODELSREQUEST
DESCRIPTOR.message_types_by_name['ListModelsResponse'] = _LISTMODELSRESPONSE
DESCRIPTOR.message_types_by_name['GetModelRequest'] = _GETMODELREQUEST
DESCRIPTOR.message_types_by_name['UpdateModelResult'] = _UPDATEMODELRESULT
DESCRIPTOR.message_types_by_name['DeleteModelRequest'] = _DELETEMODELREQUEST
DESCRIPTOR.message_types_by_name['DeleteModelResponse'] = _DELETEMODELRESPONSE
DESCRIPTOR.message_types_by_name['PublishModelResponse'] = _PUBLISHMODELRESPONSE
DESCRIPTOR.message_types_by_name['GetMisclassRequest'] = _GETMISCLASSREQUEST
DESCRIPTOR.message_types_by_name['GetMisclassResponse'] = _GETMISCLASSRESPONSE
DESCRIPTOR.message_types_by_name['DownloadModelRequest'] = _DOWNLOADMODELREQUEST
DESCRIPTOR.message_types_by_name['DownloadModelResponse'] = _DOWNLOADMODELRESPONSE
DESCRIPTOR.message_types_by_name['FlagModelRequest'] = _FLAGMODELREQUEST
DESCRIPTOR.message_types_by_name['FlagModelResponse'] = _FLAGMODELRESPONSE
DESCRIPTOR.message_types_by_name['TestModelRequest'] = _TESTMODELREQUEST
DESCRIPTOR.message_types_by_name['TestModelResponse'] = _TESTMODELRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Concrete message classes (protoc output -- do not hand-edit). Each class is
# synthesized at import time from its descriptor via GeneratedProtocolMessageType
# and registered with the default symbol database; these are the public names
# users of this module import.
CreateModelProfileResponse = _reflection.GeneratedProtocolMessageType('CreateModelProfileResponse', (_message.Message,), {
'DESCRIPTOR' : _CREATEMODELPROFILERESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.CreateModelProfileResponse)
})
_sym_db.RegisterMessage(CreateModelProfileResponse)
CreateModelProfileRequest = _reflection.GeneratedProtocolMessageType('CreateModelProfileRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATEMODELPROFILEREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.CreateModelProfileRequest)
})
_sym_db.RegisterMessage(CreateModelProfileRequest)
ListModelProfileRequest = _reflection.GeneratedProtocolMessageType('ListModelProfileRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTMODELPROFILEREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.ListModelProfileRequest)
})
_sym_db.RegisterMessage(ListModelProfileRequest)
GetModelProfileRequest = _reflection.GeneratedProtocolMessageType('GetModelProfileRequest', (_message.Message,), {
'DESCRIPTOR' : _GETMODELPROFILEREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.GetModelProfileRequest)
})
_sym_db.RegisterMessage(GetModelProfileRequest)
GetModelProfileResponse = _reflection.GeneratedProtocolMessageType('GetModelProfileResponse', (_message.Message,), {
'DESCRIPTOR' : _GETMODELPROFILERESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.GetModelProfileResponse)
})
_sym_db.RegisterMessage(GetModelProfileResponse)
# GetModelLogsResponse carries a nested LogsEntry class (the synthetic
# key/value message behind its map field); both are registered below.
GetModelLogsResponse = _reflection.GeneratedProtocolMessageType('GetModelLogsResponse', (_message.Message,), {
'LogsEntry' : _reflection.GeneratedProtocolMessageType('LogsEntry', (_message.Message,), {
'DESCRIPTOR' : _GETMODELLOGSRESPONSE_LOGSENTRY,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.GetModelLogsResponse.LogsEntry)
})
,
'DESCRIPTOR' : _GETMODELLOGSRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.GetModelLogsResponse)
})
_sym_db.RegisterMessage(GetModelLogsResponse)
_sym_db.RegisterMessage(GetModelLogsResponse.LogsEntry)
GetModelLogsRequest = _reflection.GeneratedProtocolMessageType('GetModelLogsRequest', (_message.Message,), {
'DESCRIPTOR' : _GETMODELLOGSREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.GetModelLogsRequest)
})
_sym_db.RegisterMessage(GetModelLogsRequest)
CreateModelRequest = _reflection.GeneratedProtocolMessageType('CreateModelRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATEMODELREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.CreateModelRequest)
})
_sym_db.RegisterMessage(CreateModelRequest)
CreateModelResponse = _reflection.GeneratedProtocolMessageType('CreateModelResponse', (_message.Message,), {
'DESCRIPTOR' : _CREATEMODELRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.CreateModelResponse)
})
_sym_db.RegisterMessage(CreateModelResponse)
UpdateModelRequest = _reflection.GeneratedProtocolMessageType('UpdateModelRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATEMODELREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.UpdateModelRequest)
})
_sym_db.RegisterMessage(UpdateModelRequest)
UpdateModelResponse = _reflection.GeneratedProtocolMessageType('UpdateModelResponse', (_message.Message,), {
'DESCRIPTOR' : _UPDATEMODELRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.UpdateModelResponse)
})
_sym_db.RegisterMessage(UpdateModelResponse)
GetModelResponse = _reflection.GeneratedProtocolMessageType('GetModelResponse', (_message.Message,), {
'DESCRIPTOR' : _GETMODELRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.GetModelResponse)
})
_sym_db.RegisterMessage(GetModelResponse)
AbortModelRequest = _reflection.GeneratedProtocolMessageType('AbortModelRequest', (_message.Message,), {
'DESCRIPTOR' : _ABORTMODELREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.AbortModelRequest)
})
_sym_db.RegisterMessage(AbortModelRequest)
AbortModelResponse = _reflection.GeneratedProtocolMessageType('AbortModelResponse', (_message.Message,), {
'DESCRIPTOR' : _ABORTMODELRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.AbortModelResponse)
})
_sym_db.RegisterMessage(AbortModelResponse)
PauseModelRequest = _reflection.GeneratedProtocolMessageType('PauseModelRequest', (_message.Message,), {
'DESCRIPTOR' : _PAUSEMODELREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.PauseModelRequest)
})
_sym_db.RegisterMessage(PauseModelRequest)
PauseModelResponse = _reflection.GeneratedProtocolMessageType('PauseModelResponse', (_message.Message,), {
'DESCRIPTOR' : _PAUSEMODELRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.PauseModelResponse)
})
_sym_db.RegisterMessage(PauseModelResponse)
ResumeModelRequest = _reflection.GeneratedProtocolMessageType('ResumeModelRequest', (_message.Message,), {
'DESCRIPTOR' : _RESUMEMODELREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.ResumeModelRequest)
})
_sym_db.RegisterMessage(ResumeModelRequest)
ResumeModelResponse = _reflection.GeneratedProtocolMessageType('ResumeModelResponse', (_message.Message,), {
'DESCRIPTOR' : _RESUMEMODELRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.ResumeModelResponse)
})
_sym_db.RegisterMessage(ResumeModelResponse)
CompareModelsRequest = _reflection.GeneratedProtocolMessageType('CompareModelsRequest', (_message.Message,), {
'DESCRIPTOR' : _COMPAREMODELSREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.CompareModelsRequest)
})
_sym_db.RegisterMessage(CompareModelsRequest)
CompareModelsResponse = _reflection.GeneratedProtocolMessageType('CompareModelsResponse', (_message.Message,), {
'DESCRIPTOR' : _COMPAREMODELSRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.CompareModelsResponse)
})
_sym_db.RegisterMessage(CompareModelsResponse)
CompileModelRequest = _reflection.GeneratedProtocolMessageType('CompileModelRequest', (_message.Message,), {
'DESCRIPTOR' : _COMPILEMODELREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.CompileModelRequest)
})
_sym_db.RegisterMessage(CompileModelRequest)
CompileModelResponse = _reflection.GeneratedProtocolMessageType('CompileModelResponse', (_message.Message,), {
'DESCRIPTOR' : _COMPILEMODELRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.CompileModelResponse)
})
_sym_db.RegisterMessage(CompileModelResponse)
DeployModelRequest = _reflection.GeneratedProtocolMessageType('DeployModelRequest', (_message.Message,), {
'DESCRIPTOR' : _DEPLOYMODELREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.DeployModelRequest)
})
_sym_db.RegisterMessage(DeployModelRequest)
PublishModelRequest = _reflection.GeneratedProtocolMessageType('PublishModelRequest', (_message.Message,), {
'DESCRIPTOR' : _PUBLISHMODELREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.PublishModelRequest)
})
_sym_db.RegisterMessage(PublishModelRequest)
DeployModelResponse = _reflection.GeneratedProtocolMessageType('DeployModelResponse', (_message.Message,), {
'DESCRIPTOR' : _DEPLOYMODELRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.DeployModelResponse)
})
_sym_db.RegisterMessage(DeployModelResponse)
ListModelsRequest = _reflection.GeneratedProtocolMessageType('ListModelsRequest', (_message.Message,), {
'LabelsEntry' : _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), {
'DESCRIPTOR' : _LISTMODELSREQUEST_LABELSENTRY,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.ListModelsRequest.LabelsEntry)
})
,
'DESCRIPTOR' : _LISTMODELSREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.ListModelsRequest)
})
_sym_db.RegisterMessage(ListModelsRequest)
_sym_db.RegisterMessage(ListModelsRequest.LabelsEntry)
ListModelsResponse = _reflection.GeneratedProtocolMessageType('ListModelsResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTMODELSRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.ListModelsResponse)
})
_sym_db.RegisterMessage(ListModelsResponse)
GetModelRequest = _reflection.GeneratedProtocolMessageType('GetModelRequest', (_message.Message,), {
'DESCRIPTOR' : _GETMODELREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.GetModelRequest)
})
_sym_db.RegisterMessage(GetModelRequest)
UpdateModelResult = _reflection.GeneratedProtocolMessageType('UpdateModelResult', (_message.Message,), {
'DESCRIPTOR' : _UPDATEMODELRESULT,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.UpdateModelResult)
})
_sym_db.RegisterMessage(UpdateModelResult)
DeleteModelRequest = _reflection.GeneratedProtocolMessageType('DeleteModelRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEMODELREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.DeleteModelRequest)
})
_sym_db.RegisterMessage(DeleteModelRequest)
DeleteModelResponse = _reflection.GeneratedProtocolMessageType('DeleteModelResponse', (_message.Message,), {
'DESCRIPTOR' : _DELETEMODELRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.DeleteModelResponse)
})
_sym_db.RegisterMessage(DeleteModelResponse)
PublishModelResponse = _reflection.GeneratedProtocolMessageType('PublishModelResponse', (_message.Message,), {
'DESCRIPTOR' : _PUBLISHMODELRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.PublishModelResponse)
})
_sym_db.RegisterMessage(PublishModelResponse)
GetMisclassRequest = _reflection.GeneratedProtocolMessageType('GetMisclassRequest', (_message.Message,), {
'DESCRIPTOR' : _GETMISCLASSREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.GetMisclassRequest)
})
_sym_db.RegisterMessage(GetMisclassRequest)
GetMisclassResponse = _reflection.GeneratedProtocolMessageType('GetMisclassResponse', (_message.Message,), {
'DESCRIPTOR' : _GETMISCLASSRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.GetMisclassResponse)
})
_sym_db.RegisterMessage(GetMisclassResponse)
DownloadModelRequest = _reflection.GeneratedProtocolMessageType('DownloadModelRequest', (_message.Message,), {
'DESCRIPTOR' : _DOWNLOADMODELREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.DownloadModelRequest)
})
_sym_db.RegisterMessage(DownloadModelRequest)
DownloadModelResponse = _reflection.GeneratedProtocolMessageType('DownloadModelResponse', (_message.Message,), {
'DESCRIPTOR' : _DOWNLOADMODELRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.DownloadModelResponse)
})
_sym_db.RegisterMessage(DownloadModelResponse)
FlagModelRequest = _reflection.GeneratedProtocolMessageType('FlagModelRequest', (_message.Message,), {
'DESCRIPTOR' : _FLAGMODELREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.FlagModelRequest)
})
_sym_db.RegisterMessage(FlagModelRequest)
FlagModelResponse = _reflection.GeneratedProtocolMessageType('FlagModelResponse', (_message.Message,), {
'DESCRIPTOR' : _FLAGMODELRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.FlagModelResponse)
})
_sym_db.RegisterMessage(FlagModelResponse)
TestModelRequest = _reflection.GeneratedProtocolMessageType('TestModelRequest', (_message.Message,), {
'DESCRIPTOR' : _TESTMODELREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.TestModelRequest)
})
_sym_db.RegisterMessage(TestModelRequest)
TestModelResponse = _reflection.GeneratedProtocolMessageType('TestModelResponse', (_message.Message,), {
'DESCRIPTOR' : _TESTMODELRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.model.v1.model_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.model.v1.TestModelResponse)
})
_sym_db.RegisterMessage(TestModelResponse)
DESCRIPTOR._options = None
_GETMODELLOGSRESPONSE_LOGSENTRY._options = None
_LISTMODELSREQUEST_LABELSENTRY._options = None
# Generated service descriptor for ModelService (19 unary RPCs).
# Each MethodDescriptor's serialized_options byte string is the wire-encoded
# google.api.http annotation (HTTP method + URL template + body binding) used
# by gRPC-gateway style transcoding; do not edit these by hand.
_MODELSERVICE = _descriptor.ServiceDescriptor(
  name='ModelService',
  full_name='github.com.metaprov.modelaapi.services.model.v1.ModelService',
  file=DESCRIPTOR,
  index=0,
  serialized_options=None,
  create_key=_descriptor._internal_create_key,
  serialized_start=2871,
  serialized_end=6713,
  methods=[
  _descriptor.MethodDescriptor(
    name='ListModels',
    full_name='github.com.metaprov.modelaapi.services.model.v1.ModelService.ListModels',
    index=0,
    containing_service=None,
    input_type=_LISTMODELSREQUEST,
    output_type=_LISTMODELSRESPONSE,
    serialized_options=b'\202\323\344\223\002\027\022\025/v1/model/{namespace}',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='CreateModel',
    full_name='github.com.metaprov.modelaapi.services.model.v1.ModelService.CreateModel',
    index=1,
    containing_service=None,
    input_type=_CREATEMODELREQUEST,
    output_type=_CREATEMODELRESPONSE,
    serialized_options=b'\202\323\344\223\002\017\"\n/v1/models:\001*',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='GetModel',
    full_name='github.com.metaprov.modelaapi.services.model.v1.ModelService.GetModel',
    index=2,
    containing_service=None,
    input_type=_GETMODELREQUEST,
    output_type=_GETMODELRESPONSE,
    serialized_options=b'\202\323\344\223\002\037\022\035/v1/models/{namespace}/{name}',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='UpdateModel',
    full_name='github.com.metaprov.modelaapi.services.model.v1.ModelService.UpdateModel',
    index=3,
    containing_service=None,
    input_type=_UPDATEMODELREQUEST,
    output_type=_UPDATEMODELRESPONSE,
    serialized_options=b'\202\323\344\223\002>\0329/v1/models/{item.metadata.namespace}/{item.metadata.name}:\001*',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='DeleteModel',
    full_name='github.com.metaprov.modelaapi.services.model.v1.ModelService.DeleteModel',
    index=4,
    containing_service=None,
    input_type=_DELETEMODELREQUEST,
    output_type=_DELETEMODELRESPONSE,
    serialized_options=b'\202\323\344\223\002\037*\035/v1/models/{namespace}/{name}',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='DeployModel',
    full_name='github.com.metaprov.modelaapi.services.model.v1.ModelService.DeployModel',
    index=5,
    containing_service=None,
    input_type=_DEPLOYMODELREQUEST,
    output_type=_DEPLOYMODELRESPONSE,
    serialized_options=b'\202\323\344\223\002)\"$/v1/models/{namespace}/{name}:deploy:\001*',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='PublishModel',
    full_name='github.com.metaprov.modelaapi.services.model.v1.ModelService.PublishModel',
    index=6,
    containing_service=None,
    input_type=_PUBLISHMODELREQUEST,
    output_type=_PUBLISHMODELRESPONSE,
    serialized_options=b'\202\323\344\223\002*\"%/v1/models/{namespace}/{name}:publish:\001*',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='CreateModelProfile',
    full_name='github.com.metaprov.modelaapi.services.model.v1.ModelService.CreateModelProfile',
    index=7,
    containing_service=None,
    input_type=_CREATEMODELPROFILEREQUEST,
    output_type=_CREATEMODELPROFILERESPONSE,
    serialized_options=b'\202\323\344\223\002\'\"%/v1/models/{namespace}/{name}:profile',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='GetModelProfile',
    full_name='github.com.metaprov.modelaapi.services.model.v1.ModelService.GetModelProfile',
    index=8,
    containing_service=None,
    input_type=_GETMODELPROFILEREQUEST,
    output_type=_GETMODELPROFILERESPONSE,
    serialized_options=b'\202\323\344\223\002\'\"%/v1/models/{namespace}/{name}:profile',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='GetModelMisclass',
    full_name='github.com.metaprov.modelaapi.services.model.v1.ModelService.GetModelMisclass',
    index=9,
    containing_service=None,
    input_type=_GETMISCLASSREQUEST,
    output_type=_GETMISCLASSRESPONSE,
    serialized_options=b'\202\323\344\223\002(\022&/v1/models/{namespace}/{name}:misclass',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='GetModelLogs',
    full_name='github.com.metaprov.modelaapi.services.model.v1.ModelService.GetModelLogs',
    index=10,
    containing_service=None,
    input_type=_GETMODELLOGSREQUEST,
    output_type=_GETMODELLOGSRESPONSE,
    serialized_options=b'\202\323\344\223\002$\022\"/v1/models/{namespace}/{name}:logs',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='AbortModel',
    full_name='github.com.metaprov.modelaapi.services.model.v1.ModelService.AbortModel',
    index=11,
    containing_service=None,
    input_type=_ABORTMODELREQUEST,
    output_type=_ABORTMODELRESPONSE,
    serialized_options=b'\202\323\344\223\002%\"#/v1/models/{namespace}/{name}:abort',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='PauseModel',
    full_name='github.com.metaprov.modelaapi.services.model.v1.ModelService.PauseModel',
    index=12,
    containing_service=None,
    input_type=_PAUSEMODELREQUEST,
    output_type=_PAUSEMODELRESPONSE,
    serialized_options=b'\202\323\344\223\002%\"#/v1/models/{namespace}/{name}:pause',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='ResumeModel',
    full_name='github.com.metaprov.modelaapi.services.model.v1.ModelService.ResumeModel',
    index=13,
    containing_service=None,
    input_type=_RESUMEMODELREQUEST,
    output_type=_RESUMEMODELRESPONSE,
    serialized_options=b'\202\323\344\223\002&\"$/v1/models/{namespace}/{name}:resume',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='CompareModels',
    full_name='github.com.metaprov.modelaapi.services.model.v1.ModelService.CompareModels',
    index=14,
    containing_service=None,
    input_type=_COMPAREMODELSREQUEST,
    output_type=_COMPAREMODELSRESPONSE,
    serialized_options=b'\202\323\344\223\002(\"&/v1/models/{namespace}/{names}:compare',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='CompileModel',
    full_name='github.com.metaprov.modelaapi.services.model.v1.ModelService.CompileModel',
    index=15,
    containing_service=None,
    input_type=_COMPILEMODELREQUEST,
    output_type=_COMPILEMODELRESPONSE,
    serialized_options=b'\202\323\344\223\002\'\"%/v1/models/{namespace}/{name}:compile',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='DownloadModel',
    full_name='github.com.metaprov.modelaapi.services.model.v1.ModelService.DownloadModel',
    index=16,
    containing_service=None,
    input_type=_DOWNLOADMODELREQUEST,
    output_type=_DOWNLOADMODELRESPONSE,
    serialized_options=b'\202\323\344\223\002(\"&/v1/models/{namespace}/{name}:download',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='FlagModel',
    full_name='github.com.metaprov.modelaapi.services.model.v1.ModelService.FlagModel',
    index=17,
    containing_service=None,
    input_type=_FLAGMODELREQUEST,
    output_type=_FLAGMODELRESPONSE,
    serialized_options=b'\202\323\344\223\002$\"\"/v1/models/{namespace}/{name}:flag',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='TestModel',
    full_name='github.com.metaprov.modelaapi.services.model.v1.ModelService.TestModel',
    index=18,
    containing_service=None,
    input_type=_TESTMODELREQUEST,
    output_type=_TESTMODELRESPONSE,
    serialized_options=b'\202\323\344\223\002$\"\"/v1/models/{namespace}/{name}:test',
    create_key=_descriptor._internal_create_key,
  ),
])
_sym_db.RegisterServiceDescriptor(_MODELSERVICE)
DESCRIPTOR.services_by_name['ModelService'] = _MODELSERVICE
# @@protoc_insertion_point(module_scope)
|
#!/usr/bin/env python
# Convert Hyg star database (http://www.astronexus.com/hyg)
# from CSV to JSON
# Paul Melis <paul.melis@surfsara.nl>
import sys, csv, json
STRING_FIELDS = {
    # v3
    'proper', 'gl', 'spect', 'con', 'var', 'bf', 'bayer', 'base',
    # v2
    'Spectrum', 'Gliese', 'BayerFlamsteed', 'ProperName'
}


def convert_value(value, key):
    """Convert one CSV cell to a typed value.

    Known string fields pass through unchanged. For all other fields an
    empty cell becomes None; otherwise int is tried first, then float.
    If neither parse succeeds, a warning is printed and the raw string
    is kept (same behaviour as the original flat script).
    """
    if key in STRING_FIELDS:
        return value
    if value == '':
        return None
    try:
        return int(value)
    except ValueError:
        pass
    try:
        return float(value)
    except ValueError:
        print("Can't convert non-string value for key %s" % key)
        return value


def rows_to_entries(columns, rows):
    """Turn an iterable of CSV rows into a list of {column: value} dicts."""
    return [
        {key: convert_value(value, key) for key, value in zip(columns, row)}
        for row in rows
    ]


def main(csv_path, json_path):
    """Read the HYG CSV at csv_path and write its JSON form to json_path."""
    # newline='' lets the csv module handle embedded newlines correctly.
    with open(csv_path, 'rt', newline='') as f:
        reader = csv.reader(f, delimiter=',')
        columns = next(reader)  # header row supplies the dict keys
        entries = rows_to_entries(columns, reader)
    with open(json_path, 'wt') as g:
        g.write(json.dumps(entries))


if __name__ == '__main__':
    main(sys.argv[1], sys.argv[2])
|
import pyglet
from typing import Dict, Tuple, Final, Any
from math import ceil
from game_map import GameMap
from entities import Entity
class Tileset:
    """Fixed-size tile atlas backed by a pyglet texture grid."""

    # Edge length of every tile, in pixels.
    TILE_SIZE: Final[int] = 32

    def __init__(self, tileset_path: str, tile_data: Dict[int, Dict[str, Any]]):
        """Load the sheet image and slice it into TILE_SIZE squares.

        tile_data maps a tile id to its sheet coordinates
        ({"sheet_x": ..., "sheet_y": ...}).
        """
        self.tile_data = tile_data
        sheet = pyglet.resource.image(tileset_path)
        # The sheet must be an exact multiple of the tile size on both axes.
        assert sheet.width % self.TILE_SIZE == 0
        assert sheet.height % self.TILE_SIZE == 0
        rows = sheet.height // self.TILE_SIZE
        cols = sheet.width // self.TILE_SIZE
        self.tileset_grid = pyglet.image.ImageGrid(sheet, rows, cols)
        self.tileset = pyglet.image.TextureGrid(self.tileset_grid)

    def __getitem__(self, tile_id: int) -> pyglet.image.TextureRegion:
        """Return the texture region for tile_id via its sheet coordinates."""
        entry = self.tile_data[tile_id]
        return self.tileset[entry["sheet_y"], entry["sheet_x"]]
class Rendering:
    """Draws the map and entity sprites through a camera offset, in tile units."""

    def __init__(self, tileset: Tileset):
        self.tileset = tileset
        self.window = pyglet.window.Window(800, 600)
        tile = self.tileset.TILE_SIZE
        # Window size expressed in whole tiles (rounded up to cover the edge).
        self.window_width_tiles = int(ceil(self.window.width / tile))
        self.window_height_tiles = int(ceil(self.window.height / tile))
        # Offset from the camera origin to the screen centre, in tiles.
        self.camera_center_offset_x = self.window_width_tiles // 2
        self.camera_center_offset_y = self.window_height_tiles // 2
        self.camera_x = 0
        self.camera_y = 0
        # Sprites tracked per entity id.
        self.entities: Dict[int, pyglet.sprite.Sprite] = {}

    def center_camera(self, x: int, y: int):
        """Place the camera so that tile (x, y) sits at the screen centre."""
        self.camera_x = x - self.camera_center_offset_x
        self.camera_y = y - self.camera_center_offset_y

    def relative_to_camera(self, x: int, y: int) -> Tuple[int, int]:
        """Translate world tile coordinates into screen tile coordinates."""
        return x - self.camera_x, y - self.camera_y

    def draw_tile(self, x: int, y: int, tile_id: int):
        """Blit one tile at screen tile position (x, y)."""
        tile = self.tileset.TILE_SIZE
        self.tileset[tile_id].blit(x * tile, y * tile)

    def draw_tile_relative(self, x: int, y: int, tile_id: int):
        """Blit one tile given in world tile coordinates."""
        screen_x, screen_y = self.relative_to_camera(x, y)
        self.draw_tile(screen_x, screen_y, tile_id)

    def draw_map(self, game_map: GameMap):
        """Draw the camera-visible portion of game_map."""
        first_x = max(self.camera_x, 0)
        first_y = max(self.camera_y, 0)
        rel_x, rel_y = self.relative_to_camera(first_x, first_y)
        # Columns/rows of screen space remaining from (first_x, first_y),
        # clamped to the map bounds.
        last_x = min(first_x + (self.window_width_tiles - rel_x), game_map.width)
        last_y = min(first_y + (self.window_height_tiles - rel_y), game_map.height)
        for row in range(first_y, last_y):
            for col in range(first_x, last_x):
                self.draw_tile_relative(col, row, game_map.get(col, row))

    def add_entity(self, entity: Entity):
        """Create and track a sprite for entity, keyed by its id."""
        sprite = pyglet.sprite.Sprite(
            self.tileset[entity.tile_id], entity.x, entity.y
        )
        self.entities[entity.id] = sprite

    def draw_entities(self, entities: Dict[int, Entity]):
        """Reposition and draw the sprite of every tracked entity."""
        for entity in entities.values():
            sprite = self.entities.get(entity.id)
            if sprite is None:
                # Entity was never registered via add_entity; skip it.
                continue
            screen_x, screen_y = self.relative_to_camera(entity.x, entity.y)
            sprite.x = screen_x * self.tileset.TILE_SIZE
            sprite.y = screen_y * self.tileset.TILE_SIZE
            sprite.draw()
|
import Image, ImageFilter
from rgbxy import Converter, GamutC
converter = Converter(GamutC)
def frameToColorMapImage(frame):
    """Reduce a video frame (ndarray) to a 3x3 colour-map image.

    The frame is blurred first so each of the nine cells ends up
    averaging a broad region of the screen.
    """
    blurred = Image.fromarray(frame).filter(ImageFilter.GaussianBlur(5))
    return blurred.resize((3, 3))
def getRGBXYBri(im, idx):
    """Return (r, g, b, x, y, bri) for the pixel at grid position idx.

    x/y are CIE coordinates from the shared converter; bri is a perceived
    brightness that is progressively dimmed for darker pixels and forced
    to 0 (off) below a small threshold.
    """
    r, g, b = im.getpixel((idx[0], idx[1]))
    x, y = converter.rgb_to_xy(r, g, b)
    # Perceived luminance (ITU-R BT.601 weights).
    bri = 0.299 * float(r) + 0.587 * float(g) + 0.114 * float(b)
    # Apply the dimming cascade in sequence (each step may trigger the next).
    if bri <= 100:
        bri = (bri / 100) * bri
    if bri <= 30:
        bri = bri / 2
    if bri <= 7:
        bri = 0
    return (r, g, b, x, y, bri)
|
from setuptools import setup
# Packaging metadata for the twiml-generator distribution.
setup(name='twiml-generator',
      version='0.1',
      description='Generate a code from a TwiML file',
      url='https://github.com/TwilioDevEd/twiml-generator/',
      author='Samuel Mendes',
      author_email='smendes@twilio.com',
      license='MIT',
      packages=['twiml_generator'],
      include_package_data=True,
      # Runtime dependencies: XML handling (lxml), naming helpers
      # (inflection), and per-language code formatters (yapf, jsbeautifier).
      install_requires=[
          'lxml',
          'inflection',
          'yapf',
          'jsbeautifier'
      ],
      zip_safe=False)
|
from modules import *
from image_preprocessing import *
from masks import *
def _double_conv(x, filters):
    """Two 3x3 same-padded ReLU convolutions at the given filter width."""
    x = Conv2D(filters, 3, 1, activation='relu', padding='same')(x)
    return Conv2D(filters, 3, 1, activation='relu', padding='same')(x)


def _down(x, filters):
    """Encoder step: double conv, then 2x2 max-pool followed by dropout.

    Returns (skip, pooled) where skip is the pre-pool activation that the
    decoder concatenates back in.
    """
    skip = _double_conv(x, filters)
    pooled = MaxPooling2D(pool_size=(2, 2))(skip)
    return skip, Dropout(0.5)(pooled)


def _up(x, skip, filters):
    """Decoder step: upsample + 3x3 conv, concat skip, dropout, double conv."""
    x = Conv2D(filters, 3, activation='relu', padding='same')(UpSampling2D(size=(2, 2))(x))
    x = concatenate([x, skip], axis=3)
    x = Dropout(0.5)(x)
    return _double_conv(x, filters)


def get_unet():
    """Build a 4-level U-Net over RGB inputs of shape IMG_SIZE.

    Returns an uncompiled Keras Model whose output is a per-pixel softmax
    over n_colors classes. The layer graph is identical to the previous
    hand-unrolled version; the repeated conv/pool/upsample blocks are just
    factored into helpers.
    """
    inputs = Input(shape=[IMG_SIZE[0], IMG_SIZE[1], 3])
    skip1, x = _down(inputs, 32)
    skip2, x = _down(x, 64)
    skip3, x = _down(x, 128)
    skip4, x = _down(x, 256)
    x = _double_conv(x, 512)  # bottleneck
    x = _up(x, skip4, 256)
    x = _up(x, skip3, 128)
    x = _up(x, skip2, 64)
    x = _up(x, skip1, 32)
    # softmax converts the output to a list of probabilities that must sum to 1
    outputs = Conv2D(n_colors, 1, 1, activation='softmax')(x)
    return Model(inputs=inputs, outputs=outputs)
# Build the network once at import time so downstream scripts can use it directly.
model = get_unet()
# tf.keras.utils.plot_model(model, show_shapes=True)
|
from FMERepositoryUtility.FMEServerJob import FMEServerJob
class FMWJob(FMEServerJob):
    """Server job that downloads a single FMW workspace from a repository."""

    def do_fmw_job(self, repo, fmw):
        """Fetch one workspace into the configured download directory.

        repo and fmw are API records; only their "name" entries are used.
        Destination directory and overwrite policy come from job_config.
        """
        repository = repo["name"]
        workspace = fmw["name"]
        full_name = "%s\\%s" % (repository, workspace)
        self.log.write_line("Downloading %s ..." % full_name)
        self.api.download_fmw(
            repository,
            workspace,
            self.job_config["fmw_dir"],
            self.job_config["overwrite"],
        )
|
from neurasim import *
# Domain extent (physical units) and grid resolution; with these values
# dx = dy = 1, so grid indices coincide with physical coordinates.
Lx=150
Ly=150
Nx=150
Ny=150
dx=Lx/Nx
dy=Ly/Ny
# Cylinder diameter for the obstacle below.
D=10
#DOMAIN
DOMAIN = Domain(x=Nx, y=Ny, boundaries=[OPEN, STICKY],bounds=Box[0:Lx, 0:Ly])
# The commented "testing field" variants below build alternative synthetic
# pressure fields (half/half, quadrants, bands) used to sanity-check the
# force integration; exactly one variant should be active at a time.
#TESTING FIELD 1
# pressure = DOMAIN.scalar_grid(0)
# aux_lower = HardGeometryMask(Box[:, Ly/2:Ly]) >> DOMAIN.scalar_grid()
# aux_upper = HardGeometryMask(Box[:, 0:Ly/2]) >> DOMAIN.scalar_grid()
# pressure = pressure + aux_lower*0 + aux_upper*1
#TESTING FIELD 2
# pressure = DOMAIN.scalar_grid(0)
# aux_lower_right = HardGeometryMask(Box[Lx/2:Lx, Ly/2:Ly]) >> DOMAIN.scalar_grid()
# aux_lower_left = HardGeometryMask(Box[0:Lx/2, Ly/2:Ly]) >> DOMAIN.scalar_grid()
# aux_upper_right = HardGeometryMask(Box[Lx/2:Lx, 0:Ly/2]) >> DOMAIN.scalar_grid()
# aux_upper_left = HardGeometryMask(Box[0:Lx/2, 0:Ly/2]) >> DOMAIN.scalar_grid()
# pressure = pressure + aux_lower_right*1 + aux_lower_left*0 + aux_upper_right*0 + aux_upper_left*1
# #TESTING FIELD 3
# pressure = DOMAIN.scalar_grid(0)
# aux_lower_right = HardGeometryMask(Box[25:Lx, Ly/2:Ly]) >> DOMAIN.scalar_grid()
# aux_lower_left = HardGeometryMask(Box[0:25, Ly/2:Ly]) >> DOMAIN.scalar_grid()
# aux_upper_right = HardGeometryMask(Box[25:Lx, 0:Ly/2]) >> DOMAIN.scalar_grid()
# aux_upper_left = HardGeometryMask(Box[0:25, 0:Ly/2]) >> DOMAIN.scalar_grid()
# pressure = pressure + aux_lower_right*1 + aux_lower_left*0 + aux_upper_right*0 + aux_upper_left*1
#TESTING FIELD 4
# pressure = DOMAIN.scalar_grid(0)
# aux_lower_right = HardGeometryMask(Box[Ly/2:Ly, 25:Lx]) >> DOMAIN.scalar_grid()
# aux_lower_left = HardGeometryMask(Box[Ly/2:Ly, 0:25]) >> DOMAIN.scalar_grid()
# aux_upper_right = HardGeometryMask(Box[0:Ly/2, 25:Lx]) >> DOMAIN.scalar_grid()
# aux_upper_left = HardGeometryMask(Box[0:Ly/2, 0:25]) >> DOMAIN.scalar_grid()
# pressure = pressure + aux_lower_right*1 + aux_lower_left*0 + aux_upper_right*0 + aux_upper_left*1
#TESTING FIELD 5
# pressure = DOMAIN.scalar_grid(0)
# aux_lower = HardGeometryMask(Box[0:25:,:]) >> DOMAIN.scalar_grid()
# aux_upper = HardGeometryMask(Box[25:Lx, :]) >> DOMAIN.scalar_grid()
# pressure = pressure + aux_lower*0 + aux_upper*1
# #TESTING FIELD 6
# pressure = DOMAIN.scalar_grid(0)
# aux_lower = HardGeometryMask(Box[:, Ly-25:Ly]) >> DOMAIN.scalar_grid()
# aux_upper = HardGeometryMask(Box[:, 0:25]) >> DOMAIN.scalar_grid()
# aux_right = HardGeometryMask(Box[Lx-25:Lx, :]) >> DOMAIN.scalar_grid()
# pressure = pressure + aux_upper*1 + aux_lower*1 + aux_right*1
#TESTING FIELD 7
# Active variant: pressure 1 in one half of the domain, 0 in the other.
pressure = DOMAIN.scalar_grid(0)
aux_lower = HardGeometryMask(Box[Lx/2:Lx, :]) >> DOMAIN.scalar_grid()
aux_upper = HardGeometryMask(Box[0:Lx/2, :]) >> DOMAIN.scalar_grid()
pressure = pressure + aux_lower*0 + aux_upper*1
#CYLINDER
obstacle = Obstacle(Sphere([25, Ly/2], radius=D/2), angular_velocity=0.0)
FORCES_MASK = HardGeometryMask(Sphere([25, Ly/2], radius=D/2)) >> DOMAIN.scalar_grid()
# Pull the mask out of the grid as a plain numpy array for calculate_force.
FORCES_MASK = FORCES_MASK.values._native.cpu().numpy()
#CALCULATE FORCES
# Per-cell spacing array matching the pressure grid shape.
dxMASK = np.ones_like(pressure.values._native.cpu().numpy())*dx
vforce, hforce = calculate_force(pressure, FORCES_MASK, dxMASK)
#RESULTS
_, _ = plot_field(Lx, Ly, dx, dy, pressure, limits=[0,1], plots=['surface'], lx='x', ly='y',
lbar='pressure', ltitle='Pressure Testing Field', save=True, filename='./pressure_testing_field.png')
_,_ = plot_field(Lx, Ly, dx, dy, FORCES_MASK, plots=['mask'], lx='x', ly='y',
lbar='mask', ltitle='MASK Testing Field', save=True, filename='./pressure_testing_mask.png')
print(f'Vertical: {vforce} - horizontal: {hforce}')
|
import unittest
from google.protobuf import json_format
import json
import rastervision as rv
from rastervision.core.class_map import ClassItem
from rastervision.protos.task_pb2 import TaskConfig as TaskConfigMsg
from rastervision.protos.class_item_pb2 import ClassItem as ClassItemMsg
class TestObjectDetectionConfig(unittest.TestCase):
    """Unit tests for building OBJECT_DETECTION TaskConfigs and their
    proto round-trips."""

    def test_build_task(self):
        """Builder assigns 1-based ids to class names in order."""
        classes = ['one', 'two']
        expected = [ClassItem(1, 'one'), ClassItem(2, 'two')]
        t = rv.TaskConfig.builder(rv.OBJECT_DETECTION) \
            .with_classes(classes) \
            .build()
        self.assertEqual(t.task_type, rv.OBJECT_DETECTION)
        self.assertListEqual(t.class_map.get_items(), expected)

    def test_build_task_from_proto(self):
        """A JSON-encoded proto parses into a TaskConfig with the expected
        chip size and class map."""
        task_config = {
            'task_type': rv.OBJECT_DETECTION,
            'object_detection_config': {
                'chip_size':
                500,
                'class_items': [{
                    'id': 1,
                    'name': 'car',
                    'color': 'red'
                }, {
                    'id': 2,
                    'name': 'building',
                    'color': 'blue'
                }, {
                    'id': 3,
                    'name': 'background',
                    'color': 'black'
                }]
            }
        }
        msg = json_format.Parse(json.dumps(task_config), TaskConfigMsg())
        task = rv.TaskConfig.from_proto(msg)
        self.assertEqual(task.class_map.get_by_name('building').id, 2)
        self.assertEqual(task.chip_size, 500)

    def test_create_proto_from_task(self):
        """Serializing a built TaskConfig yields the expected proto fields."""
        t = rv.TaskConfig.builder(rv.OBJECT_DETECTION) \
            .with_classes(['car', 'boat']) \
            .with_chip_size(500) \
            .build()
        msg = t.to_proto()
        expected_classes = [
            ClassItemMsg(name='car', id=1),
            ClassItemMsg(name='boat', id=2)
        ]
        self.assertEqual(msg.task_type, rv.OBJECT_DETECTION)
        self.assertEqual(msg.object_detection_config.chip_size, 500)
        # Compare by id so list ordering does not matter.
        actual_class_items = dict(
            [(i.id, i) for i in msg.object_detection_config.class_items])
        expected_class_items = dict([(i.id, i) for i in expected_classes])
        self.assertDictEqual(actual_class_items, expected_class_items)

    def test_missing_config_class_map(self):
        """Building without classes must raise ConfigError."""
        with self.assertRaises(rv.ConfigError):
            rv.TaskConfig.builder(rv.OBJECT_DETECTION).build()

    def test_no_missing_config(self):
        """Building with classes supplied must not raise."""
        try:
            rv.TaskConfig.builder(rv.OBJECT_DETECTION).with_classes(
                ['car']).build()
        except rv.ConfigError:
            self.fail('ConfigError raised unexpectedly')
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
|
# BOJ 17298 -- "next greater element": for each arr[i], print the first value
# to its right that is strictly larger, or -1 if none exists.
import sys
si = sys.stdin.readline

n = int(si())
arr = list(map(int, si().split()))

# Monotonic stack of indices whose next-greater element is not yet known.
stack = []
ret = [-1] * n
for i in range(n):
    # arr[i] is the answer for every smaller element still waiting on the stack.
    while stack and arr[stack[-1]] < arr[i]:
        ret[stack[-1]] = arr[i]
        stack.pop()
    stack.append(i)
# fixed: dropped the unused `top = arr[-1]` binding and the commented-out
# sample input left over from local testing.
print(" ".join(map(str, ret)))
|
import copy
import time
import numpy as np
import pandas as pd
from Bio.Phylo import BaseTree
from Bio.Phylo.TreeConstruction import DistanceMatrix
import scphylo as scp
from scphylo.external._scistree import run_scistree
from scphylo.external._scprob import run_scprob
# from Bio.Phylo.TreeConstruction import DistanceTreeConstructor
def scistree(df_input, alpha, beta, n_threads=1, experiment=False):
    """Solving using ScisTree.

    Accurate and efficient cell lineage tree inference from noisy
    single cell data: the maximum likelihood perfect phylogeny approach
    :cite:`ScisTree`.

    Parameters
    ----------
    df_input : :class:`pandas.DataFrame`
        Input genotype matrix in which rows are cells and columns are mutations.
        Values inside this matrix show the presence (1), absence (0) and missing
        entires (3).
    alpha : :obj:`float`
        False positive error rate.
    beta : :obj:`float`
        False negative error rate.
    n_threads : :obj:`int`
        Number of threads.
    experiment : :obj:`bool`, optional
        Is in the experiment mode (the log won't be shown), by default False

    Returns
    -------
    :class:`pandas.DataFrame`
        A conflict-free matrix in which rows are cells and columns are mutations.
        Values inside this matrix show the presence (1) and absence (0).
        In experiment mode a ``(DataFrame, running_time)`` tuple is returned.
    """
    if not experiment:
        scp.logg.info(
            f"running ScisTree with alpha={alpha}, beta={beta}, n_threads={n_threads}"
        )
    tmpdir = scp.ul.tmpdirsys(suffix=".scistree")
    cells = df_input.index
    snvs = df_input.columns
    # ScisTree wants mutations as rows and, per entry, the probability of
    # genotype 0: missing (3) -> 0.5, observed 0 -> 1 - beta, observed 1 -> alpha.
    df = df_input.transpose()
    # fixed: replace with a single dict so the substitutions are applied
    # simultaneously. The previous chained calls
    # replace(0, 1 - beta) followed by replace(1, alpha) clobbered the zeros
    # whenever beta == 0 (0 -> 1, then 1 -> alpha).
    df = df.replace({3: 0.5, 0: 1 - beta, 1: alpha})
    file1 = f"{tmpdir.name}/scistree.input"
    # abuse the index name to emit ScisTree's "HAPLOID <rows> <cols>" header
    df.index.name = f"HAPLOID {df.shape[0]} {df.shape[1]}"
    df.to_csv(file1, sep=" ")
    # strip the quotes pandas puts around the header cell
    with open(file1) as ifile:
        data = ifile.read()
    with open(file1, "w") as ofile:
        data = data.replace('"', "")
        ofile.write(data)
    cmd = [
        "scistree",
        "-v",
        "-d",
        "0",
        "-e",
        "-k",
        f"{n_threads}",
        "-o",
        f"{tmpdir.name}/scistree.gml",
        f"{tmpdir.name}/scistree.input",
    ]
    s_time = time.time()
    run_scistree(cmd)
    e_time = time.time()
    running_time = e_time - s_time
    # Parse the imputed genotypes (one "Site i: ..." row per mutation) and the
    # optimisation trace ("current cost") out of ScisTree's text output.
    data = []
    detail = {"cost": "\n"}
    with open(f"{tmpdir.name}/scistree.output") as infile:
        now_store = False
        for line in infile:
            line = line.strip()
            if "Imputed genotypes:" in line:
                now_store = True
            if line[:4] == "Site" and now_store:
                line = "".join(line.split(":")[1])
                line = line.replace("\t", "")
                data.append([int(x) for x in line.split(" ")])
            if "current cost: " in line:
                cost = float(line.split("current cost: ")[1].split(", opt tree: ")[0])
                detail["cost"] += f" current best cost = {cost}\n"
    # rows are mutations in the tool output; transpose back to cells x mutations
    data = np.array(data)
    matrix_output = data.T
    df_output = pd.DataFrame(matrix_output)
    df_output.columns = snvs
    df_output.index = cells
    df_output.index.name = "cellIDxmutID"
    tmpdir.cleanup()
    if not experiment:
        scp.ul.stat(df_input, df_output, alpha, beta, running_time)
        return df_output
    else:
        return df_output, running_time
def rscistree(adata, alpha=0, beta=0, n_threads=1, mode="haploid"):
    """Solving using read-count ScisTree.

    Accurate and efficient cell lineage tree inference from noisy
    single cell data: the maximum likelihood perfect phylogeny approach
    :cite:`ScisTree`.

    Parameters
    ----------
    adata : anndata-style object
        Input object with per-cell/per-mutation read counts in
        ``layers['mutant']`` and ``layers['total']`` plus ``obs_names`` /
        ``var_names``.  (NOTE(review): the docstring previously described a
        DataFrame ``df_input``; the code clearly reads AnnData-style layers.)
    alpha : :obj:`float`
        False positive error rate (only used in the final stats report).
    beta : :obj:`float`
        False negative error rate (only used in the final stats report).
    n_threads : :obj:`int`
        Number of threads.
    mode : :obj:`str`
        Mode of calculating the probability from read-count.
        In {'haploid', 'ternary'}, by default haploid

    Returns
    -------
    :class:`pandas.DataFrame`
        A conflict-free matrix in which rows are cells and columns are mutations.
        Values inside this matrix show the presence (1) and absence (0).
    """
    scp.logg.info(f"running rScisTree with n_threads={n_threads}, mode={mode}")
    tmpdir = scp.ul.tmpdirsys(suffix=".rscistree", dirname=".")
    cells = adata.obs_names
    snvs = adata.var_names
    df_input = adata.to_df()
    # variant (V) and reference (R) read counts per cell x mutation
    V = adata.layers["mutant"]
    R = adata.layers["total"] - V
    # counts file: header line, then one "R V R V ..." line per mutation
    with open(f"{tmpdir.name}/rscistree.counts", "w") as fout:
        fout.write(f"{mode.upper()} {len(snvs)} {len(cells)}\n")
        for j in range(len(snvs)):
            for i in range(len(cells)):
                fout.write(f"{R[i,j]} {V[i,j]} ")
            fout.write("\n")
    # scprob converts the read counts into genotype probabilities
    # (presumably writing rscistree.input consumed below -- TODO confirm)
    cmd = [
        "scprob",
        f"{tmpdir.name}/rscistree.counts",
    ]
    if mode.lower() == "haploid":
        cmd += ["0"]
    elif mode.lower() == "ternary":
        cmd += ["1"]
    else:
        # NOTE(review): only logs; execution continues with an incomplete cmd
        scp.logg.error("Wrong mode!")
    run_scprob(cmd)
    cmd = [
        "scistree",
        "-v",
        "-d",
        "0",
        "-e",
        "-k",
        f"{n_threads}",
        "-o",
        f"{tmpdir.name}/rscistree.gml",
        f"{tmpdir.name}/rscistree.input",
    ]
    s_time = time.time()
    run_scistree(cmd)
    e_time = time.time()
    running_time = e_time - s_time
    # parse imputed genotypes and the optimisation trace from the tool output
    data = []
    detail = {"cost": "\n"}
    with open(f"{tmpdir.name}/rscistree.output") as infile:
        now_store = False
        for line in infile:
            line = line.strip()
            if "Imputed genotypes:" in line:
                now_store = True
            if line[:4] == "Site" and now_store:
                line = "".join(line.split(":")[1])
                line = line.replace("\t", "")
                data.append([int(x) for x in line.split(" ")])
            if "current cost: " in line:
                cost = float(line.split("current cost: ")[1].split(", opt tree: ")[0])
                detail["cost"] += f" current best cost = {cost}\n"
    # rows are mutations in the tool output; transpose to cells x mutations
    data = np.array(data)
    matrix_output = data.T
    df_output = pd.DataFrame(matrix_output)
    df_output.columns = snvs
    df_output.index = cells
    df_output.index.name = "cellIDxmutID"
    tmpdir.cleanup()
    scp.ul.stat(df_input, df_output, alpha, beta, running_time)
    return df_output
def iscistree(df_input, alpha, beta, n_iters=np.inf):
    """Solving using my own implementation of ScisTree.

    Accurate and efficient cell lineage tree inference from noisy
    single cell data: the maximum likelihood perfect phylogeny approach
    :cite:`ScisTree`.

    Parameters
    ----------
    df_input : :class:`pandas.DataFrame`
        Input genotype matrix in which rows are cells and columns are mutations.
        Values inside this matrix show the presence (1), absence (0) and missing
        entires (3).
    alpha : :obj:`float`
        False positive error rate.
    beta : :obj:`float`
        False negative error rate.
    n_iters : :obj:`int`
        Number of iterations to search for the neighboring trees, by default inf.

    Returns
    -------
    :class:`pandas.DataFrame`
        A conflict-free matrix in which rows are cells and columns are mutations.
        Values inside this matrix show the presence (1) and absence (0).
    """
    scp.logg.info(
        f"running iScisTree with alpha={alpha}, beta={beta}, n_iters={n_iters}"
    )

    def get_initial_tree(D):
        # Build a neighbor-joining tree from the pairwise distance matrix D,
        # then root it at the artificial all-zero "root" cell (last row) and
        # remove that leaf again.
        Q = []
        for i in range(D.shape[0]):
            Q.append(list(D[i, : i + 1]))
        dm = DistanceMatrix(names=[f"{i}" for i in range(D.shape[0])], matrix=Q)
        tree = nj(dm)
        node = None
        for clade in tree.find_clades():
            if clade.name == f"{D.shape[0]-1}":
                node = clade
        tree.root_with_outgroup(node)
        tree.prune(f"{D.shape[0]-1}")
        return tree

    def denoise_linear(I_mtr, alpha, beta, opt_tree):
        # One bottom-up pass per mutation: pick the clade whose cells most
        # likely carry the mutation under the alpha/beta error model.
        # Map each clade name to its (up to two) children; leaves map to [].
        tree = {}
        for clade in list(opt_tree.find_clades(order="level"))[::-1]:
            # find_clades on the clade itself yields the clade first, so
            # children[1] / children[2] are its two direct kids
            children = list(clade.find_clades(order="level"))
            if len(children) > 2:
                child_l = children[2]
                child_r = children[1]
                tree[clade.name] = [child_l.name, child_r.name]
            else:
                tree[clade.name] = []

        def get_cells_in_best(cells_in_best, best):
            # collect the leaf (cell) names under clade `best`
            for node in tree[best]:
                if "Inner" in node:
                    for child in tree[node]:
                        get_cells_in_best(cells_in_best, child)
                else:
                    cells_in_best.append(node)
            if "Inner" not in best:
                cells_in_best.append(best)

        output = np.zeros(I_mtr.shape, dtype=int)
        total_cost = 0
        for c in range(I_mtr.shape[1]):
            # qs[k]: log-likelihood gain of placing mutation c on clade k,
            # accumulated bottom-up (leaves first thanks to the level order)
            qs = {}
            best = None
            best_v = 0
            for k, v in tree.items():
                if len(v) == 0:
                    obs = I_mtr[int(k), c] == 1
                    p0 = (1 - obs) * (1 - beta) + obs * alpha
                    qs[k] = np.log((1 - p0) / p0)
                else:
                    qs[k] = qs[v[0]] + qs[v[1]]
                if qs[k] > best_v:
                    best = k
                    best_v = qs[k]
            cells_in_best = []
            get_cells_in_best(cells_in_best, best)
            output[list(map(int, cells_in_best)), c] = 1
            total_cost += -best_v
        return output, total_cost

    def get_neighbors(tree):
        """
        Return neighbors.

        For a tree with n taxa, there are n - 3 internal branches.
        Thus there are 2(n - 3) NNI rearrangements for any tree
        """
        # make child to parent dict
        parents = {}
        for clade in tree.find_clades():
            if clade != tree.root:
                node_path = tree.get_path(clade)
                # cannot get the parent if the parent is root. Bug?
                if len(node_path) == 1:
                    parents[clade] = tree.root
                else:
                    parents[clade] = node_path[-2]
        neighbors = []
        root_childs = []
        for clade in tree.get_nonterminals(order="level"):
            if clade == tree.root:
                left = clade.clades[0]
                right = clade.clades[1]
                root_childs.append(left)
                root_childs.append(right)
                if not left.is_terminal() and not right.is_terminal():
                    # make changes around the left_left clade
                    # left_left = left.clades[0]
                    left_right = left.clades[1]
                    right_left = right.clades[0]
                    right_right = right.clades[1]
                    # neightbor 1 (left_left + right_right)
                    del left.clades[1]
                    del right.clades[1]
                    left.clades.append(right_right)
                    right.clades.append(left_right)
                    temp_tree = copy.deepcopy(tree)
                    neighbors.append(temp_tree)
                    # neighbor 2 (left_left + right_left)
                    del left.clades[1]
                    del right.clades[0]
                    left.clades.append(right_left)
                    right.clades.append(right_right)
                    temp_tree = copy.deepcopy(tree)
                    neighbors.append(temp_tree)
                    # change back (left_left + left_right)
                    del left.clades[1]
                    del right.clades[0]
                    left.clades.append(left_right)
                    right.clades.insert(0, right_left)
            elif clade in root_childs:
                # skip root child
                continue
            else:
                # method for other clades
                # make changes around the parent clade
                left = clade.clades[0]
                right = clade.clades[1]
                parent = parents[clade]
                if clade == parent.clades[0]:
                    sister = parent.clades[1]
                    # neighbor 1 (parent + right)
                    del parent.clades[1]
                    del clade.clades[1]
                    parent.clades.append(right)
                    clade.clades.append(sister)
                    temp_tree = copy.deepcopy(tree)
                    neighbors.append(temp_tree)
                    # neighbor 2 (parent + left)
                    del parent.clades[1]
                    del clade.clades[0]
                    parent.clades.append(left)
                    clade.clades.append(right)
                    temp_tree = copy.deepcopy(tree)
                    neighbors.append(temp_tree)
                    # change back (parent + sister)
                    del parent.clades[1]
                    del clade.clades[0]
                    parent.clades.append(sister)
                    clade.clades.insert(0, left)
                else:
                    sister = parent.clades[0]
                    # neighbor 1 (parent + right)
                    del parent.clades[0]
                    del clade.clades[1]
                    parent.clades.insert(0, right)
                    clade.clades.append(sister)
                    temp_tree = copy.deepcopy(tree)
                    neighbors.append(temp_tree)
                    # neighbor 2 (parent + left)
                    del parent.clades[0]
                    del clade.clades[0]
                    parent.clades.insert(0, left)
                    clade.clades.append(right)
                    temp_tree = copy.deepcopy(tree)
                    neighbors.append(temp_tree)
                    # change back (parent + sister)
                    del parent.clades[0]
                    del clade.clades[0]
                    parent.clades.insert(0, sister)
                    clade.clades.insert(0, left)
        return neighbors

    def nj(distance_matrix):
        # Neighbor-joining tree construction (adapted from Biopython's
        # DistanceTreeConstructor.nj).
        # make a copy of the distance matrix to be used
        dm = copy.deepcopy(distance_matrix)
        # init terminal clades
        clades = [BaseTree.Clade(None, name) for name in dm.names]
        # init node distance
        node_dist = [0] * len(dm)
        # init minimum index
        min_i = 0
        min_j = 0
        inner_count = 0
        # special cases for Minimum Alignment Matrices
        if len(dm) == 1:
            root = clades[0]
            return BaseTree.Tree(root, rooted=False)
        elif len(dm) == 2:
            # minimum distance will always be [1,0]
            min_i = 1
            min_j = 0
            clade1 = clades[min_i]
            clade2 = clades[min_j]
            clade1.branch_length = dm[min_i, min_j] / 2.0
            clade2.branch_length = dm[min_i, min_j] - clade1.branch_length
            inner_clade = BaseTree.Clade(None, "Inner")
            inner_clade.clades.append(clade1)
            inner_clade.clades.append(clade2)
            clades[0] = inner_clade
            root = clades[0]
            return BaseTree.Tree(root, rooted=False)
        while len(dm) > 2:
            # calculate nodeDist
            for i in range(0, len(dm)):
                node_dist[i] = 0
                for j in range(0, len(dm)):
                    node_dist[i] += dm[i, j]
                node_dist[i] = node_dist[i] / (len(dm) - 2)
            # find minimum distance pair
            min_dist = dm[1, 0] - node_dist[1] - node_dist[0]
            min_i = 0
            min_j = 1
            for i in range(1, len(dm)):
                for j in range(0, i):
                    temp = dm[i, j] - node_dist[i] - node_dist[j]
                    if min_dist > temp:
                        min_dist = temp
                        min_i = i
                        min_j = j
            # create clade
            clade1 = clades[min_i]
            clade2 = clades[min_j]
            inner_count += 1
            inner_clade = BaseTree.Clade(None, "Inner" + str(inner_count))
            inner_clade.clades.append(clade1)
            inner_clade.clades.append(clade2)
            # assign branch length
            clade1.branch_length = (
                dm[min_i, min_j] + node_dist[min_i] - node_dist[min_j]
            ) / 2.0
            clade2.branch_length = dm[min_i, min_j] - clade1.branch_length
            if clade1.branch_length < 0:
                clade1.branch_length = 0
            if clade2.branch_length < 0:
                clade2.branch_length = 0
            # update node list
            clades[min_j] = inner_clade
            del clades[min_i]
            # rebuild distance matrix,
            # set the distances of new node at the index of min_j
            for k in range(0, len(dm)):
                if k != min_i and k != min_j:
                    dm[min_j, k] = (
                        dm[min_i, k] + dm[min_j, k] - dm[min_i, min_j]
                    ) / 2.0
            dm.names[min_j] = "Inner" + str(inner_count)
            del dm[min_i]
        # set the last clade as one of the child of the inner_clade
        root = None
        if clades[0] == inner_clade:
            clades[0].branch_length = 0
            clades[1].branch_length = dm[1, 0]
            clades[0].clades.append(clades[1])
            root = clades[0]
        else:
            clades[0].branch_length = dm[1, 0]
            clades[1].branch_length = 0
            clades[1].clades.append(clades[0])
            root = clades[1]
        return BaseTree.Tree(root, rooted=False)

    cells = list(df_input.index)
    snvs = list(df_input.columns)
    I_mtr = df_input.values
    s_time = time.time()
    Ip = np.vstack([I_mtr, np.zeros(I_mtr.shape[1])])  # add root with profile zero
    scp.logg.debug("now calculating distance!", time=True)
    dist = scp.ul.dist_l1_ignore_na(Ip)
    scp.logg.debug("distance is done!", time=True)
    opt_tree = get_initial_tree(dist)
    # opt_subtrees = get_subtrees(opt_tree)
    # opt_O, opt_cost = denoise_quadratic(I, alpha, beta, opt_subtrees)
    scp.logg.debug("init tree!", time=True)
    opt_O, opt_cost = denoise_linear(I_mtr, alpha, beta, opt_tree)
    scp.logg.info("current best cost =", opt_cost, time=True)
    # Hill-climbing over NNI neighbours: accept any neighbour with a lower
    # denoising cost, stop when no neighbour improves or n_iters is reached.
    n_iter = 1
    is_done = False
    already_seen = set()
    already_seen.add(str(opt_tree))
    while not is_done and n_iter < n_iters:
        is_done = True
        neighbors = get_neighbors(opt_tree)
        for nbr_tree in neighbors:
            if str(nbr_tree) in already_seen:
                continue
            else:
                already_seen.add(str(nbr_tree))
            # nbr_subtrees = get_subtrees(nbr_tree)
            # nbr_O, nbr_cost = denoise_quadratic(I, alpha, beta, nbr_subtrees)
            nbr_O, nbr_cost = denoise_linear(I_mtr, alpha, beta, nbr_tree)
            if nbr_cost < opt_cost:
                opt_tree = nbr_tree
                # opt_subtrees = nbr_subtrees
                opt_O = nbr_O
                opt_cost = nbr_cost
                is_done = False
                scp.logg.info("current best cost =", nbr_cost, time=True)
        n_iter += 1
    e_time = time.time()
    running_time = e_time - s_time
    df_output = pd.DataFrame(opt_O)
    df_output.columns = snvs
    df_output.index = cells
    df_output.index.name = "cellIDxmutID"
    scp.ul.stat(df_input, df_output, alpha, beta, running_time)
    return df_output
|
#!/usr/bin/env python3
"""Flip a coin a user-chosen number of times and report the tallies."""
import random

print("I will flip a coin a set number times defined by the user.")

# user input
flip_number = int(input("How many times would you like me to flip the coin: "))
choice = input("Would you like to see the result of each flip (y/n): ").lower()
show_each_flip = choice.startswith('y')  # hoisted: invariant across the loop
print("\nFlipping.........................\n")

# initialize counters
heads = 0
tails = 0

# main program loop
for flips in range(flip_number):
    coin = random.randint(0, 1)
    if coin == 1:
        heads += 1
        if show_each_flip:
            print("HEADS")
    else:
        tails += 1
        if show_each_flip:
            print("TAILS")
    if heads == tails:
        print("At " + str(flips + 1) + " flips, the number of heads and tails were equal at " + str(heads) + " each.")

# fixed: guard against ZeroDivisionError when the user asks for 0 flips
if flip_number > 0:
    heads_percentage = round(100 * heads / flip_number, 2)
    tails_percentage = round(100 * tails / flip_number, 2)
else:
    heads_percentage = tails_percentage = 0.0

# display result
print("\nResults of Flipping a Coin " + str(flip_number) + " Times: ")
print("\nSide\t\tCount\t\tPercentage")
print("Heads\t\t" + str(heads) + "/" + str(flip_number) + "\t\t" + str(heads_percentage) + "%")
print("Tails\t\t" + str(tails) + "/" + str(flip_number) + "\t\t" + str(tails_percentage) + "%")
|
import os
from funcy import raiser
import pytest
from dvc.repo import locked
def test_is_dvc_internal(dvc):
    """Paths under a '.dvc' directory component are internal; look-alikes are not."""
    internal_path = os.path.join("path", "to", ".dvc", "file")
    lookalike_path = os.path.join("path", "to-non-.dvc", "file")
    assert dvc.is_dvc_internal(internal_path)
    assert not dvc.is_dvc_internal(lookalike_path)
@pytest.mark.parametrize(
    "path",
    [
        os.path.join("dir", "subdir", "file"),
        os.path.join("dir", "subdir"),
        "dir",
    ],
)
def test_find_outs_by_path(tmp_dir, dvc, path):
    """Non-strict lookup resolves any path inside a tracked dir to its single out."""
    (stage,) = tmp_dir.dvc_gen(
        {"dir": {"subdir": {"file": "file"}, "other": "other"}}
    )
    matches = dvc.find_outs_by_path(path, strict=False)
    assert [out.path_info for out in matches] == [stage.outs[0].path_info]
@pytest.mark.parametrize(
    "path",
    [os.path.join("dir", "subdir", "file"), os.path.join("dir", "subdir")],
)
def test_used_cache(tmp_dir, dvc, path):
    """used_cache for a sub-path reports the dir checksum plus the file blob."""
    from dvc.cache import NamedCache

    tmp_dir.dvc_gen({"dir": {"subdir": {"file": "file"}, "other": "other"}})
    expected = NamedCache.make(
        "local", "70922d6bf66eb073053a82f77d58c536.dir", "dir"
    )
    expected.add(
        "local",
        "8c7dd922ad47494fc02c388e12c00eac",
        os.path.join("dir", "subdir", "file"),
    )
    with dvc.state:
        actual = dvc.used_cache([path])
    assert actual._items == expected._items
    assert actual.external == expected.external
def test_locked(mocker):
    """The locked decorator brackets the wrapped call with repo._reset() calls."""
    repo = mocker.MagicMock()
    repo.method = locked(repo.method)

    call_args, call_kwargs = {}, {}
    repo.method(repo, call_args, call_kwargs)

    assert repo.method_calls == [
        mocker.call._reset(),
        mocker.call.method(repo, call_args, call_kwargs),
        mocker.call._reset(),
    ]
def test_collect_optimization(tmp_dir, dvc, mocker):
    # collect()/collect_granular() on a known stage file must not trigger a
    # full-graph collection; Repo.stages is patched to raise if touched.
    (stage,) = tmp_dir.dvc_gen("foo", "foo text")
    # Forget cached stages and graph and error out on collection
    dvc._reset()
    mocker.patch(
        "dvc.repo.Repo.stages",
        property(raiser(Exception("Should not collect"))),
    )
    # Should read stage directly instead of collecting the whole graph
    dvc.collect(stage.path)
    dvc.collect_granular(stage.path)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = "Davide Locatelli"
__status__ = "Production"
"""
This module contains DuckDuckGoSearchPage,
the page object for the DuckDuckGo search page.
Warning: the SEARCH_INPUT locator had to be updated because the page changed!
"""
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
class SentradeHomePage:
    """Page object for the Sentrade dashboard home page."""

    # URL of the locally served Dash app
    URL = 'http://127.0.0.1:8050/'

    # Locators
    SEARCH_INPUT = (By.ID, 'stock-ticker-input')

    def __init__(self, browser):
        self.browser = browser

    def load(self):
        """Open the home page in the browser."""
        self.browser.get(self.URL)

    def search(self, phrase):
        """Tab into the search input, type *phrase* and submit it.

        Sends the keys through an action chain rather than locating the input
        directly, so it works even when the element is not directly focusable.
        """
        chain = ActionChains(self.browser)
        chain.send_keys(Keys.TAB + phrase + Keys.RETURN)
        chain.perform()
|
import os
from setuptools import find_packages
from setuptools import setup
# Load the long description from README.md when present (source checkouts);
# fall back to an empty string so installs from stripped-down archives work.
cur_dir = os.path.dirname(__file__)
readme = os.path.join(cur_dir, 'README.md')
if os.path.exists(readme):
    # fixed: read as UTF-8 explicitly so the build does not depend on the
    # system's default locale encoding
    with open(readme, encoding='utf-8') as fh:
        long_description = fh.read()
else:
    long_description = ''
# Package metadata; the version is read from the walrus package itself so it
# is defined in exactly one place.
setup(
    name='walrus',
    version=__import__('walrus').__version__,
    description='walrus',
    long_description=long_description,
    author='Charles Leifer',
    author_email='coleifer@gmail.com',
    url='http://github.com/coleifer/walrus/',
    install_requires=['redis'],
    packages=find_packages(),
    # ship the bundled Lua scripts and the stopword list with the package
    package_data={
        'walrus': [
            'scripts/*',
            'stopwords.txt',
        ],
    },
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
    test_suite='walrus.tests',
)
|
# coding: utf-8
"""
IBM Cohort Engine
Service to evaluate cohorts and measures # noqa: E501
OpenAPI spec version: 2.1.0 2022-02-18T21:50:45Z
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.api.measure_evaluation_api import MeasureEvaluationApi # noqa: E501
from swagger_client.rest import ApiException
class TestMeasureEvaluationApi(unittest.TestCase):
    """MeasureEvaluationApi unit test stubs (swagger-codegen generated)."""

    def setUp(self):
        # fresh generated API client per test; no server call happens here
        self.api = swagger_client.api.measure_evaluation_api.MeasureEvaluationApi()  # noqa: E501

    def tearDown(self):
        pass

    def test_evaluate_measure(self):
        """Test case for evaluate_measure

        Evaluates a measure bundle for a single patient  # noqa: E501
        """
        pass

    def test_evaluate_patient_list_measure(self):
        """Test case for evaluate_patient_list_measure

        Evaluates a measure bundle for a list of patients  # noqa: E501
        """
        pass
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
|
import pytest
import errors
import os
import utils
def test_get_dict_from_yaml():
    """A nested YAML document parses into the matching nested dict."""
    yaml_string = """
a: 1
b:
  c: 3
  d: 4
"""
    parsed = utils.get_dict_from_yaml(yaml_string)
    assert parsed == {'a': 1, 'b': {'c': 3, 'd': 4}}
def test_get_dict_yaml_raises_invalid_input_error():
    # Malformed YAML must surface as the project's InvalidInputException,
    # not as a raw parser error.
    with pytest.raises(errors.InvalidInputException):
        utils.get_dict_from_yaml('invalid}')
def test_load_config(tmpdir, monkeypatch):
    """load_config_file reads and parses the file named by get_config_file_name."""
    config_file = tmpdir.mkdir("sub").join("test.yaml")
    config_file.write("""
a: 1
b: 2
""")
    monkeypatch.setattr(
        utils, 'get_config_file_name', lambda: config_file.strpath
    )
    assert utils.load_config_file() == {'a': 1, 'b': 2}
def test_load_config_file_error(monkeypatch):
    """A nonexistent config path propagates FileNotFoundError."""
    def mock_get_config_file_name():
        return 'invalid_path'

    monkeypatch.setattr(utils, 'get_config_file_name', mock_get_config_file_name)
    with pytest.raises(FileNotFoundError):
        # fixed: dropped the unused `dictionary =` binding (flake8 F841);
        # only the raised exception matters here, not the return value
        utils.load_config_file()
def test_get_config_file_name(monkeypatch):
    """'prod' selects app.yaml; anything else falls back to app_dev.yaml."""
    def fake_env(value):
        # mimic os.getenv(name, default) while always returning `value`
        return lambda name, default: value

    with monkeypatch.context() as m:
        m.setattr(os, 'getenv', fake_env('prod'))
        assert utils.get_config_file_name() == 'app.yaml'
        m.setattr(os, 'getenv', fake_env('dev'))
        assert utils.get_config_file_name() == 'app_dev.yaml'
        m.setattr(os, 'getenv', fake_env(''))
        assert utils.get_config_file_name() == 'app_dev.yaml'
|
"""Neatly load and clean Atkinson Table 4 (expert labels and ra/dec)
"""
import pandas as pd
import numpy as np
from tidalclassifier.utils.helper_funcs import str_to_N
# misc. prep work
def clean_table(df, file_str):
    """Normalise the FEAT/CONF/ID columns of Atkinson Table 4 and re-save it."""
    # FEAT: force to string, drop commas, then map blank-style values to N
    df['FEAT'] = df['FEAT'].map(str)
    # keep CONF as a string as well -- useful when writing the table back out
    df['CONF'] = df['CONF'].map(str)
    df['FEAT'] = df['FEAT'].map(lambda s: s.replace(',', ''))
    df['FEAT'] = df['FEAT'].map(str_to_N)
    df['ID'] = df['ID'].map(lambda s: s.replace(' ', ''))
    df.to_csv(file_str, index=False, sep='\t')
    # df['table4_index'] = np.arange(len(df))
    # df.set_index('table4_index', inplace=True)
    # simple line index; corresponds to picture ids in the u_and_c list
    df.sort_index(inplace=True)
    df['line_index'] = np.arange(len(df))
    df.set_index('line_index', inplace=True)
    return df
def load_table(table_str):
    """Read the tab-separated table at *table_str* and return it cleaned."""
    raw = pd.read_csv(table_str, sep='\t')
    return clean_table(raw, table_str)
|
from setuptools import setup, find_packages

# NOTE(review): 'Development Status :: ' is an incomplete trove classifier
# (should be e.g. 'Development Status :: 3 - Alpha'); PyPI rejects invalid
# classifiers on upload -- confirm the intended status before publishing.
classifiers = [
    'Development Status :: ',
    'Intended Audience :: Education',
    'Operating System :: windows 10',
    'License :: MIT License',
    'Programming Language :: Python :: 3.9.0'
]

setup(
    name='Patterns_Package',
    version='0.0.1',
    description='patterns of Capital and Small Alphabets, Numbers,some other Symbols',
    # fixed: setuptools expects 'long_description'; the previous
    # 'Long_description' keyword was silently ignored as unknown
    long_description=open('README.txt').read() + '\n\n' + open('CHANGELOG.txt').read(),
    url='https://github.com/saribalarakeshreddy/Python-3.9.0/tree/main/Packages',
    author='SARIBALA RAKESH REDDY',
    # fixed: 'author_email' was misspelled 'author_emial' and ignored
    author_email='rakeshreddysaribala1234@gmail.com',
    license='MIT',
    classifiers=classifiers,
    keywords='patterns',
    install_requires=['']
)
|
#!/usr/bin/env python3
# Enumerate connected graphs with a given vertex/edge count via nauty's
# geng/showg tools and print one labelled edge-list line per graph.
import os, subprocess, argparse
from pathlib import Path

# Location of a local nauty 2.6r10 build -- assumes ~/src/nauty26r10;
# TODO confirm for other machines.
HOME = str(Path.home())
NAUTY_DIR = HOME + '/src/nauty26r10/'
FNULL = open(os.devnull, 'w')  # sink for the tools' stderr chatter

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Generate graphs.')
    parser.add_argument('vertices', help='number of vertices', type=int, nargs=1)
    parser.add_argument('edges', help='number of edges', type=int, nargs=1)
    args = parser.parse_args()
    vertices = args.vertices[0]
    edges = args.edges[0]
    # geng -c: connected graphs only; -d3: minimum degree 3;
    # 'edges:edges' restricts to exactly `edges` edges
    geng = subprocess.Popen((NAUTY_DIR+'geng', '-cd3', str(vertices), '%d:%d' % (edges, edges)),
                            stdout=subprocess.PIPE, stderr=FNULL)
    # showg -e: print edge lists; -l0: 0-based vertex labels
    showg = subprocess.Popen((NAUTY_DIR+'showg', '-e', '-l0'), stdin=geng.stdout, stderr=FNULL, stdout=subprocess.PIPE)
    line_count = -1
    count = 0
    for line in showg.stdout:
        # showg emits a fixed 4-line record per graph; starting line_count at
        # -1 this selects every 4th line (presumably the edge-list line of
        # each record -- TODO confirm against actual showg output)
        if line_count % 4 == 2:
            graph_encoding = line.decode('ascii').rstrip()
            # columns: vertices, edges, edge list, unique graph name
            print(vertices, edges, ' ', graph_encoding, ' g_%d_%d_%d' % (vertices, edges, count+1))
            count += 1
        line_count += 1
|
from ..core import latest_update_date, latest_vaccination_update_date
from ..template import generate_layout as build_home_layout
from ..template_vacc import generate_layout as build_vaccination_layout
# Label text (ZH) #####
# TODO: Make markdown links open in new tab
# Chinese (ZH) label strings for the COVID-19 MTL dashboard. Keys ending in
# _mtl/_qc refer to Montreal (蒙特利尔) and Quebec (魁省) respectively.
labels = {
    'home_link': '/zh',
    'home_link_text': 'Home',
    'vaccination_link': '/zh/vaccination',
    'vaccination_link_text': 'Vaccination',
    'language0': 'Français',
    'language_link0': '/',
    'language1': 'English',
    'language_link1': '/en',
    'language2': 'Español',
    'language_link2': '/es',
    'title': '新型冠状病毒(COVID-19)蒙特利尔数据统计',
    'vaccination_title': ': Vaccination',
    'subtitle': '上次更新: ' + latest_update_date,
    'vaccination_subtitle': '上次更新: ' + latest_vaccination_update_date.isoformat(),
    'today': '今日',
    'today_short': '今日',
    'cases_montreal_label': '确诊(蒙特利尔)',
    'deaths_montreal_label': '死亡(蒙特利尔)',
    'cases_qc_label': '确诊(魁省)',
    'deaths_qc_label': '死亡(魁省)',
    # fixed: these two values were swapped -- the MTL key carried the QC text
    # '(魁省)' and vice versa, contradicting every other *_mtl/*_qc pair
    # (cf. hospitalisations_mtl / hospitalisations_qc below).
    'hosp_mtl_label': '新增入院 (蒙特利尔)',
    'hosp_qc_label': '新增入院 (魁省)',
    'icu': '重症患者(今日)',
    'yesterday': '昨日',
    'vs_2dago': '较2日前',
    'vaccination_1d_mtl_label': '1st doses administered (MTL)',
    'vaccination_2d_mtl_label': '2nd doses administered (MTL)',
    'vaccination_1d_perc_mtl_label': '% received 1 dose (MTL)',
    'vaccination_2d_perc_mtl_label': '% received 2 doses (MTL)',
    'vaccination_1d_qc_label': '1st doses administered (QC)',
    'vaccination_2d_qc_label': '2nd doses administered (QC)',
    'vaccination_1d_perc_qc_label': '% received 1 dose (QC)',
    'vaccination_2d_perc_qc_label': '% received 2 doses (QC)',
    'doses_today': '接种量(今日)',
    'test_pos_mtl_label': '检测阳性率 (蒙特利尔)',
    'test_pos_qc_label': '检测阳性率 (魁省)',
    'incidence_per100k_7d_mtl_label': '7日发病率/10万 (蒙特利尔)',
    'incidence_per100k_7d_qc_label': '7日发病率/10万 (魁省)',
    'vs_last7d': '较7日前',
    'recovered_qc_label': '治愈(魁省)',
    'recovered_mtl_label': '治愈 (蒙特利尔)',
    'negative_tests_qc_box_label': '检测阴性(魁省)',
    'montreal_map_label': '病例/100 000人(蒙特利尔岛)',
    'total_cases_label': '确诊病例',
    'age_group_label': 'Distribution of new cases across all age groups by week (MTL)',
    'total_deaths_label': '死亡(魁省)',
    'total_hospitalisations_label': '入院人数(魁省)',
    'intensive_care_label': '重症患者 (魁省)',
    'total_testing_label': '检测人数(魁省)',
    # footer
    'footer_left': '数据来源: [Santé Montréal](https://santemontreal.qc.ca/en/public/coronavirus-covid-19/), [INSPQ](https://www.inspq.qc.ca/covid-19/donnees), [Government of Québec](https://www.quebec.ca/en/health/health-issues/a-z/2019-coronavirus/situation-coronavirus-in-quebec/) / 使用软件[Dash](https://plotly.com/dash/) / [Github](https://github.com/jeremymoreau/covid19mtl)',
    'footer_centre': 'Hosting sponsored by [DigitalOcean](https://www.digitalocean.com/community/pages/covid-19)',
    'footer_right': '作者[Jeremy Moreau](https://jeremymoreau.com/), [Matthias Schoettle](https://mattsch.com), [Contributors](https://github.com/jeremymoreau/covid19mtl#contributors) / [CC BY 4.0](https://creativecommons.org/licenses/by/4.0/deed.zh)',
    'infobox': """
    ###### 相关资源

    - [新型冠状病毒症状自我评估工具](https://ca.thrive.health/covid19/en)
    - [Quebec Vaccination Campaign – Appointments](https://www.quebec.ca/en/health/health-issues/a-z/2019-coronavirus/progress-of-the-covid-19-vaccination/)
    - [蒙特利尔市公共卫生部门](https://santemontreal.qc.ca/en/public/coronavirus-covid-19/)
    - [公共卫生专业知识和参考资料(法语)](https://www.inspq.qc.ca/covid-19/donnees)
    - [魁北克省冠状病毒(COVID-19)相关资源](https://www.quebec.ca/en/health/health-issues/a-z/2019-coronavirus/situation-coronavirus-in-quebec/)
    - [加拿大冠状病毒相关资源](https://www.canada.ca/en/public-health/services/diseases/coronavirus-disease-covid-19.html)

    如果您对新型冠状病毒(COVID19)有所担心或疑问,或者出现咳嗽/发烧等症状,可拨打蒙特利尔地区的免费电话(514) 644-454-545,魁北克市地区的免费电话(418) 644-454545,或魁北克其他地区的免费电话(877) 644-4545。
    """,
    'montreal_map_colourbar_labels': {
        'date': '日期',
        'borough': '区/市',
        '7day_incidence_rate': '7日发病率',
        'new_cases': '新增确诊',
        'cases': '累计确诊',
        '7day_incidence_per100k': '7日发病率/10万',
        '7day_incidence': '7日发病率',
    },
    'montreal_map_legend_title': '<b>7日发病率/10万</b>',
    'montreal_map_hovertemplate': '<br>区/市: %{location}<br>7日发病率/10万: %{z}',
    '7day_avg_short': '7-day mov avg',
    '7day_avg_qc_label': '7-day moving avg (QC)',
    '7day_avg_mtl_label': '7-day moving avg (MTL)',
    # confirmed cases fig
    'confirmed_cases_y_label': 'New cases',
    'confirmed_cases_y2_label': 'Active cases',
    'active_cases_qc_label': 'Active cases (QC)',
    'new_cases_qc_label': 'New cases (QC)',
    'new_cases_mtl_label': 'New cases (MTL)',
    # age groups
    'age_total_label': '各年龄组总病例分布情况',
    'age_per100000_label': '每10万人口不同年龄组病例分布情况',
    'age_fig_hovertemplate': '%: %{y}',
    # deaths fig
    'deaths_fig_label': '死亡',
    'deaths_qc_y_label': 'New deaths',
    'deaths_qc_y2_label': '新增死亡 (7日移动平均)',
    'new_deaths_qc_label': 'New deaths (QC)',
    'new_deaths_mtl_label': 'New deaths (MTL)',
    # hospitalisations fig
    'hospitalisations_label': '入院人数',
    'hospitalisations_y_label': '现入院人数',
    'hospitalisations_y2_label': '新增入院 (7日移动平均)',
    'intensive_care_qc': '新增重症患者 (魁省)',
    'intensive_care_mtl': '新增重症患者 (蒙特利尔)',
    'hospitalisations_qc': '新增入院 (魁省)',
    'hospitalisations_active_qc': '现入院人数 (魁省)',
    'intensive_care_active_qc': '现重症患者 (魁省)',
    'hospitalisations_mtl': '新增入院 (蒙特利尔)',
    # Test positivity fig
    'testing_label': 'Test positivity rate',
    'testing_y_label': 'Test positivity rate',
    'testing_y2_label': 'Tests performed',
    'testing_tests_qc': 'Tests performed (QC)',
    'testing_tests_mtl': 'Tests performed (MTL)',
    'testing_hovertemplate_qc': '<b>Quebec</b><br>7-day moving avg: %{y:,.2f}%<br>Test positivity: %{customdata:,.2f}%',
    'testing_hovertemplate_mtl': '<b>Montreal</b><br>7-day moving avg: %{y:,.2f}%<br>Test positivity: %{customdata:,.2f}%',
    #
    'date_slider_label': '日期: ',
    'date_label': '日期',
    'age_label': '年龄',
    'week_label': 'Week',
    'linear_label': '线性尺度',
    'log_label': '对数尺度',
    # Confirmed deaths by place of residence (MTL) fig
    'deaths_loc_fig_mtl_label': '按居住地分类死亡人数 (蒙特利尔)',
    'deaths_loc_fig_mtl_pie_labels': [
        '医院',
        '公立长期护理机构',
        '家',
        '中间',
        '私人养老院',
        '其他',
        '未知'
    ],
    # Confirmed deaths by place of residence (QC) fig
    'deaths_loc_fig_qc_label': '按居住地分类死亡人数 (魁省)',
    'chsld_label': '公立长期护理机构',
    'psr_label': '私人养老院',
    'home_label': '家',
    'other_or_unknown_label': '其他或未知',
    'deaths_loc_fig_qc_y_label': '累计死亡 (魁省)',
    # Cases vs New Cases fig
    'cases_vs_newcases_label': '新病例与累计确诊病例对比',
    'cases_vs_newcases_xlabel': '累计确诊病例 (对数比例)',
    'cases_vs_newcases_ylabel': '新增病例 (对数比例)',
    'cases_vs_newcases_legend_mtl': '蒙特利尔',
    'cases_vs_newcases_legend_qc': '魁省',
    'cases_vs_newcases_hovertemplate': '日期: %{customdata} <br> 新增病例: %{y}',
    # Vaccination_fig
    'vaccination_label': '疫苗接种量',  # TODO: add 'Progress'
    'vaccination_y': 'Doses (cumulative)',
    'vaccination_new': '新增接种量',
    'vaccination_total': 'Doses administered',
    'vaccination_total_2d': 'Doses administered (2nd dose)',
    'vaccination_perc': '% of pop received at least 1 dose',
    'vaccination_perc_2d': '% of pop received 2 doses',
    'vaccination_total_mtl': 'Doses administered (MTL)',
    'vaccination_perc_mtl': '接种人口百分比 (蒙特利尔)',
    'vaccination_perc_qc': '接种人口百分比 (魁省)',
    'vaccination_hovertemplate': '接种人数: %{y:,d}<br>Doses available: %{customdata[0]:,d}<br>% of pop received 1 dose: %{customdata[1]:.2f}%',
    'vaccination_hovertemplate_mtl': '接种人数: %{y:,d}<br>% of pop received 1 dose: %{customdata[0]:.2f}%',
    'vaccination_administered_hovertemplate': 'Doses administered: %{y:,d}<br>Doses available: %{customdata[0]:,d}',
    'vaccination_new_mtl': '新增接种量 (蒙特利尔)',
    'vaccination_new_qc': '新增接种量 (魁省)',
    # Vaccination administered fig
    'vaccination_administered_label': 'New doses administered',
    'vaccination_new_y': 'New doses (7-day moving average)',
    'vaccination_new_1d': 'New 1st doses',
    'vaccination_new_2d': 'New 2nd doses',
    # Vaccine delivery fig
    'vaccine_delivery_label': 'Vaccine doses delivered vs. administered',
    'vaccine_received': 'Doses received',
    'vaccine_administered': 'Doses administered',
    'vaccine_available': 'Doses available',
    'vaccine_received_hovertemplate': 'Doses received: %{y:,d}<br>New doses received: %{customdata:,d}',
    # Vaccination_age_fig
    'vaccination_age_label': 'Vaccination by age group',
    'vaccination_categories': ['Not vaccinated', '1 dose received', 'Fully vaccinated'],
    # Variants fig
    'variants_label': 'Progression of new variants of concern (VOC)',
    'variants_sequenced': 'Sequenced cases',
    'variants_presumptive': 'Presumptive cases',
    'variants_new_presumptive': 'New presumptive cases',
    'variants_new_sequenced': 'New sequenced cases',
    'variants_new_cases': 'Total new cases',
    'variants_pos_rate': 'Percent positivity',
    'variants_pos_rate_avg': 'Percent positivity (7-day mov avg)',
    'variants_screened': 'Screened samples',
    'variants_y2': 'Cases (cumulative)',
    'variants_y3': 'Percent Positivity',
    # Range sliders
    '14d': '14d',
    '1m': '1m',
    '3m': '3m',
    '6m': '6m',
    'ytd': 'YTD',
    '1y': '1y',
    'all': 'all'
}
# Build the page layouts once at import time from the translated labels.
layout = build_home_layout(labels)
layout_vaccination = build_vaccination_layout(labels)
|
import unittest
import sys
from lib.geocoding import geocoder
class TestGeocoding(unittest.TestCase):
    """Exercises geocoder.get_place, including its caching path."""

    # Expected coordinates returned for the 'palo alto' query.
    EXPECTED_LAT = 37.4418834
    EXPECTED_LNG = -122.1430195

    def test_get_place(self):
        # The second iteration must be served from the cache and yield
        # exactly the same coordinates as the first lookup.
        for _ in range(2):
            place = geocoder.get_place('palo alto')
            self.assertEqual(place.lat, self.EXPECTED_LAT)
            self.assertEqual(place.lng, self.EXPECTED_LNG)
# Allow running this test module directly with the standard unittest runner.
if __name__ == '__main__':
    unittest.main()
|
#!/usr/bin/python
# Script to insert image tag or version in charts before builds
# Needs version_slice file for processing
import glob
import json
import argparse
# Markers placed in chart YAML files to flag lines whose value must be replaced.
RFW_THIS_LINE_FLAG = "# rfw-update-this"
RFW_NEXT_LINE_FLAG = "# rfw-update-next"
# Default artmap sub-key used when a marker does not name one explicitly.
DEFAULT_ARTMAP_KEY = "version"
# extra artmap key set to version if it differs from component version, otherwise - empty string
NON_COMPON_VERSION = "non-component-version"

# Maps "*comp*<componentName>" and artifact ids to dicts of replacement values.
artmap = dict()


def update_line(line, marker_text):
    """Replace the value of a YAML ``key: value`` line with a version from artmap.

    ``marker_text`` is the preceding RFW_NEXT_LINE_FLAG line when the marker
    refers to this line, or None when the marker is embedded in ``line`` itself.
    Returns the rewritten line (newline included).
    """
    # Extract the artmap index string that follows the flag keyword.
    if marker_text:
        (_, idx) = marker_text.split(RFW_NEXT_LINE_FLAG, 1)
    else:
        (_, idx) = line.split(RFW_THIS_LINE_FLAG, 1)
    # Split the incoming line into key, value and an optional trailing comment.
    left, right = line.split(":", 1)
    rparts = right.split("#", 1)
    # The index can point to a subkey ("artid,key"); default subkey is 'version'.
    artid, akey = idx.split(',') if ',' in idx else (idx, DEFAULT_ARTMAP_KEY)
    new_val = artmap[artid.strip()][akey.strip()]
    # Preserve the original comment (it still carries the newline), or end the line.
    rcomment = " #" + rparts[1] if len(rparts) > 1 else '\n'
    # A separating space is emitted only when the replacement value is non-empty.
    return "{}:{}{}{}".format(left, " "*bool(new_val), new_val, rcomment)


def load_artifacts(input_file):
    """Populate the global artmap from a version_slice JSON file.

    Records one "*comp*<name>" entry per resolved component and one entry per
    artifact id, tagging each artifact with NON_COMPON_VERSION when its version
    differs from its component's resolved version.
    """
    # Read-only access is sufficient ("r+" in the original was unnecessary).
    with open(input_file, "r") as ijs:
        version_slice = json.load(ijs)  # renamed: `slice` shadowed the builtin
    for (comp, version) in version_slice.get('resolvedComponentVersions').items():
        artmap["*comp*" + comp] = {DEFAULT_ARTMAP_KEY: version}
    # to identify NON_COMPON_VERSION flag, count version from resolvedComponentVersions section,
    # not from artifact itself (not componentVersion) - this is required for hotfix processing
    for art in version_slice.get('resolvedArtifacts'):
        if 'artifactId' in art:
            comp_mod_name = "*comp*" + art['componentName']
            differs = artmap[comp_mod_name][DEFAULT_ARTMAP_KEY] != art['version']
            art[NON_COMPON_VERSION] = art['version'] if differs else ''
            artmap[art['artifactId']] = art
def process_yaml(input_yaml, return_modified=False):
    """Apply version replacements to one chart YAML file.

    Lines containing RFW_THIS_LINE_FLAG are rewritten in place; a line with
    RFW_NEXT_LINE_FLAG causes the following line to be rewritten.
    When ``return_modified`` is True the rewritten content is returned as a
    string and the file is left untouched (dry-run mode); otherwise the file
    is overwritten only if something actually changed.
    """
    # Read-only access is sufficient ("r+" in the original was unnecessary).
    with open(input_yaml, "r") as iym:
        ylines = iym.readlines()
    oylines = ylines[:]  # keep an untouched copy to detect changes
    proc_next = None     # holds the marker line after a NEXT flag was seen
    # process line by line and support update of 'next' line
    for yi, tx in enumerate(ylines):
        if RFW_THIS_LINE_FLAG in tx or proc_next:
            ylines[yi] = update_line(tx, proc_next)
            proc_next = None
        elif RFW_NEXT_LINE_FLAG in tx:
            proc_next = tx
    if return_modified:
        return "".join(ylines)
    if oylines != ylines:
        # Use a context manager so the handle is closed deterministically
        # (the original leaked the file object returned by open()).
        with open(input_yaml, 'w') as oym:
            oym.write("".join(ylines))
if __name__ == '__main__':
    # Parse CLI options, load the version slice, then process every chart YAML
    # found exactly one directory level below the working directory.
    parser = argparse.ArgumentParser()
    parser.add_argument("-vs", "--version-slice", help="path to version_slice file", required=True)
    parser.add_argument("-wd", "--working-dir", help="path to directory, default is ./", default='.')
    parser.add_argument("-dry", "--dry-run", help="don't change files, just try to apply versions", action='store_true')
    args = parser.parse_args()
    load_artifacts(args.version_slice)
    for fl in glob.glob(args.working_dir + '/*/*.yaml'):
        # With --dry-run, process_yaml only returns the would-be content.
        process_yaml(fl, args.dry_run)
|
#!/usr/bin/env python3
#
# Copyright 2021 Miklos Vajna. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
"""The cache module accelerates some functions of the areas module."""
import rust
def is_missing_housenumbers_html_cached(ctx: rust.PyContext, relation: rust.PyRelation) -> bool:
    """Decides if we have an up to date HTML cache entry or not.

    Thin wrapper: delegates directly to the Rust implementation.
    """
    return rust.py_is_missing_housenumbers_html_cached(ctx, relation)
def get_missing_housenumbers_html(ctx: rust.PyContext, relation: rust.PyRelation) -> rust.PyDoc:
    """Gets the cached HTML of the missing housenumbers for a relation.

    Thin wrapper: delegates directly to the Rust implementation.
    """
    return rust.py_get_missing_housenumbers_html(ctx, relation)
def get_additional_housenumbers_html(ctx: rust.PyContext, relation: rust.PyRelation) -> rust.PyDoc:
    """Gets the cached HTML of the additional housenumbers for a relation.

    Thin wrapper: delegates directly to the Rust implementation.
    """
    return rust.py_get_additional_housenumbers_html(ctx, relation)
def is_missing_housenumbers_txt_cached(ctx: rust.PyContext, relation: rust.PyRelation) -> bool:
    """Decides if we have an up to date plain text cache entry or not.

    Thin wrapper: delegates directly to the Rust implementation.
    """
    return rust.py_is_missing_housenumbers_txt_cached(ctx, relation)
def get_missing_housenumbers_txt(ctx: rust.PyContext, relation: rust.PyRelation) -> str:
    """Gets the cached plain text of the missing housenumbers for a relation.

    Thin wrapper: delegates directly to the Rust implementation.
    """
    return rust.py_get_missing_housenumbers_txt(ctx, relation)
# vim:set shiftwidth=4 softtabstop=4 expandtab:
|
import math
import mindspore.nn as nn
import mindspore.ops as ops
from mindspore.ops import constexpr
from mindspore.common.initializer import initializer, Normal, Uniform, HeUniform, _calculate_fan_in_and_fan_out
@constexpr
def compute_kernel_size(inp_shape, output_size):
    """Derive a pooling kernel size that maps an NCHW input to `output_size`.

    `output_size` may be a single int (applied to both spatial dims) or a
    2-element list/tuple; any other type leaves the spatial dims unchanged.
    """
    dim_a, dim_b = inp_shape[2], inp_shape[3]
    if isinstance(output_size, int):
        return (math.ceil(dim_a / output_size), math.ceil(dim_b / output_size))
    if isinstance(output_size, (list, tuple)):
        return (math.ceil(dim_a / output_size[0]), math.ceil(dim_b / output_size[1]))
    return (dim_a, dim_b)
class AdaptiveMaxPool2d(nn.Cell):
    """Max pooling whose kernel adapts so the output spatial size is `output_size`."""

    def __init__(self, output_size):
        super().__init__()
        self.output_size = output_size

    def construct(self, x):
        # Kernel size depends on the runtime input shape, so it is computed here.
        ksize = compute_kernel_size(x.shape, self.output_size)
        pool = ops.MaxPool(ksize, ksize)
        return pool(x)
class AdaptiveAvgPool2d(nn.Cell):
    """Average pooling whose kernel adapts so the output spatial size is `output_size`."""

    def __init__(self, output_size):
        super().__init__()
        self.output_size = output_size

    def construct(self, x):
        # Kernel size depends on the runtime input shape, so it is computed here.
        ksize = compute_kernel_size(x.shape, self.output_size)
        pool = ops.AvgPool(ksize, ksize)
        return pool(x)
class MaxPool2d(nn.Cell):
    """2-D max pooling with optional symmetric zero padding on H and W."""

    def __init__(self, kernel_size, stride=None, padding=0):
        super().__init__()
        # Stride defaults to the kernel size, matching common framework behavior.
        stride = kernel_size if stride is None else stride
        self.max_pool = ops.MaxPool(kernel_size, stride)
        self.use_pad = padding != 0
        if isinstance(padding, tuple):
            assert len(padding) == 2
            pad_h, pad_w = padding
            paddings = ((0, 0), (0, 0), (pad_h, pad_h), (pad_w, pad_w))
        elif isinstance(padding, int):
            paddings = ((0, 0), (0, 0), (padding, padding), (padding, padding))
        else:
            raise ValueError('padding should be a tuple include 2 numbers or a int number')
        self.pad = ops.Pad(paddings)

    def construct(self, x):
        # Pad only when a non-zero padding was requested.
        padded = self.pad(x) if self.use_pad else x
        return self.max_pool(padded)
class Dense(nn.Dense):
    """nn.Dense with PyTorch-style defaults: HeUniform weights, Uniform bias."""

    def __init__(self, in_channels, out_channels, weight_init=None, bias_init=None, has_bias=True, activation=None):
        shape = (out_channels, in_channels)
        if weight_init is None:
            weight_init = initializer(HeUniform(math.sqrt(5)), shape)
        if bias_init is None:
            # Bias bound follows torch.nn.Linear: 1/sqrt(fan_in).
            fan_in, _ = _calculate_fan_in_and_fan_out(shape)
            # NOTE(review): (out_channels) is a plain int, not a 1-tuple — the
            # initializer appears to accept that; confirm before "fixing".
            bias_init = initializer(Uniform(1 / math.sqrt(fan_in)), (out_channels))
        super().__init__(in_channels, out_channels, weight_init=weight_init, bias_init=bias_init, has_bias=has_bias, activation=activation)
class CrossEntropyLoss(nn.Cell):
    """Cross entropy loss with optional class weights, ignore_index and label smoothing.

    Accepts either class-index targets or probability targets; see
    `cross_entropy` for the dispatch rule.
    """

    # Reductions accepted by the constructor.
    reduction_list = ['sum', 'mean', 'none']

    def __init__(self, weight=None, ignore_index:int=-100, reduction:str='mean', label_smoothing:float=0.0):
        super().__init__()
        if label_smoothing < 0.0 or label_smoothing > 1.0:
            raise ValueError(f'label_smoothing value must in range [0.0, 1.0], '
                             f'but get {label_smoothing}')
        if reduction not in self.reduction_list:
            raise ValueError(f'Unsupported reduction {reduction}')
        self.weight = weight
        self.ignore_index = ignore_index
        self.reduction = reduction
        self.label_smoothing = label_smoothing

    def construct(self, input, target):
        # Delegate to the functional form with the stored configuration.
        return cross_entropy(input, target, self.weight, self.ignore_index, self.reduction, self.label_smoothing)
# A hand-rolled numerically-stabilized variant existed here previously; the
# fused primitive below replaces it.
def log_softmax(x, axis=-1):
    """Log-softmax along `axis`, delegating to the ops.LogSoftmax primitive."""
    return ops.LogSoftmax(axis)(x)
def cross_entropy(input, target, weight=None, ignore_index=-100, reduction='mean', label_smoothing=0.0):
    """Cross entropy over logits.

    When `target` has as many elements as `input` it is treated as probability
    (soft) targets; otherwise it is treated as class indices and routed through
    NLL on log-softmaxed logits.
    """
    if input.size == target.size:
        return _cross_entropy(input, target, weight, reduction, label_smoothing)
    log_probs = log_softmax(input, 1)
    return nll_loss(log_probs, target, weight, ignore_index, reduction, label_smoothing)
def _cross_entropy(input, target, weight=None, reduction='mean', label_smoothing=0.0):
    """Cross entropy for probability (soft) targets."""
    # Classes live on axis 0 for 1-D logits, axis 1 otherwise.
    class_dim = 0 if input.ndim == 1 else 1
    n_classes = input.shape[class_dim]
    log_probs = log_softmax(input, class_dim)
    if label_smoothing > 0.0:
        # Blend the soft targets towards the uniform distribution.
        target = target * (1 - label_smoothing) + label_smoothing / n_classes
    w = ops.ones_like(log_probs) if weight is None else weight
    weighted = log_probs * target * w
    if reduction == 'mean':
        # Average over the number of samples (total elements / classes).
        return -weighted.sum() / (input.size / n_classes)
    if reduction == 'sum':
        return -weighted.sum()
    return -weighted.sum(class_dim)
def nll_loss(input, target, weight=None, ignore_index=None, reduction='mean', label_smoothing=0.0):
    """Negative log likelihood over log-probabilities for 2-D, 4-D or k-D inputs."""
    ndim = input.ndim
    if ndim == 2:
        return _nll_loss(input, target, -1, weight, ignore_index, reduction, label_smoothing)
    if ndim == 4:
        return _nll_loss(input, target, 1, weight, ignore_index, reduction, label_smoothing)
    # ndim == 3 or ndim > 4: flatten trailing dims, compute, then restore shape.
    n, c = input.shape[0], input.shape[1]
    out_size = (n,) + input.shape[2:]
    flat_input = input.view(n, c, 1, -1)
    flat_target = target.view(n, 1, -1)
    if reduction != 'none':
        return _nll_loss(flat_input, flat_target, 1, weight, ignore_index, reduction, label_smoothing)
    per_elem = _nll_loss(flat_input, flat_target, 1, weight, ignore_index, label_smoothing=label_smoothing)
    return per_elem.view(out_size)
def _nll_loss(input, target, target_dim=-1, weight=None, ignore_index=None, reduction='none', label_smoothing=0.0):
    """Core NLL with optional per-class weights, ignore_index and label smoothing.

    `input` holds log-probabilities with classes on axis `target_dim`; `target`
    holds class indices (one dim fewer than input, or already expanded).
    """
    # Expand targets so they can index the class axis of `input`.
    if target.ndim == input.ndim - 1:
        target = target.expand_dims(target_dim)
    # Pick out the log-probability of the target class for each element.
    nll_loss = -ops.gather_d(input, target_dim, target)
    # Sum of log-probs over all classes; used for the smoothing term.
    smooth_loss = -input.sum(axis=target_dim, keepdims=True)
    if weight is not None:
        # Per-element weight looked up from the per-class weight vector.
        loss_weights = ops.gather(weight, target, 0)
        nll_loss = nll_loss * loss_weights
    else:
        loss_weights = ops.ones_like(nll_loss)
    if ignore_index is not None:
        # NOTE(review): despite the name, this mask is True where
        # target == ignore_index, i.e. it marks the *ignored* entries.
        non_pad_mask = ops.equal(target, ignore_index)
        nll_loss = nll_loss.masked_fill(non_pad_mask, 0.)
        loss_weights = loss_weights.masked_fill(non_pad_mask, 0.)
        smooth_loss = smooth_loss.masked_fill(non_pad_mask, 0.)
    nll_loss = nll_loss.squeeze(target_dim)
    smooth_loss = smooth_loss.squeeze(target_dim)
    if reduction == 'sum':
        nll_loss = nll_loss.sum()
        smooth_loss = smooth_loss.sum()
    if reduction == 'mean':
        # Weighted mean: divide by the sum of effective (non-ignored) weights.
        nll_loss = nll_loss.sum() / loss_weights.sum()
        smooth_loss = smooth_loss.mean()
    # Blend the hard-target loss with the uniform-smoothing term.
    eps_i = label_smoothing / input.shape[target_dim]
    loss = (1. - label_smoothing) * nll_loss + eps_i * smooth_loss
    return loss
|
def power(base, exponent):
    """Print and return base raised to exponent.

    The message text is unchanged; the Python 2 print statement was converted
    to the print() function, and the result is now also returned so callers
    can use it programmatically (backward compatible).
    """
    result = base ** exponent
    print("%d to the power of %d is %d." % (base, exponent, result))
    return result
power(37, 4)
|
"""hhnk URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
import iom.urls
from .views import get_waarnemers, get_meetpunten, get_waarnemingen, HomeView
# URL routes for this site; Django tries patterns in order and uses the first match.
urlpatterns = [
    url(r'^admin/', include(admin.site.urls)),
    url(r'^home$',HomeView.as_view(),name='home'),
    # Data endpoints consumed by the front end.
    url(r'^get/waarnemers', get_waarnemers),
    url(r'^get/meetpunten', get_meetpunten),
    url(r'^get/waarnemingen', get_waarnemingen),
]
# fallback to inlaat op maat default urls
urlpatterns += iom.urls.urlpatterns
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.