content stringlengths 5 1.05M |
|---|
from django.db import models
class Rating(models.Model):
    """A per-person rating record with financial attributes and a computed score."""

    full_name = models.CharField(max_length=255)
    term = models.PositiveSmallIntegerField()
    volume = models.PositiveIntegerField()
    risk_level = models.PositiveSmallIntegerField()
    credit_history = models.BooleanField()
    wages = models.PositiveIntegerField()
    # Nullable: presumably filled in once a rating has been computed — confirm.
    rating = models.DecimalField(max_digits=19, decimal_places=2, blank=True, null=True)

    def __str__(self):
        return self.full_name
# encoding: utf-8
import os, re, requests
import lxml.html
import workon.utils
__all__ = ["opengraph"]
def opengraph(url, *args, **kwargs):
    """Fetch *url* and extract Open Graph (and fallback) metadata from its <head>.

    Streams the response and stops downloading as soon as the closing
    </head> tag is seen, then parses only the head with lxml.

    Returns a dict with list-valued keys 'title', 'site_name', 'icon',
    'keywords', 'description', 'image' plus 'url' and 'base_url'; an empty
    dict when *url* is falsy or the connection fails.  Extra *args/**kwargs
    are forwarded to requests.get().
    """
    metadata = {}
    if not url:
        return metadata
    url = workon.utils.append_protocol(url)
    try:
        r = requests.get(url, *args, **kwargs)
    except requests.ConnectionError:
        return metadata
    # scheme://host with no path, used to absolutize relative links
    base_url = "/".join(r.url.split('/')[0:3]).strip('/')
    # Accumulate raw bytes until </head> so we never download the full body.
    # iter_content yields bytes, so the markers must be bytes too (str
    # markers would raise TypeError on Python 3).  Searching the accumulated
    # buffer (not each chunk) also catches tags that span chunk boundaries.
    content = b""
    head = b""
    for chunk in r.iter_content(chunk_size=512):
        if not chunk:
            continue
        content += chunk
        s = content.find(b'<head')
        if s != -1:
            e = content.find(b'</head>', s)
            if e != -1:
                head = content[s:e + len(b'</head>')]
                break
    if head:
        parser = lxml.html.HTMLParser(encoding=r.encoding)
        html = lxml.html.document_fromstring(head, parser=parser)
        metadata['url'] = url
        metadata['base_url'] = base_url
        metadata['title'] = html.xpath('//meta[@property="og:title"]/@content')
        metadata['title'] += html.xpath('//title/text()')
        metadata['site_name'] = html.xpath('//meta[@property="og:site_name"]/@content')
        metadata['site_name'] += [base_url]
        metadata['icon'] = html.xpath('//link[@rel="icon"]/@href')
        metadata['icon'] += html.xpath('//link[@rel="shortcut icon"]/@href')
        metadata['icon'] += html.xpath('//link[@rel="favicon"]/@href')
        for i, icon in enumerate(metadata['icon']):
            icon = icon.strip()
            if icon:
                if icon.startswith('//'):
                    # protocol-relative URL: just prepend the scheme
                    # (the old "%s/%s" join produced "https:///...")
                    metadata['icon'][i] = "https:" + icon
                elif icon.startswith('/'):
                    metadata['icon'][i] = "%s/%s" % (base_url, icon.lstrip('/'))
        if not metadata['icon']:
            default_favicon = base_url + '/favicon.ico'
            try:
                # best-effort probe: a network error just means "no icon"
                if requests.head(default_favicon).status_code == 200:
                    metadata['icon'] += [default_favicon]
            except requests.RequestException:
                pass
        metadata['keywords'] = html.xpath('//meta[@property="og:keywords"]/@content')
        metadata['keywords'] += html.xpath('//meta[@name="keywords"]/@content')
        metadata['keywords'] += html.xpath('//meta[@name="Keywords"]/@content')
        metadata['description'] = html.xpath('//meta[@property="og:description"]/@content')
        metadata['description'] += html.xpath('//meta[@name="description"]/@content')
        metadata['description'] += html.xpath('//meta[@name="Description"]/@content')
        metadata['image'] = html.xpath('//meta[@property="og:image"]/@content')
        metadata['image'] += html.xpath('//link[@rel="image_src"]/@href')
        for i, img in enumerate(metadata['image']):
            img = img.strip()
            if img:
                if img.startswith('//'):
                    metadata['image'][i] = "https:" + img
                elif img.startswith('/'):
                    metadata['image'][i] = "%s/%s" % (base_url, img.lstrip('/'))
    return metadata
import torch
import torchvision
import torchvision.transforms as transforms
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import torch.optim as optim
import matplotlib.pyplot as plt
import numpy as np
import serial
import numpy as np
import csv
import time
import os
def arrangeTo2D(arr):
    """Reshape a flat sequence into rows of three values each (last row may be short)."""
    width = 3
    return [arr[start:start + width] for start in range(0, len(arr), width)]
def subtractAverageRemoveNoiseGetSum(arr, baseline=None):
    """Subtract the baseline from each reading in place and zero out noise.

    Readings whose absolute deviation from the baseline is below 100 are
    treated as noise and set to 0 (the sensor output generally exceeds 100
    while being touched); the rest are replaced by their deviation.

    Returns (arr, total) where total is the sum of absolute deviations kept.

    *baseline* defaults to the module-level ``ave_arr`` for backward
    compatibility.  NOTE(review): no module-level ave_arr exists in this
    file (loadtestdata keeps its own local one), so the default path raises
    NameError — callers should pass baseline explicitly.
    """
    if baseline is None:
        baseline = ave_arr
    total = 0
    for i in range(len(arr)):
        delta = arr[i] - baseline[i]
        if abs(delta) < 100:
            arr[i] = 0
        else:
            arr[i] = delta
            total += abs(delta)
    return arr, total
def convertToInt(arr):
    """Convert digit strings to ints in place; return [0] if any token is non-numeric."""
    for index, token in enumerate(arr):
        if not token.isnumeric():
            return [0]
        arr[index] = int(token)
    return arr
def loadtraindata():
    """Build a shuffled single-image DataLoader over the training ImageFolder."""
    preprocess = transforms.Compose([
        transforms.Resize((160, 160)),  # scale to a fixed (h, w)
        transforms.CenterCrop(160),
        transforms.ToTensor(),
    ])
    trainset = torchvision.datasets.ImageFolder(
        r"C:\\Users\\ytjun\\Desktop\\kubo\\kubo\\train", transform=preprocess)
    return torch.utils.data.DataLoader(
        trainset, batch_size=1, shuffle=True, num_workers=2)
class Net(nn.Module):
    """Small CNN: two conv+pool stages followed by three fully connected layers.

    Expects (N, 3, 160, 160) input and produces 3 class scores per sample.
    """

    def __init__(self):
        super(Net, self).__init__()
        self.conv1 = nn.Conv2d(3, 6, 5)
        self.pool = nn.MaxPool2d(2, 2)
        self.conv2 = nn.Conv2d(6, 16, 5)
        self.fc1 = nn.Linear(21904, 120)  # 21904 = 16 * 37 * 37 after both stages
        self.fc2 = nn.Linear(120, 84)
        self.fc3 = nn.Linear(84, 3)  # three output classes

    def forward(self, x):
        """Run the two conv/pool stages, flatten, then the FC head."""
        for conv in (self.conv1, self.conv2):
            x = self.pool(F.relu(conv(x)))
        x = x.view(-1, 21904)  # flatten feature maps; element count unchanged
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        return self.fc3(x)
classes = ('0','1')
def loadtestdata():
    """Capture touch frames from the serial sensor into CSV files, then load
    a test image folder as a DataLoader.

    NOTE(review): the capture phase writes CSVs under data/<path_name>, but
    the returned DataLoader reads images from a separate hard-coded folder —
    presumably an external step converts CSVs to images; confirm.
    """
    ser = serial.Serial('COM4', 921600, timeout=None,
                        bytesize=serial.EIGHTBITS, parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE)
    # init a zerofilled array to record the average (idle baseline) array
    ave_arr = [0] * 48
    count = 0        # number of valid 48-value frames seen so far
    file_count = 0   # index used to name the CSV output files
    char = 'w'
    path_name = 'test1'
    # path_name = char + str(int(time.time()))
    if not os.path.exists('data/' + path_name):
        os.mkdir('data/' + path_name)
    temp_data = []   # rows of the touch event currently being recorded
    for i in range(2):
        for line in ser.read():
            raw_data = ser.readline()
            try:
                raw_data = str(raw_data, 'utf-8')
            except UnicodeDecodeError:
                pass  # skip frames that cannot be decoded
            else:
                raw_data = raw_data.strip('AB\n')
                arr = raw_data.split(',')
                arr = convertToInt(arr)
                if len(arr) == 48:
                    if count < 100:
                        # first 100 valid frames accumulate the idle baseline
                        ave_arr = list(np.array(ave_arr) + np.array(arr))
                    elif count == 100:
                        ave_arr = list(np.array(ave_arr) / count)
                        print('start')
                    else:
                        # since it outputs really fast, arrange in a single pass
                        # NOTE(review): subtractAverageRemoveNoiseGetSum reads a
                        # module-level ave_arr, but ave_arr here is local — verify.
                        arr, sum = subtractAverageRemoveNoiseGetSum(arr)
                        # arr = arrangeTo2D(arr)
                        if sum > 200:
                            temp_data.append(arr)
                            print(len(temp_data))
                        # even a gentle touch generates more than 100 rows, so a
                        # short capture is treated as an accidental blur
                        elif len(temp_data) < 80:
                            if len(temp_data) > 0:
                                print('\n\n\n\n\n\n\n\nhold on, your hand blured! Start over\n\n\n\n\n\n\n\n')
                                time.sleep(2)
                                print('OK GO')
                            temp_data = []
                        else:
                            # a complete event ended: flush it to the next CSV
                            f = open('data/' + path_name + '/' + str(file_count) + '.csv', 'w')
                            f_csv = csv.writer(f)
                            f_csv.writerows(temp_data)
                            f.close()
                            temp_data = []
                            print('\n\n\n\n\n\n\n\n' + str(file_count) + '.csv\n\n\n\n\n\n\n\n')
                            file_count += 1
                            time.sleep(1)
                            print('OK GO')
                    count += 1
    ser.close()
    path = r"C:\\Users\\ytjun\\Desktop\\kubo\\kubo\\test1"
    testset = torchvision.datasets.ImageFolder(path,
                                               transform=transforms.Compose([
                                                   transforms.Resize((160, 160)),  # scale to a fixed (h, w)
                                                   transforms.ToTensor()])
                                               )
    testloader = torch.utils.data.DataLoader(testset, batch_size=25,
                                             shuffle=True, num_workers=2)
    return testloader
# def loadtestdata():
# path = r"C:\\Users\\ytjun\\Desktop\\kubo\\kubo\\test"
# testset = torchvision.datasets.ImageFolder(path,
# transform=transforms.Compose([
# transforms.Resize((160, 160)), # 将图片缩放到指定大小(h,w)或者保持长宽比并缩放最短的边到int大小
# transforms.ToTensor()])
# )
# testloader = torch.utils.data.DataLoader(testset, batch_size=25,
# shuffle=True, num_workers=2)
# return testloader
def trainandsave():
    """Train the CNN for 5 epochs on the ImageFolder data and save the result.

    Saves the whole model to net.pkl and just the parameters to net_params.pkl.
    """
    loader = loadtraindata()
    net = Net()
    optimizer = optim.SGD(net.parameters(), lr=0.001, momentum=0.9)
    criterion = nn.CrossEntropyLoss()
    for epoch in range(5):
        running_loss = 0.0
        for step, (inputs, labels) in enumerate(loader):
            inputs, labels = Variable(inputs), Variable(labels)
            # gradients accumulate across steps unless cleared first
            optimizer.zero_grad()
            outputs = net(inputs)
            loss = criterion(outputs, labels)
            loss.backward()
            optimizer.step()
            running_loss += loss.item()
            # report the mean loss over every 200 batches
            if step % 200 == 199:
                print('[%d, %5d] loss: %.3f' %
                      (epoch + 1, step + 1, running_loss / 200))
                running_loss = 0.0
    print('Finished Training')
    torch.save(net, 'net.pkl')               # whole model (structure + params)
    torch.save(net.state_dict(), 'net_params.pkl')  # parameters only
def reload_net():
    """Reload the full model previously saved by trainandsave()."""
    return torch.load('net.pkl')
def imshow(img):
    """Undo the [-1, 1] normalization and display a CHW tensor with pyplot."""
    unnorm = img / 2 + 0.5
    plt.imshow(np.transpose(unnorm.numpy(), (1, 2, 0)))  # CHW -> HWC for pyplot
    plt.show()
def test():
    """Load the saved model, classify one test batch, and display the results.

    Returns the tensor of predicted class indices for the batch.
    """
    testloader = loadtestdata()
    net = reload_net()
    dataiter = iter(testloader)
    # Python 3 iterators expose __next__, not .next(); use the builtin.
    images, labels = next(dataiter)
    imshow(torchvision.utils.make_grid(images, nrow=5))  # 5 images per row
    # range(25) matches the DataLoader's batch_size of 25
    print('GroundTruth: '
          , " ".join('%5s' % classes[labels[j]] for j in range(25)))
    outputs = net(Variable(images))
    _, predicted = torch.max(outputs.data, 1)
    print('Predicted: ', " ".join('%5s' % classes[predicted[j]] for j in range(25)))
    return predicted
if __name__ == '__main__':
    # Ten digit labels; rebinds the module-level two-class tuple defined above.
    classes = ('0', '1','2','3','4','5','6','7','8','9')
    #trainandsave()
    test()
    # if classes[a[0]] == '1':
    #     print("1")
    # if classes[a[0]] == '0':
    #     print("0")
import json
from pathlib import Path
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles
from opal_common.authentication.signer import JWTSigner
class JwksStaticEndpoint:
    """
    configure a static files endpoint on a fastapi app, exposing JWKs.
    """

    def __init__(
        self,
        signer: JWTSigner,
        jwks_url: str,
        jwks_static_dir: str,
    ):
        self._signer = signer
        self._jwks_url = Path(jwks_url)
        self._jwks_static_dir = Path(jwks_static_dir)

    def configure_app(self, app: FastAPI):
        """Write jwks.json into the static dir and mount it on *app*."""
        # make sure the directory that will hold jwks.json exists
        self._jwks_static_dir.mkdir(parents=True, exist_ok=True)
        # an empty key set is served when the signer is disabled
        jwks_contents = {}
        if self._signer.enabled:
            jwks_contents = {"keys": [json.loads(self._signer.get_jwk())]}
        target = self._jwks_static_dir / self._jwks_url.name
        target.write_text(json.dumps(jwks_contents))
        app.mount(
            str(self._jwks_url.parent),
            StaticFiles(directory=str(self._jwks_static_dir)),
            name="jwks_dir",
        )
# Interactive one-shot inventory updater: pick a food, add or remove a quantity.
Inventory = {'tomatoes' : 5}
print(Inventory)
user_food = input("What food do you want to update?")
if user_food not in Inventory:
    print("Sorry, there are no %s in Inventory" % user_food)
else:
    user_operation = input("Do you want to 'add' or 'remove' %s from Inventory?" % user_food)
    if user_operation in ("add", "remove"):
        user_quantity = input("How many do you want to %s?" % user_operation)
        delta = int(user_quantity)
        Inventory[user_food] += delta if user_operation == "add" else -delta
    else:
        print("Please type 'add' or 'remove' to update the quantity of %s in Inventory" % user_food)
print(Inventory)
|
# Generated by Django 2.2.10 on 2020-03-04 11:42
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: alter Agent.based_near to an optional M2M to geonames Place."""

    dependencies = [
        ('agents', '0003_alter_agent_name'),
    ]

    operations = [
        migrations.AlterField(
            model_name='agent',
            name='based_near',
            field=models.ManyToManyField(blank=True, help_text='A location that something is based near, for some broadly human notion of near.', related_name='agents', to='geonames_place.Place'),
        ),
    ]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-02-24 21:38
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: create the Address model linked to rentals.Person."""

    dependencies = [
        ('rentals', '0002_auto_20180224_2031'),
    ]

    operations = [
        migrations.CreateModel(
            name='Address',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('address_1', models.CharField(max_length=128)),
                ('address_2', models.CharField(blank=True, max_length=128)),
                ('city', models.CharField(max_length=64)),
                ('state', models.CharField(max_length=15)),
                # 5 chars: US-style ZIP without the +4 extension
                ('zip_code', models.CharField(max_length=5)),
                ('person', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rentals.Person')),
            ],
        ),
    ]
|
import json
import datetime
from service import db
from service.model.user import User
from service.tests.base import BaseTestCase
from service.tests.utils import add_user, id_generator, add_random_user
class TestUserServiceGET(BaseTestCase):
    ''' Tests for GET on Users Service. '''

    def test_get_pong(self):
        ''' Ensure the availability check is working. '''
        response, data = self.send_get("/ping")
        self.assertEqual(response.status_code, 200)
        self.assertIn('pong!', data['message'])
        self.assertIn('success', data['status'])

    def test_get_user_own(self):
        ''' Ensure the users can retrieve their own information. '''
        user, auth = self.get_user_and_auth(permission="user")
        response, data = self.send_get("/users/" + str(user.id), headers=auth)
        self.assertEqual(response.status_code, 200)
        self.assertIn('User found.', data['message'])
        self.assertIn('success', data['status'])
        self.assertIn(user.username, data['data']['username'])
        self.assertIn(user.email, data['data']['email'])
        self.assertEqual(user.admin, data['data']['admin'])
        self.assertTrue('created_at' in data['data'])

    def test_get_user_other(self):
        ''' Ensure the users can not retrieve informations of other users. '''
        new_user = add_random_user()
        user, auth = self.get_user_and_auth(permission="user")
        response, data = self.send_get("/users/" + str(new_user.id), headers=auth)
        self.assertEqual(response.status_code, 401)
        self.assertIn("You don't have the necessary permissions.", data["message"])
        self.assertIn('error', data['status'])

    def test_admin_get_user(self):
        ''' Ensure admin can access details of users. '''
        user = add_random_user()
        _, auth = self.get_user_and_auth(permission="admin")
        response, data = self.send_get("/users/" + str(user.id), headers=auth)
        self.assertEqual(response.status_code, 200)
        self.assertIn('User found.', data['message'])
        self.assertIn('success', data['status'])
        self.assertIn(user.username, data['data']['username'])
        self.assertIn(user.email, data['data']['email'])
        self.assertEqual(user.admin, data['data']['admin'])
        self.assertTrue('created_at' in data['data'])

    def test_get_user_invalid_id(self):
        ''' Ensure error is thrown if an id is not provided. '''
        _, auth = self.get_user_and_auth(permission="admin")
        # a random non-numeric string stands in for an invalid id
        name_gen = id_generator()
        response, data = self.send_get("/users/" + name_gen, headers=auth)
        self.assertEqual(response.status_code, 404)
        self.assertIn('User does not exist.', data['message'])
        self.assertIn('not found', data['status'])

    def test_get_user_id_not_exists(self):
        ''' Ensure error is thrown if the id does not exist. '''
        _, auth = self.get_user_and_auth(permission="admin")
        response, data = self.send_get("/users/999999999999999999", headers=auth)
        self.assertEqual(response.status_code, 404)
        self.assertIn('User does not exist.', data['message'])
        self.assertIn('not found', data['status'])

    def test_get_users_no_admin(self):
        ''' Ensure get users behaves correctly. '''
        _, auth = self.get_user_and_auth(permission="user")
        response, data = self.send_get("/users", headers=auth)
        self.assertEqual(response.status_code, 401)
        self.assertIn("You don't have the necessary permissions.", data['message'])
        self.assertIn('error', data['status'])

    def test_get_users_admin(self):
        ''' Ensure get users behaves correctly. '''
        _, auth = self.get_user_and_auth(permission="admin")
        # backdate one user so the listing order (newest first) is deterministic
        created = datetime.datetime.utcnow() + datetime.timedelta(-30)
        user_1 = add_random_user(created_at=created)
        user_2 = add_random_user()
        response, data = self.send_get("/users", headers=auth)
        self.assertEqual(response.status_code, 200)
        # 3 users: the authenticated admin plus the two created above
        self.assertEqual(len(data['data']['users']), 3)
        self.assertIn('success', data['status'])
        self.assertIn(user_1.username, data['data']['users'][2]['username'])
        self.assertIn(user_2.username, data['data']['users'][1]['username'])
        self.assertIn(user_1.email, data['data']['users'][2]['email'])
        self.assertIn(user_2.email, data['data']['users'][1]['email'])
        self.assertEqual(user_1.admin, data['data']['users'][2]['admin'])
        self.assertEqual(user_2.admin, data['data']['users'][1]['admin'])
        self.assertTrue('created_at' in data['data']['users'][2])
        self.assertTrue('created_at' in data['data']['users'][1])
|
#!/usr/bin/python3
"""an instance of the specified class"""
def is_same_class(obj, a_class):
    """Return True if obj is exactly an instance of a_class, else False.

    type() comparison is deliberate: unlike isinstance(), subclasses do not
    match.  `is` is the idiomatic identity check for class objects, and the
    comparison already yields a bool, so no ternary is needed.
    """
    return type(obj) is a_class
|
# Package metadata for pyfcm, read by packaging tooling and pyfcm.__init__.
__title__ = 'pyfcm'
__summary__ = 'Python client for FCM - Firebase Cloud Messaging (Android, iOS and Web)'
__url__ = 'https://github.com/olucurious/pyfcm'
__version__ = '1.5.2'
__author__ = 'Emmanuel Adegbite'
__email__ = 'olucurious@gmail.com'
__license__ = 'MIT License'
|
#! /usr/bin/env python
import numpy as np
import matplotlib.pyplot as plt
# Written by Vasaant S/O Krishnan on Tuesday, 06 March 2018
# 03-dirac-vis.py "dirac visibility" plots aim to replicates figures from
# page 33 of "00 Fundamentals of Radio Interferometry 1 - Perley.pdf"
# https://science.nrao.edu/science/meetings/2015/summer-schools/interferometry-program
# Which is the integrated source visibility, V(u) represented as a
# complex number, when I(l) is a dirac delta function.
#
# As of Tuesday, 06 March 2018, 16:12 pm I have not been able to
# perfectly replicate the images in the slides. And do not have as
# much of an "intuitive" feel for the function V(u) as I would like.
#
# As of Wednesday, 01 August 2018, 14:41 pm I've managed to replicate
# the slide images in a way which makes sense. It looks like "i/np.pi"
# in the Real and Imag components is needed to represent the baselines
# in units of radians of pi. And do not have as much of an "intuitive"
# feel for the function V(u) as I would like.
#
# As of Thursday, 11 April 2019, 13:07 PM I've made changes here which
# are based on 04-box-vis.py. Previously, in 04-box-vis.py, I had been
# having difficulty in getting the sine component of V(u) to replicate
# what I found in in page 34 of "00 Fundamentals of Radio
# Interferometry 1 - Perley.pdf" by using np.angle to automatically
# compute the phase angle. However, I was able to replicate the phases
# for 04-box-vis.py by manually computing amp = sqrt(cos**2 + sin**2)
# and phase = arctan(sin/cos). From my understanding, the former is
# the more robust (and correct?) method, though I will stick with the
# latter for now.
#
# As of Saturday, 18 May 2019, 06:36 am I have changed
# cosr = [np.cos(2 * np.pi * k/np.pi * l) for k in u] # Real component
# to
# cosr = [np.cos(2 * np.pi * k * l/np.pi) for k in u] # Real component
# from when the former was implemented on Wednesday, 01 August 2018,
# 14:41 pm in 03-dirac-vis.py. I think it makes sense as l is supposed
# to be the angular source position. and by dividing by pi, we represent it
# it in units of radians instead of an arbitrary number.
#=====================================================================
# User variables
#
l = 0          # Source position (dimensionless)
ulim = 5       # Limit of range of baselines (wavelengths)
steps = 10000  # number of baseline samples
#=====================================================================
#=====================================================================
# Compute the visibility V(u) of a dirac-delta source at position l.
# Dividing l by pi expresses the source position in radians of pi.
#
u = np.linspace(ulim, -ulim, steps)  # baseline span
uDeg = np.degrees(u)
cosr = [np.cos(2 * np.pi * b * l / np.pi) for b in u]  # Real component
sinr = [np.sin(2 * np.pi * b * l / np.pi) for b in u]  # Imag component
# Amp and phase computed manually from the components:
amp = [np.sqrt(re ** 2 + im ** 2) for re, im in zip(cosr, sinr)]
pha = [np.arctan(im / re) for re, im in zip(cosr, sinr)]
# pha = [np.arctan2(im, re) for re, im in zip(cosr, sinr)]  # akin to np.angle
# numpy's built-in alternative:
# vis = [complex(re, im) for re, im in zip(cosr, sinr)]  # Visibility, V(u)
# amp = [np.abs(v) for v in vis]
# pha = [np.angle(v) for v in vis]
#=====================================================================
#=====================================================================
# Plot: source profile, V(u) components, and V(u) amp/phase side by side.
#
fig = plt.figure()
# Plot source, which is a Dirac delta function (drawn as an arrow at l)
ax1 = fig.add_subplot(131)
ax1.arrow(l, 0, 0, 0.95,
          head_width = 0.50,
          head_length = 0.05,
          fc = 'k',
          ec = 'k')
ax1.set_xlim(-10, 10)
ax1.set_xticks(np.arange(-10, 11, 2))
ax1.set_ylim([0, 1.06])
ax1.set_xlabel('Source position')
ax1.set_title( '$\\delta (\\ell - %d$)'%l)
# Plot the visibility cosine and sine components; the twin x-axis shows
# the same baselines in degrees
ax2 = fig.add_subplot(132)
ax3 = ax2.twiny()
ax2.plot( u, cosr, color = 'r')
ax3.plot(uDeg, sinr, color = 'b')
ax2.set_xlabel('Baseline (Spatial frequency)')
ax3.set_xlabel('(degrees)')
ax2.set_title( 'V(u): Real (r) and Imag (b)', y=1.09)
ax2.set_xlim([-ulim, ulim])
ax2.set_ylim(-1.1, 1.1)
# Plot the visibility amplitude and phase
ax4 = fig.add_subplot(133)
ax5 = ax4.twiny()
ax4.plot( u, amp, color = 'r')
ax5.plot(uDeg, pha, color = 'b')
ax4.set_xlabel('Baseline (Spatial frequency)')
ax4.set_title( 'V(u): Amp (r) and Phas (b)', y=1.09)
ax4.set_xlim([-ulim, ulim])
# for l == 0 the amplitude is flat at 1, so tighten the y range
if l == 0:
    ax4.set_ylim(-0.1, 1.1)
plt.show()
|
from apps.repo.models import Answers
from apps.accounts.models import User
from django.db.models import Count
from apps.repo.models import Answers, User, Questions
from django.db.models import Count
def check_rank(data):
    """Sort key for ranking: the aggregated answer count of one row."""
    return data["id__count"]
def recent_user():
    """Return up to 10 of the users who answered questions most recently."""
    # per-user answer counts (more recent answerers appear later)
    result = Answers.objects.values_list('user').annotate(Count('id'))
    # keep the last 10 user ids
    user_id_list = [item[0] for item in result][-10:]
    userlist = User.objects.filter(id__in=user_id_list)
    return userlist
def user_answer_data(user):
    """Collect answer statistics and ranking info for *user*.

    Returns a dict with the user's answer count, the total question count,
    the total user count, and the user's rank entry ({"rank": 0} when the
    user has not answered anything yet).
    """
    answer_num = Answers.objects.filter(user=user).count()
    # count() lets the database count rows; __len__() fetched every object
    question_all = Questions.objects.count()
    user_sum = User.objects.count()
    # answers per user, e.g. [{'user': 1, 'id__count': 3}, ...],
    # sorted by answer count descending
    rank = Answers.objects.values('user').annotate(Count('id'))
    rank = sorted(rank, key=check_rank, reverse=True)
    # Competition ranking: users with equal counts share the same rank.
    # (For efficiency this could be cached, e.g. in memcache.)
    rank_dict = {}
    cur_rank = 0
    cur_count = 0
    for index, item in enumerate(rank, start=1):
        # only advance the rank when the count differs from the previous user
        if cur_count != item["id__count"]:
            cur_rank = index
            cur_count = item["id__count"]
        rank_dict[item["user"]] = dict(item, **{"rank": cur_rank})
    return {
        "answer_num": answer_num,
        "question_all": question_all,
        "user_sum": user_sum,
        "rank": rank_dict[user.id] if answer_num else {"rank": 0, },
    }
|
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
import sys
# Plot a whitespace-delimited numeric file (path given on the command line)
# as a single connected series with point markers.
filename = sys.argv[1]
frame = pd.read_csv(filename, header=None, delim_whitespace=True)
data = np.array(frame).flatten()
print(data)
fig, ax = plt.subplots()
ax.plot(range(len(data)), data, '-o')
plt.show()
|
import datetime
import factory
from taggit import models as taggit_models
from . import models
from ..core.utils import datetime_now
class UserFactory(factory.django.DjangoModelFactory):
    """Factory for models.User with sequential identifiers and dated fields."""

    class Meta:
        model = models.User

    nsid = factory.Sequence(lambda n: "%s@N01" % (n * 10000))
    username = factory.Sequence(lambda n: "user%04d" % n)
    realname = factory.Sequence(lambda n: "User Name %04d" % n)
    iconserver = 1234
    iconfarm = 5
    timezone_id = "America/Los_Angeles"
    photos_url = factory.Sequence(
        lambda n: "https://www.flickr.com/photos/user%04d/" % n
    )
    photos_first_date = factory.LazyAttribute(
        lambda o: datetime_now() - datetime.timedelta(weeks=52)
    )
    photos_first_date_taken = factory.LazyAttribute(
        lambda o: datetime_now() - datetime.timedelta(weeks=52)
    )
    # LazyFunction evaluates per built instance; the previous bare
    # datetime_now() call was evaluated once at import time and shared
    # by every object (inconsistent with the LazyAttribute fields above).
    fetch_time = factory.LazyFunction(datetime_now)
    avatar = factory.django.ImageField(filename="my_avatar.jpg")
class AccountFactory(factory.django.DjangoModelFactory):
    """Factory for models.Account with an auto-created related User."""

    class Meta:
        model = models.Account

    user = factory.SubFactory(
        UserFactory,
        # numeric usernames distinguish sub-factory users from direct ones
        username=factory.Sequence(lambda n: n),
        realname=factory.Sequence(lambda n: "User Name %04d" % n),
    )
class PhotoFactory(factory.django.DjangoModelFactory):
    """Factory for models.Photo with a related User and full size metadata."""

    class Meta:
        model = models.Photo

    user = factory.SubFactory(
        UserFactory,
        username=factory.Sequence(lambda n: n),
        realname=factory.Sequence(lambda n: "User Name %04d" % n),
    )
    flickr_id = factory.Sequence(lambda n: (n * 1000000))
    title = factory.Sequence(lambda n: "Photo %d" % n)
    secret = factory.Sequence(lambda n: (n * 10000))
    original_secret = factory.Sequence(lambda n: (n * 10001))
    original_format = "jpg"
    server = "987"
    farm = 2
    media = "photo"
    # LazyFunction evaluates per built instance; the previous bare
    # datetime_now() call was evaluated once at import time and shared
    # by every object (inconsistent with the LazyAttribute fields below).
    post_time = factory.LazyFunction(datetime_now)
    taken_time = factory.LazyAttribute(
        lambda o: datetime_now() - datetime.timedelta(weeks=3)
    )
    last_update_time = factory.LazyAttribute(
        lambda o: datetime_now() - datetime.timedelta(weeks=2)
    )
    fetch_time = factory.LazyAttribute(
        lambda o: datetime_now() - datetime.timedelta(weeks=1)
    )
    original_file = factory.django.ImageField(filename="example.jpg")
    video_original_file = factory.django.FileField(filename="example.mov")
    # Flickr's standard size ladder, smallest to largest
    thumbnail_width = 100
    thumbnail_height = 67
    small_width = 240
    small_height = 160
    small_320_width = 320
    small_320_height = 213
    medium_width = 500
    medium_height = 333
    medium_640_width = 640
    medium_640_height = 427
    medium_800_width = 800
    medium_800_height = 533
    large_width = 1024
    large_height = 683
    large_1600_width = 1600
    large_1600_height = 1067
    large_2048_width = 2048
    large_2048_height = 1365
    x_large_3k_width = 3072
    x_large_3k_height = 2048
    x_large_4k_width = 4096
    x_large_4k_height = 2731
    x_large_5k_width = 5120
    x_large_5k_height = 3413
    x_large_6k_width = 6000
    x_large_6k_height = 4000
    original_width = 6000
    original_height = 4000
class TagFactory(factory.django.DjangoModelFactory):
    """Factory for taggit Tags with sequential slugs."""

    class Meta:
        model = taggit_models.Tag

    slug = factory.Sequence(lambda n: "slug%d" % n)
    # NOTE(review): name aliases the same Sequence declaration as slug —
    # presumably intentional so both fields share one value; confirm.
    name = slug
class TaggedPhotoFactory(factory.django.DjangoModelFactory):
    """Factory for the through-model linking a Tag to a Photo with an author."""

    class Meta:
        model = models.TaggedPhoto

    tag = factory.SubFactory(TagFactory)
    flickr_id = factory.Sequence(lambda n: (n * 1000))
    author = factory.SubFactory(UserFactory)
    machine_tag = False
    content_object = factory.SubFactory(PhotoFactory)
class PhotosetFactory(factory.django.DjangoModelFactory):
    """Factory for models.Photoset with a related User."""

    class Meta:
        model = models.Photoset

    user = factory.SubFactory(
        UserFactory,
        username=factory.Sequence(lambda n: n),
        realname=factory.Sequence(lambda n: "User Name %04d" % n),
    )
    flickr_id = factory.Sequence(lambda n: (n * 1000000))
    title = factory.Sequence(lambda n: "Photoset %d" % n)
    # LazyFunction evaluates per built instance; the previous bare
    # datetime_now() calls were evaluated once at import time and shared
    # by every object built from this factory.
    flickr_created_time = factory.LazyFunction(datetime_now)
    fetch_time = factory.LazyFunction(datetime_now)
|
import math
import locale
locale.setlocale(locale.LC_ALL, '')
def get_change(x, y):
    """Return how many coins of denomination y fit into amount x.

    Result: {"change": whole coin count, "left_over": the remainder
    rounded up to the nearest cent to absorb float error}.
    """
    coins = math.floor(x / y)
    left_over = math.ceil((x - coins * y) * 100) / 100
    return {"change": coins, "left_over": left_over}
# Interactive loop: repeatedly read an amount and report its coin breakdown.
# Runs forever by design (exit with Ctrl-C / EOF).
while 1:
    cash = float(input("Enter amount: "))
    if cash < 0:
        print("Invalid input.")
        continue
    # greedy breakdown, largest denomination first
    change = get_change(cash, .25)
    quarters = change["change"]
    change = get_change(change["left_over"], .10)
    dimes = change["change"]
    # NOTE(review): nickels are skipped — remainders of 5-9 cents become
    # pennies only; confirm this is intended.
    pennies = math.ceil(change["left_over"]*100)
    print(locale.currency(cash, grouping=True),
          "makes ", quarters, " quarters, ", dimes, " dimes, and ", pennies,
          " pennies (", quarters + dimes + pennies, " coins), total amount in coins: ",
          locale.currency(cash, grouping=True), ".")
|
# coding: UTF-8
# Prints problem #51 verbatim: a classic "five ladies" Einstein-style logic
# puzzle, in Chinese.  The raw string is program output and is left untouched.
print(r'''
【程序51】题目: 小姐队列
本题是柏林大学的一次考试题,要求在30分钟内做出,不过只有少于10%的人完成了要求。计分是这样的,共150分,从1到30分钟,每加1分钟减2分,那么30分钟答出就是90分,是及格分;从30分钟以后每加1分钟减1分。我当时用了22分钟,大家也试试,看自己能得多少分。
原题:
有五位小姐排成一列,所有的小姐姓不同、穿的衣服颜色不同、喝不同的饮料、养不同的宠物、吃不同的水果。
钱小姐穿红色衣服;翁小姐养了一只狗;陈小姐喝茶;穿绿衣服的站在穿
白衣服的左边;穿绿衣服的小姐喝咖啡;吃西瓜的小姐养鸟;穿黄衣服的小姐吃
梨;站在中间的小姐喝牛奶;赵小姐站在最左边;吃橘子的小姐站在养猫的旁边;
养鱼的小姐旁边的那位吃梨;吃苹果的小姐喝香槟;江小姐吃香蕉;赵小姐站在
穿蓝衣服的小姐旁边;喝开水的小姐站在吃橘子的小姐旁边;请问哪位小姐养蛇?
你可以收到题后,看三遍,然后开始计时做题。
''')
import random  # NOTE(review): unused here — presumably meant for a solver; confirm
|
def command_ping(self, command, user):
    """Chat-bot command handler: reply "Pong" to the configured channel.

    `command` and `user` match the handler signature but are unused here.
    """
    self.send_message(self.CHAN, "Pong")
|
/*
* Copyright (c) 2020 Huawei Technologies Co.,Ltd.
*
* openGauss is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*
* http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
* See the Mulan PSL v2 for more details.
*/
"""
Copyright (c) 2020 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
from gensim.models import word2vec
from .sql_processor import sql_filter
class MySentence(object):
    """Iterable of token lists for word2vec, built from (_, sql) pairs."""

    def __init__(self, data):
        self.data = data

    def __iter__(self):
        # filter each SQL statement, then tokenize on whitespace
        for _, statement in self.data:
            yield sql_filter(statement).split()
class Word2Vector(object):
    """Thin wrapper around gensim word2vec for embedding SQL statements.

    str2vec flattens per-token vectors into one fixed-length (max_len)
    feature vector, truncating or zero-padding as needed.
    """

    def __init__(self, max_len=150, **kwargs):
        # model is created lazily by train()/load_model()
        self.model = None
        # forwarded verbatim to word2vec.Word2Vec; 'size' is also read by str2vec
        self.params = kwargs
        self.max_len = max_len

    def train(self, sentence):
        """Train a fresh model on an iterable of (_, sql) pairs."""
        sentence = MySentence(sentence)
        self.model = word2vec.Word2Vec(sentence, **self.params)

    def update(self, sentence):
        """Incrementally train the existing model on new data."""
        sentence = MySentence(sentence)
        self.model.build_vocab(sentence, update=True)
        # NOTE(review): model.iter and `item in self.model` (below) are
        # pre-4.0 gensim APIs (now `epochs` / `in model.wv`) — confirm the
        # pinned gensim version.
        self.model.train(sentence, total_examples=self.model.corpus_count, epochs=self.model.iter)

    def str2vec(self, string):
        """Embed a whitespace-tokenized string into a max_len float vector."""
        str_vec = list()
        for item in string.strip().split():
            if item in self.model:
                str_vec.extend(self.model[item])
            else:
                # unknown token -> zero vector of the embedding size
                str_vec.extend([0.0] * self.params.get('size'))
        # truncate or zero-pad to exactly max_len values
        if len(str_vec) >= self.max_len:
            del str_vec[self.max_len:]
        else:
            str_vec.extend([0.0] * (self.max_len - len(str_vec)))
        return str_vec

    def save_model(self, model_path):
        """Persist the trained model to *model_path*."""
        self.model.save(model_path)

    def load_model(self, model_path):
        """Load a previously saved model from *model_path*."""
        self.model = word2vec.Word2Vec.load(model_path)
|
#!/usr/bin/env python
import sys
import os
import os.path
import logging
import numpy
import icecube.icetray
import icecube.dataio
import icecube.clsim
import icecube.clsim.shadow
import icecube.clsim.shadow.cylinder_utils
import icecube.dataclasses
import icecube.phys_services
import icecube.simclasses
from I3Tray import I3Tray
from I3Tray import I3Units
# Fixed-seed RNG and input file locations shared by the simulation below.
randomService = icecube.phys_services.I3GSLRandomService(seed = 10)
gcd_file = os.getenv('I3_TESTDATA') + '/GCD/GeoCalibDetectorStatus_IC86.55697_corrected_V2.i3.gz'
# measured per-DOM cable orientation angles from the ice-models project
cable_orientations = os.path.expandvars("$I3_BUILD/ice-models/resources/models/cable_position/orientation.led7.txt")
class InjectCables(icecube.icetray.I3Module):
    '''
    This module injects cylinders as cables reading the measured orientations
    stored in a text file in the ice-models project.
    '''

    def __init__(self, context):
        icecube.icetray.I3Module.__init__(self, context)
        self.AddParameter("CableMapName", "Frame key of the cable map", 'CableMap')

    def Configure(self):
        self.cable_map_name = self.GetParameter("CableMapName")

    def Geometry(self, frame):
        """Attach a cylinder (cable) next to each DOM listed in the orientation file."""
        dom_radius = 165.1*I3Units.mm
        cable_radius = 23*I3Units.mm
        cable_length = 10*I3Units.m
        # cable axis sits at DOM radius + cable radius from the DOM center
        radius = dom_radius + cable_radius
        cylinder_map = icecube.simclasses.I3CylinderMap()
        geo = frame['I3Geometry'].omgeo
        for string, om, angle, _ in numpy.loadtxt(cable_orientations,
                dtype=[('string',int),('om',int),('angle',float),('angle_err',float)]):
            # offset of the cable axis in the horizontal plane, from the angle
            dx = radius*numpy.cos(numpy.radians(angle))
            dy = radius*numpy.sin(numpy.radians(angle))
            # cylinder endpoints relative to the DOM center
            top_relative = icecube.dataclasses.I3Position(dx, dy, cable_length/2.)
            bottom_relative = icecube.dataclasses.I3Position(dx, dy, -cable_length/2.)
            omkey = icecube.icetray.OMKey(int(string), int(om))
            dom_position = geo[omkey].position
            # want positions in lab/detector coordinates
            cylinder_top = dom_position + top_relative
            cylinder_bottom = dom_position + bottom_relative
            cylinder_map[omkey] = icecube.simclasses.I3ExtraGeometryItemCylinder(cylinder_top, cylinder_bottom, cable_radius)
        frame[self.cable_map_name] = cylinder_map
        self.PushFrame(frame)
class GenerateEvent(icecube.icetray.I3Module):
    """I3Module that injects a simple two-particle MC event into each DAQ frame.

    Builds an I3MCTree with a NuE primary and a configurable in-ice daughter
    (type/energy/position/direction), then requests suspension after NEvents.
    """

    def __init__(self, context):
        icecube.icetray.I3Module.__init__(self, context)
        self.AddParameter("I3RandomService", "the service", None)
        self.AddParameter("Type", "", icecube.dataclasses.I3Particle.ParticleType.EMinus)
        self.AddParameter("Energy", "", 10.*I3Units.TeV)
        self.AddParameter("NEvents", "", 1)
        self.AddParameter("XCoord", "", 0.)
        self.AddParameter("YCoord", "", 0.)
        self.AddParameter("ZCoord", "", 0.)
        self.AddParameter("PrimaryDirection", "", icecube.dataclasses.I3Direction(0., 0., -1.))
        self.AddParameter("DaughterDirection", "", icecube.dataclasses.I3Direction(0., 0., -1.))

    def Configure(self):
        # cache all parameters once; they are reused for every DAQ frame
        self.rs = self.GetParameter("I3RandomService")
        self.particleType = self.GetParameter("Type")
        self.energy = self.GetParameter("Energy")
        self.nEvents = self.GetParameter("NEvents")
        self.xCoord = self.GetParameter("XCoord")
        self.yCoord = self.GetParameter("YCoord")
        self.zCoord = self.GetParameter("ZCoord")
        self.primary_direction = self.GetParameter("PrimaryDirection")
        self.daughter_direction = self.GetParameter("DaughterDirection")
        self.eventCounter = 0

    def DAQ(self, frame):
        """Add an I3MCTree (primary + daughter at the same vertex) to the frame."""
        daughter = icecube.dataclasses.I3Particle()
        daughter.type = self.particleType
        daughter.energy = self.energy
        daughter.pos = icecube.dataclasses.I3Position(self.xCoord, self.yCoord, self.zCoord)
        daughter.dir = self.daughter_direction
        daughter.time = 0.
        daughter.location_type = icecube.dataclasses.I3Particle.LocationType.InIce
        primary = icecube.dataclasses.I3Particle()
        primary.type = icecube.dataclasses.I3Particle.ParticleType.NuE
        primary.energy = self.energy
        primary.pos = icecube.dataclasses.I3Position(self.xCoord, self.yCoord, self.zCoord)
        primary.dir = self.primary_direction
        primary.time = 0.
        primary.location_type = icecube.dataclasses.I3Particle.LocationType.Anywhere
        mctree = icecube.dataclasses.I3MCTree()
        mctree.add_primary(primary)
        mctree.append_child(primary, daughter)
        frame["I3MCTree"] = mctree
        self.PushFrame(frame)
        self.eventCounter += 1
        # stop the tray once the requested number of events has been emitted
        if self.eventCounter == self.nEvents:
            self.RequestSuspension()
# --- Tray assembly: inject cables, generate events, propagate photons, ------
# --- remove shadowed photons and write the result out. ----------------------
tray = I3Tray()
cable_map_name = 'CableMap'
# Emit DAQ frames seeded from the GCD file.
tray.Add("I3InfiniteSource" ,
         Prefix=os.path.expandvars(gcd_file),
         Stream=icecube.icetray.I3Frame.DAQ)
# AddCylinders puts a cylinder next to each DOM using
# the same orientation for each cylinder.
# It's recommended to use the measured orientations,
# which is what InjectCables does.
#tray.Add(icecube.clsim.shadow.cylinder_utils.AddCylinders,
#         CableMapName = cable_map_name,
#         CylinderLength = 17.0,
#         CylinderRadius = 0.023)
tray.Add(InjectCables, CableMapName=cable_map_name)
tray.Add("Dump")
tray.Add(GenerateEvent,
         Type = icecube.dataclasses.I3Particle.ParticleType.MuMinus,
         NEvents = 10,
         XCoord = -256.14,
         YCoord = -521.08,
         ZCoord = 496.03,
         PrimaryDirection = icecube.dataclasses.I3Direction(0 , 0 ,-1),
         DaughterDirection = icecube.dataclasses.I3Direction(0 , 0 , -1),
         I3RandomService = randomService,
         Energy = 10.0*I3Units.TeV )
photonSeriesName = "Photons"
# Prefer GPUs when any available OpenCL device reports as a GPU.
usegpus = any([device.gpu for device in icecube.clsim.I3CLSimOpenCLDevice.GetAllDevices()])
tray.Add(icecube.clsim.I3CLSimMakePhotons,
         UseGPUs = usegpus,
         UseOnlyDeviceNumber=0,
         UseCPUs = not usegpus,
         PhotonSeriesName = photonSeriesName,
         RandomService = randomService,
         IceModelLocation = os.path.expandvars("$I3_BUILD/ice-models/resources/models/spice_lea"),
         GCDFile = gcd_file)
# Drop photons whose path is blocked by an injected cable cylinder.
tray.Add("I3ShadowedPhotonRemoverModule",
         InputPhotonSeriesMapName = photonSeriesName,
         OutputPhotonSeriesMapName = photonSeriesName+'Shadowed',
         CableMapName = cable_map_name,
         Distance = 20.0)
# Report the average fraction of photons removed by the shadowing step.
tray.AddModule(icecube.clsim.shadow.cylinder_utils.AverageShadowFraction,
               PhotonMapName=photonSeriesName,
               ShadowedPhotonMapName=photonSeriesName+'Shadowed',
               )
tray.Add("I3Writer", filename='shadowed_photons_removed.i3')
tray.Execute()
|
# !!!! important !!!!
# be sure to add:
# nameofthefile-x-
# in front of every id, otherwise Dash will not work. Read more in readme.md
from dash import Dash, dcc, html, Input, Output
import plotly.express as px
app = Dash(__name__)

# Layout: every component id carries the "template-x-" file-name prefix
# required by the multi-file setup described in readme.md.
app.layout = html.Div([
    dcc.Graph(id="template-x-graph"),
    html.P("Title"),
    dcc.Input(id="template-x-title", value="", type="text"),
])
@app.callback(
    Output("template-x-graph", "figure"),
    Input("template-x-title", "value"))
def display_(value):
    """Redraw the graph whenever the title input changes.

    Fixes two defects in the original template: the Input referenced a
    non-existent component id ("template-x-component" — the layout defines
    "template-x-title"), and the callback returned ``None`` where Dash
    expects a figure for the Output("...", "figure") target.
    """
    # An empty scatter is enough for a template; only the title is dynamic.
    return px.scatter(title=value)
if __name__ == "__main__":
    # Development server only; do not run with debug=True in production.
    app.run_server(debug=True)
|
import pytest
import psycopg2
from sqlalchemist import get_config
@pytest.fixture
def config():
    """Provide the application configuration loaded via sqlalchemist."""
    return get_config()
def test_configuration(config):
    """The loaded config must expose a database section with a DSN entry."""
    assert "database" in config
    assert "DB_CONNECTION_STRING" in config.database
def test_connection(config):
    """Open (and always close) a connection using the configured DSN.

    Fixes two defects: the original ignored the injected ``config`` fixture
    by calling ``get_config()`` again, and it leaked the connection it opened.
    """
    conn = psycopg2.connect(config.database.DB_CONNECTION_STRING)
    try:
        # psycopg2 exposes ``closed == 0`` while the connection is open.
        assert conn.closed == 0
    finally:
        conn.close()
|
# Public embedding-backend API, re-exported from the submodules below.
__all__ = [
    "Embeddings", "FastText",
    "FastTextEmbeddings", "GloveFormatEmbeddings", "HDF5Embeddings",
    "BinaryEmbeddings"
]

from .embeddings import *
from .fasttext import *
from .glove import *
from .hdf5 import *
from .bin import *
|
from django.core.management.base import BaseCommand, CommandError
from build.management.commands.base_build import Command as BaseBuild
from django.conf import settings
from django.db import connection
from django.db import IntegrityError
from protein.models import (Protein, ProteinConformation, ProteinState, ProteinFamily, ProteinAlias,
ProteinSequenceType, Species, Gene, ProteinSource, ProteinSegment)
from residue.models import (ResidueNumberingScheme, ResidueGenericNumber, Residue, ResidueGenericNumberEquivalent)
from signprot.models import SignprotComplex, SignprotStructure, SignprotStructureExtraProteins
from common.models import WebResource, WebLink, Publication
from structure.models import StructureType, StructureStabilizingAgent, PdbData, Rotamer
from structure.functions import get_pdb_ids
import re
from Bio import pairwise2
from collections import OrderedDict
import logging
import shlex, subprocess
from io import StringIO
from Bio.PDB import PDBParser, PPBuilder, PDBIO, Polypeptide
from Bio import pairwise2
import pprint
import json
import yaml
import urllib
import traceback
import sys, os
import datetime
# Three-letter -> one-letter amino acid codes, including a few modified
# residues (YCM, CSD, TYS, SEP) that appear in PDB entries.
AA = {"ALA":"A", "ARG":"R", "ASN":"N", "ASP":"D",
      "CYS":"C", "GLN":"Q", "GLU":"E", "GLY":"G",
      "HIS":"H", "ILE":"I", "LEU":"L", "LYS":"K",
      "MET":"M", "PHE":"F", "PRO":"P", "SER":"S",
      "THR":"T", "TRP":"W", "TYR":"Y", "VAL":"V",
      "YCM":"C", "CSD":"C", "TYS":"Y", "SEP":"S"} #non-standard AAs
# Expected atom count per residue (one-letter code); '-' marks a gap. Used by
# create_structure_rotamer below to flag rotamers with missing atoms.
atom_num_dict = {'E':9, 'S':6, 'Y':12, 'G':4, 'A':5, 'V':7, 'M':8, 'L':8, 'I':8, 'T':7, 'F':11, 'H':10, 'K':9,
                 'D':8, 'C':6, 'R':11, 'P':7, 'Q':9, 'N':8, 'W':14, '-':0}
class Command(BaseBuild):
    """Build G protein structure records.

    Two phases:
      1. For each SignprotComplex, create a per-structure alpha-subunit
         Protein/ProteinConformation and its Residue/Rotamer objects, mapping
         structure residues to wild-type residues (with many hand-curated
         per-PDB alignment fixes).
      2. For alpha subunits not in a complex, fetch entry metadata from the
         RCSB REST API and create SignprotStructure entries.
    """

    # Local UniProt dump directories; beta/gamma ones are used to recognise
    # subunits by accession in fetch_gprot_data.
    local_uniprot_dir = os.sep.join([settings.DATA_DIR, "g_protein_data", "uniprot"])
    local_uniprot_beta_dir = os.sep.join([settings.DATA_DIR, "g_protein_data", "uniprot_beta"])
    local_uniprot_gamma_dir = os.sep.join([settings.DATA_DIR, "g_protein_data", "uniprot_gamma"])
    # Display-name lookup, loaded once at class-definition time.
    with open(os.sep.join([settings.DATA_DIR, "g_protein_data", "g_protein_display_names.yaml"]), "r") as y:
        display_name_lookup = yaml.load(y, Loader=yaml.FullLoader)

    def add_arguments(self, parser):
        """Register command-line options."""
        parser.add_argument("--purge_complex", default=False, action="store_true", help="Purge G protein complex structures from database")
        parser.add_argument("--purge_non_complex", default=False, action="store_true", help="Purge G protein non-complex structures from database")
        parser.add_argument("--only_signprot_structures", default=False, action="store_true", help="Only build SignprotStructure objects")
        parser.add_argument("-s", default=False, type=str, action="store", nargs="+", help="PDB codes to build")
        parser.add_argument("--debug", default=False, action="store_true", help="Debug mode")

    def handle(self, *args, **options):
        """Entry point: optional purge, then the two build phases."""
        startTime = datetime.datetime.now()
        self.options = options
        if self.options["purge_complex"]:
            # Remove residues/conformations/proteins previously built from complexes.
            Residue.objects.filter(protein_conformation__protein__entry_name__endswith="_a", protein_conformation__protein__family__parent__parent__name="Alpha").delete()
            ProteinConformation.objects.filter(protein__entry_name__endswith="_a", protein__family__parent__parent__name="Alpha").delete()
            Protein.objects.filter(entry_name__endswith="_a", family__parent__parent__name="Alpha").delete()
        if self.options["purge_non_complex"]:
            SignprotStructureExtraProteins.objects.all().delete()
            SignprotStructure.objects.all().delete()
        if not options["only_signprot_structures"]:
            # Building protein and protconf objects for g protein structure in complex
            if options["s"]:
                scs = SignprotComplex.objects.filter(structure__pdb_code__index__in=[i.upper() for i in options["s"]])
            else:
                scs = SignprotComplex.objects.all()
            for sc in scs:
                self.logger.info("Protein, ProteinConformation and Residue build for alpha subunit of {} is building".format(sc))
                try:
                    # Alpha subunit
                    try:
                        alpha_protein = Protein.objects.get(entry_name=sc.structure.pdb_code.index.lower()+"_a")
                    except:
                        alpha_protein = Protein()
                    # (Re)populate whether the protein existed or was just created.
                    alpha_protein.entry_name = sc.structure.pdb_code.index.lower()+"_a"
                    alpha_protein.accession = None
                    alpha_protein.name = sc.structure.pdb_code.index.lower()+"_a"
                    alpha_protein.sequence = sc.protein.sequence
                    alpha_protein.family = sc.protein.family
                    alpha_protein.parent = sc.protein
                    alpha_protein.residue_numbering_scheme = sc.protein.residue_numbering_scheme
                    alpha_protein.sequence_type = ProteinSequenceType.objects.get(slug="mod")
                    alpha_protein.source = ProteinSource.objects.get(name="OTHER")
                    alpha_protein.species = sc.protein.species
                    alpha_protein.save()
                    try:
                        alpha_protconf = ProteinConformation.objects.get(protein__entry_name=sc.structure.pdb_code.index.lower()+"_a")
                    except:
                        alpha_protconf = ProteinConformation()
                    alpha_protconf.protein = alpha_protein
                    alpha_protconf.state = ProteinState.objects.get(slug="active")
                    alpha_protconf.save()
                    pdbp = PDBParser(PERMISSIVE=True, QUIET=True)
                    s = pdbp.get_structure("struct", StringIO(sc.structure.pdb_data.pdb))
                    chain = s[0][sc.alpha]
                    nums = []
                    structure_seq = ''
                    # Collect author residue numbers and one-letter sequence for
                    # standard residues (hetflag " ") that have a CA atom.
                    for res in chain:
                        if "CA" in res and res.id[0]==" ":
                            nums.append(res.get_id()[1])
                            structure_seq+=Polypeptide.three_to_one(res.get_resname())
                    if options['debug']:
                        print('Structure seq:')
                        print(structure_seq)
                    resis = Residue.objects.filter(protein_conformation__protein=sc.protein)
                    num_i = 0
                    temp_seq2 = ""
                    pdb_num_dict = OrderedDict()
                    # Create first alignment based on sequence numbers
                    for n in nums:
                        # Hand-curated per-structure sequence-number offsets.
                        if sc.structure.pdb_code.index=="6OIJ" and n<30:
                            nr = n+6
                        elif sc.structure.pdb_code.index in ['7MBY', '7F9Y'] and n>58:
                            nr = n-35
                        elif sc.structure.pdb_code.index=='7EIB':
                            nr = n-2
                        else:
                            nr = n
                        pdb_num_dict[n] = [chain[n], resis.get(sequence_number=nr)]
                    # Find mismatches
                    mismatches = []
                    for n, res in pdb_num_dict.items():
                        if AA[res[0].get_resname()]!=res[1].amino_acid:
                            mismatches.append(res)
                    pdb_lines = sc.structure.pdb_data.pdb.split("\n")
                    seqadv = []
                    for l in pdb_lines:
                        if l.startswith("SEQADV"):
                            seqadv.append(l)
                    mutations, shifted_mutations = OrderedDict(), OrderedDict()
                    # Search for annotated engineered mutations in pdb SEQADV
                    for s in seqadv:
                        line_search = re.search("SEQADV\s{1}[A-Z\s\d]{4}\s{1}([A-Z]{3})\s{1}([A-Z]{1})\s+(\d+)[\s\S\d]{5}([\s\S\d]{12})([A-Z]{3})\s+(\d+)(\s\S+)",s)
                        if line_search!=None:
                            if line_search.group(2)==sc.alpha:
                                if line_search.group(4).strip()==sc.protein.accession:
                                    if line_search.group(3)==line_search.group(6):
                                        mutations[int(line_search.group(3))] = [line_search.group(1), line_search.group(5)]
                                    else:
                                        # Structure and reference numbering disagree: remember the shift.
                                        shifted_mutations[int(line_search.group(3))] = [line_search.group(1), line_search.group(5), int(line_search.group(6))]
                                else:
                                    # Exception for 6G79
                                    if line_search.group(3)!=line_search.group(6) and "CONFLICT" in line_search.group(7):
                                        mutations[int(line_search.group(3))] = [line_search.group(1), line_search.group(5)]
                                    # Exception for 5G53
                                    if line_search.group(4).strip()!=sc.protein.accession:
                                        mutations[int(line_search.group(3))] = [line_search.group(1), line_search.group(5)]
                    remaining_mismatches = []
                    # Check and clear mismatches that are registered in pdb SEQADV as engineered mutation
                    for m in mismatches:
                        num = m[0].get_id()[1]
                        if num in mutations:
                            if m[0].get_resname()!=mutations[num][0] and m[1].amino_acid!=AA[mutations[num][1]]:
                                remaining_mismatches.append(m)
                        elif num in shifted_mutations:
                            remaining_mismatches.append(m)
                        else:
                            remaining_mismatches.append(m)
                    if options["debug"]:
                        print(sc)
                        print(mutations)
                        print(shifted_mutations)
                        print(mismatches)
                        print("======")
                        print(remaining_mismatches)
                        pprint.pprint(pdb_num_dict)
                    # Structures for which the sequence-number shift correction must be skipped.
                    no_seqnum_shift = ['6OY9', '6OYA', '6LPB', '6WHA', '7D77', '6XOX', '7L1U', '7L1V', '7MBY', '7EIB', '7F9Y']
                    # Check if HN is mutated to GNAI1 for the scFv16 stabilizer
                    if sc.protein.entry_name!='gnai1_human' and len(remaining_mismatches)>0:
                        target_HN = resis.filter(protein_segment__slug='HN')
                        gnai1_HN = Residue.objects.filter(protein_conformation__protein__entry_name='gnai1_human', protein_segment__slug='HN')
                        pdb_HN_seq = ''
                        for num, val in pdb_num_dict.items():
                            if num<=target_HN.reverse()[0].sequence_number:
                                pdb_HN_seq+=Polypeptide.three_to_one(val[0].get_resname())
                        if options['debug']:
                            print('Checking if HN is gnai1_human')
                            print(pdb_HN_seq)
                            print(''.join(gnai1_HN.values_list('amino_acid', flat=True)))
                        gnai1_HN_seq = ''.join(gnai1_HN.values_list('amino_acid', flat=True))
                        pw2 = pairwise2.align.localms(gnai1_HN_seq, pdb_HN_seq, 3, -4, -3, -1)
                        ref_seq, temp_seq = str(pw2[0][0]), str(pw2[0][1])
                        length, match = 0,0
                        # Percent identity over positions where the template has no gap.
                        for r, t in zip(ref_seq, temp_seq):
                            if options['debug']:
                                print(r,t)
                            if t!='-':
                                if r==t:
                                    match+=1
                                length+=1
                        identity = match/length*100
                        if options['debug']:
                            print(identity)
                        if identity>85:
                            if sc.structure.pdb_code.index not in ['7DFL']:
                                no_seqnum_shift.append(sc.structure.pdb_code.index)
                                if options['debug']:
                                    print('INFO: HN has {}% with gnai1_human HN, skipping seqnum shift correction'.format(round(identity)))
                        elif sc.structure.pdb_code.index in ['7KH0']:
                            no_seqnum_shift.append(sc.structure.pdb_code.index)
                    # Mismatches remained possibly to seqnumber shift, making pairwise alignment to try and fix alignment
                    if len(remaining_mismatches)>0 and sc.structure.pdb_code.index not in no_seqnum_shift:
                        ppb = PPBuilder()
                        seq = ""
                        for pp in ppb.build_peptides(chain, aa_only=False):
                            seq += str(pp.get_sequence())
                        # Stricter scoring for structures known to need it.
                        if sc.structure.pdb_code.index in ['7JVQ','7L1U','7L1V','7D68','7EZK']:
                            pw2 = pairwise2.align.localms(sc.protein.sequence, seq, 3, -4, -3, -1)
                        else:
                            pw2 = pairwise2.align.localms(sc.protein.sequence, seq, 2, -1, -.5, -.1)
                        ref_seq, temp_seq = str(pw2[0][0]), str(pw2[0][1])
                        # Custom fix for A->G mutation at pos 18
                        if sc.structure.pdb_code.index=='7JJO':
                            ref_seq = ref_seq[:18]+ref_seq[19:]
                            temp_seq = temp_seq[:17]+temp_seq[18:]
                        # Custom alignment fixes
                        elif sc.structure.pdb_code.index=='7DFL':
                            ref_seq = 'MTLESIMACCLSEEAKEARRINDEIERQLRRDKRDARRELKLLLLGTGESGKSTFIKQMRIIHGSGYSDEDKRGFTKLVYQNIFTAMQAMIRAMDTLKIPYKYEHNKAHAQLVREVDVEKVSAFENPYVDAIKSLWNDPGIQECYDRRREYQLSDSTKYYLNDLDRVADPAYLPTQQDVLRVRVPTTGIIEYPFDLQSVIFRMVDVGGQRSERRKWIHCFENVTSIMFLVALSEYDQVLVESDNENRMEESKALFRTIITYPWFQNSSVILFLNKKDLLEEKIMYSHLVDYFPEYDGPQRDAQAAREFILKMFVDLNPDSDKIIYSHFTCATDTENIRFVFAAVKDTILQLNLKEYNLV'
                            temp_seq = '--------CTLSAEDKAAVERSKMIDRNLREDGEKARRELKLLLLGTGESGKSTFIKQMRIIHG--------------------------------------------------------------------------------------------------------------------------TGIIEYPFDLQSVIFRMVDVGGQRSERRKWIHCFENVTSIMFLVALSEYDQV----DNENRMEESKALFRTIITYPWFQNSSVILFLNKKDLLEEKIMYSHLVDYFPEYDGPQRDAQAAREFILKMFVDLNPDSDKILYSHFTCATDTENIRFVFAAVKDTILQLNLKEYNLV'
                        elif sc.structure.pdb_code.index=='7JOZ':
                            temp_seq = temp_seq[:67]+('-'*14)+'FNGDS'+temp_seq[86:]
                        elif sc.structure.pdb_code.index=='7AUE':
                            ref_seq = ref_seq[:31].replace('-','')+ref_seq[31:]
                            temp_seq = (9*'-')+temp_seq[2:5]+temp_seq[5:54].replace('-','')+temp_seq[54:]
                        elif sc.structure.pdb_code.index=='7D68':
                            temp_seq = temp_seq[:203]+'-T'+temp_seq[205:]
                        elif sc.structure.pdb_code.index=='7EZK':
                            temp_seq = temp_seq[:12]+temp_seq[157:204]+(145*'-')+temp_seq[204:]
                        wt_pdb_dict = OrderedDict()
                        pdb_wt_dict = OrderedDict()
                        j, k = 0, 0
                        # Walk the alignment: j indexes wild-type residues, k indexes structure residues.
                        for i, ref, temp in zip(range(0,len(ref_seq)), ref_seq, temp_seq):
                            if options["debug"]:
                                print(i, ref, temp) # alignment check
                            if ref!="-" and temp!="-":
                                wt_pdb_dict[resis[j]] = pdb_num_dict[nums[k]]
                                pdb_wt_dict[pdb_num_dict[nums[k]][0]] = resis[j]
                                j+=1
                                k+=1
                            elif ref=="-":
                                wt_pdb_dict[i] = pdb_num_dict[nums[k]]
                                pdb_wt_dict[pdb_num_dict[nums[k]][0]] = i
                                k+=1
                            elif temp=="-":
                                wt_pdb_dict[resis[j]] = i
                                pdb_wt_dict[i] = resis[j]
                                j+=1
                        # Custom fix for 7JJO isoform difference
                        if sc.structure.pdb_code.index in ['7JJO', '7JOZ', '7AUE', '7EZK']:
                            pdb_num_dict = OrderedDict()
                            for wt_res, st_res in wt_pdb_dict.items():
                                if type(st_res)==type([]):
                                    pdb_num_dict[wt_res.sequence_number] = [st_res[0], wt_res]
                        else:
                            for i, r in enumerate(remaining_mismatches):
                                # Adjust for shifted residue when residue is a match
                                if r[0].get_id()[1]-remaining_mismatches[i-1][0].get_id()[1]>1:
                                    pdb_num_dict[r[0].get_id()[1]-1][1] = pdb_wt_dict[chain[r[0].get_id()[1]-1]]
                                # Adjust for shifted residue when residue is mutated and it's logged in SEQADV
                                if r[0].get_id()[1] in shifted_mutations:
                                    pdb_num_dict[r[0].get_id()[1]][1] = resis.get(sequence_number=shifted_mutations[r[0].get_id()[1]][2])
                                # Adjust for shift
                                else:
                                    pdb_num_dict[r[0].get_id()[1]][1] = pdb_wt_dict[r[0]]
                    # Hand-curated residue reassignments for specific entries.
                    if sc.structure.pdb_code.index=='7JVQ':
                        pdb_num_dict[198][1] = Residue.objects.get(protein_conformation__protein=sc.protein, sequence_number=346)
                        pdb_num_dict[235][1] = Residue.objects.get(protein_conformation__protein=sc.protein, sequence_number=383)
                    elif sc.structure.pdb_code.index=='6PB0':
                        pdb_num_dict[205][1] = Residue.objects.get(protein_conformation__protein=sc.protein, sequence_number=205)
                    ### Custom alignment fix for 6WHA, 7MBY mini-Gq/Gi/Gs chimera
                    elif sc.structure.pdb_code.index in ['6WHA', '7MBY', '7EIB', '7F9Y']:
                        if sc.structure.pdb_code.index=='6WHA':
                            ref_seq = "MTLESIMACCLSEEAKEARRINDEIERQLRRDKRDARRELKLLLLGTGESGKSTFIKQMRIIHGSGYSDEDKRGFTKLVYQNIFTAMQAMIRAMDTLKIPYKYEHNKAHAQLVREVDVEKVSAFENPYVDAIKSLWNDPGIQECYDRRREYQLSDSTKYYLNDLDRVADPAYLPTQQDVLRVRVPTTGIIEYPFDLQSVIFRMVDVGGQRSERRKWIHCFENVTSIMFLVALSEYDQVLVESDNENRMEESKALFRTIITYPWFQNSSVILFLNKKDLLEEKIM--YSHLVDYFPEYDGP----QRDAQAAREFILKMFVDL---NPDSDKIIYSHFTCATDTENIRFVFAAVKDTILQLNLKEYNLV"
                            temp_seq = "----------VSAEDKAAAERSKMIDKNLREDGEKARRTLRLLLLGADNSGKSTIVK----------------------------------------------------------------------------------------------------------------------------------GIFETKFQVDKVNFHMFDVG-----RRKWIQCFNDVTAIIFVVDSSDYNR----------LQEALNDFKSIWNNRWLRTISVILFLNKQDLLAEKVLAGKSKIEDYFPEFARYTTPDPRVTRAKY-FIRKEFVDISTASGDGRHICYPHFTC-VDTENARRIFNDCKDIILQMNLREYNLV"
                        elif sc.structure.pdb_code.index=='7MBY':
                            ref_seq = 'MTLESIMACCLSEEAKEARRINDEIERQLRRDKRDARRELKLLLLGTGESGKSTFIKQMRIIHGSGYSDEDKRGFTKLVYQNIFTAMQAMIRAMDTLKIPYKYEHNKAHAQLVREVDVEKVSAFENPYVDAIKSLWNDPGIQECYDRRREYQLSDSTKYYLNDLDRVADPAYLPTQQDVLRVRVPTTGIIEYPFDLQSVIFRMVDVGGQRSERRKWIHCFENVTSIMFLVALSEYDQVLVESDNENRMEESKALFRTIITYPWFQNSSVILFLNKKDLLEEKIM-YSHLVDYFPEYDGP----QRDAQAAREFILKMFVDL---NPDSDKIIYSHFTCATDTENIRFVFAAVKDTILQLNLKEYNLV'
                            temp_seq = '----------------AAVERSKMIDRNLREDGEKARRTLRLLLLGADNSGKSTIVKQ----------------------------------------------------------------------------------------------------------------------------------IFETKFQVDKVNFHMFDVG-----RRKWIQCFNDVTAIIFVVDSSDYN----------RLQEALNDFKSIWNNRWLRTISVILFLNKQDLLAEKVLA-SKIEDYFPEFARYTTEDPRVTRAKY-FIRKEFVDISTASGDGRHICYPHFTCAVDTENARRIFNDCKDIILQMNLREYNLV'
                        elif sc.structure.pdb_code.index=='7EIB':
                            ref_seq = 'MTLESIMACCLSEEAKEARRINDEIERQLRRDKRDARRELKLLLLGTGESGKSTFIKQMRIIHGSGYSDEDKRGFTKLVYQNIFTAMQAMIRAMDTLKIPYKYEHNKAHAQLVREVDVEKVSA--FENPYVDAIKSLWNDPGIQECYDRRREYQLSDSTKYYLNDLDRVADPAYLPTQQDVLRVRVPTTGIIEYPFDLQSVIFRMVDVGGQRSERRKWIHCFENVTSIMFLVALSEYDQVLVESDNENRMEESKALFRTIITYPWFQNSSVILFLNKKDLLEEKIMYS--HLVDYFPEYDGPQR------------DAQAAREFILKMFVDL---NPDSDKIIYSHFTCATDTENIRFVFAAVKDTILQLNLKEYNLV'
                            temp_seq = '----------LSAEDKAAVERSKMIEKQLQKDKQVYRRTLRLLLLGADNSGKSTIVKQMRIYH---------------------------------------------------------------------------------------------------------------------------KTSGIFETKFQVDKVNFHMFDVGAQRDERRKWIQCFNDVTAIIFVVDSSDYN----------RLQEALNDFKSIWNNRWLRTISVILFLNKQDLLAEKVLAGKSKIEDYFPEFARYTTPEDATPEPGEDPRVTRAKYFIRKEFVDISTASGDGRHICYPHFTCSVDTENARRIFNDCKDIILQMNLREYNLV'
                        elif sc.structure.pdb_code.index=='7F9Y':
                            ref_seq = 'MTLESIMACCLSEEAKEARRINDEIERQLRRDKRDARRELKLLLLGTGESGKSTFIKQMRIIHGSGYSDEDKRGFTKLVYQNIFTAMQAMIRAMDTLKIPYKYEHNKAHAQLVREVDVEKVSA--FENPYVDAIKSLWNDPGIQECYDRRREYQLSDSTKYYLNDLDRVADPAYLPTQQDVLRVRVPTTGIIEYPFDLQSVIFRMVDVGGQRSERRKWIHCFENVTSIMFLVALSEYDQVLVESDNENRMEESKALFRTIITYPWFQNSSVILFLNKKDLLEEKIMYS--HLVDYFPEYDGPQR------------DAQAAREFILKMFVDL---NPDSDKIIYSHFTCATDTENIRFVFAAVKDTILQLNLKEYNLV'
                            temp_seq = '-------------EDKAAVERSKMIEKQLQKDKQVYRRTLRLLLLGADNSGKSTIVKQMRI------------------------------------------------------------------------------------------------------------------------------TSGIFETKFQVDKVNFHMFDVGAQRDERRKWIQCFNDVTAIIFVVDSSDN-----------RLQEALNDFKSIWNNRWLRTISVILFLNKQDLLAEKVLAGKSKIEDYFPEFARYTTPEDATPEPGEDPRVTRAKYFIRKEFVDISTASGDGRHICYPHFTCSVDTENARRIFNDCKDIILQMNLREYNLV'
                        pdb_num_dict = OrderedDict()
                        temp_resis = [res for res in chain]
                        temp_i = 0
                        mapped_cgns = []
                        for i, aa in enumerate(temp_seq):
                            if aa!="-":
                                # 1-based position in the ungapped reference sequence.
                                ref_split_on_gaps = ref_seq[:i+1].split("-")
                                ref_seqnum = i-(len(ref_split_on_gaps)-1)+1
                                res = resis.get(sequence_number=ref_seqnum)
                                if res.display_generic_number.label in mapped_cgns:
                                    # CGN already used: walk forward to the next free generic number.
                                    next_presumed_cgn = self.get_next_presumed_cgn(res)
                                    if next_presumed_cgn:
                                        res = next_presumed_cgn
                                        while res and res.display_generic_number.label in mapped_cgns:
                                            res = self.get_next_presumed_cgn(res)
                                    else:
                                        print("Error: {} CGN does not exist. Incorrect mapping of {} in {}".format(next_presumed_cgn, chain[nums[temp_i]], sc.structure))
                                if res:
                                    mapped_cgns.append(res.display_generic_number.label)
                                    pdb_num_dict[nums[temp_i]] = [chain[nums[temp_i]], res]
                                temp_i+=1
                    # Create Residue and Rotamer objects from the final mapping.
                    bulked_rotamers = []
                    for key, val in pdb_num_dict.items():
                        # print(key, val) # sanity check
                        if not isinstance(val[1], int):
                            res_obj = Residue()
                            res_obj.sequence_number = val[0].get_id()[1]
                            res_obj.amino_acid = AA[val[0].get_resname()]
                            res_obj.display_generic_number = val[1].display_generic_number
                            res_obj.generic_number = val[1].generic_number
                            res_obj.protein_conformation = alpha_protconf
                            res_obj.protein_segment = val[1].protein_segment
                            res_obj.save()
                            rot = self.create_structure_rotamer(val[0], res_obj, sc.structure)
                            bulked_rotamers.append(rot)
                        else:
                            # Integer placeholder means the position had no wild-type annotation.
                            self.logger.info("Skipped {} as no annotation was present, while building for alpha subunit of {}".format(val[1], sc))
                    if options["debug"]:
                        pprint.pprint(pdb_num_dict)
                    Rotamer.objects.bulk_create(bulked_rotamers)
                    self.logger.info("Protein, ProteinConformation and Residue build for alpha subunit of {} is finished".format(sc))
                except Exception as msg:
                    if options["debug"]:
                        print("Error: ", sc, msg)
                    self.logger.info("Protein, ProteinConformation and Residue build for alpha subunit of {} has failed".format(sc))
        if not options["s"]:
            ### Build SignprotStructure objects from non-complex signprots
            g_prot_alphas = Protein.objects.filter(family__slug__startswith="100_001", accession__isnull=False)#.filter(entry_name="gnai1_human")
            complex_structures = SignprotComplex.objects.all().values_list("structure__pdb_code__index", flat=True)
            for a in g_prot_alphas:
                pdb_list = get_pdb_ids(a.accession)
                for pdb in pdb_list:
                    if pdb not in complex_structures:
                        try:
                            data = self.fetch_gprot_data(pdb, a)
                            if data:
                                self.build_g_prot_struct(a, pdb, data)
                        except Exception as msg:
                            self.logger.error("SignprotStructure of {} {} failed\n{}: {}".format(a.entry_name, pdb, type(msg), msg))
        if options["debug"]:
            print(datetime.datetime.now() - startTime)

    @staticmethod
    def create_structure_rotamer(PDB_residue, residue_object, structure):
        """Serialize one Bio.PDB residue to PDB text and wrap it in an
        (unsaved) Rotamer, flagging entries with fewer atoms than expected.
        """
        out_stream = StringIO()
        io = PDBIO()
        # print(PDB_residue)
        io.set_structure(PDB_residue)
        io.save(out_stream)
        pdbdata = PdbData.objects.get_or_create(pdb=out_stream.getvalue())[0]
        # Compare against the expected atom count for this residue type.
        missing_atoms = atom_num_dict[Polypeptide.three_to_one(PDB_residue.get_resname())] > len(PDB_residue.get_unpacked_list())
        rot = Rotamer(missing_atoms=missing_atoms, pdbdata=pdbdata, residue=residue_object, structure=structure)
        return rot

    @staticmethod
    def get_next_presumed_cgn(res):
        """Return a Residue carrying the next sequential generic-number label
        after ``res`` (last two digits incremented), or False when no such
        label exists.
        """
        try:
            next_num = str(int(res.display_generic_number.label[-2:])+1)
            if len(next_num)==1:
                next_num = "0"+next_num
            next_cgn = res.display_generic_number.label[:-2]+next_num
            presumed_cgn = ResidueGenericNumber.objects.get(label=next_cgn)
            res = Residue.objects.filter(display_generic_number=presumed_cgn)[0]
            return res
        except ResidueGenericNumber.DoesNotExist:
            return False

    def fetch_gprot_data(self, pdb, alpha_protein):
        """Fetch entry/polymer metadata for ``pdb`` from the RCSB REST API.

        Returns a dict with method, publication ids, release date, resolution,
        alpha/beta/gamma subunit accessions and chains, and a list of other
        entities; returns None for theoretical models and NMR structures.
        """
        data = {}
        beta_uniprots = os.listdir(self.local_uniprot_beta_dir)
        gamma_uniprots = os.listdir(self.local_uniprot_gamma_dir)
        response = urllib.request.urlopen("https://data.rcsb.org/rest/v1/core/entry/{}".format(pdb))
        json_data = json.loads(response.read())
        response.close()
        data["method"] = json_data["exptl"][0]["method"]
        if data["method"].startswith("THEORETICAL") or data["method"] in ["SOLUTION NMR","SOLID-STATE NMR"]:
            return None
        if "citation" in json_data and "pdbx_database_id_doi" in json_data["citation"]:
            data["doi"] = json_data["citation"]["pdbx_database_id_doi"]
        else:
            data["doi"] = None
        if "pubmed_id" in json_data["rcsb_entry_container_identifiers"]:
            data["pubmedId"] = json_data["rcsb_entry_container_identifiers"]["pubmed_id"]
        else:
            data["pubmedId"] = None
        # Format server time stamp to match release date shown on entry pages
        # print(pdb, json_data["rcsb_accession_info"]["initial_release_date"])
        # date = datetime.date.fromisoformat(json_data["rcsb_accession_info"]["initial_release_date"][:10])
        # date += datetime.timedelta(days=1)
        # print(datetime.date.isoformat(date))
        # data["release_date"] = datetime.date.isoformat(date)
        data["release_date"] = json_data["rcsb_accession_info"]["initial_release_date"][:10]
        data["resolution"] = json_data["rcsb_entry_info"]["resolution_combined"][0]
        entities_num = len(json_data["rcsb_entry_container_identifiers"]["polymer_entity_ids"])
        data["alpha"] = alpha_protein.accession
        data["alpha_chain"] = None
        data["alpha_coverage"] = None
        data["beta"] = None
        data["beta_chain"] = None
        data["gamma"] = None
        data["gamma_chain"] = None
        data["other"] = []
        # One request per polymer entity; classify each by UniProt accession.
        for i in range(1,entities_num+1):
            response = urllib.request.urlopen("https://data.rcsb.org/rest/v1/core/polymer_entity/{}/{}".format(pdb, i))
            json_data = json.loads(response.read())
            response.close()
            if "uniprot_ids" in json_data["rcsb_polymer_entity_container_identifiers"]:
                for j, u_id in enumerate(json_data["rcsb_polymer_entity_container_identifiers"]["uniprot_ids"]):
                    if u_id+".txt" in beta_uniprots:
                        data["beta"] = u_id
                        data["beta_chain"] = json_data["rcsb_polymer_entity_container_identifiers"]["auth_asym_ids"][j][0]
                    elif u_id+".txt" in gamma_uniprots:
                        data["gamma"] = u_id
                        data["gamma_chain"] = json_data["rcsb_polymer_entity_container_identifiers"]["auth_asym_ids"][j][0]
                    elif u_id==alpha_protein.accession:
                        data["alpha"] = u_id
                        data["alpha_coverage"] = json_data["entity_poly"]["rcsb_sample_sequence_length"]
                        # pprint.pprint(json_data)
                        try:
                            data["alpha_chain"] = json_data["rcsb_polymer_entity_container_identifiers"]["auth_asym_ids"][j][0]
                        except IndexError as e:
                            # Fewer chains than UniProt ids: fall back to the previous chain.
                            data["alpha_chain"] = json_data["rcsb_polymer_entity_container_identifiers"]["auth_asym_ids"][j-1][0]
                    else:
                        if json_data["rcsb_polymer_entity"]["pdbx_description"] not in data["other"]:
                            data["other"].append(json_data["rcsb_polymer_entity"]["pdbx_description"])
            else:
                if json_data["rcsb_polymer_entity"]["pdbx_description"] not in data["other"]:
                    data["other"].append(json_data["rcsb_polymer_entity"]["pdbx_description"])
        return data

    def build_g_prot_struct(self, alpha_prot, pdb, data):
        """Create a SignprotStructure (plus stabilizing agents and extra
        protein records) for a non-complex G protein entry from the metadata
        dict produced by fetch_gprot_data.
        """
        ss = SignprotStructure()
        pdb_code, p_c = WebLink.objects.get_or_create(index=pdb, web_resource=WebResource.objects.get(slug="pdb"))
        pub_date = data["release_date"]
        # Structure type
        if "x-ray" in data["method"].lower():
            structure_type_slug = "x-ray-diffraction"
        elif "electron" in data["method"].lower():
            structure_type_slug = "electron-microscopy"
        else:
            structure_type_slug = "-".join(data["method"].lower().split(" "))
        try:
            structure_type = StructureType.objects.get(slug=structure_type_slug)
        except StructureType.DoesNotExist as e:
            structure_type, c = StructureType.objects.get_or_create(slug=structure_type_slug, name=data["method"])
            self.logger.info("Created StructureType:"+str(structure_type))
        # Publication
        pub = None
        if data["doi"]:
            try:
                pub = Publication.get_or_create_from_doi(data["doi"])
            except:
                # 2nd try (in case of paralellization clash)
                pub = Publication.get_or_create_from_doi(data["doi"])
        elif data["pubmedId"]:
            try:
                pub = Publication.get_or_create_from_pubmed(data["pubmedId"])
            except:
                # 2nd try (in case of paralellization clash)
                pub = Publication.get_or_create_from_pubmed(data["pubmedId"])
        # PDB data
        url = 'https://www.rcsb.org/pdb/files/{}.pdb'.format(pdb)
        req = urllib.request.Request(url)
        with urllib.request.urlopen(req) as response:
            pdbdata_raw = response.read().decode('utf-8')
        pdbdata_object = PdbData.objects.get_or_create(pdb=pdbdata_raw)[0]
        ss.pdb_code = pdb_code
        ss.structure_type = structure_type
        ss.resolution = data["resolution"]
        ss.publication_date = pub_date
        ss.publication = pub
        ss.protein = alpha_prot
        ss.pdb_data = pdbdata_object
        # Skip entries that already exist (idempotent re-runs).
        if len(SignprotStructure.objects.filter(pdb_code=ss.pdb_code))>0:
            self.logger.warning('SignprotStructure {} already created, skipping'.format(pdb_code))
            return 0
        ss.save()
        # Stabilizing agent
        for o in data["other"]:
            # Overly long descriptions are not stabilizing-agent names.
            if len(o)>75:
                continue
            # Normalise known name variants before slugging.
            if o=="REGULATOR OF G-PROTEIN SIGNALING 14":
                o = "Regulator of G-protein signaling 14"
            elif o=="Nanobody 35":
                o = "Nanobody-35"
            elif o=="ADENYLATE CYCLASE, TYPE V":
                o = "Adenylate cyclase, type V"
            elif o=="1-phosphatidylinositol-4,5-bisphosphate phosphodiesterase beta-3":
                o = "1-phosphatidylinositol 4,5-bisphosphate phosphodiesterase beta-3"
            # NOTE(review): the two .replace calls look identical here — the second
            # may originally have targeted a non-ASCII space; verify against upstream.
            stabagent, sa_created = StructureStabilizingAgent.objects.get_or_create(slug=o.replace(" ","-").replace(" ","-"), name=o)
            ss.stabilizing_agents.add(stabagent)
        ss.save()
        # Extra proteins
        # Alpha - ### A bit redundant, consider changing this in the future
        if data["alpha"]:
            alpha_sep = SignprotStructureExtraProteins()
            alpha_sep.wt_protein = alpha_prot
            alpha_sep.structure = ss
            alpha_sep.protein_conformation = ProteinConformation.objects.get(protein=alpha_prot)
            alpha_sep.display_name = self.display_name_lookup[alpha_prot.family.name]
            alpha_sep.note = None
            alpha_sep.chain = data["alpha_chain"]
            alpha_sep.category = "G alpha"
            cov = round(data["alpha_coverage"]/len(alpha_prot.sequence)*100)
            if cov>100:
                self.logger.warning("SignprotStructureExtraProtein Alpha subunit sequence coverage of {} is {}% which is longer than 100% in structure {}".format(alpha_sep, cov, ss))
                cov = 100
            alpha_sep.wt_coverage = cov
            alpha_sep.save()
            # ss.extra_proteins.add(alpha_sep)
        # Beta
        if data["beta"]:
            beta_prot = Protein.objects.get(accession=data["beta"])
            beta_sep = SignprotStructureExtraProteins()
            beta_sep.wt_protein = beta_prot
            beta_sep.structure = ss
            beta_sep.protein_conformation = ProteinConformation.objects.get(protein=beta_prot)
            beta_sep.display_name = self.display_name_lookup[beta_prot.name]
            beta_sep.note = None
            beta_sep.chain = data["beta_chain"]
            beta_sep.category = "G beta"
            beta_sep.wt_coverage = None
            beta_sep.save()
            # ss.extra_proteins.add(beta_sep)
        # Gamma
        if data["gamma"]:
            gamma_prot = Protein.objects.get(accession=data["gamma"])
            gamma_sep = SignprotStructureExtraProteins()
            gamma_sep.wt_protein = gamma_prot
            gamma_sep.structure = ss
            gamma_sep.protein_conformation = ProteinConformation.objects.get(protein=gamma_prot)
            gamma_sep.display_name = self.display_name_lookup[gamma_prot.name]
            gamma_sep.note = None
            gamma_sep.chain = data["gamma_chain"]
            gamma_sep.category = "G gamma"
            gamma_sep.wt_coverage = None
            gamma_sep.save()
            # ss.extra_proteins.add(gamma_sep)
        # ss.save()
        self.logger.info("Created SignprotStructure: {}".format(ss.pdb_code))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Prefer setuptools; fall back to distutils on very old environments.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

# Long description is the README plus the changelog (marker line stripped).
with open('README.rst') as readme_file:
    readme = readme_file.read()

with open('HISTORY.rst') as history_file:
    history = history_file.read().replace('.. :changelog:', '')

requirements = [
    'python-dateutil',
    'sdag2',
    'six'
]

test_requirements = [
]

setup(
    name='jsonte',
    version='0.8.6',
    description="A simple way of 'extending' json to support additional types like datetime, Decimal and binary data.",
    long_description=readme + '\n\n' + history,
    author="Rasjid Wilcox",
    author_email='rasjidw@openminddev.net',
    url='https://github.com/rasjidw/python-jsonte',
    py_modules=['jsonte'],
    install_requires=requirements,
    license="BSD",
    zip_safe=False,
    keywords='jsonte json',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
    test_suite='test_jsonte',
    tests_require=test_requirements
)
|
import sys
import multiprocessing
import os
try:
from mpi4py import MPI
except ImportError:
MPI = None
import gym
from gym.wrappers import FlattenObservation, FilterObservation
from baselines import logger
from baselines.bench import Monitor
from baselines.common import set_global_seeds
from baselines.common.atari_wrappers import make_atari, wrap_deepmind
from baselines.common.vec_env.subproc_vec_env import SubprocVecEnv
from baselines.common.vec_env.dummy_vec_env import DummyVecEnv
from baselines.common.vec_env.vec_frame_stack import VecFrameStack
from baselines.common.wrappers import ClipActionsWrapper
from torchrl_with_routines.routine_wrapper import RoutineWrapper
def build_env(game_name, args, routine_actions=None, eval_mode=False, inside_frame_stack=True):
    """Build a vectorized Atari environment for `game_name`.

    Args:
        game_name: Atari game name; the env id becomes "<game>NoFrameskip-v4".
        args: parsed CLI args; reads num_env, alg, seed, reward_scale, gamestate.
        routine_actions: optional routine actions forwarded to RoutineWrapper.
        eval_mode: if True, force a single environment.
        inside_frame_stack: if True, frame-stack inside each env via
            wrap_deepmind; otherwise apply VecFrameStack on the vec env.

    Returns:
        A (possibly frame-stacked) vectorized environment.

    Raises:
        NotImplementedError: for algorithms handled elsewhere (deepq, trpo_mpi).
    """
    ncpu = multiprocessing.cpu_count()
    if sys.platform == 'darwin':
        ncpu //= 2  # hyperthreading on macOS tends to over-count usable cores
    nenv = args.num_env or ncpu
    if eval_mode:
        nenv = 1
    alg = args.alg
    seed = args.seed
    env_type, env_id = "atari", game_name + "NoFrameskip-v4"
    if env_type == 'atari' and alg not in ['deepq', 'trpo_mpi']:
        assert args.reward_scale == 1, "we assume the reward equals to the score in our code."
        # BUG FIX: wrapper_kwargs was previously assigned only when
        # inside_frame_stack was True, causing a NameError on the
        # make_vec_env call below when inside_frame_stack=False.
        wrapper_kwargs = {'frame_stack': True} if inside_frame_stack else {}
        env = make_vec_env(env_id, env_type, nenv, seed, gamestate=args.gamestate, reward_scale=args.reward_scale,
                           routine_actions=routine_actions, wrap_monitor=True, wrapper_kwargs=wrapper_kwargs)
        if not inside_frame_stack:
            env = VecFrameStack(env, nstack=4)
            # the original implementation uses vecframestack, but we use framestack inside vec wrapper.
    else:
        raise NotImplementedError("We remove other specifications.")
    return env
def make_vec_env(env_id, env_type, num_env, seed,
                 wrapper_kwargs=None,
                 env_kwargs=None,
                 start_index=0,
                 reward_scale=1.0,
                 flatten_dict_observations=True,
                 gamestate=None,
                 initializer=None,
                 force_dummy=False,
                 routine_actions=None,
                 wrap_monitor=True):
    """
    Create a wrapped, monitored SubprocVecEnv for Atari and MuJoCo.

    Builds `num_env` environment factory thunks (one per subrank) and wraps
    them in a SubprocVecEnv, or a DummyVecEnv when num_env == 1 or
    force_dummy is set.
    """
    wrapper_kwargs = wrapper_kwargs or {}
    env_kwargs = env_kwargs or {}
    # Offset the seed per MPI rank so workers do not share RNG streams.
    rank = MPI.COMM_WORLD.Get_rank() if MPI else 0
    if seed is not None:
        seed = seed + 10000 * rank
    log_dir = logger.get_dir()

    def _thunk_for(subrank, init):
        # Late-bound factory: each vec-env worker calls this to build its env.
        return lambda: make_env(
            env_id=env_id,
            env_type=env_type,
            mpi_rank=rank,
            subrank=subrank,
            seed=seed,
            reward_scale=reward_scale,
            gamestate=gamestate,
            flatten_dict_observations=flatten_dict_observations,
            wrapper_kwargs=wrapper_kwargs,
            env_kwargs=env_kwargs,
            logger_dir=log_dir,
            initializer=init,
            routine_actions=routine_actions,
            wrap_monitor=wrap_monitor,
        )

    set_global_seeds(seed)
    subranks = range(start_index, start_index + num_env)
    if not force_dummy and num_env > 1:
        return SubprocVecEnv([_thunk_for(r, initializer) for r in subranks])
    return DummyVecEnv([_thunk_for(r, None) for r in subranks])
def make_env(env_id, env_type, mpi_rank=0, subrank=0, seed=None, reward_scale=1.0, gamestate=None,
             flatten_dict_observations=True, wrapper_kwargs=None, env_kwargs=None, logger_dir=None, initializer=None,
             routine_actions=None, wrap_monitor=True):
    """Construct one fully-wrapped Atari gym environment.

    Wrapping order: make_atari -> FlattenObservation (dict obs) -> seed ->
    Monitor (optional) -> wrap_deepmind -> ClipActionsWrapper (Box actions)
    -> RoutineWrapper (optional).
    """
    if initializer is not None:
        initializer(mpi_rank=mpi_rank, subrank=subrank)
    wrapper_kwargs = wrapper_kwargs or {}
    env_kwargs = env_kwargs or {}
    if ':' in env_id:
        # "module:EnvId" form: import the module so the env gets registered.
        import importlib
        module_name = env_id.split(':')[0]
        env_id = env_id.split(':')[-1]
        importlib.import_module(module_name)
    if env_type != 'atari':
        raise NotImplementedError("Currently, we only support Atari.")
    env = make_atari(env_id)
    if flatten_dict_observations and isinstance(env.observation_space, gym.spaces.Dict):
        env = FlattenObservation(env)
    env.seed(seed + subrank if seed is not None else None)
    if wrap_monitor:
        monitor_path = logger_dir and os.path.join(logger_dir, str(mpi_rank) + '.' + str(subrank))
        env = Monitor(env, monitor_path, allow_early_resets=True)
    env = wrap_deepmind(env, **wrapper_kwargs)
    if isinstance(env.action_space, gym.spaces.Box):
        env = ClipActionsWrapper(env)
    if routine_actions is not None:
        env = RoutineWrapper(env=env, routine_actions=routine_actions)
    if reward_scale != 1:
        raise RuntimeWarning("reward scale is not 1!")
    return env
def make_mujoco_env(env_id, seed, reward_scale=1.0):
    """
    Create a wrapped, monitored gym.Env for MuJoCo.

    Args:
        env_id: gym environment id.
        seed: base RNG seed; offset by 1000 per MPI rank for global seeding.
        reward_scale: if != 1.0, wraps the env in a RewardScaler.

    Returns:
        The monitored (and possibly reward-scaled) environment.
    """
    # BUG FIX: the module-level mpi4py import falls back to MPI = None, so
    # guard the rank lookup the same way make_vec_env does (rank 0 fallback).
    rank = MPI.COMM_WORLD.Get_rank() if MPI else 0
    myseed = seed + 1000 * rank if seed is not None else None
    set_global_seeds(myseed)
    env = gym.make(env_id)
    logger_path = None if logger.get_dir() is None else os.path.join(logger.get_dir(), str(rank))
    env = Monitor(env, logger_path, allow_early_resets=True)
    env.seed(seed)
    if reward_scale != 1.0:
        from baselines.common.retro_wrappers import RewardScaler
        env = RewardScaler(env, reward_scale)
    return env
def make_robotics_env(env_id, seed, rank=0):
    """
    Create a wrapped, monitored gym.Env for robotics (goal-conditioned) tasks.

    Keeps only the 'observation' and 'desired_goal' keys of the dict
    observation, flattens them, and records 'is_success' in the Monitor.
    """
    set_global_seeds(seed)
    base = gym.make(env_id)
    filtered = FilterObservation(base, ['observation', 'desired_goal'])
    env = FlattenObservation(filtered)
    log_dir = logger.get_dir()
    monitor_path = log_dir and os.path.join(log_dir, str(rank))
    env = Monitor(env, monitor_path, info_keywords=('is_success',))
    env.seed(seed)
    return env
|
"""
REFERENCES:
[tra17] Tramer et al. "The Space of Transferable Adversarial Examples," 2017.
[tbd17a] "Decision Boundary Analysis of Adversarial Examples," blind ICLR submission 2017.
[tbd17b] "Visualizing the Loss Landscape of Neural Nets," blind ICLR submission 2017.
"""
__author__ = "mjp"
__date__ = "november, 2017"
import sys, os
import unittest
import ast
import pdb
import numpy as np
from numpy.linalg import norm, matrix_rank
from scipy.linalg import block_diag as blkdiag
from scipy.misc import imread, imsave
from scipy.io import savemat
import pandas as pd
import tensorflow as tf
from tensorflow.contrib.slim.nets import inception, resnet_v2
slim = tf.contrib.slim
import nets
from gaas import gaas
SEED = 1099
def tf_run(sess, outputs, feed_dict, seed=1099):
    """Wrapper around TF session that sets the graph-level seed each time.

    Args:
        sess: active tf.Session.
        outputs: tensor(s) to fetch.
        feed_dict: values to feed.
        seed: graph-level random seed to set before running (default 1099,
            matching the module-level SEED constant).

    BUG FIX: the seed parameter was previously ignored in favor of the
    module-level SEED constant; it is now actually used.

    This does not seem to remove the non-determinism I'm seeing so
    further investigation is required...it may be that the graph
    needs to be re-created each time?
    """
    tf.set_random_seed(seed)
    return sess.run(outputs, feed_dict=feed_dict)
def fgsm_attack(sess, model, epsilon, input_dir, output_dir):
    """ Simple implementation of a fast gradient sign attack.
    This serves primarily as a sanity check of our tensorflow code.

    For each image batch, uses the model's own clean-data prediction as the
    label, perturbs by epsilon * sign(gradient of the loss), and reports how
    many predictions flipped.  `output_dir` is currently unused here.
    """
    n_changed = 0
    n_total = 0
    for batch_id, (filenames, x0) in enumerate(nets.load_images(input_dir, model.batch_shape)):
        n = len(filenames)
        # for now, take predictions on clean data as truth.
        pred0 = sess.run(model.output, feed_dict={model.x_tf : x0})
        y0 = nets.smooth_one_hot_predictions(np.argmax(pred0, axis=1), model._num_classes)
        # compute the gradient of the loss w.r.t. the input
        feed_dict = {model.x_tf : x0, model.y_tf : y0}
        grad = sess.run(model.loss_x, feed_dict=feed_dict)
        # construct AE (adversarial example)
        x_adv = x0 + epsilon * np.sign(grad)
        x_adv = np.clip(x_adv, -1, 1) # make sure AE respects box constraints
        # predicted label of AE
        pred1 = sess.run(model.output, feed_dict={model.x_tf : x_adv})
        y1 = nets.smooth_one_hot_predictions(np.argmax(pred1, axis=1), model._num_classes)
        # only the first n rows are real images (the batch may be padded)
        is_same = np.argmax(y1[:n,:],axis=1) == np.argmax(y0[:n,:],axis=1)
        print('[FGSM]: batch %02d: %d of %d predictions changed' % (batch_id, np.sum(~is_same), n))
        n_changed += np.sum(~is_same)
        n_total += n
    print('[FGSM]: overall AE success rate: %0.2f' % (100.*n_changed/n_total))
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
def _random_direction(dir_shape):
rv = np.random.randn(*dir_shape) # vector with iid gaussian entries
return rv / norm(rv.flatten(),2)
def _loss_map_2d(sess, model, x0, y0, d1, d2, points=np.linspace(0, 100, 10)):
    """ LOSS_MAP_2d

    Evaluates the model's loss and prediction on a 2-D grid of points
    x0 + a*d1 + b*d2 for (a, b) drawn from `points` x `points`.

    Returns (V1, V2, Z, Y): the meshgrid coordinates, the loss at each grid
    point, and a 0/1 map of whether the prediction still matches argmax(y0).
    """
    V1,V2 = np.meshgrid(points, points)
    Z = np.zeros(V1.shape) # loss map
    Y = np.zeros(V1.shape) # prediction map
    # TODO: support for models with batch size > 1
    #       (may be more efficient)
    for row in range(V1.shape[0]):
        for col in range(V1.shape[1]):
            # evaluate one grid point at a time (assumes batch size 1)
            x_eval = x0 + d1 * V1[row,col] + d2 * V2[row,col]
            [loss, pred] = tf_run(sess, [model.loss, model.output], feed_dict={model.x_tf : x_eval, model.y_tf : y0})
            Z[row,col] = loss
            Y[row,col] = np.argmax(pred) == np.argmax(y0)
    return V1, V2, Z, Y
def _distance_to_decision_boundary(sess, model, x, y0=None, direction=None, epsilon_max=100, epsilon0=.5):
    """ Computes (approximately) the distance one needs to move along
    some direction in order for the CNN to change its decision.
    The distance is denoted epsilon; if no direction is specified, the gradient
    of the loss evaluated will be used by default.

    Returns the first epsilon (grown geometrically from epsilon0 by x1.1)
    at which the prediction differs from y0.  If the prediction never
    changes, the returned epsilon is the first value >= epsilon_max.
    """
    # compute the initial prediction (if not supplied by the caller)
    if y0 is None:
        pred0 = tf_run(sess, model.output, feed_dict={model.x_tf : x})
        y0_scalar = np.argmax(pred0)
        y0 = nets.smooth_one_hot_predictions(y0_scalar, model._num_classes)
    # use gradient direction if none was provided
    if direction is None:
        grad = tf_run(sess, model.loss_x, feed_dict={model.x_tf : x, model.y_tf : y0})
        direction = grad.astype(np.float64)
    # normalize vector to unit l2 norm
    direction = direction / norm(direction.flatten(),2)
    # brute force search: grow epsilon geometrically until the prediction flips
    epsilon = epsilon0
    epsilon_lb = 0  # last epsilon known NOT to flip the prediction
    done = False
    while (epsilon < epsilon_max) and (not done):
        x_step = x + epsilon * direction
        pred_end = tf_run(sess, model.output, feed_dict={model.x_tf : x_step})
        if np.argmax(pred_end) != np.argmax(y0):
            # prediction changed; all done
            done = True
        else:
            # keep searching
            epsilon_lb = epsilon
            epsilon = epsilon * 1.1
    # XXX: could search between lb and epsilon for more precise value
    return epsilon
def linearity_test(sess, model, input_dir, output_dir, epsilon_max=50):
    """ Here we check to see if the GAAS subspace construction seems to
    be working with representative loss functions.

    For each image: find an epsilon that crosses the decision boundary,
    verify the gradient step is adversarial, then for a range of gamma
    values build the GAAS vectors and check (a) Lemma 1 of [tra17] holds
    and (b) moving along each r_i increases the loss by >= gamma.
    Writes a .mat loss-map visualization for the first example.
    """
    overall_result = []
    overall_hypothesis = [0,0]  # tallies for the (commented-out) gradient-norm hypothesis below
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)
    for batch_id, (filenames, x0) in enumerate(nets.load_images(input_dir, model.batch_shape)):
        n = len(filenames)
        assert(n==1) # for now, we assume batch size is 1; correctness >> speed here
        base_name = os.path.split(filenames[0])[-1]
        base_name = base_name.split('.')[0]
        #--------------------------------------------------
        # Use predictions on original example as ground truth.
        #--------------------------------------------------
        pred0 = tf_run(sess, model.output, feed_dict={model.x_tf : x0})
        y0_scalar = np.argmax(pred0, axis=1)
        y0 = nets.smooth_one_hot_predictions(y0_scalar, model._num_classes)
        #--------------------------------------------------
        # choose an epsilon (distance to the decision boundary along the gradient)
        #--------------------------------------------------
        epsilon = _distance_to_decision_boundary(sess, model, x0, y0=y0, epsilon_max=epsilon_max)
        print('\nEXAMPLE %3d (%s); epsilon = %0.3f' % (batch_id, filenames[0], epsilon))
        #--------------------------------------------------
        # compute the loss and its gradient
        #--------------------------------------------------
        loss0, grad = tf_run(sess, [model.loss, model.loss_x], feed_dict={model.x_tf : x0, model.y_tf : y0})
        grad = grad.astype(np.float64) # float32 -> float64
        l2_norm_g = norm(grad.flatten(),2)
        #--------------------------------------------------
        # determine how much the loss changes if we move along grad by epsilon
        #--------------------------------------------------
        x_step = x0 + epsilon * (grad / l2_norm_g)
        loss_end, pred_end = tf_run(sess, [model.loss, model.output], feed_dict={model.x_tf : x_step, model.y_tf : y0})
        was_ae_successful = (np.argmax(pred_end,axis=1) != y0_scalar)
        print(' loss / ||g|| on x0: %2.3f / %2.3f' % (loss0, l2_norm_g))
        print(' loss after FGM: %2.3f ' % (loss_end))
        print(' was \ell_2 gradient-step AE successful? {}'.format(was_ae_successful))
        #--------------------------------------------------
        # quick loss visualization (first example only)
        #--------------------------------------------------
        if batch_id == 0:
            r1 = _random_direction(grad.shape)
            X_1, X_2, M_loss, M_label = _loss_map_2d(sess, model, x0, y0, grad, r1, points=np.linspace(0,5,10))
            savemat(os.path.join(output_dir, base_name), {'X_1' : X_1, 'X_2' : X_2, 'M_loss' : M_loss, 'M_label' : M_label})
        # for now, we ignore cases where the original attack was unsuccessful
        if not was_ae_successful:
            continue
        # if moving by epsilon fails to increase the loss, this is unexpected
        # (possible, especially if epsilon is very large)
        if loss_end <= loss0:
            print('[info]: moving along gradient failed to increase loss; skipping...')
            continue
        # first-order (linear) prediction of the loss at x_step
        loss_predicted = loss0 + l2_norm_g * epsilon
        print(' loss along gradient direction, predicted/actual: %2.3f / %2.3f %s' % (loss_predicted, loss_end, '*' if loss_predicted > loss_end else ''))
        #--------------------------------------------------
        # Pick some admissible gamma and compute the corresponding value of k.
        #--------------------------------------------------
        # Note: Lemma 1 requires alpha live in [0,1]!
        #       This limits how large one can make gamma.
        # Note: the formula in [tra17] is actually for alpha^{-1}
        #       (there is a slight typo in paper):
        #
        results = []
        alpha_vals = np.array([.1, .15, .2, .3, .4, .5, .6, .7, .8]) # trying a range of admissible gamma
        gamma_vals = epsilon * l2_norm_g * alpha_vals
        # if the "ideal" gamma (associated with movement along g) is feasible
        # then add it to the list of gamma values to try.
        gamma_ideal = loss_end - loss0
        alpha_ideal = gamma_ideal / (epsilon * l2_norm_g)
        if alpha_ideal <= 1:
            gamma_vals = np.concatenate((gamma_vals, np.array([gamma_ideal,])))
            gamma_vals.sort()
        for gamma in gamma_vals:
            # k = floor(alpha^{-2}), clamped to [1, 300]
            alpha_inv = epsilon * (l2_norm_g / gamma)
            k = int(np.floor(alpha_inv ** 2))
            k = max(k,1)
            k = min(k,300) # put a limit on k
            #--------------------------------------------------
            # Check behavior of GAAS and of the loss function
            #
            # Note that, if the assumption of local linearity is incorrect,
            # the r_i may fail to increase the loss as anticipated.
            #--------------------------------------------------
            inner_product_test = np.zeros((k,))
            losses = np.zeros((k,))
            delta_loss = np.zeros((k,))
            delta_loss_test = np.zeros((k,))
            y_hat_test = np.zeros((k,), dtype=np.int32)
            g_norm_test = np.nan * np.ones((k,))
            #--------------------------------------------------
            # The notation from the paper can be a bit confusing.
            # The r_i in lemma1 have unit \ell_2 norm while the r_i in
            # the GAAS construction have \ell_2 norm <= epsilon.
            #
            # To help keep things clear, I will call the vectors from the lemma "q_i"
            # and the appropriately rescaled vectors for GAAS will be called "r_i".
            #--------------------------------------------------
            Q = gaas(grad, k)
            for ii in range(k):
                q_i = np.reshape(Q[:,ii], grad.shape) # the r_i in lemma 1 of [tra17]
                r_i = q_i * epsilon # the r_i in GAAS perturbation of [tra17]
                x_adv_i = x0 + r_i
                #--------------------------------------------------
                # Ensure lemma 1 is satisfied.
                # This should always be true if our GAAS implementation is correct
                # (does not depend on local behavior of loss function).
                #--------------------------------------------------
                # (the 1e-4 slop absorbs floating-point error in the inner product)
                inner_product_test[ii] = (np.dot(grad.flatten(), q_i.flatten()) + 1e-4) > (l2_norm_g / alpha_inv)
                assert(inner_product_test[ii])
                #--------------------------------------------------
                # see whether the loss behaves as expected; ie. moving along the r_i
                # increases the loss by at least gamma. This assumes the second-order term
                # is sufficiently small that it can be ignored entirely (which may be untrue
                # if the curvature is sufficiently large?).
                #
                # This *does* depend on the loss function and failing this test does not
                # mean the GAAS code is incorrect...it could be that our assumption of
                # local linearity is incorrect for this epsilon.
                #--------------------------------------------------
                loss_i, pred_i = tf_run(sess, [model.loss, model.output], feed_dict={model.x_tf : x_adv_i, model.y_tf : y0})
                losses[ii] = loss_i
                delta_loss[ii] = (loss_i - (gamma + loss0))
                slop = 1e-6 # this should really be tied to the error term in the Taylor series expansion...
                delta_loss_test[ii] = (delta_loss[ii] + slop) > 0.0
                #--------------------------------------------------
                # Check whether r_i was a successful AE.
                # In some sense, this is more a test of the hypothesis that
                # changes in the loss are sufficient to characterize
                # network predictions (vs. integrity of the code).
                #--------------------------------------------------
                y_ae_scalar = np.argmax(pred_i, axis=1)
                y_hat_test[ii] = (y_ae_scalar != y0_scalar)
                #--------------------------------------------------
                # Check gradient norm hypothesis.
                # This is most meaningful if r_i was successful attack...
                #--------------------------------------------------
                # note: now, y is the label associated with the AE
                #       (vs the original label)
                #
                y_ae = nets.smooth_one_hot_predictions(np.argmax(pred_i,axis=1), model._num_classes)
                feed_dict = {model.x_tf : x_adv_i, model.y_tf : y_ae}
                loss_ae, g_ae = tf_run(sess, [model.loss, model.loss_x], feed_dict=feed_dict)
                g_norm_test[ii] = norm(g_ae.flatten(),2) - l2_norm_g
                #if was_ae_successful and y_hat_test[ii]:
                #print('   [r_%d]: loss_ae / ||g_ae||: %2.5f / %2.3f' % (ii, loss_ae, norm(g_ae.flatten(),2)))
                #print('          "%s" -> "%s"' % (CLASS_NAMES[y0_scalar[0]-1], CLASS_NAMES[y_ae_scalar[0]-1]))
            #--------------------------------------------------
            # record performance on this example (one row per gamma)
            #--------------------------------------------------
            results.append((1./alpha_inv,
                            gamma,
                            k,
                            np.mean(losses),
                            np.max(losses),
                            np.sum(y_hat_test),
                            np.sum(g_norm_test > 0),
                            np.mean(g_norm_test)));
        df = pd.DataFrame(np.array(results), columns=('alpha', 'gamma', 'k', 'mean loss', 'max loss', '#AE', '||g_a||>||g||', 'mean(||g_a|| - ||g||)'))
        print('\n'); print(df); print('\n')
        #--------------------------------------------------
        # aggregate results
        #--------------------------------------------------
        # here we track whether the r_i construction changed the loss as desired
        # NOTE(review): delta_loss_test here is from the LAST gamma iteration only
        if np.sum(delta_loss_test) < 1:
            overall_result.append(False)
        else:
            overall_result.append(True)
        # here we check our gradient norm hypothesis
        # we only care about cases where the gradient attack was successful AND the r_i were too
        #if was_ae_successful and np.sum(y_hat_test) > 0:
        #  overall_hypothesis[0] += np.sum(g_norm_test > 0)
        #  overall_hypothesis[1] += np.sum(np.isfinite(g_norm_test))
    # all done!
    print('%d (of %d) admissible examples behaved as expected' % (np.sum(overall_result), len(overall_result)))
    #print('%d (of %d) successful r_i resulted in larger gradient norms' % (overall_hypothesis[0], overall_hypothesis[1]))
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
def _sample_adversarial_direction(model, x0, y0, g, epsilon_max):
    """ Evaluates model loss and predictions at multiple points along
    a (presumed) adversarial direction.

    Returns (out, x_adv): `out` is an (n,3) array with rows
    [distance, loss, predicted class], and `x_adv` is the clipped point at
    the final (largest) sampled distance.

    NOTE(review): this function reads `sess` from global scope (it is not a
    parameter), so it will raise a NameError unless a module-level
    tf.Session named `sess` exists (as when run via __main__). Consider
    passing the session explicitly.
    """
    g_normalized = g / norm(g.flatten(),2) # unit \ell_2 norm
    # fractions of epsilon_max at which to sample along the direction
    v = [.01, .1, .2, .3, .4, .5, .6, .7, .8, .9, 1.0]
    out = np.zeros((len(v),3))
    for idx, pct in enumerate(v):
        x_adv = np.clip(x0 + pct * epsilon_max * g_normalized, -1, 1)
        loss = sess.run(model.loss, feed_dict={model.x_tf : x_adv, model.y_tf : y0})
        pred = sess.run(model.output, feed_dict={model.x_tf : x_adv})
        out[idx,0] = pct * epsilon_max
        out[idx,1] = loss
        out[idx,2] = np.argmax(pred,axis=1)
    return out, x_adv
def gaas_attack(sess, model, epsilon_frac, input_dir, output_dir):
    """ Computes subset of attacks using GAAS method.

    For each image: check whether an epsilon-sized gradient step is
    adversarial; if so, build the GAAS vectors for several gamma values and
    count how many r_i directions are also adversarial.  Saves the gradient
    AE image and sampled-direction data to `output_dir`.
    """
    n_images, n_successful = 0, 0
    print('[WARNING] this code is still under development!!!')
    for batch_id, (filenames, x0) in enumerate(nets.load_images(input_dir, model.batch_shape)):
        n = len(filenames)
        assert(n==1) # for now, we assume batch size is 1
        # translate relative energy constraint into an ell2 constraint
        epsilon = epsilon_frac * norm(x0.flatten(),2)
        #--------------------------------------------------
        # Use predictions on clean data as truth.
        #--------------------------------------------------
        pred0 = sess.run(model.output, feed_dict={model.x_tf : x0})
        y0_scalar = np.argmax(pred0, axis=1)
        y0 = nets.smooth_one_hot_predictions(y0_scalar, model._num_classes)
        # also compute the loss and its gradient
        feed_dict = {model.x_tf : x0, model.y_tf : y0}
        loss0, g = sess.run([model.loss, model.loss_x], feed_dict=feed_dict)
        g_normalized = g / norm(g.flatten(),2)
        #--------------------------------------------------
        # Determine whether moving epsilon along the gradient produces an AE.
        # If not, then the subset of r_i is unlikely to be useful.
        #--------------------------------------------------
        out, x_adv = _sample_adversarial_direction(model, x0, y0, g_normalized, epsilon)
        loss_g = out[-1,1]
        was_ae_successful = np.any(out[:,2] != y0_scalar)
        delta_x = x_adv - x0
        print('[GAAS]: image %d successful? %d, y0=%d, ||x||_2 = %0.3f, ||x - x_g||_2 = %0.3f, ||x - x_g||_\inf = %0.3f, delta_loss=%0.3f' % (batch_id, was_ae_successful, y0_scalar, norm(x0.flatten(),2), norm(delta_x.flatten(), 2), norm(delta_x.flatten(), np.inf), loss_g - loss0))
        sys.stdout.flush()
        # save results for subsequent analysis
        fn = os.path.join(output_dir, 'grad_%d_' % was_ae_successful + filenames[0])
        imsave(fn, x_adv[0,...])
        fn = os.path.join(output_dir, 'gradient_samps_' + filenames[0].replace('.png', '.mat'))
        savemat(fn, {'out' : out})
        n_images += 1
        if not was_ae_successful:
            continue
        n_successful += 1
        print(out)
        #--------------------------------------------------
        # Check whether the r_i are also adversarial directions
        # (if one moves epsilon along that direction)
        #--------------------------------------------------
        for gamma_pct in [.8, .9, .99]:
            # We have to choose how much the loss should change (i.e. gamma)
            # Currently, this is based on the maximum change determined above.
            # It may be there is a better approach...
            gamma = gamma_pct * (loss_g - loss0)
            # Compute k ("subspace" dimension)
            alpha = gamma / (epsilon * norm(g.flatten(),2))
            k = min(g.size, np.floor(1.0/(alpha*alpha)))
            k = int(max(k, 1))
            print('k=%d' % k) # TEMP
            k = min(k, 1000) # put a limit on k for practical (computational) reasons
            R = gaas(g.flatten(),k)
            # count how many of the r_i are also successful AE directions
            # BUG FIX: this inner counter was previously also named
            # n_successful, which clobbered the per-image success counter
            # and corrupted the overall success rate reported below.
            n_ri_successful = 0
            for ii in range(k):
                r_i = R[:,ii] * epsilon
                r_i = np.reshape(r_i, x0.shape)
                x_adv = np.clip(x0 + r_i, -1, 1)
                pred_ri = sess.run(model.output, feed_dict={model.x_tf : x_adv})
                n_ri_successful += np.argmax(pred_ri,axis=1) != y0_scalar
            print(' gamma=%0.2f, epsilon=%0.2f, k=%d, ri_succ_rate=%0.3f%%' % (gamma, epsilon, k, 100.*n_ri_successful / k))
    print('[GAAS]: AE success rate: %0.2f%%' % (100.* n_successful / n_images))
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
if __name__ == "__main__":
    np.set_printoptions(precision=4, suppress=True) # make output easier to read
    pd.set_option('display.width', 120)
    # otherwise, attack some data
    input_dir = sys.argv[1]   # directory of input images
    output_dir = sys.argv[2]  # directory for AE images / .mat dumps
    # attack budgets (currently informational only; linearity_test picks its own epsilon)
    epsilon_l2 = 0.1
    epsilon_linf = 0.4
    print('[info]: epsilon_l2=%0.3f, epsilon_linf=%0.3f' % (epsilon_l2, epsilon_linf))
    # load imagenet class names (python-dict-literal formatted file)
    with open('./imagenet1000_clsid_to_human.txt', 'r') as f:
        CLASS_NAMES = ast.literal_eval(f.read())
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)
    # `sess` is intentionally module-level here; helper functions above rely on it.
    with tf.Graph().as_default(), tf.Session() as sess:
        model = nets.InceptionV3(sess)
        linearity_test(sess, model, input_dir, output_dir)
|
"""
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import scraper
import urlparse
import re
import kodi
import log_utils # @UnusedImport
import dom_parser2
from salts_lib import jsunpack
from salts_lib import scraper_utils
from salts_lib.constants import VIDEO_TYPES
from salts_lib.constants import FORCE_NO_MATCH
from salts_lib.constants import QUALITIES
BASE_URL = 'http://moviego.cc'
XHR = {'X-Requested-With': 'XMLHttpRequest'}
Q_MAP = {'HD1080': QUALITIES.HD1080, 'HD720': QUALITIES.HD720, 'SD480': QUALITIES.HIGH, 'CAMRIP': QUALITIES.LOW}
class Scraper(scraper.Scraper):
    """SALTS scraper for the MovieGo site (movies only).

    Python 2 code (urlparse / iteritems).  Quality is read from the page's
    'poster-qulabel' div and mapped through Q_MAP; embedded players are
    unpacked with jsunpack to expose direct stream links.
    """
    base_url = BASE_URL

    def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
        self.timeout = timeout
        # allow the user to override the base url from addon settings
        self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))

    @classmethod
    def provides(cls):
        # video types this scraper can serve
        return frozenset([VIDEO_TYPES.MOVIE])

    @classmethod
    def get_name(cls):
        # display name; also used as the settings key prefix in __init__
        return 'MovieGo'

    def get_sources(self, video):
        """Return a list of hoster dicts for *video* (empty on no match)."""
        hosters = []
        source_url = self.get_url(video)
        if not source_url or source_url == FORCE_NO_MATCH: return hosters
        page_url = scraper_utils.urljoin(self.base_url, source_url)
        html = self._http_get(page_url, cache_limit=8)
        # page-level quality label, e.g. "HD720" (falls back to HIGH)
        q_str = dom_parser2.parse_dom(html, 'div', {'class': 'poster-qulabel'})
        if q_str:
            q_str = q_str[0].content.replace(' ', '').upper()
            page_quality = Q_MAP.get(q_str, QUALITIES.HIGH)
        else:
            page_quality = QUALITIES.HIGH
        for _attrs, fragment in dom_parser2.parse_dom(html, 'div', {'class': 'tab_box'}):
            iframe_url = dom_parser2.parse_dom(fragment, 'iframe', req='src')
            if iframe_url:
                iframe_url = iframe_url[0].attrs['src']
                if 'youtube' in iframe_url: continue  # skip trailers
                html = self._http_get(iframe_url, headers={'Referer': page_url}, cache_limit=.5)
                # unpack any p.a.c.k.e.d javascript so stream urls are visible
                for match in re.finditer('(eval\(function\(.*?)</script>', html, re.DOTALL):
                    js_data = jsunpack.unpack(match.group(1))
                    js_data = js_data.replace('\\', '')
                    html += js_data
                sources = scraper_utils.parse_sources_list(self, html)
                if not sources:
                    # no direct streams found; fall back to the iframe itself
                    sources = {iframe_url: {'quality': page_quality, 'direct': False}}
                for source, values in sources.iteritems():
                    direct = values['direct']
                    if direct:
                        host = scraper_utils.get_direct_hostname(self, source)
                        if host == 'gvideo':
                            quality = scraper_utils.gv_get_quality(source)
                        else:
                            quality = values['quality']
                        source += scraper_utils.append_headers({'User-Agent': scraper_utils.get_ua(), 'Referer': page_url})
                    else:
                        host = urlparse.urlparse(source).hostname
                        quality = scraper_utils.get_quality(video, host, values['quality'])
                    hoster = {'multi-part': False, 'url': source, 'host': host, 'class': self, 'quality': quality, 'views': None, 'rating': None, 'direct': direct}
                    hosters.append(hoster)
        return hosters

    def __get_ajax_sources(self, html, page_url):
        """Resolve a $.getJSON player call to a stream url ('' on failure).

        NOTE(review): not referenced anywhere in the visible code -- possibly
        dead, or called reflectively from elsewhere; verify before removing.
        """
        stream_url = ''
        match = re.search('''\$\.getJSON\('([^']+)'\s*,\s*(\{.*?\})''', html)
        if match:
            ajax_url, params = match.groups()
            params = scraper_utils.parse_params(params)
            ajax_url = scraper_utils.urljoin(self.base_url, ajax_url)
            headers = {'Referer': page_url}
            headers.update(XHR)
            html = self._http_get(ajax_url, params=params, headers=headers, cache_limit=.5)
            js_data = scraper_utils.parse_json(html, ajax_url)
            stream_url = js_data.get('file', '')
        return stream_url

    def search(self, video_type, title, year, season=''):  # @UnusedVariable
        """Search the site for *title*; returns result dicts (title/year/url)."""
        results = []
        data = {'hash': 'indexert', 'do': 'search', 'subaction': 'search', 'search_start': 0, 'full_search': 0, 'result_from': 1, 'story': title}
        search_url = scraper_utils.urljoin(self.base_url, 'index.php')
        html = self._http_get(search_url, params={'do': 'search'}, data=data, cache_limit=8)
        if dom_parser2.parse_dom(html, 'div', {'class': 'sresult'}):
            for _attrs, item in dom_parser2.parse_dom(html, 'div', {'class': 'short_content'}):
                match_url = dom_parser2.parse_dom(item, 'a', req='href')
                match_title_year = dom_parser2.parse_dom(item, 'div', {'class': 'short_header'})
                if match_url and match_title_year:
                    match_url = match_url[0].attrs['href']
                    match_title, match_year = scraper_utils.extra_year(match_title_year[0].content)
                    # keep the result when either side lacks a year or the years agree
                    if not year or not match_year or year == match_year:
                        result = {'title': scraper_utils.cleanse_title(match_title), 'year': match_year, 'url': scraper_utils.pathify_url(match_url)}
                        results.append(result)
        return results
|
#-------------------------------------------------------------------------------
# Revc
#-------------------------------------------------------------------------------
def runRevc(inputFile):
    """Return the reverse complement of the DNA sequence in *inputFile*.

    Reads the whole file, complements each A/C/G/T base (characters other
    than these -- e.g. newlines -- are skipped, as in the original), and
    returns the complemented sequence reversed.
    """
    complement = {'A': 'T', 'T': 'A', 'G': 'C', 'C': 'G'}
    # Context manager closes the handle (the original leaked the open file).
    with open(inputFile, 'r') as fi:
        inputData = fi.read()
    # Single dict lookup replaces the original chain of four if-statements.
    finalString = ''.join(complement[k] for k in inputData if k in complement)
    return finalString[::-1]
#-------------------------------------------------------------------------------
# Fin
#-------------------------------------------------------------------------------
|
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2015 Red Hat, Inc.
# Author: Petr Spacek <pspacek@redhat.com>
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.rdtypes.euibase
import dns.immutable
@dns.immutable.immutable
class EUI48(dns.rdtypes.euibase.EUIBase):
    """EUI48 record: a 48-bit Extended Unique Identifier (MAC-48 address)."""

    # see: rfc7043.txt
    byte_len = 6  # wire size in bytes: 0123456789ab (in hex)
    text_len = byte_len * 3 - 1  # presentation size: 01-23-45-67-89-ab
|
import re
import yaml
# Matches a '---' front-matter boundary on a line of its own.
FM_BOUNDARY = re.compile(r"^---$", re.MULTILINE)


def split(content):
    """Split a front-matter document into ``[metadata, body]``.

    *content* must contain two ``---`` boundary lines; the text between
    them is parsed as YAML.  Returns ``[fm_data, body]`` where ``fm_data``
    is the parsed YAML value and ``body`` is the text after the closing
    boundary with its leading newline removed.

    Raises ValueError when the boundaries are missing or the YAML is invalid.
    """
    try:
        _, fm, body = FM_BOUNDARY.split(content, 2)
    except ValueError:
        # Fewer than two boundaries -> the 3-way unpack fails.
        raise ValueError("Invalid front matter content")
    try:
        fm_data = yaml.safe_load(fm)
    except yaml.YAMLError:
        # BUG FIX: catch the whole PyYAML error hierarchy. The original
        # caught only yaml.scanner.ScannerError, so parser/composer errors
        # escaped as raw yaml exceptions instead of the documented ValueError.
        raise ValueError("Invalid yaml")
    # body starts with the newline right after the closing '---'; drop it.
    return [fm_data, body[1:]]
|
# -*- coding: utf-8 -*-
import xlrd
import xlwt
from datetime import date,datetime
from xlutils.copy import copy
def read_excel():
    """Demo of reading/updating 'books.xls' (Python 2, xlrd/xlutils).

    Looks up the sheet named in cell (1,0) of the 'account' sheet, prints
    two of its cells, then writes "123" into cell (2,1) of the third sheet
    and saves the workbook back to books.xls.
    """
    # # Open the workbook file
    # workbook = xlrd.open_workbook(r'books.xls')
    # # Get all sheet names
    # print workbook.sheet_names() # [u'sheet1', u'sheet2']
    # sheet2_name = workbook.sheet_names()[1]
    #
    # # Get a sheet's contents by index or by name
    # sheet2 = workbook.sheet_by_index(1) # sheet indices start at 0
    # sheet2 = workbook.sheet_by_name('Sheet2')
    #
    # # The sheet's name, row count and column count
    # print sheet2.name,sheet2.nrows,sheet2.ncols
    #
    # # Get whole-row and whole-column values (as lists)
    # rows = sheet2.row_values(3) # contents of the fourth row
    # cols = sheet2.col_values(2) # contents of the third column
    # print rows
    # print cols
    #
    # # Get an individual cell's contents
    # print sheet2.cell(1,0).value.encode('utf-8')
    # print sheet2.cell_value(1,0).encode('utf-8')
    # print sheet2.row(1)[0].value.encode('utf-8')
    #
    #
    # print xlrd.xldate_as_tuple(sheet2.cell_value(2,2),workbook.datemode)
    # date_value = xlrd.xldate_as_tuple(sheet2.cell_value(2,2),workbook.datemode)
    # print date(*date_value[:3]).strftime('%Y/%m/%d')
    # # Get the data type of a cell's contents
    # print sheet2.cell(2,2).ctype
    workbook = xlrd.open_workbook(r'books.xls')
    # cell (1,0) of the 'account' sheet holds the name of the sheet to read
    sheet2 = workbook.sheet_by_name('account')
    sheet = workbook.sheet_by_name(sheet2.cell(1,0).value)
    print sheet.cell(1,0)
    print sheet.cell(1,1)
    # xlrd workbooks are read-only; copy to a writable xlwt workbook first
    newbook = copy(workbook)
    new_sheet = newbook.get_sheet(2)
    #sheet_change = workbook.sheet_by_name(sheet2.cell(1,0).value)
    new_sheet.write(2,1,"123")
    newbook.save(r'books.xls')
if __name__ == '__main__':
read_excel() |
import json
import unittest
from .. import address, slp
script_tests_json = r"""
[
{
"msg": "OK: minimal GENESIS",
"script": "6a04534c500001010747454e455349534c004c004c004c0001004c00080000000000000064",
"code": null
},
{
"msg": "OK: typical MINT without baton",
"script": "6a04534c50000101044d494e5420ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff4c00080000000000000064",
"code": null
},
{
"msg": "OK: typical 1-output SEND",
"script": "6a04534c500001010453454e44208888888888888888888888888888888888888888888888888888888888888888080000000000000042",
"code": null
},
{
"msg": "OK: typical 2-output SEND",
"script": "6a04534c500001010453454e44208888888888888888888888888888888888888888888888888888888888888888080000000000000042080000000000000063",
"code": null
},
{
"msg": "Script ending mid-PUSH (one byte short) must be SLP-invalid",
"script": "6a04534c500001010747454e455349534c004c004c004c0001004c000800000000000064",
"code": 1
},
{
"msg": "Script ending mid-PUSH (no length) must be SLP-invalid",
"script": "6a04534c500001010747454e455349534c004c004c004c0001004c004c",
"code": 1
},
{
"msg": "Script ending mid-PUSH (length is one byte short) must be SLP-invalid",
"script": "6a04534c500001010747454e455349534c004c004c004c0001004c004d00",
"code": 1
},
{
"msg": "(must be invalid: forbidden opcode): uses opcode OP_0",
"script": "6a04534c500001010747454e455349534c00004c004c0001004c00080000000000000064",
"code": 2
},
{
"msg": "(must be invalid: forbidden opcode): uses opcode OP_1",
"script": "6a04534c5000510747454e455349534c004c004c004c0001004c00080000000000000064",
"code": 2
},
{
"msg": "(must be invalid: forbidden opcode): uses opcode OP_1NEGATE",
"script": "6a04534c50004f0747454e455349534c004c004c004c0001004c00080000000000000064",
"code": 2
},
{
"msg": "(must be invalid: forbidden opcode): uses opcode 0x50",
"script": "6a04534c5000500747454e455349534c004c004c004c0001004c00080000000000000064",
"code": 2
},
{
"msg": "(not SLP): p2pkh address script",
"script": "76a914ffffffffffffffffffffffffffffffffffffffff88ac",
"code": 3
},
{
"msg": "(not SLP): empty op_return",
"script": "6a",
"code": 3
},
{
"msg": "(not SLP): first push is 9-byte 'yours.org'",
"script": "6a09796f7572732e6f7267",
"code": 3
},
{
"msg": "(not SLP): first push is 4-byte '\\x00BET'",
"script": "6a0400424554",
"code": 3
},
{
"msg": "(not SLP): first push is 4-byte '\\x00SLP'",
"script": "6a0400534c5001010747454e455349534c004c004c004c0001004c00080000000000000064",
"code": 3
},
{
"msg": "(not SLP): first push is 3-byte 'SLP'",
"script": "6a03534c5001010747454e455349534c004c004c004c0001004c00080000000000000064",
"code": 3
},
{
"msg": "(not SLP): first push is 5-byte 'SLP\\x00\\x00'",
"script": "6a05534c50000001010747454e455349534c004c004c004c0001004c00080000000000000064",
"code": 3
},
{
"msg": "(not SLP): first push is 7-byte '\\xef\\xbb\\xbfSLP\\x00' (UTF8 byte order mark + 'SLP\\x00')",
"script": "6a07efbbbf534c500001010747454e455349534c004c004c004c0001004c00080000000000000064",
"code": 3
},
{
"msg": "OK: lokad pushed using PUSHDATA1",
"script": "6a4c04534c500001010747454e455349534c004c004c004c0001004c00080000000000000064",
"code": null
},
{
"msg": "OK: lokad pushed using PUSHDATA2",
"script": "6a4d0400534c500001010747454e455349534c004c004c004c0001004c00080000000000000064",
"code": null
},
{
"msg": "OK: lokad pushed using PUSHDATA4",
"script": "6a4e04000000534c500001010747454e455349534c004c004c004c0001004c00080000000000000064",
"code": null
},
{
"msg": "OK: 2 bytes for token_type=1",
"script": "6a04534c50000200010747454e455349534c004c004c004c0001004c00080000000000000064",
"code": null
},
{
"msg": "(unsupported token type, must be token_type=1): 2 bytes for token_type=2",
"script": "6a04534c50000200020747454e455349534c004c004c004c0001004c00080000000000000064",
"code": 255
},
{
"msg": "(must be invalid: wrong size): 3 bytes for token_type",
"script": "6a04534c5000030000010747454e455349534c004c004c004c0001004c00080000000000000064",
"code": 10
},
{
"msg": "(must be invalid: wrong size): 0 bytes for token_type",
"script": "6a04534c50004c000747454e455349534c004c004c004c0001004c00080000000000000064",
"code": 10
},
{
"msg": "(must be invalid: too short): stopped after lokad ID",
"script": "6a04534c5000",
"code": 12
},
{
"msg": "(must be invalid: too short): stopped after token_type",
"script": "6a04534c50000101",
"code": 12
},
{
"msg": "(must be invalid: too short): stopped after transaction_type GENESIS",
"script": "6a04534c500001010747454e45534953",
"code": 12
},
{
"msg": "(must be invalid: too short): stopped after transaction_type MINT",
"script": "6a04534c50000101044d494e54",
"code": 12
},
{
"msg": "(must be invalid: too short): stopped after transaction_type SEND",
"script": "6a04534c500001010453454e44",
"code": 12
},
{
"msg": "(must be invalid: bad value): transaction_type 'INIT'",
"script": "6a04534c5000010104494e49544c004c004c004c0001004c00080000000000000064",
"code": 11
},
{
"msg": "(must be invalid: bad value): transaction_type 'TRAN'",
"script": "6a04534c50000101045452414e208888888888888888888888888888888888888888888888888888888888888888080000000000000042",
"code": 11
},
{
"msg": "(must be invalid: bad value): transaction_type 'send'",
"script": "6a04534c500001010473656e64208888888888888888888888888888888888888888888888888888888888888888080000000000000042",
"code": 11
},
{
"msg": "(must be invalid: bad value): transaction_type = 7-byte '\\xef\\xbb\\xbfSEND' (UTF8 byte order mark + 'SEND')",
"script": "6a04534c5000010107efbbbf53454e44208888888888888888888888888888888888888888888888888888888888888888080000000000000042",
"code": 11
},
{
"msg": "(must be invalid: bad value): transaction_type = 10-byte UTF16 'SEND' (incl. BOM)",
"script": "6a04534c500001010afffe530045004e004400208888888888888888888888888888888888888888888888888888888888888888080000000000000042",
"code": 11
},
{
"msg": "(must be invalid: bad value): transaction_type = 20-byte UTF32 'SEND' (incl. BOM)",
"script": "6a04534c5000010114fffe000053000000450000004e00000044000000208888888888888888888888888888888888888888888888888888888888888888080000000000000042",
"code": 11
},
{
"msg": "OK: 8-character ticker 'NAKAMOTO' ascii",
"script": "6a04534c500001010747454e45534953084e414b414d4f544f4c004c004c0001094c00080000000000000064",
"code": null
},
{
"msg": "OK: 9-character ticker 'Satoshi_N' ascii",
"script": "6a04534c500001010747454e45534953095361746f7368695f4e4c004c004c0001094c00080000000000000064",
"code": null
},
{
"msg": "OK: 2-character ticker '\u4e2d\u672c' ('nakamoto' kanji) -- 6 bytes utf8",
"script": "6a04534c500001010747454e4553495306e4b8ade69cac4c004c004c0001094c00080000000000000064",
"code": null
},
{
"msg": "OK: 4-character ticker '\u30ca\u30ab\u30e2\u30c8' ('nakamoto' katakana) -- 12 bytes utf8",
"script": "6a04534c500001010747454e455349530ce3838ae382abe383a2e383884c004c004c0001094c00080000000000000064",
"code": null
},
{
"msg": "(must be invalid: wrong size): Genesis with 0-byte decimals",
"script": "6a04534c500001010747454e455349534c004c004c004c004c004c00080000000000000064",
"code": 10
},
{
"msg": "(must be invalid: wrong size): Genesis with 2-byte decimals",
"script": "6a04534c500001010747454e455349534c004c004c004c000200004c00080000000000000064",
"code": 10
},
{
"msg": "OK: Genesis with 32-byte dochash",
"script": "6a04534c500001010747454e455349534c004c004c0020ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01004c00080000000000000064",
"code": null
},
{
"msg": "(must be invalid: wrong size): Genesis with 31-byte dochash",
"script": "6a04534c500001010747454e455349534c004c004c001fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01004c00080000000000000064",
"code": 10
},
{
"msg": "(must be invalid: wrong size): Genesis with 33-byte dochash",
"script": "6a04534c500001010747454e455349534c004c004c0021ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01004c00080000000000000064",
"code": 10
},
{
"msg": "(must be invalid: wrong size): Genesis with 64-byte dochash",
"script": "6a04534c500001010747454e455349534c004c004c0040ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01004c00080000000000000064",
"code": 10
},
{
"msg": "(must be invalid: wrong size): Genesis with 20-byte dochash",
"script": "6a04534c500001010747454e455349534c004c004c0014ffffffffffffffffffffffffffffffffffffffff01004c00080000000000000064",
"code": 10
},
{
"msg": "(must be invalid: wrong size): SEND with 0-byte token_id",
"script": "6a04534c500001010453454e444c00080000000000000064",
"code": 10
},
{
"msg": "(must be invalid: wrong size): SEND with 31-byte token_id",
"script": "6a04534c500001010453454e441fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff080000000000000064",
"code": 10
},
{
"msg": "(must be invalid: wrong size): SEND with 33-byte token_id",
"script": "6a04534c500001010453454e4421ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff080000000000000064",
"code": 10
},
{
"msg": "(must be invalid: wrong size): MINT with 0-byte token_id",
"script": "6a04534c50000101044d494e544c004c00080000000000000064",
"code": 10
},
{
"msg": "(must be invalid: wrong size): MINT with 31-byte token_id",
"script": "6a04534c50000101044d494e541fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff4c00080000000000000064",
"code": 10
},
{
"msg": "(must be invalid: wrong size): MINT with 32-byte token_id",
"script": "6a04534c50000101044d494e5421ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff4c00080000000000000064",
"code": 10
},
{
"msg": "(must be invalid: wrong size): SEND with a 7-byte amount",
"script": "6a04534c500001010453454e442088888888888888888888888888888888888888888888888888888888888888880800000000000000630700000000000042080000000000000063",
"code": 10
},
{
"msg": "(must be invalid: wrong size): SEND with a 9-byte amount",
"script": "6a04534c500001010453454e4420888888888888888888888888888888888888888888888888888888888888888808000000000000006309000000000000000042080000000000000063",
"code": 10
},
{
"msg": "(must be invalid: wrong size): SEND with a 0-byte amount",
"script": "6a04534c500001010453454e442088888888888888888888888888888888888888888888888888888888888888880800000000000000634c00080000000000000063",
"code": 10
},
{
"msg": "OK: Genesis with decimals=9",
"script": "6a04534c500001010747454e455349534c004c004c004c0001094c00080000000000000064",
"code": null
},
{
"msg": "(must be invalid: bad value): Genesis with decimals=10",
"script": "6a04534c500001010747454e455349534c004c004c004c00010a4c00080000000000000064",
"code": 11
},
{
"msg": "OK: Genesis with mint_baton_vout=255",
"script": "6a04534c500001010747454e455349534c004c004c004c00010001ff080000000000000064",
"code": null
},
{
"msg": "OK: Genesis with mint_baton_vout=95",
"script": "6a04534c500001010747454e455349534c004c004c004c000100015f080000000000000064",
"code": null
},
{
"msg": "OK: Genesis with mint_baton_vout=2",
"script": "6a04534c500001010747454e455349534c004c004c004c0001000102080000000000000064",
"code": null
},
{
"msg": "(must be invalid: bad value): Genesis with mint_baton_vout=1",
"script": "6a04534c500001010747454e455349534c004c004c004c0001000101080000000000000064",
"code": 11
},
{
"msg": "(must be invalid: bad value): Genesis with mint_baton_vout=0",
"script": "6a04534c500001010747454e455349534c004c004c004c0001000100080000000000000064",
"code": 11
},
{
"msg": "OK: MINT with mint_baton_vout=255",
"script": "6a04534c50000101044d494e5420ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01ff080000000000000064",
"code": null
},
{
"msg": "OK: MINT with mint_baton_vout=95",
"script": "6a04534c50000101044d494e5420ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff015f080000000000000064",
"code": null
},
{
"msg": "OK: MINT with mint_baton_vout=2",
"script": "6a04534c50000101044d494e5420ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0102080000000000000064",
"code": null
},
{
"msg": "(must be invalid: bad value): MINT with mint_baton_vout=1",
"script": "6a04534c50000101044d494e5420ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0101080000000000000064",
"code": 11
},
{
"msg": "(must be invalid: bad value): MINT with mint_baton_vout=0",
"script": "6a04534c50000101044d494e5420ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0100080000000000000064",
"code": 11
},
{
"msg": "(must be invalid: wrong number of params) GENESIS with extra token amount",
"script": "6a04534c500001010747454e455349534c004c004c004c0001004c00080000000000000064080000000000000064",
"code": 12
},
{
"msg": "(must be invalid: wrong number of params) MINT with extra token amount",
"script": "6a04534c50000101044d494e5420ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff4c00080000000000000064080000000000000064",
"code": 12
},
{
"msg": "OK: SEND with 19 token output amounts",
"script": "6a04534c500001010453454e44208888888888888888888888888888888888888888888888888888888888888888080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001",
"code": null
},
{
"msg": "(must be invalid: too many parameters): SEND with 20 token output amounts",
"script": "6a04534c500001010453454e44208888888888888888888888888888888888888888888888888888888888888888080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001080000000000000001",
"code": 21
},
{
"msg": "OK: all output amounts 0",
"script": "6a04534c500001010453454e44208888888888888888888888888888888888888888888888888888888888888888080000000000000000080000000000000000",
"code": null
},
{
"msg": "OK: three inputs of max value (2**64-1) whose sum overflows a 64-bit int",
"script": "6a04534c500001010453454e4420888888888888888888888888888888888888888888888888888888888888888808ffffffffffffffff08ffffffffffffffff08ffffffffffffffff",
"code": null
},
{
"msg": "OK: using opcode PUSHDATA1 for 8-byte push",
"script": "6a04534c500001010747454e455349534c004c004c004c0001004c004c080000000000000064",
"code": null
},
{
"msg": "OK: using opcode PUSHDATA2 for empty push",
"script": "6a04534c500001010747454e455349534c004d00004c004c0001004c00080000000000000064",
"code": null
},
{
"msg": "OK: using opcode PUSHDATA4 for empty push",
"script": "6a04534c500001010747454e455349534c004e000000004c004c0001004c00080000000000000064",
"code": null
},
{
"msg": "OK: ticker is bad utf8 E08080 (validators must not require decodeable strings)",
"script": "6a04534c500001010747454e4553495303e080804c004c004c0001094c00080000000000000064",
"code": null
},
{
"msg": "OK: ticker is bad utf8 C0 (validators must not require decodeable strings)",
"script": "6a04534c500001010747454e4553495301c04c004c004c0001094c00080000000000000064",
"code": null
},
{
"msg": "OK: name is bad utf8 E08080 (validators must not require decodeable strings)",
"script": "6a04534c500001010747454e455349534c0003e080804c004c0001094c00080000000000000064",
"code": null
},
{
"msg": "OK: name is bad utf8 C0 (validators must not require decodeable strings)",
"script": "6a04534c500001010747454e455349534c0001c04c004c0001094c00080000000000000064",
"code": null
},
{
"msg": "OK: url is bad utf8 E08080 (validators must not require decodeable strings)",
"script": "6a04534c500001010747454e455349534c004c0003e080804c0001094c00080000000000000064",
"code": null
},
{
"msg": "OK: url is bad utf8 C0 (validators must not require decodeable strings)",
"script": "6a04534c500001010747454e455349534c004c0001c04c0001094c00080000000000000064",
"code": null
},
{
"msg": "OK: genesis with 300-byte name 'UUUUU...' (op_return over 223 bytes, validators must not refuse this)",
"script": "6a04534c500001010747454e455349534c004d2c015555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555554c004c0001004c00080000000000000064",
"code": null
},
{
"msg": "OK: genesis with 300-byte document url 'UUUUU...' (op_return over 223 bytes, validators must not refuse this)",
"script": "6a04534c500001010747454e455349534c004c004d2c015555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555555554c0001004c00080000000000000064",
"code": null
}
]
"""
# Maps slp.InvalidOutputMessage.args tuples to the numeric error codes used
# by the JSON test vectors above ("code": null in a vector means valid).
errorcodes = {
    # no-error maps to None
    # various script format errors
    ("Bad OP_RETURN", "Script error"): 1,
    # disallowed opcodes
    ("Bad OP_RETURN", "Non-push opcode"): 2,
    ("Bad OP_RETURN", "OP_1NEGATE to OP_16 not allowed"): 2,
    ("Bad OP_RETURN", "OP_0 not allowed"): 2,
    # not OP_RETURN script / not SLP
    # (note in some implementations, parsers should never be given such non-SLP scripts in the first place. In such implementations, error code 3 tests may be skipped.)
    ("Bad OP_RETURN", "No OP_RETURN"): 3,
    ("Empty OP_RETURN",): 3,
    ("Not SLP",): 3,
    # 10- field bytesize is wrong
    ("Field has wrong length",): 10,
    ("Ticker too long",): 10,
    ("Token document hash is incorrect length",): 10,
    ("token_id is wrong length",): 10,
    # 11- improper value
    ("Too many decimals",): 11,
    ("Bad transaction type",): 11,
    ("Mint baton cannot be on vout=0 or 1",): 11,
    # 12- missing field / too few fields
    ("Missing output amounts",): 12,
    ("Missing token_type",): 12,
    ("Missing SLP command",): 12,
    ("GENESIS with incorrect number of parameters",): 12,
    ("SEND with too few parameters",): 12,
    ("MINT with incorrect number of parameters",): 12,
    # specific
    ("More than 19 output amounts",): 21,
    # SlpUnsupportedSlpTokenType : 255 below
}
class SLPTests(unittest.TestCase):
    """Exercises slp.ScriptOutput parsing and slp.Build round-trips against
    the JSON vectors in script_tests_json."""

    def test_opreturn_parse(self):
        """Each vector must parse cleanly (code null) or raise an error that
        maps (via `errorcodes`) to one of the vector's expected codes."""
        testlist = json.loads(script_tests_json)
        for d in testlist:
            description = d["msg"]
            scripthex = d["script"]
            code = d["code"]
            if scripthex is None:
                continue
            # A vector's "code" may be a single code or a list of codes.
            if hasattr(code, "__iter__"):
                expected_codes = tuple(code)
            else:
                expected_codes = (code,)
            with self.subTest(description=description, script=scripthex):
                sco = address.ScriptOutput(bytes.fromhex(scripthex))
                try:
                    slp.ScriptOutput(sco.script)
                except Exception as e:
                    if isinstance(e, slp.InvalidOutputMessage):
                        self.assertIn(
                            errorcodes[e.args],
                            expected_codes,
                            "Invalidity reason %r (code: %d) not in expected reasons %r"
                            % (e.args, errorcodes[e.args], expected_codes),
                        )
                    elif isinstance(e, slp.UnsupportedSlpTokenType):
                        self.assertIn(
                            255,
                            expected_codes,
                            "UnsupportedSlpTokenType exception raised (code 255) but not in expected reasons (%r)"
                            % (expected_codes,),
                        )
                    else:
                        raise
                else:
                    # no exception
                    self.assertIn(
                        None,
                        expected_codes,
                        "Script was found valid but should have been invalid, for a reason code in %r."
                        % (expected_codes,),
                    )

    def test_opreturn_build(self):
        """Round-trip every valid vector: parse it, rebuild it via slp.Build,
        and check the rebuilt message matches property-for-property."""
        testlist = json.loads(script_tests_json)
        ctr = 0

        # Hoisted out of the loop (the original re-defined this closure on
        # every iteration); it only closes over `self`.
        def check_is_equal_message(msg1, msg2):
            # Compare every public, non-callable property present on either
            # message; "chunks" holds raw script internals and is skipped.
            seen = {"chunks"}
            for k in msg1.valid_properties:
                if k.startswith("_") or k in seen:
                    continue
                v = getattr(msg1, k, None)
                if v is not None and not callable(v):
                    self.assertEqual(v, getattr(msg2, k, None))
                seen.add(k)
            for k in msg2.valid_properties:
                if k.startswith("_") or k in seen:
                    continue
                v = getattr(msg2, k, None)
                if v is not None and not callable(v):
                    self.assertEqual(v, getattr(msg1, k, None))
                seen.add(k)

        for d in testlist:
            description = d["msg"]
            scripthex = d["script"]
            code = d["code"]
            if code is not None:
                # we are only interested in "None" tests, that is, ones
                # that are expected to parse as valid
                continue
            if scripthex is None:
                continue
            with self.subTest(description=description, script=scripthex):
                sco = address.ScriptOutput(bytes.fromhex(scripthex))
                slp_sco = slp.ScriptOutput(sco.script)  # should not raise
                _type = slp_sco.message.transaction_type
                if _type == "GENESIS":
                    try:
                        outp = slp.Build.GenesisOpReturnOutput_V1(
                            ticker=slp_sco.message.ticker.decode("utf-8"),
                            token_name=slp_sco.message.token_name.decode("utf-8"),
                            token_document_url=slp_sco.message.token_doc_url
                            and slp_sco.message.token_doc_url.decode("utf-8"),
                            token_document_hash_hex=slp_sco.message.token_doc_hash
                            and slp_sco.message.token_doc_hash.decode("utf-8"),
                            decimals=slp_sco.message.decimals,
                            baton_vout=slp_sco.message.mint_baton_vout,
                            initial_token_mint_quantity=slp_sco.message.initial_token_mint_quantity,
                            token_type=slp_sco.message.token_type,
                        )
                    except (UnicodeError, slp.OPReturnTooLarge):
                        # some of the test data doesn't decode to utf8 because it contains 0xff
                        # some of the test data has too-big op_return
                        continue
                    check_is_equal_message(slp_sco.message, outp[1].message)
                elif _type == "MINT":
                    try:
                        outp = slp.Build.MintOpReturnOutput_V1(
                            token_id_hex=slp_sco.message.token_id_hex,
                            baton_vout=slp_sco.message.mint_baton_vout,
                            token_mint_quantity=slp_sco.message.additional_token_quantity,
                            token_type=slp_sco.message.token_type,
                        )
                    except (UnicodeError, slp.OPReturnTooLarge):
                        continue
                    check_is_equal_message(slp_sco.message, outp[1].message)
                elif _type == "SEND":
                    try:
                        outp = slp.Build.SendOpReturnOutput_V1(
                            token_id_hex=slp_sco.message.token_id_hex,
                            output_qty_array=slp_sco.message.token_output[1:],
                            token_type=slp_sco.message.token_type,
                        )
                    except (UnicodeError, slp.OPReturnTooLarge):
                        continue
                    check_is_equal_message(slp_sco.message, outp[1].message)
                elif _type == "COMMIT":
                    continue
                else:
                    # BUG FIX: error message previously read "transation_type"
                    raise RuntimeError("Unexpected transaction_type")
            ctr += 1
if __name__ == "__main__":
    # Allow running this test module directly (outside a test runner).
    unittest.main()
|
"""Expectations that can be held against metrics collected in Graphite"""
from abc import abstractmethod
SECONDS_PER_MINUTE = 60
SECONDS_PER_HOUR = SECONDS_PER_MINUTE * 60
SECONDS_PER_DAY = SECONDS_PER_HOUR * 24
def _avg(readings):
"""Python 2.7 does not have an average function"""
return sum(readings, 0.0) / len(readings)
def _delta_str(delta):
"""Convert a timedelta to a nice string.
timedelta.__str__ prints out days and times awkwardly.
"""
days, rem = divmod(delta.total_seconds(), SECONDS_PER_DAY)
hours, rem = divmod(rem, SECONDS_PER_HOUR)
minutes, rem = divmod(rem, SECONDS_PER_MINUTE)
result = []
if days:
result.append('{0} day(s)'.format(days))
if hours:
result.append('{0} hour(s)'.format(hours))
if minutes:
result.append('{0} minute(s)'.format(minutes))
return ', '.join(result)
class GraphiteExpectation(object):
    """An expectation placed on a list of Graphite readings."""

    def __init__(self, validation, name):
        self._validation = validation
        self._name = name

    @abstractmethod
    def validate(self, readings, time_range):
        """make sure the expectation is met"""
        pass

    def _validate(self, bad_readings, higher_values_are_worse):
        """Derived instances should call this method passing it any readings
        that were outside of specified parameters.
        """
        num_bad_readings = len(bad_readings)
        if num_bad_readings:
            # Deduplicate and show the worst examples first.
            bad_readings = list(set(bad_readings))
            bad_readings.sort(reverse=higher_values_are_worse)
            # BUG FIX: the original concatenated two literals with "+" and
            # called .format() on the second literal only, so "{0}" and
            # "{1}" were emitted verbatim.  Implicit literal concatenation
            # lets a single .format() fill in all three fields.
            self._validation.fail(
                "In the last {0} there were {1} readings that "
                "exceeded allowed parameters. For example: {2}"
                .format(_delta_str(self._validation.time_range),
                        num_bad_readings,
                        ', '.join([str(x) for x in bad_readings[:20]])))

    def _validate_avg(self, average, is_bad_average):
        """Derived instances should call this method passing the average
        reading and whether or not that average was within specified
        parameters
        """
        if is_bad_average:
            self._validation.fail(
                "In the last {0} the average reading was {1}"
                .format(_delta_str(self._validation.time_range), average))
class GreaterThanExpectation(GraphiteExpectation):
    """Expect that a graphite metric is greater than a specified number."""

    def __init__(self, validation, lower_bound):
        GraphiteExpectation.__init__(
            self, validation,
            "All values must be greater than {0}".format(lower_bound))
        self._lower_bound = lower_bound

    def validate(self, readings, time_range):
        # Readings at or below the bound are violations; None means no data.
        too_low = [reading for reading in readings
                   if reading is not None and reading <= self._lower_bound]
        self._validate(too_low, False)

    def __repr__(self):
        return "{}: all > {} on {}".format(
            type(self).__name__, self._lower_bound, self._validation)
class LessThanExpectation(GraphiteExpectation):
    """Expect that a graphite metric is less than than a specified number."""

    def __init__(self, validation, upper_bound):
        GraphiteExpectation.__init__(
            self, validation,
            "All values must be less than {0}".format(upper_bound))
        self._upper_bound = upper_bound

    def validate(self, readings, time_range):
        # Readings at or above the bound are violations; None means no data.
        too_high = [reading for reading in readings
                    if reading is not None and reading >= self._upper_bound]
        self._validate(too_high, True)

    def __repr__(self):
        return "{}: all < {} on {}".format(
            type(self).__name__, self._upper_bound, self._validation)
class AverageGreaterThanExpectation(GraphiteExpectation):
    """Expect that the average of a graphite metric is greater than a
    specified number
    """
    def __init__(self, validation, lower_bound):
        GraphiteExpectation.__init__(self, validation,
            "Average of all values must be greater than {0}"
            .format(lower_bound))
        self._lower_bound = lower_bound
    def validate(self, readings, time_range):
        # Average over non-None readings; at-or-below the bound is a failure.
        average = _avg([x for x in readings if x is not None])
        self._validate_avg(average, average <= self._lower_bound)
    def __repr__(self):
        # BUG FIX: previously formatted self._upper_bound, which this class
        # never defines (it stores _lower_bound), so repr() raised
        # AttributeError.
        return "{}: average > {} on {}".format(type(self).__name__, self._lower_bound, self._validation)
class AverageLessThanExpectation(GraphiteExpectation):
    """Expect that the average of a graphite metric is less than a
    specified number
    """

    def __init__(self, validation, upper_bound):
        message = "Average of all values must be less than {0}".format(
            upper_bound)
        GraphiteExpectation.__init__(self, validation, message)
        self._upper_bound = upper_bound

    def validate(self, readings, time_range):
        # Average over non-None readings; at-or-above the bound is a failure.
        present = [x for x in readings if x is not None]
        average = _avg(present)
        self._validate_avg(average, average >= self._upper_bound)

    def __repr__(self):
        return "{}: average < {} on {}".format(
            type(self).__name__, self._upper_bound, self._validation)
|
#!/usr/bin/env python
import sys
import os
import socket
import string
import time
import locale
import codecs
import urllib2
import re
import HTMLParser
import json
import urllib
def decode(bytes):
    """Decode a byte string to text, trying UTF-8, then ISO-8859-1, then
    CP1252 (first one that succeeds wins)."""
    for encoding in ('utf-8', 'iso-8859-1'):
        try:
            return bytes.decode(encoding)
        except UnicodeDecodeError:
            continue
    return bytes.decode('cp1252')
def encode(bytes):
    """Coerce a str/unicode value to an encoded byte string.

    NOTE(review): Python 2 only -- relies on the `unicode` builtin.  Shadows
    the `bytes` builtin with its parameter name.
    """
    try:
        # Probe: does the input decode as plain ASCII?
        unicode(bytes, "ascii")
    except UnicodeError:
        # Non-ASCII str: reinterpret as UTF-8, replacing bad sequences.
        bytes = unicode(bytes, 'utf-8', errors="replace")
    else:
        bytes = bytes
    try:
        text = bytes.encode('utf-8')
    except UnicodeEncodeError:
        try:
            text = bytes.encode('iso-8859-1')
        except UnicodeEncodeError:
            text = bytes.encode('cp1252')
    return text
def fix_urls(text):
    """Return the first http(s) URL found in `text`, or None if there is none.

    BUG FIX: the original called .group() on re.search()'s result
    unconditionally, so text containing no URL raised AttributeError.  The
    pattern is now a raw string as well (the old "[^\\s]" literal relied on
    Python passing unknown escapes through).
    """
    match = re.search(r"(?P<url>https?://\S+)", text)
    if match is None:
        return None
    return match.group("url")
def google(q):
    """Return the URL of the top Google web-search hit for `q`, or u'N/A'.

    NOTE(review): Python 2 only (urllib.urlencode/urlopen).  Targets the
    long-retired Google AJAX Search API endpoint, so this likely no longer
    returns results -- verify before relying on it.
    """
    try:
        query = urllib.urlencode({'q': encode(q)})
        url = 'http://ajax.googleapis.com/ajax/services/search/web?v=1.0&%s' % query
        search_response = urllib.urlopen(url)
        search_results = search_response.read()
        results = json.loads(search_results)
        data = results['responseData']
        #print 'Total results: %s' % data['cursor']['estimatedResultCount']
        hits = data['results']
        #print 'Top %d hits:' % len(hits)
        #for h in hits: print ' ', h['url']
        #print 'For more results, see %s' % data['cursor']['moreResultsUrl']
        #print hits[0]['url']
        return hits[0]['url']
    except:
        # Bare except: any failure (network, JSON shape, zero hits) -> N/A.
        return u'N/A'
def calc(q):
    """Evaluate `q` via the Google calculator endpoint; return "lhs = rhs"
    or u'N/A' on any failure.

    NOTE(review): Python 2 only; the /ig/calculator endpoint is long
    retired -- verify before relying on it.
    """
    try:
        query = urllib.urlencode({'q': encode(q)})
        url = 'http://www.google.com/ig/calculator?%s' %query
        search_response = urllib.urlopen(url)
        search_results = search_response.read()
        j = search_results
        # The endpoint returns JS-style object notation with unquoted keys;
        # these substitutions quote the keys so json.loads can parse it.
        j = re.sub(r"{\s*(\w)", r'{"\1', j)
        j = re.sub(r",\s*(\w)", r',"\1', j)
        j = re.sub(r"(\w):", r'\1":', j)
        j = encode(j)
        results = json.loads(j)
        return results['lhs'] + " = " + results['rhs']
    except:
        # Bare except: any failure (network, parse, missing keys) -> N/A.
        return u'N/A'
def restart_program():
    """Re-exec the current interpreter with the original argv.

    Note: this function does not return.  Any cleanup action (like saving
    data) must be done before calling it.
    """
    interpreter = sys.executable
    os.execl(interpreter, interpreter, *sys.argv)
try:
import cPickle as pickle
except:
import pickle
import pprint
import string
#string.split(the_string, the_separator)
#the_string.split(the_separator[,the_limit])
def saveData(db, data):
    """Pickle `data` to data/<db>.pkl.

    FIX: uses a context manager so the file is closed even if pickling
    raises; print is called as a function (valid in both Python 2 and 3).
    """
    print(db)
    with open('data/' + db + '.pkl', 'wb') as output:
        pickle.dump(data, output)
def loadData(db):
    """Unpickle and return data/<db>.pkl.

    If the file is missing or unreadable, create it holding an empty list
    and return [] (preserving the original best-effort behaviour, but
    catching Exception rather than a bare except so SystemExit and
    KeyboardInterrupt still propagate).
    """
    try:
        with open('data/' + db + '.pkl', 'rb') as pkl_file:
            data = pickle.load(pkl_file)
    except Exception:
        saveData(db, [])
        data = []
    return data
modules = []  # registered modules: [id, func, trigger] entries
def loadModule(id):
    """Import (or re-import) modules.<id> and register its func/trigger.

    If a module with the same id is already registered it is removed first,
    then the fresh one is appended.

    NOTE(review): Python 2 only -- print statement, the `reload` builtin,
    and __import__(..., level=-1).
    """
    print "loading Module [" + id + "]"
    _temp = __import__("modules." + id, globals(), locals(), ['func','trigger'], -1)
    _temp = reload(_temp)
    moduleExists=0
    for m in modules:
        if m[0] == id:
            #m = [id, _temp.func, _temp.trigger]
            removeModule(id)
            #moduleExists=1
            break
    if moduleExists == 0:
        modules.append([id, _temp.func, _temp.trigger])
def removeModule(id):
    """Unregister the first loaded module whose id matches `id`."""
    global modules
    for index, entry in enumerate(modules):
        if entry[0] == id:
            del modules[index]
            break
def reloadModule(id):
    """Convenience wrapper: drop a module and import it again."""
    removeModule(id)
    loadModule(id)
# Register the bot's command modules.
#loadModule("title") #url announcer (triggered by a url posted)
loadModule("google") #google search (!go SEARCHTERM)
loadModule("calc") #google calc (!calc 1*PI or !calc 1 metre in miles or !calc 1 euro in dollars)
loadModule("shout") #just a shout module YES YOU HEARD RIGHT
userdata = loadData('users')
# IRC connection settings.
HOST="irc.rizon.net"
PORT=6667
NICK="raspibot"
IDENT="onebot"
REALNAME="OneBot"
CHANNEL="#IRCCHANNEL"
readbuffer=""  # holds any partial line left over from the last recv()
# Connect and perform the IRC registration handshake (NICK then USER).
s=socket.socket( )
s.connect((HOST, PORT))
s.send("NICK %s\r\n" % NICK)
s.send("USER %s %s bla :%s\r\n" % (IDENT, HOST, REALNAME))
user = []  # known nicks in CHANNEL, carrying their @/%/+ mode prefixes
while 1:
readbuffer=readbuffer+s.recv(1024)
temp=string.split(readbuffer, "\n")
readbuffer=temp.pop( )
for line in temp:
rawline = line
nick = string.split(line, "!")
nick = nick[0]
nick = nick[1:]
#print "current nick:" + nick
msg = line.split(":", 2)
if len(msg) > 2:
msg = msg[2]
#print "msg:" + msg
else:
msg = ""
#print line
line=string.rstrip(line)
line=string.split(line)
#print "ID: " + m[0] + " TRIGGER:" + m[2]
for m in modules:
result = ""
#print type(m[2])
if type(m[2]) is list:
for t in m[2]:
if t in msg:
result = m[1](msg.replace(t, '').strip())
break
elif m[2] != "":
if m[2] in msg:
result = m[1](msg.replace(m[2], '').strip())
else:
result = m[1](msg)
if result != "":
s.send("PRIVMSG " + CHANNEL + " :" + result + " \r\n")
if "/MOTD" in line:
print "Connecting..."
time.sleep(5)
s.send("JOIN " + CHANNEL + " \r\n")
if line[1] == "353":
data = string.split(rawline, ":")
names = string.split(data[2])
user = user + names
for u in user:
print u
if len(line) == 5 and line[1] == "MODE":
if line[3] == "+o":
user = [w.replace(line[4], '@' + line[4]) for w in user]
if line[3] == "-o":
user = [w.replace('@' + line[4], line[4]) for w in user]
#print line[3]
#print line[4]
#if "!list" in rawline:
#for u in user:
# s.send("PRIVMSG " + nick + " :" + u + " \r\n")
#if "linux" in line:
# print "<===============Linux==========>"
# s.send("PRIVMSG " + CHANNEL + " :I think GNU/Linux is cool!!!RMS IS GOD! \r\n")
if '@' + nick in user or '+@' + nick in user or '%' + nick in user or '+%' + nick:
if "!quit" in rawline:
saveData('users', userdata )
s.send("QUIT :RIP :( \r\n")
sys.exit()
if "!restart" in rawline:
saveData('users', userdata )
s.send("QUIT :Brb restarting :3 \r\n")
restart_program()
if "!loadModule" in rawline:
q = msg.split()
q = " ".join(q[1::])
try:
loadModule(q)
s.send("PRIVMSG " + CHANNEL + " :module [" + q + "] loaded \r\n")
except:
s.send("PRIVMSG " + CHANNEL + " :module [" + q + "] could not be loaded \r\n")
if "!removeModule" in rawline:
q = msg.split()
q = " ".join(q[1::])
try:
removeModule(q)
s.send("PRIVMSG " + CHANNEL + " :module [" + q + "] removed \r\n")
except:
s.send("PRIVMSG " + CHANNEL + " :module [" + q + "] could not be removed \r\n")
if "!helpdklfjlsjflsjflsjfljslfjlsfjlsjflsjflsdjf" in rawline:
s.send("PRIVMSG " + nick + " :Raspibot has the following features \r\n")
s.send("PRIVMSG " + nick + " :to search google use: !go [KEYWORD] OR !google [KEYWORD] \r\n")
s.send("PRIVMSG " + nick + " :to search wolfram alpha use: !wa [KEYWORD] OR raspibot, compute [KEYWORD] \r\n")
s.send("PRIVMSG " + nick + " :to use googles calculator and currency converter use !calc [FORMULA OR CURRENCIES] \r\n")
s.send("PRIVMSG " + nick + " :write in all caps to trigger the shout module \r\n")
s.send("PRIVMSG " + nick + " :and pls don't be a dick. thx :3 \r\n")
if "!load" in rawline:
userdata = loadData('users')
if "!save" in rawline:
saveData('users', userdata)
if(line[0]=="PING"):
s.send("PONG %s\r\n" % line[1]) |
# Localized "Contains Ads" labels as shown on the Play-Store app page, keyed
# by store language code.  Entries whose value is the English string appear to
# have had no distinct translation at capture time — TODO confirm.
CONTAINS_ADS_MESSAGES = {
    "af": "Bevat advertensies",
    "sq": "Contains Ads",
    "en": "Contains Ads",
    "ar": "تتضمن إعلانات",
    "az": "Contains Ads",
    "eu": "Contains Ads",
    "be": "Утрымлівае аб'явы",
    "bn": "Contains Ads",
    "hi": "इसमें विज्ञापन शामिल हैं",
    "bs": "Contains Ads",
    "bg": "Съдържа реклами",
    "ca": "Conté anuncis",
    "zh": "含廣告內容",
    "hr": "Sadrži oglase",
    "cs": "Obsahuje reklamy",
    "da": "Indeholder annoncer",
    "nl": "Bevat advertenties",
    "et": "Sisaldab reklaame",
    "fi": "Sisältää mainoksia",
    "fr": "Contient des annonces",
    "gl": "Contains Ads",
    "ka": "Contains Ads",
    "de": "Enthält Werbung",
    "el": "Περιέχει διαφημίσεις",
    "gu": "Contains Ads",
    "iw": "מכיל מודעות",
    "hu": "Hirdetéseket tartalmaz",
    "is": "Contains Ads",
    "in": "Berisi Iklan",
    "it": "Contiene annunci",
    "ja": "広告を含む",
    "kn": "Contains Ads",
    "ko": "광고 포함",
    "lv": "Ietver reklāmas",
    "lt": "Yra skelbimų",
    "mk": "Contains Ads",
    "ms": "Mengandungi Iklan",
    "ml": "Contains Ads",
    "mr": "Contains Ads",
    "ne": "Contains Ads",
    "no": "Inneholder annonser",
    "fa": "Contains Ads",
    "pl": "Zawiera reklamy",
    "pt": "Contém anúncios",
    "pa": "Contains Ads",
    "ro": "Conține anunțuri",
    "ru": "Есть реклама",
    "sr": "Садржи огласе",
    "si": "Contains Ads",
    "sk": "Obsahuje reklamy",
    "sl": "Vsebuje oglase",
    "es": "Contiene anuncios",
    "sw": "Ina Matangazo",
    "sv": "Innehåller annonser",
    "fil": "May Mga Ad",
    "ta": "Contains Ads",
    "te": "Contains Ads",
    "th": "มีโฆษณา",
    "tr": "Reklam İçeriyor",
    "uk": "Містить рекламу",
    "ur": "Contains Ads",
    "uz": "Contains Ads",
    "vi": "Chứa quảng cáo",
    "zu": "Iqukethe izikhangiso"
}
# Localized Play-Store "app details" section titles, keyed by language code.
# POSITION ORDER MATTERS: get_title_normalization_messages() maps the list
# indexes to canonical field names, so every list must keep this exact
# 12-entry order: updated, size, installs, current version, requires android,
# content rating, interactive elements, in-app products, permissions, report,
# offered by, developer.
BASE_NORMALIZATION_MESSAGES = {
    "af": ["Opgedateer", "Grootte", "Installerings", "Huidige weergawe", "Vereis Android", "Inhoudgradering", "Interaktiewe elemente", "Inprogram-produkte", "Toestemmings", "Verslag", "Aangebied deur", "Ontwikkelaar"],
    "sq": ["Përditësuar", "Madhësia", "Instalimet", "Versioni aktual", "Kërkon Android", "Vlerësimi i përmbajtjes", "Elementet interaktive", "Produktet brenda aplikacionit", "Autorizimet", "Raporto", "Ofruar nga", "Zhvilluesi"],
    "sm": ["Updated", "Size", "Installs", "Current Version", "Requires Android", "Content Rating", "Interactive Elements", "In-app Products", "Permissions", "Report", "Offered By", "Developer"],
    "ar": ["تم التحديث", "الحجم", "عمليات التثبيت", "الإصدار الحالي", "يتطلب Android", "تقييم المحتوى", "العناصر التفاعلية", "منتجات داخل التطبيقات", "الأذونات", "التقرير", "تقديم", "مطوّر البرامج"],
    "az": ["Güncəlləşdirildi", "Ölçü", "Quraşdırmalar", "Mövcud Versiya", "Android tələb olunur", "Məzmun Reytinqi", "İnteraktiv Elementlər", "Tətbiqdaxili Məhsullar", "İcazələr", "Bildirin", "Təklif edən", "Hazırlayan"],
    "eu": ["Eguneratze-data", "Tamaina", "Instalazio kopurua", "Uneko bertsioa", "Android behar da", "Edukien balorazioa", "Elementu interaktiboak", "Aplikazioko produktuak", "Baimenak", "Txostena", "Hornitzailea", "Garatzailea"],
    "be": ["Абноўлена:", "Памер", "Усталёўкі", "Гэта версія", "Патрабуецца Android", "Ацэнка змесціва", "Інтэрактыўныя элементы", "Прадукты для продажу з праграмы", "Дазволы", "Паскардзіцца", "Пастаўшчык", "Распрацоўшчык"],
    "bn": ["আপডেট করা হয়েছে", "সাইজ", "ইনস্টল করার সংখ্যা", "বর্তমান ভার্সন", "Android প্রয়োজন", "কন্টেন্টের রেটিং", "ইন্টার্যাক্টিভ উপাদান", "অ্যাপ-মধ্যস্থ প্রোডাক্ট", "অনুমতিগুলি", "অভিযোগ জানান", "নিয়ে এসেছে", "ডেভেলপার"],
    "bh": ["अपडेट करने की तारीख", "आकार", "इंस्टॉल की संख्या", "वर्तमान वर्शन", "Android ज़रूरी है", "कॉन्टेंट रेटिंग", "इंटरैक्टिव तत्व", "ऐप्लिकेशन के उत्पाद", "अनुमतियां", "रिपोर्ट", "इनकी ओर से ऑफ़र किया गया", "डेवलपर"],
    "bs": ["Ažurirano", "Veličina", "Instalacije", "Trenutna verzija", "Zahtijeva Android", "Kategorizacija sadržaja", "Interaktivni elementi", "Proizvodi u aplikaciji", "Odobrenja", "Izvještaj", "Ponuđač", "Razvojni programer"],
    "bg": ["Актуализирано", "Размер", "Инсталирания", "Текуща версия", "Изисква Android", "Класификация на съдържанието", "Интерактивни елементи", "Продукти в приложението", "Разрешения", "Подаване на сигнал", "Предлага се от", "Програмист"],
    "ca": ["Actualitzada", "Mida", "Instal·lacions", "Versió actual", "Requereix Android", "Classificació del contingut", "Elements interactius", "Productes de compra a l'aplicació", "Permisos", "Informes", "Oferta per", "Desenvolupador"],
    "zh-CN": ["更新日期", "大小", "安装次数", "当前版本", "Android 系统版本要求", "内容分级", "互动元素", "应用内商品", "权限", "报告", "提供者:", "开发者"],
    "zh-TW": ["更新日期", "大小", "安裝次數", "目前版本", "Android 最低版本需求", "內容分級", "互動式元素", "應用程式內產品", "權限", "報告", "提供者:", "開發人員"],
    "hr": ["Ažurirano", "Veličina", "Instalacije", "Trenutačna verzija", "Zahtijeva Android", "Ocjenjivanje sadržaja", "Interaktivni elementi", "Proizvodi ponuđeni putem aplikacije", "Dopuštenja", "Izvješće", "Ponuditelj", "Razvojni programer"],
    "cs": ["Aktualizováno", "Velikost", "Instalace", "Aktuální verze", "Vyžaduje Android", "Hodnocení obsahu", "Interaktivní prvky", "Produkty v aplikacích", "Oprávnění", "Přehled", "Od vývojáře", "Vývojář"],
    "da": ["Opdateret", "Størrelse", "Installationer", "Aktuel version", "Kræver Android", "Indholdsklassificering", "Interaktive elementer", "Produkter i appen", "Tilladelser", "Rapport", "Udbydes af", "Udvikler"],
    "nl": ["Bijgewerkt", "Grootte", "Installaties", "Huidige versie", "Android vereist", "Beoordeling van content", "Interactieve elementen", "In-app-producten", "Rechten", "Melden", "Aangeboden door", "Distributieovereenkomst"],
    "en": ["Updated", "Size", "Installs", "Current Version", "Requires Android", "Content Rating", "Interactive Elements", "In-app Products", "Permissions", "Report", "Offered By", "Developer"],
    "eo": ["Updated", "Size", "Installs", "Current Version", "Requires Android", "Content Rating", "Interactive Elements", "In-app Products", "Permissions", "Report", "Offered By", "Developer"],
    "et": ["Värskendatud", "Suurus", "Installimised", "Praegune versioon", "Nõuab Androidi", "Sisu reiting", "Interaktiivsed elemendid", "Rakendusesisesed tooted", "Load", "Aruanne", "Pakub:", "Arendaja"],
    "fo": ["Opdateret", "Størrelse", "Installationer", "Aktuel version", "Kræver Android", "Indholdsklassificering", "Interaktive elementer", "Produkter i appen", "Tilladelser", "Rapport", "Udbydes af", "Udvikler"],
    "fi": ["Päivitetty", "Koko", "Asennukset", "Nykyinen versio", "Vaatii Android-version", "Sisällön ikärajoitus", "Interaktiiviset elementit", "Sovelluksen sisäiset tuotteet", "Käyttöoikeudet", "Raportti", "Tuotteen tarjoaa", "Kehittäjä"],
    "fr": ["Mise à jour", "Taille", "Installations", "Version actuelle", "Nécessite Android", "Classification du contenu", "Éléments interactifs", "Produits intégrés à l'application", "Autorisations", "Rapport", "Proposée par", "Développeur"],
    "fy": ["Bijgewerkt", "Grootte", "Installaties", "Huidige versie", "Android vereist", "Beoordeling van content", "Interactieve elementen", "In-app-producten", "Rechten", "Melden", "Aangeboden door", "Distributieovereenkomst"],
    "gl": ["Data de actualización", "Tamaño", "Instalacións", "Versión actual", "Require Android", "Valoración de contido", "Elementos interactivos", "Produtos dentro da aplicación", "Permisos", "Denunciar", "Ofrecida por", "Programador"],
    "ka": ["განახლდა", "ზომა", "ინსტალაციები", "მიმდინარე ვერსია", "საჭიროა Android", "შემცველობის რეიტინგი", "ინტერაქტიული ელემენტები", "აპს-შიდა პროდუქტები", "ნებართვები", "ანგარიში", "შემოთავაზებული", "დეველოპერი"],
    "de": ["Aktualisiert", "Größe", "Installationen", "Aktuelle Version", "Erforderliche Android-Version", "Altersfreigabe", "Interaktive Elemente", "In-App-Produkte", "Berechtigungen", "Melden", "Angeboten von", "Entwickler"],
    "el": ["Ενημερώθηκε", "Μέγεθος", "Εγκαταστάσεις", "Τρέχουσα έκδοση", "Απαιτεί Android", "Αξιολόγηση περιεχομένου", "Διαδραστικά στοιχεία", "Προϊόντα εντός εφαρμογής", "Άδειες", "Αναφορά", "Προσφέρεται από", "Προγραμματιστής"],
    "gu": ["અપડેટ કરેલ", "કદ", "ઇન્સ્ટૉલ કરે છે", "વર્તમાન વર્ઝન", "Android આવશ્યક છે", "કન્ટેન્ટનું રેટિંગ", "ક્રિયા-પ્રતિક્રિયાત્મક તત્વો", "ઍપ્લિકેશનમાં ખરીદી માટે પ્રોડક્ટ", "પરવાનગીઓ", "જાણ કરો", "આના દ્વારા ઑફર કરાયું", "ડેવલપર"],
    "iw": ["עדכון אחרון", "גודל", "התקנות", "גרסה נוכחית", "נדרש Android", "סיווג תוכן", "אלמנטים אינטראקטיביים", "מוצרים מתוך האפליקציה", "הרשאות", "דוח", "מבצע של", "מפתח"],
    "hi": ["अपडेट करने की तारीख", "आकार", "इंस्टॉल की संख्या", "वर्तमान वर्शन", "Android ज़रूरी है", "कॉन्टेंट रेटिंग", "इंटरैक्टिव तत्व", "ऐप्लिकेशन के उत्पाद", "अनुमतियां", "रिपोर्ट", "इनकी ओर से ऑफ़र किया गया", "डेवलपर"],
    "hu": ["Frissítve", "Méret", "Telepítések", "Aktuális verzió", "Követelmény: Android", "Tartalom besorolása", "Interaktív elemek", "Alkalmazáson belüli termékek", "Engedélyek", "Jelentés", "Forrás", "Fejlesztő"],
    "is": ["Uppfært", "Stærð", "Uppsetningar", "Núverandi útgáfa", "Krefst Android", "Efnisflokkun", "Gagnvirkar einingar", "Vörur í forriti", "Heimildir", "Tilkynning", "Í boði frá", "Þróunaraðili"],
    "id": ["Diupdate", "Ukuran", "Instal", "Versi Saat Ini", "Perlu Android versi", "Rating Konten", "Elemen Interaktif", "Produk Dalam Aplikasi", "Izin", "Laporan", "Ditawarkan Oleh", "Developer"],
    "ia": ["Updated", "Size", "Installs", "Current Version", "Requires Android", "Content Rating", "Interactive Elements", "In-app Products", "Permissions", "Report", "Offered By", "Developer"],
    "ga": ["Updated", "Size", "Installs", "Current Version", "Requires Android", "Content Rating", "Interactive Elements", "In-app Products", "Permission", "Report", "Offered By", "Developer"],
    "it": ["Aggiornata", "Dimensioni", "Installazioni", "Versione corrente", "È necessario Android", "Classificazione contenuti", "Elementi interattivi", "Prodotti in-app", "Autorizzazioni", "Segnala", "Offerta da", "Sviluppatore"],
    "ja": ["更新日", "サイズ", "インストール", "現在のバージョン", "Android 要件", "コンテンツのレーティング", "インタラクティブな要素", "アプリ内アイテム", "権限", "レポート", "提供元", "開発元"],
    "jw": ["Diupdate", "Ukuran", "Instal", "Versi Saat Ini", "Perlu Android versi", "Rating Konten", "Elemen Interaktif", "Produk Dalam Aplikasi", "Izin", "Laporan", "Ditawarkan Oleh", "Developer"],
    "kn": ["ಅಪ್ಡೇಟ್ ಮಾಡಲಾಗಿದೆ", "ಗಾತ್ರ", "ಇನ್ಸ್ಟಾಲ್ಗಳು", "ಪ್ರಸ್ತುತ ಆವೃತ್ತಿ", "Android ಅಗತ್ಯವಿದೆ", "ವಿಷಯ ರೇಟಿಂಗ್", "ಸಂವಾದಾತ್ಮಕ ಅಂಶಗಳು", "ಅಪ್ಲಿಕೇಶನ್ನಲ್ಲಿನ ಉತ್ಪನ್ನಗಳು", "ಅನುಮತಿಗಳು", "ವರದಿ", "ಇವರು ಆಫರ್ ಮಾಡಿದ್ದಾರೆ", "ಡೆವಲಪರ್"],
    "ko": ["업데이트 날짜", "크기", "설치 수", "현재 버전", "필요한 Android 버전", "콘텐츠 등급", "상호작용 요소", "인앱 상품", "권한", "신고", "제공", "개발자"],
    "la": ["Aggiornata", "Dimensioni", "Installazioni", "Versione corrente", "È necessario Android", "Classificazione contenuti", "Elementi interattivi", "Prodotti in-app", "Autorizzazioni", "Segnala", "Offerta da", "Sviluppatore"],
    "lv": ["Atjaunināta", "Lielums", "Instalēšanas reizes", "Pašreizējā versija", "Nepieciešamā Android versija", "Satura vērtējums", "Interaktīvi elementi", "Produkti lietotnē", "Atļaujas", "Pārskats", "Nodrošina", "Izstrādātājs"],
    "lt": ["Atnaujinta", "Dydis", "Įdiegimai", "Dabartinė versija", "Būtina naudoti „Android“", "Turinio įvertinimas", "Interaktyvūs elementai", "Produktai programoje", "Leidimai", "Ataskaita", "Siūlo", "Kūrėjas"],
    "mk": ["Ажурирано", "Големина", "Инсталации", "Тековна верзија", "Потребен е Android", "Оцена на содржината", "Интерактивни елементи", "Производи во апликација", "Дозволи", "Извештај", "Понудено од", "Програмер"],
    "ms": ["Dikemas kini", "Saiz", "Pemasangan", "Versi Semasa", "Memerlukan Android", "Rating Kandungan", "Elemen Interaktif", "Produk Dalam Apl", "Kebenaran", "Laporkan", "Ditawarkan Oleh", "Pembangun"],
    "ml": ["അപ്ഡേറ്റുചെയ്തു", "വലുപ്പം", "ഇൻസ്റ്റാളുകൾ", "നിലവിലെ പതിപ്പ്", "Android ആവശ്യമാണ്", "ഉള്ളടക്ക റേറ്റിംഗ്", "സംവേദനാത്മക ഘടകങ്ങൾ", "ഇൻ-ആപ്പ് ഉൽപ്പന്നങ്ങൾ", "അനുമതികൾ", "റിപ്പോര്ട്ട്", "നൽകുന്നത്", "ഡെവലപ്പർ"],
    "mt": ["Updated", "Size", "Installs", "Current Version", "Requires Android", "Content Rating", "Interactive Elements", "In-app Products", "Permissions", "Report", "Offered By", "Developer"],
    "mr": ["अपडेट केले", "माप", "इंस्टॉल", "वर्तमान आवृत्ती", "Android आवश्यक आहे", "आशय रेटिंग", "सुसंवादी घटक", "ॲपमधील उत्पादने", "परवानग्या", "नोंदवा", "ने ऑफर केले", "डेव्हलपर"],
    "ne": ["अद्यावधिक गरियो", "आकार", "कुल स्थापनाहरू", "हालको संस्करण", "Android को आवश्यक न्यूनतम संस्करण", "सामग्रीको मूल्याङ्कन", "अन्तर्क्रियात्मक तत्त्वहरू", "अनुप्रयोगभित्रका उत्पादनहरू", "अनुमतिहरू", "रिपोर्ट गर्नुहोस्", "यसको प्रस्ताव", "विकासकर्ता"],
    "no": ["Oppdatert", "Størrelse", "Installeringer", "Gjeldende versjon", "Krever Android", "Egnethet", "Interaktive elementer", "Produkter i appen", "Tillatelser", "Rapport", "Levert av", "Utvikler"],
    "nn": ["Oppdatert", "Størrelse", "Installeringer", "Gjeldende versjon", "Krever Android", "Egnethet", "Interaktive elementer", "Produkter i appen", "Tillatelser", "Rapport", "Levert av", "Utvikler"],
    "oc": ["Mise à jour", "Taille", "Installations", "Version actuelle", "Nécessite Android", "Classification du contenu", "Éléments interactifs", "Produits intégrés à l'application", "Autorisations", "Rapport", "Proposée par", "Développeur"],
    "fa": ["تاریخ بهروزرسانی", "اندازه", "دفعات نصب", "نسخه کنونی", "نسخه Android مورد نیاز", "رتبهبندی محتوا", "عناصر تعاملی", "محصولات درون برنامه", "مجوزها", "گزارش", "ارائهکننده", "برنامهنویس"],
    "pl": ["Zaktualizowano", "Rozmiar", "Instalacje", "Aktualna wersja", "Wymaga Androida", "Ocena treści", "Elementy interaktywne", "Produkty w aplikacji", "Uprawnienia", "Zgłoś", "Sprzedawca", "Deweloper"],
    "pt-BR": ["Atualizada", "Tamanho", "Instalações", "Versão atual", "Requer Android", "Classificação do conteúdo", "Elementos interativos", "Produtos no aplicativo", "Permissões", "Relatório", "Oferecido por", "Desenvolvedor"],
    "pt-PT": ["Atualizado", "Tamanho", "Instalações", "Versão Atual", "Requer o Android", "Classificação de conteúdo", "Elementos interativos", "Produtos integrados em aplicações", "Autorizações", "Relatório", "Oferecido por", "Programador"],
    "pa": ["ਅੱਪਡੇਟ ਕੀਤੀ ਗਈ", "ਆਕਾਰ", "ਸਥਾਪਨਾਵਾਂ", "ਮੌਜੂਦਾ ਵਰਜਨ", "Android ਦੀ ਲੋੜ ਹੈ", "ਸਮੱਗਰੀ ਰੇਟਿੰਗ", "ਅੰਤਰਕਿਰਿਆਤਮਕ ਤੱਤ", "ਐਪ-ਅੰਦਰ ਉਤਪਾਦ", "ਇਜਾਜ਼ਤਾਂ", "ਰਿਪੋਰਟ ਕਰੋ", "ਇਸ ਵੱਲੋਂ ਪੇਸ਼ਕਸ਼ ਕੀਤੀ ਗਈ", "ਵਿਕਾਸਕਾਰ"],
    "ro": ["Actualizată", "Dimensiune", "Instalări", "Versiunea actuală", "Necesită Android", "Evaluarea conținutului", "Elemente interactive", "Produse în aplicație", "Permisiuni", "Raport", "Oferită de", "Dezvoltator"],
    "ru": ["Обновлено", "Размер", "Количество установок", "Текущая версия", "Требуемая версия Android", "Возрастные ограничения", "Интерактивные элементы", "Платный контент", "Разрешения", "Отчет", "Продавец", "Разработчик"],
    "gd": ["Updated", "Size", "Installs", "Current Version", "Requires Android", "Content Rating", "Interactive Elements", "In-app Products", "Permission", "Report", "Offered By", "Developer"],
    "sr": ["Ажурирано", "Величина", "Инсталирања", "Актуелна верзија", "Захтева Android", "Оцена садржаја", "Интерактивни елементи", "Производи у апликацији", "Дозволе", "Извештај", "Нуди", "Програмер"],
    "si": ["යාවත්කාලීනයි", "තරම", "ස්ථාපන", "වත්මන් අනුවාදය", "Android අවශ්යයි", "අන්තර්ගතය අගයමින්", "අන්තර්ක්රියාකාරී මූලාංග", "යෙදුම-තුළ නිෂ්පාදන", "අවසර", "වාර්තා කරන්න", "පිරිනමන ලද්දේ", "සංවර්ධක"],
    "sk": ["Aktualizované", "Veľkosť", "Inštalácie", "Aktuálna verzia", "Vyžaduje Android", "Hodnotenie obsahu", "Interaktívne prvky", "Produkty v aplikácii", "Povolenia", "Prehľad", "Od predajcu", "Vývojár"],
    "sl": ["Posodobljeno", "Velikost", "Namestitve", "Trenutna različica", "Potrebujete Android", "Ocena vsebine", "Interaktivni elementi", "Izdelki v aplkacijah", "Dovoljenja", "Prijava", "Ponudnik", "Razvijalec"],
    "es": ["Actualizada", "Tamaño", "Descargas", "Versión actual", "Requiere Android", "Clasificación de contenido", "Elementos interactivos", "Productos de compra integrados en aplicaciones", "Permisos", "Informe", "Ofrecida por", "Desarrollador"],
    "su": ["Diupdate", "Ukuran", "Instal", "Versi Saat Ini", "Perlu Android versi", "Rating Konten", "Elemen Interaktif", "Produk Dalam Aplikasi", "Izin", "Laporan", "Ditawarkan Oleh", "Developer"],
    "sw": ["Imesasishwa", "Ukubwa", "Usakinishaji", "Toleo la Sasa", "Inahitaji Android", "Daraja la Maudhui", "Vipengele Vinavyoshirikisha", "Bidhaa za ndani ya programu", "Ruhusa", "Ripoti", "Inauzwa na", "Msanidi programu"],
    "sv": ["Uppdaterad", "Storlek", "Installationer", "Aktuell version", "Kräver Android", "Innehållsrankning", "Interaktiva element", "Produkter i appar", "Behörigheter", "Rapport", "Leverantör", "Utvecklare"],
    "tl": ["Na-update", "Laki", "Mga Pag-install", "Kasalukuyang Bersyon", "Nangangailangan ng Android", "Rating ng Content", "Mga Interactive na Elemento", "Mga In-app na Produkto", "Mga Pahintulot", "Ulat", "Inaalok Ng", "Developer"],
    "ta": ["புதுப்பிக்கப்பட்டது", "அளவு", "நிறுவல்கள்", "தற்போதைய பதிப்பு", "Android தேவை", "உள்ளடக்க மதிப்பீடு", "ஊடாடத்தக்கவை", "ஆப்ஸ் சார்ந்த தயாரிப்புகள்", "அனுமதிகள்", "புகாரளி", "வழங்குபவர்", "டெவெலப்பர்"],
    "te": ["అప్డేట్ చేయబడింది", "పరిమాణం", "ఇన్స్టాల్ చేస్తుంది", "ప్రస్తుత వెర్షన్", "Android అవసరం", "కంటెంట్ రేటింగ్", "ప్రభావశీల అంశాలు", "యాప్లో ఉత్పత్తులు", "అనుమతులు", "నివేదిక", "అందించినది", "డెవలపర్"],
    "th": ["อัปเดตเมื่อ", "ขนาด", "การติดตั้ง", "เวอร์ชันปัจจุบัน", "เวอร์ชัน Android ที่กำหนด", "การจัดประเภทเนื้อหา", "องค์ประกอบแบบอินเทอร์แอกทีฟ", "ผลิตภัณฑ์ที่ซื้อในแอป", "สิทธิ์", "รายงาน", "ให้บริการโดย", "นักพัฒนาซอฟต์แวร์"],
    "ti": ["Updated", "Size", "Installs", "Current Version", "Requires Android", "Content Rating", "Interactive Elements", "In-app Products", "Permissions", "Report", "Offered By", "Developer"],
    "tr": ["Güncellendi", "Boyut", "Yükleme sayısı", "Mevcut Sürüm", "Gereken Android sürümü", "İçerik Derecelendirmesi", "Etkileşimli Öğeler", "Uygulama İçi Ürünler", "İzinler", "Rapor", "Sunan:", "Geliştirici"],
    "uk": ["Оновлено", "Розмір", "Встановлення", "Поточна версія", "Потрібна ОС Android", "Вікові обмеження", "Інтерактивні елементи", "Контент, що продається через додаток", "Дозволи", "Звіт", "Постачальник", "Розробник"],
    "ur": ["اپ ڈیٹ کردہ", "سائز", "انسٹالز", "موجودہ ورژن", "Android درکار ہے", "مواد کی درجہ بندی", "متعامل عناصر", "درون ایپ پرڈکٹس", "اجازتیں", "اطلاع دیں", "پیش کردہ منجانب", "ڈیولپر"],
    "uz": ["Yangilandi", "Hajmi", "Yuklab olingan", "Joriy versiya", "Zarur Android versiyasi", "Yoshga oid cheklov", "Interaktiv elementlar", "Pulli mahsulotlari bor", "Ruxsatlar", "Xabar berish", "Muallif", "Ishlab chiquvchi"],
    "vi": ["Đã cập nhật", "Kích thước", "Lượt cài đặt", "Phiên bản hiện tại", "Cần có Android", "Xếp hạng nội dung", "Yếu tố tương tác", "Sản phẩm trong ứng dụng", "Quyền", "Báo cáo", "Cung cấp bởi", "Nhà phát triển"],
    "cy": ["Updated", "Size", "Installs", "Current Version", "Requires Android", "Content Rating", "Interactive Elements", "In-app Products", "Permission", "Report", "Offered By", "Developer"],
    "xh": ["Updated", "Size", "Installs", "Current Version", "Requires Android", "Content Rating", "Interactive Elements", "In-app Products", "Permission", "Report", "Offered By", "Developer"],
    "zu": ["Kubuyekeziwe", "Usayizi", "Ukufakwa", "Inguqulo yamanje", "Idinga i-Android", "Isilinganiselwa sokuqukethwe", "Izinto ezibandakanyayo", "Imikhiqizo yangaphakathi nohlelo", "Izimvume", "Bika", "Kunikezelwe ngu", "Unjiniyela"]
}
def get_title_normalization_messages(language):
    """Return a mapping from localized Play-Store section titles to canonical
    field names for `language`, falling back to the English titles.

    BUG FIX: the fallback used to be the *string* "en" instead of the English
    message list, so an unknown language code made the indexing below operate
    on single characters and raise IndexError at message[2].
    """
    message = BASE_NORMALIZATION_MESSAGES.get(language, BASE_NORMALIZATION_MESSAGES["en"])
    # Indexes 8 ("Permissions") and 9 ("Report") are deliberately not mapped
    # here — presumably unused by the caller; confirm before adding them.
    return {
        message[0]: "updated",
        message[1]: "size",
        message[2]: "installs",
        message[3]: "current_version",
        message[4]: "requires_android_version",
        message[5]: "content_rating",
        message[6]: "interactive_elements",
        message[7]: "iap_range",
        message[10]: "developer",
        message[11]: "developer_info"
    }
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*- ########################################################
# ____ _ __ #
# ___ __ __/ / /__ ___ ______ ______(_) /___ __ #
# / _ \/ // / / (_-</ -_) __/ // / __/ / __/ // / #
# /_//_/\_,_/_/_/___/\__/\__/\_,_/_/ /_/\__/\_, / #
# /___/ team #
# #
# nullscan #
# A modular framework designed to chain and automate security tests #
# #
# FILE #
# geo.py #
# #
# AUTHOR #
# noptrix@nullsecurity.net #
# #
################################################################################
# sys imports
import requests
import json
# own imports
from modules.libs.base import Base, tool, timeout
class Geo(Base):
    """ GEO module (host) """

    def __init__(self, target, opts):
        """ init """
        Base.__init__(self, target, opts)
        return

    @tool
    def geoiplookup(self):
        """
        DESCR: Get country name and country code. (ext)
        TOOLS: geoiplookup
        """
        # Private addresses have no public GEO data; skip the lookup entirely.
        if self.target['privip']:
            return
        self._run_tool('geoiplookup', f"{self.target['host']}")
        return

    @tool
    def geoiplookup6(self):
        """
        DESCR: Get country name and country code. (ext)
        TOOLS: geoiplookup6
        """
        if self.target['privip']:
            return
        self._run_tool('geoiplookup6', f"{self.target['host']}")
        return

    @tool
    def geoiptools(self):
        """
        DESCR: Get GEO information via geoip.tools site. (int)
        TOOLS: python3
        """
        if self.target['privip']:
            return
        lookup_url = (
            f"http://api.ipapi.com/{self.target['host']}?access_key="
            f"{self.opts['ipapi_key']}"
        )
        # NOTE(review): verify=False (TLS verification disabled) is carried
        # over from the original implementation.
        reply = requests.get(lookup_url, verify=False,
                             headers={'User-Agent': self.useragent}).json()
        self._log('geoiptools', json.dumps(reply, indent=2, sort_keys=True))
        return
# EOF
|
import os
import sys
import numpy as np
# Maps each closing bracket to the opening bracket it must match.
pairs = {
    '}' : '{',
    ']' : '[',
    ')' : '(',
    '>' : '<'
}

# Part-1 scoring table: points awarded when a line's first illegal character
# is the given closer (expected totals are asserted in test_day10).
scores1 = {
    '}': 1197,
    ')': 3,
    ']': 57,
    '>': 25137,
}
def get_first_illegal(line):
    """Scan `line` for bracket-matching errors.

    Returns (illegal_char, None) at the first closer that does not match the
    most recent opener, or (None, remaining_open_stack) when the line is only
    incomplete (possibly with an empty stack when it balances exactly).
    """
    open_stack = []
    for symbol in line:
        if symbol not in pairs:
            open_stack.append(symbol)
            continue
        # A closer: it must match the opener on top of the stack.
        if not open_stack or open_stack.pop() != pairs[symbol]:
            return symbol, None
    return None, open_stack
def FNAME1(arr):
    """Part 1: total syntax-error score — sum the point value of the first
    illegal character found on each corrupted line of `arr`."""
    return sum(
        scores1[illegal]
        for illegal in (get_first_illegal(entry)[0] for entry in arr)
        if illegal
    )
# Part-2 scoring table: per-character value used by completion_score for each
# unmatched opener.
scores2 = {
    '{': 3,
    '(': 1,
    '[': 2,
    '<': 4,
}

# Opener -> closer map.  NOTE(review): not referenced anywhere in this part of
# the file — possibly dead code; confirm before removing.
reverse_pair = {
    '{': '}',
    '(': ')',
    '[': ']',
    '<': '>',
}
def completion_score(stack):
    """Fold the unmatched openers into the part-2 autocomplete score.

    Processes the stack from its top (most recent opener) downwards; the
    stack is emptied as a side effect, matching the original pop-based loop.
    """
    total = 0
    for opener in reversed(list(stack)):
        total = total * 5 + scores2[opener]
    del stack[:]
    return total
def FNAME2(arr):
    """Part 2: median autocomplete score over the incomplete (non-corrupted)
    lines of `arr`."""
    completion_scores = [
        completion_score(open_stack)
        for illegal, open_stack in map(get_first_illegal, arr)
        if not illegal
    ]
    return np.median(completion_scores)
def preprocess(fname):
    """Read `fname` (resolved relative to this file's directory) and return
    its lines with trailing newlines removed.

    BUG FIX: the previous implementation sliced off the last character of
    every line (`line[:-1]`), which silently truncated the final line's last
    data character whenever the file did not end with a newline.  rstrip('\n')
    removes only the newline itself.
    """
    fpath = os.path.join(os.path.dirname(os.path.abspath(__file__)), fname)
    with open(fpath) as f:
        return [line.rstrip('\n') for line in f]
def test_day10(outfile):
    """Run both parts against the sample and real inputs and print results.

    When `outfile` is given, results are appended to it in markdown form.
    NOTE(review): sys.stdout is redirected to the log file and never restored,
    and the handle is never closed — acceptable for a one-shot script run,
    but worth confirming if this is ever called from a longer-lived process.
    """
    test_arr = preprocess("test_input")
    arr = preprocess("input")
    if outfile:
        log = open(outfile, 'a')
        sys.stdout = log
        header = '## '
        codeblock = '\n```'
    else:
        header = ''
        codeblock = ''
    print(header + "Day 10 Results:" + codeblock)
    # Expected sample answers for parts 1 and 2.
    assert FNAME1(test_arr) == 26397
    print("P1:\t" + str(FNAME1(arr)))
    assert FNAME2(test_arr) == 288957
    print("P2:\t" + str(FNAME2(arr)) + codeblock)


if __name__ == "__main__":
    test_day10(None)
import FWCore.ParameterSet.Config as cms
from DQM.TrackingMonitor.LogMessageMonitor_cfi import *
# DQM monitors built by cloning LogMessageMon with the producer-module labels
# and message categories to watch at each tracking stage.

# siPixelDigis : SiPixelRawToDigi, siStripDigis : SiStripRawToDigi
# (SiStripRawToDigiUnpacker), siPixelClusters : SiPixelClusterProducer,
# siStripClusters : SiStripClusterizer
LocalRecoLogMessageMon = LogMessageMon.clone(
    pluginsMonName = 'LocalReco',
    modules = ('siPixelDigis', 'siStripDigis', 'siPixelClusters', 'siStripClusters',),
    categories = ('SiPixelRawToDigi', 'TooManyErrors', 'TooManyClusters',)
)

# apparently there are no LogError messages in
# RecoLocalTracker/SubCollectionProducers/src/TrackClusterRemover.cc,
# hence the empty categories tuple.
ClusterizerLogMessageMon = LogMessageMon.clone(
    pluginsMonName = 'TrackClusterRemover',
    modules = ('detachedTripletStepClusters', 'lowPtTripletStepClusters', 'pixelPairStepClusters', 'mixedTripletStepClusters', 'pixelLessStepClusters', 'tobTecStepClusters',), # TrackClusterRemover
    categories = ()
)

# initialStepSeeds, lowPtTripletStepSeeds, pixelPairStepSeeds,
# detachedTripletStepSeeds : TooManyClusters (SeedGeneratorFromRegionHitsEDProducer),
# photonConvTrajSeedFromSingleLeg : (PhotonConversionTrajectorySeedProducerFromSingleLeg)
SeedingLogMessageMon = LogMessageMon.clone(
    pluginsMonName = 'Seeding',
    modules = ('initialStepSeedsPreSplitting', 'initialStepSeeds', 'detachedTripletStepSeeds', 'lowPtTripletStepSeeds', 'pixelPairStepSeeds', 'mixedTripletStepSeedsA', 'mixedTripletStepSeedsB', 'pixelLessStepSeeds', 'tobTecStepSeeds', 'jetCoreRegionalStepSeeds', 'muonSeededSeedsOutIn', 'muonSeededSeedsInOut', 'photonConvTrajSeedFromSingleLeg',),
    categories = ('TooManyClusters', 'TooManyPairs', 'TooManyTriplets', 'TooManySeeds',)
)

# RecoTracker/CkfPattern/src/CkfTrackCandidateMakerBase.cc
TrackCandidateLogMessageMon = LogMessageMon.clone(
    pluginsMonName = 'TrackCandidate',
    modules = ('initialStepTrackCandidatesPreSplitting', 'initialStepTrackCandidates', 'detachedTripletStepTrackCandidates', 'lowPtTripletStepTrackCandidates', 'pixelPairStepTrackCandidates', 'mixedTripletStepTrackCandidates', 'pixelLessStepTrackCandidates', 'tobTecStepTrackCandidates', 'jetCoreRegionalStepTrackCandidates', 'muonSeededTrackCandidatesInOut', 'muonSeededTrackCandidatesOutIn', 'convTrackCandidates',),
    categories = ('TooManySeeds',)
)

# TrackProducer:FailedPropagation
TrackFinderLogMessageMon = LogMessageMon.clone(
    pluginsMonName = 'TrackFinder',
    modules = ('pixelTracks', 'initialStepTracks', 'lowPtTripletStepTracks', 'pixelPairStepTracks', 'detachedTripletStepTracks', 'mixedTripletStepTracks', 'pixelLessStepTracks', 'tobTecStepTracks', 'jetCoreRegionalStepTracks', 'muonSeededTracksOutIn', 'muonSeededTracksInOut', 'convStepTracks', 'generalTracks',),
    categories = ('FailedPropagation', 'RKPropagatorInS',)
)

# Full iterative tracking chain (all _iterN steps plus conversion modules).
FullIterTrackingLogMessageMon = LogMessageMon.clone(
    pluginsMonName = 'FullIterTracking',
    modules = (
        'initialStepSeeds_iter0',
        'initialStepTrackCandidates_iter0',
        'initialStepTracks_iter0',
        'lowPtTripletStepSeeds_iter1',
        'lowPtTripletStepTrackCandidates_iter1',
        'lowPtTripletStepTracks_iter1',
        'pixelPairStepSeeds_iter2',
        'pixelPairStepTrackCandidates_iter2',
        'pixelPairStepTracks_iter2',
        'detachedTripletStepSeeds_iter3',
        'detachedTripletStepTrackCandidates_iter3',
        'detachedTripletStepTracks_iter3',
        'mixedTripletStepSeedsA_iter4',
        'mixedTripletStepSeedsB_iter4',
        'mixedTripletStepTrackCandidates_iter4',
        'mixedTripletStepTracks_iter4',
        'pixelLessStepSeeds_iter5',
        'pixelLessStepTrackCandidates_iter5',
        'pixelLessStepTracks_iter5',
        'tobTecStepSeeds_iter6',
        'tobTecStepTrackCandidates_iter6',
        'tobTecStepTracks_iter6',
        'photonConvTrajSeedFromSingleLeg',
        'convTrackCandidates',
        'convStepTracks',
    ),
    categories = ('TooManyClusters', 'TooManyPairs', 'TooManyTriplets', 'TooManySeeds',)
)

# Same as FullIterTracking but without the photon-conversion modules.
IterTrackingLogMessageMon = LogMessageMon.clone(
    pluginsMonName = 'IterTracking',
    modules = (
        'initialStepSeeds_iter0',
        'initialStepTrackCandidates_iter0',
        'initialStepTracks_iter0',
        'lowPtTripletStepSeeds_iter1',
        'lowPtTripletStepTrackCandidates_iter1',
        'lowPtTripletStepTracks_iter1',
        'pixelPairStepSeeds_iter2',
        'pixelPairStepTrackCandidates_iter2',
        'pixelPairStepTracks_iter2',
        'detachedTripletStepSeeds_iter3',
        'detachedTripletStepTrackCandidates_iter3',
        'detachedTripletStepTracks_iter3',
        'mixedTripletStepSeedsA_iter4',
        'mixedTripletStepSeedsB_iter4',
        'mixedTripletStepTrackCandidates_iter4',
        'mixedTripletStepTracks_iter4',
        'pixelLessStepSeeds_iter5',
        'pixelLessStepTrackCandidates_iter5',
        'pixelLessStepTracks_iter5',
        'tobTecStepSeeds_iter6',
        'tobTecStepTrackCandidates_iter6',
        'tobTecStepTracks_iter6',
    ),
    categories = ('TooManyClusters', 'TooManyPairs', 'TooManyTriplets', 'TooManySeeds',)
)

# Photon-conversion modules only.
ConversionLogMessageMon = LogMessageMon.clone(
    pluginsMonName = 'Conversion',
    modules = ('photonConvTrajSeedFromSingleLeg', 'convTrackCandidates', 'convStepTracks',),
    categories = ('TooManyClusters', 'TooManyPairs', 'TooManyTriplets', 'TooManySeeds',)
)
|
load("@bazel_tools//tools/build_defs/cc:action_names.bzl", "ACTION_NAMES")
load(
"@bazel_tools//tools/cpp:cc_toolchain_config_lib.bzl",
"feature",
"flag_group",
"flag_set",
"tool_path",
)
# Link-time action set that the default linker flags apply to.
all_link_actions = [
    ACTION_NAMES.cpp_link_executable,
    ACTION_NAMES.cpp_link_dynamic_library,
    ACTION_NAMES.cpp_link_nodeps_dynamic_library,
]
def _impl(ctx):
    """Build the CcToolchainConfigInfo for the native x86_64 GCC 11.2 toolchain."""
    tool_paths = [
        tool_path(
            name = "gcc",
            path = "toolchainroot/bin/x86_64-pc-linux-gnu-g++",
        ),
        tool_path(
            name = "ld",
            # NOTE(review): the doubled "bin/bin" segment here and for "ar"
            # differs from the gcc/cpp entries — confirm against the real
            # toolchain layout before changing it.
            path = "toolchainroot/bin/bin/x86_64-pc-linux-gnu-ld",
        ),
        tool_path(
            name = "ar",
            path = "toolchainroot/bin/bin/x86_64-pc-linux-gnu-ar",
        ),
        tool_path(
            name = "cpp",
            path = "toolchainroot/bin/x86_64-pc-linux-gnu-g++",
        ),
        # The tools below are not provided by this toolchain; /bin/false makes
        # any accidental invocation fail loudly instead of silently.
        tool_path(
            name = "gcov",
            path = "/bin/false",
        ),
        tool_path(
            name = "nm",
            path = "/bin/false",
        ),
        tool_path(
            name = "objdump",
            path = "/bin/false",
        ),
        tool_path(
            name = "strip",
            path = "/bin/false",
        ),
    ]

    # Always link against the C++ standard library.
    default_linker_flags = feature(
        name = "default_linker_flags",
        enabled = True,
        flag_sets = [
            flag_set(
                actions = all_link_actions,
                flag_groups = ([
                    flag_group(
                        flags = [
                            "-lstdc++",
                        ],
                    ),
                ]),
            ),
        ],
    )

    features = [
        # BUG FIX: default_linker_flags was constructed above but never added
        # to this list, so "-lstdc++" was silently dropped from every link
        # action.
        default_linker_flags,
        feature(
            name = "default_compiler_flags",
            enabled = True,
            flag_sets = [
                flag_set(
                    actions = [ACTION_NAMES.cpp_compile],
                    flag_groups = ([
                        flag_group(
                            flags = [
                                "-no-canonical-prefixes",
                                "-fno-canonical-system-headers",
                            ],
                        ),
                    ]),
                ),
            ],
        ),
    ]

    return cc_common.create_cc_toolchain_config_info(
        ctx = ctx,
        features = features,
        cxx_builtin_include_directories = [
            "/usr/include",  # This is a native toolchain, it is NOT hermetic and WILL use computer libraries
            "toolchainroot/include/c++/11.2.0",
            "toolchainroot/include/c++/11.2.0/parallel",
            "toolchainroot/lib/gcc/x86_64-pc-linux-gnu/11.2.0/include",
            "toolchainroot/lib/gcc/x86_64-pc-linux-gnu/11.2.0/include-fixed",
        ],
        toolchain_identifier = "k8-toolchain-native-gcc",
        host_system_name = "local",
        target_system_name = "local",
        target_cpu = "k8",
        target_libc = "unknown",
        compiler = "gcc",
        abi_version = "unknown",
        abi_libc_version = "unknown",
        tool_paths = tool_paths,
    )
# Rule wrapper exposing the toolchain configuration above; takes no attributes.
cc_toolchain_gcc_native_config = rule(
    implementation = _impl,
    attrs = {},
    provides = [CcToolchainConfigInfo],
)
|
import tobii_research as tr
class EyeTracker:
    """Thin wrapper around the Tobii Pro SDK that caches the most recent
    left/right gaze points delivered by the gaze-data subscription."""

    def __init__(self):
        # `start` doubles as the "device found and subscription active" flag.
        self.start = False
        # Latest gaze point per eye, as delivered by the SDK callback
        # (presumably normalized display-area coordinates — TODO confirm).
        self.gaze = {"Left": [0, 0], "Right": [0, 0]}
        found_eyetrackers = tr.find_all_eyetrackers()
        if len(found_eyetrackers) > 0:
            print(found_eyetrackers)
            # Use the first tracker found; my_eyetracker is ONLY set on this path.
            self.my_eyetracker = found_eyetrackers[0]
            self.my_eyetracker.subscribe_to(tr.EYETRACKER_GAZE_DATA, self.gaze_data_callback, as_dictionary=True)
            self.start = True
        else:
            print("ERROR: No device connected")
            self.start = False

    def gaze_data_callback(self, gaze_data):
        """SDK callback: cache the latest gaze point of each eye."""
        self.gaze["Left"] = gaze_data['left_gaze_point_on_display_area']
        self.gaze["Right"] = gaze_data['right_gaze_point_on_display_area']

    def stop(self):
        """Unsubscribe from gaze data.

        BUG FIX: previously this dereferenced self.my_eyetracker
        unconditionally, raising AttributeError when no device had been found
        (my_eyetracker is only assigned on a successful connect in __init__).
        """
        if self.start:
            self.my_eyetracker.unsubscribe_from(tr.EYETRACKER_GAZE_DATA, self.gaze_data_callback)

    def started(self):
        """Return True when a device was found and the subscription is active."""
        return self.start

    def getGaze(self):
        """Return the {"Left": ..., "Right": ...} dict of latest gaze points."""
        return self.gaze
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import io
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
from anvil import cfg
from anvil import colorizer
from anvil import components as comp
from anvil import exceptions
from anvil import log as logging
from anvil import shell as sh
from anvil import utils
from anvil.components.helpers import db as dbhelper
from anvil.components.helpers import keystone as khelper
from anvil.components.helpers import nova as nhelper
from anvil.components.helpers import rabbit as rhelper
from anvil.components.helpers import virt as lv
LOG = logging.getLogger(__name__)
# Copies from helpers
API_CONF = nhelper.API_CONF
DB_NAME = nhelper.DB_NAME
PASTE_CONF = nhelper.PASTE_CONF
# Normal conf
POLICY_CONF = 'policy.json'
LOGGING_CONF = "logging.conf"
CONFIGS = [PASTE_CONF, POLICY_CONF, LOGGING_CONF, API_CONF]
ADJUST_CONFIGS = [PASTE_CONF]
# This is a special marker file that when it exists, signifies that nova net was inited
NET_INITED_FN = 'nova.network.inited.yaml'
# This makes the database be in sync with nova
DB_SYNC_CMD = [
{'cmd': ['$BIN_DIR/nova-manage', '--config-file', '$CFG_FILE', 'db', 'sync'], 'run_as_root': True},
]
# Used to create a fixed network when initializating nova
FIXED_NET_CMDS = [
{
'cmd': ['$BIN_DIR/nova-manage', '--config-file', '$CFG_FILE',
'network', 'create', 'private', '$FIXED_RANGE', '1', '$FIXED_NETWORK_SIZE'],
'run_as_root': True,
},
]
# Used to create a floating network + test floating pool
FLOATING_NET_CMDS = [
{
'cmd': ['$BIN_DIR/nova-manage', '--config-file', '$CFG_FILE', 'floating', 'create', '$FLOATING_RANGE'],
'run_as_root': True,
},
{
'cmd': ['$BIN_DIR/nova-manage', '--config-file', '$CFG_FILE',
'floating', 'create', '--ip_range=$TEST_FLOATING_RANGE', '--pool=$TEST_FLOATING_POOL'],
'run_as_root': True,
},
]
# Subdirs of the checkout/download
BIN_DIR = 'bin'
# This is a special conf
CLEANER_DATA_CONF = 'nova-clean.sh'
class NovaUninstaller(comp.PythonUninstallComponent):
    """Uninstall component for nova.

    Before the generic python uninstall runs, this clears libvirt guests
    started by nova and executes the generated cleaner script.
    """

    def __init__(self, *args, **kargs):
        comp.PythonUninstallComponent.__init__(self, *args, **kargs)
        # Helper used to locate/destroy libvirt domains created by nova.
        self.virsh = lv.Virsh(self.get_int_option('service_wait_seconds'), self.distro)

    def pre_uninstall(self):
        """Hook invoked before uninstall: clear guests, then run the cleaner."""
        self._clear_libvirt_domains()
        self._clean_it()

    def _clean_it(self):
        # The cleaner script is rendered into the app bin dir at install time
        # (see NovaInstaller._setup_cleaner); silently skip if never created.
        cleaner_fn = sh.joinpths(self.get_option('app_dir'), BIN_DIR, CLEANER_DATA_CONF)
        if sh.isfile(cleaner_fn):
            LOG.info("Cleaning up your system by running nova cleaner script: %s", colorizer.quote(cleaner_fn))
            # These environment additions are important
            # in that they eventually affect how this script runs
            env = {
                'ENABLED_SERVICES': ",".join(self.subsystems.keys()),
            }
            sh.execute(cleaner_fn, run_as_root=True, env_overrides=env)

    def _clear_libvirt_domains(self):
        # Only libvirt-backed deployments have guest domains to destroy.
        virt_driver = nhelper.canon_virt_driver(self.get_option('virt_driver'))
        if virt_driver == 'libvirt':
            inst_prefix = self.get_option('instance_name_prefix', 'instance-')
            libvirt_type = lv.canon_libvirt_type(self.get_option('libvirt_type'))
            self.virsh.clear_domains(libvirt_type, inst_prefix)
class NovaInstaller(comp.PythonInstallComponent):
    """Install component for nova.

    On top of the base python install this writes/adjusts nova's config
    files, prepares (drops + creates + syncs) its database, and renders the
    cleaner script consumed later by NovaUninstaller.
    """

    def __init__(self, *args, **kargs):
        comp.PythonInstallComponent.__init__(self, *args, **kargs)
        # Knows how to emit the dynamic contents of the main nova.conf.
        self.conf_maker = nhelper.ConfConfigurator(self)

    @property
    def config_files(self):
        # Return a copy so callers cannot mutate the module-level CONFIGS.
        return list(CONFIGS)

    def _filter_pip_requires_line(self, fn, line):
        # We handle these ourselves in anvil
        if utils.has_any(line.lower(), 'quantumclient', 'cinder', 'glance'):
            return None
        return line

    @property
    def env_exports(self):
        """Environment variables that should be exported for nova clients."""
        to_set = OrderedDict()
        to_set['OS_COMPUTE_API_VERSION'] = self.get_option('nova_version')
        n_params = nhelper.get_shared_params(**self.options)
        for (endpoint, details) in n_params['endpoints'].items():
            to_set[("NOVA_%s_URI" % (endpoint.upper()))] = details['uri']
        return to_set

    def verify(self):
        comp.PythonInstallComponent.verify(self)
        self.conf_maker.verify()

    def warm_configs(self):
        # Pre-fetch/prompt the rabbit credentials when rabbit is the MQ type.
        mq_type = nhelper.canon_mq_type(self.get_option('mq-type'))
        if mq_type == 'rabbit':
            rhelper.get_shared_passwords(self)

    def _sync_db(self):
        """Run `nova-manage db sync` against the configured database."""
        LOG.info("Syncing nova to database named: %s", colorizer.quote(DB_NAME))
        utils.execute_template(*DB_SYNC_CMD, params=self.config_params(None))

    def post_install(self):
        comp.PythonInstallComponent.post_install(self)
        # Extra actions to do nova setup
        if self.get_bool_option('db-sync'):
            self._setup_db()
            self._sync_db()
        self._setup_cleaner()

    def _setup_cleaner(self):
        """Render the nova cleaner shell script into the app's bin dir."""
        LOG.info("Configuring cleaner template: %s", colorizer.quote(CLEANER_DATA_CONF))
        (_fn, contents) = utils.load_template(self.name, CLEANER_DATA_CONF)
        # FIXME(harlowja), stop placing in checkout dir...
        cleaner_fn = sh.joinpths(sh.joinpths(self.get_option('app_dir'), BIN_DIR), CLEANER_DATA_CONF)
        sh.write_file(cleaner_fn, contents)
        # NOTE: 0755 is a python 2 octal literal; this codebase targets py2.
        sh.chmod(cleaner_fn, 0755)
        self.tracewriter.file_touched(cleaner_fn)

    def _setup_db(self):
        """Drop and recreate the nova database from scratch."""
        dbhelper.drop_db(distro=self.distro,
                         dbtype=self.get_option('db', 'type'),
                         dbname=DB_NAME,
                         **utils.merge_dicts(self.get_option('db'),
                                             dbhelper.get_shared_passwords(self)))
        # Explicitly use latin1: to avoid lp#829209, nova expects the database to
        # use latin1 by default, and then upgrades the database to utf8 (see the
        # 082_essex.py in nova)
        dbhelper.create_db(distro=self.distro,
                           dbtype=self.get_option('db', 'type'),
                           dbname=DB_NAME,
                           charset='latin1',
                           **utils.merge_dicts(self.get_option('db'),
                                               dbhelper.get_shared_passwords(self)))

    def _generate_nova_conf(self, fn):
        """Produce the full nova.conf contents via the conf maker."""
        LOG.debug("Generating dynamic content for nova: %s.", (fn))
        return self.conf_maker.generate(fn)

    def source_config(self, config_fn):
        """Map a target config name to its source file under etc/nova and load it."""
        if config_fn == PASTE_CONF:
            config_fn = 'api-paste.ini'
        elif config_fn == LOGGING_CONF:
            config_fn = 'logging_sample.conf'
        elif config_fn == API_CONF:
            config_fn = 'nova.conf.sample'
        fn = sh.joinpths(self.get_option('app_dir'), 'etc', "nova", config_fn)
        return (fn, sh.load_file(fn))

    def _config_adjust_paste(self, contents, fn):
        """Point the paste pipeline's authtoken filter at keystone."""
        params = khelper.get_shared_params(ip=self.get_option('ip'),
                                           service_user='nova',
                                           **utils.merge_dicts(self.get_option('keystone'),
                                                               khelper.get_shared_passwords(self)))
        with io.BytesIO(contents) as stream:
            config = cfg.RewritableConfigParser()
            config.readfp(stream)
            config.set('filter:authtoken', 'auth_host', params['endpoints']['admin']['host'])
            config.set('filter:authtoken', 'auth_port', params['endpoints']['admin']['port'])
            config.set('filter:authtoken', 'auth_protocol', params['endpoints']['admin']['protocol'])
            config.set('filter:authtoken', 'service_host', params['endpoints']['internal']['host'])
            config.set('filter:authtoken', 'service_port', params['endpoints']['internal']['port'])
            config.set('filter:authtoken', 'service_protocol', params['endpoints']['internal']['protocol'])
            config.set('filter:authtoken', 'admin_tenant_name', params['service_tenant'])
            config.set('filter:authtoken', 'admin_user', params['service_user'])
            config.set('filter:authtoken', 'admin_password', params['service_password'])
            contents = config.stringify(fn)
        return contents

    def _config_adjust_logging(self, contents, fn):
        """Force the root logger to DEBUG on stdout in the logging config."""
        with io.BytesIO(contents) as stream:
            config = cfg.RewritableConfigParser()
            config.readfp(stream)
            config.set('logger_root', 'level', 'DEBUG')
            config.set('logger_root', 'handlers', "stdout")
            contents = config.stringify(fn)
        return contents

    def _config_adjust(self, contents, name):
        # Dispatch to the per-file adjuster; API_CONF is regenerated wholesale.
        if name == PASTE_CONF:
            return self._config_adjust_paste(contents, name)
        elif name == LOGGING_CONF:
            return self._config_adjust_logging(contents, name)
        elif name == API_CONF:
            return self._generate_nova_conf(name)
        else:
            return contents

    def _config_param_replace(self, config_fn, contents, parameters):
        if config_fn in [PASTE_CONF, LOGGING_CONF, API_CONF]:
            # We handle these ourselves
            return contents
        else:
            return comp.PythonInstallComponent._config_param_replace(self, config_fn, contents, parameters)

    def config_params(self, config_fn):
        """Template parameters available when rendering configs/commands."""
        mp = comp.PythonInstallComponent.config_params(self, config_fn)
        mp['CFG_FILE'] = sh.joinpths(self.get_option('cfg_dir'), API_CONF)
        mp['BIN_DIR'] = sh.joinpths(self.get_option('app_dir'), BIN_DIR)
        return mp
class NovaRuntime(comp.PythonRuntime):
    """Runtime component for nova.

    Starts the nova-* apps, sanity-checks libvirt before start, and performs
    one-time fixed/floating network creation after start.
    """

    def __init__(self, *args, **kargs):
        comp.PythonRuntime.__init__(self, *args, **kargs)
        self.wait_time = self.get_int_option('service_wait_seconds')
        self.virsh = lv.Virsh(self.wait_time, self.distro)
        self.config_path = sh.joinpths(self.get_option('cfg_dir'), API_CONF)
        self.bin_dir = sh.joinpths(self.get_option('app_dir'), BIN_DIR)
        # Marker file: its existence means the networks were already created.
        self.net_init_fn = sh.joinpths(self.get_option('trace_dir'), NET_INITED_FN)

    def _do_network_init(self):
        """Create fixed/floating networks once, then drop a marker file."""
        ran_fn = self.net_init_fn
        if not sh.isfile(ran_fn) and self.get_bool_option('do-network-init'):
            # Figure out the commands to run
            cmds = []
            mp = {
                'CFG_FILE': self.config_path,
                'BIN_DIR': self.bin_dir
            }
            # NOTE(review): redundant; BIN_DIR was already set just above.
            mp['BIN_DIR'] = self.bin_dir
            if self.get_bool_option('enable_fixed'):
                # Create a fixed network
                mp['FIXED_NETWORK_SIZE'] = self.get_option('fixed_network_size', default_value='256')
                mp['FIXED_RANGE'] = self.get_option('fixed_range', default_value='10.0.0.0/24')
                cmds.extend(FIXED_NET_CMDS)
            if self.get_bool_option('enable_floating'):
                # Create a floating network + test floating pool
                cmds.extend(FLOATING_NET_CMDS)
                mp['FLOATING_RANGE'] = self.get_option('floating_range', default_value='172.24.4.224/28')
                mp['TEST_FLOATING_RANGE'] = self.get_option('test_floating_range', default_value='192.168.253.0/29')
                mp['TEST_FLOATING_POOL'] = self.get_option('test_floating_pool', default_value='test')
            # Anything to run??
            if cmds:
                LOG.info("Creating your nova network to be used with instances.")
                utils.execute_template(*cmds, params=mp)
            # Writing this makes sure that we don't init again
            cmd_mp = {
                'cmds': cmds,
                'replacements': mp,
            }
            sh.write_file(ran_fn, utils.prettify_yaml(cmd_mp))
            LOG.info("If you wish to re-run network initialization, delete %s", colorizer.quote(ran_fn))

    def post_start(self):
        self._do_network_init()

    @property
    def apps_to_start(self):
        """The nova-<subsystem> binaries that exist and are executable."""
        apps = []
        for (name, _values) in self.subsystems.items():
            real_name = "nova-%s" % (name)
            app_pth = sh.joinpths(self.bin_dir, real_name)
            if sh.is_executable(app_pth):
                apps.append({
                    'name': real_name,
                    'path': app_pth,
                })
        return apps

    def pre_start(self):
        """Verify libvirt is usable (when selected) before apps start."""
        # Let the parent class do its thing
        comp.PythonRuntime.pre_start(self)
        virt_driver = nhelper.canon_virt_driver(self.get_option('virt_driver'))
        if virt_driver == 'libvirt':
            virt_type = lv.canon_libvirt_type(self.get_option('libvirt_type'))
            LOG.info("Checking that your selected libvirt virtualization type %s is working and running.", colorizer.quote(virt_type))
            try:
                self.virsh.check_virt(virt_type)
                self.virsh.restart_service()
                LOG.info("Libvirt virtualization type %s seems to be working and running.", colorizer.quote(virt_type))
            except exceptions.ProcessExecutionError as e:
                msg = ("Libvirt type %r does not seem to be active or configured correctly, "
                       "perhaps you should be using %r instead: %s" %
                       (virt_type, lv.DEF_VIRT_TYPE, e))
                raise exceptions.StartException(msg)

    def app_params(self, app_name):
        params = comp.PythonRuntime.app_params(self, app_name)
        params['CFG_FILE'] = self.config_path
        return params

    def app_options(self, app):
        # Every nova app is pointed at the generated nova.conf.
        return ['--config-file', '$CFG_FILE']
class NovaTester(comp.PythonTestingComponent):
    """Testing component for nova."""

    def _get_test_exclusions(self):
        """Names of tests to skip when running the nova test suite."""
        # quantumclient is not always installed, so its tests are excluded.
        return ['test_quantumv2']
|
import pytest
from numpy.testing import assert_array_equal
from sparseba.indices import Indices, indices_are_unique
def test_indices_are_unique():
    """Construction succeeds for unique pairs and raises on duplicates."""
    # Every (viewpoint, point) pair is distinct -> no error raised.
    Indices([0, 1, 2, 0, 2, 1, 2, 0, 1],
            [0, 0, 0, 1, 1, 2, 2, 3, 3])
    # Positions 3 and 4 both hold the pair (2, 1) -> must be rejected.
    duplicated_viewpoints = [0, 1, 2, 2, 2, 1, 2, 0, 1]
    duplicated_points = [0, 0, 0, 1, 1, 2, 2, 3, 3]
    with pytest.raises(ValueError):
        Indices(duplicated_viewpoints, duplicated_points)
def test_case_1():
    """Exercise Indices lookups on a dense 4-point / 3-viewpoint mask."""
    # rows: keypoints generated from the same 3D point
    # columns: keypoints observed in the same viewpoint
    # visibility mask (with flattened keypoint indices):
    #   [1, 1, 1]  -> x_00 x_01 x_02  -> 0 1 2
    #   [1, 0, 1]  -> x_10      x_12  -> 3 4
    #   [0, 1, 1]  ->      x_21 x_22  -> 5 6
    #   [1, 1, 0]  -> x_30 x_31       -> 7 8
    indices = Indices(viewpoint_indices=[0, 1, 2, 0, 2, 1, 2, 0, 1],
                      point_indices=[0, 0, 0, 1, 1, 2, 2, 3, 3])
    assert indices.n_visible == 9
    assert indices.n_points == 4
    assert indices.n_viewpoints == 3
    # x_*0: array indices of every point visible in viewpoint 0
    assert_array_equal(indices.points_by_viewpoint(0), [0, 3, 7])
    # x_*2
    assert_array_equal(indices.points_by_viewpoint(2), [2, 4, 6])
    # x_0*: projections of point 0 in every viewpoint that observes it
    assert_array_equal(indices.viewpoints_by_point(0), [0, 1, 2])
    # x_3*
    assert_array_equal(indices.viewpoints_by_point(3), [7, 8])
    # [x_01, x_21] and [x_02, x_22] are shared between viewpoints 1 and 2
    left, right = indices.shared_point_indices(1, 2)
    assert_array_equal(left, [1, 5])
    assert_array_equal(right, [2, 6])
    # [x_00, x_10] and [x_02, x_12] between viewpoints 0 and 2
    left, right = indices.shared_point_indices(0, 2)
    assert_array_equal(left, [0, 3])
    assert_array_equal(right, [2, 4])
    # [x_00, x_30] and [x_01, x_31] between viewpoints 0 and 1
    left, right = indices.shared_point_indices(0, 1)
    assert_array_equal(left, [0, 7])
    assert_array_equal(right, [1, 8])
    # A viewpoint shares all of its own points with itself.
    left, right = indices.shared_point_indices(1, 1)
    assert_array_equal(left, [1, 5, 8])
    assert_array_equal(right, [1, 5, 8])
def test_case_2():
    """Sparser mask, plus constructor failures on empty rows/columns."""
    # n_points = 2, n_viewpoints = 3
    # X = [x_02 x_10 x_11] with flattened indices 0 1 2
    # mask:
    #   [0 0 1]  -> x_02       -> 0
    #   [1 1 0]  -> x_10 x_11  -> 1 2
    indices = Indices(viewpoint_indices=[2, 0, 1], point_indices=[0, 1, 1])
    assert indices.n_visible == 3
    assert indices.n_points == 2
    assert indices.n_viewpoints == 3
    # x_*0
    assert_array_equal(indices.points_by_viewpoint(0), [1])
    # x_*2
    assert_array_equal(indices.points_by_viewpoint(2), [0])
    # x_0*
    assert_array_equal(indices.viewpoints_by_point(0), [0])
    # x_1*
    assert_array_equal(indices.viewpoints_by_point(1), [1, 2])
    # Only [x_10] / [x_11] are shared between viewpoints 0 and 1.
    left, right = indices.shared_point_indices(0, 1)
    assert_array_equal(left, [1])
    assert_array_equal(right, [2])
    # Viewpoints 0 and 2 observe no common point.
    left, right = indices.shared_point_indices(0, 2)
    assert_array_equal(left, [])
    assert_array_equal(right, [])
    # A row (point) with no observations must be rejected:
    #   [1 0 1 0; 0 0 0 0; 0 1 1 1] -- point 1 is never seen.
    viewpoint_indices = [0, 2, 1, 2, 3]
    point_indices = [0, 0, 2, 2, 2]
    with pytest.raises(AssertionError):
        Indices(viewpoint_indices, point_indices)
    # A column (viewpoint) with no observations must be rejected:
    #   [1 0 0 1; 0 1 0 0; 0 1 0 1] -- viewpoint 2 sees nothing.
    viewpoint_indices = [0, 3, 1, 1, 3]
    point_indices = [0, 0, 1, 2, 2]
    with pytest.raises(AssertionError):
        Indices(viewpoint_indices, point_indices)
|
import itertools
import networkx as nx
import numpy as np
from arsenic import stats
from arsenic.stats import bootstrap_statistic
def test_mle_easy():
    """
    Test that the MLE for a graph with an absolute
    estimate on all nodes will recapitulate it
    """
    input_absolutes: list = [-14.0, -13.0, -9.0]
    graph = nx.DiGraph()
    # Every node carries its true absolute value (f_i) and uncertainty (f_di).
    for i, val in enumerate(input_absolutes):
        graph.add_node(i, f_i=val, f_di=0.5)
    edges = [(0, 1), (0, 2), (2, 1)]
    for node1, node2 in edges:
        # Relative edge estimates are the true differences plus uniform noise;
        # the edge uncertainty (f_dij) grows with the injected noise.
        noise = np.random.uniform(low=-1.0, high=1.0)
        diff = input_absolutes[node2] - input_absolutes[node1] + noise
        graph.add_edge(node1, node2, f_ij=diff, f_dij=0.5 + np.abs(noise))
    output_absolutes, covar = stats.mle(graph, factor="f_ij", node_factor="f_i")
    for i, _ in enumerate(graph.nodes(data=True)):
        diff = np.abs(output_absolutes[i] - input_absolutes[i])
        # NOTE(review): unseeded RNG makes this stochastic; comparing the error
        # against the covariance diagonal may be flaky -- consider seeding.
        assert (
            diff < covar[i, i]
        ), f"MLE error. Output absolute \
estimate, {output_absolutes[i]}, is too far from\
true value: {input_absolutes[i]}."
def test_mle_easy_self_edge():
    """
    Test that the MLE for a graph with an absolute
    estimate on all nodes will recapitulate it
    when a self-edge is included
    """
    input_absolutes: list = [-14.0, -13.0, -9.0]
    graph = nx.DiGraph()
    for i, val in enumerate(input_absolutes):
        graph.add_node(i, f_i=val, f_di=0.5)
    # (0, 0) is the deliberate self-edge this test adds over test_mle_easy.
    edges = [(0, 1), (0, 2), (2, 1), (0, 0)]
    for node1, node2 in edges:
        noise = np.random.uniform(low=-1.0, high=1.0)
        diff = input_absolutes[node2] - input_absolutes[node1] + noise
        graph.add_edge(node1, node2, f_ij=diff, f_dij=0.5 + np.abs(noise))
    output_absolutes, covar = stats.mle(graph, factor="f_ij", node_factor="f_i")
    for i, _ in enumerate(graph.nodes(data=True)):
        diff = np.abs(output_absolutes[i] - input_absolutes[i])
        # NOTE(review): stochastic (unseeded RNG); see test_mle_easy.
        assert (
            diff < covar[i, i]
        ), f"MLE error. Output absolute \
estimate, {output_absolutes[i]}, is too far from\
true value: {input_absolutes[i]}."
def test_mle_hard():
    """
    Test that the MLE for a graph with a node missing an absolute value
    can get it right based on relative results
    """
    input_absolutes: list = [-14.0, -13.0, -9.0]
    # make a t
    graph = nx.DiGraph()
    # Don't assign the first absolute value, check that MLE can get close to it
    for i, val in enumerate(input_absolutes):
        if i == 0:
            graph.add_node(i)
        else:
            graph.add_node(i, f_i=val, f_di=0.5)
    edges = [(0, 1), (0, 2), (2, 1)]
    for node1, node2 in edges:
        # Noisy relative estimates; uncertainty scales with the noise.
        noise = np.random.uniform(low=-1.0, high=1.0)
        diff = input_absolutes[node2] - input_absolutes[node1] + noise
        graph.add_edge(node1, node2, f_ij=diff, f_dij=0.5 + np.abs(noise))
    output_absolutes, covar = stats.mle(graph, factor="f_ij", node_factor="f_i")
    for i, _ in enumerate(graph.nodes(data=True)):
        diff = np.abs(output_absolutes[i] - input_absolutes[i])
        # NOTE(review): stochastic (unseeded RNG); see test_mle_easy.
        assert (
            diff < covar[i, i]
        ), f"MLE error. Output absolute \
estimate, {output_absolutes[i]}, is too far from\
true value: {input_absolutes[i]}."
def test_mle_relative():
    """
    Test that the MLE can get the relative differences correct
    when no absolute values are provided
    """
    input_absolutes: list = [-14.0, -13.0, -9.0]
    graph = nx.DiGraph()
    # Don't assign any absolute values
    edges = [(0, 1), (0, 2), (2, 1)]
    for node1, node2 in edges:
        # Smaller noise band (+-0.5) than the other tests since only the
        # pairwise differences are checked here.
        noise = np.random.uniform(low=-0.5, high=0.5)
        diff = input_absolutes[node2] - input_absolutes[node1] + noise
        graph.add_edge(node1, node2, f_ij=diff, f_dij=0.5 + np.abs(noise))
    output_absolutes, _ = stats.mle(graph, factor="f_ij", node_factor="f_i")
    # Check every pair of nodes: MLE differences vs. true differences.
    pairs = itertools.combinations(range(len(input_absolutes)), 2)
    for i, j in pairs:
        mle_diff = output_absolutes[i] - output_absolutes[j]
        true_diff = input_absolutes[i] - input_absolutes[j]
        # NOTE(review): stochastic (unseeded RNG); tolerance of 1.0 is loose
        # enough for the +-0.5 noise band but not guaranteed.
        assert (
            np.abs(true_diff - mle_diff) < 1.0
        ), f"Relative\
difference from MLE: {mle_diff} is too far from the\
input difference, {true_diff}"
def test_correlation_positive(fe_map):
    """
    Test that the absolute DG plots have the correct signs,
    and statistics within reasonable agreement to the example data
    in `arsenic/data/example.csv`
    """
    # `fe_map` is a fixture defined elsewhere; presumably its graph nodes
    # carry experimental/calculated free energies -- TODO confirm contract.
    nodes = fe_map.graph.nodes
    x_data = np.asarray([n[1]["exp_DG"] for n in nodes(data=True)])
    y_data = np.asarray([n[1]["calc_DG"] for n in nodes(data=True)])
    xerr = np.asarray([n[1]["exp_dDG"] for n in nodes(data=True)])
    yerr = np.asarray([n[1]["calc_dDG"] for n in nodes(data=True)])
    bss = bootstrap_statistic(x_data, y_data, xerr, yerr, statistic="rho")
    assert 0 < bss["mle"] < 1, "Correlation must be positive for this data"
    for stat in ["R2", "rho"]:
        bss = bootstrap_statistic(x_data, y_data, xerr, yerr, statistic=stat)
        # all of the statistics for this example is between 0.61 and 0.84
        assert (
            0.5 < bss["mle"] < 0.9
        ), f"Correlation must be positive for this data. {stat} is {bss['mle']}"
|
import pytest
@pytest.mark.parametrize("run", ["value_template.json"], indirect=True)
def test_value_template(run):
    """Rendered value-template outputs must match their expected contents."""
    # The `run` fixture executes the template and yields the exit status.
    assert run == 0
    expectations = [
        ("value_template_string.txt", '42 42'),
        ("value_template_file.txt", '42\n42'),
        ("value_template_empty_string.txt", ''),
        ("value_template_empty_file.txt", ''),
    ]
    for path, expected in expectations:
        with open(path) as handle:
            assert handle.read() == expected
@pytest.mark.parametrize("run", ["variable.json"], indirect=True)
def test_variable(run):
    """Every sampled variable must fall inside its declared domain."""
    # The `run` fixture executes the configuration and yields the exit status.
    assert run == 0
    with open("variable_continuous.txt") as f:
        assert -2 <= float(f.read()) < 2
    with open("variable_discrete.txt") as f:
        assert int(f.read()) in range(-42, 43)
    with open("variable_discrete_float.txt") as f:
        assert float(f.read()) in [-1.5, -0.5, 0.5, 1.5]
    with open("variable_categorical.txt") as f:
        assert f.read() in ['red', 'green', 'blue']
    with open("variable_file.txt") as f:
        lines = [x.strip() for x in f]
    # Parse after the file is closed. The previous code rebound `f` (the open
    # file handle) to a split field inside the `with` block -- it worked only
    # because the handle was no longer used, and it was confusing to read.
    assert -2 <= float(lines[0]) < 2
    discrete_str, float_str = lines[1].split()
    assert int(discrete_str) in range(-42, 43)
    assert float(float_str) in [-1.5, -0.5, 0.5, 1.5]
    assert lines[2] in ['red', 'green', 'blue']
|
# Greet the user by name.
my_name = "Steveen Echeverri"
print(f"Hello and welcome {my_name}!")
class Value(object):
    """A typed value appearing in the program being processed.

    A value is a variable, a constant, or an array reference; array
    references store a (name, index) tuple where the index is either a
    variable name (str) or a constant (int).
    """

    def __init__(self, t, v, f=False):
        '''
        t = type (VARIABLE/CONSTANT/ ARRAY)
        v = value (string / integer / tuple variable respectively)
        f = flag, True if array is DIO, PWM, TMR, etc.
        '''
        self.type = t
        self.val = v
        self.flag = f

    def is_arr_var(self):
        '''
        checks if the value is a variable indexed array
        '''
        # isinstance (vs. the old `type(...) == str`) also accepts str
        # subclasses, which is the intended meaning here.
        return self.type == 'ARR' and isinstance(self.val[1], str)

    def is_arr_const(self):
        '''
        checks if the value is a array indexed by a const
        '''
        return self.type == 'ARR' and isinstance(self.val[1], int)

    def is_any_var(self):
        '''
        checks if the value is either ARR[const] or VAR
        '''
        return self.type == 'VAR' or self.is_arr_const()

    # Read-only property aliases for the predicates above.
    arr_var = property(is_arr_var, None)
    arr_const = property(is_arr_const, None)
    any_var = property(is_any_var, None)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Console script for pimkl."""
import sys
import click
from pimkl.cli.preprocess import preprocess_data_and_inducers
from pimkl.cli.analyse import analyse, kpca
@click.command()
@click.option('-fd', '--data_csv_file', 'data_csv_files', required=True, multiple=True, type=click.Path())
@click.option('-nd', '--data_name', 'data_names', required=True, multiple=True)
@click.argument('network_csv_file', required=True, type=click.Path())
@click.argument('network_name', required=True)
@click.argument('gene_sets_gmt_file', required=True, type=click.Path())
@click.argument('gene_sets_name', required=True)
@click.argument('preprocess_dir', required=True, type=click.Path(exists=True, file_okay=False, writable=True))
@click.argument('output_dir', required=True, type=click.Path(exists=True, file_okay=False, writable=True))
@click.argument('class_label_file', required=True, type=click.Path(exists=True, file_okay=True))
@click.option('--model_name', default='EasyMKL', type=click.Choice(['EasyMKL', 'UMKLKNN', 'AverageMKL']))
# NOTE(review): the following are click *arguments* with defaults, i.e.
# optional positionals whose order matters on the command line; confirm this
# is intentional -- options would be the more conventional choice.
@click.argument('lam', default=0.2)
@click.argument('k', default=5)
@click.argument('number_of_folds', default=10)
@click.argument('max_per_class', default=20)
@click.argument('seed', default=0)
@click.argument('max_processes', default=1)
@click.argument('fold', default=-1)
def main(
    data_csv_files,
    data_names,
    network_csv_file,
    network_name,
    gene_sets_gmt_file,
    gene_sets_name,
    preprocess_dir,
    output_dir,
    class_label_file,
    model_name,
    lam,
    k,
    number_of_folds,
    max_per_class,
    seed,
    max_processes,
    fold,
):
    """Console script for a complete pimkl pipeline, including preprocessing
    and analysis. For more details consult the following console scripts, which
    are here executed in this order.
    `pimkl-preprocess --help`
    `pimkl-analyse run-performance-analysis --help`
    """
    # Step 1: build the preprocessed data and inducer matrices from the raw
    # csv inputs plus the prior-knowledge network and gene sets.
    preprocess_data_and_inducers(
        data_csv_files, data_names,
        network_csv_file, network_name,
        gene_sets_gmt_file, gene_sets_name,
        preprocess_dir,
        match_samples=True)
    # Step 2: cross-validated MKL performance analysis; returns the core of
    # the output filenames written into output_dir.
    output_filename_core = analyse(
        data_names, network_name, gene_sets_name,
        preprocess_dir, output_dir,
        class_label_file,
        model_name, lam, k,
        number_of_folds, max_per_class,
        seed, max_processes
    )
    # Location of the learned kernel weights (input to the optional kPCA).
    weights_csv_file = '{}/weights_{}.csv'.format(
        output_dir, output_filename_core
    )
    # NOTE: the kernel PCA at the moment can be run in a separate step
    # kpca(
    #     data_names,
    #     network_name,
    #     gene_sets_name,
    #     preprocess_dir,
    #     output_dir,
    #     class_label_file,
    #     weights_csv_file,
    #     fold,
    # )
    return 0


if __name__ == "__main__":
    sys.exit(main())  # pragma: no cover
|
# Decode a layered image: 25x6 pixels per layer, digit 2 = transparent.
# The digits are read from stdin as one long string.
# BUG FIX: the original compared ints with `is` / `is not` (identity), which
# only works by accident via CPython small-int caching and raises a
# SyntaxWarning on Python 3.8+; use `==` / `!=` instead.
data = [int(i) for i in input()]
width = 25
height = 6
num_in_layer = width * height
layers = []
for i, c in enumerate(data):
    # Start a new layer every width*height digits.
    if (i % num_in_layer) == 0:
        layers.append([])
    layers[-1].append(c)
image = [0] * num_in_layer
# `pixel` tracks the positions still transparent (undecided) so far.
pixel = range(0, num_in_layer)
for l in layers:
    # Positions this layer decides (non-transparent value).
    pixels = [i for i in pixel if l[i] != 2]
    # Positions still transparent after this layer.
    pixel = [i for i in pixel if l[i] == 2]
    for i in pixels:
        image[i] = l[i]
    # Stop early once every pixel has been decided.
    if len(pixel) == 0:
        break
# Print the decoded image row by row.
for i in range(0, height):
    line = image[(i * width):((i + 1) * width)]
    print(str(line))
|
#!/usr/bin/env python
import os
import joblib
import argparse
from PIL import Image
from util import draw_bb_on_img
from constants import MODEL_PATH
from face_recognition import preprocessing
def parse_args():
    """Build and evaluate the command-line interface for the face classifier."""
    parser = argparse.ArgumentParser(
        prog='Face Classifier',
        description='Script for detecting and classifying faces on user-provided image. This script will process image, draw '
                    'bounding boxes and labels on image and display it. It will also optionally save that image.')
    parser.add_argument('--image-path', required=True, help='Path to image file.')
    parser.add_argument('--save-dir', help='If save dir is provided image will be saved to specified directory.')
    parser.add_argument('--min-conf', help='Only use face predictions that have a confidence of at least the specified value (0-1).')
    parser.add_argument('--fast', action='store_true', help='Enable Low-Res fast mode.')
    return parser.parse_args()
def recognise_faces(img, args):
    """Run the pickled recognition model on *img*.

    Filters detections below --min-conf (when given), draws bounding boxes
    in place on the image when anything remains, and returns (faces, img).
    """
    detector = joblib.load(MODEL_PATH)
    faces = detector(img)
    if args.min_conf:
        threshold = float(args.min_conf)
        faces = [face for face in faces if face.top_prediction.confidence > threshold]
    if faces:
        draw_bb_on_img(faces, img)
    return faces, img
def main():
    """Detect faces in the given image, display it, and optionally save it."""
    args = parse_args()
    preprocess = preprocessing.ExifOrientationNormalize()
    img = Image.open(args.image_path)
    # Remember the original filename before transforms discard it.
    filename = img.filename
    if args.fast:
        # Downscale to 512px wide, preserving aspect ratio, for speed.
        width, height = img.size
        factor = 512 / width
        size = [round(width * factor), round(height * factor)]
        img = img.resize(size, Image.BILINEAR)
    img = preprocess(img)
    img = img.convert('RGB')
    faces, img = recognise_faces(img, args)
    if not faces:
        print('No faces found in this image.')
    if args.save_dir:
        basename = os.path.basename(filename)
        name = basename.split('.')[0]
        ext = basename.split('.')[1]
        # BUG FIX: the tagged image was previously saved to the current
        # working directory, ignoring --save-dir entirely.
        img.save(os.path.join(args.save_dir, '{}_tagged.{}'.format(name, ext)))
    img.show()


if __name__ == '__main__':
    main()
|
#! /usr/bin/env python3
# Dump the cache to dump.txt (used for faster manual injection to skip generation for demos)
import argparse
import time
import sys
import logging
import pandas as pd
from getargs import getargs
from config import *
import config
import utils
import xmlutils
import montecarlo
import azlog
import secrets
log = azlog.getLogger(__name__)
azlog.color=False
if __name__ == "__main__":
    #-- grab cli args
    args = getargs("dumpdb")
    #-- verbosity
    azlog.setDebug(args.verbose)
    #-- pull keys/passwords from the keyvault
    log.info("Reading kevault secrets")
    secrets.ReadKVSecrets()
    log.info("Done.")
    #-- open connection to cache
    r = utils.SetupCacheConn(args.cache_type, args.cache_name, args.cache_port, config.AZFINSIM_REDISKEY, args.cache_ssl)
    if r is None:
        # BUG FIX: the original applied `%` to a single value while the format
        # string expected three (TypeError at runtime), then fell through and
        # used the dead connection anyway. Log lazily and bail out instead.
        logging.error("Cannot connect to Redis DB: %s, %s, %s", args.cache_name, args.cache_port, args.cache_key)
        sys.exit(1)
    #-- dump every trade as a redis SET command (append mode, as before)
    with open("dump.txt", "a") as f:
        for tradenum in range(0, 1000000):
            keyname = "ey%007d.xml" % tradenum
            #-- read trade from cache
            xmlstring = utils.GetTrade(r, keyname)
            xmlstr = xmlstring.decode('utf-8')
            xmlclean = xmlstr.replace("\n", "")
            escaped = xmlclean.replace('"', '\\"')
            # Renamed from `str`, which shadowed the builtin.
            line = 'SET %s "%s"' % (keyname, escaped)
            print(line, file=f)
|
import redis
def pace():
    """Start pacing. Stub: no pacing hardware/logic is wired up yet."""
    pass
def unpace():
    """Stop pacing. Stub: no pacing hardware/logic is wired up yet."""
    pass
def run():
    """Toggle pacing each time the user presses ENTER, until Ctrl-C."""
    pacing = False
    prompt_template = "Press ENTER to {}"
    try:
        while True:
            if pacing:
                input(prompt_template.format("unpace"))
                unpace()
            else:
                input(prompt_template.format("pace"))
                pace()
            pacing = not pacing
    except KeyboardInterrupt:
        print("\nStopping pacer")


if __name__ == "__main__":
    run()
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
__all__ = [
'GetKeyResult',
'AwaitableGetKeyResult',
'get_key',
]
@pulumi.output_type
class GetKeyResult:
    """
    A collection of values returned by getKey.
    """
    # NOTE: generated code (see the tfgen warning at the top of this file);
    # each constructor argument is type-checked and stored into the pulumi
    # resource state via pulumi.set, then exposed through a typed getter.
    def __init__(__self__, arn=None, aws_account_id=None, creation_date=None, customer_master_key_spec=None, deletion_date=None, description=None, enabled=None, expiration_model=None, grant_tokens=None, id=None, key_id=None, key_manager=None, key_state=None, key_usage=None, origin=None, valid_to=None):
        if arn and not isinstance(arn, str):
            raise TypeError("Expected argument 'arn' to be a str")
        pulumi.set(__self__, "arn", arn)
        if aws_account_id and not isinstance(aws_account_id, str):
            raise TypeError("Expected argument 'aws_account_id' to be a str")
        pulumi.set(__self__, "aws_account_id", aws_account_id)
        if creation_date and not isinstance(creation_date, str):
            raise TypeError("Expected argument 'creation_date' to be a str")
        pulumi.set(__self__, "creation_date", creation_date)
        if customer_master_key_spec and not isinstance(customer_master_key_spec, str):
            raise TypeError("Expected argument 'customer_master_key_spec' to be a str")
        pulumi.set(__self__, "customer_master_key_spec", customer_master_key_spec)
        if deletion_date and not isinstance(deletion_date, str):
            raise TypeError("Expected argument 'deletion_date' to be a str")
        pulumi.set(__self__, "deletion_date", deletion_date)
        if description and not isinstance(description, str):
            raise TypeError("Expected argument 'description' to be a str")
        pulumi.set(__self__, "description", description)
        if enabled and not isinstance(enabled, bool):
            raise TypeError("Expected argument 'enabled' to be a bool")
        pulumi.set(__self__, "enabled", enabled)
        if expiration_model and not isinstance(expiration_model, str):
            raise TypeError("Expected argument 'expiration_model' to be a str")
        pulumi.set(__self__, "expiration_model", expiration_model)
        if grant_tokens and not isinstance(grant_tokens, list):
            raise TypeError("Expected argument 'grant_tokens' to be a list")
        pulumi.set(__self__, "grant_tokens", grant_tokens)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if key_id and not isinstance(key_id, str):
            raise TypeError("Expected argument 'key_id' to be a str")
        pulumi.set(__self__, "key_id", key_id)
        if key_manager and not isinstance(key_manager, str):
            raise TypeError("Expected argument 'key_manager' to be a str")
        pulumi.set(__self__, "key_manager", key_manager)
        if key_state and not isinstance(key_state, str):
            raise TypeError("Expected argument 'key_state' to be a str")
        pulumi.set(__self__, "key_state", key_state)
        if key_usage and not isinstance(key_usage, str):
            raise TypeError("Expected argument 'key_usage' to be a str")
        pulumi.set(__self__, "key_usage", key_usage)
        if origin and not isinstance(origin, str):
            raise TypeError("Expected argument 'origin' to be a str")
        pulumi.set(__self__, "origin", origin)
        if valid_to and not isinstance(valid_to, str):
            raise TypeError("Expected argument 'valid_to' to be a str")
        pulumi.set(__self__, "valid_to", valid_to)

    # Read-only typed accessors; values come from the stored resource state.
    @property
    @pulumi.getter
    def arn(self) -> str:
        return pulumi.get(self, "arn")

    @property
    @pulumi.getter(name="awsAccountId")
    def aws_account_id(self) -> str:
        return pulumi.get(self, "aws_account_id")

    @property
    @pulumi.getter(name="creationDate")
    def creation_date(self) -> str:
        return pulumi.get(self, "creation_date")

    @property
    @pulumi.getter(name="customerMasterKeySpec")
    def customer_master_key_spec(self) -> str:
        return pulumi.get(self, "customer_master_key_spec")

    @property
    @pulumi.getter(name="deletionDate")
    def deletion_date(self) -> str:
        return pulumi.get(self, "deletion_date")

    @property
    @pulumi.getter
    def description(self) -> str:
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def enabled(self) -> bool:
        return pulumi.get(self, "enabled")

    @property
    @pulumi.getter(name="expirationModel")
    def expiration_model(self) -> str:
        return pulumi.get(self, "expiration_model")

    @property
    @pulumi.getter(name="grantTokens")
    def grant_tokens(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "grant_tokens")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The provider-assigned unique ID for this managed resource.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="keyId")
    def key_id(self) -> str:
        return pulumi.get(self, "key_id")

    @property
    @pulumi.getter(name="keyManager")
    def key_manager(self) -> str:
        return pulumi.get(self, "key_manager")

    @property
    @pulumi.getter(name="keyState")
    def key_state(self) -> str:
        return pulumi.get(self, "key_state")

    @property
    @pulumi.getter(name="keyUsage")
    def key_usage(self) -> str:
        return pulumi.get(self, "key_usage")

    @property
    @pulumi.getter
    def origin(self) -> str:
        return pulumi.get(self, "origin")

    @property
    @pulumi.getter(name="validTo")
    def valid_to(self) -> str:
        return pulumi.get(self, "valid_to")
class AwaitableGetKeyResult(GetKeyResult):
    """Awaitable wrapper around GetKeyResult.

    The values are already resolved when this object is constructed, so
    awaiting it completes immediately with a plain GetKeyResult.
    """
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` turns this function into a generator,
        # which is what the await protocol requires; it never suspends.
        if False:
            yield self
        return GetKeyResult(
            arn=self.arn,
            aws_account_id=self.aws_account_id,
            creation_date=self.creation_date,
            customer_master_key_spec=self.customer_master_key_spec,
            deletion_date=self.deletion_date,
            description=self.description,
            enabled=self.enabled,
            expiration_model=self.expiration_model,
            grant_tokens=self.grant_tokens,
            id=self.id,
            key_id=self.key_id,
            key_manager=self.key_manager,
            key_state=self.key_state,
            key_usage=self.key_usage,
            origin=self.origin,
            valid_to=self.valid_to)
def get_key(grant_tokens: Optional[Sequence[str]] = None,
            key_id: Optional[str] = None,
            opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetKeyResult:
    """
    Use this data source to get detailed information about
    the specified KMS Key with flexible key id input.
    This can be useful to reference key alias
    without having to hard code the ARN as input.
    ## Example Usage
    ```python
    import pulumi
    import pulumi_aws as aws
    by_alias = aws.kms.get_key(key_id="alias/my-key")
    by_id = aws.kms.get_key(key_id="1234abcd-12ab-34cd-56ef-1234567890ab")
    by_alias_arn = aws.kms.get_key(key_id="arn:aws:kms:us-east-1:111122223333:alias/my-key")
    by_key_arn = aws.kms.get_key(key_id="arn:aws:kms:us-east-1:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab")
    ```
    :param Sequence[str] grant_tokens: List of grant tokens
    :param str key_id: Key identifier which can be one of the following format:
           * Key ID. E.g: `1234abcd-12ab-34cd-56ef-1234567890ab`
           * Key ARN. E.g.: `arn:aws:kms:us-east-1:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab`
           * Alias name. E.g.: `alias/my-key`
           * Alias ARN: E.g.: `arn:aws:kms:us-east-1:111122223333:alias/my-key`
    """
    # Wire-format argument names expected by the provider invoke.
    invoke_args = {
        'grantTokens': grant_tokens,
        'keyId': key_id,
    }
    # Ensure invoke options exist and carry the plugin version.
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    resolved = pulumi.runtime.invoke('aws:kms/getKey:getKey', invoke_args, opts=opts, typ=GetKeyResult).value
    return AwaitableGetKeyResult(
        arn=resolved.arn,
        aws_account_id=resolved.aws_account_id,
        creation_date=resolved.creation_date,
        customer_master_key_spec=resolved.customer_master_key_spec,
        deletion_date=resolved.deletion_date,
        description=resolved.description,
        enabled=resolved.enabled,
        expiration_model=resolved.expiration_model,
        grant_tokens=resolved.grant_tokens,
        id=resolved.id,
        key_id=resolved.key_id,
        key_manager=resolved.key_manager,
        key_state=resolved.key_state,
        key_usage=resolved.key_usage,
        origin=resolved.origin,
        valid_to=resolved.valid_to)
|
import json
from unittest.mock import Mock
from urllib import parse
import pytest
from rest_framework.test import APIRequestFactory, APITestCase, force_authenticate
from django.test import override_settings
from django.urls import reverse
from authentication.factories import UserFactory, StaffUserFactory
from authentication.models import User
from authentication.serializers import UserSerializer
from authentication.views import CreateUserView
from shared import frontend_urls
from .utils import assert_unauthenticated
# Every test in this module touches the database.
pytestmark = [pytest.mark.django_db]

USER_EMAIL = "user@test.com"
VALID_PASSWORD = "test_password123"
INVALID_PASSWORD = "wrong_password123"  # well-formed, but not the user's password
TEST_TOKEN = "test_token"

# Payload for email/password auth endpoints.
USER_DATA = {
    "email": USER_EMAIL,
    "password": VALID_PASSWORD,
}

# Payloads for social-auth endpoints (same email, provider-specific token).
GOOGLE_USER_DATA = {
    "email": USER_EMAIL,
    "token": TEST_TOKEN,
    "provider": User.PROVIDER_GOOGLE,
}
FACEBOOK_USER_DATA = {
    **GOOGLE_USER_DATA,
    "provider": User.PROVIDER_FACEBOOK,
}

# Canned HTTP responses mimicking the Facebook token-exchange call.
SUCCESSFUL_FACEBOOK_RESPONSE = Mock(
    json=lambda: {"access_token": "test_access_token", "email": USER_EMAIL}, ok=True
)
FAILED_FACEBOOK_RESPONSE = Mock(json=lambda: {"error": "bad_token"}, ok=False)
class TestLogin:
    """Tests for the password login endpoint.

    NOTE(review): ``authenticate_and_test`` is a fixture defined outside this
    file (presumably conftest.py); it appears to POST ``data`` to the named
    URL and assert success/failure — confirm against its definition.
    """

    def test_login(self, authenticate_and_test):
        # A user created with USER_DATA can log in with those credentials.
        UserFactory(**USER_DATA)
        authenticate_and_test("authentication:login", USER_DATA)

    def test_login_wrong_password(self, authenticate_and_test):
        # Same user, wrong password: login must fail.
        UserFactory(**USER_DATA)
        data = {
            **USER_DATA,
            "password": INVALID_PASSWORD,
        }
        authenticate_and_test("authentication:login", data, success=False)
class TestRegister:
    """Tests for the registration endpoint: password policy, email
    validation, duplicate accounts, and post-register redirects."""

    @pytest.mark.parametrize(
        "password",
        [
            "test_password",  # No number
            "837493938481",  # No letter
            "password1",  # Too common
            "uam38m",  # Too short
        ],
    )
    def test_register_with_bad_password(self, authenticate_and_test, password):
        # Each password violates one rule of the password policy.
        data = {
            **USER_DATA,
            "password": password,
        }
        authenticate_and_test("authentication:register", data, success=False)

    def test_register_with_bad_email(self, authenticate_and_test):
        # Missing TLD -> invalid email.
        data = {
            **USER_DATA,
            "email": "abc@abc",
        }
        authenticate_and_test("authentication:register", data, success=False)

    def test_register_existing_email(self, authenticate_and_test):
        # Registering an email that already has an account must fail,
        # even with a different password.
        UserFactory(**USER_DATA)
        data = {**USER_DATA, "password": INVALID_PASSWORD}
        authenticate_and_test("authentication:register", data, success=False)

    def test_redirect(self, authenticate_and_test):
        # A valid "next" URL is honored after registration.
        redirect_url = "http://localhost:1234/test_path"
        data = {
            **USER_DATA,
            "next": redirect_url,
        }
        authenticate_and_test("authentication:register", data, redirect_url=redirect_url)

    @override_settings(VALID_REDIRECT_HOSTNAMES=["*.testing.com"])
    def test_redirect_wildcard(self, authenticate_and_test):
        # Wildcard entries in VALID_REDIRECT_HOSTNAMES match subdomains.
        redirect_url = "http://sub.testing.com"
        data = {
            **USER_DATA,
            "next": redirect_url,
        }
        authenticate_and_test("authentication:register", data, redirect_url=redirect_url)

    def test_ignores_bad_redirect_param(self, authenticate_and_test):
        # A "next" pointing at a non-allowed host is silently ignored.
        data = {
            **USER_DATA,
            "next": "https://badsite.com",
        }
        authenticate_and_test("authentication:register", data)
class TestLogout:
    """Tests for logout redirect behavior and session invalidation."""

    def test_logout(self, client, authenticate_and_test):
        # After registering (which logs the user in), logout redirects to
        # the frontend login page and clears the session.
        authenticate_and_test("authentication:register", USER_DATA)
        response = client.get(reverse("logout"))
        assert response.status_code == 302
        assert response.url == frontend_urls.LOGIN
        assert_unauthenticated(client)

    def test_logout_preserves_query_params(self, client, authenticate_and_test):
        # Query params on the logout request are carried over to the redirect.
        authenticate_and_test("authentication:register", USER_DATA)
        query_params = {"test": "value"}
        response = client.get(reverse("logout"), query_params)
        assert response.status_code == 302
        parsed_url = parse.urlparse(response.url)
        assert parsed_url.path == frontend_urls.LOGIN
        query_string = parse.urlencode(query_params)
        assert parsed_url.query == query_string
        assert_unauthenticated(client)

    @override_settings(VALID_REDIRECT_HOSTNAMES=["*.testing.com"])
    def test_logout_redirects_to_logout_redirect(self, client, authenticate_and_test):
        # An allow-listed "logoutNext" URL overrides the default redirect.
        authenticate_and_test("authentication:register", USER_DATA)
        test_url = "https://www.testing.com"
        params = {"logoutNext": test_url}
        response = client.get(reverse("logout"), params)
        assert response.status_code == 302
        assert response.url == test_url
        assert_unauthenticated(client)

    @override_settings(VALID_REDIRECT_HOSTNAMES=["*.testing.com"])
    def test_logout_does_not_redirect_to_bad_domain(self, client, authenticate_and_test):
        # A "logoutNext" outside the allow-list falls back to the login page.
        authenticate_and_test("authentication:register", USER_DATA)
        params = {"logoutNext": "https://www.badsite.com"}
        response = client.get(reverse("logout"), params)
        assert response.status_code == 302
        parsed_url = parse.urlparse(response.url)
        assert parsed_url.path == frontend_urls.LOGIN
        assert_unauthenticated(client)
class TestCreateUserView(APITestCase):
    """Tests for the staff-only CreateUserView endpoint."""

    def test_create_with_no_existing_user(self):
        # A staff user may create a brand-new account; the response echoes
        # the new user's portunus_uuid (serialized as a string in JSON).
        staff_user = StaffUserFactory(**USER_DATA)
        factory = APIRequestFactory()
        view = CreateUserView.as_view()
        new_email = "newuser@test.com"
        request = factory.post(
            reverse("authentication:create_user"),
            {
                "email": new_email,
                "password": VALID_PASSWORD,
            },
            format="json",
        )
        force_authenticate(request, user=staff_user)
        response = view(request)
        new_user = User.objects.get(email=new_email)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            json.loads(response.content)["portunus_uuid"], str(new_user.portunus_uuid)
        )

    def test_create_with_existing_user(self):
        # Creating a user whose email already exists returns 400 plus the
        # existing account's uuid and a user_exists flag.
        staff_user = StaffUserFactory(**USER_DATA)
        factory = APIRequestFactory()
        view = CreateUserView.as_view()
        request = factory.post(reverse("authentication:create_user"), USER_DATA, format="json")
        force_authenticate(request, user=staff_user)
        response = view(request)
        self.assertEqual(response.status_code, 400)
        # Bug fix: the JSON payload holds the uuid as a *string*; comparing
        # it with the raw UUID object could never be equal (the sibling test
        # above already wraps with str()).
        self.assertEqual(
            json.loads(response.content)["portunus_uuid"], str(staff_user.portunus_uuid)
        )
        self.assertEqual(json.loads(response.content)["user_exists"], True)
class TestRetrieveDeleteUserView(APITestCase):
    """Permission tests for GET/DELETE of a single user by portunus_uuid."""

    def setUp(self):
        self.user = UserFactory()

    @property
    def endpoint_path(self):
        # URL targeting the user created in setUp.
        return reverse(
            "authentication:retrieve_or_delete_user", args=[self.user.portunus_uuid]
        )

    def check_get(self, status_code):
        # GET must return the serialized user only on success (200).
        response = self.client.get(self.endpoint_path)
        self.assertEqual(response.status_code, status_code)
        if status_code == 200:
            self.assertDictEqual(response.json(), UserSerializer(self.user).data)

    def check_delete(self, status_code=204):
        # DELETE must remove the row only on success (204); on failure the
        # row must still exist.
        response = self.client.delete(self.endpoint_path)
        self.assertEqual(response.status_code, status_code)
        user = User.objects.filter(pk=self.user.pk).first()
        if status_code == 204:
            self.assertIsNone(user)
        else:
            self.assertIsNotNone(user)

    def check_user(self, user_requesting, status_code=None):
        # Exercise both verbs as the given requester (None = anonymous);
        # status_code=None means "expect the success codes".
        if user_requesting:
            self.client.force_authenticate(user_requesting)
        self.check_get(status_code or 200)
        self.check_delete(status_code or 204)

    def test_same_user(self):
        self.check_user(self.user)

    def test_staff_user(self):
        staff_user = StaffUserFactory()
        self.check_user(staff_user)

    def test_wrong_user(self):
        other_user = UserFactory()
        self.check_user(other_user, status_code=403)

    def test_unauthenticated_user(self):
        self.check_user(None, status_code=401)
|
#!/usr/bin/env python3
# Copyright (c) 2016 Iotic Labs Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://github.com/Iotic-Labs/py-IoticAgent/blob/master/LICENSE
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Constants for Units and helper class to build complete JSON tree of units from
"""
from __future__ import print_function, unicode_literals
import os
import json
import logging
logger = logging.getLogger(__name__)
import rdflib
#
# Common units constants
#
# - Electric Current
AMPERE = 'http://purl.obolibrary.org/obo/UO_0000011'
'''Electric current unit'''
# - Distance
METER = 'http://purl.obolibrary.org/obo/UO_0000008'
'''Metric distance unit = 100 cm'''
METRE = METER
MILLIMETER = 'http://purl.obolibrary.org/obo/UO_0000016'
'''Metric distance unit = 1/100 of a meter'''
MILLIMETRE = MILLIMETER
CENTIMETER = 'http://purl.obolibrary.org/obo/UO_0000015'
'''Metric distance unit = 1/100 of a meter'''
CENTIMETRE = CENTIMETER
CM = CENTIMETER
# - Area
SQUARE_METER = "http://purl.obolibrary.org/obo/UO_0000080"
'''An area unit which is equal to an area enclosed by a square with sides each 1 meter long.'''
SQUARE_METRE = SQUARE_METER
SQUARE_CENTIMETER = "http://purl.obolibrary.org/obo/UO_0000081"
'''An area unit which is equal to an area enclosed by a square with sides each 1 centimeter long.'''
SQUARE_CENTIMETRE = SQUARE_CENTIMETER
SQUARE_MILLIMETER = "http://purl.obolibrary.org/obo/UO_0000082"
'''An area unit which is equal to an area enclosed by a square with sides each 1 millimeter long.'''
SQUARE_MILLIMETRE = SQUARE_MILLIMETER
# - Temperature
CELSIUS = 'http://purl.obolibrary.org/obo/UO_0000027'
'''Temperature units where the freezing point of water at 273.15 Kelvin is considered 0C and the boiling point 283.15K
is 100C'''
FAHRENHEIT = 'http://purl.obolibrary.org/obo/UO_0000195'
'''A temperature unit which is equal to 5/9ths of a kelvin. Negative 40 degrees Fahrenheit is equal to negative 40
degrees Celsius'''
KELVIN = 'http://purl.obolibrary.org/obo/UO_0000012'
'''A thermodynamic temperature unit. 0K is "absolute zero", ~293K is "room temperature", i.e. 20C'''
# - Speed
METER_PER_SEC = 'http://purl.obolibrary.org/obo/UO_0000094'
'''A speed/velocity unit which is equal to the speed of an object traveling 1 meter distance in one second'''
# - Frequency
HERTZ = 'http://purl.obolibrary.org/obo/UO_0000106'
'''A frequency unit which is equal to 1 complete cycle of a recurring phenomenon in 1 second.'''
MEGAHERTZ = 'http://purl.obolibrary.org/obo/UO_0000325'
'''A frequency unit which is equal 1 Million Hz'''
# - Angles
DEGREE = 'http://purl.obolibrary.org/obo/UO_0000185'
''' A plane angle unit which is equal to 1/360 of a full rotation or 1.7453310^[-2] rad'''
RADIAN = 'http://purl.obolibrary.org/obo/UO_0000123'
''' A plane angle unit which is equal to the angle subtended at the center of a circle by an arc equal in length
to the radius of the circle, approximately 57 degrees 17 minutes and 44.6 seconds.'''
# - Time
SECOND = 'http://purl.obolibrary.org/obo/UO_0000010'
''' A time unit which is equal to the duration of 9,192,631,770 periods of the radiation
corresponding to the transition between the two hyperfine levels
of the ground state of the caesium 133 atom
'''
HOUR = 'http://purl.obolibrary.org/obo/UO_0000032'
''' A time unit which is equal to 3600 seconds or 60 minutes'''
MINUTE = 'http://purl.obolibrary.org/obo/UO_0000031'
''' A time unit which is equal to 60 seconds'''
# - Energy
JOULE = 'http://purl.obolibrary.org/obo/UO_0000112'
'''An energy unit which is equal to the energy required when a force of 1 newton moves an object 1 meter'''
WATT_HOUR = 'http://purl.obolibrary.org/obo/UO_0000223'
'''An energy unit which is equal to the amount of electrical energy equivalent to a one-watt load drawing power for one
hour'''
KILOWATT_HOUR = 'http://purl.obolibrary.org/obo/UO_0000224'
'''An energy unit which is equal to 1000 Watt-hours'''
# - Power
WATT = 'http://purl.obolibrary.org/obo/UO_0000114'
'''A power unit which is equal to the power used when work is done at the rate of 1 joule per second'''
# - Electric potential
VOLT = 'http://purl.obolibrary.org/obo/UO_0000218'
'''An electric potential difference unit which is equal to the work per unit charge'''
# - Force
NEWTON = 'http://purl.obolibrary.org/obo/UO_0000108'
'''A force unit which is equal to the force required to cause an acceleration of 1m/s2 of a mass of 1 Kg'''
# - Illuminance
LUX = 'http://purl.obolibrary.org/obo/UO_0000116'
'''An illuminance unit which is equal to the illuminance produced by 1 lumen evenly spread over an area 1 m^[2]'''
# - Pressure
MM_MERCURY = 'http://purl.obolibrary.org/obo/UO_0000272'
'''A unit of pressure equal to the amount of fluid pressure one millimeter deep in mercury at 0C'''
PASCAL = 'http://purl.obolibrary.org/obo/UO_0000110'
''' A pressure unit which is equal to the pressure or stress on a surface caused by a force of 1 newton spread over a
surface of 1 m^[2]'''
# - Weight
GRAM = 'http://purl.obolibrary.org/obo/UO_0000021'
'''A mass unit which is equal to one thousandth of a kilogram or 10^[-3] kg'''
MILLIGRAM = 'http://purl.obolibrary.org/obo/UO_0000022'
'''A mass unit which is equal to one thousandth of a gram or 10^[-3] g'''
MICROGRAM = 'http://purl.obolibrary.org/obo/UO_0000023'
'''A mass unit which is equal to one millionth of a gram or 10^[-6] g'''
KILOGRAM = 'http://purl.obolibrary.org/obo/UO_0000009'
''' A mass unit which is equal to the mass of the International Prototype Kilogram kept by the BIPM at Svres, France.'''
# - Radiation
ROENTGEN = 'http://purl.obolibrary.org/obo/UO_0000136'
'''An exposure unit which is equal to the amount of radiation required to liberate positive
and negative charges of one electrostatic unit of charge in 1 cm^[3] of air
'''
BECQUEREL = 'http://purl.obolibrary.org/obo/UO_0000132'
'''An activity (of a radionuclide) unit which is equal to the activity of a quantity
of radioactive material in which one nucleus decays per second or
there is one atom disintegration per second
'''
COUNTS_PER_MIN = 'http://purl.obolibrary.org/obo/UO_0000148'
'''An activity (of a radionuclide) unit which is equal to the number of light
emissions produced by ionizing radiation in one minute.
'''
# - Dimensionless
PERCENT = "http://purl.obolibrary.org/obo/UO_0000187"
'''A dimensionless ratio unit which denotes numbers as fractions of 100.'''
DECIBEL = "http://purl.obolibrary.org/obo/UO_0000259"
''' A ratio unit which is an indicator of sound power per unit area.'''
PARTS_PER_HUNDRED = "http://purl.obolibrary.org/obo/UO_0000167"
'''A dimensionless concentration notation which denotes the amount of a given substance in a total amount of 100
regardless of the units of measure as long as they are the same.'''
PARTS_PER_THOUSAND = "http://purl.obolibrary.org/obo/UO_0000168"
'''A dimensionless concentration notation which denotes the amount of a given substance in a total amount of 1000
regardless of the units of measure as long as they are the same.'''
PARTS_PER_MILLION = "http://purl.obolibrary.org/obo/UO_0000169"
'''A dimensionless concentration notation which denotes the amount of a given substance in a total amount of 1,000,000
regardless of the units of measure as long as they are the same.'''
PARTS_PER_BILLION = "http://purl.obolibrary.org/obo/UO_0000170"
'''A dimensionless concentration notation which denotes the amount of a given substance in a total amount of 10^9
regardless of the units of measure as long as they are the same.'''
PARTS_PER_TRILLION = "http://purl.obolibrary.org/obo/UO_0000171"
'''A dimensionless concentration notation which denotes the amount of a given substance in a total amount of 10^12
regardless of the units of measure as long as they are the same.'''
PARTS_PER_QUADRILLION = "http://purl.obolibrary.org/obo/UO_0000172"
'''A dimensionless concentration notation which denotes the amount of a given substance in a total amount of 10^15
regardless of the units of measure as long as they are the same.'''
class Units(object):
    """Build a nested tree of measurement units from the Unit Ontology (uo.owl).

    The ontology is loaded with rdflib from, in order of preference: an
    explicit local path, ``./uo.owl`` in the working directory, or the
    remote URL.  ``self.units`` is a list of
    ``(uri, label, comment, children)`` tuples rooted at the generic
    'unit' class UO_0000000.
    """

    def __init__(self, uofn=None):
        """
        uofn: optional path to a local copy of uo.owl.
        """
        # Default to the remote copy; prefer local files when they exist.
        # NOTE(review): the googlecode.com URL is likely dead — confirm and
        # point at a maintained mirror.
        self.__owlfn = 'https://unit-ontology.googlecode.com/svn/trunk/uo.owl'
        if uofn is not None and os.path.exists(uofn):
            self.__owlfn = uofn
        else:
            pfn = os.path.join(os.getcwd(), 'uo.owl')
            if os.path.exists(pfn):
                self.__owlfn = pfn
        #
        self.__query = """
        ## SPARQL to get all labels and comments of all classes which are sub-classes of a parent
        prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
        prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>
        SELECT ?s ?label ?comment
        WHERE
        {
            ?s rdfs:subClassOf <%s> .
            ?s rdfs:label ?label .
            ?s rdfs:comment ?comment
        }
        """
        #
        if self.__owlfn.startswith('https://'):
            logger.debug("Downloading: %s", self.__owlfn)
        self.__graph = rdflib.Graph()
        self.__graph.parse(self.__owlfn, format="xml")
        #
        logger.debug("Building Units")
        self.units = self.__build_units()

    def __build_units(self, parent='http://purl.obolibrary.org/obo/UO_0000000'):
        """Recursively collect (uri, label, comment, children) tuples for
        every subclass of ``parent``."""
        ret = []
        qres = self.__graph.query(self.__query % parent)
        for s, label, comment in qres:
            ret.append((s, label, comment, self.__build_units(parent=s)))
        return ret

    def print_units(self, parent=None, indent=0, indentsize=4):
        """Pretty-print the unit tree to stdout, one node per line."""
        if parent is None:
            parent = self.units
        for s, label, comment, children in parent:
            print("%s %s => %s (%s)" % ((' ' * indent), s, label, comment))
            if children:
                self.print_units(parent=children, indent=indent + indentsize, indentsize=indentsize)

    def save_json(self, jsonfn=None, pretty=True):
        """Write a .json file with the units tree.

        jsonfn='path/file.name' default os.getcwd() + 'units.json'
        pretty=True use JSON dumps pretty print for human readability
        Returns True on success; I/O errors propagate to the caller.
        """
        if jsonfn is None:
            jsonfn = os.path.join(os.getcwd(), 'units.json')
        # Bug fix: the original ended with an unreachable `return False`
        # after `return True` and carried a dead `jsondump = None`
        # placeholder; the success path is the only path.
        sort_keys = False
        indent = 0
        if pretty:
            sort_keys = True
            indent = 4
        jsondump = json.dumps(self.units, sort_keys=sort_keys, indent=indent)
        with open(jsonfn, 'w') as f:
            f.write(jsondump)
        return True
if __name__ == '__main__':
    # CLI entry point: build the unit tree and dump it to ./units.json.
    logging.basicConfig(level=logging.INFO)
    Units().save_json()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""iterator over an sdf files"""
from builtins import next
from builtins import object
from subprocess import Popen, PIPE
import re
class GzipFile(object):
    """Line iterator over a gzip-compressed file, decompressed by piping
    through an external ``zcat`` process.

    NOTE(review): ``path`` is interpolated into a shell command line, so
    callers must not pass untrusted paths (shell-injection risk); consider
    the stdlib ``gzip`` module instead.
    """

    def __init__(self, path):
        self.p = Popen(
            'zcat %s' % path,
            bufsize=1024,
            close_fds=True,
            shell=True,
            stdout=PIPE,
            stderr=None,
        )
        self.status = 'open'

    def __iter__(self):
        # Yield decompressed lines straight from the child's stdout pipe.
        assert self.status == 'open'
        for line in self.p.stdout:
            yield line

    def __next__(self):
        assert self.status == 'open'
        return next(self.p.stdout)

    def close(self):
        """Terminate the zcat child and close its pipe."""
        try:
            self.p.terminate()
        except Exception:
            # Fallback for environments where terminate() fails.
            # Bug fix: `os` was never imported in this module, so this
            # fallback always raised NameError (silently swallowed by the
            # previous bare `except`).
            import os
            from signal import SIGTERM
            try:
                os.kill(self.p.pid, SIGTERM)
            except OSError:
                pass  # child already gone
        self.p.stdout.close()
        self.status = 'closed'
def sdf_iter(sdf, skip_annotations=False):
    """Yield one SDF record at a time from ``sdf``.

    sdf: '-' for stdin, otherwise any iterable of lines (open file, list, ...).
    skip_annotations: when True, drop the '> <TAG>' data section of each
        record (everything from the first '>' line up to, but excluding,
        the '$$$$' terminator).

    Each yielded record is a single string ending with its '$$$$' line.
    A trailing partial record without a '$$$$' terminator is discarded,
    matching the original behavior.
    """
    def _records():
        if sdf == '-':
            import sys
            f = sys.stdin
        else:
            f = sdf
        buf = []
        in_annotations = False
        for line in f:
            if skip_annotations:
                if not in_annotations:
                    if line.startswith('>'):
                        in_annotations = True  # start dropping data fields
                    else:
                        # Bug fix (perf): was `buf += line`, which extends
                        # the list one *character* at a time.
                        buf.append(line)
                elif line.startswith('$$$$'):
                    in_annotations = False
                    buf.append(line)  # keep the record terminator
            else:
                buf.append(line)
            if line.startswith('$$$$'):
                yield ''.join(buf)
                buf = []
    return _records()
def get_sdf_tags(sdf):
    """Parse the '> <TAG>' data fields of an SDF record into a dict.

    sdf: the text of one SDF record.
    Returns {tag_name: tag_value}, both stripped of surrounding whitespace.
    If the same tag appears twice, the last occurrence wins.
    """
    # Bug fix: the pattern now uses a raw string — "\s" in a normal string
    # is an invalid escape sequence (a warning, then an error, in modern
    # Python). DOTALL lets a value span multiple lines until the next tag.
    tag_pattern = re.compile(r">\s+<([^>]+)>([^>$]+)", re.DOTALL)
    return {name.strip(): value.strip() for name, value in tag_pattern.findall(sdf)}
|
import os
from sklearn.datasets import load_iris
from sklearn.decomposition import PCA
from sklearn.ensemble import RandomForestClassifier
from sklearn.pipeline import FeatureUnion
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
from sklearn_instrumentation import SklearnInstrumentor
from sklearn_instrumentation.instruments.cprofile import CProfiler
# Build an iris classification pipeline: a feature union of standardized
# raw features and a 3-component PCA projection, feeding a random forest.
ss = StandardScaler()
pca = PCA(n_components=3)
rf = RandomForestClassifier()
classification_model = Pipeline(
    steps=[
        (
            "fu",
            FeatureUnion(
                transformer_list=[
                    ("ss", ss),
                    ("pca", pca),
                ],
            ),
        ),
        ("rf", rf),
    ],
)
X, y = load_iris(return_X_y=True)

if __name__ == "__main__":
    # Attach a cProfile-based instrument to every estimator in the pipeline,
    # then run fit/predict so profile data is collected into out_dir.
    profiler = CProfiler()
    instrumentor = SklearnInstrumentor(
        instrument=profiler, instrument_kwargs={"out_dir": os.getcwd() + "/../.ignore"}
    )
    instrumentor.instrument_estimator(
        classification_model,
    )
    classification_model.fit(X, y)
    classification_model.predict(X)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import Queue
import signal
import sys
import os
import threading
import time
from ConfigParser import ConfigParser
from thread_manager import ThreadManager
from thread_mysql import ThreadMySQL
from thread_statsd import ThreadStatsd, ThreadFakeStatsd
class MysqlStatsd():
    """Main program class"""
    # NOTE(review): this file is Python 2 (Queue/ConfigParser imports,
    # `except OSError, e` syntax).  These class attributes are shadowed by
    # locals in __init__ and are effectively unused defaults.
    opt = None
    config = None

    def __init__(self):
        """Program entry point: parses arguments, loads config, optionally
        daemonizes, then spawns the MySQL-polling and statsd-flushing
        threads connected by a shared queue."""
        op = argparse.ArgumentParser()
        op.add_argument("-c", "--config", dest="cfile",
                        default="/etc/mysql-statsd.conf",
                        help="Configuration file"
                        )
        op.add_argument("-d", "--debug", dest="debug",
                        help="Prints statsd metrics next to sending them",
                        default=False, action="store_true"
                        )
        op.add_argument("--dry-run", dest="dry_run",
                        default=False,
                        action="store_true",
                        help="Print the output that would be sent to statsd without actually sending data somewhere"
                        )
        # TODO switch the default to True, and make it fork by default in init script.
        op.add_argument("-f", "--foreground", dest="foreground", help="Dont fork main program", default=False, action="store_true")

        opt = op.parse_args()
        self.get_config(opt.cfile)

        if not self.config:
            # Config unreadable: show usage and stop.
            sys.exit(op.print_help())

        try:
            logfile = self.config.get('daemon').get('logfile', '/tmp/daemon.log')
        except AttributeError:
            # NOTE(review): falls back to the sys.stdout *object*, but
            # daemonize() calls open() on its arguments, which expects a
            # file *name* — this path would raise. Confirm and fix upstream.
            logfile = sys.stdout
            pass

        if not opt.foreground:
            self.daemonize(stdin='/dev/null', stdout=logfile, stderr=logfile)

        # Set up queue shared between the producer and consumer threads.
        self.queue = Queue.Queue()

        # split off config for each thread
        mysql_config = dict(mysql=self.config['mysql'])
        mysql_config['metrics'] = self.config['metrics']
        statsd_config = self.config['statsd']

        # Spawn MySQL polling thread
        mysql_thread = ThreadMySQL(queue=self.queue, **mysql_config)
        # t1 = ThreadMySQL(config=self.config, queue=self.queue)

        # Spawn Statsd flushing thread (or the printing fake for dry runs).
        statsd_thread = ThreadStatsd(queue=self.queue, **statsd_config)
        if opt.dry_run:
            statsd_thread = ThreadFakeStatsd(queue=self.queue, **statsd_config)

        if opt.debug:
            """ All debug settings go here """
            statsd_thread.debug = True

        # Get thread manager
        tm = ThreadManager(threads=[mysql_thread, statsd_thread])
        try:
            tm.run()
        except:
            # Protects somewhat from needing to kill -9 if there is an exception
            # within the thread manager by asking for a quit an joining.
            try:
                tm.stop_threads()
            except:
                pass
            raise

    def get_config(self, config_file):
        """Load the INI config into a nested dict on self.config.

        Returns the dict, or None (and sets self.config = None) when the
        file could not be read.
        """
        cnf = ConfigParser()
        try:
            # NOTE(review): relies on ConfigParser.read() returning the list
            # of successfully parsed files; [0] raises IndexError when the
            # file was unreadable, which is caught below.
            cnf.read(config_file)[0]
        except IndexError:
            # Return None so we can display help...
            self.config = None  # Just to be safe..
            return None

        self.config = {}
        for section in cnf.sections():
            self.config[section] = {}
            for key, value in cnf.items(section):
                self.config[section][key] = value
        return self.config

    def daemonize(self, stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'):
        '''This forks the current process into a daemon. The stdin, stdout, and
        stderr arguments are file names that will be opened and be used to replace
        the standard file descriptors in sys.stdin, sys.stdout, and sys.stderr.
        These arguments are optional and default to /dev/null. Note that stderr is
        opened unbuffered, so if it shares a file with stdout then interleaved
        output may not appear in the order that you expect. '''
        # Do first fork.
        try:
            pid = os.fork()
            if pid > 0:
                sys.exit(0)  # Exit first parent.
        except OSError, e:
            sys.stderr.write("fork #1 failed: (%d) %s\n" % (e.errno, e.strerror))
            sys.exit(1)

        # Decouple from parent environment.
        # TODO: do we need to change to '/' or can we chdir to wherever __file__ is?
        os.chdir("/")
        os.umask(0)
        os.setsid()

        # Do second fork.
        try:
            pid = os.fork()
            if pid > 0:
                # The intermediate parent records the daemon's pid before
                # exiting, so init scripts can find it.
                f = open(self.config.get('daemon').get('pidfile', '/var/run/mysql_statsd.pid'), 'w')
                f.write(str(pid))
                f.close()
                sys.exit(0)  # Exit second parent.
        except OSError, e:
            sys.stderr.write("fork #2 failed: (%d) %s\n" % (e.errno, e.strerror))
            sys.exit(1)

        # Now I am a daemon!
        # Redirect standard file descriptors.
        si = open(stdin, 'r')
        so = open(stdout, 'a+')
        se = open(stderr, 'a+', 0)
        os.dup2(si.fileno(), sys.stdin.fileno())
        os.dup2(so.fileno(), sys.stdout.fileno())
        os.dup2(se.fileno(), sys.stderr.fileno())
if __name__ == "__main__":
    # Instantiating the class runs the whole program (argument parsing,
    # optional daemonization, thread startup) from __init__.
    program = MysqlStatsd()
|
#!/usr/bin/env python
import boto
from boto.s3.key import Key
# NOTE(review): Python 2 script (print statements).  boto.config.get returns
# the configured calling-format *string* (defaulting to the dotted path of
# OrdinaryCallingFormat); the variable name shadows the boto class of the
# same name — confirm this is intentional.
OrdinaryCallingFormat = boto.config.get('s3', 'calling_format', 'boto.s3.connection.OrdinaryCallingFormat')

# Connect to a local mock S3 server rather than real AWS.
s3 = boto.connect_s3(host='localhost', port=10001, calling_format=OrdinaryCallingFormat, is_secure=False)
b = s3.create_bucket('mocking')

# TEST 1: list keys filtered by prefix.
keys = b.get_all_keys(prefix='level')
print 'TEST 1'
for key in keys:
    print repr(key)

# TEST 2: list keys capped by max_keys.
keys = b.get_all_keys(max_keys=3)
print 'TEST 2'
for key in keys:
    print repr(key)
|
from conekt import db
import json
SQL_COLLATION = 'NOCASE' if db.engine.name == 'sqlite' else ''
class ConditionTissue(db.Model):
    """Maps fine-grained expression conditions onto broader tissues for one
    species, plus the plot order/colors, stored as a JSON blob in ``data``."""
    __tablename__ = 'conditions_tissue'
    id = db.Column(db.Integer, primary_key=True)
    # Owning species; rows are removed when the species is deleted.
    species_id = db.Column(db.Integer, db.ForeignKey('species.id', ondelete='CASCADE'))
    # JSON string: {"order": [...], "colors": [...], "conversion": {...}}.
    data = db.Column(db.Text)
    description = db.Column(db.Text)
    expression_specificity_method_id = db.Column(db.Integer,
                                                 db.ForeignKey('expression_specificity_method.id', ondelete='CASCADE'),
                                                 index=True)
    # Flag (0/1) — presumably whether this table is shown in the tree view;
    # TODO confirm against consumers.
    in_tree = db.Column(db.SmallInteger, default=0)

    @staticmethod
    def add(species_id, data, order, colors, expression_specificity_method_id, description=''):
        """
        Add conversion table to the database for a species

        :param species_id: internal id for the species
        :param data: dict with the conversion (key = condition, value = more general feature (e.g. tissue))
        :param order: list with order of the samples in the plot
        :param colors: list with colors to use in the plot
        :param expression_specificity_method_id: ID for expression specificity method
        :param description: optional human-readable description of the table
        """
        new_ct = ConditionTissue()

        new_ct.species_id = species_id
        # Serialize order/colors/conversion together into the data column.
        new_ct.data = json.dumps({'order': order,
                                  'colors': colors,
                                  'conversion': data})
        new_ct.expression_specificity_method_id = expression_specificity_method_id
        new_ct.description = description

        db.session.add(new_ct)
        db.session.commit()
|
#!/usr/bin/env python
# encoding: utf-8
from pyxtal import print_logo
from pyxtal.symmetry import Group
from argparse import ArgumentParser
if __name__ == "__main__":
    # -------------------------------- Options -------------------------
    parser = ArgumentParser()
    parser.add_argument(
        "-s",
        "--symmetry",
        dest="sg",
        type=str,
        help="desired symmetry, number or string, e.g., 36, Pbca, Ih. if None, show all list of available groups",
    )
    parser.add_argument(
        "-d",
        "--dimension",
        dest="dimension",
        default=3,
        type=int,
        help="desired dimension: (3, 2, 1, 0): default 3",
    )
    print_logo()
    options = parser.parse_args()
    dimension = options.dimension
    if options.sg is not None:
        sg = options.sg
        # Numeric input is a group number; otherwise the symbol string
        # (e.g. "Pbca", "Ih") is passed to Group unchanged.
        if sg.isnumeric():
            sg = int(sg)
        print(Group(sg, dimension))
    else:
        # No symmetry requested: list all available groups for the dimension.
        Group.list_groups(dimension)
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import mock
from mock import call
import random
from osc_lib import exceptions
from osc_lib import utils
from openstackclient.network.v2 import network
from openstackclient.tests.unit import fakes
from openstackclient.tests.unit.identity.v2_0 import fakes as identity_fakes_v2
from openstackclient.tests.unit.identity.v3 import fakes as identity_fakes_v3
from openstackclient.tests.unit.network.v2 import fakes as network_fakes
from openstackclient.tests.unit import utils as tests_utils
# Tests for Neutron network
#
class TestNetwork(network_fakes.TestNetworkV2):
    """Base class for Neutron network command tests: exposes shortcuts to
    the mocked network and identity clients used by the concrete tests."""

    def setUp(self):
        super(TestNetwork, self).setUp()

        # Get a shortcut to the network client
        self.network = self.app.client_manager.network

        # Get a shortcut to the ProjectManager Mock
        self.projects_mock = self.app.client_manager.identity.projects

        # Get a shortcut to the DomainManager Mock
        self.domains_mock = self.app.client_manager.identity.domains
class TestCreateNetworkIdentityV3(TestNetwork):
    """Tests for ``network create`` when the identity v3 client is in use."""
    # Fake project/domain returned by the identity mocks below
    project = identity_fakes_v3.FakeProject.create_one_project()
    domain = identity_fakes_v3.FakeDomain.create_one_domain()
    # The new network created.
    _network = network_fakes.FakeNetwork.create_one_network(
        attrs={
            'tenant_id': project.id,
            'availability_zone_hints': ["nova"],
        }
    )
    # QoS policy whose id matches the fake network's qos_policy_id
    qos_policy = (network_fakes.FakeNetworkQosPolicy.
        create_one_qos_policy(attrs={'id': _network.qos_policy_id}))
    # Expected display columns; ``data`` below is ordered to match.
    columns = (
        'admin_state_up',
        'availability_zone_hints',
        'availability_zones',
        'description',
        'id',
        'ipv4_address_scope',
        'ipv6_address_scope',
        'is_default',
        'name',
        'port_security_enabled',
        'project_id',
        'provider:network_type',
        'provider:physical_network',
        'provider:segmentation_id',
        'qos_policy_id',
        'router:external',
        'shared',
        'status',
        'subnets',
        'tags',
    )
    data = (
        network._format_admin_state(_network.admin_state_up),
        utils.format_list(_network.availability_zone_hints),
        utils.format_list(_network.availability_zones),
        _network.description,
        _network.id,
        _network.ipv4_address_scope_id,
        _network.ipv6_address_scope_id,
        _network.is_default,
        _network.name,
        _network.is_port_security_enabled,
        _network.project_id,
        _network.provider_network_type,
        _network.provider_physical_network,
        _network.provider_segmentation_id,
        _network.qos_policy_id,
        network._format_router_external(_network.is_router_external),
        _network.shared,
        _network.status,
        utils.format_list(_network.subnets),
        utils.format_list(_network.tags),
    )
    def setUp(self):
        super(TestCreateNetworkIdentityV3, self).setUp()
        self.network.create_network = mock.Mock(return_value=self._network)
        # Get the command object to test
        self.cmd = network.CreateNetwork(self.app, self.namespace)
        self.projects_mock.get.return_value = self.project
        self.domains_mock.get.return_value = self.domain
        self.network.find_qos_policy = mock.Mock(return_value=self.qos_policy)
    def test_create_no_options(self):
        """A network name is required; no arguments must fail parsing."""
        arglist = []
        verifylist = []
        self.assertRaises(tests_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)
    def test_create_default_options(self):
        """Only a name: defaults are applied and just name/state are sent."""
        arglist = [
            self._network.name,
        ]
        verifylist = [
            ('name', self._network.name),
            ('enable', True),
            ('share', None),
            ('project', None),
            ('external', False),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.network.create_network.assert_called_once_with(**{
            'admin_state_up': True,
            'name': self._network.name,
        })
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, data)
    def test_create_all_options(self):
        """Every supported option maps to its create_network attribute."""
        arglist = [
            "--disable",
            "--share",
            "--description", self._network.description,
            "--project", self.project.name,
            "--project-domain", self.domain.name,
            "--availability-zone-hint", "nova",
            "--external", "--default",
            "--provider-network-type", "vlan",
            "--provider-physical-network", "physnet1",
            "--provider-segment", "400",
            "--qos-policy", self.qos_policy.id,
            "--transparent-vlan",
            "--enable-port-security",
            self._network.name,
        ]
        verifylist = [
            ('disable', True),
            ('share', True),
            ('description', self._network.description),
            ('project', self.project.name),
            ('project_domain', self.domain.name),
            ('availability_zone_hints', ["nova"]),
            ('external', True),
            ('default', True),
            ('provider_network_type', 'vlan'),
            ('physical_network', 'physnet1'),
            ('segmentation_id', '400'),
            ('qos_policy', self.qos_policy.id),
            ('transparent_vlan', True),
            ('enable_port_security', True),
            ('name', self._network.name),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = (self.cmd.take_action(parsed_args))
        self.network.create_network.assert_called_once_with(**{
            'admin_state_up': False,
            'availability_zone_hints': ["nova"],
            'name': self._network.name,
            'shared': True,
            'description': self._network.description,
            # TODO(dtroyer): Remove tenant_id when we clean up the SDK refactor
            'tenant_id': self.project.id,
            'project_id': self.project.id,
            'is_default': True,
            'router:external': True,
            'provider:network_type': 'vlan',
            'provider:physical_network': 'physnet1',
            'provider:segmentation_id': '400',
            'qos_policy_id': self.qos_policy.id,
            'vlan_transparent': True,
            'port_security_enabled': True,
        })
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, data)
    def test_create_other_options(self):
        """The negated flags (--no-share, --disable-port-security)."""
        arglist = [
            "--enable",
            "--no-share",
            "--disable-port-security",
            self._network.name,
        ]
        verifylist = [
            ('enable', True),
            ('no_share', True),
            ('name', self._network.name),
            ('external', False),
            ('disable_port_security', True),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.network.create_network.assert_called_once_with(**{
            'admin_state_up': True,
            'name': self._network.name,
            'shared': False,
            'port_security_enabled': False,
        })
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, data)
class TestCreateNetworkIdentityV2(TestNetwork):
    """Tests for ``network create`` when the identity v2 client is in use."""
    project = identity_fakes_v2.FakeProject.create_one_project()
    # The new network created.
    _network = network_fakes.FakeNetwork.create_one_network(
        attrs={'tenant_id': project.id}
    )
    # Expected display columns; ``data`` below is ordered to match.
    columns = (
        'admin_state_up',
        'availability_zone_hints',
        'availability_zones',
        'description',
        'id',
        'ipv4_address_scope',
        'ipv6_address_scope',
        'is_default',
        'name',
        'port_security_enabled',
        'project_id',
        'provider:network_type',
        'provider:physical_network',
        'provider:segmentation_id',
        'qos_policy_id',
        'router:external',
        'shared',
        'status',
        'subnets',
        'tags',
    )
    data = (
        network._format_admin_state(_network.admin_state_up),
        utils.format_list(_network.availability_zone_hints),
        utils.format_list(_network.availability_zones),
        _network.description,
        _network.id,
        _network.ipv4_address_scope_id,
        _network.ipv6_address_scope_id,
        _network.is_default,
        _network.name,
        _network.is_port_security_enabled,
        _network.project_id,
        _network.provider_network_type,
        _network.provider_physical_network,
        _network.provider_segmentation_id,
        _network.qos_policy_id,
        network._format_router_external(_network.is_router_external),
        _network.shared,
        _network.status,
        utils.format_list(_network.subnets),
        utils.format_list(_network.tags),
    )
    def setUp(self):
        super(TestCreateNetworkIdentityV2, self).setUp()
        self.network.create_network = mock.Mock(return_value=self._network)
        # Get the command object to test
        self.cmd = network.CreateNetwork(self.app, self.namespace)
        # Set identity client v2. And get a shortcut to Identity client.
        identity_client = identity_fakes_v2.FakeIdentityv2Client(
            endpoint=fakes.AUTH_URL,
            token=fakes.AUTH_TOKEN,
        )
        self.app.client_manager.identity = identity_client
        self.identity = self.app.client_manager.identity
        # Get a shortcut to the ProjectManager Mock
        self.projects_mock = self.identity.tenants
        self.projects_mock.get.return_value = self.project
        # There is no DomainManager Mock in fake identity v2.
    def test_create_with_project_identityv2(self):
        """--project resolves through the v2 tenants manager."""
        arglist = [
            "--project", self.project.name,
            self._network.name,
        ]
        verifylist = [
            ('enable', True),
            ('share', None),
            ('name', self._network.name),
            ('project', self.project.name),
            ('external', False),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.network.create_network.assert_called_once_with(**{
            'admin_state_up': True,
            'name': self._network.name,
            # TODO(dtroyer): Remove tenant_id when we clean up the SDK refactor
            'tenant_id': self.project.id,
            'project_id': self.project.id,
        })
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, data)
    def test_create_with_domain_identityv2(self):
        """--project-domain is not supported under identity v2:
        identity v2 has no domains, so take_action raises AttributeError.
        """
        arglist = [
            "--project", self.project.name,
            "--project-domain", "domain-name",
            self._network.name,
        ]
        verifylist = [
            ('enable', True),
            ('share', None),
            ('project', self.project.name),
            ('project_domain', "domain-name"),
            ('name', self._network.name),
            ('external', False),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.assertRaises(
            AttributeError,
            self.cmd.take_action,
            parsed_args,
        )
class TestDeleteNetwork(TestNetwork):
    """Tests for the ``network delete`` command."""
    def setUp(self):
        super(TestDeleteNetwork, self).setUp()
        # Three fake networks that the command will delete
        self._networks = network_fakes.FakeNetwork.create_networks(count=3)
        self.network.delete_network = mock.Mock(return_value=None)
        self.network.find_network = network_fakes.FakeNetwork.get_networks(
            networks=self._networks)
        # Command object under test
        self.cmd = network.DeleteNetwork(self.app, self.namespace)
    def test_delete_one_network(self):
        """Deleting a single network by name calls delete exactly once."""
        arglist = [self._networks[0].name]
        verifylist = [('network', [self._networks[0].name])]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        self.network.delete_network.assert_called_once_with(self._networks[0])
        self.assertIsNone(result)
    def test_delete_multiple_networks(self):
        """Every network named on the command line is deleted."""
        arglist = [net.id for net in self._networks]
        verifylist = [('network', arglist)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        expected_calls = [call(net) for net in self._networks]
        self.network.delete_network.assert_has_calls(expected_calls)
        self.assertIsNone(result)
    def test_delete_multiple_networks_exception(self):
        """A lookup failure raises CommandError but other deletes proceed."""
        arglist = [
            self._networks[0].id,
            'xxxx-yyyy-zzzz',
            self._networks[1].id,
        ]
        verifylist = [('network', arglist)]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        # Fake exception in find_network(): the bogus middle name fails
        self.network.find_network = mock.Mock(side_effect=[
            self._networks[0],
            exceptions.NotFound('404'),
            self._networks[1],
        ])
        # Fake exception in delete_network()
        self.network.delete_network = mock.Mock(side_effect=[
            None,
            exceptions.NotFound('404'),
        ])
        self.assertRaises(exceptions.CommandError, self.cmd.take_action,
                          parsed_args)
        # The second call of find_network() should fail. So delete_network()
        # was only called twice.
        self.network.delete_network.assert_has_calls([
            call(self._networks[0]),
            call(self._networks[1]),
        ])
class TestListNetwork(TestNetwork):
    """Tests for the ``network list`` command and its filter options."""
    # The networks going to be listed up.
    _network = network_fakes.FakeNetwork.create_networks(count=3)
    # Default (short) listing columns
    columns = (
        'ID',
        'Name',
        'Subnets',
    )
    # Columns shown when --long is given
    columns_long = (
        'ID',
        'Name',
        'Status',
        'Project',
        'State',
        'Shared',
        'Subnets',
        'Network Type',
        'Router Type',
        'Availability Zones',
    )
    # Expected rows, built at class-definition time to mirror the fakes.
    data = []
    for net in _network:
        data.append((
            net.id,
            net.name,
            utils.format_list(net.subnets),
        ))
    data_long = []
    for net in _network:
        data_long.append((
            net.id,
            net.name,
            net.status,
            net.project_id,
            network._format_admin_state(net.admin_state_up),
            net.shared,
            utils.format_list(net.subnets),
            net.provider_network_type,
            network._format_router_external(net.is_router_external),
            utils.format_list(net.availability_zones),
        ))
    def setUp(self):
        super(TestListNetwork, self).setUp()
        # Get the command object to test
        self.cmd = network.ListNetwork(self.app, self.namespace)
        self.network.networks = mock.Mock(return_value=self._network)
        # Fake DHCP agent used by the --agent test below
        self._agent = \
            network_fakes.FakeNetworkAgent.create_one_network_agent()
        self.network.get_agent = mock.Mock(return_value=self._agent)
        self.network.dhcp_agent_hosting_networks = mock.Mock(
            return_value=self._network)
    def test_network_list_no_options(self):
        """No filters: networks() is called with no keyword arguments."""
        arglist = []
        verifylist = [
            ('external', False),
            ('long', False),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        # In base command class Lister in cliff, abstract method take_action()
        # returns a tuple containing the column names and an iterable
        # containing the data to be listed.
        columns, data = self.cmd.take_action(parsed_args)
        self.network.networks.assert_called_once_with()
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, list(data))
    def test_list_external(self):
        """--external sets both the REST and SDK spellings of the filter."""
        arglist = [
            '--external',
        ]
        verifylist = [
            ('external', True),
            ('long', False),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        # In base command class Lister in cliff, abstract method take_action()
        # returns a tuple containing the column names and an iterable
        # containing the data to be listed.
        columns, data = self.cmd.take_action(parsed_args)
        self.network.networks.assert_called_once_with(
            **{'router:external': True, 'is_router_external': True}
        )
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, list(data))
    def test_list_internal(self):
        """--internal is the negation of --external."""
        arglist = [
            '--internal',
        ]
        verifylist = [
            ('internal', True),
            ('long', False),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.network.networks.assert_called_once_with(
            **{'router:external': False, 'is_router_external': False}
        )
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, list(data))
    def test_network_list_long(self):
        """--long switches to the extended column set."""
        arglist = [
            '--long',
        ]
        verifylist = [
            ('long', True),
            ('external', False),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        # In base command class Lister in cliff, abstract method take_action()
        # returns a tuple containing the column names and an iterable
        # containing the data to be listed.
        columns, data = self.cmd.take_action(parsed_args)
        self.network.networks.assert_called_once_with()
        self.assertEqual(self.columns_long, columns)
        self.assertEqual(self.data_long, list(data))
    def test_list_name(self):
        """--name is passed straight through as a server-side filter."""
        test_name = "fakename"
        arglist = [
            '--name', test_name,
        ]
        verifylist = [
            ('external', False),
            ('long', False),
            ('name', test_name),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.network.networks.assert_called_once_with(
            **{'name': test_name}
        )
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, list(data))
    def test_network_list_enable(self):
        """--enable filters on admin state up (both filter spellings)."""
        arglist = [
            '--enable',
        ]
        verifylist = [
            ('long', False),
            ('external', False),
            ('enable', True),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.network.networks.assert_called_once_with(
            **{'admin_state_up': True, 'is_admin_state_up': True}
        )
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, list(data))
    def test_network_list_disable(self):
        """--disable filters on admin state down."""
        arglist = [
            '--disable',
        ]
        verifylist = [
            ('long', False),
            ('external', False),
            ('disable', True)
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.network.networks.assert_called_once_with(
            **{'admin_state_up': False, 'is_admin_state_up': False}
        )
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, list(data))
    def test_network_list_project(self):
        """--project resolves to a project id filter."""
        project = identity_fakes_v3.FakeProject.create_one_project()
        self.projects_mock.get.return_value = project
        arglist = [
            '--project', project.id,
        ]
        verifylist = [
            ('project', project.id),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.network.networks.assert_called_once_with(
            **{'tenant_id': project.id, 'project_id': project.id}
        )
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, list(data))
    def test_network_list_project_domain(self):
        """--project-domain only scopes the project lookup; the filter
        sent to networks() is unchanged."""
        project = identity_fakes_v3.FakeProject.create_one_project()
        self.projects_mock.get.return_value = project
        arglist = [
            '--project', project.id,
            '--project-domain', project.domain_id,
        ]
        verifylist = [
            ('project', project.id),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        filters = {'tenant_id': project.id, 'project_id': project.id}
        self.network.networks.assert_called_once_with(**filters)
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, list(data))
    def test_network_list_share(self):
        """--share filters on shared networks."""
        arglist = [
            '--share',
        ]
        verifylist = [
            ('long', False),
            ('share', True),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.network.networks.assert_called_once_with(
            **{'shared': True, 'is_shared': True}
        )
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, list(data))
    def test_network_list_no_share(self):
        """--no-share filters on non-shared networks."""
        arglist = [
            '--no-share',
        ]
        verifylist = [
            ('long', False),
            ('no_share', True),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.network.networks.assert_called_once_with(
            **{'shared': False, 'is_shared': False}
        )
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, list(data))
    def test_network_list_status(self):
        """--status passes any of the valid states through as a filter."""
        choices = ['ACTIVE', 'BUILD', 'DOWN', 'ERROR']
        test_status = random.choice(choices)
        arglist = [
            '--status', test_status,
        ]
        verifylist = [
            ('long', False),
            ('status', test_status),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.network.networks.assert_called_once_with(
            **{'status': test_status}
        )
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, list(data))
    def test_network_list_provider_network_type(self):
        """--provider-network-type sets both filter spellings."""
        network_type = self._network[0].provider_network_type
        arglist = [
            '--provider-network-type', network_type,
        ]
        verifylist = [
            ('provider_network_type', network_type),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.network.networks.assert_called_once_with(
            **{'provider:network_type': network_type,
               'provider_network_type': network_type}
        )
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, list(data))
    def test_network_list_provider_physical_network(self):
        """--provider-physical-network sets both filter spellings."""
        physical_network = self._network[0].provider_physical_network
        arglist = [
            '--provider-physical-network', physical_network,
        ]
        verifylist = [
            ('physical_network', physical_network),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.network.networks.assert_called_once_with(
            **{'provider:physical_network': physical_network,
               'provider_physical_network': physical_network}
        )
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, list(data))
    def test_network_list_provider_segment(self):
        """--provider-segment sets both filter spellings."""
        segmentation_id = self._network[0].provider_segmentation_id
        arglist = [
            '--provider-segment', segmentation_id,
        ]
        verifylist = [
            ('segmentation_id', segmentation_id),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.network.networks.assert_called_once_with(
            **{'provider:segmentation_id': segmentation_id,
               'provider_segmentation_id': segmentation_id}
        )
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, list(data))
    def test_network_list_dhcp_agent(self):
        """--agent lists the networks hosted by a specific DHCP agent."""
        arglist = [
            '--agent', self._agent.id
        ]
        verifylist = [
            ('agent_id', self._agent.id),
        ]
        # NOTE(review): a one-element set splatted as the positional
        # argument — works, but a plain tuple would be clearer; confirm
        # before changing.
        attrs = {self._agent, }
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.network.dhcp_agent_hosting_networks.assert_called_once_with(
            *attrs)
        self.assertEqual(self.columns, columns)
        self.assertEqual(list(data), list(self.data))
class TestSetNetwork(TestNetwork):
    """Tests for the ``network set`` command."""
    # The network to set.
    _network = network_fakes.FakeNetwork.create_one_network()
    # QoS policy whose id matches the fake network's qos_policy_id
    qos_policy = (network_fakes.FakeNetworkQosPolicy.
        create_one_qos_policy(attrs={'id': _network.qos_policy_id}))
    def setUp(self):
        super(TestSetNetwork, self).setUp()
        self.network.update_network = mock.Mock(return_value=None)
        self.network.find_network = mock.Mock(return_value=self._network)
        self.network.find_qos_policy = mock.Mock(return_value=self.qos_policy)
        # Get the command object to test
        self.cmd = network.SetNetwork(self.app, self.namespace)
    def test_set_this(self):
        """All of the positive/enable options in one update call."""
        arglist = [
            self._network.name,
            '--enable',
            '--name', 'noob',
            '--share',
            '--description', self._network.description,
            '--external',
            '--default',
            '--provider-network-type', 'vlan',
            '--provider-physical-network', 'physnet1',
            '--provider-segment', '400',
            '--enable-port-security',
            '--qos-policy', self.qos_policy.name,
        ]
        verifylist = [
            ('network', self._network.name),
            ('enable', True),
            ('description', self._network.description),
            ('name', 'noob'),
            ('share', True),
            ('external', True),
            ('default', True),
            ('provider_network_type', 'vlan'),
            ('physical_network', 'physnet1'),
            ('segmentation_id', '400'),
            ('enable_port_security', True),
            ('qos_policy', self.qos_policy.name),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        attrs = {
            'name': 'noob',
            'admin_state_up': True,
            'description': self._network.description,
            'shared': True,
            'router:external': True,
            'is_default': True,
            'provider:network_type': 'vlan',
            'provider:physical_network': 'physnet1',
            'provider:segmentation_id': '400',
            'port_security_enabled': True,
            'qos_policy_id': self.qos_policy.id,
        }
        self.network.update_network.assert_called_once_with(
            self._network, **attrs)
        self.assertIsNone(result)
    def test_set_that(self):
        """All of the negative/disable options in one update call."""
        arglist = [
            self._network.name,
            '--disable',
            '--no-share',
            '--internal',
            '--disable-port-security',
            '--no-qos-policy',
        ]
        verifylist = [
            ('network', self._network.name),
            ('disable', True),
            ('no_share', True),
            ('internal', True),
            ('disable_port_security', True),
            ('no_qos_policy', True),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        attrs = {
            'admin_state_up': False,
            'shared': False,
            'router:external': False,
            'port_security_enabled': False,
            'qos_policy_id': None,
        }
        self.network.update_network.assert_called_once_with(
            self._network, **attrs)
        self.assertIsNone(result)
    def test_set_nothing(self):
        """No options: update is still called, with an empty attr dict."""
        arglist = [self._network.name, ]
        verifylist = [('network', self._network.name), ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        attrs = {}
        self.network.update_network.assert_called_once_with(
            self._network, **attrs)
        self.assertIsNone(result)
class TestShowNetwork(TestNetwork):
    """Tests for the ``network show`` command."""
    # The network to show.
    _network = network_fakes.FakeNetwork.create_one_network()
    # Expected display columns; ``data`` below is ordered to match.
    columns = (
        'admin_state_up',
        'availability_zone_hints',
        'availability_zones',
        'description',
        'id',
        'ipv4_address_scope',
        'ipv6_address_scope',
        'is_default',
        'name',
        'port_security_enabled',
        'project_id',
        'provider:network_type',
        'provider:physical_network',
        'provider:segmentation_id',
        'qos_policy_id',
        'router:external',
        'shared',
        'status',
        'subnets',
        'tags',
    )
    data = (
        network._format_admin_state(_network.admin_state_up),
        utils.format_list(_network.availability_zone_hints),
        utils.format_list(_network.availability_zones),
        _network.description,
        _network.id,
        _network.ipv4_address_scope_id,
        _network.ipv6_address_scope_id,
        _network.is_default,
        _network.name,
        _network.is_port_security_enabled,
        _network.project_id,
        _network.provider_network_type,
        _network.provider_physical_network,
        _network.provider_segmentation_id,
        _network.qos_policy_id,
        network._format_router_external(_network.is_router_external),
        _network.shared,
        _network.status,
        utils.format_list(_network.subnets),
        utils.format_list(_network.tags),
    )
    def setUp(self):
        super(TestShowNetwork, self).setUp()
        self.network.find_network = mock.Mock(return_value=self._network)
        # Get the command object to test
        self.cmd = network.ShowNetwork(self.app, self.namespace)
    def test_show_no_options(self):
        """A network argument is required; no arguments must fail parsing."""
        arglist = []
        verifylist = []
        self.assertRaises(tests_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)
    def test_show_all_options(self):
        """Showing by name looks the network up strictly and formats it."""
        arglist = [
            self._network.name,
        ]
        verifylist = [
            ('network', self._network.name),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.network.find_network.assert_called_once_with(
            self._network.name, ignore_missing=False)
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, data)
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from parlai.core.params import ParlaiParser
from parlai.mturk.tasks.light.light_chats.worlds import (
LightChatOnboardingWorld, LightChatTaskWorld
)
import parlai.mturk.core.mturk_utils as mturk_utils
import parlai.mturk.tasks.light.light_chats.graph as graph
from parlai.mturk.core.mturk_manager import MTurkManager
from parlai.mturk.tasks.light.light_chats.task_config import task_config
from parlai.tasks.light_dialog.build import download as download_light
import os
import random
import pickle
class GraphGenerator(object):
    """Builds populated LIGHT graph worlds from the environment database.

    Loads the pickled LIGHT environment, splits its rooms into the
    "seen" and "unseen" sets by room id, and produces a populated
    ``graph.Graph`` (room + objects + characters) each time
    ``get_room`` is called.
    """

    # Rooms with ids below this cutoff belong to the "seen" split.
    SEEN_ROOM_ID_CUTOFF = 703

    def __init__(self, opt, use_seen):
        """Load the environment db and select a room split.

        :param opt: ParlAI opt dict; must contain 'datapath'.
        :param use_seen: if True, use rooms from the seen split
            (room_id < SEEN_ROOM_ID_CUTOFF); otherwise the held-out set.
        """
        download_light(opt)
        self.opt = opt
        self.use_seen = use_seen
        dpath = os.path.join(opt['datapath'], 'light_dialogue')
        env_file = os.path.join(dpath, 'light_environment.pkl')
        with open(env_file, 'rb') as picklefile:
            self.db = pickle.load(picklefile)
        # Split rooms into seen and unseen by the fixed room-id cutoff
        cutoff = self.SEEN_ROOM_ID_CUTOFF
        seen_rooms = {i: r
                      for i, r in self.db['rooms'].items()
                      if int(r['room_id']) < cutoff}
        unseen_rooms = {i: r
                        for i, r in self.db['rooms'].items()
                        if int(r['room_id']) >= cutoff}
        self.rooms = seen_rooms if use_seen else unseen_rooms
        # Stamp each room/char with its db key so later lookups can
        # detect entries that were filtered out (char.get('id') is None).
        for i, room in self.rooms.items():
            room['id'] = i
        # only use annotated characters
        self.chars = {i: c
                      for i, c in self.db['characters'].items()
                      if 'desc' in c}
        for i, char in self.chars.items():
            char['id'] = i
        self.rooms_list = list(self.rooms.values())
        self.chars_list = list(self.chars.values())
        self.room_idx = 3  # Arbitrary start index for what room to load

    @staticmethod
    def _display_name(entry):
        """Display name for a db object/character entry.

        Uses the entry's name directly, or a random base form when the
        entry is marked plural.
        """
        if entry['is_plural'] == 0:
            return entry['name']
        return random.choice(entry['base_form'])

    def props_from_obj(self, obj):
        """Translate a db object entry into graph-node properties.

        :param obj: db object dict with 'descriptions' and the is_*
            annotation scores.
        :returns: props dict including a 'classes' list of node classes.
        """
        use_classes = ['object']
        props = {
            'object': True,
            'size': 1,
            'food_energy': 0,
            'value': 1,
            'desc': random.choice(obj['descriptions'])
        }
        if obj['is_surface'] > 0.5:
            props['container'] = True
            props['contain_size'] = 3
            props['surface_type'] = 'on'
            use_classes.append('container')
        if obj['is_container'] > 0.5:
            # NOTE(review): identical to the surface branch above — a
            # pure container also gets surface_type 'on' (presumably
            # 'in' was intended) and an object that is both gets
            # 'container' listed twice. Behavior kept as-is; confirm
            # intent before changing.
            props['container'] = True
            props['contain_size'] = 3
            props['surface_type'] = 'on'
            use_classes.append('container')
        if obj['is_drink'] > 0.5:
            props['drink'] = True
            use_classes.append('drink')
        if obj['is_food'] > 0.5:
            props['food'] = True
            use_classes.append('food')
        if obj['is_gettable'] < 0.33:
            use_classes.append('not_gettable')
        if obj['is_wearable'] > 0.5:
            props['wearable'] = True
            props['stats'] = {
                'attack': 1
            }
            use_classes.append('wearable')
        if obj['is_weapon'] > 0.5:
            props['weapon'] = True
            props['stats'] = {
                'attack': 1
            }
            use_classes.append('weapon')
        props['classes'] = use_classes
        return props

    def props_from_char(self, char):
        """Translate a db character entry into graph-node properties.

        :param char: db character dict with 'char_type' and 'personas'.
        :returns: props dict for an agent node.
        """
        use_classes = ['agent']
        props = {
            'agent': True,
            'size': 20,
            'contain_size': 20,
            'health': 10,
            'food_energy': 1,
            'aggression': 0,
            'speed': 5,
            'char_type': char['char_type'],
            'desc': random.choice(char['personas']),
        }
        props['classes'] = use_classes
        return props

    def _add_objects(self, g, dest_id, item_ids, added_objs,
                     max_words=5, drop=True, equipped=None):
        """Add db objects to the node ``dest_id``, with filtering.

        When ``drop`` is True each object is randomly skipped half the
        time. Objects whose name exceeds ``max_words`` words, or whose
        lowercased name is already in ``added_objs``, are skipped.
        ``equipped`` ('wear'/'wield') optionally sets the node's
        'equipped' prop after placement.
        """
        for item_id in item_ids:
            if drop and random.random() > 0.5:
                continue
            obj = self.db['objects'][item_id]
            use_desc = self._display_name(obj)
            if len(use_desc.split(' ')) > max_words:
                continue  # Skip really long objects
            if use_desc.lower() in added_objs:
                continue  # Already placed an object with this name
            added_objs.append(use_desc.lower())
            obj_id = g.add_node(use_desc, self.props_from_obj(obj))
            g.move_object(obj_id, dest_id)
            if equipped is not None:
                g.set_prop(obj_id, 'equipped', equipped)

    def get_room(self):
        """Build the next room world, cycling through the room list.

        :returns: (graph, room_dict, player_characters), where
            player_characters is a list of (display_name, char_dict)
            tuples for the two worker-controlled characters.
        """
        room = self.rooms_list[self.room_idx % len(self.rooms_list)]
        self.room_idx += 1
        g = graph.Graph(self.opt)
        # (the original dict literal repeated the 'room': True key; the
        # duplicate has been removed — same resulting dict)
        room_gid = g.add_node(
            room['setting'],
            {
                'room': True,
                'desc': room['description'],
                'extra_desc': room['background'],
                'contain_size': 2000,  # TODO turk room sizes
                'name_prefix': "the",
                'surface_type': "in",
                'classes': {'room'},
            }
        )
        # Add items to the graph: "ex" objects are sampled, "in" objects
        # (mentioned in the room description) are always attempted.
        added_objs = []
        self._add_objects(g, room_gid, room['ex_objects'], added_objs)
        self._add_objects(g, room_gid, room['in_objects'], added_objs,
                          max_words=8, drop=False)
        # Add characters to the graph
        create_characters = []
        in_characters = []
        used_descs = []
        for char_id in room['ex_characters']:
            char = self.db['characters'][char_id]
            if char.get('id') is None:
                continue  # character was filtered out of the annotated set
            ignore_chance = 0.5
            if char.get('char_type') == 'object':
                # heavily downrank objects
                ignore_chance = 0.95
            if random.random() < ignore_chance:
                continue
            use_desc = self._display_name(char).lower()
            if use_desc in used_descs:
                continue
            used_descs.append(use_desc)
            # (tuples used consistently; the original mixed list/tuple)
            create_characters.append((use_desc, char))
        for char_id in room['in_characters']:
            char = self.db['characters'][char_id]
            if char.get('id') is None:
                continue
            if char.get('char_type') == 'object':
                if random.random() < 0.95:
                    continue  # highly downrank objects
            use_desc = self._display_name(char).lower()
            if use_desc in used_descs:
                continue
            used_descs.append(use_desc)
            in_characters.append(use_desc)
            create_characters.append((use_desc, char))
        # Backfill with random db characters until two players are possible
        while len(create_characters) < 2:
            char = random.choice(self.chars_list)
            use_desc = self._display_name(char).lower()
            if use_desc in used_descs:
                continue
            used_descs.append(use_desc)
            create_characters.append((use_desc, char))
        random.shuffle(create_characters)
        player_characters = create_characters[:2]
        npc_characters = create_characters[2:]
        # Filter out characters that are in the room description
        # as they already exist in context
        npc_characters = [(ud, c) for (ud, c) in npc_characters
                          if ud not in in_characters]
        # only leave one npc character at most
        npc_characters = npc_characters[:1]
        # Add player characters to the world, along with their carried,
        # worn, and wielded items
        for use_desc, char in player_characters:
            g_id = g.add_node(use_desc, self.props_from_char(char),
                              is_player=True, uid=use_desc)
            g.move_object(g_id, room_gid)
            added_objs = []
            self._add_objects(g, g_id, char['carrying_objects'], added_objs)
            self._add_objects(g, g_id, char['wearing_objects'], added_objs,
                              equipped='wear')
            self._add_objects(g, g_id, char['wielding_objects'], added_objs,
                              equipped='wield')
        # add non player characters to the world
        for use_desc, char in npc_characters:
            g_id = g.add_node(use_desc, self.props_from_char(char),
                              uid=use_desc)
            g.move_object(g_id, room_gid)
        return g, room, player_characters
def main():
    '''Handles setting up and running a ParlAI-MTurk task by instantiating
    an MTurk manager and configuring it for the qa_data_collection task
    '''
    # Get relevant arguments
    argparser = ParlaiParser(False, False)
    argparser.add_parlai_data_path()
    argparser.add_mturk_args()
    argparser.add_argument(
        '--light-unseen-rooms', default=False, type='bool',
        help='Launch using rooms from the unseen set rather than the seen')
    opt = argparser.parse_args()
    # Graph generator builds the game world (room + characters) per conversation.
    generator = GraphGenerator(opt, opt['light_unseen_rooms'])
    # Set the task name to be the folder name
    opt['task'] = os.path.basename(os.path.dirname(os.path.abspath(__file__)))
    # append the contents of task_config.py to the configuration
    opt.update(task_config)
    # Select an agent_id that worker agents will be assigned in their world
    mturk_agent_roles = ['worker_1', 'worker_2']
    # Set runtime to be an hour in case workers are slow
    opt['assignment_duration_in_seconds'] = 60 * 60
    # Instantiate an MTurkManager with the given options and a maximum number
    # of agents per world of 1 (based on the length of mturk_agent_ids)
    mturk_manager = MTurkManager(
        opt=opt,
        mturk_agent_ids=mturk_agent_roles,
        use_db=True,
    )
    mturk_manager.setup_server(
        task_directory_path=os.path.dirname(os.path.abspath(__file__)))
    # Create an onboard_function, which will be run for workers who have
    # accepted your task and must be completed before they are put in the
    # queue for a task world.
    completed_agents = []

    def run_onboard(worker):
        # Workers who already passed onboarding skip it on reconnect.
        nonlocal completed_agents
        if worker.worker_id in completed_agents:
            return
        else:
            world = LightChatOnboardingWorld(opt=opt, mturk_agent=worker)
            while not world.episode_done():
                world.parley()
            world.shutdown()
            completed_agents.append(worker.worker_id)
            print(worker.worker_id, 'took', world.turns, 'turns for onboarding')
            return world.prep_save_data([worker])

    # If we want to use the above onboard function, we can replace the below
    # with set_onboard_function(onboard_function=run_onboard)
    mturk_manager.set_onboard_function(onboard_function=run_onboard)
    # Qualification used to block workers previously rejected from this task.
    qualification_id = \
        mturk_utils.find_qualification('adventure_chat_reject',
                                       opt['is_sandbox'],
                                       must_be_owned=False)
    print('Found qualification: ', qualification_id)
    try:
        # Initialize run information
        mturk_manager.start_new_run()
        # Set up the sockets and threads to recieve workers
        mturk_manager.ready_to_accept_workers()
        # Only workers WITHOUT the reject qualification may even preview the HIT.
        agent_qualifications = [{
            'QualificationTypeId': qualification_id,
            'Comparator': 'DoesNotExist',
            'RequiredToPreview': True
        }]
        # Create the hits as specified by command line arguments
        mturk_manager.create_hits(qualifications=agent_qualifications)

        # Check workers eligiblity acts as a filter, and should return
        # the list of all workers currently eligible to work on the task
        # Can be used to pair workers that meet certain criterea
        def check_workers_eligibility(workers):
            return workers

        eligibility_function = {
            'func': check_workers_eligibility,
            'multiple': True,
        }

        # Assign worker roles is used to determine what the role each worker
        # in the given worker list will play. Setting `id` to None will return
        # the worker to the pool rather than putting them in a given task,
        # which is useful for having tasks with different possible worker
        # counts.
        def assign_worker_roles(workers):
            workers[0].id = mturk_agent_roles[0]
            workers[1].id = mturk_agent_roles[1]

        # Define the task function, which will be run with workers that are
        # as the main task.
        # NOTE(review): declared global so the function is reachable by name
        # from manager internals (e.g. pickling) — confirm still required.
        global run_conversation

        def run_conversation(mturk_manager, opt, workers):
            # Create the task world; graph generation can fail, so retry
            # until a valid (graph, room, characters) triple is produced.
            g = None
            while g is None:
                try:
                    g, room, characters = generator.get_room()
                except Exception as e:
                    print('error when creating graph:', repr(e))
            world = LightChatTaskWorld(
                opt=opt,
                mturk_agents=workers,
                graph=g,
                room=room,
                characters=characters,
            )
            # run the world to completion
            while not world.episode_done():
                world.parley()
            # shutdown and review the work
            world.shutdown()
            world.review_work()
            # Return the contents for saving
            return world.prep_save_data(workers)

        # Begin the task, allowing mturk_manager to start running the task
        # world on any workers who connect
        mturk_manager.start_task(
            eligibility_function=eligibility_function,
            assign_role_function=assign_worker_roles,
            task_function=run_conversation
        )
    except BaseException:
        # Re-raise after the finally block cleans up outstanding HITs.
        raise
    finally:
        # Any hits that aren't claimed or completed have to be shut down. Must
        # keep the world running until that point.
        mturk_manager.expire_all_unassigned_hits()
        # Shutdown the manager and free all related resources
        mturk_manager.shutdown()


if __name__ == '__main__':
    main()
|
from enum import IntEnum, Enum
from typing import Dict, Optional
from enocean.protocol.constants import PACKET
from enocean.protocol.packet import RadioPacket
from src.common.eep import Eep
from src.common.switch_state import SwitchState
from src.enocean_packet_factory import EnoceanPacketFactory
from src.common.eep_prop_exception import EepPropException
from src.tools.enocean_tools import EnoceanTools
class Fsr61Command(IntEnum):
    """Command codes for the FSR61 actor (EEP A5-38-08)."""
    STATUS_REQUEST = 0  # query the actor's current state
    SWITCHING = 1       # switch the actor ON/OFF
class Fsr61Prop(Enum):
    """EEP property (shortcut) names used when packing/unpacking packets."""
    # common
    CMD = "CMD"  # command
    SW = "SW"  # Switching command ON/OFF [0=OFF, 1=ON] (offset=31 size=1)
    # switch
    DEL = "DEL"  # 0 = Duration (switch immediately...) 1 = Delay (offset=30, size=1)
    LCK = "LCK"  # Lock for duration time (if >0), unlimited or no time [Unlock=0, Lock=1] (offset=29, size=1)
    TIM = "TIM"  # Time in 1/10 seconds. 0 = no time specified (offset=8, size=16, unit=s)
class Fsr61Action:
    """Value object describing a command to send to (or decoded from) an
    FSR61 switch actor: the command itself, an optional switch state, the
    learn flag, and the sender/destination EnOcean addresses."""

    def __init__(self,
                 command: Fsr61Command = Fsr61Command.STATUS_REQUEST,
                 learn=False,
                 switch_state: SwitchState = None,
                 sender: int = None,
                 destination: int = None
                 ):
        self.command = command
        self.learn = learn
        self.switch_state = switch_state
        self.sender = sender
        self.destination = destination

    def __str__(self):
        # Collect only the attributes that carry information.
        fragments = []
        if self.command is not None:
            fragments.append(f"command={self.command.name}")
        if self.learn:
            fragments.append("learn!")
        if self.switch_state is not None:
            fragments.append(f"switch_state={self.switch_state}")
        return ",".join(fragments) if fragments else "<empty>"

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self})"
class Fsr61Eep:
    """
    Handles conversions for EEP A5-38-08 command 0 + 1 (status request + switching)
    """

    # Base EEP descriptor; cloned (and its `command` adjusted) per operation.
    EEP = Eep(
        rorg=0xa5,
        func=0x38,
        type=0x08,
        direction=None,
        command=0x00
    )

    @classmethod
    def create_packet(cls, action: Fsr61Action) -> RadioPacket:
        """Build a radio packet for *action*, discarding the props dict."""
        _, packet = cls.create_props_and_packet(action)
        return packet

    @classmethod
    def create_props_and_packet(cls, action: Fsr61Action) -> tuple:
        """Return a ``(props, packet)`` pair for *action*.

        Raises ValueError for commands other than STATUS_REQUEST/SWITCHING.
        (Return annotation corrected: this returns a tuple, not a bare
        RadioPacket.)
        """
        eep = cls.EEP.clone()
        # command is to be set as "COM"
        eep.command = action.command.value
        if action.command == Fsr61Command.STATUS_REQUEST:
            props = cls.get_props_for_status_request()
            # eep.command = 0
        elif action.command == Fsr61Command.SWITCHING:
            props = cls.get_props_for_switch(action)
        else:
            raise ValueError("wrong A5-38-08 command!")
        packet = EnoceanPacketFactory.create_packet(
            eep=eep, destination=action.destination, sender=action.sender, learn=action.learn, **props
        )
        return props, packet

    @classmethod
    def extract_packet(cls, packet: RadioPacket, command: Optional[Fsr61Command] = None) -> Fsr61Action:
        """Decode *packet* into an Fsr61Action.

        *command* may be supplied when the caller already knows which command
        the packet carries; otherwise it is auto-detected from the packet.
        """
        props = cls.get_props_from_packet(packet, command)
        action = cls.get_action_from_props(props)
        return action

    @classmethod
    def get_props_for_status_request(cls):
        """Props dict for a status-request telegram (command only)."""
        return {
            Fsr61Prop.CMD.value: Fsr61Command.STATUS_REQUEST.value,
        }

    @classmethod
    def get_props_for_switch(cls, action: Fsr61Action):
        """Props dict for a switching telegram; raises ValueError unless
        action.switch_state is ON or OFF."""
        # SW 0: OFF, 1: ON
        props = {
            Fsr61Prop.CMD.value: Fsr61Command.SWITCHING.value,
        }
        if action.switch_state == SwitchState.ON:
            props[Fsr61Prop.SW.value] = 1
        elif action.switch_state == SwitchState.OFF:
            props[Fsr61Prop.SW.value] = 0
        else:
            raise ValueError("Wrong SwitchState!")
        return props

    @classmethod
    def can_read_packet(cls, packet) -> bool:
        """True if *packet* is a radio packet with this EEP's RORG."""
        return packet.packet_type == PACKET.RADIO and packet.rorg == cls.EEP.rorg

    @classmethod
    def get_props_from_packet(cls, packet, command: Optional[Fsr61Command] = None) -> Dict[str, int]:
        """Extract the EEP properties from *packet*.

        When *command* is None a two-pass decode is used: first parse with
        command 0 just to read the CMD field, then re-parse with the real
        command so the command-specific fields are mapped correctly.
        Returns {} for non-matching packets.
        """
        eep = cls.EEP.clone()
        if packet.packet_type == PACKET.RADIO and packet.rorg == eep.rorg:
            if command is None:
                eep.command = 0
                try:
                    tmp_props = EnoceanTools.extract_props(packet=packet, eep=eep)
                    eep.command = tmp_props[Fsr61Prop.CMD.value]
                # NOTE(review): consider `raise ... from ex` to keep the chain.
                except AttributeError as ex:
                    raise EepPropException(ex)
            else:
                eep.command = command.value
            try:
                props = EnoceanTools.extract_props(packet=packet, eep=eep)
            except AttributeError as ex:
                raise EepPropException(ex)
        else:
            props = {}
        return props

    @classmethod
    def get_action_from_props(cls, props: Dict[str, int]) -> Fsr61Action:
        """Convert a props dict back into an Fsr61Action.

        Unknown SW values map to SwitchState.ERROR; an unknown CMD leaves
        the action's default command untouched.
        """
        # COM 0x01: Switching, 0x00: Status Request (*)
        # SW 0: OFF, 1: ON
        action = Fsr61Action()

        prop = props.get(Fsr61Prop.CMD.value)
        for command in Fsr61Command:
            if prop == command.value:
                action.command = command
                break

        if action.command == Fsr61Command.SWITCHING:
            prop = props.get(Fsr61Prop.SW.value)
            if prop == 0:
                action.switch_state = SwitchState.OFF
            elif prop == 1:
                action.switch_state = SwitchState.ON
            else:
                action.switch_state = SwitchState.ERROR

        return action
|
import unittest
from ..lib.data import register_data_generator, DataGenerator, data_generator_map
from ..lib.iterator import ConfigIterator, register_config_iterator, config_iterator_map
from ..lib.operator import (
register_operator,
register_operators,
OperatorInterface,
op_map,
)
class TestRegister(unittest.TestCase):
    """Registration helpers must add entries to their maps exactly once and
    raise ValueError on duplicate names.

    Uses assertIn/assertRaises (instead of assertTrue(x in y)) so failures
    report the missing key and map contents.
    """

    def test_register_config_iterator(self):
        """Registering a ConfigIterator subclass adds it; duplicates raise."""
        class TestConfigIterator(ConfigIterator):
            pass

        name = "__TestConfigIterator__"
        register_config_iterator(name, TestConfigIterator)
        self.assertIn(name, config_iterator_map)
        self.assertRaises(
            ValueError, register_config_iterator, name, TestConfigIterator
        )

    def test_register_data_generator(self):
        """Registering a DataGenerator subclass adds it; duplicates raise."""
        class TestDataGenerator(DataGenerator):
            pass

        name = "__TestDataGenerator__"
        register_data_generator(name, TestDataGenerator)
        self.assertIn(name, data_generator_map)
        self.assertRaises(ValueError, register_data_generator, name, TestDataGenerator)

    def test_register_operator(self):
        """Single and bulk operator registration add entries; any duplicate
        in a bulk registration raises."""
        class TestOperator(OperatorInterface):
            pass

        name = "__TestOperator__"
        register_operator(name, TestOperator)
        self.assertIn(name, op_map)
        self.assertRaises(ValueError, register_operator, name, TestOperator)

        name_1 = "__TestOperator_1__"
        name_2 = "__TestOperator_2__"
        register_operators({name_1: TestOperator, name_2: TestOperator})
        self.assertIn(name_1, op_map)
        self.assertIn(name_2, op_map)
        self.assertRaises(ValueError, register_operators, {name_1: TestOperator})
        self.assertRaises(ValueError, register_operators, {name_2: TestOperator})
        self.assertRaises(
            ValueError, register_operators, {name_1: TestOperator, name_2: TestOperator}
        )
# Allow running this test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()
|
import os
import urllib.parse as urlparse
from cgitb import text
import string
from flask import Flask, render_template, request, Response, redirect, url_for, abort
import psycopg2
from psycopg2 import pool
import toml
import math
from models import Page
from psycopg2.extras import DictCursor
from jinja2 import Template
app = Flask(__name__)
config = toml.load("config.toml")

# Database credentials: prefer the DATABASE_URL environment variable
# (platform-provided, e.g. Heroku); fall back to config.toml values.
try:
    DATABASE_URL = os.environ['DATABASE_URL']
    url = urlparse.urlparse(DATABASE_URL)
    dbname = url.path[1:]  # drop the leading '/'
    user = url.username
    password = url.password
    host = url.hostname
    port = url.port
except KeyError:
    dbname = config["database"]
    user = config["user"]
    password = config["password"]
    host = config["host"]
    port = config["port"]

# Shared pool (1..20 connections). DictCursor lets rows be indexed by
# column name, e.g. row["result"].
postgreSQL_pool = psycopg2.pool.SimpleConnectionPool(1, 20, user=user,
                                                     password=password,
                                                     host=host,
                                                     port=port,
                                                     database=dbname,
                                                     cursor_factory=DictCursor)
def fetchone(query, params):
    """Execute *query* with *params* and return the first row, or None.

    A connection is borrowed from the module pool and always returned.
    On failure the error and the query are printed, the transaction is
    rolled back, and None is returned (matching the original behaviour).

    Fix: in the original, the pool/cursor acquisition happened inside the
    try block, so if getconn()/cursor() raised, the finally clause hit a
    NameError on the unbound `cursor` variable, masking the real error.
    Acquisition now happens before the try; such failures propagate.
    """
    ps_connection = postgreSQL_pool.getconn()
    cursor = ps_connection.cursor()
    try:
        cursor.execute(query, params)
        return cursor.fetchone()
    except Exception as e:
        print(e)
        print(query)
        ps_connection.rollback()
        return None
    finally:
        cursor.close()
        postgreSQL_pool.putconn(ps_connection)
def fetchall(query, params):
    """Execute *query* with *params* and return all rows as a list.

    A connection is borrowed from the module pool and always returned.
    On failure the error and query are printed, the transaction is rolled
    back, and an empty list is returned.

    Fix: in the original, `results` was only assigned inside the try
    block, so any query error led to `UnboundLocalError: results` at the
    return statement instead of a clean empty result.
    """
    ps_connection = postgreSQL_pool.getconn()
    cursor = ps_connection.cursor()
    results = []  # defined up-front so the return is always valid
    try:
        cursor.execute(query, params)
        results = cursor.fetchall()
    except Exception as e:
        print(e)
        print(query)
        ps_connection.rollback()
    finally:
        cursor.close()
        postgreSQL_pool.putconn(ps_connection)
    return results
def fetchpage(query, page_number, params):
    """Run *query* as a paginated query and return a Page object.

    Step 1: wrap the query in count.sql.j2 to get the total row count.
    Step 2: wrap it in pageable.sql.j2 to fetch just the requested page.
    Step 3: return Page(rows, total_pages, current_page).
    """
    # 1. total row count via the count.sql.j2 wrapper
    with open('templates/sql/count.sql.j2', 'r') as fh:
        count_sql = Template(fh.read().rstrip()).render(query_text=query)
    total_rows = fetchone(count_sql, params)["result"]

    # 2. rows for the requested page via the pageable.sql.j2 wrapper
    with open('templates/sql/pageable.sql.j2', 'r') as fh:
        page_sql = Template(fh.read().rstrip()).render(
            query_text=query, page=page_number)
    rows = fetchall(page_sql, params)

    # 3. response object {rows, total_pages, current_page, next, prev}
    # TODO put in page size in params file
    return Page(rows, math.ceil(total_rows / 15), page_number)
@app.context_processor
def inject_stage_and_region():
    """Expose the DB helper functions to every Jinja template."""
    return {
        "fetchone": fetchone,
        "fetchall": fetchall,
        "fetchpage": fetchpage,
    }
@app.route('/', methods=['GET', 'POST'])
def index():
    """Landing page; the ?template= query arg picks the embedded section
    (defaults to "overview")."""
    section = request.args.get('template', default="overview")
    return render_template('index.html', template=section)
@app.route('/test', methods=['GET'])
def test():
    """Render a page template directly, bypassing the index wrapper.

    Query args: page (int, default 1), template (path under templates/pages,
    default overview/index.html.j2), frame.
    """
    page_no = request.args.get('page', default=1, type=int)
    tpl = request.args.get('template', default="overview/index.html.j2")
    frame = request.args.get('frame')
    return render_template("pages/" + tpl,
                           page=page_no, math=math, frame=frame)
@app.route('/<section>', methods=['GET'])
def test2(section):
    """Render index.html with <section> as the active template.

    Query args: page (int, default 1), table_name.

    Fix: removed `assert section == request.view_args['section']` — Flask
    passes the view arg directly so the assert was tautological, and
    asserts are stripped under `python -O` anyway.
    """
    page = request.args.get('page', default=1, type=int)
    table_name = request.args.get('table_name')
    val = render_template('index.html', template=section, page=page,
                          table_name=table_name)
    return Response(val, mimetype='text/html')
@app.route('/test_part/<section>', methods=['GET'])
def test_part(section):
    """Render just a section's partial template (pages/<section>/index.html.j2).

    Query args: page (int, default 1), table_name, frame.

    Fix: removed the tautological `assert section == request.view_args['section']`
    (always true; stripped under `python -O`).
    """
    page = request.args.get('page', default=1, type=int)
    table_name = request.args.get('table_name')
    template = section + "/index.html.j2"
    frame = request.args.get('frame')
    return render_template("pages/" + template, template=section, page=page,
                           math=math, frame=frame, table_name=table_name)
if __name__ == '__main__':
    # PORT is supplied by the hosting platform (e.g. Heroku); default 5000.
    port = int(os.environ.get("PORT", 5000))
    # NOTE(review): debug=True enables the interactive werkzeug debugger —
    # must be disabled for any non-development deployment.
    app.run(host='0.0.0.0', port=port, threaded=True, debug=True)
|
from myutils.utils import getConnection, cronlog
import pandas as pd
import numpy as np
import datetime
import requests
class TestRequest:
    """Minimal stand-in for a Django HttpRequest, used to drive views from
    cron scripts.

    Parses *url* into `path_info` and a `GET` dict; stores `method`,
    `META` (default {}), and `PUT` (the raw post data).

    Fixes:
    * get_full_path() previously returned the *global* name `url`, which is
      undefined here — a guaranteed NameError. The URL is now stored on the
      instance.
    * query values containing '=' (e.g. base64 padding) were truncated by
      split('='); split('=', 1) keeps everything after the first '='.
    """

    def __init__(self, url, method='GET', META=None, postdata=None):
        self.method = method
        self._url = url  # kept so get_full_path() can return the original URL
        u = url.split('?')
        self.path_info = u[0]
        self.META = META or {}
        self.GET = {}
        if len(u) > 1:
            for x in u[1].split('&'):
                # split only on the first '=' so values may themselves contain '='
                y = x.split('=', 1)
                if len(y) == 1:
                    self.GET[x] = ''  # bare flag parameter, e.g. "?debug"
                else:
                    self.GET[y[0]] = y[1]
        self.PUT = postdata

    def get_full_path(self):
        """Return the full URL this request was built from."""
        return self._url
# Shared DB connection/cursor from the project helper; used by all the
# one-off maintenance snippets below. Each snippet is gated behind
# `if False:` and was toggled on manually when needed.
conn, cur = getConnection()

if False:
    # (Re)create price_function: one linear fit (slope/intercept/r) per date.
    s = """
    DROP TABLE IF EXISTS price_function;
    CREATE TABLE price_function (
    id smallserial PRIMARY KEY
    , date DATE NOT NULL
    , slope FLOAT(8) NOT NULL
    , intercept FLOAT(8) NOT NULL
    , r FLOAT(8) NOT NULL
    , created_on TIMESTAMP NOT NULL
    );
    """
    cur.execute(s)
    conn.commit()

if False:
    # Seed price_function with two sample rows.
    s = """
    INSERT INTO price_function (date, slope, intercept, r, created_on)
    VALUES
    ('2020-07-05', 3, 2.8, 0.9, CURRENT_TIMESTAMP),
    ('2020-07-04', 2., 2.9, 0.7, CURRENT_TIMESTAMP);
    """
    cur.execute(s)
    conn.commit()

# Always runs: dump the current contents of price_function.
s = 'select * from price_function;'
cur.execute(s)
list_tables = cur.fetchall()
print(list_tables)
if False:
    # (Re)create price_forecast: half-hourly demand/solar/wind/price rows.
    conn.commit()
    s = """
    DROP TABLE IF EXISTS price_forecast;
    CREATE TABLE price_forecast (
    id serial PRIMARY KEY
    , datetime TIMESTAMP NOT NULL
    , demand Float(8) NOT NULL
    , solar Float(8) NOT NULL
    , wind Float(8) NOT NULL
    , price Float(4) NOT NULL
    , created_on TIMESTAMP NOT NULL
    );
    """
    cur.execute(s)
    conn.commit()

if False:
    # Throwaway table used to sanity-check DDL round-trips.
    s = """
    DROP TABLE IF EXISTS testing;
    CREATE TABLE testing (
    id serial PRIMARY KEY
    , created_on TIMESTAMP NOT NULL
    ); """
    cur.execute(s)
    conn.commit()

if False:
    # (Re)create sm_periods and populate it with every half-hour from
    # 2019-01-01 to 2020-10-01: UTC period string plus local
    # (Europe/London) date/time and the UTC offset in hours.
    conn.commit()
    s = """
    DROP TABLE IF EXISTS sm_periods;
    CREATE TABLE sm_periods (
    period_id serial PRIMARY KEY
    , period Char(16) not null
    , local_date Date not null
    , local_time char(5) not null
    , timezone_adj smallint not null
    );
    """
    cur.execute(s)
    conn.commit()
    df_idx = pd.date_range(datetime.datetime(2019,1,1), datetime.datetime(2020,10,1), freq='30min')
    df_idx_local = df_idx.tz_localize('UTC').tz_convert('Europe/London')
    df = pd.DataFrame(index=df_idx)
    df['period'] = df_idx.strftime('%Y-%m-%d %H:%M')
    df['local_date'] = df_idx_local.strftime('%Y-%m-%d')
    df['local_time'] = df_idx_local.strftime('%H:%M')
    # '%z' gives e.g. '+0100'; keep only the signed hour component.
    df['timezone_adj'] = df_idx_local.strftime('%z').str[0:3].astype(int)
    df.reset_index(inplace=True)
    start = """
    INSERT INTO sm_periods (period_id, period, local_date, local_time, timezone_adj)
    VALUES
    """
    s=""
    # Insert in batches of 1000 rows; the DataFrame index becomes period_id.
    for i, j in df.iterrows():
        s+= "({},'{}', '{}', '{}', {}),".format(i, j['period'], j['local_date'],j['local_time'], j['timezone_adj'])
        if (i+1)%1000==0:
            print('done: {}'.format(i+1))
            cur.execute(start + s[:-1] + ';')
            conn.commit()
            s=""
    # Flush the final partial batch.
    # NOTE(review): if the row count were an exact multiple of 1000 this
    # would execute an INSERT with an empty VALUES list and fail — confirm.
    print('done: {}'.format(i+1))
    cur.execute(start + s[:-1] + ';')
    conn.commit()
    s=""
if False:
    # (Re)create sm_accounts: one row per meter/account; hash identifies the
    # owner, first/last_period bound the stored readings.
    conn.commit()
    s = """
    DROP TABLE IF EXISTS sm_accounts;
    CREATE TABLE sm_accounts (
    account_id serial PRIMARY KEY
    , type_id smallint not null
    , first_period varChar(16) not null
    , last_period varChar(16) not null
    , last_updated TIMESTAMP not null
    , hash varChar(64) not null
    , region varChar(1)
    , source_id smallint not null
    );
    """
    cur.execute(s)
    conn.commit()

if False:
    # (Re)create sm_quantity: one half-hourly consumption value per
    # account per period.
    conn.commit()
    s = """
    DROP TABLE IF EXISTS sm_quantity;
    CREATE TABLE sm_quantity (
    id serial PRIMARY KEY
    , account_id integer not null
    , period_id integer not null
    , quantity float(8) not null
    );
    """
    cur.execute(s)
    conn.commit()

if False:
    # (Re)create sm_hh_variables: names of half-hourly series (tariffs,
    # profiles, CO2, ...).
    conn.commit()
    s = """
    DROP TABLE IF EXISTS sm_hh_variables;
    CREATE TABLE sm_hh_variables (
    var_id serial PRIMARY KEY
    , var_name varchar(32) not null
    , var_type varchar(32));
    """
    cur.execute(s)
    conn.commit()

if False:
    # (Re)create sm_d_variables: names of daily series.
    conn.commit()
    s = """
    DROP TABLE IF EXISTS sm_d_variables;
    CREATE TABLE sm_d_variables (
    var_id serial PRIMARY KEY
    , var_name varchar(32) not null
    , var_type varchar(32));
    """
    cur.execute(s)
    conn.commit()
if False: # Creates new hh tariff variables in sm_hh_variables and sm_tariffs
    product = 'AGILE-OUTGOING-19-05-13'
    type_id=2
    # Remove any previous variables/tariff rows for this product first.
    s = f"""
    delete from sm_hh_variables where var_name like '{product}%';
    delete from sm_tariffs where product='{product}';
    """
    cur.execute(s)
    conn.commit()
    # One hh variable plus one sm_tariffs row per DNO region letter.
    for region in ['A','B','C','D','E','F','G','H','J','K','L','M','N','P']:
        s = f"""
        INSERT INTO sm_hh_variables (var_name) values ('{product}-{region}');
        """
        cur.execute(s)
        conn.commit()
        s = f"select var_id from sm_hh_variables where var_name='{product}-{region}';"
        cur.execute(s)
        var_id = cur.fetchone()[0]
        conn.commit()
        s = f"""
        INSERT INTO sm_tariffs (type_id, product, region, granularity_id, var_id) values
        ({type_id}, '{product}', '{region}', 0, {var_id});
        """
        cur.execute(s)
        conn.commit()

# Start of the half-hourly history used by the backfill snippets ('YYYYMMDDHHMM').
START='201901010000'

if False: #Inserts initial prices into hh tariff variables
    import requests
    idx = pd.date_range(START, '202101010000', freq='30T')
    df = pd.DataFrame()
    df['timestamp'] = idx
    df = pd.DataFrame(idx, columns=['timestamp'])
    for region in ['B','C','D','E','F','G','H','J','K','L','M','N','P']:
        tariff = 'AGILE-OUTGOING-19-05-13'
        url = ('https://api.octopus.energy/v1/products/{}/' +
               'electricity-tariffs/E-1R-{}-{}/standard-unit-rates/' +
               '?period_from={}Z&period_to={}Z&page_size=15000')
        url = url.format(tariff, tariff, region,
                         df.timestamp.iloc[0].strftime('%Y-%m-%dT%H:%M'),
                         df.timestamp.iloc[-1].strftime('%Y-%m-%dT%H:%M'))
        r = requests.get(url)
        dfs = []
        dfs.append(pd.DataFrame(r.json()['results'])[['valid_from','value_exc_vat']])
        # Follow the API pagination; abort if it looks runaway.
        while r.json()['next'] is not None:
            r = requests.get(r.json()['next'])
            dfs.append(pd.DataFrame(r.json()['results'])[['valid_from','value_exc_vat']])
            if len(dfs)>30:
                raise Exception
        dfs = pd.concat(dfs)
        # valid_from is ISO with trailing 'Z'; strip it before parsing.
        dfs['timestamp'] = pd.DatetimeIndex(dfs.valid_from.str[:-1])
        dfs = df.merge(right=dfs, how='left', on='timestamp')
        dfs = dfs[dfs.value_exc_vat.notna()]
        s = f"select var_id from sm_hh_variables where var_name='{tariff}-{region}';"
        cur.execute(s)
        var_id = cur.fetchone()[0]
        conn.commit()
        print(f'{var_id} {tariff} {region}' )
        # Replace any existing values for this variable.
        s = """
        delete from sm_hh_variable_vals where var_id={};
        """
        s = s.format(var_id)
        cur.execute(s)
        conn.commit()
        s = """
        INSERT INTO sm_hh_variable_vals (var_id, period_id, value) values
        """
        s = s.format(var_id)  # no-op: the header contains no placeholders
        # NOTE(review): uses the merged DataFrame index as period_id — relies
        # on 1:1 alignment with sm_periods ids; confirm.
        for i, j in dfs.iterrows():
            s+= " ({}, {}, {}),".format(var_id, i, j.value_exc_vat)
        s = s[:-1] + ';'
        cur.execute(s)
        conn.commit()
if False:
    # (Re)create sm_hh_variable_vals: one value per variable per half-hour.
    conn.commit()
    s = """
    DROP TABLE IF EXISTS sm_hh_variable_vals;
    CREATE TABLE sm_hh_variable_vals (
    id serial primary key
    , var_id integer not null
    , period_id integer not null
    , value float(8) not null);
    """
    cur.execute(s)
    conn.commit()

if False:
    # (Re)create sm_d_variable_vals: one value per variable per local day.
    conn.commit()
    s = """
    DROP TABLE IF EXISTS sm_d_variable_vals;
    CREATE TABLE sm_d_variable_vals (
    id serial primary key
    , var_id integer not null
    , local_date date not null
    , value float(8) not null);
    """
    cur.execute(s)
    conn.commit()
# Project DB helper used by the remaining snippets (imported mid-file so
# the cursor-based snippets above stay unchanged).
from myutils.utils import loadDataFromDb

if False: #Creates daily tracker variables
    # One daily variable + sm_tariffs row (granularity 1 = daily) per region.
    product = 'SILVER-2017-1'
    for region in ['A','B','C','D','E','F','G','H','J','K','L','M','N','P']:
        s = f"""
        insert into sm_d_variables (var_name) values ('{product}-{region}') returning var_id; """
        var_id = loadDataFromDb(s)[0][0]
        print(var_id)
        s = f"""
        insert into sm_tariffs (product, region, var_id, type_id, granularity_id) values
        ('{product}', '{region}', {var_id}, 1, 1); """
        loadDataFromDb(s)

if False:
    # Backfill ~540 days of daily tracker prices from the Octopus API.
    product = 'SILVER-2017-1'
    for region in ['A','B','C','D','E','F','G','H','J','K','L','M','N','P']:
        # NOTE(review): queries table 'sm_variables' — every other snippet
        # uses 'sm_tariffs'; confirm the table name is intended.
        s = f"select var_id from sm_variables where product='{product}' and region='{region}' ;"
        var_id = loadDataFromDb(s)[0][0]
        r = requests.get(f'https://octopus.energy/api/v1/tracker/G-1R-SILVER-2017-1-{region}/daily/past/540/1/')
        dates = [x['date'] for x in r.json()['periods']]
        prices = [x['unit_rate'] for x in r.json()['periods']]
        d = pd.Series(prices, index=dates)
        # Keep only dates up to today, strip 5% VAT, round to pence.
        d = d[:datetime.date.today().strftime('%Y-%m-%d')]
        d = d/1.05
        d = d.round(2)
        s = 'insert into sm_d_variable_vals (var_id, local_date, value) values '
        for i, j in d.iteritems():
            s+= f"({var_id}, '{i}', {j}),"
        s = s[:-1]+';'
        loadDataFromDb(s)
        print(region)
if False:
    # Top up AGILE-18-02-21 half-hourly prices: find each variable's first
    # missing period, then fetch everything from there to end-2020.
    conn.commit()
    import requests
    idx = pd.date_range(START, '202101010000', freq='30T')
    df = pd.DataFrame()
    df['timestamp'] = idx
    df = pd.DataFrame(idx, columns=['timestamp'])
    # First missing period per variable (cross join periods x variables,
    # anti-join on existing values).
    s = """
    select sm_hh_variables.var_id, var_name, min(sm_periods.period_id) as period_id, min(period) as period
    from sm_periods cross join sm_hh_variables
    left outer join sm_hh_variable_vals on sm_periods.period_id=sm_hh_variable_vals.period_id
    and sm_hh_variable_vals.var_id=sm_hh_variables.var_id
    where sm_hh_variable_vals.id is null
    group by sm_hh_variables.var_id, var_name;
    """
    mins = loadDataFromDb(s, returndf=True)
    for i, j in mins.iterrows():
        tariff = j.var_name
        if 'AGILE-18-02-21' not in tariff:
            continue
        print(tariff)
        start = j.period.replace(' ','T')
        end = '2021-01-01T00:00'
        url = ('https://api.octopus.energy/v1/products/AGILE-18-02-21/' +
               'electricity-tariffs/E-1R-{}/standard-unit-rates/' +
               '?period_from={}Z&period_to={}Z&page_size=15000')
        url = url.format(tariff, start, end)
        r = requests.get(url)
        r = r.json()['results']
        if len(r)==0:
            continue
        dfs = pd.DataFrame(r)[['valid_from','value_exc_vat']]
        dfs['timestamp'] = pd.DatetimeIndex(dfs.valid_from.str[:-1])
        dfs = df.merge(right=dfs, how='left', on='timestamp')
        dfs = dfs[dfs.valid_from.notna()]
        print(dfs)
        s = """
        INSERT INTO sm_hh_variable_vals (var_id, period_id, value) values
        """
        # NOTE(review): DataFrame index used as period_id (see earlier note).
        for a, b in dfs.iterrows():
            s+= " ({}, {}, {}),".format(j.var_id, a, b.value_exc_vat)
        s = s[:-1] + ';'
        cur.execute(s)
        print(cur.statusmessage)
        conn.commit()

if False:
    # Re-hash every account's access code into sm_accounts.hash.
    # NOTE(review): `encode` is only imported in a later snippet — this
    # block would need `from myutils.utils import encode` when enabled.
    s = 'select account_id, code from sm_accounts;'
    a = loadDataFromDb(s, returndf=True)
    s = ''
    for i, j in a.iterrows():
        s+= "update sm_accounts set hash='{}' where account_id={};\n".format(encode(j.code), j.account_id)
    loadDataFromDb(s)
if False:
    # Checks for gaps
    # Lists every (account, period) inside an account's stored range that
    # has no consumption value.
    s = """
    select sm_accounts.account_id, sm_accounts.type_id, sm_periods.period from sm_accounts
    left join sm_periods on sm_periods.period between sm_accounts.first_period and sm_accounts.last_period
    left join sm_quantity on sm_quantity.period_id=sm_periods.period_id and sm_quantity.account_id= sm_accounts.account_id
    where sm_quantity.quantity is null
    """
    df = loadDataFromDb(s, returndf=True)
    print(df)

if False:
    # (Re)create sm_tariffs: maps a product/region to its variable and
    # granularity (0 = half-hourly, 1 = daily).
    s = """
    DROP TABLE IF EXISTS sm_tariffs;
    CREATE TABLE sm_tariffs (
    tariff_id serial primary key
    , type_id integer not null
    , product varchar not null
    , region char(1) not null
    , granularity_id integer not null
    , var_id integer not null);
    """
    loadDataFromDb(s)

if False:
    # Seed sm_tariffs from existing hh variable names of the form
    # '<14-char product>-<region letter>'.
    s = """
    insert into sm_tariffs (type_id, product, region, granularity_id, var_id)
    select
    0 as type_id, left(var_name, 14) as product, right(var_name, 1) as region, 0 as granularity_id, var_id
    from sm_hh_variables;
    """
    loadDataFromDb(s)
    print(loadDataFromDb('select * from sm_tariffs', returndf=True))

if False:
    # Count July-2020 periods with no consumption for one hashed account.
    # (First query superseded by the CTE version below.)
    s = """
    select period from sm_periods
    left join sm_quantity on sm_quantity.period_id=sm_periods.period_id
    where sm_quantity.quantity is null and sm_periods.local_date between '2020/07/01' and '2020/07/30'
    and sm_quantity.account_id in (select account_id from sm_accounts where hash LIKE
    'c6e81194c0aa3d65d0522d41171e4d07301457dc1cb26f7b05f60a70227be1f3%' and type_id=0);
    """
    s = """
    with p as (select period, period_id from sm_periods where local_date between '2020/07/01' and '2020/07/30' ),
    q as (select quantity, period_id from sm_quantity where sm_quantity.account_id in (select account_id from sm_accounts where hash LIKE
    'c6e81194c0aa3d65d0522d41171e4d07301457dc1cb26f7b05f60a70227be1f3%' and type_id=0))
    select count(p.period) from p
    left join q on q.period_id=p.period_id
    where q.quantity is null;
    """
    print(loadDataFromDb(s, returndf=True))
if False:
    # Load Elexon profile-class coefficients (classes 1 and 2) from CSV into
    # the Profile_1 / Profile_2 half-hourly variables.
    s = "insert into sm_hh_variables (var_name) Values ('Profile_1'), ('Profile_2');"
    #loadDataFromDb(s)
    #
    for pc in [1,2]:
        idx = pd.date_range(START, '202203312300', freq='30T')
        df = pd.DataFrame()
        df['timestamp'] = idx
        df = pd.DataFrame(idx, columns=['timestamp'])
        df = df.iloc[:-1].copy()
        f = '/home/django/django_project/scripts/Default_Period_Profile_Class_Coefficient_309.csv'
        d = pd.read_csv(f)
        d.columns = ['class','d1','period','coeff']
        d = d[d['class']==pc]
        # d1 is DD/MM/YYYY-ish; rebuild as YYYY/MM/DD for comparison.
        d['date'] = d.d1.str[6:] + d.d1.str[2:6] + d.d1.str[:2]
        d = d[d.date>=(START[:4] + '/' + START[4:6] + '/' + START[6:8])]
        df = df[df.timestamp>='2021-03-31 23:00']
        #d = d[d.date<'2021/04/01']
        # Align CSV rows to the timestamp frame positionally; lengths must match.
        d = d.iloc[:len(df)]
        assert(len(d)==len(df))
        df['coeff'] = d.coeff.tolist()
        s = "select var_id from sm_hh_variables where var_name='{}';".format('Profile_{}'.format(pc))
        var_id = loadDataFromDb(s)[0][0]
        s = "insert into sm_hh_variable_vals (var_id, period_id, value) values "
        # NOTE(review): DataFrame index used as period_id (see earlier note).
        for i, j in df.iterrows():
            s+= " ({}, {}, {}),".format(var_id, i, j.coeff)
        s = s[:-1] + ';'
        loadDataFromDb(s)
if False: #Gets latest carbon intensity
    # Append national CO2 intensity values to the CO2_National variable,
    # starting from the last period already stored.
    s = "insert into sm_hh_variables (var_name) Values ('CO2_National');"
    #loadDataFromDb(s)
    s = """
    select s.var_id, s.var_name, max(sm_periods.period_id) as period_id, max(period) as period
    from sm_hh_variables s
    left join sm_hh_variable_vals on s.var_id=sm_hh_variable_vals.var_id
    left join sm_periods on sm_periods.period_id=sm_hh_variable_vals.period_id
    where s.var_name='CO2_National'
    group by s.var_id, s.var_name;
    """
    data = loadDataFromDb(s)[0]
    latest = data[3]
    var_id = data[0]
    print(latest)
    idx = pd.date_range(START, '202101010000', freq='30T')
    df = pd.DataFrame()
    df['timestamp'] = idx
    df = pd.DataFrame(idx, columns=['timestamp'])
    df = df.iloc[:-1].copy()

    def getintensity(dt):
        # Fetch a 48h forward window from *dt*; returns (next start, frame
        # of actual intensities indexed by 'from' timestamp).
        url = "https://api.carbonintensity.org.uk/intensity/"
        r = requests.get(url + dt + "/fw48h")
        j = r.json()['data']
        return j[-1]['to'], pd.DataFrame([x['intensity']['actual'] for x in j], index=[x['from'] for x in j])

    dt = latest.replace(' ', 'T')+ 'Z'
    intensities = []
    # Walk forward in 48h windows until actuals run out (NaN at the tail).
    for d in range(500):
        dt, intensity = getintensity(dt)
        intensities.append( intensity)
        if intensity[0].isna()[-1]:
            break
    d = pd.concat(intensities)
    d.columns=['intensity']
    # Trim trailing NaNs, then fill interior gaps from the previous period
    # (or the same period a day earlier once 48 rows are available).
    last = d[d.intensity.notna()].index.max()
    d = d.loc[:last].copy()
    for i in range(len(d)):
        if np.isnan(d.intensity.iloc[i]):
            if i<48:
                d.intensity.iloc[i] = d.intensity.iloc[i-1]
            else:
                d.intensity.iloc[i] = d.intensity.iloc[i-48]
    d['timestamp'] = pd.DatetimeIndex(d.index.str[:16])
    # Drop the first two rows (overlap with the already-stored latest period).
    d = d.iloc[2:]
    d = df.merge(d, how='left', on='timestamp' )
    d = d[d.intensity.notna()]
    print(d)
    s = "insert into sm_hh_variable_vals (var_id, period_id, value) values "
    # NOTE(review): DataFrame index used as period_id (see earlier note).
    for i, j in d.iterrows():
        s+= " ({}, {}, {}),".format(var_id, i, j.intensity)
    s = s[:-1] + ';'
    loadDataFromDb(s)
if False:
    # Ad-hoc emission/profile join queries; only the LAST assignment to `s`
    # is executed (var_id 31 = CO2 intensity, 29 = profile, account 21).
    s = """
    select sm_periods.local_date, sm_periods.local_time, emis.value emissions, prof.value profile
    from sm_hh_variable_vals emis
    inner join sm_hh_variable_vals prof on emis.period_id=prof.period_id and emis.var_id=31 and prof.var_id=29
    inner join sm_periods on sm_periods.period_id=emis.period_id;
    """
    s = """
    select sm_periods.local_date, sm_periods.local_time, emis.value emissions, prof.value profile, COALESCE(qty.quantity,0) quantity
    from sm_hh_variable_vals emis
    inner join sm_hh_variable_vals prof on emis.period_id=prof.period_id and emis.var_id=31 and prof.var_id=29
    inner join sm_periods on sm_periods.period_id=emis.period_id
    left outer join sm_quantity qty on qty.period_id=emis.period_id and qty.account_id=21
    ;
    """
    s = """
    select sm_periods.local_date, sm_periods.local_time, emis.value emissions, prof.value profile, COALESCE(qty.quantity,0) quantity
    from sm_hh_variable_vals emis
    inner join sm_hh_variable_vals prof on emis.period_id=prof.period_id and emis.var_id=31 and prof.var_id=29
    inner join sm_periods on sm_periods.period_id=emis.period_id
    left outer join sm_quantity qty on qty.period_id=emis.period_id and qty.account_id=21
    where local_date='2020-07-25'
    ;
    """
    df = loadDataFromDb(s, returndf=True)
    print(df)

if False:
    # (Re)create sm_log: request log (mode, url, account hash).
    s = """
    DROP TABLE IF EXISTS sm_log;
    CREATE TABLE sm_log (
    id serial primary key
    , datetime timestamp not null
    , mode integer not null
    , url varchar(124) not null
    , hash varchar(64) not null);
    """
    loadDataFromDb(s)

if False:
    # Create GO-18-06-12 hh variables plus matching sm_tariffs rows.
    for region in ['A','B','C','D','E','F','G','H','J','K','L','M','N','P']:
        tariff = 'GO-18-06-12'
        s = '''
        insert into sm_hh_variables (var_name) values ('{t}-{r}');
        insert into sm_tariffs (type_id, product, region, granularity_id, var_id)
        select 0 as type_id, '{t}' as product, '{r}' as region, 0 as granularity_id, var_id
        from sm_hh_variables where var_name='{t}-{r}';
        '''
        loadDataFromDb(s.format(t=tariff, r=region))
if False:
    # Backfill GO-18-06-12 half-hourly prices. GO rates change rarely, so
    # the API returns sparse intervals; resample('30T').ffill() expands them
    # to every half-hour.
    idx = pd.date_range(START, '202101010000', freq='30T')
    df = pd.DataFrame()
    df['timestamp'] = idx
    df = pd.DataFrame(idx, columns=['timestamp'])
    for region in ['A','B','C','D','E','F','G','H','J','K','L','M','N','P']:
        tariff = 'GO-18-06-12'
        print(region)
        url = ('https://api.octopus.energy/v1/products/{}/' +
               'electricity-tariffs/E-1R-{}-{}/standard-unit-rates/' +
               '?period_from={}Z&period_to={}Z&page_size=15000')
        url = url.format(tariff, tariff, region,
                         df.timestamp.iloc[0].strftime('%Y-%m-%dT%H:%M'),
                         df.timestamp.iloc[-1].strftime('%Y-%m-%dT%H:%M'))
        r = requests.get(url)
        dfs = pd.DataFrame(r.json()['results'])
        dfs.index = pd.DatetimeIndex(dfs.valid_from.str[:16])
        dfs.sort_index(inplace=True)
        # Duplicate the final rate at its valid_to so ffill covers the tail.
        dfs.loc[pd.Timestamp(dfs.valid_to[-1][:16])] = dfs.iloc[-1]
        dfs = dfs['value_exc_vat']
        dfs = dfs.resample('30T').ffill()
        dfs = pd.merge(left=df, right=dfs, left_on='timestamp', right_index=True, how='left')
        dfs = dfs[dfs.value_exc_vat.notna()]
        var_id = loadDataFromDb("select var_id from sm_tariffs where product='{}' and region='{}'".format(tariff, region))[0][0]
        s = '''
        insert into sm_hh_variable_vals (var_id, period_id, value) values
        '''
        # NOTE(review): DataFrame index used as period_id (see earlier note).
        for i, j in dfs.iterrows():
            s += "({}, {}, {}),".format(var_id, i, j.value_exc_vat)
        s = s[:-1] + ';'
        loadDataFromDb(s)

if False:
    # Build a 'demo' account set: clone three source accounts (one per
    # type_id 0..2) under the 'demo' hash, scaling quantities.
    from myutils.utils import encode
    keys = ['ea873c14b0626673', 'd307ca43950726cf','db1da1932e528e06']
    source_id=1
    region='C'
    multipliers = [1.2, 0.8, 0.7]
    newhash = encode('demo')
    s = """
    delete from sm_accounts where hash='{}';
    """
    loadDataFromDb(s.format(newhash))
    for i in range(3):
        s = """
        insert into sm_accounts (type_id, first_period, last_period, last_updated, hash, region, source_id )
        select {}, first_period, last_period, CURRENT_TIMESTAMP, '{}', '{}', {}
        from sm_accounts where hash LIKE '{}%' and type_id={} returning account_id;
        """
        s = s.format(i, newhash, region, source_id, keys[i], i)
        account_id = loadDataFromDb(s)[0][0]
        s = '''
        insert into sm_quantity (account_id, period_id, quantity)
        select {} as account_id, period_id, {}*quantity as quantity from sm_quantity
        inner join sm_accounts on sm_accounts.account_id=sm_quantity.account_id
        where hash LIKE '{}%' and type_id={};
        '''
        s = s.format(account_id, multipliers[i], keys[i], i)
        print(s)
        loadDataFromDb(s)
        print(i)

if False:
    # Remove orphaned quantities and report remaining account ids/row count.
    s = 'delete from sm_quantity where account_id not in (select account_id from sm_accounts)'
    loadDataFromDb(s)
    s = '''
    select distinct account_id from sm_quantity order by account_id;
    '''
    print(loadDataFromDb(s, returndf=True))
    s = '''
    select distinct account_id from sm_accounts order by account_id;
    '''
    print(loadDataFromDb(s, returndf=True))
    s = 'select count(id) from sm_quantity;'
    print(loadDataFromDb(s, returndf=True))
if False: #Inserts latest daily gas tracker prices
    # Append new SILVER-2017-1 daily prices after each variable's latest
    # stored date (VAT-inclusive API values are stored as-is here).
    conn.commit()
    import requests
    import datetime
    s = """
    select sm_tariffs.var_id, sm_tariffs.tariff_id, product, region, max(sm_d_variable_vals.local_date) as latest_date
    from sm_tariffs
    inner join sm_d_variable_vals on sm_tariffs.var_id=sm_d_variable_vals.var_id and sm_tariffs.granularity_id=1
    group by sm_tariffs.var_id, sm_tariffs.tariff_id, product, region;
    """
    mins = loadDataFromDb(s, returndf=True)
    print(mins)
    for i, j in mins.iterrows():
        if j['product'] not in ['SILVER-2017-1']:
            continue
        print(f"{j['product']}_{j.region}")
        r = requests.get(f'https://octopus.energy/api/v1/tracker/G-1R-SILVER-2017-1-{j.region}/daily/past/90/1/')
        dates = [x['date'] for x in r.json()['periods']]
        prices = [x['unit_rate'] for x in r.json()['periods']]
        d = pd.Series(prices, index=dates)
        # Keep latest_date..today, then drop latest_date itself (already stored).
        d = d[j.latest_date.strftime('%Y-%m-%d'):datetime.date.today().strftime('%Y-%m-%d')]
        d = d.iloc[1:]
        if len(d)==0:
            continue
        print(d)
        s = """
        INSERT INTO sm_d_variable_vals (var_id, local_date, value) values
        """
        for a, b in d.iteritems():
            s+= f" ({j.var_id}, '{a}', {b}),"
        s = s[:-1] + ';'
        cur.execute(s)
        print(cur.statusmessage)
        conn.commit()
if False:  # Inserts latest hh prices
    conn.commit()
    import requests
    # Master half-hour timestamp spine; its row position maps to period_id.
    idx = pd.date_range(START, '202101010000', freq='30T')
    # FIX: removed the dead intermediate DataFrame that was created and then
    # immediately overwritten; build the spine frame in one step.
    df = pd.DataFrame(idx, columns=['timestamp'])
    # Latest stored period per half-hourly (granularity_id=0) tariff.
    s = """
    select sm_tariffs.var_id, sm_tariffs.tariff_id, product, region, max(sm_periods.period_id) as period_id, max(period) as period
    from sm_tariffs
    inner join sm_hh_variable_vals on sm_tariffs.var_id=sm_hh_variable_vals.var_id and sm_tariffs.granularity_id=0
    inner join sm_periods on sm_periods.period_id=sm_hh_variable_vals.period_id
    group by sm_tariffs.var_id, sm_tariffs.tariff_id, product, region;
    """
    mins = loadDataFromDb(s, returndf=True)
    print(mins)
    for i, j in mins.iterrows():
        if j['product'] not in ['AGILE-18-02-21','GO-18-06-12', 'AGILE-OUTGOING-19-05-13']:
            continue
        print(f"{j['product']}_{j.region}")
        start = j.period.replace(' ','T')
        end = '2021-01-01T00:00'
        url = ('https://api.octopus.energy/v1/products/{}/' +
               'electricity-tariffs/E-1R-{}-{}/standard-unit-rates/' +
               '?period_from={}Z&period_to={}Z&page_size=15000')
        url = url.format(j['product'], j['product'], j.region, start, end)
        r = requests.get(url)
        r = r.json().get('results',[])
        if len(r)==0:
            continue
        dfs = pd.DataFrame(r)[['valid_from','valid_to','value_exc_vat']]
        dfs.index = pd.DatetimeIndex(dfs.valid_from.str[:16])
        dfs.sort_index(inplace=True)
        # Append a sentinel row at the final valid_to so the forward-fill
        # covers the last interval, then drop the first (already stored) row.
        dfs.loc[pd.Timestamp(dfs.valid_to[-1][:16])] = dfs.iloc[-1]
        dfs = dfs.iloc[1:]
        dfs = dfs['value_exc_vat']
        dfs = dfs.resample('30T').ffill()
        dfs = dfs.iloc[:-1].copy()
        # Left-merge onto the spine; the surviving row index is inserted as
        # the period_id below.
        dfs = pd.merge(left=df, right=dfs, left_on='timestamp', right_index=True, how='left')
        dfs = dfs[dfs.value_exc_vat.notna()]
        print(url)
        #print(len(dfs))
        print(dfs)
        if len(dfs):
            s = """
            INSERT INTO sm_hh_variable_vals (var_id, period_id, value) values
            """
            for a, b in dfs.iterrows():
                s+= " ({}, {}, {}),".format(j.var_id, a, b.value_exc_vat)
            s = s[:-1] + ';'
            cur.execute(s)
            print(cur.statusmessage)
            conn.commit()
if False:  # Disabled scratch: ad-hoc integrity queries. `s` is reassigned
    # repeatedly; only the SELECTs actually passed to loadDataFromDb run.
    # NOTE(review): this first query has an unbalanced ')' and would fail if
    # ever executed.
    s = """
    select var_id, period_id, max(id) m from sm_hh_variable_vals
    group by var_id, period_id
    having count(id)>1);
    """
    # Draft dedup delete (never executed here).
    s = """
    delete from sm_hh_variable_vals s where id in (
    select max(id) m from sm_hh_variable_vals
    group by var_id, period_id
    having count(id)>1);
    """
    # Orphan checks (never executed here).
    s = """
    select count(id) from sm_hh_variable_vals s where
    var_id not in (select var_id from sm_hh_variables);
    """
    s = """
    select count(id) from sm_quantity where
    account_id not in (select account_id from sm_accounts);
    """
    s = "select count(id) from sm_quantity;"
    s = "select count(account_id) from sm_accounts;"
    # Latest stored period for the CO2_National series — this one runs.
    s = """
    select s.var_id, s.var_name, max(sm_periods.period_id) as period_id, max(period) as period
    from sm_hh_variables s
    left join sm_hh_variable_vals on s.var_id=sm_hh_variable_vals.var_id
    left join sm_periods on sm_periods.period_id=sm_hh_variable_vals.period_id
    where s.var_name='CO2_National'
    group by s.var_id, s.var_name;
    """
    d = loadDataFromDb(s)
    print(d)
    # NOTE(review): leftover tuple assignment; immediately overwritten below.
    s = 3, 31
    s = """
    select min(period_id) from sm_hh_variable_vals where var_id=47;
    """
    #print(loadDataFromDb(s, returndf=True))
    # Draft per-mode/source hit counts over a 24h log window (never executed).
    s = """
    with accounts as (select distinct hash, source_id from sm_accounts)
    select mode, source_id, left(sm_log.hash, 6), count(id)
    from sm_log left outer join accounts on accounts.hash=sm_log.hash
    where datetime>'2020-07-23 17:00' and datetime<'2020-07-24 17:00'
    group by mode, source_id, sm_log.hash;
    """
if False:  # Disabled scratch: synthesise AGILE export prices from var_id=31.
    # Per-region (slope-ish, offset, peak adder) parameters; assumed fitted
    # elsewhere — TODO confirm their derivation.
    exportparams = {
        'A': (0.95, 1.09, 7.04),
        'B': (0.94, 0.78, 6.27),
        'C': (0.95, 1.3, 5.93),
        'D': (0.97, 1.26, 5.97),
        'E': (0.94, 0.77, 6.5),
        'F': (0.95, 0.87, 4.88),
        'G': (0.96, 1.1, 5.89),
        'H': (0.94, 0.93, 7.05),
        'J': (0.94, 1.09, 7.41),
        'K': (0.94, 0.97, 5.46),
        'L': (0.93, 0.83, 7.14),
        'M': (0.96, 0.72, 5.78),
        'N': (0.97, 0.9, 3.85),
        'P': (0.96, 1.36, 2.68), }
    product = 'AGILE-OUTGOING-19-05-13'
    for region in ['A','B','C','D','E','F','G','H','J','K','L','M','N','P']:
        s = f"select var_id from sm_tariffs where product='{product}' and region='{region}'"
        var_id = loadDataFromDb(s)[0][0]
        print(f'{region}, {var_id}')
        p = exportparams[region]
        # Peak window (16:00-18:30): includes the p[2] adder and shifts the
        # source value by 12 before scaling.
        s = f"""
        insert into sm_hh_variable_vals (var_id, period_id, value)
        select {var_id} var_id, v.period_id, ({p[2]} + {p[1]} + {p[0]}*(v.value-12)/2.0) as value from sm_hh_variable_vals v
        inner join sm_periods p on v.period_id=p.period_id
        where var_id=31 and p.period_id<6478 and local_time between'16:00' and '18:30';
        """
        print(loadDataFromDb(s, returndf=True))
        # Off-peak: plain linear transform of the source value.
        s = f"""
        insert into sm_hh_variable_vals (var_id, period_id, value)
        select {var_id} var_id, v.period_id, ({p[1]}+{p[0]}*v.value/2.0) as value from sm_hh_variable_vals v
        inner join sm_periods p on v.period_id=p.period_id
        where var_id=31 and p.period_id<6478 and local_time not between'16:00' and '18:30';
        """
        print(loadDataFromDb(s, returndf=True))
if False:  # Disabled scratch: cost-report query drafts for account 58.
    # Each template is formatted then overwritten; only the last would run,
    # and even that call is commented out.
    # Half-hourly cost against a half-hourly price variable.
    s = '''
    select local_date, local_time as local_time_start, timezone_adj, quantity as total_quantity, value as price, quantity*value as total_cost
    from sm_quantity inner join sm_periods on sm_quantity.period_id=sm_periods.period_id
    inner join sm_hh_variable_vals on sm_hh_variable_vals.period_id=sm_quantity.period_id and sm_hh_variable_vals.var_id={}
    where local_date BETWEEN '{}' AND '{}' and
    sm_quantity.account_id in ({}) order by period;
    '''
    s = s.format(15, '2020-06-01', '2020-06-30', 58)
    # Same, but against a daily price variable.
    s = '''
    select sm_periods.local_date, local_time as local_time_start, timezone_adj, quantity as total_quantity, value as price, quantity*value as total_cost
    from sm_quantity inner join sm_periods on sm_quantity.period_id=sm_periods.period_id
    inner join sm_d_variable_vals on sm_d_variable_vals.local_date=sm_periods.local_date and sm_d_variable_vals.var_id={}
    where sm_periods.local_date BETWEEN '{}' AND '{}' and
    sm_quantity.account_id in ({}) order by period;
    '''
    s = s.format(1, '2020-06-01', '2020-06-30', 58)
    # Daily aggregation within one month.
    s = '''
    select sm_periods.local_date as day, count(sm_quantity.id) as numperiods, sum(quantity) as total_quantity, value as price, sum(quantity*value) as total_cost
    from sm_quantity inner join sm_periods on sm_quantity.period_id=sm_periods.period_id
    inner join sm_d_variable_vals on sm_d_variable_vals.local_date=sm_periods.local_date and sm_d_variable_vals.var_id={}
    where sm_periods.local_date between '{}' and '{}' and date_trunc('month', sm_periods.local_date)='{}' and
    sm_quantity.account_id in ({}) group by sm_periods.local_date, value order by sm_periods.local_date;
    '''
    s = s.format(1, '2020-06-01', '2020-06-30', '2020-06' + '-01', 58)
    # Monthly aggregation over a date range.
    s = '''
    select date_trunc('month', sm_periods.local_date) as month, count(sm_quantity.id) as numperiods, sum(value)/count(value) as price, sum(quantity) as total_quantity, sum(quantity*value) as total_cost
    from sm_quantity inner join sm_periods on sm_quantity.period_id=sm_periods.period_id
    inner join sm_d_variable_vals on sm_d_variable_vals.local_date=sm_periods.local_date and sm_d_variable_vals.var_id={}
    where sm_periods.local_date between '{}' and '{}' and
    sm_quantity.account_id in ({}) group by month order by month;
    '''
    s = s.format(1, '2020-01-01', '2020-06-30', 58)
    #print(loadDataFromDb(s, returndf=True))
if False: #Clean out old account data
    from myutils.utils import encode
    # Hash of the demo account, which must survive the purge.
    demohash = encode('A-EB5A2015sk_live_BXmPhoj6LwhwwfYvosRMePtm')
    print(loadDataFromDb('select count(id) from sm_quantity;'))
    # Drop stale accounts, then any quantities they leave orphaned.
    s = f"delete from sm_accounts where last_updated<'2020-07-26 09:00' and hash!='{demohash}';"
    loadDataFromDb(s)
    s = f"delete from sm_quantity where account_id not in (select account_id from sm_accounts);"
    loadDataFromDb(s)
    print(loadDataFromDb('select count(id) from sm_quantity;'))
    conn.close()
if False:  # Disabled scratch: audit half-hourly variables for coverage gaps.
    # Per-variable first/last stored period (mapped back to timestamps).
    s = '''
    with hhvars as (
    select t.var_id, t.var_name, min(v.period_id) min, max(v.period_id) max from sm_hh_variables t
    left outer join sm_hh_variable_vals v on t.var_id=v.var_id
    group by t.var_name, t.var_id order by var_name)
    select hhvars.var_id, hhvars.var_name, pmin.period min, pmax.period max from hhvars
    inner join sm_periods pmin on pmin.period_id=hhvars.min
    inner join sm_periods pmax on pmax.period_id=hhvars.max
    ;
    '''
    print(loadDataFromDb(s, returndf=True))
    # Periods missing inside each variable's own min..max range.
    s = '''
    with hhvars as (
    select t.var_id, t.var_name, min(v.period_id) min, max(v.period_id) max from sm_hh_variables t
    left outer join sm_hh_variable_vals v on t.var_id=v.var_id
    group by t.var_name, t.var_id order by var_name)
    select hhvars.var_id, hhvars.var_name, p.period, p.period_id
    from hhvars
    inner join sm_periods p on p.period_id between hhvars.min and hhvars.max
    left outer join sm_hh_variable_vals v on v.period_id=p.period_id and hhvars.var_id=v.var_id
    where v.period_id is null
    ;
    '''
    print(loadDataFromDb(s, returndf=True))
if False:  # Disabled scratch: same gap audit as above, for daily variables.
    # Per-variable first/last stored local_date.
    s = '''
    with dvars as (
    select t.var_id, t.var_name, min(v.local_date) min, max(v.local_date) max from sm_d_variables t
    left outer join sm_d_variable_vals v on t.var_id=v.var_id
    group by t.var_name, t.var_id order by var_name)
    select dvars.var_id, dvars.var_name, dvars.min, dvars.max
    from dvars;
    '''
    print(loadDataFromDb(s, returndf=True))
    # Dates missing inside each variable's own min..max range.
    s = '''
    with dvars as (
    select t.var_id, t.var_name, min(v.local_date) min, max(v.local_date) max from sm_d_variables t
    left outer join sm_d_variable_vals v on t.var_id=v.var_id
    group by t.var_name, t.var_id order by var_name)
    select distinct dvars.var_id, dvars.var_name, p.local_date
    from dvars
    inner join sm_periods p on p.local_date between dvars.min and dvars.max
    left outer join sm_d_variable_vals v on v.local_date=p.local_date and dvars.var_id=v.var_id
    where v.local_date is null
    ;
    '''
    print(loadDataFromDb(s, returndf=True))
if False:  # Disabled scratch: list daily variables stored more than once per date.
    s = '''
    select v.var_id, t.var_name, v.local_date, min(id) min, max(id) max, count(id)
    from sm_d_variable_vals v
    inner join sm_d_variables t on t.var_id=v.var_id
    group by v.var_id, t.var_name, v.local_date
    having count(id)>1;
    '''
    print(loadDataFromDb(s, returndf=True))
if False:  # Disabled scratch: per-user usage summary from sm_log.
    # Buckets log rows into days (shifted back 3h so late-night sessions stay
    # on one day), finds each hash's first 'load' day, last seen day, days
    # active, total hits, and guesses the data source from URLs.
    s = '''
    with log as (select left(hash,8) shorthash, date(datetime-Interval '3 hours') myday, * from sm_log
    where url not LIKE '%debug%' and mode=0 ),
    firstlog as (select shorthash, min(myday) firstday from log where
    url LIKE '%task=load%' group by shorthash ),
    dailylog as (select shorthash, myday, count(id) numhits from log group by shorthash, myday ),
    sumlog as (select shorthash, max(myday) lastday, count(myday) as numdays, sum(numhits) numhits from dailylog
    group by shorthash),
    sources as (select distinct shorthash, CASE WHEN url LIKE '%octopus%' THEN 'octopus' WHEN url LIKE '%n3rgy%' THEN 'n3rgy' else 'none' end as source
    from log where (url LIKE '%octopus%' or url LIKE '%n3rgy%') )
    select firstlog.shorthash, sources.source, sumlog.lastday, firstlog.firstday, sumlog.numdays, sumlog.numhits
    from firstlog inner join sumlog on firstlog.shorthash = sumlog.shorthash
    left outer join sources on sources.shorthash=firstlog.shorthash
    order by lastday desc, firstday desc, numdays desc
    ;
    '''
    print(loadDataFromDb(s, returndf=True))
    #CASE WHEN url LIKE '%?%' THEN left(url, position('?' in url)-1)
if False:  # Disabled scratch: find and remove duplicated (account, period) quantities.
    s = '''
    select account_id, period_id, count(id) from sm_quantity group by period_id, account_id
    having count(id)>1 order by account_id, period_id
    '''
    df = loadDataFromDb(s, returndf=True)
    print(df)
    if len(df):
        # NOTE(review): this deletes only min(id) per duplicate group, so
        # groups with count>2 keep extra rows after one pass — verify intent.
        s = ''' delete from sm_quantity where id in (
        select min(id) id from sm_quantity group by period_id, account_id
        having count(id)>1 order by account_id, period_id)
        '''
        loadDataFromDb(s)
        # Re-run the duplicate check to confirm the delete worked.
        s = '''
        select account_id, period_id, count(id) from sm_quantity group by period_id, account_id
        having count(id)>1 order by account_id, period_id
        '''
        df = loadDataFromDb(s, returndf=True)
        print(df)
if False:  # Disabled scratch: peek at the five most recent log entries.
    s = '''
    select datetime, url from sm_log order by datetime desc limit 5;
    '''
    df = loadDataFromDb(s, returndf=True)
    print(df)
if False:  # Disabled scratch: one-off schema changes plus daily log breakdowns.
    # All DDL/UPDATE calls below are commented out; only the final three
    # SELECTs at the bottom actually run.
    s = '''
    Alter Table sm_log
    add column session_id uuid,
    add column choice varchar(64);
    '''
    #loadDataFromDb(s)
    s = '''
    update sm_accounts
    set active = True
    '''
    #loadDataFromDb(s)
    s = '''
    alter table sm_log
    drop column mode,
    drop column hash
    '''
    #loadDataFromDb(s)
    s = "select * from sm_log order by datetime desc limit 5"
    # Backfill 'choice' from the URL path (strip query string and a 4-char prefix).
    s = '''
    update sm_log
    set choice= right(split_part(url, '?',1),-4)
    where choice is Null
    '''
    #loadDataFromDb(s)
    s = "select account_id, type_id, last_updated, region, session_id, active from sm_accounts order by last_updated"
    s = "select * from sm_log order by datetime"
    # Daily count of 'load' requests (days shifted back 3h).
    s = '''
    with log as
    (select date(datetime-Interval '3 hours') myday,
    right(split_part(url, '?',1),-4) as choice,
    * from sm_log where url not LIKE '%debug%')
    select myday, count(id) from log where url like '%load%' group by myday order by myday
    '''
    print(loadDataFromDb(s, returndf=True))
    # Daily count of all non-debug requests.
    s = '''
    with log as
    (select date(datetime-Interval '3 hours') myday,
    right(split_part(url, '?',1),-4) as choice,
    * from sm_log where url not LIKE '%debug%')
    select myday, count(id) from log group by myday order by myday
    '''
    print(loadDataFromDb(s, returndf=True))
    # Daily 'load' counts broken down by data source and fuel type.
    s = '''
    with log as
    (select date(datetime-Interval '3 hours') myday,
    * from sm_log where url not LIKE '%debug%'),
    log2 as (
    select
    CASE WHEN POSITION('octopus' in url)>0 then 'octopus' WHEN POSITION ('n3rgy' in url)>0 then 'n3rgy' else 'unknown' end as source,
    CASE when POSITION('loadgas' in url)>0 then 'gas' WHEN POSITION('loadexport' in url)>0 then 'export' else 'electricity' end as type,
    *
    from log where url like '%load%')
    select
    myday, source, type, count(id) count from log2 group by myday, source, type
    order by myday desc, source, type
    '''
    print(loadDataFromDb(s, returndf=True))
if False:  # Disabled scratch: deactivate stale sessions and prune their data.
    # Marks accounts inactive when not updated for 6h or not called for 3h,
    # excluding one pinned session id. All mutating calls are commented out.
    # NOTE(review): the CTE selects session_id/type_id without grouping by
    # them — confirm this parses on the target Postgres before enabling.
    s = '''
    with sessions as (
    select sm_accounts.account_id, sm_accounts.session_id, sm_accounts.type_id, last_updated, max(datetime) as last_called
    from sm_accounts
    left outer join sm_log on sm_accounts.session_id=sm_log.session_id
    where sm_accounts.session_id != 'e4280c7d-9d06-4bbe-87b4-f9e106ede788' and sm_accounts.active='1'
    group by account_id, last_updated)
    update sm_accounts set active='0' where account_id in
    (
    select account_id from sessions where last_updated<CURRENT_TIMESTAMP-Interval '6 hours' or last_called<CURRENT_TIMESTAMP-Interval '3 hours' )
    '''
    #print(loadDataFromDb(s, returndf=True))
    s = "delete from sm_quantity where account_id not in (select account_id from sm_accounts where active='1') "
    #print(loadDataFromDb(s, returndf=True))
    s = '''
    select count(id), count(session_id) from sm_quantity left outer join sm_accounts on sm_quantity.account_id=sm_accounts.account_id and sm_accounts.active='1'
    '''
    #print(loadDataFromDb(s, returndf=True))
    s = "select session_id, count(account_id) from sm_accounts where active='1' group by session_id having count(account_id)>0"
    s = "select count(id) from sm_quantity"
    print(loadDataFromDb(s, returndf=True))
if False:  # Disabled scratch: null out one specific session id in the log.
    s = '''Update sm_log set session_id=Null where session_id='e4280c7d-9d06-4bbe-87b4-f9e106ede788' '''
    #'e4280c7d-9d06-4bbe-87b4-f9e106ede788'
    print(loadDataFromDb(s))
    # Confirm no rows remain with that session id.
    s = '''select * from sm_log where session_id='e4280c7d-9d06-4bbe-87b4-f9e106ede788' limit 5'''
    print(loadDataFromDb(s, returndf=True))
if False:  # Disabled scratch: inspect stored AGILE prices, then re-fetch and
    # insert missing half-hourly prices from the Octopus API (same pattern as
    # the earlier 'Inserts latest hh prices' block, but keyed off sm_variables).
    s = f'''
    select concat(local_date, ' ', local_time) dt, value from sm_hh_variable_vals v
    inner join sm_periods on v.period_id=sm_periods.period_id
    inner join sm_tariffs on sm_tariffs.var_id=v.var_id
    where product='AGILE-18-02-21' and region='C'
    order by dt desc limit 10
    '''
    print(loadDataFromDb(s, returndf=True))
    s = f'''
    select concat(local_date, ' ', local_time) dt, value from sm_hh_variable_vals v
    inner join sm_periods on v.period_id=sm_periods.period_id
    inner join sm_tariffs on sm_tariffs.var_id=v.var_id
    where product='AGILE-18-02-21' and region='C'
    and concat(sm_periods.local_date, ' ', sm_periods.local_time)>='2020-10-01T00:00'
    order by dt
    '''
    print(loadDataFromDb(s, returndf=True))
    # Half-hour timestamp spine; row position maps to period_id.
    idx = pd.date_range(START, '202101010000', freq='30T')
    # NOTE(review): the next two lines are dead — df is rebuilt immediately.
    df = pd.DataFrame()
    df['timestamp'] = idx
    df = pd.DataFrame(idx, columns=['timestamp'])
    # Latest stored period per half-hourly variable (excluding region 'Z').
    s = """
    select sm_variables.var_id, product, region, max(sm_periods.period_id) as period_id, max(period) as period
    from sm_variables
    inner join sm_hh_variable_vals on sm_variables.var_id=sm_hh_variable_vals.var_id and sm_variables.granularity_id=0
    and sm_variables.region!='Z'
    inner join sm_periods on sm_periods.period_id=sm_hh_variable_vals.period_id
    group by sm_variables.var_id, product, region;
    """
    mins = loadDataFromDb(s, returndf=True)
    print(mins)
    for i, j in mins.iterrows():
        if j['product'] not in ['AGILE-18-02-21','GO-18-06-12', 'AGILE-OUTGOING-19-05-13']:
            continue
        start = j.period.replace(' ','T')
        end = '2021-01-01T00:00'
        url = ('https://api.octopus.energy/v1/products/{}/' +
               'electricity-tariffs/E-1R-{}-{}/standard-unit-rates/' +
               '?period_from={}Z&period_to={}Z&page_size=15000')
        url = url.format(j['product'], j['product'], j.region, start, end)
        r = requests.get(url)
        r = r.json().get('results',[])
        if len(r)==0:
            continue
        dfs = pd.DataFrame(r)[['valid_from','valid_to','value_exc_vat']]
        dfs.index = pd.DatetimeIndex(dfs.valid_from.str[:16])
        dfs.sort_index(inplace=True)
        # Sentinel row so the forward-fill covers the final interval, then
        # drop the first (already stored) row.
        dfs.loc[pd.Timestamp(dfs.valid_to[-1][:16])] = dfs.iloc[-1]
        dfs = dfs.iloc[1:]
        dfs = dfs['value_exc_vat']
        dfs = dfs.resample('30T').ffill()
        dfs = dfs.iloc[:-1].copy()
        dfs = pd.merge(left=df, right=dfs, left_on='timestamp', right_index=True, how='left')
        dfs = dfs[dfs.value_exc_vat.notna()]
        print(dfs)
        if len(dfs):
            s = """
            INSERT INTO sm_hh_variable_vals (var_id, period_id, value) values
            """
            for a, b in dfs.iterrows():
                s+= " ({}, {}, {}),".format(j.var_id, a, b.value_exc_vat)
            s = s[:-1] + ';'
            print(loadDataFromDb(s) )
if True:
    # Extend sm_periods with half-hour rows from 2019-01-01 to 2022-07-01,
    # inserting in batches of 1000.
    conn, cur = getConnection()
    df_idx = pd.date_range(datetime.datetime(2019,1,1), datetime.datetime(2022,7,1), freq='30min')
    df_idx_local = df_idx.tz_localize('UTC').tz_convert('Europe/London')
    df = pd.DataFrame(index=df_idx)
    df['period'] = df_idx.strftime('%Y-%m-%d %H:%M')
    df['local_date'] = df_idx_local.strftime('%Y-%m-%d')
    df['local_time'] = df_idx_local.strftime('%H:%M')
    # Sign + hour digits of the UTC offset, e.g. '+0100' -> 1, '+0000' -> 0.
    df['timezone_adj'] = df_idx_local.strftime('%z').str[0:3].astype(int)
    df.reset_index(inplace=True)
    # Skip rows assumed already present; the positional row label doubles as
    # the period_id being inserted below.
    df = df.loc[43777:]
    print(df)
    start = """
    INSERT INTO sm_periods (period_id, period, local_date, local_time, timezone_adj)
    VALUES
    """
    s=""
    for i, j in df.iterrows():
        s+= "({},'{}', '{}', '{}', {}),".format(i, j['period'], j['local_date'],j['local_time'], j['timezone_adj'])
        if (i+1)%1000==0:
            print('done: {}'.format(i+1))
            cur.execute(start + s[:-1] + ';')
            conn.commit()
            s=""
    # FIX: flush the final partial batch only when it is non-empty.
    # Previously this executed unconditionally, producing malformed SQL
    # ('... VALUES;') whenever the loop ended exactly on a 1000-row boundary,
    # and a NameError on 'i' when df was empty.
    if s:
        print('done: {}'.format(i+1))
        cur.execute(start + s[:-1] + ';')
        conn.commit()
        s=""
if False:  # Disabled scratch: keep only the newest row per (var_id, period_id).
    # Deletes every sm_hh_variable_vals row whose id is below the max id of
    # its (var_id, period_id) group.
    s = '''
    with latest as (select max(id) m, var_id, period_id from sm_hh_variable_vals group by var_id, period_id)
    , remove as (select v.id from sm_hh_variable_vals v inner join latest on v.var_id=latest.var_id and v.period_id=latest.period_id
    where v.id<latest.m
    order by latest.var_id, latest.period_id)
    delete from sm_hh_variable_vals where id in (select id from remove)
    '''
    print(loadDataFromDb(s, returndf=True))
if False:  # Disabled scratch: two query formulations for monthly import/export
    # totals of one session, with wall-clock timing to compare them.
    # Variant 1: inner join on export, left join on import (timing call
    # commented out).
    s = '''
    with periods as (select * from sm_periods where local_date between '2020-08-01' and '2020-10-01' )
    , quantities1 as (select period_id, quantity
    from sm_quantity
    inner join sm_accounts on sm_quantity.account_id=sm_accounts.account_id
    where session_id='39b76afc-118d-40e1-8368-c395fa0926e4' and type_id=0 and active='1')
    , quantities2 as (select period_id, quantity from sm_quantity
    inner join sm_accounts on sm_quantity.account_id=sm_accounts.account_id
    where session_id='39b76afc-118d-40e1-8368-c395fa0926e4' and type_id=2 and active='1')
    , fulldata as
    (select periods.*, coalesce(quantities1.quantity,0) as import, coalesce(quantities2.quantity, 0) as export
    from periods inner join quantities2 on periods.period_id=quantities2.period_id
    left outer join quantities1 on periods.period_id=quantities1.period_id)
    select date_trunc('month',local_date) as month, count(period_id) as numperiods,
    sum(import) as total_import,
    sum(export) as total_export
    from fulldata group by month order by month
    '''
    import time
    a = time.time()
    #print(loadDataFromDb(s, returndf=True))
    print(time.time()-a)
    # Variant 2: build import/export sets separately and full-outer-join them.
    s = '''
    with periods as (select * from sm_periods where local_date between '2020-08-01' and '2020-10-01' )
    , quantities1 as (select period_id, quantity
    from sm_quantity
    inner join sm_accounts on sm_quantity.account_id=sm_accounts.account_id
    where session_id='39b76afc-118d-40e1-8368-c395fa0926e4' and type_id=0 and active='1')
    , quantities2 as (select period_id, quantity from sm_quantity
    inner join sm_accounts on sm_quantity.account_id=sm_accounts.account_id
    where session_id='39b76afc-118d-40e1-8368-c395fa0926e4' and type_id=2 and active='1')
    , full1 as
    (select periods.*, quantities1.quantity
    from periods inner join quantities1 on periods.period_id=quantities1.period_id)
    , full2 as
    (select periods.*, quantities2.quantity
    from periods inner join quantities2 on periods.period_id=quantities2.period_id)
    , fulldata as
    (select full2.*, coalesce(full1.quantity,0) as import, full2.quantity as export
    from full2 full outer join full1 on full2.period_id=full1.period_id)
    select date_trunc('month',local_date) as month, count(period_id) as numperiods,
    sum(import) as total_import,
    sum(export) as total_export
    from fulldata group by month order by month
    '''
    import time
    a = time.time()
    print(loadDataFromDb(s, returndf=True))
    print(time.time()-a)
# Generated by Django 3.0 on 2020-02-07 08:20
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a nullable boolean 'unverified' field (default False) to the
    organisation model and to 'historicalorganisation' (presumably its
    history-tracking counterpart — confirm against the app's models)."""

    dependencies = [
        ('hlwtadmin', '0008_venue_non_assignable'),
    ]

    operations = [
        migrations.AddField(
            model_name='historicalorganisation',
            name='unverified',
            field=models.BooleanField(blank=True, default=False, null=True),
        ),
        migrations.AddField(
            model_name='organisation',
            name='unverified',
            field=models.BooleanField(blank=True, default=False, null=True),
        ),
    ]
|
#!/usr/bin/env python3
import math
import time
def is_prime(number):
    """Return True when *number* is prime, via trial division up to sqrt."""
    if number < 2:
        return False
    upper = int(math.sqrt(number)) + 1
    return all(number % candidate for candidate in range(2, upper))
def main():
    """Project Euler 58: grow the number spiral ring by ring until the
    fraction of primes on the diagonals drops below 10%, then print the
    side length of the square at that point."""
    threshold = 0.1
    corner = 9          # bottom-right corner of the current ring
    side = 3            # current square side length
    primes_found = 3    # 3, 5, 7 on the first ring are prime
    # A square of side s has 2s-1 numbers on its diagonals.
    while primes_found / (2 * side - 1) >= threshold:
        step = side + 1  # gap between consecutive corners of the next ring
        for _ in range(4):
            corner += step
            if is_prime(corner):
                primes_found += 1
        side += 2
    print("Solution:", side)
if __name__ == "__main__":
    # Run the solver and report how long it took.
    t0 = time.time()
    main()
    elapsed = time.time() - t0
    print("Duration: {0:0.6f}s".format(elapsed))
|
import tensorflow as tf
from datetime import datetime
from logger.logger import Logger
from models.multimodal import JointTwomvae
from models.multimodal import AssociatorVideoAc
from models.multimodal import AssociatorAudio
from models.multimodal import AssociatorAudioAc
from models.multimodal import Jointmvae
from models.multimodal import JointTwomvae2
from models.unet_sound22 import UNetSound as UNetSound22
from models.unet_sound2 import UNetSound
from models.unet_architecture_energy import UNetE
from models.unet_noconc2 import UNetAc as UNetAc2
from models.unet_z import UNetAc as UNetzvariable
from models.unet_noconc import UNetAc
from models.unet_acresnet2skip import UNetAc as UNetAcResNet50_2skips
from models.unet_acresnet import UNetAc as UNetAcResNet50
from models.unet_acresnet0skip import UNetAc as UNetAcResNet50_0skips
from models.unet_architecture_noconc import UNet
from models.unet_architecture_noconc2 import UNet as Unet2
from models.vision import ResNet50Model
from trainer.trainermulti import Trainer as TrainerMulti
from trainer.trainer import Trainer as Trainer
from trainer.trainer_three import Trainer as TrainerLoss
from trainer.trainer2 import Trainer as TrainerNCAproxyanchor
from trainer.mfcctrainer import Trainer as TrainerMask
from trainer.trainer_proietta import Trainer as TrainerProject
from dataloader.actions_data_old import ActionsDataLoader
from dataloader.outdoor_data_mfcc import ActionsDataLoader as SoundDataLoader
from trainer.trainer_class import Trainer as Trainer_classification
from trainer.trainer_reconstructed_class import Trainer as Trainer_rec_class
from models.dualcamnet import DualCamHybridModel
flags = tf.app.flags
# --- Execution / data locations ---
flags.DEFINE_string('mode', None, 'Execution mode, it can be either \'train\' or \'test\'')
flags.DEFINE_string('model', None, 'Model type, it can be one of \'SeeNet\', \'ResNet50\', \'TemporalResNet50\', '
                                   '\'DualCamNet\', \'DualCamHybridNet\', \'SoundNet5\', or \'HearNet\'')
flags.DEFINE_string('train_file', None, 'Path to the plain text file for the training set')
flags.DEFINE_string('valid_file', None, 'Path to the plain text file for the validation set')
flags.DEFINE_string('test_file', None, 'Path to the plain text file for the testing set')
flags.DEFINE_string('exp_name', None, 'Name of the experiment')
# --- Checkpoints ---
flags.DEFINE_string('init_checkpoint', None, 'Checkpoint file for model initialization')
flags.DEFINE_string('acoustic_init_checkpoint', None, 'Checkpoint file for acoustic model initialization')
flags.DEFINE_string('audio_init_checkpoint', None, 'Checkpoint file for audio model initialization')
flags.DEFINE_string('visual_init_checkpoint', None, 'Checkpoint file for visual model initialization')
flags.DEFINE_string('restore_checkpoint', None, 'Checkpoint file for session restoring')
# --- Optimisation ---
flags.DEFINE_integer('batch_size', 8, 'Size of the mini-batch')
flags.DEFINE_float('learning_rate', 0.001, 'Learning rate')
# FIX: help text previously said 'Learning rate' (copy-paste error).
flags.DEFINE_float('latent_loss', 0.000001, 'Weight of the latent loss term')
flags.DEFINE_integer('display_freq', 1, 'How often must be shown training results')
flags.DEFINE_integer('num_epochs', 100, 'Number of iterations through dataset')
# --- Sequence sampling ---
flags.DEFINE_integer('total_length', 30, 'Length in seconds of a full sequence')
# sample length is 1 s for dualcamnet and 5 s for hearnet and soundnet
flags.DEFINE_integer('sample_length', 1, 'Length in seconds of a sequence sample')
# number of crops 30 for 1 s and 6 for 5 s
flags.DEFINE_integer('number_of_crops', 30, 'Number of crops')
flags.DEFINE_integer('buffer_size', 100, 'Size of pre-fetch buffer')
flags.DEFINE_string('tensorboard', None, 'Directory for storing logs')
flags.DEFINE_string('checkpoint_dir', None, 'Directory for storing models')
flags.DEFINE_integer('temporal_pooling', 0, 'Flag to indicate whether to use average pooling over time')
flags.DEFINE_integer('embedding', 0, 'Say if you are training 128 vectors')
flags.DEFINE_float('margin', 0.2, 'margin')  # between 0 and 11 for 128 vector
flags.DEFINE_integer('block_size', 1, 'Number of frames to pick randomly for each second')  # 12
flags.DEFINE_integer('num_class', 128, 'Classes')
flags.DEFINE_string('datatype', 'outdoor', 'music or outdoor or old')
# --- Model-variant switches (integers used as booleans) ---
flags.DEFINE_integer('correspondence', 0, 'use correspondence')
flags.DEFINE_integer('proxy', 0, 'Use NCA')
flags.DEFINE_string('encoder_type', 'Video', 'Modality for encoder, it can be one of \'Energy\', \'Video\', \'Ac\' or \'Audio\'')
flags.DEFINE_integer('fusion', 0, 'Use both audio and video')
flags.DEFINE_integer('moddrop', 0, 'Use audio video and dropmod ac')
flags.DEFINE_integer('l2', 0, 'Use l2 between latent variables')
flags.DEFINE_integer('project', 0, 'Use conversion between latent variables')
flags.DEFINE_integer('jointmvae', 0, 'Use joint latent')
flags.DEFINE_integer('onlyaudiovideo', 0, 'Using only audio and video')
flags.DEFINE_integer('mfcc', 0, 'Using mfcc and resnet50 or dualcamnet with acoustic and mfccmap')
flags.DEFINE_integer('mfccmap', 0, 'Do not reconstruct')
flags.DEFINE_integer('num_skip_conn', 1, 'Number of skip')
flags.DEFINE_integer('ae', 0, 'auto encoder')
flags.DEFINE_integer('MSE', 1, 'MSE loss use')
flags.DEFINE_integer('huber_loss', 1, 'Huber loss use')
FLAGS = flags.FLAGS
def main(_):
    """Entry point: build data loaders, encoders and a trainer from FLAGS, then train or test.

    Args:
        _: unused positional argument supplied by ``tf.app.run()``.

    Raises:
        ValueError: if ``FLAGS.mode`` is neither ``'train'`` nor ``'test'``.
    """
    # Create data loaders according to the received program arguments
    print('{}: {} - Creating data loaders'.format(datetime.now(), FLAGS.exp_name))
    # random_pick = (FLAGS.model == 'TemporalResNet50' or FLAGS.model_1 == 'TemporalResNet50') or (FLAGS.model == 'ResNet18' or FLAGS.model_1 == 'ResNet18')
    # if we are randomly picking total number of frames, we can set random pick to False
    nr_frames = FLAGS.block_size * FLAGS.sample_length
    # if (FLAGS.model == 'ResNet18_v1' or FLAGS.model == 'ResNet50' or FLAGS.model_1 == 'ResNet18_v1'
    #     or FLAGS.model_1 == 'ResNet50' or FLAGS.model == 'AVNet') and nr_frames < 12*FLAGS.sample_length:
    #     random_pick = True
    # else:
    #     random_pick = False
    random_pick = False
    build_spectrogram = True
    normalize = False  # do not normalize spectrograms with per-sample statistics
    modalities = []
    # consider all three modalities (0, 1, 2)
    modalities.append(0)
    modalities.append(1)
    modalities.append(2)
    # NOTE(review): only 'old' and 'outdoor' are handled below, although the
    # 'datatype' flag also advertises 'music'; that value would leave
    # num_classes/train_data/valid_data/test_data unbound and raise NameError
    # later -- confirm the supported datatypes.
    with tf.device('/cpu:0'):
        if FLAGS.datatype == 'old':
            num_classes = 14
            if FLAGS.train_file is None:
                train_data = None
            else:
                train_data = ActionsDataLoader(FLAGS.train_file, 'training', FLAGS.batch_size, num_epochs=1,
                                               sample_length=FLAGS.sample_length, embedding=FLAGS.embedding,
                                               buffer_size=FLAGS.buffer_size, datakind=FLAGS.datatype,
                                               shuffle=True, normalize=normalize, random_pick=random_pick,
                                               correspondence=FLAGS.correspondence,
                                               build_spectrogram=build_spectrogram, modalities=modalities, nr_frames=nr_frames)
            if FLAGS.valid_file is None:
                valid_data = None
            else:
                valid_data = ActionsDataLoader(FLAGS.valid_file, 'validation', FLAGS.batch_size, num_epochs=1,
                                               sample_length=FLAGS.sample_length, datakind=FLAGS.datatype, embedding=FLAGS.embedding,
                                               buffer_size=FLAGS.buffer_size, shuffle=False, normalize=normalize,
                                               correspondence=FLAGS.correspondence,
                                               random_pick=random_pick, build_spectrogram=build_spectrogram, modalities=modalities,
                                               nr_frames=nr_frames)
            if FLAGS.test_file is None:
                test_data = None
            else:
                test_data = ActionsDataLoader(FLAGS.test_file, 'testing', FLAGS.batch_size, num_epochs=1,
                                              sample_length=FLAGS.sample_length, datakind=FLAGS.datatype, embedding=FLAGS.embedding,
                                              buffer_size=FLAGS.buffer_size, shuffle=False, normalize=normalize,
                                              correspondence=FLAGS.correspondence,
                                              random_pick=random_pick, build_spectrogram=build_spectrogram, modalities=modalities,
                                              nr_frames=nr_frames)
        elif FLAGS.datatype == 'outdoor':
            num_classes = 10
            if FLAGS.train_file is None:
                train_data = None
            else:
                train_data = SoundDataLoader(FLAGS.train_file, 'training', FLAGS.batch_size, num_epochs=1,
                                             sample_length=FLAGS.sample_length, embedding=FLAGS.embedding,
                                             buffer_size=FLAGS.buffer_size, datakind=FLAGS.datatype,
                                             shuffle=True, normalize=normalize, random_pick=random_pick,
                                             correspondence=FLAGS.correspondence,
                                             build_spectrogram=build_spectrogram, modalities=modalities, nr_frames=nr_frames)
            if FLAGS.valid_file is None:
                valid_data = None
            else:
                valid_data = SoundDataLoader(FLAGS.valid_file, 'validation', FLAGS.batch_size, num_epochs=1,
                                             sample_length=FLAGS.sample_length, datakind=FLAGS.datatype, embedding=FLAGS.embedding,
                                             buffer_size=FLAGS.buffer_size, shuffle=False, normalize=normalize,
                                             correspondence=FLAGS.correspondence,
                                             random_pick=random_pick, build_spectrogram=build_spectrogram,
                                             modalities=modalities,
                                             nr_frames=nr_frames)
            if FLAGS.test_file is None:
                test_data = None
            else:
                test_data = SoundDataLoader(FLAGS.test_file, 'testing', FLAGS.batch_size, num_epochs=1,
                                            sample_length=FLAGS.sample_length, datakind=FLAGS.datatype, embedding=FLAGS.embedding,
                                            buffer_size=FLAGS.buffer_size, shuffle=False, normalize=normalize,
                                            correspondence=FLAGS.correspondence,
                                            random_pick=random_pick, build_spectrogram=build_spectrogram,
                                            modalities=modalities,
                                            nr_frames=nr_frames)
    # Build model
    print('{}: {} - Building model'.format(datetime.now(), FLAGS.exp_name))
    if FLAGS.embedding:
        with tf.device('/gpu:0'):
            if FLAGS.project:
                # Latent-space projection setup: one encoder per modality plus
                # associator network(s) that convert between latent spaces.
                model_encoder_images = UNet(input_shape=[224, 298, 3])
                model_encoder_audio = UNetSound(input_shape=[193, 257, 1])
                model_encoder_acoustic = UNetzvariable(input_shape=[36, 48, 12])
                if FLAGS.fusion:
                    model_associator = AssociatorVideoAc(input_shape=1024)
                    model_associator1 = AssociatorAudioAc(input_shape=256)
                elif FLAGS.encoder_type == 'Video':
                    model_associator = AssociatorVideoAc(input_shape=1024)
                    model_associator1 = None
                else:
                    model_associator = AssociatorAudio(input_shape=[193, 257, 1])
                    model_associator1 = None
            elif FLAGS.jointmvae:
                # Joint-latent (MVAE-style) setup with a shared latent space.
                model_encoder_images = Unet2(input_shape=[224, 298, 3])
                model_encoder_audio = UNetSound22(input_shape=[193, 257, 1])
                model_encoder_acoustic = UNetAc2(input_shape=[36, 48, 12])
                if FLAGS.fusion:
                    model_associator = JointTwomvae2()
                    model_associator1 = None
                elif FLAGS.onlyaudiovideo:
                    model_associator = Jointmvae()
                    model_associator1 = JointTwomvae()
                else:
                    model_associator = Jointmvae()
                    model_associator1 = None
            else:  # mfcc: ResNet50 image encoder + acoustic UNet with a configurable number of skip connections
                model_encoder_images = ResNet50Model(input_shape=[224, 298, 3], num_classes=None)
                if FLAGS.num_skip_conn == 2:
                    model_encoder_acoustic = UNetAcResNet50_2skips(input_shape=[36, 48, 12], embedding=FLAGS.ae)
                elif FLAGS.num_skip_conn == 1:
                    model_encoder_acoustic = UNetAcResNet50(input_shape=[36, 48, 12], embedding=FLAGS.ae)
                elif FLAGS.num_skip_conn == 0:
                    model_encoder_acoustic = UNetAcResNet50_0skips(input_shape=[36, 48, 12], embedding=FLAGS.ae)
        # Build trainer
        print('{}: {} - Building trainer'.format(datetime.now(), FLAGS.exp_name))
        if FLAGS.proxy == 0:
            if FLAGS.project:
                trainer = TrainerProject(model_encoder_acoustic, model_encoder_audio, model_encoder_images,
                                         model_associator, model_associator1, display_freq=FLAGS.display_freq,
                                         learning_rate=FLAGS.learning_rate, num_classes=num_classes,
                                         num_epochs=FLAGS.num_epochs, temporal_pooling=FLAGS.temporal_pooling, nr_frames=nr_frames)
            elif FLAGS.jointmvae:
                trainer = TrainerMulti(model_encoder_acoustic, model_encoder_audio, model_encoder_images,
                                       model_associator, model_associator1,
                                       display_freq=FLAGS.display_freq, learning_rate=FLAGS.learning_rate,
                                       num_classes=num_classes, num_epochs=FLAGS.num_epochs,
                                       temporal_pooling=FLAGS.temporal_pooling, nr_frames=nr_frames)
            elif FLAGS.mfcc:
                trainer = TrainerMask(model_encoder_acoustic, model_encoder_images, display_freq=FLAGS.display_freq,
                                      learning_rate=FLAGS.learning_rate, num_classes=num_classes,
                                      num_epochs=FLAGS.num_epochs, temporal_pooling=FLAGS.temporal_pooling, nr_frames=nr_frames)
            else:
                trainer = TrainerLoss(model_encoder_acoustic, model_encoder_audio, model_encoder_images,
                                      display_freq=FLAGS.display_freq, learning_rate=FLAGS.learning_rate,
                                      num_classes=num_classes, num_epochs=FLAGS.num_epochs,
                                      temporal_pooling=FLAGS.temporal_pooling, nr_frames=nr_frames)
        else:
            # proxy != 0 selects the NCA proxy-anchor training objective.
            trainer = TrainerNCAproxyanchor(model_encoder_acoustic, model_encoder_audio, model_encoder_images,
                                            display_freq=FLAGS.display_freq, learning_rate=FLAGS.learning_rate,
                                            num_classes=num_classes, num_epochs=FLAGS.num_epochs,
                                            temporal_pooling=FLAGS.temporal_pooling, nr_frames=nr_frames)
        if FLAGS.mode == 'train':
            checkpoint_dir = '{}/{}'.format(FLAGS.checkpoint_dir, FLAGS.exp_name)
            if not tf.gfile.Exists(checkpoint_dir):
                tf.gfile.MakeDirs(checkpoint_dir)
            # Dump the full run configuration next to the checkpoints for reproducibility.
            with open('{}/{}'.format(FLAGS.checkpoint_dir, FLAGS.exp_name) + "/configuration.txt", "w") as outfile:
                outfile.write(
                    'Experiment: {} \nModel: {} \nLearning_rate: {}\n'.format(FLAGS.exp_name, FLAGS.model,
                                                                              FLAGS.learning_rate))
                outfile.write(
                    'Num_epochs: {} \nTotal_length: {} \nSample_length: {}\n'.format(FLAGS.num_epochs,
                                                                                    FLAGS.total_length,
                                                                                    FLAGS.sample_length))
                outfile.write(
                    'Number_of_crops: {} \nMargin: {}\nNumber of classes: {}\n'.format(FLAGS.number_of_crops,
                                                                                       FLAGS.margin, num_classes))
                outfile.write(
                    'Block_size: {} \nEmbedding: {}\nLatent weight: {}\n'.format(FLAGS.block_size, FLAGS.embedding, FLAGS.latent_loss))
                outfile.write(
                    'Train_file: {} \nValid_file: {} \nTest_file: {}\n'.format(FLAGS.train_file,
                                                                               FLAGS.valid_file,
                                                                               FLAGS.test_file))
                outfile.write(
                    'Mode: {} \nVisual_init_checkpoint: {} \nAcoustic_init_checkpoint: {} \nRestore_checkpoint: {}\n'.format(
                        FLAGS.mode,
                        FLAGS.visual_init_checkpoint,
                        FLAGS.acoustic_init_checkpoint,
                        FLAGS.restore_checkpoint))
                outfile.write('Checkpoint_dir: {} \nLog dir: {} \nBatch_size: {}\n'.format(FLAGS.checkpoint_dir,
                                                                                           FLAGS.tensorboard,
                                                                                           FLAGS.batch_size))
                outfile.write('Number of skip connections: {} \nAuto encoder: {}\nHuber: {}\nMSE: {}\n'.format(
                    FLAGS.num_skip_conn,
                    FLAGS.ae, FLAGS.huber_loss, FLAGS.MSE))
            print('{}: {} - Training started'.format(datetime.now(), FLAGS.exp_name))
            trainer.train(train_data=train_data, valid_data=valid_data)
        elif FLAGS.mode == 'test':
            # Test model
            print('{}: {} - Testing started'.format(datetime.now(), FLAGS.exp_name))
            trainer.test(test_data=test_data)
        else:
            raise ValueError('Unknown execution mode')
    else:
        # Non-embedding path: a single encoder (UNet variant or DualCamNet).
        with tf.device('/gpu:0'):
            if FLAGS.model == 'UNet':
                if FLAGS.encoder_type == 'Video':
                    model_encoder = UNet(input_shape=[224, 298, 3])
                elif FLAGS.encoder_type == 'Audio':
                    model_encoder = UNetSound(input_shape=[99, 257, 1])
                elif FLAGS.encoder_type == 'Ac':
                    model_encoder = UNetAc(input_shape=[36, 48, 12])
                else:
                    model_encoder = UNetE(input_shape=[36, 48, 1])
            else:  # DualCamNet
                model_encoder = DualCamHybridModel(input_shape=[36, 48, 12], num_classes=num_classes, embedding=0)
            model_encoder_images = ResNet50Model(input_shape=[224, 298, 3], num_classes=None)
            if FLAGS.num_skip_conn == 2:
                model_encoder_acoustic = UNetAcResNet50_2skips(input_shape=[36, 48, 12], embedding=FLAGS.ae)
            elif FLAGS.num_skip_conn == 1:
                model_encoder_acoustic = UNetAcResNet50(input_shape=[36, 48, 12], embedding=FLAGS.ae)
            elif FLAGS.num_skip_conn == 0:
                model_encoder_acoustic = UNetAcResNet50_0skips(input_shape=[36, 48, 12], embedding=FLAGS.ae)
        # Build trainer
        print('{}: {} - Building trainer'.format(datetime.now(), FLAGS.exp_name))
        if FLAGS.model == 'UNet':
            trainer = Trainer(model_encoder, display_freq=FLAGS.display_freq,
                              learning_rate=FLAGS.learning_rate, num_classes=num_classes,
                              num_epochs=FLAGS.num_epochs, temporal_pooling=FLAGS.temporal_pooling, nr_frames=nr_frames)
        else:
            if FLAGS.mfcc:
                trainer = Trainer_classification(model_encoder, display_freq=FLAGS.display_freq,
                                                 learning_rate=FLAGS.learning_rate, num_classes=num_classes,
                                                 num_epochs=FLAGS.num_epochs, temporal_pooling=FLAGS.temporal_pooling, nr_frames=nr_frames)
            else:
                trainer = Trainer_rec_class(model_encoder, model_encoder_acoustic, model_encoder_images,
                                            display_freq=FLAGS.display_freq,
                                            learning_rate=FLAGS.learning_rate, num_classes=num_classes,
                                            num_epochs=FLAGS.num_epochs, temporal_pooling=FLAGS.temporal_pooling,
                                            nr_frames=nr_frames)
        if FLAGS.mode == 'train':
            checkpoint_dir = '{}/{}'.format(FLAGS.checkpoint_dir, FLAGS.exp_name)
            if not tf.gfile.Exists(checkpoint_dir):
                tf.gfile.MakeDirs(checkpoint_dir)
            # Dump the full run configuration next to the checkpoints for reproducibility.
            with open('{}/{}'.format(FLAGS.checkpoint_dir, FLAGS.exp_name) + "/configuration.txt", "w") as outfile:
                outfile.write('Experiment: {} \nBatch_size: {}\n Latent weight: {}\n'.format(FLAGS.exp_name,
                                                                                             FLAGS.batch_size, FLAGS.latent_loss))
                outfile.write(
                    'Model: {} \nLearning_rate: {}\nNumber of classes: {}\n'.format(FLAGS.model, FLAGS.learning_rate,
                                                                                    num_classes))
                outfile.write(
                    'Num_epochs: {} \nTotal_length: {} \nSample_length: {}\n'.format(FLAGS.num_epochs,
                                                                                    FLAGS.total_length,
                                                                                    FLAGS.sample_length))
                outfile.write(
                    'Number_of_crops: {} \nCheckpoint_dir: {} \nLog dir: {}\n'.format(FLAGS.number_of_crops,
                                                                                      FLAGS.checkpoint_dir,
                                                                                      FLAGS.tensorboard))
                outfile.write(
                    'Train_file: {} \nValid_file: {} \nTest_file: {}\n'.format(FLAGS.train_file,
                                                                               FLAGS.valid_file,
                                                                               FLAGS.test_file))
                outfile.write('Number of skip connections: {} \nAuto encoder: {}\nHuber: {}\nMSE: {}\n'.format(
                    FLAGS.num_skip_conn,
                    FLAGS.ae, FLAGS.huber_loss, FLAGS.MSE))
                outfile.write(
                    'Mode: {} \nInit_checkpoint: {} \nRestore_checkpoint: {}\n'.format(FLAGS.mode,
                                                                                       FLAGS.init_checkpoint,
                                                                                       FLAGS.restore_checkpoint))
            # Train model
            print('{}: {} - Training started'.format(datetime.now(), FLAGS.exp_name))
            trainer.train(train_data=train_data, valid_data=valid_data)
        elif FLAGS.mode == 'test':
            # Test model
            print('{}: {} - Testing started'.format(datetime.now(), FLAGS.exp_name))
            trainer.test(test_data=test_data)
        else:
            raise ValueError('Unknown execution mode')
if __name__ == '__main__':
    # 'mode' and 'exp_name' must be supplied on the command line; fail fast
    # before tf.app.run() parses the remaining flags and invokes main().
    flags.mark_flags_as_required(['mode', 'exp_name'])
    tf.app.run()
|
from copy import copy
from doculabs.samon import registry
from doculabs.samon.loaders import BaseLoader
from doculabs.samon.parser import DefaultParser
from doculabs.samon.template import Template
class Environment:
    """Shared template-engine configuration: loader, tag registry and parser.

    Each instance owns a private copy of the global ``registry`` so that
    per-environment customisations do not leak into other environments.
    """

    # Class used to represent parsed templates; override per subclass/instance.
    DEFAULT_TEMPLATE_CLASS = Template

    def __init__(self, loader: BaseLoader):
        """Create an environment that resolves template sources via *loader*."""
        self.registry = copy(registry)
        self.template_class = self.DEFAULT_TEMPLATE_CLASS
        self.loader = loader
        self.parser = DefaultParser(environment=self)

    def get_template(self, template_name):
        """Load, parse and return the template named *template_name*.

        The originating file path is recorded on the parsed template as
        ``source_path``.
        """
        source, path = self.loader.get_source(template_name)
        parsed = self.parser.parse(source, template_name=template_name)
        parsed.source_path = path
        return parsed
|
"""Precompute and store ReId features for the MOT16-11 sequence.

Fixes over the previous version: `import sys` / `sys.path.append('../')` and
`import numpy as np` were each executed twice, imports were scattered through
the script, and `settings.txt` was opened without a context manager (leaking
the file handle).
"""
import sys

# Make the project packages importable when running from this directory.
sys.path.append('../')

import json
from pprint import pprint

import matplotlib.pyplot as plt
import numpy as np

from cabbage.features.ReId import StoredReId
from cabbage.regression.Regression import ReadOnlyRegression
from cabbage.MultiplePeopleTracking import GraphGenerator
from experiments import MOT16_Experiments

# Load the experiment settings once and echo them for traceability.
with open('settings.txt') as settings_file:
    Settings = json.load(settings_file)
pprint(Settings)

root = Settings['data_root']
mot16 = MOT16_Experiments(root)

video_name = 'MOT16-11'
video = mot16.mot16_11_X

# Maximum temporal distance (in frames) between detections considered by ReId.
dmax = 100

# Ground-truth detections for MOT16-11 with person ids stripped.
#Dt = mot16.mot16_02_detections
Dt = mot16.mot16_11_true_detections_no_pid
#Dt = mot16.mot16_02_true_detections_no_pid

reid = StoredReId(root, dmax)
reid.memorize(Dt, video, video_name + '_dmax100')
|
from django.apps import AppConfig
class SliderConfig(AppConfig):
    """Django application configuration for the slider app."""

    # Human-readable label shown in the admin (Persian for "slider module").
    verbose_name = 'ماژول اسلایدر'
    # Dotted path / label of the app package; must match the package name.
    name = 'Slider'
|
"Define stdlibs"
load("@io_bazel_rules_dotnet//dotnet/private:rules/stdlib.bzl", "core_stdlib_internal")
load("@io_bazel_rules_dotnet//dotnet/private:rules/libraryset.bzl", "core_libraryset")
def define_stdlib():
"Declares stdlibs"
core_libraryset(
name = "NETStandard.Library",
deps = [
],
)
core_libraryset(
name = "Microsoft.AspNetCore.App",
deps = [
],
)
core_libraryset(
name = "Microsoft.NETCore.App",
deps = [
],
)
core_libraryset(
name = "Microsoft.WindowsDesktop.App",
deps = [
],
)
core_libraryset(
name = "libraryset",
deps = [
":microsoft.csharp.dll",
":microsoft.visualbasic.dll",
":microsoft.win32.primitives.dll",
":mscorlib.dll",
":netstandard.dll",
":system.appcontext.dll",
":system.buffers.dll",
":system.collections.concurrent.dll",
":system.collections.dll",
":system.collections.immutable.dll",
":system.collections.nongeneric.dll",
":system.collections.specialized.dll",
":system.componentmodel.annotations.dll",
":system.componentmodel.dataannotations.dll",
":system.componentmodel.dll",
":system.componentmodel.eventbasedasync.dll",
":system.componentmodel.primitives.dll",
":system.componentmodel.typeconverter.dll",
":system.configuration.dll",
":system.console.dll",
":system.core.dll",
":system.data.common.dll",
":system.data.dll",
":system.diagnostics.contracts.dll",
":system.diagnostics.debug.dll",
":system.diagnostics.diagnosticsource.dll",
":system.diagnostics.fileversioninfo.dll",
":system.diagnostics.process.dll",
":system.diagnostics.stacktrace.dll",
":system.diagnostics.textwritertracelistener.dll",
":system.diagnostics.tools.dll",
":system.diagnostics.tracesource.dll",
":system.diagnostics.tracing.dll",
":system.dll",
":system.drawing.dll",
":system.drawing.primitives.dll",
":system.dynamic.runtime.dll",
":system.globalization.calendars.dll",
":system.globalization.dll",
":system.globalization.extensions.dll",
":system.io.compression.brotli.dll",
":system.io.compression.dll",
":system.io.compression.filesystem.dll",
":system.io.compression.zipfile.dll",
":system.io.dll",
":system.io.filesystem.dll",
":system.io.filesystem.driveinfo.dll",
":system.io.filesystem.primitives.dll",
":system.io.filesystem.watcher.dll",
":system.io.isolatedstorage.dll",
":system.io.memorymappedfiles.dll",
":system.io.pipes.dll",
":system.io.unmanagedmemorystream.dll",
":system.linq.dll",
":system.linq.expressions.dll",
":system.linq.parallel.dll",
":system.linq.queryable.dll",
":system.memory.dll",
":system.net.dll",
":system.net.http.dll",
":system.net.httplistener.dll",
":system.net.mail.dll",
":system.net.nameresolution.dll",
":system.net.networkinformation.dll",
":system.net.ping.dll",
":system.net.primitives.dll",
":system.net.requests.dll",
":system.net.security.dll",
":system.net.servicepoint.dll",
":system.net.sockets.dll",
":system.net.webclient.dll",
":system.net.webheadercollection.dll",
":system.net.webproxy.dll",
":system.net.websockets.client.dll",
":system.net.websockets.dll",
":system.numerics.dll",
":system.numerics.vectors.dll",
":system.objectmodel.dll",
":system.reflection.dispatchproxy.dll",
":system.reflection.dll",
":system.reflection.emit.dll",
":system.reflection.emit.ilgeneration.dll",
":system.reflection.emit.lightweight.dll",
":system.reflection.extensions.dll",
":system.reflection.metadata.dll",
":system.reflection.primitives.dll",
":system.reflection.typeextensions.dll",
":system.resources.reader.dll",
":system.resources.resourcemanager.dll",
":system.resources.writer.dll",
":system.runtime.compilerservices.visualc.dll",
":system.runtime.dll",
":system.runtime.extensions.dll",
":system.runtime.handles.dll",
":system.runtime.interopservices.dll",
":system.runtime.interopservices.runtimeinformation.dll",
":system.runtime.interopservices.windowsruntime.dll",
":system.runtime.loader.dll",
":system.runtime.numerics.dll",
":system.runtime.serialization.dll",
":system.runtime.serialization.formatters.dll",
":system.runtime.serialization.json.dll",
":system.runtime.serialization.primitives.dll",
":system.runtime.serialization.xml.dll",
":system.security.claims.dll",
":system.security.cryptography.algorithms.dll",
":system.security.cryptography.csp.dll",
":system.security.cryptography.encoding.dll",
":system.security.cryptography.primitives.dll",
":system.security.cryptography.x509certificates.dll",
":system.security.dll",
":system.security.principal.dll",
":system.security.securestring.dll",
":system.servicemodel.web.dll",
":system.serviceprocess.dll",
":system.text.encoding.dll",
":system.text.encoding.extensions.dll",
":system.text.regularexpressions.dll",
":system.threading.dll",
":system.threading.overlapped.dll",
":system.threading.tasks.dataflow.dll",
":system.threading.tasks.dll",
":system.threading.tasks.extensions.dll",
":system.threading.tasks.parallel.dll",
":system.threading.thread.dll",
":system.threading.threadpool.dll",
":system.threading.timer.dll",
":system.transactions.dll",
":system.transactions.local.dll",
":system.valuetuple.dll",
":system.web.dll",
":system.web.httputility.dll",
":system.windows.dll",
":system.xml.dll",
":system.xml.linq.dll",
":system.xml.readerwriter.dll",
":system.xml.serialization.dll",
":system.xml.xdocument.dll",
":system.xml.xmldocument.dll",
":system.xml.xmlserializer.dll",
":system.xml.xpath.dll",
":system.xml.xpath.xdocument.dll",
":windowsbase.dll",
],
)
core_stdlib_internal(
name = "microsoft.csharp.dll",
version = "4.0.0.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/Microsoft.CSharp.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/Microsoft.CSharp.dll",
deps = [
],
)
core_stdlib_internal(
name = "microsoft.visualbasic.dll",
version = "10.0.0.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/Microsoft.VisualBasic.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/Microsoft.VisualBasic.dll",
deps = [
],
)
core_stdlib_internal(
name = "microsoft.win32.primitives.dll",
version = "4.1.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/Microsoft.Win32.Primitives.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/Microsoft.Win32.Primitives.dll",
deps = [
":system.runtime.dll",
],
)
core_stdlib_internal(
name = "mscorlib.dll",
version = "4.0.0.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/mscorlib.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/mscorlib.dll",
deps = [
":system.runtime.dll",
":system.runtime.extensions.dll",
":system.collections.dll",
":system.collections.nongeneric.dll",
":system.collections.concurrent.dll",
":system.objectmodel.dll",
":system.console.dll",
":system.runtime.interopservices.dll",
":system.diagnostics.tools.dll",
":system.diagnostics.contracts.dll",
":system.diagnostics.debug.dll",
":system.diagnostics.stacktrace.dll",
":system.diagnostics.tracing.dll",
":system.io.filesystem.dll",
":system.io.filesystem.driveinfo.dll",
":system.io.isolatedstorage.dll",
":system.componentmodel.dll",
":system.threading.thread.dll",
":system.threading.tasks.dll",
":system.reflection.emit.dll",
":system.reflection.emit.ilgeneration.dll",
":system.reflection.emit.lightweight.dll",
":system.reflection.primitives.dll",
":system.resources.resourcemanager.dll",
":system.resources.writer.dll",
":system.runtime.compilerservices.visualc.dll",
":system.runtime.interopservices.windowsruntime.dll",
":system.runtime.serialization.formatters.dll",
":system.security.claims.dll",
":system.security.cryptography.algorithms.dll",
":system.security.cryptography.primitives.dll",
":system.security.cryptography.csp.dll",
":system.security.cryptography.encoding.dll",
":system.security.cryptography.x509certificates.dll",
":system.security.principal.dll",
":system.text.encoding.extensions.dll",
":system.threading.dll",
":system.threading.overlapped.dll",
":system.threading.threadpool.dll",
":system.threading.tasks.parallel.dll",
":system.threading.timer.dll",
],
)
core_stdlib_internal(
name = "netstandard.dll",
version = "2.0.0.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/netstandard.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/netstandard.dll",
deps = [
":system.runtime.dll",
":system.io.memorymappedfiles.dll",
":system.io.pipes.dll",
":system.diagnostics.process.dll",
":system.security.cryptography.x509certificates.dll",
":system.runtime.extensions.dll",
":system.diagnostics.tools.dll",
":system.collections.dll",
":system.collections.nongeneric.dll",
":system.collections.concurrent.dll",
":system.objectmodel.dll",
":system.collections.specialized.dll",
":system.componentmodel.typeconverter.dll",
":system.componentmodel.eventbasedasync.dll",
":system.componentmodel.primitives.dll",
":system.componentmodel.dll",
":microsoft.win32.primitives.dll",
":system.console.dll",
":system.data.common.dll",
":system.runtime.interopservices.dll",
":system.diagnostics.tracesource.dll",
":system.diagnostics.contracts.dll",
":system.diagnostics.debug.dll",
":system.diagnostics.textwritertracelistener.dll",
":system.diagnostics.fileversioninfo.dll",
":system.diagnostics.stacktrace.dll",
":system.diagnostics.tracing.dll",
":system.drawing.primitives.dll",
":system.linq.expressions.dll",
":system.io.compression.dll",
":system.io.compression.zipfile.dll",
":system.io.filesystem.dll",
":system.io.filesystem.driveinfo.dll",
":system.io.filesystem.watcher.dll",
":system.io.isolatedstorage.dll",
":system.linq.dll",
":system.linq.queryable.dll",
":system.linq.parallel.dll",
":system.threading.thread.dll",
":system.net.requests.dll",
":system.net.primitives.dll",
":system.net.httplistener.dll",
":system.net.servicepoint.dll",
":system.net.nameresolution.dll",
":system.net.webclient.dll",
":system.net.http.dll",
":system.net.webheadercollection.dll",
":system.net.webproxy.dll",
":system.net.mail.dll",
":system.net.networkinformation.dll",
":system.net.ping.dll",
":system.net.security.dll",
":system.net.sockets.dll",
":system.net.websockets.client.dll",
":system.net.websockets.dll",
":system.runtime.numerics.dll",
":system.threading.tasks.dll",
":system.reflection.primitives.dll",
":system.resources.resourcemanager.dll",
":system.resources.writer.dll",
":system.runtime.compilerservices.visualc.dll",
":system.runtime.interopservices.runtimeinformation.dll",
":system.runtime.serialization.primitives.dll",
":system.runtime.serialization.xml.dll",
":system.runtime.serialization.json.dll",
":system.runtime.serialization.formatters.dll",
":system.security.claims.dll",
":system.security.cryptography.algorithms.dll",
":system.security.cryptography.csp.dll",
":system.security.cryptography.encoding.dll",
":system.security.cryptography.primitives.dll",
":system.security.principal.dll",
":system.text.encoding.extensions.dll",
":system.text.regularexpressions.dll",
":system.threading.dll",
":system.threading.overlapped.dll",
":system.threading.threadpool.dll",
":system.threading.tasks.parallel.dll",
":system.threading.timer.dll",
":system.transactions.local.dll",
":system.web.httputility.dll",
":system.xml.readerwriter.dll",
":system.xml.xdocument.dll",
":system.xml.xmlserializer.dll",
":system.xml.xpath.xdocument.dll",
":system.xml.xpath.dll",
],
)
core_stdlib_internal(
name = "system.appcontext.dll",
version = "4.2.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.AppContext.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.AppContext.dll",
deps = [
":system.runtime.dll",
],
)
core_stdlib_internal(
name = "system.buffers.dll",
version = "4.0.2.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Buffers.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Buffers.dll",
deps = [
],
)
core_stdlib_internal(
name = "system.collections.concurrent.dll",
version = "4.0.14.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Collections.Concurrent.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Collections.Concurrent.dll",
deps = [
":system.runtime.dll",
],
)
core_stdlib_internal(
name = "system.collections.dll",
version = "4.1.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Collections.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Collections.dll",
deps = [
":system.runtime.dll",
],
)
core_stdlib_internal(
name = "system.collections.immutable.dll",
version = "1.2.3.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Collections.Immutable.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Collections.Immutable.dll",
deps = [
":system.runtime.dll",
":system.runtime.interopservices.dll",
":system.collections.dll",
],
)
core_stdlib_internal(
name = "system.collections.nongeneric.dll",
version = "4.1.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Collections.NonGeneric.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Collections.NonGeneric.dll",
deps = [
":system.runtime.dll",
":system.runtime.extensions.dll",
],
)
core_stdlib_internal(
name = "system.collections.specialized.dll",
version = "4.1.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Collections.Specialized.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Collections.Specialized.dll",
deps = [
":system.runtime.dll",
":system.runtime.extensions.dll",
],
)
core_stdlib_internal(
name = "system.componentmodel.annotations.dll",
version = "4.2.0.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.ComponentModel.Annotations.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.ComponentModel.Annotations.dll",
deps = [
],
)
core_stdlib_internal(
name = "system.componentmodel.dataannotations.dll",
version = "4.0.0.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.ComponentModel.DataAnnotations.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.ComponentModel.DataAnnotations.dll",
deps = [
":system.runtime.dll",
":system.componentmodel.annotations.dll",
],
)
core_stdlib_internal(
name = "system.componentmodel.dll",
version = "4.0.3.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.ComponentModel.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.ComponentModel.dll",
deps = [
":system.runtime.dll",
],
)
core_stdlib_internal(
name = "system.componentmodel.eventbasedasync.dll",
version = "4.1.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.ComponentModel.EventBasedAsync.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.ComponentModel.EventBasedAsync.dll",
deps = [
":system.runtime.dll",
":system.threading.dll",
":system.componentmodel.primitives.dll",
":system.componentmodel.dll",
],
)
core_stdlib_internal(
name = "system.componentmodel.primitives.dll",
version = "4.2.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.ComponentModel.Primitives.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.ComponentModel.Primitives.dll",
deps = [
":system.runtime.dll",
":system.collections.nongeneric.dll",
":system.componentmodel.dll",
],
)
core_stdlib_internal(
name = "system.componentmodel.typeconverter.dll",
version = "4.2.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.ComponentModel.TypeConverter.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.ComponentModel.TypeConverter.dll",
deps = [
":system.runtime.dll",
":system.componentmodel.primitives.dll",
":system.componentmodel.dll",
":system.resources.resourcemanager.dll",
":system.runtime.extensions.dll",
":system.collections.nongeneric.dll",
":system.resources.writer.dll",
],
)
core_stdlib_internal(
name = "system.configuration.dll",
version = "4.0.0.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Configuration.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Configuration.dll",
deps = [
":system.runtime.dll",
],
)
core_stdlib_internal(
name = "system.console.dll",
version = "4.1.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Console.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Console.dll",
deps = [
":system.runtime.dll",
":system.runtime.extensions.dll",
],
)
core_stdlib_internal(
name = "system.core.dll",
version = "4.0.0.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Core.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Core.dll",
deps = [
":system.runtime.dll",
":system.io.memorymappedfiles.dll",
":system.io.pipes.dll",
":system.collections.dll",
":system.linq.expressions.dll",
":system.linq.dll",
":system.linq.queryable.dll",
":system.linq.parallel.dll",
":system.runtime.interopservices.dll",
":system.security.cryptography.algorithms.dll",
":system.security.cryptography.csp.dll",
":system.security.cryptography.x509certificates.dll",
":system.threading.dll",
":system.threading.tasks.dll",
],
)
core_stdlib_internal(
name = "system.data.common.dll",
version = "4.2.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Data.Common.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Data.Common.dll",
deps = [
":system.runtime.dll",
":system.componentmodel.typeconverter.dll",
":system.componentmodel.primitives.dll",
":system.runtime.extensions.dll",
":system.objectmodel.dll",
":system.componentmodel.dll",
":system.xml.readerwriter.dll",
":system.transactions.local.dll",
],
)
# Auto-generated reference-assembly targets for the .NET Core 2.1.6 shared
# framework (Microsoft.NETCore.App). Each core_stdlib_internal() call declares
# one framework assembly:
#   name        - target name (lowercased assembly file name)
#   version     - the assembly's AssemblyVersion as shipped in this framework
#   ref         - compile-time reference assembly inside the
#                 @Microsoft.NETCore.App.2.1.6 external repository
#   stdlib_path - runtime implementation assembly under core/shared/...
#   deps        - other framework targets this assembly's reference surface
#                 depends on (may be empty for leaf/facade assemblies)
# NOTE(review): this section appears machine-generated — edit the generator,
# not these entries, and do not reorder deps by hand.
core_stdlib_internal(
    name = "system.data.dll",
    version = "4.0.0.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Data.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Data.dll",
    deps = [
        ":system.runtime.dll",
        ":system.data.common.dll",
    ],
)
# --- System.Diagnostics.* ---
core_stdlib_internal(
    name = "system.diagnostics.contracts.dll",
    version = "4.0.3.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Diagnostics.Contracts.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Diagnostics.Contracts.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.diagnostics.debug.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Diagnostics.Debug.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Diagnostics.Debug.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.diagnostics.diagnosticsource.dll",
    version = "4.0.3.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Diagnostics.DiagnosticSource.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Diagnostics.DiagnosticSource.dll",
    deps = [
    ],
)
core_stdlib_internal(
    name = "system.diagnostics.fileversioninfo.dll",
    version = "4.0.3.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Diagnostics.FileVersionInfo.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Diagnostics.FileVersionInfo.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.diagnostics.process.dll",
    version = "4.2.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Diagnostics.Process.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Diagnostics.Process.dll",
    deps = [
        ":system.runtime.dll",
        ":system.runtime.interopservices.dll",
        ":system.componentmodel.primitives.dll",
        ":system.runtime.extensions.dll",
        ":system.diagnostics.fileversioninfo.dll",
        ":system.collections.nongeneric.dll",
        ":system.collections.specialized.dll",
    ],
)
core_stdlib_internal(
    name = "system.diagnostics.stacktrace.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Diagnostics.StackTrace.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Diagnostics.StackTrace.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.diagnostics.textwritertracelistener.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Diagnostics.TextWriterTraceListener.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Diagnostics.TextWriterTraceListener.dll",
    deps = [
        ":system.runtime.dll",
        ":system.runtime.extensions.dll",
        ":system.diagnostics.tracesource.dll",
    ],
)
core_stdlib_internal(
    name = "system.diagnostics.tools.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Diagnostics.Tools.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Diagnostics.Tools.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.diagnostics.tracesource.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Diagnostics.TraceSource.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Diagnostics.TraceSource.dll",
    deps = [
        ":system.runtime.dll",
        ":system.collections.nongeneric.dll",
        ":system.collections.specialized.dll",
    ],
)
core_stdlib_internal(
    name = "system.diagnostics.tracing.dll",
    version = "4.2.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Diagnostics.Tracing.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Diagnostics.Tracing.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
# System.dll is the big compatibility facade — it forwards to many of the
# split assemblies above/below, hence the large dep list.
core_stdlib_internal(
    name = "system.dll",
    version = "4.0.0.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.dll",
    deps = [
        ":system.runtime.dll",
        ":system.diagnostics.process.dll",
        ":system.security.cryptography.x509certificates.dll",
        ":system.diagnostics.tools.dll",
        ":system.runtime.extensions.dll",
        ":system.collections.concurrent.dll",
        ":system.collections.dll",
        ":system.objectmodel.dll",
        ":system.collections.specialized.dll",
        ":system.collections.nongeneric.dll",
        ":system.componentmodel.typeconverter.dll",
        ":system.componentmodel.eventbasedasync.dll",
        ":system.componentmodel.primitives.dll",
        ":system.componentmodel.dll",
        ":microsoft.win32.primitives.dll",
        ":system.diagnostics.tracesource.dll",
        ":system.diagnostics.debug.dll",
        ":system.diagnostics.textwritertracelistener.dll",
        ":system.diagnostics.fileversioninfo.dll",
        ":system.io.compression.dll",
        ":system.io.filesystem.watcher.dll",
        ":system.net.requests.dll",
        ":system.net.primitives.dll",
        ":system.net.httplistener.dll",
        ":system.net.servicepoint.dll",
        ":system.net.nameresolution.dll",
        ":system.net.webclient.dll",
        ":system.net.webheadercollection.dll",
        ":system.net.webproxy.dll",
        ":system.net.mail.dll",
        ":system.net.networkinformation.dll",
        ":system.net.ping.dll",
        ":system.net.security.dll",
        ":system.net.sockets.dll",
        ":system.net.websockets.client.dll",
        ":system.net.websockets.dll",
        ":system.runtime.interopservices.dll",
        ":system.security.cryptography.encoding.dll",
        ":system.text.regularexpressions.dll",
        ":system.threading.dll",
        ":system.threading.thread.dll",
    ],
)
core_stdlib_internal(
    name = "system.drawing.dll",
    version = "4.0.0.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Drawing.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Drawing.dll",
    deps = [
        ":system.runtime.dll",
        ":system.drawing.primitives.dll",
        ":system.componentmodel.typeconverter.dll",
    ],
)
core_stdlib_internal(
    name = "system.drawing.primitives.dll",
    version = "4.2.0.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Drawing.Primitives.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Drawing.Primitives.dll",
    deps = [
        ":system.runtime.dll",
        ":system.componentmodel.primitives.dll",
    ],
)
core_stdlib_internal(
    name = "system.dynamic.runtime.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Dynamic.Runtime.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Dynamic.Runtime.dll",
    deps = [
        ":system.runtime.dll",
        ":system.linq.expressions.dll",
    ],
)
core_stdlib_internal(
    name = "system.globalization.calendars.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Globalization.Calendars.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Globalization.Calendars.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.globalization.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Globalization.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Globalization.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.globalization.extensions.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Globalization.Extensions.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Globalization.Extensions.dll",
    deps = [
        ":system.runtime.dll",
        ":system.runtime.extensions.dll",
    ],
)
# --- System.IO.* ---
core_stdlib_internal(
    name = "system.io.compression.brotli.dll",
    version = "4.2.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.IO.Compression.Brotli.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.IO.Compression.Brotli.dll",
    deps = [
        ":system.runtime.dll",
        ":system.memory.dll",
        ":system.io.compression.dll",
    ],
)
core_stdlib_internal(
    name = "system.io.compression.dll",
    version = "4.2.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.IO.Compression.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.IO.Compression.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.io.compression.filesystem.dll",
    version = "4.0.0.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.IO.Compression.FileSystem.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.IO.Compression.FileSystem.dll",
    deps = [
        ":system.runtime.dll",
        ":system.io.compression.zipfile.dll",
    ],
)
core_stdlib_internal(
    name = "system.io.compression.zipfile.dll",
    version = "4.0.4.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.IO.Compression.ZipFile.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.IO.Compression.ZipFile.dll",
    deps = [
        ":system.runtime.dll",
        ":system.io.compression.dll",
    ],
)
core_stdlib_internal(
    name = "system.io.dll",
    version = "4.2.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.IO.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.IO.dll",
    deps = [
        ":system.runtime.dll",
        ":system.runtime.extensions.dll",
    ],
)
core_stdlib_internal(
    name = "system.io.filesystem.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.IO.FileSystem.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.IO.FileSystem.dll",
    deps = [
        ":system.runtime.dll",
        ":system.runtime.extensions.dll",
    ],
)
core_stdlib_internal(
    name = "system.io.filesystem.driveinfo.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.IO.FileSystem.DriveInfo.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.IO.FileSystem.DriveInfo.dll",
    deps = [
        ":system.runtime.dll",
        ":system.io.filesystem.dll",
    ],
)
core_stdlib_internal(
    name = "system.io.filesystem.primitives.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.IO.FileSystem.Primitives.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.IO.FileSystem.Primitives.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.io.filesystem.watcher.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.IO.FileSystem.Watcher.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.IO.FileSystem.Watcher.dll",
    deps = [
        ":system.runtime.dll",
        ":system.componentmodel.primitives.dll",
    ],
)
core_stdlib_internal(
    name = "system.io.isolatedstorage.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.IO.IsolatedStorage.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.IO.IsolatedStorage.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.io.memorymappedfiles.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.IO.MemoryMappedFiles.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.IO.MemoryMappedFiles.dll",
    deps = [
        ":system.runtime.dll",
        ":system.runtime.interopservices.dll",
    ],
)
core_stdlib_internal(
    name = "system.io.pipes.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.IO.Pipes.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.IO.Pipes.dll",
    deps = [
        ":system.runtime.dll",
        ":system.security.principal.dll",
    ],
)
core_stdlib_internal(
    name = "system.io.unmanagedmemorystream.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.IO.UnmanagedMemoryStream.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.IO.UnmanagedMemoryStream.dll",
    deps = [
        ":system.runtime.dll",
        ":system.runtime.interopservices.dll",
    ],
)
# --- System.Linq.* ---
core_stdlib_internal(
    name = "system.linq.dll",
    version = "4.2.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Linq.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Linq.dll",
    deps = [
        ":system.runtime.dll",
        ":system.collections.dll",
    ],
)
core_stdlib_internal(
    name = "system.linq.expressions.dll",
    version = "4.2.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Linq.Expressions.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Linq.Expressions.dll",
    deps = [
        ":system.runtime.dll",
        ":system.objectmodel.dll",
    ],
)
core_stdlib_internal(
    name = "system.linq.parallel.dll",
    version = "4.0.3.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Linq.Parallel.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Linq.Parallel.dll",
    deps = [
        ":system.runtime.dll",
        ":system.collections.concurrent.dll",
        ":system.linq.dll",
        ":system.collections.dll",
    ],
)
core_stdlib_internal(
    name = "system.linq.queryable.dll",
    version = "4.0.3.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Linq.Queryable.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Linq.Queryable.dll",
    deps = [
        ":system.runtime.dll",
        ":system.linq.expressions.dll",
        ":system.linq.dll",
    ],
)
core_stdlib_internal(
    name = "system.memory.dll",
    version = "4.1.0.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Memory.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Memory.dll",
    deps = [
        ":system.runtime.dll",
        ":system.runtime.interopservices.dll",
    ],
)
# --- System.Net.* ---
core_stdlib_internal(
    name = "system.net.dll",
    version = "4.0.0.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Net.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Net.dll",
    deps = [
        ":system.runtime.dll",
        ":system.net.primitives.dll",
        ":system.net.webclient.dll",
        ":system.net.webheadercollection.dll",
        ":system.net.requests.dll",
        ":system.net.networkinformation.dll",
        ":system.net.sockets.dll",
    ],
)
core_stdlib_internal(
    name = "system.net.http.dll",
    version = "4.2.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Net.Http.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Net.Http.dll",
    deps = [
        ":system.runtime.dll",
        ":system.net.primitives.dll",
        ":system.security.cryptography.x509certificates.dll",
        ":system.net.security.dll",
    ],
)
core_stdlib_internal(
    name = "system.net.httplistener.dll",
    version = "4.0.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Net.HttpListener.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Net.HttpListener.dll",
    deps = [
        ":system.runtime.dll",
        ":system.net.primitives.dll",
        ":system.net.security.dll",
        ":system.security.claims.dll",
        ":system.security.principal.dll",
        ":microsoft.win32.primitives.dll",
        ":system.collections.specialized.dll",
        ":system.security.cryptography.x509certificates.dll",
        ":system.net.webheadercollection.dll",
        ":system.net.websockets.dll",
    ],
)
core_stdlib_internal(
    name = "system.net.mail.dll",
    version = "4.0.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Net.Mail.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Net.Mail.dll",
    deps = [
        ":system.runtime.dll",
        ":system.collections.specialized.dll",
        ":system.componentmodel.eventbasedasync.dll",
        ":system.security.cryptography.x509certificates.dll",
        ":system.net.primitives.dll",
        ":system.net.servicepoint.dll",
    ],
)
core_stdlib_internal(
    name = "system.net.nameresolution.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Net.NameResolution.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Net.NameResolution.dll",
    deps = [
        ":system.runtime.dll",
        ":system.net.primitives.dll",
    ],
)
core_stdlib_internal(
    name = "system.net.networkinformation.dll",
    version = "4.2.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Net.NetworkInformation.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Net.NetworkInformation.dll",
    deps = [
        ":system.runtime.dll",
        ":system.net.primitives.dll",
        ":microsoft.win32.primitives.dll",
    ],
)
core_stdlib_internal(
    name = "system.net.ping.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Net.Ping.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Net.Ping.dll",
    deps = [
        ":system.runtime.dll",
        ":system.componentmodel.primitives.dll",
        ":system.net.primitives.dll",
        ":system.componentmodel.eventbasedasync.dll",
    ],
)
core_stdlib_internal(
    name = "system.net.primitives.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Net.Primitives.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Net.Primitives.dll",
    deps = [
        ":system.runtime.dll",
        ":system.runtime.interopservices.dll",
        ":microsoft.win32.primitives.dll",
    ],
)
core_stdlib_internal(
    name = "system.net.requests.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Net.Requests.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Net.Requests.dll",
    deps = [
        ":system.runtime.dll",
        ":system.net.webheadercollection.dll",
        ":system.collections.specialized.dll",
        ":system.net.primitives.dll",
        ":system.security.cryptography.x509certificates.dll",
        ":system.net.servicepoint.dll",
        ":system.net.security.dll",
        ":system.security.principal.dll",
    ],
)
core_stdlib_internal(
    name = "system.net.security.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Net.Security.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Net.Security.dll",
    deps = [
        ":system.runtime.dll",
        ":system.net.primitives.dll",
        ":system.collections.nongeneric.dll",
        ":system.security.cryptography.x509certificates.dll",
        ":system.security.principal.dll",
        ":system.collections.dll",
    ],
)
core_stdlib_internal(
    name = "system.net.servicepoint.dll",
    version = "4.0.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Net.ServicePoint.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Net.ServicePoint.dll",
    deps = [
        ":system.runtime.dll",
        ":system.net.primitives.dll",
        ":system.security.cryptography.x509certificates.dll",
        ":system.net.security.dll",
    ],
)
core_stdlib_internal(
    name = "system.net.sockets.dll",
    version = "4.2.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Net.Sockets.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Net.Sockets.dll",
    deps = [
        ":system.runtime.dll",
        ":system.net.primitives.dll",
    ],
)
core_stdlib_internal(
    name = "system.net.webclient.dll",
    version = "4.0.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Net.WebClient.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Net.WebClient.dll",
    deps = [
        ":system.runtime.dll",
        ":system.componentmodel.eventbasedasync.dll",
        ":system.componentmodel.primitives.dll",
        ":system.net.primitives.dll",
        ":system.net.webheadercollection.dll",
        ":system.collections.specialized.dll",
        ":system.net.requests.dll",
    ],
)
core_stdlib_internal(
    name = "system.net.webheadercollection.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Net.WebHeaderCollection.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Net.WebHeaderCollection.dll",
    deps = [
        ":system.runtime.dll",
        ":system.collections.specialized.dll",
    ],
)
core_stdlib_internal(
    name = "system.net.webproxy.dll",
    version = "4.0.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Net.WebProxy.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Net.WebProxy.dll",
    deps = [
        ":system.runtime.dll",
        ":system.net.primitives.dll",
        ":system.runtime.extensions.dll",
    ],
)
core_stdlib_internal(
    name = "system.net.websockets.client.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Net.WebSockets.Client.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Net.WebSockets.Client.dll",
    deps = [
        ":system.runtime.dll",
        ":system.net.websockets.dll",
        ":system.security.cryptography.x509certificates.dll",
        ":system.net.primitives.dll",
        ":system.net.security.dll",
    ],
)
core_stdlib_internal(
    name = "system.net.websockets.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Net.WebSockets.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Net.WebSockets.dll",
    deps = [
        ":system.runtime.dll",
        ":system.net.primitives.dll",
        ":system.collections.specialized.dll",
        ":system.security.principal.dll",
        ":microsoft.win32.primitives.dll",
    ],
)
core_stdlib_internal(
    name = "system.numerics.dll",
    version = "4.0.0.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Numerics.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Numerics.dll",
    deps = [
        ":system.runtime.dll",
        ":system.runtime.numerics.dll",
        ":system.numerics.vectors.dll",
    ],
)
core_stdlib_internal(
    name = "system.numerics.vectors.dll",
    version = "4.1.3.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Numerics.Vectors.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Numerics.Vectors.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.objectmodel.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.ObjectModel.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.ObjectModel.dll",
    deps = [
        ":system.runtime.dll",
        ":system.collections.dll",
    ],
)
# --- System.Reflection.* ---
core_stdlib_internal(
    name = "system.reflection.dispatchproxy.dll",
    version = "4.0.4.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Reflection.DispatchProxy.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Reflection.DispatchProxy.dll",
    deps = [
    ],
)
core_stdlib_internal(
    name = "system.reflection.dll",
    version = "4.2.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Reflection.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Reflection.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.reflection.emit.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Reflection.Emit.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Reflection.Emit.dll",
    deps = [
        ":system.runtime.dll",
        ":system.reflection.emit.ilgeneration.dll",
        ":system.reflection.primitives.dll",
    ],
)
core_stdlib_internal(
    name = "system.reflection.emit.ilgeneration.dll",
    version = "4.0.3.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Reflection.Emit.ILGeneration.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Reflection.Emit.ILGeneration.dll",
    deps = [
        ":system.runtime.dll",
        ":system.runtime.interopservices.dll",
        ":system.reflection.primitives.dll",
    ],
)
core_stdlib_internal(
    name = "system.reflection.emit.lightweight.dll",
    version = "4.0.3.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Reflection.Emit.Lightweight.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Reflection.Emit.Lightweight.dll",
    deps = [
        ":system.runtime.dll",
        ":system.reflection.emit.ilgeneration.dll",
    ],
)
core_stdlib_internal(
    name = "system.reflection.extensions.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Reflection.Extensions.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Reflection.Extensions.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.reflection.metadata.dll",
    version = "1.4.3.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Reflection.Metadata.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Reflection.Metadata.dll",
    deps = [
        ":system.runtime.dll",
        ":system.collections.immutable.dll",
    ],
)
core_stdlib_internal(
    name = "system.reflection.primitives.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Reflection.Primitives.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Reflection.Primitives.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.reflection.typeextensions.dll",
    version = "4.1.2.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Reflection.TypeExtensions.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Reflection.TypeExtensions.dll",
    deps = [
    ],
)
core_stdlib_internal(
    name = "system.resources.reader.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Resources.Reader.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Resources.Reader.dll",
    deps = [
        ":system.runtime.dll",
        ":system.resources.resourcemanager.dll",
    ],
)
core_stdlib_internal(
    name = "system.resources.resourcemanager.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Resources.ResourceManager.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Resources.ResourceManager.dll",
    deps = [
        ":system.runtime.dll",
        ":system.runtime.interopservices.dll",
    ],
)
core_stdlib_internal(
    name = "system.resources.writer.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Resources.Writer.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Resources.Writer.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
# --- System.Runtime.* (system.runtime.dll is the root of the dep graph) ---
core_stdlib_internal(
    name = "system.runtime.compilerservices.visualc.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Runtime.CompilerServices.VisualC.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Runtime.CompilerServices.VisualC.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.runtime.dll",
    version = "4.2.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Runtime.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Runtime.dll",
    deps = [
    ],
)
core_stdlib_internal(
    name = "system.runtime.extensions.dll",
    version = "4.2.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Runtime.Extensions.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Runtime.Extensions.dll",
    deps = [
        ":system.runtime.dll",
        ":system.security.principal.dll",
    ],
)
core_stdlib_internal(
    name = "system.runtime.handles.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Runtime.Handles.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Runtime.Handles.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.runtime.interopservices.dll",
    version = "4.2.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Runtime.InteropServices.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Runtime.InteropServices.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.runtime.interopservices.runtimeinformation.dll",
    version = "4.0.3.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Runtime.InteropServices.RuntimeInformation.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Runtime.InteropServices.RuntimeInformation.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.runtime.interopservices.windowsruntime.dll",
    version = "4.0.3.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Runtime.InteropServices.WindowsRuntime.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Runtime.InteropServices.WindowsRuntime.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.runtime.loader.dll",
    version = "4.0.3.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Runtime.Loader.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Runtime.Loader.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.runtime.numerics.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Runtime.Numerics.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Runtime.Numerics.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.runtime.serialization.dll",
    version = "4.0.0.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Runtime.Serialization.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Runtime.Serialization.dll",
    deps = [
        ":system.runtime.dll",
        ":system.runtime.serialization.primitives.dll",
        ":system.runtime.serialization.xml.dll",
        ":system.runtime.serialization.json.dll",
    ],
)
core_stdlib_internal(
    name = "system.runtime.serialization.formatters.dll",
    version = "4.0.3.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Runtime.Serialization.Formatters.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Runtime.Serialization.Formatters.dll",
    deps = [
        ":system.runtime.dll",
        ":system.collections.nongeneric.dll",
    ],
)
core_stdlib_internal(
    name = "system.runtime.serialization.json.dll",
    version = "4.0.4.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Runtime.Serialization.Json.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Runtime.Serialization.Json.dll",
    deps = [
        ":system.runtime.dll",
        ":system.runtime.serialization.xml.dll",
        ":system.xml.readerwriter.dll",
    ],
)
core_stdlib_internal(
    name = "system.runtime.serialization.primitives.dll",
    version = "4.2.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Runtime.Serialization.Primitives.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Runtime.Serialization.Primitives.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.runtime.serialization.xml.dll",
    version = "4.1.4.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Runtime.Serialization.Xml.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Runtime.Serialization.Xml.dll",
    deps = [
        ":system.runtime.dll",
        ":system.xml.readerwriter.dll",
        ":system.runtime.serialization.primitives.dll",
    ],
)
# --- System.Security.* ---
core_stdlib_internal(
    name = "system.security.claims.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Security.Claims.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Security.Claims.dll",
    deps = [
        ":system.runtime.dll",
        ":system.security.principal.dll",
        ":system.runtime.extensions.dll",
    ],
)
core_stdlib_internal(
    name = "system.security.cryptography.algorithms.dll",
    version = "4.3.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Security.Cryptography.Algorithms.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Security.Cryptography.Algorithms.dll",
    deps = [
        ":system.runtime.dll",
        ":system.security.cryptography.primitives.dll",
        ":system.security.cryptography.encoding.dll",
        ":system.runtime.extensions.dll",
    ],
)
core_stdlib_internal(
    name = "system.security.cryptography.csp.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Security.Cryptography.Csp.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Security.Cryptography.Csp.dll",
    deps = [
        ":system.runtime.dll",
        ":system.runtime.interopservices.dll",
        ":system.security.cryptography.algorithms.dll",
        ":system.security.cryptography.primitives.dll",
    ],
)
core_stdlib_internal(
    name = "system.security.cryptography.encoding.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Security.Cryptography.Encoding.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Security.Cryptography.Encoding.dll",
    deps = [
        ":system.runtime.dll",
        ":system.security.cryptography.primitives.dll",
    ],
)
core_stdlib_internal(
    name = "system.security.cryptography.primitives.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Security.Cryptography.Primitives.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Security.Cryptography.Primitives.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.security.cryptography.x509certificates.dll",
    version = "4.2.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Security.Cryptography.X509Certificates.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Security.Cryptography.X509Certificates.dll",
    deps = [
        ":system.runtime.dll",
        ":system.runtime.interopservices.dll",
        ":system.security.cryptography.algorithms.dll",
        ":system.security.cryptography.primitives.dll",
        ":system.security.cryptography.encoding.dll",
        ":system.net.primitives.dll",
        ":system.collections.nongeneric.dll",
    ],
)
core_stdlib_internal(
    name = "system.security.dll",
    version = "4.0.0.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Security.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Security.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.security.principal.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Security.Principal.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Security.Principal.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.security.securestring.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Security.SecureString.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Security.SecureString.dll",
    deps = [
        ":system.runtime.dll",
        ":system.runtime.interopservices.dll",
    ],
)
core_stdlib_internal(
    name = "system.servicemodel.web.dll",
    version = "4.0.0.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.ServiceModel.Web.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.ServiceModel.Web.dll",
    deps = [
        ":system.runtime.dll",
        ":system.runtime.serialization.json.dll",
    ],
)
core_stdlib_internal(
    name = "system.serviceprocess.dll",
    version = "4.0.0.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.ServiceProcess.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.ServiceProcess.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
# --- System.Text.* / System.Threading.* ---
core_stdlib_internal(
    name = "system.text.encoding.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Text.Encoding.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Text.Encoding.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.text.encoding.extensions.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Text.Encoding.Extensions.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Text.Encoding.Extensions.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.text.regularexpressions.dll",
    version = "4.2.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Text.RegularExpressions.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Text.RegularExpressions.dll",
    deps = [
        ":system.runtime.dll",
        ":system.runtime.extensions.dll",
        ":system.reflection.emit.ilgeneration.dll",
    ],
)
core_stdlib_internal(
    name = "system.threading.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Threading.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Threading.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.threading.overlapped.dll",
    version = "4.1.1.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Threading.Overlapped.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Threading.Overlapped.dll",
    deps = [
        ":system.runtime.dll",
    ],
)
core_stdlib_internal(
    name = "system.threading.tasks.dataflow.dll",
    version = "4.6.3.0",
    ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Threading.Tasks.Dataflow.dll",
    stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Threading.Tasks.Dataflow.dll",
    deps = [
    ],
)
core_stdlib_internal(
name = "system.threading.tasks.dll",
version = "4.1.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Threading.Tasks.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Threading.Tasks.dll",
deps = [
":system.runtime.dll",
],
)
core_stdlib_internal(
name = "system.threading.tasks.extensions.dll",
version = "4.3.0.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Threading.Tasks.Extensions.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Threading.Tasks.Extensions.dll",
deps = [
":system.runtime.dll",
],
)
core_stdlib_internal(
name = "system.threading.tasks.parallel.dll",
version = "4.0.3.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Threading.Tasks.Parallel.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Threading.Tasks.Parallel.dll",
deps = [
":system.runtime.dll",
":system.collections.concurrent.dll",
],
)
core_stdlib_internal(
name = "system.threading.thread.dll",
version = "4.1.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Threading.Thread.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Threading.Thread.dll",
deps = [
":system.runtime.dll",
":system.threading.dll",
":system.security.principal.dll",
":system.runtime.extensions.dll",
],
)
core_stdlib_internal(
name = "system.threading.threadpool.dll",
version = "4.1.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Threading.ThreadPool.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Threading.ThreadPool.dll",
deps = [
":system.runtime.dll",
":system.threading.overlapped.dll",
],
)
core_stdlib_internal(
name = "system.threading.timer.dll",
version = "4.1.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Threading.Timer.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Threading.Timer.dll",
deps = [
":system.runtime.dll",
],
)
core_stdlib_internal(
name = "system.transactions.dll",
version = "4.0.0.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Transactions.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Transactions.dll",
deps = [
":system.runtime.dll",
":system.transactions.local.dll",
],
)
core_stdlib_internal(
name = "system.transactions.local.dll",
version = "4.0.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Transactions.Local.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Transactions.Local.dll",
deps = [
":system.runtime.dll",
],
)
core_stdlib_internal(
name = "system.valuetuple.dll",
version = "4.0.3.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.ValueTuple.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.ValueTuple.dll",
deps = [
":system.runtime.dll",
],
)
core_stdlib_internal(
name = "system.web.dll",
version = "4.0.0.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Web.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Web.dll",
deps = [
":system.runtime.dll",
":system.web.httputility.dll",
],
)
core_stdlib_internal(
name = "system.web.httputility.dll",
version = "4.0.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Web.HttpUtility.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Web.HttpUtility.dll",
deps = [
":system.runtime.dll",
":system.runtime.extensions.dll",
":system.collections.specialized.dll",
],
)
core_stdlib_internal(
name = "system.windows.dll",
version = "4.0.0.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Windows.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Windows.dll",
deps = [
":system.runtime.dll",
":system.objectmodel.dll",
],
)
core_stdlib_internal(
name = "system.xml.dll",
version = "4.0.0.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Xml.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Xml.dll",
deps = [
":system.runtime.dll",
":system.xml.readerwriter.dll",
":system.xml.xmlserializer.dll",
":system.xml.xpath.dll",
],
)
core_stdlib_internal(
name = "system.xml.linq.dll",
version = "4.0.0.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Xml.Linq.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Xml.Linq.dll",
deps = [
":system.runtime.dll",
":system.xml.xdocument.dll",
":system.xml.xpath.xdocument.dll",
],
)
core_stdlib_internal(
name = "system.xml.readerwriter.dll",
version = "4.2.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Xml.ReaderWriter.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Xml.ReaderWriter.dll",
deps = [
":system.runtime.dll",
":system.runtime.extensions.dll",
":system.diagnostics.debug.dll",
":system.net.primitives.dll",
":system.collections.nongeneric.dll",
],
)
core_stdlib_internal(
name = "system.xml.serialization.dll",
version = "4.0.0.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Xml.Serialization.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Xml.Serialization.dll",
deps = [
":system.runtime.dll",
":system.xml.readerwriter.dll",
":system.xml.xmlserializer.dll",
],
)
core_stdlib_internal(
name = "system.xml.xdocument.dll",
version = "4.1.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Xml.XDocument.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Xml.XDocument.dll",
deps = [
":system.runtime.dll",
":system.xml.readerwriter.dll",
":system.runtime.extensions.dll",
],
)
core_stdlib_internal(
name = "system.xml.xmldocument.dll",
version = "4.1.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Xml.XmlDocument.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Xml.XmlDocument.dll",
deps = [
":system.runtime.dll",
":system.xml.readerwriter.dll",
],
)
core_stdlib_internal(
name = "system.xml.xmlserializer.dll",
version = "4.1.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Xml.XmlSerializer.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Xml.XmlSerializer.dll",
deps = [
":system.runtime.dll",
":system.collections.specialized.dll",
":system.xml.readerwriter.dll",
":system.collections.nongeneric.dll",
":system.runtime.extensions.dll",
],
)
core_stdlib_internal(
name = "system.xml.xpath.dll",
version = "4.1.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Xml.XPath.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Xml.XPath.dll",
deps = [
":system.runtime.dll",
":system.xml.readerwriter.dll",
":system.runtime.extensions.dll",
],
)
core_stdlib_internal(
name = "system.xml.xpath.xdocument.dll",
version = "4.1.1.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/System.Xml.XPath.XDocument.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/System.Xml.XPath.XDocument.dll",
deps = [
":system.runtime.dll",
":system.xml.readerwriter.dll",
":system.xml.xdocument.dll",
],
)
core_stdlib_internal(
name = "windowsbase.dll",
version = "4.0.0.0",
ref = "@Microsoft.NETCore.App.2.1.6//:ref/netcoreapp2.1/WindowsBase.dll",
stdlib_path = ":core/shared/Microsoft.NETCore.App/2.1.6/WindowsBase.dll",
deps = [
":system.runtime.dll",
":system.objectmodel.dll",
],
)
|
import os
import sys
import hashlib
from sys import platform
from banner import access
from menu import OPEN
from plat import check
########################################
# Educational purpose only #
########################################
# I'm not responsible for your actions #
########################################
# Created By: TheTechHacker #
########################################
"""
This tool is for encrypting passwords or messages to hash
it's meant for research purposes only
and any malicious usage of this tool is prohibited.
author : TheTechHacker
"""
check.plat()
def md5():
check.plat()
access.Pyhash()
access.description()
while True:
print "\033[1;32m"
hash = raw_input(" Enter Text: ")
print "\033[1;m"
hash = hashlib.md5(hash)
print (hash.hexdigest())
def sha1():
check.plat()
access.Pyhash()
access.description()
while True:
print "\033[1;32m"
sha1 = raw_input(" Enter Text: ")
print "\033[1;m"
hash = hashlib.sha1(sha1)
print (hash.hexdigest())
def sha224():
check.plat()
access.Pyhash()
access.description()
while True:
print "\033[1;32m"
sha224 = raw_input(" Enter Text: ")
print "\033[1;m"
hash = hashlib.sha224(sha224)
print (hash.hexdigest())
def sha256():
check.plat()
access.Hash()
access.description()
while True:
print "\033[1;32m"
sha256 = raw_input(" Enter Text: ")
print "\033[1;m"
hash = hashlib.sha256(sha256)
print (hash.hexdigest())
def sha384():
check.plat()
access.Hash()
access.description()
while True:
print "\033[1;32m"
sha384 = raw_input(" Enter Text: ")
print "\033[1;m"
hash = hashlib.sha384(sha384)
print (hash.hexdigest())
def sha512():
check.plat()
access.Hashed()
access.description()
while True:
print "\033[1;32m"
sha512 = raw_input(" Enter Text: ")
print "\033[1;m"
hash = hashlib.sha512(sha512)
print (hash.hexdigest())
# --- interactive entry point ----------------------------------------------
# Show the banner and menu, then dispatch on the user's single choice.
# Each hash function loops forever internally, so the wrapping print()
# around the calls never actually prints anything (the functions do not
# return); the surrounding print is effectively dead code.
access.Pyhash()
access.description()
OPEN.menu()
print "\033[1;32m"   # green prompt colour
user = raw_input("ENTER: ")
if user == "1":
    print (md5())
elif user == "2":
    print (sha1())
elif user == "3":
    print (sha224())
elif user == "4":
    print (sha256())
elif user == "5":
    print (sha384())
elif user == "6":
    print (sha512())
elif user == "99":
    exit("\033[1;34m Exiting \033[1;m")
else:
    exit("\033[1;34m ERROR \033[1;m")
#!/usr/bin/python
# Run continuously, getting the auto-added names from the DB and adding them to the
import urllib3
import sys
from xml.dom.minidom import parseString
import json
from pymongo import MongoClient, ReturnDocument
import time
# Namecheap API endpoints plus credentials for the sandbox and real accounts.
# SECURITY NOTE(review): API keys are committed in source; they should be
# moved to environment variables or a secrets store and the exposed keys
# rotated.
sandboxHost = 'https://api.sandbox.namecheap.com/xml.response'
sandboxAuthentication = 'ApiUser=rick1&ApiKey=0a1a2799eed246f791af0f2d941808b0&UserName=rick1'
realHost = 'https://api.namecheap.com/xml.response'
realAuthentication = 'ApiUser=rickmcgeer&ApiKey=613209274bb84fa7ae10f9d021a0e197&UserName=rickmcgeer'
# Managed domain (planet-ignite.net) and the client IP required by the API.
domainInfo = 'SLD=planet-ignite&TLD=net'
clientIP='clientIP=171.67.92.194'
autoDomainName = '.planet-ignite.net'
# Active selection: the production endpoint/credentials.
host = realHost
authentication = realAuthentication
class HostRecord:
    """A single DNS host record as accepted by the Namecheap setHosts API."""

    def __init__(self, hostName, address, recordType, TTL):
        """Store the record fields verbatim."""
        self.hostName = hostName
        self.address = address
        self.recordType = recordType
        self.TTL = TTL

    def specString(self, i):
        """Render this record as the setHosts query-string fragment for slot *i*."""
        fields = (i, self.hostName, i, self.address,
                  i, self.recordType, i, self.TTL)
        return 'HostName%d=%s&Address%d=%s&RecordType%d=%s&TTL%d=%s' % fields
# Records that must always survive a host-list rewrite: the parking-page
# CNAME and the root URL redirect for the production domain (sandbox keeps
# nothing).
sandboxKeepRecords = []
realKeepRecords = [HostRecord(u'www', u'parkingpage.namecheap.com.', u'CNAME', u'1800'), HostRecord(u'@', u'http://www.planet-ignite.net/', u'URL', u'1800')]
keepRecords = realKeepRecords
def mainRecords(hostRecords):
    """Keep only tuples whose third field is not the 'Type' marker."""
    return filter(lambda rec: rec[2] != 'Type', hostRecords)
def returnRecord(ip,name):
    # NOTE(review): `Record` is neither defined nor imported anywhere in this
    # file (the only record class here is HostRecord, which takes a different
    # argument list), so any call to this function raises NameError.  Looks
    # like dead or stale code — confirm and either fix the reference or
    # delete the function.
    record = Record(ip,name)
    return record
def execCommand(anURL):
    """Issue a GET request for *anURL* and return the raw response body."""
    manager = urllib3.PoolManager()
    return manager.request('GET', anURL).data
def getHosts():
    """Fetch the domain's current DNS host records from Namecheap.

    Returns a list of (Name, Address, Type) tuples, one per <host> element
    in the getHosts XML response.
    """
    url = '%s?Command=namecheap.domains.dns.getHosts&%s&%s&%s' % (
        host, authentication, domainInfo, clientIP)
    doc = parseString(execCommand(url))
    return [
        (node.getAttribute('Name'),
         node.getAttribute('Address'),
         node.getAttribute('Type'))
        for node in doc.getElementsByTagName('host')
    ]
def mainRecords(hostRecords):
    # NOTE(review): duplicate definition — byte-identical to the mainRecords
    # defined earlier in this file, which it silently shadows.  One of the
    # two should be removed.
    return filter(lambda x: x[2] != 'Type', hostRecords)
def makeSetHostURL(aHostList):
    """Build the namecheap.domains.dns.setHosts URL for *aHostList*.

    Each HostRecord is rendered via specString() with 1-based slot numbers
    and the fragments are joined with '&'.
    """
    fragments = [record.specString(slot)
                 for slot, record in enumerate(aHostList, start=1)]
    return '%s?Command=namecheap.domains.dns.setHosts&%s&%s&%s&%s' % (
        host, authentication, domainInfo, clientIP, '&'.join(fragments))
# Shared Mongo connection: the 'nodes' collection of the gee_master database.
# NOTE(review): the URI assumes a resolvable host literally named "mongodb"
# (e.g. a docker-compose service) — confirm for this deployment.
client = MongoClient('mongodb://mongodb:27017/')
db = client.gee_master
nodeCollection = db.nodes
def hostsFromDB():
    """Read the auto-added nodes from Mongo and turn them into A records.

    Only nodes whose dnsName ends with autoDomainName are kept; that suffix
    is stripped so only the host label is sent to Namecheap (TTL 1000).
    """
    strip = -len(autoDomainName)
    matching = (node for node in nodeCollection.find({})
                if node['dnsName'].endswith(autoDomainName))
    return [HostRecord(node['dnsName'][:strip], node['ipAddress'], 'A', 1000)
            for node in matching]
|
import torch
from torch import nn
import torch.nn.functional as F
import numpy as np
import pdb
from itertools import chain
def normalize(W):
    """Scale W down to unit Frobenius norm when its norm exceeds 1.

    For norms <= 1 the tensor is returned unchanged, because
    relu(norm - 1) + 1 == 1 in that range.
    """
    scale = torch.relu(torch.norm(W) - 1) + 1
    return W / scale
class SamePadConv(nn.Module):
    """1-D convolution with 'same' padding whose effective weights are
    modulated by a small controller network driven by EMAs of past
    gradients, plus an external memory matrix (self.W) that is read and
    written when a gradient-direction "trigger" fires.

    NOTE(review): several buffers are created with .cuda() directly, so a
    CUDA device is required — confirm that is intended.
    """

    def __init__(self, in_channels, out_channels, kernel_size, dilation=1, groups=1, gamma=0.9):
        super().__init__()
        self.receptive_field = (kernel_size - 1) * dilation + 1
        padding = self.receptive_field // 2
        self.conv = nn.Conv1d(
            in_channels, out_channels, kernel_size,
            padding=padding,
            dilation=dilation,
            groups=groups, bias=False
        )
        # Bias kept outside the conv so it can be calibrated separately.
        self.bias = torch.nn.Parameter(torch.zeros([out_channels]),requires_grad=True)
        self.padding=padding
        self.dilation = dilation
        self.kernel_size= kernel_size
        # Flattened parameter sizes of the conv; used to chunk gradients.
        self.grad_dim, self.shape = [], []
        for p in self.conv.parameters():
            self.grad_dim.append(p.numel())
            self.shape.append(p.size())
        self.dim = sum(self.grad_dim)
        self.in_channels = in_channels
        self.out_features= out_channels
        # One gradient chunk per input channel.
        self.n_chunks = in_channels
        self.chunk_in_d = self.dim // self.n_chunks
        self.chunk_out_d = int(in_channels*kernel_size// self.n_chunks)
        # Slow (grads) and fast (f_grads) EMAs of the normalized gradient.
        self.grads = torch.Tensor(sum(self.grad_dim)).fill_(0).cuda()
        self.f_grads = torch.Tensor(sum(self.grad_dim)).fill_(0).cuda()
        nh=64
        # Controller maps a gradient chunk to calibration factors for the
        # conv weights (calib_w), bias (calib_b) and output features (calib_f).
        self.controller = nn.Sequential(nn.Linear(self.chunk_in_d, nh), nn.SiLU())
        self.calib_w = nn.Linear(nh, self.chunk_out_d)
        self.calib_b = nn.Linear(nh, out_channels//in_channels)
        self.calib_f = nn.Linear(nh, out_channels//in_channels)
        # External memory: 32 slots, one column each; not trained by backprop.
        dim = self.n_chunks * (self.chunk_out_d + 2 * out_channels // in_channels)
        self.W = nn.Parameter(torch.empty(dim, 32), requires_grad=False)
        nn.init.xavier_uniform_(self.W.data)
        self.W.data = normalize(self.W.data)
        #self.calib_w = torch.nn.Parameter(torch.ones(out_channels, in_channels,1), requires_grad = True)
        #self.calib_b = torch.nn.Parameter(torch.zeros([out_channels]), requires_grad = True)
        #self.calib_f = torch.nn.Parameter(torch.ones(1,out_channels,1), requires_grad = True)
        # 'same' output length needs one trailing frame trimmed when the
        # receptive field is even.
        self.remove = 1 if self.receptive_field % 2 == 0 else 0
        self.gamma = gamma          # decay for the slow gradient EMA
        self.f_gamma = 0.3          # decay for the fast gradient / q EMAs
        self.cos = nn.CosineSimilarity(dim=0, eps=1e-6)
        self.trigger = 0            # set to 1 when fast/slow EMAs oppose
        self.tau = 0.75             # blend factor for memory reads/writes

    def ctrl_params(self):
        """Yield only the controller/calibration parameters (for a separate
        optimizer group)."""
        c_iter = chain(self.controller.parameters(), self.calib_w.parameters(),
                       self.calib_b.parameters(), self.calib_f.parameters())
        for p in c_iter:
            yield p

    def store_grad(self):
        """Fold the current conv-weight gradient into the fast and slow EMAs;
        outside training, arm the trigger when the two EMAs point in
        strongly opposite directions (cosine < -tau)."""
        #print('storing grad')
        grad = self.conv.weight.grad.data.clone()
        grad = nn.functional.normalize(grad)
        grad = grad.view(-1)
        self.f_grads = self.f_gamma * self.f_grads + (1-self.f_gamma) * grad
        if not self.training:
            e = self.cos(self.f_grads, self.grads)
            if e < -self.tau:
                self.trigger = 1
        self.grads = self.gamma * self.grads + (1-self.gamma) * grad

    def fw_chunks(self):
        """Compute per-chunk calibration factors (w, b, f) from the slow
        gradient EMA; on a trigger, blend them with values read from the
        external memory and write the memory back."""
        x = self.grads.view(self.n_chunks, -1)
        rep = self.controller(x)
        w = self.calib_w(rep)
        b = self.calib_b(rep)
        f = self.calib_f(rep)
        # q is the flattened query vector; smoothed by its own EMA.
        q = torch.cat([w.view(-1), b.view(-1), f.view(-1)])
        if not hasattr(self, 'q_ema'):
            setattr(self, 'q_ema', torch.zeros(*q.size()).float().cuda())
        else:
            self.q_ema = self.f_gamma * self.q_ema + (1-self.f_gamma)*q
            q = self.q_ema
        if self.trigger == 1:
            dim = w.size(0)
            self.trigger = 0
            # read: soft-attend over memory columns, keep the top 2.
            att = q @ self.W
            # NOTE(review): F.softmax without dim= is deprecated and relies
            # on the implicit-dim fallback — should pass dim explicitly.
            att = F.softmax(att/0.5)
            v, idx = torch.topk(att, 2)
            ww = torch.index_select(self.W, 1, idx)
            idx = idx.unsqueeze(1).float()
            old_w = ww @ idx
            # write memory: blend the read-out back into the selected slots.
            s_att = torch.zeros(att.size(0)).cuda()
            s_att[idx.squeeze().long()] = v.squeeze()
            W = old_w @ s_att.unsqueeze(0)
            mask = torch.ones(W.size()).cuda()
            mask[:, idx.squeeze().long()] = self.tau
            self.W.data = mask * self.W.data + (1-mask) * W
            self.W.data = normalize(self.W.data)
            # retrieve: split the read-out into w/b/f-shaped pieces and blend.
            ll = torch.split(old_w, dim)
            nw,nb, nf = w.size(1), b.size(1), f.size(1)
            o_w, o_b, o_f = torch.cat(*[ll[:nw]]), torch.cat(*[ll[nw:nw+nb]]), torch.cat(*[ll[-nf:]])
            # NOTE(review): bare except falling into pdb.set_trace() will
            # hang non-interactive runs; narrow it and raise or log instead.
            try:
                w = self.tau * w + (1-self.tau)*o_w.view(w.size())
                b = self.tau * b + (1-self.tau)*o_b.view(b.size())
                f = self.tau * f + (1-self.tau)*o_f.view(f.size())
            except:
                pdb.set_trace()
        # Shape f as (1, out_channels, 1) to broadcast over conv output.
        f = f.view(-1).unsqueeze(0).unsqueeze(2)
        return w.unsqueeze(0) ,b.view(-1),f

    def forward(self, x):
        """Convolve x with calibrated weights/bias, then scale by f."""
        w,b,f = self.fw_chunks()
        # NOTE(review): d0, d1 are never used — candidates for removal.
        d0, d1 = self.conv.weight.shape[1:]
        cw = self.conv.weight * w
        #cw = self.conv.weight
        # NOTE(review): bare except + pdb here as well — see fw_chunks.
        try:
            conv_out = F.conv1d(x, cw, padding=self.padding, dilation=self.dilation, bias = self.bias * b)
            out = f * conv_out
        except: pdb.set_trace()
        return out

    def representation(self, x):
        """Plain (uncalibrated) convolution, trimmed to 'same' length."""
        out = self.conv(x)
        if self.remove > 0:
            out = out[:, :, : -self.remove]
        return out

    def _forward(self, x):
        """Identical to representation(): plain conv with length trim."""
        out = self.conv(x)
        if self.remove > 0:
            out = out[:, :, : -self.remove]
        return out
class ConvBlock(nn.Module):
    """Two SamePadConv layers with GELU pre-activations and a residual
    connection; the shortcut is projected through a 1x1 conv when the
    channel counts differ or this is the final block."""

    def __init__(self, in_channels, out_channels, kernel_size, dilation, final=False, gamma=0.9):
        super().__init__()
        self.conv1 = SamePadConv(in_channels, out_channels, kernel_size, dilation=dilation, gamma=gamma)
        self.conv2 = SamePadConv(out_channels, out_channels, kernel_size, dilation=dilation, gamma=gamma)
        needs_projection = in_channels != out_channels or final
        self.projector = nn.Conv1d(in_channels, out_channels, 1) if needs_projection else None

    def ctrl_params(self):
        """Chained controller/calibration parameters of both convs."""
        return chain(self.conv1.controller.parameters(), self.conv1.calib_w.parameters(),
                     self.conv1.calib_b.parameters(), self.conv1.calib_f.parameters(),
                     self.conv2.controller.parameters(), self.conv2.calib_w.parameters(),
                     self.conv2.calib_b.parameters(), self.conv2.calib_f.parameters())

    def forward(self, x):
        shortcut = x if self.projector is None else self.projector(x)
        out = self.conv1(F.gelu(x))
        out = self.conv2(F.gelu(out))
        return out + shortcut
class DilatedConvEncoder(nn.Module):
    """Sequential stack of ConvBlocks with exponentially growing dilation
    (2**i for block i); the last block is flagged as final."""

    def __init__(self, in_channels, channels, kernel_size, gamma=0.9):
        super().__init__()
        blocks = []
        for i, out_ch in enumerate(channels):
            in_ch = channels[i-1] if i > 0 else in_channels
            blocks.append(ConvBlock(
                in_ch,
                out_ch,
                kernel_size=kernel_size,
                dilation=2**i,
                final=(i == len(channels)-1), gamma=gamma
            ))
        self.net = nn.Sequential(*blocks)

    def ctrl_params(self):
        """Yield the controller parameters of every block in the stack."""
        for p in chain(*(block.ctrl_params() for block in self.net)):
            yield p

    def forward(self, x):
        return self.net(x)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# test_utilities.py
"""unittest cases for utilities."""
#
# Copyright (c) 2020 Dan Cutright
# This file is part of DVHA-Stats, released under a MIT license.
# See the file LICENSE included with this distribution, also
# available at https://github.com/cutright/DVHA-Stats
import unittest
from os.path import join
import numpy as np
from numpy.testing import assert_array_equal
from dvhastats import utilities
from dateutil.parser import parse as date_parser
from dateutil.parser._parser import ParserError
# Locations of the CSV fixtures used throughout these tests.
basedata_dir = join("tests", "testdata")
example_data = join(basedata_dir, "multivariate_data_small.csv")
example_data_nh = join(basedata_dir, "multivariate_data_small_no-header.csv")
example_narrow_data = join(basedata_dir, "narrow_data.csv")
class TestUtilities(unittest.TestCase):
    """Unit tests for Utilities.

    setUp builds the expected parses of the small multivariate CSV fixture
    in three forms: a keyed dict (header variant), an index-keyed dict
    (no-header variant), and a transposed ndarray (rows = observations,
    columns = variables V1..V6).
    """

    def setUp(self):
        """Setup files and base data for utility testing."""
        self.data_path = example_data
        self.data_path_nh = example_data_nh
        # One inner list per variable/column of the CSV fixture.
        data = [
            [np.nan, 48.1, 48.3, 65.1, 47.1, 49.9, 49.5, 48.9, 35.5, 44.5],
            [51.9, 44.3, 44.5, 58.7, 41.1, 43.9, 43.9, 44.5, 31.1, 40.5],
            [48.5, 38.5, 37.1, 53.9, 27.1, 40.7, 34.5, 41.7, 28.7, 37.7],
            [33.9, 21.5, 20.1, 48.7, 12.1, 38.5, 13.9, 33.7, 25.3, 28.1],
            [19.1, 10.9, 9.9, 42.3, 4.3, 36.5, 6.1, 16.9, 13.9, 13.3],
            [12.7, 4.7, 3.9, 31.3, 3.1, 29.1, 3.7, 4.9, 6.9, 4.7],
        ]
        keys = ["V%s" % (i + 1) for i in range(6)]
        self.expected_dict = {key: data[i] for i, key in enumerate(keys)}
        self.expected_dict_nh = {i: row for i, row in enumerate(data)}
        # Transposed so rows are observations, matching import_data output.
        self.expected_arr = np.array(data).T
        self.expected_var_names = keys

    def test_csv_to_dict(self):
        """Test csv_to_dict"""
        data = utilities.csv_to_dict(self.data_path, dtype=float)
        self.assertEqual(data, self.expected_dict)

    def test_dict_to_array(self):
        """Test dict_to_array"""
        dict_data = utilities.csv_to_dict(self.data_path, dtype=float)
        arr = utilities.dict_to_array(dict_data)
        assert_array_equal(arr["data"], self.expected_arr)
        self.assertEqual(arr["var_names"], self.expected_var_names)

    def test_no_header_row(self):
        """Test import with no header row"""
        data = utilities.csv_to_dict(
            self.data_path_nh, dtype=float, header_row=False
        )
        self.assertEqual(data, self.expected_dict_nh)

    def test_apply_dtype(self):
        """Test the apply_dtype function"""
        test = utilities.apply_dtype("2.5", float)
        self.assertEqual(test, 2.5)
        test = utilities.apply_dtype(2.5, str)
        self.assertEqual(test, "2.5")
        # dtype=None should pass the value through unchanged.
        test = utilities.apply_dtype(2.5, None)
        self.assertEqual(test, 2.5)

    def test_import_data(self):
        """Test the generalized import data function"""
        # File test
        data, var_names = utilities.import_data(self.data_path)
        self.assertEqual(var_names, self.expected_var_names)
        assert_array_equal(data, self.expected_arr)
        # dict test
        data_dict = utilities.csv_to_dict(self.data_path, dtype=float)
        data_arr, var_names = utilities.import_data(data_dict)
        self.assertEqual(var_names, self.expected_var_names)
        assert_array_equal(data_arr, self.expected_arr)
        # array test: var names fall back to column indices.
        data_arr_2, var_names = utilities.import_data(data_arr)
        self.assertEqual(var_names, list(range(len(data_arr[0, :]))))
        assert_array_equal(data_arr_2, self.expected_arr)
        # Unsupported input type must raise.
        with self.assertRaises(NotImplementedError):
            utilities.import_data([0, 1])

    def test_get_sorted_indices(self):
        """Test the simple get sorted indices function"""
        data = [0, 3, 2]
        test = utilities.get_sorted_indices(data)
        self.assertEqual(test, [0, 2, 1])

    def test_sort_2d_array(self):
        """Test the 2D array sort using numpy"""
        # One column sort (NaN sorts to the end).
        arr = np.copy(self.expected_arr)
        arr = utilities.sort_2d_array(arr, 0)
        expected_sort = np.array(
            [
                [35.5, 31.1, 28.7, 25.3, 13.9, 6.9],
                [44.5, 40.5, 37.7, 28.1, 13.3, 4.7],
                [47.1, 41.1, 27.1, 12.1, 4.3, 3.1],
                [48.1, 44.3, 38.5, 21.5, 10.9, 4.7],
                [48.3, 44.5, 37.1, 20.1, 9.9, 3.9],
                [48.9, 44.5, 41.7, 33.7, 16.9, 4.9],
                [49.5, 43.9, 34.5, 13.9, 6.1, 3.7],
                [49.9, 43.9, 40.7, 38.5, 36.5, 29.1],
                [65.1, 58.7, 53.9, 48.7, 42.3, 31.3],
                [np.nan, 51.9, 48.5, 33.9, 19.1, 12.7],
            ]
        )
        assert_array_equal(arr, expected_sort)
        # Two column sort (column 1 primary, column 0 secondary).
        arr = np.copy(self.expected_arr)
        arr = utilities.sort_2d_array(arr, [1, 0])
        expected_sort = np.array(
            [
                [35.5, 31.1, 28.7, 25.3, 13.9, 6.9],
                [44.5, 40.5, 37.7, 28.1, 13.3, 4.7],
                [47.1, 41.1, 27.1, 12.1, 4.3, 3.1],
                [49.5, 43.9, 34.5, 13.9, 6.1, 3.7],
                [49.9, 43.9, 40.7, 38.5, 36.5, 29.1],
                [48.1, 44.3, 38.5, 21.5, 10.9, 4.7],
                [48.3, 44.5, 37.1, 20.1, 9.9, 3.9],
                [48.9, 44.5, 41.7, 33.7, 16.9, 4.9],
                [np.nan, 51.9, 48.5, 33.9, 19.1, 12.7],
                [65.1, 58.7, 53.9, 48.7, 42.3, 31.3],
            ]
        )
        assert_array_equal(arr, expected_sort)
        # Verify mode check
        with self.assertRaises(NotImplementedError):
            utilities.sort_2d_array(arr, 0, mode="test")

    def test_str_arr_to_date_arr(self):
        """Test str_arr_to_date_arr (str to datetime)"""
        # simple test
        data_str = ["1/2/2000", "5/1/2005"]
        data_dt = [date_parser(date) for date in data_str]
        test = utilities.str_arr_to_date_arr(data_str)
        self.assertEqual(test, data_dt)
        # unparsable str
        data_str.append("1/4/2004a")
        with self.assertRaises(ParserError):
            utilities.str_arr_to_date_arr(data_str)
        # force parsing: unparsable entries pass through as strings.
        test = utilities.str_arr_to_date_arr(data_str, force=True)
        expected = data_dt + [data_str[-1]]
        assert_array_equal(test, expected)

    def test_widen_data(self):
        """Test widen data script"""
        data_dict = utilities.csv_to_dict(example_narrow_data)
        kwargs = {
            "uid_columns": ["patient", "plan", "field id"],
            "x_data_cols": ["DD(%)", "DTA(mm)", "Threshold(%)"],
            "y_data_col": "Gamma Pass Rate(%)",
            "date_col": "date",
            "dtype": float,
        }
        ds = utilities.widen_data(data_dict, **kwargs)
        expected = {
            "uid": ["ANON1234 && Plan_name && 3", "ANON1234 && Plan_name && 4"],
            "date": ["6/13/2019 7:27", "6/13/2019 7:27"],
            "2.0 && 3.0 && 10.0": [np.nan, 99.99476863],
            "2.0 && 3.0 && 5.0": [99.88772435, 99.99533258],
            "3.0 && 2.0 && 10.0": [99.94708217, 99.99941874],
            "3.0 && 3.0 && 10.0": [99.97706894, 100],
            "3.0 && 3.0 && 5.0": [99.97934552, 100],
        }
        for key, exp_value in expected.items():
            assert_array_equal(ds[key], exp_value)
        # No date test
        kwargs_no_date = {key: value for key, value in kwargs.items()}
        kwargs_no_date["date_col"] = None
        ds_2 = utilities.widen_data(data_dict, **kwargs_no_date)
        for key, ds_2_value in ds_2.items():
            assert_array_equal(ds_2_value, expected[key])
        # test column length check
        data_dict_2 = utilities.csv_to_dict(example_narrow_data)
        data_dict_2[list(data_dict_2)[0]].append("test")
        with self.assertRaises(NotImplementedError):
            utilities.widen_data(data_dict_2, **kwargs)
        # test policy check: invalid policy raises, then each valid policy
        # ("last"/"min"/"max") resolves the duplicated measurement.
        with self.assertRaises(NotImplementedError):
            utilities.widen_data(data_dict, multi_val_policy="test", **kwargs)
        ds = utilities.widen_data(data_dict, multi_val_policy="last", **kwargs)
        index = ds["uid"].index("ANON1234 && Plan_name && 4")
        self.assertEqual(ds["2.0 && 3.0 && 10.0"][index], 50)
        ds = utilities.widen_data(data_dict, multi_val_policy="min", **kwargs)
        index = ds["uid"].index("ANON1234 && Plan_name && 4")
        self.assertEqual(ds["2.0 && 3.0 && 10.0"][index], 50)
        ds = utilities.widen_data(data_dict, multi_val_policy="max", **kwargs)
        index = ds["uid"].index("ANON1234 && Plan_name && 4")
        self.assertEqual(
            ds["2.0 && 3.0 && 10.0"][index],
            expected["2.0 && 3.0 && 10.0"][index],
        )
        # remove_partial_columns drops columns containing NaN entries.
        ds = utilities.widen_data(
            data_dict, remove_partial_columns=True, **kwargs
        )
        self.assertTrue("2.0 && 3.0 && 10.0" not in ds.keys())

    def test_is_numeric(self):
        """Test the is_numeric helper on a number and a non-number."""
        self.assertTrue(utilities.is_numeric(3))
        self.assertFalse(utilities.is_numeric('a'))
# Allow running this file directly; exit status reflects the test results.
if __name__ == "__main__":
    import sys
    sys.exit(unittest.main())
|
from magpie import Magpie
import time

# Benchmark Magpie word2vec training for embedding dimensions 10..500
# (step 10), saving each model and appending "<dim>,<runtime-seconds>"
# to a log file.

# time.clock() was deprecated in Python 3.3 and removed in 3.8; use the
# monotonic high-resolution timer when available, falling back to clock
# on ancient interpreters.
_timer = getattr(time, 'perf_counter', None) or time.clock

count = 10
magpie = Magpie()
while count <= 500:
    start = _timer()
    magpie.train_word2vec('data/hep-categories', vec_dim=count)
    magpie.save_word2vec_model('save/embeddings/here' + str(count), overwrite=True)
    runtime = _timer() - start
    print(str(count) + ',' + str(runtime))
    # Context manager guarantees the log is closed even if the write fails.
    with open('save/embeddings/here.txt', 'a') as log_file:
        log_file.write('\n' + str(count) + ',' + str(runtime))
    count = count + 10
import enum
from app import db
from datetime import datetime
class MessageType(enum.Enum):
    """Who authored a chat message: the human user or the bot."""
    USER = 'user'
    BOT = 'bot'
class Message(db.Model):
    """A single chat message persisted in the 'messages' table.

    Each row records who said it (msg_type), the text, the UTC timestamp
    assigned at construction, and the owning user.
    """
    __tablename__ = 'messages'
    id = db.Column(db.Integer, primary_key=True)
    msg_type = db.Column(db.Enum(MessageType), nullable=False)  # USER or BOT
    text = db.Column(db.Text, nullable=False)
    d_time = db.Column(db.DateTime, nullable=False)  # naive UTC (see __init__)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False)

    def __init__(self, text, msg_type, user_id):
        """Initializes a Message instance, stamping the current UTC time."""
        self.text = text
        # NOTE(review): datetime.utcnow() is deprecated as of Python 3.12;
        # datetime.now(timezone.utc) is the replacement, but it returns an
        # aware datetime — confirm column/consumer expectations before
        # switching.
        self.d_time = datetime.utcnow()
        self.msg_type = msg_type
        self.user_id = user_id

    def __repr__(self):
        """Obtains a string representation of the instance"""
        return '<Message {}>'.format(self.id)
from rest_framework.permissions import AllowAny
from rest_framework.generics import GenericAPIView
from rest_framework.views import APIView
from django.http import response
from django.shortcuts import redirect
from .models import UrlDetailes
from django.shortcuts import render, HttpResponse
from .models import UrlDetailes
from .serializers import UrlDetailesSerailizer
from rest_framework.response import Response
from rest_framework import status
def redirect_shortner(request, slug):
    """Resolve a short slug to its original URL and redirect there.

    Unknown slugs get a plain 'Url Not Existed' response rather than a 404.
    """
    try:
        entry = UrlDetailes.objects.get(shorted_url=slug)
    except UrlDetailes.DoesNotExist:
        return HttpResponse('Url Not Existed')
    return redirect(entry.original_url)
class UrlViewSet(GenericAPIView):
    """
    It Will Return Long URL to Shorten URL
    """
    permission_classes = [AllowAny]
    serializer_class = UrlDetailesSerailizer

    def post(self, request, format=None):
        """Create a shortened URL for the posted original_url.

        Returns 201 with the short link on success, or 400 with the
        serializer errors.
        """
        serializer_class = UrlDetailesSerailizer(data=request.data)
        if serializer_class.is_valid():
            serializer_class.save()
            # Re-fetch by original_url to obtain the generated short slug.
            # NOTE(review): .get() raises MultipleObjectsReturned if the same
            # original_url has been shortened more than once — confirm a
            # uniqueness constraint exists.
            data = UrlDetailes.objects.get(
                original_url=serializer_class.data['original_url'])
            # NOTE(review): base URL is hard-coded to the dev server; consider
            # building it from the request for non-local deployments.
            return Response('http://127.0.0.1:8000/{}'.format(data.shorted_url), status=status.HTTP_201_CREATED)
        return Response(serializer_class.errors, status=status.HTTP_400_BAD_REQUEST)
|
from q2_pepsirf.actions.norm import norm
from q2_pepsirf.actions.info import infoSumOfProbes, infoSNPN
from q2_pepsirf.actions.enrich import enrich
from q2_pepsirf.actions.zscore import zscore
from q2_pepsirf.actions.bin import bin
__all__ = ['norm', 'infoSumOfProbes', 'infoSNPN', 'enrich', 'zscore', 'bin'] |
"""Module to handle initialization, imports, for DeepDAO class"""
from .deepdao import *
|
#!/usr/bin/python3
"""Recipe for training a classifier using the
mobvoihotwords Dataset.
To run this recipe, use the following command:
> python train.py {hyperparameter_file}
Using your own hyperparameter file or one of the following:
hyperparams/xvect.yaml (xvector system)
"""
import os
import sys
import torch
import torchaudio
import speechbrain as sb
from hyperpyyaml import load_hyperpyyaml
from speechbrain.utils.distributed import run_on_main
from torch.utils.data import DataLoader
from tqdm import tqdm
class SpeakerBrain(sb.core.Brain):
    """Brain subclass for keyword-spotting (GSC/MobvoiHotwords) evaluation.

    Runs an encoder/classifier over context-windowed features and tallies
    per-keyword confusion counts ('hixiaowen' / 'nihaowenwen') during test.
    """
    def compute_forward(self, batch, stage):
        """Computation pipeline based on a encoder + command classifier.
        Data augmentation and environmental corruption are applied to the
        input speech.
        """
        batch = batch.to(self.device)
        wavs, lens = batch.sig
        # print("wavs.size():{}".format(wavs.size()))
        wav_len = wavs.shape[1]
        win_len = 24000
        # Right-pad clips shorter than the 24000-sample analysis window.
        # NOTE(review): the (1, ...) padding shape assumes batch size 1 —
        # confirm against the test dataloader settings.
        if wav_len < win_len:
            zero_pad = torch.zeros((1, win_len - wav_len)).to(self.device)
            # print("zero_pad.size():{}".format(zero_pad.size()))
            # NOTE(review): ``zero_pad`` above is unused; the cat below
            # re-creates an identical zero tensor.
            wavs = torch.cat((wavs, torch.zeros((1, win_len - wav_len)).to(self.device)), 1)
        # print("lens.size():{}".format(lens.size()))
        # Feature extraction and normalization
        feats = self.modules.compute_features(wavs)
        frame_num = feats.shape[1]
        if self.hparams.use_log1p:
            # Log1p reduces the emphasis on small differences
            feats = torch.log1p(feats)
        # Stack 75 past + 75 future frames around every frame (151 total).
        compute_cw = sb.processing.features.ContextWindow(left_frames=75, right_frames=75)
        # print("feats:{}".format(feats.shape))
        feats_contex = compute_cw(feats)
        # print("feats_contex0:{}".format(feats_contex.shape))
        feats_contex = feats_contex.transpose(0, 1)
        feats_contex = feats_contex.transpose(1, 2)
        # print("feats_contex0:{}".format(feats_contex.shape))
        # Turn each frame into its own item of shape (40 channels, 151 ctx).
        # NOTE(review): the hard-coded 40 assumes 40 feature channels —
        # confirm against the feature extractor configuration.
        feats_contex = torch.reshape(feats_contex, (frame_num, 40, 151))
        feats_contex = feats_contex.transpose(1, 2)
        # print("feats_contex1:{}".format(feats_contex.shape))
        # Drop edge frames whose context windows were zero-padded.
        feats_contex = feats_contex[75:-75, :, :]
        # print("feats_contex2:{}".format(feats_contex.shape))
        frame_num = feats_contex.shape[0]
        # noisy_feats = torch.transpose(feats_contex, 0, 1)
        # print("feats_contex:{}".format(feats_contex.shape))
        # print(noisy_feats.shape)
        feats = self.modules.mean_var_norm(feats_contex, torch.ones([frame_num]).to(self.device))
        # Embeddings + classifier
        outputs = self.modules.embedding_model(feats)
        # outputs = self.modules.classifier(embeddings)
        # print("outputs.size():{}".format(outputs.size()))
        # Ecapa model uses softmax outside of its classifer
        # if "softmax" in self.modules.keys():
        # outputs = self.modules.softmax(outputs)
        # NOTE(review): ``output_label`` is unused here; the per-frame
        # labels are re-derived in compute_objectives.
        output_label = torch.argmax(outputs[:, 0, :], dim=1).cpu().numpy()
        # output_label = np.sum(output_label)
        # print("output_label:{}".format(output_label.shape))
        # print(len(output_label))
        return outputs, lens
    def compute_objectives(self, predictions, batch, stage):
        """Computes the loss using command-id as label.

        NOTE: no loss is actually computed — the method tallies per-keyword
        confusion counts into ``self.result`` and returns None.
        """
        predictions, lens = predictions
        uttid = batch.id
        command, _ = batch.command_encoded
        # Concatenate labels (due to data augmentation)
        if stage == sb.Stage.TRAIN and self.hparams.apply_data_augmentation:
            command = torch.cat([command] * self.n_augment, dim=0)
        # # compute the cost function
        # # loss = self.hparams.compute_cost(predictions, command, lens)
        # # loss = sb.nnet.losses.nll_loss(predictions, command, lens)
        # if hasattr(self.hparams.lr_annealing, "on_batch_end"):
        # self.hparams.lr_annealing.on_batch_end(self.optimizer)
        # if stage != sb.Stage.TRAIN:
        # self.error_metrics.append(uttid, predictions, command, lens)
        keyword1_count = 0
        keyword2_count = 0
        # print("predictions[:, 0, :].shape:{}".format(predictions[:, 0, :].shape))
        # Per-frame argmax over classes; a keyword counts as "detected"
        # when any frame predicts its class.
        output_label = torch.argmax(predictions[:, 0, :], dim=1).cpu().numpy()
        # print("output_label.shape:{}".format(output_label))
        for t in range(predictions.shape[0]):
            if output_label[t] == 0:
                keyword1_count += 1
            if output_label[t] == 1:
                keyword2_count += 1
        # Class mapping: 0 -> 'hixiaowen', 1 -> 'nihaowenwen', 2 -> neither.
        # NOTE(review): the comparisons below assume one utterance per batch
        # (``command`` being a 1-element tensor) — confirm.
        if command == 0:
            if keyword1_count > 0:
                self.result['hixiaowen']['TP'] += 1
            else:
                self.result['hixiaowen']['FN'] += 1
            if keyword2_count > 0:
                self.result['nihaowenwen']['FP'] += 1
            else:
                self.result['nihaowenwen']['TN'] += 1
        if command == 1:
            if keyword1_count > 0:
                self.result['hixiaowen']['FP'] += 1
            else:
                self.result['hixiaowen']['TN'] += 1
            if keyword2_count > 0:
                self.result['nihaowenwen']['TP'] += 1
            else:
                self.result['nihaowenwen']['FN'] += 1
        if command == 2:
            if keyword1_count > 0:
                self.result['hixiaowen']['FP'] += 1
            if keyword2_count > 0:
                self.result['nihaowenwen']['FP'] += 1
        return None
    def on_stage_start(self, stage, epoch=None):
        """Gets called at the beginning of an epoch."""
        if stage != sb.Stage.TRAIN:
            self.error_metrics = self.hparams.error_stats()
        # Reset the per-keyword confusion counters.
        self.result = {}
        self.wake_words = ['hixiaowen', 'nihaowenwen']
        for wake_word in self.wake_words:
            self.result[wake_word] = {}
            self.result[wake_word].update({'TP': 0})
            self.result[wake_word].update({'FN': 0})
            self.result[wake_word].update({'FP': 0})
            self.result[wake_word].update({'TN': 0})
    def on_stage_end(self, stage, stage_loss, epoch=None):
        """Gets called at the end of an epoch."""
        # We also write statistics about test data to stdout and to the logfile.
        if stage == sb.Stage.TEST:
            # Print the confusion counts and derived rates per wake word.
            for wake_word in self.wake_words:
                print('result on {}'.format(wake_word))
                compute_metrics(self.result[wake_word])
        # self.hparams.train_logger.log_stats(
        # {"Epoch loaded": self.hparams.epoch_counter.current},
        # test_stats=stage_stats,
        # )
    def evaluate(
        self,
        test_set,
        max_key=None,
        min_key=None,
        progressbar=None,
        test_loader_kwargs={},
    ):
        """Iterate test_set and evaluate brain performance. By default, loads
        the best-performing checkpoint (as recorded using the checkpointer).
        Arguments
        ---------
        test_set : Dataset, DataLoader
            If a DataLoader is given, it is iterated directly. Otherwise passed
            to ``self.make_dataloader()``.
        max_key : str
            Key to use for finding best checkpoint, passed to
            ``on_evaluate_start()``.
        min_key : str
            Key to use for finding best checkpoint, passed to
            ``on_evaluate_start()``.
        progressbar : bool
            Whether to display the progress in a progressbar.
        test_loader_kwargs : dict
            Kwargs passed to ``make_dataloader()`` if ``test_set`` is not a
            DataLoader. NOTE: ``loader_kwargs["ckpt_prefix"]`` gets
            automatically overwritten to ``None`` (so that the test DataLoader
            is not added to the checkpointer).
        Returns
        -------
        average test loss
        """
        # NOTE(review): the mutable default ``test_loader_kwargs={}`` is
        # shared across calls and mutated below — consider a None default.
        if progressbar is None:
            progressbar = not self.noprogressbar
        if not isinstance(test_set, DataLoader):
            test_loader_kwargs["ckpt_prefix"] = None
            test_set = self.make_dataloader(
                test_set, sb.Stage.TEST, **test_loader_kwargs
            )
        self.on_evaluate_start(max_key=max_key, min_key=min_key)
        self.on_stage_start(sb.Stage.TEST, epoch=None)
        # print("Epoch loaded: {}".format(self.hparams['epoch_counter'].current))
        self.modules.eval()
        avg_test_loss = 0.0
        with torch.no_grad():
            for batch in tqdm(
                test_set, dynamic_ncols=True, disable=not progressbar
            ):
                self.step += 1
                # loss = self.evaluate_batch(batch, stage=Stage.TEST)
                # NOTE(review): ``stage=sb.Stage`` passes the Stage enum
                # class itself rather than sb.Stage.TEST; the TRAIN check in
                # compute_objectives is False either way, but this looks
                # unintended — confirm.
                out = self.compute_forward(batch, stage=sb.Stage)
                loss = self.compute_objectives(out, batch, stage=sb.Stage)
                # avg_test_loss = self.update_average(loss, avg_test_loss)
                # Debug mode only runs a few batches
                if self.debug and self.step == self.debug_batches:
                    break
            # Only run evaluation "on_stage_end" on main process
            run_on_main(
                self.on_stage_end, args=[sb.Stage.TEST, None]
            )
        self.step = 0
def dataio_prep(hparams):
    """Creates the datasets and their data processing pipelines.

    Builds the train/valid/test ``DynamicItemDataset`` objects from the csv
    annotations referenced in ``hparams``, attaches the audio and label
    pipelines, and loads or fits the command label encoder.

    Returns (train_data, valid_data, test_data, label_encoder).
    """
    data_folder = hparams["data_folder"]
    # 1. Declarations:
    train_data = sb.dataio.dataset.DynamicItemDataset.from_csv(
        csv_path=hparams["train_annotation"],
        replacements={"data_root": data_folder},
    )
    valid_data = sb.dataio.dataset.DynamicItemDataset.from_csv(
        csv_path=hparams["valid_annotation"],
        replacements={"data_root": data_folder},
    )
    test_data = sb.dataio.dataset.DynamicItemDataset.from_csv(
        csv_path=hparams["test_annotation"],
        replacements={"data_root": data_folder},
    )
    datasets = [train_data, valid_data, test_data]
    label_encoder = sb.dataio.encoder.CategoricalEncoder()
    # 2. Define audio pipeline:
    @sb.utils.data_pipeline.takes("wav", "start", "stop", "duration")
    @sb.utils.data_pipeline.provides("sig")
    def audio_pipeline(wav, start, stop, duration):
        # Load only the [start, stop) sample range of the file.
        # ``duration`` is taken from the csv but not used here.
        start = int(start)
        stop = int(stop)
        num_frames = stop - start
        sig, fs = torchaudio.load(
            wav, num_frames=num_frames, frame_offset=start
        )
        # (channels, samples) -> (samples,)
        sig = sig.transpose(0, 1).squeeze(1)
        return sig
    sb.dataio.dataset.add_dynamic_item(datasets, audio_pipeline)
    # 3. Define text pipeline:
    @sb.utils.data_pipeline.takes("command")
    @sb.utils.data_pipeline.provides("command", "command_encoded")
    def label_pipeline(command):
        yield command
        command_encoded = label_encoder.encode_sequence_torch([command])
        yield command_encoded
    sb.dataio.dataset.add_dynamic_item(datasets, label_pipeline)
    # 3. Fit encoder:
    # Load or compute the label encoder (with multi-GPU DDP support)
    lab_enc_file = os.path.join(hparams["save_folder"], "label_encoder.txt")
    label_encoder.load_or_create(
        path=lab_enc_file, from_didatasets=[train_data], output_key="command",
    )
    # 4. Set output:
    sb.dataio.dataset.set_output_keys(
        datasets, ["id", "sig", "command_encoded"]
    )
    return train_data, valid_data, test_data, label_encoder
def compute_metrics(result : dict, verbose=True):
    """Derive precision/recall and error rates from a confusion-count dict.

    ``result`` must carry integer counts under the keys 'TP', 'FN', 'TN'
    and 'FP'.  Any ratio with an empty denominator is reported as 0.0.

    Returns (precision, recall, false_positive_rate, false_negative_rate).
    """
    tp, fn = result['TP'], result['FN']
    tn, fp = result['TN'], result['FP']

    def _ratio(num, den):
        # Guard against division by zero when a denominator is empty.
        return num / den if den > 0 else 0.0

    precision = _ratio(tp, tp + fp)
    recall = _ratio(tp, tp + fn)
    false_positive_rate = _ratio(fp, fp + tn)
    false_negative_rate = _ratio(fn, fn + tp)
    if verbose:
        print("True Positive:{}".format(result['TP']))
        print("False Negative:{}".format(result['FN']))
        print("True Negative:{}".format(result['TN']))
        print("False Positive:{}".format(result['FP']))
        print("precise:{}".format(precision))
        print("recall:{}".format(recall))
        print("false_positive_rate:{}".format(false_positive_rate))
        print("false_negative_rate:{}".format(false_negative_rate))
    return precision, recall, false_positive_rate, false_negative_rate
if __name__ == "__main__":
# This flag enables the inbuilt cudnn auto-tuner
torch.backends.cudnn.benchmark = True
# CLI:
hparams_file, run_opts, overrides = sb.parse_arguments(sys.argv[1:])
# Initialize ddp (useful only for multi-GPU DDP training)
sb.utils.distributed.ddp_init_group(run_opts)
# Load hyperparameters file with command-line overrides
with open(hparams_file) as fin:
hparams = load_hyperpyyaml(fin, overrides)
# Create experiment directory
sb.core.create_experiment_directory(
experiment_directory=hparams["output_folder"],
hyperparams_to_save=hparams_file,
overrides=overrides,
)
# Dataset prep (parsing GSC and annotation into csv files)
from prepare_kws import prepare_kws
# Data preparation
run_on_main(
prepare_kws,
kwargs={
"data_folder": hparams["data_folder"],
"save_folder": hparams["output_folder"],
"skip_prep": hparams["skip_prep"],
},
)
# Dataset IO prep: creating Dataset objects and proper encodings for phones
train_data, valid_data, test_data, label_encoder = dataio_prep(hparams)
# Brain class initialization
speaker_brain = SpeakerBrain(
modules=hparams["modules"],
opt_class=hparams["opt_class"],
hparams=hparams,
run_opts=run_opts,
checkpointer=hparams["checkpointer"],
)
# # Training
# speaker_brain.fit(
# speaker_brain.hparams.epoch_counter,
# train_data,
# valid_data,
# train_loader_kwargs=hparams["dataloader_options"],
# valid_loader_kwargs=hparams["dataloader_options"],
# )
# Load the best checkpoint for evaluation
test_stats = speaker_brain.evaluate(
test_set=test_data,
min_key="ErrorRate",
test_loader_kwargs=hparams["dataloader_options"],
)
|
#!/home/loris/anaconda3/envs/py3/bin/python
import numpy as np
from numpy import pi
from math import sin,cos
import rospy
from beginner_tutorials.msg import o3d_coord_msg
from try_bayesian.msg import bayesian_msg
from scipy.spatial.transform import Rotation as R
import re
def normalizer(coord_to_be_norm, lower_limit, upper_limit):
    """Map a value in [0, 1] linearly onto [lower_limit, upper_limit]."""
    span = upper_limit - lower_limit
    return span * coord_to_be_norm + lower_limit
def eul2rotm(phi, theta, gamma):
    """Build a rotation matrix from ZYX Euler angles (radians).

    The result is Rz(phi) * Ry(theta) * Rx(gamma).
    """
    cphi, sphi = np.cos(phi), np.sin(phi)
    cth, sth = np.cos(theta), np.sin(theta)
    cg, sg = np.cos(gamma), np.sin(gamma)
    rot_z = np.matrix([[cphi, -sphi, 0], [sphi, cphi, 0], [0, 0, 1]])
    rot_y = np.matrix([[cth, 0, sth], [0, 1, 0], [-sth, 0, cth]])
    rot_x = np.matrix([[1, 0, 0], [0, cg, -sg], [0, sg, cg]])
    return rot_z * rot_y * rot_x
def taglia_stringa(pattern, stringa, tipo):
    """Extract the first capture group of ``pattern`` from ``stringa``.

    Pass ``tipo="numero"`` to convert the match to float, or
    ``tipo="testo"`` to return it as a string.

    Raises
    ------
    ValueError
        If the pattern does not match, or ``tipo`` is unknown.
    """
    match = re.search(pattern, stringa)
    if match is None:
        # BUG FIX: previously this raised an opaque AttributeError on
        # ``None.group`` when the pattern did not match.
        raise ValueError("pattern %r not found in %r" % (pattern, stringa))
    substring = match.group(1)
    if tipo == "numero":
        return float(substring)
    if tipo == "testo":
        return substring
    # BUG FIX: an unknown ``tipo`` used to silently return None.
    raise ValueError("tipo must be 'numero' or 'testo', got %r" % (tipo,))
def fromSENSORtoEEF(pos, quat):
    """Transform a sensor-frame pose into the end-effector frame.

    Parameters
    ----------
    pos : 3x1 matrix-like translation of the sensor.
    quat : quaternion (x, y, z, w) as accepted by scipy's Rotation.

    Returns
    -------
    (position ndarray(3), quaternion ndarray(4), 4x4 homogeneous matrix)
    """
    r = R.from_quat(quat)
    rot_mat = np.matrix(r.as_matrix(), 'float')
    hom = np.matrix(([0, 0, 0, 1]), 'float')
    pos1 = np.hstack((rot_mat, pos))
    pose = np.vstack((pos1, hom))
    # Fixed sensor-to-EEF offset (metres): x = -0.02, z = 0.05285.
    R1 = np.matrix(([1, 0, 0, -0.02],
                    [0, 1, 0, 0],
                    [0, 0, 1, 0.05285],
                    [0, 0, 0, 1]), 'float')
    R1_inv = np.linalg.inv(R1)
    plan_matrix = pose * R1_inv
    # BUG FIX: R.from_dcm was deprecated in SciPy 1.4 and removed in 1.6;
    # from_matrix is the supported equivalent (and is what the rest of
    # this script already uses).
    r = R.from_matrix(plan_matrix[:3, :3])
    quat2 = np.array(r.as_quat(), 'float')
    pos2 = np.array((plan_matrix[0, 3], plan_matrix[1, 3], plan_matrix[2, 3]), 'float')
    return pos2, quat2, plan_matrix
if __name__ == '__main__':
    # Read the scan parameters (sphere centre, radius, latitude/longitude
    # bounds) produced by the upstream reconstruction step.
    param_file_name = "/home/loris/ply_and_stl/param.txt"
    file = open(param_file_name,"r")
    parameters = file.readlines()
    # NOTE(review): the file handle is never closed — a ``with`` block
    # would be safer.
    #read parameters from file
    center_x = taglia_stringa("{(.*?)}",parameters[1],"numero")
    center_y = taglia_stringa("{(.*?)}",parameters[2],"numero")
    center_z = taglia_stringa("{(.*?)}",parameters[3],"numero")
    rho = taglia_stringa("{(.*?)}",parameters[4],"numero")
    lat_min = taglia_stringa("{(.*?)}",parameters[5],"numero")
    lat_max = taglia_stringa("{(.*?)}",parameters[6],"numero")
    # NOTE(review): parameters[7] is skipped — presumably a blank or label
    # line in param.txt; confirm the file layout.
    long_min = taglia_stringa("{(.*?)}",parameters[8],"numero")
    long_max = taglia_stringa("{(.*?)}",parameters[9],"numero")
    rospy.init_node('normalizer', anonymous=True)
    pub1 = rospy.Publisher("o3d_coord_msg", o3d_coord_msg, queue_size=100)
    msg_to_o3d = o3d_coord_msg()
    print('NORMALIZER NODE \n')
    while not rospy.is_shutdown():
        print("WAITING FOR THE MESSAGE FROM BAYESIAN OPTIMIZATION NODE")
        bayesian = rospy.wait_for_message("bayesian", bayesian_msg)
        phi_to_be_norm = bayesian.phi
        theta_to_be_norm = bayesian.theta
        print("MESSAGE RECEIVED: NORMALIZING COORDINATES")
        # Map the optimizer's outputs onto the configured latitude and
        # longitude ranges (degrees), then convert to radians.
        phi = normalizer(phi_to_be_norm, lat_min, lat_max)
        phi_rad = phi/180*pi
        theta = normalizer(theta_to_be_norm, long_min, long_max)
        theta_rad = theta/180*pi
        # NOTE(review): ``mat2`` is never used below — confirm it is dead.
        mat2 = eul2rotm(pi/2,0,-pi/2)
        print("CALCULATING COORDINATES")
        # Spherical -> Cartesian viewpoint on a sphere of radius ``rho``
        # centred at (center_x, center_y, center_z).
        sample_points = np.matrix(([rho*np.sin(theta_rad)*np.cos(phi_rad), rho*np.sin(theta_rad)*np.sin(phi_rad),rho*np.cos(theta_rad)]),'float32')
        sample_points[0,0] = sample_points[0,0] + center_x
        sample_points[0,1] = sample_points[0,1] + center_y
        sample_points[0,2] = sample_points[0,2] + center_z
        # Orthonormal frame attached to the sphere point (rows: radial and
        # the two tangential directions).
        rotm_sphere = np.matrix(([np.sin(theta_rad)*np.cos(phi_rad), np.sin(theta_rad)*np.sin(phi_rad), np.cos(theta_rad)],
                    [np.cos(theta_rad)*np.cos(phi_rad), np.cos(theta_rad)*np.sin(phi_rad), -np.sin(theta_rad)],
                    [-np.sin(phi_rad), np.cos(phi_rad), 0]))
        rotm_sphere1=rotm_sphere
        # Rotation axis for the extra roll: the second row of the frame.
        ux = rotm_sphere1[1,0]
        uy = rotm_sphere1[1,1]
        uz = rotm_sphere1[1,2]
        gamma = -pi/2
        com = 1-np.cos(gamma)
        ct = np.cos(gamma)
        st = np.sin(gamma)
        #DERIVE FROM EULER THEOREM
        #http://www.ladispe.polito.it/corsi/meccatronica/01GTG/2008-09/Slides/Rotazioni.pdf
        # Axis-angle (Rodrigues) rotation of ``gamma`` about (ux, uy, uz).
        rtp_camera = np.matrix(([ux*ux*com+ct, ux*uy*com-uz*st, ux*uz*com+uy*st],
                    [ux*uy*com+uz*st, uy*uy*com+ct, uy*uz*com-ux*st],
                    [ux*uz*com-uy*st, uy*uz*com+ux*st, uz*uz*com+ct]))
        # r = R.from_matrix(rtp_camera)
        # quate = np.array(r.as_quat(), 'float')
        mat = rtp_camera*(rotm_sphere.T)
        # NOTE(review): ``mat_vet`` is never used below — confirm it is dead.
        mat_vet = np.reshape(mat,-1)
        pos = np.matrix(([sample_points[0,0]],
                    [sample_points[0,1]],
                    [sample_points[0,2]]), 'float')
        r = R.from_matrix(mat)
        quat = np.array(r.as_quat(), 'float')
        # Convert the sensor-frame pose to the end-effector frame.
        pos, quat, plan_matrix = fromSENSORtoEEF(pos,quat)
        # Fill and publish the flattened pose message.
        msg_to_o3d.x = pos[0]
        msg_to_o3d.y = pos[1]
        msg_to_o3d.z = pos[2]
        msg_to_o3d.a1 = plan_matrix[0,0]
        msg_to_o3d.a2 = plan_matrix[0,1]
        msg_to_o3d.a3 = plan_matrix[0,2]
        msg_to_o3d.a4 = plan_matrix[1,0]
        msg_to_o3d.a5 = plan_matrix[1,1]
        msg_to_o3d.a6 = plan_matrix[1,2]
        msg_to_o3d.a7 = plan_matrix[2,0]
        msg_to_o3d.a8 = plan_matrix[2,1]
        msg_to_o3d.a9 = plan_matrix[2,2]
        rospy.sleep(2)
        pub1.publish(msg_to_o3d)
        print("COORDINATES SENT\n")
        print("-------------------------------------\n")
|
import os
import numpy as np
def softmax(logits):
    """Transforms predictions into probability values.

    Parameters
    ----------
    logits : array_like
        The logits predicted by the model.

    Returns
    -------
    `numpy.ndarray`
        Probability values corresponding to the logits.
    """
    assert logits.ndim == 1
    # Shift by the max logit for numerical stability; the softmax value is
    # mathematically unchanged, but exp() can no longer over/underflow.
    shifted = logits - np.max(logits)
    exps = np.exp(shifted)
    return exps / exps.sum()
def crossentropy(label, logits):
    """Calculates the cross-entropy.

    Parameters
    ----------
    logits : array_like
        The logits predicted by the model.
    label : int
        The label describing the target distribution.

    Returns
    -------
    float
        The cross-entropy between softmax(logits) and onehot(label).
    """
    assert logits.ndim == 1
    # Stabilized log-sum-exp: subtracting the max logit leaves the
    # cross-entropy unchanged while keeping exp() in a safe range.
    stable = logits - np.max(logits)
    logsumexp = np.log(np.sum(np.exp(stable)))
    return logsumexp - stable[label]
def batch_crossentropy(label, logits):
    """Calculates the cross-entropy for a batch of logits.

    Parameters
    ----------
    logits : array_like
        The logits predicted by the model for a batch of inputs.
    label : int
        The label describing the target distribution.

    Returns
    -------
    np.ndarray
        The cross-entropy between softmax(logits[i]) and onehot(label)
        for all i.
    """
    assert logits.ndim == 2
    # Row-wise max subtraction keeps exp() numerically safe without
    # changing the cross-entropy.
    stable = logits - logits.max(axis=1, keepdims=True)
    logsumexp = np.log(np.exp(stable).sum(axis=1))
    return logsumexp - stable[:, label]
def binarize(x, values, threshold=None, included_in='upper'):
    """Binarizes the values of x.

    Parameters
    ----------
    values : tuple of two floats
        The lower and upper value to which the inputs are mapped.
    threshold : float
        The threshold; defaults to (values[0] + values[1]) / 2 if None.
    included_in : str
        Whether the threshold value itself belongs to the lower or
        upper interval.
    """
    lower, upper = values
    if threshold is None:
        threshold = (lower + upper) / 2.
    result = x.copy()
    # The two masked assignments are applied sequentially, matching the
    # original two-pass behavior.
    if included_in == 'lower':
        result[result <= threshold] = lower
        result[result > threshold] = upper
    elif included_in == 'upper':
        result[result < threshold] = lower
        result[result >= threshold] = upper
    else:
        raise ValueError('included_in must be "lower" or "upper"')
    return result
def imagenet_example(shape=(224, 224), data_format='channels_last'):
    """ Returns an example image and its imagenet class label.

    Parameters
    ----------
    shape : list of integers
        The shape of the returned image.
    data_format : str
        "channels_first" or "channels_last"

    Returns
    -------
    image : array_like
        The example image.
    label : int
        The imagenet label associated with the image.

    NOTE: This function is deprecated and will be removed in the future.
    """
    assert len(shape) == 2
    assert data_format in ['channels_first', 'channels_last']
    from PIL import Image
    # The example image ships next to this module.
    path = os.path.join(os.path.dirname(__file__), 'example.png')
    img = Image.open(path).resize(shape)
    # Drop any alpha channel; keep RGB only.
    arr = np.asarray(img, dtype=np.float32)[:, :, :3]
    assert arr.shape == shape + (3,)
    if data_format == 'channels_first':
        arr = np.transpose(arr, (2, 0, 1))
    return arr, 282
def samples(dataset='imagenet', index=0, batchsize=1, shape=(224, 224),
            data_format='channels_last'):
    ''' Returns a batch of example images and the corresponding labels

    Parameters
    ----------
    dataset : string
        The data set to load (options: imagenet, mnist, cifar10,
        cifar100, fashionMNIST)
    index : int
        For each data set 20 example images exist. The returned batch
        contains the images with index [index, index + 1, index + 2, ...]
    batchsize : int
        Size of batch.
    shape : list of integers
        The shape of the returned image (only relevant for Imagenet).
    data_format : str
        "channels_first" or "channels_last"

    Returns
    -------
    images : array_like
        The batch of example images
    labels : array of int
        The labels associated with the images.
    '''
    from PIL import Image
    images, labels = [], []
    basepath = os.path.dirname(__file__)
    samplepath = os.path.join(basepath, 'data')
    files = os.listdir(samplepath)
    for idx in range(index, index + batchsize):
        # Only 20 samples per dataset exist on disk; wrap around.
        i = idx % 20
        # get filename and label
        # Files are named like "<dataset>_<ii>_...<label>.<ext>"; the label
        # is the integer just before the extension.
        file = [n for n in files if '{}_{:02d}_'.format(dataset, i) in n][0]
        label = int(file.split('.')[0].split('_')[-1])
        # open file
        path = os.path.join(samplepath, file)
        image = Image.open(path)
        if dataset == 'imagenet':
            image = image.resize(shape)
        image = np.asarray(image, dtype=np.float32)
        # MNIST images are grayscale, so there is no channel axis to move.
        if dataset != 'mnist' and data_format == 'channels_first':
            image = np.transpose(image, (2, 0, 1))
        images.append(image)
        labels.append(label)
    labels = np.array(labels)
    images = np.stack(images)
    return images, labels
def onehot_like(a, index, value=1):
    """Creates an array like a, with all values
    set to 0 except one.

    Parameters
    ----------
    a : array_like
        The returned one-hot array will have the same shape
        and dtype as this array
    index : int
        The index that should be set to `value`
    value : single value compatible with a.dtype
        The value to set at the given index

    Returns
    -------
    `numpy.ndarray`
        One-hot array with the given value at the given
        location and zeros everywhere else.
    """
    onehot = np.zeros_like(a)
    onehot[index] = value
    return onehot
|
import logging
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
import time
# Try to load the optional native ``plugin`` acceleration module; fall back
# gracefully when it is unavailable or fails to initialize.
plugin_enable=False
try:
    # ``subprocess`` is imported first so the except clause below can
    # reference subprocess.CalledProcessError even if ``plugin`` fails.
    import subprocess
    import plugin
    print("Import plugin successfully")
    plugin_enable=True
except (ImportError, RuntimeError, FileNotFoundError, subprocess.CalledProcessError, PermissionError) as e:
    print("Failing to import plugin, %r" % e)
    plugin_enable=False
# add on date 2019.12.26
# Small epsilon used below to avoid division by zero in adaptive scaling.
__EPS__ = 1e-5
## LQ-net
class LqNet_fm(torch.autograd.Function):
    """LQ-Net style learned quantizer for feature maps (activations).

    forward() quantizes ``inputs`` onto levels generated from a learned
    ``basis`` and, during training, refits the basis by least squares and
    blends it in with an EMA.  backward() passes gradients straight
    through, zeroing them where the input reached the top level (clipped
    straight-through estimator).
    """
    @staticmethod
    def forward(ctx, inputs, basis, codec_vector, codec_index, thrs_multiplier, training=True, half_range=False, auxil=None, adaptive='none'):
        """Quantize ``inputs``; returns (quantized output, updated basis)."""
        num_levels = codec_vector.shape[0]
        bit = codec_vector.shape[1]
        quant_group = basis.shape[1]
        # calculate levels and sort
        levels = torch.matmul(codec_vector, basis) # [num_levels * bit] * [bit * quant_group]
        levels, sort_id = torch.sort(levels, 0, descending=False)
        # calculate threshold
        thrs = torch.matmul(thrs_multiplier, levels) # [(num_levels - 1) * num_levels] * [num_levels * quant_group]
        # pre-processing of the inputs, according to adaptive
        if adaptive == 'mean':
            mean = inputs.mean([1,2,3], keepdim=True) + __EPS__
            inputs = inputs / mean
        if adaptive == 'var':
            std = inputs.std([1,2,3], keepdim=True) + __EPS__
            inputs = inputs / std
        if adaptive == 'min':
            lower = inputs.min()
            inputs = inputs - lower
        # feature map: b * c * h * w | b * c
        if quant_group != 1:
            x = inputs.transpose(1, 0) # bchw --> cbhw | bc --> cb
        else:
            x = inputs # keep origin shape
        # calculate output y and its binary codec
        x_shape = x.shape
        x = x.reshape(quant_group, -1)
        y = levels[0].unsqueeze(1).expand_as(x) # output
        codec = codec_index[sort_id[0]].unsqueeze(1).expand_as(x)
        # Staircase assignment: every element above threshold i is raised
        # to level i+1, with its integer code tracked alongside.
        for i in range(num_levels - 1):
            g = x > thrs[i].unsqueeze(1).expand_as(x)
            y = torch.where(g, levels[i + 1].unsqueeze(1).expand_as(x), y)
            codec = torch.where(g, codec_index[sort_id[i+1]].unsqueeze(1).expand_as(codec), codec)
        # y is ready here, means forward has been finished
        y = y.reshape(x_shape)
        if quant_group != 1:
            y = y.transpose(1,0) # cbhw --> bchw
        # Undo the adaptive pre-scaling on the quantized output.
        if adaptive == 'mean':
            y = y * mean
        if adaptive == 'var':
            y = y * std
        if adaptive == 'min':
            y = y + lower
        if not training:
            return y, basis
        # contine to compute the gradident of basis to avoid saving buffer to backward
        # Unpack each element's integer code into per-bit planes; for full
        # range, map bits {0, 1} onto {-1, +1}.
        # NOTE(review): ``codec / (2**i)`` relies on integer (floor)
        # division of an int8 tensor; on newer PyTorch ``/`` is true
        # division, which would break this — confirm the targeted torch
        # version or use torch.div(..., rounding_mode='floor').
        code = codec.new_ones(bit, x.shape[0], x.shape[1], dtype=torch.int8)
        for i in range(bit):
            code[i] = codec / (2**i) - codec / (2**(i+1)) * 2
            if not half_range:
                code[i] = code[i] * 2 - 1
        codec = None
        # calculate BTxX
        BTxX = inputs.new_zeros(bit, quant_group, 1)
        for i in range(bit):
            BTxXi0 = code[i].float() * x
            BTxXi0 = BTxXi0.sum(dim=1, keepdim=True)
            BTxX[i] = BTxXi0
        BTxX = BTxX.reshape(bit, quant_group)
        x = None
        # BTxB
        BTxB = inputs.new_zeros(bit*bit, quant_group, 1)
        for i in range(bit):
            for j in range(i+1):
                value = (code[i] * code[j]).float().sum(dim=1, keepdim=True)
                if i == j:
                    value = torch.where(value == 0, value.new_ones(value.shape) * 0.00001, value)
                else:
                    BTxB[j*bit + i] = value
                    BTxB[i*bit + j] = value
        # NOTE(review): the diagonal entries (i == j) are regularized above
        # but never stored into BTxB — only the ``else`` branch writes.
        # That leaves the diagonal zero and looks like an indentation bug;
        # confirm against the reference LQ-Net implementation.
        BTxB = BTxB.reshape(bit*bit, quant_group).reshape(bit, bit, quant_group).float()
        # inverse
        BTxB_transpose = BTxB.transpose(0, 2).transpose(1, 2)
        try:
            BTxB_inv = torch.inverse(BTxB_transpose)
        except RuntimeError:
            logging.info("LqNet_fm matrix has not inverse %r" % BTxB_transpose)
            raise RuntimeError("LqNet_fm matrix has no inverse for weight %r" % BTxB_transpose)
        BTxB_inv = BTxB_inv.transpose(1, 2).transpose(0, 2)
        # Least-squares refit of the basis, then an EMA update (0.9/0.1);
        # the raw refit is exposed to the caller through ``auxil``.
        new_basis = BTxB_inv * BTxX.expand_as(BTxB_inv)
        new_basis = new_basis.sum(dim=1, keepdim=True)
        new_basis = new_basis.squeeze(1)
        auxil.data = new_basis
        basis = 0.9 * basis + 0.1 * new_basis
        # Save what backward needs: the inputs and the top (clipping) level.
        ctx.save_for_backward(inputs, levels[num_levels - 1])
        return y, basis
    @staticmethod
    def backward(ctx, grad_output, grad_basis):
        """Straight-through gradient, zeroed where the input was clipped."""
        inputs, clip = ctx.saved_tensors
        quant_group = clip.size(0)
        if quant_group != 1:
            x = inputs.transpose(1,0)
        else:
            x = inputs
        x_shape = x.shape
        x = x.reshape(quant_group, -1)
        clip = clip.unsqueeze(1).expand_as(x)
        # Mask of elements at/above the top level: their gradient is zeroed.
        x = x >= clip
        x = x.reshape(x_shape)
        if quant_group != 1:
            x = x.transpose(1,0)
        #x = x.reshape(grad_output.shape)
        grad_input = grad_output.clone()
        grad_input.masked_fill_(x, 0)
        return grad_input, None, None, None, None, None, None, None, None
## LQ-net
class LqNet_wt(torch.autograd.Function):
    """LQ-Net style learned quantizer for weights.

    Same level/threshold scheme as LqNet_fm, but the input is grouped by
    reshaping to (quant_group, -1) and backward() is a plain
    straight-through estimator (gradients pass through unchanged).
    """
    @staticmethod
    def forward(ctx, inputs, basis, codec_vector, codec_index, thrs_multiplier, training=True, half_range=False, auxil=None, adaptive='none'):
        """Quantize weight tensor ``inputs``; returns (output, updated basis)."""
        num_levels = codec_vector.shape[0]
        bit = codec_vector.shape[1]
        quant_group = basis.shape[1]
        # calculate levels and sort
        levels = torch.matmul(codec_vector, basis)
        levels, sort_id = torch.sort(levels, 0, descending=False)
        # calculate threshold
        thrs = torch.matmul(thrs_multiplier, levels)
        # calculate output y and its binary codec
        origin_shape = inputs.shape
        x = inputs.reshape(quant_group, -1)
        # pre-processing of the inputs, according to adaptive
        if adaptive == 'mean':
            mean = x.mean(1, keepdim=True)
            x = x - mean
        if adaptive == 'var':
            std = x.std(1, keepdim=True) + __EPS__
            x = x / std
        if adaptive == 'mean-var':
            mean = x.mean(1, keepdim=True)
            std = x.std(1, keepdim=True) + __EPS__
            x = (x - mean) / std
        y = levels[0].unsqueeze(1).expand_as(x) # output
        codec = codec_index[sort_id[0]].unsqueeze(1).expand_as(x)
        # Staircase assignment: raise elements above each threshold to the
        # next level, tracking the integer code alongside.
        for i in range(num_levels - 1):
            g = x > thrs[i].unsqueeze(1).expand_as(x)
            y = torch.where(g, levels[i + 1].unsqueeze(1).expand_as(x), y)
            codec = torch.where(g, codec_index[sort_id[i+1]].unsqueeze(1).expand_as(codec), codec)
        # Undo the adaptive pre-scaling on the quantized output.
        if adaptive == 'mean':
            y = y + mean
            mean = None
        if adaptive == 'var':
            y = y * std
            std = None
        if adaptive == 'mean-var':
            y = y * std + mean
            std = None
            mean = None
        y = y.reshape(origin_shape)
        if not training:
            return y, basis
        # contine to compute the gradident of basis to avoid saving buffer to backward
        # NOTE(review): ``codec / (2**i)`` relies on integer (floor)
        # division of an int8 tensor; on newer PyTorch ``/`` is true
        # division — confirm the targeted torch version.
        code = codec.new_ones(bit, x.shape[0], x.shape[1], dtype=torch.int8)
        for i in range(bit):
            code[i] = codec / (2**i) - codec / (2**(i+1)) * 2
            if not half_range:
                code[i] = code[i] * 2 - 1
        codec = None
        # calculate BTxX
        BTxX = x.new_zeros(bit, quant_group, 1)
        for i in range(bit):
            BTxXi0 = code[i].float() * x
            BTxXi0 = BTxXi0.sum(dim=1, keepdim=True)
            BTxX[i] = BTxXi0
        BTxX = BTxX.reshape(bit, quant_group)
        # Small ridge term keeps the basis refit stable.
        BTxX = BTxX + (0.0001 * basis)
        x = None
        # BTxB
        BTxB = inputs.new_zeros(bit*bit, quant_group, 1)
        for i in range(bit):
            for j in range(i+1):
                value = (code[i] * code[j]).float().sum(dim=1, keepdim=True)
                if i == j:
                    value = (value + 0.0001) * 1.000001
                else:
                    BTxB[j*bit + i] = value
                    BTxB[i*bit + j] = value
        # NOTE(review): diagonal entries (i == j) are regularized but never
        # written into BTxB — only the ``else`` branch stores.  That leaves
        # the diagonal zero and looks like an indentation bug; confirm
        # against the reference LQ-Net implementation.
        BTxB = BTxB.reshape(bit*bit, quant_group).reshape(bit, bit, quant_group).float()
        # inverse
        BTxB_transpose = BTxB.transpose(0, 2).transpose(1, 2)
        try:
            BTxB_inv = torch.inverse(BTxB_transpose)
        except RuntimeError:
            logging.info("LqNet_wt matrix has not inverse %r" % BTxB_transpose)
            raise RuntimeError("LqNet_wt matrix has no inverse for weight %r" % BTxB_transpose)
        BTxB_inv = BTxB_inv.transpose(1, 2).transpose(0, 2)
        # Least-squares refit of the basis, then an EMA update (0.9/0.1);
        # the raw refit is exposed to the caller through ``auxil``.
        new_basis = BTxB_inv * BTxX.expand_as(BTxB_inv)
        new_basis = new_basis.sum(dim=1, keepdim=True)
        new_basis = new_basis.squeeze(1)
        auxil.data = new_basis
        basis = 0.9 * basis + 0.1 * new_basis
        return y, basis
    @staticmethod
    def backward(ctx, grad_output, grad_basis):
        """Straight-through estimator: pass gradients through unchanged."""
        return grad_output, None, None, None, None, None, None, None, None
|
# -*- coding:utf8 -*-
import time
import Article as art
# FIXME: many official accounts can push articles several times a day; this should be renamed to PublishArticle.
class PublishArticle:
    """One day's publication ("push") of a WeChat official account.

    Parses the raw message dict of a single push and collects its
    non-deleted articles, keyed by their position index.
    """
    # Publication type; only image-text pushes (type 49) are parsed.
    type = 49
    biz = ""
    nickname = "" # how to obtain this? just let the caller set it externally
    # Publish time as a Unix timestamp.
    datetime = 0
    # Today's articles, mapping idx -> Article.
    articles = {}
    # Human-readable form of ``datetime``.
    standardtime = ""
    def __init__(self, biz="MjM5MzI5NzQ1MA==", nickname="忘记起名字了", raw_dict=None):
        """Parse ``raw_dict`` (a single push message) into Article objects."""
        # Re-initialize per instance so instances do not share the
        # class-level mutable defaults.
        self.articles = {}
        self.datetime = 0
        self.biz = biz
        self.nickname = nickname
        if raw_dict:
            # Publish time and publication type.
            self.datetime = raw_dict['comm_msg_info']['datetime']
            self.standardtime = self.translate_time(self.datetime)
            self.type = raw_dict['comm_msg_info']['type']
            # Article details (only present for image-text pushes).
            if self.type == 49:
                self.init_articles(raw_dict)
            else:
                print(self.nickname + "在" + self.translate_time(self.datetime) + "未发布图文!")
        else:
            print("Error in new DailyArticles because of None raw_dict...")
    def init_articles(self, raw_dict):
        """Build Article objects for the headline and secondary items."""
        # Headline (first) article.
        article_first = art.Article(raw_dict['app_msg_ext_info'], self.biz, self.nickname, self.datetime, self.standardtime)
        # Keep it only if it has not been deleted.
        if article_first.del_flag != 1:
            self.articles[article_first.idx] = article_first
        # Secondary (multi-item) articles.
        other_dict = raw_dict['app_msg_ext_info']['multi_app_msg_item_list']
        num = raw_dict['app_msg_ext_info']['multi_app_msg_item_list'].__len__()
        i = 0
        while i < num:
            article = art.Article(other_dict[i], self.biz, self.nickname, self.datetime, self.standardtime)
            if article.del_flag != 1:
                self.articles[article.idx] = article
            i += 1
    def translate_time(self, timestamp):
        """Convert a Unix timestamp to a local 'YYYY-MM-DD HH:MM:SS' string."""
        time_local = time.localtime(timestamp)
        time_standard = time.strftime("%Y-%m-%d %H:%M:%S", time_local)
        return time_standard
    def get_first_article(self):
        """Return the article stored at idx 1 (the headline slot).

        NOTE(review): raises KeyError if the headline was deleted or uses
        a different idx — confirm that callers handle this.
        """
        return self.articles[1]
|
''' Override for function from built-in json module to allow formatting of JSON two container types
(arrays and objects), to be optionally specified separately by allowing `indent` to be a 2-tuple.
Copied this from v3.5.6 (Lib/json/encoder.py) but looks about the same in 3.4-3.6. Probably won't
work in >=3.7.
'''
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
_key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
## HACK: hand-optimized bytecode; turn globals into locals
ValueError=ValueError,
dict=dict,
float=float,
id=id,
int=int,
isinstance=isinstance,
list=list,
str=str,
tuple=tuple,
_intstr=int.__str__,
):
_array_indent = None
if isinstance(_indent, tuple):
(_indent, _array_indent) = _indent
else:
_array_indent = _indent
if _indent is not None and not isinstance(_indent, str):
_indent = ' ' * _indent
if _array_indent is not None and not isinstance(_array_indent, str):
_array_indent = ' ' * _array_indent
def _iterencode_list(lst, _current_indent_level):
if not lst:
yield '[]'
return
if markers is not None:
markerid = id(lst)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = lst
buf = '['
if _array_indent is not None:
_current_indent_level += 1
newline_indent = '\n' + _array_indent * _current_indent_level
separator = _item_separator + newline_indent
buf += newline_indent
else:
newline_indent = None
separator = _item_separator
first = True
for value in lst:
if first:
first = False
else:
buf = separator
if isinstance(value, str):
yield buf + _encoder(value)
elif value is None:
yield buf + 'null'
elif value is True:
yield buf + 'true'
elif value is False:
yield buf + 'false'
elif isinstance(value, int):
# Subclasses of int/float may override __str__, but we still
# want to encode them as integers/floats in JSON. One example
# within the standard library is IntEnum.
yield buf + _intstr(value)
elif isinstance(value, float):
# see comment above for int
yield buf + _floatstr(value)
else:
yield buf
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
yield from chunks
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + _array_indent * _current_indent_level
yield ']'
if markers is not None:
del markers[markerid]
def _iterencode_dict(dct, _current_indent_level):
    """Yield JSON text chunks encoding the dict *dct*.

    Non-string keys (int, float, bool, None) are coerced to their JSON
    string form; other key types either get skipped (_skipkeys) or raise.
    """
    if not dct:
        yield '{}'
        return
    # Circular-reference detection, symmetric to _iterencode_list.
    if markers is not None:
        markerid = id(dct)
        if markerid in markers:
            raise ValueError("Circular reference detected")
        markers[markerid] = dct
    yield '{'
    if _indent is not None:
        _current_indent_level += 1
        newline_indent = '\n' + _indent * _current_indent_level
        item_separator = _item_separator + newline_indent
        yield newline_indent
    else:
        newline_indent = None
        item_separator = _item_separator
    first = True
    if _sort_keys:
        items = sorted(dct.items(), key=lambda kv: kv[0])
    else:
        items = dct.items()
    for key, value in items:
        if isinstance(key, str):
            pass
        # JavaScript is weakly typed for these, so it makes sense to
        # also allow them. Many encoders seem to do something like this.
        elif isinstance(key, float):
            # see comment for int/float in _make_iterencode
            key = _floatstr(key)
        elif key is True:
            # bool before int: bool is an int subclass.
            key = 'true'
        elif key is False:
            key = 'false'
        elif key is None:
            key = 'null'
        elif isinstance(key, int):
            # see comment for int/float in _make_iterencode
            key = _intstr(key)
        elif _skipkeys:
            continue
        else:
            raise TypeError("key " + repr(key) + " is not a string")
        if first:
            first = False
        else:
            yield item_separator
        yield _encoder(key)
        yield _key_separator
        if isinstance(value, str):
            yield _encoder(value)
        elif value is None:
            yield 'null'
        elif value is True:
            yield 'true'
        elif value is False:
            yield 'false'
        elif isinstance(value, int):
            # see comment for int/float in _make_iterencode
            yield _intstr(value)
        elif isinstance(value, float):
            # see comment for int/float in _make_iterencode
            yield _floatstr(value)
        else:
            # Container or custom object: recurse.
            if isinstance(value, (list, tuple)):
                chunks = _iterencode_list(value, _current_indent_level)
            elif isinstance(value, dict):
                chunks = _iterencode_dict(value, _current_indent_level)
            else:
                chunks = _iterencode(value, _current_indent_level)
            yield from chunks
    if newline_indent is not None:
        _current_indent_level -= 1
        yield '\n' + _indent * _current_indent_level
    yield '}'
    if markers is not None:
        del markers[markerid]
def _iterencode(o, _current_indent_level):
    """Top-level dispatcher: yield JSON text chunks for any object *o*.

    Scalars are emitted directly; lists/dicts are delegated; anything
    else goes through _default (with its own circular-reference guard)
    and is re-encoded.
    """
    if isinstance(o, str):
        yield _encoder(o)
    elif o is None:
        yield 'null'
    elif o is True:
        # bool before int: bool is an int subclass.
        yield 'true'
    elif o is False:
        yield 'false'
    elif isinstance(o, int):
        # see comment for int/float in _make_iterencode
        yield _intstr(o)
    elif isinstance(o, float):
        # see comment for int/float in _make_iterencode
        yield _floatstr(o)
    elif isinstance(o, (list, tuple)):
        yield from _iterencode_list(o, _current_indent_level)
    elif isinstance(o, dict):
        yield from _iterencode_dict(o, _current_indent_level)
    else:
        # Unknown type: let the user-supplied default() convert it, and
        # guard against default() returning something that contains *o*.
        if markers is not None:
            markerid = id(o)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = o
        o = _default(o)
        yield from _iterencode(o, _current_indent_level)
        if markers is not None:
            del markers[markerid]
return _iterencode
|
#!/usr/bin/python
from datetime import datetime as dt
from sys import stdout
from requests import get
from time import sleep
from dbus import SessionBus
from multiprocessing import Process, Queue
notification_time = 10  # notification display time, in seconds
wait_interval = 360  # time between stream checks, in seconds
# Twitch login names to poll for live streams.
user_list = [
    'seamlessr',
    'guude',
    'coestar',
    'docm77live',
    'supermcgamer',
    'millbee',
    'anderzel',
    'vintagebeef',
    'mc_arkas',
    'aureylian',
    'pauseunpause',
    'pyropuncher',
    'w92baj'
]
# Per-user state: [is_live flag (0/1), 'created_at' of the seen stream].
users = {u: [0, ''] for u in user_list}
def send_notification(title, text, display_time):
    """Show a KDE desktop notification and close it after *display_time* seconds."""
    notifier = SessionBus().get_object("org.kde.knotify", "/Notify")
    event_id = notifier.event(
        "warning", "kde", [], title, u"%s" % text, [], [], 0, 0,
        dbus_interface="org.kde.KNotify",
    )
    sleep(display_time)
    notifier.closeNotification(event_id)
def check_stream(u, q):
    """Poll Twitch for user *u* and push state transitions onto queue *q*.

    Runs in a child process, so `users` is the snapshot copied at fork
    time; the parent folds the queued updates back into its own `users`.
    Transitions: (live, new 'created_at') -> reset to offline;
    (offline, stream present) -> notify and mark live.
    """
    #print('[%s] Checking %s...' % (dt.now().strftime('%I:%M:%p'), u))
    user_request = get('https://api.twitch.tv/kraken/streams/%s' % u, headers={'Client-ID': 'streamchecker-linux.py'})
    if not user_request.status_code == 200:
        # Return value is discarded by multiprocessing; effectively a no-op skip.
        return 'Unable to get page (err: %s)' % user_request.status_code
    user_info = user_request.json()
    if users[u][0] == 1 and user_info['stream'] and not users[u][1] == user_info['stream']['created_at']:
        q.put({u: [0, '']})
    if users[u][0] == 0 and user_info['stream']:
        # NOTE(review): this endpoint lacks the /kraken prefix (and TLS)
        # used above -- confirm it is the intended channels API URL.
        user_display = get('http://api.twitch.tv/channels/%s' % u).json()['display_name']
        send_notification('User Streaming', '%s is streaming!' % user_display, notification_time)
        print('[%s] %s is streaming!' % (dt.now().strftime('%I:%M %p'), user_display))
        q.put({u: [1, user_info['stream']['created_at']]})
if __name__ == '__main__':
    # Poll forever: spawn one checker process per user, wait out the
    # polling interval, then fold queued state updates back into `users`.
    while True:
        try:
            q = Queue()
            procs = []
            for u in user_list:
                p = Process(target=check_stream, args=(u, q,))
                procs.append(p)
                p.start()
            sleep(wait_interval)
            while not q.empty():
                users.update(q.get())
            # Kill any checker still running (e.g. hung on the network).
            for p in procs:
                p.terminate()
        except KeyboardInterrupt:
            print('Received KeyboardInterrupt...')
            sleep(0.5)
            break
|
import numpy as np
def spherical_to_cartesian(r, theta, phi):
    """Convert spherical coordinates to Cartesian (x, y, z).

    Here theta is the azimuthal angle and phi the polar angle
    (z = r cos(phi)).
    """
    sin_phi = np.sin(phi)
    return (r * sin_phi * np.cos(theta),
            r * sin_phi * np.sin(theta),
            r * np.cos(phi))
def cylindrical_to_cartesian(r, theta, z):
    """Convert cylindrical coordinates (r, theta, z) to Cartesian (x, y, z).

    theta is the azimuthal angle in radians; z passes through unchanged
    (the original's redundant self-assignment `z = z` is removed).
    """
    x = r * np.cos(theta)
    y = r * np.sin(theta)
    return x, y, z
def spherical_unit_vec_conversion(rhat, thetahat, phihat, theta, phi):
    """Convert spherical unit-vector components (r̂, θ̂, φ̂) to Cartesian.

    theta is the polar angle and phi the azimuthal angle, matching the
    z row (z = cos(theta) r̂ - sin(theta) θ̂).

    BUG FIX: the x row's r̂ coefficient was sin(theta)*sin(phi) -- the
    same as the y row's -- making x and y degenerate; the correct
    coefficient is sin(theta)*cos(phi).
    """
    x = np.sin(theta)*np.cos(phi)*rhat + np.cos(theta)*np.cos(phi)*thetahat - np.sin(phi)*phihat
    y = np.sin(theta)*np.sin(phi)*rhat + np.cos(theta)*np.sin(phi)*thetahat + np.cos(phi)*phihat
    z = np.cos(theta)*rhat - np.sin(theta)*thetahat
    return x, y, z
class NFW:
    """Rejection sampler for radii drawn from an NFW-like radial weight.

    use gen(n) to generate n random radii.
    """

    def __init__(self, R, c):
        # R: scale radius; c: concentration (sampling is truncated at r = c*R).
        self.R = R
        self.c = c
        # Normalisation constant of the weight integral up to c*R.
        self.norm = R**2 * (np.log(c+1) - c/(c+1))
        # Vectorised (un-normalised) probability weight.
        self.p = np.vectorize(self.__p)

    def __p(self, r):
        # For single generations, use p for multi.
        # Un-normalised weight r/(1+r/R)^2, truncated at c*R.
        if r < (self.c*self.R):
            return r/(1+r/self.R)**2
        else:
            return 0

    def __gen(self):
        # Rejection method: propose r uniform on [0, c*R]; the envelope
        # R/4 + 0.001 dominates the weight's maximum p(R) = R/4.
        y = 1e100
        x = 0
        while y >= self.__p(x):  # rejection method
            x = self.c*self.R * np.random.rand()
            y = (self.R/4 + 0.001) * np.random.rand()
        return x

    def vel(self, r):
        # Can be completely virialized, doesn't matter.
        # Returns one random unit direction vector per input radius,
        # shape (3, len(r)).  NOTE(review): cube-then-normalise is not
        # perfectly isotropic -- confirm acceptable.
        rngs = np.random.rand(3, len(r)) - 0.5
        rngs = rngs/np.sqrt((rngs**2).sum(0))
        return rngs

    def gen(self, n=1):
        # Draw n radii one at a time via rejection sampling.
        op = np.empty(n)
        for i in range(n):
            op[i] = self.__gen()
        return op
class GasZ:
    """Vertical sech^2 gas profile; gen(n) draws n random heights."""

    def __init__(self, Z):
        # Z is the scale height.
        self.Z = Z
        self.norm = 2 * Z  # integral of sech^2(z/Z) over all z
        # Both p and gen already operate on numpy arrays directly.
        self.p = self.__p
        self.gen = self.__gen

    def __p(self, z):
        """Un-normalised sech^2 probability weight."""
        return 1 / np.cosh(z / self.Z) ** 2

    def __gen(self, n=1):
        """Draw n samples by inverse-transform: z = Z * artanh(2u - 1)."""
        uniforms = np.random.rand(n)
        return self.Z * np.arctanh(2 * uniforms - 1)
class GasR:
    """Rejection sampler for an exponential-disc radial profile.

    use gen(n) to generate n random radii.
    """

    def __init__(self, R, max=10):
        # R the scale height; max is the factor of radius to stop calcing at (e.g. c above for NFW)
        self.R = R
        self.max = max
        self.norm = R**2
        self.p = np.vectorize(self.__p)
        self.vel = np.vectorize(self.__vel)

    def __p(self, r):
        # Un-normalised weight r*exp(-r/R), truncated at max*R.
        if r < (self.R*self.max):
            return r*np.exp(-r/self.R)
        else:
            return 0.

    def __gen(self):
        # Rejection method: the envelope R + 0.001 exceeds the weight's
        # maximum R/e (attained at r = R).
        y = 1e100
        x = 0
        while y >= self.__p(x):  # rejection method
            x = self.max*self.R * np.random.rand()
            y = (self.R + 0.001) * np.random.rand()
        return x

    def __vel(self, theta):
        # We want all of these going in the same direction -- of theta:
        # pure azimuthal (phi-hat) unit vectors in the z = 0 plane.
        return spherical_unit_vec_conversion(np.zeros_like(theta), np.zeros_like(theta), np.ones_like(theta), np.zeros_like(theta), theta)

    def gen(self, n=1):
        # Draw n radii one at a time via rejection sampling.
        op = np.empty(n)
        for i in range(n):
            op[i] = self.__gen()
        return op
if __name__ == "__main__":
    import matplotlib.pyplot as plt

    # Visual sanity check: overlay the analytic sech^2 density on a
    # histogram of 100k samples drawn from it.
    mygas = GasZ(10)
    r = np.arange(-100, 100, 0.05)
    p = mygas.p(r)/mygas.norm
    rvs = mygas.gen(100000)
    plt.plot(r, p)
    # `normed=` was removed from hist() in Matplotlib 3.1; `density=` is
    # the equivalent probability-density normalisation.
    plt.hist(rvs, bins=50, density=True)
    plt.show()
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
@version: 0.1
@author: quantpy
@file: timeout.py
@time: 2018-07-03 14:16
"""
from functools import wraps
from contextlib import contextmanager
import threading
import _thread
@contextmanager
def timeout_context(seconds, msg=''):
    """Context manager raising TimeoutError if its body outlives *seconds*.

    Mechanism: a background Timer fires _thread.interrupt_main(), which
    raises KeyboardInterrupt in the main thread; that interrupt is then
    translated into TimeoutError.  NOTE(review): this only works when
    entered from the main thread, and a real Ctrl-C during the body is
    indistinguishable from a timeout -- confirm that is acceptable.
    """
    timer = threading.Timer(seconds, lambda: _thread.interrupt_main())
    timer.start()
    try:
        yield
    except KeyboardInterrupt:
        raise TimeoutError(f'timeout for operation {msg}')
    finally:
        # Always disarm the timer so a finished body is not interrupted late.
        timer.cancel()
def timeout_decorator(seconds, msg=''):
    """Timeout decorator.

    When *seconds* > 0 the wrapped callable runs under timeout_context;
    a raised TimeoutError is caught and *returned* as the result rather
    than propagated.
    """
    def decorate(func):
        @wraps(func)
        def decorated_func(*args, **kwargs):
            try:
                if seconds <= 0:
                    return func(*args, **kwargs)
                with timeout_context(seconds, msg):
                    return func(*args, **kwargs)
            except TimeoutError as err:
                return err
        return decorated_func
    return decorate
class TimeoutJob(object):
def __init__(self, func, *args, **kwargs):
""""""
self.func = func
self.args = args
self.kwargs = kwargs
def get(self, timeout=0xffff):
ret = timeout_decorator(timeout)(self.func)(*self.args, **self.kwargs)
return ret
def __call__(self, *args, **kwargs):
return self.func(*args, **kwargs)
|
import sys
from collections import defaultdict
if __name__ == '__main__':
feats_rspecifier = sys.argv[1]
train_dir = sys.argv[2]
val_dir = sys.argv[3]
num_valid_utts = int(sys.argv[4])
feats = defaultdict(list)
with open(feats_rspecifier, 'r') as f:
for line in f:
line = line.strip()
utt, _ = line.split(None, 1)
spk = "_".join(utt.split('_')[:-1])
feats[spk].append(line)
for i, all_feats in enumerate(feats.values()):
with open('%s/feats_%.4d.scp' % (train_dir, i + 1), 'w') as f:
for line in all_feats[:-num_valid_utts]:
print >> f, line
for i, all_feats in enumerate(feats.values()):
with open('%s/feats_%.4d.scp' % (val_dir, i + 1), 'w') as f:
for line in all_feats[-num_valid_utts:]:
print >> f, line
|
from argus.backend.models import ArgusEventTypes
def event_process_posted_comment(event: dict) -> str:
    """Render a 'comment posted' event into its display message."""
    return event["message"].format(**event)


def event_process_status_changed(event: dict) -> str:
    """Render a 'test run status changed' event into its display message."""
    return event["message"].format(**event)


def event_process_assignee_changed(event: dict) -> str:
    """Render an 'assignee changed' event into its display message."""
    return event["message"].format(**event)


def event_process_issue_added(event: dict) -> str:
    """Render an 'issue added' event into its display message."""
    return event["message"].format(**event)


# Maps each ArgusEventTypes member to the renderer for its message template.
EVENT_PROCESSORS = {
    ArgusEventTypes.AssigneeChanged: event_process_assignee_changed,
    ArgusEventTypes.TestRunStatusChanged: event_process_status_changed,
    ArgusEventTypes.TestRunCommentPosted: event_process_posted_comment,
    ArgusEventTypes.TestRunIssueAdded: event_process_issue_added,
}
|
# -*- coding: utf-8 -*-
"""Parse Archimate XML Exchange File Format into a MongoDB DB""" |
from qtpyvcp.widgets.form_widgets.main_window import VCPMainWindow
from qtpy.QtCore import Slot
from qtpy.QtWidgets import QAbstractButton
from PyQt5.QtSql import QSqlDatabase
# Setup logging
from qtpyvcp.utilities import logger
LOG = logger.getLogger('qtpyvcp.' + __name__)
import os
current_path = os.path.dirname(os.path.realpath(__file__)) + '/'
import plasma_v1.tools as tools
class MyMainWindow(VCPMainWindow):
    """Main window class for the VCP."""

    def __init__(self, *args, **kwargs):
        super(MyMainWindow, self).__init__(*args, **kwargs)
        # Open the SQLite tool database shipped next to this module.
        db = QSqlDatabase.addDatabase('QSQLITE')
        db.setDatabaseName(current_path + 'plasma.db')
        if db.open():
            print("Connection success !")
        else:
            print("Connection failed !\n{}".format(db.lastError().text()))
        tools.toolsSetup(self)

    # The on_<objectName>_<signal> slots below are wired automatically by
    # Qt's connectSlotsByName; each nav button carries a custom 'page'
    # property holding the stacked-widget page index to show.

    @Slot(QAbstractButton)
    def on_mainNavBtns_buttonClicked(self, button):
        self.mainStkWidget.setCurrentIndex(button.property('page'))

    @Slot(QAbstractButton)
    def on_sideNavBtns_buttonClicked(self, button):
        # NOTE(review): 'sideStkWiget' looks misspelled -- confirm it
        # matches the widget's objectName in the .ui file before renaming.
        self.sideStkWiget.setCurrentIndex(button.property('page'))

    @Slot(QAbstractButton)
    def on_droNavBtns_buttonClicked(self, button):
        self.droStkWidget.setCurrentIndex(button.property('page'))

    @Slot(QAbstractButton)
    def on_holeNavBtns_buttonClicked(self, button):
        # Hole-ops pages live in two stacks kept in sync.
        self.holeOps1Stk.setCurrentIndex(button.property('page'))
        self.holeOps2Stk.setCurrentIndex(button.property('page'))

    @Slot(QAbstractButton)
    def on_notifyNavBtns_buttonClicked(self, button):
        self.notifyStkWidget.setCurrentIndex(button.property('page'))

    def on_exitAppBtn_clicked(self):
        self.app.quit()
|
#! /usr/bin/env python3
################################################################################
# File Name : l6e6.py
# Created By : Félix Chiasson (7138723)
# Creation Date : [2015-10-20 13:15]
# Last Modified : [2015-10-20 13:47]
# Description : ehhhh
################################################################################
def coder(L):
    """Encode *L* by swapping every adjacent pair of characters.

    "abcd" -> "badc"; with an odd length the final character is kept
    as-is ("abc" -> "bac").  Replaces the original's exception-driven
    loop (IndexError used as control flow) and its unreachable second
    `return` statement.
    """
    mot = ''
    for index in range(0, len(L) - 1, 2):
        mot += L[index + 1] + L[index]
    if len(L) % 2:
        mot += L[-1]
    return mot
# Interactive entry point: read a message and print its pair-swapped encoding.
bla = input("Veuillez entrer votre message: ")
print(coder(bla))
|
# tkinter is the Python standard library which serves as an interface to Tk, a simple toolkit, for building GUI applications using event-driven programming.
# https://python-textbok.readthedocs.io/en/stable/Introduction_to_GUI_Programming.html
# https://docs.python.org/3.7/library/tkinter.html
# FIRST GUI APP WITH WINDOW, LABEL AND TWO BUTTONS
from tkinter import Tk, Label, Button, LEFT, RIGHT, StringVar
class MyFirstGUI:
    """Demo Tk window: a clickable cycling label plus Greet/Close buttons."""

    # Messages the label cycles through on each click.
    LABEL_TEXT = [
        'This is our first GUI',
        'Actually, this is our second GUI',
        'We made it more interesting.....',
        '....by making this label interactive',
        'Go on, click it again!'
    ]

    def __init__(self, master):
        '''
        Initializes the app with a root window, passed to it from the Tk class
        '''
        self.master = master
        master.title("A simple GUI")
        self.label_index = 0
        self.label_text = StringVar()
        self.label_text.set(self.LABEL_TEXT[self.label_index])
        # The widgets below are not children of the root window above
        self.label = Label(master, textvariable=self.label_text)
        self.label.bind("<Button-1>", self.cycle_label_text)  # Bind an event to label
        # pack() is one of the geometry managers used to position a widget
        # inside its parent (grid() is recommended for complex GUIs;
        # place() is the last resort).  The side parameter specifies the
        # widget alignment inside the parent.
        self.label.pack(side=LEFT)
        self.greet_button = Button(master, text="Greet", command=self.greet)
        self.greet_button.pack()
        self.close_button = Button(master, text="Close", command=master.quit)
        self.close_button.pack(side=RIGHT)

    def greet(self):
        """Greet-button callback: print a greeting to the console."""
        print("Greetings!")

    def cycle_label_text(self, event):
        """Label-click callback: advance to the next message, wrapping around."""
        self.label_index += 1
        self.label_index %= len(self.LABEL_TEXT)
        self.label_text.set(self.LABEL_TEXT[self.label_index])
# Initialize the Tk class to create the root/main app window. An app contains
# only one root window but can have multiple sub-windows.
root = Tk()
my_gui = MyFirstGUI(root)
# Enter the Tk event loop; blocks until the window is closed.
root.mainloop()
# Execute this code to see a window with a title, a text label and two buttons – one which prints a message in the console, and one which closes the window.
# The window should have all the normal properties of any other window you encounter in your window manager – you are probably able to drag it around by the titlebar, resize it by dragging the frame, and maximise, minimise or close it using buttons on the titlebar.
# The window manager is the part of your operating system which handles windows. All the widgets inside a window, like buttons and other controls, may look different in every GUI toolkit, but the way that the window frames and title bars look and behave is determined by your window manager and should always stay the same.
|
#!/usr/bin/env python
import sys
from pyami import mrc
from pyami import correlator
from pyami import peakfinder
from pyami import imagefun
import scipy.fftpack
import scipy.ndimage
import scipy
import numpy
# Cache of previously computed polar mappings, keyed by
# (output_shape, center, dr, dt).
mappings = {}


def makeMapping(output_shape, center, dr, dt):
    """Build (and memoize) the polar->Cartesian coordinate lookup table.

    Returns an array of shape output_shape + (2,) whose last axis holds
    the (row, col) input coordinate for every output pixel: row index
    maps to radius (step dr), column index to angle (step dt).
    """
    key = (output_shape, center, dr, dt)
    cached = mappings.get(key)
    if cached is not None:
        return cached
    out_rows, out_cols = numpy.indices(output_shape)
    radii = dr * out_rows
    angles = dt * out_cols
    mapping = numpy.zeros(output_shape + (2,))
    mapping[:, :, 0] = center[0] + radii * numpy.cos(angles)
    mapping[:, :, 1] = center[1] + radii * numpy.sin(angles)
    mappings[key] = mapping
    return mapping
def dummy(output, mappingarray):
    """geometric_transform callback: look up the output coordinate in the
    precomputed mapping table and return the input coordinate tuple."""
    coords = mappingarray[output]
    return tuple(coords)
def polar_transform(image, output_shape, center):
    """Resample *image* onto a polar grid of *output_shape* around *center*.

    dr is chosen so the radial axis spans half the image diagonal; dt so
    the angular axis spans pi radians.
    Fix: the Python-2 ``print`` statements were invalid Python 3 syntax;
    replaced with print() calls (same debug output).
    """
    n = min(image.shape)
    dr = n / numpy.sqrt(2) / output_shape[0]
    dt = numpy.pi / output_shape[1]
    print('AAA')
    mapping = makeMapping(output_shape, center, dr, dt)
    print('BBB')
    return scipy.ndimage.geometric_transform(image, dummy, output_shape=output_shape, mode='constant', cval=0, extra_arguments=(mapping,), order=1)
    #return scipy.ndimage.geometric_transform(image, mapping.__getitem__, output_shape=output_shape, mode='constant', cval=0)
# correlate polar transform of fft magnitude (shift independent) to find
# best rotation, then rotate to same angle and do normal correlation
def register(image1, image2):
    """Rotation registration via polar transforms of FFT magnitudes.

    Correlates the polar transform of each image's FFT magnitude (which
    is translation independent) to find the best rotation; intermediate
    magnitude/polar/correlation arrays are written out as MRC files.
    Fix: Python-2 ``print`` statements replaced with print() calls.
    """
    trim = 8
    image1 = image1[trim:-trim, trim:-trim]
    image2 = image2[trim:-trim, trim:-trim]
    fft1 = scipy.fftpack.fft2(image1)
    fft2 = scipy.fftpack.fft2(image2)
    fft1 = scipy.fftpack.fftshift(fft1, axes=[0])
    fft2 = scipy.fftpack.fftshift(fft2, axes=[0])
    # Keep only the non-redundant half of the conjugate-symmetric spectrum.
    c = int(fft1.shape[0] / 2.0)
    fft1 = fft1[:, :c+1]
    fft2 = fft2[:, :c+1]
    mag1 = numpy.abs(fft1)
    mag2 = numpy.abs(fft2)
    mrc.write(mag1, 'mag1.mrc')
    mrc.write(mag2, 'mag2.mrc')
    center = c, 0
    output_shape = c, c
    print('P1')
    p1 = polar_transform(mag1, output_shape, center)
    #scipy.misc.imsave('p1.jpg', p1)
    mrc.write(p1, 'p1.mrc')
    print('P2')
    p2 = polar_transform(mag2, output_shape, center)
    #scipy.misc.imsave('p2.jpg', p2)
    mrc.write(p2, 'p2.mrc')
    pc = correlator.phase_correlate(p1, p2, zero=False)
    #pc = correlator.cross_correlate(p1, p2)
    mrc.write(pc, 'pc.mrc')
    #return p1, p2
## just rotate image2 to different angles and correlate
def register2(image1, image2, angles):
    """Brute-force rotation search.

    Rotates *image2* to each angle in *angles*, phase-correlates it
    against *image1*, and returns a list of
    (angle, pixel peak, subpixel peak, snr) tuples.
    Fix: Python-2 ``print`` statement replaced with print().
    """
    # Pre-filter once; ndimage.rotate can then skip the spline prefilter.
    im2filt = scipy.ndimage.spline_filter(image2)
    peaks = []
    for angle in angles:
        image2 = scipy.ndimage.rotate(im2filt, angle, reshape=False)
        #mrc.write(image2, 'rot.mrc')
        pc = correlator.phase_correlate(image1, image2, zero=False)
        mrc.write(pc, 'pc.mrc')
        peak = peakfinder.findSubpixelPeak(pc)
        result = (angle, peak['pixel peak value'], peak['subpixel peak value'], peak['snr'])
        print(result)
        peaks.append(result)
    return peaks
def testMRCImages():
    """Load two MRC files named on the command line, bin them 4x, and run
    the brute-force rotation search around 90 degrees.

    Fix: Python-2 ``print`` statements replaced with print() calls.
    """
    file1, file2 = sys.argv[1:3]
    print('reading MRCs')
    image1 = mrc.read(file1)
    image2 = mrc.read(file2)
    image1 = imagefun.bin(image1, 4)
    image2 = imagefun.bin(image2, 4)
    print('register...')
    #result = register(image1, image2, range()
    #result = register2(image1, image2, range(86,95))
    result = register2(image1, image2, range(90, 91))
    #print(result)
def testPolarOnImage():
    """Polar-transform the image file given in argv[1], save to argv[2].

    NOTE(review): scipy.misc.imread/imsave were removed in SciPy >= 1.2
    (they also required PIL) -- confirm this runs on the intended old
    SciPy environment.
    """
    infilename = sys.argv[1]
    outfilename = sys.argv[2]
    im = scipy.misc.imread(infilename, flatten=True)
    center = im.shape[0]/2.0, im.shape[1]/2.0
    pol = polar_transform(im, (128,128), center)
    scipy.misc.imsave(outfilename, pol)
def testRandomImages(size):
    """Sanity-check register() on two pure-noise images of shape (size, size).

    Fix: Python-2 ``print`` statement replaced with print().
    """
    import numpy.random
    image1 = numpy.random.normal(100, 10, (size, size))
    image2 = numpy.random.normal(100, 10, (size, size))
    result = register(image1, image2)
    print(result)
if __name__ == '__main__':
    # Toggle which self-test to run.
    #testRandomImages(8)
    testMRCImages()
    #testPolarOnImage()
|
from rest_framework import serializers
from unicef_restlib.fields import (
CommaSeparatedExportField,
DynamicChoicesField,
FunctionRelatedField,
SeparatedReadWriteField,
WriteListSerializeFriendlyRecursiveField,
)
from unicef_restlib.serializers import (
DeletableSerializerMixin,
PKSerializerMixin,
RecursiveListSerializer,
UserContextSerializerMixin,
WritableNestedChildSerializerMixin,
WritableNestedParentSerializerMixin,
WritableNestedSerializerMixin,
)
from demo.sample.fields import FileTypeModelChoiceField
from demo.sample.models import Activity, Author, Book, Category, CategoryAbstract, FileType, Image, ISBN, Review
from demo.sample.utils import author_description
class ActivitySerializer(WritableNestedChildSerializerMixin, serializers.ModelSerializer):
    """Nested child serializer for Activity rows."""

    class Meta(WritableNestedChildSerializerMixin.Meta):
        model = Activity
        fields = ("id", "activity_type", "activity_count",)


class ImageFileTypeSerializer(serializers.ModelSerializer):
    """Image serializer restricting file_type to FileType rows with code='image'."""

    file_type = FileTypeModelChoiceField(
        queryset=FileType.objects.filter(code='image')
    )

    class Meta:
        model = Image
        fields = ("file_type",)


class ImageFileTypeChoiceSerializer(serializers.ModelSerializer):
    """Image serializer exposing file_type as a plain two-option choice field."""

    file_type = serializers.ChoiceField(choices=[(1, "First"), (2, "Second")])

    class Meta:
        model = Image
        fields = ("file_type",)


class ImageSerializer(WritableNestedChildSerializerMixin, serializers.ModelSerializer):
    """Nested child serializer for Image rows (id + filename only)."""

    class Meta(WritableNestedChildSerializerMixin.Meta):
        model = Image
        fields = ("id", "filename",)


class ReviewSerializer(WritableNestedChildSerializerMixin, serializers.ModelSerializer):
    """Nested child serializer for Review rows."""

    class Meta(WritableNestedChildSerializerMixin.Meta):
        model = Review
        fields = ("id", "user", "rating",)


class ReviewUserSerializer(UserContextSerializerMixin, serializers.ModelSerializer):
    """Review serializer that pulls the user from the serializer context."""

    class Meta:
        model = Review
        fields = ("id", "user", "rating",)


class ReviewMetaSerializer(serializers.ModelSerializer):
    """Review serializer with a dynamic 1-5 rating choice field."""

    rating = DynamicChoicesField(
        choices={1: 1, 2: 2, 3: 3, 4: 4, 5: 5},
        required=False,
    )

    class Meta:
        model = Review
        fields = ("id", "user", "rating", "status", "active",)
class BookSerializer(
    DeletableSerializerMixin,
    WritableNestedChildSerializerMixin,
    serializers.ModelSerializer
):
    """Deletable nested Book serializer with a computed author description."""

    genre = DynamicChoicesField(choices=Book.GENRE_CHOICES, required=False)
    # Read-only text derived from the related author via author_description().
    author_description = FunctionRelatedField(source='author', read_only=True, callable_function=author_description)

    class Meta(DeletableSerializerMixin.Meta, WritableNestedChildSerializerMixin.Meta):
        model = Book
        fields = ("id", "name", "sku_number", "author", "genre", "author_description")


class AuthorSerializer(WritableNestedParentSerializerMixin, serializers.ModelSerializer):
    """Parent serializer with writable nested books/activities/images/reviews."""

    books = BookSerializer(many=True, required=False)
    activities = ActivitySerializer(many=True, required=False)
    profile_images = ImageSerializer(many=True, required=False)
    full_images = ImageSerializer(many=True, required=False)
    reviews = ReviewSerializer(many=True, required=False)
    # Comma-separated export of the related reviews' ratings.
    review_ratings = CommaSeparatedExportField(
        source="reviews",
        required=False,
        export_attr="rating"
    )

    class Meta:
        model = Author
        fields = "__all__"


class AuthorMetaSerializer(serializers.ModelSerializer):
    """Plain Author serializer exposing every model field."""

    class Meta:
        model = Author
        fields = "__all__"


class AuthorPKSerializer(PKSerializerMixin, serializers.ModelSerializer):
    """Author serializer exposing the primary key as 'pk'."""

    class Meta:
        model = Author
        fields = ("pk", "first_name", "last_name",)


class AuthorIDSerializer(PKSerializerMixin, serializers.ModelSerializer):
    """Author serializer exposing the primary key as 'id'."""

    class Meta:
        model = Author
        fields = ("id", "first_name", "last_name",)


class ISBNSerializer(WritableNestedChildSerializerMixin, serializers.ModelSerializer):
    """Nested child serializer for ISBN codes."""

    class Meta(WritableNestedChildSerializerMixin.Meta):
        model = ISBN
        fields = ("code",)


class BookISBNSerializer(WritableNestedParentSerializerMixin, serializers.ModelSerializer):
    """Book serializer with a writable, nullable nested ISBN."""

    isbn = ISBNSerializer(required=False, allow_null=True)

    class Meta:
        model = Book
        fields = ("name", "isbn", "author",)
class BookForwardSerializer(WritableNestedChildSerializerMixin, serializers.ModelSerializer):
    """Child side of the forward (ISBN -> Book) nested relation."""

    class Meta(WritableNestedChildSerializerMixin.Meta):
        model = Book
        fields = ("id", "name",)


class ISBNForwardSerializer(WritableNestedParentSerializerMixin, serializers.ModelSerializer):
    """ISBN serializer with a writable nested forward Book relation."""

    book = BookForwardSerializer()

    class Meta:
        model = ISBN
        fields = ("id", "code", "book",)


class ReviewAuthorSerializer(WritableNestedParentSerializerMixin, serializers.ModelSerializer):
    """Review serializer with a writable nested Author."""

    author = AuthorSerializer()

    class Meta:
        model = Review
        fields = ("id", "rating", "author")


class AuthorReviewsSerializer(WritableNestedParentSerializerMixin, serializers.ModelSerializer):
    """Author serializer with a writable nested list of reviews."""

    reviews = ReviewMetaSerializer(many=True)

    class Meta:
        model = Author
        fields = ("id", "first_name", "last_name", "reviews")


class AuthorSeparatedSerializer(serializers.ModelSerializer):
    """Read representation used by the SeparatedReadWriteField examples."""

    class Meta:
        model = Author
        fields = ("id", "first_name", "last_name", "active",)


class BookSeparatedSerializer(serializers.ModelSerializer):
    """Book serializer: nested author on read, default write field."""

    author = SeparatedReadWriteField(
        read_field=AuthorSeparatedSerializer(read_only=True),
        label="Author",
    )

    class Meta:
        model = Book
        fields = ("id", "author", "name", "sku_number",)


class BookSeparatedWriteSerializer(serializers.ModelSerializer):
    """Book serializer with explicit read and write author serializers."""

    author = SeparatedReadWriteField(
        read_field=AuthorSeparatedSerializer(read_only=True),
        write_field=AuthorSeparatedSerializer(),
        label="Author",
    )

    class Meta:
        model = Book
        fields = ("id", "author", "name", "sku_number",)


class CategorySerializer(WritableNestedSerializerMixin, serializers.ModelSerializer):
    """Self-recursive category tree serializer (writable children)."""

    children = RecursiveListSerializer(
        child=WriteListSerializeFriendlyRecursiveField(required=False),
        required=False
    )

    class Meta(WritableNestedSerializerMixin.Meta):
        model = Category
        fields = ("id", "name", "parent", "children",)


class CategoryAbstractPKSerializer(PKSerializerMixin, serializers.ModelSerializer):
    """PK mixin applied to an abstract model (no fields declared on purpose)."""

    class Meta:
        model = CategoryAbstract


class CategoryMissingPKSerializer(PKSerializerMixin, serializers.ModelSerializer):
    """PK mixin with a field list that deliberately omits the pk."""

    class Meta:
        model = Category
        fields = ("name",)
|
"""
Matthew Roughan et al. in 'The many facets of Internet topology and traffic':
Gravity Model for TM synthesizing:
X_ij = R_i * A_j / f_ij
R_i as the volume of incoming traffic
A_j as the outgoing traffic
The friction matrix (fij ) encodes the locality information specific to different source-destination pairs, however,
as locality is not as large a factor in Internet traffic as in the
transport of physical goods, we shall assume a lp constant for the friction factors.
Real traffic matrices may have non-constant fij (perhaps as a result of different time-zones),
- interesting: traffic follows 80-20 law (20% of flows cause 80% of traffic
- interesting: gravity model on geant works but only with aggregation (1. simple gravity model with N + 7 and then aggregate 8 of these nodes)
Note: while Matthew Roughan in 'Simplifying the synthesis of Internet traffic matrices' describes that the matrix is generated by using
"""
import numpy as np
import scipy.stats as st
from traffic_provider.generic_tm_provider import GenericTM
class GravityModel(GenericTM):
    """Gravity-model traffic-matrix synthesizer (see module docstring)."""

    def __init__(self, node_info: dict, tm_sample_nr: int, gm_scale: float, gm_type: str = "1_random_var",
                 gm_distribution: str = "exp", gm_loc: float = 100, gm_seed: float = 0, tm_fixed_total: float = None):
        """
        creates a traffic matrix of size n using the gravity model with independent exponential or gauss distribution of ingress and egress traffic
        :param node_info: node info generated by topology provider
        :param tm_sample_nr: sample number of gravity model
        :param gm_scale: mean for random vector generation
        :param gm_type: generate matrix using 1 or 2 random vectors gm_type = ['1_random_var', '2_random_var']
        :param gm_distribution: define which distribution function is used to generate random vectors; gm_distribution = ['exp', 'gauss']
        :param gm_loc: loc parameter for generating random vectors
        :param gm_seed: seed for generating random numbers (NOTE(review): annotated float, but numpy.random.seed needs an integer -- confirm callers pass ints)
        :param tm_fixed_total: if set to a positive value, the sum of all tm entries gets rescaled to this value
        """
        # Seed numpy's global RNG; scipy.stats .rvs draws use it, so
        # sampling below is reproducible per seed.
        np.random.seed(gm_seed)
        if not gm_distribution == "exp" and not gm_distribution == "gauss":
            raise Exception("the defined distribution is not supported use [exp, gauss]")
        if not gm_type == "2_random_var" and not gm_type == "1_random_var":
            raise Exception("the defined gravity model type is not supported use [1_random_var, 2_random_var]")
        self.seed = gm_seed
        self.sample_nr = tm_sample_nr
        self.type = gm_type
        self.scale = gm_scale
        self.n = len(node_info)
        self.distribution = gm_distribution
        self.loc = gm_loc
        self.tm_fixed_total = tm_fixed_total
        return

    def __get_random_vector(self):
        # Draw a 1 x n row vector of per-node traffic volumes.
        if self.distribution == "exp":
            t = np.array([st.expon.rvs(size=self.n, scale=self.scale)])
        elif self.distribution == "gauss":
            # clip(min=0): gaussian draws can be negative, traffic cannot.
            t = np.array([st.norm.rvs(size=self.n, scale=self.scale, loc=self.loc)]).clip(min=0)
        else:
            raise Exception("Gravity Model does not support the defined distribution. choose: ['exp', 'gauss']")
        return t

    def __get_transformed_tm(self, tm):
        # Rescale every demand so the matrix total equals tm_fixed_total.
        total_demand, n_entries, mean = super().get_basic_stats(tm)
        dm = self.tm_fixed_total / total_demand
        tm.update((pair, demand * dm) for pair, demand in tm.items())
        return tm

    def __get_traffic(self):
        # Ingress/egress volume vectors; with "1_random_var" they coincide.
        t_in = self.__get_random_vector()
        t_out = t_in
        if self.type == "2_random_var":
            t_out = self.__get_random_vector()
        t = (np.sum(t_in) + np.sum(t_out)) / 2  # assumption that sum(t_in) == sum(t_out) == t
        # probability matrix
        p_in = t_in / np.sum(t_in)
        p_out = t_out / np.sum(t_out)
        p_matrix = np.matmul(p_in.T, p_out)
        # traffic matrix
        t_matrix = p_matrix * t
        # Keep only off-diagonal, strictly positive demands as a dict
        # keyed by (src, dst) node indices.
        traffic_matrix = {(i, j): t_matrix[i][j] for i in range(self.n) for j in range(self.n) if
                          i != j and t_matrix[i][j] > 0}
        return traffic_matrix

    def get_traffic_matrix(self) -> dict:
        """ see generic_tm_provider.py """
        tm = self.__get_traffic()
        if self.tm_fixed_total is None or self.tm_fixed_total <= 0:
            return tm
        return self.__get_transformed_tm(tm)

    def get_name(self) -> str:
        """ see generic_tm_provider.py """
        return f"GM_scl_{self.scale:.1f}_loc_{self.loc:.1f}_t_{self.type}" \
               f"_dist_{self.distribution}_seed_{self.seed}_nr_{self.sample_nr}"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.