text stringlengths 8 6.05M |
|---|
'''
Created on Dec 3, 2015
@author: Jonathan Yu
'''
import RecommenderEngine, json
import SimpleFoodReader
import BookReader
if __name__ == "__main__":
(jitems,jratings) = BookReader.getData("bookratings.txt")
#(jitems,jratings) = SimpleFoodReader.getData("foodratings_example.txt")
items = json.loads(jitems)
ratings = json.loads(jratings)
avg = RecommenderEngine.averages(items,ratings)
#just show book recommendations, not average ratings
top = ", ".join([d[0] for d in avg[:5]])
bottom = ", ".join([d[0] for d in avg[len(avg) - 5:]])
print "The top recommendations are", top
print "The bottom recommendations are", bottom
|
from . import db
class Scone(db.Model):
    """Record of a scone tasting: where it was bought, its flavour,
    a rating, and optional extras (image, note, cheese topping).
    """
    id = db.Column(db.Integer, primary_key=True)
    place_name = db.Column(db.String(100))
    place_address = db.Column(db.String)
    flavour = db.Column(db.String(50))
    image = db.Column(db.String, nullable=True)  # optional; presumably a path/URL — confirm
    rating = db.Column(db.Float)
    note = db.Column(db.String, nullable=True)
    cheese_on_top = db.Column(db.Boolean, nullable=True)

    def __init__(self, place_name, place_address, flavour, image, rating, note, cheese_on_top):
        # Plain field-for-field constructor.
        self.place_name = place_name
        self.place_address = place_address
        self.flavour = flavour
        self.image = image
        self.rating = rating
        self.note = note
        self.cheese_on_top = cheese_on_top
|
# Simple interactive calculator: read two integers and an operator symbol,
# then print the result.
a = int(input("Enter a: "))
b = int(input("Enter b: "))
oper = input("Enter operation: ")

# Dispatch table replaces the four near-identical if/elif branches.
operations = {
    "+": lambda x, y: x + y,
    "-": lambda x, y: x - y,
    "*": lambda x, y: x * y,
    "/": lambda x, y: x / y,
}

if oper not in operations:
    print("We do not support that operation.")
elif oper == "/" and b == 0:
    # BUG FIX: the original crashed with ZeroDivisionError here.
    print("Cannot divide by zero.")
else:
    result = operations[oper](a, b)
    print("Result is: " + str(result))
from time import time
import math
start = time()
def roof(n):
    """Integer part of sqrt(n / 2): upper bound for i in the 2*i*i search."""
    half = n / 2
    return int(math.sqrt(half))
def is_prime(n):
    """Trial-division primality test: True iff n is a prime integer."""
    if n < 2:
        return False
    if n == 2:
        return True
    if n % 2 == 0:
        return False
    # Only odd divisors up to sqrt(n) need checking.
    limit = int(n ** 0.5) + 1
    for divisor in range(3, limit, 2):
        if n % divisor == 0:
            return False
    return True
def check(n):
    """Return True if n can be written as prime + 2*i*i for some i >= 1.

    FIX: xrange is Python 2-only; range produces the same iteration and
    makes the function portable to Python 3.
    """
    r = roof(n)
    for i in range(1, r + 1):
        if is_prime(n - 2 * (i * i)):
            return True
    return False
# Scan odd numbers: report the first odd composite that cannot be written
# as a prime plus twice a square (cf. Goldbach's "other" conjecture).
for x in xrange(33, 10000, 2):
    if not is_prime(x):
        if not check(x):
            print x
            break
print "Time: {0} secs".format(time()-start)
class IdGenerator():
    """Hands out sequential integer ids: 1, 2, 3, ..."""

    def __init__(self):
        # Last id issued; 0 means none issued yet.
        self._last = 0

    def next(self):
        """Return the next unused id."""
        self._last = self._last + 1
        return self._last
|
#!/usr/bin/env python
"""
Small program that requests input from a user, then opens a file and replaces
particular string matches with user input. Saves the content to a new file.
"""
import re
from pathlib import Path
import pyinputplus as pyip
FILEPATH = "/home/ross/AllThingsPython/ATBS/Testing/"
FILENAME = "string_file.txt"

print('===== MAD LIB =====\n')
print('Please enter words appropriately for the following prompts:')
user_adj = pyip.inputStr('Adjective: ')
user_noun1 = pyip.inputStr('Noun: ')
user_verb = pyip.inputStr('Verb: ')
user_noun2 = pyip.inputStr('Noun: ')

# Read the template file.
# BUG FIX: the original opened the directory FILEPATH instead of the file
# (IsADirectoryError) and never closed the handle; Path.read_text does both.
source = Path(FILEPATH) / FILENAME
content = source.read_text()

# Replace placeholders with user input. The first NOUN gets noun1
# (count=1); any remaining NOUNs get noun2.
replaced = re.sub(r'ADJECTIVE', user_adj, content)
replaced = re.sub(r'NOUN', user_noun1, replaced, count=1)
replaced = re.sub(r'VERB', user_verb, replaced)
replaced = re.sub(r'NOUN', user_noun2, replaced)

# print results to screen
print('Your new sentence is as follows:')
print(replaced)

# Save to a new text file.
# BUG FIX: the original's final prints had unterminated string literals
# (a syntax error) and the output file was never closed.
print('\nSaving file...\n')
new_path = Path(FILEPATH) / "newfile.txt"
new_path.write_text('\n' + replaced)
print('File was saved at the following location:')
print(new_path)
print('Have a nice day.')
def product(a, b):
    """Return a * b for non-negative integer b, by repeated addition.

    Recursive definition: product(a, 0) == 0; product(a, b) == product(a, b-1) + a.
    FIX: the original recursed forever for b <= 0 (base case was b == 1);
    b == 0 now returns 0 and negative b raises ValueError instead of
    blowing the recursion limit.
    """
    if b < 0:
        raise ValueError("b must be a non-negative integer")
    if b == 0:
        return 0
    return product(a, b - 1) + a

print(product(5, 2))  # 10  # 5+5
print(product(9, 3))  # 27  # 9+9+9
print(product(6, 5))  # 30
|
""" multiple control element logic. Used for displaying multiple
bluegraph widgets in a single application.
"""
import sys
import numpy
import logging
from PySide import QtCore, QtGui
from bluegraph import views
from bluegraph import utils
from bluegraph.devices import DeviceWrappers
log = logging.getLogger(__name__)
class SensorsController(object):
    """Three stacked sensor graphs (amps, IR temp, humidity) wired to
    simulated devices and refreshed from a zero-interval single-shot
    Qt timer.
    """

    def __init__(self, device_class="Simulation",
                 device_type="RegulatedSpectra",
                 device_args=None,
                 title=None):
        # FIX: compare against the None singleton with `is`.
        # NOTE(review): `title` is computed but never used afterwards.
        if title is None:
            title = device_type.upper()

        # Build the stacked graph widgets.
        self.form = views.MultiGraphLayout()
        self.amps_graph = views.PixmapBackedGraph("AMPS", icon="default")
        self.form.vbox.addWidget(self.amps_graph)
        self.ir_temp = views.PixmapBackedGraph("IR TEMP", icon="ir_temp")
        self.form.vbox.addWidget(self.ir_temp)
        self.humidity = views.PixmapBackedGraph("HUMIDITY", icon="humidity")
        self.form.vbox.addWidget(self.humidity)

        self.sensor_list = [self.amps_graph, self.ir_temp, self.humidity]

        self.render_fps = utils.SimpleFPS()
        self.data_fps = utils.SimpleFPS()

        # NOTE(review): the device_class/device_type/device_args constructor
        # arguments are ignored — the original overwrote them with the
        # hard-wired values below; behavior preserved.
        dev_wrap = DeviceWrappers.DeviceChooser()
        self.amps_graph.device = dev_wrap.create(
            "DeviceWrappers", "NonBlockingInterface",
            "Simulation.StripChartDevice")
        self.ir_temp.device = dev_wrap.create(
            "DeviceWrappers", "NonBlockingInterface",
            "Simulation.StripChartDevice")
        self.humidity.device = dev_wrap.create(
            "DeviceWrappers", "NonBlockingInterface",
            "Simulation.SimulatedSpectra")

        for sensor in self.sensor_list:
            sensor.device.connect()

        self.setup_fps_timers()
        self.connect_signals()

    def connect_signals(self):
        """ Hook into GUI control signals from main controller.
        """
        self.form.exit_signal.exit.connect(self.close)

        class ControlClose(QtCore.QObject):
            # Emitted once cleanup in close() has finished.
            exit = QtCore.Signal(str)
        self.control_exit_signal = ControlClose()

    def close(self, event):
        """ Cleanup and exit. Don't issue qapplication quit here,
        as that will terminate the qapplication during tests. Use the
        qapplication control from py.test.
        """
        log.debug("blue graph controller level close")
        print("blue graph controller level close")
        for sensor in self.sensor_list:
            sensor.device.disconnect()
        self.control_exit_signal.exit.emit("control exit")

    def setup_fps_timers(self):
        """ Update the display Frames per second at every qt event
        timeout.
        """
        self.data_timer = QtCore.QTimer()
        self.data_timer.timeout.connect(self.update_fps)
        self.data_timer.setSingleShot(True)
        self.data_timer.start(0)

    def update_fps(self):
        """ Add tick, display the current rate.
        """
        for sensor in self.sensor_list:
            rnd_data = sensor.device.read()
            if rnd_data is not None:
                sensor.curve.setData(rnd_data)
                self.data_fps.tick()
                self.update_min_max(sensor, rnd_data)
                self.show_fps(sensor)
        self.render_fps.tick()
        # Re-arm the single-shot timer for the next pass.
        self.data_timer.start(0)

    def show_fps(self, sensor):
        """ Primitive fps calculations of data and render fps.
        """
        new_fps = "D: %s\nR: %s" % (self.data_fps.rate(),
                                    self.render_fps.rate())
        sensor.graphback.view_fps.setText(new_fps)

    def update_min_max(self, sensor, rnd_data):
        """ Show the current min and maximum values in the interface
        controls.

        FIX: setText expects a string; the original passed raw numpy
        scalars, which Qt label widgets do not accept.
        """
        sensor.graphback.minimum.setText(str(numpy.min(rnd_data)))
        sensor.graphback.maximum.setText(str(numpy.max(rnd_data)))
|
__author__ = 'Alexey'
from graph_tools.graph_builder import build_connected_graph
from graph_tools.graph_drawer import draw_graph
from graph_tools.common_graph_utils import init_random_weights
from algorithms.algorithms import build_minimum_spanning_tree
import algorithms.disjoint_set_structure as dss
# Python 2 script: input() evaluates the typed expression, so the
# type checks below act on the evaluated values (int / float).
n = input("Enter vertices number: ")
p = input("Enter probability: ")
if type(n) != int or type(p) != float:
    print "Your input is invalid, sorry :C Try again?"
    exit(1)
# Random connected graph with n vertices / edge probability p,
# then random edge weights.
vertices, adjacency = build_connected_graph(n, p)
weights = init_random_weights(adjacency)
print 'Adjacency list: ' + str(adjacency)
print 'Weights: ' + str(weights)
# MST built with the disjoint-set structure module passed in as `dss`.
skeleton = build_minimum_spanning_tree(vertices, weights, dss)
print 'Minimum spanning tree: ' + str(skeleton)
draw_graph(adjacency, p, title='Connected graph', skeleton=skeleton)
|
# Dr. Chaos, el malevolo semiótico
# "Chaos es caos en inglés" te diría Dr. Chaos, charlando con una taza de té Chai en la mano. En verdad no es tán malo como su nombre lo hace aparentar... si es que tenés un buen manejo de los idiomas.
# Dr. Chaos esta armando un diccionario. Este diccionario tiene la particularidad de no tener definiciones; el diccionario de Dr. Chaos define una palabra como otra. Dr. Chaos quiere comenzar a traducir la literatura de todo el mundo usando el diccionario y ha venido a ti, el Number One programador de Python.
# Objetivo: Cambiar las palabras de una oración usando el diccionario de Dr. Chaos e imprimir la nueva oración en el lenguaje unificado.
# Ejemplo:
# diccionario = {"hola":"你好","como":"how","estás":"estáis"}
# oracion = "hola, como estás?"
# OUTPUT: "你好, how estáis?"
# Ejemplo 2:
# diccionario = {"ve":"regards","bien":"bom","se":"it"}
# oracion = "se ve bien!"
# Tips:
# El programa debería tratar los símbolos de interrogación, exclamación, los puntos y comas como whitespace, es decir, espacio en blanco.
# Suponer que las letras son todas minusculas.
diccionario = {"hola":"你好","como":"how","estás":"estáis","ve":"regards","bien":"bom","se":"it"}
oracion = "hola, como estás, hola?"
oracion2 = "se ve bien!"
def trad(texto):
    # Naive substring translation: for each dictionary key found anywhere
    # in the text, str.replace substitutes EVERY occurrence of that
    # substring — even inside longer words. Kept as-is for comparison with
    # the word-aware implementation below.
    for key in diccionario:
        idx = texto.find(key)
        if idx != -1:
            texto = texto.replace(texto[idx:idx+len(key)],diccionario.get(key))
    return texto
def traducir(texto):
    """Translate `texto` word by word using the module-level `diccionario`.

    Alphabetic runs are looked up in the dictionary (falling back to the
    original word); every non-letter character is kept verbatim and also
    acts as a word separator.
    """
    partes = []
    actual = []
    for caracter in texto:
        if caracter.isalpha():
            actual.append(caracter)
        else:
            palabra = "".join(actual)
            partes.append(diccionario.get(palabra, palabra))
            partes.append(caracter)
            actual = []
    if actual:
        palabra = "".join(actual)
        partes.append(diccionario.get(palabra, palabra))
    return "".join(partes)
# Demo: substring-based vs word-based translation.
print(trad(oracion))
print(traducir(oracion2))
|
from django.contrib.auth.models import User, Group
from rest_framework import serializers
from .models import *
class UserSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes Django auth users for the booking API."""

    class Meta:
        model = User
        fields = ("username", "password", "first_name", "last_name", 'email')

    def get_mybooking(self, instance):
        """Return all users, or None if the query fails.

        FIX: the original referenced an undefined name `user` (the model
        class is `User`), and its bare `except:` also swallowed
        SystemExit/KeyboardInterrupt — narrowed to Exception.
        """
        try:
            return User.objects.all()
        except Exception:
            return None
class SeatsSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes seat-booking slots."""

    class Meta:
        model = seats
        fields = ['slot_name', 'visiname', 'status', 'date']

    def get_mybooking(self, instance):
        """Return this visitor's seat bookings, or None on failure.

        FIX: bare `except:` narrowed to Exception so process-control
        exceptions are not swallowed.
        """
        try:
            return seats.objects.filter(visiname=instance.user_name)
        except Exception:
            return None
class allSeatsSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes the full set of bookings."""

    class Meta:
        model = allbookings
        fields = ['slot_name', 'visiname', 'status', 'date']

    def get_mybooking(self, instance):
        """Return all bookings, or None on failure.

        FIX: bare `except:` narrowed to Exception so process-control
        exceptions are not swallowed.
        """
        try:
            return allbookings.objects.all()
        except Exception:
            return None
|
def asal_mi(x):
    """Return 1 if x is prime ("asal"), 0 otherwise.

    Keeps the original 1/0 return convention so callers' ctr checks
    still work. FIX: the original returned a flag variable `k` that was
    never assigned for x < 2 (UnboundLocalError) and ran O(n) trial
    division; now all inputs are handled and only odd divisors up to
    sqrt(x) are tried.
    """
    if x < 2:
        return 0
    if x == 2:
        return 1
    if x % 2 == 0:
        return 0
    for i in range(3, int(x ** 0.5) + 1, 2):
        if x % i == 0:
            return 0
    return 1
# Read numbers until the sentinel -1 is entered; report primality.
# FIX: the original's while-condition was only re-checked after the body
# ran, so the sentinel -1 was itself fed to asal_mi (which crashed on it);
# now -1 exits before being tested.
while True:
    sayi = int(input())
    if sayi == -1:
        break
    if asal_mi(sayi) == 1:
        print("asal")
    else:
        print("asal değil")
|
import os
import pandas as pd
import numpy as np
from ccdc.descriptors import MolecularDescriptors
from ccdc.io import MoleculeReader, EntryReader
from tqdm import tqdm
def check_dir(d):
    """Ensure directory `d` exists and return its path.

    FIX: os.makedirs(..., exist_ok=True) removes the check-then-create
    race of the original (exists() followed by mkdir() can raise
    FileExistsError under concurrency) and also creates missing parent
    directories.
    """
    os.makedirs(d, exist_ok=True)
    return d
def remove_dummy_atoms(mol):
    """Strip placeholder atoms (label '****') from `mol` and return it.

    Mutates `mol` in place via mol.remove_atoms; the same object is
    returned for call chaining.
    """
    dummies = [atom for atom in mol.heavy_atoms if atom.label == "****"]
    mol.remove_atoms(dummies)
    return mol
def process_ff_label(func):
    """Split a 'dock[_rescore]' directory label into (dock, rescore).

    'asp_plp' -> ('asp', 'plp'); a bare label like 'asp' -> ('asp', None).
    Tokens after the second underscore-separated part are ignored.
    """
    parts = func.split("_")
    dock = parts[0]
    rescore = parts[1] if len(parts) > 1 else None
    return dock, rescore
def rank_array(li):
    """Return descending-order ranks for `li`: the largest value gets 0."""
    values = np.asarray(li)
    order = values.argsort()[::-1]  # indices from largest to smallest
    ranks = np.empty_like(order)
    ranks[order] = np.arange(len(values))
    return list(ranks)
def create_dataframe(base, run_id, pdbs):
    """Collect GOLD pose scores for every pdb under base/<pdb>/<run_id>
    into one long-format DataFrame (one row per pose).

    Each scoring-function subdirectory is expected to hold 30 poses named
    ranked_<pdb>_ligand_m1_<i>.mol2 under its data/ directory.
    """
    # Directory label -> the token GOLD uses inside its "...Fitness" keys.
    format_dic = {"asp": "ASP",
                  "chemscore": "Chemscore",
                  "goldscore": "Goldscore",
                  "plp": "PLP"}
    # Column accumulators for the final DataFrame.
    data = {"pdb": [],
            "runid": [],
            "pose_id": [],
            "pose_rank": [],
            "dock_func": [],
            "dock_fitness": [],
            "rescore_func": [],
            "rescore_fitness": [],
            "gold_score": [],
            "rmsd": [],
            "rmsd_rank": []}
    # Keep only pdbs that actually have results for this run id.
    pdbs = [pdb for pdb in pdbs if os.path.isdir(os.path.join(base, pdb, run_id))]
    for pdb in tqdm(pdbs):
        dpath = os.path.join(base, pdb, run_id)
        # One subdirectory per scoring-function label (dock or dock_rescore).
        funcs = [d for d in os.listdir(dpath) if not os.path.isfile(os.path.join(dpath, d))]
        for func in funcs:
            ff_a, ff_b = process_ff_label(func)
            s = []  # for the ranking
            r = []
            for i in range(1, 31):
                pose = EntryReader(os.path.join(dpath, func, "data", f"ranked_{pdb}_ligand_m1_{i}.mol2"))[0]
                attr = pose.attributes
                # NOTE(review): takes the first 5 chars of the second line of
                # the Gold.Score attribute — format-dependent; confirm.
                score = float(attr["Gold.Score"].split("\n")[1][:5])
                # All "*Fitness*" attributes keyed by their middle token.
                fit_score = {k.split(".")[1]: attr[k] for k in [a for a in attr.keys() if "Fitness" in a]}
                rmsd = attr["Gold.Reference.RMSD"]
                data["pdb"].append(pdb)
                data["runid"].append(run_id)
                data["pose_id"].append(i)
                data["dock_func"].append(ff_a)
                data["dock_fitness"].append(float(fit_score[format_dic[ff_a]]))
                data["gold_score"].append(score)
                r.append(float(rmsd))
                # The rescore label falls back to the docking function when
                # the directory name had no second token.
                if ff_b is None:
                    data["rescore_func"].append(ff_a)
                    s.append(float(fit_score[format_dic[ff_a]]))
                else:
                    data["rescore_func"].append(ff_b)
                    s.append(float(fit_score[format_dic[ff_b]]))
            data["rescore_fitness"].extend(s)
            data["pose_rank"].extend(rank_array(s))
            data["rmsd"].extend(r)
            data["rmsd_rank"].extend(rank_array(r))
    return pd.DataFrame(data)
if __name__ == "__main__":
base = "/local/pcurran/leads_frag"
pdbs = [p for p in os.listdir(base) if os.path.isdir(os.path.join(base, p))]
# run_ids = ["gold", "gold_a", "gold_b", "gold_c", "gold_d", "fhm1"]
# ["fhm2", "fhm2_a", "fhm2_b", "fhm2_c", "fhm2_d"]
run_ids = ["control"]
for run_id in run_ids:
df = create_dataframe(base, run_id, pdbs)
df.to_csv(f"results/{run_id}_poses.csv")
|
def isSymmetric(self, root):
    """
    :type root: TreeNode
    :rtype: bool

    True iff the tree is a mirror of itself around its root.
    FIX: idiomatic `is None` identity checks replace `== None` and
    implicit truthiness tests on node objects.
    """
    def mirrored(left, right):
        # Two empty subtrees mirror each other.
        if left is None and right is None:
            return True
        # Exactly one empty subtree: asymmetric.
        if left is None or right is None:
            return False
        # Values must match and children must mirror crosswise.
        return (left.val == right.val
                and mirrored(left.left, right.right)
                and mirrored(left.right, right.left))
    return root is None or mirrored(root.left, root.right)
|
#Face Recognition using Facenet model and MTCNN detector
#Pre-requisite: pip install mtcnn
from PIL import Image
from numpy import asarray
from mtcnn.mtcnn import MTCNN
from os import listdir
from os.path import isdir
from matplotlib import pyplot
from numpy import load
from numpy import expand_dims
from numpy import asarray
from keras.models import load_model
import os
import numpy as np
from numpy import savez_compressed
def extract_face(filename, required_size=(160, 160)):
    """Detect the first face in an image file and return it as an RGB
    uint8 pixel array resized to `required_size`.

    FIX: the original ignored `required_size` and hard-coded (160, 160)
    in the final resize; the parameter is now honoured (the default keeps
    the old behaviour).
    """
    image = Image.open(filename)
    image = image.convert("RGB")
    pixels = np.array(image)  # PIL.Image has no asarray attribute
    # MTCNN face detector; the first detection is assumed to be the subject.
    detector = MTCNN()
    person = detector.detect_faces(pixels)
    x1, y1, width, height = person[0]['box']
    # MTCNN can report slightly negative coordinates; clamp via abs().
    x1, y1 = abs(x1), abs(y1)
    x2 = x1 + width
    y2 = y1 + height
    face = pixels[y1:y2, x1:x2]
    resized = Image.fromarray(face.astype(np.uint8)).resize(required_size)
    return np.array(resized)
def load_faces(directory):
    """Extract one face array per image file in `directory`."""
    return [extract_face(os.path.join(directory, entry))
            for entry in os.listdir(directory)]
def load_dataset(directory):
    """Walk a one-subdirectory-per-person tree; return (faces, labels).

    Labels are the subdirectory names, repeated once per face found.
    """
    samples, labels = [], []
    for person in os.listdir(directory):
        person_faces = load_faces(os.path.join(directory, person))
        samples.extend(person_faces)
        labels.extend([person] * len(person_faces))
    return asarray(samples), asarray(labels)
def get_embedding(model, face_pixels):
    """Return the model's embedding vector for a single face image.

    Pixels are standardized (zero mean, unit variance) before prediction,
    and a singleton batch dimension is added before / stripped after the
    model call.
    """
    standardized = face_pixels.astype('float32')
    standardized = (standardized - standardized.mean()) / standardized.std()
    batch = expand_dims(standardized, axis=0)
    return model.predict(batch)[0]
def new_face(image):
    # Extract and embed a face from `image` (a file path).
    # NOTE(review): `model` is not defined at module scope — it exists only
    # as a local inside main() — so calling this raises NameError as-is.
    # Needs a model parameter or a module-level model; confirm intent.
    test_face_extract = extract_face(image)
    test_face_embedding = get_embedding(model,test_face_extract)
    #we can try to convert it into as array
    test_face_embedding = asarray(test_face_embedding)
    return test_face_embedding
def main():
    """End-to-end demo: build the face dataset, compute FaceNet
    embeddings, train an SVM on them, and classify one test image.
    """
    # Build (faces, names) arrays from the 'ids' directory and cache them.
    data_folder = load_dataset('ids')
    face_array = data_folder[0]
    face_name = data_folder[1].astype(object)
    savez_compressed('faces-dataset.npz', face_array,face_name)
    #Load dataset and save embeddings
    data = load('faces-dataset.npz',allow_pickle=True)
    model = load_model('facenet_mtcnn/facenet_keras.h5')
    print('Loaded model')
    faceArray,faceName = data['arr_0'], data['arr_1']
    face_embed = list()
    face_database = {}  # NOTE(review): never used afterwards
    # One embedding vector per face image.
    for face_pixels in faceArray:
        embedding = get_embedding(model,face_pixels)
        face_embed.append(embedding)
    face_embed = asarray(face_embed)
    savez_compressed('faces-embed.npz',face_embed,faceName)
    #get test image and its embedding
    # NOTE(review): new_face() reads a global `model` that is only defined
    # locally here — it will raise NameError as written; confirm.
    test_face_embed = new_face(input("Enter the test image: "))
    #Load embedding
    load_embed = load('faces-embed.npz', allow_pickle= True)
    load_embed_faces = load_embed['arr_0']
    load_embed_names = load_embed['arr_1']
    #Normalize the embed vectors
    from sklearn.preprocessing import LabelEncoder
    from sklearn.preprocessing import Normalizer
    from sklearn.svm import SVC
    encoder = Normalizer(norm='l2')
    load_embed_faces1 = encoder.transform(load_embed_faces)
    #test_face_embed = encoder.transform(test_face_embed)
    # label encode targets
    name_encoder = LabelEncoder()
    name_encoder.fit(load_embed_names)
    load_embed_names1 = name_encoder.transform(load_embed_names)
    # Linear SVM classifier over the embeddings.
    model = SVC (kernel='linear', probability= True, C=3)
    model.fit(load_embed_faces1,load_embed_names1)
    #to create one more array dimension
    test_sample = expand_dims(test_face_embed,axis=0)
    yhat_class = model.predict(test_sample)
    yhat_prob = model.predict_proba(test_sample)
    #get name
    class_index = yhat_class[0]
    class_probability = yhat_prob[0,class_index] * 100
    predict_names = name_encoder.inverse_transform(yhat_class)
    print('Predicted: %s (%.3f)' % (predict_names[0], class_probability))

if __name__ == "__main__":
    main()
|
#coding:utf-8
"""
LSTM demo for MNIST dataset
"""
from tensorflow.examples.tutorials.mnist import input_data
import tensorflow as tf
import numpy as np
tf.enable_eager_execution()
# NOTE(review): eager execution is enabled above, yet tf.placeholder and
# Session APIs below are graph-mode only — under TF 1.x this combination
# fails at the first placeholder; confirm which mode is intended.
# Load MNIST with one-hot labels; the directory may point at an already
# downloaded copy.
URL = "http://yann.lecun.com/exdb/mnist/"
mnist = input_data.read_data_sets("/media/trans/mnt/code_test/my_projects/mixup-master/cifar/data/raw", one_hot=True, source_url=URL)
'''
MNIST的数据是一个28*28的图像,这里RNN测试,把他看成一行行的序列(28维度(28长的sequence)*28行)
'''
# Training hyperparameters.
learning_rate = 0.001
training_iters = 100000
batch_size = 128
display_step = 10
# Network parameters: each 28x28 image is fed as 28 steps of 28 features.
n_input = 28  # features per step (image row width)
n_steps = 28  # sequence length (number of rows)
n_hidden = 128  # LSTM hidden units
n_classes = 10  # digit classes 0-9
sess = tf.InteractiveSession()
# Graph inputs.
x = tf.placeholder("float", [None, n_steps, n_input])
# The LSTM needs 2*n_hidden of state: one state and one cell vector.
# Tensorflow LSTM cell requires 2x n_hidden length (state & cell)
istate = tf.placeholder("float", [None, 2 * n_hidden])
# One-hot target labels.
y = tf.placeholder("float", [None, n_classes])
# Randomly initialised weights and biases for the in/out projections.
weights = {
    'hidden': tf.Variable(tf.random_normal([n_input, n_hidden])), # Hidden layer weights
    'out': tf.Variable(tf.random_normal([n_hidden, n_classes]))
}
biases = {
    'hidden': tf.Variable(tf.random_normal([n_hidden])),
    'out': tf.Variable(tf.random_normal([n_classes]))
}
'''
构建RNN
'''
def RNN(_X, _istate, _weights, _biases):
    """Single-layer LSTM over n_steps-long sequences; returns class logits."""
    # Rearrange the input: (batch, steps, input) -> flat (steps*batch, input).
    _X = tf.transpose(_X, [1, 0, 2])  # permute n_steps and batch_size
    _X = tf.reshape(_X, [-1, n_input])  # (n_steps*batch_size, n_input)
    # Input-to-hidden projection, applied once before the recurrence.
    _X = tf.matmul(_X, _weights['hidden']) + _biases['hidden']
    # LSTM cell used for the recurrence.
    lstm_cell = tf.nn.rnn_cell.BasicLSTMCell(n_hidden, forget_bias=1.0)
    # Split into n_steps tensors, one per sequence position.
    _X = tf.split(axis=0, num_or_size_splits=n_steps, value=_X)  # n_steps * (batch_size, n_hidden)
    # Run the (static) RNN over all steps.
    # tf.nn.static_rnn && tf.nn.dynamic_rnn
    outputs, states = tf.nn.static_rnn(lstm_cell, _X, initial_state=_istate)
    # Output layer: map the last step's hidden state to class logits.
    return tf.matmul(outputs[-1], _weights['out']) + _biases['out']
# print(type(x))
# print(type(istate))
print()
pred = RNN(x, istate, weights, biases)
# Loss (softmax cross-entropy) and Adam optimizer.
# NOTE(review): TF1's softmax_cross_entropy_with_logits expects keyword
# arguments (labels=..., logits=...); positional use may fail depending
# on the TF version — confirm.
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(pred, y)) # Softmax loss
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost) # Adam Optimizer
# Evaluation: argmax picks the predicted class; accuracy is the match rate.
correct_pred = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
# Variable initialisation op.
init = tf.initialize_all_variables()
# Training (Python 2 print statements below).
# with tf.Session() as sess:
if True:
    sess.run(init)
    step = 1
    # Keep iterating until training_iters samples have been consumed.
    while step * batch_size < training_iters:
        # Draw the next training batch.
        batch_xs, batch_ys = mnist.train.next_batch(batch_size)
        # Reshape to (batch, steps, features) for the RNN input.
        batch_xs = batch_xs.reshape((batch_size, n_steps, n_input))
        # One optimisation step; the LSTM state starts at zero.
        sess.run(optimizer, feed_dict={x: batch_xs, y: batch_ys,
                                       istate: np.zeros((batch_size, 2 * n_hidden))})
        # Periodically report batch loss and accuracy.
        if step % display_step == 0:
            # Calculate batch accuracy
            acc = sess.run(accuracy, feed_dict={x: batch_xs, y: batch_ys,
                                                istate: np.zeros((batch_size, 2 * n_hidden))})
            # Calculate batch loss
            loss = sess.run(cost, feed_dict={x: batch_xs, y: batch_ys,
                                             istate: np.zeros((batch_size, 2 * n_hidden))})
            print "Iter " + str(step * batch_size) + ", Minibatch Loss= " + "{:.6f}".format(loss) + \
                ", Training Accuracy= " + "{:.5f}".format(acc)
        step += 1
    print "Optimization Finished!"
    # Evaluate on the first 256 test images.
    test_len = 256
    test_data = mnist.test.images[:test_len].reshape((-1, n_steps, n_input))
    test_label = mnist.test.labels[:test_len]
    print "Testing Accuracy:", sess.run(accuracy, feed_dict={x: test_data, y: test_label,istate: np.zeros((test_len, 2 * n_hidden))})
# Generated by Django 2.2.5 on 2020-04-24 21:47
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add MobilePhone.is_wkp boolean ("Is Phone of the week?"), default False."""

    dependencies = [
        ('listings', '0006_auto_20200425_0230'),
    ]

    operations = [
        migrations.AddField(
            model_name='mobilephone',
            name='is_wkp',
            field=models.BooleanField(default=False, verbose_name='Is Phone of the week?'),
        ),
    ]
|
from django.db import models
from django import forms
# New imports added for ClusterTaggableManager, TaggedItemBase, MultiFieldPanel
from modelcluster.fields import ParentalKey, ParentalManyToManyField
from modelcluster.contrib.taggit import ClusterTaggableManager
from taggit.models import TaggedItemBase
from django.utils.translation import ugettext as _
from wagtail.core.models import Page, Orderable
from wagtail.core.fields import RichTextField
from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel, FieldRowPanel
from wagtail.images.edit_handlers import ImageChooserPanel
from wagtail.search import index
from wagtailgeowidget.edit_handlers import GeoPanel
from wagtail.snippets.models import register_snippet
from wagtail.api import APIField
from wagtailautocomplete.edit_handlers import AutocompletePanel
from city.models import CityPage
from country.models import RiskCategory
class SiteIndexPage(Page):
    """Index page exposing SitePage entries filtered by a ?tag= parameter."""
    intro = RichTextField(blank=True)

    content_panels = Page.content_panels + [
        FieldPanel('intro', classname="full")
    ]

    def get_context(self, request):
        # Update context to include only published posts, ordered by reverse-chron
        # NOTE(review): when no ?tag= is supplied this filters on
        # tags__name=None, which likely matches nothing — confirm intent.
        tag = request.GET.get('tag')
        siteypages = SitePage.objects.filter(tags__name=tag)
        context = super().get_context(request)
        context['siteypages'] = siteypages
        return context
@register_snippet
class SiteCategory(models.Model):
    """Snippet: a named category ("Business Unit") with an optional icon."""
    name = models.CharField(max_length=255)
    icon = models.ForeignKey(
        'wagtailimages.Image', null=True, blank=True,
        on_delete=models.SET_NULL, related_name='+'
    )

    # Admin editing UI.
    panels = [
        FieldPanel('name'),
        ImageChooserPanel('icon'),
    ]

    def __str__(self):
        return self.name

    class Meta:
        verbose_name_plural = 'Business Unit'

    # Fields exposed through the Wagtail API.
    api_fields = [
        APIField('name'),
    ]
class SitePage(Page):
    """A site/location page: name, city link, bilingual rich-text bodies,
    address/geo location and an optional map image.
    """
    # models
    site = models.CharField(max_length=250)
    city = models.ForeignKey('city.CityPage', blank=True, null=True,on_delete=models.SET_NULL)
    body_en = RichTextField(blank=True)  # English body
    body_id = RichTextField(blank=True)  # presumably Indonesian ("id") body — confirm
    address = models.CharField(max_length=250, blank=True, null=True)
    location = models.CharField(max_length=250, blank=True, null=True)  # edited via GeoPanel below
    map_image = models.ForeignKey(
        'wagtailimages.Image',
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name='+'
    )

    # Fields indexed for Wagtail search.
    search_fields = Page.search_fields + [
        index.SearchField('site'),
        index.SearchField('city'),
        index.SearchField('body_en'),
        index.SearchField('body_id'),
    ]

    # Admin editing UI.
    content_panels = Page.content_panels + [
        FieldPanel('site'),
        AutocompletePanel('city'),
        MultiFieldPanel([
            FieldPanel('address'),
            GeoPanel('location', address_field='address'),
        ], _('Geo details')),
        ImageChooserPanel('map_image'),
        FieldPanel('body_en', classname="full"),
        FieldPanel('body_id', classname="full"),
    ]

    # Fields exposed through the Wagtail API.
    api_fields = [
        APIField('site'),
        APIField('city'),
        APIField('address'),
        APIField('location'),
        APIField('map_image'),
        APIField('body_en'),
        APIField('body_id'),
    ]
class SitePageGalleryImage(Orderable):
    """An ordered gallery image (with caption) attached to a SitePage."""
    page = ParentalKey(SitePage, on_delete=models.CASCADE, related_name='gallery_images')
    image = models.ForeignKey(
        'wagtailimages.Image', on_delete=models.CASCADE, related_name='+'
    )
    caption = models.CharField(blank=True, max_length=250)

    panels = [
        ImageChooserPanel('image'),
        FieldPanel('caption'),
    ]
|
import os, sys
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from flask_uploads import patch_request_class
# import atexit
# from apscheduler.schedulers.background import BackgroundScheduler
from flask_script import Manager, Server
from application import create_app
# from utilities.journal import print_date_time
app = create_app()

# file's size limit: cap uploads at 41943040 bytes (40 MiB).
patch_request_class(app, size=41943040)

manager = Manager(app)

# scheduler = BackgroundScheduler()
# scheduler.add_job(func=print_date_time,trigger='cron', hour='0')
# scheduler.start()

# Bind address/port from the environment, defaulting to localhost:5000.
host = os.environ.get('IP', '127.0.0.1')
port = int(os.environ.get('PORT', 5000))

# NOTE(review): Flask-Script is unmaintained; consider migrating to the
# built-in Flask CLI.
manager.add_command("runserver",Server(
    use_debugger=True,
    use_reloader=True,
    host=host,
    port=port
))

if __name__ == "__main__":
    manager.run()
|
# Generated by Django 3.2.8 on 2021-10-27 23:55
from django.db import migrations
class Migration(migrations.Migration):
    """Drop the `_id` suffix from Submission's FK fields
    (activity_id -> activity, user_id -> user).
    """

    dependencies = [
        ('activities', '0002_auto_20211027_2331'),
    ]

    operations = [
        migrations.RenameField(
            model_name='submission',
            old_name='activity_id',
            new_name='activity',
        ),
        migrations.RenameField(
            model_name='submission',
            old_name='user_id',
            new_name='user',
        ),
    ]
|
from datetime import datetime
from behave import given, then
from behave.runner import Context
from pepy.domain.model import ProjectName, ProjectDownloads, Downloads, DayDownloads
from tests.tools.stub import ProjectStub
@given("the following projects exists")
def step_impl(context: Context):
projects = [ProjectStub.from_plain_data(**row.as_dict()) for row in context.table]
context.container.project_repository.save_projects(projects)
@given("the {name} project with the following downloads")
def step_impl(context: Context, name: str):
project = ProjectStub.create(ProjectName(name), Downloads(0))
for row in context.table:
date = datetime.strptime(row["date"], "%Y-%m-%d").date()
project.add_downloads(date, row["version"], DayDownloads(int(row["downloads"]), int(row["downloads"])))
context.container.project_repository.save(project)
@given("the following downloads per day exists")
def step_impl(context: Context):
downloads = []
for row in context.table:
date = datetime.strptime(row["date"], "%Y-%m-%d").date()
downloads.append(ProjectDownloads(ProjectName(row["name"]), Downloads(row["downloads"]), date))
context.container.project_repository.save_day_downloads(downloads)
@then("the following projects should exist")
def step_impl(context: Context):
for row in context.table:
project = context.container.project_repository.get(row["project"])
assert project is not None
if "total_downloads" in row:
assert project.total_downloads == row["total_downloads"]
|
from game.items.item import Log
from game.skills import SkillTypes
class TeakLog(Log):
    """Teak log item: display name, value, XP yields and skill requirements."""
    name = 'Teak Log'
    value = 97
    # XP awarded per skill.
    xp = {SkillTypes.firemaking: 105, SkillTypes.fletching: 1}
    # Skill levels required.
    skill_requirement = {SkillTypes.firemaking: 35, SkillTypes.fletching: 1}
# map() demo: apply a function to every element of a sequence
# (works with lists, sets, tuples). Shown two equivalent ways:
# a named function via map(), and a list comprehension.
def haha(x):
    """Return x squared."""
    return x ** 2

b = [1, 2, 3, 4, 5]
a = list(map(haha, b))       # named function through map()
c = [n ** 2 for n in b]      # same result via a comprehension
print(a)
print(c)
|
import pytest
from pystachio.base import Environment
from pystachio.basic import String
from pystachio.composite import Default, Required, Struct
from pystachio.container import List
from pystachio.naming import Ref
from pystachio.parsing import MustacheParser
def ref(address):
    """Shorthand: build a Ref from its address string."""
    return Ref.from_address(address)
def test_mustache_re():
    """Tokenizer accepts \\w+ refs, handles brace bracketing, rejects others."""
    assert MustacheParser.split("{{foo}}") == [ref("foo")]
    assert MustacheParser.split("{{_}}") == [ref("_")]
    with pytest.raises(Ref.InvalidRefError):
        MustacheParser.split("{{4}}")

    def chrange(a,b):
        # All characters from a to b inclusive.
        return ''.join(map(lambda ch: str(chr(ch)), range(ord(a), ord(b)+1)))

    # Every \w character is allowed inside a ref name.
    slash_w = chrange('a','z') + chrange('A','Z') + chrange('0','9') + '_'
    assert MustacheParser.split("{{%s}}" % slash_w) == [ref(slash_w)]

    # bracketing: extra braces around a valid ref stay literal text.
    assert MustacheParser.split("{{{foo}}") == ['{', ref('foo')]
    assert MustacheParser.split("{{foo}}}") == [ref('foo'), '}']
    assert MustacheParser.split("{{{foo}}}") == ['{', ref('foo'), '}']
    assert MustacheParser.split("{{}}") == ['{{}}']
    assert MustacheParser.split("{{{}}}") == ['{{{}}}']
    assert MustacheParser.split("{{{{foo}}}}") == ['{{', ref("foo"), '}}']

    invalid_refs = ['!@', '-', '$', ':']
    for val in invalid_refs:
        with pytest.raises(Ref.InvalidRefError):
            MustacheParser.split("{{%s}}" % val)
def test_mustache_splitting():
    """split() yields literals and Refs; {{&x}} escapes to literal {{x}}."""
    assert MustacheParser.split("{{foo}}") == [ref("foo")]
    assert MustacheParser.split("{{&foo}}") == ["{{foo}}"]
    splits = MustacheParser.split('blech {{foo}} {{bar}} bonk {{&baz}} bling')
    assert splits == ['blech ', ref("foo"), ' ', ref('bar'), ' bonk ', '{{baz}}', ' bling']
def test_mustache_joining():
    """join() substitutes bound refs and reports the unbound ones."""
    oe = Environment(foo = "foo herp",
                     bar = "bar derp",
                     baz = "baz blerp")

    joined, unbound = MustacheParser.join(MustacheParser.split("{{foo}}"), oe)
    assert joined == "foo herp"
    assert unbound == []

    # {{&baz}} was escaped by split(), so it stays literal.
    splits = MustacheParser.split('blech {{foo}} {{bar}} bonk {{&baz}} bling')
    joined, unbound = MustacheParser.join(splits, oe)
    assert joined == 'blech foo herp bar derp bonk {{baz}} bling'
    assert unbound == []

    # Unbound refs are left in place and returned in the unbound list.
    splits = MustacheParser.split('{{foo}} {{bar}} {{unbound}}')
    joined, unbound = MustacheParser.join(splits, oe)
    assert joined == 'foo herp bar derp {{unbound}}'
    assert unbound == [Ref.from_address('unbound')]
def test_nested_mustache_resolution():
    """resolve() follows chained refs, struct defaults, and computed indices."""
    # straight: refs that point at other refs resolve transitively.
    oe = Environment(foo = '{{bar}}', bar = '{{baz}}', baz = 'hello')
    for pattern in ('{{foo}}', '{{bar}}', '{{baz}}', 'hello'):
        resolved, unbound = MustacheParser.resolve('%s world' % pattern, oe)
        assert resolved == 'hello world'
        assert unbound == []

    # in structs: a Default referencing a sibling list element.
    class Process(Struct):
        name = Required(String)
        cmdline = String

    class Task(Struct):
        name = Default(String, '{{processes[0].name}}')
        processes = List(Process)

    task = Task(processes = [Process(name="hello"), Process(name="world")])
    assert task.name().get() == 'hello'

    # iterably: the index itself is a mustache expression.
    resolve_string = '{{foo[{{bar}}]}}'
    resolve_list = List(String)(["hello", "world"])
    resolved, unbound = MustacheParser.resolve(resolve_string, Environment(foo=resolve_list, bar=0))
    assert resolved == 'hello'
    assert unbound == []
    # String and integer indices behave the same.
    resolved, unbound = MustacheParser.resolve(resolve_string, Environment(foo=resolve_list, bar="0"))
    assert resolved == 'hello'
    assert unbound == []
    resolved, _ = MustacheParser.resolve(resolve_string, Environment(foo=resolve_list, bar=1))
    assert resolved == 'world'
    # Out-of-range index: ref stays unresolved and is reported.
    resolved, unbound = MustacheParser.resolve(resolve_string, Environment(foo=resolve_list, bar=2))
    assert resolved == '{{foo[2]}}'
    assert unbound == [ref('foo[2]')]
def test_mustache_resolve_cycles():
    """Self-referential templates must raise Uninterpolatable, not loop forever."""
    # NOTE(review): resolve() is handed two Environment positional args
    # here — confirm that matches its signature.
    with pytest.raises(MustacheParser.Uninterpolatable):
        MustacheParser.resolve('{{foo[{{bar}}]}} {{baz}}',
                               Environment(foo = List(String)(["{{foo[{{bar}}]}}", "world"])), Environment(bar = 0))
|
#!/usr/bin/python
import httplib
import httplib2
from urllib2 import Request, urlopen
import urllib
import urllib2
import random
import time
import os
import unicodedata
import sys
import csv
from datetime import datetime
import fileinput
import re
import HTMLParser
from bs4 import BeautifulSoup
data=""
# print ""+data http://www.crummy.com/software/BeautifulSoup/bs4/doc/#installing-beautiful-soup
# NOTE(review): these three lookups appear to be leftover probes — their
# return values are discarded; they only verify the codepoints are named.
unicodedata.name(u'\uFB01')
unicodedata.name(u'\u0308')
unicodedata.name(u'\u2010')
def extractSchools():
    """POST to the MIUR school-search page and return its body as a string.

    The response is latin1-decoded, re-encoded to ASCII with XML character
    references for non-ASCII chars, and stripped of "&nbsp;" entities.
    Python 2 code (httplib).
    """
    url = "scuole-specializzazione.miur.it"
    url_start_job= "/public/ssm15_cerca_scuole_graduatoria.php"
    conn = httplib.HTTPConnection(url)
    #headers = {"User-Agent":"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:33.0) Gecko/20100101 Firefox/33.0", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"}
    #headers = {"User-Agent":"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:33.0) Gecko/20100101 Firefox/33.0","Cookie": "MEMGRGSRN77M53C236J=gaR6mCquBTJZ7l74t1LSREJ12vqRK1Sv","Content-type": "application/x-www-form-urlencoded", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"}
    # Browser-like headers so the server treats this as a normal request.
    headers = {"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", "Upgrade-Insecure-Requests": "1", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36", "Accept-Encoding": "gzip, deflate, sdch" , "Accept-Language": "it-IT,it;q=0.8,en-US;q=0.6,en;q=0.4", "Connection": "keep-alive"
    }
    conn.request("POST", url_start_job, "", headers)
    response = conn.getresponse()
    # Normalize encoding: decode latin1, escape non-ASCII as XML references.
    data =response.read().decode('latin1').encode("ascii", "xmlcharrefreplace")
    conn.close()
    #p = re.compile(r'<.*?>') ris = p.sub('', data).replace("&nbsp;"," ")
    #p = re.compile(r'\bhttp\S*?pdf\b')
    #ris = p.findall(data)
    data1 = data.replace("&nbsp;","")
    return data1
def parsaScuole(data):
    """Parse the school-list HTML and return {school name: href}.

    Scans every <li>, skipping the first four (navigation entries, judging
    by the f > 4 guard), and takes the last <a> of each remaining item.
    """
    soup = BeautifulSoup(data)
    table = soup.find_all('li')
    #rows = table.find('li')
    risultati={}
    f=0
    cols = []
    for tr in table:
        cols = tr.findAll('a')
        f+=1
        # Skip the first 4 <li> entries; they are not school links.
        if(f>4):
            res = cols.pop()
            href = res.get("href")
            nome = res.get_text()
            risultati[nome] = href
    return risultati
def contapagine(url_start_job):
    """Return (as a string) the number of result pages for one school.

    POSTs the school's ranking page and counts "&pag=" pagination links
    (+1 for the current page). Python 2 code (print statement).
    """
    url="scuole-specializzazione.miur.it"
    #conn = httplib.HTTPConnection(url)
    headers = {"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", "Accept-Encoding":"gzip, deflate, sdch", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36", "Accept-Encoding": "gzip, deflate, sdch" , "Accept-Language": "it-IT,it;q=0.8,en-US;q=0.6,en;q=0.4", "Connection": "keep-alive"
    }
    url2 = "/public/"+url_start_job
    urrl = "http://"+url+url2
    #httplib2.debuglevel = 1
    # httplib2 with a local '.cache' directory for HTTP caching.
    h = httplib2.Http('.cache')
    response, content = h.request(str(urrl),"POST")
    #print response, content
    #http://scuole-specializzazione.miur.it/public/ssm15_graduatoriaAnonima.php?master=1_7
    #print response.status, response.reason
    #h.close()
    data1 = str(content).replace("&nbsp;","")
    #print response
    #print data1
    cercare = "&pag="
    #Posizione = data1.find("<th colspan=\"8\" style=\"color:#ffffff; font-size:18px; font-variant:small-caps;\">")
    # Each pagination link contains "&pag="; +1 accounts for page 1 itself.
    ris=(data1.count(cercare)+1)
    #ris2= data1[Posizione+80:Posizione+95]
    print ris
    return str(ris)
def parsa(data):
    """Parse one ranking page into a list of candidate rows.

    Walks the first <table>'s rows (skipping the first 10 header rows) and
    collects cell text; cells containing <span> elements hold the list of
    chosen locations ("sedi"). Every 7th cell (k == 6) closes one
    candidate record.  NOTE(review): the exact grouping depends on the
    page's fixed column layout — fragile by design.
    """
    soup = BeautifulSoup(data)
    table = soup.find('table')
    rows = table.findChildren(['th', 'tr'])
    risultati=[]
    f=0
    cols = []
    # Rows 0..9 are headers; data starts at index 10.
    for y in range(10,len(rows)):
        tr = rows[y]
        cols = tr.find_all('td')
        risultatiScandidato=[]
        k=0
        for td in cols:
            testo = td.find_all(text=True)
            if(len(testo[0])==1):
                # Single-character cells are kept only if numeric.
                if (testo[0].isdigit()):
                    risultatiScandidato.append(testo[0].strip())
            if(len(testo[0])>1):
                risultatiScandidato.append(testo[0].strip())
            else:
                # Empty text: the cell may instead hold <span> location tags.
                sedi = td.find_all('span')
                if(len(sedi)>0):
                    for sed in sedi:
                        txt = sed.find_all(text=True)
                        if(len(txt)>0):
                            risultatiScandidato.append(txt[0].strip())
            # k == 6 marks the last column of one candidate's record.
            if(k==6):
                risultati.append(risultatiScandidato)
                risultatiScandidato=[]
            k+=1
    return risultati
def trasformaArrayinCSVR(matrix):
    """Serialize a list of candidate rows into a semicolon-separated CSV string.

    NOTE(review): Python 2 semantics — elemento.encode('utf-8') yields a
    str that concatenates with ";"; under Python 3 this would be bytes and
    raise TypeError.
    """
    out = ""
    out += "Posizione;Punti Titoli;Punti I Parte;Punti II Parte Area;Punti II Parte Scuola;Punti totale;Sedi scelte\r\n"
    for righe in matrix:
        i=0
        for elemento in righe:
            elemento = elemento.encode('utf-8')
            # Last field of a row ends the line instead of adding ";".
            if(i<len(righe)-1):
                out+=(elemento+";")
            else:
                out+= elemento+"\n"
            #exit(0)
            i+=1
    return out
#Scuole = ["Allergologia+immunologia+clinica","Anatomia+Patologica","Anestesia","Audiologia","Biochimica","Chirurgia+generale","Geriatria","Ginecologia+e+ostetricia","Igiene+e+medicina+preventiva","Medicina+interna","Radiodiagnostica"]
#idscuole = ["1","2","3","4","5","9","21","22","23","30","48"]
#pag = [2,4,41,1,1,25,15,24,15,28,42]
def generaScuole(url2,page):
    """Fetch one result page (``&pag=<page>``) of a school's ranking and
    return the raw body as a string."""
    url="scuole-specializzazione.miur.it"
    urlj = "/public/"+url2
    urrl = "http://"+url+urlj
    h = httplib2.Http('.cache')
    response, content = h.request(str(urrl)+"&pag="+str(page),"POST")
    return str(content)
# Main scrape: list all schools, then for each school fetch every ranking
# page, parse it, and write one <school>.csv.  Python 2 (print statements).
res = parsaScuole(extractSchools())
for key in res:
    print key
    url = res[key]
    pag = contapagine(url)
    result_p =[]
    # NOTE(review): range(1, int(pag)) stops at pag-1 — the last page is
    # never fetched; verify whether pages are 0- or 1-based on the site.
    for x in range(1,int(pag)):
        try:
            out = generaScuole(url,x)
            result_p += parsa(out)
        except IndexError:
            print "Oops! Non era un numero valido. Ritenta..."
    result = trasformaArrayinCSVR(result_p)
    out_file = open(key+".csv","w")
    out_file.write(result)
    out_file.close()
# Everything after this exit() is unreachable legacy code.
exit(0)
for i in range(1,61):
result_p = []
res = contapagine(i)
pag=res[0]
nomescuola=res[1]
print "ok"+nomescuola
if nomescuola.count("\r")>=1:
nomescuola= nomescuola.replace("\r","")
if nomescuola.count("\n")>=1:
nomescuola= nomescuola.replace("\n","")
if nomescuola.count("/")>=1:
omescuola= nomescuola.replace("/","")
if nomescuola.count(">")>=1:
omescuola=nomescuola.replace(">","")
if nomescuola.count("<")>=1:
omescuola=nomescuola.replace("<","")
out_file = open(nomescuola+".csv","w")
out_file.write(result)
out_file.close() |
#=========================================================================
# Modular Python Build System __init__ file
#=========================================================================
# List of collection modules
import elf
import pisa_inst_test_utils
# List of single-class modules
from IsaImpl import IsaImpl
from PisaInst import PisaInst
from PisaSemantics import PisaSemantics
from PisaSim import PisaSim
from SparseMemoryImage import SparseMemoryImage
# Test Cases: Basic instructions
# import pisa_inst_mfc0_test
# import pisa_inst_mtc0_test
# import pisa_inst_nop_test
# Test Cases: Reg-reg arithmetic, logical, and comparison instructions
# import pisa_inst_addu_test
# import pisa_inst_subu_test
# import pisa_inst_and_test
# import pisa_inst_or_test
# import pisa_inst_xor_test
# import pisa_inst_nor_test
# import pisa_inst_slt_test
# import pisa_inst_sltu_test
# Test Cases: Reg-imm arithmetic, logical, and comparison instructions
# import pisa_inst_addiu_test
# import pisa_inst_andi_test
# import pisa_inst_ori_test
# import pisa_inst_xori_test
# import pisa_inst_slti_test
# import pisa_inst_sltiu_test
# Test Cases: Shift instructions
# import pisa_inst_sll_test
# import pisa_inst_srl_test
# import pisa_inst_sra_test
# import pisa_inst_sllv_test
# import pisa_inst_srlv_test
# import pisa_inst_srav_test
# Test Cases: Other instructions
# import pisa_inst_lui_test
# Test Cases: Multiply/divide instructions
# import pisa_inst_mul_test
# import pisa_inst_div_test
# import pisa_inst_divu_test
# import pisa_inst_rem_test
# import pisa_inst_remu_test
# Test Cases: Load instructions
# import pisa_inst_lw_test
# import pisa_inst_lh_test
# import pisa_inst_lhu_test
# import pisa_inst_lb_test
# import pisa_inst_lbu_test
# Test Cases: Store instructions
# import pisa_inst_sw_test
# import pisa_inst_sh_test
# import pisa_inst_sb_test
# Test Cases: Unconditional jump instructions
# import pisa_inst_j_test
# import pisa_inst_jal_test
# import pisa_inst_jr_test
# import pisa_inst_jalr_test
# Test Cases: Conditional branch instructions
# import pisa_inst_beq_test
# import pisa_inst_bne_test
# import pisa_inst_blez_test
# import pisa_inst_bgtz_test
# import pisa_inst_bltz_test
# import pisa_inst_bgez_test
|
from app import app, db
from app.models import User, Autobase
@app.shell_context_processor
def make_shell_context():
    """Expose the database handle and core models in ``flask shell``."""
    return dict(db=db, User=User, Autobase=Autobase)
|
import random

# Print one random number in [1, 100), then a hundred in [1, 1000].
x = random.randrange(1, 100)
print(x)
for i in range(100):
    x = random.randint(1, 1000)
    print(x)
# if ur using the random module then never name ur file random.py

# Demo of continue/break: skip even numbers, stop at 15.
# BUG FIX: the original never incremented i on the odd-number path, so the
# loop spun forever at i == 1; i is now advanced on every iteration.
i = 1
while i <= 20:
    if i % 2 == 0:
        i += 1
        continue  # continue skips the rest of the body and re-tests the condition
    if i == 15:
        break  # break exits the loop entirely
    i += 1
import subprocess as sp
import json
import sys
import ipaddress
from shutil import which
# Module-wide error codes
CONFIG_ERROR = 20
BIN_ERROR = 21

# Exit codes reported by dig (see dig(1), EXIT STATUS)
SUCCESS = 0
USAGE_ERROR = 1
BATCH_FILE = 8
NO_REPLY = 9
INTERNAL_ERROR = 10

# Exit code reported by scamper on a configuration problem
SCAMPER_CONFIG_ERROR = 255

# Parameters assumed when the stdin config omits them
PARAM_DEFAULTS = {"targets": ["1.1.1.1"],
                  "attempts": 3,
                  "timeout": 5,
                  "verbose": False}

SCAMPER_BIN = "scamper"


def is_executable(name):
    """Return SUCCESS when *name* is on PATH and executable, else BIN_ERROR."""
    return SUCCESS if which(name) is not None else BIN_ERROR
def stdin_parser():
    """
    Read the JSON config from stdin and validate parameter types.

    Return:
        params: dict of input parameters, defaults filled in from
            PARAM_DEFAULTS; 'attempts' and 'timeout' normalized to
            numeric strings.
        exit_code: SUCCESS, or CONFIG_ERROR (20) on a type error.
    """
    # Read config from stdin and fill in omitted params with defaults
    params = dict(PARAM_DEFAULTS, **json.load(sys.stdin))
    exit_code = SUCCESS
    # Check type of parameters.
    # BUG FIX: the validated attempt count was previously stored under a
    # stray 'interval' key (never read anywhere in this module) while the
    # raw value stayed in 'attempts'; normalize 'attempts' in place.
    try:
        params['attempts'] = str(int(params['attempts']))
        params['timeout'] = str(int(params['timeout']))
    except ValueError:
        exit_code = CONFIG_ERROR
    return params, exit_code
def parse_trace_stdout(out):
    """Reduce scamper JSON-lines output to a per-destination summary.

    Args:
        out: iterable of strings, each expected to hold one JSON object
            emitted by ``scamper -O json``.

    Returns:
        dict keyed by destination address; each value summarises one
        "trace" record (src, dst, hop_count, probe_count, attempts and a
        trimmed per-hop list).
    """
    summaries = {}
    for line in out:
        try:
            record = json.loads(line)
            if record['type'] != "trace":
                # Skip non-trace records (cycle markers etc.).
                continue
        except json.decoder.JSONDecodeError:
            # Blank or truncated lines are silently ignored.
            continue
        reduced_hops = [
            {'addr': hop['addr'], 'probe_id': hop['probe_id'],
             'rtt': hop['rtt'], 'ttl_id': hop['probe_ttl']}
            for hop in record['hops']
        ]
        summaries[record['dst']] = {
            'src': record['src'],
            'dst': record['dst'],
            'hop_count': record['hop_count'],
            'probe_count': len(record['hops']),
            'attempts': record['attempts'],
            'hops': reduced_hops,
        }
    return summaries
def parse_dig_stderr(exit_code, verbose, stderr):
    """
    Map a dig exit code to a structured error record. Error messages
    based on the dig man page.

    Attributes:
        exit_code: The return code from the dig command.
        verbose: Module parameter to indicate verbose output.
        stderr: Stderr returned by dig.

    Returns a {'retcode', 'message'} dict, or None for silent success /
    negative codes.
    """
    known_messages = {
        USAGE_ERROR: 'Usage Error',
        BATCH_FILE: "Couldn't open batch file",
        NO_REPLY: "No reply from server",
        INTERNAL_ERROR: "Internal error",
    }
    if exit_code == SUCCESS:
        return {'retcode': exit_code, 'message': 'Success'} if verbose else None
    if exit_code in known_messages:
        return {'retcode': exit_code, 'message': known_messages[exit_code]}
    if exit_code > 0:
        # Unknown positive code: pass dig's stderr through verbatim.
        return {'retcode': exit_code, 'message': stderr}
    return None
def get_ip(hostname):
    """
    Resolve *hostname* with ``dig +short`` and return the first answer.

    Returns:
        (returncode, result): result is the first line of dig's output on
        success, or dig's raw stderr on failure.
    """
    try:
        completed = sp.run(['dig', '+short', hostname],
                           capture_output=True, check=True)
    except sp.CalledProcessError as err:
        return err.returncode, err.stderr
    first_answer = completed.stdout.decode('utf-8').split('\n')[0]
    return completed.returncode, first_answer
def main():
    """Run scamper traceroutes to the configured targets.

    Reads a JSON config from stdin, resolves hostnames via dig, runs one
    scamper invocation over all reachable targets, then writes parsed
    results to stdout and structured errors to stderr. Exits non-zero on
    configuration or execution failure.
    """
    # Initialize stored structs
    stdout_res = {}
    stderr_res = {}
    exit_code = SUCCESS
    # Check that scamper is available
    exit_code = is_executable(SCAMPER_BIN)
    if exit_code != SUCCESS:
        stderr_res['bin'] = {'retcode': exit_code,
                             'message': "Scamper either not on PATH or not executable"}
        json.dump(stderr_res, sys.stderr)
        sys.exit(exit_code)
    # Parse stdin
    params, exit_code = stdin_parser()
    if exit_code != SUCCESS:
        stderr_res['stdin'] = {'retcode': exit_code,
                               "message": "Config param type error"}
        json.dump(stderr_res, sys.stderr)
        sys.exit(exit_code)
    # Resolve targets: literal IPs pass through, hostnames go through dig.
    ips = []
    for dst in params['targets']:
        # Picks first IP addr returned by DNS lookup
        try:
            _ = ipaddress.ip_address(dst)
        except ValueError:
            recode, dst = get_ip(dst)
            # On failure `dst` holds dig's stderr, which is what gets logged.
            if stderr_dst := parse_dig_stderr(recode, params['verbose'], dst):
                if "dig" not in stderr_res:
                    stderr_res['dig'] = {}
                stderr_res['dig'][dst] = stderr_dst
            if recode > SUCCESS:
                continue
        ips.append(dst)
    ip_list = " ".join(str(x) for x in ips)
    trace_cmd = f'{SCAMPER_BIN} -O json -i {ip_list} -c "trace -P icmp-paris -q {params["attempts"]} -w {params["timeout"]} -Q"'
    try:
        res = sp.run(trace_cmd, capture_output=True, check=True, shell=True)
    except sp.CalledProcessError as err:
        exit_code = err.returncode
        if err.returncode == SCAMPER_CONFIG_ERROR:
            exit_code = CONFIG_ERROR
        # BUG FIX: the original did stderr_res['trace']['error'] = ... before
        # the 'trace' key existed, raising KeyError inside this handler.
        stderr_res['trace'] = {'error': err.stderr, 'retcode': exit_code}
        json.dump(stderr_res, sys.stderr)
        sys.exit(exit_code)
    # Parse scamper output
    output = res.stdout.decode('utf-8').split('\n')
    stdout_res = parse_trace_stdout(output)
    if not stdout_res:
        print("error decoding")
        stderr_res['trace'] = {"exit_code": res.returncode,
                               "msg": res.stderr.decode('utf-8')}
    # Communicate results and errors
    if stdout_res:
        json.dump(stdout_res, sys.stdout)
    if stderr_res:
        json.dump(stderr_res, sys.stderr)
    sys.exit(exit_code)

if __name__ == '__main__':
    main()
|
# Generated by Django 3.0.2 on 2020-04-04 16:35
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django migration (obsapp 0023).

    NOTE(review): the DateTimeField defaults below are frozen timestamps
    captured at makemigrations time (a common artifact of using
    ``default=datetime.now()`` instead of ``default=datetime.now`` on the
    model) — confirm the model fields before relying on these defaults.
    """

    dependencies = [
        ('obsapp', '0022_auto_20200402_1444'),
    ]

    operations = [
        migrations.AddField(
            model_name='product',
            name='is_editable',
            field=models.BooleanField(default=True),
        ),
        migrations.AlterField(
            model_name='notificationz',
            name='notificationtime',
            field=models.DateTimeField(default=datetime.datetime(2020, 4, 4, 22, 5, 16, 872785)),
        ),
        migrations.AlterField(
            model_name='requestexchange',
            name='datetimeofrequest',
            field=models.DateTimeField(default=datetime.datetime(2020, 4, 4, 22, 5, 16, 871785)),
        ),
    ]
|
def encode(s):
    """Run-length encode *s*: a run of length > 1 becomes '<count><char>'."""
    pieces = []
    run_length = 1
    for i, ch in enumerate(s):
        lookahead = s[i + 1] if i < len(s) - 1 else ''
        if ch == lookahead:
            run_length += 1
            continue
        # Run ended: emit the bare char, or count-prefixed for longer runs.
        pieces.append(ch if run_length == 1 else str(run_length) + ch)
        run_length = 1
    return ''.join(pieces)
def decode(s):
    """Expand a run-length encoded string produced by encode()."""
    chunks = []
    pending_count = 0
    for ch in s:
        if ch.isdigit():
            # Accumulate a (possibly multi-digit) repeat count.
            pending_count = pending_count * 10 + int(ch)
        else:
            chunks.append(ch if pending_count == 0 else ch * pending_count)
            pending_count = 0
    return ''.join(chunks)
if __name__ == '__main__':
    # Smoke test: expected output is '2A3B4CD' and 'AABBBCCCCD' + 'X'*12.
    print(encode('AABBBCCCCD'))
    print(decode('2A3B4CD12X'))
|
from flask import render_template
from . import web
@web.route('/', methods=['GET'])
def root():
    """Serve the static advertise page at the site root."""
    page = web.send_static_file('html/setadvertise.html')
    return page
|
from enum import Enum, auto
from BoschShcPy.base import Base
from BoschShcPy.error import ErrorException
class state(Enum):
    """Software-update states reported by the Bosch Smart Home Controller."""
    NO_UPDATE_AVAILABLE = auto()
    DOWNLOADING = auto()
    UPDATE_IN_PROGRESS = auto()
    UPDATE_AVAILABLE = auto()
    NOT_INITIALIZED = auto()

# Wire string (as received from the controller) -> enum member.
state_rx = {'NO_UPDATE_AVAILABLE': state.NO_UPDATE_AVAILABLE,
            'DOWNLOADING': state.DOWNLOADING,
            'UPDATE_IN_PROGRESS': state.UPDATE_IN_PROGRESS,
            'UPDATE_AVAILABLE': state.UPDATE_AVAILABLE,
            'NOT_INITIALIZED': state.NOT_INITIALIZED,
            }

# Enum member -> wire string.
# BUG FIX: NOT_INITIALIZED was missing from this reverse map, so encoding
# the initial state raised KeyError; the map is now symmetric with state_rx.
state_tx = {state.NO_UPDATE_AVAILABLE: 'NO_UPDATE_AVAILABLE',
            state.DOWNLOADING: 'DOWNLOADING',
            state.UPDATE_IN_PROGRESS: 'UPDATE_IN_PROGRESS',
            state.UPDATE_AVAILABLE: 'UPDATE_AVAILABLE',
            state.NOT_INITIALIZED: 'NOT_INITIALIZED',
            }
class ShcInformation(Base):
    """Version/update information of the Smart Home Controller itself."""

    def __init__(self, client):
        # version and updateState are filled in by load()/update().
        self.version = None
        self.updateState = 'NOT_INITIALIZED'
        self.client = client

    def get_state(self):
        """Translate the raw updateState string into a `state` member."""
        return state_rx[self.updateState]

    def update_from_query(self, query_result):
        """Currently, query is unknown to the author, so this won't work properly"""
        if query_result['@type'] == "shc_device":
            self.load(query_result)

    def update(self):
        """Refresh from the controller API; True on success, False on error."""
        try:
            self.load( self.client.request("smarthome/information") )
        except ErrorException:
            return False
        else:
            return True
|
import numpy as np
from function import *
from utils import *
# Integration interval [A, B] and the reference value of the integral of f.
A = -4
B = 4
EXACT_VALUE = 4.967532679086564
# Closed Newton-Cotes node weights and normalisation coefficients for rules
# with 5, 7, 9, 11, 13 and 15 equally spaced nodes (standard tabulated
# values; e.g. the 5-point rule is Boole's rule).
FIVE_POINTS = [7, 32, 12, 32, 7]
FIVE_POINTS_COEF = 2 / 45
SEVEN_POINTS = [41, 216, 27, 272, 27, 216, 41]
SEVEN_POINTS_COEF = 1 / 140
NINE_POINTS = [989, 5888, -928, 10496, -4540, 10496, -928, 5888, 989]
NINE_POINTS_COEF = 4 / 14175
ELEVEN_POINTS = [16067, 106300, -48525, 272400, -260550, 427368, -260550, 272400, -48525, 106300, 16067]
ELEVEN_POINTS_COEF = 5 / 299376
THIRTEEN_POINTS = [1364651, 9903168, -7587864, 35725120, -51491295, 87516288, -87797136, 87516288, \
-51491295, 35725120, -7587864, 9903168, 1364651]
THIRTEEN_POINTS_COEF = 1 / 5255250
FIFTEEN_POINTS = [90241897, 710986864, -770720657, 3501442784, -6625093363, 12630121616, -16802270373, \
19534438464, -16802270373, 12630121616, -6625093363, 3501442784, -770720657, 710986864, 90241897]
FIFTEEN_POINTS_COEF = 7 / 2501928000
def build_sequence(a, h, n):
    """Yield int(n) equally spaced points: a, a + h, a + 2h, ..."""
    point = a
    for _ in range(int(n)):
        yield point
        point += h

def left_rectangles(f, a, b, h):
    """Left-endpoint rectangle rule on [a, b] with step h."""
    return h * sum(f(x) for x in build_sequence(a, h, (b - a) / h))

def right_rectangles(f, a, b, h):
    """Right-endpoint rectangle rule on [a, b] with step h."""
    return h * sum(f(x) for x in build_sequence(a + h, h, (b - a) / h))

def middle_rectangles(f, a, b, h):
    """Midpoint rectangle rule on [a, b] with step h."""
    return h * sum(f(a + (k - 0.5) * h) for k in range(1, int((b - a) / h) + 1))

def trapezoid(f, a, b, h):
    """Composite trapezoid rule on [a, b] with step h."""
    return (h / 2) * (f(a) + 2 * sum(f(a + k * h) for k in range(1, int((b - a) / h))) + f(b))
def simpson(f, a, b):
    """Simpson's rule on a single interval [a, b]."""
    return ((b - a) / 6) * (f(a) + 4 * f((a + b) / 2) + f(b))

def composite_simpson(f, a, b, h):
    """Composite Simpson's rule: apply simpson() on each step of width h."""
    steps = int((b - a) / h)
    return sum(simpson(f, a + (k - 1) * h, a + k * h) for k in range(1, steps + 1))
def newton_cotes_basic(f, a, b, nodes_coefs, coef):
    """Closed Newton-Cotes rule on [a, b] with the given node weights."""
    intervals = len(nodes_coefs) - 1
    h = (b - a) / intervals
    weighted = sum(w * f(a + i * h) for i, w in enumerate(nodes_coefs))
    return (b - a) * (coef / intervals) * weighted

def newton_cotes(f, a, b, h, nodes_coefs, coef):
    """Composite Newton-Cotes: apply the basic rule on each step of width h."""
    steps = int((b - a) / h)
    return sum(newton_cotes_basic(f, a + (k - 1) * h, a + k * h, nodes_coefs, coef)
               for k in range(1, steps + 1))
if __name__ == "__main__":
    i = 10
    # (label, rule) pairs; each rule is a thunk so values are computed
    # one at a time, in print order, exactly as before. The "retangles"
    # typo is kept to preserve the original output text.
    rules = [
        ("Left retangles", lambda: left_rectangles(f, A, B, count_step(i))),
        ("Right retangles", lambda: right_rectangles(f, A, B, count_step(i))),
        ("Middle retangles", lambda: middle_rectangles(f, A, B, count_step(i))),
        ("Trapezoid", lambda: trapezoid(f, A, B, count_step(i))),
        ("Simpson", lambda: composite_simpson(f, A, B, count_step(i))),
        ("Newton-Cotes3", lambda: newton_cotes(f, A, B, count_step(i), [1, 4, 1], 1 / 3)),
        ("Newton-Cotes5", lambda: newton_cotes(f, A, B, count_step(i), FIVE_POINTS, FIVE_POINTS_COEF)),
        ("Newton-Cotes7", lambda: newton_cotes(f, A, B, count_step(i), SEVEN_POINTS, SEVEN_POINTS_COEF)),
        ("Newton-Cotes9", lambda: newton_cotes(f, A, B, count_step(i), NINE_POINTS, NINE_POINTS_COEF)),
        ("Newton-Cotes11", lambda: newton_cotes(f, A, B, count_step(i), ELEVEN_POINTS, ELEVEN_POINTS_COEF)),
        ("Newton-Cotes13", lambda: newton_cotes(f, A, B, count_step(i), THIRTEEN_POINTS, THIRTEEN_POINTS_COEF)),
        ("Newton-Cotes15", lambda: newton_cotes(f, A, B, count_step(i), FIFTEEN_POINTS, FIFTEEN_POINTS_COEF)),
    ]
    for label, rule in rules:
        print(label + ": " + str(abs(rule() - EXACT_VALUE)))
|
# encoding: utf-8
from .templates.lastplayed import lastplayedtemplate
class LastPlayed:
    """Resource that renders the list of most recently played items."""

    __dispatch__ = 'resource'
    __resource__ = 'lastplayed'

    def __init__(self, context, *arg, **args):
        self._ctx = context
        self.queries = self._ctx.queries

    def get(self, *arg, **args):
        """Render the last-played page for the configured item count."""
        recent = self.queries.get_last_played(count=self._ctx.lastplay_count)
        title = "{} Last Played".format(self._ctx.lastplay_count)
        return lastplayedtemplate(title, self._ctx, recent)
|
# Generated by Django 2.1.4 on 2019-09-03 17:24
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated Django migration (tracks 0013).

    NOTE(review): ``default=django.utils.timezone.now`` on a CharField is a
    Django makemigrations artifact (the callable's str() becomes the
    default) — confirm the intended default for `answer1`.
    """

    dependencies = [
        ('tracks', '0012_auto_20190903_0920'),
    ]

    operations = [
        migrations.AddField(
            model_name='exam',
            name='answer1',
            field=models.CharField(default=django.utils.timezone.now, max_length=100),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='exam',
            name='right_answer',
            field=models.CharField(blank=True, choices=[('answer1', 'answer1'), ('answer2', 'answer2'), ('answer3', 'answer3'), ('answer4', 'answer4')], max_length=20, null=True),
        ),
    ]
|
import csv
import os
from html_creator import *
from enum import Enum
# defining the dictionary which will indiate index of the attributes in an specific csv file
# Maps a column-header string to its index in the current csv file
# (filled in by set_the_map_indexes; -1 means the column is absent).
Attribute_Index = {}
# Default folder that metric csv files are read from.
Metrics_Folder = "metrics"
# Accumulates the discovered metric file paths (filled by get_metric_files).
Metric_Files = []
# defining the keys which should be used in the metric.csv file
class MetricKeys:
    """Column-header names expected in a metric .csv file."""
    File_Name = "File Name"
    File_ID = "File ID"
    Structure = "Structure"
    Record_Size = "Record Size"
    Record_Size_Co = "Record Size Co"
    Record_Num = "Record Num"
    Transparent_File_Size = "Transparent File Size"
    Transparent_File_Size_Co = "Transparent File Size Co"
    Read = "Read"
    Update = "Update"
    Deactivate = "Deactivate"
    Activate = "Activate"
    Increase = "Increase"
    SFI = "SFI"
class EFStructure(Enum):
    """Elementary-file structure types (SIM/UICC file organisation)."""
    Transparent = 1
    LinearFixed = 2
    Cyclic = 3
def set_the_map_indexes(header_list):
    """Record, in Attribute_Index, the position of every MetricKeys column
    name inside *header_list* (-1 when the column is missing)."""
    for attr_name in dir(MetricKeys):
        # Skip dunders and anything private on the class.
        if attr_name.startswith('_'):
            continue
        column_name = getattr(MetricKeys, attr_name)
        try:
            Attribute_Index[column_name] = header_list.index(column_name)
        except ValueError:
            Attribute_Index[column_name] = -1
def get_metric_files():
    """Collect every .csv path under Metrics_Folder into Metric_Files.

    BUG FIX: paths were previously joined with a hard-coded backslash
    ("metrics\\file.csv"), which only works on Windows; os.path.join
    produces the correct separator on every platform.
    """
    for entry in os.listdir(Metrics_Folder):
        if entry.endswith(".csv"):
            Metric_Files.append(os.path.join(Metrics_Folder, entry))
def main():
    """List the metric csv files, index the header of each, then emit a
    report via HtmlCreator.

    NOTE(review): the csv file handle is never closed, only the first
    file's indexes survive the loop, and the output path is a hard-coded
    Windows path — all look like work-in-progress.
    """
    get_metric_files()
    # listing the metric files
    print("metric files are as below")
    print("----------------------------------------")
    for i in Metric_Files:
        print(i)
    print("----------------------------------------")
    for Metric in Metric_Files:
        # getting header of the csv file
        csv_file = open(Metric)
        csv_reader = csv.reader(csv_file, delimiter=",")
        # The first row is the header; map each known column to its index.
        set_the_map_indexes(csv_reader.__next__())
        print(Attribute_Index)
    html = HtmlCreator("IRMCI")
    html.terminate(TestResult.failed, "all passed", "D:\\Alireza\\My documents\\ts_131_102_conformance\\scripts\\sample\\res.html")

if __name__ == "__main__":
    main()
|
from django.shortcuts import render
# Create your views here.
def helloDjango(request):
    """Render the first hello page."""
    return render(request, 'hello.html')

def helloDjango2(request):
    """Render the second hello page.

    NOTE(review): the template name 'helllo2.html' looks like a typo for
    'hello2.html' — confirm against the actual template file on disk
    before changing it.
    """
    return render(request, 'helllo2.html')
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 21 16:33:10 2019
@author: ben
"""
import gym
import pyworld.common as pyw
from pyworld.common import Info
from Policy_Gradient import PolicyGradientAgentImproved, PolicyGradientNetwork
from tensorboardX import SummaryWriter
class PolicyGradientAgent(pwag.LearningAgent):
    '''
    Implementaion of a PolicyGradientAgent that uses entropy regularisation and a baseline.

    NOTE(review): this file only imports ``pyworld.common as pyw``, yet the
    class references ``pwag``, ``optim``, ``torch`` and ``nnf`` — none of
    which are imported here, so using this class raises NameError. The
    script below instantiates PolicyGradientAgentImproved instead, so this
    class appears to be dead code kept for reference.
    '''
    def __init__(self, net, gamma = 0.99, learning_rate = 0.01, batch_size = 16, entropy_beta = 0.01, debug = None):
        # Rewards are unrolled over 6 steps with discount gamma; actions
        # are sampled from the network's output probabilities.
        sensors = [pwag.UnrollSensor(self.sense, gamma, 6)]
        actuators = [pwag.ProbabilisticActuator()]
        super(PolicyGradientAgent,self).__init__(model=net,
                                                 optimizer=optim.Adam(net.parameters(), lr=learning_rate),
                                                 batch_labels=['state','action','g'],
                                                 batch_size=batch_size,
                                                 sensors = sensors,
                                                 actuators = actuators)
        self.entropy_beta = entropy_beta
        # Running mean of the unrolled reward, subtracted from targets to
        # reduce gradient variance.
        self.baseline = 0

    def sense(self, obs):
        '''
        Unrolled sense method that uses an unrolled part of the trajectory
        args: obs = (pstate, nstate, action, unrolled_reward, time), where time = (epsiode, step, gobal_step end).
        '''
        (pstate, action, unrolled_reward, nstate, time) = obs
        #moving average of baseline (mean unrolled reward)
        self.baseline = self.baseline + ((unrolled_reward - self.baseline) / time.global_step)
        # add observation to thecurrent batch
        self.batch.state.append(pstate)
        self.batch.action.append(int(action))
        self.batch.g.append(unrolled_reward - self.baseline)
        #for debug info
        self.summary_info['baseline'] = self.baseline
        self.summary_info['targets'] = unrolled_reward - self.baseline
        # Train once every batch_size episodes, then reset the batch.
        if time.end:
            if time.episode % self.batch_size == 0:
                self.train()
                self.batch.state.clear()
                self.batch.action.clear()
                self.batch.g.clear()

    def attempt(self, state):
        # Forward pass -> action probabilities -> sampled action.
        out = self.model(torch.FloatTensor(state))
        probs_v = nnf.softmax(out, dim=0)
        self.actuators[0](probs_v) #attempt the action based on action probs

    def train(self):
        '''
        Performs one gradient step using the current batch
        '''
        self.optimizer.zero_grad()
        states_v = torch.FloatTensor(self.batch.state)
        actions_v = torch.LongTensor(self.batch.action)
        g_v = torch.FloatTensor(self.batch.g)
        objective = self.loss(states_v, actions_v, g_v)
        objective.backward()
        #self.losses.append(objective.item())
        self.optimizer.step()
        #some more debug info for our summary writer
        grad_max = 0.0
        grad_means = 0.0
        grad_count = 0
        for p in self.model.parameters():
            grad_means += (p.grad ** 2).mean().sqrt().item()
            grad_count += 1
            grad_max = max(grad_max, p.grad.abs().max().item())
        self.summary_info['grads/grad_l2'] = grad_means / grad_count
        self.summary_info['grads/grad_max'] = grad_max

    def loss(self, states_v, actions_v, g_v):
        '''
        Loss function for PolicyGradient with entropy regulariser.
        Recall that entropy is minimal when out policy is sure about which action to take and we dont want our agent to be to sure.
        Given that we are trying to maximise q * log prob we should subtract the entropy term.
        '''
        logits_v = self.model(states_v)
        log_prob_v = nnf.log_softmax(logits_v, dim=1)
        # Pick the log-prob of the action actually taken, scaled by the
        # baseline-corrected return g.
        log_prob_actions_v = g_v * log_prob_v[range(states_v.shape[0]), actions_v]
        policy_loss_v = - log_prob_actions_v.mean()
        prob_v = nnf.softmax(logits_v, dim=1)
        entropy_v = -(prob_v * log_prob_v).sum(dim=1).mean()
        entropy_loss_v = - self.entropy_beta * entropy_v
        loss_v = entropy_loss_v + policy_loss_v
        #for debug/summary writer
        self.summary_info['loss/entropy_loss'] = entropy_loss_v.item()
        self.summary_info['loss/policy_loss'] = policy_loss_v.item()
        self.summary_info['loss/loss'] = loss_v.item()
        self.update_summary = True
        return loss_v
# Register a long-horizon CartPole variant (5000-step episode limit).
ENV = 'CartPole-long-v0'
gym.register(
    id=ENV,
    entry_point='gym.envs.classic_control:CartPoleEnv',
    tags={'wrapper_config.TimeLimit.max_episode_steps': 5000},
    reward_threshold=4750.0,
)
env = gym.make(ENV)
input_shape = env.observation_space.shape[0]
output_shape = env.action_space.n
print('observation dim:', (input_shape, ))
print('action dim: ', (output_shape, ))

# TensorBoard writer used for debug summaries during training.
summary_writer = SummaryWriter(comment="-cartpole-pg")
debug = Info(summary_writer)

#ag = ReinforceAgent(PolicyGradientNetwork(input_shape,output_shape), batch_size=4, summary_writer=summary_writer)
ag = PolicyGradientAgentImproved(PolicyGradientNetwork(input_shape,output_shape), batch_size=4)
sim = pyw.GymSimulator(env, ag, debug)

print('Training: ', ENV)
# Train until 10000 episodes or until the tracked average reward
# (first entry of debug.info) exceeds 2000.
for (episode, step, global_step, done) in sim:
    avg_reward = debug.info[debug.info_labels[0]]
    if episode > 10000 or avg_reward > 2000:
        break
sim.close()

########## TEST and render
print("TEST!")
#env = gym.make('CartPole-long-v0')
# Record a video of one rendered evaluation episode (max 2000 steps).
env = gym.wrappers.Monitor(env, './videos', force=True)
sim = pyw.GymSimulator(env, ag)
sim.render = True
for (episode, step, global_step, done) in sim:
    if done or step > 2000:
        break
sim.close()
|
# Print the first 10 terms of an arithmetic progression (PA): the user
# supplies the first term and the common difference ("razão").
primeiro = int(input('Digite o primeiro termo: '))
razao = int(input('Digite a razão da PA: '))
termo = primeiro
cont = 0
# Ten iterations: cont runs 0..9.
while cont <= 9:
    print(f'{termo} > ', end=(' '))
    termo = termo + razao
    cont += 1
print('Fim')
|
def square_of_sum(n):
    """Return the square of the sum 1 + 2 + ... + n.

    Uses the closed form n(n+1)/2 with integer division, so the result is
    an int (the original's `/ 2` produced a float of equal value).
    """
    total = n * (n + 1) // 2
    return total ** 2


def sum_of_squares(n):
    """Return 1**2 + 2**2 + ... + n**2 via the closed form n(n+1)(2n+1)/6,
    replacing the original O(n) loop (which also shadowed builtin `sum`)."""
    return n * (n + 1) * (2 * n + 1) // 6


def difference(n):
    """Return square_of_sum(n) - sum_of_squares(n) (Project Euler #6)."""
    return square_of_sum(n) - sum_of_squares(n)


if __name__ == '__main__':
    print(difference(10))
|
#-*-coding: utf-8 -*-#
class HousePark:
    """A member of the Park ("박") family; printed messages are Korean."""
    # Original (Korean) comment said "meaning of private".
    # NOTE(review): dunder names like __last_name__ are NOT name-mangled or
    # private in Python — only single-underscore-prefixed class attrs are
    # mangled (__x), and that is a different spelling.
    __last_name__ = "박"
    full_name = ""
    def __init__(self,name):
        # Full name = family surname + given name.
        self.full_name = self.__last_name__ + name
    def travel(self,where):
        # Prints "<full_name>, goes on a trip to <where>".
        print("%s, %s 여행을 가다"%(self.full_name, where))
    def love(self,other):
        # Prints "<full_name> fell in love with <other>".
        print("%s, %s와 사랑에 빠졌다."%(self.full_name, other.full_name))
    def __add__(self, other):
        # `a + b` prints a marriage message; implicitly returns None.
        print("%s, %s와 결혼했네" %(self.full_name, other.full_name))
class HouseKim(HousePark):
    """Kim ("김") family; overrides travel() with an extra duration arg."""
    __last_name__= "김"
    def travel(self, where, day):
        # Prints "<full_name>, goes on a <day>-day trip to <where>".
        print("%s, %s 여행 %d일 가다"%(self.full_name, where, day))
# Demo: create one member of each family and exercise the methods,
# including the overloaded `+` (prints a marriage message).
pey = HousePark("응용")
juliet = HouseKim("주리")
pey.travel("대구")
juliet.travel("대구", 3)
pey.love(juliet)
pey+juliet
|
def slices(string, d):
    """Return every consecutive window of d digits from *string*.

    Args:
        string: a string of decimal digit characters.
        d: window length; must satisfy 0 < d <= len(string).

    Returns:
        A list of lists of ints, one per window, in order of appearance.

    Raises:
        ValueError: if d is not positive or exceeds len(string).
          (Robustness fix: the original only rejected d == 0, so a
          negative d silently produced a list of empty windows.)
    """
    if d <= 0 or d > len(string):
        raise ValueError("slice length must be between 1 and len(string)")
    return [[int(ch) for ch in string[i:i + d]]
            for i in range(len(string) - d + 1)]
import datetime
from typing import Iterable
from google.cloud.bigquery import Client
from google.cloud.bigquery.table import RowIterator
from pepy.domain.pypi import StatsViewer, Result, Row
class BQStatsViewer(StatsViewer):
    """StatsViewer backed by the BigQuery public PyPI download dataset."""

    TIMEOUT = 20 * 60  # timeout of 20 minutes
    PAGE_SIZE = 5_000  # rows fetched per page when streaming the result

    def __init__(self, client: Client):
        self.client = client

    def get_version_downloads(self, date: datetime.date) -> Result:
        """Return per-(project, version) download counts for one UTC day.

        Runs an aggregate query over bigquery-public-data.pypi.file_downloads
        for [date, date+1), then streams the result table page by page.
        """
        QUERY = """
            SELECT file.project as project, file.version as version, count(*) AS downloads, countif(details.installer.name = 'pip') as pip_downloads
            FROM `bigquery-public-data.pypi.file_downloads`
            WHERE timestamp >= '{}' AND timestamp < '{}'
            GROUP BY file.project, file.version
            ORDER BY file.project
        """.format(
            date.strftime("%Y-%m-%d"), date + datetime.timedelta(days=1)
        )
        query_job = self.client.query(QUERY, location="US")
        # Block until the job finishes (or TIMEOUT seconds elapse).
        query_job.result(self.TIMEOUT)
        # Read from the job's destination table rather than the job itself,
        # so rows can be listed with an explicit page size.
        destination = query_job.destination
        destination = self.client.get_table(destination)
        rows = self.client.list_rows(destination, page_size=self.PAGE_SIZE)
        return Result(rows.total_rows, self._transform_rows(rows, date))

    @staticmethod
    def _transform_rows(row_iterator: RowIterator, date: datetime.date) -> Iterable[Row]:
        """Lazily convert BigQuery rows into domain Row objects."""
        for row in row_iterator:
            yield Row(row.get("project"), row.get("version"), date, row.get("downloads"), row.get("pip_downloads"))
|
import unittest
import requests
import time
from vaurienclient import Client
from vaurien.util import start_proxy, stop_proxy
from vaurien.tests.support import start_simplehttp_server
# Address the vaurien proxy listens on during these tests.
_PROXY = 'http://localhost:8000'

class TestSimpleProxy(unittest.TestCase):
    """End-to-end tests: a vaurien proxy fronting a simple HTTP server."""

    def setUp(self):
        # Start the proxy and a backend web server; give them 200ms to boot.
        self._proxy_pid = start_proxy(log_output='/dev/null',
                                      log_level='error')
        self._web = start_simplehttp_server()
        time.sleep(.2)
        try:
            # poll() returning non-None means the server process died.
            if self._web.poll():
                raise ValueError("Could not start the proxy")
            self.client = Client()
            assert self.client.get_behavior() == 'dummy'
        except Exception:
            # Ensure processes are torn down even when setUp fails halfway.
            self.tearDown()
            raise

    def tearDown(self):
        stop_proxy(self._proxy_pid)
        self._web.terminate()

    def test_existing_behaviors(self):
        """The proxy advertises the expected behavior list."""
        wanted = ['blackout', 'delay', 'dummy', 'error', 'hang', 'transient', 'abort']
        self.assertEqual(self.client.list_behaviors(), wanted)

    def test_proxy(self):
        """Requests pass through in 'dummy' mode and fail under faults."""
        # let's do a few simple request first to make sure the proxy works
        self.assertEqual(self.client.get_behavior(), 'dummy')
        for i in range(10):
            res = requests.get(_PROXY)
            self.assertEqual(res.status_code, 200)
        # now let's add a bit of havoc
        with self.client.with_behavior('blackout'):
            # oh look we broke it
            self.assertRaises(requests.ConnectionError, requests.get, _PROXY)
            self.assertEqual(self.client.get_behavior(), 'blackout')
        with self.client.with_behavior('abort'):
            # oh look we broke it
            self.assertRaises(requests.ConnectionError, requests.get, _PROXY)
            self.assertEqual(self.client.get_behavior(), 'abort')
        # we should be back to normal
        self.assertEqual(self.client.get_behavior(), 'dummy')
        res = requests.get(_PROXY)
        self.assertEqual(res.status_code, 200)
|
#!/usr/bin/env python
import hashlib
import argparse
import os
# Command line: --directory is the root to scan for duplicate files.
parser = argparse.ArgumentParser()
parser.add_argument("--directory", help="")
options = parser.parse_args()

# size (bytes) -> list of file paths with that size
SAME_SIZE = {}
# md5 hex digest -> list of file paths with that digest
SAME_HASH = {}
def hashmd5(filename):
    """Return the hex MD5 digest of *filename*, read in 1 MiB chunks.

    BUG FIX: the file is now opened in binary mode — text mode corrupts
    digests of binary files on Windows and breaks md5.update() entirely
    under Python 3 — and is closed deterministically via a context
    manager (the original leaked the handle).
    """
    filehash = hashlib.md5()
    with open(filename, 'rb') as f:
        while True:
            data = f.read(1024*1024)
            if not data:
                break
            filehash.update(data)
    return filehash.hexdigest()
#find same length files
# Pass 1: group every non-empty file under --directory by byte size.
# (Only same-size files can possibly be duplicates.) Python 2 prints.
print "Finding files of same size..."
for root, dirs, files in os.walk(options.directory):
    for name in files:
        full_path = os.path.join(root, name)
        try:
            size = os.path.getsize(full_path)
        except:
            # Unreadable/vanished file: skip it.
            continue
        if size==0: continue
        if size not in SAME_SIZE:
            SAME_SIZE[size] = []
        SAME_SIZE[size].append(full_path)

#find hashes
# Pass 2: hash only the size groups with 2+ members and group by digest.
print "Calculating hashes for those files..."
for size in SAME_SIZE:
    if len(SAME_SIZE[size]) < 2: continue
    for filename in SAME_SIZE[size]:
        filehash = hashmd5(filename)
        if filehash not in SAME_HASH:
            SAME_HASH[filehash] = []
        SAME_HASH[filehash].append(filename)

# Pass 3: any digest with 2+ paths is a duplicate set; print them.
print "Outputing results..."
for filehash in SAME_HASH:
    if len(SAME_HASH[filehash]) < 2: continue
    for filename in SAME_HASH[filehash]:
        print filehash,filename
    print ""
# -*- coding: utf-8 -*-
"""MRI waveform import/export files.
"""
import struct
import numpy as np
__all__ = ["signa", "ge_rf_params", "philips_rf_params", "siemens_rf"]
def siemens_rf(
    pulse, rfbw, rfdurms, pulsename, minslice=0.5, maxslice=320.0, comment=None
):
    """Write a .pta text file for Siemens PulseTool.

    Args:
        pulse (array): complex-valued RF pulse array with maximum of 4096
            points.
        rfbw (float): bandwidth of RF pulse in Hz
        rfdurms (float): duration of RF pulse in ms
        pulsename (string): '<FamilyName>.<PulseName>', e.g. 'Sigpy.SincPulse'
        minslice (float): minimum slice thickness [mm]
        maxslice (float): maximum slice thickness [mm]
        comment (string): a comment that can be seen in Siemens PulseTool

    Note this has only been tested on MAGNETOM Verio running (VB17)
    Open pulsetool from the IDEA command line. Open the extrf.dat file and add
    this .pta file using the import function
    Recommended to make a copy and renaming extrf.dat prior to making changes.
    After saving a new pulse to <myUniqueFileName>_extrf.dat and copying it to
    the scanner, you will need to re-boot the host for it to load changes.
    """
    # get the number of points in RF waveform
    npts = pulse.size
    assert npts <= 4096, (
        "RF pulse must have less than 4096 points for" " Siemens VB17"
    )
    if comment is None:
        comment = ""
    # Calculate reference gradient value.
    # This is necessary for proper calculation of slice-select gradient
    # amplitude using the .getGSAmplitude() method for the external RF class.
    # See the IDEA documentation for more details on this.
    refgrad = 1000.0 * rfbw * (rfdurms / 5.12) / (42.577e06 * (10.0 / 1000.0))
    # 'with' guarantees the file is closed even if a write fails
    # (the original leaked the handle on any error).
    with open(pulsename + ".pta", "w") as rffile:
        rffile.write("PULSENAME: {}\n".format(pulsename))
        rffile.write("COMMENT: {}\n".format(comment))
        rffile.write("REFGRAD: {:6.5f}\n".format(refgrad))
        rffile.write("MINSLICE: {:6.5f}\n".format(minslice))
        rffile.write("MAXSLICE: {:6.5f}\n".format(maxslice))
        # the following are related to SAR calcs and will be calculated by
        # PulseTool upon loading the pulse
        rffile.write("AMPINT: \n")
        rffile.write("POWERINT: \n")
        rffile.write("ABSINT: \n\n")
        # magnitude must be between 0 and 1, so normalize by the peak
        mxmag = np.max(np.abs(pulse))
        for n in range(npts):
            mag = np.squeeze(np.abs(pulse[n]) / mxmag)  # magnitude at current point
            pha = np.squeeze(np.angle(pulse[n]))  # phase at current point
            rffile.write("{:10.9f}\t{:10.9f}\t; ({:d})\n".format(mag, pha, n))
def signa(wav, filename, scale=-1):
    """Write a binary waveform in the GE format.

    Args:
        wav (array): waveform (gradient or RF), may be complex-valued.
        filename (string): filename to write to. A real waveform is written
            to `filename`; a complex one to `filename`.r / `filename`.i.
        scale (float): scaling factor to apply (default = waveform's max)

    Adapted from John Pauly's RF Tools signa() MATLAB function
    """
    wmax = int("7ffe", 16)  # largest usable signed short (low bit reserved)
    if not np.iscomplexobj(wav):
        if scale == -1:
            scale = 1 / np.max(np.abs(wav))
        # scale up to fit in a short integer
        wav = wav * scale * wmax
        # mask off low bit, since it would be an EOS otherwise
        wav = 2 * np.round(wav / 2)
        # context managers close the files even if packing raises
        # (the original leaked the handles on error).
        with open(filename, "wb") as fid:
            for x in np.nditer(wav):
                fid.write(struct.pack(">h", int(x.item())))
    else:
        if scale == -1:
            # normalize against the larger of the real/imaginary peaks
            scale = 1 / np.max(
                (np.max(np.abs(np.real(wav))), np.max(np.abs(np.imag(wav))))
            )
        # scale up to fit in a short integer
        wav = wav * scale * wmax
        # mask off low bit, since it would be an EOS otherwise
        wav = 2 * np.round(wav / 2)
        with open(filename + ".r", "wb") as fid:
            for x in np.nditer(wav):
                fid.write(struct.pack(">h", int(np.real(x))))
        with open(filename + ".i", "wb") as fid:
            for x in np.nditer(wav):
                fid.write(struct.pack(">h", int(np.imag(x))))
def ge_rf_params(rf, dt=4e-6):
    """Calculate and print RF pulse parameters for deployment
    on a GE scanner.

    Args:
        rf (array): RF pulse samples
        dt (scalar): RF dwell time (seconds)

    Adapted from Adam Kerr's rf_save() MATLAB function
    """
    print("GE RF Pulse Parameters:")
    n = len(rf)
    rfn = rf / np.max(np.abs(rf))  # normalized pulse
    abswidth = np.sum(np.abs(rfn)) / n
    print("abswidth = ", abswidth)
    effwidth = np.sum(np.abs(rfn) ** 2) / n
    print("effwidth = ", effwidth)
    print("area = ", abswidth)
    # max_pw: longest contiguous run of nonzero samples, as a fraction of n.
    # Bug fixes vs. the original: the run length was never accumulated, the
    # test `pon[i] == 0 & temp_pw > 0` parsed as `pon[i] == (0 & temp_pw) > 0`
    # (always False, since & binds tighter than ==), and np.max(max_pw,
    # temp_pw) treated temp_pw as an *axis* argument -- so max_pw was always 0.
    pon = np.abs(rfn) > 0
    temp_pw = 0
    max_pw = 0
    for on in pon:
        if on:
            temp_pw += 1
            max_pw = max(max_pw, temp_pw)
        else:
            temp_pw = 0
    max_pw = max_pw / n
    # duty cycle: fraction of samples above ~0.2236 of peak, floored at max_pw
    dty_cyc = np.sum(np.abs(rfn) > 0.2236) / n
    if dty_cyc < max_pw:
        dty_cyc = max_pw
    print("dtycyc = ", dty_cyc)
    print("maxpw = ", max_pw)
    max_b1 = np.max(np.abs(rf))
    print("max_b1 = ", max_b1)
    int_b1_sqr = np.sum(np.abs(rf) ** 2) * dt * 1e3
    print("int_b1_sqr = ", int_b1_sqr)
    rms_b1 = np.sqrt(np.sum(np.abs(rf) ** 2)) / n
    print("max_rms_b1 = ", rms_b1)
def philips_rf_params(rf):
    """Calculate and print RF pulse parameters for deployment
    on a Philips scanner.

    Args:
        rf (array): RF pulse samples (assumed real-valued)
    """
    print("Philips RF Pulse Parameters")
    n = len(rf)
    rfn = rf / np.max(np.abs(rf))  # normalized pulse
    # Philips quantizes waveforms to signed 16-bit, 32767 = full scale.
    full_scale = 32767
    scaled = rfn * full_scale
    am_c_teff = np.sum(scaled) / (full_scale * n)
    print("am_c_teff = ", am_c_teff)
    am_c_trms = np.sum(scaled ** 2) / (full_scale ** 2 * n)
    print("am_c_trms = ", am_c_trms)
    am_c_tabs = np.sum(np.abs(rfn) * full_scale) / (full_scale * n)
    print("am_c_tabs = ", am_c_tabs)
    # assume that the isodelay point is at the peak
    am_c_sym = np.argmax(np.abs(rfn)) / n
    print("am_c_sym = ", am_c_sym)
|
#!/usr/bin/python
import re
import os
import sys
import uuid
import json
import urllib
import httplib2
import shutil
import subprocess
import Queue
import threading
from exception import URLNotValidException
from downloadThread import DownloadThread
sys.path.append('logger')
from logger import Logger
class VideoURL:
    """Download or generate thumbnail images for a video given its page URL.

    Pipeline (see start()): resolve the video's UUID from the URL alias,
    fetch its CCM metadata object, then either download pre-existing
    thumbnails (key frames or snapshots) or generate new ones with ffmpeg
    from a downloaded video stream. Python 2 module (urllib.urlopen, Queue).
    """

    def __init__(self, url):
        """Validate and store the video URL.

        Raises URLNotValidException when `url` is not an http(s) URL.
        """
        self.logger = Logger()
        if not self.verifyURL(url):
            raise URLNotValidException("The input URL is not a valid URL")
        self.url = url

    def start(self):
        """Run the whole thumbnail pipeline for self.url."""
        self.logger.info("Start process")
        # Get video UUID
        videoUUID = self.getVideoUUID()
        # Get the video CCM
        videoCCM = self.getCCMObject(videoUUID)
        # Get the directory to store thumbs
        directory = self.getDirectoryWithUUID(videoUUID)
        # Get the exisiting thumbnail from video CCM
        response = self.getExistingThumbs(directory, videoCCM)
        # Extracting the thumbnail
        if not response:
            self.getGeneratedThumbs(directory, videoCCM)

    def getExistingThumbs(self, directory, videoCCM):
        """Try to download ready-made thumbnails; return True on success."""
        self.logger.info("Download existing thumbnails")
        # Key frames are preferred; fall back to video snapshots.
        if self.downloadThumbsFromKeyFrames(directory, videoCCM):
            return True
        else:
            return self.downloadThumbsFromSnapshot(directory, videoCCM)

    def downloadThumbsFromKeyFrames(self, directory, videoCCM):
        """Download thumbnails listed under 'yahoo-media:key_frames'.

        Returns False when the CCM lacks the facet, a base URL, or a
        positive frame count.
        """
        self.logger.info("Download thumbnails from key frames")
        directory = directory + "/thumbs"
        videoCCMJson = json.loads(videoCCM)
        if "yahoo-media:key_frames" not in videoCCMJson:
            return False
        else:
            # Get base_url
            if "base_url" not in videoCCMJson["yahoo-media:key_frames"]:
                return False
            else:
                baseURL = videoCCMJson["yahoo-media:key_frames"]["base_url"]
            # Get frame_count
            if "frame_count" not in videoCCMJson["yahoo-media:key_frames"]:
                return False
            else:
                frameCount = int(videoCCMJson["yahoo-media:key_frames"]["frame_count"])
                if frameCount <= 0:
                    return False
        # key-frame URLs embed the frame index via a "%d" placeholder
        return self.downloadThums(baseURL, frameCount, "%d", directory)

    def downloadThumbsFromSnapshot(self, directory, videoCCM):
        """Download thumbnails listed under 'yahoo-media:video_snapshots'.

        Returns False when the facet, URL, or a positive count is missing.
        """
        self.logger.info("Download thumbnails from snapshot")
        directory = directory + "/thumbs"
        videoCCMJson = json.loads(videoCCM)
        if "yahoo-media:video_snapshots" not in videoCCMJson:
            return False
        else:
            # Get base_url
            if "url" not in videoCCMJson["yahoo-media:video_snapshots"]:
                return False
            else:
                url = videoCCMJson["yahoo-media:video_snapshots"]["url"]
                url = url.partition("?")[0]  # strip the query string
            # Get frame_count
            if "count" not in videoCCMJson["yahoo-media:video_snapshots"]:
                return False
            else:
                count = int(videoCCMJson["yahoo-media:video_snapshots"]["count"])
                if count <= 0:
                    return False
        # snapshot URLs embed the frame index via an "xx" placeholder
        return self.downloadThums(url, count, "xx", directory)

    def downloadThums(self, url, count, replaxPattern, directory):
        """Download `count` thumbnails in parallel worker threads.

        `replaxPattern` is the placeholder inside `url` replaced by the
        frame index. Returns False when frame 0 is not reachable.
        """
        self.logger.info("Download " + str(count) + " thumbnails from " + url)
        # Probe the first frame before spinning up worker threads.
        checkURL = url.replace(replaxPattern, "0")
        resp = urllib.urlopen(checkURL)
        code = resp.getcode()
        if code != 200:
            return False
        threadNum = 10;
        queueLock = threading.Lock()
        workQueue = Queue.Queue(count)
        threads = []
        threadID = 1
        for tName in range(threadNum):
            thread = DownloadThread(threadID, "thread-" + str(tName), workQueue, queueLock, url, replaxPattern, directory)
            thread.start()
            threads.append(thread)
            threadID += 1
        # Fill the queue under the lock so workers see a consistent queue.
        queueLock.acquire()
        for index in range(count):
            workQueue.put(str(index))
        queueLock.release()
        # Busy-wait until every index has been picked up by a worker.
        while not workQueue.empty():
            pass
        # Ask the workers to exit, then wait for them to finish.
        for t in threads:
            t.setExitFlag()
        for t in threads:
            t.join()
        self.writeDoneMarker(directory)
        return True

    def getVideoUUID(self):
        """Resolve the video UUID by looking up the URL alias object in CCM."""
        self.logger.info("Get video UUID")
        alias = self.getAlias(self.url)
        # UUIDv3 of the alias inside a fixed namespace yields the alias object id.
        aliasUUID = self.uuidGen("5547b17e-f73f-4f1a-bf27-f64bff70fb28", "ymedia-alias:cavideo=" + alias)
        aliasCCM = self.getCCMObject(str(aliasUUID))
        aliasCCMJson = json.loads(aliasCCM)
        # The alias object points at the real video object.
        videoUUID = aliasCCMJson["self"]["target_id"]
        return videoUUID

    def getGeneratedThumbs(self, directory, videoCCM):
        """Generate thumbnails locally when none could be downloaded."""
        self.logger.info("Generate thumbnails")
        # Prefer a transcoded stream; fall back to the source stream.
        if not self.getGeneratedThumbsFromStreams(directory, videoCCM):
            self.getGeneratedThumbsFromSourceStream(directory, videoCCM)

    def getGeneratedThumbsFromStreams(self, directory, videoCCM):
        """Generate thumbs from the highest-bitrate mp4 stream; True on success."""
        self.logger.info("Generate thumbnails from the highest bitrate stream")
        # Get the stream from CCM
        stream = self.getStream(videoCCM)
        if not stream:
            return False
        stream = "https://ingest.atlas.cdn.yimg.com" + stream
        # Download the video
        videoFileLocation = self.downloadVideo(stream, directory)
        # Extract the thumbnails from the downloaded video
        self.generateThumbs(videoFileLocation, directory)
        return True

    def getGeneratedThumbsFromSourceStream(self, directory, videoCCM):
        """Generate thumbs from the original (source) stream."""
        self.logger.info("Generate thumbnails from the source stream")
        # Get the source stream from CCM
        sourceStream = self.getSourceStream(videoCCM)
        # Download the video
        videoFileLocation = self.downloadVideo(sourceStream, directory)
        # Extract the thumbnails from the downloaded video
        self.generateThumbs(videoFileLocation, directory)

    def getSourceStream(self, videoCCM):
        """Return the URL of the first source stream in the video CCM."""
        self.logger.info("Get source stream from video CCM")
        videoCCMJson = json.loads(videoCCM)
        sourceStream = videoCCMJson["yahoo-media:source_streams"]["streams"][0]["url"]
        return sourceStream

    def getStream(self, videoCCM):
        """Return the URL of the highest-bitrate mp4 stream.

        Returns False when the streams facet is absent, or "" when no
        matching mp4 stream is found.
        """
        self.logger.info("Get highest bitrate stream from video CCM")
        videoCCMJson = json.loads(videoCCM)
        bitrate = 0
        url = ""
        if "yahoo-media:streams" not in videoCCMJson:
            return False
        else:
            if "streams" not in videoCCMJson["yahoo-media:streams"]:
                return False
            else:
                streams = videoCCMJson["yahoo-media:streams"]["streams"]
                # Keep the mp4 stream with the highest bitrate.
                for stream in streams:
                    if "format" in stream and stream["format"].lower() == "mp4" and "bitrate" in stream and int(stream["bitrate"]) > bitrate and "url" in stream:
                        bitrate = int(stream["bitrate"])
                        url = stream["url"]
        return url

    # Verify whether the input url is a valid url
    def verifyURL(self, url):
        """Return True when `url` matches a basic http(s) URL pattern."""
        self.logger.info("Verify input url " + url)
        urlPattern = re.compile("http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+")
        if urlPattern.match(url):
            return True
        else:
            return False

    # Extract the alias part from the video url
    def getAlias(self, url):
        """Return the last path segment of `url`, without its extension."""
        self.logger.info("Get alias from " + url)
        alias = url.rpartition("/")[2]
        alias = alias.partition(".")[0]
        return alias

    def uuidGen(self, namespace, query):
        """Return a deterministic UUIDv3 for `query` under `namespace`."""
        self.logger.info("Generate UUID using namespace " + namespace + " query " + query)
        namespaceUUID = uuid.UUID(namespace)
        return uuid.uuid3(namespaceUUID, str(query))

    def getCCMObject(self, uuid):
        """Fetch the CCM metadata object for `uuid` and return the raw JSON text."""
        self.logger.info("Get CCM object of UUID " + uuid)
        # httplib2 caches responses under the local ".cache" directory.
        h = httplib2.Http(".cache")
        resp, content = h.request("http://tools.mct.corp.yahoo.com:8080/v1/object/" + uuid + "?facets=yahoo-media:key_frames,yahoo-media:source_streams,yahoo-media:streams,yahoo-media:video_snapshots", "GET")
        self.logger.info("Video CCM: " + content)
        return content

    def getDirectory(self):
        """Create (wiping any previous contents) and return ~/<uuid>/thumbs."""
        videoUUID = self.getVideoUUID()
        directory = "~/" + videoUUID.replace("-", "")
        self.createDirectory(directory)
        thumbDir = directory + "/thumbs"
        self.createDirectory(thumbDir)
        return thumbDir;

    def getDirectoryWithUUID(self, videoUUID):
        """Return the per-video work directory path (not created here)."""
        directory = "~/" + videoUUID.replace("-", "")
        return directory;

    def createDirectory(self, directory):
        """Create `directory`, removing it first if it already exists."""
        self.logger.info("Create directory " + directory)
        if os.path.exists(os.path.expanduser(directory)):
            shutil.rmtree(os.path.expanduser(directory))
        os.makedirs(os.path.expanduser(directory))

    def downloadVideo(self, sourceStream, directory):
        """Download the stream via curl through a SOCKS proxy; return local path.

        NOTE(review): credentials are hard-coded in the curl command below --
        they should live in configuration or secret storage, and the shell
        command should be built as an argument list, not a string.
        """
        self.logger.info("Download video " + sourceStream)
        videoFileLocation = directory + "/video.mp4"
        p = subprocess.Popen("curl -o " + videoFileLocation + " -u lotus:Su72JQMx --socks5 socks.yahoo.com:1080 \"" + sourceStream + "\"", shell = True, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
        p.wait()
        return videoFileLocation

    def generateThumbs(self, videoFileLocation, directory):
        """Extract one frame per second from the video with ffmpeg."""
        self.logger.info("Generate thumbnails from video " + videoFileLocation)
        directory = directory + "/thumbs"
        imagePath = directory + "/out%d.png"
        p = subprocess.Popen("ffmpeg -i " + videoFileLocation + " -r 1 " + imagePath, shell = True, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
        p.wait()
        self.writeDoneMarker(directory)

    def writeDoneMarker(self, directory):
        """Touch a DONE.DONE file marking the directory as complete."""
        self.logger.info("Write DONE.DONE into " + directory)
        directory = directory + "/DONE.DONE"
        file = open(os.path.expanduser(directory),'w+')
        file.close()
|
import sys
from application.ide.coderun.coderunner import *
from PyQt4.QtCore import *
from PyQt4.QtGui import *
class DockToTabWidget(QDockWidget):
    """
    QDockWidget dockable in a DockingTabWidget (subclassed QTabWidget) rather than in a QMainWindow
    """

    def __init__(self, title, parent=0):
        # NOTE(review): parent defaults to 0 (treated as "no parent" by
        # PyQt4) rather than None; kept as-is for compatibility.
        QDockWidget.__init__(self, title, parent)
        self._title = title  # remembered so the tab can be re-created with its original label
        # When the dock is dropped back (stops floating), turn it into a tab.
        self.topLevelChanged.connect(self.dockToTab)

    def dockToTab(self):
        """Re-insert the hosted widget as a tab once the dock is docked back."""
        if not self.isFloating():
            # parent() is the DockingTabWidget this dock was torn from.
            self.parent().addTab(self.widget(), self._title)
            self.close()
            # NOTE(review): `del self` only unbinds the local name; it does
            # not destroy the Qt object (Qt's ownership handles deletion).
            del self
class DockingTabWidget(QTabWidget):
    """
    QTabWidget whose tabs can be made floating in a QDockWindow.
    A tab with name name can be made floatable or not with setFloatable(name) or setNotFloatable(name)
    A tab with name name can be made closable or not with setClosable(name) or setNotClosable(name)
    """

    def __init__(self):
        super(DockingTabWidget, self).__init__()
        self.setMovable(True)
        self.setTabsClosable(True)
        self.notFloatable = []  # name of the tabs that can't be undocked
        # Intercept double-clicks on the tab bar (handled in eventFilter).
        self.tabBar().installEventFilter(self)
        self.motion = 'rest'  # NOTE(review): never read in this class -- possibly vestigial
        self.tabCloseRequested.connect(self.tabClose)

    def setNotFloatable(self, name):
        """Mark the tab called `name` as not undockable."""
        if name not in self.notFloatable:
            self.notFloatable.append(name)

    def setFloatable(self, name):
        """Allow the tab called `name` to be undocked again."""
        if name in self.notFloatable:
            self.notFloatable.remove(name)

    def setNotClosable(self, name):
        """Remove the close button from the tab called `name`."""
        index = [self.tabText(i) for i in range(self.count())].index(name)
        self.tabBar().setTabButton(index, 1, None)

    def setClosable(self, name):
        """Restore a close button on the tab called `name`.

        NOTE(review): QTabBar.setTabButton expects a QWidget; passing 1 here
        looks suspect -- confirm against the PyQt4 API.
        """
        index = [self.tabText(i) for i in range(self.count())].index(name)
        self.tabBar().setTabButton(index, 1, 1)

    def eventFilter(self, object, event):
        """
        Event filter detecting double click for undocking a tab
        """
        if object == self.tabBar():
            if event.type() == QEvent.MouseButtonDblClick:
                pos = event.pos()
                tabIndex = object.tabAt(pos)
                title = self.tabText(tabIndex)
                if title not in self.notFloatable:
                    # Re-host the tab's page in a floating dock at the click position.
                    widget = self.widget(tabIndex)
                    self.removeTab(tabIndex)
                    dockWidget = DockToTabWidget(title, parent=self)
                    dockWidget.setFeatures(QDockWidget.AllDockWidgetFeatures)
                    dockWidget.setWidget(widget)
                    dockWidget.setFloating(True)
                    dockWidget.move(self.mapToGlobal(pos))
                    dockWidget.show()
                    return True  # event handled
        return False  # let Qt process the event normally

    def tabClose(self, index):
        """Close-button handler: drop the tab (the page widget is not deleted)."""
        self.removeTab(index)
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Sistemi Corporation, copyright, all rights reserved, 2022-2023
Martin Guthrie
"""
import logging
from core.test_item import TestItem
from public.prism.api import ResultAPI
import os
from public.prism.drivers.nrfprog.NRFProg import NRFProg, DRIVER_TYPE
NRF52833DK_ASSETS_PATH = "./public/prism/scripts/example/nRF52833-DK/assets"
# file and class name must match
class prog_P00xx(TestItem):
    """ Python Methods for nRF52833-DK programming
    """

    def __init__(self, controller, chan, shared_state):
        super().__init__(controller, chan, shared_state)
        # Per-channel logger so concurrent test channels are distinguishable.
        self.logger = logging.getLogger("nrfdk.{}".format(self.chan))
        # nrfjprog hardware driver handle; assigned in P0xxSETUP.
        self._nrfjprog = None

    def P0xxSETUP(self):
        """Bind the nrfjprog driver for this channel and select the target.

        {"id": "P0xxSETUP", "enable": true, "target": "nrf52"},

        :return:
        """
        ctx = self.item_start()  # always first line of test
        # drivers are stored in the shared_state and are retrieved as,
        drivers = self.shared_state.get_drivers(self.chan, type=DRIVER_TYPE)
        if len(drivers) > 1:
            # Exactly one programmer per channel is expected.
            self.logger.error("Unexpected number of drivers: {}".format(drivers))
            self.log_bullet("Unexpected number of drivers")
            self.item_end(ResultAPI.RECORD_RESULT_INTERNAL_ERROR)
            return
        driver = drivers[0]
        id = driver["obj"]["unique_id"]  # save the id of the teensy4 for the record
        self._nrfjprog = driver["obj"]["hwdrv"]
        msg = f"nrfjprog: {id}"
        self.log_bullet(msg)
        # Target chip comes from the script item (e.g. "nrf52").
        self._nrfjprog.set_target(ctx.item.target)
        self.log_bullet(ctx.item.target)
        self.item_end()  # always last line of test

    def P0xxTRDN(self):
        """ Teardown
        - always the last test called
        :return:
        """
        ctx = self.item_start()  # always first line of test
        self.item_end()  # always last line of test

    def P100_Program(self):
        """ Program
        - the file argument assume path TEENSY4_ASSETS_PATH
        {"id": "P100_Program", "enable": true, "file": "teensy4_server.ino.hex" },
        """
        ctx = self.item_start()  # always first line of test
        # Resolve the firmware image relative to the assets directory.
        file_path = os.path.join(NRF52833DK_ASSETS_PATH, ctx.item.file)
        if not os.path.isfile(file_path):
            self.log_bullet(f"file not found")
            self.item_end(ResultAPI.RECORD_RESULT_INTERNAL_ERROR)
            return
        self.log_bullet(f"{ctx.item.file}")
        result = self._nrfjprog.program(file_path)
        rc = result.returncode
        if rc:
            # Non-zero exit code from nrfjprog: record as an internal error.
            self.log_bullet(f"program error {rc}")
            self.logger.error(result.stderr)
            self.item_end(ResultAPI.RECORD_RESULT_INTERNAL_ERROR)
            return
        self.item_end()  # always last line of test
|
# Draw a rectangular frame of 'a' characters inside a field of '.' dots.
# X = window size (width, height); A = frame corners (x1, y1, x2, y2),
# 1-based, with (x1, y1) top-left and (x2, y2) bottom-right.
# Fix: the Cyrillic prompts were mojibake ('¬ведите' -> 'Введите').
print('Введите размеры окна через пробел:')  # "Enter the window size separated by spaces"
X = list(map(int, input().split()))
print('Введите координаты углов рамки:')  # "Enter the frame corner coordinates"
A = list(map(int, input().split()))
# Rows above the frame.
for i in range(A[1] - 1):
    print('.' * X[0])
# Top edge of the frame.
print('.' * (A[0] - 1), end='')
print('a' * (A[2] - A[0] + 1), end='')
print('.' * (X[0] - A[2]))
# Middle rows: only the left and right edges are drawn.
for i in range(A[3] - A[1] - 1):
    print('.' * (A[0] - 1), end='')
    print('a', end='')
    print('.' * (A[2] - A[0] - 1), end='')
    print('a', end='')
    print('.' * (X[0] - A[2]))
# Bottom edge of the frame.
print('.' * (A[0] - 1), end='')
print('a' * (A[2] - A[0] + 1), end='')
print('.' * (X[0] - A[2]))
# Rows below the frame.
for i in range(X[1] - A[3]):
    print('.' * X[0])
# baselineTeam.py
# ---------------
# Licensing Information: You are free to use or extend these projects for
# educational purposes provided that (1) you do not distribute or publish
# solutions, (2) you retain this notice, and (3) you provide clear
# attribution to UC Berkeley, including a link to http://ai.berkeley.edu.
#
# Attribution Information: The Pacman AI projects were developed at UC Berkeley.
# The core projects and autograders were primarily created by John DeNero
# (denero@cs.berkeley.edu) and Dan Klein (klein@cs.berkeley.edu).
# Student side autograding was added by Brad Miller, Nick Hay, and
# Pieter Abbeel (pabbeel@cs.berkeley.edu).
# baselineTeam.py
# ---------------
# Licensing Information: Please do not distribute or publish solutions to this
# project. You are free to use and extend these projects for educational
# purposes. The Pacman AI projects were developed at UC Berkeley, primarily by
# John DeNero (denero@cs.berkeley.edu) and Dan Klein (klein@cs.berkeley.edu).
# For more info, see http://inst.eecs.berkeley.edu/~cs188/sp09/pacman.html
from captureAgents import CaptureAgent
import distanceCalculator
import random, time, util, sys
from game import Directions
import game
from util import nearestPoint
BELIEFS = [] #a list of counters corresponding to probability distributions over each agent
INITIALIZED = False
#################
# Team creation #
#################
def createTeam(firstIndex, secondIndex, isRed,
               first = 'OffensiveReflexAgent', second = 'DefensiveReflexAgent'):
    """
    This function should return a list of two agents that will form the
    team, initialized using firstIndex and secondIndex as their agent
    index numbers. isRed is True if the red team is being created, and
    will be False if the blue team is being created.

    As a potentially helpful development aid, this function can take
    additional string-valued keyword arguments ("first" and "second" are
    such arguments in the case of this function), which will come from
    the --redOpts and --blueOpts command-line arguments to capture.py.
    For the nightly contest, however, your team will be created without
    any extra arguments, so you should make sure that the default
    behavior is what you want for the nightly contest.
    """
    # NOTE(review): eval() turns the class-name strings into classes; this is
    # only acceptable because the names come from trusted command-line options.
    return [eval(first)(firstIndex), eval(second)(secondIndex)]
##########
# Agents #
##########
class ReflexCaptureAgent(CaptureAgent):
    """
    A base class for reflex agents that chooses score-maximizing actions

    Also maintains module-level belief distributions (BELIEFS) over the
    opponents' positions, updated with noisy distance readings.
    """

    def registerInitialState(self, gameState):
        """Record the spawn point and, once per game, initialize the shared
        belief distributions over the opponents' positions."""
        self.start = gameState.getAgentPosition(self.index)
        CaptureAgent.registerInitialState(self, gameState)
        global INITIALIZED
        if INITIALIZED == True:
            return  # only initialize global stuff once
        # One belief Counter (position -> probability) per agent index.
        for i in range(gameState.getNumAgents()):
            BELIEFS.append(util.Counter())
        opponents = self.getOpponents(gameState)
        for opponent in opponents:
            # Each opponent starts at its known spawn with probability 1.
            initialPosition = gameState.getInitialAgentPosition(opponent)
            #print "initialPosition for agent " + str(opponent) + " is " + str(initialPosition)
            BELIEFS[opponent][initialPosition] = 1  # the rest are initialized to zero automatically
        INITIALIZED = True

    def observe(self, gameState):
        """
        updates the belief distribution over both enemies
        """
        enemies = self.getOpponents(gameState)
        agent_distances = gameState.getAgentDistances()  # these are the observations
        for enemy in enemies:
            # first we will check if we have exact information
            position_reading = gameState.getAgentPosition(enemy)
            if position_reading != None:
                #print "successful position reading"
                #print "reading! : " + str(position_reading)
                # Exact sighting: collapse the distribution to one position.
                new_belief_distribution = util.Counter()
                new_belief_distribution[position_reading] = 1.0
                BELIEFS[enemy] = new_belief_distribution
                continue
            # we don't know where the enemy is at this point, so we use exact inference
            noisy_distance = agent_distances[enemy]
            #print "noisy distance: " + str(noisy_distance)
            # the position of the calling agent
            my_position = gameState.getAgentPosition(self.index)
            # a new counter to store updated beliefs
            beliefs = util.Counter()
            for position in self.getLegalPositions(gameState):
                # Weight the prior by the likelihood of the noisy reading.
                distance = util.manhattanDistance(position, my_position)
                beliefs[position] = BELIEFS[enemy][position] * gameState.getDistanceProb(distance, noisy_distance)
                #if BELIEFS[enemy][position] >= 0.05:
                    #print distance
                    #print "beliefs[position]: " + str(beliefs[position])
            beliefs.normalize()
            BELIEFS[enemy] = beliefs
            # If the reading was inconsistent with every position, the mass
            # collapsed to zero; restart from a uniform distribution.
            if BELIEFS[enemy].totalCount() == 0:
                self.initializeUniformly(enemy, gameState)

    def elapseTime(self, gameState):
        """
        elapses time for the enemy who went previously
        """
        # if I am first to play, I don't want to elapse time for the enemy who "went" before me - we're not gonna worry, we'll just elapse time anyway
        enemy_we_are_updating = (self.index + 3) % 4
        newBeliefs = util.Counter()
        for position in self.getLegalPositions(gameState):
            former_probability_of_position = BELIEFS[enemy_we_are_updating][position]
            # Uniform transition model: the enemy takes each legal action
            # (including Stop) with equal probability.
            actions = self.getPossibleActions(position, gameState)
            for action in actions:
                successor = self.getSuccessorPosition(position, action)
                newBeliefs[successor] += former_probability_of_position * (1.0 / len(actions))
        BELIEFS[enemy_we_are_updating] = newBeliefs

    def getLegalPositions(self, gameState):
        """Return every non-wall (x, y) cell of the layout."""
        legalPositions = []
        width = gameState.data.layout.width
        height = gameState.data.layout.height
        for x in range(width):
            for y in range(height):
                if not gameState.hasWall(x,y):
                    legalPositions.append((x,y))
        return legalPositions

    def getSuccessorPosition(self, initialPosition, action):
        """
        there's gotta be something that does this already
        """
        # Map a compass action to the resulting grid position.
        if action == 'North':
            return (initialPosition[0], initialPosition[1] + 1)
        if action == 'South':
            return (initialPosition[0], initialPosition[1] - 1)
        if action == 'East':
            return (initialPosition[0] + 1, initialPosition[1])
        if action == 'West':
            return (initialPosition[0] - 1, initialPosition[1])
        if action == 'Stop':
            return initialPosition

    def getPossibleActions(self, position, gameState):
        """Return the moves (plus Stop) not blocked by a wall from `position`."""
        actions_to_return = []
        x,y = position
        if not gameState.hasWall(x+1, y):
            actions_to_return.append('East')
        if not gameState.hasWall(x-1, y):
            actions_to_return.append('West')
        if not gameState.hasWall(x, y+1):
            actions_to_return.append('North')
        if not gameState.hasWall(x, y-1):
            actions_to_return.append('South')
        actions_to_return.append('Stop')
        return actions_to_return

    def printBeliefs(self, enemy_index, gameState):
        # Debug helper: dump the positions holding at least 10% of the mass.
        print "beliefs for enemy " + str(enemy_index) + ":"
        legalPositions = self.getLegalPositions(gameState)
        for position in legalPositions:
            if BELIEFS[enemy_index][position] >= 0.1:
                print "(" + str(position[0]) + "," + str(position[1]) + "): " + str(BELIEFS[enemy_index][position])

    def getMostLikelyPosition(self, opponent):
        """Return the argmax position of the opponent's belief distribution."""
        max_prob = 0.0
        most_likely_position = (0,0)
        for position in BELIEFS[opponent]:
            if BELIEFS[opponent][position] > max_prob:
                max_prob = BELIEFS[opponent][position]
                most_likely_position = position
        return most_likely_position

    def captureUpdate(self, gameState):
        """If a teammate stands on an enemy's most likely square, assume that
        enemy was captured and reset its belief to its spawn position."""
        myTeam = self.getTeam(gameState)
        for agentIndex in myTeam:
            enemies = self.getOpponents(gameState)
            for enemy in enemies:
                if gameState.getAgentPosition(agentIndex) == self.getMostLikelyPosition(enemy):
                    #print "here"
                    beliefs = util.Counter()
                    for p in self.getLegalPositions(gameState):
                        beliefs[p] = 0
                    beliefs[gameState.getInitialAgentPosition(enemy)] = 1.0
                    BELIEFS[enemy] = beliefs

    def initializeUniformly(self, opponent, gameState):
        """Spread the opponent's belief mass uniformly over all legal cells."""
        legalPositions = self.getLegalPositions(gameState)
        probability_of_position = 1.0 / len(legalPositions)
        for position in legalPositions:
            BELIEFS[opponent][position] = probability_of_position

    def chooseAction(self, gameState):
        """
        Picks among the actions with the highest Q(s,a).
        """
        #print ""
        #print "entering chooseAction: currentAgent = " + str(self.index)
        opponents = self.getOpponents(gameState)
        # first update the distributions
        self.captureUpdate(gameState)
        self.elapseTime(gameState)
        """
        print "about to observe"
        for opponent in opponents:
            self.printBeliefs(opponent, gameState)
        self.observe(gameState)
        for opponent in opponents:
            self.printBeliefs(opponent, gameState)
        """
        self.observe(gameState)
        actions = gameState.getLegalActions(self.index)
        # You can profile your evaluation time by uncommenting these lines
        # start = time.time()
        values = [self.evaluate(gameState, a) for a in actions]
        # print 'eval time for agent %d: %.4f' % (self.index, time.time() - start)
        maxValue = max(values)
        bestActions = [a for a, v in zip(actions, values) if v == maxValue]
        foodLeft = len(self.getFood(gameState).asList())
        if foodLeft <= 2:
            # Nearly all food collected: head straight back to the start.
            bestDist = 9999
            for action in actions:
                successor = self.getSuccessor(gameState, action)
                pos2 = successor.getAgentPosition(self.index)
                dist = self.getMazeDistance(self.start,pos2)
                if dist < bestDist:
                    bestAction = action
                    bestDist = dist
            return bestAction
        return random.choice(bestActions)

    def getSuccessor(self, gameState, action):
        """
        Finds the next successor which is a grid position (location tuple).
        """
        successor = gameState.generateSuccessor(self.index, action)
        pos = successor.getAgentState(self.index).getPosition()
        if pos != nearestPoint(pos):
            # Only half a grid position was covered
            return successor.generateSuccessor(self.index, action)
        else:
            return successor

    def evaluate(self, gameState, action):
        """
        Computes a linear combination of features and feature weights
        """
        features = self.getFeatures(gameState, action)
        weights = self.getWeights(gameState, action)
        return features * weights

    def getFeatures(self, gameState, action):
        """
        Returns a counter of features for the state
        """
        features = util.Counter()
        successor = self.getSuccessor(gameState, action)
        features['successorScore'] = self.getScore(successor)
        return features

    def getWeights(self, gameState, action):
        """
        Normally, weights do not depend on the gamestate. They can be either
        a counter or a dictionary.
        """
        return {'successorScore': 1.0}
class OffensiveReflexAgent(ReflexCaptureAgent):
    """
    A reflex agent that seeks food. This is an agent
    we give you to get an idea of what an offensive agent might look like,
    but it is by no means the best or only way to build an offensive agent.
    """

    def getFeatures(self, gameState, action):
        """Feature vector: remaining food count, current score, homesickness
        when carrying food, proximity of visible ghosts, and distance to the
        nearest food."""
        features = util.Counter()
        successor = self.getSuccessor(gameState, action)
        foodList = self.getFood(successor).asList()
        features['successorScore'] = -len(foodList)#self.getScore(successor)
        myPos = successor.getAgentState(self.index).getPosition()
        features['pointsScore'] = self.getScore(successor)
        # if the pacman has some food and is near home, 'homeSickNess' goes up
        numCarrying = successor.getAgentState(self.index).numCarrying
        pos2 = successor.getAgentPosition(self.index)
        distanceHome = self.getMazeDistance(self.start,pos2)
        if numCarrying >= 1 and distanceHome <= 50:
            features['homesickness'] = 1.0 / distanceHome
        # Compute distance to nearest ghost if ghost is 3 or closer
        enemies = [successor.getAgentState(i) for i in self.getOpponents(successor)]
        ghosts = [a for a in enemies if not a.isPacman and a.getPosition() != None]
        if len(ghosts) > 0:
            dists = [self.getMazeDistance(myPos, a.getPosition()) for a in ghosts]
            if min(dists) <= 3:
                # NOTE(review): getWeights gives 'ghostNear' a positive
                # weight (+100), which rewards being close to a ghost --
                # confirm the sign is intended.
                features['ghostNear'] = 1.0 / min(dists)
        # Compute distance to the nearest food
        if len(foodList) > 0:  # This should always be True, but better safe than sorry
            minDistance = min([self.getMazeDistance(myPos, food) for food in foodList])
            features['distanceToFood'] = minDistance
        return features

    def getWeights(self, gameState, action):
        return {'successorScore': 100, 'distanceToFood': -1, 'ghostNear': 100, 'homesickness': +2000, 'pointsScore': 10000}
class DefensiveReflexAgent(ReflexCaptureAgent):
    """
    A reflex agent that keeps its side Pacman-free. Again,
    this is to give you an idea of what a defensive agent
    could be like. It is not the best or only way to make
    such an agent.
    """

    def getFeatures(self, gameState, action):
        """Feature vector: on-defense flag, distance to the nearest enemy's
        most likely (belief argmax) position, and stop/reverse penalties."""
        features = util.Counter()
        successor = self.getSuccessor(gameState, action)
        myState = successor.getAgentState(self.index)
        myPos = myState.getPosition()
        # Computes whether we're on defense (1) or offense (0)
        features['onDefense'] = 1
        if myState.isPacman: features['onDefense'] = 0
        # NOTE(review): closest_ghost / min_d are never used below -- dead locals.
        closest_ghost = None
        min_d = float("inf")
        opponents = self.getOpponents(gameState)
        # an array of two positions: each denoting an enemy's
        # most likely coordinates. -1 for non-enemy.
        most_likely_locations = [-1,-1,-1,-1]
        for enemy_index in opponents:
            most_likely_locations[enemy_index] = (0,0)
        for enemy_index in opponents:
            # argmax over this enemy's belief distribution
            max_arg = ((0,0), 0.0)
            for x in BELIEFS[enemy_index]:
                p = BELIEFS[enemy_index][x]
                if p >= max_arg[1]:
                    max_arg = (x,p)
            most_likely_locations[enemy_index] = max_arg[0]
            #print most_likely_locations[enemy_index]
            #self.printBeliefs(enemy_index, gameState)
        distances = [self.getMazeDistance(myPos, position) for position in most_likely_locations if not position == -1 ]
        #print min(distances)
        features['distanceToNearestEnemy'] = min(distances)
        if action == Directions.STOP: features['stop'] = 1
        rev = Directions.REVERSE[gameState.getAgentState(self.index).configuration.direction]
        if action == rev: features['reverse'] = 1
        return features

    def getWeights(self, gameState, action):
        # NOTE(review): 'numInvaders' is weighted here but never set in
        # getFeatures above, so that term is always zero.
        return {'numInvaders': -1000, 'onDefense': 100, 'distanceToNearestEnemy': -10, 'stop': -100, 'reverse': -2}
|
#!/usr/bin/env python3
from ev3dev2.motor import LargeMotor, MediumMotor, OUTPUT_A, OUTPUT_B, SpeedPercent, MoveTank
from ev3dev2.sensor import INPUT_1
from ev3dev2.sensor.lego import TouchSensor
from ev3dev2.led import Leds
# Drive the large motor for ten full rotations at 100% speed.
# NOTE(review): "in1:i2c3:M2" looks like an I2C sensor-port address rather
# than one of the imported OUTPUT_A/OUTPUT_B constants -- confirm the port.
motor = LargeMotor("in1:i2c3:M2")
motor.on_for_rotations(SpeedPercent(100), 10)
|
# -*- coding: UTF-8 -*-
import os, sys, re
import xml.etree.ElementTree as etree
import xml.dom.minidom as doc
import math
import numpy as np
import pdb
from pylab import *
import matplotlib.pyplot as plt
# Traffic-demand levels (strings, used both as file-name suffixes and as
# legend labels via method_name, which mirrors this list).
traffic_demand = ['400','600','800','900','1000','1100','1200','1300']#,'700','1400','1500']#,'1600','1700','1800','1900','2000']
method_name = ['400','600','800','900','1000','1100','1200','1300']#,'700''1400','1500']#,'1600','1700','1800','1900','2000']#,'Mixed','Game'
# Static intersection-control description (parsed here but not used below).
doc1 = etree.parse('D:\\Journal_paper\\Traffic signal control and vehicle rerouting based on extensive game\\Simulation\\IntersectionCtrl_info.xml')
JunctionRoot = doc1.getroot()
Tc = 60.0          # signal cycle length in seconds (only used by disabled code below)
Method = 'BPR'     # tag appended to output figure filenames
# pdb.set_trace()
############################### Total number of vehicles along time ######################BPRGameFullConnected
# Per-demand time series, keyed by index k into traffic_demand.
VehNumAlongTime = {}
InsertedAlongTime = {}
WaitToInsert = {}
ArrivalsAlongTime= {}
MeanSpeedAlongTime = {}
for k in xrange(len(traffic_demand)):
    print traffic_demand[k]
    # Simulation summary output for this demand level; one <step> element per
    # simulated second with running/inserted/ended/meanSpeed/waiting counters.
    doc13 = etree.parse('D:\\Journal_paper\\Traffic signal control and vehicle rerouting based on extensive game\\Simulation\\BPR_simulation'+traffic_demand[k]+'.xml')
    GameStepsRoot = doc13.getroot()
    Steps = GameStepsRoot.findall('step')
    VehNumAlongTime[k] = []
    InsertedAlongTime[k] = []
    WaitToInsert[k] = []
    ArrivalsAlongTime[k] = []
    MeanSpeedAlongTime[k] = []
    for i in xrange(len(Steps)):
        # Copy the per-step counters out of the XML attributes.
        VehNumAlongTime[k].append(float(Steps[i].get('running')))
        InsertedAlongTime[k].append(float(Steps[i].get('inserted')))
        ArrivalsAlongTime[k].append(float(Steps[i].get('ended')))
        MeanSpeedAlongTime[k].append(float(Steps[i].get('meanSpeed')))
        WaitToInsert[k].append(float(Steps[i].get('waiting')))
# Figure 1: two stacked panels -- running vehicle count (top) and the
# backlog of vehicles waiting to be inserted (bottom), one curve per demand.
AllFig = plt.figure(figsize=(10,12))
ax = AllFig.add_subplot(211)#subplots(figsize=(15,10))
for i in xrange(len(traffic_demand)):
    ##### BPAR
    # NOTE(review): the '1300' series is adjusted by hand before plotting
    # (adds min(j,40) to each sample) -- presumably compensating for a
    # simulation artefact; confirm against the raw data before reusing.
    if traffic_demand[i]=='1300':
        for j in xrange(1,len(VehNumAlongTime[i][:])):
            VehNumAlongTime[i][j]=VehNumAlongTime[i][j]+np.min([j,40])
    # for j in xrange(5800,6400):
    #     VehNumAlongTime[i][j]=VehNumAlongTime[i][j]+0
    ##### GSCR
    # if traffic_demand[i]=='1300':
    #     for j in xrange(1,len(VehNumAlongTime[i][:])):
    #         VehNumAlongTime[i][j]=VehNumAlongTime[i][j]+np.min([j,30])
    ############Fix
    # if traffic_demand[i]=='1300':
    #     for j in xrange(1,len(VehNumAlongTime[i][:])):
    #         VehNumAlongTime[i][j]=VehNumAlongTime[i][j]+2*np.min([j,40])
    # if traffic_demand[i]=='1200':
    #     for j in xrange(1,len(VehNumAlongTime[i][:])):
    #         VehNumAlongTime[i][j]=VehNumAlongTime[i][j]+np.min([j,40])
    ax.plot(VehNumAlongTime[i][:],linewidth=5)
ax.legend(method_name,prop={'size':15},loc='upper left')
# ax.set_xlabel('Time(s)',fontsize=25)
ax.set_ylabel('Vehicle Number',fontsize=15)
plt.tick_params(labelsize = 15)
plt.ylim([0,1000])
title('Vehicle number in the network',fontsize=20)
grid()
# fig,ax = subplots(figsize=(15,10))
ax = AllFig.add_subplot(212)#
for i in xrange(len(traffic_demand)):
    # if traffic_demand[i]=='1300':
    #     WaitToInsert[i]=WaitToInsert[i-1]
    ax.plot(WaitToInsert[i][:],linewidth=5)
ax.legend(method_name,prop={'size':15})
ax.set_xlabel('Time(s)',fontsize=15)
ax.set_ylabel('Vehicle Number',fontsize=15)
plt.tick_params(labelsize = 15)
plt.ylim([0,1700])
title('The number of vehicles waiting to insert the network',fontsize=20)
grid()
# fig.savefig('D:\\Journal_paper\\Traffic signal control and vehicle rerouting based on extensive game\\Simulation\\Fig\\Wait_to_Insert.pdf', format='pdf', dpi=1000)
# Save the combined figure in both PDF and EPS for the paper.
AllFig.savefig('D:\\Journal_paper\\Traffic signal control and vehicle rerouting based on extensive game\\IEEEtran\\IEEEtran\\Fig\\vehicle_Number_'+Method+'.pdf', format='pdf', dpi=1000)
AllFig.savefig('D:\\Journal_paper\\Traffic signal control and vehicle rerouting based on extensive game\\IEEEtran\\IEEEtran\\Fig\\vehicle_Number_'+Method+'.eps', format='eps', dpi=1000)
# Figure 2: cumulative number of vehicles inserted into the network.
fig,ax = subplots(figsize=(15,10))
for i in xrange(len(traffic_demand)):
    ax.plot(InsertedAlongTime[i][:],linewidth=5)
ax.legend(method_name,prop={'size':25})
ax.set_xlabel('Time(s)',fontsize=25)
ax.set_ylabel('Vehicle Number',fontsize=25)
plt.tick_params(labelsize = 20)
title('The number of vehicles inserted into the network',fontsize=30)
grid()
fig.savefig('D:\\Journal_paper\\Traffic signal control and vehicle rerouting based on extensive game\\Simulation\\Fig\\Inserted_VehNum.pdf', format='pdf', dpi=1000)
# Figure 3: cumulative arrivals; also print each demand's completion ratio
# (final arrivals / (demand * 5) -- presumably a 5-hour horizon; verify).
fig,ax = subplots(figsize=(15,10))
for i in xrange(len(traffic_demand)):
    ax.plot(ArrivalsAlongTime[i][:],linewidth=5)
    print ArrivalsAlongTime[i][-1]/(int(traffic_demand[i])*5.0)
ax.legend(method_name,prop={'size':25},loc='upper right')
ax.set_xlabel('Time(s)',fontsize=25)
ax.set_ylabel('Vehicle Number',fontsize=25)
plt.tick_params(labelsize = 20)
title('The number of vehicles reaching their destination',fontsize=30)
grid()
fig.savefig('D:\\Journal_paper\\Traffic signal control and vehicle rerouting based on extensive game\\Simulation\\Fig\\Arrival_vehicle_Number.pdf', format='pdf', dpi=1000)
# (Disabled) per-signal-cycle average speed plot.
# fig,axarr = subplots(figsize=(15,10))
# for i in xrange(MethodsNum):
#     cycleSpeed = np.zeros(int(StepNum/Tc))
#     for j in xrange(int(StepNum/Tc)):
#         cycleSpeed[j] = np.mean(MeanSpeedAlongTime[i][int(Tc*j):int(Tc*(j+1)+1)])
#     axarr.plot(cycleSpeed,colorstr[i],linewidth=5)
# axarr.legend(['Fixed-time','BackPressure','Game control'])
# title('The mean speed for vehicles along time')
# Overall mean speed per demand level (computed but not plotted; only the
# raw per-step series is drawn below).
MeanSpeedAlongTime0 = np.zeros(len(traffic_demand))
for k in xrange(len(traffic_demand)):
    MeanSpeedAlongTime0[k]=np.mean(MeanSpeedAlongTime[k])
# for i in xrange(2):
#     for j in range(20,len(MeanSpeedAlongTime[i])):
#         MeanSpeedAlongTime0[i][j-1] = np.mean(MeanSpeedAlongTime[i][j-20:j])
# Figure 4: instantaneous mean speed per demand level.
fig,axarr = subplots(figsize=(15,10))
for i in xrange(len(traffic_demand)):
    axarr.plot(MeanSpeedAlongTime[i],linewidth=5)
axarr.legend(method_name,prop={'size':25})
axarr.set_xlabel('Time(s)',fontsize=25)
axarr.set_ylabel('Speed (m/s)',fontsize=25)
plt.tick_params(labelsize = 20)
title('The average speed of vehicles under '+Method,fontsize=30)
grid()
fig.savefig('D:\\Journal_paper\\Traffic signal control and vehicle rerouting based on extensive game\\IEEEtran\\IEEEtran\\Fig\\Mean_Speed_'+Method+'.pdf', format='pdf', dpi=1000)
fig.savefig('D:\\Journal_paper\\Traffic signal control and vehicle rerouting based on extensive game\\IEEEtran\\IEEEtran\\Fig\\Mean_Speed_'+Method+'.eps', format='eps', dpi=1000)
show()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Russian CLI description rendered by optparse's --help output.
# (Runtime string -- deliberately left untranslated.)
desc = """Выключение удаленных ПК под управлением Windows.
Зависимости: Python3, Samba (для net rpc shutdown)
В качестве PC_FOR_SHUTDOWN может выступать IP-адрес, DNS-имя
компьютера, или номер кабинета (237, 239).
Скрипт запрашивает пароль для указанного пользователя.
"""
from optparse import OptionParser
from subprocess import call
import getpass
def shutdown(pc, username, password, print_only=False, reboot=False):
    """Shutting down remote windows PCs.

    `pc` is a single IP address or an inclusive range written "first-last".
    With print_only=True the commands are echoed instead of executed.
    """
    import ipaddress
    # Base command; the -C message shown on the remote machine is in Russian.
    base = ('net rpc shutdown -f -t 00 '
            '-C "Извините, компьютер выключается... Не забудьте сохранить Ваши данные."')
    if reboot:
        base += ' -r'
    # Resolve the address range (a single address is a one-element range).
    if '-' in pc:
        first_s, last_s = pc.split('-')
        current, last = ipaddress.ip_address(first_s), ipaddress.ip_address(last_s)
    else:
        current = last = ipaddress.ip_address(pc)
    while current <= last:
        full_cmd = base + ' -I %s -U %s%%%s &' % (current, username, password)
        current += 1
        if print_only:
            print(full_cmd)
        else:
            call(full_cmd, shell=True)
    return None
def main():
    """Parse command-line arguments, prompt for the password and dispatch.

    Expects exactly two positional arguments: the target (IP, IP range or a
    known room number) and the user name; otherwise prints the help text.
    """
    use = "%prog [Options] PC_FOR_SHUTDOWN [DOMAIN/]USERNAME"
    parser = OptionParser(description=desc,
                          prog="pc-down.py",
                          version="%prog\nVersion 0.5",
                          usage=use
                          )
    parser.add_option('-p', '--print', default=False, action='store_true',
                      help='только вывод на экран команд выключения ПК')
    parser.add_option('-r', '--reboot', default=False, action='store_true',
                      help='Перезагрузка вместо выключения')
    (options, args) = parser.parse_args()
    #print(options, args)
    if len(args) == 2:
        password = getpass.getpass()
        # Known room numbers map to that classroom's fixed IP range.
        if args[0] == '233':
            pc = '192.168.0.51-192.168.0.68'
        elif args[0] == '237':
            pc = '192.168.0.101-192.168.0.120'
        elif args[0] == '239':
            pc = '192.168.0.121-192.168.0.138'
        else:
            pc = args[0]
        # options.print is valid in Python 3 (print is not a keyword there).
        shutdown(pc, args[1], password, options.print, options.reboot)
    else:
        parser.print_help()
    return None


if __name__ == '__main__':
    main()
|
# -*- encoding: utf-8 -*-
# Read whitespace-separated words and print every word that occurs the
# maximal number of times, as "<count> : <word>".
print('Введите слова через пробел')
words = input().split()
dictionary = {}
for word in words:
    dictionary[word] = dictionary.get(word, 0) + 1
if dictionary:  # guard: empty input previously crashed max() with ValueError
    maximum = max(dictionary.values())
    for key, value in dictionary.items():
        if value < maximum:
            # BUG FIX: this was `break`, which stopped at the first
            # non-maximal word and silently skipped later maximal ones.
            continue
        print(str(value) + ' : ' + key)
import jinja2
import jinja2.ext
from cutout.constants import STENCIL_PATH_PREFIX
class Stencil(jinja2.ext.Extension):
    """Jinja2 extension registering the `stencil` and `stencil_path` filters.

    `stencil` keeps `value` only when the cookiecutter context enables the
    given pattern via an `include_<pattern>` flag; `stencil_path`
    additionally prefixes excluded paths with STENCIL_PATH_PREFIX and counts
    how many were excluded.
    """

    counter = 0  # number of paths stencilled out so far

    def __init__(self, environment, *args, **kwargs):
        @jinja2.contextfilter
        def stencil(ctx, value, pattern):
            flag = ctx.get("cookiecutter", {}).get(f"include_{pattern}")
            # String flags are coerced case-insensitively ("True" -> True).
            if isinstance(flag, str):
                flag = flag.lower() == "true"
            return value if flag else ""

        @jinja2.contextfilter
        def stencil_path(ctx, value, pattern):
            rendered = stencil(ctx, value, pattern)
            if rendered == "":
                self.counter += 1
                return f"{STENCIL_PATH_PREFIX}{value}"
            return rendered

        environment.filters["stencil"] = stencil
        environment.filters["stencil_path"] = stencil_path
        super().__init__(environment)
|
import numpy as np
from scipy.signal import spectrogram
class PreProcessing():
    """Convert a raw audio signal into a normalised log-magnitude spectrogram."""

    def __init__(self, sampling_rate=22050):
        # Sample rate (Hz) assumed for all incoming audio.
        self.sampling_rate = sampling_rate

    def __call__(self, audio_array, logging):
        """Return the normalised spectrogram of `audio_array`.

        `logging` is accepted for interface compatibility but unused.
        """
        audio_array = np.squeeze(audio_array)
        spectrum = self.spect(audio_array)
        normal_spectrum = self.normalize_spectrum(spectrum)
        return normal_spectrum

    def spect(self, audio):
        """Log-magnitude spectrogram with shape (freq_bins, frames, 1).

        Generalised: the frame axis now follows the input length instead of
        the previous hard-coded reshape to (257, 321, 1); for audio that
        yields exactly 321 frames the result is byte-identical.
        """
        sampling_rate = self.sampling_rate
        NFFT = 512
        f, t, Sxx = spectrogram(x=audio, fs=sampling_rate, window=np.hamming(NFFT),
                                nfft=NFFT, noverlap=int(NFFT / 3), nperseg=NFFT,
                                mode='magnitude')
        # dB scale.  NOTE(review): all-zero frames give Sxx == 0 and hence
        # -inf here; no epsilon is added to preserve existing behaviour.
        spectrum = 20 * np.log10(Sxx)
        # Append a trailing channel axis (replaces the hard-coded reshape).
        return spectrum[..., np.newaxis]

    def normalize_spectrum(self, spectrum):
        """Zero-mean / unit-variance normalisation over the whole spectrogram."""
        return (spectrum - np.mean(spectrum)) / np.std(spectrum)
import sys
import getopt
import re
import struct
import numpy
from application.lib.instrum_classes import VisaInstrument # VISAINSTRUMENT
__DEBUG__ = True
class AwgException(Exception):
pass
class Instr(VisaInstrument):
    """
    The QL355TP instrument (voltage source to bias amplifiers)
    """

    def initialize(self, visaAddress="GPIB::11"):
        # Open the VISA link and probe the device identity.  Errors are only
        # printed, not raised, so a missing instrument does not abort startup.
        print 'Initializing ' + self.name() + ' with adress ', visaAddress, ':'
        self._visaAddress = visaAddress
        try:
            # self.clearDevice()
            self.getID()
        except:
            print "ERROR: Cannot initialize instrument!"

    def getID(self):
        # Standard SCPI identification query.
        return self.ask('*IDN?')

    def state(self):
        """
        Returns the output state of the instrument.
        """
        return self.ask("OUTPUT?")

    def setState(self, state):
        """
        Sets the output state of the instrument.
        """
        buf = "OFF"
        if state == True:
            buf = "ON"
        self.write("OUTPUT %s" % (buf))

    def saveState(self, name):
        """
        Saves the state of the instrument.
        """
        # Persisting the setup is currently disabled.
        # self.saveSetup(name)
        # return name
        return None

    def restoreState(self, name):
        """
        Restores the state of the instrument.
        """
        return None # self.loadSetup(name)

    # Type all your methods below
    def triggerInterval(self):
        # Trigger delay in seconds, as reported by the instrument.
        return float(self.ask("TRIG:DEL?"))

    def turnOnOff(self, flag):
        # NOTE(review): flag is formatted with %f (e.g. "OPALL 1.000000") --
        # confirm the instrument accepts a float argument for OPALL.
        self.write("OPALL %f" % flag)
|
import cgi
import datetime
import os
from http.cookies import *
import sqlite3
try:
if 'HTTP_COOKIE' in os.environ:
cookie_string=os.environ.get('HTTP_COOKIE')
ck=SimpleCookie()
ck.load(cookie_string)
if 'username' in cookie_string:
id=ck['username'].value
else:
id="Nil"
else:
id="None"
print("Content-Type:text/html\n\n")
form=cgi.FieldStorage()
con=sqlite3.connect("F:\Tom\staffapp.db")
cur=con.cursor()
cur.execute('select * from leave_tab where id='+str(id)+' and approv=3')
data=cur.fetchall()
cur.execute('select name from staff_det where id='+str(id))
name=cur.fetchone()
hh='''
<html>
<head>
<link rel="icon" href="../favicon.ico" type="image/x-icon">
<table bgcolor="black" width="100%">
<tr>
<td><img src="../logo.jpg" width="148" height="130"/></td>
<td><img src="../clg4.jpg" width="1187" height="130"/></td>
</tr>
</table>
<link rel="stylesheet" type="text/css" href="../style.css"/>
<title>Notifications</title>
</head>
<body>
<form>
<table width="100%" bgcolor="lightyellow">
<tr>
<td align="left" style="font-size:16px;font-family:verdana;font-weight:bold;color:red;"><i><b>Welcome, '''+str(name[0])+'''</b></i></td>
<td align="right"><a href="stafflogin.py" style="text-decoration:none;">
<input type="button" name="logout" style="font-size:16px;font-family:verdana;font-weight:bold;color:green;border-color:green;background-color:lightyellow;" value="Logout"/></a></td>
</td></tr>
</table><br>
</form>
<form>
<table bgcolor="f7f5fe" align="center" border="0" width="80%">
<caption><h3>NOTIFICATIONS</h3></caption>
<tr><b>
<td align="center" class="zero"><h4>Appplied Date</h4></td>
<td align="center" class="zero"><h4>Type</h4></td>
<td align="center" class="zero"><h4>From Date</h4></td>
<td align="center" class="zero"><h4>To Date</h4></td>
<td align="center" class="zero"><h4>No. of days</h4></td>
<td align="center" class="zero"><h4>Reason</h4></td>
</b>
</tr>
'''
    # One table row per fully-approved (approv=3) leave record.
    for i in data:
        # i[3] / i[4] are 'YYYY-MM-DD' from-date and to-date strings.
        h=i[3].split('-')
        g=i[4].split('-')
        a=datetime.date(int(h[0]),int(h[1]),int(h[2]))
        b=datetime.date(int(g[0]),int(g[1]),int(g[2]))
        c=b-a
        # Leave span counts both end dates, hence the +1.
        day=int(c.days)+1
        h1='''
<tr>
<td align="center" class="one">'''+str(i[1])+'''</td>
<td align="center" class="two">'''+str(i[2])+'''</td>
<td align="center" class="one">'''+str(i[3])+'''</td>
<td align="center" class="two">'''+str(i[4])+'''</td>
<td align="center" class="one">'''+str(day)+'''</td>
<td align="center" class="two">'''+str(i[6])+'''</td>
</tr>
'''
        hh=hh+h1
cur.execute('select ldays from staff_det where id='+str(id))
f=cur.fetchall()
f1=f[0]
ht='''
<tr>
<td></td>
<td></td>
<td></td>
<td align="center" style="font-size:16px;font-family:verdana;font-weight:bold;"><b>Total leaves taken:</b></td>
<td align="center" style="font-size:16px;font-family:verdana;font-weight:bold;"><b>'''+str(f1[0])+'''</b></td>
</tr>'''
if f1[0]>15:
ht7='''
<tr></b>
<td><b>You have already taken '''+str(f1[0]-15)+''' unpaid leaves</b></td>
</tr>'''
ht=ht+ht7
    # Static page footer.
    hy='''</table>
</form>
</body>
<footer>
<table style="width:100%;" align="center">
<tr>
<td style="width:33%;text-align:center;font-size:18px;">3-5-1026, Narayanguda, Hyderabad, Telangana -500029</td>
<td style="width:33%;text-align:center;font-size:18px;">&emsp;&emsp;&emsp;Copyright &copy; KMIT</td>
<td style="width:33%;text-align:center;font-size:18px;">&emsp;&emsp;&emsp;Website: <a href="http://www.kmit.in/" style="text-decoration:none"/>kmit.in</td>
</tr>
</table>
</footer>
</html>
'''
    hf=hh+ht+hy
    # NOTE(review): the page contains no {placeholders}, so .format(**locals())
    # is a no-op -- and would raise if a literal brace ever appeared above.
    print(hf.format(**locals()))
except Exception as e:
    # Any failure (bad cookie, DB error, ...) is dumped onto the page.
    print(e)
|
from django.db import models
# Create your models here.
class Metals(models.Model):
    """A metal symbol and its display name."""
    metal_short = models.CharField(max_length=3, unique=True)  # e.g. a 3-letter code
    metal_name = models.CharField(max_length=25)
    created = models.DateTimeField(auto_now_add=True)

    class Meta:
        # NOTE(review): "Metal's" (stray apostrophe) is shown verbatim in the
        # admin; probably intended to read "Metals".  Left as-is: it is a
        # user-visible runtime string.
        verbose_name_plural = 'Metal\'s'

    def __str__(self):
        return f'{self.metal_short} - {self.metal_name}'
class Currencies(models.Model):
    """A currency code and its display name."""
    currency_short = models.CharField(max_length=3)  # e.g. ISO-style 3-letter code
    currency_name = models.CharField(max_length=25)
    created = models.DateTimeField(auto_now_add=True)

    class Meta:
        verbose_name_plural = 'Currencies'

    def __str__(self):
        return f'{self.currency_name} - {self.currency_short}'
class Price_List(models.Model):
    """Price of one metal in one currency on a given date."""
    metal = models.ForeignKey(Metals, on_delete=models.CASCADE)
    currency = models.ForeignKey(Currencies, on_delete=models.CASCADE)
    date = models.DateField()
    # max 6 digits total, 3 after the decimal point
    price = models.DecimalField(decimal_places=3, max_digits=6)
class Transactions(models.Model):
    """A purchase of a metal on a given date."""
    metal_id = models.ForeignKey(Metals, on_delete=models.CASCADE)
    purchase_date = models.DateField()
|
# Read a base and an exponent (Estonian prompts) and print base ** exponent.
base = int(input("Sisestage astme alus: "))
exponent = int(input("Sisestage astendaja: "))
print(base ** exponent)
# coding=utf-8
from django.db import models
from django.contrib.auth.models import User, UserManager
from article.models import Article
class CustomUser(User):
    """Site user profile extending django.contrib.auth's User."""
    timezone = models.CharField(max_length=50, default='Europe/Moscow')
    ava = models.ImageField(upload_to='avatars', verbose_name=u'Аватар', blank=True)
    karma = models.IntegerField(default=0, verbose_name=u'Рейтинг', blank=True)
    quantity_topics = models.IntegerField(default=0, verbose_name=u'Количество статей', blank=True)
    objects = UserManager()

    class Meta:
        db_table = 'CustomUser'
        # NOTE(review): verbose_name holds the plural phrase and
        # verbose_name_plural the singular one -- these look swapped; they are
        # user-visible runtime strings, so left untouched pending confirmation.
        verbose_name = u'профили пользователей'
        verbose_name_plural = u'Профиль пользователя'

    def get_quantity_articles(self):
        # NOTE(review): filters Article by article_categories=self.id -- a user
        # id matched against what looks like a category field; verify intent.
        articles = Article.objects.filter(article_categories=self.id)
        return len(articles)
    get_quantity_articles.short_description = u'количество статей'

    def get_blog_articles(self):
        # Two querysets for the user's blog page: by popularity and by
        # creation date (newest first).
        popular_articles = Article.objects.filter_by_popularity(user=self.id)
        fresh_articles = Article.objects.filter(user=self.id).order_by('-article_dateCreate')
        return popular_articles, fresh_articles
    get_blog_articles.short_description = u'блог'

    def __unicode__(self):
        return self.username
from collections import Counter
# Relative letter frequencies (percent) of English text; the reference
# distribution for the frequency analysis below.
# FIX: the original table was missing 'r', 's', 't' and 'u' entirely, which
# skewed every quotient; values for those letters are restored from a
# standard English letter-frequency table.
# NOTE(review): 'h' and 'i' share the same value -- looks like a copy slip in
# the original data; left untouched pending verification.
english_freq = {
    'a': 8.2389258, 'b': 1.5051398, 'c': 2.8065007, 'd': 4.2904556,
    'e': 12.813865, 'f': 2.2476217, 'g': 2.0327458, 'h': 6.1476691,
    'i': 6.1476691, 'j': 0.1543474, 'k': 0.7787989, 'l': 4.0604477,
    'm': 2.4271893, 'n': 6.8084376, 'o': 7.5731132, 'p': 1.9459884,
    'q': 0.0958366, 'r': 5.9879, 's': 6.3270, 't': 9.0562,
    'u': 2.7580, 'v': 0.9866131, 'w': 2.3807842, 'x': 0.1513210,
    'y': 1.9913847, 'z': 0.0746517
}


def calc_freq_quotient(cipher: bytes):
    """Mean absolute deviation between `cipher`'s letter frequencies and the
    English reference frequencies (lower = more English-like).

    Only lowercase ASCII letters are counted; all other bytes simply dilute
    the percentages via len(cipher).
    """
    counts = Counter(cipher)
    cipher_freq = {letter: (counts.get(ord(letter), 0) * 100) / len(cipher)
                   for letter in english_freq.keys()}
    all_letters = english_freq.keys()
    deviation = lambda fa, fb: abs(fa - fb)
    return sum([deviation(english_freq[letter], cipher_freq[letter])
                for letter in all_letters]) / len(all_letters)
def single_byte_xor(text: bytes, key: int) -> bytes:
    """XOR every byte of `text` with the single-byte `key`."""
    return bytes(byte ^ key for byte in text)
def single_bruteforce_decrypt(chipher: bytes) -> list:
    """Return all 256 candidate plaintexts of `chipher`, one per XOR key.

    FIX: the return annotation previously said `float`, but the function
    returns a list of `bytes` candidates (index == key).
    """
    return [single_byte_xor(chipher, key) for key in range(256)]
def decrypt_message(cipher: bytes) -> bytes:
    """Pick the candidate plaintext whose letter frequencies best match English."""
    candidates = single_bruteforce_decrypt(cipher)
    best = min(range(len(candidates)),
               key=lambda i: calc_freq_quotient(candidates[i]))
    return candidates[best]
def main_test():
    """Decrypt the fixed challenge ciphertext and print the plaintext."""
    cipher = bytes.fromhex("1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736")
    print(f"{decrypt_message(cipher)}\n\n")


if __name__ == '__main__':
    main_test()
|
#!/usr/bin/env python
'''
Program : TableToTGraph.py
Author : b.k.gjelsten@fys.uio.no
Version : 1.0 22.11.2013
Description :
STATUS 2014-01-30
- Has given some features to munch.py (which makes plots (.pdf)), which is then uptodate with the latest technologies
- TableToTGraph.py is still the state-of-the-art TGraph-maker
- Eventually need to see if it should become part of munch.py
'''
import sys,os, array
import ROOT
from bkgjelstenArgReader import ArgReader
from kilelib import DictFromTable
from kilelib import GetPlainArray
# ##################################################### GLOBAL METHODS
# ##################################################### GLOBAL METHODS
# ##################################################### GLOBAL METHODS
# ###################################################################
def MakeTGraph(table, coords, resvar, coordsT=[], resvarT='', VB=1):
# table is a dict: table={x:[...], y:[...], N1:[...], ...}
# Both coord and var (both arrays) can be complex entities, like x-y,2x
# resvar is just one variable ; there is only one 'z'-var per TGraph (but the root file could contain a list of TGraphs)
# 1 Init
if resvarT == '': resvarT = resvar
for coord in coords:
if coord not in coordsT: coordsT[coord] = coord
#print 'coordsT: ', coordsT
# 2 Build plain arrays
coordval = {}
resval = []
for coord in coords:
coordval[coord] = GetPlainArray(table=table, var=coord, arraytype='f')
if VB>1: print '\n', coord, len(coordval[coord]), coordval[coord]
resval = GetPlainArray(table=table, var=resvar, arraytype='f')
if VB>1: print '\n', resvar, len(resval), resval
# 3 Make TGraphs
if len(coords) == 1:
tGraph = ROOT.TGraph (len(resval), coordval[coords[0]], resval)
print 'here'
elif len(coords) == 2:
tGraph = ROOT.TGraph2D(len(resval), coordval[coords[0]], coordval[coords[1]], resval)
if VB>2:
for i in range(len(resval)): print '%4i %9.3f %9.3f %15.9f' %(i, coordval[coords[0]][i], coordval[coords[1]][i], resval[i])
else:
tGraph = 'Error::MakeTGraph (Returning this error message.) Irregular coords: %s' %(coords)
print tGraph
# 4 Post treatment
tGraph.SetName(resvarT)
#print "SetName: %s" %(resvarT)
tGraph.SetTitle(resvarT)
tax = tGraph.GetXaxis()
tax.SetTitle(coordsT[coords[0]])
#print 'tax: ', coordsT[0]
#print tGraph.GetXaxis().GetTitle()
if len(coords) >= 2:
tay = tGraph.GetYaxis()
tay.SetTitle(coordsT[coords[1]])
if VB>=0: print 'INFO::MakeTGraph Created TGraph (%iD) %s with %i entries' %(len(coords), resvarT, len(resval))
# 9 Return
# print 'ret: ', tGraph.GetTitle()
return tGraph
# ###################################################################
# ###################################################################
# ###################################################################
class TableToTGraph:
def __init__(s, cmd=[], optD={}):
# ====================== PRE INIT
if 'argv' in optD: s.argv = optD['argv']
else: s.argv = sys.argv
s.cmd = cmd
s.myname = sys.argv[0].split('/').pop()
s.VB = 1
s.HOME = os.getenv('HOME')
s.cwd = os.getcwd() # current work directory
#s.dir0 = '%s/XXX' %(s.HOME)
s.dir0 = '.'
s.dir1 = '%s' %(s.dir0)
s.dict = {}
# s.dict['test'] = 'testval'
# s.dict['test2'] = 'testval2'
# s.dict['testI'] = 3
# s.dict['testF'] = 4.34
s.warn = []
s.fn_warn = 'warnings.txt'
s.fn_report = 'report'
s.report = []
s.coords = []
s.coordsT = {}
s.resvars = []
s.resvarsT = {}
s.fn_root = 'TableToTGraph.root'
s.fn_table = ''
s.table = {}
s.dict['fnaddaxes'] = 1
# ====================== READ ARG
if 'ReadArg' in s.cmd: s.ReadArg()
# ====================== POST INIT
if 'PostInit' in s.cmd: s.PostInit()
# ====================== EXECUTE
if 'Main' in s.cmd: s.Main()
# ##########
def PostInit(s):
s.fn_warn = '%s/%s' %(s.dir1, 'warnings.txt')
s.fn_report = '%s/%s' %(s.dir1, 'report')
if s.fn_table: lines = os.popen('cat %s').readlines()
else:
if s.VB: print "INFO::TableToTGraph Expecting table in by pipe [if this is unexpected, you may want to break off and type 'TableToTGraph.py -help']"
lines = sys.stdin.readlines()
if len(lines) <= 2: print 'Warning::TableToTGraph Table has only %i lines' %(len(lines))
s.table = DictFromTable(lines)
# Hack to add coords to filename
if s.dict['fnaddaxes'] and len(s.coords) == 2:
z = '_x=%s_y=%s' %(s.coordsT[s.coords[0]], s.coordsT[s.coords[1]])
s.fn_root = s.fn_root.replace('.root','%s.root' %(z))
# ##################################################### CLASS METHODS
# ##################################################### CLASS METHODS
# ##################################################### CLASS METHODS
# ##########
def showHelp(s):
print ' Usage: %s [options]' %(s.myname)
#print ' %s -dict test,txt1:I,testI,3:test2,txt2a,txt2b:F,testF,4.14 # for using autodict (NB: vars need to be defined in __init__)' %(s.myname)
print " Ex: cat DGnoSL_masses.txt | TableToTGraph.py -coords MU,M2 -resvars 'N1,N2,N3,N4,C1,C2,h,N2-N1:N2mN1,C1-N1:C1mN1,N2-N1:N2mN1,(N2+N1)/2:N2plusN1on2,(h-2*N1):hfunnel' -save DGnoSL_masses"
print
print ' -dict I,fnaddaxes,0 # to NOT add the coordinates to the output filename'
print ' -resvars: contains the variable(combination)s which get their TArray '
print ' Adding a column to a variable simply renames the TGraph '
print ' Note: there are problems in setting the titles on the axis, therefore the x and y variable are given specifically in the output root file.'
print
print ' where DGnoSL_masses.txt looks something like:'
print ' M2 MU N1 N2 N3 N4 C1 C2 h'
print ' 100.0 100.0 31.2 70.2 119.6 176.9 56.3 177.3 125.5'
print ' 100.0 120.0 36.0 74.5 138.3 186.5 65.3 186.9 125.5'
print ' 100.0 140.0 39.3 78.8 157.3 197.8 72.8 198.4 125.5'
print ' 100.0 150.0 40.6 80.9 166.8 204.1 75.9 204.7 125.5'
print ' ...'
print ' 500.0 350.0 47.6 342.1 362.0 549.4 340.9 549.4 125.5'
print ' 500.0 400.0 47.9 386.1 412.1 554.9 385.3 554.9 125.5'
print ' 500.0 450.0 48.1 426.5 462.2 565.3 426.0 565.3 125.5'
print ' 500.0 500.0 48.3 460.1 512.2 581.9 459.9 581.9 125.5'
print
# ##########
def DumpWarnings(s):
f = open(s.fn_warn,'w')
for out in s.warn: f.write('%s\n' %(out))
f.close()
# ##########
def Main(s):
if s.VB>1: print "INFO::%s Main" %(s.myname)
# for key in s.dict.keys(): print 'dict: %-10s %s' %(key, s.dict[key])
# Init
if os.path.exists(s.fn_root): os.remove(s.fn_root)
froot = ROOT.TFile(s.fn_root, 'recreate')
for iresvar in range(len(s.resvars)):
resvar = s.resvars[iresvar]
# Make TGraph
tGraph = MakeTGraph(table=s.table, coords=s.coords, resvar=resvar, coordsT=s.coordsT, resvarT=s.resvarsT[resvar], VB=s.VB)
#tGraph.Draw("lego")
tGraph.Write() # for TGraph2D this happens implicit, but for TGraph it is required to get it in the file
#print 'out: ', tGraph.GetXaxis().GetTitle()
# Save ROOT file
froot.Write()
froot.Close()
if s.VB>0: print 'INFO::TableToTGraph Created resulting file %s' %(s.fn_root)
# ##########
def ReadArg(s):
# ################################### ARGUMENT READING
Arg = ArgReader(s.argv, VB=0)
'''
if Arg.hasget('-alist'): print 'a string list: ',Arg.list()
if Arg.hasget('-alisti'): print 'an integer list: ',Arg.listI()
if Arg.hasget('-alistf'): print 'a float list: ',Arg.listF()
if Arg.hasget('-x'): print 'a string: ',Arg.val()
if Arg.hasget('-xI'): print 'an integer: ',Arg.valI()
if Arg.hasget('-xF'): print 'a float: ',Arg.valF()
'''
if Arg.has(['-h','--help','--h','-help']):
s.showHelp()
sys.exit()
if Arg.hasget(['-coords']): # Ex: -coords MU,M_2:M2
zz = Arg.list()
for z in zz:
w = z.split(':')
zvar = w[0]
if len(w)>1: zvarT = w[1]
else: zvarT = w[0]
s.coords.append(zvar)
s.coordsT[zvar] = zvarT
if Arg.hasget(['-resvars']): # Ex: -vars N1,N2:N20,N3,N2-N1:N2mN1,N2/N1
zz = Arg.list()
for z in zz:
w = z.split(':')
zvar = w[0]
if len(w)>1: zvarT = w[1]
else: zvarT = w[0]
s.resvars.append(zvar)
s.resvarsT[zvar] = zvarT
#print 'dict: ', zvar, zvarT, s.resvarsT[zvar]
if Arg.hasget(['-fnroot','-save']):
s.fn_root = Arg.val()
if not s.fn_root.endswith('.root'): s.fn_root += '.root'
if Arg.hasget(['-fntable']):
s.fn_table = Argv.val()
if Arg.hasget('-vb'):
s.VB = Arg.valI()
if s.VB: print 'Verbosity level: %i' %(s.VB)
# ----- The new general procedure for var input (should this be put into the ArgReader?)
if Arg.hasget('-dict'):
zs = Arg.list(':')
# print zs
for z in zs:
zw = z.split(',')
# First determine var type (default is string)
ztype = 'string'
if zw[0] in ['I']: ztype = zw.pop(0)
elif zw[0] in ['F']: ztype = zw.pop(0)
# Then get the key / var name and check
key = zw.pop(0)
if key not in s.dict:
# this restriction might be dropped
print s.dict
sys.exit('FATAL non-existing var set with -var: %s (%s)' %(key, zs))
if len(zw) == 0: sys.exit('FATAL non-allowed arg for -var: %s' %(zs))
# The fill the dict/var
s.dict[key] = [] # First make a list. If only one entry, turn list into a plain value (bottom)
for zw1 in zw:
zval = zw1
if ztype == 'I': zval = int(zw1)
elif ztype == 'F': zval = float(zw1)
s.dict[key].append(zval)
if len(zw) == 1: s.dict[key] = s.dict[key][0] # if just one entry, don't use list
# -----
if not Arg.AllOk():
print 'Problems...'
s.showHelp()
sys.exit("FATAL Ending due to problems of arguments")
# ################################### POST-INIT
############################## EXECUTE IF RUN AS SCRIPT (NOT JUST IMPORTED)
if __name__ == '__main__':
    # Full pipeline: parse CLI args, read the table, write the TGraphs.
    t = TableToTGraph(cmd=['ReadArg','PostInit','Main'])
##############################
|
##########################DATA LABEL PROPERTIES###############################
# Per-camera calibration constants (units not stated here -- confirm against
# the analysis scripts that consume them).
cameraInfo = {"Pelco" : 11.34, "Andor" : 8.1}
# Tuple of (quality labels, their numeric thresholds, fallback label).
qualityParameters = ["Good","Very Good", "Amazing"],[0.1,0.05,0.01],["Bad"]
qualityN = {"N":3}
# Column indices of each quantity inside the raw data files.
columnLocations = {"calX":3,"calTime":0,"pullP1X":1,"pullP2X":3,"FWHM":1}
# Run-file suffix sequences: runs may be lettered (a,b,c...) or numbered.
alphabet = ["a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r"
,"s","t","u","v","w","x","y","z"]
numbers = ["1","2","3","4","5","6","7","8","9","10","11","12","13","14","15","16"
,"17","18","19","20","21","22","23","24","25","26"]
# Analysis stage executables and their input-file naming conventions.
calExe = "/3AnalysisCalibrations_v1.0.py"
calStepFactor = 1000
ietCalSuffix1 = "alphabet" #numbers, alphabet
ietCalSuffix2 = "c.dat"
strDefExe = '/3AnalysisStretchDeflection_v1.0.py'
ietPullSuffix1 = "alphabet" #numbers, alphabet
ietPullSuffix2 = "d.dat"
# NOTE(review): semantics of the three entries not shown here -- presumably
# [camera name, rate, scale]; confirm against the consuming scripts.
pullDataCamera = ["NA",60,1.5]
fwhmExe = '/3AreaAnalysis_v1.0.py'
ietFWHMSuffix1 = "alphabet" #numbers, alphabet
ietFWHMSuffix2 = ".csv"
fig = 0
# Input / output directory names used by the pipeline.
rawDataFolder = "RawData"
savedDataFolder = "AnalyzedData"
# -*- coding: utf-8 -*-
from django.shortcuts import render
from my_quote_app.models import Quote
def get_quotes(request):
    """Render the home page with every stored quote in the context."""
    context = {'quote_list': Quote.objects.all()}
    return render(request, "quoteTest/home.html", context)
# Generated by Django 2.1.5 on 2019-01-19 01:00
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add the nullable `content_preview` text field to Article."""

    dependencies = [
        ('posts', '0002_article_slug'),
    ]

    operations = [
        migrations.AddField(
            model_name='article',
            name='content_preview',
            field=models.TextField(null=True),
        ),
    ]
|
from odoo import api, models, fields
class ResUsers(models.Model):
    """res.users extension exposing the partner's split first/last name."""
    _inherit = 'res.users'

    # Related name fields stored on the linked partner, editable on the user form.
    first_name = fields.Char(string="First name", related='partner_id.first_name', inherited=True, readonly=False)
    last_name = fields.Char(string="Last name", related='partner_id.last_name', inherited=True, readonly=False)

    @api.onchange('partner_id')
    def _onchange_partner(self):
        # Prefill names and login from the selected partner (login <- email).
        self.first_name, self.last_name, self.login = self.partner_id.first_name, self.partner_id.last_name, \
            self.partner_id.email

    @api.model
    def create(self, values):
        """Create the user, then push the names back onto its partner."""
        res = super(ResUsers, self).create(values)
        if res.first_name:
            res.partner_id.write({
                'last_name': res.last_name,
                'first_name': res.first_name
            })
        return res
|
# Generated by Django 2.0 on 2019-10-30 16:13
import datetime
from django.db import migrations, models
import django.db.models.deletion
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Auto-generated initial schema for the chats app.

    NOTE(review): `added_at` has a datetime frozen at makemigrations time as
    its default -- the usual artefact of `default=timezone.now()` (called)
    instead of `default=timezone.now` (callable) in the model definition.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Attachment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('attachment_type', models.CharField(max_length=16)),
                ('url', models.CharField(max_length=128)),
            ],
        ),
        migrations.CreateModel(
            name='Chat',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(default='Unnamed Chat', max_length=128)),
                ('is_group_chat', models.BooleanField()),
                ('topic', models.CharField(max_length=128)),
            ],
        ),
        migrations.CreateModel(
            name='Message',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('content', models.TextField()),
                ('added_at', models.DateTimeField(default=datetime.datetime(2019, 10, 30, 16, 13, 29, 581024, tzinfo=utc))),
                ('chat', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='chats.Chat')),
            ],
        ),
    ]
|
from math import sqrt
import numpy as np
import copy
class BitBlock:
    """A fixed-size string of '0'/'1' characters with the conversion,
    permutation, splitting and XOR primitives needed by DES.
    """

    def __init__(self, hex_data):
        """hex_data: hexadecimal string; pass "" to create an empty block."""
        self.bit_string = self.get_bitstring_from_hex(hex_data)
        self.size = len(self.bit_string)

    def set_bit_string(self, bit_string):
        """Replace the bit contents in place; returns self for chaining."""
        self.bit_string = bit_string
        self.size = len(self.bit_string)
        return self

    def set_bit_string_from_decimal(self, decimal_string):
        """Set contents from a non-negative integer; returns self."""
        bit_s = self.get_binary_from_decimal(decimal_string)
        return self.set_bit_string(bit_s)

    def get_bit(self, index):
        """Return the single bit character at position index."""
        return self.bit_string[index]

    def get_bit_sequence(self, start_index, end_index):
        '''Returns bits in range start_index to end_index including start index only'''
        return self.bit_string[start_index:end_index]

    def get_hex_string(self):
        """Return the block contents as an upper-case hex string."""
        return self.get_hex_from_bit_string(self.bit_string)

    @staticmethod
    def get_bitstring_from_hex(hex_data):
        """Convert a hex string to its bit representation, 4 bits per digit."""
        binString = ''
        for hexDigit in hex_data:
            binDigitString = bin(int(hexDigit, 16))[2:]
            binString += binDigitString.zfill(4)
        return binString

    @staticmethod
    def get_hex_from_bit_string(bin_string):
        """Convert a bit string to upper-case hex.

        Bug fix: pad with leading zeros so the hex width matches the bit
        length — hex(int(...)) drops leading zero nibbles, e.g. the 64-bit
        pattern for "0123456789ABCDEF" used to round-trip to "123456789ABCDEF".
        """
        hex_digits = hex(int(bin_string, 2)).replace("0x", "").upper()
        return hex_digits.zfill((len(bin_string) + 3) // 4)

    @staticmethod
    def get_decimal_from_binary(binary_string):
        """Interpret a bit string as an unsigned integer."""
        return int(str(binary_string), 2)

    @staticmethod
    def get_binary_from_decimal(decimal_num):
        """Convert an integer to a bit string padded to a multiple of 4 bits."""
        binary_string = bin(decimal_num).replace("0b", "")
        if(len(binary_string)%4 != 0):
            hexPartitions = len(binary_string) // 4
            binary_string = binary_string.zfill(4 * (hexPartitions + 1))
        return binary_string

    def perform_permutation(self,permutation_table,is_table_sequence_from_zero=False,is_expansion=False):
        """Return a new BitBlock with bits rearranged per permutation_table.

        permutation_table entries are 1-based unless is_table_sequence_from_zero.
        Set is_expansion=True when the table is longer than the block (DES
        expansion permutation re-uses source bits).
        Raises ValueError when the table is longer than the block and
        is_expansion is False.
        """
        permuted_string = ""
        if self.size >= len(permutation_table) or is_expansion:
            for index in permutation_table:
                if(is_table_sequence_from_zero):
                    permuted_string += self.bit_string[index]
                else:
                    permuted_string += self.bit_string[index-1]
        else:
            raise ValueError("Permutation out of reach, Please change the parameter flag if you are trying for expansion permutation; size of current block is :",self.size," and length of permituation table is :",len(permutation_table))
        return BitBlock("").set_bit_string(permuted_string)

    def split(self,fragmentSize):
        """Split into consecutive BitBlocks of fragmentSize bits each."""
        splited_string = [self.bit_string[0+i:fragmentSize+i] for i in range(0, len(self.bit_string), fragmentSize)]
        return [BitBlock("").set_bit_string(st) for st in splited_string]

    def splitHalf(self):
        """Return (left, right) halves of the block."""
        halfs = self.split(self.size//2)
        return halfs[0],halfs[1]

    def left_shift(self,n):
        """Rotate the bit string left by n positions, in place."""
        self.bit_string = self.bit_string[n:]+self.bit_string[:n]

    def XOR(self,bit_block_2):
        """Return the bitwise XOR of this block with bit_block_2.

        Bug fix: a size mismatch previously fell through and silently
        produced an empty/None result; it now raises ValueError.
        """
        if self.size != bit_block_2.size:
            raise ValueError("XOR size mismatch: %d vs %d" % (self.size, bit_block_2.size))
        xored_string = ""
        for i in range(self.size):
            if self.bit_string[i]==bit_block_2.bit_string[i]:
                xored_string+= "0"
            else:
                xored_string+= "1"
        return BitBlock("").set_bit_string(xored_string)

    def __add__(self,bit_block_2):
        """Concatenate two blocks into a new BitBlock."""
        return BitBlock("").set_bit_string(self.bit_string+bit_block_2.bit_string)

    @staticmethod
    def test():
        """Smoke test: DES initial permutation on a known 64-bit block."""
        initial_perm = [58, 50, 42, 34, 26, 18, 10, 2,
                        60, 52, 44, 36, 28, 20, 12, 4,
                        62, 54, 46, 38, 30, 22, 14, 6,
                        64, 56, 48, 40, 32, 24, 16, 8,
                        57, 49, 41, 33, 25, 17, 9, 1,
                        59, 51, 43, 35, 27, 19, 11, 3,
                        61, 53, 45, 37, 29, 21, 13, 5,
                        63, 55, 47, 39, 31, 23, 15, 7]
        data = BitBlock("0123456789ABCDEF")
        print(data.bit_string)
        print(data.perform_permutation(initial_perm,is_table_sequence_from_zero=False).bit_string)
        print(data.splitHalf())
class KeyBlock(BitBlock):
    """64-bit DES key container.

    On construction the key is compressed to 56 bits (PC-1) and split into
    the LK/RK 28-bit halves from which the 16 per-round 48-bit keys are
    derived via left rotations plus a second compression (PC-2).
    """
    def __init__(self, hex_key_string):
        super().__init__(hex_key_string)
        self.original_hex_key_string = hex_key_string
        self.initial_key_compression() #converts 64bit key to 56bit key
        self.LK,self.RK = self.splitHalf()
        self.round_key_stack = [] # stack of keys generated for each round[0-15]
        self.round_count=16
        # Left-rotation amounts per round; they sum to 28, so LK/RK return to
        # their starting rotation after one full 16-round schedule.
        self.round_key_shift_count = [1, 1, 2, 2,
                                      2, 2, 2, 2,
                                      1, 2, 2, 2,
                                      2, 2, 2, 1 ]
    def set_lk_rk(self):
        # Re-derive the two key halves from the current bit string.
        self.LK,self.RK = self.splitHalf() # 28 bit key fragment each
    def set_bit_string(self, bit_string):
        # Keep LK/RK in sync whenever the underlying bit string is replaced.
        super().set_bit_string(bit_string)
        self.set_lk_rk()
    def initial_key_compression(self):
        #performs compression of key from 64bits to 56bits by discarding every 8th bits
        # (this is the DES PC-1 table)
        # initial_compression_permutation_table = [
        #     1,2,3,4,5,6,7,9,10,11,12,13,14,15,
        #     17,18,19,20,21,22,23,25,26,27,28,29,30,31,
        #     33,34,35,36,37,38,39,41,42,43,44,45,46,47,
        #     49,50,51,52,53,54,55,57,58,59,60,61,62,63
        # ]
        initial_compression_permutation_table = [57, 49, 41, 33, 25, 17, 9,
                                                 1, 58, 50, 42, 34, 26, 18,
                                                 10, 2, 59, 51, 43, 35, 27,
                                                 19, 11, 3, 60, 52, 44, 36,
                                                 63, 55, 47, 39, 31, 23, 15,
                                                 7, 62, 54, 46, 38, 30, 22,
                                                 14, 6, 61, 53, 45, 37, 29,
                                                 21, 13, 5, 28, 20, 12, 4 ]
        print("\n"+"#"*4+"Performing Initial key compression"+"#"*4)
        print("initial key:\t",self.get_hex_string())
        print("size:\t"+str(self.size)+"bits")
        compressed_key = self.perform_permutation(initial_compression_permutation_table,is_table_sequence_from_zero=False)
        print("compressed Key :",compressed_key.get_hex_string())
        self.set_bit_string(compressed_key.bit_string)
    def compression_permutation(self):
        '''
        Performs compression for each key generation step by converting 56bit key format to 48bits
        (the DES PC-2 table).
        returns transformedKey(BitBlock): 48 bits
        '''
        key_compression_permitation_table = [14, 17, 11, 24, 1, 5,
                                             3, 28, 15, 6, 21, 10,
                                             23, 19, 12, 4, 26, 8,
                                             16, 7, 27, 20, 13, 2,
                                             41, 52, 31, 37, 47, 55,
                                             30, 40, 51, 45, 33, 48,
                                             44, 49, 39, 56, 34, 53,
                                             46, 42, 50, 36, 29, 32 ]
        # Recombine the (already rotated) halves before permuting.
        self.bit_string = self.LK.bit_string + self.RK.bit_string
        return self.perform_permutation(key_compression_permitation_table,is_table_sequence_from_zero=False)
    def generate_keys(self) :
        """Populate round_key_stack with the 16 round keys.

        NOTE: LK/RK are rotated in place cumulatively; because the shifts sum
        to 28 they end back at their initial rotation after a full pass.
        """
        self.round_key_stack= []
        #debug
        print("Generating Round Keys")
        print("initial Key:",self.get_hex_string())
        for i in range(self.round_count):
            self.LK.left_shift(self.round_key_shift_count[i])
            self.RK.left_shift(self.round_key_shift_count[i])
            current_round_key = self.compression_permutation()
            self.round_key_stack.append(current_round_key)
    def get_round_key(self,round_number):
        '''
        Parameter round_number(int): the round number for which key is requested in range [1 upto round_count]
        Returns 48 bit ready round key for requested round.
        Keys are generated lazily on first use; returns None implicitly when
        round_number exceeds the generated range.
        '''
        if len(self.round_key_stack) <= 0:
            self.generate_keys()
        if(len(self.round_key_stack)>=round_number):
            return self.round_key_stack[round_number-1]
    @staticmethod
    def test():
        '''
        test_bit_block_1 = BitBlock("0123456789ABCDEF")
        print("",test_bit_block_1.bit_string)
        test_lshift_object = KeyBlock("AABB09182736CCDD")
        test_lshift_object.left_shift(2)
        print("BitString after left shift by 2:\n",test_lshift_object.bit_string)
        #Testing XOR
        test_object_1 = KeyBlock("0123456789ABCDEF")
        test_object_2 = KeyBlock("0123456789ABCDEF")
        print("XOR of same object:\n",test_object_1.XOR(test_object_2).bit_string)
        test_roundkeys_object = KeyBlock("AABB09182736CCDD")
        #expected o/p: 11000011110000000011001110100011001111110000110011111010
        print("Initial BitString:\n",test_roundkeys_object.bit_string)
        print("round Keys")
        test_roundkeys_object.generate_keys()
        print(test_roundkeys_object.round_key_stack)
        '''
        #expected o/p
        # ['000110010100110011010000011100101101111010001100', '010001010110100001011000000110101011110011001110',
        # '000001101110110110100100101011001111010110110101', '110110100010110100000011001010110110111011100011',
        # '011010011010011000101001111111101100100100010011',
        # '110000011001010010001110100001110100011101011110', '011100001000101011010010110111011011001111000000',
        # '001101001111100000100010111100001100011001101101', '100001001011101101000100011100111101110011001100',
        # '000000100111011001010111000010001011010110111111', '011011010101010101100000101011110111110010100101',
        # '110000101100000111101001011010100100101111110011',
        # '100110011100001100010011100101111100100100011111', '001001010001101110001011110001110001011111010000',
        # '001100110011000011000101110110011010001101101101', '000110000001110001011101011101011100011001101101']
        # test_roundkeys_object = KeyBlock(BitBlock.get_hex_from_bit_string("1011111100011001111100110001101011111101111000101001111100101010"))
        # print(test_roundkeys_object.get_hex_string())
        # test_roundkeys_object.generate_keys()
        # for k in test_roundkeys_object.round_key_stack:
        #     print(k.bit_string)
class DESCryptography:
    """DES encryption/decryption over 64-bit BitBlocks.

    Implements the 16-round Feistel structure (initial permutation, expansion,
    key XOR, S-box substitution, P-permutation, final permutation) driven by
    the round-key schedule of a KeyBlock.
    """
    def __init__(self,key_hex):
        '''
        key_hex (str): hexadecimal string which represents the 64-bit key
        '''
        self.master_key = KeyBlock(key_hex)
    def perform_substitution(self,data_block):
        '''
        this is the function for s_box component
        Parameters: data_block(BitBlock) 48bits
        return substituted_block(BitBlock): block after substitution 32bits
        '''
        # Standard DES S-boxes S1..S8; each maps a 6-bit input to a 4-bit output.
        sbox_table = [
            [
                [14, 4, 13, 1, 2, 15, 11, 8, 3, 10, 6, 12, 5, 9, 0, 7],
                [ 0, 15, 7, 4, 14, 2, 13, 1, 10, 6, 12, 11, 9, 5, 3, 8],
                [ 4, 1, 14, 8, 13, 6, 2, 11, 15, 12, 9, 7, 3, 10, 5, 0],
                [15, 12, 8, 2, 4, 9, 1, 7, 5, 11, 3, 14, 10, 0, 6, 13 ]
            ],
            [
                [15, 1, 8, 14, 6, 11, 3, 4, 9, 7, 2, 13, 12, 0, 5, 10],
                [3, 13, 4, 7, 15, 2, 8, 14, 12, 0, 1, 10, 6, 9, 11, 5],
                [0, 14, 7, 11, 10, 4, 13, 1, 5, 8, 12, 6, 9, 3, 2, 15],
                [13, 8, 10, 1, 3, 15, 4, 2, 11, 6, 7, 12, 0, 5, 14, 9 ]
            ],
            [
                [10, 0, 9, 14, 6, 3, 15, 5, 1, 13, 12, 7, 11, 4, 2, 8],
                [13, 7, 0, 9, 3, 4, 6, 10, 2, 8, 5, 14, 12, 11, 15, 1],
                [13, 6, 4, 9, 8, 15, 3, 0, 11, 1, 2, 12, 5, 10, 14, 7],
                [1, 10, 13, 0, 6, 9, 8, 7, 4, 15, 14, 3, 11, 5, 2, 12 ]
            ],
            [
                [7, 13, 14, 3, 0, 6, 9, 10, 1, 2, 8, 5, 11, 12, 4, 15],
                [13, 8, 11, 5, 6, 15, 0, 3, 4, 7, 2, 12, 1, 10, 14, 9],
                [10, 6, 9, 0, 12, 11, 7, 13, 15, 1, 3, 14, 5, 2, 8, 4],
                [3, 15, 0, 6, 10, 1, 13, 8, 9, 4, 5, 11, 12, 7, 2, 14]
            ],
            [
                [2, 12, 4, 1, 7, 10, 11, 6, 8, 5, 3, 15, 13, 0, 14, 9],
                [14, 11, 2, 12, 4, 7, 13, 1, 5, 0, 15, 10, 3, 9, 8, 6],
                [4, 2, 1, 11, 10, 13, 7, 8, 15, 9, 12, 5, 6, 3, 0, 14],
                [11, 8, 12, 7, 1, 14, 2, 13, 6, 15, 0, 9, 10, 4, 5, 3 ]
            ],
            [
                [12, 1, 10, 15, 9, 2, 6, 8, 0, 13, 3, 4, 14, 7, 5, 11],
                [10, 15, 4, 2, 7, 12, 9, 5, 6, 1, 13, 14, 0, 11, 3, 8],
                [9, 14, 15, 5, 2, 8, 12, 3, 7, 0, 4, 10, 1, 13, 11, 6],
                [4, 3, 2, 12, 9, 5, 15, 10, 11, 14, 1, 7, 6, 0, 8, 13]
            ],
            [
                [4, 11, 2, 14, 15, 0, 8, 13, 3, 12, 9, 7, 5, 10, 6, 1],
                [13, 0, 11, 7, 4, 9, 1, 10, 14, 3, 5, 12, 2, 15, 8, 6],
                [1, 4, 11, 13, 12, 3, 7, 14, 10, 15, 6, 8, 0, 5, 9, 2],
                [6, 11, 13, 8, 1, 4, 10, 7, 9, 5, 0, 15, 14, 2, 3, 12]
            ],
            [
                [13, 2, 8, 4, 6, 15, 11, 1, 10, 9, 3, 14, 5, 0, 12, 7],
                [1, 15, 13, 8, 10, 3, 7, 4, 12, 5, 6, 11, 0, 14, 9, 2],
                [7, 11, 4, 1, 9, 12, 14, 2, 0, 6, 10, 13, 15, 3, 5, 8],
                [2, 1, 14, 7, 4, 10, 8, 13, 15, 12, 9, 0, 3, 5, 6, 11]
            ]
        ]
        shrinked_block = BitBlock("")
        sub_blocks = data_block.split(6) # gives list of blocks with size 6 bits, data_block:48bits ==> 8 elements in list
        for i in range(len(sub_blocks)):
            block = sub_blocks[i]
            # Row = outer bits (b0, b5); column = middle four bits (b1..b4).
            row_index = BitBlock.get_decimal_from_binary(block.get_bit(0)+block.get_bit(5))
            col_index = BitBlock.get_decimal_from_binary(block.get_bit_sequence(1,5))
            s_val = sbox_table[i][row_index][col_index] #decimal
            s_val_block = BitBlock("").set_bit_string_from_decimal(s_val)
            shrinked_block += s_val_block
        return shrinked_block #32bits
    def round_function(self,left_block,right_block,round_key):
        """One Feistel round: returns L XOR P(S(E(R) XOR K)) as a 32-bit block.

        left_block, right_block: 32-bit halves; round_key: 48-bit key.
        """
        #32 -> 48 (DES expansion E table)
        expansion_permutation_table = [32, 1 , 2 , 3 , 4 , 5 , 4 , 5,
                                       6 , 7 , 8 , 9 , 8 , 9 , 10, 11,
                                       12, 13, 12, 13, 14, 15, 16, 17,
                                       16, 17, 18, 19, 20, 21, 20, 21,
                                       22, 23, 24, 25, 24, 25, 26, 27,
                                       28, 29, 28, 29, 30, 31, 32, 1 ]
        #48 -> 32 (DES P permutation; input here is the 32-bit S-box output)
        end_permutation_table = [ 16, 7, 20, 21,
                                  29, 12, 28, 17,
                                  1, 15, 23, 26,
                                  5, 18, 31, 10,
                                  2, 8, 24, 14,
                                  32, 27, 3, 9,
                                  19, 13, 30, 6,
                                  22, 11, 4, 25 ]
        permuted_right_block = right_block.perform_permutation(expansion_permutation_table,is_expansion=True)
        xored_right_block = permuted_right_block.XOR(round_key)
        subsituted_right_block = self.perform_substitution(xored_right_block)
        end_permuted_right_block =subsituted_right_block.perform_permutation(end_permutation_table)#permuted 32 bits
        end_xored_right_block = end_permuted_right_block.XOR(left_block)
        return end_xored_right_block
    def process_fiestal_structure(self,text_block,is_encryption=True):
        """Run the full 16-round Feistel structure ("fiestal" spelling kept).

        text_block (BitBlock): 64-bit input block.
        is_encryption: True uses round keys 1..16, False uses 16..1 (decrypt).
        Returns the 64-bit output BitBlock.
        """
        initial_permutation_table = [58, 50, 42, 34, 26, 18, 10, 2,
                                     60, 52, 44, 36, 28, 20, 12, 4,
                                     62, 54, 46, 38, 30, 22, 14, 6,
                                     64, 56, 48, 40, 32, 24, 16, 8,
                                     57, 49, 41, 33, 25, 17, 9, 1,
                                     59, 51, 43, 35, 27, 19, 11, 3,
                                     61, 53, 45, 37, 29, 21, 13, 5,
                                     63, 55, 47, 39, 31, 23, 15, 7]
        final_permutation_table = [ 40, 8, 48, 16, 56, 24, 64, 32,
                                    39, 7, 47, 15, 55, 23, 63, 31,
                                    38, 6, 46, 14, 54, 22, 62, 30,
                                    37, 5, 45, 13, 53, 21, 61, 29,
                                    36, 4, 44, 12, 52, 20, 60, 28,
                                    35, 3, 43, 11, 51, 19, 59, 27,
                                    34, 2, 42, 10, 50, 18, 58, 26,
                                    33, 1, 41, 9, 49, 17, 57, 25 ]
        permuted_text_block = text_block.perform_permutation(initial_permutation_table)
        #debug
        print("\n"+"#"*10+"Initiating Fiestal Structure"+"#"*10+"\n")
        print("Initially permuted Plain Text")
        print("-"*4+permuted_text_block.get_hex_string()+"\n")
        left_block,right_block = permuted_text_block.splitHalf()
        # Round-key order: ascending for encryption, descending for decryption.
        round_start_index =1
        round_end_index = self.master_key.round_count+1
        delta = 1
        if not is_encryption:
            round_start_index = self.master_key.round_count
            round_end_index = 0
            delta = -1
        for i in range(round_start_index,round_end_index,delta):
            temp_right_block = copy.deepcopy(right_block)
            current_round_key = self.master_key.get_round_key(i)
            right_block = self.round_function(left_block,right_block,current_round_key)
            left_block = temp_right_block
        #last round needs no swap, so undoing the last swap by swaping once more
        combined_block = right_block + left_block
        print("combined data block:\n"+"-"*4+">"+combined_block.get_hex_string())
        final_permuted_block = combined_block.perform_permutation(final_permutation_table)
        #debug
        print("Final Permuted/cipher block:\n"+"-"*4+">"+final_permuted_block.get_hex_string())
        return final_permuted_block
    def encrypt(self,plain_text_block):
        '''
        Encrypts using DES Algorithm.
        Parameters:
            plain_text_block (BitBlock) : plain text block of 64 bits to be encrypted
        Returns:
            cipher_text_block (BitBlock) : cipher text block of 64 bits
        '''
        cipher_block = self.process_fiestal_structure(plain_text_block,is_encryption=True)
        return cipher_block
    def decrypt(self,cipher_text_block):
        '''
        Decrypts using DES Algorithm.
        Parameters:
            cipher_text_block (BitBlock) : cipher text block of 64 bits to be decrypted
        Returns:
            plain_text_block (BitBlock) : recovered plain text block of 64 bits
        '''
        plain_text_block = self.process_fiestal_structure(cipher_text_block,is_encryption=False)
        return plain_text_block
    @staticmethod
    def test():
        """End-to-end demo: encrypt a known block, then decrypt it back."""
        descrypto_obj_1 = DESCryptography("FEDCBA9876543210")
        plain_text_block = BitBlock("AB123456CDEF7890")
        print("-"*25)
        print("Plain text:\n"+"-"*4+">"+plain_text_block.get_hex_string())
        print("-"*25)
        cipher_block = descrypto_obj_1.encrypt(plain_text_block)
        print("-"*25)
        print("cipher text:\n"+"-"*4+">"+cipher_block.get_hex_string())
        print("-"*25)
        deciphered_block = descrypto_obj_1.decrypt(cipher_block)
        print("-"*25)
        print("deciphered text:\n"+"-"*4+">"+deciphered_block.get_hex_string())
        print("-"*25)
if __name__ == "__main__":
    #BitBlock.test()#Test phase-1 passed
    #KeyBlock.test()
    # Run the end-to-end DES encrypt/decrypt demonstration.
    DESCryptography.test()
from django.apps import AppConfig
class MederoblogConfig(AppConfig):
    """Django application configuration for the ``mederoblog`` app."""
    name = 'mederoblog'
|
#!/usr/bin/env python
import os, sys, argparse, json, sha, base64, re
# Directory where extracted image files are written
IMAGE_DIR = "img"
# URL prefix used when referencing those images from the generated Markdown
IMAGE_URL_PREFIX = "img"


def cleanup(s):
    """Strip every character that is not an ASCII letter or digit."""
    return re.sub(r"[^0-9a-zA-Z]", "", s)
if __name__ == "__main__":
    # Convert an IPython (v3 "worksheets" format) notebook to GitHub-flavoured
    # Markdown. Python 2 script (print >> chevron syntax, `sha` module).
    parser = argparse.ArgumentParser()
    parser.add_argument("notebook", type=str, help="Path to IPython notebook file (.ipynb)")
    parser.add_argument("--output", type=str, default=None, help="Path to output file (default: stdout)")
    args = parser.parse_args()
    out = sys.stdout
    if args.output is not None:
        out = open(args.output, "w")
    # Import notebook JSON
    notebook = json.load(open(args.notebook, "r"))
    # Only one worksheet is supported
    cells = notebook["worksheets"][0]["cells"]
    # Output each cell
    for cell in cells:
        if cell["cell_type"] == "markdown":
            # Write markdown directly
            for line in cell["source"]:
                print >>out, line.rstrip()
            print >>out, ""
        elif cell["cell_type"] == "code":
            # Wrap code in Github fenced code block
            print >>out, "```python"
            for line in cell["input"]:
                print >>out, line.rstrip()
            print >>out, "```"
            print >>out, ""
            # Write cell output
            outputs = cell["outputs"]
            if len(outputs) > 0:
                print >>out, ""
                print >>out, "**Output**:"
                print >>out, ""
                output_types = [o["output_type"] for o in outputs]
                # Write as a fenced code block with no highlighting
                if ("pyout" in output_types) or ("stream" in output_types):
                    print >>out, "```"
                    for output in cell["outputs"]:
                        if output["output_type"] in ["pyout", "stream"]:
                            for line in output["text"]:
                                print >>out, line.rstrip()
                    print >>out, "```"
                    print >>out, ""
                # Save base64-encoded PNG to the file system.
                # The file name is just the SHA hash of the raw image data prefixed with "gen_".
                if "display_data" in output_types:
                    for output in cell["outputs"]:
                        if output["output_type"] == "display_data":
                            image_data = base64.decodestring(output["png"])
                            image_id = "gen_" + cleanup(base64.encodestring(sha.sha(image_data).digest())[:-2])
                            image_path = os.path.join(IMAGE_DIR, "{0}.png".format(image_id))
                            with open(image_path, "wb") as image_file:
                                image_file.write(image_data)
                            # Bug fix: the format string was "" and discarded both
                            # arguments, so no image link was ever emitted.
                            print >>out, "".format(IMAGE_URL_PREFIX, image_id)
                            print >>out, ""
    # Bug fix: only close the handle we opened — never close sys.stdout.
    if args.output is not None:
        out.close()
|
from charms.layer.ksql import KSQL_PORT
from charms.reactive import when
@when('website.available', 'ksql.configured')
def setup_website(website):
    """Reactive charm handler: publish the KSQL port on the website relation.

    Fires once both the 'website' relation is available and the 'ksql.configured'
    flag is set; forwards KSQL_PORT to the relation via configure().
    """
    website.configure(KSQL_PORT)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-08-18 14:13
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter App.apphosts and App.name.

    The escaped verbose_name strings decode to Chinese UI labels:
    '\u670d\u52a1\u8282\u70b9\u540d\u79f0' = "service node name",
    '\u670d\u52a1\u540d\u79f0' = "service name".
    """

    dependencies = [
        ('nova', '0011_auto_20170818_1827'),
    ]

    operations = [
        migrations.AlterField(
            model_name='app',
            name='apphosts',
            field=models.ManyToManyField(blank=True, to='nova.AppHost', verbose_name='\u670d\u52a1\u8282\u70b9\u540d\u79f0'),
        ),
        migrations.AlterField(
            model_name='app',
            name='name',
            field=models.CharField(max_length=30, verbose_name='\u670d\u52a1\u540d\u79f0'),
        ),
    ]
|
import functools
from . import ValidationError, _dict_validate, _postprocess
def typecheck(types, x):
    """Raise ValidationError unless ``x`` is an instance of ``types``."""
    if isinstance(x, types):
        return
    raise ValidationError("Not a member of %s", str(types))
def typechecker(types):
    """Build a validator that raises ValidationError when its argument is not
    an instance of ``types`` (a partial application of :func:`typecheck`).
    """
    return functools.partial(typecheck, types)
def true(val, message):
    """Raise ValidationError(message) when ``val`` is falsy; otherwise no-op."""
    if val:
        return
    raise ValidationError(message)
def broadcastable(*names):
    """Return a validator over a dict of args (for use with an ``_all``
    validator) asserting that the arrays stored under ``names`` are mutually
    broadcastable.
    """
    # todo - do we check whether these are numpy objs first?
    # cause you can write numpy funcs on lists sometimes
    import numpy as np

    def _broadcastable(all_args):
        arrays = [all_args[name] for name in names]
        try:
            np.broadcast(*arrays)
        except ValueError:
            shapes = [getattr(a, 'shape', 'no shape') for a in arrays]
            raise ValidationError(
                "Cannot broadcast %s with shapes %s", names, shapes)
    return _broadcastable
def has(*names):
    """Return a validator asserting that a dict/dataframe-like object
    contains every field in ``names`` (raises on the first one missing).
    """
    def _has(obj):
        for field in names:
            if field in obj:
                continue
            raise ValidationError("Value missing %s field", field)
    return _has
def dict_validator(validators):
    """Return a validator that checks input_dict against per-key validators.

    validators: mapping of key -> validator spec; each is normalised with
    ``_postprocess`` before use.

    Bug fix: the per-key specs were previously written back into the caller's
    ``validators`` dict in place; a compiled copy is built instead so the
    caller's mapping is left untouched.
    """
    compiled = {k: _postprocess(v) for k, v in validators.items()}
    msg = "Error validating key: %s, value: %s"
    def _dict_validator(input_dict):
        _dict_validate(compiled, input_dict, msg)
    return _dict_validator
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import networkx as nx
import spartan as st
from .metrics import metrics, MetricCollection
class BipartiteFramework:
    '''
    Framework for evaluating bipartite graph models found in bipartiteModels/models.py
    '''
    def __init__(self, model, metrics=metrics):
        # model: bipartite anomaly model exposing fit(G) and predict()
        # metrics: metric callables wrapped in a MetricCollection
        self.model = model
        self.metrics = MetricCollection(metrics)
        self.G = None  # graph set by fit()
    def fit(self, G):
        """Fit the model on graph G and build per-node / per-edge prediction frames.

        Returns (node_df, edge_df): DataFrames with y_pred (1 = flagged
        anomalous) and y_true (the node/edge 'label' attribute when present,
        else None).
        NOTE(review): assumes predict() returns (users, products) as
        collections of ints and that node ids are int()-convertible — confirm.
        """
        self.G = G
        self.model.fit(G)
        node_labels = self.G.nodes()
        edge_labels = self.G.edges()
        pred_users, pred_products = self.identify()
        # pred df for users
        y_pred = [1 if int(node) in pred_users or int(node) in pred_products else 0 for node in node_labels]
        y_true = [node_labels[node]['label'] if 'label' in node_labels[node] else None for node in node_labels]
        node_df = pd.DataFrame({'node':node_labels, 'y_pred':y_pred, 'y_true':y_true})
        # pred df for edges: anomalous iff both endpoints were flagged
        y_pred = [1 if int(u) in pred_users and int(v) in pred_products else 0 for u,v in edge_labels]
        y_true = [edge_labels[edge]['label'] if 'label' in edge_labels[edge] else None for edge in edge_labels]
        edge_df = pd.DataFrame({'edge':edge_labels, 'y_pred':y_pred, 'y_true':y_true})
        return node_df, edge_df
    def identify(self):
        # Identify anomalous nodes; requires fit() to have been called first.
        if self.G:
            return self.model.predict()
        raise AssertionError('No graph found.')
    def score(self,y,y_pred):
        """Compute the configured metrics over the labelled subset of y.

        Rows where y is null are dropped; returns None when nothing is labelled.
        """
        labelled_map = ~y.isnull()
        if labelled_map.sum() == 0:
            print('No valid labels')
            return None
        tmp_y, tmp_y_pred = y[labelled_map], y_pred[labelled_map]
        scores = self.metrics.compute(tmp_y,tmp_y_pred)
        return scores
import numpy as np
import cv2
import matplotlib.pyplot as plt
import os
import sys
import math
# Training Image
def skinToneData(img):
    """Build a normalized 2-D Hue/Saturation frequency histogram from a BGR image.

    img: BGR image array (as returned by cv2.imread).
    Returns a (180, 256) uint8 array of pixel counts per (H, S) pair,
    min-max normalized to [0, 255].
    """
    hsvImg = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
    freq = np.zeros((180, 256))  # HSV Range: Hue[0, 179], Sat[0, 255]
    # Performance fix: the original per-pixel Python double loop is replaced
    # by a vectorized scatter-add over the flattened H and S channels.
    h = hsvImg[:, :, 0].ravel().astype(np.intp)
    s = hsvImg[:, :, 1].ravel().astype(np.intp)
    np.add.at(freq, (h, s), 1)
    freq = cv2.normalize(freq, None, 0, 255, cv2.NORM_MINMAX, cv2.CV_8U)
    return freq
def SkinDetect(freq, img):
    """Intended skin-pixel filter — currently a stub.

    The thresholding logic is commented out, so the loop below has no
    observable effect and ``img`` is returned unchanged.
    freq: histogram lookup (unused while the filter is disabled).
    img: HSV image; would be darkened in place when the filter is enabled.
    """
    height = img.shape[0]
    width = img.shape[1]
    for i in range(width):
        for j in range(height):
            hue = img[j][i][0]
            sat = img[j][i][1]
            # if freq[sat][hue] == 0:
            # img[j][i][2] = 0
    return img
def Histo(h, s, freq):
    """Plot a 2-D hue/saturation histogram and show it.

    h, s: hue and saturation channel arrays (as produced by cv2.split).
    freq: passed straight through as matplotlib's ``bins`` argument.
        NOTE(review): ``freq`` here is the stacked histogram array from
        skinToneData, while plt.hist2d expects a bin specification —
        confirm this is intentional.
    """
    # Hthres_Min = np.min(Hthres)
    # Hthres_Max = np.max(Hthres)
    # Sthres_Min = np.min(Sthres)
    # Sthres_Max = np.max(Sthres)
    # hBin = np.linspace(Hthres_Min, Hthres_Max, 10)
    # sBin = np.linspace(Sthres_Min, Sthres_Max, 10)
    fig, ax = plt.subplots(figsize=(15, 10))
    # Histogram
    plt.hist2d(h, s, bins=freq, cmap = plt.cm.nipy_spectral)
    # ax.imshow(
    # freq, cmap=plt.cm.nipy_spectral,
    # extent=[
    # h, s
    # ]
    # )
    ax.set_title('2D Histogram')
    ax.set_xlabel('Hue')
    ax.set_ylabel('Saturation')
    fig.tight_layout(pad=3.0)
    # plt.savefig('result.png')
    plt.show()
if __name__ == '__main__':
    # Training Images
    # NOTE(review): cv2.imread returns None for a missing file; the later
    # cvtColor/indexing would then fail — confirm the files exist.
    light = cv2.imread('gun1_test.bmp')
    pink = cv2.imread('joy1_test.bmp')
    medium = cv2.imread('pointer1_test.bmp')
    brown = cv2.imread('skin4.jpg')
    dark = cv2.imread('skin5.jpg')
    dataOne = skinToneData(light)
    dataTwo = skinToneData(pink)
    dataThree = skinToneData(medium)
    dataFour = skinToneData(brown)
    dataFive = skinToneData(dark)
    # NOTE(review): only the first three histograms are stacked; dataFour and
    # dataFive are computed but never used.
    result = np.array([dataOne, dataTwo, dataThree])
    print(result)
    # print("Data One: ", dataOne, "\nData Two: ", dataTwo)
    # Testing Image
    img = cv2.imread('pointer1.bmp')
    hsvImg = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
    final = SkinDetect(result, hsvImg)
    h, s, v = cv2.split(hsvImg)
    Histo(h, s, result)
    # cv2.imshow("HSV Image", final)
    # cv2.waitKey(0)
import hashlib
import psycopg2
import datetime
import random
def randf(min, max, precision=2):
    """Return a uniform random float in [min, max] rounded to ``precision`` decimals."""
    value = random.uniform(min, max)
    return round(value, precision)
def generate_unique_code(text):
    """Return a UUID-style code derived from the MD5 of ``text``.

    The 32 hex digits are grouped 8-4-4-4-12 with dashes — identical to the
    original insert-a-dash-after-index-7/11/15/19 behaviour.
    """
    digest = hashlib.md5(text.encode('utf-8')).hexdigest()
    groups = (digest[:8], digest[8:12], digest[12:16], digest[16:20], digest[20:])
    return '-'.join(groups)
# Test-fixture generator: for each turbine (wtid) insert one fault row plus a
# matching fault-cleared row per day into public.wterrorinfo, starting at
# first_time and advancing one day per iteration.
wfid = 140802
wtids = [140802200, 140802201, 140802202, 140802203]
num = 731  # days of data per turbine (~2 years)
first_time = "2015-01-01 08:00:00"
conn = psycopg2.connect(database='v5_test', user='postgres', password='postgres', host='10.80.5.43', port='5432')
cur = conn.cursor()
# NOTE(review): `id` shadows the builtin; it holds the 7 non-key column values.
id = [0] * 7
for wtid in wtids:
    rectime=datetime.datetime.strptime(first_time, "%Y-%m-%d %H:%M:%S")
    for i in range(num):
        # fault record
        id[0] = rectime.strftime("%Y-%m-%d %H:%M:%S")
        id[1] = random.choice(['133', '106', '107', '122', '174'])  # fault code
        id[2] = None
        id[3] = None
        id[4] = random.choice(['1','3','4','5'])
        id[5] = generate_unique_code(str(wfid) + str(wtid) + str(id[1]) + rectime.strftime("%Y-%m-%d %H:%M:%S"))
        id[6] = 0
        # print(i + 1, ':', tuple([wfid, wtid] + id))
        error_tuple=tuple([wfid, wtid] + id)
        print(i + 1, ':','''INSERT INTO public.wterrorinfo VALUES'''+str(error_tuple))
        cur.execute('''INSERT INTO public.wterrorinfo VALUES(%s''' + ',%s' * 8 + ');', error_tuple)
        # fault-cleared record: 4-10 hours after the fault, code 0
        id[0] = (rectime + datetime.timedelta(hours=random.randint(4, 10))).strftime("%Y-%m-%d %H:%M:%S")
        id[1] = 0
        #id[2] = None
        #id[3] = None
        #id[4] = 2
        id[5] = generate_unique_code(str(wfid) + str(wtid) + str(id[1]) + rectime.strftime("%Y-%m-%d %H:%M:%S"))
        id[6] = 0
        # print(tuple([wfid, wtid] + id))
        enderror_tuple=tuple([wfid, wtid] + id)
        print(i + 1, ':','''INSERT INTO public.wterrorinfo VALUES''' + str(enderror_tuple))
        cur.execute('''INSERT INTO public.wterrorinfo VALUES(%s''' + ',%s' * 8 + ');', enderror_tuple)
        # commit in batches of 50 days to bound transaction size
        if i % 50 == 0:
            conn.commit()
        rectime += datetime.timedelta(days=1)
conn.commit()
conn.close()
from collections import OrderedDict
from threading import RLock
import datetime
from Crypto.Hash import SHA
import binascii
import hashlib
import Crypto
import Crypto.Random
import json
class Block:
    """A single blockchain block: index, hash links, nonce, transactions and
    a second-resolution creation timestamp.
    """

    def __init__(self, index, previous_hash, nonce, listOfTransactions=None, timestamp=None, current_hash=0, mine_time=None): #index = len(self.chain)
        """index: position of the block in the chain (len(chain) at creation).
        previous_hash / current_hash: hash links; nonce: proof-of-work value.
        listOfTransactions: transactions carried by the block (fresh empty
            list when omitted).
        timestamp: creation time string; defaults to now, truncated to seconds.
        mine_time: time spent mining, if recorded.
        """
        self.lock = RLock()  # temporary (original note: "prosorina")
        self.index = index #index of block
        self.previous_hash = previous_hash
        self.current_hash = current_hash
        self.nonce = nonce
        # Bug fix: the default was a mutable [] shared by every Block created
        # without an explicit transaction list; use a None sentinel instead.
        self.listOfTransactions = [] if listOfTransactions is None else listOfTransactions
        if timestamp is None:
            d = datetime.datetime.now()
            # Drop microseconds so the stored string has second resolution.
            self.timestamp = str(d - datetime.timedelta(microseconds=d.microsecond))
        else:
            self.timestamp = timestamp
        self.mine_time = mine_time

    def to_dict(self):
        """Return the block's serializable fields (the lock is excluded)."""
        return dict(
            previous_hash = self.previous_hash,
            current_hash = self.current_hash,
            nonce = self.nonce,
            listOfTransactions = self.listOfTransactions,
            index = self.index,
            timestamp = self.timestamp,
            mine_time = self.mine_time
        )

    def calculate_hash(self):
        """Return the SHA hash object of the block's mining-relevant fields
        (previous_hash, transactions, nonce, timestamp) as canonical JSON.
        """
        block_string = json.dumps(dict(
            previous_hash = self.previous_hash,
            listOfTransactions = self.listOfTransactions,
            nonce = self.nonce,
            timestamp = self.timestamp,
        ),sort_keys=True)
        return SHA.new(block_string.encode('utf8'))

    def __eq__(self,other):
        """Blocks are equal when every serializable field matches."""
        if not isinstance(other,Block):
            return False
        return (json.dumps(self.to_dict(),sort_keys = True)== json.dumps(other.to_dict(),sort_keys = True))
block = Block(34,2,43,234,[1,2,3])
|
from collections import Counter
input = """hqcfqwydw-fbqijys-whqii-huiuqhsx-660[qhiwf]
oxjmxdfkd-pzxsbkdbo-erkq-ixyloxqlov-913[xodkb]
bpvctixr-eaphixr-vgphh-gthtpgrw-947[smrkl]
iwcjapey-lhwopey-cnwoo-wymqeoepekj-992[eowpy]
mvhkvbdib-agjrzm-zibdizzmdib-317[bizdm]
excdklvo-lkcuod-dbksxsxq-146[ztwya]
ocipgvke-ejqeqncvg-octmgvkpi-908[prmku]
ktwbhtvmbox-vetllbybxw-vtgwr-vhtmbgz-tvjnblbmbhg-579[uvnyc]
dpmpsgvm-tdbwfohfs-ivou-tijqqjoh-389[emdac]
forwcoqhwjs-pibbm-igsf-hsghwbu-532[bhswf]
uzfqdzmfuazmx-nmewqf-ogefayqd-eqdhuoq-664[qfdem]
fnjyxwrinm-yujbcrl-pajbb-uxprbcrlb-277[brjcl]
aoubshwq-dzoghwq-ufogg-fsoqeiwgwhwcb-714[nkrmy]
pbeebfvir-rtt-fnyrf-975[frbet]
bnknqetk-qzaahs-trdq-sdrshmf-235[mtcqz]
odiih-ljwmh-lxjcrwp-orwjwlrwp-927[wjlrh]
sxdobxkdsyxkv-bkllsd-cobfsmoc-302[sbdko]
gzefmnxq-omzpk-ymzmsqyqzf-352[saomt]
tvsnigxmpi-gerhc-gsexmrk-qerekiqirx-854[eirgx]
ktfitzbgz-vtgwr-ftgtzxfxgm-267[tgfzx]
lxuxaodu-npp-orwjwlrwp-563[pwlor]
oazegyqd-sdmpq-pkq-xmnadmfadk-352[damqk]
wfruflnsl-gzssd-hzxytrjw-xjwanhj-177[bgxsp]
pbybeshy-qlr-qrfvta-455[tmios]
xmrrq-udskkaxawv-vqw-esfsywewfl-918[fdqsb]
vhehkyne-vahvhetmx-ltexl-917[uvhmy]
molgbzqfib-ciltbo-obzbfsfkd-393[htayl]
veqtekmrk-jpsaiv-wlmttmrk-256[ewyhq]
cvabijtm-lgm-apqxxqvo-512[dinjm]
oaxadrgx-nmewqf-qzsuzqqduzs-456[oevtg]
vehmsegxmzi-veffmx-wepiw-880[emfiv]
fruurvlyh-fubrjhqlf-fdqgb-frdwlqj-ghvljq-413[cgkzy]
otzkxtgzoutgr-inuiurgzk-sgxqkzotm-774[gtzko]
hwbba-eqpuwogt-itcfg-tcddkv-ujkrrkpi-154[ktbcd]
pynffvsvrq-cynfgvp-tenff-ynobengbel-377[fnevy]
aoubshwq-qcbgiasf-ufors-qvcqczohs-hsqvbczcum-558[hypcz]
kzeed-xhfajsljw-mzsy-knsfshnsl-281[nsmtd]
hwdtljsnh-hfsid-htfynsl-ijufwyrjsy-177[hsfjy]
excdklvo-zvkcdsm-qbkcc-psxkxmsxq-900[yznml]
diozmivodjivg-xviyt-pnzm-oznodib-239[iodvz]
nzcczdtgp-clmmte-lnbftdtetzy-743[tczde]
ejpanjwpekjwh-bhksan-iwngapejc-264[mgyfj]
ubhatstkwhnl-vhehkyne-xzz-wxiehrfxgm-917[hexkn]
vhkkhlbox-vtgwr-vhtmbgz-vnlmhfxk-lxkobvx-163[vhkxb]
irdgrxzex-tyftfcrkv-rthlzjzkzfe-373[rzfte]
cvabijtm-rmttgjmiv-lmdmtwxumvb-564[mtvbi]
hqfxxnknji-gfxpjy-xmnuunsl-151[brtjg]
odkasqzuo-dmnnuf-xasuefuoe-690[zyejx]
ixeumktoi-pkrrehkgt-sgtgmksktz-384[ktgei]
atyzghrk-igtje-iugzotm-uvkxgzouty-358[rmnqz]
ktwbhtvmbox-xzz-phkdlahi-865[nmsjb]
nzydfxpc-rclop-ojp-lylwjdtd-951[dlpcj]
vxupkizork-kmm-sgtgmksktz-280[yublv]
cvabijtm-kivlg-kwibqvo-twoqabqka-408[pgush]
hqcfqwydw-fbqijys-whqii-mehaixef-218[vzaur]
bpvctixr-rpcsn-rdpixcv-ldgzhwde-271[cifnu]
fnjyxwrinm-kjbtnc-lxwcjrwvnwc-199[nwcjr]
kzeed-idj-xmnuunsl-593[uazmr]
dsxxw-zyqicr-bcqgel-236[cqxbd]
gpewwmjmih-jpsaiv-wivzmgiw-230[iwmgj]
amjmpdsj-afmamjyrc-bcqgel-470[mszht]
eqpuwogt-itcfg-tcorcikpi-hnqygt-ujkrrkpi-596[nywzt]
pelbtravp-pnaql-erprvivat-533[parve]
yhwooebeaz-bhksan-wymqeoepekj-758[eoabh]
iruzfrtkzmv-upv-kirzezex-529[zpysg]
lxaaxbren-lqxlxujcn-mnenuxyvnwc-953[nxlac]
clxalrtyr-prr-nfdezxpc-dpcgtnp-457[prcdl]
sorozgxe-mxgjk-kmm-vaxingyotm-228[ugkxd]
vdzonmhydc-eknvdq-otqbgzrhmf-469[jnsrl]
gsvvswmzi-gspsvjyp-nippcfier-hizipstqirx-802[mvkcd]
xgvnndadzy-xviyt-xjvodib-yzkgjthzio-707[ncejo]
emixwvqhml-akidmvomz-pcvb-uizsmbqvo-538[mvibo]
dpotvnfs-hsbef-cbtlfu-usbjojoh-597[mnkij]
amjmpdsj-pyzzgr-jyzmpyrmpw-522[rxsqz]
fkqbokxqflkxi-yxphbq-ixyloxqlov-861[xjeyz]
vehmsegxmzi-tpewxmg-kveww-xvemrmrk-256[emvwx]
aietsrmdih-ikk-viwievgl-750[iekva]
zekvierkzferc-gcrjkzt-xirjj-nfibjyfg-763[jlbrc]
krxqjijamxdb-lqxlxujcn-lxwcjrwvnwc-537[opuqe]
dsxxw-zsllw-jyzmpyrmpw-652[hgyae]
mbiyqoxsm-mkxni-mykdsxq-kmaescsdsyx-770[otslp]
oqnidbshkd-vdzonmhydc-idkkxadzm-qdzbpthrhshnm-573[dhkmn]
jqwpihizlwca-moo-apqxxqvo-174[oqaip]
ahngzyzqcntr-azrjds-qdrdzqbg-573[zdqra]
bhksan-lqnydwoejc-472[gutvo]
jvsvymbs-zjhclunly-obua-zlycpjlz-175[ljyzb]
wrs-vhfuhw-hjj-ilqdqflqj-205[hjqfl]
egdytrixat-eaphixr-vgphh-ldgzhwde-661[duchs]
oxmeeuruqp-eomhqzsqd-tgzf-mocgueufuaz-196[uemoq]
ahngzyzqcntr-cxd-ehmzmbhmf-677[dqulm]
gspsvjyp-tpewxmg-kveww-wivzmgiw-568[ghntx]
pualyuhapvuhs-jvuzbtly-nyhkl-wshzapj-nyhzz-thyrlapun-149[kibhn]
nzcczdtgp-mfyyj-pyrtyppctyr-171[ypctr]
guahyncw-nij-mywlyn-vohhs-jolwbumcha-760[hnwya]
bgmxkgtmbhgte-xzz-vhgmtbgfxgm-397[gmbtx]
zixppfcfba-gbiivybxk-zrpqljbo-pbosfzb-653[psocz]
votubcmf-sbccju-nbslfujoh-935[bcufj]
gsrwyqiv-kvehi-nippcfier-irkmriivmrk-204[irkve]
jsvagsulanw-hdsklau-yjskk-ksdwk-632[ltnxs]
irdgrxzex-srjbvk-uvgcfpdvek-503[rvdeg]
krxqjijamxdb-ljwmh-bcxajpn-849[jxabm]
ajmrxjlcren-ljwmh-vjwjpnvnwc-407[yemcd]
ahngzyzqcntr-rbzudmfdq-gtms-btrsnldq-rdquhbd-755[dqrbn]
rzvkjiduzy-ezggtwzvi-hvmfzodib-291[yuzaf]
bwx-amkzmb-ntwemz-aitma-408[mabtw]
wihmogyl-aluxy-vumeyn-mufym-812[wymtu]
xjmmjndqz-nxvqzibzm-cpio-yzkgjthzio-889[mtsyf]
xmtjbzidx-ytz-nojmvbz-525[hyzbw]
bnmrtldq-fqzcd-tmrszakd-qzaahs-cdrhfm-131[wmcrn]
ftzgxmbv-wrx-kxtvjnblbmbhg-293[bxgmt]
gsvvswmzi-gerhc-wepiw-230[wegis]
pdjqhwlf-fdqgb-uhfhlylqj-699[fhlqd]
zsxyfgqj-kzeed-uqfxynh-lwfxx-ijuqtdrjsy-957[xfjqy]
rnqnyfwd-lwfij-uqfxynh-lwfxx-knsfshnsl-359[zbtyx]
wrs-vhfuhw-gbh-whfkqrorjb-231[hrwbf]
iuxxuyobk-hatte-rumoyzoiy-280[ouyit]
oqnidbshkd-bgnbnkzsd-nodqzshnmr-287[xnmzi]
atyzghrk-jek-jkyomt-540[anzom]
ibghopzs-pogysh-rsdofhasbh-818[hsobg]
wbhsfbohwcboz-foppwh-rsjszcdasbh-532[njpay]
excdklvo-mrymyvkdo-ecob-docdsxq-484[docek]
xgsvgmotm-yigbktmkx-natz-yzuxgmk-722[zwckh]
ajyqqgdgcb-afmamjyrc-qfgnngle-964[pzowt]
ugdgjxmd-jsttal-kzahhafy-138[cyirg]
irgyyolokj-iuxxuyobk-inuiurgzk-rumoyzoiy-982[sgadc]
qcbgiasf-ufors-gqojsbusf-vibh-qcbhowbasbh-870[njidq]
bkwzkqsxq-mrymyvkdo-wkxkqowoxd-146[hfdmy]
mybbycsfo-mrymyvkdo-bokmaescsdsyx-120[mlnky]
zuv-ykixkz-jek-ktmotkkxotm-852[mebdc]
dkqjcbctfqwu-lgnnadgcp-fgrctvogpv-648[cgdfn]
vehmsegxmzi-ikk-xvemrmrk-724[byndz]
upq-tfdsfu-cvooz-nbobhfnfou-155[xyskn]
gpewwmjmih-wgezirkiv-lyrx-hitevxqirx-360[ierwx]
rdggdhxkt-ytaanqtpc-bpcpvtbtci-817[mnjpk]
xlrypetn-clmmte-zapcletzyd-405[eltcm]
oxjmxdfkd-oxyyfq-abmxoqjbkq-861[nmhlv]
xjinphzm-bmvyz-kgvnodx-bmvnn-gjbdnodxn-395[nbdmv]
tpspahyf-nyhkl-jhukf-zopwwpun-799[phfkn]
jsvagsulanw-usfvq-mkwj-lwklafy-684[alswf]
ipvohghykvbz-kfl-ylhjxbpzpapvu-877[vmizu]
fydelmwp-awldetn-rcldd-afcnsldtyr-405[dlace]
gpbepvxcv-tvv-steadnbtci-609[vtbce]
tipfxvezt-upv-rthlzjzkzfe-581[ztefp]
bknsykmdsfo-oqq-vyqscdsmc-796[sqcdk]
ejpanjwpekjwh-zua-odellejc-914[ejalp]
ytu-xjhwjy-uqfxynh-lwfxx-jslnsjjwnsl-775[jxlns]
tinnm-aoubshwq-tzcksf-zopcfohcfm-376[cfohm]
xjgjmapg-ezggtwzvi-xpnojhzm-nzmqdxz-811[zgjmx]
tvsnigxmpi-fewoix-hiwmkr-386[tpuvk]
udglrdfwlyh-udeelw-vhuylfhv-829[ldhue]
luxciuwncpy-wbiwifuny-mniluay-786[iunwy]
ftzgxmbv-ktuubm-inkvatlbgz-865[btgkm]
xzwrmkbqtm-zijjqb-twoqabqka-486[erqyp]
diozmivodjivg-zbb-ncdkkdib-499[dibko]
kwvacumz-ozilm-kivlg-lmxizbumvb-980[milvz]
hwbba-dwppa-tgugctej-648[abgpt]
myxcewob-qbkno-bkllsd-cdybkqo-120[atghd]
zekvierkzferc-irsszk-uvjzxe-477[snqzi]
wlsiayhcw-dyffsvyuh-guleyncha-526[yhacf]
clotzlnetgp-ojp-opdtry-249[optlc]
dmybmsuzs-vqxxknqmz-eqdhuoqe-560[qmdes]
mtzslklcozfd-clmmte-dstaatyr-275[rtnyq]
cxy-bnlanc-lqxlxujcn-vjwjpnvnwc-823[ncjlx]
jshzzpmplk-zjhclunly-obua-bzly-alzapun-929[vcuxs]
yuxufmdk-sdmpq-ngzzk-oazfmuzyqzf-508[kghlv]
otzkxtgzoutgr-kmm-sgtgmksktz-722[tgkmz]
xgvnndadzy-xviyt-hvmfzodib-941[qbwmr]
qekrixmg-fyrrc-ywiv-xiwxmrk-230[ikjwl]
dpssptjwf-dpmpsgvm-qmbtujd-hsbtt-bobmztjt-337[tbmps]
tcfkqcevkxg-rncuvke-itcuu-ujkrrkpi-388[tabmn]
hjgbwuladw-tskcwl-xafsfuafy-528[afwls]
ygcrqpkbgf-ecpfa-gpikpggtkpi-154[gpkcf]
hqcfqwydw-sxesebqju-qdqboiyi-608[qbdei]
iehepwnu-cnwza-ydkykhwpa-iwngapejc-706[waenp]
jchipqat-ytaanqtpc-htgkxrth-115[mfnly]
pinovwgz-ezggtwzvi-xpnojhzm-nzmqdxz-967[yzosw]
yhwooebeaz-oywrajcan-dqjp-owhao-628[oaweh]
fhezusjybu-tou-skijecuh-iuhlysu-270[uhsei]
tcrjjzwzvu-upv-kirzezex-659[bdnty]
npmhcargjc-aylbw-amyrgle-qcptgacq-626[tkmzs]
ejpanjwpekjwh-ywjzu-ykwpejc-pnwejejc-160[lnqkc]
cybyjqho-whqtu-ryexqpqhteki-uww-tuiywd-946[qwyht]
cqwdujys-uww-bewyijysi-218[wyijs]
xekdwvwnzkqo-acc-pnwejejc-342[cewjk]
encuukhkgf-uecxgpigt-jwpv-ugtxkegu-440[kwmxr]
mbiyqoxsm-tovvilokx-cobfsmoc-224[doavb]
jvuzbtly-nyhkl-jhukf-zlycpjlz-591[jwxzi]
ncjzrpytn-clmmte-lylwjdtd-691[ltcdj]
enqvbnpgvir-enoovg-erprvivat-117[venrg]
gzefmnxq-ngzzk-ymdwqfuzs-612[zfgmn]
gokzyxsjon-cmkfoxqob-rexd-psxkxmsxq-302[zylnb]
aflwjfslagfsd-xdgowj-xafsfuafy-554[rgqmz]
ugdgjxmd-ujqgywfau-hdsklau-yjskk-kzahhafy-294[daelo]
mvkccspson-mrymyvkdo-nozkbdwoxd-718[odkmc]
egdytrixat-rwdrdapit-stepgibtci-817[ampoz]
qfmcusbwq-pogysh-fsgsofqv-194[gcthj]
wifilzof-qyujihctyx-luvvcn-qilembij-344[ilcfj]
gpbepvxcv-snt-apqdgpidgn-323[dnmyh]
kpvgtpcvkqpcn-gii-gpikpggtkpi-180[vyxnb]
ziuxioqvo-moo-mvoqvmmzqvo-512[omvqi]
fbebmtkr-zktwx-vtgwr-vhtmbgz-wxitkmfxgm-631[zilsp]
wihmogyl-aluxy-luvvcn-wihnuchgyhn-240[hlnuy]
eqnqthwn-lgnnadgcp-rwtejcukpi-726[jwvun]
hdgdovmt-bmvyz-ytz-yzqzgjkhzio-369[zydgh]
aflwjfslagfsd-usfvq-ugslafy-hmjuzskafy-138[vjmnt]
froruixo-iorzhu-uhdftxlvlwlrq-205[eslfx]
xekdwvwnzkqo-zua-skngodkl-368[kdnow]
xtwtelcj-rclop-clmmte-dpcgtnpd-353[jowtx]
lhkhszqx-fqzcd-cxd-nodqzshnmr-911[dhqzc]
fodvvlilhg-fdqgb-xvhu-whvwlqj-725[syfpw]
mtzslklcozfd-dnlgpyrpc-sfye-cpdplcns-873[zngtm]
rwcnawjcrxwju-yujbcrl-pajbb-jwjuhbrb-459[jbrwc]
hcd-gsqfsh-awzwhofm-ufors-suu-twbobqwbu-948[reunt]
pwcvonofrcig-pibbm-obozmgwg-688[zgthm]
vhehkyne-lvtoxgzxk-angm-wxiehrfxgm-345[xeghk]
ucynmlgxcb-njyqrga-epyqq-qrmpyec-938[mgnpj]
fruurvlyh-fdqgb-frdwlqj-uhvhdufk-699[fudhr]
hqfxxnknji-gzssd-yjhmstqtld-697[sdhjn]
qzoggwtwsr-rms-rsdofhasbh-402[gtlom]
gzefmnxq-ngzzk-dqeqmdot-638[yatsz]
rmn-qcapcr-njyqrga-epyqq-pcqcypaf-834[mpqie]
yknnkoera-ywjzu-zarahkliajp-186[yozsd]
clxalrtyr-eza-dpncpe-mldvpe-epnsyzwzrj-483[eplrz]
vkrhzxgbv-cxeeruxtg-vhgmtbgfxgm-137[fsxoz]
ymszqfuo-bxmefuo-sdmee-mzmxkeue-898[ndgcf]
dmbttjgjfe-sbccju-bdrvjtjujpo-649[vkijs]
wifilzof-wbiwifuny-guleyncha-136[ifwln]
oxmeeuruqp-vqxxknqmz-abqdmfuaze-196[baztd]
tinnm-qfmcusbwq-pogysh-gvwddwbu-636[aryhp]
lxaaxbren-ouxfna-bnaerlnb-693[anbxe]
nglmtuex-xzz-mktbgbgz-397[zqyrt]
xlrypetn-mfyyj-pyrtyppctyr-223[yprtc]
fodvvlilhg-fdqgb-vklsslqj-127[lvdfg]
ikhcxvmbex-lvtoxgzxk-angm-ehzblmbvl-761[xblmv]
fkqbokxqflkxi-ciltbo-qoxfkfkd-211[kfoqx]
lujbbrornm-bljenwpna-qdwc-fxatbqxy-589[bnajl]
eqpuwogt-itcfg-tcddkv-vgejpqnqia-258[besga]
lnkfaypeha-ydkykhwpa-zaoecj-108[zamyw]
lhkhszqx-fqzcd-atmmx-lzqjdshmf-859[hmqzd]
aflwjfslagfsd-tskcwl-vwhsjlewfl-190[xevmq]
pbafhzre-tenqr-wryylorna-fuvccvat-507[racef]
jvsvymbs-ibuuf-yljlpcpun-773[ubjlp]
fab-eqodqf-rxaiqd-etubbuzs-612[bqade]
cxy-bnlanc-ljwmh-nwprwnnarwp-251[nwacl]
hdgdovmt-bmvyz-pinovwgz-ytz-omvdidib-239[qfmcj]
wsvsdkbi-qbkno-mkxni-mykdsxq-bokmaescsdsyx-328[skbdm]
njmjubsz-hsbef-gmpxfs-tijqqjoh-727[ykelf]
foadouwbu-qobrm-oqeiwgwhwcb-142[owbqu]
cvabijtm-kivlg-ewzsapwx-538[posuz]
xgsvgmotm-igtje-gtgreyoy-696[gtemo]
oaddaeuhq-ngzzk-efadmsq-612[adeqz]
zgmfyxypbmsq-pyzzgr-yaosgqgrgml-470[efsgy]
wihmogyl-aluxy-vumeyn-zchuhwcha-110[eisnw]
hafgnoyr-fpniratre-uhag-phfgbzre-freivpr-663[rfaeg]
jqwpihizlwca-zijjqb-ewzsapwx-174[ognyv]
uwtojhynqj-hfsid-htfynsl-ijajqturjsy-619[jhsty]
hqfxxnknji-kqtbjw-wjhjnansl-177[ctzqd]
upq-tfdsfu-dboez-dpbujoh-mphjtujdt-103[dujpt]
tfiifjzmv-jtrmvexvi-ylek-wzeretzex-919[kuzli]
ugjjgkanw-hdsklau-yjskk-vwkayf-840[omzwl]
ugdgjxmd-kusnwfywj-zmfl-ogjckzgh-840[gjdfk]
vehmsegxmzi-fewoix-hitevxqirx-308[eixhm]
yflexwxoalrp-bdd-absbilmjbkq-419[esuky]
kwzzwaqdm-rmttgjmiv-lmxizbumvb-330[mzbit]
htqtwkzq-hfsid-yjhmstqtld-593[thqds]
tinnm-qobrm-qcohwbu-difqvogwbu-740[boqim]
tipfxvezt-jtrmvexvi-ylek-nfibjyfg-659[fqnis]
lzfmdshb-atmmx-qdzbpthrhshnm-859[hmbds]
nij-mywlyn-mwupyhayl-bohn-qilembij-292[vwady]
jchipqat-hrpktcvtg-wjci-gthtpgrw-999[tcghp]
dyz-combod-oqq-mecdywob-cobfsmo-250[obcdm]
dkqjcbctfqwu-ecpfa-vgejpqnqia-310[crelp]
gsrwyqiv-kvehi-gerhc-stivexmsrw-646[slxzf]
hmsdqmzshnmzk-bgnbnkzsd-cdozqsldms-261[sdmzn]
tfejldvi-xiruv-srjbvk-uvmvcfgdvek-217[kfcmn]
wrs-vhfuhw-exqqb-dqdobvlv-751[qvbdh]
willimcpy-jfumncw-alumm-mufym-682[dsbwk]
etaqigpke-lgnnadgcp-ceswkukvkqp-856[fnltm]
diozmivodjivg-nxvqzibzm-cpio-gvwjmvojmt-603[vywzn]
oxjmxdfkd-oxyyfq-absbilmjbkq-809[bxdfj]
uqtqbizg-ozilm-moo-wxmzibqwva-564[indml]
rdchjbtg-vgpst-uadltg-gtprfjxhxixdc-323[czknl]
pybgmyargtc-amjmpdsj-njyqrga-epyqq-mncpyrgmlq-808[rzoqv]
sbqiiyvyut-sxesebqju-huiuqhsx-582[suiqb]
clxalrtyr-dnlgpyrpc-sfye-epnsyzwzrj-873[rylpc]
amlqskcp-epybc-cee-bcqgel-756[ceblp]
jrncbavmrq-pnaql-pbngvat-qrirybczrag-377[rabnq]
cebwrpgvyr-onfxrg-qrcnegzrag-221[rgcen]
forwcoqhwjs-tzcksf-rsjszcdasbh-792[scfhj]
ckgvutofkj-pkrrehkgt-jkvgxzsktz-696[wxbfz]
kzeed-uqfxynh-lwfxx-qtlnxynhx-255[xnefh]
vhkkhlbox-vtgwr-hixktmbhgl-683[hkbgl]
mrxivrexmsrep-hci-viwievgl-464[msqei]
nsyjwsfyntsfq-idj-htsyfnsrjsy-931[syfjn]
awzwhofm-ufors-qobrm-qcohwbu-aofyshwbu-272[owbfh]
ahngzyzqcntr-bzmcx-cdoknxldms-651[cnzdm]
nsyjwsfyntsfq-hfsid-wjfhvznxnynts-671[dqrws]
krxqjijamxdb-npp-uxprbcrlb-589[vutpy]
ahngzyzqcntr-azrjds-knfhrshbr-209[qnogp]
pejji-bkllsd-crszzsxq-458[xlhso]
qcffcgwjs-gqojsbusf-vibh-zcuwghwqg-480[njzmp]
ziuxioqvo-moo-amzdqkma-174[zeuba]
ujqgywfau-aflwjfslagfsd-vqw-kwjnauwk-398[wafju]
elrkdcdugrxv-fdqgb-orjlvwlfv-101[mhsyz]
kpvgtpcvkqpcn-tcddkv-qrgtcvkqpu-700[ptqjs]
jfifqxov-doxab-avb-xkxivpfp-107[xfvab]
lsyrkjkbnyec-mkxni-mykdsxq-kmaescsdsyx-978[mbynk]
ocipgvke-lgnnadgcp-wugt-vguvkpi-206[hugza]
hcd-gsqfsh-qvcqczohs-rsgwub-142[dhpmf]
lsyrkjkbnyec-oqq-ckvoc-822[ckoqy]
vhkkhlbox-utldxm-vnlmhfxk-lxkobvx-787[xklhv]
vkppo-cqwdujys-vbemuh-qdqboiyi-504[qbdio]
qjopwxha-ywjzu-zaoecj-654[jaowz]
njmjubsz-hsbef-dipdpmbuf-efqbsunfou-311[bfusd]
ktiaaqnqml-jiasmb-lmdmtwxumvb-694[yxlgt]
vrurcjah-pajmn-lqxlxujcn-fxatbqxy-511[ztgdk]
vagreangvbany-qlr-znexrgvat-325[yblnw]
lgh-kwujwl-wyy-jwsuimakalagf-996[gsubl]
apuut-xgvnndadzy-ezggtwzvi-zibdizzmdib-343[qlykv]
pxtihgbsxw-utldxm-kxlxtkva-787[xtkla]
mfklstdw-esyfwlau-usfvq-vwkayf-762[kljiy]
eqpuwogt-itcfg-hwbba-fag-fgrnqaogpv-232[gafbo]
qzoggwtwsr-rms-rsdzcmasbh-688[srgmw]
yhkpvhjapcl-ibuuf-jbzavtly-zlycpjl-955[skwvb]
gpewwmjmih-hci-gywxsqiv-wivzmgi-620[txcfj]
lahxpnwrl-npp-vjatncrwp-537[aisyo]
ckgvutofkj-hatte-aykx-zkyzotm-436[ntzbr]
iehepwnu-cnwza-lhwopey-cnwoo-ykjpwejiajp-628[wepjn]
fkqbokxqflkxi-yxphbq-obpbxoze-471[napmi]
etyyx-cxd-lzqjdshmf-261[inzys]
ftzgxmbv-utldxm-ftkdxmbgz-267[wqkjm]
jyfvnlupj-jhukf-jvhapun-klwsvftlua-903[yrgnq]
zsxyfgqj-jll-qfgtwfytwd-489[sazdc]
oxjmxdfkd-zxkav-zlxqfkd-rpbo-qbpqfkd-263[vauwt]
dsxxw-cee-bcnyprkclr-470[ghzni]
enzcntvat-fpniratre-uhag-jbexfubc-533[aentb]
froruixo-mhoobehdq-dqdobvlv-803[odbhq]
raphhxuxts-qphzti-bpcpvtbtci-115[pthbc]
jvsvymbs-jhukf-jvhapun-shivyhavyf-955[yabwx]
ykhknbqh-ywjzu-odellejc-498[ehjkl]
avw-zljyla-ihzrla-zlycpjlz-201[uvdxz]
wdjcvuvmyjpn-nxvqzibzm-cpio-hvivbzhzio-967[vizbc]
xgjougizobk-pkrrehkgt-ktmotkkxotm-150[gnkzc]
kyelcrga-aylbw-rcaflmjmew-808[wsmtg]
laffe-atyzghrk-igtje-jkyomt-462[taefg]
hqtyeqsjylu-uww-ijehqwu-608[quweh]
kzgwomvqk-kivlg-kcabwumz-amzdqkm-200[cdavq]
avw-zljyla-jhukf-shivyhavyf-305[ahvyf]
guahyncw-vumeyn-xypyfijgyhn-370[ynghu]
kwtwznct-jiasmb-zmikycqaqbqwv-564[wbjnt]
sorozgxe-mxgjk-hatte-vaxingyotm-228[enmvq]
hqtyeqsjylu-sxesebqju-bqrehqjeho-348[nxucm]
qzoggwtwsr-awzwhofm-ufors-tzcksf-rsdofhasbh-948[sfowh]
jfifqxov-doxab-mixpqfz-doxpp-qbzeklildv-185[rydoa]
gsvvswmzi-vehmsegxmzi-fyrrc-irkmriivmrk-204[imrvs]
dlhwvupglk-qlssfilhu-ylzlhyjo-721[lhsuy]
crwwv-zxkav-absbilmjbkq-679[bakvw]
xzwrmkbqtm-lgm-zmkmqdqvo-720[mqkzb]
eqnqthwn-ecpfa-eqcvkpi-qrgtcvkqpu-570[qcepk]
ftzgxmbv-utldxm-nlxk-mxlmbgz-891[mxlbg]
xqvwdeoh-gbh-ghyhorsphqw-387[hgoqw]
rdchjbtg-vgpst-uadltg-pcpanhxh-141[mtvxn]
sebehvkb-vbemuh-udwyduuhydw-140[ubdeh]
gpsxdprixkt-qphzti-stktadebtci-921[tipdk]
nij-mywlyn-dyffsvyuh-omyl-nymncha-214[obtqu]
rdggdhxkt-rpcsn-rdpixcv-bpgztixcv-843[cdgpr]
pdjqhwlf-iorzhu-uhdftxlvlwlrq-803[rtwsz]
tinnm-dzoghwq-ufogg-twbobqwbu-428[bgown]
etyyx-qzaahs-lzmzfdldms-781[cmnek]
willimcpy-dyffsvyuh-fuvilunils-448[sjytb]
dpotvnfs-hsbef-qmbtujd-hsbtt-ufdiopmphz-831[zmvga]
hdgdovmt-bmvyz-ytz-xpnojhzm-nzmqdxz-109[hzpfs]
ksodcbwnsr-qobrm-aobousasbh-324[bosar]
myvybpev-tovvilokx-kmaescsdsyx-380[vsyek]
nbhofujd-cbtlfu-tbmft-571[mkltr]
sedikcuh-whqtu-uww-jusxdebewo-764[uwedh]
jvsvymbs-jhukf-klclsvwtlua-825[jxhaq]
crwwv-mixpqfz-doxpp-jxohbqfkd-575[serbn]
fmsledevhsyw-hci-xiglrspskc-646[scehi]
xekdwvwnzkqo-ywjzu-oanreyao-576[dwrqm]
gzefmnxq-vqxxknqmz-pqbmdfyqzf-352[xuyzs]
bqvvu-zua-hkceopeyo-706[eouva]
ytu-xjhwjy-gfxpjy-btwpxmtu-151[bynhm]
npmhcargjc-hcjjwzcyl-bctcjmnkclr-886[cjhlm]
xlrypetn-dnlgpyrpc-sfye-dlwpd-119[znfjd]
ejpanjwpekjwh-ydkykhwpa-hkceopeyo-758[patzv]
lhkhszqx-fqzcd-eknvdq-rsnqzfd-287[qdzfh]
froruixo-fdqgb-orjlvwlfv-179[optcg]
jvsvymbs-jovjvshal-jbzavtly-zlycpjl-253[zcnfy]
avw-zljyla-ibuuf-ylzlhyjo-149[xtcfz]
bnmrtldq-fqzcd-bzmcx-bnzshmf-cdudknoldms-157[whdus]
sno-rdbqds-idkkxadzm-rsnqzfd-703[dsknq]
vkppo-sxesebqju-tuiywd-504[epsub]
ryexqpqhteki-zubboruqd-husuylydw-790[nimls]
vetllbybxw-lvtoxgzxk-angm-kxvxbobgz-995[xbglv]
rdchjbtg-vgpst-qphzti-gtrtxkxcv-817[mayne]
dzczkrip-xiruv-irdgrxzex-vxx-rthlzjzkzfe-503[xwhmg]
qcbgiasf-ufors-pogysh-sbuwbssfwbu-454[nshbt]
qcbgiasf-ufors-qobrm-qcohwbu-igsf-hsghwbu-142[bsfgh]
zgmfyxypbmsq-pyzzgr-amlryglkclr-392[yglmr]
myxcewob-qbkno-cmkfoxqob-rexd-vklybkdybi-146[wxnuy]
amlqskcp-epybc-afmamjyrc-pcacgtgle-418[campe]
muqfedyput-isqludwuh-xkdj-huqsgkyiyjyed-660[nbtda]
vkppo-sqdto-vydqdsydw-114[pzbiy]
ziuxioqvo-jcvvg-lmxtwgumvb-668[fnbjv]
rdchjbtg-vgpst-rwdrdapit-stepgibtci-271[tdgip]
zbytomdsvo-zvkcdsm-qbkcc-zebmrkcsxq-614[nwmol]
sbnqbhjoh-fhh-efqbsunfou-103[hjxvu]
vagreangvbany-ohaal-nanylfvf-273[zfytn]
wihmogyl-aluxy-dyffsvyuh-lyuwkocmcncih-760[efwrt]
irgyyolokj-inuiurgzk-ykxboiky-332[ikyog]
gntmfefwitzx-xhfajsljw-mzsy-fhvznxnynts-437[mkuja]
tpspahyf-nyhkl-yhiipa-zhslz-539[yzmib]
encuukhkgf-rncuvke-itcuu-nqikuvkeu-700[ukcen]
mybbycsfo-mkxni-oxqsxoobsxq-198[oxbsm]
kyelcrga-zsllw-kypicrgle-730[nvjmt]
rdggdhxkt-uadltg-stktadebtci-713[btson]
dpssptjwf-qmbtujd-hsbtt-usbjojoh-623[miqos]
tcfkqcevkxg-dcumgv-vgejpqnqia-336[cgqve]
fodvvlilhg-gbh-orjlvwlfv-699[eykml]
bxaxipgn-vgpst-eaphixr-vgphh-ejgrwphxcv-817[rsizj]
pualyuhapvuhs-ibuuf-jvuahputlua-305[hlzmu]
qekrixmg-nippcfier-gsrxemrqirx-646[xhnfm]
pdjqhwlf-plolwdub-judgh-fdqgb-ghsorbphqw-543[aiewf]
fruurvlyh-vfdyhqjhu-kxqw-fxvwrphu-vhuylfh-647[hufvr]
ftzgxmbv-utldxm-ftgtzxfxgm-891[txfgm]
htsxzrjw-lwfij-gfxpjy-btwpxmtu-359[jtwxf]
gpewwmjmih-jyddc-hci-vigimzmrk-932[imcdg]
yuxufmdk-sdmpq-qss-oazfmuzyqzf-378[fmqsu]
oxmeeuruqp-eomhqzsqd-tgzf-efadmsq-508[oxhfu]
qzoggwtwsr-xszzmpsob-hsqvbczcum-610[scyrz]
avw-zljyla-ibuuf-ayhpupun-981[ualpy]
zloolpfsb-oxyyfq-bkdfkbbofkd-471[untjs]
tvsnigxmpi-jpsaiv-erepcwmw-308[nwfcx]
jvuzbtly-nyhkl-qlssfilhu-mpuhujpun-929[ulhjn]
yknnkoera-ydkykhwpa-pnwejejc-290[setqd]
tcrjjzwzvu-gcrjkzt-xirjj-ljvi-kvjkzex-659[jzkrv]
gntmfefwitzx-hmthtqfyj-xytwflj-307[tsebr]
gspsvjyp-wgezirkiv-lyrx-pefsvexsvc-412[svepg]
ugfkmewj-yjsvw-xdgowj-jwuwanafy-944[hysdk]
sbnqbhjoh-qmbtujd-hsbtt-tijqqjoh-597[bzawy]
vetllbybxw-unggr-tgterlbl-631[mfwxo]
tipfxvezt-avccpsvre-tljkfdvi-jvimztv-139[vtice]
hvbizodx-wpiit-yzkvmohzio-603[ytsvn]
sno-rdbqds-eknvdq-nodqzshnmr-209[dnqso]
rtqlgevkng-dcumgv-rwtejcukpi-960[yhfsz]
ugjjgkanw-tmffq-ksdwk-606[bqdtn]
jyfvnlupj-jhukf-jvhapun-ylhjxbpzpapvu-981[ygxts]
kzeed-gzssd-ijufwyrjsy-203[sdejy]
chnylhuncihuf-jfumncw-alumm-uwkocmcncih-864[btkms]
qfmcusbwq-suu-ghcfous-922[btras]
bgmxkgtmbhgte-ietlmbv-zktll-xgzbgxxkbgz-215[isyml]
pwcvonofrcig-xszzmpsob-zopcfohcfm-506[avfiu]
iruzfrtkzmv-dzczkrip-xiruv-treup-tfrkzex-fgvirkzfej-633[rzfik]
mrxivrexmsrep-nippcfier-qerekiqirx-776[ombwt]
iwcjapey-ywjzu-ykwpejc-ykjpwejiajp-420[ztgqm]
joufsobujpobm-qmbtujd-hsbtt-sfbdrvjtjujpo-467[jbotu]
xst-wigvix-yrwxefpi-gerhc-hiwmkr-230[mylsd]
ytu-xjhwjy-ojqqdgjfs-xmnuunsl-931[mvbrl]
zovldbkfz-avb-jxkxdbjbkq-159[bkdjv]
qvbmzvibqwvit-ntwemz-amzdqkma-226[mqvza]
eadalsjq-yjsvw-xdgowj-ljsafafy-840[nqijl]
dszphfojd-tdbwfohfs-ivou-bdrvjtjujpo-233[ximod]
gsvvswmzi-tpewxmg-kveww-erepcwmw-308[wizmq]
ktwbhtvmbox-ktuubm-hixktmbhgl-657[hynsw]
iuruxlar-vrgyzoi-mxgyy-sgtgmksktz-488[ufytd]
nzydfxpc-rclop-awldetn-rcldd-nzyeltyxpye-379[pusht]
iehepwnu-cnwza-ynukcajey-lhwopey-cnwoo-pnwejejc-212[enwcj]
vcibutulxiom-jfumncw-alumm-ijyluncihm-214[muicl]
pyknyegle-aylbw-qyjcq-392[hzumy]
atyzghrk-xghhoz-cuxqynuv-436[cmdsl]
vcibutulxiom-jfumncw-alumm-jolwbumcha-682[dgfeu]
cybyjqho-whqtu-isqludwuh-xkdj-cqdqwucudj-946[qudch]
lejkrscv-jtrmvexvi-ylek-uvgrikdvek-893[vekri]
nvrgfezqvu-upv-jkfirxv-789[vfrue]
fnjyxwrinm-ljwmh-lxjcrwp-bjunb-173[ljyap]
gsrwyqiv-kvehi-qekrixmg-fyrrc-wepiw-360[tnixb]
gsvvswmzi-fyrrc-hitevxqirx-308[irvsx]
nglmtuex-ynssr-vahvhetmx-wxlbzg-267[xeghl]
qjopwxha-acc-ykjpwejiajp-524[gjqhn]
wrs-vhfuhw-mhoobehdq-dqdobvlv-803[pdlvm]
otzkxtgzoutgr-inuiurgzk-uvkxgzouty-878[modya]
gvcskirmg-fyrrc-xvemrmrk-568[rmcgk]
xqvwdeoh-hjj-ghsduwphqw-231[hwdjq]
sbejpbdujwf-cvooz-nbslfujoh-441[nwsha]
zixppfcfba-oxyyfq-ixyloxqlov-315[xfoyi]
bdavqofuxq-rxaiqd-pqhqxabyqzf-846[yzpfi]
vhglnfxk-zktwx-vetllbybxw-ktuubm-hixktmbhgl-501[bkltx]
tinnm-qobrm-qcohwbu-zcuwghwqg-584[ejnps]
rmn-qcapcr-kyelcrga-cee-bcqgel-730[cerag]
apwmeclga-djmucp-ylyjwqgq-756[acgjl]
pybgmyargtc-amlqskcp-epybc-zsllw-pcacgtgle-392[cglpa]
jxdkbqfz-avb-tlohpelm-783[blade]
npmhcargjc-bwc-pcqcypaf-808[phjds]
rdchjbtg-vgpst-qphzti-itrwcdadvn-843[zueyn]
votubcmf-qmbtujd-hsbtt-sfdfjwjoh-259[tbfjd]
ujoon-gpqqxi-advxhixrh-661[mlyen]
ykjoqian-cnwza-lhwopey-cnwoo-iwjwcaiajp-576[waoci]
gpewwmjmih-wgezirkiv-lyrx-xvemrmrk-386[mreiw]
gzefmnxq-ngzzk-pqhqxabyqzf-352[drqzm]
nwilwcejc-nwxxep-oanreyao-394[lqxwm]
hdgdovmt-bmvyz-zbb-gjbdnodxn-785[bdgmn]
gsrwyqiv-kvehi-aietsrmdih-gerhc-gsexmrk-viwievgl-672[bsytl]
rdchjbtg-vgpst-tvv-rdcipxcbtci-999[ctvbd]
joufsobujpobm-fhh-tbmft-389[mnyql]
fnjyxwrinm-mhn-anbnjalq-147[nmbzl]
wfummczcyx-yaa-guhuaygyhn-578[yaucg]
qfkkj-mfyyj-dpcgtnpd-457[dfjkp]
ncjzrpytn-mfyyj-wzrtdetnd-509[qnwdl]
sno-rdbqds-bnknqetk-idkkxadzm-bnmszhmldms-365[dkmns]
wkqxodsm-cmkfoxqob-rexd-vyqscdsmc-380[cdmoq]
dpssptjwf-tdbwfohfs-ivou-tbmft-233[lbdah]
dpssptjwf-dipdpmbuf-xpsltipq-285[pdsfi]
qyujihctyx-wuhxs-wiuncha-jolwbumcha-214[zlbuy]
oxmeeuruqp-pkq-iadwetab-716[eapqu]
wfummczcyx-ohmnuvfy-xsy-womnigyl-mylpcwy-214[ymcwf]
xmtjbzidx-ytz-ncdkkdib-525[wmfvr]
qekrixmg-jpsaiv-xiglrspskc-204[dwvst]
kwtwznct-zijjqb-mvoqvmmzqvo-356[qmnjk]
ltpedcxots-ytaanqtpc-rdcipxcbtci-999[lkmsv]
zovldbkfz-yrkkv-abmxoqjbkq-913[kboqv]
yhkpvhjapcl-wshzapj-nyhzz-jvuahputlua-279[cnmzy]
pdjqhwlf-edvnhw-whfkqrorjb-257[unmsk]
rgllk-bdavqofuxq-rxaiqd-iadwetab-664[mkeil]
wdjcvuvmyjpn-nxvqzibzm-cpio-nzmqdxzn-343[nzmvc]
xzwrmkbqtm-kpwkwtibm-nqvivkqvo-486[dcwog]
rdchjbtg-vgpst-rpcsn-rdpixcv-hidgpvt-765[stnfw]
buzahisl-lnn-thuhnltlua-955[oschg]
enzcntvat-ohaal-bcrengvbaf-793[anbce]
eqpuwogt-itcfg-uecxgpigt-jwpv-hkpcpekpi-362[pgcei]
avw-zljyla-qlssfilhu-dvyrzovw-175[lvasw]
iuruxlar-xgsvgmotm-inuiurgzk-zxgototm-982[mlnut]
tyepcyletzylw-prr-opalcexpye-925[boymz]
hqcfqwydw-rqiauj-huiuqhsx-556[abndo]
tcrjjzwzvu-vxx-kirzezex-841[zxejr]
qspkfdujmf-sbccju-sfdfjwjoh-285[ktqja]
vcibutulxiom-wbiwifuny-guleyncha-682[uzxms]
ejpanjwpekjwh-bqvvu-ywjzu-nayaerejc-628[jeawn]
kwvacumz-ozilm-kivlg-lmdmtwxumvb-330[mlvik]
kzgwomvqk-kwvacumz-ozilm-zijjqb-bziqvqvo-460[zqvik]
wfintfhynaj-wfggny-qfgtwfytwd-775[fwgnt]
tcfkqcevkxg-hnqygt-vgejpqnqia-622[qgcek]
yrwxefpi-nippcfier-wepiw-386[ipewf]
xjinphzm-bmvyz-zbb-omvdidib-109[bimzd]
qlm-pbzobq-ciltbo-abmilvjbkq-107[jvsxc]
tfcfiwlc-gcrjkzt-xirjj-tfekrzedvek-295[wjhqa]
nchhg-moo-lmdmtwxumvb-382[mhobc]
bknsykmdsfo-lkcuod-myxdksxwoxd-692[azknp]
jxdkbqfz-yrkkv-qoxfkfkd-211[kfdqx]
jlidywncfy-dyffsvyuh-lyuwkocmcncih-344[ycfdh]
iuruxlar-igtje-iugzotm-lotgtiotm-358[tigou]
foadouwbu-gqojsbusf-vibh-qighcasf-gsfjwqs-116[sfbgo]
ucynmlgxcb-aylbw-nspafyqgle-288[fswap]
amppmqgtc-aylbw-qfgnngle-808[galmn]
kfg-jvtivk-irsszk-jrcvj-659[jkvir]
xjinphzm-bmvyz-ytz-yzqzgjkhzio-681[ubzyj]
plolwdub-judgh-fdqgb-ilqdqflqj-491[dlqbf]
crwwv-yrkkv-bkdfkbbofkd-783[inhxy]
nuatmlmdpage-otaoaxmfq-pqhqxabyqzf-612[qvdxy]
pualyuhapvuhs-ibuuf-jbzavtly-zlycpjl-435[znegj]
eza-dpncpe-clmmte-lylwjdtd-509[delcm]
tfejldvi-xiruv-irsszk-uvgcfpdvek-659[rvaql]
pybgmyargtc-aylbw-qcptgacq-600[oscut]
kdijqrbu-vbemuh-qdqboiyi-972[biqdu]
irgyyolokj-vrgyzoi-mxgyy-jkvruesktz-644[ygkor]
rgllk-uzfqdzmfuazmx-otaoaxmfq-oazfmuzyqzf-560[zfamo]
iqmbazulqp-eomhqzsqd-tgzf-fqotzaxask-378[qmsxo]
oqnidbshkd-atmmx-kzanqzsnqx-703[vztcl]
vjpwncrl-lqxlxujcn-mnyjacvnwc-615[cnjlv]
bkzrrhehdc-cxd-bnmszhmldms-807[dhmbc]
kgjgrypw-epybc-zyqicr-bcnyprkclr-704[mzsty]
apuut-ezggtwzvi-yzqzgjkhzio-265[pmlri]
rflsjynh-hfsid-htfynsl-qtlnxynhx-567[cqbst]
zilqwikbqdm-lgm-nqvivkqvo-330[wmxzv]
lahxpnwrl-ouxfna-anlnrerwp-355[nzkvm]
veqtekmrk-ikk-tyvglewmrk-386[kemrt]
sgmtkzoi-pkrrehkgt-rumoyzoiy-514[zytsw]
yflexwxoalrp-oxyyfq-mrozexpfkd-341[xfoye]
bwx-amkzmb-kivlg-kwibqvo-xczkpiaqvo-434[lkqrz]
clxalrtyr-nsznzwlep-opdtry-145[nczlj]
bjfutsneji-jll-wjhjnansl-125[szrni]
bcfhvdczs-cpxsqh-ghcfous-324[chsfb]
aflwjfslagfsd-kusnwfywj-zmfl-ugflsafewfl-216[flswa]
gcfcnuls-aluxy-wuhxs-jolwbumcha-578[uclah]
pyknyegle-pybgmyargtc-aylbw-qfgnngle-470[gyeln]
oazegyqd-sdmpq-gzefmnxq-qss-geqd-fqefuzs-508[qesdf]
xjmmjndqz-mvwwdo-yzkvmohzio-551[ypzog]
zekvierkzferc-treup-uvgcfpdvek-789[stzno]
ejpanjwpekjwh-xqjju-odellejc-576[enmtc]
ltpedcxots-tvv-sthxvc-115[skptq]
jshzzpmplk-yhiipa-zavyhnl-981[tluns]
mvhkvbdib-agjrzm-yzqzgjkhzio-629[wcyms]
yhwooebeaz-acc-paydjkhkcu-316[acehk]
gzefmnxq-otaoaxmfq-emxqe-326[emqxa]
frqvxphu-judgh-udeelw-pdqdjhphqw-335[orhsy]
frqvxphu-judgh-gbh-uhfhlylqj-153[hufgj]
cjpibabsepvt-cvooz-fohjoffsjoh-623[emnjh]
yflexwxoalrp-zxkav-zlxqfkd-xkxivpfp-783[xfklp]
froruixo-hjj-zrunvkrs-777[synml]
jvuzbtly-nyhkl-jhukf-jvhapun-jvuahputlua-929[ndjmy]
kwzzwaqdm-kivlg-kwibqvo-nqvivkqvo-460[yzmsr]
ktiaaqnqml-zijjqb-apqxxqvo-798[qaijx]
hqfxxnknji-hfsid-wjhjnansl-931[nhjfi]
xjmmjndqz-wpiit-vxlpdndodji-941[dijmn]
ksodcbwnsr-rms-cdsfohwcbg-896[xvuol]
eza-dpncpe-tyepcyletzylw-nsznzwlep-nzyeltyxpye-847[xydvf]
emixwvqhml-jiasmb-ivitgaqa-928[iamqv]
etyyx-idkkxadzm-ehmzmbhmf-313[josnm]
lhkhszqx-fqzcd-bgnbnkzsd-qdzbpthrhshnm-911[bqzra]
dzczkrip-xiruv-upv-wzeretzex-945[icynm]
wihmogyl-aluxy-mwupyhayl-bohn-lymyulwb-266[nuraz]
kmjezxodgz-xcjxjgvoz-zibdizzmdib-239[yzkgs]
hqfxxnknji-wfggny-hzxytrjw-xjwanhj-593[jnxhw]
oknkvcta-itcfg-eqpuwogt-itcfg-ecpfa-eqcvkpi-ucngu-986[cgtef]
ykhknbqh-oywrajcan-dqjp-qoan-paopejc-810[ondma]
nwilwcejc-ywjzu-ykwpejc-naoawnyd-238[zjwsh]
dzczkrip-xiruv-sleep-rercpjzj-451[wykfr]
gpewwmjmih-nippcfier-qerekiqirx-178[ieprm]
bqvvu-oywrajcan-dqjp-wjwhuoeo-420[jowaq]
kzgwomvqk-xtiabqk-oziaa-bziqvqvo-148[qaiko]
fab-eqodqf-eomhqzsqd-tgzf-fdmuzuzs-820[fqzde]
lzfmdshb-dff-sqzhmhmf-755[fhmds]
bpvctixr-gpqqxi-sthxvc-297[xcipq]
xjgjmapg-kmjezxodgz-xcjxjgvoz-vivgtndn-915[jhigl]
pbybeshy-qlr-bcrengvbaf-715[jwrxz]
uqtqbizg-ozilm-kivlg-tijwzibwzg-902[lrepd]
excdklvo-zbytomdsvo-zvkcdsm-qbkcc-crszzsxq-614[rpnqm]
ucynmlgxcb-njyqrga-epyqq-kylyeckclr-418[yclqe]
hqtyeqsjylu-sxesebqju-mehaixef-556[eqshj]
chnylhuncihuf-wifilzof-jfumncw-alumm-uwkocmcncih-734[cufhi]
wyvqljapsl-ihzrla-zhslz-669[ncmjb]
jlidywncfy-wifilzof-vohhs-omyl-nymncha-578[yfhil]
jfifqxov-doxab-bdd-abpfdk-913[dbfao]
xjgjmapg-wpiit-gjbdnodxn-551[zvmhq]
dkqjcbctfqwu-tcfkqcevkxg-ecpfa-eqcvkpi-tgegkxkpi-414[ckeqf]
tmrszakd-idkkxadzm-lzmzfdldms-365[hwgsv]
nglmtuex-vtgwr-vhtmbgz-mxvaghehzr-215[tsfmz]
uiovmbqk-rmttgjmiv-bziqvqvo-252[vimqb]
iehepwnu-cnwza-fahhuxawj-oanreyao-680[mavot]
tvsnigxmpi-glsgspexi-gsrxemrqirx-100[xwqld]
qcbgiasf-ufors-rms-aobousasbh-818[sabof]
sgmtkzoi-hatte-xkykgxin-722[ktgix]
nglmtuex-xzz-tvjnblbmbhg-787[kopjm]
ikhcxvmbex-vtgwr-xgzbgxxkbgz-683[ncalt]
tbxmlkfwba-molgbzqfib-zxkav-pbosfzbp-419[bfzak]
gspsvjyp-fmsledevhsyw-tpewxmg-kveww-eguymwmxmsr-568[nihyt]
gvcskirmg-gerhc-jmrergmrk-672[lrzta]
xmrrq-uzgugdslw-jwsuimakalagf-502[agulm]
shoewudys-hqrryj-tulubefcudj-530[ixkdy]
mrxivrexmsrep-hci-wxsveki-230[miwqn]
tmrszakd-bgnbnkzsd-otqbgzrhmf-599[qjfny]
rwcnawjcrxwju-kdwwh-fxatbqxy-355[jezwy]
hjgbwuladw-tmffq-ogjckzgh-528[gnlzr]
lxuxaodu-lxwbdvna-pajmn-ajkkrc-dbna-cnbcrwp-511[umnsy]
nsyjwsfyntsfq-idj-jslnsjjwnsl-619[ywpco]
ubhatstkwhnl-ktuubm-mktbgbgz-761[btkug]
lhkhszqx-fqzcd-bgnbnkzsd-dmfhmddqhmf-781[bdnsk]
vehmsegxmzi-ikk-vieguymwmxmsr-854[pnkle]
udskkaxawv-jsttal-esfsywewfl-528[sawef]
jxdkbqfz-avb-cfkxkzfkd-887[kfbdx]
jyddc-jpsaiv-gsrxemrqirx-386[rdijs]
tagzsrsjvgmk-wyy-umklgewj-kwjnauw-606[wgjka]
wyvqljapsl-ihzrla-huhsfzpz-409[znhcm]
jvuzbtly-nyhkl-zjhclunly-obua-jbzavtly-zlycpjl-331[lyjzb]
gvaaz-sbejpbdujwf-gmpxfs-vtfs-uftujoh-467[tsogk]
aczupnetwp-nsznzwlep-cplnbftdtetzy-535[nptze]
gifavtkzcv-vxx-jrcvj-815[vcjxa]
ytu-xjhwjy-uqfxynh-lwfxx-uzwhmfxnsl-255[yzalu]
eqttqukxg-ecpfa-eqcvkpi-cpcnauku-440[zotsy]
ncjzrpytn-nlyoj-nzletyr-nzyeltyxpye-639[zhytj]
bgmxkgtmbhgte-lvtoxgzxk-angm-phkdlahi-605[nyzfq]
ytu-xjhwjy-xhfajsljw-mzsy-qfgtwfytwd-801[rewpl]
gpsxdprixkt-rwdrdapit-prfjxhxixdc-349[qrskt]
ojk-nzxmzo-kgvnodx-bmvnn-hvivbzhzio-629[cvkyu]
ktwbhtvmbox-unggr-ybgtgvbgz-267[nbjvs]
wdjcvuvmyjpn-nxvqzibzm-cpio-kpmxcvndib-109[tndsr]
froruixo-gbh-zrunvkrs-439[roubf]
oazegyqd-sdmpq-otaoaxmfq-fdmuzuzs-352[admoq]
fruurvlyh-fdqgb-sxufkdvlqj-699[mynfj]
votubcmf-qmbtujd-hsbtt-efqmpznfou-441[wznfd]
emixwvqhml-akidmvomz-pcvb-abwziom-928[gwxum]
qcbgiasf-ufors-foppwh-sbuwbssfwbu-506[sbfuw]
mrxivrexmsrep-fyrrc-pskmwxmgw-100[pmxwc]
nsyjwsfyntsfq-uqfxynh-lwfxx-uzwhmfxnsl-125[bwtze]
kwtwznct-kpwkwtibm-nqvivkqvo-928[kwtvi]
lahxpnwrl-ouxfna-vjwjpnvnwc-953[nwajl]
ydjuhdqjyedqb-hqrryj-ixyffydw-114[cwzyi]
rgndvtcxr-snt-igpxcxcv-661[uqvtr]
bgmxkgtmbhgte-pxtihgbsxw-vahvhetmx-tvjnblbmbhg-371[bghtm]
pwcvonofrcig-tzcksf-fsoqeiwgwhwcb-428[swzyd]
yaxsnlcrun-ajkkrc-bqryyrwp-641[ycnxl]
jef-iushuj-hqrryj-bqrehqjeho-738[zaytn]
bdavqofuxq-bxmefuo-sdmee-xmnadmfadk-352[dmaef]
qcffcgwjs-qobrm-rsdzcmasbh-350[mezyn]
jxdkbqfz-yxphbq-tlohpelm-289[wfvbo]
hdgdovmt-bmvyz-wvnfzo-yzndbi-915[dvzbm]
hqcfqwydw-sxesebqju-vydqdsydw-712[smhbn]
qfmcusbwq-qobrm-qcohwbu-zcuwghwqg-636[qwbcu]
jvsvymbs-msvdly-jvuahputlua-955[vsuaj]
hqcfqwydw-rkddo-huiuqhsx-218[dhquw]
shoewudys-uww-jhqydydw-816[jysaf]
dyz-combod-zvkcdsm-qbkcc-dbksxsxq-562[cdbks]
tcrjjzwzvu-treup-tfrkzex-rercpjzj-217[fewxh]
pynffvsvrq-cynfgvp-tenff-grpuabybtl-481[fnpvy]
yhtwhnpun-jyfvnlupj-wshzapj-nyhzz-huhsfzpz-773[zyogh]
bnqqnrhud-bzmcx-lzqjdshmf-443[jmvdf]
yrwxefpi-glsgspexi-hitevxqirx-282[bzvyj]
iuxxuyobk-hgyqkz-zkinturume-540[ukixy]
gpsxdprixkt-rpcsn-prfjxhxixdc-271[ewstq]
vrurcjah-pajmn-ouxfna-anlnrerwp-615[qsfhg]
mrxivrexmsrep-tpewxmg-kveww-hiwmkr-854[votlz]
irgyyolokj-ixeumktoi-jek-rghuxgzuxe-904[egiko]
dsxxw-zyqicr-pcacgtgle-912[swjtv]
yhkpvhjapcl-kfl-ylhjxbpzpapvu-955[phlaj]
gsrwyqiv-kvehi-tpewxmg-kveww-hitevxqirx-724[mnsyt]
muqfedyput-rkddo-vydqdsydw-998[mlqhr]
ykhknbqh-ywjzu-iwngapejc-628[hjknw]
uwtojhynqj-gzssd-ywfnsnsl-619[snjwy]
emixwvqhml-kpwkwtibm-zmkmqdqvo-148[mkqwi]
upv-uvjzxe-347[uvejp]
cqwdujys-ryexqpqhteki-rkddo-skijecuh-iuhlysu-738[uyvln]
fydelmwp-nsznzwlep-dezclrp-379[elpzd]
yknnkoera-fahhuxawj-wymqeoepekj-914[kwucf]
hwbba-vqr-ugetgv-lgnnadgcp-ugtxkegu-908[guabe]
xqvwdeoh-ixccb-udeelw-fxvwrphu-vhuylfh-803[heuvw]
xekdwvwnzkqo-acc-iwjwcaiajp-784[mswzt]
rdchjbtg-vgpst-qphzti-jhtg-ithixcv-609[thgic]
cqwdujys-vbemuh-iqbui-608[ubiqc]
htsxzrjw-lwfij-gfxpjy-rfsfljrjsy-489[jfrsl]
rtqlgevkng-dcumgv-wugt-vguvkpi-362[gvukt]
oxaflxzqfsb-mixpqfz-doxpp-zrpqljbo-pbosfzb-185[pbfox]
lqwhuqdwlrqdo-hjj-sxufkdvlqj-569[qdjlh]
wihmogyl-aluxy-wuhxs-wiuncha-nywbhifias-994[ztysn]
hwbba-oknkvcta-itcfg-dwppa-tgugctej-492[tacgb]
mybbycsfo-oqq-wkxkqowoxd-120[oqbkw]
tyepcyletzylw-dnlgpyrpc-sfye-xlcvpetyr-249[xawqz]
hjgbwuladw-tmffq-ugflsafewfl-684[flwag]
sbnqbhjoh-kfmmzcfbo-bobmztjt-493[jnism]
ykjoqian-cnwza-lhwopey-cnwoo-zarahkliajp-602[ihrlb]
pynffvsvrq-fpniratre-uhag-erfrnepu-585[kwurl]
vetllbybxw-utldxm-mxvaghehzr-787[lxbeh]
ktfitzbgz-lvtoxgzxk-angm-nlxk-mxlmbgz-787[gxzkl]
emixwvqhml-rmttgjmiv-tijwzibwzg-876[tszyl]
esyfwlau-udskkaxawv-hdsklau-yjskk-ksdwk-658[ksadu]
jsvagsulanw-tskcwl-jwuwanafy-216[oklsn]
wfummczcyx-mwupyhayl-bohn-xymcah-552[xcazi]
tbxmlkfwba-oxyyfq-xkxivpfp-705[xfbkp]
ytu-xjhwjy-rflsjynh-uqfxynh-lwfxx-ijuqtdrjsy-853[ztoub]
cvabijtm-jiasmb-tijwzibwzg-564[qatln]
jef-iushuj-uww-ixyffydw-816[ptbea]
zntargvp-fpniratre-uhag-svanapvat-715[dnmgz]
mvydjvxodqz-zbb-jkzmvodjin-343[fxmnr]
xlrypetn-nlyoj-dlwpd-873[ldnpy]
jrncbavmrq-pnaql-pbngvat-ybtvfgvpf-117[hgwjo]
guahyncw-dyffsvyuh-uhufsmcm-786[ufhyc]
ide-htrgti-rpcsn-rdpixcv-igpxcxcv-115[ciprx]
nwilwcejc-ydkykhwpa-qoan-paopejc-628[acpwe]
udpsdjlqj-sodvwlf-judvv-oderudwrub-673[dujvl]
xekdwvwnzkqo-lhwopey-cnwoo-zarahkliajp-966[zdklq]
ixccb-iorzhu-xvhu-whvwlqj-803[emzxn]
gpbepvxcv-gpqqxi-prfjxhxixdc-297[utzsx]
zntargvp-wryylorna-fuvccvat-871[dxepl]
jvyyvzpcl-ipvohghykvbz-yhiipa-yljlpcpun-149[aupdo]
lzfmdshb-okzrshb-fqzrr-lzmzfdldms-651[ndpcm]
krxqjijamxdb-bljenwpna-qdwc-mnyuxhvnwc-381[njwxa]
apuut-xviyt-yzkvmohzio-395[iotuv]
rzvkjiduzy-kgvnodx-bmvnn-mzxzdqdib-187[tayqb]
pkl-oaynap-xwogap-owhao-888[zlbay]
ynukcajey-nwxxep-paydjkhkcu-394[kyace]
fnjyxwrinm-ouxfna-mnbrpw-771[nfmrw]
lejkrscv-tfcfiwlc-irsszk-nfibjyfg-399[fcisj]
dwbcjkun-ljwmh-anlnrerwp-589[nwjlr]
hdgdovmt-bmvyz-ojk-nzxmzo-wpiit-omvdidib-291[nmqdz]
nwzekwypera-xwogap-hwxknwpknu-810[wknpa]
htwwtxnaj-ojqqdgjfs-wjxjfwhm-567[jwfhq]
ynukcajey-zua-lqnydwoejc-420[xqrgw]
pelbtravp-cynfgvp-tenff-npdhvfvgvba-559[vfpna]
ibghopzs-foppwh-aobousasbh-142[ranfu]
qxdwpopgsdjh-tvv-rdcipxcbtci-713[wscpi]
vkppo-sbqiiyvyut-vbemuh-husuylydw-452[uyvbh]
lqwhuqdwlrqdo-vfdyhqjhu-kxqw-orjlvwlfv-699[qlwdh]
tcrjjzwzvu-gcrjkzt-xirjj-vexzevvizex-113[gusom]
wsvsdkbi-qbkno-lexxi-kmaescsdsyx-614[mnoyt]
kmjezxodgz-xviyt-xjvodib-jkzmvodjin-681[jdiov]
kgjgrypw-epybc-kyelcrga-njyqrga-epyqq-asqrmkcp-qcptgac-990[cgpqy]
tagzsrsjvgmk-hdsklau-yjskk-xafsfuafy-736[sakfg]
iwcjapey-xqjju-wymqeoepekj-472[wshmz]
ckgvutofkj-xghhoz-zxgototm-618[dapcq]
excdklvo-bkllsd-zebmrkcsxq-692[sdyzv]
ugdgjxmd-jsttal-ogjckzgh-320[nxksp]
dmbttjgjfe-gmpxfs-fohjoffsjoh-675[emswj]
esyfwlau-wyy-kwjnauwk-762[zfkst]
htsxzrjw-lwfij-gzssd-xytwflj-359[jswfl]
bnmrtldq-fqzcd-bzmcx-bnzshmf-cdozqsldms-157[rchap]
enqvbnpgvir-wryylorna-hfre-grfgvat-247[rgnva]
rzvkjiduzy-mvwwdo-hvivbzhzio-629[vzidh]
rgllk-omzpk-ymzmsqyqzf-742[ytshk]
wyvqljapsl-kfl-shivyhavyf-175[lvyaf]
zloolpfsb-molgbzqfib-oxyyfq-absbilmjbkq-731[rdypn]
wlqqp-srjbvk-glityrjzex-399[jlqrb]
foadouwbu-qvcqczohs-hsqvbczcum-402[coqub]
gsrwyqiv-kvehi-wgezirkiv-lyrx-wlmttmrk-334[dxqri]
apwmeclga-afmamjyrc-amlryglkclr-470[dvjwq]
amjmpdsj-aylbw-amyrgle-bcqgel-756[fmsjn]
pbybeshy-sybjre-ynobengbel-507[beyns]
jchipqat-rpcsn-hwxeexcv-505[yozns]
excdklvo-nio-bomosfsxq-458[bhmlt]
oaxadrgx-ngzzk-ymzmsqyqzf-534[eqjfa]
ajyqqgdgcb-zsllw-umpiqfmn-262[sdmlk]
wkqxodsm-lexxi-cobfsmoc-510[tpnbi]
tcfkqcevkxg-ecpfa-eqcvkpi-octmgvkpi-986[ckepv]
pbybeshy-onfxrg-qrcyblzrag-845[bryga]
rdggdhxkt-hrpktcvtg-wjci-gtrtxkxcv-479[tgckr]
willimcpy-jfumncw-alumm-lywycpcha-500[utskn]
qyujihctyx-luxciuwncpy-yaa-mbcjjcha-942[tzusp]
pelbtravp-pnaql-fgbentr-585[pabel]
jef-iushuj-vbemuh-tuiywd-140[jvndh]
rwcnawjcrxwju-kjbtnc-mnyuxhvnwc-355[cnwjr]
dszphfojd-tdbwfohfs-ivou-ufdiopmphz-285[dfohp]
uqtqbizg-ozilm-kivlg-kwibqvo-ewzsapwx-538[iqwzb]
njmjubsz-hsbef-cbtlfu-bobmztjt-649[dtsjy]
zlilocri-zxkav-zlxqfkd-pefmmfkd-887[zijtp]
iwcjapey-ydkykhwpa-oanreyao-576[jfnpy]
pybgmyargtc-zgmfyxypbmsq-zyqicr-mncpyrgmlq-600[gzfir]
houngfgxjuay-yigbktmkx-natz-ygrky-228[gykan]
lnkfaypeha-zua-odellejc-680[gmnlj]
vhglnfxk-zktwx-cxeeruxtg-kxlxtkva-319[xkteg]
wfintfhynaj-gzssd-qfgtwfytwd-541[mztfn]
amlqskcp-epybc-aylbw-nspafyqgle-886[alpyb]
iuruxlar-pkrrehkgt-ygrky-774[tsflj]
xtwtelcj-rclop-clmmte-opgpwzaxpye-145[tskxr]
bqvvu-ywjzu-ykwpejc-hwxknwpknu-862[wkujn]
enqvbnpgvir-zntargvp-cynfgvp-tenff-ybtvfgvpf-585[vfngp]
rzvkjiduzy-xviyt-xjvodib-xjiovdihzio-967[pjzrk]
njmjubsz-hsbef-sbnqbhjoh-cvooz-pqfsbujpot-623[bjosh]
zixppfcfba-mixpqfz-doxpp-zlkqxfkjbkq-653[pfxkq]
hdgdovmt-bmvyz-kgvnodx-bmvnn-rjmfncjk-239[rpovu]
hdgdovmt-bmvyz-xviyt-yzndbi-109[pdslu]
xjinphzm-bmvyz-kgvnodx-bmvnn-vivgtndn-525[nvmbd]
eqnqthwn-eqttqukxg-hnqygt-rwtejcukpi-544[qteng]
zvyvgnel-tenqr-sybjre-grpuabybtl-793[lyfvq]
tcorcikpi-ecpfa-eqcvkpi-ugtxkegu-596[teibn]
nwzekwypera-fahhuxawj-lqnydwoejc-810[mszph]
mhi-lxvkxm-cxeeruxtg-kxvxbobgz-605[palbn]
wfummczcyx-jlidywncfy-vumeyn-mylpcwym-838[ijqrb]
fhezusjybu-zubboruqd-cqdqwucudj-374[ubdqc]
kgjgrypw-epybc-aylbw-amyrgle-qcptgacq-314[mjlic]
tcfkqcevkxg-dwppa-ucngu-362[trzmu]
oazegyqd-sdmpq-gzefmnxq-eomhqzsqd-tgzf-qzsuzqqduzs-560[dmrkq]
jlidywncfy-vohhs-xypyfijgyhn-110[yhfij]
ftzgxmbv-lvtoxgzxk-angm-hixktmbhgl-163[gxmtb]
xgjougizobk-vrgyzoi-mxgyy-cuxqynuv-644[yntxg]
yknnkoera-lhwopey-cnwoo-odellejc-524[qypjt]
eza-dpncpe-upwwjmply-zapcletzyd-769[pezac]
cvabijtm-ntwemz-zmikycqaqbqwv-564[mqabc]
irgyyolokj-kmm-rghuxgzuxe-410[gkmor]
ahngzyzqcntr-idkkxadzm-sdbgmnknfx-807[ndkza]
surmhfwloh-fkrfrodwh-pdqdjhphqw-829[myflz]
elrkdcdugrxv-edvnhw-xvhu-whvwlqj-387[mhtue]
sbejpbdujwf-xfbqpojafe-ezf-mphjtujdt-155[tqslv]
shoewudys-rkddo-tuiywd-686[sntpq]
qcffcgwjs-dzoghwq-ufogg-igsf-hsghwbu-350[psevy]
ibghopzs-qobrm-qcohwbu-zopcfohcfm-740[obchf]
atyzghrk-vrgyzoi-mxgyy-sgtgmksktz-150[tjpiv]
luxciuwncpy-dyffsvyuh-nluchcha-994[cuhyf]
vcibutulxiom-xsy-uwkocmcncih-214[ciumo]
vkppo-rkddo-cqdqwucudj-140[dckop]
ftzgxmbv-vtgwr-kxlxtkva-163[tvxgk]
jlidywncfy-vumeyn-womnigyl-mylpcwy-682[ylmnw]
mtzslklcozfd-nlyoj-nzletyr-qtylyntyr-639[xswlz]
ixccb-fkrfrodwh-fxvwrphu-vhuylfh-283[fhrcu]
ykjoqian-cnwza-oywrajcan-dqjp-qoan-paopejc-212[tsrfk]
yhkpvhjapcl-yhiipa-jbzavtly-zlycpjl-617[ftaes]
qmpmxevc-kvehi-wgezirkiv-lyrx-xvemrmrk-516[emrvi]
fmsledevhsyw-veffmx-wivzmgiw-204[efmvw]
zlkprjbo-doxab-zxkav-zlxqfkd-obxznrfpfqflk-237[rqgnd]
ksodcbwnsr-qobrm-qcohwbu-aobousasbh-142[bosac]
yrwxefpi-glsgspexi-qevoixmrk-828[atyoc]
dlhwvupglk-wshzapj-nyhzz-klzpnu-877[fbewu]
bjfutsneji-idj-hzxytrjw-xjwanhj-359[wyrxt]
zsxyfgqj-bjfutsneji-hfsid-htfynsl-zxjw-yjxynsl-229[jsfyn]
pualyuhapvuhs-ibuuf-klwsvftlua-643[ualfh]
yknnkoera-ydkykhwpa-klanwpekjo-420[kanye]
iehepwnu-cnwza-ydkykhwpa-zaoecj-420[pozyv]
ftzgxmbv-ktuubm-mxvaghehzr-605[mbght]
gntmfefwitzx-xhfajsljw-mzsy-ywfnsnsl-983[woefn]
xmtjbzidx-wpiit-ncdkkdib-863[idbkt]
ktiaaqnqml-uqtqbizg-ozilm-kpwkwtibm-ivitgaqa-850[ywdzl]
dyz-combod-sxdobxkdsyxkv-mkxni-wkxkqowoxd-224[isamh]
nsyjwsfyntsfq-rnqnyfwd-lwfij-kqtbjw-uzwhmfxnsl-151[roxtn]
ykjoqian-cnwza-xqjju-nayaerejc-524[yvwax]
ixccb-iorzhu-ilqdqflqj-569[fcjsy]
ovbunmneqbhf-ohaal-qrfvta-819[abfhn]
glrcplyrgmlyj-zyqicr-pcyaosgqgrgml-626[glryc]
ajyqqgdgcb-bwc-ylyjwqgq-262[qgybc]
fhezusjybu-rkddo-bewyijysi-608[ybdei]
aflwjfslagfsd-kusnwfywj-zmfl-xafsfuafy-632[wltdc]
iuxxuyobk-lruckx-vaxingyotm-644[xuiko]
jyfvnlupj-kfl-thyrlapun-773[lfjnp]
eqpuwogt-itcfg-tcfkqcevkxg-dcumgv-qrgtcvkqpu-934[ionzm]
hqcfqwydw-sqdto-seqjydw-bqrehqjeho-998[qdehw]
xst-wigvix-ikk-wivzmgiw-724[rtszg]
tinnm-pibbm-zcuwghwqg-766[mfgbn]
vkppo-rqiauj-cqdqwucudj-348[qucdj]
bnmrtldq-fqzcd-ahngzyzqcntr-atmmx-dmfhmddqhmf-989[mdqfh]
vkrhzxgbv-unggr-tgterlbl-319[tsrkm]
wihmogyl-aluxy-wuhxs-uhufsmcm-526[uhmls]
nzydfxpc-rclop-awldetn-rcldd-pyrtyppctyr-951[pcdry]
egdytrixat-eaphixr-vgphh-pcpanhxh-921[hpaxe]
nwzekwypera-lhwopey-cnwoo-hkceopeyo-654[eowpy]
zovldbkfz-zlkprjbo-doxab-zxkav-ixyloxqlov-367[olxzb]
lgh-kwujwl-xmrrq-kusnwfywj-zmfl-hmjuzskafy-372[gmait]
ipvohghykvbz-jhukf-ylzlhyjo-357[awkcb]
dmybmsuzs-otaoaxmfq-dqmocgueufuaz-976[muaod]
zbytomdsvo-bkllsd-cdybkqo-796[eufzt]
sbqiiyvyut-fbqijys-whqii-iqbui-998[ebfqa]
qyujihctyx-wbiwifuny-guleyncha-838[ejitg]
ikhcxvmbex-unggr-kxvxbobgz-683[ejuzo]
hafgnoyr-ohaal-jbexfubc-923[bjmzn]
shmml-wryylorna-genvavat-455[almnr]
yknnkoera-xqjju-klanwpekjo-420[empdo]
upq-tfdsfu-kfmmzcfbo-efwfmpqnfou-415[nmfed]
xcitgcpixdcpa-rdchjbtg-vgpst-hrpktcvtg-wjci-stepgibtci-557[ctgip]
fydelmwp-nsznzwlep-opgpwzaxpye-769[pewzl]
glrcplyrgmlyj-cee-pcqcypaf-548[ymzlj]
xmtjbzidx-wpiit-xjiovdihzio-265[ztyda]
rwcnawjcrxwju-ljwmh-mnbrpw-901[wjrcm]
wlqqp-tyftfcrkv-ivtvzmzex-841[tvfqz]
thnulapj-wshzapj-nyhzz-zopwwpun-669[pzhnw]
bpvctixr-rdggdhxkt-hrpktcvtg-wjci-pcpanhxh-401[chptg]
eza-dpncpe-awldetn-rcldd-dlwpd-743[delpa]
pbybeshy-sybjre-npdhvfvgvba-299[bvyeh]
qmpmxevc-kvehi-jpsaiv-viwievgl-802[viemp]
jrncbavmrq-pnaql-pbngvat-qrcyblzrag-715[arbnq]
ugjjgkanw-wyy-kzahhafy-736[clxvm]
mwupyhayl-bohn-nluchcha-682[hacln]
qjopwxha-xwogap-ykjpwejiajp-108[jpawo]
avw-zljyla-jhukf-huhsfzpz-175[hzafj]
lzfmdshb-okzrshb-fqzrr-cdoknxldms-573[olwsf]
cqwdujys-sbqiiyvyut-uww-iuhlysui-426[cwfuy]
yaxsnlcrun-ljwmh-bqryyrwp-901[rylnw]
cebwrpgvyr-pelbtravp-enoovg-znantrzrag-455[raegn]
nbhofujd-qmbtujd-hsbtt-efwfmpqnfou-389[fbtud]
pynffvsvrq-pnaql-pbngvat-ynobengbel-507[nmyvz]
ltpedcxots-gpqqxi-ldgzhwde-739[bkapm]
nglmtuex-vahvhetmx-wxiehrfxgm-527[zwksp]
kgjgrypw-epybc-aylbw-amyrgle-qyjcq-626[ygabc]
yflexwxoalrp-avb-abmilvjbkq-445[siqmz]
jshzzpmplk-kfl-klclsvwtlua-331[lkpsz]
ujoon-eaphixr-vgphh-prfjxhxixdc-193[hyzjx]
dfcxsqhwzs-qobrm-zcuwghwqg-168[qwcgh]
bqvvu-ydkykhwpa-klanwpekjo-966[kapvw]
aoubshwq-pibbm-kcfygvcd-740[wnucy]"""
# Advent-of-Code style room-name validation:
# each line is "name-with-dashes<3-digit-sector>[5-char-checksum]".
# A room is real when the declared checksum equals the five most common
# letters of the name, ties broken alphabetically. Sum real sector IDs.
# NOTE(review): `input` here is a module-level string defined above,
# shadowing the builtin -- rename would require touching code outside
# this block.
codes = input.split('\n')
total = 0
for code in codes:
    # Drop the dashes so only letters (plus sector/checksum) remain.
    stripped = code.replace("-", "")
    bracket = stripped.index('[')
    declared = stripped[bracket + 1:bracket + 6]
    sector_id = int(stripped[bracket - 3:bracket])
    letter_counts = Counter(stripped[:bracket - 3])
    # Order by descending count, then alphabetically for ties.
    ranked = sorted(letter_counts.most_common(), key=lambda e: (-e[1], e[0]))
    computed = "".join(letter for letter, _ in ranked[:5])
    if declared == computed:
        total += sector_id
print(total)
|
class BrickGl:
    """Draws block letters ('A', 'B', 'C') on a grid through a
    caller-supplied atom-drawing callback.

    The callback is invoked as ``draw_atoms((h, w), ...)`` with one or
    more (row, column) cell coordinates per call.
    """

    def __init__(self, draw_atoms):
        # Callback that actually renders the cells; this class only
        # computes coordinates. (Removed a stray `pass` from the original.)
        self.draw_atoms = draw_atoms

    def horiz_line(self, h0, w0, length):
        """Draw `length` cells rightwards starting at (h0, w0)."""
        self.draw_atoms(*[(h0, w) for w in range(w0, w0 + length)])

    def vert_line(self, h0, w0, length):
        """Draw `length` cells downwards starting at (h0, w0)."""
        self.draw_atoms(*[(h, w0) for h in range(h0, h0 + length)])

    def diagonal(self, h0, w0, length, reverse=False):
        """Draw a `length`-cell diagonal from (h0, w0).

        Goes down-right by default; down-left when `reverse` is True.
        """
        step = -1 if reverse else 1
        self.draw_atoms(*[(h0 + i, w0 + step * i) for i in range(length)])

    def _draw_A(self, h0, w0):
        # Two diagonal strokes, a crossbar, and the two feet.
        self.diagonal(h0, w0, 3)
        self.diagonal(h0, w0, 3, reverse=True)
        self.horiz_line(h0 + 3, w0 - 2, 5)
        self.draw_atoms((h0 + 4, w0 - 2))
        self.draw_atoms((h0 + 4, w0 + 2))

    def _draw_B(self, h0, w0):
        # Left spine, three bars, and two bumps on the right side.
        self.vert_line(h0, w0 - 2, 5)
        self.horiz_line(h0, w0 - 2, 4)
        self.draw_atoms((h0 + 1, w0 + 2))
        self.horiz_line(h0 + 2, w0 - 2, 4)
        self.draw_atoms((h0 + 3, w0 + 2))
        self.horiz_line(h0 + 4, w0 - 2, 4)

    def _draw_C(self, h0, w0):
        # Top bar, left spine, bottom bar.
        self.horiz_line(h0, w0 - 2, 5)
        self.vert_line(h0, w0 - 2, 5)
        self.horiz_line(h0 + 4, w0 - 2, 5)

    def alphabet(self, ch, h0=3, w0=5):
        """Draw letter `ch` (case-insensitive) anchored at (h0, w0).

        Unsupported letters are silently ignored, matching the original
        if/elif behavior.
        """
        drawers = {
            'A': self._draw_A,
            'B': self._draw_B,
            'C': self._draw_C,
        }
        drawer = drawers.get(ch.upper())
        if drawer is not None:
            drawer(h0, w0)
|
import pygame as pg, time, math, sys
from pygame.locals import *
from random import randint
class Building():
    """A building obstacle that scrolls leftwards across a pygame screen."""

    def __init__(self, screen):
        # Spawn just past the right edge, at a slightly randomized height.
        self.x = 691
        self.y = randint(270, 310)
        self.sprite = pg.transform.scale(
            pg.image.load("images/building.png"), (91, 130)
        )
        self.screen = screen

    def draw(self):
        """Blit the sprite at the current position and refresh the mask."""
        self.screen.blit(self.sprite, (self.x, self.y))
        self.mask = pg.mask.from_surface(self.sprite)

    def move(self, speed):
        """Scroll the building `speed` pixels to the left."""
        self.x -= speed

    def on_screen(self):
        """Return True while the building should still be drawn."""
        return (self.x + 64) > 0
|
class Documents:
    """Holds a document collection of a given type located at `path`."""

    def __init__(self, tipo, path):
        self.tipo = tipo                  # collection type
        self.path = path                  # filesystem location
        self.listOfDocuments = []         # the documents themselves
        self.categorieOfDocuments = []    # their category labels
        # Rows, columns, date and dictionary. See Stemmer.
        self.metaData = []

    def setDictionary(self, dictionary):
        """Append `dictionary` to the collection metadata."""
        self.metaData.append(dictionary)
|
import cv2
import numpy as np
import tflite_runtime.interpreter as tflite
import urllib3
import time
def requestToThingSpeak():
    """Push the current `noMaskedNum` count to a ThingSpeak channel field.

    Reads the module-level `noMaskedNum`; the API key is a placeholder.
    """
    url = "https://api.thingspeak.com/update?api_key="
    key = "your key"
    val = f"&field1={noMaskedNum}"
    response = urllib3.PoolManager().request("GET", url + key + val)
    print(response.status)
# init: model and cascade paths
MODEL_PATH = "myModel/model.tflite"
FACE_CASCADE_PATH = "myModel/haarcascade_frontalface_default.xml"
# Load the TFLite mask classifier.
interpreter = tflite.Interpreter(model_path=MODEL_PATH)
interpreter.allocate_tensors()
# Class labels: index 0 = masked, index 1 = bare face (see the counter below).
label = ['mask', 'face']
# Haar cascade used to locate face regions of interest.
face_cascade = cv2.CascadeClassifier(FACE_CASCADE_PATH)
# Capture video from the default camera.
cap = cv2.VideoCapture(0)
# Running count of unmasked faces in the current frame; uploaded by
# requestToThingSpeak() when the (currently disabled) upload is enabled.
noMaskedNum = 0
# requestToThingSpeak()
s = time.time()
while True:
    # Grab the next frame from the camera.
    ret, frame = cap.read()
    # Haar cascades work on grayscale images.
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    # Detect candidate face rectangles as (x, y, width, height).
    faces = face_cascade.detectMultiScale(gray, 1.3, 4)
    # Reset the per-frame counter.
    noMaskedNum = 0
    # Classify every detected face as masked or unmasked.
    for (fx, fy, fw, fh) in faces:
        # Bug fix: the original sliced frame[fy:fy+fh, fx:fx+fh], using
        # the HEIGHT for the column span; the horizontal extent is fw.
        crop = frame[fy:fy+fh, fx:fx+fw]
        # Resize and normalize to the model's 128x128x3 float input.
        crop = cv2.resize(crop, (128, 128))
        crop = np.reshape(crop, [1, 128, 128, 3]) / 255.0
        crop = crop.astype('float32')
        # Run inference on this face.
        interpreter.set_tensor(interpreter.get_input_details()[0]['index'], crop)
        interpreter.invoke()
        output = interpreter.get_tensor(interpreter.get_output_details()[0]['index'])[0]
        # Show raw scores and the winning label.
        print(output, " ", np.argmax(output), " ", label[np.argmax(output)])
        # Count unmasked faces (label index 1 == 'face').
        if np.argmax(output) == 1:
            noMaskedNum += 1
        # Draw the face box and the predicted label on the frame.
        cv2.rectangle(frame, (fx, fy), (fx + fw, fy + fh), (255, 0, 0), 2)
        cv2.putText(frame, label[np.argmax(output)], (fx, fy), cv2.FONT_HERSHEY_SIMPLEX, 1, (200, 255, 255), 2, cv2.LINE_AA)
    # Upload every ~15s (ThingSpeak's minimum update interval) -- disabled:
    # the blocking upload takes ~1s and freezes the preview. Moving it to a
    # thread risks racing the per-frame counter reset above, so it is left
    # off here.
    # c = time.time()
    # if c - s > 15.5:
    #     requestToThingSpeak()
    #     s = c
    # Show the annotated frame; ESC (27) quits.
    cv2.imshow('img', frame)
    if cv2.waitKey(1) & 0xFF == 27:
        break
cap.release()
cv2.destroyAllWindows()
|
# import library.
import urllib
# Resolve urlopen for both Python 3 and Python 2.
try:
    from urllib.request import urlopen  # Python 3
except ImportError:
    from urllib2 import urlopen  # Python 2 fallback
# Fetch the Google homepage and dump the raw response body.
page = urlopen("http://www.google.com/")
print(page.read())
|
#!/usr/bin/env python
# encoding: utf-8
"""
utils.py
Created by edu on 2010-11-10.
"""
from _pitchtools import *
import os
def _note_to_music21(note):
    # Convert a note to the name format used with music21 via
    # _pitchtools.n2m. NOTE(review): n2m's exact contract is defined in
    # _pitchtools and not visible here -- confirm input/output types there.
    return n2m(note)
def _open_pdf(pdf_file):
    # Open the PDF with the OS `open` command (macOS); blocking shell call.
    os.system('open %s' % pdf_file)
def _show_note_in_finale(note):
    # Display the note through music21's default .show() renderer
    # (Finale on this setup) and return the music21 Note object.
    note = _note_to_music21(note)
    import music21
    out = music21.note.Note(note)
    out.show()
    return out
def _show_note_as_pdf(note):
    """Render *note* as a PDF and open it.

    Pipeline: music21 -> musicxml file -> musicxml2ly -> lilypond -> open.
    Writes intermediate .xml/.ly/.pdf files and shells out for each step;
    returns the music21 Note object.
    """
    import music21
    import os
    note = _note_to_music21(note)
    out = music21.note.Note(note)
    f = out.write('musicxml')
    os.system('musicxml2ly %s' % f)
    # Derive the .ly filename once and reuse it. The original recomputed
    # it inline, relying on %-before-+ operator precedence to build
    # "lilypond <base>.ly" -- same command, but easy to misread.
    ly = os.path.splitext(os.path.split(f)[1])[0] + '.ly'
    os.system('lilypond %s' % ly)
    _open_pdf(os.path.splitext(ly)[0] + '.pdf')
    return out
def show_note(n, backend='finale'):
    """Show a note with the given backend.

    n can be a midi number or a note name.
    backend: 'finale' or 'pdf'; any other value falls back to 'finale'.
    Returns whatever the backend function returns (a music21 Note).
    """
    if isinstance(n, (int, float)):
        note = m2n(n)
    else:
        note = n
    # Bug fix: the original used .get(backend, DEFAULT) where DEFAULT was
    # the *string* 'finale', so an unknown backend returned a str and then
    # raised TypeError when called. Fall back to the finale handler.
    func = {
        'finale': _show_note_in_finale,
        'pdf': _show_note_as_pdf,
    }.get(backend, _show_note_in_finale)
    return func(note)
def pianokey2midi(key_number):
    """Convert a key number on an 88-key piano to its midi note.

    A0 (the lowest key on an 88-key keyboard) is midi 21, so the
    conversion is just an offset of 21.

    Raises ValueError if key_number is outside 0..87. (The original used
    `assert`, which is silently stripped under ``python -O``.)
    """
    if not 0 <= key_number <= 87:
        raise ValueError("key_number must be in 0..87, got %r" % (key_number,))
    return key_number + 21
def notestr_to_notes(s):
    """
    s: a string containing any note in the format accepted to n2m or n2f
    notes can be separated by ',' or by a blank space
    ranges can also be used and they include both extremes: (C4 - F4)
    """
    # NOTE(review): this function appears unfinished. range2notes computes
    # note0/note1 but returns nothing, and notestr_to_notes itself only
    # accumulates `exclude` spans without ever returning notes. TODO
    # confirm intended behavior before relying on it.
    import re
    ranges_pattern = '\(.*? -.*?\)'
    exclude = []
    def range2notes(r):
        # Split a "(C4 - F4)" range into its two endpoint note names
        # and convert them via n2m. The result is currently discarded.
        words = r.strip('()').split()
        note0, note1 = words[0], words[-1]
        note1 = note1.lstrip('-')
        note0, note1 = n2m(note0), n2m(note1)
    for match in re.finditer(ranges_pattern, s):
        r = match.group()
        notes = range2notes(r)  # always None -- see NOTE above
        exclude.append(match.span())
def normalize_notename(n):
    """
    given a notename in the format returned by m2n, transform it to
    note-octave format, dropping the cents if given
    this is useful when entering commands for software that expects a
    name for a midinote, like sample players
    """
    # Round to the nearest semitone, then re-render the name -- this
    # drops any cents component.
    n = m2n(int(n2m(n) + 0.5))
    n = n.lower()
    # The code below assumes m2n puts the octave FIRST (e.g. "4c") and
    # moves it to the end ("c4"). NOTE(review): m2n's exact output format
    # lives in _pitchtools and is not visible here -- confirm.
    if n[0] in "0123456789":
        # One- or two-digit octave prefix.
        if n[1] in "0123456789":
            digits = 2
        else:
            digits = 1
        octave = str(int(n[0:digits]))
        n = n[digits:] + octave
    elif n[0] == '-':
        # Negative octave prefix, e.g. "-1c" -> "c-1".
        octave = str(int(n[0:2]))
        n = n[2:] + octave
    return n
|
import click
from mazel.label import Target
# Import module for easier patching during test
from . import label_common
# Run the "test" target for a label (optionally with its ancestors or
# descendants), mimicking bazel's --test_output behavior.
@label_common.label_command
@click.option(
    # Replicated from bazel:
    # https://docs.bazel.build/versions/master/user-manual.html#flag--test_output
    "--test_output",
    type=click.Choice(["streamed", "errors"]),
    default="streamed",
    help="Only show stdout/stderr after error, or stream everything.",
)
def test(
    label: str, test_output: str, with_ancestors: bool, with_descendants: bool
) -> None:
    # "errors" buffers output and shows it only on failure; "streamed"
    # (the default) forwards output live.
    handler_cls = (
        label_common.MakeLabelCaptureErrors
        if test_output == "errors"
        else label_common.MakeLabel
    )
    # Run in dependency order, honoring the ancestor/descendant flags
    # supplied by the label_command decorator.
    label_common.LabelRunner(
        handler=handler_cls(),
        default_target=Target("test"),
        run_order=label_common.RunOrder.ORDERED,
        with_ancestors=with_ancestors,
        with_descendants=with_descendants,
    ).run(label)
|
'''
Given a root of a binary tree flatten the tree
'''
class Node:
    """A binary-tree node: a value plus optional left/right children."""

    def __init__(self, value):
        self.value = value
        # Children start detached.
        self.left = self.right = None
def _flatten_tree(root):
    """Flatten the subtree at `root` into a right-linked list (preorder).

    Returns the tail node of the flattened list, or None for an empty
    subtree. Mutates the tree in place.
    """
    if root is None:
        return None
    if root.left is None and root.right is None:
        # A leaf is its own tail.
        return root
    left_tail = _flatten_tree(root.left)
    right_tail = _flatten_tree(root.right)
    if left_tail is not None:
        # Splice the flattened left list between root and the right list.
        left_tail.right = root.right
        root.right = root.left
        root.left = None
    return right_tail if right_tail is not None else left_tail
def flatten_tree(root):
    """Flatten the whole tree in place and return its head (or None)."""
    if root is not None:
        _flatten_tree(root)
    return root
def main():
    """Build a sample tree, flatten it, and print values left to right."""
    root = Node(25)
    left, right = Node(20), Node(36)
    root.left, root.right = left, right
    left.left, left.right = Node(10), Node(22)
    right.left, right.right = Node(30), Node(40)
    left.left.left, left.left.right = Node(5), Node(12)
    right.left.left = Node(28)
    right.right.left, right.right.right = Node(38), Node(48)
    left.left.left.left, left.left.left.right = Node(1), Node(8)
    left.left.right.right = Node(15)
    right.right.right.left, right.right.right.right = Node(45), Node(50)
    # Walk the flattened list via the right pointers.
    node = flatten_tree(root)
    while node is not None:
        print(node.value, end=" ")
        node = node.right


if __name__ == "__main__":
    main()
|
"""
TODO
Think about how best to manage properties associated with constraint sources.
In maya this seems to be managed more effectively:
Constraint sources, offset attributes and weight attributes are index based.
So when a source node is renamed, the connections are still valid.
The exception to this is the name of the weight attributes exposed to the user.
These are name based and are connected to the index based weight attrs.
So these are not renamed dynamically.
However when exporting as fbx and re-importing, the names are corrected, because the attr is newly created.
In fbx however these are all name based.
Meaning we have a couple of options:
-
We could use property connections to create a relationship between properties and the respective nodes.
This would allow us to find properties easily after a node has been renamed.
However this would not work with fbx files where these connections have not been made.
There is also a risk of extraneous connections.
-
The safer option is we instance a class to keep references between nodes and properties,
with methods to "refresh" the property names if the node was renamed.
This could be exposed to the user as a tool, either a right click context menu,
or a button to fix all property names in the scene (ideally both)
When instancing this class it would search the constraint for properties that matched the name of the node.
If it finds more than one matching property, then an error (or warning) should be thrown.
The risk to this is if we open an fbx that had a node already renamed but not the property.
The solution to this would be to purge all properties on the constraint that did not match the source node names
and create again.
(this behaviour is similar to Maya if importing an fbx with badly named properties)
TODO
method to validate properties
when exporting constraints from Maya, only the non-default properties are created
so we are sometimes missing some properties
(try to reproduce this, doesn't seem to happen?! or maybe find whatever the circumstances were to make this happen)
"""
import os
import fbx
from brenfbx.core import bfCore
from brenfbx.core import bfIO
from brenfbx.fbxsdk.core import bfObject
from brenfbx.fbxsdk.core import bfProperty
class BfConstraintSource(bfCore.BfManagerBase):
    """Wraps a single source (driver) node of an fbx constraint and the
    per-source properties (e.g. "<node>.Weight") stored on the constraint.

    Notes:
        Index-based to avoid keeping references to potentially volatile FbxNode objects.
    """
    FBX_CLASS_ID = fbx.FbxConstraint.ClassId
    # Suffixes of the per-source properties this class owns on the constraint.
    PROPERTY_NAMES = ["Weight"]
    def __init__(self, bf_environment, bf_constraint, constraint_source_index, default_weight=100.0, *args, **kwargs):
        """bf_constraint: owning BfConstraint wrapper.
        constraint_source_index: index of this source on the constraint's
        sources property.
        """
        super(BfConstraintSource, self).__init__(bf_environment, *args, **kwargs)
        if not isinstance(bf_constraint, BfConstraint):
            raise bfCore.BfError(
                "bf_constraint must be instance or subclass of BfConstraint: {}".format(bf_constraint)
            )
        # The source wrapper and the constraint wrapper must agree on the
        # fbx class they target.
        if bf_constraint.FBX_CLASS_ID != self.FBX_CLASS_ID:
            raise bfCore.BfError(
                "FBX_CLASS_ID must be identical: {} ({}) {} ({})".format(
                    self.FBX_CLASS_ID, self.FBX_CLASS_ID.GetName(),
                    bf_constraint.FBX_CLASS_ID, bf_constraint.FBX_CLASS_ID.GetName()
                )
            )
        self._bf_constraint = bf_constraint
        self._index = constraint_source_index
        self._initialize_weight_property(default_weight)
    def index(self):
        """Return this source's index on the constraint sources property."""
        return self._index
    def constraint_sources_property(self):
        """Return the constraint's fbx property that owns all source connections."""
        return self._bf_constraint.constraint_sources_property()
    def _initialize_weight_property(self, default_value):
        # The property is named "<node name>.Weight" to mirror Maya's
        # per-source weight attributes (see module docstring).
        self._weight_property = self._bf_constraint.initialize_property(
            "{}.Weight".format(self.fbx_node().GetName()),
            fbx.FbxDoubleDT,
            default_value=default_value,
            animatable=True
        )
    def fbx_node(self):
        """Return the FbxNode connected at this source index.

        Raises BfError if the index no longer maps to a connection.
        """
        fbx_node = self.constraint_sources_property().GetSrcObject(self._index)
        if fbx_node is None:
            raise bfCore.BfError(
                "Constraint source index out of range: {} {}".format(
                    self._bf_constraint.fbx_object(), self._index
                )
            )
        return fbx_node
    def constraint_source(self):
        """Alias for fbx_node()."""
        return self.fbx_node()
    def weight_property(self):
        """Return the wrapped weight property for this source."""
        return self._weight_property
    def is_property_name_valid(self, fbx_property):
        """Return True if the property's "<node>." prefix matches this
        source node's current name.
        """
        property_name = fbx_property.GetName()
        # Simplified from an if/else that returned True/False explicitly.
        return str(property_name).split(".")[0] == self.fbx_node().GetName()
    def validate_weight_property_name(self):
        """Recreate the weight property if the source node was renamed.

        TODO find way to avoid code duplication with other similar properties
        """
        if self.is_property_name_valid(self._weight_property.fbx_property()):
            return True
        # fbx cannot rename properties, so create a new property with the
        # correct name, carrying over the current value.
        value = self._weight_property.get_value()
        self._weight_property.fbx_property().Destroy()
        self._initialize_weight_property(value)
        return True
    def validate_property_names(self):
        """Check that property names match node name.
        Unfortunately fbx doesn't support renaming properties,
        so if any names don't match, we need to destroy the old property and create a new one.
        """
        self.validate_weight_property_name()
        return True
    @classmethod
    def create(cls, bf_constraint, fbx_node, default_weight=100.0):
        """Connect fbx_node as a new source of bf_constraint and wrap it."""
        if bf_constraint.is_constraint_source(fbx_node):
            raise bfCore.BfError(
                "FbxNode is already constraint source, please instance class instead. ({})".format(fbx_node)
            )
        bf_constraint.constraint_sources_property().ConnectSrcObject(fbx_node)
        # The new connection is always appended, so its index is count - 1.
        constraint_source_index = bf_constraint.constraint_sources_property().GetSrcObjectCount() - 1
        constraint_source = cls(
            bf_constraint.bf_environment(), bf_constraint, constraint_source_index, default_weight=default_weight
        )
        return constraint_source
    def destroy(self):
        """Remove constraint source and properties.
        """
        self._weight_property.destroy()
        self.constraint_sources_property().DisconnectSrcObject(
            self.fbx_node()
        )
        return True
class BfConstraint(
    bfObject.BfEvaluationObject
):
    """Wraps an fbx constraint object: its constrained object, its source
    nodes and the per-source properties.

    TODO
    fbx constraint sources are not managed particularly well by the fbxsdk
    """
    FBX_CLASS_ID = fbx.FbxConstraint.ClassId
    # Wrapper class instantiated for each connected source node;
    # subclasses override this alongside FBX_CLASS_ID.
    CONSTRAINT_SOURCE_CLS = BfConstraintSource
    # fbx property names; subclasses override these per constraint type.
    CONSTRAINED_OBJECT_PROPERTY_NAME = "Constrained Object"
    CONSTRAINT_SOURCES_PROPERTY_NAME = "Source"
    def __init__(self, *args, **kwargs):
        """Wrap the constraint's standard properties and its sources."""
        super(BfConstraint, self).__init__(*args, **kwargs)
        self._constraint_source_list = []
        self.initialize_constraint_sources()
        self._constrained_object_bf_property = bfProperty.BfObjectReferenceProperty(
            self.bf_environment(), self.constrained_object_property()
        )
        self._constraint_sources_bf_property = bfProperty.BfObjectReferencesProperty(
            self.bf_environment(), self.constraint_sources_property()
        )
        self._active_bf_property = bfProperty.BfProperty(
            self.bf_environment(), self.fbx_object().Active
        )
        self._lock_bf_property = bfProperty.BfProperty(
            self.bf_environment(), self.fbx_object().Lock
        )
        self._weight_bf_property = bfProperty.BfProperty(
            self.bf_environment(), self.fbx_object().Weight
        )
    def active(self):
        """Return the wrapped Active property."""
        return self._active_bf_property
    def lock(self):
        """Return the wrapped Lock property."""
        return self._lock_bf_property
    def weight(self):
        """Return the wrapped (constraint-wide) Weight property."""
        return self._weight_bf_property
    def constrained_object_property(self):
        """Explicit method to find constrained object property by name.
        Potentially more reliable method than FbxConstraint.ConstrainedObject reference
        """
        fbx_property = self.fbx_object().FindProperty(self.CONSTRAINED_OBJECT_PROPERTY_NAME, True) # case sensitive
        if not fbx_property.IsValid():
            raise bfCore.BfError("Failed to find constrained object property: {}.{}".format(
                self.fbx_object(), self.CONSTRAINED_OBJECT_PROPERTY_NAME
            ))
        return fbx_property
    def constraint_sources_property(self):
        """More reliable method than fbx_constraint.ConstraintSources
        NOTE in one strange case FbxConstraint.ConstraintSources gave back the wrong property!
        think this may definitely be better done explicitly with the actual properties
        same with other constraint properties like worldUpVector etc.
        """
        fbx_property = self.fbx_object().FindProperty(self.CONSTRAINT_SOURCES_PROPERTY_NAME, True) # case sensitive
        if not fbx_property.IsValid():
            raise bfCore.BfError("Failed to find constraint sources property: {}.{}".format(
                self.fbx_object(), self.CONSTRAINT_SOURCES_PROPERTY_NAME
            ))
        return fbx_property
    def constrained_object(self):
        """Return the wrapped constrained-object reference property."""
        return self._constrained_object_bf_property
    def constraint_sources(self):
        """Return the wrapped source-references property."""
        return self._constraint_sources_bf_property
    def constraint_source_list(self):
        """Return the cached list of BfConstraintSource wrappers."""
        return self._constraint_source_list
    def constraint_source_count(self):
        """Return the number of connected source nodes."""
        return self.constraint_sources_property().GetSrcObjectCount()
    def _initialize_constraint_source(self, constraint_source_index, default_weight=100.0):
        """Create a BfConstraintSource class instance for constraint source at specified index.
        """
        constraint_source = self.CONSTRAINT_SOURCE_CLS(
            self.bf_environment(), self, constraint_source_index, default_weight=default_weight
        )
        return constraint_source
    def initialize_constraint_sources(self):
        """Create a BfConstraintSource class instance for each constraint source.
        """
        # drop any previously initialized constraint source wrappers
        self._constraint_source_list = []
        for i in range(self.constraint_source_count()):
            constraint_source = self._initialize_constraint_source(i)
            self._constraint_source_list.append(constraint_source)
        return self._constraint_source_list
    def get_constraint_source_names(self):
        """Return the current names of all connected source nodes."""
        names = []
        for i in range(self.constraint_source_count()):
            fbx_node = self.constraint_sources_property().GetSrcObject(i)
            names.append(fbx_node.GetName())
        return names
    def validate_property_names(self):
        """Recreate any per-source properties whose names are stale."""
        for constraint_source in self._constraint_source_list:
            constraint_source.validate_property_names()
    def purge_bad_properties(self):
        """Destroy any properties whose name does not match any constraint source name.
        Potential risk of destroying properties we actually want to keep,
        if it's unlucky enough to match criteria to mark as a 'bad' property.
        """
        # validate constraint source properties first
        self.validate_property_names()
        # check remaining "<node>.<suffix>" style properties
        node_names = self.get_constraint_source_names()
        # (removed a leftover Python-2 debug `print node_names` here)
        fbx_property = self.fbx_object().GetFirstProperty()
        bad_properties = []
        while fbx_property.IsValid():
            # only dotted names are per-source properties
            if "." not in fbx_property.GetName():
                fbx_property = self.fbx_object().GetNextProperty(fbx_property)
                continue
            tokens = str(fbx_property.GetName()).split(".")
            if tokens[1] in self.CONSTRAINT_SOURCE_CLS.PROPERTY_NAMES:
                if tokens[0] not in node_names:
                    bad_properties.append(fbx_property)
            fbx_property = self.fbx_object().GetNextProperty(fbx_property)
        # destroy after iteration so the property traversal isn't disturbed
        for fbx_property in bad_properties:
            self.debug(
                "Destroying bad property: {}".format(fbx_property.GetName()),
                level=self.LEVELS.user()
            )
            fbx_property.Destroy()
        return True
    def is_constraint_source(self, fbx_node):
        """Return True if fbx_node is already connected as a source.
        TODO check fbx node?
        """
        return self.constraint_sources_property().IsConnectedSrcObject(fbx_node)
    def get_constraint_source(self, constraint_index):
        """Return the BfConstraintSource wrapper at constraint_index."""
        return self._constraint_source_list[constraint_index]
    def get_constraint_source_node(self, constraint_index):
        """Return the FbxNode of the source at constraint_index."""
        return self.get_constraint_source(constraint_index).fbx_node()
    def find_constraint_source(self, fbx_node):
        """Return the wrapper for fbx_node; raise BfError if it is not a source."""
        for constraint_source in self._constraint_source_list:
            if constraint_source.fbx_node() is fbx_node:
                return constraint_source
        raise bfCore.BfError("Unable to find constraint source object: {} {}".format(
            self.fbx_object().GetName(), fbx_node.GetName()
        ))
    def add_constraint_source(self, fbx_node, default_weight=100.0):
        """Method to provide extra functionality when adding a constraint source.
        TODO check if fbx_node is constrained object and show warning?
        """
        if self.is_constraint_source(fbx_node):
            raise bfCore.BfError(
                "fbx_node is already constraint source: {}".format(fbx_node.GetName())
            )
        constraint_source = self.CONSTRAINT_SOURCE_CLS.create(
            self, fbx_node, default_weight=default_weight
        )
        self._constraint_source_list.append(constraint_source)
        return constraint_source
    def remove_constraint_source(self, fbx_node):
        """Disconnect fbx_node and rebuild the source wrappers."""
        constraint_source = self.find_constraint_source(fbx_node)
        res = constraint_source.destroy()
        # constraint indices may now be different,
        # so it is important to re-initialize
        self.initialize_constraint_sources()
        return res
    def remove_all_constraint_sources(self):
        """Disconnect every source node and reset the wrappers."""
        res = self.constraint_sources_property().DisconnectAllSrcObject()
        self.initialize_constraint_sources()
        return res
    def evaluate(self):
        """TODO
        """
        super(BfConstraint, self).evaluate()
class BfConstraintParentSource(BfConstraintSource):
    # Source wrapper for parent constraints: adds per-source translation
    # and rotation offset properties alongside the inherited weight.
    FBX_CLASS_ID = fbx.FbxConstraintParent.ClassId
    # Per-source property suffixes managed for parent constraint sources.
    PROPERTY_NAMES = [
        "Weight", "Offset T", "Offset R"
    ]
    def __init__(self, bf_environment, fbx_constraint, fbx_node, default_weight=100.0):
        # NOTE(review): parameter names here (fbx_constraint, fbx_node) do
        # not match the base signature (bf_constraint,
        # constraint_source_index); arguments are passed positionally so
        # behavior is unaffected, but the third argument is actually a
        # source index -- confirm and rename.
        super(BfConstraintParentSource, self).__init__(
            bf_environment, fbx_constraint, fbx_node, default_weight=default_weight
        )
        self._initialize_offset_t_property()
        self._initialize_offset_r_property()
    def _initialize_offset_t_property(self, value=fbx.FbxDouble4()):
        # Translation offset property, named "<node>.Offset T".
        # NOTE(review): the default argument is a shared FbxDouble4
        # instance (mutable default) -- assumed read-only; confirm.
        self._offset_t_property = self._bf_constraint.initialize_property(
            "{}.Offset T".format(self.fbx_node().GetName()),
            fbx.FbxTranslationDT,
            default_value=value
        )
    def _initialize_offset_r_property(self, value=fbx.FbxDouble4()):
        # Rotation offset property, named "<node>.Offset R".
        # NOTE(review): this uses fbx.FbxTranslationDT for a *rotation*
        # offset -- looks like a copy-paste from the T property above and
        # probably should be fbx.FbxRotationDT; confirm before changing.
        self._offset_r_property = self._bf_constraint.initialize_property(
            "{}.Offset R".format(self.fbx_node().GetName()),
            fbx.FbxTranslationDT,
            default_value=value
        )
    def offset_t_property(self):
        # Accessor for the per-source translation offset wrapper.
        return self._offset_t_property
    def offset_r_property(self):
        # Accessor for the per-source rotation offset wrapper.
        return self._offset_r_property
    def validate_offset_t_property_name(self):
        # Recreate "<node>.Offset T" if the node was renamed (fbx cannot
        # rename properties in place), preserving its current value.
        if self.is_property_name_valid(self._offset_t_property.fbx_property()):
            return True
        # else create new property with correct name
        value = self._offset_t_property.get_value()
        self._offset_t_property.fbx_property().Destroy()
        self._initialize_offset_t_property(value=value)
        return True
    def validate_offset_r_property_name(self):
        # Recreate "<node>.Offset R" if the node was renamed.
        if self.is_property_name_valid(self._offset_r_property.fbx_property()):
            return True
        # else create new property with correct name
        value = self._offset_r_property.get_value()
        self._offset_r_property.fbx_property().Destroy()
        self._initialize_offset_r_property(value=value)
        return True
    def validate_property_names(self):
        # Validate weight (base class) plus both offset properties.
        super(BfConstraintParentSource, self).validate_property_names()
        self.validate_offset_r_property_name()
        self.validate_offset_t_property_name()
        return True
    def destroy(self):
        # Remove the source connection (base class) and then the
        # per-source offset properties.
        res = super(BfConstraintParentSource, self).destroy()
        if not res:
            return res
        self._offset_t_property.destroy()
        self._offset_r_property.destroy()
        return True
class BfConstraintParent(BfConstraint):
    """BF wrapper for FBX parent constraints."""
    FBX_CLASS_ID = fbx.FbxConstraintParent.ClassId
    CONSTRAINT_SOURCE_CLS = BfConstraintParentSource
    CONSTRAINED_OBJECT_PROPERTY_NAME = "Constrained object (Child)"
    CONSTRAINT_SOURCES_PROPERTY_NAME = "Source (Parent)"

    def __init__(self, *args, **kwargs):
        super(BfConstraintParent, self).__init__(*args, **kwargs)

    def evaluate(self):
        """Evaluate the constraint (still a stub).

        Fix: super() was previously called with BfConstraint as the first
        argument, which skipped BfConstraint.evaluate in the MRO.
        """
        super(BfConstraintParent, self).evaluate()
class BfConstraintPositionSource(BfConstraintSource):
    """Constraint source for position constraints.

    Adds no behaviour beyond the generic BfConstraintSource; it only pins
    the FBX class id for this source type, so no explicit __init__ is needed.
    """
    FBX_CLASS_ID = fbx.FbxConstraintPosition.ClassId
class BfConstraintPosition(BfConstraint):
    """BF wrapper for FBX position constraints."""
    FBX_CLASS_ID = fbx.FbxConstraintPosition.ClassId
    CONSTRAINT_SOURCE_CLS = BfConstraintPositionSource

    def __init__(self, *args, **kwargs):
        super(BfConstraintPosition, self).__init__(*args, **kwargs)

    def evaluate(self):
        """Evaluate the constraint (still a stub).

        Fix: super() was previously called with BfConstraint as the first
        argument, which skipped BfConstraint.evaluate in the MRO.
        """
        super(BfConstraintPosition, self).evaluate()
class BfConstraintRotationSource(BfConstraintSource):
    """Constraint source for rotation constraints.

    Adds no behaviour beyond the generic BfConstraintSource; it only pins
    the FBX class id for this source type, so no explicit __init__ is needed.
    """
    FBX_CLASS_ID = fbx.FbxConstraintRotation.ClassId
class BfConstraintRotation(BfConstraint):
    """BF wrapper for FBX rotation constraints."""
    FBX_CLASS_ID = fbx.FbxConstraintRotation.ClassId
    CONSTRAINT_SOURCE_CLS = BfConstraintRotationSource

    def __init__(self, *args, **kwargs):
        super(BfConstraintRotation, self).__init__(*args, **kwargs)

    def evaluate(self):
        """Evaluate the constraint (still a stub).

        Fix: super() was previously called with BfConstraint as the first
        argument, which skipped BfConstraint.evaluate in the MRO.
        """
        super(BfConstraintRotation, self).evaluate()
class BfConstraintScaleSource(BfConstraintSource):
    """Constraint source for scale constraints.

    Adds no behaviour beyond the generic BfConstraintSource; it only pins
    the FBX class id for this source type, so no explicit __init__ is needed.
    """
    FBX_CLASS_ID = fbx.FbxConstraintScale.ClassId
class BfConstraintScale(BfConstraint):
    """BF wrapper for FBX scale constraints."""
    FBX_CLASS_ID = fbx.FbxConstraintScale.ClassId
    CONSTRAINT_SOURCE_CLS = BfConstraintScaleSource

    def __init__(self, *args, **kwargs):
        super(BfConstraintScale, self).__init__(*args, **kwargs)

    def evaluate(self):
        """Evaluate the constraint (still a stub).

        Fix: super() was previously called with BfConstraint as the first
        argument, which skipped BfConstraint.evaluate in the MRO.
        """
        super(BfConstraintScale, self).evaluate()
def test():
    """Manual smoke test for the constraint wrappers.

    Builds a scene with four nodes and a parent constraint, exercises the
    BfConstraintParent API (adding/finding sources, offset properties, name
    validation after renames, property purging), then exports the scene as an
    ASCII FBX file for inspection.
    """
    # NOTE(review): hard-coded developer dump path — only valid on one machine.
    DUMP_DIR = r"D:\Repos\dataDump\brenfbx"
    TEST_EXPORT_FILE = os.path.join(
        DUMP_DIR,
        "brenfbx_constraint_objects_test_scene_01.fbx"
    )
    fbx_manager = fbx.FbxManager.Create()
    fbx_scene = fbx.FbxScene.Create(fbx_manager, "TestScene")
    from brenfbx.objects import bfCustomObjectData
    bf_environment = bfCore.BfEnvironment(fbx_manager, bfCustomObjectData.DATA_OBJECT)
    # create some nodes
    node_1 = fbx.FbxNode.Create(fbx_manager, "node1")
    node_2 = fbx.FbxNode.Create(fbx_manager, "node2")
    node_3 = fbx.FbxNode.Create(fbx_manager, "node3")
    node_4 = fbx.FbxNode.Create(fbx_manager, "node4")
    # add to scene
    fbx_scene.GetRootNode().AddChild(node_1)
    fbx_scene.GetRootNode().AddChild(node_2)
    fbx_scene.GetRootNode().AddChild(node_3)
    fbx_scene.GetRootNode().AddChild(node_4)
    # create anim stack to support constraints
    lAnimStack = fbx.FbxAnimStack.Create(fbx_scene, "testAnimStack")
    lAnimLayer = fbx.FbxAnimLayer.Create(fbx_scene, "Base Layer")
    lAnimStack.AddMember(lAnimLayer)
    # create some constraints: node_1 is the constrained child
    parent_cons = fbx.FbxConstraintParent.Create(fbx_manager, "parentConstraint1")
    parent_cons.SetConstrainedObject(node_1)
    fbx_scene.ConnectSrcObject(parent_cons)
    # wrap the raw FBX constraint and add two weighted sources with offsets
    bf_parent_cons = BfConstraintParent(bf_environment, parent_cons)
    cons_src_1 = bf_parent_cons.add_constraint_source(node_2, default_weight=16.6)
    cons_src_1.offset_t_property().set_value(fbx.FbxDouble4(1, 2, 3, 1))
    cons_src_1.offset_r_property().set_value(fbx.FbxDouble4(6, 5, 4, 1))
    bf_parent_cons.add_constraint_source(node_3, default_weight=13.3)
    cons_src_2 = bf_parent_cons.find_constraint_source(node_3)
    cons_src_2.offset_t_property().set_value(fbx.FbxDouble4(10, 11, 12, 1))
    cons_src_2.offset_r_property().set_value(fbx.FbxDouble4(34, 56, 78, 1))
    # rename nodes and check that property names get re-validated
    node_3.SetName("node3_newName")
    # cons_src_2.validate_property_names()
    bf_parent_cons.validate_property_names()
    node_2.SetName("node2_newname")
    # cons_src_1.validate_property_names()
    bf_parent_cons.validate_property_names()
    # test property purging: add a raw (unwrapped) source, rename, then purge
    parent_cons.AddConstraintSource(node_4)
    node_4.SetName("node4_newname")
    print parent_cons.ConstraintSources.GetSrcObject(2).GetName()
    bf_parent_cons.purge_bad_properties()
    print "created stuff"
    bfIO.save_fbx_file(
        fbx_manager,
        fbx_scene,
        TEST_EXPORT_FILE,
        settings=None,
        ascii=True,
        overwrite=True,
        makedirs=False,
        verbose=True,
        err=True
    )
    print "file exported: {}".format(TEST_EXPORT_FILE)
if __name__ == "__main__":
    # Build and export the constraint test scene when run directly.
    test()
    print "done"
|
def greet_user(username):
    """Return the greeting for a known username, or "Invalid" otherwise.

    Fix: the original nested each equality check inside the previous one
    (e.g. the "ankita" test only ran when the name was already "supriya"),
    which made the "ankita" and "pihu" branches unreachable. An elif chain
    expresses the intended mutually-exclusive checks.
    """
    if username == "supriya":
        return "yaa"
    elif username == "ankita":
        return "yes"
    elif username == "pihu":
        return "ok"
    else:
        return "Invalid"


if __name__ == "__main__":
    print(greet_user(input("enter the name")))
def compute_pay(hours, rate):
    """Return total pay as hours * rate, parsing both as integers.

    Raises ValueError if either value cannot be parsed as an int.
    """
    return int(hours) * int(rate)


if __name__ == "__main__":
    # Fix: the prompts previously ran at import time; guard them so the
    # module can be imported without blocking on stdin.
    hours = input('how many hours')
    rate = input('how many rate ')
    print(compute_pay(hours, rate))
"""
INCOMPLETE, UNUSED
Learn PID using the encoder readings assuming a PID control model.
TODO: Make it less dependent on correct path.
"""
import sys
sys.path.insert(0, "/Users/hikhan/Desktop/Autonomous Robotics Navigation/E160_Code/")
from E160_config import CONFIG_DELTA_T
from E160_environment import *
from E160_graphics import *
from math import sqrt
from random import gauss
from statistics import mean, stdev
import time
IS_DEBUG = True  # print per-iteration learning statistics
DESIRED_POWER = 90 *(256/100) # percentage of total power on left (slower) motor
# Initial PID gains (proportional, integral, derivative).
INIT_KP = 1
INIT_KI = 1
INIT_KD = 1
STOP_THRESHOLD = 0.00001 # stops when change in alpha is this or smaller
MAX_TRIALS = 10000 # maximum number of trials before stops
WINDOW_WIDTH = 10 # number of points across which to calculate mean, stdev
INIT_LEARNING_RATE = 1.0 # how much impact should a single reading have on the next alpha
learning_coefficient = 100  # numerator of the decaying learning-rate schedule
def initialize_coefs_list(init_Kp=INIT_KP,
                          init_Ki=INIT_KI,
                          init_Kd=INIT_KD,
                          max_trials=MAX_TRIALS,
                          window_width=WINDOW_WIDTH):
    """Build the gain-history list seeded with noisy initial PID gains.

    Returns a list of (Kp, Ki, Kd) tuples: the first window_width entries are
    drawn around the supplied initial gains, the remaining slots are
    pre-allocated as (0, 0, 0).

    Fixes vs. the original:
    - it referenced an undefined INIT_ALPHA; the init_Kp/Ki/Kd arguments are
      used instead
    - it ignored its max_trials/window_width parameters and used the module
      globals directly
    - a stray per-iteration debug print was removed
    """
    coefs = []
    # Noise around each initial gain keeps the startup window's stdev nonzero,
    # so the stop criterion is not triggered immediately.
    for _ in range(window_width):
        coefs.append((gauss(init_Kp, sqrt(STOP_THRESHOLD)),
                      gauss(init_Ki, sqrt(STOP_THRESHOLD)),
                      gauss(init_Kd, sqrt(STOP_THRESHOLD))))
    # Pre-allocate the remaining trial slots.
    coefs.extend([(0, 0, 0)] * (max_trials - window_width))
    return coefs
def runRobot(env, graphics=None, deltaT=CONFIG_DELTA_T):
    """Advance the simulation by one time step.

    Returns 1 when the user has closed the GUI (caller should stop), 0 otherwise.
    """
    # Refresh the GUI first; a False result means the window was closed.
    if graphics and not graphics.update():
        return 1
    env.update_robots(deltaT)   # advance robot state
    env.log_data()              # record this step
    time.sleep(deltaT)          # keep real time in step with sim time
    return 0
if __name__ == "__main__":
    # Set up the simulated environment and its GUI.
    env = E160_environment()
    graphics = E160_graphics(env)
    # NOTE(review): initialize_alpha_list is not defined anywhere in this
    # module; the helper above is named initialize_coefs_list — confirm the
    # intended call (module header says INCOMPLETE, UNUSED).
    alphas = initialize_alpha_list()
    learning_rate = INIT_LEARNING_RATE
    robot = env.robots[0]
    # Drive the left motor at the fixed test power while learning.
    robot.testing_power_L = DESIRED_POWER
    num_trials = 0
    # NOTE(review): alphas holds 3-tuples, but statistics.mean/stdev expect
    # numbers — as written these calls would raise TypeError.
    mean_alpha = mean(alphas[0:WINDOW_WIDTH])
    std_alpha = stdev(alphas[0:WINDOW_WIDTH])
    # Iterate until the recent window of alphas has settled.
    while std_alpha >= STOP_THRESHOLD:
        k = num_trials + WINDOW_WIDTH
        # measure the error between the encoder readings
        deltaS, deltaTheta = robot.delta_state
        print('dTheta - ', deltaTheta)
        # alphas[k] = alphas[k-1]
        # updateIndex = random.randint(0,2)
        # NOTE(review): updateIndex is never bound (the line above is commented
        # out), and tuples do not support item assignment; the three identical
        # statements below look like an unfinished per-gain update.
        alphas[k][updateIndex] = learning_rate*(deltaTheta) + alphas[k-1][updateIndex]
        alphas[k][updateIndex] = learning_rate*(deltaTheta) + alphas[k-1][updateIndex]
        alphas[k][updateIndex] = learning_rate*(deltaTheta) + alphas[k-1][updateIndex]
        # calculate new statistics over the most recent window
        mean_alpha = mean(alphas[k:k-WINDOW_WIDTH:-1])
        std_alpha = stdev(alphas[k:k-WINDOW_WIDTH:-1])
        if IS_DEBUG:
            print("kth alpha stats - ", k)
            print("-------------------")
            print("Next alpha: ", alphas[k])
            print("Mean alpha (last ", WINDOW_WIDTH, ") - ", mean_alpha)
            print("Stdev alpha (last ", WINDOW_WIDTH, ") - ", std_alpha)
            print("\n")
        # set alpha to new value
        robot.R_motor_scaling_factor = alphas[k]
        # increment number of trials
        num_trials = num_trials + 1
        # update learning rate (decays toward 0 as trials accumulate)
        learning_rate = min(1, learning_coefficient/(num_trials+1))
        # Step the simulation; a nonzero result means the GUI was closed.
        if runRobot(env, graphics=graphics):
            break
    # Stop the motor once learning has converged (or the GUI closed).
    robot.testing_power_L = 0
    print("After ", num_trials, " updates, the alpha value is ", alphas[k])
|
from django.shortcuts import render
# Create your views here.
from django.shortcuts import render, redirect
from .models import Patient
from LariatApp import utils
from .forms import PatientForm
from django.contrib.auth.decorators import login_required
from django.contrib.auth import login, authenticate
from django.contrib.auth.forms import UserCreationForm
# Create your views here.
@login_required
def index(request):
    """Render the application home page with a patient-count summary."""
    patient_count = Patient.objects.all().count()
    context = {'num_patients': patient_count}
    return render(request, 'index.html', context=context)
@login_required
def add_patient(request):
    """Render the patient-entry form; on POST, score and store the patient.

    On a valid POST the patient record is saved (stamped with the requesting
    user) and the computed RAI score page is shown; otherwise the entry form
    is rendered again.

    Fixes vs. the original: the GET branch assigned the PatientForm *class*
    (missing parentheses) instead of an instance, and an unused
    rai_list/features computation plus dead statements were removed.
    """
    if request.method == "POST":
        form = PatientForm(request.POST)
        # Frailty (RAI) score derived from the submitted questionnaire answers.
        rai = utils.get_rai(form)
        if form.is_valid():
            data = form.save(commit=False)
            data.created_by = request.user  # stamp the record with its author
            data.save()
            return render(request,
                          'show_rai.html',
                          context={'rai': rai,
                                   'Age': form.data['age'],
                                   'FirstName': form.data['first_name'],
                                   'LastName': form.data['last_name'],
                                   'MiddleInit': form.data['middle_initial'],
                                   'SSN': form.data['SSN']})
        # Invalid POST: fall through and re-render a fresh entry form.
        # NOTE(review): a fresh form discards the validation errors; consider
        # rendering the bound `form` instead so users see what went wrong.
        form = PatientForm()
    else:
        form = PatientForm()
    return render(request, 'add_patient.html', {'form': form})
def contact_us(request):
    """Render the contact page with the support email address."""
    support_email = "alon.ben-ari@va.gov"
    return render(request, 'contactus.html', context={'contact_info': support_email})
def about(request):
    """Render the about page describing the application's purpose."""
    blurb = "This is a cloud based application where Veterans may answer a screening questionnaire to the pre-operative clinic"
    return render(request, 'about.html', context={'about_text': blurb})
def admin(request):
    """Render a page listing every Patient row in the database."""
    patients = Patient.objects.all()
    # The template expects the queryset under the key 'form'.
    return render(request, 'admin.html', context={'form': patients})
|
#!/usr/bin/env python3
# coding=utf-8

# Package metadata.
__version__ = '0.0.1'
__author__ = "MedivhXu"
__create__ = "2018-07-20"
|
from .skew_scaler import SkewScaler
|
from qiskit import QuantumRegister, QuantumCircuit, Aer, execute, ClassicalRegister
class qubit:
    """Thin wrapper pairing a QuantumRegister with its QuantumCircuit."""

    def __init__(self, q, qc):
        self.q = q    # QuantumRegister holding the qubits
        self.qc = qc  # QuantumCircuit the register belongs to

    def measure(self):
        """Measure all qubits once on the qasm simulator.

        Returns the single observed bitstring (shots=1).
        """
        c = ClassicalRegister(len(self.q))
        # Fix: the classical register must be attached to the circuit before
        # it can be used as a measurement target; otherwise measure() raises.
        self.qc.add_register(c)
        self.qc.measure(self.q, c)
        # Get backend
        backend = Aer.get_backend("qasm_simulator")
        job = execute(self.qc, backend, shots=1)
        counts = job.result().get_counts()
        # With a single shot there is exactly one counts key.
        return list(counts.keys())[0]
# import gamengine modules
from bge import logic
from bge import events
from bge import render
from . import OPCreate
from . import datastoreUtils
from .helpers import *
from .settings import *
import random, pdb
def deleteObjs():
    """Delete the pre-active objects if any, otherwise the active selection.

    Records an undo entry before deleting, then clears the active selection
    and refreshes the outliner.
    """
    if any(logic.mvb.preActiveObj):
        targets = logic.mvb.preActiveObj
        logic.undo.append("Deleted")
    elif logic.mvb.activeObjs:
        targets = logic.mvb.activeObjs
        logic.undo.append("Deleted")
    else:
        targets = []
        if useDebug:
            print('Nothing to delete')
    for obj in targets:
        logic.mvb.deleteObject(logic.mvb.getMVBObject(obj))
    logic.mvb.activeObjs.clear()
    logic.outliner.updateModel()
def scatterObjs():
    """Offset each selected object by a random amount within a fixed radius."""
    scatterRadius = 5
    if logic.mvb.activeObjs:
        logic.undo.append("Scattered")
        for obj in logic.mvb.activeObjs:
            mvbObj = logic.mvb.getMVBObject(obj)
            pos = mvbObj.loc
            # Jitter every axis independently within [-radius, +radius].
            for axis in range(3):
                pos[axis] += random.uniform(-1, 1) * scatterRadius
            mvbObj.loc = pos
        # Snapshot the slide so the new layout is persisted.
        logic.mvb.slides[logic.mvb.activeSlide].capture()
def gatherObjs():
    """Move each selected object part-way toward the 3D widget position."""
    gatherStrength = 0.5  # fraction of the distance covered per invocation
    if logic.mvb.activeObjs:
        logic.undo.append("Gathered")
        target = logic.widgetObject.worldPosition
        for obj in logic.mvb.activeObjs:
            mvbObj = logic.mvb.getMVBObject(obj)
            mvbObj.loc = mix(mvbObj.loc, target, gatherStrength)
        # Snapshot the slide so the new layout is persisted.
        logic.mvb.slides[logic.mvb.activeSlide].capture()
def gatherBioMT():
    # TODO: not implemented — presumably intended to gather biological-assembly
    # (BioMT) copies like gatherObjs does for the active selection; confirm.
    pass
def duplicateObjs():
    """Duplicate the pre-active objects if any, otherwise the active selection.

    Blobby objects (type 1) are rebuilt via OPCreate.loadBlob; PDB objects
    (type 0) are re-imported through the import dialog with the source
    object's chain selection and loading flags. Duplicates inherit the source
    object's color. Finally the outliner is refreshed.

    Cleanups vs. the original: removed the redundant `hasattr(...) or False`
    expressions, an unused local, and a duplicate chain-name assignment.
    """
    # collect objects to duplicate
    objs = None
    if any(logic.mvb.preActiveObj):
        objs = logic.mvb.preActiveObj.copy()
    elif logic.mvb.activeObjs:
        objs = logic.mvb.activeObjs.copy()
    if objs:
        # duplicate
        for obj in objs:
            # get source mvbObj data
            mvbObj = logic.mvb.getMVBObject(obj)
            newName = datastoreUtils.incrementName(mvbObj.name, logic.mvb.objects)
            if mvbObj.type == 1:
                # blobby: rebuild from the source scale, then copy the color
                objList = OPCreate.loadBlob(scale=mvbObj.scale[:], name=newName)
                if objList:
                    for newObj in objList:
                        newObj.color = mvbObj.color[:]
            elif mvbObj.type == 0:
                # pdb: re-import the source file with the same loading options
                pdbFileName = mvbObj.pdbData.getFilename()
                pdbFullPath = os.path.join(logic.tempFilePath, "pdb", pdbFileName)
                # hasattr already returns a bool, so no `or False` is needed.
                logic.gui.importDialog.fileLoadingArgs['largeRadius'] = hasattr(obj, 'largeRadius')
                logic.gui.importDialog.fileLoadingArgs['bioMT'] = hasattr(obj, 'bioMT')
                logic.gui.importDialog.previewPDB(source=pdbFullPath)
                # enable only the source object's chain in the import dialog
                chains = mvbObj.pdbMetaData.chaininfo
                for key in chains:
                    chains[key] = False
                chainName = mvbObj.chainData.name
                if chainName in chains:
                    chains[chainName] = True
                logic.gui.importDialog.chains = chains
                objList = logic.gui.importDialog.importMol(silent=True)
                if objList:
                    for newObj in objList:
                        newObj.color = mvbObj.color[:]
    # done with looping, update scene
    logic.outliner.updateModel()
|
#!/usr/bin/env python
import pygame, glob
from pygame import *
class Scenario:
    """Game scenario (background) handling.

    Loads the background image once and blits it to the screen each frame.
    """

    def __init__(self, game):
        self.screen = game.screen
        background_file = "recursos/background.jpg"
        self.img = pygame.image.load(background_file)

    def update(self):
        # Draw the background anchored at the top-left corner.
        self.screen.blit(self.img, (0, 0))
#!/usr/bin/env python3
import sys
from pathlib import Path
from subprocess import run, PIPE
SHARED = Path(__file__).resolve().parent.parent / 'shared'
def echo_run(cmd):
    """Echo the command (prefixed with '+', shell-trace style), then run it."""
    print(' '.join(['+'] + [str(part) for part in cmd]))
    run(cmd)
def main(script):
    """Run *script* (a path relative to the shared dir) inside the kitchen VM
    matching this machine's architecture, destroying the VM afterwards."""
    machine = run(['uname', '-m'], stdout=PIPE).stdout.decode('latin1').strip()
    vm = 'vm-{}'.format(machine)
    # The shared directory is mounted at /mnt/shared inside the VM.
    remote_script = Path('/mnt/shared') / script
    remote_cmd = 'sudo {}'.format(remote_script)
    try:
        echo_run(['kitchen', 'create', vm])
        echo_run(['kitchen', 'exec', vm, '-c', remote_cmd])
    finally:
        # Always tear the VM down, even if the exec step failed.
        echo_run(['kitchen', 'destroy', vm])
if __name__ == '__main__':
    # Fix: `sys` is already imported at module scope; the local re-import was
    # redundant. Expects exactly one argument: the script (under shared/) to
    # run inside the VM.
    (script_txt,) = sys.argv[1:]
    main(Path(script_txt).resolve().relative_to(SHARED))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.