text stringlengths 8 6.05M |
|---|
import functions.calcTime, functions.calcHash
"""
Author: Bradley K. Hnatow
"""
class block():
    """A single blockchain block.

    NOTE(review): `block.calculateTime` and `block.calculateHash` are not
    defined on this class — they presumably live in functions.calcTime /
    functions.calcHash (imported above) and were meant to be attached to or
    called from there. Confirm before use.
    """
    def __init__(self, prevHash, blockIndex, data):
        self.timeOfCreation = block.calculateTime()   # creation timestamp
        self.prevHash = prevHash          # hash of the previous block in the chain
        self.blockIndex = blockIndex      # position of this block in the chain
        self.data = data                  # block payload
        self.blockHash = block.calculateHash(self)    # hash over this block's contents
        # FIX: removed a stray ' |' extraction artifact that made this
        # assignment a syntax error.
        self.proofOfWork = None
def find_spaceship(astromap):
    """Locate ALF's spaceship on the map.

    Returns [x, y] measured from the bottom-left corner of the map, or the
    string "Spaceship lost forever." when no 'X' is present.
    """
    rows = astromap.split('\n')
    for y, row in enumerate(reversed(rows)):
        x = row.find('X')
        if x != -1:
            return [x, y]
    return "Spaceship lost forever."
'''
Late last night in the Tanner household, ALF was repairing his spaceship so he
might get back to Melmac. Unfortunately for him, he forgot to put on the parking
brake, and the spaceship took off during repair. Now it's hovering in space.
ALF has the technology to bring the spaceship home if he can lock on to its location.
Given a map:
..........
..........
..........
.......X..
..........
..........
The map will be given in the form of a string with \n separating new lines.
The bottom left of the map is [0, 0]. X is ALF's spaceship.
In this example:
findSpaceship(map) => [7, 2]
If you cannot find the spaceship, the result should be
"Spaceship lost forever."
'''
|
from django.views.generic import ListView, DetailView, TemplateView
from pet_shelter.models import Pet
class Index(ListView):
    # Front page: lists pets still in the shelter (not yet adopted).
    queryset = Pet.objects.filter(is_at_home=False)
    template_name = 'index.html'
class Athome(ListView):
    # Lists pets that have already been adopted (at home).
    queryset = Pet.objects.filter(is_at_home=True)
    template_name = 'athome.html'
class Pet(DetailView):
    # Detail page for a single pet.
    # NOTE(review): this view reuses the imported model's name. `model = Pet`
    # still resolves to the model (the class body executes before the name is
    # rebound), but any code below this class that references `Pet` gets the
    # view, not the model. Consider renaming, e.g. PetDetail.
    model = Pet
    template_name = 'pet.html'
class About(TemplateView):
    # Static "about" page; no queryset needed.
    template_name = 'about.html'
|
# deque comes from Python's collections module.
# FIX (review): the original imported from `_collections`, the private C
# implementation module; `collections` is the public, portable location.
from collections import deque

queue = deque()
# Enqueue data at the back of the queue.
queue.append("태호")
queue.append("현승")
queue.append("지웅")
queue.append("동욱")
queue.append("신의")
print(queue)  # show the queue
# Peek at the element at the front of the queue.
print(queue[0])
# Dequeue (remove and print) the front element, three times.
print(queue.popleft())
print(queue.popleft())
print(queue.popleft())
print(queue)  # show the queue
# Generated by Django 3.2.4 on 2021-07-01 06:48
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: creates the StockExchange table."""
    # Must run after migration 0005 of the `database` app.
    dependencies = [
        ('database', '0005_auto_20210701_1344'),
    ]
    operations = [
        migrations.CreateModel(
            name='StockExchange',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('stock_exchange', models.CharField(max_length=10)),
                ('name', models.CharField(max_length=100)),
            ],
        ),
    ]
|
import tensorflow as tf
import tensorflow_probability as tfp
tfd = tfp.distributions
sum_all = tf.math.reduce_sum
from VariationalPosterior import VariationalPosterior
class BayesianLSTMCellTied(tf.keras.Model):
    """LSTM cell with Bayesian (variational) weights that are shared
    ("tied") across all four gates: W and B are sampled from Gaussian
    posteriors parameterised by (mu, rho) via VariationalPosterior.
    Each call records log-prior and log-posterior terms for the
    Bayes-by-backprop style loss."""
    def __init__(self, num_units, training, init, prior, **kwargs):
        # NOTE(review): num_units is forwarded positionally to
        # tf.keras.Model.__init__, which has no such positional parameter —
        # confirm this is intentional.
        super(BayesianLSTMCellTied, self).__init__(num_units, **kwargs)
        self.init = init            # initializer for the mu/rho variables
        self.prior = prior          # prior distribution over sampled weights
        self.units = num_units
        self.state_size = num_units
        self.is_training = training
    def initialise_cell(self, links):
        """Create the variational parameters; `links` is the input width."""
        self.num_links = links
        # One tied (input + recurrent) matrix covering all 4 gates, hence 4*units.
        self.W_mu = self.add_weight(shape=(self.units+self.num_links, 4*self.units),
                                initializer=self.init,
                                name='W_mu', trainable=True)
        self.W_rho = self.add_weight(shape=(self.units+self.num_links, 4*self.units),
                                initializer=self.init,
                                name='W_rho', trainable=True)
        self.B_mu = self.add_weight(shape=(1, 4*self.units),
                                initializer=self.init,
                                name='B_mu', trainable=True)
        self.B_rho = self.add_weight(shape=(1, 4*self.units),
                                initializer=self.init,
                                name='B_rho', trainable=True)
        self.W_dist = VariationalPosterior(self.W_mu, self.W_rho)  # q(W)
        self.B_dist = VariationalPosterior(self.B_mu, self.B_rho)  # q(B)
        ## Make sure following is only printed once during training and not for testing!
        print(" Tied Cell has been built (in:", links, ") (out:", self.units, ")")
        self.sampling = False
        self.built = True
    def call(self, inputs, states):
        """One LSTM step. states[0] is (2*batch, units): top half is the
        cell state c_t, bottom half the hidden state h_t (see
        get_initial_state). Returns (h_new, concat([c_new, h_new]))."""
        # Draw one weight/bias sample from the variational posterior.
        W = self.W_dist.sample(self.is_training, self.sampling)
        B = self.B_dist.sample(self.is_training, self.sampling)
        c_t, h_t = tf.split(value=states[0], num_or_size_splits=2, axis=0)
        concat_inputs_hidden = tf.concat([tf.cast(inputs, tf.float32), h_t], 1)
        concat_inputs_hidden = tf.nn.bias_add(tf.matmul(concat_inputs_hidden, tf.squeeze(W)),
                                tf.squeeze(B))
        # KL terms recorded per step for the variational objective.
        self.log_prior = sum_all(self.prior.log_prob(W)) + sum_all(self.prior.log_prob(B))
        self.log_variational_posterior = sum_all(self.W_dist.log_prob(W)) + sum_all(self.B_dist.log_prob(B))
        # Gates: Input, New, Forget and Output
        i, j, f, o = tf.split(value = concat_inputs_hidden, num_or_size_splits = 4, axis = 1)
        c_new = c_t*tf.sigmoid(f) + tf.sigmoid(i)*tf.math.tanh(j)
        h_new = tf.math.tanh(c_new)*tf.sigmoid(o)
        new_state = tf.concat([c_new, h_new], axis=0)
        return h_new, new_state
    def get_initial_state(self, inputs = None, batch_size = None, dtype = None):
        """Zero state of shape (2*batch_size, units), stacking [c; h] on axis 0."""
        return tf.zeros((2*batch_size, self.units), dtype = dtype)
|
from typing import List, Optional
from datetime import datetime
from pydantic import BaseModel
class ProjectBase(BaseModel):
    # Fields shared by every project schema.
    name: str
class ProjectCreate(ProjectBase):
    # Creation payload; the id is assigned server-side.
    pass
class ProjectUpdate(ProjectBase):
    # Update payload; must identify the target row.
    id: int
class Project(ProjectBase):
    """Project as read from the database / returned by the API."""
    id: int
    class Config:
        # Allow non-pydantic field types and loading from ORM objects.
        arbitrary_types_allowed = True
        # FIX: removed a stray ' |' extraction artifact that broke this line.
        orm_mode = True
class Solution:
    # @param A : tuple of integers
    # @return an integer
    def singleNumber(self, A):
        """Return the element of A that appears once when every other
        element appears exactly twice (classic "single number").

        Uses XOR folding: duplicate pairs cancel, leaving the lone value.
        O(n) time and O(1) extra space — the original dict approach used
        O(n) space. Returns None for empty input, matching the original's
        fall-through behavior.
        """
        if not A:
            return None
        result = 0
        for num in A:
            result ^= num
        return result
|
# encoding: utf-8
"""
@ author: wangmingrui
@ time: 2019/1/26 17:09
@ desc: 服务器端主程序,实现管理功能
"""
import socketserver, json, os, hashlib
from ..conf import setting
from ..core.ftp_server import FTPServer
class Manager(object):
    '''
    Main program: starts the FTPServer, creates users, handles login and logout.
    '''
    def start_ftp(self):
        '''
        Start the FTP server.
        :return:
        '''
        # NOTE(review): the FTPServer instance is created and then dropped —
        # confirm FTPServer.__init__ begins serving by itself, otherwise this
        # method has no visible effect.
        server = FTPServer(setting.ip_port)
|
'''
George Alromhin gr.858301
[1]Ali Abdelaal, Autoencoders for Image Reconstruction in Python and Keras
https://stackabuse.com/autoencoders-for-image-reconstruction-in-python-and-keras/
[2]Aditya Sharma, Autoencoder as a Classifier
https://www.datacamp.com/community/tutorials/autoencoder-classifier-python
[3]Aditya Sharma,Understanding Autoencoders
https://www.learnopencv.com/understanding-autoencoders-using-tensorflow-python/
'''
import numpy as np
import os
import cv2
import random
import matplotlib.pyplot as plt
from os.path import join as pjoin
from functools import partial
from tqdm import tqdm
from IPython.core.display import Image, display
#To test the hypotheses, the VOC 2012 dataset was used, the image has the size of 256x256 and it's grayscale image.
#The first generator takes the image from the dataset and always returns it the same.
gen_img_path = None  # path of the image most recently handed to first_generator

def first_generator(image, c_max=255):
    """Yield the same normalized grayscale image forever.

    Pixels are mapped from [0, c_max] into [-1, 1] via 2*p/c_max - 1.
    """
    global gen_img_path
    gen_img_path = image
    normalized = 2 * cv2.imread(image, 0) / c_max - 1  # grayscale load + scale
    while True:
        yield normalized.copy()
#The second returns the image.
def second_generator(image, c_max = 255):
    """Yield the normalized grayscale image, re-reading the file on every
    iteration.

    BUG FIX (review): the original computed 2*image/c_max - 1 directly on
    the *path string* (`image` is a filename, see the call site), which
    raises TypeError on the first next(). Load the file with cv2 first,
    mirroring first_generator. TODO confirm whether re-reading each
    iteration (vs. caching like gen1) was the intended difference.
    """
    while 1:
        img = cv2.imread(image, 0)   # grayscale
        img = 2*img/c_max - 1        # map [0, c_max] -> [-1, 1]
        yield img
_image = "images/house_est_s5.png"
gen1 = first_generator(_image)
gen2 = second_generator(_image)
def initializer_glorot_uniform(input_layers, output_layers):
    """Return a Glorot-uniform sampler for a layer with the given fan-in
    and fan-out; call the result with size=[...] to draw weights."""
    fan_sum = input_layers + output_layers
    limit = (6.0 / fan_sum) ** 0.5
    return partial(np.random.uniform, low=-limit, high=limit)
#Autoencoder
#Create an Autoencoder class that takes a 256x256 grayscale image, splits it into 16x16 crops (or others, the user specifies)
#and z compression coefficient, and based on this creates a learning model.
#use_adapt_lr - flag indicating whether to use an adaptive learning rate
#use_norm-Flag indicating whether to normalize weights after they are updated
class Autoencoder():
    """Linear two-layer autoencoder trained on image crops.

    A 256x256 grayscale image is cut into 16x16 tiles; each tile is
    flattened to a 256-vector, encoded to int(z*256) units and decoded
    back. Training uses the custom per-tile update rule in backward().
    """
    def __init__(self,
                 z=0.5,
                 lr=1e-3,
                 use_adapt_lr=False,
                 use_norm=False,
                 phase='train'):
        # z: compression ratio (hidden width = z * input width)
        # lr: fixed learning rate, ignored when use_adapt_lr is True
        # use_norm: re-normalize W2 columns / W1 rows after each update
        # phase: 'train' => __call__ learns and returns the summed error;
        #        anything else => __call__ returns the reconstructed image
        self.input_layers = 256 #crop size * crop size (16*16)
        self.mid_layers = int(z*self.input_layers)
        self.input_size = 256
        self.crop_size = 16
        if self.input_size % self.crop_size != 0:
            raise ValueError("incorrect input data")
        self.initializer = initializer_glorot_uniform(self.input_layers, self.mid_layers)
        self.phase=phase
        self.lr = lr
        self.use_adapt_lr = use_adapt_lr
        self.use_norm = use_norm
        self.loss = lambda x, y: ((x - y) ** 2)  # element-wise squared error
        self.build()
    def build(self):
        """Allocate the encoder (W1) and decoder (W2) weight matrices."""
        self.W1 = self.initializer(size=[self.input_layers, self.mid_layers])
        self.W2 = self.initializer(size=[self.mid_layers, self.input_layers])
    def __call__(self, inp):
        """Run every tile of `inp` through the net; see the class docstring
        for the train/test behavior split."""
        err = []
        results = []
        size = self.input_size
        crop_size = self.crop_size
        # Cut the image into crop_size x crop_size tiles (strip-major order).
        parts = inp.reshape([size, size//crop_size, crop_size]).transpose(1, 0, 2) \
            .reshape((size//crop_size)**2, crop_size, crop_size)
        for part in parts:
            inp_part = np.expand_dims(part.flatten(), 0)
            mid, res = self.forward(inp_part)
            results.append(res.flatten().reshape(crop_size, crop_size))
            if self.phase == 'train':
                diff = res-inp_part
                err.append((diff*diff).sum())
                self.backward(inp_part, mid, diff)
        if self.phase == 'train':
            return np.sum(err)
        else:
            # Reassemble the reconstructed tiles back into a size x size image.
            return np.array(results).reshape(size//crop_size, size, crop_size).transpose(1,0,2).reshape(size, size)
    def forward(self, inp):
        """Return (hidden code, reconstruction) for one flattened tile."""
        mid = self.encode(inp)
        return mid, self.decode(mid)
    def backward(self, inp, mid, err):
        """Update both weight matrices from one tile's reconstruction error.

        With use_adapt_lr the step is 1/(v . v)^2, i.e. 1/||v||^4 —
        NOTE(review): confirm the square is intended; 1/||v||^2 is the
        classic normalized-LMS step.
        """
        lr = 1/np.dot(inp, inp.T)**2 if self.use_adapt_lr else self.lr
        self.W1 -= lr * np.dot(np.dot(inp.T, err), self.W2.T)
        lr = 1/np.dot(mid, mid.T)**2 if self.use_adapt_lr else self.lr
        self.W2 -= lr * np.dot(mid.T, err)
        if self.use_norm:
            self.W2 /= np.linalg.norm(self.W2, axis=0, keepdims=True)
            self.W1 /= np.linalg.norm(self.W1, axis=1, keepdims=True)
    def encode(self, inp):
        """Linear encoder: (1, input_layers) -> (1, mid_layers)."""
        return np.dot(inp, self.W1)
    def decode(self, mid):
        """Linear decoder: (1, mid_layers) -> (1, input_layers)."""
        return np.dot(mid, self.W2)
    def get_weights(self):
        """Return copies of [W1, W2]."""
        return [self.W1.copy(), self.W2.copy()]
    def set_weights(self, weights):
        """Install [W1, W2] (no copy is made)."""
        self.W1, self.W2 = weights
    def eval(self):
        """Switch to test phase: __call__ now returns the reconstruction."""
        self.phase = 'test'
def predict(model):
    """Reconstruct the global test image with `model` and display the
    original next to the reconstruction."""
    source = cv2.imread(_image, 0)
    h, w = source.shape
    reconstruction = model(source)
    fig, ax = plt.subplots(1, 2)
    for axis, picture in zip(ax, (source, reconstruction)):
        axis.imshow(picture, cmap='gray')
def train_image(use_norm=True, use_adapt_lr=True, z=0.75):
    """Train an Autoencoder on the single image from gen1, plot the error
    curve, then reconstruct with the best snapshot and display the result."""
    model = Autoencoder(use_norm=use_norm, use_adapt_lr=use_adapt_lr, z=z)
    errors = []
    it_count = 300  # training iterations (same input image every time)
    best_weights = None
    best_error = np.inf
    for it in tqdm(range(it_count)):
        inp = next(gen1)
        err = model(inp)  # train phase: learns and returns the summed error
        errors.append(err)
        # Keep a snapshot of the best weights seen so far.
        if err < best_error:
            best_error = err
            best_weights = model.get_weights()
            #print(best_error)
    x = np.arange(len(errors))
    plt.xlabel("iterations")
    plt.ylabel("error")
    plt.plot(x, np.array(errors))
    idx = np.argmin(errors)
    print("BEST ERROR {}".format(errors[idx]))
    plt.plot(x[idx], errors[idx], 'rx--', linewidth=2, markersize=12)
    # Switch to test phase and reconstruct with the best snapshot.
    model.eval()
    model.set_weights(best_weights)
    assert gen_img_path is not None
    predict(model)
    plt.show()
#Check at what selection of hyperparameters will optimally converge,
#the results on the graph
# Runs the full 2x2 grid of (use_norm, use_adapt_lr) with the default z=0.75.
for use_norm in [False, True]:
    for use_adapt_lr in [False, True]:
        train_image(use_norm, use_adapt_lr)
|
import Neuron as neu
import Parser as files
class Network:
    """Feed-forward network with one hidden layer built from Neuron
    objects; trained by backpropagation with momentum."""

    def __init__(self, n_inputs, n_hidden_layers, n_outputs, if_bias, learn_coef, momentum_coef):
        # parameters:
        # n_inputs - number of inputs
        # n_hidden_layers - number of neurons in hidden layer
        # n_outputs - number of desired outputs
        # if_bias - decide if bias is added to calculation
        # learn_coef - eta, learning coefficient
        # momentum_coef - alfa, momentum coefficient
        # Lists containing all neurons in the layers
        self.hiddenLayer = []
        self.outputLayer = []
        # All neurons share the sigmoid activation and its derivative.
        neu.NeuronHidden.sigmoid_f = neu.sigmoid
        neu.NeuronHidden.dF = neu.sigmoid_derivative
        neu.NeuronOutput.sigmoid_f = neu.sigmoid
        neu.NeuronOutput.dF = neu.sigmoid_derivative
        # Initializing desired number of neurons in HL and OL, applying incrementing indexes to HL neurons
        for i in range(n_hidden_layers):
            hl_neuron = neu.NeuronHidden(n_inputs, if_bias, momentum_coef, learn_coef, self.outputLayer, i + 1)
            self.hiddenLayer.append(hl_neuron)
        for i in range(n_outputs):
            ol_neuron = neu.NeuronOutput(n_hidden_layers, if_bias, momentum_coef, learn_coef, self.hiddenLayer)
            self.outputLayer.append(ol_neuron)

    # Function to keep the output of every output layer
    def f(self, x):
        """Return the rounded response of every output neuron for input x."""
        out = []
        for i in range(len(self.outputLayer)):
            out.append(round(self.outputLayer[i].sigmoid_f(x), 2))
        return out

    def learn(self, x, y):
        """One backpropagation step for a single sample (x, y); logs the
        corrected weights to output_weights.out."""
        # FIX (review): the original opened 'output_weights.out' without ever
        # closing it, leaking a handle on every call. `with` guarantees the
        # close; mode 'w' already truncates, so the explicit truncate(0) was
        # redundant and has been dropped.
        with open('output_weights.out', 'w') as f_out:
            # calculate errors for every neuron in output layer
            for i in range(len(self.outputLayer)):
                self.outputLayer[i].sigma(x, y[i])
            # calculate errors for every neuron in hidden layer
            for obj in self.hiddenLayer:
                obj.sigma(x)
            # correct the weights
            for obj in self.outputLayer:
                ol_out = obj.correct(x)
                f_out.write(str(ol_out))
            for obj in self.hiddenLayer:
                hl_out = obj.correct(x)
                f_out.write(str(hl_out))

    # Step through one epoch
    def learn_epoch(self, x, y):
        """Run learn() once for every (sample, target) pair."""
        for i in range(len(x)):
            self.learn(x[i], y[i])
def delta_Y(y, wyliczone):
    """Halved mean squared error: sum((computed - target)^2) / (2 * len(y)).

    y: list of target output vectors; wyliczone: matching computed vectors.
    """
    total = sum(
        (wyliczone[i][j] - y[i][j]) ** 2
        for i in range(len(y))
        for j in range(len(y[i]))
    )
    return total / (2 * len(y))
def delta_net(x, y, network):
    """Network-level error: compare targets y against the network's
    responses for every sample in x."""
    predictions = [network.f(sample) for sample in x]
    return delta_Y(y, predictions)
def outcome(x, network):
    """Collect the network's output vector for every input sample."""
    return [network.f(sample) for sample in x]
|
import datetime
import logging
from datetime import datetime
from google.appengine.ext import ndb
class CallOut(ndb.Model):
    """Datastore record of a single emergency call-out."""
    date = ndb.DateProperty(indexed=True, required=True)
    #date = ndb.StringProperty(indexed=True, required=True)
    agency = ndb.StringProperty(indexed=True, required=True)
    station_area = ndb.StringProperty(indexed=True, required=True)
    district = ndb.StringProperty(indexed=True, required=True)
    @classmethod
    def from_csv(cls, csv_row):
        """Build a CallOut from a CSV row laid out as
        [date, agency, station_area, district, ...]."""
        #TODO Count csv_row, if not 11 raise an exception
        #try:
        # Dates arrive in day/month/year form.
        date = datetime.strptime(csv_row[0], '%d/%m/%Y')
        return cls(date=date, agency=csv_row[1], station_area=csv_row[2], district=csv_row[3])
        #except Exception, e:
        #    raise e
    @classmethod
    def find_by_district(cls,district):
        """Query call-outs for a district, oldest first."""
        return cls.query(cls.district==district).order(cls.date)
    @classmethod
    def find_by_stationarea(cls,station_area):
        """Query call-outs for a station area, oldest first."""
        return cls.query(cls.station_area==station_area).order(cls.date)
    @classmethod
    def find_by_date(cls,date):
        """Query call-outs on a given date."""
        return cls.query(cls.date==date).order(cls.date)
    def to_dict(self):
        ''' Override for ndb.Model.to_dict() method so we can include id '''
        d = super(CallOut, self).to_dict()
        d["date"] = self.date.isoformat()  # DateProperty isn't JSON-serializable
        d["id"] = '%s' % str(self.key.urlsafe())
        return d
|
# Import DQoc HTML from lp:ubuntu-ui-toolkit
import os, sys, re
import simplejson
from django.core.files import File
from django.core.files.storage import get_storage_class
from ..models import *
from . import Importer
__all__ = (
'DoxygenImporter',
)
SECTIONS = {}
class DoxygenImporter(Importer):
    """Imports Doxygen-generated HTML API docs (classes, structs,
    namespaces and free pages) into the site's models
    (Section / Namespace / Element / Page).

    NOTE(review): Python 2 code (print statements, `unicode`). Several
    attributes used below — self.class_map, self.page_map,
    self.namespace_map, self.namespace_order, self.page_order,
    self.version, self.verbosity, self.parse_line — are not defined in
    this class; presumably they come from the Importer base class.
    Verify before refactoring.
    """
    # NOTE(review): declared "qdoc", but every record saved below sets
    # source_format = "doxygen" — confirm which is correct.
    SOURCE_FORMAT = "qdoc"
    def __init__(self, *args, **kwargs):
        # Options consumed here: 'dir' (doc root) and 'sections' (a python
        # file that must define a SECTIONS dict).
        super(DoxygenImporter, self).__init__(*args, **kwargs)
        self.source = self.options.get('dir')
        self.DOC_ROOT = self.source
        self.sections_file = self.options.get('sections')
        self.class_data_map = {}
        self.page_data_map = {}
    def parse_pagename(self, pagename):
        """Normalize an HTML filename into a slug-safe page name."""
        if pagename.endswith('.html'):
            pagename = pagename[:-5]
        return pagename.replace('/', '-').replace(' ', '_')
    def parse_namespace(self, namespace):
        """Apply the strip_namespace / default-namespace options to a raw
        namespace string."""
        if self.options.get('strip_namespace', None) and namespace:
            strip_prefix = self.options.get('strip_namespace')
            if namespace.startswith(strip_prefix):
                namespace = namespace[len(strip_prefix):]
            elif strip_prefix.startswith(namespace):
                # The namespace is itself a prefix of the stripped root: drop it.
                namespace = ''
            if namespace.startswith('.'):
                namespace = namespace[1:]
        if self.options.get('namespace', None) and not namespace:
            # Fall back to the configured default namespace.
            return self.options.get('namespace')
        return namespace
    def get_section(self, namespace, fullname):
        """Resolve a target section: exact fullname match first, then the
        namespace, then the '*' catch-all entry."""
        if fullname in SECTIONS:
            return SECTIONS[fullname]
        elif namespace in SECTIONS:
            return SECTIONS[namespace]
        else:
            return SECTIONS['*']
    def read_json_file(self, filepath):
        """Parse one of Doxygen's JS data files (annotated.js / navtree*.js)
        by dropping the `var X = [` header and `];` footer, converting the
        JS literal to JSON, and loading it."""
        js_file = open(filepath)
        js_data = js_file.readlines()
        js_file.close()
        try:
            endvar = js_data.index("];\n")+1
        except ValueError:
            endvar = len(js_data)
        try:
            # JS single quotes -> JSON double quotes; strip newlines/semicolons.
            json_data = ''.join(js_data[1:endvar]).replace('\n', '').replace("'", '"').replace(';', '')
            json_object = simplejson.loads(json_data)
            return json_object
        except Exception as e:
            # NOTE(review): interactive debugger hook left in the error path —
            # remove before running unattended.
            import pdb; pdb.set_trace()
            return ''
    def run(self):
        """Entry point: load the SECTIONS definition, then import class
        documentation and (optionally) the documentation pages."""
        if not os.path.exists(self.source):
            print "Source directory not found"
            exit(1)
        if not self.sections_file:
            print "You must define a sections definition file to import Doxygen API docs"
            exit(2)
        elif not os.path.exists(self.sections_file):
            print "Sections definition file not found"
            exit(1)
        else:
            # Import the sections file as a python module to read its SECTIONS.
            sections_file_dir = os.path.dirname(self.sections_file)
            if sections_file_dir:
                if self.verbosity >= 2:
                    print "Adding to PYTHONPATH: %s" % sections_file_dir
                sys.path.append(sections_file_dir)
            sections_file_module = os.path.basename(self.sections_file)
            if sections_file_module.endswith('.py'):
                sections_file_module = sections_file_module[:-3]
            if self.verbosity >= 2:
                print "Importing module: %s" % sections_file_module
            sections_data = __import__(sections_file_module)
            if hasattr(sections_data, 'SECTIONS') and isinstance(sections_data.SECTIONS, dict):
                SECTIONS.update(sections_data.SECTIONS)
            else:
                print "Sections file does not contain a SECTIONS dictionary"
                exit(3)
        # Discover classes/structs/namespaces from annotated.js, then pages.
        self.read_classes(self.read_json_file(os.path.join(self.source, 'annotated.js')))
        if not self.options.get('no_pages', False):
            if os.path.exists(os.path.join(self.source, 'navtreedata.js')):
                self.read_pages(self.read_json_file(os.path.join(self.source, 'navtreedata.js')), self.parse_namespace(None))
            elif os.path.exists(os.path.join(self.source, 'navtree.js')):
                self.read_pages(self.read_json_file(os.path.join(self.source, 'navtree.js')), self.parse_namespace(None))
        #exit(0)
        namespace_order_index = 0
        #print "Namespace_order: %s" % self.namespace_order
        #for nsfile in self.namespace_order:
            #parent_ns_name, nsname, nsfullname, nstitle = self.namespace_map[nsfile]
            #try:
                #self.import_namespace(nsfile, nsname, nstitle, nsfullname, parent_ns_name, namespace_order_index)
                #namespace_order_index += 1
            #except ServiceOperationFailed as e:
                #print "Failed to import namespace '%s': %s'" % (nsfile, e.message)
        # Import class documentation
        for classfile, classdef in self.class_data_map.items():
            ns_name, classname, fullname = classdef
            cleaned_ns_name = self.parse_namespace(ns_name)
            section, section_created = Section.objects.get_or_create(name=self.get_section(ns_name, fullname), topic_version=self.version)
            if cleaned_ns_name is not None and cleaned_ns_name != '':
                namespace, created = Namespace.objects.get_or_create(name=ns_name, display_name=cleaned_ns_name, platform_section=section)
                if created:
                    print "Created Namespace: %s" % ns_name
            else:
                namespace = None
            element, created = Element.objects.get_or_create(name=classname, fullname=fullname, section=section, namespace=namespace)
            if self.verbosity >= 1:
                print 'Element: ' + element.fullname
            # Locate the contents <div> inside the class's HTML page.
            doc_file = os.path.join(self.DOC_ROOT, classfile)
            doc_handle = open(doc_file)
            doc_data = doc_handle.readlines()
            doc_handle.close()
            doc_start = 2
            doc_end = len(doc_data)
            for i, line in enumerate(doc_data):
                if '<div class="contents">' in line:
                    doc_start = i+1
                if '</div><!-- contents -->' in line and doc_end > i:
                    doc_end = i-1
                if '<hr/>The documentation for this ' in line and ' was generated from the following' in line and doc_end > i:
                    doc_end = i-1
            if self.verbosity >= 2:
                print "Doc range: %s:%s" % (doc_start, doc_end)
            try:
                # The line after the contents div holds the brief description:
                # strip link markup and clamp to the 256-char field limit.
                brief_start = doc_data.index('<div class="contents">\n')
                desc_line = self.parse_line(doc_data[brief_start+2][3:], classfile, fullname)
                link_replacer = re.compile('<a [^>]*>([^<]+)</a>')
                while link_replacer.search(desc_line):
                    desc_line = link_replacer.sub('\g<1>', desc_line, count=1)
                if len(desc_line) >= 256:
                    desc_line = desc_line[:252]+'...'
                element.description = desc_line
            except ValueError:
                pass
            try:
                # Change the content of the docs
                cleaned_data = ''
                for line in doc_data[doc_start:doc_end]:
                    if line == '' or line == '\n':
                        continue
                    if 'List of all members, including inherited members' in line:
                        continue
                    line = self.parse_line(line, classfile, fullname)
                    if isinstance(line, unicode):
                        line = line.encode('ascii', 'replace')
                    cleaned_data += line
                element.data = cleaned_data
            except Exception, e:
                print "Parsing content failed:"
                print e
                #import pdb; pdb.set_trace()
            element.source_file = os.path.basename(doc_file)
            element.source_format = "doxygen"
            element.save()
        if not self.options.get('no_pages', False):
            page_order_index = 0
            # Namespaces are imported through the page pipeline as well.
            self.page_order.extend(self.namespace_order)
            for pagefile in self.page_order:
                ns_name, pagename, pagefullname, pagetitle = self.page_data_map[pagefile]
                if pagename == 'notitle':
                    # Doxygen's untitled front page becomes the index page.
                    pagename = 'index'
                    pagefullname = 'index'
                    pagetitle = 'Introduction'
                try:
                    self.import_page(pagefile, pagename, pagetitle, pagefullname, ns_name, page_order_index)
                    page_order_index += 1
                except ServiceOperationFailed as e:
                    print "Failed to import page '%s': %s'" % (pagefile, e.message)
    def read_classes(self, ns_data, namespace_parent=None):
        """Recursively walk annotated.js entries, recording namespaces,
        classes and structs into the *_map / *_order attributes."""
        for namespace_def in ns_data:
            namespace_shortname = namespace_def[0]
            namespace_file = namespace_def[1]
            namespace_data = namespace_def[2]
            if namespace_parent:
                namespace_fullname = namespace_parent + '.' + namespace_shortname
            else:
                namespace_fullname = namespace_shortname
            if namespace_file and namespace_data:
                # Drop any '#anchor' suffix from the file reference.
                if '#' in namespace_file:
                    namespace_file = namespace_file[:namespace_file.index('#')]
                else:
                    namespace_file = namespace_file
                if namespace_file.startswith('namespace'):
                    print "Namespace: %s" % (namespace_fullname)
                    if namespace_file not in self.namespace_map:
                        # NOTE(review): checks namespace_map but stores into
                        # page_map — confirm this asymmetry is intentional.
                        self.page_map[namespace_file] = namespace_fullname
                        self.page_data_map[namespace_file] = (namespace_parent, namespace_shortname, namespace_fullname, namespace_fullname)
                        self.namespace_order.append(namespace_file)
                    if isinstance(namespace_data, (str, unicode)) and os.path.exists(os.path.join(self.source, namespace_data+'.js')):
                        child_data = self.read_json_file(os.path.join(self.source, namespace_data+'.js'))
                        self.read_classes(child_data, namespace_fullname)
                elif namespace_file.startswith('class'):
                    print "Class: %s" % (namespace_fullname)
                    if namespace_file not in self.class_map:
                        self.class_map[namespace_file] = namespace_fullname
                        self.class_data_map[namespace_file] = (namespace_parent, namespace_shortname, namespace_fullname)
                    if isinstance(namespace_data, (str, unicode)) and os.path.exists(os.path.join(self.source, namespace_data+'.js')):
                        child_data = self.read_json_file(os.path.join(self.source, namespace_data+'.js'))
                        self.read_classes(child_data, namespace_fullname)
                elif namespace_file.startswith('struct'):
                    print "Struct: %s" % (namespace_fullname)
                    if namespace_file not in self.class_map:
                        self.class_map[namespace_file] = namespace_fullname
                        self.class_data_map[namespace_file] = (namespace_parent, namespace_shortname, namespace_fullname)
            elif namespace_data:
                if isinstance(namespace_data, list):
                    self.read_classes(namespace_data, namespace_fullname)
    def read_pages(self, ns_data, namespace_parent=None):
        """Recursively walk the navtree, recording documentation pages.
        Stops at the generated "Namespaces"/"Classes"/"Files" branches."""
        for namespace_def in ns_data:
            page_title = namespace_def[0]
            page_href = namespace_def[1]
            page_data = namespace_def[2]
            if page_title in ("Namespaces", "Classes", "Files"):
                return
            if page_href == 'index.html' and self.options.get('no_index', False):
                return
            if page_href:
                # Drop any '#anchor' suffix, then derive a short name.
                if '#' in page_href:
                    page_file = page_href[:page_href.index('#')]
                else:
                    page_file = page_href
                if page_file.endswith('.html'):
                    page_shortname = page_file[:-5]
                else:
                    page_shortname = page_file
                if namespace_parent:
                    page_fullname = namespace_parent + '.' + page_shortname
                else:
                    page_fullname = page_shortname
                if not page_file in self.page_map:
                    print "Page: %s" % (page_file)
                    self.page_map[page_file] = page_fullname
                    self.page_data_map[page_file] = (namespace_parent, page_shortname, page_fullname, page_title)
                    self.page_order.append(page_file)
            if page_data:
                if isinstance(page_data, list):
                    self.read_pages(page_data, namespace_parent)
    def import_page(self, pagehref, pagename, pagetitle, pagefullname, ns_name, page_order_index):
        """Create/update one Page record from a Doxygen HTML page,
        extracting and cleaning the contents div (same pattern as run())."""
        if pagename.endswith('.html'):
            pagename = pagename[:-5]
        cleaned_ns_name = self.parse_namespace(ns_name)
        section, section_created = Section.objects.get_or_create(name=self.get_section(ns_name, pagename), topic_version=self.version)
        if cleaned_ns_name is not None and cleaned_ns_name != '':
            namespace, created = Namespace.objects.get_or_create(name=ns_name, display_name=cleaned_ns_name, platform_section=section)
        else:
            namespace = None
        # Clamp the title to the model's 64-char field.
        if len(pagetitle) >= 64:
            pagetitle = pagetitle[:60]+'...'
        page, created = Page.objects.get_or_create(slug=pagename, fullname=pagefullname, title=pagetitle, section=section, namespace=namespace)
        if self.verbosity >= 1:
            print 'Page[%s]: %s' % (page_order_index, page.slug)
        doc_file = os.path.join(self.DOC_ROOT, pagehref)
        doc_handle = open(doc_file)
        doc_data = doc_handle.readlines()
        doc_handle.close()
        doc_start = 2
        doc_end = len(doc_data)
        for i, line in enumerate(doc_data):
            if '<div class="contents">' in line:
                doc_start = i+1
            if '</div><!-- doc-content -->' in line and doc_end > i:
                doc_end = i-1
            if '<!-- start footer part -->' in line and doc_end > i:
                doc_end = i-2
        if self.verbosity >= 2:
            print "Doc range: %s:%s" % (doc_start, doc_end)
        try:
            # Change the content of the docs
            cleaned_data = ''
            for line in doc_data[doc_start:doc_end]:
                if line == '' or line == '\n':
                    continue
                if '<h1 class="title">' in line:
                    continue
                line = self.parse_line(line, pagehref, pagename)
                if isinstance(line, unicode):
                    line = line.encode('ascii', 'replace')
                cleaned_data += line
            page.data = cleaned_data
        except Exception, e:
            print "Parsing content failed:"
            print e
            #continue
            #import pdb; pdb.set_trace()
        page.source_file = os.path.basename(doc_file)
        page.source_format = "doxygen"
        page.order_index = page_order_index
        page.save()
    def import_namespace(self, nshref, nsname, nstitle, nsfullname, parent_ns_name, ns_order_index):
        """Create/update one Namespace record from a Doxygen namespace page
        (currently only invoked from the commented-out loop in run())."""
        if nsname.endswith('.html'):
            nsname = nsname[:-5]
        section, section_created = Section.objects.get_or_create(name=self.get_section(nsname, None), topic_version=self.version)
        # NOTE(review): nstitle is truncated here but never used afterwards.
        if len(nstitle) >= 64:
            nstitle = nstitle[:60]+'...'
        ns, created = Namespace.objects.get_or_create(name=nsfullname, display_name=nsfullname, platform_section=section)
        if self.verbosity >= 1:
            print 'ns[%s]: %s' % (ns_order_index, ns.name)
        doc_file = os.path.join(self.DOC_ROOT, nshref)
        doc_handle = open(doc_file)
        doc_data = doc_handle.readlines()
        doc_handle.close()
        doc_start = 2
        doc_end = len(doc_data)
        for i, line in enumerate(doc_data):
            if '<div class="contents">' in line:
                doc_start = i+1
            if '</div><!-- doc-content -->' in line and doc_end > i:
                doc_end = i-1
            if '<!-- start footer part -->' in line and doc_end > i:
                doc_end = i-2
        if self.verbosity >= 2:
            print "Doc range: %s:%s" % (doc_start, doc_end)
        try:
            # Change the content of the docs
            cleaned_data = ''
            for line in doc_data[doc_start:doc_end]:
                if line == '' or line == '\n':
                    continue
                if '<h1 class="title">' in line:
                    continue
                line = self.parse_line(line, nshref, nsfullname)
                if isinstance(line, unicode):
                    line = line.encode('ascii', 'replace')
                cleaned_data += line
            ns.data = cleaned_data
        except Exception, e:
            print "Parsing content failed:"
            print e
            #continue
            #import pdb; pdb.set_trace()
        ns.source_file = os.path.basename(doc_file)
        ns.source_format = "doxygen"
        ns.order_index = ns_order_index
        ns.save()
|
""" Reaction-Diffusion
Simulates a 2D system governed by a simple reaction-diffusion equation.
You can select one of the predefined configurations.
"""
from phi.flow import *
# du/dv: diffusion rates of u/v; f: feed rate; k: kill rate (Gray-Scott model,
# matching the reaction_diffusion() equations below).
SAMPLE_PATTERNS = {
    'diagonal': {'du': 0.17, 'dv': 0.03, 'f': 0.06, 'k': 0.056},
    'maze': {'du': 0.19, 'dv': 0.05, 'f': 0.06, 'k': 0.062},
    'coral': {'du': 0.16, 'dv': 0.08, 'f': 0.06, 'k': 0.062},
    'flood': {'du': 0.19, 'dv': 0.05, 'f': 0.06, 'k': 0.02},
    'dots': {'du': 0.19, 'dv': 0.05, 'f': 0.04, 'k': 0.065},
    'dots_and_stripes': {'du': 0.19, 'dv': 0.03, 'f': 0.04, 'k': 0.061},
}
# Initial condition
# u = v = CenteredGrid(Sphere(x=50, y=50, radius=2), x=100, y=100)
# u = v = CenteredGrid(lambda x: math.exp(-0.5 * math.sum((x - 50)**2) / 3**2), x=100, y=100)
# Noise-based start, scaled into roughly the [0.1, 0.4] range.
u = v = CenteredGrid(Noise(scale=20, smoothness=1.3), x=100, y=100) * .3 + .1
def reaction_diffusion(u, v, du, dv, f, k):
    """One Gray-Scott step; returns the updated (u, v) fields using the
    global time step `dt`."""
    reaction = u * v ** 2
    du_dt = du * field.laplace(u) - reaction + f * (1 - u)
    dv_dt = dv * field.laplace(v) + reaction - (f + k) * v
    return u + dt * du_dt, v + dt * dv_dt
dt = vis.control(1.)  # time-step size, adjustable from the UI
pattern = vis.control('maze', tuple(SAMPLE_PATTERNS))  # active parameter preset
viewer = view('u,v', namespace=globals())
# Advance the simulation once per viewer frame with the selected preset.
for _ in viewer.range():
    u, v = reaction_diffusion(u, v, **SAMPLE_PATTERNS[pattern])
|
import json  # NOTE(review): unused here; kept in case other chunks rely on it

# Build [1, 2, 3, 4] and [6, 7, 8, 9].
# FIX (review): replaced manual append loops with list(range(...)).
p = list(range(1, 5))
l = list(range(6, 10))

# Print (index, value) pairs, counting from 1.
for num in enumerate(l, 1):
    print(f'{num}')
for nur in enumerate(p, 1):
    print(f'{nur}')
|
import os
from tqdm import tqdm
def file_cleanup(files, SYSTEM_COMMANDS):
    """Filter out hidden files, names that clash with bash builtins
    (with a warning), this script itself, and the command list file;
    return the names that survive."""
    kept = []
    for name in files:
        hidden = name.startswith(".")
        if hidden or name in SYSTEM_COMMANDS:
            if not hidden:
                print(
                    "\33[91m",
                    name,
                    "is already a default bash command and will not be used\n",
                    "\33[0m")
            continue
        if name in [__file__, "existing_commands.txt"]:
            continue
        kept.append(name)
    return kept
if __name__ == "__main__":
with open("existing_commands.txt") as f:
SYSTEM_COMMANDS = f.readline().split(",")
files = input("What files to refresh (Enter for all): ")
current_files = file_cleanup(
os.listdir() if files == "" else files.strip().split(" "),
SYSTEM_COMMANDS)
for name in current_files:
try:
work_file = open(name)
operation_file = open("../" * 5 + f"usr/local/bin/{name}", "w+")
print("\33[93m" + f"Cloning script {name}" + "\33[0m")
for line in tqdm(work_file.readlines(), ncols=100, ascii=True):
operation_file.write(line)
except FileNotFoundError:
print("\33[91m" + f"{name} does not exists\n" + "\33[0m")
continue
operation_file.close()
work_file.close()
print("\33[92m" + f"Cloned script {name}\n" + "\33[0m")
|
#!/usr/bin/env python
from urllib.request import urlopen
from urllib.parse import urlencode
from tempfile import mkstemp
from subprocess import call, Popen, PIPE
from textwrap import wrap as tw
import sys, os, json
CB = '\033[94m'  # blue
CG = '\033[92m'  # green
CR = '\033[91m'  # red
CE = '\033[0m'   # reset
# Wrap string s in ANSI color code c.
color = lambda c,s: '{}{}{}'.format(c, s, CE)
url = 'https://aur.archlinux.org/'
# Local working directory for downloaded tarballs / PKGBUILDs.
abs_dir = os.path.join(os.getenv('HOME'), 'abs')
if not os.path.exists(abs_dir):
    os.makedirs(abs_dir)
def make_dir (pkg):
    """Ensure abs_dir/<pkg> exists."""
    target = os.path.join(abs_dir, pkg)
    os.makedirs(target, exist_ok=True)
def find (query):
    """Search the AUR for the joined query terms and pretty-print each
    match (colored name/version, out-of-date flag, wrapped description),
    sorted by package name."""
    data = urlencode({'type':'search', 'arg':' '.join(query)})
    r = urlopen('{}rpc.php?{}'.format(url, data))
    j = json.loads(r.read().decode('utf-8'))
    if j['resultcount'] > 0:
        for res in sorted(j['results'], key=lambda x: x['Name']):
            if res['OutOfDate']:
                res['OutOfDate'] = color(CR, 'OUT OF DATE')
            else:
                res['OutOfDate'] = ''
            res['Name'] = color(CB, res['Name'])
            res['Version'] = color(CG, res['Version'])
            res['Description'] = '\n '.join(tw(res['Description']))
            print('{Name} {Version} {OutOfDate}\n {Description}\n'.format(**res))
def info (pkg):
    """Return the AUR RPC info payload for pkg, or None when not found.

    NOTE(review): callers index the result like a single dict
    (i['Name']); this relies on the legacy rpc.php type=info response
    shape — the current AUR RPC returns a list of results. Verify.
    """
    data = urlencode({'type':'info', 'arg':pkg})
    r = urlopen('{}rpc.php?{}'.format(url, data))
    j = json.loads(r.read().decode('utf-8'))
    if j['resultcount'] > 0:
        return j['results']
    return None
def outinfo (query):
    """Print the full info record (all fields, sorted) for each queried
    package, plus its AUR package page URL."""
    for q in query:
        i = info(q)
        if i:
            if i['OutOfDate'] > 0:
                print('** OUT OF DATE **')
            print('{}/packages/{}'.format(url, i['Name']))
            for k in sorted(i.keys()):
                print('{:16}: {}'.format(k,i[k]))
            print()
def download (query):
    """Download each package's source tarball into abs_dir, extract it,
    then delete the archive. Streams the download in 16 KiB chunks.
    """
    for q in query:
        i = info(q)
        if i:
            print('Downloading: {}/{}'.format(url, i['URLPath']))
            r = urlopen('{}/{}'.format(url, i['URLPath']))
            tar = os.path.basename(i['URLPath'])
            chunk = 16 * 1024
            with open(os.path.join(abs_dir, tar), 'wb') as f:
                while True:
                    ch = r.read(chunk)
                    if not ch:
                        break
                    f.write(ch)
            curdir = os.getcwd()
            os.chdir(abs_dir)
            try:
                # FIX (review): restore the caller's cwd even if tar or the
                # removal fails; the original left the process chdir'd into
                # abs_dir on any error here.
                call(['tar', 'xf', tar])
                os.remove(tar)
            finally:
                os.chdir(curdir)
def pkgbuild (query):
    """Download just the PKGBUILD for each queried package into
    abs_dir/<name>/PKGBUILD."""
    for q in query:
        i = info(q)
        if i:
            path = os.path.dirname(i['URLPath'])
            print('Downloading: {}/{}/PKGBUILD'.format(url, path))
            r = urlopen('{}/{}/PKGBUILD'.format(url, path))
            make_dir(i['Name'])
            with open(os.path.join(abs_dir, i['Name'], 'PKGBUILD'), 'wb') as f:
                f.write(r.read())
def update (query):
    """Print each queried package whose AUR version is newer than the
    locally installed one (checked via pacman -Q -e and vercmp)."""
    for q in query:
        pac = Popen(['pacman', '-Q', '-e', q], stdout=PIPE)
        out, err = pac.communicate()
        if out:
            i = info(q)
            if i:
                # `out` is bytes; vercmp accepts the mixed str/bytes argv.
                installed_ver = out.split()[1]
                abs_ver = i['Version']
                ver = Popen(['vercmp', abs_ver, installed_ver], stdout=PIPE)
                out, err = ver.communicate()
                vcmp = int(out)  # vercmp prints <0, 0 or >0
                if vcmp > 0:
                    print(q)
        else:
            # NOTE(review): this branch fires when pacman reports no local
            # package, yet the message says "AUR" — confirm the wording.
            sys.stderr.write('Package not found in AUR: '+q+'\n')
if __name__ == "__main__":
    # Map sub-command names to their handler functions.
    cmap = {
        'find': find,
        'info': outinfo,
        'dl': download,
        'pkg': pkgbuild,
        'upd': update
    }
    if len(sys.argv) > 1 and sys.argv[1] in cmap:
        handler = cmap[sys.argv[1]]
        # Package names come from the command line, or stdin when absent.
        if len(sys.argv) > 2:
            names = sys.argv[2:]
        else:
            names = [line.strip() for line in sys.stdin.readlines()]
        handler(names)
    else:
        print('aur: '+' '.join(cmap.keys()))
|
"""Simply Calculator program"""
# Addition function code
def add(a, b):
    """Return the sum of a and b."""
    total = a + b
    return total
def subtract(a, b):
    """Return a minus b."""
    difference = a - b
    return difference
def multifly(a, b):
    """Return the product of a and b (misspelled name kept for callers)."""
    product = a * b
    return product
def divide(a, b):
    """Return a / b (true division; raises ZeroDivisionError when b == 0)."""
    quotient = a / b
    return quotient
# Interactive menu: show the options, read the choice and operands,
# then dispatch to the matching arithmetic helper.
print("Choose Options")
print("1. Addition")
print("2. Substraction")
print("3. Multiplication")
print("4. Division")
choice = input("Choose Options (1 or 2 or 3 or 4): ")
num1 = int(input("Type first number: "))
num2 = int(input("Type second number: "))
operations = {
    "1": ("+", add),
    "2": ("-", subtract),
    "3": ("*", multifly),
    "4": ("/", divide),
}
if choice in operations:
    symbol, operation = operations[choice]
    print(num1, symbol, num2, "=", operation(num1, num2))
# else:
#     print("Wrong input")
|
# Python 2 script: scrape current stock quotes from Yahoo Finance.
import urllib2
import re

# Ticker symbols to look up.
symbols = ['AAPL', 'GOOG','NYT', 'SSNLF']
i = 0
while i < len(symbols):
    # Fetch the quote page for the current symbol.
    html_file = urllib2.urlopen("http://finance.yahoo.com/q?s=%s&ql=1" % symbols[i])
    html_text = html_file.read()
    # The price lives in a span whose id embeds the lowercased symbol.
    regex = '<span id="yfs_l84_%s">(.+?)</span>' % symbols[i].lower()
    price = re.findall(regex, html_text)
    # NOTE(review): `price` is a list (possibly empty) and is printed as-is.
    print "The price of %s stock is %s" % (symbols[i], price)
    i += 1
|
# Boltzmann-weighted density of transition-path frames on a
# (z, inclination, azimuth) grid, plus 2-D projections.
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import matplotlib
import kde_2d_weighted as KDE_2D

# Global plot font settings.
font = {'family' : 'normal',
        'weight' : 'normal',
        'size' : 18}
matplotlib.rc('font', **font)
cmap="RdBu_r"
##############################################################################################################################
############################################loading in data#########################################################
t_file=open('Output_Files/transition_search.dat') ### trajectory data
transition_num = 2000 ## number of transitions to plot
transitions_tot=[]
# Each line holds the frame indices of one transition.
with t_file as my_file:
    for line in my_file:
        myarray=np.fromstring(line, dtype=float, sep=' ')
        transitions_tot.append(myarray)
transitions=transitions_tot[0:transition_num]
data=np.loadtxt('Input_Files/input.dat') ### input data
# Column indices into the per-frame input data.
z_ind = 0
inc_ind = 1
az_ind = 2
en_ind = 3
beta=1/float(np.std(data[:,en_ind])) ### std of energy to standardize energy
en_mn=np.mean(data[:,en_ind]) ### mean of energy to standardize energy
top_z=16.5 ## topmost z value to plot
bott_z=-7.5 ## bottommost z value to plot
inc_step = 18 ## stepsize in inc when plotting the boltzmann weighted density
az_step = 18 ## stepsize in inc when plotting the boltzmann weighted density
# NOTE(review): these dicts are immediately shadowed by lists further below.
transitions_z={}
transitions_beta={}
transitions_angles={}
#############################################################################################################################
####################################################creating arrays#########################################################
z_ecutoff_0=[]
a1_ecutoff_0=[]
a2_ecutoff_0=[]
ecutoff_0=[]
accepted=0
# NOTE(review): the three names below replace the dicts created above;
# from here on they are flat numpy arrays built with np.append.
transitions_angles=[]
transitions_z=[]
transitions_beta=[]
transitions_energy=[]
# Collect z / inclination / azimuth / energy for every frame of every
# transition whose z lies inside the plotting window.
for i in range(len(transitions)):
    for j in range(len(transitions[i])-1):
        f=int(transitions[i][j])
        if data[f][0]>bott_z and data[f][0]<top_z:
            transitions_angles=np.append(transitions_angles,data[f] [inc_ind])
            transitions_z=np.append(transitions_z,data[f][z_ind])
            transitions_beta=np.append(transitions_beta,data[f][az_ind])
            transitions_energy=np.append(transitions_energy,data[f][en_ind])
##############################################################################################################################
################################ Creating grids #################################################3
grid_z=[]
z_step=1
z_num=int((top_z-bott_z)/float(z_step))
# z bin edges, from top_z downward in steps of z_step.
for i in range(z_num):
    z_i=top_z-z_step*i
    grid_z=np.append(grid_z,z_i)
grid_inc_ang=np.arange(0,180,inc_step)
grid_az_ang=np.arange(0,360,az_step)
Grid_z = []
Grid_inc = []
Grid_az = []
# First pass only counts grid cells so the density array can be preallocated.
iteration = 0
for z in range(len(grid_z)):
    for inc in range(len(grid_inc_ang)):
        for az in range(len(grid_az_ang)):
            iteration+=1
Grid_density = np.zeros(iteration)
it = 0
# Second pass: flatten the 3-D grid and accumulate the Boltzmann weight of
# every transition frame falling into each (z, inc, az) cell.
for z in range(len(grid_z)):
    for inc in range(len(grid_inc_ang)):
        for az in range(len(grid_az_ang)):
            it+=1
            Grid_z=np.append(Grid_z,grid_z[z])
            Grid_inc=np.append(Grid_inc,grid_inc_ang[inc])
            Grid_az=np.append(Grid_az,grid_az_ang[az])
            frames = np.where((transitions_z<grid_z[z]) & (transitions_z>grid_z[z]-z_step) & (transitions_angles>grid_inc_ang[inc]) & \
                (transitions_angles<grid_inc_ang[inc]+inc_step) & (transitions_beta>grid_az_ang[az]) & \
                (transitions_beta<grid_az_ang[az]+az_step))
            Grid_density[it-1]=np.sum([np.exp(-beta*(transitions_energy[n]-en_mn)) for n in frames])
# Persist the flattened grid and the (angle-area normalized) density.
np.savetxt('grid_density.dat',[i/float(inc_step*az_step) for i in Grid_density])
np.savetxt('grid_z.dat',Grid_z)
np.savetxt('grid_inc.dat',Grid_inc)
np.savetxt('grid_az.dat',Grid_az)
##############################################################################################################################
################################ Creating projected density #################################################3
min_z_grid = grid_z
min_inc_grid = grid_inc_ang
min_az_grid = grid_az_ang
dens_grid = Grid_density
# --- (z, inclination) projection: average the density over azimuth ---
av_inc_z_dens = []
inc_grid_proj = []
z_grid_proj = []
inc_z_dens_Grid = np.zeros((len(min_z_grid),len(min_inc_grid)))
for z in range(len(min_z_grid)):
    for inc in range(len(min_inc_grid)):
        z_grid_proj = np.append(z_grid_proj,min_z_grid[z])
        inc_grid_proj = np.append(inc_grid_proj,min_inc_grid[inc]+9)
        # Collect the densities of every (z, inc, az) cell in this (z, inc) bin.
        bin_data = []
        for d in range(len(dens_grid)):
            if Grid_z[d] == min_z_grid [z]:
                if Grid_inc[d] == min_inc_grid[inc]:
                    bin_data = np.append(bin_data,dens_grid[d])
        av_inc_z_dens = np.append(av_inc_z_dens,np.mean(bin_data))
        # The extra offset points below duplicate each bin's mean at shifted
        # (z, inc) positions — presumably to fatten the plotted squares; confirm.
        z_grid_proj = np.append(z_grid_proj,min_z_grid[z])
        inc_grid_proj = np.append(inc_grid_proj,min_inc_grid[inc]+11)
        av_inc_z_dens = np.append(av_inc_z_dens,np.mean(bin_data))
        z_grid_proj = np.append(z_grid_proj,min_z_grid[z]-0.25)
        inc_grid_proj = np.append(inc_grid_proj,min_inc_grid[inc]+9)
        av_inc_z_dens = np.append(av_inc_z_dens,np.mean(bin_data))
        z_grid_proj = np.append(z_grid_proj,min_z_grid[z]-0.5)
        inc_grid_proj = np.append(inc_grid_proj,min_inc_grid[inc]+9)
        av_inc_z_dens = np.append(av_inc_z_dens,np.mean(bin_data))
        z_grid_proj = np.append(z_grid_proj,min_z_grid[z]-0.75)
        inc_grid_proj = np.append(inc_grid_proj,min_inc_grid[inc]+9)
        av_inc_z_dens = np.append(av_inc_z_dens,np.mean(bin_data))
        z_grid_proj = np.append(z_grid_proj,min_z_grid[z]-0.25)
        inc_grid_proj = np.append(inc_grid_proj,min_inc_grid[inc]+11)
        av_inc_z_dens = np.append(av_inc_z_dens,np.mean(bin_data))
        z_grid_proj = np.append(z_grid_proj,min_z_grid[z]-0.5)
        inc_grid_proj = np.append(inc_grid_proj,min_inc_grid[inc]+11)
        av_inc_z_dens = np.append(av_inc_z_dens,np.mean(bin_data))
        z_grid_proj = np.append(z_grid_proj,min_z_grid[z]-0.75)
        inc_grid_proj = np.append(inc_grid_proj,min_inc_grid[inc]+11)
        av_inc_z_dens = np.append(av_inc_z_dens,np.mean(bin_data))
        inc_z_dens_Grid[z][inc] = np.mean(bin_data)
# --- (z, azimuth) projection: average the density over inclination ---
av_az_z_dens = []
az_grid_proj = []
z_grid_proj_az = []
az_z_dens_Grid = np.zeros((len(min_z_grid),len(min_az_grid)))
for z in range(len(min_z_grid)):
    for az in range(len(min_az_grid)):
        z_grid_proj_az = np.append(z_grid_proj_az,min_z_grid[z])
        az_grid_proj = np.append(az_grid_proj,min_az_grid[az])
        bin_data = []
        for d in range(len(dens_grid)):
            if Grid_z[d] == min_z_grid [z]:
                if Grid_az[d] == min_az_grid[az]:
                    bin_data = np.append(bin_data,dens_grid[d])
        av_az_z_dens = np.append(av_az_z_dens,np.mean(bin_data))
        # Same square-fattening duplication, offsetting only in z here.
        z_grid_proj_az = np.append(z_grid_proj_az,min_z_grid[z]-0.25)
        az_grid_proj = np.append(az_grid_proj,min_az_grid[az])
        av_az_z_dens = np.append(av_az_z_dens,np.mean(bin_data))
        z_grid_proj_az = np.append(z_grid_proj_az,min_z_grid[z]-0.5)
        az_grid_proj = np.append(az_grid_proj,min_az_grid[az])
        av_az_z_dens = np.append(av_az_z_dens,np.mean(bin_data))
        z_grid_proj_az = np.append(z_grid_proj_az,min_z_grid[z]-0.75)
        az_grid_proj = np.append(az_grid_proj,min_az_grid[az])
        av_az_z_dens = np.append(av_az_z_dens,np.mean(bin_data))
        az_z_dens_Grid[z][az] = np.mean(bin_data)
##############################################################################################################################
################################ Plotting #################################################3
# (z, inclination) projection rendered as coloured filled squares.
plt.scatter(z_grid_proj,inc_grid_proj,c=av_inc_z_dens,cmap=cmap,s=500,marker='s')
plt.xlim(bott_z,top_z)
plt.ylim(0,180)
plt.savefig('z_inc_proj.png')
plt.close()
# (z, azimuth) projection.
plt.scatter(z_grid_proj_az,az_grid_proj,c=av_az_z_dens,cmap=cmap,s=500,marker='s')
plt.xlim(bott_z,top_z)
plt.ylim(0,360)
plt.savefig('z_az_proj.png')
plt.close()
|
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score
from sklearn.metrics import auc
import numpy as np
import csv
def toF(list):
    """Convert every element of the given sequence to int and return a list.

    (The parameter name shadows the builtin `list`; kept for compatibility.)
    """
    return [int(item) for item in list]
featureData=[]
AnsData=[]
# Read the tab-separated dataset; the first row is treated as the header.
with open('newdata.txt','r') as data:
    rows=csv.reader(data,delimiter='\t')
    switch=True
    for i in rows:
        if switch:
            label= i  # header row; not used afterwards
            switch=False
        else:
            # NOTE(review): columns 0..82 become features and column 84 the
            # label — column 83 is skipped; confirm this is intentional.
            featureData.append(i[0:83])
            AnsData.append(i[84])
# 80/20 split, default k-NN classifier, report held-out accuracy.
train_data , test_data , train_label , test_label = train_test_split(featureData,AnsData,test_size=0.2)
knn = KNeighborsClassifier()
knn.fit(train_data,train_label)
pred=knn.predict(test_data)
print("Acc:",accuracy_score(test_label,pred))
#print(knn.predict(test_data))
#rint(test_label)
# -*- coding: utf-8 -*-
"""
Created on Sun Oct 25 11:02:02 2015
@author: HSH
"""
|
import sys
import random
_LOCKED = False # this seems secure, right?
class LockedError(Exception): pass
def lock():
    """Arm the air shield by storing the secret combination."""
    global _LOCKED
    _LOCKED = '12345'
def unlock(code):
    """Drop the air shield when *code* matches the stored combination.

    Parameters
    ----------
    code
        The unlock code to try to use to unlock

    Raises
    ------
    ValueError
        If the shield is armed and the supplied code does not match.
    """
    global _LOCKED
    if not _LOCKED:
        return
    if str(code) != _LOCKED:
        raise ValueError('Wrong code')
    _LOCKED = False
def _check_lock():
    """Raise LockedError when the shield is currently armed."""
    if not _LOCKED:
        return
    raise LockedError("The air shield is up!")
def self_destruct():
    """Blow up the ship (exits the interpreter) unless the shield is armed.

    The final win of the up side of the Schwartz against Dark Helmet...
    """
    _check_lock()
    farewell = "Thank you for pressing the self destruct button!"
    print(farewell)
    sys.exit(0)
_TOWNS_GRABBED = []

def land_snatch(town='Rock Ridge', sheriff_bart=True):
    """Attempt to grab *town*; the odds drop sharply when Bart is sheriff.

    Parameters
    ----------
    town : str
        The town we want
    sheriff_bart : bool
        Whether or not Bart is the sheriff_bart

    Returns
    -------
    bool
        Whether or not the snatch was sucessful
    """
    _check_lock()
    threshold = .01 if sheriff_bart else .5
    success = random.random() < threshold
    _TOWNS_GRABBED.append(town)
    if success:
        print("Go do that voodoo that you do... so well!")
    else:
        print("To tell the truth, I'm getting a bit bored...")
    return success
def towns():
    """Return a tuple with the names of all the towns snatched so far."""
    return tuple(_TOWNS_GRABBED)
|
import numpy as np
from numpy.linalg import inv, pinv
from numpy.random import randn
class Kalman:
    """Linear Kalman filter with a constant-velocity motion model.

    State x is a 6x1 vector [px, py, pz, vx, vy, vz]^T; measurements y are
    3x1 position vectors.
    """

    def __init__(self, x_init, P_init, dt):
        """Initialize state, covariance and the model matrices.

        Args:
            x_init: initial 6x1 state (array-like).
            P_init: initial 6x6 state covariance (array-like).
            dt: timestep used by the constant-velocity transition.
        """
        # State and Covariance.
        # np.matrix instead of the np.mat alias (removed in NumPy 2.0).
        self.x = np.matrix(x_init, dtype=np.float32)
        self.P = np.matrix(P_init, dtype=np.float32)
        # Transition matrix: position += dt * velocity.
        self.A = np.matrix([[1, 0, 0, dt, 0, 0],
                            [0, 1, 0, 0, dt, 0],
                            [0, 0, 1, 0, 0, dt],
                            [0, 0, 0, 1, 0, 0],
                            [0, 0, 0, 0, 1, 0],
                            [0, 0, 0, 0, 0, 1]],
                           dtype=np.float32)
        # Measurement matrix: only the position components are observed.
        self.C = np.matrix([[1, 0, 0, 0, 0, 0],
                            [0, 1, 0, 0, 0, 0],
                            [0, 0, 1, 0, 0, 0]],
                           dtype=np.float32)
        # Process / measurement noise covariances.
        # NOTE(review): diagonals are drawn from randn() and can be negative,
        # which is not a valid covariance; kept as in the original — confirm.
        self.Q = np.matrix(np.diag([0.5*randn(), 0.5*randn(), 0.5*randn(), 2.5*randn(), 2.5*randn(), 2.5*randn()]), dtype=np.float32)
        self.R = np.matrix(np.diag([0.25*randn(), 0.25*randn(), 0.25*randn()]), dtype=np.float32)

    def predict(self):
        """Propagate state and covariance through the motion model."""
        self.x = self.A @ self.x
        self.P = self.A @ self.P @ self.A.T + self.Q

    def update(self, y, area):
        """Correct the state with a position measurement.

        Args:
            y: 3x1 measured position.
            area: scales the measurement covariance down (R / area), i.e.
                larger detections are trusted more.

        BUGFIX: the covariance update now uses (I - K C) P; the original
        used (I + K C) P, which inflates uncertainty instead of reducing it.
        """
        y = np.matrix(y, dtype=np.float32)
        R = self.R / area
        # Kalman gain; pinv guards against a singular innovation covariance.
        K = self.P @ self.C.T @ pinv(self.C @ self.P @ self.C.T + R)
        innovation = y - self.C @ self.x
        self.x = self.x + K @ innovation
        self.P = (np.identity(self.P.shape[0]) - K @ self.C) @ self.P
|
import json
import os
import pickle
import sys
import time
import numpy as np
import torch
import torch.nn as nn
import torchvision
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
from torch.utils.data import Dataset
from utils import sec2str, weight_init
# L2 normalize a batched tensor (bs, ft)
def l2normalize(ten):
    """Normalize each row of a (batch, feature) tensor to unit L2 norm.

    The norm is clamped away from zero so all-zero rows yield zeros
    instead of NaNs.
    """
    norm = torch.norm(ten, dim=1, keepdim=True)
    return ten / norm.clamp_min(1e-12)
class ImageEncoder(nn.Module):
    """CNN image encoder: backbone features -> linear projection -> L2 norm.

    Args:
        out_size: dimensionality of the output embedding.
        cnn_type: torchvision model name (vgg* or resnet* families).
        pretrained: load ImageNet weights; when False, both the backbone
            and the projection are re-initialized with weight_init.
    """

    def __init__(self, out_size=256, cnn_type="resnet18", pretrained=True):
        super(ImageEncoder, self).__init__()
        self.cnn = getattr(torchvision.models, cnn_type)(pretrained)
        # Replace the backbone's final classification layer with a
        # projection into the joint embedding space.
        if cnn_type.startswith("vgg"):
            # BUGFIX: was `self.model.classifier...`, which raised
            # AttributeError — the backbone is stored as `self.cnn`.
            self.fc = nn.Linear(self.cnn.classifier._modules["6"].in_features, out_size)
            self.cnn.classifier = nn.Sequential(*list(self.cnn.classifier.children())[:-1])
        elif cnn_type.startswith("resnet"):
            self.fc = nn.Linear(self.cnn.fc.in_features, out_size)
            self.cnn.fc = nn.Sequential()
        if not pretrained:
            self.cnn.apply(weight_init)
            self.fc.apply(weight_init)

    def forward(self, x):
        """Encode images (bs, 3, H, W) into L2-normalized (bs, out_size) embeddings."""
        features = self.cnn(x)
        out = self.fc(features)
        return l2normalize(out)
class CaptionEncoder(nn.Module):
    """RNN caption encoder: embeds tokens, runs a batch-first RNN, and returns
    the L2-normalized hidden output at each caption's last real token."""

    def __init__(self, vocab_size, emb_size=256, out_size=256, rnn_type="LSTM", padidx=0):
        super(CaptionEncoder, self).__init__()
        self.out_size = out_size
        self.padidx = padidx
        self.emb = nn.Embedding(vocab_size, emb_size)
        # rnn_type names any torch.nn recurrent class (LSTM / GRU / RNN).
        self.rnn = getattr(nn, rnn_type)(emb_size, out_size, batch_first=True)
        self.emb.apply(weight_init)
        self.rnn.apply(weight_init)

    # x: (bs, seq)
    # lengths: (bs)
    def forward(self, x, lengths):
        emb = self.emb(x)
        # packed: PackedSequence of (bs, seq, emb_size)
        packed = pack_padded_sequence(emb, lengths, batch_first=True, enforce_sorted=False)
        output, _ = self.rnn(packed)
        # output: (bs, seq, out_size)
        output = pad_packed_sequence(output, batch_first=True, padding_value=self.padidx)[0]
        # lengths: (bs, 1, out_size) — index of each sequence's final step,
        # broadcast over the feature dimension so gather() can pick that step.
        lengths = lengths.view(-1, 1, 1).expand(-1, -1, self.out_size) - 1
        # out: (bs, out_size)
        out = torch.gather(output, 1, lengths).squeeze(1)
        normed_out = l2normalize(out)
        return normed_out
class VSE(nn.Module):
    """Visual-semantic embedding: paired image and caption encoders mapping
    both modalities into one shared space."""

    def __init__(
        self,
        vocab_size,
        emb_size,
        out_size,
        max_seqlen,
        cnn_type,
        rnn_type,
        pretrained=True,
        dropout_prob=0.1,
        ss_prob=0.0,
        pad_idx=0,
        bos_idx=1,
    ):
        super().__init__()
        self.im_enc = ImageEncoder(out_size, cnn_type, pretrained)
        self.cap_enc = CaptionEncoder(vocab_size, emb_size, out_size, rnn_type, pad_idx)

    def forward(self, image, caption, lengths):
        """Encode whichever modalities are provided; None passes through as None."""
        im_emb = None if image is None else self.im_enc(image)
        cap_emb = None if caption is None else self.cap_enc(caption, lengths)
        return im_emb, cap_emb

    def freeze(self):
        """Stop gradient flow through the CNN backbone."""
        self._set_backbone_grad(False)

    def unfreeze(self):
        """Re-enable gradient flow through the CNN backbone."""
        self._set_backbone_grad(True)

    def _set_backbone_grad(self, flag):
        # Toggle requires_grad on every backbone parameter.
        for p in self.im_enc.cnn.parameters():
            p.requires_grad = flag
class SimpleDecoder(nn.Module):
    """
    RNN decoder for captioning, Google NIC

    Args:
        feature_dim: dimension of image feature
        emb_dim: dimension of word embeddings
        memory_dim: dimension of LSTM memory
        vocab_size: vocabulary size
        max_seqlen: max sequence size
        dropout_p: dropout probability for LSTM memory
        ss_prob: scheduled sampling rate, 0 for teacher forcing and 1 for free running
    """

    def __init__(
        self, feature_dim, emb_dim, memory_dim, vocab_size, max_seqlen, dropout_p, ss_prob, bos_idx,
    ):
        super().__init__()
        self.vocab_size = vocab_size
        self.max_seqlen = max_seqlen
        self.ss_prob = ss_prob
        self.bos_idx = bos_idx
        # The image feature initializes both LSTM states via linear maps.
        self.init_h = nn.Linear(feature_dim, memory_dim)
        self.init_c = nn.Linear(feature_dim, memory_dim)
        self.emb = nn.Embedding(vocab_size, emb_dim)
        self.rnn = nn.LSTMCell(emb_dim, memory_dim)
        self.dropout = nn.Dropout(dropout_p)
        self.linear = nn.Linear(memory_dim, vocab_size)
        self.init_h.apply(weight_init)
        self.init_c.apply(weight_init)
        self.rnn.apply(weight_init)
        self.emb.apply(weight_init)
        self.linear.apply(weight_init)

    def forward(self, feature, caption, length):
        """
        Args:
            torch.Tensor feature: (bs x feature_dim), torch.float
            torch.Tensor caption: (bs x max_seqlen), torch.long
            torch.Tensor length: (bs), torch.long
        Returns:
            torch.Tensor out: (bs x vocab_size x max_seqlen-1), contains logits
        """
        bs = caption.size(0)
        # When several captions share one image, tile the features to match.
        scale = bs // feature.size(0)
        if scale > 1:
            feature = torch.repeat_interleave(feature, scale, dim=0)
        # hn, cn: (bs x memory_dim)
        hn = self.init_h(feature)
        cn = self.init_c(feature)
        # caption: (bs x max_seqlen x emb_dim)
        caption = self.emb(caption)
        xn = caption[:, 0, :]
        # out: (bs x vocab_size x max_seqlen-1)
        out = torch.empty((bs, self.vocab_size, self.max_seqlen - 1), device=feature.device)
        for step in range(self.max_seqlen - 1):
            # hn, cn: (bs x memory_dim)
            hn, cn = self.rnn(xn, (hn, cn))
            # on: (bs x vocab_size)
            on = self.linear(self.dropout(hn))
            out[:, :, step] = on
            # Scheduled sampling: with probability ss_prob feed back the
            # model's own argmax token, otherwise the ground-truth next token.
            # xn: (bs x emb_dim)
            xn = (
                self.emb(on.argmax(dim=1))
                if np.random.uniform() < self.ss_prob
                else caption[:, step + 1, :]
            )
        return out

    def sample(self, feature):
        """Greedy free-running decode starting from the BOS token.

        Returns logits of shape (bs x vocab_size x max_seqlen-1).
        """
        bs = feature.size(0)
        # hn, cn: (bs x memory_dim)
        hn = self.init_h(feature)
        cn = self.init_c(feature)
        # xn: (bs x emb_dim)
        xn = self.emb(torch.full((bs,), self.bos_idx, dtype=torch.long, device=feature.device))
        # out: (bs x vocab_size x max_seqlen-1)
        out = torch.empty((bs, self.vocab_size, self.max_seqlen - 1), device=feature.device)
        for step in range(self.max_seqlen - 1):
            # hn, cn: (bs x memory_dim)
            hn, cn = self.rnn(xn, (hn, cn))
            # on: (bs x vocab_size)
            on = self.linear(self.dropout(hn))
            out[:, :, step] = on
            # xn: (bs x emb_dim)
            xn = self.emb(on.argmax(dim=1))
        return out
class SPVSE(nn.Module):
    """VSE variant with two auxiliary decoders: one generates a caption from
    the image embedding, the other reconstructs the caption from its own
    embedding."""

    def __init__(
        self,
        vocab_size,
        emb_size,
        out_size,
        max_seqlen,
        cnn_type,
        rnn_type,
        pretrained=True,
        dropout_prob=0.1,
        ss_prob=0.0,
        pad_idx=0,
        bos_idx=1,
    ):
        super().__init__()
        self.im_enc = ImageEncoder(out_size, cnn_type, pretrained)
        self.cap_enc = CaptionEncoder(vocab_size, emb_size, out_size, rnn_type, pad_idx)
        # Both decoders share the same configuration.
        decoder_args = (
            out_size, emb_size, out_size, vocab_size, max_seqlen,
            dropout_prob, ss_prob, bos_idx,
        )
        self.cap_gen = SimpleDecoder(*decoder_args)
        self.cap_rec = SimpleDecoder(*decoder_args)

    def forward(self, image, caption, lengths):
        """Return (image emb, caption emb, generated logits, reconstructed
        logits); entries for a missing modality are None."""
        im_emb = gen = cap_emb = rec = None
        if image is not None:
            im_emb = self.im_enc(image)
            gen = self.cap_gen(im_emb, caption, lengths)
        if caption is not None:
            cap_emb = self.cap_enc(caption, lengths)
            rec = self.cap_rec(cap_emb, caption, lengths)
        return im_emb, cap_emb, gen, rec

    def freeze(self):
        """Stop gradient flow through the CNN backbone."""
        for p in self.im_enc.cnn.parameters():
            p.requires_grad = False

    def unfreeze(self):
        """Re-enable gradient flow through the CNN backbone."""
        for p in self.im_enc.cnn.parameters():
            p.requires_grad = True
if __name__ == "__main__":
    # Smoke test: run random tensors through both encoders.
    ten = torch.randn((16, 3, 224, 224))
    cnn = ImageEncoder()
    out = cnn(ten)
    print(out.size())
    cap = CaptionEncoder(vocab_size=100)
    seq = torch.randint(100, (16, 30), dtype=torch.long)
    # Renamed from `len`, which shadowed the builtin.
    seq_lens = torch.randint(1, 31, (16,), dtype=torch.long)
    out = cap(seq, seq_lens)
    print(out.size())
|
# Demonstrate basic dict creation and key lookup.
sample = dict(key1=25, key2="Hello everyone", key3=45)
print(sample)
print(sample['key1'])
print(sample['key2'])
#Draw 3-color rectangles
import turtle

turtle.reset()
turtle.penup()
turtle.setpos(-100, 50)

# First (red) bar: trace three sides turning left, then the closing side.
turtle.color("red")
turtle.pendown()
turtle.begin_fill()
for distance in (200, 50, 200):
    turtle.forward(distance)
    turtle.left(90)
turtle.forward(50)
turtle.end_fill()

# The next two bars share the same stroke sequence, only the color changes.
for shade in ("green", "blue"):
    turtle.color(shade)
    turtle.begin_fill()
    turtle.forward(50)
    for distance in (200, 50, 200):
        turtle.left(90)
        turtle.forward(distance)
    turtle.left(90)
    turtle.forward(50)
    turtle.end_fill()
|
# -*- coding: utf-8 -*-
class Solution:
    def uniquePathsWithObstacles(self, obstacleGrid):
        """Count distinct right/down paths from the top-left to the
        bottom-right of a grid where cells marked 1 are impassable.

        Uses a single-row DP: dp[j] holds the number of paths reaching
        cell (i, j) after processing row i — O(n) space instead of O(m*n).
        """
        n = len(obstacleGrid[0])
        dp = [0] * n
        dp[0] = 1
        for row in obstacleGrid:
            for j, blocked in enumerate(row):
                if blocked:
                    dp[j] = 0  # no path may pass through an obstacle
                elif j > 0:
                    dp[j] += dp[j - 1]  # paths from the left plus from above
        return dp[-1]
if __name__ == "__main__":
    solver = Solution()
    grid_with_block = [
        [0, 0, 0],
        [0, 1, 0],
        [0, 0, 0],
    ]
    assert solver.uniquePathsWithObstacles(grid_with_block) == 2
    assert solver.uniquePathsWithObstacles([[0], [0]]) == 1
|
import sys
import heapq # priority queue

# Input file: one process per line as "id arrive_time burst_time".
input_file = 'input.txt'
class Process:
    """A schedulable process: id, arrival time and CPU burst length."""

    def __init__(self, id, arrive_time, burst_time):
        self.id = id
        self.arrive_time = arrive_time
        self.burst_time = burst_time

    def __repr__(self):
        # Human-readable form used when printing the input listing.
        return ('[id %d : arrive_time %d, burst_time %d]'%(self.id, self.arrive_time, self.burst_time))
def FCFS_scheduling(process_list):
    """First-come-first-served: run processes in list order, idling until
    each one's arrival when necessary.

    Returns (schedule, average_waiting_time), where schedule is a list of
    (switch_time, process_id) pairs.
    """
    schedule = []
    clock = 0
    total_wait = 0
    for proc in process_list:
        # Idle until the process arrives if the CPU got there early.
        clock = max(clock, proc.arrive_time)
        schedule.append((clock, proc.id))
        total_wait += clock - proc.arrive_time
        clock += proc.burst_time
    return schedule, total_wait / float(len(process_list))
#Input: process_list, time_quantum (Positive Integer)
#Output_1 : Schedule list contains pairs of (time_stamp, process_id) indicating the time switching to that process_id
#Output_2 : Average Waiting Time
class Process_RR:
    """Round-robin queue record: remaining burst, remaining quantum and the
    earliest possible completion time (arrival + full burst)."""

    def __init__(self, id, burst_time, quantum, earliest_end_time):
        self.id = id
        self.burst_time = burst_time
        self.quantum = quantum
        self.earliest_end_time = earliest_end_time
def RR_scheduling(process_list, time_quantum ):
    """Round-robin simulation advancing one time unit per step.

    Returns (schedule, average_waiting_time); schedule holds a
    (time_stamp, process_id) pair at every context switch.
    """
    process_index = 0
    ready_queue = []
    current_time = 0
    last_scheduled_id = -1
    schedule = []
    waiting_time = 0
    # what should I do at this current time?
    while (process_index < len(process_list)) or (len(ready_queue) > 0): # while there is something to do
        if process_index < len(process_list):
            new_process = process_list[process_index]
            # append process to queue if it has arrived
            if new_process.arrive_time == current_time:
                ready_queue.append( Process_RR(new_process.id, new_process.burst_time, time_quantum, new_process.arrive_time + new_process.burst_time) )
                process_index += 1
        # check ready queue
        if len(ready_queue) == 0:
            # nothing to do -> move forward one time unit (for optimisation, can fast forward to next process' arrival time)
            current_time += 1
            last_scheduled_id = -1
        else:
            # do first process in the ready queue
            process = ready_queue[0]
            if last_scheduled_id != process.id:
                schedule.append( (current_time, process.id) )
                last_scheduled_id = process.id
            #print "time %d: process id %d, burst %d, quantum %d" % (current_time, process.id, process.burst_time, process.quantum)
            if process.burst_time == 0:
                # done with this process; waiting = finish time minus the
                # earliest time it could possibly have finished
                #print "done with process %d at time %d" % (process.id, current_time)
                waiting_time += current_time - process.earliest_end_time
                ready_queue.pop(0)
            elif process.quantum == 0:
                # this process has used up its quantum -> move it to the back of the queue with reset time_quantum
                process.quantum = time_quantum
                ready_queue.pop(0)
                ready_queue.append(process)
            else:
                # increment time
                process.burst_time -= 1
                process.quantum -= 1
                current_time += 1
    average_waiting_time = waiting_time/float(len(process_list))
    return schedule, average_waiting_time
class Process_SRTF_SJF:
    """Bookkeeping record for the SRTF/SJF heaps.

    BUGFIX: defines an id-based ordering so heap entries of the form
    (key, process) never fall through to comparing raw objects when two
    keys are equal — on Python 3 that raised TypeError.
    """

    def __init__(self, id, burst_time, earliest_end_time):
        self.id = id
        self.burst_time = burst_time
        self.earliest_end_time = earliest_end_time

    def __lt__(self, other):
        # Deterministic tie-break for equal heap keys.
        return self.id < other.id
def SRTF_scheduling(process_list):
    """Shortest-remaining-time-first (preemptive), simulated one time unit
    at a time with a (remaining_burst, process) min-heap.

    NOTE(review): when two bursts are equal the tuple comparison falls
    through to the Process_SRTF_SJF objects, which must be orderable.
    """
    process_index = 0
    current_time = 0
    ready_queue = []
    last_scheduled_id = -1
    schedule = []
    waiting_time = 0
    while (process_index < len(process_list)) or (len(ready_queue) > 0): # while there is something to do
        if process_index < len(process_list):
            new_process = process_list[process_index]
            # append process to queue if it has arrived
            if new_process.arrive_time == current_time:
                heapq.heappush(ready_queue, (new_process.burst_time, Process_SRTF_SJF(new_process.id, new_process.burst_time, new_process.arrive_time + new_process.burst_time)) )
                process_index += 1
        # what should I do at this time?
        if len(ready_queue) == 0:
            # nothing to do -> move forward one time unit
            current_time += 1
            last_scheduled_id = -1
        else:
            # do first process in the ready queue
            process_burst, process = ready_queue[0]
            if last_scheduled_id != process.id:
                schedule.append( (current_time, process.id) )
                last_scheduled_id = process.id
            if process.burst_time == 0:
                # done with this process
                waiting_time += current_time - process.earliest_end_time
                heapq.heappop(ready_queue)
            else:
                # increment time
                process.burst_time -= 1
                current_time += 1
                # re-key the root entry with the decremented remaining burst
                heapq.heapreplace(ready_queue, (process.burst_time, process))
    average_waiting_time = waiting_time/float(len(process_list))
    return schedule, average_waiting_time
def SJF_scheduling(process_list, alpha):
    """Non-preemptive shortest-job-first using exponentially averaged
    burst-time predictions (initial guess 5):

        prediction = alpha * last_actual_burst + (1 - alpha) * old_prediction
    """
    process_index = 0
    current_time = 0
    ready_queue = []
    predicted_burst = {}
    last_scheduled_id = -1
    schedule = []
    waiting_time = 0
    while (process_index < len(process_list)) or (len(ready_queue) > 0): # while there is something to do
        while process_index < len(process_list): # can have more than one
            new_process = process_list[process_index]
            # append process to queue if it has arrived
            if new_process.arrive_time <= current_time:
                # First sighting of this id: seed the prediction with 5.
                if new_process.id not in predicted_burst:
                    predicted_burst[new_process.id] = 5
                predicted_time = predicted_burst[new_process.id]
                heapq.heappush(ready_queue, (predicted_time, Process_SRTF_SJF(new_process.id, new_process.burst_time, new_process.arrive_time + new_process.burst_time)) )
                process_index += 1
            else:
                break
        # what should I do at this time?
        if len(ready_queue) == 0:
            # nothing to do -> move forward one time unit
            current_time += 1
            last_scheduled_id = -1
        else:
            # do first process in the ready queue
            process_burst, process = ready_queue[0]
            if last_scheduled_id != process.id:
                schedule.append( (current_time, process.id) )
                last_scheduled_id = process.id
            # Non-preemptive: run the whole burst, then update the prediction.
            current_time += process.burst_time
            waiting_time += current_time - process.earliest_end_time
            heapq.heappop(ready_queue)
            predicted_burst[process.id] = alpha * process.burst_time + (1 - alpha) * predicted_burst[process.id]
    average_waiting_time = waiting_time/float(len(process_list))
    return schedule, average_waiting_time
def read_input():
    """Parse input_file into a list of Process records (id, arrive, burst)."""
    processes = []
    with open(input_file) as f:
        for line in f:
            fields = line.split()
            if len(fields) != 3:
                print ("wrong input format")
                exit()
            processes.append(Process(int(fields[0]), int(fields[1]), int(fields[2])))
    return processes
def write_output(file_name, schedule, avg_waiting_time):
    """Write one schedule entry per line, then the average waiting time."""
    with open(file_name, 'w') as f:
        f.writelines(str(item) + '\n' for item in schedule)
        f.write('average waiting time %.2f \n'%(avg_waiting_time))
def main(argv):
    """Load the process list, run every scheduler and write results to files."""
    process_list = read_input()
    print ("printing input ----")
    for process in process_list:
        print (process)
    print ("simulating FCFS ----")
    FCFS_schedule, FCFS_avg_waiting_time = FCFS_scheduling(process_list)
    write_output('FCFS.txt', FCFS_schedule, FCFS_avg_waiting_time )
    print ("simulating RR ----")
    RR_schedule, RR_avg_waiting_time = RR_scheduling(process_list,time_quantum = 2)
    write_output('RR.txt', RR_schedule, RR_avg_waiting_time )
    print ("simulating SRTF ----")
    SRTF_schedule, SRTF_avg_waiting_time = SRTF_scheduling(process_list)
    write_output('SRTF.txt', SRTF_schedule, SRTF_avg_waiting_time )
    print ("simulating SJF ----")
    # Sweep the exponential-averaging weight alpha over 0.0..1.0 in 0.05
    # steps; only the final run's schedule is written to SJF.txt.
    for i in range(21):
        alpha = i*0.05
        SJF_schedule, SJF_avg_waiting_time = SJF_scheduling(process_list, alpha = alpha)
        print (SJF_avg_waiting_time)
    write_output('SJF.txt', SJF_schedule, SJF_avg_waiting_time )

if __name__ == '__main__':
    main(sys.argv[1:])
|
#lex_auth_012693763253788672132
def generate_ticket(airline, source, destination, no_of_passengers):
    """Build ids "<airline>:<src3>:<dst3>:<101..>" for each passenger.

    When more than five tickets are generated, only the last five are
    returned, in reverse order.
    """
    prefix = airline+':'+source[:3]+':'+destination[:3]+':'
    tickets = [prefix + str(100 + n) for n in range(1, no_of_passengers + 1)]
    if len(tickets) < 5:
        return tickets
    return tickets[::-1][:5]
# Demo: two passengers from Bangalore to London on AI.
demo_tickets = generate_ticket("AI","Bangalore","London",2)
print(demo_tickets)
import threading
import Globals
from Connect import *
from Logger import *
class Joystick(threading.Thread):
    """Thread that reads 2-byte joystick coordinate packets over a serial
    connection and publishes them through the shared Globals module."""

    # Initialize thread and Joystick instance attributes
    def __init__(self):
        threading.Thread.__init__(self)
        self.killReceived = False  # set externally to stop the thread
        self.port = 0
        self.serial = None  # populated by Connect.connectJoy
        # Derive the logger name from this class's qualified name.
        className = str.split(str(self.__class__),"'")[1]
        self.logger = Logger(className)

    # Start thread
    def run(self):
        # Outer loop: (re)connect whenever the inner read loop breaks out.
        while not self.killReceived:
            Connect.connectJoy(self)
            self.setJoyConnectedFlag()
            while not self.killReceived:
                # Each packet is two bytes, one per axis (see updateGlobals).
                coordinates = Connect.read(self.serial,2)
                #print(coordinates)
                if not self.updateGlobals(coordinates):
                    # Short read: close the port and reconnect.
                    Connect.close(self.serial)
                    break

    #
    def setJoyConnectedFlag(self):
        # Publish the connected state under the shared lock.
        Globals.lock.acquire()
        Globals.joyConnected = True
        Globals.lock.release()

    # Update global variable 'coordinates'
    def updateGlobals(self, coordinates):
        """Decode a 2-byte packet into Globals.coordinates.

        Returns False when the packet is short, telling run() to reconnect.
        """
        updated = False
        if len(coordinates) < 2:
            self.logger.logger.warn("Any problem happened, trying to connect again.")
            updated = False
        else:
            updated = True
            Globals.lock.acquire()
            # The low bit of a byte appears to tag it as the y axis and the
            # other byte as x — inferred from the masking below; confirm
            # against the device protocol.
            if coordinates[0] & 1:
                Globals.coordinates['x'] = coordinates[1] & 0xFE
                Globals.coordinates['y'] = coordinates[0] | 0x01
            else:
                Globals.coordinates['x'] = coordinates[0] & 0xFE
                Globals.coordinates['y'] = coordinates[1] | 0x01
            #print("(" + str(Globals.coordinates['x']) + ", " + str(Globals.coordinates['y']) + ")")
            # NOTE(review): notify() implies Globals.lock is a Condition.
            Globals.lock.notify()
            Globals.lock.release()
        return updated
|
import can
from can import Listener
|
from django.shortcuts import render
from django.contrib import messages
from .forms import MateriaForm
from asignacionc.models import Materia, Grado, Alumno
def materia_nueva(request):
    """Create a Materia from POSTed form data (plus one Grado row per selected
    alumno), or show the form on GET / invalid input."""
    if request.method == "POST":
        formulario = MateriaForm(request.POST)
        if formulario.is_valid():
            datos = formulario.cleaned_data
            materia = Materia.objects.create(nom_materia=datos['nom_materia'], descripcion=datos['descripcion'])
            # Link every selected alumno to the freshly created materia.
            for alumno_id in request.POST.getlist('alumnos'):
                Grado(alumno_id=alumno_id, materia_id=materia.id).save()
            messages.add_message(request, messages.SUCCESS, 'Materia Guardada Exitosamente')
    else:
        formulario = MateriaForm()
    return render(request, 'materias/materia_editar.html', {'formulario': formulario})
# Create your views here.
|
import sys
import subprocess
# Usage: python launch.py <gpu 0-3> <convex|mean>
gpu = int(sys.argv[1])
mode = sys.argv[2]
assert mode in ['convex', 'mean']
assert gpu >= 0 and gpu <= 3

start_id = 0
end_id = 36

# Run only the job ids assigned to this GPU (round-robin over 4 GPUs).
for job in range(start_id, end_id + 1):
    if job % 4 != gpu:
        continue
    cmd = 'bash launch/attack-end2end-12.sh {} {} {}'.format(gpu, mode, job)
    print(cmd)
    subprocess.run(cmd.split())
|
import scrapy
class nflspider(scrapy.Spider):
    """Scrape player cap-hit rankings from spotrac.com for 2016-2018."""

    name = 'nfl'
    start_urls = [
        'https://www.spotrac.com/nfl/rankings/2016/cap-hit.html',
        'https://www.spotrac.com/nfl/rankings/2017/cap-hit.html',
        'https://www.spotrac.com/nfl/rankings/2018/cap-hit.html'
    ]

    def parse(self, response):
        """Yield one dict per ranked player: year, rank, name, position, cap hit."""
        # Extract the parallel column lists from the rankings table.
        for table in response.xpath('/html/body/div[2]/div[2]/div/div/div[1]/div/div[3]/div/table/tbody'):
            Ranking = table.css('tr td.rank.small.center.noborderright::text').extract()
            Player = table.css('tr td.rank-name.player.noborderright h3 a.team-name::text').extract()
            Position = table.css('tr td.rank-name.player.noborderright span.rank-position::text').extract()
            CapHit = table.css('tr td span.info::text').extract()
        # Page banner holds the season; extract() returns a list of strings.
        for banner in response.xpath('/html/body/div[2]/div[2]/div/div/div[1]/div/div[3]'):
            Year = banner.css('header.team-header h2::text').extract()
        #Give the extracted content row wise
        for item in zip(Ranking,Player,Position,CapHit):
            #create a dictionary to store the scraped info
            scraped_info = {
                # str(Year)[2:6] slices the year digits out of the list repr.
                'Year' : str(Year)[2:6],
                'Ranking': item[0],
                'Player' : item[1],
                'Position' : item[2],
                'Cap_Hit' : item[3]
            }
            #yield or give the scraped info to scrapy
            yield scraped_info
|
# -*- coding: utf-8 -*-
"""
@author: Aaron Ponti
"""
import re
import os
import logging
import xml.etree.ElementTree as xml
from datetime import datetime
import java.io.File
from org.apache.commons.io import FileUtils
#
# The Processor class performs all steps required for registering datasets
# from the assigned dropbox folder
#
class Processor:
    """Registers datasets from the dropbox folder.

    Walks the incoming dropbox folder, parses the data_structure.ois index
    and each referenced properties XML file, and registers experiments,
    plates, tubes/wells and FCS-file datasets with openBIS.  Runs inside a
    Jython 2.5 openBIS dropbox (hence java.io.File / commons-io usage and
    the Python-2 idioms such as dict.iteritems()).
    """

    # A transaction object passed by openBIS
    _transaction = None

    # The _incoming folder to process (a java.io.File object)
    _incoming = ""

    # The user name
    _username = ""

    # Friendly name of the acquisition machine.  Assigned in register()
    # before processExperiment() reads it; the class-level default keeps
    # attribute access safe if methods are called out of order.
    _machinename = ""

    # The logger
    _logger = None

    def __init__(self, transaction, logFile):
        """Initialize the processor.

        @param transaction the transaction object passed by openBIS
        @param logFile     full path of the log file to append to
        """
        self._transaction = transaction
        self._incoming = transaction.getIncoming()
        self._username = ""

        # Set up logging
        logging.basicConfig(filename=logFile, level=logging.DEBUG,
                            format='%(asctime)-15s %(levelname)s: %(message)s')
        self._logger = logging.getLogger("BLSRFortessa")

    def dictToXML(self, d):
        """Converts a dictionary into an XML string.

        All dictionary entries become attributes of a single
        <Parameters/> element.
        """
        # Create an XML node
        node = xml.Element("Parameters")

        # Add all attributes to the XML node
        for k, v in d.iteritems():
            node.set(k, v)

        # Convert to XML string
        xmlString = xml.tostring(node, encoding="UTF-8")

        # Return the XML string
        return xmlString

    def createExperiment(self, expId, expName):
        """Create an experiment with given Experiment ID extended with the addition
        of a string composed from current date and time.

        @param expId,   the experiment ID
        @param expName, the experiment name
        @return the created IExperimentUpdatable
        @raise Exception if openBIS does not return an experiment object
        """
        # Make sure to keep the code length within the limits imposed by
        # openBIS for codes
        if len(expId) > 41:
            expId = expId[0:41]

        # Create univocal ID
        expId = expId + "_" + self.getCustomTimeStamp()

        # Create the experiment
        self._logger.info("Register experiment %s" % expId)
        exp = self._transaction.createNewExperiment(expId, "LSR_FORTESSA_EXPERIMENT")
        if not exp:
            msg = "Could not create experiment " + expId + "!"
            self._logger.error(msg)
            raise Exception(msg)
        else:
            self._logger.info("Created experiment with ID " + expId + ".")

        # Store the name
        exp.setPropertyValue("LSR_FORTESSA_EXPERIMENT_NAME", expName)

        return exp

    def createSampleWithGenCode(self, spaceCode,
                                sampleType="LSR_FORTESSA_PLATE"):
        """Create a sample with automatically generated code.

        @param spaceCode, the code of the space
        @param sampleType, the sample type that must already exist
        @return sample An ISample
        @raise Exception if openBIS does not return a sample object
        """
        # Make sure there are not slashes in the spaceCode
        spaceCode = spaceCode.replace("/", "")

        # Create the sample
        sample = self._transaction.createNewSampleWithGeneratedCode(spaceCode,
                                                                    sampleType)
        if not sample:
            msg = "Could not create sample with generated code"
            self._logger.error(msg)
            raise Exception(msg)

        return sample

    def formatExpDateForPostgreSQL(self, expDate):
        """Format the experiment date to be compatible with postgreSQL's
        'timestamp' data type.

        @param Date stored in the FCS file, in the form 01-JAN-2013
        @return Date in the form 2013-01-01 (or 1970-01-01 if the month
                abbreviation is not recognized)
        """
        monthMapper = {'JAN': '01', 'FEB': '02', 'MAR': '03', 'APR': '04',
                       'MAY': '05', 'JUN': '06', 'JUL': '07', 'AUG': '08',
                       'SEP': '09', 'OCT': '10', 'NOV': '11', 'DEC': '12'}

        # Separate the date into day, month, and year
        (day, month, year) = expDate.split("-")

        # Try mapping the month to digits (e.g. "06"). If the mapping does
        # not work, return "NOT_FOUND"
        month = monthMapper.get(month, "NOT_FOUND")

        # Build the date in the correct format. If the month was not found,
        # return 01-01-1970
        if (month == "NOT_FOUND"):
            self._logger.info("Invalid experiment date %s found. " \
                              "Reverting to 1970/01/01." % expDate)
            return "1970-01-01"
        else:
            return (year + "-" + month + "-" + day)

    def getCustomTimeStamp(self):
        """Create an univocal time stamp based on the current date and time
        (works around incomplete API of Jython 2.5).

        NOTE(review): the format string is %y%d%m (year-day-month), not
        %y%m%d -- presumably intentional since the stamp only needs to be
        unique, but confirm before relying on its ordering.
        """
        t = datetime.now()
        return (t.strftime("%y%d%m%H%M%S") + unicode(t)[20:])

    def getSubFolders(self):
        """Return a list of subfolders of the passed incoming directory.

        @return list of subfolder names (String), non-recursive
        """
        incomingStr = self._incoming.getAbsolutePath()
        return [name for name in os.listdir(incomingStr)
                if os.path.isdir(os.path.join(incomingStr, name))]

    def processExperiment(self, experimentNode):
        """Register an IExperimentUpdatable based on the Experiment XML node.

        @param experimentNode An XML node corresponding to an Experiment
        @return IExperimentUpdatable experiment
        @raise Exception if the experiment cannot be created
        """
        # Get the experiment version
        expVersion = experimentNode.attrib.get("version")
        if expVersion is None:
            expVersion = "0"

        # Get the openBIS identifier
        openBISIdentifier = experimentNode.attrib.get("openBISIdentifier")

        # Get the experiment name
        expName = experimentNode.attrib.get("name")

        # Get the experiment date and reformat it to be compatible
        # with postgreSQL
        expDate = self.formatExpDateForPostgreSQL(experimentNode.attrib.get("date"))

        # Get the description
        description = experimentNode.attrib.get("description")

        # Get the acquisition hardware
        acqHardware = experimentNode.attrib.get("acq_hardware")

        # Get the acquisition software
        acqSoftware = experimentNode.attrib.get("acq_software")

        # Get the owner name
        owner = experimentNode.attrib.get("owner_name")

        # Get attachments
        attachments = experimentNode.attrib.get("attachments")

        # Create the experiment (with corrected ID if needed: see above)
        openBISExperiment = self.createExperiment(openBISIdentifier, expName)
        if not openBISExperiment:
            msg = "Could not create experiment " + openBISIdentifier
            self._logger.error(msg)
            raise Exception(msg)

        # Get comma-separated tag list
        tagList = experimentNode.attrib.get("tags")
        if tagList != None and tagList != "":

            # Retrieve or create the tags
            openBISTags = self.retrieveOrCreateTags(tagList)

            # Set the metaprojects (tags)
            for openBISTag in openBISTags:
                openBISTag.addEntity(openBISExperiment)

        # Set the experiment version
        openBISExperiment.setPropertyValue("LSR_FORTESSA_EXPERIMENT_VERSION",
                                           expVersion)

        # Set the date
        openBISExperiment.setPropertyValue("LSR_FORTESSA_EXPERIMENT_DATE",
                                           expDate)

        # Set the description
        openBISExperiment.setPropertyValue("LSR_FORTESSA_EXPERIMENT_DESCRIPTION",
                                           description)

        # Set the acquisition hardware
        openBISExperiment.setPropertyValue("LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE",
                                           acqHardware)

        # Set the acquisition hardware friendly name
        # (self._machinename is populated by register() from the XML root)
        openBISExperiment.setPropertyValue("LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME",
                                           self._machinename)

        # Set the acquisition software
        openBISExperiment.setPropertyValue("LSR_FORTESSA_EXPERIMENT_ACQ_SOFTWARE",
                                           acqSoftware)

        # Set the experiment owner
        openBISExperiment.setPropertyValue("LSR_FORTESSA_EXPERIMENT_OWNER",
                                           owner)

        # Add the attachments
        if attachments is not None:

            # Extract all relative file names
            attachmentFiles = attachments.split(";")

            for f in attachmentFiles:

                # This is an additional security step
                if f == '':
                    continue

                # Inform
                msg = "Adding file attachment " + f
                self._logger.info(msg)

                # Build the full path
                attachmentFilePath = os.path.join(self._incoming.getAbsolutePath(),
                                                  f)

                # Extract the file name
                attachmentFileName = os.path.basename(attachmentFilePath)

                # Read the attachment into a byte array
                javaFile = java.io.File(attachmentFilePath)
                byteArray = FileUtils.readFileToByteArray(javaFile)

                # Add attachment
                openBISExperiment.addAttachment(attachmentFilePath,
                                                attachmentFileName,
                                                "", byteArray)

        # Return the openBIS Experiment object
        return openBISExperiment

    def processFCSFile(self, fcsFileNode, openBISTube, openBISExperiment):
        """Register the FCS File using the parsed properties file.

        @param fcsFileNode An XML node corresponding to an FCS file (dataset)
        @param openBISTube An ISample object representing a Tube or Well
        @param openBISExperiment An ISample object representing an Experiment
        @raise Exception if the dataset cannot be created or the node
               structure is unexpected
        """
        # Dataset type
        datasetType = "LSR_FORTESSA_FCSFILE"

        # Create a new dataset
        dataset = self._transaction.createNewDataSet()
        if not dataset:
            msg = "Could not get or create dataset"
            self._logger.error(msg)
            raise Exception(msg)

        # Set the dataset type
        dataset.setDataSetType(datasetType)

        # Assign the dataset to the sample
        dataset.setSample(openBISTube)

        # Assign the dataset to the experiment
        dataset.setExperiment(openBISExperiment)

        # Set the file type
        dataset.setFileFormatType("FCS")

        # Get the parameter node
        for parameterNode in fcsFileNode:

            if parameterNode.tag != "FCSFileParamList":
                msg = "Expected FSC File Parameter List node!"
                self._logger.error(msg)
                raise Exception(msg)

            parametersXML = self.dictToXML(parameterNode.attrib)

            # Store the parameters in the LSR_FORTESSA_FCSFILE_PARAMETERS property
            dataset.setPropertyValue("LSR_FORTESSA_FCSFILE_PARAMETERS", parametersXML)

            # Log the parameters
            self._logger.info("FCS file parameters (XML): " + str(parametersXML))

        # Assign the file to the dataset (we will use the absolute path)
        fileName = fcsFileNode.attrib.get("relativeFileName")
        fileName = os.path.join(self._incoming.getAbsolutePath(), fileName)

        # Log
        self._logger.info("Registering file: " + fileName)

        # Move the file
        self._transaction.moveFile(fileName, dataset)

    def processTray(self, trayNode, openBISExperiment):
        """Register a Tray (Plate) based on the Tray XML node
        and an IExperimentUpdatable object.

        @param trayNode An XML node corresponding to a Tray (Plate)
        @param openBISExperiment An IExperimentUpdatable object
        @return ISample sample
        @raise Exception if the sample cannot be created
        """
        # openBIS sample type
        openBISSampleType = "LSR_FORTESSA_PLATE"

        # Get the identifier of the space all relevant attributes
        openBISSpaceIdentifier = \
            trayNode.attrib.get("openBISSpaceIdentifier")

        # Get the tray name
        name = trayNode.attrib.get("name")

        # Get the tray geometry
        trayGeometry = trayNode.attrib.get("trayGeometry")

        # Create the sample. The Plate is configured in openBIS to
        # auto-generate its own identifier.
        openBISTray = self.createSampleWithGenCode(openBISSpaceIdentifier,
                                                   openBISSampleType)
        if not openBISTray:
            msg = "Could not create plate sample."
            self._logger.error(msg)
            raise Exception(msg)

        # Set the experiment for the sample
        openBISTray.setExperiment(openBISExperiment)

        # Set the plate name
        openBISTray.setPropertyValue("LSR_FORTESSA_PLATE_NAME", name)

        # Set the tray geometry
        openBISTray.setPropertyValue("LSR_FORTESSA_PLATE_GEOMETRY", trayGeometry)

        # Return the openBIS ISample object
        return openBISTray

    def processTubeOrWell(self, tubeNode, openBISContainerSample,
                          specimenName, openBISExperiment):
        """Register a Tube or Well (as a child of a Specimen) based on the Tube or
        Well XML node and an ISample object.

        The associated fcs file is attached as a IDataset

        @param tubeNode An XML node corresponding to a Tube or Well
        @param openBISContainerSample An ISample object that will contain
        the Tube or Well
        @param specimenName Name of the specimen associated to the Tube or Well
        @param openBISExperiment The IExperiment to which the Tube belongs
        @return ISample sample
        @raise Exception if the node type is unknown or the sample cannot
               be created
        """
        # Get the name
        name = tubeNode.attrib.get("name")

        # openBIS type is chosen from the XML tag (Tube or Well)
        if tubeNode.tag == "Tube":
            openBISSpecimenType = "LSR_FORTESSA_TUBE"
        elif tubeNode.tag == "Well":
            openBISSpecimenType = "LSR_FORTESSA_WELL"
        else:
            msg = "Unknown tube type" + tubeNode.tag
            self._logger.error(msg)
            raise Exception(msg)

        # Build the openBIS Identifier
        openBISSpaceIdentifier = \
            tubeNode.attrib.get("openBISSpaceIdentifier")

        # Create the sample. The Tube/Well is configured in openBIS to
        # auto-generate its own identifier.
        openBISTube = self.createSampleWithGenCode(openBISSpaceIdentifier,
                                                   openBISSpecimenType)
        if not openBISTube:
            msg = "Could not create sample with auto-generated identifier"
            self._logger.error(msg)
            raise Exception(msg)

        # Set the experiment to which it belongs
        openBISTube.setExperiment(openBISExperiment)

        # Set the Specimen name as a property
        openBISTube.setPropertyValue("LSR_FORTESSA_SPECIMEN", specimenName)

        # Set the name
        if openBISSpecimenType == "LSR_FORTESSA_TUBE":
            openBISTube.setPropertyValue("LSR_FORTESSA_TUBE_NAME", name)
        elif openBISSpecimenType == "LSR_FORTESSA_WELL":
            openBISTube.setPropertyValue("LSR_FORTESSA_WELL_NAME", name)
        else:
            msg = "Unknown value for openBISSpecimenType."
            self._logger.error(msg)
            raise Exception(msg)

        # Set the TubeSet container
        openBISTube.setContainer(openBISContainerSample)

        # Return the openBIS ISample
        return openBISTube

    def processTubeSet(self, experimentNode, openBISExperiment):
        """Register a TubeSet (virtual tube container).

        @param experimentNode An XML node corresponding to an Experiment
        @param openBISExperiment An IExperimentUpdatable object
        @return ISample sample
        @raise Exception if the TubeSet cannot be created
        """
        # Sample type
        openBISSampleType = "LSR_FORTESSA_TUBESET"

        # Get the identifier of the space all relevant attributes
        openBISSpaceIdentifier = \
            experimentNode.attrib.get("openBISSpaceIdentifier")

        # Create the sample. The Tubeset is configured in openBIS to
        # auto-generate its own identifier.
        openBISTubeSet = self.createSampleWithGenCode(openBISSpaceIdentifier,
                                                      openBISSampleType)
        if not openBISTubeSet:
            msg = "Could not get or create TubeSet"
            self._logger.error(msg)
            raise Exception(msg)

        # Set the experiment for the sample
        openBISTubeSet.setExperiment(openBISExperiment)

        self._logger.info("Created new TubeSet " \
                          "with identifier %s, sample type %s" \
                          % (openBISTubeSet.getSampleIdentifier(),
                             openBISSampleType))

        # Return the openBIS ISample object
        return openBISTubeSet

    def register(self, tree):
        """Register the Experiment using the parsed properties file.

        @param tree ElementTree parsed from the properties XML file
        @raise Exception on any unexpected node in the XML tree
        """
        # Get the root node (obitXML)
        root = tree.getroot()

        # Store the username
        self._username = root.attrib.get("userName")

        # Store the machine name (used later by processExperiment)
        machinename = root.attrib.get("machineName")
        if machinename is None:
            machinename = ""
        self._machinename = machinename

        # Create a virtual TubeSet
        openBISTubeSet = None

        # Iterate over the children (Experiments)
        for experimentNode in root:

            # The tag of the immediate children of the root experimentNode
            # must be Experiment
            if experimentNode.tag != "Experiment":
                msg = "Expected Experiment node, found " + experimentNode.tag
                self._logger.error(msg)
                raise Exception(msg)

            # Process an Experiment XML node and get/create an IExperimentUpdatable
            openBISExperiment = self.processExperiment(experimentNode)

            # Process children of the Experiment
            for childNode in experimentNode:

                # The child of an Experiment can be a Tray or a Specimen
                nodeType = childNode.tag

                if nodeType == "Specimen":

                    # A specimen is a direct child of an experiment if there
                    # is no plate, and the FCS files are therefore associated
                    # to tubes. In this case, we create a virtual TubeSet
                    # sample container (one for all Tubes in the experiment).
                    if openBISTubeSet is None:
                        openBISTubeSet = self.processTubeSet(experimentNode,
                                                             openBISExperiment)

                    # The only information we need from the Specimen is its
                    # name to associate to the Tubes as property
                    specimenNameProperty = childNode.attrib.get("name")

                    # Now iterate over the children of the Specimen
                    for tubeNode in childNode:

                        # The child of a Specimen is a Tube
                        if tubeNode.tag != "Tube":
                            msg = "Expected Tube node!"
                            self._logger.error(msg)
                            raise Exception(msg)

                        # Process the tube node and get the openBIS object
                        openBISTube = self.processTubeOrWell(tubeNode,
                                                             openBISTubeSet,
                                                             specimenNameProperty,
                                                             openBISExperiment)

                        # Now process the FCS file
                        for fcsNode in tubeNode:

                            # The child of a Tube is an FCSFile
                            if fcsNode.tag != "FCSFile":
                                msg = "Expected FSC File node!"
                                self._logger.error(msg)
                                raise Exception(msg)

                            # Process the FCS file node
                            self.processFCSFile(fcsNode, openBISTube,
                                                openBISExperiment)

                elif nodeType == "Tray":

                    # Process the tray node and get the openBIS object
                    openBISTray = self.processTray(childNode,
                                                   openBISExperiment)

                    # Now iterate over the children of the Tray
                    for specimenNode in childNode:

                        # The child of a Tray is a Specimen
                        if specimenNode.tag != "Specimen":
                            msg = "Expected Specimen node!"
                            self._logger.error(msg)
                            raise Exception(msg)

                        # The only information we need from the Specimen is its
                        # name to associate to the Wells as property
                        specimenNameProperty = specimenNode.attrib.get("name")

                        for wellNode in specimenNode:

                            # The child of a Specimen is a Tube
                            if wellNode.tag != "Well":
                                msg = "Expected Well node!"
                                self._logger.error(msg)
                                raise Exception(msg)

                            # Process the tube node and get the openBIS object
                            openBISWell = self.processTubeOrWell(wellNode,
                                                                 openBISTray,
                                                                 specimenNameProperty,
                                                                 openBISExperiment)

                            # Now process the FCS file
                            for fcsNode in wellNode:

                                # The child of a Tube is an FCSFile
                                if fcsNode.tag != "FCSFile":
                                    msg = "Expected FSC File node!"
                                    self._logger.error(msg)
                                    raise Exception(msg)

                                # Process the FCS file node
                                self.processFCSFile(fcsNode, openBISWell,
                                                    openBISExperiment)

                else:

                    msg = "The Node must be either a Specimen or a Tray"
                    self._logger.error(msg)
                    raise Exception(msg)

        # Log that we are finished with the registration
        self._logger.info("Registration completed")

    def retrieveOrCreateTags(self, tagList):
        """Retrieve or create the tags (metaprojects) with specified names.

        @param tagList comma-separated tag names (may be None)
        @return list of metaproject objects (possibly empty)
        @raise Exception if a metaproject cannot be created
        """
        # Initialize openBISTags list
        openBISTags = []

        # Make sure tagList is not None
        if tagList is None:
            return []

        # Get the individual tag names (with no blank spaces)
        tags = ["".join(t.strip()) for t in tagList.split(",")]

        # Process all tags (metaprojects)
        for tag in tags:
            if len(tag) == 0:
                continue

            # Retrieve the tag (metaproject)
            metaproject = self._transaction.getMetaproject(tag, self._username)
            if metaproject is None:

                # Create the tag (metaproject).
                # Fix: this previously called the logging *module* directly
                # -- logging("...") -- which raises a TypeError; log through
                # the instance logger instead.
                self._logger.info("Creating metaproject " + tag)
                metaproject = self._transaction.createNewMetaproject(tag,
                                                                     "",
                                                                     self._username)

                # Check that creation was successful
                if metaproject is None:
                    msg = "Could not create metaproject " + tag + \
                          " for user " + self._username
                    self._logger.error(msg)
                    raise Exception(msg)

            # Add the created metaproject to the list
            openBISTags.append(metaproject)

        return openBISTags

    def run(self):
        """Run the registration.

        Validates the dropbox layout (one user subfolder containing a
        data_structure.ois index), then parses and registers every
        referenced properties file.

        @raise Exception if the dropbox layout is invalid
        """
        # Make sure that incoming is a folder
        if not self._incoming.isDirectory():
            msg = "Incoming MUST be a folder!"
            self._logger.error(msg)
            raise Exception(msg)

        # Log
        self._logger.info("Incoming folder: " + self._incoming.getAbsolutePath())

        # There must be just one subfolder: the user subfolder
        subFolders = self.getSubFolders()
        if len(subFolders) != 1:
            msg = "Expected user subfolder!"
            self._logger.error(msg)
            raise Exception(msg)

        # Set the user folder
        userFolder = os.path.join(self._incoming.getAbsolutePath(),
                                  subFolders[0])

        # In the user subfolder we must find the data_structure.ois file
        dataFileName = os.path.join(userFolder, "data_structure.ois")
        if not os.path.exists(dataFileName):
            msg = "File data_structure.ois not found!"
            self._logger.error(msg)
            raise Exception(msg)

        # Now read the data structure file and store all the pointers to
        # the properties files. The paths are stored relative to self._incoming,
        # so we can easily build the full file paths.
        propertiesFileList = []
        f = open(dataFileName)
        try:
            for line in f:
                line = re.sub('[\r\n]', '', line)
                propertiesFile = os.path.join(self._incoming.getAbsolutePath(),
                                              line)
                propertiesFileList.append(propertiesFile)
        finally:
            f.close()

        # Process (and ultimately register) all experiments
        for propertiesFile in propertiesFileList:

            # Log
            self._logger.info("* * * Processing: " + propertiesFile + " * * *")

            # Read the properties file into an ElementTree
            tree = xml.parse(propertiesFile)

            # Now register the experiment
            self.register(tree)
def process(transaction):
    """Dropbox entry point.

    @param transaction, the transaction object
    """
    # __file__ is unreliable in Jython, so the dropbox folder is
    # hard-coded relative to the server working directory.
    dbPath = "../core-plugins/flow/1/dss/drop-boxes/BDLSRFortessaDropbox"

    # Make sure the logs subfolder exists before opening the log file.
    logPath = os.path.join(dbPath, "logs")
    if not os.path.exists(logPath):
        os.makedirs(logPath)

    logFile = os.path.join(logPath, "log.txt")

    # Delegate all registration work to a Processor instance.
    Processor(transaction, logFile).run()
|
# Notes on Strings, Integers, Floats, and Booleans
# The number in the square brackets will determine what gets printed
# These are strings
print("Hello"[0])
print("Hello"[4])
# To declare an integer, just type out an integer (number)
# You can use underscores to better read large numbers and the computer will ignore them
# the plus sign works to add the integers.
# Fix: the original wrote `addition = print(123 + 456)`, but print() returns
# None, so the conversion demo below was converting None instead of the sum.
# Store the sum first, then print it.
addition = 123 + 456
print(addition)
# The Float type uses decimal points, like pi
# Boolean is only True or False
##################################################
# You can change data types. For example you can change an integer into a string by using this function below
new_addition = str(addition)
print(type(new_addition))
# You can use the type function to tell you what kind of variable you are working with
print(type("Hello"))
# we can also convert something within the print
print(70 + float("100.5"))
# you can use str() int() or float() to convert your variables
##################################################
# how to do math in python
# addition
print("addition", 5 + 5)
# subtraction
print("subtraction", 7 - 4)
# multiplication
print("multiplication", 3 * 2)
# division (always returns a float)
print("division", 6 / 3)
# raise to the power of
print("raise 2 to the power of (3)", 2 ** 3)
# round
print("round the value of this division", round(8 / 3))
# floor division: divide and drop the fractional part
print("turn a division result into an integer", 8 // 3)
# you can also change a value by adding the math function to the beginning of the equals sign
score = 0
# this adds 1 to the value of score
score += 1
print("The score is", score)
##################################################
# f-String
print(f"you can mix and match variables by adding them within braces here with an 'f' at the beginning with no spaces")
|
# -*- encoding: utf-8 -*-
import setuptools

# Read the long description from the README.  Fix: specify the encoding
# explicitly so the build does not depend on the platform's default
# locale (Windows would otherwise decode with cp1252 and can fail on
# non-ASCII characters in the README).
with open('README.md', 'r', encoding='utf-8') as file:
    readme_contents = file.read()

setuptools.setup(
    name='pysofar',
    version='0.1.15',
    license='Apache 2 License',
    install_requires=[
        'requests',
        'python-dotenv'
    ],
    description='Python client for interfacing with the Sofar Wavefleet API to access Spotter Data',
    long_description=readme_contents,
    long_description_content_type='text/markdown',
    author='Rufus Sofar',
    author_email='sofaroceangithubbot@gmail.com',
    url='https://github.com/wavespotter/wavefleet-client-python',
    # The package sources live under src/ (src-layout).
    package_dir={'': 'src'},
    packages=setuptools.find_packages('src'),
    classifiers=[
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent"
    ],
    project_urls={
        'Sofar Ocean Site': 'https://www.sofarocean.com',
        'Spotter About': 'https://www.sofarocean.com/products/spotter',
        'Spotter Data FAQ': 'https://www.sofarocean.com/posts/spotter-data-subscription-and-storage',
        'Sofar Dashboard': 'https://spotter.sofarocean.com/',
        'Sofar Api FAQ': 'https://spotter.sofarocean.com/api'
    }
)
|
#!/usr/bin/python
# Plots the optimal S/N of supermassive black-hole binaries in the
# redshift vs. chirp-mass plane for the PPTA pulsar array, overlaying
# the Graham et al. candidate binaries.  Python 2 script (uses xrange
# and the numpy "[()]" idiom to unwrap a pickled dict from np.load).
import numpy as np
import pylab as py
from COMMON import nanosec,yr,week,grav,msun,light,mpc,hub0,h0,omm,omv
import COMMON as CM
from USEFUL import time_estimate
from matplotlib import colors
from scipy import interpolate
import Formulas_AjithEtAl2008 as A8
#Input parameters:
# NOTE(review): tobs, cad, fbins, flim_model, detector, snrt, snrbins,
# reds_text and rotangle are presumably supplied by this star import --
# confirm against INPUT_PARAMETERS.py.
from INPUT_PARAMETERS import *
datafile='../data/output/SNR_z_vs_mc_PPTA_model_'+flim_model+'/snr_pulsar' #Beginning of the names of the files.
outputplot='../plots/z_vs_mc_PPTA_model_'+flim_model+'_worst'
#-----------------------------------------------------------------
#Obtaining noise curves for the different PPTA pulsars.
fmin=1./tobs #Minimum frequency.
fmax=1./cad #Maximum frequency.
fvec=np.logspace(np.log10(fmin), np.log10(fmax), fbins)
Snf=CM.PPTA_red_noise(fvec, cad) #Matrix with the noise of each pulsar.
numpul=np.shape(Snf)[0] #Number of pulsars.
#Load SBHB candidates.
candi=np.load('../data/Graham/Candidates.npy')[()]
candi_z=candi['z']
candi_mtot=candi['t_mass']
candi_fgw=candi['f_gw_obs']
# Chirp mass from total mass assuming equal-mass binaries:
# Mc = Mtot * 2^(-1/5) / 2.
candi_mch=candi_mtot*2**(-1./5.)*0.5
candi_snr=np.load('../data/Graham/Cand_snr_PPTA_model_'+flim_model+'.npy')[()]['snr']
# Load the first pulsar's file to get the common z/Mc/frequency grids.
data=np.load(datafile+'1.npy')[()]
mch_mat=data['mch']
z_mat=data['z']
#snrsq_mat=data['snr_min']**2.
#snrsq_mat=np.zeros(np.shape(mch_mat))
fvec=data['fvec']
# Accumulate squared S/N over all pulsars (quadrature sum).
for pulsi in xrange(numpul):
    #Load S/N data.
    data=np.load(datafile+'%i.npy'%(int(pulsi)+1))[()]
    if pulsi==0:
        snrsq_mat=data['snr']**2.
    else:
        snrsq_mat+=data['snr']**2.
snr_mat=np.sqrt(snrsq_mat)
#f_mat=fvec[np.argmax(snr_mat, axis=2)] #Frequency at which the S/N is maximum, for each pixel of redshift and mass.
#snr_mat=np.amax(snr_mat, axis=2) #Maximum S/N at each pixel of redshift and mass.
# "Worst case": evaluate the S/N at the observed GW frequency of the
# highest-S/N candidate rather than at the S/N-maximizing frequency.
indibest=candi_snr.argmax()
#indibest=candi_mch.argmax()
fdet=candi_fgw[indibest]
indifdet=abs(fvec-fdet).argmin()
f_mat=np.ones(np.shape(z_mat))*fvec[indifdet]
snr_mat=snr_mat[:,:,indifdet]
#Derive some quantities.
reds=z_mat[:,0]
mchvec=mch_mat[0,:]
# Cutoff (rest-frame) frequency per pixel, redshifted to the observer frame.
if flim_model=='ISCO':
    flim_mat=CM.felso(mch_mat*2.**(1./5.),mch_mat*2.**(1./5.))*1./(1.+z_mat)
elif flim_model=='A8':
    flim_mat=A8.f_cut(1./4., 2.*mch_mat*2.**(1./5.))*1./(1.+z_mat)
zlim_mat=np.ones(np.shape(z_mat))*np.nan
zlim_mat[flim_mat<fmin]=1. #This matrix shows the region of z-Mc that cannot be seen, because the ISCO has already been reached below the minimum observable frequency.
# Work in log10(S/N); non-positive values are masked out as NaN.
snr_mat[snr_mat<=0]=np.nan
snr_mat[snr_mat>0.]=np.log10(snr_mat[snr_mat>0.])
#Optimal plotting options.
import PARAMETER_PLOTS
left, right, top, bottom, cb_fraction=0.13, 0.94, 0.96, 0.16, 0.08 #Borders of the plot.
xmin,xmax=np.amin(mch_mat),np.amax(mch_mat) #Edges of the x-axis.
ymin,ymax=np.amin(z_mat), np.amax(z_mat) #Edges of the y-axis.
#Create an S/N plot.
fig=py.figure()
fig.subplots_adjust(left=left,right=right,top=top,bottom=bottom)
ax=fig.gca()
ax.set_xscale('log')
ax.set_yscale('log')
cmap=py.cm.winter
# Filled contours of log10(S/N) above the detection threshold snrt.
levels=np.log10(np.logspace(np.log10(snrt), np.amax(snr_mat[np.isnan(snr_mat)==False]), snrbins))
snrmap=ax.contourf(mch_mat, z_mat, snr_mat, origin='lower', interpolation='None', aspect='auto', alpha=0.5, cmap=cmap, levels=levels)
# Red overlay: region below the detection threshold.
cmap = colors.ListedColormap(['white', 'red'])
snr_mat_t=snr_mat.copy()
snr_mat_t[10**(snr_mat)<snrt]=1
snr_mat_t[10**(snr_mat)>=snrt]=np.nan
ax.contourf(mch_mat, z_mat, snr_mat_t, origin='lower', interpolation='None', aspect='auto', alpha=0.5, cmap=cmap, levels=[0., 5.])
cmap = colors.ListedColormap(['black'])
ax.contour(mch_mat, z_mat, snr_mat, origin='lower', interpolation='None', aspect='auto', alpha=0.2, cmap=cmap, levels=levels)
# Black overlay: unobservable region (cutoff frequency below fmin).
ax.contourf(mch_mat, z_mat, zlim_mat, origin='lower', interpolation='None', aspect='auto', alpha=0.5, cmap=cmap, levels=[0., 5.])
# Frequency contour levels to annotate; for PTAs they are taken from the
# frequencies at a few reference chirp masses, otherwise fixed decades.
if detector in ['EPTA', 'PPTA']:
    ax.plot(candi_mch, candi_z, '.', color='blue', markersize=3.)
    mchlevels=10**(np.array([9., 10., 11., 12.]))
    flevels_vec=np.zeros(len(mchlevels))
    for mi in xrange(len(mchlevels)):
        indi=abs(mch_mat[0,:]-mchlevels[mi]).argmin()
        flevels_vec[mi]=f_mat[0, indi]
    flevels_vec=np.sort(flevels_vec)
    #flevels=5
    #flevels_vec=np.logspace(np.log10(fmin), np.log10(fmax), flevels)
    #flevels_vec=10**(np.array([-8., -7.]))
    flevels_max=1e-7
else:
    flevels_vec=10**(np.array([0.,0.97,1., 2., 3.]))
    flevels_max=3.
#Create labels for text.
# Split each frequency level into mantissa and exponent for LaTeX labels.
flevels_exp=np.zeros(np.shape(flevels_vec))
flevels_num=np.zeros(np.shape(flevels_vec))
for fi in xrange(len(flevels_vec)):
    label=('%.1e' %flevels_vec[fi]).split('e')
    exp_label=int(label[1])
    num_label=float(label[0])
    flevels_exp[fi]=exp_label
    flevels_num[fi]=num_label
snr_level=np.zeros(np.shape(snr_mat))
ypix=abs(reds-reds_text).argmin() #Number of y-pixel where the text should appear.
rowi=f_mat[ypix,:]
# Bin the frequency map into the chosen levels and place one text label
# per level at row ypix.
for fi in xrange(len(flevels_vec)-1):
    selecti=(f_mat>=flevels_vec[fi])&(f_mat<flevels_vec[fi+1])
    if len(selecti[selecti])==0:
        continue
    snr_level[selecti]=fi+1
    xpix=abs(rowi-flevels_vec[fi]).argmin() #Number of x-pixel where the text should appear.
    if flevels_vec[fi]<flevels_max:
        alittleleft=1.02 #The text should be a bit on the left of the line so that it can be read.
        #ax.text(10**(alittleleft*np.log10(mchvec[xpix])), reds_m[ypix], '$%.1e \\mathrm{ Hz}$ '%flevels_vec[fi], horizontalalignment='center', fontsize=6, color='black', rotation=rotangle)
        texti='$%.1f \\times 10^{%i} \\mathrm{ Hz}$ '%(flevels_num[fi], flevels_exp[fi])
        #texti='$%.1f \\times 10^{%i} - %.1f \\times 10^{%i} \\mathrm{ Hz}$ '%(flevels_num[fi], flevels_exp[fi], flevels_num[fi+1], flevels_exp[fi+1])
        ax.text(10**(alittleleft*np.log10(mchvec[xpix])), reds[ypix], texti, horizontalalignment='center', fontsize=7, color='black', rotation=rotangle)
# NOTE(review): `fi` leaks from the loop above; if flevels_vec has fewer
# than two entries this line would raise NameError -- confirm inputs.
snr_level[f_mat>=flevels_vec[-1]]=fi+2
snr_level[zlim_mat==1.]=0.
cmap = colors.ListedColormap(['black'])
ax.contour(mch_mat, z_mat, snr_level, cmap=cmap, levels=np.arange(10), alpha=1.)
#ax.contour(mch_mat, z_mat, f_mat, cmap=cmap, levels=flevels_vec, alpha=1.)
cb = fig.colorbar(snrmap,fraction=cb_fraction,format='$%i$', ticks=[-2., -1., 0., 1., 2., 3., 4., 5., 6.])
ax.set_xlabel('$\log_{10}(\\mathrm{Physical\ chirp\ mass\ /\ M_{\\odot}})$')
ax.set_ylabel('$\log_{10}(\\mathrm{Redshift})$')
ax.set_xlim(xmin,xmax)
if detector in ['EPTA', 'PPTA']:
    ax.set_xticks([1e9, 1e10, 1e11, 1e12])
    ax.set_xticklabels(["$9$", "$10$", "$11$", "$12$"])
else:
    ax.set_xticks([1e-1, 1e0, 1e1, 1e2, 1e3])
    ax.set_xticklabels(["$-1$", "$0$", "$1$", "$2$", "$3$"])
ax.set_ylim(ymin,ymax)
ax.set_yticks([1e-2,1e-1,1e0,1e1,1e2])
ax.set_yticklabels(["$-2$","$-1$","$0$","$1$","$2$"])
cb.set_label('$\\log_{10}(\\mathrm{Optimal\ S/N})$')
fig.savefig(outputplot+'.png', dpi=600)
|
from django.shortcuts import render
from django.contrib.auth import get_user_model
from django.utils.encoding import force_text
from django.utils.http import urlsafe_base64_decode
from django.contrib.auth.models import User
from rest_framework import generics
from rest_framework import permissions
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework.parsers import FileUploadParser
from rest_framework.decorators import api_view
from rest_framework.reverse import reverse
from rest_framework import renderers
from snippets.models import Snippet, CourseList, CoursePage, CourseUsers, UserProfile
from snippets.serializers import SnippetSerializer, UserSerializer,\
UserCreateSerializer, CreateSnippetSerializer, CreateCourseSerializer,\
CreateCoursePageSerializer, CourseDetailSerializer, CourseListSerializer,\
CoursePageSerializer, CourseDetailPageSerializer, CourseUserSerializer,\
CourseUsersListSerializer, CourseUserDetailSerializer,\
UserProfileSerializer, UserUpdateSerializer
from snippets.permissions import IsOwnerOrReadOnly, IfGroup2, IsUserOrReadOnly
from utils.token_generator import token_generator
class SnippetList(generics.ListAPIView):
    """Read-only listing of every snippet."""

    serializer_class = SnippetSerializer
    permission_classes = (permissions.DjangoModelPermissions,)
    queryset = Snippet.objects.all()
class CreateSnippet(generics.CreateAPIView):
    """Create a snippet owned by the requesting user."""

    serializer_class = CreateSnippetSerializer
    permission_classes = (permissions.DjangoModelPermissions,)
    queryset = Snippet.objects.all()

    def perform_create(self, serializer):
        # Stamp the authenticated user as the snippet's owner.
        current_user = self.request.user
        serializer.save(owner=current_user)
class SnippetDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve, update or delete one snippet (owner/group-2 restricted)."""

    serializer_class = CreateSnippetSerializer
    permission_classes = (permissions.DjangoModelPermissions, IsOwnerOrReadOnly, IfGroup2)
    queryset = Snippet.objects.all()
class UserList(generics.ListAPIView):
    """Read-only listing of all users."""

    serializer_class = UserSerializer
    permission_classes = (permissions.DjangoModelPermissions, IsOwnerOrReadOnly)
    queryset = User.objects.all()
class UserDetail(generics.RetrieveAPIView):
    """Read-only detail view of one user."""

    serializer_class = UserSerializer
    permission_classes = (permissions.DjangoModelPermissions, IsOwnerOrReadOnly)
    queryset = User.objects.all()
class CreateUserView(generics.CreateAPIView):
    """Open registration endpoint: anyone may create a user account."""

    serializer_class = UserCreateSerializer
    permission_classes = (permissions.AllowAny,)
    model = User
class UpdateUserView(generics.RetrieveUpdateAPIView):
    """Retrieve or update a user; writable only by that user."""

    serializer_class = UserUpdateSerializer
    permission_classes = (IsUserOrReadOnly,)
    queryset = User.objects.all()
class CreateCourseView(generics.ListCreateAPIView):
    """List courses or create one owned by the requesting user."""

    serializer_class = CreateCourseSerializer
    permission_classes = (permissions.DjangoModelPermissions,)
    queryset = CourseList.objects.all()

    def perform_create(self, serializer):
        # Stamp the authenticated user as the course's owner.
        current_user = self.request.user
        serializer.save(owner=current_user)
class CreatePageView(generics.ListCreateAPIView):
    """List and create course pages."""
    queryset = CoursePage.objects.all()
    serializer_class = CreateCoursePageSerializer
    permission_classes = (permissions.DjangoModelPermissions,)
class CoursListView(generics.ListAPIView):
    """Read-only list of courses. (Name keeps the existing 'Cours' spelling for URL-conf compatibility.)"""
    queryset = CourseList.objects.all()
    serializer_class = CourseListSerializer
    permission_classes = (permissions.DjangoModelPermissions,)
class CourseDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete a single course; writes restricted to its owner."""
    queryset = CourseList.objects.all()
    serializer_class = CourseDetailSerializer
    permission_classes = (permissions.DjangoModelPermissions, IsOwnerOrReadOnly,)
class CoursPageListView(generics.ListAPIView):
    """Read-only list of course pages."""
    queryset = CoursePage.objects.all()
    serializer_class = CoursePageSerializer
    permission_classes = (permissions.DjangoModelPermissions,)
class CoursPageDetailView(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete a single course page."""
    queryset = CoursePage.objects.all()
    serializer_class = CourseDetailPageSerializer
    permission_classes = (permissions.DjangoModelPermissions,)
class CoursUserView(generics.CreateAPIView):
    """Enroll a user on a course; the request user becomes the record owner."""
    queryset = CourseUsers.objects.all()
    serializer_class = CourseUserSerializer
    permission_classes = (permissions.DjangoModelPermissions,)

    def perform_create(self, serializer):
        serializer.save(owner=self.request.user)
class CoursUserListView(generics.ListAPIView):
    """Read-only list of course enrollments."""
    queryset = CourseUsers.objects.all()
    serializer_class = CourseUsersListSerializer
    permission_classes = (permissions.DjangoModelPermissions,)
class CoursUserDetailView(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete a single enrollment; writes restricted to its owner."""
    queryset = CourseUsers.objects.all()
    serializer_class = CourseUserDetailSerializer
    permission_classes = (permissions.DjangoModelPermissions, IsOwnerOrReadOnly,)
@api_view(['GET'])
def api_root(request, format=None):
    """API entry point listing the top-level endpoints of this service."""
    return Response({
        # 'users': reverse('user-list', request=request, format=format),
        # 'snippets': reverse('snippet-list', request=request, format=format),
        'courses': reverse('courselist-list', request=request, format=format),
        'registration': reverse('user-list', request=request, format=format)
    })
class ConfirmEmailView(generics.ListAPIView):
    """Activates a user account from an e-mail confirmation link."""
    permission_classes = (permissions.AllowAny,)

    @staticmethod
    def get(request, user, token):
        """Decode the user's pk from the URL, check the token, activate the account.

        `user` is a urlsafe-base64-encoded primary key; `token` is the one-time
        activation token produced by ``token_generator``.
        """
        try:
            # NOTE(review): force_text is deprecated in newer Django -- confirm
            # the installed version still provides it (force_str is the successor).
            uid = force_text(urlsafe_base64_decode(user))
            user = get_user_model().objects.get(pk=uid)
        except (TypeError, ValueError, OverflowError, get_user_model().DoesNotExist):
            user = None
        if user is not None and token_generator.check_token(user, token):
            user.is_active = True
            user.save()
            # ~ return Response(data={'status': 'ok'}, content_type='application/json', status=status.HTTP_202_ACCEPTED)
            return Response({'status': 'ok'})
        else:
            # User-facing (Russian) message: "Invalid activation link".
            return Response({'status': 'error', 'message': 'Неверная ссылка активации'})
            # ~ status=status.HTTP_400_BAD_REQUEST
class AccInfoView(generics.ListAPIView):
    """Returns basic account info for the current request user."""
    permission_classes = (permissions.AllowAny,)

    @staticmethod
    def get(request):
        # Authenticated users get the full profile; anonymous users only get
        # the id (which serialises as the string 'None' for AnonymousUser).
        if request.user.id:
            return Response({
                'user_id': str(request.user.id),
                'Name': request.user.username,
                'first_name': request.user.first_name,
                'last_name': request.user.last_name,
                'email': request.user.email
            })
        else:
            return Response({'user_id': str(request.user.id)})
class AccountView(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete a user profile, looked up by its owner's id."""
    lookup_field = 'owner_id'
    queryset = UserProfile.objects.all()
    serializer_class = UserProfileSerializer
    permission_classes = (IsOwnerOrReadOnly,)
# ~ class AccountUploadView(APIView):
# ~ parser_class = (FileUploadParser,)
# ~ def post(self, request, *args, **kwargs):
# ~ acc_serializer = AccountSerializer(data=request.data)
# ~ if acc_serializer.is_valid():
# ~ ac_serializer.save()
# ~ return Response(acc_serializer.data, status=status.HTTP_201_CREATED)
# ~ else:
# ~ return Response(acc_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
# ~ class SnippetHighlight(generics.GenericAPIView):
# ~ queryset = Snippet.objects.all()
# ~ renderer_classes = (renderers.StaticHTMLRenderer,)
# ~ def get(self, request, *args, **kwargs):
# ~ snippet = self.get_object()
# ~ return Response(snippet.highlighted)
|
##########################################################################################
# #
# ICT FaceKit #
# #
# Copyright (c) 2020 USC Institute for Creative Technologies #
# #
# Permission is hereby granted, free of charge, to any person obtaining a copy #
# of this software and associated documentation files (the "Software"), to deal #
# in the Software without restriction, including without limitation the rights #
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #
# copies of the Software, and to permit persons to whom the Software is #
# furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in all #
# copies or substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #
# SOFTWARE. #
##########################################################################################
"""Defines functionality to use the ICT face model.
This module defines the FaceModel class which allows one work with faces
parameterized by the ICT morphable face model.
"""
import json
import os
import numpy as np
import openmesh as om
class FaceModel:  # pylint: disable=too-many-instance-attributes
    """A class that parameterizes faces with the ICT face model.

    This class represents faces parameterized by the ICT face model. Each
    FaceModel object uses a list of identity and expression weights to compute
    a deformed mesh in terms of the generic neutral mesh, the identity shape
    modes, and the expression shape modes.

    Attributes:
        _model_initialized: A boolean representing whether or not the ICT Face Model
            has been loaded.
        _model_config: The ICT face model configuration data.
        _generic_neutral_mesh: The generic neutral mesh represented as an
            openmesh.PolyMesh object.
        _deformed_mesh: A deep copy of the generic neutral mesh that we will
            deform using the identity and expression shape modes.
        _deformed_vertices: A numpy array of dimension N * 3 where N is the
            number of vertices of the generic neutral mesh. Represents the
            vertices of the deformed mesh.
        _expression_names: A numpy array of strings representing the names of
            the face model expressions.
        _identity_names: A numpy array of strings representing the names of
            the face model identities.
        _num_expression_shapes: The number of face model expressions.
        _num_identity_shapes: The number of face model identities.
        _expression_weights: A numpy array of dimension K representing the
            weights corresponding to each expression shape mode.
        _identity_weights: A numpy array of dimension K representing the
            weights corresponding to each identity shape mode.
        _expression_shape_modes: A numpy array of dimension K * N * 3 where K
            is the number of shape modes and N is the number of vertices.
            Represents the list of expression shape modes computed from the
            expression meshes read in with openmesh.
        _identity_shape_modes: A numpy array of dimension K * N * 3 where K
            is the number of shape modes and N is the number of vertices.
            Represents the list of identity shape modes computed from the
            identity meshes read in with openmesh.
    """

    def __init__(self):
        """Creates a new FaceModel object.

        Creates a new FaceModel object by initializing each of its attributes
        to None and initializing the model loaded attribute to False.
        """
        self._model_initialized = False
        self._model_config = None
        self._generic_neutral_mesh = None
        self._deformed_mesh = None
        self._deformed_vertices = None
        self._expression_names = None
        self._identity_names = None
        self._num_expression_shapes = None
        self._num_identity_shapes = None
        self._expression_weights = None
        self._identity_weights = None
        self._expression_shape_modes = None
        self._identity_shape_modes = None

    def set_identity(self, identity_weights):
        """Sets the identity weights.

        If the ICT Face Model has been loaded, overwrites the identity weights.
        If the ICT Face Model has not been loaded, assigns the identity weights
        to the input identity weights so that they can be processed when the
        ICT Face Model is loaded.

        Args:
            identity_weights: the new list of identity_weights
        """
        if self._model_initialized:
            # Make sure that identity_weights is the right dimension
            min_num_ids = min(self._num_identity_shapes, len(identity_weights))
            self._identity_weights[:min_num_ids] = identity_weights[:min_num_ids]
        else:
            self._identity_weights = identity_weights

    def set_expression(self, expression_weights):
        """Sets the expression weights.

        If the ICT Face Model has been loaded, overwrites the expression
        weights. If the ICT Face Model has not been loaded, assigns the
        expression weights to the input expression weights so that they can be
        processed when the ICT Face Model is loaded.

        Args:
            expression_weights: the new list of expression_weights
        """
        if self._model_initialized:
            # Make sure that expression_weights is the right dimension
            min_num_exs = min(self._num_expression_shapes, len(expression_weights))
            self._expression_weights[:min_num_exs] = expression_weights[:min_num_exs]
        else:
            self._expression_weights = expression_weights

    def randomize_identity(self):
        """Sample a random identity from the normal distribution.

        Randomly samples the normal distribution to make a new face. Does not
        deform the deformed mesh, but instead updates the identity weights.

        As a precondition the ICT Face Model must be loaded. Raises an
        exception if the ICT Face Model was not loaded.
        """
        assert self._model_initialized, "Face Model not loaded but required"
        self._identity_weights = np.random.normal(size=self._num_identity_shapes)

    def from_coefficients(self, id_coeffs, ex_coeffs):
        """Sets both identity and expression weights from coefficient lists.

        Args:
            id_coeffs: the new list of identity weights.
            ex_coeffs: the new list of expression weights.
        """
        self.set_identity(id_coeffs)
        self.set_expression(ex_coeffs)

    def reset_mesh(self):
        """Resets the deformed mesh to the generic neutral mesh.

        Resets the deformed mesh to the generic neutral mesh by setting the
        vertices of the deformed mesh to a copy of the vertices of the generic
        neutral mesh.

        As a precondition the ICT Face Model must be loaded. Raises an
        exception if the ICT Face Model was not loaded.
        """
        assert self._model_initialized, "Face Model not loaded but required"
        self._deformed_vertices[:] = self._generic_neutral_mesh.points()

    def deform_mesh(self):
        """Updates the deformed mesh based on the shape mode weights.

        Updates the vertices of the deformed mesh by adding the product of
        each identity weight by identity shape mode and each expression weight
        by expression shape mode to the vertices of the generic neutral mesh.

        As a precondition the ICT Face Model must be loaded. Raises an
        exception if the ICT Face Model was not loaded.
        """
        assert self._model_initialized, "Face Model not loaded but required"
        # reset to generic mesh
        self.reset_mesh()
        # compute the contribution of the identity shape modes
        self._deform_mesh_helper(self._identity_weights,
                                 self._identity_shape_modes)
        # compute the contribution of the expression shape modes
        self._deform_mesh_helper(self._expression_weights,
                                 self._expression_shape_modes)

    def _deform_mesh_helper(self, weights, shape_modes):
        """Adds the specified shape modes to the deformed mesh.

        Given a scalar list of weights and a list of shape modes, loops over
        the weights and shape modes and adds the contribution of each current
        weight * shape mode to the deformed mesh.
        """
        # Loop over the weights and shape modes
        for weight, shape_mode in zip(weights, shape_modes):
            # Add the contribution of the current weight * shape mode
            self._deformed_vertices += weight * shape_mode

    def get_deformed_mesh(self):
        """Returns the deformed mesh.

        As a precondition the ICT Face Model must be loaded. Raises an
        exception if the ICT Face Model was not loaded.

        Returns:
            Returns a reference to the openmesh.PolyMesh deformed mesh object.
        """
        assert self._model_initialized, "Face Model not loaded but required"
        return self._deformed_mesh
|
# https://wikidocs.net/20
# Tutorial script: demonstrates Python's conditional syntax, one variant at a time.

# Plain boolean condition.
money = True
if money:
    print('Take a taxi')
else:
    print('Take a walk')

# Comparison-operator condition.
money = 2000
if money >= 3000:
    print('Take a taxi')
else:
    print('Take a walk')

# Compound condition with `or`.
card = True
if money >= 3000 or card:
    print('Take a taxi')
else:
    print('Take a walk')

# Membership test with `in`.
pocket = ['paper', 'cellphone', 'money']
if 'money' in pocket:
    print('Take a taxi')
else:
    print('Take a walk')

# `pass` as an empty branch body.
pocket = ['paper', 'cellphone', 'money']
if 'money' in pocket:
    pass
else:
    print('Take out the card')

# Nested if/else ...
pocket = ['paper', 'cellphone']
card = True
if 'money' in pocket:
    print('Take a taxi')
else:
    if card:
        print('Take a taxi')
    else:
        print('Take a walk')

# ... versus the equivalent flat elif chain.
if 'money' in pocket:
    print('Take a taxi')
elif card:
    print('Take a taxi')
else:
    print('Take a walk')

# Single-line if statements.
if 'money' in pocket: pass
else: print('Take out the card')

# Conditional expression (ternary).
score = 50
message = 'success' if score >= 60 else 'failure'
print(message)
# Generated by Django 2.1.3 on 2018-11-07 08:07
from django.db import migrations, models
class Migration(migrations.Migration):
    """Convert SecurityGroup.access_level from a single FK into a ManyToMany to core.Location."""

    dependencies = [
        ('core', '0038_auto_20181107_2059'),
    ]

    operations = [
        # The old column is dropped first, then re-added as M2M; existing
        # access_level data is NOT migrated (NOTE(review): confirm intended).
        migrations.RemoveField(
            model_name='securitygroup',
            name='access_level',
        ),
        migrations.AddField(
            model_name='securitygroup',
            name='access_level',
            field=models.ManyToManyField(to='core.Location'),
        ),
    ]
|
import random

# Despite the name, this is the length of a 1-D list of random numbers.
size_of_matrix = int(input('Enter size of matrix: '))
def gen_list(size_of_matrix):
    """Return a list of `size_of_matrix` random three-digit integers (100-999).

    Replaces the pre-allocated list of None plus index assignment with the
    idiomatic list comprehension; the sequence of randint calls is unchanged.
    """
    return [random.randint(100, 999) for _ in range(size_of_matrix)]
def sort_list(array):
    """Sort `array` ascending, in place, and return it.

    Keeps the original contract (the argument is mutated and also returned)
    but replaces the hand-written O(n^2) bubble sort with the built-in
    Timsort, which is O(n log n) and stable.
    """
    array.sort()
    return array
sample_matrix = gen_list(size_of_matrix)
print('Original array:', sample_matrix)
# sort_list mutates its argument, so sample_matrix is also sorted after this call.
sorted_matrix = sort_list(sample_matrix)
print('Sorted array:', sorted_matrix)
|
def get_even_numbers(arr):
    """Return a list of the even numbers in `arr`, preserving order.

    Python's built-in ``filter`` behaves much like JS's ``Array.filter``
    (https://docs.python.org/3/library/functions.html#filter), but a list
    comprehension is the more idiomatic spelling and avoids the lambda.

    Example:
        get_even_numbers([2, 4, 5, 6]) => [2, 4, 6]
    """
    return [x for x in arr if x % 2 == 0]
|
from django.conf.urls import url,include
from invoice import views
urlpatterns = [
    # Render the invoice as a PDF.
    url(r"^pdf$", views.PDFView.as_view()),
    # Dispatch the invoice -- presumably by e-mail; confirm in invoice.views.send.
    url(r"^send$", views.send.as_view()),
]
import os
import numpy as np
import pandas as pd

# Directory containing this script; used to resolve the data files referenced below.
cwd = os.path.dirname(os.path.realpath(__file__))
# MDR_FOI_LIST = ['', 'Add', 'Change', 'Thru2016']
# for s in MDR_FOI_LIST:
# mdr = pd.read_csv(os.path.join(cwd, 'foidev', 'mdrfoi'+s+'.txt'), sep='|', header=0, encoding='ISO-8859-1',
# error_bad_lines=False)
# print(mdr.shape[0])
# key = pd.read_csv(os.path.join(cwd, 'FDA_File', '3_Final_BI_KEY_List.txt'), sep='|', header=0,
# encoding='ISO-8859-1', error_bad_lines=False)
# print(key.shape[0])
#
# alcl = pd.read_csv(os.path.join(cwd, 'FDA_File', '5_ALCL.txt'), sep='|', header=0,
# encoding='ISO-8859-1', error_bad_lines=False)
# print(alcl.shape[0])
#
# alcl_bi = pd.read_csv(os.path.join(cwd, 'FDA_File', '5_BI_ALCL_KEY_list.txt'), sep='|', header=0,
# encoding='ISO-8859-1', error_bad_lines=False)
# print(alcl_bi.shape[0])
#
# key_result = pd.merge(key, mdr, how='inner', on='MDR_REPORT_KEY', left_index = True, right_index=False, sort=True,
# suffixes=('_x', '_y'), copy=True, indicator=False)
# print(key_result.shape[0])
# key_result = key_result['REPORT_NUMBER']
# key_result.to_csv('D:/Workplace/MAUDE/key_result.txt')
#
# alcl_result = pd.merge(alcl, mdr, how='inner', on='MDR_REPORT_KEY', left_index = True, right_index=False, sort=True,
# suffixes=('_x', '_y'), copy=True, indicator=False)
# print(alcl_result.shape[0])
# alcl_result = alcl_result['REPORT_NUMBER']
# alcl_result.to_csv('D:/Workplace/MAUDE/alcl_result.txt')
#
# alcl_bi_result = pd.merge(alcl_bi, mdr, how='inner', on='MDR_REPORT_KEY', left_index = True, right_index=False, sort=True,
# suffixes=('_x', '_y'), copy=True, indicator=False)
# print(alcl_bi_result.shape[0])
# alcl_bi_result = alcl_bi_result['REPORT_NUMBER']
# alcl_bi_result.to_csv('D:/Workplace/MAUDE/alcl_bi_result.txt')
# text = pd.read_csv(os.path.join(cwd, 'FDA_File', '1_Text_BI_List.txt'), sep='|', header=0,
# encoding='ISO-8859-1', error_bad_lines=False)
# print(text.shape[0])
#
# text_result = pd.merge(text, mdr, how='inner', on='MDR_REPORT_KEY', left_index = True, right_index=False, sort=True,
# suffixes=('_x', '_y'), copy=True, indicator=False)
# print(text_result.shape[0])
# text_result = text_result['REPORT_NUMBER']
# text_result.to_csv('D:/Workplace/MAUDE/text_result.txt')
# FTR = pd.read_csv(os.path.join(cwd, 'FDA_File', '2_FTR_List.txt'), sep='|', header=0,
# encoding='ISO-8859-1', error_bad_lines=False)
# print(FTR.shape[0])
#
# FTR_result = pd.merge(FTR, mdr, how='inner', on='MDR_REPORT_KEY', left_index = True, right_index=False, sort=True,
# suffixes=('_x', '_y'), copy=True, indicator=False)
# print(FTR_result.shape[0])
# FTR_result = FTR_result['REPORT_NUMBER']
# FTR_result.to_csv('D:/Workplace/MAUDE/FTR_result.txt')
# FWM = pd.read_csv(os.path.join(cwd, 'FDA_File', '2_FWM_List.txt'), sep='|', header=0,
# encoding='ISO-8859-1', error_bad_lines=False)
# print(FWM.shape[0])
#
# FWM_result = pd.merge(FWM, mdr, how='inner', on='MDR_REPORT_KEY', left_index = True, right_index=False, sort=True,
# suffixes=('_x', '_y'), copy=True, indicator=False)
# print(FWM_result.shape[0])
# FWM_result = FWM_result['REPORT_NUMBER']
# FWM_result.to_csv('D:/Workplace/MAUDE/FWM_result.txt')
#
# #String manipulation
# with open('FWM_result.txt', 'r', encoding='utf-8') as r:
# with open('3_FWM_result.txt.txt', 'w', encoding='utf-8') as w:
# for line in r:
# line = line.split(',')
# w.write(line[1])
# aa = pd.read_csv(os.path.join(cwd, 'ALCL_anaplastic.txt'), sep='|', header=0,
# encoding='ISO-8859-1', error_bad_lines=False)
# print(aa.shape[0])
#
# aa_result = pd.merge(aa, mdr, how='inner', on='MDR_REPORT_KEY', left_index = True, right_index=False, sort=True,
# suffixes=('_x', '_y'), copy=True, indicator=False)
# print(aa_result.shape[0])
# aa_result = aa_result['REPORT_NUMBER']
# aa_result.to_csv('D:/Workplace/MAUDE/aa_result.txt')
#
# al = pd.read_csv(os.path.join(cwd, 'ALCL_lymphoma.txt'), sep='|', header=0,
# encoding='ISO-8859-1', error_bad_lines=False)
# print(al.shape[0])
#
# al_result = pd.merge(al, mdr, how='inner', on='MDR_REPORT_KEY', left_index = True, right_index=False, sort=True,
# suffixes=('_x', '_y'), copy=True, indicator=False)
# print(al_result.shape[0])
# al_result = al_result['REPORT_NUMBER']
# al_result.to_csv('D:/Workplace/MAUDE/al_result.txt')
# Post-process the pandas to_csv output: keep only the second comma-separated
# field (the REPORT_NUMBER) from each "index,value" line.
with open('al_result.txt', 'r', encoding='utf-8') as r:
    with open('ala_result.txt', 'w', encoding='utf-8') as w:
        for line in r:
            line = line.split(',')
            # NOTE(review): raises IndexError on a line without a comma, and keeps
            # the trailing newline only when the line has exactly one comma -- confirm input format.
            w.write(line[1])
# Author Yinsen Miao
import pandas as pd
import time
import json
import os
from tqdm import tqdm
prefix = "../data/parid"
os.makedirs(prefix, exist_ok=True)

# Parcel ids for which a JSON response file ("<parid>.txt") was already fetched.
has_parids = [file.replace(".txt", "") for file in os.listdir(prefix)]

list_parid_geo = []
for parid in tqdm(has_parids):
    try:
        with open('%s/%s.txt' % (prefix, parid)) as json_file:
            data = json.load(json_file)
        # NOTE(review): GeoJSON centroids are conventionally (lon, lat) --
        # confirm the order used by this data source.
        latitude, longitude = data["results"][0]["geos"]["centroid"]["coordinates"]
        latitude, longitude = float(latitude), float(longitude)
        census_tract = data["results"][0]["data"]["centroids_and_geo_info"][0]["geo_name_tract"]
        parid_geo = {
            "parid": parid,
            "latitude": latitude,
            "longitude": longitude,
            "census_tract": census_tract
        }
        list_parid_geo.append(parid_geo)
    except (OSError, ValueError, KeyError, IndexError, TypeError) as exc:
        # Narrowed from a bare `except:` (which would also swallow
        # KeyboardInterrupt/SystemExit) and report why the record failed.
        print("%s: %s" % (parid, exc))

# aggregate all parids
parid_geo_df = pd.DataFrame(list_parid_geo)
parid_geo_df.to_csv("../data/parid_geo.csv", index=False)
|
from io import StringIO
from pathlib import Path
from sys import stdout
from tempfile import TemporaryDirectory
import inspect
import subprocess
from .argument_parser import get_args
from .file_server import run_file_server_thread
from .j2 import j2_ctx, j2_path
from .merger_config import merger
from .webhook_server import run_webhook_server_thread
from .yaml_parser import yaml
def _get_paths(config_yml_file, args, work_dir, variant):
    """Create and collect every directory/file path used by one variant build.

    The output directories are created idempotently; the per-variant build
    directory is created fresh inside the temporary work dir.

    Returns:
        A dict mapping logical names to Path objects.
    """
    output_dir = Path("output")
    output_dir.mkdir(exist_ok=True)
    ova_output_dir = output_dir.joinpath("ova")
    ova_output_dir.mkdir(exist_ok=True)
    box_output_dir = output_dir.joinpath("box")
    box_output_dir.mkdir(exist_ok=True)
    files_dir = Path("files").absolute()
    scripts_dir = Path("scripts").absolute()
    work_dir = Path(work_dir)
    build_dir = work_dir.joinpath(variant)
    # No exist_ok here: each variant must get a brand-new build directory.
    build_dir.mkdir()
    if args.debug:
        print(f"output_dir: {output_dir}")
        print(f"ova_output_dir: {ova_output_dir}")
        print(f"box_output_dir: {box_output_dir}")
        print(f"files_dir: {files_dir}")
        print(f"scripts_dir: {scripts_dir}")
        print(f"config_yml_file: {config_yml_file}")
        print(f"work_dir: {work_dir}")
        print(f"build_dir: {build_dir}")
    return {
        "output_dir": output_dir,
        "ova_output_dir": ova_output_dir,
        "box_output_dir": box_output_dir,
        "files_dir": files_dir,
        "scripts_dir": scripts_dir,
        "config_yml_file": config_yml_file,
        "work_dir": work_dir,
        "build_dir": build_dir,
    }
def _get_services(args):
    """Start the webhook and file server threads and return their URLs as seen from the guest VM."""
    # The host is accessible at 10.0.2.2 as the default gateway when using NAT in VirtualBox
    # See https://www.virtualbox.org/manual/UserManual.html#network_nat
    default_gateway_ip = "10.0.2.2"
    webhook_server_address = run_webhook_server_thread(args.debug)
    file_server_address = run_file_server_thread(args.debug)
    # [1] is the ephemeral port of each (host, port) server address.
    return {
        "webhook_server_url": f"http://{default_gateway_ip}:{webhook_server_address[1]}",
        "file_server_url": f"http://{default_gateway_ip}:{file_server_address[1]}",
    }
def _get_context(args, paths, variant, services):
    """Build the fully merged Jinja2 context for one variant.

    Merge order matters (later merges win): global defaults -> distro
    defaults -> distro variant overrides -> release -> variant ->
    optional vagrant section -> extras config file -> per-variant CLI extras.
    """
    config = yaml.load(paths["config_yml_file"])
    context = {
        "variant": variant,
        "args": {k: getattr(args, k) for k in vars(args)},
        "paths": {k: v.as_posix() for k, v in paths.items()},
        "services": services,
    }
    merger.merge(context, config["defaults"])
    distro_config = config["distros"][args.distro]
    merger.merge(context, distro_config["defaults"])
    if "variants" in distro_config and variant in distro_config["variants"]:
        merger.merge(context, distro_config["variants"][variant])
    merger.merge(context, distro_config[args.release])
    merger.merge(context, config["variants"][variant])
    if args.vagrant:
        merger.merge(context, config["vagrant"])
    extras_config_file = getattr(args, f"extras_{variant}_config_file")
    if extras_config_file:
        with open(extras_config_file) as f:
            merger.merge(context, yaml.load(f))
    # CLI-provided extra config objects for this variant, merged last.
    for member in inspect.getmembers(args):
        if member[0] == f"extra_{variant}_config" and member[1]:
            if args.debug:
                print(member)
            for extra in member[1]:
                merger.merge(context, extra)
    # Render in place any context fields declared as Jinja2 templates.
    for key in context:
        if key in config["j2_template_fields"]:
            j2_ctx(context, key)
    if args.debug:
        yaml.dump(context, stdout)
    return context
def _get_template(args, paths, context):
    """Render the packer Jinja2 template named by the context and return it as a string."""
    packer_j2_file = Path(context["packer_j2_name"])
    template = j2_path(context, packer_j2_file)
    if args.debug:
        print(template)
    return template
def _build(args, paths, context, template):
    """Write the rendered packer template (plus optional preseed / cloud-init
    files) into the build dir and run `packer build` unless --dry-run.
    """
    packer_template_json_file = paths["build_dir"].joinpath("packer.json")
    with packer_template_json_file.open("w+") as f:
        f.write(template)
    if "preseed" in context:
        # Debian-installer style preseed file.
        preseed_cfg_file = paths["build_dir"].joinpath("preseed.cfg")
        with preseed_cfg_file.open("w+") as f:
            f.write(context["preseed"])
    if "cloud-init" in context:
        # cloud-init NoCloud seed files; each is dumped as YAML only when
        # the corresponding section is non-empty.
        user_data_file = paths["build_dir"].joinpath("user-data")
        meta_data_file = paths["build_dir"].joinpath("meta-data")
        with user_data_file.open("w+") as f:
            user_data = context["cloud-init"]["user-data"]
            if user_data:
                with StringIO() as s:
                    yaml.dump(user_data, s)
                    f.write(s.getvalue().lstrip())
        with meta_data_file.open("w+") as f:
            meta_data = context["cloud-init"]["meta-data"]
            if meta_data:
                with StringIO() as s:
                    yaml.dump(meta_data, s)
                    f.write(s.getvalue().lstrip())
    if not args.dry_run:
        subprocess.run(
            [
                "packer",
                "build",
                "-var",
                f'name={"vagrant-" if args.vagrant else ""}{args.distro}-{args.release}-{context["variant"]}',
                packer_template_json_file,
            ],
            check=True,
        )
def _unregister_root(args):
    """Delete the intermediate "root" VM from VirtualBox.

    Best effort: no check=True, so a failure to unregister is ignored
    (NOTE(review): confirm that is intentional).
    """
    subprocess.run(
        [
            "VBoxManage",
            "unregistervm",
            f'packer-{"vagrant-" if args.vagrant else ""}{args.distro}-{args.release}-root',
            "--delete",
        ]
    )
def main():
    """Entry point: parse args, start helper services, build each requested variant."""
    config_yml_file = Path("config.yml")
    with config_yml_file.open() as f:
        config = yaml.load(f)
    # All per-variant build dirs live inside one temporary work dir.
    with TemporaryDirectory() as work_dir:
        args = get_args(config)
        services = _get_services(args)
        for variant in args.variants:
            paths = _get_paths(config_yml_file, args, work_dir, variant)
            context = _get_context(args, paths, variant, services)
            template = _get_template(args, paths, context)
            _build(args, paths, context, template)
        # Clean up the intermediate root VM when requested.
        if not args.dry_run and "root" in args.variants and args.unregister_root:
            _unregister_root(args)


if __name__ == "__main__":
    main()
|
class Person:
    """A private individual with a split full name and a phone-number map."""

    def __init__(self, name, second_name, family_name, phones_numbers):
        self.name = name
        self.second_name = second_name
        self.family_name = family_name
        self.phones_numbers = phones_numbers

    def get_phone(self):
        """Return the private phone number, or None when absent."""
        return self.phones_numbers.get('private')

    def get_name(self):
        """Return the full name: family name, first name, patronymic."""
        return f'{self.family_name} {self.name} {self.second_name}'

    def get_work_phone(self):
        """Return the work phone number, or None when absent."""
        return self.phones_numbers.get('work')

    def get_sms_text(self):
        """Return the promotional SMS text addressed to an individual."""
        return (f'Уважаемый {self.name} {self.second_name}! Примите участие в нашем '
                f'беспроигрышном конкурсе для физических лиц')
class Company:
    """A legal entity with its own phone map and a roster of employees."""

    def __init__(self, name_of_company, type_of_company, phones_numbers, *all_workers):
        self.name_of_company = name_of_company
        self.type_of_company = type_of_company
        self.phones_numbers = phones_numbers
        self.all_workers = all_workers

    def get_phone(self):
        """Return the contact phone if present, else the first employee work phone, else None."""
        if 'contact' in self.phones_numbers:
            return self.phones_numbers['contact']
        return next(
            (w.get_work_phone() for w in self.all_workers if w.get_work_phone() is not None),
            None,
        )

    def get_name(self):
        """Return the company name."""
        return self.name_of_company

    def get_sms_text(self):
        """Return the promotional SMS text addressed to a company."""
        return (f'Для компании {self.name_of_company} есть супер предложение! '
                f'Примите участие в нашем беспроигрышном конкурсе для {self.type_of_company}')
def send_sms(*args):
    """Print one dispatch line per recipient: an SMS line when a phone number
    is known, otherwise a failure notice with the recipient's name."""
    for recipient in args:
        phone = recipient.get_phone()
        if phone is None:
            print(f'Не удалось отправить сообщение абоненту: {recipient.get_name()}')
        else:
            print(f'Отправлено СМС на номер {phone} с текстом: {recipient.get_sms_text()}')
import ast
from datetime import datetime
import matplotlib.dates as mdates
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import sys
import tensorflow as tf
tf.compat.v1.enable_eager_execution() # Remove when switching to tf2
from constants import nb_class
from tracking import get_dataframes
pd.plotting.register_matplotlib_converters()
###############################
# Methods for data formatting #
###############################
def order_by_label(df, key, num_classes=None):
    """Group the values of df[key] by the "label" column.

    Args:
        df: DataFrame with at least a "label" column and a `key` column.
        key: Name of the column whose values are collected.
        num_classes: Number of label categories. Defaults to the module-level
            `nb_class`, preserving the original behavior; passing it explicitly
            makes the function usable with any number of classes.

    Returns:
        A list with one entry per label: the numpy array of `key` values for
        that label, or a plain empty list when the label has no rows.
    """
    if num_classes is None:
        num_classes = nb_class
    output = []
    for label in range(num_classes):
        label_output = df[df["label"] == label][key].values
        if len(label_output) == 0:
            # Keep the original substitution of an empty list for empty arrays
            # (matplotlib's violinplot handles the list form).
            label_output = []
        output.append(label_output)
    return output
def get_wrong_prob(row):
    """Return the highest probability assigned to any class other than the true label."""
    others = [p for i, p in enumerate(row["probs"]) if i != row["label"]]
    return np.max(others)
#########################################
# Initializing dataframes and variables #
#########################################
df = get_dataframes()
# NOTE(review): eval() on stored logit strings is unsafe for untrusted data;
# ast.literal_eval would be safer (the `ast` import already exists at the top).
df["probs"] = df["logits"].map(lambda logits: tf.nn.softmax(eval(logits)).numpy().tolist())
# Probability the model assigned to the true label of each row.
df["label_prob"] = df.apply(lambda row: row["probs"][row["label"]], axis=1)
# Highest probability assigned to any wrong label.
df["wrong_prob"] = df.apply(get_wrong_prob, axis=1)
# Last 10k rows vs the 10k rows just before them, for trend comparison.
df_tail = df.tail(10000)
df_pre_tail = df.tail(20000).head(10000)

############
# Plotting #
############

# 3x3 grid: rows = right-prob / entropy / wrong-prob,
# columns = over iterations / density / per-label density.
plt.figure(figsize=(18, 12))

# Prob of label over iterations
plt.subplot(3, 3, 1)
label_probs = df["label_prob"].values
plt.plot(label_probs)
plt.ylim(0., 1.)
plt.title("Right Prob")

# Prob of label density
plt.subplot(3, 3, 2)
plt.violinplot([df_tail["label_prob"], df_pre_tail["label_prob"]])
plt.xticks([1, 2], ["Tail", "PreTail"])
plt.ylim(0., 1.)
plt.title("Right Prob density")

# Prob of label density per label
plt.subplot(3, 3, 3)
prob_per_label = order_by_label(df_tail, "label_prob")
plt.violinplot(prob_per_label)
plt.xticks([1, 2, 3, 4, 5, 6], [0, 1, 2, 3, 4, 5])
plt.ylim(0., 1.)
plt.title("Right Prob density per label - Tail")

# Entropy over iterations
plt.subplot(3, 3, 4)
label_probs = df["entropy"].values
plt.plot(label_probs, color="orange")
plt.title("Entropy")

# Entropy density
plt.subplot(3, 3, 5)
parts = plt.violinplot([df_tail["entropy"], df_pre_tail["entropy"]])
for pb in parts["bodies"]:
    pb.set_facecolor("orange")
parts["cmins"].set_color("orange")
parts["cmaxes"].set_color("orange")
parts["cbars"].set_color("orange")
plt.xticks([1, 2], ["Tail", "PreTail"])
plt.title("Entropy density")

# Entropy per label
plt.subplot(3, 3, 6)
entropy_per_label = order_by_label(df_tail, "entropy")
parts = plt.violinplot(entropy_per_label)
for pb in parts["bodies"]:
    pb.set_facecolor("orange")
parts["cmins"].set_color("orange")
parts["cmaxes"].set_color("orange")
parts["cbars"].set_color("orange")
plt.xticks([1, 2, 3, 4, 5, 6], [0, 1, 2, 3, 4, 5])
plt.title("Entropy density per label - Tail")

# Wrong Prob over iterations
plt.subplot(3, 3, 7)
label_probs = df["wrong_prob"].values
plt.plot(label_probs, color="green")
plt.ylim(0., 1.)
plt.title("Wrong Prob")

# Wrong Prob density
plt.subplot(3, 3, 8)
parts = plt.violinplot([df_tail["wrong_prob"], df_pre_tail["wrong_prob"]])
for pb in parts["bodies"]:
    pb.set_facecolor("green")
parts["cmins"].set_color("green")
parts["cmaxes"].set_color("green")
parts["cbars"].set_color("green")
plt.xticks([1, 2], ["Tail", "PreTail"])
plt.ylim(0., 1.)
plt.title("Wrong Prob density")

# Wrong Prob density per label
plt.subplot(3, 3, 9)
wrong_prob_per_label = order_by_label(df_tail, "wrong_prob")
parts = plt.violinplot(wrong_prob_per_label)
for pb in parts["bodies"]:
    pb.set_facecolor("green")
parts["cmins"].set_color("green")
parts["cmaxes"].set_color("green")
parts["cbars"].set_color("green")
plt.xticks([1, 2, 3, 4, 5, 6], [0, 1, 2, 3, 4, 5])
plt.ylim(0., 1.)
plt.title("Wrong Prob density per label - Tail")

plt.show()
|
import datetime
import pytz
from django.db import models
from catalog.models import TLE
class CatalogEntry(models.Model):
    """A tracked space object (satellite, rocket body or debris) keyed by its NORAD catalog number."""

    class Meta:
        verbose_name_plural = "Catalog entries"

    # COSPAR/international designator, e.g. "1998-067A".
    international_designator = models.CharField(
        max_length=11,
        unique=True
    )
    # NORAD catalog number; used as the primary key.
    norad_catalog_number = models.CharField(
        max_length=5,
        primary_key=True
    )
    names = models.CharField(
        max_length=255,
        blank=True,
        null=True
    )
    has_payload = models.BooleanField(
        default=False
    )
    operational_status = models.ForeignKey(
        "OperationalStatus",
        models.SET_NULL,
        blank=True,
        null=True
    )
    owner = models.ForeignKey(
        "Source",
        models.SET_NULL,
        blank=True,
        null=True
    )
    launch_date = models.DateTimeField(
        blank=True,
        null=True
    )
    launch_site = models.ForeignKey(
        "LaunchSite",
        models.SET_NULL,
        blank=True,
        null=True
    )
    decay_date = models.DateTimeField(
        blank=True,
        null=True
    )
    # Orbital period -- presumably in minutes; TODO confirm unit against the data source.
    orbital_period = models.DecimalField(
        max_digits=6,
        decimal_places=1,
        blank=True,
        null=True
    )
    # Inclination -- presumably in degrees; TODO confirm unit against the data source.
    inclination = models.DecimalField(
        max_digits=4,
        decimal_places=1,
        blank=True,
        null=True
    )
    apogee = models.PositiveIntegerField(
        blank=True,
        null=True
    )
    perigee = models.PositiveIntegerField(
        blank=True,
        null=True
    )
    radar_cross_section = models.DecimalField(
        max_digits=7,
        decimal_places=4,
        blank=True,
        null=True
    )
    orbital_status = models.ForeignKey(
        "OrbitalStatus",
        models.SET_NULL,
        blank=True,
        null=True
    )
    added = models.DateTimeField(
        null=True
    )
    updated = models.DateTimeField(
        null=True
    )

    def __str__(self):
        return self.international_designator
|
from __future__ import division, print_function, absolute_import
import tflearn
import tensorflow as tf
import numpy as np
# Residual blocks
# 32 layers: n=5, 56 layers: n=9, 110 layers: n=18
# n = 5 accuracy 75%
# n = 3 acc 80%
# n = 2 acc =82%
# n = 1 acc = 62%
# Number of ResNeXt blocks per stage; see the accuracy notes above.
n = 2

# solve OSError
import win_unicode_console
win_unicode_console.enable()

# Data loading
from tflearn.data_utils import image_preloader

# Load 32x32 grayscale images from class-named subfolders, normalized,
# with one-hot (categorical) labels.
X, Y = image_preloader(target_path = r'C:/Users/Administrator/Desktop/all_test/lungall',
                       image_shape = (32, 32),mode ='folder',normalize=True,
                       grayscale=True, categorical_labels=True)
def my_func(x):
    """Preprocess a batch of 32x32 grayscale samples into 3-channel NHWC.

    Reshapes the input to (N, 32, 32, 1) and tiles the single channel three
    times so the network's RGB-shaped input layer accepts grayscale data.
    """
    batch = np.array(x).reshape((-1, 32, 32, 1))
    # Tiling the lone channel is equivalent to the original pair of
    # concatenations: all three output channels are identical copies.
    return np.repeat(batch, 3, axis=3)
# Apply the grayscale->3-channel expansion to every batch at feed time.
img_prep = tflearn.ImagePreprocessing()
img_prep.add_custom_preprocessing(my_func)
# Building Residual Network
net = tflearn.input_data(shape=[None, 32, 32, 3], data_preprocessing=img_prep)
net = tflearn.conv_2d(net, 16, 3, regularizer='L2', weight_decay=0.0001)
# Three ResNeXt stages; each downsample block halves spatial size.
net = tflearn.resnext_block(net, n, 16, 32)
net = tflearn.resnext_block(net, 1, 32, 32, downsample=True)
net = tflearn.resnext_block(net, n-1, 32, 32)
net = tflearn.resnext_block(net, 1, 64, 32, downsample=True)
net = tflearn.resnext_block(net, n-1, 64, 32)
net = tflearn.batch_normalization(net)
net = tflearn.activation(net, 'relu')
net = tflearn.global_avg_pool(net)
# Regression
# 3-way softmax classifier head.
loss = tflearn.fully_connected(net, 3, activation='softmax')
opt = tflearn.Momentum(0.1, lr_decay=0.1, decay_step=32000, staircase=True)
network = tflearn.regression(loss, optimizer=opt, learning_rate=0.01,
                             loss='categorical_crossentropy')
# Training
model = tflearn.DNN(network, checkpoint_path='C:/Users/Administrator/Desktop/Resnet_test/model_resnext',
                    max_checkpoints=1, tensorboard_verbose=3,
                    tensorboard_dir='C:/Users/Administrator/Desktop/Resnet_test',
                    best_checkpoint_path='C:/Users/Administrator/Desktop/Resnet_test/model_resnet')
model.fit(X, Y, n_epoch=70, validation_set=0.1,
          snapshot_epoch=True, snapshot_step=200,
          show_metric=True, batch_size=64, shuffle=True)
model.save('C:/Users/Administrator/Desktop/Resnet_test/resnet_model')
#predict
# Evaluate the trained model on a held-out folder, in batches to bound memory.
X_test, Y_test = image_preloader(target_path = r'C:/Users/Administrator/Desktop/all_test/test',
                                 image_shape = (32, 32), mode ='folder',normalize=True,
                                 grayscale=True, categorical_labels=True)
X_test_array = np.array(X_test)
Y_test_array = np.array(Y_test)
num_test = len(Y_test_array)
groups = 20
# FIX: derive the batch size from `groups` instead of repeating the literal
# 20 (the two had drifted apart in intent); floor division also avoids the
# float round-trip of int(num_test / 20).
image_per_group = num_test // groups
acc_all = 0.0
for g in range(groups):
    # Slice one batch of images and matching one-hot labels.
    X_test_mini = X_test_array[g*image_per_group:(g+1)*image_per_group,:,:]
    Y_test_mini = Y_test_array[g*image_per_group:(g+1)*image_per_group]
    probabilities = model.predict(X_test_mini)
    predict_label = model.predict_label(X_test_mini)
    print('group %d' % g)
    print(probabilities)
    print(predict_label)
    # predict_label rows are ordered by score; column 0 is the top-1 class.
    Y_predict = predict_label[:,0]
    Y_test_mini = Y_test_mini.argmax(axis = 1)
    acc_mini = (Y_predict == Y_test_mini).mean()
    print('group %d accuracy: %f' % (g, acc_mini))
    acc_all += acc_mini
# Mean of per-group accuracies (equal-weighted across groups).
acc_all = acc_all / groups
print('test accuracy: %f'% acc_all)
|
# Copyright 2022 Sipeed Technology Co., Ltd. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os,sys
import numpy as np
import tensorflow as tf
import time
from PIL import Image
from os import environ
# environ['CUDA_VISIBLE_DEVICES'] = '0'
# norm_type: "0to1", "n1to1"
def h5_to_tflite(h5_name, tflite_name, is_quant, quant_dir, norm_type = None, mean = 0.0, std = 0.0):
    """Convert a Keras .h5 model to a .tflite file.

    Args:
        h5_name: path to the input Keras HDF5 model.
        tflite_name: path of the .tflite file to write.
        is_quant: 0 for plain float conversion, otherwise full int8 quantization.
        quant_dir: directory of representative images (used only when quantizing).
        norm_type: "0to1" (x/255) or "n1to1" ((x-128)/128); when None the
            (x - mean) / std normalization is applied instead.
        mean, std: normalization constants used when norm_type is None.
            NOTE(review): std defaults to 0.0, which divides by zero if a
            caller quantizes without norm_type — confirm callers always set it.
    """
    def representative_data_gen():
        # Yield one preprocessed image per file for quantization calibration.
        files = os.listdir(quant_dir)
        valid_files = []
        valid_format = [".jpg", ".jpeg", ".png", ".bmp", ".ppm", ".pgm"]
        for name in files:
            ext = os.path.splitext(name)[1].lower()
            if ext not in valid_format:
                continue
            valid_files.append(os.path.join(quant_dir, name))
        if len(valid_files) == 0:
            # FIX: the format string had only one "{}" placeholder, so the
            # second argument (the supported-format list) was silently dropped.
            raise Exception("No valid files in quant_input dir {}, support format: {}".format(quant_dir, valid_format))
        for path in valid_files:
            img = Image.open(path)
            img = np.array(img).astype(np.float32)
            shape = img.shape
            if len(shape) == 2:
                shape = (1, shape[0], shape[1], 1)   # grayscale -> NHWC, C=1
            else:
                shape = (1, shape[0], shape[1], shape[2])
            img = img.reshape(shape)
            if norm_type is not None:
                if norm_type == "0to1":
                    img = img/255.0
                elif norm_type == "n1to1":
                    img = (img-128)/128
                else:
                    raise Exception("Unsupported norm_type: {}".format(norm_type))
            else:
                img = (img - mean) / std
            yield [img]
    if is_quant==0:
        tf.compat.v1.disable_eager_execution()
        converter = tf.compat.v1.lite.TFLiteConverter.from_keras_model_file(h5_name)
        tflite_model = converter.convert()
        # FIX: close the output file deterministically rather than relying on GC.
        with open(tflite_name, "wb") as out_file:
            out_file.write(tflite_model)
        print("Done")
    else:
        quant_type = tf.int8 #tf2 only support int8 quant
        converter = tf.compat.v1.lite.TFLiteConverter.from_keras_model_file(h5_name)
        converter._experimental_disable_per_channel = False #True
        converter.optimizations = [tf.lite.Optimize.DEFAULT]
        converter.representative_dataset = representative_data_gen
        # Ensure that if any ops can't be quantized, the converter throws an error
        converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
        # Set the input and output tensors to int8 (APIs added in r2.3)
        converter.inference_input_type = quant_type
        converter.inference_output_type = quant_type
        start_time = time.time()
        tflite_model_quant = converter.convert()
        used_time = time.time() - start_time
        with open(tflite_name, "wb") as out_file:
            out_file.write(tflite_model_quant)
        print('Done, quant used time:{}'.format(used_time))
def print_usage():
    """Print command-line usage for this converter script."""
    for usage_line in (
        "Usage: python3 h5_to_tflite.py h5_name tflite_name is_quant quant_dir norm_type",
        "    norm_type: 0to1, n1to1",
    ):
        print(usage_line)
# Example invocations:
# python3 h5_to_tflite.py h5/mnist_dw.h5 tflite/mnist_dw_f.tflite 0
# python3 h5_to_tflite.py h5/mnist_dw.h5 tflite/mnist_dw_q.tflite 1 quant_img_mnist/ 0to1
# python3 h5_to_tflite.py h5/mbnet96_0.125.h5 tflite/mbnet96_0.125_q.tflite 1 quant_img96/ 0to1
if __name__ == '__main__':
    # Accept either 3 args (float conversion) or 5 args (quantized conversion).
    argc = len(sys.argv)
    if argc not in (4, 6):
        print_usage()
        exit()
    h5_name, tflite_name = sys.argv[1], sys.argv[2]
    is_quant = int(sys.argv[3])
    quant_dir = None
    norm_type = None
    if is_quant == 1:
        # Quantization additionally requires the calibration dir and norm type.
        if argc != 6:
            print_usage()
            exit()
        quant_dir, norm_type = sys.argv[4], sys.argv[5]
    h5_to_tflite(h5_name, tflite_name, is_quant, quant_dir, norm_type = norm_type)
|
import sys
from PyQt4 import QtGui
from Presenter import Presenter
from Model import Model
def main():
    """Create the Qt application and MVP stack, then run the event loop.

    Returns the Qt event-loop exit code so the process exit status reflects it.
    """
    app = QtGui.QApplication(sys.argv)
    model = Model()
    # Keep a reference so the Presenter (and its window) isn't garbage-collected.
    win = Presenter(model)
    # FIX: the exit code of app.exec_() was discarded (main returned None), so
    # sys.exit(main()) always exited with status 0; propagate it instead.
    return app.exec_()
if __name__ == '__main__':
    sys.exit(main())
|
# Demonstrates cooperative constructor chaining via super() through a
# three-level single-inheritance hierarchy (C -> B -> A).
class A:
    def __init__(self):
        super().__init__()
        print("I am default const of A")
class B(A):
    def __init__(self):
        super().__init__() # calling super class const
        print("I am default Const of B")
class C(B):
    def __init__(self):
        super().__init__() # calling super class const
        print("I am default Const of C")
#----------
# Instantiating C prints A's, then B's, then C's message (super() chain order).
C()
|
#
# Given a list of denominations and a total, return the smallest number of coins
# that will add up to that total
#
# hint. Assume sorted largest to smallest
#
def determine_coins(target, denominations):
    """Greedy coin selection.

    Repeatedly sweeps the denomination list (assumed sorted largest to
    smallest), taking each coin at most once per sweep while it fits in the
    remainder. Returns the list of coins chosen (empty for target <= 0).

    Raises ValueError when no denomination fits the remaining amount —
    previously that case looped forever (e.g. target=8, denominations=[6,5,3]).
    """
    left = target
    coins = []
    while (left > 0):
        made_progress = False
        for denomination in denominations:
            if left >= denomination:
                chosen = denomination
                coins.append(chosen)
                left -= chosen
                made_progress = True
        if not made_progress:
            # FIX: without this guard the while loop never terminated when the
            # remainder could not be reduced by any denomination.
            raise ValueError("cannot make %r from denominations %r" % (target, denominations))
    return coins
def find_shortest(target, denominations):
possibilities = []
while denominations:
possibilities.append(determine_coins(target, denominations))
denominations = denominations[1:]
shortest_possibility = None
for possibility in possibilities:
if not shortest_possibility or len(possibility) < len(shortest_possibility):
shortest_possibility = possibility
print shortest_possibility
# Tiny print-based regression check: compares determine_coins() output with
# the expected coin list and reports PASS!/FAIL!!! (Python 2 print syntax).
def test(target, denominations, expected):
    result = determine_coins(target, denominations)
    print "checking if coins(%s, %s) is %s" % (target, denominations, expected)
    print "result of coins(%s,%s) is %s" % (target, denominations, result)
    if expected == result:
        print 'PASS!'
    else:
        print 'FAIL!!!'
        # assert False
# Exercise the greedy solver. The final test case (8 from [6, 5, 3, 1])
# deliberately demonstrates where plain greedy fails: it expects [5, 3] but
# greedy yields [6, 1, 1]; find_shortest then recovers the optimum by trying
# every suffix of the denomination list.
test(1, [1], [1])
test(2, [1], [1, 1])
test(3, [1], [1, 1, 1])
test(1, [5, 1], [1])
test(5, [5, 1], [5])
test(6, [5, 1], [5, 1])
test(10, [8, 7, 2, 1], [8, 2])
test(8, [6, 5, 3, 1], [5, 3])
find_shortest(8, [6, 5, 3, 1])
|
#!/usr/bin/python3
# Read the SQL dump file.
# FIX: the file was opened "wb" (write-binary) and then read, which raises
# io.UnsupportedOperation and also truncates the dump on open. Open it
# read-binary instead, and use a context manager so the handle is closed.
with open("/home/netguru/Dump20190320.sql", "rb") as fo:
    fo.read()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Perform ElasticSearch term aggregations over random sets of documents.
This command requires a set of document ids to be available in the database.
To store these docids, run management command gatherdocids
"""
import logging
from django.core.management.base import BaseCommand
from django.conf import settings
from texcavator.utils import daterange2dates
from services.es import multiple_document_word_cloud
from services.models import DocID
logger = logging.getLogger(__name__)
class Command(BaseCommand):
args = '<#-documents, #-repetitions>'
help = 'Perform ElasticSearch term aggregations. #-documents is the ' \
'number of documents that are aggregated per query. #-repetitions' \
' is the number of number of random document sets that are ' \
'aggregrated.'
def handle(self, *args, **options):
query_size = 100000
n_repetitions = 10
if len(args) > 0:
query_size = int(args[0])
if len(args) > 1:
n_repetitions = int(args[1])
response_times = []
for repetition in range(n_repetitions):
# select random documents
document_set = DocID.objects.order_by('?')[0:query_size]
doc_ids = [doc.doc_id for doc in document_set]
dates = daterange2dates(settings.TEXCAVATOR_DATE_RANGE)
aggr_resp = multiple_document_word_cloud(settings.ES_INDEX,
settings.ES_DOCTYPE,
None,
dates[0],
[],
[],
doc_ids)
response_times.append(int(aggr_resp.get('took')))
self.stdout.write(str(aggr_resp.get('took')))
self.stdout.flush()
avg = float(sum(response_times)/len(response_times))
print 'Average response time for aggregating over {num} documents: ' \
'{avg} miliseconds'.format(num=query_size, avg=avg)
|
#coding=utf8
# Default values used across the shop app (category, statuses, paging, thresholds).
default_value = {
    'category': '未分类',
    'sku_status': '正常',
    'status': '上架',
    'per_page': 20,
    'urgent_threshold': 30,
    'normal_begin': 3,
    'expiry_days': 1,
    'good_sale_threshold': 10, # sales count needed for a "good seller" rating
}
# Clothing size sort order (XS smallest .. XXXXL largest).
size_order = {'xs': 0, 's': 1, 'm': 2, 'l': 3, 'xl': 4, 'xxl': 5, 'xxxl': 6, 'xxxxl': 7}
# Category names (uncategorized, summer wear, spring/autumn/winter wear,
# accessories, postage, other).
category_list = ('未分类', '夏装', '春秋冬装', '饰品', '邮费', '其它')
# Stock (SKU) status names: normal, low, out of stock, sold out.
sku_status_list = ('正常', '紧张', '缺货', '卖空')
# Listing status names: on sale, off sale, recycle bin.
status_list = ('上架', '下架', '回收站')
# Searchable fields: (display label, model column).
query_columns_dict = (("货号", 'code'), ("标题", 'subject'), ("品牌", 'brand'), ("面料", 'fabric'), ("成分", 'fabric_content'), ("图案", 'pattern'), ("备注", 'remarks'))
# Sortable fields: (display label, model column).
sort_columns_dict = (('标题', 'subject'), ('分类', 'category'), ('货号', 'code'), ('品牌', 'brand'), ('图案', 'pattern'), ('面料', 'fabric'), ('成分', 'fabric_content'), ('含量', 'fabric_scale'), ('库存', 'sku_status'), ('销量', 'sale_count_diff'), ('状态', 'status'))
# Status -> Bootstrap contextual background-color class.
color = {
    '紧张': 'info',
    '缺货': 'warning',
    '卖空': 'danger',
    '下架': 'warning',
    '回收站': 'danger',
}
|
# pyserial
# Test driver: continuously sends fixed-width framed messages over a serial
# port. Each message is <5-char frame number><10-char angle><1-char invade
# flag><1-char angle-string length>, followed by a newline, cp949-encoded.
import numpy as np
import serial
import time
import math
ser = serial.Serial("/dev/ttyUSB1")
ser.baudrate = 115200
frame_num = 0
target_time = 0
target_angle = 5.36
eol = '\n'
while 1:
    # Alternate the sign of the angle every frame.
    target_angle = -target_angle
    # Cycle the "invade" flag: left / right / none.
    if frame_num % 3 == 0:
        invade = "l"
    elif frame_num % 3 == 1:
        invade = "r"
    elif frame_num % 3 == 2:
        invade = "n"
    time.sleep(0.5)
    # Zero-pad the frame number to exactly 5 characters.
    frame_num_write = "0000" + str(frame_num)
    frame_num_write = frame_num_write[-5:]
    target_angle = round(target_angle, 5)
    # Zero-pad the angle string to exactly 10 characters; the actual length
    # of str(target_angle) is appended so the receiver can strip the padding.
    target_angle_write = "0000000000" + str(target_angle)
    target_angle_write = target_angle_write[-10:]
    print(len(str(target_angle)))
    target_angle_len = str(len(str(target_angle)))
    message = frame_num_write + target_angle_write + invade + target_angle_len
    # frame_num_write = "frame" + str(frame_num)
    # target_angle_write = str(target_angle)
    # ser.write(frame_num_write.encode("cp949"))
    # ser.write(eol.encode("cp949"))
    # time.sleep(0.001)
    # ser.write(target_angle_write.encode("cp949"))
    # ser.write(eol.encode("cp949"))
    # time.sleep(0.001)
    ser.write(message.encode("cp949"))
    ser.write(eol.encode("cp949"))
    time.sleep(0.001)
    frame_num += 1
|
from aiogram import types

# Agility heroes, two per keyboard row; text and callback_data are identical.
# NOTE(review): "Bounti Hunter" looks like a typo for "Bounty Hunter", but the
# callback handlers may match this exact string — confirm before renaming.
_AGILITY_HERO_ROWS = (
    ("Anti-mage", "Drow Ranger"),
    ("Juggernaut", "Vengeful Spirit"),
    ("Phantom Lancer", "Morphling"),
    ("Riki", "Lone Druid"),
    ("Naga Siren", "Ursa"),
    ("Templar Assassin", "Ember Spirit"),
    ("Bounti Hunter", "Sniper"),
    ("Gyrocopter", "Luna"),
    ("Troll Warlord", "Faceless Void"),
    ("Phantom Assassin", "Razor"),
    ("Clinkz", "Shadow Fiend"),
    ("Venomancer", "Bloodseeker"),
    ("Viper", "Nyx Assassin"),
    ("Slark", "Weaver"),
    ("Spectre", "Meepo"),
    ("Broodmother", "Medusa"),
    ("Terrorblade", "Arc Warden"),
    ("Monkey King", "Pangolier"),
)

_keyboard_rows = [
    [types.InlineKeyboardButton(text=hero, callback_data=hero) for hero in row]
    for row in _AGILITY_HERO_ROWS
]
# Final row: the "back" navigation button (label differs from its callback).
_keyboard_rows.append(
    [types.InlineKeyboardButton(text="<-- Назад", callback_data="Назад2")]
)

agility = types.InlineKeyboardMarkup(inline_keyboard=_keyboard_rows)
# Hotel check-in simulation over 10 rooms (indices 0..9):
#   'L' -> guest takes the lowest-numbered free room,
#   'R' -> guest takes the highest-numbered free room,
#   digit -> the guest in that room checks out.
n = int(input())  # number of events; the string below is iterated directly
events = input()
rooms = [0] * 10
for event in events:
    if event == 'L':
        rooms[rooms.index(0)] = 1                        # leftmost free room
    elif event == 'R':
        rooms[len(rooms) - 1 - rooms[::-1].index(0)] = 1  # rightmost free room
    else:
        rooms[int(event)] = 0
print(''.join(str(occupied) for occupied in rooms))
|
# I pledge my honor I have abided by the Stevens honor system
# Accepts a list of numbers and modifies the list by squaring each entry
def recursive_square(list_of_numbers):
    """Return a new list with every entry of *list_of_numbers* squared.

    Implemented recursively: square the head, recurse on the tail.
    """
    # Base case: an empty list squares to an empty list.
    if len(list_of_numbers) == 0:
        return []
    head, tail = list_of_numbers[0], list_of_numbers[1:]
    return [head * head] + recursive_square(tail)
def main():
    """Prompt for a count and that many integers, then print their squares."""
    count = int(input("Enter the number of values: "))
    values = [int(input("Enter value individual value: ")) for _ in range(count)]
    print("The new list with squared elements is: ", recursive_square(values))
main()
|
import os
import logging
import datetime
from flask import Flask
import click
import requests
from BlockedFrontend.api import ApiClient, APIError
from BlockedFrontend.db import db_connect_single
from BlockedFrontend.utils import parse_timestamp
from BlockedFrontend.models import User,SavedList,Item
from NORM.exceptions import ObjectNotFound
# conn = None
# Flask application and API client setup; settings come from the default
# module, optionally overridden by the file named in BLOCKEDFRONTEND_SETTINGS.
app = Flask("BlockedFrontend")
app.config.from_object('BlockedFrontend.default_settings')
if 'BLOCKEDFRONTEND_SETTINGS' in os.environ:
    app.config.from_envvar('BLOCKEDFRONTEND_SETTINGS')
api = ApiClient(
    app.config['API_EMAIL'],
    app.config['API_SECRET']
    )
# Allow the API endpoint itself to be overridden from config.
if 'API' in app.config:
    api.API = app.config['API']
logging.basicConfig(
    level=logging.DEBUG if app.config['DEBUG'] else logging.INFO,
    datefmt="[%Y-%m-%dT%H:%M:%S]",
    format="%(asctime)s\t%(name)s\t%(levelname)s\t%(message)s"
    )
@app.cli.command()
def run_submit():
    """CLI: re-submit every URL on a front-page list to the backend test queue."""
    conn = db_connect_single()
    c = conn.cursor()
    c.execute("select distinct url from items \
        inner join savedlists on list_id = savedlists.id \
        where frontpage=true")
    for row in c:
        # Each submission is signed with the API secret over the url field.
        req = {
            'url': row['url'],
            }
        req['signature'] = api.sign(req, ['url'])
        data = api.POST('submit/url', req)
        logging.info("Submitted: %s, queued=%s", row['url'], data['queued'])
    c.close()
    conn.disconnect()
@app.cli.command()
@click.argument('count', default=200)
def run_update(count=200):
    """CLI: refresh blocked/reported status for the `count` least-recently
    checked public list items, deleting items the backend no longer knows."""
    conn = db_connect_single()
    c = conn.cursor()
    c2 = conn.cursor()
    # FIX: pass LIMIT as a bound parameter instead of concatenating str(count)
    # into the SQL text (consistent with the parameterized queries below).
    c.execute("select distinct url, last_checked from items inner join savedlists on list_id = savedlists.id \
        where public=true \
        order by last_checked nulls first limit %s", [count])
    # only evaluate based on test results from the last two weeks
    for row in c:
        try:
            data = api.status_url(row['url'])
            # decide if site is still blocked, for the purposes of frontend list selection
            blocked = any([ (x['status'] == 'blocked') for x in data['results']])
            networks = [x['network_id'] for x in data['results'] if x['status'] == 'blocked' ]
            reported = len(data['reports']) > 0
            logging.info("Status: %s, blocked=%s, reported=%s, networks=%s", row['url'], blocked, reported, networks)
            c2.execute("update items set blocked=%s, reported=%s, networks=%s, last_checked=now() where url=%s",
                [ blocked, reported, networks, row['url'] ])
        except APIError as exc:
            if 'UrlLookupError' in exc.args[0]:
                # URL no longer present on the backend?
                c2.execute("delete from items where url = %s", [row['url']])
            conn.commit()
    # FIX: successful updates were only persisted when a later APIError
    # happened to trigger the commit inside the except branch; commit the
    # whole batch here so a clean run is saved too.
    conn.commit()
    c.close()
    c2.close()
@app.cli.command()
def create_admin():
    """CLI: create the 'admin' user with a random password (no-op if present)."""
    conn = db_connect_single()
    try:
        _ = User.select_one(conn, username='admin')
        app.logger.info("User admin already exists")
        return
    except ObjectNotFound:
        # Expected on first run: no admin row yet, so create one below.
        pass
    user = User(conn)
    user.update({
        'username': 'admin',
        'email':'admin@localhost',
        'user_type':'admin',
        })
    # reset_password() generates and sets a random password, returning it
    # so it can be logged once for the operator.
    password = user.reset_password()
    user.store()
    conn.commit()
    app.logger.info("Created admin with password: %s", password)
@app.cli.command()
def create_mobile_inconsistency_lists():
    """CLI: build admin-owned lists of URLs whose mobile blocking is inconsistent.

    Creates one list per mobile network for URLs blocked only on that network,
    plus lists grouped by how many networks block a URL (for count > 1).
    """
    from NORM import Query
    conn = db_connect_single()
    lists = {}
    # One "blocked only on <network>" list per visible mobile ISP.
    q = Query(conn, "select * from public.isps where isp_type = 'mobile' and show_results=1",[])
    for row in q:
        lists[ row['name'] ] = SavedList.find_or_create(conn,
            ['name'],
            {
            'name': 'Mobile Inconsistency - blocked only on {0}'.format(row['name']),
            'username': 'admin',
            'public': False
            })
        lists[row['name']].store()
        conn.commit()
        app.logger.info("Set up list: %s", lists[row['name']]['name'])
    # URLs blocked on exactly one mobile network -> that network's list.
    q = Query(conn,
              """select urls.urlid, urls.url, urls.title, urls.last_reported,
              count(*), array_agg(network_name) as network_name
              from public.urls
              inner join public.url_latest_status uls using (urlid)
              inner join public.isps on isps.name = uls.network_name
              where uls.status = 'blocked' and isp_type = 'mobile' and show_results=1 and urls.status = 'ok'
              group by urlid, url, title, last_reported
              having count(*) = 1""", [])
    for row in q:
        item = Item(conn)
        item.update({
            'url': row['url'],
            'title': row['title'],
            'blocked': True,
            'reported': True if row['last_reported'] else False,
            'list_id': lists[ row['network_name'][0] ]['id']
            })
        item.store()
        conn.commit()
    # URLs blocked on several networks -> per-count "blocked on N networks" lists.
    q = Query(conn,
              """select urls.urlid, urls.url, urls.title, urls.last_reported,
              count(*) ct, array_agg(network_name) as network_name
              from public.urls
              inner join public.url_latest_status uls using (urlid)
              inner join public.isps on isps.name = uls.network_name
              where uls.status = 'blocked' and isp_type = 'mobile' and show_results=1 and urls.status = 'ok'
              group by urlid, url, title, last_reported
              having count(*) > 1""", [])
    for row in q:
        ls = SavedList.find_or_create(conn,
            ['name'],
            {
            'name': 'Mobile Inconsistency - blocked on {0} networks'.format(row['ct']),
            'username': 'admin',
            'public': False
            })
        ls.store()
        item = Item(conn)
        item.update({
            'url': row['url'],
            'title': row['title'],
            'blocked': True,
            'reported': True if row['last_reported'] else False,
            'list_id': ls['id']
            })
        item.store()
        conn.commit()
@app.cli.command()
#@click.argument('do_chunks', default=True)
def migrate_content(do_chunks=False, do_pages=False, do_networks=True):
    """CLI: copy chunks/pages/network descriptions from the legacy MODX CMS
    into Cockpit collections via its save API.

    FIX: dict.iteritems() (Python 2 only) replaced with .items(), which works
    on both Python 2 and 3 — iteritems raises AttributeError under Python 3.
    """
    import pprint
    from BlockedFrontend.remotecontent import RemoteContent,RemoteContentModX
    remote = RemoteContentModX(
        app.config['REMOTE_SRC_MODX'],
        app.config['REMOTE_AUTH_MODX'],
        app.config['CACHE_PATH'],
        False
        )
    if do_chunks:
        chunks = remote.get_content('chunks')
        pprint.pprint(chunks)
        for (k,v) in chunks.items():
            req = requests.post(app.config['COCKPIT_URL'] + '/api/collections/save/chunks',
                params={'token': app.config['COCKPIT_AUTH']},
                json={'data':{'name': k, 'content': v}},
                headers={'Content-type': 'application/json'}
                )
            app.logger.info("Created %s, ret: %s: %s", k, req.status_code, req.content)
            app.logger.info("Req: %s", req.request.body)
    if do_pages:
        page_elements = ['TextAreaOne','TextAreaTwo','TextAreaThree','TextAreaFour','TextAreaFive','TextAreaSix','mainContent','page_menu','banner_text','title']
        for page in app.config['REMOTE_PAGES'] + app.config.get('REMOTE_PAGES_MIGRATE',[]):
            remote_content = remote.get_content(page)
            pprint.pprint(remote_content)
            # Refuse to migrate a page with fields we don't know how to map.
            if set(remote_content.keys()) - set(page_elements):
                app.logger.error("Unknown keys: %s", set(remote_content.keys()) - set(page_elements))
                return 2
            data = {'name': page}
            data.update({k: remote_content.get(k,'') for k in page_elements})
            req = requests.post(app.config['COCKPIT_URL'] + '/api/collections/save/pages',
                params={'token': app.config['COCKPIT_AUTH']},
                json={'data': data},
                headers={'Content-type': 'application/json'}
                )
            app.logger.info("Created %s, ret: %s: %s", page, req.status_code, req.content)
            app.logger.info("Req: %s", req.request.body)
    if do_networks:
        content = remote.get_networks()
        pprint.pprint(content)
        for k,v in content.items():
            data = {'name': k, 'description': v}
            req = requests.post(app.config['COCKPIT_URL'] + '/api/collections/save/networkdescriptions',
                params={'token': app.config['COCKPIT_AUTH']},
                json={'data': data},
                headers={'Content-type': 'application/json'}
                )
            app.logger.info("Created: %s, status: %s", k, req.status_code)
@app.cli.command()
def create_cockpit_collections():
    """CLI: create the pages/networkdescriptions/chunks collections in Cockpit.

    Each collection is created with public view/edit/create ACL; field layouts
    mirror the legacy MODX content structure migrated by migrate_content.
    """
    acl = {
        "public": {
            "entries_view": True,
            "entries_edit": True,
            "entries_create": True,
            }
        }
    req = requests.post(app.config['COCKPIT_URL'] + '/api/collections/createCollection',
        params={'token': app.config['COCKPIT_AUTH2']},
        json={
            "name": "pages",
            "data": {
                "label": "Pages",
                "fields":[
                    {"name":"name","type":"text","localize":False,"options":[],"width":"1-1"},
                    {"name":"title","type":"text","localize":False,"options":[],"width":"1-1"},
                    {"name":"page_menu","type":"html","localize":False,"options":[],"width":"1-1"},
                    {"name":"banner_text","type":"html","localize":False,"options":[],"width":"1-1"},
                    {"name":"mainContent","type":"html","localize":False,"options":[],"width":"1-1"},
                    {"name":"TextAreaOne","type":"html","localize":False,"options":[],"width":"1-2"},
                    {"name":"TextAreaFour","type":"html","localize":False,"options":[],"width":"1-2"},
                    {"name":"TextAreaTwo","type":"html","localize":False,"options":[],"width":"1-2"},
                    {"name":"TextAreaFive","type":"html","localize":False,"options":[],"width":"1-2"},
                    {"name":"TextAreaThree","type":"html","localize":False,"options":[],"width":"1-2"},
                    {"name":"TextAreaSix","type":"html","localize":False,"options":[],"width":"1-2"}
                    ],
                "acl": acl
                }
            },
        headers={'Content-type': 'application/json'})
    app.logger.info("Ret: %s", req.status_code)
    req = requests.post(app.config['COCKPIT_URL'] + '/api/collections/createCollection',
        params={'token': app.config['COCKPIT_AUTH2']},
        json={
            "name": "networkdescriptions",
            "data": {
                "label": "Network Descriptions",
                "fields":[
                    {"name":"name","type":"text","localize":False,"options":[],"width":"1-1"},
                    {"name":"description","type":"html","localize":False,"options":[],"width":"1-1"},
                    ],
                "acl": acl
                }
            },
        headers={'Content-type': 'application/json'})
    app.logger.info("Ret: %s", req.status_code)
    req = requests.post(app.config['COCKPIT_URL'] + '/api/collections/createCollection',
        params={'token': app.config['COCKPIT_AUTH2']},
        json={
            "name": "chunks",
            "data": {
                "label": "Content chunks",
                "fields":[
                    {"name":"name","type":"text","localize":False,"options":[],"width":"1-1"},
                    {"name":"content","type":"html","localize":False,"options":[],"width":"1-1"},
                    ],
                "acl": acl
                }
            },
        headers={'Content-type': 'application/json'})
    app.logger.info("Ret: %s", req.status_code)
@app.cli.command()
@click.argument('username')
def reset_password(username):
    """CLI: set a fresh random password for `username` and print it once."""
    from BlockedFrontend.models import User
    conn = db_connect_single()
    account = User.select_one(conn, username=username)
    generated = account.random_password()
    account.set_password(generated)
    account.store()
    conn.commit()
    print("New password: " + generated)
@app.cli.command()
def migrate_rightsholders():
    """CLI: create Rightsholder rows from legacy court_judgments columns and
    link the judgments to them via rightsholder_id."""
    from BlockedFrontend.models import Rightsholder
    from NORM import Query
    conn = db_connect_single()
    # Only judgments that name an obtainer but aren't linked yet.
    q = Query(conn, """select distinct injunction_obtained_by, injunction_obtained_by_url
        from court_judgments
        where injunction_obtained_by is not null and rightsholder_id is null
        order by injunction_obtained_by, injunction_obtained_by_url desc
        """, [])
    for row in q:
        newrh = Rightsholder.find_or_create(conn,
            {'name': row['injunction_obtained_by']},
            {'name': row['injunction_obtained_by'], 'website': row['injunction_obtained_by_url'], 'country': 'UK'})
        # Backfill a website onto an existing rightsholder if we now have one.
        if not newrh['website'] and row['injunction_obtained_by_url']:
            newrh['website'] = row['injunction_obtained_by_url']
            newrh.store()
        _ = Query(conn, "update court_judgments set rightsholder_id = %s where injunction_obtained_by = %s",
            [newrh['id'], row['injunction_obtained_by']]
            )
    conn.commit()
    print("OK")
"""
*********************************************************************
This file is part of:
The Acorn Project
https://wwww.twistedfields.com/research
*********************************************************************
Copyright (c) 2019-2021 Taylor Alexander, Twisted Fields LLC
Copyright (c) 2021 The Acorn Project contributors (cf. AUTHORS.md).
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*********************************************************************
"""
import corner_actuator
import serial
import time
import sys
import math
from odrive.utils import dump_errors
from evdev import InputDevice, list_devices, categorize, ecodes, KeyEvent
from steering import calculate_steering, compare_steering_values, steering_to_numpy
import zmq
import pickle
import gps_tools
from collections import namedtuple
import numpy as np
import spline_lib
import os
import datetime
from motors import _STATE_ENABLED, STATE_DISCONNECTED
import rtk_process
import coloredlogs
import subprocess
from enum import Enum
from multiprocessing import shared_memory, resource_tracker
import random
# This file gets imported by server but we should only import GPIO on raspi.
if "arm" in os.uname().machine:
import board
import busio
import digitalio
from adafruit_mcp230xx.mcp23017 import MCP23017
# --- Motion profiling ---
COUNTS_PER_REVOLUTION = corner_actuator.COUNTS_PER_REVOLUTION
ACCELERATION_COUNTS_SEC = 0.5  # max velocity change per second (ramp limit)
_RESUME_MOTION_WARNING_TIME_SEC = 4
#_RESUME_MOTION_WARNING_TIME_SEC = -1
_SEC_IN_ONE_MINUTE = 60
# --- Autonomy safety limits ---
_MAXIMUM_ALLOWED_DISTANCE_METERS = 3.5
_MAXIMUM_ALLOWED_ANGLE_ERROR_DEGREES = 120 #20
_VOLTAGE_CUTOFF = 20
_VOLTAGE_RESUME_MANUAL_CONTROL = 35
_GPS_ERROR_RETRIES = 3
# --- Human-readable control-state strings reported to the server/UI ---
CONTROL_STARTUP = "Initializing..."
CONTROL_GPS_STARTUP = "Waiting for GPS fix."
CONTROL_ONLINE = "Online and awaiting commands."
CONTROL_AUTONOMY = "Autonomy operating."
CONTROL_AUTONOMY_PAUSE = "Autonomy paused with temporary error."
CONTROL_LOW_VOLTAGE = "Low voltage Pause."
CONTROL_AUTONOMY_ERROR_DISTANCE = "Autonomy failed - too far from path."
CONTROL_AUTONOMY_ERROR_ANGLE = "Autonomy failed - path angle too great."
CONTROL_AUTONOMY_ERROR_RTK_AGE = "Autonomy failed - rtk base data too old."
CONTROL_AUTONOMY_ERROR_SOLUTION_AGE = "Autonomy failed - gps solution too old."
CONTROL_OVERRIDE = "Remote control override."
CONTROL_SERVER_ERROR = "Server communication error."
CONTROL_MOTOR_ERROR = "Motor error detected."
CONTROL_NO_STEERING_SOLUTION = "No steering solution possible."
# --- Loop timing / staleness thresholds ---
GPS_PRINT_INTERVAL = 10
_NUM_GPS_SUBSAMPLES = 10
_LOOP_RATE = 10
_ERROR_RATE_AVERAGING_COUNT = 3
_ALLOWED_RTK_AGE_SEC = 20.0
_ALLOWED_SOLUTION_AGE_SEC = 1.0
_ALLOWED_MOTOR_SEND_LAPSE_SEC = 5
SERVER_COMMUNICATION_DELAY_LIMIT_SEC = 10
_SERVER_DELAY_RECONNECT_WIFI_SECONDS = 120
_BEGIN_AUTONOMY_SPEED_RAMP_SEC = 3.0
_PATH_END_PAUSE_SEC = 5.0
_SLOW_POLLING_SLEEP_S = 0.5
_MILLISECONDS_PER_SECOND = 1000
_POLL_MILLISECONDS = 100
_FAST_POLL_MILLISECONDS = 20
_VERY_FAST_POLL_MILLISECONDS = 5
_ERROR_SKIP_RATE = 40
_DISENGAGEMENT_RETRY_DELAY_MINUTES = 1
_DISENGAGEMENT_RETRY_DELAY_SEC = _DISENGAGEMENT_RETRY_DELAY_MINUTES * _SEC_IN_ONE_MINUTE
# --- Manual / autonomy steering limits ---
_JOYSTICK_MIN = 0.02  # dead-zone threshold for joystick axes
_STEERING_ANGLE_LIMIT_AUTONOMY_DEGREES = 180
_STEERING_ANGLE_LIMIT_DEGREES = 120
_DEFAULT_MAXIMUM_VELOCITY = 0.4
def get_profiled_velocity(last_vel, unfiltered_vel, period_s):
    """Step last_vel toward unfiltered_vel, capping the change at the
    acceleration budget for this period (simple velocity ramp profiling)."""
    delta = unfiltered_vel - last_vel
    if math.fabs(delta) < ACCELERATION_COUNTS_SEC * period_s:
        # Within budget: jump straight to the requested velocity.
        step = delta
    else:
        # Otherwise move by the maximum allowed step in the right direction.
        step = math.copysign(ACCELERATION_COUNTS_SEC, delta) * period_s
    return last_vel + step
# Travel-direction options for path following. Built with the Enum functional
# API: members are numbered 1..3 in declaration order, matching the original
# explicit FORWARD=1, BACKWARD=2, EITHER=3 values.
Direction = Enum('Direction', 'FORWARD BACKWARD EITHER')
class PathControlValues():
    """Path-following controller gains (the *_p/*_d names suggest PD gains
    on angular and lateral error — confirm against the control loop)."""
    def __init__(self, angular_p, lateral_p, angular_d, lateral_d):
        self.angular_p = angular_p  # proportional term, angular (heading) error
        self.lateral_p = lateral_p  # proportional term, lateral (cross-track) error
        self.angular_d = angular_d  # derivative term, angular error
        self.lateral_d = lateral_d  # derivative term, lateral error
class NavigationParameters():
    """How a path should be traversed: speed, directions, and looping."""
    def __init__(self, travel_speed, path_following_direction, vehicle_travel_direction, loop_path):
        self.travel_speed = travel_speed
        # Direction enum values: how to walk the path / which way the vehicle faces.
        self.path_following_direction = path_following_direction
        self.vehicle_travel_direction = vehicle_travel_direction
        self.loop_path = loop_path  # True to restart the path after its end
class PathSection():
    """A runnable section of path: waypoints plus the control values,
    navigation parameters, and error/termination thresholds that apply to it."""
    def __init__(self, points, control_values, navigation_parameters, max_dist=0,
                 max_angle=0, end_dist=0, end_angle=0):
        self.points = points
        self.spline = None  # fitted lazily elsewhere; None until computed
        # Abort thresholds while following this section.
        self.maximum_allowed_distance_meters = max_dist
        self.maximum_allowed_angle_error_degrees = max_angle
        self.control_values = control_values
        # Tolerances for deciding the section end has been reached.
        self.end_distance_m = end_dist
        self.end_angle_degrees = end_angle
        self.navigation_parameters = navigation_parameters
class EnergySegment():
    """Energy/telemetry summary for one traversed stretch between two GPS fixes.

    NOTE(review): assumes end/start time stamps differ and distance_sum is
    non-zero; equal stamps or zero distance would divide by zero below —
    confirm callers only build segments after actual movement.
    """
    def __init__(self, sequence_num, start_gps, end_gps, distance_sum,
                 total_watt_seconds, avg_watts, per_motor_total_watt_seconds,
                 per_motor_watt_average, subsampled_points, autonomy_operating,
                 wifi_ap_name, wifi_signal_strength):
        self.sequence_num = sequence_num
        self.time_stamp = end_gps.time_stamp
        self.start_gps = start_gps
        self.end_gps = end_gps
        self.duration = end_gps.time_stamp - start_gps.time_stamp
        self.distance_sum = distance_sum
        # Derived rates for this segment.
        self.meters_per_second = distance_sum / self.duration
        self.watt_seconds_per_meter = total_watt_seconds/distance_sum
        self.height_change = end_gps.height_m - start_gps.height_m
        self.avg_watts = avg_watts
        self.per_motor_total_watt_seconds = per_motor_total_watt_seconds
        self.per_motor_watt_average = per_motor_watt_average
        self.subsampled_points = subsampled_points
        self.autonomy_operating = autonomy_operating
        self.wifi_ap_name = wifi_ap_name
        self.wifi_signal_strength = wifi_signal_strength
class RemoteControl():
def __init__(self, remote_to_main_lock, main_to_remote_lock, remote_to_main_string, main_to_remote_string, logging, logging_details, simulated_hardware=False):
self.joy = None
self.simulated_hardware = simulated_hardware
self.motor_socket = None
self.robot_object = None
self.next_point_heading = -180
self.activate_autonomy = False
self.autonomy_velocity = 0
self.resume_motion_timer = 0
self.remote_to_main_lock = remote_to_main_lock
self.main_to_remote_lock = main_to_remote_lock
self.remote_to_main_string = remote_to_main_string
self.main_to_remote_string = main_to_remote_string
self.logger = logging.getLogger('main.remote')
_LOGGER_FORMAT_STRING, _LOGGER_DATE_FORMAT, _LOGGER_LEVEL = logging_details
coloredlogs.install(fmt=_LOGGER_FORMAT_STRING,
datefmt=_LOGGER_DATE_FORMAT,
level=_LOGGER_LEVEL,
logger=self.logger)
    def run_setup(self):
        """Initialize alarm outputs (real or fake) and connect peripherals.

        On real hardware the three alarm channels are MCP23017 GPIO pins
        driven as outputs; in simulation they are stand-in objects exposing
        only the `.value` attribute.
        """
        if self.simulated_hardware:
            class FakeAlarm():
                # Minimal stand-in exposing the .value attribute that the
                # real GPIO pin objects provide.
                def __init__(self):
                    self.value = 0
            self.alarm1 = FakeAlarm()
            self.alarm2 = FakeAlarm()
            self.alarm3 = FakeAlarm()
        else:
            # I2C GPIO expander carrying the alarm output pins.
            i2c = busio.I2C(board.SCL, board.SDA)
            mcp = MCP23017(i2c)#, address=0x20) # MCP23017
            self.alarm1 = mcp.get_pin(0)
            self.alarm2 = mcp.get_pin(1)
            self.alarm3 = mcp.get_pin(2)
            # Drive all alarms low (inactive) at startup.
            self.alarm1.switch_to_output(value=False)
            self.alarm2.switch_to_output(value=False)
            self.alarm3.switch_to_output(value=False)
        self.connect_to_motors()
        self.connect_joystick()
def connect_to_motors(self, port=5590):
context = zmq.Context()
# Socket to talk to motor control process
self.motor_socket = context.socket(zmq.REQ)
self.motor_socket.connect("tcp://localhost:{}".format(port))
self.motor_socket.setsockopt(zmq.LINGER, 50)
self.motor_send_okay = True
self.motor_last_send_time = time.time()
def close_motor_socket(self):
self.motor_socket.close()
del(self.motor_socket)
self.motor_socket = None
def get_joystick_values(self, st_old, th_old, stf_old):
steer = None
throttle = None
strafe = None
count = 0
while True:
event = None
try:
event = self.joy.read_one()
except Exception as e:
self.logger.error("Joystick read exception: {}".format(e))
if event == None:
break
if event and event.type == ecodes.EV_ABS:
absevent = categorize(event)
if ecodes.bytype[absevent.event.type][absevent.event.code] == 'ABS_RX':
steer = absevent.event.value / 32768.0
if ecodes.bytype[absevent.event.type][absevent.event.code] == 'ABS_Y':
throttle = -absevent.event.value / 32768.0
if ecodes.bytype[absevent.event.type][absevent.event.code] == 'ABS_X':
strafe = absevent.event.value / 32768.0
count += 1
if not steer:
steer = st_old
if not throttle:
throttle = th_old
if not strafe:
strafe = stf_old
return steer, throttle, strafe
def connect_joystick(self):
devices = [InputDevice(fn) for fn in list_devices()]
for dev in devices:
if "Microsoft" in dev.name:
self.joy = dev
return
if "Logitech" in dev.name:
self.joy = dev
return
    def load_path(self, path, simulation_teleport=False, generate_spline=False):
        """Install a new navigation path and pick the initial travel direction.

        Args:
            path: Either a PathSection, or (legacy) a bare sequence of GPS
                points which gets splined and wrapped in a default PathSection.
            simulation_teleport: When running simulated hardware, also move
                the simulated robot onto the path.
            generate_spline: Fit a smoothing spline through the PathSection
                points (only applies when there are more than two points).
        """
        self.logger.debug(path)
        # String-based type check rather than isinstance — presumably to
        # tolerate objects unpickled under a different module path. TODO confirm.
        if 'PathSection' in str(type(path)):
            self.nav_path = path
            if len(path.points) > 2 and generate_spline == True:
                self.nav_path.spline = spline_lib.GpsSpline(path.points, smooth_factor=10, num_points=1000)
                self.nav_path.points = self.nav_path.spline.points
            if len(path.points) == 2:
                # Two-point (straight line) path: no spline, just sanitize both endpoints.
                self.nav_path.points = gps_tools.check_point(self.nav_path.points[0]), gps_tools.check_point(self.nav_path.points[1])
        else:
            # Legacy paths
            nav_spline = spline_lib.GpsSpline(path, smooth_factor=10, num_points=1000)
            self.nav_path = PathSection(points=nav_spline.points, control_values=self.default_path_control_vals, navigation_parameters=self.default_navigation_parameters, max_dist=_MAXIMUM_ALLOWED_DISTANCE_METERS, max_angle=_MAXIMUM_ALLOWED_ANGLE_ERROR_DEGREES, end_dist=1.0, end_angle=45)
            self.nav_path.spline = nav_spline
        self.loaded_path_name = self.robot_object.loaded_path_name
        if self.simulated_hardware and simulation_teleport:
            # Place simulated robot at start of path.
            # NOTE(review): for splined paths this index lands near the
            # middle of the path (half the point count minus 5), not the start.
            if len(path.points) == 2:
                start_index = 0
            else:
                start_index = int(len(self.nav_path.spline.points)/2) - 5
            initial_heading = gps_tools.get_heading(self.nav_path.points[start_index], self.nav_path.points[start_index+1])
            # Simulated fix starts offset 30 degrees from the path heading,
            # so the controller has an initial error to correct.
            self.simulated_sample = gps_tools.GpsSample(self.nav_path.points[start_index].lat, self.nav_path.points[start_index].lon, self.simulated_sample.height_m, ("fix","fix"), 20, initial_heading + 30, time.time(), 0.5)
            self.latest_gps_sample = self.simulated_sample
        # Set initial nav_direction.
        if self.nav_path.navigation_parameters.path_following_direction == Direction.EITHER:
            # EITHER: follow toward whichever end of the path is farther,
            # starting from the nearer end.
            dist_start = gps_tools.get_distance(self.latest_gps_sample, self.nav_path.points[0])
            dist_end = gps_tools.get_distance(self.latest_gps_sample, self.nav_path.points[len(self.nav_path.points)-1])
            # This may be overridden farther down.
            if dist_start < dist_end:
                self.nav_direction = 1
            else:
                self.nav_direction = -1
        elif self.nav_path.navigation_parameters.path_following_direction == Direction.FORWARD:
            self.nav_direction = 1
        elif self.nav_path.navigation_parameters.path_following_direction == Direction.BACKWARD:
            self.nav_direction = -1
def run_loop(self):
joy_steer = 0
joy_throttle = 0
joy_strafe = 0
vel_cmd = 0
last_vel_cmd = 0
tick_time = time.time()
#self.default_navigation_parameters = NavigationParameters(travel_speed=0.0, path_following_direction=Direction.BACKWARD, vehicle_travel_direction=Direction.FORWARD, loop_path=True)
#self.default_navigation_parameters = NavigationParameters(travel_speed=0.0, path_following_direction=Direction.FORWARD, vehicle_travel_direction=Direction.BACKWARD, loop_path=True)
self.default_navigation_parameters = NavigationParameters(travel_speed=0.0, path_following_direction=Direction.EITHER, vehicle_travel_direction=Direction.EITHER, loop_path=True)
#self.default_navigation_parameters = NavigationParameters(travel_speed=0.0, path_following_direction=Direction.FORWARD, vehicle_travel_direction=Direction.FORWARD, loop_path=True)
#self.default_navigation_parameters = NavigationParameters(travel_speed=0.0, path_following_direction=Direction.BACKWARD, vehicle_travel_direction=Direction.BACKWARD, loop_path=True)
self.default_path_control_vals = PathControlValues(angular_p=0.9, lateral_p=-0.25, angular_d=0.3, lateral_d=-0.05)
self.nav_path = PathSection(points=[],
control_values=self.default_path_control_vals,
navigation_parameters=self.default_navigation_parameters,
max_dist=_MAXIMUM_ALLOWED_DISTANCE_METERS,
max_angle=_MAXIMUM_ALLOWED_ANGLE_ERROR_DEGREES,
end_dist=1.0,
end_angle=30)
self.maximum_velocity = _DEFAULT_MAXIMUM_VELOCITY
self.nav_path_list = []
self.nav_path_index = 0
self.gps_path = []
self.load_path_time = time.time()
auto_throttle = 0
self.loaded_path_name = ""
self.autonomy_hold = True
self.control_state = CONTROL_STARTUP
self.motor_state = STATE_DISCONNECTED
self.driving_direction = 1.0
self.gps_path_lateral_error = 0
self.gps_path_lateral_error_rate = 0
self.gps_path_angular_error = 0
self.gps_path_angular_error_rate = 0
self.gps_error_update_time = 0
self.gps_angle_error_rate_averaging_list = []
self.gps_lateral_error_rate_averaging_list = []
self.last_autonomy_steer_cmd = 0
self.last_autonomy_strafe_cmd = 0
self.solution_age_averaging_list = []
self.voltage_average = 0
self.disengagement_time = time.time()
self.power_consumption_list = []
self.avg_watts_per_meter = 0
self.watt_hours_per_meter = 0
self.total_watts = 0
self.voltages = []
self.bus_currents = []
self.last_energy_segment = None
self.temperatures = []
self.simulated_sample = gps_tools.GpsSample(37.353039233, -122.333725682, 100, ("fix","fix"), 20, 0, time.time(), 0.5)
self.last_calculated_steering = calculate_steering(0, 0, 0, _STEERING_ANGLE_LIMIT_DEGREES)
steering_tmp = steering_to_numpy(self.last_calculated_steering)
self.reloaded_path = True
# set up shared memory for GUI four wheel steeing debugger (sim only).
try:
self.steering_debug_shared_memory = shared_memory.SharedMemory(name='acorn_steering_debug')
self.logger.info("Connected to existing shared memory acorn_steering_debug")
except:
self.steering_debug_shared_memory = shared_memory.SharedMemory(
name="acorn_steering_debug",
create=True,
size=steering_tmp.nbytes)
self.logger.info("Created shared memory acorn_steering_debug")
# Untrack the resource so it does not get destroyed. This allows the
# steering debug window to stay open.
resource_tracker.unregister(self.steering_debug_shared_memory._name, 'shared_memory')
self.steering_debug = np.ndarray(steering_tmp.shape, dtype=steering_tmp.dtype, buffer=self.steering_debug_shared_memory.buf)
self.steering_debug[:] = steering_tmp[:]
autonomy_vel_cmd = 0
last_wifi_restart_time = 0
if self.simulated_hardware:
rtk_socket1 = None
rtk_socket2 = None
else:
rtk_process.launch_rtk_sub_procs(self.logger)
rtk_socket1, rtk_socket2 = rtk_process.connect_rtk_procs(self.logger)
self.latest_gps_sample = None
self.last_good_gps_sample = None
self.gps_buffers = ["",""]
debug_time = time.time()
try:
loop_count = -1
while True:
loop_count += 1
# Get real or simulated GPS data.
if self.simulated_hardware:
if loop_count % GPS_PRINT_INTERVAL == 0:
self.logger.info("Lat: {:.10f}, Lon: {:.10f}, Azimuth: {:.2f}, Distance: {:.4f}, Fixes: ({}, {}), Period: {:.2f}".format(self.simulated_sample.lat, self.simulated_sample.lon, self.simulated_sample.azimuth_degrees, 2.8, True, True, 0.100))
self.latest_gps_sample = gps_tools.GpsSample(self.simulated_sample.lat, self.simulated_sample.lon, self.simulated_sample.height_m, ("fix","fix"), 20, self.simulated_sample.azimuth_degrees, time.time(), 0.5)
else:
self.gps_buffers, self.latest_gps_sample = (
rtk_process.rtk_loop_once(rtk_socket1, rtk_socket2,
buffers=self.gps_buffers,
print_gps=loop_count % GPS_PRINT_INTERVAL == 0,
last_sample=self.latest_gps_sample,
retries=_GPS_ERROR_RETRIES,
logger=self.logger))
debug_time = time.time()
if self.latest_gps_sample is not None:
self.last_good_gps_sample = self.latest_gps_sample
else:
# Occasional bad samples are fine. A very old sample will
# get flagged in the final checks.
self.latest_gps_sample = self.last_good_gps_sample
try:
# Read robot object from shared memory.
# Object is sent by main process.
recieved_robot_object = None
time1 = time.time() - debug_time
with self.main_to_remote_lock:
recieved_robot_object = pickle.loads(self.main_to_remote_string["value"])
time2 = time.time() - debug_time
except Exception as e:
self.logger.error("Exception reading remote string.")
raise(e)
if recieved_robot_object:
if str(type(recieved_robot_object))=="<class '__main__.Robot'>":
self.robot_object = recieved_robot_object
self.logger.debug("Remote received new robot object.")
time3 = time.time() - debug_time
if len(self.nav_path.points) == 0 or self.loaded_path_name != self.robot_object.loaded_path_name:
if len(self.robot_object.loaded_path) > 0 and self.latest_gps_sample is not None:
if isinstance(self.robot_object.loaded_path[0], PathSection):
self.nav_path_list = self.robot_object.loaded_path
if self.simulated_hardware:
self.nav_path_index = 0
self.load_path(self.nav_path_list[self.nav_path_index], simulation_teleport=True, generate_spline=True)
else:
closest_row_index = 0
min_distance = math.inf
for index in range(len(self.nav_path_list)):
row_path = self.nav_path_list[index]
for point in row_path.points:
dist = gps_tools.get_distance(self.latest_gps_sample, point)
if dist < min_distance:
min_distance = dist
closest_row_index = index
self.logger.info("Loading path. List length {}, Closest path index {}, min_distance {}".format(len(self.nav_path_list), closest_row_index, min_distance))
self.nav_path_index = closest_row_index
self.load_path(self.nav_path_list[self.nav_path_index], simulation_teleport=True, generate_spline=True)
self.reloaded_path = True
else:
self.load_path(self.robot_object.loaded_path, simulation_teleport=True, generate_spline=True)
self.reloaded_path = True
self.load_path_time = time.time()
self.activate_autonomy = self.robot_object.activate_autonomy
# self.autonomy_velocity = self.robot_object.autonomy_velocity
# Autonomy is disabled until robot is ready or if joystick is used.
if self.autonomy_hold:
self.activate_autonomy = False
# Reset disabled autonomy if autonomy is turned off in the command.
if self.robot_object.clear_autonomy_hold:
self.autonomy_hold = False
# Reset disengagement timer.
self.disengagement_time = time.time() - _DISENGAGEMENT_RETRY_DELAY_SEC
if self.robot_object==None:
self.logger.info("Waiting for valid robot object before running remote control code.")
time.sleep(_SLOW_POLLING_SLEEP_S)
continue
time4 = time.time() - debug_time
# print("begin robot calc")
debug_points = (None, None, None, None)
calculated_rotation = None
calculated_strafe = None
gps_lateral_distance_error = 0
gps_path_angle_error = 0
absolute_path_distance = math.inf
time5 = 0
strafe_multiplier = 1.0
if self.robot_object and self.latest_gps_sample is not None:
vehicle_front = gps_tools.project_point(self.latest_gps_sample, self.latest_gps_sample.azimuth_degrees, 1.0)
vehicle_rear = gps_tools.project_point(self.latest_gps_sample, self.latest_gps_sample.azimuth_degrees, -1.0)
# if time.time() - self.latest_gps_sample.time_stamp > _ALLOWED_SOLUTION_AGE_SEC:
# self.logger.error("SOLUTION AGE {} NOT OKAY AND LATEST GPS SAMPLE IS: {}".format(time.time() - self.latest_gps_sample.time_stamp, self.latest_gps_sample))
# self.logger.error("Took {} sec to get here. {} {} {} {}".format(time.time()-debug_time, time1, time2, time3, time4))
time5 = time.time() - debug_time
"""
Begin steering calculation.
TODO: Break this out to a separate module.
"""
projected_path_tangent_point = gps_tools.GpsPoint(0, 0)
closest_path_point = None
if(len(self.nav_path.points)>0):
path_point_heading = None
if len(self.nav_path.points) == 2:
if self.nav_path.navigation_parameters.vehicle_travel_direction == Direction.EITHER:
closest_path_point = gps_tools.check_point(self.nav_path.points[0])
else:
if self.nav_direction == -1:
closest_path_point = gps_tools.check_point(self.nav_path.points[0])
elif self.nav_direction == 1:
closest_path_point = gps_tools.check_point(self.nav_path.points[-1])
path_point_heading = gps_tools.get_heading(self.nav_path.points[0], self.nav_path.points[1])
else:
closest_u = self.nav_path.spline.closestUOnSpline(self.latest_gps_sample)
closest_path_point = self.nav_path.spline.coordAtU(closest_u)
# Heading specified at this point on the path.
path_point_heading = math.degrees(self.nav_path.spline.slopeRadiansAtU(closest_u))
absolute_path_distance = gps_tools.get_distance(self.latest_gps_sample, closest_path_point)
calculated_rotation = path_point_heading - self.latest_gps_sample.azimuth_degrees
self.logger.debug("calculated_rotation: {}, DISTANCE: {}".format(calculated_rotation,abs(absolute_path_distance)))
projected_path_tangent_point = gps_tools.project_point(closest_path_point, path_point_heading, 3.0)
gps_lateral_distance_error = gps_tools.get_approx_distance_point_from_line(self.latest_gps_sample, closest_path_point, projected_path_tangent_point)
self.logger.debug("robot heading {}, path heading {}".format(self.latest_gps_sample.azimuth_degrees, path_point_heading))
calculated_strafe = gps_lateral_distance_error
# Truncate values to between 0 and 360
calculated_rotation %= 360
# Set value to +/- 180
if calculated_rotation > 180:
calculated_rotation -= 360
self.logger.debug("calculated_rotation: {}".format(calculated_rotation))
_MAXIMUM_ROTATION_ERROR_DEGREES = 140
drive_solution_okay = True
if self.nav_path.navigation_parameters.vehicle_travel_direction == Direction.EITHER:
self.driving_direction = 1
if abs(calculated_rotation) > 90:
calculated_rotation -= math.copysign(180, calculated_rotation)
self.driving_direction = -1
if self.nav_direction == -1:
self.driving_direction *= -1
calculated_strafe *= -1
elif self.nav_path.navigation_parameters.vehicle_travel_direction == Direction.FORWARD:
self.driving_direction = 1
if abs(calculated_rotation) > _MAXIMUM_ROTATION_ERROR_DEGREES:
if self.nav_path.navigation_parameters.path_following_direction in (Direction.EITHER, Direction.BACKWARD):
calculated_rotation -= math.copysign(180, calculated_rotation)
calculated_strafe *= -1
self.nav_direction = -1
if self.nav_path.navigation_parameters.path_following_direction == Direction.FORWARD:
if abs(calculated_rotation) > _MAXIMUM_ROTATION_ERROR_DEGREES:
drive_solution_okay = False
elif self.nav_path.navigation_parameters.path_following_direction == Direction.BACKWARD:
drive_solution_okay = False
elif self.nav_path.navigation_parameters.vehicle_travel_direction == Direction.BACKWARD:
self.driving_direction = -1
if abs(calculated_rotation) > _MAXIMUM_ROTATION_ERROR_DEGREES:
if self.nav_path.navigation_parameters.path_following_direction in (Direction.EITHER, Direction.FORWARD):
calculated_rotation -= math.copysign(180, calculated_rotation)
#calculated_strafe *= -1
self.nav_direction = 1
elif self.nav_path.navigation_parameters.path_following_direction == Direction.BACKWARD:
if abs(calculated_rotation) > _MAXIMUM_ROTATION_ERROR_DEGREES:
drive_solution_okay = False
elif self.nav_path.navigation_parameters.path_following_direction == Direction.FORWARD:
drive_solution_okay = False
elif self.nav_path.navigation_parameters.path_following_direction == Direction.BACKWARD:
calculated_strafe *= -1
drive_reverse = 1.0
if len(self.nav_path.points) == 2 and self.nav_path.navigation_parameters.path_following_direction == Direction.EITHER:
vehicle_position = (self.latest_gps_sample.lat, self.latest_gps_sample.lon)
if abs(calculated_rotation) > 20:
calculated_strafe_original = calculated_strafe
if abs(calculated_rotation) > 40:
calculated_strafe = 0
self.gps_lateral_error_rate_averaging_list = []
else:
calculated_strafe *= (40-abs(calculated_rotation))/20.0
self.logger.debug("Reduced strafe from {}, to: {}".format(calculated_strafe_original, calculated_strafe))
else:
drive_reverse = gps_tools.determine_point_move_sign(self.nav_path.points, vehicle_position)
# Figure out if we're close to aligned with the
# target point and reduce forward or reverse
# velocity if so.
closest_pt_on_line = gps_tools.find_closest_pt_on_line(self.nav_path.points[0], self.nav_path.points[1], vehicle_position)
dist_along_line = gps_tools.get_distance(self.nav_path.points[0], closest_pt_on_line)
self.logger.debug("dist_along_line {}".format(dist_along_line))
if gps_lateral_distance_error > 1.0:
# Reduce forward/reverse direction command
# if we are far from the line but also
# aligned to the target point.
if dist_along_line < 1.0:
drive_reverse = 0
elif dist_along_line < 2.0:
drive_reverse *= 0.1
elif dist_along_line < 4.0:
drive_reverse *= 0.25
self.logger.debug("rotation {}, strafe: {} direction {}, drive_reverse {}".format(calculated_rotation, calculated_strafe, self.driving_direction, drive_reverse))
if not drive_solution_okay:
self.autonomy_hold = True
self.activate_autonomy = False
self.control_state = CONTROL_NO_STEERING_SOLUTION
self.logger.error("Could not find drive solution. Disabling autonomy.")
self.logger.error("calculated_rotation: {}, vehicle_travel_direction {}, path_following_direction {}".format(calculated_rotation,self.nav_path.navigation_parameters.vehicle_travel_direction, self.nav_path.navigation_parameters.path_following_direction))
self.logger.debug("calculated_rotation: {}, vehicle_travel_direction {}, path_following_direction {}, self.nav_direction {}, self.driving_direction {}".format(calculated_rotation,self.nav_path.navigation_parameters.vehicle_travel_direction, self.nav_path.navigation_parameters.path_following_direction, self.nav_direction, self.driving_direction))
gps_path_angle_error = calculated_rotation
# Accumulate a list of error values for angular and
# lateral error. This allows averaging of errors
# and also determination of their rate of change.
time_delta = time.time() - self.gps_error_update_time
self.gps_error_update_time = time.time()
if not self.reloaded_path:
self.gps_lateral_error_rate_averaging_list.append((gps_lateral_distance_error - self.gps_path_lateral_error) / time_delta)
self.logger.debug("gps_lateral_distance_error: {}, self.gps_path_lateral_error: {}".format(gps_lateral_distance_error, self.gps_path_lateral_error))
self.gps_angle_error_rate_averaging_list.append((gps_path_angle_error - self.gps_path_angular_error) / time_delta)
self.gps_path_angular_error = gps_path_angle_error
self.gps_path_lateral_error = gps_lateral_distance_error
# Check end conditions.
if self.nav_direction == -1:
end_distance = gps_tools.get_distance(self.latest_gps_sample, self.nav_path.points[0])
elif self.nav_direction == 1:
end_distance = gps_tools.get_distance(self.latest_gps_sample, self.nav_path.points[-1])
if abs(calculated_rotation) < self.nav_path.end_angle_degrees and end_distance < self.nav_path.end_distance_m:
self.logger.info("MET END CONDITIONS {} {}".format(calculated_rotation, absolute_path_distance))
if self.nav_path.navigation_parameters.loop_path == True:
self.load_path(self.nav_path, simulation_teleport=False, generate_spline=False)
self.load_path_time = time.time()
self.gps_lateral_error_rate_averaging_list = []
self.gps_angle_error_rate_averaging_list = []
self.reloaded_path = True
continue
else:
self.nav_path_index += 1
if self.nav_path_index < len(self.nav_path_list):
self.load_path(self.nav_path_list[self.nav_path_index], simulation_teleport=False, generate_spline=True)
self.gps_angle_error_rate_averaging_list = []
self.gps_lateral_error_rate_averaging_list = []
self.reloaded_path = True
continue
else:
# self.nav_path_list = []
self.nav_path_index = 0
self.load_path(self.nav_path_list[self.nav_path_index], simulation_teleport=True, generate_spline=True)
self.gps_angle_error_rate_averaging_list = []
self.gps_lateral_error_rate_averaging_list = []
self.reloaded_path = True
continue
# self.control_state = CONTROL_ONLINE
# self.autonomy_hold = True
# self.activate_autonomy = False
self.reloaded_path = False
while len(self.gps_lateral_error_rate_averaging_list) > _ERROR_RATE_AVERAGING_COUNT:
self.gps_lateral_error_rate_averaging_list.pop(0)
while len(self.gps_angle_error_rate_averaging_list) > _ERROR_RATE_AVERAGING_COUNT:
self.gps_angle_error_rate_averaging_list.pop(0)
if len(self.gps_lateral_error_rate_averaging_list) > 0:
self.gps_path_lateral_error_rate = sum(self.gps_lateral_error_rate_averaging_list) / len(self.gps_lateral_error_rate_averaging_list)
else:
self.gps_path_lateral_error_rate = 0
if len(self.gps_angle_error_rate_averaging_list) > 0:
self.gps_path_angular_error_rate = sum(self.gps_angle_error_rate_averaging_list) / len(self.gps_angle_error_rate_averaging_list)
else:
self.gps_path_angular_error_rate = 0
self.logger.debug("self.gps_path_lateral_error_rate {}, {} / {}".format(self.gps_path_lateral_error_rate, sum(self.gps_lateral_error_rate_averaging_list), len(self.gps_lateral_error_rate_averaging_list)))
self.next_point_heading = calculated_rotation
# These extra points can be displayed in the web UI.
# TODO: That's commented out in the server code. Resolve?
debug_points = (vehicle_front, vehicle_rear, projected_path_tangent_point, closest_path_point)
time6 = time.time() - debug_time
# Get joystick value
if self.simulated_hardware and False:
joy_steer, joy_throttle, joy_strafe = 0.0, 0.0, 0.0
else:
joy_steer, joy_throttle, joy_strafe = self.get_joystick_values(joy_steer, joy_throttle, joy_strafe)
if abs(joy_throttle) < _JOYSTICK_MIN:
joy_throttle = 0.0
if abs(joy_steer) < _JOYSTICK_MIN:
joy_steer = 0.0
# Disable autonomy if manual control is activated.
if abs(joy_steer) > 0.1 or abs(joy_throttle) > 0.1 or abs(joy_strafe) > 0.1:
self.logger.info("DISABLED AUTONOMY Steer: {}, Joy {}".format(joy_steer, joy_throttle))
self.autonomy_hold = True
self.activate_autonomy = False
self.control_state = CONTROL_OVERRIDE
strafe_d = 0
steer_d = 0
strafe_p = 0
steer_p = 0
user_web_page_plot_steer_cmd = 0
user_web_page_plot_strafe_cmd = 0
# Calculate driving commands for autonomy.
if self.next_point_heading != -180 and self.activate_autonomy and calculated_rotation!= None and calculated_strafe!= None:
# calculated_rotation *= 2.0
# calculated_strafe *= -0.35
# steer_d = self.gps_path_angular_error_rate * 0.8
# strafe_d = self.gps_path_lateral_error_rate * -0.2
steer_p = calculated_rotation * self.nav_path.control_values.angular_p
strafe_p = calculated_strafe * self.nav_path.control_values.lateral_p
steer_d = self.gps_path_angular_error_rate * self.nav_path.control_values.angular_d
strafe_d = self.gps_path_lateral_error_rate * self.nav_path.control_values.lateral_d
# self.logger.debug(self.nav_path.control_values)
self.logger.debug("strafe_p {}, strafe_d {}, steer_p {} steer_d {}".format(strafe_p, strafe_d, strafe_d, steer_d))
steer_command_value = steer_p + steer_d
strafe_command_value = strafe_p + strafe_d
_STRAFE_LIMIT = 0.25
_STEER_LIMIT = 45
# Value clamping
steering_angle = steer_command_value
if steering_angle > _STEER_LIMIT:
steering_angle = _STEER_LIMIT
if steering_angle < -_STEER_LIMIT:
steering_angle = -_STEER_LIMIT
if strafe_command_value > _STRAFE_LIMIT:
unfiltered_strafe_cmd = _STRAFE_LIMIT
if strafe_command_value < -_STRAFE_LIMIT:
unfiltered_strafe_cmd = -_STRAFE_LIMIT
if math.fabs(strafe_command_value) < _STRAFE_LIMIT:
unfiltered_strafe_cmd = strafe_command_value
unfiltered_steer_cmd = steering_angle/45.0
unfiltered_strafe_cmd *= self.driving_direction * strafe_multiplier
autonomy_steer_diff = unfiltered_steer_cmd - self.last_autonomy_steer_cmd
autonomy_strafe_diff = unfiltered_strafe_cmd - self.last_autonomy_strafe_cmd
self.logger.debug("diffs: {}, {}".format(autonomy_steer_diff * _LOOP_RATE, autonomy_strafe_diff * _LOOP_RATE))
# Rate of change clamping
steer_rate = 4.0/_LOOP_RATE
strafe_rate = 40.0/_LOOP_RATE
if autonomy_steer_diff > steer_rate:
autonomy_steer_cmd = self.last_autonomy_steer_cmd + steer_rate
elif autonomy_steer_diff < -steer_rate:
autonomy_steer_cmd = self.last_autonomy_steer_cmd - steer_rate
else:
autonomy_steer_cmd = unfiltered_steer_cmd
autonomy_steer_cmd *= self.driving_direction
if autonomy_strafe_diff > strafe_rate:
autonomy_strafe_cmd = self.last_autonomy_strafe_cmd + strafe_rate
elif autonomy_strafe_diff < -strafe_rate:
autonomy_strafe_cmd = self.last_autonomy_strafe_cmd - strafe_rate
else:
autonomy_strafe_cmd = unfiltered_strafe_cmd
if abs(autonomy_steer_cmd) > 0.8:
# Maxed out steer and strafe can result in strafe only
# due to steering limits, so reduce strafe with maxed
# steering. TODO: linear taper
autonomy_strafe_cmd *= 0.2
self.last_autonomy_steer_cmd = autonomy_steer_cmd
self.last_autonomy_strafe_cmd = autonomy_strafe_cmd
user_web_page_plot_steer_cmd = autonomy_steer_cmd * self.driving_direction
user_web_page_plot_strafe_cmd = autonomy_strafe_cmd * self.driving_direction
if 0.0 <= self.nav_path.navigation_parameters.travel_speed <= self.maximum_velocity:
autonomy_vel_cmd = self.nav_path.navigation_parameters.travel_speed * self.driving_direction * drive_reverse
self.logger.debug("Travel speed: {}".format(self.nav_path.navigation_parameters.travel_speed))
else:
self.logger.error("Invalid travel speed specified! Got {}. Maximum allowed is {}".format(self.nav_path.navigation_parameters.travel_speed, self.maximum_velocity))
autonomy_vel_cmd = 0.0
self.logger.debug("self.autonomy_velocity {}, self.driving_direction {}, drive_reverse {} , autonomy_vel_cmd {}".format(self.autonomy_velocity, self.driving_direction, drive_reverse, autonomy_vel_cmd))
joy_steer = 0.0 # ensures that vel goes to zero when autonomy disabled
logger_string = "steer_cmd: {:.2f}, strafe_cmd: {:.2f}, vel_cmd: {:.2f}, calculated_rotation: {:.2f}, calculated_strafe: {:.2f}".format(autonomy_steer_cmd, autonomy_strafe_cmd, vel_cmd, calculated_rotation, calculated_strafe)
if loop_count % 10 == 0:
self.logger.info(logger_string)
else:
self.logger.debug(logger_string)
#print("Steer: {}, Throttle: {}".format(steer_cmd, vel_cmd))
zero_output = False
# In the following list the order for when we set control state
# matters
# TODO: clarify? What does this note mean?
time7 = time.time() - debug_time
# Begin Safety Checks
error_messages = []
fatal_error = False
if self.motor_state != _STATE_ENABLED:
error_messages.append("Motor error so zeroing out autonomy commands.")
zero_output = True
self.control_state = CONTROL_MOTOR_ERROR
self.resume_motion_timer = time.time()
if self.voltage_average < _VOLTAGE_CUTOFF:
fatal_error = True
error_messages.append("Voltage low so zeroing out autonomy commands.")
zero_output = True
self.control_state = CONTROL_LOW_VOLTAGE
self.resume_motion_timer = time.time()
if gps_tools.is_dual_fix(self.latest_gps_sample) == False:
error_messages.append("No GPS fix so zeroing out autonomy commands.")
zero_output = True
self.control_state = CONTROL_GPS_STARTUP
self.resume_motion_timer = time.time()
elif abs(absolute_path_distance) > self.nav_path.maximum_allowed_distance_meters:
# Distance from path exceeds allowed limit.
zero_output = True
self.resume_motion_timer = time.time()
self.control_state = CONTROL_AUTONOMY_ERROR_DISTANCE
error_messages.append("GPS distance {} meters too far from path so zeroing out autonomy commands.".format(abs(absolute_path_distance)))
elif abs(gps_path_angle_error) > self.nav_path.maximum_allowed_angle_error_degrees:
zero_output = True
self.resume_motion_timer = time.time()
self.control_state = CONTROL_AUTONOMY_ERROR_ANGLE
error_messages.append("GPS path angle {} exceeds allowed limit {} so zeroing out autonomy commands.".format(abs(gps_path_angle_error),self.nav_path.maximum_allowed_angle_error_degrees))
elif self.latest_gps_sample.rtk_age > _ALLOWED_RTK_AGE_SEC:
zero_output = True
self.resume_motion_timer = time.time()
self.control_state = CONTROL_AUTONOMY_ERROR_RTK_AGE
error_messages.append("RTK base station data too old so zeroing out autonomy commands.")
elif time.time() - self.latest_gps_sample.time_stamp > _ALLOWED_SOLUTION_AGE_SEC:
zero_output = True
self.resume_motion_timer = time.time()
self.control_state = CONTROL_AUTONOMY_ERROR_SOLUTION_AGE
error_messages.append("RTK solution too old so zeroing out autonomy commands.")
if time.time() - self.robot_object.last_server_communication_stamp > SERVER_COMMUNICATION_DELAY_LIMIT_SEC:
server_communication_okay = False
zero_output = True
self.resume_motion_timer = time.time()
error_messages.append("Server communication error so zeroing out autonomy commands. Last stamp age {} exceeds allowed age of {} seconds. AP name: {}, Signal Strength {} dbm\r\n".format(time.time() - self.robot_object.last_server_communication_stamp, SERVER_COMMUNICATION_DELAY_LIMIT_SEC, self.robot_object.wifi_ap_name, self.robot_object.wifi_strength))
if loop_count % _ERROR_SKIP_RATE == 0:
for error in error_messages:
self.logger.error(error)
if time.time() > last_wifi_restart_time + 500 and time.time() - self.robot_object.last_server_communication_stamp > _SERVER_DELAY_RECONNECT_WIFI_SECONDS and self.robot_object.last_server_communication_stamp > 0:
self.logger.error("Last Wifi signal strength: {} dbm\r\n".format(self.robot_object.wifi_strength))
self.logger.error("Last Wifi AP associated: {}\r\n".format(self.robot_object.wifi_ap_name))
self.logger.error("Restarting wlan1...")
try:
subprocess.check_call("ifconfig wlan1 down", shell=True)
subprocess.check_call("ifconfig wlan1 up", shell=True)
except:
pass
last_wifi_restart_time = time.time()
self.logger.error("Restarted wlan1.")
if zero_output == True:
if self.activate_autonomy and time.time() - self.disengagement_time > _DISENGAGEMENT_RETRY_DELAY_SEC:
self.disengagement_time = time.time()
self.logger.error("Disengaging Autonomy.")
# Ensure we always print errors if we are deactivating autonomy.
if loop_count % _ERROR_SKIP_RATE != 0:
for error in error_messages:
self.logger.error(error)
self.logger.error("Last Wifi signal strength: {} dbm\r\n".format(self.robot_object.wifi_strength))
self.logger.error("Last Wifi AP associated: {}\r\n".format(self.robot_object.wifi_ap_name))
self.logger.error("Last CPU Temp: {}\r\n".format(self.robot_object.cpu_temperature_c))
with open("error_log.txt", 'a+') as file1:
file1.write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\r\n")
file1.write("Disegagement Log\r\n")
file1.write(datetime.datetime.now().strftime("%a %b %d, %I:%M:%S %p\r\n"))
file1.write("Last Wifi signal strength: {} dbm\r\n".format(self.robot_object.wifi_strength))
file1.write("Last Wifi AP associated: {}\r\n".format(self.robot_object.wifi_ap_name))
file1.write("Last CPU Temp: {}\r\n".format(self.robot_object.cpu_temperature_c))
if self.last_good_gps_sample is not None:
file1.write("Last known GPS location: {}, {}\r\n".format(self.last_good_gps_sample.lat, self.last_good_gps_sample.lon))
else:
file1.write("No valid GPS location recorded.")
error_count = 1
for error in error_messages:
file1.write("Error {}: {}\r\n".format(error_count, error))
error_count += 1
file1.write("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\r\n")
if fatal_error:
self.autonomy_hold = True
self.activate_autonomy = False
elif not self.activate_autonomy:
self.resume_motion_timer = time.time()
self.control_state = CONTROL_ONLINE
time8 = time.time() - debug_time
# Activate a brief pause at the end of a track.
if time.time() - self.load_path_time < _PATH_END_PAUSE_SEC and not zero_output:
zero_output = True
# Don't use the motion timer here as it reactivates alarm.
if loop_count % 10 == 0:
self.logger.info("Taking a short delay at the end of the path so zeroing out autonomy commands.")
if time.time() - self.disengagement_time < _DISENGAGEMENT_RETRY_DELAY_SEC:
zero_output = True
self.resume_motion_timer = time.time()
if loop_count % 10 == 0:
self.logger.info("Disengaged for {:.1f} more seconds so zeroing out autonomy commands.".format(_DISENGAGEMENT_RETRY_DELAY_SEC - (time.time() - self.disengagement_time)))
if self.activate_autonomy == True and zero_output == False and time.time() - self.resume_motion_timer < _RESUME_MOTION_WARNING_TIME_SEC:
zero_output = True
self.alarm1.value = True
self.alarm2.value = False
self.alarm3.value = True
else:
self.alarm1.value = False
self.alarm2.value = False
self.alarm3.value = False
# Determine final drive commands.
if self.activate_autonomy:
autonomy_time_elapsed = time.time() - self.load_path_time - _PATH_END_PAUSE_SEC
if autonomy_time_elapsed < _BEGIN_AUTONOMY_SPEED_RAMP_SEC:
autonomy_vel_cmd*= autonomy_time_elapsed/_BEGIN_AUTONOMY_SPEED_RAMP_SEC
print(autonomy_time_elapsed)
# autonomy_strafe_cmd = 0
self.control_state = CONTROL_AUTONOMY
if zero_output:
self.control_state = CONTROL_AUTONOMY_PAUSE
vel_cmd = 0.0
steer_cmd = 0.0
strafe_cmd = 0
else:
vel_cmd = autonomy_vel_cmd
steer_cmd = autonomy_steer_cmd
strafe_cmd = autonomy_strafe_cmd
else:
if self.voltage_average < _VOLTAGE_CUTOFF:
vel_cmd = 0.0
steer_cmd = 0.0
strafe_cmd = 0
if loop_count % _ERROR_SKIP_RATE == 0:
self.logger.error("LOW VOLTAGE PAUSE. Voltage average: {:.2f}".format(self.voltage_average))
else:
vel_cmd = joy_throttle
steer_cmd = joy_steer
if math.fabs(joy_strafe) < 0.1:
strafe_cmd = 0
else:
strafe_cmd = math.copysign(math.fabs(joy_strafe) - 0.1, joy_strafe)
vel_cmd = vel_cmd * 1.0/(1.0 + abs(steer_cmd)) # Slow Vel down by 50% when steering is at max.
time8b = time.time() - debug_time
# Update master on latest calculations.
send_data = (self.latest_gps_sample,self.nav_path.points,self.next_point_heading, debug_points, self.control_state, self.motor_state, self.autonomy_hold, self.gps_path_lateral_error, self.gps_path_angular_error, self.gps_path_lateral_error_rate, self.gps_path_angular_error_rate, strafe_p, steer_p, strafe_d, steer_d, user_web_page_plot_steer_cmd, user_web_page_plot_strafe_cmd, gps_tools.is_dual_fix(self.latest_gps_sample), self.voltage_average, self.last_energy_segment, self.temperatures)
with self.remote_to_main_lock:
self.remote_to_main_string["value"] = pickle.dumps(send_data)
period = time.time() - tick_time
self.last_energy_segment = None
time9 = time.time() - debug_time
vel_cmd = vel_cmd * 0.6 # Fixed factor reduction
# Perform acceleration on vel_cmd value.
profiled_vel = get_profiled_velocity(last_vel_cmd, vel_cmd, period)
self.logger.debug("last_vel_cmd {}, vel_cmd {}, profiled_vel {}".format(last_vel_cmd, vel_cmd, profiled_vel))
vel_cmd = profiled_vel
last_vel_cmd = vel_cmd
tick_time = time.time()
steering_limit = _STEERING_ANGLE_LIMIT_DEGREES
if len(self.nav_path.points) == 2 and self.activate_autonomy:
steering_limit = _STEERING_ANGLE_LIMIT_AUTONOMY_DEGREES
self.logger.debug("Final values: Steer {}, Vel {}, Strafe {}".format(steer_cmd, vel_cmd, strafe_cmd))
calc = calculate_steering(steer_cmd, vel_cmd, strafe_cmd, steering_limit)
self.logger.debug("Calculated 4ws values: Steer {}, Vel {}".format(calc, vel_cmd))
steering_okay, steering_error_string = compare_steering_values(self.last_calculated_steering, calc)
if not steering_okay:
self.logger.error("{}".format(steering_error_string))
self.logger.error("Final values: Steer {}, Vel {}, Strafe {}".format(steer_cmd, vel_cmd, strafe_cmd))
self.logger.error("old 4ws values: Steer {}".format(self.last_calculated_steering))
self.logger.error("new 4ws values: Steer {}".format(calc))
# if self.activate_autonomy:
# TODO: the following line is a hack for testing. TLA 9/30/2021
# self.load_path_time = time.time() - _PATH_END_PAUSE_SEC
# calc["front_left"] = (calc["front_left"][0], 0.0)
# calc["front_right"] = (calc["front_right"][0], 0.0)
# calc["rear_left"] = (calc["rear_left"][0], 0.0)
# calc["rear_right"] = (calc["rear_right"][0], 0.0)
# calc["front_left"] = (0.6, 0.0)
# calc["front_right"] = (0.0, 0.0)
# calc["rear_left"] = (0.0, 0.0)
# calc["rear_right"] = (0.0, 0.0)
self.last_calculated_steering = calc
# Send calculated values to shared memory, copying in values
# rather than replacing the object.
self.steering_debug[:] = steering_to_numpy(calc)[:]
# if not steering_okay:
# sys.exit()
# If the robot is simulated, estimate movement odometry.
if self.simulated_hardware:
if abs(steer_cmd) > 0.001 or abs(vel_cmd) > 0.001 or abs(strafe_cmd) > 0.001:
steering_multiplier = 0.4
vel_multiplier = 0.5
strafe_multiplier = 0.4
steering_multiplier = 0.4
vel_multiplier = 0.5
strafe_multiplier = 1.0
new_heading_degrees = self.latest_gps_sample.azimuth_degrees + steer_cmd * 45.0 * steering_multiplier * self.driving_direction
new_heading_degrees %= 360
next_point = gps_tools.project_point(self.latest_gps_sample, new_heading_degrees, vel_cmd * vel_multiplier)
# Calculate translation for strafe, which is movement 90 degrees from heading.
next_point = gps_tools.project_point(next_point, new_heading_degrees + 90, strafe_cmd * strafe_multiplier)
rand_dist = random.uniform(-0.05, 0.05)
rand_angle = random.uniform(0, 360.0)
next_point = gps_tools.project_point(next_point, rand_angle, rand_dist)
new_heading_degrees += random.uniform(-3.0, 3.0)
self.simulated_sample = gps_tools.GpsSample(next_point.lat, next_point.lon, self.simulated_sample.height_m, ("fix","fix"), 20, new_heading_degrees, time.time(), 0.5)
if not self.motor_socket:
self.logger.error("Connect to motor control socket")
self.connect_to_motors()
time10 = 0
# Try to send final drive commands to motor process.
try:
if self.motor_send_okay == True:
#print("send_calc")
self.motor_socket.send_pyobj(pickle.dumps(calc), flags=zmq.NOBLOCK)
self.motor_send_okay = False
self.motor_last_send_time = time.time()
time10 = time.time() - debug_time
# else:
# print("NOT OKAY TO SEND")
while self.motor_socket.poll(timeout=_VERY_FAST_POLL_MILLISECONDS):
#print("poll motor message")
motor_message = pickle.loads(self.motor_socket.recv_pyobj())
self.motor_send_okay = True
#print("motor_message: {}".format(motor_message))
try:
self.motor_state = motor_message[0]
self.voltages = motor_message[1]
self.bus_currents = motor_message[2]
self.temperatures = motor_message[3]
self.total_watts = 0
if len(self.voltages) > 0:
self.voltage_average = sum(self.voltages)/len(self.voltages)
for volt, current in zip(self.voltages, self.bus_currents):
self.total_watts += volt * current
#print("Drawing {} Watts.".format(int(self.total_watts)))
except Exception as e:
self.logger.error("Error reading motor state message.")
self.logger.error(e)
self.motor_state = STATE_DISCONNECTED
except zmq.error.Again as e:
self.logger.error("Remote server unreachable.")
except zmq.error.ZMQError as e:
self.logger.error("ZMQ error with motor command socket. Resetting.")
self.motor_state = STATE_DISCONNECTED
self.close_motor_socket()
# If we have a GPS fix, update power consumption metrics.
if gps_tools.is_dual_fix(self.latest_gps_sample):
# Calculate power consumption metrics in 1 meter segments
self.power_consumption_list.append((self.latest_gps_sample, self.total_watts, self.voltages, self.bus_currents))
oldest_power_sample_gps = self.power_consumption_list[0][0]
distance = gps_tools.get_distance(oldest_power_sample_gps, self.latest_gps_sample)
if distance > 1.0:
total_watt_seconds = 0
watt_average = self.power_consumption_list[0][1]
distance_sum = 0
duration = 0
height_change = 0
last_sample_gps = None
motor_total_watt_seconds = [0, 0, 0, 0]
motor_watt_average = [0, 0, 0, 0]
list_subsamples = []
sample_collector_index = 0
for sample_num in range(1, len(self.power_consumption_list)):
sample1 = self.power_consumption_list[sample_num - 1]
sample2 = self.power_consumption_list[sample_num]
if sample1 == None or sample2 == None:
continue
sample_distance = gps_tools.get_distance(sample1[0], sample2[0])
sample_duration = sample2[0].time_stamp - sample1[0].time_stamp
sample_avg_watts = (sample1[1] + sample2[1])/2.0
if sample_num > sample_collector_index:
list_subsamples.append(sample1[0])
if len(self.power_consumption_list) > _NUM_GPS_SUBSAMPLES:
sample_collector_index+=int(len(self.power_consumption_list)/_NUM_GPS_SUBSAMPLES)
else:
sample_collector_index+=1
#print(sample1[2])
try:
for idx in range(len(sample1[2])):
motor_watt_average[idx] = (sample1[2][idx] * sample1[3][idx] + sample2[2][idx] * sample2[3][idx]) * 0.5
motor_total_watt_seconds[idx] = motor_watt_average[idx] * sample_duration
except Exception as e:
self.logger.error(e)
self.logger.error(sample1[2])
self.logger.error(sample1[3])
watt_average += sample2[1]
watt_seconds = sample_avg_watts * sample_duration
total_watt_seconds += watt_seconds
distance_sum += sample_distance
last_sample_gps = sample2[0]
height_change = last_sample_gps.height_m - oldest_power_sample_gps.height_m
avg_watts = (watt_average)/len(self.power_consumption_list)
list_subsamples.append(last_sample_gps)
self.last_energy_segment = EnergySegment(loop_count, oldest_power_sample_gps, last_sample_gps, distance_sum, total_watt_seconds, avg_watts, motor_total_watt_seconds, motor_watt_average, list_subsamples, self.activate_autonomy, self.robot_object.wifi_ap_name, self.robot_object.wifi_strength)
# Reset power consumption list.
self.power_consumption_list = []
self.logger.info(" Avg watts {:.1f}, watt seconds per meter: {:.1f}, meters per second: {:.2f}, height change {:.2f}".format(self.last_energy_segment.avg_watts, self.last_energy_segment.watt_seconds_per_meter, self.last_energy_segment.meters_per_second, self.last_energy_segment.height_change))
if self.simulated_hardware:
time.sleep(_POLL_MILLISECONDS/_MILLISECONDS_PER_SECOND)
# time.sleep(0.1)
self.logger.debug("Took {} sec to get here. {} {} {} {} {} {} {} {} {} {} {} {}".format(time.time()-debug_time, time1, time2, time3, time4, time5, time6, time7, time8, time8b, time9, time10, self.robot_object.wifi_ap_name))
except KeyboardInterrupt:
pass
def run_control(remote_to_main_lock, main_to_remote_lock, remote_to_main_string, main_to_remote_string, logging, logging_details, simulated_hardware=False):
    """Entry point: build a RemoteControl, run its one-time setup, then enter its main loop."""
    controller = RemoteControl(remote_to_main_lock, main_to_remote_lock,
                               remote_to_main_string, main_to_remote_string,
                               logging, logging_details, simulated_hardware)
    controller.run_setup()
    controller.run_loop()
if __name__=="__main__":
    # NOTE(review): run_control() is defined just above with six required
    # positional parameters, so this bare call raises TypeError at startup —
    # confirm whether this direct entry point is ever used.
    run_control()
|
# Import the required libraries.
import numpy as np
from sklearn import datasets, linear_model
import matplotlib.pyplot as plt
################## Prepare the data #################
# Load the Boston housing dataset.
# NOTE(review): datasets.load_boston() was deprecated in scikit-learn 1.0
# and removed in 1.2 — this script requires an older scikit-learn release.
boston = datasets.load_boston()
print(boston)
print()
################## Understanding the data #################
# Inspect the information contained in the dataset.
print('informacion del dataset: ')
print(boston.keys())
print()
# Inspect the characteristics (description) of the dataset.
print('Caracteristicas del dataset: ')
print(boston.DESCR)
print()
# Check how many samples/features the dataset contains.
print('Cantidad de datos: ')
print(boston.data.shape)
print()
# Check the feature (column) names.
print('Nombres columnas: ')
print(boston.feature_names)
print()
################## PREPARE THE DATA FOR SIMPLE LINEAR REGRESSION #################
# Select only column 5 of the dataset (number of rooms, per the axis label below).
X = boston.data[:, np.newaxis, 5]
# Define the data corresponding to the labels (median home value).
y = boston.target
# Plot the raw data.
plt.scatter(X, y)
plt.xlabel('Numero de habitaciones')
plt.ylabel('Valor medio')
#plt.show()
########### SIMPLE LINEAR REGRESSION IMPLEMENTATION #############
from sklearn.model_selection import train_test_split
# Split the data into training and test sets to evaluate the algorithm.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
# Define the algorithm to use.
lr = linear_model.LinearRegression()
# Train the model.
lr.fit(X_train, y_train)
# Make a prediction.
Y_pred = lr.predict(X_test)
# Plot the test data together with the fitted model.
plt.scatter(X_test, y_test)
plt.plot(X_test, Y_pred, color='red', linewidth=3)
plt.title('Regresion Lineal Simple')
plt.xlabel('numero de habitaciones')
plt.ylabel('valor medio')
plt.show()
print()
print('Datos del modelo regresion lineal simple')
print('')
print('valor de la pendiente o coeficient "a"')
print(lr.coef_)
print('varlos de interseccion o coeficiente "b"')
print(lr.intercept_)
print('La ecuacion del modelo es igual a :')
print('y =', lr.coef_, 'x ', lr.intercept_)
print()
print('Precision del modelo es igual a:')
print(lr.score(X_train, y_train))
|
"""Core API viewsets."""
from django.utils.translation import gettext as _
import django_otp
from django_otp.plugins.otp_static.models import StaticDevice, StaticToken
from django_otp.plugins.otp_totp.models import TOTPDevice
from rest_framework import permissions, response, viewsets
from rest_framework.decorators import action
from drf_spectacular.utils import extend_schema
from modoboa.lib.throttle import GetThrottleViewsetMixin
from . import serializers
class AccountViewSet(GetThrottleViewsetMixin, viewsets.ViewSet):
    """Account viewset.
    Contains endpoints used to manipulate current user's account.
    """
    permission_classes = (permissions.IsAuthenticated, )
    serializer_class = None
    @action(methods=["post"], detail=False, url_path="tfa/setup")
    def tfa_setup(self, request):
        """Initiate TFA setup.

        Creates the user's TOTP device if it does not exist yet (idempotent
        via get_or_create).
        NOTE(review): nothing (e.g. a provisioning URI) is returned to the
        client here — presumably it is fetched by another endpoint; confirm.
        """
        instance, created = TOTPDevice.objects.get_or_create(
            user=request.user,
            defaults={"name": "{} TOTP device".format(request.user)}
        )
        return response.Response()
    @extend_schema(
        request=serializers.CheckTFASetupSerializer
    )
    @action(methods=["post"], detail=False, url_path="tfa/setup/check")
    def tfa_setup_check(self, request):
        """Check TFA setup.

        Validates the submitted TOTP data, creates a static device with 10
        one-time recovery tokens, and marks the session as OTP-verified.
        NOTE(review): the generated recovery tokens are not included in the
        response — confirm the client retrieves them elsewhere
        (e.g. tfa_reset_codes).
        """
        serializer = serializers.CheckTFASetupSerializer(
            data=request.data, context={"user": request.user})
        serializer.is_valid(raise_exception=True)
        # create static device for recovery purposes
        device = StaticDevice.objects.create(
            user=request.user,
            name="{} static device".format(request.user)
        )
        # Generate 10 one-time recovery tokens on the static device.
        for cpt in range(10):
            token = StaticToken.random_token()
            device.token_set.create(token=token)
        # Log the user's TOTP device into the session (django_otp).
        django_otp.login(self.request, request.user.totpdevice_set.first())
        return response.Response()
    @action(methods=["post"], detail=False, url_path="tfa/disable")
    def tfa_disable(self, request):
        """Disable TFA.

        Re-validates the password/TFA, then deletes every TOTP and static
        device and clears the user's tfa_enabled flag.
        """
        serializer = serializers.CheckPasswordTFASerializer(
            data=request.data,
            context={
                "user": request.user,
                "remote_addr": request.META["REMOTE_ADDR"]
            }
        )
        serializer.is_valid(raise_exception=True)
        if not request.user.tfa_enabled:
            # We include it as "password" to display the error
            return response.Response({"error": _("2FA is not enabled")},
                                     status=403)
        request.user.totpdevice_set.all().delete()
        request.user.staticdevice_set.all().delete()
        request.user.tfa_enabled = False
        request.user.save()
        return response.Response()
    @extend_schema(tags=['account'])
    @action(methods=["post"], detail=False, url_path="tfa/reset_codes")
    def tfa_reset_codes(self, request, *args, **kwargs):
        """Reset recovery codes.

        Re-validates the password/TFA, replaces all static recovery tokens,
        and returns the new ones to the caller.
        """
        serializer = serializers.CheckPasswordTFASerializer(
            data=request.data,
            context={
                "user": request.user,
                "remote_addr": request.META["REMOTE_ADDR"]
            }
        )
        serializer.is_valid(raise_exception=True)
        device = request.user.staticdevice_set.first()
        if device is None:
            return response.Response({"error": _("2FA is not enabled")},
                                     status=403)
        # Replace every existing token with 10 fresh ones.
        device.token_set.all().delete()
        for cpt in range(10):
            token = StaticToken.random_token()
            device.token_set.create(token=token)
        return response.Response({
            "tokens": device.token_set.all().values_list("token", flat=True)
        })
|
def is_armstrong(num):
    """Return True if *num* is an Armstrong (narcissistic) number.

    A number is an Armstrong number when the sum of each digit raised to
    the power of the digit count equals the number itself, e.g.
    153 = 1**3 + 5**3 + 3**3 and 9474 = 9**4 + 4**4 + 7**4 + 4**4.
    """
    digits = str(num)
    # Bug fix: the exponent is the number of digits, not a hard-coded 3 —
    # the original only handled 3-digit Armstrong numbers correctly.
    return num == sum(int(d) ** len(digits) for d in digits)


if __name__ == "__main__":
    # Guard the interactive part so the module can be imported safely.
    value = int(input("Enter no. :"))
    if is_armstrong(value):
        print("{} is an armstrong no.".format(value))
    else:
        print("{} is not an armstrong no.".format(value))
|
from sklearn import tree

# Training samples: [height, weight, shoe size].
X = [[181, 180, 44], [177, 170, 43], [160, 140, 38], [152, 92, 36], [123, 14, 23],
     [123, 33, 45], [192, 111, 23], [121, 42, 92], [191, 233, 94], [123, 23, 34],
     [181, 85, 43]]
# Gender label for each sample above.
Y = ['female', 'female', 'female', 'female', 'female', 'female', 'male',
     'male', 'male', 'male', 'male']

# Fit a decision tree classifier and predict the gender of a new sample.
clf = tree.DecisionTreeClassifier()
clf = clf.fit(X, Y)
prediction = clf.predict([[183, 143, 38]])
# Bug fix: `print prediction` is Python-2-only statement syntax (a
# SyntaxError under Python 3); the call form works on both versions.
print(prediction)
class Animal(object):
    """A named animal with a health pool.

    walk/run drain health and return self so calls can be chained.
    """

    def __init__(self, name):
        self.name = name
        # Every animal starts with 100 health.
        self.health = 100

    def walk(self, amount=1):
        """Walk *amount* times; each walk costs 1 health. Returns self."""
        self.health -= 1 * amount
        return self

    def run(self, amount=1):
        """Run *amount* times; each run costs 5 health. Returns self."""
        self.health -= 5 * amount
        return self

    def displayHealth(self):
        """Print the animal's name and current health. Returns self."""
        # Parenthesized single-argument print works under both Python 2 and
        # Python 3 (the original used the py2-only statement form).
        print("Animal: {}, Health: {}".format(self.name, self.health))
        # Consistency fix: return self for chaining, matching
        # Dragon.displayHealth elsewhere in this file.
        return self
# Demo: walk 3 times (-3) and run twice (-10), leaving 87 health.
animal= Animal("Lion")
animal.walk(3).run(2).displayHealth()
class Dog(Animal):
    """An Animal with a 50-point health bonus that can be petted."""

    def __init__(self, name):
        # Bug fix: the original called super(...).__init__(self), briefly
        # storing the instance as its own name before re-assigning it; pass
        # the name through to Animal.__init__ instead.
        super(Dog, self).__init__(name)
        self.health += 50

    def pet(self, amount=1):
        """Pet the dog *amount* times; each pet restores 5 health. Returns self."""
        self.health += 5 * amount
        return self
# Demo: Dog starts at 150 (100 base + 50 bonus); walk (-3), run (-10),
# pet (+5), then show the resulting health.
sakila= Dog("Dog")
sakila.walk(3).run(2).pet().displayHealth()
class Dragon(Animal):
    """An Animal with a 70-point health bonus that can fly."""

    def __init__(self, name):
        # Bug fix: pass the name to Animal.__init__ instead of `self`
        # (the original transiently stored the instance as its own name).
        super(Dragon, self).__init__(name)
        self.health += 70

    def fly(self, amount=1):
        """Fly *amount* times; each flight costs 10 health. Returns self."""
        self.health -= 10 * amount
        return self

    def displayHealth(self):
        """Print a species banner, then the standard health line. Returns self."""
        # Parenthesized single-argument print is valid in both py2 and py3.
        print("This is a Dragon!!")
        super(Dragon, self).displayHealth()
        return self
# Demo: Dragon starts at 170 (100 base + 70 bonus); walk (-3), run (-10),
# fly twice (-20), then report.
drogon = Dragon("Drogon")
drogon.walk(3).run(2).fly(2).displayHealth()
|
# -*- coding: utf-8 -*-
import sqlite3 as sl
import sys
from Singleton import *
@singleton
class DbUtils:
    """
    Class that manages the database connection:
    - creates / rebuilds the database schema
    - wraps queries
    """

    def __init__(self):
        """Constructor — initialise the connection attribute with None."""
        self.con = None

    def __del__(self):
        """Destructor — close the database connection, if one exists."""
        if self.con:
            self.con.close()

    def get_connection(self):
        """
        Singleton accessor for the database connection.
        Returns the current connection, creating it first if none exists yet.
        """
        try:
            if not self.con:
                # No connection yet — open the SQLite database file.
                self.con = sl.connect('ttr_mediacenter.db')
            return self.con
        except sl.Error as e:
            # Bug fix: `except sl.Error, e:` is Python-2-only syntax; the
            # `as` form (and parenthesized print) works on py2.6+ and py3.
            print("Error %s:" % e.args[0])
            sys.exit(1)

    def create_database(self):
        """Drop and (re)create all database tables."""
        con = self.get_connection()
        cur = con.cursor()
        # Create the tables, dropping them first if they already exist.
        self.createTableFilme(cur)
        self.createTableGenres(cur)
        self.createTableFileTypes(cur)
        # Commit the statements to the database.
        con.commit()

    # Create the FileTypes table and fill it with default values.
    def createTableFileTypes(self, cur):
        cur.execute("DROP TABLE IF EXISTS FileTypes")
        cur.execute("CREATE TABLE FileTypes("
                    "db_id INTEGER PRIMARY KEY AUTOINCREMENT, "
                    "name Text NOT NULL,"
                    "extension Text NOT NULL UNIQUE"
                    ")"
                    )
        # Insert the default file types.
        filetypes = [('AVI', '.avi'),
                     ('MPEG', '.mpeg'),
                     ('MPEG', '.mpg'),
                     ('Windows Media Video', '.wmv'),
                     ('Flash Video', '.flv'),
                     ('QuickTime File Format', '.mov')
                     ]
        cur.executemany("INSERT INTO FileTypes(name, extension) VALUES(?, ?)", filetypes)

    # Create the Genres table and fill it with default values.
    def createTableGenres(self, cur):
        cur.execute("DROP TABLE IF EXISTS Genres")
        cur.execute("CREATE TABLE Genres("
                    "db_id INTEGER PRIMARY KEY AUTOINCREMENT, "
                    "name Text NOT NULL UNIQUE "
                    ")"
                    )
        # Insert the default genres.
        genres = [('Horror',),
                  ('Komoedie',),
                  ('Science Fiction',),
                  ('Dokumentation',),
                  ('Action',)
                  ]
        cur.executemany("INSERT INTO Genres(name) VALUES(?)", genres)

    # Create the Filme (movies) table.
    def createTableFilme(self, cur):
        cur.execute("DROP TABLE IF EXISTS Filme")
        cur.execute("CREATE TABLE Filme("
                    "db_id INTEGER PRIMARY KEY AUTOINCREMENT, "
                    "titel Text NOT NULL, "
                    "pfad Text NOT NULL, "
                    "filename Text NOT NULL, "
                    "checksum Text NOT NULL, "
                    "genre INTEGER, "
                    "filetype INTEGER, "
                    "UNIQUE (pfad, filename)"
                    ")"
                    )
        # Index used when searching for an existing file on insert.
        cur.execute("CREATE INDEX index_pfad ON Filme(pfad, filename)")
|
#
# Copyright © 2021 Uncharted Software Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
from typing import Dict, Optional
import torch
from d3m import container, utils
from d3m.metadata import base as metadata_base, hyperparams, params
from d3m.primitive_interfaces import base
from d3m.primitive_interfaces.supervised_learning import PrimitiveBase
from distil.modeling.bert_models import BERTPairClassification
from distil.utils import CYTHON_DEP
import version
# Bug fix: the export list was misspelled `_all__` (a plain variable with no
# special meaning to Python) and named a class that does not exist in this
# module; `__all__` must list the primitive class actually defined below.
__all__ = ("BertPairClassificationPrimitive",)

logger = logging.getLogger(__name__)
class Hyperparams(hyperparams.Hyperparams):
    """Hyperparameter declarations for the BERT pair-classification primitive.

    Control parameters select the two document columns and force CPU use;
    tuning parameters cover batch size, epoch count, and learning rate.
    """
    doc_col_0 = hyperparams.Hyperparameter[int](
        default=0,
        semantic_types=[
            "https://metadata.datadrivendiscovery.org/types/ControlParameter"
        ],
        description="The index of the column containing the first documents in the classification pairs.",
    )
    doc_col_1 = hyperparams.Hyperparameter[int](
        default=1,
        semantic_types=[
            "https://metadata.datadrivendiscovery.org/types/ControlParameter"
        ],
        description="The index of the column containing the second documents in the classification pairs.",
    )
    force_cpu = hyperparams.Hyperparameter[bool](
        default=False,
        semantic_types=[
            "https://metadata.datadrivendiscovery.org/types/ControlParameter"
        ],
        description="Force CPU execution regardless of GPU availability.",
    )
    batch_size = hyperparams.Hyperparameter[int](
        default=32,
        semantic_types=[
            "https://metadata.datadrivendiscovery.org/types/TuningParameter"
        ],
        description="Number of samples to load in each training batch.",
    )
    epochs = hyperparams.Hyperparameter[int](
        default=3,
        semantic_types=[
            "https://metadata.datadrivendiscovery.org/types/TuningParameter"
        ],
        description="The number of passes to make over the training set.",
    )
    learning_rate = hyperparams.Hyperparameter[float](
        default=5e-5,
        semantic_types=[
            "https://metadata.datadrivendiscovery.org/types/TuningParameter"
        ],
        description="The change in the model in reponse to estimated error.",
    )
class Params(params.Params):
    """Fitted state persisted between get_params() and set_params()."""
    model: BERTPairClassification  # trained pair-classification model
    target_col: str  # name of the target column used for predictions
class BertPairClassificationPrimitive(
PrimitiveBase[container.DataFrame, container.DataFrame, Params, Hyperparams]
):
"""
Uses a pre-trained pytorch BERT model to predict a label of 0 or 1 for a pair of documents, given training samples
of document pairs labelled 0/1. Takes a datrame of documents and a dataframe of labels as inputs, and returns
a dataframe containing the predictions as a result.
"""
metadata = metadata_base.PrimitiveMetadata(
{
"id": "7c305f3a-442a-41ad-b9db-8c437753b119",
"version": version.__version__,
"name": "BERT pair classification",
"python_path": "d3m.primitives.classification.bert_classifier.DistilBertPairClassification",
"source": {
"name": "Distil",
"contact": "mailto:cbethune@uncharted.software",
"uris": [
"https://github.com/uncharted-distil/distil-primitives/blob/main/distil/primitives/bert_classifier.py",
"https://github.com/uncharted-distil/distil-primitives",
],
},
"installation": [
CYTHON_DEP,
{
"type": metadata_base.PrimitiveInstallationType.PIP,
"package_uri": "git+https://github.com/uncharted-distil/distil-primitives.git@{git_commit}#egg=distil-primitives".format(
git_commit=utils.current_git_commit(os.path.dirname(__file__)),
),
},
{
"type": "FILE",
"key": "bert-base-uncased-model",
"file_uri": "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased.tar.gz",
"file_digest": "57f8763c92909d8ab1b0d2a059d27c9259cf3f2ca50f7683edfa11aee1992a59",
},
{
"type": "FILE",
"key": "bert-base-uncased-vocab",
"file_uri": "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased-vocab.txt",
"file_digest": "07eced375cec144d27c900241f3e339478dec958f92fddbc551f295c992038a3",
},
],
"algorithm_types": [
metadata_base.PrimitiveAlgorithmType.BERT,
],
"primitive_family": metadata_base.PrimitiveFamily.CLASSIFICATION,
},
)
def __init__(
self,
*,
hyperparams: Hyperparams,
random_seed: int = 0,
volumes: Dict[str, str] = None,
) -> None:
super().__init__(
hyperparams=hyperparams, random_seed=random_seed, volumes=volumes
)
self._volumes = volumes
self._model: Optional[BERTPairClassification] = None
self._target_col: str = ""
def set_training_data(
self, *, inputs: container.DataFrame, outputs: container.DataFrame
) -> None:
self._inputs = inputs
self._outputs = outputs
self._target_col = self._outputs.columns[0]
def fit(
self, *, timeout: float = None, iterations: int = None
) -> base.CallResult[None]:
logger.debug(f"Fitting {__name__}")
# lazy init because we needed data to be set
if not self._model:
columns = (
self._inputs.columns[self.hyperparams["doc_col_0"]],
self._inputs.columns[self.hyperparams["doc_col_1"]],
)
if torch.cuda.is_available():
if self.hyperparams["force_cpu"]:
logger.info("Detected CUDA support - forcing use of CPU")
device = "cpu"
else:
logger.info("Detected CUDA support - using GPU")
device = "cuda"
else:
logger.info("CUDA does not appear to be supported - using CPU.")
device = "cpu"
if self._volumes:
model_path = self._volumes["bert-base-uncased-model"]
vocab_path = self._volumes["bert-base-uncased-vocab"]
else:
raise ValueError(
"No volumes supplied for primitive - static models cannot be loaded."
)
self._model = BERTPairClassification(
model_path=model_path,
vocab_path=vocab_path,
device=device,
columns=columns,
epochs=self.hyperparams["epochs"],
batch_size=self.hyperparams["batch_size"],
learning_rate=self.hyperparams["learning_rate"],
)
self._model.fit(self._inputs, self._outputs)
return base.CallResult(None)
def produce(
self,
*,
inputs: container.DataFrame,
timeout: float = None,
iterations: int = None,
) -> base.CallResult[container.DataFrame]:
logger.debug(f"Producing {__name__}")
inputs = inputs
# create dataframe to hold result
if self._model is None:
raise ValueError("No model available for primitive")
result = self._model.predict(inputs)
# use the original saved target column name
result_df = container.DataFrame(
{self._target_col: result}, generate_metadata=True
)
# mark the semantic types on the dataframe
result_df.metadata = result_df.metadata.add_semantic_type(
(metadata_base.ALL_ELEMENTS, 0),
"https://metadata.datadrivendiscovery.org/types/PredictedTarget",
)
logger.debug(f"\n{result_df}")
print(result_df)
return base.CallResult(result_df)
def get_params(self) -> Params:
    """Return the fitted model and saved target column name as pickleable params."""
    return Params(model=self._model, target_col=self._target_col)
def set_params(self, *, params: Params) -> None:
self._model = params["model"]
self._target_col = params["target_col"]
return
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-10-01 11:14
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: alter Election.suggested_status choices/default."""

    dependencies = [("elections", "0043_auto_20180720_1631")]
    operations = [
        migrations.AlterField(
            model_name="election",
            name="suggested_status",
            field=models.CharField(
                choices=[
                    ("suggested", "Suggested"),
                    ("rejected", "Rejected"),
                    ("approved", "Approved"),
                    ("deleted", "Deleted"),
                ],
                default="suggested",
                max_length=255,
            ),
        )
    ]
|
# coding=utf-8
import cPickle
import os.path
def loadOrDefault(path, defalut):
    '''
    Load a pickled object from *path*, or return a fallback value.

    @parameter path: file path to read
    @parameter defalut: value returned when the file does not exist.
        NOTE(review): parameter name is a typo for "default" but is kept
        as-is because renaming would break keyword-argument callers.
    '''
    if os.path.isfile(path):
        with open(path, 'rb') as f:
            return cPickle.load(f)
    return defalut
def load(path):
    '''
    Load and return the pickled object stored at *path*.

    @parameter path: file path to read (raises IOError if missing)
    '''
    with open(path, 'rb') as f:
        return cPickle.load(f)
def dump(path, obj):
    '''
    Pickle *obj* to the file at *path*, overwriting any existing content.

    @parameter path: destination file path
    @parameter obj: the object to serialize
    '''
    with open(path, 'wb') as f:
        return cPickle.dump(obj, f)
|
# coding=utf-8
"""
复杂链表的复制
"""
class ComplexListNode(object):
    """Linked-list node carrying a value, a next pointer and a sibling pointer."""

    def __init__(self, value, next=None, sibling=None):
        # sibling may point at any node in the list (or nowhere).
        self.sibling = sibling
        self.next = next
        self.value = value
def clone_nodes(head):
    """Step 1 of the copy: insert a duplicate of every node right after it."""
    node = head
    while node:
        # The constructor wires the duplicate's next pointer in one go.
        duplicate = ComplexListNode(node.value, node.next)
        node.next = duplicate
        node = duplicate.next
def connect_sibling_nodes(head):
    """Step 2 of the copy: point each duplicate's sibling at the duplicate of
    its original's sibling (duplicates sit directly after their originals)."""
    node = head
    while node:
        mirror = node.next
        mirror.sibling = node.sibling.next if node.sibling else None
        node = mirror.next
def reconnect_nodes(head):
    """
    Step 3 of the copy: split the interleaved list produced by the previous
    two steps into two lists — nodes at odd positions re-form the original
    list, nodes at even positions form the cloned list. Returns the clone's
    head (or the input if it is falsy).
    """
    if not head:
        return head
    clone_head = head.next
    origin_current_node = head
    clone_current_node = head.next
    while origin_current_node:
        # Detach the clone that follows the current original node.
        origin_current_node.next = clone_current_node.next
        if origin_current_node.next:
            origin_current_node = origin_current_node.next
            clone_current_node.next = origin_current_node.next
            clone_current_node = clone_current_node.next
        else:
            # End of list: terminate both chains.
            origin_current_node = None
            clone_current_node.next = None
    return clone_head
def clone(head):
    """Deep-copy a complex linked list in O(n) time without extra hash storage:
    interleave duplicates, wire sibling pointers, then split the two lists."""
    clone_nodes(head)
    connect_sibling_nodes(head)
    return reconnect_nodes(head)
if __name__ == '__main__':
    # Demo: 1 -> 2 -> 3 -> 4, with sibling links 1->4 and 3->2.
    node1 = ComplexListNode(1)
    node2 = ComplexListNode(2)
    node3 = ComplexListNode(3)
    node4 = ComplexListNode(4)
    node1.next = node2
    node2.next = node3
    node3.next = node4
    node1.sibling = node4
    node3.sibling = node2
    a = clone(node1)
    # print() call form works on both Python 2 and 3 (was py2-only "print a").
    print(a)
|
def covert_to_float_and_sum_up(list):
    """Sum the elements of *list*, converting any string elements to float first."""
    return sum(float(item) if isinstance(item, str) else item for item in list)


print(covert_to_float_and_sum_up(['2.3','5.1','1.3']))
from pico2d import *
import random
import game_framework
import game_world
import ui
from player import Player
from missile import Missile
from background import Background
from item import *
from highscore import Highscore
# Game state machine values.
# NOTE(review): "GAMESTETE_GAMEOVER" is a typo, kept as-is because it is
# referenced throughout this module.
GAMESTATE_READY, GAMESTATE_INPLAY, GAMESTATE_PAUSED, GAMESTETE_GAMEOVER = range(4)
# Baseline number of missiles kept alive on screen.
BULLETS_AT_START = 10
class Life:
    """HUD widget that draws the player's remaining lives as a row of hearts."""

    red = None    # shared filled-heart sprite (lazily loaded once)
    white = None  # shared empty-heart sprite (lazily loaded once)
    LIFE_AT_START = 5

    def __init__(self):
        # Lazy-load the shared sprites on first instantiation.
        # (was "== None"; identity comparison with None is the correct idiom)
        if Life.red is None:
            Life.white = load_image('heart_white.png')
            Life.red = load_image('heart_red.png')

    def draw(self, life):
        """Draw LIFE_AT_START hearts top-right; the first *life* of them filled."""
        x, y = get_canvas_width() - 50, get_canvas_height() - 50
        for i in range(Life.LIFE_AT_START):
            heart = Life.red if i < life else Life.white
            heart.draw(x, y)
            x -= 50
# Module-level game state shared by the functions below (set up in enter()).
player = None         # the Player instance
life = None           # Life HUD widget
scoreLabel = None     # ui.Label showing the current score
highscore = None      # Highscore table
gameOverImage = None  # "game over" splash image
music_bg = None       # background music stream
wav_bomb = None       # explosion sound effect
wav_item = None       # item pickup sound effect
gameState = GAMESTATE_READY  # current state-machine value
def enter():
    """game_framework state entry: build the world, UI, audio, then ready the game."""
    global player, life, scoreLabel
    bg = Background()
    game_world.add_object(bg, game_world.layer_bg)
    player = Player()
    game_world.add_object(player, game_world.layer_player)
    life = Life()
    # The background scrolls relative to the player.
    bg.target = player
    label = ui.Label("Score: 0", 35, get_canvas_height() - 55, 45, ui.FONT_2)
    label.color = (255, 127, 127)
    ui.labels.append(label)
    scoreLabel = label
    global highscore
    highscore = Highscore()
    global music_bg, wav_bomb, wav_item
    music_bg = load_music('background.mp3')
    wav_bomb = load_wav('explosion.wav')
    wav_item = load_wav('item.wav')
    # Let the world module query our pause state.
    game_world.isPaused = isPaused
    ready_game()
    global gameOverImage
    gameOverImage = load_image('game_over.png')
def start_game():
    """Switch to in-play state and start looping the background music."""
    global gameState
    gameState = GAMESTATE_INPLAY
    global music_bg
    music_bg.set_volume(64)
    music_bg.repeat_play()
def ready_game():
    """Reset to the ready state: clear obstacles/items and restore player life."""
    global gameState
    gameState = GAMESTATE_READY
    game_world.remove_objects_at_layer(game_world.layer_obstacle)
    game_world.remove_objects_at_layer(game_world.layer_item)
    player.init(Life.LIFE_AT_START)
    update_score()
def end_game():
    """Switch to game-over state, record the score, and stop the music."""
    global gameState, player, highscore
    gameState = GAMESTETE_GAMEOVER
    highscore.add(Highscore.Entry(player.score))
    global music_bg
    music_bg.stop()
def isPaused():
    """Return True when gameplay should be frozen (any state except in-play)."""
    global gameState
    return gameState != GAMESTATE_INPLAY
def createMissle():
    """Spawn one missile at a random screen edge with a random size (20-60).

    NOTE(review): name is a typo for "createMissile"; kept because update()
    calls it by this name.
    """
    m = Missile(*gen_random(), random.randint(20, 60))
    game_world.add_object(m, game_world.layer_obstacle)
def collides_distance(a, b):
    """Circle-vs-circle overlap test on objects exposing x, y and size
    (size is treated as a diameter). Compares squared distances to avoid sqrt."""
    gap_x = a.x - b.x
    gap_y = a.y - b.y
    reach = a.size / 2 + b.size / 2
    return gap_x * gap_x + gap_y * gap_y < reach * reach
def gen_random():
    """Return (x, y, dx, dy): a random spawn point on one screen edge with a
    velocity pointing inward, scaled up as the player's score grows."""
    global player
    field_width = get_canvas_width()
    field_height = get_canvas_height()
    # Raw direction components in roughly [-1, 0.5) / [0, 0.5) before edge fixup.
    dx, dy = random.random(), random.random()
    if (dx < 0.5): dx -= 1
    if (dy < 0.5): dy -= 1
    side = random.randint(1, 4)  # 1=top, 2=left, 3=bottom, 4=right
    if (side == 1):  # top
        x, y = random.randint(0, field_width), 0
        if (dy < 0): dy = -dy
    if (side == 2):  # left
        x, y = 0, random.randint(0, field_height)
        if (dx < 0): dx = -dx
    if (side == 3):  # bottom
        x, y = random.randint(0, field_width), field_height
        if (dy > 0): dy = -dy
    if (side == 4):  # right
        x, y = field_width, random.randint(0, field_height)
        if (dx > 0): dx = -dx
    # Difficulty scaling: speed grows with score.
    speed = 1 + player.score / 60
    dx, dy = dx * speed, dy * speed
    return x, y, dx, dy
def draw():
    """Render one frame: world, UI, life hearts, and the game-over overlay."""
    clear_canvas()
    game_world.draw()
    ui.draw()
    global player
    life.draw(player.life)
    global gameState, gameOverImage
    if gameState == GAMESTETE_GAMEOVER:
        gameOverImage.draw(get_canvas_width() / 2, get_canvas_height() / 2)
        highscore.draw()
    update_canvas()
def update():
    """Per-frame logic: spawn items/missiles, resolve collisions, advance score."""
    global player, gameState, wav_bomb, wav_item
    ui.update()
    game_world.update()
    if gameState == GAMESTATE_INPLAY:
        # ~1% chance per frame to spawn an item; 50/50 heart vs coin.
        if random.random() < 0.01:
            if (random.random() < 0.5):
                item = Item(*gen_random())
            else:
                item = CoinItem(*gen_random())
            game_world.add_object(item, game_world.layer_item)
            print("Items:", game_world.count_at_layer(game_world.layer_item))
        # Missile hits cost a life; the fatal one ends the game.
        for m in game_world.objects_at_layer(game_world.layer_obstacle):
            collides = collides_distance(player, m)
            if collides:
                wav_bomb.play()
                player.life -= 1
                print("Player Life = ", player.life)
                if player.life > 0:
                    game_world.remove_object(m)
                else:
                    end_game()
                break
        # Item pickups heal when below max life, otherwise award score.
        for m in game_world.objects_at_layer(game_world.layer_item):
            collides = collides_distance(player, m)
            if collides:
                wav_item.play()
                game_world.remove_object(m)
                if player.life < Life.LIFE_AT_START:
                    player.life += 1
                else:
                    player.score += m.score
                break
        # Score grows with survival time.
        player.score += game_framework.frame_time
        update_score()
        obstacle_count = game_world.count_at_layer(game_world.layer_obstacle)
        # print(obstacle_count)
        # Difficulty scaling: keep more missiles alive as the score rises.
        if obstacle_count < BULLETS_AT_START + player.score // 10:
            # print("Missiles:", (obstacle_count + 1))
            createMissle()
    # Frame pacing. NOTE(review): a fixed delay caps the frame rate — confirm intended.
    delay(0.03)
    # print()
def update_score():
    """Refresh the score label text from the player's current score."""
    global player, scoreLabel
    # Renamed local from "str", which shadowed the builtin.
    text = "Score: {:4.1f}".format(player.score)
    scoreLabel.text = text
def toggle_paused():
    """Handle the pause key: restart after game over, pause during play
    (with a 2-point penalty, floored at 0), otherwise resume."""
    global player, gameState
    if gameState == GAMESTETE_GAMEOVER:
        ready_game()
    elif gameState == GAMESTATE_INPLAY:
        gameState = GAMESTATE_PAUSED
        # Pausing costs 2 points so it cannot be abused.
        player.score -= 2.0
        if player.score < 0:
            player.score = 0
        update_score()
    else:
        gameState = GAMESTATE_INPLAY
def handle_events():
    """Dispatch SDL events: quit/escape/space, mouse pause, then player and UI."""
    global player, gameState
    events = get_events()
    for e in events:
        if e.type == SDL_QUIT:
            game_framework.quit()
        elif (e.type, e.key) == (SDL_KEYDOWN, SDLK_ESCAPE):
            game_framework.pop_state()
        elif (e.type, e.key) == (SDL_KEYDOWN, SDLK_SPACE):
            toggle_paused()
        elif e.type == SDL_MOUSEBUTTONDOWN:
            # Under mouse control, a click toggles pause and consumes the event.
            if player.mouse_control:
                toggle_paused()
                return
        # Any input the player consumes also starts/resumes the game.
        handled = player.handle_event(e)
        if handled:
            if gameState == GAMESTATE_READY:
                start_game()
            elif gameState == GAMESTATE_PAUSED:
                gameState = GAMESTATE_INPLAY
        ui.handle_event(e)
def exit():
    """game_framework state exit: tear down the world and release audio/HUD.

    NOTE(review): shadows the builtin exit(); kept because the framework
    looks up this name on the state module.
    """
    game_world.clear()
    global music_bg, wav_bomb, wav_item
    del(music_bg)
    del(wav_bomb)
    del(wav_item)
    global life, highscore
    del(life)
    del(highscore)
if __name__ == '__main__':
    # Standalone entry point: run this module as the single framework state.
    import sys
    current_module = sys.modules[__name__]
    open_canvas()
    game_framework.run(current_module)
    close_canvas()
|
import json
import statistics

# Summarize temperature and humidity readings: min, max, median and mean.
#
# BUGFIX: the original median computation filled a fixed-size list with the
# same reading's value inside a nested loop, so the "median" was just the
# last reading. The original min also started from a 999 sentinel, which
# would be wrong if every reading exceeded 999. Both are fixed by collecting
# the raw value lists once and using min/max/statistics directly.

# Load data from internal json file
with open('sensor_data.json') as f:
    sensor_data = json.load(f)

temperatures = [reading['temperature'] for reading in sensor_data['array']]
humidities = [reading['humidity'] for reading in sensor_data['array']]

sensor_value = {
    'temperature': {
        'min': min(temperatures),
        'max': max(temperatures),
        'med': statistics.median(temperatures),
        'avg': sum(temperatures) / len(temperatures),
    },
    'humidity': {
        'min': min(humidities),
        'max': max(humidities),
        'med': statistics.median(humidities),
        'avg': sum(humidities) / len(humidities),
    }
}

# Writing temperature and humidity data to endpoint
with open('temperature_and_humidity_summary_data.json', 'w') as outfile:
    json.dump(sensor_value, outfile, indent = 2)
# creates a dash (html based) visualization of chosen funds growth
#
import json
import pandas as pd
import numpy as np
import dash
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output, State
# import funds_rates dict and number of units in possesion
# NOTE(review): absolute Windows paths tie this script to one machine.
with open("C:\\Users\\Pawel\\Documents\\Github\\projects\\personal_invest_dash\\data\\funds_rates.json", 'r', encoding='utf-8') as f_saved:
    funds_rates = json.load(f_saved)
with open("C:\\Users\\Pawel\\Documents\\Github\\projects\\personal_invest_dash\\data\\P_units.json", 'r', encoding='utf-8') as f:
    P_units = json.load(f)

# some key names are parts of other key names, therefore, I cannot use 'a in b' cause it might give me same element multiple times,
# instead I remove parts of key names after '(' and use 'a == b'
funds_rates2 = {}
for key in funds_rates:
    new_key = key.split("(")[0].strip()
    funds_rates2[new_key] = funds_rates[key]

# get values of how much money units are worth
# (per fund, per date: units held * unit rate)
dict1 = {}
for i in P_units:
    for j in funds_rates2:
        if i == j:
            dict2 = {}
            for k in funds_rates2[j]:
                dict2[k] = P_units[i]*funds_rates2[j][k]
            dict1[j] = dict2

# Rows are dates (sorted), columns are funds.
df = pd.DataFrame(dict1)
df = df.sort_index(axis = 0)
# fill empty cells in dataFrame by interpolated values
df_interp = (df.interpolate(method='linear')).interpolate(method='linear', limit_direction='backward')
# sum of all my funds
df_sum = df_interp.sum(axis = 1)
def first_last_full(data_frame, first_last):
    """Return the iloc index of the first fully populated row of *data_frame*,
    scanning from the chosen end.

    Args:
        data_frame: pandas DataFrame to scan.
        first_last: scan direction — ``1`` scans forward from the first row
            (returns a non-negative index), ``-1`` scans backward from the
            last row (returns a negative index).

    Returns:
        int: iloc-style index of the first row with no NaN values found.

    Raises:
        ValueError: if *first_last* is neither 1 nor -1 (previously this left
            ``idx`` undefined and failed later with a NameError).
    """
    if first_last == -1:
        idx = -1
    elif first_last == 1:
        idx = 0
    else:
        raise ValueError("first_last must be 1 or -1, got %r" % (first_last,))
    all_nan = True
    while all_nan == True:
        last_full = idx
        all_nan = pd.isna(data_frame.iloc[idx]).any()
        idx = idx + first_last
    return last_full
# Index (from the end) of the most recent date with every fund's value present.
last_full_idx = first_last_full(df, -1)

# Dark theme palette shared by the layout and both plots.
colors = {
    'bg': '#111111',
    'bg2': '#1a1a1a',
    'text': '#A9A9A9',
    'bg-tran': 'rgba(0,0,0,0)'
}
app = dash.Dash(__name__)

# Layout: a two-column table. Left cell: fund multi-select, "Select all"
# button, growth plot and date-range slider. Right cell: date picker and
# portfolio-share pie chart.
app.layout = html.Div(
    style = {'backgroundColor': colors['bg'],
             },
    children = [
        html.Table([
            html.Tr([
                html.Td([
                    html.Div([
                        dcc.Dropdown(
                            id = 'drop-myfunds',
                            options = [
                                {'label': i, 'value': i} for i in df.columns
                            ],
                            multi = True,
                            value = df.columns[0]
                        ),
                        html.Button('Select all', id = 'sa-button'),
                        dcc.Graph(
                            id = 'plot-myfunds',
                        ),
                        dcc.RangeSlider(
                            id = 'data-slider',
                            updatemode = 'mouseup',
                            min = 0,
                            max = len(df.index)-1,
                            value = [0, len(df.index)-1],
                            # One mark per date, keyed by positional index.
                            marks = {i: {'label': date}
                                     for i, date in enumerate(df.index)
                                     }
                        )
                    ],
                    style = {
                        #'width': '48%',
                        #'padding': 10,
                        #'display': 'inline-block',
                        'backgroundColor': colors['bg2']
                    }
                    )], style = {'width': '60%', 'padding': 5}),
                html.Td([
                    html.Div([
                        dcc.DatePickerSingle(
                            id = 'pick-date',
                            date = df.index[last_full_idx],
                            display_format = 'DD/MM/YYYY',
                            min_date_allowed = df.index[1],
                            max_date_allowed = df.index[last_full_idx],
                        ),
                        dcc.Graph(
                            id = 'my-shares',
                        ),
                    ],
                    style = {
                        #'width': '48%',
                        #'padding': 10,
                        #'display': 'inline-block',
                        'backgroundColor': colors['bg2']
                    }
                    )
                ], style = {'padding': 5})
            ])])])
# callback for the 'select all' button
@app.callback(
    Output('drop-myfunds', 'value'),
    [Input('sa-button', 'n_clicks')],
    state=[State('drop-myfunds', 'value')])
def select_all(btn, btn2):
    """Set the fund dropdown to every available fund. The click count and the
    current selection are received but intentionally ignored."""
    return df.columns
# callback to update 'my-funds' plot based on chosen funds and time range
@app.callback(
    Output('plot-myfunds', 'figure'),
    [Input('drop-myfunds', 'value'),
     Input('data-slider', 'value')])
def update_myfunds_plot(funds, f_date):
    """Build the growth figure: % change of each selected fund (and of the
    whole portfolio) relative to the start of the [f_date[0], f_date[1]] range."""
    data = []
    for fund in funds:
        # change of the fund unit value compared to chosen initial date
        y_change = (df[fund][f_date[0]:f_date[1]+1]-df[fund][f_date[0]])/df[fund][f_date[0]]*100
        data.append(dict(
            x = df.index[f_date[0]:f_date[1]+1],
            y = np.round(y_change,2),
            name = fund,
        ))
    # change of the sum of my funds compared to chosen initial date
    y_sum_change = (df_sum[f_date[0]:f_date[1]+1]-df_sum[f_date[0]])/df_sum[f_date[0]]*100
    data.append(dict(
        x = df.index[f_date[0]:f_date[1]+1],
        y = np.round(y_sum_change,2),
        name = 'Sum',
        mode = 'lines+markers',
        line = {'width': 5, 'color': 'white'},
    ))
    return {
        'data': data,
        'layout': {
            'yaxis': {
                'title': {'text':'Change [%]'}},
            'legend': {
                'orientation': 'h'
            },
            'paper_bgcolor': colors['bg-tran'],
            'plot_bgcolor': colors['bg-tran'],
            'font':{
                'color': colors['text']}
        }
    }
# callback to update the pie chart
@app.callback(
    Output('my-shares', 'figure'),
    [Input('pick-date', 'date')])
def update_pie(date):
    """Build the donut chart of each fund's value share on the picked date."""
    return {
        'data': [
            dict(
                values = np.around(df.loc[date].values),
                labels = df.columns,
                type = 'pie',
                hole = .3,
                hoverinfo = 'label+value'
            )
        ],
        'layout': {
            'legend': {'orientation': 'h'},
            'paper_bgcolor': colors['bg-tran'],
            'plot_bgcolor': colors['bg-tran'],
            'font':{
                'color': colors['text']
            },
        }
    }
if __name__ == '__main__':
    # Serve the dashboard locally; debug mode off for normal use.
    app.run_server(debug=False)
'''
Created on 2017-5-18
@author: Alex Wang
'''
def devide(a, b):
    """Return a / b; on any failure, print the error message and return None."""
    try:
        result = a / b
    except Exception as exc:
        print(str(exc))
        return None
    return result
def test_none():
    """Demonstrate devide()'s None-on-error contract for two sample calls."""
    ok = devide(0, 2)
    if ok is None:  # 0/2 succeeds, so this branch never runs
        print(ok)
        print("devide(0,2) is None")
    if devide(2, 0) is None:
        print("devide(2,0) is None")
def arg_num_wunknow(*args, **kwargs):
    """Print every positional argument, then every keyword argument (whose
    values must be strings, since they are concatenated)."""
    for value in args:
        print("arg:" + str(value))
    for name, value in kwargs.items():
        print("kwargs:" + name + "\t" + value)
def test_arguments(name, age, school=None):
"""
use tuple and dict as arguments
"""
print('name:{}, age:{}, school:{}'.format(name, age, school))
if __name__ == "__main__":
test_none()
if 0 is None:
print("0 is None")
arg_num_wunknow(45, 23, "abc", karg_one="one", karg_two="two")
# use tuple and dict as arguments
tuple_args_1 = ('Alex Wang', 18, 'SDU')
tuple_args_2 = ('Alex Wang', 19)
dict_args = {'name':'Alex Wang', 'age':20}
test_arguments(*tuple_args_1)
test_arguments(*tuple_args_2)
test_arguments(**dict_args)
|
# Module-level list of recipes; empty at import time.
recipes=[]
|
from .custom_driver import client, use_browser
import time
from random import randint
from .utils import log
from .village import open_village, open_city, open_building
from .farming import send_farm
from .util_game import (
close_modal,
shortcut,
open_shortcut,
check_resources,
old_shortcut,
)
from .settings import settings
import json
def check_for_attack_thread(
    browser: client,
    village: int,
    interval: int,
    units: list,
    target: list,
    save_resources: bool,
    units_train: list,
) -> None:
    """Background loop that watches *village* for incoming attacks.

    When an attack is due within ten minutes it evacuates *units* to the
    *target* coordinates and, if *save_resources* is set, converts resources
    into troops via save_resources_gold so they cannot be looted. Runs
    forever; intended to be started as a daemon thread.
    """
    # Stagger start-up so several village threads don't poll at once.
    time.sleep(randint(0, 10))
    if save_resources:
        with open(settings.units_path, "r") as f:
            content = json.load(f)
    while True:
        sleep_time = interval
        attack_time = check_for_attack(browser, village)
        if attack_time:
            # attack_time is "HH:MM:SS".
            # BUGFIX: the seconds term previously re-used timelist[0] (hours);
            # it must use timelist[2].
            timelist = attack_time.split(":")
            countdown = (
                int(timelist[0]) * 60 * 60 + int(timelist[1]) * 60 + int(timelist[2])
            )
            save_send_time = 10 * 60
            if countdown < save_send_time:
                # send units away
                unit_dict = {}
                # fill the dict, -1 means send all units
                for unit in units:
                    unit_dict[int(unit)] = -1
                send_farm(
                    browser=browser,
                    village=village,
                    units=unit_dict,
                    x=int(target[0]),
                    y=int(target[1]),
                )
                log("units sent to rescue")
                if save_resources:
                    save_resources_gold(browser, units_train, content)
                sleep_time = save_send_time  # sleep at least until attack is over
            elif countdown > sleep_time + save_send_time:
                # do nothing and wait for next waking up
                pass
            else:
                # wake up before attack so the countdown will be smaller than save_send_time
                sleep_time = countdown - (save_send_time - 10)
        # log("checking for attacks going to sleep")
        time.sleep(sleep_time)
@use_browser
def check_for_attack(browser: client, village: int) -> str:
    """Open *village* and return the countdown string ("HH:MM:SS") of the
    first incoming attack in the troop-movements widget, or "" if none."""
    open_village(browser, village)
    movements = browser.find("//div[@id='troopMovements']")
    ul = movements.find_element_by_xpath(".//ul")
    lis = ul.find_elements_by_xpath(".//li")
    for li in lis:
        classes = li.get_attribute("class")
        if "incoming_attacks" in classes:
            cd = li.find_element_by_xpath(".//div[@class='countdown']")
            countdown = cd.get_attribute("innerHTML")
            log("incoming attack in {} !".format(countdown))
            return countdown
    return ""
@use_browser
def save_resources(browser: client, threshold: list) -> None:
    """Spend resources before an attack: queue the maximum barracks training,
    then list leftover resources above per-type *threshold* on the market.

    NOTE(review): `foo` indexes both `threshold` and the market filter
    buttons in resource-key order, and `(foo + 1) % 2` assumes only two
    tradable resource types — verify against the actual game layout.
    """
    # Train as many troops as possible so the resources can't be looted.
    open_shortcut(browser, shortcut.barrack)
    el = browser.find("//div[@class='modalContent']")
    max_button = el.find_element_by_xpath(
        ".//div[@class='iconButton maxButton clickable']"
    )
    browser.click(max_button, 1)
    browser.sleep(1)
    train_button = browser.find("//button[contains(@class, 'animate footerButton')]")
    browser.click(train_button, 1)
    close_modal(browser)
    # put resource left to market based on threshold
    browser.sleep(1)
    resource = check_resources(browser)
    foo = 0
    open_shortcut(browser, shortcut.marketplace)
    browser.sleep(1)
    el = browser.find("//div[@class='modalContent']")
    sell_tab = el.find_element_by_xpath(
        ".//a[contains(@class, 'naviTabSell clickable')]"
    )
    browser.click(sell_tab, 1)
    # Read how many merchants are free to carry offers.
    merchant = el.find_element_by_xpath(".//div[@class='marketplaceHeaderGroup']")
    merchant = merchant.find_element_by_xpath(".//div[@class='circle']/span")
    merchant = int(merchant.get_attribute("innerHTML"))
    browser.sleep(1)
    if merchant > 0:
        for res_name in resource.keys():
            if resource[res_name] >= threshold[foo]:
                # Offer 1000 of this resource, asking 2000 of the other type.
                offering = browser.find("//div[@class='offerBox']")
                offering = offering.find_element_by_xpath(
                    ".//div[@class='resourceFilter filterBar']"
                )
                offering_type = offering.find_elements_by_xpath(
                    ".//a[contains(@class, 'filter iconButton')]"
                )
                browser.click(offering_type[foo], 1)
                input_offering = browser.find(
                    "//input[@id='marketNewOfferOfferedAmount']"
                )
                input_offering.click()
                input_offering.send_keys("1000")
                browser.sleep(1)
                searching = browser.find("//div[@class='searchBox']")
                searching = searching.find_element_by_xpath(
                    ".//div[@class='resourceFilter filterBar']"
                )
                searching_type = searching.find_elements_by_xpath(
                    ".//a[contains(@class, 'filter iconButton')]"
                )
                browser.click(searching_type[(foo + 1) % 2], 1)
                input_searching = browser.find(
                    "//input[@id='marketNewOfferSearchedAmount']"
                )
                input_searching.click()
                input_searching.send_keys("2000")
                browser.sleep(1)
                # Keep creating offers until under threshold or out of merchants.
                while resource[res_name] >= threshold[foo] and merchant > 0:
                    sell_btn = browser.find(
                        "//button[contains(@class, 'createOfferBtn')]"
                    )
                    browser.click(sell_btn, 1)
                    resource[res_name] -= 1000
                    merchant -= 1
                    browser.sleep(1)
            foo += 1
            browser.sleep(1)
    close_modal(browser)
@use_browser
def save_resources_gold(browser: client, units_train: list, content: dict) -> None:
    """Spend resources before an attack using the gold NPC trader: compute a
    training plan that splits resources evenly across *units_train*, convert
    resources to the needed mix via the NPC market, then queue the training.

    Args:
        browser: selenium client wrapper.
        units_train: unit ids to train.
        content: parsed units JSON (tribes, unit ids, training costs).
    """
    # Identify the village's tribe from the stationed-troops widget.
    tribe_id = browser.find('//*[@id="troopsStationed"]//li[contains(@class, "tribe")]')
    tribe_id = tribe_id.get_attribute("tooltip-translate")
    units_cost = []  # resources cost for every unit in units_train
    total_units_cost = []  # total resources cost for every unit in units_train
    training_queue: dict = {}  # dict for training queue
    for tribe in content["tribe"]:
        if tribe_id in tribe["tribeId"]:
            for unit in tribe["units"]:
                if unit["unitId"] in units_train:
                    units_cost.append(unit["trainingCost"])
                    training_cost = sum(unit["trainingCost"].values())
                    total_units_cost.append(training_cost)
                    # initializing training_queue
                    training_queue[unit["unitTrain"]] = {}
                    training_queue[unit["unitTrain"]][unit["unitId"]] = 0
    resources = check_resources(browser)
    total_resources = sum(resources.values())
    # training amount distributed by: equal resources consumption per unit type
    training_amount = []  # amount of troop for training
    for cost in total_units_cost:
        train_amount = total_resources // (len(units_train) * cost)
        training_amount.append(train_amount)
    # fetching training_amount to training_queue
    _iter = (x for x in training_amount)  # generator of training_amount
    for unit_train in training_queue:
        for unit_id in training_queue[unit_train]:
            training_queue[unit_train][unit_id] = next(_iter)
    total_training_cost = []  # amount of troop * units_cost
    _iter = (x for x in training_amount)  # generator of training_amount
    for unit_cost in units_cost:
        amount = next(_iter)
        temp = {}  # temporary dict
        for _keys, _values in unit_cost.items():
            temp[_keys] = _values * amount
        total_training_cost.append(temp)
    # Total wood/clay/iron needed for the whole plan.
    wood, clay, iron = 0, 0, 0
    for resource in total_training_cost:
        wood += resource["wood"]
        clay += resource["clay"]
        iron += resource["iron"]
    _resource = (x for x in (wood, clay, iron))  # generator of resources
    # NPC the resources through the marketplace
    open_shortcut(browser, shortcut.marketplace)
    npc_tab = browser.find('//*[@id="optimizely_maintab_NpcTrade"]')
    browser.click(npc_tab, 1)
    market_content = browser.find('//div[contains(@class, "marketContent npcTrader")]')
    trs = market_content.find_elements_by_xpath('.//tbody[@class="sliderTable"]/tr')
    browser.sleep(1)
    # Fill the first three rows (wood, clay, iron) and lock each value.
    # NOTE(review): the local name "input" shadows the builtin; kept as-is here.
    for tr in trs[:-2]:
        input = tr.find_element_by_xpath(".//input")
        browser.sleep(0.5)
        input.clear()
        browser.sleep(1.5)
        input.send_keys(next(_resource))
        browser.sleep(1.5)
        lock = tr.find_element_by_xpath('.//div[@class="lockButtonBackground"]')
        browser.sleep(1.5)
        browser.click(lock, 1)
        browser.sleep(1.5)
    convert_button = market_content.find_element_by_xpath(
        './/div[@class="merchantBtn"]/button'
    )
    browser.click(convert_button, 1)
    # close marketplace
    close_modal(browser)
    # Start training troops
    for unit_train in training_queue:
        old_shortcut(browser, unit_train)
        for unit_id in training_queue[unit_train]:
            # click picture based unit_id
            unit_type = "unitType{}".format(unit_id)
            image_troop = browser.find(
                "//div[@class='modalContent']//img[contains(@class, '{}')]".format(
                    unit_type
                )
            )
            browser.click(image_troop, 1)
            # input amount based training_queue[unit_train][unit_id]
            input_troop = browser.find('//div[@class="inputContainer"]')
            input_troop = input_troop.find_element_by_xpath("./input").send_keys(
                training_queue[unit_train][unit_id]
            )
            browser.sleep(1.5)
            # click train button
            train_button = browser.find(
                "//button[contains(@class, 'animate footerButton')]"
            )
            browser.click(train_button, 1)
            browser.sleep(1.5)
        browser.sleep(1)
        close_modal(browser)
|
#!/usr/bin/env python3
# usage: $ oj generate-input 'python3 generate.py'
import random
# generated by online-judge-template-generator v4.4.0 (https://github.com/kmyk/online-judge-template-generator)
def main():
    """Emit one random test case: "n m", then n coordinate pairs, then m values."""
    # n = random.randint(1, 10) # TODO: edit here
    n = 10
    # m = random.randint(1, 10) # TODO: edit here
    m = 10
    # Draw in the same order as before: x then y for each of the n rows,
    # then the m standalone values.
    coords = [(random.randint(1, 10 ** 2), random.randint(1, 10 ** 2)) for _ in range(n)]
    values = [random.randint(1, 10 ** 2) for _ in range(m)]
    print(n, m)
    for x, y in coords:
        print(x, y)
    for v in values:
        print(v)


if __name__ == "__main__":
    main()
|
import os
from os.path import abspath, expanduser
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
import torch
from PIL import Image
from .utils import download_and_extract_archive, download_file_from_google_drive, extract_archive, verify_str_arg
from .vision import VisionDataset
class WIDERFace(VisionDataset):
    """`WIDERFace <http://shuoyang1213.me/WIDERFACE/>`_ Dataset.

    Args:
        root (string): Root directory where images and annotations are downloaded to.
            Expects the following folder structure if download=False:

            .. code::

                <root>
                    └── widerface
                        ├── wider_face_split ('wider_face_split.zip' if compressed)
                        ├── WIDER_train ('WIDER_train.zip' if compressed)
                        ├── WIDER_val ('WIDER_val.zip' if compressed)
                        └── WIDER_test ('WIDER_test.zip' if compressed)
        split (string): The dataset split to use. One of {``train``, ``val``, ``test``}.
            Defaults to ``train``.
        transform (callable, optional): A function/transform that takes in a PIL image
            and returns a transformed version. E.g, ``transforms.RandomCrop``
        target_transform (callable, optional): A function/transform that takes in the
            target and transforms it.
        download (bool, optional): If true, downloads the dataset from the internet and
            puts it in root directory. If dataset is already downloaded, it is not
            downloaded again.
    """

    BASE_FOLDER = "widerface"
    FILE_LIST = [
        # File ID                             MD5 Hash                            Filename
        ("15hGDLhsx8bLgLcIRD5DhYt5iBxnjNF1M", "3fedf70df600953d25982bcd13d91ba2", "WIDER_train.zip"),
        ("1GUCogbp16PMGa39thoMMeWxp7Rp5oM8Q", "dfa7d7e790efa35df3788964cf0bbaea", "WIDER_val.zip"),
        ("1HIfDbVEWKmsYKJZm4lchTBDLW5N7dY5T", "e5d8f4248ed24c334bbd12f49c29dd40", "WIDER_test.zip"),
    ]
    ANNOTATIONS_FILE = (
        "http://shuoyang1213.me/WIDERFACE/support/bbx_annotation/wider_face_split.zip",
        "0e3767bcf0e326556d407bf5bff5d27c",
        "wider_face_split.zip",
    )

    def __init__(
        self,
        root: str,
        split: str = "train",
        transform: Optional[Callable] = None,
        target_transform: Optional[Callable] = None,
        download: bool = False,
    ) -> None:
        super().__init__(
            root=os.path.join(root, self.BASE_FOLDER), transform=transform, target_transform=target_transform
        )
        # check arguments
        self.split = verify_str_arg(split, "split", ("train", "val", "test"))

        if download:
            self.download()

        if not self._check_integrity():
            raise RuntimeError("Dataset not found or corrupted. You can use download=True to download and prepare it")

        # One entry per image: path plus (for train/val) its face annotations.
        self.img_info: List[Dict[str, Union[str, Dict[str, torch.Tensor]]]] = []
        if self.split in ("train", "val"):
            self.parse_train_val_annotations_file()
        else:
            self.parse_test_annotations_file()

    def __getitem__(self, index: int) -> Tuple[Any, Any]:
        """
        Args:
            index (int): Index

        Returns:
            tuple: (image, target) where target is a dict of annotations for all faces in the image.
            target=None for the test split.
        """
        # stay consistent with other datasets and return a PIL Image
        img = Image.open(self.img_info[index]["img_path"])

        if self.transform is not None:
            img = self.transform(img)

        target = None if self.split == "test" else self.img_info[index]["annotations"]
        if self.target_transform is not None:
            target = self.target_transform(target)

        return img, target

    def __len__(self) -> int:
        """Return the number of images in the selected split."""
        return len(self.img_info)

    def extra_repr(self) -> str:
        lines = ["Split: {split}"]
        return "\n".join(lines).format(**self.__dict__)

    def parse_train_val_annotations_file(self) -> None:
        """Parse the bounding-box ground-truth file into self.img_info.

        The file format repeats: an image path line, a face-count line, then
        that many annotation lines of ten space-separated integers
        (x, y, w, h, blur, expression, illumination, occlusion, pose, invalid).
        """
        filename = "wider_face_train_bbx_gt.txt" if self.split == "train" else "wider_face_val_bbx_gt.txt"
        filepath = os.path.join(self.root, "wider_face_split", filename)

        with open(filepath) as f:
            lines = f.readlines()
            # Simple 3-state machine over the repeating record layout.
            file_name_line, num_boxes_line, box_annotation_line = True, False, False
            num_boxes, box_counter = 0, 0
            labels = []
            for line in lines:
                line = line.rstrip()
                if file_name_line:
                    img_path = os.path.join(self.root, "WIDER_" + self.split, "images", line)
                    img_path = abspath(expanduser(img_path))
                    file_name_line = False
                    num_boxes_line = True
                elif num_boxes_line:
                    num_boxes = int(line)
                    num_boxes_line = False
                    box_annotation_line = True
                elif box_annotation_line:
                    box_counter += 1
                    line_split = line.split(" ")
                    line_values = [int(x) for x in line_split]
                    labels.append(line_values)
                    if box_counter >= num_boxes:
                        box_annotation_line = False
                        file_name_line = True
                        labels_tensor = torch.tensor(labels)
                        self.img_info.append(
                            {
                                "img_path": img_path,
                                "annotations": {
                                    "bbox": labels_tensor[:, 0:4].clone(),  # x, y, width, height
                                    "blur": labels_tensor[:, 4].clone(),
                                    "expression": labels_tensor[:, 5].clone(),
                                    "illumination": labels_tensor[:, 6].clone(),
                                    "occlusion": labels_tensor[:, 7].clone(),
                                    "pose": labels_tensor[:, 8].clone(),
                                    "invalid": labels_tensor[:, 9].clone(),
                                },
                            }
                        )
                        box_counter = 0
                        labels.clear()
                else:
                    raise RuntimeError(f"Error parsing annotation file {filepath}")

    def parse_test_annotations_file(self) -> None:
        """Parse the test-split file list (image paths only, no boxes)."""
        filepath = os.path.join(self.root, "wider_face_split", "wider_face_test_filelist.txt")
        filepath = abspath(expanduser(filepath))
        with open(filepath) as f:
            lines = f.readlines()
            for line in lines:
                line = line.rstrip()
                img_path = os.path.join(self.root, "WIDER_test", "images", line)
                img_path = abspath(expanduser(img_path))
                self.img_info.append({"img_path": img_path})

    def _check_integrity(self) -> bool:
        """Return True when every expected extracted directory exists."""
        # Allow original archive to be deleted (zip). Only need the extracted images
        all_files = self.FILE_LIST.copy()
        all_files.append(self.ANNOTATIONS_FILE)
        for (_, md5, filename) in all_files:
            file, ext = os.path.splitext(filename)
            extracted_dir = os.path.join(self.root, file)
            if not os.path.exists(extracted_dir):
                return False
        return True

    def download(self) -> None:
        """Download and extract the image archives and annotation files."""
        if self._check_integrity():
            print("Files already downloaded and verified")
            return

        # download and extract image data
        for (file_id, md5, filename) in self.FILE_LIST:
            download_file_from_google_drive(file_id, self.root, filename, md5)
            filepath = os.path.join(self.root, filename)
            extract_archive(filepath)

        # download and extract annotation files
        download_and_extract_archive(
            url=self.ANNOTATIONS_FILE[0], download_root=self.root, md5=self.ANNOTATIONS_FILE[1]
        )
|
import subprocess

# Run the Tesseract CLI directly. The command is passed as an argument list so
# the space in "Program Files (x86)" cannot split the executable path (the
# original handed Popen one big unquoted string).
orc = subprocess.run(
    [r'C:\Program Files (x86)\Tesseract-OCR\tesseract', './p1.jpg', './a'],
    check=False,
)
print(orc)

import pytesseract
from PIL import Image

image = Image.open('p1.jpg')
# C:\Program Files (x86)\Tesseract-OCR
# NOTE(review): pytesseract must also know the binary location
# (pytesseract.pytesseract.tesseract_cmd) if tesseract is not on PATH — confirm.
text = pytesseract.image_to_string(image)
print(text)
import unittest
import time
import datetime
import urllib
from urllib.error import HTTPError
from utils.function.get_report import *
from selenium import webdriver
import requests
# Accumulators shared by the scan: category URLs, product URLs, product image URLs.
list_cat = []
list_product =[]
list_product_img =[]
now = datetime.datetime.now()
# CSS/XPath selectors for the three category levels of the directory page.
elem_directory_category = {
    'cat_lvl_1' : "div.span12 div.box div.rel-category-wrapper div.row-fluid div.span10 div.row-fluid h2.fs-15 a",
    'cat_lvl_2' : "/html/body/div/div[2]/div/div/div/div/div/div[2]/div[2]/div/ul/li/a",
    'cat_lvl_3' : "/html/body/div/div[2]/div/div[7]/div/div/div/div/div[2]/div/ul/li/ul/li/a"
}
class TestSweepProduct(unittest.TestCase):
def setUp(self):
    """Launch a Chrome driver at a fixed desktop resolution before each test."""
    # NOTE(review): hardcoded local chromedriver path; a non-raw string with
    # backslashes works here only because no escape sequences match — confirm.
    chromedriver= "D:\Python34\Scripts\chromedriver"
    self.driver = webdriver.Chrome(chromedriver)
    self.driver.set_window_size(1920, 1080)
#Fungsi check image product di kategori
""""def test_check_img_product_in_cat(self):
driver = self.driver
def check_directory_list(driver):
driver.get("https://test.tokopedia.nginx/p")
print ("START SCANNING DIRECTORY LIST...")
print ("================================")
for cat_lvl_1 in driver.find_elements_by_css_selector(elem_directory_category['cat_lvl_1']):
print (cat_lvl_1.get_attribute("href"))
list_cat.append(cat_lvl_1.get_attribute("href"))
# for cat_lvl_2 in driver.find_elements_by_xpath(elem_directory_category['cat_lvl_2']):
# print (cat_lvl_2.get_attribute("href"))
# list_cat.append(cat_lvl_2.get_attribute("href"))
#
# for cat_lvl_3 in driver.find_elements_by_xpath(elem_directory_category['cat_lvl_3']):
# print (cat_lvl_3.get_attribute("href"))
# list_cat.append(cat_lvl_3.get_attribute("href"))
print ("TEST CHECKING DIREKTORI DIMULAI")
check_img_product_in_cat(driver)
def check_img_product_in_cat(driver):
total_img_ok = 0
total_img_modified = 0
total_img_error_500 = 0
total_img_error_404 = 0
total_img_error_502 = 0
total_img_error_504 = 0
total_img_error_others = 0
for x in list_cat:
driver.get(x)
for b in driver.find_elements_by_xpath("//*[@id='content-directory']/div[1]/div/a/div/div[1]/img"):
print (b.get_attribute("src"))
list_product_img.append(b.get_attribute("src"))
time.sleep(3)
for y in list_product_img:
request = urllib.request.urlopen(y)
driver.get(y) #akses setiap gambar product di halaman direktori page 1
time.sleep(0.5)
#if request.getcode() == "200":
# print (y + " is successfully accessed (200 OK)")
if request.getcode() == 200:
print (y + " is successfully accessed with response code " + str(request.getcode()))
total_img_ok += 1
elif request.getcode()== 304:
print (y + " is successfully accessed with response code " + str(request.getcode()))
total_img_modified +=1
elif request.getcode() == 404:
print (y + " got error with response code " + str(request.getcode()))
total_img_error_404 += 1
elif request.getcode() == 504:
print (y + " got error with response code " + str(request.getcode()))
total_img_error_504 += 1
elif request.getcode() == 502:
print (y + " got error with response code " + str(request.getcode()))
total_img_error_502 += 1
elif request.getcode() == 500:
print (y + " got error with response code " + str(request.getcode()))
total_img_error_500 += 1
#elif request.getcode() == "504":
# print(y + " is GATEWAY TIMEOUT 504")
# total_img_error_504 += 1
else:
print (y + " error with response code " + str(request.getcode()))
total_img_error_others += 1
print ("URL Image Result(OK 200) : %s" %(str(total_img_ok)))
print ("URL Image Result(OK but Not modified 304) : %s" %(str(total_img_modified)))
print ("URL Image Result(ERROR 404) : %s" %(str(total_img_error_404)))
print ("URL Image Result(ERROR 500) : %s" %(str(total_img_error_500)))
print ("URL Image Result(ERROR 502) : %s" %(str(total_img_error_502)))
print ("URL Image Result(ERROR 504) : %s" %(str(total_img_error_504)))
print ("URL Image Result(OTHER ERRORS) : %s" %(str(total_img_error_others)))
#=====Fungsi Check List Product======
# def check_product_in_cat(driver):
# for x in list_cat:
# driver.get(x)
# #for b in driver.find_elements_by_css_selector("div.main-content div#content-directory div.grid-shop-product div.product a"):
# #Check dan tampung link Product di halaman direktori 1
# for b in driver.find_elements_by_xpath("//*[@id='content-directory']/div[1]/div/a"):
# print (b.get_attribute("href"))
# list_product.append(b.get_attribute("href"))
# time.sleep(5)
#
# for y in list_product:
# driver.get(y) #akses setiap hal product di halaman direktori page 1
# time.sleep(3)
check_directory_list(driver)"""
def test_check_index_product_in_cat(self):
driver = self.driver
def check_directory_list(driver, report_dir_row):
driver.get("https://test.tokopedia.nginx/p")
requests.get('https://test.tokopedia.nginx', verify = True)
print ("START SCANNING DIRECTORY LIST...")
print ("================================")
for cat_lvl_1 in driver.find_elements_by_css_selector(elem_directory_category['cat_lvl_1']):
print (cat_lvl_1.get_attribute("href"))
list_cat.append(cat_lvl_1.get_attribute("href"))
report_dir_sheet.write(report_dir_row, report_dir_col, cat_lvl_1.get_attribute("href"))
report_dir_row += 1
# for cat_lvl_2 in driver.find_elements_by_xpath(elem_directory_category['cat_lvl_2']):
# print (cat_lvl_2.get_attribute("href"))
# list_cat.append(cat_lvl_2.get_attribute("href"))
# report_dir_sheet.write(report_dir_row, report_dir_col, cat_lvl_2.get_attribute("href"))
# report_dir_row += 1
#
#
# for cat_lvl_3 in driver.find_elements_by_xpath(elem_directory_category['cat_lvl_3']):
# print (cat_lvl_3.get_attribute("href"))
# list_cat.append(cat_lvl_3.get_attribute("href"))
# report_dir_sheet.write(report_dir_row, report_dir_col, cat_lvl_3.get_attribute("href"))
# report_dir_row += 1
report_dir.close()
print ("TEST CHECKING DIREKTORI DIMULAI")
check_product_in_cat(driver, report_index_product_row)
def check_product_in_cat(driver, report_index_product_row):
total_product_ok = 0
total_product_modified = 0
total_product_error_500 = 0
total_product_error_404 = 0
total_product_error_502 = 0
total_product_error_504 = 0
total_product_error_others = 0
for dir in list_cat:
driver.get(dir)
#while True:
try:
for each_product in driver.find_elements_by_xpath("/html/body/div[1]/div[4]/div[1]/div[1]/div[2]/div/div/div[1]/div/a"):
print (each_product.get_attribute("href"))
list_product.append(each_product.get_attribute("href"))
time.sleep(3)
#break
#Fungsi untuk check "next page"
# next_page = driver.find_element_by_xpath("/html/body/div[1]/div[3]/div[1]/div[1]/div[2]/div/div/div[2]/div/div[2]/div/ul/li[last()]/a")
# if next_page.text == "»" :
# page_numb = page_numb +1
# print("masuk ke halaman %s" %str(page_numb))
# print (next_page.text)
#
# driver.get(next_page.get_attribute("href"))
#
# time.sleep(5)
# else :
# break
except:
print ("No product / catalog")
report_index_product_sheet.write(report_index_product_row, report_index_product_col, dir)
report_index_product_sheet.write(report_index_product_row, report_index_product_col+1, "No Product/Catalog")
report_index_product_row += 1
# if next_page.is_displayed != True:
# break
for product in list_product:
try:
request = urllib.request.urlopen(product)
driver.get(product) #akses setiap gambar product di halaman direktori page 1
load_time = requests.get(product).elapsed.total_seconds()
time.sleep(0.5)
if request.code == 200:
print (product + " is successfully accessed with response code " + str(request.getcode()) + " in " + str(load_time) + " second")
print (request.getcode())
total_product_ok += 1
report_index_product_sheet.write('A1',"Product Link", bold)
report_index_product_sheet.write('B1',"Status Code", bold)
report_index_product_sheet.write('C1', "Load Time (Second)", bold)
report_index_product_sheet.write(report_index_product_row, report_index_product_col, product)
report_index_product_sheet.write(report_index_product_row, report_index_product_col+1, str(request.getcode()))
report_index_product_sheet.write(report_index_product_row, report_index_product_col+2, str(load_time))
report_index_product_row += 1
except HTTPError as err:
print ("Response code error : %s" %err.code)
if err.code==404:
print (product + " got error with response code " + str(err.code))
total_product_error_404 +=1
report_index_product_sheet.write('A1',"Product Link", bold)
report_index_product_sheet.write('B1',"Status Code", bold)
report_index_product_sheet.write(report_index_product_row, report_index_product_col, product)
report_index_product_sheet.write(report_index_product_row, report_index_product_col+1, str(err.code))
report_index_product_row += 1
elif err.code==500:
print (product + " got error with response code " + str(err.code))
total_product_error_500 += 1
report_index_product_sheet.write('A1',"Product Link", bold)
report_index_product_sheet.write('B1',"Status Code", bold)
report_index_product_sheet.write(report_index_product_row, report_index_product_col, product)
report_index_product_sheet.write(report_index_product_row, report_index_product_col+1, str(request.getcode()))
report_index_product_row += 1
elif err.code == 504:
print (product + " got error with response code " + str(err.code))
total_product_error_504 += 1
report_index_product_sheet.write('A1',"Product Link", bold)
report_index_product_sheet.write('B1',"Status Code", bold)
report_index_product_sheet.write(report_index_product_row, report_index_product_col, product)
report_index_product_sheet.write(report_index_product_row, report_index_product_col+1, str(err.code))
report_index_product_row += 1
elif err.code == 502:
print (product + " got error with response code " + str(err.code))
total_product_error_502 += 1
report_index_product_sheet.write('A1',"Product Link", bold)
report_index_product_sheet.write('B1',"Status Code", bold)
report_index_product_sheet.write(report_index_product_row, report_index_product_col, product)
report_index_product_sheet.write(report_index_product_row, report_index_product_col+1, str(err.code))
report_index_product_row += 1
else:
print (product + " error with response code " + str(err.code))
total_product_error_others += 1
report_index_product_sheet.write('A1',"Product Link", bold)
report_index_product_sheet.write('B1',"Status Code", bold)
report_index_product_sheet.write(report_index_product_row, report_index_product_col, product)
report_index_product_sheet.write(report_index_product_row, report_index_product_col+1, str(err.code))
report_index_product_row +=1
print ("URL Index Product Result(OK 200) : %s" %(str(total_product_ok)))
print ("URL Index Product Result(OK but Not modified 304) : %s" %(str(total_product_modified)))
print ("URL Index Product Result(ERROR 404) : %s" %(str(total_product_error_404)))
print ("URL Index Product Result(ERROR 500) : %s" %(str(total_product_error_500)))
print ("URL Index Product Result(ERROR 502) : %s" %(str(total_product_error_502)))
print ("URL Index Product Result(ERROR 504) : %s" %(str(total_product_error_504)))
print ("URL Index Product Result(OTHER ERRORS) : %s" %(str(total_product_error_others)))
report_index_product.close()
check_directory_list(driver, report_dir_row)
def tearDown(self):
self.driver.close()
if __name__ == '__main__':
    unittest.main()
# Dead code: a module-level string literal (note the stray fourth quote);
# it is evaluated and discarded, never executed.
""""while (1) :
    for member in list_cat :
        driver.get(member)
        time.sleep(3)"""
|
from __future__ import print_function
from test_base import OrloLiveTest, OrloTest, ConfigChange, ReleaseDbUtil
from orlo.deploy import BaseDeploy, HttpDeploy, ShellDeploy
from orlo.orm import db, Release, Package
from test_base import OrloLiveTest
from test_base import ReleaseDbUtil
import unittest
__author__ = 'alforbes'
class DeployTest(OrloLiveTest, ReleaseDbUtil):
    """
    Test the Deploy class
    """
    CLASS = BaseDeploy

    def setUp(self):
        """Create one release with a package and load it for the tests."""
        super(DeployTest, self).setUp()
        rid = self._create_release()
        # The package must exist in the DB, but only the release object is
        # used by the tests, so the returned package id is discarded.
        self._create_package(rid)
        self.release = db.session.query(Release).first()

    def test_init(self):
        """
        Test that we can instantiate the class
        """
        o = self.CLASS(self.release)
        self.assertIsInstance(o, BaseDeploy)
class TestBaseDeploy(DeployTest):

    def test_not_implemented(self):
        """The abstract base Deploy must refuse to start."""
        deploy = self.CLASS(self.release)
        # Callable form of assertRaises: equivalent to the context-manager
        # idiom, asserts start() raises NotImplementedError.
        self.assertRaises(NotImplementedError, deploy.start)
class TestHttpDeploy(DeployTest):
    CLASS = HttpDeploy

    @unittest.skip("Not implemented")
    def test_start(self):
        """Placeholder: start should emit an HTTP call once implemented."""
        pass

    @unittest.skip("Not implemented")
    def test_kill(self):
        """Placeholder: kill should emit an HTTP call once implemented."""
        pass
class TestShellDeploy(DeployTest):
    CLASS = ShellDeploy

    def test_start(self):
        """Starting a ShellDeploy runs the configured shell command."""
        with ConfigChange('deploy', 'timeout', '3'), \
                ConfigChange('deploy_shell', 'command_path', '/bin/true'):
            shell_deploy = ShellDeploy(self.release)
            # server_url normally comes from config; point it at the live
            # test server instead.
            shell_deploy.server_url = self.get_server_url()
            shell_deploy.start()

    @unittest.skip("Not implemented")
    def test_kill(self):
        """Placeholder: kill should emit a shell command once implemented."""
        pass

    @unittest.skip("Doesn't work on travis")
    def test_start_example_deployer(self):
        """Run the bundled example deployer to completion.

        DOESN'T WORK ON TRAVIS, as /bin/env python gives the system python
        """
        with ConfigChange('deploy', 'timeout', '3'):
            shell_deploy = ShellDeploy(self.release)
            # server_url normally comes from config; point it at the live
            # test server instead.
            shell_deploy.server_url = self.get_server_url()
            shell_deploy.start()

    def test_output(self):
        """The deploy's stdout is captured and returned.

        Not a great test: it relies on the test package spec being passed
        as an argument and simply echoed back. That is the "spec", but
        this test could break if the arguments change.
        """
        with ConfigChange('deploy', 'timeout', '3'), \
                ConfigChange('deploy_shell', 'command_path', '/bin/echo'):
            shell_deploy = ShellDeploy(self.release)
            shell_deploy.server_url = self.get_server_url()
            result = shell_deploy.start()
            self.assertEqual(result['stdout'], b'test-package=1.2.3\n')
|
import tkinter as tk
import speech_recognition as sr
import os
from gtts import gTTS
from tkinter.filedialog import askopenfilename
from PIL import Image
import pyrebase
from tkinter import Tk
import face_recognition
from PIL import Image, ImageDraw
from shutil import copyfile
import cv2
from pathlib import Path
import shutil
import pathlib
import pandas as pd
import time
from datetime import datetime, timedelta
from datetime import date
import smtplib
from openpyxl import load_workbook
from PIL import Image, ImageTk
class Projectt:
    def __init__(self, name, HEIGHT, WIDTH, bgc, vr_icon, canvas, root, signUpWindow, vr_image, setEmailWindow, upload_window, name_variable, num, fn, k, password_array, chances, chances1, d1, setPasswordWindow, pazzword, pazzword_variable, emaill, emaill_variable):
        """Stash all GUI handles and application state on the instance.

        Each screen-builder method destroys ``self.root`` and rebuilds it,
        so most of these attributes are placeholders that get reassigned
        as the user moves between screens.
        """
        self.name=name
        self.HEIGHT=HEIGHT
        self.WIDTH=WIDTH
        self.bgc = bgc
        self.vr_icon=vr_icon
        self.canvas=canvas
        # NOTE(review): the passed-in ``root`` is discarded — a fresh Tk
        # window is created here unconditionally. Confirm this is intended.
        self.root=root=tk.Tk()
        self.signUpWindow=signUpWindow
        self.vr_image=vr_image
        self.setEmailWindow=setEmailWindow
        self.upload_window=upload_window
        self.name_variable=name_variable
        self.num=num
        self.fn=fn
        self.k=k
        self.password_array=password_array
        self.chances=chances
        self.chances1=chances1
        self.d1=d1
        self.setPasswordWindow=setPasswordWindow
        self.pazzword=pazzword
        self.pazzword_variable=pazzword_variable
        self.emaill=emaill
        self.emaill_variable=emaill_variable
    # Dead code: remove_new_folders kept as a string literal, never runs.
    '''\
    def remove_new_folders(self, d1):
        os.chdir('..')
        if os.path.exists(d1):
            shutil.rmtree(d1)'''
    def face_recognition_for_multiple_images(self):
        """Sign-in flow: capture a webcam frame on left click, download all
        registered user photos from Firebase storage, and compare the
        captured face against each; on exactly one match, sign the user in.
        """
        def click_event(event, x, y, flags, param):
            # Left click freezes the current frame to unknown.jpg and shuts
            # the camera down.
            if event == cv2.EVENT_LBUTTONDOWN:
                os.chdir(pathlib.Path(__file__).parent.absolute())
                result=cv2.imwrite("unknown.jpg", frame)
                cam.release()
                cv2.destroyAllWindows()
        # NOTE(review): chdir to the current directory is a no-op.
        direc=os.getcwd()
        os.chdir(direc)
        cam = cv2.VideoCapture(0)
        cam.set(3, 2048)
        cam.set(4, 2048)
        while cv2.waitKey(1):
            ret, frame = cam.read()
            if ret == False:
                break
            cv2.imshow("test", frame)
            #cv2.waitKey(1)
            cv2.setMouseCallback("test", click_event)
        # NOTE(review): local ``pd`` shadows the module-level pandas import
        # for the rest of this method.
        pd=pathlib.Path(__file__).parent.absolute()
        #print(pd)
        d='images'
        path=os.path.join(pd, d)
        mode=0o666
        os.mkdir(path, mode)
        # Firebase project credentials (service-account variant for admin
        # access to storage listing).
        config = {
            "apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
            "authDomain": "tympass-32736.firebaseapp.com",
            "databaseURL" : "https://tympass-32736.firebaseio.com",
            "projectId": "tympass-32736",
            "storageBucket": "tympass-32736.appspot.com",
            "messagingSenderId": "990276104410",
            "appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
            "measurementId": "G-7HF9TQ5QC1",
            "serviceAccount": "D:/lol/tympass-32736-firebase-adminsdk-73kvc-7991327d54.json"
        }
        firebase = pyrebase.initialize_app(config)
        storage = firebase.storage()
        #path_on_cloud="images"
        # Download every stored file into the fresh images/ directory.
        all_files = storage.list_files()
        for file in all_files:
            #print(file.name)
            self.d1=path
            os.chdir(self.d1)
            file.download_to_filename(file.name)
        # Drop the spreadsheet(s) so only user photos remain for matching.
        filelist = [f for f in os.listdir(self.d1) if f.endswith(".xlsx")]
        for f in filelist:
            os.remove(os.path.join(self.d1,f))
        os.chdir('..')
        try:
            #Add known images
            image_of_person = face_recognition.load_image_file('unknown.jpg')
            person_face_encoding = face_recognition.face_encodings(image_of_person)[0]
            for file_name in os.listdir(self.d1):
                #Load the file
                newPic = face_recognition.load_image_file(file_name)
                #Search every detected face
                for face_encoding in face_recognition.face_encodings(newPic):
                    results = face_recognition.compare_faces([person_face_encoding], face_encoding, 0.5)
                    # NOTE(review): self.num is reset to 0 on every face, so
                    # only the last face processed can leave num == 1 — this
                    # looks like a bug (the reset likely belongs before the
                    # loops).
                    self.num=0
                    #If match, show it
                    if results[0] == True:
                        #copyFile(file_name, "./img/saved" + file_name)
                        self.num=self.num+1
                        self.fn=Path(file_name).stem
                        #print("Hi"+ str(fn))
            os.remove('unknown.jpg')
        except:
            # NOTE(review): bare except — any failure (no face found,
            # missing file, network) is treated as a failed sign-in.
            self.failed_signIn()
            os.remove('unknown.jpg')
        if(self.num==1):
            #print("Hi "+ str(fn))
            self.signIn()
            os.chdir('..')
            if os.path.exists(self.d1):
                # NOTE(review): ``d1`` is an unqualified name here (and in
                # the two cleanups below) and will raise NameError;
                # presumably self.d1 was intended.
                shutil.rmtree(d1)
        else:
            self.failed_signIn()
            os.chdir('..')
            if os.path.exists(self.d1):
                shutil.rmtree(d1)
        os.chdir('..')
        if os.path.exists(self.d1):
            shutil.rmtree(d1)
    def camera(self):
        """Open the webcam, let the user capture a frame with a left click,
        and upload the captured photo to Firebase storage as
        ``<name_variable>.jpg``.
        """
        def click_event(event, x, y, flags, param):
            # Left click: save the current frame and release the camera.
            if event == cv2.EVENT_LBUTTONDOWN:
                result=cv2.imwrite("unknown.jpg", frame)
                cam.release()
                cv2.destroyAllWindows()
        direc=pathlib.Path(__file__).parent.absolute()
        os.chdir(direc)
        cam = cv2.VideoCapture(0)
        cam.set(3, 2048)
        cam.set(4, 2048)
        while cv2.waitKey(1):
            ret, frame = cam.read()
            if ret == False:
                break
            cv2.imshow("Camera", frame)
            #cv2.waitKey(1)
            cv2.setMouseCallback("Camera", click_event)
        # Firebase project credentials.
        config = {
            "apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
            "authDomain": "tympass-32736.firebaseapp.com",
            "databaseURL" : "https://tympass-32736.firebaseio.com",
            "projectId": "tympass-32736",
            "storageBucket": "tympass-32736.appspot.com",
            "messagingSenderId": "990276104410",
            "appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
            "measurementId": "G-7HF9TQ5QC1"
        }
        firebase = pyrebase.initialize_app(config)
        storage = firebase.storage()
        # Upload the capture under the user's name, then delete the local copy.
        path_on_cloud = (str(self.name_variable)+".jpg")
        path_local=("unknown.jpg");
        storage.child(path_on_cloud).put(path_local)
        os.remove("unknown.jpg")
    def openn(self):
        """Let the user pick an image file from disk and upload it to
        Firebase storage as ``<name_variable>.jpg``.
        """
        # Hide the stray extra Tk root that the file dialog would create.
        Tk().withdraw()
        filename = askopenfilename()
        # Firebase project credentials.
        config = {
            "apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
            "authDomain": "tympass-32736.firebaseapp.com",
            "databaseURL" : "https://tympass-32736.firebaseio.com",
            "projectId": "tympass-32736",
            "storageBucket": "tympass-32736.appspot.com",
            "messagingSenderId": "990276104410",
            "appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
            "measurementId": "G-7HF9TQ5QC1"
        }
        firebase = pyrebase.initialize_app(config)
        storage = firebase.storage()
        path_on_cloud = (str(self.name_variable)+".jpg")
        path_local=(filename);
        storage.child(path_on_cloud).put(path_local)
def voice_output(self, mytext):
# Language in which you want to convert
language = 'en'
# Passing the text and language to the engine,
# here we have marked slow=False. Which tells
# the module that the converted audio should
# have a high speed
myobj = gTTS(text="Your entered" + str(mytext), lang=language, slow=False)
# Saving the converted audio in a mp3 file named
# welcome
d=os.getcwd()
os.chdir(d)
myobj.save("welcome.mp3")
# Playing the converted file
#welcome = r'D:\voce\welcome.mp3'
#os.system("mpg123" + welcome)
from playsound import playsound
playsound("welcome.mp3")
os.remove("welcome.mp3")
def voice_outputt(self, mytext):
# Language in which you want to convert
language = 'en'
# Passing the text and language to the engine,
# here we have marked slow=False. Which tells
# the module that the converted audio should
# have a high speed
myobj = gTTS(text=str(mytext), lang=language, slow=False)
# Saving the converted audio in a mp3 file named
# welcome
d=os.getcwd()
os.chdir(d)
myobj.save("welcome1.mp3")
# Playing the converted file
#welcome = r'D:\voce\welcome.mp3'
#os.system("mpg123" + welcome)
from playsound import playsound
playsound("welcome1.mp3")
os.remove("welcome1.mp3")
    def voice_input(self):
        """Listen on the default microphone, transcribe via Google speech
        recognition, and insert the result into the ``self.name`` entry.
        """
        r = sr.Recognizer()
        mic = sr.Microphone(device_index=0)
        with mic as source:
            # NOTE(review): duration=0 effectively skips ambient-noise
            # calibration, and timeout=0 may make listen() give up
            # immediately — confirm these values are intentional.
            r.adjust_for_ambient_noise(source, duration=0)
            #print("What is your name: ")
            self.voice_outputt("Speak now")
            audio = r.listen(source, timeout=0)
            print("Wait till your voice is recognised......\n")
            # Cloud transcription; raises on network failure or unclear audio.
            d=r.recognize_google(audio)
            self.name.insert(0, d)
    def starrt(self):
        """(Re)create the root window with a black full-size canvas and
        load the voice-recognition icon."""
        self.HEIGHT = 2048
        self.WIDTH = 2048
        self.bgc='lightyellow'
        self.root.destroy()
        self.root = tk.Tk()
        self.root.title('TRACK SMART Attendence System')
        #this to define canvas in GUI
        self.canvas = tk.Canvas(self.root, height=self.HEIGHT, width=self.WIDTH, bg='black')
        self.canvas.pack()
        '''
        self.canvas = tk.Toplevel(self.root, height=self.HEIGHT, width=self.WIDTH, bg='lightpink')
        #self.canvas.title('SIGN UP for TRACK SMART Attendence')
        '''
        #photoimage for icon
        self.vr_image = tk.PhotoImage(file = "vr_icon.png")
        self.vr_icon = self.vr_image.subsample(11,11)
def remainn(self):
d = pathlib.Path(__file__).parent.absolute()
os.chdir(d)
if os.path.exists('images'):
shutil.rmtree('images')
self.root.destroy()
self.root=tk.Tk()
self.root.title('TRACK SMART Attendence System')
#this to define canvas in GUI
self.canvas = tk.Canvas(self.root, height=self.HEIGHT, width=self.WIDTH, bg='black')
self.canvas.pack()
#to print welcome message
welcomeMsg = tk.Message(self.canvas, text='WELCOME TO SMART TRACK ATTENDENCE SYSTEM')
welcomeMsg.config(bg='lightpink', font=('times', 48, 'italic'))
welcomeMsg.place(relx= 0.05, rely=0.05, relwidth=0.4, relheight=0.9)
#button for new user
signUpBtn = tk.Button(self.canvas, text="SIGN UP", font=('times', 36), command=self.signUp)
signUpBtn.place(relx=0.6, rely=0.35, relheight=0.08, relwidth=0.15)
#button for existing student
signInBtn = tk.Button(self.canvas, text="SIGN IN", font=('times', 36), command=self.face_recognition_for_multiple_images)
signInBtn.place(relx=0.6, rely=0.55, relheight=0.08, relwidth=0.15)
image = Image.open("black_bioChem.jpg")
photo = ImageTk.PhotoImage(image)
wlcmLabel = tk.Label(image=photo)
wlcmLabel.place(relx=0.1, rely=0.2, relwidth=0.36, relheight=0.6)
    def submittedScreen(self):
        """Show the registration-success screen with a button back to the
        welcome screen."""
        self.root.destroy()
        self.root=tk.Tk()
        congoWindow = tk.Canvas(self.root, height=self.HEIGHT, width=self.WIDTH)
        congoWindow.pack()
        #self.voice_outputt("Congratulations you are successfully registered with track smart attendance system")
        self.root.title('CONGRATULATIONS')
        congoMsg = tk.Message(congoWindow, text='Congratulations...\nYou are successfully registered\nwith\nTRACK SMART ATTENDENCE SYSTEM')
        congoMsg.config(justify='center', font=('times', 52, 'italic'))
        congoMsg.place(relx= 0.05, rely=0.075, relwidth=0.9, relheight=0.6)
        exitBtn = tk.Button(congoWindow, text="GO TO WELCOME SCREEN", font=('times', 36), command=self.remainn)
        exitBtn.place(relx=0.31, rely=0.75, relheight=0.075, relwidth=0.38)
    def confirm_submit(self):
        """Read the typed email and ask the user to confirm before it is
        saved (CONFIRM persists via add_new_email, BACK returns to the
        passcode screen)."""
        self.emaill_variable = ' '
        self.emaill_variable = self.emaill.get()
        email_label = tk.Label(self.setEmailWindow, text="Are you sure you want to continue?", font=('times', 36))
        email_label.place(rely=0.5, relwidth=1)
        confirmationFinal = tk.Label(self.setEmailWindow, text="You won't be able to change it later.", font=('times', 36))
        confirmationFinal.place(rely=0.6, relwidth=1)
        finalButton = tk.Button(self.setEmailWindow, text="CONFIRM", font=('times', 36), command=lambda:[self.submittedScreen(),self.add_new_email()])
        finalButton.place(relx=0.75, rely=0.8, relwidth = 0.15)
        backButton = tk.Button(self.setEmailWindow, text="BACK", font=('times', 36), command=self.toPasswordScreen)
        backButton.place(relx=0.1, rely=0.8, relwidth = 0.15)
    def confirm_submit2(self):
        """Read the typed password and ask the user to confirm before it is
        saved (CONFIRM persists via add_new_password and moves on to the
        email screen)."""
        self.pazzword_variable = ' '
        self.pazzword_variable = self.pazzword.get()
        email_label = tk.Label(self.setPasswordWindow, text="Are you sure you want to continue?", font=('times', 36), bg='lightyellow')
        email_label.place(rely=0.5, relwidth=1)
        confirmationFinal = tk.Label(self.setPasswordWindow, text="You won't be able to change it later.",
                                     font=('times', 36), bg='lightyellow')
        confirmationFinal.place(rely=0.6, relwidth=1)
        finalButton = tk.Button(self.setPasswordWindow, text="CONFIRM", font=('times', 36), command=lambda:[self.toEmailScreen(), self.add_new_password()])
        finalButton.place(relx=0.75, rely=0.8, relwidth=0.15)
        backButton = tk.Button(self.setPasswordWindow, text="BACK", font=('times', 36), command=self.imgUploadScreen)
        backButton.place(relx=0.1, rely=0.8, relwidth=0.15)
    def toEmailScreen(self):
        """Build the email-entry screen and prompt for the email by voice."""
        self.root.destroy()
        self.root=tk.Tk()
        self.setEmailWindow = tk.Canvas(self.root,height=self.HEIGHT, width=self.WIDTH)
        self.root.title('SET EMAIL')
        self.setEmailWindow.pack()
        self.voice_outputt("Input your email")
        emailMsg = tk.Label(self.setEmailWindow, text="Please enter your email-ID below", font=('times', 36))
        emailMsg.place(rely=0.15, relwidth=1)
        self.emaill = tk.Entry(self.setEmailWindow, font=('times', 36))
        self.emaill.place(rely=0.3, relx=0.13, relwidth=0.50, relheight=0.08)
        email_button = tk.Button(self.setEmailWindow, text="SUBMIT Email", font=('times', 36), command=self.confirm_submit)
        email_button.place(relx=0.68, rely=0.3, relwidth=0.18, relheight=0.08)
    def reimgUploadScreen(self):
        """Rebuild the image-upload screen (re-entry variant).

        NOTE(review): unlike imgUploadScreen, the WEBCAM and UPLOAD buttons
        here have no ``command`` bound, so clicking them does nothing —
        confirm whether that is intended.
        """
        self.root.destroy()
        self.root=tk.Tk()
        self.root.title('UPLOAD IMAGE')
        upload_window = tk.Canvas(self.root, height=self.HEIGHT, width=self.WIDTH)
        upload_window.pack()
        upload_label = tk.Label(upload_window, text="Upload Your Image for Face Recognistion", font=('times', 36))
        upload_label.place(rely=0.2, relwidth=1)
        webcam_button = tk.Button(upload_window, text="WEBCAM", font=('times', 36))
        webcam_button.place(relx=0.4, rely=0.4, relwidth=0.2)
        upload_button = tk.Button(upload_window, text="UPLOAD", font=('times', 36))
        upload_button.place(relx=0.4, rely=0.55, relwidth=0.2)
        proceed_button = tk.Button(upload_window, text="PROCEED", font=('times', 36), command=self.toPasswordScreen)
        proceed_button.place(relx=0.75, rely=0.8, relwidth=0.15)
        back_button = tk.Button(upload_window, text="BACK", font=('times', 36), command=self.signUp)
        back_button.place(relx=0.1, rely=0.8, relwidth=0.15)
    def toPasswordScreen(self):
        """Intermediate screen prompting the user to set a passcode.

        NOTE(review): the big icon button has no ``command`` bound — only
        PROCEED/BACK navigate.
        """
        self.root.destroy()
        self.root=tk.Tk()
        setPasswordWindow = tk.Canvas(self.root,height=self.HEIGHT, width=self.WIDTH)
        self.root.title('SET PASSCODE')
        setPasswordWindow.pack()
        pswd = tk.Label(setPasswordWindow, text="Please set your Passcode by pressing the button below", font=('times', 36))
        pswd.place(rely=0.15, relwidth=1)
        #photoimage for icon
        self.vr_image = tk.PhotoImage(file = "vr_icon.png")
        self.vr_icon = self.vr_image.subsample(11,11)
        vrPasscode = tk.Button(setPasswordWindow, image = self.vr_image, font=('times', 36))
        vrPasscode.place(relx=0.4, rely=0.3, width=300, height=400)
        proceed_button = tk.Button(setPasswordWindow, text="PROCEED", font=('times', 36), command=self.toEmailScreen)
        proceed_button.place(relx=0.75, rely=0.8, relwidth=0.15)
        back_button = tk.Button(setPasswordWindow, text="BACK", font=('times', 36), command=self.imgUploadScreen)
        back_button.place(relx=0.1, rely=0.8, relwidth=0.15)
    def PasswordScreen(self):
        """Build the password-entry screen and prompt by voice."""
        self.root.destroy()
        self.root = tk.Tk()
        self.root.title('SET PASSWORD')
        self.setPasswordWindow = tk.Canvas(self.root, height=self.HEIGHT, width=self.WIDTH, bg=self.bgc)
        self.setPasswordWindow.pack()
        # self.signUpWindow.title('SIGN UP for TRACK SMART Attendence')
        # here i have added frame to our GUI for name entry
        entryFrame = tk.Frame(self.setPasswordWindow, bg=self.bgc, bd=10)
        entryFrame.place(relx=0.5, rely=0.25, relwidth=0.8, relheight=0.1, anchor='n')
        # entry field for the password
        self.pazzword = tk.Entry(entryFrame, font=('times', 36))
        self.pazzword.place(relwidth=0.6, relheight=1)
        self.voice_outputt("Input your password")
        '''
        self.vr_image = tk.PhotoImage(file="vr_icon.png")
        self.vr_icon = self.vr_image.subsample(11, 11)
        # button for voice recognition
        vr_button = tk.Button(entryFrame, image=self.vr_icon, )
        vr_button.place(relx=0.64, relheight=1, relwidth=0.07)
        '''
        # submit button
        name_button = tk.Button(entryFrame, text="SUBMIT Password", font=('times', 36), command = self.confirm_submit2)
        name_button.place(relx=0.65, relheight=1, relwidth=0.32)
    def resignUp(self):
        """Rebuild the sign-up (name entry) screen."""
        self.root.destroy()
        self.root=tk.Tk()
        self.root.title('SIGN UP for TRACK SMART Attendence')
        self.signUpWindow = tk.Canvas(self.root,height=self.HEIGHT, width=self.WIDTH, bg=self.bgc)
        self.signUpWindow.pack()
        #self.signUpWindow.title('SIGN UP for TRACK SMART Attendence')
        #here i have added frame to our GUI for name entry
        entryFrame = tk.Frame(self.signUpWindow, bg=self.bgc, bd=10)
        entryFrame.place(relx=0.5, rely=0.25, relwidth=0.8, relheight=0.1, anchor='n')
        #entry field for name
        self.name = tk.Entry(entryFrame, font=('times', 36))
        self.name.place(relwidth=0.6, relheight=1)
        self.vr_image = tk.PhotoImage(file = "vr_icon.png")
        self.vr_icon = self.vr_image.subsample(11,11)
        #button for voice recognition
        # NOTE(review): no ``command`` bound — clicking the icon does nothing.
        vr_button = tk.Button(entryFrame, image = self.vr_icon)
        vr_button.place(relx=0.64, relheight=1, relwidth=0.07)
        #button for name
        name_button = tk.Button(entryFrame, text="SUBMIT Name", font=('times', 36), command=self.confirm_name)
        name_button.place(relx=0.75, relheight=1, relwidth=0.25)
def destroy_uw(self):
self.resignUp
    # Screen to upload or capture an image for face recognition.
    def imgUploadScreen(self):
        """Build the image-upload screen: capture from the webcam or pick a
        file, then proceed to the password screen."""
        self.root.destroy()
        self.root=tk.Tk()
        self.root.title('UPLOAD IMAGE')
        self.upload_window = tk.Canvas(self.root, height=self.HEIGHT, width=self.WIDTH)
        self.upload_window.pack()
        self.voice_outputt("Upload your image")
        upload_label = tk.Label(self.upload_window, text="Upload Your Image for Face Recognistion", font=('times', 36))
        upload_label.place(rely=0.2, relwidth=1)
        webcam_button = tk.Button(self.upload_window, text="WEBCAM", font=('times', 36), command=self.camera)
        webcam_button.place(relx=0.4, rely=0.4, relwidth=0.2)
        upload_button = tk.Button(self.upload_window, text="UPLOAD", font=('times', 36), command=self.openn)
        upload_button.place(relx=0.4, rely=0.55, relwidth=0.2)
        proceed_button = tk.Button(self.upload_window, text="PROCEED", font=('times', 36), command=self.PasswordScreen)
        proceed_button.place(relx=0.75, rely=0.8, relwidth=0.15)
        back_button = tk.Button(self.upload_window, text="BACK", font=('times', 36), command=self.signUp)
        back_button.place(relx=0.1, rely=0.8, relwidth=0.15)
    def goToWlcmScreen(self):
        """Destroy every intermediate window to get back to the welcome
        screen.

        NOTE(review): this method looks unused/broken — ``upload_window``
        and ``setEmailWindow`` are bare names (not self.*) and would raise
        NameError; ``self.congoWindow`` is never assigned anywhere in this
        class; and ``setEmailWindow`` is destroyed twice. Verify before
        wiring anything to it.
        """
        self.signUpWindow.destroy()
        upload_window.destroy()
        self.setPasswordWindow.destroy()
        setEmailWindow.destroy()
        self.congoWindow.destroy()
        setEmailWindow.destroy()
    #def congo_screen():
def destroy_ew(self):
setEmailWindow.destroy()
#def email_screen():
    def destroy_pw(self):
        """Destroy the password-screen canvas."""
        self.setPasswordWindow.destroy()
    #def pw_screen():
    def relaunchSignUp(self):
        """Rebuild the sign-up screen (used by the RETAKE button).

        NOTE(review): ``signUp`` is not defined in this part of the file —
        presumably it exists elsewhere in the class; confirm it is not a
        typo for ``resignUp``.
        """
        self.signUpWindow.destroy()
        self.signUp()
    # To confirm the name entered by the user.
    def confirm_name(self):
        """Echo the entered name back and ask for confirmation; CONFIRM
        persists the name and moves on to the image-upload screen."""
        self.name_variable= ' '
        self.name_variable=self.name.get()
        name_label = tk.Label(self.signUpWindow, bg=self.bgc, text="You entered \"" + str(self.name_variable)+'\"' , font=('times', 36))
        name_label.place(rely=0.5, relwidth=1)
        confirmation = tk.Label(self.signUpWindow, bg=self.bgc, text="Are you sure you want to continue ?", font=('times', 36))
        confirmation.place(rely=0.6, relwidth=1)
        backButton = tk.Button(self.signUpWindow, text="RETAKE", font=('times', 36), command=self.relaunchSignUp)
        backButton.place(relx=0.1, rely=0.8, relwidth = 0.15)
        yesButton = tk.Button(self.signUpWindow, text="CONFIRM", font=('times', 36), command=lambda: [
            self.add_new_name(), self.imgUploadScreen()])
        yesButton.place(relx=0.75, rely=0.8, relwidth = 0.15)
    #def add_new_email(self):
    def add_new_name(self):
        """Append the new user's name as a row of the shared demo.xlsx in
        Firebase storage: download it, append locally, re-upload, clean up.
        """
        # Firebase project credentials.
        config = {
            "apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
            "authDomain": "tympass-32736.firebaseapp.com",
            "databaseURL": "https://tympass-32736.firebaseio.com",
            "projectId": "tympass-32736",
            "storageBucket": "tympass-32736.appspot.com",
            "messagingSenderId": "990276104410",
            "appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
            "measurementId": "G-7HF9TQ5QC1"
        }
        firebase = pyrebase.initialize_app(config)
        storage = firebase.storage()
        path_on_cloud = "demo.xlsx"
        # path_local=r'D:\lol\demo.xlsx';
        # storage.child(path_on_cloud).put(path_local)
        d = pathlib.Path(__file__).parent.absolute()
        os.chdir(d)
        storage.child(path_on_cloud).download("new.xlsx")
        #name = input("Enter your name - ")
        df = pd.DataFrame({'Name': [self.name_variable]})
        # NOTE(review): assigning writer.book / writer.sheets relies on
        # older pandas/openpyxl behaviour; newer pandas exposes mode='a'
        # on ExcelWriter instead — verify against the pinned versions.
        writer = pd.ExcelWriter('new.xlsx', engine='openpyxl')
        writer.book = load_workbook('new.xlsx')
        writer.sheets = dict((ws.title, ws) for ws in writer.book.worksheets)
        reader = pd.read_excel('new.xlsx')
        # startrow = len(reader) + 1 writes below the existing data,
        # leaving one blank row — presumably intended; confirm.
        df.to_excel(writer, index=False, header=False, startrow=len(reader) + 1)
        writer.close()
        # firebase = pyrebase.initialize_app(config)
        # storage = firebase.storage()
        path_on_cloud = "demo.xlsx"
        path_local = "new.xlsx";
        storage.child(path_on_cloud).put(path_local)
        # d = os.getcwd
        # os.chdir(d)
        # storage.child(path_on_cloud).download("new.xlsx")
        os.remove("new.xlsx")
def add_new_password(self):
    """Store the confirmed passcode (self.pazzword_variable) in the
    'Password' column of this user's row in the cloud workbook.

    Flow: download demo.xlsx -> edit the row whose Name matches
    self.name_variable -> upload the result -> delete the local copy.
    """
    # NOTE(review): credentials hard-coded in source; password is stored in
    # plain text in the shared sheet — confirm that is acceptable.
    config = {
        "apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
        "authDomain": "tympass-32736.firebaseapp.com",
        "databaseURL": "https://tympass-32736.firebaseio.com",
        "projectId": "tympass-32736",
        "storageBucket": "tympass-32736.appspot.com",
        "messagingSenderId": "990276104410",
        "appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
        "measurementId": "G-7HF9TQ5QC1"
    }
    firebase = pyrebase.initialize_app(config)
    storage = firebase.storage()
    path_on_cloud = "demo.xlsx"
    # path_local=r'D:\lol\demo.xlsx';
    # storage.child(path_on_cloud).put(path_local)
    # Work from the script's directory so the temp workbook lands predictably.
    d = pathlib.Path(__file__).parent.absolute()
    os.chdir(d)
    storage.child(path_on_cloud).download("new.xlsx")
    #password = input("Enter your password - ")
    df = pd.read_excel('new.xlsx')
    df.loc[df['Name'] == self.name_variable, ['Password']] = str(self.pazzword_variable)
    df.to_excel('new.xlsx', index=False)
    path_on_cloud = "demo.xlsx"
    path_local = "new.xlsx";
    storage.child(path_on_cloud).put(path_local)
    # d = os.getcwd
    # os.chdir(d)
    # storage.child(path_on_cloud).download("new.xlsx")
    os.remove("new.xlsx")
def add_new_email(self):
    """Store the confirmed e-mail address (self.emaill_variable) in the
    'email' column of this user's row in the cloud workbook.

    Flow: download demo.xlsx -> edit the row whose Name matches
    self.name_variable -> upload the result -> delete the local copy.

    Fix: removed a leftover blocking ``input("Enter your email - ")``
    console prompt; the e-mail already arrives via self.emaill_variable
    and the prompt froze the GUI flow.
    """
    # NOTE(review): Firebase credentials are hard-coded in source.
    config = {
        "apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
        "authDomain": "tympass-32736.firebaseapp.com",
        "databaseURL": "https://tympass-32736.firebaseio.com",
        "projectId": "tympass-32736",
        "storageBucket": "tympass-32736.appspot.com",
        "messagingSenderId": "990276104410",
        "appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
        "measurementId": "G-7HF9TQ5QC1"
    }
    firebase = pyrebase.initialize_app(config)
    storage = firebase.storage()
    path_on_cloud = "demo.xlsx"
    # Work from the script's directory so the temp workbook lands predictably.
    d = pathlib.Path(__file__).parent.absolute()
    os.chdir(d)
    storage.child(path_on_cloud).download("new.xlsx")
    df = pd.read_excel('new.xlsx')
    df.loc[df['Name'] == self.name_variable, ['email']] = str(self.emaill_variable)
    df.to_excel('new.xlsx', index=False)
    storage.child(path_on_cloud).put("new.xlsx")
    os.remove("new.xlsx")
#function for signup
def signUp(self):
self.root.destroy()
self.root=tk.Tk()
self.root.title('SIGN UP for TRACK SMART Attendence')
self.signUpWindow = tk.Canvas(self.root,height=self.HEIGHT, width=self.WIDTH, bg=self.bgc)
self.signUpWindow.pack()
#self.signUpWindow.title('SIGN UP for TRACK SMART Attendence')
#here i have added frame to our GUI for name entry
entryFrame = tk.Frame(self.signUpWindow, bg=self.bgc, bd=10)
entryFrame.place(relx=0.5, rely=0.25, relwidth=0.8, relheight=0.1, anchor='n')
#entry field for name
self.name = tk.Entry(entryFrame, font=('times', 36))
self.name.place(relwidth=0.6, relheight=1)
self.voice_outputt("Input your name")
self.vr_image = tk.PhotoImage(file = "vr_icon.png")
self.vr_icon = self.vr_image.subsample(11,11)
#button for voice recognition
vr_button = tk.Button(entryFrame, image = self.vr_icon, command=self.voice_input)
vr_button.place(relx=0.64, relheight=1, relwidth=0.07)
#button for name
name_button = tk.Button(entryFrame, text="SUBMIT Name", font=('times', 36), command=lambda:[self.voice_output(self.name.get()), self.confirm_name()])
name_button.place(relx=0.75, relheight=1, relwidth=0.25)
''' #here i have added frame to our GUI for name entry
entryFrame = tk.Frame(self.signUpWindow, bg=self.bgc, bd=10)
entryFrame.place(relx=0.5, rely=0.25, relwidth=0.8, relheight=0.1, anchor='n')
#entry field for name
self.name = tk.Entry(entryFrame, font=('times', 36))
self.name.place(relwidth=0.6, relheight=1)
#button for voice recognition
vr_button = tk.Button(entryFrame, image = self.vr_icon)
vr_button.place(relx=0.64, relheight=1, relwidth=0.07)
#button for name
name_button = tk.Button(entryFrame, text="SUBMIT Name", font=('times', 36), command=self.signUp.confirm_name)
name_button.place(relx=0.75, relheight=1, relwidth=0.25)
'''
#function for signin
def signIn(self):
self.chances = tk.IntVar()
self.chances1= tk.IntVar()
self.chances.set(3)
self.chances1.set(self.chances.get())
config = {
"apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
"authDomain": "tympass-32736.firebaseapp.com",
"databaseURL": "https://tympass-32736.firebaseio.com",
"projectId": "tympass-32736",
"storageBucket": "tympass-32736.appspot.com",
"messagingSenderId": "990276104410",
"appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
"measurementId": "G-7HF9TQ5QC1"
}
firebase = pyrebase.initialize_app(config)
storage = firebase.storage()
path_on_cloud = "demo.xlsx"
d = pathlib.Path(__file__).parent.absolute()
os.chdir(d)
storage.child(path_on_cloud).download("new.xlsx")
df = pd.read_excel('new.xlsx')
x = []
x = df[df['Name'] == self.fn]['Time']
k = x[0]
if(k==' '):
self.root.destroy()
self.root = tk.Tk()
signInWindow = tk.Canvas(self.root, height=self.HEIGHT, width=self.WIDTH)
self.root.title('SIGN IN for TRACK SMART Attendence')
signInWindow.pack()
welcomeUser = tk.Label(signInWindow, text="Welcome " + str(self.fn) + ",\n\nPlease say your passcode...",
font=('times', 36))
welcomeUser.place(rely=0.1, relwidth=1)
self.vr_image = tk.PhotoImage(file="vr_icon.png")
self.vr_icon = self.vr_image.subsample(11, 11)
vrPasscode = tk.Button(signInWindow, image=self.vr_image, font=('times', 36), command=self.passwordd)
vrPasscode.place(relx=0.4, rely=0.4, width=300, height=400)
backButton = tk.Button(signInWindow, text="BACK", font=('times', 36), command=self.remainn)
backButton.place(relx=0.1, rely=0.8, relwidth=0.15)
config = {
"apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
"authDomain": "tympass-32736.firebaseapp.com",
"databaseURL": "https://tympass-32736.firebaseio.com",
"projectId": "tympass-32736",
"storageBucket": "tympass-32736.appspot.com",
"messagingSenderId": "990276104410",
"appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
"measurementId": "G-7HF9TQ5QC1"
}
firebase = pyrebase.initialize_app(config)
storage = firebase.storage()
path_on_cloud = "demo.xlsx"
d = pathlib.Path(__file__).parent.absolute()
os.chdir(d)
storage.child(path_on_cloud).download("new.xlsx")
df = pd.read_excel('new.xlsx')
# name=input("Enter your name - ")
self.password_array = []
df.loc[df['Name'] == self.fn, ['Time']] = ' '
df.to_excel('new.xlsx', index=False)
config = {
"apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
"authDomain": "tympass-32736.firebaseapp.com",
"databaseURL": "https://tympass-32736.firebaseio.com",
"projectId": "tympass-32736",
"storageBucket": "tympass-32736.appspot.com",
"messagingSenderId": "990276104410",
"appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
"measurementId": "G-7HF9TQ5QC1"
}
firebase = pyrebase.initialize_app(config)
storage = firebase.storage()
path_on_cloud = "demo.xlsx"
path_local = 'new.xlsx';
storage.child(path_on_cloud).put(path_local)
os.remove('new.xlsx')
else:
k_updated = pd.to_datetime(k)
now = datetime.now()
#print(now)
#print(k_updated)
if(k_updated<now):
self.root.destroy()
self.root = tk.Tk()
signInWindow = tk.Canvas(self.root, height=self.HEIGHT, width=self.WIDTH)
self.root.title('SIGN IN for TRACK SMART Attendence')
signInWindow.pack()
welcomeUser = tk.Label(signInWindow, text="Welcome " + str(self.fn) + ",\n\nPlease say your passcode...", font=('times', 36))
welcomeUser.place(rely=0.1, relwidth=1)
self.vr_image = tk.PhotoImage(file = "vr_icon.png")
self.vr_icon = self.vr_image.subsample(11,11)
vrPasscode = tk.Button(signInWindow, image = self.vr_image, font=('times', 36), command=self.passwordd)
vrPasscode.place(relx=0.4, rely=0.4, width=300, height=400)
backButton = tk.Button(signInWindow, text="BACK", font=('times', 36), command=self.remainn)
backButton.place(relx=0.1, rely=0.8, relwidth = 0.15)
config = {
"apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
"authDomain": "tympass-32736.firebaseapp.com",
"databaseURL": "https://tympass-32736.firebaseio.com",
"projectId": "tympass-32736",
"storageBucket": "tympass-32736.appspot.com",
"messagingSenderId": "990276104410",
"appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
"measurementId": "G-7HF9TQ5QC1"
}
firebase = pyrebase.initialize_app(config)
storage = firebase.storage()
path_on_cloud = "demo.xlsx"
d = pathlib.Path(__file__).parent.absolute()
os.chdir(d)
storage.child(path_on_cloud).download("new.xlsx")
df = pd.read_excel('new.xlsx')
df.loc[df['Name'] == self.fn, ['Time']] = ' '
df.to_excel('new.xlsx', index=False)
config = {
"apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
"authDomain": "tympass-32736.firebaseapp.com",
"databaseURL": "https://tympass-32736.firebaseio.com",
"projectId": "tympass-32736",
"storageBucket": "tympass-32736.appspot.com",
"messagingSenderId": "990276104410",
"appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
"measurementId": "G-7HF9TQ5QC1"
}
firebase = pyrebase.initialize_app(config)
storage = firebase.storage()
path_on_cloud = "demo.xlsx"
path_local = 'new.xlsx';
storage.child(path_on_cloud).put(path_local)
os.remove('new.xlsx')
else:
self.tryAgainScreen()
def failed_signIn(self):
    """Show the 'face not recognized' screen with BACK and EXIT buttons."""
    self.root.destroy()
    self.root = tk.Tk()
    window = tk.Canvas(self.root, height=self.HEIGHT, width=self.WIDTH)
    self.root.title('SIGN IN for TRACK SMART Attendence')
    window.pack()
    sorry_msg = tk.Label(window, text="Sorry! Couldn't recognise you.",
                         font=('times', 72))
    sorry_msg.place(rely=0.4, relwidth=1)
    btn_back = tk.Button(window, text="BACK", font=('times', 36),
                         command=self.remainn)
    btn_back.place(relx=0.1, rely=0.8, relwidth=0.15)
    btn_exit = tk.Button(window, text="EXIT", font=('times', 36),
                         command=self.root.destroy)
    btn_exit.place(relx=0.75, rely=0.8, relwidth=0.15)
def welcome_button(self):
    """Show the splash screen: a single full-window image button that
    launches the main menu (self.mainn). Blocks in mainloop().
    """
    # NOTE(review): these locals shadow the instance-level HEIGHT/WIDTH/bgc
    # values with different constants — confirm that is intentional.
    HEIGHT = 2048
    WIDTH = 2048
    bgc = 'lightyellow'
    self.root.destroy()
    self.root = tk.Tk()
    self.root.title('TRACK SMART Attendence System')
    # this to define canvas in GUI
    canvas = tk.Canvas(self.root, height=HEIGHT, width=WIDTH, bg='black')
    canvas.pack()
    # btn_photo stays alive as a local because mainloop() runs inside this
    # frame, so the image is not garbage-collected while shown.
    btn_image = Image.open("black_bioChem.jpg")
    btn_photo = ImageTk.PhotoImage(btn_image)
    welcome_button = tk.Button(canvas, image=btn_photo, font=('times', 36), command = self.mainn)
    welcome_button.place(relx=0.32, rely=0.2, relwidth=0.34, relheight=0.6)
    self.root.mainloop()
def mainn(self):
    """Show the main menu: a welcome banner plus SIGN UP (new user) and
    SIGN IN (face recognition) buttons. Blocks in mainloop().
    """
    #self.root.destroy()
    # starrt() is expected to (re)create self.root and self.canvas.
    self.starrt()
    #to print welcome message
    welcomeMsg = tk.Message(self.canvas, text='WELCOME TO SMART TRACK ATTENDENCE SYSTEM')
    welcomeMsg.config(bg='black', font=('times', 48, 'italic'))
    welcomeMsg.place(relx= 0.05, rely=0.05, relwidth=0.4, relheight=0.9)
    #button for new user
    signUpBtn = tk.Button(self.canvas, text="SIGN UP", font=('times', 36), command=self.signUp)
    signUpBtn.place(relx=0.6, rely=0.35, relheight=0.08, relwidth=0.15)
    #button for existing student: face recognition decides who is signing in
    signInBtn = tk.Button(self.canvas, text="SIGN IN", font=('times', 36), command=self.face_recognition_for_multiple_images)
    signInBtn.place(relx=0.6, rely=0.55, relheight=0.08, relwidth=0.15)
    # photo stays alive as a local because mainloop() runs inside this frame.
    image = Image.open("black_bioChem.jpg")
    photo = ImageTk.PhotoImage(image)
    wlcmLabel = tk.Label(image=photo)
    wlcmLabel.place(relx=0.1, rely=0.2, relwidth=0.33, relheight=0.6)
    self.root.mainloop()
def verifiedScreen(self):
    """Show the success screen after the user is marked present."""
    self.root.destroy()
    self.root = tk.Tk()
    win = tk.Canvas(self.root, height=self.HEIGHT, width=self.WIDTH)
    win.pack()
    self.root.title('CONGRATULATIONS')
    banner = tk.Message(win, text='Congratulations...\nYou have been marked as\n PRESENT.')
    banner.config(justify='center', font=('times', 52, 'italic'))
    banner.place(relx=0.05, rely=0.075, relwidth=0.9, relheight=0.6)
    home_btn = tk.Button(win, text="GO TO WELCOME SCREEN",
                         font=('times', 36), command=self.mainn)
    home_btn.place(relx=0.31, rely=0.75, relheight=0.075, relwidth=0.38)
def unverifiedScreen(self):
    """Show the failure screen with a RETRY button back into sign-in."""
    self.root.destroy()
    self.root = tk.Tk()
    win = tk.Canvas(self.root, height=self.HEIGHT, width=self.WIDTH)
    win.pack()
    self.root.title('FAILURE')
    banner = tk.Message(win, text='Sorry...\nCouldn\'t understand \n Please Try Again.')
    banner.config(justify='center', font=('times', 52, 'italic'))
    banner.place(relx=0.05, rely=0.075, relwidth=0.9, relheight=0.6)
    retry_btn = tk.Button(win, text="RETRY", font=('times', 36),
                          command=self.signIn)
    retry_btn.place(relx=0.31, rely=0.75, relheight=0.075, relwidth=0.38)
def tryAgainTimeLimit(self):
    """Stamp a 15-minute retry lockout for user ``self.fn`` into the
    'Time' column of the cloud sheet (download -> edit -> upload -> delete
    the local temp copy).

    NOTE(review): only the clock time "%H:%M:%S" is stored, not the date —
    presumably assumed to be read back the same day; confirm.
    """
    now = datetime.now()
    current_time = now.strftime("%H:%M:%S")
    # print("Current Time = ", current_time)
    # added_time = timedelta(minutes=15)
    updated_time = now + timedelta(minutes=15)
    updated_timee = updated_time.strftime("%H:%M:%S")
    #name = input("Enter your name - ")
    # NOTE(review): Firebase credentials hard-coded in source.
    config = {
        "apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
        "authDomain": "tympass-32736.firebaseapp.com",
        "databaseURL" : "https://tympass-32736.firebaseio.com",
        "projectId": "tympass-32736",
        "storageBucket": "tympass-32736.appspot.com",
        "messagingSenderId": "990276104410",
        "appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
        "measurementId": "G-7HF9TQ5QC1"
    }
    firebase = pyrebase.initialize_app(config)
    storage = firebase.storage()
    path_on_cloud = "demo.xlsx"
    d = pathlib.Path(__file__).parent.absolute()
    os.chdir(d)
    storage.child(path_on_cloud).download("new.xlsx")
    df = pd.read_excel('new.xlsx')
    #name=input("Enter your name - ")
    self.password_array=[]
    df.loc[df['Name'] == self.fn, ['Time']] = str(updated_timee)
    df.to_excel('new.xlsx', index=False)
    config = {
        "apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
        "authDomain": "tympass-32736.firebaseapp.com",
        "databaseURL": "https://tympass-32736.firebaseio.com",
        "projectId": "tympass-32736",
        "storageBucket": "tympass-32736.appspot.com",
        "messagingSenderId": "990276104410",
        "appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
        "measurementId": "G-7HF9TQ5QC1"
    }
    firebase = pyrebase.initialize_app(config)
    storage = firebase.storage()
    path_on_cloud = "demo.xlsx"
    path_local = 'new.xlsx';
    storage.child(path_on_cloud).put(path_local)
    os.remove('new.xlsx')
    # print("Updated time = ", updated_timee)
def tryAgainScreen(self):
    """Show the retry-lockout screen with a live mm:ss countdown until the
    user's lockout timestamp (the sheet's 'Time' column) expires, then
    re-upload the sheet and clean up the session's capture folder.

    Fixes: the lookup was hard-coded to the name 'Swapnil Pant' instead of
    the current user ``self.fn``, and ``x[0]`` was a label lookup that
    raised KeyError for users not stored in row 0 (now ``x.iloc[0]``).
    """
    self.root.destroy()
    self.root = tk.Tk()
    retryWindow = tk.Canvas(self.root, height=self.HEIGHT, width=self.WIDTH)
    retryWindow.pack()
    self.root.title('RETRY')
    backButton = tk.Button(retryWindow, text="BACK", font=('times', 36),
                           command=self.remainn)
    backButton.place(relx=0.1, rely=0.8, relwidth=0.15)
    cancelButton = tk.Button(retryWindow, text="EXIT", font=('times', 36),
                             command=self.root.destroy)
    cancelButton.place(relx=0.75, rely=0.8, relwidth=0.15)

    def countdown(t):
        # Repaint the remaining mm:ss once per second until t hits zero.
        while t:
            mins, secs = divmod(t, 60)
            timer = '{:02d}:{:02d}'.format(mins, secs)
            retryMsg = tk.Message(retryWindow, text="Try again after \n" + timer + " \nminutes." + "\r")
            retryMsg.config(justify='center', font=('times', 52, 'italic'))
            retryMsg.place(relx=0.05, rely=0.075, relwidth=0.9, relheight=0.6)
            self.root.update()
            time.sleep(1)
            t -= 1

    now = datetime.now()
    current_time = now.strftime("%H:%M:%S")
    # NOTE(review): Firebase credentials hard-coded in source.
    config = {
        "apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
        "authDomain": "tympass-32736.firebaseapp.com",
        "databaseURL": "https://tympass-32736.firebaseio.com",
        "projectId": "tympass-32736",
        "storageBucket": "tympass-32736.appspot.com",
        "messagingSenderId": "990276104410",
        "appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
        "measurementId": "G-7HF9TQ5QC1"
    }
    firebase = pyrebase.initialize_app(config)
    storage = firebase.storage()
    path_on_cloud = "demo.xlsx"
    d = pathlib.Path(__file__).parent.absolute()
    os.chdir(d)
    storage.child(path_on_cloud).download("new.xlsx")
    df = pd.read_excel('new.xlsx')
    x = df[df['Name'] == self.fn]['Time']  # fix: was hard-coded 'Swapnil Pant'
    k = x.iloc[0]                          # fix: positional access, was x[0]
    k_updated = pd.to_datetime(k)
    diff = k_updated - now
    if k_updated > now:
        countdown(int(diff.total_seconds()))
    storage.child(path_on_cloud).put('new.xlsx')
    os.remove('new.xlsx')
    os.chdir('..')
    # Wipe this session's temporary face-capture folder, if it exists.
    if os.path.exists(self.d1):
        shutil.rmtree(self.d1)
def passwordd(self):
    """Listen for the spoken passcode and verify it against the stored one.

    Downloads the cloud sheet, looks up ``self.fn``'s password, records one
    recognition attempt (3 chances, tracked in ``self.chances``) and either
    marks the user present or — once the chances are spent — starts the
    15-minute lockout.

    Fixes in this version: the recognized phrase could be *unbound* when
    recognition threw or chances were already 0 (NameError on the compare);
    the ``elif chances == 0`` branch was unreachable after an exhaustive
    ``d == k / d != k`` pair; the lockout path re-uploaded a local file that
    tryAgainTimeLimit() had already deleted; and the locally downloaded
    temp workbook was never cleaned up.
    """
    # NOTE(review): Firebase credentials hard-coded in source.
    config = {
        "apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
        "authDomain": "tympass-32736.firebaseapp.com",
        "databaseURL" : "https://tympass-32736.firebaseio.com",
        "projectId": "tympass-32736",
        "storageBucket": "tympass-32736.appspot.com",
        "messagingSenderId": "990276104410",
        "appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
        "measurementId": "G-7HF9TQ5QC1"
    }
    firebase = pyrebase.initialize_app(config)
    storage = firebase.storage()
    path_on_cloud = "demo.xlsx"
    base_dir = pathlib.Path(__file__).parent.absolute()
    os.chdir(base_dir)
    storage.child(path_on_cloud).download("new.xlsx")
    df = pd.read_excel('new.xlsx')
    self.password_array = []
    self.password_array = df[df['Name'] == self.fn]['Password'].tolist()
    self.k = self.password_array[0]
    print(self.k)
    heard = None  # fix: always bound, even when recognition fails
    if self.chances.get() > 0:
        try:
            r = sr.Recognizer()
            mic = sr.Microphone(device_index=0)
            with mic as source:
                r.adjust_for_ambient_noise(source, duration=0)
                # Prompt the user aloud, then listen.
                myobj = gTTS(text="Speak Now", lang='en', slow=False)
                myobj.save("welcome.mp3")
                from playsound import playsound
                playsound("welcome.mp3")
                os.remove("welcome.mp3")
                audio = r.listen(source, timeout=0)
                heard = r.recognize_google(audio)
            print(heard)
        except:
            # Recognition failed — tell the user to try again (heard stays None).
            myobj = gTTS(text="Sorry couldn not understand please speak again", lang='en', slow=False)
            myobj.save("welcome.mp3")
            from playsound import playsound
            playsound("welcome.mp3")
            os.remove("welcome.mp3")
    if heard is not None:
        # One attempt consumed, success or not (decrement before any window
        # rebuild so the tk variables are still backed by a live root).
        self.chances.set(self.chances.get() - 1)
        self.chances1.set(self.chances.get())
        if heard == self.k:
            self.verifiedScreen()
            self.append_new_date_column()
            self.apply_present()
            self.sending_mail()
        else:
            myobj = gTTS(text="Wrong Password Speak Again.", lang='en', slow=False)
            myobj.save("welcome.mp3")
            from playsound import playsound
            playsound("welcome.mp3")
            os.remove("welcome.mp3")
    if self.chances.get() == 0:
        print(self.chances.get())
        # tryAgainTimeLimit() stamps the lockout into the sheet and handles
        # its own upload/cleanup; no extra upload needed here.
        self.tryAgainTimeLimit()
        self.tryAgainScreen()
    # Remove the temp workbook downloaded above, if a helper hasn't already.
    if os.path.exists('new.xlsx'):
        os.remove('new.xlsx')
def append_new_date_column(self):
    """Add today's date as a new column to the cloud sheet, initialised to
    "ABSENT" for every row (download -> edit -> upload -> delete temp copy).
    """
    today = date.today()
    datee = today.strftime("%d/%m/%Y")
    # NOTE(review): Firebase credentials hard-coded in source.
    config = {
        "apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
        "authDomain": "tympass-32736.firebaseapp.com",
        "databaseURL": "https://tympass-32736.firebaseio.com",
        "projectId": "tympass-32736",
        "storageBucket": "tympass-32736.appspot.com",
        "messagingSenderId": "990276104410",
        "appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
        "measurementId": "G-7HF9TQ5QC1"
    }
    firebase = pyrebase.initialize_app(config)
    storage = firebase.storage()
    path_on_cloud = "demo.xlsx"
    d = pathlib.Path(__file__).parent.absolute()
    os.chdir(d)
    storage.child(path_on_cloud).download("new.xlsx")
    df = pd.read_excel('new.xlsx')
    length = len(df.columns)
    # insert() raises if the column already exists (second sign-in of the
    # day): the except deliberately keeps the sheet unchanged in that case.
    # NOTE(review): the bare except also hides unrelated write failures.
    try:
        df.insert(length, str(datee), "ABSENT")
        df.to_excel('new.xlsx', index=False)
    except:
        pass
    config = {
        "apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
        "authDomain": "tympass-32736.firebaseapp.com",
        "databaseURL": "https://tympass-32736.firebaseio.com",
        "projectId": "tympass-32736",
        "storageBucket": "tympass-32736.appspot.com",
        "messagingSenderId": "990276104410",
        "appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
        "measurementId": "G-7HF9TQ5QC1"
    }
    firebase = pyrebase.initialize_app(config)
    storage = firebase.storage()
    path_on_cloud = "demo.xlsx"
    path_local = 'new.xlsx';
    storage.child(path_on_cloud).put(path_local)
    os.remove('new.xlsx')
def apply_present(self):
    """Mark user ``self.fn`` as 'PRESENT' in today's date column of the
    cloud sheet (download -> edit -> upload -> delete temp copy).

    Assumes append_new_date_column() already created today's column.
    """
    # NOTE(review): Firebase credentials hard-coded in source.
    config = {
        "apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
        "authDomain": "tympass-32736.firebaseapp.com",
        "databaseURL": "https://tympass-32736.firebaseio.com",
        "projectId": "tympass-32736",
        "storageBucket": "tympass-32736.appspot.com",
        "messagingSenderId": "990276104410",
        "appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
        "measurementId": "G-7HF9TQ5QC1"
    }
    firebase = pyrebase.initialize_app(config)
    storage = firebase.storage()
    path_on_cloud = "demo.xlsx"
    d = pathlib.Path(__file__).parent.absolute()
    os.chdir(d)
    storage.child(path_on_cloud).download("new.xlsx")
    df = pd.read_excel('new.xlsx')
    today = date.today()
    datee = today.strftime("%d/%m/%Y")
    df.loc[df['Name'] == str(self.fn), [str(datee)]] = 'PRESENT'
    df.to_excel('new.xlsx', index=False)
    config = {
        "apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
        "authDomain": "tympass-32736.firebaseapp.com",
        "databaseURL": "https://tympass-32736.firebaseio.com",
        "projectId": "tympass-32736",
        "storageBucket": "tympass-32736.appspot.com",
        "messagingSenderId": "990276104410",
        "appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
        "measurementId": "G-7HF9TQ5QC1"
    }
    firebase = pyrebase.initialize_app(config)
    storage = firebase.storage()
    path_on_cloud = "demo.xlsx"
    path_local = 'new.xlsx';
    storage.child(path_on_cloud).put(path_local)
    os.remove('new.xlsx')
def sending_mail(self):
    """E-mail user ``self.fn`` their attendance status for today via
    Gmail SMTP, reading the address from the sheet's 'email' column.

    NOTE(review): ``x[0]`` / ``y[0]`` are *label* lookups on a filtered
    Series and raise KeyError for any user not stored in row 0 —
    ``.iloc[0]`` is presumably intended; confirm before relying on this.
    NOTE(review): SMTP credentials are hard-coded in source.
    NOTE(review): ``message1`` says "present" but ``q`` counts the ABSENT
    case — the second branch's wording/intent looks wrong; verify.
    """
    config = {
        "apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
        "authDomain": "tympass-32736.firebaseapp.com",
        "databaseURL": "https://tympass-32736.firebaseio.com",
        "projectId": "tympass-32736",
        "storageBucket": "tympass-32736.appspot.com",
        "messagingSenderId": "990276104410",
        "appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
        "measurementId": "G-7HF9TQ5QC1"
    }
    firebase = pyrebase.initialize_app(config)
    storage = firebase.storage()
    path_on_cloud = "demo.xlsx"
    d = pathlib.Path(__file__).parent.absolute()
    os.chdir(d)
    storage.child(path_on_cloud).download("new.xlsx")
    df = pd.read_excel('new.xlsx')
    today = date.today()
    datee = today.strftime("%d/%m/%Y")
    # j = today's attendance cell, k = the user's e-mail address.
    x = []
    x = df[df['Name'] == str(self.fn)][str(datee)]
    j = x[0]
    y = []
    y = df[df['Name'] == str(self.fn)]['email']
    k = y[0]
    print(k)
    print(j)
    yesterday = today - timedelta(days=1)
    yesterday_datee = yesterday.strftime("%d/%m/%Y")
    '''
    z = []
    z = df[df['Name'] == 'Swapnil Pant'][str(yesterday_datee)]
    l = z[0]
    '''
    # p/q are 0/1 flags for today's PRESENT/ABSENT status.
    p = 0
    if (j == 'PRESENT'):
        p = p + 1
    q = 0
    if (j == 'ABSENT'):
        q = q + 1
    # creates SMTP session
    s = smtplib.SMTP_SSL('smtp.gmail.com', 465)
    # start TLS for security
    # s.starttls()
    # Authentication
    s.login("tracksmartattendance@gmail.com", "12345678a@")
    # message to be sent
    message = ("You were present on " + str(datee) + ".")
    message1 = ("You were present on " + str(yesterday_datee) + ".")
    print(message)
    print(message1)
    print(p)
    print(q)
    if (p == 1):
        s.sendmail("tracksmartattendance@gmail.com", k, message)
    if (q == 1):
        s.sendmail("tracksmartattendance@gmail.com", k, message1)
    else:
        pass
    s.quit()
    os.remove('new.xlsx')
# ---------------------------------------------------------------------------
# Module bootstrap: build a Projectt instance with placeholder state, run the
# GUI from the splash screen, then clean up temp artifacts left on disk.
# Fix: os.remove('new.xlsx') was unconditional and raised FileNotFoundError
# whenever the run left no temp workbook behind.
# ---------------------------------------------------------------------------
name = ' '
HEIGHT = 0
WIDTH = 0
bgc = ' '
vr_icon = ' '
canvas = ' '
root = ' '
signUpWindow = ' '
vr_image = ' '
setEmailWindow = ' '
upload_window = ' '
name_variable = ' '
num = 0
fn = ' '
k = ' '
password_array = ' '
chances = 0
chances1 = 0
d1 = ' '
setPasswordWindow = ' '
pazzword = ' '
pazzword_variable = ' '
emaill = ' '
emaill_variable = ' '
p = Projectt(name, HEIGHT, WIDTH, bgc, vr_icon, canvas, root, signUpWindow, vr_image, setEmailWindow, upload_window, name_variable, num, fn, k, password_array, chances, chances1, d1, setPasswordWindow, pazzword, pazzword_variable, emaill, emaill_variable)
#p.starrt()
#p.signUp()
p.welcome_button()
#p.mainn()
d = pathlib.Path(__file__).parent.absolute()
os.chdir(d)
if os.path.exists('images'):
    shutil.rmtree('images')
if os.path.exists('new.xlsx'):
    os.remove('new.xlsx')
#print(p.name_variable)
#print(p.name.get())
|
import math
from qiskit import (
# IBMQ,
QuantumCircuit,
QuantumRegister,
ClassicalRegister,
execute,
Aer,
)
def dec2bin(n):
    """Return the binary representation of non-negative ``n`` as a string,
    zero-padded on the left to at least 10 characters.

    Replaces the hand-rolled divmod loop with the builtin ``format``; for
    n >= 1024 the string is simply longer than 10 characters, exactly as
    ``zfill`` behaved in the original.
    """
    return format(n, "b").zfill(10)
def AS(input, count_times):
    """Simulate the baseline 10-qubit 'AS' circuit on the qasm simulator.

    Encodes ``input`` (via dec2bin) onto register ``a``, applies the fixed
    four-stage multi-controlled gate sequence driven by the 2-qubit ancilla
    register ``b``, measures ``a``, and returns the counts dict
    ({bitstring: occurrences}) over ``count_times * 100`` shots.

    NOTE(review): the parameter name ``input`` shadows the builtin.
    """
    simulator = Aer.get_backend('qasm_simulator')
    a = QuantumRegister(10)   # data register (measured at the end)
    b = QuantumRegister(2)    # ancilla/control qubits
    c = ClassicalRegister(10)
    qc = QuantumCircuit(a, b, c)
    # Load the input bits: string position 9-i maps onto qubit a[i].
    input_string = dec2bin(input)
    for i in range(len(input_string)):
        if input_string[9 - i] == '1':
            qc.x(a[i])
    qc.barrier(a)
    # Prepare superposition/phase state on a[2] and the ancillas.
    qc.h(a[2])
    qc.p(math.pi / 4, a[2])
    qc.x(b[0])
    qc.h(b[1])
    qc.p(math.pi / 2, b[1])
    # Stage 1: multi-controlled-X cascade controlled on b[0] plus a prefix of a.
    for i in range(9):
        control = []
        control.append(b[0])
        for j in range(9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.cnot(b[0], a[0])
    qc.barrier(a)
    # Stage 2: cascade controlled on b[0] AND b[1].
    for i in range(8):
        control = []
        control.append(b[0])
        control.append(b[1])
        for j in range(1, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.ccx(b[0], b[1], a[1])
    qc.barrier(a)
    # Stage 3: the same b[0]&b[1]-controlled cascade, applied a second time.
    for i in range(8):
        control = []
        control.append(b[0])
        control.append(b[1])
        for j in range(1, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.ccx(b[0], b[1], a[1])
    qc.barrier(a)
    # Stage 4: cascade controlled on b[1] alone.
    for i in range(7):
        control = []
        control.append(b[1])
        for j in range(2, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.cnot(b[1], a[2])
    qc.barrier(a)
    qc.measure(a, c)
    # circuit_drawer(qc, filename='./AS_circuit')
    job = execute(qc, simulator, shots=count_times * 100)
    result = job.result()
    counts = result.get_counts(qc)
    return counts
# add
def AS_M1(input, count_times):
    """Mutant 1 of AS(): identical circuit except a ``cnot(a[0], a[6])``
    is injected right after input loading (marked ``# M1`` below).
    Returns the measurement counts dict, as AS() does.
    """
    simulator = Aer.get_backend('qasm_simulator')
    a = QuantumRegister(10)
    b = QuantumRegister(2)
    c = ClassicalRegister(10)
    qc = QuantumCircuit(a, b, c)
    input_string = dec2bin(input)
    for i in range(len(input_string)):
        if input_string[9 - i] == '1':
            qc.x(a[i])
    qc.barrier(a)
    qc.cnot(a[0], a[6]) # M1
    qc.h(a[2])
    qc.p(math.pi / 4, a[2])
    qc.x(b[0])
    qc.h(b[1])
    qc.p(math.pi / 2, b[1])
    # Stages 1-4 below are byte-for-byte the baseline AS() sequence.
    for i in range(9):
        control = []
        control.append(b[0])
        for j in range(9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.cnot(b[0], a[0])
    qc.barrier(a)
    for i in range(8):
        control = []
        control.append(b[0])
        control.append(b[1])
        for j in range(1, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.ccx(b[0], b[1], a[1])
    qc.barrier(a)
    for i in range(8):
        control = []
        control.append(b[0])
        control.append(b[1])
        for j in range(1, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.ccx(b[0], b[1], a[1])
    qc.barrier(a)
    for i in range(7):
        control = []
        control.append(b[1])
        for j in range(2, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.cnot(b[1], a[2])
    qc.barrier(a)
    qc.measure(a, c)
    ##circuit_drawer(qc, filename='./AS_M1_circuit')
    job = execute(qc, simulator, shots=count_times * 100)
    result = job.result()
    counts = result.get_counts(qc)
    return counts
def AS_M2(input, count_times):
    """Mutant 2 of AS(): identical circuit except a ``swap(a[6], a[0])``
    is injected right after input loading (marked ``# M2`` below).
    Returns the measurement counts dict, as AS() does.
    """
    simulator = Aer.get_backend('qasm_simulator')
    a = QuantumRegister(10)
    b = QuantumRegister(2)
    c = ClassicalRegister(10)
    qc = QuantumCircuit(a, b, c)
    input_string = dec2bin(input)
    for i in range(len(input_string)):
        if input_string[9 - i] == '1':
            qc.x(a[i])
    qc.barrier(a)
    qc.swap(a[6], a[0]) # M2
    qc.h(a[2])
    qc.p(math.pi / 4, a[2])
    qc.x(b[0])
    qc.h(b[1])
    qc.p(math.pi / 2, b[1])
    # Stages 1-4 below are byte-for-byte the baseline AS() sequence.
    for i in range(9):
        control = []
        control.append(b[0])
        for j in range(9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.cnot(b[0], a[0])
    qc.barrier(a)
    for i in range(8):
        control = []
        control.append(b[0])
        control.append(b[1])
        for j in range(1, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.ccx(b[0], b[1], a[1])
    qc.barrier(a)
    for i in range(8):
        control = []
        control.append(b[0])
        control.append(b[1])
        for j in range(1, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.ccx(b[0], b[1], a[1])
    qc.barrier(a)
    for i in range(7):
        control = []
        control.append(b[1])
        for j in range(2, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.cnot(b[1], a[2])
    qc.barrier(a)
    qc.measure(a, c)
    ##circuit_drawer(qc, filename='./AS_M2_circuit')
    job = execute(qc, simulator, shots=count_times * 100)
    result = job.result()
    counts = result.get_counts(qc)
    return counts
def AS_M3(input, count_times):
    """Mutant 3 of AS(): identical circuit except a ``cx(a[6], a[0])``
    is injected just before measurement (marked ``# M3`` below).
    Returns the measurement counts dict, as AS() does.
    """
    simulator = Aer.get_backend('qasm_simulator')
    a = QuantumRegister(10)
    b = QuantumRegister(2)
    c = ClassicalRegister(10)
    qc = QuantumCircuit(a, b, c)
    input_string = dec2bin(input)
    for i in range(len(input_string)):
        if input_string[9 - i] == '1':
            qc.x(a[i])
    qc.barrier(a)
    qc.h(a[2])
    qc.p(math.pi / 4, a[2])
    qc.x(b[0])
    qc.h(b[1])
    qc.p(math.pi / 2, b[1])
    # Stages 1-4 below are byte-for-byte the baseline AS() sequence.
    for i in range(9):
        control = []
        control.append(b[0])
        for j in range(9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.cnot(b[0], a[0])
    qc.barrier(a)
    for i in range(8):
        control = []
        control.append(b[0])
        control.append(b[1])
        for j in range(1, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.ccx(b[0], b[1], a[1])
    qc.barrier(a)
    for i in range(8):
        control = []
        control.append(b[0])
        control.append(b[1])
        for j in range(1, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.ccx(b[0], b[1], a[1])
    qc.barrier(a)
    for i in range(7):
        control = []
        control.append(b[1])
        for j in range(2, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.cnot(b[1], a[2])
    qc.barrier(a)
    qc.cx(a[6], a[0]) # M3
    qc.measure(a, c)
    ##circuit_drawer(qc, filename='./AS_M3_circuit')
    job = execute(qc, simulator, shots=count_times * 100)
    result = job.result()
    counts = result.get_counts(qc)
    return counts
def AS_M4(input, count_times):
    """Mutant 4 of AS(): identical circuit except a controlled swap
    ``cswap(a[5], a[8], a[0])`` is injected just before measurement
    (marked ``# M4`` below). Returns the measurement counts dict.
    """
    simulator = Aer.get_backend('qasm_simulator')
    a = QuantumRegister(10)
    b = QuantumRegister(2)
    c = ClassicalRegister(10)
    qc = QuantumCircuit(a, b, c)
    input_string = dec2bin(input)
    for i in range(len(input_string)):
        if input_string[9 - i] == '1':
            qc.x(a[i])
    qc.barrier(a)
    qc.h(a[2])
    qc.p(math.pi / 4, a[2])
    qc.x(b[0])
    qc.h(b[1])
    qc.p(math.pi / 2, b[1])
    # Stages 1-4 below are byte-for-byte the baseline AS() sequence.
    for i in range(9):
        control = []
        control.append(b[0])
        for j in range(9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.cnot(b[0], a[0])
    qc.barrier(a)
    for i in range(8):
        control = []
        control.append(b[0])
        control.append(b[1])
        for j in range(1, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.ccx(b[0], b[1], a[1])
    qc.barrier(a)
    for i in range(8):
        control = []
        control.append(b[0])
        control.append(b[1])
        for j in range(1, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.ccx(b[0], b[1], a[1])
    qc.barrier(a)
    for i in range(7):
        control = []
        control.append(b[1])
        for j in range(2, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.cnot(b[1], a[2])
    qc.barrier(a)
    qc.cswap(a[5], a[8], a[0]) # M4
    qc.measure(a, c)
    ##circuit_drawer(qc, filename='./AS_M4_circuit')
    job = execute(qc, simulator, shots=count_times * 100)
    result = job.result()
    counts = result.get_counts(qc)
    return counts
def AS_M5(input, count_times):
    """
    Build and run one mutant ('M5') of the AS quantum circuit and return the
    measurement counts.
    Input:  input       --- decimal value encoded into the 10-qubit register a
            count_times --- shot multiplier; the circuit is sampled
                            count_times * 100 times
    Output: counts      --- qiskit counts dict {bitstring: frequency}
    NOTE(review): the M5 mutation is the extra qc.cnot(a[6], a[0]) inserted
    between the two (b[0], b[1])-controlled cascades; otherwise identical to
    the other AS_M* functions -- confirm against the experiment driver.
    """
    simulator = Aer.get_backend('qasm_simulator')
    a = QuantumRegister(10)
    b = QuantumRegister(2)
    c = ClassicalRegister(10)
    qc = QuantumCircuit(a, b, c)
    #--- encode the binary expansion of input into register a
    input_string = dec2bin(input)
    for i in range(len(input_string)):
        if input_string[9 - i] == '1':
            qc.x(a[i])
    qc.barrier(a)
    #--- superposition/phase preparation on a[2] and the ancillas b[0], b[1]
    qc.h(a[2])
    qc.p(math.pi / 4, a[2])
    qc.x(b[0])
    qc.h(b[1])
    qc.p(math.pi / 2, b[1])
    #--- cascade of multi-controlled-X gates controlled on b[0]
    for i in range(9):
        control = []
        control.append(b[0])
        for j in range(9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.cnot(b[0], a[0])
    qc.barrier(a)
    #--- first cascade controlled on both b[0] and b[1]
    for i in range(8):
        control = []
        control.append(b[0])
        control.append(b[1])
        for j in range(1, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.ccx(b[0], b[1], a[1])
    qc.barrier(a)
    #--- the M5 mutation: extra cnot between the two cascades
    qc.cnot(a[6], a[0])
    for i in range(8):
        control = []
        control.append(b[0])
        control.append(b[1])
        for j in range(1, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.ccx(b[0], b[1], a[1])
    qc.barrier(a)
    #--- final cascade controlled on b[1] only
    for i in range(7):
        control = []
        control.append(b[1])
        for j in range(2, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.cnot(b[1], a[2])
    qc.barrier(a)
    qc.measure(a, c)
    ##circuit_drawer(qc, filename='./AS_M5_circuit')
    job = execute(qc, simulator, shots=count_times * 100)
    result = job.result()
    counts = result.get_counts(qc)
    return counts
def AS_specification(input):
    """
    Build the unmutated (reference) AS circuit and return its exact statevector.
    Input:  input  --- decimal value encoded into the 10-qubit register a
    Output: vector --- qiskit statevector of all 12 qubits (registers a and b);
                       no measurement is performed
    NOTE(review): this is the specification circuit the AS_M* mutants are
    compared against -- same gate sequence, statevector simulator instead of
    shot-based sampling.
    """
    simulator = Aer.get_backend('statevector_simulator')
    a = QuantumRegister(10)
    b = QuantumRegister(2)
    c = ClassicalRegister(10)
    qc = QuantumCircuit(a, b, c)
    #--- encode the binary expansion of input into register a
    input_string = dec2bin(input)
    for i in range(len(input_string)):
        if input_string[9 - i] == '1':
            qc.x(a[i])
    qc.barrier(a)
    #--- superposition/phase preparation on a[2] and the ancillas b[0], b[1]
    qc.h(a[2])
    qc.p(math.pi / 4, a[2])
    qc.x(b[0])
    qc.h(b[1])
    qc.p(math.pi / 2, b[1])
    #--- cascade of multi-controlled-X gates controlled on b[0]
    for i in range(9):
        control = []
        control.append(b[0])
        for j in range(9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.cnot(b[0], a[0])
    qc.barrier(a)
    #--- two identical cascades controlled on both b[0] and b[1]
    for i in range(8):
        control = []
        control.append(b[0])
        control.append(b[1])
        for j in range(1, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.ccx(b[0], b[1], a[1])
    qc.barrier(a)
    for i in range(8):
        control = []
        control.append(b[0])
        control.append(b[1])
        for j in range(1, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.ccx(b[0], b[1], a[1])
    qc.barrier(a)
    #--- final cascade controlled on b[1] only
    for i in range(7):
        control = []
        control.append(b[1])
        for j in range(2, 9 - i):
            control.append(a[j])
        qc.mct(control, a[9 - i])
    qc.cnot(b[1], a[2])
    vector = execute(qc, simulator).result().get_statevector()
    return vector
def probabilityComputing(input):
    """
    Return the probability of each of the 1024 basis states of register a,
    marginalized over the two ancilla qubits b.
    Input:  input --- decimal value forwarded to AS_specification
    Output: pt    --- list of 1024 probabilities (|amplitude|^2 sums)
    """
    state_vector = AS_specification(input)
    pt = []
    for basis in range(1024):
        # the ancilla index selects one of 4 blocks of 1024 amplitudes
        pt.append(sum(abs(state_vector[anc * 1024 + basis]) ** 2 for anc in range(4)))
    return pt
|
# import pyximport; pyximport.install()
from .version import __version__
from .utils.reads_utils import ReadSet
from .utils.out_utils import DiceFile, SampleFile
from .utils.bias_utils import FastaFile, BiasFile
from .utils.tran_utils import TranUnits, TranSplice
from .utils.sam_utils import load_samfile, fetch_reads
from .utils.gtf_utils import Gene, Transcript, load_annotation, loadgene
from .models.mcmc_sampler import mcmc_sampler
from .models.bayes_factor import miso_BF, dicediff_BF, get_BioVar
from .models.model_GP import Psi_GP_MH, normal_pdf, GP_K, Geweke_Z
from .models.model_static import Psi_MCMC_MH, Psi_analytic, Psi_junction
|
from __future__ import division
import numpy as np
import sys
import itertools
import math
from operator import itemgetter
import matplotlib
matplotlib.use('Agg') #prevent error running remotely
import matplotlib.pyplot as plt
from collections import defaultdict
import heapq
import time
from profilehooks import profile
import pickle
import numba as nb
import scipy
import scipy.stats
# SEED=32 #incorrect method seems to do well with this seed??
#--- fixed global RNG seed so the sampling experiments are reproducible
SEED=1
np.random.seed(SEED)
#--- random slack added to each Node's upper bound; 0.0 disables the slack
UPPER_BOUND_MULTIPLIER = 0.0
def accelerated_a_star_sample(probs):
    '''
    Draw one Gumbel-max sample from the categorical distribution proportional
    to probs, via an A*-sampling style search over a recursive partition of
    the state space.
    Inputs:
    - probs: (numpy array) unnormalized probabilities of the discrete states
    Output:
    - sample_of_log_Z: (float) one-sample estimate of log(sum(probs)) -- the
      maximum perturbed value minus the Euler-Mascheroni constant
    - sampled_state: (int) index into probs of the sampled state
    '''
    init_node = Node(probs, gumbel_truncation=np.inf)
    #heap, storing the smallest negative gumbel perturbed states, or largest gumbel perturbed states
    #each element is a tuple of (-gumbel_perturbed_state, Node)
    negative_gumbel_perturbed_heap = []
    heapq.heappush(negative_gumbel_perturbed_heap, (-init_node.rand_assoc_gumbel_perturbed_state, init_node))
    cur_partition = init_node.partition()
    while(True):
        np.set_printoptions(linewidth=300)
        #--- find an unexplored node whose upper bound beats the best
        #--- explicitly realized perturbed state found so far
        max_gumbel_upper_bound = -negative_gumbel_perturbed_heap[0][0]
        node_idx_with_max_gumbel_ub = None
        for cur_idx, cur_node in enumerate(cur_partition):
            if cur_node.upper_bound_gumbel_perturbed_state > max_gumbel_upper_bound:
                max_gumbel_upper_bound = cur_node.upper_bound_gumbel_perturbed_state
                node_idx_with_max_gumbel_ub = cur_idx
        if node_idx_with_max_gumbel_ub is None:
            break #we found the maximum gumbel perturbed state
        #--- realize that node's random state, split the node, and continue
        heapq.heappush(negative_gumbel_perturbed_heap, (-cur_partition[node_idx_with_max_gumbel_ub].rand_assoc_gumbel_perturbed_state, cur_partition[node_idx_with_max_gumbel_ub]))
        if cur_partition[node_idx_with_max_gumbel_ub].assignment_count > 1:
            cur_partition.extend(cur_partition[node_idx_with_max_gumbel_ub].partition())
        del cur_partition[node_idx_with_max_gumbel_ub]
    smallest_gumbel_perturbed_cost = heapq.heappop(negative_gumbel_perturbed_heap)
    sample_of_log_Z = -smallest_gumbel_perturbed_cost[0] - np.euler_gamma
    # sampled_state =smallest_gumbel_perturbed_cost[1].random_state_idx
    # NOTE(review): recovering the state by float equality on its probability
    # breaks if probs contains duplicate values (int() on a length>1 array
    # raises); the commented index lookup above would avoid that -- confirm.
    # Also np.where returns a 1-tuple, so the assert below is always true.
    sampled_state = np.where(probs == smallest_gumbel_perturbed_cost[1].random_state_prob)
    assert(len(sampled_state) == 1)
    sampled_state = int(sampled_state[0])
    #the total number of assignments is N! or the number of assignments in each of the partitioned
    #nodes plus the number of explicitlitly found assignments in negative_gumbel_perturbed_heap
    total_state_count = sum([node.assignment_count for node in cur_partition]) + len(negative_gumbel_perturbed_heap) + 1
    assert(total_state_count == len(probs))
    return sample_of_log_Z, sampled_state
def compare_truncated_gumbel(n_vals, truncation):
    '''
    https://cmaddis.github.io/
    For each n in n_vals, return a sample of the max of n standard Gumbels
    truncated at `truncation`.  A single base Gumbel is drawn and shifted by
    log(n), so the returned values are coupled across the different n.
    '''
    base_gumbel = np.random.gumbel()
    truncation_term = np.exp(-truncation)
    return_vals = []
    for count in n_vals:
        assert(count > 0), count
        shifted = base_gumbel + math.log(count)
        return_vals.append(-np.log(np.exp(-shifted) + truncation_term))
    return return_vals
class Node:
    """
    One node of the A*-sampling search tree: a subset of the discrete states
    (a sub-array of the unnormalized probabilities) plus the truncated-Gumbel
    bookkeeping needed to bound and sample from that subset.
    NOTE(review): several attributes are assigned more than once in __init__;
    the later assignments look like hypothesis experiments that overwrite the
    textbook A*-sampling quantities -- confirm which variant is intended.
    """
    # @profile
    def __init__(self, probs, gumbel_truncation):
        '''
        Following the terminology used by [1], a node is defined to be a nonempty subset of possible
        assignments to a cost matrix. Every assignment in node N is required to contain
        required_cells and exclude excluded_cells.
        Inputs:
        - probs: (numpy array) the (unnormalized) probabilities contained in this node
        - gumbel_truncation: (float) upper truncation for the max-Gumbel draw
        '''
        self.probs = probs
        self.number_of_states = len(probs)
        self.gumbel_truncation = gumbel_truncation
        self.assignment_count = self.count_assignments()
        if self.assignment_count == 0: #this node is empty
            return
        # compare_gumbel_vals = compare_truncated_gumbel(n_vals=[1, self.assignment_count], truncation=gumbel_truncation)
        #--- coupled truncated-Gumbel draws; n_vals uses the probability mass
        #--- np.sum(self.probs) rather than the state count (commented above)
        compare_gumbel_vals = compare_truncated_gumbel(n_vals=[1, np.sum(self.probs)], truncation=gumbel_truncation)
        self.max_gumbel_1 = compare_gumbel_vals[0]
        self.max_gumbel = compare_gumbel_vals[1]
        # assert(self.max_gumbel == self.max_gumbel_1 + np.log(len(self.probs))), (self.max_gumbel, self.max_gumbel_1 + np.log(len(self.probs)), self.gumbel_truncation)
        self.random_state_idx, self.random_state_prob = self.sample_state_uniform()
        self.rand_assoc_gumbel_perturbed_state = np.log(self.random_state_prob) + self.max_gumbel
        # self.rand_assoc_gumbel_perturbed_state = np.log(self.random_state_prob) + self.max_gumbel
        self.upper_bound_on_sum_of_probs = np.sum(self.probs) + UPPER_BOUND_MULTIPLIER*np.random.rand()
        # #improved upper bound, WRONG
        # self.upper_bound_gumbel_perturbed_state = np.log(self.upper_bound_on_sum_of_probs) + self.max_gumbel_1
        # hypothesized bounds
        #improved upper bound, hypothesized
        # self.upper_bound_gumbel_perturbed_state = np.log(self.upper_bound_on_sum_of_probs) + self.max_gumbel_1 + np.log(np.max(self.probs)) - np.log(self.upper_bound_on_sum_of_probs/len(self.probs))
        self.upper_bound_gumbel_perturbed_state = self.max_gumbel + np.log(np.max(self.probs)) - np.log(self.upper_bound_on_sum_of_probs/len(self.probs))
        # self.upper_bound_gumbel_perturbed_state = np.inf
        #matching lower bound, hypothesized
        # self.random_state_idx = np.random.choice(len(self.probs), p=self.probs/np.sum(self.probs))
        # NOTE(review): the uniform draw below overwrites the values obtained
        # from sample_state_uniform() above (an extra RNG draw is consumed)
        self.random_state_idx = np.random.choice(len(self.probs))
        self.random_state_prob = self.probs[self.random_state_idx]
        #need to change this when UPPER_BOUND_MULTIPLIER != 0
        # self.rand_assoc_gumbel_perturbed_state = self.max_gumbel_1 + np.log(self.upper_bound_on_sum_of_probs) + np.log(self.random_state_prob) - np.log(self.upper_bound_on_sum_of_probs/len(self.probs))
        self.rand_assoc_gumbel_perturbed_state = self.max_gumbel + np.log(self.random_state_prob) - np.log(self.upper_bound_on_sum_of_probs/len(self.probs))
        # NOTE(review): the assignment below algebraically reduces to
        # self.max_gumbel (the two log terms cancel) and overwrites the line
        # above -- presumably an experiment; confirm which bound is intended
        self.rand_assoc_gumbel_perturbed_state = self.max_gumbel + np.log(self.random_state_prob) - np.log(self.random_state_prob)
        # self.rand_assoc_gumbel_perturbed_state = self.max_gumbel + np.log(self.random_state_prob)
        # self.gumbel_max = self.max_gumbel_1 + np.log(self.upper_bound_on_sum_of_probs) #fix truncation during partitioning
        # original A* sampling upper bound
        # self.upper_bound_gumbel_perturbed_state = np.log(np.max(self.probs)) + self.max_gumbel
        # assert(np.abs(self.upper_bound_gumbel_perturbed_state - self.hyp_upper_bound_gumbel_perturbed_state)<.0001), (self.upper_bound_gumbel_perturbed_state, self.max_gumbel_1 + np.log(len(self.probs)) + np.log(np.max(self.probs)), self.hyp_upper_bound_gumbel_perturbed_state)
        # assert(np.abs(self.rand_assoc_gumbel_perturbed_state - self.hyp_rand_assoc_gumbel_perturbed_state)<.0001), (self.rand_assoc_gumbel_perturbed_state, self.hyp_rand_assoc_gumbel_perturbed_state)
    def sample_state_uniform(self):
        '''
        sample a state from among this node's states uniformly, assumes
        the node is nonempty (len(self.probs) > 0)
        '''
        random_state_idx = np.random.choice(len(self.probs))
        random_state_prob = self.probs[random_state_idx]
        return random_state_idx, random_state_prob
    def partition(self):
        '''
        Partition this node
        Output:
        - new_partition: a list of mutually disjoint Nodes, whose union with the minimum assignment
          of this node forms the set of possible assignments represented by this node
        '''
        # print( '#'*80)
        # print( "new_partition called on node with assignment_count =", self.assignment_count)
        #--- split the remaining states (minus the realized one) into two
        #--- random halves; child truncation is this node's max Gumbel
        partition1_count = (self.assignment_count-1)//2
        partition2_count = self.assignment_count - 1 - partition1_count
        probs_to_partition = self.probs.copy()
        probs_to_partition = np.delete(probs_to_partition, self.random_state_idx)
        partition1_indices = np.random.choice(len(probs_to_partition), size=partition1_count, replace=False)
        partition2_indices = np.array([i for i in range(len(probs_to_partition)) if (i not in partition1_indices)])
        assert(len(partition1_indices) == partition1_count and len(partition2_indices) == partition2_count)
        partition1_probs = np.array([probs_to_partition[i] for i in partition1_indices])
        partition2_probs = np.array([probs_to_partition[i] for i in partition2_indices])
        # print partition1_probs
        # print partition2_probs
        # print self.probs
        # print self.random_state_prob
        # print self.random_state_idx
        # print probs_to_partition
        assert(np.abs((np.sum(self.probs) - self.random_state_prob) - (np.sum(partition1_probs) + np.sum(partition2_probs))) < .00000001), (np.sum(self.probs)-self.random_state_prob, np.sum(partition1_probs) + np.sum(partition2_probs))
        new_partition = []
        partition_assignment_counts = []
        if len(partition1_probs) > 0:
            partition1_node = Node(probs=partition1_probs, gumbel_truncation=self.max_gumbel)
            new_partition.append(partition1_node)
            partition_assignment_counts.append(partition1_node.assignment_count)
        if len(partition2_probs) > 0:
            partition2_node = Node(probs=partition2_probs, gumbel_truncation=self.max_gumbel)
            new_partition.append(partition2_node)
            partition_assignment_counts.append(partition2_node.assignment_count)
        #the sum of assignments over each partitioned node + 1 (the minimum assignment in this node)
        #should be equal to the number of assignments in this node
        assert(self.assignment_count == np.sum(partition_assignment_counts) + 1), (self.assignment_count, partition_assignment_counts, sum(partition_assignment_counts))
        return new_partition
    def count_assignments(self, use_brute_force = False):
        '''
        Count the number of states in this Node.
        (use_brute_force is accepted but unused in this implementation.)
        '''
        return len(self.probs)
# @profile
def test_accelerated_a_star_sample(N,iters,probs=None):
'''
Find the sum of the top k assignments and compare with the trivial bound
on the remaining assignments of (N!-k)*(the kth best assignment)
Inputs:
- N: use a random cost matrix of size (NxN)
- iters: number of random problems to solve and check
'''
if probs is None:
probs = np.random.rand(N)
all_samples_of_log_Z = []
runtimes_list = []
all_sampled_states = []
wall_time = 0
for test_iter in range(iters):
if test_iter % 1000 == 0:
print "completed", test_iter, "iters"
t1 = time.time()
sample_of_log_Z, sampled_state = accelerated_a_star_sample(probs)
t2 = time.time()
runtimes_list.append(t2-t1)
all_sampled_states.append(sampled_state)
cur_wall_time = t2-t1
wall_time += cur_wall_time
all_samples_of_log_Z.append(sample_of_log_Z)
print()
# print( "exact log(permanent):", np.log(calc_permanent_rysers(matrix)))
print( "np.mean(all_samples_of_log_Z) =", np.mean(all_samples_of_log_Z))
log_Z_estimate = np.mean(all_samples_of_log_Z)
return runtimes_list, all_sampled_states, wall_time, log_Z_estimate, all_samples_of_log_Z
def test_sampling_correctness(N=5, ITERS=10000000, probs_to_use='0_1'):
    # check for smaller n
    # check total variatianal distance and compare with sampling normally
    '''
    Test that we're sampling from the correct distributions over assocations:
    compares A* sampling against direct categorical sampling and plain
    exponential-Gumbel sampling, prints TV distances and Anderson-Darling
    statistics, saves a comparison plot and pickles the results.
    NOTE: this file has `from __future__ import division`, so 1/ITERS below
    is true (float) division.
    '''
    #key: length n tuple of associations, each is a tuple of length 2
    #value: dict, with keys:
    #   - 'true probability', value: (float)
    #   - 'empirical probability', value: (float)
    if probs_to_use == 'rand':
        probs = np.random.rand(N)
    elif probs_to_use == '0_1':
        #--- push every probability very close to 0 or 1
        probs = np.random.rand(N)
        for idx in range(len(probs)):
            if probs[idx] < .5:
                probs[idx] = 0 + probs[idx]/10000000000
            else:
                probs[idx] = 1.0 - probs[idx]/10000000000
    elif probs_to_use == 'step':
        probs = np.array([.1, .101, .3, .301, .302, .6, .601, .8, .801, .802])
    else:
        assert(False), "wrong parameter for probs_to_use!!: %s" % probs_to_use
    exact_partition_function = np.sum(probs)
    all_state_probs = {}
    list_of_all_true_probabilities = []
    for idx in range(len(probs)):
        true_probability = probs[idx]/exact_partition_function
        all_state_probs[idx] = {'true probability': true_probability,
                                'empirical probability': 0.0}
        list_of_all_true_probabilities.append(true_probability)
    #--- A* sampling run
    runtimes_list, all_sampled_states, wall_time, log_Z_estimate, all_samples_of_log_Z =\
        test_accelerated_a_star_sample(N, iters=ITERS, probs=probs)
    print("wall_time =", wall_time)
    print("log_Z_estimate =", log_Z_estimate)
    print("np.log(exact_partition_function) =", np.log(exact_partition_function))
    for sampled_state in all_sampled_states:
        all_state_probs[sampled_state]['empirical probability'] += 1/ITERS
    #key: association
    #value: empirical probability based on ITERS standard samples from the true distribution
    empirical_probs_sampled_standard = defaultdict(int)
    assert(ITERS == len(all_sampled_states))
    assert(N == len(list_of_all_true_probabilities))
    for i in range(ITERS):
        sampled_state_idx = np.random.choice(N, p=list_of_all_true_probabilities)
        empirical_probs_sampled_standard[sampled_state_idx] += 1/ITERS
    #--- baseline: perturb each log-prob with an independent Gumbel, take argmax
    empirical_probs_exponential_gumbel = defaultdict(int)
    for i in range(ITERS):
        gumbel_perturbed_states = probs.copy()
        gumbel_perturbed_states = np.log(gumbel_perturbed_states)
        for idx in range(len(gumbel_perturbed_states)):
            compare_gumbel_vals = compare_truncated_gumbel(n_vals=[1], truncation=np.inf)
            gumbel_perturbed_states[idx] += compare_gumbel_vals[0]
        sampled_state_idx = np.argmax(gumbel_perturbed_states)
        empirical_probs_exponential_gumbel[sampled_state_idx] += 1/ITERS
    #--- accumulate total-variation distances and max per-state errors
    empirical_probs = []
    empirical_probs_sampled_standard_list = []
    empirical_probs_exponential_gumbel_list = []
    true_probs = []
    standard_tv_distance = 0
    gumbel_tv_distance = 0
    max_standard_error = 0
    max_gumbel_error = 0
    for state_idx, probabilities in all_state_probs.items():
        true_probs.append(probabilities['true probability'])
        empirical_probs.append(probabilities['empirical probability'])
        empirical_probs_sampled_standard_list.append(empirical_probs_sampled_standard[state_idx])
        empirical_probs_exponential_gumbel_list.append(empirical_probs_exponential_gumbel[state_idx])
        gumbel_tv_distance += np.abs(true_probs[-1] - empirical_probs[-1])
        standard_tv_distance += np.abs(true_probs[-1] - empirical_probs_sampled_standard_list[-1])
        # print "cur gumbel error =", np.abs(true_probs[-1] - empirical_probs[-1])
        if np.abs(true_probs[-1] - empirical_probs[-1]) > max_gumbel_error:
            max_gumbel_error = np.abs(true_probs[-1] - empirical_probs[-1])
        if np.abs(true_probs[-1] - empirical_probs_sampled_standard_list[-1]) > max_standard_error:
            max_standard_error = np.abs(true_probs[-1] - empirical_probs_sampled_standard_list[-1])
    print "gumbel_tv_distance =", gumbel_tv_distance
    print "standard_tv_distance =", standard_tv_distance
    print "max_gumbel_error =", max_gumbel_error
    print "max_standard_error =", max_standard_error
    print
    #--- estimate the Gumbel mean empirically as a sanity reference
    simulated_gumbel_mean = 0
    for i in range(ITERS):
        simulated_gumbel_mean += np.random.gumbel()
    simulated_gumbel_mean /= ITERS
    simulated_gumbel_error = np.abs(simulated_gumbel_mean - np.euler_gamma)
    print "hypothesized gumbel log_Z_estimate error =", np.abs(log_Z_estimate - np.log(exact_partition_function))
    print "simulated_gumbel_error =", simulated_gumbel_error
    print
    #--- Anderson-Darling gumbel_r fit of the log Z samples, with reference
    #--- fits of true gumbel and normal samples for comparison
    statistic, critical_values, significance_level = scipy.stats.anderson(all_samples_of_log_Z, dist='gumbel_r')
    print "Anderson-Darling statistic:", statistic
    print "Anderson-Darling critical_values:", critical_values
    print "Anderson-Darling significance_level:", significance_level
    statistic, critical_values, significance_level = scipy.stats.anderson(np.random.gumbel(loc=0, size=ITERS), dist='gumbel_r')
    print "true gumbel Anderson-Darling statistic:", statistic
    print "true gumbel Anderson-Darling critical_values:", critical_values
    print "true gumbel Anderson-Darling significance_level:", significance_level
    statistic, critical_values, significance_level = scipy.stats.anderson(np.random.gumbel(loc=1, scale=10, size=ITERS), dist='gumbel_r')
    print "true gumbel location 1 Anderson-Darling statistic:", statistic
    print "true gumbel location 1 Anderson-Darling critical_values:", critical_values
    print "true gumbel location 1 Anderson-Darling significance_level:", significance_level
    statistic, critical_values, significance_level = scipy.stats.anderson(np.random.normal(loc=0, size=ITERS), dist='gumbel_r')
    print "normal Anderson-Darling statistic:", statistic
    print "normal Anderson-Darling critical_values:", critical_values
    print "normal Anderson-Darling significance_level:", significance_level
    print
    print "ITERS =", ITERS
    #--- plot empirical vs true probabilities and save results
    fig = plt.figure()
    ax = plt.subplot(111)
    ax.plot(range(N), empirical_probs, 'r+', label='empirical_probs A*' , markersize=10)
    ax.plot(range(N), empirical_probs_sampled_standard_list, 'b+', label='empirical_probs sampled standard' , markersize=10)
    ax.plot(range(N), empirical_probs_exponential_gumbel_list, 'm+', label='empirical_probs sampled exponential gumbel' , markersize=10)
    ax.plot(range(N), true_probs, 'gx', label='true_probs' , markersize=10)
    plt.title('permutation probabilities')
    plt.xlabel('arbitrary index')
    plt.ylabel('probability')
    # Put a legend below current axis
    lgd = ax.legend(loc='upper center', bbox_to_anchor=(0.5, -.1),
              fancybox=False, shadow=False, ncol=2, numpoints = 1)
    # plt.show()
    fig_file_name = "2hypothesis_test_accel_astar_correctness_UPPER_BOUND_MULTIPLIER=%d_N=%d_seed=%d_1gumbel_iters=%d_matrix=%s"%(UPPER_BOUND_MULTIPLIER, N, SEED, ITERS, probs_to_use)
    pickle_file_name = "./pickle_experiment_results/" + fig_file_name + ".pickle"
    fig.savefig(fig_file_name, bbox_extra_artists=(lgd,), bbox_inches='tight')
    plt.close()
    f = open(pickle_file_name, 'wb')
    pickle.dump((empirical_probs, empirical_probs_sampled_standard_list, true_probs, all_sampled_states, log_Z_estimate, all_samples_of_log_Z), f)
    f.close()
    return gumbel_tv_distance, standard_tv_distance
if __name__ == "__main__":
    #--- sanity-check A* sampling with 100k draws from a random categorical
    test_sampling_correctness(ITERS=100000, probs_to_use='rand')
    # test_sampling_correctness(ITERS=100000, probs_to_use='0_1')
    # test_sampling_correctness(ITERS=100000, probs_to_use='step')
|
#!/usr/bin/env /proj/sot/ska/bin/python
#################################################################################################
# #
# acis_cti_plot.py: plotting cti trends #
# #
# author: t. isobe (tisobe@cfa.harvard.edu) #
# #
# Last update: Jan 27, 2015 #
# #
#################################################################################################
import os
import sys
import re
import string
import random
import operator
import math
import numpy
import matplotlib as mpl
if __name__ == '__main__':
    mpl.use('Agg')
#
#--- check whether this is a test case
#
comp_test = 'live'
if len(sys.argv) == 2:
    if sys.argv[1] == 'test': #---- test case
        comp_test = 'test'
    elif sys.argv[1] == 'live': #---- automated read in
        comp_test = 'live'
    else:
        comp_test = sys.argv[1].strip() #---- input data name
#
#--- reading directory list
#
if comp_test == 'test' or comp_test == 'test2':
    path = '/data/mta/Script/ACIS/CTI/house_keeping/dir_list_py_test'
else:
    path = '/data/mta/Script/ACIS/CTI/house_keeping/dir_list_py'
f = open(path, 'r')
data = [line.strip() for line in f.readlines()]
f.close()
#
#--- each config line is '<value> : <variable name>'; the exec below creates
#--- a module-level variable (e.g. bin_dir, house_keeping, web_dir) per entry.
#--- NOTE(review): exec of file content runs arbitrary code if the config
#--- file is writable by others -- flagged, left as-is to preserve behavior.
#
for ent in data:
    atemp = re.split(':', ent)
    var = atemp[1].strip()
    line = atemp[0].strip()
    exec "%s = %s" %(var, line)
#
#--- append a path to a private folder to python directory
#
sys.path.append(bin_dir)
sys.path.append(mta_dir)
#
#--- converTimeFormat contains MTA time conversion routines
#
import convertTimeFormat as tcnv
import mta_common_functions as mcf
import robust_linear as robust #---- robust linear fit
from kapteyn import kmpfit
#import kmpfit_chauvenet as chauv
#
#--- temp writing file name
#
rtail = int(10000 * random.random())
zspace = '/tmp/zspace' + str(rtail)
#
#--- today date to set plotting range
#
today_time = tcnv.currentTime()
txmax = today_time[0] + 1.5
#
#--- fitting line division date
#
img_div = 2011
spc_div = 2011
bks_div = 2009
#---------------------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------------------
def run_for_year(year):
    """
    print two html table rows for the given year: per-ccd cti slopes with
    errors, and per-ccd yearly mean values.
    Input:  year --- the year to extract from the per-ccd data files
    Output: html <tr> rows printed to stdout
    """
    base = 3600. * 24. * 365        # seconds per year, to convert epoch time
    lyear = str(year)
    head = '/data/mta/Script/ACIS/CTI/Data/Det_Data_adjust/mn_ccd'
    head2 = '/data/mta/Script/ACIS/CTI/Data/Data_adjust/mn_ccd'
    ssave = []
    esave = []
    asave = []
    for k in range(0, 10):
        #--- ccd 5 and ccd 7 come from a different data directory
        if k ==5 or k == 7:
            ccd = head2 + str(k)
        else:
            ccd = head + str(k)
        f = open(ccd, 'r')
        data = [line.strip() for line in f.readlines()]
        f.close()
        time = []
        vals = []
        for ent in data:
            atemp = re.split('\s+', ent)
            btemp = re.split('-', atemp[0])
            if btemp[0] == lyear:
                #--- quadrant values come as 'value+-error'; keep the values
                ctemp = re.split('\+\-', atemp[1])
                q0 = float(ctemp[0])
                ctemp = re.split('\+\-', atemp[2])
                q1 = float(ctemp[0])
                ctemp = re.split('\+\-', atemp[3])
                q2 = float(ctemp[0])
                ctemp = re.split('\+\-', atemp[4])
                q3 = float(ctemp[0])
                if q0 < 0.0:
                    continue
                avg = 0.25 * ( q0 + q1 + q2 + q3)
                # NOTE(review): only q0 is appended even though the quadrant
                # average avg is computed just above -- confirm intended
                time.append(float(atemp[-2])/ base )
                vals.append(q0)
        [intc, slope, ierr, serr] = linear_fit(time, vals, 100)
        mval = numpy.mean(vals)
        lslope = "%4.3f" % round(slope * 1e1, 3)
        lerr = "%4.3f" % round(serr * 1e1, 3)
        lavg = "%4.3f" % round(mval, 3)
        ssave.append(lslope)
        esave.append(lerr)
        asave.append(lavg)
    #--- row 1: slope +/- error per ccd
    line = '<tr>\n'
    for k in range(0, 10):
        line = line + '<td>' + ssave[k] + '<br />+/-' + esave[k] + '</td>\n'
    line = line + '</tr>'
    print line
    print '\n\n'
    #--- row 2: yearly mean per ccd
    line = '<tr>\n'
    for k in range(0, 10):
        line = line + '<td>' + asave[k] + '</td>\n'
    line = line + '</tr>'
    print line
#---------------------------------------------------------------------------------------------------
#-- linear_fit: linear fitting function with 99999 error removal ---
#---------------------------------------------------------------------------------------------------
def linear_fit(x, y, iter):
    """
    linear fitting function with outlier removal
    Input:  x    --- independent variable array
            y    --- dependent variable array
            iter --- iteration count passed to the robust fitter
    Output: [intc, slope, 0.0, serr] --- intercept, slope, placeholder
            intercept error (always 0.0), slope error
    """
    #
    #--- mean and sigma of the positive y values only
    #
    total = 0
    total_sq = 0
    cnt = 0
    for val in y:
        if val > 0:
            total += val
            total_sq += val * val
            cnt += 1
    if cnt > 0:
        avg = total / cnt
        #--- sig is kept for parity with the original computation (it can
        #--- raise on degenerate input) although it is not used below
        sig = math.sqrt(total_sq/cnt - avg * avg)
    else:
        avg = 3.0
    #
    #--- keep only points with 0 < y < avg + 2 (drops -99999/9999 errors)
    #
    lower = 0.0
    upper = avg + 2.0
    xn = []
    yn = []
    for idx in range(0, len(x)):
        if lower < y[idx] < upper:
            xn.append(x[idx])
            yn.append(y[idx])
    #
    #--- a robust fit needs more than 10 surviving points
    #
    if len(yn) > 10:
        [intc, slope, serr] = robust.robust_fit(xn, yn, iter=iter)
    else:
        [intc, slope, serr] = [0, 0, 0]
    #--- the intercept error slot is not estimated; it is returned as 0.0
    return [intc, slope, 0.0, serr]
#---------------------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------------------
def model(p, x):
    """Evaluate the straight line y = intercept + slope * x for kmpfit."""
    intercept, slope = p
    return intercept + slope * x
#---------------------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------------------
def residuals(p, data):
    """Return the data-model residuals y - model(p, x) for the kmpfit fitter."""
    x_vals, y_vals = data
    return y_vals - model(p, x_vals)
#---------------------------------------------------------------------------------------------------
#-- convTimeFullColumn: convert time format to fractional year for the entire array ---
#---------------------------------------------------------------------------------------------------
def convTimeFullColumn(time_list):
    """
    convert time format to fractional year for the entire array
    Input:  time_list --- a list of time entries (any format accepted by
                          tcnv.dateFormatConAll)
    Output: converted --- a list of times as fractional years
    """
    import calendar
    converted = []
    for ent in time_list:
        time = tcnv.dateFormatConAll(ent)
        year = time[0]
        ydate = time[6]
        #--- proper Gregorian leap-year rule; the previous test
        #--- (year == 4.0 * int(0.25 * year)) wrongly treated century years
        #--- such as 1900 and 2100 as leap years
        if calendar.isleap(year):
            base = 366
        else:
            base = 365
        yf = year + ydate / base
        converted.append(yf)
    return converted
#---------------------------------------------------------------------------------------------------
#-- separateErrPart: separate the error part of each entry of the data array ---
#---------------------------------------------------------------------------------------------------
def separateErrPart(data):
    """
    split each 'value+-error' entry of the data array into its two parts
    Input:  data    --- array of strings of the form '<value>+-<error>'
    Output: cleaned --- list of the float values
            err     --- list of the float errors
    """
    pieces = [re.split('\+\-', entry) for entry in data]
    cleaned = [float(item[0]) for item in pieces]
    err = [float(item[1]) for item in pieces]
    return [cleaned, err]
#---------------------------------------------------------------------------------------------------
#--- plotPanel: plots multiple data in separate panels ---
#---------------------------------------------------------------------------------------------------
def plotPanel(xmin, xmax, yMinSets, yMaxSets, xSets, ySets, xname, yname, entLabels, ydiv, yErrs = [], intList = [], slopeList = [], intList2 = [], slopeList2 = []):
    """
    This function plots multiple data in separate panels
    Input:  xmin, xmax: x plotting range (shared by all panels)
            yMinSets / yMaxSets: lists of per-panel y limits
            xSets:     a list of lists containing x-axis data
            ySets:     a list of lists containing y-axis data
            xname / yname: axis labels
            entLabels: a list of the names of each data set (one per panel)
            ydiv:      accepted but unused in this function body
            yErrs:     optional per-panel y-error arrays
            intList / slopeList: optional fitted intercept/slope per panel
            intList2 / slopeList2: unused (slopeList2 only in a commented line)
    Output: a png plot: out.png
    NOTE(review): the mutable default arguments ([]) are never mutated here,
    so they are harmless, but fragile if this function is ever changed.
    NOTE(review): plt, font_manager, legend, xlabel and matplotlib are not
    imported in this module's visible header -- presumably supplied by a star
    import elsewhere; confirm before running standalone.
    """
    #
    #--- set line color list
    #
    colorList = ('blue', 'green', 'red', 'aqua', 'lime', 'fuchsia', 'maroon', 'black', 'yellow', 'olive')
    #
    #--- clean up the plotting device
    #
    plt.close('all')
    #
    #---- set a few parameters
    #
    mpl.rcParams['font.size'] = 9
    props = font_manager.FontProperties(size=9)
    plt.subplots_adjust(hspace=0.08)
    tot = len(ySets)
    #
    #--- start plotting each data
    #
    for i in range(0, len(entLabels)):
        axNam = 'ax' + str(i)
        #
        #--- setting the panel position
        #
        j = i + 1
        if i == 0:
            line = str(tot) + '1' + str(j)
        else:
            line = str(tot) + '1' + str(j) + ', sharex=ax0'
        # NOTE(review): the line below overwrites the sharex variant built
        # above, so axis sharing is effectively disabled -- confirm intended
        line = str(tot) + '1' + str(j)
        exec "ax%s = plt.subplot(%s)" % (str(i), line)
        exec "ax%s.set_autoscale_on(False)" % (str(i)) #---- these three may not be needed for the new pylab, but
        exec "ax%s.set_xbound(xmin,xmax)" % (str(i)) #---- they are necessary for the older version to set
        exec "ax%s.set_xlim(xmin=xmin, xmax=xmax, auto=False)" % (str(i))
        exec "ax%s.set_ylim(ymin=yMinSets[i], ymax=yMaxSets[i], auto=False)" % (str(i))
        xdata = xSets[i]
        ydata = ySets[i]
        #--- echk/pchk flag whether error bars / fitted lines are available
        echk = 0
        if len(yErrs) > 0:
            yerr = yErrs[i]
            echk = 1
        pchk = 0
        if len(intList) > 0:
            intc = intList[i]
            slope = slopeList[i]
            pstart = intc + slope * xmin
            pstop = intc + slope * xmax
            pchk = 1
        #
        #---- actual data plotting
        #
        p, = plt.plot(xdata, ydata, color=colorList[i], marker='.', markersize=3.0, lw =0)
        if echk > 0:
            # p, = plt.errorbar(xdata, ydata, yerr=yerr, marker='.', markersize=1.5, lw =0)
            plt.errorbar(xdata, ydata, yerr=yerr,color=colorList[i], markersize=3.0, fmt='.')
        if pchk > 0:
            plt.plot([xmin,xmax], [pstart, pstop], colorList[i], lw=1)
        #
        #--- add legend
        #
        ltext = entLabels[i] + ' / Slope (CTI/Year): ' + str(round(slopeList[i] * 1.0e2, 3)) + ' x 10**-2 '
        # ltext = ltext + str(round(slopeList2[i] * 1.0e2, 3)) + ' x 10**-2 '
        leg = legend([p], [ltext], prop=props, loc=2)
        leg.get_frame().set_alpha(0.5)
        exec "ax%s.set_ylabel(yname, size=8)" % (str(i))
        #
        #--- add x ticks label only on the last panel
        #
        ### for i in range(0, tot):
        if i != tot-1:
            exec "line = ax%s.get_xticklabels()" % (str(i))
            for label in line:
                label.set_visible(False)
        else:
            pass
    xlabel(xname)
    #
    #--- set the size of the plotting area in inch (width: 10.0in, height 2.08in x number of panels)
    #
    fig = matplotlib.pyplot.gcf()
    height = (2.00 + 0.08) * tot
    fig.set_size_inches(10.0, height)
    #
    #--- save the plot in png format
    #
    plt.savefig('out.png', format='png', dpi=100)
#---------------------------------------------------------------------------------------------------
#-- update_cti_page: update cti web page ---
#---------------------------------------------------------------------------------------------------
def update_cti_page():
    """
    update cti web page
    Input:  None but use <house_keeping>/cti_page_template
    Output: <web_page>/cti_page.html
    """
    ctime = tcnv.currentTime("Display")
    #--- read the template; the original shadowed the builtin 'file' and
    #--- left both file handles unclosed -- 'with' guarantees closure
    template_file = house_keeping + 'cti_page_template'
    with open(template_file, 'r') as fi:
        html = fi.read()
    #--- stamp the current time into the template placeholder
    html = html.replace('#DATE#', ctime)
    out = web_dir + 'cti_page.html'
    with open(out, 'w') as fo:
        fo.write(html)
#--------------------------------------------------------------------
if __name__== "__main__":
if len(sys.argv) > 1:
year = sys.argv[1]
year.strip()
run_for_year(year)
else:
print "Provide year"
|
class Persona:
    """Simple person record with name, age and DPI (national id) fields.

    The Java-style get_/set_ accessors are kept because existing callers use
    them; Pythonic properties are added on top as a backward-compatible
    convenience (persona.nombre, persona.edad, persona.dpi).
    """

    def __init__(self, nombre, edad, dpi):
        # name-mangled 'private' attributes, as in the original design
        self.__nombre = nombre
        self.__edad = edad
        self.__dpi = dpi

    def get_nombre(self):
        """Return the person's name."""
        return self.__nombre

    def set_nombre(self, nombre):
        """Replace the person's name."""
        self.__nombre = nombre

    def get_edad(self):
        """Return the person's age."""
        return self.__edad

    def set_edad(self, edad):
        """Replace the person's age."""
        self.__edad = edad

    def get_dpi(self):
        """Return the person's DPI number."""
        return self.__dpi

    def set_dpi(self, dpi):
        """Replace the person's DPI number."""
        self.__dpi = dpi

    # Pythonic attribute access delegating to the accessors above, so both
    # styles stay in sync.
    nombre = property(get_nombre, set_nombre)
    edad = property(get_edad, set_edad)
    dpi = property(get_dpi, set_dpi)
# demo: build one Persona and print its fields through the accessors
persona1 = Persona("Angel", 23, 2988166440101)
print(persona1.get_nombre(),"Tu edad es:", persona1.get_edad(),"Tu numero de dpi es:", persona1.get_dpi())
# Generated by Django 3.0.4 on 2020-04-04 19:52
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the ObjectStats table.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='ObjectStats',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # class name of the tracked object (empty string by default)
                ('object_class', models.CharField(default='', max_length=50)),
                # running total of units sold for this object class
                ('quantity_sold', models.IntegerField(blank=True, default=0)),
            ],
        ),
    ]
|
def f(n):
    """Return the n-th triangular number, 1 + 2 + ... + n.

    Implicitly returns None when *n* is not a positive integer
    (preserving the original's behavior for invalid input).
    Uses floor division so an int argument yields an int result
    rather than the float produced by true division.
    """
    if isinstance(n, int) and n >= 1:
        return n * (n + 1) // 2
|
#!/usr/bin/env python3
import sys
import os
from report_num_vals_in_dict import report_nums
# Tally how many syslog lines each program (the "who" field, column 5)
# produced, then report the counts in descending order.
info = {}

with open('/var/log/syslog') as logfile:
    for line in logfile:
        parts = line.split()
        # skip blank/malformed lines instead of raising IndexError
        if len(parts) < 5:
            continue
        who = parts[4]
        # the program name may carry a "[pid]" suffix; strip it off
        pos = who.find('[')
        if pos != -1:
            who = who[:pos]
        # idiomatic counter update (was setdefault + separate rebind)
        info[who] = info.get(who, 0) + 1

report_nums(info, sortkey=info.get, reverse=True)
|
#!/usr/bin/env python
from collections import deque
def spam(name, count):
    """Generator task: print a greeting *count* times, yielding control
    back to the scheduler after each one."""
    step = 0
    while step < count:
        print("I'm {name} {i}".format(name=name, i=step))
        yield
        step += 1
def main():
    """Round-robin scheduler: rotate through two spam() generator tasks
    until every task is exhausted."""
    tasks = deque()
    tasks.append(spam('foo', 23))
    tasks.append(spam('bar', 42))
    while tasks:
        current = tasks.pop()          # take a task from the right end...
        try:
            next(current)
            tasks.appendleft(current)  # ...and requeue it on the left
        except StopIteration:
            pass                       # task finished; drop it


if __name__ == '__main__':
    main()
|
# Drive the insert-test stepper motor via a Pi-Plates DAQC2 board.
# Reads the test configuration, then steps the motor a size-dependent
# distance, slowing down over the final quarter of the travel.
from configparser import ConfigParser
import piplates.DAQC2plate as DAQC2
import time

parser = ConfigParser()
CONF = '/home/pi/Desktop/inserttest-master/inserttest.config'
parser.read(CONF)

if (parser['inserttest_config']['motor'] == 'on') and (parser['inserttest_config']['loadcell'] == 'on'):
    # BUGFIX: 'flag' was never assigned anywhere (the original wrote the
    # no-op comparison 'flag == 1'), so every run raised NameError below.
    flag = 0

    parser.read(CONF)
    if (parser['inserttest_config']['insert_section'] == 'right') or (parser['inserttest_config']['insert_section'] == 'left'):
        if (parser['inserttest_config']['size'] == '5'):
            steps = 100  # 4965
        if (parser['inserttest_config']['size'] == '5.5'):
            steps = 4413
        if (parser['inserttest_config']['size'] == '7'):
            steps = 3310

    parser.read(CONF)
    if (parser['inserttest_config']['insert_section'] == 'middle'):
        if (parser['inserttest_config']['size'] == '5'):
            steps = 5241
        if (parser['inserttest_config']['size'] == '5.5'):
            steps = 4689
        if (parser['inserttest_config']['size'] == '7'):
            flag = 1  # BUGFIX: was 'flag == 1' (comparison, not assignment)
            steps = 4413

    # NOTE(review): 'steps' is only assigned for sizes 5/5.5/7 -- any other
    # configured size still raises NameError here; confirm the valid sizes.
    x = 1
    speed = .001
    slowpoint = (steps) * 0.75
    # 1000ms = 14.5/16ths of an inch on input resolution of 200
    DAQC2.toggleDOUTbit(7, 0)

    if flag == 1:
        for x in range(steps):
            print(x)
            DAQC2.toggleDOUTbit(7, 2)
            x = x + 1
            if x == (int(slowpoint)):
                # slow down for the last quarter of the travel
                speed = .005
            time.sleep(speed)

    # NOTE(review): with flag == 1 and steps in the thousands this branch
    # can never run, so the return travel never executes -- it looks like
    # the condition was meant to fire after the forward move completes;
    # confirm the intended trigger before relying on the reverse motion.
    if flag == steps:
        flag = 0  # BUGFIX: was 'flag == 0' (a no-op comparison)
        DAQC2.toggleDOUTbit(7, 1)
        speed = .001
        for x in range(steps):
            DAQC2.toggleDOUTbit(7, 2)
            x = x + 1
            time.sleep(speed)

    DAQC2.setDOUTall(7, 0)
else:
    print ("Error")
|
'''
Object image onscreen layout using Pygame.
Created on Jun 24, 2011
@author: romanows
Copyright 2011 Brian Romanowski. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY BRIAN ROMANOWSKI ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
EVENT SHALL BRIAN ROMANOWSKI OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are those
of the authors.
'''
from __future__ import division
import pygame
from random import Random
class GridElement:
    """One cell of the onscreen object grid.

    Holds the experiment object placed in the cell (None while empty),
    the cell's screen rectangle, and its grid row/column indices.
    """

    def __init__(self, rect, row, col):
        self.obj = None  # experiment object
        self.rect = rect
        self.row = row
        self.col = col

    def __repr__(self):
        # object filename, type, color, row, col, x, y -- the first three
        # fields are left blank while no object occupies this cell
        if self.obj is None:
            return '(,,,%d,%d,%d,%d)' % (self.row, self.col, self.rect.left, self.rect.top)
        return '(%s,%s,%s,%d,%d,%d,%d)' % (self.obj.filename, self.obj.type, self.obj.color, self.row, self.col, self.rect.left, self.rect.top)
class ObjectGrid:
    '''
    Onscreen grid for object placement.

    Uses PyGame for drawing.  Cell sizes and padding are derived from the
    window dimensions in __init__; populate() lays out and draws a fresh
    randomized arrangement of objects; highlight() marks one of them.
    '''

    def __init__(self, windowSurface, numRows, numCols, jitterFactor = 1.0, avgNumMissing = 3, keepCenter = True, fontSize = 48, rnd = Random(42)):
        '''
        Constructor.

        @param windowSurface: pygame surface object
        @param numRows: number of rows in grid
        @param numCols: number of columns in grid
        @param jitterFactor: a number between 0 and 1.0 that is multiplied with a random jitter offset for row and column screen coordinates
        @param avgNumMissing: about how many grid elements should be empty (not containing objects)
        @param keepCenter: always keep the center object
        @param keepCenter: always keep the center object
        @param fontSize: font size of the labels used for the images
        @param rnd: a Random.random object
        '''
        # NOTE(review): the default rnd=Random(42) is evaluated once at class
        # definition, so every ObjectGrid that omits it shares one RNG stream
        # -- confirm that is intended (it does make layouts repeatable).
        self.windowSurface = windowSurface
        self.numRows = numRows
        self.numCols = numCols
        self.size = numRows * numCols
        self.fontSize = fontSize
        # probability that any individual cell is left empty
        self.fractionMissing = avgNumMissing / self.size
        self.keepCenter = keepCenter
        self.jitterFactor = jitterFactor
        self.rnd = rnd

        # Sprite size depends on how many we expect to fit on the screen, plus room for margin/padding. They must be square.
        self.imageWidth = min(windowSurface.get_rect().width/(numCols+2), windowSurface.get_rect().height/(numRows+2))
        self.imageHeight = self.imageWidth

        # Distribute leftover space as padding between the sprites. Note that there are N+1 padding "cells" for N images.
        self.paddingWidth = (self.windowSurface.get_rect().width - self.numCols * self.imageWidth) / (self.numCols+1)
        self.paddingHeight = (self.windowSurface.get_rect().height - self.numRows * (self.imageHeight + self.fontSize)) / (self.numRows+1)

        # a fractional number effectively indicates no center
        self.centerRow = (self.numRows - 1) / 2.0
        self.centerCol = (self.numCols - 1) / 2.0

        self.elements = [] # flat 1d container for grid elements

    def populate(self, experimentObjects):
        '''
        Lay out a fresh randomized grid and draw the objects onto it.

        @param experimentObjects: sequence of objects providing .image,
            .type, .filename and .color.  NOTE(review): this list is
            shuffled IN PLACE below, so the caller's ordering is modified
            -- confirm callers do not depend on it.
        '''
        # Clear old rectangles and objects
        del(self.elements[:])

        # Create grid of rectangles to later receive images
        # (r, c are pixel offsets for the cell; y, x are its row/col indices)
        for r,y in [(y*(self.paddingHeight + self.imageHeight + self.fontSize),y) for y in xrange(self.numRows)]:
            for c,x in [(x*(self.paddingWidth + self.imageWidth),x) for x in xrange(self.numCols)]:
                if self.rnd.random() < self.fractionMissing:
                    # leave this cell empty unless it is the protected center
                    if not self.keepCenter or (self.keepCenter and not(y == self.centerRow and x == self.centerCol)):
                        self.elements.append(None)
                        continue
                # random pixel jitter keeps the layout from looking rigid
                colJitter = int(self.jitterFactor * self.rnd.uniform(-self.paddingWidth/2.0, self.paddingWidth/2.0))
                rowJitter = int(self.jitterFactor * self.rnd.uniform(-self.paddingHeight/2.0, self.paddingHeight/2.0))
                self.elements.append(GridElement(pygame.Rect(c + self.paddingWidth + colJitter, r + self.paddingHeight + rowJitter, self.imageWidth, self.imageHeight),y,x))

        # Copy and shuffle the objects to fill the grid
        objectDeck = []
        while len(objectDeck) < self.size:
            self.rnd.shuffle(experimentObjects)
            objectDeck.extend(experimentObjects)
        objectDeck = objectDeck[:self.size]

        labelFont = pygame.font.SysFont(None, self.fontSize)

        # draw the object images on the rectangles
        for e,o in zip(self.elements,objectDeck):
            if e is None:
                continue # skip empty grid elements
            r = e.rect
            self.windowSurface.blit(o.image,r)
            e.obj = o
            # text label rendered just below the image
            e.label = labelFont.render(o.type, True, (0, 0, 0), (255,255,255,0))
            lr = e.label.get_rect()
            lr.left, lr.top = r.left, r.bottom
            self.windowSurface.blit(e.label, lr)

    def get(self, row, col):
        '''Return the GridElement at (row, col); None marks an empty cell.'''
        return self.elements[row*self.numCols + col]

    def highlight(self):
        '''
        Draw a highlight border around one randomly chosen occupied,
        non-edge cell and return its GridElement.
        '''
        # pick a valid displayed object in the non-edge portion of the screen
        # TODO: FIXME: repeated random selections is ugly, and might never halt if center is empty, but let's not worry about that now
        while True:
            r,c = self.rnd.randint(1, self.numRows-2), self.rnd.randint(1, self.numCols-2)
            if self.get(r,c) is not None:
                break
        e = self.get(r,c)
        p = e.rect
        # thick black outer frame with a thinner yellow inner frame
        pygame.draw.rect(self.windowSurface, (0,0,0), (p.left, p.top, p.width, p.height), 12)
        pygame.draw.rect(self.windowSurface, (255,255,0), (p.left, p.top, p.width, p.height), 4)
        return e
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.