text stringlengths 8 6.05M |
|---|
from typing import Optional

import numpy as np
import sklearn
import sklearn.decomposition
import sklearn.linear_model
import sklearn.metrics
import sklearn.model_selection
import sklearn.pipeline
import sklearn.preprocessing
import torch
from fastprogress.fastprogress import force_console_behavior

import mabe
import mabe.config
import mabe.features
import mabe.model
master_bar, progress_bar = force_console_behavior()
# %%
device = "cuda:3"
# %%
result_file = "training_results_2021-04-16 05:26:50.104528_baseline2_task12_smallcontext_0.845.pt"
# TODO: use all runs
result = torch.load(mabe.config.ROOT_PATH / result_file)[0]
# %%
config = result.config
cpc_params = result.best_params[0]
num_features = 37
num_extra_features = 2
cpc = mabe.model.ConvCPC(
num_features,
config.num_embeddings,
config.num_context,
config.num_ahead,
config.num_ahead_subsampling,
config.subsample_length,
num_embedder_blocks=config.num_embedder_blocks,
input_dropout=config.input_dropout,
head_dropout=config.head_dropout,
dropout=config.dropout,
split_idx=config.split_idx,
num_extra_features=num_extra_features,
).to(device)
cpc.load_state_dict(cpc_params)
cpc = cpc.eval()
# %%
task3_path = mabe.config.ROOT_PATH / "train_task3.npy"
test_path = mabe.config.ROOT_PATH / "test-release.npy"
# %%
X_test, X_test_extra, _, groups_test, _ = mabe.features.load_dataset(test_path)
features_test = []
with torch.no_grad():
for idx in range(len(X_test)):
# from feature preprocessing
crop_pre = 1
crop_post = 0
group = groups_test[idx]
x = X_test[idx].astype(np.float32)
if config.use_extra_features:
x_extra = X_test_extra[idx].astype(np.float32)
x_extra = torch.from_numpy(x_extra).to(device, non_blocking=True)
x = torch.transpose(torch.from_numpy(x[None, :, :]), 2, 1).to(device, non_blocking=True)
x_emb = cpc.embedder(x)
crop = (x.shape[-1] - x_emb.shape[-1]) // 2
crop_pre += crop
crop_post += crop
c = cpc.apply_contexter(x_emb, device)
crop = x_emb.shape[-1] - c.shape[-1]
crop_pre += crop
logreg_features = c[0].T
if config.use_extra_features:
x_extra = x_extra[crop_pre : -(crop_post - 1)]
if config.use_extra_features:
x_cominbed = torch.cat((logreg_features, x_extra), dim=-1)
else:
x_combined = logreg_features
x_combined = x_cominbed.cpu().data.numpy()
features_test.append(x_combined)
features_test = np.concatenate(features_test)
# %%
cv_scores = []
for behavior, (X, X_extra, Y, groups, annotators) in mabe.features.load_task3_datasets(task3_path):
X_flat = []
Y_flat = []
groups_flat = []
with torch.no_grad():
for idx in range(len(X)):
# from feature preprocessing
crop_pre = 1
crop_post = 0
x = X[idx].astype(np.float32)
x_extra = None
if config.use_extra_features:
x_extra = X_extra[idx].astype(np.float32)
x_extra = torch.from_numpy(x_extra).to(device, non_blocking=True)
g = np.array([idx])
x = torch.transpose(torch.from_numpy(x[None, :, :]), 2, 1).to(device, non_blocking=True)
x_emb = cpc.embedder(x)
crop = (x.shape[-1] - x_emb.shape[-1]) // 2
crop_pre += crop
crop_post += crop
c = cpc.apply_contexter(x_emb, device)
crop = x_emb.shape[-1] - c.shape[-1]
crop_pre += crop
logreg_features = c[0].T
x_extra = x_extra[crop_pre : -(crop_post - 1)]
y = Y[idx][crop_pre : -(crop_post - 1)]
x_cominbed = torch.cat((logreg_features, x_extra), dim=-1)
X_flat.append(x_cominbed.cpu().data.numpy())
Y_flat.append(y)
groups_flat.append(g.repeat(len(y)))
X_flat = np.concatenate(X_flat)
Y_flat = np.concatenate(Y_flat)
groups_flat = np.concatenate(groups_flat)
print(behavior)
print(len(np.unique(groups_flat)))
if len(np.unique(groups_flat)) > 1:
cv = sklearn.model_selection.GroupShuffleSplit(8)
else:
cv = sklearn.model_selection.StratifiedShuffleSplit(8)
X_flat_all = np.concatenate((X_flat, features_test))
scaler = sklearn.preprocessing.StandardScaler().fit(X_flat_all)
X_flat = scaler.transform(X_flat)
linear = sklearn.pipeline.make_pipeline(
sklearn.linear_model.LogisticRegression(
multi_class="multinomial", class_weight="balanced", max_iter=1000, C=1e-1
)
)
scores = sklearn.model_selection.cross_validate(
linear,
X_flat,
Y_flat,
n_jobs=8,
cv=cv,
groups=groups_flat,
scoring=dict(
f1=sklearn.metrics.make_scorer(sklearn.metrics.f1_score), # , average="macro"),
precision=sklearn.metrics.make_scorer(
sklearn.metrics.precision_score
), # , average="macro"),
),
)
if len(np.unique(groups_flat)) > 1:
cv_scores.append(scores["test_f1"])
print(np.median(scores["test_f1"]))
print()
print(np.mean(cv_scores))
# %%
submission: dict[str, dict] = {}
for behavior, (X, X_extra, Y, groups, annotators) in mabe.features.load_task3_datasets(task3_path):
submission[behavior] = dict()
X_flat = []
Y_flat = []
groups_flat = []
with torch.no_grad():
for idx in range(len(X)):
# from feature preprocessing
crop_pre = 1
crop_post = 0
x = X[idx].astype(np.float32)
x_extra = None
if config.use_extra_features:
x_extra = X_extra[idx].astype(np.float32)
x_extra = torch.from_numpy(x_extra).to(device, non_blocking=True)
g = np.array([idx])
x = torch.transpose(torch.from_numpy(x[None, :, :]), 2, 1).to(device, non_blocking=True)
x_emb = cpc.embedder(x)
crop = (x.shape[-1] - x_emb.shape[-1]) // 2
crop_pre += crop
crop_post += crop
c = cpc.apply_contexter(x_emb, device)
crop = x_emb.shape[-1] - c.shape[-1]
crop_pre += crop
logreg_features = c[0].T
x_extra = x_extra[crop_pre : -(crop_post - 1)]
y = Y[idx][crop_pre : -(crop_post - 1)]
x_cominbed = torch.cat((logreg_features, x_extra), dim=-1)
X_flat.append(x_cominbed.cpu().data.numpy())
Y_flat.append(y)
groups_flat.append(g.repeat(len(y)))
X_flat = np.concatenate(X_flat)
Y_flat = np.concatenate(Y_flat)
groups_flat = np.concatenate(groups_flat)
X_flat_all = np.concatenate((X_flat, features_test))
scaler = sklearn.preprocessing.StandardScaler().fit(X_flat_all)
X_flat = scaler.transform(X_flat)
linear = sklearn.pipeline.make_pipeline(
sklearn.linear_model.LogisticRegression(
multi_class="multinomial", class_weight="balanced", max_iter=1000, C=1e-1
)
)
linear.fit(X_flat, Y_flat)
with torch.no_grad():
for idx in range(len(X_test)):
# from feature preprocessing
crop_pre = 1
crop_post = 0
group = groups_test[idx]
x = X_test[idx].astype(np.float32)
x_extra = None
if config.use_extra_features:
x_extra = X_test_extra[idx].astype(np.float32)
x_extra = torch.from_numpy(x_extra).to(device, non_blocking=True)
x = torch.transpose(torch.from_numpy(x[None, :, :]), 2, 1).to(device, non_blocking=True)
x_emb = cpc.embedder(x)
crop = (x.shape[-1] - x_emb.shape[-1]) // 2
crop_pre += crop
crop_post += crop
c = cpc.apply_contexter(x_emb, device)
crop = x_emb.shape[-1] - c.shape[-1]
crop_pre += crop
logreg_features = c[0].T
x_extra = x_extra[crop_pre : -(crop_post - 1)]
x_cominbed = torch.cat((logreg_features, x_extra), dim=-1)
x_combined = x_cominbed.cpu().data.numpy()
y_pred = linear.predict(scaler.transform(x_combined))
# TODO: off-by-one?
y_pred = np.concatenate(
(y_pred[:1].repeat(crop_pre), y_pred, y_pred[-1:].repeat(crop_post))
)
submission[behavior][group] = y_pred
# %%
sample_submission = np.load(
mabe.config.ROOT_PATH / "sample-submission-task3.npy", allow_pickle=True
).item()
def validate_submission(submission, sample_submission):
if not isinstance(submission, dict):
print("Submission should be dict")
return False
if not submission.keys() == sample_submission.keys():
print("Submission keys don't match")
return False
for behavior in submission:
sb = submission[behavior]
ssb = sample_submission[behavior]
if not isinstance(sb, dict):
print("Submission should be dict")
return False
if not sb.keys() == ssb.keys():
print("Submission keys don't match")
return False
for key in sb:
sv = sb[key]
ssv = ssb[key]
if not len(sv) == len(ssv):
print(f"Submission lengths of {key} doesn't match")
return False
for key, sv in sb.items():
if not all(isinstance(x, (np.int32, np.int64, int)) for x in list(sv)):
print(f"Submission of {key} is not all integers")
return False
print("All tests passed")
return True
# %%
if validate_submission(submission, sample_submission):
np.save(mabe.config.ROOT_PATH / "task3_submission2.npy", submission)
|
from datetime import datetime
from google.auth.transport import requests
from vs_bim import speak, time
def current_weather():
ow_url = "http://api.openweathermap.org/data/2.5/weather?"
city = 'hanoi'
if not city:
pass
api_key = "fe8d8c65cf345889139d8e545f57819a"
call_url = ow_url + "appid=" + api_key + "&q=" + city + "&units=metric"
response = requests.get(call_url)
data = response.json()
if data["cod"] != "404":
city_res = data["main"]
current_temperature = city_res["temp"]
current_pressure = city_res["pressure"]
current_humidity = city_res["humidity"]
suntime = data["sys"]
sunrise = datetime.datetime.fromtimestamp(suntime["sunrise"])
sunset = datetime.datetime.fromtimestamp(suntime["sunset"])
wthr = data["weather"]
weather_description = wthr[0]["description"]
now = datetime.datetime.now()
content = """
Hôm nay là ngày {day} tháng {month} năm {year}
Mặt trời mọc vào {hourrise} giờ {minrise} phút
Mặt trời lặn vào {hourset} giờ {minset} phút
Nhiệt độ trung bình là {temp} độ C
Áp suất không khí là {pressure} héc tơ Pascal
Độ ẩm là {humidity}%
Trời hôm nay quang mây. Dự báo mưa rải rác ở một số nơi.""".format(day = now.day,month = now.month, year= now.year, hourrise = sunrise.hour, minrise = sunrise.minute,
hourset = sunset.hour, minset = sunset.minute,
temp = current_temperature, pressure = current_pressure, humidity = current_humidity)
speak(content)
time.sleep(20)
else:
speak("Không tìm thấy địa chỉ của bạn")
|
from django.shortcuts import render, redirect, get_object_or_404
from .models import Write, Comment
from .forms import WriteForm, CommentForm
from django.views.decorators.http import require_POST
from django.contrib.auth.decorators import login_required
import random
# Create your views here.
def index(request):
writes = Write.objects.all()
context = {
'writes': writes
}
return render(request, 'index.html', context)
@login_required
def create(request):
if request.method == 'POST':
form = WriteForm(request.POST)
if form.is_valid():
writes = form.save(commit=False)
writes.user = request.user
writes.cnt = 0
writes.save()
return redirect('todos:index')
else:
form = WriteForm()
context = {
'form': form,
}
return render(request, 'post.html', context)
def detail(request, id):
write = get_object_or_404(Write, id=id)
comment_form = CommentForm()
write.cnt += 1
write.save()
context = {
'write': write,
'comment_form': comment_form,
}
return render(request, 'detail.html', context)
def delete(request, id):
if request.method == 'POST':
get_object_or_404(Write, id=id).delete()
return redirect('todos:index')
else:
return redirect('todos:index')
def update(request, id):
write = get_object_or_404(Write, id=id)
if request.method == 'POST':
form = WriteForm(request.POST, instance=write)
if form.is_valid():
form.save()
return redirect('todos:detail', id)
else:
form = WriteForm(instance=write)
context = {
'form': form,
}
return render(request, 'post.html', context)
@require_POST
def comment_create(request, id):
write = get_object_or_404(Write, id=id)
comments_form = CommentForm(request.POST)
if comments_form.is_valid():
comment_ = comments_form.save(commit=False)
comment_.write = write
comment_.user = request.user
comment_.save()
return redirect('todos:detail', id)
@require_POST
def comment_delete(request, id1, id2):
get_object_or_404(Comment, id=id2).delete()
return redirect('todos:detail', id1)
def search(request):
if request.method == 'POST':
a = request.POST
keyword = request.POST.get('keyword')
writes = Write.objects.filter(title__icontains=keyword)
context = {
'writes': writes,
'a': a
}
return render(request, 'search.html', context)
else:
write = Write.objects.all()
context = {
'writes': writes
}
return render(request, 'search.html', context)
def like(request, id):
post = get_object_or_404(Write, id=id)
user = request.user
if user in post.like_users.all():
post.like_users.remove(user)
else:
post.like_users.add(user)
return redirect('todos:detail', id)
def comment_like(request, write_id, comment_id):
post = get_object_or_404(Comment, id=comment_id)
user = request.user
if user in post.comment_like_users.all():
post.comment_like_users.remove(user)
else:
post.comment_like_users.add(user)
return redirect('todos:detail', write_id)
|
import pymongo
_MongoengineConnect = 'mynihongo2'
_MongoUrl = 'localhost'
_Client = pymongo.MongoClient(_MongoUrl,27017)
_Db = _Client[_MongoengineConnect]
|
#!/usr/bin/env python
# -*- coding=utf8 -*-
#######################
#用线程池来写同一个文件
#######################
#######多线程写文件#######
import time
import threading
import logger
import thread_pool
def addNum():
global num #在每个线程中都获取这个全局变量
time.sleep(3)
if lock.acquire(): #修改数据前枷锁
num -= 1
# print('num-->%s'%num)
logger.logger.info('num-->%s'%num)
lock.release() #修改后释放
def watchdog(num):
print('final num:',num)
def callback(success, result):
print("id-->%s,success-->%s"%(num,success))
num = 10 #设定一个共享变量
lock = threading.Lock() #生成全局锁
pool = thread_pool.ThreadPool(5)
for i in range(10):
# pool.run(target=addNum,args=(i,),callback=callback)
pool.run(target=addNum,callback=callback)
pool.close()
# http://www.cnblogs.com/alex3714/articles/5230609.html
# import time
# from threading import Thread
# class CountdownTask:
# def __init__(self):
# self._running = True
#
# def terminate(self):
# self._running = False
#
# def run(self,n):
# while self._running and n > 0:
# print('T-minus',n)
# n -= 1
# time.sleep(5)
# c = CountdownTask()
# t = Thread(target=c.run,args=(10,))
# t.start()
# c.terminate()
# t.join()
# from threading import Thread,Event
# import time
#
# #Code to execute in an independent thread
# def countdown(n,started_evt):
# print('countdown starting')
#
# while n > 0:
# print('T-minus',n)
# n -= 1
# time.sleep(5)
# if n == 5:
# started_evt.set()
#
# #Create the event object that will be used to signal startup
# started_evt = Event()
#
# #Launch the thread and pass the startup event
# print('Launching countdown')
# t = Thread(target=countdown,args=(10,started_evt))
# t.start()
#
# #Wait for the thread to start
# started_evt.wait()
# print('countdown is running')
# import threading
# import time
#
# class PeriodicTimer:
# def __init__(self,interval):
# self._interval = interval
# self._flag = 0
# self._cv = threading.Condition()
# def start(self):
# t = threading.Thread(target=self.run)
# t.daemon = True
#
# t.start()
# def run(self):
# '''
# Run the timer and notify waiting threads after each interval
# '''
# while True:
# time.sleep(self._interval)
# with self._cv:
# self._flag ^= 1
# self._cv.notify_all()
# def wait_for_tick(self):
# '''
# Wait for the next tick of the timer
# '''
# with self._cv:
# last_flag = self._flag
# while last_flag == self._flag:
# self._cv.wait()
#
# # Example use of the timer
# ptimer = PeriodicTimer(5)
# ptimer.start()
#
# # Two threads that synchronize on the timer
# def countdown(nticks):
# while nticks > 0:
# ptimer.wait_for_tick()
# print ('T-minus',nticks)
# nticks -= 1
#
# def countup(last):
# n = 0
# while n < last:
# ptimer.wait_for_tick()
# print('Counting',n)
# n += 1
#
# threading.Thread(target=countdown,args=(10,)).start()
# threading.Thread(target=countup,args=(5,)).start()
# import threading
#
# # Worker thread
# def worker(n,sema):
# # Wait to be signaled
# sema.acquire()
#
# # Do some work
# print('Working',n)
#
# # Create some threads
# sema = threading.Semaphore(0)
# nworkers = 10
# for n in range(nworkers):
# t = threading.Thread(target=worker,args=(n,sema,))
# t.start()
#
#
# sema.release()
# import time
# import threading
#
# def addNum():
# global num #在每个线程中都获取这个全局变量
# # print('--get num:',num )
# time.sleep(1)
# locks.acquire(1) #修改数据前加锁
# print('num-->%s'%num)
# num -= 1 #对此公共变量进行-1操作
# locks.release() #修改后释放
#
# num = 5 #设定一个共享变量
# thread_list = []
# locks = threading.Lock() #生成全局锁
# for i in range(5):
# t = threading.Thread(target=addNum)
# t.start()
# thread_list.append(t)
#
# for t in thread_list: #等待所有线程执行完毕
# t.join()
#
# print('final num:', num )
# import threading
# import time
#
# class MyThread(threading.Thread):
# def run(self):
# global num
# time.sleep(1)
#
# if mutex.acquire(1):
# num = num+1
# msg = self.name+' set num to '+str(num)
# print msg
# mutex.release()
# num = 0
# mutex = threading.Lock()
# def test():
# for i in range(5):
# t = MyThread()
# t.start()
# if __name__ == '__main__':
# test()
|
from dictators.dictators_game import models
def create_user(username: str,
password_hash: str,
password_salt: str,
email_address: str) -> bool:
username_match = models.User.objects.filter(username=username)
email_match = models.User.objects.filter(email_address=email_address)
if username_match or email_match:
return False
new_user = models.User(username=username,
password_hash=password_hash,
password_salt=password_salt,
email_address=email_address)
new_user.save()
return True
def get_user(username: str) -> models.User:
return models.User.objects.get(username=username)
def delete_user(username: str,
password_hash: str,
password_salt: str) -> bool:
user = models.User.objects.filter(username=username)
if (user and
user[0].password_hash == password_hash and
user[0].password_salt == password_salt):
user[0].delete()
return True
return False
def authenticate_user(username: str,
password_hash: str,
password_salt: str) -> bool:
user = models.User.objects.filter(username=username)
return (user and
user[0].password_hash == password_hash and
user[0].password_salt == password_salt)
|
import glob
import os
import ssl
import argparse
import urllib3
import json
import logging
import urllib.request
import base64
import pandas as pd
import numpy as np
from pathlib import Path
from datetime import datetime
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
urllib3.disable_warnings(urllib3.HTTPResponse)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Script to import CICIDS2017 data from CSV into elasticsearch.")
parser.add_argument("-e --es_host", dest="es_host", type=str, default="127.0.0.1",
help="Address to the elasticsearch instance. Defaults to 127.0.0.1/localhost.")
parser.add_argument("-po --es_port", dest="es_port", type=int, default=9200,
help="Port of the elasticsearch instance. Defaults to 9200.")
parser.add_argument("-u --es_user", dest="es_user", type=str, required=True,
help="Username of elasticsearch account which has to have write access to the target index. "
"Required.")
parser.add_argument("-pa --es_password", dest="es_password", type=str, required=True,
help="Password of elasticsearch account. Required.")
parser.add_argument("-i --es_index", dest="es_index", type=str, required=True,
help="Target index to write into. Required.")
parser.add_argument("-m --http_method", dest="http_method", type=str, default="https",
help="Specify http method. Default method is https.")
parser.add_argument("-l --logging", dest="logging", default="INFO",
help="Set logging severity. Defaults to INFO.")
params = parser.parse_args()
ES_HOST = params.es_host
ES_PORT = params.es_port
ES_USER = params.es_user
ES_PW = params.es_password
INDEX_NAME = params.es_index
HTTP_METHOD = params.http_method
LOGGING = params.logging
# Create logging instance with file output
LOG_FORMATTER = logging.Formatter(fmt="%(asctime)s :: %(levelname)s :: %(message)s", datefmt="%H:%M:%S")
LOGGER = logging.getLogger(__name__)
FILE_HANDLER = logging.FileHandler(Path(f"./run-{datetime.now().strftime('%d-%m-%YT%H-%M-%S')}.log"))
FILE_HANDLER.setFormatter(LOG_FORMATTER)
LOGGER.addHandler(FILE_HANDLER)
CONSOLE_HANDLER = logging.StreamHandler()
CONSOLE_HANDLER.setFormatter(LOG_FORMATTER)
LOGGER.addHandler(CONSOLE_HANDLER)
if LOGGING == "DEBUG":
LOGGER.setLevel(logging.DEBUG)
elif LOGGING == "WARNING":
LOGGER.setLevel(logging.WARNING)
elif LOGGING == "ERROR":
LOGGER.setLevel(logging.ERROR)
elif LOGGING == "CRITICAL":
LOGGER.setLevel(logging.CRITICAL)
else:
LOGGER.setLevel(logging.INFO)
# Reading in the csv files
folder = "./data/"
os.chdir(Path(folder))
li = []
for file in glob.glob("*.csv"):
LOGGER.info(f"Found file '{file}'! Loading ...")
df = pd.read_csv(filepath_or_buffer=file, header=0, sep=",", engine="python")
# Remove weird whitespace character
header = []
for col in df.columns:
if " " in col[0]:
header.append(col[1:])
else:
header.append(col)
df.columns = header
# Monday data has seconds, all others don't
if "Monday" in file:
df["Timestamp"] = pd.to_datetime(df["Timestamp"], format="%d/%m/%Y %H:%M:%S")
else:
df["Timestamp"] = pd.to_datetime(df["Timestamp"], format="%d/%m/%Y %H:%M")
LOGGER.info(f"{df.info()}")
LOGGER.info(f"{df.to_string(max_rows=10, max_cols=100)}")
li.append(df)
if not li:
LOGGER.error("Couldn't find any csv file in the data folder, aborting.")
exit(1)
df = pd.concat(li, axis=0, ignore_index=True)
li = [] # Clear memory
LOGGER.info("Finished loading, preprocessing ...")
# Fill inf values with NaN
df.replace([np.inf, -np.inf], np.nan, inplace=True)
# Drop rows with all values NaN
df.dropna(how="all", inplace=True)
# Fill NaN values with a 0
df.fillna(0, inplace=True)
# Replace empty and whitespace values with a 0
df.replace(["", " "], 0, inplace=True)
# Adjust DType of DataFrame columns
df = df.astype({"Source Port": np.uint32,
"Destination Port": np.uint32,
"Protocol": np.uint8,
"Flow Duration": np.int32,
"Total Fwd Packets": np.uint32,
"Total Backward Packets": np.uint32,
"Total Length of Fwd Packets": np.uint32,
"Total Length of Bwd Packets": np.uint32,
"Fwd Packet Length Max": np.uint16,
"Fwd Packet Length Min": np.uint16,
"Bwd Packet Length Max": np.uint16,
"Bwd Packet Length Min": np.uint16,
"Flow IAT Max": np.int32,
"Flow IAT Min": np.int32,
"Fwd IAT Total": np.int32,
"Fwd IAT Max": np.int32,
"Fwd IAT Min": np.int32,
"Bwd IAT Total": np.int32,
"Bwd IAT Max": np.uint32,
"Bwd IAT Min": np.uint32,
"Fwd PSH Flags": np.uint8,
"Bwd PSH Flags": np.uint8,
"Fwd URG Flags": np.uint8,
"Bwd URG Flags": np.uint8,
"Fwd Header Length": np.uint64,
"Bwd Header Length": np.uint64,
"Fwd Packets/s": np.uint32,
"Bwd Packets/s": np.uint32,
"Min Packet Length": np.uint16,
"Max Packet Length": np.uint16,
"FIN Flag Count": np.uint8,
"SYN Flag Count": np.uint8,
"RST Flag Count": np.uint8,
"PSH Flag Count": np.uint8,
"ACK Flag Count": np.uint8,
"URG Flag Count": np.uint8,
"CWE Flag Count": np.uint8,
"ECE Flag Count": np.uint8,
"Fwd Header Length.1": np.int64,
"Fwd Avg Bytes/Bulk": np.uint8,
"Fwd Avg Packets/Bulk": np.uint8,
"Fwd Avg Bulk Rate": np.uint8,
"Bwd Avg Bytes/Bulk": np.uint8,
"Bwd Avg Packets/Bulk": np.uint8,
"Bwd Avg Bulk Rate": np.uint8,
"Subflow Fwd Packets": np.uint32,
"Subflow Fwd Bytes": np.uint32,
"Subflow Bwd Packets": np.uint32,
"Subflow Bwd Bytes": np.uint32,
"Init_Win_bytes_forward": np.int32,
"Init_Win_bytes_backward": np.int32,
"act_data_pkt_fwd": np.uint32,
"min_seg_size_forward": np.int32,
"Active Max": np.uint32,
"Active Min": np.uint32,
"Idle Max": np.uint32,
"Idle Min": np.uint32})
# Sort the DataFrame by Stime
df.sort_values(by=["Timestamp"], inplace=True, ignore_index=True)
LOGGER.info("Finished!")
LOGGER.debug(f"\n{df.to_string(max_rows=10, max_cols=100)}")
LOGGER.debug(f"\n{df.dtypes}")
count = 0
LOGGER.info(f"Ready to send {df.shape[0]} docs to cluster, Starting!")
# Begin creating one request body per DataFrame row and send it to elastic search
for index, row in df.iterrows():
count = count + 1
if count % 5000 == 0:
LOGGER.info(f"{count / df.shape[0] * 100:.2f}% ...")
body = {
"@timestamp": row["Timestamp"].strftime('%Y-%m-%dT%H:%M:%S'),
"@version": "1",
"ecs": {
"version": "1.5.0"
},
"event": {
"kind": "event",
"dataset": "flow",
"action": "network_flow",
"category": "network_traffic",
"start": row["Timestamp"].strftime('%Y-%m-%dT%H:%M:%S'),
"duration": row["Flow Duration"] * 1000
},
"source": {
"ip": row["Source IP"],
"port": row["Source Port"],
"packets": row["Total Fwd Packets"],
"bytes": row["Total Length of Fwd Packets"]
},
"destination": {
"ip": row["Destination IP"],
"port": row["Destination Port"],
"packets": row["Total Backward Packets"],
"bytes": row["Total Length of Bwd Packets"]
},
"network": {
"transport": row["Protocol"],
"type": "ipv4",
"bytes": row["Total Length of Fwd Packets"] + row["Total Length of Bwd Packets"],
"packets": row["Total Fwd Packets"] + row["Total Backward Packets"]
},
"CICFlowMeter": {
"flow_id": row["Flow ID"],
"down_up_ratio": row["Down/Up Ratio"],
"fwd": {
"psh_flags": row["Fwd PSH Flags"],
"urg_flags": row["Fwd URG Flags"],
"header_bytes": row["Fwd Header Length"],
"header_length": row["Fwd Header Length.1"],
"packets/s": row["Fwd Packets/s"],
"init_win_bytes": row["Init_Win_bytes_forward"],
"act_data_pkt": row["act_data_pkt_fwd"],
"min_segment_size": row["min_seg_size_forward"],
"packet_length": {
"max": row["Fwd Packet Length Max"],
"min": row["Fwd Packet Length Min"],
"mean": row["Fwd Packet Length Mean"],
"std": row["Fwd Packet Length Std"]
},
"IAT": {
"total": row["Fwd IAT Total"],
"max": row["Fwd IAT Max"],
"min": row["Fwd IAT Min"],
"mean": row["Fwd IAT Mean"],
"std": row["Fwd IAT Std"]
},
"avg": {
"segment_size": row["Avg Fwd Segment Size"],
"bytes/bulk": row["Fwd Avg Bytes/Bulk"],
"packets/bulk": row["Fwd Avg Packets/Bulk"],
"bulk_rate": row["Fwd Avg Bulk Rate"],
},
"subflow": {
"packets": row["Subflow Fwd Packets"],
"bytes": row["Subflow Fwd Bytes"],
}
},
"bwd": {
"psh_flags": row["Bwd PSH Flags"],
"urg_flags": row["Bwd URG Flags"],
"header_bytes": row["Bwd Header Length"],
"packets/s": row["Bwd Packets/s"],
"init_win_bytes": row["Init_Win_bytes_backward"],
"packet_length": {
"max": row["Bwd Packet Length Max"],
"min": row["Bwd Packet Length Min"],
"mean": row["Bwd Packet Length Mean"],
"std": row["Bwd Packet Length Std"]
},
"IAT": {
"total": row["Bwd IAT Total"],
"max": row["Bwd IAT Max"],
"min": row["Bwd IAT Min"],
"mean": row["Bwd IAT Mean"],
"std": row["Bwd IAT Std"]
},
"avg": {
"segment_size": row["Avg Bwd Segment Size"],
"bytes/bulk": row["Bwd Avg Bytes/Bulk"],
"packets/bulk": row["Bwd Avg Packets/Bulk"],
"bulk_rate": row["Bwd Avg Bulk Rate"],
},
"subflow": {
"packets": row["Subflow Bwd Packets"],
"bytes": row["Subflow Bwd Bytes"],
}
},
"flow": {
"bytes/s": row["Flow Bytes/s"],
"packets/s": row["Flow Packets/s"],
"IAT": {
"max": row["Flow IAT Max"],
"min": row["Flow IAT Min"],
"mean": row["Flow IAT Mean"],
"std": row["Flow IAT Std"]
}
},
"packets": {
"avg_size": row["Average Packet Size"],
"length": {
"max": row["Max Packet Length"],
"min": row["Min Packet Length"],
"mean": row["Packet Length Mean"],
"std": row["Packet Length Std"],
"variance": row["Packet Length Variance"],
}
},
"flag_count": {
"FIN": row["FIN Flag Count"],
"SYN": row["SYN Flag Count"],
"RST": row["RST Flag Count"],
"PSH": row["PSH Flag Count"],
"ACK": row["ACK Flag Count"],
"URG": row["URG Flag Count"],
"CWE": row["CWE Flag Count"],
"ECE": row["ECE Flag Count"],
},
"active": {
"max": row["Active Max"],
"min": row["Active Min"],
"mean": row["Active Mean"],
"std": row["Active Std"],
},
"idle": {
"max": row["Idle Max"],
"min": row["Idle Min"],
"mean": row["Idle Mean"],
"std": row["Idle Std"],
}
},
"tags": ["CICIDS2017", row["Label"]],
"type": "flow"
}
LOGGER.debug(f"Sending {body}")
elastic_target = f"{HTTP_METHOD}://{ES_HOST}:{ES_PORT}/{INDEX_NAME}/_doc"
req = urllib.request.Request(elastic_target)
json_data = json.dumps(body)
json_data_as_bytes = json_data.encode("utf-8")
credentials = base64.b64encode(f"{ES_USER}:{ES_PW}".encode("utf-8")).decode("utf-8")
req.add_header("Authorization", f"Basic {credentials}")
req.add_header("Content-Type", "application/json; charset=utf-8")
req.add_header("Content-Length", len(json_data_as_bytes))
ssl._create_default_https_context = ssl._create_unverified_context
response = urllib.request.urlopen(req, json_data_as_bytes)
LOGGER.debug(f"Response {json.loads(response.read().decode('utf-8'))}")
LOGGER.info("All done! Please check your index for completeness.")
|
from django.shortcuts import render
from django.http import HttpResponse
from django.views.generic import View
from django.http import JsonResponse
from django import forms
from django.views.decorators.csrf import csrf_exempt
from django.utils.decorators import method_decorator
from report.models import *
import json
from .DBHelper import DBHelper
# Create your views here.
def index(request):
return render(request, 'forms_paymentMethod.html')
class ProductList(View):
def get(self, request):
products = list(Product.objects.all().values())
data = dict()
data['products'] = products
response = JsonResponse(data)
response["Access-Control-Allow-Origin"] = "*"
return response
class CustomerList(View):
def get(self, request):
customers = list(Customer.objects.all().values())
data = dict()
data['customers'] = customers
response = JsonResponse(data)
response["Access-Control-Allow-Origin"] = "*"
return response
class CustomerGet(View):
def get(self, request, customer_code):
customers = list(Customer.objects.filter(customer_code=customer_code).values())
data = dict()
data['customers'] = customers
response = JsonResponse(data)
response["Access-Control-Allow-Origin"] = "*"
return response
@method_decorator(csrf_exempt, name='dispatch')
class CustomerSave(View):
def post(self, request):
request.POST = request.POST.copy()
form = CustomerForm(request.POST)
if form.is_valid():
form.save()
else:
ret = dict()
ret['result'] = "error"
return JsonResponse(ret)
customers = list(Customer.objects.all().values())
data = dict()
data['customers'] = customers
return render(request, 'forms_customer.html', data)
class CustomerForm(forms.ModelForm):
class Meta:
model = Customer
fields = '__all__'
@method_decorator(csrf_exempt, name='dispatch')
class CustomerSave2(View):
def post(self, request):
request.POST = request.POST.copy()
form = CustomerForm(request.POST)
if form.is_valid():
form.save()
else:
ret = dict()
ret['result'] = "error"
ret['customers'] = list()
return JsonResponse(ret)
customers = list(Customer.objects.all().values())
data = dict()
data['customers'] = customers
response = JsonResponse(data)
response["Access-Control-Allow-Origin"] = "*"
return response
#return render(request, 'forms_customer.html', data)
class PaymentMethodList(View):
def get(self, request):
paymentmethods = list(Paymentmethod.objects.all().values())
data = dict()
data['paymentmethods'] = paymentmethods
response = JsonResponse(data)
response["Access-Control-Allow-Origin"] = "*"
return response
class PaymentMethodGet(View):
def get(self, request, payment_method_code):
paymentmethods = list(Paymentmethod.objects.filter(payment_method_code=payment_method_code).values())
data = dict()
data['paymentmethods'] = paymentmethods
response = JsonResponse(data)
response["Access-Control-Allow-Origin"] = "*"
return response
@method_decorator(csrf_exempt, name='dispatch')
class PaymentMethodSave(View):
def post(self, request):
request.POST = request.POST.copy()
form = PaymentMethodForm(request.POST)
if form.is_valid():
form.save()
else:
ret = dict()
ret['result'] = "error"
return JsonResponse(ret)
paymentmethods = list(Paymentmethod.objects.all().values())
data = dict()
data['paymentmethods'] = paymentmethods
return render(request, 'forms_paymentMethod.html', data)
class PaymentMethodForm(forms.ModelForm):
class Meta:
model = Paymentmethod
fields = '__all__'
@method_decorator(csrf_exempt, name='dispatch')
class PaymentMethodSave2(View):
def post(self, request):
request.POST = request.POST.copy()
form = PaymentMethodForm(request.POST)
if form.is_valid():
form.save()
else:
ret = dict()
ret['result'] = "error"
ret['paymentmethods'] = list()
return JsonResponse(ret)
paymentmethods = list(Paymentmethod.objects.all().values())
data = dict()
data['paymentmethods'] = paymentmethods
response = JsonResponse(data)
response["Access-Control-Allow-Origin"] = "*"
return response
#return render(request, 'forms_customer.html', data)
def ReportListAllInvoices(request):
    """Render a report of every invoice joined with its customer, line items and products."""
    db = DBHelper()
    # One flat row per invoice line item.
    rows, cols = db.fetch('SELECT i.invoice_no as "Invoice No", i.date as "Date" '
                          ' , i.customer_code as "Customer Code", c.name as "Customer Name" '
                          ' , i.due_date as "Due Date", i.total as "Total", i.vat as "VAT", i.amount_due as "Amount Due" '
                          ' , ili.product_code as "Product Code", p.name as "Product Name" '
                          ' , ili.quantity as "Quantity", ili.unit_price as "Unit Price", ili.extended_price as "Extended Price" '
                          ' FROM invoice i JOIN customer c ON i.customer_code = c.customer_code '
                          ' JOIN invoice_line_item ili ON i.invoice_no = ili.invoice_no '
                          ' JOIN product p ON ili.product_code = p.code '
                          ' ')
    report = {
        'data': CursorToDict(rows, cols),
        'column_name': cols,
    }
    return render(request, 'report_list_all_invoices.html', report)
def ReportProductsSold(request):
    """Render total quantity and total value sold, aggregated per product."""
    db = DBHelper()
    rows, cols = db.fetch('SELECT ili.product_code as "Product Code", p.name as "Product Name" '
                          ' , SUM(ili.quantity) as "Total Quantity Sold", SUM(ili.extended_price) as "Total Value Sold" '
                          ' FROM invoice i JOIN invoice_line_item ili ON i.invoice_no = ili.invoice_no '
                          ' JOIN product p ON ili.product_code = p.code '
                          ' GROUP BY p.code, ili.product_code, p.name '
                          ' ')
    report = {
        'data': CursorToDict(rows, cols),
        'column_name': cols,
    }
    return render(request, 'report_products_sold.html', report)
def ReportListAllProducts(request):
    """Render a simple report listing every product (code, name, units)."""
    db = DBHelper()
    rows, cols = db.fetch('SELECT code as "Code", name as "Name", units as "Units" FROM product '
                          ' ')
    report = {
        'data': CursorToDict(rows, cols),
        'column_name': cols,
    }
    return render(request, 'report_list_all_products.html', report)
def CursorToDict(data, columns):
    """Convert DB cursor rows plus column headers into a list of dicts.

    Parameters:
        data: iterable of row tuples as returned by a DB cursor.
        columns: column header strings; each is lower-cased with spaces
            replaced by underscores to form the dict keys
            (e.g. "Invoice No" -> "invoice_no").

    Returns:
        A list with one dict per row, mapping normalized header -> cell value.
    """
    fieldnames = [name.replace(" ", "_").lower() for name in columns]
    # dict(zip(...)) replaces the manual append loops of the original;
    # like zip, it silently truncates to the shorter of headers/row.
    return [dict(zip(fieldnames, row)) for row in data]
import pandas as pd
import json
import sys

from casos import casos_positivos, casos_fallecidos

# Life-stage brackets (inclusive age bounds) used for the deceased
# breakdown; the last bracket is open-ended (60+).
_ETAPAS_DE_VIDA = [
    ("primera_infancia", 0, 5),
    ("infancia", 6, 11),
    ("adolescencia", 12, 18),
    ("juventud", 19, 26),
    ("adultez", 27, 59),
    ("persona_mayor", 60, None),
]

# (display name, PROVINCIA column value, population) for every province
# of the San Martin department.
_PROVINCIAS = [
    ("Moyobamba", "MOYOBAMBA", 157062),
    ("Bellavista", "BELLAVISTA", 62904),
    ("El Dorado", "EL DORADO", 42819),
    ("Huallaga", "HUALLAGA", 27229),
    ("Lamas", "LAMAS", 90993),
    ("Mariscal Caceres", "MARISCAL CACERES", 56563),
    ("Picota", "PICOTA", 47178),
    ("Rioja", "RIOJA", 138990),
    ("San Martin", "SAN MARTIN", 203728),
    ("Tocache", "TOCACHE", 79311),
]


def _filtrar(df, columna, valor):
    """Return the rows of *df* whose *columna* equals *valor*."""
    return df[df[columna] == valor]


def _por_sexo(df, sexo):
    """Return how many rows of *df* have SEXO == *sexo*."""
    return df[df['SEXO'] == sexo].shape[0]


def _fallecidos_por_etapa(fallecidos):
    """Return deceased counts per life-stage bracket, keyed by stage name."""
    conteos = {}
    for etapa, desde, hasta in _ETAPAS_DE_VIDA:
        seleccion = fallecidos['EDAD_DECLARADA'] >= desde
        if hasta is not None:
            seleccion &= fallecidos['EDAD_DECLARADA'] <= hasta
        conteos[etapa] = fallecidos[seleccion].shape[0]
    return conteos


def _resumen_provincia(nombre, provincia, poblacion):
    """Build the per-province summary (positives/deceased by sex and stage).

    NOTE(review): like the original script, the filter uses only PROVINCIA
    and does not also constrain DEPARTAMENTO -- counts would include a
    same-named province of another department, if one existed. Confirm the
    data rules this out.
    """
    positivos = _filtrar(casos_positivos, 'PROVINCIA', provincia)
    fallecidos = _filtrar(casos_fallecidos, 'PROVINCIA', provincia)
    # Key order matters: json.dumps preserves insertion order, and this
    # order reproduces the original script's output.
    return {
        "name": nombre,
        "positivos": positivos.shape[0],
        "poblacion": poblacion,
        "hombres_infectados": _por_sexo(positivos, "MASCULINO"),
        "mujeres_infectados": _por_sexo(positivos, "FEMENINO"),
        "fallecidos": fallecidos.shape[0],
        "hombres_fallecidos": _por_sexo(fallecidos, "MASCULINO"),
        "mujeres_fallecidos": _por_sexo(fallecidos, "FEMENINO"),
        "type": "Provincia",
        "etapa_de_vida_fallecidos": _fallecidos_por_etapa(fallecidos),
    }


poblacion_sanmartin = 906777
_positivos_dep = _filtrar(casos_positivos, 'DEPARTAMENTO', "SAN MARTIN")
_fallecidos_dep = _filtrar(casos_fallecidos, 'DEPARTAMENTO', "SAN MARTIN")

# Department-level summary. Note the department dict lists "poblacion"
# before "positivos" while the provinces do the reverse -- this mirrors the
# original output ordering.
sanmartin = {
    "name": "San Martin",
    "poblacion": poblacion_sanmartin,
    "positivos": _positivos_dep.shape[0],
    "hombres_infectados": _por_sexo(_positivos_dep, "MASCULINO"),
    "mujeres_infectados": _por_sexo(_positivos_dep, "FEMENINO"),
    "fallecidos": _fallecidos_dep.shape[0],
    "hombres_fallecidos": _por_sexo(_fallecidos_dep, "MASCULINO"),
    "mujeres_fallecidos": _por_sexo(_fallecidos_dep, "FEMENINO"),
    "type": "Departamento",
    "etapa_de_vida_fallecidos": _fallecidos_por_etapa(_fallecidos_dep),
    "url": "san-martin",
    "provincias": [
        _resumen_provincia(nombre, provincia, poblacion)
        for nombre, provincia, poblacion in _PROVINCIAS
    ],
}

print(json.dumps(sanmartin))
sys.stdout.flush()
# Author : Mohammad Farhan Fahrezy
# https://github.com/farhanfahrezy/
def compute_factorial(number):
    """Return number! (the product 2*3*...*number); 0! and 1! are 1.

    Parameters:
        number (int): a non-negative integer.
    """
    result = 1
    for factor in range(2, number + 1):
        result *= factor
    return result


if __name__ == "__main__":
    # BUGFIX: the original ran input() at module import time; the __main__
    # guard makes the module importable without prompting.
    # Insert any number that is larger than -1.
    number = int(input("Insert any positive integer = "))
    if number < 0:
        print("Cannot calculate the factorial of negative numbers")
    else:
        print("The factorial value of the number is :", compute_factorial(number))
from django.urls import path
from . import views
# Route table for the auctions app; the `name=` values are referenced from
# templates/views via {% url %} / reverse().
urlpatterns = [
    path("", views.index, name="index"),
    # Account handling
    path("login", views.login_view, name="login"),
    path("logout", views.logout_view, name="logout"),
    path("register", views.register, name="register"),
    # Listing creation / browsing
    path("new", views.create_new_listing, name="new"),
    path("watchlist", views.watchlist_page, name = "watchlist"),
    path("category/<str:type>", views.specific_category_list, name="category"),
    # Per-listing pages and actions (listing_id is the listing's primary key)
    path("<int:listing_id>", views.listing_detail, name="detail"),
    path("<int:listing_id>/bid", views.listing_bid, name = "listing_bid"),
    path("<int:listing_id>/comment", views.listing_comment, name ="listing_comment"),
    path("<int:listing_id>/close", views.listing_close, name = "listing_close"),
    path("<int:listing_id>/add", views.modify_watchlist, name = "modify_watchlist")
]
|
import random
def divide():
    """Print a 40-character dashed line to visually separate output."""
    print("----------------------------------------")
# Classes for Superhero Game
class Ability:
    """A named attack that deals a random amount of damage per use."""

    def __init__(self, name, attack_strength):
        """Store the ability's display name and its damage ceiling.

        Parameters:
            name (String): display name of the ability.
            attack_strength (Integer): maximum damage a single attack deals.
        """
        self.name = name
        self.max_damage = attack_strength

    def attack(self):
        """Return a random damage roll between 0 and max_damage, inclusive."""
        return random.randint(0, self.max_damage)
class Armor:
    """A named piece of armor that blocks a random amount of damage."""

    def __init__(self, name, max_block):
        """Store the armor's display name and its blocking ceiling.

        Parameters:
            name (String): display name of the armor.
            max_block (Integer): maximum damage this armor can block.
        """
        self.name = name
        self.max_block = max_block

    def block(self):
        """Return a random block roll between 0 and max_block, inclusive."""
        return random.randint(0, self.max_block)
class Hero:
    """A combatant that aggregates Ability attacks and Armor blocks and
    tracks kill/death statistics across fights."""
    def __init__(self, name, starting_health=100):
        """
        Parameters:
            name (String)
            starting_health (Integer): initial and maximum health.
        """
        self.abilities = list()  # stores Ability (and Weapon) instances
        self.armors = list()  # stores Armor instances
        self.name = name
        self.starting_health = self.current_health = starting_health
        self.deaths = 0
        self.kills = 0
    def add_ability(self, ability):
        """
        Add ability to abilities list.
        Parameters: ability (Ability)
        """
        self.abilities.append(ability)
    def add_armor(self, armor):
        '''Add armor to self.armors
        armor: Armor object
        '''
        self.armors.append(armor)
    def attack(self):
        """Calculate the total damage from all ability attacks.
        return: total (Integer); 0 when the hero has no abilities.
        """
        total = 0
        for ability in self.abilities:
            total = total + ability.attack()
        return total
    def defend(self):
        """
        Runs block method on each armor and returns sum of all blocks.
        Returns: total_defense (Integer); 0 when the hero has no armor.
        """
        total_defense = 0
        for armor in self.armors:
            total_defense += armor.block()
        return total_defense
    def take_damage(self, damage):
        """
        Updates current_health by adding the difference of the defend()
        roll and damage.  NOTE: by design a block roll larger than the
        incoming damage heals the hero.
        Parameters: damage (Integer)
        """
        change_in_health = self.defend() - damage
        self.current_health += change_in_health
    def is_alive(self):
        """Return True while current_health is at least 0 (exactly 0 still
        counts as alive, matching the original game rules)."""
        return self.current_health >= 0
    def add_kill(self, num_kills):
        '''Increase the kill count by num_kills.'''
        self.kills += num_kills
    def add_deaths(self, num_deaths):
        '''Increase the death count by num_deaths.'''
        self.deaths += num_deaths
    def add_weapon(self, weapon):
        '''Add weapon (Weapon object) to self.abilities.'''
        self.abilities.append(weapon)
    def fight(self, opponent):
        """
        Exchange attacks with opponent until one (or both) drops below 0
        health, then update kill/death stats for the winner/loser.
        Parameters:
            opponent (Hero)
        """
        if len(self.abilities) == 0 and len(opponent.abilities) == 0:
            print("Draw!")
        else:
            while self.is_alive() and opponent.is_alive():
                opponent.take_damage(self.attack())
                self.take_damage(opponent.attack())
                # check after each exchange of attacks for who's still alive
                if self.is_alive() and not opponent.is_alive():
                    print(f"{self.name} won!")
                    # update kill/death stats
                    self.add_kill(1)
                    opponent.add_deaths(1)
                elif not self.is_alive() and opponent.is_alive():
                    print(f"{opponent.name} won!")
                    # update kill/death stats
                    self.add_deaths(1)
                    opponent.add_kill(1)
    def str_to_object(self, name, object_type):
        '''Search self.abilities or self.armors (chosen by object_type) and
        return the object whose .name matches, or None if absent.
        Parameter: name (str)
                   object_type ("Ability" | "Weapon" | "Armor")
        '''
        # choose which of Hero's property lists to search
        if object_type == "Ability" or object_type == "Weapon":
            for ability in self.abilities:
                if name == ability.name:
                    return ability
        elif object_type == "Armor":
            for armor in self.armors:
                if name == armor.name:
                    return armor
    def capture_index(self, name, object_type):
        '''Return the index of the named Ability/Weapon/Armor within its
        respective list, or None if absent.
        '''
        if object_type == "Ability" or object_type == "Weapon":
            for i in range(len(self.abilities)):
                if name == self.abilities[i].name:
                    return i
        elif object_type == "Armor":
            for i in range(len(self.armors)):
                if name == self.armors[i].name:
                    return i
    def provide_prompts(self, obj_type, plural):
        '''A helper method to provide prompts to use in edit methods.
        Params:
            obj_type (str): type of object being edited
            plural (str): the plural form of the noun corresponding w/ obj_type
        Returns: prompts (list): common prompt strings used by edit_powers
        '''
        prompts = [f"Do you know the {plural} of this Hero (Y/N)? ",
                   f"Here are the {plural} available for this Hero:",
                   f"Which {obj_type} would you like to change? \n",
                   f"Enter the name here. Enter Q to finish: ",
                   f"{obj_type} not found. Please try again: ",
                   f"N = name of {obj_type} \n",
                   f"A = attack strength of {obj_type} \n",
                   f"D = Delete this {obj_type} \n",
                   f"{obj_type} has been removed!"]
        return prompts
    def edit_powers(self, power_type):
        '''Interactively rename, re-strength or delete entries in
        self.abilities or self.armors.
        Param: power_type (str): "Ability", "Weapon" or "Armor".
        '''
        # decides which prompts to show user
        prompts = list()  # stores prompt strings
        if power_type == "Ability":
            prompts = self.provide_prompts("Ability", "Abilities")
        elif power_type == "Weapon":
            prompts = self.provide_prompts("Weapon", "Weapons")
        elif power_type == "Armor":
            prompts = self.provide_prompts("Armor", "Armors")
        choice = input(prompts[0])
        list_to_change = list()  # stores self.abilities or self.armors
        if choice.lower() == "n":  # user doesn't know all abilities
            # print all abilities
            divide()
            print(prompts[1])
            if power_type == "Ability" or power_type == "Weapon":
                list_to_change = self.abilities
            elif power_type == "Armor":
                list_to_change = self.armors
            # printing the powers in their respective list
            for power in list_to_change:
                if power_type == "Ability":
                    if type(power) == Ability:
                        print(power.name)
                elif power_type == "Weapon":
                    if type(power) == Weapon:
                        print(power.name)
                elif power_type == "Armor":
                    print(power.name)
            divide()
        choice = input(prompts[2] + prompts[3])
        while not choice.upper() == "Q":
            # check to make sure valid Ability/Armor entered
            names_of_powers = list()
            for power in list_to_change:
                # FIX: was `ability.name` (NameError) — use the loop variable.
                names_of_powers.append(power.name)
            while choice not in names_of_powers:
                choice = input(prompts[4])
            else:  # valid object entered
                index = self.capture_index(choice, power_type)
                current_obj = list_to_change[index]
                op_choice = input("What would you like to change? \n" +
                                  prompts[5] +
                                  prompts[6] +
                                  prompts[7] +
                                  "Please select one: ")
                if op_choice.upper() == "N":
                    new_name = input("Please enter a new name: ")
                    # replaces Ability object
                    new_obj = Ability(new_name, current_obj.max_damage)
                    list_to_change[index] = new_obj
                    assert new_name == list_to_change[index].name
                    divide()
                elif op_choice.upper() == "A":
                    # FIX: convert to int — storing the raw input string as
                    # max_damage made the later random.randint() call fail.
                    new_stren = int(input("Enter a new attack strength: "))
                    list_to_change[index] = Ability(choice, new_stren)
                    err_msg = "Attack strength change failed!"
                    assert new_stren == list_to_change[index].max_damage, (
                        err_msg)
                    divide()
                elif op_choice.upper() == "D":
                    list_to_change.pop(index)
                    print(prompts[8])  # print removal message
                    divide()
                else:
                    print("Sorry, that choice is invalid.")
                # FIX: re-prompt so the user can edit another power or enter
                # Q; previously `choice` was never updated here and the loop
                # could not terminate.
                choice = input(prompts[2] + prompts[3])
    def check_all_powers(self):
        '''Remove unauthorized (strength > 9000) Abilities and Armors.
        FIX: the old code popped from the lists while indexing with
        range(len(...)), which skips neighbors and can raise IndexError;
        rebuild the lists in place with a filter instead.
        '''
        self.abilities[:] = [a for a in self.abilities if a.max_damage <= 9000]
        self.armors[:] = [a for a in self.armors if a.max_block <= 9000]
# a Hero that can steal Abilities or Armors from other heroes
class Thief(Hero):
    """A Hero that can take the first Ability/Weapon and the first Armor
    from another hero before fighting."""
    def steal(self, other_hero):
        '''Take the first ability and the first armor from another Hero.
        Param: other_hero(Hero)
        Return: None
        '''
        if not len(other_hero.abilities) == 0:
            stolen = other_hero.abilities[0]
            other_hero.abilities.pop(0)
            self.abilities.append(stolen)
        else:
            divide()
            # FIX: this branch is about abilities, but the message said
            # "armors" (copy/paste from the block below).
            print(f"{self.name} cannot steal abilities from {other_hero.name}")
            divide()
        if not len(other_hero.armors) == 0:
            stolen = other_hero.armors[0]
            other_hero.armors.pop(0)
            self.armors.append(stolen)
        else:
            divide()
            print(f"{self.name} cannot steal armors from {other_hero.name}")
            divide()
class Weapon(Ability):
    """An Ability whose damage rolls never drop below half of full power."""

    def attack(self):
        """Return a random damage value in [max_damage // 2, max_damage]."""
        floor = self.max_damage // 2
        return random.randint(floor, self.max_damage)
class Team:
    """A named roster of Hero objects with battle and bookkeeping helpers."""
    def __init__(self, name):
        ''' Initialize a team with a team name.
        Parameter: name (str)
        '''
        self.name = name
        self.heroes = list()  # an empty list of heroes on the team
    def add_hero(self, hero):
        '''Add a new hero to the team.
        Parameter: hero (Hero obj)
        '''
        self.heroes.append(hero)
    def remove_hero(self, name):
        ''' Remove hero(es) from the team by their name.
        If the hero isn't found, return 0.
        Parameter: name (str)
        FIX: the old code removed from the list while iterating it, which
        skips elements; filter in place instead.
        '''
        if name not in [hero.name for hero in self.heroes]:
            return 0
        self.heroes[:] = [hero for hero in self.heroes if hero.name != name]
    def view_all_heroes(self):
        '''Print the name of every hero on the team.'''
        for hero in self.heroes:
            print(hero.name)
    # methods for teams to attack/defend
    def attack(self, other_team):
        '''Pit a random hero from this team against a random hero from
        other_team (Thieves steal from their opponent first).'''
        # Guard: an empty roster would make random selection raise.
        if not self.heroes or not other_team.heroes:
            return
        # Randomly selects a Hero from each team
        hero = random.choice(self.heroes)
        enemy = random.choice(other_team.heroes)
        # if the hero is a Thief, steal from the enemy before the fight
        if type(hero) == Thief:
            hero.steal(enemy)
        hero.fight(enemy)
    def revive_heroes(self, heath=100):
        '''Reset all heroes' health to their starting_health.
        NOTE: `heath` (sic) is unused; kept for backward compatibility.
        '''
        for hero in self.heroes:
            hero.current_health = hero.starting_health
    def stats(self):
        '''Print each hero's kill/death ratio and the team average.'''
        print("Here are the kill/death ratios for your team's Heroes:")
        ratios = list()  # stores all ratios for the team
        for hero in self.heroes:
            if not hero.deaths == 0:
                ratio = hero.kills / hero.deaths
                ratios.append(ratio)
                print(f"{hero.name}: {ratio}")
            else:
                print(f"{hero.name}: No deaths, no ratio")
        # calculates and prints average kill/death ratio
        # (local renamed from `sum`, which shadowed the builtin)
        total = 0
        for ratio in ratios:
            total += ratio
        if not len(ratios) == 0:
            avg = total / len(ratios)
            print(f"Average kill/death ratio: {avg}")
        else:
            print("Average kill/death ratio: N/A")
    def remove_all_heroes(self):
        '''Delete every hero on the team.
        FIX: removing while iterating left elements behind; clear in place.
        '''
        del self.heroes[:]
class Arena:
    """Interactive game driver: builds two Teams from console input and
    runs battles between them."""
    def __init__(self):
        # Teams are populated later via build_team_one()/build_team_two().
        self.team_one = None
        self.team_two = None
    def create_powers(self, obj_type):
        '''Prompt for Ability/Weapon/Armor information.
        Param: obj_type(str) - "ability", "weapon", or "armor"
        Return: the corresponding object built from user input.
        '''
        divide()
        name = input(f"Enter the name for your new {obj_type}: ")
        if obj_type == "armor":
            strength_prompt = ("Enter the blocking power. \n" +
                               "All armors below 9000 are authorized: ")
        else:
            strength_prompt = ("Enter attack power. \n" +
                               "All attacks below 9000 are authorized: ")
        strength = int(input(strength_prompt))
        # Warn about over-9000 powers; unauthorized ones are still created
        # here — Hero.check_all_powers() strips them before battle.
        if strength > 9000:
            divide()
            warning = input(f"WARNING! This {obj_type} is over 9000. \n" +
                            "Unauthorized powers will be removed.\n" +
                            "Do you wish to proceed (Y/N)? ")
            if warning.lower() == "n":
                strength = int(input("Please enter a new value: "))
                divide()
        # choose which type of object to return
        if obj_type == "ability":
            return Ability(name, strength)
        elif obj_type == "weapon":
            return Weapon(name, strength)
        elif obj_type == "armor":
            return Armor(name, strength)
    def prompt_for(self, hero, attribute):
        '''Repeatedly offer to add abilities/weapons/armors to *hero*.
        Parameters:
            hero (Hero): object whose properties are being extended.
            attribute (str): "ability", "weapon" or "armor".
        Returns: nothing
        '''
        choice = ""
        while choice.lower() != "n":
            choice = input(f"Would you like to add a new {attribute} (Y/N)?")
            if choice.lower() == "y" and attribute == "ability":
                hero.add_ability(self.create_powers("ability"))
            elif choice.lower() == "y" and attribute == "weapon":
                # weapons share the abilities list
                hero.add_ability(self.create_powers("weapon"))
            elif choice.lower() == "y" and attribute == "armor":
                hero.add_armor(self.create_powers("armor"))
    def prompt_all(self, hero):
        '''Run prompt_for() three times: abilities, weapons, then armors.'''
        self.prompt_for(hero, "ability")
        self.prompt_for(hero, "weapon")
        self.prompt_for(hero, "armor")
    def create_hero(self):
        '''Prompt the user for Hero information and return the new Hero
        (a Thief when the user opts in to stealing).'''
        make_thief = input("Is this Hero able to steal(Y/N)? ")
        name = input("Enter a name for your new hero: ")
        if make_thief.lower() == "y":
            new_hero = Thief(name)
        else:
            new_hero = Hero(name)
        self.prompt_all(new_hero)
        return new_hero
    def build_team_one(self):
        '''Prompt the user to build team_one.'''
        team_one_name = input("Enter name for Team One: ")
        self.team_one = Team(team_one_name)
        team_size = input("Enter the size of this team: ")
        # Create each requested Hero and add it to the team's roster.
        heroes_added = 0
        while heroes_added < int(team_size):
            divide()
            new_team_player = self.create_hero()
            self.team_one.add_hero(new_team_player)
            heroes_added += 1
    def build_team_two(self):
        '''Prompt the user to build team_two.'''
        team_two_name = input("Enter the name for Team Two: ")
        self.team_two = Team(team_two_name)
        team_size = input("Enter the size of this team: ")
        # Create each requested Hero and add it to the team's roster.
        heroes_added = 0
        while heroes_added < int(team_size):
            new_team_player = self.create_hero()
            self.team_two.add_hero(new_team_player)
            heroes_added += 1
    def remove_unauthorized(self, team_number):
        """Strip powers stronger than 9000 from every Hero on the chosen
        team (1 or 2)."""
        heroes_list = list()
        if team_number == 1:
            heroes_list = self.team_one.heroes
        elif team_number == 2:
            heroes_list = self.team_two.heroes
        # check every Hero on the Team
        for hero in heroes_list:
            hero.check_all_powers()
    def team_battle(self):
        '''Battle team_one and team_two together.'''
        # remove all unauthorized Abilities, Weapons, or Armors first
        self.remove_unauthorized(1)
        self.remove_unauthorized(2)
        self.team_one.attack(self.team_two)
    def show_stats(self):
        '''Print the winner (team with more survivors), each team's
        kill/death stats and the names of surviving heroes.'''
        # lists to contain names of all alive heroes on teams
        team_one_live_heroes_names = list()
        team_two_live_heroes_names = list()
        # winning team decided by whom has more alive heroes
        live_one_heroes = 0
        for hero in self.team_one.heroes:
            if hero.is_alive():
                live_one_heroes += 1
                team_one_live_heroes_names.append(hero.name)
        live_two_heroes = 0
        for hero in self.team_two.heroes:
            if hero.is_alive():
                live_two_heroes += 1
                team_two_live_heroes_names.append(hero.name)
        # decides which team won
        if live_one_heroes > live_two_heroes:
            print("Result: Team One wins!")
        elif live_two_heroes > live_one_heroes:
            print("Result: Team Two wins!")
        else:
            print("Result: No Team wins!")
        # showing stats for first team
        print("Stats for Team One:")
        self.team_one.stats()
        print("These are the Heroes who are Still Alive on Team One:")
        divide()
        for name in team_one_live_heroes_names:
            print(name)
        divide()
        # showing stats for second team
        print("Stats for Team Two:")
        self.team_two.stats()
        print("These are the Heroes who are Still Alive on Team Two:")
        divide()
        for name in team_two_live_heroes_names:
            print(name)
    def recreate_teams(self):
        '''Removes all heroes from each team, and then rebuilds them.'''
        self.team_one.remove_all_heroes()
        self.team_two.remove_all_heroes()
        self.build_team_one()
        self.build_team_two()
    def edit_team(self, selected_team):
        '''Changes parts of the team which user selects.
        Param: selected_team (Team): Team being changed
        '''
        divide()
        see_heroes = input("View the heroes on this Team (Y/N)?")
        if see_heroes.lower() == "y":
            selected_team.view_all_heroes()
            divide()
        # names used to validate the user's hero selection
        hero_names = list()
        for hero in selected_team.heroes:
            hero_names.append(hero.name)
        num_changes = input("Enter the number of Heroes you'd like to change ")
        # FIX: `for i in num_changes` iterated the CHARACTERS of the input
        # string; iterate the requested count instead.
        for i in range(int(num_changes)):
            hero_choice = input(f"Enter Hero #{i + 1} you'd like to change: ")
            while hero_choice not in hero_names:
                hero_choice = input("Name cannot be found. Please try again: ")
            print("Name found! Success!")
            # when the Hero is found, pull the Hero out of the Team's hero
            # list and store it in hero_choice
            for hero in selected_team.heroes:
                if hero_choice == hero.name:
                    hero_choice = hero  # changes from str to Hero type
            divide()
            attribute_choice = input("Which property is being changed? \n" +
                                     "A = hero's abilites \n" +
                                     "W = hero's weapons \n" +
                                     "AR = hero's armors \n" +
                                     "T = make Hero a Thief \n" +
                                     "Please enter your choice: ")
            if attribute_choice == "A":
                hero_choice.edit_powers("Ability")
            elif attribute_choice == "W":
                hero_choice.edit_powers("Weapon")
            elif attribute_choice == "AR":
                hero_choice.edit_powers("Armor")
            elif attribute_choice == "T":
                # FIX: rebinding the local variable did not modify the team;
                # swap a Thief into the roster, carrying over the powers.
                index = selected_team.heroes.index(hero_choice)
                new_thief = Thief(hero_choice.name)
                new_thief.abilities = hero_choice.abilities
                new_thief.armors = hero_choice.armors
                selected_team.heroes[index] = new_thief
if __name__ == "__main__":
    # Entry point: build both teams once, then loop battles until the
    # player declines to play again.
    game_is_running = True
    arena = Arena() # instanatiate Arena
    # Build teams
    arena.build_team_one()
    arena.build_team_two()
    while game_is_running:
        divide()
        arena.team_battle()
        arena.show_stats()
        play_again = input("Play Again? Y or N: ")
        # Check for Player Input
        if play_again.lower() == "n":
            game_is_running = False
        else:
            # Revive heroes to play again
            arena.team_one.revive_heroes()
            arena.team_two.revive_heroes()
            # Ask user if they want to reset the teams
            reset_choice = input("Would you like to reset the teams (Y/N)?")
            if reset_choice.lower() == "y":
                arena.recreate_teams()
            # NOTE(review): mixed .lower()/.upper() checks — any answer other
            # than y/Y/n/N silently skips both branches; confirm intended.
            elif reset_choice.upper() == "N":
                # Ask user if they want to change the teams
                edit_choice = input("Would you like to edit the teams (Y/N)? ")
                if edit_choice.lower() == "y":
                    # Team is selected by its exact (case-sensitive) name.
                    team_choice = input("Enter the Team you want to edit: ")
                    if team_choice == arena.team_one.name:
                        arena.edit_team(arena.team_one)
                    elif team_choice == arena.team_two.name:
                        arena.edit_team(arena.team_two)
|
#!/usr/bin/env python3
import csv
import time
import subprocess
from flask import Flask, render_template, send_file, abort, request, Markup
#CONFIG
# Header token expected when CHECK_HEADER_AUTH is enabled below.
AUTH_TOKEN={"key":"Vpnmanagertoken","value":"randomnumber"}
CHECK_HEADER_TOKEN=False
CHECK_HEADER_AUTH=False
APP_ROOT=""
HOST='127.0.0.1'
PORT=5000
# FIX (security): `('127.0.0.1')` without a trailing comma is just a
# parenthesized STRING, so `addr in TRUSTED_HOSTS` was a substring test —
# e.g. '27.0.0' would have passed the check in limit_remote_addr().
# One-element tuples require the trailing comma.
TRUSTED_PROXIES = ('127.0.0.1',)
TRUSTED_HOSTS = ('127.0.0.1',)
KEYGEN_BASE="/etc/openvpn/keygen"
KEYS_BASE="/etc/openvpn/keys"
CA_INDEX_DB=KEYS_BASE+"/index.txt"
SITE="example-com-vpn"
CSS_FILE="bulma.min.css"
CSS_LOGO="openvpn-logo.png"
FAVICON="favicon.ico"
ZIP_ENABLED=False
"""
OpenSSL Index file
The index file consists of zero or more lines, each containing the following fields separated by tab characters:
#0 1. Certificate status flag (V=valid, R=revoked, E=expired).
#1 2. Certificate expiration date in YYMMDDHHMMSSZ format.
#2 3. Certificate revocation date in YYMMDDHHMMSSZ[,reason] format. Empty if not revoked.
#3 4. Certificate serial number in hex.
#4 5. Certificate filename or literal string ‘unknown’.
#5 6. Certificate distinguished name.
"""
app = Flask(__name__)
# ['V', '340414203029Z', '', '01', 'unknown', '/C=GB/ST=London/L=London/O=Example OU/OU=IT/CN=jira.example.com/emailAddress=it-support@example.com']
#open index file
def read_index():
    """Parse the OpenSSL index file (CA_INDEX_DB, tab-separated; see the
    module docstring for the column layout) into lookup structures.

    Returns a tuple:
        CA_DB_SERIAL_DICT (dict): hex serial -> formatted row list
        CA_DB (list): formatted rows sorted by certificate common name

    Each formatted row keeps the original column order, with the date
    columns rewritten as DD/MM/20YY HH:MM:SS and column 5 reduced to the
    CN= value of the distinguished name.
    """
    CA_DB=[]
    CA_DB_SERIAL_DICT={}
    CA_DB_NAME_DICT={}
    with open(CA_INDEX_DB, "r", newline='') as f:
        reader = csv.reader(f, delimiter='\t', quoting=csv.QUOTE_NONE)
        for num, row in enumerate(reader):
            if num == 0 or "site-2-site" in row:
                #skip server self cert and s2s certs
                continue
            serial=row[3]
            out=[]
            elem=""
            for elnum, elem in enumerate(row):
                # columns 1/2 hold YYMMDDHHMMSSZ timestamps; reformat them
                if elnum == 1 or elnum == 2:
                    if elem:
                        elem=elem[2:4]+"/"+elem[4:6]+"/20"+elem[:2]+" "+elem[6:8]+":"+elem[8:10]+":"+elem[10:12]
                    else:
                        elem = ""
                # column 5 is the DN; keep only the CN component
                if elnum == 5:
                    elem=elem.split("CN=")[1].split("/")[0]
                out.append(elem)
            # NOTE(review): this append is discarded — CA_DB is rebuilt from
            # CA_DB_NAME_DICT below; kept for byte-compatibility.
            CA_DB.append(out)
            CA_DB_SERIAL_DICT[serial]=out
            # after the inner loop `elem` is the CN (last column processed)
            CA_DB_NAME_DICT[elem]=out
    # rebuild CA_DB sorted by CN for stable display ordering
    CA_DB=[]
    for k in sorted(CA_DB_NAME_DICT):
        CA_DB.append(CA_DB_NAME_DICT[k])
    return (CA_DB_SERIAL_DICT, CA_DB)
def protect_invalid_chars(txt=""):
    """Return *txt* with shell/HTML metacharacters (and spaces) removed.

    Used to sanitize user-supplied values before they are embedded in
    shell commands or file paths.
    """
    invalid_chars=["<",">",'"',"'","`","(",")","[","]","{","}","&","$","^","~","ˇ","˘",'˝',";",":","/","=","!","%","\\","|","€"," ","*"]
    kept = []
    for ch in txt:
        if ch not in invalid_chars:
            kept.append(ch)
    return "".join(kept)
def processExecutor(cmd, args=''):
    """Run *cmd* through the shell and return stderr + stdout as one string.

    Returns None when launching the process raises.  `args` is unused and
    kept only for backward compatibility.

    SECURITY NOTE: shell=True with a caller-built command string is
    injection-prone; callers pre-filter input with protect_invalid_chars(),
    but prefer list-form commands with shell=False when refactoring.

    FIX: the old Popen code read stdout to completion before stderr, which
    can deadlock when the child fills the stderr pipe; subprocess.run
    drains both pipes concurrently and waits for exit.
    """
    try:
        ps = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
        # keep the original ordering: stderr first, then stdout
        return ps.stderr.decode("utf-8") + ps.stdout.decode("utf-8")
    except Exception as e:
        print(e)
        return None
@app.route(APP_ROOT+"/")
def list():
    """Render the overview page with every certificate in the CA index.

    NOTE(review): the function name shadows the builtin `list` at module
    scope; kept as-is because Flask derives the endpoint name from it.
    """
    CA_DB_SERIAL_DICT, CA_DB = read_index()
    return render_template('template.html', title="List of All OpenVPN certificates", all_tab_active="is-active", site=SITE, my_list=CA_DB, app_route=APP_ROOT)
@app.route(APP_ROOT+"/list-valid")
def list_valid():
    """Render only certificates whose status flag is 'V' (valid)."""
    CA_DB_SERIAL_DICT, CA_DB = read_index()
    # keep rows whose status column contains 'V'
    CA_DB=[j for j in CA_DB if 'V' in j[0]]
    return render_template('template.html', title="List of Valid OpenVPN certificates", valid_tab_active="is-active", site=SITE, my_list=CA_DB, app_route=APP_ROOT)
@app.route(APP_ROOT+"/list-revoke")
def list_revoke():
    """Render only certificates whose status flag is 'R' (revoked)."""
    CA_DB_SERIAL_DICT, CA_DB = read_index()
    # keep rows whose status column contains 'R'
    CA_DB=[j for j in CA_DB if 'R' in j[0]]
    return render_template('template.html', title="List of Revoked OpenVPN certificates", revoke_tab_active="is-active", site=SITE, my_list=CA_DB, app_route=APP_ROOT)
@app.route(APP_ROOT+'/download/<serial>', methods=['GET', 'POST'])
def download(serial):
    """Send the client bundle (.zip or .ovpn) for a VALID certificate serial.

    Any serial that is unknown or not in 'V' status yields a 404.
    """
    CA_DB_SERIAL_DICT, CA_DB = read_index()
    serial=protect_invalid_chars(serial)
    if serial in CA_DB_SERIAL_DICT and CA_DB_SERIAL_DICT[serial][0] == 'V':
        cert_name=CA_DB_SERIAL_DICT[serial][5]
        if ZIP_ENABLED:
            file_name=KEYS_BASE+"/"+cert_name+"/"+cert_name+".zip"
        else:
            file_name=KEYS_BASE+"/"+cert_name+"/"+cert_name+".ovpn"
        return send_file(file_name, as_attachment=True)
    # FIX: a known serial whose status was not 'V' previously fell through
    # and returned None (a 500 in Flask); 404 every non-valid request.
    abort(404) # 404 Not Found
@app.route(APP_ROOT+'/revoke/<serial>', methods=['GET'])
def revoke(serial):
    """Render the confirmation page before revoking certificate *serial*."""
    serial=protect_invalid_chars(serial)
    CA_DB_SERIAL_DICT, CA_DB = read_index()
    message="Confirm revoke certificate"
    # FIX: an UNKNOWN serial previously skipped both branches and reached
    # the render call with `cert_to_revoke` unbound (NameError -> 500);
    # treat both "unknown" and "not valid" as 404.
    if serial in CA_DB_SERIAL_DICT and CA_DB_SERIAL_DICT[serial][0] == 'V':
        cert_to_revoke=CA_DB_SERIAL_DICT[serial][5]
    else:
        abort(404) # 404 Not Found
    return render_template('revoke.html', title=message, serial=serial, cert_name=cert_to_revoke,app_route=APP_ROOT, site=SITE)
@app.route(APP_ROOT+'/revoke_certificate/<serial>', methods=['POST'])
def revoke_certificate(serial):
    """Revoke the certificate for *serial* by shelling out to the CA's
    ./revoke-client script and render the result page.

    Unknown serials 404; a known serial that is not 'V' leaves certname
    empty and renders the error variant of the template.
    """
    CA_DB_SERIAL_DICT, CA_DB = read_index()
    message="Certificate revoked"
    certname=""
    # sanitize before the value is embedded in a shell command below
    serial=protect_invalid_chars(serial)
    if serial in CA_DB_SERIAL_DICT:
        if CA_DB_SERIAL_DICT[serial][0] == 'V':
            certname=CA_DB_SERIAL_DICT[serial][5]
    else:
        abort(404) # 404 Not Found
    if certname:
        cmd="cd "+KEYGEN_BASE+" && ./revoke-client "+certname
        #execute create command
        out=""
        done=""
        lines = processExecutor(cmd).split("\n")
        print(lines)
        # build an HTML-ish transcript of the script output for the page
        for line in lines:
            out+=str(line)+"<br />"
        # both markers must appear for the UI to report success
        if "DONE." in out and "certificate revoked" in out:
            done="done"
        out=Markup(out)
        # re-read so the page reflects the post-revocation index state
        CA_DB_SERIAL_DICT, CA_DB = read_index()
        # NOTE(review): this render omits site=SITE unlike the other routes —
        # confirm the template tolerates a missing `site`.
        return render_template('revoke.html', title=message, serial=serial, cert_name=certname, done=done, out=out, app_route=APP_ROOT)
    else:
        return render_template('revoke.html', title="Certificate revocation error.", serial=serial, cert_name="Missing cert name", app_route=APP_ROOT, site=SITE)
@app.route(APP_ROOT+"/create")
def create():
    """Render the certificate-creation form ("tcp" type preselected)."""
    CA_DB_SERIAL_DICT, CA_DB = read_index()
    return render_template('create.html', title="Create NEW OpenVPN certificate", selected_tcp="selected",app_route=APP_ROOT, site=SITE)
@app.route(APP_ROOT+"/create-form", methods=['POST'])
def create_form():
    """Handle the create-certificate POST: validate the requested name,
    run the matching keygen script and render the result page.

    Three certificate flavors exist — gateway ("gw"), plain TCP ("tcp")
    and the default internal profile — each mapping to a different
    build script and filename suffix.
    """
    CA_DB_SERIAL_DICT, CA_DB = read_index()
    error_msg=""
    selected_internal="selected"
    title="Creating new certificate ..."
    error=""
    done=""
    cmd=""
    certname=""
    download_serial="0"
    # sanitize user input before it reaches the shell command below
    new_cert = protect_invalid_chars(request.form["cert_name"])
    cert_type = protect_invalid_chars(request.form["select_gw"])
    if not len(new_cert):
        error_msg="Certificate name cannot be empty!"
        # NOTE(review): this branch sets `error` while the s2s branch below
        # appends to `title` instead — confirm which the template expects.
        error=" ERROR."
    #disable s2s
    if "site-2-site" in new_cert:
        error_msg="Do not generate / manage S2S via WEB interface!"
        title+=" ERROR."
    # pick the keygen script and remember the <select> state for re-render
    if cert_type == "gw":
        selected_gw="selected"
        selected_tcp=""
        selected_internal=""
        certname=new_cert+"-"+SITE+"-tcp-gw"
        cmd="cd "+KEYGEN_BASE+" && ./build-key-embed-tcp-commongw "+certname
    elif cert_type == "tcp":
        selected_gw=""
        selected_tcp="selected"
        selected_internal=""
        certname=new_cert+"-"+SITE+"-tcp"
        cmd="cd "+KEYGEN_BASE+" && ./build-key-embed-tcp "+certname
    else:
        selected_gw=""
        selected_tcp=""
        selected_internal="selected"
        certname=new_cert+"-"+SITE
        cmd="cd "+KEYGEN_BASE+" && ./build-key-embed "+certname
    #check exiting certificate
    if not error_msg:
        for v in CA_DB:
            if new_cert == v[5]:
                error_msg="Already existing certificate name in cert database!"
                error=" ERROR."
                break
    #execute create command
    out=""
    if not error_msg:
        lines = processExecutor(cmd).split("\n")
        print(lines)
        # build an HTML-ish transcript of the script output for the page
        for line in lines:
            out+=str(line)+"<br />"
        if "DONE." in out:
            done="done"
        out=Markup(out)
        # re-read the index to find the serial of the freshly created cert
        # so the page can offer a direct download link
        CA_DB_SERIAL_DICT, CA_DB = read_index()
        for k,v in CA_DB_SERIAL_DICT.items():
            if v[5] == certname:
                download_serial=k
                break
    return render_template('create.html', title=title, error=error, error_message=error_msg, read_only="readonly", selected_internal=selected_internal, selected_gw=selected_gw, cert_name_val=certname,out=out,done=done,serial=download_serial,selected_tcp=selected_tcp,app_route=APP_ROOT, site=SITE)
@app.route(APP_ROOT+"/css")
def css():
    """Serve the bundled CSS framework file."""
    return send_file(CSS_FILE, as_attachment=True)
@app.route(APP_ROOT+"/logo")
def logo():
    """Serve the OpenVPN logo image."""
    return send_file(CSS_LOGO, as_attachment=True)
@app.route(APP_ROOT+"/favicon.ico")
def favicon():
    """Serve the site favicon."""
    return send_file(FAVICON, as_attachment=True)
@app.before_request
def limit_remote_addr():
    """Reject (403) every request that does not come from a trusted host,
    and optionally enforce the auth headers configured at the top of the
    file (CHECK_HEADER_TOKEN / CHECK_HEADER_AUTH).

    NOTE(review): if TRUSTED_HOSTS is a plain string rather than a tuple
    (a one-element tuple needs a trailing comma), `in` below is a
    SUBSTRING test — e.g. '27.0.0' would pass; confirm it is a tuple.
    """
    # X-Forwarded-For chain plus the socket peer; currently unused beyond
    # this point — only the direct peer address is checked.
    route = request.access_route + [request.remote_addr]
    print("Remote Addr:",request.remote_addr)
    auth_header=request.headers.get('Authorization')
    token_header=request.headers.get(AUTH_TOKEN["key"])
    if request.remote_addr not in TRUSTED_HOSTS:
        print("Untrused remote address, 403:",request.remote_addr)
        abort(403) # Forbidden
    if CHECK_HEADER_TOKEN and (not auth_header):
        print("No authorization header, 403:",request.remote_addr)
        abort(403) # Forbidden
    if CHECK_HEADER_AUTH and (token_header != AUTH_TOKEN["value"]):
        print("Authorization token mismatch, 403:",request.remote_addr)
        abort(403) # Forbidden
if __name__ == '__main__':
    # Development entry point; bind only to HOST (localhost by default).
    # NOTE(review): debug=True enables the Werkzeug debugger — never expose
    # this beyond localhost; run behind a real WSGI server in production.
    try:
        app.run(host=HOST, port=PORT, debug=True)
    except OSError:
        # e.g. the port is already bound by a previous instance
        print("Process already running. Exit.")
|
from selene.support import by
from pteromyini.core.com.find_element import find_spaces, find_texts_in_element
from pteromyini.core.com.space import space
from pteromyini.lib.debug.profiler import ProfileTime
def __remove_whitespace(texts: list):
    """Return *texts* with None entries dropped and all spaces removed
    from the remaining strings.

    FIX: str.replace returns a NEW string (str is immutable); the old code
    discarded that result and appended the original text unchanged, so no
    whitespace was ever removed.
    """
    result = []
    for t in texts:
        if t is None:
            continue
        result.append(t.replace(" ", ""))
    return result
def get_dict_row(context, value=None, key=None, index=None) -> dict:
    """
    Find row by index or key-value in table and return as dictionary;
    the table must have a head.
    :return: {'head':'text', 'head2':['text1', 'tex2'] ..}
    :raises AssertionError: when a key/value lookup finds a non-matching row.
    """
    pt = ProfileTime(get_dict_row.__name__)
    sp = space(context)
    # negative indices count from the end of the table
    if index is not None:
        index = int(index)
        if index < 0:
            index = table_length(context) + index + 1
    # header can be in thead or in body as tr[1]
    head_elements = sp.ss(by.xpath('.//thead/tr/th'))
    if len(head_elements) == 0:
        head_elements = sp.ss(by.xpath('.//tr/th'))
        if index is not None:
            index += 1  # skip the header row that lives inside tbody
    heads = [el.text for el in head_elements if el.is_displayed()]
    # row
    tag = 'tr' if index is None else f'tr[{index}]'
    # TODO: fix bug: if table have same value in different column then get only last
    e = find_spaces(context, value, tag)[-1]
    el_rows = e.ss(by.xpath(".//td"))
    row = []
    for el in el_rows:
        if not el.is_displayed():
            continue
        texts = find_texts_in_element(el)
        # unwrap single-text cells to a plain string
        if len(texts) == 1:
            texts = texts[0]
        row.append(texts)
    # collapse whitespace in headers so they make stable dict keys
    heads = [" ".join(head.split()) for head in heads]
    d = dict(zip(heads, row))
    pt.stop()
    if key is None or value is None:  # call if find by index
        return d
    if d[key] == value:  # call if find by key value
        return d
    # FIX: the old trailing `assert d is not None` could never fail, so a
    # mismatched row silently returned None; fail loudly instead.
    raise AssertionError(f"cant find row where {key}:{value}")
def table_length(context):
    """Return the number of data rows in the table, excluding the header."""
    sp = space(context)
    header_cells = sp.ss(by.xpath('.//thead/tr/th'))
    body_rows = len(sp.ss(by.xpath('.//tbody/tr')))
    # without a <thead>, the first body row serves as the header
    if len(header_cells) == 0:
        return body_rows - 1
    return body_rows
def get_list_row(context, value=None, key=None, index=None) -> list:
    """
    Find a table row by index or by a contained value.
    :return list ['text', [texts, texts], 'tttt', [], []]
    NOTE(review): `key` is accepted but never used here — confirm callers.
    """
    pt = ProfileTime(get_list_row.__name__)
    if index is not None:  # increment index if head body
        heads = space(context).ss(by.xpath('.//tbody//tr/th'))
        if len(heads) > 0:
            index = int(float(index)) + 1
    tag = 'tr' if index is None else f'tr[{index}]'
    e = find_spaces(context, value, tag)[-1]
    el_rows = e.ss(by.xpath(".//td"))
    row = []
    for el in el_rows:
        texts = find_texts_in_element(el)
        # unwrap single-text cells to a plain string
        if len(texts) == 1:
            texts = texts[0]
        row.append(texts)
    # row = __remove_whitespace(row)
    pt.stop()
    if value is None:
        return row
    elif value in row:
        return row
    assert False, f"cant find row where {value}"
def get_list_column(context, name) -> list:
    """
    Find collum by name and return all value in list
    :param name - column header
    :return list ['text', [texts, texts], 'tttt', [], []]
    """
    pt = ProfileTime(get_list_column.__name__)
    sp = space(context)
    # NOTE(review): `index` is computed but never used below — confirm.
    index = 0
    # header can live in <thead> or as the first <tr> of the body
    head_elements = sp.ss(by.xpath('.//thead/tr/th'))
    if len(head_elements) == 0:
        head_elements = sp.ss(by.xpath('.//tr/th'))
        index += 1
    heads = []
    for el in head_elements:
        # hidden headers become None placeholders so positions stay aligned
        heads.append(el.text if el.is_displayed() else None)
    # XPath positions are 1-based
    column_index = heads.index(name) + 1
    el_rows = sp.ss(by.xpath(f".//td[{column_index}]"))
    column = []
    for el in el_rows:
        texts = find_texts_in_element(el)
        # unwrap single-text cells to a plain string
        if len(texts) == 1:
            texts = texts[0]
        column.append(texts)
    pt.stop()
    return column
np.random.seed(123)
#In [2]:
## NUMBER OF ASSETS
n_assets = 4
## NUMBER OF OBSERVATIONS
n_obs = 1000
return_vec = np.random.randn(n_assets, n_obs)
#In [3]:
plt.plot(return_vec.T, alpha=.4);
plt.xlabel('time')
plt.ylabel('returns')
plt.show()
#In [4]:
def rand_weights(n):
''' Produces n random weights that sum to 1 '''
k = np.random.rand(n)
return k / sum(k)
print(rand_weights(n_assets))
print(rand_weights(n_assets))
#In [5]:
def random_portfolio(returns):
    """Return (mean, std) of the returns of a randomly weighted portfolio."""
    mean_returns = np.asmatrix(np.mean(returns, axis=1))
    weights = np.asmatrix(rand_weights(returns.shape[0]))
    covariance = np.asmatrix(np.cov(returns))
    mu = weights * mean_returns.T
    sigma = np.sqrt(weights * covariance * weights.T)
    # Redraw outlier portfolios so the scatter plots stay compact.
    return random_portfolio(returns) if sigma > 2 else (mu, sigma)
n_portfolios = 500
# `xrange` is Python 2 only; the rest of this script uses print(), so use range().
means, stds = np.column_stack([
    random_portfolio(return_vec)
    for _ in range(n_portfolios)
])
fig = plt.figure()
plt.plot(stds, means, 'o', markersize=5)
plt.xlabel('std')
plt.ylabel('mean')
plt.title('Mean and standard deviation of returns of randomly generated portfolios')
plt.savefig('output/markovitz_1.png')
np.savetxt('output/means.csv', means, delimiter=",")
np.savetxt('output/stds.csv', stds, delimiter=",")
# The old `pd.concat([stds.tolist(), ])` crashed: pd.concat requires pandas
# objects, not plain lists. Build a proper DataFrame instead.
pd.DataFrame({"mean": means.tolist(), "std": stds.tolist()})
#py.iplot_mpl(fig, filename='mean_std', strip_style=True)
means
from telegraph import Telegraph
import scrape
from argparse import ArgumentParser

# CLI: the only argument is the numeric id of the doujin to scrape.
parser = ArgumentParser(description="Script to give you allow you to easily read doujins as telegraph articles")
parser.add_argument("-s", "--source", help="The digits of the doujin", type=int)
args = parser.parse_args()
if not args.source:
    print("Please provide source to scrape")
    exit(1)
# scrape.get_info returns the title, a metadata dict and ready-made <img> tags.
title, data, image_tags, = scrape.get_info(args.source)
telegraph = Telegraph()
telegraph.create_account(short_name='nhentai_bot')
# Publish the images as a Telegraph article and keep its relative path.
article_path = telegraph.create_page(title, html_content=image_tags)['path']
print("title :", title)
for key, value in data.items():
    if value:
        print(key, ": ", value, "\n")
print("Path: ", 'https://telegra.ph/{}'.format(article_path))
|
from orun.urls import path, re_path
from . import views

# URL namespace for reversing, e.g. 'admin:login'.
app_name = 'admin'

urlpatterns = [
    # Web client pages.
    path('web/', views.client.index),
    path('web/login/', views.client.login, name='login'),
    path('web/logout/', views.client.logout),
    path('web/login/authenticated/', views.client.is_authenticated, name='logged'),
    path('web/js/templates/', views.client.js_templates),
    path('web/content/<int:content_id>/', views.client.content),
    path('web/content/upload/', views.client.upload_attachment),
    path('web/file/upload/<model>/<meth>/', views.client.upload_file),
    path('web/data/reorder/', views.client.reorder),
    path('web/image/<model>/<field>/<id>/', views.client.image),
    path('web/company/logo/', views.client.company_logo),
    path('web/reports/<path:path>', views.client.report),
    path('web/menu/search/', views.client.search_menu),
    path('web/action/<str:service>/view/', views.api.view_model),
    # path('web/test/', views.test.qunit),
    # admin rpc
    path('api/rpc/<service>/<meth>/', views.api.rpc),
    path('api/view/<service>/', views.api.view),
    path('api/dashboard/<int:service>/', views.dashboard.index),
    path('api/dashboard/<service>/', views.dashboard.index),
    path('api/dashboard/<service>/<method>/', views.dashboard.rpc),
    path('api/field/choices/<service>/<field>/', views.api.choices),
    path('api/app/settings/', views.api.app_settings),
    path('api/public/query/<int:id>/', views.api.public_query),
    # admin api
    path('admin/api/report/<str:qualname>/', views.api.admin_report_api),
]
|
#!/usr/bin/python
#\file property.py
#\brief Test of @property.
#\author Akihiko Yamaguchi, info@akihikoy.net
#\version 0.1
#\date Aug.20, 2015
'''
Refs.
http://qiita.com/knzm/items/a8a0fead6e1706663c22
http://stackoverflow.com/questions/15458613/python-why-is-read-only-property-writable
NOTE:
- Each class that defines property should be a sub class of 'object' class.
'''
class TTest1(object):
    """Read-only computed attribute: `square` has a getter only, so
    assigning to it raises AttributeError."""

    def __init__(self):
        self.x = 2

    @property
    def square(self):
        """x squared."""
        return self.x ** 2
class TTest2(object):
    """Computed attribute whose setter always rejects writes."""

    def __init__(self):
        self.x = 3

    def _get_square(self):
        return self.x ** 2

    def _set_square(self, value):
        # Explicitly non-writable: any assignment fails loudly.
        raise Exception('not writable')

    square = property(_get_square, _set_square)
class TTest3(object):
    """Demonstrates getter re-registration: the last registered getter wins,
    so INSTANCE.square yields x cubed (confusing on purpose)."""

    def __init__(self):
        self.x = 4

    def _cube(self):
        return self.x ** 3

    square = property(_cube)
class TTest4(object):
    """Writable computed attribute: x tracks the square root of whatever
    is assigned to `square`."""

    def __init__(self):
        self.x = 5

    def _read(self):
        return self.x ** 2

    def _write(self, value):
        self.x = value ** 0.5

    square = property(_read, _write)
class TTest5(object):
    """Read-only property exposing a mutable list: the binding is protected,
    but the list contents can still be mutated through it."""

    def __init__(self):
        self._x = [1, 2, 3]

    x = property(lambda self: self._x)
if __name__=='__main__':
    # NOTE: Python 2 syntax (print statements) throughout this demo block.
    test1= TTest1()
    print test1.square  #result: 4
    #test1.square= 10  #ERROR: 'AttributeError: can't set attribute'
    #print test1.square
    print '-------------'
    test2= TTest2()
    print test2.square  #result: 9
    #test2.square= 10  #ERROR: Exception: not writable
    #print test2.square
    print '-------------'
    test3= TTest3()
    print test3.square  #result: 64
    print '-------------'
    test4= TTest4()
    print test4.square  #result: 25
    test4.square= 10
    print test4.square  #result: 10
    print '-------------'
    test5= TTest5()
    print test5.x  #result: [1, 2, 3]
    # The property guards the binding, not the list contents:
    test5.x[1]= 10
    print test5.x  #result: [1, 10, 3]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import datetime
import getpass
import sys
import mysql.connector
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--user', help='db user, default login user')
parser.add_argument('--host', help='db host, default "127.0.0.1"')
parser.add_argument('-d', '--dbname', help='db name, default `test_db`')
parser.add_argument('-b', '--business_name', required=True, help='必须。业务线名称')
parser.add_argument('--admin', required=True, help='必须。管理员用户名')
# print(sys.argv)
args, _ = parser.parse_known_args(sys.argv[1:])
# print(args)
# Fall back to the login user and local defaults when flags are omitted.
dbuser = args.user or getpass.getuser()
# Password is always prompted interactively, never taken from argv.
password = getpass.getpass('db password:')
host = args.host or '127.0.0.1'
dbname = args.dbname or 'test_db'
config = {
    'user': dbuser,
    'password': password,
    'host': host,
    'database': dbname,
    'raise_on_warnings': True,
}
business_name = args.business_name
admin_username = args.admin
business_description = business_name + '业务线'
now = datetime.datetime.now()
current_time_millis = int(now.timestamp() * 1000)
cnx = mysql.connector.connect(**config)
cursor = cnx.cursor()
exists = 0
# Parameterized query: the previous %-interpolation of admin_username into
# the SQL string was vulnerable to SQL injection.
cursor.execute(
    "select count(*) from business_user where username=%s",
    (admin_username,),
)
for (count, ) in cursor:
    exists = count
if exists:
    print('用户 %s 已存在' % admin_username)
    exit()
######################
# other db ops
######################
# Make sure data is committed to the database
cnx.commit()
cursor.close()
cnx.close()
print('create business %s and user %s finished' % (business_name, admin_username))
|
from abc import ABCMeta, abstractmethod
class AbsSelectorTemperatura(metaclass=ABCMeta):
    """Abstract interface: concrete temperature selectors must provide
    a static ``obtener_selector`` implementation."""

    @staticmethod
    @abstractmethod
    def obtener_selector():
        """Return the selector implementation (subclass hook)."""
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
@Time : 2018/9/11 下午3:23
@Author : fanyuexiang
@Site :
@File : test.py
@Software: PyCharm
@version: 1.0
@describe: 项目测试用例
'''
from dataformate import get_knn_data

# Sample KNN dataset: three feature columns plus the target label column.
filepath = 'dataset/KNNData.txt'
name = ['fly distance', 'play time', 'ice cream', 'like level']
df = get_knn_data(filepath, names=name)
print(df)
from myhdl import *

# Python 2 demo: print the 8-bit two's-complement view of each value.
for i in range(-127, 128, 1):
    x = intbv(i)[8:]
    print i, bin(x, 8), x, x.signed()
|
# `import uniform from random` is invalid Python syntax; this is the correct form.
from random import uniform


def append_random_numbers(numbers_list, quantity):
    """Append `quantity` random floats drawn from [0, 1] to `numbers_list`.

    The list is extended in place and also returned for convenience.
    A non-positive `quantity` leaves the list unchanged.
    (The previous body rebound the parameter to [] and appended nothing.)
    """
    for _ in range(max(0, quantity)):
        numbers_list.append(uniform(0, 1))
    return numbers_list
|
# Generated by Django 2.1.4 on 2019-01-11 15:56
from django.conf import settings
from django.db import migrations


class Migration(migrations.Migration):
    """Rename the model Psychologues to the singular Psychologue."""

    dependencies = [
        ('speciality', '0032_auto_20190111_1556'),
        ('orientation', '0002_auto_20190109_0221'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('clientele', '0005_clientele_services'),
        ('services', '0002_auto_20190109_1816'),
        ('psychologues', '0011_auto_20190111_1551'),
    ]

    operations = [
        migrations.RenameModel(
            old_name='Psychologues',
            new_name='Psychologue',
        ),
    ]
|
import mastl._iohelp as io
import tensorflow as tf
import numpy as np
import os
import time
import datetime
import shutil
def optimistic_restore(session, save_file, graph=None):
    """Restore only the checkpoint variables whose name AND shape match the graph.

    Adapted version of an answer by StackOverflow user Lior at
    https://stackoverflow.com/questions/47997203/tensorflow-restore-if-present

    Args:
        session: session object
        save_file: save file to load
        graph: graph to act upon; defaults to the current default graph.
            (The previous `graph=tf.get_default_graph()` default was evaluated
            once at import time, capturing whatever graph happened to be
            current then — a classic mutable/eager default-argument bug.)
    """
    if graph is None:
        graph = tf.get_default_graph()
    # Initialize everything first so variables absent from the checkpoint
    # still end up defined.
    session.run(tf.global_variables_initializer())
    reader = tf.train.NewCheckpointReader(save_file)
    saved_shapes = reader.get_variable_to_shape_map()
    var_names = sorted([(var.name, var.name.split(':')[0]) for var in tf.global_variables()
                        if var.name.split(':')[0] in saved_shapes])
    restore_vars = []
    for var_name, saved_var_name in var_names:
        curr_var = graph.get_tensor_by_name(var_name)
        var_shape = curr_var.get_shape().as_list()
        if var_shape == saved_shapes[saved_var_name]:
            restore_vars.append(curr_var)
    opt_saver = tf.train.Saver(restore_vars)
    opt_saver.restore(session, save_file)
def train_denoiser(config):
    """Train the recovery/denoiser model described by `config`.

    Expects `config` to provide: 'dataset', 'model' and 'loss', and optionally
    'output' (run dir), 'restore' (previous run dir to continue from),
    'sampling_mask', 'x_size'/'y_size'/'channels_num', 'batch_size',
    'train_steps', 'optimizer' and 'model_datatype'. Saves checkpoints, the
    loss history and sample/adjoint/recovery arrays under the run directory.
    NOTE(review): indentation of this block was reconstructed from the code's
    logic — confirm branch placement against the original file.
    """
    try:
        try:
            dataset = config['dataset']
            assert dataset is not None
            io.log("Data loaded: {} training samples, {} testing samples".format(
                len(dataset.data_files),
                len(dataset.test_files)
            ))
        except (AssertionError, KeyError):
            io.log("Could not load dataset from configure.py", "error")
            return
        # Run directory: explicit 'output', or a unix-timestamp name.
        if 'output' in config:
            dirname = config['output']
        else:
            dirname = str(int(time.time()))
        io.log("Creating save dir ./{}".format(dirname))
        os.mkdir(dirname)
        if 'restore' in config:
            restore = config['restore']
            noticefile = open("{}/IMPORTANT_NOTICE".format(dirname), "a")
            noticefile.write("This model is a continuation of {}. ".format(restore))
            noticefile.write("You should disregard the model specifications in "
                             "configure.py, and look there instead. The training "
                             "specifications however, are correct.\n\n")
            noticefile.close()
        try:
            model_datatype = config['model_datatype']
            assert model_datatype is not None
        except:
            io.log("Could not read mode_datatype from configure.py", "warn")
            io.log("Defaulting to tf.float32", "detail")
            model_datatype = tf.float32
        try:
            x, y, channels_num = config['x_size'], config['y_size'], config['channels_num']
        except:
            io.log("Could not read x, y or c size from configure.py", "warn")
            io.log("Defaulting to None", "detail")
            x, y, channels_num = None, None, None
        if 'restore' in config:
            io.log("Loading previous sampling pattern")
            sampling_mask = np.load("{}/pattern.npy".format(restore))
        else:
            io.log("Loading sampling pattern from config")
            try:
                sampling_mask = config['sampling_mask']
                assert sampling_mask is not None
            except (AssertionError, KeyError):
                io.log("Could not read sampling pattern from configure.py", "warn")
                io.log("Defaulting to gaussian sampling at 20 % sampling rate", "detail")
                if x is None or y is None:
                    io.log("Can't create sampling pattern, x or y is None", "error")
                    return
                from tools.patterns import gaussian_sampling
                sampling_mask = gaussian_sampling(x, y, int(x * y * 0.2))
        np.save("{}/pattern.npy".format(dirname), sampling_mask)
        io.log("Sampling rate: {}".format(np.mean(sampling_mask)), "detail")
        sampling_mask = np.fft.fftshift(sampling_mask)

        def sample_op(input):
            # Subsample in frequency space: zero out all non-sampled frequencies.
            fft_input = np.fft.fft2(input)
            fft_input[np.invert(sampling_mask)] = 0
            return fft_input

        io.log("Creating TF Graph")
        try:
            batch_size = config['batch_size']
            assert batch_size is not None
        except AssertionError:
            # NOTE(review): a missing key raises KeyError, which is NOT caught
            # here (unlike the earlier config reads) — confirm this is intended.
            io.log("Could not read batch size from configure.py", "warn")
            io.log("Defaulting to 10", "detail")
            batch_size = 10
        input = tf.placeholder(
            model_datatype,
            shape=[batch_size, y, x, channels_num]
        )
        is_training = tf.placeholder(tf.bool)
        try:
            output = config['model'](input, is_training)
        except Exception as e:
            io.log("Could not create model from configure.py", "error")
            io.log("Error: {}".format(str(e)), "detail")
            return
        true_output = tf.placeholder(model_datatype, shape=[batch_size, y, x, channels_num])
        try:
            loss = config['loss'](output, true_output)
        except:
            io.log("Could not get loss function from configure.py", "error")
            return
        if 'restore' in config:
            io.log("Loading train_losses and updating training status")
            losses = np.load("{}/train_losses.npy".format(restore)).tolist()
            start_train = len(losses)
            # Update dataset status
            dataset.epoch = start_train * batch_size // len(
                dataset)
            dataset.index = start_train * batch_size % len(
                dataset)
        else:
            losses = []
            start_train = 0
        try:
            train_steps = config['train_steps']
            assert train_steps is not None
        except AssertionError:
            io.log("Could not read train_steps from configure.py", "warn")
            io.log("Defaulting to 100", "detail")
            train_steps = 100
        try:
            optimizer = config['optimizer']
            assert optimizer is not None
        except:
            io.log("Could not read optimizer from configure.py", "warn")
            io.log("Defaulting to tf.train.GradientDecentOptimizer", "detail")
            optimizer = tf.train.GradientDescentOptimizer(0.001)
        train_step = optimizer.minimize(loss, var_list=tf.global_variables(), colocate_gradients_with_ops=True)
        # Initialize saver
        saver = tf.train.Saver(max_to_keep=None)
        io.log("Planning to do {} training steps in total, ie {} epochs".format(train_steps,
                                                                                train_steps * batch_size // (
                                                                                    len(
                                                                                        dataset))))
        # Start TF session
        io.log("Initializing TF Session")
        sessconfig = tf.ConfigProto(allow_soft_placement=True)
        with tf.Session(config=sessconfig) as sess:
            if 'restore' in config:
                saver.restore(sess, "{}/last_run.ckpt".format(config['restore']))
            else:
                sess.run(tf.global_variables_initializer())
            io.log("Beginning training")
            time_start = time.time()
            try:
                for i in range(start_train, start_train + train_steps):
                    if i == start_train:
                        print(
                            "\r%.1f %% Loss: %-10g Time elapsed: %-10s ETA: %-10s " % (
                                0,
                                np.nan,
                                "-",
                                "-"
                            ),
                            end=""
                        )
                    else:
                        if i % 20 == 0:
                            print(
                                "\rSTATUS: %.1f %% done Avg loss: %-10g Training progress: %s %s" % (
                                    (i - start_train) * 100 / train_steps,
                                    np.mean(np.array(losses[i - 20:i - 1])),
                                    "{} / {} in epoch {}".format(dataset.index,
                                                                 len(dataset),
                                                                 dataset.epoch),
                                    " "*40
                                )
                            )
                        # time_now was set at the end of the previous iteration.
                        elapsed = int(time_now - time_start)
                        eta = int(
                            elapsed / (i - start_train + 1) * (train_steps - i + start_train))
                        print(
                            "\r%.1f %% Loss: %-10g Time elapsed: %-10s ETA: %-10s Training progress: %s " % (
                                (i - start_train) * 100 / train_steps,
                                losses[i - 1],
                                str(datetime.timedelta(seconds=elapsed)),
                                str(datetime.timedelta(seconds=eta)),
                                "{} / {} in epoch {}".format(dataset.index,
                                                             len(dataset),
                                                             dataset.epoch)
                            ),
                            end=""
                        )
                    if i % 500 == 0:
                        print("\nSaving at {} steps".format(i))
                        saver.save(sess, "{}/at_{}_steps.ckpt".format(dirname, i))
                    dataset.ready_next_batch(batch_size)
                    dataset.sample_loaded_batch(sample_op)
                    data, sampled = dataset.get_next_batch()
                    sess.run(train_step, feed_dict={input: sampled,
                                                    true_output: data,
                                                    is_training: True
                                                    })
                    losses.append(sess.run(loss, feed_dict={input: sampled,
                                                            true_output: data,
                                                            is_training: False
                                                            }))
                    time_now = time.time()
                print("\r100.0 %% Loss: %g%s" % (
                    losses[start_train + train_steps - 1], " " * 80))
                elapsed = time_now - time_start
                io.log("Total time: {}".format(str(datetime.timedelta(seconds=elapsed))))
            except KeyboardInterrupt:
                print()
                io.log("Stopping training")
                noticefile = open("{}/IMPORTANT_NOTICE".format(dirname), "a")
                noticefile.write("Training was aborted after {} steps.\n\n".format(i))
                noticefile.close()
                io.log("Status: {} / {} in epoch {}".format(dataset.index,
                                                            len(dataset),
                                                            dataset.epoch))
            # Dump one training batch: ground truth, zero-filled adjoint, recovery.
            io.log("Running model on train data")
            os.mkdir("{}/{}".format(dirname, "train_results"))
            dataset.ready_next_batch(batch_size)
            dataset.sample_loaded_batch(sample_op)
            data, sampled = dataset.get_next_batch()
            recovery = sess.run(output, feed_dict={input: sampled, is_training: False})
            for j in range(batch_size):
                num = i * batch_size + j
                np.save(
                    "{}/{}/{:03}_sample.npy".format(dirname, "train_results", (num)),
                    np.squeeze(np.abs(data[j]))
                )
                np.save(
                    "{}/{}/{:03}_adjoint.npy".format(dirname, "train_results", (num)),
                    np.abs(np.fft.ifft2(np.squeeze(sampled[j])))
                )
                np.save(
                    "{}/{}/{:03}_recovery.npy".format(dirname, "train_results", (num)),
                    np.squeeze(np.abs(recovery[j]))
                )
            io.log("Running model on test data")
            os.mkdir("{}/{}".format(dirname, "test_results"))
            test_set, sampled_test_set = dataset.get_test_set(sample_op=sample_op)
            for i in range(len(test_set) // batch_size):
                print(
                    "\rRunning... %.2f %%" % (
                        i / (len(test_set) // batch_size) * 100),
                    end=""
                )
                data = test_set[i * batch_size: (i + 1) * batch_size]
                sampled = sampled_test_set[i * batch_size: (i + 1) * batch_size]
                recovery = sess.run(output, feed_dict={input: sampled, is_training: False})
                for j in range(batch_size):
                    num = i * batch_size + j
                    np.save(
                        "{}/{}/{:03}_sample.npy".format(dirname, "test_results", (num)),
                        np.squeeze(np.abs(data[j]))
                    )
                    np.save(
                        "{}/{}/{:03}_adjoint.npy".format(dirname, "test_results", (num)),
                        np.abs(np.fft.ifft2(np.squeeze(sampled[j])))
                    )
                    np.save(
                        "{}/{}/{:03}_recovery.npy".format(dirname, "test_results", (num)),
                        np.squeeze(np.abs(recovery[j]))
                    )
            print("\rRunning... 100 % ")
            np.save("{}/train_losses.npy".format(dirname), np.array(losses))
            saver.save(sess, "{}/last_run.ckpt".format(dirname))
    except BaseException as e:
        print()
        io.log("Error occured!", "error")
        # io.log("Cleaning up save dir", "detail")
        # shutil.rmtree("./{}".format(dirname))
        raise e
|
#!/usr/bin/env python
import codecs
import datetime
import dateutil.parser
import dateutil.tz
import json
import mimetypes
import os
import re
import platform
import sys
import shutil
# Python 2/3 compatibility: urllib.parse replaced the old urlparse module.
if sys.version_info[0] > 2:
    from urllib.parse import urlparse
    from urllib.parse import parse_qs
else:
    from urlparse import urlparse
    from urlparse import parse_qs
# HTML escape table (mustache.js-style). The previous table mapped every
# character to itself — presumably entity-unescaped by an extraction step —
# which made escape_html a no-op.
entity_map = {
    "&": "&amp;", "<": "&lt;", ">": "&gt;", '"': "&quot;",
    "'": "&#39;", "/": "&#x2F;", "`": "&#x60;", "=": "&#x3D;"
}
def escape_html(text):
    """Escape HTML-special characters in `text` using entity_map."""
    escaped = [entity_map.get(character, character) for character in text]
    return "".join(escaped)
def merge(maps):
    """Flatten `maps` into one dict; later mappings override earlier keys."""
    return {key: value for mapping in maps for key, value in mapping.items()}
def mustache(template, view, partials):
    """Render a minimal Mustache template.

    Supports sections {{#name}}...{{/name}} (lists repeat, booleans toggle),
    partials {{>name}} (resolved via the `partials` callable), unescaped
    variables {{{name}}} and HTML-escaped variables {{name}}.
    """
    def replace_section(match):
        name = match.group(1)
        content = match.group(2)
        if name in view:
            section = view[name]
            if isinstance(section, list) and len(section) > 0:
                # Render the section once per item, item keys shadowing the view.
                return "".join(mustache(content, merge([ view, item ]), partials) for item in section);
            if isinstance(section, bool) and section:
                return mustache(content, view, partials)
        # Missing or falsy sections render as nothing.
        return ""
    template = re.sub(r"{{#\s*([-_\/\.\w]+)\s*}}\s?([\s\S]*){{\/\1}}\s?", replace_section, template)
    def replace_partial(match):
        name = match.group(1)
        if callable(partials):
            return mustache(partials(name), view, partials)
        # No partial resolver: leave the tag untouched.
        return match.group(0)
    template = re.sub(r"{{>\s*([-_/.\w]+)\s*}}", replace_partial, template)
    def replace(match):
        name = match.group(1)
        value = match.group(0)
        if name in view:
            value = view[name]
            if callable(value):
                value = value()
            # Triple-brace values are themselves rendered, not escaped.
            return mustache(value, view, partials)
        return value
    template = re.sub(r"{{{\s*([-_/.\w]+)\s*}}}", replace, template)
    def replace_escape(match):
        name = match.group(1)
        value = match.group(0)
        if name in view:
            value = view[name]
            if callable(value):
                value = value()
            value = escape_html(value)
        return value
    template = re.sub(r"{{\s*([-_/.\w]+)\s*}}", replace_escape, template)
    return template
def read_file(path):
    """Read a UTF-8 text file and return its entire contents."""
    with codecs.open(path, mode="r", encoding="utf-8") as handle:
        contents = handle.read()
    return contents
def write_file(path, data):
    """Write `data` to a UTF-8 text file, creating parent directories as needed."""
    parent = os.path.dirname(path)
    if not os.path.exists(parent):
        os.makedirs(parent)
    with codecs.open(path, mode="w", encoding="utf-8") as handle:
        handle.write(data)
def format_date(date, format):
    """Format `date` for a given output flavour.

    "atom": UTC ISO-8601 with a trailing Z; "rss": RFC-822 style in UTC;
    "user": human-readable (e.g. "Jan 5, 2019"); anything else: "".
    """
    if format == "atom":
        return date.astimezone(dateutil.tz.gettz("UTC")).isoformat("T").split("+")[0] + "Z"
    if format == "rss":
        return date.astimezone(dateutil.tz.gettz("UTC")).strftime("%a, %d %b %Y %H:%M:%S %z")
    if format == "user":
        # Strip the leading zero from the day ("Jan 05" -> "Jan 5").
        return date.strftime("%b %d, %Y").replace(" 0", " ")
    return ""
def posts():
    """Return blog post folder names, newest (lexicographically greatest) first."""
    base = "content/blog"
    candidates = sorted(os.listdir(base), reverse=True)
    return [name for name in candidates
            if os.path.isdir(base + "/" + name)
            and os.path.exists(base + "/" + name + "/index.html")]
# Matches an opening HTML tag and captures its name.
tag_regexp = re.compile(r"<(\w+)[^>]*>")
# Matches the remainder of an HTML entity after "&".
entity_regexp = re.compile(r"(#?[A-Za-z0-9]+;)")
# Finds the next position that needs special handling (space, tag or entity).
break_regexp = re.compile(r" |<|&")
# Tags at which truncation stops outright (non-truncatable content).
truncate_map = { "pre": True, "code": True, "img": True, "table": True, "style": True, "script": True, "h2": True, "h3": True }
def truncate(text, length):
    """Truncate HTML `text` to roughly `length` visible characters.

    Counts rendered characters (tags and entities count specially), stops
    before non-truncatable tags, appends an ellipsis when content was cut,
    and re-closes any tags left open at the cut point.
    NOTE(review): indentation reconstructed from the logic — verify branch
    placement (especially the entity counting) against the original.
    """
    close_tags = {}
    ellipsis = ""
    count = 0
    index = 0
    while count < length and index < len(text):
        if text[index] == "<":
            if index in close_tags:
                # A previously recorded closing tag starts here; skip it.
                index += len(close_tags.pop(index))
            else:
                match = tag_regexp.match(text[index:])
                if match:
                    tag = match.groups()[0].lower()
                    if tag in truncate_map and truncate_map[tag]:
                        # Never cut inside pre/code/img/etc. — stop here.
                        break
                    index += match.end()
                    # Remember where this tag closes so it survives the cut.
                    match = re.search("(</" + tag + "\\s*>)", text[index:], re.IGNORECASE)
                    if match:
                        close_tags[index + match.start()] = "</" + tag + ">"
                else:
                    index += 1
                    count += 1
        elif text[index] == "&":
            index += 1
            match = entity_regexp.match(text[index:])
            if match:
                index += match.end()
            # An entity (or bare &) renders as one character.
            count += 1
        else:
            if text[index] == " ":
                index += 1
                count += 1
            # Consume the plain-text run up to the next space/tag/entity.
            skip = len(text) - index
            match = break_regexp.search(text[index:])
            if match:
                skip = match.start()
            if count + skip > length:
                ellipsis = "…"
                if count + skip - 15 > length:
                    skip = length - count
            index += skip
            count += skip
    output = [text[:index]]
    if len(ellipsis) > 0:
        output.append(ellipsis)
    for k in sorted(close_tags.keys()):
        output.append(close_tags[k])
    return "".join(output)
def load_post(path):
    """Parse a post file: a '---'-delimited metadata header, then HTML content.

    Returns a dict of metadata plus a "content" key, or None when `path`
    is missing or a directory.
    """
    if os.path.exists(path) and not os.path.isdir(path):
        data = read_file(path)
        item = {}
        content = []
        # -1 = before header, 0 = inside header, >0 = past header.
        metadata = -1
        lines = re.split(r"\r\n?|\n", data)
        while len(lines) > 0:
            line = lines.pop(0)
            if line.startswith("---"):
                metadata += 1
            elif metadata == 0:
                # Inside the metadata block: parse "name: value" pairs.
                index = line.find(":")
                if index >= 0:
                    name = line[0:index].strip()
                    value = line[index+1:].strip()
                    if value.startswith('"') and value.endswith('"'):
                        value = value[1:-1]
                    item[name] = value
            else:
                content.append(line)
        item["content"] = "\n".join(content)
        return item
    return None
def render_blog(folders, root, page):
    """Render up to 10 posts into a blog feed page; paginate the remainder.

    Recursively writes blog/pageN.html for the remaining folders and returns
    the rendered HTML for this page.
    """
    view = { "items": [] }
    count = 10
    while count > 0 and len(folders) > 0:
        folder = folders.pop(0)
        item = load_post("content/blog/" + folder + "/index.html")
        # Drafts are only rendered outside production.
        if item and (item["state"] == "post" or environment != "production"):
            item["url"] = "blog/" + folder + "/"
            if "date" in item:
                date = dateutil.parser.parse(item["date"])
                item["date"] = format_date(date, "user")
            content = item["content"]
            content = re.sub(r"\s\s", " ", content)
            truncated = truncate(content, 250)
            item["content"] = truncated
            # "more" marks posts whose preview was actually shortened.
            item["more"] = truncated != content
            view["items"].append(item)
            count -= 1
    view["placeholder"] = []
    if len(folders) > 0:
        # Emit a lazy-load placeholder pointing at the next page.
        page += 1
        location = "blog/page" + str(page) + ".html";
        view["placeholder"].append({ "url": "/" + location })
        destination = root + "/" + location
        data = render_blog(folders, root, page)
        write_file(destination, data)
    template = read_file("themes/" + theme + "/feed.html")
    return mustache(template, view, None)
def render_post(source, destination):
    """Render a single blog post through the post template.

    Returns True when `source` was a blog post (and was rendered), False so
    the caller can fall back to regular page rendering.
    """
    if source.startswith("content/blog/") and source.endswith("/index.html"):
        item = load_post(source)
        if item:
            # Default the author to the site author.
            if not "author" in item:
                item["author"] = configuration["name"]
            if "date" in item:
                date = dateutil.parser.parse(item["date"])
                item["date"] = format_date(date, "user")
            if not "telemetry" in item:
                item["telemetry"] = ""
                if "telemetry" in configuration:
                    item["telemetry"] = mustache(configuration["telemetry"], item, None)
            view = merge([ configuration, item ])
            template = read_file("themes/" + theme + "/post.html")
            data = mustache(template, view, lambda name: read_file("themes/" + theme + "/" + name))
            write_file(destination, data)
            return True
    return False
def render_feed(source, destination):
    """Render the RSS/Atom feed template with the 10 most recent posts."""
    host = configuration["host"]
    # Feed flavour is taken from the template's extension ("rss" or "atom").
    format = os.path.splitext(source)[1].replace(".", "")
    url = host + "/blog/feed." + format
    count = 10
    feed = {
        "name": configuration["name"],
        "description": configuration["description"],
        "author": configuration["name"],
        "host": host,
        "url": url,
        "items": []
    }
    recent_found = False
    recent = datetime.datetime.now()
    folders = posts()
    while len(folders) > 0 and count > 0:
        folder = folders.pop(0)
        item = load_post("content/blog/" + folder + "/index.html")
        if item and (item["state"] == "post" or environment != "production"):
            item["url"] = host + "/blog/" + folder + "/"
            # Omit the author field when it matches the site author.
            if not "author" in item or item["author"] == configuration["name"]:
                item["author"] = False
            if "date" in item:
                date = dateutil.parser.parse(item["date"])
                updated = date
                if "updated" in item:
                    updated = dateutil.parser.parse(item["updated"])
                item["date"] = format_date(date, format)
                item["updated"] = format_date(updated, format)
                # Track the most recent update for the feed-level timestamp.
                if not recent_found or recent < updated:
                    recent = updated
                    recent_found = True
            item["content"] = escape_html(truncate(item["content"], 10000));
            feed["items"].append(item)
            count -= 1
    feed["updated"] = format_date(recent, format)
    template = read_file(source)
    data = mustache(template, feed, None)
    write_file(destination, data)
def render_page(source, destination):
    """Render an HTML page: posts go through render_post; everything else
    through the page template with navigation and the lazy blog stream."""
    if render_post(source, destination):
        return
    template = read_file(os.path.join("./", source))
    view = merge([ configuration ])
    # "blog" renders lazily: the feed plus an infinite-scroll loader script.
    view["blog"] = lambda: render_blog(posts(), os.path.dirname(destination), 0) + """<script type=\"text/javascript\">
function updateStream() {
var element = document.getElementById("stream");
if (element) {
var rect = element.getBoundingClientRect();
var threshold = 0;
if (rect.bottom > threshold && (window.innerHeight - rect.top) > threshold) {
var url = element.getAttribute("title");
var xmlHttp = new XMLHttpRequest();
xmlHttp.open("GET", url, true);
xmlHttp.onreadystatechange = function () {
if (xmlHttp.readyState == 4 && xmlHttp.status == 200) {
element.insertAdjacentHTML('beforebegin', xmlHttp.responseText);
element.parentNode.removeChild(element);
updateStream();
}
};
xmlHttp.send(null);
}
}
}
updateStream();
window.addEventListener('scroll', function(e) {
updateStream();
});
</script>
"""
    view["pages"] = []
    for page in configuration["pages"]:
        # A nav entry is shown when it is the active page or flagged visible.
        active = ("content" + page["url"]).rstrip('/') == os.path.dirname(source)
        if active or ("visible" in page and page["visible"]):
            view["pages"].append({"name": page["name"], "url": page["url"], "active": active })
    data = mustache(template, view, lambda name: read_file("themes/" + theme + "/" + name))
    write_file(destination, data)
def render_file(source, destination):
    """Copy a static asset to the build tree verbatim."""
    shutil.copyfile(source, destination)
def render(source, destination):
    """Dispatch rendering by extension: feeds, HTML pages, or a raw copy."""
    print(destination)
    extension = os.path.splitext(source)[1]
    if extension in (".rss", ".atom"):
        render_feed(source, destination)
    elif extension == ".html":
        render_page(source, destination)
    else:
        render_file(source, destination)
def render_directory(source, destination):
    """Recursively render `source` into `destination`, skipping dotfiles.

    Both arguments are expected to end with "/" — paths are joined by plain
    string concatenation.
    """
    if not os.path.exists(destination):
        os.makedirs(destination)
    for item in os.listdir(source):
        if not item.startswith("."):
            if os.path.isdir(source + item):
                render_directory(source + item + "/", destination + item + "/")
            else:
                render(source + item, destination + item)
def clean_directory(directory):
    """Delete everything inside `directory`, keeping the directory itself."""
    if not (os.path.exists(directory) and os.path.isdir(directory)):
        return
    for entry in os.listdir(directory):
        full_path = directory + "/" + entry
        if os.path.isdir(full_path):
            shutil.rmtree(full_path)
        else:
            os.remove(full_path)
# "production" hides draft posts; any other value shows them.
environment = os.getenv("ENVIRONMENT")
print("python " + platform.python_version() + " " + (environment if environment else ""))
with open("content.json") as configurationFile:
    configuration = json.load(configurationFile)
destination = "build"
theme = "default"
# Minimal CLI: [--theme NAME] [DESTINATION]
args = sys.argv[1:]
while len(args) > 0:
    arg = args.pop(0)
    if arg == "--theme" and len(args) > 0:
        theme = args.pop(0)
    else:
        destination = arg
clean_directory(destination)
render_directory("content/", destination + "/") ;
|
import logging
import time
from apispec.exceptions import OpenAPIError
from apispec.utils import validate_spec
from flask_script import Command, Option
from libtrustbridge.websub import repos
from libtrustbridge.websub.processors import Processor
from api import use_cases
from api.docs import spec
from api.repos import ChannelRepo
logger = logging.getLogger(__name__)
class GenerateApiSpecCommand(Command):
    """
    Generate api spec
    """

    def get_options(self):
        return (
            Option('-f', '--filename',
                   dest='filename',
                   default='docs/swagger.yaml',
                   help='save generated spec into file'),
        )

    def run(self, filename):
        """Validate the OpenAPI spec and write it to `filename` as YAML.

        Exits with status 1 when the spec fails validation.
        """
        try:
            validate_spec(spec)
        except OpenAPIError as e:
            logger.exception(e)
            # Plain string: the old f-string had no placeholders.
            print('API spec is not valid')
            exit(1)
        with open(filename, 'w') as fp:
            fp.write(spec.to_yaml())
        # Report the actual target path (previously printed a literal "(unknown)").
        print(f'API spec has been written into {filename}')
class RunProcessorCommand(Command):
    """Base command: iterate a websub Processor in an endless polling loop."""

    def __call__(self, app=None, *args, **kwargs):
        # Keep a reference to the Flask app so get_processor() can read config.
        self.app = app
        return super().__call__(app, *args, **kwargs)

    def run(self):
        logger.info('Starting processor %s', self.__class__.__name__)
        processor = self.get_processor()
        logger.info('Run processor for use case "%s"', processor.use_case.__class__.__name__)
        for result in processor:
            if result is None:
                # Nothing to process right now; back off briefly.
                time.sleep(1)

    def get_processor(self):
        # Subclasses must build and return the Processor to iterate.
        raise NotImplementedError
class RunCallbackSpreaderProcessorCommand(RunProcessorCommand):
    """
    Convert each incoming message to set of messages containing (websub_url, message)
    so they may be sent and fail separately
    """

    def get_processor(self):
        config = self.app.config
        notifications_repo = repos.NotificationsRepo(config['NOTIFICATIONS_REPO_CONF'])
        delivery_outbox_repo = repos.DeliveryOutboxRepo(config['DELIVERY_OUTBOX_REPO_CONF'])
        subscriptions_repo = repos.SubscriptionsRepo(config.get('SUBSCRIPTIONS_REPO_CONF'))
        use_case = use_cases.DispatchMessageToSubscribersUseCase(
            notifications_repo=notifications_repo,
            delivery_outbox_repo=delivery_outbox_repo,
            subscriptions_repo=subscriptions_repo,
        )
        return Processor(use_case=use_case)
class RunCallbackDeliveryProcessorCommand(RunProcessorCommand):
    """
    Iterate over the DeliverCallbackUseCase.
    """

    def get_processor(self):
        config = self.app.config
        delivery_outbox_repo = repos.DeliveryOutboxRepo(config['DELIVERY_OUTBOX_REPO_CONF'])
        use_case = use_cases.DeliverCallbackUseCase(
            delivery_outbox_repo=delivery_outbox_repo,
            hub_url=config['HUB_URL'],
        )
        return Processor(use_case=use_case)
class RunNewMessagesObserverCommand(RunProcessorCommand):
    """
    Watch for new messages being sent to us and send notifications by jurisdiction
    """

    def get_processor(self):
        config = self.app.config
        channel_repo = ChannelRepo(config['CHANNEL_REPO_CONF'])
        notifications_repo = repos.NotificationsRepo(config['NOTIFICATIONS_REPO_CONF'])
        use_case = use_cases.NewMessagesNotifyUseCase(
            receiver=config['JURISDICTION'],
            channel_repo=channel_repo,
            notifications_repo=notifications_repo,
        )
        return Processor(use_case=use_case)
|
../geometry.py |
import argparse
import os
import sys
import warnings
import numpy as np
import pandas as pd
import torch
import src.dataprocessing as dataproc
import src.training as train_n2f
import src.experimentutils as experutils
import src.runutils as runutils
import src.utils
# ---- command-line interface ----------------------------------------------
parser = argparse.ArgumentParser(description="Run the training on ethereum graph")
parser.add_argument("path", type=str, help="path for saving submission_results")
# NOTE(review): several help strings below were copy-pasted from --max_dim
# ("the number of dims to search") and do not describe their option.
parser.add_argument("--data_path", type=str, help="the number of dims to search",
                    default="data/preprocessed_ethereum_2018_2020.csv")
parser.add_argument("--split_path", type=str, help="Path to train, val, test split folder",
                    default=None)
parser.add_argument("--node2vec_path", type=str, help="Path to pretrained node2vec embeddings",
                    default=None)
parser.add_argument("--config_path", type=str, help="path were the configs can be read",
                    default="configs_eth/eth_config_auto_noise.json")
parser.add_argument("--max_dim", type=int, help="the number of dims to search", default=3)
parser.add_argument("--min_dim", type=int, help="the number of dims to search", default=1)
parser.add_argument("--seed", type=int, help="random seed", default=1234)
parser.add_argument("--use_gpu", action='store_true', help="Use gpu")
parser.add_argument("--gates_init", type=str, help="Which gates init to use", default='zeros')
parser.add_argument("--only_baselines", action='store_true', help="Only run the baselines")
# NOTE(review): help text says "Only run the baselines" but this flag SKIPS them.
parser.add_argument("--skip_baselines", action='store_true', help="Only run the baselines")
parser.add_argument("--verbosity", type=int, help="verbosity level", default=1)
parser.add_argument("--debug", action='store_true', help="Run in debug mode, only one epoch per model")
args = parser.parse_args()

# ---- output folders and compute device -----------------------------------
chp_folder = os.path.join(args.path, "chpt")
os.makedirs(args.path, exist_ok=True)
os.makedirs(chp_folder, exist_ok=True)
dev = "cuda" if torch.cuda.is_available() and args.use_gpu else "cpu"
device = torch.device(dev)
if device.type == 'cuda' and args.verbosity > 0:
    print(f"Using {torch.cuda.get_device_name(0)}")
experutils.set_seeds(args.seed)

# ---- configuration: disable gradient noise, set optimisation limits ------
configs = src.utils.load_configs(args.config_path)
configs['hyperpara_config'].emb_grad_noise = src.utils.GradientNoise(False, np.inf, 0.)
configs['hyperpara_config'].gates_grad_noise = src.utils.GradientNoise(False, np.inf, 0.)
configs['baseline_hyperpara_config'].grad_noise = src.utils.GradientNoise(False, np.inf, 0.)
configs['init_config'].gates_init = args.gates_init
configs['train_config'].max_steps = 1e5
configs['train_config'].tol = 1e-5
configs['train_config'].substep_tol = 1e-5
configs['init_config'].max_steps = 1e4
fg_max_iter = 100
if args.debug:
    # Zero training steps everywhere so the pipeline runs end-to-end quickly.
    warnings.warn("RUNNING WITH DEBUG CONFIGURATIONS. MODELS WONT BE TRAINED.")
    configs['train_config'].max_steps = 0
    configs['init_config'].embedding_init = 'zeros'
    configs['init_config'].gates_init = 'zeros'
    configs['init_config'].max_steps = 0
    fg_max_iter = 1
    # fg_max_iter = None

# Regularisation weight grids for the hyper-parameter search.
emb_reg_weights = [3., 1, 0.3, 0.1]
gates_reg_weights = [3., 1., 0.3, 0.1]
q_indices = [0, 1, 2]

# ---- data: load graph, summarise flow, build or load the split -----------
graph = dataproc.Graph.read_csv(args.data_path)
num_nodes = graph.num_vertices()
flow_summary = dataproc.flow_summary(graph.flow)
pd.DataFrame([flow_summary]).to_csv(os.path.join(args.path, f"ethereum_flow_info.csv"), header=True, index=False)
if args.split_path is None:
    # No precomputed split: create a fresh 70/15/15 split and persist it.
    train_graph, val_graph, test_graph = graph.split_train_val_test_graphs((0.7, 0.15, 0.15))
    train_graph.to_csv(os.path.join(args.path, f"preprocessed_ethereum_train.csv"))
    val_graph.to_csv(os.path.join(args.path, f"preprocessed_ethereum_val.csv"))
    test_graph.to_csv(os.path.join(args.path, f"preprocessed_ethereum_test.csv"))
else:
    train_graph = dataproc.Graph.read_csv(os.path.join(args.split_path, f"preprocessed_ethereum_train.csv"))
    val_graph = dataproc.Graph.read_csv(os.path.join(args.split_path, f"preprocessed_ethereum_val.csv"))
    test_graph = dataproc.Graph.read_csv(os.path.join(args.split_path, f"preprocessed_ethereum_test.csv"))

# ---- run the hyper-parameter searches ------------------------------------
base = train_n2f.TrainerBase(train_graph=train_graph, val_graph=val_graph, device=device,
                             loss_config=configs['loss_config'])
model_factory = experutils.ModelFactory(base=base)
if not args.skip_baselines:
    runutils.run_eth_hp_search_nn_baselines(model_factory, emb_reg_weights, q_indices, fg_max_iter, args, configs)
else:
    print("====> Skipping baselines")
if args.only_baselines:
    sys.exit("Finished baselines with option 'only_baselines', exiting.")
runutils.run_eth_hp_search_gradient_models(model_factory, emb_reg_weights, gates_reg_weights, args, configs)
|
# coding: utf-8
#算法纯属乱搞T^T
class Solution:
    # @return an integer
    def atoi(self, s):
        """Parse the leading integer of *s* (optional +/- sign), clamped
        to the signed 32-bit range; return 0 when no digits are found."""
        s = s.strip()
        if s == '':
            return 0
        digits_start, stop = 0, len(s)
        has_sign = s[0] in '+-'  # whether a sign character is present
        if has_sign:
            digits_start = 1
        # stop at the first non-digit after the optional sign
        for pos in range(digits_start, len(s)):
            if not ('0' <= s[pos] <= '9'):
                stop = pos
                break
        if digits_start == stop:
            return 0  # only a sign (or a non-digit) — no number
        if not ('0' <= s[stop - 1] <= '9'):
            return 0
        begin = digits_start - 1 if has_sign else digits_start
        num = int(s[begin:stop])
        # clamp to 32-bit signed integer range
        if num > 2147483647:
            return 2147483647
        if num < -2147483648:
            return -2147483648
        return num
# Quick manual smoke test (Python 2 print statement).
a = Solution()
print a.atoi("1")
|
# Read a year from stdin and report whether it is a leap year.
# Fixes vs. original: input() returns a string in Python 3 (i % 4 raised
# TypeError), and the Gregorian century rule was missing (1900 is NOT a
# leap year, 2000 is).
i = int(input("Enter the Number"))
if i % 4 == 0 and (i % 100 != 0 or i % 400 == 0):
    print("Leap Year")
else:
    print("Not Leap Year")
|
from kaa.reach import ReachSet
from kaa.flowpipe import FlowPipePlotter
from models.basic.basic import Basic
import kaa.benchmark as Benchmark
def test_plot_basic():
    """Compute a 10-step reachable set for the Basic model, plot the
    flowpipe's projection onto variable 0, and dump benchmark stats."""
    basic_mod = Basic()
    basic_reach = ReachSet(basic_mod)
    flowpipe = basic_reach.computeReachSet(10)
    FlowPipePlotter(flowpipe).plot2DProj(0)
    Benchmark.generate_stats()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat May 9 18:35:57 2020
@author: TakahiroKurokawa
"""
from typing import Optional
def increment(
        page_num:int,
        last:int,
        *,
        ignore_error:bool = False) ->Optional[int]:
    """Print and return the next page number.

    Past *last*: return None when ignore_error is set, otherwise raise
    ValueError.
    """
    candidate = page_num + 1
    if candidate > last:
        if ignore_error:
            return None
        raise ValueError("Invalid arguments")
    print(candidate)
    return candidate
# Show the annotations dict collected from increment's signature.
print(increment.__annotations__)
def decrement(page_num:int) -> int:
    """Return the page number immediately before *page_num*."""
    prev_page: int = page_num - 1
    return prev_page
# Demonstrate decrement().
a=decrement(2)
print(a)
import arcpy

# Workspace for all relative dataset names used below.
arcpy.env.workspace = "M:/Programming/Practical1/Albertsurface"
try:
    # NOTE(review): toolbox path casing differs from the workspace path
    # ("practical1/albertsurface") — harmless on Windows; TODO confirm.
    try:
        arcpy.ImportToolbox("M:/Programming/practical1/albertsurface/Models.tbx", "models")
    except arcpy.ExecuteError as e:
        print("Import toolbox error", e)
    # Remove stale outputs from a previous run so the model can recreate them.
    if arcpy.Exists("int.shp"):
        arcpy.Delete_management("int.shp")
    if arcpy.Exists("buff.shp"):
        arcpy.Delete_management("buff.shp")
    try:
        # Run the imported "Explosion" model: 100m buffer around the
        # explosion point, intersected with the building polygons.
        arcpy.Explosion_models("explosion0/point","100 Meters","build0/polygon","int.shp","buff.shp")
    except arcpy.ExecuteError as e:
        print("Model run error", e)
except Exception as e:
    print(e)
|
from bson.objectid import ObjectId
class PointOfInterest:
    """Async data-access helper for the 'POI' Mongo collection."""

    def __init__(self, db):
        # db: an async (motor-style) database handle — assumed, TODO confirm
        self.db = db
        self.collection = self.db['POI']

    async def fetchPOIInfo(self, poi_id):
        """Return the single POI document with the given id, or None."""
        return await self.collection.find_one({"_id": ObjectId(poi_id)})

    async def fetchPOIAll(self):
        """Return up to 100 POI documents."""
        return await self.collection.find().to_list(length=100)

    async def create(self, type, title, content):
        """Insert a new POI document and return the insert result."""
        document = {
            "poi_type": type,
            "title": title,
            "content": content
        }
        return await self.collection.insert_one(document)
|
# Example of assigning a variable (original comment was in Norwegian:
# "Eksempel på tilordning av variabel"; the output itself stays Norwegian).
navn = 'Joakim'
print('Jeg heter', navn)
|
"""
Author: Sidhin S Thomas (sidhin@trymake.com)
Copyright (c) 2017 Sibibia Technologies Pvt Ltd
All Rights Reserved
Unauthorized copying of this file, via any medium is strictly prohibited
Proprietary and confidential
"""
from django.db import models
from trymake.apps.orders_management.models import Order
from trymake.apps.support_staff.models import Staff
class Complaint(models.Model):
    """A customer complaint raised against an Order, trackable by status
    and assignable to a support staff member."""

    # status codes used in the CHOICES field below
    RESOLVED = 1
    WAITING = 2
    PROCESSING = 3
    CANCELED = 4
    CHOICES = (
        (RESOLVED, "Resolved"),
        (WAITING, "Waiting"),
        (PROCESSING, "Processing"),
        (CANCELED, "Canceled")
    )
    # NOTE(review): ForeignKey without on_delete is only valid on Django < 2.0
    # (where it defaults to CASCADE); on_delete is mandatory from 2.0 onward.
    order = models.ForeignKey(Order,db_index=True)
    title = models.CharField(max_length=250)
    body = models.TextField()
    # NOTE(review): no auto_now/auto_now_add and no null=True, so callers
    # must always supply all three timestamps explicitly.
    date_published = models.DateTimeField()
    last_change = models.DateTimeField()
    date_closed = models.DateTimeField() # Resolved or Canceled can be counted as closed
    status = models.PositiveSmallIntegerField(choices=CHOICES)
    assigned = models.ForeignKey(Staff, null=True, on_delete=models.SET_NULL)
|
# Replace the K-th character of S: 'A' -> 'a', 'B' -> 'b', anything else -> 'c'.
N, K = map(int, input().split())
S = input()
replacement = {'A': 'a', 'B': 'b'}.get(S[K - 1], 'c')
ans = S[:K - 1] + replacement + S[K:]
print(ans)
|
# General imports
from django.contrib import admin
# Models import
from . import models as comment_models
class CommentInline(admin.TabularInline):
    """Inline editor for Comment rows inside the CommentField admin page."""
    model = comment_models.Comment
    extra = 1  # show one empty comment form by default
@admin.register(comment_models.CommentField)
class CommentFieldAdmin(admin.ModelAdmin):
    """Admin page for CommentField with its comments editable inline."""
    list_display = [
        'field_type',
        'commented_id',
    ]
    list_filter = [
        'field_type',
    ]
    inlines = [
        CommentInline
    ]
|
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 15 16:18:57 2020
@author: RAHIL
"""
from EuclidianExtended import EuclidianExtended as EExtended
class MCipher:
    """Multiplicative cipher over a lowercase alphabet of size *n*."""

    def __init__(self,k1,n):
        print("Using Multiplicative cipher")
        self.key1 = k1        # multiplicative key; must be coprime with n to decrypt
        self.mod = n          # alphabet size
        self.base = ord('a')  # code point of the first alphabet letter

    def encrypt(self, plainText):
        """Return the ciphertext: each letter index times key1, mod n."""
        pieces = []
        for ch in plainText:
            shifted = ((ord(ch) - ord('a')) * self.key1) % self.mod
            pieces.append(chr(self.base + shifted))
        return ''.join(pieces)

    def decrypt(self, crypt):
        """Invert encrypt() using the modular inverse of key1."""
        gcd, inv = EExtended().run(self.key1, self.mod)
        pieces = []
        for ch in crypt:
            shifted = ((ord(ch) - ord('a')) * inv) % self.mod
            pieces.append(chr(self.base + shifted))
        return ''.join(pieces)
# Interactive demo: encrypt/decrypt round trips for user-supplied text.
print(__name__)
if(__name__ == "__main__"):
    k1,n = list(map(int,input("enter key & numberOfCharacters : ").split()))
    MCrypter = MCipher(k1, n)
    for _ in range(int(input("number of test case : "))):
        plain = input("Plain text: ")
        cipher = MCrypter.encrypt(plain)
        print("cipher : ",cipher)
        print("plain text(decrypted) : ",MCrypter.decrypt(cipher))
|
# %%
import os
import mne
import matplotlib.pyplot as plt

# Folders holding the xdawn-denoised and the raw epoch files.
data_folder = '../MVPA_data_xdawn_v3'
raw_data_folder = '../MVPA_data_xdawn_raw_v3'
uid = 'MEG_S03-0'
# %%
# Load the denoised and raw training epochs and baseline-correct both
# using the pre-stimulus interval (start .. 0).
epochs = mne.read_epochs(os.path.join(data_folder, f'{uid}-train-epo.fif'))
raw_epochs = mne.read_epochs(os.path.join(
    raw_data_folder, f'{uid}-train-epo.fif'))
epochs.apply_baseline((None, 0))
raw_epochs.apply_baseline((None, 0))
# NOTE(review): display() is an IPython/Jupyter builtin; this line raises
# NameError when run as a plain script — confirm notebook-only usage.
display(epochs, raw_epochs)
# %%
plt.style.use('ggplot')
# Time points (seconds) to mark on the joint plots, per event id.
times = {'1': [0.2, 0.3, 0.4, 0.5, 0.6],
         '3': [0]}
for event in ['1', '3']:
    epochs[event].average().plot_joint(times=times[event],
                                       title=f'Denoise')
    raw_epochs[event].average().plot_joint(times=times[event],
                                           title=f'Raw')
    print('-')
# %%
|
import os
from glob import glob
from setuptools import setup

PACKAGE_NAME = 'squarbo_gazebo'

# ROS 2-style python package: installs launch and world files into the
# package's share directory alongside the python sources under src/.
setup(
    name=PACKAGE_NAME,
    version='1.0.0',
    package_dir={'': 'src'},
    data_files=[
        (os.path.join('share', PACKAGE_NAME), glob('launch/*.launch.py')),
        (os.path.join('share', PACKAGE_NAME), glob('world/*.world'))
    ],
    packages=[PACKAGE_NAME],
    install_requires=['setuptools'],
    zip_safe=True,
    tests_require=['pytest'],
    entry_points={
        'console_scripts': [
            'spawner_squarbo = squarbo_gazebo.spawner_squarbo:main',
        ],
    },
)
|
import numpy as np
import random,copy,time
import pygame as pg
def weights_classifier(structure,chromosome):
    """Slice the flat *chromosome* into one weight matrix per layer pair.

    Consumes structure[i] * structure[i+1] values for each of the first
    len(structure) - 2 transitions (the final structure entry acts as a
    sentinel and contributes no matrix).
    """
    matrices = []
    offset = 0
    for layer in range(len(structure) - 2):
        rows, cols = structure[layer], structure[layer + 1]
        end = offset + rows * cols
        matrices.append(np.reshape(np.matrix(chromosome[offset:end]), (rows, cols)))
        offset = end
    return matrices
def sigmoid(x):
    """Logistic activation: 1 / (1 + e^-x), elementwise for arrays."""
    return 1.0 / (1.0 + np.exp(-x))
def feed_forward(structure,chromosome,train_input):
    """Run *train_input* through the network encoded by *chromosome*,
    applying a sigmoid after every layer; return the final activation."""
    activation = train_input
    for weights in weights_classifier(structure, chromosome):
        activation = sigmoid(np.dot(activation, weights))
    return activation
def playing(structure,chromosome,train_input):
    """Return the network's decision for *train_input* as an ndarray."""
    return np.array(feed_forward(structure, chromosome, train_input))
def visionApple(snake,apple):
    """8-way apple flags relative to the snake's head.

    Walks outward in 60px steps (up to 10 cells) along the four cardinal
    and four diagonal rays; each slot in the returned list is set to 1
    when the apple lies on that ray.
    Slots: [N, S, W, E, NW, SW, NE, SE].
    """
    vision_apple=[0, 0, 0, 0, 0, 0, 0, 0]
    # running ray coordinates, all starting at the head
    n=snake[0][1]
    s=snake[0][1]
    w=snake[0][0]
    e=snake[0][0]
    for i in range(10):
        n-=60
        s+=60
        w-=60
        e+=60
        if apple[1]==n and apple[0]==snake[0][0]:
            vision_apple[0]=1
        if apple[1]==s and apple[0]==snake[0][0]:
            vision_apple[1]=1
        if apple[0]==w and apple[1]==snake[0][1]:
            vision_apple[2]=1
        if apple[0]==e and apple[1]==snake[0][1]:
            vision_apple[3]=1
        if apple[1]==n and apple[0]==w:
            vision_apple[4]=1
        # NOTE(review): the three diagonal checks below are not symmetric
        # with the NW case (note the +60 offsets); they look like
        # off-by-one-cell bugs, but the trained networks were evolved
        # against exactly these inputs, so they are documented rather
        # than changed. TODO confirm intent.
        if apple[1]==s and apple[0]+60==w:
            vision_apple[5]=1
        if apple[1]+60==n and apple[0]==e:
            vision_apple[6]=1
        if apple[1]==s and apple[0]==e:
            vision_apple[7]=1
    return vision_apple
def visionBody(snake,apple):
    """8-way adjacency flags: is a body segment in each of the 8 cells
    around the head? Slots: [N, S, W, E, NW, SW, NE, SE].

    *apple* is accepted for signature compatibility but unused.
    """
    head_x, head_y = snake[0][0], snake[0][1]
    flags = [0] * 8
    # (dx, dy) offset of a segment relative to the head, per direction slot
    offsets = [(0, -60), (0, 60), (-60, 0), (60, 0),
               (-60, -60), (-60, 60), (60, -60), (60, 60)]
    for seg in snake[1:]:
        for slot, (dx, dy) in enumerate(offsets):
            if seg[0] == head_x + dx and seg[1] == head_y + dy:
                flags[slot] = 1
    return flags
def visionWall(snake):
    """Distances (in 60px cells) from the head to the N, S, W and E walls
    of the 600x600 board."""
    head_x, head_y = snake[0]
    cell, board = 60, 600
    return [head_y // cell,
            (board - head_y) // cell - 1,
            head_x // cell,
            (board - head_x) // cell - 1]
def move_d(decision):
    """One-hot move vector: select the column of *decision* (a 1xN
    array/matrix) holding the highest activation."""
    move = [0, 0, 0, 0]
    best = np.amax(decision)
    winning_col = np.where(decision == best)[1][0]
    move[winning_col] = 1
    return move
def game(structure,chromosome,speed):
    """Play one snake game driven by the network encoded in *chromosome*,
    rendering the board and the network activations with pygame.

    Returns (score, steps): apples eaten and moves made. The game ends on
    body collision, wall collision, or 500 moves without eating an apple.
    *speed* is the per-frame sleep in seconds.
    """
    weights_list=weights_classifier(structure,chromosome)
    # ---- window geometry and colours -----------------------------------
    snake_frame_w=600
    snake_frame_h=600
    nn_frame_w=800
    nn_frame_h=1000
    pg.init()
    screen = pg.display.set_mode((snake_frame_w+nn_frame_w,nn_frame_h))
    white=[255,255,255]
    red=[255,0,0]
    black=[0,0,0]
    green=[0,255,0]
    desert=[115,92,64]
    light_desert=[232,186,129]
    grey=[64,64,64]
    running=True
    apple_present=False
    score=0
    steps=0
    apple_steps=500  # moves left before the game times out without an apple
    # ---- 10x10 grid of 60px cells --------------------------------------
    x=[]
    y=[]
    for i in range(0,snake_frame_w,60):
        x.append(i)
        y.append(i)
    grid=[]
    for i in range(len(x)):
        for j in range(len(y)):
            grid.append((x[i],y[j]))
    snake_head=grid[55]
    snake=[snake_head]
    while running:
        pg.event.get()
        grid_c=copy.deepcopy(grid)
        # spawn a new apple on a random cell not occupied by the snake
        if apple_present==False:
            for body in snake:
                if body in grid:
                    grid_c.remove(body)
            apple=random.choice(grid_c)
            apple_present=True
        vision_apple=visionApple(snake,apple)
        # colour the vision ray pointing at the apple green (for display)
        if np.amax(vision_apple) > 0:
            line=vision_apple.index(int(np.amax(vision_apple)))
            colors=[red,red,red,red,red,red,red,red]
            colors[line]=green
        else:
            colors=[red,red,red,red,red,red,red,red]
        # first frame: all sensor inputs start zeroed
        if steps==0:
            vision_body=[0, 0, 0, 0, 0, 0, 0, 0]
            vision_apple=[0, 0, 0, 0, 0, 0, 0, 0]
            vision_wall=[0, 0, 0, 0]
            head_direction=[0, 0, 0, 0]
            tail_direction=[0, 0, 0, 0]
        # print('\nApple= ',vision_apple)
        # print('Body= ',vision_body)
        # print('head_direction= ',head_direction,' tail_direction= ',tail_direction, ' wall_dis= ',vision_wall)
        # print(' Score= ',score,' steps= ',steps)
        # 28 network inputs: 8 apple + 8 body + 4 wall + 4 head dir + 4 tail dir
        nn_input=np.concatenate([vision_apple,vision_body,vision_wall,head_direction,tail_direction])
        ###call nn
        decision=playing(structure,chromosome,nn_input)
        #print(decision)
        #### call move
        move=move_d(decision)
        #print(decision)
        snake_c=copy.deepcopy(snake)
        # move the head one cell in the chosen direction (N/S/W/E)
        if move ==[1,0,0,0]:
            snake[0]=(snake[0][0],snake[0][1]-60)
        if move ==[0,1,0,0]:
            snake[0]=(snake[0][0],snake[0][1]+60)
        if move ==[0,0,1,0]:
            snake[0]=(snake[0][0]-60,snake[0][1])
        if move ==[0,0,0,1]:
            snake[0]=(snake[0][0]+60,snake[0][1])
        # head landed on a body segment -> game over
        for i in range(1,len(snake)):
            if snake[i][0]==snake[0][0] and snake[i][1]==snake[0][1]:
                #print('snake= ',snake,' apple= ',apple,' Score= ',score,' steps= ',steps)
                print('Body Collision')
                running=False
        # body follows: each segment takes its predecessor's old position
        for i in range(1,len(snake)):
            snake[i]=snake_c[i-1]
        if snake[0]==apple:
            apple_present=False
            snake.append(snake_c[-1])
            #snake_c=copy.deepcopy(snake)
            score+=1
            apple_steps=500
            #print('Apple Gone')
        if len(snake)==1:
            vision_body=[0, 0, 0, 0, 0, 0, 0, 0]
        elif len(snake)>1:
            vision_body=visionBody(snake,apple)
        # head left the 600x600 board -> game over
        if snake[0][0] < 0 or snake[0][0] > 540 or snake[0][1] < 0 or snake[0][1] > 540 :
            print('Wall Collision')
            running=False
        # tail direction: where the last segment moved this frame
        if len(snake) != 1:
            if (snake_c[-1][1]-snake[-1][1])<0:
                tail_direction=[0,1,0,0]
            if (snake_c[-1][1]-snake[-1][1])>0:
                tail_direction=[1,0,0,0]
            if (snake_c[-1][0]-snake[-1][0])<0:
                tail_direction=[0,0,0,1]
            if (snake_c[-1][0]-snake[-1][0])>0:
                tail_direction=[0,0,1,0]
        else:
            tail_direction=move
        head_direction=move
        vision_wall=visionWall(snake)
        steps+=1
        apple_steps -= 1
        if apple_steps == 0:
            running=False
            print('Too much steps')
        # ---- drawing: board, stats panel, network panel --------------------
        screen.fill(white)
        wall=pg.Rect(0,0,snake_frame_w,snake_frame_h)
        background=pg.Rect(0,0,snake_frame_w,snake_frame_h)
        pg.draw.rect(screen,desert,wall)
        pg.draw.rect(screen,black,wall,2)
        stat_border=pg.Rect(0,snake_frame_h,snake_frame_w,snake_frame_h)
        stat_background=pg.Rect(0,0,snake_frame_w,nn_frame_h)
        pg.draw.rect(screen,light_desert,stat_background)
        pg.draw.rect(screen,black,stat_border,2)
        nn_background=pg.Rect(snake_frame_w,0,nn_frame_w,nn_frame_h)
        nn_board=pg.Rect(snake_frame_w,0,nn_frame_w,nn_frame_h)
        pg.draw.rect(screen,grey,nn_background)
        pg.draw.rect(screen,black,nn_board,1)
        #for i in range(len(grid_c)):
        #g_grid=pg.Rect(grid_c[i][0],grid_c[i][1],60,60)
        #pg.draw.rect(screen,black,g_grid,1)
        # ln_l=pg.draw.line(screen,colors[0],(snake[0][0],snake[0][1]),(snake[0][0],0),2)
        # s_l=pg.draw.line(screen,colors[1],(snake[0][0],snake[0][1]),(snake[0][0],600),2)
        # w_l=pg.draw.line(screen,colors[2],(snake[0][0],snake[0][1]),(0,snake[0][1]),2)
        # e_l=pg.draw.line(screen,colors[3],(snake[0][0],snake[0][1]),(600,snake[0][1]),2)
        # nw_l=pg.draw.line(screen,colors[4],(snake[0][0],snake[0][1]),(snake[0][0]-600,snake[0][1]-600),2)
        # sw_l=pg.draw.line(screen,colors[5],(snake[0][0],snake[0][1]),(snake[0][0]-600,snake[0][1]+600),2)
        # ne_l=pg.draw.line(screen,colors[6],(snake[0][0],snake[0][1]),(snake[0][0]+600,snake[0][1]-600),2)
        # ne_l=pg.draw.line(screen,colors[7],(snake[0][0],snake[0][1]),(snake[0][0]+600,snake[0][1]+600),2)
        g_apple=pg.Rect(apple[0]+15,apple[1]+15,30,30)
        pg.draw.rect(screen,green,g_apple)
        # snake: red head, black body, white cell borders
        for i in range(len(snake)):
            if i==0:
                color=red
            else:
                color=black
            g_snake=pg.Rect(snake[i][0],snake[i][1],60,60)
            b_snake=pg.Rect(snake[i][0],snake[i][1],60,60)
            pg.draw.rect(screen,color,g_snake)
            pg.draw.rect(screen,white,b_snake,1)
        # forward pass again, keeping every layer's activations for display
        outputs=[0]
        for i in range(len(weights_list)):
            if i==0:
                output=sigmoid(np.dot(nn_input,weights_list[i]))
            else:
                output=sigmoid(np.dot(output,weights_list[i]))
            outputs.append(output)
        outputs
        # draw the network: one row of circles per layer, green = active
        node_cords=[]
        for i in range(len(structure)-1):
            if i !=0:
                strongest_nodes=np.where(outputs[i]==np.amax(outputs[i]))[1].tolist()
            if i == 0:
                y_cor=snake_frame_w+10+15
            else:
                y_cor=(y_cor+int((nn_frame_h/30)/(len(structure)-1))*30)
            start_x=int(((nn_frame_h/30)-structure[i])/2) * 30
            nodes_center_cor=[]
            for x in range(structure[i]):
                if x==0:
                    x_cor=(start_x-30)+45
                else:
                    x_cor=x_cor+30
                nodes_center_cor.append((x_cor,y_cor))
                if i == 0:
                    if nn_input[x] != 0:
                        pg.draw.circle(screen, green, (y_cor,x_cor) , 13)
                    else:
                        pg.draw.circle(screen, white, (y_cor,x_cor) , 10)
                elif i != 0:
                    if x in strongest_nodes:
                        #print('---------------->',x,strongest_line)
                        pg.draw.circle(screen, green, (y_cor,x_cor) , 13)
                    else:
                        pg.draw.circle(screen, white, (y_cor,x_cor) , 10)
                else:
                    #print('-->',x)
                    pg.draw.circle(screen, white, (y_cor,x_cor) , 10)
            node_cords.append(nodes_center_cor)
            #node_cords.append(0)
        # connect consecutive layers with randomly coloured lines
        for i in range(len(node_cords)):
            if i < len(node_cords)-1:
                for from_cor in node_cords[i]:
                    #print(from_cor[0],from_cor[1]+10)
                    for to_cor in node_cords[i+1]:
                        #print(from_cor[0],from_cor[1]+10,to_cor[0],to_cor[1]-10)
                        r=random.randint(0,255)
                        g=random.randint(0,255)
                        b=random.randint(0,255)
                        l_color=[r,g,b]
                        line=pg.draw.line(screen,l_color,(from_cor[1]+13,from_cor[0]),(to_cor[1]-13,to_cor[0]),1)
        pg.display.flip()
        time.sleep(speed)
    return score,steps
# Replay saved generations: load each generation's chromosome from disk
# and run one visualised game with it.
#
# Fixes vs. original:
#  * the retry `while True` re-opened gen_j forever when it was missing
#    (opening gen_(j-1) in `except` never broke the loop) — now a single
#    try/except falls back to the previous generation once;
#  * a duplicated copy of the read/parse/play code after the loop called
#    file.read() on a closed file (ValueError) — removed.
structure = [28, 14, 7, 4, 0]
start = int(input('From= '))
end = int(input('To= '))
speed = float(input('Frame Speed (s)= '))
for j in range(start, end):
    try:
        file = open('weights_5/gen_' + str(j) + '.txt', 'r')
    except OSError:
        # generation file missing: fall back to the previous generation
        file = open('weights_5/gen_' + str(j - 1) + '.txt', 'r')
    weights = file.read()
    file.close()
    # the file holds a python-list-style dump: "[w0, w1, ..., wn]"
    refined_w = weights.split(',')
    chromosome = [None] * len(refined_w)
    for i in range(len(refined_w)):
        if i == 0:
            chromosome[i] = (float(refined_w[i].split('[')[1]))
        elif i == len(refined_w) - 1:
            chromosome[i] = (float(refined_w[i].split(']')[0]))
        else:
            chromosome[i] = (float(refined_w[i]))
    score, steps = game(structure, chromosome, speed)
    print('Gen= ', j, '-----Score= ', score)
|
import re

files_path = "files/"
""" Read phonemes """
# NOTE(review): file handle is never closed; consider a with-block.
f = open(files_path + 'phoneme_transcriptions.txt', 'r')
lines = f.readlines()
# chars = set()
""" spliting test, train anf validation data """
# First 2800 lines are train+val (first 10% of those is val); rest is test.
test_lines = lines[2800:]
lines = lines[:2800]
val_lines = lines[:int(len(lines)/10)]
train_lines = lines[int(len(lines)/10):]
def data_to_dict(line):
    """Convert transcription lines ("name<TAB>phonemes") into Tacotron-style
    "wav_path|phonemes" entries.

    Multi-character phoneme symbols are collapsed to single characters and
    "[n]" stress/index markers (plus trailing whitespace) are removed.

    Fixes vs. original: the loop variable no longer shadows the *line*
    parameter, the misspelled ``phenome`` temporary is gone, and the regex
    is a raw string. Behavior is unchanged.

    :param line: iterable of raw transcription lines (kept the original
        name for backward compatibility; it is actually a list of lines).
    :return: list of "wav_path|phonemes" strings.
    """
    phonemes = []
    for entry in line:
        entry = entry.replace('\n', '')
        name = entry.split('\t')[0]
        phoneme = entry.split('\t')[1]
        # collapse multi-character phoneme symbols into single new chars
        phoneme = (phoneme.replace('CH', 'c').replace('KH', 'k')
                   .replace('SH', 's').replace('SIL', 'i')
                   .replace('AH', 'h').replace('ZH', 'z').replace('AA', 'a'))
        # strip "[n]" markers and any whitespace right after them
        phoneme = re.sub(r"\[([0-9]+)\]\s*", '', phoneme)
        phonemes.append(
            "/mnt/hdd1/adibian/Tacotron2/files/wavs/" + name + ".wav|" + phoneme)
    return phonemes
# Convert each split and write one "wav_path|phonemes" line per example.
train_data = data_to_dict(train_lines)
test_data = data_to_dict(test_lines)
val_data = data_to_dict(val_lines)
with open(files_path + 'text_files/train_data.txt', 'w') as fp:
    for line in train_data:
        fp.write(line + '\n')
with open(files_path + 'text_files/test_data.txt', 'w') as fp:
    for line in test_data:
        fp.write(line + '\n')
with open(files_path + 'text_files/val_data.txt', 'w') as fp:
    for line in val_data:
        fp.write(line + '\n')
|
#!/usr/bin/python3
def roman_to_int(roman_string):
    """Convert a Roman numeral string to its integer value.

    Uses subtractive notation: a numeral smaller than its successor is
    subtracted (IV = 4, IX = 9, ...).

    Fixes vs. original: ``roman_string is None`` was dead (None already
    fails the str type check), the special single-character branch was
    redundant (the general loop handles it), and isinstance replaces the
    ``type(...) is not str`` comparison.

    :param roman_string: the numeral, e.g. "MCMXCIV"; non-str returns 0.
    :return: integer value; 0 for non-string or empty input.
    """
    if not isinstance(roman_string, str):
        return 0
    rom_num = {"I": 1, "V": 5, "X": 10, "L": 50, "C": 100, "D": 500, "M": 1000}
    value = 0
    for i in range(len(roman_string)):
        current = rom_num.get(roman_string[i])
        # last numeral, or not smaller than the next one: add it
        if (i + 1) == len(roman_string) or current >= rom_num.get(roman_string[i + 1]):
            value += current
        else:
            value -= current
    return value
|
import bottle, api_functions
import logging as log
from json import dumps # needed to return a top level JSON array

# Module-level setup: configure logging and create the singleton API facade
# that all route handlers below share.
api_functions.set_up_logs()
api = api_functions.API()
@bottle.get('/programs')
@bottle.get('/programs/<program_id>')
def get_programs(program_id=None):
    """Return all programs, or the single program matching *program_id*.

    404s when the requested program does not exist.
    """
    if program_id is None:
        # must set content type manually and use dumps for top level array
        bottle.response.content_type = 'application/json'
        return dumps(api.programs)
    else:
        try:
            return api.get_program_by_id(program_id)
        except KeyError:
            log.warning('Attempted to retrieve unknown program: ' + program_id)
            bottle.abort(404, 'The specified program does not exist.')
@bottle.post('/programs')
@bottle.post('/programs/<program_id>')
def add_or_update(program_id=None):
    """Create a new program (no id) or update an existing one (with id).

    Requires an application/json body; 400 on malformed input, 404 on an
    unknown program id.
    """
    request = bottle.request
    log.debug('Content-Type: ' + request.get_header('Content-Type'))
    log.debug('Data: ' + str(request.json))
    if request.headers.get('Content-Type') != 'application/json':
        log.warning('Attempted POST without application/json header')
        bottle.abort(400, 'The application/json header is required.')
    # NOTE(review): api.get_program_by_id may itself raise KeyError for an
    # unknown id (see get_programs) before this falsiness check runs —
    # confirm its contract.
    if not api.get_program_by_id(program_id) and program_id is not None:
        log.warning('Attempted to update unknown program: ' + program_id)
        bottle.abort(404, 'The specified program does not exist.')
    try:
        if api.add_or_update(request.json, program_id) is True:
            if program_id is not None:
                return api.get_program_by_id(program_id)
            else:
                return api.programs
        else:
            log.warning('Attempted to create invalid program state')
            bottle.abort(400, 'Invalid parameters were supplied.')
    except ValueError:
        log.warning('JSON formatting error in body of request')
        bottle.abort(400, 'JSON formatting is incorrect.')
@bottle.delete('/programs/<program_id>')
@bottle.delete('/programs/<program_id>/<run_day>')
@bottle.delete('/programs/<program_id>/<run_day>/<run_hour>')
@bottle.delete('/programs/<program_id>/<run_day>/<run_hour>/<run_minute>')
def delete(program_id=None, run_day=None, run_hour=None, run_minute=None):
    """Delete a whole program, or one run time ("day hour:minute") of it.

    404s on unknown program, unknown run time, or incomplete run-time
    parameters (day given without hour/minute).
    """
    run_time = None
    if run_day:
        try:
            # concatenation raises TypeError when hour or minute is None
            run_time = run_day + ' ' + run_hour + ':' + run_minute
        except TypeError:
            log.warning('Attempted to delete a run time using incomplete ' +
                        'parameters')
            bottle.abort(404, 'Incomplete run time parameters supplied.')
    try:
        api.delete(program_id, run_time)
        if not run_time:
            return api.programs
        else:
            return api.programs[program_id]
    except KeyError:
        if not run_time:
            log.warning('Attempted to delete unknown program: ' + program_id)
        bottle.abort(404, 'The specified program does not exist.')
    except ValueError:
        log.warning('Attempted to delete unknown run time from program ' +
                    program_id + ': ' + run_time)
        bottle.abort(404, 'The specified run time does not exist.')
@bottle.post('/run/program/<program_id>')
def run_program(program_id=None):
    """Start the program matching *program_id* immediately; 404 if unknown."""
    try:
        api.run_program(program = api.get_program_by_id(program_id))
        return api.status
    except KeyError:
        log.warning('Attempted to run unknown program: ' + program_id)
        bottle.abort(404, 'The specified program does not exist.')
@bottle.post('/run/manual/<zone>')
@bottle.post('/run/manual/<zone>/<time>')
def run_manual(zone, time=-1):
    """Run a single zone manually, optionally for *time*.

    Zones in the URL are 1-based; the api uses 0-based indices.
    NOTE(review): *time* arrives as a str from the URL but defaults to
    int -1 — api.run_manual must accept both; confirm.
    """
    if not zone.isdigit():
        log.warning('Zone index not numeric: ' + zone)
        bottle.abort(404, 'Zones designators are positive integers.')
    try:
        api.run_manual(int(zone) - 1, time)
        return api.status
    except IndexError:
        log.warning('Attempted to run unknown zone index: ' + zone)
        bottle.abort(404, 'The specified zone does not exist.')
@bottle.get('/status')
@bottle.get('/status/<designator>')
def get_status(designator=None):
    """Return the full status dict, or a single entry; 404 if unknown."""
    if designator is None:
        return api.status
    try:
        return {designator: api.status[designator]}
    except KeyError:
        log.warning('Attempted to retrieve invalid status: ' + designator)
        bottle.abort(404, 'The specified status designator does not exist.')
@bottle.post('/stop')
def stop():
    """Stop the sprinklers, reschedule the next run, and return status."""
    api.stop_sprinklers(reschedule=True)
    return api.status
# typically only called internally
@bottle.post('/reschedule')
def reschedule():
    """Schedule the next program run and return the resulting status."""
    api.schedule_next_program()
    return api.status
# typically only called internally
@bottle.post('/recalculate')
def recalculate():
    """Re-evaluate which program should run next and return the status."""
    api.choose_next_program()
    return api.status
|
from unittest2 import TestCase
import aux.logging as log
class LoggingTest(TestCase):
    """Smoke test: starting the logger and emitting one message at each
    severity level must not raise."""

    def test_start_logging(self):
        log.start()
        log.info("This is a info message.")
        log.debug("This is a debug message.")
        log.error("This is an error message.")
        log.warning("This is a warning message.")
        log.critical("This is a critical message.")
|
import requests
import psycopg2

# Load player (atleta) data from the Cartola FC public API into Postgres.
#
# Fix vs. original: conn.commit() and cur.close() were inside the for
# loop, so the cursor was closed after the FIRST row and every later
# cur.execute() failed. The commit/close/success message now run once,
# after all rows are inserted.
conn = psycopg2.connect(host="localhost", database="cartola_fc",
                        user="postgres", password="postgres")
print("Conectado ao banco")
cur = conn.cursor()
rowcount = cur.rowcount
url = "https://api.cartolafc.globo.com/atletas/mercado"
try:
    data = requests.get(url).json()
    print("Carregando as informações dos atletas")
    print("!!!!!Essa operação pode ser demorada!!!!!")
    print("Aguarde")
    for atleta in data['atletas']:
        # column order must match cartola_fc.tb_atletas
        result = [atleta['atleta_id'], atleta['nome'], atleta['slug'],
                  atleta['apelido'], atleta['foto'], atleta['clube_id'],
                  atleta['posicao_id']]
        cur.execute("""INSERT into cartola_fc.tb_atletas
                    VALUES
                    ( %s,
                    %s,
                    %s,
                    %s,
                    %s,
                    %s,
                    %s
                    )""", (result))
    conn.commit()
    cur.close()
    print("Sucesso! Inicializando próxima carga....")
except IOError as io:
    # NOTE(review): requests raises requests.exceptions.RequestException,
    # which is NOT an IOError subclass in all versions — network failures
    # may escape this handler; confirm the intended coverage.
    print("Erro")
/Users/karshenglee/anaconda3/lib/python3.6/linecache.py |
from meetingmaker import app
import uuid
import logging
import CONFIG

if __name__ == "__main__":
    # App is created above so that it will
    # exist whether this is 'main' or not
    # (e.g., if we are running in a CGI script)
    # A fresh random secret key per start invalidates sessions on restart.
    app.secret_key = str(uuid.uuid4())
    app.debug=CONFIG.DEBUG
    app.logger.setLevel(logging.DEBUG)
    # We run on localhost only if debugging,
    # otherwise accessible to world
    if CONFIG.DEBUG:
        # Reachable only from the same computer
        app.run(port=CONFIG.PORT)
    else:
        # Reachable from anywhere
        app.run(port=CONFIG.PORT,host="0.0.0.0")
def fname():
    """Prompt for and return the user's name.

    Fix vs. original: ``input(name)`` referenced the undefined variable
    *name* and raised NameError before reading anything; the intent was
    evidently to prompt for a name and capture it.
    """
    name = input("name")
    return name
from tools import gain, fini, strategie
from strat import Strat
from a_start import a_start
import random
import numpy as np
def tournoi(strat, iterations, game):
    """Round-robin tournament: every unordered pair of strategies in
    *strat* plays a battle royal of *iterations* rounds on *game*.

    Returns two rankings (name, value), both sorted descending: by number
    of pairwise wins, and by total accumulated score.

    Fixes vs. original:
      * the spawn-cell filter used ``(x,y) not in wallStates or goalStates``
        which, by operator precedence, is always true (goalStates is a
        non-empty list) — players could spawn on walls or goals;
      * half-scores were accumulated with ``res += res + x`` (doubling the
        running total each step) instead of ``res += x``.
    """
    nbLignes = game.spriteBuilder.rowsize
    nbColonnes = game.spriteBuilder.colsize
    wallStates = [w.get_rowcol() for w in game.layers['obstacle']]
    players = [o for o in game.layers['joueur']]
    goalStates = [o.get_rowcol() for o in game.layers['ramassable']]
    posPlayers = [o.get_rowcol() for o in game.layers['joueur']]
    nbPlayers = len(players)
    list_goal = [None]*nbPlayers
    # every unordered pair of strategies
    L = []
    for i in range(len(strat)):
        for j in range(i+1, len(strat)):
            L.append([strat[i], strat[j]])
    # ==== positionement des joueurs (neither on a wall nor on a goal)
    allowedStates = [(x, y) for x in range(nbLignes) for y in range(nbColonnes)
                     if (x, y) not in wallStates and (x, y) not in goalStates]
    for j in range(nbPlayers):
        x, y = random.choice(allowedStates)
        players[j].set_rowcol(x, y)
        game.mainiteration()
        posPlayers[j] = (x, y)
    list_Player_strat = [None]*nbPlayers
    classement_f = dict()   # wins per strategy name
    classement_f2 = dict()  # total score per strategy name
    for s in strat:
        classement_f[s.get_nom()] = 0
        classement_f2[s.get_nom()] = 0
    for a in range(len(L)):
        strategie(L[a], list_Player_strat)
        # ==== debut iteration
        liste_final = battle_royal(iterations, posPlayers, list_goal, game,
                                   list_Player_strat, False)
        # first half of the players runs L[a][0], second half L[a][1]
        res = 0
        res2 = 0
        for b in range(len(liste_final)):
            if b < (len(liste_final)/2):
                res += liste_final[b]
            else:
                res2 += liste_final[b]
        if res > res2:
            classement_f[L[a][0].get_nom()] += 1
        else:
            classement_f[L[a][1].get_nom()] += 1
        classement_f2[L[a][0].get_nom()] += res
        classement_f2[L[a][1].get_nom()] += res2
        print("score strategie", L[a][0].get_nom(), ":", res)
        print("score strategie", L[a][1].get_nom(), ":", res2)
    print("classement par victoire : ", [(i,classement_f[i]) for i in sorted(classement_f, key=classement_f.get, reverse=True)])
    print("classement par score : ", [(i,classement_f2[i]) for i in sorted(classement_f2, key=classement_f2.get, reverse=True)])
    return [(i,classement_f[i]) for i in sorted(classement_f, key=classement_f.get, reverse=True)], \
           [(i,classement_f2[i]) for i in sorted(classement_f2, key=classement_f2.get, reverse=True)]
def battle_royal(iterations, posPlayers, list_goal, game, strat, affichage=True):
    """Run *iterations* rounds where every player A*-paths to the goal its
    strategy picks; gains are accumulated per player.

    Returns the per-player gain array (numpy). Mutates *posPlayers* and
    *list_goal* in place as the rounds progress.
    """
    nbLignes = game.spriteBuilder.rowsize
    nbColonnes = game.spriteBuilder.colsize
    goalStates = [o.get_rowcol() for o in game.layers['ramassable']]
    wallStates = [w.get_rowcol() for w in game.layers['obstacle']]
    players = [o for o in game.layers['joueur']]
    nbPlayers = len(posPlayers)
    liste_gain = np.zeros(nbPlayers)
    for j in range(iterations):
        # ==== Initialisation des positions d'arriver
        # each player's strategy picks a goal, then A* plans a path to it
        chemin = [None]*nbPlayers
        for k in range(nbPlayers):
            list_goal[k] = goalStates[strat[k].get_goal()]
            chemin[k] = a_start(posPlayers[k], list_goal[k], nbLignes, nbColonnes, wallStates)
        # advance every player one step per tick until all paths are done
        while (not fini(chemin)):
            for i in range(len(chemin)):
                if len(chemin[i]) == 0:
                    continue
                next_row,next_col = chemin[i].pop(0)
                players[i].set_rowcol(next_row,next_col)
                game.mainiteration()
                col=next_col
                row=next_row
                posPlayers[i] = (row,col)
                if (row,col) == list_goal[i]:
                    game.mainiteration()
        # score this round and let the Strat class redistribute gains
        d = gain(posPlayers, liste_gain, goalStates)
        Strat.repartition(d)
    if affichage:
        for k in range(len(liste_gain)):
            print("resultat joueur",k)
            print("type de stratégie: ",strat[k].get_nom())
            print("score :", liste_gain[k])
            print("score moyen :", liste_gain[k]/iterations)
    return liste_gain
|
import os
from behave import *
from pteromyini.core.core import Core
@given("skip regression")
def step_impl(context):
if 'regression' not in os.getenv('run_type', 'sanity'):
Core.skip_next_step()
|
import re

# Extract the domain name (between '@' and '.com') of each address.
email = "shivani@datagrokr.com sam@company.com "
# Fix vs. original: raw string, and the dot is escaped — an unescaped '.'
# matches ANY character, so e.g. "x@fooXcom" would have matched too.
pattern = r"\w+@(\w+)\.com"
ans = re.findall(pattern , email)
print(ans)
|
#!/usr/bin/env python3
import sys
from pprint import pprint
def load_list(load_element):
    """Read a count from stdin, then load that many elements by calling
    *load_element* once per element; return them as a list."""
    size = int(input())
    return [load_element() for _ in range(size)]
def save_list(l, save_element):
    """Write the list's length, then each element via ``save_element``."""
    print(len(l))
    for item in l:
        save_element(item)
class Teren:
    """Rectangular terrain grid loaded from stdin as a list of int rows."""

    def __init__(self):
        # Populated by load(): a list of lists of ints.
        self.data = None

    def load(self):
        """Read the grid from stdin: a count-prefixed list of count-prefixed int rows."""
        load_int = lambda: int(input())
        load_list_of_int = lambda: load_list(load_int)
        self.data = load_list(load_list_of_int)

    def __str__(self):
        # BUG FIX: the original called save_list(...), which prints to stdout
        # and returns None, so str(teren) raised TypeError. Build and return
        # the same count-prefixed representation as a string instead.
        lines = [str(len(self.data))]
        for row in self.data:
            lines.append(str(len(row)))
            lines.extend(str(value) for value in row)
        return "\n".join(lines)
class Mapa:
    """Map header (player count, width, height) plus an approximate terrain."""

    def __init__(self):
        self.pocet_hracov = None        # number of players
        self.w = None                   # map width
        self.h = None                   # map height
        self.priblizny_teren = Teren()  # approximate terrain grid

    def load(self):
        """Read the map header and the terrain grid from stdin."""
        self.pocet_hracov = int(input())
        self.w = int(input())
        self.h = int(input())
        self.priblizny_teren.load()

    def __str__(self):
        # BUG FIX: the original referenced the bare name `priblizny_teren`
        # (NameError — the attribute is self.priblizny_teren) and, like the
        # original Teren.__str__, printed instead of returning a string.
        # Serialize the header plus the terrain into one returned string.
        lines = [str(self.pocet_hracov), str(self.w), str(self.h)]
        data = self.priblizny_teren.data
        lines.append(str(len(data)))
        for row in data:
            lines.append(str(len(row)))
            lines.extend(str(value) for value in row)
        return "\n".join(lines)
# Driver: load the map from stdin, then echo its dimensions and terrain
# rows to stderr for debugging.
mapa = Mapa()
mapa.load()
print('{} {}'.format(mapa.w, mapa.h), file=sys.stderr)
for row in mapa.priblizny_teren.data:
    print(''.join(str(cell) for cell in row), file=sys.stderr)
|
#coding:utf-8
import dht_cs
import time
from define import THREAD_NUMBER, WORKING_TIME, BOOTSTRAP_NODES
if __name__ == '__main__':
    # Spawn THREAD_NUMBER DHT crawler threads on consecutive ports starting
    # at 8000, bootstrap each against its configured bootstrap node, let them
    # run for WORKING_TIME seconds, then shut them all down. (Python 2 code.)
    thread_num = THREAD_NUMBER
    working_time = WORKING_TIME  # NOTE(review): unused; the sleep below reads WORKING_TIME directly
    threads = []
    for i in xrange(thread_num):
        i += 8000  # port number for this crawler
        # NOTE(review): '0, 0, 0, 0' looks like it should be the wildcard
        # address '0.0.0.0' — confirm against dht_cs.DHT's host handling.
        thread = dht_cs.DHT(host='0, 0, 0, 0', port=i)
        thread.start()
        i -= 8000  # restore the loop index to use as the BOOTSTRAP_NODES offset
        thread.bootstrap(BOOTSTRAP_NODES[i][0], BOOTSTRAP_NODES[i][1])
        threads.append(thread)
    time.sleep(WORKING_TIME)
    # Ordered shutdown: stop each crawler and wait for its thread to exit.
    for i in threads:
        print 'stop thread'
        i.stop()
        i.join()
        print 'finish thread'
|
# Standalone debug/smoke-test print.
print('AGAIN checking')
__author__ = 'Aaron J Masino'
import pandas as pd
import numpy as np
def missing_percents(df):
    """Return {column name: percentage of missing (null) values} for ``df``."""
    row_count = float(len(df))
    return {col: np.sum(pd.isnull(df[col])) / row_count * 100 for col in df.columns}
# TODO
# add an imputation method that generates a normal random sample based on the sample mean and stdev in the
# non-missing data |
import numpy as np
from grappa.utils import cartesian_product, sources_from_targets, eval_at_positions, number_geometries
def kernel_estimation(kspace, mask=None, af=4, ny=3, lamda=1e-6):
    """Estimate one GRAPPA interpolation kernel per undersampling geometry.

    Arguments:
        - kspace (ndarray): the undersampled k-space, zero-filled. Its shape
          must be ncoils x readout x phase.
        - mask: the sampling mask (currently required).
        - af: acceleration factor used to size the autocalibration region.
        - ny: kernel extent along the readout dimension.
        - lamda: Tikhonov regularization weight.

    Returns a list with one estimated kernel per geometry.
    """
    if mask is None:
        raise ValueError('For now mask has to be passed for kernel estimation')
    ac = _autocalibration_signal(kspace, mask, af)
    n_geometries = number_geometries(mask)
    ncoils = kspace.shape[0]
    grappa_kernels = []
    for i_geom in range(n_geometries):
        kernel = _geometry_kernel_estimation(
            ac, i_geom, ny, n_geometries, ncoils, lamda=lamda,
        )
        grappa_kernels.append(kernel)
    return grappa_kernels
def _geometry_kernel_estimation(ac, i_geom, ny=3, n_geometries=4, ncoils=15, lamda=1e-6):
    """Estimate the GRAPPA kernel for one undersampling geometry.

    Solves a Tikhonov-regularized least-squares fit of source k-space points
    onto target points inside the autocalibration region ``ac``.
    """
    # Target positions inside the AC region.
    target_positions = cartesian_product(
        # readout dimension
        np.arange(ny // 2, ac.shape[1] - ny + (ny // 2) + 1),
        # phase dimension
        np.arange(i_geom + 1, ac.shape[2] - n_geometries + i_geom),
    )
    target_values = []
    for coil in range(ncoils):
        flat_idx = np.ravel_multi_index(target_positions.T, ac[coil].shape)
        target_values.append(np.take(ac[coil], flat_idx))
    target_values = np.array(target_values)
    # For a given target position the source positions are always known;
    # the same mapping will be reused when applying the kernel.
    source_positions = sources_from_targets(
        target_positions,
        i_geom,
        n_geometries,
        ny,
        ncoils,
    )
    source_values = np.array(eval_at_positions(ac, source_positions))
    # Regularized normal-equation inversion, taken from
    # https://users.fmrib.ox.ac.uk/~mchiew/Teaching.html
    regularizer = lamda * np.linalg.norm(source_values) * np.eye(source_values.shape[0])
    gram_inverse = np.linalg.pinv(source_values @ source_values.conj().T + regularizer)
    return target_values @ source_values.conj().T @ gram_inverse
def _autocalibration_signal(kspace, mask, af=4):
center_fraction = (32//af) / 100
num_low_freqs = int(np.round(mask.shape[-1] * center_fraction))
ac_center = mask.shape[-1] // 2
ac_slice = slice(ac_center - num_low_freqs//2, ac_center + num_low_freqs//2)
ac = kspace[..., ac_slice]
return ac
|
from random import randint
# Odd-or-even game (Portuguese UI): the player picks a number and bets on
# whether player + computer will sum to an even ("P") or odd ("I") total.
# The loop continues while the player keeps winning; a loss ends the game.
print('Jogo do PAR ou ÍMPAR!!!')
v=0  # number of rounds won by the player
while True:
    jogador = int(input('Faça sua jogada: '))
    computador = randint(0, 10)
    total = computador + jogador
    # Re-prompt until the first character of the answer is P (even) or I (odd).
    # NOTE(review): an empty answer would raise IndexError on [0] — confirm
    # whether that input path needs guarding.
    tipo = ' '
    while tipo not in 'PI':
        tipo = str(input('Você quer par ou impar: ')).strip().upper()[0]
    print(f'Você escolheu {jogador} e o computador {computador}. Total = {total}')
    print('Deu par' if total %2 == 0 else 'Deu impar' )
    if tipo == 'P':
        if total%2 == 0:
            print('Você venceu')
            v+=1
        else:
            print('Você perdeu!')
            break
    elif tipo == 'I':
        if total%2 == 1:
            print('Você venceu!')
            v+=1
        else:
            print('Você perdeu')
            break
    print('Vamos jogar novamente?')
print(f'GAME OVER, você venceu {v} vezes!')
|
# módulo destinado a importar los tests
import libreria as lib
import unittest
import os
import preprocesamiento
# Expected results for each library query exercised by the test case below.
RESULT1 = set(range(19))
RESULT2 = {0, 1, 4, 6, 8, 9, 10, 11, 14, 15, 17, 18}
RESULT3 = {0, 1, 4, 6, 7, 8, 9, 10, 11, 14, 15, 17, 18}
RESULT4 = {0}
RESULT5 = {0, 1}
RESULT6 = ['Musical', 'Comedia', 'Ciencia ficción', 'Acción']
RESULT7 = ['Belén Saldías, Piratas del caribe, Matrix']
RESULT81 = ['Jaime Castro, Braveheart, Pulp fiction']
RESULT82 = [
    'Ignacio Hermosilla, Titanic, El exorcista',
    'Fernando Pieressa, El exorcista',
]
RESULT91 = ['Hernán Valdivieso']
RESULT92 = ['Enzo Tamburini']
class TestearLibreria(unittest.TestCase):
    """Unit tests for libreria.procesar_queries: each test runs one (possibly
    nested) query expression and compares the result against the RESULT*
    constants defined above.

    As seen at: https://github.com/IIC2233/contenidos/blob/master/
    semana-04/03-testing.ipynb"""
    def setUp(self):
        # Map each query name to its expected result for easy lookup.
        self.results = {"load_database": RESULT1,
                        "filter_by_date": RESULT2,
                        "popular_movies": RESULT3,
                        "best_comments": RESULT4,
                        "take_movie_while": RESULT5,
                        "popular_genre": RESULT6,
                        "popular_actors": RESULT7,
                        "highest_paid_actors1": RESULT81,
                        "highest_paid_actors2": RESULT82,
                        "successful_actors1": RESULT91,
                        "successful_actors2": RESULT92
                        }
    def tearDown(self):
        # Remove the temp file some tests create so runs stay independent.
        if os.path.isfile("clean_reviews.csv"):
            os.remove("clean_reviews.csv")
    def test_load_database(self):
        """Loading the database yields all known movie ids."""
        result = lib.procesar_queries(["load_database"])
        movies = {int(movie.id) for movie in result}
        self.assertEqual(movies, self.results["load_database"])
    def test_filter_by_date(self):
        """Popular movies filtered by year 2009."""
        result = lib.procesar_queries(["filter_by_date", ["popular_movies",
                                                         ["load_database"],
                                                         50, 100, "RT"],
                                       2009])
        movies = {int(movie.id) for movie in result}
        self.assertEqual(movies, self.results["filter_by_date"])
    def test_popular_movies(self):
        """Movies with RT score between 50 and 100."""
        result = lib.procesar_queries(["popular_movies", ["load_database"],
                                       50, 100, "RT"])
        movies = {int(movie.id) for movie in result}
        self.assertEqual(movies, self.results["popular_movies"])
    def test_best_comments(self):
        """Best-commented movie among those taken while RT > 30."""
        # Needs the cleaned reviews file; tearDown removes it afterwards.
        preprocesamiento.reviews_writer("clean_reviews.csv",
                                        "TestingDatabase/reviews.csv")
        result = lib.procesar_queries(["best_comments", ["take_movie_while",
                                                         ["load_database"],
                                                         "RT", ">", 30], 1])
        movies = {int(movie.id) for movie in result}
        self.assertEqual(movies, self.results["best_comments"])
    def test_take_movie_while(self):
        """Take movies while their date is before 2005."""
        result = lib.procesar_queries(["take_movie_while", ["load_database"],
                                       "date", "<", 2005])
        movies = {int(movie.id) for movie in result}
        self.assertEqual(movies, self.results["take_movie_while"])
    def test_popular_genre(self):
        """Genres ranked by RT popularity for movies after 2000."""
        result = lib.procesar_queries(["popular_genre", ["filter_by_date",
                                                         ["popular_movies",
                                                          ["load_database"],
                                                          50, 100, "RT"],
                                                         2000, False], "RT"])
        self.assertIsInstance(result, list)
        self.assertEqual(result, self.results["popular_genre"])
    def test_popular_actors(self):
        """Actors appearing in popular (RT 1-10) movies."""
        result = lib.procesar_queries(["popular_actors", ["load_database"], 1,
                                       10, "RT"])
        self.assertIsInstance(result, list)
        self.assertEqual(result, self.results["popular_actors"])
    def test_highest_paid_actors1(self):
        """Highest paid actors with the default count."""
        result = lib.procesar_queries(["highest_paid_actors",
                                       ["filter_by_date", ["popular_movies",
                                                           ["load_database"],
                                                           50, 100, "RT"],
                                        2000]])
        self.assertIsInstance(result, list)
        self.assertEqual(result, self.results["highest_paid_actors1"])
    def test_highest_paid_actors2(self):
        """Highest paid actors limited to the top 2."""
        result = lib.procesar_queries(["highest_paid_actors",
                                       ["filter_by_date", ["popular_movies",
                                                           ["load_database"],
                                                           55, 95, "RT"],
                                        2002], 2])
        self.assertIsInstance(result, list)
        self.assertEqual(result, self.results["highest_paid_actors2"])
    def test_successfulll_actors1(self):
        """Successful actors for popular movies strictly after 2010."""
        result = lib.procesar_queries(
            ["successful_actors", ["filter_by_date", ["popular_movies",
                                                      ["load_database"], 70,
                                                      90, "RT"], 2010, True]])
        self.assertIsInstance(result, list)
        self.assertEqual(result, self.results["successful_actors1"])
    def test_successfulll_actors2(self):
        """Successful actors for popular movies from 2001 onwards."""
        result = lib.procesar_queries(
            ["successful_actors", ["filter_by_date", ["popular_movies",
                                                      ["load_database"], 60,
                                                      100, "RT"], 2001]])
        self.assertIsInstance(result, list)
        self.assertEqual(result, self.results["successful_actors2"])
# Build and run the suite immediately on import (script-style execution).
suite = unittest.TestLoader().loadTestsFromTestCase(TestearLibreria)
unittest.TextTestRunner().run(suite)
|
from django import forms
from .models import User
class SignUpForm(forms.ModelForm):
    """Registration form exposing the core User fields.

    NOTE(review): 'password' is rendered as a plain model field here;
    confirm whether a PasswordInput widget / hashed handling is applied
    elsewhere.
    """
    class Meta:
        model = User
        fields = [
            "login",
            "email",
            "password",
            "first_name",
            "last_name",
        ]
class SignInForm(forms.ModelForm):
    """Login form: credentials only (login + password)."""
    class Meta:
        model = User
        fields = [
            "login",
            "password",
        ]
class EditProfileForm(forms.ModelForm):
    """Profile-editing form: display name fields and the profile photo."""
    class Meta:
        model = User
        fields = [
            "first_name",
            "last_name",
            "profile_photo",
        ]
from typing import Dict, Union
# Type alias: maps a movie id to its attribute dict (attribute name -> str or int).
MovieMapping = Dict[int, Dict[str, Union[str, int]]]
|
import numpy as np
import matplotlib.pyplot as plt
import datetime, os, csv
g = 9.81 # Gravity
rho = 1.2 # Density of air at sea level
coeffDrag = 0.5 # Drag Coefficient
initialVelocity = 10 # m/s
initialAngle = np.radians(30) # degrees
time = 40 # s
tstep = 0.01 # time step
# M80 FMJ Ball bullet geometry (7.62x51 mm NATO).
r = 0.00381 # m Bullet radius (7.62x51 mm NATO) 0.3085in
ogiveLength = 0.0194818 # m 0.767in
ogiveRadius = 0.0762 # m 3.00in
meplatDiameter = 0.001524 # m 0.060in
boatTailLength = 0.004445 # m .175in
barrelTwistRate = 12 # Inches/rotation
boatTailAngle = np.radians(9.0) # deg
oal = 0.028956 # m 1.140in
# NOTE(review): unit mismatch — 147 gr is 9.52544 grams = 0.00952544 kg, but
# the value is used as kilograms below. Confirm the intended unit.
bulletMass = 9.52544 # kg 9.52544g = 147gr
# Assume homogenous density and approximate mass distribution with cones and cylinders
areaSection = np.pi*r**2
# NOTE(review): for a boat-tail of angle θ the base radius is usually
# r - boatTailLength*tan(θ); dividing by tan(θ) here yields a negative
# BaseRadius with these numbers — confirm the geometry.
BaseRadius = r - boatTailLength/np.tan(boatTailAngle)
volumeBaseConeSmall = np.pi*BaseRadius**3*np.tan(boatTailAngle)/3
volumeBaseConeBig = np.pi*r**3*np.tan(boatTailAngle)/3
volumeOgive = np.pi*r**2*ogiveLength/3
volumeBoatTail = (np.pi*np.tan(boatTailAngle)/3)*(r**3 - BaseRadius**3)
volumeBearing = np.pi*r**2*(oal - boatTailLength - ogiveLength)
volumeTotal = volumeOgive + volumeBoatTail + volumeBearing
density = bulletMass/volumeTotal
# Component masses from the shared density.
massOgive = volumeOgive * density
massBoatTail = volumeBoatTail * density
massBearing = volumeBearing * density
massBaseConeSmall = volumeBaseConeSmall*density
massBaseConeBig = volumeBaseConeBig*density
# Axial (spin-axis) moments of inertia; boat tail = big cone minus small cone.
IogiveInline = 3*massOgive*r**2/10
IbtConeSmallInline = 3*massBaseConeSmall*BaseRadius**2/10
IbtConeBigInline = 3*massBaseConeBig*r**2/10
IboatTailInline = IbtConeBigInline - IbtConeSmallInline
IbearingInline = massBearing*r**2/2
Inertia_Inline = IogiveInline + IboatTailInline + IbearingInline
# Transverse (off-axis) moments of inertia for the same components.
IbearingOffAxis = massBearing*(r**2/4 + ((oal - ogiveLength - boatTailLength)**2)/12)
IbtConeSmallOffAxis = massBaseConeSmall*((BaseRadius*np.tan(boatTailAngle))**2/10 + 3*BaseRadius**2/20)
IbtConeBigOffAxis = massBaseConeBig*((r*np.tan(boatTailAngle))**2/10 + 3*r**2/20)
InertiaBoatTailOffAxis = IbtConeBigOffAxis - IbtConeSmallOffAxis
InertiaOgiveOffAxis = massOgive*(ogiveLength**2/10 + 3*r**2/20)
Inertia_OffAxis = IbearingOffAxis + InertiaBoatTailOffAxis + InertiaOgiveOffAxis
# Initial spin rate from the barrel twist rate and muzzle velocity.
initialSpin = (12/barrelTwistRate)*(initialVelocity*(2*np.pi*3.28084))
def get_filename(prefix, suffix, base_path):
    '''
    Return a unique file name inside base_path.

    The name is "<base_path><prefix>_<yymmdd_HHMMSS><suffix>"; if that file
    already exists, "_<n>" is appended (n = 1, 2, ...) until the name is free.

    prefix = Homework assignment name
    suffix = Extension
    base_path = Location of log file
    '''
    stamp = datetime.datetime.now().strftime("%y%m%d_%H%M%S")
    base = base_path + prefix + "_" + stamp
    candidate = base + suffix
    counter = 1
    # Keep appending an increasing counter until the name is unused.
    while os.path.isfile(candidate):
        candidate = base + "_" + str(counter) + suffix
        counter += 1
    return candidate
def ode45py(func, x, y, st_sz=1.0e-4, tol=1.0e-6, iter_lim=50000):
    '''
    Numerical Methods: Differential Equations, Initial Value Problems
    4th-order / 5th-order Runge-Kutta Method (Dormand-Prince pair)
    Includes adaptive step size adjustment
    Imitates MATLAB ode45 functionality and output

    func     -- callable f(t, y) returning dy/dt as an array
    x        -- array whose first/last entries give the integration interval
    y        -- initial state vector
    st_sz    -- initial step size
    tol      -- per-step error tolerance used for step acceptance/adaptation
    iter_lim -- maximum number of attempted steps

    Returns (X, Y): arrays of accepted t-values and states.
    '''
    # Dormand-Prince coefficients for RK algorithm -
    a1 = 0.2; a2 = 0.3; a3 = 0.8; a4 = 8/9; a5 = 1.0; a6 = 1.0
    c0 = 35/384; c2 = 500/1113; c3 = 125/192; c4 = -2187/6784; c5=11/84
    d0 = 5179/57600; d2 = 7571/16695; d3 = 393/640; d4 = -92097/339200; d5 = 187/2100; d6 = 1/40
    b10 = 0.2
    b20 = 0.075; b21 = 0.225
    b30 = 44/45; b31 = -56/15; b32 = 32/9
    b40 = 19372/6561; b41 = -25360/2187; b42 = 64448/6561; b43 = -212/729
    b50 = 9017/3168; b51 = -355/33; b52 = 46732/5247; b53 = 49/176; b54 = -5103/18656
    b60 = 35/384; b62 = 500/1113; b63 = 125/192; b64 = -2187/6784; b65 = 11/84
    # Store initial values
    x_f = x[-1]
    x_n = x[0]
    # y_n = y
    # Initialize variables
    X = []
    Y = []
    # Add the first set of known conditions
    X.append(x_n)
    Y.append(y)
    # Set up to break the for loop at the end
    stopper = 0 # Integration stopper, 0 = off, 1 = on
    # Initialize a k0 to start with the step size
    k0 = st_sz * func(x_n, y)
    # Generate the RK coefficients
    for i in range(iter_lim):
        k1 = st_sz * func(x_n + a1*st_sz, y + b10*k0)
        k2 = st_sz * func(x_n + a2*st_sz, y + b20*k0 + b21*k1)
        k3 = st_sz * func(x_n + a3*st_sz, y + b30*k0 + b31*k1 + b32*k2)
        k4 = st_sz * func(x_n + a4*st_sz, y + b40*k0 + b41*k1 + b42*k2 + b43*k3)
        k5 = st_sz * func(x_n + a5*st_sz, y + b50*k0 + b51*k1 + b52*k2 + b53*k3 + b54*k4)
        k6 = st_sz * func(x_n + a6*st_sz, y + b60*k0 + b62*k2 + b63*k3 + b64*k4 + b65*k5)
        # Getting to the slope is the whole point of this mess
        dy = c0*k0 + c2*k2 + c3*k3 + c4*k4 + c5*k5
        # Determine the estimated change in slope by comparing the output coefficients for each RK coefficient
        E = (c0 - d0)*k0 + (c2 - d2)*k2 + (c3 - d3)*k3 + (c4 - d4)*k4 + (c5 - d5)*k5 - d6*k6
        # Find the estimated error using a sum of squares method
        e = np.sqrt(np.sum(E**2)/len(y))
        # Candidate next step size from the standard 5th-order error formula.
        # NOTE(review): if e happens to be exactly 0 this divides by zero —
        # confirm whether a floor on e is needed.
        hNext = 0.9*st_sz*(tol/e)**0.2
        pcnt = (i/iter_lim)*100
        psolv = (x_n/x_f)*100
        print('Correction limit : {:1.2f}# x-domain solved: {:1.2f}#'.format(pcnt, psolv))
        # If approximated error is within tolerance, accept this integration step and move on
        if e <= tol:
            # Store the new result
            # NOTE(review): reassigning the for-loop variable has no effect on
            # the iteration count in Python; this line looks vestigial.
            i = i-1
            y = y + dy
            # Increment the x-value by the new step size
            x_n = x_n + st_sz
            # Add the new values into the output vector
            X.append(x_n)
            Y.append(y)
            # Check to break the loop when we have reached the desired x-value
            if stopper == 1: break # Reached end of x-range
            # Set limits on how much the next step size can increase to avoid missing data points
            if abs(hNext) > 10*abs(st_sz): hNext = 10*st_sz
            # Determine if the algorithm has reached the end of the dataset
            if (st_sz > 0.0) == ((x_n + hNext) >= x_f):
                hNext = x_f - x_n
                # Sets the break condition for the next loop iteration
                stopper = 1
                print('Success! Reached the end of the data set.')
            # Setting k0 to k6 * (next step size) / (current step size) forces the algorithm to use the 4th order formula for the next step
            k0 = k6*hNext/st_sz
        else:
            # The error estimate is outside the required threshold to move on, we need to redo the calculation with a smaller step size
            if abs(hNext) < abs(st_sz)*0.1 : hNext = st_sz*0.1
            # Set up k0 to go through the 5th order RK method on the next iteration because the error was no good.
            k0 = k0*hNext/st_sz
        # Set the next iteration step size
        st_sz = hNext
    # Returns the arrays for x and y values
    return np.array(X), np.array(Y)
def bulletSim():
    """Simulate bullet flight with the adaptive RK45 integrator and log results.

    Integrates an 8-element state vector
    [Q1, QBwx, QBwy, QBwz, U1, UBwx, UBwy, UBwz] (displacement, body angles,
    velocity, body rates) using the module-level mass/inertia constants,
    writes the trajectory to a CSV log, and saves three PNG plots
    (displacement, velocity, trajectory).
    """
    def fcn(t, x):
        # State derivative for the simplified point-mass + spin model.
        fcn = np.zeros(8)
        fcn[0] = x[4] # Velocity
        fcn[1] = x[5] # UBwx
        fcn[2] = x[6] # UBwy
        fcn[3] = x[7] # UBwz
        # Deceleration along the flight path: gravity component plus drag.
        fcn[4] = -g*np.sin(initialAngle) - coeffDrag*x[4]/bulletMass
        fcn[5] = -coeffDrag*x[5]/Inertia_Inline
        # Gyroscopic coupling of the transverse body rates.
        fcn[6] = (1-Inertia_Inline/Inertia_OffAxis)*x[5]*x[7]
        fcn[7] = (-1*Inertia_Inline/Inertia_OffAxis)*x[5]*x[6]
        return fcn
    # Array for start and ending times
    t = np.array([0, 100])
    # Initial state vector.
    # BUG FIX: np.array([0]*8) created an integer array, so the fractional
    # initial conditions below (initialAngle, 1.7772815, 0.01) were silently
    # truncated to 0 or 1 on assignment. Use a float array instead.
    x = np.zeros(8)
    x[0] = 0 # Q1
    x[1] = 0 # rad QBwx
    x[2] = 0 # rad QBwy
    x[3] = initialAngle # rad QBwz
    x[4] = 860 # m/s U1
    x[5] = 1.7772815 # rad/s UBwx
    x[6] = 0.01 # rad/s UBwy
    x[7] = 0.01 # rad/s UBwz
    # Feed ode45py almost exactly like you would in MATLAB
    X, Y = ode45py(fcn, t, x, iter_lim=2000000)
    # Dump time + full state per accepted step to a uniquely named CSV.
    dataFile = get_filename('trajectoryData','.csv','./log_files/')
    heading = ['time(s)', 'Q1', 'QBwx', 'QBwy', 'QBwz', 'U1', 'UBwx', 'UBwy', 'UBwz']
    with open(dataFile, 'a', newline='\n') as myFile:
        writer = csv.writer(myFile)
        writer.writerow(heading)
        for key, val in enumerate(X):
            dataOut = []
            dataOut.append(val)
            for i in Y[key]:
                dataOut.append(i)
            writer.writerow(dataOut)
    plotFile1 = get_filename('timeAndDisplacement','.png','./log_files/')
    # Displacement data is stored in the first columns of the state history.
    plt.plot(X, Y[:,0])
    plt.xlabel('Time (s)')
    plt.ylabel('Displacement (m)')
    plt.title('Displacement Data')
    plt.savefig(plotFile1, bbox_inches='tight')
    plt.show()
    plotFile2 = get_filename('velocityAndTime','.png','./log_files/')
    plt.plot(X, Y[:,4])
    plt.xlabel('Time (s)')
    plt.ylabel('Velocity (m/s)')
    plt.title('Velocity Data')
    plt.savefig(plotFile2, bbox_inches='tight')
    plt.show()
    plotFile3 = get_filename('trajectory','.png','./log_files/')
    # Project the along-path displacement onto fixed launch-angle axes.
    pos_x = []
    pos_y = []
    for i in Y[:,0]:
        pos_x.append(i*np.cos(initialAngle))
        pos_y.append(i*np.sin(initialAngle))
    plt.plot(pos_x, pos_y)
    plt.xlabel('Displacement X (m)')
    plt.ylabel('Displacement Y (m)')
    plt.title('Trajectory Data')
    plt.savefig(plotFile3, bbox_inches='tight')
    plt.show()
bulletSim()
from random import randint

# Simple addition quiz: keep asking until the player answers correctly.
print('It\'s quizzing time!!!')
while True:
    num1 = randint(1, 10)
    num2 = randint(1, 10)
    summ_num = num1 + num2
    prompt = f'So today\'s question is...\nGive answer for this mathematical expression: {num1} + {num2}\nYour answer: '
    quiz_quess = int(input(prompt))
    if quiz_quess == summ_num:
        print('Yep, that\'s the right one!) Congratulation!')
        break
    print(f'You were so close(\nRight answer was {summ_num}\nMaybe you\'ll get lucky another time?')
# Compute tip + tax totals at three successive tip levels (15%, 18%, 20%),
# prompting for the meal cost before each computation — same prompts and
# output as the original three copy-pasted sections.
SALES_TAX_RATE = 0.095
for TIP_RATE in (0.15, 0.18, 0.20):
    cost = float(input("Enter meal cost please:"))
    tip = cost * TIP_RATE
    tax = cost * SALES_TAX_RATE
    total = cost + tip + tax
    print("tip:", round(tip, 2))
    print("tax:", round(tax, 2))
    print("total:", round(total, 2))
# Facebook OAuth app credentials (left blank in source control).
# NOTE(review): consider loading these from environment/deployment config
# instead of hard-coding them in the module.
FACEBOOK_APP_ID = ""
FACEBOOK_APP_SECRET = ""
import os
from google.appengine.ext.webapp import template
import json
import time
#verify user email
from random import randint
import urllib
import urllib2
from google.appengine.ext import db
from google.appengine.api import users,mail
import webapp2
from webapp2_extras import sessions
#Facebook
import facebook
# files from the project
from models import Member,Country,Entity,Committee,Experience,Role,Year
from utilities import utility,Message
class BaseHandler(webapp2.RequestHandler):
    """Common base for all request handlers: session management, template
    rendering helpers, and access to the currently logged-in Member.
    (Python 2 / Google App Engine webapp2 code.)"""
    # Shared Message instance surfaced in templates when it has content.
    message = Message()
    def dispatch(self):
        """Wrap dispatch so the session store is loaded before the request
        and persisted afterwards, even on error."""
        # Get a session store for this request.
        self.session_store = sessions.get_store(request=self.request)
        try:
            # Dispatch the request.
            webapp2.RequestHandler.dispatch(self)
        finally:
            # Save all sessions.
            self.session_store.save_sessions(self.response)
    @webapp2.cached_property
    def session(self):
        # Returns a session using the default cookie key.
        return self.session_store.get_session()
    def show_error(self,message):
        """Render error.html with the given error message."""
        path = os.path.join(os.path.dirname(__file__),'error.html')
        self.response.out.write(template.render(path,{'error':message}))
    def show_message(self,message):
        """Render message.html with the given message."""
        path = os.path.join(os.path.dirname(__file__),'message.html')
        self.response.out.write(template.render(path,{'message':message}))
    def render(self,page,param):
        """Return template '<page>.html' rendered with `param` as a string."""
        path = os.path.join(os.path.dirname(__file__),page + '.html')
        return template.render(path,param)
    def show_page(self,page,param=None):
        """Write template '<page>.html' to the response; falls back to the
        standard render parameters when no context is given."""
        if param:
            path = os.path.join(os.path.dirname(__file__),page + '.html')
            self.response.out.write(template.render(path,param))
        else:
            path = os.path.join(os.path.dirname(__file__),page + '.html')
            self.response.out.write(template.render(path,self.render_parameters()))
    def current_member(self):
        """Return the Member matching the session's member_id (or None)."""
        member_id = self.session.get('member_id')
        member = Member.gql("WHERE id = :member_id",member_id= member_id)
        return member.get()
    def render_parameters(self):
        """Base template context: the current member plus any pending message."""
        member = self.current_member()
        render_paramaters={}
        render_paramaters['member'] = member
        if self.message.content:
            render_paramaters['message'] = self.message
        return render_paramaters
    def url_request(self, url, params = {}, method="POST"):
        """Build a urllib2.Request for `url` carrying `params`.

        NOTE(review): the branches look swapped — the GET branch sends the
        params as a request body while the POST branch appends them to the
        query string. Confirm against callers before changing.
        """
        if method == "GET":
            return urllib2.Request(url, data=urllib.urlencode(params))
        else:
            return urllib2.Request(url + "?" + urllib.urlencode(params))
    def show_entity(self,entity,message=None): # Shows the Committee active profile page.
        """Render the entity page, including its active committee if one exists."""
        render_parameters = self.render_parameters()
        render_parameters["entity"] = entity
        # Find the active committee profile which is shown by default
        active_committee = Committee.all().get() #entity.get_active_committee()
        if active_committee:
            # Extract Roles and send them to the form
            render_parameters["committee"] = active_committee
            #check if member is already in this profile
            render_parameters["is_member"] = active_committee.is_member(self.current_member())
            # Send current members list
            render_parameters['committee_member_list'] = active_committee.get_members()
        self.show_page('entity', render_parameters)
class MainHandler(BaseHandler):
    """Facebook-canvas entry point.

    GET redirects to the canvas app; POST decodes the Facebook
    signed_request, creates/loads the Member, and routes the user to the
    right page depending on whether their AIESEC email is verified.
    """
    def get(self):
        self.redirect("https://apps.facebook.com/iamanaiesecer")
    def post(self):
        # Decode and verify the signed_request Facebook POSTs to the canvas.
        signed_request = utility.parse_signed_request(self.request.get('signed_request'),FACEBOOK_APP_SECRET)
        if signed_request:
            if signed_request.has_key('user_id'):
                # if user is Authorized and has user id, then capture new information
                # and update if they exist if not put them into datastore.
                self.session['oauth_token'] = signed_request['oauth_token']
                self.session['member_id'] = signed_request['user_id']
                member_id = signed_request['user_id']
                member = Member.gql("WHERE id = :member_id",member_id= member_id).get()
                render_parameters = {}
                # if not member add member.
                graph = facebook.GraphAPI(signed_request['oauth_token'])
                me = graph.get_object("me")
                member = self.current_member()
                if not member :
                    # First visit: persist a new Member from the Graph profile.
                    member = Member(key_name= me['id'])
                    member.id = me['id']
                    member.username = me['username']
                    member.first_name = me['first_name']
                    member.last_name = me['last_name']
                    member.name = me['name']
                    member.email = db.Email(me['email']) # It might be null if user did not provide an email
                    member.country = Country.country_name(signed_request['user']['country'])
                    member.put()
                if member.aiesec_email: #Already an AIESECer!
                    #Get current member and see if she already has a home_entity
                    render_parameters = {'member':member}
                    if member.home_committee: # Member already has a home_entity
                        param = self.render_parameters()
                        param["body_function"] = "committee_body('{0}');".format(member.home_committee.key())
                        self.show_page("base", param)
                    else: # Member is not in a commmittee
                        param = self.render_parameters()
                        param["body_function"] = "entity_welcome_body();"
                        self.show_page("base", param)
                else: #Member is not verified to be an AIESECer yet
                    # 1 - First method, to see if user email is an aiesec.net email
                    if(member.email.find("@aiesec.net")>-1):
                        render_parameters['aiesec_email_found'] = member.email
                    # 2 - Second method of getting aiesec email, user authorize the app to access
                    # if user is already signed in then get his email
                    if users.get_current_user():
                        current_user = users.get_current_user()
                        url_linktext = ''
                        current_email = str(current_user.email())
                        if current_email.find("@aiesec.net")>-1:
                            #keep the email in a login_aiesec_email and pass to form
                            render_parameters['login_aiesec_email'] = current_email
                        else:
                            #Ask for logout
                            url_linktext = 'Logout from ' + current_email
                            url = users.create_logout_url(self.request.uri)
                            render_parameters['url'] = url
                            render_parameters['url_linktext'] = url_linktext
                    else:
                        url = users.create_login_url(self.request.uri)
                        render_parameters['url'] = url
                        url_linktext = 'Login to aiesec.net account'
                        render_parameters['url_linktext'] = url_linktext
                    # 3 - Third method: Sending verification code
                    # NOTE(review): str.find returns -1 (truthy) when absent and
                    # 0 (falsy) when the email is at the start — this condition
                    # looks inverted; `.find(...) > -1` was probably intended.
                    if member.temp_email_code:
                        if member.temp_email_code.find("@aiesec.net"):
                            render_parameters['code_sent_email'] = member.temp_email_code.split(';')[0]
                    render_parameters['member'] = member
                    self.show_page(r"register/welcome", render_parameters)
            else:
                #if user is not signed in the application he will be prompted to sign in
                self.show_page(r'register/login', {})
class RegisterGoogleLogin(BaseHandler):
    """Second verification method: redirect the user to a Google login for
    their aiesec.net account. GET and POST behave identically."""
    def _redirect_to_google_login(self):
        # Build the login URL that returns to the current request URI.
        login_url = users.create_login_url(self.request.uri)
        self.redirect(login_url)
    def get(self):
        self._redirect_to_google_login()
    def post(self):
        self._redirect_to_google_login()
class EntityHandler(BaseHandler):
    """Stores the posted entity as the current member's home entity and
    renders that entity's page; falls back to the start-over page when no
    entity key was posted."""
    def post(self):
        entity_key = self.request.get('entity_key')
        if not entity_key:
            self.show_page("start_over")
            return
        member = self.current_member()
        entity = db.get(entity_key)
        member.home_entity = entity
        member.put()
        self.show_entity(entity)
class EntityCreateCommittee(BaseHandler):
    """Creates a new current Committee under the posted entity and shows the
    entity page; falls back to the start-over page without a name."""
    def post(self):
        if self.request.get('committee_name'):
            committee_name = self.request.get('committee_name')
            entity = db.get(self.request.get('entity_key'))
            # Key combines entity and committee names to keep it unique per entity.
            committee = Committee(parent=entity,key_name= (entity.name + " " + committee_name))
            committee.name = committee_name
            committee.is_current = True
            # NOTE: 'entiti' is the (misspelled) reference property name used
            # consistently by the Committee model queries elsewhere in this file.
            committee.entiti = entity
            committee.put()
            self.show_entity(entity)
        else:
            self.show_page("start_over")
class CommitteeExperience(BaseHandler):
    """Join/leave handler for a member's Experience in a committee.

    action=join creates a new Experience linking the current member to the
    committee; action=leave deletes the existing one. Always ends by showing
    the start-over page.
    """
    def post(self):
        if self.request.get('committee_key'):
            committee = db.get(self.request.get('committee_key'))
            if self.request.get('action') == "join":
                xp = Experience()
                xp.member = self.current_member()
                xp.committee = committee
                xp.put()
            else:
                if self.request.get('action') == "leave":
                    member = self.current_member()
                    xp= Experience.gql("where member = :member_key and committee = :committee_key",
                                       member_key = member.key(),committee_key = committee.key()).get()
                    xp.delete()
        #self.show_entity(committee.entity)
        self.show_page("start_over")
class RPCHandler(BaseHandler):
def member_body(self,*args):
param = self.render_parameters()
member_key = args[0]
member = db.get(member_key)
param["member"] = member
#param["experience_part"] = "experience_part('{0}');".format(member_key)
return self.render("member_body", param)
def committee_experience_approve(self,*args):
experience_key = args[0]
experience = db.get(experience_key)
member_id = experience.member.id
if args[1]:
experience.approved = True
experience.put()
else:
experience.delete()
return member_id
def committee_experience_leave(self,*args):
committee_key = args[0]
committee = db.get(committee_key)
member = self.current_member()
experience = Experience.get_experience(member, committee)
if experience:
experience.delete()
time.sleep(2)
return self.committee_experience_part(*args)
def committee_experience_join(self,*args):
committee_key = args[0]
committee = db.get(committee_key)
role_key = args[1]
role = db.get(role_key)
department = args[2]
member = self.current_member()
experience = Experience.get_experience(member, committee) or Experience(key_name= "{0} {1}".format(committee.name(),member.id))
experience.committee = committee.key()
experience.role = role.key()
experience.approved = False
experience.department = department
experience.member = member.key()
experience.put()
time.sleep(2)
return self.committee_experience_part(*args)
def committee_experience_part(self,*args):
committee_key = args[0]
committee = db.get(committee_key)
member = self.current_member()
experience = Experience.get_experience(member, committee)
param = self.render_parameters()
param["committee"] = committee
param["role_list"] = Role.all()
param["experience"] = experience
if experience and experience.approved:
param["pending_list"] = experience.pending_list()
return self.render("committee_experience_part", param)
def committee_set_home(self,*args):
committee_key = args[0]
committee = db.get(committee_key)
member = self.current_member()
member.home_committee = committee.key()
member.put()
return "Done!" #.format(committee.name())
def committee_body(self,*args):
param = self.render_parameters()
committee_key = args[0]
committee = db.get(committee_key)
param["committee"] = committee
param["experience_function"] = "committee_experience_part('{0}');".format(committee_key)
return self.render("committee_body", param)
def entity_browser_committee_create_part(self,*args):
entity_key = args[0]
year_key = args[1]
entity = db.get(entity_key)
year = db.get(year_key)
committee = Committee(parent = entity, key_name = Committee.create_key(entity, year))
committee.year = year.key()
committee.entiti = entity.key()
committee.put()
return self.entity_browser_committee_part(*args)
def entity_browser_committee_part(self,*args):
entity_key = args[0]
entity = db.get(entity_key)
committee_result = Committee.gql("where entiti=:entity_key",entity_key = entity.key())
param = self.render_parameters()
committee = None
committee_list = []
for cmte in committee_result:
if cmte.year.is_current:
committee = cmte
param["president"] = committee.president()
else:
committee_list.append(cmte)
param["committee_list"] = committee_list
param["committee"] = committee
param["entity"] = entity
year = Year.all()
if committee:
year.filter("__key__ !=", committee.year.key())
for cmte in committee_list:
year.filter("__key__ !=", cmte.year.key())
if year.count > 0:
year_list = []
for y in year:
year_list.append(y)
param["year_list"] = year_list
#Temporary
return self.render("entity_browser_committee_part", param)
def entity_browser_entity_part(self,*args):
country_name = args[0]
country = Country.get_by_name(country_name)
entity_result = Entity.gql("where country=:country_key",country_key = country.key())
param = self.render_parameters()
entity_lc = []
entity_mc = None
entity_ai = None
for entity in entity_result:
if entity.type.name == "MEMBER COMMITTEE":
entity_mc = entity
elif entity.type.name == "AIESEC INTERNATIONAL":
entity_ai = entity
else:
entity_lc.append(entity)
param["entity_lc_list"] = entity_lc
param["entity_mc"] = entity_mc
param["entity_ai"] = entity_ai
param["flag_url"] = country.get_flag_url()
return self.render("entity_browser_entity_part", param)
def entity_welcome_body(self,*args):
param = self.render_parameters()
return self.render("entity_welcome_body", param)
def entity_browser_body(self,*args):
country_key = args[0]
entity_list = None
if country_key:
entity_list = Entity.gql("where country=:country_key",country_key = country_key)
else:
member = self.current_member()
country = Country.gql("WHERE name = :country_name",country_name= member.country).get()
entity_list = Entity.gql("where country=:country_key",country_key = country.key())
param = self.render_parameters()
param["entity_list"] = entity_list
param["country_list"] = Country.all()
return self.render("entity_browser_body", param)
def register_code_verify(self,*args):
render_parameters = self.render_parameters()
member = self.current_member()
code_entered = args[0]
sent_code = member.temp_email_code.split(';')[1]
aiesec_email = member.temp_email_code.split(';')[0]
render_parameters['aiesec_email'] = aiesec_email
if sent_code == code_entered:
member.aiesec_email = aiesec_email
member.save()
render_parameters['member'] = member
return self.render('entity_welcome_body',render_parameters)
else:
render_parameters['wrong_code'] = code_entered
return self.render(r'register/code_sent',render_parameters)
def verified(self, *args):
# Mark the current member's AIESEC e-mail (args[0]) as verified and show
# the welcome page. Unlike register_code_verify, no code check happens here.
#find the user
member = self.current_member()
render_parameters = self.render_parameters()
member.aiesec_email = args[0]
member.put()
render_parameters["member"] = member
return self.render("entity_welcome_body", render_parameters)
def show_page(self, *args):
# Render an arbitrary template named by args[0] with the current member.
# SECURITY NOTE(review): the page name comes from the RPC client unchecked;
# confirm self.render restricts it to known templates.
#find the user
member = self.current_member()
render_parameters = self.render_parameters()
page = args[0]
render_parameters["member"] = member
return self.render(page, render_parameters)
def register_code_sent(self,*args):
# Send a 5-digit verification code to the given AIESEC e-mail (args[0])
# and stash "email;code" on the member for register_code_verify.
if args[0]:
aiesec_email = args[0]
random_number = randint(10000,99999)
member = self.current_member()
member.temp_email_code = aiesec_email + ";" + str(random_number)
message = mail.EmailMessage(sender="I am an AIESECer <IamanAIESECer.Facebook@gmail.com>",
subject="Verification code")
message.to = "{0} <{1}>".format(member.name, aiesec_email)
message.body = """
Dear {0}:
This is the verification code for your account:
{1}
Please enter this code in the application to verify your account.
I am an AIESECer Team
""".format(member.first_name,random_number)
message.send()
# Persist the pending code only after the mail was sent successfully.
member.save()
return self.render(r'register/code_sent',{'aiesec_email':aiesec_email})
else:
return self.render(r'register/code_request',{'message':'Invalid email.'})
def post(self):
# JSON-RPC dispatcher: body is [method_name, arg1, arg2, ...].
args = json.loads(self.request.body)
func, args = args[0], args[1:]
# Names starting with "_" are private -- refuse to dispatch them.
if func[0] == '_':
self.error(403) # access denied
return
func = getattr(self, func, None)
if not func:
self.error(404) # file not found
return
# Call the handler method and return its result as JSON.
result = func(*args)
self.response.out.write(json.dumps(result))
def get(self):
    """Plain GET entry point: render the entity list for one country.

    Reads ``country_name`` from the query string and, when the country
    exists, delegates to entity_browser_entity_part().

    Bug fixed: the original passed ``country.key()`` as the argument, but
    entity_browser_entity_part() treats args[0] as a country *name* (it
    calls Country.get_by_name() on it), so the nested lookup could never
    find the country.  Pass the name through instead.
    """
    country_name = self.request.get('country_name')
    country = Country.get_by_name(country_name)
    if country:
        self.response.out.write(self.entity_browser_entity_part(country_name))
    else:
        self.response.out.write(json.dumps("Country not found."))
# webapp2 application wiring: session secret, URL routes, and entry point.
config = {}
config['webapp2_extras.sessions'] = {'secret_key': 'dellnya',}
app = webapp2.WSGIApplication([
('/', MainHandler),
('/rpc', RPCHandler),
('/committee_change_membership_status/', CommitteeExperience),
('/entity/', EntityHandler),
('/entity_create_committee/', EntityCreateCommittee),
('/register_google_login/',RegisterGoogleLogin)
] ,config=config ,debug=True)
def main():
# Start the WSGI application (App Engine dev-server style entry point).
app.run()
if __name__ == "__main__":
main()
|
from django.db import models
from django.contrib.auth.models import User
from address.models import Address
from law_firm.models import LawFirm, LawFirmRates
from investigator.models import Investigator, InvestigatorRates
from constance import config
class Broker(models.Model):
# Broker profile linked one-to-one to a Django auth user, with contact
# details, an address, a photo and an active flag.
# NOTE(review): ForeignKey/OneToOneField without on_delete targets
# Django < 2.0 (on_delete became mandatory in 2.0) -- confirm before upgrading.
user = models.OneToOneField(User)
phone_number_one = models.CharField(max_length=20)
# Secondary phone/e-mail are optional.
phone_number_two = models.CharField(max_length=20, blank=True, null=True)
email_one = models.EmailField()
email_two = models.EmailField(blank=True, null=True)
address = models.ForeignKey(Address)
more_info = models.TextField()
photograph = models.ImageField(blank=True, null=True, upload_to='broker-photos/')
is_active = models.BooleanField(default=True)
#meta
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class BrokerLawFirmLink(models.Model):
# Join table: associates a broker with a law firm at a specific rate card.
broker = models.ForeignKey(Broker)
law_firm = models.ForeignKey(LawFirm)
law_firm_rates = models.ForeignKey(LawFirmRates)
class BrokerInvestigatorLink(models.Model):
# Join table: associates a broker with an investigator at a specific rate card.
broker = models.ForeignKey(Broker)
investigator = models.ForeignKey(Investigator)
investigator_rates = models.ForeignKey(InvestigatorRates)
|
#!/usr/bin/python
from Player import *
from Board import *
from Card import *
from Unit import *
from Effect import *
from Const import *
import math
import logging
# Board dimensions in tiles.
BOARD_LENGTH = 19
BOARD_WIDTH = 5
# A card is drawn every DRAW_FREQUENCY turns (see Game.draw_phase).
DRAW_FREQUENCY = 2
# Gold granted to each player during upkeep (see Game.upkeep_phase).
UPKEEP_GOLD = 2
MAX_HAND_SIZE = 5
class Game:
    """Two-player board-game engine.

    Owns the Board and both Players and drives the per-turn phase sequence:
    upkeep -> cleanup -> draw (pre-main), then spell -> damage -> cleanup ->
    move -> money -> cleanup (post-main).  main_loop() runs turns until
    cleanup_phase() reports a winner id (0/1) or 'Tie'.

    Bug fixed in money_phase: ``players[unit.owner_id]`` referenced an
    undefined global (NameError whenever a Worker earned a payout); the
    players dict lives on ``self``.
    """

    def __init__(self):
        self.players = {}
        self.board = None
        self.setup_phase()
        self.turn = 0
        self.turn_advantage = 0

    def reset(self):
        # Only clears the board; players, decks and hands are untouched.
        self.board.clear()

    def setup_phase(self):
        """Build the board, both players, their starting decks/hands and heroes."""
        # Set up board
        self.board = Board(self, field_length=BOARD_LENGTH, field_width=BOARD_WIDTH)
        # Instantiate Players; player 1 faces the other way.
        self.players[0] = Player(0)
        self.players[1] = Player(1)
        self.players[1].set_direction(Const.FACING_LEFT)
        # Initial decks and hands are identical for both players.
        for pid in (0, 1):
            self.players[pid].set_deck([
                Card.get_card('Rifleman'),
                Card.get_card('Footman'),
                Card.get_card('Footman'),
                Card.get_card('Peasant'),
                Card.get_card('Peasant'),
            ])
            self.players[pid].set_hand([
                Card.get_card('Footman'),
                Card.get_card('Rifleman'),
                Card.get_card('Scout'),
                Card.get_card('Peasant'),
                Card.get_card('Peasant'),
            ])
        # Initial heroes, centered vertically at opposite ends of the field.
        # Integer division keeps the coordinate an int on any Python version.
        middle = (BOARD_WIDTH - 1) // 2
        self.put_in_play(Card.get_card('Arius'), 0, (-1, middle))
        self.put_in_play(Card.get_card('Arius'), 1, (BOARD_LENGTH - 2, middle))

    def main_loop(self):
        """Run turns until the game ends; returns the winner id or 'Tie'."""
        while True:
            winner = self.main_loop_once()
            if winner is not None:
                return winner

    def main_loop_once(self):
        """Run one full turn; returns the result if the game ended, else None."""
        self.pre_main_phases()
        gameover = self.post_main_phases()
        if gameover is not None:
            return gameover

    def pre_main_phases(self):
        """Phases before the players act: upkeep, cleanup, draw."""
        logging.info("********************")
        logging.info("Beginning of Turn {0}".format(self.turn))
        self.upkeep_phase()
        self.cleanup_phase()
        self.draw_phase()

    def post_main_phases(self):
        """Phases after the players act; returns a game result or None."""
        self.spell_phase()
        self.damage_phase()
        self.cleanup_phase()
        self.move_phase()
        self.money_phase()
        result = self.cleanup_phase()
        if result is not None:
            return result
        logging.info("End of Turn {0}".format(self.turn))
        logging.info("********************")
        self.increment_turn()

    def increment_turn(self):
        self.turn += 1

    def upkeep_phase(self):
        """Recompute advantage, refresh units, run upkeep modifiers, pay gold."""
        self.turn_advantage = self.calculate_advantage()
        logging.info("Player {0} has advantage for this turn".format(self.turn_advantage))
        # Refresh must occur first.
        self.board.refresh_units()
        # Let modifiers do updates. This must occur before effects, as
        # effects may apply modifiers.
        for location, unit in self.board.grid.iteritems():
            for modifier in unit.modifiers:
                modifier.upkeepLogic(self.board)
        for id, player in self.players.iteritems():
            for modifier in player.modifiers:
                modifier.upkeepLogic(self.board)
            for spell in self.board.spells[id]:
                if spell is not None:
                    for modifier in spell.modifiers:
                        modifier.upkeepLogic(self.board)
            for building in self.board.buildings[id]:
                if building is not None:
                    for modifier in building.modifiers:
                        modifier.upkeepLogic(self.board)
        for id, player in self.players.iteritems():
            player.gold += UPKEEP_GOLD
        self.apply_phase_effects()

    def draw_phase(self):
        # Both players draw a card every DRAW_FREQUENCY turns.
        if self.turn % DRAW_FREQUENCY == 0:
            for id, player in self.players.iteritems():
                self.players[id].draw()

    def damage_phase(self):
        """Resolve combat, advantage player first; punish out-of-ammo units."""
        first = self.get_turn_advantage()
        second = (first + 1) % 2
        self.board.do_combat(self.players[first], self.players[second])
        # Reduce the health of every unit that is out of will by 1/3.
        for location, unit in self.board.grid.iteritems():
            if unit.get_curr_ammo() <= 0:
                unit._hp -= int(math.ceil(float(unit._max_hp) / 3.0))

    def move_phase(self):
        first = self.get_turn_advantage()
        second = (first + 1) % 2
        self.board.do_movements(self.players[first], self.players[second])

    def money_phase(self):
        """
        Workers get money from the sector they end up in, but should only be
        paid for a sector once per life.
        """
        for location, unit in self.board.grid.iteritems():
            if isinstance(unit, Worker):
                sector = self.board.get_sector_for_position(location)
                # Flip sector for p1 so both players count from their own side.
                if unit.owner_id == 1:
                    sector = len(self.board.SECTOR_COLS) - 1 - sector
                if sector not in unit.visited_sectors:
                    payout = unit.payout[len(unit.visited_sectors)]
                    # Fixed: was the bare name `players` (NameError).
                    self.players[unit.owner_id].gold += payout
                    logging.info("{0} gold gained for sector {1}".format(payout, sector))
                    unit.visited_sectors.append(sector)

    def spell_phase(self):
        # Spell logic: the advantage player's spells resolve first.
        first = self.get_turn_advantage()
        second = (first + 1) % 2
        self.board.process_spells(self.players[first], self.players[second])
        self.board.process_spells(self.players[second], self.players[first])

    def cleanup_phase(self):
        """Run cleanup modifiers and bury the dead until the board is stable.

        Returns the winner id, 'Tie', or None if the game continues.
        """
        logging.info("Start of cleanup phase")
        num_removed = True
        while num_removed:
            # We loop because removing a unit can invalidate modifiers, which
            # in turn can kill further units; stop once nothing else dies
            # (a stable state has been reached).
            for location, unit in self.board.grid.iteritems():
                for modifier in unit.modifiers:
                    modifier.cleanupLogic(self.board)
            for id, player in self.players.iteritems():
                for modifier in player.modifiers:
                    modifier.cleanupLogic(self.board)
                for spell in self.board.spells[id]:
                    if spell is not None:
                        for modifier in spell.modifiers:
                            modifier.cleanupLogic(self.board)
                for building in self.board.buildings[id]:
                    if building is not None:
                        for modifier in building.modifiers:
                            modifier.cleanupLogic(self.board)
            num_removed = self.board.remove_dead()
        # Calculate Gameover: a dead player means the *other* id wins,
        # unless both are dead, which is a tie.
        is_tie = True
        for id, player in self.players.iteritems():
            if player.get_curr_health() > 0:
                is_tie = False
        for id, player in self.players.iteritems():
            if player.get_curr_health() <= 0:
                return 'Tie' if is_tie else (id + 1) % 2
        return None

    def get_turn_advantage(self):
        return self.turn_advantage

    def calculate_advantage(self):
        return self.board.get_next_turn_advantage()

    def play_unit(self, card_name, id, position):
        """plays a card for player id from his hand at position (u,v)"""
        if not self.board.is_playable(self.players[id], position):
            logging.debug("{0} not playable at {1}".format(card_name, position))
            return False
        card = self.players[id].play(card_name)
        if card is None:
            return False
        unit = self.board.place_unit(card, self.players[id], position)
        if unit.play_effect is not None:
            Effect.applyEffect(
                unit.play_effect,
                self.players[id],
                self.players[(id + 1) % 2],
                unit,
                self.board,
                unit.play_effect_args
            )
        return True

    def play_spell(self, spell_name, id, slot):
        """ Plays a spell at a given position (0-4 inclusive) for id"""
        if self.board.spells[id][slot]:
            logging.debug("{0} not playable at {1}".format(spell_name, slot))
            return False
        card = self.players[id].play(spell_name)
        if card is None:
            return False
        self.board.place_spell(card, self.players[id], slot)
        return True

    def play_building(self, building_name, id, slot):
        """ Plays a building at a given position (0-4 inclusive) for id"""
        if self.board.buildings[id][slot]:
            logging.debug("{0} not playable at {1}".format(building_name, slot))
            return False
        card = self.players[id].play(building_name)
        if card is None:
            return False
        self.board.place_building(card, self.players[id], slot)
        return True

    def put_in_play(self, card, id, position):
        """ puts a unit into play without paying the cost """
        # NOTE(review): is_playable()'s result is ignored -- the unit is
        # placed unconditionally.  TODO confirm that is intended.
        self.board.is_playable(self.players[id], position)
        self.players[id].inplay.append(card)
        self.board.grid[(position)] = Unit.get_unit(card, self.players[id])

    def apply_phase_effects(self):
        """
        Go through each building and apply its Effect
        TODO: Should go through everything on the board and apply effects.
        TODO: Make this phase independent
        """
        first = self.get_turn_advantage()
        second = (first + 1) % 2
        self.apply_phase_effects_for_player(first, second)
        self.apply_phase_effects_for_player(second, first)

    def apply_phase_effects_for_player(self, player_id, opponent_id):
        # Apply the upkeep effect of everything the given player owns.
        for object in self.board.get_everything():
            if object and object.upkeep_effect and \
                    object.owner_id == player_id:
                Effect.applyEffect(
                    object.upkeep_effect,
                    self.players[player_id],
                    self.players[opponent_id],
                    object,
                    self.board,
                    object.upkeep_effect_args
                )
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import json
def get_configs():
    """Load ``oauth2py.json`` from the script directory.

    Returns:
        The parsed JSON document (typically a list of provider configs),
        or None when the file does not exist.

    Raises:
        RuntimeError: if the file exists but cannot be read or parsed.

    Bug fixed: the original did ``raise 'string'``, which itself raises a
    TypeError because only BaseException subclasses can be raised.
    """
    path = os.path.join(sys.path[0], 'oauth2py.json')
    if not os.path.isfile(path):
        return None
    try:
        with open(path) as data:
            return json.load(data)
    except Exception as e:
        raise RuntimeError('could not load configs: {0}!'.format(e))
class OauthClient(object):
# Factory/base class for OAuth2 provider clients.
# __configs is loaded once at class-definition time from oauth2py.json
# (name-mangled to _OauthClient__configs).
__configs = get_configs()
def __init__(self):
super(OauthClient, self).__init__()
@classmethod
def load(cls, name):
# Dynamically import oauth2py.<name>.<Name> and instantiate it, then
# apply the matching config entry (matched case-insensitively by 'name').
module = OauthClient.get_class('oauth2py.{0}.{1}'.format(
name.lower(), name.lower().capitalize()))
client = module()
if cls.__configs:
cfg = next((c for c in cls.__configs
if c['name'].lower() == name.lower()), None)
if cfg:
client.init(cfg)
return client
@classmethod
def reload_configs(cls):
# Re-read oauth2py.json (e.g. after it changed on disk).
cls.__configs = get_configs()
@staticmethod
def get_class(kls):
# Resolve a dotted path to an attribute, e.g. 'a.b.C' -> class C:
# import the root package, then walk the remaining components.
parts = kls.split('.')
module = '.'.join(parts[:-1])
m = __import__(module)
for comp in parts[1:]:
m = getattr(m, comp)
return m
|
# Given a table of purchases by date, calculate the month-over-month percentage change in revenue.
# The output should include the year-month date (YYYY-MM) and percentage change, rounded to the 2nd
# decimal point, and sorted from the beginning of the year to the end of the year.
# The percentage change column will be populated from the 2nd month forward and can be calculated as
# ((this month's revenue - last month's revenue) / last month's revenue)*100.
# Import your libraries
import pandas as pd
# Start writing code
# Work with the transactions table provided by the environment.
df = sf_transactions
# Collapse to one total-revenue row per calendar month.
df['month_year'] = df.created_at.dt.to_period('M')
df = df.groupby('month_year', as_index=False)['value'].sum()
# Keep the previous month's revenue as its own column.
df['prior_mo'] = df['value'].shift(1)
# Month-over-month percentage change, rounded to 2 decimals;
# pct_change() is exactly (this - prior) / prior.
df['pct_change'] = (df['value'].pct_change() * 100).round(2)
df[['month_year', 'pct_change']]
|
from django.shortcuts import render
from django.http import *
from .Cookie import *
from .verify import *
from .database.delete import *
from .database.save import *
from .database.search import user_of_cookie, user_of_username
from .database import *
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth import *
def redir_to_index(request):
# Redirect the site root to the index view (relative URL).
return HttpResponseRedirect('index/')
def login(request):
# Login view.  A POST with both fields authenticates; anything else
# renders the login form and clears any stale session cookie.
context = {}
username=request.POST.get('username', None)
pswd=request.POST.get('pswd', None)
if username is not None and pswd is not None:
if not pswd_correct(username, pswd):
context['login_fail_notice'] = 'Wrong Username or Password!'
return HttpResponse(render(request, 'login.html', context))
# Credentials OK: show a jump page and issue a fresh session cookie.
context['title'] = 'Login Success'
context['url'] = '../index/'
context['error_msg'] = 'You have logged in successfully!'
response = HttpResponse(render(request, 'jump.html', context))
cookie = Cookie()
save_cookie(user_of_username(username),cookie)
response.set_cookie('id', cookie.cookie_id, expires=cookie.expire)
return response
else:
# Missing fields (e.g. plain GET): show the form, drop any old cookie.
response=HttpResponse(render(request, 'login.html'))
response.delete_cookie('id')
return response
def logout(request):
    """Log the user out: expire the browser cookie and drop the stored session."""
    context = {
        'title': 'Logout Success',
        'url': '../index/',
        'error_msg': 'You have logged out successfully!',
    }
    response = HttpResponse(render(request, 'jump.html', context))
    response.delete_cookie('id')
    # Also remove the server-side cookie record, if the browser sent one.
    cookie_id = request.COOKIES.get('id', None)
    if cookie_id is not None:
        delete_cookie(cookie_id)
    return response
def index(request):
# Home page.  Resolves the session cookie to a user; logged-in users get
# a refreshed cookie (rolling session) and personalized context.
context = {}
user=None
cookie_id=request.COOKIES.get('id',None)
if cookie_id is not None:
user = user_of_cookie(cookie_id)
response = HttpResponse()
if user is not None:
context['islogin']=True
context['name'] = user.name
# Rotate the session cookie on every visit.
cookie = Cookie()
save_cookie(user, cookie)
response.set_cookie('id', cookie.cookie_id, expires=cookie.expire)
response.content = render(request, 'index.html', context)
return response
def register(request):
    """Handle the registration form.

    GET (or missing fields): render the empty registration page.
    POST: validate via veri(); on success persist the credentials and show
    a jump page to the login view, otherwise re-render the form with the
    error message veri() returned.

    Fixed: the original called veri() twice (once for the stored result,
    once again inside the condition); validate exactly once and reuse it.
    """
    context = {}
    username = request.POST.get('username', None)
    password1 = request.POST.get('password1', None)
    password2 = request.POST.get('password2', None)
    if username is not None and password1 is not None and password2 is not None:
        verify_result = veri(username, password1, password2)
        # veri() returns True on success, otherwise a failure message.
        if verify_result == True:
            save_name_pswd(username, password1)
            context['title'] = 'Register Success'
            context['url'] = '../login/'
            context['error_msg'] = 'You have registered successfully!'
            return HttpResponse(render(request, 'jump.html', context))
        else:
            context['register_fail_notice'] = verify_result
            return HttpResponse(render(request, 'register.html', context))
    else:
        return HttpResponse(render(request, 'register.html'))
def response_not_logged_in(request):
    """Jump page shown when an action requires the user to be logged in."""
    ctx = {
        'title': 'Not logged in',
        'url': '../login/',
        'error_msg': 'You are not logged, please log in!',
    }
    return HttpResponse(render(request, 'jump.html', ctx))
def creatclub(request):
    """Render the club-creation page (no dynamic context)."""
    return HttpResponse(render(request, 'creatclub.html', {}))
def searchclub(request):
    """Render the club-search page (no dynamic context)."""
    return HttpResponse(render(request, 'searchclub.html', {}))
import csv
import sys
# Convert the concerts CSV into HTML table rows.
# NOTE(review): paths are hard-coded and cell values are not HTML-escaped;
# parameterize/escape if this script is ever reused beyond its one dataset.
CSV_PATH = "/Users/aashild/Documents/Python/CSV2HTML/concerts.csv"
HTML_PATH = '/Users/aashild/Documents/Python/CSV2HTML/formatted.html'

# Fixed: the original opened both files without ever closing them; the
# with-block guarantees the output is flushed and closed.
with open(CSV_PATH) as f_csv, open(HTML_PATH, "w") as f_html:
    reader = csv.DictReader(f_csv)
    for row in reader:
        f_html.write('<tr>')
        for col in ('År', 'Komponist', 'Verk', 'Dirigent', 'Sted'):
            f_html.write('<td>' + row[col] + '</td>')
        if row['Mer Info'] != '':
            # Fixed: the original lacked the space before target=, emitting
            # href="..."target="_blank" (a malformed attribute).
            f_html.write('<td><a href="' + row['Mer Info'] + '" target="_blank">Link</a></td>')
        else:
            f_html.write('<td>' + '</td>')
        f_html.write('</tr>')
|
#!/usr/bin/python
#\file slider2.py
#\brief Slider with labels.
#\author Akihiko Yamaguchi, info@akihikoy.net
#\version 0.1
#\date Apr.14, 2021
import sys
from PyQt4 import QtCore,QtGui
def Print(*s):
# Python-2 helper: print all items space-separated, then end the line
# (the trailing comma suppresses the per-item newline).
for ss in s: print ss,
print ''
class TSlider(QtGui.QWidget):
# Demo widget: a horizontal slider (0..8) mapped to values 1000..1800 via
# toValue(), a live value label, tick labels, and an exit button that only
# closes when the mapped value is below 1500.
def __init__(self):
QtGui.QWidget.__init__(self)
self.InitUI()
def InitUI(self):
# Set window size.
self.resize(320, 120)
# Set window title
self.setWindowTitle("Slider")
mainlayout= QtGui.QVBoxLayout()
self.setLayout(mainlayout)
slidergroup= QtGui.QGridLayout()
#slidergroup.move(10, 60)
#slidergroup.resize(10, 60)
#self.setLayout(slidergroup)
mainlayout.addLayout(slidergroup)
#slider1= QtGui.QSlider(QtCore.Qt.Vertical, self)
slider1= QtGui.QSlider(QtCore.Qt.Horizontal, self)
slider1.setTickPosition(QtGui.QSlider.TicksBothSides)
slider1.setRange(0, 8)
slider1.setTickInterval(1)
slider1.setSingleStep(1)
slider1.setValue(6)
#slider1.move(10, 60)
slider1.resize(100, 20)
# Map slider position 0..8 to 1000..1800 in steps of 100.  Both lambdas
# go through self.slider1, so self.slider1 must be assigned before the
# signal can fire.
slider1.toValue= lambda: 1000 + 100*self.slider1.value()
slider1.valueChanged.connect(lambda:self.label1.setText(str(self.slider1.toValue())))
self.slider1= slider1
slidergroup.addWidget(slider1, 0, 0, 1, 5)
# Live value readout next to the slider; initialized from the slider below.
label1= QtGui.QLabel('0',self)
self.label1= label1
slidergroup.addWidget(label1, 0, 5, 1, 1)
self.label1.setText(str(self.slider1.toValue()))
# Static tick labels under the slider (1000..1800).
labelt1= QtGui.QLabel('1000',self)
slidergroup.addWidget(labelt1, 1, 0, 1, 1, QtCore.Qt.AlignLeft)
labelt2= QtGui.QLabel('1200',self)
slidergroup.addWidget(labelt2, 1, 1, 1, 1, QtCore.Qt.AlignLeft)
labelt3= QtGui.QLabel('1400',self)
slidergroup.addWidget(labelt3, 1, 2, 1, 1, QtCore.Qt.AlignCenter)
labelt4= QtGui.QLabel('1600',self)
slidergroup.addWidget(labelt4, 1, 3, 1, 1, QtCore.Qt.AlignRight)
labelt5= QtGui.QLabel('1800',self)
slidergroup.addWidget(labelt5, 1, 4, 1, 1, QtCore.Qt.AlignRight)
vspacer1= QtGui.QSpacerItem(10, 10, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
slidergroup.addItem(vspacer1, 2, 0, 1, 6)
# Add a button
btn1= QtGui.QPushButton('_________Exit?_________', self)
#btn1.setFlat(True)
btn1.setToolTip('Click to make something happen')
# Close only when the mapped value is < 1500; otherwise print a hint.
btn1.clicked.connect(lambda:self.close() if self.slider1.toValue()<1500 else Print('Hint: Set value less than 1500 to exit'))
btn1.resize(btn1.sizeHint())
#btn1.move(100, 150)
self.btn1= btn1
mainlayout.addWidget(btn1)
# Show window
self.show()
# Create an PyQT4 application object.
a = QtGui.QApplication(sys.argv)
# The QWidget widget is the base class of all user interface objects in PyQt4.
w = TSlider()
# Enter the Qt event loop; exit the process with its return code.
sys.exit(a.exec_())
|
import chainer
import chainer.links as L
import chainer.functions as F
import numpy as np
from enum import Enum
linear_init = chainer.initializers.LeCunUniform()
def seq_func(func, x, reconstruct_shape=True):
    """Apply a 2-D function position-wise to a 3-D batch.

    Flattens ``x`` of shape (batch, length, units) into
    (batch * length, units), runs ``func`` on the flat array, and --
    unless ``reconstruct_shape`` is False -- restores the leading two
    axes, yielding (batch, length, out_units).
    """
    batch, length, units = x.shape
    flat = F.reshape(x, shape=(batch * length, units))
    flat = func(flat)
    if not reconstruct_shape:
        return flat
    out_units = flat.shape[1]
    return flat.reshape(batch, length, out_units)
class LayerNormalizationSentence(L.LayerNormalization):
    """Layer normalization applied position-wise over (batch, length, units).

    Wraps chainer's 2-D LayerNormalization so it accepts a 3-D sentence
    block by flattening the batch and length axes via ``seq_func``.
    """

    def __init__(self, *args, **kwargs):
        super(LayerNormalizationSentence, self).__init__(*args, **kwargs)

    def __call__(self, x):
        # Delegate to the parent's 2-D implementation position-wise.
        parent_call = super(LayerNormalizationSentence, self).__call__
        return seq_func(parent_call, x)
class ScaledDotProductAttention(chainer.Chain):
# Scaled dot-product attention: softmax(Q K^T / temper) V, with optional
# boolean masking (masked positions get -inf logits) and attention dropout.
def __init__(self, d_model, attn_dropout=0.1):
super(ScaledDotProductAttention, self).__init__()
# Scaling temperature = sqrt(d_model).  NOTE(review): the canonical
# Transformer scales by sqrt(d_k); confirm d_model is intended here.
self.temper = d_model ** 0.5
self.attn_dropout = attn_dropout
def __call__(self, q, k, v, attn_mask=None):
# q k v shape = (n_head* batch) x len_q/len_k x d_k or d_v
attn = F.matmul(q, F.transpose(k, axes=(0,2,1))) / self.temper
# # (n_head*batch) of matrix multiply (len_q x d_k) x (d_k x len_k) = (n_head*batch) x len_q x len_k
if attn_mask is not None:
assert attn_mask.shape == attn.shape, \
'Attention mask shape {0} mismatch ' \
'with Attention logit tensor shape ' \
'{1}.'.format(attn_mask.shape, attn.shape)
# NOTE(review): mutates the mask's underlying array in place
# (dtype cast to bool) -- callers' masks are modified.
if hasattr(attn_mask, "data"):
attn_mask.data = attn_mask.data.astype(bool)
# Masked positions receive -inf so softmax gives them zero weight.
attn = F.where(attn_mask, self.xp.full(attn.shape, -np.inf, 'f'), attn)
attn = F.softmax(attn, axis=2) # (n_head*batch) x len_q x len_k
attn = F.dropout(attn, ratio=self.attn_dropout)
output = F.matmul(attn, v) # (n_head*batch) matrix of (len_q x len_k) x (len_v x d_v) = (n_head*batch) x len_q x d_v
# Since d_k == d_q, output = (n_head*batch) x len_q x d_v
return output, attn
class MultiHeadAttention(chainer.Chain):
# Multi-head attention: per-head Q/K/V projections (w_qs/w_ks/w_vs),
# scaled dot-product attention, output projection, dropout, and a
# residual connection followed by layer normalization.
def __init__(self, n_heads, d_model, d_k=64, d_v=64, dropout=0.1):
super(MultiHeadAttention, self).__init__()
self.n_heads = n_heads
self.d_k = d_k
self.d_v = d_v
self.dropout_ratio = dropout
with self.init_scope():
# One projection matrix per head, stacked along axis 0.
self.w_qs = chainer.Parameter(linear_init, shape=(n_heads, d_model, d_k))
self.w_ks = chainer.Parameter(linear_init, shape=(n_heads, d_model, d_k))
self.w_vs = chainer.Parameter(linear_init, shape=(n_heads, d_model, d_v))
self.attention = ScaledDotProductAttention(d_model)
self.layer_norm = LayerNormalizationSentence(d_model,eps=1e-6)
self.proj = L.Linear(n_heads * d_v, d_model) # note that typical case d_v = d_model // n_heads
def __call__(self, q, k, v, attn_mask=None):
d_k, d_v = self.d_k, self.d_v
n_head = self.n_heads
batch_size, len_q, d_model = q.shape
batch_size, len_k, d_model = k.shape
batch_size, len_v, d_model = v.shape
# Saved for the residual connection after projection.
residual = q
# treat as a (n_head) size batch, shape = (heads x batch), number_words, d_model; then (heads, (batch x len_q), d_model)
q_s = F.tile(q, reps=(n_head, 1, 1)).reshape(n_head, -1, d_model) # n_head x (batch_size*len_q) x d_model
k_s = F.tile(k, reps=(n_head, 1, 1)).reshape(n_head, -1, d_model) # n_head x (batch_size*len_k) x d_model
v_s = F.tile(v, reps=(n_head, 1, 1)).reshape(n_head, -1, d_model) # n_head x (batch_size*len_v) x d_model
# (n_head) batch matrix multiply of ((batch * len_q) x d_model) x (d_model, d_k) = (batch * len_q) x d_k
# treat the result as a (n_head * mb_size) size batch
q_s = F.matmul(q_s, self.w_qs).reshape(-1, len_q, d_k) # (n_head*mb_size) x len_q x d_k
k_s = F.matmul(k_s, self.w_ks).reshape(-1, len_k, d_k) # (n_head*mb_size) x len_k x d_k
v_s = F.matmul(v_s, self.w_vs).reshape(-1, len_v, d_v) # (n_head*mb_size) x len_v x d_v
# outputs size = (n_head * mb_size) x len_q x d_v, attns size = (n_head*mb_size) x len_q x len_k
if attn_mask is not None:
# Replicate the mask once per head to match the folded batch axis.
attn_mask = F.tile(attn_mask, reps=(n_head, 1, 1))
outputs, attns = self.attention(q_s, k_s, v_s, attn_mask=attn_mask) # (n_head*batch) x len_q x d_v
# Unfold the heads and concatenate them along the feature axis.
outputs = F.concat(F.split_axis(outputs, n_head, axis=0), axis=2) # = batch_size, len_q, (n_head*d_v)
outputs = F.reshape(outputs, shape=(batch_size * len_q, n_head * d_v))
# project back to residual size
outputs = self.proj(outputs)
outputs = F.dropout(outputs, self.dropout_ratio)
outputs = F.reshape(outputs, shape=(batch_size, len_q, d_model))
return self.layer_norm(outputs + residual)
class ConvolutionSentence(L.Convolution2D):
""" Position-wise Linear Layer for Sentence Block
Position-wise linear layer for array of shape
(batchsize, dimension, sentence_length)
can be implemented a convolution layer.
"""
def __init__(self, in_channels, out_channels,
ksize=1, stride=1, pad=0, nobias=False,
initialW=None, initial_bias=None):
# Defaults (ksize=1, stride=1, pad=0) make this an exact per-position
# linear map; pass-through to Convolution2D otherwise.
super(ConvolutionSentence, self).__init__(
in_channels, out_channels,
ksize, stride, pad, nobias,
initialW, initial_bias)
def __call__(self, x):
"""Applies the linear layer.
Args:
x (~chainer.Variable): Batch of input vector block. Its shape is
(batchsize, in_channels, sentence_length).
Returns:
~chainer.Variable: Output of the linear layer. Its shape is
(batchsize, out_channels, sentence_length).
"""
x = F.expand_dims(x, axis=3) # shape = (batch_size, in_channels, sentence_length, 1)
# 1d fc along sentence_length in parallel can be simulated by 1x1 convolution
y = super(ConvolutionSentence, self).__call__(x)
y = F.squeeze(y, axis=3)
return y
class PositionFFNType(Enum):
# Selects the forward path in PositionwiseFeedForwardLayer:
# 'regular' consumes a stacked (B, T, D) tensor, 'nstep_lstm' a list of
# per-sentence (T, D) arrays.
regular = 1
nstep_lstm = 2
class PositionwiseFeedForwardLayer(chainer.Chain):
# Position-wise feed-forward block built from 1x1 ConvolutionSentence
# layers (n_layers >= 2), with leaky-ReLU activations, dropout, a residual
# connection, and final layer normalization.  forward_type chooses between
# a stacked-tensor path and a list-of-sentences path.
def __init__(self, n_layers, in_size, out_size=None, n_inner_units=1024, dropout=0.1, forward_type=PositionFFNType.nstep_lstm):
super(PositionwiseFeedForwardLayer, self).__init__()
self.dropout_ratio = dropout
self.forward_type = forward_type
assert n_layers >= 2
if out_size is None:
out_size = in_size
# n_inner_units = in_size * 4 # hidden layer dimension is big than input/output dimension
with self.init_scope():
# layer_name records attribute names (w0, w1, ...) in forward order.
self.layer_name = []
if n_layers == 2:
self.w0 = ConvolutionSentence(in_size, n_inner_units,
initialW=linear_init)
self.w1 = ConvolutionSentence(n_inner_units, out_size,
initialW=linear_init)
self.layer_name.append("w0")
self.layer_name.append("w1")
elif n_layers > 2:
self.w0 = ConvolutionSentence(in_size, n_inner_units,
initialW=linear_init)
self.layer_name.append("w0")
# Middle layers keep the inner width; the last maps to out_size.
for i in range(1, n_layers-1):
setattr(self, "w{}".format(i), ConvolutionSentence(n_inner_units, n_inner_units,
initialW=linear_init))
self.layer_name.append("w{}".format(i))
setattr(self, "w{}".format(n_layers-1),ConvolutionSentence(n_inner_units, out_size,
initialW=linear_init))
self.layer_name.append("w{}".format(n_layers-1))
self.layer_norm = LayerNormalizationSentence(out_size)
self.act = F.leaky_relu
def __call__(self, x):
if self.forward_type == PositionFFNType.nstep_lstm:
return self.forward_nstep_lstm(x)
elif self.forward_type == PositionFFNType.regular:
return self.forward_regular(x)
def forward_regular(self, x): # shape of x = batch, T, d_model
# Stacked-tensor path: conv layers operate on (B, D, T).
residual = x
x = F.transpose(x, axes=(0,2,1))
for conv_layer in self.layer_name:
x = self.act(getattr(self, conv_layer)(x))
output = F.transpose(x, axes=(0,2,1))
output = F.dropout(output, self.dropout_ratio)
return self.layer_norm(output + residual)
def forward_nstep_lstm(self, es):
# e shape = list of (sentence_length, in_channels)
# NOTE(review): unlike forward_regular, this path has no residual
# connection before layer_norm -- confirm that asymmetry is intended.
out_es = []
es = F.stack(es) # B, T, D
es = F.transpose(es, axes=(0,2,1)) # B, D, T
for e in es:
e = F.transpose(e, axes=(1, 0)) # D, T
e = F.expand_dims(e, axis=0) # 1,D,T
for conv_layer in self.layer_name:
e = self.act(getattr(self, conv_layer)(e))
e = F.transpose(e, axes=(0, 2, 1))[0] # return B, T, D, then [0] = T,D
e = F.dropout(e, self.dropout_ratio)
out_es.append(e)
out_es = F.stack(out_es) # B,T,D
return [F.squeeze(e) for e in F.split_axis(self.layer_norm(out_es), out_es.shape[0], axis=0, force_tuple=True)] # return list of (T,D)
class EncoderLayer(chainer.Chain):
# One encoder block: multi-head self-attention only -- the position-wise
# feed-forward sublayer is commented out below.
def __init__(self, d_model, d_inner_hid, n_head, d_k, d_v, dropout=0.1):
super(EncoderLayer, self).__init__()
with self.init_scope():
self.slf_attn = MultiHeadAttention(n_heads=n_head, d_model=d_model, d_k=d_k, d_v=d_v, dropout=dropout)
# self.pos_ffn = PositionwiseFeedForwardLayer(n_layers=1, in_size=d_model, n_inner_units=d_inner_hid,
# dropout=dropout, forward_type=PositionFFNType.regular)
def __call__(self, enc_input, slf_attn_mask=None):
# Self-attention: query, key and value are all the encoder input.
enc_output = self.slf_attn(enc_input, enc_input, enc_input, attn_mask=slf_attn_mask) # # enc_output shape = mb_size x len_q x d_model
# enc_output = self.pos_ffn(enc_output)
return enc_output # shape = batch x len_q x d_model
class Encoder(chainer.Chain):
# Stack of n_layer EncoderLayers applied in sequence with a shared
# self-attention mask.  Requires d_k == d_v == d_model // n_head.
def __init__(self, n_layer=6, n_head=8,d_model=2048, d_inner_hid=1024, d_k=256, d_v=256,dropout=0.1):
super(Encoder, self).__init__()
self.layer_names = []
assert d_k == d_model//n_head
assert d_v == d_model//n_head
with self.init_scope():
# Register layers as l1..lN so parameters are tracked by chainer.
for i in range(1, n_layer+1):
name = "l{}".format(i)
layer = EncoderLayer(d_model, d_inner_hid, n_head, d_k, d_v, dropout=dropout)
self.add_link(name, layer)
self.layer_names.append(name)
def __call__(self, e, xx_mask):
# Thread the representation through each layer in registration order.
for name in self.layer_names:
e = getattr(self, name)(e, slf_attn_mask=xx_mask)
return e
class AttentionBlock(chainer.Chain):
# Self-attention block: adds sinusoidal position encodings, runs the
# Encoder stack, and projects to out_size with a final Linear layer.
def __init__(self, n_layers, d_model, out_size, d_inner_hid=1024, n_head=4, dropout=0.1, max_length=1500):
super(AttentionBlock, self).__init__()
self.layer_names = []
self.out_size = out_size
self.d_model = d_model
# Precomputed (1, max_length, d_model) sinusoid table; sliced per batch.
self.position_encoding_block = self.initialize_position_encoding(max_length, d_model)
d_k = d_model // n_head
d_v = d_model // n_head
with self.init_scope():
self.encoder = Encoder(n_layers, n_head=n_head, d_model=d_model, d_inner_hid=d_inner_hid, d_k=d_k, d_v=d_v,
dropout=dropout)
# Originally a ConvolutionSentence; switched to a plain Linear layer as a
# workaround because GPU memory was insufficient.
self.final_linear = L.Linear(d_model, out_size)
def initialize_position_encoding(self, length, n_units):
xp = self.xp
# Implementation in the Google tensor2tensor repo
channels = n_units
position = xp.arange(length, dtype='f')
num_timescales = channels // 2
log_timescale_increment = (
xp.log(10000. / 1.) / (float(num_timescales) - 1))
inv_timescales = 1. * xp.exp(
xp.arange(num_timescales).astype('f') * - log_timescale_increment)
scaled_time = \
xp.expand_dims(position, 1) * \
xp.expand_dims(inv_timescales, 0)
signal = xp.concatenate(
[xp.sin(scaled_time), xp.cos(scaled_time)], axis=1)
signal = xp.reshape(signal, [1, length, channels]) # shape = [1, length, channels(n_units)]
return signal
def __call__(self, x_lst, mask=None):
# x is shape = (batch, T, D)
x = F.stack(x_lst)
batch, length, unit = x.shape
# Add (broadcast) the position encodings for the first `length` steps.
x += self.xp.array(self.position_encoding_block[:, :length, :])
h = self.encoder(x, mask) # self attention shape= batch x len_q x d_model
batch, len_q, d_model = h.shape
h = F.reshape(h, (batch*len_q, d_model))
h = self.final_linear(h) # shape = B, out_size, len_q
h = F.reshape(h, (batch, len_q, self.out_size))
# shape = B, len_q, out_size , then convert to [len_q, out_size] that is list of T,D
# NOTE(review): split_axis(h, 1, axis=0) splits into a single chunk --
# presumably batch size 1 is assumed here; confirm for larger batches.
# return [F.squeeze(e) for e in F.split_axis(F.transpose(h, axes=(0, 2, 1)), 1, axis=0, force_tuple=True)]
return [F.squeeze(e) for e in F.split_axis(h, 1, axis=0, force_tuple=True)]
import os
import re
import subprocess
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from .helpers import determine_tag_value, figs_assert, initiate_figs, plot_helper_settings
def get_lobster(bond=0, filepath='COHPCAR.lobster', ISPIN=None, plot=False, xlim=None, ylim=None,
                on_figs=None):
    """
    Get the COHPCAR or COOPCAR, with consideration of spin-polarization.

    Parameters
    ----------
    bond: int
        the bond number in 'COHPCAR.lobster'/'COOPCAR.lobster', counting from 1. Default to 0, meaning the average
    filepath: string
        filepath, default to 'COHPCAR.lobster'
    ISPIN: int
        user specified ISPIN
        If not given, infer from 'OUTCAR'/'INCAR'.
    plot: bool
        whether to plot the data, default to False
    xlim: list
        the range of x-axis, 2 values in a list
    ylim: list
        the range of y-axis, 2 values in a list(, of the spin-combined plot if ISPIN == 2)
    on_figs: list/int
        the current figure numbers to plot to, default to new figures

    Returns
    -------
    a dict, containing
        'data': a pandas dataframe
        'ax': the axes reference
    """
    # get data
    basename = os.path.basename(filepath)
    # COHP vs COOP only flips the sign convention in the plots below
    datatype = 'COHP' if 'COHP' in basename.upper() else 'COOP'
    with open(filepath, 'r') as f:
        LOBSTERCAR = f.readlines()
    # header ends at the first bond-descriptor line, e.g. 'No.1:...(...)'
    for line_num, line in enumerate(LOBSTERCAR):
        if re.match(r'No\.\d*:.*\(.*\)', line):
            break
    N_headerlines = line_num
    # count the consecutive bond-descriptor lines to get the number of bonds
    for line_num, line in enumerate(LOBSTERCAR[N_headerlines:]):
        if not re.match(r'No\.\d*:.*\(.*\)', line):
            break
    N_bonds = line_num
    data_start_line = N_headerlines + N_bonds
    # tokenize every line in place
    for i in range(len(LOBSTERCAR)):
        LOBSTERCAR[i] = LOBSTERCAR[i].split()
    # number of energy points, taken from the 3rd field of the 2nd line
    NEDOS = int(LOBSTERCAR[1][2])
    if ISPIN:
        print("Using user specified ISPIN.")
    else:
        ISPIN = determine_tag_value('ISPIN', filepath)
    data = np.array(LOBSTERCAR[data_start_line:data_start_line + NEDOS], dtype=float)
    # confluence and data organizing
    if ISPIN == 1:
        col_names = ['E', 'avg', 'avg_integrated']
        for n_bond in range(1, N_bonds + 1):
            col_names.extend(['bond_{0}'.format(n_bond), 'bond_{0}_integrated'.format(n_bond)])
        return_dict = {'data': pd.DataFrame(**{'columns': col_names, 'data': data})}
    elif ISPIN == 2:
        # spin-up block: E + avg pair + one (value, integrated) pair per bond
        data1 = data[:, :(3 + 2 * N_bonds)]
        # spin-down block follows; it has no energy column of its own,
        # so reuse column 0 from the full array
        data2 = data[:, (3 + 2 * N_bonds):(5 + 4 * N_bonds)]
        data2 = np.column_stack((data[:, 0], data2))
        col_names1 = ['E', 'avg_up', 'avg_integrated_up']
        col_names2 = ['E', 'avg_down', 'avg_integrated_down']
        for n_bond in range(1, N_bonds + 1):
            col_names1.extend(['bond_{0}_up'.format(n_bond), 'bond_{0}_integrated_up'.format(n_bond)])
            col_names2.extend(['bond_{0}_down'.format(n_bond), 'bond_{0}_integrated_down'.format(n_bond)])
        return_dict = {'data_spin_up': pd.DataFrame(**{'columns': col_names1, 'data': data1}),
                       'data_spin_down': pd.DataFrame(**{'columns': col_names2, 'data': data2}),
                       }
    # NOTE(review): return_dict (and data1/data2) are unbound if ISPIN is
    # neither 1 nor 2 — presumably determine_tag_value only yields 1 or 2,
    # but a user-supplied ISPIN is not validated; confirm.
    if plot:
        # start plotting
        figs_assert(on_figs, ISPIN, 'lobster')
        if ISPIN == 1:
            # column layout: [E, avg, avg_int, bond1, bond1_int, ...],
            # so bond k's value column is 2k+1 (bond=0 -> the average)
            col_num = bond * 2 + 1
            initiate_figs(on_figs)
            if datatype == 'COHP':
                # COHP is conventionally plotted negated
                plt.plot(data[:, 0], -data[:, col_num])
            elif datatype == 'COOP':
                plt.plot(data[:, 0], data[:, col_num])
            ax = plt.gca()
            plot_helper_settings((xlim, ylim), datatype)
            return_dict.update({'ax': ax})
        elif ISPIN == 2:
            col_bond = bond * 2 + 1
            # Plot the combined COHP/COOP
            initiate_figs(on_figs)
            if datatype == 'COHP':
                plt.plot(data1[:, 0], -data1[:, col_bond] - data2[:, col_bond], label='spin up + down')
            elif datatype == 'COOP':
                plt.plot(data1[:, 0], data1[:, col_bond] + data2[:, col_bond], label='spin up + down')
            ax1 = plt.gca()
            plot_helper_settings((xlim, ylim), datatype)
            # Plot the separated COHP/COOP
            initiate_figs(on_figs)
            if datatype == 'COHP':
                plt.plot(data1[:, 0], -data1[:, col_bond], label='spin up')
                plt.plot(data2[:, 0], -data2[:, col_bond], label='spin down')
            elif datatype == 'COOP':
                plt.plot(data1[:, 0], data1[:, col_bond], label='spin up')
                plt.plot(data2[:, 0], data2[:, col_bond], label='spin down')
            ax2 = plt.gca()
            # halve the y-range for the per-spin plot (copy, don't mutate ylim)
            ylim_sp = None
            if ylim:
                ylim_sp = ylim[:]
                ylim_sp[0] /= 2.
                ylim_sp[1] /= 2.
            plot_helper_settings((xlim, ylim_sp), datatype)
            return_dict.update({'ax_spin_combined': ax1, 'ax_spin_separated': ax2})
    return return_dict
def get_integrated_lobster(filepath='ICOHPLIST.lobster', return_total=True):
    """
    Get the ICOHPLIST or ICOOPLIST, with consideration of spin-polarization.

    Parameters
    ----------
    filepath: string
        filepath, default to 'ICOHPLIST.lobster'
    return_total: bool
        whether return the spin-up and spin-down summed dataframe or a dict with
        key 1 meaning spin-up, key -1 meaning spin-down, default to True

    Returns
    ----------
    a pandas dataframe if return_total is True, or a dict if return_total is False

    Raises
    ------
    ValueError
        if the filename contains neither 'COHP' nor 'COOP' (previously this
        fell through with `filetype` unbound and raised a NameError later).
    """
    basename = os.path.basename(filepath)
    if 'COHP' in basename:
        filetype = 'COHP'
    elif 'COOP' in basename:
        filetype = 'COOP'
    else:
        raise ValueError("filepath basename must contain 'COHP' or 'COOP': %s" % basename)
    # grep for the header line(s); two occurrences means a spin-polarized file
    linenum_list = [int(i.split(':')[0]) for i in
                    subprocess.getoutput(' '.join(['grep -n', filetype, filepath])).split('\n')]
    ILOBSTERLIST_dict = {}
    if len(linenum_list) == 1:
        is_mag = False
        # raw-string separator avoids the invalid '\s' escape warning;
        # read_csv replaces the deprecated read_table
        ILOBSTERLIST_dict[1] = pd.read_csv(filepath, sep=r'\s+', index_col=filetype + '#', usecols=range(5))
    else:
        is_mag = True
        n_interactions = linenum_list[1] - linenum_list[0] - 1
        ILOBSTERLIST_dict[1] = pd.read_csv(filepath, nrows=n_interactions, sep=r'\s+', index_col=filetype + '#',
                                           usecols=range(5))
        ILOBSTERLIST_dict[-1] = pd.read_csv(filepath, skiprows=n_interactions + 1, nrows=n_interactions, sep=r'\s+',
                                            index_col=filetype + '#', usecols=range(5))
    # element-wise sum of the two spin channels when spin-polarized
    ILOBSTER = ILOBSTERLIST_dict[1] + ILOBSTERLIST_dict[-1] if is_mag else ILOBSTERLIST_dict[1]
    if return_total:
        return ILOBSTER
    else:
        return ILOBSTERLIST_dict
def filter_lobster_by_elements(e1, e2, ILOBSTER):
    """
    Get the filtered ICOHPLIST or ICOOPLIST, by the constituting elements.

    Parameters
    ----------
    e1, e2: string
        element symbols. Order doesn't matter
    ILOBSTER: pandas dataframe of ICOHPLIST or ICOOPLIST

    Returns
    ----------
    a filtered pandas dataframe by elements
    """
    # Raw strings avoid DeprecationWarning for the '\d' escape; re.escape
    # guards against regex metacharacters in the element symbols.
    e1_regex = r'^' + re.escape(e1) + r'\d+$'
    e2_regex = r'^' + re.escape(e2) + r'\d+$'
    # Keep rows whose (atomMU, atomNU) pair matches (e1, e2) in either order.
    mask = ((ILOBSTER['atomMU'].str.contains(e1_regex) & ILOBSTER['atomNU'].str.contains(e2_regex)) |
            (ILOBSTER['atomNU'].str.contains(e1_regex) & ILOBSTER['atomMU'].str.contains(e2_regex)))
    return ILOBSTER[mask]
|
# Minimal interactive calculator: read two integers and an operator name
# (symbol or word), then print the result of the matching operation.
a = int(input("Enter any value:"))
b = int(input("Enter any value:"))
operator = (input("Enter any operator:"))
# Dispatch table: each accepted spelling pair maps to its operation.
_operations = {
    ('+', 'add'): lambda x, y: x + y,
    ('-', 'substraction'): lambda x, y: x - y,
    ('*', 'multiplication'): lambda x, y: x * y,
    ('/', 'division'): lambda x, y: x / y,
    ('//', 'floor division'): lambda x, y: x // y,
    ('**', 'exponent'): lambda x, y: x ** y,
}
for aliases, operation in _operations.items():
    if operator in aliases:
        print(operation(a, b))
        break
else:
    # no alias matched the entered operator
    print("Enter valid operator")
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging, ConfigParser, serial, sys
from Tkinter import Tk
from tkMessageBox import *
# Configure root logging to append DEBUG-and-above records to sms.log.
# Fix: the keyword was misspelled 'datafmt', which logging.basicConfig
# rejects with ValueError("Unrecognised argument(s)"); it must be 'datefmt'.
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s %(filename)s [line:%(lineno)d] %(levelname)s %(message)s',
                    datefmt='%Y-%m-%d %H:%M:%S',
                    filename='sms.log')
class Phone(object):
    """Phone-number helper that produces PDU-style nibble-swapped digits."""
    def __init__(self, phone):
        self.__phone = phone
    def encode(self):
        """Return '86' + number + 'F' padding with each character pair swapped.

        Assumes the padded string has even length (an 11-digit number plus
        the '86' prefix and 'F' pad gives 14 characters).
        """
        result = ''
        addr = '86' + self.__phone + 'F'
        for i in range(0, len(addr), 2):
            result = result + addr[i+1:i+2] + addr[i:i+1]
        return result
    # Build the SMS center (SMSC) number block.
    def get_sms_center(self):
        """Return a 2-hex-digit byte count followed by '91' + swapped digits."""
        # NOTE(review): '91' presumably marks international number format — confirm.
        center = '91' + self.encode()
        # Fix: floor division keeps the length an int on Python 3 as well;
        # '{0:=02X}'.format(8.0) raises ValueError. Result is identical on Python 2.
        return '{0:=02X}'.format(len(center) // 2) + center
class Message(object):
    """UCS-2 (UTF-16BE hex) encoder for an SMS text body."""
    def __init__(self, message):
        self.__message = message
    # Format the SMS content for a PDU frame.
    def encode(self):
        """Return a 2-hex-digit byte count followed by the UCS-2 hex payload."""
        # content = self.__message.encode('raw_unicode_escape').replace('\\u', '')
        content = ''.join(['{0:=04X}'.format(ord(x)) for x in self.__message])
        # Fix: floor division keeps the length an int on Python 3 as well;
        # '{0:=02X}'.format(2.0) raises ValueError. Result is identical on Python 2.
        return '{0:=02X}'.format(len(content) // 2) + content.upper()
class pdu(object):
    """PDU-mode SMS sender that drives a GSM modem over a serial port.

    Python 2 module (ConfigParser, 'except Exception, e', print statements).
    Configuration is read from conf/sd.conf: [serial] smscenter, port, baudrate.
    """
    def __init__(self):
        config = ConfigParser.ConfigParser()
        config.read('conf/sd.conf')
        # Pre-format the SMSC block: '0891' prefix + nibble-swapped digits.
        self.__sms_center = '0891' + Phone(config.get('serial', 'smscenter')).encode()
        try:
            self.__serial = serial.Serial(config.get('serial', 'port'), config.get('serial', 'baudrate'), timeout=1)
        except Exception, e:
            logging.error('cannot initiate serial port!')
            # Show a borderless Tk error dialog, then abort the process.
            root = Tk()
            root.overrideredirect(True)
            showerror('短信错误', '串口初始化失败!')
            sys.exit(1)
    # Send an SMS. Returns 0 on success, 2 on any failure.
    def send_message(self, phone, message):
        self.__sendto = phone
        self.__message = message
        print message
        try:
            self.open_serial()
            self.__serial.reset_input_buffer()
            self.__serial.reset_output_buffer()
            # Switch the modem to PDU mode (AT+CMGF=0).
            self.__serial.timeout = 1
            self.__serial.write('AT+CMGF=0\r\n')
            result = self.__serial.read(1024)
            if not 'OK' in result and not 'ok' in result:
                logging.error('AT+CMGF=0 -> ' + result.replace('\r\n', ''))
                return 2
            # Announce the payload length (2 hex chars per octet -> len/2).
            message_to_send = self.get_sms_content()
            self.__serial.write('AT+CMGS=' + str(len(message_to_send) / 2) + '\r\n')
            result = self.__serial.read(1024)
            if not '>' in result:
                logging.error('AT+CMGS= -> ' + result.replace('\r\n', ''))
                return 2
            # Send the PDU, terminated with Ctrl-Z (chr(26)); allow a longer timeout.
            self.__serial.timeout = 6
            self.__serial.write(self.__sms_center + message_to_send + chr(26))
            result = self.__serial.read(1024)
            if 'OK' in result:
                logging.info('send message "{0}" to "{1}", result:{2}'.format(message.encode('utf-8'), phone, result))
                return 0
            logging.error('message send -> ' + result.replace('\r\n', ''))
            return 2
        except Exception, e:
            logging.critical(e)
            return 2
    # Build the outgoing PDU: header flags + recipient + coding bytes + UCS-2 body.
    def get_sms_content(self):
        content = '11000D91' + Phone(self.__sendto).encode() + '000800' + Message(self.__message).encode()
        return content
    # Return the pre-formatted SMS center (SMSC) address block.
    def get_sms_center(self):
        return self.__sms_center
    # Reopen the serial port if it was closed.
    def open_serial(self):
        if not self.__serial.is_open:
            self.__serial.open()
            logging.debug("serial port open again!")
    # Close the serial port if it is open.
    def close_serial(self):
        if self.__serial.is_open:
            self.__serial.close()
|
#!/usr/bin/python
# Display movie on left and right viewport with arbitrary inter-viewport delay.
#
# Copyright (C) 2010-2013 Huang Xin
#
# See LICENSE.TXT that came with this file.
from __future__ import division
import sys
import random
import pygame
import numpy as np
from StimControl.LightStim.Core import DefaultScreen
from StimControl.LightStim.LightData import dictattr
from StimControl.LightStim.SweepSeque import TimingSeque
from StimControl.LightStim.FrameControl import FrameSweep
from StimControl.LightStim.SweepController import SweepSequeStimulusController
from StimControl.LightStim.Movie import SurfaceTextureObject, TimingSetMovie
# Create the 'left' and 'right' viewports on a black background.
DefaultScreen(['left','right'], bgcolor=(0.0,0.0,0.0))
argv = list(sys.argv)
# argv[1]: subject initials; prompt on stdin when not supplied (Python 2 raw_input).
subject = None
if len(argv) >= 2:
    subject = argv[1]
while subject is None:
    sys.stdout.write('Please input lowercase initials of subject name: ')
    subject = raw_input()
# argv[2]: inter-viewport delay in ms, converted to seconds
# (true division via the __future__ import at the top of the file).
interval = None
if len(argv) >= 3:
    interval = int(argv[2]) / 1000
while interval is None:
    sys.stdout.write('Please input stimulus interval in miliseconds: ')
    interval = int(raw_input()) / 1000
stim_interval = interval
# Positive interval delays the right viewport; negative delays the left.
pre_left = 0.0 if stim_interval > 0 else abs(stim_interval)
pre_right = 0.0 if stim_interval <= 0 else stim_interval
# argv[3]: optional stereo layout; anything but LR/TB falls back to 2D.
layout = None
if len(argv) >= 4:
    layout = argv[3]
if layout not in ("LR", "TB"):
    layout = "2D"
# The last argument is always the movie file to display.
filename = argv[-1]
movie = pygame.movie.Movie(filename)
width, height = movie.get_size()
# Both viewports render from this single shared decoding surface.
pygame_surface = pygame.surface.Surface((width,height))
movie.set_display(pygame_surface)
texture_object = SurfaceTextureObject(dimensions=2)
# Per-viewport stimulus parameters (right eye gets half contrast).
p_left = dictattr()
p_left.layout = layout
p_left.bgbrightness = 0.0
p_left.contrast = 1.0
p_right = dictattr()
p_right.layout = layout
p_right.bgbrightness = 0.0
p_right.contrast = 0.5
# 16 ms display cycles; 'pre' encodes the requested inter-viewport delay.
cycle_left = dictattr(duration=0.016, pre=pre_left, stimulus=0.016)
cycle_right = dictattr(duration=0.016, pre=pre_right, stimulus=0.016)
block_left = dictattr(repeat=None, cycle=cycle_left, interval=0.0)
block_right = dictattr(repeat=None, cycle=cycle_right, interval=0.0)
sequence_left = TimingSeque(repeat=1, block=block_left, shuffle=True)
sequence_right = TimingSeque(repeat=1, block=block_right, shuffle=True)
if __name__ == '__main__':
    # Build one timed movie stimulus per viewport and run the sweep.
    sweep = FrameSweep()
    movie_left = TimingSetMovie(viewport='left',
                                surface=pygame_surface, texture_obj=texture_object,
                                params=p_left, subject=subject, sweepseq=sequence_left)
    movie_right = TimingSetMovie(viewport='right',
                                 surface=pygame_surface, texture_obj=texture_object,
                                 params=p_right, subject=subject, sweepseq=sequence_right)
    sweep.add_stimulus(movie_left)
    sweep.add_stimulus(movie_right)
    # Stop decoding when the sweep quits.
    sweep.add_quit_callback(movie.stop)
    movie.play()
    sweep.go(prestim=0.5,poststim=0.5)
|
import os
import sys
import json
import argparse
# this returns the same hash for hitboxes that are "functionally equivalent"
# i.e. have the same post-hit effect and hit the same targets
def hitboxHash(hitbox):
    """Return a comma-joined key of the fields that define a hitbox's effect.

    Two hitboxes with equal hashes have the same post-hit effect and hit
    the same targets (grounded/airborne), i.e. they are functionally
    equivalent; other fields (position, size, ...) are ignored.
    """
    relevant = ("damage", "angle", "kbGrowth", "weightDepKb", "hitboxInteraction",
                "baseKb", "element", "shieldDamage", "hitGrounded", "hitAirborne")
    parts = [str(hitbox[key]) for key in relevant]
    return ",".join(parts)
def getPatch(datJsonFile, disableGrabs, zeroGravity):
    """Build byte-level patches for one character's .dat JSON dump.

    Walks every subaction's event stream, assigns each functionally
    equivalent hitbox (see hitboxHash) a small per-subaction GUID and
    writes that GUID into the damage byte of hitbox/adjustHitboxDamage
    events. Optionally neutralizes grab elements and zeroes gravity.
    Returns {sourceFile: [(offset, patch_bytes), ...]}.
    """
    with open(datJsonFile) as f:
        data = json.load(f)
    patch = []
    maxGuid = 0
    # file payload starts 0x20 bytes in; event offsets are relative to it
    dataOffset = 0x20
    for i, subaction in enumerate(data["nodes"][0]["data"]["subactions"]):
        eventStrOffset = dataOffset + subaction["eventsOffset"]
        hitboxes = {}        # hitbox-hash -> guid (per subaction)
        activeHitboxes = {}  # hitbox id -> most recent hitbox fields
        eventOffset = eventStrOffset
        for event in subaction["events"]:
            hitbox = None
            if "name" in event and event["name"] == "hitbox":
                hitbox = event["fields"]
                activeHitboxes[event["fields"]["id"]] = hitbox
                if disableGrabs and event["fields"]["element"] == "grab":
                    # the byte @ 17 has this layout: BEEEEEXS
                    # where B belongs to base knockback, E to element, X is unknown, S to shield damage
                    offset = eventOffset + 17
                    originalValue = bytes.fromhex(event["bytes"])[17]
                    # we want to zero out the element bits to make it 'normal'
                    patch.append((offset, bytes([0b10000011 & originalValue])))
            if "name" in event and event["name"] == "adjustHitboxDamage":
                hitboxId = event["fields"]["hitboxId"]
                if hitboxId in activeHitboxes:
                    # copy so the stored active hitbox keeps its original damage
                    hitbox = activeHitboxes[hitboxId].copy()
                    hitbox["damage"] = event["fields"]["damage"]
            if hitbox:
                hbHash = hitboxHash(hitbox)
                if hbHash in hitboxes:
                    hitboxGuid = hitboxes[hbHash]
                else:
                    # first sighting: assign the next sequential guid
                    hitboxGuid = len(hitboxes)
                    hitboxes[hbHash] = hitboxGuid
                # for hitbox: we are skipping the first bit of the damage field here
                # we don't want to set it anyways, but we also have to hope it's never 1
                # for the adjustHitboxDamage event: the damage is in the last byte (+3 too)
                damageOffset = eventOffset + 3
                if hitboxGuid > maxGuid:
                    maxGuid = hitboxGuid
                #print("{} {}: {}".format(hex(i), subaction["name"], hitboxGuid))
                # guard the assumptions baked into the patch format
                assert damageOffset < 0xffff
                assert hitboxGuid < 0xff
                patch.append((damageOffset, bytes([hitboxGuid])))
            # NOTE(review): assumes every event dict carries a "length" field.
            eventOffset += event["length"]
    print(maxGuid)
    if zeroGravity:
        # gravity sits at attribute slot 0x17; attributes are 4 bytes each
        gravityOffset = 0x17 * 4
        fullAttributesOffset = dataOffset + data["nodes"][0]["data"]["attributesOffset"]
        patch.append((fullAttributesOffset + gravityOffset, b'\0\0\0\0'))
    return {data["sourceFile"]: patch}
# CLI: walk a directory of .dat JSON dumps, build per-file patches and
# write them out as a single JSON patch file.
parser = argparse.ArgumentParser(description="Generate patch data.")
parser.add_argument("datdumpsroot", help="The directory containing .dat json files.")
parser.add_argument("outfile", help="The json file to write the patch data to.")
parser.add_argument("--zerogravity", action="store_true", help="Adds data to the patch file that will set the gravity to 0 for all characters. This is useful so you can record aerial attacks without the hitbox interpolation distorting them by the character falling/rising.")
parser.add_argument("--disablegrabs", action="store_true", help="Changes all hitboxes with the grab effect to 'normal' so they will not all be rendered purple, but with the different colors they should have after being patched.")
args = parser.parse_args()
patch = {}
# Merge the {sourceFile: patches} dict from every .json dump in the root.
for file in os.listdir(args.datdumpsroot):
    path = os.path.join(args.datdumpsroot, file)
    if file.endswith(".json"):
        print(file)
        patch.update(getPatch(path, args.disablegrabs, args.zerogravity))
jsonPatch = {}
# preprocess before saving to json
for file in patch:
    jsonPatch[file] = []
    # save list of offset, byte-string (alternating)
    for i in range(len(patch[file])):
        jsonPatch[file].append(patch[file][i][0])
        jsonPatch[file].append(" ".join("{:02x}".format(byte) for byte in patch[file][i][1]))
# write out json file by hand to be a little bit more space efficient
with open(args.outfile, "w") as f:
    json.dump(jsonPatch, f)
# with open(args.outfile, "w") as f:
#     f.write("{\n")
#     for file in patch:
#         f.write('    "{}": [\n'.format(file))
#         for offset, data in patch[file]:
#             f.write('        [{}, "{}"],\n'.format(offset, data))
#         f.write("    ],\n")
#     f.write("}\n")
|
import pandas as pd
import csv
import nltk
import numpy as np
from nltk.corpus import stopwords
from nltk.stem import SnowballStemmer
import re
from sklearn.naive_bayes import GaussianNB
from sklearn import preprocessing
from sklearn.model_selection import train_test_split
from sklearn import metrics
from sklearn import preprocessing
from sklearn.feature_extraction.text import CountVectorizer
np.set_printoptions(threshold=np.inf)
def abonos_limpio(abono):
    """Normalize a credit ('abono') cell: empty string for NaN, float otherwise."""
    return "" if pd.isnull(abono) else float(abono)
def cargos_limpio(cargo):
    """Normalize a debit ('cargo') cell: empty string for NaN, float otherwise."""
    return "" if pd.isnull(cargo) else float(cargo)
def label_movimientos(mov):
    """Binary movement label: 0 when the cell is NaN/None, 1 otherwise."""
    return 0 if pd.isnull(mov) else 1
def descripcion_limpio(desc):
    """Strip stop-words / noise tokens from a transaction description.

    When the description contains ':', only the text after the first colon
    is cleaned (and 1-letter tokens are also dropped); otherwise the whole
    string is cleaned. Relies on the module-level `words` stop-word list.
    """
    if ':' in desc:
        tail = desc.split(":")[1]
        tokens = re.sub("[^a-zA-Z_]", " ", tail).split()
        kept = [tok for tok in tokens if tok not in words and len(tok) > 1]
    else:
        tokens = re.sub("[^a-zA-Z_]", " ", desc).split()
        kept = [tok for tok in tokens if tok not in words]
    return " ".join(kept).lower()
def transaccion_limpio(desc):
    """Extract the transaction type from a description.

    With a ':' present, returns the cleaned, lower-cased text before the
    first colon; otherwise scans tokens for a known payment/deposit keyword.
    Implicitly returns None when nothing matches.
    """
    if(desc.find(':') != -1 ):
        return " ".join([i for i in re.sub("[^a-zA-Z]"," ",desc.split(":")[0]).split()]).lower()
    else:
        desc = desc.lower()
        list_de_categorias = desc.split(" ")
        for j in list_de_categorias:
            # NOTE(review): multi-word list entries such as 'traspaso a' /
            # 'traspaso de' can never equal a single token j, so they are
            # unreachable here — confirm whether substring matching was intended.
            if(j in transac_pagos):
                return j
            elif(j in transac_recibos):
                return j
# Train a Gaussian Naive Bayes categorizer on a labeled category set, then
# predict a category for every row of an exported bank statement.
predicciones = []
path = '/Users/fneut/Desktop/PP/SalidaData2.csv'
path2 = '/Users/fneut/Desktop/PP/set_categorias.csv'
df = pd.read_csv(path)  # exported bank-statement ("cartola") dataframe
df2 = pd.read_csv(path2)  # labeled category training dataframe
stemmer = SnowballStemmer('spanish')  # NOTE(review): created but never used below
words = stopwords.words('spanish')  # Spanish stop-word list used by the cleaners
transac_pagos = ['pago', 'traspaso a', 'giro', 'pagos']
transac_recibos = ['traspaso de', 'retiro']
######################################### TEST DATA #########################################
# Derive cleaned feature columns from the raw statement columns.
df['Des_limpio'] = df['Descripción'].apply(lambda x: descripcion_limpio(x))
df['Transaccion'] = df['Descripción'].apply(lambda x: transaccion_limpio(x))
df['Cargo_limpio'] = df['Cargos (CLP)'].apply(lambda x: cargos_limpio(x))
df['Abono_limpio'] = df['Abonos (CLP)'].apply(lambda x: abonos_limpio(x))
df['Saldo_limpio'] = df['Saldo (CLP)'].apply(lambda x: float(x))
df['movimiento'] = df['Cargos (CLP)'].apply(lambda x: label_movimientos(x))
# Clean the training descriptions the same way (stop-words + short tokens out).
df2['limpio'] = df2['Descripcion'].apply(lambda x: " ".join([i for i in re.sub("[^a-zA-Z]"," ",x).split() if (i not in words and len(i)>1)]).lower())
vectorizer = CountVectorizer()
print(df)
print(df2)
# Fit the bag-of-words vocabulary on the training descriptions.
vectorizer.fit(df2['limpio'])
X_counts = vectorizer.fit_transform(df2['limpio']).toarray()
print("get features names: "+str(vectorizer.get_feature_names()) + '\n')
counts = vectorizer.transform(df2['limpio'])
print("printing count" + '\n' + str(counts.toarray()))
# ######### ASSIGN NUMERIC LABELS TO CATEGORIES
le = preprocessing.LabelEncoder()
categoria_encoded=le.fit_transform(df2['Categoria'])
######################################### MODEL #########################################
X_train = X_counts
gnb = GaussianNB()
gnb.fit(X_train,categoria_encoded)
######################################### TEST #########################################
# Vectorize the statement descriptions with the training vocabulary and predict.
X_counts2 = vectorizer.transform(df['Des_limpio']).toarray()
predicted= gnb.predict(X_counts2)
print("\n")
print("######################################### Input data: ######################################### " + "\n")
print(df[['Descripción','Cargos (CLP)','Abonos (CLP)','Saldo (CLP)']])
# print("######################################### Los cargos de entrada son: ######################################### " + "\n")
print("\n")
print("######################################### Resultado del modelo: ######################################### " + "\n")
# Map predicted label indices back to category names.
for numero,x in enumerate(predicted):
    predicciones.append(list(le.classes_)[x])
df = df.assign(Categoria = predicciones)
with pd.option_context('display.max_rows', None):
    print(df[['Descripción','Cargos (CLP)','Abonos (CLP)','Saldo (CLP)','Categoria']])
#"""
|
# from django.conf import settings
from django.core import checks
@checks.register
def check_settings(app_configs, **kwargs):
    """Django system-check hook; currently reports no issues.

    Temporary solution: returns an empty issue list until real settings
    validation is implemented.
    """
    issues = []
    return issues
|
import os
# django imports
import humanize
import requests
from django.contrib.auth.models import AnonymousUser
from django.core.files import File as DjangoCoreFile
from django.http import StreamingHttpResponse
from folder.decorators import (allow_parent_root, check_id_parent_folder,
check_is_owner_parent_folder,
check_parent_folder_not_trashed,
check_request_attr, check_valid_name)
from rest_framework import status
from rest_framework.parsers import FormParser, JSONParser, MultiPartParser
from rest_framework.permissions import IsAuthenticatedOrReadOnly
from rest_framework.response import Response
from rest_framework.views import APIView
# local imports
from user.serializers import ProfileSerializer, UserSerializer
from user.tasks import send_mail, sync_send_mail
from user.utils import get_client_server
from file.tasks import remove_file
from .decorators import *
from .serializers import FileSerializer
from .utils import create_file, get_presigned_url, get_cloud_filename, rename_s3, upload_file_to_s3, delete_s3
from folder.utils import propagate_size_change
# Expected request attributes for the file endpoints.
# NOTE(review): the views below pass literal lists to check_request_attr
# instead of these constants — confirm whether they are used elsewhere.
POST_FILE = ["file", "PARENT"]
PATCH_FILE = ["id"]
REQUIRED_DRIVE_POST_PARAMS = ["PARENT", "DRIVE_URL", "NAME"]
class FileView(APIView):
    """CRUD API for File objects: fetch metadata, upload (with optional
    in-place replace), swap contents, partial update, and delete."""
    parser_classes = (MultiPartParser, FormParser, JSONParser)

    @check_id_file
    @check_has_access_file
    @check_file_not_trashed
    @update_last_modified_file
    def get(self, request, *args, **kwargs):
        """Return serialized metadata for the file identified by ?id=."""
        id = request.GET["id"]
        file = File.objects.get(id=id)
        data = FileSerializer(file).data
        return Response(data=data, status=status.HTTP_200_OK)

    @check_request_attr(["file", "PARENT", "REPLACE"])
    # @check_valid_name_request_file
    @allow_parent_root
    @check_id_parent_folder
    @check_is_owner_parent_folder
    @check_parent_folder_not_trashed
    @check_already_present(to_check="req_file_name")
    @check_storage_available_file_upload
    def post(self, request, *args, **kwargs):
        """Upload one or more files under PARENT.

        With REPLACE set, a same-named non-trashed sibling is overwritten
        in place (cloud object swapped, shares reset) instead of creating
        a duplicate.
        """
        if(not request.data.get("REPLACE")):
            # regular post request: create a new File per uploaded file
            parent_id = request.data["PARENT"]
            parent = Folder.objects.get(id=parent_id)
            data = []
            for req_file in request.FILES.getlist('file'):
                req_file_name = req_file.name
                new_file = create_file(
                    request.user, req_file, parent, req_file_name, req_file.size)
                request.user.profile.storage_used += req_file.size
                request.user.profile.save()
                new_file = FileSerializer(new_file).data
                data.append(new_file)
            storage_data = ProfileSerializer(
                request.user.profile).data["storage_data"]
            return Response(data={"file_data": data, **storage_data}, status=status.HTTP_201_CREATED)
        else:
            parent_id = request.data["PARENT"]
            parent = Folder.objects.get(id=parent_id)
            data = []
            for req_file in request.FILES.getlist('file'):
                req_file_name = req_file.name
                # if a duplicate is found, replace its contents in place
                children = parent.children_file.all().filter(name=req_file_name, trash=False)
                if(children):
                    print("old s3 key = ", children[0].get_cloud_storage_key())
                    new_file, _ = self.manage_file_fileObj_update(
                        children[0], req_file, request.user.profile)
                    print("new_file s3 key = ", new_file.get_cloud_storage_key())
                else:
                    new_file = create_file(
                        request.user, req_file, parent, req_file_name, req_file.size)
                    request.user.profile.storage_used += req_file.size
                    request.user.profile.save()
                new_file = FileSerializer(new_file).data
                data.append(new_file)
            storage_data = ProfileSerializer(
                request.user.profile).data["storage_data"]
            return Response(data={"file_data": data, **storage_data}, status=status.HTTP_201_CREATED)

    def manage_file_fileObj_update(self, file, req_file, profile):
        """Replace `file`'s cloud object with `req_file`'s contents.

        Resets all shares, updates the owner's storage accounting and the
        ancestor folders' sizes. Returns (file, profile).
        """
        old_file_cloud_storage_key = file.get_cloud_storage_key()
        old_file_size = file.size
        # attaching new s3 file under the same key
        delete_s3(old_file_cloud_storage_key)
        new_key = upload_file_to_s3(req_file, old_file_cloud_storage_key)
        # making changes to file details
        # [6:] removes the leading 'media/' from the key
        file.file.name = new_key[6:]
        file.size = req_file.size
        # replaced contents are no longer what was shared — reset shares
        file.shared_among.set([])
        file.present_in_shared_me_of.set([])
        file.save()
        # making changes to storage
        profile.storage_used += req_file.size - old_file_size
        profile.save()
        # making changes to parent folders
        propagate_size_change(file.parent, req_file.size - old_file_size)
        return file, profile

    @check_request_attr(["id", "file"])
    @check_id_file
    @check_is_owner_file
    @check_storage_available_file_upload
    def put(self, request, *args, **kwargs):
        """Replace the contents of an existing file with the uploaded one."""
        # getting old details
        id = request.data["id"]
        file = File.objects.get(id=id)
        req_file = request.FILES['file']
        file, profile = self.manage_file_fileObj_update(
            file, req_file, request.user.profile)
        data = FileSerializer(file).data
        storage_data = ProfileSerializer(
            profile).data["storage_data"]
        return Response(data={"file_data": data, **storage_data}, status=status.HTTP_200_OK)

    @check_valid_name
    @check_id_file
    @check_is_owner_file
    @check_file_not_trashed
    @check_already_present(to_check="req_data_name")
    def patch(self, request, *args, **kwargs):
        """Partially update a file: trash, name, privacy, favourite, or the
        shared_among user list. Saves only when something changed."""
        id = request.data["id"]
        file = File.objects.get(id=id)
        # Fix: 'updated' was read at the end without ever being initialized,
        # raising NameError when none of the recognized fields was present.
        updated = False
        if("trash" in request.data):
            new_trash = request.data["trash"]
            # only moving *into* trash is allowed here
            if(new_trash):
                # file was not trashed yet
                if(new_trash != file.trash):
                    updated = True
                    file.trash = new_trash
                else:
                    return Response(data={"message": "Already in Trash"}, status=status.HTTP_400_BAD_REQUEST)
            else:
                return Response(data={"message": "Use Recovery route to recover file"}, status=status.HTTP_400_BAD_REQUEST)
        if("name" in request.data):
            updated = True
            new_name_file_system = request.data["name"]
            # will use these rename lines just before download
            # new_path = os.path.join(settings.MEDIA_ROOT, new_name)
            # initial_path = file_obj.file.path
            # os.rename(initial_path, new_path)
            old_file_key = file.get_cloud_storage_key()
            s3_new_filename = get_cloud_filename(new_name_file_system)
            new_file_key = file.make_key(s3_new_filename)
            rename_s3(old_file_key, new_file_key)
            file.file.name = s3_new_filename
            file.name = new_name_file_system
        if("privacy" in request.data):
            updated = True
            file.privacy = request.data["privacy"]
        if("favourite" in request.data):
            updated = True
            file.favourite = request.data["favourite"]
        if("shared_among" in request.data):
            updated = True
            ids = request.data["shared_among"]
            # make unique & discard owner
            ids = set(ids)
            ids.discard(file.owner.id)
            ids = list(ids)
            try:
                users = [User.objects.get(pk=id)
                         for id in ids]
                # mail only the users who are newly added to the share list
                users_for_mail = []
                for user in users:
                    if(user not in file.shared_among.all()):
                        users_for_mail.append(user)
                users_json = UserSerializer(users_for_mail, many=True).data
                client = get_client_server(request)["client"]
                title_kwargs = {
                    "sender_name": f"{request.user.first_name} {request.user.last_name} ({request.user.username})",
                    "resource_name": f'a file "{file.name}"'
                }
                body_kwargs = {
                    "resource_url": f"{client}/share/file/{file.id}"
                }
                sync_send_mail("SHARED_WITH_ME", users_json,
                               title_kwargs, body_kwargs)
            except Exception as e:
                print(e)
                return Response(data={"message": "invalid share id list", "exception": str(e)}, status=status.HTTP_400_BAD_REQUEST)
            file.shared_among.set(users)
            file.present_in_shared_me_of.set(users)
        if(updated):
            file.save()
        data = FileSerializer(file).data
        return Response(data=data, status=status.HTTP_200_OK)

    def manage_file_delete(self, file):
        """Delete the File row and release its size from the owner's quota
        and all ancestor folders."""
        profile = file.owner.profile
        size = file.size
        parent = file.parent
        file.delete()
        profile.storage_used -= size
        profile.save()
        propagate_size_change(parent, -size)

    @check_id_file
    @check_is_owner_file
    def delete(self, request, *args, **kwargs):
        """Delete the file given by id and report updated storage usage."""
        id = get_id(request)
        file = File.objects.get(id=id)
        self.manage_file_delete(file)
        storage_data = ProfileSerializer(
            file.owner.profile).data["storage_data"]
        return Response(data={"id": id, "storage_data": storage_data, "type": "file"}, status=status.HTTP_200_OK)
class UploadByDriveUrl(FileView):
    """Create or replace a File whose contents are fetched from a drive URL."""

    def get_django_file_object(self, drive_url, name):
        """Download drive_url into a local temp file and wrap it as a Django File.

        Returns (djangofile, cloud_filename); djangofile is None when the
        download fails, so callers can answer with HTTP 400.
        """
        cloud_filename = get_cloud_filename(name)
        try:
            r = requests.get(drive_url, allow_redirects=True)
        except Exception:
            # Fix: this used to return a DRF Response, which callers then
            # unpacked as a 2-tuple and crashed with a TypeError.
            return None, cloud_filename
        # Fix: close the write handle instead of leaking it.
        with open(cloud_filename, 'wb') as temp_out:
            temp_out.write(r.content)
        local_file = open(cloud_filename, 'rb')
        djangofile = DjangoCoreFile(local_file)
        return djangofile, cloud_filename

    @check_request_attr(["PARENT", "DRIVE_URL", "NAME", "REPLACE"])
    @check_valid_name
    @allow_parent_root
    @check_id_parent_folder
    # checking storage available inside the function
    @check_already_present(to_check="req_data_name")
    def post(self, request, *args, **kwargs):
        """Create (or, with REPLACE, overwrite) a file from DRIVE_URL."""
        # getting request attrs
        parent = request.data["PARENT"]
        drive_url = request.data["DRIVE_URL"]
        name = request.data["NAME"]
        replace_flag = request.data["REPLACE"]
        parent_folder = Folder.objects.get(id=parent)
        children = parent_folder.children_file.all().filter(name=name, trash=False)
        if(children and replace_flag):
            # replace the duplicate's contents in place
            djangofile, _ = self.get_django_file_object(drive_url, name)
            if djangofile is None:
                return Response(data={"message": "Invalid URL"}, status=status.HTTP_400_BAD_REQUEST)
            new_file, profile = self.manage_file_fileObj_update(
                children[0], djangofile, request.user.profile)
            data = FileSerializer(new_file).data
            storage_data = ProfileSerializer(profile).data["storage_data"]
            return Response(data={"file_data": data, **storage_data}, status=status.HTTP_200_OK)
        # getting the django file object
        djangofile, cloud_filename = self.get_django_file_object(
            drive_url, name)
        if djangofile is None:
            return Response(data={"message": "Invalid URL"}, status=status.HTTP_400_BAD_REQUEST)
        # checking storage available or not
        profile = request.user.profile
        if(djangofile.size + profile.storage_used > profile.storage_avail):
            os.remove(cloud_filename)
            return Response(data={"message": "Insufficient space"}, status=status.HTTP_400_BAD_REQUEST)
        # making File object
        file = create_file(
            request.user, djangofile, parent_folder, name, djangofile.size)
        file.save()
        # remove temp file
        os.remove(cloud_filename)
        # remove_file.delay(cloud_filename)
        # making change to storage data
        # NOTE(review): this delta is always 0 (size - size); presumably it
        # should propagate +djangofile.size unless create_file already does
        # the propagation — confirm before changing.
        propagate_size_change(file.parent, djangofile.size - djangofile.size)
        profile.storage_used += djangofile.size
        profile.save()
        # returning response
        data = FileSerializer(file).data
        storage_data = ProfileSerializer(
            file.owner.profile).data["storage_data"]
        return Response(data={"file_data": data, **storage_data}, status=status.HTTP_201_CREATED)

    @check_request_attr(["id", "DRIVE_URL"])
    @check_id_file
    @check_is_owner_file
    # checking storage available inside the function
    def put(self, request, *args, **kwargs):
        """Replace an existing file's contents with data fetched from DRIVE_URL."""
        drive_url = request.data["DRIVE_URL"]
        # getting old details
        id = request.data["id"]
        file = File.objects.get(id=id)
        old_file_cloud_storage_key = file.get_cloud_storage_key()
        old_file_size = file.size
        djangofile, cloud_filename = self.get_django_file_object(
            drive_url, file.name)
        if djangofile is None:
            return Response(data={"message": "Invalid URL"}, status=status.HTTP_400_BAD_REQUEST)
        # checking storage available or not
        profile = request.user.profile
        if(djangofile.size - old_file_size + profile.storage_used > profile.storage_avail):
            os.remove(cloud_filename)
            return Response(data={"message": "Insufficient space"}, status=status.HTTP_400_BAD_REQUEST)
        # attaching new s3 file
        delete_s3(old_file_cloud_storage_key)
        new_key = upload_file_to_s3(djangofile, old_file_cloud_storage_key)
        # making changes to file details
        # NOTE(review): FileView.manage_file_fileObj_update strips the
        # 'media/' prefix (new_key[6:]) but this path stores the full key —
        # confirm which form the storage backend expects.
        file.file.name = new_key
        file.size = djangofile.size
        file.shared_among.set([])
        file.present_in_shared_me_of.set([])
        # Fix: the modified File was never saved, so the new name/size and
        # cleared shares were silently dropped.
        file.save()
        # making changes to storage
        profile.storage_used += djangofile.size - old_file_size
        profile.save()
        # making changes to parent folders
        propagate_size_change(file.parent, djangofile.size - old_file_size)
        # remove temp file
        os.remove(cloud_filename)
        # remove_file.delay(cloud_filename)
        data = FileSerializer(file).data
        storage_data = ProfileSerializer(profile).data["storage_data"]
        return Response(data={"file_data": data, **storage_data}, status=status.HTTP_200_OK)
class DownloadFile(APIView):
    """Hand out a presigned URL for downloading a file's cloud object."""
    @check_id_file
    @check_has_access_file
    @check_file_not_trashed
    @update_last_modified_file
    def get(self, request, *args, **kwargs):
        """Return {"url": presigned_url} for the file identified by ?id=."""
        target = File.objects.get(id=request.GET["id"])
        download_url = get_presigned_url(target.get_cloud_storage_key())
        return Response(data={"url": download_url}, status=status.HTTP_200_OK)
|
# coding: utf-8
def longest_common_subsequence(a, b):
    """Return one longest common subsequence of strings `a` and `b`.

    Fix: the original sized the DP table len(a) x len(b) and leaned on
    Python's negative-index wraparound (`dp[-1]`, `dp[i][-1]`), which reads
    rows/columns that belong to the *end* of the table and can poison the
    values (e.g. a='ba', b='ab' produced dp[1][0] == 2). The standard
    (m+1) x (n+1) table with a zero border removes that entirely.
    """
    m, n = len(a), len(b)
    # dp[i][j] = LCS length of a[:i] and b[:j]
    dp = [[0] * (n + 1) for _ in range(m + 1)]
    for i in range(1, m + 1):
        for j in range(1, n + 1):
            if a[i - 1] == b[j - 1]:
                dp[i][j] = dp[i - 1][j - 1] + 1
            else:
                dp[i][j] = max(dp[i][j - 1], dp[i - 1][j])
    # Backtrack from the bottom-right corner, collecting matched characters
    # in reverse order; ties prefer moving up, matching the original walk.
    res = []
    i, j = m, n
    while i > 0 and j > 0:
        if a[i - 1] == b[j - 1]:
            res.append(a[i - 1])
            i -= 1
            j -= 1
        elif dp[i][j] == dp[i - 1][j]:
            i -= 1
        else:
            j -= 1
    return ''.join(reversed(res))


if __name__ == "__main__":
    assert longest_common_subsequence('abcd', 'e') == ''
    assert longest_common_subsequence('abcd', 'bwdb') == 'bd'
    assert longest_common_subsequence('asdf', 'bsedf') == 'sdf'
    assert longest_common_subsequence('sdsdfsd', 'bsdfsedf') == 'sdsdf'
'''
10. Given a number line from -infinity to +infinity.
You start at 0 and can go either to the left or to the right.
The condition is that in i'th move, you take i steps.
In the first move take 1 step, second move 2 steps and so on.
Hint: 3 can be reached in 2 steps (0, 1) (1, 3). 2 can be reached in 3 steps (0, 1) (1,-1) (-1, 2)
a) Find the optimal number of steps to reach position 1000000000 and -1000000000.
'''


def min_steps(target):
    """Return the minimum number of moves needed to reach `target` from 0,
    where move i covers exactly i steps in either direction.

    Keep taking steps 1, 2, 3, ... until the running total reaches |target|
    with the same parity: flipping the sign of an earlier step of value v
    changes the total by 2*v (an even amount), so any even surplus can be
    cancelled without extra moves.

    Fixes over the original script:
    - it used the Python 2 `print` statement (SyntaxError under Python 3);
    - `num`/`step_counter` were never reset between the two targets, so the
      second answer was computed from the first one's final state;
    - the greedy overshoot walk is not guaranteed optimal.
    """
    target = abs(target)  # the number line is symmetric around 0
    steps = 0
    total = 0
    while total < target or (total - target) % 2 != 0:
        steps += 1
        total += steps
    return steps


pos1 = 1000000000
pos2 = -1000000000
for val in (pos1, pos2):
    print('The number of steps to reach %s is : %s' % (val, min_steps(val)))
from flask import Flask, render_template, request, session, redirect, url_for
from authenticate import authenticate

app = Flask(__name__)


@app.route("/")
@app.route("/home")
def home():
    """Landing page."""
    return render_template("home.html")


@app.route("/login", methods=["GET", "POST"])
def login():
    """Show the login form (GET) or validate submitted credentials (POST)."""
    if request.method == "GET":
        return render_template("login.html")
    user = request.form['user']
    password = request.form['pass']
    if authenticate(user, password):
        return redirect(url_for('secret'))
    # Fix: the original fell off the end (returned None -> 500) on bad
    # credentials; send the user back to the login form instead.
    return redirect(url_for('login'))


@app.route("/logout")
def logout():
    """Mark the session as logged out.

    Fix: the original also assigned into an undefined name `form`
    (NameError on every logout); request form data is read-only anyway.
    """
    session['n'] = 0
    return render_template("home.html")


@app.route("/secret")
def secret():
    # Fix: `if n not in session` referenced an undefined bare name `n`
    # (NameError); the session key is the *string* 'n'.
    if 'n' not in session:
        session['n'] = 1
    return render_template("secret.html")


@app.route("/random")
def random():
    # Fix: `Math.random()` is JavaScript; use Python's random module.
    # Imported locally because this view function shadows the module name.
    from random import random as _random
    number = _random() * 100
    return render_template("random.html", n=number)


if __name__ == "__main__":
    app.debug = True
    app.secret_key = "Keyblade"
    app.run(host="0.0.0.0", port=8000)
|
#! python3
# BENCH PRESS ONE REP MAX
'''
Formula
https://www.unm.edu/~rrobergs/478RMStrengthPrediction.pdf
Bryzcki
1RM = weight / (1.0278 - (0.0278 * reps))
weight = 1RM * (1.0278 - (0.0278 * reps))
O'Connor
1RM = (0.025 * (weight * reps)) + weight
Formula
%1RM = 55.51 * e^(-0.0723 * reps) + 48.47
reps = (log(((1RM - 48.47) / 55.51)))/-0.0723
'''
from math import exp
import os
def check_float(number):
    """Return True when `number` can be parsed as a float, else False."""
    try:
        float(number)
    except ValueError:
        return False
    return True
def round_to(weight, precision=0.25):
    """Round `weight` to the nearest multiple of `precision` (default 0.25 kg)."""
    remainder = weight % precision
    if remainder >= precision / 2:
        # closer to the multiple above
        return weight + (precision - remainder)
    # closer to (or exactly on) the multiple below
    return weight - remainder
def rm_bryzcki(weight, reps):
    """Estimate the one-rep max from a `weight` x `reps` set (Brzycki formula)."""
    denominator = 1.0278 - (0.0278 * reps)
    return weight / denominator
def rm_oconnor(weight, reps):
    """Estimate the one-rep max from a `weight` x `reps` set (O'Connor formula)."""
    bonus = 0.025 * (weight * reps)
    return bonus + weight
def rm_mean(weight, reps):
    """Estimate the 1RM as the average of the Brzycki and O'Connor estimates,
    rounded to two decimals."""
    estimate = (rm_bryzcki(weight, reps) + rm_oconnor(weight, reps)) / 2
    return round(estimate, 2)
def rm_table(rm):
    """Map each rep count 2..20 to the estimated weight liftable for that
    many reps, derived from the one-rep max `rm`."""
    table = {}
    for rep in range(2, 21):
        # fraction of the 1RM achievable at this rep count
        fraction = (55.51 * exp(-0.0723 * rep) + 48.47) / 100
        table[rep] = round(rm * fraction, 2)
    return table
# Input
os.system('cls')  # Windows-only clear screen; harmlessly fails elsewhere
print('\nCalculate Your Bench Press One Rep Max (1RM)')
print('Enter weight and reps of your maxed set.\n')
weight = input('Weight (kg): ')
# Format check
while check_float(weight) == False:
    print('Enter a number.')
    weight = input('Weight (kg): ')
weight = float(weight)
reps = input('Reps: ')
# Format check
while reps.isdigit() == False:
    print('Enter a integer.')
    reps = input('Reps: ')
reps = float(reps)
# Estimate 1RM
# Fix: the original wrote `rm_mean = rm_mean(weight, reps)`, rebinding the
# function name to its float result and destroying the function; keep the
# result under its own name instead.
one_rm = rm_mean(weight, reps)
# Print 1RM
print('\n\n################')
print(f'\n 1RM: {round_to(one_rm):.2f} kg\n')
print('################\n')
# Calculate table of RM
rm_range = rm_table(one_rm)
# Print table
print('Reps Weight (kg)')
print(f'{1:>3}: {round_to(one_rm):>8.2f}')
for el in rm_range:
    print(f'{el:>3}: {round_to(rm_range[el]):>8.2f}')
# Keep window open
input()
|
import os,random
import tkinter as tk
from PIL import Image, ImageTk
from playsound import playsound
# Relative image paths for each clothing category, read once at startup.
# Fix: str('tops/') etc. were redundant str() calls around string literals.
TOPS = ['tops/' + imgFile for imgFile in os.listdir('tops/')]
BOTTOMS = ['bottoms/' + imgFile for imgFile in os.listdir('bottoms/')]
SHOES = ['shoes/' + imgFile for imgFile in os.listdir('shoes/')]
class OutfitApp:
    """Tkinter window that displays a top, a bottom, and a pair of shoes,
    with Prev/Next cycling per category and a random-outfit button.

    State: `topImg`/`bottomImg`/`shoeImg` hold the path lists, while
    `topPath`/`bottomPath`/`shoePath` track the image currently shown.
    """

    def __init__(self, root):
        """Build the three image frames (initially showing the first image
        of each category) and assemble the window."""
        self.root = root
        self.topImg = TOPS
        self.bottomImg = BOTTOMS
        self.topPath = self.topImg[0]
        self.bottomPath = self.bottomImg[0]
        self.topFrame = tk.Frame(self.root, bg = '#d0b4dc')
        self.bottomFrame = tk.Frame(self.root, bg = '#d0b4dc')
        self.topLabel = self.display_photo(self.topPath, self.topFrame)
        self.topLabel.pack(side=tk.TOP)
        self.bottomLabel = self.display_photo(self.bottomPath, self.bottomFrame)
        self.bottomLabel.pack(side=tk.TOP)
        # shoes mirror the top/bottom setup
        self.shoeImg = SHOES
        self.shoePath = self.shoeImg[0]
        self.shoeFrame = tk.Frame(self.root, bg = '#d0b4dc')
        self.shoeLabel = self.display_photo(self.shoePath, self.shoeFrame)
        self.shoeLabel.pack(side=tk.TOP)
        self.create_display()

    def create_display(self):
        """Lay out the title, instructions, buttons, and the three frames."""
        self.root.title("Choose My Outfit")
        lbl=tk.Label(self.root, text="Choose My Outfit", bg = '#a95aec', fg='black', font=("Comic Sans MS", 16), width=600, pady=5)
        lbl.pack(side=tk.TOP)
        text=tk.Label(self.root, text="Select the 'Create Outfit' button to randomly choose an outfit or ", bg = '#d0b4dc', fg='navy', font=("Comic Sans MS", 10))
        text2=tk.Label(self.root, text = "use the surrounding buttons for your own customization. ", bg = '#d0b4dc', fg='navy', font=("Comic Sans MS", 10))
        text.pack( anchor = tk.CENTER, fill = tk.BOTH)
        text2.pack( anchor = tk.CENTER, fill = tk.BOTH)
        self.root.geometry('420x680')
        self.btns()
        self.topFrame.pack(fill = tk.BOTH, expand = tk.YES)
        self.bottomFrame.pack(fill = tk.BOTH, expand = tk.YES)
        self.shoeFrame.pack(fill = tk.BOTH, expand = tk.YES)

    def display_photo(self, imgPath, frame):
        """Load `imgPath`, resize to 150x150, and return a Label showing it.

        The PhotoImage is attached to the label (`labelImg.image`) to keep a
        reference, otherwise Tk garbage-collects the image.
        """
        img = Image.open(imgPath)
        # NOTE(review): Image.ANTIALIAS was removed in Pillow 10 (use
        # Image.LANCZOS there) — confirm the pinned Pillow version.
        newImg = img.resize((150,150), Image.ANTIALIAS)
        tkImg = ImageTk.PhotoImage(newImg)
        labelImg = tk.Label(frame, image = tkImg, anchor = tk.CENTER)
        labelImg.image = tkImg
        return labelImg

    def btns(self):
        """Create the Prev/Next buttons per category and the Create Outfit button."""
        topPrev = tk.Button(self.topFrame, text="Prev", command = self.prevTop, padx=10,font=("Comic Sans MS",12), activebackground='purple', activeforeground='white')
        topPrev.pack(side = tk.LEFT)
        topNext = tk.Button(self.topFrame, text="Next", command = self.nextTop , padx=10, font=("Comic Sans MS",12), activebackground='purple', activeforeground='white')
        topNext.pack(side = tk.RIGHT)
        bottomPrev = tk.Button(self.bottomFrame, text="Prev", command = self.prevBottom, padx=10, font=("Comic Sans MS",12), activebackground='purple', activeforeground='white' )
        bottomPrev.pack(side = tk.LEFT)
        bottomNext = tk.Button(self.bottomFrame, text="Next", command = self.nextBottom, padx=10, font=("Comic Sans MS",12), activebackground='purple', activeforeground='white' )
        bottomNext.pack(side = tk.RIGHT)
        makeOutfit = tk.Button(self.topFrame, text = "Create Outfit", command = self.createOutfit, font=("Comic Sans MS",14), padx=15, activebackground='purple', activeforeground='white')
        makeOutfit.pack(anchor = tk.CENTER)
        shoePrev = tk.Button(self.shoeFrame, text="Prev", command = self.prevShoe, padx=10, font=("Comic Sans MS",12), activebackground='purple', activeforeground='white' )
        shoePrev.pack(side = tk.LEFT)
        shoeNext = tk.Button(self.shoeFrame, text="Next", command = self.nextShoe, padx=10, font=("Comic Sans MS",12), activebackground='purple', activeforeground='white' )
        shoeNext.pack(side = tk.RIGHT)

    def btnNext(self, current, group, incr = True):
        """Advance (or rewind, when incr=False) within `group`, wrapping at
        the ends, then show the chosen image in the matching label.

        Which category gets updated is inferred from which list `current`
        belongs to (top, then bottom, else shoes).
        """
        index = group.index(current)
        final = len(group) - 1
        nexti = 0
        if incr and index == final:
            nexti = 0            # wrap forward to the first image
        elif not incr and index == 0:
            nexti = final        # wrap backward to the last image
        else:
            increment = 1 if incr else -1
            nexti = index + increment
        nextImg = group[nexti]
        if current in self.topImg:
            imgLabel = self.topLabel
            self.topPath = nextImg
        elif current in self.bottomImg:
            imgLabel = self.bottomLabel
            self.bottomPath = nextImg
        else:
            imgLabel = self.shoeLabel
            self.shoePath = nextImg
        self.updatePhoto(nextImg, imgLabel)

    def nextTop(self):
        self.btnNext(self.topPath, self.topImg, incr = True)

    def prevTop(self):
        self.btnNext(self.topPath, self.topImg, incr = False)

    def prevBottom(self):
        self.btnNext(self.bottomPath, self.bottomImg, incr = False)

    def nextBottom(self):
        self.btnNext(self.bottomPath, self.bottomImg, incr = True)

    def prevShoe(self):
        self.btnNext(self.shoePath, self.shoeImg, incr = False)

    def nextShoe(self):
        self.btnNext(self.shoePath, self.shoeImg, incr = True)

    def updatePhoto(self, imgPath, imgLabel):
        """Replace the image shown by `imgLabel` with the one at `imgPath`
        (resized to 150x150), keeping a reference on the label."""
        img = Image.open(imgPath)
        newImg = img.resize((150,150), Image.ANTIALIAS)
        tkImg = ImageTk.PhotoImage(newImg)
        imgLabel.configure(image = tkImg)
        imgLabel.image = tkImg

    def createOutfit(self):
        """Pick one random image per category and display all three."""
        top = random.randint(0, len(self.topImg)-1)
        bottom = random.randint(0, len(self.bottomImg)-1)
        shoe = random.randint(0, len(self.shoeImg)-1)
        self.updatePhoto(self.topImg[top], self.topLabel)
        self.updatePhoto(self.bottomImg[bottom], self.bottomLabel)
        self.updatePhoto(self.shoeImg[shoe], self.shoeLabel)
if __name__ == '__main__':
    # Build the Tk root window, attach the app, and run the event loop.
    window = tk.Tk()
    app = OutfitApp(window)
    window.mainloop()
class Persona():
    """A person with a name, an age, and a place of residence."""

    def __init__(self, nombre, edad, lugarResidencia):
        # Name-mangled "private" attributes, as in the original design.
        self.__nombre = nombre
        self.__edad = edad
        self.__lugarResidencia = lugarResidencia

    def descripcion(self):
        """Print a one-line description of this person."""
        print("Nombre: ", self.__nombre, " Edad : ", self.__edad, "Residencia: ", self.__lugarResidencia)
class Empleado(Persona):
    """A Persona who additionally has a salary and seniority (antiguadad)."""

    def __init__(self, salario, antiguadad, nombre, edad, recidencia):
        super().__init__(nombre, edad, recidencia)
        self.__salario = salario
        self.__antiguadad = antiguadad

    def descripcion(self):
        """Print the base person description, then the employment details."""
        super().descripcion()
        print("Salario: ", self.__salario, " Antiguadad: ", self.__antiguadad)
# Demo: build an employee and print its full description.
mipersona = Empleado(500000, 15, "kevin", 19, "Colombia")
mipersona.descripcion()
# coding: utf-8
# Copyright 2013 The Font Bakery Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# See AUTHORS.txt for the list of Authors and LICENSE.txt for the License.
import os.path as op
from bakery_lint.base import BakeryTestCase as TestCase
from bakery_cli.pifont import PiFont
from bakery_cli.utils import UpstreamDirectory
class FontTestPrepolation(TestCase):
    """Pre-interpolation checks: every font in a family must agree on glyph
    names, per-glyph contour counts, and per-glyph point counts."""

    name = __name__
    targets = ['upstream-repo']
    tool = 'lint'

    def test_family_glyph_names_match(self):
        """ Each font in family has matching glyph names? """
        directory = UpstreamDirectory(self.operator.path)
        glyphs = []
        for f in directory.get_fonts():
            font = PiFont(op.join(self.operator.path, f))
            glyphs_ = font.get_glyphs()
            if glyphs and glyphs != glyphs_:
                # TODO report which font
                self.fail('Family has different glyphs across fonts')
            # Fix: the original never assigned `glyphs` (its own TODO noted
            # the list was never populated), so `glyphs` stayed empty and
            # the mismatch check above could never fire.
            glyphs = glyphs_

    def test_font_prepolation_glyph_contours(self):
        """ Check that glyphs has same number of contours across family """
        directory = UpstreamDirectory(self.operator.path)
        # glyphcode -> contour count seen in the first font that has it
        glyphs = {}
        for f in directory.get_fonts():
            font = PiFont(op.join(self.operator.path, f))
            glyphs_ = font.get_glyphs()
            for glyphcode, glyphname in glyphs_:
                contours = font.get_contours_count(glyphname)
                if glyphcode in glyphs and glyphs[glyphcode] != contours:
                    msg = ('Number of contours of glyph "%s" does not match.'
                           ' Expected %s contours, but actual is %s contours')
                    self.fail(msg % (glyphname, glyphs[glyphcode], contours))
                glyphs[glyphcode] = contours

    def test_font_prepolation_glyph_points(self):
        """ Check that glyphs has same number of points across family """
        directory = UpstreamDirectory(self.operator.path)
        # glyphcode -> point count seen in the first font that has it
        glyphs = {}
        for f in directory.get_fonts():
            font = PiFont(op.join(self.operator.path, f))
            glyphs_ = font.get_glyphs()
            for g, glyphname in glyphs_:
                points = font.get_points_count(glyphname)
                if g in glyphs and glyphs[g] != points:
                    msg = ('Number of points of glyph "%s" does not match.'
                           ' Expected %s points, but actual is %s points')
                    self.fail(msg % (glyphname, glyphs[g], points))
                glyphs[g] = points
|
from django.contrib import admin
from .models import Book, Category, DiscountCash, DiscountPercent
# Register your models here.
@admin.register(Category)
class CategoryAdmin(admin.ModelAdmin):
    """Admin configuration for the Category model."""
    # Columns shown in the admin change-list for this model.
    list_display = ('title',)
    # Auto-fill the `slug` form field from `title` while typing.
    prepopulated_fields = {'slug': ('title',)}
@admin.register(Book)
class BookAdmin(admin.ModelAdmin):
    """Admin configuration for the Book model."""
    # 'categories' resolves to the method below, not a model field.
    list_display = ('title', 'author', 'stock', 'categories')
    list_display_links = ['title']
    # Auto-fill the `slug` form field from `title` while typing.
    prepopulated_fields = {'slug': ('title',)}
    search_fields = ('title',)
    # Custom change-list template override.
    change_list_template = "change.html"

    def categories(self, obj):
        """Render the book's category titles as a list in the change-list column."""
        return [cat.title for cat in obj.category.all()]
@admin.register(DiscountCash)
class DiscountCashAdmin(admin.ModelAdmin):
    """Admin configuration for fixed-amount discounts."""
    list_display = ('amount', 'valid_from', 'valid_to', 'active')
    list_filter = ('active', 'valid_from', 'valid_to')
    # NOTE(review): searches a 'name' field that does not appear in
    # list_display — confirm DiscountCash actually defines `name`.
    search_fields = ('name',)
@admin.register(DiscountPercent)
class DiscountPercentAdmin(admin.ModelAdmin):
    """Admin configuration for percentage discounts."""
    list_display = ('percentage', 'valid_from', 'valid_to', 'active')
    list_filter = ('active', 'valid_from', 'valid_to')
    # NOTE(review): searches a 'name' field that does not appear in
    # list_display — confirm DiscountPercent actually defines `name`.
    search_fields = ('name',)
|
class Deploy:
    """Trivial deployment stub."""

    @staticmethod
    def hello(str):
        """Return a hello-world greeting that includes `str`.

        The parameter name shadows the builtin `str`; kept unchanged for
        backward compatibility with keyword callers.
        """
        # Fix: the original had neither `self` nor @staticmethod, so it
        # worked only when called on the class (`Deploy.hello(x)`) and
        # raised TypeError when called on an instance.
        return 'Hello World: ' + str
|
import uuid
from core import plugin, model
from core.models import conduct, trigger, webui
from plugins.viewonce.models import action
class _viewonce(plugin._plugin):
    """Plugin providing view-once documents: installs the models plus a
    scheduled conduct/trigger/action pipeline that cleans up viewed items."""
    version = 0.1

    def install(self):
        """Register the plugin models and build the cleanup flow.

        Creates a conduct wired to a scheduled trigger and a cleanup action,
        places both nodes in the web UI, opens their ACLs to accessID "0",
        and enables everything. Returns True on completion.
        """
        # Register models
        model.registerModel("viewonce","_viewonce","_document","plugins.viewonce.models.viewonce")
        model.registerModel("viewonceSet","_viewonceSet","_action","plugins.viewonce.models.action")
        model.registerModel("viewonceGet","_viewonceGet","_action","plugins.viewonce.models.action")
        model.registerModel("viewonceCleanup","_viewonceCleanup","_action","plugins.viewonce.models.action")
        # new() returns an insert result; re-fetch each object as a class
        # instance before mutating it.
        c = conduct._conduct().new("viewonceCore")
        c = conduct._conduct().getAsClass(id=c.inserted_id)[0]
        t = trigger._trigger().new("viewonceCore")
        t = trigger._trigger().getAsClass(id=t.inserted_id)[0]
        a = action._viewonceCleanup().new("viewonceCore")
        a = action._viewonceCleanup().getAsClass(id=a.inserted_id)[0]
        c.triggers = [t._id]
        # Flow graph: trigger node -> cleanup action node.
        flowTriggerID = str(uuid.uuid4())
        flowActionID = str(uuid.uuid4())
        c.flow = [
            {
                "flowID" : flowTriggerID,
                "type" : "trigger",
                "triggerID" : t._id,
                "next" : [
                    {"flowID": flowActionID, "logic": True }
                ]
            },
            {
                "flowID" : flowActionID,
                "type" : "action",
                "actionID" : a._id,
                "next" : []
            }
        ]
        # UI placement for the two flow nodes (x offsets 0 and 100).
        webui._modelUI().new(c._id,{ "ids":[ { "accessID":"0","delete": True,"read": True,"write": True } ] },flowTriggerID,0,0,"")
        webui._modelUI().new(c._id,{ "ids":[ { "accessID":"0","delete": True,"read": True,"write": True } ] },flowActionID,100,0,"")
        c.acl = { "ids":[ { "accessID":"0","delete": True,"read": True,"write": True } ] }
        c.enabled = True
        c.update(["triggers","flow","enabled","acl"])
        t.acl = { "ids":[ { "accessID":"0","delete": True,"read": True,"write": True } ] }
        # Run the cleanup trigger on a 60-90 second schedule.
        t.schedule = "60-90s"
        t.enabled = True
        t.update(["schedule","enabled","acl"])
        a.acl = { "ids":[ { "accessID":"0","delete": True,"read": True,"write": True } ] }
        a.enabled = True
        a.update(["enabled","acl"])
        return True

    def uninstall(self):
        """Deregister the plugin's models; mirrors install(). Returns True."""
        # deregister models
        model.deregisterModel("viewonce","_viewonce","_document","plugins.viewonce.models.viewonce")
        model.deregisterModel("viewonceSet","_viewonceSet","_action","plugins.viewonce.models.action")
        model.deregisterModel("viewonceGet","_viewonceGet","_action","plugins.viewonce.models.action")
        model.deregisterModel("viewonceCleanup","_viewonceCleanup","_action","plugins.viewonce.models.action")
        return True

    def upgrade(self,LatestPluginVersion):
        """No migrations yet; placeholder for future version bumps."""
        pass
        # if self.version < 0.2:
|
# -*- coding: utf-8 -*-
import json
import os
class DworldMixin(object):
    """Mixin supplying data.world API auth headers and project-local
    settings persistence for the datakit-dworld plugin."""

    plugin_slug = 'datakit-dworld'

    def get_auth_headers(self):
        """HTTP headers for authenticated API requests (expects
        self.configs['api_token'])."""
        token = self.configs['api_token']
        return {
            'Authorization': 'Bearer {0}'.format(token),
            'Content-Type': 'application/json',
        }

    def get_project_path(self):
        """The project root, taken to be the current working directory."""
        return os.getcwd()

    def get_settings_path(self):
        """Absolute path of the plugin settings file inside the project."""
        return os.path.join(
            self.get_project_path(), 'config', 'datakit-dworld.json')

    def get_settings_data(self):
        """Load the settings JSON; an empty dict when the file is absent."""
        path = self.get_settings_path()
        if not os.path.exists(path):
            return {}
        with open(path, 'r') as handle:
            return json.load(handle)

    def save_settings_data(self, data):
        """Write `data` as JSON, creating the config directory if needed."""
        path = self.get_settings_path()
        os.makedirs(os.path.dirname(path), exist_ok=True)
        with open(path, 'w') as handle:
            json.dump(data, handle)
|
#!/usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright 2014 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
import math
import numpy as np
from neon.backends import CPU
from neon.params import (UniformValGen, AutoUniformValGen,
GaussianValGen, NormalValGen,
SparseEigenValGen, NodeNormalizedValGen,
IdentityValGen)
from neon.util.testing import assert_tensor_equal
class TestValInit(object):
    """Tests for the neon weight-initialization value generators, run on the
    CPU backend. Each test checks the generator's repr string and/or the
    shape and range of the values it produces."""

    def __init__(self):
        # this code gets called prior to each test
        self.be = CPU()

    def test_uni_basics(self):
        # Default uniform generator reports its default [0, 1) range.
        uni = UniformValGen(backend=self.be)
        assert str(uni) == ("UniformValGen utilizing CPU backend\n\t"
                            "low: 0.0, high: 1.0")

    def test_uni_gen(self):
        # Generated values fall inside the default half-open [0, 1) range.
        uni = UniformValGen(backend=self.be)
        res = uni.generate(shape=[1, 1])
        assert res.shape == (1, 1)
        out = self.be.empty((1, 1))
        self.be.min(res, axes=None, out=out)
        assert out.asnumpyarray() >= 0.0
        self.be.max(res, axes=None, out=out)
        assert out.asnumpyarray() < 1.0

    def test_uni_params(self):
        # Custom low/high bounds are reported and respected.
        low = -5.5
        high = 10.2
        uni = UniformValGen(backend=self.be, low=low, high=high)
        assert str(uni) == ("UniformValGen utilizing CPU backend\n\t"
                            "low: {low}, high: {high}".format(low=low,
                                                              high=high))
        res = uni.generate(shape=[4, 4])
        assert res.shape == (4, 4)
        out = self.be.empty((1, 1))
        self.be.min(res, axes=None, out=out)
        assert out.asnumpyarray() >= low
        self.be.max(res, axes=None, out=out)
        assert out.asnumpyarray() < high

    def test_autouni_gen(self):
        # Auto-ranged uniform: bounds are nan until generate() is called;
        # with relu=True the expected bound is sqrt(2) * 1/sqrt(fan).
        autouni = AutoUniformValGen(backend=self.be, relu=True)
        assert autouni.relu is True
        assert str(autouni) == ("AutoUniformValGen utilizing CPU backend\n\t"
                                "low: nan, high: nan")
        res = autouni.generate([3, 3])
        assert res.shape == (3, 3)
        out = self.be.empty((1, 1))
        self.be.min(res, axes=None, out=out)
        expected_val = math.sqrt(2) * (1.0 / math.sqrt(3))
        assert out.asnumpyarray() >= - expected_val
        self.be.max(res, axes=None, out=out)
        assert out.asnumpyarray() < expected_val

    def test_gaussian_gen(self):
        loc = 5
        scale = 2.0
        gauss = GaussianValGen(backend=self.be, loc=loc, scale=scale)
        assert str(gauss) == ("GaussianValGen utilizing CPU backend\n\t"
                              "loc: {}, scale: {}".format(loc, scale))
        res = gauss.generate([5, 10])
        assert res.shape == (5, 10)
        # TODO: test distribution of vals to ensure ~gaussian dist

    def test_normal_gen(self):
        # NormalValGen is an alias of GaussianValGen, hence the repr below.
        loc = -2.5
        scale = 3.0
        gauss = NormalValGen(backend=self.be, loc=loc, scale=scale)
        assert str(gauss) == ("GaussianValGen utilizing CPU backend\n\t"
                              "loc: {}, scale: {}".format(loc, scale))
        res = gauss.generate([9, 3])
        assert res.shape == (9, 3)
        # TODO: test distribution of vals to ensure ~gaussian dist

    def test_sparseeig_gen(self):
        sparseness = 10
        eigenvalue = 3.1
        eig = SparseEigenValGen(backend=self.be, sparseness=sparseness,
                                eigenvalue=eigenvalue)
        assert str(eig) == ("SparseEigenValGen utilizing CPU backend\n\t"
                            "sparseness: {}, eigenvalue: "
                            "{}".format(sparseness, eigenvalue))
        res = eig.generate([20, 20])
        assert res.shape == (20, 20)
        # TODO: test distribution of vals

    def test_nodenorm_gen(self):
        # Node-normalized bound: scale * sqrt(6) / sqrt(fan_in + fan_out).
        scale = 3.0
        nodenorm = NodeNormalizedValGen(backend=self.be, scale=scale)
        assert str(nodenorm) == ("NodeNormalizedValGen utilizing CPU backend"
                                 "\n\tscale: {}".format(scale))
        res = nodenorm.generate([8, 9])
        assert res.shape == (8, 9)
        out = self.be.empty((1, 1))
        self.be.min(res, axes=None, out=out)
        expected_val = scale * math.sqrt(6) / math.sqrt(8 + 9.)
        assert out.asnumpyarray() >= - expected_val
        self.be.max(res, axes=None, out=out)
        assert out.asnumpyarray() < expected_val

    def test_identity_gen(self):
        # Identity init produces a scaled (possibly rectangular) eye matrix.
        scale = 3.0
        target = scale * np.eye(9, 3)
        identity = IdentityValGen(backend=self.be, scale=scale)
        params = identity.generate([9, 3])
        assert_tensor_equal(params, target)
|
from django.urls import path, include
from rest_framework_nested import routers
from .views import QuizViewSet
# Router exposing the quiz API endpoints under the 'quiz' prefix.
quiz_router = routers.SimpleRouter()
# NOTE(review): `base_name` was renamed `basename` in DRF 3.9 — confirm the
# installed rest_framework version still accepts this keyword.
quiz_router.register('quiz', QuizViewSet, base_name='quiz')

urlpatterns = [
    path('', include(quiz_router.urls)),
]
|
# Demo of Python's arithmetic operators, printed one at a time.
print("Python Mathematical Operations")

print("Addition")
a = 12
b = 13
print("The addition of 12 and 13 is " + str(a + b))

print("Subtraction")
a = 10
b = 5
print("The subtraction of 10 and 5 is " + str(a - b))

print("Multiplication")
a = 10
b = 5
print("The multiplication of 10 and 5 is " + str(a * b))
# Fix: the original repeated the multiplication section verbatim a second time.

print("Division")
a = 13
b = 5
# Fix: the message claimed "10 and 5" while the operands are 13 and 5.
print("The division of 13 and 5 is " + str(a / b))

print("Floor Division")
a = 13
b = 5
print("The whole number of divison of 13 and 5 is " + str(a // b))

print("Modulas operator")
a = 13
b = 5
print("The remainder of the divison of 13 and 5 is " + str(a % b))
|
import pyodbc

# NOTE(review): database credentials are hard-coded in source — move them to
# environment variables or a config file before sharing/deploying.
cnxn = pyodbc.connect('DRIVER={SQL Server};'
                      r'SERVER=DEVACCESSA-PC\LOGIDEV2016;'  # raw string keeps the backslash literal
                      'DATABASE=DATAKS_MC;'
                      'UID=sa;'
                      'PWD=Logi2131')
                      # 'Trusted_Connection=yes;')
cursor = cnxn.cursor()
try:
    cursor.execute('select top (10) * FROM [DATAKS_MC].[dbo].[POS_TAB]')
    for row in cursor:
        print(row)
finally:
    # Fix: the original leaked the cursor/connection if execute() raised.
    cursor.close()
    cnxn.close()
|
#import sys
#input = sys.stdin.readline
def permute(X, Y):
    """Return X permuted by Y: element i of the result is Y[X[i]]."""
    return [Y[x] for x in X]
def main():
    """Read N, M, D and M swap positions; apply the round of adjacent swaps
    D times via permutation exponentiation and print where each element ends.

    Input (stdin): first line `N M D`, second line M positions `a`
    (1-based); each `a` swaps positions a and a+1, in order, forming one
    round. Index 0 of every array is unused padding.
    """
    N, M, D = map(int,input().split())
    A = list(map(int,input().split()))
    # Build one round of swaps as a single permutation of positions 0..N.
    permutation = [i for i in range(N+1)]
    for a in A:
        permutation[a], permutation[a+1] = permutation[a+1], permutation[a]
    # Compose the round with itself D times in O(N log D) using binary
    # exponentiation (square the permutation, multiply in set bits of D).
    now = [i for i in range(N+1)]
    while D > 0:
        if D%2 == 1:
            now = permute(now, permutation)
        D //= 2
        permutation = permute(permutation, permutation)
    # `now` maps destination -> source; invert it to get source -> destination.
    ANS = [0]*(N+1)
    for i, x in enumerate(now):
        ANS[x] = i
    print("\n".join(map(str, ANS[1:])))

if __name__ == '__main__':
    main()
|
from flask import jsonify, redirect, g
from models import db, User
def create_user(**form_args):
    """Create a user, sign them in (g.user), and return user data + token.

    Raises Exception when name/email/password is missing or empty, or when
    the email is already registered.
    """
    # Fix: use .get() so a *missing* key produces the friendly validation
    # message below instead of an opaque KeyError.
    if not form_args.get('name') or not form_args.get('email') or not form_args.get('password'):
        raise Exception('Name, Email, and Password are required fields')
    if User.query.filter_by(email=form_args['email']).first() is not None:
        raise Exception('There is already a user with this email')
    new_user = User(**form_args)
    # Store the hashed password, never the raw value.
    new_user.set_password(form_args['password'])
    db.session.add(new_user)
    db.session.commit()
    g.user = new_user
    # Authorize the user
    token = new_user.generate_token()
    return jsonify(user=new_user.as_dict(), token=token.decode('ascii'), status_code=201)
def update_user(id, **update_values):
    """Apply `update_values` to the authenticated user's record and return it.

    Raises Exception when the user does not exist or is not the caller.
    """
    user = User.query.get(id)
    # Guard clause: only the authenticated owner may update the record.
    if not user or user.id != g.user.id:
        raise Exception('Error updating user at id {}'.format(id))
    for key, value in update_values.items():
        setattr(user, key, value)
    db.session.commit()
    return jsonify(user.as_dict())
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.