text stringlengths 8 6.05M |
|---|
# Attempt to build a program that models and tracks population genetics
# Time Tracker & Websites
## 3/31 - 1 hour
## 4/2 - 3 hours
## https://stackoverflow.com/questions/11487049/python-list-of-lists
## https://stackoverflow.com/questions/18265935/python-create-list-
## with-numbers-between-2-values/36002096
## 4/14 3 hours - Decent breakthrough with long list and skipping. I had tried lists for each trait before.
## https://www.learnpython.org/en/Basic_String_Operations
## 4/15 1 hour - paper
## 4/22 1 hour - paper
## 4/24 2 hours - paper
## 4/26 4 hours - Big breakthrough with breeding.
### Simulates random mating in a population of "Rebops": each individual
### carries 5 genes x 2 alleles = 10 consecutive slots in one flat list.
import random
### Possible allele values per locus, and the two meiosis picks (which of a
### parent's two copies at a locus is passed to the offspring).
alleles = [0,1]
meiosis = [0,1]
### Ask the user for the population size.
print("How many Rebops in your population? (Enter value >1)")
pgen_pop = int((input("")))
### Ask the user for the number of breeding cycles (generations) to run.
print("How many breeding cycles would you like to run?")
cycles = int((input("")))
### rebop_geno is the flat library of all alleles across every generation;
### pgen_alleles is the allele count per generation (10 per individual).
rebop_geno = []
pgen_alleles = 10*pgen_pop
### Fill the parental generation with random alleles. This could be tweaked
### (all heterozygous, for example).
for i in range (pgen_alleles):
    rebop_geno.insert(0,random.choice(alleles))
### Print each parental individual's 10-allele genome.
for i in range (pgen_pop):
    print ("P Rebop " + str(i+1)+ " genome: " + str(rebop_geno [10*i:10*(i+1):1]))
for x in range (cycles):
    ### Mate selection and gamete formation. `mate` is the start offset of a
    ### randomly chosen partner's genome within the current generation
    ### (x*pgen_alleles shifts into generation x). The appends build the
    ### child allele-by-allele: at each of the 5 loci, one randomly chosen
    ### allele from parent i and one from the mate.
    for i in range (pgen_pop):
        mate = (10*random.randrange(pgen_pop)+x*pgen_alleles)
        if x <1:
            print ("P Rebop " + str(i+1) + " mates with " "P Rebop " + str(-x*pgen_pop + 1+(int(mate/10))))
        else:
            print ("f" + str(x) + " Rebop " + str(i+1) + " mates with " "f" + str(x) +" Rebop " + str(-x*pgen_pop + 1+(int(mate/10))))
        rebop_geno.append (rebop_geno [x*pgen_alleles+((2*5*i)+0+random.choice(meiosis))])
        rebop_geno.append (rebop_geno [((mate)+0+random.choice(meiosis))])
        rebop_geno.append (rebop_geno [x*pgen_alleles+((2*5*i)+2+random.choice(meiosis))])
        rebop_geno.append (rebop_geno [((mate)+2+random.choice(meiosis))])
        rebop_geno.append (rebop_geno [x*pgen_alleles+((2*5*i)+4+random.choice(meiosis))])
        rebop_geno.append (rebop_geno [((mate)+4+random.choice(meiosis))])
        rebop_geno.append (rebop_geno [x*pgen_alleles+((2*5*i)+6+random.choice(meiosis))])
        rebop_geno.append (rebop_geno [((mate)+6+random.choice(meiosis))])
        rebop_geno.append (rebop_geno [x*pgen_alleles+((2*5*i)+8+random.choice(meiosis))])
        rebop_geno.append (rebop_geno [((mate)+8+random.choice(meiosis))])
        # NOTE(review): this reassignment is dead — `mate` is overwritten at the
        # top of the next iteration; its only effect is advancing the RNG stream.
        mate = (10*random.randrange(pgen_pop)+x*pgen_alleles)
    ### Print the genomes of the new generation (f1, f2, ...).
    for i in range (pgen_pop):
        print ("f" + str(x+1) + " Rebop " + str(i+1)+ " genome: " + str(rebop_geno [x*pgen_alleles+10*i:x*pgen_alleles+10*(i+1):1]))
### Delay the end of the program so the results stay readable in the window.
print("There are the results! Hit enter to end program.")
last_words = (input(""))
|
from flask import Flask, request, jsonify
import db_handler as dh

# Minimal Flask backend: routes delegate all persistence to db_handler.
app = Flask(__name__)


@app.route("/")
def hello():
    # Landing / health-check endpoint.
    return "Gym backend"


@app.route("/drop_all")
def drop_all():
    # Wipes the entire database.
    # NOTE(review): exposed as an unauthenticated GET — confirm this is intended.
    dh.drop_all()
    return "dropped all"


@app.route("/<category>/add", methods=['POST'])
def add_entity(category):
    # Create a new entity of `category` from the posted request.
    print(request.__dict__)    # debug output
    print(request.get_json())  # debug output
    #file_=request.files[0]
    #file_.save('temp')
    result = dh.add_to_db(category, request)
    return result


@app.route("/<category>", methods=['GET'])
def get_entities(category):
    # List all entities in `category`.
    result=dh.get_entities(category)
    return jsonify(result)


@app.route("/<category>/<ide>", methods=['GET'])
def get_entity(category, ide):
    # Fetch one entity of `category` by its id.
    result=dh.get_entity(category, ide)
    return jsonify(result)


@app.route("/<category>/<ide>/edit", methods=['POST'])
def edit_entity(category, ide):
    # Update an entity from the posted request.
    result=dh.edit_entity(category, ide, request)
    return jsonify(result)


@app.route("/<category>/<ide>/history", methods=['GET'])
def get_history(category, ide):
    # Return the change history of one entity.
    print('FOUND')  # debug output
    result=dh.get_history(category, ide)
    return jsonify(result)


if __name__ == "__main__":
    # Listen on all interfaces (default port 5000).
    app.run(host='0.0.0.0')
|
import pandas as pd
import numpy as np
import sys
from scipy.spatial.distance import jensenshannon
from scipy.cluster.hierarchy import dendrogram
from matplotlib import pyplot as plt
def average_period(period, ts):
    """Mean distribution over the ts columns spanned by `period`.

    `period` is a tuple of column labels; uses an inclusive label slice.
    """
    return ts.loc[:, period[0]:period[-1]].mean(axis=1)


def distance_at_place_average(periods, place, ts):
    """Jensen-Shannon distance between the averaged distributions of the
    adjacent periods at `place` and `place + 1`."""
    return jensenshannon(average_period(periods[place], ts),
                         average_period(periods[place + 1], ts))


def distance_at_place_pairwise(periods, place, ts):
    """Mean pairwise Jensen-Shannon distance between every column of
    periods[place] and every column of periods[place + 1]."""
    return np.mean([jensenshannon(ts[y1], ts[y2])
                    for y1 in periods[place] for y2 in periods[place + 1]])


def distance_at_place_start_end_(periods, place, ts):
    """Distance between first and last column of one period (experimental).

    Kept for reference: the result was unsatisfying because the beginning
    of a period is still similar to its end.
    """
    return jensenshannon(ts[periods[place][0]], ts[periods[place][-1]])


def distance_at_place(periods, place, ts):
    """Distance between adjacent periods `place` and `place + 1`.

    Dispatches to the pairwise variant.
    BUG FIX: the original computed the distance but never returned it, so
    every caller received None (breaking np.argmin in the clustering loop).
    """
    return distance_at_place_pairwise(periods, place, ts)
def vizualize(cluster_table, periods):
    # Render the hierarchical clustering as a dendrogram and save it to
    # dendrogram.png. `cluster_table` is a scipy-style linkage table;
    # leaves are labelled with the first year of each original period.
    figure = plt.gcf()
    figure.set_size_inches(16, 12)
    plt.title('Time-aware Clustering Dendrogram')
    plt.xlabel('Date')
    plt.ylabel('distance')
    dendrogram(cluster_table, leaf_font_size=8, leaf_rotation=45, labels=[p[0] for p in periods])
    plt.tight_layout()
    plt.savefig("dendrogram.png")
def time_aware_clustering(periods, ts):
    """Agglomerative clustering that only merges time-adjacent periods.

    `periods` is a list of tuples of column labels of `ts`; each column is a
    distribution. Returns a scipy-linkage-style table of merges
    (left id, right id, distance, new cluster size).
    """
    # Map each period to its cluster id; original periods get 0..n-1,
    # merged clusters get fresh ids from cluster_counter upward.
    cluster_number = {p:i for i,p in enumerate(periods)}
    cluster_counter = len(periods)
    # Distance between each pair of adjacent periods (dists[i] is the
    # distance between periods[i] and periods[i+1]).
    dists = [distance_at_place(periods, i, ts) for i in range(len(periods)-1)]
    cluster_table = None
    while len(periods) > 1:
        # Merge the closest adjacent pair.
        merge_left_index = np.argmin(np.array(dists))
        new_period = periods[merge_left_index]+periods[merge_left_index+1]
        new_cluster = [cluster_number[periods[merge_left_index]],
                       cluster_number[periods[merge_left_index+1]],
                       dists[merge_left_index],
                       len(new_period)]
        cluster_number[new_period] = cluster_counter
        cluster_counter += 1
        if cluster_table is None:
            cluster_table = np.array([new_cluster])
        else:
            cluster_table = np.vstack([cluster_table, np.array(new_cluster)])
        # Replace the two merged periods with the combined one.
        periods = periods[:merge_left_index] + [new_period] + periods[merge_left_index+2:]
        if len(periods) == 1:
            break
        # Rebuild the adjacent-distance list: the entry to the right of the
        # merge disappears; entries touching the merged period are
        # recomputed (unless the merged period is now the last one, in which
        # case the stale value is kept — left/right neighbours shifted);
        # all other entries are carried over unchanged.
        new_dists = []
        for i,d in enumerate(dists):
            if i == merge_left_index+1:
                continue
            elif i in [merge_left_index-1, merge_left_index]:
                if i < len(periods)-1:
                    new_dists.append(distance_at_place(periods, i, ts))
            else:
                new_dists.append(d)
        dists = new_dists
    print("FINAL: ", cluster_table)
    return cluster_table
if __name__ == "__main__":
    # Input: a TSV with at least topic_id, year, topic_weight, topic_words.
    inp_file = sys.argv[1]
    ts = pd.read_csv(inp_file, sep='\t')
    # topic_id -> topic_words lookup (built but not used further below).
    topic_words = pd.Series(ts.topic_words.values, index=ts.topic_id).to_dict()
    # Pivot so each year becomes one column holding a distribution over topics.
    ts = ts.pivot(index='topic_id', columns='year', values='topic_weight')
    # Start with one single-year period per column.
    periods = [(y,) for y in list(ts.columns)]
    cluster_table = time_aware_clustering(periods, ts)
    print(cluster_table)
    vizualize(cluster_table, periods)
|
class JustCreator:
    """Stub creator class; both methods are unimplemented placeholders.

    NOTE(review): neither method declares ``self``, so they only work when
    called on the class itself (e.g. ``JustCreator.create(con, ...)``) —
    confirm the intended calling convention before implementing.
    """

    def checkType(ttype):
        # Placeholder: validate/inspect a type code.
        pass

    def create(con, dni, start, days):
        # Placeholder: create a record via connection `con`.
        pass
|
__author__ = 'apple'
# Python 2 script: open a shapefile with GDAL/OGR and print the SOURCE_3
# field of every feature in its first layer.
import os
from osgeo import ogr

shapefile = r"ShapefileTest/GIS_CensusTract_poly.shp"  # path to the input shapefile
driver = ogr.GetDriverByName("ESRI Shapefile")
dataSource = driver.Open(shapefile, 0)  # 0 = open read-only
layer = dataSource.GetLayer()
for feature in layer:
    print feature.GetField("SOURCE_3")  # SOURCE_3 value of each feature
|
# Keep prompting until the user enters a valid sex code ('m' or 'f'),
# then echo the choice back.
while True:
    sexo = str(input('Digite o sexo [m/f]: ')).lower()
    if sexo in ('m', 'f'):
        break
    print('Digitação incorreta, tente novamente.')
print('Voce escolher {}.'.format(sexo))
# coding: utf-8

# # VQE Screening
# Three Scaffold programs prepare the same two-qubit ansatz and read it out
# in different bases (XX, YY, ZZ); the measured counts are then combined
# into the expectation value of a two-qubit Hamiltonian.

# In[1]:

# Basis XX: both qubits are measured with MeasX.
scaffold_codeXX = """
const double alpha0 = 3.14159265359;
module initialRotations(qbit reg[2]) {
Rx(reg[0], alpha0);
CNOT(reg[0], reg[1]);
H(reg[0]);
}
module entangler(qbit reg[2]) {
H(reg[0]);
CNOT(reg[0], reg[1]);
H(reg[1]);
CNOT(reg[1], reg[0]);
}
module prepareAnsatz(qbit reg[2]) {
initialRotations(reg);
entangler(reg);
}
module measure(qbit reg[2], cbit result[2]) {
result[0] = MeasX(reg[0]);
result[1] = MeasX(reg[1]);
}
int main() {
qbit reg[2];
cbit result[2];
prepareAnsatz(reg);
measure(reg, result);
return 0;
}
"""

# In[2]:

# Basis YY: each qubit is rotated with Rx(pi/2) before MeasZ —
# presumably mapping the Y basis onto Z; confirm the sign convention.
scaffold_codeYY = """
const double alpha0 = 3.14159265359;
module initialRotations(qbit reg[2]) {
Rx(reg[0], alpha0);
CNOT(reg[0], reg[1]);
H(reg[0]);
}
module entangler(qbit reg[2]) {
H(reg[0]);
CNOT(reg[0], reg[1]);
H(reg[1]);
CNOT(reg[1], reg[0]);
}
module prepareAnsatz(qbit reg[2]) {
initialRotations(reg);
entangler(reg);
}
module measure(qbit reg[2], cbit result[2]) {
Rx(reg[0], 1.57079632679);
result[0] = MeasZ(reg[0]);
Rx(reg[1], 1.57079632679);
result[1] = MeasZ(reg[1]);
}
int main() {
qbit reg[2];
cbit result[2];
prepareAnsatz(reg);
measure(reg, result);
return 0;
}
"""

# In[3]:

# Basis ZZ: both qubits are measured directly with MeasZ.
scaffold_codeZZ = """
const double alpha0 = 3.14159265359;
module initialRotations(qbit reg[2]) {
Rx(reg[0], alpha0);
CNOT(reg[0], reg[1]);
H(reg[0]);
}
module entangler(qbit reg[2]) {
H(reg[0]);
CNOT(reg[0], reg[1]);
H(reg[1]);
CNOT(reg[1], reg[0]);
}
module prepareAnsatz(qbit reg[2]) {
initialRotations(reg);
entangler(reg);
}
module measure(qbit reg[2], cbit result[2]) {
result[0] = MeasZ(reg[0]);
result[1] = MeasZ(reg[1]);
}
int main() {
qbit reg[2];
cbit result[2];
prepareAnsatz(reg);
measure(reg, result);
return 0;
}
"""

# ***
# # Executing it!

# In[4]:

# Compile the Scaffold to OpenQASM
from scaffcc_interface import ScaffCC
openqasmXX = ScaffCC(scaffold_codeXX).get_openqasm()
openqasmYY = ScaffCC(scaffold_codeYY).get_openqasm()
openqasmZZ = ScaffCC(scaffold_codeZZ).get_openqasm()
print(openqasmXX)
print(openqasmYY)
print(openqasmZZ)

# ### Execute on a Simulator

# In[5]:

from qiskit import Aer,QuantumCircuit, execute
Aer.backends()

# In[6]:

# Run each basis circuit on the QASM simulator, then estimate each
# expectation value as the parity average of the counts
# (+1 for even parity '00'/'11', -1 for odd parity '01'/'10').
simulator = Aer.get_backend('qasm_simulator')
vqe_circXX = QuantumCircuit.from_qasm_str(openqasmXX)
vqe_circYY = QuantumCircuit.from_qasm_str(openqasmYY)
vqe_circZZ = QuantumCircuit.from_qasm_str(openqasmZZ)
num_shots = 100000
sim_resultXX = execute(vqe_circXX, simulator, shots=num_shots).result()
sim_resultYY = execute(vqe_circYY, simulator, shots=num_shots).result()
sim_resultZZ = execute(vqe_circZZ, simulator, shots=num_shots).result()
countsXX = sim_resultXX.get_counts()
countsYY = sim_resultYY.get_counts()
countsZZ = sim_resultZZ.get_counts()
expected_valueXX = (countsXX.get('00', 0) - countsXX.get('01', 0) - countsXX.get('10', 0) + countsXX.get('11', 0)) / num_shots
expected_valueYY = (countsYY.get('00', 0) - countsYY.get('01', 0) - countsYY.get('10', 0) + countsYY.get('11', 0)) / num_shots
expected_valueZZ = (countsZZ.get('00', 0) - countsZZ.get('01', 0) - countsZZ.get('10', 0) + countsZZ.get('11', 0)) / num_shots
# Combine the basis expectations with the Hamiltonian's coefficients.
# NOTE(review): the weights (0.5, -0.5, +0.5, -0.5) encode the specific
# Hamiltonian being screened — confirm against the problem definition.
expected_value = 0.5 - 0.5 * expected_valueXX + 0.5 * expected_valueZZ - 0.5 * expected_valueYY
print('The lowest eigenvalue is the expected value, which is : %s' % expected_value)

# ***
# # Circuit Visualization

# In[7]:

from qiskit.tools.visualization import circuit_drawer
circuit_drawer(vqe_circXX, scale=.4)

# In[8]:

from qiskit.tools.visualization import circuit_drawer
circuit_drawer(vqe_circYY, scale=.4)

# In[9]:

from qiskit.tools.visualization import circuit_drawer
circuit_drawer(vqe_circZZ, scale=.4)
# demo02_dataFrame.py — pandas DataFrame examples
import numpy as np
import pandas as pd

# An empty DataFrame.
df = pd.DataFrame()
print(df)

# Create a DataFrame from an array / list of rows.
ary = np.array([1,2,3,4,5])
df = pd.DataFrame(ary)
print(df, df.shape)
data = [ ['Alex',10],['Bob',12],('Clarke',13) ]
df = pd.DataFrame(data, index=['s1', 's2', 's3'],
                  columns=['Name', 'Age'])
print(df)

# Create a DataFrame from a dict of lists.
data = {'Name':['Tom', 'Jack', 'Steve', 'Ricky'],
        'Age':[28,34,29,42]}
df = pd.DataFrame(data, index=['s1','s2','s3','s4'])
print(df)

# Detail: Series with different indexes are aligned; gaps become NaN.
data = {'Name':pd.Series(['Tom','Jack','Steve','Ricky']),
        'Age':pd.Series([28,34,29], index=[1,2,3]) }
df = pd.DataFrame(data)
print(df)
print(df.index)
print(df.columns)
print(df.head(2))  # first two rows
print(df.tail(2))  # last two rows

# Column access
print('-' * 40)
d = {'one' : pd.Series([1, 2, 3], index=['a', 'b', 'c']),
     'two' : pd.Series([1, 2, 3, 4], index=['a', 'b', 'c', 'd']),
     'three' : pd.Series([1, 3, 4], index=['a', 'c', 'd'])}
df = pd.DataFrame(d)
print(df)
print(df[df.columns[:-1]])

# Adding a column
print(df)
df['four'] = pd.Series([15,43,12],index=['a','d','c'])
print(df)

# Dropping columns (axis=1)
print(df.drop(['three', 'four'], axis=1))

# Row access
print(df)
print(df.loc['b'])
print(df.loc[['b','c']])
print(df.loc['b':'d'])  # label slice: end label is inclusive
print(df.iloc[1])
print(df.iloc[[1, 2]])
print(df.iloc[1:])

# Appending rows
print(df)
# Append one row to df.
# NOTE(review): DataFrame.append is deprecated (removed in pandas 2.x);
# pd.concat is the replacement.
newdf = pd.DataFrame([[10, 20, 30, 40]],columns=df.columns)
df = df.append(newdf)
print(df)

# Dropping rows: drop with axis=0
print(df.drop([0, 'd'], axis=0))
print('-' * 40)

# Read the telecom customer data (from the morning session);
# keep pack_type, extra_flow, loss in a DataFrame and take the first rows.
with open('CustomerSurvival.csv', 'r') as f:
    data = []
    for i, line in enumerate(f.readlines()):
        row = tuple(line[:-1].split(','))
        data.append(row)

# Convert to a structured ndarray.
data = np.array(data, dtype={
    'names':['index','pack_type','extra_time',
             'extra_flow','pack_change','contract',
             'asso_pur','group_user','use_month','loss'],
    'formats':['i4','i4','f8','f8','i4',
               'i4','i4','i4','i4','i4']
})
data = pd.DataFrame(data).head(10)
print(data)

# Slim down: keep only pack_type, extra_time, loss.
sub_data = data[['pack_type', 'extra_time', 'loss']]
# Append the extra_flow column.
# NOTE(review): writing into this slice triggers pandas'
# chained-assignment warning; .copy()/.loc would avoid it.
sub_data['extra_flow'] = data['extra_flow']
# Keep only rows of customers who have not churned (loss != 1).
sub_data = sub_data[sub_data['loss']!=1]
print(sub_data)
sub_data['extra_flow'][9] = 0
print(sub_data)

# MultiIndex (hierarchical index)
print('-' * 40)
# Generate a (6, 3) array of normal random numbers: mean 85, std 3.
data = np.floor(np.random.normal(85, 3, (6,3)))
df = pd.DataFrame(data)
print(df)
# Set the row labels to a two-level MultiIndex.
index = [('A', 'M'), ('A', 'F'), ('B', 'M'),
         ('B', 'F'), ('C', 'M'), ('C', 'F')]
df.index = pd.MultiIndex.from_tuples(index)
print(df)
columns = [('score','math'), ('score','reading'),
           ('score', 'writing')]
df.columns = pd.MultiIndex.from_tuples(columns)
print(df)
# Rows for class C, males:
print(df.loc['C','M'])
print(df.loc[['A', 'C']])
# Access a column through the MultiIndex.
print(df['score','writing'])
import numpy as np
import torch
from torch import nn
from torch.nn import functional as F
from torch.autograd import Variable
class SELayer(nn.Module):
    """Squeeze-and-Excitation style gate applied directly to the input.

    A bottleneck MLP (channel -> channel//reduction -> channel) followed by
    a sigmoid produces per-feature gates that scale the input elementwise.
    """

    def __init__(self, channel, reduction=16):
        super(SELayer, self).__init__()
        hidden = channel // reduction
        self.fc = nn.Sequential(
            nn.Linear(channel, hidden, bias=False),
            nn.ReLU(inplace=True),
            nn.Linear(hidden, channel, bias=False),
            nn.Sigmoid(),
        )

    def forward(self, x):
        # Gate values lie in (0, 1); scale the input by them.
        gate = self.fc(x)
        return x * gate
class CSE(nn.Module):
    """Channel squeeze-and-excitation (cSE).

    Globally average-pools the spatial dims, runs a bottleneck MLP
    (in_ch -> in_ch//r -> in_ch), and uses the sigmoid output to rescale
    each channel of the input.
    """

    def __init__(self, in_ch, r):
        super(CSE, self).__init__()
        self.linear_1 = nn.Linear(in_ch, in_ch//r)
        self.linear_2 = nn.Linear(in_ch//r, in_ch)

    def forward(self, x):
        input_x = x
        # Global average pool over the spatial dims -> (..., C).
        x = x.view(*(x.shape[:-2]), -1).mean(-1)
        x = F.relu(self.linear_1(x), inplace=True)
        x = self.linear_2(x)
        # Broadcast the per-channel gates back over H and W.
        x = x.unsqueeze(-1).unsqueeze(-1)
        # FIX: F.sigmoid is deprecated; torch.sigmoid is the supported
        # equivalent (identical output).
        x = torch.sigmoid(x)
        return input_x * x
class SSE(nn.Module):
    """Spatial squeeze-and-excitation (sSE).

    A 1x1 convolution produces a gate map that rescales the input
    elementwise after a sigmoid.
    """

    def __init__(self, in_ch):
        super(SSE, self).__init__()
        self.conv = nn.Conv2d(in_ch, in_ch, kernel_size=1, stride=1)

    def forward(self, x):
        input_x = x
        x = self.conv(x)
        # FIX: F.sigmoid is deprecated; torch.sigmoid is the supported
        # equivalent (identical output).
        x = torch.sigmoid(x)
        return input_x * x
class SCSE(nn.Module):
    """Concurrent spatial & channel squeeze-and-excitation.

    Applies the channel (cSE) and spatial (sSE) attention branches to the
    same input and sums their outputs.
    """

    def __init__(self, in_ch, r=8):
        super(SCSE, self).__init__()
        self.cSE = CSE(in_ch, r)
        self.sSE = SSE(in_ch)

    def forward(self, x):
        channel_branch = self.cSE(x)
        spatial_branch = self.sSE(x)
        return channel_branch + spatial_branch
class SEBlock(nn.Module):
    """Squeeze-and-excitation block for flat (non-spatial) features.

    Bottleneck MLP (in_ch -> in_ch//r -> in_ch) + sigmoid gates scaling
    the input elementwise.
    """

    def __init__(self, in_ch, r=8):
        super(SEBlock, self).__init__()
        self.linear_1 = nn.Linear(in_ch, in_ch//r)
        self.linear_2 = nn.Linear(in_ch//r, in_ch)

    def forward(self, x):
        input_x = x
        x = F.relu(self.linear_1(x), inplace=True)
        x = self.linear_2(x)
        # FIX: F.sigmoid is deprecated; torch.sigmoid is the supported
        # equivalent (identical output).
        x = torch.sigmoid(x)
        return input_x * x
def get_sinusoid_encoding_table(n_position, d_hid, padding_idx=None):
    ''' Sinusoid position encoding table.

    Returns an (n_position, d_hid) array where even columns hold
    sin(pos / 10000^(2*(j//2)/d_hid)) and odd columns the matching cos.
    If padding_idx is given, that row is zeroed out.
    '''
    angles = np.array(
        [[pos / np.power(10000, 2 * (j // 2) / d_hid) for j in range(d_hid)]
         for pos in range(n_position)]
    )
    angles[:, 0::2] = np.sin(angles[:, 0::2])  # dim 2i
    angles[:, 1::2] = np.cos(angles[:, 1::2])  # dim 2i+1
    if padding_idx is not None:
        # Zero vector for the padding position.
        angles[padding_idx] = 0.
    return angles
def get_sinusoid_encoding_table_2d(H, W, d_hid):
    ''' Sinusoid position encoding table for a 2-D grid.

    Builds the flat (H*W, d_hid) table and reshapes it to (H, W, d_hid).
    '''
    flat_table = get_sinusoid_encoding_table(H * W, d_hid)
    return flat_table.reshape(H, W, d_hid)
class CBAM_Module(nn.Module):
    """Convolutional Block Attention Module: channel attention followed by
    spatial attention, optionally concatenating a sinusoidal positional
    plane into the spatial-attention input."""

    def __init__(self, channels, reduction=4,attention_kernel_size=3,position_encode=False):
        super(CBAM_Module, self).__init__()
        self.position_encode=position_encode
        # Channel attention: shared 1x1-conv MLP applied to both the
        # average-pooled and the max-pooled feature maps.
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.max_pool = nn.AdaptiveMaxPool2d(1)
        self.fc1 = nn.Conv2d(channels, channels // reduction, kernel_size=1,
                             padding=0)
        self.relu = nn.ReLU(inplace=True)
        self.fc2 = nn.Conv2d(channels // reduction, channels, kernel_size=1,
                             padding=0)
        self.sigmoid_channel = nn.Sigmoid()
        # Spatial attention input has avg + max planes, plus an optional
        # positional plane.
        if self.position_encode:
            k=3
        else:
            k=2
        self.conv_after_concat = nn.Conv2d(k, 1,
                                           kernel_size = attention_kernel_size,
                                           stride=1,
                                           padding = attention_kernel_size//2)
        self.sigmoid_spatial = nn.Sigmoid()
        # Lazily-built cache for the positional plane.
        self.position_encoded=None

    def forward(self, x):
        # Channel attention module
        module_input = x
        avg = self.avg_pool(x)
        mx = self.max_pool(x)
        avg = self.fc1(avg)
        mx = self.fc1(mx)
        avg = self.relu(avg)
        mx = self.relu(mx)
        avg = self.fc2(avg)
        mx = self.fc2(mx)
        x = avg + mx
        x = self.sigmoid_channel(x)
        # Spatial attention module
        x = module_input * x
        module_input = x
        b, c, h, w = x.size()
        if self.position_encode:
            if self.position_encoded is None:
                # NOTE(review): get_sinusoid_encoding_table expects
                # (n_position, d_hid); passing (h, w) treats the width as the
                # hidden size — get_sinusoid_encoding_table_2d (or h*w) was
                # likely intended. Also the cache is never invalidated if the
                # input spatial size changes. Confirm before relying on this path.
                pos_enc=get_sinusoid_encoding_table(h,w)
                pos_enc=Variable(torch.FloatTensor(pos_enc),requires_grad=False)
                if x.is_cuda:
                    pos_enc=pos_enc.cuda()
                self.position_encoded=pos_enc
        # Per-pixel channel statistics (mean and max over channels).
        avg = torch.mean(x, 1, True)
        mx, _ = torch.max(x, 1, True)
        if self.position_encode:
            pos_enc=self.position_encoded
            pos_enc = pos_enc.view(1, 1, h, w).repeat(b, 1, 1, 1)
            x = torch.cat((avg, mx,pos_enc), 1)
        else:
            x = torch.cat((avg, mx), 1)
        x = self.conv_after_concat(x)
        x = self.sigmoid_spatial(x)
        x = module_input * x
        return x
|
import csv
import json

# Python 2 script: convert a ';'-separated CSV into a JSON list of
# {firstkey, secondkey} records.
CSV_PATH = './GEMIDDELDE_NEERSLAG_2016.csv'
# NOTE(review): the input file handle is never closed; a `with` block
# (like the one used for the output) would be safer.
f = open(CSV_PATH, 'rt')
csv_file = csv.reader(f)
# Collect one dict per row.
jsonlist = []
for row in csv_file:
    # Each physical row arrives as a single comma-parsed cell; split on ';'.
    row = row[0].split(";")
    # Save header row.
    rowdict = {"firstkey": row[0], "secondkey": row[1]}
    print rowdict
    jsonlist.append(rowdict)
print jsonlist
# Write the json output to the file.
with open('jsonfile.json', 'w') as outfile:
    json.dump(jsonlist, outfile)
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import os
import torch
from config import get_args
from train_eval import train, evaluate
from Utils.utils import get_device, set_seed
from Utils.data_utils import load_data, random_dataloader, sequential_dataloader
from transformers import (
WEIGHTS_NAME, AdamW,
AlbertConfig, AlbertForSequenceClassification, AlbertTokenizer,
BertConfig, BertForSequenceClassification, BertTokenizer,
RobertaConfig, RobertaForSequenceClassification, RobertaTokenizer,
XLNetConfig, XLNetForSequenceClassification, XLNetTokenizer,
get_linear_schedule_with_warmup,
)
# model_type -> (config class, sequence-classification model, tokenizer).
MODEL_CLASSES = {
    "bert": (BertConfig, BertForSequenceClassification, BertTokenizer),
    "xlnet": (XLNetConfig, XLNetForSequenceClassification, XLNetTokenizer),
    "roberta": (RobertaConfig, RobertaForSequenceClassification, RobertaTokenizer),
    "albert": (AlbertConfig, AlbertForSequenceClassification, AlbertTokenizer),
}


def main(args):
    """Optionally fine-tune a pretrained sequence classifier, then evaluate
    the saved checkpoint on the test split.

    `args` comes from config.get_args(): output_dir, model_type, gpu_ids,
    file paths, batch sizes, and optimizer hyper-parameters.
    """
    # Refuse to silently overwrite a non-empty output directory.
    if (os.path.exists(args.output_dir) and os.listdir(args.output_dir)
            and args.do_train):
        print("输出目录 ({}) 已经存在且不为空. ".format(args.output_dir))
        print("你想覆盖掉该目录吗?type y or n")
        if input() == 'n':
            return
    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)
    # GPU ready
    gpu_ids = [int(device_id) for device_id in args.gpu_ids.split()]
    args.device, args.n_gpu = get_device(gpu_ids[0])
    # PTM (pretrained model) classes ready
    config_class, model_class, tokenizer_class = MODEL_CLASSES[args.model_type]
    config = config_class.from_pretrained(
        args.config_file,
        num_labels = 2,  # binary classification
        cache_dir=None
    )
    tokenizer = tokenizer_class.from_pretrained(
        args.vocab_file,
        do_lower_case=args.do_lower_case,
        cache_dir=None
    )
    # Train and eval to produce the checkpoint.
    if args.do_train:
        train_dataset = load_data(args, tokenizer, 'train')
        train_dataloader = random_dataloader(train_dataset, args.train_batch_size)
        dev_dataset = load_data(args, tokenizer, 'dev')
        dev_dataloader = sequential_dataloader(dev_dataset, args.dev_batch_size)
        # Model ready
        model = model_class.from_pretrained(
            args.model_file,
            from_tf=False,
            config=config,
            cache_dir=None
        )
        model.to(args.device)
        if args.n_gpu > 1:
            model = torch.nn.DataParallel(model, device_ids=gpu_ids)
        # Optimizer ready: no weight decay on bias / LayerNorm weights.
        no_decay = ["bias", "LayerNorm.weight"]
        optimizer_grouped_parameters = [
            {
                "params": [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)],
                "weight_decay": args.weight_decay,
            },
            {"params": [p for n, p in model.named_parameters() if any(
                nd in n for nd in no_decay)], "weight_decay": 0.0},
        ]
        optimizer = AdamW(optimizer_grouped_parameters,
                          lr=args.learning_rate, eps=args.adam_epsilon)
        # Total optimization steps for the linear warmup schedule.
        t_total = len(train_dataloader) // args.gradient_accumulation_steps * args.num_train_epochs
        scheduler = get_linear_schedule_with_warmup(
            optimizer, num_warmup_steps=args.warmup_steps, num_training_steps=t_total
        )
        train(args, train_dataloader, dev_dataloader, model, optimizer, scheduler, tokenizer)
    # Predict: reload the saved checkpoint and evaluate on the test split.
    tokenizer = tokenizer_class.from_pretrained(
        args.output_dir, do_lower_case=args.do_lower_case)
    test_dataset = load_data(args, tokenizer, 'test')
    test_dataloader = sequential_dataloader(test_dataset, args.test_batch_size)
    model = model_class.from_pretrained(args.output_dir)
    model.to(args.device)
    eval_loss, eval_metric = evaluate(args, model, test_dataloader, do_predict=True)
    for key, val in eval_metric.items():
        print('the test dataset {} is {}'.format(key, val))


if __name__ == "__main__":
    args = get_args()
    main(args)
|
import pandas as pd
import matplotlib.pyplot as plt

# Plot the share of bachelor's degrees earned by women in two fields over
# time and annotate the Computer Science peak.
df=pd.read_csv('E:\csvdhf5xlsxurlallfiles\percent-bachelors-degrees-women-usa.csv')
print(df.head())
print(df.columns)
year=df[['Year']]
architecture=df[['Computer Science']]
print(architecture)
# BUG FIX: the original assigned a plain nested list ([['Physical Sciences']])
# instead of selecting the column from the DataFrame.
engineering=df[['Physical Sciences']]
print(engineering)
plt.plot(year, architecture, color='blue', label='Architecture')
plt.plot(year, engineering, color='green', label='Engineering')
# BUG FIX: locate the peak properly — the original indexed `year` with the
# maximum *value* instead of its position.
cs = df['Computer Science']
ar_max = cs.max()
yr_max = df.loc[cs.idxmax(), 'Year']
# BUG FIX: annotations are drawn with plt.annotate; plt.plot does not accept
# xy/xytext/arrowprops. xy is (x, y) = (year, value).
plt.annotate('maximum', xy=(yr_max, ar_max), xytext=(yr_max + 5, ar_max + 5),
             arrowprops=dict(facecolor='black'))
plt.xlabel('YEAR')
plt.ylabel('Enrolment(%)')
plt.title('Undergraduate Enrolment of women')
plt.show()
|
from bs4 import BeautifulSoup
import requests

# Scrape job postings from saramin.co.kr (job-category 404 listing page).
url = "http://www.saramin.co.kr/zf_user/jobs/list/job-category?cat_cd=404&panel_type=&search_optional_item=n&search_done=y&panel_count=y"
response = requests.get(url)
html = BeautifulSoup(response.text, 'html.parser')

# Earlier attempt (disabled): zip three parallel selections together.
'''
company_names = html.select('.company_name')
recruit_names = html.select('.recruit_name')
recruit_conditions = html.select('.list_recruit_condition')
for company_name, recruit_name, condition in zip(company_names, recruit_names, recruit_conditions):
    print(f'{company_name.text}- {recruit_name.text}')
    print(condition.text)
'''

# Earlier attempt (disabled): select per listing card.
'''
company = html.select('.part_top')
for com in company:
    print(f'{com.select_one(".company_name").text}- {com.select_one(".recruit_name").text}')
    print(com.select_one('.list_recruit_condition').text)
    break
'''

# Current approach: for each listing, extract rec_idx from the link and
# fetch the detail view through the relay AJAX endpoint.
company_list = html.select('ul.product_list li')
for com in company_list:
    idx = com.select_one('a')['href'].split('=')[-1]
    company_info_url = 'http://www.saramin.co.kr/zf_user/jobs/relay/view-ajax'
    company_info_params = { 'rec_idx': idx }
    company_response = requests.post(company_info_url, params=company_info_params)
    print(company_response)
    company_html = BeautifulSoup(company_response.text, 'html.parser')
    company_title = company_html.select_one('a.company').text
    print(company_title.strip())
    break  # only the first listing while developing
|
import os
from django.conf import settings
from django.http import HttpResponse
def get_election_fixture(request):
    """Serve the bundled elections JSON fixture as an HTTP response.

    BUG FIX: the original wrote ``with open(...).read() as out``, which
    applies the context manager to the *string* returned by read() and
    raises AttributeError (str has no __exit__). The file object is the
    context manager; read inside the block, then build the response.
    """
    fixture_path = os.path.join(settings.BASE_DIR, "data/elections.json")
    with open(fixture_path) as fixture_file:
        out = fixture_file.read()
    return HttpResponse(out, status=200, content_type="application/json")
|
"""
learned from others
2nd approach: hashtable + array
- save value: index in hashtable
- when delete, swap the target item and the last item in the array, and remove the last item
- see ./idea_add.png and ./idea_remove.png
Insert Time O(1)
Remove Time O(1)
GetRandom Time O(1)
Space O(n) the unique keys
92 ms, faster than 78.93%
"""
class RandomizedSet(object):
    """Set with O(1) average-time insert, remove, and getRandom.

    ``self.nums`` holds the values in a flat list; ``self.ht`` maps each
    value to its index in ``self.nums``. Removal swaps the target with the
    last element so the pop is O(1) (see ./idea_add.png / ./idea_remove.png).
    """

    def __init__(self):
        """Initialize your data structure here."""
        self.ht = {}     # value -> index in self.nums
        self.nums = []   # the values, in arbitrary order

    def insert(self, val):
        """
        Inserts a value to the set. Returns true if the set did not already contain the specified element.
        :type val: int
        :rtype: bool
        """
        if val in self.ht:
            return False
        self.ht[val] = len(self.nums)
        self.nums.append(val)
        return True

    def remove(self, val):
        """
        Removes a value from the set. Returns true if the set contained the specified element.
        :type val: int
        :rtype: bool
        """
        if val not in self.ht:
            return False
        idx = self.ht[val]
        # Move the last element into the vacated slot (no-op when val is last).
        last = self.nums[-1]
        self.ht[last] = idx
        self.nums[idx] = last
        self.nums.pop()
        del self.ht[val]
        return True

    def getRandom(self):
        """
        Get a random element from the set.
        :rtype: int
        """
        # FIX: `random` was used without being imported anywhere in this
        # file; a function-local import keeps the class self-contained.
        import random
        idx = random.randint(0, len(self.nums)-1)
        return self.nums[idx]
# _*_ coding:utf-8 _*_
# choi, judge, player, alien, shoot, etc. all live in this module.
# (Rewrite of an older, messier version — kept deliberately simple.)
#
import random
import End

# Ship rooms as [row, col] coordinates on a 5x5 grid.
# NOTE(review): [3, 3] is absent from the list — presumably intentional
# (the center cell is not a room); confirm.
room = [[1, 1], [1, 2], [1, 3], [1, 4], [1, 5],
        [2, 1], [2, 2], [2, 3], [2, 4], [2, 5],
        [3, 1], [3, 2], [3, 4], [3, 5],
        [4, 1], [4, 2], [4, 3], [4, 4], [4, 5],
        [5, 1], [5, 2], [5, 3], [5, 4], [5, 5]]
# Safe rooms ("green houses") the alien never enters.
green_house = [[2, 3], [3, 2], [3, 4], [4, 3]]
# ANSI escape codes for colored terminal output.
colors = {
    'RED': '\033[91m',
    'GREEN': '\033[92m',
    'END': '\033[0m',
    'YELLOW': '\033[33m'
}
class Alien(object):
    # The alien: tracks its blood, current room, and the rooms it has visited.
    global room, green_house, colors

    def __init__(self, blood):
        self.full_blood = blood   # maximum blood (hits needed to kill it)
        self.blood = blood        # current blood
        self.init_inform = '它似乎还没有来过这个房间'
        # Clue text per room (keyed by str(room)); updated as it moves.
        self.track = {}
        for i in room:
            self.track[str(i)] = self.init_inform
        self.cur_pos = self.start_pos()

    def start_pos(self):
        # Pick a random starting room outside the safe (green house) rooms.
        self.cur_pos = room[random.randint(0, len(room) - 1)]
        while self.cur_pos in green_house:
            self.cur_pos = room[random.randint(0, len(room) - 1)]
        return self.cur_pos

    def rand(self, pos):
        # Shift pos by 1-2 rooms along a random axis, in a random direction.
        self.row_line = random.randint(0, 1)
        if random.randint(0, 1):
            pos[self.row_line] += random.randint(1, 2)
            # (debug) print '+1'
        else:
            pos[self.row_line] -= random.randint(1, 2)
            # (debug) print '-1'
        return pos

    def move(self):
        # Propose random moves until one lands inside the map and outside
        # the safe rooms; record the visit, warn the player (loudly if a
        # camera is in the new room), then check for a lethal encounter.
        self.mid_val = self.rand(list(self.cur_pos))
        while not Judge().in_map(self.mid_val) or self.mid_val in green_house:
            self.mid_val = list(self.cur_pos)
            self.mid_val = self.rand(self.mid_val)
        self.cur_pos = self.mid_val
        # (debug) print the alien's position and the player's camera list
        self.track[str(self.cur_pos)] = '它似乎来过这个房间'
        if self.cur_pos in player.cam_pos:
            print ''
            print '{0[RED]}!!!!你安装的监视仪发出警报: {1}{0[END]}'.format(colors, self.cur_pos)
        else:
            print ''
            print colors['YELLOW'] + '你听见了一些动静,它在移动,或许留下了一些痕迹' + colors['END']
        End.End().dead(player.cur_pos, self.cur_pos)
class Judge(object):
    # Input validation and map-boundary checks.
    global room

    def direction(self, statement):
        # Prompt with `statement` until the user enters up/down/left/right.
        # NOTE(review): on invalid input the recursive retry's return value
        # is discarded, so the caller receives None after a bad first
        # attempt — confirm and fix by returning the recursive call.
        self.direc = raw_input('%s\n>' % statement)
        if self.direc != 'up' and \
           self.direc != 'down' and \
           self.direc != 'left' and \
           self.direc != 'right':
            print colors['RED'] + "我特么不知道你想干啥!" + colors['END']
            self.direction(statement)
        else:
            return self.direc

    def in_map(self, pos):
        # True if pos is one of the ship's rooms.
        if pos in room:
            return True
        else:
            return False
class Player(object):
    # The human player: position, remaining detectors, and placed detectors.
    global colors

    def __init__(self, start_pos, cam):
        self.cur_pos = start_pos  # current room [row, col]
        self.cam = cam            # number of detectors left to place
        self.cam_pos = []         # rooms where detectors were placed

    def change(self, pos, direc):
        # Shift pos one room in the given direction and return it.
        if direc == 'down':
            pos[0] += 1
        elif direc == 'up':
            pos[0] -= 1
        elif direc == 'left':
            pos[1] -= 1
        elif direc == 'right':
            pos[1] += 1
        return pos

    def move(self):
        # Ask for a direction until the resulting room is on the map,
        # then commit the move.
        self.direc = Judge().direction('去哪个方向')
        self.may_pos = self.change(list(self.cur_pos), self.direc)
        while not Judge().in_map(self.may_pos):
            print colors['RED'] + '\n不能通行!\n自毁系统已经禁止了飞船的任何出入\n' + colors['END']
            self.direc = Judge().direction('去哪个方向')
            self.may_pos = self.change(list(self.cur_pos), self.direc)
        self.cur_pos = list(self.may_pos)

    def choi1(self):
        # Main action menu: shoot, stay, move, or place a detector.
        self.choice = raw_input('move, stay, camera or shoot?\n>')
        if self.choice == 'shoot':
            self.shoot()
            self.choi2()
        elif self.choice == 'stay':
            print colors['YELLOW'] + '你停留在了原地,它来了' + colors['END']
            End.End().dead(self.cur_pos, alien.cur_pos)
        elif self.choice == 'move':
            self.move()
            End.End().dead(self.cur_pos, alien.cur_pos)
        elif self.choice == 'camera' and self.cam > 0:
            # Place a detector in the current room.
            self.cam -= 1
            # (debug) print self.cam
            self.cam_pos.append(list(self.cur_pos))
            # (debug) print self.cam_pos
            print ''
            print colors['YELLOW'] + '你在此处设置了一个检测仪' + colors['END']
            self.choi2()
        elif self.choice == 'camera' and self.cam <= 0:
            print colors['RED'] + '你已经没有检测仪了' + colors['END']
            self.choi1()
        else:
            print colors['RED'] + '你特么要干啥!' + colors['END']
            self.choi1()

    def choi2(self):
        # Follow-up menu after shooting / placing a detector: move or stay.
        self.choice = raw_input('move or stay?\n>')
        if self.choice == 'move':
            self.move()
        elif self.choice == 'stay':
            print colors['YELLOW'] + '\n你停留在了原地,异形不见踪影\n' + colors['END']
        else:
            print colors['RED'] + '你特么要干啥!' + colors['END']
            self.choi2()

    def shoot(self):
        # A hit lands only when the alien is exactly one room away in the
        # direction fired.
        self.judge = Judge().direction('朝哪个方向开枪')
        if self.cur_pos[0] == alien.cur_pos[0] + 1 and self.cur_pos[1] == alien.cur_pos[1] and self.judge == 'up' or \
           self.cur_pos[0] == alien.cur_pos[0] - 1 and self.cur_pos[1] == alien.cur_pos[1] and self.judge == 'down' or \
           self.cur_pos[1] == alien.cur_pos[1] + 1 and self.cur_pos[0] == alien.cur_pos[0] and self.judge == 'left' or \
           self.cur_pos[1] == alien.cur_pos[1] - 1 and self.cur_pos[0] == alien.cur_pos[0] and self.judge == 'right':
            alien.blood -= 1
            print ''
            print colors['YELLOW'] + '命中!' + colors['END']
            self.death = ['正中后肢', '正中腹部', '正中前胸']
            print colors['YELLOW'] + self.death[random.randint(0, 2)] + colors['END']
            print ''
            End.End().win(alien.full_blood, alien.blood)
        else:
            print colors['YELLOW'] + "\n你什么也没有打中,它似乎不在那里\n" + colors['END']
# --------------------------------------------------------------------------------------------------------------
class Engine(object):
    # Game driver: prints the briefing, then runs the main turn loop.
    global green_house, colors

    def __init__(self, time):
        self.step = 0     # turns elapsed (each turn represents 10 minutes)
        self.time = time  # total minutes until the ship self-destructs

    def start(self):
        # Print the intro / briefing screens and the ship map.
        print ''
        print 'Earth Time: 5:43 pm\n'
        print '狗蛋!醒醒!!\n'
        raw_input('按回车键继续>')
        print """
深空号飞船遭遇不明生物入侵!%d分钟后生命自毁系统将自动开启!
作为唯一脱离冬眠状态的船长
你需要在自毁开始前找到The Alien并且杀死它
拯救你冬眠中的船员以及你自己的生命
""" % self.time
        raw_input('按回车键继续>')
        print """
这是深空号各船舱的坐标,绿色标识为安全舱:\n
[1, 1] [1, 2] [1, 3] [1, 4] [1, 5]
[2, 1] [2, 2] {1[GREEN]}{0[0]}{1[END]} [2, 4] [2, 5]
[3, 1] {1[GREEN]}{0[1]}{1[END]} {1[GREEN]}{0[2]}{1[END]} [3, 5]
[4, 1] [4, 2] {1[GREEN]}{0[3]}{1[END]} [4, 4] [4, 5]
[5, 1] [5, 2] [5, 3] [5, 4] [5, 5]
""".format(green_house, colors)
        print """
检测系统被异形破坏了,它最后一次出现的位置是:
%s
""" % alien.cur_pos
        raw_input('按回车键继续>\n')
        print '你现在的位置:%s' % player.cur_pos
        print "输入'up', 'down', 'right', 'left'在房间移动"
        print "输入'up', 'down', 'right', 'left'向隔壁一间船舱开枪"
        print "你需要击中三次才能杀死异形"
        print "异形一直在移动,一次移动一个房间,或直线上两个房间,而你不会想和它在同一房间相遇"
        print "但你手中有%d架检测仪,异形经过时它们会发出警报" % player.cam
        print "时间紧迫,小心行事"
        print "Good Luck"
        print ''
        print 'Time Count: %d mins left\n' % self.time
        raw_input('按回车键继续>')

    def engine(self):
        # Main loop: one turn = alien moves, player acts, status report.
        # Runs until time/10 turns have passed, then the ship self-destructs.
        while self.step < self.time / 10:
            self.step += 1
            alien.move()
            player.choi1()
            self.inform()
            # Periodically reprint the ship map as a reminder.
            if self.step in range(6, 30, 5):
                print ''
                print """
这是深空号各船舱的坐标:\n
[1, 1] [1, 2] [1, 3] [1, 4] [1, 5]
[2, 1] [2, 2] {1[GREEN]}{0[0]}{1[END]} [2, 4] [2, 5]
[3, 1] {1[GREEN]}{0[1]}{1[END]} {1[GREEN]}{0[2]}{1[END]} [3, 5]
[4, 1] [4, 2] {1[GREEN]}{0[3]}{1[END]} [4, 4] [4, 5]
[5, 1] [5, 2] [5, 3] [5, 4] [5, 5]
""".format(green_house, colors)
        print 'Time has run out!'
        print '自毁系统开启'
        End.End().timeout()

    def inform(self):
        # End-of-turn status report.
        print '\n---------------------------'
        print '你的位置:', player.cur_pos
        print ''
        print 'Time Count: %d mins left' % (self.time - self.step * 10)
        print ''
        print '异形血量:', alien.blood
        print ''
        print '剩余检测仪数量:', player.cam
        print ''
        print '检测仪位置:', str(player.cam_pos)
        print ''
        print '线索:', alien.track[str(player.cur_pos)]
        print '---------------------------'
# Module entry point: create the actors and start the game.
alien = Alien(5)            # alien starts with 5 blood (hit points)
player = Player([2, 3], 5)  # player starts in cabin [2, 3] with 5 detectors
play = Engine(280)          # 280 mission minutes -> 28 game steps
# TODO(review): could add a branch that skips start() (original note, translated)
play.start()
play.engine()
|
from typing import Dict, Any
class NameItem(object):
    """Read-only view over a raw item record.

    Wraps a plain ``dict`` and exposes its ``name``, ``id`` and
    ``category`` entries as attributes.
    """

    def __init__(self, data: Dict[str, Any]):
        self._record: Dict[str, Any] = data

    @property
    def name(self):
        """The item's display name."""
        return self._record['name']

    @property
    def id(self):
        """The item's unique identifier."""
        return self._record['id']

    @property
    def category(self):
        """The category the item belongs to."""
        return self._record['category']
|
from .APIError import APIError


class NoSearchResultsError(APIError):
    """Raised when a search term matches no company in the database."""

    def __init__(self, search):
        # Keep the offending term and pre-build the JSON error payload
        # via the APIError helper so callers can return it directly.
        self.search = search
        self.json_error = self.api_error(repr(self))

    def __repr__(self):
        return "No results for search term: {}".format(self.search)
|
# Demo: exercise the project-local Logger with two independent log files.
from logger import Logger
logger_object = Logger("log.txt")
logger_object.info("this is FYI")
log = Logger("mylog.txt")
log.critical("this is a major f up")
# Disabled SingletonObject demo kept as a module-level string literal
# (it is intentionally not executed).
'''
from singleton import SingletonObject
obj1 = SingletonObject()
obj1.val = "Hello"
print(f"obj1 {obj1}")
print("-----")
obj2 = SingletonObject()
obj2.val = "World"
print(f"obj1 {obj1}")
print(f"obj2 {obj2}")
print(f"{obj1 == obj2}")
'''
# Smoke test against a local Cassandra node: create a table in the
# `benchmark` keyspace, insert one row, and dump the table contents.
from cassandra.cluster import Cluster
cluster = Cluster()  # connects to localhost:9042 by default
session = cluster.connect('benchmark')
# Idempotent schema setup.
session.execute("""
CREATE TABLE IF NOT EXISTS testing (uid uuid PRIMARY KEY, name text);
""")
# NOTE(review): inserts a new row (fresh uuid) on every run.
session.execute("""
INSERT INTO testing(uid, name) VALUES(uuid(), 'jonathan');
""")
rows = session.execute("""
SELECT * from testing;
""")
for row in rows:
    print(row)
|
# Base URL targeted by the scraper/tests elsewhere in the project.
test_url = 'https://mercari.com/'
|
# lesson 1: image classification
# export LANG=en_US.utf8
# https://github.com/fastai/course:v3/blob/master/nbs/dl1/lesson1:pets.ipynb
import matplotlib
import matplotlib.pyplot as plt
plt.ion()  # interactive mode so plots don't block the script
from fastai import *
from fastai.vision import *
from fastai.metrics import error_rate
# Download/unpack the Oxford-IIIT Pet dataset (cached under ~/.fastai).
path=untar_data(URLs.PETS)
print("using image data from: {}".format(path))
np.random.seed(2)  # reproducible train/valid split
bs=140  # batch size
# %%
path_anno, path_img=path.ls()
fnames=get_image_files(path_img)
# Class label = filename stem before the trailing _<number>.jpg
# NOTE(review): the dot before "jpg" is unescaped, so it matches any char.
pat=r"""/([^/]+)_\d+.jpg$"""
data=ImageDataBunch.from_name_re(path_img, fnames, pat, ds_tfms=get_transforms(), size=224, bs=bs)
data.normalize(imagenet_stats)
print("use look() to see example data")
def look():
    # Show a 3-row grid of sample images with their labels.
    data.show_batch(rows=3)
print(data.classes)
# Transfer learning: frozen resnet34 backbone plus a new head.
learn=cnn_learner(data, models.resnet34, metrics=error_rate)
print("learn extra layers at the end")
# Stage 1: reuse a cached checkpoint if it exists, else train the head.
fn=pathlib.Path("/home/martin/.fastai/data/oxford-iiit-pet/images/models/save-1.pth")
if ( fn.is_file() ):
    learn.load(fn.stem)
else:
    learn.fit_one_cycle(4)
    learn.save(fn.stem)
# %%
# Error analysis: worst losses, confusion matrix, most-confused pairs.
interp=ClassificationInterpretation.from_learner(learn)
losses, idxs=interp.top_losses()
interp.plot_top_losses(9)
interp.plot_confusion_matrix()
print(interp.most_confused(min_val=2))
# %%
print("learn parameters of the whole model")
# Stage 2: unfreeze and fine-tune the whole network (cached likewise).
fn2=pathlib.Path("/home/martin/.fastai/data/oxford-iiit-pet/images/models/save-2.pth")
if ( fn2.is_file() ):
    learn.load(fn2.stem)
else:
    learn.lr_find()
    learn.recorder.plot()
    learn.unfreeze()
    learn.fit_one_cycle(2, max_lr=slice((1.00e-6), (1.00e-4)))
    learn.save(fn2.stem)
import z
import math
import readchar
import csv
import buy
import os
from sortedcontainers import SortedSet
import glob
import yfinance as yf
from pandas_datareader import data as pdr
# Only this year's CSV splits (…_<year>.csv) are scanned and updated.
year = "2020"
yf.pdr_override()  # route pandas_datareader downloads through yfinance
def getDataFromYahoo(astock, cdate):
    """Download daily price history for *astock* starting at *cdate*.

    Returns the DataFrame produced by pandas_datareader/yfinance, or None
    when the download fails (callers check for None and count the miss).

    Fix: the original issued the identical pdr call twice in a row, so every
    successful update downloaded the data twice; and a first-attempt failure
    called exit(), killing the whole run for one transient error instead of
    using the None convention the callers already handle.
    """
    try:
        print("dl astock: {}".format( astock))
        return pdr.get_data_yahoo([astock], start=cdate)
    except Exception as e:
        z.trace(e)
        return None
def setlistofstocks():
    """Scan this year's split CSV files, drop ETF symbols, and persist the
    remaining ticker list under the "listofs" key."""
    pattern = z.getPath("split/*/*{}.csv".format(year))
    suffix = "_{}".format(year)
    symbols = []
    for filepath in glob.glob(pattern):
        base = os.path.splitext(os.path.basename(filepath))[0]
        symbols.append(base.replace(suffix, ""))
    etfs = z.getEtfList()
    listofs = [sym for sym in symbols if sym not in etfs]
    z.setp(listofs, "listofs")
# Tickers whose updates repeatedly failed this run; persisted so they can be
# reviewed (and optionally deleted) at the end of __main__.
problems = set()
def updateStocks():
    """Append missing daily rows to every <sym>_<year>.csv price file.

    For each ticker in the persisted "listofstocks": skip files already
    refreshed today, read the newest stored row, download prices from that
    date onward via getDataFromYahoo, and append the new rows.  Six
    consecutive download failures abort the run and persist the failing
    tickers under "problems".
    """
    global problems
    import datetime
    stocks = z.getp("listofstocks")
    already_updated = 0
    try:
        now = datetime.datetime.now()  # NOTE(review): unused
        consecutive_misses = 0
        cdate_missing = list()  # NOTE(review): unused
        current_cday = None  # NOTE(review): unused
        added = False
        for astock in stocks:
            # CSVs are sharded by first letter: split/<A>/<SYM>_<year>.csv
            apath = z.getPath("split/{}/{}_{}.csv".format(astock[0], astock, year))
            try:
                # Skip files whose mtime already falls on today's day+month.
                csvdate = datetime.datetime.fromtimestamp(os.path.getmtime(apath))
                csvday = csvdate.day
                csvmonth = csvdate.month
                ttoday = datetime.date.today().day
                tmonth = datetime.date.today().month
                if csvday >= ttoday and tmonth == csvmonth:
                    consecutive_misses = 0
                    already_updated += 1
                    continue
                # (disabled) trim a duplicated trailing date row
                # readFile = open(apath)
                # lines = readFile.readlines()
                # readFile.close()
                # if lines[-1].split(",")[0] == lines[-2].split(",")[0]:
                # w = open(apath,'w')
                # w.writelines([item for item in lines[:-1]])
                # w.close()
            except:
                # File missing/unreadable: skip this ticker entirely.
                continue
            # Scan to the last CSV row; `row` ends up as the newest entry.
            for row in csv.DictReader(open(apath)):
                pass
            try:
                date = row['Date']
                cclose = row['Adj Close']
            except:
                continue
            print("date: {}".format( date))
            df = getDataFromYahoo(astock, date)
            if df is None:
                print("problem downloading: {}".format( astock))
                consecutive_misses += 1
                # After 6 misses in a row, assume a systemic outage:
                # persist the offenders and stop the whole run.
                if consecutive_misses > 5:
                    problems.add(astock)
                    print("problems : {}".format( problems ))
                    z.setp(problems, "problems")
                    exit()
                continue
            consecutive_misses = 0
            with open(apath, "a") as f:
                first = True
                for idx in df.index:
                    # The first downloaded row repeats the stored last row.
                    if first:
                        first = False
                        continue
                    cdate = str(idx.to_pydatetime()).split(" ")[0]
                    if date == cdate:
                        continue
                    try:
                        # Plain single-ticker frame: cells are scalars.
                        opend = round(df.at[idx, "Open"],3)
                        high = round(df.at[idx, "High"],3)
                        low = round(df.at[idx, "Low"],3)
                        closed = round(df.at[idx, "Close"],3)
                        adj = round(df.at[idx, "Adj Close"],3)
                        vol = df.at[idx, "Volume"]
                    except:
                        # Multi-column frame: cells come back as arrays.
                        opend = round(df.at[idx, "Open"][0],3)
                        high = round(df.at[idx, "High"][0],3)
                        low = round(df.at[idx, "Low"][0],3)
                        closed = round(df.at[idx, "Close"][0],3)
                        adj = round(df.at[idx, "Adj Close"][0],3)
                        vol = df.at[idx, "Volume"][0]
                    try:
                        # Day-over-day change vs previous adjusted close.
                        chg = round(adj/cclose,3)
                    except:
                        chg = 1  # e.g. zero or non-numeric previous close
                    if not math.isnan(opend):
                        cclose = adj
                        added = True
                        f.write("{},{},{},{},{},{},{},{}\n".format(cdate, opend, high, low, closed, adj, vol, chg))
        # NOTE(review): `added` is never reset per ticker, so this only
        # flags the last ticker when no rows at all were appended.
        if not added:
            problems.add(astock)
            print ("problem with {}".format(astock))
    except Exception as e:
        print ("problem with gbuy")
        z.trace(e)
        exit()
    print("already_updated : {}".format( already_updated ))
def saveQuick():
    """Merge tickers from several persisted sources ("savePs", "ports",
    "orders", "top95") into one de-duplicated "quick" list and store it.
    TMUSR is explicitly excluded."""
    ports = z.getp("ports")
    symbols = {entry[1] for entry in z.getp("savePs")}
    symbols.update(ports.keys())
    symbols.update(z.getp("orders").keys())
    symbols.update(z.getp("top95"))
    symbols.discard("TMUSR")
    print ("Here's quick")
    z.setp(list(symbols), "quick", True)
if __name__ == '__main__':
    # Batch pipeline: refresh CSVs, then run the downstream processors.
    import args
    args.args.bta = True
    try:
        if not args.args.noupdate:
            updateStocks()
            buy.updateDates()
        # Fix: `stocks` was never defined at module level (it is a local of
        # updateStocks), so the lines below always raised NameError and the
        # run bailed out through the except handler.  Load the persisted
        # ticker list explicitly.
        stocks = z.getp("listofstocks")
        import current
        print ("current {} ".format(len(stocks)))
        current.procs(stocks)
        #
        buy.savePs("qq")
        print ("prob up 1 year")
        import prob_up_1_year
        prob_up_1_year.procs(stocks)
        import avgs
        avgs.procs(stocks)
        if not args.args.quick:
            buy.savePs()
            saveQuick()
    except Exception as e:
        print ("problem with gbuy")
        z.trace(e)
        exit()
    # Offer interactive deletion of tickers that kept failing.
    if problems:
        print("delete problems: {}".format( problems))
        key = readchar.readkey()
        if key == "y":
            import gained_discount
            gained_discount.batchdelete(problems)
|
import numpy as np
from hilbertcurve.hilbertcurve import HilbertCurve
import cloudmetrics
def test_hilbert_curve():
    """
    Test on Hilbert curve (should have fracDim=2)
    """
    # Rasterize an order-8, 2-D Hilbert curve onto a 512x512 mask.
    mask = np.zeros((512, 512))
    p_hil = 8   # curve order (iterations)
    n_hil = 2   # dimensions
    dist = 2 ** (p_hil * n_hil)  # number of points along the curve
    hilbert_curve = HilbertCurve(p_hil, n_hil)
    coords = np.zeros((dist, n_hil))
    for i in range(dist):
        coords[i, :] = hilbert_curve.point_from_distance(i)
    coords = coords.astype(int)
    # Spread the points two pixels apart, then fill each segment midpoint
    # so consecutive curve points stay connected on the mask.
    coords *= 2
    coords_av = ((coords[1:, :] + coords[:-1, :]) / 2).astype(int)
    mask[coords[:, 0], coords[:, 1]] = 1
    mask[coords_av[:, 0], coords_av[:, 1]] = 1
    # A space-filling curve has box-counting (fractal) dimension 2.
    fractal_dim = cloudmetrics.mask.fractal_dimension(mask=mask)
    np.testing.assert_allclose(fractal_dim, 2.0, atol=1e-4)
def test_random():
    """
    Test on randomly scattered points (should have fractal_dim=2)
    """
    # Threshold uniform noise at 0.5 -> roughly half the pixels are set;
    # such a scatter fills the plane, so its box-counting dimension is 2.
    mask = (np.random.rand(512, 512) > 0.5).astype(float)
    fractal_dim = cloudmetrics.mask.fractal_dimension(mask=mask)
    np.testing.assert_allclose(fractal_dim, 2.0, atol=1e-4)
def test_line():
    """
    Test on vertical line (should have fractal_dim=1)
    """
    # A two-pixel-wide vertical stripe is one-dimensional under box counting.
    mask = np.zeros((512, 512))
    for col in (250, 251):
        mask[:, col] = 1
    fractal_dim = cloudmetrics.mask.fractal_dimension(mask=mask)
    np.testing.assert_allclose(fractal_dim, 1.0, atol=1e-4)
|
# -*- coding: utf-8 -*-
from django.contrib.auth import login
from django.shortcuts import redirect, get_object_or_404
from django.contrib.auth.decorators import login_required
from django.contrib.auth.decorators import user_passes_test
from django.forms.formsets import formset_factory
from django.forms.models import inlineformset_factory
from annoying.decorators import render_to
from django.contrib.auth.models import User
from Professor.models import Professor, Monitor
from Professor.views.utils import prof_monit_exist
from Materia.Turma.models import Turma
from Avaliacao.models import TemplateAvaliacao, Avaliacao
from Avaliacao.Questao.models import QuestaoDeAvaliacao, Questao ,FiltroQuestao
from Avaliacao.forms import criarTemplateAvaliacaoForm,criarFiltroQuestaoForm
@prof_monit_exist
@login_required
@render_to('avaliacao/criar.html')
def criar_avaliacao(request):
    """Create a TemplateAvaliacao together with its inline FiltroQuestao rows.

    GET renders empty forms; POST validates both the template form and the
    inline formset and saves only when both are valid.  The template receives
    this function's locals() via annoying's @render_to.
    """
    autor = request.user
    criado = False
    QuestoesFormsSet = formset_factory(criarFiltroQuestaoForm)  # NOTE(review): unused
    formsetInline = inlineformset_factory(TemplateAvaliacao, FiltroQuestao, extra=1)
    if request.method == "POST":
        form = criarTemplateAvaliacaoForm(request.POST)
        if form.is_valid():
            novaAvalicao = form.save(commit=False)
            novaAvalicao.autor = autor
            QuestoesForms = formsetInline(request.POST, instance=novaAvalicao)
            if QuestoesForms.is_valid():
                criado = True
                novaAvalicao.save()
                QuestoesForms.save()
        else:
            # Fix: when the main form is invalid the original left
            # QuestoesForms undefined, so the re-rendered page silently
            # dropped the inline question forms (and the user's input).
            # Re-bind them from the POST data instead.
            QuestoesForms = formsetInline(request.POST)
    else:
        QuestoesForms = formsetInline()
        form = criarTemplateAvaliacaoForm()
    return locals()
|
from sqlalchemy import TEXT, Column, Integer, String
from .database import ENGINE, Base
class ArticleOrm(Base):
    """A comment/article posted under an assumption, with like counting.

    reply_to enables threading -- presumably 0/sentinel for top-level
    comments; TODO confirm against the API layer.
    """
    __tablename__ = "ArticleOrm"
    id = Column("id", Integer, primary_key=True, autoincrement=True, nullable=False)
    id = Column("id", Integer, primary_key=True, autoincrement=True, nullable=False) if False else id  # noqa
    reply_to = Column("reply_to", Integer, nullable=False)
    assumption_id = Column("assumption_id", Integer, nullable=False)
    user_id = Column("user_id", Integer, nullable=False)
    title = Column("title", String(255), nullable=False)
    comment = Column("comment", TEXT(16380), nullable=False)
    like_sum = Column("like_sum", Integer, nullable=False)
class AssumptionOrm(Base):
    """An assumption (hypothesis/topic) inside a category; caches its own
    like total and the aggregated likes of its comments."""
    __tablename__ = "AssumptionOrm"
    id = Column("id", Integer, primary_key=True, autoincrement=True, nullable=False)
    category_id = Column("category_id", Integer, nullable=False)
    title = Column("title", String(256), nullable=False)
    like_sum = Column("like_sum", Integer, nullable=False)
    comments_like_sum = Column("comments_like_sum", Integer, nullable=False)
class CategoryOrm(Base):
    """A top-level category that groups assumptions; caches its own like
    total and the aggregated likes of its assumptions."""
    __tablename__ = "CategoryOrm"
    id = Column("id", Integer, primary_key=True, autoincrement=True, nullable=False)
    title = Column("title", String(255), nullable=False)
    picture = Column("picture", String(255))  # optional image path/URL
    like_sum = Column("like_sum", Integer, nullable=False)
    # Fix: the physical column was misspelled "asuumptions_like_sum".
    # NOTE: existing databases need a column-rename migration to match.
    assumptions_like_sum = Column("assumptions_like_sum", Integer, nullable=False)
class FollowOrm(Base):
    """A follower relationship: follower_id follows follow_id."""
    __tablename__ = "FollowOrm"
    id = Column("id", Integer, primary_key=True, autoincrement=True, nullable=False)
    follow_id = Column("follow_id", Integer, nullable=False)
    follower_id = Column("follower_id", Integer, nullable=False)
class LikeOrm(Base):
    """A like record; aggregated into the *_like_sum cache columns."""
    __tablename__ = "LikeOrm"
    id = Column("id", Integer, primary_key=True, autoincrement=True, nullable=False)
    category_id = Column("category_id", Integer, nullable=False)
    title = Column("title", String(255), nullable=False)
    user_id = Column("user_id", Integer, nullable=False)
    # Fix: the physical column was named "email" (copy/paste error) while the
    # attribute is like_sum.  NOTE: existing databases need a column-rename
    # migration to match.
    like_sum = Column("like_sum", Integer, nullable=False)
class RequestOrm(Base):
    """A user request/proposal attached to a category, with like counting."""
    __tablename__ = "RequestOrm"
    id = Column("id", Integer, primary_key=True, autoincrement=True, nullable=False)
    category_id = Column("category_id", Integer, nullable=False)
    title = Column("title", String(256), nullable=False)
    user_id = Column("user_id", Integer, nullable=False)
    like_sum = Column("like_sum", Integer, nullable=False)
class UserORM(Base):
    """An application user.

    NOTE(review): class name "UserORM" breaks the *Orm naming pattern of the
    sibling models (table name is still "UserOrm").
    """
    __tablename__ = "UserOrm"
    id = Column("id", Integer, primary_key=True, autoincrement=True, nullable=False)
    name = Column("name", String(256), nullable=False)
    picture = Column("picture", String(256), nullable=False)
    like_sum = Column("like_sum", Integer, nullable=False)
    profile = Column("profile", TEXT(16380))  # optional free-form bio
def main() -> None:
    """Create every table defined on Base's metadata (a no-op for tables
    that already exist)."""
    Base.metadata.create_all(bind=ENGINE)


if __name__ == "__main__":
    main()
|
from spack import *
import sys,os
# Make the shared helper module (../../common/scrampackage.py) importable.
sys.path.append(os.path.join(os.path.dirname(__file__), '../../common'))
from scrampackage import write_scram_toolfile
class CascadeToolfile(Package):
    """Spack package that only generates SCRAM toolfiles for an installed
    `cascade` dependency (no real sources: installs XML stubs)."""
    # Dummy payload: this package exists solely for its install() step.
    url = 'file://' + os.path.dirname(__file__) + '/../../common/junk.xml'
    version('1.0', '68841b7dcbd130afd7d236afe8fd5b949f017615', expand=False)
    depends_on('cascade')

    def install(self, spec, prefix):
        """Write cascade.xml and cascade_headers.xml SCRAM toolfiles with
        the cascade version/prefix substituted into the templates.

        ${VER}/${PFX} are replaced by write_scram_toolfile; the doubled
        `$$NAME` forms survive as literal $NAME references in the XML.
        """
        values = {}
        values['VER'] = spec['cascade'].version
        values['PFX'] = spec['cascade'].prefix
        fname = 'cascade.xml'
        contents = str("""
<tool name="cascade" version="${VER}">
<lib name="cascade_merged"/>
<client>
<environment name="CASCADE_BASE" default="${PFX}"/>
<environment name="LIBDIR" default="$$CASCADE_BASE/lib"/>
</client>
<runtime name="CASCADE_PDFPATH" value="$$CASCADE_BASE/share"/>
<use name="f77compiler"/>
<use name="cascade_headers"/>
</tool>
""")
        write_scram_toolfile(contents, values, fname, prefix)
        fname = 'cascade_headers.xml'
        contents = str("""
<tool name="cascade_headers" version="${VER}">
<client>
<environment name="CASCADE_HEADERS_BASE" default="${PFX}"/>
<environment name="INCLUDE" default="$$CASCADE_HEADERS_BASE/include"/>
</client>
<runtime name="ROOT_INCLUDE_PATH" value="$$INCLUDE" type="path"/>
<use name="root_cxxdefaults"/>
</tool>
""")
        write_scram_toolfile(contents, values, fname, prefix)
|
# Print the index value of every element in the list.
fr = ['bhvaya', 'komal', 'khushi', 'akshuni', 'divya']
# Fix: fr.index(i) returns the FIRST occurrence, so duplicate values would
# all report the same index -- and each call is an O(n) scan (O(n^2) total).
# enumerate() yields the true position of every element in one pass.
for idx, name in enumerate(fr):
    print('Index value of element({}) is:={}'.format(name, idx))
|
# Generated by Django 2.1.5 on 2019-08-05 05:01
from django.db import migrations
class Migration(migrations.Migration):
    """Drops the WebGroup model: first removes its relational fields
    (admin, first_member, group_members), then deletes the model itself."""
    # Non-atomic: needed for schema operations the backend cannot run
    # inside a transaction.
    atomic = False
    dependencies = [
        ('blog', '0065_auto_20190805_0558'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='webgroup',
            name='admin',
        ),
        migrations.RemoveField(
            model_name='webgroup',
            name='first_member',
        ),
        migrations.RemoveField(
            model_name='webgroup',
            name='group_members',
        ),
        migrations.DeleteModel(
            name='WebGroup',
        ),
    ]
|
#!/usr/bin/python3
import tkinter
import random
from tkinter import messagebox
import math
import sys
class CalculateNError(Exception):
    """Domain error raised by the gacha probability calculators for
    impossible or out-of-range inputs."""

    def __init__(self, msg):
        # Fix: forward the message to Exception.__init__ so str(err) and
        # err.args carry it (previously str(err) was an empty string).
        super().__init__(msg)
        self.msg = msg
# Cumulative probability bands for one gacha card, expressed as half-open
# integer ranges out of 1,000,000 (micro-probabilities).
gachaProb = [[0, 400000], # R essence    (40.0%)
[400000, 520000], # SR essence  (12.0%)
[520000, 560000], # SSR essence ( 4.0%)
[560000, 960000], # R servant   (40.0%)
[960000, 990000], # SR servant  ( 3.0%)
[990000, 1000000]]# SSR servant ( 1.0%)
def multiRangeRandom(args):
    """UNFINISHED stub (never called elsewhere in this file): apparently
    meant to draw a random value from multiple [lo, hi) ranges.  The loop
    overwrites max_val each iteration and `normalize` is never filled --
    TODO(review): implement or remove."""
    max_val = 0
    normalize = []
    for i in args:
        max_val = i[1] - i[0]
        pass
def calculatePMonteCarlo(cycles, servant, rarity, probability, reliability):
    """Estimate by simulation the chance that one 10-card multi-roll
    contains the targeted card.

    Simulates `cycles` multis.  Each multi has one guaranteed-servant slot
    and one guaranteed-SR-or-better slot; their positions are drawn once up
    front (matching the original implementation).  Returns hits/cycles,
    capped just below 1.0 so downstream log() calls stay defined.
    `reliability` is unused here (uniform signature with the closed forms).
    """
    if rarity < 3 or rarity > 5:
        raise CalculateNError("¯\_(ツ)_/¯")
    # Servant bands sit at gachaProb[3..5], essence bands at [0..2].
    band = rarity if servant else rarity - 3
    lo = int(gachaProb[band][0])
    hi = lo + int(probability * 1000000)
    if hi > int(gachaProb[band][1]):
        raise CalculateNError("Its imposible!")
    slot_servant = random.randint(1, 10)  # guaranteed-servant position
    slot_sr = random.randint(1, 10)       # guaranteed-SR+ position
    hits = 0
    for _ in range(cycles):
        for card in range(1, 11):
            if card == slot_servant and card == slot_sr:
                value = random.randint(gachaProb[4][0], gachaProb[5][1] - 1)
            elif card == slot_servant:
                value = random.randint(gachaProb[3][0], gachaProb[5][1] - 1)
            elif card == slot_sr:
                # SR+ slot: weigh SR/SSR servants against SR/SSR essences.
                p_serv = gachaProb[5][1] - gachaProb[4][0]
                p_all = gachaProb[5][1] - gachaProb[4][0] + gachaProb[2][1] - gachaProb[1][0]
                if random.randint(1, p_all) < p_serv:
                    value = random.randint(gachaProb[4][0], gachaProb[5][1] - 1)
                else:
                    value = random.randint(gachaProb[1][0], gachaProb[2][1] - 1)
            else:
                value = random.randint(gachaProb[0][0], gachaProb[5][1] - 1)
            if lo <= value < hi:
                hits += 1
    estimate = hits / cycles
    return 0.999 if estimate >= 1 else estimate
def calculateServantP(rarity, p, reliability):
    """Closed-form chance that one 10-roll multi yields a specific servant
    of the given rarity (p = its per-card rate as a fraction).

    Raises CalculateNError when p exceeds that rarity's pool rate or the
    rarity is not 3-5.  `reliability` is unused (uniform signature with the
    Monte-Carlo variant).
    """
    caps = {5: 0.01, 4: 0.03, 3: 0.40}
    if rarity not in caps:
        raise CalculateNError("WTF????")
    if p > caps[rarity]:
        raise CalculateNError("Its imposible!")
    if rarity == 3:
        return 1 - (((1 - p) ** 8) * ((0.44 - p) / 0.44) * 0.9 + 0.1 * ((1 - p) ** 9))
    # Rarities 4 and 5 share the same expression in the original model.
    return 1 - (((1 - p) ** 8) * ((0.44 - p) / 0.44) * 0.9 * ((0.2 - p)/ 0.2) + ((1 - p) ** 9) * ((0.04 - p)/0.04) * 0.1)
def calculateEssenceP(rarity, p, reliability):
    """Closed-form chance that one 10-roll multi yields a specific craft
    essence of the given rarity (p = its per-card rate as a fraction).

    Raises CalculateNError when p exceeds that rarity's pool rate or the
    rarity is not 3-5.  `reliability` is unused (uniform signature).
    """
    caps = {5: 0.04, 4: 0.12, 3: 0.40}
    if rarity not in caps:
        raise CalculateNError("WTF????")
    if p > caps[rarity]:
        raise CalculateNError("Its imposible!")
    if rarity == 3:
        return 1 - (((1 - p) ** 8))
    # Rarities 4 and 5 share the same expression in the original model.
    return 1 - (((1 - p) ** 8) * ((0.2 - p)/ 0.2) * 0.9 + 0.1 * ((1 - p) ** 9))
def calculateN(MonteCarlo, servant, rarity, p, reliability, cycles):
    """Return (one-roll probability, number of 10-roll multis needed).

    Dispatches to the Monte-Carlo simulator or the closed-form servant /
    essence formulas, then solves (1 - p_roll)^N <= 1 - reliability for N
    via the change-of-base logarithm.
    """
    if MonteCarlo:
        p_roll = calculatePMonteCarlo(cycles, servant, rarity, p, reliability)
    elif servant:
        p_roll = calculateServantP(rarity, p, reliability)
    else:
        p_roll = calculateEssenceP(rarity, p, reliability)
    return (p_roll, math.log(1 - reliability, 1 - p_roll))
class Window:
    """Tkinter front-end for the gacha calculator: collects the target
    type/rarity/probability/reliability, runs calculateN, and displays
    the results."""

    def __init__(self):
        # Build the whole UI, wire variables/callbacks, and enter mainloop.
        self.window = tkinter.Tk()
        # Target type ("Servant"/"Essence"); changing it resets defaults.
        self.type = tkinter.StringVar(self.window)
        self.type.set("Servant")
        self.type.trace("w",self.reset)
        self.reliability = tkinter.StringVar(self.window)
        self.reliability.set("95.0")
        self.probability = tkinter.StringVar(self.window)
        self.probability.set("1.0")
        self.rarity = tkinter.IntVar(self.window)
        self.rarity.set(5)
        self.MonteCarlo = tkinter.IntVar(self.window)
        self.MonteCarlo.set(1)
        self.MonteCarloCycles = tkinter.IntVar(self.window)
        # Input widgets.
        self.typeOptMenu = tkinter.OptionMenu(self.window,self.type,"Servant", "Essence")
        self.raritySBox = tkinter.Spinbox(self.window, from_ = 3, to = 5, textvariable = self.rarity, command=self.reset)
        self.pLab = tkinter.Label(self.window, text="Probability(%):")
        self.rLab = tkinter.Label(self.window, text="Reliability(%):")
        self.pEntry = tkinter.Entry(self.window, textvariable = self.probability)
        self.rEntry = tkinter.Entry(self.window, textvariable = self.reliability)
        self.CheckMonteCarlo = tkinter.Checkbutton(self.window, text = "Monte-Carlo method", variable = self.MonteCarlo,command = self.enableMonteCarlo)
        self.MonteCarloCyclesBox = tkinter.Spinbox(self.window, from_ = 10000,to = sys.maxsize, textvariable = self.MonteCarloCycles)
        self.CalcButton = tkinter.Button(self.window, text = "Calculate", command=self.calc)
        self.RstButton = tkinter.Button(self.window, text = "Reset", command=self.reset)
        # Output labels.
        self.labNumber = tkinter.Label(self.window, text = "Number:")
        self.labProbRoll = tkinter.Label(self.window, text = "Probability for one Roll:")
        self.labMX = tkinter.Label(self.window, text = "Expected value:")
        self.labTNumber = tkinter.Label(self.window, text = "Number of Tickets:")
        self.outNumber = tkinter.Label(self.window)
        self.outProbRoll = tkinter.Label(self.window)
        self.outMX = tkinter.Label(self.window)
        # NOTE(review): outNumber is created twice; this second assignment
        # discards the first (never-gridded) Label widget.
        self.outNumber = tkinter.Label(self.window)
        self.outTNumber = tkinter.Label(self.window)
        # Two-column grid layout.
        self.typeOptMenu.grid(row=0, column= 0); self.raritySBox.grid(row=0,column=1)
        self.pLab.grid(row=1, column=0,sticky=tkinter.E); self.pEntry.grid(row=1, column=1)
        self.rLab.grid(row=2, column=0,sticky=tkinter.E); self.rEntry.grid(row=2, column=1)
        self.CheckMonteCarlo.grid(row=3, column=0,sticky=tkinter.E);self.MonteCarloCyclesBox.grid(row=3, column=1,sticky=tkinter.W)
        self.CalcButton.grid(row=4,column=0); self.RstButton.grid(row=4,column=1)
        self.labNumber.grid(row=5, column=0,sticky=tkinter.E); self.outNumber.grid(row=5, column=1,sticky=tkinter.W)
        self.labProbRoll.grid(row=6, column=0,sticky=tkinter.E); self.outProbRoll.grid(row=6, column=1,sticky=tkinter.W)
        self.labMX.grid(row=7, column=0,sticky=tkinter.E); self.outMX.grid(row=7, column=1,sticky=tkinter.W)
        self.labTNumber.grid(row=8, column=0,sticky=tkinter.E); self.outTNumber.grid(row=8, column=1,sticky=tkinter.W)
        self.window.mainloop()

    def clear(self):
        """Blank all four output labels."""
        self.outNumber.config(text="")
        self.outProbRoll.config(text="")
        self.outMX.config(text="")
        self.outTNumber.config(text="")

    def reset(self, *args):
        """Restore the default pull rate for the selected type/rarity and
        clear previous results (wired to the type menu and rarity spinbox)."""
        if(self.type.get() == "Servant"):
            rarity = self.rarity.get()
            if (rarity == 3):
                self.probability.set("40.0")
            elif (rarity == 4):
                self.probability.set("3.0")
            elif (rarity == 5):
                self.probability.set("1.0")
        elif(self.type.get() == "Essence"):
            rarity = self.rarity.get()
            if (rarity == 3):
                self.probability.set("40.0")
            elif (rarity == 4):
                self.probability.set("12.0")
            elif (rarity == 5):
                self.probability.set("4.0")
        self.clear()

    def output(self, N, p, M, TN):
        """Render the computed results (multi count, per-roll %, expected
        value, ticket count) into the output labels."""
        self.outNumber.config(text=str(math.ceil(N)))
        self.outProbRoll.config(text=str(round(p,2))+" %")
        self.outMX.config(text=str(round(M,2)))
        self.outTNumber.config(text=str(math.ceil(TN)))

    def enableMonteCarlo(self):
        """Enable/disable the cycle-count spinbox with the checkbox state."""
        if (not self.MonteCarlo.get()):
            self.MonteCarloCyclesBox.config(state = tkinter.DISABLED)
        else:
            self.MonteCarloCyclesBox.config(state = tkinter.NORMAL)

    def calc(self):
        """Read the inputs, run calculateN, and display the results;
        invalid or impossible parameters pop an error dialog instead."""
        self.clear()
        try:
            servant = (self.type.get() == "Servant")
            rarity = self.rarity.get()
            probability = float(self.probability.get())/100
            reliability = float(self.reliability.get())/100
            oneRollProb, N = calculateN(self.MonteCarlo.get(), servant, rarity, probability, reliability, self.MonteCarloCycles.get())
            # Ticket count uses the raw per-card rate, not the multi rate.
            TicketN = math.log(1 - reliability, 1 - probability)
            # NOTE(review): "expected value" here is p_roll * N -- confirm
            # this is the intended statistic.
            MX = oneRollProb * N
        except ValueError:
            messagebox.showerror("Error","You should input correct values")
        except CalculateNError as error:
            messagebox.showerror("Error",error.msg)
        except ZeroDivisionError:
            messagebox.showerror("Sasi","Sasi")
        else:
            self.output(N, oneRollProb * 100, MX, TicketN)
# Build the UI and enter the Tk main loop (blocks until the window closes).
Window()
|
import argparse
import sys
import numpy as np
import matplotlib.pyplot as plt
import math
from orderlib import *
from numpy.random import randint
from numpy.random import rand
from numpy.random import choice
from random import randrange
from time import perf_counter
from math import sqrt
from statistics import mean, stdev
sys.setrecursionlimit(10000000)  # some sorts recurse deeply on large inputs
"""ARGPARSE"""
# Build the command-line parser (user-facing text stays in Spanish).
tester = argparse.ArgumentParser(description='Script para comparar el tiempo de ejecucion de diversos algoritmos de ordenamiento')
# Program arguments.
tester.add_argument('-i', help = "Numero de veces que se ejecutará la prueba", type = int)
tester.add_argument('-t', help = "Establece la prueba específica a ser llamada", type = int)
tester.add_argument('-g', help = "Activa la creacion de la grafica de resultados obtenidos", action = "store_true", default = False)
tester.add_argument('Enteros', metavar='N', type=int, nargs='+', help='Un numero de elementos para las pruebas') # positional: array sizes
# Parse the command line.
args = tester.parse_args()
# Defaults for the three test parameters.
n = args.Enteros # array sizes to benchmark
i = 3 # runs per size
t = 1 # test-case id (1-7)
# CLI overrides.
if args.i:
    i = args.i
if args.t:
    t = args.t
if args.g:
    g = True
else:
    g = False
""" MANEJO DE ERRORES Y ENTRADAS INVALIDAS """
# Validate the inputs (sizes present, i > 0, t in 1..7).
try:
    assert(len(n) > 0 and int(i) > 0 and 1 <= int(t) <= 7)
except:
    print("Valores Invalidos. n e i deben ser mayor que 0 y t debe estar entre 1 y 7.")
    print("\nEl programa terminara.")
    tester.exit(status=0, message="\nERROR = Datos invalidos")
# Graphing needs at least two sizes to draw a curve.
if g:
    try:
        assert(len(n) >= 2)
    except:
        tester.exit(status=0, message="\nSi activa -g debe introducir al menos 2 cantidades de elementos a probar.")
""" BIENVENIDA """
# Echo the effective parameters.
print("i=" + str(i))
print("t=" + str(t))
print("g=" + str(g))
""" FUNCIONES """
def algos(Arr:list): # run every sorting algorithm over a copy of Arr
    """Time one run of each algorithm on a fresh copy of *Arr*, appending
    each wall-clock duration to the matching module-level result list."""
    ArrCopy = Arr[:]
    # Mergesort
    start = perf_counter() # start wall clock
    mergeSort(ArrCopy)
    end = perf_counter() # stop wall clock
    time_select = (end - start)
    mergesort.append(time_select)
    ArrCopy = Arr[:]
    print("listo 1")
    # Iterative Quicksort
    start = perf_counter()
    quicksortIter(ArrCopy)
    end = perf_counter()
    time_select = (end - start)
    quick_iter.append(time_select)
    ArrCopy = Arr[:]
    print("listo 2")
    # NOTE(review): recursive Quicksort is disabled below, so the `quick`
    # list is never filled -- downstream mean(quick)/stdev(quick) will fail.
    """
    # Corrida Quicksort
    start = perf_counter()
    quickSort(ArrCopy, 0, len(ArrCopy) - 1)
    end = perf_counter()
    time_select = (end - start)
    quick.append(time_select)
    """
    ArrCopy = Arr[:]
    print("listo 3")
    # Median-of-3 Quicksort
    start = perf_counter()
    quicksortMedian(ArrCopy, 0, len(ArrCopy) - 1)
    end = perf_counter()
    time_select = (end - start)
    quick_median.append(time_select)
    ArrCopy = Arr[:]
    print("listo 4")
    # Introsort
    start = perf_counter()
    introSort(ArrCopy, 0, len(ArrCopy) - 1)
    end = perf_counter()
    time_select = (end - start)
    intro.append(time_select)
    ArrCopy = Arr[:]
    print("listo 5")
    # Quicksort with 3-way partitioning
    start = perf_counter()
    quicksortThreeWay(ArrCopy, 0, len(ArrCopy) - 1)
    end = perf_counter()
    time_select = (end - start)
    quick_way.append(time_select)
    ArrCopy = Arr[:]
    print("listo 6")
    # Dual-pivot Quicksort
    start = perf_counter()
    quicksortDual(ArrCopy, 0, len(ArrCopy) - 1)
    end = perf_counter()
    time_select = (end - start)
    quick_dual.append(time_select)
    ArrCopy = Arr[:]
    print("listo 7")
    # Timsort (Python's built-in sorted)
    start = perf_counter()
    sorted(ArrCopy)
    end = perf_counter()
    time_select = (end - start)
    tim.append(time_select)
    print("listo 8")
def mostrar_resultados(size):
    """Print the mean/std of every algorithm's recorded times for size
    n[size] and append each mean to its running-averages list.

    Fix: the original crashed with StatisticsError because (a) the `quick`
    list is never filled (the recursive Quicksort run is commented out in
    algos()) and (b) stdev() needs at least two samples, so -i 1 also
    crashed.  Empty result lists are now reported as disabled with a 0.0
    average so the graphing lists stay aligned with `n`, and a single
    sample reports STD 0.00.
    """
    def _reporta(nombre, tiempos, promedios):
        # One result line per algorithm, keeping the original wording.
        if not tiempos:
            print("\nSin datos para " + nombre + " (corrida deshabilitada).")
            promedios.append(0.0)
            return
        prom = mean(tiempos)
        desv = stdev(tiempos) if len(tiempos) > 1 else 0.0
        print("\nTiempo de ejecucion promedio de " + nombre + ": " +
              str("{0:.2f}".format(prom)) + "s." +
              " STD: " + str("{0:.2f}".format(desv)) + "s.")
        promedios.append(prom)

    print("\nAnalisis para arreglo de " + str(n[size]) + " elementos.")
    _reporta("Mergesort", mergesort, promedios_merge)
    _reporta("Quicksort Iterativo", quick_iter, promedios_quickIter)
    _reporta("Quicksort", quick, promedios_quick)
    _reporta("Median-of-3 Quicksort", quick_median, promedios_quickMedian)
    _reporta("Introsort", intro, promedios_intro)
    _reporta("Quicksort con 3-way-partitioning", quick_way, promedios_quickWay)
    _reporta("Dual Pivot Quicksort", quick_dual, promedios_quickDual)
    _reporta("Timsort", tim, promedios_tim)
def mensaje_Inicial(t:int, size:int):
    """Announce which of the seven test cases is about to run and for which
    array size (reads the module-level size list `n`).  Prints nothing for
    an unknown t, matching the original if/elif chain."""
    nombres = {
        1: "1: Punto flotante",
        2: "2: Ordenado",
        3: "3: Ordenado Inverso",
        4: "4: Cero-Uno",
        5: "5: Mitad",
        6: "6: Casi ordenado 1",
        7: "7: Casi ordenado 2",
    }
    if t in nombres:
        print("\nMostrando resultados de la prueba " + nombres[t] +
              " para el arreglo de tamanyo " + str(n[size]) + ".")
def generadorArr(t:int, n:int) -> list:
    """Build an n-element test array for test case t (1-7).

    1: random floats        2: sorted ints         3: reverse-sorted ints
    4: random 0/1           5: (1, 2, ..., N/2, N/2, ..., 2, 1)
    6: nearly sorted (16 random swaps at distance 8)
    7: nearly sorted (n//4 random swaps at distance 4)

    Fix: arreglo7 contained a leftover debug print(i) inside its swap loop,
    spamming n//4 lines of output on every generation.
    """
    def arreglo5(m:int):
        # "Mitad" array.  NOTE(review): for odd m the middle slot stays 0.
        arreglo = [0]*m
        for i in range(m//2):
            arreglo[i] = i + 1
            arreglo[m - i - 1] = i + 1
        return arreglo
    def arreglo6(m:int):
        # Sorted ints with 16 random swaps at distance 8.
        ordenado = sorted(randint(0, m + 1, m))
        for _ in range(16):
            k = randrange(0, m - 8)
            ordenado[k], ordenado[k + 8] = ordenado[k + 8], ordenado[k]
        return ordenado
    def arreglo7(m:int):
        # Sorted ints with m//4 random swaps at distance 4.
        ordenado = sorted(randint(0, m + 1, m))
        for _ in range(m//4):
            k = randrange(0, m - 4)
            ordenado[k], ordenado[k + 4] = ordenado[k + 4], ordenado[k]
        return ordenado
    if t == 1:
        Arr = rand(n) # random floats in [0, 1)
    elif t == 2:
        Arr = sorted(randint(0, n + 1, n)) # sorted ints
    elif t == 3:
        Arr = sorted(randint(0, n + 1, n), reverse = True) # reverse-sorted ints
    elif t == 4:
        Arr = choice([0, 1], size=(n)) # zeros and ones
    elif t == 5:
        Arr = arreglo5(n)
    elif t == 6:
        Arr = arreglo6(n)
    elif t == 7:
        Arr = arreglo7(n)
    return Arr
""" FUNCIONES GRAFICAS """
num_graficas = 0
marcadores = ['.', 'o', '*', '+', 'v', ',', '^', '<', '>', '1', '2', '3', '4', '5', '6', '7', '8', 's', 'p', 'P']
color_defecto = "C"
max_num_def_colores = 10
#
# Descripción: Encuentra el mejor ajuste de un conjunto de puntos
# a un polinomio de orden cuadrático
#
# Parametros:
# x: Lista con las coordenadas del eje X.
# y: Lista con las coordenadas del eje Y.
#
def puntos_cuadraticos(x, y):
    """Fit a degree-2 polynomial to the points (x, y) and return 50 smooth
    sample points (x_new, y_new) spanning [x[0], x[-1]] for plotting."""
    coeffs = np.polyfit(x, y, 2)
    x_new = np.linspace(x[0], x[-1], 50)
    return x_new, np.polyval(coeffs, x_new)
#
# Descripción: Dibuja puntos en el plano de la gráfica
#
# Parametros:
# x: Lista con las coordenadas del eje X.
# y: Lista con las coordenadas del eje Y.
# nombre: nombre de la grafica
def dibujar_grafica(x, y, nombre):
    """Plot the smoothed quadratic fit of (x, y) plus the raw points,
    cycling through the module-level marker and color tables.

    Increments the module-level series counter `num_graficas`.
    """
    global num_graficas
    marca = marcadores[num_graficas % len(marcadores)]
    color = color_defecto + str(num_graficas % max_num_def_colores)
    ajuste_x, ajuste_y = puntos_cuadraticos(x, y)
    plt.plot(ajuste_x, ajuste_y)
    plt.plot(x, y, color + marca, label=nombre)
    num_graficas += 1
#
# Descripción: Muestra en pantalla el gráfico dibujado
#
# Parametros:
# x_etiqueta: Etiqueta de las coordenadas del eje X.
# y_etiqueta: Etiqueta de las coordenadas del eje Y.
def mostrar_grafico(x_etiqueta, y_etiqueta):
    """Label the axes, place the legend and display the current figure.

    :param x_etiqueta: label for the X axis.
    :param y_etiqueta: label for the Y axis.
    """
    plt.xlabel(x_etiqueta)
    plt.ylabel(y_etiqueta)
    # Single legend call: the original called plt.legend() twice and the
    # first call (plt.legend(loc=2)) was immediately replaced by this one.
    plt.legend(bbox_to_anchor=(0.05, 0.95), loc=2, borderaxespad=0.)
    plt.show()
def display_graph():  # Funcion para llamar al graficador
    """Draw one curve per sorting algorithm and show the final figure."""
    series = [
        (promedios_merge, "Mergesort"),
        (promedios_quickIter, "Quicksort Iterativo"),
        (promedios_quick, "Quicksort"),
        (promedios_quickMedian, "Quicksort Median-of-3"),
        (promedios_intro, "Introsort"),
        (promedios_quickWay, "Quicksort with 3-way-partition"),
        (promedios_tim, "Timsort"),
    ]
    for datos, etiqueta in series:
        dibujar_grafica(n, datos, etiqueta)
    mostrar_grafico("Numero de elementos", "Tiempo(seg)")
""" COMIENZA EL PROGRAMA """
# Inicializar arreglos que almacenaran el tiempo promedio de corrida para cada N-corrida introducida en la linea de comandos.
promedios_merge = []
promedios_quickIter = []
promedios_quick = []
promedios_quickMedian = []
promedios_intro = []
promedios_quickWay = []
promedios_quickDual = []
promedios_tim = []
for size in range(len(n)): # Ciclo para evaluar todos los elementos suministrados por el usuario
mensaje_Inicial(t, size)
# Inicializar arreglos para almacenar los tiempos de cada corrida en cada N distinto
mergesort = []
quick_iter = []
quick = []
quick_median = []
intro = []
quick_way = []
quick_dual = []
tim = []
for k in range(i): # Ciclo interno para realizar todas las pruebas i-veces
arreglo = generadorArr(t, n[size])
algos(arreglo)
mostrar_resultados(size)
# Mostrar la grafica resultante en pantalla si -g fue llamado
if g:
display_graph() |
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) Qotto, 2019
from typing import Type
# Import BaseEventHandler
from tonga.models.handlers.event.event_handler import BaseEventHandler
# Import StoreBuilderBase
from tonga.stores.manager.kafka_store_manager import KafkaStoreManager
# Import BaseProducer
from tonga.services.producer.base import BaseProducer
# Import Coffee Model
from examples.coffee_bar.cash_register.models.events.coffee_served import CoffeeServed
from examples.coffee_bar.cash_register.models.events.bill_paid import BillPaid
from examples.coffee_bar.cash_register.models.bill import Bill
class CoffeeServedHandler(BaseEventHandler):
    """Handles CoffeeServed events: marks the bill as paid and emits BillPaid."""

    _store_builder: KafkaStoreManager
    _transactional_producer: BaseProducer

    def __init__(self, store_builder: KafkaStoreManager, transactional_producer: BaseProducer):
        # Store manager used to read/write bills in the local store.
        self._store_builder = store_builder
        # Transactional producer used to publish the follow-up event.
        self._transactional_producer = transactional_producer

    async def handle(self, event: Type[CoffeeServed]) -> None:
        # Gets bill in local store (keyed by the bill_uuid in the event context)
        bill = Bill.__from_bytes_dict__(await self._store_builder.get_from_local_store(event.context['bill_uuid']))
        # Updates bill: mark as paid and propagate the event context
        bill.set_is_paid(True)
        bill.set_context(event.context)
        # Sets updated bill on cash register local store
        await self._store_builder.set_from_local_store(bill.uuid, bill.__to_bytes_dict__())
        # Creates BillPaid event
        bill_paid = BillPaid(uuid=bill.uuid, coffee_uuid=bill.coffee_uuid, amount=bill.amount, context=bill.context)
        # Sends the BillPaid event (previous comment wrongly said BillCreated)
        await self._transactional_producer.send_and_wait(bill_paid, 'cash-register-events')

    @classmethod
    def handler_name(cls) -> str:
        # Event name this handler is registered for.
        return 'tonga.waiter.event.CoffeeServed'
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
def merge_states(states):
    """Deduplicate a sequence of dicts.

    Each dict is frozen into a hashable set of (key, value) pairs so
    duplicates collapse; the survivors are rebuilt as dicts.  The order
    of the returned list is unspecified (set iteration order).
    """
    unique = {frozenset(d.items()) for d in states}
    return [dict(pairs) for pairs in unique]
|
import datetime

# Demo of the datetime module: current time, components, construction and
# timedelta arithmetic.  Inline comments show sample output from one run.
date = datetime.datetime.now()
print(date)  # 2021-09-24 11:22:46.729396
print(date.date())  # 2021-09-24
print(date.time())  # 11:22:46.729396
print(
    date.year, date.month, date.day,
    date.hour, date.minute, date.second, date.microsecond,
    date.weekday(),  # Monday is 0 .. Sunday is 6
)
# 2021 9 24 11 22 46 729396 4

# Construct a specific (naive) datetime; omitted time fields default to 0.
customDate = datetime.datetime(2021, 9, 25)
print(customDate)  # 2021-09-25 00:00:00

delta = datetime.timedelta(
    weeks=1, days=2, hours=2,
    minutes=1, seconds=12, milliseconds=10
)
print(delta)  # 9 days, 2:01:12.010000

zeroDelta = datetime.timedelta()
print(zeroDelta)  # 0:00:00

# Subtracting two datetimes yields a timedelta; adding a timedelta to a
# datetime yields a new datetime.
print(customDate - date)
# 12:37:13.270604
print(customDate + delta)
# 2021-10-04 02:01:12.010000
|
# -*- coding: utf-8 -*-
import pyloco
import copy
import cartopy
import cartopy.util
# Cartopy projection used when --projection is not given on the command line.
default_projection = "PlateCarree"
class EarthPlotTask(pyloco.taskclass("ncplot")):
    """Create a plot for earth science

    Extends the generic "ncplot" task with cartopy support: map projection,
    coastlines, stock image, color bar, cyclic points, data-coordinate
    transforms and Natural Earth shapes.

    Examples
    ---------
    """

    _name_ = "earthplot"
    _version_ = "0.1.6"
    _install_requires_ = ["nctools", "cartopy"]

    def __init__(self, parent):
        super(EarthPlotTask, self).__init__(parent)

        # Earth-specific command-line options; param_parse=True makes pyloco
        # parse the value into an Option object (vargs/kwargs/context).
        self.add_option_argument("--projection", default=default_projection, param_parse=True,
                                 help="set map projection (default=%s)" % default_projection)
        self.add_option_argument("--coastlines", nargs="?", param_parse=True, const="", help="add coastlines to the map")
        self.add_option_argument("--stock-image", nargs="?", param_parse=True, const="", help="add an underlay image to the map")
        self.add_option_argument("--colorbar", nargs="?", param_parse=True, const="", help="add a color bar to the map")
        self.add_option_argument("--colorbar-eval", nargs="?", param_parse=True, const="", help="add a color bar evaluation")
        self.add_option_argument("--cyclic-point", param_parse=True, help="add cyclic point in an array")
        self.add_option_argument("--transform", param_parse=True, help="data coordinate system")
        self.add_option_argument("--shape-earth", param_parse=True, help="nature earth shapes")

    def pre_perform(self, targs):
        """Translate the earth-specific options into the generic plotting
        options (subplot/axes/pyplot/pyplot_eval) understood by the parent
        "ncplot" task, before the actual plotting happens."""
        super(EarthPlotTask, self).pre_perform(targs)

        # Make cartopy visible to the expression strings evaluated in _env.
        self._env["cartopy"] = cartopy

        # Build the projection constructor expression, e.g.
        # "cartopy.crs.Mollweide(central_longitude=180)".
        if targs.projection:
            projection = targs.projection.vargs[0]
            proj_args = ["%s=%s" % p for p in targs.projection.kwargs.items()]
            proj = "cartopy.crs.%s(%s)" % (projection, ", ".join(proj_args))
        else:
            proj = "cartopy.crs.%s()" % default_projection

        # Attach the projection to every subplot (create one if needed).
        #if hasattr(targs, "subplot") and targs.subplot:
        if targs.subplot:
            for subplot in targs.subplot:
                subplot.kwargs["projection"] = proj
        else:
            opt = pyloco.Option(projection=proj)
            targs.subplot = [opt]

        # Coastlines are rendered later as an axes-method call.
        if targs.coastlines:
            targs.coastlines.context.append("coastlines")
            #if hasattr(targs, "axes") and targs.axes:
            if targs.axes:
                targs.axes.append(targs.coastlines)
            else:
                targs.axes = [targs.coastlines]

        # Natural Earth "stock" background image, also an axes method.
        if targs.stock_image:
            targs.stock_image.context.append("stock_img")
            #if hasattr(targs, "axes") and targs.axes:
            if targs.axes:
                targs.axes.append(targs.stock_image)
            else:
                targs.axes = [targs.stock_image]

        # Color bar is a pyplot-level call, inserted at the front context.
        if targs.colorbar:
            targs.colorbar.context.insert(0, "colorbar")
            #if hasattr(targs, "pyplot") and targs.pyplot:
            if targs.pyplot:
                targs.pyplot.append(targs.colorbar)
            else:
                targs.pyplot = [targs.colorbar]

        # Rewrite colorbar-eval expressions so they are evaluated against
        # the named pyplot object stored in _pyplots_.
        if targs.colorbar_eval:
            ctx = targs.colorbar_eval.context.pop(0)
            vargs = targs.colorbar_eval.vargs
            kwargs = targs.colorbar_eval.kwargs
            for idx in range(len(vargs)):
                vargs[idx] = "_pyplots_['%s']." % ctx + vargs[idx]
            for key in kwargs.keys():
                kwargs[key] = "_pyplots_['%s']." % ctx + kwargs[key]
            if targs.pyplot_eval:
                targs.pyplot_eval.append(targs.colorbar_eval)
            else:
                targs.pyplot_eval = [targs.colorbar_eval]

        # Wrap the named array (and optionally its coordinate) with
        # cartopy.util.add_cyclic_point, mutating the variables in _env.
        # NOTE(review): exec/eval on user-supplied option strings -- only
        # safe when the command line is trusted.
        if targs.cyclic_point:
            args = []
            data = targs.cyclic_point.vargs[0] + "[:]"
            args.append(data)
            coord = targs.cyclic_point.kwargs.get("coord", None)
            axis = targs.cyclic_point.kwargs.get("axis", "-1")
            if coord:
                coord += "[:]"
                args.append("coord=" + coord)
                args.append("axis=" + axis)
                exec("%s, %s = cartopy.util.add_cyclic_point(%s)" % (data, coord, ",".join(args)), self._env)
            else:
                args.append("axis=" + axis)
                exec("%s = cartopy.util.add_cyclic_point(%s)" % (data, ",".join(args)), self._env)

        # Load Natural Earth shapes and schedule one add_geometries call
        # per geometry on the axes.
        if targs.shape_earth:
            import cartopy.io.shapereader as shpreader
            res = eval(targs.shape_earth.kwargs.pop("resolution", "'110m'"), self._env)
            cat = eval(targs.shape_earth.kwargs.pop("category", "'cultural'"), self._env)
            name = eval(targs.shape_earth.kwargs.pop("name", "'admin_1_states_provinces_lakes_shp'"), self._env)
            shapes = shpreader.natural_earth(resolution=res, category=cat, name=name)
            if len(targs.shape_earth.vargs) == 0:
                targs.shape_earth.vargs.append(proj)
            #for idx, shape in enumerate(shpreader.Reader(shapes).records()):
            for idx, shape in enumerate(shpreader.Reader(shapes).geometries()):
                newopt = copy.deepcopy(targs.shape_earth)
                # Each geometry is stashed in _env under a unique name and
                # referenced by that name in the generated option.
                shape_varname = "_shape_earth_record_%d" % idx
                self._env[shape_varname] = shape
                newopt.vargs.insert(0, "[%s]" % shape_varname)
                newopt.context.append("add_geometries")
                if targs.axes:
                    targs.axes.append(newopt)
                else:
                    targs.axes = [newopt]

        # Data-coordinate transform: applied to every plot command, or
        # default to PlateCarree when the plot specifies none.
        transform_name = None
        transform_args = ""

        if targs.transform:
            transform_name = targs.transform.context[0]
            targs.transform.context = []
            transform_args = str(targs.transform)

        if targs.plot:
            for plot in targs.plot:
                if transform_name is not None:
                    plot.kwargs["transform"] = ("cartopy.crs.%s(%s)" %
                                                (transform_name, transform_args))
                elif "transform" not in plot.kwargs:
                    plot.kwargs["transform"] = "cartopy.crs.PlateCarree()"
|
# Copyright 2023 Pulser Development Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines a special Result subclass for simulation runs returning states."""
from __future__ import annotations
from dataclasses import dataclass
from typing import Union, cast
import numpy as np
import qutip
from pulser.result import Result
@dataclass
class QutipResult(Result):
    """Represents the result of a run as a Qutip QObj.

    Args:
        atom_order: The order of the atoms in the bitstrings that
            represent the measured states.
        meas_basis: The measurement basis.
        state: The Qobj representing the state. Can be a statevector
            or a density matrix.
        matching_meas_basis: Whether the measurement basis is the
            same as the state's basis.
    """

    # The Qobj holding the final state (ket or density matrix).
    state: qutip.Qobj
    # True when the measurement basis matches the state's own basis.
    matching_meas_basis: bool

    @property
    def sampling_errors(self) -> dict[str, float]:
        """The sampling error associated to each bitstring's sampling rate.

        Uses the standard error of the mean as a quantifier for sampling error.
        """
        # The state is known exactly, so every bitstring has zero error.
        return {bitstr: 0.0 for bitstr in self.sampling_dist}

    @property
    def _dim(self) -> int:
        """Dimension of each single-atom subsystem (2 or 3 levels)."""
        full_state_size = np.prod(self.state.shape)
        if not self.state.isket:
            # Density matrix: the full Hilbert-space size is squared.
            full_state_size = np.sqrt(full_state_size)
        # self._size is presumably the number of atoms (from Result) --
        # dim ** size == full_state_size, so invert with the size-th root.
        return cast(
            int, np.rint(full_state_size ** (1 / self._size)).astype(int)
        )

    @property
    def _basis_name(self) -> str:
        """Name of the basis the stored state is expressed in."""
        if self._dim > 2:
            # Three-level systems carry all bases at once.
            return "all"
        if self.meas_basis == "XY":
            return "XY"
        if not self.matching_meas_basis:
            # Two-level state in the *other* basis than the one measured.
            return (
                "digital"
                if self.meas_basis == "ground-rydberg"
                else "ground-rydberg"
            )
        return self.meas_basis

    def _weights(self) -> np.ndarray:
        """Probability weight of each measured bitstring (normalized)."""
        n = self._size
        # Per-basis-state probabilities: diagonal for a density matrix,
        # |amplitude|^2 for a ket.
        if not self.state.isket:
            probs = np.abs(self.state.diag())
        else:
            probs = (np.abs(self.state.full()) ** 2).flatten()

        if self._dim == 2:
            if self.matching_meas_basis:
                # State vector ordered with r first for 'ground_rydberg'
                # e.g. n=2: [rr, rg, gr, gg] -> [11, 10, 01, 00]
                # Invert the order -> [00, 01, 10, 11] correspondence
                # The same applies in XY mode, which is ordered with u first
                weights = (
                    probs if self.meas_basis == "digital" else probs[::-1]
                )
            else:
                # Only 000...000 is measured
                weights = np.zeros(probs.size)
                weights[0] = 1.0

        elif self._dim == 3:
            # Map the three-level state onto two-level bitstrings: one of
            # the three levels reads as "1", the other two as "0".
            if self.meas_basis == "ground-rydberg":
                one_state = 0  # 1 = |r>
                ex_one = slice(1, 3)
            elif self.meas_basis == "digital":
                one_state = 2  # 1 = |h>
                ex_one = slice(0, 2)
            else:
                raise RuntimeError(
                    f"Unknown measurement basis '{self.meas_basis}' "
                    "for a three-level system.'"
                )
            probs = probs.reshape([3] * n)
            weights = np.zeros(2**n)
            for dec_val in range(2**n):
                ind: list[Union[int, slice]] = []
                for v in np.binary_repr(dec_val, width=n):
                    if v == "0":
                        ind.append(ex_one)
                    else:
                        ind.append(one_state)
                # Eg: 'digital' basis : |1> = index2, |0> = index0, 1 = 0:2
                # p_11010 = sum(probs[2, 2, 0:2, 2, 0:2])
                # We sum all probabilites that correspond to measuring
                # 11010, namely hhghg, hhrhg, hhghr, hhrhr
                weights[dec_val] = np.sum(probs[tuple(ind)])
        else:
            raise NotImplementedError(
                "Cannot sample system with single-atom state vectors of "
                "dimension > 3."
            )
        # Takes care of numerical artefacts in case sum(weights) != 1
        return cast(np.ndarray, weights / sum(weights))

    def get_state(
        self,
        reduce_to_basis: str | None = None,
        ignore_global_phase: bool = True,
        tol: float = 1e-6,
        normalize: bool = True,
    ) -> qutip.Qobj:
        """Gets the state with some optional post-processing.

        Args:
            reduce_to_basis: Reduces the full state vector
                to the given basis ("ground-rydberg" or "digital"), if the
                population of the states to be ignored is negligible. Doesn't
                apply to XY mode.
            ignore_global_phase: If True and if the final state is a vector,
                changes the final state's global phase such that the largest
                term (in absolute value) is real.
            tol: Maximum allowed population of each eliminated state.
            normalize: Whether to normalize the reduced state.

        Returns:
            The resulting state.

        Raises:
            TypeError: If trying to reduce to a basis that would eliminate
                states with significant occupation probabilites.
        """
        state = self.state.copy()
        is_density_matrix = state.isoper
        if ignore_global_phase and not is_density_matrix:
            # Rotate so the largest-magnitude amplitude becomes real.
            full = state.full()
            global_ph = float(np.angle(full[np.argmax(np.abs(full))])[0])
            state *= np.exp(-1j * global_ph)
        if self._dim != 3:
            # Two-level states are already in a single basis; reduction is
            # only meaningful when it targets that same basis.
            if reduce_to_basis not in [None, self._basis_name]:
                raise TypeError(
                    f"Can't reduce a system in {self._basis_name}"
                    + f" to the {reduce_to_basis} basis."
                )
        elif reduce_to_basis is not None:
            if is_density_matrix:  # pragma: no cover
                # Not tested as noise in digital or all basis not implemented
                raise NotImplementedError(
                    "Reduce to basis not implemented for density matrix"
                    " states."
                )
            # Pick which single-atom level gets eliminated.
            if reduce_to_basis == "ground-rydberg":
                ex_state = "2"
            elif reduce_to_basis == "digital":
                ex_state = "0"
            else:
                raise ValueError(
                    "'reduce_to_basis' must be 'ground-rydberg' "
                    + f"or 'digital', not '{reduce_to_basis}'."
                )
            # Indices (base-3 representation) containing the excluded level.
            ex_inds = [
                i
                for i in range(3**self._size)
                if ex_state in np.base_repr(i, base=3).zfill(self._size)
            ]
            ex_probs = np.abs(state.extract_states(ex_inds).full()) ** 2
            if not np.all(np.isclose(ex_probs, 0, atol=tol)):
                raise TypeError(
                    "Can't reduce to chosen basis because the population of a "
                    "state to eliminate is above the allowed tolerance."
                )
            state = state.eliminate_states(ex_inds, normalize=normalize)
        return state.tidyup()
|
# djikstra.py
import heapq
def relax(graph, costs, node, child):
    """Edge relaxation: lower costs[child] if reaching it via node is cheaper."""
    candidate = costs[node] + graph[node][child]
    if candidate < costs[child]:
        costs[child] = candidate
def dijkstra(graph, source):
    """Compute shortest-path costs from source using Dijkstra's algorithm.

    :param graph: adjacency dict -- graph[u][v] is the (non-negative)
                  weight of the edge u -> v; every node is a key.
    :param source: start node (must be a key of graph).
    :returns: dict mapping each node to its minimal cost from source
              (float('Inf') for unreachable nodes).
    """
    costs = {node: float('Inf') for node in graph}
    costs[source] = 0
    visited = set()
    queue = [(0, source)]
    while queue:
        node = heapq.heappop(queue)[1]
        if node in visited:
            # Lazy deletion: this is a stale heap entry for a node that was
            # already finalized with a smaller cost.  The original code
            # re-processed such nodes, redundantly relaxing their edges and
            # re-pushing children every time.
            continue
        visited.add(node)
        for child in graph[node]:
            if child not in visited:
                # Relax the edge (same logic as the module's relax()).
                candidate = costs[node] + graph[node][child]
                if candidate < costs[child]:
                    costs[child] = candidate
                heapq.heappush(queue, (costs[child], child))
    return costs
|
import numpy as np
from numpy.fft import fft, ifft, fftfreq, rfftfreq
from astropy.io import ascii,fits
from scipy.interpolate import InterpolatedUnivariateSpline, interp1d
from scipy.integrate import trapz
from scipy.special import j1
import multiprocessing as mp
import sys
import gc
import os
import bz2
import h5py
from functools import partial
import itertools
from collections import OrderedDict
import Starfish
from .spectrum import create_log_lam_grid, calculate_dv, calculate_dv_dict
from . import constants as C
def chunk_list(mylist, n=mp.cpu_count()):
    '''
    Divide a lengthy parameter list into chunks for parallel processing and
    backfill if necessary.

    :param mylist: a lengthy list of parameter combinations
    :type mylist: 1-D list
    :param n: number of chunks to divide list into. Default is ``mp.cpu_count()``
    :type n: integer

    :returns: **chunks** (*2-D list* of shape (n, -1)) a list of chunked parameter lists.
    '''
    size = len(mylist) // n
    # n evenly-sized chunks first...
    chunks = [mylist[j * size:(j + 1) * size] for j in range(n)]
    # ...then distribute the leftover items round-robin across the chunks.
    for offset, item in enumerate(mylist[n * size:]):
        chunks[offset % n].append(item)
    return chunks
def determine_chunk_log(wl, wl_min, wl_max):
    '''
    Take in a wavelength array and then, given two minimum bounds, determine
    the boolean indices that will allow us to truncate this grid to near the
    requested bounds while forcing the wl length to be a power of 2.

    :param wl: wavelength array
    :type wl: np.ndarray
    :param wl_min: minimum required wavelength
    :type wl_min: float
    :param wl_max: maximum required wavelength
    :type wl_max: float

    :returns: a np.ndarray boolean array used to index into the wl array.
    '''
    # wl_min and wl_max must of course be within the bounds of wl
    assert wl_min >= np.min(wl) and wl_max <= np.max(wl), "determine_chunk_log: wl_min {:.2f} and wl_max {:.2f} are not within the bounds of the grid {:.2f} to {:.2f}.".format(wl_min, wl_max, np.min(wl), np.max(wl))

    # Find the smallest length synthetic spectrum that is a power of 2 in length
    # and longer than the number of points contained between wl_min and wl_max
    len_wl = len(wl)
    npoints = np.sum((wl >= wl_min) & (wl <= wl_max))
    chunk = len_wl
    inds = (0, chunk)

    # This loop will exit with chunk being the smallest power of 2 that is
    # larger than npoints
    while chunk > npoints:
        if chunk/2 > npoints:
            chunk = chunk//2
        else:
            break

    # FIX: `type(chunk) == np.int` relied on the np.int alias, which was
    # removed in NumPy 1.24 and raised AttributeError at runtime.  chunk
    # comes from len() and //, so it is always a builtin int.
    assert isinstance(chunk, int), "Chunk is not an integer!. Chunk is {}".format(chunk)

    if chunk < len_wl:
        # Now that we have determined the length of the chunk of the synthetic
        # spectrum, determine indices that straddle the data spectrum.

        # Find the index that corresponds to the wl at the center of the data spectrum
        center_wl = (wl_min + wl_max)/2.
        center_ind = (np.abs(wl - center_wl)).argmin()

        #Take a chunk that straddles either side.
        inds = (center_ind - chunk//2, center_ind + chunk//2)

        ind = (np.arange(len_wl) >= inds[0]) & (np.arange(len_wl) < inds[1])
    else:
        print("keeping grid as is")
        ind = np.ones_like(wl, dtype='bool')

    assert (min(wl[ind]) <= wl_min) and (max(wl[ind]) >= wl_max), "Model"\
        "Interpolator chunking ({:.2f}, {:.2f}) didn't encapsulate full"\
        " wl range ({:.2f}, {:.2f}).".format(min(wl[ind]), max(wl[ind]), wl_min, wl_max)

    return ind
class RawGridInterface:
    '''
    A base class to handle interfacing with synthetic spectral libraries.

    :param name: name of the spectral library
    :type name: string
    :param param_names: the names of the parameters (dimensions) of the grid
    :type param_names: list
    :param points: the grid points at which
        spectra exist (assumes grid is square, not ragged, meaning that every combination
        of parameters specified exists in the grid).
    :type points: list of numpy arrays
    :param air: Are the wavelengths measured in air?
    :type air: bool
    :param wl_range: the starting and ending wavelength ranges of the grid to
        truncate to. Defaults to [3000, 13000] when not given.
    :type wl_range: list of len 2 [min, max]
    :param base: path to the root of the files on disk (may be None).
    :type base: string
    '''
    def __init__(self, name, param_names, points, air=True, wl_range=None, base=None):
        self.name = name
        self.param_names = param_names
        self.points = points
        self.air = air
        # Use a None sentinel instead of a shared mutable default list.
        self.wl_range = [3000, 13000] if wl_range is None else wl_range
        # FIX: the default base=None crashed here, because
        # os.path.expandvars(None) raises TypeError.
        self.base = os.path.expandvars(base) if base is not None else None

    def check_params(self, parameters):
        '''
        Determine if the specified parameters are allowed in the grid.

        :param parameters: parameter set to check
        :type parameters: np.array

        :raises C.GridError: if the parameter values are outside of the grid bounds
        '''
        assert len(parameters) == len(self.param_names)
        for param, ppoints in zip(parameters, self.points):
            if param not in ppoints:
                raise C.GridError("{} not in the grid points {}".format(param, ppoints))

    def load_flux(self, parameters, norm=True):
        '''
        Load the synthetic flux from the disk and :meth:`check_params`

        :param parameters: stellar parameters describing a spectrum
        :type parameters: np.array

        .. note::

            This method is designed to be extended by the inheriting class
        '''
        pass
class PHOENIXGridInterface(RawGridInterface):
    '''
    An Interface to the PHOENIX/Husser synthetic library.

    :param norm: normalize the spectrum to solar luminosity?
    :type norm: bool
    '''
    def __init__(self, air=True, norm=True, wl_range=[3000, 54000],
                 base=Starfish.grid["raw_path"]):
        super().__init__(name="PHOENIX",
            param_names = ["temp", "logg", "Z", "alpha"],
            points=[
                np.array([2300, 2400, 2500, 2600, 2700, 2800, 2900, 3000, 3100, 3200,
                3300, 3400, 3500, 3600, 3700, 3800, 3900, 4000, 4100, 4200, 4300, 4400,
                4500, 4600, 4700, 4800, 4900, 5000, 5100, 5200, 5300, 5400, 5500, 5600,
                5700, 5800, 5900, 6000, 6100, 6200, 6300, 6400, 6500, 6600, 6700, 6800,
                6900, 7000, 7200, 7400, 7600, 7800, 8000, 8200, 8400, 8600, 8800, 9000,
                9200, 9400, 9600, 9800, 10000, 10200, 10400, 10600, 10800, 11000, 11200,
                11400, 11600, 11800, 12000]),
                np.arange(0.0, 6.1, 0.5),
                np.arange(-2., 1.1, 0.5),
                np.array([-0.2, 0.0, 0.2, 0.4, 0.6, 0.8])],
            air=air, wl_range=wl_range, base=base) #wl_range used to be [2999, 13001]

        self.norm = norm #Normalize to 1 solar luminosity?
        # Per-parameter maps from numeric value to the token used in PHOENIX
        # file names; None means "format the number directly".
        self.par_dicts = [None,
                          None,
                          {-2:"-2.0", -1.5:"-1.5", -1:'-1.0', -0.5:'-0.5',
                           0.0: '-0.0', 0.5: '+0.5', 1: '+1.0'},
                          {-0.4:".Alpha=-0.40", -0.2:".Alpha=-0.20",
                           0.0: "", 0.2:".Alpha=+0.20", 0.4:".Alpha=+0.40",
                           0.6:".Alpha=+0.60", 0.8:".Alpha=+0.80"}]

        # if air is true, convert the normally vacuum file to air wls.
        try:
            base = os.path.expandvars(self.base)
            wl_file = fits.open(base + "WAVE_PHOENIX-ACES-AGSS-COND-2011.fits")
        except OSError:
            raise C.GridError("Wavelength file improperly specified.")

        w_full = wl_file[0].data
        wl_file.close()
        if self.air:
            # vacuum_to_air is defined elsewhere in this module.
            self.wl_full = vacuum_to_air(w_full)
        else:
            self.wl_full = w_full

        # Boolean mask that truncates the master wavelength grid to wl_range.
        self.ind = (self.wl_full >= self.wl_range[0]) & (self.wl_full <= self.wl_range[1])
        self.wl = self.wl_full[self.ind]

        # Per-spectrum file-path template, filled in by load_flux().
        self.rname = self.base + "Z{2:}{3:}/lte{0:0>5.0f}-{1:.2f}{2:}{3:}" \
                     ".PHOENIX-ACES-AGSS-COND-2011-HiRes.fits"

    def load_flux(self, parameters, norm=True):
        '''
        Load just the flux and header information.

        :param parameters: stellar parameters
        :type parameters: np.array

        :raises C.GridError: if the file cannot be found on disk.

        :returns: tuple (flux_array, header_dict)
        '''
        self.check_params(parameters) # Check to make sure that the keys are
        # allowed and that the values are in the grid

        # Create a list of the parameters to be fed to the format string
        # optionally replacing arguments using the dictionaries, if the formatting
        # of a certain parameter is tricky
        str_parameters = []
        for param, par_dict in zip(parameters, self.par_dicts):
            if par_dict is None:
                str_parameters.append(param)
            else:
                str_parameters.append(par_dict[param])

        fname = self.rname.format(*str_parameters)

        #Still need to check that file is in the grid, otherwise raise a C.GridError
        #Read all metadata in from the FITS header, and append to spectrum
        try:
            flux_file = fits.open(fname)
            f = flux_file[0].data
            hdr = flux_file[0].header
            flux_file.close()
        except OSError:
            raise C.GridError("{} is not on disk.".format(fname))

        #If we want to normalize the spectra, we must do it now since later we won't have the full EM range
        if self.norm:
            f *= 1e-8 #convert from erg/cm^2/s/cm to erg/cm^2/s/A
            F_bol = trapz(f, self.wl_full)
            f = f * (C.F_sun / F_bol) #bolometric luminosity is always 1 L_sun

        #Add temp, logg, Z, alpha, norm to the metadata
        header = {}
        header["norm"] = self.norm
        header["air"] = self.air
        #Keep only the relevant PHOENIX keywords, which start with PHX
        for key, value in hdr.items():
            if key[:3] == "PHX":
                header[key] = value

        return (f[self.ind], header)
class PHOENIXGridInterfaceNoAlpha(PHOENIXGridInterface):
    '''
    An Interface to the PHOENIX/Husser synthetic library, restricted to the
    alpha=0.0 subset: the grid parameters are temp, logg and Z only.

    :param norm: normalize the spectrum to solar luminosity?
    :type norm: bool
    '''
    def __init__(self, air=True, norm=True, wl_range=[3000, 54000],
                 base=Starfish.grid["raw_path"]):
        # Initialize according to the regular PHOENIX values
        super().__init__(air=air, norm=norm, wl_range=wl_range, base=base)

        # Now override parameters to exclude alpha
        self.param_names = ["temp", "logg", "Z"]
        self.points=[
            np.array([2300, 2400, 2500, 2600, 2700, 2800, 2900, 3000, 3100, 3200,
            3300, 3400, 3500, 3600, 3700, 3800, 3900, 4000, 4100, 4200, 4300, 4400,
            4500, 4600, 4700, 4800, 4900, 5000, 5100, 5200, 5300, 5400, 5500, 5600,
            5700, 5800, 5900, 6000, 6100, 6200, 6300, 6400, 6500, 6600, 6700, 6800,
            6900, 7000, 7200, 7400, 7600, 7800, 8000, 8200, 8400, 8600, 8800, 9000,
            9200, 9400, 9600, 9800, 10000, 10200, 10400, 10600, 10800, 11000, 11200,
            11400, 11600, 11800, 12000]),
            np.arange(0.0, 6.1, 0.5),
            np.arange(-2., 1.1, 0.5)]

        # Value-to-token maps for the file-name template (no alpha entry).
        self.par_dicts = [None,
                          None,
                          {-2:"-2.0", -1.5:"-1.5", -1:'-1.0', -0.5:'-0.5',
                           0.0: '-0.0', 0.5: '+0.5', 1: '+1.0'}]

        # File-name template without the alpha token.
        base = os.path.expandvars(self.base)
        self.rname = base + "Z{2:}/lte{0:0>5.0f}-{1:.2f}{2:}" \
                     ".PHOENIX-ACES-AGSS-COND-2011-HiRes.fits"
class KuruczGridInterface(RawGridInterface):
    '''Kurucz grid interface.

    Spectra are stored in ``f_nu`` in a filename like
    ``t03500g00m25ap00k2v070z1i00.fits``, ``ap00`` means zero alpha enhancement,
    and ``k2`` is the microturbulence, while ``z1`` is the macroturbulence.
    These particular values are roughly the ones appropriate for the Sun.
    '''
    def __init__(self, air=True, norm=True, wl_range=[5000, 5400], base=Starfish.grid["raw_path"]):
        super().__init__(name="Kurucz",
            param_names = ["temp", "logg", "Z"],
            points=[np.arange(3500, 9751, 250),
                    np.arange(0.0, 5.1, 0.5),
                    np.array([-2.5, -2.0, -1.5, -1.0, -0.5, 0.0, 0.5])],
            air=air, wl_range=wl_range, base=base)

        # Metallicity values map to "mXX"/"pXX" tokens in the file names.
        self.par_dicts = [None, None, {-2.5:"m25", -2.0:"m20", -1.5:"m15", -1.0:"m10", -0.5:"m05", 0.0:"p00", 0.5:"p05"}]
        self.norm = norm #Convert to f_lam and average to 1, or leave in f_nu?
        # NOTE(review): uses the raw `base` argument rather than the
        # expandvars'd self.base used by the PHOENIX interfaces -- confirm
        # this is intentional.
        self.rname = base + "t{0:0>5.0f}/g{1:0>2.0f}/t{0:0>5.0f}g{1:0>2.0f}{2}ap00k2v000z1i00.fits"
        self.wl_full = np.load(base + "kurucz_raw_wl.npy")
        # Boolean mask truncating the master wavelength grid to wl_range.
        self.ind = (self.wl_full >= self.wl_range[0]) & (self.wl_full <= self.wl_range[1])
        self.wl = self.wl_full[self.ind]

    def load_flux(self, parameters, norm=True):
        '''
        Load the flux and header information.

        :param parameters: stellar parameters
        :type parameters: dict

        :raises C.GridError: if the file cannot be found on disk.

        :returns: tuple (flux_array, header_dict)
        '''
        self.check_params(parameters)

        str_parameters = []
        for param, par_dict in zip(parameters, self.par_dicts):
            if par_dict is None:
                str_parameters.append(param)
            else:
                str_parameters.append(par_dict[param])

        #Multiply logg by 10 (file names encode logg without the decimal point)
        str_parameters[1] *= 10

        fname = self.rname.format(*str_parameters)

        #Still need to check that file is in the grid, otherwise raise a C.GridError
        #Read all metadata in from the FITS header, and append to spectrum
        try:
            flux_file = fits.open(fname)
            f = flux_file[0].data
            hdr = flux_file[0].header
            flux_file.close()
        except OSError:
            raise C.GridError("{} is not on disk.".format(fname))

        #We cannot normalize the spectra, since we don't have a full wl range, so instead we set the average
        #flux to be 1
        #Also, we should convert from f_nu to f_lam
        if self.norm:
            f *= C.c_ang / self.wl**2 #Convert from f_nu to f_lambda
            f /= np.average(f) #divide by the mean flux, so avg(f) = 1

        #Add temp, logg, Z, norm to the metadata
        header = {}
        header["norm"] = self.norm
        header["air"] = self.air
        # Copies every FITS header keyword (no PHX-style filtering here).
        for key, value in hdr.items():
            header[key] = value

        return (f[self.ind], header)
class BTSettlGridInterface(RawGridInterface):
    '''BTSettl grid interface. Unlike the PHOENIX and Kurucz grids, the
    individual files of the BTSettl grid do not always have the same wavelength
    sampling. Therefore, each call of :meth:`load_flux` will interpolate the
    flux onto a LogLambda spaced grid that ranges between `wl_range` and has a
    velocity spacing of 0.08 km/s or better.

    If you have a choice, it's probably easier to use the Husser PHOENIX grid.
    '''
    def __init__(self, air=True, norm=True, wl_range=[2999, 13000], base="libraries/raw/BTSettl/"):
        # NOTE(review): this call passes `points` as a dict and omits the
        # positional `param_names` that RawGridInterface.__init__ requires
        # -- as written it would raise a TypeError. This class appears stale
        # relative to the current base-class API; confirm before use.
        super().__init__(name="BTSettl",
            points={"temp":np.arange(3000, 7001, 100),
                    "logg":np.arange(2.5, 5.6, 0.5),
                    "Z":np.arange(-0.5, 0.6, 0.5),
                    "alpha": np.array([0.0])},
            air=air, wl_range=wl_range, base=base)
        self.norm = norm #Normalize to 1 solar luminosity?
        # Per-spectrum file-path template (bz2-compressed text spectra).
        self.rname = self.base + "CIFIST2011/M{Z:}/lte{temp:0>3.0f}-{logg:.1f}{Z:}.BT-Settl.spec.7.bz2"
        # self.Z_dict = {-2:"-2.0", -1.5:"-1.5", -1:'-1.0', -0.5:'-0.5', 0.0: '-0.0', 0.5: '+0.5', 1: '+1.0'}
        self.Z_dict = {-0.5:'-0.5a+0.2', 0.0: '-0.0a+0.0', 0.5: '+0.5a0.0'}

        # Target LogLambda wavelength grid onto which each spectrum is
        # interpolated (0.08 km/s spacing or better).
        wl_dict = create_log_lam_grid(wl_start=self.wl_range[0], wl_end=self.wl_range[1], min_vc=0.08/C.c_kms)
        self.wl = wl_dict['wl']

    def load_flux(self, parameters):
        '''
        Because of the crazy format of the BTSettl, we need to sort the wl to make sure
        everything is unique, and we're not screwing ourselves with the spline.
        '''
        # NOTE(review): the visible base class defines check_params() and
        # load_flux() but no load_file() -- this super() call looks stale
        # and would raise AttributeError; presumably it should be
        # self.check_params(parameters). Confirm before use.
        super().load_file(parameters) #Check to make sure that the keys are allowed and that the values are in the grid

        str_parameters = parameters.copy()

        #Rewrite Z
        Z = parameters["Z"]
        str_parameters["Z"] = self.Z_dict[Z]

        #Multiply temp by 0.01
        str_parameters["temp"] = 0.01 * parameters['temp']

        fname = self.rname.format(**str_parameters)
        # Spectra ship as bz2-compressed fixed-width text files.
        file = bz2.BZ2File(fname, 'r')
        lines = file.readlines()
        strlines = [line.decode('utf-8') for line in lines]
        file.close()
        data = ascii.read(strlines, col_starts=[0, 13], col_ends=[12, 25], Reader=ascii.FixedWidthNoHeader)
        wl = data['col1']
        fl_str = data['col2']
        # idl_float is presumably defined elsewhere in this module; it
        # handles FORTRAN/IDL "D" exponents that Python's float() rejects.
        fl = idl_float(fl_str) #convert because of "D" exponent, unreadable in Python
        fl = 10 ** (fl - 8.) #now in ergs/cm^2/s/A

        #"Clean" the wl and flux points. Remove duplicates, sort in increasing wl
        wl, ind = np.unique(wl, return_index=True)
        fl = fl[ind]

        if self.norm:
            F_bol = trapz(fl, wl)
            fl = fl * (C.F_sun / F_bol)
            # the bolometric luminosity is always 1 L_sun

        # truncate the spectrum to the wl range of interest
        # at this step, make the range a little more so that the next stage of
        # spline interpolation is properly in bounds
        ind = (wl >= (self.wl_range[0] - 50.)) & (wl <= (self.wl_range[1] + 50.))
        wl = wl[ind]
        fl = fl[ind]

        if self.air:
            #Shift the wl that correspond to the raw spectrum
            wl = vacuum_to_air(wl)

        #Now interpolate wl, fl onto self.wl
        interp = InterpolatedUnivariateSpline(wl, fl, k=5)
        fl_interp = interp(self.wl)

        return fl_interp
class CIFISTGridInterface(RawGridInterface):
    '''CIFIST grid interface, grid available here: https://phoenix.ens-lyon.fr/Grids/BT-Settl/CIFIST2011_2015/FITS/.
    Unlike the PHOENIX and Kurucz grids, the
    individual files of the BTSettl grid do not always have the same wavelength
    sampling. Therefore, each call of :meth:`load_flux` will interpolate the
    flux onto a LogLambda spaced grid that ranges between `wl_range` and has a
    velocity spacing of 0.08 km/s or better.

    If you have a choice, it's probably easier to use the Husser PHOENIX grid.
    '''
    def __init__(self, air=True, norm=True, wl_range=[3000, 13000], base=Starfish.grid["raw_path"]):
        super().__init__(name="CIFIST",
            points=[np.concatenate((np.arange(1200, 2351, 50), np.arange(2400, 7001, 100)), axis=0),
                    np.arange(2.5, 5.6, 0.5)],
            param_names = ["temp", "logg"],
            air=air, wl_range=wl_range, base=base)

        self.par_dicts = [None, None]
        self.norm = norm #Normalize to 1 solar luminosity?
        # Per-spectrum file-path template (gzipped FITS tables).
        self.rname = self.base + "lte{0:0>5.1f}-{1:.1f}-0.0a+0.0.BT-Settl.spec.fits.gz"
        # Target LogLambda wavelength grid with 0.08 km/s velocity spacing.
        wl_dict = create_log_lam_grid(dv=0.08, wl_start=self.wl_range[0], wl_end=self.wl_range[1])
        self.wl = wl_dict['wl']
        # FIX: removed a stray debug `print(self.wl)` that dumped the whole
        # wavelength array to stdout on every instantiation.

    def load_flux(self, parameters):
        '''
        Load the flux and header information, interpolated onto ``self.wl``.

        Because of the crazy format of the BTSettl, we need to sort the wl to make sure
        everything is unique, and we're not screwing ourselves with the spline.

        :param parameters: stellar parameters (temp, logg)
        :raises C.GridError: if the file cannot be found on disk.
        :returns: tuple (flux_array, header_dict)
        '''
        self.check_params(parameters)

        str_parameters = []
        for param, par_dict in zip(parameters, self.par_dicts):
            if par_dict is None:
                str_parameters.append(param)
            else:
                str_parameters.append(par_dict[param])

        #Multiply temp by 0.01 (file names encode temperature in hundreds of K)
        str_parameters[0] = 0.01 * parameters[0]

        fname = self.rname.format(*str_parameters)

        #Still need to check that file is in the grid, otherwise raise a C.GridError
        #Read all metadata in from the FITS header, and append to spectrum
        try:
            flux_file = fits.open(fname)
            data = flux_file[1].data
            hdr = flux_file[1].header
            wl = data["Wavelength"] * 1e4 # [Convert to angstroms]
            fl = data["Flux"]
            flux_file.close()
        except OSError:
            raise C.GridError("{} is not on disk.".format(fname))

        #"Clean" the wl and flux points. Remove duplicates, sort in increasing wl
        wl, ind = np.unique(wl, return_index=True)
        fl = fl[ind]

        if self.norm:
            F_bol = trapz(fl, wl)
            fl = fl * (C.F_sun / F_bol)
            # the bolometric luminosity is always 1 L_sun

        # truncate the spectrum to the wl range of interest
        # at this step, make the range a little more so that the next stage of
        # spline interpolation is properly in bounds
        ind = (wl >= (self.wl_range[0] - 50.)) & (wl <= (self.wl_range[1] + 50.))
        wl = wl[ind]
        fl = fl[ind]

        if self.air:
            #Shift the wl that correspond to the raw spectrum
            wl = vacuum_to_air(wl)

        #Now interpolate wl, fl onto self.wl
        interp = InterpolatedUnivariateSpline(wl, fl, k=5)
        fl_interp = interp(self.wl)

        #Add temp, logg, Z, norm to the metadata
        header = {}
        header["norm"] = self.norm
        header["air"] = self.air
        #Keep the relevant keywords
        for key, value in hdr.items():
            header[key] = value

        return (fl_interp, header)
class HDF5Creator:
    '''
    Create a HDF5 grid to store all of the spectra from a RawGridInterface,
    along with metadata.
    '''

    def __init__(self, GridInterface, filename, Instrument, ranges=None,
                 key_name=Starfish.grid["key_name"], vsinis=None):
        '''
        :param GridInterface: :obj:`RawGridInterface` object or subclass thereof
            to access raw spectra on disk.
        :param filename: where to create the HDF5 file. Suffix ``*.hdf5`` recommended.
        :param Instrument: the instrument to convolve/truncate the grid. If you
            want a high-res grid, use the NullInstrument.
        :param ranges: lower and upper limits for each stellar parameter,
            in order to truncate the number of spectra in the grid.
        :type ranges: list of (low, high) pairs, one per parameter
        :param key_name: formatting string that has keys for each of the parameter
            names to translate into a hash-able string.
        :type key_name: string
        :param vsinis: accepted for interface compatibility; vsini values are
            passed per-spectrum to :meth:`process_flux` instead.

        This object is designed to be run in serial.
        '''
        if ranges is None:
            # Programatically define each range to be (-np.inf, np.inf)
            ranges = []
            for par in Starfish.parname:
                ranges.append([-np.inf, np.inf])

        self.GridInterface = GridInterface
        # Only store the *name* of the HDF5 file, because otherwise the object
        # cannot be parallelized.
        self.filename = os.path.expandvars(filename)
        self.Instrument = Instrument

        # The flux formatting key will always have alpha in the name, regardless
        # of whether or not the library uses it as a parameter.
        self.key_name = key_name

        # Take only those points of the GridInterface that fall within the ranges specified
        self.points = []
        for i, (low, high) in enumerate(ranges):
            valid_points = self.GridInterface.points[i]
            ind = (valid_points >= low) & (valid_points <= high)
            self.points.append(valid_points[ind])
        # Note that at this point, this is just the grid points that fall within the rectangular
        # bounds set by ranges. If the raw library is actually irregular (e.g. CIFIST),
        # then self.points will contain points that don't actually exist in the raw library.

        # the raw wl from the spectral library
        self.wl_native = self.GridInterface.wl
        self.dv_native = calculate_dv(self.wl_native)

        self.hdf5 = h5py.File(self.filename, "w")
        self.hdf5.attrs["grid_name"] = GridInterface.name
        self.hdf5.flux_group = self.hdf5.create_group("flux")
        self.hdf5.flux_group.attrs["unit"] = "erg/cm^2/s/A"

        # We'll need a few wavelength grids
        # 1. The original synthetic grid: ``self.wl_native``
        # 2. A finely spaced log-lambda grid respecting the ``dv`` of
        #    ``self.wl_native``, onto which we can interpolate the flux values
        #    in preparation of the FFT: ``self.wl_FFT``
        # [ DO FFT ]
        # 3. A log-lambda spaced grid onto which we can downsample the result
        #    of the FFT, spaced with a ``dv`` such that we respect the remaining
        #    Fourier modes: ``self.wl_final``

        # There are three ranges to consider when wanting to make a grid:
        # 1. The full range of the synthetic library
        # 2. The full range of the instrument/dataset
        # 3. The range specified by the user in config.yaml
        # For speed reasons, we will always truncate to wl_range.
        wl_min, wl_max = Starfish.grid["wl_range"]
        buffer = Starfish.grid["buffer"]  # [AA]
        wl_min -= buffer
        wl_max += buffer

        # If the raw synthetic grid doesn't span the full range of the user
        # specified grid, truncate to the extent of the synthetic grid instead.
        if (self.wl_native[0] > wl_min) or (self.wl_native[-1] < wl_max):
            print("Synthetic grid does not encapsulate chosen wl_range in config.yaml, truncating new grid to extent of synthetic grid, {}, {}".format(self.wl_native[0], self.wl_native[-1]))
            wl_min, wl_max = self.wl_native[0], self.wl_native[-1]

        # Calculate wl_FFT using the dv that preserves the native quality of
        # the raw PHOENIX grid.
        wl_dict = create_log_lam_grid(self.dv_native, wl_min, wl_max)
        self.wl_FFT = wl_dict["wl"]
        self.dv_FFT = calculate_dv_dict(wl_dict)

        print("FFT grid stretches from {} to {}".format(self.wl_FFT[0], self.wl_FFT[-1]))
        print("wl_FFT dv is {} km/s".format(self.dv_FFT))

        # The Fourier coordinate
        self.ss = rfftfreq(len(self.wl_FFT), d=self.dv_FFT)

        # The instrumental taper: broadening is a multiplication by a Gaussian
        # in Fourier space.
        sigma = self.Instrument.FWHM / 2.35  # in km/s
        self.taper = np.exp(-2 * (np.pi ** 2) * (sigma ** 2) * (self.ss ** 2))
        # Junk value so the vsini kernel in process_flux doesn't divide by zero.
        self.ss[0] = 0.01

        # The final wavelength grid, onto which we will interpolate the
        # Fourier filtered wavelengths, is derived from the Instrument object.
        dv_temp = self.Instrument.FWHM / self.Instrument.oversampling
        wl_dict = create_log_lam_grid(dv_temp, wl_min, wl_max)
        self.wl_final = wl_dict["wl"]
        self.dv_final = calculate_dv_dict(wl_dict)

        # Create the wl dataset separately using float64 due to rounding errors w/ interpolation.
        wl_dset = self.hdf5.create_dataset("wl", (len(self.wl_final),), dtype="f8", compression='gzip', compression_opts=9)
        wl_dset[:] = self.wl_final
        wl_dset.attrs["air"] = self.GridInterface.air
        wl_dset.attrs["dv"] = self.dv_final

    def process_flux(self, parameters):
        '''
        Take a flux file from the raw grid and process it according to the
        instrument.

        :param parameters: the model parameters; if one element longer than the
            grid parameters, the trailing value is taken as vsini.
        :type parameters: 1D np.array

        :returns: a tuple of (flux, header). If the flux could not be loaded,
            returns (None, None).
        '''
        print("Processing", parameters)

        # If the parameter length is one more than the grid pars,
        # assume the extra, final value is vsini for rotational convolution.
        if len(parameters) == (len(Starfish.parname) + 1):
            vsini = parameters[-1]
            parameters = parameters[:-1]
        else:
            vsini = 0.0

        try:
            flux, header = self.GridInterface.load_flux(parameters)

            # Interpolate the native spectrum to the log-lam FFT grid
            interp = InterpolatedUnivariateSpline(self.wl_native, flux, k=5)
            fl = interp(self.wl_FFT)
            del interp
            gc.collect()

            # Do the FFT
            FF = np.fft.rfft(fl)

            if vsini > 0.0:
                # Calculate the stellar rotational broadening kernel
                ub = 2. * np.pi * vsini * self.ss
                sb = j1(ub) / ub - 3 * np.cos(ub) / (2 * ub ** 2) + 3. * np.sin(ub) / (2 * ub ** 3)
                # set zeroth frequency to 1 separately (DC term)
                sb[0] = 1.
                # institute vsini and instrumental taper
                FF_tap = FF * sb * self.taper
            else:
                # apply just the instrumental taper
                FF_tap = FF * self.taper

            # do IFFT
            fl_tapered = np.fft.irfft(FF_tap)

            # downsample to the final grid
            interp = InterpolatedUnivariateSpline(self.wl_FFT, fl_tapered, k=5)
            fl_final = interp(self.wl_final)
            del interp
            gc.collect()

            return (fl_final, header)

        except C.GridError as e:
            print("No file with parameters {}. C.GridError: {}".format(parameters, e))
            return (None, None)

    def process_grid(self):
        '''
        Run :meth:`process_flux` for all of the spectra within the `ranges`
        and store the processed spectra in the HDF5 file.

        Only executed in serial for now.
        '''
        # Take all parameter permutations in self.points and create a list of
        # numpy arrays specifying the parameters.
        param_list = [np.array(i) for i in itertools.product(*self.points)]
        all_params = np.array(param_list)

        print("Total of {} files to process.".format(len(param_list)))

        # Collect only the parameter combinations that actually produced a
        # spectrum. The previous implementation called np.delete on
        # ``all_params`` while enumerating it, which shifts the indices of all
        # subsequent rows and can delete the wrong parameters when more than
        # one file is missing.
        valid_params = []
        for param in all_params:
            fl, header = self.process_flux(param)
            if fl is None:
                print("Deleting {} from all params, does not exist.".format(param))
                continue

            # The PHOENIX spectra are stored as float32, and so we do the same here.
            flux = self.hdf5["flux"].create_dataset(self.key_name.format(*param),
                                                    shape=(len(fl),), dtype="f", compression='gzip',
                                                    compression_opts=9)
            flux[:] = fl

            # Store header keywords as attributes in HDF5 file
            for key, value in header.items():
                if key != "" and value != "":  # check for empty FITS kws
                    flux.attrs[key] = value

            valid_params.append(param)

        all_params = np.array(valid_params)
        par_dset = self.hdf5.create_dataset("pars", all_params.shape, dtype="f8", compression='gzip', compression_opts=9)
        par_dset[:] = all_params
        self.hdf5.close()
class HDF5Interface:
    '''
    Connect to an HDF5 file that stores spectra.
    '''
    def __init__(self, filename=Starfish.grid["hdf5_path"], key_name=Starfish.grid["key_name"]):
        '''
        :param filename: the name of the HDF5 file
        :type filename: string
        :param key_name: formatting string that converts a parameter
            combination into the name of a flux dataset within the file
        :type key_name: string
        '''
        self.filename = os.path.expandvars(filename)
        self.key_name = key_name

        # In order to properly interface with the HDF5 file, we need to learn
        # a few things about it
        # 1.) Which parameter combinations exist in the file (self.grid_points)
        # 2.) What are the minimum and maximum values for each parameter (self.bounds)
        # 3.) Which values exist for each parameter (self.points)

        with h5py.File(self.filename, "r") as hdf5:
            self.wl = hdf5["wl"][:]
            self.wl_header = dict(hdf5["wl"].attrs.items())
            self.dv = self.wl_header["dv"]
            self.grid_points = hdf5["pars"][:]

        #determine the bounding regions of the grid by sorting the grid_points
        low = np.min(self.grid_points, axis=0)
        high = np.max(self.grid_points, axis=0)
        self.bounds = np.vstack((low, high)).T
        self.points = [np.unique(self.grid_points[:, i]) for i in range(self.grid_points.shape[1])]

        self.ind = None #Overwritten by other methods using this as part of a ModelInterpolator

    def load_flux(self, parameters):
        '''
        Load just the flux from the grid, with possibly an index truncation.

        :param parameters: the stellar parameters
        :type parameters: np.array

        :raises C.GridError: if the spectrum is not found in the HDF5 file
            (the underlying h5py KeyError is wrapped).

        :returns: flux array
        '''
        key = self.key_name.format(*parameters)
        with h5py.File(self.filename, "r") as hdf5:
            try:
                if self.ind is not None:
                    # Read only the truncated index range from disk.
                    fl = hdf5['flux'][key][self.ind[0]:self.ind[1]]
                else:
                    fl = hdf5['flux'][key][:]
            except KeyError as e:
                # h5py raises KeyError for a missing dataset; re-raise as GridError.
                raise C.GridError(e)

        return fl

    @property
    def fluxes(self):
        '''
        Iterator to loop over all of the spectra stored in the grid, for PCA.

        Loops over parameters in the order specified by grid_points.
        '''
        for grid_point in self.grid_points:
            yield self.load_flux(grid_point)

    def load_flux_hdr(self, parameters):
        '''
        Just like load_flux, but also returns the header.

        :param parameters: the stellar parameters
        :type parameters: np.array

        :raises C.GridError: if the spectrum is not found in the HDF5 file.

        :returns: (flux array, header dict)
        '''
        key = self.key_name.format(*parameters)
        with h5py.File(self.filename, "r") as hdf5:
            try:
                hdr = dict(hdf5['flux'][key].attrs)
                if self.ind is not None:
                    fl = hdf5['flux'][key][self.ind[0]:self.ind[1]]
                else:
                    fl = hdf5['flux'][key][:]
            except KeyError as e:
                # h5py raises KeyError for a missing dataset; re-raise as GridError.
                raise C.GridError(e)

        return (fl, hdr)
class IndexInterpolator:
    '''
    Object to return fractional distance between grid points of a single grid variable.

    :param parameter_list: list of parameter values
    :type parameter_list: 1-D list
    '''

    def __init__(self, parameter_list):
        # Sort and de-duplicate so that index i maps monotonically to value.
        self.parameter_list = np.unique(parameter_list)
        # Linear map from a parameter value to its fractional index in
        # parameter_list; raises ValueError outside the grid bounds.
        self.index_interpolator = interp1d(self.parameter_list, np.arange(len(self.parameter_list)), kind='linear')

    def __call__(self, value):
        '''
        Evaluate the interpolator at a parameter.

        :param value: the parameter value to bracket
        :type value: float

        :raises C.InterpolationError: if *value* is out of bounds.

        :returns: ((low_val, high_val), (frac_low, frac_high)), the lower and higher bounding points in the grid
        and the fractional distance (0 - 1) between them and the value.
        '''
        try:
            index = self.index_interpolator(value)
        except ValueError as e:
            raise C.InterpolationError("Requested value {} is out of bounds. {}".format(value, e))
        # np.ceil/np.floor return floats; cast to int so they can be used as
        # array indices (float indexing raises in modern NumPy).
        high = int(np.ceil(index))
        low = int(np.floor(index))
        frac_index = index - low
        return ((self.parameter_list[low], self.parameter_list[high]), ((1 - frac_index), frac_index))
class Interpolator:
    '''
    Quickly and efficiently interpolate a synthetic spectrum for use in an MCMC
    simulation. Caches spectra for easier memory load.

    :param wl: data wavelength of the region you are trying to fit. Used to truncate the grid for speed.
    :type wl: np.array
    :param interface: :obj:`HDF5Interface` (recommended) or :obj:`RawGridInterface` to load spectra
    :param cache_max: maximum number of spectra to hold in cache
    :type cache_max: int
    :param cache_dump: how many spectra to purge from the cache once :attr:`cache_max` is reached
    :type cache_dump: int
    '''
    def __init__(self, wl, interface, cache_max=256, cache_dump=64):
        self.interface = interface
        self.wl = self.interface.wl
        self.dv = self.interface.dv
        self.npars = len(Starfish.grid["parname"])
        # Truncate the interface to the data wavelength range up front.
        self._determine_chunk_log(wl)
        self.setup_index_interpolators()

        # OrderedDict acts as a FIFO cache: oldest entries are evicted first.
        self.cache = OrderedDict([])
        self.cache_max = cache_max
        self.cache_dump = cache_dump #how many to clear once the maximum cache has been reached

    def _determine_chunk_log(self, wl):
        '''
        Using the data wavelengths, determine the minimum chunksize that we can
        use and then truncate the synthetic wavelength grid and the returned spectra.

        Assumes HDF5Interface is LogLambda spaced, because otherwise you shouldn't need a grid
        with 2^n points, because you would need to interpolate in wl space after this anyway.

        :param wl: data wavelengths to cover
        '''
        wl_interface = self.interface.wl # The grid we will be truncating.
        wl_min, wl_max = np.min(wl), np.max(wl)

        # Previously this routine retuned a tuple () which was the ranges to truncate to.
        # Now this routine returns a Boolean mask,
        # so we need to go and find the first and last true values
        ind = determine_chunk_log(wl_interface, wl_min, wl_max)
        self.wl = self.wl[ind]

        # Find the index of the first and last true values
        self.interface.ind = np.argwhere(ind)[0][0], np.argwhere(ind)[-1][0] + 1

    def _determine_chunk(self):
        '''
        Using the DataSpectrum, set the bounds of the interpolator to +/- 5 Ang.

        NOTE(review): this method reads ``self.dataSpectrum``, which is never
        assigned anywhere in this class — it looks stale/dead. Confirm before
        calling; :meth:`_determine_chunk_log` is what __init__ uses.
        '''
        wave_grid = self.interface.wl
        wl_min, wl_max = np.min(self.dataSpectrum.wls), np.max(self.dataSpectrum.wls)

        ind_low = (np.abs(wave_grid - (wl_min - 5.))).argmin()
        ind_high = (np.abs(wave_grid - (wl_max + 5.))).argmin()

        self.wl = self.wl[ind_low:ind_high]

        assert min(self.wl) < wl_min and max(self.wl) > wl_max, "ModelInterpolator chunking ({:.2f}, {:.2f}) didn't encapsulate full DataSpectrum range ({:.2f}, {:.2f}).".format(min(self.wl), max(self.wl), wl_min, wl_max)

        self.interface.ind = (ind_low, ind_high)
        print("Wl is {}".format(len(self.wl)))

    def __call__(self, parameters):
        '''
        Interpolate a spectrum.

        :param parameters: stellar parameters
        :type parameters: np.array

        Automatically pops :attr:`cache_dump` items from cache if full.
        '''
        if len(self.cache) > self.cache_max:
            # Evict the oldest cache_dump entries (FIFO order).
            [self.cache.popitem(False) for i in range(self.cache_dump)]
            # NOTE(review): cache_counter is never initialized in __init__ nor
            # read elsewhere in this class — confirm it is still needed.
            self.cache_counter = 0
        return self.interpolate(parameters)

    def setup_index_interpolators(self):
        # create an interpolator between grid points indices.
        # Given a parameter value, produce fractional index between two points
        # Store the interpolators as a list
        self.index_interpolators = [IndexInterpolator(self.interface.points[i]) for i in range(self.npars)]

        lenF = self.interface.ind[1] - self.interface.ind[0]
        # One row per corner of the parameter hypercube (2**npars corners).
        self.fluxes = np.empty((2**self.npars, lenF))

    def interpolate(self, parameters):
        '''
        Interpolate a spectrum without clearing cache. Recommended to use :meth:`__call__` instead.

        :param parameters: grid parameters
        :type parameters: np.array

        :raises C.InterpolationError: if parameters are out of bounds.

        :returns: interpolated flux, normalized so its median is 1.0
        '''
        # Previously, parameters was a dictionary of the stellar parameters.
        # Now that we have moved over to arrays, it is a numpy array.
        try:
            edges = []
            for i in range(self.npars):
                edges.append(self.index_interpolators[i](parameters[i]))
        except C.InterpolationError as e:
            raise C.InterpolationError("Parameters {} are out of bounds. {}".format(parameters, e))

        #Edges is a list of [((6000, 6100), (0.2, 0.8)), ((), ()), ((), ())]
        params = [tup[0] for tup in edges] #[(6000, 6100), (4.0, 4.5), ...]
        weights = [tup[1] for tup in edges] #[(0.2, 0.8), (0.4, 0.6), ...]

        #Selects all the possible combinations of parameters
        param_combos = list(itertools.product(*params))
        #[(6000, 4.0, 0.0), (6100, 4.0, 0.0), (6000, 4.5, 0.0), ...]
        weight_combos = list(itertools.product(*weights))
        #[(0.2, 0.4, 1.0), (0.8, 0.4, 1.0), ...]

        # Assemble key list necessary for indexing cache
        key_list = [self.interface.key_name.format(*param) for param in param_combos]
        # Multilinear interpolation: each corner's weight is the product of its
        # per-axis weights, and the weights must sum to 1.
        weight_list = np.array([np.prod(weight) for weight in weight_combos])
        assert np.allclose(np.sum(weight_list), np.array(1.0)), "Sum of weights must equal 1, {}".format(np.sum(weight_list))

        #Assemble flux vector from cache, or load into cache if not there
        for i,param in enumerate(param_combos):
            key = key_list[i]
            if key not in self.cache.keys():
                try:
                    #This method already allows loading only the relevant region from HDF5
                    fl = self.interface.load_flux(np.array(param))
                except KeyError as e:
                    raise C.InterpolationError("Parameters {} not in master HDF5 grid. {}".format(param, e))
                self.cache[key] = fl

            self.fluxes[i,:] = self.cache[key]*weight_list[i]

        # Do the averaging and then normalize the average flux to 1.0
        fl = np.sum(self.fluxes, axis=0)
        fl /= np.median(fl)
        return fl
#Convert R to FWHM in km/s by \Delta v = c/R
class Instrument:
    '''
    Describes a spectrograph, for use by the grid-processing machinery.

    :param name: name of the instrument
    :type name: string
    :param FWHM: the FWHM of the instrumental profile in km/s
    :type FWHM: float
    :param wl_range: wavelength range of instrument
    :type wl_range: 2-tuple (low, high)
    :param oversampling: how many samples fit across the :attr:`FWHM`
    :type oversampling: float
    '''

    def __init__(self, name, FWHM, wl_range, oversampling=4.):
        self.name = name
        self.wl_range = wl_range
        self.oversampling = oversampling
        self.FWHM = FWHM  # [km/s]

    def __str__(self):
        '''Return a one-line, human-readable summary of the instrument.'''
        return (f"Instrument Name: {self.name}, FWHM: {self.FWHM:.1f}, "
                f"oversampling: {self.oversampling}, wl_range: {self.wl_range}")
class TRES(Instrument):
    '''The TRES spectrograph, with its default FWHM and wavelength range.'''
    def __init__(self, name="TRES", FWHM=6.8, wl_range=(3500, 9500)):
        # Defaults describe TRES; any of them may be overridden by the caller.
        super().__init__(name, FWHM, wl_range)
class Reticon(Instrument):
    '''The Reticon spectrograph, with its default FWHM and wavelength range.'''
    def __init__(self, name="Reticon", FWHM=8.5, wl_range=(5145, 5250)):
        super().__init__(name, FWHM, wl_range)
class KPNO(Instrument):
    '''The KPNO spectrograph, with its default FWHM and wavelength range.'''
    def __init__(self, name="KPNO", FWHM=14.4, wl_range=(6250, 6650)):
        super().__init__(name, FWHM, wl_range)
class SPEX(Instrument):
    '''The SpeX spectrograph, full wavelength coverage.'''
    def __init__(self, name="SPEX", FWHM=150., wl_range=(7500, 54000)):
        super().__init__(name, FWHM, wl_range)
class SPEX_SXD(Instrument):
    '''The SpeX spectrograph in short (SXD) mode: same FWHM, narrower range.'''
    def __init__(self, name="SPEX", FWHM=150., wl_range=(7500, 26000)):
        super().__init__(name, FWHM, wl_range)
class IGRINS_H(Instrument):
    '''The IGRINS spectrograph, H band.'''
    def __init__(self, name="IGRINS_H", FWHM=7.5, wl_range=(14250, 18400)):
        super().__init__(name, FWHM, wl_range)
        # Flag the wavelength scale as non-air; consumers read this attribute.
        self.air = False
class IGRINS_K(Instrument):
    '''The IGRINS spectrograph, K band.'''
    def __init__(self, name="IGRINS_K", FWHM=7.5, wl_range=(18500, 25200)):
        super().__init__(name, FWHM, wl_range)
        # Flag the wavelength scale as non-air; consumers read this attribute.
        self.air = False
class ESPaDOnS(Instrument):
    '''The ESPaDOnS spectrograph, with its default FWHM and wavelength range.'''
    def __init__(self, name="ESPaDOnS", FWHM=4.4, wl_range=(3700, 10500)):
        super().__init__(name, FWHM, wl_range)
class DCT_DeVeny(Instrument):
    '''The DeVeny spectrograph on the DCT.'''
    def __init__(self, name="DCT_DeVeny", FWHM=105.2, wl_range=(6000, 10000)):
        super().__init__(name, FWHM, wl_range)
class WIYN_Hydra(Instrument):
    '''The Hydra spectrograph on WIYN.'''
    def __init__(self, name="WIYN_Hydra", FWHM=300., wl_range=(5500, 10500)):
        super().__init__(name, FWHM, wl_range)
def vacuum_to_air(wl):
    '''
    Converts vacuum wavelengths to air wavelengths using the Ciddor 1996 formula.

    :param wl: input vacuum wavelengths
    :type wl: np.array

    :returns: **wl_air** (*np.array*) - the wavelengths converted to air wavelengths

    .. note::

        CA Prieto recommends this as more accurate than the IAU standard.
    '''
    sigma = (1e4 / wl) ** 2
    # Refractive index of air at this wavelength (Ciddor 1996).
    refractive_index = 1.0 + 0.05792105 / (238.0185 - sigma) + 0.00167917 / (57.362 - sigma)
    return wl / refractive_index
def calculate_n(wl):
    '''
    Calculate *n*, the refractive index of light at a given wavelength.

    :param wl: input wavelength (in vacuum)
    :type wl: np.array

    :return: **n_air** (*np.array*) - the refractive index in air at that wavelength
    '''
    sigma = (1e4 / wl) ** 2
    f = 1.0 + 0.05792105 / (238.0185 - sigma) + 0.00167917 / (57.362 - sigma)
    new_wl = wl / f
    # n is the ratio of vacuum wavelength to air wavelength.
    n = wl / new_wl
    # Previously this function only printed n and implicitly returned None,
    # contradicting its docstring; return the value instead.
    return n
def vacuum_to_air_SLOAN(wl):
    '''
    Converts vacuum wavelengths to air wavelengths using the outdated SLOAN definition.

    From the SLOAN website:
    AIR = VAC / (1.0 + 2.735182E-4 + 131.4182 / VAC^2 + 2.76249E8 / VAC^4)

    :param wl:
        The input wavelengths to convert
    '''
    correction = 1.0 + 2.735182E-4 + 131.4182 / wl ** 2 + 2.76249E8 / wl ** 4
    return wl / correction
def air_to_vacuum(wl):
    '''
    Convert air wavelengths to vacuum wavelengths.

    :param wl: input air wavelegths
    :type wl: np.array

    :return: **wl_vac** (*np.array*) - the wavelengths converted to vacuum.

    .. note::

        It is generally not recommended to do this, as the function is imprecise.
    '''
    sigma = 1e4 / wl
    # Additive correction term applied to the air wavelength.
    correction = 6.4328e-5 + 2.94981e-2 / (146 - sigma ** 2) + 2.5540e-4 / (41 - sigma ** 2)
    return wl + wl * correction
def get_wl_kurucz(filename):
    '''
    Load the wavelength grid implied by a Kurucz flux file's header.

    The Kurucz grid is log-linear spaced: CRVAL1 is the starting
    log10(wavelength) and CDELT1 the step in log10.

    :param filename: path to a Kurucz FITS file
    :returns: wavelength array (angstroms)
    '''
    # Use a context manager so the FITS file handle is always closed;
    # the previous version leaked the open file.
    with fits.open(filename) as flux_file:
        hdr = flux_file[0].header
        num = len(flux_file[0].data)
        w1 = hdr['CRVAL1']
        dw = hdr['CDELT1']
    p = np.arange(num)
    wl = 10 ** (w1 + dw * p)
    return wl
@np.vectorize
def idl_float(idl_num):
    '''
    Convert an IDL *string* number in scientific notation to a float.

    IDL writes double-precision exponents with a "D" (e.g. ``1.5D+03``),
    which Python's float() cannot parse directly.

    :param idl_num:
        the idl number in sci_notation
    :returns: the parsed float
    '''
    # replace 'D' with 'E', convert to float. np.float was removed in
    # NumPy 1.24; the builtin float behaves identically here.
    return float(idl_num.replace("D", "E"))
def load_BTSettl(temp, logg, Z, norm=False, trunc=False, air=False):
    '''
    Load a raw BT-Settl (CIFIST2011) spectrum from the bzipped ascii ".spec.7" format.

    :param temp: effective temperature [K]; the filename encodes temp/100
    :param logg: surface gravity
    :param Z: metallicity string used in the directory and file names
    :param norm: normalize so the bolometric flux equals ``C.F_sun``?
    :param trunc: truncate to the 3000-13000 AA range?
    :param air: convert the vacuum wavelengths to air?
    :returns: [wl, fl], the wavelength and flux columns
    '''
    rname = "BT-Settl/CIFIST2011/M{Z:}/lte{temp:0>3.0f}-{logg:.1f}{Z:}.BT-Settl.spec.7.bz2".format(temp=0.01 * temp, logg=logg, Z=Z)
    file = bz2.BZ2File(rname, 'r')
    lines = file.readlines()
    strlines = [line.decode('utf-8') for line in lines]
    file.close()
    # Fixed-width columns: wavelength in chars 0-12, flux in chars 13-25.
    data = ascii.read(strlines, col_starts=[0, 13], col_ends=[12, 25], Reader=ascii.FixedWidthNoHeader)
    wl = data['col1']
    fl_str = data['col2']
    fl = idl_float(fl_str) #convert because of "D" exponent, unreadable in Python
    # Stored values are logarithmic with an 8 dex offset (BT-Settl convention
    # — confirm against the grid docs).
    fl = 10 ** (fl - 8.) #now in ergs/cm^2/s/A
    if norm:
        # Rescale so the integrated flux equals C.F_sun.
        F_bol = trapz(fl, wl)
        fl = fl * (C.F_sun / F_bol)
        #this also means that the bolometric luminosity is always 1 L_sun
    if trunc:
        #truncate to only the wl of interest
        ind = (wl > 3000) & (wl < 13000)
        wl = wl[ind]
        fl = fl[ind]
    if air:
        wl = vacuum_to_air(wl)
    return [wl, fl]
def load_flux_full(temp, logg, Z, alpha=None, norm=False, vsini=0, grid="PHOENIX"):
    '''Load a raw PHOENIX or kurucz spectrum based upon temp, logg, and Z. Normalize to C.F_sun if desired.

    :param temp: effective temperature, used in the filename
    :param logg: surface gravity, used in the filename
    :param Z: metallicity string, used in the directory/filename
    :param alpha: optional alpha-enhancement string (PHOENIX only)
    :param norm: normalize the flux to 1 solar luminosity?
    :param vsini: rotational velocity, used in the Kurucz filename
    :param grid: "PHOENIX" or "kurucz"
    :returns: flux array, or the int 1 if `grid` is unrecognized
    '''
    if grid == "PHOENIX":
        if alpha is not None:
            rname = "raw_grids/PHOENIX/Z{Z:}{alpha:}/lte{temp:0>5.0f}-{logg:.2f}{Z:}{alpha:}" \
                    ".PHOENIX-ACES-AGSS-COND-2011-HiRes.fits".format(Z=Z, temp=temp, logg=logg, alpha=alpha)
        else:
            rname = "raw_grids/PHOENIX/Z{Z:}/lte{temp:0>5.0f}-{logg:.2f}{Z:}" \
                    ".PHOENIX-ACES-AGSS-COND-2011-HiRes.fits".format(Z=Z, temp=temp, logg=logg)
    elif grid == "kurucz":
        rname = "raw_grids/Kurucz/TRES/t{temp:0>5.0f}g{logg:.0f}{Z:}v{vsini:0>3.0f}.fits".format(temp=temp,
                logg=10 * logg, Z=Z, vsini=vsini)
    else:
        print("No grid %s" % (grid))
        # NOTE(review): returns the int 1 as an error sentinel instead of
        # raising; callers must check for it — consider raising instead.
        return 1

    flux_file = fits.open(rname)
    f = flux_file[0].data

    if norm:
        f *= 1e-8 #convert from erg/cm^2/s/cm to erg/cm^2/s/A
        # NOTE(review): ``w_full`` is not defined in this function —
        # presumably a module-level wavelength array; confirm it exists.
        F_bol = trapz(f, w_full)
        f = f * (C.F_sun / F_bol)
        #this also means that the bolometric luminosity is always 1 L_sun
    if grid == "kurucz":
        # NOTE(review): ``wave_grid_kurucz_raw`` is likewise assumed to be a
        # module-level array — confirm.
        f *= C.c_ang / wave_grid_kurucz_raw ** 2 #Convert from f_nu to f_lambda

    flux_file.close()
    #print("Loaded " + rname)
    return f
def create_fits(filename, fl, CRVAL1, CDELT1, dict=None):
    '''
    Write a flux array to a new FITS file with log-lambda dispersion keywords.

    Assumes that wl is already log lambda spaced.

    :param filename: destination path
    :param fl: flux array to store as the primary HDU data
    :param CRVAL1: starting log10 wavelength
    :param CDELT1: log10 wavelength step
    :param dict: optional extra header keywords to record
    '''
    hdu = fits.PrimaryHDU(fl)
    head = hdu.header
    # Standard log-lambda dispersion keywords, written in a fixed order.
    for keyword, value in (("DISPTYPE", 'log lambda'),
                           ("DISPUNIT", 'log angstroms'),
                           ("CRPIX1", 1.),
                           ("CRVAL1", CRVAL1),
                           ("CDELT1", CDELT1),
                           ("DC-FLAG", 1)):
        head[keyword] = value

    if dict is not None:
        for key, value in dict.items():
            head[key] = value
    hdu.writeto(filename)
class MasterToFITSIndividual:
    '''
    Object used to create one FITS file at a time.

    :param interpolator: an :obj:`Interpolator` object referenced to the master grid.
    :param instrument: an :obj:`Instrument` object containing the properties of the final spectra
    '''

    def __init__(self, interpolator, instrument):
        self.interpolator = interpolator
        self.instrument = instrument
        # Output filename pattern, e.g. "t06000g45p00v010.fits".
        self.filename = "t{temp:0>5.0f}g{logg:0>2.0f}{Z_flag}{Z:0>2.0f}v{vsini:0>3.0f}.fits"

        #Create a master wl_dict which correctly oversamples the instrumental kernel
        # NOTE(review): assumes the instrument exposes a ``wl_dict`` attribute;
        # the plain :class:`Instrument` defined in this module does not —
        # confirm which instrument class is expected here.
        self.wl_dict = self.instrument.wl_dict
        self.wl = self.wl_dict["wl"]

    def process_spectrum(self, parameters, out_unit, out_dir=""):
        '''
        Creates a FITS file with given parameters.

        :param parameters: stellar parameters :attr:`temp`, :attr:`logg`, :attr:`Z`, :attr:`vsini`
        :type parameters: dict
        :param out_unit: output flux unit? Choices between `f_lam`, `f_nu`, `f_nu_log`, or `counts/pix`. `counts/pix` will do spline integration.
        :param out_dir: optional directory to prepend to output filename, which is chosen automatically for parameter values.

        Smoothly handles the *C.InterpolationError* if parameters cannot be interpolated from the grid and prints a message.
        '''
        #Preserve the "popping of parameters"
        parameters = parameters.copy()

        #Load the correct C.grid_set value from the interpolator into a LogLambdaSpectrum
        # The metallicity sign is encoded as a letter flag in the filename.
        if parameters["Z"] < 0:
            zflag = "m"
        else:
            zflag = "p"

        filename = out_dir + self.filename.format(temp=parameters["temp"], logg=10*parameters["logg"],
                                                  Z=np.abs(10*parameters["Z"]), Z_flag=zflag, vsini=parameters["vsini"])
        vsini = parameters.pop("vsini")
        try:
            spec = self.interpolator(parameters)
            # Using the ``out_unit``, determine if we should also integrate while doing the downsampling
            if out_unit=="counts/pix":
                integrate=True
            else:
                integrate=False
            # Downsample the spectrum to the instrumental resolution.
            spec.instrument_and_stellar_convolve(self.instrument, vsini, integrate)
            spec.write_to_FITS(out_unit, filename)
        except C.InterpolationError as e:
            print("{} cannot be interpolated from the grid.".format(parameters))

        # NOTE(review): this message prints even when interpolation failed
        # above — confirm whether that is intended.
        print("Processed spectrum {}".format(parameters))
class MasterToFITSGridProcessor:
    '''
    Create one or many FITS files from a master HDF5 grid. Assume that we are not going to need to interpolate
    any values.

    :param interface: an :obj:`HDF5Interface` object referenced to the master grid.
    :param instrument: an :obj:`Instrument` object describing the target spectrograph.
    :param points: lists of output parameters (assumes regular grid)
    :type points: dict of lists
    :param flux_unit: format of output spectra {"f_lam", "f_nu", "ADU"}
    :type flux_unit: string
    :param outdir: output directory
    :param alpha: does the grid provide the alpha-enhancement parameter?
    :param integrate: integrate the spectra to give counts/pixel when downsampling?
    :param processes: how many processors to use in parallel

    Basically, this object is doing a one-to-one conversion of the PHOENIX spectra. No interpolation necessary,
    preserving all of the header keywords.
    '''

    def __init__(self, interface, instrument, points, flux_unit, outdir, alpha=False, integrate=False, processes=mp.cpu_count()):
        self.interface = interface
        self.instrument = instrument
        self.points = points #points is a dictionary with which values to spit out for each parameter
        # Output filename pattern, e.g. "t06000g45p00v010.fits".
        self.filename = "t{temp:0>5.0f}g{logg:0>2.0f}{Z_flag}{Z:0>2.0f}v{vsini:0>3.0f}.fits"
        self.flux_unit = flux_unit
        self.integrate = integrate
        self.outdir = outdir
        self.processes = processes
        self.pids = []  # holds the worker Process objects (not raw pids)
        self.alpha = alpha

        # vsini is handled separately: each (temp, logg, Z) spectrum is
        # written once per vsini value.
        self.vsini_points = self.points.pop("vsini")
        names = self.points.keys()

        #Creates a list of parameter dictionaries [{"temp":8500, "logg":3.5, "Z":0.0}, {"temp":8250, etc...}, etc...]
        #which does not contain vsini
        self.param_list = [dict(zip(names,params)) for params in itertools.product(*self.points.values())]

        #Create a master wl_dict which correctly oversamples the instrumental kernel
        # NOTE(review): assumes the instrument exposes ``wl_dict``; the plain
        # :class:`Instrument` in this module does not define it — confirm.
        self.wl_dict = self.instrument.wl_dict
        self.wl = self.wl_dict["wl"]

        #Check that temp, logg, Z are within the bounds of the interface
        # NOTE(review): indexes ``self.interface.bounds`` by parameter *name*,
        # but HDF5Interface.bounds is a 2-D numpy array indexed by position —
        # confirm which interface type is expected here.
        for key,value in self.points.items():
            min_val, max_val = self.interface.bounds[key]
            assert np.min(self.points[key]) >= min_val,"Points below interface bound {}={}".format(key, min_val)
            assert np.max(self.points[key]) <= max_val,"Points above interface bound {}={}".format(key, max_val)

        #Create a temporary grid to resample to that matches the bounds of the instrument.
        low, high = self.instrument.wl_range
        self.temp_grid = create_log_lam_grid(wl_start=low, wl_end=high, min_vc=0.1)['wl']

    def process_spectrum_vsini(self, parameters):
        '''
        Create a set of FITS files with given stellar parameters temp, logg, Z and all combinations of `vsini`.

        :param parameters: stellar parameters
        :type parameters: dict

        Smoothly handles the *KeyError* if parameters cannot be drawn from the interface and prints a message.
        '''
        try:
            #Check to see if alpha, otherwise append alpha=0 to the parameter list.
            if not self.alpha:
                parameters.update({"alpha": 0.0})
            print(parameters)

            # Metallicity sign is encoded as a letter flag in the filename.
            if parameters["Z"] < 0:
                zflag = "m"
            else:
                zflag = "p"

            #This is a Base1DSpectrum
            # NOTE(review): relies on ``self.interface.load_file``, which
            # HDF5Interface does not define — confirm the intended interface.
            base_spec = self.interface.load_file(parameters)
            master_spec = base_spec.to_LogLambda(instrument=self.instrument, min_vc=0.1/C.c_kms) #convert the Base1DSpectrum to a LogLamSpectrum

            #Now process the spectrum for all values of vsini
            for vsini in self.vsini_points:
                spec = master_spec.copy()
                #Downsample the spectrum to the instrumental resolution, integrate to give counts/pixel
                spec.instrument_and_stellar_convolve(self.instrument, vsini, integrate=self.integrate)
                #Update spectrum with vsini
                spec.metadata.update({"vsini":vsini})
                filename = self.outdir + self.filename.format(temp=parameters["temp"], logg=10*parameters["logg"],
                                                              Z=np.abs(10*parameters["Z"]), Z_flag=zflag, vsini=vsini)
                spec.write_to_FITS(self.flux_unit, filename)
        except KeyError as e:
            print("{} cannot be loaded from the interface.".format(parameters))

    def process_chunk(self, chunk):
        '''
        Process a chunk of parameters to FITS.

        :param chunk: stellar parameter dicts
        :type chunk: 1-D list
        '''
        print("Process {} processing chunk {}".format(os.getpid(), chunk))
        for param in chunk:
            self.process_spectrum_vsini(param)

    def process_all(self):
        '''
        Process all parameters in :attr:`points` to FITS by chopping them into chunks.
        '''
        print("Total of {} FITS files to create.".format(len(self.vsini_points) * len(self.param_list)))
        chunks = chunk_list(self.param_list, n=self.processes)
        for chunk in chunks:
            p = mp.Process(target=self.process_chunk, args=(chunk,))
            p.start()
            self.pids.append(p)
        for p in self.pids:
            #Make sure all worker processes have finished
            p.join()
def main():
    """Command-line entry point (currently a no-op)."""
    pass


if __name__ == "__main__":
    main()
|
import json
from datetime import datetime
from cryptography.fernet import Fernet
from flask import current_app, jsonify, request
from flask.views import MethodView
from flask_jwt_extended import (create_access_token, create_refresh_token,
decode_token, get_jwt_identity,
jwt_refresh_token_required, set_access_cookies,
set_refresh_cookies, unset_jwt_cookies)
from flask_smorest import Blueprint, abort
from ldap3 import Connection, Server
from ldap3.core.exceptions import LDAPSocketOpenError
from ..models.user import LDAPUser
from ..schemas.user import (LoginSchema, RefreshSchema, ResetSchema,
TokenSchema, UserSchema)
blueprint = Blueprint('auth', 'auth')
@blueprint.route('/login', endpoint='login')
@blueprint.route('/login', endpoint='login')
class AuthLoginAPI(MethodView):

    @blueprint.response(LoginSchema)
    @blueprint.arguments(TokenSchema)
    def post(self, args):
        """Authenticate against LDAP and set the JWT access/refresh cookies.

        :param args: deserialized payload with ``email`` and ``password``
        :returns: JSON response with token lifetimes; tokens travel in cookies
        """
        email = args.get('email', None)
        password = args.get('password', None)
        if email is None:
            abort(403, message='Email not provided')
        if password is None:
            # Robustness fix: a missing password previously crashed later with
            # a TypeError on password.encode(); fail fast with a client error.
            abort(403, message='Password not provided')
        atSign = email.find('@')
        if atSign < 0:
            abort(409, message='Wrong mail format')
        user = LDAPUser(email[:atSign], email[atSign + 1:])
        ldap_config = current_app.config['LDAP']
        data = {
            'uid': user.uid,
            'domain': user.domain,
        }
        # The password is symmetrically encrypted (Fernet, key on disk) and
        # embedded in the JWT identity so later endpoints can reuse it.
        root = current_app.config['PROJECT_ROOT']
        key_path = f'{root}/secret.key'
        with open(key_path, 'rb') as key_file:
            key = key_file.read()
        encoded_password = password.encode()
        f = Fernet(key)
        hashed_password = f.encrypt(encoded_password)
        data['password'] = hashed_password.decode('utf-8')
        identity = json.dumps(data)
        dn = ldap_config['dn'].format(user=user)
        try:
            server = Server(ldap_config['server'])
            conn = Connection(server, dn, password)
            if ldap_config['tls']:
                conn.start_tls()
            if not conn.bind():
                # Bug fix: the keyword was misspelled 'mesage', so the error
                # detail never reached the client.
                abort(403, message='No such user or email/password wrong')
        except LDAPSocketOpenError:
            abort(409, message='Connection to LDAP server failed')
        access_token = create_access_token(identity=identity)
        refresh_token = create_refresh_token(identity=identity)
        access_expire = current_app.config['JWT_ACCESS_TOKEN_EXPIRES']
        refresh_expire = current_app.config['JWT_REFRESH_TOKEN_EXPIRES']
        resp = jsonify({
            'accessExpire': int(access_expire.total_seconds()),
            'refreshExpire': int(refresh_expire.total_seconds()),
        })
        set_access_cookies(resp, access_token)
        set_refresh_cookies(resp, refresh_token)
        # Mirror the refresh-token expiry in its own cookie so the refresh
        # endpoint can later report the remaining lifetime.
        refresh_path = current_app.config['JWT_REFRESH_COOKIE_PATH']
        refresh_secure = current_app.config['JWT_COOKIE_SECURE']
        refresh_expire_date = datetime.now() + refresh_expire
        resp.set_cookie(
            'refresh_expire',
            value=str(refresh_expire_date),
            expires=refresh_expire_date,
            path=refresh_path,
            httponly=True,
            secure=refresh_secure,
        )
        return resp
@blueprint.route('/logout', endpoint='logout')
class AuthLogoutAPI(MethodView):

    def post(self):
        """Logout"""
        # Tokens are stateless; clearing the JWT cookies is all that is needed.
        response = jsonify({})
        unset_jwt_cookies(response)
        return response
@blueprint.route('/refresh', endpoint='refresh')
class AuthRefreshAPI(MethodView):

    @blueprint.response(RefreshSchema)
    @jwt_refresh_token_required
    def post(self):
        """Refresh access token"""
        # The identity was serialized as JSON at login (uid/domain/password).
        identity = get_jwt_identity()
        data = json.loads(identity)
        user = LDAPUser(data['uid'], data['domain'])
        ldap_config = current_app.config['LDAP']
        dn = ldap_config['dn'].format(user=user)
        server = Server(current_app.config['LDAP']['server'])
        conn = Connection(server, dn)
        if current_app.config['LDAP']['tls']:
            conn.start_tls()
        # conn.bind()
        # NOTE(review): the search below runs on an unbound (anonymous)
        # connection, so it only checks that the user entry still exists,
        # not the credentials — confirm the LDAP server permits anonymous
        # search and that this is intended.
        if not conn.search(dn, '(objectclass=person)'):
            abort(403, message='No such user')
        access_expire = current_app.config['JWT_ACCESS_TOKEN_EXPIRES']
        access_token = create_access_token(identity=identity)
        # Parse the expiry written by the login endpoint into the
        # 'refresh_expire' cookie (str(datetime) format).
        refresh_expire_date = datetime.strptime(
            request.cookies['refresh_expire'], '%Y-%m-%d %H:%M:%S.%f')
        refresh_delta = refresh_expire_date - datetime.now()
        resp = jsonify({
            'accessExpire': int(access_expire.total_seconds()),
            'refreshExpire': int(refresh_delta.total_seconds()),
        })
        set_access_cookies(resp, access_token)
        return resp
|
'''class A:
def hi(self):
print ("Say Hi")
class B(A):
def hi(self):
print ("Say hello")
b=B()
b.hi()
'''
# overriding without inheritance based on object execution happens
class A:
    """First demo class; exposes the same hi() interface as B."""

    def hi(self):
        print("Say Hi")
class B:
    """Second demo class; same hi() interface as A, different output."""

    def hi(self):
        print("Say hello")
def common(test):
    """Duck-typed dispatch: invoke hi() on whatever object is passed in."""
    getattr(test, "hi")()
# Both classes expose the same hi() method, so common() accepts either one —
# duck typing achieves override-like behavior without inheritance.
a=A()
b=B()
common(a)
common(b)
|
from __future__ import division
from django.shortcuts import render
# Create your views here.
from django.shortcuts import render, get_object_or_404
from .models import Target
from .models import TargetForm
from .models import ProbesForm
from .models import *
from .atlas_request_creation import atlas_api_call
from .atlas_request_stop import atlas_api_stop_call
from .atlas_request_fetch import atlas_api_result_call
from .forms import DateForm
from . import atlas_request_creation
from .models import Probes
from .models import Specification
from django.forms import modelformset_factory
from django.http import HttpResponse
from django.http import HttpRequest
from django.http import HttpResponseRedirect
from django.forms import ModelForm
import datetime
import json
from datetime import timedelta
def index(request):
    """Render the measurement landing page."""
    return render(request, 'measurement/index.html')
def startf(request):
    """Show and handle the measurement Specification form."""
    if request.method == "POST":
        form = SpecificationForm(request.POST)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect('http://127.0.0.1:8000/measurement/probe')
    else:
        form = SpecificationForm()
    # Invalid POST falls through and re-renders with the bound form.
    return render(request, 'measurement/startf.html', {'form': form})
def probefill(request):
    """Show and handle the Probes form."""
    if request.method == "POST":
        form = ProbesForm(request.POST)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect('http://127.0.0.1:8000/measurement/target')
    else:
        form = ProbesForm()
    # Invalid POST falls through and re-renders with the bound form.
    return render(request, 'measurement/probefill.html', {'form': form})
def targetfill(request):
    """Show/handle the Target form; on success create the Atlas measurement."""
    if request.method == "POST":
        form = TargetForm(request.POST)
        if form.is_valid():
            form.save()
            # Fire the RIPE Atlas measurement-creation API call.
            msm=atlas_api_call()
            #return HttpResponseRedirect('http://127.0.0.1:8000/measurement/create')
            #context={'data':form.cleaned_data}
            #context = {'data': msm}
            # Record the returned measurement id on the row just saved.
            # NOTE(review): `.all().last()` assumes no concurrent saves —
            # race-prone under parallel requests; confirm acceptable.
            q = Target.objects.all().last()
            q.msm_id=int(msm['measurements'][0])
            q.save()
            context = {'data': int(msm['measurements'][0])}
            return render(request, 'measurement/create.html', context)
    else:
        form = TargetForm()
    return render(request, 'measurement/targetfill.html', {'form': form})
def create(request):
    """Static confirmation page shown after a measurement is created."""
    return HttpResponse('<h1>Measurement created</h1>')
def currenth(request):
    """List every Target (current/historical measurements)."""
    context = {'all_target': Target.objects.all()}
    return render(request, 'measurement/currenth.html', context)
def option(request, msm_idvar):
    """Show the detail/options page for one measurement by its msm_id."""
    target = Target.objects.get(msm_id=msm_idvar)
    return render(request, 'measurement/option.html', {'all_target': target})
def stop(request, msm_idvar):
    """Stop the Atlas measurement and mark the Target row as stopped."""
    atlas_api_stop_call(msm_idvar)
    target = Target.objects.get(msm_id=msm_idvar)
    target.status = 'stopped'
    target.save()
    return HttpResponse('<h1>Measurement stopped</h1>')
def result(request,msm_idvar):
    """Sample measurement data between two dates and render a coverage graph.

    Python 2 code: uses `print` statements and `int(filter(...))`, which
    would fail on Python 3.
    """
    # if this is a POST request we need to process the form data
    if request.method == 'POST':
        # create a form instance and populate it with data from the request:
        form = DateForm(request.POST)
        # check whether it's valid:
        if form.is_valid():
            start_date = form.cleaned_data['start_date']
            stop_date = form.cleaned_data['stop_date']
            countr=form.cleaned_data['country']
            #atlas_api_result_call(msm_idvar,start_date,stop_date,countr)
            des=Target.objects.get(msm_id=msm_idvar)
            # `description` doubles as the name of the model class holding
            # the result rows (resolved below via eval).
            desc=des.description
            # Python 2 only: filter() on a str returns a str of digits here.
            rel=int(filter(str.isdigit, str(desc)))
            desc=desc.replace(" ","")
            desc=str(desc)
            # NOTE(review): `rel` is computed but never used afterwards.
            rel="Relation"+str(rel)
            list1=[]
            #m=1
            startins=start_date
            #print startins
            # Sample a day every 3 days from start_date to stop_date.
            while(True):
                date_min1=datetime.datetime.combine(startins, datetime.time.min)
                date_max1 = datetime.datetime.combine(startins, datetime.time.max)
                # SECURITY NOTE(review): eval() on a model field value — only
                # safe if `description` is fully trusted; prefer a lookup table.
                q1 = eval(desc).objects.filter(timestamp__range=[date_min1, date_max1])
                b=Countries.objects.get(country=countr)
                q2=q1.filter(countries=int(b.id))
                q3=q1.filter(countries__isnull=False)
                #print q1.count()
                #print q2.count()
                # Advance day-by-day until the chosen country has data.
                # NOTE(review): loops forever if that country never has data.
                while(q2.count()==0):
                    startins = startins + timedelta(days=1)
                    date_min1 = datetime.datetime.combine(startins, datetime.time.min)
                    date_max1 = datetime.datetime.combine(startins, datetime.time.max)
                    q1 = eval(desc).objects.filter(timestamp__range=[date_min1, date_max1])
                    b = Countries.objects.get(country=countr)
                    q2 = q1.filter(countries=int(b.id))
                    q3=q1.filter(countries__isnull=False)
                # Fraction of that day's rows belonging to the country
                # (true division thanks to `from __future__ import division`).
                ratio=(q2.count()/q3.count())
                ratio1=ratio*100
                list1.append([str(startins),ratio1])
                #m=m+1
                startins=startins+timedelta(days=3)
                if(startins>stop_date):
                    break
            # Overall share of rows tagged with the placeholder country "OO"
            # (missing data) across the whole requested window.
            date_min2 = datetime.datetime.combine(start_date, datetime.time.min)
            date_max2 = datetime.datetime.combine(stop_date, datetime.time.max)
            l1=eval(desc).objects.filter(timestamp__range=[date_min2, date_max2])
            lm = Countries.objects.get(country="OO")
            #lm1=Countries.objects.get(country="KI")
            l2 = l1.filter(countries=int(lm.id))
            l3=l1.filter(countries__isnull=False)
            print date_min2
            print date_max2
            print l2.count()
            print l3.count()
            # NOTE(review): ZeroDivisionError if l3 is empty — confirm callers
            # only reach this with data present.
            perc=(l2.count()/l3.count())*100
            list2={"Missing data":perc}
            context = {'reading': json.dumps(list1), 'reading1':list2}
            #context={'reading':[['April', 1000],['May', 1170]]}
            #q1=eval(desc).objects.filter(timestamp__contains=datetime.date()
            return render(request, 'measurement/graph.html', context)
    # if a GET (or any other method) we'll create a blank form
    else:
        form = DateForm()
    return render(request, 'measurement/result.html', {'form': form})
|
import dash_bootstrap_components as dbc
from dash import html
from .util import make_subheading
# Shared header row: rank plus first/last name columns.
table_header = html.Thead(
    html.Tr(
        [
            html.Th("#"),
            html.Th("First name"),
            html.Th("Last name"),
        ]
    )
)
# Shared body rows; the first cell of each row is a row-scoped header.
table_body = html.Tbody(
    [
        html.Tr(
            [
                html.Th("1", scope="row"),
                html.Td("Tom"),
                html.Td("Cruise"),
            ]
        ),
        html.Tr(
            [
                html.Th("2", scope="row"),
                html.Td("Jodie"),
                html.Td("Foster"),
            ]
        ),
        html.Tr(
            [
                html.Th("3", scope="row"),
                html.Td("Chadwick"),
                html.Td("Boseman"),
            ]
        ),
    ]
)
# Demo layout: a plain table, two styled variants side by side, then one
# table per contextual color, all reusing the same header/body definitions.
table = html.Div(
    [
        make_subheading("Table", "table"),
        # Default, unstyled table.
        dbc.Table(
            [table_header, table_body],
            className="mb-2",
        ),
        dbc.Row(
            [
                # Responsive striped/hover variant.
                dbc.Col(
                    dbc.Table(
                        [table_header, table_body],
                        responsive=True,
                        striped=True,
                        hover=True,
                        className="mb-2",
                    ),
                ),
                # Dark bordered variant.
                dbc.Col(
                    dbc.Table(
                        [table_header, table_body],
                        bordered=True,
                        dark=True,
                        hover=True,
                        responsive=True,
                        striped=True,
                        className="mb-2",
                    ),
                ),
            ]
        ),
        # One half-width table for each Bootstrap contextual color.
        dbc.Row(
            [
                dbc.Col(
                    dbc.Table(
                        [table_header, table_body],
                        color=color,
                        className="mb-2",
                    ),
                    xs=6,
                )
                for color in [
                    "primary",
                    "secondary",
                    "success",
                    "danger",
                    "warning",
                    "info",
                    "light",
                    "dark",
                ]
            ]
        ),
    ],
    className="mb-4",
)
|
# -*- coding: utf-8 -*-
"""
Created on Thu Dec 24 15:37:25 2015
@author: HSH
"""
class Solution(object):
    def createLine(self, words, L, start, end, totLen, isLast):
        """Build one justified line of width L from words[start:end+1].

        totLen is the combined length of those words (excluding spaces);
        isLast (or a single word) selects left-justification with right padding.
        """
        n = end - start + 1
        if n == 1 or isLast:
            # Left-justify: single spaces between words, pad out to width L.
            line = ' '.join(words[start:end + 1])
            return line + ' ' * (L - len(line))
        # Distribute (L - totLen) spaces over the n - 1 gaps; the leftmost
        # `extra` gaps each get one additional space.
        space, extra = divmod(L - totLen, n - 1)
        pieces = [words[start]]
        for i in range(start + 1, end + 1):
            gap = i - start
            pieces.append(' ' * (space + 1 if gap <= extra else space))
            pieces.append(words[i])
        return ''.join(pieces)

    def fullJustify(self, words, maxWidth):
        """
        :type words: List[str]
        :type maxWidth: int
        :rtype: List[str]
        """
        lines = []
        start, end, used = 0, -1, 0
        i = 0
        while i < len(words):
            # (end - start + 1) single spaces would be required if words[i]
            # joined the current line.
            if used + (end - start + 1) + len(words[i]) <= maxWidth:
                end = i
                used += len(words[i])
                i += 1
            else:
                lines.append(self.createLine(words, maxWidth, start, end, used, False))
                start, end, used = i, i - 1, 0
        lines.append(self.createLine(words, maxWidth, start, end, used, True))
        return lines
# Canvas size for the Processing sketch.
w, h = 1050, 600
# NOTE(review): `colors` is assigned three times; only the final palette is
# effective — the first two appear to be kept as alternatives.
colors = [(188, 216, 193), (214, 219, 178), (227, 217, 133), (229, 122, 68)]
colors = [(219, 177, 188), (211, 196, 227), (143, 149, 211), (137, 218, 255)]
colors = [(191, 107, 99), (217, 163, 132), (91, 158, 166), (169, 212, 217)]
# Grid dimensions (cells) and the pixel extent the grid spans.
grid_x = 22
grid_y = 22
grid_x_pixels = 1200
grid_y_pixels = 1200
# Spacing between adjacent grid points (float division intended).
sep_x = float(grid_x_pixels) / (grid_x - 1)
sep_y = float(grid_y_pixels) / (grid_y - 1)
def get_random_element(l):
    # Uses Processing's random(high) builtin (float in [0, high)), not
    # Python's random module.
    return l[int(random(len(l)))]
def setup():
    """Processing (Python mode) entry point: draw a rotated grid of offset
    circles and save the result as a PNG.

    Relies on Processing builtins: size, background, strokeWeight,
    pixelDensity, rotate, fill, circle, random, save, QUARTER_PI.
    """
    size(w, h)
    background(150)
    strokeWeight(3)
    pixelDensity(2)
    # Start at the top-left of the (oversized) grid, nudged 200px left.
    current_x = w/2.0 - grid_x_pixels/2.0 - 200
    current_y = h/2.0 - grid_y_pixels/2.0
    rotate(QUARTER_PI)
    for i in range(grid_x):
        for j in range(grid_y):
            # Black "shadow" circle underneath each dot.
            fill(0)
            circle(current_x, current_y, 35)
            # ~30% of the dots get the accent color.
            if (random(1) < .3):
                fill(200, 100, 100)
            else:
                fill(230, 230, 230)
            # ~60% of the dots are shifted left for a hand-placed look.
            if (random(1) < .6):
                offset = random(10, 15)
            else:
                offset = 0
            circle(current_x - offset, current_y, 35)
            current_y += sep_y
        current_y = h/2.0 - grid_y_pixels/2.0
        current_x += sep_x
    # Bug fix: the original save(...) call was missing its closing
    # parenthesis, which made the entire sketch a syntax error.
    save("Examples/" + str(int(random(100000))) + ".png")
|
# -*- coding: utf-8 -*-
"""mods.py: Module loader for the IRC bot.
Loader for commands and listeners.
TODO:
* Refactor<3
"""
import os
import sys
import time
import string
import random
import imp
# Symbian S60 specific compatibility
s60 = False
if sys.platform == "symbian_s60":
    s60 = True
    # Add the possible PyS60 library locations to the module search path.
    sys.path.append("e:\\python")
    sys.path.append("c:\\python")
    sys.path.append("c:\\DATA\\python")
from helpers import *
from const import *
import traceback
import urllib
import re
def LoadAll():
    """Load every bot module from modules/ and return {name: module dict}.

    Each module dict is normalized to always contain the keys
    name/aliases/throttle/interval/zone.
    """
    dirlist = os.listdir("modules/")
    modules = {}
    # NOTE(review): writedocs is hard-coded False, so the doc-writing branch
    # below is dead unless this flag is toggled by hand.
    writedocs = False
    docs = []
    for item in dirlist:
        module = LoadSingle(item)
        if module:
            name = module[0]
            mod = module[1]
            if "__doc__" in dir(mod["class"]):
                docs.append((name, mod["class"].__doc__))
            # Fill in defaults for optional module metadata.
            mod["name"] = name
            if not "aliases" in mod.keys():
                mod["aliases"] = []
            if not "throttle" in mod.keys():
                mod["throttle"] = None
            if not "interval" in mod.keys():
                mod["interval"] = None
            if not "zone" in mod.keys():
                mod["zone"] = IRC_ZONE_BOTH
            modules[module[0]] = module[1]
    if writedocs:
        # Dump each module's docstring to mod_docs.txt in a readable form.
        docs.sort()
        s = ""
        for doc in docs:
            s += "* " + doc[0] + "\n"
            parts = doc[1].split("\n")
            lines = ["  > "+line.strip() for line in parts if line.strip()]
            s += "\n".join(lines)
            s += "\n\n"
        f = open("mod_docs.txt","w")
        f.write(s)
        f.close()
    return modules
def LoadSingle(filename):
    """Load one module file; return (name, module_dict) or False on failure.

    Python 2 code (`except Exception, e` / print statements).
    """
    # Skip hidden files.
    if filename[0] == ".":
        return False
    parts = filename.split(".")
    if len(parts) < 2:
        return False
    name = parts[0]
    ext = parts[-1]
    # only load .py modules that don't begin with an underscore
    if ext == "py" and name[0] != "_":
        try:
            mod = imp.load_source(name, "modules/" + filename)
            # A loadable bot module must expose a module-level `module` dict.
            if not "module" in dir(mod):
                return False
            return name, mod.module
        except Exception, e:
            # Never let one broken module abort the whole scan.
            print "Failed loading:", name
            print traceback.format_exc()
            print sys.exc_info()[0]
            return False
    return False
# Module-level side effect: all bot modules are loaded when this file is imported.
modules = LoadAll()
|
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import dataclasses
import itertools
from dataclasses import dataclass
from typing import Any, Iterator, cast
from pants.build_graph.address import BANNED_CHARS_IN_PARAMETERS
from pants.engine.addresses import Address
from pants.engine.collection import Collection
from pants.engine.engine_aware import EngineAwareParameter
from pants.engine.target import (
Field,
FieldDefaults,
ImmutableValue,
Target,
TargetTypesToGenerateTargetsRequests,
)
from pants.util.frozendict import FrozenDict
from pants.util.strutil import bullet_list, softwrap
def _named_args_explanation(arg: str) -> str:
return (
f"To use `{arg}` as a parameter, you can pass it as a keyword argument to "
f"give it an alias. For example: `parametrize(short_memorable_name={arg})`"
)
@dataclass(frozen=True)
class Parametrize:
    """A builtin function/dataclass that can be used to parametrize Targets.

    Parametrization is applied between TargetAdaptor construction and Target instantiation, which
    means that individual Field instances need not be aware of it.
    """

    # Positional values; each doubles as its own alias (see to_parameters).
    args: tuple[str, ...]
    # Keyword parameters: alias -> deep-frozen value.
    kwargs: FrozenDict[str, ImmutableValue]

    def __init__(self, *args: str, **kwargs: Any) -> None:
        # Frozen dataclass: fields must be set via object.__setattr__.
        object.__setattr__(self, "args", args)
        object.__setattr__(self, "kwargs", FrozenDict.deep_freeze(kwargs))

    def to_parameters(self) -> dict[str, Any]:
        """Validates and returns a mapping from aliases to parameter values.

        This conversion is executed lazily to allow for more context in error messages, such as the
        TargetAdaptor consuming the Parametrize instance.
        """
        parameters = dict(self.kwargs)
        for arg in self.args:
            if not isinstance(arg, str):
                raise Exception(
                    f"In {self}:\n Positional arguments must be strings, but "
                    f"`{arg!r}` was a `{type(arg).__name__}`.\n\n"
                    + _named_args_explanation(f"{arg!r}")
                )
            previous_arg = parameters.get(arg)
            if previous_arg is not None:
                raise Exception(
                    f"In {self}:\n Positional arguments cannot have the same name as "
                    f"keyword arguments. `{arg}` was also defined as `{arg}={previous_arg!r}`."
                )
            # Parameter aliases end up in Addresses, so separator characters
            # are forbidden.
            banned_chars = BANNED_CHARS_IN_PARAMETERS & set(arg)
            if banned_chars:
                raise Exception(
                    f"In {self}:\n Positional argument `{arg}` contained separator characters "
                    f"(`{'`,`'.join(banned_chars)}`).\n\n" + _named_args_explanation(arg)
                )
            parameters[arg] = arg
        return parameters

    @classmethod
    def expand(
        cls, address: Address, fields: dict[str, Any | Parametrize]
    ) -> Iterator[tuple[Address, dict[str, Any]]]:
        """Produces the cartesian product of fields for the given possibly-Parametrized fields.

        Only one level of expansion is performed: if individual field values might also contain
        Parametrize instances (in particular: an `overrides` field), expanding those will require
        separate calls.
        """
        try:
            # One inner list per parametrized field: (field_name, alias, value).
            parametrized: list[list[tuple[str, str, Any]]] = [
                [
                    (field_name, alias, field_value)
                    for alias, field_value in v.to_parameters().items()
                ]
                for field_name, v in fields.items()
                if isinstance(v, Parametrize)
            ]
        except Exception as e:
            raise Exception(f"Failed to parametrize `{address}`:\n{e}") from e
        if not parametrized:
            # Nothing to expand: pass the inputs through unchanged.
            yield (address, fields)
            return
        non_parametrized = tuple(
            (field_name, field_value)
            for field_name, field_value in fields.items()
            if not isinstance(field_value, Parametrize)
        )
        # Cartesian product over every parametrized field's alternatives.
        for parametrized_args in itertools.product(*parametrized):
            expanded_address = address.parametrize(
                {field_name: alias for field_name, alias, _ in parametrized_args}
            )
            parametrized_args_fields = tuple(
                (field_name, field_value) for field_name, _, field_value in parametrized_args
            )
            expanded_fields: dict[str, Any] = dict(non_parametrized + parametrized_args_fields)
            yield expanded_address, expanded_fields

    def __repr__(self) -> str:
        strs = [str(s) for s in self.args]
        strs.extend(f"{alias}={value}" for alias, value in self.kwargs.items())
        return f"parametrize({', '.join(strs)})"
@dataclass(frozen=True)
class _TargetParametrization:
original_target: Target | None
parametrization: FrozenDict[Address, Target]
@property
def all(self) -> Iterator[Target]:
if self.original_target:
yield self.original_target
yield from self.parametrization.values()
def get(self, address: Address) -> Target | None:
"""Find the Target with an exact Address match, if any."""
if self.original_target and self.original_target.address == address:
return self.original_target
return self.parametrization.get(address)
# TODO: This is not the right name for this class, nor the best place for it to live. But it is
# consumed by both `pants.engine.internals.graph` and `pants.engine.internals.build_files`, and
# shouldn't live in `pants.engine.target` (yet? needs more stabilization).
@dataclass(frozen=True)
class _TargetParametrizationsRequest(EngineAwareParameter):
    # The *base* address to compute parametrizations for; must not itself be
    # parametrized or generated (enforced in __post_init__).
    address: Address
    # Excluded from hashing/comparison: purely diagnostic context.
    description_of_origin: str = dataclasses.field(hash=False, compare=False)

    def __post_init__(self) -> None:
        # Reject addresses that are already expansion *outputs*.
        if self.address.is_parametrized or self.address.is_generated_target:
            raise ValueError(
                softwrap(
                    f"""
                    Cannot create {self.__class__.__name__} on a generated or parametrized target.

                    Self: {self}
                    """
                )
            )

    def debug_hint(self) -> str:
        return self.address.spec
# TODO: See TODO on _TargetParametrizationsRequest about naming this.
class _TargetParametrizations(Collection[_TargetParametrization]):
    """All parametrizations and generated targets for a single input Address.

    If a Target has been parametrized, the original Target might _not_ be present, due to no Target
    being addressable at the un-parameterized Address.
    """

    @property
    def all(self) -> Iterator[Target]:
        """Iterates over all Target instances which are valid after parametrization."""
        for parametrization in self:
            yield from parametrization.all

    @property
    def parametrizations(self) -> dict[Address, Target]:
        """Returns a merged dict of all generated/parametrized instances, excluding originals."""
        return {
            a: t for parametrization in self for a, t in parametrization.parametrization.items()
        }

    def generated_for(self, address: Address) -> FrozenDict[Address, Target]:
        """Find all Targets generated by the given generator Address."""
        assert not address.is_generated_target
        for parametrization in self:
            if (
                parametrization.original_target
                and parametrization.original_target.address == address
            ):
                return parametrization.parametrization
        raise self._bare_address_error(address)

    def get(
        self,
        address: Address,
        target_types_to_generate_requests: TargetTypesToGenerateTargetsRequests | None = None,
    ) -> Target | None:
        """Find the Target with an exact Address match, if any."""
        for parametrization in self:
            instance = parametrization.get(address)
            if instance is not None:
                return instance
        # TODO: This is an accommodation to allow using file/generator Addresses for
        # non-generator atom targets. See https://github.com/pantsbuild/pants/issues/14419.
        if target_types_to_generate_requests and address.is_generated_target:
            base_address = address.maybe_convert_to_target_generator()
            original_target = self.get(base_address, target_types_to_generate_requests)
            if original_target and not target_types_to_generate_requests.is_generator(
                original_target
            ):
                return original_target
        return None

    def get_all_superset_targets(self, address: Address) -> Iterator[Address]:
        """Yield the input address itself, or any parameterized addresses which are a superset of
        the input address.

        For example, an input address `dir:tgt` may yield `(dir:tgt@k=v1, dir:tgt@k=v2)`.

        If no targets are a match, will yield nothing.
        """
        # Check for exact matches.
        if self.get(address) is not None:
            yield address
            return
        for parametrization in self:
            if parametrization.original_target is not None and address.is_parametrized_subset_of(
                parametrization.original_target.address
            ):
                yield parametrization.original_target.address

            for parametrized_tgt in parametrization.parametrization.values():
                if address.is_parametrized_subset_of(parametrized_tgt.address):
                    yield parametrized_tgt.address

    def get_subset(
        self,
        address: Address,
        consumer: Target,
        field_defaults: FieldDefaults,
        target_types_to_generate_requests: TargetTypesToGenerateTargetsRequests,
    ) -> Target:
        """Find the Target with the given Address, or with fields matching the given consumer."""
        # Check for exact matches.
        instance = self.get(address, target_types_to_generate_requests)
        if instance is not None:
            return instance

        def remaining_fields_match(candidate: Target) -> bool:
            """Returns true if all Fields absent from the candidate's Address match the consumer."""
            unspecified_param_field_names = {
                key for key in candidate.address.parameters.keys() if key not in address.parameters
            }
            return all(
                _concrete_fields_are_equivalent(
                    field_defaults,
                    consumer=consumer,
                    candidate_field=field,
                )
                for field_type, field in candidate.field_values.items()
                if field_type.alias in unspecified_param_field_names
            )

        for parametrization in self:
            # If the given Address is a subset-match of the parametrization's original Target
            # (meaning that the user specified an un-parameterized generator Address), then we
            # need to match against one of the generated Targets instead (because a parametrized
            # generator does not keep its Fields).
            if (
                parametrization.original_target
                and address.is_parametrized_subset_of(parametrization.original_target.address)
                and parametrization.parametrization
                and remaining_fields_match(next(iter(parametrization.parametrization.values())))
            ):
                return parametrization.original_target

            # Else, see whether any of the generated targets match.
            for candidate in parametrization.parametrization.values():
                if address.is_parametrized_subset_of(candidate.address) and remaining_fields_match(
                    candidate
                ):
                    return candidate

        raise ValueError(
            f"The explicit dependency `{address}` of the target at `{consumer.address}` does "
            "not provide enough address parameters to identify which parametrization of the "
            "dependency target should be used.\n"
            f"Target `{address.maybe_convert_to_target_generator()}` can be addressed as:\n"
            f"{bullet_list(str(t.address) for t in self.all)}"
        )

    def generated_or_generator(self, maybe_generator: Address) -> Iterator[Target]:
        """Yield either the Target, or the generated Targets for the given Address."""
        for parametrization in self:
            if (
                not parametrization.original_target
                or parametrization.original_target.address != maybe_generator
            ):
                continue
            if parametrization.parametrization:
                # Generated Targets.
                yield from parametrization.parametrization.values()
            else:
                # Did not generate targets.
                yield parametrization.original_target
            return
        raise self._bare_address_error(maybe_generator)

    def _bare_address_error(self, address) -> ValueError:
        # Build (not raise) the error so callers can `raise self._bare_address_error(...)`.
        return ValueError(
            "A `parametrized` target cannot be consumed without its parameters specified.\n"
            f"Target `{address}` can be addressed as:\n"
            f"{bullet_list(str(t.address) for t in self.all)}"
        )
def _concrete_fields_are_equivalent(
field_defaults: FieldDefaults, *, consumer: Target, candidate_field: Field
) -> bool:
candidate_field_type = type(candidate_field)
candidate_field_value = field_defaults.value_or_default(candidate_field)
if consumer.has_field(candidate_field_type):
return cast(
bool,
field_defaults.value_or_default(consumer[candidate_field_type])
== candidate_field_value,
)
# Else, see if the consumer has a field that is a superclass of `candidate_field_value`, to
# handle https://github.com/pantsbuild/pants/issues/16190. This is only safe because we are
# confident that both `candidate_field_type` and the fields from `consumer` are _concrete_,
# meaning they are not abstract templates like `StringField`.
superclass = next(
(
consumer_field
for consumer_field in consumer.field_types
if isinstance(candidate_field, consumer_field)
),
None,
)
if superclass is None:
return False
return cast(
bool, field_defaults.value_or_default(consumer[superclass]) == candidate_field_value
)
|
import pygame, sys, time, random
from pygame.locals import *
import numpy as np
import math
import joblib # import Parallel, delayed
import multiprocessing
class Particle:
    """
    @summary: Data class to store particle details i.e. Position, Direction and speed of movement, radius, etc
    """
    def __init__(self):
        self.__version = 0
        """@type: int"""
        # [x, y] screen position.
        self.__position = []
        # [dx, dy] per-frame movement (direction and speed combined).
        self.__movement = []
        self.__radius = 0
        # Remaining lifetime; decremented by 0.1 per DecreaseLifetime() call.
        self.__lifetime = 20

    # Python overrides -------------------------------------------------------------------------------------------------
    def __str__(self):
        printStr = ''
        printStr += 'Position: (' + str(self.__position[0]) + ',' + str(self.__position[1]) + ') '
        printStr += 'Direction and Speed: (' + str(self.__movement[0]) + ',' + str(self.__movement[1]) + ') '
        printStr += 'Radius: ' + str(self.__radius)
        return printStr

    # NOTE(review): this does not match the real __setitem__(self, key, value)
    # protocol signature — it is effectively a bulk setter; `c` is unused.
    # Python 2 print statement below.
    def __setitem__(self, position, movement, rad, c):
        print position, movement, rad
        # TODO: Check inputs
        self.__position = position
        self.__movement = movement
        self.__radius = rad

    # Properties -------------------------------------------------------------------------------------------------------
    @property
    def Position(self):
        return self.__position

    @property
    def Movement(self):
        return self.__movement

    @property
    def Radius(self):
        return self.__radius

    # Methods ----------------------------------------------------------------------------------------------------------
    def SetPosition(self, pos):
        self.__position = pos

    def SetMovement(self, move):
        self.__movement = move

    def SetRadius(self, rad):
        self.__radius = rad

    def DecreaseLifetime(self):
        # Returns True once the particle has expired and should be removed.
        self.__lifetime -= 0.1
        return True if self.__lifetime <= 0 else False
def CalculateGrid(screenWidth, screenHeight, resolution):
    """Build a 2-D grid of [x, y] sample points covering the screen.

    Python 2 code: the prints and the integer `/` divisions below rely on
    Python 2 semantics (floor division between ints).
    """
    # Grid cell counts: resolution plus the remainder of the screen size.
    x_size = resolution + divmod(screenWidth, resolution)[1]
    y_size = resolution + divmod(screenHeight, resolution)[1]
    print x_size, y_size
    grid = []
    for y in range(0, y_size):
        temp_list = []
        for x in range(0, x_size):
            temp_list += [[x * (screenWidth / x_size), y * (screenHeight / y_size)]]
        grid += [temp_list]
    print np.array(grid).shape
    return grid
# Takes a binary string of all the four corners of the cell, where 1 = occupied and 0 = empty.
# The corners ('0000') read from left to right: top-left, top-right, bottom-right, bottom-left
# Returns a list with pairs of x,y co-ordinates for the start and end of the line(s)
# A----B 0----1 0--P--1
# | | | | | |
# | | or | | to S Q
# C----D 3----2 | |
# 3--R--2
def DrawLine(score, x, y, sizex, sizey, sum_corners):
    """Marching squares: return the contour segment endpoints for one cell.

    score: 4-char binary string of corner occupancy (TL, TR, BR, BL).
    sum_corners: field values at those corners, used to interpolate where
    the contour crosses each edge.
    Returns [] for the two uniform cells, otherwise a list of [x, y]
    points, in pairs, describing the segment(s).
    """
    case = int(score, 2)
    # Bug fix: the second test previously used int(score) (base 10), so a
    # fully occupied cell ('1111' -> 1111 != 15) fell through every branch
    # and returned None instead of [].
    if case == 0 or case == 15:
        return []
    top_centre = [x + int(sizex/2), y]
    left_centre = [x, int(y + sizey/2)]
    bottom_centre = [x + int(sizex/2), y + sizey]
    right_centre = [x + sizex, y + int(sizey/2)]
    # Interpolate the crossing point on each edge. When the two corner
    # values are equal, interpolation would divide by zero, so fall back to
    # the edge midpoint. (Consistency fix: the fallbacks were previously
    # wrapped in an extra list, e.g. `[top_centre]`, unlike the flat
    # interpolated points; they are flattened here. Those branches are only
    # guards — a returned edge always has differing corner values.)
    if sum_corners[0] == sum_corners[1]:
        P = top_centre
    else:
        P = [x + ((x + sizex) - x) * ((1 - sum_corners[0]) / (sum_corners[1] - sum_corners[0])), y]
    if sum_corners[1] == sum_corners[2]:
        Q = right_centre
    else:
        Q = [x + sizex, y + ((y + sizey) - y) * ((1 - sum_corners[1]) / (sum_corners[2] - sum_corners[1]))]
    if sum_corners[2] == sum_corners[3]:
        R = bottom_centre
    else:
        R = [x + ((x + sizex) - x) * ((1 - sum_corners[3]) / (sum_corners[2] - sum_corners[3])), y + sizey]
    if sum_corners[0] == sum_corners[3]:
        S = left_centre
    else:
        S = [x, y + ((y + sizey) - y) * ((1 - sum_corners[0]) / (sum_corners[3] - sum_corners[0]))]
    # Complementary cases (n and 15-n) cut the same edges.
    if case == 1 or case == 14:
        return [S, R]
    elif case == 2 or case == 13:
        return [R, Q]
    elif case == 3 or case == 12:
        return [S, Q]
    elif case == 4 or case == 11:
        return [P, Q]
    elif case == 5 or case == 10:
        # Ambiguous saddle cell: two segments.
        return [S, P, R, Q]
    elif case == 6 or case == 9:
        return [P, R]
    elif case == 7 or case == 8:
        return [S, P]
def ComputeSquares(index, grid, circle_objs, x_size, cell_size_x, cell_size_y):
    """Score one grid cell against all particles and return its contour lines.

    Python 2 code: the `index / x_size` expressions rely on Python 2
    integer floor division.
    """
    score = ''
    # Metaball field value accumulated at each of the cell's four corners.
    sum_corner = [0.0, 0.0, 0.0, 0.0]
    x = grid[int(math.floor(index / x_size))][index - ((index / x_size) * x_size)][0]
    y = grid[int(math.floor(index / x_size))][index - ((index / x_size) * x_size)][1]
    # Put 'slime' at bottom of the screen
    if int(math.floor(index / x_size))-1 == len(grid[:]) - 2:
        sum_corner[2] += 1.1
        sum_corner[3] += 1.1
    # Put line near top to look like water surface
    if int(math.floor(index / x_size))-1 == 10:
        sum_corner[2] += 1.1
        sum_corner[3] += 1.1
    # print x, y
    for p in circle_objs:
        # Coarse proximity filter before the full metaball contribution.
        if abs(p.Position[0] - x) < p.Radius ** 2 and abs(p.Position[1] - y) < p.Radius ** 2:
            sum_corner[0] += pow(p.Radius, 2) / (pow(x - p.Position[0], 2) + pow(y - p.Position[1], 2))
            sum_corner[1] += pow(p.Radius, 2) / (pow((x + cell_size_x) - p.Position[0], 2) + pow(y - p.Position[1], 2))
            sum_corner[2] += pow(p.Radius, 2) / (
                pow((x + cell_size_x) - p.Position[0], 2) + pow((y + cell_size_y) - p.Position[1], 2))
            sum_corner[3] += pow(p.Radius, 2) / (pow(x - p.Position[0], 2) + pow((y + cell_size_y) - p.Position[1], 2))
    # Threshold each corner at 1 to form the 4-bit marching-squares code.
    for corner in sum_corner:
        if corner > 1:
            score += '1'
        else:
            score += '0'
    # NOTE(review): this condition is always True (a value cannot be both 0
    # and 15) — `and` was presumably intended. DrawLine handles 0 itself,
    # so the else branch below is unreachable.
    if int(score, 2) != 0 or int(score, 2) != 15:
        lines = DrawLine(score, x, y, cell_size_x, cell_size_y, sum_corner)
        return lines
    else:
        return []
def createRandCircle(min_radius, max_radius, sh, sw, max_dx, max_dy):
    """Build one Particle with a random radius, a random x fully inside the
    screen width, y at the top edge, and a mostly-downward initial velocity.
    (sh and max_dy are accepted for call-site compatibility but unused.)"""
    particle = Particle()
    particle.SetRadius(random.randrange(min_radius, max_radius))
    r = particle.Radius
    # Spawn at the top, horizontally clear of both screen borders.
    particle.SetPosition([random.randrange(r, sw - r), r])  # random.randrange(p.Radius, sh - p.Radius)
    # Small horizontal drift; vertical speed grows with the radius.
    particle.SetMovement([random.random() * max_dx + 1, r + (random.random() * 3)])  #
    return particle
if __name__ == '__main__':
    # Window / pygame setup.
    pygame.init()
    windowSurface = pygame.display.set_mode((500, 400), 0, 32)
    pygame.display.set_caption("Paint")
    # get screen size
    info = pygame.display.Info()
    sw = info.current_w
    sh = info.current_h
    # Marching-squares sampling grid over the window (~30px cells).
    grid = CalculateGrid(sw, sh, 30)  # 26
    y_size = len(grid[:])
    x_size = len(grid[0])
    cell_size_x = sw / x_size
    cell_size_y = sh / y_size
    # NOTE: Python 2 print statements — this file targets Python 2.
    print x_size, y_size
    print cell_size_x, cell_size_y
    # Particle spawn parameters: no horizontal drift, radii in [10, 15).
    max_dx = 0
    max_dy = 7
    min_radius = 10
    max_radius = 15
    circle_objs = []
    num_circles = 7
    num_cores = multiprocessing.cpu_count()
    print "num_cores", num_cores
    for i in range(0, num_circles):
        circle_objs += [createRandCircle(min_radius, max_radius, sh, sw, max_dx, max_dy)]
    BLACK = (0, 0, 0)
    GREEN = (0, 255, 0)
    windowSurface.fill(BLACK)
    # Main loop: handle input, advance particles, redraw the metaball
    # contour via marching squares, capped at ~20 FPS.
    while True:
        for event in pygame.event.get():
            if event.type == QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN:
                # Space bar spawns an extra droplet.
                if event.key == pygame.K_SPACE:
                    circle_objs += [createRandCircle(min_radius, max_radius, sh, sw, max_dx, max_dy)]
        windowSurface.fill(BLACK)
        # Make parallel
        for particle in circle_objs:
            # Fall while still above the bottom edge; otherwise bleed speed off.
            if (particle.Position[1] + particle.Radius) + 1 < sh:
                particle.SetPosition([particle.Position[0] + 0, particle.Position[1] + particle.Movement[1]])
            elif particle.Position[1] + 1 < sh:
                dy = particle.Movement[1] - 0.5 if particle.Movement[1] > 0 else 0
                dx = particle.Movement[0] - 0.5 if particle.Movement[1] > 0 else 0
                particle.SetMovement([dx, dy])
        for particle in circle_objs:
            # Drop expired particles.
            # NOTE(review): removing from the list while iterating it skips
            # the element after each removal — confirm this is acceptable.
            if particle.DecreaseLifetime():
                circle_objs.remove(particle)
        # update position with direction
        # pos = particle.Position
        # print dx, dy
        # # check bounds
        # if (pos[0] - radius) + dx < 0 or (pos[0] + radius) + dx > sw:
        #     dx = -dx
        #     particle.SetMovement([dx, dy])
        # if (pos[1] - radius) + dy < 0 or (pos[1] + radius) + dy > sh:
        #     dy = -dy
        #     particle.SetMovement([dx, dy])
        # Make parallel
        lines = joblib.Parallel(n_jobs=1)(joblib.delayed(ComputeSquares)(i, grid, circle_objs, x_size, cell_size_x,
                                                                         cell_size_y) for i in range(0, x_size * y_size))
        # print lines
        for line in lines:
            if line != None and len(line) > 0:
                pygame.draw.lines(windowSurface, GREEN, False, line, 1)
        pygame.time.Clock().tick(20)
        pygame.display.update()
|
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 10 14:47:14 2020
@author: logun
"""
from scipy import ndimage
import matplotlib.pyplot as plt
import numpy as np
import kernel_function as kf
import cv2
# Load the source bitmap as grayscale; value 255 is treated as background
# everywhere below.
img = cv2.imread('ring.png', cv2.IMREAD_GRAYSCALE)
plt.figure(dpi=700)
# (rows, cols) of the image.
dims = img.shape
#find start point:
def start_point(image=None):
    """Return (row, col) of the first non-background (!= 255) pixel,
    scanning row-major, or None when every pixel is background.

    Generalized: accepts any 2-D indexable image; with no argument it
    falls back to the module-level ``img`` exactly as before.
    """
    if image is None:
        image = img
    # Derive the dimensions from the image itself instead of relying on
    # the module-level ``dims`` global (same values for the default call).
    for row in range(len(image)):
        for col in range(len(image[0])):
            if image[row][col] != 255:
                return row, col
    return None
sp = start_point()
cur_point = None
last_point = None
chain = []
print(sp)
# Follow the contour pixel by pixel, recording the direction code taken at
# each step, until we arrive back at the start point.
while (cur_point != sp):
    if cur_point == None:
        # First iteration: begin at the start point.
        cur_point = sp
    for entry in kf.get_four_neighbours_ext(cur_point[0], cur_point[1], dims):
        # entry = (neighbour coordinate, direction code)
        coord = entry[0]
        num = entry[1]
        # Step to the first non-background neighbour we did not just leave.
        # NOTE(review): this assumes a 1px-wide closed contour; a junction
        # or open curve would loop forever — confirm input guarantees this.
        if (img[coord[0]][coord[1]] != 255 and coord != last_point):
            chain.append(num)
            last_point = cur_point
            cur_point = coord
            #print(chain)
            break
print(chain)
plt.imshow(img)
from math import log
from drivingenvs.vehicles.ackermann import AckermannSteeredVehicle
from drivingenvs.envs.driving_env_with_vehicles import DrivingEnvWithVehicles
from yarp.envs.torchgymenv import TorchGymEnv
from yarp.envs.unsupervised_env import UnsupervisedEnv
from yarp.policies.tanhgaussianpolicy import TanhGaussianMLPPolicy
from yarp.networks.mlp import MLP
from yarp.networks.valuemlp import SingleHeadQMLP
from yarp.networks.mlp_discriminator import MLPDiscriminator
from yarp.replaybuffers.unsupervisedreplaybuffer import UnsupervisedReplayBuffer
from yarp.algos.sac import SAC
from yarp.algos.diayn import DIAYN
from yarp.experiments.experiment import Experiment
from torch import nn
# Number of DIAYN skills/contexts and episode length.
contexts = 10
max_steps = 50
# Upper bound on the DIAYN return: -T * log(1/K) when the discriminator is
# perfectly confident at every step; used below to normalize rewards.
max_rew = -max_steps * log(1/contexts)
print('contexts = {}, max_steps = {}, max_rew = {}'.format(contexts, max_steps, max_rew))
veh = AckermannSteeredVehicle((4, 2))
env = DrivingEnvWithVehicles(veh, distance=200.0, n_lanes = 5, dt=0.2, max_steps = max_steps, start_lane = 2)
# Wrap the env so observations carry a skill/context vector.
env = UnsupervisedEnv(env, context_dim=contexts)
print(env.reset())
policy = TanhGaussianMLPPolicy(env, hiddens = [300, 300], hidden_activation=nn.ReLU)
# Twin Q networks plus target copies (standard SAC double-Q setup).
qf1 = SingleHeadQMLP(env, hiddens = [300, 300], hidden_activation=nn.ReLU, logscale=True, scale=1.0)
target_qf1 = SingleHeadQMLP(env, hiddens = [300, 300], hidden_activation=nn.ReLU, logscale=True, scale=1.0)
qf2 = SingleHeadQMLP(env, hiddens = [300, 300], hidden_activation=nn.ReLU, logscale=True, scale=1.0)
target_qf2 = SingleHeadQMLP(env, hiddens = [300, 300], hidden_activation=nn.ReLU, logscale=True, scale=1.0)
buf = UnsupervisedReplayBuffer(env)
sac = SAC(env, policy, qf1, target_qf1, qf2, target_qf2, buf, discount = 0.99, reward_scale=1/max_rew, learn_alpha=True, alpha=0.01, steps_per_epoch=1000, qf_itrs=1000, qf_batch_size=256, target_update_tau=0.005, epochs=int(1e7))
# Discriminator reads only selected observation indices.
# NOTE(review): in_idxs presumably select velocity/lane-id features —
# confirm against the env's observation layout.
disc = MLPDiscriminator(in_idxs=[2, 7, 8, 9, 10, 11], outsize=env.context_dim, hiddens = [300, 300])
print(disc)
diayn = DIAYN(env, buf, disc, sac)
experiment = Experiment(diayn, 'diayn_learn_alpha_v_laneid_full_traffic', save_every=10, save_logs_every=1)
#import pdb; pdb.set_trace()
experiment.run()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Aug 31 15:13:57 2017
@author: mulugetasemework
"""
# encoding: UTF-8
# Copyright 2016 Google.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Import TensorFlow, falling back to a locally available 'tf' module.
try:
    import tensorflow as tf
except ImportError:
    # BUGFIX: narrowed from a bare 'except:' so unrelated errors raised
    # during TensorFlow's own import are not silently swallowed.
    import tf
#print("Tensorflow version " + tf.__version__)
# Fix the graph-level random seed for reproducibility (TF1 API).
tf.set_random_seed(0.0)
import numpy as np
import os
import matplotlib.pyplot as plt
# Spyder-specific: load data and helpers (imageSize1, n_classes, feature
# arrays, the return_* bookkeeping functions) into the global namespace.
runfile('/Users/.../Phyton/processDataAndSetup.py', wdir='/Users/.../Phyton')
learning_rate=0.0000000001
#
# · · · · · · · · · · (input data, flattened pixels) X [batch, imageSize1*imageSize1] # imageSize1*imageSize1 = imageSize1 * imageSize1
# \x/x\x/x\x/x\x/x\x/ -- fully connected layer (softmax) W [imageSize1*imageSize1, 10] b[10]
# · · · · · · · · Y [batch, 10]
# The model is:
#
# Y = softmax( X * W + b)
# X: matrix for 100 grayscale images of imageSize1ximageSize1 pixels, flattened (there are 100 images in a mini-batch)
# W: weight matrix with imageSize1*imageSize1 lines and 10 columns
# b: bias vector with 10 dimensions
# +: add with broadcasting: adds the vector to each line of the matrix (numpy)
# softmax(matrix) applies softmax on each line
# softmax(line) applies an exp to each value then divides by the norm of the resulting line
# Y: output matrix with 100 lines and 10 columns
# input X: imageSize1ximageSize1 grayscale images, the first dimension (None) will index the images in the mini-batch
X = tf.placeholder(tf.float32, [None, imageSize1,imageSize1])
# correct answers will go here
Y_ = tf.placeholder(tf.float32, [None, n_classes])
# weights W[imageSize1*imageSize1, n_classes] imageSize1*imageSize1=imageSize1*imageSize1
W = tf.Variable(tf.zeros([imageSize1*imageSize1, n_classes]))
# biases b[n_classes]
b = tf.Variable(tf.zeros([n_classes]))
# flatten the images into a single line of pixels
# -1 in the shape definition means "the only possible dimension that will preserve the number of elements"
XX = tf.reshape(X, [-1, imageSize1*imageSize1])
# The model
Y = tf.nn.softmax(tf.matmul(XX, W) + b)
# loss function: cross-entropy = - sum( Y_i * log(Yi) )
# Y: the computed output vector
# Y_: the desired output vector
# cross-entropy
# log takes the log of each element, * multiplies the tensors element by element
# reduce_mean will add all the components in the tensor
# so here we end up with the total cross-entropy for all images in the batch
cross_entropy = -tf.reduce_mean(Y_ * tf.log(Y)) * 1000.0 # normalized for batches of 100 images,
# *10 because "mean" included an unwanted division by 10
# accuracy of the trained model, between 0 (worst) and 1 (best)
correct_prediction = tf.equal(tf.argmax(Y, 1), tf.argmax(Y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
# training, learning rate = 0.005
# NOTE(review): the comment above says 0.005 but learning_rate is set to
# 1e-10 — almost certainly too small to learn anything; confirm intent.
train_step = tf.train.GradientDescentOptimizer(learning_rate).minimize(cross_entropy)
# matplotlib visualisation
allweights = tf.reshape(W, [-1])
allbiases = tf.reshape(b, [-1])
# init
init = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init)
def training_step(i, update_train_data, update_test_data, update_valid_data):
    """Run one mini-batch of training and, when the corresponding flag is
    truthy, log train / validation / test metrics.

    Relies on globals created by processDataAndSetup.py (batch sizes,
    feature/label arrays, the return_* counter and logging helpers).
    """
    # Next training batch window, tracked by an external counter helper.
    thisCountTr = return_counterUpdateTr()
    start = thisCountTr[-1]
    end = start + batch_size
    batch_X,batch_Y = train_features[start:end], train_labels[start:end]
    # compute training values
    if update_train_data:
        a, c, w, b = sess.run([accuracy, cross_entropy, allweights, allbiases], {X: batch_X, Y_: batch_Y})
        print(str(i) + ": |--------- " + str(a) + " --- " + str(c) +
              " --- <-Training accuracy:" + " <- loss: " + " : epoch " +
              str(i*100//len(train_features)+1) + " (lr:" + str(learning_rate) + ")")
        return_train_cost(c)
        return_train_accuracy(a,i,testEvery)
        # Optionally also score the transformed (augmented) training batch.
        if TransormTrainingData==1:
            if end <=len( test_features):
                batch_X_trans,batch_Y_trans = train_features_trans[start:end], train_labels_trans[start:end]
                a_trans, c_trans, w_trans, b_trans = sess.run([accuracy, cross_entropy, allweights, allbiases], {X: batch_X_trans, Y_: batch_Y_trans})
                return_train_cost_trans(c_trans)
                return_train_accuracy_trans(a_trans)
    if update_valid_data and doNotValidate == 0:
        # Validation uses a single-example window starting at step i.
        startV = i
        end = startV + 1
        batch_X_valid,batch_Y_valid = valid_features[startV:end], valid_labels[startV:end]
        a, valid_cost, w, b = sess.run([accuracy, cross_entropy, allweights, allbiases], {X: batch_X_valid, Y_: batch_Y_valid})
        print(str(i) + ":*** Validation accuracy:" + str(a) +
              " loss: " + str(valid_cost) + " (lr:" + str(learning_rate) + ")")
        return_valid_cost(valid_cost)
        return_valid_accuracy(a,i)
    if update_test_data:
        thisCount = return_counterUpdate()
        startTst = thisCount[-1]
        end = startTst + test_batch_size
        if end <=len( test_features):
            batch_X_test,test_labels2 = test_features[startTst:end], test_labels[startTst:end]
            a, c = sess.run([accuracy, cross_entropy ], {X: batch_X_test, Y_: test_labels2})
            print(str(i) + ": ********* epoch " + str(i*100//len(test_features)+1) +
                  " ********* test accuracy:" + str(a) + " test loss: " + str(c))
            return_test_cost(c)
            return_test_accuracy(a,i)
            if test_thiscode==1:
                # Sanity check: score the same batch against swapped labels.
                test_labels3 = np.array(swapped_test_labels[startTst:end])
                aS, ctestS= sess.run([accuracy, cross_entropy ], {X: batch_X_test, Y_: test_labels3})
                print("inside code test")
                return_test_costS(ctestS)
                return_test_accuracyS(aS,i)
        if test_shuffled == 1:
            thisCount = return_counterUpdate_shuff_test()
            startTst_shuff = thisCount[-1]
            end_shuff = startTst_shuff + test_batch_size
            if end_shuff <= len(test_features):
                test_labels_reversed = test_labels.iloc[::-1]
                test_features_reversed = test_features[::-1]
                # NOTE(review): features come from the reversed array, but
                # labels are sliced from the UN-reversed test_labels and the
                # slice starts at startTst (not startTst_shuff) — this looks
                # like a bug; confirm the intended feature/label pairing.
                batch_X_shuff,batch_Y_shuff = test_features_reversed[startTst_shuff: end_shuff], test_labels[startTst: end_shuff]
                aS_shuff, ctestS_shuff = sess.run([accuracy, cross_entropy ], {X: (batch_X_shuff), Y_: (batch_Y_shuff) })
                return_test_cost_shuff(ctestS_shuff)
                return_test_accuracy_shuff(aS_shuff,i)
        # Evaluate on the transformed (augmented) test set.
        thisCount = return_counterUpdate_trans()
        startTst_trans = thisCount[-1]
        end_trans = startTst_trans + test_batch_size_trans
        if end_trans <= len(test_features_trans):
            batch_X_test_trans,test_labels2_trans = test_features_trans[startTst_trans:end_trans], test_labels_trans[startTst_trans: end_trans]
            a_trans, c_trans = sess.run([accuracy, cross_entropy ], {X: batch_X_test_trans, Y_: test_labels2_trans})
            return_test_cost_trans(c_trans)
            return_test_accuracy_trans(a_trans,i,testEvery_trans)
    # Finally: the actual backprop step on this mini-batch.
    sess.run(train_step, {X: batch_X, Y_: batch_Y })
# Train: update_train_data is truthy for every i > 0; test and validation
# metrics are computed every testEvery / validateEvery steps respectively.
for i in range(epochs): training_step(i, i , i % testEvery == 0, i % validateEvery==0)
# Spyder-specific: plot the losses/accuracies collected during training.
runfile('/Users/.../plotDLs.py', wdir='/Users/.../Phyton')
mainTitle2='1L_softmax--' + 'TransformTrainingData:' + str(TransormTrainingData
    ) +'.svg'
mainTitle='1_layer_softmax '+ ' ******* Translate: ' + str(translateImage
    )+ ' Rotate: ' + str(rotateImage)+ ' Affine: ' + str(affineOrNot
    )+ ' Perspective: ' + str(perspectiveOrNot)+ ' Warp: ' + str(WarpOrNot
    ) + ' keepDataLength: ' + str(keepDataSize
    ) + ' TransformTrainingData: ' + str(TransormTrainingData
    ) + ' \n Learning_rate : ' + str(learning_rate)
figDir="/Users/mulugetasemework/Documents/Python/"
figname= mainTitle+'.svg'
# 'f' is the figure created by plotDLs.py.
f.suptitle(mainTitle,size=7 )
plt.subplots_adjust(left=0.1, wspace=0.2, top=0.7, bottom=0.2)
f.show()
os.chdir(figDir)
#plt.savefig(mainTitle2, format='svg', dpi=1200)
|
# coding: utf-8
"""
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
import re
class CloudJobFiles(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    def __init__(self):
        """
        CloudJobFiles - a model defined in Swagger
        :param dict swaggerTypes: The key is attribute name
        and the value is attribute type.
        :param dict attributeMap: The key is attribute name
        and the value is json key in definition.
        """
        self.swagger_types = {
            'file_matching_pattern': 'Empty',
            'names': 'list[CloudJobFilesName]',
            'total': 'int',
            'total_failed': 'int',
            'total_pending': 'int',
            'total_processing': 'int',
            'total_succeeded': 'int'
        }
        self.attribute_map = {
            'file_matching_pattern': 'file_matching_pattern',
            'names': 'names',
            'total': 'total',
            'total_failed': 'total_failed',
            'total_pending': 'total_pending',
            'total_processing': 'total_processing',
            'total_succeeded': 'total_succeeded'
        }
        self._file_matching_pattern = None
        self._names = None
        self._total = None
        self._total_failed = None
        self._total_pending = None
        self._total_processing = None
        self._total_succeeded = None

    @property
    def file_matching_pattern(self):
        """
        Gets the file_matching_pattern of this CloudJobFiles.
        The file filtering logic to find files for this job
        :return: The file_matching_pattern of this CloudJobFiles.
        :rtype: Empty
        """
        return self._file_matching_pattern

    @file_matching_pattern.setter
    def file_matching_pattern(self, file_matching_pattern):
        """
        Sets the file_matching_pattern of this CloudJobFiles.
        The file filtering logic to find files for this job
        :param file_matching_pattern: The file_matching_pattern of this CloudJobFiles.
        :type: Empty
        """
        self._file_matching_pattern = file_matching_pattern

    @property
    def names(self):
        """
        Gets the names of this CloudJobFiles.
        A list of files to be addressed by this job. (Note* these are only reported when audit_level is 'high'
        :return: The names of this CloudJobFiles.
        :rtype: list[CloudJobFilesName]
        """
        return self._names

    @names.setter
    def names(self, names):
        """
        Sets the names of this CloudJobFiles.
        A list of files to be addressed by this job. (Note* these are only reported when audit_level is 'high'
        :param names: The names of this CloudJobFiles.
        :type: list[CloudJobFilesName]
        """
        self._names = names

    @property
    def total(self):
        """
        Gets the total of this CloudJobFiles.
        The total number of files addressed by this job
        :return: The total of this CloudJobFiles.
        :rtype: int
        """
        return self._total

    @total.setter
    def total(self, total):
        """
        Sets the total of this CloudJobFiles.
        The total number of files addressed by this job
        :param total: The total of this CloudJobFiles.
        :type: int
        """
        self._total = total

    @property
    def total_failed(self):
        """
        Gets the total_failed of this CloudJobFiles.
        The number of files which failed
        :return: The total_failed of this CloudJobFiles.
        :rtype: int
        """
        return self._total_failed

    @total_failed.setter
    def total_failed(self, total_failed):
        """
        Sets the total_failed of this CloudJobFiles.
        The number of files which failed
        :param total_failed: The total_failed of this CloudJobFiles.
        :type: int
        """
        self._total_failed = total_failed

    @property
    def total_pending(self):
        """
        Gets the total_pending of this CloudJobFiles.
        The number of files pending action
        :return: The total_pending of this CloudJobFiles.
        :rtype: int
        """
        return self._total_pending

    @total_pending.setter
    def total_pending(self, total_pending):
        """
        Sets the total_pending of this CloudJobFiles.
        The number of files pending action
        :param total_pending: The total_pending of this CloudJobFiles.
        :type: int
        """
        self._total_pending = total_pending

    @property
    def total_processing(self):
        """
        Gets the total_processing of this CloudJobFiles.
        The number of files currently being processed
        :return: The total_processing of this CloudJobFiles.
        :rtype: int
        """
        return self._total_processing

    @total_processing.setter
    def total_processing(self, total_processing):
        """
        Sets the total_processing of this CloudJobFiles.
        The number of files currently being processed
        :param total_processing: The total_processing of this CloudJobFiles.
        :type: int
        """
        self._total_processing = total_processing

    @property
    def total_succeeded(self):
        """
        Gets the total_succeeded of this CloudJobFiles.
        The total number of files successfully completed
        :return: The total_succeeded of this CloudJobFiles.
        :rtype: int
        """
        return self._total_succeeded

    @total_succeeded.setter
    def total_succeeded(self, total_succeeded):
        """
        Sets the total_succeeded of this CloudJobFiles.
        The total number of files successfully completed
        :param total_succeeded: The total_succeeded of this CloudJobFiles.
        :type: int
        """
        self._total_succeeded = total_succeeded

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        # .items() works on both Python 2 and 3, removing the runtime
        # dependency on six.iteritems here.
        for attr, _ in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        # BUGFIX: the original unconditionally accessed other.__dict__ and
        # raised AttributeError when compared with a non-model object
        # (e.g. None or an int); return False instead.
        if not isinstance(other, CloudJobFiles):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
|
#
# @lc app=leetcode.cn id=188 lang=python3
#
# [188] 买卖股票的最佳时机 IV
#
# @lc code=start
class Solution:
    def maxProfit(self, k: int, prices: List[int]) -> int:
        """DP table: 最多进行k次交易 (at most k transactions).

        dp[i][j][0] = best profit on day i with <= j transactions, not holding
        dp[i][j][1] = best profit on day i with <= j transactions, holding
        """
        if not prices:
            return 0
        n = len(prices)
        # BUGFIX: the original used [[[0, -inf]] * (k+1)] * n, which makes
        # every dp[i][j] alias the SAME inner list, so one assignment
        # clobbered the whole table.  Build independent cells instead.
        dp = [[[0, -float("inf")] for _ in range(k + 1)] for _ in range(n)]
        for i in range(n):
            for j in range(k, 0, -1):
                if i == 0:
                    # Base case: day 0, either no position or bought today.
                    dp[i][j][0] = 0
                    dp[i][j][1] = -prices[0]
                    continue
                # Rest / sell today.
                dp[i][j][0] = max(dp[i-1][j][0], dp[i-1][j][1] + prices[i])
                # Rest / buy today (consumes one transaction).
                dp[i][j][1] = max(dp[i-1][j][1], dp[i-1][j-1][0] - prices[i])
        return dp[n-1][k][0]
# @lc code=end
|
import pandas as pd
from avg_fun import kde_wavg
def agg_itineraries(points_merged, CONFIG):
    """Aggregate distance/overlap statistics per (mot_segment, itinerary),
    attach warning flags, and return the diagnostics sorted so the most
    plausible itinerary per MoT segment comes first (None for empty input).
    """
    # TODO is this still needed the escape below
    if points_merged.empty:
        return None
    grouped = points_merged.groupby(['mot_segment_id', 'itinerary_id'])
    # NOTE(review): dict-of-dicts aggregation with column renaming was
    # deprecated in pandas 0.20 and removed in 0.25 — this requires an
    # older pandas; confirm the pinned version.
    diagnostics = grouped.agg({
        'distance': {
            'mean': lambda x: x.mean(),
            'count': lambda x: x.count(),
            'median': lambda x: x.median(),
            'min': lambda x: x.min(),
            'max': lambda x: x.max(),
            # 'filtered_mean': lambda x: x[outlier_flag(x)].mean()
            # 'sem': lambda x: x.std() / np.sqrt(x.count().astype(float))
            # Standard error on the mean -- big bias because of the non-gaussisanity of the underlying distribution...
            # also remove NaN
            # Need the in_mot_segment boolean to implement below...
        },
        'within_mot_segment': {
            'n_time_in': lambda x: x.astype(int).sum()
        }})
    # Count the total number of points in MoT segment, join for all itineraries
    pts_within_mot = points_merged.loc[points_merged['within_mot_segment'], ['mot_segment_id', 'point_id']]
    pts_within_mot_count = pts_within_mot.drop_duplicates().groupby(['mot_segment_id']).count()
    cnt = diagnostics.join(pts_within_mot_count)['point_id'].astype(float)
    # Removes the top-most hierarchical index generated by the aggregation query (distance)
    diagnostics.columns = diagnostics.columns.droplevel(0)
    # This needs a full apply since more than one column is involved
    long_stop_weight = float(CONFIG.get('params', 'TRAIN_LONG_STOP_WEIGHT'))
    diagnostics['kde_weighed_avg'] = grouped.apply(kde_wavg, wgt=long_stop_weight)
    # Changes dtypes
    col_list = ['min', 'max', 'median', 'mean', 'kde_weighed_avg']
    diagnostics[col_list] = diagnostics[col_list].astype(int)
    # Sometimes is boolean if all 1
    diagnostics['n_time_in'] = diagnostics['n_time_in'].astype(float) / cnt
    diagnostics['warning_str'] = diagnostics.apply(raise_flags, axis=1, args=(CONFIG,))
    diagnostics['warning_bool'] = diagnostics['warning_str'] != ''
    # Sorts on boolean is false first (no warning) then distance kde weighted
    return diagnostics.reset_index().sort_values(['mot_segment_id', 'warning_bool', 'kde_weighed_avg'])
def compare_itineraries(group):
    """ Generate statistics from the diagnostics/metrics
    requires a series, e.g. x = diagnostics['kde_weighed_avg']
    !! GROUP MUST BE SORTED IN ASCENDING ORDER !!
    """
    col_names = ['itinerary_id', 'count', 'n_time_in', 'warning_bool', 'warning_str', 'min_value', 'delta_next', 'n2x',
                 'confidence']
    stats = pd.Series(index=col_names)
    # Because the group is pre-sorted, row 0 is the best itinerary.
    stats[col_names[:5]] = group[col_names[:5]].iloc[0]  # x.argmin()
    x = group['kde_weighed_avg']
    stats['min_value'] = x.iloc[0]  # x.min()
    if x.shape[0] > 1:
        # Gap between the two best itineraries, and how many runners-up sit
        # within 2x of the best distance.
        stats['delta_next'] = x.nsmallest(2).iloc[1] - x.nsmallest(2).iloc[0]
        stats['n2x'] = x[x < x.min()*2.].count() - 1
    else:
        # Sentinel values when there is only one candidate itinerary.
        stats['delta_next'] = -1
        stats['n2x'] = -1
    # Confidence =0 if flag is raised, =1 if no flag and kde_weighed_avg=0, reduces by 1/2 if other itineraries nearby
    # TODO : check it works, also add * n_pts_in ?
    stats['confidence'] = (1. - group['warning_bool'].iloc[0].astype(int)) * (1. - stats['min_value']/7000.)\
        * 0.5 * (2 - (int((stats['delta_next'] < 1500) and (stats['delta_next'] >= 0)))) * stats['n_time_in']
    return stats
def raise_flags(diagnostic, CONFIG):
    """
    Build the warning string for one diagnostics row.

    diagnostic is one row of diagnostics
    :param diagnostic: pandas.Series (single row of diagnostics DataFrame)
    :param CONFIG: config object exposing get('params', <option>)
    :return: str, each warning separated by \n (empty string when no flag fires)
    """
    min_rows = int(CONFIG.get('params', 'N_ROW_MIN'))
    max_dist = int(CONFIG.get('params', 'N_DIST_MAX'))
    min_time = float(CONFIG.get('params', 'N_TIME_MIN'))
    # (condition, message) pairs; a leading '' keeps the original behaviour
    # of starting the joined string with a newline when any flag fires.
    checks = [
        (diagnostic['count'] < min_rows,
         'WARNING -- Low Counts (<{x})'.format(x=min_rows)),
        (diagnostic['kde_weighed_avg'] > max_dist,
         'WARNING -- High Avg. Distance (>{x}k)'.format(x=int(max_dist/1000))),
        (diagnostic['n_time_in'] < min_time,
         'WARNING -- Low overlap with MoT segment (<{x})'.format(x=min_time)),
    ]
    return '\n'.join([''] + [msg for fired, msg in checks if fired])
def get_best_itinerary(trip_link, points, point_meta, CONFIG):
    """Score every candidate itinerary for each MoT segment and surface
    head-to-head statistics for the best one.

    Returns (stats, diagnostics)."""
    # Join the link table to point metadata (dropping out-of-order outlier
    # points), then attach the raw points themselves.
    link_cols = trip_link.reset_index()[['mot_segment_id', 'itinerary_id', 'leg_id', 'segment_id']]
    clean_meta = point_meta[~point_meta['ooo_outlier']].reset_index()
    points_merged = pd.merge(link_cols, clean_meta, on='segment_id', how='inner')
    points_merged = pd.merge(points_merged, points.reset_index(), on='point_id', how='inner')
    # Per-itinerary quality metrics (returned sorted best-first), then
    # compare the best itinerary of each segment against the rest.
    diagnostics = agg_itineraries(points_merged, CONFIG)
    stats = diagnostics.groupby('mot_segment_id').apply(compare_itineraries)
    return stats, diagnostics
|
# Chaining
class HashTable:
    """Hash table using separate chaining: each bucket slot holds a singly
    linked list of [key, value, next] cells."""

    def __init__(self, hash_func=None, bucket_size=16):
        # Fall back to the builtin hash when no custom function is given.
        self.hash_func = hash if hash_func is None else hash_func
        self.bucket_size = bucket_size
        self.bucket = [None] * bucket_size

    def _slot(self, key):
        # Bucket index for a key.
        return self.hash_func(key) % self.bucket_size

    def set(self, key, value):
        """Insert key/value, overwriting the value when the key exists."""
        slot = self._slot(key)
        cell = self.bucket[slot]
        if cell is None:
            self.bucket[slot] = [key, value, None]
            return
        tail = None
        while cell is not None:
            if cell[0] == key:
                cell[1] = value
                return
            tail = cell
            cell = cell[2]
        # Key not present: append a new cell at the end of the chain.
        tail[2] = [key, value, None]

    def get(self, key):
        """Return the value stored under key, or None when absent."""
        cell = self.bucket[self._slot(key)]
        while cell is not None:
            if cell[0] == key:
                return cell[1]
            cell = cell[2]
        return None

    def delete(self, key):
        """Unlink key from its chain; no-op when the key is absent."""
        slot = self._slot(key)
        cell = self.bucket[slot]
        prev = None
        while cell is not None:
            if cell[0] == key:
                if prev is None:
                    # Head of the chain: bucket points at the successor.
                    self.bucket[slot] = cell[2]
                else:
                    prev[2] = cell[2]
                return
            prev = cell
            cell = cell[2]
        return

    def print_hash(self):
        """Dump the raw bucket array (debug helper)."""
        print(self.bucket)
# Open Addressing
class HashTable2:
    """Hash table using open addressing with linear probing."""
    def __init__(self, hash_func=None, bucket_size=16):
        if hash_func is None:
            self.hash_func = hash
        else:
            self.hash_func = hash_func
        self.bucket_size = bucket_size
        self.bucket = [None] * bucket_size
        # Number of occupied slots; guards inserts into a full table.
        self.count = 0
    def insert(self, key, value):
        """Insert or update key/value; silently ignored when the table is
        full (preserving the original early-return behaviour)."""
        if self.count >= self.bucket_size:
            return
        hash_value = self.hash_func(key)
        bucket_index = hash_value % self.bucket_size
        # BUGFIX: probe ALL slots, wrapping past the end of the array; the
        # original only scanned bucket_index..end, so keys colliding near
        # the tail were silently dropped.
        for offset in range(self.bucket_size):
            i = (bucket_index + offset) % self.bucket_size
            if self.bucket[i] is None:
                self.bucket[i] = [key, value]
                # BUGFIX: the original never incremented count, so the
                # full-table guard above could never fire.
                self.count += 1
                return
            elif self.bucket[i][0] == key:
                # Key already present: update in place (count unchanged).
                self.bucket[i][1] = value
                return
    def delete(self, key):
        # Not implemented in the original; kept as a stub.  (A correct
        # open-addressing delete needs tombstones so probe chains survive.)
        pass
    def print_table(self):
        """Dump the raw bucket array (debug helper)."""
        print(self.bucket)
# Demo: keys 0/20/40 all map to bucket 0 (x % 20 gives 0, then % 16),
# so they end up chained in a single bucket.
ht = HashTable(hash_func=lambda x: x % 20)
ht.set(0, 'a')
ht.set(20, 'b')
ht.set(40, 'c')
print(ht.get(0))
print(ht.get(20))
print(ht.get(40))
# Absent key: prints None.
print(ht.get(60))
ht.print_hash()
|
# Multiplication-table ("tabuada") prompt loop: keeps asking for a value
# and printing its 1..10 times table; a negative value exits the loop.
while True:
    valor = int(input('Qual valor você quer saber a tabuada? '))
    if valor >= 0:
        print(10*'=-=')
        for i in range(1,11):
            print(f'{valor} x {i} = {valor * i}')
        print(10*'=-=')
    else:
        # Negative input ends the session.
        break
print('Obrigado por acessar')
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
import load_text
# Text CNN: three parallel Conv1D branches (filter sizes 3/4/5) over a
# 54-token input, each max-pooled over its full output length, then
# concatenated into a single softmax classifier head.
embedding_dim = 300
inputs = keras.Input(shape=(54,), name="input_text")
embedding_layer = layers.Embedding(load_text.vocab_size, embedding_dim, name="embedding")(inputs)
conv_3_layer = layers.Conv1D(100, 3, activation='relu', name="filter_size_3")(embedding_layer)
conv_4_layer = layers.Conv1D(100, 4, activation='relu', name="filter_size_4")(embedding_layer)
conv_5_layer = layers.Conv1D(100, 5, activation='relu', name="filter_size_5")(embedding_layer)
# pool_size matches each branch's post-convolution sequence length (54-k+1).
max_pool_3_layer = layers.MaxPool1D(pool_size=52, name="max_pool_3", padding="same")(conv_3_layer)
max_pool_4_layer = layers.MaxPool1D(pool_size=51, name="max_pool_4", padding="same")(conv_4_layer)
max_pool_5_layer = layers.MaxPool1D(pool_size=50, name="max_pool_5", padding="same")(conv_5_layer)
flatten_3_layer = layers.Flatten()(max_pool_3_layer)
flatten_4_layer = layers.Flatten()(max_pool_4_layer)
flatten_5_layer = layers.Flatten()(max_pool_5_layer)
concatenate_layer = layers.concatenate([flatten_3_layer, flatten_4_layer, flatten_5_layer])
dropout_layer = layers.Dropout(rate=0.5)(concatenate_layer)
outputs = layers.Dense(2, activation="softmax")(dropout_layer)
model = keras.Model(inputs=inputs, outputs=outputs, name="test_model")
keras.utils.plot_model(model, "my_first_model.png", show_shapes=True)
# NOTE(review): the output layer already applies softmax, yet the loss is
# constructed with from_logits=True — one of the two should change; confirm.
model.compile(optimizer='adam',
              loss=tf.keras.losses.BinaryCrossentropy(from_logits=True),
              metrics=['accuracy'])
train_data = load_text.train_data
test_data = load_text.test_data
model.fit(train_data, epochs=2, verbose=1)
test_loss, test_acc = model.evaluate(test_data, verbose=2)
print(test_acc)
|
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class People(Base):
    """ORM model for a person and their room allocations."""
    __tablename__ = 'people'
    id = Column(Integer, primary_key=True)
    # Unique external identifier for the person.
    person_identifier = Column(String(250), unique=True)
    person_name = Column(String(250))
    # NOTE(review): presumably 'fellow' / 'staff' — confirm the exact values
    # written by the callers.
    person_type = Column(String(250))
    wants_accomodation = Column(String(25))
    office_allocated = Column(String(250))
    living_space_allocated = Column(String(250))
    def __repr__(self):
        return "<Person(person_name='%s')>" % self.person_name
class Rooms(Base):
    """ORM model for a room and its capacity."""
    __tablename__ = 'rooms'
    id = Column(Integer, primary_key=True)
    room_name = Column(String(250), nullable=False)
    # NOTE(review): presumably 'office' / 'living_space' — confirm values.
    room_type = Column(String(250), nullable=False)
    room_capacity = Column(Integer)
    def __repr__(self):
        return "<Room(room_name='%s')>" % self.room_name
class DatabaseManager(object):
    """
    Creates a db connection object
    """
    def __init__(self, db_name=None):
        # Use the caller-supplied name (with a .sqlite suffix) or fall
        # back to the default database file.
        if db_name:
            self.db_name = db_name + '.sqlite'
        else:
            self.db_name = 'default_amity_db.sqlite'
        self.engine = create_engine('sqlite:///' + self.db_name)
        self.session = sessionmaker()
        self.session.configure(bind=self.engine)
        # Create any missing tables for all declarative models.
        Base.metadata.create_all(self.engine)
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    # South schema migration for the 'product' app: creates the Category,
    # Product and ProductSettings tables (South migrations are typically
    # auto-generated — edit with care).
    def forwards(self, orm):
        """Apply the migration: create the three product tables."""
        # Adding model 'Category'
        db.create_table('product_category', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('slug', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('active', self.gf('django.db.models.fields.BooleanField')(default=True)),
        ))
        db.send_create_signal('product', ['Category'])
        # Adding model 'Product'
        db.create_table('product_product', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('slug', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['product.Category'], null=True, blank=True)),
            ('price', self.gf('django.db.models.fields.DecimalField')(max_digits=10, decimal_places=2)),
            ('kdv', self.gf('django.db.models.fields.DecimalField')(max_digits=10, decimal_places=2)),
            ('total_price', self.gf('django.db.models.fields.DecimalField')(max_digits=10, decimal_places=2)),
            ('order', self.gf('django.db.models.fields.IntegerField')(default=0)),
            ('active', self.gf('django.db.models.fields.BooleanField')(default=True)),
        ))
        db.send_create_signal('product', ['Product'])
        # Adding model 'ProductSettings'
        db.create_table('product_productsettings', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('order_type', self.gf('django.db.models.fields.CharField')(max_length=1)),
            ('site_active', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('kdv_active', self.gf('django.db.models.fields.BooleanField')(default=True)),
        ))
        db.send_create_signal('product', ['ProductSettings'])
    def backwards(self, orm):
        """Reverse the migration: drop the three product tables."""
        # Deleting model 'Category'
        db.delete_table('product_category')
        # Deleting model 'Product'
        db.delete_table('product_product')
        # Deleting model 'ProductSettings'
        db.delete_table('product_productsettings')
    # Frozen ORM state used by South while running this migration.
    models = {
        'product.category': {
            'Meta': {'object_name': 'Category'},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'product.product': {
            'Meta': {'object_name': 'Product'},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['product.Category']", 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'kdv': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '2'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'price': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '2'}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'total_price': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '2'})
        },
        'product.productsettings': {
            'Meta': {'object_name': 'ProductSettings'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'kdv_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'order_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
            'site_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
        }
    }
    complete_apps = ['product']
# deployment
#DIRECTORY_ADDRESS = 'tcp://127.0.0.1:10001'
import sys
import os
import fabric.api as fabi
# sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')))
# import config_main as cfg
# import config_private as private
# Gas limits passed to the Ethereum client (hex string for deployment,
# plain int for ordinary transactions).
CONTRACT_GAS = "0x1000000000"
# TRANSACTION_GAS = "0x1000000"
TRANSACTION_GAS = 2000000
# 5205164
# 5169695
NUM_TYPES = 9999999999999
PRECISION = 4294967296  # 2**32 fixed-point scaling factor
MAX_QUANTITY = 100
START_INTERVAL = 1
END_INTERVAL = 100
INTERVAL_LENGTH = 60
SOLVING_INTERVAL = 5
POLLING_INTERVAL = 1  # seconds; used in actor to determine how often to check for events
# MINER_IP = '172.21.20.34'
# MINER_PORT = '10000'
# DIR_IP = '172.21.20.34'
# SOLVER_IP = '172.21.20.34'
# RECORDER_IP = '172.21.20.34'
|
from gym_tic_tac_toe.envs.tic_tac_toe_env import TicTacToeEnv
# from gym_tic_tac_toe.envs.gym_tic_tac_toe_extrahard_env import TicTacToeExtraHardEnv |
from Server import flask
# Entry point: start the Flask server on localhost:8088 (threaded, debug off).
if __name__ == '__main__':  # Main method
    flask.run(port=8088, debug=False, threaded=True, host="127.0.0.1")  # Starts server
|
from pyspark.sql.functions import udf
from pyspark.sql.types import ArrayType, StringType
import json
def regist_udf_str2arr(ss,logger):
logger.info("注册str2arr UDF", event="regist_udf")
def string2array(str):
if len(str) :
return ','.join(json.loads(str))
# arr = str.replace('\\"','').replace('\\[','').replace('\\]','')
# return arr
return None
str2arr_udf = udf(lambda str : string2array(str),StringType())
ss.udf.register("str2arr", string2array)
|
import config_readers
import os
from unittest.mock import Mock
from mock import patch
class TestReader:
    """Tests for the local and remote user-config readers."""

    def test_local_reading_config(self):
        # The fixture directory is expected to contain exactly two config files.
        reader = config_readers.LocalUserConfigReader('./tests/fixtures/test_user_configs/')
        res = reader.get_config_files()
        assert len(res) == 2

    def test_remote_reading_config(self):
        """Build a fake ConfigMap whose data maps names to open file objects,
        patch the configmap listing, and check the reader yields both configs."""
        test_file_path = './tests/fixtures/test_user_configs/'
        configmap_mock = Mock()
        configmap_mock.metadata.namespace = 'test_namespace'
        configmap_mock.metadata.name = 'test_configmap_name'
        configmap_data = dict()
        test_conf_num = 1
        for file in os.listdir(test_file_path):
            file_path = os.path.join(test_file_path, file)
            # NOTE(review): these file handles are never closed; relies on GC.
            # The reader presumably consumes the open handles — verify before
            # switching to reading the contents here.
            configmap_data['test_user_config ' + str(test_conf_num)] = open(file_path, 'r')
            test_conf_num += 1
        configmap_mock.data = configmap_data
        with patch('config_readers.RemoteUserConfigReader._get_configmap_list') \
                as get_configmap_list:
            get_configmap_list.return_value = [configmap_mock]
            config_reader = config_readers.RemoteUserConfigReader()
            assert len(config_reader.get_config_files()) == 2
|
# _compat.py - Python 2/3 compatibility
import sys
# True when running under a Python 2 interpreter.
PY2 = sys.version_info[0] == 2

if not PY2:  # pragma: no cover
    # Python 3: the native str type is already text.
    text_type = str

    def iteritems(d):
        """Return an iterator over the (key, value) pairs of *d*."""
        return iter(d.items())
else:  # pragma: no cover
    # Python 2: text is the unicode type and dicts expose iteritems().
    text_type = unicode

    def iteritems(d):
        """Return an iterator over the (key, value) pairs of *d*."""
        return d.iteritems()
|
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import matplotlib.pyplot as plt
import cv2
from sklearn.cluster import DBSCAN
from extra_functions import cluster_gen
import pcl
import numpy as np
import matplotlib.cm as cm
def DBScan():
    """Run a point-cloud preprocessing pipeline (voxel downsample, passthrough
    crop, RANSAC plane removal) on ./test_rgb.pcd and return the non-plane
    points' XYZ coordinates as a numpy array (color column dropped).

    Intermediate clouds are written to ./pcd_out/ for inspection.
    """
    # Load Point Cloud file
    cloud = pcl.load_XYZRGB('./test_rgb.pcd')
    # Voxel Grid Downsampling filter
    ################################
    # Create a VoxelGrid filter object for our input point cloud
    vox = cloud.make_voxel_grid_filter()
    # Choose a voxel (also known as leaf) size
    # Note: this (1) means 1mx1mx1m is a poor choice of leaf size
    # Experiment and find the appropriate size!
    #LEAF_SIZE = 0.01
    # NOTE(review): 45 is an extremely coarse leaf size compared with the
    # commented 0.01 — presumably tuned for this cloud's units; verify.
    LEAF_SIZE =45
    # Set the voxel (or leaf) size
    vox.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    # Call the filter function to obtain the resultant downsampled point cloud
    cloud_filtered = vox.filter()
    filename = './pcd_out/voxel_downsampled.pcd'
    pcl.save(cloud_filtered, filename)
    # PassThrough filter: keep points with 0 <= z <= 100
    ################################
    # Create a PassThrough filter object.
    passthrough = cloud_filtered.make_passthrough_filter()
    # Assign axis and range to the passthrough filter object.
    filter_axis = 'z'
    passthrough.set_filter_field_name(filter_axis)
    axis_min = 0
    axis_max = 100
    passthrough.set_filter_limits(axis_min, axis_max)
    # Finally use the filter function to obtain the resultant point cloud.
    cloud_filtered = passthrough.filter()
    filename = './pcd_out/pass_through_filtered.pcd'
    pcl.save(cloud_filtered, filename)
    # RANSAC plane segmentation
    ################################
    # Create the segmentation object
    seg = cloud_filtered.make_segmenter()
    # Set the model you wish to fit
    seg.set_model_type(pcl.SACMODEL_PLANE)
    seg.set_method_type(pcl.SAC_RANSAC)
    # Max distance for a point to be considered fitting the model
    # Experiment with different values for max_distance
    # for segmenting the table
    max_distance = 0.01
    seg.set_distance_threshold(max_distance)
    # Call the segment function to obtain set of inlier indices and model coefficients
    inliers, coefficients = seg.segment()
    # Extract outliers (points NOT on the fitted plane, i.e. the objects)
    # Save pcd for tabletop objects
    ################################
    extracted_outliers = cloud_filtered.extract(inliers, negative=True)
    e=np.asarray(extracted_outliers)
    #print e[:,:-1]
    filename = './pcd_out/extracted_outliers.pcd'
    pcl.save(extracted_outliers, filename)
    # Drop the last (RGB) column and return XYZ only for clustering.
    data = e[:,:-1]
    return(data)
|
#Created on 1/22/2015
#@author: Ryan Spies (rspies@lynkertech.com)
# Python 2.7
# This script reads CHPS csv file from QIN plot display and finds the start and end
# of the observed hourly QIN record, # of valid data points, and % of total available.
# Outputs summary data to csv file
import os
import pandas as pd
import numpy as np
import datetime
# Python 2 script: summarize observed QIN discharge records from CHPS/USGS CSV
# exports (count, date range, mean/max/min/stddev) into a summary CSV.
os.chdir("../..")
maindir = os.path.abspath(os.curdir)
############################### User input ###################################
##############################################################################
##### IMPORTANT: Make sure to call the correct CHPS .csv output columns ######
##### and specify the calibration period in next section
RFC = 'NCRFC_FY2017'
fx_group = '' # set to '' if not used
input_type = 'usgs' # choices: 'usgs' or 'chps'
# Output summary file location depends on whether a forecast group is set.
if fx_group != '':
    new_summary = open(maindir + os.sep +'Calibration_NWS' + os.sep + RFC[:5] + os.sep + RFC + os.sep + 'data_csv' + os.sep + 'QIN' + os.sep + fx_group + '_QIN_' + input_type + '_statistical_summary.csv','w')
    data_dir = fx_group + os.sep + 'merged_csv'
else:
    new_summary = open(maindir + os.sep +'Calibration_NWS' + os.sep + RFC[:5] + os.sep + RFC + os.sep + 'data_csv' + os.sep + 'QIN' + os.sep + 'QIN_' + input_type + '_statistical_summary.csv','w')
    data_dir = 'merged_csv'
############################ End User input ##################################
##############################################################################
################ Define the corresponding column of data in the csv file #################
call_date = 0
call_qin = 1
############ End User input ##################################################
# Input directory and header row count depend on the data source.
if input_type == 'usgs':
    csv_loc = maindir + os.sep +'Calibration_NWS' + os.sep + RFC[:5] + os.sep + RFC + os.sep + 'data_csv' + os.sep + 'QIN' + os.sep + data_dir
    header = 3
if input_type == 'chps':
    csv_loc = maindir + os.sep +'Calibration_NWS' + os.sep + RFC[:5] + os.sep + RFC + os.sep + 'data_csv' + os.sep + 'QIN' + os.sep + 'chps_export'
    header = 2
if input_type == 'ibwc':
    csv_loc = maindir + os.sep +'Calibration_NWS' + os.sep + RFC[:5] + os.sep + RFC + os.sep + 'data_csv' + os.sep + 'IBWC' + os.sep + 'chps_csv'
    new_summary = open(maindir + os.sep +'Calibration_NWS' + os.sep + RFC[:5] + os.sep + RFC + os.sep + 'data_csv' + os.sep + 'IBWC' + os.sep + 'QIN_' + input_type + '_statistical_summary.csv','w')
    header = 3
# Write the summary CSV header row.
new_summary.write('Basin/Gauge' + ',' + '# of Obs' + ',' + 'Start Date' + ',' + 'End Date'
                  + ',' + 'Mean QIN (cfs)' + ',' + 'Max QIN (cfs)' + ',' + 'Min QIN (cfs)' +','+ 'Standard Deviation (cfs)'
                  + ',' + 'Date Max' + ',' + 'Date Min' + '\n')
csv_files = os.listdir(csv_loc)
for csv_file in csv_files:
    # Basin/gauge identifier is the filename prefix before the first '_'.
    basin_name = csv_file.split('_')[0]
    print basin_name
    # NOTE(review): csv_read is never explicitly closed.
    csv_read = open(csv_loc + os.sep + csv_file,'r')
    ###### tab delimitted CHPS QIN dishcarge CSV file into panda arrays ###########
    test = pd.read_csv(csv_read,sep=',',skiprows=header,
                       usecols=[call_date,call_qin],parse_dates=['date'],names=['date', 'QIN'])
    ### assign column data to variables
    date_qin = test['date'].tolist() # convert to list (indexible)
    all_qin = test['QIN'].tolist()
    # find max/min of all data
    Q_mask = np.ma.masked_less(all_qin,0.0) # mask values less than 0 to ignore
    #Q_mask = np.ma.masked_less(all_qin,'-999') # mask values less than 0 to ignore
    #Q_mask = np.ma.masked_invalid(all_qin) # mask missing and 'nan' instances
    date_mask = np.ma.masked_where(np.ma.getmask(Q_mask) == True, date_qin) # mask dates containing missing discharge data
    Q_data = np.ma.compressed(Q_mask).tolist() # create list with only valid dishcharge data
    final_date = np.ma.compressed(date_mask).tolist() # create list with corresponding date
    if len(final_date) != len(Q_data):
        print 'WARNING -- Date and Discharge Data not the same length'
    if len(final_date) > 0 and len(Q_data) > 0: # check that there is flow data in csv file
        day_count = str(len(Q_data)) # number of valid daily data values
        start_date = str(min(final_date).strftime('%Y-%m-%d')) # date of first measurement
        end_date = str(max(final_date).strftime('%Y-%m-%d')) # date of last measurement
        mean_Q = str(np.average(Q_data)) # mean of all QME data
        max_Q = np.max(Q_data) # maximum of all QME
        max_index = Q_data.index(max_Q) # index of the max daily QME value
        date_max = str(final_date[max_index].strftime('%Y-%m-%d')) # date of maximum discharge
        min_Q = np.min(Q_data) # minimum of all QME
        min_indices = [i for i, x in enumerate(Q_data) if x == min_Q]
        if len(min_indices) > 1: # if more than 1 occurence of Q_min return "numerous"
            date_min = 'Numerous'
        else:
            min_index = Q_data.index(min_Q) # index of the minimum daily QME value
            date_min = str(final_date[min_index].strftime('%Y-%m-%d')) # date of minimum discharge
        sd = str(np.std(Q_data)) # standard deviation of QME data
        new_summary.write(basin_name+','+day_count+','+start_date+','+end_date+','+mean_Q+','
                          +str(max_Q)+','+str(min_Q)+','+sd+','+date_max+','+date_min+'\n')
    else:
        print 'No data in the csv file... please check -> ' + basin_name
new_summary.close()
print 'Finished!'
print datetime.datetime.now()
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 18 12:43:31 2019
@author: hi
"""
from tkinter import *
import csv
import pandas as pd
import numpy as np
import sklearn as sk
import keras
from keras.models import Sequential
from keras.layers import Dense
from sklearn.model_selection import train_test_split
# Build the Tk form: one labelled entry per predictor variable, plus a
# read-only-style Result field. The s* DoubleVar/StringVar objects are read
# later by callback()/predict1().
tk=Tk()
tk.title("Dengue prediction system")
tk.minsize(500,500)
l1=Label(tk,text="Fill the data",width=15,font=("bold",15))
l1.place(x=180,y=30)
l2=Label(tk,text="Rainfall",width=20,font=("bold",10))
l2.place(x=80,y=90)
s1=DoubleVar()
e1=Entry(tk,textvariable=s1)
e1.place(x=240,y=90)
s2=DoubleVar()
l3=Label(tk,text="Population density",width=20,font=("bold",10))
l3.place(x=80,y=130)
e2=Entry(tk,textvariable=s2)
e2.place(x=240,y=130)
s3=DoubleVar()
l4=Label(tk,text="Minimum Tempearture",width=20,font=("bold",10))
l4.place(x=80,y=170)
e3=Entry(tk,textvariable=s3)
e3.place(x=240,y=170)
s4=DoubleVar()
l5=Label(tk,text="Maxium temperature",width=20,font=("bold",10))
l5.place(x=80,y=210)
e4=Entry(tk,textvariable=s4)
e4.place(x=240,y=210)
s5=DoubleVar()
l6=Label(tk,text="Forest cover",width=20,font=("bold",10))
l6.place(x=80,y=250)
e5=Entry(tk,textvariable=s5)
e5.place(x=240,y=250)
s6=DoubleVar()
l7=Label(tk,text="Stagnant water",width=20,font=("bold",10))
l7.place(x=80,y=290)
# NOTE(review): e5 is rebound here, losing the Forest-cover Entry reference;
# harmless because only the s* variables are read, but worth renaming.
e5=Entry(tk,textvariable=s6)
e5.place(x=240,y=290)
s7=DoubleVar()
l8=Label(tk,text="WasteLand Accumulation",width=20,font=("bold",10))
l8.place(x=80,y=330)
e6=Entry(tk,textvariable=s7)
e6.place(x=240,y=330)
s8=StringVar()
l9=Label(tk,text="Result",width=20,font=("bold",10))
l9.place(x=80,y=370)
e7=Entry(tk,textvariable=s8)
e7.place(x=240,y=370)
def normalisemaxtemp(value,k1,k2):
    """Min-max scale a maximum-temperature *value* into [0, 1] given the
    dataset maximum *k1* and minimum *k2*."""
    # (value - min) / (max - min), mapped onto the target interval [0, 1].
    span = k1 - k2
    return float((value - k2) / span)
def normalisemintemp(value,k1,k2):
    """Min-max scale a minimum-temperature *value* into [0, 1] given the
    dataset maximum *k1* and minimum *k2*."""
    span = k1 - k2
    return (value - k2) / span
def normalisedenspop(value,k1,k2):
    """Min-max scale a population-density *value* into [0, 1] given the
    dataset maximum *k1* and minimum *k2*."""
    span = k1 - k2
    return (value - k2) / span
def normalisegeogarea(value,k1,k2):
    """Min-max scale a geographic-area (forest cover) *value* into [0, 1]
    given the dataset maximum *k1* and minimum *k2*."""
    span = k1 - k2
    return (value - k2) / span
def normaliserainfall(value,k1,k2):
    """Min-max scale a rainfall *value* into [0, 1] given the dataset
    maximum *k1* and minimum *k2*."""
    span = k1 - k2
    return float((value - k2) / span)
def normalise(value,k1,k2):
    """Generic min-max scaling of *value* into [0, 1] given the dataset
    maximum *k1* and minimum *k2*."""
    span = k1 - k2
    return float((value - k2) / span)
# Module-level default. NOTE(review): callback() assigns a *local* dmean and
# never updates this global, so predict1() always sees 0 here.
dmean=0
def callback():
    """Read the form values, min-max normalise them against the raw dataset
    ranges, and return the 7-element feature vector used for prediction."""
    l=[]
    df=pd.read_excel(r'C:\\Users\\hi\\Desktop\\raw_data.xlsx')
    df.columns=['YEAR','MONTH','MANDAL','PopDensity','Forest','Rainfall','TempMin','TempMax','StagWater','WasteLand','DENGUECASES']
    dmean1=np.mean(df['DENGUECASES'])
    d_max=max(df['DENGUECASES'])
    d_min=min(df['DENGUECASES'])
    #dmean=normalise(dmean1,d_max,d_min)
    # NOTE(review): this is a *local* dmean (hard-coded to the normalised case
    # mean); the module-level dmean is never updated.
    dmean=0.0159942
    k1=float(max(df['Rainfall']))
    k2=float(min(df['Rainfall']))
    str1=float(normaliserainfall(s1.get(),k1,k2))
    k1=max(df['PopDensity'])
    k2=min(df['PopDensity'])
    str2=normalisedenspop(s2.get(),k1,k2)
    # Both temperatures are scaled against the combined [TempMin, TempMax] range.
    k1=max(df['TempMax'])
    k2=min(df['TempMin'])
    str3=normalisemintemp(s3.get(),k1,k2)
    k1=max(df['TempMax'])
    k2=min(df['TempMin'])
    str4=normalisemaxtemp(s4.get(),k1,k2)
    k1=max(df['Forest'])
    k2=min(df['Forest'])
    str5=normalisegeogarea(s5.get(),k1,k2)
    l.append(str1)
    l.append(str2)
    l.append(str3)
    l.append(str4)
    l.append(str5)
    # Stagnant water / wasteland are used as raw integer flags (not normalised).
    l.append(int(s6.get()))
    l.append(int(s7.get()))
    #return l
    #with open(r"C:\\Users\\hi\\Desktop\\final_data.csv",'a') as csvfile:
    #    newfile=csv.writer(csvfile)
    #    newfile.writerow(l)
    return l
def predict1():
    """Show 'yes'/'no' in the Result entry by comparing the model output
    against a hard-coded threshold (the normalised mean case count)."""
    # NOTE(review): ynew is the module-level prediction computed at import
    # time, not a fresh prediction for the current form values.
    print("ynew",ynew)
    print("dmean",dmean)
    if(ynew>=0.0159942):
        e7.insert(20,"yes")
    else:
        e7.insert(20,"no")
# Wire up the buttons, then train a small dense Keras classifier on the
# pre-normalised CSV and run one prediction at import time.
b1=Button(tk,text="submit",width=20,bg="brown",fg="white",command=callback)
b1.place(x=140,y=400)
b2=Button(tk,text="predict",width=20,bg="brown",fg="white",command=predict1)
b2.place(x=300,y=400)
dataset=pd.read_csv('C:\\Users\\hi\\Desktop\\final_data.csv')
# First 7 columns are features, column 7 is the binary label.
x=dataset.iloc[:,:7].values
y=dataset.iloc[:,7].values
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size = 0.2)
# NOTE(review): init=/output_dim= are legacy Keras 1 keyword names — presumably
# matched to the installed Keras version; verify before upgrading.
model=Sequential()
model.add(Dense(input_dim=7,init='uniform',output_dim=80,activation='relu'))
model.add(Dense(output_dim=50,init='uniform',activation='relu'))
model.add(Dense(output_dim=20,init='uniform',activation='relu'))
model.add(Dense(output_dim=10,init='uniform',activation='relu'))
model.add(Dense(output_dim=1,init='uniform',activation='sigmoid'))
model.compile(optimizer='adam',loss='binary_crossentropy',metrics=['accuracy'])
model.fit(x_train,y_train,epochs=10)
print('------------------------------------------')
val_loss,val_acc=model.evaluate(x_test,y_test)
print(val_loss,val_acc)
#algorithm
# Predict once from the current (initially empty) form values.
ynew=model.predict(np.array(np.reshape(callback(),(1,7))))
print(ynew)
tk.mainloop()
|
from .dry_plugin_quart import (
test_quart_doc,
test_quart_no_response,
test_quart_return_model,
test_quart_skip_validation,
test_quart_validate,
test_quart_validation_error_response_status_code,
)
# Explicit public API of this module: the Quart test helpers re-exported above.
__all__ = [
    "test_quart_return_model",
    "test_quart_skip_validation",
    "test_quart_validation_error_response_status_code",
    "test_quart_doc",
    "test_quart_validate",
    "test_quart_no_response",
]
|
# Demonstrate str.join: print the letters a-e separated by single spaces.
table = [chr(code) for code in range(ord('a'), ord('f'))]
space = ' '
print(space.join(table))
|
import numpy as np
import json
import pickle
from ELMo.ELMoForManyLangs.elmoformanylangs import embedder
from ELMo.sent2elmo import sent2elmo
class Embedder:
    """
    The class responsible for loading a pre-trained ELMo model and provide the ``embed``
    functionality for downstream BCN model.

    You can modify this class however you want, but do not alter the class name and the
    signature of the ``embed`` function. Also, ``__init__`` function should always have
    the ``ctx_emb_dim`` parameter.
    """

    def __init__(self, n_ctx_embs, ctx_emb_dim):
        """
        The value of the parameters should also be specified in the BCN model config.
        """
        self.n_ctx_embs = n_ctx_embs
        self.ctx_emb_dim = ctx_emb_dim
        self.model_path = 'ELMo/final/model_0'
        config_path = 'ELMo/config/config.json'
        with open(config_path, 'r') as f:
            self.config = json.load(f)
        char_lexicon_path = 'ELMo/final/charLexicon.pkl'
        with open(char_lexicon_path, 'rb') as f:
            self.char_lexicon = pickle.load(f)
        self.device = 'cuda:0'
        self.elmo = sent2elmo(self.char_lexicon, self.config, self.device, self.model_path)

    def __call__(self, sentences, max_sent_len):
        """
        Generate the contextualized embedding of tokens in ``sentences``.

        Parameters
        ----------
        sentences : ``List[List[str]]``
            A batch of tokenized sentences.
        max_sent_len : ``int``
            All sentences must be truncated to this length.

        Returns
        -------
        ``np.ndarray``
            The contextualized embedding of the sentence tokens (dtype
            ``np.float32``), with an extra axis of size 1 inserted at
            position 2 for the single contextual-embedding layer.
        """
        max_len = min(max(map(len, sentences)), max_sent_len)
        padded = []
        for sent in sentences:
            # Build a truncated/padded copy: the previous implementation
            # popped/appended on ``sent`` itself, mutating the caller's lists.
            tokens = list(sent[:max_len])
            tokens.extend('<pad>' for _ in range(max_len - len(tokens)))
            padded.append(['<bos>'] + tokens + ['<eos>'])
        features = self.elmo.get_feature(padded)
        features = features.detach().cpu().numpy()
        # Insert the "number of contextual embeddings" axis expected downstream.
        features = np.expand_dims(features, axis=2)
        return features
|
from rest_framework import serializers
from .models import Stock
class StockSerializer(serializers.ModelSerializer):
    """Serialize a Stock plus whether the context user is tracking it."""

    # Computed per-request from the serializer context; never writable.
    is_tracking = serializers.SerializerMethodField(read_only=True)

    class Meta:
        model = Stock
        fields = ['ticker', 'company_name', 'is_tracking']

    def get_is_tracking(self, obj):
        """Return True if the 'user' passed in the serializer context is in
        obj.tracked_by; False otherwise (including when no user was given)."""
        user = self.context.get('user')
        # Membership test already yields the boolean — no if/else needed.
        return user in obj.tracked_by.all()
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-08-03 13:12
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: relax Reviews.rate to allow blank/null with default 0."""

    dependencies = [
        ('products', '0003_auto_20160803_1249'),
    ]

    operations = [
        migrations.AlterField(
            model_name='reviews',
            name='rate',
            field=models.IntegerField(blank=True, default=0, null=True),
        ),
    ]
|
#!/bin/python3
import math
import os
import random
import re
import sys
# Complete the plusMinus function below.
def plusMinus(arr):
    """Print the fractions of positive, negative and zero entries of *arr*,
    one per line in that order, formatted to six decimal places."""
    positives = sum(1 for v in arr if v > 0)
    negatives = sum(1 for v in arr if v < 0)
    zeros = len(arr) - positives - negatives
    for count in (positives, negatives, zeros):
        # A zero count prints 0.000000 without dividing, so an empty arr
        # cannot raise ZeroDivisionError (matches the original behavior).
        ratio = count / len(arr) if count > 0 else 0
        print('%.6f' % ratio)
if __name__ == '__main__':
    # n = int(input())
    # arr = list(map(int, input().rstrip().split()))
    # Stdin parsing is commented out; a fixed sample array is used instead.
    arr = [1,-1,-5,0,0,5,23,2,2,3]
    plusMinus(arr)
|
from pynput import keyboard
import os
from random import choice
from colorama import init, Fore, Back, Style
# colorama init
# Reset colorama styling after every print.
init(autoreset=True)
# Values for newly spawned tiles: 1 is twice as likely as 2.
NEW_ELEMENTS_CHOICE = [1, 1, 2]
# Foreground color per tile value ('.' is how an empty cell is rendered).
COLORS = {
    '.': Fore.WHITE,
    0: Fore.WHITE,
    1: Fore.WHITE,
    2: Fore.BLUE,
    4: Fore.CYAN,
    8: Fore.GREEN,
    16: Fore.RED,
    32: Fore.YELLOW,
    64: Fore.MAGENTA,
    128: Fore.LIGHTGREEN_EX,
    256: Fore.LIGHTBLUE_EX,
    512: Fore.LIGHTRED_EX,
    1024: Fore.LIGHTYELLOW_EX,
    2048: Fore.LIGHTCYAN_EX
}
FIELD_WIDTH = 3
FIELD_HEIGHT = 3
# generating field: FIELD_HEIGHT rows of FIELD_WIDTH zeros (0 = empty)
field = [[0 for j in range(FIELD_WIDTH)] for i in range(FIELD_HEIGHT)]
def on_press(key):
    """Keyboard listener callback: Esc quits, any other key is a move attempt."""
    # print(key)
    if key == keyboard.Key.esc:
        exit(0)
    do_turn(key)
def do_turn(key):
    """Slide/merge all tiles in the direction of the pressed arrow key, then
    spawn a new tile and redraw the board.

    For each cell, ``delta`` grows while the next cell in the move direction
    is empty or holds the same value; the tile is then added onto the target
    cell. NOTE(review): unlike standard 2048 rules, a tile may merge more than
    once per move, and the up/down branches do not skip empty cells the way
    left/right do — verify whether this is intentional.
    """
    if key == keyboard.Key.up:
        for i in range(FIELD_HEIGHT):
            for j in range(FIELD_WIDTH):
                el = field[i][j]
                delta = 0
                while True:
                    new_delta = delta + 1
                    if i - new_delta < 0:
                        break
                    if field[i - new_delta][j] != 0 and field[i - new_delta][j] != el:
                        break
                    delta = new_delta
                if delta != 0:
                    field[i - delta][j] += el
                    field[i][j] = 0
    elif key == keyboard.Key.down:
        for i in reversed(range(FIELD_HEIGHT)):
            for j in range(FIELD_WIDTH):
                el = field[i][j]
                delta = 0
                while True:
                    new_delta = delta - 1
                    if i - new_delta > FIELD_HEIGHT - 1:
                        break
                    if field[i - new_delta][j] != 0 and field[i - new_delta][j] != el:
                        break
                    delta = new_delta
                if delta != 0:
                    field[i - delta][j] += el
                    field[i][j] = 0
    elif key == keyboard.Key.left:
        # NOTE(review): i iterates over FIELD_WIDTH and j over FIELD_HEIGHT
        # (swapped vs. the up/down branches); only safe while the board is
        # square — confirm before changing the dimensions.
        for i in range(FIELD_WIDTH):
            for j in range(FIELD_HEIGHT):
                el = field[i][j]
                if el == 0:
                    continue
                delta = 0
                while True:
                    new_delta = delta + 1
                    if j - new_delta < 0:
                        break
                    if field[i][j - new_delta] != 0 and field[i][j - new_delta] != el:
                        break
                    delta = new_delta
                # print(delta)
                if delta != 0:
                    field[i][j - delta] += el
                    field[i][j] = 0
    elif key == keyboard.Key.right:
        # NOTE(review): reverses i (rows) rather than j (columns) for a
        # rightward move — verify intended traversal order.
        for i in reversed(range(FIELD_WIDTH)):
            for j in range(FIELD_HEIGHT):
                el = field[i][j]
                if el == 0:
                    continue
                delta = 0
                while True:
                    new_delta = delta - 1
                    if j - new_delta > FIELD_WIDTH - 1:
                        break
                    if field[i][j - new_delta] != 0 and field[i][j - new_delta] != el:
                        break
                    delta = new_delta
                # print(delta)
                if delta != 0:
                    field[i][j - delta] += el
                    field[i][j] = 0
    spawn()
    render()
def spawn():
    """Place a new tile on a randomly chosen empty cell.

    Returns True on success, or False when the board has no empty cell left.
    """
    empty_cells = [
        (row, col)
        for row, cells in enumerate(field)
        for col, cell in enumerate(cells)
        if cell == 0
    ]
    if not empty_cells:
        return False
    row, col = choice(empty_cells)
    field[row][col] = choice(NEW_ELEMENTS_CHOICE)
    return True
def render():
    """Clear the console and print the board, coloring each tile by value."""
    # NOTE(review): 'cls' is Windows-only; fails silently elsewhere — confirm
    # the target platform.
    os.system('cls')
    for i in field:
        els = []
        for j in i:
            el = j
            if el == 0:
                el = '.'  # render empty cells as dots
            el = COLORS[j] + str(el)
            els.append(str(el))
        print('')
        # One centered 15-char slot per cell on this row.
        # NOTE(review): the comprehension variable shadows the outer loop's i.
        s = ' '.join(['{:^15}' for i in els])
        # print(s)
        # print(els)
        print(s.format(*els))
def new_game():
    """Placeholder: restarting the game is not implemented yet."""
    pass
# Entry point: seed the first tile, draw the board, then block on keyboard events.
if __name__ == '__main__':
    spawn()
    render()
    with keyboard.Listener(
        on_press=on_press
    ) as listener:
        listener.join()
|
from .torchvggish import *
from .vggish_input import *
from .vggish_params import *
# Package name exposed for packaging/introspection tools.
name = "torchvggish"
|
from PikaObj import *
class Operator(TinyObj):
    """Stub signatures for the C-implemented arithmetic/logic operator module.

    Comparison/logic methods return int (0/1) rather than bool.
    NOTE(review): "graterThan" is a typo for "greaterThan", kept for API
    compatibility with the native implementation.
    """
    def plusInt(self, num1: int, num2: int) -> int: ...
    def plusFloat(self, num1: float, num2: float) -> float: ...
    def minusInt(self, num1: int, num2: int) -> int: ...
    def minusFloat(self, num1: float, num2: float) -> float: ...
    def equalInt(self, num1: int, num2: int) -> int: ...
    def equalFloat(self, num1: float, num2: float) -> int: ...
    def graterThanInt(self, num1: int, num2: int) -> int: ...
    def graterThanFloat(self, num1: float, num2: float) -> int: ...
    def lessThanInt(self, num1: int, num2: int) -> int: ...
    def lessThanFloat(self, num1: float, num2: float) -> int: ...
    def AND(self, flag1: int, flag2: int) -> int: ...
    def OR(self, flag1: int, flag2: int) -> int: ...
    def NOT(self, flag: int) -> int: ...
    def __str__(self) -> str: ...
    def __del__(self): ...
class Math(TinyObj):
    """Stub signatures for the C-implemented math module (mirrors CPython's
    ``math`` API subset)."""
    pi: float
    e: float
    def __init__(self):
        pass
    # Rounding and absolute value
    def ceil(self, x: float) -> int:
        pass
    def fabs(self, x: float) -> float:
        pass
    def floor(self, x: float) -> int:
        pass
    def fmod(self, x: float, y: float) -> float:
        pass
    def remainder(self, x: float, y: float) -> float:
        pass
    def trunc(self, x: float) -> float:
        pass
    # Power and logarithmic functions
    def exp(self, x: float) -> float:
        pass
    def log(self, x: float) -> float:
        pass
    def log2(self, x: float) -> float:
        pass
    def log10(self, x: float) -> float:
        pass
    def pow(self, x: float, y: float) -> float:
        pass
    def sqrt(self, x: float) -> float:
        pass
    # Trigonometric functions
    def acos(self, x: float) -> float:
        pass
    def asin(self, x: float) -> float:
        pass
    def atan(self, x: float) -> float:
        pass
    def atan2(self, x: float, y: float) -> float:
        pass
    def cos(self, x: float) -> float:
        pass
    def sin(self, x: float) -> float:
        pass
    def tan(self, x: float) -> float:
        pass
    # Angle conversion
    def degrees(self, x: float) -> float:
        pass
    def radians(self, x: float) -> float:
        pass
    # Hyperbolic functions
    def cosh(self, x: float) -> float:
        pass
    def sinh(self, x: float) -> float:
        pass
    def tanh(self, x: float) -> float:
        pass
class Quaternion(TinyObj):
    """Stub signatures for the C-implemented quaternion type."""
    def __init__(self):
        pass
    def set(self, x: float, y: float, z: float, w: float):
        "Set the components of the quaternion x*i + y*j + z*k + w."
        pass
    def get(self, key: int) -> float:
        pass
    def add(self, quat: Quaternion):
        pass
    def sub(self, quat: Quaternion):
        pass
    def mul(self, quat: Quaternion):
        pass
    def magnituded(self) -> float:
        pass
    def magnitudedsquare(self) -> float:
        pass
    def reverse(self):
        pass
    def inverse(self):
        pass
    def normalize(self):
        pass
    def isnormalize(self) -> int:
        pass
    def dot(self, quat: Quaternion) -> float:
        pass
    def crossproduct(self, quat: Quaternion):
        pass
    def fromEuler(self, yaw: float, pitch: float, roll: float, mode: int):
        "Build from Euler angles; mode=1 means degrees, mode=0 means radians."
        pass
    def toEuler(self) -> list:
        "Return the Euler angles in Z-Y-X rotation order."
        pass
|
import glob, os, sys
import numpy as np
from random import*
import matplotlib.pyplot as plt
import matplotlib.cm as cmx
import matplotlib.colors as colors
def get_rand_color(val):
    """Return a list of *val* '#rrggbb' hex color strings generated by walking
    the hue wheel from a random start, slowly raising saturation and lowering
    value every 5 colors.

    NOTE(review): relies on Python 2 integer division semantics in the index
    expressions (``5**int(h)/3%3`` and ``i%5/4``); not Python 3 compatible.
    """
    h,s,v = random()*6, 0.5, 243.2
    colors = []
    for i in range(val):
        h += 3.75#3.708
        # Pick an RGB permutation of (v, mid, low) based on the hue sector.
        tmp = ((v, v-v*s*abs(1-h%2), v-v*s)*3)[5**int(h)/3%3::int(h)%2+1][:3]
        colors.append('#' + '%02x' *3%tmp)
        # Every 5th color, shift saturation/value for the next group.
        if i%5/4:
            s += 0.1
            v -= 51.2
    return colors
def main(argv):
    """Plot a histogram of Spark task or stage execution times gathered from
    '<log>.N_taskTimes.csv' / '<log>.N_stageTimes.csv' files.

    NOTE(review): the *argv* parameter is ignored — all arguments are read
    directly from sys.argv (Python 2 script: uses print statements).
    """
    logFileName = sys.argv[1]
    isTask = sys.argv[2]
    plotTitle = sys.argv[3]
    print logFileName
    # get csv file from current directory
    if isTask == 'y':
        resultsCSV = sorted(glob.glob(logFileName + '.*_taskTimes.csv'))
        xLabel = 'Task Execution Times [msec]'
        yLabel = 'Number of Tasks'
    else:
        resultsCSV = sorted(glob.glob(logFileName + '.*_stageTimes.csv'))
        xLabel = 'Stage Execution Times [msec]'
        yLabel = 'Number of Stages'
    fig = plt.figure()
    ax1 = fig.add_subplot(111)
    # Concatenate every CSV after the first onto the running array.
    temp = np.genfromtxt(resultsCSV[0], delimiter = ',')
    for j in range(1, len(resultsCSV)): # TODO: check if range should start from 0 or 1
        temp1 = np.genfromtxt(resultsCSV[j], delimiter = ',')
        data = np.concatenate((temp1, temp))
        temp = data
        # NOTE(review): indentation reconstructed — verify temp1 reset belongs
        # inside the loop. Also: with a single CSV, 'data' is never assigned.
        temp1 = []
    # End − start columns: tasks use cols 4/3, stages use cols 3/2.
    if isTask == 'y':
        taskOrStageTimes = data[:, 4] - data[:, 3]
    else:
        taskOrStageTimes = data[:, 3] - data[:, 2]
    ax1.hist(taskOrStageTimes, bins=2000)
    ax1.set_xlim([500, 2000])
    ax1.set_xlabel(xLabel)
    ax1.set_ylabel(yLabel)
    ax1.set_title(logFileName)
    plt.show()
if __name__ == "__main__":
    # NOTE(review): only sys.argv[1] is passed, but main() ignores its
    # parameter and reads sys.argv directly — harmless but misleading.
    main(sys.argv[1])
|
import traceback
import numpy as np
import cv2,random,os,sys
from time import sleep as tmSleep
import logging
import time
from moviepy.audio.io.AudioFileClip import AudioFileClip
import xlrd
from openpyxl import load_workbook
from datetime import datetime
from moviepy.video.io.VideoFileClip import VideoFileClip
from moviepy.video.VideoClip import ImageClip
from moviepy.video.compositing.CompositeVideoClip import CompositeVideoClip
from moviepy.video.compositing.concatenate import concatenate_videoclips # new addition
from moviepy.audio.AudioClip import CompositeAudioClip
from PyQt5 import QtWidgets,QtGui,QtCore
from PyQt5.QtWidgets import * #QApplication,QToolTip,QLabel, QMainWindow ,QWidget,QDesktopWidget,QMessageBox,QLineEdit,QGridLayout, QFileDialog ,QAction, qApp,QMenu,QFrame,QColorDialog
from PyQt5.QtGui import * #QIcon, QColor,QIntValidator
from PyQt5.QtCore import * #Qt,QRunnable,QObject, QThread,QThreadPool, pyqtSignal, pyqtSlot
import sys
from PIL import ImageColor,Image, ImageFont, ImageDraw
from pathlib import Path
import shutil
class Worker(QThread):
countChanged = pyqtSignal(int) # progress bar
start_activation= pyqtSignal(bool) # start Button Activation deActivation
stopwork=pyqtSignal(bool)# pause the process
setError=pyqtSignal(str) # Send Error to GUI
try:
log_file_path=""
temp_path=""
Images_files_path_collection=[]
new_audfile=[]
filepath=""
stop_process=False
start_process=False
Limit_aud=0;
Intro_arr=[]
Outro_arr=[]
intro_sel_path=""
outro_sel_path=""
except Exception as error:
msg=str(error)
#self.setError.emit(msg)
def __init__(self,Excel_filepath,Image_folder_path,Input_intro_path,Input_outro_path,Input_audio_path,Input_dest_path,Input_text_color,Input_back_color,Input_time_Perslide,Aud,No_of_Row):
QThread.__init__(self)
self.Excel_filepath = Excel_filepath #GLobal class Variavbles
self.Image_folder_path=Image_folder_path
self.Input_intro_path=Input_intro_path
self.Input_outro_path=Input_outro_path
self.Input_audio_path=Input_audio_path
self.Input_text_color=Input_text_color
self.Input_back_color=Input_back_color
self.Input_time_Perslide=Input_time_Perslide
self.Input_dest_path=Input_dest_path
self.Aud=Aud;
self.No_of_Row=No_of_Row
temp_path=self.temp_path
Images_files_path_collection=self.Images_files_path_collection
filepath=self.filepath
stop_process=self.stop_process
log_file_path=self.log_file_path
new_audfile=self.new_audfile
Limit_aud=self.Limit_aud
Intro_arr=self.Intro_arr
Outro_arr=self.Outro_arr
intro_sel_path=self.intro_sel_path
outro_sel_path=self.outro_sel_path
#@pyqtSlot()
def on_stopprocess(self,val):
try:
if(val==True):
self.stop_process=True
print("QThread terminated")
print('IN THREAD AREA WE R',self.stop_process)
self.Logger("process Stop Successfull")
except Exception as error:
msg=str(error)
var = traceback.format_exc()
print(var)
self.Logger(var)
self.setError.emit(msg)
def run(self):
    """Worker entry point: read every row of the Excel sheet and build one
    video per row, emitting progress and start-button signals along the way.

    NOTE(review): indentation was reconstructed from a whitespace-mangled
    source; loop/if nesting below is the most plausible reading — confirm
    against the original file.
    """
    try:
        if self.stop_process==False:
            # NOTE(review): misleading message — this branch runs when
            # processing STARTS, not when it stops.
            print("process is going to stop")
            start_btn=False
            self.start_activation.emit(start_btn)  # disable the Start button in the UI
            self.store_audio() # Store Aud Files
            self.store_intro_outro_path() # store Intro outro Paths
            print("Intro Arr",self.Intro_arr)
            print("Outro Arr",self.Outro_arr)
            TIME_LIMIT=100  # progress-bar scale (percent)
            self.Create_temp_directory();
            self.resize_images()
            print("OK")
            Row_Of_Questions=[];
            temp_arr=[]
            wb = xlrd.open_workbook(self.Excel_filepath)
            sheet = wb.sheet_by_index(0)
            arr=[]
            rows = sheet.nrows
            columns = sheet.ncols
            # NOTE(review): stores the COLUMN count under a "Row" name.
            self.No_of_Row=columns
            print("rows:",rows)
            print("cols:",columns)
            for k in range(sheet.nrows):
                temp_arr.append(sheet.row_values(k))
            row_count=0;
            # Keep only non-None rows; row_count sizes the progress steps.
            for val in temp_arr:
                if val != None :
                    Row_Of_Questions.append(val)
                    row_count=row_count+1;
            #print(Row_Of_Questions)
            count=0;
            count_add_audio=0;
            video_count=0;
            video_path=[];
            count_progress=0;
            print("No of Rows:",self.No_of_Row)
            print(TIME_LIMIT)
            p_dur=0
            for x in Row_Of_Questions:
                print(x)
                if self.stop_process == False:
                    # NOTE(review): ZeroDivisionError here if the sheet had no rows;
                    # it would be swallowed by the except below.
                    p_dur=TIME_LIMIT/row_count # for progressBar
                    print("No _of Rows",row_count)
                    print("count_progress:",count_progress)
                    print("p_dur:",p_dur)
                    count=0;
                    #questions=x.split(',');
                    img_paths=[]
                    count_for_image_path=0;
                    # Render one image per non-empty cell in this row.
                    for q in x:
                        if q:
                            if self.stop_process == False:
                                #print("Questions at a Row :",q)
                                count=count+1;
                                count_for_image_path=count_for_image_path+1;
                                #print("NextRow")
                                path=self.create_image(q,count,count_for_image_path);
                                img_paths.append(path);
                    video_count=video_count+1;
                    if self.stop_process == False:
                        self.create_video(img_paths,video_count);
                    if self.stop_process == False:
                        count_progress =count_progress+p_dur
                        if(count_progress>0 and count_progress<=TIME_LIMIT):
                            self.countChanged.emit(count_progress)
                    #count_progress =count_progress+p_dur
                    #self.Add_Audio(video_path,video_count);
                    #QApplication.processEvents()
            self.Remove_temp_files()
            start_btn=True
            self.start_activation.emit(start_btn)  # re-enable the Start button
            self.Logger("Excel file Row fetch part Done Success")
    except Exception as error:
        msg=str(error)
        var = traceback.format_exc()
        #print(var)
        self.Logger(var)
        self.setError.emit(msg)
def Remove_temp_files(self):
    """Delete the temporary working directory (self.temp_path) and all of
    its contents, logging success or surfacing the error to the UI.
    """
    try:
        target = self.temp_path
        try:
            shutil.rmtree(target)
            self.Logger("Temporary files Deleted Permanetly Sucess")
        except OSError as os_err:
            # Deletion failure is reported to the UI rather than raised.
            self.setError.emit(str(os_err))
    except Exception as unexpected:
        self.Logger(traceback.format_exc())
        self.setError.emit(str(unexpected))
def Create_temp_directory(self):
    """Create <Input_dest_path>/Temp (and any missing parents) and remember
    it in self.temp_path for the rest of the pipeline.
    """
    try:
        # os.path.join keeps this portable; the original hard-coded chr(92)
        # (a backslash), which only worked on Windows. On Windows the
        # resulting path is identical.
        self.temp_path = os.path.join(self.Input_dest_path, "Temp")
        Path(self.temp_path).mkdir(parents=True, exist_ok=True)  # no-op if it already exists
        self.Logger("Temporaray directory created Successfully")
    except Exception as error:
        msg = str(error)
        var = traceback.format_exc()
        self.Logger(var)
        self.setError.emit(msg)
def resize_images(self):
    """Collect the paths of all background images in Image_folder_path into
    self.Images_files_path_collection (create_image picks from it randomly).

    Fixes vs. the original: the "png" test was missing its dot (it matched
    any name merely *ending* in "png", e.g. "foopng"), and every image was
    opened via PIL just to read a size that was never used — leaking file
    handles. Images are now validated lazily when cv2 reads them.
    """
    try:
        os.chdir(self.Image_folder_path)  # kept: later code relies on the cwd change
        path = self.Image_folder_path
        for file in os.listdir('.'):
            # Tuple endswith replaces the or-chain; ".png" now requires the dot.
            if file.endswith((".jpg", ".jpeg", ".png")):
                self.Images_files_path_collection.append(os.path.join(path, file))
        self.Logger("Find Images from Input Folder Sucessfully")
    except Exception as error:
        msg = str(error)
        var = traceback.format_exc()
        self.Logger(var)
        self.setError.emit(msg)
def Check_Caps(self, test_text):
    """Classify `test_text` by letter case for layout-width purposes.

    Returns 2 for all-uppercase text (wider glyphs need wider boxes),
    otherwise 1 (lower-case, mixed-case, digits and empty strings).

    The original chained three redundant ifs (lower→1, upper→2,
    neither→1), so every input except all-uppercase mapped to 1; this
    collapses the chain to one expression with identical results.
    """
    try:
        count = 2 if test_text.isupper() else 1
        self.Logger("Check Capital Letters Sucessfully")
        return count
    except Exception as e:
        var = traceback.format_exc()
        self.Logger(var)
def create_image(self,text,i,count_for_image_path):
    """Render question `text` onto a randomly chosen background image and
    save it as a JPEG in the temp directory; returns the saved file's path.

    A semi-transparent rectangle is blended behind the text, sized by how
    many wrapped lines split_text() produces, then the text is drawn line
    by line with PIL.

    NOTE(review): indentation reconstructed from a whitespace-mangled
    source; the rectangle if/elif chain is assumed to sit inside the
    `for i in chunks:` loop.
    """
    try:
        frame_no=i;
        count_img=count_for_image_path;
        test_text=text
        # Random background from the pool built by resize_images().
        path=random.choice(self.Images_files_path_collection)
        image=cv2.imread(path);
        image = cv2.resize(image,(1280,720))
        overlay = image.copy()  # rectangle is drawn here, then alpha-blended back
        image_new=""
        #image = cv2.resize(image,(1280,720)) # resize the image
        ## font = cv2.FONT_HERSHEY_COMPLEX;
        ## org = (70, 80)
        ## org_center=(400,80)
        ## org_large_center=(200,80)
        fontScale = 1
        print("color:",self.Input_text_color)
        color = self.Input_text_color #(255, 255, 255, .4)
        thickness = 2 # Line thickness of 2 px
        alpha = 0.15  # transparency of the text-background rectangle
        color_transparent = self.Input_back_color#(255, 20, 147);
        print("Text COlor __________",color)
        print("Tranparent COlor __________",color_transparent)
        # print("length_of_input_string:",len(test_text))
        #print(image.shape)
        # Pixel width of the whole (unwrapped) text at Hershey scale 1.
        labelSize=cv2.getTextSize(test_text,cv2.FONT_HERSHEY_COMPLEX,1,1)
        width_of_rectangle=labelSize[0][0]+20;
        # print("Text_size_width",labelSize[0][0])
        #ori=cv2.rectangle(image, (50, 30), (1230,100), (255, 255, 255, .4), 10)
        val=0;
        val=self.Check_Caps(test_text)  # 2 = all caps (wider), else 1
        rectangle_count=0;
        chunks=self.split_text(test_text,val)  # wrapped lines of the question
        print("val:",val)
        # Pick/blend the background rectangle; height grows with line count.
        for i in chunks:
            rectangle_count=rectangle_count+1;
            # print("rectangle_count :",rectangle_count)
            rectangle_measure=labelSize[0][0]/1140  # NOTE(review): computed but unused
            # print("rectangle_measure_for text sizw:",rectangle_measure)
            if(len(test_text)<30 and val==1):
                ori=cv2.rectangle(overlay, (270, 30), (960,100),color_transparent , -1)
                image_new = cv2.addWeighted(overlay, alpha, image, 1 - alpha, 0)
                ori=cv2.rectangle(image_new, (263, 23), (967,107), (255, 255, 255, .4), 10)
            elif(len(test_text)>=30 and len(test_text)<50 and val==1):
                ori=cv2.rectangle(overlay, (170, 30), (1080,100),color_transparent , -1)
                image_new = cv2.addWeighted(overlay, alpha, image, 1 - alpha, 0)
                ori=cv2.rectangle(image_new, (163, 23), (1087,107), (255, 255, 255, .4), 10)
            elif(len(test_text)<40 and val==2):
                ori=cv2.rectangle(overlay, (170, 30), (1080,100),color_transparent , -1)
                image_new = cv2.addWeighted(overlay, alpha, image, 1 - alpha, 0)
                ori=cv2.rectangle(image_new, (163, 23), (1087,107), (255, 255, 255, .4), 10)
            elif(len(test_text)>=40 and len(test_text)<51 and val==2):
                print("hit rec row 4 ")
                ori=cv2.rectangle(overlay, (50, 30), (1240,100),color_transparent , -1)
                image_new = cv2.addWeighted(overlay, alpha, image, 1 - alpha, 0)
                ori=cv2.rectangle(image_new, (43, 23), (1247,107), (255, 255, 255, .4), 10)
            elif (len(test_text)>=50 and len(test_text)<=74 and val==1):
                ori=cv2.rectangle(overlay, (50, 30), (1240,100),color_transparent , -1)
                image_new = cv2.addWeighted(overlay, alpha, image, 1 - alpha, 0)
                ori=cv2.rectangle(image_new, (43, 23), (1247,107), (255, 255, 255, .4), 10)
            # Below: one branch per wrapped-line count; box bottom grows 50px per line.
            elif (rectangle_count==1):
                ori=cv2.rectangle(overlay, (50, 30), (1240,100),color_transparent , -1)
                image_new = cv2.addWeighted(overlay, alpha, image, 1 - alpha, 0)
                ori=cv2.rectangle(image_new, (43, 23), (1247,107), (255, 255, 255, .4), 10)
            elif (rectangle_count==2):
                ori=cv2.rectangle(overlay, (50, 30), (1240,150),color_transparent , -1)
                image_new = cv2.addWeighted(overlay, alpha, image, 1 - alpha, 0)
                ori=cv2.rectangle(image_new, (43, 23), (1247,157), (255, 255, 255, .4), 10)
            elif (rectangle_count==3):
                ori=cv2.rectangle(overlay, (50, 30), (1240,200),color_transparent , -1)
                image_new = cv2.addWeighted(overlay, alpha, image, 1 - alpha, 0)
                ori=cv2.rectangle(image_new, (43, 23), (1247,207), (255, 255, 255, .4), 10)
            elif (rectangle_count==4):
                ori=cv2.rectangle(overlay, (50, 30), (1240,250),color_transparent , -1)
                image_new = cv2.addWeighted(overlay, alpha, image, 1 - alpha, 0)
                ori=cv2.rectangle(image_new, (43, 23), (1247,257), (255, 255, 255, .4), 10)
            elif (rectangle_count==5):
                ori=cv2.rectangle(overlay, (50, 30), (1240,300),color_transparent , -1)
                image_new = cv2.addWeighted(overlay, alpha, image, 1 - alpha, 0)
                ori=cv2.rectangle(image_new, (43, 23), (1247,307), (255, 255, 255, .4), 10)
            elif (rectangle_count==6):
                ori=cv2.rectangle(overlay, (50, 30), (1240,350),color_transparent , -1)
                image_new = cv2.addWeighted(overlay, alpha, image, 1 - alpha, 0)
                ori=cv2.rectangle(image_new, (43, 23), (1247,357), (255, 255, 255, .4), 10)
            elif (rectangle_count==7):
                ori=cv2.rectangle(overlay, (50, 30), (1240,400),color_transparent , -1)
                image_new = cv2.addWeighted(overlay, alpha, image, 1 - alpha, 0)
                ori=cv2.rectangle(image_new, (43, 23), (1247,407), (255, 255, 255, .4), 10)
            elif (rectangle_count==8):
                ori=cv2.rectangle(overlay, (50, 30), (1240,450),color_transparent , -1)
                image_new = cv2.addWeighted(overlay, alpha, image, 1 - alpha, 0)
                ori=cv2.rectangle(image_new, (43, 23), (1247,457), (255, 255, 255, .4), 10)
            elif (rectangle_count==9):
                ori=cv2.rectangle(overlay, (50, 30), (1240,500),color_transparent , -1)
                image_new = cv2.addWeighted(overlay, alpha, image, 1 - alpha, 0)
                ori=cv2.rectangle(image_new, (43, 23), (1247,507), (255, 255, 255, .4), 10)
            elif (rectangle_count==10):
                ori=cv2.rectangle(overlay, (50, 30), (1240,550),color_transparent , -1)
                image_new = cv2.addWeighted(overlay, alpha, image, 1 - alpha, 0)
                ori=cv2.rectangle(image_new, (43, 23), (1247,557), (255, 255, 255, .4), 10)
            elif (rectangle_count==11):
                ori=cv2.rectangle(overlay, (50, 30), (1240,600),color_transparent , -1)
                image_new = cv2.addWeighted(overlay, alpha, image, 1 - alpha, 0)
                ori=cv2.rectangle(image_new, (43, 23), (1247,607), (255, 255, 255, .4), 10)
            elif (rectangle_count==12):
                ori=cv2.rectangle(overlay, (50, 30), (1240,650),color_transparent , -1)
                image_new = cv2.addWeighted(overlay, alpha, image, 1 - alpha, 0)
                ori=cv2.rectangle(image_new, (43, 23), (1247,657), (255, 255, 255, .4), 10)
            elif (rectangle_count==13):
                ori=cv2.rectangle(overlay, (50, 30), (1240,700),color_transparent , -1)
                image_new = cv2.addWeighted(overlay, alpha, image, 1 - alpha, 0)
                ori=cv2.rectangle(image_new, (43, 23), (1247,707), (255, 255, 255, .4), 10)
        print("Length of string",len(test_text))
        length_of_input_string=len(test_text)
        ## font dec;aration here :
        img_pil = Image.fromarray(image_new)
        draw = ImageDraw.Draw(img_pil)
        # specified font size
        # NOTE(review): hard-coded absolute font path — breaks on any other machine.
        fontpath = r"D:\projects_freelance\Video_creator\fINAL\Cabin-Bold.ttf"
        font = ImageFont.truetype(fontpath, 35)
        #font = ImageFont.truetype("Cabin-Bold.ttf",35)
        #check Capital string here
        # Draw the text: multi-line cases iterate the wrapped chunks 50px apart;
        # short cases draw the whole string once, x-offset by length bucket.
        if(length_of_input_string>74 and val==1):
            print("value:",val)
            chunks=self.split_text(test_text,val)
            print(chunks);
            count=50;
            for i in chunks:
                print("chunk:",i)
                print("Length of Chunk is :",len(i))
                print("count:",count)
                # drawing text size
                draw.text((60, count), i, font = font, align ="left" ,fill=color)
                img = np.array(img_pil)
                count=count+50;
                print("count after :",count)
        elif(length_of_input_string<30 and val==1):
            print("hit this !!! ")
            draw.text((400, 50), test_text, font = font, align ="left",fill=color)
            img = np.array(img_pil)
        elif(length_of_input_string>=30 and length_of_input_string<50 and val==1):
            # drawing text size
            print("hit this ")
            draw.text((200, 50), test_text, font = font, align ="left",fill=color)
            img = np.array(img_pil)
        elif( length_of_input_string>=50 and length_of_input_string<=74 and val == 1 ):
            # drawing text size
            print("lesss than 75 hit _________________________________________________________________!!!!")
            draw.text((60, 50), test_text, font = font, align ="left",fill=color)
            img = np.array(img_pil)
        elif(length_of_input_string>51 and val == 2):
            chunks=self.split_text(test_text,val)
            print(chunks);
            print("hitr me")
            count=50;
            for i in chunks:
                print("chunk:",i)
                print("Length of Chunk is :",len(i))
                print("count:",count)
                # drawing text size
                draw.text((60, count), i, font = font, align ="left",fill=color)
                img = np.array(img_pil)
                count=count+50;
                print("count after :",count)
        elif(length_of_input_string>=40 and length_of_input_string<51 and val == 2):
            print("hit row 4")
            draw.text((60, 50), test_text, font = font, align ="left",fill=color)
            img = np.array(img_pil)
        elif(length_of_input_string<40 and val==2):
            draw.text((200, 50), test_text, font = font, align ="left",fill=color)
            img = np.array(img_pil)
        img_folder_path=self.temp_path
        output_img=self.Input_dest_path;
        unique_no=datetime.now().timestamp()  # timestamp uniquifies the file name
        unique_No=str(unique_no)
        d=chr(92)  # backslash — Windows-only path building
        out=img_folder_path+d+"img"+unique_No+'.jpg'
        print("des_path:",out)
        # NOTE(review): `img` is never assigned when no draw branch matches
        # (e.g. val==2 with length exactly 51) — the resulting NameError is
        # swallowed by the except below and None is returned.
        cv2.imwrite(out,img);
        output_image_path=out
        return output_image_path;
        # NOTE(review): unreachable — placed after the return.
        self.Logger("Images creadted Successfully!!!")
    except Exception as error:
        msg=str(error)
        var = traceback.format_exc()
        self.Logger(var)
        self.setError.emit(msg)
def Logger(self, message, level=0):
    """Append a timestamped line to <Input_dest_path>/logs.txt.

    message: text to record.
    level:   nesting depth; each level prefixes the message with '**'.

    Fixes vs. the original: (1) on the very first call only the header line
    was written and the message itself was silently dropped — now the header
    is written once and the message is always appended; (2) the path was
    built with chr(92) and then os.path.join'ed against itself — now joined
    once, portably; (3) a failure inside Logger recursed back into Logger,
    risking infinite recursion on an unwritable log file — now swallowed.
    """
    log_file_path = os.path.join(self.Input_dest_path, "logs.txt")
    try:
        if not os.path.isfile(log_file_path):
            with open(log_file_path, 'w') as f:
                # Trailing newline added so the first entry starts on its own line.
                f.write('Logs for process started on' + str(datetime.now()) + '\n')
        with open(log_file_path, 'a') as f:
            log = ('**' * level) + message
            f.write(str(datetime.now()) + ' ' + str(log) + '\n')
    except Exception:
        # Logging must never take the worker down (and must not recurse).
        pass
# Path-building helper for the video-creation functions below.
def path_generator(self, i):
    """Build the three output paths used for row video `i`.

    Returns a tuple (out, out2, out3):
      out  — temp silent slide video        <temp>/Row<i>video.mp4
      out2 — temp video with audio muxed    <temp>/Rowvideo_aud<stamp>.mp4
      out3 — final destination              <dest>/Row<i>video<stamp>.mp4

    Fixes vs. the original: paths are built with os.path.join instead of a
    hard-coded chr(92) backslash (identical on Windows, portable elsewhere),
    and the "yout" typo in the error message is corrected.
    On failure this still returns None implicitly, as before — callers that
    index the result will then raise and be caught by their own handlers.
    """
    try:
        row_no = str(i)
        stamp = str(datetime.now().timestamp())  # uniquifies file names
        out = os.path.join(self.temp_path, "Row" + row_no + "video" + ".mp4")   # temporary path
        out2 = os.path.join(self.temp_path, "Rowvideo_aud" + stamp + ".mp4")    # audio-muxed intermediate
        out3 = os.path.join(self.Input_dest_path, "Row" + row_no + "video" + stamp + ".mp4")  # destination path
        self.Logger("path generator Runs Successfully ")
        return out, out2, out3
    except Exception as error:
        var = traceback.format_exc()
        self.Logger(var)
        msg = "Input Folder does not exist in your system"
        self.setError.emit(msg)
def create_question(self,img_path,i):
    """Write the silent slide-show video for row `i` at 1 fps — intro image,
    `duration` frames per question slide, outro image — then hand off to
    Add_Audio() and concat_video().

    Still-image intros/outros are baked into this video; .mp4 intros/outros
    are spliced around it later in concat_video().
    """
    try:
        # Random intro/outro chosen from the folders scanned earlier.
        self.intro_sel_path=random.choice(self.Intro_arr)
        self.outro_sel_path=random.choice(self.Outro_arr)
        print("intro",self.intro_sel_path)
        print("outro",self.outro_sel_path)
        count=i
        result=self.path_generator(count)
        out=result[0]    # temp silent video
        out2=result[1]   # temp audio-muxed video
        out3=result[2]   # final destination
        count_dest=0;    # counts how many of intro/outro are still images
        #set duration
        duration=int(self.Input_time_Perslide)  # seconds (= frames at 1 fps) per slide
        # Add a fource
        fourcc = cv2.VideoWriter_fourcc(*'mp4v')
        width=1280
        height=720
        video = cv2.VideoWriter(out, fourcc, 1, (width, height))  # 1 fps writer
        #read Intro image first
        if(self.intro_sel_path.endswith(".jpg") or self.intro_sel_path.endswith(".jpeg") or self.intro_sel_path.endswith(".jfif") or self.intro_sel_path.endswith(".png")):
            image_intro=cv2.imread(self.intro_sel_path)
            image_intro = cv2.resize(image_intro,(1280,720))
            count_dest=count_dest+1;
            for i in range(duration):
                video.write(image_intro)
        #read Questions in video
        for i in img_path:
            for x in range (duration):
                video.write(cv2.imread(i))
        #read outro image
        # print("outro path :",self.Input_outro_path)
        if(self.outro_sel_path.endswith(".jpg") or self.outro_sel_path.endswith(".jpeg") or self.outro_sel_path.endswith(".jfif") or self.outro_sel_path.endswith(".png")):
            image_outro=cv2.imread(self.outro_sel_path)
            image_outro= cv2.resize(image_outro,(1280,720))
            count_dest=count_dest+1;
            for i in range(duration):
                video.write(image_outro)
        cv2.destroyAllWindows()
        video.release()
        # presumably lets the writer finish flushing before the file is
        # re-read by Add_Audio — TODO confirm this sleep is still needed
        time.sleep(2)
        self.Logger("Question Videos Sucess!!!!!!!")
        self.Add_Audio(out,out2,out3,count_dest)
        self.concat_video(out,out2,out3)
    except Exception as error:
        var = traceback.format_exc()
        self.Logger(var)
        msg=str(error)
        self.setError.emit(msg)
def concat_video(self,out,out2,out3):
    """Splice .mp4 intro/outro clips around the audio-backed row video
    (`out2`) and write the final file `out3`.

    Three branches: mp4 intro + mp4 outro, image intro + mp4 outro, and
    mp4 intro + image outro. When BOTH intro and outro are still images no
    branch runs here — that case was already handled by Add_Audio, which
    writes directly to `out3` (count_dest==2).
    `out` (the silent temp video) is unused in this method.
    """
    try:
        if(self.intro_sel_path.endswith(".mp4") ) and(self.outro_sel_path.endswith(".mp4") ):
            clip1 = VideoFileClip(self.intro_sel_path,audio=True)
            clip3=VideoFileClip(self.outro_sel_path,audio=True)
            clip2=VideoFileClip(out2,audio=True)
            final = concatenate_videoclips([clip1,clip2,clip3],method="compose")
            final.write_videofile(out3)
            clip1.close()
            clip2.close()
            clip3.close()
        if(self.intro_sel_path.endswith(".jpg") or self.intro_sel_path.endswith(".jpeg") or self.intro_sel_path.endswith(".jfif") or self.intro_sel_path.endswith(".png")) and(self.outro_sel_path.endswith(".mp4")):
            # Image intro is already baked into out2; only append the outro clip.
            clip3 = VideoFileClip(self.outro_sel_path,audio=True)
            clip2=VideoFileClip(out2,audio=True)
            final = concatenate_videoclips([clip2,clip3],method="compose")
            final.write_videofile(out3)
            clip3.reader.close()
            #clip3.audio.reader.close_proc()
            clip2.reader.close()
            clip2.audio.reader.close_proc()
        if(self.intro_sel_path.endswith(".mp4") ) and(self.outro_sel_path.endswith(".jpg") or self.outro_sel_path.endswith(".png") or self.outro_sel_path.endswith(".jpeg") or self.outro_sel_path.endswith(".jfif")):
            # Image outro is already baked into out2; only prepend the intro clip.
            clip1 = VideoFileClip(self.intro_sel_path,audio=True)
            clip2=VideoFileClip(out2,audio=True)
            final = concatenate_videoclips([clip1,clip2],method="compose")
            print("out2 path:",out2)
            final.write_videofile(out3)
            clip1.close()
            clip2.close()
        self.Logger("Conactenation of Vidoes Success!!")
    except Exception as error:
        var = traceback.format_exc()
        self.Logger(var)
        msg=str(error)
        self.setError.emit(msg)
def create_video(self, img_path, i):
    """Thin wrapper: build the video for row `i` from the rendered slide
    images in `img_path` by delegating to create_question().

    The original also bound `img_path`/`i` to unused locals
    (Image_path/count); those dead assignments are removed.
    """
    try:
        self.create_question(img_path, i)
        self.Logger("Create video Function Run Successfully !!!!!!!!")
    except Exception as error:
        var = traceback.format_exc()
        self.Logger(var)
        msg = str(error)
        self.setError.emit(msg)
def Trim_Audio(self,dur):
    """Cut a randomly selected (pre-validated) background audio file down to
    `dur` seconds and write it into the Temp directory.

    Returns the trimmed .mp3's path, or None when validate_audio() yielded
    no file (or on error).
    """
    try:
        output_img=self.Input_dest_path;
        unique_no=datetime.now().timestamp()  # timestamp uniquifies the file name
        unique_No=str(unique_no)
        d=chr(92)  # backslash — Windows-only path building
        temp_audio=output_img+d+"Temp"+d+"audrow"+unique_No+".mp3" #temporarry path
        # print("Audio_path:",temp_audio)
        file=self.validate_audio() #self.Input_audio_path
        if file:
            snd=AudioFileClip(file)
            nsnd=snd.subclip(0,dur) #'00:00','00:10')
            nsnd.write_audiofile(temp_audio)
            snd.close()
            self.Logger("Trim Audio Success!!!")
            return temp_audio
        else:
            self.Logger("Failed to get a Audio path from Validate Audio Func")
    except Exception as error:
        var = traceback.format_exc()
        self.Logger(var)
        msg=str(error)
        self.setError.emit(msg)
def Add_Audio(self,path1,path2,path3,count_dest):
    """Trim background audio to the length of the silent video `path1` and
    mux it in, writing the result to `path2`.

    When count_dest==2 (both intro and outro were still images) there is no
    later concat step, so the output is written straight to the final
    destination `path3` instead.
    """
    try:
        if(count_dest==2):
            path2=path3  # skip the concat stage: write directly to the destination
        # print("video_path:",path1)
        vidname= path1 #"C:\\Users\\Lenovo\\Desktop\\Edited_images\\video1.mp4"
        #audname=self.Input_audio_path;
        my_clip = VideoFileClip(vidname)
        dur=my_clip.duration
        s=int(dur)  # whole seconds of the silent video
        trim_aud_path=self.Trim_Audio(s)
        if trim_aud_path:
            audio_background = AudioFileClip(trim_aud_path)
            #print("Duration of Video:",my_clip.duration)
            #print("Duration of Audio:",audio_background.duration)
            final_clip = my_clip.set_audio(audio_background)
            #fps=int(self.Aud)
            # NOTE(review): fps = duration-1 looks suspicious (fps tied to
            # clip length, zero/negative for very short clips) — confirm intent.
            final_clip.write_videofile(path2,fps=s-1)
            audio_background.close()
            my_clip.close()
            final_clip.close()
            self.Logger("Add Audio Success")
    except Exception as error:
        var = traceback.format_exc()
        self.Logger(var)
        msg="Error occired In Audio try different Audio File "
        self.setError.emit(msg)
def store_audio(self):
    """Scan Input_audio_path recursively for .mp3 files long enough to cover
    one row's video (per-slide time * columns, plus intro and outro) and
    cache the qualifying paths in self.new_audfile; abort the run when none
    qualify.
    """
    try:
        wb = xlrd.open_workbook(self.Excel_filepath)
        sheet = wb.sheet_by_index(0)
        arr=[]
        rows = sheet.nrows
        columns = sheet.ncols
        self.No_of_Row=columns  # NOTE(review): stores the COLUMN count under a "Row" name
        os.chdir(self.Input_audio_path)
        path=self.Input_audio_path #self.Input_audio_path_first
        limit=0;
        # NOTE(review): this local shadows the `time` module within this method.
        time=int(self.Input_time_Perslide)
        print("time:",time)
        col=columns
        # Minimum usable audio length: every slide plus intro+outro slots.
        limit=(time*col)+(time*2)
        print("LIMIT:",limit)
        count_audio=0;
        if rows ==0:
            print(self.No_of_Row)
            self.setError.emit("Your Excel File is Empty Enter a Valid Excel File !!!!!! ")
        else:
            audfile=[]
            # Collect every .mp3 under the audio folder.
            for root, dirs, files in os.walk(path):
                for file in files:
                    p=os.path.join(root,file)
                    if p.endswith(".mp3"):
                        audfile.append(os.path.abspath(p))
            size=0;
            # Keep only files strictly longer than the limit.
            for i in audfile:
                audio_background = AudioFileClip(i)
                print("Duration of Audio:",audio_background.duration)
                aud_duration=audio_background.duration
                print("aud_duration:",aud_duration)
                size=int(aud_duration)
                audio_background.close()
                print("limit:",limit)
                print("Size:",size)
                if(limit<size):
                    self.new_audfile.append(i)
                    count_audio=count_audio+1;
            if(count_audio>0):
                print("AudFIles:",self.new_audfile)
                print("--------------------------------------------------------------------------------------")
                self.Logger("Store Valid Audio Files Successfully!!!!")
            else:
                print("else Hit ___________________________________________________________________________________________ else ")
                self.Logger("Error Occured No Audio Files Meet Condition They are smaller ")
                self.setError.emit("Error Occured Enter a Audio files which has size greater than "+str(limit)+" seconds")
                self.on_stopprocess(True)  # no usable audio: cancel the whole run
    except Exception as error:
        msg=str(error)
        var = traceback.format_exc()
        self.Logger(var)
        self.setError.emit(msg)
def store_intro_outro_path(self):
    """Recursively scan the intro and outro input folders and cache the
    absolute paths of all media files (images or .mp4) in self.Intro_arr
    and self.Outro_arr respectively.

    The original duplicated the walk/filter logic for each folder and
    chained five endswith() calls; both folders now share one helper and a
    tuple-based endswith.
    """
    try:
        print("func intro outro!!!!!")
        media_exts = (".jpg", ".jpeg", ".jfif", ".png", ".mp4")

        def _collect(folder, dest):
            # Walk `folder` and append absolute paths of media files to `dest`.
            os.chdir(folder)  # kept: the original changed cwd per folder
            for root, dirs, files in os.walk(folder):
                for file in files:
                    p = os.path.join(root, file)
                    if p.endswith(media_exts):
                        dest.append(os.path.abspath(p))

        _collect(self.Input_intro_path, self.Intro_arr)
        _collect(self.Input_outro_path, self.Outro_arr)
        #print("Input Arr:",self.Intro_arr)
        #print("Outro Arr ",self.Outro_arr)
        self.Logger("Intro Outro Files are fetch From Input Folder are Done Sucesssfully!!!!!")
    except Exception as error:
        msg = str(error)
        var = traceback.format_exc()
        self.Logger(var)
        self.setError.emit(msg)
def validate_audio(self):
    """Pick a random cached audio file strictly longer than self.Limit_aud
    seconds and return its path; emit an error and stop the run otherwise.

    NOTE(review): if no cached file exceeds the limit, the while loop below
    never terminates — confirm store_audio() guarantees at least one
    qualifying file before this is called.
    """
    try:
        size=0;
        audpath=random.choice(self.new_audfile)
        limit=self.Limit_aud
        audio_background = AudioFileClip(audpath)
        print("Duration of Audio:",audio_background.duration)
        aud_duration=audio_background.duration
        print("aud_duration:",aud_duration)
        size=int(aud_duration)
        audio_background.close()
        if limit>size:
            # Keep redrawing at random until a long-enough file comes up.
            while limit>size:
                audpath=random.choice(self.new_audfile)
                audio_background = AudioFileClip(audpath)
                print("Duration of Audio:",audio_background.duration)
                aud_duration=audio_background.duration
                print("aud_duration:",aud_duration)
                size=int(aud_duration)
                audio_background.close()
                print("audpath:",audpath)
        print("final path:",audpath)
        if size>limit:
            self.Logger("Audio Selection from Given Directory Sucessfully")
            return audpath;
        else:
            # NOTE(review): size == limit lands here and aborts the run.
            self.Logger("Error Occured No Audio meets Condition Size of Audio is too much less")
            self.setError.emit("Error Occured Enter a Audio files which has size greater than "+str(limit)+" seconds")
            self.stop_process=True
    except Exception as error:
        msg=str(error)
        var = traceback.format_exc()
        self.Logger(var)
        self.setError.emit(msg)
def split_text(self, t, val):
    """Greedily word-wrap `t` into display lines for create_image().

    t:   text to wrap.
    val: case class from Check_Caps — 1 allows 74 chars per line, 2
         (all-uppercase, wider glyphs) allows 53.
    Returns a list of non-empty line strings, each with a trailing space.
    Returns None on error (e.g. val outside {1, 2}), as before.

    The original accumulated into 12 hard-coded variables (S1..S12) and
    silently dropped any text past the 12th line; this list-based rewrite
    is behavior-identical for up to 12 lines and generalizes beyond.
    """
    try:
        if val == 1:
            no = 74
        if val == 2:
            no = 53
        # `no` is deliberately left unset for other val values so that the
        # NameError is caught below, matching the original's behavior.
        lines = [""]
        L = 0  # running length of the current line (word lengths + separators)
        for q in t.split():
            L = len(q) + L + 1
            if L > no:
                # Start a new line; the word begins the new line's length.
                L = len(q) + 1
                lines.append("")
            if L <= no:
                lines[-1] += q + " "
            # else: a single word longer than `no` is dropped (as before).
        self.Logger("Split the text Function Success!!!")
        return [s for s in lines if len(s) > 0]
    except Exception as error:
        var = traceback.format_exc()
        self.Logger(var)
        msg = str(error)
        self.setError.emit(msg)
class MyWindow(QMainWindow):
    """Main application window: gathers user inputs (Excel file, image /
    intro / outro / audio folders, colors, per-slide time, destination) and
    hosts the controls that launch the worker thread.

    NOTE(review): only part of this class is visible in this chunk; initUI
    continues past it.
    """
    sig = pyqtSignal(bool)
    # Class-level defaults, used as shared state and overwritten per
    # instance by __init__ and the various file/color pickers.
    Excel_filepath= "" # path to the questions spreadsheet
    Image_folder_path="" # folder of background images
    Input_intro_path="" # folder of intro images/videos
    Input_outro_path="" # folder of outro images/videos
    Input_audio_path="" # folder of background .mp3 files
    Input_text_color="" # text color (set to an RGB tuple in __init__)
    Input_back_color="" # text-box background color
    Input_time_Perslide=0 # seconds each slide stays on screen
    Input_dest_path="" # output folder
    temp_path="" # working directory created under the destination
    filepath=""
    No_of_Row=0 # NOTE(review): actually holds the sheet's column count elsewhere
    Error_audio=False
    Aud=0;
    cancel_msg=False
    log_file_path=""
    count_error=False
    Error_Audio_Folder=False
def __init__(self):
    """Build the main window: widgets, screen centering, icon and default
    colors.

    The original re-read sixteen class attributes into unused local
    variables here (`Excel_filepath = self.Excel_filepath`, ...); those
    no-op assignments are removed — behavior is unchanged.
    """
    super(MyWindow, self).__init__()
    self.initUI()
    self.center()
    self.setWindowIcon(QIcon('icons8-commodore-amiga-480.png'))
    # Default text/background colors (black); the color pickers overwrite these.
    self.Input_text_color = (0, 0, 0)
    self.Input_back_color = (0, 0, 0)
    #self.Error_Audio_Folder=False
def button_clicked(self):
    """Slot: show feedback text on the label after a button press."""
    feedback = "you pressed the button"
    self.label.setText(feedback)
    self.update()
def initUI(self):
sigstop = pyqtSignal(int)
self.setWindowFlags(Qt.WindowCloseButtonHint | Qt.WindowMinimizeButtonHint | Qt.CustomizeWindowHint)
self.threadpool = QThreadPool() # delette it
self.setGeometry(200, 200, 800, 580)
#self.statusBar().showMessage('Ready')
self.setWindowTitle('Video Creator')
#color
col = QColor(0,0,0)
#print(self.filepath)
#self.label = QtWidgets.QLabel(self)
#self.label.setText("we r we will")
#self.label.move(300,50)
#line Edits for Every Button
self.le = QLineEdit(self)
self.le.move(200, 50)
self.le.setFixedWidth(520)
self.sel_folder=QLineEdit(self)
self.sel_folder.move(200,100)
self.sel_folder.setFixedWidth(520)
self.sel_intro=QLineEdit(self)
self.sel_intro.move(200,150)
self.sel_intro.setFixedWidth(520)
self.sel_outro=QLineEdit(self)
self.sel_outro.move(200,200)
self.sel_outro.setFixedWidth(520)
self.sel_audio=QLineEdit(self)
self.sel_audio.move(200,350)
self.sel_audio.setFixedWidth(520)
self.sel_dest=QLineEdit(self)
self.sel_dest.move(200,300)
self.sel_dest.setFixedWidth(520)
self.sel_time=QLineEdit(self) # timw for one question
self.sel_time.move(670,250)
self.sel_time.setFixedWidth(50)
#self.sel_time.setValidator(QIntValidator(1, 20))
regex=QtCore.QRegExp("/^[1-9]$|^[1-9]$|^1[0-9]$|^20$/")
validator=QtGui.QRegExpValidator(regex, self.sel_time)
self.sel_time.setValidator(validator)
#self.sel_time.returnPressed.connect(self.Validate_audio)
try:
self.Input_time_Perslide=self.sel_time.text()
except Exception as e:
self.sel_msg.setText("Error Occured Enter Correct Time")
#self.Input_time_Perslide=self.sel_time.text()
self.sel_msg=QLineEdit(self)
self.sel_msg.move(200,490)
self.sel_msg.setFixedWidth(520)
self.btn_msg = QtWidgets.QPushButton(self)
self.btn_msg.setEnabled(False)
self.btn_msg.setText("Messages")
self.btn_msg.clicked.connect(self.openExcelFileNameDialog)
self.btn_msg.move(50,490)
self.btn_msg.setStyleSheet('QPushButton{ background-color: #FF0000; border-radius: 6px;color:white; min-width: 100px;}')
self.setStyleSheet("""
QProgressBar:horizontal {
border: 1px solid #3A3939;
text-align: center;
padding: 1px;
background: #201F1F;
}
QProgressBar::chunk:horizontal {
background-color: qlineargradient(spread:reflect, x1:1, y1:0.545, x2:1, y2:0, stop:0 rgba(28, 66, 111, 255), stop:1 rgba(37, 87, 146, 255));
}
QToolTip
{
border: 1px solid #3A3939;
background-color: rgb(90, 102, 117);;
color: white;
padding: 1px;
opacity: 200;
}
QWidget
{
color: silver;
background-color: #302F2F;
selection-background-color:#3d8ec9;
selection-color: black;
background-clip: border;
border-image: none;
outline: 0;
}
QWidget:item:hover
{
background-color: #78879b;
color: black;
}
QWidget:item:selected
{
background-color: #3d8ec9;
}
QCheckBox
{
spacing: 5px;
outline: none;
color: #bbb;
margin-bottom: 2px;
}
QCheckBox:disabled
{
color: #777777;
}
QCheckBox::indicator,
QGroupBox::indicator
{
width: 18px;
height: 18px;
}
QGroupBox::indicator
{
margin-left: 2px;
}
QCheckBox::indicator:unchecked,
QCheckBox::indicator:unchecked:hover,
QGroupBox::indicator:unchecked,
QGroupBox::indicator:unchecked:hover
{
image: url(:/dark_blue/img/checkbox_unchecked.png);
}
QCheckBox::indicator:unchecked:focus,
QCheckBox::indicator:unchecked:pressed,
QGroupBox::indicator:unchecked:focus,
QGroupBox::indicator:unchecked:pressed
{
border: none;
image: url(:/dark_blue/img/checkbox_unchecked_focus.png);
}
QCheckBox::indicator:checked,
QCheckBox::indicator:checked:hover,
QGroupBox::indicator:checked,
QGroupBox::indicator:checked:hover
{
image: url(:/dark_blue/img/checkbox_checked.png);
}
QCheckBox::indicator:checked:focus,
QCheckBox::indicator:checked:pressed,
QGroupBox::indicator:checked:focus,
QGroupBox::indicator:checked:pressed
{
border: none;
image: url(:/dark_blue/img/checkbox_checked_focus.png);
}
QCheckBox::indicator:indeterminate,
QCheckBox::indicator:indeterminate:hover,
QCheckBox::indicator:indeterminate:pressed
QGroupBox::indicator:indeterminate,
QGroupBox::indicator:indeterminate:hover,
QGroupBox::indicator:indeterminate:pressed
{
image: url(:/dark_blue/img/checkbox_indeterminate.png);
}
QCheckBox::indicator:indeterminate:focus,
QGroupBox::indicator:indeterminate:focus
{
image: url(:/dark_blue/img/checkbox_indeterminate_focus.png);
}
QCheckBox::indicator:checked:disabled,
QGroupBox::indicator:checked:disabled
{
image: url(:/dark_blue/img/checkbox_checked_disabled.png);
}
QCheckBox::indicator:unchecked:disabled,
QGroupBox::indicator:unchecked:disabled
{
image: url(:/dark_blue/img/checkbox_unchecked_disabled.png);
}
QRadioButton
{
spacing: 5px;
outline: none;
color: #bbb;
margin-bottom: 2px;
}
QRadioButton:disabled
{
color: #777777;
}
QRadioButton::indicator
{
width: 21px;
height: 21px;
}
QRadioButton::indicator:unchecked,
QRadioButton::indicator:unchecked:hover
{
image: url(:/dark_blue/img/radio_unchecked.png);
}
QRadioButton::indicator:unchecked:focus,
QRadioButton::indicator:unchecked:pressed
{
border: none;
outline: none;
image: url(:/dark_blue/img/radio_unchecked_focus.png);
}
QRadioButton::indicator:checked,
QRadioButton::indicator:checked:hover
{
border: none;
outline: none;
image: url(:/dark_blue/img/radio_checked.png);
}
QRadioButton::indicator:checked:focus,
QRadioButton::indicato::menu-arrowr:checked:pressed
{
border: none;
outline: none;
image: url(:/dark_blue/img/radio_checked_focus.png);
}
QRadioButton::indicator:indeterminate,
QRadioButton::indicator:indeterminate:hover,
QRadioButton::indicator:indeterminate:pressed
{
image: url(:/dark_blue/img/radio_indeterminate.png);
}
QRadioButton::indicator:checked:disabled
{
outline: none;
image: url(:/dark_blue/img/radio_checked_disabled.png);
}
QRadioButton::indicator:unchecked:disabled
{
image: url(:/dark_blue/img/radio_unchecked_disabled.png);
}
QMenuBar
{
background-color: #302F2F;
color: silver;
}
QMenuBar::item
{
background: transparent;
}
QMenuBar::item:selected
{
background: transparent;
border: 1px solid #3A3939;
}
QMenuBar::item:pressed
{
border: 1px solid #3A3939;
background-color: #3d8ec9;
color: black;
margin-bottom:-1px;
padding-bottom:1px;
}
QMenu
{
border: 1px solid #3A3939;
color: silver;
margin: 1px;
}
QMenu::icon
{
margin: 1px;
}
QMenu::item
{
padding: 2px 2px 2px 25px;
margin-left: 5px;
border: 1px solid transparent; /* reserve space for selection border */
}
QMenu::item:selected
{
color: black;
}
QMenu::separator {
height: 2px;
background: lightblue;
margin-left: 10px;
margin-right: 5px;
}
QMenu::indicator {
width: 16px;
height: 16px;
}
/* non-exclusive indicator = check box style indicator
(see QActionGroup::setExclusive) */
QMenu::indicator:non-exclusive:unchecked {
image: url(:/dark_blue/img/checkbox_unchecked.png);
}
QMenu::indicator:non-exclusive:unchecked:selected {
image: url(:/dark_blue/img/checkbox_unchecked_disabled.png);
}
QMenu::indicator:non-exclusive:checked {
image: url(:/dark_blue/img/checkbox_checked.png);
}
QMenu::indicator:non-exclusive:checked:selected {
image: url(:/dark_blue/img/checkbox_checked_disabled.png);
}
/* exclusive indicator = radio button style indicator (see QActionGroup::setExclusive) */
QMenu::indicator:exclusive:unchecked {
image: url(:/dark_blue/img/radio_unchecked.png);
}
QMenu::indicator:exclusive:unchecked:selected {
image: url(:/dark_blue/img/radio_unchecked_disabled.png);
}
QMenu::indicator:exclusive:checked {
image: url(:/dark_blue/img/radio_checked.png);
}
QMenu::indicator:exclusive:checked:selected {
image: url(:/dark_blue/img/radio_checked_disabled.png);
}
QMenu::right-arrow {
margin: 5px;
image: url(:/dark_blue/img/right_arrow.png)
}
QWidget:disabled
{
color: #808080;
background-color: #302F2F;
}
QAbstractItemView
{
alternate-background-color: #3A3939;
color: silver;
border: 1px solid 3A3939;
border-radius: 2px;
padding: 1px;
}
QWidget:focus, QMenuBar:focus
{
border: 1px solid #78879b;
}
QTabWidget:focus, QCheckBox:focus, QRadioButton:focus, QSlider:focus
{
border: none;
}
QLineEdit
{
background-color: #201F1F;
padding: 2px;
border-style: solid;
border: 1px solid #3A3939;
border-radius: 10px;
}
QGroupBox {
border:1px solid #3A3939;
border-radius: 2px;
margin-top: 20px;
background-color: #302F2F;
color: silver;
}
QGroupBox::title {
subcontrol-origin: margin;
subcontrol-position: top center;
padding-left: 10px;
padding-right: 10px;
padding-top: 10px;
}
QAbstractScrollArea
{
border-radius: 2px;
border: 1px solid #3A3939;
background-color: transparent;
}
QScrollBar:horizontal
{
height: 15px;
margin: 3px 15px 3px 15px;
border: 1px transparent #2A2929;
border-radius: 4px;
background-color: #2A2929;
}
QScrollBar::handle:horizontal
{
background-color: #605F5F;
min-width: 5px;
border-radius: 4px;
}
QScrollBar::add-line:horizontal
{
margin: 0px 3px 0px 3px;
border-image: url(:/dark_blue/img/right_arrow_disabled.png);
width: 10px;
height: 10px;
subcontrol-position: right;
subcontrol-origin: margin;
}
QScrollBar::sub-line:horizontal
{
margin: 0px 3px 0px 3px;
border-image: url(:/dark_blue/img/left_arrow_disabled.png);
height: 10px;
width: 10px;
subcontrol-position: left;
subcontrol-origin: margin;
}
QScrollBar::add-line:horizontal:hover,QScrollBar::add-line:horizontal:on
{
border-image: url(:/dark_blue/img/right_arrow.png);
height: 10px;
width: 10px;
subcontrol-position: right;
subcontrol-origin: margin;
}
QScrollBar::sub-line:horizontal:hover, QScrollBar::sub-line:horizontal:on
{
border-image: url(:/dark_blue/img/left_arrow.png);
height: 10px;
width: 10px;
subcontrol-position: left;
subcontrol-origin: margin;
}
QScrollBar::up-arrow:horizontal, QScrollBar::down-arrow:horizontal
{
background: none;
}
QScrollBar::add-page:horizontal, QScrollBar::sub-page:horizontal
{
background: none;
}
QScrollBar:vertical
{
background-color: #2A2929;
width: 15px;
margin: 15px 3px 15px 3px;
border: 1px transparent #2A2929;
border-radius: 4px;
}
QScrollBar::handle:vertical
{
background-color: #605F5F;
min-height: 5px;
border-radius: 4px;
}
QScrollBar::sub-line:vertical
{
margin: 3px 0px 3px 0px;
border-image: url(:/dark_blue/img/up_arrow_disabled.png);
height: 10px;
width: 10px;
subcontrol-position: top;
subcontrol-origin: margin;
}
QScrollBar::add-line:vertical
{
margin: 3px 0px 3px 0px;
border-image: url(:/dark_blue/img/down_arrow_disabled.png);
height: 10px;
width: 10px;
subcontrol-position: bottom;
subcontrol-origin: margin;
}
QScrollBar::sub-line:vertical:hover,QScrollBar::sub-line:vertical:on
{
border-image: url(:/dark_blue/img/up_arrow.png);
height: 10px;
width: 10px;
subcontrol-position: top;
subcontrol-origin: margin;
}
QScrollBar::add-line:vertical:hover, QScrollBar::add-line:vertical:on
{
border-image: url(:/dark_blue/img/down_arrow.png);
height: 10px;
width: 10px;
subcontrol-position: bottom;
subcontrol-origin: margin;
}
QScrollBar::up-arrow:vertical, QScrollBar::down-arrow:vertical
{
background: none;
}
QScrollBar::add-page:vertical, QScrollBar::sub-page:vertical
{
background: none;
}
QTextEdit
{
background-color: #201F1F;
color: silver;
border: 1px solid #3A3939;
}
QPlainTextEdit
{
background-color: #201F1F;;
color: silver;
border-radius: 2px;
border: 1px solid #3A3939;
}
QHeaderView::section
{
background-color: #3A3939;
color: silver;
padding-left: 4px;
border: 1px solid #6c6c6c;
}
QSizeGrip {
image: url(:/dark_blue/img/sizegrip.png);
width: 12px;
height: 12px;
}
QMainWindow
{
background-color: #302F2F;
}
QMainWindow::separator
{
background-color: #302F2F;
color: white;
padding-left: 4px;
spacing: 2px;
border: 1px dashed #3A3939;
}
QMainWindow::separator:hover
{
background-color: #787876;
color: white;
padding-left: 4px;
border: 1px solid #3A3939;
spacing: 2px;
}
QMenu::separator
{
height: 1px;
background-color: #3A3939;
color: white;
padding-left: 4px;
margin-left: 10px;
margin-right: 5px;
}
QFrame
{
border-radius: 8px;
border: 1px solid #444;
padding:2px;
}
QFrame[frameShape="0"]
{
border-radius: 8px;
border: 1px transparent #444;
}
QStackedWidget
{
background-color: #302F2F;
border: 1px transparent black;
}
QToolBar {
border: 1px transparent #393838;
background: 1px solid #302F2F;
font-weight: bold;
}
QToolBar::handle:horizontal {
image: url(:/dark_blue/img/Hmovetoolbar.png);
}
QToolBar::handle:vertical {
image: url(:/dark_blue/img/Vmovetoolbar.png);
}
QToolBar::separator:horizontal {
image: url(:/dark_blue/img/Hsepartoolbar.png);
}
QToolBar::separator:vertical {
image: url(:/dark_blue/img/Vsepartoolbars.png);
}
QPushButton
{
color: silver;
background-color: #302F2F;
border-width: 2px;
border-color: #4A4949;
border-style: solid;
padding-top: 2px;
padding-bottom: 2px;
padding-left: 10px;
padding-right: 10px;
border-radius: 4px;
/* outline: none; */
/* min-width: 40px; */
}
QPushButton:disabled
{
background-color: #302F2F;
border-width: 2px;
border-color: #3A3939;
border-style: solid;
padding-top: 2px;
padding-bottom: 2px;
padding-left: 10px;
padding-right: 10px;
/*border-radius: 2px;*/
color: #808080;
}
QPushButton:focus {
background-color: #3d8ec9;
color: white;
}
QComboBox
{
selection-background-color: #3d8ec9;
background-color: #201F1F;
border-style: solid;
border: 1px solid #3A3939;
border-radius: 2px;
padding: 2px;
min-width: 75px;
}
QPushButton:checked{
background-color: #4A4949;
border-color: #6A6969;
}
QPushButton:hover {
border: 2px solid #78879b;
color: silver;
}
QComboBox:hover, QAbstractSpinBox:hover,QLineEdit:hover,QTextEdit:hover,QPlainTextEdit:hover,QAbstractView:hover,QTreeView:hover
{
border: 1px solid #78879b;
color: silver;
}
QComboBox:on
{
background-color: #626873;
padding-top: 3px;
padding-left: 4px;
selection-background-color: #4a4a4a;
}
QComboBox QAbstractItemView
{
background-color: #201F1F;
border-radius: 2px;
border: 1px solid #444;
selection-background-color: #3d8ec9;
color: silver;
}
QComboBox::drop-down
{
subcontrol-origin: padding;
subcontrol-position: top right;
width: 15px;
border-left-width: 0px;
border-left-color: darkgray;
border-left-style: solid;
border-top-right-radius: 3px;
border-bottom-right-radius: 3px;
}
QComboBox::down-arrow
{
image: url(:/dark_blue/img/down_arrow_disabled.png);
}
QComboBox::down-arrow:on, QComboBox::down-arrow:hover,
QComboBox::down-arrow:focus
{
image: url(:/dark_blue/img/down_arrow.png);
}
QPushButton:pressed
{
background-color: #484846;
}
QAbstractSpinBox {
padding-top: 2px;
padding-bottom: 2px;
border: 1px solid #3A3939;
background-color: #201F1F;
color: silver;
border-radius: 2px;
min-width: 75px;
}
QAbstractSpinBox:up-button
{
background-color: transparent;
subcontrol-origin: border;
subcontrol-position: top right;
}
QAbstractSpinBox:down-button
{
background-color: transparent;
subcontrol-origin: border;
subcontrol-position: bottom right;
}
QAbstractSpinBox::up-arrow,QAbstractSpinBox::up-arrow:disabled,QAbstractSpinBox::up-arrow:off {
image: url(:/dark_blue/img/up_arrow_disabled.png);
width: 10px;
height: 10px;
}
QAbstractSpinBox::up-arrow:hover
{
image: url(:/dark_blue/img/up_arrow.png);
}
QAbstractSpinBox::down-arrow,QAbstractSpinBox::down-arrow:disabled,QAbstractSpinBox::down-arrow:off
{
image: url(:/dark_blue/img/down_arrow_disabled.png);
width: 10px;
height: 10px;
}
QAbstractSpinBox::down-arrow:hover
{
image: url(:/dark_blue/img/down_arrow.png);
}
QLabel
{
border: 0px solid black;
}
QTabWidget{
border: 1px transparent black;
}
QTabWidget::pane {
border: 1px solid #444;
border-radius: 3px;
padding: 3px;
}
QTabBar
{
qproperty-drawBase: 0;
left: 5px; /* move to the right by 5px */
}
QTabBar:focus
{
border: 0px transparent black;
}
QTabBar::close-button {
image: url(:/dark_blue/img/close.png);
background: transparent;
}
QTabBar::close-button:hover
{
image: url(:/dark_blue/img/close-hover.png);
background: transparent;
}
QTabBar::close-button:pressed {
image: url(:/dark_blue/img/close-pressed.png);
background: transparent;
}
/* TOP TABS */
QTabBar::tab:top {
color: #b1b1b1;
border: 1px solid #4A4949;
border-bottom: 1px transparent black;
background-color: #302F2F;
padding: 5px;
border-top-left-radius: 2px;
border-top-right-radius: 2px;
}
QTabBar::tab:top:!selected
{
color: #b1b1b1;
background-color: #201F1F;
border: 1px transparent #4A4949;
border-bottom: 1px transparent #4A4949;
border-top-left-radius: 0px;
border-top-right-radius: 0px;
}
QTabBar::tab:top:!selected:hover {
background-color: #48576b;
}
/* BOTTOM TABS */
QTabBar::tab:bottom {
color: #b1b1b1;
border: 1px solid #4A4949;
border-top: 1px transparent black;
background-color: #302F2F;
padding: 5px;
border-bottom-left-radius: 2px;
border-bottom-right-radius: 2px;
}
QTabBar::tab:bottom:!selected
{
color: #b1b1b1;
background-color: #201F1F;
border: 1px transparent #4A4949;
border-top: 1px transparent #4A4949;
border-bottom-left-radius: 0px;
border-bottom-right-radius: 0px;
}
QTabBar::tab:bottom:!selected:hover {
background-color: #78879b;
}
/* LEFT TABS */
QTabBar::tab:left {
color: #b1b1b1;
border: 1px solid #4A4949;
border-left: 1px transparent black;
background-color: #302F2F;
padding: 5px;
border-top-right-radius: 2px;
border-bottom-right-radius: 2px;
}
QTabBar::tab:left:!selected
{
color: #b1b1b1;
background-color: #201F1F;
border: 1px transparent #4A4949;
border-right: 1px transparent #4A4949;
border-top-right-radius: 0px;
border-bottom-right-radius: 0px;
}
QTabBar::tab:left:!selected:hover {
background-color: #48576b;
}
/* RIGHT TABS */
QTabBar::tab:right {
color: #b1b1b1;
border: 1px solid #4A4949;
border-right: 1px transparent black;
background-color: #302F2F;
padding: 5px;
border-top-left-radius: 2px;
border-bottom-left-radius: 2px;
}
QTabBar::tab:right:!selected
{
color: #b1b1b1;
background-color: #201F1F;
border: 1px transparent #4A4949;
border-right: 1px transparent #4A4949;
border-top-left-radius: 0px;
border-bottom-left-radius: 0px;
}
QTabBar::tab:right:!selected:hover {
background-color: #48576b;
}
QTabBar QToolButton::right-arrow:enabled {
image: url(:/dark_blue/img/right_arrow.png);
}
QTabBar QToolButton::left-arrow:enabled {
image: url(:/dark_blue/img/left_arrow.png);
}
QTabBar QToolButton::right-arrow:disabled {
image: url(:/dark_blue/img/right_arrow_disabled.png);
}
QTabBar QToolButton::left-arrow:disabled {
image: url(:/dark_blue/img/left_arrow_disabled.png);
}
QDockWidget {
border: 1px solid #403F3F;
titlebar-close-icon: url(:/dark_blue/img/close.png);
titlebar-normal-icon: url(:/dark_blue/img/undock.png);
}
QDockWidget::close-button, QDockWidget::float-button {
border: 1px solid transparent;
border-radius: 2px;
background: transparent;
}
QDockWidget::close-button:hover, QDockWidget::float-button:hover {
background: rgba(255, 255, 255, 10);
}
QDockWidget::close-button:pressed, QDockWidget::float-button:pressed {
padding: 1px -1px -1px 1px;
background: rgba(255, 255, 255, 10);
}
QTreeView, QListView, QTextBrowser, AtLineEdit, AtLineEdit::hover {
border: 1px solid #444;
background-color: silver;
border-radius: 3px;
margin-left: 3px;
color: black;
}
QTreeView:branch:selected, QTreeView:branch:hover {
background: url(:/dark_blue/img/transparent.png);
}
QTreeView::branch:has-siblings:!adjoins-item {
border-image: url(:/dark_blue/img/transparent.png);
}
QTreeView::branch:has-siblings:adjoins-item {
border-image: url(:/dark_blue/img/transparent.png);
}
QTreeView::branch:!has-children:!has-siblings:adjoins-item {
border-image: url(:/dark_blue/img/transparent.png);
}
QTreeView::branch:has-children:!has-siblings:closed,
QTreeView::branch:closed:has-children:has-siblings {
image: url(:/dark_blue/img/branch_closed.png);
}
QTreeView::branch:open:has-children:!has-siblings,
QTreeView::branch:open:has-children:has-siblings {
image: url(:/dark_blue/img/branch_open.png);
}
QTreeView::branch:has-children:!has-siblings:closed:hover,
QTreeView::branch:closed:has-children:has-siblings:hover {
image: url(:/dark_blue/img/branch_closed-on.png);
}
QTreeView::branch:open:has-children:!has-siblings:hover,
QTreeView::branch:open:has-children:has-siblings:hover {
image: url(:/dark_blue/img/branch_open-on.png);
}
QListView::item:!selected:hover, QListView::item:!selected:hover, QTreeView::item:!selected:hover {
background: rgba(0, 0, 0, 0);
outline: 0;
color: #FFFFFF
}
QListView::item:selected:hover, QListView::item:selected:hover, QTreeView::item:selected:hover {
background: #3d8ec9;
color: #FFFFFF;
}
QSlider::groove:horizontal {
border: 1px solid #3A3939;
height: 8px;
background: #201F1F;
margin: 2px 0;
border-radius: 2px;
}
QSlider::handle:horizontal {
background: QLinearGradient( x1: 0, y1: 0, x2: 0, y2: 1,
stop: 0.0 silver, stop: 0.2 #a8a8a8, stop: 1 #727272);
border: 1px solid #3A3939;
width: 14px;
height: 14px;
margin: -4px 0;
border-radius: 2px;
}
QSlider::groove:vertical {
border: 1px solid #3A3939;
width: 8px;
background: #201F1F;
margin: 0 0px;
border-radius: 2px;
}
QSlider::handle:vertical {
background: QLinearGradient( x1: 0, y1: 0, x2: 0, y2: 1, stop: 0.0 silver,
stop: 0.2 #a8a8a8, stop: 1 #727272);
border: 1px solid #3A3939;
width: 14px;
height: 14px;
margin: 0 -4px;
border-radius: 2px;
}
QToolButton {
/* background-color: transparent; */
border: 2px transparent #4A4949;
border-radius: 4px;
background-color: dimgray;
margin: 2px;
padding: 2px;
}
QToolButton[popupMode="1"] { /* only for MenuButtonPopup */
padding-right: 20px; /* make way for the popup button */
border: 2px transparent #4A4949;
border-radius: 4px;
}
QToolButton[popupMode="2"] { /* only for InstantPopup */
padding-right: 10px; /* make way for the popup button */
border: 2px transparent #4A4949;
}
QToolButton:hover, QToolButton::menu-button:hover {
border: 2px solid #78879b;
}
QToolButton:checked, QToolButton:pressed,
QToolButton::menu-button:pressed {
background-color: #4A4949;
border: 2px solid #78879b;
}
/* the subcontrol below is used only in the InstantPopup or DelayedPopup mode */
QToolButton::menu-indicator {
image: url(:/dark_blue/img/down_arrow.png);
top: -7px; left: -2px; /* shift it a bit */
}
/* the subcontrols below are used only in the MenuButtonPopup mode */
QToolButton::menu-button {
border: 1px transparent #4A4949;
border-top-right-radius: 6px;
border-bottom-right-radius: 6px;
/* 16px width + 4px for border = 20px allocated above */
width: 16px;
outline: none;
}
QToolButton::menu-arrow {
image: url(:/dark_blue/img/down_arrow.png);
}
QToolButton::menu-arrow:open {
top: 1px; left: 1px; /* shift it a bit */
border: 1px solid #3A3939;
}
QPushButton::menu-indicator {
subcontrol-origin: padding;
subcontrol-position: bottom right;
left: 4px;
}
QTableView
{
border: 1px solid #444;
gridline-color: #6c6c6c;
background-color: #201F1F;
}
QTableView, QHeaderView
{
border-radius: 0px;
}
QTableView::item:pressed, QListView::item:pressed, QTreeView::item:pressed {
background: #78879b;
color: #FFFFFF;
}
QTableView::item:selected:active, QTreeView::item:selected:active, QListView::item:selected:active {
background: #3d8ec9;
color: #FFFFFF;
}
QHeaderView
{
border: 1px transparent;
border-radius: 2px;
margin: 0px;
padding: 0px;
}
QHeaderView::section {
background-color: #3A3939;
color: silver;
padding: 4px;
border: 1px solid #6c6c6c;
border-radius: 0px;
text-align: center;
}
QHeaderView::section::vertical::first, QHeaderView::section::vertical::only-one
{
border-top: 1px solid #6c6c6c;
}
QHeaderView::section::vertical
{
border-top: transparent;
}
QHeaderView::section::horizontal::first, QHeaderView::section::horizontal::only-one
{
border-left: 1px solid #6c6c6c;
}
QHeaderView::section::horizontal
{
border-left: transparent;
}
QHeaderView::section:checked
{
color: white;
background-color: #5A5959;
}
/* style the sort indicator */
QHeaderView::down-arrow {
image: url(:/dark_blue/img/down_arrow.png);
}
QHeaderView::up-arrow {
image: url(:/dark_blue/img/up_arrow.png);
}
QTableCornerButton::section {
background-color: #3A3939;
border: 1px solid #3A3939;
border-radius: 2px;
}
QToolBox {
padding: 3px;
border: 1px transparent black;
}
QToolBox::tab {
color: #b1b1b1;
background-color: #302F2F;
border: 1px solid #4A4949;
border-bottom: 1px transparent #302F2F;
border-top-left-radius: 5px;
border-top-right-radius: 5px;
}
QToolBox::tab:selected { /* italicize selected tabs */
font: italic;
background-color: #302F2F;
border-color: #3d8ec9;
}
QStatusBar::item {
border: 1px solid #3A3939;
border-radius: 2px;
}
QSplitter::handle {
border: 1px dashed #3A3939;
}
QSplitter::handle:hover {
background-color: #787876;
border: 1px solid #3A3939;
}
QSplitter::handle:horizontal {
width: 1px;
}
QSplitter::handle:vertical {
height: 1px;
}
QListWidget {
background-color: silver;
border-radius: 5px;
margin-left: 5px;
}
QListWidget::item {
color: black;
}
QMessageBox {
messagebox-critical-icon : url(:/dark_blue/img/critical.png);
messagebox-information-icon : url(:/dark_blue/img/information.png);
messagebox-question-icon : url(:/dark_blue/img/question.png);
messagebox-warning-icon : url(:/dark_blue/img/warning.png);
min-width: 500px;
font-size: 20px;
QMessageBox.Yes{width:40;height:20}
}
ColorButton::enabled {
border-radius: 0px;
border: 1px solid #444444;
}
ColorButton::disabled {
border-radius: 0px;
border: 1px solid #AAAAAA;
}
""")
self.progress = QProgressBar(self)
self.progress.setGeometry(50, 540, 700, 30)
self.progress.setMaximum(100)
#self.btn_file.setObjectName("first_button");
self.btn_file = QtWidgets.QPushButton(self)
self.btn_file.setText("Add File")
self.btn_file.clicked.connect(self.openExcelFileNameDialog)
self.btn_file.move(50,50)
self.btn_file.setStyleSheet('QPushButton{ border-radius: 6px;min-width: 100px;}')
self.btn_img_folder = QtWidgets.QPushButton(self)
self.btn_img_folder.setText("Select Image Folder")
self.btn_img_folder.clicked.connect(self.openFolder)
self.btn_img_folder.move(50,100)
self.btn_img_folder.setStyleSheet('QPushButton{ border-radius: 6px;min-width: 100px;}')
self.btn_intro = QtWidgets.QPushButton(self)
self.btn_intro.setText("Add Introduction")
self.btn_intro.clicked.connect(self.OpenIntroDialog)
self.btn_intro.move(50,150)
self.btn_intro.setStyleSheet('QPushButton{ border-radius: 6px;min-width: 100px;}')
self.btn_outro = QtWidgets.QPushButton(self)
self.btn_outro.setText("Add Outro ")
self.btn_outro.clicked.connect(self.OpenOutroDialog)
self.btn_outro.move(50,200)
self.btn_outro.setStyleSheet('QPushButton{ border-radius: 6px;min-width: 100px;}')
self.btn_audio = QtWidgets.QPushButton(self)
self.btn_audio.setText("Add Audio")
self.btn_audio.clicked.connect(self.openFolder_audio)
self.btn_audio.move(50,350)
self.btn_audio.setStyleSheet('QPushButton{ border-radius: 6px;min-width: 100px;}')
self.btn_start = QtWidgets.QPushButton(self)
self.btn_start.setObjectName('button1')
self.btn_start.setText("Start⯈")
self.btn_start.clicked.connect(self.Start)
#self.btn_start.clicked.connect(self.onButtonClick)
self.btn_start.move(300,405)#430)
self.btn_start.setStyleSheet('QPushButton{ min-height: 40px; min-width: 150px;font-size: 20px;color:white;font-family: "Times New Roman";font-weight: bold;border:1px solid white;border-radius: 8px;}')
self.btn_cancel = QtWidgets.QPushButton(self)
self.btn_cancel.setObjectName('button1')
self.btn_cancel.setText("Cancel")
#self.btn_start.clicked.connect(self.OpenAudioDialog)
self.btn_cancel.move(530,405)#430)
self.btn_cancel.clicked.connect(self.onstop_work) # new one
self.btn_cancel.setEnabled(False)
self.btn_cancel.setStyleSheet('QPushButton{ min-height: 40px; min-width: 150px;font-size: 20px;font-family: "Times New Roman";font-weight: bold;border:1px solid white;border-radius: 8px;}')
self.btn_destination = QtWidgets.QPushButton(self)
self.btn_destination.setText("Destination Folder")
self.btn_destination.clicked.connect(self.openFolder_dest)
self.btn_destination.move(50,300)
self.btn_destination.setStyleSheet('QPushButton{ border-radius: 6px;min-width: 100px;}')
# Color Add Area For text
self.btn_text_color = QtWidgets.QPushButton(self)
self.btn_text_color.setText("Add Text Color")
self.btn_text_color.clicked.connect(self.showColorDialog)
self.btn_text_color.move(50,250)
self.btn_text_color.setStyleSheet('QPushButton{ border-radius: 6px;min-width: 100px;}')
#open color chooser
self.frm = QFrame(self)
self.frm.setStyleSheet("QWidget { background-color: %s;border-radius:8px; }"
% col.name())
self.frm.setGeometry(200, 250, 60, 30)
self.frm.setToolTip('Text Color')
#backgound-color
self.btn_back_color = QtWidgets.QPushButton(self)
self.btn_back_color.setText("Backgound Color")
self.btn_back_color.clicked.connect(self.showColorDialog2)
self.btn_back_color.move(280,250)
self.btn_back_color.setStyleSheet('QPushButton{ border-radius: 6px;min-width: 100px;}')
#open color chooser
self.frm1 = QFrame(self)
self.frm1.setStyleSheet("QWidget { background-color: %s;border-radius:8px; }"
% col.name())
self.frm1.setGeometry(430, 250, 60, 30)
self.frm1.setToolTip('Text Background Color')
self.btn_time = QtWidgets.QPushButton(self)
self.btn_time.setText("Duration of Questions")
#self.btn_time.clicked.connect(self.showColorDialog2)
self.btn_time.move(520,250)
self.btn_time.setStyleSheet('QPushButton{ border-radius: 6px;min-width: 110px;}')
self.btn_time.setEnabled(False)
def center(self):
    """Move the window so it sits centred in the available desktop area."""
    geom = self.frameGeometry()
    geom.moveCenter(QDesktopWidget().availableGeometry().center())
    self.move(geom.topLeft())
def closeEvent(self, event):
    """Ask for confirmation before closing; veto the close if the user declines."""
    choice = QMessageBox.question(
        self, 'Message', "Are you sure to quit?",
        QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
    if choice == QMessageBox.Yes:
        event.accept()
    else:
        event.ignore()
def openFolder_audio(self):
    """Prompt for the audio folder; store the chosen path and echo it in the UI."""
    try:
        chosen = str(QFileDialog.getExistingDirectory(self, "Select Audio Folder"))
        self.Input_audio_path = chosen
        self.sel_audio.setText(str(chosen))
    except Exception:
        self.sel_msg.setText("Error Occured No Folder/Directory Exist !!!!")
def openFolder_dest(self):
    """Prompt for the destination folder; store the path and echo it in the UI."""
    try:
        chosen = str(QFileDialog.getExistingDirectory(self, "Select Destination Folder"))
        self.Input_dest_path = chosen
        self.sel_dest.setText(str(chosen))
    except Exception:
        self.sel_msg.setText("Error Occured No Folder/Directory Exist !!!!")
def openFolder(self):
    """Prompt for the image folder; store the path and echo it in the UI."""
    try:
        chosen = str(QFileDialog.getExistingDirectory(self, "Select Image Folder"))
        self.Image_folder_path = chosen
        self.sel_folder.setText(str(chosen))
    except Exception:
        self.sel_msg.setText("Error Occured No Folder/Directory Exist !!!!")
def OpenIntroDialog(self):
    """Prompt for the intro clip folder; store the path and echo it in the UI."""
    try:
        chosen = str(QFileDialog.getExistingDirectory(self, "Select Intro Folder"))
        self.Input_intro_path = chosen
        self.sel_intro.setText(str(chosen))
    except Exception:
        self.sel_msg.setText("Error Occured No Folder/Directory Exist !!!!")
def OpenOutroDialog(self):
    """Prompt for the outro clip folder; store the path and echo it in the UI."""
    try:
        chosen = str(QFileDialog.getExistingDirectory(self, "Select Outro Folder"))
        self.Input_outro_path = chosen
        self.sel_outro.setText(str(chosen))
    except Exception:
        self.sel_msg.setText("Error Occured No Folder/Directory Exist !!!!")
def openExcelFileNameDialog(self):
    """Prompt for an .xlsx file; remember its path and show it in the line edit."""
    try:
        dlg_options = QFileDialog.Options()
        chosen, _ = QFileDialog.getOpenFileName(
            self, "Select Excel File", "", "Excel Files (*.xlsx)", options=dlg_options)
        self.Excel_filepath = chosen
        if chosen:
            self.filepath = chosen
            self.le.setText(str(chosen))
    except Exception:
        self.sel_msg.setText("Error Occured No File Exist !!!!")
def showColorDialog(self):
    """Let the user pick the text colour; update the swatch and store the RGB triple."""
    try:
        picked = QColorDialog.getColor()
        if picked.isValid():
            self.frm.setStyleSheet("QWidget { background-color: %s }"
                                   % picked.name())
            rgb = ImageColor.getrgb(picked.name())
            self.Input_text_color = rgb
            print(rgb)
    except Exception:
        self.sel_msg.setText("Error Occured Select Color !!!!")
def showColorDialog2(self):
    """Let the user pick the background colour; update the swatch and store the RGB triple."""
    try:
        picked = QColorDialog.getColor()
        if picked.isValid():
            self.frm1.setStyleSheet("QWidget { background-color: %s }"
                                    % picked.name())
            self.Input_back_color = ImageColor.getrgb(picked.name())
    except Exception:
        self.sel_msg.setText("Error Occured Select Color!!!!")
def Start(self):
    """Handle the Start button: reset status and progress, then validate inputs.

    validate_input() kicks off the worker thread when every field is set.
    Any exception is reported on the status label instead of crashing the UI.
    """
    try:
        self.sel_msg.setText(" ")
        self.progress.setValue(0)
        print("END_Start")
        self.validate_input()
    except Exception as e:
        # Fixed typo in the user-visible message ("ocuured" -> "occurred").
        self.sel_msg.setText("Error occurred " + str(e) + " ")
def RepresentsInt(self, s):
    """Return True when *s* parses as an int, False on ValueError."""
    try:
        int(s)
    except ValueError:
        return False
    else:
        return True
## def Validate_audio(self):
## try:
## self.Error_Audio_Folder=False
## print("error_audio---",self.Error_Audio_Folder)
## error_excel=True
## error_audio=True
## error_time=True
##
## if not self.Excel_filepath:
## self.le.setText("please Select Excel File First!!!")
## error_excel= False
## print("Error file:",error_excel)
##
## if not self.Input_audio_path:
## self.sel_audio.setText("please Select Audio File!!!")
## error_audio= False
## print("Error audio:",error_audio)
## try:
## if self.Input_time_Perslide == 0:
## print(self.Input_time_Perslide,"time")
## self.sel_msg.setText("Enter Duration!!!")
## error_time=False
## print("error_time:",error_time)
## except Exception as e:
## self.sel_msg.setText("Error Occured Enter Correct Time between 0 _ 20 ")
##
## if(error_excel==True and error_audio==True and error_time==True):
## wb = xlrd.open_workbook(self.Excel_filepath)
## sheet = wb.sheet_by_index(0)
## arr=[]
## rows = sheet.nrows
## columns = sheet.ncols
## self.No_of_Row=columns
## self.Input_time_Perslide=self.sel_time.text()
## os.chdir(self.Input_audio_path)
## path=self.Input_audio_path #self.Input_audio_path_first
## limit=0;
## time=int(self.Input_time_Perslide)
## print("time:",time)
## col=columns
## limit=(time*col)+(time*2)
## print("LIMIT:",limit)
## count_audio=0;
## if rows ==0:
## print(self.No_of_Row)
## self.sel_msg.setText("Your Excel File is Empty Enter a Valid Excel File !!!!!! ")
## else:
## audfile=[]
## for root, dirs, files in os.walk(path):
## for file in files:
## p=os.path.join(root,file)
## if p.endswith(".mp3"):
## audfile.append(os.path.abspath(p))
##
## size=0;
## for i in audfile:
## audio_background = AudioFileClip(i)
## print("Duration of Audio:",audio_background.duration)
## aud_duration=audio_background.duration
## print("aud_duration:",aud_duration)
## size=int(aud_duration)
## audio_background.close()
## if(limit<size):
## self.Error_Audio_Folder=True
## count_audio=count_audio+1;
## break;
## if(self.Error_Audio_Folder==False):
## self.sel_msg.setText("Error Occured Enter a Audio files which has size greater than "+str(limit)+" seconds")
#### if(self.Error_Audio_Folder==True):
#### self.sel_msg.setText(str(count_audio)+" Audio Files are Valid for process !!!")
##
## except Exception as e:
## val=str(e)
## #print("Exception Occured !!!",val)
## self.sel_msg.setText("Error Occured !!! Enter Fields correctly ")
def validate_input(self):
    """Validate every user selection; start Excel processing when all are set.

    Reads the per-slide duration from the time field, checks that each
    required path/selection is present (reporting missing items on the
    corresponding widget), and calls Read_Excel_File() only when every
    check passes. Any unexpected exception is shown on the status label.

    Fixes over the original: the duration field was read twice and seven
    separate error flags were compared with ``== True``; a single ``ok``
    accumulator preserves the same behaviour.
    """
    print("validateInput")
    try:
        raw_time = self.sel_time.text()
        check = self.RepresentsInt(raw_time)
        # Keep only a parseable duration; otherwise force the "missing" branch below.
        self.Input_time_Perslide = raw_time if check else ""
        ok = check
        print("Input_time_Perslide", self.Input_time_Perslide)
        if not self.Excel_filepath:
            self.le.setText("please Select Excel File First!!!")
            ok = False
        if not self.Image_folder_path:
            self.sel_folder.setText("please Select Images Folder!!!")
            ok = False
        if not self.Input_intro_path:
            self.sel_intro.setText("please Select Introduction First!!!")
            ok = False
        if not self.Input_outro_path:
            self.sel_outro.setText("please Select Outro First!!!")
            ok = False
        if not self.Input_audio_path:
            self.sel_audio.setText("please Select Audio File!!!")
            ok = False
        if not self.Input_dest_path:
            self.sel_dest.setText("please Select Destination Folder!!!")
            ok = False
        if not self.Input_time_Perslide:
            self.sel_time.setText("?")
            self.sel_msg.setText("Enter Duration!!!")
            ok = False
        if ok:
            self.sel_msg.setText("process start !!!!!")
            self.Read_Excel_File()
    except Exception as e:
        self.sel_msg.setText(str(e))
def onCountChanged(self, value):
    """Relay worker progress (percent) to the progress bar and the status label."""
    self.progress.setValue(value)
    self.sel_msg.setText("process is : " + str(value) + " % done Wait !!!!")
# Processing of backend starts here
def Read_Excel_File(self):
    """Spawn the background Worker thread that performs the actual processing.

    Passes every collected input (Excel path, folders, colours, timing) to
    the Worker, wires its progress/start/error signals to the UI handlers,
    connects this window's stop signal so the worker can be cancelled, and
    starts the thread.
    """
    #print("END_read")
    #self.btn_start.setEnabled(False)
    # NOTE(review): Worker is defined elsewhere in this file; the positional
    # argument order must match its __init__ — confirm when touching either side.
    self.myThread = Worker(self.Excel_filepath,self.Image_folder_path,self.Input_intro_path,self.Input_outro_path,self.Input_audio_path,self.Input_dest_path,self.Input_text_color,self.Input_back_color,self.Input_time_Perslide,self.Aud,self.No_of_Row)
    # Worker -> UI: progress percentage, start/finish toggle, error reporting.
    self.myThread.countChanged.connect(self.onCountChanged)
    self.myThread.start_activation.connect(self.onstart_activation)
    self.myThread.setError.connect(self.onsetError)
    # UI -> Worker: emitted by onstop_work to request cancellation.
    self.sig.connect(self.myThread.on_stopprocess)
    #self.sigstop.connect(self.myThread.on_startprocess)
    self.myThread.start()
    #print("end of thread1")
def onstart_activation(self, value):
    """Toggle Start/Cancel buttons when the worker signals start (False) or finish (True)."""
    print("start Func hit --------------------------------------value :", value)
    if value == True:
        print("True if hit _--------------------------------------------------")
        # Worker finished: re-enable Start, announce completion, lock Cancel.
        self.btn_start.setEnabled(True)
        self.sel_msg.setText("Process is Finished Now !!!")
        self.btn_cancel.setEnabled(False)
    elif value == False:
        # Worker started: lock Start, allow cancelling.
        self.btn_start.setEnabled(False)
        self.btn_cancel.setEnabled(True)
def onstop_work(self, value):
    """Ask the worker (via the stop signal) to halt, then lock the Cancel button."""
    self.sig.emit(True)
    self.sel_msg.setText("Process is going to stop Wait !!!!")
    self.btn_cancel.setEnabled(False)
def onsetError(self, value):
    """Show a worker-reported error and hand control back to the Start button."""
    self.count_error = True
    self.sel_msg.setText("Error Occured " + value + " !!!!!!")
    self.btn_start.setEnabled(True)
def window():
    """Bootstrap the Qt application, show the main window, and run the event loop."""
    app = QApplication(sys.argv)
    main_form = MyWindow()
    main_form.show()
    sys.exit(app.exec_())
window()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
__author__ = 'avasilyev2'
import db_work
import pymongo
from pymongo import MongoClient
# Purge previously scraped "hobbygames" records from the shared collection.
# SECURITY: credentials are hard-coded in the connection URI — move them to
# configuration / environment variables.
client = MongoClient('mongodb://admin:pass@ds062807.mongolab.com:62807/games')
db = client['games']
collection = db['general_collection']
# Collection.remove() was deprecated in PyMongo 3 and removed in 4.x;
# delete_many() is the supported API for multi-document deletes.
collection.delete_many({"shop": "hobbygames"})
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# According to: http://liangjiabin.com/blog/2015/04/leetcode-best-time-to-buy-and-sell-stock.html
class Solution(object):
    def maxProfit(self, prices):
        """Max profit with unlimited transactions.

        Equivalent to summing every positive day-over-day price increase:
        buy before each rise, sell at its top.
        """
        if not prices:
            return 0
        return sum(
            max(0, today - yesterday)
            for yesterday, today in zip(prices, prices[1:])
        )
"""
# Not readable.
class Solution(object):
def maxProfit(self, prices):
if not prices:
return 0
buy_price = prices[0]
sell_price = buy_price
max_profit = 0
days = len(prices)
ith_day = 1
# Get the sum of every growth zone and
# then we will get the best profit.
while ith_day < days:
if prices[ith_day] > sell_price:
sell_price = prices[ith_day]
else:
max_profit += sell_price - buy_price
buy_price = prices[ith_day]
sell_price = buy_price
ith_day += 1
max_profit += sell_price - buy_price
return max_profit
"""
"""
[]
[3,4,5,6,2,4]
[6,5,4,3,2,1]
[1,2,3,4,3,2,1,9,11,2,20]
"""
|
script_game_start = 0
script_game_get_use_string = 1
script_game_quick_start = 2
script_get_army_size_from_slider_value = 3
script_spawn_quick_battle_army = 4
script_player_arrived = 5
script_game_set_multiplayer_mission_end = 6
script_game_enable_cheat_menu = 7
script_game_get_console_command = 8
script_game_event_party_encounter = 9
script_game_event_simulate_battle = 10
script_game_event_battle_end = 11
script_order_best_besieger_party_to_guard_center = 12
script_game_get_item_buy_price_factor = 13
script_game_get_item_sell_price_factor = 14
script_get_trade_penalty = 15
script_game_event_buy_item = 16
script_game_event_sell_item = 17
script_start_wedding_cutscene = 18
script_game_get_troop_wage = 19
script_game_get_total_wage = 20
script_game_get_join_cost = 21
script_game_get_upgrade_xp = 22
script_game_get_upgrade_cost = 23
script_game_get_prisoner_price = 24
script_game_check_prisoner_can_be_sold = 25
script_game_get_morale_of_troops_from_faction = 26
script_game_event_detect_party = 27
script_game_event_undetect_party = 28
script_game_get_statistics_line = 29
script_game_get_date_text = 30
script_game_get_money_text = 31
script_game_get_party_companion_limit = 32
script_game_reset_player_party_name = 33
script_game_get_troop_note = 34
script_game_get_center_note = 35
script_game_get_faction_note = 36
script_game_get_quest_note = 37
script_game_get_info_page_note = 38
script_game_get_scene_name = 39
script_game_get_mission_template_name = 40
script_add_kill_death_counts = 41
script_warn_player_about_auto_team_balance = 42
script_check_team_balance = 43
script_check_creating_ladder_dust_effect = 44
script_money_management_after_agent_death = 45
script_initialize_aristocracy = 46
script_initialize_trade_routes = 47
script_initialize_faction_troop_types = 48
script_initialize_item_info = 49
script_initialize_town_arena_info = 50
script_initialize_banner_info = 51
script_initialize_economic_information = 52
script_initialize_all_scene_prop_slots = 53
script_initialize_scene_prop_slots = 54
script_use_item = 55
script_determine_team_flags = 56
script_calculate_flag_move_time = 57
script_move_death_mode_flags_down = 58
script_move_flag = 59
script_move_headquarters_flags = 60
script_set_num_agents_around_flag = 61
script_change_flag_owner = 62
script_move_object_to_nearest_entry_point = 63
script_multiplayer_server_on_agent_spawn_common = 64
script_multiplayer_server_player_joined_common = 65
script_multiplayer_server_before_mission_start_common = 66
script_multiplayer_server_on_agent_killed_or_wounded_common = 67
script_multiplayer_close_gate_if_it_is_open = 68
script_multiplayer_move_moveable_objects_initial_positions = 69
script_move_belfries_to_their_first_entry_point = 70
script_team_set_score = 71
script_player_set_score = 72
script_player_set_kill_count = 73
script_player_set_death_count = 74
script_set_attached_scene_prop = 75
script_set_team_flag_situation = 76
script_start_death_mode = 77
script_calculate_new_death_waiting_time_at_death_mod = 78
script_calculate_number_of_targets_destroyed = 79
script_initialize_objects = 80
script_initialize_objects_clients = 81
script_show_multiplayer_message = 82
script_get_headquarters_scores = 83
script_draw_this_round = 84
script_check_achievement_last_man_standing = 85
script_find_most_suitable_bot_to_control = 86
script_game_receive_url_response = 87
script_game_get_cheat_mode = 88
script_game_receive_network_message = 89
script_cf_multiplayer_evaluate_poll = 90
script_multiplayer_accept_duel = 91
script_game_get_multiplayer_server_option_for_mission_template = 92
script_game_multiplayer_server_option_for_mission_template_to_string = 93
script_cf_multiplayer_team_is_available = 94
script_find_number_of_agents_constant = 95
script_game_multiplayer_event_duel_offered = 96
script_game_get_multiplayer_game_type_enum = 97
script_game_multiplayer_get_game_type_mission_template = 98
script_multiplayer_get_mission_template_game_type = 99
script_multiplayer_fill_available_factions_combo_button = 100
script_multiplayer_get_troop_class = 101
script_multiplayer_clear_player_selected_items = 102
script_multiplayer_init_player_slots = 103
script_multiplayer_initialize_belfry_wheel_rotations = 104
script_send_open_close_information_of_object = 105
script_multiplayer_send_initial_information = 106
script_multiplayer_remove_headquarters_flags = 107
script_multiplayer_remove_destroy_mod_targets = 108
script_multiplayer_init_mission_variables = 109
script_multiplayer_event_mission_end = 110
script_multiplayer_event_agent_killed_or_wounded = 111
script_multiplayer_get_item_value_for_troop = 112
script_multiplayer_get_previous_item_for_item_and_troop = 113
script_cf_multiplayer_is_item_default_for_troop = 114
script_multiplayer_calculate_cur_selected_items_cost = 115
script_multiplayer_set_item_available_for_troop = 116
script_multiplayer_send_item_selections = 117
script_multiplayer_set_default_item_selections_for_troop = 118
script_multiplayer_display_available_items_for_troop_and_item_classes = 119
script_multiplayer_fill_map_game_types = 120
script_multiplayer_count_players_bots = 121
script_multiplayer_find_player_leader_for_bot = 122
script_multiplayer_find_bot_troop_and_group_for_spawn = 123
script_multiplayer_change_leader_of_bot = 124
script_multiplayer_find_spawn_point = 125
script_multiplayer_find_spawn_point_2 = 126
script_multiplayer_buy_agent_equipment = 127
script_party_get_ideal_size = 128
script_game_get_party_prisoner_limit = 129
script_game_get_item_extra_text = 130
script_game_on_disembark = 131
script_game_context_menu_get_buttons = 132
script_game_event_context_menu_button_clicked = 133
script_game_get_skill_modifier_for_troop = 134
script_npc_get_troop_wage = 135
script_setup_talk_info = 136
script_setup_talk_info_companions = 137
script_update_party_creation_random_limits = 138
script_set_trade_route_between_centers = 139
script_average_trade_good_prices = 140
script_average_trade_good_prices_2 = 141
script_average_trade_good_productions = 142
script_normalize_trade_good_productions = 143
script_update_trade_good_prices = 144
script_update_trade_good_price_for_party = 145
script_center_get_production = 146
script_center_get_consumption = 147
script_get_enterprise_name = 148
script_do_merchant_town_trade = 149
script_party_calculate_regular_strength = 150
script_party_calculate_strength = 151
script_loot_player_items = 152
script_party_calculate_loot = 153
script_calculate_main_party_shares = 154
script_party_give_xp_and_gold = 155
script_setup_troop_meeting = 156
script_setup_party_meeting = 157
script_get_meeting_scene = 158
script_party_remove_all_companions = 159
script_party_remove_all_prisoners = 160
script_party_add_party_companions = 161
script_party_add_party_prisoners = 162
script_party_prisoners_add_party_companions = 163
script_party_prisoners_add_party_prisoners = 164
script_party_add_party = 165
script_party_copy = 166
script_clear_party_group = 167
script_party_add_wounded_members_as_prisoners = 168
script_get_nonempty_party_in_group = 169
script_collect_prisoners_from_empty_parties = 170
script_change_party_morale = 171
script_count_casualties_and_adjust_morale = 172
script_print_casualties_to_s0 = 173
script_write_fit_party_members_to_stack_selection = 174
script_remove_fit_party_member_from_stack_selection = 175
script_remove_random_fit_party_member_from_stack_selection = 176
script_add_routed_party = 177
script_cf_training_ground_sub_routine_1_for_melee_details = 178
script_training_ground_sub_routine_2_for_melee_details = 179
script_cf_training_ground_sub_routine_for_training_result = 180
script_print_troop_owned_centers_in_numbers_to_s0 = 181
script_get_random_melee_training_weapon = 182
script_start_training_at_training_ground = 183
script_party_count_fit_regulars = 184
script_party_count_fit_for_battle = 185
script_party_count_members_with_full_health = 186
script_get_stack_with_rank = 187
script_inflict_casualties_to_party = 188
script_move_members_with_ratio = 189
script_count_parties_of_faction_and_party_type = 190
script_faction_get_number_of_armies = 191
script_faction_recalculate_strength = 192
script_cf_select_random_town_with_faction = 193
script_cf_select_random_village_with_faction = 194
script_cf_select_random_walled_center_with_faction = 195
script_cf_select_random_walled_center_with_faction_and_owner_priority_no_siege = 196
script_cf_select_random_walled_center_with_faction_and_less_strength_priority = 197
script_cf_select_random_town_at_peace_with_faction = 198
script_cf_select_random_town_at_peace_with_faction_in_trade_route = 199
script_cf_select_most_profitable_town_at_peace_with_faction_in_trade_route = 200
script_shuffle_troop_slots = 201
script_get_quest = 202
script_get_dynamic_quest = 203
script_get_political_quest = 204
script_npc_find_quest_for_player_to_s11 = 205
script_cf_get_random_enemy_center_within_range = 206
script_cf_faction_get_random_enemy_faction = 207
script_cf_faction_get_random_friendly_faction = 208
script_cf_troop_get_random_enemy_troop_with_occupation = 209
script_cf_get_random_lord_in_a_center_with_faction = 210
script_cf_get_random_lord_except_king_with_faction = 211
script_cf_get_random_lord_from_another_faction_in_a_center = 212
script_get_closest_walled_center = 213
script_get_closest_center = 214
script_get_closest_center_of_faction = 215
script_get_closest_walled_center_of_faction = 216
script_let_nearby_parties_join_current_battle = 217
script_party_wound_all_members_aux = 218
script_party_wound_all_members = 219
script_calculate_battle_advantage = 220
script_cf_check_enemies_nearby = 221
script_get_heroes_attached_to_center_aux = 222
script_get_heroes_attached_to_center = 223
script_get_heroes_attached_to_center_as_prisoner_aux = 224
script_get_heroes_attached_to_center_as_prisoner = 225
script_give_center_to_faction = 226
script_give_center_to_faction_aux = 227
script_change_troop_faction = 228
script_troop_set_title_according_to_faction = 229
script_give_center_to_lord = 230
script_get_number_of_hero_centers = 231
script_cf_get_random_enemy_center = 232
script_find_travel_location = 233
script_get_relation_between_parties = 234
script_calculate_weekly_party_wage = 235
script_calculate_player_faction_wage = 236
script_calculate_hero_weekly_net_income_and_add_to_wealth = 237
script_cf_reinforce_party = 238
script_hire_men_to_kingdom_hero_party = 239
script_get_percentage_with_randomized_round = 240
script_create_cattle_herd = 241
script_buy_cattle_from_village = 242
script_kill_cattle_from_herd = 243
script_create_kingdom_hero_party = 244
script_create_kingdom_party_if_below_limit = 245
script_cf_create_kingdom_party = 246
script_get_troop_attached_party = 247
script_center_get_food_consumption = 248
script_center_get_food_store_limit = 249
script_refresh_village_merchant_inventory = 250
script_refresh_village_defenders = 251
script_village_set_state = 252
script_process_village_raids = 253
script_process_sieges = 254
script_lift_siege = 255
script_process_alarms = 256
script_allow_vassals_to_join_indoor_battle = 257
script_party_set_ai_state = 258
script_cf_party_under_player_suggestion = 259
script_troop_does_business_in_center = 260
script_process_kingdom_parties_ai = 261
script_process_hero_ai = 262
script_begin_assault_on_center = 263
script_select_faction_marshall = 264
script_get_center_faction_relation_including_player = 265
script_update_report_to_army_quest_note = 266
script_decide_faction_ai = 267
script_check_and_finish_active_army_quests_for_faction = 268
script_troop_get_player_relation = 269
script_change_troop_renown = 270
script_change_player_relation_with_troop = 271
script_change_player_relation_with_center = 272
script_change_player_relation_with_faction = 273
script_set_player_relation_with_faction = 274
script_change_player_relation_with_faction_ex = 275
script_cf_get_random_active_faction_except_player_faction_and_faction = 276
script_make_kingdom_hostile_to_player = 277
script_change_player_honor = 278
script_change_player_party_morale = 279
script_cf_player_has_item_without_modifier = 280
script_get_player_party_morale_values = 281
script_diplomacy_start_war_between_kingdoms = 282
script_diplomacy_party_attacks_neutral = 283
script_party_calculate_and_set_nearby_friend_enemy_follower_strengths = 284
script_init_ai_calculation = 285
script_recalculate_ais = 286
script_calculate_troop_ai = 287
script_diplomacy_start_peace_between_kingdoms = 288
script_event_kingdom_make_peace_with_kingdom = 289
script_randomly_start_war_peace_new = 290
script_exchange_prisoners_between_factions = 291
script_add_notification_menu = 292
script_finish_quest = 293
script_get_information_about_troops_position = 294
script_recruit_troop_as_companion = 295
script_setup_random_scene = 296
script_enter_dungeon = 297
script_enter_court = 298
script_setup_meet_lady = 299
script_find_high_ground_around_pos1 = 300
script_select_battle_tactic = 301
script_select_battle_tactic_aux = 302
script_battle_tactic_init = 303
script_battle_tactic_init_aux = 304
script_calculate_team_powers = 305
script_apply_effect_of_other_people_on_courage_scores = 306
script_apply_death_effect_on_courage_scores = 307
script_decide_run_away_or_not = 308
script_battle_tactic_apply = 309
script_battle_tactic_apply_aux = 310
script_team_get_class_percentages = 311
script_get_closest3_distance_of_enemies_at_pos1 = 312
script_team_get_average_position_of_enemies = 313
script_search_troop_prisoner_of_party = 314
script_change_debt_to_troop = 315
script_abort_quest = 316
script_cf_is_quest_troop = 317
script_check_friendly_kills = 318
script_simulate_retreat = 319
script_simulate_battle_with_agents_aux = 320
script_map_get_random_position_around_position_within_range = 321
script_get_number_of_unclaimed_centers_by_player = 322
script_cf_troop_check_troop_is_enemy = 323
script_troop_get_leaded_center_with_index = 324
script_cf_troop_get_random_leaded_walled_center_with_less_strength_priority = 325
script_cf_troop_get_random_leaded_town_or_village_except_center = 326
script_troop_write_owned_centers_to_s2 = 327
script_troop_write_family_relations_to_s1 = 328
script_troop_get_family_relation_to_troop = 329
script_collect_friendly_parties = 330
script_encounter_calculate_fit = 331
script_encounter_init_variables = 332
script_calculate_renown_value = 333
script_cf_get_first_agent_with_troop_id = 334
script_cb_on_bullet_hit = 335
script_cf_team_get_average_position_of_agents_with_type_to_pos1 = 336
script_cf_turn_windmill_fans = 337
script_print_party_members = 338
script_round_value = 339
script_change_banners_and_chest = 340
script_remove_siege_objects = 341
script_describe_relation_to_s63 = 342
script_describe_center_relation_to_s3 = 343
script_center_ambiance_sounds = 344
script_center_set_walker_to_type = 345
script_cf_center_get_free_walker = 346
script_center_remove_walker_type_from_walkers = 347
script_init_town_walkers = 348
script_cf_enter_center_location_bandit_check = 349
script_init_town_agent = 350
script_init_town_walker_agents = 351
script_agent_get_town_walker_details = 352
script_town_walker_occupation_string_to_s14 = 353
script_tick_town_walkers = 354
script_set_town_walker_destination = 355
script_town_init_doors = 356
script_siege_init_ai_and_belfry = 357
script_cf_siege_move_belfry = 358
script_cf_siege_rotate_belfry_platform = 359
script_cf_siege_assign_men_to_belfry = 360
script_siege_move_archers_to_archer_positions = 361
script_store_movement_order_name_to_s1 = 362
script_store_riding_order_name_to_s1 = 363
script_store_weapon_usage_order_name_to_s1 = 364
script_team_give_order_from_order_panel = 365
script_update_order_panel = 366
script_update_agent_position_on_map = 367
script_convert_3d_pos_to_map_pos = 368
script_update_order_flags_on_map = 369
script_update_order_panel_checked_classes = 370
script_update_order_panel_statistics_and_map = 371
script_set_town_picture = 372
script_consume_food = 373
script_calculate_troop_score_for_center = 374
script_assign_lords_to_empty_centers = 375
script_create_village_farmer_party = 376
script_do_party_center_trade = 377
script_player_join_faction = 378
script_player_leave_faction = 379
script_deactivate_player_faction = 380
script_activate_player_faction = 381
script_agent_reassign_team = 382
script_start_quest = 383
script_conclude_quest = 384
script_succeed_quest = 385
script_fail_quest = 386
script_report_quest_troop_positions = 387
script_end_quest = 388
script_cancel_quest = 389
script_update_village_market_towns = 390
script_update_mercenary_units_of_towns = 391
script_update_volunteer_troops_in_village = 392
script_update_npc_volunteer_troops_in_village = 393
script_update_companion_candidates_in_taverns = 394
script_update_ransom_brokers = 395
script_update_tavern_travellers = 396
script_update_villages_infested_by_bandits = 397
script_update_booksellers = 398
script_update_tavern_minstrels = 399
script_update_other_taverngoers = 400
script_update_faction_notes = 401
script_update_faction_political_notes = 402
script_update_faction_traveler_notes = 403
script_update_troop_notes = 404
script_update_troop_location_notes = 405
script_update_troop_location_notes_prisoned = 406
script_update_troop_political_notes = 407
script_update_center_notes = 408
script_update_center_recon_notes = 409
script_update_all_notes = 410
script_agent_troop_get_banner_mesh = 411
script_shield_item_set_banner = 412
script_troop_agent_set_banner = 413
script_add_troop_to_cur_tableau = 414
script_add_troop_to_cur_tableau_for_character = 415
script_add_troop_to_cur_tableau_for_inventory = 416
script_add_troop_to_cur_tableau_for_profile = 417
script_add_troop_to_cur_tableau_for_retirement = 418
script_add_troop_to_cur_tableau_for_party = 419
script_get_prosperity_text_to_s50 = 420
script_spawn_bandits = 421
script_count_mission_casualties_from_agents = 422
script_get_max_skill_of_player_party = 423
script_upgrade_hero_party = 424
script_get_improvement_details = 425
script_cf_troop_agent_is_alive = 426
script_cf_village_recruit_volunteers_cond = 427
script_village_recruit_volunteers_recruit = 428
script_get_troop_item_amount = 429
script_get_name_from_dna_to_s50 = 430
script_change_center_prosperity = 431
script_get_center_ideal_prosperity = 432
script_good_price_affects_good_production = 433
script_get_poorest_village_of_faction = 434
script_troop_add_gold = 435
script_initialize_npcs = 436
script_objectionable_action = 437
script_post_battle_personality_clash_check = 438
script_event_player_defeated_enemy_party = 439
script_event_player_captured_as_prisoner = 440
script_npc_morale = 441
script_retire_companion = 442
script_reduce_companion_morale_for_clash = 443
script_calculate_ransom_amount_for_troop = 444
script_offer_ransom_amount_to_player_for_prisoners_in_party = 445
script_event_hero_taken_prisoner_by_player = 446
script_cf_check_hero_can_escape_from_player = 447
script_cf_party_remove_random_regular_troop = 448
script_place_player_banner_near_inventory = 449
script_place_player_banner_near_inventory_bms = 450
script_stay_captive_for_hours = 451
script_set_parties_around_player_ignore_player = 452
script_randomly_make_prisoner_heroes_escape_from_party = 453
script_fill_tournament_participants_troop = 454
script_get_num_tournament_participants = 455
script_get_random_tournament_participant = 456
script_add_tournament_participant = 457
script_get_random_tournament_team_amount_and_size = 458
script_get_troop_priority_point_for_tournament = 459
script_sort_tournament_participant_troops = 460
script_remove_tournament_participants_randomly = 461
script_end_tournament_fight = 462
script_get_win_amount_for_tournament_bet = 463
script_tournament_place_bet = 464
script_calculate_amount_of_cattle_can_be_stolen = 465
script_draw_banner_to_region = 466
script_get_troop_custom_banner_num_positionings = 467
script_get_custom_banner_charge_type_position_scale_color = 468
script_get_random_custom_banner = 469
script_get_custom_banner_color_from_index = 470
script_cf_check_color_visibility = 471
script_get_next_active_kingdom = 472
script_remove_cattles_if_herd_is_close_to_party = 473
script_get_rumor_to_s61 = 474
script_lord_comment_to_s43 = 475
script_add_log_entry = 476
script_get_relevant_comment_for_log_entry = 477
script_get_relevant_comment_to_s42 = 478
script_merchant_road_info_to_s42 = 479
script_get_manhunt_information_to_s15 = 480
script_rebellion_arguments = 481
script_get_culture_with_party_faction_for_music = 482
script_get_culture_with_faction_for_music = 483
script_music_set_situation_with_culture = 484
script_combat_music_set_situation_with_culture = 485
script_play_victorious_sound = 486
script_set_items_for_tournament = 487
script_custom_battle_end = 488
script_remove_troop_from_prison = 489
script_debug_variables = 490
script_troop_describes_troop_to_s15 = 491
script_troop_describes_quarrel_with_troop_to_s14 = 492
script_cf_test_lord_incompatibility_to_s17 = 493
script_troop_get_romantic_chemistry_with_troop = 494
script_cf_troop_get_romantic_attraction_to_troop = 495
script_cf_random_political_event = 496
script_evaluate_realm_stability = 497
script_battle_political_consequences = 498
script_faction_inflict_war_damage_on_faction = 499
script_calculate_troop_political_factors_for_liege = 500
script_cf_troop_can_intrigue = 501
script_troop_change_relation_with_troop = 502
script_troop_get_relation_with_troop = 503
script_appoint_faction_marshall = 504
script_center_get_item_consumption = 505
script_locate_player_minister = 506
script_lord_get_home_center = 507
script_get_kingdom_lady_social_determinants = 508
script_age_troop_one_year = 509
script_add_lady_items = 510
script_init_troop_age = 511
script_assign_troop_love_interests = 512
script_faction_conclude_feast = 513
script_lady_evaluate_troop_as_suitor = 514
script_courtship_event_troop_court_lady = 515
script_courtship_event_lady_break_relation_with_suitor = 516
script_courtship_event_bride_marry_groom = 517
script_npc_decision_checklist_party_ai = 518
script_npc_decision_checklist_troop_follow_or_not = 519
script_find_total_prosperity_score = 520
script_calculate_center_assailability_score = 521
script_find_center_to_defend = 522
script_npc_decision_checklist_peace_or_war = 523
script_npc_decision_checklist_male_guardian_assess_suitor = 524
script_npc_decision_checklist_marry_female_pc = 525
script_courtship_poem_reactions = 526
script_diplomacy_faction_get_diplomatic_status_with_faction = 527
script_faction_follows_controversial_policy = 528
script_internal_politics_rate_feast_to_s9 = 529
script_faction_get_adjective_to_s10 = 530
script_setup_tavern_attacker = 531
script_activate_tavern_attackers = 532
script_deactivate_tavern_attackers = 533
script_activate_town_guard = 534
script_cf_prisoner_offered_parole = 535
script_neutral_behavior_in_fight = 536
script_party_inflict_attrition = 537
script_add_rumor_string_to_troop_notes = 538
script_character_can_wed_character = 539
script_troop_change_career = 540
script_center_get_goods_availability = 541
script_lord_find_alternative_faction = 542
script_set_up_duel_with_troop = 543
script_test_player_for_career_and_marriage_incompatability = 544
script_deduct_casualties_from_garrison = 545
script_npc_decision_checklist_take_stand_on_issue = 546
script_npc_decision_checklist_evaluate_faction_strategy = 547
script_process_player_enterprise = 548
script_replace_scene_items_with_spawn_items_before_ms = 549
script_replace_scene_items_with_spawn_items_after_ms = 550
script_cf_is_melee_weapon_for_tutorial = 551
script_iterate_pointer_arrow = 552
script_find_center_to_attack_alt = 553
script_npc_decision_checklist_evaluate_enemy_center_for_attack = 554
script_npc_decision_checklist_faction_ai_alt = 555
script_faction_last_reconnoitered_center = 556
script_reduce_exact_number_to_estimate = 557
script_calculate_castle_prosperities_by_using_its_villages = 558
script_initialize_tavern_variables = 559
script_prepare_alley_to_fight = 560
script_prepare_town_to_fight = 561
script_change_player_right_to_rule = 562
script_indict_lord_for_treason = 563
script_give_center_to_faction_while_maintaining_lord = 564
script_check_concilio_calradi_achievement = 565
script_refresh_center_inventories = 566
script_refresh_center_armories = 567
script_refresh_center_weaponsmiths = 568
script_refresh_center_stables = 569
|
import dicom
import os
import numpy as np
from matplotlib import pyplot as plt
from matplotlib import cm
import csv
# Path to the directory tree containing the DICOM slices.
# (Escaping normalized: "\\Images" — runtime value unchanged from the original.)
PathDicom = "C:\\Users\\David\\Documents\\CT\\CT_data\\Images"
lstFilesDICOM = []
# Collect every file under the path (assumed to all be DICOM slices).
for dirName, subdirList, fileList in os.walk(PathDicom):
    for filename in fileList:
        lstFilesDICOM.append(os.path.join(dirName, filename))
# NOTE(review): os.walk yields files in filesystem order; if anatomical slice
# order matters, sort by a DICOM tag (e.g. InstanceNumber) — confirm.

# Read the first image to obtain volume dimensions and pixel spacing.
a = dicom.read_file(lstFilesDICOM[0])
# Volume shape: rows x columns x number of slices.
PixelDims = (int(a.Rows), int(a.Columns), len(lstFilesDICOM))
# Physical voxel size: in-plane spacing plus slice thickness.
PixelSpacing = (float(a.PixelSpacing[0]), float(a.PixelSpacing[1]), float(a.SliceThickness))
print('PixelSpacing:')
print(PixelSpacing)
# Pre-allocate the volume with the same dtype as the raw pixel data.
dicomArray = np.zeros(PixelDims, dtype=a.pixel_array.dtype)
print(dicomArray.shape)
# Read all slices into the volume.
# FIX: enumerate() replaces lstFilesDICOM.index(filenameDICOM), which
# rescanned the whole list every iteration (O(n^2) overall).
for sliceIdx, filenameDICOM in enumerate(lstFilesDICOM):
    ds = dicom.read_file(filenameDICOM)
    dicomArray[:, :, sliceIdx] = ds.pixel_array
# Convert raw values to Hounsfield Units: HU = raw * slope + intercept.
intercept = a.RescaleIntercept
slope = a.RescaleSlope
dicomArray = dicomArray*slope + intercept
# Plot one of the slices.
plt.figure()
plt.imshow(dicomArray[:, :, 50], cmap='gray')
# FIX: plt.hold() was deprecated in matplotlib 2.0 and later removed;
# overplotting on the current axes is now the default behavior.

# Import the coronary-vessel contour (one "x,y" integer pair per row).
with open('C:\\Users\\David\\Documents\\CT\\contour1.txt') as csvfile:
    contour = csv.reader(csvfile)
    x = []
    y = []
    for row in contour:
        x.append(int(row[0]))
        y.append(int(row[1]))
# Overlay the contour on the displayed slice.
plt.plot(x, y, 'r', linewidth=1)
plt.show()
# Simple thresholding: binary mask of voxels with 200 <= HU <= 600.
arrayThresh = dicomArray.copy()
arrayThresh[arrayThresh < 200] = 0
arrayThresh[arrayThresh > 600] = 0
arrayThresh[arrayThresh > 0] = 1
plt.figure()
plt.imshow(arrayThresh[:, :, 50], cmap='gray')
plt.show()
# Read a line of text and print every word that begins with a vowel.
# FIX: the original compared characters against '' (never true, should have
# been a space test), never reset its word accumulator, and printed inside
# the loop — so it emitted growing prefixes instead of whole words.
s = input()
for w in s.split():
    # split() never yields empty strings, so w[0] is always safe.
    if w[0] in ('a', 'e', 'i', 'o', 'u'):
        print(w)
|
from django.db import models
from django.conf import settings
# Create your models here.
class Message(models.Model) :
    """A single chat message linked to the user who sent it."""

    # Free-form message body.
    message = models.TextField()
    # Sending user; nullable, and the message row is deleted together with
    # the user (on_delete=CASCADE). Reverse accessor: user.messages.
    sender = models.ForeignKey(settings.AUTH_USER_MODEL,related_name="messages",on_delete=models.CASCADE,null=True)
    # Set once at creation (auto_now_add=True); never auto-updated afterwards.
    timestamp = models.DateTimeField(auto_now_add=True,auto_now=False)
class Chat(models.Model) :
    """A conversation: its messages and the users taking part in it."""

    # Messages belonging to this chat. Reverse accessor: message.chats.
    messages = models.ManyToManyField(Message,related_name="chats")
    # FIX: dropped null=True — it has no effect on ManyToManyField and makes
    # Django emit warning fields.W340; "no participants" is simply no rows
    # in the through table.
    participants = models.ManyToManyField(settings.AUTH_USER_MODEL,related_name="total_chats")
|
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 22 22:37:22 2015
@author: HSH
"""
"""
# DP solution
# Time Limite Exceeded
class Solution(object):
"""
class Solution:
    """Wildcard matching: '?' matches any single character, '*' matches
    any sequence of characters (including the empty sequence)."""

    def isMatch_v1(self, s, p):
        """DP formulation, O(len(s)*len(p)) time and space.

        dp[i][j] is True when the first i pattern chars match the first
        j string chars. (Kept for reference; TLE on huge inputs.)
        """
        m, n = len(s), len(p)
        dp = [[False] * (m + 1) for _ in range(n + 1)]
        dp[0][0] = True
        for i in range(1, n + 1):
            # A leading run of '*' can match the empty string.
            if p[i - 1] == '*':
                dp[i][0] = dp[i - 1][0]
            for j in range(1, m + 1):
                if p[i - 1] in (s[j - 1], '?'):
                    dp[i][j] = dp[i - 1][j - 1]
                elif p[i - 1] == '*':
                    # '*' matches empty, extends a match, or starts one.
                    dp[i][j] = dp[i][j - 1] or dp[i - 1][j] or dp[i - 1][j - 1]
        return dp[n][m]

    def isMatch(self, s, p):
        """Greedy two-pointer matcher with backtracking to the last '*'."""
        si = pi = 0
        backtrack_si = 0   # where to resume s after widening the last '*'
        star_pi = -1       # index of the most recent '*' in p (-1: none)
        while si < len(s):
            if pi < len(p) and p[pi] in (s[si], '?'):
                si += 1
                pi += 1
            elif pi < len(p) and p[pi] == '*':
                # Tentatively let '*' match nothing; remember where we were.
                backtrack_si = si
                star_pi = pi
                pi += 1
            elif star_pi != -1:
                # Mismatch: let the last '*' swallow one more character.
                backtrack_si += 1
                si = backtrack_si
                pi = star_pi + 1
            else:
                return False
        # Only trailing '*' may remain in the pattern.
        while pi < len(p) and p[pi] == '*':
            pi += 1
        return pi == len(p)
# Quick manual check: "abedd" vs "?b*d" should match (result is discarded).
sol = Solution()
sol.isMatch("abedd", "?b*d")
|
# Random Forest Regression
|
import math
# Read two integers p and q; print p when q does not divide p evenly.
p, q = map(int, input().split())
if p % q != 0:
    # FIX: removed the stray trailing 'pass' — it was dead code.
    print(p)
|
import os
import re
def __main__():
    """One time utility script ran for changes in attribute instantiation.
    This script performs the below, one-time conversion on all attributes of
    SpotiBot classes - this is done so that the same codes can instantiate
    json API responses and serialized objects stored in MongoDB without any
    other changes to the data model.
    .. code-block:: python
        class.get('attr') # original
        object_handler(class, 'attr') # revised
    Date: 2020-06-11
    """
    # Project root containing the modules to rewrite.
    spotibot_root = os.path.join(os.getcwd(), 'spotibot')
    # Every file under the root except package dunders and anything inside a
    # cache/ or utils/ directory.
    # NOTE(review): re.escape() before a plain substring test is unnecessary
    # (it only escapes regex metacharacters) but harmless here.
    files_to_convert = \
        [os.path.join(dirpath, file) for dirpath, dirnames, files
         in os.walk(spotibot_root, topdown=False) for file in files
         if file not in [r'__init__.py', r'__main__.py']
         and 'cache' not in re.escape(dirpath)
         and 'utils' not in re.escape(dirpath)]
    # / Exporting list of files to convert into autodocs directory /
    wr_path = os.path.join(os.getcwd(), 'src_autodoc', '20200610~1.txt')
    # Refuse to overwrite a previous export — this is a one-time script.
    assert not os.path.isfile(wr_path)
    with open(wr_path, 'w') as f:
        f.write('\n'.join([re.escape(file) for file in files_to_convert]))
    # / Continuing replacement logic /
    # Captures: \1 = the "self.attr: Type = \" prefix (through the line
    # continuation), \2 = the source object name, \3 = the quoted attribute.
    # NOTE(review): [^self] is a character class (any one char not in
    # {s,e,l,f}), which looks like it was intended as a negative lookahead
    # (?!self) — confirm against the files it was run on.
    pattern = re.compile(r"(.*self\.\w+:\s\w+\s=\s\\\n\s+)[^self]"
                         r"\s+(\w+)\.get\(('\w+')\)")
    for file in files_to_convert:
        with open(file, 'r') as r:
            obj_py = r.read()
        # Rewrite "<obj>.get('<attr>')" to "object_handler(<obj>, '<attr>')".
        conv_py = pattern.sub(r"\1object_handler(\2, \3)", obj_py)
        # Write the converted source back in place.
        with open(file, 'w') as w:
            w.write(conv_py)
        print(f"Written modified file to:\n\t{file}\n")
# Run the conversion only when executed directly, not on import.
if __name__ == '__main__':
    __main__()
|
import pandas as pd
import matplotlib.pyplot as plt
#########
def SO(max_so=7, start_so=1, last_so=0, so=.02, so_step=1.1):
    """Return the cumulative safety-order drawdown fraction after max_so steps.

    Each step compounds the previous accumulated fraction by *so_step* and
    adds the base fraction *so* (first step seeds the accumulator with *so*).

    Args:
        max_so: total number of safety-order steps to accumulate.
        start_so: current step of the recursion (callers normally leave 1).
        last_so: accumulated fraction carried between recursive calls.
        so: base drawdown fraction added at every step.
        so_step: geometric growth factor applied to the running total.
    """
    if start_so == 1:
        last_so = so
    else:
        last_so = so + last_so * so_step
    if start_so < max_so:
        # BUG FIX: the original recursive call omitted so/so_step, silently
        # reverting to the defaults (.02 / 1.1) after the first step even
        # when the caller passed custom values. Propagate them explicitly.
        return SO(max_so=max_so, start_so=start_so + 1, last_so=last_so,
                  so=so, so_step=so_step)
    else:
        return last_so
def getSOperc(max_so=7):
    """Return the drawdown fractions for safety orders 1 through max_so.

    Delegates to SO(); element k-1 is the cumulative fraction after k steps.
    """
    return [SO(step) for step in range(1, max_so + 1)]
#########
# Hourly BTC/USDT candles from Binance (exported via ccxt); 'Time' is the index.
data = pd.read_csv('finances/crypto/data/ccxt_binance_BTC-USDT_1h_20190101-20200101.csv',index_col='Time')
# NOTE(review): head() returns a preview that is discarded — no-op outside a REPL.
data.head()
def getPriceLeves(base_order, percentages):
    """Return the price reached after each fractional drawdown in *percentages*.

    Each entry is base_order reduced by the given fraction of itself.
    """
    return [(base_order - base_order * frac) for frac in percentages]
# prices = data['Close'].tolist()
# Backtest parameters: fixed order size, take-profit fraction, SO counter.
order_size=100
position=0      # NOTE(review): never updated anywhere in the loop — confirm intent
take_profit=0.01
so_flag=0       # number of safety orders triggered so far
# BUG: NameError — 'base_order' is not defined yet at this point
# (it is first assigned inside the loop below).
final_price = base_order
bought=0
# Single-row ledger: starting balance of 800 USDT, no BTC.
wallet=pd.DataFrame([{'USDT':800,'BTC':0}])
# BUG: NameError — 'amount' is never defined in this script.
maxSO=int(wallet['USDT']/amount)
for i in range(len(data)):
    price = data.Close[i]
    # BUG: 'prices' is commented out above, so this raises NameError;
    # presumably it should be data['Close'].tolist()[0] — confirm.
    base_order = prices[0]
    percentages = getSOperc()
    price_levels=getPriceLeves(base_order, percentages)
    # Initial entry: buy the base order once no position/SO is open.
    if so_flag==0 and position == 0:
        print('buy: '+str(order_size))
        wallet['BTC']+=order_size/price
        wallet['USDT'] -= order_size
    # Take profit: liquidate all BTC once price clears the target.
    if price > (final_price + final_price*take_profit):
        print('Sell')
        wallet['USDT']+=wallet['BTC']*price
        wallet['BTC']=0
    # Safety order: price fell below the next drawdown level.
    elif price <= price_levels[so_flag]:
        so_flag+=1
        # NOTE(review): format() receives two args for one placeholder;
        # 'amount' is also undefined (NameError when this branch runs).
        print('SO [{}] - buy: '.format(so_flag,amount))
        # Average the break-even price with the new fill.
        final_price=(price + final_price)/2
    else:
        print('Hold!!!')
        pass
|
#!/usr/bin/env python
import rospy
import tf
from sensor_msgs.msg import Imu
def callbackIMU(data):
    """ROS subscriber callback: rebroadcast the IMU orientation over TF.

    Publishes frame "cns5000_frame" fixed 2 m above the "map" origin,
    rotated by the sensor's orientation quaternion, stamped with now().
    """
    br = tf.TransformBroadcaster()
    br.sendTransform((0, 0, 2), (data.orientation.x,data.orientation.y,data.orientation.z,data.orientation.w), rospy.Time.now(), "cns5000_frame", "map")
# Commented-out alternative: republish with roll/yaw swapped via Euler angles.
'''
br2 = tf.TransformBroadcaster()
r,p,y = tf.transformations.euler_from_quaternion([data.orientation.x,data.orientation.y,data.orientation.z,data.orientation.w])
br2.sendTransform((0, 0, 1), tf.transformations.quaternion_from_euler(y,p,r), rospy.Time.now(), "imu_frame2", "map")
'''
# Node entry point: subscribe to /imu_data and block until shutdown.
if __name__ == '__main__':
    rospy.init_node('imu_tf_broadcaster')
    rospy.Subscriber('/imu_data', Imu, callbackIMU)
    rospy.spin()
|
import urllib.request
from numpy.core.fromnumeric import searchsorted
import pandas as pd
from datetime import datetime,timedelta
import requests
import xmltodict
import openpyxl
# Build the data.go.kr COVID-19 per-province ("Sido") open-API request URL.
url_base = 'http://openapi.data.go.kr/openapi/service/rest/Covid19/getCovid19SidoInfStateJson?serviceKey='
url_serviceKey = 'XK5R2J%2B6nCIWNwl5Bn7iYOJ6fRZ4cyiryScBggttDGsPPsbXjvKJLlZ%2FOhHS3Uf2M3DHKjex3HMOdivbFB5Blg%3D%3D'
url_pages = '10'
url_start_date = '20200527'  # earliest date served by this endpoint
url_end_date = datetime.today().strftime('%Y%m%d')  # today as YYYYMMDD
# NOTE(review): the query parameter is spelled 'num0fRows' (digit zero);
# the official API documents 'numOfRows' -- confirm the service accepts it.
url = url_base + url_serviceKey + '&pageNo=1&num0fRows=' + url_pages + '&startCreateDt=' + url_start_date + '&endCreateDt=' + url_end_date
print("Raw Data url:" + url)
# Fetch the XML payload and flatten the item list into a DataFrame.
req = requests.get(url).content
xmlObject = xmltodict.parse(req)
dict_data = xmlObject['response']['body']['items']['item']
df_conf = pd.DataFrame(dict_data)
# Keep date, deaths, region, released-from-isolation and in-isolation
# columns, renamed to their Korean display headers.
df_conf1 = df_conf[['createDt','deathCnt','gubun','isolClearCnt','isolIngCnt']]
df_conf1.columns = ['날짜','사망자수','구분','격리해소','격리중']
#is_seoul = df_conf1['구분'] == '서울'
#is_daegu = df_conf1['구분'] == '대구'
#finalData = df_conf1[is_seoul | is_daegu]
# Filter rows whose date string contains the user's input, sort by deaths.
# (Prompt text: "enter the date to search for".)
dateVar = input('검색하실 날짜를 입력해 주세요: ')
contain_data = df_conf1['날짜'].str.contains(dateVar)
finalData_date = df_conf1[contain_data]
sortedDate = finalData_date.sort_values(by=['사망자수'])
# NOTE(review): reset_index(drop=True) returns a new frame; the result is
# discarded here, so sortedDate keeps its original index.
sortedDate.reset_index(drop=True)
print(sortedDate)
# Write the result to <fileName>.xlsx.  (Prompt text: "output file name".)
fileName = input('결과데이터 파일 이름: ')
with pd.ExcelWriter('./'+fileName+'.xlsx') as writer:
    sortedDate.to_excel(writer, sheet_name = 'raw_data')
# NOTE(review): disabled legacy processing kept as a module-level string
# literal (commented-out code).  It post-processes the nationwide endpoint
# ('stateDt'/'decideCnt' columns), not the per-province data this script
# fetches, so it cannot simply be re-enabled here.
'''
# 불러온 데이터 중 하루에 두 번 이상 데이터가 존재하는 경우를 대비해(오전, 오후), 하루 중 마지막에 발표한 데이터로 중복 처리
df_conf = df_conf.drop_duplicates(['stateDt'])
# 데이터를 날짜순으로 오름차순 정리
df_conf_1 = df_conf.sort_values(by='stateDt')
df_conf_1.to_excel('./data.xlsx',sheet_name=1)
# 공공데이터포털의 일일값의 합과 누적값에 차이 있어
# 명확한 가이드라인이 주어지지 않으면 누적값을 차분에 계산
# 숫자여야 할 열(누적확진자)이 object로 되어있으므로 숫자로 형변환 필요
df_conf_1.iloc[:,7] = df_conf_1.iloc[:,7].apply(pd.to_numeric)
# 누적확진자를 일일확진자로 변경
df_conf_1['daily_decideCnt'] = df_conf_1['decideCnt'].diff()
# 숫자여야 할 열(누적 사망자 수)이 object로 되어있으므로 숫자로 형변환
df_conf_1.iloc[:,6] = df_conf_1.iloc[:,6].apply(pd.to_numeric)
# 누적 사망자를 일일 사망자로 변경
df_conf_1['daily_deathCnt'] = df_conf_1['deathCnt'].diff()
# 숫자여야 할 열(누적검사수)이 object로 되어있으므로 숫자로 형변환 필요
df_conf_1.iloc[:,1] = df_conf_1.iloc[:,1].apply(pd.to_numeric)
# 누적검사수를 일일검사수로 변경
df_conf_1['daily_ExamCnt'] = df_conf_1['accExamCnt'].diff()
# 날짜, 확진자 수, 누적 확진자 수, 사망자 수, 누적 사망자 수, 검사자 수, 누적 검사자 수
# 1차 백신 접종자 수, 누적 1차 백신 접종자 수, 2차 백신 접종자 수, 누적 2차 백신 접종자 수
df_conf_2 = df_conf_1[['accDefRate','resutlNegCnt','stateDt','daily_decideCnt','decideCnt','daily_deathCnt','deathCnt','daily_ExamCnt','accExamCnt']]
df_conf_2.columns = ['누적 환진률','결과 음성 수','날짜','확진자 수','누적 확진자 수','사망자 수','누적 사망자 수','검사자 수','누적 검사자 수']
# 한국 데이터의 틀린 부분 수정 코드
# 공공데이터포털의 오픈API에서 불러오는 데이터에 수정이 있을 경우 삭제 가능
add_dat = pd.DataFrame({"날짜":['20200121','20200122','20200123','20200124','20200125','20200126','20200127','20200128','20200129',
'20200130','20200131','20200201','20200202','20200203','20200204','20200205'],
"확진자 수":[1,0,0,1,0,1,1,0,0,2,5,1,3,0,1,3],
"누적 확진자 수":[1,1,1,2,2,3,4,4,4,6,11,12,15,15,16,19]})
df_conf_3 = pd.concat([add_dat, df_conf_2.iloc[6:,]], ignore_index = True)
df_conf_3
with pd.ExcelWriter('./'+fileName+'.xlsx') as writer:
    df_conf_1.to_excel(writer, sheet_name = 'raw_data')
    df_conf_2.to_excel(writer, sheet_name = 'second')
    df_conf_3.to_excel(writer, sheet_name = 'final_data')
'''
|
from __future__ import absolute_import, division, unicode_literals
from six.moves.urllib.parse import urlencode
from twisted.trial.unittest import SynchronousTestCase
from twisted.internet.task import Clock
from mimic.core import MimicCore
from mimic.resource import MimicRoot
from mimic.test.helpers import json_request, request
class MailGunAPITests(SynchronousTestCase):
    """
    Tests for the mock Mailgun API exposed under
    ``/cloudmonitoring.rackspace.com/messages``.
    """
    def setUp(self):
        """
        Initialize the Mimic core and build the root resource under test.
        """
        self.core = MimicCore(Clock(), [])
        self.root = MimicRoot(self.core).app.resource()
    def create_message_successfully(self, root, data):
        """
        Helper: POST a form-encoded message and validate that the response
        code is 200.
        """
        (response, content) = self.successResultOf(json_request(
            self, root, b"POST", "/cloudmonitoring.rackspace.com/messages",
            urlencode(data).encode("utf-8")))
        self.assertEqual(200, response.code)
    def get_content_from_list_messages(self, root, to_filter=None):
        """
        Helper: GET the message list (optionally filtered by ``to``
        address), assert a 200 response, and return the JSON content.
        """
        url = "/cloudmonitoring.rackspace.com/messages"
        if to_filter:
            url = "/cloudmonitoring.rackspace.com/messages?to={0}".format(to_filter)
        (response, content) = self.successResultOf(json_request(
            self, root, b"GET", url))
        self.assertEqual(200, response.code)
        return content
    def test_mailgun_send_message(self):
        """
        ``/cloudmonitoring.rackspace.com/messages`` returns response code 200
        when a create message is successful.
        """
        self.create_message_successfully(
            self.root,
            {"to": "example@eg.com", "subject": "test"})
    def test_mailgun_send_message_receives_error_500(self):
        """
        ``/cloudmonitoring.rackspace.com/messages`` returns response code 500
        when the `to` address is `bademail@example.com`.
        """
        response = self.successResultOf(request(
            self, self.root, b"POST", "/cloudmonitoring.rackspace.com/messages",
            urlencode({"to": "bademail@example.com"}).encode("utf-8")))
        self.assertEqual(500, response.code)
    def test_mailgun_send_message_receives_error_400(self):
        """
        ``/cloudmonitoring.rackspace.com/messages`` returns response code 400
        when the `to` address is `failingemail@example.com`.
        """
        response = self.successResultOf(request(
            self, self.root, b"POST", "/cloudmonitoring.rackspace.com/messages",
            urlencode({"to": "failingemail@example.com"}).encode("utf-8")))
        self.assertEqual(400, response.code)
    def test_mailgun_get_messages(self):
        """
        ``/cloudmonitoring.rackspace.com/messages`` returns response code 200
        and returns the list of messages created thus far.
        """
        for x in range(5):
            self.create_message_successfully(
                self.root,
                {"to": "example{0}@eg.com".format(x), "subject": "test"})
        content = self.get_content_from_list_messages(self.root)
        self.assertEqual(len(content["items"]), 5)
    def test_mailgun_get_messages_by_filter(self):
        """
        ``/cloudmonitoring.rackspace.com/messages`` returns response code 200
        and returns the list of messages created thus far, filtered by `to`
        address.
        """
        for x in range(5):
            self.create_message_successfully(
                self.root,
                {"to": "example{0}@eg.com".format(x), "subject": "test"})
        content = self.get_content_from_list_messages(self.root, 'example0@eg.com')
        self.assertEqual(len(content["items"]), 1)
    def test_mailgun_get_messages_resulting_in_500s(self):
        """
        ``/cloudmonitoring.rackspace.com/messages/500s`` returns response code 200
        and count of requests that resulted in 500s during message creation.
        """
        for x in range(5):
            response = self.successResultOf(request(
                self, self.root, b"POST", "/cloudmonitoring.rackspace.com/messages",
                urlencode({"to": "bademail@example.com", "subject": "test"}).encode("utf-8")))
            self.assertEqual(500, response.code)
        (response, content) = self.successResultOf(json_request(
            self, self.root, b"GET", "/cloudmonitoring.rackspace.com/messages/500s"))
        self.assertEqual(200, response.code)
        self.assertEqual(content["count"], 5)
    def test_mailgun_get_message_header_for_message(self):
        """
        ``/cloudmonitoring.rackspace.com/messages/headers`` returns response
        code 200 and the headers received for a given `to` address.
        """
        self.create_message_successfully(
            self.root,
            {"to": "other-address@example.com", "h:X-State": ["OKAY"],
             "subject": "not what you're looking for"})
        self.create_message_successfully(
            self.root,
            {"to": "email@example.com", "h:X-State": ["WARNING"],
             "subject": "test"})
        (response, content) = self.successResultOf(json_request(
            self, self.root,
            b"GET", "/cloudmonitoring.rackspace.com/messages/headers?to=email@example.com"))
        self.assertEqual(200, response.code)
        self.assertTrue(content["email@example.com"])
    def test_mailgun_get_message_header_no_such_message(self):
        """
        The ``messages/headers`` endpoint returns a response code of 404 when
        no messages to the given address are found.
        """
        (response, content) = self.successResultOf(json_request(
            self, self.root,
            b"GET", "/cloudmonitoring.rackspace.com/"
            "messages/headers?to=email@example.com"))
        self.assertEqual(404, response.code)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
from functools import partial
import json
import traceback
import imlib as im
import numpy as np
import pylib
import tensorflow as tf
import tflib as tl
import data
import models
# ==============================================================================
# = param =
# ==============================================================================
parser = argparse.ArgumentParser()
parser.add_argument('--experiment_name', dest='experiment_name', help='experiment_name')
parser.add_argument('--test_int', dest='test_int', type=float, default=1.0, help='test_int')
args_ = parser.parse_args()
# Rebuild the test graph from the hyperparameters saved at training time.
with open('./output/%s/setting.txt' % args_.experiment_name) as f:
    args = json.load(f)
# model
atts = args['atts']            # attribute names being edited
n_att = len(atts)
img_size = args['img_size']
shortcut_layers = args['shortcut_layers']
inject_layers = args['inject_layers']
enc_dim = args['enc_dim']
dec_dim = args['dec_dim']
dis_dim = args['dis_dim']
dis_fc_dim = args['dis_fc_dim']
enc_layers = args['enc_layers']
dec_layers = args['dec_layers']
dis_layers = args['dis_layers']
# testing
thres_int = args['thres_int']  # attribute intensity used during training
test_int = args_.test_int      # intensity to apply at test time
# others
experiment_name = args_.experiment_name
# ==============================================================================
# =                                   graphs                                   =
# ==============================================================================
# data: test split, batch size 1
sess = tl.session()
te_data = data.Celeba('./data', atts, img_size, 1, part='test', sess=sess)
# models: encoder + decoder only (no discriminator needed for sampling)
Genc = partial(models.Genc, dim=enc_dim, n_layers=enc_layers)
Gdec = partial(models.Gdec, dim=dec_dim, n_layers=dec_layers, shortcut_layers=shortcut_layers, inject_layers=inject_layers)
# inputs
xa_sample = tf.placeholder(tf.float32, shape=[None, img_size, img_size, 3])
_b_sample = tf.placeholder(tf.float32, shape=[None, n_att])
# sample
x_sample = Gdec(Genc(xa_sample, is_training=False), _b_sample, is_training=False)
# ==============================================================================
# =                                    test                                    =
# ==============================================================================
# initialization
ckpt_dir = './output/%s/checkpoints' % experiment_name
try:
    tl.load_checkpoint(ckpt_dir, sess)
except:
    # NOTE(review): bare except also masks KeyboardInterrupt/SystemExit and
    # discards the original traceback -- prefer `except Exception` with
    # exception chaining.
    raise Exception(' [*] No checkpoint!')
# sample
try:
    for idx, batch in enumerate(te_data):
        xa_sample_ipt = batch[0]
        a_sample_ipt = batch[1]
        b_sample_ipt_list = [a_sample_ipt]  # the first is for reconstruction
        # One edited label vector per attribute: flip it, then resolve
        # conflicts between mutually exclusive attributes.
        for i in range(len(atts)):
            tmp = np.array(a_sample_ipt, copy=True)
            tmp[:, i] = 1 - tmp[:, i]  # inverse attribute
            tmp = data.Celeba.check_attribute_conflict(tmp, atts[i], atts)
            b_sample_ipt_list.append(tmp)
        # Montage starts with the input image plus a thin separator strip.
        x_sample_opt_list = [xa_sample_ipt, np.full((1, img_size, img_size // 10, 3), -1.0)]
        for i, b_sample_ipt in enumerate(b_sample_ipt_list):
            # Map {0,1} labels to {-thres_int, +thres_int}.
            _b_sample_ipt = (b_sample_ipt * 2 - 1) * thres_int
            if i > 0:  # i == 0 is for reconstruction
                # Scale only the flipped attribute to the requested intensity.
                _b_sample_ipt[..., i - 1] = _b_sample_ipt[..., i - 1] * test_int / thres_int
            x_sample_opt_list.append(sess.run(x_sample, feed_dict={xa_sample: xa_sample_ipt, _b_sample: _b_sample_ipt}))
        sample = np.concatenate(x_sample_opt_list, 2)
        save_dir = './output/%s/sample_testing' % experiment_name
        pylib.mkdir(save_dir)
        # 182638 is presumably the index of the first CelebA test image --
        # confirm against the data loader's split offsets.
        im.imwrite(sample.squeeze(0), '%s/%d.png' % (save_dir, idx + 182638))
        print('%d.png done!' % (idx + 182638))
except:
    # NOTE(review): bare except swallows all errors (incl. interrupts) after
    # printing the traceback; `except Exception` would be safer.
    traceback.print_exc()
finally:
    sess.close()
|
# write by yqyao
#
import os
import pickle
import os.path
import sys
import torch
import torch.utils.data as data
import torchvision.transforms as transforms
import cv2
import numpy as np
from PIL import Image
from data.config import mydataset as cfg
from sklearn.preprocessing import LabelEncoder, OneHotEncoder
class MyDataset(data.Dataset):
    """Image/label dataset driven by a '<root>/<phase>.txt' listing file.

    Each listing line holds an image path and, except in the 'test' phase,
    an integer label.  Images are loaded with OpenCV (BGR ndarray) and
    optionally passed through the given transforms.
    """

    def __init__(self, root, phase, tarnsform=None, target_transform=None,
                 dataset_name='MyDataset'):
        # 'tarnsform' spelling preserved: callers may pass it by keyword.
        self.root = root
        self.phase = phase
        self.tarnsform = tarnsform
        self.target_transform = target_transform
        self.name = dataset_name
        self.images_targets = []
        listing = os.path.join(root, self.phase + '.txt')
        with open(listing, "r") as handle:
            for raw in handle.readlines():
                fields = raw.strip().split()
                # Test listings carry no label column; use a dummy 0.
                label = 0 if self.phase == 'test' else int(fields[1])
                self.images_targets.append((fields[0], label))

    def __getitem__(self, index):
        path, target = self.images_targets[index]
        # img = Image.open(path).convert('RGB')
        img = cv2.imread(path)
        if self.target_transform is not None:
            target = self.target_transform(target)
        if self.tarnsform is not None:
            img = self.tarnsform(img)
        return img, target

    def __len__(self):
        return len(self.images_targets)
class MyWhaleDataset(data.Dataset):
    """Whale-ID dataset over a '<datafolder>/train' and '<datafolder>/<datatype>' layout.

    For 'train'/'val', samples come from `df` (image file names in the first
    column) and `y` (aligned labels); for 'test', every file in the datatype
    folder is used with a dummy all-zero label.
    """
    def __init__(self, datafolder, datatype='train', df=None, transform=None, y=None):
        # datafolder: dataset root.  datatype: 'train', 'val' or 'test'.
        # df: DataFrame of file names (required for train/val).
        # y: label array aligned with df (required for train/val).
        self.datafolder = datafolder
        self.datatype = datatype
        self.y = y
        if self.datatype == 'train' or self.datatype=='val':
            self.df = df.values
            # Both 'train' and 'val' read images from the 'train' folder.
            self.image_files_list = [s for s in os.listdir(os.path.join(datafolder,'train'))]
        else:
            self.image_files_list = [s for s in os.listdir(os.path.join(datafolder, datatype))]
        self.transform = transform
    def __len__(self):
        # train/val length follows df; test follows the directory listing.
        if self.datatype == 'train' or self.datatype =='val':
            return len(self.df)
        else:
            return len(self.image_files_list)
    def __getitem__(self, idx):
        # NOTE(review): img_name/label are only bound for 'train'/'val'/'test';
        # any other datatype hits a NameError below -- consider validating
        # datatype in __init__.
        if self.datatype == 'train' or self.datatype =='val':
            img_name = os.path.join(self.datafolder, 'train', self.df[idx][0])
            label = self.y[idx]
        elif self.datatype == 'test':
            img_name = os.path.join(self.datafolder, self.datatype, self.image_files_list[idx])
            # Dummy one-hot-shaped zero label; class count comes from data.config.
            label = np.zeros((cfg['num_classes'],))
        img = cv2.imread(img_name)
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        # NOTE(review): the transform is invoked albumentations-style
        # (transform(image=img)) and its full result (typically a dict) is
        # returned as-is -- confirm downstream consumers expect that.
        image = self.transform(image=img)
        if self.datatype == 'train'or self.datatype =='val':
            return image, label
        elif self.datatype == 'test':
            # so that the images will be in a correct order
            return image, label, self.image_files_list[idx]
def prepare_labels(y):
    """One-hot encode the label sequence; also return the fitted LabelEncoder.

    Adapted from: https://www.kaggle.com/pestipeti/keras-cnn-starter
    """
    label_encoder = LabelEncoder()
    as_integers = label_encoder.fit_transform(np.array(y))
    # OneHotEncoder expects a 2-D column of integer class ids.
    column = as_integers.reshape(len(as_integers), 1)
    onehot = OneHotEncoder(sparse=False).fit_transform(column)
    return onehot, label_encoder
|
from .AssetBrowser import AssetBrowser
from bsp.leveleditor import MaterialPool
from PyQt5 import QtCore, QtGui
class MaterialBrowser(AssetBrowser):
    """Asset browser specialized for material (.mat) files."""

    FileExtensions = ["mat"]
    # Class-level cache mapping filename -> QIcon, shared by all instances.
    Thumbnails = {}

    def getThumbnail(self, filename, context):
        # Delay the thumbnail loading so we don't freeze the application loading
        # a ton of materials at once, and so we don't get a stack overflow calling
        # this method a bunch of times.
        cached = self.Thumbnails.get(filename)
        if cached is not None:
            return cached
        ref = MaterialPool.getMaterial(filename)
        if ref.pixmap:
            scaled = ref.pixmap.scaled(96, 96, QtCore.Qt.KeepAspectRatio,
                                       QtCore.Qt.SmoothTransformation)
            thumbnail = QtGui.QIcon(scaled)
        else:
            thumbnail = QtGui.QIcon("Not Found")
        self.Thumbnails[filename] = thumbnail
        return thumbnail
|
def coin_problem(value, coin_list):
coin_list = sorted(coin_list, reverse = True)
count_coin = [0 for _ in range(len(coin_list))]
for idx, coin in enumerate(coin_list):
num_coin = int(value // coin)
count_coin[idx] = num_coin
value -= coin * num_coin
return count_coin
def fractional_knapsack_problem(weights, values, max_weight):
    """Greedy fractional knapsack: fill by descending value-per-weight.

    Returns [((weight, value), amount_taken), ...] in the order items were
    considered; items that no longer fit get an amount of 0.
    """
    items = sorted(zip(weights, values),
                   key=lambda wv: wv[1] / wv[0], reverse=True)
    taken = []
    for weight, value in items:
        # Take as much of this item as the remaining capacity allows.
        amount = min(max_weight, weight)
        max_weight -= amount
        taken.append(((weight, value), amount))
    return taken
if __name__ == '__main__':
    # Demo runs: make change for 4720, then fill a 30-unit knapsack.
    print(coin_problem(4720, [500, 100, 50, 1]))
    print(fractional_knapsack_problem(weights = [10,15,20,25,30], values=[10,12,10,8,5], max_weight = 30))
|
# Interactive (pylab-style) session: plot MUPS-3/MUPS-4 valve thermistor
# temperatures around heater cycles on day 2013:002.
# NOTE(review): relies on names injected by the host environment -- `fetch`
# (engineering-archive telemetry fetch) and pylab's close/subplot/title/
# legend are not imported here; confirm this runs under `ipython --pylab`.
x = fetch.Msidset(['PM3THV1T','PM3THV2T','PM4THV1T','PM4THV2T'], '2013:002:05:00:00.000','2013:002:07:00:00.000')
close('all')
# Top panel: MUPS-3 thermistors.
subplot(2,1,1)
x['PM3THV1T'].plot('b', label='PM3THV1T')
x['PM3THV2T'].plot('r', label='PM3THV2T')
title('Sample MUPS-3 Temperatures during Heater Cycles')
legend()
# Bottom panel: MUPS-4 thermistors.
subplot(2,1,2)
x['PM4THV1T'].plot('b', label='PM4THV1T')
x['PM4THV2T'].plot('r', label='PM4THV2T')
title('Sample MUPS-4 Temperatures during Heater Cycles')
legend()
__author__ = 'sudoz'
# Smoke-test the local `mymodule`: call its greeting and show its version.
import mymodule
mymodule.say_hi()
print('version is', mymodule.__version__)
# Sean Kim
# Unit 3 Review Problem 4
def mean (my_list):
    """Return the arithmetic mean of the numbers in my_list.

    Returns 0 for an empty list instead of raising ZeroDivisionError.

    Bug fixed: the original iterated the module-level global `list` (always
    empty) instead of the `my_list` parameter, so it returned 0 for every
    input; it also returned the sum rather than the mean its name promises.
    """
    if not my_list:
        return 0
    total = 0
    for num in my_list:
        total += num
    return total / len(my_list)
# NOTE(review): rebinding the builtin name `list` shadows the list type for
# the rest of this module; rename (e.g. `numbers = []`) when safe to do so.
list = []
|
import copy
import sys
# Puzzle input: the Intcode program as a comma-separated string
# (Advent of Code 2019, day 2).
INPUT = "1,0,0,3,1,1,2,3,1,3,4,3,1,5,0,3,2,1,10,19,1,6,19,23,1,10,23,27,2,27,13,31,1,31,6,35,2,6,35,39,1,39,5,43,1,6,43,47,2,6,47,51,1,51,5,55,2,55,9,59,1,6,59,63,1,9,63,67,1,67,10,71,2,9,71,75,1,6,75,79,1,5,79,83,2,83,10,87,1,87,5,91,1,91,9,95,1,6,95,99,2,99,10,103,1,103,5,107,2,107,6,111,1,111,5,115,1,9,115,119,2,119,10,123,1,6,123,127,2,13,127,131,1,131,6,135,1,135,10,139,1,13,139,143,1,143,13,147,1,5,147,151,1,151,2,155,1,155,5,0,99,2,0,14,0"
def execute(program, noun, verb):
    """Run an Intcode program and return the value left at address 0.

    The program is copied so the caller's list is not mutated; addresses 1
    and 2 are seeded with noun/verb per the Advent of Code day 2 spec.
    Opcode 1 adds, 2 multiplies, 99 halts; operands are positional.

    Bug fixed: the original used a Python-2-only `print` statement, making
    the whole function a SyntaxError on Python 3; the parenthesized
    single-argument form below behaves identically on both versions.
    """
    memory = copy.copy(program)
    memory[1] = noun
    memory[2] = verb
    pc = 0
    while (memory[pc] != 99):
        opcode = memory[pc]
        op1 = memory[pc+1]
        op2 = memory[pc+2]
        dest = memory[pc+3]
        if opcode == 1:
            memory[dest] = memory[op1] + memory[op2]
        elif opcode == 2:
            memory[dest] = memory[op1] * memory[op2]
        else:
            # Unknown opcode: report and stop rather than misinterpret memory.
            print("invalid opcode %d" % opcode)
            break
        pc += 4
    return memory[0]
# Parse the program once; execute() copies it per run, so it stays pristine.
program = [int(t) for t in INPUT.split(',')]
# Part 1: run with the documented inputs noun=12, verb=2.
# (Fixed: the original Python-2 `print expr` statements are SyntaxErrors on
# Python 3; the forms below behave the same on both versions.)
print(execute(program, 12, 2))
# Part 2: brute-force the noun/verb pair that yields the target output.
for noun in range(0, 100):
    print(noun)  # progress indicator
    for verb in range(0, 100):
        if execute(program, noun, verb) == 19690720:
            print("%d %d %d" % (noun, verb, 100*noun + verb))
            sys.exit(0)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.