blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
69
| license_type
stringclasses 2
values | repo_name
stringlengths 5
118
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
63
| visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 2.91k
686M
โ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 213
values | src_encoding
stringclasses 30
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 2
10.3M
| extension
stringclasses 246
values | content
stringlengths 2
10.3M
| authors
listlengths 1
1
| author_id
stringlengths 0
212
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0139a90daa7aba09474a438506fda874d445904a
|
71df0a69bcfba49c7a5d0bff5fcd314942f10541
|
/viewStatsPage.py
|
7dc99083a1166f8666a42bbbede0f46c6385f17f
|
[] |
no_license
|
linzinha/lvlUP
|
24afde28362e62e04ef5c41ffcbd4f44fa4c4ad8
|
17fca44c02c92a8a8a25520b794187c19b6c7ada
|
refs/heads/main
| 2023-05-29T12:36:53.635989
| 2021-06-13T13:36:22
| 2021-06-13T13:36:22
| 376,544,097
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,358
|
py
|
#########
# Imports
#########
import json
import time
import tkinter as tk
from tkinter import *
################
# Open JSON File
################
# Load the saved player stats once at import time.  The with-statement
# closes the file automatically, so the explicit close() call the original
# made was redundant and has been removed.  The handle is also renamed:
# "string" shadowed the stdlib `string` module name.
with open('stats.json', 'r') as stats_file:
    my_dict = json.load(stats_file)
####################
# Global Variables
####################
p0 = my_dict['points']    # activity -> point-value mapping
p1 = my_dict['player1']   # player 1's record (name, week/month scores, totals)
p2 = my_dict['player2']   # player 2's record
items = list(p0.items())
bottom = len(items) + 1   # first free grid row below the activity rows
timeStr = time.strftime("%Y-%m-%d_%H%M")  # timestamp for archive filenames
lxVarList = []            # not used in this file's visible code -- kept for importers
activityList = []         # not used in this file's visible code -- kept for importers
# Player 1
# NOTE(review): indexing list(p1.items())[2..4] assumes the JSON object keeps
# its keys in a fixed order (week stats 3rd, month stats 4th, totals 5th) --
# verify against the structure of stats.json.
p1JSON = list(p1.items())
p1WeekStats = p1JSON[2]
p1MonthStats = p1JSON[3]
p1TotalJSON = p1JSON[4]
p1statListWeekScores = p1WeekStats[1]    # {activity: weekly score}
p1statListMonthScores = p1MonthStats[1]  # {activity: monthly score}
p1Totals = p1TotalJSON[1]
p1TotalWeek = p1Totals['total week']
# Player 2 (mirrors the player-1 extraction above)
p2JSON = list(p2.items())
p2WeekStats = p2JSON[2]
p2MonthStats = p2JSON[3]
p2TotalJSON = p2JSON[4]
p2statListWeekScores = p2WeekStats[1]
p2statListMonthScores = p2MonthStats[1]
p2Totals = p2TotalJSON[1]
p2TotalWeek = p2Totals['total week']
def viewStats():
    """Open the stats window.

    Shows each activity with both players' week and month points in a grid,
    and offers buttons to clear the weekly or monthly scores (persisting the
    change to stats.json plus a timestamped archive copy), go back to the
    main page, or quit.  Relies on the module-level p1/p2 score dicts.
    """
    viewS = tk.Tk()
    viewS.configure(bg="turquoise")
    viewS.geometry("545x700")
    viewS.title("Lvl^")
    # No parent widget given, so this label attaches to the default root
    # (viewS, the only Tk instance at this point).
    addPGreeting = tk.Label(text="Current Stats:", bg="mediumturquoise", fg="black", font="10", width=60, pady=20)
    addPGreeting.grid(row=0, columnspan=6, sticky=EW)
    ####################
    # Columns
    ####################
    # Player name headers (each spans a Week+Month column pair).
    p1PlayerOne = tk.Label(viewS, text=p1['name'], width=6)
    p1PlayerOne.grid(row=1, column=1, columnspan=2, sticky=EW)
    p2PlayerTwo = tk.Label(viewS, text=p2['name'], width=6)
    p2PlayerTwo.grid(row=1, column=3, columnspan=2, sticky=EW)
    # Player1 column sub-headers
    p1Week = tk.Label(viewS, text="Week", width=6)
    p1Week.grid(row=2, column=1, sticky=EW)
    p1Month = tk.Label(viewS, text="Month", width=6)
    p1Month.grid(row=2, column=2, sticky=EW)
    # Player2 column sub-headers
    p2Week = tk.Label(viewS, text="Week", width=6)
    p2Week.grid(row=2, column=3, sticky=EW)
    p2Month = tk.Label(viewS, text="Month", width=6)
    p2Month.grid(row=2, column=4, sticky=EW)
    #############################
    # Populates the Activity List
    #############################
    # One row per activity name, starting under the headers at grid row 3.
    row = 3
    for category in p1statListWeekScores:
        labelEntry = tk.Label(viewS, text=category, width=14)
        labelEntry.grid(row=row, column=0, sticky=EW, pady=3, padx=18)
        row += 1
    def changeWindow():
        # Close this window and return to the main page.  The import is
        # deferred to call time to avoid a circular import with main.
        viewS.destroy()
        from main import mainPage
        mainPage()
    ################
    # Player 1 Stats
    ################
    # week scores, aligned with the activity rows (same starting row 3)
    row = 3
    for stat in p1statListWeekScores:
        point = str(p1statListWeekScores[stat])
        p1WeekPoints = tk.Label(viewS, text=point, width=6)
        p1WeekPoints.grid(row=row, column=1, sticky=EW)
        row += 1
    # month scores
    row = 3
    for stat in p1statListMonthScores:
        point = str(p1statListMonthScores[stat])
        p1WeekPoints = tk.Label(viewS, text=point, width=6)
        p1WeekPoints.grid(row=row, column=2, sticky=EW)
        row += 1
    ################
    # Player 2 Stats
    ################
    # week scores
    row = 3
    for stat in p2statListWeekScores:
        point = str(p2statListWeekScores[stat])
        p2WeekPoints = tk.Label(viewS, text=point, width=6)
        p2WeekPoints.grid(row=row, column=3, sticky=EW)
        row += 1
    # month scores
    row = 3
    for stat in p2statListMonthScores:
        point = str(p2statListMonthScores[stat])
        p2WeekPoints = tk.Label(viewS, text=point, width=6)
        p2WeekPoints.grid(row=row, column=4, sticky=EW)
        row += 1
    ############
    # Clear Week
    ############
    def cWeek():
        """Zero both players' weekly scores, refresh the displayed labels,
        then write stats.json and a timestamped archive copy."""
        row = 3
        for stat in p1statListWeekScores:
            p1statListWeekScores[stat] = 0
            # Overlay a fresh label showing the zeroed score.
            p1WeekPoints = tk.Label(viewS, text=p1statListWeekScores[stat], width=6)
            p1WeekPoints.grid(row=row, column=1, sticky=EW)
            row += 1
        p1Totals['total week'] = 0
        row = 3
        for stat in p2statListWeekScores:
            p2statListWeekScores[stat] = 0
            p2WeekPoints = tk.Label(viewS, text=p2statListWeekScores[stat], width=6)
            p2WeekPoints.grid(row=row, column=3, sticky=EW)
            row += 1
        p2Totals['total week'] = 0
        # Persist the mutated my_dict (the score dicts alias into it).
        a_file = open("stats.json", "w")
        json.dump(my_dict, a_file)
        a_file.close()
        archive = open("stats" + timeStr + ".json", "w")
        json.dump(my_dict, archive)
        archive.close()
    #############
    # Clear Month
    #############
    def cMonth():
        """Zero both players' monthly scores, refresh the displayed labels,
        then write stats.json and a timestamped archive copy."""
        row = 3
        for stat in p1statListMonthScores:
            p1statListMonthScores[stat] = 0
            p1MonthPoints = tk.Label(viewS, text=p1statListMonthScores[stat], width=6)
            p1MonthPoints.grid(row=row, column=2, sticky=EW)
            row += 1
        p1Totals['total month'] = 0
        row = 3
        for stat in p2statListMonthScores:
            p2statListMonthScores[stat] = 0
            p2MonthPoints = tk.Label(viewS, text=p2statListMonthScores[stat], width=6)
            p2MonthPoints.grid(row=row, column=4, sticky=EW)
            row += 1
        p2Totals['total month'] = 0
        a_file = open("stats.json", "w")
        json.dump(my_dict, a_file)
        a_file.close()
        archive = open("stats" + timeStr + ".json", "w")
        json.dump(my_dict, archive)
        archive.close()
    ##########################
    # Go Back and Quit Buttons
    ##########################
    # em1/em2 are empty spacer labels matching the background colour.
    em1 = tk.Label(viewS, text="", width=40, bg="turquoise")
    em1.grid(row=bottom + 1, columnspan=5, sticky=EW, pady=3, padx=18)
    clearWeek = Button(viewS, text="Clear Week", command=cWeek, width=12)
    clearWeek.grid(row=bottom + 2, column=1, columnspan=2, sticky=EW, padx=6)
    clearMonth = Button(viewS, text="Clear Month", command=cMonth, width=12)
    clearMonth.grid(row=bottom + 2, column=3, columnspan=2, sticky=EW, padx=6)
    em2 = tk.Label(viewS, text="", width=40, bg="turquoise")
    em2.grid(row=bottom + 3, columnspan=5, sticky=EW, pady=3, padx=18)
    goBack = Button(viewS, text="Go back", command=lambda *args: changeWindow(), width=30)
    goBack.grid(row=bottom + 4, column=1, columnspan=4, sticky=EW, padx=10)
    quitG = Button(viewS, text="Quit", command=viewS.destroy, width=30)
    quitG.grid(row=bottom + 5, column=1, columnspan=4, sticky=EW, padx=10)
    viewS.mainloop()
|
[
"noreply@github.com"
] |
linzinha.noreply@github.com
|
e33487b216736e6059584d1fa1c040ace6df1cc7
|
5bfbc89974b7cb29e476c5c8e18d6e363019aacf
|
/Example/sentiment_lstm_regression.py
|
83642cf1479c949bec11858edf1decc1c74e671f
|
[
"MIT"
] |
permissive
|
lagleki/BayesianRNN
|
1cec3f39bb4bc41a965d0443f7b01e1fd4186b9a
|
0426c503438aa9106c676e8f68e85aa9f16bd05b
|
refs/heads/master
| 2021-09-06T07:27:11.933093
| 2018-02-03T19:56:02
| 2018-02-03T19:56:02
| 120,124,468
| 0
| 0
| null | 2018-02-03T19:55:26
| 2018-02-03T19:55:26
| null |
UTF-8
|
Python
| false
| false
| 5,025
|
py
|
# Train a Bayesian LSTM on the IMDB sentiment classification task.
# To use the GPU:
#   THEANO_FLAGS=mode=FAST_RUN,device=gpu,floatX=float32 python imdb_lstm_regression.py
# To speed up Theano, create a ram disk:
#   mount -t tmpfs -o size=512m tmpfs /mnt/ramdisk
# Then add flag THEANO_FLAGS='base_compiledir=/mnt/ramdisk'
#
# NOTE(review): Python 2-era script (uses `xrange` below) written against the
# old Keras 1.x API (W_regularizer, dropout_W/dropout_U, nb_epoch) with the
# Theano backend.
from __future__ import absolute_import
from __future__ import print_function
import numpy as np
import sys
import theano
from callbacks import ModelTest
from dataset import loader
from keras.optimizers import SGD, RMSprop, Adagrad
from keras.models import Sequential
from keras.layers.core import Dense, Dropout
from keras.layers.embeddings import Embedding
from keras.layers.recurrent import LSTM, GRU, SimpleRNN
from keras.regularizers import l2
# Process inputs: dropout probabilities, weight decay, batch size, and
# sequence cutoff come from the command line; defaults are injected into
# sys.argv when none are given.
if len(sys.argv) == 1:
    print("Expected args: p_W, p_U, p_dense, p_emb, weight_decay, batch_size, maxlen")
    print("Using default args:")
    # sys.argv = ["", "0.", "0.", "0.", "0.", "1e-4", "128", "200"]
    sys.argv = ["", "0.25", "0.25", "0.25", "0.25", "1e-4", "128", "200"]
args = [float(a) for a in sys.argv[1:]]
print(args)
p_W, p_U, p_dense, p_emb, weight_decay, batch_size, maxlen = args
batch_size = int(batch_size)
maxlen = int(maxlen)
# Experiment-name string encoding all hyperparameters (used for output files).
folder = "/scratch/home/Projects/rnn_dropout/exps/"
filename = ("sa_DropoutLSTM_pW_%.2f_pU_%.2f_pDense_%.2f_pEmb_%.2f_reg_%f_batch_size_%d_cutoff_%d_epochs"
            % (p_W, p_U, p_dense, p_emb, weight_decay, batch_size, maxlen))
print(filename)
# Global params:
nb_words = 20000   # vocabulary size
skip_top = 0
test_split = 0.2
init_seed = 0
global_seed = 0
# Load data:
print("Loading data...")
dataset = loader(init_seed, maxlen, nb_words, skip_top, test_split)
X_train, X_test, Y_train, Y_test = dataset.X_train, dataset.X_test, dataset.Y_train, dataset.Y_test
mean_y_train, std_y_train = dataset.mean_y_train, dataset.std_y_train
# Set seed:
np.random.seed(global_seed)
# Build model: Embedding -> LSTM -> Dropout -> Dense(1), all L2-regularized,
# with the dropout probabilities taken from the command line.
print('Build model...')
model = Sequential()
model.add(Embedding(nb_words + dataset.index_from, 128, W_regularizer=l2(weight_decay),
                    dropout=p_emb, input_length=maxlen, batch_input_shape=(batch_size, maxlen)))
model.add(LSTM(128, W_regularizer=l2(weight_decay), U_regularizer=l2(weight_decay),
               b_regularizer=l2(weight_decay), dropout_W=p_W, dropout_U=p_U))
model.add(Dropout(p_dense))
model.add(Dense(1, W_regularizer=l2(weight_decay), b_regularizer=l2(weight_decay)))
#optimiser = SGD(lr=0.1, decay=1e-6, momentum=0.9, nesterov=False)
optimiser = 'adam'
model.compile(loss='mean_squared_error', optimizer=optimiser)
# Potentially load weights
# model.load_weights("path")
# Train model
print("Train...")
# Theano
# ModelTest callbacks evaluate on (de-normalized) train and test targets
# every epoch.
modeltest_1 = ModelTest(X_train[:100],
                        mean_y_train + std_y_train * np.atleast_2d(Y_train[:100]).T,
                        test_every_X_epochs=1, verbose=0, loss='euclidean',
                        mean_y_train=mean_y_train, std_y_train=std_y_train)
modeltest_2 = ModelTest(X_test, np.atleast_2d(Y_test).T, test_every_X_epochs=1,
                        verbose=0, loss='euclidean',
                        mean_y_train=mean_y_train, std_y_train=std_y_train)
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=250,
          callbacks=[modeltest_1, modeltest_2])
# # Tensorflow
# modeltest_1 = ModelTest(X_train[:batch_size],
#                         mean_y_train + std_y_train * np.atleast_2d(Y_train[:batch_size]).T,
#                         test_every_X_epochs=1, verbose=0, loss='euclidean',
#                         mean_y_train=mean_y_train, std_y_train=std_y_train, batch_size=batch_size)
# tensorflow_test_size = batch_size * (len(X_test) / batch_size)
# modeltest_2 = ModelTest(X_test[:tensorflow_test_size], np.atleast_2d(Y_test[:tensorflow_test_size]).T,
#                         test_every_X_epochs=1, verbose=0, loss='euclidean',
#                         mean_y_train=mean_y_train, std_y_train=std_y_train, batch_size=batch_size)
# tensorflow_train_size = batch_size * (len(X_train) / batch_size)
# model.fit(X_train[:tensorflow_train_size], Y_train[:tensorflow_train_size],
#           batch_size=batch_size, nb_epoch=250, callbacks=[modeltest_1, modeltest_2])
# Potentially save weights
# model.save_weights("path", overwrite=True)
# Evaluate model
# Dropout approximation for training data: RMSE of de-normalized predictions.
standard_prob = model.predict(X_train, batch_size=500, verbose=1)
print(np.mean(((mean_y_train + std_y_train * np.atleast_2d(Y_train).T)
               - (mean_y_train + std_y_train * standard_prob))**2, 0)**0.5)
# Dropout approximation for test data:
standard_prob = model.predict(X_test, batch_size=500, verbose=1)
print(np.mean((np.atleast_2d(Y_test).T - (mean_y_train + std_y_train * standard_prob))**2, 0)**0.5)
# MC dropout for test data: average T stochastic forward passes, then score.
T = 50
prob = np.array([modeltest_2.predict_stochastic(X_test, batch_size=500, verbose=0)
                 for _ in xrange(T)])
prob_mean = np.mean(prob, 0)
print(np.mean((np.atleast_2d(Y_test).T - (mean_y_train + std_y_train * prob_mean))**2, 0)**0.5)
|
[
"yaringal@gmail.com"
] |
yaringal@gmail.com
|
2be7b229988ee87da8b2f46796797fd123be5e00
|
20722df255492f591b1a988c7499e1eab6c71a90
|
/Ch1-Image_Recognition/stacked_autoencoders_for_mnist_classification.py
|
b7276e8c60f5905ae6df8947b5ada8ac112426d5
|
[] |
no_license
|
solaris33/TensorFlow_Examples
|
551f721aa4c97b4735496d5a1aecf742de081fa9
|
122116b268badf27b605d7a3857215474ab99e6a
|
refs/heads/master
| 2020-12-24T19:13:20.622985
| 2018-11-01T12:44:41
| 2018-11-01T12:44:41
| 58,610,121
| 4
| 1
| null | 2016-05-12T05:27:23
| 2016-05-12T05:20:56
| null |
UTF-8
|
Python
| false
| false
| 6,234
|
py
|
# -*- coding: utf-8 -*-
# Stacked AutoEncoder example for MNIST digit classification.
# (Comments translated to English from the Korean original.)
# Written against the TensorFlow 1.x API (placeholders, Session,
# tf.initialize_all_variables).
from __future__ import division, print_function, absolute_import
# Import the required libraries.
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
# Download the MNIST data.
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
# Hyperparameters.
learning_rate_RMSProp = 0.01
learning_rate_Gradient_Descent = 0.5
training_epochs = 400  # number of epochs (iterations)
batch_size = 256
display_step = 1  # print a log line every this many epochs
examples_to_show = 10  # how many reconstructed images to display
n_hidden_1 = 200  # number of nodes in the first hidden layer
n_hidden_2 = 200  # number of nodes in the second hidden layer
n_input = 784  # MNIST data input (image size: 28*28)
# Parameters for the Stacked Autoencoder.
# Input placeholder.
X = tf.placeholder("float", [None, n_input])
# Weights and biases for hidden layer 1.
Wh_1 = tf.Variable(tf.random_normal([n_input, n_hidden_1]))
bh_1 = tf.Variable(tf.random_normal([n_hidden_1]))
h_1 = tf.nn.sigmoid(tf.matmul(X, Wh_1) +bh_1)  # hidden layer 1 activation (sigmoid)
# Weights and biases for hidden layer 2.
Wh_2 = tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2]))
bh_2 = tf.Variable(tf.random_normal([n_hidden_2]))
h_2 = tf.nn.sigmoid(tf.matmul(h_1, Wh_2) +bh_2)  # hidden layer 2 activation (sigmoid)
# Weights and biases for the output (reconstruction) layer.
Wo = tf.Variable(tf.random_normal([n_hidden_2, n_input]))
bo = tf.Variable(tf.random_normal([n_input]))
y_pred = tf.nn.sigmoid(tf.matmul(h_2,Wo) + bo)  # output layer activation (sigmoid)
# True output for the autoencoder is the input itself.
y_true = X
# Parameters for the softmax classifier.
W = tf.Variable(tf.zeros([n_hidden_2, 10]))
b = tf.Variable(tf.zeros([10]))
y = tf.nn.softmax(tf.matmul(h_2, W) + b)  # predicted output: feeds on hidden layer 2's activation
y_ = tf.placeholder(tf.float32, [None, 10])  # true output (labels)
# Optimization setup.
# Autoencoder optimization.
cost = tf.reduce_mean(tf.pow(y_true - y_pred, 2))  # squared-error loss
optimizer = tf.train.RMSPropOptimizer(learning_rate_RMSProp).minimize(cost)
# Softmax classifier optimization.
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1]))  # cross-entropy loss
train_step = tf.train.GradientDescentOptimizer(learning_rate_Gradient_Descent).minimize(cross_entropy)
# Fine-tuning optimization (cross-entropy + reconstruction loss combined).
finetuning_cost = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1])) + tf.reduce_mean(tf.pow(y_true - y_pred, 2))  # cross-entropy loss + squared-error loss
finetuning_train_step = tf.train.GradientDescentOptimizer(learning_rate_Gradient_Descent).minimize(finetuning_cost)
# Step 1: Stacked Autoencoder pre-training.
# Initialize the variables.
init = tf.initialize_all_variables()
# Launch the graph.
sess = tf.Session()
sess.run(init)
total_batch = int(mnist.train.num_examples/batch_size)
# Start training.
for epoch in range(training_epochs):
    # Loop over all batches.
    for i in range(total_batch):
        batch_xs, batch_ys = mnist.train.next_batch(batch_size)
        # Run one training step on this batch (labels unused here).
        _, cost_value = sess.run([optimizer, cost], feed_dict={X: batch_xs})
    # Print a log line every display_step epochs.
    if epoch % display_step == 0:
        print("Epoch:", '%04d' % (epoch+1), "cost=", "{:.9f}".format(cost_value))
print("Stacked Autoencoder pre-training Optimization Finished!")
# Step 2: reconstruct test-set images with the autoencoder.
reconstructed_image = sess.run(y_pred, feed_dict={X: mnist.test.images[:examples_to_show]})
# Compare the original images with the reconstructed ones.
f, a = plt.subplots(2, 10, figsize=(10, 2))
for i in range(examples_to_show):
    a[0][i].imshow(np.reshape(mnist.test.images[i], (28, 28)))
    a[1][i].imshow(np.reshape(reconstructed_image[i], (28, 28)))
f.show()
plt.draw()
#plt.waitforbuttonpress()  # pause until a button is pressed
f.savefig('reconstructed_mnist_image.png')  # save the reconstruction result as a png
# Step 3: train the softmax classifier.
for i in range(1000):
    batch_xs, batch_ys = mnist.train.next_batch(100)
    sess.run(train_step, feed_dict={X: batch_xs, y_: batch_ys})
print("Softmax Classifier Optimization Finished!")
# Step 4: print how accurate the trained model is (before fine-tuning).
correct_prediction = tf.equal(tf.argmax(y,1), tf.argmax(y_,1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
print("Accuracy(before fine-tuning): ")  # Accuracy ~ 0.9282
print(sess.run(accuracy, feed_dict={X: mnist.test.images, y_: mnist.test.labels}))
# Step 5: fine-tune the softmax model.
# Start training.
for epoch in range(training_epochs):
    # Loop over all batches.
    for i in range(total_batch):
        batch_xs, batch_ys = mnist.train.next_batch(batch_size)
        # Run one fine-tuning step on this batch.
        _, cost_value = sess.run([finetuning_train_step, finetuning_cost], feed_dict={X: batch_xs, y_: batch_ys})
    # Print a log line every display_step epochs.
    if epoch % display_step == 0:
        print("Epoch:", '%04d' % (epoch+1), "cost=", "{:.9f}".format(cost_value))
print("Fine-tuning softmax model Optimization Finished!")
# Step 6: print how accurate the trained model is (after fine-tuning).
print("Accuracy(after fine-tuning): ")  # Accuracy ~ 0.9714
print(sess.run(accuracy, feed_dict={X: mnist.test.images, y_: mnist.test.labels}))
|
[
"jinhoyang@snu.ac.kr"
] |
jinhoyang@snu.ac.kr
|
92a5da607045107bbee7fd23ac0e9e1ec54453da
|
0d2811497b377bc3530c3ddc10f4e948ef3ab902
|
/launcher/common/Keys.py
|
c7b1c24ad592f0a93219e61683c4b41f827cacc2
|
[
"BSD-3-Clause"
] |
permissive
|
liblit/sampler
|
a475b44d2a257bc9a2cf93bb5d04e94abc9d15b2
|
eaedba51ee8367b9b355e6f85a6c677878160d49
|
refs/heads/master
| 2021-04-24T21:30:53.227637
| 2018-10-08T02:00:59
| 2018-10-08T02:00:59
| 117,015,273
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 287
|
py
|
ASKED = 'asked'
MASTER = 'enabled'
def settings():
    """Return the Gio.Settings object for the 'edu.wisc.cs.cbi' schema.

    PyGObject versions before 3.14 (e.g. the one shipped with Fedora 20)
    spell the constructor keyword 'schema' rather than 'schema_id', so the
    keyword is chosen at call time from gi.version_info.
    """
    # always use "schema_id" once Fedora 20 is no longer supported
    import gi
    from gi.repository import Gio
    if gi.version_info >= (3, 14):
        kwargs = {'schema_id': 'edu.wisc.cs.cbi'}
    else:
        kwargs = {'schema': 'edu.wisc.cs.cbi'}
    return Gio.Settings(**kwargs)
|
[
"liblit@cs.wisc.edu"
] |
liblit@cs.wisc.edu
|
ed5a1888ab5c1d3ceab8561da97ea4f49ae630dc
|
5f635d35ec6837142ecdd709af32646c936e36f5
|
/ship.py
|
d7267d61d06ffbec16ed248ac045bdebe3b668bd
|
[] |
no_license
|
SLinShi/AlinenInvasion
|
e6b61fc1b40331f4eb7c38c6be5476e9463a40bc
|
468adcd796438cd83564ba1e4af79ed586d4f940
|
refs/heads/main
| 2023-02-25T00:22:51.156284
| 2021-02-03T04:24:16
| 2021-02-03T04:24:16
| 315,885,528
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,755
|
py
|
import pygame
class Ship:
    """Manage the player's ship: image, position, movement flags, drawing.

    (Comments translated to English from the Chinese original.)
    """
    def __init__(self, ai_game):
        # Initialise the ship and its starting position from the game object.
        self.screen = ai_game.screen
        self.screen_rect = ai_game.screen.get_rect()
        # Load the ship image and get its bounding rectangle.
        # NOTE(review): absolute path ties this to one machine -- consider a
        # project-relative path instead.
        self.image = pygame.image.load(
            "/Users/shilin/Documents/VSCode/Python/alien_incasion/images/ship.png"
        )
        # Resize the ship image.
        self.image = pygame.transform.scale(self.image, (50, 50))
        self.rect = self.image.get_rect()
        # Place the ship at the bottom centre of the window.
        self.rect.midbottom = self.screen_rect.midbottom
        # Movement flags.
        self.moving_right = False
        self.moving_left = False
        self.moving_up = False
        self.moving_down = False
        self.settings = ai_game.settings
        # Store the ship's exact position as floats (rect coords are ints).
        self.x = float(self.rect.x)
        self.y = float(self.rect.y)
    def update(self):
        # Adjust the ship's position based on the movement flags,
        # clamped so the ship stays inside the screen rectangle.
        if self.moving_right and self.rect.right < self.screen_rect.right:
            self.x += self.settings.ship_speed  # update the self.x value
        if self.moving_left and self.rect.left > 0:
            self.x -= self.settings.ship_speed
        if self.moving_up and self.rect.top > 0:
            self.y -= self.settings.ship_speed  # update the self.y value
        if self.moving_down and self.rect.bottom < self.screen_rect.bottom:
            self.y += self.settings.ship_speed
        # Propagate the float positions back to the rect object.
        self.rect.x = self.x
        self.rect.y = self.y
    def blitme(self):
        # Draw the ship at its current location.
        self.screen.blit(self.image, self.rect)
|
[
"anonymity_one@outlook.com"
] |
anonymity_one@outlook.com
|
34d1d295d4ea0e1a1589db374ba5f46c1c017507
|
6e922c94dc20370de2ad34cd501bdfa824d20515
|
/analysis/planner_2D.py
|
2b2768b17cc60f1162e12d1aba42f3ad169c6da1
|
[] |
no_license
|
funhere/auto-medical-detection
|
0dc24c6e1a7ecc98cb33a37876c31c4678e17dfc
|
653154b338bb844e73fa2ba931144d39db6f0174
|
refs/heads/master
| 2021-08-10T11:56:22.009012
| 2020-06-06T00:42:32
| 2020-06-06T00:42:32
| 188,710,527
| 4
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,737
|
py
|
import shutil, os
from analysis.planner_3D import Planner
from bins.analyze_and_preprocess import get_lists_of_splitted_dataset
from preprocessing.preprocessor import Preprocessor2D
from config.default_configs import *
from utils.files_utils import *
from net_architecture.generic_UNet import Generic_UNet
import numpy as np
from utils.analysis_utils import get_pool_and_conv_props
class Planner2D(Planner):
    """Experiment planner for the 2D U-Net variant.

    Extends the 3D Planner: chooses patch/batch sizes and network topology
    per stage from dataset statistics, saves the resulting plan to a pickle,
    and drives 2D preprocessing of the cropped data.
    """
    def __init__(self, folder_of_cropped_data, preprocessing_out_folder):
        super(Planner2D, self).__init__(folder_of_cropped_data,
                                        preprocessing_out_folder)
        # Identifier embedded in preprocessed-data filenames.
        self.data_identifier = "UNet_2D"
        # Axis permutations applied before/after processing; plan_exps
        # recomputes these from the target spacing.
        self.transpose_forward = [0, 1, 2]
        self.transpose_backward = [0, 1, 2]
        self.plans_fname = join(self.preprocessing_out_folder, default_plans_identifier + "_plans_2D.pkl")
    def load_plans(self):
        # Restore a previously saved plan file and the fields derived from it.
        self.plans = load_pickle(self.plans_fname)
        self.plans_per_stage = self.plans['plans_per_stage']
        self.dataset_properties = self.plans['dataset_properties']
        self.transpose_forward = self.plans['transpose_forward']
        self.transpose_backward = self.plans['transpose_backward']
    def plan_exps(self):
        """Compute the experiment plan from dataset properties and save it.

        Derives target spacing, axis transposition, per-stage patch/batch
        sizes and network topology, plus normalization and postprocessing
        settings, then stores everything via save_plans().
        """
        def get_stage_properties(current_spacing, original_spacing, original_shape, num_cases, transpose_forward,
                                 num_modalities, num_classes):
            # Build the plan dict for one resolution stage: resample the
            # median shape to current_spacing, derive the 2D patch size and
            # pooling/conv topology, and size the batch to fit GPU RAM.
            current_spacing_transposed = np.array([current_spacing[i] for i in transpose_forward])[1:]
            new_median_shape = np.round(original_spacing / current_spacing * original_shape).astype(int)
            dataset_num_voxels = np.prod(new_median_shape) * num_cases
            # 2D patch: drop the (transposed) leading axis.
            input_patch_size = new_median_shape[transpose_forward][1:]
            net_numpool, net_pool_kernel_sizes, net_conv_kernel_sizes, input_patch_size, \
                shape_must_be_divisible_by = get_pool_and_conv_props(current_spacing_transposed, input_patch_size,
                                                                     FEATUREMAP_MIN_EDGE_LENGTH_BOTTLENECK,
                                                                     Generic_UNet.MAX_NUMPOOL_2D)
            estimated_gpu_ram_consumption = Generic_UNet.compute_vram_consumption(input_patch_size,
                                                                                  net_numpool,
                                                                                  Generic_UNet.BASE_NUM_FEATURES_2D,
                                                                                  Generic_UNet.MAX_FILTERS_2D,
                                                                                  num_modalities, num_classes,
                                                                                  net_pool_kernel_sizes)
            # Scale the reference batch size by the estimated VRAM ratio.
            batch_size = int(np.floor(Generic_UNet.use_this_for_batch_size_computation_2D /
                                      estimated_gpu_ram_consumption * Generic_UNet.DEFAULT_BATCH_SIZE_2D))
            if batch_size < dataset_min_batch_size_cap:
                raise RuntimeError("Unsupported patches size. patch-based solution will be implemented later.")
            # check if batch size is too large (more than 5 % of dataset)
            max_batch_size = np.round(batch_size_covers_max_percent_of_dataset * dataset_num_voxels /
                                      np.prod(input_patch_size)).astype(int)
            batch_size = min(batch_size, max_batch_size)
            plan = {
                'batch_size': batch_size,
                'num_pool_per_axis': net_numpool,
                'patch_size': input_patch_size,
                'median_patient_size_in_voxels': new_median_shape,
                'current_spacing': current_spacing,
                'original_spacing': original_spacing,
                'pool_op_kernel_sizes': net_pool_kernel_sizes,
                'conv_kernel_sizes': net_conv_kernel_sizes,
                'do_dummy_2D_data_aug': False
            }
            return plan
        use_nonzero_mask_for_normalization = self.use_norm_mask()
        print("Are you using the nonzero maks for normalizaion?", use_nonzero_mask_for_normalization)
        spacings = self.dataset_properties['all_spacings']
        sizes = self.dataset_properties['all_sizes']
        all_classes = self.dataset_properties['all_classes']
        modalities = self.dataset_properties['modalities']
        num_modalities = len(list(modalities.keys()))
        target_spacing = self.get_target_spacing()
        # Per-case shapes after resampling to the target spacing.
        new_shapes = np.array([np.array(i) / target_spacing * np.array(j) for i, j in zip(spacings, sizes)])
        # Put the axis with the coarsest spacing first; 2D slices are taken
        # along that axis.
        max_spacing_axis = np.argmax(target_spacing)
        remaining_axes = [i for i in list(range(3)) if i != max_spacing_axis]
        self.transpose_forward = [max_spacing_axis] + remaining_axes
        self.transpose_backward = [np.argwhere(np.array(self.transpose_forward) == i)[0][0] for i in range(3)]
        new_shapes = new_shapes[:, self.transpose_forward]
        # Calculations are based on the median shape of the datasets.
        median_shape = np.median(np.vstack(new_shapes), 0)
        print("the median shape of the dataset is ", median_shape)
        max_shape = np.max(np.vstack(new_shapes), 0)
        print("the max shape in the dataset is ", max_shape)
        min_shape = np.min(np.vstack(new_shapes), 0)
        print("the min shape in the dataset is ", min_shape)
        print("Don't want feature maps smaller than ", FEATUREMAP_MIN_EDGE_LENGTH_BOTTLENECK, " in the bottleneck")
        # How many stages will the image pyramid have?  (Only one here.)
        self.plans_per_stage = []
        self.plans_per_stage.append(get_stage_properties(target_spacing, target_spacing, median_shape,
                                                         num_cases=len(self.list_of_cropped_npz_files),
                                                         transpose_forward=self.transpose_forward,
                                                         num_modalities=num_modalities,
                                                         num_classes=len(all_classes) + 1))
        print(self.plans_per_stage)
        self.plans_per_stage = self.plans_per_stage[::-1]
        self.plans_per_stage = {i: self.plans_per_stage[i] for i in range(len(self.plans_per_stage))}  # convert to dict
        normalization_schemes = self.do_normalization_scheme()
        store_largest_connected_component, min_size_per_class, min_region_size_per_class = \
            self.do_postprocessing()
        # these are independent of the stage
        plans = {'num_stages': len(list(self.plans_per_stage.keys())), 'num_modalities': num_modalities,
                 'modalities': modalities, 'normalization_schemes': normalization_schemes,
                 'dataset_properties': self.dataset_properties, 'list_of_npz_files': self.list_of_cropped_npz_files,
                 'original_spacings': spacings, 'original_sizes': sizes,
                 'preprocessing_data_folder': self.preprocessing_out_folder, 'num_classes': len(all_classes),
                 'all_classes': all_classes, 'base_num_features': Generic_UNet.BASE_NUM_FEATURES_3D,
                 'use_mask_for_norm': use_nonzero_mask_for_normalization,
                 'keep_only_largest_region': store_largest_connected_component,
                 'min_region_size_per_class': min_region_size_per_class, 'min_size_per_class': min_size_per_class,
                 'transpose_forward': self.transpose_forward, 'transpose_backward': self.transpose_backward,
                 'data_identifier': self.data_identifier, 'plans_per_stage': self.plans_per_stage}
        self.plans = plans
        self.save_plans()
    def do_preprocessing(self, num_threads):
        """Run 2D preprocessing of the cropped data using the saved plan.

        Copies gt_segmentations across (replacing any existing copy), then
        delegates to Preprocessor2D with num_threads worker processes.
        """
        if os.path.isdir(join(self.preprocessing_out_folder, "gt_segmentations")):
            shutil.rmtree(join(self.preprocessing_out_folder, "gt_segmentations"))
        shutil.copytree(join(self.folder_of_cropped_data, "gt_segmentations"), join(self.preprocessing_out_folder,
                                                                                    "gt_segmentations"))
        normalization_schemes = self.plans['normalization_schemes']
        use_nonzero_mask_for_normalization = self.plans['use_mask_for_norm']
        intensityproperties = self.plans['dataset_properties']['intensityproperties']
        preprocessor = Preprocessor2D(normalization_schemes, use_nonzero_mask_for_normalization,
                                      intensityproperties, self.transpose_forward[0])
        target_spacings = [i["current_spacing"] for i in self.plans_per_stage.values()]
        preprocessor.run(target_spacings, self.folder_of_cropped_data, self.preprocessing_out_folder,
                         self.plans['data_identifier'], num_threads)
if __name__ == "__main__":
    # Plan and preprocess a single task end to end.
    t = "Task_BoneSeg"
    print("\n\n\n", t)
    cropped_out_dir = os.path.join(cropped_output_dir, t)
    preprocessing_out_dir = os.path.join(preprocessing_output_dir, t)
    splitted_4D_out_dir_task = os.path.join(splitted_4D_out_dir, t)
    lists, modalities = get_lists_of_splitted_dataset(splitted_4D_out_dir_task)
    # need to be careful with RAM usage: fewer preprocessing workers for the
    # large-volume tasks.
    if t in ["Task_LITS", "Task_Liver", "Task_BoneSegOrigs", "Task_BoneSeg"]:
        threads = 3
    elif t in ["Task_LungIntern", "Task_FibroticLungSeg", "Task_Lung", "Task_HepaticVessel"]:
        threads = 6
    else:
        threads = 8
    print("number of threads: ", threads, "\n")
    print("\n\n\n", t)
    # BUG FIX: Planner2D.__init__ accepts only (folder_of_cropped_data,
    # preprocessing_out_folder) -- passing threads as a third argument raised
    # a TypeError.  The thread count belongs to do_preprocessing(), which the
    # original called with no arguments (also a TypeError).
    exp_planner = Planner2D(cropped_out_dir, preprocessing_out_dir)
    exp_planner.plan_exps()
    exp_planner.do_preprocessing(threads)
|
[
"noreply@github.com"
] |
funhere.noreply@github.com
|
0019c8ec5dd7461f3fd5921e275c326e9f78ab39
|
66149a32aca9d64f08d3ba111ede8613cd1cfb25
|
/11-06-2020_SAMBIT_EMWAVES.py
|
a99ed74f578c7afa73afd522b853a49bd88672d6
|
[
"MIT"
] |
permissive
|
MAVIN-07/EM-WAVES-SAMBIT
|
ad3180ffe6a278f5ac55fe369832f5639f26d3f3
|
c7c72111eed14a2aaa551306811fa1f26ea04f22
|
refs/heads/master
| 2022-12-11T08:17:21.756078
| 2020-06-17T22:04:04
| 2020-06-17T22:04:04
| 273,086,413
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 15,778
|
py
|
#CREATED AND MAINTAINED BY: SAMBIT KUMAR MISHRA
#Email-ID: sambitmishra1968@gmail.com
import math
import cmath
import matplotlib.pyplot as plt
import matplotlib.image as img
#===========================================================================================================================================================
# Containers used throughout this program (filled from user input below).
Eo = []  # complex E-field coefficients [a, b, c, d, m, n]
Q = []  # angles [Qx, Qy, Qz, Qi]
dp = []  # [permittivity1, permeability1, permittivity2, permeability2]
n = [0, 0]  # one entry per medium -- presumably indices/impedances, TODO confirm intended meaning
B = [0, 0]  # one entry per medium -- presumably phase constants, TODO confirm
ba = [0, 0]  # [perpendicular, parallel]
arr_phase = [0, 0]  # [transmission, reflection]
arr_magnitude = [0, 0]  # [transmission, reflection]
critical_angle = 0
#===========================================================================================================================================================
# Print the assumptions and sign conventions on the screen before proceeding.
print("ASSUMPTIONS-NOTE THESE BEFORE PROCEEDING")
print("_________________________________________________________________________________________________________________________________________________")
print("")
print("Assume that the complex Electric field is given by the equation: E = ((a+jb)x^ + (c+jd)y^ + (m+jn)z^)e*exp(-j*B*(xcos(Qx)+ycos(Qy)+zcos(Qz)))")
print("Where ^ means cap, eg. x^ means unit vector in direction of x")
print("j = complex imaginary number analogous to i in maths")
print("e = natural log base")
print("a,c,m are coefficients of real part whereas b,d,n are coefficients of imaginary part of Eo")
print("Qx,Qy,Qz represent the angle made by the incident electric field wrt x,y,z axes respectively")
print("Qi is the incident angle of wave wrt to the normal to the plane of incidence")
print("_________________________________________________________________________________________________________________________________________________")
print("")
#===========================================================================================================================================================
# TAKING THE INPUTS FROM THE USER
try:
Eo = list(map(float, input("Enter the values of a,b,c,d,m,n in this exact order separated by space= ").split()))
# w = float(input("Enter the frequency of the EM Wave in Hz= "))#We dont have to use because in B value it will get cancelled Nr and Dr
Q = list(map(float, input("Enter the values of Qx,Qy,Qz,Qi in this exact order separated by spaces= ").split()))
# Enter 0 if degrees and 1 if radians
unit = int(input("Enter the unit of angle chosen:- TYPE '0' for degrees, TYPE '1' for radians "))
if unit == 0:
Q[0] = math.radians(Q[0])
Q[1] = math.radians(Q[1])
Q[2] = math.radians(Q[2])
Q[3] = math.radians(Q[3])
# If the input type is chosen as radians then leave the value as it is.
#This loop will exit only if the user enters proper values of dielectric properties
while 1:
dp = list(map(float, input(
"Enter the values of permittivity_medium1, permeability_medium1, permittivity_medium2, permeability_medium2 in "
"this same order separated by space= ").split()))
if dp[3]*dp[2] == dp[1]*dp[0]:
print("ERROR: Enter the values as per assumptions")
else:
break
print("")
print("For the following two categories:- TYPE '1' for XY, TYPE '2' for YZ, TYPE '3' for XZ")
print("")
poi = int(input("Enter the plane of interface= "))
pod = int(input("Enter the plane of dielectric= "))
#===========================================================================================================================================================
#CALCULATION OF POLARISATION OF WAVE
polarisation = 0
#Declaration of polarisation variable for using in program.
if poi == 1:
if math.cos(Q[2]) == 0:
polarisation = 1 # Perpendicular polarisation
elif math.sin(Q[2]) == 0:
polarisation = 0 # Parallel polarisation
elif poi == 2:
if math.cos(Q[0]) == 0:
polarisation = 1 # Perpendicular polarisation
elif math.sin(Q[0]) == 0:
polarisation = 0 # Parallel polarisation
elif poi == 3:
if math.cos(Q[1]) == 0:
polarisation = 1 # Perpendicular polarisation
elif math.sin(Q[1]) == 0:
polarisation = 0 # Parallel polarisation
#===============================================================================================================================================================
#Calculation of the magnitude of Incident Electric Field: Absolute value of Eo
Ei=0
#Declaration of Ei variable
for i in range(0, 6):
Ei += Eo[i]**2 # We have to take square root of this value(Ei) to obtain Magnitude of incident electric field.
#===============================================================================================================================================================
#CALCULATION OF BREWSTER'S ANGLE
#The square root value might come out to be negative hence complex square root must be taken into account so this step can be postponed
# reading png image file
im = img.imread('EMWAVE_2.png')
# show image
plt.imshow(im)
#For Perpendicular Polarisation
if ((dp[3]/dp[1])*(((dp[3]*dp[0])-(dp[1]*dp[2]))/((dp[3]*dp[2])-(dp[1]*dp[0])))) >= 0:
ba[0] = math.atan(math.sqrt((dp[3]/dp[1])*(((dp[3]*dp[0])-(dp[1]*dp[2]))/((dp[3]*dp[2])-(dp[1]*dp[0])))))
else:
print("BREWSTER ANGLE IS NOT POSSIBLE FOR THIS CASE")
#For Parallel Polarisation
if ((dp[2]/dp[0])*(((dp[1]*dp[2])-(dp[3]*dp[0]))/((dp[3]*dp[2])-(dp[1]*dp[0])))) >= 0:
ba[1] = math.atan(math.sqrt((dp[2]/dp[0])*(((dp[1]*dp[2])-(dp[3]*dp[0]))/((dp[3]*dp[2])-(dp[1]*dp[0])))))
else:
print("BREWSTER ANGLE IS NOT POSSIBLE FOR THIS CASE")
#=====================================================================================================================================================================
#The case when incident wave just grazes through the plane of dielectric interface
#In this case no reflection or transmission of wave is possible. This is an exceptional case.
if math.cos(Q[3]) == 0:
print("NO TRANSMISSION OR REFLECTION POSSIBLE IN THIS CASE: BECAUSE THE WAVE GRAZES THROUGH THE PLANE OF INTERFACE")
#ACTUAL CALCULATION BEGINS HERE
else:
n[0] = (120*math.pi)/(math.sqrt((dp[1])/(dp[0]))) #For medium 1
n[1] = (120*math.pi)/(math.sqrt((dp[3])/(dp[2]))) #For medium 2
B[0] = math.sqrt(dp[1]*dp[0]) #For medium 1
B[1] = math.sqrt(dp[3]*dp[2]) #For medium 2
b1 = n[1] * math.cos(Q[3])
b2 = n[0] * math.cos(Q[3])
#==================================================================================================================================================================
#CASE-1: When the incident wave is at an angle greater than the critical Angle and medium 2 is rarer than medium 1
if ((B[0] / B[1]) * math.sin(Q[3])) >= 1 and (dp[3]*dp[2])<=(dp[1]*dp[0]):
#==================================================================================================================================================================
print("THIS IS THE CASE OF TOTAL INTERNAL REFLECTION")
print("")
critical_angle = math.asin(B[1]/B[0])
# reading png image file
im = img.imread('EMWAVE_TIR.png')
# show image
plt.imshow(im)
if (B[0] / B[1])*math.sin(Q[3]) > 1:
if polarisation == 1:
reflection_coefficient = (b1 - 1j*(n[0]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1))) / (b1 + 1j*(n[0]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1)))
arr_phase[1] = (-2)*math.atan((n[0]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1)) / b1)
elif polarisation == 0:
reflection_coefficient = (b2 - 1j*(n[1]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1))) / (b2 + 1j*(n[1]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1)))
arr_phase[1] = (-2)*math.atan((n[1]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1)) / b2)
#Calculation of magnitude
arr_magnitude[0] = "N/A"
arr_phase[0] = "N/A"
arr_magnitude[1] = reflection_coefficient * math.sqrt(Ei)
elif (B[0] / B[1]) * math.sin(Q[3]) == 1:
try:
if Q[3] == critical_angle:
if polarisation == 1:
reflection_coefficient = (b1 - 1j*(n[0]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1))) / (b1 + 1j*(n[0]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1)))
arr_phase[1] = (-2)*math.atan((n[0]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1)) / b1)
elif polarisation == 0:
reflection_coefficient = (b2 - 1j*(n[1]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1))) / (b2 + 1j*(n[1]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1)))
arr_phase[1] = (-2)*math.atan((n[1]*math.sqrt((((B[0]/B[1])*math.sin(Q[3]))**2)-1)) / b2)
#Calculation of magnitude
arr_magnitude[0] = "N/A"
arr_phase[0] = "N/A"
arr_magnitude[1] = reflection_coefficient * math.sqrt(Ei)
else:
print("ERROR: DISCREPANCY IN ANALYTICAL AND INPUT VALUES")
except:
print("ERROR!")
else:
print("ERROR: Please re-enter practical values in input")
#==================================================================================================================================================================
#CASE-2: When the wave is incident at Brewster's Angle
elif Q[3]==ba[0] or Q[3]==ba[1]:
#==================================================================================================================================================================
#No reflection will take place in this case
arr_magnitude[1] = "N/A"
arr_phase[1] = "N/A"
# reading png image file
im = img.imread('EMWAVE_BA.png')
# show image
plt.imshow(im)
a1 = n[0] * math.cos(transmitted_angle)
a2 = n[1] * math.cos(transmitted_angle)
#Case of perpendicular polarisation
#--------------------------------------------------------
if polarisation == 1:
transmission_coefficient = (2 * b1) / (b1 + a1)
if transmission_coefficient >= 0:
arr_phase[0] = (2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.cos(Q[3]), 2))) / (n[0] * math.sin(Q[3])))
else:
arr_phase[0] = (-2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.cos(Q[3]), 2))) / (n[0] * math.sin(Q[3]))) # A phase change of Pi
#Calculation of magnitude
arr_magnitude[0] = transmission_coefficient * math.sqrt(Ei)
#Case of parallel polarisation
#--------------------------------------------------------
elif polarisation == 0:
transmission_coefficient = (2 * b1) / (b2 + a2)
if transmission_coefficient >= 0:
arr_phase[0] = (2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.cos(Q[3]), 2))) / (n[0] * math.sin(Q[3])))
else:
arr_phase[0] = (-2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.cos(Q[3]), 2))) / (n[0] * math.sin(Q[3]))) # A phase change of Pi
#Calculation of magnitude
arr_magnitude[0] = transmission_coefficient * math.sqrt(Ei)
#==================================================================================================================================================================
#CASE-3: The general case of reflection and transmission
#if ((B[0] / B[1]) * math.sin(Q[3])) < 1:
else:
#==================================================================================================================================================================
transmitted_angle = math.asin(B[0]/B[1]) * math.sin(Q[3])
a1 = n[0] * math.cos(transmitted_angle)
a2 = n[1] * math.cos(transmitted_angle)
# reading png image file
im = img.imread('EMWAVE_2.png')
# show image
plt.imshow(im)
# For the case of perpendicular polarisation
if polarisation == 1:
#----------------------------------------------
reflection_coefficient = (b1 - a1) / (b1 + a1)
transmission_coefficient = (2 * b1) / (b1 + a1)
if reflection_coefficient >= 0:
#Calculation of phase of wave after reflection or transmission is a bit tricky: Need to look into that
arr_phase[1] = (2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.sin(Q[3]), 2))) / (n[0] * math.cos(Q[3])))
else:
arr_phase[1] = (-2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.sin(Q[3]), 2))) / (n[0] * math.cos(Q[3]))) # A phase change of Pi
if transmission_coefficient >= 0:
arr_phase[0] = (2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.cos(Q[3]), 2))) / (n[0] * math.sin(Q[3])))
else:
arr_phase[0] = (-2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.cos(Q[3]), 2))) / (n[0] * math.sin(Q[3]))) # A phase change of Pi
#Calculation of magnitude
arr_magnitude[0] = transmission_coefficient * math.sqrt(Ei)
arr_magnitude[1] = reflection_coefficient * math.sqrt(Ei)
#----------------------------------------------------------------------------------------------------------------------------------
# For the case of parallel polarisation
elif polarisation == 0:
#-------------------------------------------
reflection_coefficient = (b2 - a2) / (b2 + a2)
transmission_coefficient = (2 * b1) / (b2 + a2)
if reflection_coefficient >= 0:
arr_phase[1] = (2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.sin(Q[3]), 2))) / (n[0] * math.cos(Q[3])))
else:
arr_phase[1] = (-2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.sin(Q[3]), 2))) / (n[0] * math.cos(Q[3]))) # A phase change of Pi
if transmission_coefficient >= 0:
arr_phase[0] = (2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.cos(Q[3]), 2))) / (n[0] * math.sin(Q[3])))
else:
arr_phase[0] = (-2) * math.atan((n[1] * math.sqrt(1 - math.pow((B[0] / B[1]) * math.cos(Q[3]), 2))) / (n[0] * math.sin(Q[3]))) # A phase change of Pi
#Calculation of magnitude
arr_magnitude[0] = transmission_coefficient * math.sqrt(Ei)
arr_magnitude[1] = reflection_coefficient * math.sqrt(Ei)
#======================================================================================================================================================================
# The final required values using the input values are printed below
print("__________________________________________________________________________")
print("The phase of transmitted wave is " + str(arr_phase[0]))
print("The phase of reflected wave is " + str(arr_phase[1]))
print("The magnitude of transmitted wave is " + str(arr_phase[0]))
print("The magnitude of reflected wave is " + str(arr_phase[1]))
print("")
print("These were your final results. THANK YOU")
print("__________________________________________________________________________")
#======================================================================================================================================================================
except:
# reading png image file
im = img.imread('SORRY.png')
# show image
plt.imshow(im)
print("__________________________________________")
print("PLEASE RECHECK THE VALUES YOU HAVE ENTERED")
print("__________________________________________")
#THE END
#FOR QUERIES/REMARKS CONTACT: SAMBIT KUMAR MISHRA (sambitmishra1968@gmail.com)
|
[
"noreply@github.com"
] |
MAVIN-07.noreply@github.com
|
d8b9910b487bbb5eb2e7b49068c6d3b302e6cd43
|
7043eec45732b05686dd7da397f922339efad693
|
/TA.py
|
892b74546802ad34a83a52569521a4b2a44c031a
|
[] |
no_license
|
rtgrimm/SpectralKineticsODE
|
7fd35f924ad64aac08d76dd1141cf3b38051b82f
|
68b3f16372a8294a714dd7df5a6029d1183fd9ae
|
refs/heads/master
| 2023-07-15T06:11:15.368814
| 2021-08-09T17:15:56
| 2021-08-09T17:15:56
| 392,377,367
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,619
|
py
|
import numpy as np
from dataclasses import dataclass
from typing import *
from scipy.constants import Planck, speed_of_light, elementary_charge
from scipy.integrate import odeint, solve_ivp
import matplotlib.pyplot as plt
from kinetics import Scaler, Parameters, run
from style import set_style
@dataclass
class TAParams:
pump_spectrum : np.ndarray
probe_spectrum : np.ndarray
time : np.ndarray
tau_list : np.ndarray
probe_time_env : Callable[[Scaler], Callable[[Scaler], Scaler]]
pump_time_env: Callable[[Scaler], Callable[[Scaler], Scaler]]
make_params : Any
probe_scale = 0.1
@dataclass
class TAResults:
spectra : np.ndarray
def run_TA(TA_params : TAParams) -> TAResults:
params = TA_params.make_params(
lambda t: TA_params.probe_time_env(
np.min(TA_params.tau_list))(t) * TA_params.probe_spectrum * 0.1)
pump_spectrum = TA_params.pump_spectrum
probe_spectrum = TA_params.probe_spectrum
result_A_0 = run(params, TA_params.time)
A_0 = np.sum(result_A_0.spectral_fluxes, axis=0)
spectra = []
pump = TA_params.pump_time_env(np.min(TA_params.tau_list))
for tau in TA_params.tau_list:
print(f"{(tau / np.max(TA_params.tau_list)) * 100.0}%")
probe = TA_params.pump_time_env(tau)
exc_total = lambda t: pump(t) * pump_spectrum + probe(t) * probe_spectrum * 0.1
params = TA_params.make_params(exc_total)
results = run(params, TA_params.time)
spectra.append(np.sum(results.spectral_fluxes, axis=0) - A_0 * 0)
spectra = np.array(spectra)
return TAResults(spectra)
|
[
"rygr1645@colorado.edu"
] |
rygr1645@colorado.edu
|
4a24d53b5b6eb6b7db4ed323a7726a5e0b69cd7b
|
16cb142fb04798313c43a073cf1e0ddd6f4bf13b
|
/timing/resources/time.py
|
978a0750832068ee69e9c5e493fb89f1c059e468
|
[] |
no_license
|
iRhonin/timing
|
46d3b080333fe4b169252cdb37f3d3194fc00072
|
91b7d86ba053cf46df6a3edbdf273947d8a37ae3
|
refs/heads/master
| 2023-04-30T21:02:26.861632
| 2021-05-14T20:16:12
| 2021-05-14T20:16:12
| 279,684,898
| 0
| 0
| null | 2021-05-14T20:16:13
| 2020-07-14T20:19:47
|
Python
|
UTF-8
|
Python
| false
| false
| 2,027
|
py
|
from datetime import datetime
import falcon
from sqlalchemy import func
from timing.authorization import Authorization
from timing.models.time import Time, DATETIME_FORMAT
from timing.paginate import Paginate
from timing.schemas.time import TimeInSchema
class TimesResource(object):
@falcon.before(Authorization())
def on_post(self, req, resp):
data = TimeInSchema(
user_id=req.context['user_id'],
createdAt=req.params.get('t', None),
hours=req.media['hours'],
)
time = Time(**data.dict(exclude_unset=True))
req.context.db_session.add(time)
req.context.db_session.commit()
resp.media = time.to_dict()
@falcon.before(Authorization())
@falcon.before(Paginate())
def on_get(self, req, resp):
time_query = self._get_times(req, query_on=Time)
total_count = time_query.count()
time_query = time_query \
.order_by(Time.created_at) \
.limit(req.context['limit']) \
.offset(req.context['offset']) \
resp.media = [t.to_dict() for t in time_query]
resp.set_header('X-COUNT', total_count)
@falcon.before(Authorization())
def on_get_calculator(self, req, resp):
sum_times = self._get_times(req, query_on=func.sum(Time.hours)).one()
sum_times = int(sum_times[0] or 0)
resp.media = Time(
created_at=datetime.utcnow(),
hours=sum_times,
).to_dict()
def _get_times(self, req, query_on):
time_query = req.context.db_session.query(query_on) \
.filter(Time.user_id == req.context.user_id)
if from_ := req.params.get('from'):
time_query = time_query.filter(
Time.created_at >= datetime.strptime(from_, DATETIME_FORMAT)
)
if to_ := req.params.get('to'):
time_query = time_query.filter(
Time.created_at <= datetime.strptime(to_, DATETIME_FORMAT)
)
return time_query
|
[
"fatahzade@gmail.com"
] |
fatahzade@gmail.com
|
c19012af2e5fe52651cc00b9775abc1d3e4e6ea1
|
a71d5838e292e2c0c7371f7fc7870c7018820ae1
|
/day03/03_pie.py
|
71c8ec39a03c52234f30d2660394d2f3d37a995f
|
[] |
no_license
|
skywalkerqwer/DataScience
|
be91541c3da383d15ee52d0101d2dbb0289c2fde
|
4cfd42f3a9795e295393cdb045852d46e99b6e59
|
refs/heads/master
| 2020-06-17T11:41:40.113864
| 2019-07-15T09:49:40
| 2019-07-15T09:49:40
| 195,913,553
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 449
|
py
|
"""
็ปๅถ้ฅผ็ถๅพ
"""
import numpy as np
import matplotlib.pyplot as mp
labels = ['Python', 'JavaScript', 'C++', 'Java', 'PHP']
values = [26, 17, 21, 29, 11]
spaces = [0.05, 0.01, 0.01, 0.01, 0.01]
colors = ['dodgerblue', 'orangered', 'limegreen', 'violet', 'gold']
mp.figure('Pie Chart', facecolor='lightgray')
mp.title('Languages PR')
mp.pie(values, spaces, labels, colors, '%.1f%%', shadow=True, startangle=0, radius=1)
mp.legend()
mp.show()
|
[
"15902162780@163.com"
] |
15902162780@163.com
|
ebcfe501255bb644caa92394017c550197a10ee4
|
d27d98fc322ea3b29b77fdf0a8751bc6a6e4355a
|
/python/20/ccc20j4.py
|
09d8866cfee2b5c41d870b92254d90a5edccd997
|
[
"MIT"
] |
permissive
|
sadmanca/ccc-solutions
|
9e48279879241b94e746c7ce1c202d66ce5c46f4
|
1ab0d1d7a59eaf0f6b231208a5f42a5eb364caaa
|
refs/heads/master
| 2023-03-05T17:19:18.776037
| 2021-02-17T00:40:30
| 2021-02-17T00:40:30
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 316
|
py
|
# COMPLETE
t = input()
s = input()
l = len(s)
shifts = set()
shifts.add(s)
for i in range(l-1):
s = s[1:]+s[0]
shifts.add(s)
cyc = False
for string in shifts:
for i in range(0, len(t)-l+1):
if string == t[i:i+l]:
cyc = True
if cyc == True:
print("yes")
else:
print("no")
|
[
"41028402+Sadman-Hossain@users.noreply.github.com"
] |
41028402+Sadman-Hossain@users.noreply.github.com
|
0847d046a1cea08f4fceb4e24d0e9848c3efd510
|
e1bcef91c724a3b89f442079ab51977ce5a6bc2f
|
/quote/forms.py
|
1d803b8df3d24b45e9b8c59cb8586deddbaa426e
|
[] |
no_license
|
mahdistt/CRM
|
48550b1bb8adbd012b254e42edefc69a572a33fc
|
a1aa33df4ef2e342268f8965d43bf026be61db37
|
refs/heads/master
| 2023-07-09T05:11:23.497844
| 2021-08-11T18:45:56
| 2021-08-11T18:45:56
| 390,491,640
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 476
|
py
|
from django import forms
from . import models
class QuoteCreateViewForm(forms.ModelForm):
class Meta:
model = models.QuoteItem
fields = (
'quote',
'product',
'price',
'quantity',
'discount',
)
widgets = {
'quantity': forms.NumberInput,
'discount': forms.NumberInput,
'price': forms.NumberInput,
'quote': forms.Select,
}
|
[
"80247767+mahdistt@users.noreply.github.com"
] |
80247767+mahdistt@users.noreply.github.com
|
09f920dba59ee54359a4cf44bf01235cf2d05171
|
122779e414685ac95076f440f2809d314800e510
|
/src/tests/integration/test_cli.py
|
2f9573fd5a48bf3689f6afadd0a0a6530705bcbd
|
[
"BSD-2-Clause"
] |
permissive
|
alexseitsinger/package-controller
|
3fb7a09d2b965630fe11203f6f5c47e664826c7d
|
0ee896986cfa17a96bf9fb6afff35dd97f0b1211
|
refs/heads/master
| 2022-12-11T18:00:17.922442
| 2019-08-21T02:24:35
| 2019-08-21T02:24:35
| 185,510,280
| 2
| 0
|
BSD-2-Clause
| 2022-12-08T05:50:25
| 2019-05-08T02:10:23
|
Python
|
UTF-8
|
Python
| false
| false
| 238
|
py
|
import pytest
from click.testing import CliRunner
from package_controller.cli import main
def test_add():
pass
def test_build():
pass
def test_commit():
pass
def test_release():
pass
def test_version():
pass
|
[
"16756928+alexseitsinger@users.noreply.github.com"
] |
16756928+alexseitsinger@users.noreply.github.com
|
6924a180ef02d2c2fcdab06a4084459706c24ff6
|
ed8bfb44c7d8fd2ef3d0d493d39eba6d7b729aec
|
/decode.py
|
05c18d5140f05ddeb6b1977cef37d543fe16147e
|
[] |
no_license
|
omri123/text_lord
|
5cedeb8fb53c69977b21ec25fe77e2d598614d97
|
59f8095784f702891d24c771281c03ec0402e4f0
|
refs/heads/master
| 2022-04-09T11:39:20.561554
| 2020-03-12T22:05:04
| 2020-03-12T22:05:04
| 243,927,064
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,893
|
py
|
import torchtext
from restorant_dataset import START, END
from model import NoEncoderFConvDecoderModel
from archive.utils import load_checkpoint, vocab_to_dictionary
from restorant_dataset import get_dataset
import os
import pickle
import copy
def gready_decode_single(model: NoEncoderFConvDecoderModel, vocab: torchtext.vocab,
stars: int, sample_id: int, start_token=START, end_token=END, device='cpu'):
max_length = 25
src_tokens = torch.tensor([[sample_id, stars]], dtype=torch.int64, device=device)
src_lengths = torch.full((1, 1), 5, device=device)
reviews = torch.tensor([[vocab.stoi[start_token]]], dtype=torch.int64, device=device)
sentence = [start_token]
length = 0
while end_token not in sentence:
logits, _ = model(src_tokens, src_lengths, reviews)
logits_for_new_token = logits[0, -1, :] # batch, seq, vocab
word_index = torch.argmax(logits_for_new_token).item()
sentence.append(vocab.itos[word_index])
length += 1
if length > max_length:
break
sentence_by_indecies = [vocab.stoi[word] for word in sentence]
reviews = torch.tensor([sentence_by_indecies], dtype=torch.int64, device=device)
return ' '.join(sentence)
def gready_decode(model, vocab, src_tokens, src_lengths, start_token, end_token):
pass
import torch
from queue import PriorityQueue
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
class BeamSearchNode(object):
def __init__(self, sentence, logProb, length):
'''
:param sentence: a list of tokens!!!
:param logProb: logp sum
:param length:
'''
self.sent = sentence
self.logp = logProb
self.leng = length
def eval(self, alpha=1.0):
reward = 0
# Add here a function for shaping a reward
return - (self.logp / float(self.leng - 1 + 1e-6) + alpha * reward)
def __lt__(self, other):
return self.eval() <= other.eval()
def beam_decode_single(model, vocab, sample_id, stars, beam_width=10, topk=1, device='cpu', SOS_token='<s>', EOS_token='</s>', MAX_LENGTH=50):
"""
decode single example using beam search.
:param decoder: a NoEncoderFConvDecoderModel object
:param vocab:
:param sample_id:
:param stars:
:param beam_width:
:param SOS_token:
:param EOS_token:
:param MAX_LENGTH:
:return:
"""
src_tokens = torch.tensor([[sample_id, stars]], dtype=torch.int64, device=device)
src_lengths = torch.full((1, 1), 5, device=device)
review = [SOS_token]
# review = torch.tensor([[vocab.stoi[SOS_token]]], dtype=torch.int64)
solutions = []
nodes = PriorityQueue()
node = BeamSearchNode(review, 0, 1)
nodes.put(node)
qsize = 1
while True:
# finished
if len(solutions) == topk: break
# give up when decoding takes too long
if qsize > 2000:
for i in range(topk - len(solutions)):
solutions.append(nodes.get())
break
# fetch the best node
node = nodes.get()
review = node.sent
review_int = [vocab.stoi[w] for w in review]
review_torch = torch.tensor([review_int], dtype=torch.int64, device=device)
if review[-1] == EOS_token:
solutions.append(node)
continue
logits, _ = model(src_tokens, src_lengths, review_torch)
predictions = torch.log_softmax(logits, dim=2)
log_probs, indexes = torch.topk(predictions, beam_width)
for new_k in range(beam_width):
word_index = indexes[0, len(review)-1, new_k].item()
word = vocab.itos[word_index]
review_new = copy.deepcopy(node.sent)
review_new.append(word)
new_word_log_p = log_probs[0, 0, new_k].item()
new_node = BeamSearchNode(review_new, node.logp + new_word_log_p, node.leng)
nodes.put(new_node)
qsize += 1
return [' '.join(s.sent) for s in solutions]
def main():
foldername = '/cs/labs/dshahaf/omribloch/data/text_lord/restorant/train/note_tiny_no_noise_dim_32_ntokens_5_nconv_10_nsamples_102400_content_noise_0.001/'
# foldername = '/cs/labs/dshahaf/omribloch/data/text_lord/restorant/train/note_EM_no_noise_dim_32_ntokens_10_nconv_4_nsamples_1024_content_noise_0.0/'
vocab_path = os.path.join(foldername, 'vocab.pickle')
model_ckpt_path = os.path.join(foldername, 'last_checkpoint.ckpt')
with open(vocab_path, 'rb') as file:
vocab = pickle.load(file)
print('vocab was loaded')
decoder_dictionary = vocab_to_dictionary(vocab)
device = 'cpu'
nsamples = 102400
ntokens = 5
dim = 32
content_noise = 0.001
dropout = 0
nconv = 10
model = load_checkpoint(model_ckpt_path, 'cpu',
device, nsamples, decoder_dictionary.pad(),
ntokens, dim, content_noise, dropout,
decoder_dictionary, 50, nconv)
print('model loaded')
model.eval()
dataset, vocab = get_dataset(10000, '/cs/labs/dshahaf/omribloch/data/text_lord/restorant/', vocab)
for i in range(10):
sid = dataset[i].id
stars = dataset[i].stars
# stars = 1
review_sentence = ' '.join(dataset[i].review)
print(review_sentence)
decoded_sentence = gready_decode_single(model, vocab, stars, sid)
print(decoded_sentence)
decoded_sentence = gready_decode_single(model, vocab, 1-stars, sid)
print(decoded_sentence)
print('-------------')
decoded_sentence = beam_decode_single(model, vocab, sid, stars, topk=10, beam_width=4)
for d in decoded_sentence:
print(d)
print('==============================')
if __name__=='__main__':
main()
|
[
"omri.bloch@mail.huji.ac.il"
] |
omri.bloch@mail.huji.ac.il
|
1e5237044f42dfe836143cd43c09e4bc38b7033a
|
c12d0e0935aadc52c6ae03d901fcb52e2b8fe4d7
|
/task1/solution.py
|
f3614fd0a3446ed63f3aa00630f3e45ebe2df4f7
|
[] |
no_license
|
skeleta/python-retrospective
|
c19ec29867ea34ff96cd86c7c69fc0e1108d7f1c
|
e808e007f375fccbbb0ae7082bab0d39b93189fe
|
refs/heads/master
| 2021-01-18T03:14:27.664850
| 2013-04-28T20:18:35
| 2013-04-28T20:18:35
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 402
|
py
|
SIGNS = ("ะะพะทะธัะพะณ", "ะะพะดะพะปะตะน", "ะ ะธะฑะธ", "ะะฒะตะฝ", "ะขะตะปะตั", "ะะปะธะทะฝะฐัะธ",
"ะ ะฐะบ", "ะัะฒ", "ะะตะฒะฐ", "ะะตะทะฝะธ", "ะกะบะพัะฟะธะพะฝ", "ะกััะตะปะตั")
RANGES = {1: 19, 2: 18, 3: 20, 4: 20, 5: 20, 6: 20,
7: 21, 8: 22, 9: 22, 10: 22, 11: 21, 12: 21}
def what_is_my_sign(day, month):
return SIGNS[month - 12 - (day <= RANGES[month])]
|
[
"ivankapukaranov@gmail.com"
] |
ivankapukaranov@gmail.com
|
442831645ae4c6f34df075f4576fd9a948e313c2
|
649c930c4c5c89b48673b438c8d42f70e6b00cfd
|
/day 03/ไฝไธ.py
|
d60c1b0df8e1db4ae550c6e5524337bfd22e6c18
|
[] |
no_license
|
zhangchenghao0617/Learn
|
a0f1ebb9b6248c83e0c87cb756d19b2243722679
|
32cbaacc697d7e5992e077f368c8c80c5900f96e
|
refs/heads/master
| 2023-01-12T07:20:57.911545
| 2020-10-25T09:40:04
| 2020-10-25T09:40:04
| 306,071,934
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,515
|
py
|
# # 1.ๆๅ้name = "aleX leNb" ๅฎๆๅฆไธๆไฝ๏ผ
# name = "aleX leNb"
# # - ็งป้ค name ๅ้ๅฏนๅบ็ๅผไธค่พน็็ฉบๆ ผ,ๅนถ่พๅบๅค็็ปๆ
# print(name.strip())
# # - ๅคๆญ name ๅ้ๆฏๅฆไปฅ "al" ๅผๅคด,ๅนถ่พๅบ็ปๆ
# print(name.startswith('al'))
# # - ๅคๆญnameๅ้ๆฏๅฆไปฅ"Nb"็ปๅฐพ,ๅนถ่พๅบ็ปๆ
# print(name.endswith('Nb'))
# # - ๅฐ name ๅ้ๅฏนๅบ็ๅผไธญ็ ๆๆ็"l" ๆฟๆขไธบ "p",ๅนถ่พๅบ็ปๆ
# print(name.replace('l','p'))
# # - ๅฐnameๅ้ๅฏนๅบ็ๅผไธญ็็ฌฌไธไธช"l"ๆฟๆขๆ"p",ๅนถ่พๅบ็ปๆ
# print(name.replace('l','p',1))
# # - ๅฐ name ๅ้ๅฏนๅบ็ๅผๆ นๆฎ ๆๆ็"l" ๅๅฒ,ๅนถ่พๅบ็ปๆใ
# print(name.split('l'))
# # - ๅฐnameๅ้ๅฏนๅบ็ๅผๆ นๆฎ็ฌฌไธไธช"l"ๅๅฒ,ๅนถ่พๅบ็ปๆใ
# print(name.split('l',1))
# # - ๅฐ name ๅ้ๅฏนๅบ็ๅผๅๅคงๅ,ๅนถ่พๅบ็ปๆ
# print(name.upper())
# # - ๅฐ name ๅ้ๅฏนๅบ็ๅผๅๅฐๅ,ๅนถ่พๅบ็ปๆ
# print(name.lower())
# # - ๅคๆญnameๅ้ๅฏนๅบ็ๅผๅญๆฏ"l"ๅบ็ฐๅ ๆฌก๏ผๅนถ่พๅบ็ปๆ
# print(name.count('l'))
# # - ๅฆๆๅคๆญnameๅ้ๅฏนๅบ็ๅผๅๅไฝ"l"ๅบ็ฐๅ ๆฌก,ๅนถ่พๅบ็ปๆ
# print(name.count('l',0,4))
# # - ่ฏท่พๅบ name ๅ้ๅฏนๅบ็ๅผ็็ฌฌ 2 ไธชๅญ็ฌฆ?
# print(name[1])
# # - ่ฏท่พๅบ name ๅ้ๅฏนๅบ็ๅผ็ๅ 3 ไธชๅญ็ฌฆ?
# print(name[0:3])
# # - ่ฏท่พๅบ name ๅ้ๅฏนๅบ็ๅผ็ๅ 2 ไธชๅญ็ฌฆ?
# print(name[-2:])
#
# # 2.ๆๅญ็ฌฆไธฒs = "123a4b5c"
# s = "123a4b5c"
# # - ้่ฟๅฏนsๅ็ๅฝขๆๆฐ็ๅญ็ฌฆไธฒs1,s1 = "123"
# print(s[0:3])
# # - ้่ฟๅฏนsๅ็ๅฝขๆๆฐ็ๅญ็ฌฆไธฒs2,s2 = "a4b"
# print(s[3:6])
# # - ้่ฟๅฏนsๅ็ๅฝขๆๆฐ็ๅญ็ฌฆไธฒs3,s3 = "1345"
# print(s[0:7:2])
# # - ้่ฟๅฏนsๅ็ๅฝขๆๅญ็ฌฆไธฒs4,s4 = "2ab"
# print(s[1:6:2])
# # - ้่ฟๅฏนsๅ็ๅฝขๆๅญ็ฌฆไธฒs5,s5 = "c"
# print(s[-1:])
# # - ้่ฟๅฏนsๅ็ๅฝขๆๅญ็ฌฆไธฒs6,s6 = "ba2"
# print(s[-3:-8:-2])
#
# # 3.ไฝฟ็จwhileๅforๅพช็ฏๅๅซๆๅฐๅญ็ฌฆไธฒs="asdfer"ไธญๆฏไธชๅ
็ด ใ
# s="asdfer"
# while:
# i = 0
# while i < len(s):
# print(s[i])
# i+=1
# for
# for i in s:
# print(i)
#
# # 4.ไฝฟ็จforๅพช็ฏๅฏนs="asdfer"่ฟ่กๅพช็ฏ๏ผไฝๆฏๆฏๆฌกๆๅฐ็ๅ
ๅฎน้ฝๆฏ"asdfer"ใ
# s="asdfer"
# for i in s:
# print(s)
#
# # 5.ไฝฟ็จforๅพช็ฏๅฏนs="abcdefg"่ฟ่กๅพช็ฏ๏ผๆฏๆฌกๆๅฐ็ๅ
ๅฎนๆฏๆฏไธชๅญ็ฌฆๅ ไธsb๏ผ ไพๅฆ๏ผasb, bsb๏ผcsb,...gsbใ
# s="abcdefg"
# for i in s:
# print(i.join('sb'))
#
# for i in s:
# print(i + 'sb')
#
# # 6.ไฝฟ็จforๅพช็ฏๅฏนs="321"่ฟ่กๅพช็ฏ๏ผๆๅฐ็ๅ
ๅฎนไพๆฌกๆฏ๏ผ"ๅ่ฎกๆถ3็ง"๏ผ"ๅ่ฎกๆถ2็ง"๏ผ"ๅ่ฎกๆถ1็ง"๏ผ"ๅบๅ๏ผ"ใ
# s="321"
# for i in s:
# print('ๅ่ฎกๆถ'+ i +'็ง๏ผ')
# print('ๅบๅ๏ผ')
#
# # 7.ๅฎ็ฐไธไธชๆดๆฐๅ ๆณ่ฎก็ฎๅจ(ไธคไธชๆฐ็ธๅ )๏ผๅฆ๏ผcontent = input("่ฏท่พๅ
ฅๅ
ๅฎน:") ็จๆท่พๅ
ฅ๏ผ5+9ๆ5+ 9ๆ5 + 9๏ผ็ถๅ่ฟ่กๅๅฒๅ่ฟ่ก่ฎก็ฎใ
# content = input("่ฏท่พๅ
ฅๅ
ๅฎน:")
# content_space = content.replace(' ','')
# print(content_space)
# list = content_space.split('+')
# print(list)
# sum = 0
# for i in list:
# sum = sum + int(i)
# print(sum)
#
# # 8.ๅฎ็ฐไธไธชๆดๆฐๅ ๆณ่ฎก็ฎๅจ๏ผๅคไธชๆฐ็ธๅ ๏ผ๏ผๅฆ๏ผcontent = input("่ฏท่พๅ
ฅๅ
ๅฎน:") ็จๆท่พๅ
ฅ๏ผ5+9+6 +12+ 13๏ผ็ถๅ่ฟ่กๅๅฒๅ่ฟ่ก่ฎก็ฎใ
# content = input("่ฏท่พๅ
ฅๅ
ๅฎน:")
# content_space = content.replace(' ','')
# print(content_space)
# list = content_space.split('+')
# sum = 0
# for i in list:
# sum = sum + int(i)
# print(sum)
# # 9.่ฎก็ฎ็จๆท่พๅ
ฅ็ๅ
ๅฎนไธญๆๅ ไธชๆดๆฐ๏ผไปฅไธชไฝๆฐไธบๅไฝ๏ผใๅฆ๏ผcontent = input("่ฏท่พๅ
ฅๅ
ๅฎน๏ผ") # ๅฆfhdal234slfh98769fjdla
# str = '1234a'
# content = 0
# for i in str:
# if i.isdecimal() :
# content +=1
# print(content)
#
#
#
# # 10.้ๅ้ข**๏ผๅไปฃ็ ๏ผๅฎๆไธๅ้ๆฑ๏ผ็จๆทๅฏๆ็ปญ่พๅ
ฅ๏ผ็จwhileๅพช็ฏ๏ผ๏ผ็จๆทไฝฟ็จ็ๆ
ๅต๏ผ
# # ่พๅ
ฅA๏ผๅๆพ็คบ่ตฐๅคง่ทฏๅๅฎถ๏ผ็ถๅๅจ่ฎฉ็จๆท่ฟไธๆญฅ้ๆฉ๏ผ
# # ๆฏ้ๆฉๅ
ฌไบค่ฝฆ๏ผ่ฟๆฏๆญฅ่ก๏ผ
# # ้ๆฉๅ
ฌไบค่ฝฆ๏ผๆพ็คบ10ๅ้ๅฐๅฎถ๏ผๅนถ้ๅบๆดไธช็จๅบใ
# # ้ๆฉๆญฅ่ก๏ผๆพ็คบ20ๅ้ๅฐๅฎถ๏ผๅนถ้ๅบๆดไธช็จๅบใ
# # ่พๅ
ฅB๏ผๅๆพ็คบ่ตฐๅฐ่ทฏๅๅฎถ๏ผๅนถ้ๅบๆดไธช็จๅบใ
# # ่พๅ
ฅC๏ผๅๆพ็คบ็ป้ๅๅฎถ๏ผ็ถๅๅจ่ฎฉ็จๆท่ฟไธๆญฅ้ๆฉ๏ผ
# # ๆฏ้ๆฉๆธธๆๅ
็ฉไผ๏ผ่ฟๆฏ็ฝๅง๏ผ
# # ้ๆฉๆธธๆๅ
๏ผๅๆพ็คบ โไธไธชๅๅฐๆถๅฐๅฎถ๏ผ็ธ็ธๅจๅฎถ๏ผๆฟๆฃ็ญไฝ ใโๅนถ่ฎฉๅ
ถ้ๆฐ่พๅ
ฅA๏ผB,C้้กนใ
# # ้ๆฉ็ฝๅง๏ผๅๆพ็คบโไธคไธชๅฐๆถๅฐๅฎถ๏ผๅฆๅฆๅทฒๅๅฅฝไบๆๆๅๅคใโๅนถ่ฎฉๅ
ถ้ๆฐ่พๅ
ฅA๏ผB,C้้กนใ
# while 1 :
# print('A:่ตฐๅคง่ทฏ๏ผB๏ผ่ตฐๅฐ่ทฏ,C:็ป้')
# choice = input('่ฏท่พๅ
ฅ๏ผ')
# choice_upper = choice.upper()
# if choice_upper == 'A':
# print('ไฝ ้ๆฉไบ่ตฐๅคง่ทฏ')
# choice1 = input('่ฏท้ๆฉA:ๅๅ
ฌไบค๏ผB๏ผ่ตฐ่ทฏ:')
# choice_upper1 = choice1.upper()
# if choice_upper1 == 'A':
# print('ๅๅ
ฌไบค๏ผๅๅ้ๅๅฐๅฎถ')
# break
# else:
# print('่ตฐ่ทฏๅๅฎถ๏ผไบๅๅ้ๅๅฐๅฎถ')
# break
# elif choice_upper == 'B':
# print('ไฝ ้ๆฉไบ่ตฐๅฐ่ทฏ')
# break
# else:
# print('ไฝ ้ๆฉไบ็ป้')
# choice2 = input('่ฏท้ๆฉA:ๅบๆธธๆๅ
๏ผB๏ผๅป็ฝๅง:')
# choice_upper2 = choice2.upper()
# if choice_upper2 == 'A':
# print('ไธไธชๅๅฐๆถๅฐๅฎถ๏ผ็ธ็ธๅจๅฎถ๏ผๆฟๆฃ็ญไฝ ใ')
# else:
# print('ไธคไธชๅฐๆถๅฐๅฎถ๏ผๅฆๅฆๅทฒๅๅฅฝไบๆๆๅๅคใ')
#
# # 1.ๅไปฃ็ ๏ผ่ฎก็ฎ1 - 2 + 3 - 4 + 5 - 6... + 99ไธญ้คไบ88ไปฅๅคๆๆๆฐ็ๆปๅ๏ผ
# s1 = 0
# s2 = 0
# i = 1
# while i <= 99 :
# if i % 2 == 0:
# if i == 88:
# i += 1
# continue
# else:
# s1 -= i
# else:
# s2 += i
# i += 1
# print(s1+s2)
#
# # 2. ** ้ๅ้ข๏ผ ** ้ๅ้ข๏ผๅคๆญไธๅฅ่ฏๆฏๅฆๆฏๅๆ.ๅๆ: ๆญฃ็ๅฟตๅๅ็ๅฟตๆฏไธๆ ท็.ไพๅฆ, ไธๆตท่ชๆฅๆฐดๆฅ่ชๆตทไธ
# str = input('่ฏท่พๅ
ฅ:')
# if str[-1::-1] == str:
# print("ๆฏ")
# else:
# print('ไธๆฏ')
#
# # 3.ๅถไฝ่ถฃๅณๆจกๆฟ็จๅบ้ๆฑ๏ผ็ญๅพ
็จๆท่พๅ
ฅๅๅญใๅฐ็นใ็ฑๅฅฝ๏ผๆ นๆฎ็จๆท็ๅๅญๅ็ฑๅฅฝ่ฟ่กไปปๆ็ฐๅฎ,ๅฆ๏ผๆฌ็ฑๅฏไบฒ็xxx๏ผๆๅๆฌขๅจxxxๅฐๆนๅนฒxxx
# your_name = input('่ฏท่พๅ
ฅๅงๅ')
# your_place = input('่ฏท่พๅ
ฅๅฐ็น')
# your_hobby = input('่ฏท่พๅ
ฅ็ฑๅฅฝ')
# msg = 'ๅฏไบฒ็{name}๏ผๆๅๆฌขๅจ{place}ๅฐๆนๅนฒ{hobby}'.format(name = your_name,place = your_place,hobby = your_hobby)
# print(msg)
|
[
"zhangchenghao0617@qq.com"
] |
zhangchenghao0617@qq.com
|
49bee32f7d8ddec8715ce98a577f088ab50e9d45
|
a961a54e8d466b1fb98aee86d437ed6872e98d18
|
/shows/management/commands/get_english_names.py
|
403bad44964aea456ad293b48eb19dc7b5d768de
|
[] |
no_license
|
akaram94/animemelody
|
6bcc75e2746def0f3638b0e6acf6cf5b0e2b6f7c
|
2adf79f232d06733bbc8af4def3778ea55838efa
|
refs/heads/master
| 2023-08-19T00:19:43.688266
| 2020-07-27T16:24:56
| 2020-07-27T16:24:56
| 268,275,539
| 0
| 0
| null | 2021-09-22T19:08:05
| 2020-05-31T12:29:51
|
Python
|
UTF-8
|
Python
| false
| false
| 1,233
|
py
|
from django.core.management.base import BaseCommand, CommandError
from shows.models import Show, Theme
import datetime
import requests
import json
import time
class Command(BaseCommand):
help = '''
Used to import english show names from the Jikan API (rate limit of 2s)
'''
def handle(self, *args, **options):
feed_url = 'https://api.jikan.moe/v3/anime/'
message = 'Getting english names...'
shows = Show.objects.filter(english_name='')
print(message)
show_count = 0
# Retrieve all data from each endpoint
for show in shows:
current_url = feed_url + str(show.mal_id)
time.sleep(2)
r = requests.get(current_url)
data = r.json()
if data['title_english']:
title_english = str(data['title_english'])
else:
title_english = None
show.english_name = title_english
show.save()
show_count = show_count + 1
print(show_count)
print('----------------------------------------------------------')
print('Show Count: ' + str(show_count))
print('Successfully updated.')
|
[
"root@DESKTOP-FIFE14Q.localdomain"
] |
root@DESKTOP-FIFE14Q.localdomain
|
ef967c7df544cf14fd81acac69d9aa53ed4449d0
|
9cd4bd2d3c43d14a78c4f72dd2d8c6900aec56c4
|
/TabuadaPython.py
|
7f386b00ecc7d585dacc937cfee125710246f99e
|
[] |
no_license
|
paulorod07/Tabuada
|
937005686f0a9fb1c5b8a37dc49ae7178477c97f
|
14ced3e51ec0ce2c9c7c92bdf6696ce24a956a45
|
refs/heads/master
| 2020-04-27T16:22:05.703246
| 2019-03-08T06:41:19
| 2019-03-08T06:41:19
| 174,481,856
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 152
|
py
|
print('Bem Vindo a Tabuada!')
numero = int(input('Digite um numero para a tabuada: '))
for n in range(11):
print(numero, 'x', n, '=', numero * n)
|
[
"noreply@github.com"
] |
paulorod07.noreply@github.com
|
e714abb10599586f922ae28ac3e229eafdfbbc3b
|
711ca7d1996f3eab0c9f8338fd49c9067274d5e5
|
/image_augment-master/test_detect_multi_process.py
|
34ce758c0c9c30786eaa30c6bbdf0b7112d8c2c3
|
[] |
no_license
|
zj463261929/tools
|
408ccdbe93ae00d4825b74d6e37cc2bd77208504
|
47545fbbb6779378ad833a7b84c89cc91848e345
|
refs/heads/master
| 2020-03-20T13:01:10.970807
| 2019-07-22T02:36:12
| 2019-07-22T02:36:12
| 137,446,532
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,139
|
py
|
#coding=utf-8
import codecs
import random
import cv2, os
import sys #sysๆฏPythonๅ
ๅปบๆ ๅๅบ
sys.path.append(os.getcwd())
from function import *
import function as FUN
import math
import numpy as np
import time
from test_detect_single import img_aug
from multiprocessing import Process
image_dir = "/opt/zhangjing/Detectron/data/oil_vehicle_person_10cls/VOCdevkit2007/VOC2007/aug/img/" #ๅๅงๅพ็่ทฏๅพ
xml_dir = "/opt/zhangjing/Detectron/data/oil_vehicle_person_10cls/VOCdevkit2007/VOC2007/aug/xml/" #ๅพ็ๅฏนๅบxml็่ทฏๅพ
image_txt1 = "/opt/zhangjing/Detectron/data/oil_vehicle_person_10cls/VOCdevkit2007/VOC2007/aug/minitruck.txt"#ๅๅงๅพ็ ๆ ็ญพๅฏนๅบ็txt
image_txt2 = "/opt/zhangjing/Detectron/data/oil_vehicle_person_10cls/VOCdevkit2007/VOC2007/aug/forklift.txt"#ๅๅงๅพ็ ๆ ็ญพๅฏนๅบ็txt
image_txt3 = "/opt/zhangjing/Detectron/data/oil_vehicle_person_10cls/VOCdevkit2007/VOC2007/aug/digger.txt"#ๅๅงๅพ็ ๆ ็ญพๅฏนๅบ็txt
'''image_txt4 = "/opt/oil_vehicle_person_10cls/VOCdevkit2007/VOC2007/ImageSets_8558/Main/train_4.txt"#ๅๅงๅพ็ ๆ ็ญพๅฏนๅบ็txt
image_txt5 = "/opt/oil_vehicle_person_10cls/VOCdevkit2007/VOC2007/ImageSets_8558/Main/train_5.txt"#ๅๅงๅพ็ ๆ ็ญพๅฏนๅบ็txt
image_txt6 = "/opt/oil_vehicle_person_10cls/VOCdevkit2007/VOC2007/ImageSets_8558/Main/train_6.txt"#ๅๅงๅพ็ ๆ ็ญพๅฏนๅบ็txt
'''
save_img_dir = "/opt/zhangjing/Detectron/data/oil_vehicle_person_10cls/VOCdevkit2007/VOC2007/aug/img_aug/"
save_xml_dir = "/opt/zhangjing/Detectron/data/oil_vehicle_person_10cls/VOCdevkit2007/VOC2007/aug/xml_aug/" #ๅค็ๅไฟๅญ็่ทฏๅพ๏ผๆฏไธชๅขๅผบๆนๆณไผไฟๅญๅจๅฏนๅบๆไปถๅคนไธ
if not os.path.exists(save_xml_dir):
os.mkdir(save_xml_dir)
if not os.path.exists(save_img_dir):
os.mkdir(save_img_dir)
if __name__ == "__main__":
print time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
print 'Parent process %s starts.' % os.getpid()
#ๅๅปบ็บฟ็จ
my_process1 = Process(target = img_aug, args=(image_dir,xml_dir,image_txt1,save_img_dir,save_xml_dir) , name= 'ys_process1')
my_process2 = Process(target = img_aug, args=(image_dir,xml_dir,image_txt2,save_img_dir,save_xml_dir) , name= 'ys_process2')
my_process3 = Process(target = img_aug, args=(image_dir,xml_dir,image_txt3,save_img_dir,save_xml_dir) , name= 'ys_process3')
'''
my_process4 = Process(target = img_aug, args=(image_dir,xml_dir,image_txt4,save_img_dir,save_xml_dir) , name= 'ys_process4')
my_process5 = Process(target = img_aug, args=(image_dir,xml_dir,image_txt5,save_img_dir,save_xml_dir) , name= 'ys_process5')
my_process6 = Process(target = img_aug, args=(image_dir,xml_dir,image_txt6,save_img_dir,save_xml_dir) , name= 'ys_process6')
'''
#็ญๅพ
2s
time.sleep(2)
#ๅฏๅจ็บฟ็จ
my_process1.start()
my_process2.start()
my_process3.start()
'''my_process4.start()
my_process5.start()
my_process6.start()'''
#็ญๅพ
็บฟ็จ็ปๆ
my_process1.join()
my_process2.join()
my_process3.join()
'''my_process4.join()
my_process5.join()
my_process6.join()'''
print time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
print 'process %s ends.' % os.getpid()
|
[
"zhagnjing1@mail.e-u.cn"
] |
zhagnjing1@mail.e-u.cn
|
424d47cd8ca4fd8aff8087b739e35b6c874f1161
|
35ce7e974e1651b1031841d84bce190be73be196
|
/autonomous_car/system/controller/pid_controller.py
|
7da59857bae3bd7f590eaffa8b93d5e6a45e0ef8
|
[] |
no_license
|
th-sato/py-graduate-final-project
|
459b515b1ba4629fb55ca0f028f0bb92ab715167
|
48d7afb0c0c49ed98a1263c9efdfcf26e99673cd
|
refs/heads/master
| 2022-01-25T15:46:28.268045
| 2019-07-07T03:48:02
| 2019-07-07T03:48:02
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,583
|
py
|
class PIDController:
def __init__(self):
# Constantes do controlador
# Proporcional
self.KP = {'angle': 250.0, 'speed': 5.0}
# Integral
# self.KI = {'angle': 30.0, 'speed': 1.0}
# Derivative
# self.KD = {'angle': 1.0, 'speed': 1.0}
self.max_error = 100000.0
self.min_error = -100000.0
# self.previous_i_error = {'angle': 0.0, 'speed': 0.0}
# self.previous_d_error = {'angle': 0.0, 'speed': 0.0}
# self.previous_time = 0.0
def reset(self, now_time):
# self.previous_i_error = {'angle': 0, 'speed': 0}
# self.previous_d_error = {'angle': 0, 'speed': 0}
# self.previous_time = now_time
return
def proportional(self, error, variable):
proporcional_controller = self.KP[variable] * error
return self.__set_max(proporcional_controller, self.min_error, self.max_error)
# def integral(self, error, time_interval, variable):
# actual_error = error * time_interval
# integral_part = self.previous_i_error[variable] + actual_error
# self.previous_i_error[variable] = integral_part
# integral_controller = self.KI[variable] * integral_part
# return self.__set_max(integral_controller, self.min_error, self.max_error)
# def derivative(self, error, time_interval, variable):
# derivative_part = (error - self.previous_d_error[variable]) / time_interval
# self.previous_d_error[variable] = error
# derivative_controller = self.KD[variable] * derivative_part
# return self.__set_max(derivative_controller, self.min_error, self.max_error)
def p_controller(self, error, variable, interval=0.01):
p = self.proportional(error, variable)
# i = self.integral(error, interval, variable)
# d = self.derivative(error, interval, variable)
return p
def output(self, distance_center, radius_curvature, run_time):
try:
# interval = run_time - self.previous_time
# self.previous_time = run_time
angle = self.p_controller(distance_center, 'angle')
# speed = self.pid_controller(radius_curvature, interval)
speed = 45
return speed, angle
except Exception as e:
print str(e)
return 0, 0
@staticmethod
def __set_max(value, min_value, max_value):
if value > 0 and value > max_value:
return max_value
if value < 0 and value < min_value:
return min_value
return value
|
[
"thiagosato22@hotmail.com"
] |
thiagosato22@hotmail.com
|
f15414bf81d05d0860a3345a7f7b0679f6cbba74
|
db35cba13f89601467b4bb4553d2b648c18fc3fb
|
/stores/topshop.py
|
8e7c906ff53b33b1ba4e7916231b878688d31d1a
|
[] |
no_license
|
jitsejan/outfitter
|
e6d73fcc2ec46c5236207c8bb8e1e72fc8929c38
|
53faab3e30e312bbdbc4ca0154efe35592708a8b
|
refs/heads/master
| 2023-01-02T20:15:12.519289
| 2016-09-15T14:07:00
| 2016-09-15T14:07:00
| 309,139,169
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,113
|
py
|
################################################################################
# Application: Outfitter
# File: topshop.py
# Goal: topshop.py will retrieve specific information from a
# given Topshop link and save the data to a variable
# Input: url of website
# Output: Target data
# Example: info = crawl("http://eu.topman.com/en/tmeu/product/clothing-617800/mens-jumpers-cardigans-617811/charcoal-shawl-cardigan-4656120?bi=0&ps=20")
#
# History: 2015-09-28 - JJ Creation of the file
# 2015-10-29 - JJ Added functionality to retrieve all images
#
################################################################################
################################################################################
# Imports
################################################################################
from product import Product
################################################################################
# Definitions
################################################################################
author = "JJ"
appName = "Topshop Crawler"
################################################################################
# Functions
################################################################################
################################################################################
# Classes
################################################################################
class TopshopProduct(Product):
def __init__(self, *args, **kwargs):
super(TopshopProduct,self).__init__('Topshop', *args, **kwargs)
################################################################################
# Function: _get_image
# Input: tree
# Output: URL of the image
# Goal: Find the image URL and return it
# Targets: Use a css selecter to find the image URL
# Example: Find <meta property="og:image" content="http://media.topman.com/wcsstore/TopManEU/images/catalog/81F22KCHR_normal.jpg"/>
def _get_images(self):
images = []
try:
# Use css to select image
image_data = self._tree.cssselect('meta[property*=\'og:image\']')
# Save the image link from the content field to the variable image
images.append(image_data[0].attrib['content'])
except:
pass
# Return the link to the image
return images
################################################################################
# Function: _get_price
# Input: tree
# Output: Price in euro
# Goal: Find the price and return it
# Targets: Use a css selecter to find the price
# Example: Find <span itemprop="offers"><meta itemprop="price" content="E 39,95">
def _get_price(self):
price = ""
try:
# Use css to select the meta-tag with name equal to twitter:data1
price_meta = self._tree.cssselect('meta[property*=\'og:price:amount\']')
# Save the price from the content field to the variable price
price = price_meta[0].attrib['content']
except:
pass
# Return the price
return price
def _get_currency(self):
currency = ""
try:
currency_meta = self._tree.cssselect('meta[property*=\'og:price:currency\']')
currency = currency_meta[0].attrib['content']
except:
pass
# Return the currency
return currency
################################################################################
# Function: _get_color
# Input: tree
# Output: Color
# Goal: Find the color and return it
# Targets: Use a css selecter to find the color
# Example: Find <ul class="product_summary"><li class="product_colour">Colour: <span>GREY</span></li>
def _get_color(self):
color = ""
try:
# Use css to select to find the color
color_meta = self._tree.cssselect('ul[class*=\'product_summary\'] li[class*=\'product_colour\'] span')
# Save the price from the content field to the variable price
color = color_meta[0].text_content().strip()
except:
pass
# Return the color
return color
################################################################################
# Function: _get_title
# Input: tree
# Output: Title
# Goal: Find the title and return it
# Targets: Use a css selecter to find the title
# Example: Find <div class="title"><h1>ASOS Jumper<h1>
def _get_title(self):
title = ""
try:
# Use css to select the meta-tag with name equal to description
title_data = self._tree.cssselect('meta[property*=\'og:description\']')
# Retrieve the text from h1 and strip unwanted characters
title = title_data[0].attrib['content']
except:
pass
# Return the title
return title
################################################################################
# Function: _get_category
# Input: self
# Output: Title
# Goal: Find the title and return it
# Targets: Use a css selecter to find the title
# Example: Find last word of description. There is no category explicitly
def _get_category(self):
category = ""
try:
# Use css to select the meta-tag with name equal to description
category_data = self._tree.cssselect('meta[property*=\'og:description\']')
# Retrieve the text and take the last word
category = category_data[0].attrib['content'].rsplit(' ')[-1]
except:
pass
# Return the title
return category
def _get_brand(self):
brand = ""
return brand
################################################################################
# main
################################################################################
def get_product(url):
product = TopshopProduct(url)
return product
def get_price(url):
product = TopshopProduct(url)
return product.get_price()
|
[
"jitsejan@gmail.com"
] |
jitsejan@gmail.com
|
f6cc157fcac65c81a3809080db0f1aac9d4e2f7d
|
82f40f23ea87d8ed09974a6ce87418cb7934b14b
|
/Logistic Regression & MultiLayer Perceptron/Code/logistic_kclass.py
|
83abd0da19f525bde30fef8e517fe84551510fd0
|
[] |
no_license
|
priyankpshah/DataAnalysis_MachineLearning
|
22a69bea1426d3291461177a4ddfd67a9f19741b
|
68c7a27a84b341a2a19d7481410536c23750fa24
|
refs/heads/master
| 2021-06-08T14:51:05.167801
| 2016-12-15T21:37:28
| 2016-12-15T21:37:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,061
|
py
|
from __future__ import division
from sklearn.datasets import load_iris, fetch_mldata
from sklearn.cross_validation import KFold
import numpy as np
def indicator(class_fact, class_val):
ind = []
for label in class_fact:
if label == class_val:
ind.append(1)
else:
ind.append(0)
return np.asarray(ind)
def Hypo(theta, X, thetas):
den = exp_sum(thetas, X)
hypo = np.exp(np.dot(X, theta))
hypo /= den
return hypo
def exp_sum(thetas, X):
sum = 0
m, n = np.shape(thetas)
for i in range(n):
sum += np.exp(np.dot(X, thetas[:, i]))
return sum
def Find_Theta(X, Y, estimate, iterations):
calssval = [0,1,2]
x, y = np.shape(X)
mul_theta = np.ones((y, len(calssval)))
for j in range(iterations):
for i, c in enumerate(calssval):
theta = mul_theta[:, i]
temp_hypo = Hypo(theta, X, mul_theta)
ind = indicator(Y, c)
theta_i = estimate * (np.sum((temp_hypo - ind).reshape(len(temp_hypo), 1) * X, axis=0))
theta_i = theta_i.reshape(theta.shape)
theta = theta - theta_i
mul_theta[:, i] = theta
return mul_theta
def confuide_mat(ytest, ypredict):
cm = []
clab = [0,1,2]
for i in clab:
tmp = [0] * len(clab)
for j in range(len(ytest)):
if ytest[j] == i and ytest[j] == ypredict[j]:
tmp[clab.index(i)] += 1
elif ytest[j] == i and ytest[j] != ypredict[j]:
tmp[clab.index(ypredict[j])] += 1
cm.append(tmp)
return np.array(cm)
def predict(X_Test, thetas):
Y_prediction = []
thetas = thetas.T
#print thetas
for x in X_Test:
h = -np.inf
for i, theta in enumerate(thetas):
h_hat = np.dot(x, theta)
#print h_hat
if h_hat > h:
h = h_hat
label = i
Y_prediction.append(label)
return Y_prediction
def confusion_mat(cm):
precesion = np.zeros(2)
recall = np.zeros(2)
f1measure = np.zeros(2)
accuracy = 0
tot = np.sum(confusion_mat)
for i in range(0,2):
for j in range(0,2):
precesion[i] += cm[j][i]
recall[i] += cm[i][j]
if(i==j):
accuracy = accuracy + cm[i][j]
precesion[i] = cm[i][i]/precesion[i]
recall[i] = cm[i][i]/recall[i]
f1measure[i] = ((2*precesion[i]*recall[i])/(precesion[i]+recall[i]))
accuracy = float(accuracy)/tot
return precision,recall,f_measure,accuracy
if __name__ == "__main__":
mnist = fetch_mldata('MNIST original')
X, Y = mnist.data / 255., mnist.target
matrix = np.concatenate((X[Y == 0], X[Y == 1], X[Y == 2]), axis=0)
y = np.concatenate((Y[Y == 0], Y[Y == 1], Y[Y == 2]), axis=0)
kf = KFold(X.shape[0], n_folds=10, shuffle=True)
accuracy = 0.0
precision = np.zeros(3)
recall = np.zeros(3)
f_measure = np.zeros(3)
for train,test in kf:
X_Train, X_Test = X[train], X[test]
Y_Train, Y_Test = Y[train], Y[test]
thetas = Find_Theta(X_Train, Y_Train, 0.001, 2500)
Y_Prediction = predict(X_Test, thetas)
cm = confuide_mat(Y_Test, Y_Prediction)
pre, rec, f1, acc = confusion_mat(cm)
precision = np.add(precision, pre)
recall = np.add(recall, rec)
f_measure = np.add(f_measure, f1)
accuracy = accuracy + acc
precision = map(lambda x: x/10, precision)
recall = map(lambda x: x/10, recall)
f1measure = map(lambda x: x/10, f_measure)
accuracy /= 10
print " Confusion Matrix:"+ str(cm[0])
print "\t\t\t\t "+ str(cm[1])
print " Precesion: Recall: F-1 Measures"
print "Class 0: " + str(round(precision[0],3))+" "+str(round(recall[0],3))+" "+str(round(f1measure[0],3))
print "Class 1: " + str(round(precision[1],3))+" "+str(round(recall[1],3))+" "+str(round(f1measure[1],3))
print "\nAccuracy: "+str(round(accuracy,3)*100)+ "%"
|
[
"priyank687@gmail.com"
] |
priyank687@gmail.com
|
668cd341318eeaefb3cfccd3c9694710dc5d5f46
|
6dec3256279f73c563b116a8fa44900c4b51a4ce
|
/scrapy_demo/scrapy_demo/pipelines.py
|
736cf257e01c3b8c355daf21b7e4d34a1cf2bf32
|
[] |
no_license
|
gannonk08/scrapy-demo
|
6802cfbe109ff428f58c5e9957ac109bfed282a0
|
86939c56b83c9142ac129f02769770128d1f6fc6
|
refs/heads/master
| 2020-07-05T22:50:47.026269
| 2016-11-17T21:09:01
| 2016-11-17T21:09:01
| 73,976,758
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,052
|
py
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import psycopg2
import logging
from spiders.items import awayTeamRushItem
from scrapy.conf import settings
from scrapy.exceptions import DropItem
class ScrapyDemoPipeline(object):
def __init__(self):
self.connection = psycopg2.connect(host='localhost', database='scraping_demo', user='Gannon')
self.cursor = self.connection.cursor()
def process_item(self, item, spider):
try:
if type(item) is awayTeamRushItem:
table = """awayteamrush"""
self.cursor.execute("""INSERT INTO """ + table + """ (rusher, car, yds, avg, td, longest) VALUES(%s, %s, %s, %s, %s, %s)""", (item.get('rusher'), item.get('car'), item.get('yds'), item.get('avg'), item.get('td'), item.get('longest')))
self.connection.commit()
self.cursor.fetchall()
except psycopg2.DatabaseError, e:
print "Error: %s" % e
return item
|
[
"gannonk08@gmail.com"
] |
gannonk08@gmail.com
|
595635a1e4beaf535e5f7640bd2b7ca17a517b2c
|
b205909a015abf08dc15133797c465fd58a48352
|
/sdes.py
|
4d2868d7e9e2d273c541caee44e96f0f1bba4ee5
|
[] |
no_license
|
AlexanderFrancoletti/CryptographyHW1
|
2648f5b52053504ac6cbceb7964a596d91c1a4bb
|
2d0b728046b91d099c3f6af9996c805db0df566e
|
refs/heads/master
| 2020-03-29T05:20:02.226714
| 2018-09-20T17:02:00
| 2018-09-20T17:02:00
| 149,577,970
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,547
|
py
|
#Define keys initially
IP = [2, 6, 3, 1, 4, 8, 5, 7]
EP = [4, 1, 2, 3, 2, 3, 4, 1]
IP_inverse = [4, 1, 3, 5, 7, 2, 8, 6]
P10 = [3, 5, 2, 7, 4, 10, 1, 9, 8, 6]
P8 = [6, 3, 7, 4, 8, 5, 10, 9]
P4 = [2, 4, 3, 1]
sbox0 = [[1, 0, 3, 2],
[3, 2, 1, 0],
[0, 2, 1, 3],
[3, 1, 3, 2]]
sbox1 = [[0, 1, 2, 3],
[2, 0, 1, 3],
[3, 0, 1, 0],
[2, 1, 0, 3]]
default_key = '1010000010'
#Function which returns a string of bits randomized based on the key being used
def permutate(original, key):
return ''.join(original[i-1] for i in key)
#bitList = []
#for char in bitString:
# if char == '1':
# bitList.append(1)
# else:
# bitList.append(0)
#return bitList
#Returns the left half of bits in any key
def keyLeftHalf(bits):
return bits[:(len(bits)/2)]
#Returns the right half of bits in any key
def keyRightHalf(bits):
return bits[(len(bits)/2):]
#Split the bits passed in into 2 different halves, and then perform a leftwise shift on each half
#and returns the combined result
def shift(bits):
leftHalfShift = keyLeftHalf(bits)[1:] + keyLeftHalf(bits)[0]
rightHalfShift = keyRightHalf(bits)[1:] + keyRightHalf(bits)[0]
return leftHalfShift+rightHalfShift
#This function generates the key from the first round of shifts
def generateKey1():
return permutate(shift(permutate(default_key, P10)), P8)
#This function generates the key from the second round of shifts
def generateKey2():
return permutate(shift(shift(shift(permutate(default_key, P10)))), P8)
#XOR function, this returns the string of bits created by applying the XOR operation between 2 bits
def xor(bits, key):
newBitString = ''
for bit, key_bit in zip(map(int, bits), map(int, key)):
if (bit != key_bit):
newBitString = newBitString + '1'
else:
newBitString = newBitString + '0'
return newBitString
#This performs the non-linear sbox operations, which will transform the bits used as input
#based on the sbox tables defined above
def sboxLookup(bits, sbox):
row = int(bits[0] + bits[3], 2)
col = int(bits[1] + bits[2], 2)
return '{0:02b}'.format(sbox[row][col])
#This is the function which will actually perform the DES algorithm by using the
#helper functions above
def fk(bits, key):
#Split the bits into left and right halves
left = keyLeftHalf(bits)
right = keyRightHalf(bits)
#Permutate the right half
bits = permutate(right, EP)
bits = xor(bits, key)
#Transform the bits using the sbox table
bits = sboxLookup(keyLeftHalf(bits), sbox0) + sboxLookup(keyRightHalf(bits), sbox1)
#Permutate the resulting bits using the 4 bit key
bits = permutate(bits, P4)
#Perform xor operation on the
return xor(bits, left)
#Encryption call takes 8 bit inputs and calls the DES functions in order
def encrypt(plainText):
#bits = stringToBits(plainText)
#Cipher the bits according to the IP key
bits = permutate(plainText, IP)
#Store the first key generated by the DES algorithm
temp = fk(bits, generateKey1())
#Generate the second key for the DES algorithm
bits = keyRightHalf(bits) + temp
bits = fk(bits, generateKey2())
#Return the resulting encrypted string of bytes
return permutate(bits + temp, IP_inverse)
#Decryption call has identical operations as encrypt, with the exception of
#calling the Key2 function first
def decrypt(cipherText):
bits = permutate(cipherText, IP)
temp = fk(bits, generateKey2())
bits = keyRightHalf(bits) + temp
bits = fk(bits, generateKey1())
return permutate(bits + temp, IP_inverse)
|
[
"alexander.francoletti@gmail.com"
] |
alexander.francoletti@gmail.com
|
45deb898113df9ae83c65ce0424014c7fb729b1e
|
5ee088a8573a305ea30bfc6d083c4ec6a1112d1f
|
/doule_exit/InitialPedestrian.py
|
d22aa771df9f3d3e6f99b9247685e1904db665b2
|
[] |
no_license
|
Poloma07/CA_Multi_Exits
|
f176d3c68625e6ab571b08c20229416960931659
|
c1736861c81a4328d9344522cc530bda7a8f67a3
|
refs/heads/master
| 2020-04-18T16:57:45.466170
| 2019-01-13T12:24:39
| 2019-01-13T12:24:39
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,499
|
py
|
import random
import numpy as np
import Data, Block
import matplotlib.pyplot as plt
def creatPeople():
'''
ไบง็้ๆบ่กไบบ
:return:่กไบบๅ่กจ
'''
allBlock = [] # ็จไบๅญๆพๆ ผๅญ
allPeople = [] # ็จไบๅญๆพ่กไบบ
# ๅฐๆๆๆ ผๅญๅ
จ้จๅญๅ
ฅๅ่กจ'
for i in range(1, Data.ROOM_M):
for j in range(1, Data.ROOM_N):
b = Block.Block()
b.x = i
b.y = j
if random.random() > 0.5: # ้กบๆถ้ๅ้ๆถ้่กไบบๅๅ ไธๅ
b.clock_wise = True
else:
b.clock_wise = False
#----------ๅๅงๅ่กไบบๆถ็-------------
b.income_inertia = np.zeros(9)
b.income_wall = np.zeros(9)
b.income_exit = np.zeros(9)
b.income_memory = np.zeros(9)
b.income_all = np.zeros(9)
allBlock.append(b) # ๆทปๅ ่กไบบ
random.shuffle(allBlock) # ้ๆบๆๅบ
allPeople = allBlock[:Data.PEOPLE_NUMBER] # ๅๅNไธช ๅฏๆๆ้ฒๆญขๆ ้ไบง็้ๆบๆฐ
return allPeople
def creatAppointPeo():
'''
ไบง็ๆๅฎ่กไบบ
:return: ่กไบบๅ่กจ
'''
allPeople = []
b3 = Block.Block()
b3.x = 3
b3.y = 10
b3.type = False
b3.clock_wise = False
b3.income_inertia = np.zeros(9)
b3.income_wall = np.zeros(9)
b3.income_exit = np.zeros(9)
b3.income_memory = np.zeros(9)
b3.income_all = np.zeros(9)
allPeople.append(b3)
return allPeople
|
[
"1279010287@qq.com"
] |
1279010287@qq.com
|
0da39b2b6595f0a25f70e3735197ce8c382da45b
|
c7522a46908dfa0556ed6e2fe584fd7124ee5cdc
|
/ApplicationUsers/views.py
|
80dd9c405729e423ad243becbd6d5c57ca1b5930
|
[] |
no_license
|
stheartsachu/Eventlee
|
461cf35961a7f294229d6c611e58a09d9f4e1eb5
|
6b67dfc873203f1322c16664923ffe5a760d50ed
|
refs/heads/master
| 2022-11-13T14:48:39.097718
| 2020-06-30T04:54:45
| 2020-06-30T04:54:45
| 276,000,638
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,806
|
py
|
from django.shortcuts import render,HttpResponse,redirect,HttpResponseRedirect
from ApplicationUsers.form import ApplicationuserForm
# Create your views here.
from ApplicationUsers.models import users
def home(request):
return render(request,"index.html")
def Contact(request):
return render(request,"contact.html")
def gallery(request):
return render(request,"gallery.html")
def signup(request):
if request.method == 'POST':
form = ApplicationuserForm(request.POST)
f = form.save(commit=False)
f.first_name = request.POST['fn']
f.last_name = request.POST['ln']
f.email = request.POST['email']
if request.POST['p1'] == request.POST['p2']:
f.password = request.POST['p2']
else:
return HttpResponse("<h1> Password and Confirm password is not same</h1>")
f.status = True
f.save()
return HttpResponse("User is created sucessfully now, can login to website")
return render(request, 'registration.html')
def login(request):
if request.method == "POST":
un = request.POST["email"]
up = request.POST["password"]
try:
data = users.objects.get(email=un)
except:
return render(request, "login.html", {'emailerror': True})
dp = data.password
active = data.status
if (active == False):
return render(request, "login.html", {'activeerror': True})
else:
if (dp == up):
request.session['emailid'] = un
request.session['Authentication'] = True
return HttpResponse("You are sucessfullly login")
else:
return render(request, "login.html", {'passworderror': True})
return render(request, "login.html")
|
[
"seartsachu@gmail.com"
] |
seartsachu@gmail.com
|
0153dcd0356625cc5eeb56abefd0fa3a6ed54d56
|
1f0837796a613fcf1788b2361fe4d8d5943bcc47
|
/Tarea Nยบ1/Ejer7.py
|
5bd8720ac4dfec14680de5faa08d98c167e2b592
|
[] |
no_license
|
LeonardoArroba/Tarea-de-Estructura
|
557f47206c007dad6a7ed5ecc8e29f73c1ab895c
|
d37e9e9fdf1a562c8f5d29c5fee8e836d87a32c9
|
refs/heads/main
| 2023-06-02T12:36:43.153125
| 2021-06-30T02:53:46
| 2021-06-30T02:53:46
| 381,556,550
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,180
|
py
|
#Determinar la cantidad de dinero que recibirรก un trabajador por concepto de las horas extras trabajadas en una empresa,
# sabiendo que cuando las horas de trabajo exceden de 40, el resto se consideran horas extras y que รฉstas se pagan al doble
# de una hora normal cuando no exceden de 8; si las horas extras exceden de 8 se pagan las primeras 8 al doble de lo que
# se paga por una hora normal y el resto al triple
"""Ejercicio 7"""
class Trabajador:
def __init__(self):
pass
def calcularJornada(self):
ht, he, het=0,0,0
ph, phe, pt, ph8=0,0,0,0
ht = int(input("Ingrese horas trabajadas: "))
ph = float(input("Ingrese valor hora: "))
if ht > 40:
he = ht-40
if he > 8:
het = he-8
ph8 = 8*ph*2
ph8 =het*ph*3
else:
ph8 = he*ph*2
pt = 40*ph+phe+ph8
else:
pt = ht*ph
print("Sobretiempo<8:{} Sobretiempo>8:{} Jornada:{} ".format(ph8,phe,pt))
tarea = Trabajador()
tarea.calcularJornada()
|
[
"noreply@github.com"
] |
LeonardoArroba.noreply@github.com
|
cdcf3cbc200241606b4cc9140c33f8aa85da216f
|
2c332ae49c8130ab88ee9be7b092f66e4f88324a
|
/MyOwnSolution/compare_images2.py
|
91e3c8d21c645f96e8bf20b90a1abd0d7e2fdea6
|
[] |
no_license
|
lalona/parking-spot-classifier
|
fbc34b385d3620513126c91ecc106c3727a0be63
|
835ae0a514daecf623ba456b2ee5cab26123f939
|
refs/heads/master
| 2020-04-07T09:44:28.396823
| 2019-05-02T19:51:14
| 2019-05-02T19:51:14
| 158,263,531
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,367
|
py
|
"""
La idea es simple: con una imagen donde el espacio se encuentre vacio se compara con otra imagenes y dependiendo las diferencias
se concluye si estรก ocupado o vacio
"""
import cv2
import os
from skimage.measure import compare_ssim as ssim
from skimage.measure import compare_nrmse as nrmse
import pickle
import argparse
from operator import itemgetter
from itertools import groupby
import matplotlib.pyplot as plt
import numpy as np
from tqdm import tqdm
import ntpath
import json
path_pklot = 'C:\\Eduardo\\ProyectoFinal\\Datasets\\PKLot'
def getGrayscaleImage(filepath):
"""
Read the image files and converts it to gray scale
:param filepath: the path to the image
:return: the image in grayscale
"""
image = cv2.imread(filepath)
return cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
def extractUniqueItemsByKey(list, key):
"""
This will take a list and sorted by the key and
then it will return the list with just the elements from that
key without duplicates
:param list:
:param key:
:return:
"""
list.sort(key=lambda x: x[key])
return [k for k, v in groupby(list, key=lambda x: x[key])]
def getNewImageInfo(image_info):
image_info['filepath'] = os.path.join(path_pklot, image_info['filepath'], image_info['filename'])
return image_info
def mse(imageA, imageB):
# the 'Mean Squared Error' between the two images is the
# sum of the squared difference between the two images;
# NOTE: the two images must have the same dimension
err = np.sum((imageA.astype("float") - imageB.astype("float")) ** 2)
err /= float(imageA.shape[0] * imageA.shape[1])
# return the MSE, the lower the error, the more "similar"
# the two images are
return err
def main():
parser = argparse.ArgumentParser(description='Select the type of reduced.')
parser.add_argument("-f", "--filename", type=str, required=True,
help='Path to the file the contains the dictionary with the info of the dataset reduced.')
args = vars(parser.parse_args())
info_filename = args["filename"]
# test set
with open(info_filename, "rb") as fp: # Unpickling
images_info = pickle.load(fp)
grouper = itemgetter('parkinglot', 'space')
images_info = sorted(images_info, key=grouper)
parkinglots = extractUniqueItemsByKey(images_info, 'parkinglot')
images_info_by_patkinglot = {}
for parkinglot in parkinglots:
image_info_parkinglot = [i for i in images_info if i['parkinglot'] == parkinglot]
spaces_parkinglot = extractUniqueItemsByKey(image_info_parkinglot, 'space')
images_info_by_spaces = {}
for space in spaces_parkinglot:
images_info_by_spaces[space] = [getNewImageInfo(i) for i in image_info_parkinglot if i['space'] == space]
images_info_by_patkinglot[parkinglot] = images_info_by_spaces
# Hasta este punto ya tengo un dictionario dividido por estacionamiento que a su vez se divide por espacios
# Voy a obtener la lista de un espacio en particular de un estacionamiento, voy a obtener el primer espacio vacio que
# encuentre y despues voy a compararlo con los demas
# Mostrar en una ventana el espacio vacio y en la otra la comparacion y el resultado
empty_space_filepath = ''
errors = []
for parkinglot, images_info_by_spaces in images_info_by_patkinglot.items():
for space, images_info_of_space in images_info_by_spaces.items():
error_count_empty = 0
error_count_occupied = 0
error_empty = 0
error_occupied = 0
empty_space_filepath = ''
example_list = images_info_of_space
for example in tqdm(example_list):
if example['state'] == '0' and len(empty_space_filepath) == 0:
empty_space_filepath = example['filepath']
img_empty_space = getGrayscaleImage(empty_space_filepath)
break
for example in tqdm(example_list):
comparision_space_filepath = example['filepath']
img_comparision_space = getGrayscaleImage(comparision_space_filepath)
try:
sim = ssim(img_empty_space, img_comparision_space)
except:
height1, width1 = img_empty_space.shape
img_comparision_space = cv2.resize(img_comparision_space, (width1, height1))
sim = ssim(img_empty_space, img_comparision_space)
nm = nrmse(img_empty_space, img_comparision_space)
# m = mse(img_empty_space, img_comparision_space)
space_comparing_name = 'state: {} sim: {} nrmse: {}'.format(example['state'], sim, nm)
if sim < 0.4 and example['state'] == '0':
error_count_empty += 1
error_empty += abs(0.4 - sim)
if sim >= 0.4 and example['state'] == '1':
error_count_occupied += 1
error_occupied += abs(sim - 0.4)
if sim > 0.7:
empty_space_filepath = example['filepath']
img_empty_space = img_comparision_space
"""
fig = plt.figure('title')
plt.suptitle(space_comparing_name)
# show first image
ax = fig.add_subplot(1, 2, 1)
plt.imshow(img_empty_space, cmap=plt.cm.gray)
plt.axis("off")
# show the second image
ax = fig.add_subplot(1, 2, 2)
plt.imshow(img_comparision_space, cmap=plt.cm.gray)
plt.axis("off")
# show the images
plt.show()
"""
error_occupied = 0 if error_count_occupied == 0 else (error_occupied / error_count_occupied)
error_empty = 0 if error_count_empty == 0 else (error_empty / error_count_empty)
print('In the space {} in a total of {} there was an error of occupied {} {} empty {} {}'.format(space, len(
example_list), error_count_occupied, error_occupied, error_count_empty, error_empty))
errors.append({'parkinglot': parkinglot, 'space': space, 'total': len(example_list),
'error_count_occupied': error_count_occupied,
'error_occupied': error_occupied,
'error_count_empty': error_count_empty, 'error_empty': error_empty})
info = {'dataset': info_filename, 'threshold': 0.4, 'comparision_method': 'sim', 'errors': errors}
dataset_name = ntpath.basename(info_filename).split('.')[0]
feedback_filename = '{}_{}_{}.json'.format(dataset_name, 0.4, 'sim')
with open(feedback_filename, 'w') as outfile:
json.dump(info, outfile)
# s = ssim(grayscale_selected_image, grayscale_current_image)
if __name__ == "__main__":
main()
|
[
"sebastianxiro@gmail.com"
] |
sebastianxiro@gmail.com
|
6846461a15b491de3c42e18d6aa4d646d87bad7a
|
4bd5e9b67d98bfcc9611bd8b774c9ab9f4f4d446
|
/Pythonๅบ็ก็ฌ่ฎฐ/13/ไปฃ็ /3.ๅค็ปงๆฟ.py
|
1693fc8f7b66401a95f44f287cfcb7d4c149f841
|
[] |
no_license
|
zhenguo96/test1
|
fe21510aea7feb674e52fd7a86d4177666f841c5
|
0d8de7e73e7e635d26462a0bc53c773d999498be
|
refs/heads/master
| 2020-05-03T13:09:53.592103
| 2019-04-06T07:08:47
| 2019-04-06T07:08:47
| 178,646,627
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 623
|
py
|
# # ๅค็ปงๆฟ
# class Base1:
# def __init__(self,name):
# self.name = name
# def t1(self):
# print("Base1")
#
# class Base2:
# def __init__(self,name):
# self.name = name
# def t2(self):
# print("Base2")
#
# class Base3:
# def __init__(self, name):
# self.name = name
# def t3(self):
# print("Base3")
#
# # ๅค็ปงๆฟ็ๅญ็ฑป
# class Child(Base1,Base2,Base3):
# pass
# child = Child('tom')
# print(child.__dict__)
# child.t1()
# child.t2()
# # ็ปงๆฟ้กบๅบ
# print(Child.mro())
# print(Child.__mro__)
#
|
[
"1148421588@qq.com"
] |
1148421588@qq.com
|
a9c4497452165d55ce911de902394c5c85022e33
|
1d1cb14e6d0b2a439348d6677eb9e8e72390d39f
|
/01_LiniarRegression/Live/linearRegressionWGradientDescent.py
|
9be2c569d79cf3a532fbfc0bbbbd1a1c066e0416
|
[] |
no_license
|
EduardoFAFernandes/MyDeepLearningIntro
|
2c7b2278ed1cf446c9f3656ae9dd421c22648933
|
a35a43f0690ddfa499097335d9b8aa058d1db021
|
refs/heads/master
| 2021-09-05T06:51:33.479757
| 2018-01-25T01:07:54
| 2018-01-25T01:07:54
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,514
|
py
|
from numpy import *
def compute_error_for_given_points(m, b, points):
totalError = 0
for point in points:
totalError += (point[1] - (m * point[0] + b))**2
return totalError / float(size(points))
def step_gradient(current_b, current_m, points, learning_rate):
gradient_m = 0
gradient_b = 0
N = float(len(points))
for point in points:
x = point[0]
y = point[1]
gradient_m += -(2/N) * x *(y - ((current_m * x) + current_b))
gradient_b += -(2/N) * (y - ((current_m * x) + current_b))
new_b = current_b - (learning_rate * gradient_b)
new_m = current_m - (learning_rate * gradient_m)
return [ new_b, new_m]
def gradient_descent_runner(points, initial_b, initial_m,
learning_rate, num_iterations ):
b = initial_b
m = initial_m
for i in range(num_iterations):
b,m = step_gradient(b, m, array(points), learning_rate)
return [b, m]
def run():
points = genfromtxt('data.csv', delimiter = ',')
#hyperparameters
learning_rate = 0.0001
#y = mx + b
initial_b = 0
initial_m = 0
num_iterations = 1000
print("Starting gradient descent at b = {0}, m = {1}, error = {2}".format(
initial_b, initial_m,
compute_error_for_given_points(initial_b, initial_m, points)))
print("Running...")
[b, m] = gradient_descent_runner(points, initial_b, initial_m,
learning_rate, num_iterations )
print("After {0} iterations b = {1}, m = {2}, error = {3}".format(
num_iterations, b, m, compute_error_for_given_points(b, m, points)))
if __name__ == '__main__':
run()
|
[
"nomeaocalhas3@gmail.com"
] |
nomeaocalhas3@gmail.com
|
3a319731bfbf4784b70ae3333efef1452e0f0744
|
bb0af8077407b27f8c3b787d32bdd9c4b37e977a
|
/fitmodel_densitysplit/do_fitting/modelfitfullk.py
|
82cbe8d7dac400a6d8e1c6ddf94bc83a4de38ec0
|
[] |
no_license
|
JulianWack/IfA-SummerStudent-2022
|
94948169acc830da5b2b1bb5d227fb4e782dbe1c
|
d1c6ebe48e0a527fe6138c73adc35464538e2e04
|
refs/heads/main
| 2023-04-15T02:51:59.098422
| 2022-09-10T10:52:38
| 2022-09-10T10:52:38
| 508,616,897
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,407
|
py
|
# Fit Kaiser with FoG term over largest possible k range
# Identical to modelfit.py excepet for storing paths and line 94
import numpy as np
import matplotlib.pyplot as plt
from scipy.special import legendre, erf
from datetime import timedelta
import time
from os import mkdir, listdir
import helper_funcs as hf
from astropy.cosmology import Planck18 as Planck18_astropy
import camb
import zeus
from nbodykit.lab import *
from nbodykit import style
plt.style.use(style.notebook)
### MCMC functions###
def logprior(theta, i, kmax):
''' The natural logarithm of the prior probability. Assume parameters independent such that log priors add.
Note that normalization is irrelevant for MCMC.'''
lp = 0.
b1, beta, sigma = theta
sigma_min, sigma_max = 1, 5
sigma_max = 5 if kmax < 0.075 else 60
lp_sigma = 0. if sigma_min < sigma < sigma_max else -np.inf
b1_min, b1_max = 0, 3
if i == 0:
beta_min, beta_max = -3, 3
else:
beta_min, beta_max = 0, 3
lp_b1 = 0. if b1_min < b1 < b1_max else -np.inf
lp_beta = 0. if beta_min < beta < beta_max else -np.inf
return lp_b1 + lp_beta + lp_sigma
def loglike(theta, data_multipoles, k, C_inv):
'''Return logarithm of likelihood i.e. -0.5*chi2.
data_multipoles must be an array of shape (len(ells), len(k)). theta is parameter vector: [b1, beta, sigma].'''
ells = [0,2]
model_multipoles = np.empty((len(ells), len(k)))
b1, beta, sigma = theta
model_multipoles[0] = ( 1/(2*(k*sigma)**5) * (np.sqrt(2*np.pi)*erf(k*sigma/np.sqrt(2))*(3*beta**2+(k*sigma)**4+2*beta*(k*sigma)**2) +
np.exp(-0.5*(k*sigma)**2)*(-2*beta*(beta+2)*(k*sigma)**3-6*beta**2*k*sigma) ) ) * b1**2 * Plin(k)
model_multipoles[1] = ( -5/(4*(k*sigma)**7) * (np.sqrt(2*np.pi)*erf(k*sigma/np.sqrt(2))*(-45*beta**2+(k*sigma)**6+(2*beta-3)*(k*sigma)**4+3*(beta-6)*beta*(k*sigma)**2) +
np.exp(-0.5*(k*sigma)**2)*((4*beta*(beta+2)+6)*(k*sigma)**5+12*beta*(2*beta+3)*(k*sigma)**3+90*beta**2*k*sigma) ) ) * b1**2 * Plin(k)
D_M = (data_multipoles - model_multipoles).flatten()
return -0.5*D_M@(C_inv @ D_M)
def logpost(theta, i, data_multipoles, k, C_inv):
'''Returns the logarithm of the posterior. By Bayes' theorem, this is just the sum of the log prior and log likelihood (up
to a irrelavant constant).
Uses values for theta from pre-analysis step to inform prior
'''
return logprior(theta, i, k[-1]) + loglike(theta, data_multipoles, k, C_inv)
#####################
### Set up MCMC ###
LOS = [0,0,1]
redshift = 0.2
BoxSize = 2000
cosmo = cosmology.Cosmology.from_astropy(Planck18_astropy)
Plin = cosmology.LinearPower(cosmo, redshift, transfer='CLASS')
sigma8_lin = Plin.sigma_r(8)
# load Planck18 data for CAMB and find f*sigma8 at redshift
# follows https://camb.readthedocs.io/en/latest/CAMBdemo.html
pars=camb.read_ini('../planck_2018.ini')
_ = pars.set_matter_power(redshifts=[redshift], kmax=1.4)
pars.NonLinear = camb.model.NonLinear_none
results = camb.get_results(pars)
fs8_true = results.get_fsigma8()[0]
ptile_labels = [r'$0^{th}$', r'$1^{st}$', r'$2^{nd}$', r'$3^{rd}$', r'$4^{th}$', r'$5^{th}$', r'$6^{th}$', r'$7^{th}$', r'$8^{th}$', r'$9^{th}$']
dk = 0.01
ells = [0,2]
# load computed power spectra to deduce multipoles in each bin and P(k,mu) from data
# need to pass location location of folder containing stored power spectra which lies one level lower in this case
k_full, shotnoise, n_ptile, Pk_ells_full = hf.load_power_data('../', ells, get_data_Pkmus=False)
# for given BoxSize, k is NaN above 0.034
possible_kmax = k_full[k_full<=0.343][1:] # ignore first k bin
kmax_range = possible_kmax
Nkmax = len(kmax_range)
b1_fits, beta_fits, sigma_fits, delta_fs8 = np.full((n_ptile, Nkmax), np.nan), np.full((n_ptile, Nkmax), np.nan), np.full((n_ptile, Nkmax), np.nan), np.full((n_ptile, Nkmax), np.nan)
b1_stds, beta_stds, sigma_stds, delta_fs8_stds = np.full((n_ptile, Nkmax), np.nan), np.full((n_ptile, Nkmax), np.nan), np.full((n_ptile, Nkmax), np.nan), np.full((n_ptile, Nkmax), np.nan)
reduced_chi2 = np.full((n_ptile, Nkmax), np.nan)
nsteps = 2500
ndim = 3
nwalkers = 8
start_b1 = 0.5 + 1*np.random.random(nwalkers)
start_beta = 0.5 + 1*np.random.random(nwalkers)
start_sigma = 1 + 4*np.random.random(nwalkers)
start = np.column_stack([start_b1, start_beta, start_sigma])
###################
### Run MCMC ###
root_path = '../fit_results/FoG_fullk/'
print("Fitting up to kmax=%.3f"%kmax_range[-1])
for i in range(n_ptile):
store_path = root_path+'chains_ptile%d/'%i
if 'chains_ptile%d'%i not in listdir(root_path):
mkdir(store_path)
cov_mat = np.loadtxt('../bruteforce_covmat/covariance_matricies/cov_ptile_%d.txt'%i)
t1 = time.time()
for j,kmax in enumerate(kmax_range):
if 'k%d.npy'%j in listdir(store_path):
continue
# slice up to increasingly large kmax and find delta_fs8 for each bin
mask = np.full(len(k_full), False)
mask = k_full <= kmax
mask[0] = False
k_sliced = k_full[mask]
Pk_ells_i = Pk_ells_full[:,:,mask][i]
C_inv = hf.mock_cov_mat_inv(cov_mat, k_full, kmax)
sampler = zeus.EnsembleSampler(nwalkers, ndim, logpost, maxiter=1e5, verbose=False, args=[i, Pk_ells_i, k_sliced, C_inv])
sampler.run_mcmc(start, nsteps)
chain = sampler.get_chain(flat=True, discard=nsteps//2)
# save chain without burn-in
np.save(store_path+'k%d'%j, chain)
b1_fits[i][j], b1_stds[i][j] = np.mean(chain[:,0]), np.std(chain[:,0])
# parameter space is sym about b1=0 for Kaiser model. To get non negative fs8 assure that b1 and beta have the same sign
if i == 0:
b1_fits[i][j] *= -1
beta_fits[i][j], beta_stds[i][j] = np.mean(chain[:,1]), np.std(chain[:,1])
delta_fs8[i][j] = 1 - sigma8_lin*(beta_fits[i][j]*b1_fits[i][j])/fs8_true
delta_fs8_stds[i][j] = np.abs(sigma8_lin/fs8_true*(beta_stds[i][j]*b1_fits[i][j]+beta_fits[i][j]*b1_stds[i][j]))
sigma_fits[i][j], sigma_stds[i][j] = np.mean(chain[:,2]), np.std(chain[:,2])
reduced_chi2[i][j] = -2*loglike([b1_fits[i][j], beta_fits[i][j], sigma_fits[i][j]], Pk_ells_i, k_sliced, C_inv) / (len(ells)*len(k_sliced)-ndim)
t2 = time.time()
print('Fitted %d-th percentile in %s'%(i,str(timedelta(seconds=t2-t1))))
################
### Store fit result ###
np.savetxt(root_path+'b1_fits.txt', b1_fits)
np.savetxt(root_path+'b1_stds.txt', b1_stds)
np.savetxt(root_path+'beta_fits.txt', beta_fits)
np.savetxt(root_path+'beta_stds.txt', beta_stds)
np.savetxt(root_path+'delta_fs8.txt', delta_fs8)
np.savetxt(root_path+'delta_fs8_stds.txt', delta_fs8_stds)
np.savetxt(root_path+'sigma_fits.txt', sigma_fits)
np.savetxt(root_path+'sigma_stds.txt', sigma_stds)
np.savetxt(root_path+'reduced_chi2.txt', reduced_chi2)
########################
### Make fs8 plot ###
fig = plt.figure(figsize=(20,8))
for i in range(n_ptile):
plt.plot(kmax_range, delta_fs8[i], label=ptile_labels[i])
plt.fill_between(kmax_range, delta_fs8[i]-delta_fs8_stds[i,:], delta_fs8[i]+delta_fs8_stds[i,:], alpha=0.1)
plt.title(r'$\Delta f\sigma_8$ at $z=%.3f$'%redshift)
plt.xlabel(r'$k_{max}$ [$h \ \mathrm{Mpc}^{-1}$]')
plt.ylabel(r'$1 - (\sigma_8^{lin}*\beta*b_1) \ / \ (f\sigma_8)^{true}$')
handles, labels = plt.gca().get_legend_handles_labels()
fig.legend(handles, labels, loc='upper center', bbox_to_anchor=(0.5, -0.05), ncol=n_ptile)
fig.savefig("../plots/KaiserFoG_fullk_dfs8_vs_kmax.pdf")
#####################
### Make fit plot ###
fig = plt.figure(figsize=(26,18))
ax_b1 = plt.subplot(2,3,1)
ax_beta = plt.subplot(2,3,2)
ax_sigma = plt.subplot(2,3,3)
ax_chi2 = plt.subplot(2,3,(4,6))
for i in range(n_ptile):
ax_b1.plot(kmax_range, b1_fits[i], label=ptile_labels[i])
ax_b1.fill_between(kmax_range, b1_fits[i]-b1_stds[i], b1_fits[i]+b1_stds[i], alpha=0.1)
ax_beta.plot(kmax_range, beta_fits[i], label=ptile_labels[i])
ax_beta.fill_between(kmax_range, beta_fits[i]-beta_stds[i], beta_fits[i]+beta_stds[i], alpha=0.1)
ax_sigma.plot(kmax_range, sigma_fits[i], label=ptile_labels[i])
ax_sigma.fill_between(kmax_range, sigma_fits[i]-sigma_stds[i], sigma_fits[i]+sigma_stds[i], alpha=0.1)
ax_chi2.plot(kmax_range[1:], reduced_chi2[i][1:], label=ptile_labels[i]) # first element negative, s.t. not shown on log scale
ax_b1.set_title(r'$b_1$ mean and $1\sigma$ interval')
ax_b1.set_xlabel(r'$k_{max}$ [$h \ \mathrm{Mpc}^{-1}$]')
ax_b1.set_ylabel(r'$b_1$')
ax_beta.set_title(r'$\beta$ mean and $1\sigma$ interval')
ax_beta.set_xlabel(r'$k_{max}$ [$h \ \mathrm{Mpc}^{-1}$]')
ax_beta.set_ylabel(r'$\beta$')
ax_sigma.set_title(r'$\sigma$ mean and $1\sigma$ interval')
ax_sigma.set_xlabel(r'$k_{max}$ [$h \ \mathrm{Mpc}^{-1}$]')
ax_sigma.set_ylabel(r'$\sigma$ [$h^{-1} \ \mathrm{Mpc}$]')
ax_chi2.set_title(r'reduced $\chi^2$')
ax_chi2.set_yscale('log')
ax_chi2.set_xlabel(r'$k_{max}$ [$h \ \mathrm{Mpc}^{-1}$]')
ax_chi2.set_ylabel(r'$\chi^2 / dof$')
handles, labels = plt.gca().get_legend_handles_labels()
fig.legend(handles, labels, loc='upper center', bbox_to_anchor=(0.5, +0.05), ncol=n_ptile)
fig.savefig("../plots/KaiserFoG_fullk_fits.pdf")
#####################
|
[
"jwack@cuillin.roe.ac.uk"
] |
jwack@cuillin.roe.ac.uk
|
964887bf2513e38eadfe80819f983c5826f676de
|
0b9f6534a99ff551f0006df78a24e8af30340580
|
/Source/BackupFiles/HDAPI/hdapi/API ๋ฌธ์/python_sample.py
|
08a1d2e3fc1c3952637678e603a976500b276f82
|
[] |
no_license
|
jayirum/ChartProjSvr
|
9bac49865d1e081de0cbd7559d0d2cbdd26279c2
|
d69edfcb3ac3698e1bdfcf5862d5e63bb305cb52
|
refs/heads/master
| 2020-03-24T03:22:18.176911
| 2019-07-05T01:38:03
| 2019-07-05T01:38:03
| 142,416,528
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 130,254
|
py
|
#-*- coding: utf-8 -*-
# Form implementation generated from reading ui file './python_sample.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
# Python version 2.7
from PyQt4 import QtCore, QtGui
#from PyQt4.QAxContainer import *
#from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QAxContainer import *
import time
import os
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
CONST_FO_SERIES = "101L6000"
CONST_FO_SISE_51 = "51" # ๊ตญ๋ด์ ๋ฌผํธ๊ฐ
CONST_FO_SISE_65 = "65" # ๊ตญ๋ด์ ๋ฌผ์ฒด๊ฒฐ
CONST_FO_SISE_58 = "58" # ๊ตญ๋ด์ํ์ ๋ฌผํธ๊ฐ
CONST_FO_SISE_71 = "71" # ๊ตญ๋ด์ํ์ ๋ฌผ์ฒด๊ฒฐ
CONST_FO_ORDER_181 = "181" # ๊ตญ๋ด์ฃผ๋ฌธ์ค์๊ฐ์ ์
CONST_FO_ORDER_182 = "182" # ๊ตญ๋ด์ฃผ๋ฌธ์ค์๊ฐ๋ฏธ์ฒด๊ฒฐ
CONST_FO_ORDER_183 = "183" # ๊ตญ๋ด์ฃผ๋ฌธ์ค์๊ฐ๋ฏธ๊ฒฐ์
CONST_FO_ORDER_184 = "184" # ๊ตญ๋ด์ฃผ๋ฌธ์ค์๊ฐ์๊ณ
CONST_FO_ORDER_185 = "185" # ๊ตญ๋ด์ฃผ๋ฌธ์ค์๊ฐ์ฒด๊ฒฐ
CONST_FR_SERIES = "6AH16"
CONST_FR_SISE_76 = "76" # ํด์ธ์ ๋ฌผํธ๊ฐ
CONST_FR_SISE_82 = "82" # ํด์ธ์ ๋ฌผ์ฒด๊ฒฐ
CONST_FR_ORDER_196 = "196" # ํด์ธ์ฃผ๋ฌธ์ค์๊ฐ์ ์
CONST_FR_ORDER_186 = "186" # ํด์ธ์ฃผ๋ฌธ์ค์๊ฐ๋ฏธ์ฒด๊ฒฐ
CONST_FR_ORDER_187 = "187" # ํด์ธ์ฃผ๋ฌธ์ค์๊ฐ๋ฏธ๊ฒฐ์
CONST_FR_ORDER_188 = "188" # ํด์ธ์ฃผ๋ฌธ์ค์๊ฐ์๊ณ
CONST_FR_ORDER_189 = "189" # ํด์ธ์ฃผ๋ฌธ์ค์๊ฐ์ฒด๊ฒฐ
CONST_FX_SERIES = "EUR/USD"
CONST_FX_SISE_171 = "171"
#attributes
m_AccListFO = {}
m_AccListFR = {}
m_AccListFX = {}
m_TrQueryListFO = {} # ๊ตญ๋ด ์กฐํ
m_FidQueryListFO = {} # ๊ตญ๋ด FID์กฐํ
m_TrOrderListFO = {} # ๊ตญ๋ด ์ฃผ๋ฌธ
m_TrQueryListFR = {}
m_FidQueryListFR = {} # ํด์ธ FID์กฐํ
m_TrOrderListFR = {}
m_TrQueryListFX = {}
m_FidQueryListFX = {} # ํด์ธ FID์กฐํ
m_TrOrderListFX = {}
# ์ค์๊ฐ
m_RealSiseListFO = {}
m_RealOrderListFO = {}
m_RealSiseListFR = {}
m_RealOrderListFR = {}
m_RealSiseListFX = {}
m_RealOrderListFX = {}
m_bUseComboTrChange = True
def setupUi(self, MainWindow):
_TOP_Pos = 50
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(800, 600)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.label_6 = QtGui.QLabel(self.centralwidget)
self.label_6.setGeometry(QtCore.QRect(10, 10, 56, 12))
self.label_6.setObjectName(_fromUtf8("label_6"))
self.Edit_Path = QtGui.QLineEdit(self.centralwidget)
self.Edit_Path.setGeometry(QtCore.QRect(70, 7, 400, 20))
self.Edit_Path.setObjectName(_fromUtf8("Edit_Path"))
self.BTN_Connect = QtGui.QPushButton(self.centralwidget)
self.BTN_Connect.setGeometry(QtCore.QRect(10, _TOP_Pos+10, 121, 23))
self.BTN_Connect.setObjectName(_fromUtf8("BTN_Connect"))
self.BTN_DisConnect = QtGui.QPushButton(self.centralwidget)
self.BTN_DisConnect.setGeometry(QtCore.QRect(150, _TOP_Pos+10, 111, 23))
self.BTN_DisConnect.setObjectName(_fromUtf8("BTN_DisConnect"))
self.label = QtGui.QLabel(self.centralwidget)
self.label.setGeometry(QtCore.QRect(10, _TOP_Pos+50, 56, 12))
self.label.setObjectName(_fromUtf8("label"))
self.label_2 = QtGui.QLabel(self.centralwidget)
self.label_2.setGeometry(QtCore.QRect(200, _TOP_Pos+50, 70, 12))
self.label_2.setObjectName(_fromUtf8("label_2"))
self.label_3 = QtGui.QLabel(self.centralwidget)
self.label_3.setGeometry(QtCore.QRect(401, _TOP_Pos+50, 111, 16))
self.label_3.setObjectName(_fromUtf8("label_3"))
self.Edit_UserID = QtGui.QLineEdit(self.centralwidget)
self.Edit_UserID.setGeometry(QtCore.QRect(70, _TOP_Pos+45, 113, 20))
self.Edit_UserID.setObjectName(_fromUtf8("Edit_UserID"))
self.Edit_UserPW = QtGui.QLineEdit(self.centralwidget)
self.Edit_UserPW.setGeometry(QtCore.QRect(280, _TOP_Pos+46, 113, 20))
self.Edit_UserPW.setEchoMode(QtGui.QLineEdit.Password)
self.Edit_UserPW.setObjectName(_fromUtf8("Edit_UserPW"))
self.Edit_CertPW = QtGui.QLineEdit(self.centralwidget)
self.Edit_CertPW.setGeometry(QtCore.QRect(510, _TOP_Pos+47, 113, 20))
self.Edit_CertPW.setEchoMode(QtGui.QLineEdit.Password)
self.Edit_CertPW.setObjectName(_fromUtf8("Edit_CertPW"))
self.Btn_Login = QtGui.QPushButton(self.centralwidget)
self.Btn_Login.setGeometry(QtCore.QRect(390, _TOP_Pos+80, 75, 23))
self.Btn_Login.setObjectName(_fromUtf8("Btn_Login"))
self.Btn_Logout = QtGui.QPushButton(self.centralwidget)
self.Btn_Logout.setGeometry(QtCore.QRect(550, _TOP_Pos+80, 75, 23))
self.Btn_Logout.setObjectName(_fromUtf8("Btn_Logout"))
# ๊ตญ๋ด/ํด์ธ/FX
self.groupBox1 = QtGui.QGroupBox(self.centralwidget)
self.groupBox1.setGeometry(QtCore.QRect(10, _TOP_Pos+110, 200, 41))
self.groupBox1.setObjectName(_fromUtf8("groupBox"))
self.BtnRadio1 = QtGui.QRadioButton(self.groupBox1)
self.BtnRadio1.setGeometry(QtCore.QRect(10, 10, 111, 22))
self.BtnRadio1.setObjectName(_fromUtf8("BtnRadio1"))
self.BtnRadio2 = QtGui.QRadioButton(self.groupBox1)
self.BtnRadio2.setGeometry(QtCore.QRect(70, 10, 111, 22))
self.BtnRadio2.setObjectName(_fromUtf8("BtnRadio2"))
self.BtnRadio3 = QtGui.QRadioButton(self.groupBox1)
self.BtnRadio3.setGeometry(QtCore.QRect(140, 10, 111, 22))
self.BtnRadio3.setObjectName(_fromUtf8("BtnRadio3"))
# Tr์กฐํ, Fid์กฐํ, ์ฃผ๋ฌธ
self.groupBox2 = QtGui.QGroupBox(self.centralwidget)
self.groupBox2.setGeometry(QtCore.QRect(251, _TOP_Pos+110, 200, 41))
self.groupBox2.setObjectName(_fromUtf8("groupBox2"))
self.BtnRadioQry1 = QtGui.QRadioButton(self.groupBox2)
self.BtnRadioQry1.setGeometry(QtCore.QRect(10, 10, 111, 22))
self.BtnRadioQry1.setObjectName(_fromUtf8("BtnRadioQry1"))
self.BtnRadioQry2 = QtGui.QRadioButton(self.groupBox2)
self.BtnRadioQry2.setGeometry(QtCore.QRect(70, 10, 111, 22))
self.BtnRadioQry2.setObjectName(_fromUtf8("BtnRadioQry2"))
self.BtnRadioQry3 = QtGui.QRadioButton(self.groupBox2)
self.BtnRadioQry3.setGeometry(QtCore.QRect(140, 10, 111, 22))
self.BtnRadioQry3.setObjectName(_fromUtf8("BtnRadioQry3"))
#ํด์ธ ์ข
๋ชฉ์ฝ๋ ์์
self.Btn_ReqJMCodeFR = QtGui.QPushButton(self.centralwidget)
self.Btn_ReqJMCodeFR.setGeometry(QtCore.QRect(460, _TOP_Pos+120, 120, 23))
self.Btn_ReqJMCodeFR.setObjectName(_fromUtf8("Btn_ReqJMCodeFR"))
# ๊ณ์ข ์ฝค๋ณด
self.label_4 = QtGui.QLabel(self.centralwidget)
self.label_4.setGeometry(QtCore.QRect(10, _TOP_Pos+165, 56, 12))
self.label_4.setObjectName(_fromUtf8("label_4"))
self.ComboAcc = QtGui.QComboBox(self.centralwidget)
self.ComboAcc.setGeometry(QtCore.QRect(70, _TOP_Pos+160, 111, 22))
self.ComboAcc.setObjectName(_fromUtf8("ComboAcc"))
self.label_acctNm = QtGui.QLabel(self.centralwidget)
self.label_acctNm.setGeometry(QtCore.QRect(450, _TOP_Pos+165, 56, 12))
self.label_acctNm.setObjectName(_fromUtf8("label_acctNm"))
#๊ณ์ข๋น๋ฒ
self.label_7 = QtGui.QLabel(self.centralwidget)
self.label_7.setGeometry(QtCore.QRect(200, _TOP_Pos+165, 56, 12))
self.label_7.setObjectName(_fromUtf8("label_7"))
self.Edit_AcctPW = QtGui.QLineEdit(self.centralwidget)
self.Edit_AcctPW.setGeometry(QtCore.QRect(280, _TOP_Pos+160, 113, 20))
self.Edit_AcctPW.setEchoMode(QtGui.QLineEdit.Password)
self.Edit_AcctPW.setObjectName(_fromUtf8("Edit_AcctPW"))
#์๋น์ค ์ฝค๋ถ
self.label_5 = QtGui.QLabel(self.centralwidget)
self.label_5.setGeometry(QtCore.QRect(10, _TOP_Pos+200, 56, 12))
self.label_5.setObjectName(_fromUtf8("label_5"))
self.ComboTr = QtGui.QComboBox(self.centralwidget)
self.ComboTr.setGeometry(QtCore.QRect(70, _TOP_Pos+195, 200, 22))
self.ComboTr.setObjectName(_fromUtf8("ComboTr"))
self.label_8 = QtGui.QLabel(self.centralwidget)
self.label_8.setGeometry(QtCore.QRect(280, _TOP_Pos+200, 60, 12))
self.label_8.setObjectName(_fromUtf8("label_8"))
self.label_tr = QtGui.QLabel(self.centralwidget)
self.label_tr.setGeometry(QtCore.QRect(340, _TOP_Pos+200, 150, 12))
self.label_tr.setObjectName(_fromUtf8("label_tr"))
# ์
๋ ฅ ๋ถ๋ถ
self.label_10 = QtGui.QLabel(self.centralwidget)
self.label_10.setGeometry(QtCore.QRect(10, _TOP_Pos+230, 60, 12))
self.label_10.setObjectName(_fromUtf8("label_10"))
self.Edit_Input = QtGui.QLineEdit(self.centralwidget)
self.Edit_Input.setGeometry(QtCore.QRect(70, _TOP_Pos+225, 450, 20))
self.Edit_Input.setObjectName(_fromUtf8("Edit_Input"))
self.Edit_Input_FID = QtGui.QLineEdit(self.centralwidget)
self.Edit_Input_FID.setGeometry(QtCore.QRect(525, _TOP_Pos+225, 120, 20))
self.Edit_Input_FID.setObjectName(_fromUtf8("Edit_Input_FID"))
self.BTN_Query = QtGui.QPushButton(self.centralwidget)
self.BTN_Query.setGeometry(QtCore.QRect(650, _TOP_Pos+224, 60, 22))
self.BTN_Query.setObjectName(_fromUtf8("BTN_Query"))
# ์ค์๊ฐ
self.groupBox3 = QtGui.QGroupBox(self.centralwidget)
self.groupBox3.setGeometry(QtCore.QRect(10, _TOP_Pos+250, 200, 41))
self.groupBox3.setObjectName(_fromUtf8("groupBox"))
self.label_11 = QtGui.QLabel(self.groupBox3)
self.label_11.setGeometry(QtCore.QRect(10, 10, 111, 22))
self.label_11.setObjectName(_fromUtf8("label_11"))
self.BtnRadioSiseReal = QtGui.QRadioButton(self.groupBox3)
self.BtnRadioSiseReal.setGeometry(QtCore.QRect(70, 10, 111, 22))
self.BtnRadioSiseReal.setObjectName(_fromUtf8("BtnRadioSiseReal"))
self.BtnRadioOrderReal = QtGui.QRadioButton(self.groupBox3)
self.BtnRadioOrderReal.setGeometry(QtCore.QRect(140, 10, 111, 22))
self.BtnRadioOrderReal.setObjectName(_fromUtf8("BtnRadioOrderReal"))
# ์ค์๊ฐ ๋ฑ๋ก/ํด์ง
self.groupBox4 = QtGui.QGroupBox(self.centralwidget)
self.groupBox4.setGeometry(QtCore.QRect(251, _TOP_Pos+250, 150, 41))
self.groupBox4.setObjectName(_fromUtf8("groupBox"))
self.BtnRadioRealRegister = QtGui.QRadioButton(self.groupBox4)
self.BtnRadioRealRegister.setGeometry(QtCore.QRect(10, 10, 111, 22))
self.BtnRadioRealRegister.setObjectName(_fromUtf8("BtnRadioRealRegister"))
self.BtnRadioRealUnRegister = QtGui.QRadioButton(self.groupBox4)
self.BtnRadioRealUnRegister.setGeometry(QtCore.QRect(70, 10, 111, 22))
self.BtnRadioRealUnRegister.setObjectName(_fromUtf8("BtnRadioRealUnRegister"))
self.ComboReal = QtGui.QComboBox(self.centralwidget)
self.ComboReal.setGeometry(QtCore.QRect(420, _TOP_Pos+260, 200, 22))
self.ComboReal.setObjectName(_fromUtf8("ComboReal"))
self.label_12 = QtGui.QLabel(self.centralwidget)
self.label_12.setGeometry(QtCore.QRect(10, _TOP_Pos+300, 60, 12))
self.label_12.setObjectName(_fromUtf8("label_12"))
self.Edit_Input_Real_Key = QtGui.QLineEdit(self.centralwidget)
self.Edit_Input_Real_Key.setGeometry(QtCore.QRect(70, _TOP_Pos+295, 50, 20))
self.Edit_Input_Real_Key.setObjectName(_fromUtf8("Edit_Input_Real_Key"))
self.Edit_Input_Real_Val = QtGui.QLineEdit(self.centralwidget)
self.Edit_Input_Real_Val.setGeometry(QtCore.QRect(130, _TOP_Pos+295, 250, 20))
self.Edit_Input_Real_Val.setObjectName(_fromUtf8("Edit_Input_Real_Val"))
self.BTN_Query_Real = QtGui.QPushButton(self.centralwidget)
self.BTN_Query_Real.setGeometry(QtCore.QRect(380, _TOP_Pos+294, 60, 22))
self.BTN_Query_Real.setObjectName(_fromUtf8("BTN_Query_Real"))
self.multiline = QtGui.QPlainTextEdit(self.centralwidget)
self.multiline.setGeometry(QtCore.QRect(10, _TOP_Pos+330, 781, 191))
self.multiline.setObjectName(_fromUtf8("listView"))
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 21))
self.menubar.setObjectName(_fromUtf8("menubar"))
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName(_fromUtf8("statusbar"))
MainWindow.setStatusBar(self.statusbar)
self.retranslateUi(MainWindow)
self.SetupTabOrder()
self.SetupSignal()
def SetupSignal(self):
QtCore.QObject.connect(self.BTN_Connect, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnConnect)
QtCore.QObject.connect(self.BTN_DisConnect, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnDisConnect)
QtCore.QObject.connect(self.Btn_Login, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnLogin)
QtCore.QObject.connect(self.Btn_Logout, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnLogout)
QtCore.QObject.connect(self.BtnRadio1, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnRadioAcctTpChange)
QtCore.QObject.connect(self.BtnRadio2, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnRadioAcctTpChange)
QtCore.QObject.connect(self.BtnRadio3, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnRadioAcctTpChange)
QtCore.QObject.connect(self.BtnRadioQry1, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnRadioTrChange)
QtCore.QObject.connect(self.BtnRadioQry2, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnRadioTrChange)
QtCore.QObject.connect(self.BtnRadioQry3, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnRadioTrChange)
QtCore.QObject.connect(self.Btn_ReqJMCodeFR, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnReqJmCodeFr)
QtCore.QObject.connect(self.ComboAcc, QtCore.SIGNAL(_fromUtf8("currentIndexChanged(QString)")), self.OnComboAccChange)
QtCore.QObject.connect(self.ComboTr, QtCore.SIGNAL(_fromUtf8("currentIndexChanged(QString)")), self.OnComboTrChange)
QtCore.QObject.connect(self.BTN_Query, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnQuery)
QtCore.QObject.connect(self.BtnRadioSiseReal, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnRadioRealChange)
QtCore.QObject.connect(self.BtnRadioOrderReal, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnRadioRealChange)
QtCore.QObject.connect(self.ComboReal, QtCore.SIGNAL(_fromUtf8("currentIndexChanged(QString)")), self.OnComboReal)
QtCore.QObject.connect(self.BTN_Query_Real, QtCore.SIGNAL(_fromUtf8("clicked()")), self.OnRealRegister)
def SetupTabOrder(self):
QtCore.QMetaObject.connectSlotsByName(MainWindow)
MainWindow.setTabOrder(self.BTN_Connect, self.Edit_UserID)
MainWindow.setTabOrder(self.Edit_UserID, self.Edit_UserPW)
MainWindow.setTabOrder(self.Edit_UserPW, self.Edit_CertPW)
MainWindow.setTabOrder(self.Edit_CertPW, self.Btn_Login)
MainWindow.setTabOrder(self.Btn_Login, self.ComboAcc)
MainWindow.setTabOrder(self.ComboAcc, self.ComboTr)
MainWindow.setTabOrder(self.ComboTr, self.Btn_Logout)
MainWindow.setTabOrder(self.Btn_Logout, self.BTN_DisConnect)
def SetupHDFOcx(self, MainWindow):
self.HDF = QAxWidget("HDFCOMMAGENT.HDFCommAgentCtrl.1")
self.HDF.connect(self.HDF, QtCore.SIGNAL("OnDataRecv(QString, int)"), self.OnDataRecv)
self.HDF.connect(self.HDF, QtCore.SIGNAL("OnGetBroadData(QString, int)"), self.OnGetBroadData)
self.HDF.connect(self.HDF, QtCore.SIGNAL("OnGetMsgWithRqId(int, QString, QString)"), self.OnGetMsgWithRqId)
# ์คํ์์น ์ค์
strPath = os.getcwd()
self.Edit_Path.setText(_translate("MainWindow", strPath, None))
self.HDF.dynamicCall("CommSetOCXPath(" + strPath + ")")
def HD_CommGetRepeatCnt(self, strTrCode, nRealType, strFildNm):
nRqID = self.HDF.dynamicCall("CommGetRepeatCnt(QString, int, QString)", strTrCode, nRealType, strFildNm)
return range(nRqID.toLongLong()[0])
def HD_CommRqData(self, strTrCode, strInputData, nInputLength, strPrevOrNext):
nRqID = self.HDF.dynamicCall("CommRqData(QString, QString, LONG, QString)", strTrCode, strInputData, nInputLength, strPrevOrNext)
return nRqID.toLongLong()[0]
def HD_CommFIDRqData(self, strTrCode, strInputData, sReqFidList, sPrevOrNext):
    """Send a FID-style request; the length argument is derived from the input.

    OCX signature: BSTR sFidCode, BSTR sInputData, BSTR sReqFidList,
    LONG nLength, BSTR sPrevOrNext.
    """
    variant = self.HDF.dynamicCall(
        "CommFIDRqData(QString, QString, QString, LONG, QString)",
        strTrCode, strInputData, sReqFidList, len(strInputData), sPrevOrNext)
    return variant.toLongLong()[0]
def HD_CommJumunSvr(self, strTrCode, strInputData):
    """Transmit an order request; return the request's unique id."""
    variant = self.HDF.dynamicCall(
        "CommJumunSvr(QString, QString)", strTrCode, strInputData)
    return variant.toLongLong()[0]
def HD_CommGetData(self, strTrCode, nRealType, strRecNm, nIndex, strFieldNm):
    """Fetch a single field from a received record as a string.

    The field name is whitespace-stripped and decoded from UTF-8 before the
    OCX call, so callers may pass padded byte strings.
    """
    strFieldNm = unicode(str(strFieldNm).strip(), 'utf-8')
    variant = self.HDF.dynamicCall(
        "CommGetData(QString, int, QString, int, QString)",
        strTrCode, nRealType, strRecNm, nIndex, strFieldNm)
    return variant.toString()
def HD_RegReal(self, bReg, strValue, nKeyType):
    """Register (bReg truthy) or remove a real-time broadcast subscription."""
    if bReg:
        method = "CommSetBroad(QString, LONG)"
    else:
        method = "CommRemoveBroad(QString, LONG)"
    return self.HDF.dynamicCall(method, strValue, nKeyType)
def HD_CommGetAccInfo(self):
    """Return the account-information string reported by the OCX."""
    return self.HDF.dynamicCall("CommGetAccInfo()").toString()
def HD_Login(self, strUserID, strUserWd, strCertPw):
    """Request a login through the OCX; the result arrives asynchronously."""
    self.HDF.dynamicCall(
        "CommLogin(QString, QString, QString)", strUserID, strUserWd, strCertPw)
def HD_Logout(self, strUserID):
    """Request a logout for the given user id through the OCX."""
    self.HDF.dynamicCall("CommLogout(QString)", strUserID)
def HD_CommReqMakeCod(self):
    """Ask the OCX to (re)build the JMCODE instrument-code file."""
    self.HDF.dynamicCall("CommReqMakeCod(QString, BOOL)", "JMCODE", False)
def retranslateUi(self, MainWindow):
    """Apply all UI strings, set default radio states, build the per-market
    service dictionaries, and populate the TR / real-time combos.

    Called once during UI setup.
    """
    MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow", None))
    self.label_6.setText(_translate("MainWindow", "์คํ์์น", None))
    self.BTN_Connect.setText(_translate("MainWindow", "ํต์ ๊ด๋ฆฌ์ ์คํ", None))
    self.BTN_DisConnect.setText(_translate("MainWindow", "ํต์ ๊ด๋ฆฌ์ ์ข
๋ฃ", None))
    self.label.setText(_translate("MainWindow", "์ฌ์ฉ์ ID", None))
    self.label_2.setText(_translate("MainWindow", "HTS๋น๋ฒ", None))
    self.label_3.setText(_translate("MainWindow", "๊ณต์ธ์ธ์ฆ ๋น๋ฐ๋ฒํธ", None))
    self.Btn_Login.setText(_translate("MainWindow", "๋ก๊ทธ์ธ", None))
    self.Btn_Logout.setText(_translate("MainWindow", "๋ก๊ทธ์์", None))
    self.label_4.setText(_translate("MainWindow", "๊ณ์ข๋ฒํธ", None))
    self.label_7.setText(_translate("MainWindow", "๊ณ์ข๋น๋ฒ", None))
    self.label_5.setText(_translate("MainWindow", "์กฐํ์ ํ", None))
    self.label_8.setText(_translate("MainWindow", "TRCode : ", None))
    self.label_10.setText(_translate("MainWindow", "Input : ", None))
    self.BTN_Query.setText(_translate("MainWindow", "์กฐํ", None))
    self.BtnRadio1.setText(_translate("MainWindow", "๊ตญ๋ด", None))
    self.BtnRadio2.setText(_translate("MainWindow", "ํด์ธ", None))
    self.BtnRadio3.setText(_translate("MainWindow", "FX", None))
    self.BtnRadio1.setChecked(True)
    self.BtnRadioQry1.setText(_translate("MainWindow", "TR", None))
    self.BtnRadioQry2.setText(_translate("MainWindow", "FID", None))
    self.BtnRadioQry3.setText(_translate("MainWindow", "์ฃผ๋ฌธ", None))
    self.BtnRadioQry1.setChecked(True)
    self.Btn_ReqJMCodeFR.setText(_translate("MainWindow", "ํด์ธ์ข
๋ชฉ์ฝ๋ ์์ ", None))
    self.label_11.setText(_translate("MainWindow", "์ค์๊ฐ", None))
    self.BtnRadioSiseReal.setText(_translate("MainWindow", "์์ธ", None))
    self.BtnRadioOrderReal.setText(_translate("MainWindow", "์ฃผ๋ฌธ", None))
    self.BtnRadioSiseReal.setChecked(True)
    self.BtnRadioRealRegister.setText(_translate("MainWindow", "๋ฑ๋ก", None))
    self.BtnRadioRealUnRegister.setText(_translate("MainWindow", "ํด์ง", None))
    self.BtnRadioRealRegister.setChecked(True)
    self.label_12.setText(_translate("MainWindow", "์ค์๊ฐ", None))
    self.BTN_Query_Real.setText(_translate("MainWindow", "๋ฑ๋ก/ํด์ง", None))
    # NOTE(review): hard-coded demo credentials below — remove before release.
    self.Edit_UserID.setText(u"sivas99")
    self.Edit_UserPW.setText(u"qwe123")
    self.Edit_AcctPW.setText(u"1234")
    # Build the per-market TR / FID / order / real-time service tables.
    self.SetTrToDictionary_FO()
    self.SetTrToDictionary_FR()
    self.SetTrToDictionary_FX()
    self.SetRealToDictionary_FO()
    self.SetRealToDictionary_FR()
    self.SetRealToDictionary_FX()
    # Initialize the combos from the freshly built tables.
    self.OnRadioTrChange()
    self.SetComboReal()
    pass
def SetTrToDictionary_FO(self):
    """Populate the domestic (FO) service tables.

    Each entry maps a display name to [service code, "field,len,..." spec];
    FID entries carry a third element with the requested FID list string.
    """
    self.m_TrQueryListFO.update({u"01.๊ตญ๋ด๋ฏธ์ฒด๊ฒฐ์ฃผ๋ฌธ์กฐํ" :[u"g11002.DQ0104&",u"๊ณ์ข๋ฒํธ,11,๋๋ฌ๋ฒํธ,3,๊ณ์ข๋น๋ฒ,8"]})
    self.m_TrQueryListFO.update({u"02.๊ตญ๋ด์ฒด๊ฒฐ๋ด์ญ๋ฆฌ์คํธ" :[u"g11002.DQ0107&",u"๊ณ์ข๋ฒํธ,11,๋๋ฌ๋ฒํธ,3,๊ณ์ข๋น๋ฒ,8"]})
    self.m_TrQueryListFO.update({u"03.๊ตญ๋ด๋ฏธ๊ฒฐ์ ์ฝ์ ์กฐํ" :[u"g11002.DQ0110&",u"๊ณ์ข๋ฒํธ,11,๋๋ฌ๋ฒํธ,3,๊ณ์ข๋น๋ฒ,8"]})
    self.m_TrQueryListFO.update({u"04.๊ตญ๋ด๊ณ ๊ฐ์ํ๊ธ์กฐํ" :[u"g11002.DQ0242&",u"๊ณ์ข๋ฒํธ,11,๊ณ์ข๋น๋ฒ,8" ]})
    self.m_TrQueryListFO.update({u"05.๊ตญ๋ด์ข
๋ชฉ๋ช
์กฐํ" :[u"g11002.DQ0622&",u"exchtp,1,exchid,2,fotp,1"]})
    self.m_TrQueryListFO.update({u"06.์ผ๊ฐCME๋ฏธ์ฒด๊ฒฐ์ฃผ๋ฌธ์กฐํ":[u"g11002.DQ0116&",u"๊ณ์ข๋ฒํธ,11,๋๋ฌ๋ฒํธ,3,๊ณ์ข๋น๋ฒ,8"]})
    self.m_TrQueryListFO.update({u"07.์ผ๊ฐCME์ฒด๊ฒฐ๋ด์ญ๋ฆฌ์คํธ":[u"g11002.DQ0119&",u"๊ณ์ข๋ฒํธ,11,๋๋ฌ๋ฒํธ,3,๊ณ์ข๋น๋ฒ,8"]})
    self.m_TrQueryListFO.update({u"08.์ผ๊ฐCME๋ฏธ๊ฒฐ์ ์ฝ์ ์กฐํ":[u"g11002.DQ0122&",u"๊ณ์ข๋ฒํธ,11,๋๋ฌ๋ฒํธ,3,๊ณ์ข๋น๋ฒ,8"]})
    # FID master queries (third element = requested FID list).
    self.m_FidQueryListFO.update({u"๊ตญ๋ด์ ๋ฌผ์ต์
๋ง์คํฐ" :[u"s20001",u"์ข
๋ชฉ,8,์กฐํ๋ชจ๋,1,์กฐํ๊ฑด์,4",u"000075051057"]})
    self.m_FidQueryListFO.update({u"๊ตญ๋ด์ฃผ์์ ๋ฌผ๋ง์คํฐ" :[u"s30001",u"์ข
๋ชฉ,8,์กฐํ๋ชจ๋,1,์กฐํ๊ฑด์,4",u"000075051057"]})
    self.m_FidQueryListFO.update({u"์ผ๊ฐCME์ ๋ฌผ์ต์
๋ง์คํฐ" :[u"s21001",u"์ข
๋ชฉ,8,์กฐํ๋ชจ๋,1,์กฐํ๊ฑด์,4",u"000075051057"]})
    # Order services (new / amend / cancel; domestic and overnight CME).
    self.m_TrOrderListFO.update({u"01.๊ตญ๋ด์ ๊ท์ฃผ๋ฌธ" :[u"g12001.DO1601&",u"๊ณ์ข๋ฒํธ,11,๊ณ์ข๋น๋ฒ,8,์ข
๋ชฉ,32,๋งค๋งค๊ตฌ๋ถ,1,์ฒด๊ฒฐ๊ตฌ๋ถ,1,์ฒด๊ฒฐ์กฐ๊ฑด,1,์ฃผ๋ฌธ๊ฐ๊ฒฉ,13,์ฃผ๋ฌธ์๋,5"]})
    self.m_TrOrderListFO.update({u"02.๊ตญ๋ด์ ์ ์ฃผ๋ฌธ" :[u"g12001.DO1901&",u"๊ณ์ข๋ฒํธ,11,๊ณ์ข๋น๋ฒ,8,์ข
๋ชฉ,32,๋งค๋งค๊ตฌ๋ถ,1,์ฒด๊ฒฐ๊ตฌ๋ถ,1,์ฒด๊ฒฐ์กฐ๊ฑด,1,์ฃผ๋ฌธ๊ฐ๊ฒฉ,13,์ฃผ๋ฌธ์๋,5,์ฃผ๋ฌธ๋ฒํธ,7"]})
    self.m_TrOrderListFO.update({u"03.๊ตญ๋ด์ทจ์์ฃผ๋ฌธ" :[u"g12001.DO1701&",u"๊ณ์ข๋ฒํธ,11,๊ณ์ข๋น๋ฒ,8,์ข
๋ชฉ,32,๋งค๋งค๊ตฌ๋ถ,1,์ฒด๊ฒฐ๊ตฌ๋ถ,1,์ฒด๊ฒฐ์กฐ๊ฑด,1,์ฃผ๋ฌธ๊ฐ๊ฒฉ,13,์ฃผ๋ฌธ์๋,5,์ฃผ๋ฌธ๋ฒํธ,7"]})
    self.m_TrOrderListFO.update({u"04.CME๊ตญ๋ด์ ๊ท์ฃผ๋ฌธ" :[u"g12001.DO2201&",u"๊ณ์ข๋ฒํธ,11,๊ณ์ข๋น๋ฒ,8,์ข
๋ชฉ,32,๋งค๋งค๊ตฌ๋ถ,1,์ฒด๊ฒฐ๊ตฌ๋ถ,1,์ฒด๊ฒฐ์กฐ๊ฑด,1,์ฃผ๋ฌธ๊ฐ๊ฒฉ,13,์ฃผ๋ฌธ์๋,5"]})
    self.m_TrOrderListFO.update({u"05.CME๊ตญ๋ด์ ์ ์ฃผ๋ฌธ" :[u"g12001.DO2101&",u"๊ณ์ข๋ฒํธ,11,๊ณ์ข๋น๋ฒ,8,์ข
๋ชฉ,32,๋งค๋งค๊ตฌ๋ถ,1,์ฒด๊ฒฐ๊ตฌ๋ถ,1,์ฒด๊ฒฐ์กฐ๊ฑด,1,์ฃผ๋ฌธ๊ฐ๊ฒฉ,13,์ฃผ๋ฌธ์๋,5,์ฃผ๋ฌธ๋ฒํธ,7"]})
    self.m_TrOrderListFO.update({u"06.CME๊ตญ๋ด์ทจ์์ฃผ๋ฌธ" :[u"g12001.DO2001&",u"๊ณ์ข๋ฒํธ,11,๊ณ์ข๋น๋ฒ,8,์ข
๋ชฉ,32,๋งค๋งค๊ตฌ๋ถ,1,์ฒด๊ฒฐ๊ตฌ๋ถ,1,์ฒด๊ฒฐ์กฐ๊ฑด,1,์ฃผ๋ฌธ๊ฐ๊ฒฉ,13,์ฃผ๋ฌธ์๋,5,์ฃผ๋ฌธ๋ฒํธ,7"]})
    pass
def SetTrToDictionary_FR(self):
    """Populate the overseas (FR) service tables.

    Same layout as the FO tables: display name -> [service code,
    "field,len,..." spec (, FID list for FID entries)].
    """
    self.m_TrQueryListFR.update({u"01.ํด์ธ๋ฏธ์ฒด๊ฒฐ์ฃผ๋ฌธ๋ด์ญ" :[u"g11004.AQ0401%",u"์กฐํ์ ํ,1,์ฌ์ฉ์ID,8,๊ณ์ข๋ฒํธ,6,๊ณ์ข๋น๋ฒ,8"]})
    self.m_TrQueryListFR.update({u"02.ํด์ธ์ฒด๊ฒฐ์ฃผ๋ฌธ๋ด์ญ" :[u"g11004.AQ0402%",u"์กฐํ์ ํ,1,์ฌ์ฉ์ID,8,๊ณ์ข๋ฒํธ,6,๊ณ์ข๋น๋ฒ,8"]})
    self.m_TrQueryListFR.update({u"03.ํด์ธ๋ฏธ๊ฒฐ์ ์ฃผ๋ฌธ๋ด์ญ" :[u"g11004.AQ0403%",u"์กฐํ์ ํ,1,์ฌ์ฉ์ID,8,๊ณ์ข๋ฒํธ,6,๊ณ์ข๋น๋ฒ,8"]})
    self.m_TrQueryListFR.update({u"04.ํด์ธ์ํ์์ฐ์กฐํ" :[u"g11004.AQ0607%",u"์กฐํ์ ํ,1,์ฌ์ฉ์ID,8,๊ณ์ข๋ฒํธ,6,๊ณ์ข๋น๋ฒ,8,groupnm,20,ํตํ์ฝ๋,3"]})
    self.m_TrQueryListFR.update({u"05.ํด์ธ์ผ์ฃผ์๋ถํฑ" :[u"o44005",u"keyvalue,18,์ฝ๋,32,์กฐํ์ผ์,8,์กฐํ์๊ฐ,10,๋ค์๊ตฌ๋ถ,1,๋ฐ์ดํฐ๊ตฌ๋ถ,1,์๊ฐ๊ตฌ๋ถ,3,๋ฐ์ดํฐ๊ฑด์,5,์ฅ๊ตฌ๋ถ,1,ํ๋ด์ค๋ด๊ตฌ๋ถ,1"]})
    # FID master queries (third element = requested FID list).
    self.m_FidQueryListFR.update({u"01.ํด์ธ์ ๋ฌผ์ต์
๋ง์คํฐ" :[u"o51000",u"์ข
๋ชฉ,32", u"000001002003004005006007008009010011012013014015016017018019020021022023024025026027028029030031032033034035036037"]})
    self.m_FidQueryListFR.update({u"02.ํด์ธํธ๊ฐ์ ๋ณด" :[u"o51010",u"์ข
๋ชฉ,32", u"000001002003004005006007"]})
    self.m_FidQueryListFR.update({u"03.ํด์ธ์ข
๋ชฉ์ ๋ณด" :[u"o51210",u"์ข
๋ชฉ,32", u"000001002003004005006007008009010011012013014015016017018019020021"]})
    # Order services (new / amend / cancel).
    self.m_TrOrderListFR.update({u"01.ํด์ธ์ ๊ท์ฃผ๋ฌธ" :[u"g12003.AO0401%",u"๊ณ์ข๋ฒํธ,6,๊ณ์ข๋น๋ฒ,8,์ข
๋ชฉ,32,๋งค๋งค๊ตฌ๋ถ,1,์ฒด๊ฒฐ๊ตฌ๋ถ,1,์ฒด๊ฒฐ์กฐ๊ฑด,1,์ฃผ๋ฌธ๊ฐ๊ฒฉ,15,์ฃผ๋ฌธ์๋,10,strtp,1,stopp,15,minqty,10"]})
    self.m_TrOrderListFR.update({u"02.ํด์ธ์ ์ ์ฃผ๋ฌธ" :[u"g12003.AO0402%",u"๊ณ์ข๋ฒํธ,6,๊ณ์ข๋น๋ฒ,8,์ข
๋ชฉ,32,๋งค๋งค๊ตฌ๋ถ,1,์ฒด๊ฒฐ๊ตฌ๋ถ,1,์ฒด๊ฒฐ์กฐ๊ฑด,1,์ฃผ๋ฌธ๊ฐ๊ฒฉ,15,์ฃผ๋ฌธ์๋,10,์ฃผ๋ฌธ๋ฒํธ,10,strtp,1,stopp,15,minqty,10"]})
    self.m_TrOrderListFR.update({u"03.ํด์ธ์ทจ์์ฃผ๋ฌธ" :[u"g12003.AO0403%",u"๊ณ์ข๋ฒํธ,6,๊ณ์ข๋น๋ฒ,8,์ข
๋ชฉ,32,์ฒด๊ฒฐ๊ตฌ๋ถ,1,์ฒด๊ฒฐ์กฐ๊ฑด,1,์ฃผ๋ฌธ๊ฐ๊ฒฉ,15,์ฃผ๋ฌธ์๋,10,์ฃผ๋ฌธ๋ฒํธ,10"]})
    pass
def SetTrToDictionary_FX(self):
    """Populate the FX service tables (same layout as the FO/FR tables)."""
    self.m_TrQueryListFX.update({u"01.FX์์ฐ๋ด์ญ์กฐํ" :[u"g11004.AQ0901%",u"์กฐํ์ ํ,1,์ฌ์ฉ์ID,8,๊ณ์ข๋ฒํธ,6,๊ณ์ข๋น๋ฒ,8"]})
    self.m_TrQueryListFX.update({u"02.FX๋ฏธ์ฒด๊ฒฐ๋ด์ญ์กฐํ" :[u"g11004.AQ0904%",u"์กฐํ์ ํ,1,์ฌ์ฉ์ID,8,๊ณ์ข๋ฒํธ,6,๊ณ์ข๋น๋ฒ,8"]})
    # NOTE(review): "AQ906%" breaks the four-digit pattern of the sibling
    # codes (AQ0901/0904/0908/0910) — possibly a typo for "AQ0906%"; confirm
    # against the service spec before changing.
    self.m_TrQueryListFX.update({u"03.FX๊ณ์ข์ ๋ณดSummary" :[u"g11004.AQ906%",u"์กฐํ์ ํ,1,์ฌ์ฉ์ID,8,๊ณ์ข๋ฒํธ,6,๊ณ์ข๋น๋ฒ,8"]})
    self.m_TrQueryListFX.update({u"04.FX๋ฏธ์ฒญ์ฐํฌ์ง์
์กฐํ" :[u"g11004.AQ0908%",u"์กฐํ์ ํ,1,์ฌ์ฉ์ID,8,๊ณ์ข๋ฒํธ,6,๊ณ์ข๋น๋ฒ,8"]})
    self.m_TrQueryListFX.update({u"05.FX์ฒญ์ฐํฌ์ง์
์กฐํ" :[u"g11004.AQ0910%",u"์กฐํ์ ํ,1,์ฌ์ฉ์ID,8,๊ณ์ข๋ฒํธ,6,๊ณ์ข๋น๋ฒ,8,groupnm,20,์์์ผ์,8,์ข
๋ฃ์ผ์,8"]})
    self.m_FidQueryListFX.update({u"01.FX๋ง์คํฐ" :[u"x00001",u"์ข
๋ชฉ,10", u"000001002003004005006007008009010011012013014015016017018019020021022"]})
    self.m_TrOrderListFX.update({u"01.FX์ ๊ท์ฃผ๋ฌธ" :[u"g12003.AO0501%",u"๊ณ์ข๋ฒํธ,6,๊ณ์ข๋น๋ฒ,8,์ข
๋ชฉ,32,๋งค๋งค๊ตฌ๋ถ,1,์ฃผ๋ฌธ๊ฐ๊ฒฉ,20,์ฃผ๋ฌธ์๋,15,ordertr,10,์ฒด๊ฒฐ๊ตฌ๋ถ,1,strtp,1,stopp,20,limitp,20,mrktrange,5,trailingstop,10,trdno,20,trdseq,5"]})
    self.m_TrOrderListFX.update({u"02.FX์ ์ ์ฃผ๋ฌธ" :[u"g12003.AO0502%",u"๊ณ์ข๋ฒํธ,6,๊ณ์ข๋น๋ฒ,8,์ข
๋ชฉ,32,์ฃผ๋ฌธ๋ฒํธ,20,์ฃผ๋ฌธSEQ,15,์ฃผ๋ฌธ๊ฐ๊ฒฉ,15,์ฃผ๋ฌธ์๋,10,ordertr,10,strtp,1,stopp,15,limitp,20,trailingstop,10"]})
    self.m_TrOrderListFX.update({u"03.FX์ทจ์์ฃผ๋ฌธ" :[u"g12003.AO0503%",u"๊ณ์ข๋ฒํธ,6,๊ณ์ข๋น๋ฒ,8,์ข
๋ชฉ,32,์ฃผ๋ฌธ๋ฒํธ,20"]})
    pass
def SetRealToDictionary_FO(self):
    """Populate the domestic (FO) real-time tables.

    Each entry maps a display name to [real-type key, registration key field
    (instrument code or account number)].
    """
    self.m_RealSiseListFO.update({u"01.๊ตญ๋ด์ ๋ฌผํธ๊ฐ" :[self.CONST_FO_SISE_51,u"์ข
๋ชฉ"]})
    self.m_RealSiseListFO.update({u"02.๊ตญ๋ด์ ๋ฌผ์ฒด๊ฒฐ" :[self.CONST_FO_SISE_65,u"์ข
๋ชฉ"]})
    self.m_RealSiseListFO.update({u"03.๊ตญ๋ด์ต์
ํธ๊ฐ" :["52",u"์ข
๋ชฉ"]})
    self.m_RealSiseListFO.update({u"04.๊ตญ๋ด์ต์
์ฒด๊ฒฐ" :["66",u"์ข
๋ชฉ"]})
    self.m_RealSiseListFO.update({u"05.๊ตญ๋ด์ํ์ ๋ฌผํธ๊ฐ" :[self.CONST_FO_SISE_58,u"์ข
๋ชฉ"]})
    self.m_RealSiseListFO.update({u"06.๊ตญ๋ด์ํ์ ๋ฌผ์ฒด๊ฒฐ" :[self.CONST_FO_SISE_71,u"์ข
๋ชฉ"]})
    self.m_RealSiseListFO.update({u"07.๊ตญ๋ด์ํ์ต์
ํธ๊ฐ" :[u"59",u"์ข
๋ชฉ"]})
    self.m_RealSiseListFO.update({u"08.๊ตญ๋ด์ํ์ต์
์ฒด๊ฒฐ" :[u"73",u"์ข
๋ชฉ"]})
    self.m_RealSiseListFO.update({u"09.๊ตญ๋ดCME์ ๋ฌผํธ๊ฐ" :[u"75",u"์ข
๋ชฉ"]})
    self.m_RealSiseListFO.update({u"10.๊ตญ๋ดCME์ ๋ฌผ์ฒด๊ฒฐ" :[u"77",u"์ข
๋ชฉ"]})
    self.m_RealSiseListFO.update({u"11.๊ตญ๋ด์ฃผ์์ ๋ฌผํธ๊ฐ" :[u"56",u"์ข
๋ชฉ"]})
    self.m_RealSiseListFO.update({u"12.๊ตญ๋ด์ฃผ์์ ๋ฌผ์ฒด๊ฒฐ" :[u"68",u"์ข
๋ชฉ"]})
    # Order-side real-time feeds are keyed by account number.
    self.m_RealOrderListFO.update({u"01.๊ตญ๋ด์ฃผ๋ฌธ์ค์๊ฐ์ ์" :[self.CONST_FO_ORDER_181,u"๊ณ์ข๋ฒํธ"]})
    self.m_RealOrderListFO.update({u"02.๊ตญ๋ด์ฃผ๋ฌธ์ค์๊ฐ๋ฏธ์ฒด๊ฒฐ" :[self.CONST_FO_ORDER_182,u"๊ณ์ข๋ฒํธ"]})
    self.m_RealOrderListFO.update({u"03.๊ตญ๋ด์ฃผ๋ฌธ์ค์๊ฐ๋ฏธ๊ฒฐ์ " :[self.CONST_FO_ORDER_183,u"๊ณ์ข๋ฒํธ"]})
    self.m_RealOrderListFO.update({u"04.๊ตญ๋ด์ฃผ๋ฌธ์ค์๊ฐ์๊ณ " :[self.CONST_FO_ORDER_184,u"๊ณ์ข๋ฒํธ"]})
    self.m_RealOrderListFO.update({u"05.๊ตญ๋ด์ฃผ๋ฌธ์ค์๊ฐ์ฒด๊ฒฐ" :[self.CONST_FO_ORDER_185,u"๊ณ์ข๋ฒํธ"]})
    pass
def SetRealToDictionary_FR(self):
    """Populate the overseas (FR) real-time tables (same layout as FO)."""
    self.m_RealSiseListFR.update({u"01.ํด์ธ์ ๋ฌผํธ๊ฐ" :[self.CONST_FR_SISE_76,u"์ข
๋ชฉ"]})
    self.m_RealSiseListFR.update({u"02.ํด์ธ์ ๋ฌผ์ฒด๊ฒฐ" :[self.CONST_FR_SISE_82,u"์ข
๋ชฉ"]})
    self.m_RealOrderListFR.update({u"01.ํด์ธ์ฃผ๋ฌธ์ค์๊ฐ์ ์" :[self.CONST_FR_ORDER_196,u"๊ณ์ข๋ฒํธ"]})
    self.m_RealOrderListFR.update({u"02.ํด์ธ์ฃผ๋ฌธ์ค์๊ฐ๋ฏธ์ฒด๊ฒฐ" :[self.CONST_FR_ORDER_186,u"๊ณ์ข๋ฒํธ"]})
    self.m_RealOrderListFR.update({u"03.ํด์ธ์ฃผ๋ฌธ์ค์๊ฐ๋ฏธ๊ฒฐ์ " :[self.CONST_FR_ORDER_187,u"๊ณ์ข๋ฒํธ"]})
    self.m_RealOrderListFR.update({u"04.ํด์ธ์ฃผ๋ฌธ์ค์๊ฐ์๊ณ " :[self.CONST_FR_ORDER_188,u"๊ณ์ข๋ฒํธ"]})
    self.m_RealOrderListFR.update({u"05.ํด์ธ์ฃผ๋ฌธ์ค์๊ฐ์ฒด๊ฒฐ" :[self.CONST_FR_ORDER_189,u"๊ณ์ข๋ฒํธ"]})
    pass
def SetRealToDictionary_FX(self):
    """Populate the FX real-time tables (same layout as FO/FR)."""
    self.m_RealSiseListFX.update({u"01.FX์ฒด๊ฒฐ" :[self.CONST_FX_SISE_171,u"์ข
๋ชฉ"]})
    self.m_RealOrderListFX.update({u"01.FX์ฃผ๋ฌธ์ค์๊ฐ์ ์" :[u"197",u"๊ณ์ข๋ฒํธ"]})
    self.m_RealOrderListFX.update({u"02.FX์ฃผ๋ฌธ์ค์๊ฐ๋ฏธ์ฒด๊ฒฐ" :[u"191",u"๊ณ์ข๋ฒํธ"]})
    self.m_RealOrderListFX.update({u"03.FX์ฃผ๋ฌธ์ค์๊ฐ๋ฏธ์ฒญ์ฐ" :[u"192",u"๊ณ์ข๋ฒํธ"]})
    self.m_RealOrderListFX.update({u"04.FX์ฃผ๋ฌธ์ค์๊ฐ์ฒญ์ฐ" :[u"193",u"๊ณ์ข๋ฒํธ"]})
    self.m_RealOrderListFX.update({u"05.FX์ฃผ๋ฌธ์ค์๊ฐ์์ฐ" :[u"194",u"๊ณ์ข๋ฒํธ"]})
    self.m_RealOrderListFX.update({u"06.FX์ฃผ๋ฌธ์ค์๊ฐ์์ฝ" :[u"195",u"๊ณ์ข๋ฒํธ"]})
    pass
def Util_WriteLog(self, strMsg):
    """Append a [HH:MM:SS]-stamped line to the log pane and keep it scrolled."""
    stamp = time.strftime("[%H:%M:%S]", time.localtime(time.time()))
    self.multiline.insertPlainText(stamp + strMsg + "\n")
    self.multiline.moveCursor(QTextCursor.End)
def Util_GetCurDate(self):
    """Return today's local date formatted as 'YYYYMMDD'."""
    return time.strftime("%Y%m%d", time.localtime(time.time()))
def Util_FillStr(self, strSrc, nTotLen, chPad):
    """Right-pad strSrc with chPad up to nTotLen characters.

    When strSrc is already nTotLen or longer, the pad count is non-positive
    and multiplies to an empty string, so strSrc is returned unchanged.
    """
    return strSrc + chPad * (nTotLen - len(strSrc))
def OnConnect(self):
    """Start the OCX communication agent (CommInit)."""
    self.Util_WriteLog("Connect")
    self.HDF.dynamicCall("CommInit(1)")
def OnDisConnect(self):
    """Log out first, then terminate the OCX communication agent."""
    self.OnLogout()
    self.HDF.dynamicCall("CommTerminate(1)")
def OnLogin(self):
    """Read the id / HTS password / certificate password fields and log in."""
    self.Util_WriteLog("Login")
    user_id = self.Edit_UserID.text()
    user_pw = self.Edit_UserPW.text()
    cert_pw = self.Edit_CertPW.text()
    self.HD_Login(user_id, user_pw, cert_pw)
def OnLogout(self):
    """Request a logout for the user id currently in the id field."""
    self.Util_WriteLog("Logout")
    self.HD_Logout(self.Edit_UserID.text())
def OnReqJmCodeFr(self):
    """Log and send the overseas instrument-code (JMCODE) build request."""
    self.Util_WriteLog("ํด์ธ์ข
๋ชฉ์์ฒญ")
    self.HD_CommReqMakeCod()
    pass
def GetAccOrTrDic(self, strQryTp):
    """Return the table matching the current radio selections.

    strQryTp "ACC" -> account dict for the active market (FO/FR/FX);
    strQryTp "TR"  -> TR / FID / order dict depending on the query-type
    radios. Any other value yields None (implicitly), as does a market
    with no radio checked (empty dicts are returned then).
    """
    acc_dic = {}
    tr_dic = {}
    # Pick the market first; each market carries its three query tables
    # in (TR, FID, order) position.
    if self.BtnRadio1.isChecked():        # domestic
        acc_dic = self.m_AccListFO
        qry_tables = (self.m_TrQueryListFO, self.m_FidQueryListFO, self.m_TrOrderListFO)
    elif self.BtnRadio2.isChecked():      # overseas
        acc_dic = self.m_AccListFR
        qry_tables = (self.m_TrQueryListFR, self.m_FidQueryListFR, self.m_TrOrderListFR)
    elif self.BtnRadio3.isChecked():      # FX
        acc_dic = self.m_AccListFX
        qry_tables = (self.m_TrQueryListFX, self.m_FidQueryListFX, self.m_TrOrderListFX)
    else:
        qry_tables = ({}, {}, {})
    # Then pick the query type within the chosen market.
    if self.BtnRadioQry1.isChecked():     # TR query
        tr_dic = qry_tables[0]
    elif self.BtnRadioQry2.isChecked():   # FID query
        tr_dic = qry_tables[1]
    elif self.BtnRadioQry3.isChecked():   # order
        tr_dic = qry_tables[2]
    if strQryTp == "ACC":
        return acc_dic
    elif strQryTp == "TR":
        return tr_dic
def GetRealDic(self):
    """Return the real-time service dict for the current market and the
    quote/order radio selection; empty dict when no market is checked."""
    want_quotes = self.BtnRadioSiseReal.isChecked()
    if self.BtnRadio1.isChecked():        # domestic
        return self.m_RealSiseListFO if want_quotes else self.m_RealOrderListFO
    if self.BtnRadio2.isChecked():        # overseas
        return self.m_RealSiseListFR if want_quotes else self.m_RealOrderListFR
    if self.BtnRadio3.isChecked():        # FX
        return self.m_RealSiseListFX if want_quotes else self.m_RealOrderListFX
    return {}
def OnRadioAcctTpChange(self):
    """Market radio changed: repopulate the account combo and refresh the
    TR and real-time combos to match."""
    self.ComboAcc.clear()
    for acct_no in list(self.GetAccOrTrDic("ACC").keys()):
        self.ComboAcc.addItem(acct_no)
    self.OnRadioTrChange()
    self.OnRadioRealChange()
def OnRadioTrChange(self):
    """Reload the TR combo (sorted) for the current radio selection.

    The FID input box is visible only in FID-query mode, and subsequent
    combo-change handling is re-enabled.
    """
    self.ComboTr.clear()
    for service_name in list(self.GetAccOrTrDic("TR").keys()):
        self.ComboTr.addItem(service_name)
    self.ComboTr.model().sort(0)
    self.Edit_Input_FID.setVisible(self.BtnRadioQry2.isChecked())
    self.m_bUseComboTrChange = True
def OnComboAccChange(self, strSelData):
    """Account combo changed: show the account's display name and rebuild
    the TR input string for the currently selected service."""
    acct_key = unicode(strSelData, 'utf-8')
    acct_name = self.GetAccOrTrDic("ACC").get(acct_key)
    self.label_acctNm.setText(_fromUtf8(acct_name))
    self.OnComboTrChange(self.ComboTr.currentText())
def OnComboTrChange(self, strSelData):
    """Build the fixed-width input string for the selected TR/FID/order
    service and display it (bracketed) in the input edit box.

    The second element of each service entry is a "name,len,name,len,..."
    spec; every known field name is filled with a demo/default value padded
    to its declared width via Util_FillStr. Numeric tokens are the length
    entries and are skipped in the final else branch.
    """
    strInData = unicode(strSelData, 'utf-8')
    tmpList = []
    tmpTrDic = {}
    tmpTrDic = self.GetAccOrTrDic("TR")
    tmpList = tmpTrDic.get(strInData)
    if ( tmpList is None ):
        return None
    strTrCode = tmpList[0]
    self.label_tr.setText(strTrCode)
    #data parsing
    strData = str(tmpList[1])
    commaArr = strData.split(",")
    strInput = ""
    self.Edit_Input.setText(strInput)
    for strItem in commaArr:
        if ( strItem == "๊ณ์ข๋ฒํธ" ):  # account number
            strAcctNo = self.ComboAcc.currentText()
            strAcctNo = str(strAcctNo).strip()
            if ( self.BtnRadio1.isChecked() == True ):  # domestic: 11 chars
                strInput += self.Util_FillStr(strAcctNo, 11, ' ')
            else:                                        # overseas/FX: 6 chars
                strInput += self.Util_FillStr(strAcctNo, 6, ' ')
        elif ( strItem == "๊ณ์ข๋น๋ฒ" ):  # account password
            strInput += self.Util_FillStr(self.Edit_AcctPW.text(), 8, ' ')
        elif ( strItem == "๋๋ฌ๋ฒํธ" ):  # branch number, always "001"
            strInput += self.Util_FillStr("001", 3, ' ')
        elif ( strItem == "์กฐํ์ผ์" or strItem == "์์์ผ์" or strItem == "์ข
๋ฃ์ผ์"):  # query/start/end date: today
            strInput += self.Util_FillStr(self.Util_GetCurDate(), 8, ' ')
        elif ( strItem == "exchtp" ):
            strInput += self.Util_FillStr("1", 1, ' ')  # 1: KRX
        elif ( strItem == "exchid" ):
            strInput += self.Util_FillStr("01", 2, ' ')  # chars 2-3 of the code, e.g. 101FC000 -> 01
        elif ( strItem == "fotp" ):
            strInput += self.Util_FillStr("F", 1, ' ')  # F: futures, O: options
        elif ( strItem == "์ข
๋ชฉ" ):  # instrument code
            if ( self.BtnRadio1.isChecked() == True ):  # domestic
                if ( self.BtnRadioQry2.isChecked() == True ):
                    strInput += self.Util_FillStr(self.CONST_FO_SERIES, 8, ' ')  # FID mode: 8-char code
                else:
                    strInput += self.Util_FillStr(self.CONST_FO_SERIES, 32, ' ')
            elif ( self.BtnRadio2.isChecked() == True ):
                strInput += self.Util_FillStr(self.CONST_FR_SERIES, 32, ' ')
            elif ( self.BtnRadio3.isChecked() == True ):
                strInput += self.Util_FillStr(self.CONST_FX_SERIES, 32, ' ')
        elif ( strItem == "๋งค๋งค๊ตฌ๋ถ" ):  # side
            strInput += self.Util_FillStr("1", 1, ' ')  # 1: buy, 2: sell
        elif ( strItem == "์ฒด๊ฒฐ๊ตฌ๋ถ" ):  # price type
            strInput += self.Util_FillStr("1", 1, ' ')  # 1: limit, 2: market
        elif ( strItem == "์ฒด๊ฒฐ์กฐ๊ฑด" ):  # time-in-force
            if ( self.BtnRadio1.isChecked() == True ):  # domestic
                strInput += self.Util_FillStr("1", 1, ' ')  # 1: FAS, 2: FOK, 3: FAK
            else:
                strInput += self.Util_FillStr("1", 1, ' ')  # 0: DAY, 1: GTC, 3: IOC/FAK, 4: FOK
        elif ( strItem == "์ฃผ๋ฌธ๊ฐ๊ฒฉ" ):  # order price (no decimal point)
            if ( self.BtnRadio1.isChecked() == True ):  # domestic
                strInput += self.Util_FillStr("23055", 13, ' ')
            else:
                strInput += self.Util_FillStr("8808", 15, ' ')
        elif ( strItem == "์ฃผ๋ฌธ์๋" ):  # order quantity
            strInput += self.Util_FillStr("1", 5, ' ')
        elif ( strItem == "์ฃผ๋ฌธ๋ฒํธ" ):  # order number (demo value)
            if ( self.BtnRadio1.isChecked() == True ):  # domestic
                strInput += self.Util_FillStr("1111111", 7, ' ')
            else:
                strInput += self.Util_FillStr("0000000000", 10, ' ')
        elif ( strItem == "์ฌ์ฉ์ID" ):  # user id
            strInput += self.Util_FillStr(self.Edit_UserID.text(), 8, ' ')
        elif ( strItem == "์กฐํ์ ํ" ):  # query type, always 1
            strInput += self.Util_FillStr("1", 1, ' ')
        elif ( strItem == "groupnm" ):
            strInput += self.Util_FillStr(" ", 20, ' ')
        elif ( strItem == "ํตํ์ฝ๋" ):  # currency code
            strInput += self.Util_FillStr("USD", 3, ' ')  # USD, EUR, JPY, KRW
        elif ( strItem == "๋ถ์์ฝ๋" ):  # department code
            strInput += self.Util_FillStr("001", 3, ' ')
        elif ( strItem == "์ ์๊ตฌ๋ถ" ):  # ledger type
            strInput += self.Util_FillStr("1", 1, ' ')  # 0: all, 1: P/L, 2: interest, 3: commission, 4: deposit, 5: withdrawal, 6: other
        elif ( strItem == "์กฐํ๋ชจ๋" ):  # compare mode
            strInput += self.Util_FillStr("3", 1, ' ')  # 1:LT 2:LE 3:EQ 4:GE 5:GT
        elif ( strItem == "์กฐํ๊ฑด์" ):  # record count
            strInput += self.Util_FillStr("0001", 4, ' ')
        elif ( strItem == "keyvalue"):
            strInput += self.Util_FillStr(" ", 18, ' ')
        elif ( strItem == "์กฐํ์๊ฐ"):  # query time
            strInput += self.Util_FillStr("9999999999", 10, ' ')
        elif ( strItem == "๋ค์๊ตฌ๋ถ"):  # continuation flag
            strInput += self.Util_FillStr("0", 1, ' ')  # 0: default
        elif ( strItem == "๋ฐ์ดํฐ๊ตฌ๋ถ"):  # bar type
            strInput += self.Util_FillStr("1", 1, ' ')  # 3: day, 4: week, 5: month, 2: minute, 6: tick
        elif ( strItem == "์๊ฐ๊ตฌ๋ถ"):  # interval
            strInput += self.Util_FillStr("001", 3, ' ')  # N minutes / ticks
        elif ( strItem == "๋ฐ์ดํฐ๊ฑด์"):  # number of rows requested
            strInput += self.Util_FillStr("00010", 5, ' ')
        elif ( strItem == "์ฅ๊ตฌ๋ถ"):  # session
            strInput += self.Util_FillStr("1", 1, ' ')  # 1: electronic session, 0: floor
        elif ( strItem == "ํ๋ด์ค๋ด๊ตฌ๋ถ"):  # session scope
            strInput += self.Util_FillStr("0", 1, ' ')  # 0: regular only, 1: extended + regular
        else:
            # Unknown token: field-length numbers land here and are skipped;
            # anything non-numeric is logged as an unknown field name.
            try:
                nFieldLen = int(strItem)
                if ( type(nFieldLen) is int ):  # token is a length entry, skip
                    pass
                else:
                    self.Util_WriteLog("UnKnown FieldNm : " + strItem)
            except ValueError:
                pass
    self.Edit_Input.setText( "[" + strInput + "]")
    # FID mode additionally shows the requested FID list.
    if ( self.BtnRadioQry2.isChecked() == True ):
        strFidInput = str(tmpList[2])
        self.Edit_Input_FID.setText("[" + strFidInput + "]")
    pass
def OnQuery(self):
    """Send the currently selected request: order, FID query, or TR query.

    Reads the TR code and the prepared input string from the UI, strips the
    decorative "[ ]" brackets, and dispatches through the matching OCX
    wrapper, logging the returned unique request id.
    """
    strTr = self.label_tr.text()
    strInput = self.Edit_Input.text()
    # Fix: str/unicode .replace() returns a new object, so the original
    # discarded result left the brackets in the transmitted input under the
    # Python-string (sip v2) API. Rebinding is also correct for QString,
    # whose replace() mutates in place and returns self.
    strInput = strInput.replace("[", "")
    strInput = strInput.replace("]", "")
    strInputFid = self.Edit_Input_FID.text()
    strInputFid = strInputFid.replace("[", "")
    strInputFid = strInputFid.replace("]", "")
    nRqID = 0
    strMsg = ""
    if ( self.BtnRadioQry3.isChecked() == True ):
        # Order transmission.
        nRqID = self.HD_CommJumunSvr(strTr, strInput)
        strMsg = u"์ฃผ๋ฌธ์ ์ก UniqueID : " + str(nRqID)
    elif ( self.BtnRadioQry2.isChecked() == True ):
        # FID-style query.
        nRqID = self.HD_CommFIDRqData(strTr, strInput, strInputFid, "")
        strMsg = u"์กฐํ์์ฒญ(FID) UniqueID : " + str(nRqID)
    else:
        # Plain TR query.
        nRqID = self.HD_CommRqData(strTr, strInput, len(strInput), u"")
        strMsg = u"์กฐํ์์ฒญ(TR) UniqueID : " + str(nRqID)
    self.Util_WriteLog( strMsg )
    pass
def OnRadioRealChange(self):
    """Quote/order real-time radio changed: rebuild the combo and refresh
    the key/value inputs for its current entry."""
    self.ComboReal.clear()
    self.SetComboReal()
    self.OnComboReal(self.ComboReal.currentText())
def SetComboReal(self):
    """Fill the real-time combo from the active real-time dict, sorted."""
    for service_no in list(self.GetRealDic().keys()):
        self.ComboReal.addItem(service_no)
    self.ComboReal.model().sort(0)
def OnComboReal(self, strSelData):
    """Real-time combo changed: show the selected service's real-type key
    and a default registration value (instrument code or account number)."""
    strInData = unicode(strSelData, 'utf-8')
    tmpList = []
    tmpRealDic = {}
    tmpRealDic = self.GetRealDic()
    tmpList = tmpRealDic.get(strInData)
    strInput = ""
    try:
        strItem = tmpList[0]
        self.Edit_Input_Real_Key.setText(tmpList[0])
        strItem = tmpList[1]
        if ( strItem == "์ข
๋ชฉ" ):  # keyed by instrument code
            if ( self.BtnRadio1.isChecked() == True ):
                strInput = self.Util_FillStr(self.CONST_FO_SERIES, 32, ' ')
            elif ( self.BtnRadio2.isChecked() == True ):
                strInput += self.Util_FillStr(self.CONST_FR_SERIES, 32, ' ')
            elif ( self.BtnRadio3.isChecked() == True ):
                strInput += self.Util_FillStr(self.CONST_FX_SERIES, 10, ' ')
            pass
        elif (strItem == "๊ณ์ข๋ฒํธ" ):  # keyed by account number
            strAcctNo = self.ComboAcc.currentText()
            strInput = self.Util_FillStr(strAcctNo, 11, ' ')
            pass
    # tmpList is None when the combo is empty; fall through with "".
    except TypeError:
        pass
    self.Edit_Input_Real_Val.setText( "[" + strInput + "]" )
    pass
def OnRealRegister(self):
    """Register or unregister the selected real-time feed with the OCX.

    Reads the real-type key and registration value from the UI, strips the
    decorative "[ ]" brackets, and calls HD_RegReal with the register/
    unregister flag taken from the radio buttons.
    """
    bReg = True
    strMsg = ""
    strValue = self.Edit_Input_Real_Val.text()
    nKeyType = int(self.Edit_Input_Real_Key.text())
    # Fix: rebind the .replace() result — str/unicode replace() does not
    # mutate in place, so the original sent the brackets to the OCX under
    # the Python-string (sip v2) API. Rebinding is also correct for QString,
    # whose replace() returns self.
    strValue = strValue.replace("[", "")
    strValue = strValue.replace("]", "")
    if ( self.BtnRadioRealRegister.isChecked() == True ):
        bReg = True
        strMsg = u"์ค์๊ฐ ๋ฑ๋ก ์์ฒญ Value[%s] Key[%d]" %(strValue, nKeyType)
    elif ( self.BtnRadioRealUnRegister.isChecked() == True ):
        bReg = False
        strMsg = u"์ค์๊ฐ ํด์ง ์์ฒญ Value[%s] Key[%d]" %(strValue, nKeyType)
    self.Util_WriteLog( strMsg )
    self.HD_RegReal(bReg, strValue, nKeyType)
    pass
def OnDataRecv(self, sTrCode, nRqID):
    """OCX event: a TR / FID / order reply arrived; dispatch on service code.

    sTrCode is the service code of the reply, nRqID the request's unique id.
    Branches with only `pass` are known codes with no handler yet.
    """
    strRecv = u"์กฐํ์๋ต Tr = [%s] UniqueID = [%d]" %(sTrCode, nRqID)
    self.Util_WriteLog( strRecv )
    i = 0
    if ( sTrCode == "o44020"): #MRKT.cod
        pass
    elif ( sTrCode == "o44021"): #PMCode.cod
        pass
    elif ( sTrCode == "o44022" ): #JMCode.cod (overseas): log every received code
        nRptCnt = self.HD_CommGetRepeatCnt(sTrCode, -1, u"OutRec1")
        self.Util_WriteLog( u"์์ ๊ฐ์ : " + str(nRptCnt))
        for i in nRptCnt:
            strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ข
๋ชฉ์ฝ๋")
            self.Util_WriteLog(strData)
        pass
    elif (sTrCode == "g11002.DQ0104&" ): # domestic open orders
        self.procDQ0104(sTrCode)
        pass
    elif (sTrCode == "g11002.DQ0107&" ): # domestic fills
        self.procDQ0107(sTrCode)
        pass
    elif (sTrCode == "g11002.DQ0110&" ): # domestic open positions
        self.procDQ0110(sTrCode)
        pass
    elif (sTrCode == "g11002.DQ0242&" ): # domestic customer deposit
        self.procDQ0242(sTrCode)
        pass
    elif sTrCode == "g11002.DQ0622&": # instrument codes (domestic)
        nRptCnt = self.HD_CommGetRepeatCnt(sTrCode, -1, u"OutRec1")
        for i in nRptCnt:
            strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ข
๋ชฉ์ฝ๋")
            self.Util_WriteLog(strData)
        pass
    elif (sTrCode == "g11002.DQ0116&"):
        pass
    elif (sTrCode == "g11002.DQ0119&"): # overnight CME fill list
        pass
    elif (sTrCode == "g11002.DQ0122&"): # overnight CME open positions
        pass
    elif (sTrCode == "s20001" ): # domestic futures/options master
        self.procs20001(sTrCode)
        pass
    elif (sTrCode == "s30001" ): # domestic stock-futures master
        pass
    elif (sTrCode == "s21001" ): # overnight CME futures master
        pass
    elif (sTrCode == "g12001.DO1601&" ): # domestic new order
        self.procDO1000(sTrCode)
        pass
    elif (sTrCode == "g12001.DO1901&" ): # domestic amend order
        self.procDO1000(sTrCode)
        pass
    elif (sTrCode == "g12001.DO1701&" ): # domestic cancel order
        self.procDO1000(sTrCode)
        pass
    elif (sTrCode == "g12001.DO2201&" ): # CME domestic new order
        self.procDO2000(sTrCode)
        pass
    elif (sTrCode == "g12001.DO2101&" ): # CME domestic amend order
        self.procDO2000(sTrCode)
        pass
    elif (sTrCode == "g12001.DO2001&" ): # CME domestic cancel order
        self.procDO2000(sTrCode)
        pass
    elif (sTrCode == "g11004.AQ0401%"): # overseas open orders
        self.procAQ0401(sTrCode)
        pass
    elif (sTrCode == "g11004.AQ0402%"): # overseas order history
        self.procAQ0402(sTrCode)
        pass
    elif (sTrCode == "g11004.AQ0403%"):
        self.procAQ0403(sTrCode)
        pass
    elif (sTrCode == "g11004.AQ0607%"):
        self.procAQ0607(sTrCode)
        pass
    elif (sTrCode == "o51000" ): # overseas trade info
        self.proco51000(sTrCode)
        pass
    elif (sTrCode == "o51010" ): # overseas quote info
        self.proco51010(sTrCode)
        pass
    elif (sTrCode == "o51210" ): # overseas instrument info
        self.proco51210(sTrCode)
        pass
    # NOTE(review): the three AO040x comparisons below lack the trailing '%'
    # of the registered order codes ("g12003.AO0401%" etc.). Confirm what
    # the server echoes back; otherwise these branches never match.
    elif (sTrCode == "g12003.AO0401" ): # overseas new order
        self.procAO0400(sTrCode)
        pass
    elif (sTrCode == "g12003.AO0402" ): # overseas amend order
        self.procAO0400(sTrCode)
        pass
    elif (sTrCode == "g12003.AO0403" ): # overseas cancel order
        self.procAO0400(sTrCode)
        pass
    elif sTrCode == "g11004.AQ0101%": # account list received
        self.Util_WriteLog(u"Recv Account List")
        self.procAQ0101(sTrCode)
        pass
    elif sTrCode == "g11004.AQ0450%": # overseas open orders
        self.procAQ0450(sTrCode)
        pass
def OnGetBroadData(self, sTrCode, nRealType):
    """OCX event: real-time broadcast data arrived; dispatch on real type.

    nRealType is compared as a string against the CONST_* real-type keys.
    """
    strRealType = unicode(str(nRealType), 'utf-8')
    self.Util_WriteLog (u"์ค์๊ฐ ์์ [%s] [%s]" % (sTrCode, strRealType))
    if ( strRealType == self.CONST_FO_SISE_51 ):
        self.procReal51(sTrCode, nRealType)
        pass
    elif ( strRealType == self.CONST_FO_SISE_65 ):
        self.procReal65(sTrCode, nRealType)
        pass
    elif ( strRealType == self.CONST_FO_SISE_58 ):
        self.procReal58(sTrCode, nRealType)
        pass
    elif ( strRealType == self.CONST_FO_SISE_71 ):
        self.procReal71(sTrCode, nRealType)
        pass
    elif ( strRealType == self.CONST_FO_ORDER_181 ):
        self.procReal181(sTrCode, nRealType)
        pass
    elif ( strRealType == self.CONST_FO_ORDER_182 ):
        self.procReal182(sTrCode, nRealType)
        pass
    elif ( strRealType == self.CONST_FO_ORDER_183 ):
        self.procReal183(sTrCode, nRealType)
        pass
    elif ( strRealType == self.CONST_FO_ORDER_184 ):
        self.procReal184(sTrCode, nRealType)
        pass
    elif ( strRealType == self.CONST_FO_ORDER_185 ):
        self.procReal185(sTrCode, nRealType)
        pass
    elif ( strRealType == self.CONST_FR_SISE_76 ):
        self.procReal76(sTrCode, nRealType)
        pass
    elif ( strRealType == self.CONST_FR_SISE_82 ): # overseas trade tick, handled inline:
        # log instrument code, base trade time, and trade price.
        strCode = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"์ข
๋ชฉ์ฝ๋")
        strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"๊ธฐ์ค์ฒด๊ฒฐ์๊ฐ")
        strPric = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"์ฒด๊ฒฐ๊ฐ")
        self.Util_WriteLog("[" + strCode + "][" + strData + "][" + strPric + "]")
        pass
    elif (strRealType == self.CONST_FR_ORDER_196 ):
        self.procReal196(sTrCode, nRealType)
        pass
    elif (strRealType == self.CONST_FR_ORDER_186 ):
        self.procReal186(sTrCode, nRealType)
        pass
    elif (strRealType == self.CONST_FR_ORDER_187 ):
        self.procReal187(sTrCode, nRealType)
        pass
    elif (strRealType == self.CONST_FR_ORDER_188 ):
        self.procReal188(sTrCode, nRealType)
        pass
    elif (strRealType == self.CONST_FR_ORDER_189 ):
        self.procReal189(sTrCode, nRealType)
        pass
    elif (strRealType == self.CONST_FX_SISE_171):
        self.procReal171(sTrCode, nRealType)
        pass
    pass
def procReal171(self, sTrCode, nRealType):
    """Log one FX tick: quote time and bid price from record OutRec1.

    Field names carry trailing padding; HD_CommGetData strips them.
    """
    quote_time = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"ํธ๊ฐ์๊ฐ ")
    bid_price = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"๋งค์ํธ๊ฐ ")
    self.Util_WriteLog(quote_time + " " + bid_price)
def procReal189(self, sTrCode, nRealType):
    """Log every field of an overseas real-time fill record (type 189).

    Field names carry trailing padding; HD_CommGetData strips them before
    the OCX lookup, so only the log-label alignment is affected.
    """
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"์ฃผ๋ฌธ๋ฒํธ ")
    self.Util_WriteLog(u"์ฃผ๋ฌธ๋ฒํธ : " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"๊ณ์ข๋ฒํธ ")
    self.Util_WriteLog(u"๊ณ์ข๋ฒํธ : " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"๊ณ์ข๋ช
")
    self.Util_WriteLog(u"๊ณ์ข๋ช
: " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"์ข
๋ชฉ ")
    self.Util_WriteLog(u"์ข
๋ชฉ : " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"๋งค๋งค๊ตฌ๋ถ ")
    self.Util_WriteLog(u"๋งค๋งค๊ตฌ๋ถ : " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"์ฃผ๋ฌธ๊ฐ๊ฒฉ ")
    self.Util_WriteLog(u"์ฃผ๋ฌธ๊ฐ๊ฒฉ : " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"์ฃผ๋ฌธ์๋ ")
    self.Util_WriteLog(u"์ฃผ๋ฌธ์๋ : " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"์ฒด๊ฒฐ๊ฐ๊ฒฉ ")
    self.Util_WriteLog(u"์ฒด๊ฒฐ๊ฐ๊ฒฉ : " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"์ฒด๊ฒฐ์๋ ")
    self.Util_WriteLog(u"์ฒด๊ฒฐ์๋ : " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"STOP๊ฐ๊ฒฉ ")
    self.Util_WriteLog(u"STOP๊ฐ๊ฒฉ : " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"์ฝ์ ๊ธ์ก ")
    self.Util_WriteLog(u"์ฝ์ ๊ธ์ก : " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"์ฃผ๋ฌธ์ ํ ")
    self.Util_WriteLog(u"์ฃผ๋ฌธ์ ํ : " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"์์๋ฃ ")
    self.Util_WriteLog(u"์์๋ฃ : " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"์ฃผ๋ฌธ์ผ์ ")
    self.Util_WriteLog(u"์ฃผ๋ฌธ์ผ์ : " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"์ฃผ๋ฌธ์๊ฐ ")
    self.Util_WriteLog(u"์ฃผ๋ฌธ์๊ฐ : " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"์ฃผ๋ฌธ์ ")
    self.Util_WriteLog(u"์ฃผ๋ฌธ์ : " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"์ฒด๊ฒฐ์ผ์ ")
    self.Util_WriteLog(u"์ฒด๊ฒฐ์ผ์ : " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"์ฒด๊ฒฐ์๊ฐ ")
    self.Util_WriteLog(u"์ฒด๊ฒฐ์๊ฐ : " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"๊ฑฐ๋์์ผ์ ")
    self.Util_WriteLog(u"๊ฑฐ๋์์ผ์ : " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"๊ฑฐ๋์์๊ฐ ")
    self.Util_WriteLog(u"๊ฑฐ๋์์๊ฐ : " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"ํตํ์ฝ๋ ")
    self.Util_WriteLog(u"ํตํ์ฝ๋ : " + strData)
    pass
def procReal188(self, sTrCode, nRealType):
    """Log an overseas real-time balance record (type 188): account number,
    account name, and orderable amount."""
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"๊ณ์ข๋ฒํธ ")
    self.Util_WriteLog(u"๊ณ์ข๋ฒํธ : " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"๊ณ์ข๋ช
")
    self.Util_WriteLog(u"๊ณ์ข๋ช
: " + strData)
    strData = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, u"์ฃผ๋ฌธ๊ฐ๋ฅ๊ธ์ก")
    self.Util_WriteLog(u"์ฃผ๋ฌธ๊ฐ๋ฅ๊ธ์ก: " + strData)
    pass
def procReal187(self, sTrCode, nRealType):
    """Log every field of the real-type 187 record (OutRec1, row 0)."""
    # Fixed-width field names; log label = padded field name + ": ".
    # Padding reconstructed from a garbled source — TODO confirm vs record spec.
    fields = (
        u"계좌번호    ",
        u"계좌명        ",
        u"종목    ",
        u"매매구분    ",
        u"평균단가    ",
        u"당일미결제수량  ",
        u"미결제약정금액  ",
        u"신규체결수량    ",
        u"신규체결금액    ",
        u"전환매체결수량  ",
        u"전환매체결금액  ",
        u"전환매Hold수량  ",
        u"자동청산Hold수량",
        u"유지증거금      ",
        u"통화코드        ",
    )
    for name in fields:
        value = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, name)
        self.Util_WriteLog(name + u": " + value)
def procReal186(self, sTrCode, nRealType):
    """Log every field of the real-type 186 record (OutRec1, row 0)."""
    # Fixed-width field names; log label = padded field name + ": ".
    # Padding reconstructed from a garbled source — TODO confirm vs record spec.
    fields = (
        u"계좌번호    ",
        u"계좌명        ",
        u"기준일자    ",
        u"주문번호    ",
        u"종목    ",
        u"매매구분    ",
        u"주문가격    ",
        u"주문수량    ",
        u"체결수량    ",
        u"잔량    ",
        u"STOP 가격    ",
        u"주문시간    ",
        u"거래소시간    ",
        u"주문자    ",
        u"주문구분    ",
        u"전략구분    ",
        u"최초원주문번호    ",
        u"그룹주문번호    ",
        u"매수주문수량    ",
        u"매도주문수량    ",
        u"주문유형    ",
        u"가격조건    ",
        u"체결조건    ",
        u"GTD일자    ",
        u"IOC최소체결수량    ",
        u"원주문번호    ",
        u"주문상위    ",
        u"통화코드    ",
        u"주문금액    ",
        u"주문접수일자    ",
        u"옵션행사예약여부  ",
    )
    for name in fields:
        value = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, name)
        self.Util_WriteLog(name + u": " + value)
def procReal196(self, sTrCode, nRealType):
    """Log every field of the real-type 196 record (OutRec1, row 0)."""
    # (record field name, log label prefix) — the last two labels carry
    # code-value legends, so labels cannot be derived from the field names.
    pairs = (
        (u"주문번호", u"주문번호 : "),
        (u"종목코드", u"종목코드 : "),
        (u"주문가격", u"주문가격 : "),
        (u"주문수량", u"주문수량 : "),
        (u"매매구분", u"매매구분 : "),
        (u"주문시간", u"주문시간 : "),
        (u"주문구분", u"주문구분(1:신규, 2:정정, 3:취소) : "),
        (u"처리구분", u"처리구분(0:접수, 1:거부) : "),
    )
    for name, label in pairs:
        value = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, name)
        self.Util_WriteLog(label + value)
def procReal181(self, sTrCode, nRealType):
    """Log every field of the real-type 181 record (OutRec1, row 0)."""
    for name in (u"주문번호", u"종목코드", u"주문가격",
                 u"주문수량", u"매매구분", u"접수시간"):
        value = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, name)
        self.Util_WriteLog(name + u" : " + value)
def procReal182(self, sTrCode, nRealType):
    """Log every field of the real-type 182 record (OutRec1, row 0)."""
    fields = (
        u"key",
        u"계좌번호",
        u"딜러번호",
        u"계좌명",
        u"딜러명",
        u"주문번호",
        u"종목코드",
        u"매매구분",
        u"주문가격",
        u"주문수량",
        u"정정수량",
        u"취소수량",
        u"체결수량",
        u"잔량",
        u"원주문번호",
        u"최초원주문번호",
        u"통신주문구분",
        u"주문전략구분",
        u"주문조작구분",
        u"거래소접수시간",
        u"영업사유",
        u"그룹주문번호",
    )
    for name in fields:
        value = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, name)
        self.Util_WriteLog(name + u" : " + value)
def procReal183(self, sTrCode, nRealType):
    """Log every field of the real-type 183 record (OutRec1, row 0)."""
    fields = (
        u"key",
        u"계좌번호",
        u"딜러번호",
        u"계좌명",
        u"딜러명",
        u"종목",
        u"매매구분",
        u"전일 미결제수량",
        u"당일 미결제수량",
        u"미체결수량",
        u"평균단가",
        u"당일미결제약정금액",
        u"신규체결수량",
        u"신규체결금액",
        u"전환매 체결수량",
        u"전환매 체결금액",
        u"행사 신청수량",
        u"행사 배정수량",
        u"행사 거부수량",
        u"만기수량",
        u"장부단가",
    )
    # The original hand-written code logs 만기수량 under the label
    # "현재 만기수량"; keep that single label override.
    labels = {u"만기수량": u"현재 만기수량"}
    for name in fields:
        value = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, name)
        self.Util_WriteLog(labels.get(name, name) + u" : " + value)
def procReal184(self, sTrCode, nRealType):
    """Log every field of the real-type 184 record (OutRec1, row 0)."""
    for name in (u"계좌번호", u"딜러번호", u"계좌명",
                 u"딜러명", u"주문가능금액"):
        value = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, name)
        self.Util_WriteLog(name + u" : " + value)
def procReal185(self, sTrCode, nRealType):
    """Log every field of the real-type 185 record (OutRec1, row 0)."""
    fields = (
        u"key",
        u"계좌번호",
        u"딜러번호",
        u"계좌명",
        u"딜러명",
        u"주문번호",
        u"종목코드",
        u"매매구분",
        u"체결가격",
        u"체결수량",
        u"체결시간",
        u"전략구분",
    )
    for name in fields:
        value = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, name)
        self.Util_WriteLog(name + u" : " + value)
def procReal76(self, sTrCode, nRealType):
    """Log the level-1 quote fields of the real-type 76 record (OutRec1, row 0)."""
    fields = (
        u"종목코드",
        u"호가시간",
        u"매도호가1",
        u"매수호가1",
        u"매도호가잔량1",
        u"매수호가잔량1",
        u"매도호가건수1",
        u"매수호가건수1",
    )
    for name in fields:
        value = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, name)
        self.Util_WriteLog(name + u" : " + value)
def procReal71(self, sTrCode, nRealType):
    """Log the trade-tick fields of the real-type 71 record (OutRec1, row 0)."""
    # (record field name, Korean log label) — "change" and "cvolume" are
    # English field names logged under Korean labels in the original code.
    pairs = (
        (u"종목코드", u"종목코드"),
        (u"체결시간", u"체결시간"),
        (u"change", u"전일대비"),
        (u"현재가", u"현재가"),
        (u"cvolume", u"체결량"),
        (u"누적거래량", u"누적거래량"),
    )
    for name, label in pairs:
        value = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, name)
        self.Util_WriteLog(label + u" : " + value)
def procReal58(self, sTrCode, nRealType):
    """Log the level-1 quote fields of the real-type 58 record (OutRec1, row 0)."""
    # (English record field name, Korean log label) as in the original code.
    pairs = (
        (u"kfutcode", u"종목코드"),
        (u"hotime", u"호가시간"),
        (u"offerho1", u"매도호가1"),
        (u"bidho1", u"매수호가1"),
        (u"offerrem1", u"매도호가잔량1"),
        (u"bidrem1", u"매수호가잔량1"),
        (u"offercnt1", u"매도호가건수1"),
        (u"bidcnt1", u"매수호가건수1"),
    )
    for name, label in pairs:
        value = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, name)
        self.Util_WriteLog(label + u" : " + value)
def procReal51(self, sTrCode, nRealType):
    """Log the level-1 quote fields of the real-type 51 record (OutRec1, row 0)."""
    fields = (
        u"종목코드",
        u"호가시간",
        u"매도호가1",
        u"매수호가1",
        u"매도호가잔량1",
        u"매수호가잔량1",
        u"매도호가건수1",
        u"매수호가건수1",
    )
    for name in fields:
        value = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, name)
        self.Util_WriteLog(name + u" : " + value)
def procReal65(self, sTrCode, nRealType):
    """Log the trade-tick fields of the real-type 65 record (OutRec1, row 0)."""
    for name in (u"종목코드", u"체결시간", u"전일대비",
                 u"현재가", u"체결량", u"누적거래량"):
        value = self.HD_CommGetData(sTrCode, nRealType, u"OutRec1", 0, name)
        self.Util_WriteLog(name + u" : " + value)
def procDO1000(self, sTrCode):
    """Log the reply fields of the DO1000 transaction (OutRec1, row 0)."""
    # nRealType is -1 for TR (non-realtime) replies, as in the other procDO*.
    for name in (u"접수구분", u"처리구분", u"계좌번호",
                 u"주문번호", u"종목코드"):
        value = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, name)
        self.Util_WriteLog(name + u" : " + value)
def procDO2000(self, sTrCode):
    """Log the reply fields of the DO2000 transaction (OutRec1, row 0)."""
    for name in (u"접수구분", u"처리구분", u"계좌번호",
                 u"주문번호", u"종목코드"):
        value = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, name)
        self.Util_WriteLog(name + u" : " + value)
def procAO0400(self, sTrCode):
    """Log the reply fields of the AO0400 transaction (OutRec1, row 0)."""
    for name in (u"접수구분", u"주문번호", u"처리코드", u"처리메세지"):
        value = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, name)
        self.Util_WriteLog(name + u" : " + value)
def procAQ0401(self, sTrCode):
    """Log every repeated row of the AQ0401 inquiry reply (OutRec1).

    Bug fix: HD_CommGetRepeatCnt returns an int (it is passed to str()
    below), so the original ``for i in nRptCnt:`` raised TypeError;
    iterate ``range(nRptCnt)`` instead.
    """
    nRptCnt = self.HD_CommGetRepeatCnt(sTrCode, -1, u"OutRec1")
    self.Util_WriteLog(u"AQ0401 반복횟수 : " + str(nRptCnt))
    # Fixed-width field names; log label = padded field name + ": ".
    # Padding reconstructed from a garbled source — TODO confirm vs record spec.
    fields = (
        u"주문번호    ",
        u"계좌번호    ",
        u"계좌명        ",
        u"종목코드    ",
        u"매매구분    ",
        u"주문가격    ",
        u"주문수량    ",
        u"체결수량    ",
        u"잔량    ",
        u"주문유형    ",
        u"STOP 가격    ",
        u"주문시간    ",
        u"주문자    ",
        u"통신구분    ",
        u"전략구분    ",
        u"최초원주문번호    ",
        u"주문일자    ",
        u"그룹 주문 번호    ",
        u"가격조건    ",
        u"체결조건    ",
        u"주문종료일자    ",
        u"IOC 최소체결수량    ",
        u"통화코드    ",
        u"주문금액    ",
        u"주문접수일자    ",
        u"옵션행사예약여부  ",
    )
    for i in range(nRptCnt):
        self.Util_WriteLog(u"===============================================")
        for name in fields:
            value = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, name)
            self.Util_WriteLog(name + u": " + value)
def procAQ0402(self, sTrCode):
    """Log every repeated row of the AQ0402 inquiry reply (OutRec1).

    Bug fix: HD_CommGetRepeatCnt returns an int (it is passed to str()
    below), so the original ``for i in nRptCnt:`` raised TypeError;
    iterate ``range(nRptCnt)`` instead.
    """
    nRptCnt = self.HD_CommGetRepeatCnt(sTrCode, -1, u"OutRec1")
    self.Util_WriteLog(u"AQ0402 반복횟수 : " + str(nRptCnt))
    # Fixed-width field names; log label = padded field name + ": ".
    # Padding reconstructed from a garbled source — TODO confirm vs record spec.
    fields = (
        u"주문번호    ",
        u"계좌번호    ",
        u"계좌명        ",
        u"종목    ",
        u"매매구분    ",
        u"주문가격    ",
        u"주문수량    ",
        u"체결가격    ",
        u"체결수량    ",
        u"STOP가격    ",
        u"약정금액    ",
        u"주문유형    ",
        u"수수료    ",
        u"주문일자    ",
        u"주문시간    ",
        u"주문자    ",
        u"체결일자    ",
        u"체결시간    ",
        u"거래소일자    ",
        u"거래소시간    ",
        u"통화코드    ",
    )
    for i in range(nRptCnt):
        self.Util_WriteLog(u"===============================================")
        for name in fields:
            value = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, name)
            self.Util_WriteLog(name + u": " + value)
def procAQ0403(self, sTrCode):
    """Log every repeated row of the AQ0403 inquiry reply (OutRec1).

    Bug fixes: iterate ``range(nRptCnt)`` instead of the int itself
    (HD_CommGetRepeatCnt returns an int, see the str() call below), and
    drop the stray debug ``print(nRptCnt)`` — the count is already logged.
    """
    nRptCnt = self.HD_CommGetRepeatCnt(sTrCode, -1, u"OutRec1")
    self.Util_WriteLog(u"AQ0403 반복횟수 : " + str(nRptCnt))
    # Fixed-width field names; log label = padded field name + ": ".
    # Padding reconstructed from a garbled source — TODO confirm vs record spec.
    fields = (
        u"계좌번호    ",
        u"계좌명        ",
        u"종목    ",
        u"매매구분    ",
        u"당일순미결제수량    ",
        u"평균단가(소수점반영) ",
        u"평균단가    ",
        u"현재가    ",
        u"평가손익    ",
        u"유지증거금    ",
        u"신규신호일    ",
        u"최종거래일    ",
        u"통화코드    ",
        u"옵션시장가치    ",
        u"옵션행사예약여부  ",
    )
    for i in range(nRptCnt):
        self.Util_WriteLog(u"===============================================")
        for name in fields:
            value = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, name)
            self.Util_WriteLog(name + u": " + value)
def procAQ0607(self, sTrCode):
    """Log every repeated row of the AQ0607 inquiry reply (OutRec1).

    Bug fix: HD_CommGetRepeatCnt returns an int (it is passed to str()
    below), so the original ``for i in nRptCnt:`` raised TypeError;
    iterate ``range(nRptCnt)`` instead.
    """
    nRptCnt = self.HD_CommGetRepeatCnt(sTrCode, -1, u"OutRec1")
    self.Util_WriteLog(u"AQ0607 반복횟수 : " + str(nRptCnt))
    # Fixed-width field names; log label = padded field name + ": ".
    # Padding reconstructed from a garbled source — TODO confirm vs record spec.
    fields = (
        u"계좌번호    ",
        u"계좌명        ",
        u"통화코드    ",
        u"예탁금총액    ",
        u"예탁금잔액    ",
        u"평가금액    ",
        u"미수금액    ",
        u"결제금액부족    ",
        u"미결제약정증거금    ",
        u"인출가능금    ",
        u"주문가능금    ",
        u"유지증거금    ",
        u"주문증거금    ",
        u"위탁증거금    ",
        u"거래수수료    ",
        u"청산손익    ",
        u"평가손익    ",
        u"미발생    ",
        u"추가증거금    ",
        u"총계정자산가치    ",
        u"외화고정환율    ",
        u"옵션매매대금    ",
    )
    for i in range(nRptCnt):
        self.Util_WriteLog(u"===============================================")
        for name in fields:
            value = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, name)
            self.Util_WriteLog(name + u": " + value)
def proco51000(self, sTrCode):
    """Log every master-info field of TR o51000 (single-row OutRec1).

    Each pair below reads one named field from row 0 of OutRec1 and writes
    it to the log with a Korean label.  NOTE(review): several string
    literals in this block were corrupted by an encoding/extraction step
    (Korean syllables split across lines); they are preserved byte-for-byte
    here and should be restored from the original source before use.
    """
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ข
๋ชฉ์ฝ๋  ")
    self.Util_WriteLog( u"์ข
๋ชฉ์ฝ๋  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"ํ๊ธ์ข
๋ชฉ๋ช
")
    self.Util_WriteLog( u"ํ๊ธ์ข
๋ชฉ๋ช
: " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"ํ์ฌ๊ฐ  ")
    self.Util_WriteLog( u"ํ์ฌ๊ฐ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ ์ผ๋๋น๊ตฌ๋ถ  ")
    self.Util_WriteLog( u"์ ์ผ๋๋น๊ตฌ๋ถ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ ์ผ๋๋น  ")
    self.Util_WriteLog( u"์ ์ผ๋๋น  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ ์ผ๋๋น๋ฑ๋ฝ์จ  ")
    self.Util_WriteLog( u"์ ์ผ๋๋น๋ฑ๋ฝ์จ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ง์ ๋๋น๊ตฌ๋ถ  ")
    self.Util_WriteLog( u"์ง์ ๋๋น๊ตฌ๋ถ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์๊ฐ  ")
    self.Util_WriteLog( u"์๊ฐ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"๊ณ ๊ฐ  ")
    self.Util_WriteLog( u"๊ณ ๊ฐ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ ๊ฐ  ")
    self.Util_WriteLog( u"์ ๊ฐ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"LT๊ณ ๊ฐ  ")
    self.Util_WriteLog( u"LT๊ณ ๊ฐ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"LT๊ณ ๊ฐ์ผ  ")
    self.Util_WriteLog( u"LT๊ณ ๊ฐ์ผ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"LT์ ๊ฐ  ")
    self.Util_WriteLog( u"LT์ ๊ฐ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"LT์ ๊ฐ์ผ  ")
    self.Util_WriteLog( u"LT์ ๊ฐ์ผ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์์
์ผ  ")
    self.Util_WriteLog( u"์์
์ผ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์์น๊ฑฐ๋๋  ")
    self.Util_WriteLog( u"์์น๊ฑฐ๋๋  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"ํ๋ฝ๊ฑฐ๋๋  ")
    self.Util_WriteLog( u"ํ๋ฝ๊ฑฐ๋๋  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"๋์ ๊ฑฐ๋๋  ")
    self.Util_WriteLog( u"๋์ ๊ฑฐ๋๋  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"๋ง๊ธฐ์ผ  ")
    self.Util_WriteLog( u"๋ง๊ธฐ์ผ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ ์ฐ๊ฐ  ")
    self.Util_WriteLog( u"์ ์ฐ๊ฐ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ ์ฐ์ผ  ")
    self.Util_WriteLog( u"์ ์ฐ์ผ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์์กด์ผ์  ")
    self.Util_WriteLog( u"์์กด์ผ์  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"๋งค๋ํธ๊ฐ  ")
    self.Util_WriteLog( u"๋งค๋ํธ๊ฐ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"๋งค์ํธ๊ฐ  ")
    self.Util_WriteLog( u"๋งค์ํธ๊ฐ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"๊ธฐ์ค๊ฐ  ")
    self.Util_WriteLog( u"๊ธฐ์ค๊ฐ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ ์ผ๊ฑฐ๋๋  ")
    self.Util_WriteLog( u"์ ์ผ๊ฑฐ๋๋  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ ์ผ๊ฑฐ๋๋๋๋น์จ  ")
    self.Util_WriteLog( u"์ ์ผ๊ฑฐ๋๋๋๋น์จ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ฒด๊ฒฐ์๊ฐ  ")
    self.Util_WriteLog( u"์ฒด๊ฒฐ์๊ฐ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"๊ตญ๋ด์๊ฐ  ")
    self.Util_WriteLog( u"๊ตญ๋ด์๊ฐ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"๊ฑฐ๋์๊ตฌ๋ถ  ")
    self.Util_WriteLog( u"๊ฑฐ๋์๊ตฌ๋ถ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"ETH์์  ")
    self.Util_WriteLog( u"ETH์์  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"ETH์ข
๋ฃ  ")
    self.Util_WriteLog( u"ETH์ข
๋ฃ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์๋ฌธ์ข
๋ชฉ๋ช
")
    self.Util_WriteLog( u"์๋ฌธ์ข
๋ชฉ๋ช
: " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ ๊ท๊ฑฐ๋์ ํ์ผ  ")
    self.Util_WriteLog( u"์ ๊ท๊ฑฐ๋์ ํ์ผ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ต์ข
๊ฑฐ๋์ผ  ")
    self.Util_WriteLog( u"์ต์ข
๊ฑฐ๋์ผ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"ํธ๊ฐ๋ฐฉ์  ")
    self.Util_WriteLog( u"ํธ๊ฐ๋ฐฉ์  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ฃผ๋ฌธ์ ํํญ  ")
    self.Util_WriteLog( u"์ฃผ๋ฌธ์ ํํญ  : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ค๋ฌผ์ธ์๋์์์ผ์")
    self.Util_WriteLog( u"์ค๋ฌผ์ธ์๋์์์ผ์: " + strData)
    pass
def proco51010(self, sTrCode):
    """Log the level-1 quote fields of TR o51010 (row 0 of OutRec1).

    Reads best bid/ask price, size and count plus the quote timestamp
    and writes each to the log with a Korean label.  NOTE(review): the
    first field literal was corrupted by an encoding/extraction step and
    is preserved byte-for-byte.
    """
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ข
๋ชฉ์ฝ๋")
    self.Util_WriteLog( u"์ข
๋ชฉ์ฝ๋ : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"ํธ๊ฐ์๊ฐ")
    self.Util_WriteLog( u"ํธ๊ฐ์๊ฐ : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"๋งค๋ํธ๊ฐ1")
    self.Util_WriteLog( u"๋งค๋ํธ๊ฐ1 : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"๋งค์ํธ๊ฐ1")
    self.Util_WriteLog( u"๋งค์ํธ๊ฐ1 : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"๋งค๋ํธ๊ฐ์๋1")
    self.Util_WriteLog( u"๋งค๋ํธ๊ฐ์๋1 : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"๋งค์ํธ๊ฐ์๋1")
    self.Util_WriteLog( u"๋งค์ํธ๊ฐ์๋1 : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"๋งค๋ํธ๊ฐ๊ฑด์1")
    self.Util_WriteLog( u"๋งค๋ํธ๊ฐ๊ฑด์1 : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"๋งค์ํธ๊ฐ๊ฑด์1")
    self.Util_WriteLog( u"๋งค์ํธ๊ฐ๊ฑด์1 : " + strData)
    pass
def proco51210(self, sTrCode):
    """Log the contract-specification fields of TR o51210 (row 0 of OutRec1).

    Fields cover listing/expiry dates, settlement data, tick size/value,
    session open/close times and margin info.  NOTE(review): several
    literals were corrupted by an encoding/extraction step and are
    preserved byte-for-byte.
    """
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ข
๋ชฉ์ฝ๋")
    self.Util_WriteLog( u"์ข
๋ชฉ์ฝ๋ : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ต์ข
๊ฑฐ๋์ผ")
    self.Util_WriteLog( u"์ต์ข
๊ฑฐ๋์ผ : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"๊ฑฐ๋์")
    self.Util_WriteLog( u"๊ฑฐ๋์ : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์์ฅ์ผ")
    self.Util_WriteLog( u"์์ฅ์ผ : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"๋ง๊ธฐ์ผ")
    self.Util_WriteLog( u"๋ง๊ธฐ์ผ : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ญ์ ์ผ")
    self.Util_WriteLog( u"์ญ์ ์ผ : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ ์ฐ๊ฐ")
    self.Util_WriteLog( u"์ ์ฐ๊ฐ : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ ์ฐ์ผ")
    self.Util_WriteLog( u"์ ์ฐ์ผ : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"๊ฐ๊ฒฉํ์")
    self.Util_WriteLog( u"๊ฐ๊ฒฉํ์ : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ฅ์์์๊ฐ(CME)")
    self.Util_WriteLog( u"์ฅ์์์๊ฐ(CME) : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ฅ์ข
๋ฃ์๊ฐ(CME)")
    self.Util_WriteLog( u"์ฅ์ข
๋ฃ์๊ฐ(CME) : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ฅ์์์๊ฐ(ํ๊ตญ)")
    self.Util_WriteLog( u"์ฅ์์์๊ฐ(ํ๊ตญ) : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ฅ์ข
๋ฃ์๊ฐ(ํ๊ตญ)")
    self.Util_WriteLog( u"์ฅ์ข
๋ฃ์๊ฐ(ํ๊ตญ) : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"๊ฑฐ๋ํตํ")
    self.Util_WriteLog( u"๊ฑฐ๋ํตํ : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ํ๊ตฌ๋ถ")
    self.Util_WriteLog( u"์ํ๊ตฌ๋ถ : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"๋งค๋งค์ฌ๋ถ")
    self.Util_WriteLog( u"๋งค๋งค์ฌ๋ถ : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์์กด์ผ์")
    self.Util_WriteLog( u"์์กด์ผ์ : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"ticksize")
    self.Util_WriteLog( u"ticksize : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"tickvalue")
    self.Util_WriteLog( u"tickvalue : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ต์ข
๊ฒฐ์ ์ผ")
    self.Util_WriteLog( u"์ต์ข
๊ฒฐ์ ์ผ : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"์ฆ๊ฑฐ๊ธ")
    self.Util_WriteLog( u"์ฆ๊ฑฐ๊ธ : " + strData)
    strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, u"๊ณ์ฝ๋จ์")
    self.Util_WriteLog( u"๊ณ์ฝ๋จ์ : " + strData)
    pass
def procs20001(self, sTrCode):
    """Log the four tick fields of real-time TR s20001 (row 0 of OutRec1).

    Fields, in order: short code, quote reception time, current price,
    cumulative volume.  Each value is written to the log as-is.
    """
    row = 0
    for field in (u"๋จ์ถ์ฝ๋", u"ํธ๊ฐ์์ ์๊ฐ", u"ํ์ฌ๊ฐ", u"๋์ ๊ฑฐ๋๋"):
        value = self.HD_CommGetData(sTrCode, -1, u"OutRec1", row, field)
        self.Util_WriteLog(value)
def procAQ0101(self, sTrCode):
    """Parse the fixed-width account list returned by HD_CommGetAccInfo.

    Record layout: first 5 characters are the account count, then one
    record per account of 11 (account no) + 30 (account name, cp949) +
    1 (account type) bytes.  Accounts are bucketed into the FO/FR/FX
    dicts by type code (9: domestic, 1: overseas, 2: FX) and the radio
    selection is refreshed at the end.  Python-2 bytes/unicode handling.
    """
    strData = self.HD_CommGetAccInfo()
    # First 5 characters hold the number of account records that follow.
    nAcctCnt = int(strData[0:5])
    nLenAcctNo = 11
    nLenAcctNm = 30
    nLenAcctGb = 1
    # Re-encode the remainder to cp949 bytes so the fixed-width byte
    # offsets below line up with the broker's record layout.
    strAcctInfo = str(strData[5:]).encode('cp949')
    nLenAccInfo = nLenAcctNo + nLenAcctNm + nLenAcctGb
    #strSelAccGb = "1" #1:ํด์ธ, 2:FX, 9:๊ตญ๋ด
    #if ( self.BtnRadio1.isChecked() == True ):
    # strSelAccGb = "9" #๊ตญ๋ด
    #elif ( self.BtnRadio2.isChecked() == True ):
    # strSelAccGb = "1" #ํด์ธ
    #elif ( self.BtnRadio3.isChecked() == True ):
    # strSelAccGb = "2" #FX
    for i in range(0, nAcctCnt):
        nStPos = (i*nLenAccInfo)
        # Slice the three fixed-width fields out of this record.
        strAcctNo = strAcctInfo[nStPos :nStPos+nLenAcctNo]
        strAcctNm = strAcctInfo[nStPos+(nLenAcctNo) :nStPos+nLenAcctNo+nLenAcctNm]
        strAcctGb = strAcctInfo[nStPos+(nLenAcctNo+nLenAcctNm):nStPos+nLenAcctNo+nLenAcctNm+nLenAcctGb]
        # Decode name from cp949; account number is plain ASCII digits.
        strAcctNm = unicode(strAcctNm, 'cp949')
        strAcctNo = unicode(strAcctNo.strip(), 'utf-8')
        strAcctNm = str(strAcctNm.strip())
        tmpDic = {strAcctNo:strAcctNm}
        # Route the account into the matching market bucket.
        if ( strAcctGb == "9" ):
            self.m_AccListFO.update(tmpDic)
        elif(strAcctGb == "1" ):
            self.m_AccListFR.update(tmpDic)
        elif(strAcctGb == "2" ):
            self.m_AccListFX.update(tmpDic)
        self.Util_WriteLog(strAcctNo + "," + strAcctNm + "," + strAcctGb)
    # Refresh the account combo for the currently-selected market radio.
    self.OnRadioAcctTpChange()
    pass
def procDQ0242(self, sTrCode):
    """Log the deposit/withdrawal summary fields of TR DQ0242 (row 0).

    Each (field, label) pair reads one named field from OutRec1 and
    writes it with its original label (labels kept byte-identical,
    including the original's uneven spacing).
    """
    field_labels = (
        (u"๊ณ์ข๋ฒํธ",        u"๊ณ์ข๋ฒํธ : "),
        (u"์ํ๊ธ์ก-์ด์ก",   u"์ํ๊ธ์ก-์ด์ก :"),
        (u"์ํ๊ธ์ก-ํ๊ธ",   u"์ํ๊ธ์ก-ํ๊ธ : "),
        (u"์ํ๊ธ์ก-๋์ฉ",   u"์ํ๊ธ์ก-๋์ฉ :"),
        (u"์ํ์ธํ",        u"์ํ์ธํ : "),
        (u"๋น์ผ์์ต",        u"๋น์ผ์์ต : "),
        (u"์ํ์์๋ฃ",      u"์ํ์์๋ฃ : "),
        (u"๋น์ผ์์์ต",      u"๋น์ผ์์์ต : "),
        (u"ํ๊ฐ์ํ์ด์ก",    u"ํ๊ฐ์ํ์ด์ก : "),
        (u"์์์ฐ-์ดํ๊ฐ์ก", u"์์์ฐ-์ดํ๊ฐ์ก : "),
    )
    for field, label in field_labels:
        value = self.HD_CommGetData(sTrCode, -1, u"OutRec1", 0, field)
        self.Util_WriteLog(label + value)
def procAQ0450(self, sTrCode):
nRptCnt = self.HD_CommGetRepeatCnt(sTrCode, -1, u"OutRec1")
self.Util_WriteLog( u"AQ0450 ์กฐํ๊ฐ์ : " + str(nRptCnt) )
for i in nRptCnt:
self.Util_WriteLog( u"===============================================" )
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ฃผ๋ฌธ๋ฒํธ")
self.Util_WriteLog(u"์ฃผ๋ฌธ๋ฒํธ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๊ณ์ข๋ฒํธ")
self.Util_WriteLog(u"๊ณ์ข๋ฒํธ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๊ณ์ข๋ช
")
self.Util_WriteLog(u"๊ณ์ข๋ช
: " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ข
๋ชฉ์ฝ๋")
self.Util_WriteLog(u"์ข
๋ชฉ์ฝ๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๋งค๋งค๊ตฌ๋ถ")
self.Util_WriteLog(u"๋งค๋งค๊ตฌ๋ถ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ฃผ๋ฌธ๊ฐ๊ฒฉ")
self.Util_WriteLog(u"์ฃผ๋ฌธ๊ฐ๊ฒฉ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ฃผ๋ฌธ์๋")
self.Util_WriteLog(u"์ฃผ๋ฌธ์๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ฒด๊ฒฐ์๋")
self.Util_WriteLog(u"์ฒด๊ฒฐ์๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์๋")
self.Util_WriteLog(u"์๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ฃผ๋ฌธ์ ํ")
self.Util_WriteLog(u"์ฃผ๋ฌธ์ ํ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"STOP ๊ฐ๊ฒฉ")
self.Util_WriteLog(u"STOP ๊ฐ๊ฒฉ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ฃผ๋ฌธ์๊ฐ")
self.Util_WriteLog(u"์ฃผ๋ฌธ์๊ฐ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ฃผ๋ฌธ์ ")
self.Util_WriteLog(u"์ฃผ๋ฌธ์ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"ํต์ ๊ตฌ๋ถ")
self.Util_WriteLog(u"ํต์ ๊ตฌ๋ถ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ ๋ต๊ตฌ๋ถ")
self.Util_WriteLog(u"์ ๋ต๊ตฌ๋ถ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ต์ด์์ฃผ๋ฌธ๋ฒํธ")
self.Util_WriteLog(u"์ต์ด์์ฃผ๋ฌธ๋ฒํธ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ฃผ๋ฌธ์ผ์")
self.Util_WriteLog(u"์ฃผ๋ฌธ์ผ์ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๊ทธ๋ฃน์ฃผ๋ฌธ๋ฒํธ")
self.Util_WriteLog(u"๊ทธ๋ฃน์ฃผ๋ฌธ๋ฒํธ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๊ฐ๊ฒฉ์กฐ๊ฑด")
self.Util_WriteLog(u"๊ฐ๊ฒฉ์กฐ๊ฑด : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ฒด๊ฒฐ์กฐ๊ฑด")
self.Util_WriteLog(u"์ฒด๊ฒฐ์กฐ๊ฑด : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"GTD์ผ์")
self.Util_WriteLog(u"GTD์ผ์ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"IOC์ต์์ฒด๊ฒฐ์๋")
self.Util_WriteLog(u"IOC์ต์์ฒด๊ฒฐ์๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์์ฃผ๋ฌธ๋ฒํธ")
self.Util_WriteLog(u"์์ฃผ๋ฌธ๋ฒํธ : " + strData)
pass
def procDQ0104(self, sTrCode):
nRptCnt = self.HD_CommGetRepeatCnt(sTrCode, -1, u"OutRec1")
self.Util_WriteLog( u"DQ0104 ์กฐํ๊ฐ์ : " + str(nRptCnt) )
for i in nRptCnt:
self.Util_WriteLog( u"===============================================" )
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๊ณ์ข๋ฒํธ")
self.Util_WriteLog( u"๊ณ์ข๋ฒํธ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๊ณ์ข๋ช
")
self.Util_WriteLog( u"๊ณ์ข๋ช
: " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ฃผ๋ฌธ๋ฒํธ")
self.Util_WriteLog( u"์ฃผ๋ฌธ๋ฒํธ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ข
๋ชฉ์ฝ๋")
self.Util_WriteLog( u"์ข
๋ชฉ์ฝ๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๋งค๋งค๊ตฌ๋ถ")
self.Util_WriteLog( u"๋งค๋งค๊ตฌ๋ถ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ฃผ๋ฌธ๊ฐ๊ฒฉ")
self.Util_WriteLog( u"์ฃผ๋ฌธ๊ฐ๊ฒฉ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ฃผ๋ฌธ์๋")
self.Util_WriteLog( u"์ฃผ๋ฌธ์๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ ์ ์๋")
self.Util_WriteLog( u"์ ์ ์๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ทจ์์๋")
self.Util_WriteLog( u"์ทจ์์๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ฒด๊ฒฐ์๋")
self.Util_WriteLog( u"์ฒด๊ฒฐ์๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์๋")
self.Util_WriteLog( u"์๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์์ฃผ๋ฌธ๋ฒํธ")
self.Util_WriteLog( u"์์ฃผ๋ฌธ๋ฒํธ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ต์ด์์ฃผ๋ฌธ๋ฒํธ")
self.Util_WriteLog( u"์ต์ด์์ฃผ๋ฌธ๋ฒํธ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"ํต์ ์ฃผ๋ฌธ๊ตฌ๋ถ")
self.Util_WriteLog( u"ํต์ ์ฃผ๋ฌธ๊ตฌ๋ถ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ฃผ๋ฌธ์ ๋ต๊ตฌ๋ถ")
self.Util_WriteLog( u"์ฃผ๋ฌธ์ ๋ต๊ตฌ๋ถ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ฃผ๋ฌธ์กฐ์๊ตฌ๋ถ")
self.Util_WriteLog( u"์ฃผ๋ฌธ์กฐ์๊ตฌ๋ถ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๊ฑฐ๋์์ ์์๊ฐ")
self.Util_WriteLog( u"๊ฑฐ๋์์ ์์๊ฐ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์์
์ฌ์")
self.Util_WriteLog( u"์์
์ฌ์ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๊ทธ๋ฃน์ฃผ๋ฌธ๋ฒํธ")
self.Util_WriteLog( u"๊ทธ๋ฃน์ฃผ๋ฌธ๋ฒํธ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๋๋ฌ๋ฒํธ")
self.Util_WriteLog( u"๋๋ฌ๋ฒํธ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๋๋ฌ๋ช
")
self.Util_WriteLog( u"๋๋ฌ๋ช
: " + strData)
pass
def procDQ0107(self, sTrCode):
nRptCnt = self.HD_CommGetRepeatCnt(sTrCode, -1, u"OutRec1")
self.Util_WriteLog( u"DQ0107 ์กฐํ๊ฐ์ : " + str(nRptCnt) )
for i in nRptCnt:
self.Util_WriteLog( u"===============================================" )
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๊ณ์ข๋ฒํธ")
self.Util_WriteLog( u"๊ณ์ข๋ฒํธ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๊ณ์ข๋ช
")
self.Util_WriteLog( u"๊ณ์ข๋ช
: " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ฃผ๋ฌธ๋ฒํธ")
self.Util_WriteLog( u"์ฃผ๋ฌธ๋ฒํธ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ข
๋ชฉ์ฝ๋")
self.Util_WriteLog( u"์ข
๋ชฉ์ฝ๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๋งค๋งค๊ตฌ๋ถ")
self.Util_WriteLog( u"๋งค๋งค๊ตฌ๋ถ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ฒด๊ฒฐ๊ฐ๊ฒฉ")
self.Util_WriteLog( u"์ฒด๊ฒฐ๊ฐ๊ฒฉ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ฒด๊ฒฐ์๋")
self.Util_WriteLog( u"์ฒด๊ฒฐ์๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ฒด๊ฒฐ์๊ฐ")
self.Util_WriteLog( u"์ฒด๊ฒฐ์๊ฐ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ ๋ต๊ตฌ๋ถ")
self.Util_WriteLog( u"์ ๋ต๊ตฌ๋ถ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"ํต์ ๊ตฌ๋ถ")
self.Util_WriteLog( u"ํต์ ๊ตฌ๋ถ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๋๋ฌ๋ฒํธ")
self.Util_WriteLog( u"๋๋ฌ๋ฒํธ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๋๋ฌ์ข๋ช
")
self.Util_WriteLog( u"๋๋ฌ์ข๋ช
: " + strData)
pass
def procDQ0110(self, sTrCode):
nRptCnt = self.HD_CommGetRepeatCnt(sTrCode, -1, u"OutRec1")
self.Util_WriteLog( u"DQ0110 ์กฐํ๊ฐ์ : " + str(nRptCnt) )
for i in nRptCnt:
self.Util_WriteLog( u"===============================================" )
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๊ณ์ข๋ฒํธ")
self.Util_WriteLog( u"๊ณ์ข๋ฒํธ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๊ณ์ข๋ช
")
self.Util_WriteLog( u"๊ณ์ข๋ช
: " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ข
๋ชฉ")
self.Util_WriteLog( u"์ข
๋ชฉ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๋งค๋งค๊ตฌ๋ถ")
self.Util_WriteLog( u"๋งค๋งค๊ตฌ๋ถ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ ์ผ๋ฏธ๊ฒฐ์ ์๋")
self.Util_WriteLog( u"์ ์ผ๋ฏธ๊ฒฐ์ ์๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๋น์ผ๋ฏธ๊ฒฐ์ ์๋")
self.Util_WriteLog( u"๋น์ผ๋ฏธ๊ฒฐ์ ์๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๋ฏธ์ฒด๊ฒฐ ์๋")
self.Util_WriteLog( u"๋ฏธ์ฒด๊ฒฐ ์๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"ํ๊ท ๋จ๊ฐ")
self.Util_WriteLog( u"ํ๊ท ๋จ๊ฐ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"์ฅ๋ถ๋จ๊ฐ")
self.Util_WriteLog( u"์ฅ๋ถ๋จ๊ฐ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๋น์ผ๋ฏธ๊ฒฐ์ ์ฝ์ ๊ธ์ก")
self.Util_WriteLog( u"๋น์ผ๋ฏธ๊ฒฐ์ ์ฝ์ ๊ธ์ก : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๋น์ผ์ฒด๊ฒฐ์๋")
self.Util_WriteLog( u"๋น์ผ์ฒด๊ฒฐ์๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๋น์ผ์ฒด๊ฒฐ๊ธ์ก")
self.Util_WriteLog( u"๋น์ผ์ฒด๊ฒฐ๊ธ์ก : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"ํ์ฌ์ ์ฒญ์๋")
self.Util_WriteLog( u"ํ์ฌ์ ์ฒญ์๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"ํ์ฌ๋ฐฐ์ ์๋")
self.Util_WriteLog( u"ํ์ฌ๋ฐฐ์ ์๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"ํ์ฌ๊ฑฐ๋ถ์๋")
self.Util_WriteLog( u"ํ์ฌ๊ฑฐ๋ถ์๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๋ง๊ธฐ์๋")
self.Util_WriteLog( u"๋ง๊ธฐ์๋ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๋๋ฌ๋ฒํธ")
self.Util_WriteLog( u"๋๋ฌ๋ฒํธ : " + strData)
strData = self.HD_CommGetData(sTrCode, -1, u"OutRec1", i, u"๋๋ฌ๋ช
")
self.Util_WriteLog( u"๋๋ฌ๋ช
: " + strData)
pass
def OnGetMsgWithRqId(self, nRqID, strErrCode, strErrMsg):
    """Event handler: log the error code/message attached to request nRqID.

    The original also built a second, never-used format string
    ('strRecv'); that dead local has been removed.
    """
    strRecvMsg = "๋ฉ์์ง์์ UniqueID = [%s] ErrorCode=[%s] ErrorMessage=[%s]" % (str(nRqID), unicode(strErrCode), unicode(strErrMsg))
    self.Util_WriteLog( strRecvMsg )
if __name__ == "__main__":
    # Script entry point: build the Qt main window, embed the broker's
    # HDF ActiveX control, and run the event loop until the window closes.
    import sys
    app = QtGui.QApplication(sys.argv)
    MainWindow = QtGui.QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(MainWindow)
    ui.SetupHDFOcx(MainWindow)
    MainWindow.show()
    sys.exit(app.exec_())
|
[
"jay.bwkim@gmail.com"
] |
jay.bwkim@gmail.com
|
b19eb4c4a4c8e2182848c6efdc592bdd73bafd78
|
7f004635957d252b7e8d7d6e0aee47e24aca0b82
|
/catkin_ws/src/sound_play/src/sound_play/libsoundplay.py
|
12c62a885fc10f5d991711f131baaf324d3e99cd
|
[] |
no_license
|
Pontusun/CIS700_Team2
|
75bef2305f5ccb7992f152b9fca20ea475077833
|
17b125b652d3be41884d6f625b7424b00ca0a6dd
|
refs/heads/master
| 2021-01-10T16:02:19.327233
| 2016-02-11T22:20:46
| 2016-02-11T22:20:46
| 44,488,662
| 1
| 3
| null | 2015-11-29T19:54:32
| 2015-10-18T17:35:24
|
CMake
|
UTF-8
|
Python
| false
| false
| 13,215
|
py
|
#!/usr/bin/env python
#***********************************************************
#* Software License Agreement (BSD License)
#*
#* Copyright (c) 2009, Willow Garage, Inc.
#* All rights reserved.
#*
#* Redistribution and use in source and binary forms, with or without
#* modification, are permitted provided that the following conditions
#* are met:
#*
#* * Redistributions of source code must retain the above copyright
#* notice, this list of conditions and the following disclaimer.
#* * Redistributions in binary form must reproduce the above
#* copyright notice, this list of conditions and the following
#* disclaimer in the documentation and/or other materials provided
#* with the distribution.
#* * Neither the name of the Willow Garage nor the names of its
#* contributors may be used to endorse or promote products derived
#* from this software without specific prior written permission.
#*
#* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
#* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
#* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
#* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
#* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
#* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
#* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
#* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
#* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
#* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
#* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
#* POSSIBILITY OF SUCH DAMAGE.
#***********************************************************
# Author: Blaise Gassend
import rospy
import roslib
import actionlib
import os, sys
from sound_play.msg import SoundRequest
from sound_play.msg import SoundRequestGoal
from sound_play.msg import SoundRequestAction
## \brief Class that publishes messages to the sound_play node.
##
## This class is a helper class for communicating with the sound_play node
## via the \ref sound_play.SoundRequest message. It has two ways of being used:
##
## - It can create Sound classes that represent a particular sound which
## can be played, repeated or stopped.
##
## - It provides methods for each way in which the sound_play.SoundRequest
## message can be invoked.
class Sound(object):
    """Handle to one sound managed by a SoundClient.

    Bundles a sound identifier, its argument (text or file path) and a
    playback volume, and forwards play/repeat/stop requests to the
    owning client.
    """

    def __init__(self, client, snd, arg, volume=1.0):
        self.client = client
        self.snd = snd
        self.arg = arg
        self.vol = volume

    def _send(self, command, **kwargs):
        # Common dispatch for volume-carrying commands.
        self.client.sendMsg(self.snd, command, self.arg,
                            vol=self.vol, **kwargs)

    ## \brief Play the Sound once.
    def play(self, **kwargs):
        self._send(SoundRequest.PLAY_ONCE, **kwargs)

    ## \brief Play the Sound repeatedly until stop() is called.
    def repeat(self, **kwargs):
        self._send(SoundRequest.PLAY_START, **kwargs)

    ## \brief Stop playback of this Sound.
    def stop(self):
        # Note: stop intentionally does not forward the stored volume.
        self.client.sendMsg(self.snd, SoundRequest.PLAY_STOP, self.arg)
## This class is a helper class for communicating with the sound_play node
## via the \ref sound_play.SoundRequest message. There is a one-to-one mapping
## between methods and invocations of the \ref sound_play.SoundRequest message.
class SoundClient(object):
    """Helper for communicating with the sound_play node via
    \ref sound_play.SoundRequest messages.  There is a one-to-one mapping
    between methods and invocations of the SoundRequest message."""

    def __init__(self, blocking=False):
        """
        The SoundClient can send SoundRequests in two modes: non-blocking mode
        (by publishing a message to the soundplay_node directly) which will
        return as soon as the sound request has been sent, or blocking mode (by
        using the actionlib interface) which will wait until the sound has
        finished playing completely.

        The blocking parameter here is the standard behavior, but can be
        over-ridden.  Each say/play/start/repeat method can take in an optional
        `blocking=True|False` argument that will over-ride the class-wide
        behavior.  See soundclient_example.py for an example of this behavior.

        :param blocking: Used as the default behavior unless over-ridden,
            (default = false)
        """
        self._blocking = blocking

        # NOTE: only one of these will be used at once, but we need to create
        # both the publisher and actionlib client here.
        self.actionclient = actionlib.SimpleActionClient(
            'sound_play', SoundRequestAction)
        self.pub = rospy.Publisher('robotsound', SoundRequest, queue_size=5)

    @staticmethod
    def _resolve_sound_path(sound):
        """Return an absolute path for *sound*, resolving relative names
        against the sound_play package's 'sounds' directory.

        Factored out of waveSound/playWave/startWave/stopWave, which all
        duplicated this logic (stopWave with a typo: roslib.package)."""
        if sound[0] != "/":
            rootdir = os.path.join(
                roslib.packages.get_pkg_dir('sound_play'), 'sounds')
            sound = rootdir + "/" + sound
        return sound

    ## \brief Create a voice Sound.
    ##
    ## \param s Text to say
    def voiceSound(self, s, volume=1.0):
        return Sound(self, SoundRequest.SAY, s, volume=volume)

    ## \brief Create a wave Sound.
    ##
    ## \param sound File to play.  Should be an absolute path that exists on
    ## the machine running the sound_play node.
    def waveSound(self, sound, volume=1.0):
        sound = self._resolve_sound_path(sound)
        return Sound(self, SoundRequest.PLAY_FILE, sound, volume=volume)

    ## \brief Create a builtin Sound.
    ##
    ## \param id Identifier of the sound to play.
    def builtinSound(self, id, volume=1.0):
        return Sound(self, id, "", volume)

    ## \brief Say a string.  The vocalization can be stopped using
    ## stopSaying or stopAll.
    ##
    ## \param text String to say
    def say(self, text, voice='', volume=1.0, **kwargs):
        self.sendMsg(SoundRequest.SAY, SoundRequest.PLAY_ONCE, text, voice,
                     volume, **kwargs)

    ## \brief Say a string repeatedly until stopSaying or stopAll is used.
    ##
    ## \param text String to say repeatedly
    def repeat(self, text, volume=1.0, **kwargs):
        self.sendMsg(SoundRequest.SAY, SoundRequest.PLAY_START, text,
                     vol=volume, **kwargs)

    ## \brief Stop saying a string previously started by say or repeat.
    ##
    ## \param text Same string as in the say or repeat command
    def stopSaying(self, text):
        self.sendMsg(SoundRequest.SAY, SoundRequest.PLAY_STOP, text)

    ## \brief Plays a WAV or OGG file once.  The playback can be stopped by
    ## stopWave or stopAll.
    ##
    ## \param sound Filename of the WAV or OGG file.  Must be an absolute
    ## path valid on the computer on which the sound_play node is running
    def playWave(self, sound, volume=1.0, **kwargs):
        sound = self._resolve_sound_path(sound)
        self.sendMsg(SoundRequest.PLAY_FILE, SoundRequest.PLAY_ONCE, sound,
                     vol=volume, **kwargs)

    ## \brief Plays a WAV or OGG file repeatedly until stopWave or stopAll
    ## is used.
    ##
    ## \param sound Filename of the WAV or OGG file.  Must be an absolute
    ## path valid on the computer on which the sound_play node is running.
    def startWave(self, sound, volume=1.0, **kwargs):
        sound = self._resolve_sound_path(sound)
        self.sendMsg(SoundRequest.PLAY_FILE, SoundRequest.PLAY_START, sound,
                     vol=volume, **kwargs)

    ## \brief Stop playing a WAV or OGG file previously started by playWave
    ## or startWave.
    ##
    ## \param sound Same string as in the playWave or startWave command
    def stopWave(self, sound):
        # BUG FIX: original called roslib.package.get_pkg_dir — the module
        # is roslib.packages (as used by every other method), so stopWave
        # raised AttributeError for any relative path.
        sound = self._resolve_sound_path(sound)
        self.sendMsg(SoundRequest.PLAY_FILE, SoundRequest.PLAY_STOP, sound)

    ## \brief Plays a WAV or OGG file once.  The playback can be stopped by
    ## stopWaveFromPkg or stopAll.
    ##
    ## \param package Package name containing the sound file.
    ## \param sound Filename of the WAV or OGG file.  Must be a path
    ## relative to the package, valid on the computer on which the
    ## sound_play node is running
    def playWaveFromPkg(self, package, sound, volume=1.0, **kwargs):
        self.sendMsg(SoundRequest.PLAY_FILE, SoundRequest.PLAY_ONCE, sound,
                     package, volume, **kwargs)

    ## \brief Plays a WAV or OGG file repeatedly until stopWaveFromPkg or
    ## stopAll is used.
    ##
    ## \param package Package name containing the sound file.
    ## \param sound Filename of the WAV or OGG file, relative to the package.
    def startWaveFromPkg(self, package, sound, volume=1.0, **kwargs):
        self.sendMsg(SoundRequest.PLAY_FILE, SoundRequest.PLAY_START, sound,
                     package, volume, **kwargs)

    ## \brief Stop playing a WAV or OGG file previously started by
    ## playWaveFromPkg or startWaveFromPkg.
    ##
    ## \param sound Filename of the WAV or OGG file, relative to the package.
    ## \param package Package name containing the sound file.
    def stopWaveFromPkg(self, sound, package):
        self.sendMsg(SoundRequest.PLAY_FILE, SoundRequest.PLAY_STOP, sound, package)

    ## \brief Play a built-in sound.  Built-in sounds are documented in
    ## \ref SoundRequest.msg.  Playback can be stopped by stopAll.
    ##
    ## \param sound Identifier of the sound to play.
    def play(self, sound, volume=1.0, **kwargs):
        self.sendMsg(sound, SoundRequest.PLAY_ONCE, "", vol=volume, **kwargs)

    ## \brief Play a built-in sound repeatedly until stop or stopAll is used.
    ##
    ## \param sound Identifier of the sound to play.
    def start(self, sound, volume=1.0, **kwargs):
        self.sendMsg(sound, SoundRequest.PLAY_START, "", vol=volume, **kwargs)

    ## \brief Stop playing a built-in sound started with play or start.
    ##
    ## \param sound Same sound that was used to start playback
    def stop(self, sound):
        self.sendMsg(sound, SoundRequest.PLAY_STOP, "")

    ## \brief Stop all speech, wave file, and built-in sound playback.
    def stopAll(self):
        self.stop(SoundRequest.ALL)

    def sendMsg(self, snd, cmd, s, arg2="", vol=1.0, **kwargs):
        """
        Internal method that publishes the sound request, either directly as a
        SoundRequest to the soundplay_node or through the actionlib interface
        (which blocks until the sound has finished playing).

        The blocking behavior is nominally the class-wide setting unless it has
        been explicitly specified in the play call.
        """

        # Use the passed-in argument if it exists, otherwise fall back to the
        # class-wide setting.
        blocking = kwargs.get('blocking', self._blocking)

        msg = SoundRequest()
        msg.sound = snd
        # Threshold volume between 0 and 1.
        msg.volume = max(0, min(1, vol))
        msg.command = cmd
        msg.arg = s
        msg.arg2 = arg2
        rospy.logdebug('Sending sound request with volume = {}'
                       ' and blocking = {}'.format(msg.volume, blocking))

        # Defensive check for the existence of the correct communicator.
        # BUG FIX: the original had these two checks swapped — the
        # non-blocking path uses self.pub and the blocking path uses
        # self.actionclient, but each check inspected the other object.
        if not blocking and not self.pub:
            rospy.logerr('Publisher for SoundRequest must exist')
            return
        if blocking and not self.actionclient:
            rospy.logerr('Action client for SoundRequest does not exist.')
            return

        if not blocking:  # Publish message directly and return immediately
            self.pub.publish(msg)
            if self.pub.get_num_connections() < 1:
                rospy.logwarn("Sound command issued, but no node is subscribed"
                              " to the topic. Perhaps you forgot to run"
                              " soundplay_node.py?")
        else:  # Block until result comes back.
            assert self.actionclient, 'Actionclient must exist'
            rospy.logdebug('Sending action client sound request [blocking]')
            self.actionclient.wait_for_server()
            goal = SoundRequestGoal()
            goal.sound_request = msg
            self.actionclient.send_goal(goal)
            self.actionclient.wait_for_result()
            rospy.logdebug('sound request response received')
        return
|
[
"genesis@nuc"
] |
genesis@nuc
|
96f4d811c08062451b1b929e346ee171461170de
|
18aee5d93a63eab684fe69e3aa0abd1372dd5d08
|
/test/legacy_test/test_poisson_op.py
|
ee66d578014c70395ec3525f8118d2780886458c
|
[
"Apache-2.0"
] |
permissive
|
Shixiaowei02/Paddle
|
8d049f4f29e281de2fb1ffcd143997c88078eadb
|
3d4d995f26c48f7792b325806ec3d110fc59f6fc
|
refs/heads/develop
| 2023-06-26T06:25:48.074273
| 2023-06-14T06:40:21
| 2023-06-14T06:40:21
| 174,320,213
| 2
| 1
|
Apache-2.0
| 2022-12-28T05:14:30
| 2019-03-07T10:09:34
|
C++
|
UTF-8
|
Python
| false
| false
| 8,481
|
py
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import unittest
import numpy as np
from eager_op_test import OpTest
import paddle
paddle.enable_static()
paddle.seed(100)
def output_hist(out, lam, a, b):
prob = []
bin = []
for i in range(a, b + 1):
prob.append((lam**i) * math.exp(-lam) / math.factorial(i))
bin.append(i)
bin.append(b + 0.1)
hist, _ = np.histogram(out, bin)
hist = hist.astype("float32")
hist = hist / float(out.size)
return hist, prob
class TestPoissonOp1(OpTest):
def setUp(self):
self.op_type = "poisson"
self.python_api = paddle.tensor.poisson
self.config()
self.attrs = {}
self.inputs = {'X': np.full([2048, 1024], self.lam, dtype=self.dtype)}
self.outputs = {'Out': np.ones([2048, 1024], dtype=self.dtype)}
def config(self):
self.lam = 10
self.a = 5
self.b = 15
self.dtype = "float64"
def verify_output(self, outs):
hist, prob = output_hist(np.array(outs[0]), self.lam, self.a, self.b)
np.testing.assert_allclose(hist, prob, rtol=0.01)
def test_check_output(self):
self.check_output_customized(self.verify_output)
def test_check_grad_normal(self):
self.check_grad(
['X'],
'Out',
user_defined_grads=[np.zeros([2048, 1024], dtype=self.dtype)],
user_defined_grad_outputs=[
np.random.rand(2048, 1024).astype(self.dtype)
],
)
class TestPoissonOp2(TestPoissonOp1):
def config(self):
self.lam = 5
self.a = 1
self.b = 8
self.dtype = "float32"
class TestPoissonAPI(unittest.TestCase):
def test_static(self):
with paddle.static.program_guard(
paddle.static.Program(), paddle.static.Program()
):
x_np = np.random.rand(10, 10)
x = paddle.static.data(name="x", shape=[10, 10], dtype='float64')
y = paddle.poisson(x)
exe = paddle.static.Executor()
y_np = exe.run(
paddle.static.default_main_program(),
feed={"x": x_np},
fetch_list=[y],
)
self.assertTrue(np.min(y_np) >= 0)
def test_dygraph(self):
with paddle.fluid.dygraph.base.guard():
x = paddle.randn([10, 10], dtype='float32')
y = paddle.poisson(x)
self.assertTrue(np.min(y.numpy()) >= 0)
x = paddle.randn([10, 10], dtype='float32')
x.stop_gradient = False
y = paddle.poisson(x)
y.backward()
self.assertTrue(np.min(y.numpy()) >= 0)
np.testing.assert_array_equal(np.zeros_like(x), x.gradient())
def test_fixed_random_number(self):
# Test GPU Fixed random number, which is generated by 'curandStatePhilox4_32_10_t'
if not paddle.is_compiled_with_cuda():
return
print("Test Fixed Random number on GPU------>")
paddle.disable_static()
paddle.set_device('gpu')
paddle.seed(2021)
x = paddle.full([32, 3, 1024, 768], 10.0, dtype="float32")
y = paddle.poisson(x)
y_np = y.numpy()
expect = [
13.0,
13.0,
11.0,
8.0,
12.0,
6.0,
9.0,
15.0,
16.0,
6.0,
13.0,
12.0,
9.0,
15.0,
17.0,
8.0,
11.0,
16.0,
11.0,
10.0,
]
np.testing.assert_array_equal(y_np[0, 0, 0, 0:20], expect)
expect = [
15.0,
7.0,
12.0,
8.0,
14.0,
10.0,
10.0,
11.0,
11.0,
11.0,
21.0,
6.0,
9.0,
13.0,
13.0,
11.0,
6.0,
9.0,
12.0,
12.0,
]
np.testing.assert_array_equal(y_np[8, 1, 300, 200:220], expect)
expect = [
10.0,
15.0,
9.0,
6.0,
4.0,
13.0,
10.0,
10.0,
13.0,
12.0,
9.0,
7.0,
10.0,
14.0,
7.0,
10.0,
8.0,
5.0,
10.0,
14.0,
]
np.testing.assert_array_equal(y_np[16, 1, 600, 400:420], expect)
expect = [
10.0,
9.0,
14.0,
12.0,
8.0,
9.0,
7.0,
8.0,
11.0,
10.0,
13.0,
8.0,
12.0,
9.0,
7.0,
8.0,
11.0,
11.0,
12.0,
5.0,
]
np.testing.assert_array_equal(y_np[24, 2, 900, 600:620], expect)
expect = [
15.0,
5.0,
11.0,
13.0,
12.0,
12.0,
13.0,
16.0,
9.0,
9.0,
7.0,
9.0,
13.0,
11.0,
15.0,
6.0,
11.0,
9.0,
10.0,
10.0,
]
np.testing.assert_array_equal(y_np[31, 2, 1023, 748:768], expect)
x = paddle.full([16, 1024, 1024], 5.0, dtype="float32")
y = paddle.poisson(x)
y_np = y.numpy()
expect = [
4.0,
5.0,
2.0,
9.0,
8.0,
7.0,
4.0,
7.0,
4.0,
7.0,
6.0,
3.0,
10.0,
7.0,
5.0,
7.0,
2.0,
5.0,
5.0,
6.0,
]
np.testing.assert_array_equal(y_np[0, 0, 100:120], expect)
expect = [
1.0,
4.0,
8.0,
11.0,
6.0,
5.0,
4.0,
4.0,
7.0,
4.0,
4.0,
7.0,
11.0,
6.0,
5.0,
3.0,
4.0,
6.0,
3.0,
3.0,
]
np.testing.assert_array_equal(y_np[4, 300, 300:320], expect)
expect = [
7.0,
5.0,
4.0,
6.0,
8.0,
5.0,
6.0,
7.0,
7.0,
7.0,
3.0,
10.0,
5.0,
10.0,
4.0,
5.0,
8.0,
7.0,
5.0,
7.0,
]
np.testing.assert_array_equal(y_np[8, 600, 600:620], expect)
expect = [
8.0,
6.0,
7.0,
4.0,
3.0,
0.0,
4.0,
6.0,
6.0,
4.0,
3.0,
10.0,
5.0,
1.0,
3.0,
8.0,
8.0,
2.0,
1.0,
4.0,
]
np.testing.assert_array_equal(y_np[12, 900, 900:920], expect)
expect = [
2.0,
1.0,
14.0,
3.0,
6.0,
5.0,
2.0,
2.0,
6.0,
5.0,
7.0,
4.0,
8.0,
4.0,
8.0,
4.0,
5.0,
7.0,
1.0,
7.0,
]
np.testing.assert_array_equal(y_np[15, 1023, 1000:1020], expect)
paddle.enable_static()
if __name__ == "__main__":
unittest.main()
|
[
"noreply@github.com"
] |
Shixiaowei02.noreply@github.com
|
7bed90a14fc2ce416d14e56c5bf265e8b646487f
|
7d3b096f803d1a47ad71a5c8aab30ba3aa67828c
|
/chibi_file/__init__.py
|
fe22184683cfdc5c75ca908282fad7a086a9d2bc
|
[] |
no_license
|
dem4ply/chibi_file
|
462244dac712d88915f2b931c5f0822f6d1fa937
|
d27cef794512014b1602486edd0235052b38087a
|
refs/heads/master
| 2020-12-03T05:09:15.825690
| 2017-08-23T09:36:57
| 2017-08-23T09:36:57
| 95,737,905
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,835
|
py
|
import mmap
import os
import shutil
def current_dir():
return os.getcwd()
def inflate_dir( src ):
if '~' in src:
return os.path.expanduser( src )
else:
return os.path.abspath( src )
def is_dir( src ):
return os.path.isdir( src )
def is_file( src ):
return os.path.isfile( src )
def ls( src=None ):
if src is None:
src = current_dir()
return ( name for name in os.listdir( src ) )
def ls_only_dir( src=None ):
return ( name for name in ls( src ) if is_dir( name ) )
def join( *patch ):
return os.path.join( *patch )
def exists( file_name ):
return os.path.exists( file_name )
def copy( source, dest ):
shutil.copy( source, dest )
class Chibi_file:
def __init__( self, file_name ):
self._file_name = file_name
if not self.exists:
self.touch()
self.reread()
@property
def file_name( self ):
return self._file_name
def __del__( self ):
self._file_content.close()
def find( self, string_to_find ):
if isinstance( string_to_find, str ):
string_to_find = string_to_find.encode()
return self._file_content.find( string_to_find )
def reread( self ):
with open( self._file_name, 'r' ) as f:
self._file_content = mmap.mmap( f.fileno(), 0,
prot=mmap.PROT_READ )
def __contains__( self, string ):
return self.find( string ) >= 0
def append( self, string ):
with open( self._file_name, 'a' ) as f:
f.write( string )
self.reread()
@property
def exists( self ):
return exists( self.file_name )
def touch( self ):
open( self.file_name, 'a' ).close()
def copy( self, dest ):
copy( self.file_name, dest )
|
[
"dem4ply@gmail.com"
] |
dem4ply@gmail.com
|
d6e8faee78b555a964bcdabf9d7b434fba09a3c0
|
b96f1bad8a74d31d8ff79bc955813bfcd17d7b26
|
/24. Swap Nodes in Pairs.py
|
75e6d9a0451fd14aadd62f665ddbd922cfa44910
|
[] |
no_license
|
brianhu0716/LeetCode-Solution
|
e7177af15e84e833ce8ab05027683ed4ac489643
|
158a4359c90b723545b22c4898047274cc1b80a6
|
refs/heads/main
| 2023-07-11T05:29:56.783795
| 2021-08-28T12:53:14
| 2021-08-28T12:53:14
| 374,991,658
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 444
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 11 15:06:14 2021
@author: Brian
"""
'''
ไบคๆ็ธ้ฐๅ
ฉnode็ๅผๅณๅฏ
'''
class Solution:
def swapPairs(self, head: ListNode) -> ListNode:
while not head or not head.next : return head
ptr = head
while ptr and ptr.next:
temp = ptr.val
ptr.val = ptr.next.val
ptr.next.val = temp
ptr = ptr.next.next
return head
|
[
"85205343+brianhu0716@users.noreply.github.com"
] |
85205343+brianhu0716@users.noreply.github.com
|
b7e89b7513c6151d39dc8adad4fee33e8afcf8f1
|
09cc8367edb92c2f02a0cc1c95a8290ff0f52646
|
/ipypublish_plugins/example_new_plugin.py
|
2fe177802ec9fd3259ca9ac9ac002ef160f3c1f2
|
[
"BSD-3-Clause"
] |
permissive
|
annefou/ipypublish
|
7e80153316ab572a348afe26d309c2a9ee0fb52b
|
917c7f2e84be006605de1cf8851ec13d1a163b24
|
refs/heads/master
| 2020-04-13T16:08:59.845707
| 2018-07-30T18:26:12
| 2018-07-30T18:26:12
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,612
|
py
|
"""html in standard nbconvert format, but with
- a table of contents
- toggle buttons for showing/hiding code & output cells
- converts or removes (if no converter) latex tags (like \cite{abc}, \ref{})
"""
from ipypublish.filters.replace_string import replace_string
from ipypublish.html.create_tpl import create_tpl
from ipypublish.html.ipypublish import latex_doc
# from ipypublish.html.standard import inout_prompt
from ipypublish.html.ipypublish import toc_sidebar
from ipypublish.html.ipypublish import toggle_buttons
from ipypublish.html.standard import content
from ipypublish.html.standard import content_tagging
from ipypublish.html.standard import document
from ipypublish.html.standard import mathjax
from ipypublish.html.standard import widgets
from ipypublish.preprocessors.latex_doc_captions import LatexCaptions
from ipypublish.preprocessors.latex_doc_html import LatexDocHTML
from ipypublish.preprocessors.latex_doc_links import LatexDocLinks
from ipypublish.preprocessors.latextags_to_html import LatexTagsToHTML
from ipypublish.preprocessors.split_outputs import SplitOutputs
oformat = 'HTML'
config = {'TemplateExporter.filters': {'replace_string': replace_string},
'Exporter.filters': {'replace_string': replace_string},
'Exporter.preprocessors': [SplitOutputs, LatexDocLinks, LatexDocHTML, LatexTagsToHTML, LatexCaptions]}
template = create_tpl([
document.tpl_dict,
content.tpl_dict, content_tagging.tpl_dict,
mathjax.tpl_dict, widgets.tpl_dict,
# inout_prompt.tpl_dict,
toggle_buttons.tpl_dict, toc_sidebar.tpl_dict,
latex_doc.tpl_dict
])
|
[
"chrisj_sewell@hotmail.com"
] |
chrisj_sewell@hotmail.com
|
003e7a2f14683e4f88eca45a7c30f8fd449eba92
|
aafedf44cbb6a2fd0616f01311e59bc55d19a87a
|
/todo/notes.py
|
42ee723a6e0d7170fed945cff77a7390a0c070dc
|
[
"MIT"
] |
permissive
|
rakeshsingh/todo.txt
|
57ae6b86a9815945eebe9a2f25894ae5df060326
|
6d9d13aae99a4714bfa67c1c7b9d7d11b1ce6460
|
refs/heads/master
| 2021-08-16T22:13:24.960790
| 2021-06-30T20:37:53
| 2021-06-30T20:37:53
| 144,942,401
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,986
|
py
|
import argparse
from datetime import datetime
import os
import re
import logging
from .log import setup_logging
from .exceptions import (
InvalidNotesFile,
)
from .utils import(
print_colorful,
set_file,
get_file,
)
from .constants import (
DEFAULT_FILE_SEPERATOR,
DEFAULT_FILE_SUFFIX,
DEFAULT_NOTES_PREFIX,
DEFAULT_NOTES_CATEGORY
)
logger = logging.getLogger(__name__)
'''
- Label it with the current month (it's best to start this at the beginning of a month if you can). Put contact information on the inside cover in case you lose it.
- Start a new page every day. Put the date at the top of the page.
- Metadata about the day goes in the margins. I use this to keep a log of who is front as well as taking medicine.
- Write prose freely.
- TODO items start with a -. Those represent things you need to do but haven't done yet.
- When the item is finished, put a vertical line through the - to make it a +.
- If the item either can't or won't be done, cross out the - to make it into a *.
- If you have to put off a task to a later date, turn the - into a ->. If there is room, put a brief description of why it needs to be moved or when it is moved to. If there's no room feel free to write it out in prose form at the end of your page.
- Notes start with a middot (ยท). They differ from prose as they are not complete sentences. If you need to, you can always turn them into TODO items later.
- Write in pencil so you can erase mistakes. Erase carefully to avoid ripping the paper, You hardly need to use any force to erase things.
- There is only one action, appending. Don't try and organize things by topic as you would on a computer. This is not a computer, this is paper. Paper works best when you append only. There is only one direction, forward.
- If you need to relate a bunch of notes or todo items with a topic, skip a line and write out the topic ending with a colon. When ending the topical notes, skip another line.
- Don't be afraid to write in it. If you end up using a whole notebook before the month is up, that is a success. Record insights, thoughts, feelings and things that come to your mind. You never know what will end up being useful later.
- At the end of the month, look back at the things you did and summarize/index them in the remaining pages. Discover any leftover items that you haven't completed yet so you can either transfer them over to next month or discard them. It's okay to not get everything done. You may also want to scan it to back it up into the cloud. You may never reference these scans, but backups never hurt.
'''
class NotesManager:
"""
Note file format: YYYYMMDD_[notes].txt
examples:
20210101_notes.txt(default)
20180101-meeting-generic.txt
20180102_one-on-one_rohit.txt
20180102_interview_shiva-sundaram.txt
"""
def __init__(self):
self.BASEDIR='./data/'
def list(self, category=None):
if category is None:
category = ''
for filename in os.listdir(self.BASEDIR):
if category in filename:
print(filename)
class Note:
"""
Note file format: YYYYMMDD_[notes].txt
examples:
20210101_notes.txt(default)
20180101-meeting-generic.txt
20180102_one-on-one_rohit.txt
20180102_interview_shiva-sundaram.txt
"""
def __init__(self, notes_date:str=datetime.today().strftime('%Y%m%d'),notes_prefix:str=DEFAULT_NOTES_PREFIX, notes_category:str=DEFAULT_NOTES_CATEGORY):
self.notes_date = notes_date
self.notes_category = notes_category
self.notes_file_name = DEAFULT_FILE_SEPERATOR.join(notes_date, notes_prefix, notes_category,DEFAULT_FILE_SUFFIX)
if get_file(self.notes_file_name):
pass
else:
set_file(notes_file_name)
def edit(line:int):
pass
def list_todos():
pass
def list_notes():
pass
def list_categories():
pass
def list_people():
pass
def get_context():
'''Provides summary of the note. What all people are mentioned in the note, what are the action items, and their statuses, what are the related topics'''
pass
if __name__ =='__main__':
nm = NotesManager()
parser = argparse.ArgumentParser()
parser.add_argument("-n", "--new", action="store_true", help="create a new note\n usage: notes.py -c category detail")
parser.add_argument("-l", "--list", action="store_true", help="list most recent notes")
parser.add_argument("-c", "--category", help="category of the notes, e.g. meeting, todo, oneonone")
parser.add_argument("-d", "--detail", help="additional details for the notes e.g. meeting subject, 1_1 person")
args = parser.parse_args()
if args.list:
nm.list(args.category)
elif args.new:
nm.create(args.category, args.detail)
nm.list(args.category)
else:
nm.list(args.category)
|
[
"kumar.rakesh@gmail.com"
] |
kumar.rakesh@gmail.com
|
c02698bcbb5677d5aa1cdf687d66869a34eea59c
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02742/s024664971.py
|
37251941a04a71608f69d756b2f8eb6bf24e8a52
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 259
|
py
|
H, W = [int(_) for _ in input().split()]
if H == 1 or W == 1:
ans = 1
else:
ans = (H // 2) * (W // 2) * 2
if H % 2 == 1:
ans += W // 2
if W % 2 == 1:
ans += H // 2
if H % 2 == 1 and W % 2 == 1:
ans += 1
print(ans)
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
7374ce7e683ccf1d4913b6f64fb04fb50b016df7
|
6c686d118e6d3072b3694c02c684a6619d4dd03e
|
/rsdns/tests/test_client.py
|
cb34bcfaef1aa74df689f00debfbff8959f697df
|
[
"Apache-2.0"
] |
permissive
|
masthalter/reddwarf
|
02e7b78e1e61178647fe8d98ab53eadfabe66e7f
|
72cf41d573cd7c35a222d9b7a8bfaad937f17754
|
HEAD
| 2016-11-08T16:12:16.783829
| 2012-04-26T22:26:56
| 2012-04-26T22:26:56
| 2,387,563
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,033
|
py
|
import httplib2
import mox
import unittest
from eventlet import pools
from novaclient.client import HTTPClient
from novaclient import exceptions
from rsdns.client.dns_client import DNSaasClient
ACCOUNT_ID = 1155
USERNAME = "test_user"
API_KEY="key"
AUTH_URL="urly"
MANAGEMENT_BASE_URL="mgmter"
class FakeResponse(object):
def __init__(self, status):
self.status = status
class WhenDNSaasClientConnectsSuccessfully(unittest.TestCase):
def setUp(self):
self.mox = mox.Mox()
def tearDown(self):
self.mox.VerifyAll()
def fake_auth(self, *args, **kwargs):
self.auth_called = True
def create_mock_client(self, fake_request_method):
"""
Creates a mocked DNSaasClient object, which calls "fake_request_method"
instead of httplib2.request.
"""
class FakeHttpLib2(object):
pass
FakeHttpLib2.request = fake_request_method
mock_client = self.mox.CreateMock(DNSaasClient)
mock_client.http_pool = pools.Pool()
mock_client.http_pool.create = FakeHttpLib2
mock_client.auth_token = 'token'
return mock_client
def test_make_request(self):
kwargs = {
'headers': {},
'body': "{}"
}
def fake_request(self, *args, **kwargs):
return FakeResponse(200), '{"hi":"hello"}'
mock_client = self.create_mock_client(fake_request)
resp, body = DNSaasClient.request(mock_client, **kwargs)
self.assertEqual(200, resp.status)
self.assertEqual({"hi":"hello"}, body)
def test_make_request_with_old_token(self):
kwargs = {
'headers': {},
'body': '{"message":"Invalid authentication token. Please renew."}'
}
def fake_request(self, *args, **kwargs):
return FakeResponse(401), \
'{"message":"Invalid authentication token. Please renew."}'
mock_client = self.create_mock_client(fake_request)
mock_client.authenticate()
mock_client.authenticate()
mock_client.authenticate()
self.mox.ReplayAll()
self.assertRaises(exceptions.Unauthorized, DNSaasClient.request,
mock_client, **kwargs)
def test_make_request_with_old_token_2(self):
kwargs = {
'headers': {},
'body': "{}"
}
self.count = 0
def fake_request(_self, *args, **kwargs):
self.count += 1
if self.count > 1:
return FakeResponse(200), '{"hi":"hello"}'
else:
return FakeResponse(401), \
'{"message":"Invalid authentication token. ' \
'Please renew."}'
mock_client = self.create_mock_client(fake_request)
mock_client.authenticate()
self.mox.ReplayAll()
resp, body = DNSaasClient.request(mock_client, **kwargs)
self.assertEqual(200, resp.status)
self.assertEqual({"hi":"hello"}, body)
|
[
"tim.simpson@rackspace.com"
] |
tim.simpson@rackspace.com
|
89b926a667374d24e238ffecadfdddc4cc558e8b
|
79164d3ac85ab44b8b29dbea8ff7395b1bf15cba
|
/configurations/configurations.py
|
8b3390c66948a3f2bb2856b3d26bbda53e6ce345
|
[] |
no_license
|
ReubenBagtas/pytestFramework
|
60cb3daa5a8613ef0bca4f21d3282cdfa2cfd6a0
|
cceca23007aab71447949d54949a0c306a4281f1
|
refs/heads/master
| 2020-06-02T04:06:45.139851
| 2019-06-09T16:34:38
| 2019-06-09T16:34:38
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 101
|
py
|
ALPHA = {}
DELTA = {}
PROD = {}
DEMO = {
'google-homepage': 'https://www.google.com'
}
DEFAULT = {}
|
[
"reubenbagtas.com"
] |
reubenbagtas.com
|
919cfaed50624e9fb97a068432bd5441d59ed4a1
|
ff9c646af161bc4d1d38093380d8e2deb54f42d1
|
/MiniAODAnalysis2/MiniAODAnalysis2/python/ConfFile_cfg.py
|
e4fe4e7ad2a2e67f021cd0c065696543022fb7a7
|
[] |
no_license
|
MatthiasHamer/CMS
|
3ab99a9008324e104913c03c2aa322a816a2d582
|
3594ba61e5d794917cf5e0136da743fd5e6190f9
|
refs/heads/master
| 2016-09-10T03:55:30.810030
| 2015-07-27T16:45:40
| 2015-07-27T16:45:40
| 22,997,971
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 475
|
py
|
import FWCore.ParameterSet.Config as cms
process = cms.Process("Demo")
process.load("FWCore.MessageService.MessageLogger_cfi")
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
process.source = cms.Source("PoolSource",
# replace 'myfile.root' with the source file you want to use
fileNames = cms.untracked.vstring(
'file:myfile.root'
)
)
process.demo = cms.EDAnalyzer('MiniAODAnalysis2'
)
process.p = cms.Path(process.demo)
|
[
"Matthias.Hamer@cern.ch"
] |
Matthias.Hamer@cern.ch
|
5fb4b852b0812c437735609954ef8693db355baf
|
75802efe7ac5e39d12c90b1ab91028f7a83f3623
|
/ex3_2_nonlinear_model.py
|
5527d8f1c41efad01b46792a6808fa3434ad63ef
|
[] |
no_license
|
AOE-khkhan/PytorchZeroToAll
|
fbd5ecba967a4d7316667c028af37340518c451d
|
f8d9a392e44197de6c593dfd4b32cb192d3a6ba9
|
refs/heads/master
| 2020-09-02T12:21:32.800832
| 2018-05-09T12:38:52
| 2018-05-09T12:38:52
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,068
|
py
|
x_data = [1.0, 2.0, 3.0]
y_data = [6.0, 17.0, 34.0]
w2, w1, b = 1.0, 1.0, 0.0 # a random guess: random value
lr = 1e-2 # a learning rate
num_epochs = 100 # numbe rof epochs
# our model forward pass
def forward(x):
return x * x * w2 + x * w1 + b
# Loss function
def loss(x, y):
y_pred = forward(x)
return (y_pred - y) * (y_pred - y)
# compute gradient
def gradient(x, y):
dw2 = 2 * x ** 2 * (forward(x) - y)
dw1 = 2 * x * (forward(x) - y)
db = 1 * (forward(x) - y)
return dw2, dw1, db
# Before training
print('predict (before training):', 4, forward(4))
# Training loop
for epoch in range(num_epochs):
l_ = None
for x_val, y_val in zip(x_data, y_data):
dw2, dw1, db = gradient(x_val, y_val)
w2 += -lr * dw2
w1 += -lr * dw1
b += -lr * db
l_ = loss(x_val, y_val)
print("probress: {}, w2 = {}, w1 = {}, b = {}, loss = {}".format(epoch, w2, w1, b, l_))
# After training
print("predict (after training)", "4 hours:", forward(4))
|
[
"sbkim0407@gmail.com"
] |
sbkim0407@gmail.com
|
a911b9df049a048325fc20b414321d00fb1d10f4
|
64d36f801ff52465bb04b6783fab26359ffc6177
|
/python-basic/meta10.4.py
|
27ee8518f5623e3e896ed81b39d4010990855314
|
[] |
no_license
|
asiachrispy/tensor
|
3ee3257e4c7cc518677395f81158a92339d410a2
|
29bbcd6350d3e4ce8363b25e86d708bce6612eac
|
refs/heads/master
| 2021-01-11T20:19:14.381846
| 2018-05-15T09:33:13
| 2018-05-15T09:33:13
| 79,088,526
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 919
|
py
|
# early stopping
from numpy import loadtxt
from xgboost import XGBClassifier
from sklearn.cross_validation import train_test_split
from sklearn.metrics import accuracy_score
# load data
dataset = loadtxt('pima-indians-diabetes.csv', delimiter=",")
# split data into X and y
X = dataset[:,0:8]
Y = dataset[:,8]
# split data into train and test sets
seed = 7
test_size = 0.33
X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=test_size,
random_state=seed)
# fit model no training data
model = XGBClassifier()
eval_set = [(X_test, y_test)]
model.fit(X_train, y_train, early_stopping_rounds=10, eval_metric="logloss",
eval_set=eval_set, verbose=True)
# make predictions for test data
y_pred = model.predict(X_test)
predictions = [round(value) for value in y_pred]
# evaluate predictions
accuracy = accuracy_score(y_test, predictions)
print("Accuracy: %.2f%%" % (accuracy * 100.0))
|
[
"huangzhong@jd.com"
] |
huangzhong@jd.com
|
10a0f0560d4f34d9937c65d9b6b1f0b3ba125dcb
|
4d8a66ebd98d18e407c20c93d4268b85f5548ed4
|
/public/http_request.py
|
9c15a12525a537248fd19e00abbaff92638d1ed6
|
[] |
no_license
|
2295256562/API_Project
|
57cc40a9c79e855aa30a25db820bbffb0add6410
|
66b72690d765ed96a9d8ae72debeaba4fe7a5073
|
refs/heads/master
| 2022-12-14T10:20:32.753852
| 2019-10-10T13:40:22
| 2019-10-10T13:40:22
| 213,394,060
| 0
| 0
| null | 2022-12-08T06:42:03
| 2019-10-07T13:42:25
|
Python
|
UTF-8
|
Python
| false
| false
| 1,913
|
py
|
import copy
import requests
from config.globals import headers
from config.globals import login_url
from functools import lru_cache
class HttpRequest:
def __init__(self):
self.headers_teml = {
'content-type': 'application/json',
'token': None
}
@lru_cache()
def _login(self):
"""
:return: ่ฟๅtoken
"""
data = {
"username": "",
"password": ""
}
# url = login_url
# r = requests.post(url=url, json=data, headers=headers)
x = "ldddwlfwfwelof"
return x
def headers(self):
headers = copy.deepcopy(self.headers_teml)
headers.update({'token': self._login()})
return headers
def http_request(self, url, data, http_method, header):
"""
http ่ฏทๆฑๅบ็ก็ฑป
:param url: ่ฏทๆฑ็url
:param data: ่ฏทๆฑๆฐๆฎ
:param http_method: ่ฏทๆฑๆนๅผ GETใPOST
:return: res
"""
# headers = self.headers()
try:
if http_method.upper() == 'GET':
if data != None:
res = requests.get(url, eval(data))
else:
res = requests.get(url)
elif http_method.upper() == 'POST':
if data != None:
res = requests.post(url, eval(data))
else:
res = requests.post(url)
else:
raise NameError("ไฝ ่พๅ
ฅ็่ฏทๆฑๆนๅผไธๅฏน๏ผ ่ฏทไฝ ่พๅ
ฅGETๆPOST")
except Exception as e:
raise e
return res
if __name__ == '__main__':
C = HttpRequest().http_request('http://127.0.0.1:8000/api/reg', '{"username":"123425653","password":"1111"}',
'POST')
print(C.headers)
|
[
"xinman.kuang@daddylab.com"
] |
xinman.kuang@daddylab.com
|
5b6ae4546dda852369334665c79612273e580227
|
0eaf0d3f0e96a839f2ef37b92d4db5eddf4b5e02
|
/past3/e.py
|
6a5af7eb3744903d23f04c0de4ad30e373c33a27
|
[] |
no_license
|
silphire/atcoder
|
b7b02798a87048757745d99e8564397d1ca20169
|
f214ef92f13bc5d6b290746d5a94e2faad20d8b0
|
refs/heads/master
| 2023-09-03T17:56:30.885166
| 2023-09-02T14:16:24
| 2023-09-02T14:16:24
| 245,110,029
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 433
|
py
|
n, m, q = map(int, input().split())
e = [set() for _ in range(n)]
for i in range(m):
u, v = map(int, input().split())
u -= 1
v -= 1
e[u].add(v)
e[v].add(u)
c = list(map(int, input().split()))
for i in range(q):
s = tuple(map(int, input().split()))
vv = s[1] - 1
print(c[vv])
if s[0] == 1:
for ee in e[vv]:
c[ee] = c[vv]
else:
c[vv] = s[2]
|
[
"silphire@gmail.com"
] |
silphire@gmail.com
|
9378b601770bd4c71b6a616ad9a7a895ad48a7b2
|
6b2a8dd202fdce77c971c412717e305e1caaac51
|
/solutions_5658571765186560_1/Python/StefanPochmann/D.py
|
feca86f2ed13ba681c7c3230c60ce03f3e2c21f7
|
[] |
no_license
|
alexandraback/datacollection
|
0bc67a9ace00abbc843f4912562f3a064992e0e9
|
076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf
|
refs/heads/master
| 2021-01-24T18:27:24.417992
| 2017-05-23T09:23:38
| 2017-05-23T09:23:38
| 84,313,442
| 2
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 500
|
py
|
#f = open('D.in')
#def input():
# return next(f)
T = int(input())
for x in range(1, T + 1):
X, R, C = map(int, input().split())
A = R * C
s, S = sorted((R, C))
gabriel = A % X == 0 and \
(X == 1 or
X == 2 or
X == 3 and s >= 2 or
X == 4 and s >= 3 or
X == 5 and s >= 3 and A > 15 or
X == 6 and s >= 4)
print('Case #{}: {}'.format(x, 'GABRIEL' if gabriel else 'RICHARD'))
|
[
"eewestman@gmail.com"
] |
eewestman@gmail.com
|
1537c16957f6dab218fa76a979c37a5ba45f9468
|
0b4b1dd3e5652572571128750f31d088b28b19ad
|
/blog/views.py
|
926c879a7599aea1421bbf3987c9a362189fe6b7
|
[] |
no_license
|
cozynn/djangoProject
|
66d74d70efb8e0efeb4a2a21146e4a4473ab50e0
|
1bf95b7b906ce030616e994c091c9693e34c30ab
|
refs/heads/master
| 2023-02-02T19:07:37.276396
| 2020-12-20T19:23:02
| 2020-12-20T19:23:02
| 323,114,124
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,615
|
py
|
from django.shortcuts import render, get_object_or_404, redirect
from django.utils import timezone
from .models import Post
from .forms import PostForm
# Create your views here.
def post_list(request):
posts = Post.objects.filter(published_date__lte=timezone.now()).order_by('-published_date')
return render(request, 'blog/post_list.html', {'posts': posts})
def post_detail(request, pk):
post = get_object_or_404(Post, pk=pk)
return render(request, 'blog/post_detail.html', {'post': post})
def post_new(request):
if request.method == "POST":
form = PostForm(request.POST)
if form.is_valid():
post = form.save(commit=False)
post.author = request.user
post.published_date = timezone.now()
post.save()
return redirect('blog:post_detail', pk=post.pk)
else:
form = PostForm()
return render(request, 'blog/post_edit.html', {'form': form})
def post_edit(request, pk):
post = get_object_or_404(Post, pk=pk)
if request.method == "POST":
form = PostForm(request.POST, instance=post)
if form.is_valid():
post = form.save(commit=False)
post.author = request.user
post.published_date = timezone.now()
post.save()
return redirect('blog:post_detail', pk=post.pk)
else:
form = PostForm(instance=post)
return render(request, 'blog/post_edit.html', {'form': form})
def post_delete(request, pk):
post = Post.objects.get(id=pk)
post.author = request.user
post.delete()
return redirect('blog:post_list')
|
[
"cozynn@naver.com"
] |
cozynn@naver.com
|
3c3d8847ece82de5f4ddb2fa122ea976e7da211e
|
2ee3a2b8971118b1a1e8c101382702d698021ad5
|
/weather/models.py
|
8372f28956277f086c9e5f53ff17faa6a968168c
|
[] |
no_license
|
manikshahkataria/weather
|
29a34264fd281cf26758be06d19dd19bbd226cfc
|
1bb5160caab2dc287118ab7ed4a25cf575453ee4
|
refs/heads/master
| 2022-12-11T07:50:28.988645
| 2019-01-19T10:22:10
| 2019-01-19T10:22:10
| 163,946,933
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 226
|
py
|
from django.db import models
class City(models.Model):
    """A city whose weather can be looked up; stores only the name."""
    name= models.CharField(max_length=25)  # shown as the object's label via __str__
    def __str__(self):
        return self.name
    class Meta:
        verbose_name_plural='cities'  # avoid Django's default plural "citys"
# Create your models here.
|
[
"manikshah1998@gmail.com"
] |
manikshah1998@gmail.com
|
170427ab7a1e4482bd8912d41bdaa5ebbaf1c595
|
fe969d059327d767c9eb442a441395fd5e389d6a
|
/OriginalCode-v1/create_graphs.py
|
09812b2133780e5e29a5211411aabf3c7a09eb36
|
[
"MIT"
] |
permissive
|
Wayne-Bai/Graph
|
ba514418261189a89801ff10839fbfb651d98dc7
|
4b563c824d946471393a1c404810d6f39a49f7fb
|
refs/heads/master
| 2022-12-18T20:32:06.797842
| 2020-09-16T14:12:50
| 2020-09-16T14:12:50
| 277,702,901
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,766
|
py
|
import networkx as nx
import numpy as np
from utils import *
from data import *
from data_process import Graph_load_batch as ast_graph_load_batch
def create(args):
    """Build the list of graphs selected by ``args.graph_type``.

    Returns a list of networkx graphs and mutates ``args`` with
    dataset-dependent settings: ``max_prev_node`` (presumably an upper bound
    on how far back an edge may reach under the generation ordering, tuned
    per dataset — confirm against the training code) and, for the AST
    datasets, the node/edge feature dimensions.
    """
### load datasets
    graphs=[]
    # synthetic graphs
    if args.graph_type=='ladder':
        graphs = []
        for i in range(100, 201):
            graphs.append(nx.ladder_graph(i))
        args.max_prev_node = 10
    elif args.graph_type=='ladder_small':
        graphs = []
        for i in range(2, 11):
            graphs.append(nx.ladder_graph(i))
        args.max_prev_node = 10
    elif args.graph_type=='tree':
        graphs = []
        for i in range(2,5):
            for j in range(3,5):
                graphs.append(nx.balanced_tree(i,j))
        args.max_prev_node = 256
    elif args.graph_type=='caveman':
        # graphs = []
        # for i in range(5,10):
        #     for j in range(5,25):
        #         for k in range(5):
        #             graphs.append(nx.relaxed_caveman_graph(i, j, p=0.1))
        graphs = []
        for i in range(2, 3):
            for j in range(30, 81):
                for k in range(10):
                    graphs.append(caveman_special(i,j, p_edge=0.3))
        args.max_prev_node = 100
    elif args.graph_type=='caveman_small':
        # graphs = []
        # for i in range(2,5):
        #     for j in range(2,6):
        #         for k in range(10):
        #             graphs.append(nx.relaxed_caveman_graph(i, j, p=0.1))
        graphs = []
        for i in range(2, 3):
            for j in range(6, 11):
                for k in range(20):
                    graphs.append(caveman_special(i, j, p_edge=0.8)) # default 0.8
        args.max_prev_node = 20
    elif args.graph_type=='caveman_small_single':
        # graphs = []
        # for i in range(2,5):
        #     for j in range(2,6):
        #         for k in range(10):
        #             graphs.append(nx.relaxed_caveman_graph(i, j, p=0.1))
        graphs = []
        for i in range(2, 3):
            for j in range(8, 9):
                for k in range(100):
                    graphs.append(caveman_special(i, j, p_edge=0.5))
        args.max_prev_node = 20
    elif args.graph_type.startswith('community'):
        # The trailing digit of the graph_type names the community count,
        # e.g. 'community4' -> 4 communities.
        num_communities = int(args.graph_type[-1])
        print('Creating dataset with ', num_communities, ' communities')
        c_sizes = np.random.choice([12, 13, 14, 15, 16, 17], num_communities)
        #c_sizes = [15] * num_communities
        for k in range(3000):
            graphs.append(n_community(c_sizes, p_inter=0.01))
        args.max_prev_node = 80
    elif args.graph_type=='grid':
        graphs = []
        for i in range(10,20):
            for j in range(10,20):
                graphs.append(nx.grid_2d_graph(i,j))
        args.max_prev_node = 40
    elif args.graph_type=='grid_small':
        graphs = []
        for i in range(2,5):
            for j in range(2,6):
                graphs.append(nx.grid_2d_graph(i,j))
        args.max_prev_node = 15
    elif args.graph_type=='barabasi':
        graphs = []
        for i in range(100,200):
            for j in range(4,5):
                for k in range(5):
                    graphs.append(nx.barabasi_albert_graph(i,j))
        args.max_prev_node = 130
    elif args.graph_type=='barabasi_small':
        graphs = []
        for i in range(4,21):
            for j in range(3,4):
                for k in range(10):
                    graphs.append(nx.barabasi_albert_graph(i,j))
        args.max_prev_node = 20
    elif args.graph_type=='grid_big':
        graphs = []
        for i in range(36, 46):
            for j in range(36, 46):
                graphs.append(nx.grid_2d_graph(i, j))
        args.max_prev_node = 90
    elif 'barabasi_noise' in args.graph_type:
        graphs = []
        for i in range(100,101):
            for j in range(4,5):
                for k in range(500):
                    graphs.append(nx.barabasi_albert_graph(i,j))
        # Perturb edges with probability args.noise/10.
        graphs = perturb_new(graphs,p=args.noise/10.0)
        args.max_prev_node = 99
    # real graphs
    elif args.graph_type == 'enzymes':
        graphs= Graph_load_batch(min_num_nodes=10, name='ENZYMES')
        args.max_prev_node = 25
    elif args.graph_type == 'enzymes_small':
        graphs_raw = Graph_load_batch(min_num_nodes=10, name='ENZYMES')
        graphs = []
        for G in graphs_raw:
            if G.number_of_nodes()<=20:
                graphs.append(G)
        args.max_prev_node = 15
    elif args.graph_type == 'protein':
        graphs = Graph_load_batch(min_num_nodes=20, name='PROTEINS_full')
        args.max_prev_node = 80
    elif args.graph_type == 'DD':
        graphs = Graph_load_batch(min_num_nodes=100, max_num_nodes=500, name='DD',node_attributes=False,graph_labels=True)
        args.max_prev_node = 230
    elif args.graph_type == 'citeseer':
        # Keep 3-hop ego networks of the largest connected component whose
        # size falls in [50, 400].
        _, _, G = Graph_load(dataset='citeseer')
        G = max(nx.connected_component_subgraphs(G), key=len)
        G = nx.convert_node_labels_to_integers(G)
        graphs = []
        for i in range(G.number_of_nodes()):
            G_ego = nx.ego_graph(G, i, radius=3)
            if G_ego.number_of_nodes() >= 50 and (G_ego.number_of_nodes() <= 400):
                graphs.append(G_ego)
        args.max_prev_node = 250
    elif args.graph_type == 'citeseer_small':
        _, _, G = Graph_load(dataset='citeseer')
        G = max(nx.connected_component_subgraphs(G), key=len)
        G = nx.convert_node_labels_to_integers(G)
        graphs = []
        for i in range(G.number_of_nodes()):
            G_ego = nx.ego_graph(G, i, radius=1)
            if (G_ego.number_of_nodes() >= 4) and (G_ego.number_of_nodes() <= 20):
                graphs.append(G_ego)
        shuffle(graphs)
        graphs = graphs[0:200]
        args.max_prev_node = 15
    elif args.graph_type == 'AST' or args.graph_type == '200Graphs':
        graphs = ast_graph_load_batch(min_num_nodes=10, name=args.graph_type)
        # update edge_feature_output_dim
        if not args.max_node_feature_num:
            # Derive the node-feature width from the first graph's node
            # attribute dict.
            # print(type(graphs[1].nodes._nodes), graphs[1].nodes._nodes.keys())
            args.max_node_feature_num = len(list(graphs[1].nodes._nodes._atlas[1].keys())) # now equals to 28
        args.max_prev_node = 120
        # TODO: args.max_edge_feature_num update
        if not args.edge_feature_output_dim:
            args.edge_feature_output_dim = args.max_edge_feature_num + 2 #int(args.max_prev_node * args.max_edge_feature_num)
            # 2 indicates two directions of edges
        if not args.node_feature_input_dim:
            args.node_feature_input_dim = args.max_node_feature_num + args.max_prev_node + args.edge_feature_output_dim
            # args.node_feature_input_dim = args.max_prev_node
    return graphs
|
[
"bwh.buaa@gmail.com"
] |
bwh.buaa@gmail.com
|
8f296f11eca9ea2bac005150d59334d961aca574
|
2e423a357321278e08fb009812f5fd2f794f66e5
|
/users/migrations/0001_initial.py
|
44c4896eefc330924229af13f6fc98432e429d7d
|
[] |
no_license
|
rafnixg/platzigram
|
2a0f5d4c5994be31401408fd3176f57beb1da98e
|
e51a9d92c14c23d0de24cdda78ce9683955c43e3
|
refs/heads/master
| 2022-04-27T12:04:21.549548
| 2020-01-17T21:26:18
| 2020-01-17T21:26:18
| 206,177,266
| 0
| 0
| null | 2022-04-22T22:25:27
| 2019-09-03T21:41:51
|
Python
|
UTF-8
|
Python
| false
| false
| 1,116
|
py
|
# Generated by Django 2.2.5 on 2019-09-04 17:42
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial migration for the users app: creates the Profile model.

    Auto-generated by Django's makemigrations; an already-applied migration
    must not be edited.
    """
    initial = True
    dependencies = [
        # Profile.user references the project's (swappable) user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('website', models.URLField(blank=True)),
                ('biography', models.TextField(blank=True)),
                ('phone_number', models.CharField(blank=True, max_length=20)),
                ('picture', models.ImageField(blank=True, null=True, upload_to='users/pictures')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
[
"rafnixg@gmail.com"
] |
rafnixg@gmail.com
|
c7293eeb3014b78e8398f1c4818bd0ae1e72f0a8
|
7ddae3bb754cadccae7bb9a9e11986c5d3bc06b2
|
/sample/getweather.py
|
d99bd9b98f4d400b82dc3ae5c90f9567613ae096
|
[
"MIT"
] |
permissive
|
joembis/weather_texter
|
b9fa1449ad9097a292362556c26e4d2700713ed2
|
046fdf6811847072662b52e3873fae8bd45a8e85
|
refs/heads/main
| 2023-03-04T09:16:44.055293
| 2021-02-15T10:42:17
| 2021-02-15T10:42:17
| 339,039,341
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,469
|
py
|
import requests
from datetime import datetime as dt
from datetime import timedelta
def request_weather(url):
    """request the weather from openweathermap.org API. Returns a dict of the json file"""
    # NOTE(review): no timeout or raise_for_status() here — an HTTP error
    # surfaces later as an unexpected dict shape; presumably acceptable for
    # this small script, but confirm.
    response = requests.get(url)
    response_dict = response.json()
    return response_dict
def parse_weather(weather_data_raw):
    """Extract the useful fields from the openweathermap.org forecast dict.

    Returns a slimmed-down dict containing the city's sunrise/sunset times
    plus, for every forecast period that falls on tomorrow's date, a list of
    [hour, rounded temperature, conditions (centered to 15 chars),
    zero-padded cloud cover, zero-padded rounded wind speed], keyed by the
    two-digit 24-hour hour (e.g. "15" for 3.00pm).
    """
    city = weather_data_raw.get("city")
    result = {
        'sunrise': dt.fromtimestamp(city.get("sunrise")).time(),
        'sunset': dt.fromtimestamp(city.get("sunset")).time(),
    }
    tomorrow = dt.today().date() + timedelta(days=1)
    for entry in weather_data_raw['list']:
        when = dt.fromtimestamp(entry.get("dt"))
        if when.date() != tomorrow:
            # Only tomorrow's forecast periods are kept.
            continue
        hour = str(when.time())[:2]
        result[hour] = [
            hour,
            round(entry.get("main").get("temp")),
            entry.get("weather")[0].get("main").center(15),
            str(entry.get("clouds").get("all")).zfill(3),
            str(round(entry.get("wind").get("speed"))).zfill(3),
        ]
    return result
|
[
"76848685+jobororo@users.noreply.github.com"
] |
76848685+jobororo@users.noreply.github.com
|
7587bb7173fe75660b164c360fe327e5f35df3c5
|
6e8b2c9cd80af70fc070085a07d8f34216d16ec1
|
/Task 25/main.py
|
7e8a4623cd9610f3db37fea47aca067d85871129
|
[] |
no_license
|
tregubchenko-dima/modern_tech_programming
|
65957a8da63903e17f8421c84379d3312ece3521
|
1aa824f9f5ef32772cfc61fa4d53ab1f898594fb
|
refs/heads/master
| 2023-04-09T04:54:23.166673
| 2021-04-27T18:51:28
| 2021-04-27T18:51:28
| 352,748,720
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,567
|
py
|
from random import randint
from math import sqrt
def BozoSort(values, asc=True):
    """Bozo-sort ``values``: swap two random elements, retry until ordered.

    A 2-D input (list of lists) is flattened one level first.  ``asc``
    selects ascending (default) or descending order.  The list is mutated
    in place and returned.
    """
    # Flatten a matrix-style input into one flat list.
    if isinstance(values[0], list):
        values = [item for row in values for item in row]
    # Swap two randomly chosen positions (possibly the same index).
    i = randint(0, len(values) - 1)
    j = randint(0, len(values) - 1)
    values[i], values[j] = values[j], values[i]
    # Recurse on the first adjacent pair that is still out of order.
    for k in range(len(values) - 1):
        out_of_order = values[k] > values[k + 1] if asc else values[k] < values[k + 1]
        if out_of_order:
            return BozoSort(values, asc)
    return values
# Script entry: read n and then n integers, and demonstrate BozoSort on the
# flat list, on an int(sqrt(n))-square matrix built from its prefix, and on
# the first three elements — ascending and descending each time.
try:
    n = int(input('Введите количество чисел n: '))
    if 3 < n <= 100:
        values = list(map(int, input(f'Введите числа: ').split(' ')))
        if n != len(values):
            print('Неверное количество чисел')
            exit(1)
        print(*BozoSort(values))
        print(*BozoSort(values, False))
        # Reshape the first size*size values into a matrix to exercise the
        # flattening path of BozoSort.
        size = int(sqrt(n))
        arr = []
        for i in range(0, size):
            arr.append(values[i*size:(i*size)+size])
        print(*BozoSort(arr))
        print(*BozoSort(arr, False))
        print(*BozoSort(values[0:3]))
        print(*BozoSort(values[0:3], False))
    else:
        print('Неверное количество чисел')
        exit(1)
except ValueError:
    # Non-numeric input in either prompt above lands here.
    print("Вы ввели недопустимое значение")
    exit(1)
|
[
"tregubchenko2@yandex.ru"
] |
tregubchenko2@yandex.ru
|
ab881c94078041feb7fe0fefd3fb0913233feb4b
|
e715be7aef31a307d2cf09d8a4ecf46ea662826f
|
/device_simulator/src/orchestator.py
|
e88831369f171c3e6acd4859ce8da628125314b0
|
[] |
no_license
|
GabrielMartinMoran/TFI_UNTREF
|
0dcfd0d5b4d69c282ce732a21039c4a69a6530af
|
e4abc9bc93b840627a008e3af5f4d86b7cd30732
|
refs/heads/main
| 2023-06-23T11:06:35.138785
| 2021-07-14T13:21:14
| 2021-07-14T13:21:14
| 358,573,316
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,489
|
py
|
import time
from datetime import datetime
from src.models.energy_sensor import EnergySensor
from src.models.console_display import ConsoleDisplay
from src.models.measure import Measure
from src.repositories.user_repository import UserRepository
from src.repositories.device_repository import DeviceRepository
import config
class Orchestator:
    """Device main loop: sample the energy sensor, upload each measure to
    the server, and refresh the console display.

    (Class name spelling kept as in the original source.)
    """
    def __init__(self, device, ref_voltage, ref_current, user_secret):
        # Hardware/UI collaborators.
        self.sensor = EnergySensor(ref_voltage, ref_current)
        self.display = ConsoleDisplay()
        self.device = device
        # Both repositories authenticate with the user's secret.
        self.user_repository = UserRepository(user_secret)
        self.device_repository = DeviceRepository(user_secret)
        self.user = self.user_repository.get_user_data()
        self.message = ''  # status line shown on the display
    def loop(self):
        """Run forever: measure, try to upload, update the UI, sleep."""
        while(True):
            measure = Measure(
                self.sensor.get_voltage(),
                self.sensor.get_current(),
                self.__get_timestamp()
            )
            try:
                self.device_repository.add_measure(self.device.ble_id, measure)
                self.message = 'Muestra enviada al servidor'
            except Exception as e:
                # Upload failures are shown on the display instead of
                # stopping the loop.
                self.message = f'Error: {e}'
            self.device.set_last_measure(measure)
            self.display.set_ui(self.device, self.user, self.message)
            self.display.draw()
            time.sleep(config.TIME_BETWEEN_MEASUREMENTS)
    def __get_timestamp(self):
        # Seconds since the epoch (local clock), truncated to int.
        return int(datetime.now().timestamp())
|
[
"moran.gabriel.95@gmail.com"
] |
moran.gabriel.95@gmail.com
|
5fc93f5180bbbf9d6e8482073bcb89bf2d923892
|
2c68f9156087d6d338373f9737fee1a014e4546b
|
/src/connectedk8s/azext_connectedk8s/vendored_sdks/models/authentication_details_value.py
|
982b4554803e85c978165d7b651f09cd77ff0c69
|
[
"LicenseRef-scancode-generic-cla",
"MIT"
] |
permissive
|
anpaz/azure-cli-extensions
|
8b0d4071c49840da9883f13cb0fd1f4515246ee0
|
847fd487fe61e83f2a4163a9393edc9555267bc2
|
refs/heads/master
| 2023-04-23T17:22:53.427404
| 2021-01-29T17:48:28
| 2021-01-29T18:01:33
| 257,394,204
| 2
| 0
|
MIT
| 2021-01-28T10:31:07
| 2020-04-20T20:19:43
|
Python
|
UTF-8
|
Python
| false
| false
| 890
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AuthenticationDetailsValue(Model):
    """Authentication token value.

    Auto-generated msrest model; regeneration will overwrite manual edits.

    :param token: Authentication token.
    :type token: str
    """
    # msrest (de)serialization map: attribute name -> wire key and type.
    _attribute_map = {
        'token': {'key': 'token', 'type': 'str'},
    }
    def __init__(self, **kwargs):
        super(AuthenticationDetailsValue, self).__init__(**kwargs)
        self.token = kwargs.get('token', None)
|
[
"noreply@github.com"
] |
anpaz.noreply@github.com
|
2b152bec1bfa703b5df15c67f5fc0e3aa9ab815e
|
432a58b3bad9eb008ea332c06f22700172c660ac
|
/admin/client.py
|
19d4b3ccd622ebea00d2084316019e164af6a53a
|
[
"Apache-2.0"
] |
permissive
|
achanda/flocker
|
7b5c5264b52489e9da774ff011699c6b62a4bddd
|
ac822c3d6687ea63cad2aea81334a86100bfda0e
|
refs/heads/master
| 2020-12-11T01:40:20.625304
| 2015-08-01T05:47:10
| 2015-08-01T05:47:10
| 37,820,406
| 0
| 0
| null | 2015-06-21T18:24:07
| 2015-06-21T18:24:06
| null |
UTF-8
|
Python
| false
| false
| 9,859
|
py
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Run the acceptance tests.
"""
import sys
import yaml
from zope.interface import Interface, implementer
from characteristic import attributes
from eliot import add_destination
from twisted.internet.error import ProcessTerminated
from twisted.python.usage import Options, UsageError
from twisted.python.filepath import FilePath
from twisted.internet.defer import inlineCallbacks, returnValue
from flocker.common.version import make_rpm_version
from flocker.provision import PackageSource, CLOUD_PROVIDERS
import flocker
from flocker.provision._ssh import (
run_remotely)
from flocker.provision._install import (
task_client_installation_test,
install_cli,
)
from effect.twisted import perform
from flocker.provision._ssh._conch import make_dispatcher
from .runner import run
def remove_known_host(reactor, hostname):
    """
    Remove all keys belonging to hostname from a known_hosts file.

    :param reactor: Reactor to use.
    :param bytes hostname: Remove all keys belonging to this hostname from
        known_hosts.
    """
    # ``ssh-keygen -R`` edits the user's known_hosts in place, so the first
    # connection to a freshly provisioned node won't trip over a stale key.
    return run(reactor, ['ssh-keygen', '-R', hostname])
def run_client_tests(reactor, node):
    """
    Run the client acceptance tests.

    :param INode node: The node to run client acceptance tests against.
    :return int: The exit-code of trial.
    """
    def check_result(f):
        # Translate a non-zero remote exit status into a plain exit code;
        # any other failure is propagated unchanged.
        f.trap(ProcessTerminated)
        if f.value.exitCode is not None:
            return f.value.exitCode
        else:
            return f
    return perform(make_dispatcher(reactor), run_remotely(
        username=node.get_default_username(),
        address=node.address,
        commands=task_client_installation_test()
    )).addCallbacks(
        # Success means the remote command exited 0.
        callback=lambda _: 0,
        errback=check_result,
    )
class INodeRunner(Interface):
    """
    Interface for starting and stopping nodes for acceptance testing.
    """
    # zope.interface declarations: the methods deliberately omit ``self``.
    def start_nodes(reactor):
        """
        Start nodes for running acceptance tests.

        :param reactor: Reactor to use.
        :return Deferred: Deferred which fires with a list of nodes to run
            tests against.
        """
    def stop_nodes(reactor):
        """
        Stop the nodes started by `start_nodes`.

        :param reactor: Reactor to use.
        :return Deferred: Deferred which fires when the nodes have been
            stopped.
        """
# Attributes supplied to the runner via the ``characteristic`` decorator.
RUNNER_ATTRIBUTES = [
    'distribution', 'top_level', 'config', 'package_source'
]
@implementer(INodeRunner)
@attributes(RUNNER_ATTRIBUTES + [
    'provisioner',
], apply_immutable=True)
class LibcloudRunner(object):
    """
    Start and stop cloud nodes for acceptance testing.

    :ivar LibcloudProvioner provisioner: The provisioner to use to create the
        nodes.
    :ivar VolumeBackend volume_backend: The volume backend the nodes are
        configured with.
        (NOTE(review): no volume_backend attribute is set on this class —
        possibly a stale docstring.)
    """
    def __init__(self):
        self.nodes = []
        # 'creator' metadata is mandatory; it is embedded in node names, so
        # it must be alphanumeric.
        self.metadata = self.config.get('metadata', {})
        try:
            creator = self.metadata['creator']
        except KeyError:
            raise UsageError("Must specify creator metadata.")
        if not creator.isalnum():
            raise UsageError(
                "Creator must be alphanumeric. Found {!r}".format(creator)
            )
        self.creator = creator
    @inlineCallbacks
    def start_nodes(self, reactor, node_count):
        """
        Start cloud nodes for client tests.

        :return list: List of addresses of nodes to connect to, for client
            tests.
        """
        metadata = {
            'purpose': 'client-testing',
            'distribution': self.distribution,
        }
        metadata.update(self.metadata)
        for index in range(node_count):
            name = "client-test-%s-%d" % (self.creator, index)
            try:
                print "Creating node %d: %s" % (index, name)
                node = self.provisioner.create_node(
                    name=name,
                    distribution=self.distribution,
                    metadata=metadata,
                )
            except:
                # Creation may fail after the cloud resource was allocated.
                print "Error creating node %d: %s" % (index, name)
                print "It may have leaked into the cloud."
                raise
            # Drop any stale SSH host key before first connecting.
            yield remove_known_host(reactor, node.address)
            self.nodes.append(node)
            del node
        returnValue(self.nodes)
    def stop_nodes(self, reactor):
        """
        Deprovision the nodes provisioned by ``start_nodes``.
        """
        for node in self.nodes:
            try:
                print "Destroying %s" % (node.name,)
                node.destroy()
            except Exception as e:
                # Best-effort cleanup: report and continue with the rest.
                print "Failed to destroy %s: %s" % (node.name, e)
DISTRIBUTIONS = ('centos-7', 'ubuntu-14.04', 'ubuntu-15.04')
PROVIDERS = tuple(sorted(CLOUD_PROVIDERS.keys()))
class RunOptions(Options):
    """
    Command-line options for the client acceptance-test runner.
    """
    description = "Run the client tests."
    optParameters = [
        ['distribution', None, None,
         'The target distribution. '
         'One of {}.'.format(', '.join(DISTRIBUTIONS))],
        ['provider', None, 'rackspace',
         'The target provider to test against. '
         'One of {}.'.format(', '.join(PROVIDERS))],
        ['config-file', None, None,
         'Configuration for providers.'],
        ['branch', None, None, 'Branch to grab packages from'],
        # A verbatim duplicate of the following 'flocker-version' entry was
        # removed here.
        ['flocker-version', None, flocker.__version__,
         'Version of flocker to install'],
        ['build-server', None, 'http://build.clusterhq.com/',
         'Base URL of build server for package downloads'],
    ]
    optFlags = [
        ["keep", "k", "Keep VMs around, if the tests fail."],
    ]
    synopsis = ('Usage: run-client-tests --distribution <distribution> '
                '[--provider <provider>]')
    def __init__(self, top_level):
        """
        :param FilePath top_level: The top-level of the flocker repository.
        """
        Options.__init__(self)
        self.top_level = top_level
    def postOptions(self):
        """
        Validate the parsed options, load the provider configuration, and
        build ``self.runner``.
        """
        if self['distribution'] is None:
            raise UsageError("Distribution required.")
        if self['config-file'] is not None:
            config_file = FilePath(self['config-file'])
            self['config'] = yaml.safe_load(config_file.getContent())
        else:
            self['config'] = {}
        if self['flocker-version']:
            # Convert the version to OS-package form, dropping a trailing
            # '.dirty' marker from development builds.
            rpm_version = make_rpm_version(self['flocker-version'])
            os_version = "%s-%s" % (rpm_version.version, rpm_version.release)
            if os_version.endswith('.dirty'):
                os_version = os_version[:-len('.dirty')]
        else:
            os_version = None
        package_source = PackageSource(
            version=self['flocker-version'],
            os_version=os_version,
            branch=self['branch'],
            build_server=self['build-server'],
        )
        if self['distribution'] not in DISTRIBUTIONS:
            raise UsageError(
                "Distribution %r not supported. Available distributions: %s"
                % (self['distribution'], ', '.join(DISTRIBUTIONS)))
        if self['provider'] not in PROVIDERS:
            raise UsageError(
                "Provider %r not supported. Available providers: %s"
                % (self['provider'], ', '.join(PROVIDERS)))
        if self['provider'] in CLOUD_PROVIDERS:
            # Configuration must include credentials etc for cloud providers.
            try:
                provider_config = self['config'][self['provider']]
            except KeyError:
                raise UsageError(
                    "Configuration file must include a "
                    "{!r} config stanza.".format(self['provider'])
                )
            provisioner = CLOUD_PROVIDERS[self['provider']](**provider_config)
            self.runner = LibcloudRunner(
                config=self['config'],
                top_level=self.top_level,
                distribution=self['distribution'],
                package_source=package_source,
                provisioner=provisioner,
            )
from .acceptance import eliot_output
@inlineCallbacks
def main(reactor, args, base_path, top_level):
    """
    Parse options, provision one node, install the CLI on it, run the
    client acceptance tests, and tear the node down again.

    :param reactor: Reactor to use.
    :param list args: The arguments passed to the script.
    :param FilePath base_path: The executable being run.
    :param FilePath top_level: The top-level of the flocker repository.
    """
    options = RunOptions(top_level=top_level)
    add_destination(eliot_output)
    try:
        options.parseOptions(args)
    except UsageError as e:
        sys.stderr.write("%s: %s\n" % (base_path.basename(), e))
        raise SystemExit(1)
    runner = options.runner
    from flocker.common.script import eliot_logging_service
    # Append all Eliot logs to a file named after the executable.
    log_file = open("%s.log" % base_path.basename(), "a")
    log_writer = eliot_logging_service(
        log_file=log_file,
        reactor=reactor,
        capture_stdout=False)
    log_writer.startService()
    reactor.addSystemEventTrigger(
        'before', 'shutdown', log_writer.stopService)
    try:
        nodes = yield runner.start_nodes(reactor, node_count=1)
        yield perform(
            make_dispatcher(reactor),
            install_cli(runner.package_source, nodes[0]))
        result = yield run_client_tests(reactor=reactor, node=nodes[0])
    except:
        result = 1
        raise
    finally:
        # Unless the tests failed, and the user asked to keep the nodes, we
        # delete them.
        if not (result != 0 and options['keep']):
            runner.stop_nodes(reactor)
        elif options['keep']:
            print "--keep specified, not destroying nodes."
    raise SystemExit(result)
|
[
"jon.giddy@clusterhq.com"
] |
jon.giddy@clusterhq.com
|
b5f8da9a720fc87c6d511ba662b7005be382c8eb
|
da93b0746d5b12899c17db53839cd3055d6e8267
|
/bin/dcn2.py
|
111d73937abee3c2b859718d0ddab6fd1c745ffd
|
[
"Apache-2.0"
] |
permissive
|
mattmoehr/rtdl
|
01228261739dcc3cf3fb0e47f3a9d987c8e322eb
|
44cdf56fd958bc60609ae595911d272afa998d67
|
refs/heads/main
| 2023-08-08T10:05:01.482980
| 2021-09-02T14:33:13
| 2021-09-02T14:33:13
| 408,602,108
| 0
| 0
|
Apache-2.0
| 2021-09-20T21:14:04
| 2021-09-20T21:14:03
| null |
UTF-8
|
Python
| false
| false
| 8,536
|
py
|
# %%
import math
import typing as ty
from pathlib import Path
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import zero
import lib
# %%
class CrossLayer(nn.Module):
def __init__(self, d, dropout):
super().__init__()
self.linear = nn.Linear(d, d)
self.dropout = nn.Dropout(dropout)
def forward(self, x0, x):
return self.dropout(x0 * self.linear(x)) + x
class DCNv2(nn.Module):
    """Deep & Cross Network v2 for tabular data.

    Runs a stack of cross layers and an MLP, either sequentially
    (``stacked=True``: cross -> deep -> head) or in parallel
    (``stacked=False``: their outputs are concatenated before the head).
    Categorical features, when present, are embedded and concatenated to
    the numeric features.
    """
    def __init__(
        self,
        *,
        d_in: int,
        d: int,
        n_hidden_layers: int,
        n_cross_layers: int,
        hidden_dropout: float,
        cross_dropout: float,
        d_out: int,
        stacked: bool,
        categories: ty.Optional[ty.List[int]],
        d_embedding: int,
    ) -> None:
        super().__init__()
        if categories is not None:
            d_in += len(categories) * d_embedding
            # Offsets let every categorical column share one embedding table:
            # column i's codes are shifted by the total cardinality before it.
            category_offsets = torch.tensor([0] + categories[:-1]).cumsum(0)
            self.register_buffer('category_offsets', category_offsets)
            self.category_embeddings = nn.Embedding(sum(categories), d_embedding)
            nn.init.kaiming_uniform_(self.category_embeddings.weight, a=math.sqrt(5))
            print(f'{self.category_embeddings.weight.shape=}')
        self.first_linear = nn.Linear(d_in, d)
        # The parallel variant concatenates cross and deep outputs, doubling
        # the head's input width.
        self.last_linear = nn.Linear(d if stacked else 2 * d, d_out)
        deep_layers = sum(
            [
                [nn.Linear(d, d), nn.ReLU(True), nn.Dropout(hidden_dropout)]
                for _ in range(n_hidden_layers)
            ],
            [],
        )
        cross_layers = [CrossLayer(d, cross_dropout) for _ in range(n_cross_layers)]
        self.deep_layers = nn.Sequential(*deep_layers)
        self.cross_layers = nn.ModuleList(cross_layers)
        self.stacked = stacked
    def forward(self, x_num, x_cat):
        # x_num: numeric features; x_cat: integer category codes or None.
        if x_cat is not None:
            x_cat = self.category_embeddings(x_cat + self.category_offsets[None])
            x = torch.cat([x_num, x_cat.view(x_cat.size(0), -1)], dim=-1)
        else:
            x = x_num
        x = self.first_linear(x)
        x_cross = x
        for cross_layer in self.cross_layers:
            x_cross = cross_layer(x, x_cross)
        if self.stacked:
            return self.last_linear(self.deep_layers(x_cross)).squeeze(1)
        else:
            return self.last_linear(
                torch.cat([x_cross, self.deep_layers(x)], dim=1)
            ).squeeze(1)
# %%
# Script setup: load config, build dataset tensors, instantiate the model,
# the optimizer and the training bookkeeping.  ``lib`` and ``zero`` are
# project/experiment helpers.
args, output = lib.load_config()
# %%
zero.set_randomness(args['seed'])
dataset_dir = lib.get_path(args['data']['path'])
stats: ty.Dict[str, ty.Any] = {
    'dataset': dataset_dir.name,
    'algorithm': Path(__file__).stem,
    **lib.load_json(output / 'stats.json'),
}
timer = zero.Timer()
timer.run()
D = lib.Dataset.from_dir(dataset_dir)
X = D.build_X(
    normalization=args['data'].get('normalization'),
    num_nan_policy='mean',
    cat_nan_policy='new',
    cat_policy=args['data'].get('cat_policy', 'counter'),
    cat_min_frequency=args['data'].get('cat_min_frequency', 0.0),
    seed=args['seed'],
)
# Normalize X to a (numeric, categorical-or-None) pair.
if not isinstance(X, tuple):
    X = (X, None)
zero.set_randomness(args['seed'])
Y, y_info = D.build_y(args['data'].get('y_policy'))
lib.dump_pickle(y_info, output / 'y_info.pickle')
X = tuple(None if x is None else lib.to_tensors(x) for x in X)
Y = lib.to_tensors(Y)
device = lib.get_device()
if device.type != 'cpu':
    X = tuple(None if x is None else {k: v.to(device) for k, v in x.items()} for x in X)
    Y_device = {k: v.to(device) for k, v in Y.items()}
else:
    Y_device = Y
X_num, X_cat = X
# Regression/binary targets must be float for the losses used below.
if not D.is_multiclass:
    Y_device = {k: v.float() for k, v in Y_device.items()}
train_size = len(X_num[lib.TRAIN])
batch_size = args['training']['batch_size']
epoch_size = stats['epoch_size'] = math.ceil(train_size / batch_size)
# Loss matches the task type: BCE / CE / MSE.
loss_fn = (
    F.binary_cross_entropy_with_logits
    if D.is_binclass
    else F.cross_entropy
    if D.is_multiclass
    else F.mse_loss
)
args['model'].setdefault('d_embedding', None)
model = DCNv2(
    d_in=X_num[lib.TRAIN].shape[1],
    d_out=D.info['n_classes'] if D.is_multiclass else 1,
    categories=lib.get_categories(X_cat),
    **args['model'],
).to(device)
stats['n_parameters'] = lib.get_n_parameters(model)
optimizer = lib.make_optimizer(
    args['training']['optimizer'],
    model.parameters(),
    args['training']['lr'],
    args['training']['weight_decay'],
)
stream = zero.Stream(lib.IndexLoader(train_size, batch_size, True, device))
progress = zero.ProgressTracker(args['training']['patience'])
training_log = {lib.TRAIN: [], lib.VAL: [], lib.TEST: []}
timer = zero.Timer()
checkpoint_path = output / 'checkpoint.pt'
def print_epoch_info():
    """Print the epoch header plus the run's static configuration."""
    print(f'\n>>> Epoch {stream.epoch} | {lib.format_seconds(timer())} | {output}')
    print(
        ' | '.join(
            f'{k} = {v}'
            for k, v in {
                'lr': lib.get_lr(optimizer),
                'batch_size': batch_size,
                'epoch_size': stats['epoch_size'],
                'n_parameters': stats['n_parameters'],
            }.items()
        )
    )
@torch.no_grad()
def evaluate(parts):
    """Compute predictions and metrics for the given dataset parts.

    Returns ``(metrics, predictions)``; predictions are raw logits as CPU
    numpy arrays, batched to respect ``eval_batch_size``.
    """
    model.eval()
    metrics = {}
    predictions = {}
    for part in parts:
        predictions[part] = (
            torch.cat(
                [
                    model(X_num[part][idx], None if X_cat is None else X_cat[part][idx])
                    for idx in lib.IndexLoader(
                        len(X_num[part]),
                        args['training']['eval_batch_size'],
                        False,
                        device,
                    )
                ]
            )
            .cpu()
            .numpy()
        )
        try:
            metrics[part] = lib.calculate_metrics(
                D.info['task_type'],
                Y[part].numpy(),  # type: ignore[code]
                predictions[part],  # type: ignore[code]
                'logits',
                y_info,
            )
        except ValueError as err:
            # Known multiclass roc_auc failure mode: record a sentinel score
            # instead of crashing; anything else is re-raised by the assert.
            assert (
                'Target scores need to be probabilities for multiclass roc_auc'
                in str(err)
            )
            metrics[part] = {'score': -999999999.0}
    for part, part_metrics in metrics.items():
        print(f'[{part:<5}]', lib.make_summary(part_metrics))
    return metrics, predictions
def save_checkpoint(final):
    """Persist model/optimizer/loop state and run stats to checkpoint.pt."""
    torch.save(
        {
            'model': model.state_dict(),
            'optimizer': optimizer.state_dict(),
            'stream': stream.state_dict(),
            'random_state': zero.get_random_state(),
            # Also snapshot the loop-level globals by name.
            **{
                x: globals()[x]
                for x in [
                    'progress',
                    'stats',
                    'timer',
                    'training_log',
                ]
            },
        },
        checkpoint_path,
    )
    lib.dump_stats(stats, output, final)
    lib.backup_output(output)
# %%
# Main training loop with early stopping driven by ProgressTracker.
timer.run()
for epoch in stream.epochs(args['training']['n_epochs']):
    print_epoch_info()
    model.train()
    epoch_losses = []
    for batch_idx in epoch:
        optimizer.zero_grad()
        loss = loss_fn(
            model(
                X_num[lib.TRAIN][batch_idx],
                None if X_cat is None else X_cat[lib.TRAIN][batch_idx],
            ),
            Y_device[lib.TRAIN][batch_idx],
        )
        if loss.isnan():
            print('Loss is nan!')
            break
        loss.backward()
        optimizer.step()
        epoch_losses.append(loss.detach())
    # A NaN loss aborts the whole run, not just the epoch.
    if loss.isnan():
        break
    epoch_losses = torch.stack(epoch_losses).tolist()
    training_log[lib.TRAIN].extend(epoch_losses)
    print(f'[{lib.TRAIN}] loss = {round(sum(epoch_losses) / len(epoch_losses), 3)}')
    metrics, predictions = evaluate(lib.PARTS)
    for k, v in metrics.items():
        training_log[k].append(v)
    progress.update(metrics[lib.VAL]['score'])
    if progress.success:
        # Validation score improved: checkpoint and save predictions.
        print('New best epoch!')
        stats['best_epoch'] = stream.epoch
        stats['metrics'] = metrics
        save_checkpoint(False)
        for k, v in predictions.items():
            np.save(output / f'p_{k}.npy', v)
    elif progress.fail:
        break
# %%
# Final evaluation with the best checkpoint restored.
print('\nRunning the final evaluation...')
model.load_state_dict(torch.load(checkpoint_path)['model'])
stats['metrics'], predictions = evaluate(lib.PARTS)
for k, v in predictions.items():
    np.save(output / f'p_{k}.npy', v)
stats['time'] = lib.format_seconds(timer())
save_checkpoint(True)
print('Done!')
|
[
"strausmg@gmail.com"
] |
strausmg@gmail.com
|
b1f2496ebe49df46a46292c6d998c4fbb52a383b
|
a10b0c634e83a652b02db4e6a24ba7d0429dfa05
|
/main.py
|
1caa556c793abd75cfe2dcbcde0ba8e6167be17a
|
[] |
no_license
|
Diptasri/Intro-to-Ai-ML-5600
|
9f822327d5825e0e9c4cf7b446b61eaa9ee39a55
|
05efaa4f973f8c55bb12cb0b65407c3f9ce9bb50
|
refs/heads/main
| 2023-07-27T03:47:36.049328
| 2021-09-12T03:33:41
| 2021-09-12T03:33:41
| 399,465,959
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,388
|
py
|
import matplotlib.pyplot as plt
# inputs
a = float (input ("Enter a value: "))
b = float (input ("Enter b value: "))
# Endpoints: A = (a+b, a-b) and B = (a-b, a+b).
ax = a + b
ay = a - b
bx = a - b
by = a + b
# Internal division section formula
# P divides segment AB internally in the ratio a:b.
# NOTE(review): a + b == 0 (and a == b below) makes these divide by zero —
# there is no input guard.
Px = ((a * a) + (b * b)) / (a + b)
Py = ((a * a) + (2 * a * b) - (b * b)) / (a + b)
# External division section formula
# Q divides segment AB externally in the ratio a:b.
Qx = ((a * a) - (2 * a * b) - (b * b)) / (a - b)
Qy = ((a * a) + (b * b)) / (a - b)
# Plot A, B and the internal division point P.
plt.figure(1)
plt.scatter([ax, bx, Px], [ay, by, Py], color= 'r')
plt.text(ax, ay + 0.5, '({},{})'.format(ax, ay))
plt.text(bx, by + 0.5, '({},{})'.format(bx, by))
plt.text(Px, Py + 0.5, '({},{})'.format(Px, Py))
plt.plot([ax, bx, Px], [ay, by, Py])
plt.title("Internal Division Section")
plt.xlabel("X- Axis")
plt.ylabel("Y- Axis")
plt.grid(True)
plt.show()
# Plot A, B and the external division point Q.
plt.figure(2)
plt.scatter([ax, bx, Qx], [ay, by, Qy], color= 'r')
plt.text(ax, ay + 0.5, '({},{})'.format(ax, ay))
plt.text(bx, by + 0.5, '({},{})'.format(bx, by))
plt.text(Qx, Qy + 0.5, '({},{})'.format(Qx, Qy))
plt.plot([ax, bx, Qx], [ay, by, Qy])
plt.title("External Division Section")
plt.xlabel("X- Axis")
plt.ylabel("Y- Axis")
plt.grid(True)
plt.show()
# Final coordinates
internal_coordinates = (Px, Py)
external_coordinates = (Qx, Qy)
# Printing outputs
print ("(Px, Py) = " + str (internal_coordinates))
print ("(Qx, Qy) = " + str (external_coordinates))
|
[
"noreply@github.com"
] |
Diptasri.noreply@github.com
|
1ee56e00fc1f6518207dde8d7e2c4ad70939ccb7
|
62b90959763f40954a7c6270bfb0529b536b2888
|
/user/forms.py
|
e3f2e1e1a1c2c677d176cbff33084fa0620bcb3a
|
[
"MIT"
] |
permissive
|
thiagosouzalink/blogphoto_Django
|
68698c4fc684f0ba1d9dde795a07f72df32ead38
|
7d09f44b196897c4d31fff2eff8d2a164e44db27
|
refs/heads/master
| 2023-02-20T20:32:00.527084
| 2021-01-25T15:16:07
| 2021-01-25T15:16:07
| 332,782,817
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,226
|
py
|
from django import forms
from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from .models import CustomUser
class UserForm(forms.ModelForm):
""" Formulรกrio para cadastrar usuรกrio"""
username = forms.CharField(
label='Usuรกrio',
error_messages= {
'invalid': 'Nome de usuรกrio invรกlido, informe apenas letras, nรบmeros ou @, ., +, -, _',
'max_length': 'Vocรช excedeu o limite de caracteres.',
'unique': 'Nome de usuรกrio jรก existe.'
},
help_text= "Requeridos 150 caracteres ou menos. Letras, dรญgitos e @ /. / + / - / _ apenas",
widget=forms.TextInput(attrs={'placeholder':'Username'})
)
email = forms.EmailField(
label='E-mail',
error_messages={'invalid': 'E-mail invรกlido.'},
help_text='user@dominio.com',
widget=forms.TextInput(attrs={'placeholder':'E-mail'})
)
first_name = forms.CharField(
label='Nome',
error_messages={'max_length': 'Nome nรฃo pode ter mais de 30 caracteres'},
widget=forms.TextInput(attrs={'placeholder':'Nome'})
)
last_name = forms.CharField(
label='Sobrenome',
error_messages={'max_length': 'Sobrenome nรฃo pode ter mais de 150 caracteres'},
widget=forms.TextInput(attrs={'placeholder':'Sobrenome'})
)
telefone = forms.CharField(
label='Telefone',
help_text='(xx) xxxxx-xxxx',
widget=forms.TextInput(attrs={'placeholder':'Telefone...'})
)
password = forms.CharField(
label='Senha',
help_text="Digite uma senha segura",
widget=forms.PasswordInput(attrs={'placeholder':'Senha'})
)
password2 = forms.CharField(
label='Confirmar senha',
widget=forms.PasswordInput(attrs={'placeholder':'Repetir senha'})
)
class Meta:
model = CustomUser
fields = (
'username',
'email',
'first_name',
'last_name',
'telefone'
)
def clean_password2(self):
passwords = self.cleaned_data
if passwords['password2'] != passwords['password']:
raise forms.ValidationError("Senhas diferentes")
return passwords['password2']
def save(self, commit=True):
user = CustomUser.objects.create_user(
username=self.cleaned_data['username'],
email=self.cleaned_data['email'],
password=self.cleaned_data['password'],
first_name=self.cleaned_data['first_name'],
last_name=self.cleaned_data['last_name'],
telefone=self.cleaned_data['telefone']
)
return user
class UserProfileForm(forms.ModelForm):
""" Formulรกrio para atualizar dados do usuรกrio"""
facebook = forms.CharField(widget=forms.TextInput(attrs={'placeholder': 'https://www.facebook.com/seu_username'}), required=False)
instagram = forms.CharField(widget=forms.TextInput(attrs={'placeholder': 'https://www.instagram.com/seu_username'}), required=False)
twitter = forms.CharField(widget=forms.TextInput(attrs={'placeholder': 'https://www.twitter.com/seu_username'}), required=False)
class Meta:
model = CustomUser
fields = fields = (
'username',
'email',
'first_name',
'last_name',
'telefone',
'facebook',
'instagram',
'twitter',
'bio'
)
class CustomUserCreateForm(UserCreationForm):
""" Formulรกrio para criar usuรกrio no painel administrativo"""
class Meta:
model = CustomUser
fields = ('first_name', 'last_name', 'telefone')
labels = {'username': 'Username/E-mail'}
def save(self, commit=True):
user = super().save(commit=False)
user.set_password(self.cleaned_data["password1"])
user.username = self.cleaned_data["username"]
if commit:
user.save()
return user
class CustomUserChangeForm(UserChangeForm):
""" Atualizar usuรกrio no painel administrativo"""
class Meta:
model = CustomUser
fields = ('email', 'first_name', 'last_name', 'telefone')
|
[
"thiagolsmail@gmail.com"
] |
thiagolsmail@gmail.com
|
21d962029b74b4eafe0c5b512082596bdf3800f2
|
95e7cf518b8d71270a7de6e7c7254861010f5035
|
/garage/tf/algos/batch_polopt.py
|
4245380d53581a9a7d6e72637049760557283eaf
|
[
"MIT"
] |
permissive
|
reslthrowaway/garage
|
aaeadf7e918d80d467b2fcce61c50e8404480f83
|
e921119434d205b6f644f139f6075516fb9ece74
|
refs/heads/master
| 2020-03-28T08:32:58.835060
| 2018-09-08T21:55:41
| 2018-09-08T21:55:41
| 147,972,769
| 2
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,354
|
py
|
import time
import tensorflow as tf
from garage.algos import RLAlgorithm
import garage.misc.logger as logger
from garage.tf.plotter import Plotter
from garage.tf.samplers import BatchSampler
from garage.tf.samplers import VectorizedSampler
class BatchPolopt(RLAlgorithm):
"""
Base class for batch sampling-based policy optimization methods.
This includes various policy gradient methods like vpg, npg, ppo, trpo,
etc.
"""
def __init__(self,
env,
policy,
baseline,
scope=None,
n_itr=500,
start_itr=0,
batch_size=5000,
max_path_length=500,
discount=0.99,
gae_lambda=1,
plot=False,
pause_for_plot=False,
center_adv=True,
positive_adv=False,
store_paths=False,
whole_paths=True,
fixed_horizon=False,
sampler_cls=None,
sampler_args=None,
force_batch_sampler=False,
**kwargs):
"""
:param env: Environment
:param policy: Policy
:type policy: Policy
:param baseline: Baseline
:param scope: Scope for identifying the algorithm. Must be specified if
running multiple algorithms
simultaneously, each using different environments and policies
:param n_itr: Number of iterations.
:param start_itr: Starting iteration.
:param batch_size: Number of samples per iteration.
:param max_path_length: Maximum length of a single rollout.
:param discount: Discount.
:param gae_lambda: Lambda used for generalized advantage estimation.
:param plot: Plot evaluation run after each iteration.
:param pause_for_plot: Whether to pause before contiuing when plotting.
:param center_adv: Whether to rescale the advantages so that they have
mean 0 and standard deviation 1.
:param positive_adv: Whether to shift the advantages so that they are
always positive. When used in conjunction with center_adv the
advantages will be standardized before shifting.
:param store_paths: Whether to save all paths data to the snapshot.
:return:
"""
self.env = env
self.policy = policy
self.baseline = baseline
self.scope = scope
self.n_itr = n_itr
self.start_itr = start_itr
self.batch_size = batch_size
self.max_path_length = max_path_length
self.discount = discount
self.gae_lambda = gae_lambda
self.plot = plot
self.pause_for_plot = pause_for_plot
self.center_adv = center_adv
self.positive_adv = positive_adv
self.store_paths = store_paths
self.whole_paths = whole_paths
self.fixed_horizon = fixed_horizon
if sampler_cls is None:
if self.policy.vectorized and not force_batch_sampler:
sampler_cls = VectorizedSampler
else:
sampler_cls = BatchSampler
if sampler_args is None:
sampler_args = dict()
self.sampler = sampler_cls(self, **sampler_args)
self.init_opt()
def start_worker(self, sess):
self.sampler.start_worker()
if self.plot:
self.plotter = Plotter(self.env, self.policy, sess)
self.plotter.start()
def shutdown_worker(self):
self.sampler.shutdown_worker()
if self.plot:
self.plotter.shutdown()
def obtain_samples(self, itr):
return self.sampler.obtain_samples(itr)
def process_samples(self, itr, paths):
return self.sampler.process_samples(itr, paths)
def train(self, sess=None):
created_session = True if (sess is None) else False
if sess is None:
sess = tf.Session()
sess.__enter__()
sess.run(tf.global_variables_initializer())
self.start_worker(sess)
start_time = time.time()
last_average_return = None
for itr in range(self.start_itr, self.n_itr):
itr_start_time = time.time()
with logger.prefix('itr #%d | ' % itr):
logger.log("Obtaining samples...")
paths = self.obtain_samples(itr)
logger.log("Processing samples...")
samples_data = self.process_samples(itr, paths)
last_average_return = samples_data["average_return"]
logger.log("Logging diagnostics...")
self.log_diagnostics(paths)
logger.log("Optimizing policy...")
self.optimize_policy(itr, samples_data)
logger.log("Saving snapshot...")
params = self.get_itr_snapshot(itr, samples_data)
if self.store_paths:
params["paths"] = samples_data["paths"]
logger.save_itr_params(itr, params)
logger.log("Saved")
logger.record_tabular('Time', time.time() - start_time)
logger.record_tabular('ItrTime', time.time() - itr_start_time)
logger.dump_tabular(with_prefix=False)
if self.plot:
self.plotter.update_plot(self.policy, self.max_path_length)
if self.pause_for_plot:
input("Plotting evaluation run: Press Enter to "
"continue...")
self.shutdown_worker()
if created_session:
sess.close()
return last_average_return
def log_diagnostics(self, paths):
self.policy.log_diagnostics(paths)
self.baseline.log_diagnostics(paths)
def init_opt(self):
"""
Initialize the optimization procedure. If using tensorflow, this may
include declaring all the variables and compiling functions
"""
raise NotImplementedError
def get_itr_snapshot(self, itr, samples_data):
"""
Returns all the data that should be saved in the snapshot for this
iteration.
"""
raise NotImplementedError
def optimize_policy(self, itr, samples_data):
raise NotImplementedError
|
[
"reslthrowaway@yandex.com"
] |
reslthrowaway@yandex.com
|
8602e4c68c80a37d2046a827fca331f3acf97906
|
c1cad053e9fbfe536722c13338fff63e471ff252
|
/Next.tech/Analyzing-Text-Data/solution/chunking.py
|
58c9c3ededb97de3f1ce991ac04f48ebe62345aa
|
[] |
no_license
|
adataholic/Datascience
|
9bedfda47c3b84e72e9833b293ce2f602ca2ec9f
|
766a34c480eec61afdd0f485a4e77428cf3eba95
|
refs/heads/master
| 2020-12-27T01:33:55.458858
| 2020-01-06T23:48:28
| 2020-01-06T23:48:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 803
|
py
|
import nltk
nltk.download('brown')
import numpy as np
from nltk.corpus import brown
# Split a text into chunks
def splitter(data, num_words):
words = data.split(' ')
output = []
cur_count = 0
cur_words = []
for word in words:
cur_words.append(word)
cur_count += 1
if cur_count == num_words:
output.append(' '.join(cur_words))
cur_words = []
cur_count = 0
output.append(' '.join(cur_words) )
return output
if __name__=='__main__':
# Read the data from the Brown corpus
data = ' '.join(brown.words()[:10000])
# Number of words in each chunk
num_words = 1700
chunks = []
counter = 0
text_chunks = splitter(data, num_words)
print "Number of text chunks =", len(text_chunks)
|
[
"akshaymm13b034@gmail.com"
] |
akshaymm13b034@gmail.com
|
7c07de17053aa19697fa3b8ec39bfe5a624ff542
|
feca84ca57eee5045a1235006e472b4766ca3b06
|
/flaskapi/graphapp01.py
|
083c083606a6b6a7422cad37d931d98e190b6d4e
|
[] |
no_license
|
Chasbott182/mycode
|
1b89fa5d1d94a4ff39c8ce98b046189a2d9df10f
|
5afcd3cc32feee16e0fecc85a6165aaa01f60774
|
refs/heads/main
| 2023-07-14T17:06:34.618830
| 2021-08-13T20:46:59
| 2021-08-13T20:46:59
| 392,015,327
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,815
|
py
|
#!/usr/bin/python3
import numpy as np # number operations
import yaml # pyyaml for yaml
import re # regex
import paramiko # ssh into servers
from flask import Flask, render_template
import matplotlib.pyplot as plt
def sshlogin(ip, un, passw):
sshsession = paramiko.SSHClient()
sshsession.set_missing_host_key_policy(paramiko.AutoAddPolicy())
sshsession.connect(hostname=ip, username=un, password=passw)
ssh_stdin, ssh_stdout, ssh_stderr = sshsession.exec_command("cat /proc/uptime")
sshresult = ssh_stdout.read().decode('utf-8').split()[0]
with open("sshresult", "w") as myfile:
myfile.write(sshresult)
days = (int(float(sshresult)) / 86400) # convert uptime in sec to days
sshsession.close()
print(days)
return days
app = Flask(__name__)
@app.route("/graphin")
def graphin():
with open("C:\mycode\sshpass.yml") as sshpass: # creds for our servers
creds = yaml.load(sshpass)
svruptime = []
xtick = []
for cred in creds:
xtick.append(cred['ip'])
resp = sshlogin(cred['ip'], cred['un'], cred['passw'])
svruptime.append(resp)
xtick = tuple(xtick) # create a tuple
svruptime = tuple(svruptime)
# graphin
N = 2 # total number of bars
ind = np.arange(N) # the x locations for the groups
width = 0.35 # the width of the bars: can also be len(x) sequence
p1 = plt.bar(ind, svruptime, width)
plt.ylabel('Uptime in Days')
plt.title('Uptime of Servers in Days')
plt.xticks(ind, xtick)
plt.yticks(np.arange(0, 20, 1)) # prob want to turn this into a log scale
plt.savefig('static/status.png') # might want to save this with timestamp for history purposes
return render_template("graph.html")
if __name__ == "__main__":
app.run(host="0.0.0.0", port=3000)
|
[
"chasbott182@gmail.com"
] |
chasbott182@gmail.com
|
3ffd2a81defe2dd17e7c4f0ee0d2e75c6f233e90
|
b481964a107b7a1afd5997e2736235ffb2c17138
|
/hud.py
|
86a51accc3edf2f32b32261581f9c30e58e9030f
|
[
"MIT"
] |
permissive
|
marax27/pyNoid
|
06503b576e97e839bcfa8d132bf8855769db7777
|
c988db7ef6750352ecb6e3749f73302da4b71488
|
refs/heads/master
| 2021-09-04T11:19:22.625982
| 2018-01-18T07:03:43
| 2018-01-18T07:08:23
| 111,729,907
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,505
|
py
|
#!/usr/bin/python3
from constants import Constants
from vec2 import vec2
import sdl2.sdlttf
import sdl2.ext
import sdl2
import misc
class UIElement:
def __init__(self):
self.position = vec2(0,0)
self.size = vec2(0,0)
def centerHorizontally(self):
self.position.x = (Constants.WINDOW_SIZE.x - self.size[0])//2
def centerVertically(self):
self.position.y = (Constants.WINDOW_SIZE.y - self.size[1])//2
def center(self):
self.centerHorizontally()
self.centerVertically()
class Text(UIElement):
"""Single piece of text."""
font_manager = sdl2.ext.FontManager('resources/vga_437.ttf', size=24)
def __init__(self, text, renderer, size=None, color=None):
self.position = vec2(0,0)
self.texture = None
self.load(text, renderer, size, color)
def render(self, renderer, pos=None):
"""Render the text, using the renderer."""
r = sdl2.SDL_Rect(self.position[0] if not pos else pos[0],
self.position[1] if not pos else pos[1],
self.size[0],
self.size[1])
sdl2.SDL_RenderCopy(renderer.renderer, self.texture, None, r)
def load(self, text, renderer, size=None, color=None):
"""Update a Text object."""
if self.texture:
sdl2.SDL_DestroyTexture(self.texture) #That does the trick.
surf = Text.font_manager.render(text, size=size, color=color)
sprite = sdl2.ext.SoftwareSprite(surf, True)
self.size = (surf.w, surf.h)
self.texture = sdl2.SDL_CreateTextureFromSurface(renderer.renderer, sprite.surface)
class Button(UIElement):
"""Button class."""
IDLE, HOVER, PRESSED = 0x1001, 0x1002, 0x1003
@staticmethod
def buildClickableText(message, renderer, idle_color, pressed_color, hover_color, size, pos=None):
"""Generates a text label that will change color according to whether it's pressed or not."""
return Button(
Text(message, renderer, size, idle_color),
Text(message, renderer, size, pressed_color),
Text(message, renderer, size, hover_color),
pos
)
def __init__(self, idle_state, pressed_state, hover_state=None, pos=None):
self.states = {}
self.states[self.IDLE] = idle_state
self.states[self.HOVER] = hover_state if hover_state is not None else idle_state
self.states[self.PRESSED] = pressed_state
self.state = self.IDLE
self.position = pos if pos else vec2(0, 0)
# Size restriction.
if self.states[self.IDLE].size != self.states[self.HOVER].size or self.states[self.IDLE].size != self.states[self.PRESSED].size:
raise ValueError()
self.size = self.states[self.IDLE].size
def render(self, renderer):
self.states[self.state].render(renderer, self.position)
def handleEvent(self, event):
"""Handle mouse events."""
mx, my = misc.getMousePos()
if ((self.position[0] <= mx < self.position[0] + self.size[0]) and
(self.position[1] <= my < self.position[1] + self.size[1])):
if event.type == sdl2.SDL_MOUSEBUTTONDOWN:
self.state = self.PRESSED
else:
self.state = self.HOVER
else:
self.state = self.IDLE
def isPressed(self):
return self.state == self.PRESSED
class VerticalContainer:
def __init__(self, elements=[], y_pos=0):
self.elem = elements
self.y_pos = y_pos
self.adjust()
def adjust(self):
if len(self.elem):
self.elem[0].position.y = self.y_pos
for i in range(1, len(self.elem)):
self.elem[i].position.y = self.elem[i-1].position.y+self.elem[i-1].size[1]
for i in self.elem:
i.centerHorizontally()
def render(self, renderer):
for i in self.elem:
i.render(renderer)
|
[
"kacpert314@gmail.com"
] |
kacpert314@gmail.com
|
6919b3a14f65b0c108aa619c12b7e531c0c04e51
|
6328387281d1b2b0bec13d51916d916fea1e7351
|
/myvenv/bin/easy_install-3.7
|
185061d76e1848a152ec8b720265f7b58b47dc6b
|
[] |
no_license
|
MedApplive/my-first-blog
|
81e40789efe28cf99c7a342f7b8c48e879e70b4c
|
0268eb6a64aa2733552b285accceba8cb82cd64d
|
refs/heads/master
| 2020-05-01T11:58:28.305631
| 2020-01-20T17:04:57
| 2020-01-20T17:04:57
| 177,456,209
| 0
| 0
| null | 2019-09-30T14:45:40
| 2019-03-24T18:57:54
|
Python
|
UTF-8
|
Python
| false
| false
| 272
|
7
|
#!/home/maurice/Documents/djangogirls/myvenv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"msourir@gmail.com"
] |
msourir@gmail.com
|
339b6652e6902305375b21f8ec23ad0e0f230c76
|
6ddd0cfdbaca412ee2b3a7a01e7fcaad63550ac2
|
/Python GUI/GUI window with various label,buttons,colors,font.py
|
b0274813b6a312766b548cf1260d2688c7d120c0
|
[] |
no_license
|
pranshu798/Python-programs
|
b302be9c9fd3aaf66824857bdb8bb2d9b8a9b5de
|
fb6e712594c72d8ea0be1026e6fb26c7fdd639ba
|
refs/heads/master
| 2022-12-08T15:52:21.625098
| 2020-09-14T11:57:26
| 2020-09-14T11:57:26
| 281,318,764
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 721
|
py
|
#Python program to demontrate complete details of GUI window
from tkinter import *
gui = Tk()
def hello():
c = a.get()
guil3 = Label(text=c, fg='red', bg='yellow', font=10).pack()
def delete():
guil4 = Label(text='Delete', fg='red', bg='yellow', font=10).pack()
a = StringVar()
gui.title("Aliyas Shaik Button Program")
gui.geometry("500x500+100+100")
guil1 = Label(text='Label One', fg='red', bg='yellow', font=10).pack()
button1 = Button(text='Enter', fg='red', bg='yellow', command=hello, font=10).pack()
button2 = Button(text='Delete', fg='red', bg='yellow', command=delete, font=10).pack()
# Place method places label @ specified place within the window
text = Entry(textvariable=a).pack()
gui.mainloop()
|
[
"pranshuverma798@gmail.com"
] |
pranshuverma798@gmail.com
|
8402be75cce1ddbd62ff54e6ca1af746d547ba7e
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p04044/s043863854.py
|
77675a21e84977960d992326c6742602cc68d034
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 172
|
py
|
N, L = map(int,input().split())
word = []
count = 0
while N > count:
S = input()
word.append(S)
count += 1
word = sorted(word)
ans = ''.join(word)
print(ans)
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
a799262d26ff055133010df02e61e893ddf72751
|
97ebc5e257d68eb9f46551d1897f3901e4faca41
|
/poketypes/basic.py
|
3037cd29cb0fec3108555337432f07f2ec491987
|
[
"BSD-2-Clause"
] |
permissive
|
GrenderG/PokeDuino
|
16f6ab982c447d15ff5ed9ec55a9fd35551d3fcd
|
5b2d7f8fa64edb52d68b647e723f513405b391d4
|
refs/heads/master
| 2021-06-01T02:33:46.419676
| 2016-06-29T09:57:47
| 2016-06-29T09:57:47
| 285,349,854
| 1
| 0
|
NOASSERTION
| 2020-08-05T16:44:29
| 2020-08-05T16:44:28
| null |
UTF-8
|
Python
| false
| false
| 1,941
|
py
|
import ctypes
from . import encoding
class PokeMetaStructure(type(ctypes.BigEndianStructure)):
def __new__(metacls, name, bases, dct):
cls = super().__new__(metacls, name, bases, dct)
for member, adapter_type in cls._adapters_:
cls.buildProperty(member, adapter_type)
return cls
def buildProperty(cls, member, adapter_type):
def get(self):
return adapter_type(getattr(self, member))
def set(self, value):
if isinstance(value, adapter_type):
setattr(self, member, value.value)
else:
setattr(self, member, value)
if member.startswith("_"):
property_name = member[1:]
else:
property_name = member + "_adapter"
setattr(cls, property_name,
property(fget=get, fset=set, doc="%s adapter to member %s" % (adapter_type.__name__, member)))
class PokeStructure(ctypes.BigEndianStructure, metaclass=PokeMetaStructure):
_pack_ = 1
_adapters_ = []
@classmethod
def fromBytes(cls, data):
return cls.from_buffer_copy(data)
def bytes(self):
return ctypes.string_at(ctypes.byref(self), ctypes.sizeof(self))
def bytecount(self):
return ctypes.sizeof(self)
def Pokearray(length):
# okay, I learned.
# It's not possible to use a custom base class
# in a ctypes.Structure field. Forget about it
@classmethod
def fromBytes(cls, data):
return cls(*data)
def asBytes(self):
return bytes(iter(self))
t = ctypes.c_uint8 * length
t.fromBytes = fromBytes
t.bytes = asBytes
return t
def Pokestring(length):
@classmethod
def fromString(cls, data, fillUp=False):
encoded = encoding.encode(data) + encoding.ENDCHAR
if fillUp:
encoded = encoded.ljust(length, encoding.ENDCHAR)
return cls(*encoded[:length])
def toString(self):
encoded = self.bytes().partition(encoding.ENDCHAR)[0]
return encoding.decode(encoded)
t = Pokearray(length)
t.fromString = fromString
t.toString = toString
return t
def PaddingBytes(length):
return ("padding", Pokearray(length))
|
[
"sven.koehler@student.hpi.uni-potsdam.de"
] |
sven.koehler@student.hpi.uni-potsdam.de
|
e18bf50cd5e5c0cee6e3670840430470da5195de
|
f1e4a8ab1ce478b3a95e8e8a74faa16409ac86e2
|
/lambdas/InitializeDBCustomLambda/index.py
|
7d26789efb9fa031b09f6bbee5909f5d6e904771
|
[] |
no_license
|
Ryanjlowe/pinpoint-engagement-scoring
|
6b761fd7ada905db251d42ec1d4b47c0e27ad810
|
a75f29f687c799ed6e0125c3c0044cc39c9dd2d5
|
refs/heads/master
| 2020-09-30T05:02:39.204421
| 2019-12-13T17:41:44
| 2019-12-13T17:41:44
| 227,209,468
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,868
|
py
|
import json
import boto3
import logging
import os
from botocore.vendored import requests
dynamodb = boto3.resource('dynamodb')
PINPOINT_PROJECT_ID = os.environ.get('PINPOINT_PROJECT_ID')
scoring_definition_table = dynamodb.Table(os.environ.get('SCORING_DEFINITION_TABLE'))
def lambda_handler(event, context):
global log_level
log_level = str(os.environ.get('LOG_LEVEL')).upper()
if log_level not in [
'DEBUG', 'INFO',
'WARNING', 'ERROR',
'CRITICAL'
]:
log_level = 'ERROR'
logging.getLogger().setLevel(log_level)
logging.info(event)
try:
populate_score_definition('_email.click', 50)
populate_score_definition('_email.open', 10)
populate_score_definition('_email.delivered', 2)
populate_score_definition('_email.hardbounce', -1000)
populate_score_definition('_email.complaint', -1000)
populate_score_definition('_email.unsubscribe', -500)
populate_score_definition('_SMS.SUCCESS', 2)
populate_score_definition('_SMS.OPTOUT', -500)
populate_score_definition('_campaign.send', 2)
populate_score_definition('_campaign.opened_notification', 50)
populate_score_definition('_campaign.received_foreground', 2)
populate_score_definition('_campaign.received_background', 2)
populate_score_definition('_session.start', 2)
populate_score_definition('_userauth.sign_up', 50)
populate_score_definition('_monetization.purchase', 100)
except Exception as e:
logging.error('Received Error while populating default values: %s', e)
send(event, context, 'FAILED', {})
else:
send(event, context, 'SUCCESS', {})
def populate_score_definition(event_type, score_offset):
try:
scoring_definition_table.put_item(
Item={
'EventType': event_type,
'PinpointProjectId': PINPOINT_PROJECT_ID,
'ScoreOffset':score_offset
}
)
except Exception as e:
logging.error('Received Error while populate_score_definition: %s', e)
raise e
######
# Following taken from: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/cfn-lambda-function-code-cfnresponsemodule.html#w2ab1c20c25c14b9c15
######
# Copyright 2016 Amazon Web Services, Inc. or its affiliates. All Rights Reserved.
# This file is licensed to you under the AWS Customer Agreement (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at http://aws.amazon.com/agreement/ .
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied.
# See the License for the specific language governing permissions and limitations under the License.
def send(event, context, responseStatus, responseData, physicalResourceId=None, noEcho=False):
responseUrl = event['ResponseURL']
print(responseUrl)
responseBody = {}
responseBody['Status'] = responseStatus
responseBody['Reason'] = 'See the details in CloudWatch Log Stream: ' + context.log_stream_name
responseBody['PhysicalResourceId'] = physicalResourceId or context.log_stream_name
responseBody['StackId'] = event['StackId']
responseBody['RequestId'] = event['RequestId']
responseBody['LogicalResourceId'] = event['LogicalResourceId']
responseBody['NoEcho'] = noEcho
responseBody['Data'] = responseData
json_responseBody = json.dumps(responseBody)
print("Response body:\n" + json_responseBody)
headers = {
'content-type' : '',
'content-length' : str(len(json_responseBody))
}
try:
response = requests.put(responseUrl,
data=json_responseBody,
headers=headers)
print("Status code: " + response.reason)
except Exception as e:
print("send(..) failed executing requests.put(..): " + str(e))
|
[
"rjlowe@amazon.com"
] |
rjlowe@amazon.com
|
e9dfc09020ce8682f64b107a1c8ad0d41d060345
|
b1b520c9bae5b241405dbc1a6b25ef928877c317
|
/defangIpAddress.py
|
7bed80a95e1468a02ca4701cc4900f4fa9552b8a
|
[] |
no_license
|
TBobcat/Leetcode
|
0ee5d06be3f7f10077961a8e3aedcd28fe4cb85a
|
3b6e3d38ac2d5158329a9444ad7e7e7ee9d07176
|
refs/heads/main
| 2023-06-04T10:40:00.275728
| 2021-06-16T23:05:35
| 2021-06-16T23:05:35
| 375,470,361
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 660
|
py
|
def defangIPaddr(address):
"""
:type address: str
:rtype: str
"""
## use list(str) and str.join(list_str)
## join can also be used like this
## 'abc'.join(address1.split('.')) -> '1abc1abc1abc1'
## str.replace() would also be useful
res = ""
list_address = list(address)
for i,char in enumerate(list_address):
if char == ".":
list_address[i] = "[.]"
res = res.join(list_address)
return res
if __name__ == '__main__':
address1 = "1.1.1.1"
address2 = "255.100.50.0"
print(defangIPaddr(address1))
print(defangIPaddr(address2))
else:
pass
|
[
"xiangyuantech@gmail.com"
] |
xiangyuantech@gmail.com
|
37dafa17ed9dc319a258358248dd28b2bbf33390
|
ee8c4c954b7c1711899b6d2527bdb12b5c79c9be
|
/assessment2/amazon/run/core/controllers/old.py
|
b834b8f7e61106d74a5d8d14bccffde5155b3848
|
[] |
no_license
|
sqlconsult/byte
|
02ac9899aebea4475614969b594bfe2992ffe29a
|
548f6cb5038e927b54adca29caf02c981fdcecfc
|
refs/heads/master
| 2021-01-25T14:45:42.120220
| 2018-08-11T23:45:31
| 2018-08-11T23:45:31
| 117,135,069
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 360
|
py
|
#!/usr/bin/env python3
from flask import Blueprint, Flask, render_template, request, url_for
controller = Blueprint('old', __name__, url_prefix='/old')
# @controller.route('/<string:title>', methods=['GET'])
# def lookup(title):
# if title == 'Republic': # TODO 2
# return render_template('republic.html') # TODO 2
# else:
# pass
|
[
"sqlconsult@hotmail.com"
] |
sqlconsult@hotmail.com
|
4106443cb73c63c91456d1feb0e571e206168629
|
0ad1a9530f0765b07d568095cb1534babc4432f5
|
/utils/db.py
|
0391f7d7acc6be817a2340825b6016c54fba89ab
|
[
"MIT"
] |
permissive
|
bryceweiner/Infiniti
|
18acc471b5882bd676e61b840bff793c22d6c272
|
5ea829dfa26c0948970329651d3cacff7788d116
|
refs/heads/master
| 2022-12-15T06:10:10.981048
| 2018-09-25T20:03:02
| 2018-09-25T20:03:02
| 147,550,307
| 1
| 2
|
MIT
| 2022-12-07T23:52:23
| 2018-09-05T16:47:43
|
Python
|
UTF-8
|
Python
| false
| false
| 2,781
|
py
|
import rocksdb,time,ast
from infiniti.params import *
MAX_RETRY_CREATE_DB = 100
def writebatch():
return rocksdb.WriteBatch()
def open_db(filename, logger=None, read_only=False):
db_default_path = (filename, "wallet_test")[filename == ""]
db_path = db_default_path
retry_count = 0
db = None
save_err=None
while db is None and retry_count < MAX_RETRY_CREATE_DB:
opts = rocksdb.Options()
opts.create_if_missing = True
opts.max_open_files = -1
opts.write_buffer_size = 67108864
opts.max_write_buffer_number = 3
opts.target_file_size_base = 67108864
opts.table_factory = rocksdb.BlockBasedTableFactory(
filter_policy=rocksdb.BloomFilterPolicy(10),
block_cache=rocksdb.LRUCache(2 * (1024 ** 3)),
block_cache_compressed=rocksdb.LRUCache(500 * (1024 ** 2)))
try:
db = rocksdb.DB(db_path, opts, read_only)
except Exception as err:
save_err=err
time.sleep(.1)
retry_count += 1
if retry_count == MAX_RETRY_CREATE_DB:
raise save_err
return db
def uuid_exists(object_db,uuid):
try:
_db = open_db(join_path(DATA_PATH,object_db))
it = _db.iteritems()
it.seek(uuid)
items = dict(itertools.takewhile(lambda item: item[0].startswith(uuid), it))
return len(items) == 0
except Exception as err:
raise err
def get_infiniti_object(object_db,uuid):
"""
All Inifiniti objects have a unique UUID, so just dump the object
"""
try:
_db = open_db(join_path(DATA_PATH,object_db))
it = _db.iteritems()
it.seek(uuid)
result = {}
for key,value in dict(itertools.takewhile(lambda item: item[0].startswith(uuid), it)):
_uuid,_field = key.split('.')
_value = value
result.update = { _field : _value }
return result
except Exception as err:
raise err
def put_infiniti_object(object_db,obj):
try:
_db = open_db(join_path(DATA_PATH,object_db))
wb = writebatch()
for attr in dir(obj):
if attr.startswith('_') and not attr.startswith('__'):
wb.put("{0}.{1}".format(obj.uuid,attr),getattr(obj,attr))
db.write(wb)
return True
except Exception as err:
raise err
def utxo_by_address(address,network,block_height):
db = open_db(join_path(join_path(DATA_PATH,network),'utxo'))
it = db.iteritems()
it.seek_to_first()
total = 0
utxo = []
for k,v in it:
addr,amt = v.split('|')
height,tx_hash = k.split('.')
if address == addr:
utxo.append({
'amount':Decimal(amt),
'confirmations':int(int(block_height)-int(height)),
'tx_hash':tx_hash
})
if len(utxo) > 0:
return utxo
else:
return None
def balance_by_address(address,network):
db = open_db(join_path(join_path(DATA_PATH,network),'utxo'))
it = db.iteritems()
it.seek_to_first()
total = 0
for k,v in it:
addr,amt = v.split('|')
if address == addr:
total += Decimal(amt)
return total
|
[
"bryce@altsystem.io"
] |
bryce@altsystem.io
|
7436c86cf9c2ad83dacb6291b7f64493ca90f126
|
56e17984c230be6ee841264d65d7ba6ce949ceb1
|
/DateAndTime/Calendar_Module.py
|
a2a0fd44eca638970bd921c1fc7b96f6358a26ef
|
[] |
no_license
|
Shamsullo/HackerRank_Python-
|
203b4277d7eb1de2236ff983415344a1409277d7
|
1f231f04cdd1cb0cd4e6a65318113ed15f1e47bc
|
refs/heads/master
| 2021-05-15T05:45:58.885667
| 2018-01-11T18:36:28
| 2018-01-11T18:36:28
| 115,595,144
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 262
|
py
|
import calendar, datetime
input_date = input()
month,day,year = map(int,input_date.split())
day_of_the_week = datetime.date(year,month,day)
print calendar.day_name[day_of_the_week.weekday()].upper()
#the code isn't giving an expected result, i wonder why?
|
[
"noreply@github.com"
] |
Shamsullo.noreply@github.com
|
cdbf1c4051359a9d4660b03166c1b23f3c05428f
|
bb097a986d37c5b6d49bf6d1f9df17960e32e36e
|
/final_project/poi_id.py
|
9f2d37fc735529d2c4baaf05ebe14afbc6c019da
|
[] |
no_license
|
noslav/enron_analysis
|
84e32fa2060c35d18e921cdb6647fd04019e7f57
|
9c98dfe8dcf71728cf8a8d75ab4b0c8468bd2d1e
|
refs/heads/master
| 2021-03-22T04:42:56.034087
| 2017-12-07T15:15:55
| 2017-12-07T15:15:55
| 96,874,843
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 20,872
|
py
|
#!/usr/bin/python
import sys
import pickle
sys.path.append("../tools/")
from feature_format import featureFormat, targetFeatureSplit
from tester import dump_classifier_and_data
import pandas
import numpy as np
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import chi2, f_classif, mutual_info_classif
from sklearn.feature_selection import RFE
from sklearn.linear_model import LogisticRegression
from sklearn.decomposition import PCA
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.model_selection import StratifiedShuffleSplit
import matplotlib.pyplot as plt
from sklearn.svm import SVC
from sklearn.model_selection import StratifiedKFold
from sklearn.feature_selection import RFECV
import matplotlib.pyplot
from sklearn.metrics import accuracy_score
from sklearn.metrics import precision_score
from sklearn.model_selection import cross_val_score
#from sklearn.datasets import make_classification
#=================================================================================#
### Task 1: Select what features you'll use.
### features_list is a list of strings, each of which is a feature name.
### The first feature must be "poi".
### for feature selection we run the selectKbest on the data set.
# Load the raw project dataset: {person_name: {feature_name: value}} where
# missing values are encoded as the string "NaN".  (Python 2 text-mode pickle.)
enron_data = pickle.load(open("../final_project/final_project_dataset.pkl", "r"))
# Column names for 'enron_pickle_features.csv'; order matters — the feature
# selection helpers below index into this list positionally.
names = ["salary" , "to_messages" , "deferral_payments" , "total_payments" ,
"exercised_stock_options", "bonus" , "restricted_stock" , "shared_receipt_with_poi" ,
"restricted_stock_deferred", "total_stock_value", "expenses" , "loan_advances" ,
"from_messages" , "other" , "from_this_person_to_poi",
"director_fees" , "deferred_income" , "long_term_incentive",
"from_poi_to_this_person"]
#getting the data and then reading them into dataframes for manipulation and feature
#selection
def makeDataFramesFeature():
    """Load the pre-exported feature/label CSVs and return (X, Y).

    X is the 19-column feature matrix, Y the flattened POI label vector.
    """
    feature_frame = pandas.read_csv('enron_pickle_features.csv')
    poi_frame = pandas.read_csv('enron_poi.csv')
    # Drop the index column both CSVs were written with.
    del feature_frame['Unnamed: 0']
    del poi_frame['Unnamed: 0']
    X = feature_frame.values[:, 0:19]
    Y = poi_frame.values.ravel()
    return X, Y
#X, Y = makeDataFramesFeature()
def featureSelectKbest(X, Y):
    """Score features with chi2, print the scores and the top-4 transform.

    NOTE: chi2 rejects negative inputs, so this cannot actually be used on
    this data set (it contains negative values); kept for reference.
    """
    selector = SelectKBest(score_func=chi2, k=4)
    fitted = selector.fit(X, Y)
    np.set_printoptions(precision=3)
    print(fitted.scores_)
    reduced = fitted.transform(X)
    # Show the first six rows of the reduced matrix.
    print(reduced[0:6, :])
def featureRFE(X,Y): #recursive feature elimination using a random forest classifier
    """Run RFE with a 200-tree random forest, keeping 4 features, and print
    a {column name: kept?} map using the module-level `names` list.

    NOTE(review): the `print("...") % value` lines are Python 2 print
    statements (the whole expression is formatted, then printed); they would
    raise under Python 3.
    """
    from sklearn.ensemble import RandomForestClassifier
    model = RandomForestClassifier(n_estimators=200)
    rfe = RFE(model, 4)
    fit = rfe.fit(X, Y)
    print("Num Features: %d") % fit.n_features_
    print("Selected Features: %s") % fit.support_
    print("Feature Ranking: %s") % fit.ranking_
    #print names
    # Pair each column name with the boolean "was it kept" flag, in order.
    i = 0
    namesdict = {}
    for values in names:
        namesdict[values] = fit.support_[i]
        i +=1
    print namesdict
#==============================================================================
# featureRFEout = { 'salary': False, 'to_messages': False, \
# 'deferral_payments': False, 'total_payments': False, 'loan_advances': False, \
# 'bonus': True, 'restricted_stock_deferred': False, 'total_stock_value': True, \
# 'shared_receipt_with_poi': False, 'long_term_incentive': False, \
# 'exercised_stock_options': False, 'from_messages': False, 'other': True, \
# 'from_poi_to_this_person': False, 'from_this_person_to_poi': False, \
# 'deferred_income': False, 'expenses': True, 'restricted_stock': False, \
# 'director_fees': False }
#==============================================================================
#Recursive feature elimination using random forests suggest I should use
#Bonus, total_stock_value, expenses and other (we can ignore this for now)
def featureRFECross(X,Y): #recursive feature elimination: automatically
    #select good features using cross validation for classification
    """RFECV: recursive feature elimination with 2-fold stratified CV.

    Fits an RBF-kernel SVC, prints the optimal feature count, plots CV score
    versus number of features, then prints a {column name: kept?} map.
    """
    svc = SVC(kernel="rbf") #taking too much time.
    #model = LogisticRegression()
    # The "accuracy" scoring is proportional to the number of correct
    # classifications
    rfecv = RFECV(estimator=svc, step=1, cv=StratifiedKFold(2),
              scoring='accuracy')
    rfecv.fit(X, Y)
    print("Optimal number of features : %d" % rfecv.n_features_)
    # Plot number of features VS. cross-validation scores
    plt.figure()
    plt.xlabel("Number of features selected")
    plt.ylabel("Cross validation score (nb of correct classifications)")
    plt.plot(range(1, len(rfecv.grid_scores_) + 1), rfecv.grid_scores_)
    plt.show()
    # Pair each column name with its "kept" flag, positionally.
    i = 0
    namesdict = {}
    for values in names:
        namesdict[values] = rfecv.support_[i]
        i +=1
    print namesdict
def featurePCA(X,Y): #also tried this but I could not figure out how to name the features
    """Fit a 4-component PCA on X and print explained variance + components.

    NOTE(review): Y is accepted but unused; PCA is unsupervised.  The
    `print("...") % value` line is a Python 2 print statement.
    """
    # for feature extraction
    pca = PCA(n_components=4)
    fit = pca.fit(X)
    # summarize components
    print("Explained Variance: %s") % fit.explained_variance_ratio_
    print(fit.components_)
def featureETC(X,Y): #extra method learnt to select features
    """Print a {column name: importance} map from an ExtraTreesClassifier."""
    # feature extraction using bagged trees
    namesdict = {}
    model = ExtraTreesClassifier()
    model.fit(X, Y)
    #print(model.feature_importances_)
    #print names
    # Pair each column name with its importance score, positionally.
    i = 0
    for values in names:
        namesdict[values] = model.feature_importances_[i]
        i +=1
    print namesdict
#==============================================================================
# featureETCoutput = {'salary': 0.041632257710185139, 'to_messages': 0.039859107742246484,\
# 'deferral_payments': 0.024074150367185947, \
# 'total_payments': 0.064555518959854619, \
# 'loan_advances': 0.0093116015112734048, \
# 'bonus': 0.073052292449863596, \
# 'restricted_stock_deferred': 0.0042245370370370353,\
# 'total_stock_value': 0.09369028839725424, \
# 'shared_receipt_with_poi': 0.071123487226284629, \
# 'long_term_incentive': 0.04046480320342271, \
# 'exercised_stock_options': 0.1129150917901071, \
# 'from_messages': 0.024095798427395669, \
# 'other': 0.054538316103403574, \
# 'from_poi_to_this_person': 0.036628178381556319, \
# 'from_this_person_to_poi': 0.052257500165677206, \
# 'deferred_income': 0.10772890822303453, \
# 'expenses': 0.10845624961908869,\
# 'restricted_stock': 0.038109225603169372, \
# 'director_fees': 0.0032826870819597218}
#==============================================================================
#the results from feature RFE selection recommended using , exercised stock value, total_stock_value, expenses
#===================================================================================#
# Feature set chosen from the RFE results above; "poi" must come first
# (featureFormat/targetFeatureSplit treat column 0 as the label).
features_list = ['poi','salary', 'total_stock_value', 'expenses', 'bonus']
# You will need to use more features + your feature
### Load the dictionary containing the dataset
with open("final_project_dataset.pkl", "r") as data_file:
    data_dict = pickle.load(data_file)
#===================================================================================#
##plotting for outlier removal
### Task 2: Remove outliers #done in .rmd file
# featureFormat converts the dict into a numpy matrix ordered like features_list.
data1 = featureFormat(data_dict, features_list)
def _scatterPlot(data, x_idx, y_idx, x_name, y_name):
    """Scatter column x_idx against column y_idx of a featureFormat matrix,
    label the axes, and show the figure (blocks until the window closes)."""
    for point in data:
        matplotlib.pyplot.scatter(point[x_idx], point[y_idx])
    matplotlib.pyplot.xlabel(x_name)
    matplotlib.pyplot.ylabel(y_name)
    matplotlib.pyplot.show()

# The four public plots below were copy-pasted variants of the same loop;
# they now delegate to _scatterPlot.  Column indices follow features_list:
# 0=poi, 1=salary, 2=total_stock_value, 3=expenses, 4=bonus.

def plotSalaryStock(data):
    """salary vs total_stock_value."""
    _scatterPlot(data, 1, 2, "salary", "total_stock_value")

def plotSalaryExpenses(data):
    """salary vs expenses."""
    _scatterPlot(data, 1, 3, "salary", "expenses")

def plotStockExpenses(data):
    """total_stock_value vs expenses."""
    _scatterPlot(data, 2, 3, "total_stock_value", "expenses")

def plotSalaryBonus(data):
    """salary vs bonus."""
    _scatterPlot(data, 1, 4, "salary", "bonus")
# Visual pass over the raw features to spot outliers (each call blocks on a
# matplotlib window until it is closed).
plotSalaryStock(data1)
plotSalaryExpenses(data1)
plotStockExpenses(data1)
plotSalaryBonus(data1)
#outliers were noticed in data in salary, bonus and total stock value
#===================================================================================#
#outlier removal functions
def salaryOut():
outlierlist = []
for keys in data_dict :
if type(data_dict[keys]["salary"]) == int or keys == "TRAVEL AGENCY IN THE PARK" or keys == "LOCKHART EUGENE E":
if data_dict[keys]["salary"] > 2.5e7:
data_dict[keys]["salary"] = "NaN"
outlierlist.append(keys)
elif data_dict[keys]["salary"] < 0:
data_dict[keys]["salary"] = "NaN"
outlierlist.append(keys)
else:
pass
else:
pass
print "Salary outliers removed :", outlierlist, "\n"
def totalStockValueOut():
outlierlist = []
for keys in data_dict :
if type(data_dict[keys]["total_stock_value"]) == int or keys == "TRAVEL AGENCY IN THE PARK" or keys == "LOCKHART EUGENE E" :
if data_dict[keys]["total_stock_value"] > 4.0e8:
data_dict[keys]["total_stock_value"] = "NaN"
outlierlist.append(keys)
elif data_dict[keys]["total_stock_value"] < 0:
data_dict[keys]["total_stock_value"] = "NaN"
outlierlist.append(keys)
else:
pass
else:
pass
print "Total stock value outliers removed :",outlierlist, "\n"
def bonusOut():
outlierlist = []
for keys in data_dict :
if type(data_dict[keys]["bonus"]) == int or keys == "TRAVEL AGENCY IN THE PARK" or keys == "LOCKHART EUGENE E":
if data_dict[keys]["bonus"] > 0.8e8:
data_dict[keys]["bonus"] = "NaN"
outlierlist.append(keys)
elif data_dict[keys]["bonus"]< 0:
data_dict[keys]["bonus"] = "NaN"
outlierlist.append(keys)
else:
pass
else:
pass
print "Bonus outliers removed :" , outlierlist, "\n"
def expenseOut():
outlierlist = []
for keys in data_dict :
if type(data_dict[keys]["expenses"]) == int or keys == "TRAVEL AGENCY IN THE PARK" or keys == "LOCKHART EUGENE E":
if data_dict[keys]["expenses"] > 5.0e6:
data_dict[keys]["expenses"] = "NaN"
outlierlist.append(keys)
elif data_dict[keys]["expenses"]< 0:
data_dict[keys]["expenses"] = "NaN"
outlierlist.append(keys)
else:
pass
else:
pass
print "Expenses outliers removed ", outlierlist, "\n"
# Blank out the outliers in-place in data_dict (salary, stock, bonus, expenses).
salaryOut()
totalStockValueOut()
bonusOut()
expenseOut()
#===================================================================================#
#replotting after outlier removal
print "Take a look at outlier removed graphs :"
# Re-extract the feature matrix from the now-modified dictionary.
data2 = featureFormat(data_dict, features_list)
# Plot again to confirm the outliers are gone.
plotSalaryStock(data2)
plotSalaryExpenses(data2)
plotStockExpenses(data2)
plotSalaryBonus(data2)
### Task 3: Create new feature(s)
#===================================================================================#
#making the new feature
#Total number of "to emails" to this person - shared_receipt_with_poi - sent_by_poi_to_this_person.
#The lower this number greater the relative importance since it means that most \
#of the communication to this person involved conversation with a poi or in the \
#same conversation with a poi. The number of emails from other people to this \
#person was low.
def newFeature():
    """Add a 'relative_importance' field to every record in data_dict.

    relative_importance = to_messages - (shared_receipt_with_poi +
    from_poi_to_this_person).  The lower it is, the larger the share of the
    person's inbound mail that involved a POI.  Records missing any of the
    three inputs get the string "NaN", matching the data set's convention.
    """
    for keys in data_dict:
        to = data_dict[keys].get("to_messages")
        shared = data_dict[keys].get("shared_receipt_with_poi")
        from_poi = data_dict[keys].get("from_poi_to_this_person")
        # BUGFIX: the original compared only the last field against 'NaN';
        # a truthy 'NaN' string in to_messages or shared_receipt_with_poi
        # would have made the subtraction below raise TypeError.
        if all(v and v != 'NaN' for v in (to, shared, from_poi)):
            data_dict[keys]["relative_importance"] = to - (shared + from_poi)
        else:
            data_dict[keys]["relative_importance"] = "NaN"

newFeature()
def extractRelativeImportance():
    """Return ([relative_importance values], [matching keys]), skipping
    every record whose value is the missing marker 'NaN'."""
    scores = []
    people = []
    for person in data_dict:
        if data_dict[person]["relative_importance"] != 'NaN':
            scores.append(data_dict[person].get("relative_importance"))
            people.append(person)
    return scores, people
### Store to my_dataset for easy export below.
#enron_dataframe.insert(
my_dataset = data_dict
# Feature list extended with the engineered 'relative_importance' feature.
features_list2 = ['poi','salary', 'total_stock_value', 'expenses', 'bonus', 'relative_importance']
### Extract features and labels from dataset for local testing
data3 = featureFormat(my_dataset, features_list2, sort_keys = True)
labels, features = targetFeatureSplit(data3)
## Full feature list, used only to probe how many features are worth keeping.
features_list4 = ["poi", "salary" , "to_messages" , "deferral_payments" , "total_payments" ,
"exercised_stock_options", "bonus" , "restricted_stock" , "shared_receipt_with_poi" ,
"restricted_stock_deferred", "total_stock_value", "expenses" , "loan_advances" ,
"from_messages" , "other" , "from_this_person_to_poi",
"director_fees" , "deferred_income" , "long_term_incentive",
"from_poi_to_this_person"]
data4 = featureFormat(my_dataset, features_list4, sort_keys = True)
labels4, features4 = targetFeatureSplit(data4)
labels4 = np.array(labels4)
features4 = np.array(features4)
#===================================================================================#
#trying the classifiers
### Task 4: Try a varity of classifiers
### Please name your classifier clf for easy export below.
### Note that if you want to do PCA or other multi-stage operations,
### you'll need to use Pipelines. For more info:
### http://scikit-learn.org/stable/modules/pipeline.html
# Provided to give you a starting point. Try a variety of classifiers.
def classifyNB():
    """Gaussian Naive Bayes on the module-level split (features_train,
    labels_train, features_test, labels_test — set in the
    StratifiedShuffleSplit loop below).  Prints accuracy and precision.

    Returns (fitted classifier, accuracy, precision).
    """
    from sklearn.naive_bayes import GaussianNB
    clf = GaussianNB()
    clf.fit(features_train, labels_train)
    pred = clf.predict(features_test)
    acc = accuracy_score(labels_test, pred)
    pres = precision_score(labels_test, pred)
    print acc , "NB Accuracy" , pres, "NB precision","\n"
    return clf , acc, pres
def classifyDT():
    """Decision tree (gini split, fixed random_state=8 for reproducibility)
    on the module-level train/test split.  Prints accuracy and precision.

    Returns (fitted classifier, accuracy, precision).
    """
    from sklearn import tree
    clf = tree.DecisionTreeClassifier(criterion='gini', splitter='best', \
    max_depth=None, min_samples_split=2, \
    min_samples_leaf=1, min_weight_fraction_leaf=0.0,\
    max_features=None, random_state=8, \
    max_leaf_nodes=None, min_impurity_split=1e-07,\
    class_weight=None, presort=False)
    clf.fit(features_train, labels_train)
    pred = clf.predict(features_test)
    acc = accuracy_score(labels_test, pred)
    pres = precision_score(labels_test, pred)
    print acc , "DT Accuracy" , pres, "DT precision", "\n"
    return clf , acc, pres
def classifySVM():
    """Polynomial-kernel SVC (C=0.9) on the module-level train/test split.
    Prints accuracy and precision.

    Returns (fitted classifier, accuracy, precision).
    """
    from sklearn.svm import SVC
    clf = SVC(C=0.9, cache_size=200, class_weight=None, coef0=0.0,
        decision_function_shape=None, degree=3, gamma='auto', kernel='poly')
    clf.fit(features_train, labels_train)
    pred = clf.predict(features_test)
    acc = accuracy_score(labels_test, pred)
    pres = precision_score(labels_test, pred)
    print acc , "SVM Accuracy" , pres, "SVM precision" , "\n"
    return clf , acc, pres
def classifyRF():
    """Random forest (200 trees) on the module-level train/test split.
    Prints accuracy and precision.

    Returns (fitted classifier, accuracy, precision).
    """
    from sklearn.ensemble import RandomForestClassifier
    clf = RandomForestClassifier(n_estimators=200)
    clf.fit(features_train, labels_train)
    pred = clf.predict(features_test)
    acc = accuracy_score(labels_test, pred)
    pres = precision_score(labels_test, pred)
    print acc , "RF Accuracy" , pres, "RF precision" , "\n"
    return clf , acc, pres
def classifyAB():
    """AdaBoost (SAMME, 200 estimators, learning rate 0.5) on the
    module-level train/test split.  Prints accuracy and precision.

    Returns (fitted classifier, accuracy, precision).
    """
    from sklearn.ensemble import AdaBoostClassifier
    clf = AdaBoostClassifier(base_estimator=None, n_estimators=200, \
    learning_rate=0.5, algorithm='SAMME', random_state=None)
    clf.fit(features_train, labels_train)
    pred = clf.predict(features_test)
    acc = accuracy_score(labels_test, pred)
    pres = precision_score(labels_test, pred)
    print acc , "AB Accuracy" , pres, "AB precision"
    # NOTE(review): this scores the classifier against its *own* test
    # predictions rather than the true labels — it measures self-consistency,
    # not accuracy; confirm intent.
    scores = cross_val_score(clf,features_test,pred )
    print scores.mean() , "AB Cross val score" , "\n"
    return clf , acc, pres
#give the number of splits on the validation step
n_splits = 5
# Stratified shuffle split keeps the POI/non-POI ratio equal across folds.
SSS = StratifiedShuffleSplit(n_splits, test_size=0.5, random_state=0)
SSS.get_n_splits(features, labels)
print SSS ,"\n"
print "performing a stratified shuffle split for the validation process \
to ensure that an equal ratio of POIs to non-POIs are present in the training and \
test set", "\n"
# Per-fold metric accumulators, one pair per classifier family.
accuracyNB = []
precisionNB = []
accuracyRF = []
precisionRF = []
accuracyAB = []
precisionAB = []
accuracyDT = []
precisionDT = []
for train_index, test_index in SSS.split(features, labels):
    print "=================================================================="
    print "TRAIN:", train_index, "\n" "TEST:", test_index, "\n"
    features = np.array(features)
    labels = np.array(labels)
    # These four module-level names are what the classify* helpers read.
    features_train, features_test = features[train_index], features[test_index]
    labels_train, labels_test = labels[train_index], labels[test_index]
    clf1, acc1, prec1 = classifyNB()
    clf2, acc2, prec2 = classifyRF()
    clf3, acc3, prec3 = classifyAB()
    clf4, acc4, prec4 = classifyDT()
    accuracyNB.append(acc1)
    precisionNB.append(prec1)
    accuracyRF.append(acc2)
    precisionRF.append(prec2)
    accuracyAB.append(acc3)
    precisionAB.append(prec3)
    accuracyDT.append(acc4)
    precisionDT.append(prec4)
print "=================================================================="
# Fold-averaged metrics for each classifier.
print "average accuracy NB : " , float(sum(accuracyNB))/n_splits
print "precision accuracy NB : " , float(sum(precisionNB))/n_splits
print "average accuracy RF : " , float(sum(accuracyRF))/n_splits
print "precision accuracy RF : " , float(sum(precisionRF))/n_splits
print "average accuracy AB : " , float(sum(accuracyAB))/n_splits
print "precision accuracy AB : " , float(sum(precisionAB))/n_splits
print "average accuracy DT : " , float(sum(accuracyDT))/n_splits
print "precision accuracy DT : " , float(sum(precisionDT))/n_splits
#===================================================================================#
#exporting features
# NOTE(review): the two self-assignments below are no-ops, kept verbatim.
my_dataset = my_dataset
features_list = features_list
# Final model: decision tree, refit on the last fold's split.
clf, acc, prec = classifyDT()
#clf = classifyAB()
#clf = classifySVM()
#clf = classifyDT()
dump_classifier_and_data(clf, my_dataset, features_list)
#==============================================================================
#==============================================================================
# GaussianNB(priors=None)
# Accuracy: 0.85629 Precision: 0.49612 Recall: 0.38400 F1: 0.43292 F2: 0.40218
# Total predictions: 14000 True positives: 768 False positives: 780 False negatives: 1232 True negatives: 11220
#
#==============================================================================
#==============================================================================
|
[
"pranay.valson@gmail.com"
] |
pranay.valson@gmail.com
|
6645d5a6dce03632ee27b25baedaec0596e6733e
|
4eb32b229dffcfc19cc28189e12602e63a3799cc
|
/tests/conftest.py
|
b4628e9431f31f66807cbb01daa476159a7e75b7
|
[
"MIT"
] |
permissive
|
karlneco/kanji-test-maker
|
4fc09e31a80fdb47e176ba7f69daaa36790dc412
|
6d3c855718971cb0061b4c238ebb8329328018bf
|
refs/heads/master
| 2023-03-31T00:45:59.830161
| 2023-03-27T04:29:10
| 2023-03-27T04:29:10
| 227,032,872
| 2
| 0
|
MIT
| 2023-02-15T23:08:52
| 2019-12-10T05:00:46
|
Python
|
UTF-8
|
Python
| false
| false
| 3,456
|
py
|
import pytest
from flask_login import login_user, login_manager, LoginManager
from hktm import create_app, db
from hktm.models import User, Lesson, LessonMaterial, MaterialType
@pytest.fixture()
def app():
    """Flask application configured from test.cfg."""
    return create_app('test.cfg')
@pytest.fixture()
def auth_user(app):
    """Make every request authenticated as the first User in the database
    by registering a request_loader on the app's login manager."""
    @app.login_manager.request_loader
    def load_user_from_request(request):
        return User.query.first()
@pytest.fixture(scope='module')
def test_client():
    """Module-scoped Flask test client with an app context kept pushed for
    the lifetime of the module's tests."""
    flask_app = create_app('test.cfg')
    client = flask_app.test_client()
    with flask_app.app_context():
        yield client
@pytest.fixture()
def init_database():
    """Create all tables, hand the db to the test, then drop everything."""
    db.create_all()
    db.session.commit()
    yield db
    db.drop_all()
@pytest.fixture()
def add_data(init_database):
    """Seed the test database: six material types, five users with varying
    grade assignments, five lessons, and one lesson material."""
    type_specs = [
        ('KJTS', 'Kanji Test', 'Instruction for Kanji Test'),
        ('TRCP', 'Tracing', 'Instruction for Tracing'),
        ('KJWR', 'Kanji Writing', 'Instruction for Kanji Writing'),
        ('KJRD', 'Kanji Reading', 'Instruction for Kanji Reading'),
        ('NWRD', 'New Reading', 'Instruction for New Reading'),
        ('KT36', 'Kanji Test 3-6', 'Instruction for Kanji Test grades 3-6'),
    ]
    material_types = [MaterialType(*spec) for spec in type_specs]

    user1 = User('user1@gmail.com', 'password')
    user1.grades = '1'
    user2 = User('user26@gmail.com', 'password')
    user2.grades = '2,6'
    user3 = User('userempty@gmail.com', 'password')
    user3.grades = '3'
    user4 = User('usernoteset@gmail.com', 'password')  # grades deliberately unset
    admin = User('admin@hoshuko.com', 'password')
    admin.grades = 'A123456789'

    lesson11 = Lesson('Grade 1 - Lesson 1', '1')
    lesson12 = Lesson('Grade 1 - Lesson 2', '1')
    lesson21 = Lesson('Grade 2 - Lesson 1', '2')
    lesson22 = Lesson('Grade 2 - Lesson 2', '2')
    lesson61 = Lesson('Grade 6 - Lesson 1', '6')

    db.session.add_all(material_types)
    db.session.add_all([user1, user2, user3, user4, admin])
    db.session.add_all([lesson11, lesson12, lesson21, lesson22, lesson61])
    db.session.commit()

    # The material row needs lesson11's id, so it is added after the first commit.
    db.session.add(LessonMaterial('Lesson 1 Material 1', 'something', lesson11.id, 'KJTS'))
    db.session.commit()
@pytest.fixture()
def existing_user():
    """Persist a grade-1 user so account-related tests have an existing row."""
    account = User('testuser@gmail.com', 'password')
    account.grades = '1'
    db.session.add(account)
    db.session.commit()
@pytest.fixture
def authenticated_request(test_client):
    """Create and persist a grade-1 test user.

    NOTE(review): despite the name, no login is performed — the login_user
    call is commented out below, so this currently only seeds the user row.
    """
    user = User('testuser@gmail.com','password')
    user.grades = '1'
    db.session.add(user)
    db.session.commit()
    # with flask_app.test_request_context():
    #     yield login_user(User('testuser@gmail.com','password'))
######################################## fix for Live_server fixture and windows
# @pytest.fixture(scope="session")
# def flask_port():
# ## Ask OS for a free port.
# #
# with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
# s.bind(("", 0))
# addr = s.getsockname()
# port = addr[1]
# return port
#
# @pytest.fixture(scope="session", autouse=True)
# def live_server(flask_port):
# env = os.environ.copy()
# env["FLASK_APP"] = "main.py"
# server = subprocess.Popen(['flask', 'run', '--port', str(flask_port)], env=env)
# try:
# yield server
# finally:
# server.terminate()
|
[
"karl1112@gmail.com"
] |
karl1112@gmail.com
|
16ca7bb88839c3a6b72181762a2ffaa3aac44251
|
f4062989a2b3cab117ac18a1e5408b521f69bf76
|
/users/validators.py
|
d7e65c36f869f9ac0cf31fc4c5ebfb0141d88c2e
|
[] |
no_license
|
saeedhassan/CHQ_Backend
|
4e07bd5d95251e6bcd472d320e92f092d64bf071
|
3798a2b8c4e5bfbb4f4826ee22d9574316a18a90
|
refs/heads/main
| 2023-02-26T02:36:59.478206
| 2021-01-25T17:07:53
| 2021-01-25T17:07:53
| 467,846,247
| 1
| 0
| null | 2022-03-09T08:49:02
| 2022-03-09T08:49:02
| null |
UTF-8
|
Python
| false
| false
| 1,732
|
py
|
"""
Custom model validators
"""
import re
import django
import jsonschema
from django.core.exceptions import ValidationError
from django.core.validators import BaseValidator
from django.utils.translation import gettext_lazy as _
from users.CHQ_Scoring.github_score import CHQScore
from django.conf import settings
from users.utils import get_github_username
from users import news
class JSONSchemaValidator(BaseValidator):
    """Model validator that checks a JSON value against a schema template."""

    def compare(self, candidate, schema):
        # BaseValidator invokes compare(cleaned_value, limit_value);
        # here limit_value is the JSON schema to validate against.
        try:
            jsonschema.validate(candidate, schema)
        except jsonschema.exceptions.ValidationError:
            raise django.core.exceptions.ValidationError(
                '%(value)s failed JSON schema check', params={'value': candidate})
def validate_no_news_source(value):
    """Reject any source that is not listed in news.NEWS_SITES."""
    if value in news.NEWS_SITES:
        return
    raise ValidationError(
        _('%(value)s is not an available news source'),
        params={'value': value},
    )
def validate_github_url(value):
    """Validate that *value* contains a github.com profile URL.

    A username is alphanumeric runs separated by single hyphens (GitHub's
    rule); an optional trailing slash is accepted.  Raises ValidationError
    on mismatch, returns None otherwise.
    """
    # BUGFIX: the '.' in 'github.com' was previously unescaped, so any
    # character matched there (e.g. 'githubXcom/foo' passed validation).
    pattern = r'github\.com/[a-zA-Z0-9]+(?:-[a-zA-Z0-9]+)*/?$'
    if re.search(pattern, value) is None:
        raise ValidationError(
            _('%(value)s is not a valid github profile.'),
            params={'value': value},
        )
def validate_github_user(value):
    """Confirm the github profile belongs to a real user (one API call)."""
    user_name = get_github_username(value)
    scorer = CHQScore(settings.GITHUB_TOKEN)
    # Explicit `== False` kept from the original, in case check_user_exists
    # can return non-boolean values that should not trigger the error.
    if scorer.check_user_exists(user_name) == False:
        raise ValidationError(_('github username %(value)s doesnt exist'),
                              params={'value': user_name},)
|
[
"x.suwaidi@gmail.com"
] |
x.suwaidi@gmail.com
|
64f1c7bd8f0f8bab932d8e95efb828f317b84145
|
50008b3b7fb7e14f793e92f5b27bf302112a3cb4
|
/recipes/Python/438806_catenateFilesFactory/recipe-438806.py
|
59a8c77281e6b148e80c4e006fc5987455451ecf
|
[
"Python-2.0",
"MIT"
] |
permissive
|
betty29/code-1
|
db56807e19ac9cfe711b41d475a322c168cfdca6
|
d097ca0ad6a6aee2180d32dce6a3322621f655fd
|
refs/heads/master
| 2023-03-14T08:15:47.492844
| 2021-02-24T15:39:59
| 2021-02-24T15:39:59
| 341,878,663
| 0
| 0
|
MIT
| 2021-02-24T15:40:00
| 2021-02-24T11:31:15
|
Python
|
UTF-8
|
Python
| false
| false
| 2,449
|
py
|
import os
def catenateFilesFactory(isTextFiles=True, isClearTgt=True, isCreateTgt=True):
    """return a catenateFiles function parameterized by the factory arguments.

    isTextFiles: Catenate text files.  If the last line of a non-empty file
                 is not terminated by an EOL, append an EOL to it.
    isClearTgt:  If the target file already exists, clear its original
                 contents before appending the source files.
    isCreateTgt: If the target file does not already exist, and this
                 parameter is True, create the target file; otherwise raise
                 an IOError.
    """
    # All files are handled in binary mode, so the EOL marker must be bytes
    # on Python 3 (on Python 2, .encode('ascii') on os.linesep is a no-op).
    eol = os.linesep.encode('ascii')
    lenEol = len(eol)
    def catenateFiles(tgtFile, *srcFiles):
        isTgtAppendEol = False
        if os.path.isfile(tgtFile):
            if isClearTgt:
                # Truncate the existing target.
                tgt = open(tgtFile, 'wb')
                tgt.close()
            elif isTextFiles:
                # Appending text: remember whether the existing content
                # still needs a terminating EOL before the first source.
                tgt = open(tgtFile, 'rb')
                data = tgt.read()
                tgt.close()
                if len(data) and (len(data) < lenEol or data[-lenEol:] != eol):
                    isTgtAppendEol = True
        elif not isCreateTgt:
            # BUGFIX: "raise IOError, msg" is Python-2-only syntax; the call
            # form below works on both Python 2 and Python 3.
            raise IOError("catenateFiles target file '%s' not found" % (
                tgtFile))
        tgt = open(tgtFile, 'ab')
        if isTgtAppendEol:
            tgt.write(eol)
        for srcFile in srcFiles:
            src = open(srcFile, 'rb')
            data = src.read()
            src.close()
            tgt.write(data)
            # In text mode, guarantee each source ends with an EOL.
            if (isTextFiles and len(data) and
                (len(data) < lenEol or data[-lenEol:] != eol)):
                tgt.write(eol)
        tgt.close()
        return
    # Support reflection and doc string.
    catenateFiles.isTextFiles = isTextFiles
    catenateFiles.isClearTgt = isClearTgt
    catenateFiles.isCreateTgt = isCreateTgt
    if isTextFiles:
        docFileType = "text"
    else:
        docFileType = "binary"
    if isCreateTgt:
        docCreate = "Create tgtFile if it does not already exist."
    else:
        docCreate = "Require that tgtFile already exists."
    if isClearTgt:
        docClear = "replace"
    else:
        docClear = "append to"
    catenateFiles.__doc__ = """Catenate %s srcFiles to %s the tgtFile.
    %s
    All of the srcFiles must exist; otherwise raise an IOError.
    """ % (docFileType, docClear, docCreate)
    return catenateFiles
|
[
"betty@qburst.com"
] |
betty@qburst.com
|
35b33536fa724c6e8c12b3a831280b5ef91b1f3e
|
336f6cbc2fa3821ad07c702c136bd55034cd2780
|
/api/views.py
|
5f4dc1ce2afdd034d87e15aa2e72ff373803d22c
|
[] |
no_license
|
wucg-python/dry1
|
2521bccfa23c84d4d93e28c9a4a19c4eb0c6b6ec
|
e68ebc5e6f65ee3ca5850223a61574b40837feba
|
refs/heads/master
| 2023-01-05T19:28:02.699639
| 2020-11-02T02:16:08
| 2020-11-02T02:16:08
| 309,234,902
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,390
|
py
|
from django.shortcuts import render
# Create your views here.
from rest_framework import mixins
from rest_framework.generics import GenericAPIView
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import generics
from api.models import Book, User
from api.serializers import BookModelSerializer, UserModelSerializer
from rest_framework import viewsets
class BookAPIView(APIView):
    """Hand-rolled CRUD endpoints for Book on top of a bare APIView.

    Books are soft-deleted: reads filter on is_delete=False and DELETE only
    flips the flag.  Single-object routes receive the pk as the `id` URL
    kwarg; bulk routes take a JSON list in the request body.
    """

    def get(self, request, *args, **kwargs):
        """Return one non-deleted book (id in URL) or all non-deleted books."""
        book_id = kwargs.get('id')
        if book_id:
            book = Book.objects.filter(id=book_id, is_delete=False)
            serialize = BookModelSerializer(book, many=True).data
            return Response({
                'status': 200,
                "message": "查询一个书籍",
                "result": serialize,
            })
        else:
            books = Book.objects.filter(is_delete=False)
            serialize = BookModelSerializer(books, many=True).data
            return Response({
                'status': 200,
                "message": "查询所有书籍",
                "result": serialize,
            })

    def post(self, request, *args, **kwargs):
        """Create one book (dict body) or several books (list body)."""
        request_data = request.data
        if isinstance(request_data, dict):
            many = False
        elif isinstance(request_data, list):
            many = True
        else:
            return Response({
                "status": 400,
                "message": "添加失败"
            })
        serialize = BookModelSerializer(data=request_data, many=many)
        serialize.is_valid(raise_exception=True)
        book = serialize.save()
        return Response({
            'status': 200,
            "message": "添加书籍",
            "result": BookModelSerializer(book, many=many).data,
        })

    def delete(self, request, *args, **kwargs):
        """Soft-delete one book (id in URL) or many (list of ids in body)."""
        id = kwargs.get('id')
        if id:
            # Single delete: normalise to the bulk shape.
            ids = [id]
        else:
            ids = request.data
        response = Book.objects.filter(id__in=ids, is_delete=False).update(is_delete=True)
        if response:
            return Response({
                "status": 200,
                "message": "删除成功",
            })
        else:
            return Response({
                "status": 400,
                "message": "删除失败或已被删除",
            })

    def put(self, request, *args, **kwargs):
        """Partially update a single book identified by the `id` URL kwarg."""
        request_data = request.data
        book_id = kwargs.get('id')
        try:
            book_obj = Book.objects.get(id=book_id)
        except Exception:
            return Response({
                "status": 400,
                "message": "对象不存在"
            })
        serializer = BookModelSerializer(data=request_data, instance=book_obj, partial=True)
        serializer.is_valid(raise_exception=True)
        book = serializer.save()
        return Response({
            "status": 200,
            "message": "修改成功",
            "result": BookModelSerializer(book).data
        })

    def patch(self, request, *args, **kwargs):
        """Partially update one book (id in URL, dict body) or several (no
        id in URL, list body where every item must carry its own `id`)."""
        book_id = kwargs.get('id')
        request_data = request.data
        if book_id and isinstance(request_data, dict):
            # Single update: normalise to the bulk shape.
            ids = [book_id]
            request_data = [request_data]
        elif not book_id and isinstance(request_data, list):
            ids = []
            for item in request_data:
                pk = item.pop('id', None)
                if pk:
                    ids.append(pk)
                else:
                    return Response({
                        'status': 400,
                        "message": "id不存在"
                    })
        else:
            return Response({
                "status": 400,
                "message": "格式错误"
            })
        books_obj = []
        new_data = []
        for index, pk in enumerate(ids):
            try:
                # BUGFIX: the pk must be passed as a keyword lookup —
                # Book.objects.get(pk_value) raises TypeError.
                book_obj = Book.objects.get(id=pk)
                books_obj.append(book_obj)
                new_data.append(request_data[index])
            except Exception:
                # Ids that cannot be resolved are silently skipped, as before.
                continue
        serializer = BookModelSerializer(data=new_data, instance=books_obj, partial=True, many=True)
        serializer.is_valid(raise_exception=True)
        datas = serializer.save()
        return Response({
            "status": 200,
            "message": "成功",
            # BUGFIX: `datas` is a list after a many=True save, so the
            # response serializer needs many=True as well.
            "result": BookModelSerializer(datas, many=True).data
        })
class BookGenericAPIView(GenericAPIView,
                         mixins.ListModelMixin,
                         mixins.RetrieveModelMixin,
                         mixins.DestroyModelMixin,
                         mixins.CreateModelMixin,
                         mixins.UpdateModelMixin,
                         ):
    """Book CRUD built from GenericAPIView plus the standard DRF mixins."""
    queryset = Book.objects.filter(is_delete=False)
    serializer_class = BookModelSerializer
    lookup_field = "id"

    def get(self, request, *args, **kwargs):
        # Detail view when the URL carries an id, list view otherwise.
        handler = self.retrieve if "id" in kwargs else self.list
        return handler(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        return self.create(request, *args, **kwargs)

    def delete(self, request, *args, **kwargs):
        return self.destroy(request, *args, **kwargs)

    def put(self, request, *args, **kwargs):
        # NOTE: delegates to partial_update, so PUT behaves like PATCH here.
        return self.partial_update(request, *args, **kwargs)
class BookGenerics(generics.ListAPIView,
                   generics.ListCreateAPIView):
    """List/create Book endpoints using DRF concrete generic views."""
    # NOTE(review): unlike BookAPIView, this queryset has no is_delete=False
    # filter, so soft-deleted books are listed — confirm that is intended.
    queryset = Book.objects.filter()
    serializer_class = BookModelSerializer
    lookup_field = "id"
class UserAPIView(viewsets.GenericViewSet,
                  mixins.CreateModelMixin):
    """User registration and login exposed as viewset actions."""
    queryset = User.objects.all()
    serializer_class = UserModelSerializer
    lookup_field = "id"
    def register(self,request,*args,**kwargs):
        # Delegates to CreateModelMixin.create: validate and save a new user.
        return self.create(request,*args,**kwargs)
    def login(self,request,*args,**kwargs):
        """Check the posted username/password pair against the users table."""
        request_data = request.data
        print(request_data)
        # SECURITY(review): the raw posted password is compared directly
        # against the stored column — passwords appear to be stored and
        # checked in plaintext; this should use hashed-password verification.
        user= User.objects.filter(username=request_data.get('username'),password=request_data.get('password'))
        if user:
            return Response({
                "status":200,
                "message":"登陆成功"
            })
        return Response({
            "status":400,
            "message":"登陆失败"
        })
|
[
"1490168051@qq.com"
] |
1490168051@qq.com
|
db6db0e486babf550b51b49de237a138ddc8b6ff
|
960a8f1bec84563680271d10e2b9dfd296599d86
|
/python/kyu_6/replace_with_alphabet_position.py
|
d24aef52d6acd7655c9d3a2f654f25666d09e95a
|
[] |
no_license
|
Sqvall/codewars
|
42bcbfad99cd6c34fd3ec5fd903010d255e5d8e9
|
4f102d89ff86cd544eed362a232cbc1f4afea77c
|
refs/heads/master
| 2021-11-18T22:35:38.717604
| 2021-09-27T10:54:10
| 2021-09-27T10:54:10
| 204,555,403
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 606
|
py
|
""" https://www.codewars.com/kata/546f922b54af40e1e90001da """
from string import ascii_lowercase
def alphabet_position(text):
score = 1
score_dict = {}
for i in ascii_lowercase:
score_dict[i] = score
score += 1
t = text.lower()
out = ''
for i in t:
if i in score_dict:
out += repr(score_dict[i]) + " "
return out[0:-1]
print(alphabet_position("The sunset sets at twelve o' clock."))
# '20 8 5 19 21 14 19 5 20 19 5 20 19 1 20 20 23 5 12 22 5 15 3 12 15 3 11'
# '20 8 5 19 21 14 19 5 20 19 5 20 19 1 20 20 23 5 12 22 5 15 3 12 15 3 11'
|
[
"masster1987@inbox.ru"
] |
masster1987@inbox.ru
|
b003fe59cd7510a33a775dff69cf99d6c0c439fb
|
c21b64617d440a2b70b75ecdb756607044b1b797
|
/todo/views.py
|
c135c8223640593189972880eb382f5f623da91c
|
[
"Apache-2.0"
] |
permissive
|
foxy4096/DjangoToDo
|
cfe8d6151ba2f32f922777f7d4646dd11d6b36cb
|
f19d1a6d0d953354245cb4e3dedd2ad8710b4ec0
|
refs/heads/main
| 2023-09-02T03:34:33.144510
| 2021-11-12T10:21:17
| 2021-11-12T10:21:17
| 427,317,178
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 850
|
py
|
from django.views.generic import *
from django.shortcuts import redirect
from .models import ToDo
class ToDoListView(ListView):
model = ToDo
context_object_name = "todos"
class ToDoCreateView(CreateView):
model = ToDo
success_url = '/'
fields = '__all__'
def form_valid(self, form):
form.save()
return super().form_valid(form)
class ToDoUpdateView(UpdateView):
model = ToDo
success_url = '/'
fields = '__all__'
def form_valid(self, form):
form.save()
return super().form_valid(form)
class ToDoDeleteView(DeleteView):
model = ToDo
success_url = '/'
def get_context_data(self, **kwargs):
context = super(ToDoDeleteView, self).get_context_data(**kwargs)
context['todo'] = ToDo.objects.get(pk=self.object.pk)
return context
|
[
"54215788+foxy4096@users.noreply.github.com"
] |
54215788+foxy4096@users.noreply.github.com
|
d1fdbec314276a6ab8c6ad6bf9fb6fc536447263
|
09756a11e9c1e7c771a1ee7afaafff2bee28c9aa
|
/pyReadFileEx.py
|
30682bc1a8cadb34471a27fbe7b7c67e3103ea17
|
[] |
no_license
|
mgupte7/python-examples1
|
74c1038ce0973ea6c668adec064c64ad59341073
|
cfa903f3a6e021a408013f2fd45d3cb281a094ab
|
refs/heads/main
| 2023-08-27T10:06:24.681075
| 2021-11-11T02:56:28
| 2021-11-11T02:56:28
| 405,139,727
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 949
|
py
|
# ------------------------------------------------
# ----------------Python Read Files---------------
# ------------------------------------------------
# ex 1 - The open() function returns a file object, which has a read() method for reading the content of the file:
f = open("demofile.txt", "r")
print(f.read())
# ex 2 - Open a file on a different location:
f = open("D:\\myfiles\welcome.txt", "r")
print(f.read())
# ex 3 - Return the 5 first characters of the file:
f = open("demofile.txt", "r")
print(f.read(5))
# ex 4 - Read one line of the file:
f = open("demofile.txt", "r")
print(f.readline())
# ex 5 - Read two lines of the file:
f = open("demofile.txt", "r")
print(f.readline())
print(f.readline())
# ex 6 - Loop through the file line by line:
f = open("demofile.txt", "r")
for x in f:
print(x)
# ex 7 - Close the file when you are finish with it:
f = open("demofile.txt", "r")
print(f.readline())
f.close()
# ex 8
# ex 9
# ex 10
|
[
"noreply@github.com"
] |
mgupte7.noreply@github.com
|
0384ff350e0c11acd5694d601dd75ef9f8c1794b
|
0e0254ead600d156e96b6a3a814806156f0d807d
|
/users/urls.py
|
a54f169d91f50903e4223765ed8dae5f79e9e341
|
[] |
no_license
|
catding/trainserver
|
1ade4b9b53995ba93d290f19d74c7981ecc1a9bf
|
3d0dbcf92642fe42293736f8bdf812d43e0cb67b
|
refs/heads/master
| 2022-04-08T05:03:33.773819
| 2020-03-07T13:13:08
| 2020-03-07T13:13:08
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,611
|
py
|
from django.urls import include, path, re_path
from rest_framework import routers
from .api import (
LoginView, LogoutView, AccountDetailView, PasswordChangeView,
PasswordResetView, PasswordResetConfirmView, ExcelfileUploadView, UserAvatarView, UserView
)
router = routers.SimpleRouter(trailing_slash=False)
router.register(r'user', UserView)
urlpatterns = [
# URLs that do not require a session or valid token
path('auth/password/reset', PasswordResetView.as_view(),
name='user_password_reset'),
path('auth/password/reset/confirm', PasswordResetConfirmView.as_view(),
name='user_password_reset_confirm'),
path('auth/password/change', PasswordChangeView.as_view(),
name='user_password_change'),
path('auth/login', LoginView.as_view(), name='user_login'),
# URLs that require a user to be logged in with a valid session / token.
path('auth/logout', LogoutView.as_view(), name='auth_logout'),
path('account/info', AccountDetailView.as_view(), name='account_details'),
path('account/avatar', UserAvatarView.as_view(), name='account_avatar'),
# path('user/list/trainmanager', UserListView.as_view(), name='user_list'),
path('user/upload', ExcelfileUploadView.as_view(), name='user_upload'),
# path('user/list/trainmanager', UserListView.as_view({'get': 'list'}), name='user_trainmanagerlist'),
# path('user/list', UserListView.as_view({'get': 'list', 'patch': 'bulkdelete'}), name='user_list'),
# path('user/list/<str:roles>', UserListView.as_view({'get': 'list'}), name='user_listrole'),
]
urlpatterns += router.urls
|
[
"36583983@qq.com"
] |
36583983@qq.com
|
3e1077c0c7f104ca8e5cff93ad223d48c668bb26
|
d04975c798ad43c8fadc8cf7b0b3742bbbdbc632
|
/dashboard/consumption/queries.py
|
402f2756edeaf320d860670f4f342dbda480aacc
|
[] |
no_license
|
yuki0417/smap-coding-challenge
|
331e99a980adcf561338ec539631c78e33b251d0
|
6041d895f6088d8d19c12cd0ec7a44c3bb15d04c
|
refs/heads/master
| 2023-03-13T14:48:44.443640
| 2021-03-02T13:07:59
| 2021-03-02T13:07:59
| 339,998,433
| 0
| 0
| null | 2021-03-02T13:08:00
| 2021-02-18T09:28:08
|
Python
|
UTF-8
|
Python
| false
| false
| 2,451
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db.models import Avg, Sum
from .models import Consumption, User
class ConsumptionQueryset():
"""queryset for consumption data
"""
def get_consumption_avg_and_sum():
"""get average and sum of consumption
Get average and sum of consumption all time
Returns:
QuerySet (dict): consumption data for each user
Raises:
Consumption.DoesNotExist: if Consumption is not found
"""
consum_data = Consumption.objects.values('datetime').annotate(
Avg('consumption'), Sum('consumption')).order_by('datetime')
if consum_data.exists():
return consum_data
else:
raise Consumption.DoesNotExist
def get_each_user_average_consumption():
"""get user's average consumption
Get user's average comsumption
Returns:
QuerySet (dict): consumption data for each user
Raises:
Consumption.DoesNotExist: if Consumption is not found
"""
consum_data = Consumption.objects.values('user_id').annotate(
Avg('consumption'))
if consum_data.exists():
return consum_data
else:
raise Consumption.DoesNotExist
def get_user_consumption_order_by_date(user_id):
"""get user's consumption data order by datetime
Get specified user's comsumption.
Args:
user_id (int): user id
Returns:
QuerySet (Consumption): consumption data for each user
Raises:
Consumption.DoesNotExist: if user is not found
"""
user = User.objects.get(id=user_id)
consum_data = Consumption.objects.filter(
user_id=user).order_by('datetime')
if consum_data.exists():
return consum_data
else:
raise Consumption.DoesNotExist
class UserQueryset():
"""queryset for user data
"""
def get_all_user():
"""get all user
Get All user data.
Args:
user (User object): user model object
Returns:
QuerySet (User): all user data
Raises:
User.DoesNotExist: if user is not found
"""
user_data = User.objects.all()
if user_data.exists():
return user_data
else:
raise User.DoesNotExist
|
[
"nicorasuster@gmail.com"
] |
nicorasuster@gmail.com
|
34ffe8b08928d8766903d48e45e665ebdbb0834e
|
8f90482e83eaac3af8b8acf019a232ec92fc878d
|
/assignment1/q2_neural.py
|
5c6f31e1da37706d0dfe27371bfba3de63ff1230
|
[] |
no_license
|
aoussou/cs224n
|
72413ec35ad0c760127703da2ff0d6753d3a3559
|
1a27d8d359c920013424a5e3376fa734c20ac10c
|
refs/heads/master
| 2021-10-23T16:08:50.336982
| 2019-03-18T17:31:39
| 2019-03-18T17:31:39
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,627
|
py
|
#!/usr/bin/env python
import numpy as np
import random
from q1_softmax import softmax
from q2_sigmoid import sigmoid, sigmoid_grad
from q2_gradcheck import gradcheck_naive
def forward_backward_prop(data, labels, params, dimensions):
"""
Forward and backward propagation for a two-layer sigmoidal network
Compute the forward propagation and for the cross entropy cost,
and backward propagation for the gradients for all parameters.
Arguments:
data -- M x Dx matrix, where each row is a training example.
labels -- M x Dy matrix, where each row is a one-hot vector.
params -- Model parameters, these are unpacked for you.
dimensions -- A tuple of input dimension, number of hidden units
and output dimension
"""
### Unpack network parameters (do not modify)
ofs = 0
Dx, H, Dy = (dimensions[0], dimensions[1], dimensions[2])
W1 = np.reshape(params[ofs:ofs+ Dx * H], (Dx, H))
ofs += Dx * H
b1 = np.reshape(params[ofs:ofs + H], (1, H))
ofs += H
W2 = np.reshape(params[ofs:ofs + H * Dy], (H, Dy))
ofs += H * Dy
b2 = np.reshape(params[ofs:ofs + Dy], (1, Dy))
z1 = np.dot(data,W1) + b1
h = sigmoid(z1)
z2 = np.dot(h,W2) + b2
yhat = softmax(z2)
cost = -np.sum(labels*np.log(yhat))
d1 = yhat - labels
d2 = d1.dot(W2.T)
d3 = d2*sigmoid_grad(h)
gradW2 = np.dot(h.T,d1)
gradb2 = np.sum(d1,axis = 0)
gradW1 = np.dot(data.T,d3)
gradb1 = np.sum(d3,axis = 0)
### Stack gradients (do not modify)
grad = np.concatenate((gradW1.flatten(), gradb1.flatten(),
gradW2.flatten(), gradb2.flatten()))
return cost, grad
def sanity_check():
"""
Set up fake data and parameters for the neural network, and test using
gradcheck.
"""
print "Running sanity check..."
N = 20
dimensions = [10, 5, 10]
data = np.random.randn(N, dimensions[0]) # each row will be a datum
labels = np.zeros((N, dimensions[2]))
for i in xrange(N):
labels[i, random.randint(0,dimensions[2]-1)] = 1
params = np.random.randn((dimensions[0] + 1) * dimensions[1] + (
dimensions[1] + 1) * dimensions[2], )
gradcheck_naive(lambda params:
forward_backward_prop(data, labels, params, dimensions), params)
def your_sanity_checks():
"""
Use this space add any additional sanity checks by running:
python q2_neural.py
This function will not be called by the autograder, nor will
your additional tests be graded.
"""
if __name__ == "__main__":
sanity_check()
your_sanity_checks()
|
[
"noreply@github.com"
] |
aoussou.noreply@github.com
|
f2f5ef38ae9682ff7b25a938c4bedefddb4f355e
|
51a38a3b7d90bae3b8d137decb681eea5264c1d6
|
/i_scene_cp77_gltf/material_types/glass.py
|
188e0a71a10560152bd25f176c3e8fc445ba17c1
|
[] |
no_license
|
Walrus159/cp77research
|
8ece0de4fec7ab9a61c43dbafc38350ee1f6d0ef
|
4612c86245f874dec3fbf5c2deff9cbf998d23ce
|
refs/heads/main
| 2023-07-19T12:32:59.968590
| 2021-08-31T23:56:42
| 2021-08-31T23:56:42
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,285
|
py
|
import bpy
import os
from ..main.common import imageFromPath
class Glass:
def __init__(self, BasePath,image_format):
self.BasePath = BasePath
self.image_format = image_format
def create(self,Glass,Mat):
CurMat = Mat.node_tree
CurMat.nodes['Principled BSDF'].inputs['Transmission'].default_value = 1
Color = CurMat.nodes.new("ShaderNodeRGB")
Color.location = (-400,200)
Color.hide = True
Color.label = "TintColor"
Color.outputs[0].default_value = (float(Glass["TintColor"]["Red"])/255,float(Glass["TintColor"]["Green"])/255,float(Glass["TintColor"]["Blue"])/255,float(Glass["TintColor"]["Alpha"])/255)
CurMat.links.new(Color.outputs[0],CurMat.nodes['Principled BSDF'].inputs['Base Color'])
IOR = CurMat.nodes.new("ShaderNodeValue")
IOR.location = (-400,-150)
IOR.outputs[0].default_value = float(Glass["IOR"])
IOR.hide = True
IOR.label = "IOR"
CurMat.links.new(IOR.outputs[0],CurMat.nodes['Principled BSDF'].inputs['IOR'])
rImg = imageFromPath(self.BasePath + Glass["Roughness"],self.image_format,True)
rImgNode = CurMat.nodes.new("ShaderNodeTexImage")
rImgNode.location = (-800,50)
rImgNode.image = rImg
rImgNode.label = "Roughness"
CurMat.links.new(rImgNode.outputs[0],CurMat.nodes['Principled BSDF'].inputs['Roughness'])
nImg = imageFromPath(self.BasePath + Glass["Normal"],self.image_format,True)
nImgNode = CurMat.nodes.new("ShaderNodeTexImage")
nImgNode.location = (-800,-300)
nImgNode.image = nImg
nImgNode.label = "Normal"
nRgbCurve = CurMat.nodes.new("ShaderNodeRGBCurve")
nRgbCurve.location = (-500,-300)
nRgbCurve.hide = True
nRgbCurve.mapping.curves[2].points[0].location = (0,1)
nRgbCurve.mapping.curves[2].points[1].location = (1,1)
nMap = CurMat.nodes.new("ShaderNodeNormalMap")
nMap.location = (-200,-300)
nMap.hide = True
CurMat.links.new(nImgNode.outputs[0],nRgbCurve.inputs[1])
CurMat.links.new(nRgbCurve.outputs[0],nMap.inputs[1])
CurMat.links.new(nMap.outputs[0],CurMat.nodes['Principled BSDF'].inputs['Normal'])
|
[
"65016231+ja-to@users.noreply.github.com"
] |
65016231+ja-to@users.noreply.github.com
|
c15c181958bc2aea8abc6b30ac520658a40dd56e
|
5a903f3d295b9942224e5f01bce388a25a788f35
|
/Q_learning_Games_v2/Q_Learning_Games_v2_/Sarsa_vs_Q_Learning/Taxi_Game/Taxi_Analysis/print_taxi_comparison_plots.py
|
dc4dd5c1b825083bece4a64ebfcd723565d9adaa
|
[
"MIT"
] |
permissive
|
GiacomoFerro/Bachelor-Thesis
|
0382e6b36c5b890e72028a97e59e80b5563d0f0f
|
a9ac91a208dfc175084cf22673f88add6ec15281
|
refs/heads/master
| 2020-04-06T18:00:16.103447
| 2019-10-09T15:11:29
| 2019-10-09T15:11:29
| 157,681,361
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,723
|
py
|
#libreria per generare grafici
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
#lib to remove files
import os
print("Make the comparison Plots")
plt.rc('xtick', labelsize=8)
plt.rc('ytick', labelsize=8)
plt.figure(figsize=(10, 5))
f=open("rewards_taxi_qlearning.txt","r")
stringa=f.readline()
n=0
while stringa!="":#count the number of rewards
n+=1
stringa=f.readline()
newRewards=[ 0 for i in range(n)]
newRewardsSarsa=[ 0 for i in range(n)]
#read q-learning rewards
f=open("rewards_taxi_qlearning.txt","r")
stringa=f.readline()
n=0
while stringa!="":#make the rewards list
newRewards[n]=stringa
n+=1
stringa=f.readline()
f.close()
#read sarsa rewards
f=open("rewards_taxi_sarsa.txt","r")
stringa=f.readline()
n=0
while stringa!="":#make the rewards list
newRewardsSarsa[n]=stringa
n+=1
stringa=f.readline()
f.close()
#eps list with numRewards slots
eps=range(0,1000)
plt.plot(eps,newRewards,'r',eps,newRewardsSarsa,'b')
plt.title("Rewards collected over the time for Taxi game")
plt.xlabel("Trials")
plt.ylabel("Rewards")
plt.grid()#put the grid
qlearningLegend = mpatches.Patch(color='red', label='Q-learning')
SarsaLegend = mpatches.Patch(color='blue', label='Sarsa')
plt.legend(handles=[qlearningLegend,SarsaLegend])
plt.show()#print in output the plot and give the possibility to save it on your computer
plt.savefig('taxi_sarsa_vs_Q_learning.png')
os.remove("/home/giacomo/Scrivania/Q_Learning_Games_v2_/Sarsa_vs_Q_Learning/Taxi_Game/Taxi_Analysis/rewards_taxi_sarsa.txt")
os.remove("/home/giacomo/Scrivania/Q_Learning_Games_v2_/Sarsa_vs_Q_Learning/Taxi_Game/Taxi_Analysis/rewards_taxi_qlearning.txt")#to remove the file
|
[
"noreply@github.com"
] |
GiacomoFerro.noreply@github.com
|
1da061864c1b1847e3f5e231440d129e3e4feb12
|
a489362349b6c5b7eaa6b0be0a721e295d25b98b
|
/Gan.py
|
50e05e54bef6fd823c8f0287aedc6382c45774aa
|
[] |
no_license
|
gmc112/Synthesising-Images-Via-Imagination
|
6de97ee71a321b203ed9a33094e2d7dbb51d45ae
|
d32fb800125b8fb8230a3e712ef287624098d5b0
|
refs/heads/master
| 2020-06-28T00:29:46.680922
| 2019-08-01T17:39:20
| 2019-08-01T17:39:20
| 200,092,288
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,951
|
py
|
import tensorflow as tf
import tensorflow.contrib.layers as tfcl
import os
import numpy as np
import imageio
import datetime
import Utilities as util
import Layers_gan as lay
LEARNING_RATE = 2e-4
BATCH_SIZE = 64
HEIGHT = 128
WIDTH = 128
CHANNEL = 3
DATA_REPEATS = 4
EPOCHS = 1000
VERSION = "GAN_1"
DATE = datetime.datetime.now().strftime("%d-%m-%H-%M-%S")
SAVE_PATH = "./Project Out/" + VERSION + "/saved/"
MODE = 1 # 0 For PNG, 1 for JPEG
DROPOUT_RATE = 0.9
DISTORTED = False
RESTORE = True
# MODELS ARE TAKEN FROM EXAMPLE https://www.youtube.com/watch?v=yz6dNf7X7SA
# The generator and discriminator functions are in theory the same layers though they have been heavily refactored to
# improve modularity and reduce the repeated code across
# Comments in the train function mark code sourced from the above link
# These comments are relevant across all three GAN models
def generator(input, random_dim, is_train, reuse=False):
c4, c8, c16, c32, c64 = 512, 256, 128, 64, 32 # channel num
s4 = 4
output_dim = CHANNEL # RGB image
with tf.variable_scope('gen') as scope:
if reuse:
scope.reuse_variables()
w1 = tf.get_variable('w1', shape=[random_dim, s4 * s4 * c4], dtype=tf.float32,
initializer=tf.truncated_normal_initializer(stddev=0.02))
b1 = tf.get_variable('b1', shape=[c4 * s4 * s4], dtype=tf.float32,
initializer=tf.constant_initializer(0.0))
flat_conv1 = tf.add(tf.matmul(input, w1), b1, name='flat_conv1')
# Convolution, bias, activation, repeat!
with tf.name_scope("gen1"):
conv1 = tf.reshape(flat_conv1, shape=[-1, s4, s4, c4], name='conv1')
bn1 = tfcl.batch_norm(conv1, is_training=is_train, epsilon=1e-5, decay=0.9,
updates_collections=None, scope='bn1')
act1 = tf.nn.relu(bn1, name='act1')
tf.summary.histogram("conv_t_act", act1)
# 8*8*256
# Convolution, bias, activation, repeat!
act2 = lay.conv_t(act1, c8, "gen2", is_train, False)
# 16*16*128
act3 = lay.conv_t(act2, c16, "gen3", is_train, False)
# 32*32*64
act4 = lay.conv_t(act3, c32, "gen4", is_train, False)
# 64*64*32
act5 = lay.conv_t(act4, c64, "gen5", is_train, False)
# 128*128*3
return lay.conv_t(act5, output_dim, "gen6", is_train, True)
def discriminator(input, is_train, reuse=False):
c2, c4, c8, c16 = 64, 128, 256, 512 # channel num: 64, 128, 256, 512
with tf.variable_scope('dis') as scope:
if reuse:
scope.reuse_variables()
act1 = lay.conv(input, c2, "dis1", is_train)
act2 = lay.conv(act1, c4, "dis2", is_train)
act3 = lay.conv(act2, c8, "dis3", is_train)
act4 = lay.conv(act3, c16, "dis4", is_train)
dim = int(np.prod(act4.get_shape()[1:]))
fc1 = tf.reshape(act4, shape=[-1, dim], name='fc1')
w2 = tf.get_variable('w2', shape=[fc1.shape[-1], 1], dtype=tf.float32,
initializer=tf.truncated_normal_initializer(stddev=0.02))
b2 = tf.get_variable('b2', shape=[1], dtype=tf.float32,
initializer=tf.constant_initializer(0.0))
logits = tf.add(tf.matmul(fc1, w2), b2, name='logits')
return logits
def train(output, restore):
dataset, size = util.read_images(DATA_REPEATS, MODE)
size = size * EPOCHS
dataset = dataset.map(lambda path: util.parse_image(path, MODE, HEIGHT, WIDTH, CHANNEL, DISTORTED))
dataset = dataset.shuffle(buffer_size=10000)
dataset = dataset.repeat(EPOCHS)
dataset = dataset.batch(BATCH_SIZE)
iterator = dataset.make_one_shot_iterator()
image_batch = iterator.get_next()
iterator_sum = dataset.make_one_shot_iterator()
im_sum_batch = iterator_sum.get_next()
# The following section is sourced as is from the above link
random_dim = 100
with tf.variable_scope('input'):
real_image = tf.placeholder('float', shape=[None, HEIGHT, WIDTH, CHANNEL], name='real_img')
random_input = tf.placeholder('float', shape=[None, random_dim], name='rand_input')
is_train = tf.placeholder('bool', name='is_train')
fake_image = generator(random_input, random_dim, is_train)
real_result = discriminator(real_image, is_train)
fake_result = discriminator(fake_image, is_train, reuse=True)
d_loss = tf.reduce_mean(fake_result) - tf.reduce_mean(real_result) # This optimizes the discriminator.
dloss_summary = tf.summary.scalar("dis loss", d_loss)
g_loss = -tf.reduce_mean(fake_result) # This optimizes the generator.
gloss_summary = tf.summary.scalar("gen loss", g_loss)
t_vars = tf.trainable_variables()
d_vars = [var for var in t_vars if 'dis' in var.name]
g_vars = [var for var in t_vars if 'gen' in var.name]
trainer_d = tf.train.RMSPropOptimizer(learning_rate=LEARNING_RATE).minimize(d_loss, var_list=d_vars)
trainer_g = tf.train.RMSPropOptimizer(learning_rate=LEARNING_RATE).minimize(g_loss, var_list=g_vars)
d_clip = [v.assign(tf.clip_by_value(v, -0.01, 0.01)) for v in d_vars]
# End of sourced section
sess = tf.Session()
saver = tf.train.Saver()
sess.run(tf.global_variables_initializer())
sess.run(tf.local_variables_initializer())
if not os.path.exists("./log/" + VERSION + DATE):
os.makedirs("./log/" + VERSION + DATE)
writer = tf.summary.FileWriter("./log/" + VERSION + " " + DATE)
writer.add_graph(sess.graph)
merged = tf.summary.merge_all()
i = 0
while True:
try:
# The following section is sourced as is from the above link except the Tensorboard summaries
print("Running iteration {}/{}...".format(i, (size // BATCH_SIZE + 1)//6))
if restore:
checkpoint = tf.train.latest_checkpoint(SAVE_PATH)
if checkpoint is not None:
saver.restore(sess=sess, save_path=checkpoint)
restore = False
d_iters = 5
g_iters = 1
for k in range(d_iters):
train_noise = np.random.uniform(-1.0, 1.0, size=[BATCH_SIZE, random_dim]).astype(np.float32)
# sess.run(iterator.initializer)
train_image = sess.run(image_batch)
# wgan clip weights
sess.run(d_clip)
# Update the discriminator
_, dloss, m = sess.run([trainer_d, d_loss, merged],
feed_dict={random_input: train_noise, real_image: train_image, is_train: True})
writer.add_summary(m, i)
# Update the generator
for k in range(g_iters):
train_noise = np.random.uniform(-1.0, 1.0, size=[BATCH_SIZE, random_dim]).astype(np.float32)
img = sess.run(im_sum_batch)
_, gloss, m = sess.run([trainer_g, g_loss, merged],
feed_dict={random_input: train_noise, real_image: img, is_train: True})
writer.add_summary(m, i)
#End of Sourced section
if i % 50 == 0:
# save images
print("saving samples")
if not os.path.exists(output):
os.makedirs(output)
sample_noise = np.random.uniform(-1.0, 1.0, size=[BATCH_SIZE, random_dim]).astype(np.float32)
imgtest = sess.run(fake_image, feed_dict={random_input: sample_noise, is_train: False})
imgtest = ((imgtest + 1.) / 2.) # Keep same scale and floats but remove negatives
imgtest = imgtest * 255
imgtest = tf.cast(imgtest, dtype=tf.uint8)
imgtest = sess.run(imgtest)
n = 0
for img in imgtest:
path = output + "/" + str(i) + "-" + str(n) + ".png"
imageio.imwrite(path, img)
n += 1
if not os.path.exists(output + "/grid"):
os.makedirs(output + "/grid")
path = output + "/grid/" + str(i) + ".png"
imageio.imwrite(path, util.create_grid(imgtest, HEIGHT, WIDTH, BATCH_SIZE))
if i % 250 == 0:
if not os.path.exists(SAVE_PATH):
os.makedirs(SAVE_PATH)
saver.save(sess=sess, save_path=SAVE_PATH + "VAE", global_step=i)
i += 1
except tf.errors.OutOfRangeError:
print("Training Complete")
break
print("Generating Final Output")
output = output + "/final_" + VERSION
if not os.path.exists(output):
os.makedirs(output)
for x in range(5):
try:
out_enc = np.random.standard_normal(size=(64, 128, 128, 3))
final_out = sess.run(fake_image, feed_dict={input: out_enc, is_train: False})
final_out = ((final_out + 1.) / 2.) # Keep same scale and floats but remove negatives
final_out = final_out * 255
final_out = tf.cast(final_out, dtype=tf.uint8)
final_out = sess.run(final_out)
n = 0
for img in final_out:
path = output + "/" + str(i) + "-" + str(n) + ".png"
imageio.imwrite(path, img)
n += 1
if not os.path.exists(output + "/grid"):
os.makedirs(output + "/grid")
path = output + "/grid/" + str(i) + ".png"
imageio.imwrite(path, util.create_grid(imgtest, HEIGHT, WIDTH, BATCH_SIZE))
except tf.errors.OutOfRangeError:
print("Finished Generating output")
break
if __name__ == "__main__":
output = "./Project Out/" + VERSION + "/" + DATE
util.save_params(output, LEARNING_RATE, BATCH_SIZE, DATA_REPEATS, DROPOUT_RATE, EPOCHS, DISTORTED)
train(output, RESTORE)
|
[
"ntb15144@uni.strath.ac.uk"
] |
ntb15144@uni.strath.ac.uk
|
323ae2986aeb577ac7207ed8bc111206556ec27d
|
fbf7929ede740a416362e40b3b0d44d2b823c14e
|
/distinct_occure_helpers.py
|
f1b794b0ee5a00fe9dcf5314be1f2033a3856710
|
[
"MIT"
] |
permissive
|
martynaut/mirnaome_somatic_mutations
|
f588545c57871c0125656445cc66198f04c98895
|
b7e332d56ee17c0b54969db8e515001bf23300f8
|
refs/heads/master
| 2020-04-24T22:33:20.446928
| 2019-08-08T20:14:56
| 2019-08-08T20:14:56
| 172,315,673
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,968
|
py
|
import numpy as np
def set_balance(row, ratio):
if row['ratio 3/(3+5)'] >= ratio:
return '3p'
elif row['ratio 5/(3+5)'] >= ratio:
return '5p'
elif np.isnan(row['reads_3p']) and np.isnan(row['reads_5p']):
return 'unknown'
elif np.isnan(row['reads_3p']):
return '5p'
elif np.isnan(row['reads_5p']):
return '3p'
else:
return 'both'
def find_in_mirna(row, df_loc):
if df_loc[
(df_loc['chrom'] == row['chrom']) &
(df_loc['start'] <= row['pos']) &
(df_loc['orientation'] == row['orient_loc']) &
(df_loc['stop'] >= row['pos'])].shape[0] != 0:
temp = df_loc[
(df_loc['chrom'] == row['chrom']) &
(df_loc['start'] <= row['pos']) &
(df_loc['orientation'] == row['orient_loc']) &
(df_loc['stop'] >= row['pos'])].values[0]
if row['orient_loc'] == '+':
start = row['pos'] - temp[2] + 1
stop = row['pos'] - temp[3] - 1
else:
start = -(row['pos'] - temp[3] - 1)
stop = -(row['pos'] - temp[2] + 1)
localiz = [start, stop]
else:
localiz = [np.nan,
np.nan]
return localiz
def find_arm(row):
if row['-/+'] == '+':
if row['start'] - row['start_pre'] < row['stop_pre'] - row['stop']:
return '5p'
else:
return '3p'
if row['-/+'] == '-':
if row['start'] - row['start_pre'] < row['stop_pre'] - row['stop']:
return '3p'
else:
return '5p'
def from_start(row, column_start, column_stop):
if row['orient_loc'] == '+':
return row['pos'] - row[column_start] + 1
else:
return row[column_stop] - row['pos'] + 1
def from_end(row, column_stop, column_start):
if row['orient_loc'] == '+':
return row['pos'] - row[column_stop] - 1
else:
return row[column_start] - row['pos'] - 1
def find_localization(row, df_loc):
# fix values that were not in reference
if row['name'].lower() == 'hsa-mir-4477b' and \
row['start'] == 63819560 and \
row['stop'] == 63819669:
row['Strand'] = '+'
elif row['name'].lower() == 'hsa-mir-6723':
row['Strand'] = '-'
elif row['name'].lower() == 'hsa-mir-3656':
row['Strand'] = '+'
if (type(row['Strand']) != str and
df_loc[(df_loc['name'].str.contains(row['name'].lower())) &
(df_loc['chrom'] == row['chrom']) &
(df_loc['start'] <= row['pos']) &
(df_loc['stop'] >= row['pos'])].shape[0] != 0):
localiz = df_loc[(df_loc['name'].str.contains(row['name'].lower())) &
(df_loc['chrom'] == row['chrom']) &
(df_loc['start'] <= row['pos']) &
(df_loc['stop'] >= row['pos'])].values[0]
elif df_loc[(df_loc['name'].str.contains(row['name'].lower())) &
(df_loc['chrom'] == row['chrom']) &
(df_loc['start'] <= row['pos']) &
(df_loc['stop'] >= row['pos']) &
(df_loc['orientation'] == row['Strand'])].shape[0] != 0:
localiz = df_loc[(df_loc['name'].str.contains(row['name'].lower())) &
(df_loc['chrom'] == row['chrom']) &
(df_loc['start'] <= row['pos']) &
(df_loc['stop'] >= row['pos']) &
(df_loc['orientation'] == row['Strand'])].values[0]
else:
localiz = [np.nan,
np.nan,
np.nan,
np.nan,
np.nan,
np.nan]
return localiz
def if_complex(row, complex_df):
if complex_df[(complex_df['chrom'] == row['chrom']) &
(complex_df['start'] == row['start']) &
(complex_df['stop'] == row['stop']) &
(complex_df['gene'] == row['gene']) &
(complex_df['seq_type'] == row['seq_type'])].shape[0] != 0:
values = complex_df[(complex_df['chrom'] == row['chrom']) &
(complex_df['start'] == row['start']) &
(complex_df['stop'] == row['stop']) &
(complex_df['gene'] == row['gene']) &
(complex_df['seq_type'] == row['seq_type'])]['complex'].unique()
if 1 in values:
return 1
else:
return 0
else:
return 0
def concat_ints(col):
row = list(col.values)
new_row = []
for x in row:
new_row.append(str(x))
return '"' + ':'.join(new_row) + '"'
def concat_alg(col):
row = list(col.values)
new_row = []
for x in row:
new_row.append(str(x))
new_row = sorted(set(new_row))
return '"' + ':'.join(new_row) + '"'
def type_of_mutation(row):
if len(row['ref']) > len(row['alt']):
return 'del'
elif len(row['ref']) == len(row['alt']):
return 'subst'
elif ',' in row['alt']:
return 'subst'
else:
return 'ins'
def take_from_coord(coordinates, column_name, row):
return coordinates[(coordinates['chr'] == row['chrom']) &
(coordinates['start'] < int(row['pos'])) &
(coordinates['stop'] > int(row['pos']))][column_name].values[0]
def seq_type(value, list_df):
if 'hsa-' in value:
return 'mirna'
elif value in list_df:
return 'cancer_exome'
else:
return 'not_defined'
def subst_type(row):
if row['mutation_type'] == 'subst':
if (((row['ref'] in ['A', 'G']) and (row['alt'] in ['A', 'G'])) or
((row['ref'] in ['C', 'T']) and (row['alt'] in ['C', 'T']))):
return 'transition'
else:
return 'transversion'
else:
return 'n.a.'
|
[
"martyna.urbanek@gmail.com"
] |
martyna.urbanek@gmail.com
|
b4c799535b5e0995f3c1d6c81b0b0eaede036e40
|
19200bc6b36e33f9dec1394b90ff41272601b16d
|
/gistApi/asgi.py
|
d22173ac28e314752aaea0d52dc74a9b1c28f963
|
[] |
no_license
|
AnoshaRehan/github-gist-api-django
|
103c8f0656899cd6cf068ea7945f29a51eb90e56
|
4c0a97f7eebca276de050ae071aabe40331806e1
|
refs/heads/main
| 2023-03-02T09:48:38.647915
| 2021-02-14T08:19:04
| 2021-02-14T08:19:04
| 338,624,345
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 391
|
py
|
"""
ASGI config for gistApi project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'gistApi.settings')
application = get_asgi_application()
|
[
"anosha.rehan@lovefordata.com"
] |
anosha.rehan@lovefordata.com
|
5f2b378d006e7aa2e46251661e0d4e03d3b9810f
|
d452e34253561a47b974e260dabd8dcda6e750a2
|
/supervised_learning/0x0B-face_verification/5-main.py
|
0859b3e7ecf4dc518afbab30ba555f77a521f265
|
[] |
no_license
|
JohnCook17/holbertonschool-machine_learning
|
57fcb5b9d351826c3e3d5478b3b4fbe16cdfac9f
|
4200798bdbbe828db94e5585b62a595e3a96c3e6
|
refs/heads/master
| 2021-07-07T10:16:21.583107
| 2021-04-11T20:38:33
| 2021-04-11T20:38:33
| 255,424,823
| 3
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 554
|
py
|
#!/usr/bin/env python3
from align import FaceAlign
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
from matplotlib.patches import Circle
import numpy as np
fa = FaceAlign('models/landmarks.dat')
test_img = mpimg.imread('HBTN/KirenSrinivasan.jpg')
anchors = np.array([[0.194157, 0.16926692], [0.7888591, 0.15817115], [0.4949509, 0.5144414]], dtype=np.float32)
aligned = fa.align(test_img, np.array([36, 45, 33]), anchors, 96)
plt.imshow(aligned)
ax = plt.gca()
for anchor in anchors:
ax.add_patch(Circle(anchor * 96, 1))
plt.show()
|
[
"jcook0017@gmail.com"
] |
jcook0017@gmail.com
|
d5b8181aea069440370a3630b8e9b4e47a43870f
|
72c9e235b19b80d9e332c1f19e1c4e1e28ff2cac
|
/craw/spider/LJ_parser.py
|
c2d8b50614943d46663ac37ac69ad4d2e39a7455
|
[] |
no_license
|
VinceLim68/python-craw
|
56a1299b4e3ac55a3690946a6f4ff8c2c1ef5b04
|
37d1570ee10d080e55b5b8cf885b7e7a3b00c81d
|
refs/heads/master
| 2021-01-20T06:12:10.753799
| 2017-10-28T13:56:30
| 2017-10-28T13:56:30
| 89,851,102
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,521
|
py
|
#coding:utf-8
from html_parser import HtmlParser
import mytools
import urllib2
from bs4 import BeautifulSoup
import re
class LjParser(HtmlParser):
# ่ฟไบๆฏ2016.11.17้ๅ็๏ผๅๆฅ้ฃไธชๅบ็ฐไบ่งฃๆ้่ฏฏ
def _get_new_datas(self,soup):
page_datas = []
titles = soup.select("div.title > a")
houseinfo = soup.select("div.houseInfo")
positionInfo = soup.select("div.positionInfo")
totalprices = soup.select("div.totalPrice")
for title,info,position,totalPrice in zip(titles,houseinfo,positionInfo,totalprices):
# each_dataๆไบ้่ฆ่ฎพ็ฝฎๅๅงๅผ
each_data = {'builded_year':0,'spatial_arrangement':'','floor_index':0,'total_floor':0}
each_data['title'] = title.get_text()
each_data['details_url'] = title.get('href')
each_data['total_price'] = int(round(float(re.search('(\d+.?\d+)ไธ'.decode('utf8'),totalPrice.get_text()).groups(0)[0]),0))
info_item = (info.get_text().split('|'))
each_data['community_name'] = info_item[0].strip() # ็ฌฌ1ไธชๆปๆฏๅฐๅบๅ็งฐ
for i in range(1,len(info_item)):
d1 = {}
d1 = self.parse_item(info_item[i].strip())
if d1.has_key('advantage') and each_data.has_key('advantage'):
d1['advantage'] = each_data['advantage'] + ',' + d1['advantage']
each_data = dict(each_data, **d1)
position = position.get_text().replace('\t','').replace('\n','').split()
each_data['block'] = position[-1]
if ')' not in position[0]: #้พๅ็ๅซๅข
ไผ็จ'4ๅฑ2008ๅนดๅปบ'็ๅฝขๅผ๏ผๅ ๅ
ฅ')'๏ผไปฅไพฟๅ้
position[0] = position[0].replace('ๅฑ', 'ๅฑ)')
for item in position[0].split(')'): #2017.4.1้พๅฎถๆ ผๅผๆๆน
d1 = {}
# d1 = self.parse_item(position[i].strip())
d1 = self.parse_item(item.strip()) #2017.4.1้พๅฎถๆ ผๅผๆๆน
each_data = dict(each_data, **d1)
each_data['price'] = float(each_data['total_price']*10000/each_data['area'])
each_data['from'] = "lianjia"
each_data = self.pipe(each_data)
if each_data.has_key('total_floor') and each_data.has_key('total_price') and each_data.has_key('area') and each_data.has_key('community_name'):
page_datas.append(each_data)
else:
if mytools.ShowInvalideData(each_data):page_datas.append(each_data)
return page_datas
def _get_new_urls(self , soup):
new_urls = set()
# links = soup.select("div.page-box")
links = soup.select("div.house-lst-page-box") #2016.11.11ไฟฎๆน๏ผ็ฝ้กตๆนไบ
if len(links) == 0 :
print "Only 1 page!!"
else:
t_page = eval(links[0].get('page-data'))['totalPage']
url = links[0].get('page-url')
for i in range(1,t_page+1):
new_urls.add("http://xm.lianjia.com" + url.replace("{page}",str(i)))
return new_urls
def _ischeck(self,soup):
# ๅคๆญๆฏๅฆๆฏ้ช่ฏ็้ข
ischeck = soup.select("title")
if len(ischeck) > 0: #ๅฆๆๆพไธๅฐtitle,ๅฐฑ่ฎคไธบไธๆฏ้ช่ฏ็้ข
iscode = ischeck[0].get_text().strip() == "้ช่ฏๅผๅธธๆต้-้พๅฎถ็ฝ"
else:
iscode = False
return iscode
|
[
"1500725439@qq.com"
] |
1500725439@qq.com
|
7508ed13cb989f8e06150d4a366684e8cb626f4c
|
890c8b8e90e516a5a3880eca9b2d217662fe7d84
|
/armulator/armv6/opcodes/abstract_opcodes/usad8.py
|
6568222b03d6464463dd16b171bf86a89484d155
|
[
"MIT"
] |
permissive
|
doronz88/armulator
|
b864135996f876c7857b79a314d4aa06cc19c549
|
0294feac2785c8947e5943ac0c34f941ee4b5fff
|
refs/heads/master
| 2022-11-05T08:14:42.405335
| 2020-06-18T23:53:17
| 2020-06-18T23:53:17
| 273,363,061
| 2
| 0
| null | 2020-06-18T23:51:03
| 2020-06-18T23:51:02
| null |
UTF-8
|
Python
| false
| false
| 1,008
|
py
|
from armulator.armv6.opcodes.abstract_opcode import AbstractOpcode
from bitstring import BitArray
class Usad8(AbstractOpcode):
def __init__(self, m, d, n):
super(Usad8, self).__init__()
self.m = m
self.d = d
self.n = n
def execute(self, processor):
if processor.condition_passed():
absdiff1 = abs(
processor.registers.get(self.n)[24:32].uint - processor.registers.get(self.m)[24:32].uint)
absdiff2 = abs(
processor.registers.get(self.n)[16:24].uint - processor.registers.get(self.m)[16:24].uint)
absdiff3 = abs(
processor.registers.get(self.n)[8:16].uint - processor.registers.get(self.m)[8:16].uint)
absdiff4 = abs(
processor.registers.get(self.n)[0:8].uint - processor.registers.get(self.m)[0:8].uint)
result = absdiff1 + absdiff2 + absdiff3 + absdiff4
processor.registers.set(self.d, BitArray(uint=result, length=32))
|
[
"matan1008@gmail.com"
] |
matan1008@gmail.com
|
8b5c5c6b77660de1cffa41c624fe3f6e1c9a7840
|
fa88ee925ec2a3ca1def17f1518d6b3e59cab197
|
/Database Preprocessing/prepare_protein_data.py
|
f8ba54116deca63a435cf94336b6c7590a31e788
|
[] |
no_license
|
asadahmedtech/DEELIG
|
20d915851bc053db7647d94e3ee61570524339d7
|
3a3993fc903c40f1ce904111c8e085c79fb45df6
|
refs/heads/master
| 2023-05-19T21:53:18.196879
| 2021-06-09T14:15:18
| 2021-06-09T14:15:18
| 264,231,113
| 9
| 5
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,353
|
py
|
from Bio import *
import os
#PDB Parser
from Bio.PDB.PDBParser import PDBParser
from Bio.PDB.DSSP import DSSP
from Bio.PDB.NACCESS import run_naccess, process_rsa_data
import pickle
#Labels for files
"""Secondary Structure in Millers Format
H - Alpha Helix (4-12) = 1
B - Isolated Beta Bridge residue = 2
E - Strand = 3
G - 3-10 helix = 4
I - Pi Helix = 5
T - Turn = 6
S - bend = 7
- - None = 0
"""
SS_Labels = {'H' : 1, 'B' : 2, 'E' : 3, 'G' : 4, 'I' : 5, 'T' : 6, 'S' : 7, '-' : 0}
"""Relactive Solvent Accessiblity (RSA)
Threshold = 25
Exposed (> Threshold) = 1
Burried (<= Threshold) = 0
"""
RSA_Threshold = 25
def parse_PSSM(file, path = '/home/binnu/Asad/dataset/new_db/pssm/'):
pssm = {}
with open(os.path.join(path, file), 'r') as f:
lines = f.readlines()
# lines = [i.split() if(len(i.split()) == 44) for i in lines]
lines_new = []
for i in lines:
i = i.split()
if(len(i) == 44):
lines_new.append(i)
lines_new = [i[:22] for i in lines_new]
for i in lines_new:
scores = i[2:]
scores = [int(temp_i) for temp_i in scores]
pssm[i[0]] = scores
# print(pssm)
return pssm
def calc_features(PATH, pdb_ligand_ID, OUTPATH):
#Loading the files
parser = PDBParser(PERMISSIVE = 1)
PDB_id = pdb_ligand_ID[:4].lower() #+ '_pocket'
filename = os.path.join(PATH, PDB_id + ".pdb")
structure = parser.get_structure(PDB_id, filename)
model = structure[0]
#DSSP Analysis for SS, PHI, PSI
dssp = DSSP(model, filename)
#NACCESS Analysis for SASA
rsa, asa = run_naccess(model, filename)
rsa = process_rsa_data(rsa)
# print(rsa)
#Feature mapping to each atomic coordinate
dssp_present, dssp_not_present = 0, 0
feature = dict() #The feature dictionary
for model in structure:
for chain in model:
if(chain.get_full_id()[2] == pdb_ligand_ID.split('_')[2]):
pssm_ID = chain.get_full_id()[0][:4].upper() + '_' + chain.get_full_id()[2]
pssm = parse_PSSM(pssm_ID)
start = True
gap = 0
idx_prev = 0
for residue in chain:
# if(start):
# start_idx =residue.get_full_id()[3][1]
# idx_prev = 0
idx = residue.get_full_id()[3][1]
if(idx < 1):
print(idx)
a = 0
pass
elif(idx - idx_prev >= 1):
print(idx)
a = 1
gap += idx - idx_prev -1
# elif(start):
# gap += -1
# start = False
for atom in residue:
# print(atom.get_full_id())
ID = (atom.get_full_id()[2], atom.get_full_id()[3])
if(ID in list(dssp.keys())):
if(rsa[ID]["all_atoms_abs"] > RSA_Threshold):
rsa_label = 1
else:
rsa_label = 0
print(gap, atom.get_full_id()[3][1], a)
feat = (SS_Labels[dssp[ID][2]], dssp[ID][4]/360, dssp[ID][5]/360, rsa_label) + tuple(pssm[str(atom.get_full_id()[3][1] - gap)])
feature[tuple(atom.get_coord())] = feat
print(pdb_ligand_ID[:4], ID, atom.get_coord(), feat)
dssp_present += 1
else:
print(">>> ID not present : ", atom.get_full_id())
dssp_not_present += 1
idx_prev = idx
#Printing the Stats
print("===> STATS : PDBID : %s , DSSP PRESENT : %s , DSSP NOT PRESENT : %s"%(PDB_id, dssp_present, dssp_not_present))
#Saving the feature to each PDB file
with open(os.path.join(OUTPATH, pdb_ligand_ID + ".dat"), "wb+") as f:
pickle.dump(feature, f)
print("====> Dump completed")
if __name__ == '__main__':
input_dir = '/home/binnu/Asad/dataset/new_db/protein_pdb/'
output_dir = "/home/binnu/Asad/dataset/new_db/protein_pdb_featurized/"
IDs = '/home/binnu/Asad/dataset/new_db/PDB_ligands_chain_ID_pssm_Admet_padel.txt'
files = open(IDs, 'r')
files = files.readlines()
files = [i[:-1] for i in files]
files_done = os.listdir(output_dir)
files_done = [i[:-4] for i in files_done]
print(files_done)
naccess_error = ['5FQD_LVY', '4EJG_NCT', '3N7A_FA1' , '2IJ7_TPF', '4EJH_0QA','2QJY_SMA','1WPG_TG1', '2A06_SMA','4UHL_VFV','3N8K_D1X','5FV9_Y6W','3N75_G4P','3B8H_NAD','3B82_NAD','3B78_NAD']
for file in files:
if(file not in files_done) and file.split('_')[0] + '_' + file.split('_')[1] not in naccess_error:
print("==> Featurizing : ", file)
calc_features(input_dir, file, output_dir)
|
[
"aahmed3@student.nitw.ac.in"
] |
aahmed3@student.nitw.ac.in
|
844abc176cf2c1ec5d48f1e98a0747b94eccebec
|
004b7726cfbedd6ecb6a1dec455211f1b1635919
|
/OCR_Test/gendata.py
|
8b5a276f17c703b35952b97c6ac1e980d94f708b
|
[] |
no_license
|
bluedott/p3_fuckup
|
e1cafabeda43b2d190836bd994c4af5d5b8c2379
|
6ff119cded8c30ef3acfc02c5ecefaa4d9178520
|
refs/heads/main
| 2023-01-22T00:16:01.252835
| 2020-12-07T03:00:45
| 2020-12-07T03:00:45
| 319,187,963
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,790
|
py
|
# GenData.py
import sys
import numpy as np
import cv2
import os
# module level variables ##########################################################################
MIN_CONTOUR_AREA = 100
RESIZED_IMAGE_WIDTH = 20
RESIZED_IMAGE_HEIGHT = 30
###################################################################################################
def main():
imgTrainingNumbers = cv2.imread("training_chars_small_test.png") # read in training numbers image
if imgTrainingNumbers is None: # if image was not read successfully
print ("error: image not read from file \n\n") # print error message to std out
os.system("pause") # pause so user can see error message
return # and exit function (which exits program)
# end if
imgGray = cv2.cvtColor(imgTrainingNumbers, cv2.COLOR_BGR2GRAY) # get grayscale image
imgBlurred = cv2.GaussianBlur(imgGray, (5,5), 0) # blur
# filter image from grayscale to black and white
imgThresh = cv2.adaptiveThreshold(imgBlurred, # input image
255, # make pixels that pass the threshold full white
cv2.ADAPTIVE_THRESH_GAUSSIAN_C, # use gaussian rather than mean, seems to give better results
cv2.THRESH_BINARY_INV, # invert so foreground will be white, background will be black
11, # size of a pixel neighborhood used to calculate threshold value
2) # constant subtracted from the mean or weighted mean
cv2.imshow("imgThresh", imgThresh) # show threshold image for reference
imgThreshCopy = imgThresh.copy() # make a copy of the thresh image, this in necessary b/c findContours modifies the image
imgContours, npaContours, npaHierarchy = cv2.findContours(imgThreshCopy, # input image, make sure to use a copy since the function will modify this image in the course of finding contours
cv2.RETR_EXTERNAL, # retrieve the outermost contours only
cv2.CHAIN_APPROX_SIMPLE) # compress horizontal, vertical, and diagonal segments and leave only their end points
# declare empty numpy array, we will use this to write to file later
# zero rows, enough cols to hold all image data
npaFlattenedImages = np.empty((0, RESIZED_IMAGE_WIDTH * RESIZED_IMAGE_HEIGHT))
intClassifications = [] # declare empty classifications list, this will be our list of how we are classifying our chars from user input, we will write to file at the end
# possible chars we are interested in are digits 0 through 9, put these in list intValidChars
intValidChars = [ord('0'), ord('1'), ord('2'), ord('3'), ord('4'), ord('5'), ord('6'), ord('7'), ord('8'), ord('9'),
ord('A'), ord('B'), ord('C'), ord('D'), ord('E'), ord('F'), ord('G'), ord('H'), ord('I'), ord('J'),
ord('K'), ord('L'), ord('M'), ord('N'), ord('O'), ord('P'), ord('Q'), ord('R'), ord('S'), ord('T'),
ord('U'), ord('V'), ord('W'), ord('X'), ord('Y'), ord('Z')]
for npaContour in npaContours: # for each contour
if cv2.contourArea(npaContour) > MIN_CONTOUR_AREA: # if contour is big enough to consider
[intX, intY, intW, intH] = cv2.boundingRect(npaContour) # get and break out bounding rect
# draw rectangle around each contour as we ask user for input
cv2.rectangle(imgTrainingNumbers, # draw rectangle on original training image
(intX, intY), # upper left corner
(intX+intW,intY+intH), # lower right corner
(0, 0, 255), # red
2) # thickness
imgROI = imgThresh[intY:intY+intH, intX:intX+intW] # crop char out of threshold image
imgROIResized = cv2.resize(imgROI, (RESIZED_IMAGE_WIDTH, RESIZED_IMAGE_HEIGHT)) # resize image, this will be more consistent for recognition and storage
cv2.imshow("imgROI", imgROI) # show cropped out char for reference
cv2.imshow("imgROIResized", imgROIResized) # show resized image for reference
cv2.imshow("training_numbers.png", imgTrainingNumbers) # show training numbers image, this will now have red rectangles drawn on it
intChar = cv2.waitKey(0) # get key press
if intChar == 27: # if esc key was pressed
sys.exit() # exit program
elif intChar in intValidChars: # else if the char is in the list of chars we are looking for . . .
intClassifications.append(intChar) # append classification char to integer list of chars (we will convert to float later before writing to file)
npaFlattenedImage = imgROIResized.reshape((1, RESIZED_IMAGE_WIDTH * RESIZED_IMAGE_HEIGHT)) # flatten image to 1d numpy array so we can write to file later
npaFlattenedImages = np.append(npaFlattenedImages, npaFlattenedImage, 0) # add current flattened impage numpy array to list of flattened image numpy arrays
# end if
# end if
# end for
fltClassifications = np.array(intClassifications, np.float32) # convert classifications list of ints to numpy array of floats
npaClassifications = fltClassifications.reshape((fltClassifications.size, 1)) # flatten numpy array of floats to 1d so we can write to file later
print ("\n\ntraining complete !!\n")
np.savetxt("classifications2.txt", npaClassifications) # write flattened images to file
np.savetxt("flattened_images2.txt", npaFlattenedImages) #
cv2.destroyAllWindows() # remove windows from memory
return
###################################################################################################
if __name__ == "__main__":
main()
# end if
|
[
"34089891+bluedott@users.noreply.github.com"
] |
34089891+bluedott@users.noreply.github.com
|
84670e385fe2ed6a8e0c8a66732d455455589e1c
|
5b5bac2aa1104bd7afe27527dc53c260667b4de1
|
/spotify_convert/migrations/0011_remove_addedsong_spotify_name.py
|
555b6a34320e1e011c126db41348d3376c956b96
|
[] |
no_license
|
mvasiliou/spotify-convert-app
|
4bf86443ce9c216dc8a9bcbf3f7c81ff01828175
|
876e456c956fd0aeae65dc1db9f683373bfa9c9e
|
refs/heads/master
| 2021-01-19T13:37:19.060429
| 2017-02-18T17:25:00
| 2017-02-18T17:25:00
| 82,402,537
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 409
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2017-02-15 22:01
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('spotify_convert', '0010_auto_20170215_1712'),
]
operations = [
migrations.RemoveField(
model_name='addedsong',
name='spotify_name',
),
]
|
[
"mvasiliou94@gmail.com"
] |
mvasiliou94@gmail.com
|
ce9f9a67e6de02ba78da7819553047f0a148bb68
|
9bcb054fa53e1ff749f2ba06c7477b60c0a1dccf
|
/internet connection check.py
|
025e54ba14c6fdfcd2dcc55e5a6b72e06dbf6956
|
[] |
no_license
|
thatscoding/Hacktoberfest-2023
|
e67594420d39340095dde77e02bcf20353a5f645
|
40cfc950487e040b192a304a9d387dda2988845b
|
refs/heads/main
| 2023-08-28T07:28:41.110272
| 2021-10-30T11:48:00
| 2021-10-30T11:48:00
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 596
|
py
|
import requests
from requests.exceptions import ConnectionError
def internet_connection_test():
url = 'https://www.google.com/'
print(f'Attempting to connect to {url} to determine internet connection status.')
try:
print(url)
resp = requests.get(url, timeout = 10)
resp.text
resp.status_code
print(f'Connection to {url} was successful.')
return True
except ConnectionError as e:
requests.ConnectionError
print(f'Failed to connect to {url}.')
return False
except:
print(f'Failed with unparsed reason.')
return False
internet_connection_test()
|
[
"noreply@github.com"
] |
thatscoding.noreply@github.com
|
cfbc73e87a1f617dd5d05185217fead9b365b1cd
|
8c0e6de3a106ef148dd2994973f3f5cb807c70eb
|
/General Programing/Documentation Hub/imageviewer.py
|
ac39a35f9c94c5dffe56d0b6d169a8e29ad2f0fd
|
[] |
no_license
|
TKprotich/Practices-and-Experimenting
|
8f8928cc43e6cb9996b064d66dc6783fc8edf362
|
211e11f3f52b36dd6dc944d3b503c81d412acb4b
|
refs/heads/main
| 2023-01-11T00:12:14.404189
| 2020-11-15T13:41:24
| 2020-11-15T13:41:24
| 304,259,482
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 180
|
py
|
import pyglet
window = pyglet.window.Window()
image = pyglet.resource.image('workout.JPG')
@window.event
def on_draw():
window.clear()
image.blit(0, 0)
pyglet.app.run()
|
[
"marchemjor@gmail.com"
] |
marchemjor@gmail.com
|
4e1ab212e6ce46a3bc3a610e4685b7449006b6ef
|
ad9a1711ec2d36b57576b30a5550a481bc07452a
|
/PANDAS/ser_02.py
|
f5d076631ff20dc4605d988fb89dd07e2abd6ebf
|
[] |
no_license
|
Harshad06/Python_programs
|
5ca0ba3ba8cbccbd39215efec96fb7b68307aa5a
|
ad908e59425b41c1bed032aaf9a115e123fd46c4
|
refs/heads/master
| 2023-06-22T07:44:45.435258
| 2021-07-26T08:16:22
| 2021-07-26T08:16:22
| 347,367,223
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 504
|
py
|
# porgam to depict deep copy
# in pandas series
# import module
import pandas as pd
# assign series
ser = pd.Series(['Mandy', 'Ron', 'Jacob', 'Bayek'])
# shallow copy
copyser = ser.copy(deep=True)
# comparing deep copied series
# and original series
print('\nBefore Operation:\n', copyser == ser)
# assignmnet operation
copyser[2] = 'Geeks'
# comparing deep copied series
# and original series
print('\nAfter Operation:\n', copyser == ser)
print('\nOriginal Dataframe after operation:\n', ser)
|
[
"harshad.shringi@gmail.com"
] |
harshad.shringi@gmail.com
|
3f45f7cee6d8468efa1e1fb46e91f0292b773b93
|
22295cda10cf11472fee987093e0b245f6f96ef3
|
/common/infohistology/band073_tracks.py
|
bd72af8bb7165ed2ca221d979e82efa7299e791e
|
[] |
no_license
|
sjara/jaratest
|
aecb9e3bcc1ff91db35e7cd551c0f4f3da0b690a
|
09bf2c76bd5bf45191a2c37c14171ae1e8902c4b
|
refs/heads/master
| 2023-08-11T09:55:17.684814
| 2023-08-03T22:03:31
| 2023-08-03T22:03:31
| 63,100,718
| 2
| 5
| null | 2023-04-11T18:14:08
| 2016-07-11T20:43:04
|
Python
|
UTF-8
|
Python
| false
| false
| 1,697
|
py
|
'''
TODO:
- add reference to type of recording device used? e.g. tetrode vs silicon probe, probe geometry, etc.
- 'shank' is very specific to silicon probes, replace with more generic info key?
'''
subject = 'band073'
tracks = [
{'subject':subject, 'brainArea':'LeftAC', 'histImage':'p1-C5-03',
'recordingTrack':'midlateralDiO', 'shank':1, 'atlasZ':180},
{'subject':subject, 'brainArea':'LeftAC', 'histImage':'p1_D1-03',
'recordingTrack':'medialDiD', 'shank':1, 'atlasZ':185},
{'subject':subject, 'brainArea':'LeftAC', 'histImage':'p1_D5-03',
'recordingTrack':'medialDiD', 'shank':2, 'atlasZ':193},
{'subject':subject, 'brainArea':'LeftAC', 'histImage':'p1-D2-03',
'recordingTrack':'midlateralDiO', 'shank':2, 'atlasZ':188},
{'subject':subject, 'brainArea':'LeftAC', 'histImage':'p2_A1-03',
'recordingTrack':'medialDiD', 'shank':3, 'atlasZ':200},
{'subject':subject, 'brainArea':'LeftAC', 'histImage':'p2_A5-03',
'recordingTrack':'medialDiD', 'shank':4, 'atlasZ':208},
{'subject':subject, 'brainArea':'RightAC', 'histImage':'p2-A2-02',
'recordingTrack':'middleDiO', 'shank':4, 'atlasZ':201},
{'subject':subject, 'brainArea':'RightAC', 'histImage':'p2-A5-02',
'recordingTrack':'middleDiO', 'shank':3, 'atlasZ':206},
{'subject':subject, 'brainArea':'RightAC', 'histImage':'p2-B3-02',
'recordingTrack':'middleDiO', 'shank':2, 'atlasZ':212},
{'subject':subject, 'brainArea':'RightAC', 'histImage':'p2-B5-02',
'recordingTrack':'medialDiD', 'shank':1, 'atlasZ':219},
{'subject':subject, 'brainArea':'RightAC', 'histImage':'p2-C2-02',
'recordingTrack':'middleDiO', 'shank':1, 'atlasZ':225},
]
|
[
"jarauser@jararigtest"
] |
jarauser@jararigtest
|
664fd5e22572825ff02d5842fce584a415d850a8
|
62bdde43ce88507530610a2b77d2ce0859eebc8b
|
/SWEA/queue/L5105-๋ฏธ๋ก์_๊ฑฐ๋ฆฌ/L5105-๋ฏธ๋ก์_๊ฑฐ๋ฆฌ-jiwoong.py
|
9ca4aecca30d1d8a77e4b2053a655343efd4a697
|
[] |
no_license
|
j2woong1/algo-itzy
|
6810f688654105cf4aefda3b0876f714ca8cbd08
|
7cf6cd8383dd8e9ca63f605609aab003790e1565
|
refs/heads/master
| 2023-08-21T12:51:54.874102
| 2021-10-03T04:35:21
| 2021-10-03T04:35:21
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,008
|
py
|
def bfs(x, y, z, visited):
# ์ํ์ข์ฐ
dx = [0, 1, 0, -1]
dy = [-1, 0, 1, 0]
# ํ์ฌ ์์น queue
q = [(x, y, z)]
# ํ๊ฐ ๋น ๋๊น์ง
while q:
curr_x, curr_y, curr_z = q.pop(0)
visited[curr_y][curr_x] = 1
for i in range(4):
# ๋๊ฐ์ง ์์ผ๋ฉด
if 0 <= curr_y + dy[i] < N and 0 <= curr_x + dx[i] < N:
new_x = curr_x + dx[i]
new_y = curr_y + dy[i]
if arr[new_y][new_x] == 3:
return curr_z
if not arr[new_y][new_x] and not visited[new_y][new_x]:
q.append((new_x, new_y, curr_z + 1))
return 0
T = int(input())
for tc in range(1, T + 1):
N = int(input())
arr = []
for i in range(N):
arr.append(list(map(int, input())))
visited = [[0] * N for _ in range(N)]
for i in range(N):
for j in range(N):
if arr[i][j] == 2:
print(f'#{tc} {bfs(j, i, 0, visited)}')
|
[
"j2woong1@gmail.com"
] |
j2woong1@gmail.com
|
78abb25a1c9ddc2546892cffbfa7b04ce2ad80ee
|
2298959b25d93360be8dc45f00e989cac96b5c5d
|
/ex40.py
|
6f7443d7d3dc28ec948c6a9106ff589391e9d7f1
|
[] |
no_license
|
VigneshMcity/Python
|
6e7cdef7e4cb09eec587ca85073f8171aa9492ec
|
8970368210d6e8c3f6ab1f6e6cf3213dd51b004a
|
refs/heads/master
| 2020-04-14T20:08:27.298067
| 2019-12-13T07:17:04
| 2019-12-13T07:17:04
| 164,083,462
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 435
|
py
|
class Song(object):
def __init__(self, lyrics):
self.lyrics = lyrics
def sing_me_a_song(self):
for line in self.lyrics:
print(line)
happy_bday = Song([
"Happy birthday to you",
"I dont want to get sued",
"So I'll stop right here" ])
bulls_on_parade = Song([
"They rally around the family",
"With pocket full of sheels" ])
happy_bday.sing_me_a_song()
bulls_on_parade.sing_me_a_song()
|
[
"vignesh.rajendran01@infosys.com"
] |
vignesh.rajendran01@infosys.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.