repo_name stringlengths 6 97 | path stringlengths 3 341 | text stringlengths 8 1.02M |
|---|---|---|
dmgav/dpcmaps | dpcmaps/pyspecfile.py | """
SPEC file format writer
... and partial reader, but it's recommended to use Specfile instead
"""
import re
import os
import time
import itertools
import numpy as np
TIME_FORMAT = "%a %b %d %H:%M:%S %Y"
def split_sequence(iterable, size):
    """
    Yield successive chunks (lists) of at most *size* items from *iterable*.

    Works on any iterable (not just lists); the final chunk may be shorter
    than *size*. Yields nothing for an empty iterable.
    """
    iterator = iter(iterable)
    while True:
        chunk = list(itertools.islice(iterator, size))
        if not chunk:
            return
        yield chunk
class SPECFileError(Exception):
    """Base exception for SPEC file reading/writing errors in this module."""

    pass
class SPECFileMotorListError(SPECFileError):
    """Raised when a motor list conflicts with the one already in the file."""

    pass
class SPECFileWriter(object):
    """
    Writes SPEC-format files for a scan.

    Simple example:

        motors = ['m0', 'm1']
        data_names = ['test', 'one', 'two']
        a = SPECFileWriter('test.txt', comment='my comment', motors=motors)
        a.write_scan_start(command='dscan something', seconds=1)
        a.write_motor_positions([3, 4])
        a.write_scan_data_start(data_names)
        for d1, d2, d3 in zip(range(10), range(2, 12), range(10, 20)):
            a.write_scan_data([d1, d2, d3])
        a.finish_scan()

    Remember that SPEC scan counts are 0-based -- this means that 0 data points
    is actually 1 data point in EPICS.
    """

    # Maximum entries per #O (motor name) / #P (motor position) header line
    COLUMNS = 8

    def __init__(self, filename, starting_number=None, comment="", motors=None):
        """
        Open *filename* for writing, appending when it already has content.

        Parameters
        ----------
        filename : str
            Output SPEC file path.
        starting_number : int, optional
            First scan number. Defaults to continuing from the existing
            file's last scan number, or 0 for a new file.
        comment : str
            File header comment (written as #C lines).
        motors : list of str, optional
            Motor names for the #O header lines. When appending, must match
            the motor list already stored in the file.

        Raises
        ------
        SPECFileError
            If *filename* is empty.
        SPECFileMotorListError
            If appending and *motors* does not match the existing file.
        """
        if not filename:
            raise SPECFileError("Must specify filename")
        if motors is None:
            # Avoid the shared-mutable-default-argument pitfall
            motors = []
        self.filename = os.path.abspath(filename)
        self._motors = list(sorted(motors))
        self._comment = comment
        self._data_lines = 0  # how many data lines in the current scan
        # Scan headers are not written until there's at least one data point
        # available. (otherwise, the C-based spec file reader can fail)
        self._buffer_write = False
        self._buffer = []
        if os.path.exists(filename) and os.path.getsize(filename) > 1:
            # Appending: the motor list must agree with the existing header
            if not check_motor_list(filename, motors):
                raise SPECFileMotorListError("Motor list does not match")
            with SPECFileReader(filename) as reader:
                self.start_time = reader.epoch()
                if starting_number is None:
                    self.scan_number = get_last_scan_number(reader)
                else:
                    self.scan_number = starting_number
            print('SPECFileWriter: Appending to "%s"' % filename)
            self._f = open(filename, "at")
            self._fix_ending_newlines(filename)
            self._header_written = True
        else:
            print('SPECFileWriter: New SPEC-format file "%s"' % filename)
            if starting_number is None:
                starting_number = 0
            self.scan_number = starting_number
            self._f = open(filename, "wt")
            self._header_written = False

    def _get_motors(self):
        # Return a copy so callers cannot mutate our internal list
        return list(self._motors)

    def _set_motors(self, motors):
        # The motor list is frozen once the #O header has been written
        if self._header_written:
            if motors != self._motors:
                raise SPECFileMotorListError("Cannot change motor list when header is already written")
            return
        self._motors = list(motors)

    def _fix_ending_newlines(self, filename=None, amount=2):
        """Pad the file so it ends with at least *amount* newline characters."""
        if filename is None:
            filename = self.filename
        if self._f is not None:
            # Flush pending writes so the on-disk trailing-newline count
            # reflects what we have actually written so far.
            self._f.flush()
        count = self._check_end_lines(filename, amount)
        for _ in range(amount - count):
            self.blank_line()

    def _check_end_lines(self, filename, ending=2, char="\n"):
        """Return how many *char* occur in the last *ending* bytes of the file."""
        # Open in binary mode: seeking relative to the end of a text-mode
        # file raises io.UnsupportedOperation in Python 3, which previously
        # made this method always return 0 (and so always padded the file).
        try:
            with open(filename, "rb") as f:
                f.seek(-ending, os.SEEK_END)
                return f.read().decode("ascii", "ignore").count(char)
        except Exception:
            # Missing or too-short file: treat as "no trailing newlines"
            return 0

    def close(self):
        """Close the underlying file (safe to call more than once)."""
        if self._f:
            self._f.close()
            self._f = None

    def write_line(self, line):
        """Write one line, buffering it while a scan header is held back."""
        line = "%s\n" % line
        if self._buffer_write:
            self._buffer.append(line)
        else:
            if self._buffer:
                # First real data arrived -- flush the held-back scan header
                self._f.writelines(self._buffer)
                self._buffer = []
            self._f.write(line)

    def write_info(self, tag, data):
        """Write a '#TAG data' control line."""
        self.write_line("#%s %s" % (tag, data))

    def write_scan_start(self, number=None, command="", scan_info=None, seconds=None):
        """
        Begin a new scan. Writes (buffered until the first data point):

            #S 1 hklscan 0.9 1.1 0 0 0 0 20 1
            #D Wed Feb 17 19:25:55 1994
            #T 1 (Seconds)

        *scan_info* is accepted for interface compatibility but unused.
        """
        if not self._header_written:
            self._write_header(self._comment, self._motors)
        if self._data_lines > 0:
            # The previous scan was never finished explicitly
            self.finish_scan()
        if number is None:
            self.scan_number += 1
            number = self.scan_number
        # Buffer this scan header until data comes in
        self._buffer_write = True
        # S - scan number and command
        self.write_info("S", "%d %s" % (number, command))
        self.write_date()
        if seconds is not None:
            # T - scan/settling time
            self.write_info("T", "%f (Seconds)" % (seconds,))

    def write_scan_data_start(self, columns):
        """Write the #N/#L column-description lines and reset the row count."""
        if columns is None:
            columns = []
        self._data_lines = 0
        # N - Number of columns
        self.write_info("N", len(columns))
        # L - Column names
        # NOTE(review): the SPEC convention separates #L column names with
        # TWO spaces; this writes single spaces -- confirm with consumers.
        self.write_info("L", " ".join(columns))

    def write_motor_positions(self, positions):
        """Write the #P0..#Pn starting motor position lines."""
        if not positions:
            return
        for i, pos in enumerate(split_sequence(positions, self.COLUMNS)):
            # P{num} - starting motor positions (that aren't necessarily being
            # scanned)
            tag = "P%d" % i
            self.write_info(tag, " ".join([str(p) for p in pos]))

    def write_scan_data(self, data):
        """Write one row of scan data (flushes any buffered scan header)."""
        self._buffer_write = False
        self._data_lines += 1
        self.write_line(" ".join([str(d) for d in data]))

    def write_mca_calib(self, a, b, c):
        """
        Write the MCA calibration line.

        Ref: http://www.esrf.eu/blissdb/macros/macdoc.py?macname=saveload.mac
        CALIBRATION = a + b*CHANNEL + c*CHANNEL^2
        """
        self.write_line("#@CALIB %.7g %.7g %.7g" % (a, b, c))

    def write_mca_data(self, data, calibration=None, first=False):
        """Write one MCA spectrum (@A line); *first* also writes the MCA
        header lines (#@MCA / #@CHANN and optionally #@CALIB)."""
        if data is None or np.size(data) == 0:
            return
        self._buffer_write = False
        self._data_lines += 1
        if first:
            # MCA format (full line)
            self.write_line("#@MCA %%%dC" % len(data))
            # number of channels, first idx, last idx, reduction coefficient
            self.write_line("#@CHANN %d 0 %d 1" % (len(data), len(data) - 1))
            # Calibration belongs with the MCA header -- TODO confirm it was
            # not intended to be re-written for every spectrum.
            if calibration:
                self.write_mca_calib(*calibration)
        self.write_line("@A %s" % " ".join([str(d) for d in data]))

    @property
    def date_string(self):
        """Current local time formatted the way SPEC expects (#D lines)."""
        # TODO maybe time.strftime('%c') is more appropriate for other locales?
        return time.strftime(TIME_FORMAT)

    def _write_starting_seconds(self):
        # E - integer epoch seconds of when the file was started
        self.start_time = time.time()
        self.write_info("E", int(self.start_time))

    def write_date(self):
        # D - human-readable date line
        self.write_info("D", self.date_string)

    def write_timestamp(self):
        # NOTE(review): writes #E with a date STRING, while the file header's
        # #E holds integer epoch seconds -- confirm this asymmetry is wanted.
        self.write_info("E", self.date_string)

    def write_scan_aborted(self, points_written=None):
        """Mark the current scan aborted (#C comment) -- or drop it entirely
        if no data point was ever written."""
        self._buffer_write = False
        if points_written is not None:
            self._data_lines = points_written
        if self._data_lines > 0:
            self.write_info("C", "%s. Scan aborted after %d points." % (self.date_string, self._data_lines))
            self.blank_line()
            self._data_lines = 0
            self._f.flush()
        else:
            # Cancelled before it even started. Don't even write the
            # buffered header.
            self._buffer = []

    def blank_line(self):
        """Write an empty line."""
        self.write_line("")

    def _write_header(self, comment="", motors=None):
        """Write the file-level header: #F, #E, #D, #C and #O lines."""
        if motors is None:
            motors = []
        self._header_written = True
        self.write_info("F", self.filename)
        self._write_starting_seconds()
        self.write_date()
        if comment is not None:
            for line in comment.split("\n"):
                self.write_info("C", line)  # TODO multiple lines valid?
        for i, mot in enumerate(split_sequence(motors, self.COLUMNS)):
            # O{num} - motor names (that aren't necessarily being scanned)
            tag = "O%d" % i
            self.write_info(tag, " ".join([str(m) for m in mot]))
        self.blank_line()
        self._data_lines = 0

    def finish_scan(self):
        """End the current scan: pad trailing newlines and flush to disk."""
        self._fix_ending_newlines()
        self._data_lines = 0
        self._f.flush()
class SPECFileReader(object):
    """
    Minimal SPEC-format file reader.

    NOTE: Really untested except for reading the header.
    (just needed a safe, quick way to check the motor list.)
    """

    def __init__(self, filename, parse_data=True):
        """
        Open *filename* and parse its file-level header (#F/#E/#D/#C/#O).

        Parameters
        ----------
        filename : str
            Path to an existing SPEC file.
        parse_data : bool
            When False, scan data lines are stored raw in scan['unparsed']
            and can be converted later with parse_data().

        Raises
        ------
        ValueError
            If the file does not exist.
        """
        if not os.path.exists(filename):
            raise ValueError("Invalid SPEC filename")
        self._f = open(filename, "rt")
        self._in_scan = False  # currently inside a scan section (#S seen)
        self._buffer_lines = []  # pushed-back raw lines for _read_line
        self._buffer_groups = []  # pushed-back (tag, lines) groups
        self._scans = []  # every scan dict read so far
        self._scan = None  # the scan dict currently being filled
        self._eof = False
        self._epoch = time.time()  # replaced by the header's #D timestamp
        self._mca_line = False  # last @A line ended with '\' (continuation)
        self._parse_data = parse_data
        self.spec_filename = filename  # may be replaced by the #F header line
        self.motors = []  # motor names from the #O header lines
        self.comment = ""  # comment from the #C header lines
        self._read_header()

    def close(self):
        """Close the underlying file (safe to call more than once)."""
        if self._f:
            self._f.close()
            self._f = None

    def scanno(self):
        # Number of the current scan -- not implemented
        return 0  # TODO

    def epoch(self):
        """Return the file start time (epoch seconds from the header #D)."""
        return self._epoch

    def _read_line(self):
        """Yield interesting lines: control lines (#...) always, data lines
        only while inside a scan. Emits a synthetic '#S' at end of file so
        pending tag groups get closed."""
        while True:
            # Serve pushed-back lines before reading new ones
            while self._buffer_lines:
                line = self._buffer_lines.pop(0)
                yield line
            line = self._f.readline()
            if line == "":
                # EOF sentinel
                yield "#S"  # TODO: fix so this isn't necessary
                self._eof = True
                break
            line = line.strip()
            if line and (line.startswith("#") or self._in_scan):
                yield line

    def _read_group(self):
        """Group consecutive control lines sharing a tag (e.g. #O0/#O1/...)
        into (TAG, [payload, ...]) tuples; feed data lines to the scan
        parser as they are encountered."""
        while self._buffer_groups:
            yield self._buffer_groups.pop(0)
        current_tag = None
        group = []
        tag_done = False
        for line in self._read_line():
            if self._eof:
                # Flush whatever group was being accumulated
                if current_tag is not None:
                    yield current_tag.upper(), group
                break
            if line.startswith("#"):
                # Split '#TAG payload' into tag and payload
                if " " in line[1:]:
                    tag, info = line[1:].split(" ", 1)
                else:
                    tag, info = line[1], ""
                info = info.lstrip()
                # Separate a trailing index from the tag: 'O0' -> ('O', '0')
                m = re.match(r"([@a-zA-Z]+)(\d*)", tag)
                if m:
                    tag, tag_index = m.groups()
                else:
                    tag_index = None
                if current_tag is None:
                    current_tag = tag
                    group.append(info)
                    if tag_index is None:
                        # Unindexed tags never span multiple lines
                        tag_done = True
                else:
                    if tag == current_tag:
                        group.append(info)
                    else:
                        # Different tag: push the line back, close this group
                        self._buffer_lines.append(line)
                        tag_done = True
            elif self._in_scan:
                if self._parse_data:
                    self._parse_scan_line(line)
                else:
                    self._scan["unparsed"].append(line)
            if tag_done:
                yield current_tag.upper(), group
                current_tag = None
                group = []
                tag_done = False

    def _parse_list(self, list_):
        # Split a whitespace-separated payload into tokens
        list_ = re.sub(r"\s+", ",", list_)
        return list_.split(",")

    def _parse_header_F(self, spec_filename):
        # F - filename recorded inside the file
        self.spec_filename = spec_filename

    def _parse_header_D(self, date_):
        # D - file creation date -> epoch seconds
        self._epoch = time.mktime(time.strptime(date_, TIME_FORMAT))

    def _parse_header_list_O(self, motors):
        # O - motor names
        self.motors = motors

    def _parse_header_C(self, comment):
        # C - file comment
        self.comment = comment

    def _read_section(self, section, end_tags=None, ignore_first_tag=False):
        """Dispatch tag groups to `_parse_{section}_{TAG}` and
        `_parse_{section}_list_{TAG}` handlers until one of *end_tags* is
        seen (which is pushed back for the next section, unless it is the
        first tag and *ignore_first_tag* is set)."""
        first_tag = True
        for tag, lines in self._read_group():
            if self._eof:
                return
            if end_tags is not None and tag in end_tags or self._eof:
                if not (first_tag and ignore_first_tag):
                    self._buffer_groups.insert(0, (tag, lines))
                break
            lines = " ".join(lines)
            # The list-style handler receives the tokenized payload ...
            fcn_name = "_parse_%s_list_%s" % (section, tag)
            if hasattr(self, fcn_name):
                fcn = getattr(self, fcn_name)
                fcn(self._parse_list(lines))
            # ... the plain handler receives the raw joined string
            fcn_name = "_parse_%s_%s" % (section, tag)
            if hasattr(self, fcn_name):
                fcn = getattr(self, fcn_name)
                fcn(lines)
            first_tag = False

    def _read_header(self):
        # The file header runs up to the first scan (#S) tag
        self._read_section("header", end_tags=["S"])

    def _parse_scan_S(self, scan_info):
        # S - start of a scan: '#S number command...'
        number, command = scan_info.split(" ", 1)
        self._in_scan = True
        self._scan = {
            "lines": [],
            "mca_data": [],
            "columns": [],
            "time": 0,
            "hkl": [0, 0, 0],
            "fourc": [],
            "positions": [],
        }
        if not self._parse_data:
            self._scan["unparsed"] = []
        self._scan["number"] = number
        self._scan["command"] = command.strip()
        self._scans.append(self._scan)

    def _parse_scan_list_L(self, columns):
        # L - data column names
        self._scan["columns"] = columns

    def _parse_scan_D(self, date_):
        # D - scan start date -> epoch seconds
        self._scan["time"] = time.mktime(time.strptime(date_, TIME_FORMAT))

    def _parse_scan_list_G(self, fourc_info):
        # G - four-circle diffractometer parameters
        self._scan["fourc"] = fourc_info

    def _parse_scan_list_Q(self, hkl):
        # Q - H K L at scan start
        self._scan["hkl"] = hkl

    def _parse_scan_line(self, line):
        """Parse one scan data line: an '@A ...' MCA spectrum (possibly
        continued with a trailing backslash) or a row of float columns."""
        if line.startswith("@A"):
            line = line.split(" ", 1)[1]
            # Line is now everything after @A
            data = [float(f) for f in line.split(" ")]
            # If the MCA data is spread on multiple lines, extend the previous
            # data, otherwise it's a new set
            if self._mca_line:
                self._scan["mca_data"][-1].extend(data)
            else:
                self._scan["mca_data"].append(data)
            self._mca_line = line.endswith("\\")
        else:
            try:
                line = line.replace("None", "0.0")  # TODO during saving
                self._scan["lines"].append([float(f) for f in line.split(" ")])
            except Exception:
                print("Bad scan line: %s" % line)

    def parse_data(self, scan):
        """
        Parse the scan data after the file is loaded, if parse_data was set
        """
        if self._parse_data:
            return
        if "unparsed" not in scan:
            return
        self._scan = scan
        for line in scan["unparsed"]:
            self._parse_scan_line(line)
        del scan["unparsed"]

    def _parse_scan_list_P(self, positions):
        # P - starting motor positions, keyed by motor name from the header
        positions = [float(p) if p != "None" else 0.0 for p in positions]  # TODO fix
        self._scan["positions"] = dict(zip(self.motors, positions))

    @property
    def scans(self):
        """Iterate over all scans in the file, reading them lazily."""
        while True:
            scan = self.read_scan()
            if scan is None:
                break
            yield scan

    def read_scan(self):
        """Read and return the next scan dict, or None at end of file."""
        if self._eof:
            return None
        self._read_section(
            "scan",
            end_tags=[
                "S",
            ],
            ignore_first_tag=True,
        )
        # should 'C' really be an end_tag, since you can use scan_on?
        # what does scan_on output look like?
        self._in_scan = False
        return self._scan

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()
def check_motor_list(filename, motors):
    """
    Return True if *motors* equals the #O motor list stored in *filename*.

    Any failure to open or parse the file is treated as a mismatch.
    """
    try:
        # Context manager ensures the reader's file handle is closed
        # (previously the reader was leaked).
        with SPECFileReader(filename) as sf:
            return motors == sf.motors
    except Exception:
        return False
def get_last_scan_number(filename):
    """
    Return the highest scan number found in a SPEC file (0 if none or on error).

    Parameters
    ----------
    filename : str or SPECFileReader
        Path to the file, or an already-open reader. A reader passed in by
        the caller is left open; a reader created here is always closed
        (previously it was leaked).
    """
    numbers = [0]
    reader = None
    own_reader = False
    try:
        if isinstance(filename, SPECFileReader):
            reader = filename
        else:
            reader = SPECFileReader(filename)
            own_reader = True
        for scan in reader.scans:
            try:
                numbers.append(int(scan["number"]))
            except (KeyError, TypeError, ValueError):
                # Malformed or missing scan number -- skip it
                pass
        return max(numbers)
    except Exception as ex:
        print("Failed to get last scan number: (%s) %s" % (filename, ex))
        return 0
    finally:
        if own_reader and reader is not None:
            reader.close()
if __name__ == "__main__":
    # Demo: write a small SPEC file, then read a file back and dump it.
    motors = ["m0", "m1"]
    data_names = ["test", "one", "two"]
    a = SPECFileWriter("test.txt", comment="my comment", motors=motors)
    a.write_scan_start(command="dscan something", seconds=1)
    a.write_motor_positions([3, 4])
    a.write_scan_data_start(data_names)
    for d1, d2, d3 in zip(range(10), range(2, 12), range(10, 20)):
        a.write_scan_data([d1, d2, d3])
    a.finish_scan()
    # reader = SPECFileReader('/epics/data/aug_21_11')
    # NOTE(review): assumes a file "../test_output" exists relative to the
    # CWD; SPECFileReader raises ValueError otherwise -- confirm the path.
    reader = SPECFileReader("../test_output")
    for scan in reader.scans:
        print(scan["number"], scan["command"], scan.keys(), scan["columns"])
    print(reader._scans[-1], len(reader._scans))
    print(reader._buffer_groups)
    print(reader._scans[-1]["lines"])
    print(reader._scans[-1]["columns"])
    print(reader._scan["columns"])
|
dmgav/dpcmaps | dpcmaps/dpc_batch_gui.py | <filename>dpcmaps/dpc_batch_gui.py
"""
Created on Feb 23, 2017
@author: <NAME>, 2nd Look
"""
from __future__ import division
import sys
import os
from PyQt5.QtWidgets import (
QMainWindow,
QWidget,
QVBoxLayout,
QHBoxLayout,
QGroupBox,
QCheckBox,
QLabel,
QLineEdit,
QPushButton,
QFileDialog,
QMessageBox,
QApplication,
QTextEdit,
)
from PyQt5.QtGui import QPalette, QColor, QIntValidator, QTextCursor
from PyQt5.QtCore import Qt, QCoreApplication, QSettings
from .dpc_batch import run_batch
from dpcmaps import __version__
# #----------------------------------------------------------------------
# class EmittingStream(QObject):
#
# textWritten = pyqtSignal(str)
#
# def write(self, text):
# self.textWritten.emit(str(text))
""" ------------------------------------------------------------------------------------------------"""
class MainFrame(QMainWindow):
def __init__(self):
super(MainFrame, self).__init__()
self.settings = QSettings("dpcmaps", "DPC-BATCH-GUI")
self.script_file = ""
try:
val = self.settings.value("scan_range").toPyObject()
except AttributeError:
val = None
if val is None:
val = ""
self.scan_range = val
try:
val = self.settings.value("scan_nums").toPyObject()
except AttributeError:
val = None
if val is None:
val = ""
self.scan_nums = val
try:
val = self.settings.value("every_n").toPyObject()
except AttributeError:
val = None
if val is None:
val = 1
self.every_n = val
try:
val = self.settings.value("load_params_datastore").toPyObject()
except AttributeError:
val = None
if val is None:
val = 0
self.read_data_from_datastore = val
try:
val = self.settings.value("filestore_key").toPyObject()
except AttributeError:
val = None
if val is None:
val = "merlin1"
self.filestore_key = val
try:
val = self.settings.value("data_dir").toPyObject()
except AttributeError:
val = None
if val is None:
val = ""
self.data_directory = val
try:
val = self.settings.value("file_format").toPyObject()
except AttributeError:
val = None
if val is None:
val = "S{0}.h5"
self.file_format = val
try:
val = self.settings.value("load_params_datastore").toPyObject()
except AttributeError:
val = None
if val is None:
val = 0
self.load_params_from_broker = val
try:
val = self.settings.value("param_file").toPyObject()
except AttributeError:
val = None
if val is None:
val = ""
self.parameter_file = val
try:
val = self.settings.value("processes").toPyObject()
except AttributeError:
val = None
if val is None:
val = 1
self.processes = val
try:
val = self.settings.value("save_dir").toPyObject()
except AttributeError:
val = None
if val is None:
val = ""
self.save_dir = val
try:
val = self.settings.value("save_fn").toPyObject()
except AttributeError:
val = None
if val is None:
val = ""
self.save_filename = val
try:
val = self.settings.value("save_png").toPyObject()
except AttributeError:
val = None
if val is None:
val = 0
self.save_png = val
try:
val = self.settings.value("save_txt").toPyObject()
except AttributeError:
val = None
if val is None:
val = 0
self.save_txt = val
self.resize(600, 720)
self.setWindowTitle(f"DPC Batch {__version__}")
pal = QPalette()
self.setAutoFillBackground(True)
pal.setColor(QPalette.Window, QColor("white"))
self.setPalette(pal)
self.mainWidget = QWidget(self)
self.setCentralWidget(self.mainWidget)
vbox = QVBoxLayout(self.mainWidget)
vbox.setContentsMargins(20, 10, 20, 20)
sizer1 = QGroupBox("Scans")
vbox1 = QVBoxLayout()
self.cb_usedatastore = QCheckBox(" Read the Data from DataStore", self)
self.cb_usedatastore.setChecked(self.read_data_from_datastore)
self.cb_usedatastore.stateChanged.connect(self.OnUseDataStore)
vbox1.addWidget(self.cb_usedatastore)
hbox = QHBoxLayout()
l1 = QLabel("Scan numbers & ranges \t", self)
self.tc_scan_range = QLineEdit(self)
self.tc_scan_range.setAlignment(Qt.AlignLeft)
self.tc_scan_range.setText(self.scan_range)
l1.setToolTip("Set scan numbers and ranges. Example: 2, 3-5, 7-15, 23, 30-55")
self.tc_scan_range.setToolTip("Set scan numbers and ranges. Example: 2, 3-5, 7-15, 23, 30-55")
hbox.addWidget(l1)
hbox.addWidget(self.tc_scan_range)
vbox1.addLayout(hbox)
hbox = QHBoxLayout()
l2 = QLabel("Process every n-th scan \t", self)
self.ntc_every_n = QLineEdit(self)
self.ntc_every_n.setValidator(QIntValidator(1, 99999, self))
self.ntc_every_n.setAlignment(Qt.AlignRight)
self.ntc_every_n.setText(str(self.every_n))
hbox.addWidget(l2)
hbox.addWidget(self.ntc_every_n)
hbox.addStretch(1)
vbox1.addLayout(hbox)
# hbox = QHBoxLayout()
# l1 = QLabel('Scan numbers \t', self)
# self.tc_scans = QLineEdit(self)
# self.tc_scans.setAlignment(Qt.AlignLeft)
# self.tc_scans.setText(str(self.scan_nums))
# l1.setToolTip('Set scan numbers. Example: 1, 24, 26')
# self.tc_scans.setToolTip('Set scan numbers. Example: 1, 24, 26')
# hbox.addWidget(l1)
# hbox.addWidget(self.tc_scans)
# vbox1.addLayout(hbox)
hbox = QHBoxLayout()
l1 = QLabel("Filestore key \t", self)
self.tc_fskey = QLineEdit(self)
self.tc_fskey.setAlignment(Qt.AlignLeft)
self.tc_fskey.setText(self.filestore_key)
hbox.addWidget(l1)
hbox.addWidget(self.tc_fskey)
vbox1.addLayout(hbox)
hbox = QHBoxLayout()
l1 = QLabel("Data Directory \t", self)
self.tc_datadir = QLineEdit(self)
self.tc_datadir.setAlignment(Qt.AlignLeft)
l1.setToolTip("Data Direcory if not using datastore.")
self.tc_datadir.setToolTip("Data Direcory if not using datastore.")
self.tc_datadir.setText(self.data_directory)
self.button_d1 = QPushButton("Browse")
self.button_d1.clicked.connect(self.OnSelectDataDir)
hbox.addWidget(l1)
hbox.addWidget(self.tc_datadir)
hbox.addWidget(self.button_d1)
vbox1.addLayout(hbox)
hbox = QHBoxLayout()
l1 = QLabel("File format \t", self)
self.tc_format = QLineEdit(self)
self.tc_format.setAlignment(Qt.AlignLeft)
self.tc_format.setToolTip("Data file format.")
self.tc_format.setText(self.file_format)
hbox.addWidget(l1)
hbox.addWidget(self.tc_format)
vbox1.addLayout(hbox)
sizer1.setLayout(vbox1)
vbox.addWidget(sizer1)
sizer3 = QGroupBox("Scan Parameters")
vbox3 = QVBoxLayout()
self.cb_paramsdatastore = QCheckBox(" Read the Parameters from DataStore", self)
self.cb_paramsdatastore.setChecked(self.load_params_from_broker)
vbox3.addWidget(self.cb_paramsdatastore)
hbox = QHBoxLayout()
l1 = QLabel("Parameter file \t", self)
self.tc_paramfile = QLineEdit(self)
self.tc_paramfile.setAlignment(Qt.AlignLeft)
self.tc_paramfile.setText(self.parameter_file)
button_d3 = QPushButton("Select")
button_d3.clicked.connect(self.OnSelectParamFile)
hbox.addWidget(l1)
hbox.addWidget(self.tc_paramfile)
hbox.addWidget(button_d3)
vbox3.addLayout(hbox)
sizer3.setLayout(vbox3)
vbox.addWidget(sizer3)
sizer2 = QGroupBox("Run configurations")
vbox2 = QVBoxLayout()
hbox = QHBoxLayout()
l2 = QLabel("Processes \t", self)
self.ntc_processes = QLineEdit(self)
self.ntc_processes.setValidator(QIntValidator(1, 64, self))
self.ntc_processes.setAlignment(Qt.AlignRight)
self.ntc_processes.setText(str(self.processes))
hbox.addWidget(l2)
hbox.addWidget(self.ntc_processes)
hbox.addStretch(1)
vbox2.addLayout(hbox)
sizer2.setLayout(vbox2)
vbox.addWidget(sizer2)
sizer4 = QGroupBox("Saving Results")
vbox4 = QVBoxLayout()
hbox = QHBoxLayout()
hbox = QHBoxLayout()
l1 = QLabel("Save Directory \t", self)
self.tc_savedir = QLineEdit(self)
self.tc_savedir.setAlignment(Qt.AlignLeft)
l1.setToolTip("Data Direcory where results will be stored.")
self.tc_savedir.setToolTip("Data Direcory where results will be stored.")
self.tc_savedir.setText(self.save_dir)
button_d2 = QPushButton("Browse")
button_d2.clicked.connect(self.OnSelectSaveDir)
hbox.addWidget(l1)
hbox.addWidget(self.tc_savedir)
hbox.addWidget(button_d2)
vbox4.addLayout(hbox)
hbox = QHBoxLayout()
l1 = QLabel("Save Filename \t", self)
self.tc_savefn = QLineEdit(self)
self.tc_savefn.setAlignment(Qt.AlignLeft)
self.tc_savefn.setText(self.save_filename)
hbox.addWidget(l1)
hbox.addWidget(self.tc_savefn)
vbox4.addLayout(hbox)
self.cb_savepng = QCheckBox(" Save results as .png files", self)
self.cb_savepng.setChecked(self.save_png)
vbox4.addWidget(self.cb_savepng)
self.cb_savetxt = QCheckBox(" Save results as .txt files", self)
self.cb_savetxt.setChecked(self.save_txt)
vbox4.addWidget(self.cb_savetxt)
cb_savetif = QCheckBox(" Save results as .tif files", self)
cb_savetif.setChecked(True)
cb_savetif.setDisabled(True)
vbox4.addWidget(cb_savetif)
sizer4.setLayout(vbox4)
vbox.addWidget(sizer4)
hbox = QHBoxLayout()
self.button_save = QPushButton("Save")
self.button_save.clicked.connect(self.OnSave)
hbox.addWidget(self.button_save)
self.button_start = QPushButton("Start")
self.button_start.clicked.connect(self.OnStart)
hbox.addWidget(self.button_start)
vbox.addLayout(hbox)
self.console_info = QTextEdit(self)
self.console_info.setReadOnly(True)
vbox.addWidget(self.console_info)
# sys.stdout = EmittingStream(textWritten=self.ConsoleOutput)
self.show()
if sys.platform == "darwin":
self.raise_()
self.OnUseDataStore()
# ----------------------------------------------------------------------
    def __del__(self):
        # Restore the real stdout in case it was redirected to the console
        # widget (see the commented-out EmittingStream hookup above).
        sys.stdout = sys.__stdout__
# ----------------------------------------------------------------------
def ConsoleOutput(self, text):
cursor = self.console_info.textCursor()
cursor.movePosition(QTextCursor.End)
cursor.insertText(text)
self.console_info.setTextCursor(cursor)
self.console_info.ensureCursorVisible()
# ----------------------------------------------------------------------
def OnBrowseDir(self):
directory = QFileDialog.getExistingDirectory(
self, "Choose a directory", "", QFileDialog.ShowDirsOnly | QFileDialog.ReadOnly
)
if directory == "":
return ""
return str(directory)
# ----------------------------------------------------------------------
def OnSelectDataDir(self):
datapath = self.OnBrowseDir()
self.tc_datadir.setText(str(os.path.abspath(datapath)))
# ----------------------------------------------------------------------
def OnSelectSaveDir(self):
datapath = self.OnBrowseDir()
self.tc_savedir.setText(str(os.path.abspath(datapath)))
# ----------------------------------------------------------------------
def OnSelectParamFile(self):
paramfile = QFileDialog.getOpenFileName(self, "Choose a parameter file", "", "Text file (*.txt)")[0]
self.tc_paramfile.setText(str(os.path.abspath(paramfile)))
# ----------------------------------------------------------------------
def OnUseDataStore(self):
if self.cb_usedatastore.isChecked():
self.read_data_from_datastore = 1
self.tc_datadir.setDisabled(True)
self.button_d1.setDisabled(True)
self.tc_format.setDisabled(True)
self.ntc_every_n.setDisabled(False)
self.tc_fskey.setDisabled(False)
else:
self.read_data_from_datastore = 0
self.tc_datadir.setDisabled(False)
self.button_d1.setDisabled(False)
self.tc_format.setDisabled(False)
self.ntc_every_n.setDisabled(True)
self.tc_fskey.setDisabled(True)
# ----------------------------------------------------------------------
def OnStart(self, evt):
self.console_info.append("Started DPC batch...")
QCoreApplication.processEvents()
QApplication.setOverrideCursor(Qt.WaitCursor)
self.script_file = "DPCBatchGUIScriptFile.txt"
self.Save(self.script_file)
run_batch(self.script_file)
QApplication.restoreOverrideCursor()
self.console_info.append("DPC finished.")
# ----------------------------------------------------------------------
def OnSave(self, evt):
scriptfile = QFileDialog.getSaveFileName(self, "Choose a script file", "", "Text file (*.txt)")[0]
if scriptfile == "":
return
scriptfile = os.path.abspath(scriptfile)
self.Save(scriptfile)
# ----------------------------------------------------------------------
def Save(self, scriptfile):
# Get the info
self.scan_range = self.tc_scan_range.text()
self.settings.setValue("scan_range", self.scan_range)
# self.scan_nums = self.tc_scans.text()
# self.settings.setValue('scan_nums', self.scan_nums)
if self.scan_range == "":
QMessageBox.warning(self, "Error", "Please enter scan range or scan number.")
return
self.every_n = self.ntc_every_n.text()
self.settings.setValue("every_n", self.every_n)
if self.cb_usedatastore.isChecked():
self.read_data_from_datastore = 1
else:
self.read_data_from_datastore = 0
self.settings.setValue("read_data_from_datastore", self.read_data_from_datastore)
self.filestore_key = self.tc_fskey.text()
self.settings.setValue("filestore_key", self.filestore_key)
self.data_directory = self.tc_datadir.text()
self.settings.setValue("data_dir", self.data_directory)
if (self.data_directory == "") and (self.read_data_from_datastore == 0):
QMessageBox.warning(self, "Error", "Please enter data directory or read from DataStore.")
return
self.file_format = self.tc_format.text()
self.settings.setValue("file_format", self.file_format)
if self.cb_paramsdatastore.isChecked():
self.load_params_from_broker = 1
else:
self.load_params_from_broker = 0
self.settings.setValue("load_params_datastore", self.load_params_from_broker)
self.parameter_file = self.tc_paramfile.text()
self.settings.setValue("param_file", self.parameter_file)
if self.parameter_file == "":
QMessageBox.warning(self, "Error", "Please enter scan parameter file.")
return
self.processes = self.ntc_processes.text()
self.settings.setValue("processes", self.processes)
self.save_dir = self.tc_savedir.text()
if self.save_dir == "":
QMessageBox.warning(self, "Error", "Please enter save directory.")
return
self.settings.setValue("save_dir", self.save_dir)
self.save_filename = self.tc_savefn.text()
if self.save_filename == "":
QMessageBox.warning(self, "Error", "Please enter save file name.")
return
self.settings.setValue("save_fn", self.save_filename)
if self.cb_savepng.isChecked():
self.save_png = 1
else:
self.save_png = 0
self.settings.setValue("save_png", self.save_png)
if self.cb_savetxt.isChecked():
self.save_txt = 1
else:
self.save_txt = 0
self.settings.setValue("save_txt", self.save_txt)
# Save the info into script file
self.console_info.append("\n#DPC script file")
if self.scan_range != "":
self.console_info.append("scan_range = {0}".format(self.scan_range))
# if self.scan_nums != '':
# self.console_info.append('scan_numbers = {0}'.format(self.scan_nums))
self.console_info.append("every_nth_scan = {0}".format(self.every_n))
self.console_info.append("get_data_from_datastore = {0}".format(self.read_data_from_datastore))
self.console_info.append("file_store_key = {0}".format(self.filestore_key))
self.console_info.append("data_directory = {0}".format(self.data_directory))
self.console_info.append("file_format = {0}".format(self.file_format))
self.console_info.append("parameter_file = {0}".format(self.parameter_file))
self.console_info.append("read_params_from_datastore = {0}".format(self.load_params_from_broker))
self.console_info.append("processes = {0}".format(self.processes))
self.console_info.append("save_path = {0}".format(self.save_dir))
self.console_info.append("save_filename = {0}".format(self.save_filename))
self.console_info.append("save_pngs = {0}".format(self.save_png))
self.console_info.append("save_txt = {0}".format(self.save_txt))
try:
sf = open(scriptfile, "w")
sf.write("#DPC script file\n")
if self.scan_range != "":
sf.write("scan_range = {0}\n".format(self.scan_range))
# if self.scan_nums != '':
# sf.write('scan_numbers = {0}\n'.format(self.scan_nums))
sf.write("every_nth_scan = {0}\n".format(self.every_n))
sf.write("get_data_from_datastore = {0}\n".format(self.read_data_from_datastore))
sf.write("file_store_key = {0}\n".format(self.filestore_key))
sf.write("data_directory = {0}\n".format(self.data_directory))
sf.write("file_format = {0}\n".format(self.file_format))
sf.write("parameter_file = {0}\n".format(self.parameter_file))
sf.write("read_params_from_datastore = {0}\n".format(self.load_params_from_broker))
sf.write("processes = {0}\n".format(self.processes))
sf.write("save_path = {0}\n".format(self.save_dir))
sf.write("save_filename = {0}\n".format(self.save_filename))
sf.write("save_pngs = {0}\n".format(self.save_png))
sf.write("save_txt = {0}\n".format(self.save_txt))
sf.close()
self.console_info.append("\nSaved script file {0}".format(scriptfile))
except Exception:
QMessageBox.warning(self, "Error", "Error writing script file!")
return
self.script_file = scriptfile
""" ------------------------------------------------------------------------------------------------"""
def run_dpc_batch_gui():
    """Create the Qt application, show the main window and run the event loop."""
    application = QApplication(sys.argv)
    window = MainFrame()
    window.show()
    sys.exit(application.exec_())
if __name__ == "__main__":
    # Allow running this module directly as a script
    run_dpc_batch_gui()
|
dmgav/dpcmaps | dpcmaps/dpc_batch.py | """
Created on Feb 23, 2017
@author: <NAME>, 2nd Look
"""
from __future__ import print_function, division
import sys
import os
import numpy as np
import multiprocessing as mp
import h5py
import PIL
try:
from tifffile import imsave
havetiff = True
except ImportError as ex:
print("[!] Import error - tifffile not available. Tif files will not be saved")
print("[!] (import error: {})".format(ex))
havetiff = False
from .db_config.db_config import db
# try:
# from databroker import db, get_events
# except ImportError as ex:
# print("[!] Unable to import DataBroker library.")
try:
import hxntools
import hxntools.handlers
from hxntools.scan_info import ScanInfo
except ImportError as ex:
print("[!] Unable to import hxntools library.")
print("[!] (import error: {})".format(ex))
hxntools = None
from .dpc_kernel import main as dpc_kernel_main
from .dpc_kernel import load_image_filestore
version = "0.1.0"
def load_scan_from_mds(scan_id):
    """Look up *scan_id* in the data broker and wrap the header in ScanInfo.

    If several headers match, the first one returned by the broker is used
    (a message is printed in that case).
    """
    headers = list(db(scan_id=scan_id))
    if len(headers) > 1:
        print(f"Multiple scans are available for scan_id {scan_id}. Processing the latest scan ...")
    return ScanInfo(headers[0])
def get_ref_from_mds(scan, first_image, file_store_key):
    """Return the reference frame of *scan*: the (first_image+1)-th item
    (minimum the first one).

    Returns None when *scan* is None, or when the scan has fewer frames
    than requested (a message mentioning *file_store_key* is printed).
    The selected frame is also printed, as before.
    """
    if scan is None:
        return
    frames = iter(scan)
    wanted = max((1, first_image + 1))
    ref_image = None
    try:
        for _ in range(wanted):
            ref_image = next(frames)
    except StopIteration:
        print("Reference image #{} does not exist with data key {}".format(wanted, file_store_key))
    print(ref_image)
    return ref_image
def set_scan_from_scaninfo(scan):
    """
    Derive (dx, dy, cols, rows, pyramid) from a ScanInfo-like object.

    dx/dy are per-point step sizes computed from the scan range; they fall
    back to 0.0 when no range information is available. (Previously
    ``scan_range is None`` left dx/dy unbound, raising at the return.)

    Returns None when the scan carries no dimension information.
    """
    if scan.dimensions is None or len(scan.dimensions) == 0:
        return None
    scan_range = scan.range
    print("Scan dimensions", scan.dimensions)
    print("Scan range:", scan_range)
    pyramid_scan = scan.pyramid
    if isinstance(scan_range, dict):
        # Normalize {motor: (start, stop)} to a list ordered like scan.motors
        scan_range = [scan_range[mtr] for mtr in scan.motors]
    dx = dy = 0.0  # BUG FIX: defaults for the no-range case
    if len(scan.dimensions) == 1:
        nx, ny = scan.dimensions[0], 1
        if scan_range is not None:
            dx = np.diff(scan_range[0]) / nx
    else:
        nx, ny = scan.dimensions
        if scan_range is not None:
            dx = np.diff(scan_range[0]) / nx
            dy = np.diff(scan_range[1]) / ny
    cols = nx
    rows = ny
    return dx, dy, cols, rows, pyramid_scan
def load_data_hdf5(path):
    """
    Read detector frames from entry/instrument/detector/data of an HDF5 file.

    Fix: the file handle was previously never closed; use a context manager.
    """
    with h5py.File(str(path), "r") as f:
        data = f["entry"]["instrument"]["detector"]["data"][...]
    return np.array(data)
def load_image_hdf5(path):
    """Load the first frame of the HDF5 detector data set as a 2-D array."""
    frames = load_data_hdf5(path)
    return frames[0]
def save_results(
    a, gx, gy, phi, rx, ry, save_path, save_filename, scan_number, save_pngs=True, save_tif=True, save_txt=True
):
    """Save DPC results (amplitude, gradients, reconstructions, phase) to disk.

    Writes per-array .txt files, rescaled 8-bit .png previews and a stacked
    float32 .tif (when tifffile is available) under *save_path*, with file
    names prefixed by the scan number.  ``phi`` may be None.

    Fixes: the amplitude PNG was scaled by ``2.0 / ptp`` instead of ``255.0``
    (producing an almost-black image); ``ndarray.ptp()`` was replaced with
    ``np.ptp`` (the method was removed in NumPy 2.0).
    """
    if not os.path.isdir(save_path):
        print("Could not save results! Save directory {0} does not exist.".format(save_path))
        return

    def _to_uint8(img):
        # Linearly rescale to the full 0-255 range for PNG export.
        return (255.0 / np.ptp(img) * (img - img.min())).astype(np.uint8)

    save_filename = os.path.join(save_path, "S{0}_{1}".format(scan_number, save_filename))
    # Keep the original array order: a, gx, gy, rx, ry, then optional phi.
    named = [("a", a), ("gx", gx), ("gy", gy), ("rx", rx), ("ry", ry)]
    if phi is not None:
        named.append(("phi", phi))
    if save_txt:
        for name, arr in named:
            np.savetxt("{0}_{1}.txt".format(save_filename, name), arr)
    if save_pngs:
        for name, arr in named:
            im = PIL.Image.fromarray(_to_uint8(arr))
            im.save("{0}_{1}.png".format(save_filename, name))
    if save_tif and havetiff:
        imgs = np.stack([arr for _, arr in named])
        imsave(save_filename + ".tif", imgs.astype(np.float32))
# ----------------------------------------------------------------------
def init_scan_parameters():
    """Return a fresh dict of default DPC reconstruction settings."""
    return dict(
        file_format="S%d.h5",
        dx=0.1,
        dy=0.1,
        ref_image=None,
        rows=121,
        cols=121,
        start_point=[1, 0],
        pixel_size=55,
        focus_to_det=1.46,
        energy=19.5,
        pool=None,
        first_image=0,
        roi_x1=None,
        roi_x2=None,
        roi_y1=None,
        roi_y2=None,
        bad_pixels=[],
        solver="Nelder-Mead",
        display_fcn=None,
        random=1,
        pyramid=-1,
        hang=1,
        swap=-1,
        reverse_x=1,
        reverse_y=1,
        mosaic_x=1,
        mosaic_y=1,
        load_image=load_image_hdf5,
        use_mds=False,
        scan=None,
        save_path=None,
        pad=False,
    )
# ----------------------------------------------------------------------
def read_scan_parameters_from_file(scan_parameters, param_filename):
    """Fill *scan_parameters* from ``key = value`` lines of a parameter file.

    Lines starting with '#' are skipped.  Matching is substring-based on the
    lower-cased line (preserving the original semantics), so the dispatch
    order below must not be changed.

    Fixes: the pixel size was stored under the unused key "pixel_size_um"
    instead of "pixel_size" (so it was silently ignored downstream); the file
    handle is now closed via a context manager.
    """
    print("Reading scan parameters from ", param_filename)
    # (token in line, destination key, value cast) -- order matters.
    dispatch = [
        ("step_size_dx_um", "dx", float),
        ("step_size_dy_um", "dy", float),
        ("cols_x", "cols", int),
        ("rows_y", "rows", int),
        ("pixel_size_um", "pixel_size", float),  # fix: was "pixel_size_um"
        ("detector_sample_distance", "focus_to_det", float),
        ("energy_kev ", "energy", float),
        ("roi_x1", "roi_x1", int),
        ("roi_x2", "roi_x2", int),
        ("roi_y1", "roi_y1", int),
        ("roi_y2", "roi_y2", int),
        ("mosaic_column_number_x", "mosaic_x", int),
        ("mosaic_column_number_y", "mosaic_y", int),
        ("solver", "solver", lambda s: s.strip()),
        ("random", "random", int),
        ("pyramid", "pyramid", int),
        ("hang", "hang", int),
        ("swap", "swap", int),
        ("reverse_x", "reverse_x", int),
        ("reverse_y", "reverse_y", int),
        ("pad", "pad", int),
    ]
    with open(param_filename, "rt") as f:
        for line in f:
            if line.startswith("#"):
                continue
            lowered = line.lower()
            for token, key, cast in dispatch:
                if token in lowered:
                    scan_parameters[key] = cast(line.strip().split("=")[1])
                    break
    return scan_parameters
# ----------------------------------------------------------------------
def read_scan_parameters_from_datastore(scan_parameters):
    """Placeholder DataStore reader: announces itself and returns the input unchanged."""
    print("Reading scan parameters from DataStore")
    return scan_parameters
# ----------------------------------------------------------------------
def read_scan_parameters(scan_parameters, param_filename="", read_from_datastore=False):
    """Dispatch to the DataStore or text-file parameter reader."""
    if read_from_datastore:
        return read_scan_parameters_from_datastore(scan_parameters)
    return read_scan_parameters_from_file(scan_parameters, param_filename)
# ----------------------------------------------------------------------
def parse_scan_range(scan_range, scan_numbers, str_scan_range):
    """Parse a spec like ``"1-5, 8, 10-12"`` into the two accumulator lists.

    Tokens containing '-' are appended to *scan_range* as (start, stop)
    tuples; all other tokens are appended to *scan_numbers* as ints.
    Both lists are mutated in place and returned.
    """
    for token in str_scan_range.split(","):
        if "-" in token:
            parts = token.split("-")
            scan_range.append((int(parts[0].strip()), int(parts[1].strip())))
        else:
            scan_numbers.append(int(token.strip()))
    return scan_range, scan_numbers
# ----------------------------------------------------------------------
def parse_script(script_file):
    """Parse the batch-processing script file and return its settings.

    Returns a 15-tuple: (scan_range, scan_numbers, every_nth_scan,
    get_data_from_datastore, data_directory, read_params_from_datastore,
    parameter_file, processes, scan_header_index, file_format,
    file_store_key, save_path, save_filename, save_pngs, save_txt).

    Matching is substring-based on the lower-cased line (original semantics),
    so the dispatch order must be preserved.  Exits the process if the script
    cannot be read.

    Fix: the file handle was leaked when a parse error occurred (the old
    ``f.close()`` in the try body was skipped before ``exit()``); the file is
    now managed by a context manager.
    """
    scan_range = []
    scan_numbers = []
    settings = {
        "every_nth_scan": 1,
        "get_data_from_datastore": 0,
        "data_directory": "",
        "read_params_from_datastore": 0,
        "parameter_file": "",
        "processes": 1,
        "scan_header_index": 0,
        "file_format": "S{0}.h5",
        "save_filename": "results",
        "file_store_key": "",
        "save_path": "",
        "save_pngs": 1,
        "save_txt": 1,
    }
    # (token in line, settings key, value cast) -- order matters.
    strip = lambda s: s.strip()
    dispatch = [
        ("every_nth_scan", "every_nth_scan", int),
        ("get_data_from_datastore", "get_data_from_datastore", int),
        ("read_params_from_datastore", "read_params_from_datastore", int),
        ("processes", "processes", int),
        ("scan_header_index", "scan_header_index", int),
        ("data_directory", "data_directory", strip),
        ("save_path", "save_path", strip),
        ("save_filename", "save_filename", strip),
        ("save_pngs", "save_pngs", int),
        ("save_txt", "save_txt", int),
        ("parameter_file", "parameter_file", strip),
        ("file_format", "file_format", strip),
        ("file_store_key", "file_store_key", strip),
    ]
    try:
        with open(script_file, "rt") as f:
            for line in f:
                if line.startswith("#"):
                    continue
                lowered = line.lower()
                if "scan_range" in lowered:
                    value = line.strip().split("=")[1]
                    scan_range, scan_numbers = parse_scan_range(scan_range, scan_numbers, value)
                    continue
                if "scan_numbers" in lowered:
                    for token in line.strip().split("=")[1].split(","):
                        scan_numbers.append(int(token.strip()))
                    continue
                for token, key, cast in dispatch:
                    if token in lowered:
                        settings[key] = cast(line.strip().split("=")[1])
                        break
    except Exception:
        print("Could not read the script file. Exiting.")
        exit()
    print("Script setup:")
    print("scan_range", scan_range)
    print("scan_numbers", scan_numbers)
    print("scan_header_index", settings["scan_header_index"])
    print("file_store_key", settings["file_store_key"])
    print("every_nth_scan", settings["every_nth_scan"])
    print("get_data_from_datastore", settings["get_data_from_datastore"])
    print("data_directory", settings["data_directory"])
    print("file_format", settings["file_format"])
    print("read_params_from_datastore", settings["read_params_from_datastore"])
    print("parameter_file", settings["parameter_file"])
    print("processes", settings["processes"])
    print("save_path", settings["save_path"])
    print("save_filename", settings["save_filename"])
    print("save_pngs", settings["save_pngs"])
    print("save_txt", settings["save_txt"])
    return (
        scan_range,
        scan_numbers,
        settings["every_nth_scan"],
        settings["get_data_from_datastore"],
        settings["data_directory"],
        settings["read_params_from_datastore"],
        settings["parameter_file"],
        settings["processes"],
        settings["scan_header_index"],
        settings["file_format"],
        settings["file_store_key"],
        settings["save_path"],
        settings["save_filename"],
        settings["save_pngs"],
        settings["save_txt"],
    )
""" ------------------------------------------------------------------------------------------------"""
def run_batch(script_file):
    """Run DPC reconstruction for every scan listed in *script_file*.

    Parses the batch script, collects scan numbers from explicit lists and
    ranges, loads per-scan data either from HDF5 files or from the DataStore,
    runs ``dpc_kernel_main`` in a multiprocessing pool and saves the results
    for each scan.

    Fix: ``np.int`` (removed in NumPy 1.24) replaced with the builtin ``int``.
    """
    print("Parsing script ", script_file)
    (
        scan_range,
        scan_numbers,
        every_nth_scan,
        get_data_from_datastore,
        data_directory,
        read_params_from_datastore,
        parameter_file,
        processes,
        scan_header_index,
        file_format,
        file_store_key,
        save_path,
        save_filename,
        save_pngs,
        save_txt,
    ) = parse_script(script_file)
    if get_data_from_datastore == 1:
        if hxntools is None:
            print("Warning! Cannot read scan parameters from DataStore because hnxtools library is not available.")
        print("Reading data from DataStore.")
    else:
        print("Reading data from .h5 files.")
    # Collect explicit scan numbers, then expand every (start, stop) range.
    calc_scan_numbers = np.array((), dtype=int)
    calc_scan_numbers = np.append(calc_scan_numbers, np.array(scan_numbers, dtype=int))
    for item in scan_range:
        calc_scan_numbers = np.append(
            calc_scan_numbers, np.arange(item[0], item[1] + 1, every_nth_scan, dtype=int)
        )
    scan_parameters = init_scan_parameters()
    try:
        scan_parameters = read_scan_parameters(scan_parameters, param_filename=parameter_file)
    except Exception:
        print("Could not read scan parameters from parameter file {}. Using defaults.".format(parameter_file))
    dpc_settings = {
        "file_format": "",
        "save_path": data_directory,
        "dx": scan_parameters["dx"],
        "dy": scan_parameters["dy"],
        "x1": scan_parameters["roi_x1"],
        "y1": scan_parameters["roi_y1"],
        "x2": scan_parameters["roi_x2"],
        "y2": scan_parameters["roi_y2"],
        "pixel_size": scan_parameters["pixel_size"],
        "focus_to_det": scan_parameters["focus_to_det"],
        "energy": scan_parameters["energy"],
        "rows": scan_parameters["rows"],
        "cols": scan_parameters["cols"],
        "mosaic_y": scan_parameters["mosaic_y"],
        "mosaic_x": scan_parameters["mosaic_x"],
        "swap": scan_parameters["swap"],
        "reverse_x": scan_parameters["reverse_x"],
        "reverse_y": scan_parameters["reverse_y"],
        "random": scan_parameters["random"],
        "pyramid": scan_parameters["pyramid"],
        "pad": scan_parameters["pad"],
        "hang": scan_parameters["hang"],
        "ref_image": scan_parameters["ref_image"],
        "first_image": scan_parameters["first_image"],
        "solver": scan_parameters["solver"],
        "scan": None,
        "use_mds": scan_parameters["use_mds"],
        "calculate_results": True,
    }
    n_scans = calc_scan_numbers.size
    for i_scan in range(n_scans):
        scan_filename = os.path.join(data_directory, file_format.format(calc_scan_numbers[i_scan]))
        print("\nProcessing scan number ", calc_scan_numbers[i_scan])
        dpc_settings["file_format"] = scan_filename
        dpc_settings["ref_image"] = scan_filename
        dpc_settings["scan"] = calc_scan_numbers[i_scan]
        if get_data_from_datastore:
            load_image = load_image_filestore
            dpc_settings["file_format"] = ""
            dpc_settings["ref_image"] = ""
            dpc_settings["use_hdf5"] = False
            dpc_settings["use_mds"] = True
            try:
                scan_id = int(calc_scan_numbers[i_scan])
                mds_scan = load_scan_from_mds(scan_id)
            except Exception as ex:
                print(
                    "Filestore load failed (datum={}): ({}) {}"
                    "".format(calc_scan_numbers[i_scan], ex.__class__.__name__, ex)
                )
                raise
            mds_scan.key = file_store_key
            dpc_settings["scan"] = mds_scan
            dpc_settings["ref_image"] = get_ref_from_mds(mds_scan, scan_parameters["first_image"], file_store_key)
            if read_params_from_datastore == 1:
                dx, dy, cols, rows, pyramid_scan = set_scan_from_scaninfo(mds_scan)
                dpc_settings["dx"] = dx
                dpc_settings["dy"] = dy
                dpc_settings["rows"] = rows
                dpc_settings["cols"] = cols
                dpc_settings["pyramid"] = pyramid_scan
        else:
            print("\nProcessing scan ", scan_filename)
            load_image = load_image_hdf5
            dpc_settings["use_hdf5"] = True
        if processes == 0:
            print(
                "Error - number of processes in myscript.txt is equal to 0. "
                "Please set to minimum 1 with processes = 1."
            )
            exit()
        else:
            pool = mp.Pool(processes=processes)
        # Run the analysis
        a, gx, gy, phi, rx, ry = dpc_kernel_main(
            pool=pool, display_fcn=None, load_image=load_image, **dpc_settings
        )
        save_results(
            a,
            gx,
            gy,
            phi,
            rx,
            ry,
            save_path,
            save_filename,
            calc_scan_numbers[i_scan],
            save_pngs=save_pngs,
            save_tif=True,
            save_txt=save_txt,
        )
    print("DPC finished")
""" ------------------------------------------------------------------------------------------------"""
def run_dpc_script():
    """Console entry point: run the batch defined by the script file in argv[1]."""
    if len(sys.argv) < 2:
        print("Script file is not specified.\nUsage: dpcmaps-script <script-file_name>")
        exit()
    run_batch(sys.argv[1])


if __name__ == "__main__":
    run_dpc_script()
|
tcal42/actralyze | app.py | import streamlit as st
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn import preprocessing
# Import VADER sentiment analyzer.
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
# VADER is lexicon-based; no training is needed before scoring comments.
analyzer = SentimentIntensityAnalyzer()
st.title('Actralyze - analysis of survey responses')
st.subheader('Average sentiment by category')
# Fit the label encoder on the training set so integer predictions can be
# mapped back to category names.
df = pd.read_csv('train_comments.csv')
le = preprocessing.LabelEncoder()
le.fit(df['category'].tolist())
categories = list(le.classes_)
# Pool of comments with integer class predictions in the "preds" column.
# NOTE(review): assumes "preds" values were produced by a model trained on the
# same label encoding as train_comments.csv -- confirm upstream.
df2 = pd.read_csv('labeled_pool_comments.csv')
category = pd.DataFrame(le.inverse_transform(df2["preds"].values))
df2["category"] = category
# Compute and plot polarity scores.
positive_scores = []
negative_scores = []
for comment in df2.comment.values:
    vs = analyzer.polarity_scores(comment)
    positive_scores.append(vs["pos"])
    negative_scores.append(vs["neg"])
positive_df = pd.DataFrame(positive_scores)
negative_df = pd.DataFrame(negative_scores)
df2['positive'] = positive_df
# Negative scores are negated so the chart shows them below the axis.
df2['negative'] = -1*negative_df
df4 = df2.groupby(['category']).mean()
df4.drop(columns = ['preds'], inplace = True)
st.bar_chart(df4)
# Show distribution of categories
st.subheader('Distribution of categories')
df3 = df2.groupby(['category']).count()
hist_data = df3['comment']
st.bar_chart(hist_data)
st.subheader('Example comments:')
# Add a dropdown menu to view example text
add_selectbox = st.selectbox(
    'Choose a category to view example text',
    tuple(categories)
)
df_category = df2.loc[df2['category'] == add_selectbox]
if df_category.empty:
    st.write('No comments in this category.')
else:
    # Show three randomly sampled example comments (duplicates possible).
    example_quotes = df_category.comment.values
    example_quote = example_quotes[np.random.randint(len(example_quotes))]
    st.write(example_quote)
    example_quote = example_quotes[np.random.randint(len(example_quotes))]
    st.write(example_quote)
    example_quote = example_quotes[np.random.randint(len(example_quotes))]
    st.write(example_quote)
|
bentech/tquery | tmp/pythonweb/webserver.py | #!/usr/bin/python
# Copyright <NAME> , turtlemeat.com
# Modified by nikomu @ code.google.com
import string,cgi,time
from os import curdir, sep
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import os # os. path
CWD = os.path.abspath('.')
## print CWD
# PORT = 8080
UPLOAD_PAGE = 'upload.html' # must contain a valid link with address and port of the server s
def make_index(relpath):
    """Build a minimal HTML page listing the entries of *relpath* as links."""
    abspath = os.path.abspath(relpath)
    links = []
    for entry in os.listdir(abspath):
        rel = os.path.join(relpath, entry)
        links.append('<a href="%s">%s</a><br>' % (rel, rel))
    return "<html><head></head><body>%s</body></html>" % ("\n".join(links),)
# -----------------------------------------------------------------------
class MyHandler(BaseHTTPRequestHandler):
    """Serves a directory index, static files, and multipart file uploads.

    NOTE(review): Python 2 code (BaseHTTPServer, print statements); it serves
    any file reachable from the working directory -- not safe on a public
    network.
    """

    def do_GET(self):
        # '/'       -> generated directory index
        # '*.html'  -> served as text/html
        # '*.esp'   -> tiny dynamic demo (day-of-year)
        # anything else -> raw bytes as application/octet-stream
        try:
            if self.path == '/' :
                page = make_index( '.' )
                self.send_response(200)
                self.send_header('Content-type', 'text/html')
                self.end_headers()
                self.wfile.write(page)
                return
            if self.path.endswith(".html"):
                ## print curdir + sep + self.path
                f = open(curdir + sep + self.path)
                #note that this potentially makes every file on your computer readable by the internet
                self.send_response(200)
                self.send_header('Content-type', 'text/html')
                self.end_headers()
                self.wfile.write(f.read())
                f.close()
                return
            if self.path.endswith(".esp"): #our dynamic content
                self.send_response(200)
                self.send_header('Content-type', 'text/html')
                self.end_headers()
                # time.localtime()[7] is the day of the year, [0] the year.
                self.wfile.write("hey, today is the" + str(time.localtime()[7]))
                self.wfile.write(" day in the year " + str(time.localtime()[0]))
                return
            else : # default: just send the file
                filepath = self.path[1:] # remove leading '/'
                f = open( os.path.join(CWD, filepath), 'rb' )
                #note that this potentially makes every file on your computer readable by the internet
                self.send_response(200)
                self.send_header('Content-type', 'application/octet-stream')
                self.end_headers()
                self.wfile.write(f.read())
                f.close()
                return
            return # be sure not to fall into "except:" clause ?
        except IOError as e :
            # debug
            print e
            self.send_error(404,'File Not Found: %s' % self.path)

    def do_POST(self):
        # Accept a multipart/form-data upload (field 'upfile'), store it in
        # CWD without clobbering existing files (appends '.copy(N)'), then
        # reply with a small confirmation page.
        # global rootnode ## something remained in the orig. code
        try:
            ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
            if ctype == 'multipart/form-data' :
                # original version :
                '''
                query=cgi.parse_multipart(self.rfile, pdict)
                upfilecontent = query.get('upfile')
                print "filecontent", upfilecontent[0]
                '''
                # using cgi.FieldStorage instead, see
                # http://stackoverflow.com/questions/1417918/time-out-error-while-creating-cgi-fieldstorage-object
                fs = cgi.FieldStorage( fp = self.rfile,
                    headers = self.headers, # headers_,
                    environ={ 'REQUEST_METHOD':'POST' } # all the rest will come from the 'headers' object,
                    # but as the FieldStorage object was designed for CGI, absense of 'POST' value in environ
                    # will prevent the object from using the 'fp' argument !
                    )
                ## print 'have fs'
            else: raise Exception("Unexpected POST request")
            fs_up = fs['upfile']
            filename = os.path.split(fs_up.filename)[1] # strip the path, if it presents
            fullname = os.path.join(CWD, filename)
            # check for copies :
            if os.path.exists( fullname ):
                fullname_test = fullname + '.copy'
                i = 0
                while os.path.exists( fullname_test ):
                    fullname_test = "%s.copy(%d)" % (fullname, i)
                    i += 1
                fullname = fullname_test
            if not os.path.exists(fullname):
                with open(fullname, 'wb') as o:
                    # self.copyfile(fs['upfile'].file, o)
                    o.write( fs_up.file.read() )
            self.send_response(200)
            self.end_headers()
            self.wfile.write("<HTML><HEAD></HEAD><BODY>POST OK.<BR><BR>");
            self.wfile.write( "File uploaded under name: " + os.path.split(fullname)[1] );
            self.wfile.write( '<BR><A HREF=%s>back</A>' % ( UPLOAD_PAGE, ) )
            self.wfile.write("</BODY></HTML>");
        except Exception as e:
            # pass
            print e
            self.send_error(404,'POST to "%s" failed: %s' % (self.path, str(e)) )
def main():
    """Start the upload/download HTTP server on port 8080 (Python 2).

    Runs until interrupted; Ctrl-C shuts the server socket down cleanly.
    """
    try:
        server = HTTPServer(('', 8080), MyHandler)
        print 'started httpserver...'
        server.serve_forever()
    except KeyboardInterrupt:
        print '^C received, shutting down server'
        server.socket.close()

if __name__ == '__main__':
    main()
|
bentech/tquery | plugins/cannonjs/vendor/cannon.js/utils/build.py | <reponame>bentech/tquery
#!/usr/bin/env python
try:
import argparse
ap = 1
except ImportError:
import optparse
ap = 0
import os
import tempfile
import sys
from JSCompress import JSCompressor
# Core engine sources, in dependency order (paths relative to ../src).
COMMON_FILES = ['Cannon.js',
                'collision/Broadphase.js',
                'collision/NaiveBroadphase.js',
                'math/Mat3.js',
                'math/Vec3.js',
                'math/Quaternion.js',
                'objects/Shape.js',
                'objects/RigidBody.js',
                'objects/Sphere.js',
                'objects/Box.js',
                'objects/Plane.js',
                'objects/Compound.js',
                'solver/Solver.js',
                'material/Material.js',
                'material/ContactMaterial.js',
                'world/World.js']

# Extra sources for the demo bundle.
DEMO_FILES = ['demo/Demo.js']
def merge(files):
    """Concatenate the given source files (relative to ../src) into one string."""
    chunks = []
    for name in files:
        with open(os.path.join('..', 'src', name), 'r') as fh:
            chunks.append(fh.read())
    return "".join(chunks)
def output(text, filename):
    """Write *text* to ../build/<filename>."""
    target = os.path.join('..', 'build', filename)
    with open(target, 'w') as fh:
        fh.write(text)
def compress(text):
    """Minify *text* using the bundled JSCompressor."""
    return JSCompressor().compress(text)
def addHeader(text):
    """Prepend the LICENSE text (separated by a blank line) to *text*.

    NOTE(review): ../VERSION is read but its value is unused; the read is kept
    so the function still requires the file to exist, as before.
    """
    with open(os.path.join('..', 'VERSION'), 'r') as fh:
        fh.read().rstrip()  # value intentionally unused (parity with original)
    with open(os.path.join('..', 'LICENSE'), 'r') as fh:
        license_text = fh.read().rstrip()
    return license_text + "\n\n" + text
def buildLib(files, minified, filename):
    """Merge sources, optionally minify, prepend the license, and write the
    bundle to ../build/<filename>.js (Python 2)."""
    text = merge(files)
    filename = filename + '.js'
    print "=" * 40
    print "Compiling", filename
    print "=" * 40
    if minified:
        text = compress(text)
    output(addHeader(text), filename)
def parse_args():
    """Parse CLI arguments, using argparse when available and optparse otherwise.

    The module-level flag ``ap`` records which library was importable.
    """
    if ap:
        parser = argparse.ArgumentParser(description='Build and compress cannon.js')
        args = parser.parse_args()
    else:
        parser = optparse.OptionParser(description='Build and compress cannon.js')
        args, _remainder = parser.parse_args()
    # (The original kept a disabled block that printed help when no
    #  arguments were given.)
    return args
def main(argv=None):
    """Build the plain, minified, and demo cannon.js bundles."""
    parse_args()
    buildLib(COMMON_FILES, False, 'cannon')
    buildLib(COMMON_FILES, True, 'cannon.min')
    buildLib(DEMO_FILES, False, 'cannon.demo')

if __name__ == "__main__":
    main()
|
HamidrezaZarrabi/BlessMark | utils.py | <filename>utils.py
import h5py
import numpy as np
from copy import deepcopy
# -------- My normalized correlation function
def my_nc(watermark_extr, watermark_orig):
    """Normalized correlation between extracted and original binary watermarks.

    Zeros are mapped to -1 before correlating, and the result is rescaled to
    [0, 1]; 1.0 means the watermarks are identical.

    Fixes: the per-pixel Python loops are replaced with vectorized NumPy ops,
    and the unsigned-overflow bug is gone -- the original assigned -1 into
    uint8 arrays, which wrapped to 255 and skewed the correlation for
    mismatching pixels.
    """
    assert (np.shape(watermark_extr) == np.shape(watermark_orig))
    # np.where promotes to a signed dtype, so -1 stays -1 even for uint8 input.
    extr = np.where(np.asarray(watermark_extr) == 0, -1, watermark_extr)
    orig = np.where(np.asarray(watermark_orig) == 0, -1, watermark_orig)
    corr = np.sum(orig * extr) / np.size(watermark_orig)
    return (corr + 1) / 2
# --------------- Extract blocks from image ---------
def extract_blocks(img, block_height, block_width):
    """Normalize a 4-D image batch to grayscale [0, 1], pad it, and tile it.

    RGB input (channel dim == 3) is converted to grayscale first; values are
    scaled from 0-255 to 0-1, the image is zero-padded so the block division
    is exact, and the resulting blocks are returned.
    """
    assert img.ndim == 4
    if img.shape[3] == 3:  # RGB input -> single-channel luma
        img = rgb2gray(img)
    img = img / 255.  # scale to the 0-1 range
    padded = paint_border(img, block_height, block_width)
    return extract_ordered(padded, block_height, block_width)
# ------------- Divide the image into blocks -------
def extract_ordered(full_img, block_h, block_w):
    """Split a 4-D image stack into non-overlapping (block_h, block_w) tiles.

    Blocks are emitted row-major per image, images in order.  Leftover pixels
    on the bottom/right edges are dropped with a warning.

    Fix: the original only tiled ``full_img[0]`` while sizing the output for
    all images, so any batch with more than one image tripped the final count
    assertion; all images are now processed.
    """
    assert (len(full_img.shape) == 4)  # 4D arrays
    assert (full_img.shape[3] == 1 or full_img.shape[3] == 3)  # 1 or 3 channels
    img_h = full_img.shape[1]
    img_w = full_img.shape[2]
    n_blocks_h = int(img_h / block_h)
    if img_h % block_h != 0:
        print("warning: " + str(n_blocks_h) + " blocks in height, with about " + str(img_h % block_h) +
              " pixels left over")
    n_blocks_w = int(img_w / block_w)
    if img_w % block_w != 0:
        print("warning: " + str(n_blocks_w) + " blocks in width, with about " + str(img_w % block_w) +
              " pixels left over")
    n_blocks_tot = (n_blocks_h * n_blocks_w) * full_img.shape[0]
    blocks = np.empty((n_blocks_tot, block_h, block_w, full_img.shape[3]))
    iter_tot = 0
    for i in range(full_img.shape[0]):  # fix: iterate over every image
        for h in range(n_blocks_h):
            for w in range(n_blocks_w):
                blocks[iter_tot] = full_img[i, h*block_h:(h+1)*block_h, w*block_w:(w+1)*block_w, :]
                iter_tot += 1
    assert (iter_tot == n_blocks_tot)
    return blocks
# -------- Construct the image with the blocks ----------
def recompone(data, n_h, n_w):
    """Reassemble a block array into full images (n_h x n_w blocks each).

    Inverse of the tiling step: consumes blocks in row-major order per image
    and returns an array of shape (n_imgs, n_h*bh, n_w*bw, channels).
    """
    assert (data.shape[3] == 1 or data.shape[3] == 3)  # 1 or 3 channels
    assert (len(data.shape) == 4)
    blocks_per_img = n_h * n_w
    assert (data.shape[0] % blocks_per_img == 0)
    n_imgs = int(data.shape[0] / blocks_per_img)
    bh, bw = data.shape[1], data.shape[2]
    full = np.empty((n_imgs, n_h * bh, n_w * bw, data.shape[3]))
    idx = 0  # running block index
    for k in range(n_imgs):
        canvas = np.empty((n_h * bh, n_w * bw, data.shape[3]))
        for h in range(n_h):
            for w in range(n_w):
                canvas[h * bh:(h + 1) * bh, w * bw:(w + 1) * bw, :] = data[idx]
                idx += 1
        full[k] = canvas
    return full
# ----------------- Extend the image because block division is not exact
def paint_border(data, block_h, block_w):
    """Zero-pad the bottom/right so height and width are block-size multiples."""
    assert (data.shape[3] == 1 or data.shape[3] == 3)  # 1 or 3 channels
    img_h, img_w = data.shape[1], data.shape[2]
    new_h = img_h if img_h % block_h == 0 else int(np.ceil(img_h / block_h)) * block_h
    new_w = img_w if img_w % block_w == 0 else int(np.ceil(img_w / block_w)) * block_w
    padded = np.zeros((data.shape[0], new_h, new_w, data.shape[3]))
    padded[:, :img_h, :img_w, :] = data
    return padded
def load_hdf5(infile):
    """Read and return the "image" dataset from an HDF5 file as an array."""
    with h5py.File(infile, "r") as f:  # "with" close the file after its nested commands
        return f["image"][()]
def write_hdf5(arr, outfile):
    """Write array *arr* to *outfile* under the HDF5 dataset name "image"."""
    with h5py.File(outfile, "w") as f:
        f.create_dataset("image", data=arr, dtype=arr.dtype)
# -------- Convert RGB image into grayscale
def rgb2gray(rgb):
    """Convert an RGB batch (n, h, w, 3) to grayscale (n, h, w, 1).

    Uses the ITU-R BT.601 luma weights (0.299, 0.587, 0.114).
    """
    assert rgb.ndim == 4  # 4D arrays
    weights = (0.299, 0.587, 0.114)
    gray = sum(rgb[:, :, :, c] * w for c, w in enumerate(weights))
    return gray.reshape(rgb.shape[0], rgb.shape[1], rgb.shape[2], 1)
# ------------------ Convert the prediction arrays in corresponding blocks
def pred_to_imgs(pred, block_height, block_width, mode="original"):
    """Convert two-class predictions (n, h*w, 2) into block images (n, h, w, 1).

    mode="original" keeps the raw class-1 probabilities; mode="threshold"
    binarizes them at 0.5.  Any other mode prints an error and exits, as
    before.

    Fix: the per-pixel Python loops are replaced with vectorized NumPy ops
    (same float64 output as the original np.empty-based implementation).
    """
    assert (len(pred.shape) == 3)  # 3D array: (n_blocks, height*width, 2)
    assert (pred.shape[2] == 2)  # Check the classes are 2
    if mode == "original":
        pred_image = pred[:, :, 1].astype(np.float64)
    elif mode == "threshold":
        pred_image = (pred[:, :, 1] >= 0.5).astype(np.float64)
    else:
        print("mode " + str(mode) + " not recognized, it can be 'original' or 'threshold'")
        exit()
    return np.reshape(pred_image, (pred_image.shape[0], block_height, block_width, 1))
|
HamidrezaZarrabi/BlessMark | main.py | import argparse
import os
import numpy as np
import time
import scipy.io as sio
import shutil
from tqdm import tqdm
from PIL import Image
from embedding import embedding
from extraction import extraction
from skimage.measure import compare_psnr as psnr
from utils import my_nc
# ---------------------------- Parser arguments -----------------
# Command-line interface: the same script performs watermark embedding or
# extraction depending on -process_name.
ap = argparse.ArgumentParser()
ap.add_argument('-data_path', type=str, help='Where to get the images for embedding/extraction')
ap.add_argument('-img_size', '--image_size', type=int, nargs='+', help='Which size of image, [height, width, channel]')
ap.add_argument('-process_name', choices=['embedding', 'extraction'], type=str, help='Embedding or extraction process')
ap.add_argument('-coef', '--coefficient', type=int, nargs='+', help='Which used DCT coefficients in our algorithm')
ap.add_argument('-thresh', '--threshold', default=0.01, type=float, help='Which used threshold in our algorithm')
ap.add_argument('-blk_size', '--block_size', type=int, help='Which block size of image')
ap.add_argument('-cap', '--capacity', default=0.012, type=float, help='Capacity of watermarking as bit per pixel')
ap.add_argument('-seg_path', '--segmentation_model_path', type=str, help='Where to get the segmentation model')
ap.add_argument('-class_path', '--classification_model_path', type=str, help='Where to get the distortion detection model')
args = ap.parse_args()
# Unpack into module-level names used by the embedding/extraction sections.
data_path = args.data_path
img_size = args.image_size
process_name = args.process_name
coefficient = args.coefficient
thresh = args.threshold
block_size = args.block_size
capacity = args.capacity
segment_model_path = args.segmentation_model_path
class_model_path = args.classification_model_path
# --------------------------- Embedding process --------
if process_name == 'embedding':
    files = os.listdir(data_path)
    # Recreate the output directory for the watermarked images.
    shutil.rmtree('workspace/img_marked', ignore_errors=True)
    os.makedirs('workspace/img_marked')  # Path of watermarked image
    # --------- Generate the random binary watermark -----------
    total_bit = int(capacity*np.prod(img_size))
    mark = np.random.randint(2, size=(total_bit, 1), dtype='uint8')
    switched_block = []  # Percent of switched NROI block into ROI block
    start_time = time.time()
    for file in tqdm(files):
        img_org = Image.open(os.path.join(data_path, file))
        img_org = np.asarray(img_org)
        [img_marked, switched] = embedding(img_org, img_size, mark, block_size, thresh, coefficient,
                                           segment_model_path)  # embedding
        switched_block.append(switched)
        img_marked = Image.fromarray(img_marked)
        img_marked.save(os.path.join('workspace/img_marked', file))
    elapsed = time.time() - start_time
    # Persist the watermark so a later extraction run can verify against it.
    sio.savemat('workspace/mark_'+str(capacity)+'.mat', {'mark': mark})
    print('Average percent of switched NROI block into ROI block : ' + str(np.mean(switched_block)))
    print('Embedding time: ', elapsed)
# ------------------------------------ Extraction process-----------
elif process_name == 'extraction':
    files = os.listdir(data_path)
    shutil.rmtree('workspace/img_recovered', ignore_errors=True)
    os.makedirs('workspace/img_recovered')
    # Watermark saved by the embedding run, used as ground truth below.
    mark_orig = sio.loadmat('./workspace/mark_' + str(capacity) + '.mat')['mark']
    start_time = time.time()
    for file in tqdm(files):
        img_marked = Image.open(os.path.join(data_path, file))
        img_marked = np.asarray(img_marked)
        [img_recovered, mark_extr] = extraction(img_marked, img_size, block_size, thresh, coefficient,
                                                capacity, segment_model_path, class_model_path)  # Extraction
        img_recovered = Image.fromarray(img_recovered)
        img_recovered.save(os.path.join('workspace/img_recovered', file))
        # NC == 1 means the extracted watermark matches the original exactly.
        assert (my_nc(mark_extr, mark_orig) == 1)
    elapsed = time.time() - start_time
    print('Extraction time: ', elapsed)
    # ----------------- Evaluate ---------
    PSNR_NROI_marked, PSNR_NROI_recovered, PSNR_ROI_marked, PSNR_ROI_recovered, PSNR_img_marked, PSNR_img_recovered = \
        [[] for _ in range(6)]
    NROI_img_orig, NROI_img_marked, NROI_img_recovered, ROI_img_orig, ROI_img_marked, ROI_img_recovered = \
        [[] for _ in range(6)]
    gtruth_extension = os.path.splitext(os.listdir('workspace/img_gtruth')[0])[1]  # Extension of gtruth image
    for file in tqdm(files):
        img_marked = Image.open(os.path.join(data_path, file))
        img_marked = np.asarray(img_marked)
        img_orig = Image.open(os.path.join('workspace/img_orig', file))
        img_orig = np.asarray(img_orig)
        img_gtruth = Image.open(os.path.join('workspace/img_gtruth', os.path.splitext(file)[0] + gtruth_extension))
        img_gtruth = np.asarray(img_gtruth)
        img_recovered = Image.open(os.path.join('workspace/img_recovered', file))
        img_recovered = np.asarray(img_recovered)
        img_height, img_width = img_gtruth.shape
        # NOTE(review): the ROI/NROI pixel lists accumulate across ALL files,
        # so each per-file PSNR appended below is cumulative up to that file
        # -- confirm this is intended rather than per-file statistics.
        for m in range(0, img_height):
            for n in range(0, img_width):
                if img_gtruth[m, n] == 0:  # NROI pixel
                    NROI_img_orig.append(img_orig[m, n])
                    NROI_img_marked.append(img_marked[m, n])
                    NROI_img_recovered.append(img_recovered[m, n])
                else:  # ROI pixel
                    ROI_img_orig.append(img_orig[m, n])
                    ROI_img_marked.append(img_marked[m, n])
                    ROI_img_recovered.append(img_recovered[m, n])
        PSNR_NROI_marked.append(psnr(np.array(NROI_img_orig), np.array(NROI_img_marked), data_range=255))
        PSNR_NROI_recovered.append(psnr(np.array(NROI_img_orig), np.array(NROI_img_recovered), data_range=255))
        PSNR_ROI_marked.append(psnr(np.array(ROI_img_orig), np.array(ROI_img_marked), data_range=255))
        PSNR_ROI_recovered.append(psnr(np.array(ROI_img_orig), np.array(ROI_img_recovered), data_range=255))
        PSNR_img_marked.append(psnr(img_orig, img_marked, data_range=255))
        PSNR_img_recovered.append(psnr(img_orig, img_recovered, data_range=255))
    print('Mean PSNR of watermarked images: ' + str(np.mean(PSNR_img_marked)))
    print('Mean PSNR of recovered images: ' + str(np.mean(PSNR_img_recovered)))
    print('Mean PSNR improvement: ' + str(np.abs(np.mean(PSNR_img_marked) - np.mean(PSNR_img_recovered))))
    print('Mean PSNR of NROI blocks before recovery: ' + str(np.mean(PSNR_NROI_marked)))
    print('Mean PSNR of NROI blocks after recovery: ' + str(np.mean(PSNR_NROI_recovered)))
    print('Mean NROI PSNR improvement: ' + str(np.abs(np.mean(PSNR_NROI_marked) - np.mean(PSNR_NROI_recovered))))
    print('Mean PSNR of ROI block before recovery: ' + str(np.mean(PSNR_ROI_marked)))
    print('Mean PSNR of ROI block after recovery: ' + str(np.mean(PSNR_ROI_recovered)))
    print('Mean ROI PSNR improvement: ' + str(np.abs(np.mean(PSNR_ROI_marked) - np.mean(PSNR_ROI_recovered))))
|
HamidrezaZarrabi/BlessMark | extraction.py | import cv2
import os
import tensorflow as tf
from utils import *
from copy import deepcopy
model_from_json = tf.keras.models.model_from_json
def _recover_block(block_dct):
    """Inverse-DCT a block back to pixel domain, clipped to the valid [0, 255] range."""
    return np.round(np.clip(cv2.idct(block_dct) * 255., 0., 255.))


def extraction(img_marked, img_size, block_size, th, coef, cap, segment_model_pth, class_model_path):
    """Extract the watermark from a marked image and recover distorted NROI blocks.

    A segmentation network classifies each block_size x block_size block as
    ROI/NROI; watermark bits were embedded only in NROI blocks by ordering the
    DCT coefficient pair (u, v)/(v, u).  For every NROI block channel this reads
    one bit from that ordering, and -- when the classification network says the
    block was distorted during embedding -- undoes the embedding (subtract `th`,
    restore the original ordering) to recover the block.

    Parameters
    ----------
    img_marked : np.ndarray
        Watermarked image, HxW (grayscale) or HxWxC.
    img_size : tuple
        (height, width, channels) of the image.
    block_size : int
        Side length of the square blocks.
    th : float
        Embedding strength that was added to the larger DCT coefficient.
    coef : sequence of two ints
        (u, v) indices of the DCT coefficient pair used for embedding.
    cap : float
        Embedding capacity in bits per pixel; total bits = cap * prod(img_size).
    segment_model_pth, class_model_path : str
        Directories holding the Keras architecture JSON and weight files.

    Returns
    -------
    (np.ndarray, np.ndarray)
        The recovered image (squeezed back to the input rank) and the extracted
        watermark as a (total_bit, 1) uint8 array.
    """
    # Normalize to a 4-D batch tensor (1, H, W, C) expected by the models.
    if img_marked.ndim == 2:  # Grayscale image
        img_marked = np.expand_dims(img_marked, -1)
        img_marked = np.expand_dims(img_marked, 0)
    elif img_marked.ndim == 3:  # RGB image
        img_marked = np.expand_dims(img_marked, 0)
    img_height, img_width, img_channel = img_size

    # ---- Load the segmentation model and its weights ----
    # (the `with` block already closes the file; no explicit close() needed)
    with open(os.path.join(segment_model_pth, 'architecture_segmentation.json'), 'r') as json_file:
        model_json = json_file.read()
    model = model_from_json(model_json)
    model.load_weights(os.path.join(segment_model_pth, 'best_weights_segmentation.h5'))

    # ---- Segment the marked image block-by-block ----
    blocks_img_marked = extract_blocks(img=img_marked, block_height=block_size, block_width=block_size)
    predictions = model.predict(blocks_img_marked)
    predict_blocks = pred_to_imgs(predictions, block_size, block_size, "threshold")
    # Reassemble block predictions into a full-size ROI mask, then crop padding.
    predict_marked = recompone(predict_blocks, int(np.ceil(img_height / block_size)),
                               int(np.ceil(img_width / block_size)))
    predict_marked = predict_marked[:, 0:img_height, 0:img_width, :]

    # -------------------------- Extraction and recovery process --------------
    [u, v] = coef                             # DCT coefficient pair
    total_bit = int(cap * np.prod(img_size))  # Size of watermark

    # ---- Load the classification model (detects blocks distorted by embedding) ----
    with open(os.path.join(class_model_path, 'architecture_classification.json'), 'r') as json_file:
        model_json = json_file.read()
    model = model_from_json(model_json)
    model.load_weights(os.path.join(class_model_path, 'best_weights_classification.h5'))

    mark = np.zeros((total_bit, 1), dtype='uint8')  # Extracted watermark
    img_recovered = deepcopy(img_marked)            # Recovered image
    cnt_mark = 0                                    # Counter of extracted watermark bits
    for m in range(0, block_size * (img_height // block_size), block_size):
        if cnt_mark == total_bit:  # Whole watermark has been extracted
            break
        for n in range(0, block_size * (img_width // block_size), block_size):
            if cnt_mark == total_bit:
                break
            if np.sum(predict_marked[0, m:m+block_size, n:n+block_size, 0]) == 0:  # NROI block
                for chn in range(0, img_channel):  # One bit per channel
                    if cnt_mark == total_bit:
                        break
                    cover = img_marked[0, m:m+block_size, n:n+block_size, chn] / 255.
                    cover_dct = cv2.dct(cover)
                    prediction = model.predict(img_marked[0:1, m:m+block_size, n:n+block_size, chn:chn+1] / 255.)
                    if cover_dct[u, v] >= cover_dct[v, u]:
                        mark[cnt_mark] = 0
                        if np.round(prediction) == 1:  # Block was distorted during embedding
                            cover_dct[u, v] -= th
                            if cover_dct[u, v] > cover_dct[v, u]:
                                cover_dct[u, v], cover_dct[v, u] = cover_dct[v, u], cover_dct[u, v]
                            img_recovered[0, m:m+block_size, n:n+block_size, chn] = _recover_block(cover_dct)
                    else:
                        mark[cnt_mark] = 1
                        if np.round(prediction) == 1:  # Block was distorted during embedding
                            cover_dct[v, u] -= th
                            if cover_dct[v, u] > cover_dct[u, v]:
                                cover_dct[u, v], cover_dct[v, u] = cover_dct[v, u], cover_dct[u, v]
                            img_recovered[0, m:m+block_size, n:n+block_size, chn] = _recover_block(cover_dct)
                    cnt_mark += 1
    return np.squeeze(img_recovered), mark
|
HamidrezaZarrabi/BlessMark | embedding.py | <reponame>HamidrezaZarrabi/BlessMark
import cv2
import os
import tensorflow as tf
from utils import *
from copy import deepcopy
model_from_json = tf.keras.models.model_from_json
def embedding(img_orig, img_size, mark, block_size, th, coef, segment_model_path):
    """Embed watermark bits into the NROI blocks of an image.

    A segmentation network splits the image into ROI/NROI blocks.  One bit is
    embedded per NROI block channel by ordering the DCT coefficient pair
    (u, v)/(v, u): the coefficient encoding the bit is made the larger one and
    `th` is added to it for robustness.  Because embedding can flip a block's
    ROI/NROI classification, the whole pass is repeated until the ROI block
    map no longer changes.

    Parameters
    ----------
    img_orig : np.ndarray
        Cover image, HxW (grayscale) or HxWxC.
    img_size : tuple
        (height, width, channels) of the image.
    mark : np.ndarray
        Watermark bits (0/1); all of them must fit into the NROI blocks.
    block_size : int
        Side length of the square blocks.
    th : float
        Embedding strength added to the larger DCT coefficient.
    coef : sequence of two ints
        (u, v) indices of the DCT coefficient pair.
    segment_model_path : str
        Directory holding the segmentation model JSON/weights.

    Returns
    -------
    (np.ndarray, float)
        The watermarked image (squeezed) and the percentage of NROI blocks
        that switched to ROI during embedding.
    """
    # Normalize to a 4-D batch tensor (1, H, W, C) expected by the models.
    if img_orig.ndim == 2:  # Grayscale image
        img_orig = np.expand_dims(img_orig, -1)
        img_orig = np.expand_dims(img_orig, 0)
    elif img_orig.ndim == 3:  # RGB image
        img_orig = np.expand_dims(img_orig, 0)
    img_height, img_width, img_channel = img_size

    # ---- Load the segmentation model and its weights ----
    # (the `with` block already closes the file; no explicit close() needed)
    with open(os.path.join(segment_model_path, 'architecture_segmentation.json'), 'r') as json_file:
        model_json = json_file.read()
    model = model_from_json(model_json)
    model.load_weights(os.path.join(segment_model_path, 'best_weights_segmentation.h5'))

    # ---- Segment the original image block-by-block ----
    blocks_img_orig = extract_blocks(img=img_orig, block_height=block_size, block_width=block_size)
    predictions = model.predict(blocks_img_orig)
    pred_blocks = pred_to_imgs(predictions, block_size, block_size, "threshold")
    # Reassemble block predictions into a full-size ROI mask, then crop padding.
    pred_orig = recompone(pred_blocks, int(np.ceil(img_height / block_size)),
                          int(np.ceil(img_width / block_size)))
    pred_orig = pred_orig[:, 0:img_height, 0:img_width, :]

    # ------------------------------- Embedding process -----------------------
    [u, v] = coef                           # DCT coefficient pair
    total_bit = mark.size                   # Size of watermark
    img_marked = deepcopy(img_orig)         # Watermarked image
    pred = deepcopy(pred_orig[0, :, :, 0])  # ROI map used for embedding decisions
    total_switch = 0                        # Total NROI blocks switched into ROI
    while True:  # Embed until the ROI block map remains unchanged
        cnt_mark = 0  # Counter of embedded watermark bits
        for m in range(0, block_size * (img_height // block_size), block_size):
            if cnt_mark == total_bit:  # Whole watermark has been embedded
                break
            for n in range(0, block_size * (img_width // block_size), block_size):
                if cnt_mark == total_bit:
                    break
                if np.sum(pred[m:m+block_size, n:n+block_size]) == 0:  # NROI block
                    for chn in range(0, img_channel):  # Embedding throughout channels
                        if cnt_mark == total_bit:
                            break
                        cover = img_orig[0, m:m+block_size, n:n+block_size, chn] / 255.
                        cover_dct = cv2.dct(cover)  # Apply DCT
                        # Order the coefficient pair so the bit-carrying one is larger.
                        if (mark[cnt_mark] == 0) and (cover_dct[u, v] <= cover_dct[v, u]):
                            cover_dct[u, v], cover_dct[v, u] = cover_dct[v, u], cover_dct[u, v]
                            cover_dct[u, v] += th
                        elif (mark[cnt_mark] == 1) and (cover_dct[v, u] <= cover_dct[u, v]):
                            cover_dct[u, v], cover_dct[v, u] = cover_dct[v, u], cover_dct[u, v]
                            cover_dct[v, u] += th
                        marked = cv2.idct(cover_dct)  # Apply inverse DCT
                        cnt_mark += 1
                        # Clamp over/underflow to the valid pixel range in one shot.
                        img_marked[0, m:m+block_size, n:n+block_size, chn] = np.clip(np.round(marked * 255.), 0, 255)
        # ---- Re-segment the watermarked image to detect switched blocks ----
        blocks_img_marked = extract_blocks(img=img_marked, block_height=block_size, block_width=block_size)
        predictions = model.predict(blocks_img_marked)
        pred_blocks = pred_to_imgs(predictions, block_size, block_size, "threshold")
        pred_marked = recompone(pred_blocks, int(np.ceil(img_height / block_size)),
                                int(np.ceil(img_width / block_size)))
        pred_marked = pred_marked[:, 0:img_height, 0:img_width, :]
        cnt_switch = 0  # NROI blocks switched into ROI in this iteration
        for m in range(0, block_size * (img_height // block_size), block_size):
            for n in range(0, block_size * (img_width // block_size), block_size):
                if np.sum(pred[m:m+block_size, n:n+block_size]) == 0:
                    if np.sum(pred_marked[0, m:m+block_size, n:n+block_size, 0]) != 0:
                        cnt_switch += 1
        # Blocks that are ROI in either map are excluded from the next pass.
        pred = np.logical_or(pred_marked[0, :, :, 0], pred)
        assert(total_bit == cnt_mark)  # The whole watermark must have fit into NROI blocks
        if cnt_switch == 0:  # ROI block map remained unchanged
            break
        else:
            total_switch += cnt_switch

    # ---- Percent of NROI blocks (w.r.t. the original segmentation) that switched ----
    cnt_nroi = 0  # Total number of NROI blocks
    for m in range(0, block_size * (img_height // block_size), block_size):
        for n in range(0, block_size * (img_width // block_size), block_size):
            if np.sum(pred_orig[0, m:m + block_size, n:n + block_size, 0]) == 0:
                cnt_nroi += 1
    # Guard against an image with no NROI blocks at all (avoids ZeroDivisionError).
    switched_blk = (total_switch * 100) / cnt_nroi if cnt_nroi else 0.0
    return img_marked.squeeze(), switched_blk
|
dtglidden/hotspot-exon-paper | scripts/usage_by_HI_score.py | import sys
from os.path import join
from numpy import mean
import numpy as np
import argparse
chroms = set(['chr'+str(i) for i in range(1,23)] + ['chrY', 'chrX', 'chrM'])
if __name__ == '__main__' :
    # Compute the mean splice-site usage of each gene's principal transcript
    # and pair it with the gene's haploinsufficiency (HI) score.
    parser = argparse.ArgumentParser()
    parser.add_argument('--anno_path', help='Path to GENCODE GTF annotation file')
    parser.add_argument('--HI_path', help='Path to gene haploinsufficency score file')
    parser.add_argument('--usage_dir', help='Directory with 5\' and 3\' usage data files')
    parser.add_argument('--sample', help='Name of the sample from which the usage data was calculated')
    parser.add_argument('--out_dir', help='Where to output the average usage file to')
    args = parser.parse_args()
    anno_path, HI_path, usage_dir, sample, out_dir = args.anno_path, args.HI_path, args.usage_dir, args.sample, args.out_dir
    #Parse gene haploinsufficency scores
    HI_scores = {}
    with open(HI_path) as in_file:
        for line in in_file:
            gene, score = line.strip().split('\t')
            if gene != 'Gene_name':  # skip the header row
                HI_scores[gene] = float(score)
    #Parse splice site usage data
    # usages[ss_type][chrom][strand][position] -> usage fraction
    usages = {ss_type:{chrom:{'+':{}, '-':{}} for chrom in chroms} for ss_type in ['3p', '5p']}
    for ss_type in usages:
        with open(join(usage_dir, '_'.join([sample, ss_type, 'ss_usage.txt']))) as in_file:
            for line in in_file:
                chrom, site, strand, _, _, usage = line.strip().split('\t')
                if chrom != 'Chrom':  # skip the header row
                    usages[ss_type][chrom][strand][int(site)] = float(usage)
    #Parse the exons of each transcript and map transcript IDs to gene IDs
    tx_exons = {}
    gene_to_tx = {}
    with open(anno_path) as in_file:
        for line in in_file:
            chrom, _, entry_type, start, end, _, strand, _, info = line.strip().split('\t')
            if entry_type == 'exon':
                # GTF attribute column: 'key "value"; key "value"; ...'
                info_pairs = info.split('; ')[:-1]
                values = set([e.split(' ')[1].strip('\"') for e in info_pairs])
                info_dict = {e.split(' ')[0]:e.split(' ')[1].strip('\"') for e in info_pairs}
                tx_id, gene_id, gene_name = info_dict['transcript_id'].split('.')[0], info_dict['gene_id'].split('.')[0], info_dict['gene_name']
                exon = (int(start), int(end))
                # Only consider APPRIS principal transcripts
                if 'appris_principal_1' in values:
                    if gene_id not in gene_to_tx:
                        gene_to_tx[gene_id] = {'name':gene_name, 'tx':[tx_id], 'info':(chrom, strand)}
                    else:
                        gene_to_tx[gene_id]['tx'].append(tx_id)
                    if tx_id not in tx_exons:
                        tx_exons[tx_id] = [exon]
                    else:
                        tx_exons[tx_id].append(exon)
    #Identify the longest transcript in the gene
    gene_max_tx = {gene:max(gene_to_tx[gene]['tx'], key=lambda t:len(tx_exons[t])) for gene in gene_to_tx}
    #Calculate average usage for each gene that has an HI score
    gene_usage_by_HI = []
    for gene in gene_to_tx:
        gene_name = gene_to_tx[gene]['name']
        if gene_name in HI_scores:
            HI = HI_scores[gene_name]
            max_tx = gene_max_tx[gene]
            exon_num = len(tx_exons[max_tx])
            avg_usage = []
            chrom, strand = gene_to_tx[gene]['info']
            for i in range(len(tx_exons[max_tx])):
                exon_start, exon_end = tx_exons[max_tx][i]
                # On '+' the exon end is the 5' splice site; reversed on '-'
                if strand == '+':
                    fivep_site, threep_site = exon_end, exon_start
                else:
                    fivep_site, threep_site = exon_start, exon_end
                # The first exon has no 3' site and the last exon no 5' site
                if i != 0 and threep_site in usages['3p'][chrom][strand]:
                    avg_usage.append(usages['3p'][chrom][strand][threep_site])
                if i != len(tx_exons[max_tx]) - 1 and fivep_site in usages['5p'][chrom][strand]:
                    avg_usage.append(usages['5p'][chrom][strand][fivep_site])
            #Only output data for transcripts that have usage for each splice site in every exon
            if len(avg_usage) > max(2.0*len(tx_exons[max_tx])-3, 0):
                gene_usage_by_HI.append((gene, chrom, strand, str(mean(avg_usage)), str(HI)))
    with open(join(out_dir, '{}_gene_usage_HI_primary_tx.txt'.format(sample)), 'w') as out_file:
        out_file.write('Ensembl_ID\tChrom\tStrand\tAvg_usage\tHI_score\n')
        for i in range(len(gene_usage_by_HI)):
            out_file.write('\t'.join(gene_usage_by_HI[i]) + '\n')
|
dtglidden/hotspot-exon-paper | scripts/splice_site_usage.py | <reponame>dtglidden/hotspot-exon-paper
import sys
from os.path import join
from intervaltree import Interval, IntervalTree
import numpy as np
import time
import argparse
site_types = ['3p', '5p']
chroms = set(['chr'+str(i) for i in range(1,23)] + ['chrY', 'chrX'])
def parse_annotation(anno_path):
    """Collect annotated 3' and 5' splice sites from a GENCODE GTF file.

    Returns {'3p'|'5p': {chrom: {'+'|'-': sorted numpy array of positions}}}.
    On '+' exons the exon start is the 3' site and the exon end the 5' site;
    the roles are reversed on '-' exons.
    """
    print(time.strftime('%m-%d %I:%M:%S%p') + ' - Parsing GENCODE annotation...')
    sites = {st: {c: {'+': [], '-': []} for c in chroms} for st in site_types}
    with open(anno_path) as gtf:
        for record in gtf:
            if record[0] == '#':  # header/comment line
                continue
            fields = record.strip().split('\t')
            chrom, entry_type, strand = fields[0], fields[2], fields[6]
            if entry_type != 'exon' or chrom not in chroms:
                continue
            begin, stop = int(fields[3]), int(fields[4])
            threep, fivep = (begin, stop) if strand == '+' else (stop, begin)
            sites['3p'][chrom][strand].append(threep)
            sites['5p'][chrom][strand].append(fivep)
    # Sort each site list into a numpy array for later searchsorted lookups.
    return {st: {c: {s: np.sort(sites[st][c][s]) for s in ['+', '-']}
                 for c in chroms}
            for st in site_types}
def ss_usage(sj_paths, anno_sites):
    """Count inclusion/exclusion junction reads for every annotated splice site.

    Parameters
    ----------
    sj_paths : list of str
        STAR SJ.out.tab files (1-based intron coordinates).
    anno_sites : dict
        Output of parse_annotation(): sorted arrays of annotated 3'/5' sites.

    Returns
    -------
    (dict, dict)
        fivep_ss_usage and threep_ss_usage, each mapping
        chrom -> strand -> site -> (inclusion_reads, exclusion_reads).
    """
    print(time.strftime('%m-%d %I:%M:%S%p') + ' - Parsing junction reads...')
    strand_dict = {'1':'+', '2':'-'}
    junctions = {chrom:{'+':IntervalTree(), '-':IntervalTree()} for chrom in chroms}
    for sj_path in sj_paths:
        with open(sj_path) as in_file:
            for line in in_file:
                chrom, start, end, strand, _, _, reads, _, _ = line.strip().split('\t')
                # Shift STAR's 1-based intron bounds onto the flanking exonic positions.
                start, end, reads = int(start)-1, int(end)+1, int(reads)
                if chrom not in chroms or strand == '0' or reads == 0:
                    continue
                strand = strand_dict[strand]
                if strand == '+':
                    five_p_ss, three_p_ss = start, end
                else:
                    five_p_ss, three_p_ss = end, start
                # Keep only junctions whose both ends are annotated splice sites.
                if five_p_ss not in anno_sites['5p'][chrom][strand] or three_p_ss not in anno_sites['3p'][chrom][strand]:
                    continue
                junc = Interval(start, end, {'reads':reads})
                junctions[chrom][strand].add(junc)
    print(time.strftime('%m-%d %I:%M:%S%p') + ' - Calculating splice site usages...')
    fivep_ss_usage, threep_ss_usage = {chrom:{'+':{}, '-':{}} for chrom in chroms}, {chrom:{'+':{}, '-':{}} for chrom in chroms}
    for chrom in junctions:
        for strand in junctions[chrom]:
            juncs_processed = set()
            for junc_int in junctions[chrom][strand]:
                if (junc_int.begin, junc_int.end) not in juncs_processed:
                    # Find the nearest annotated sites bracketing this junction and
                    # gather all competing junctions that overlap either window.
                    if strand == '+':
                        fivep_ss, threep_ss = junc_int.begin, junc_int.end
                        next_threep_index = np.searchsorted(anno_sites['3p'][chrom][strand], fivep_ss, side='right')
                        next_fivep_index = np.searchsorted(anno_sites['5p'][chrom][strand], threep_ss, side='left')-1
                        next_fivep_ss = anno_sites['5p'][chrom][strand][next_fivep_index]
                        next_threep_ss = anno_sites['3p'][chrom][strand][next_threep_index]
                        fivep_juncs = junctions[chrom][strand].overlap(fivep_ss, next_threep_ss)
                        threep_juncs = junctions[chrom][strand].overlap(next_fivep_ss, threep_ss)
                    else:
                        fivep_ss, threep_ss = junc_int.end, junc_int.begin
                        next_threep_index = np.searchsorted(anno_sites['3p'][chrom][strand], fivep_ss, side='left')-1
                        next_fivep_index = np.searchsorted(anno_sites['5p'][chrom][strand], threep_ss, side='right')
                        next_fivep_ss = anno_sites['5p'][chrom][strand][next_fivep_index]
                        next_threep_ss = anno_sites['3p'][chrom][strand][next_threep_index]
                        fivep_juncs = junctions[chrom][strand].overlap(next_threep_ss, fivep_ss)
                        threep_juncs = junctions[chrom][strand].overlap(threep_ss, next_fivep_ss)
                    # Reads supporting this 5' site vs all overlapping junctions.
                    fivep_ss_reads, fivep_reads_total = 0.0, 0.0
                    for junc in fivep_juncs:
                        if strand == '+':
                            site = junc.begin
                        else:
                            site = junc.end
                        if site == fivep_ss:
                            fivep_ss_reads += junc.data['reads']
                        fivep_reads_total += junc.data['reads']
                    # Reads supporting this 3' site vs all overlapping junctions.
                    threep_ss_reads, threep_reads_total = 0.0, 0.0
                    for junc in threep_juncs:
                        if strand == '+':
                            site = junc.end
                        else:
                            site = junc.begin
                        if site == threep_ss:
                            threep_ss_reads += junc.data['reads']
                        threep_reads_total += junc.data['reads']
                    # Store raw (inclusion, exclusion) counts; ratios are computed by
                    # the caller when writing output.
                    fivep_ss_usage[chrom][strand][fivep_ss] = (fivep_ss_reads, fivep_reads_total-fivep_ss_reads)
                    threep_ss_usage[chrom][strand][threep_ss] = (threep_ss_reads, threep_reads_total-threep_ss_reads)
                    juncs_processed.add((junc_int.begin, junc_int.end))
    return fivep_ss_usage, threep_ss_usage
if __name__ == '__main__' :
    # Compute per-splice-site usage tables from STAR junction files.
    parser = argparse.ArgumentParser()
    parser.add_argument('--anno_path', help='Path to GENCODE GTF annotation file')
    parser.add_argument('--sj_files', help='Comma-separated list of splice junction read files from STAR')
    parser.add_argument('--sample', help='Name of the sample to be used in output file names')
    parser.add_argument('--out_dir', help='Where to output the splice site usage files to')
    args = parser.parse_args()
    anno_path, sj_files, sample, out_dir = args.anno_path, args.sj_files.split(','), args.sample, args.out_dir
    anno_sites = parse_annotation(anno_path)
    print(time.strftime('%m-%d %I:%M:%S%p') + ' - Processing usage data for ' + sample + '...')
    fivep_ss_usage, threep_ss_usage = ss_usage(sj_files, anno_sites)
    # Write the 5' splice-site usage table (usage = inclusion / total reads)
    with open(join(out_dir, sample + '_5p_ss_usage.txt'), 'w') as out_file:
        out_file.write('Chrom\tSite\tStrand\tInclusion_reads\tExclusion_reads\tUsage\n')
        for chrom in fivep_ss_usage:
            for strand in fivep_ss_usage[chrom]:
                for fivep_site in fivep_ss_usage[chrom][strand]:
                    inc_reads, exc_reads = fivep_ss_usage[chrom][strand][fivep_site]
                    usage = inc_reads/float(inc_reads + exc_reads)
                    out_file.write('\t'.join([chrom, str(fivep_site), strand, str(inc_reads), str(exc_reads), str(usage)]) + '\n')
    # Write the 3' splice-site usage table
    with open(join(out_dir, sample + '_3p_ss_usage.txt'), 'w') as out_file:
        out_file.write('Chrom\tSite\tStrand\tInclusion_reads\tExclusion_reads\tUsage\n')
        for chrom in threep_ss_usage:
            for strand in threep_ss_usage[chrom]:
                for threep_site in threep_ss_usage[chrom][strand]:
                    inc_reads, exc_reads = threep_ss_usage[chrom][strand][threep_site]
                    usage = inc_reads/float(inc_reads + exc_reads)
                    out_file.write('\t'.join([chrom, str(threep_site), strand, str(inc_reads), str(exc_reads), str(usage)]) + '\n')
|
dtglidden/hotspot-exon-paper | scripts/estimate_hotspot_prevalence.py | import sys
from os.path import join
from numpy import mean, concatenate, searchsorted
import matplotlib.pyplot as plt
import time
import argparse
chroms = set(['chr'+str(i) for i in range(1,23)] + ['chrY', 'chrX'])
def parse_annotation(anno_path):
    """Read principal protein-coding exons from a GENCODE GTF file.

    Returns (gene_tx, id_to_name, name_to_id) where gene_tx maps
    gene_id -> {tx_id: [(chrom, strand, start, end), ...]} for exons of
    transcripts tagged 'appris_principal_1'.
    """
    id_to_name, name_to_id = {}, {}
    gene_tx = {}
    with open(anno_path) as gtf:
        for record in gtf:
            if record[0] == '#':  # header/comment line
                continue
            chrom, _, seq_type, begin, stop, _, strand, _, attrs = record.strip().split('\t')
            # GTF attribute column: 'key "value"; key "value"; ...'
            pairs = attrs.split('; ')[:-1]
            tags = set(p.split(' ')[1].strip('\"') for p in pairs)
            attr_map = {p.split(' ')[0]: p.split(' ')[1].strip('\"') for p in pairs}
            # Note: the transcript_type lookup is only reached for exon records.
            if seq_type != 'exon' or attr_map['transcript_type'] != 'protein_coding' \
                    or chrom not in chroms or 'appris_principal_1' not in tags:
                continue
            gene_id, gene_name = attr_map['gene_id'].split('.')[0], attr_map['gene_name']
            tx_id = attr_map['transcript_id'].split('.')[0]
            if gene_id not in gene_tx:
                gene_tx[gene_id] = {}
                id_to_name[gene_id], name_to_id[gene_name] = gene_name, gene_id
            gene_tx[gene_id].setdefault(tx_id, []).append((chrom, strand, int(begin), int(stop)))
    return gene_tx, id_to_name, name_to_id
def parse_HI_scores(HI_path):
    """Load per-gene haploinsufficiency scores from a two-column TSV file.

    Rows whose first column is 'Gene_name' (the header) are skipped.
    Returns a dict mapping gene name -> float score.
    """
    scores = {}
    with open(HI_path) as tsv:
        for row in tsv:
            name, value = row.strip().split('\t')
            if name == 'Gene_name':  # header row
                continue
            scores[name] = float(value)
    return scores
def parse_usage(sample, usage_dir):
    """Load per-site 3' and 5' splice-site usage fractions for one sample.

    Reads '<sample>_3p_ss_usage.txt' and '<sample>_5p_ss_usage.txt' from
    usage_dir and returns {'3p'|'5p': {chrom: {strand: {site: usage}}}}.
    """
    usages = {st: {c: {'+': {}, '-': {}} for c in chroms} for st in ['3p', '5p']}
    for st in usages:
        table_path = join(usage_dir, '_'.join([sample, st, 'ss_usage.txt']))
        with open(table_path) as table:
            for row in table:
                chrom, site, strand, _, _, usage = row.strip().split('\t')
                if chrom == 'Chrom':  # header row
                    continue
                usages[st][chrom][strand][int(site)] = float(usage)
    return usages
if __name__ == '__main__':
    # Predict low-usage ("hotspot") internal exons by ranking each exon's
    # splice-site usage against genes with similar HI score and intron count.
    parser = argparse.ArgumentParser()
    parser.add_argument('--anno_path', help='Path to GENCODE GTF annotation file')
    parser.add_argument('--HI_path', help='Path to gene haploinsufficiency scores')
    parser.add_argument('--usage_dir', help='Directory with 5\' and 3\' usage files')
    parser.add_argument('--sample', help='Name of sample to process (should match first portion of usage file name)')
    parser.add_argument('--out_dir', help='Where to output the predicted hotspots file')
    args = parser.parse_args()
    anno_path, HI_path, usage_dir, sample, out_dir = args.anno_path, args.HI_path, args.usage_dir, args.sample, args.out_dir
    gene_tx, id_to_name, name_to_id = parse_annotation(anno_path)
    # Longest (most-exon) principal transcript per gene
    longest_tx = {g:max(gene_tx[g].keys(), key=lambda t:len(gene_tx[g][t])) for g in gene_tx}
    print(time.strftime('%m-%d %I:%M:%S%p') + ' - Done parsing annotation...')
    HI_scores = parse_HI_scores(HI_path)
    print(time.strftime('%m-%d %I:%M:%S%p') + ' - Done parsing HI scores...')
    print(time.strftime('%m-%d %I:%M:%S%p') + ' - Processing {0}...'.format(sample))
    usages = parse_usage(sample, usage_dir)
    # Collect internal-exon splice-site usages per gene (first/last exon excluded)
    gene_usages = {}
    for gene_id in gene_tx:
        if id_to_name[gene_id] in HI_scores:
            gene_usages[gene_id] = []
            for chrom, strand, start, end in gene_tx[gene_id][longest_tx[gene_id]][1:-1]:
                # On '+' the exon end is the 5' splice site; reversed on '-'
                if strand == '+':
                    fivep_site, threep_site = end, start
                else:
                    fivep_site, threep_site = start, end
                if fivep_site in usages['5p'][chrom][strand] and threep_site in usages['3p'][chrom][strand]:
                    gene_usages[gene_id] += [usages['5p'][chrom][strand][fivep_site], usages['3p'][chrom][strand][threep_site]]
    with open(join(out_dir, '{0}_predicted_hotspot_exons_by_usage.txt'.format(sample)), 'w') as out_file:
        exons_low, exons_reg = 0, 0
        out_file.write('Gene_ID\tChrom\tStart\tEnd\tStrand\t5p_usage\t3p_usage\tLow_usage\tNeighborhood_size\n')
        for gene_id in gene_tx:
            gene_name, intron_num = id_to_name[gene_id], len(gene_tx[gene_id][longest_tx[gene_id]])-1
            if gene_name in HI_scores and intron_num > 1:
                gene_HI = HI_scores[gene_name]
                # Neighborhood: genes with an HI score within 0.1 of this gene...
                HI_neighbors = [name_to_id[g] for g in HI_scores if g in name_to_id and abs(HI_scores[g] - gene_HI) <= 0.1]
                # ...and an intron count within 10%, with more than 2 exons
                intron_and_HI_neighbors = [g for g in HI_neighbors if abs(len(gene_tx[g][longest_tx[g]])-intron_num)<0.1*intron_num and len(gene_tx[g][longest_tx[g]])>2]
                neighbor_usages = [gene_usages[g] for g in intron_and_HI_neighbors]
                if len(neighbor_usages) > 0:
                    neighbor_usages = sorted(concatenate(neighbor_usages))
                    for chrom, strand, start, end in gene_tx[gene_id][longest_tx[gene_id]][1:-1]:
                        if strand == '+':
                            fivep_site, threep_site = end, start
                        else:
                            fivep_site, threep_site = start, end
                        if fivep_site in usages['5p'][chrom][strand] and threep_site in usages['3p'][chrom][strand]:
                            fivep_usage, threep_usage = usages['5p'][chrom][strand][fivep_site], usages['3p'][chrom][strand][threep_site]
                            # Percentile rank of this exon's usage within the neighborhood
                            fivep_rank = (searchsorted(neighbor_usages, fivep_usage, side='right')+1)/float(len(neighbor_usages))
                            threep_rank = (searchsorted(neighbor_usages, threep_usage, side='right')+1)/float(len(neighbor_usages))
                            output = [gene_id, chrom, start, end, strand, fivep_usage, threep_usage]
                            # Flag as low-usage when both sites fall in the bottom decile
                            if fivep_rank <= 0.1 and threep_rank <= 0.1:
                                output += ['True', len(neighbor_usages)]
                                exons_low += 1
                            else:
                                output += ['False', len(neighbor_usages)]
                                exons_reg += 1
                            out_file.write('\t'.join([str(e) for e in output]) + '\n')
|
dtglidden/hotspot-exon-paper | lib/ss_usage.py | <gh_stars>0
#!/usr/bin/env python
# Make sure you already loaded the 'Python' module (python 3)
import sj2psi as sj
import tempfile
def get_usage(filename):
    """Compute per-junction PSI values from a STAR SJ.out.tab file.

    The resulting table is written to a temporary CSV so it can be imported
    into R code via rPython; the caller is responsible for reading and
    eventually deleting the file.

    Parameters
    ----------
    filename : str
        Path to the STAR SJ.out.tab splice-junction file.

    Returns
    -------
    str
        Path of the temporary CSV file holding the PSI table.
    """
    import os  # local import: only needed to close the mkstemp descriptor
    df = sj.get_psis(sj.read_sj_out_tab(filename), min_unique=0, min_multimap=0)
    fd, csv_path = tempfile.mkstemp()
    # mkstemp returns an open OS-level file descriptor; close it so it is not
    # leaked (DataFrame.to_csv opens the path itself).
    os.close(fd)
    df.to_csv(csv_path, index=False)
    return csv_path
|
dtglidden/hotspot-exon-paper | scripts/usage_by_introns.py | import sys
from os.path import join
from numpy import mean
import argparse
chroms = set(['chr'+str(i) for i in range(1,23)] + ['chrY', 'chrX', 'chrM'])
if __name__ == '__main__' :
    # Compute the mean splice-site usage per gene (longest principal
    # transcript), keyed by the transcript's intron count.
    parser = argparse.ArgumentParser()
    parser.add_argument('--anno_path', help='Path to GENCODE GTF annotation file')
    parser.add_argument('--intron_path', help='Path to a BED file of introns')
    parser.add_argument('--usage_dir', help='Directory with 5\' and 3\' usage data files')
    parser.add_argument('--sample', help='Name of the sample from which the usage data was calculated')
    parser.add_argument('--out_dir', help='Where to output the average usage file to')
    args = parser.parse_args()
    anno_path, intron_path, usage_dir, sample, out_dir = args.anno_path, args.intron_path, args.usage_dir, args.sample, args.out_dir
    #Parse introns from BED file of GENCODE annotation obtained from UCSC table browser
    tx_introns = {}
    with open(intron_path) as in_file:
        for line in in_file:
            chrom, start, end, info, _, strand = line.strip().split('\t')
            if chrom in chroms:
                # BED is half-open; +1 shifts the end onto the downstream exon base
                tx_id, intron = info.split('.')[0], (int(start), int(end)+1)
                if tx_id not in tx_introns:
                    tx_introns[tx_id] = [intron]
                else:
                    tx_introns[tx_id].append(intron)
    #Map gene IDs to the transcript IDs of each intron set
    gene_to_tx = {}
    with open(anno_path) as in_file:
        for line in in_file:
            chrom, _, entry_type, start, end, _, strand, _, info = line.strip().split('\t')
            if entry_type == 'transcript' and chrom in chroms:
                # GTF attribute column: 'key "value"; key "value"; ...'
                info_pairs = info.split('; ')[:-1]
                values = set([e.split(' ')[1].strip('\"') for e in info_pairs])
                info_dict = {e.split(' ')[0]:e.split(' ')[1].strip('\"') for e in info_pairs}
                gene_id, tx_id = info_dict['gene_id'].split('.')[0], info_dict['transcript_id'].split('.')[0]
                # Only APPRIS principal transcripts that have a parsed intron set
                if tx_id in tx_introns and 'appris_principal_1' in values:
                    if gene_id not in gene_to_tx:
                        gene_to_tx[gene_id] = {'tx':[tx_id], 'info':(chrom, strand)}
                    else:
                        gene_to_tx[gene_id]['tx'].append(tx_id)
    #Identify the longest transcript in the gene
    gene_longest_tx = {gene:max(gene_to_tx[gene]['tx'], key=lambda t:len(tx_introns[t])) for gene in gene_to_tx}
    #Parse splice site usage data
    usages = {ss_type:{chrom:{'+':{}, '-':{}} for chrom in chroms} for ss_type in ['3p', '5p']}
    for ss_type in usages:
        with open(join(usage_dir, '_'.join([sample, ss_type, 'ss_usage.txt']))) as in_file:
            for line in in_file:
                chrom, site, strand, _, _, usage = line.strip().split('\t')
                if chrom != 'Chrom':  # skip the header row
                    usages[ss_type][chrom][strand][int(site)] = float(usage)
    #Calculate average usage for the gene's longest transcript
    gene_usage_by_introns = []
    for gene in gene_longest_tx:
        max_tx = gene_longest_tx[gene]
        intron_num = len(tx_introns[max_tx])
        avg_usage = []
        chrom, strand = gene_to_tx[gene]['info']
        for intron_start, intron_end in tx_introns[max_tx]:
            # On '+' the intron start is the 5' splice site; reversed on '-'
            if strand == '+':
                fivep_site, threep_site = intron_start, intron_end
            else:
                fivep_site, threep_site = intron_end, intron_start
            if fivep_site in usages['5p'][chrom][strand] and threep_site in usages['3p'][chrom][strand]:
                avg_usage.append(usages['5p'][chrom][strand][fivep_site])
                avg_usage.append(usages['3p'][chrom][strand][threep_site])
        #Only output data for transcripts that have usage for each splice site in every intron
        if len(avg_usage) >= 2.0*len(tx_introns[max_tx]):
            gene_usage_by_introns.append((gene, chrom, strand, str(mean(avg_usage)), str(intron_num)))
    with open(join(out_dir, '{}_gene_usage_intron_primary_tx.txt'.format(sample)), 'w') as out_file:
        out_file.write('Ensembl_ID\tChrom\tStrand\tAvg_usage\tIntron_num\n')
        for i in range(len(gene_usage_by_introns)):
            out_file.write('\t'.join(gene_usage_by_introns[i]) + '\n')
|
zcybupt/RFBNet_With_GUI | RFB_GUI.py | from __future__ import print_function
import sys
import torch
import torch.backends.cudnn as cudnn
import numpy as np
from data import BaseTransform, VOC_300, VOC_512
import cv2
from layers.functions import Detect, PriorBox
import matplotlib.patches as patches
from collections import OrderedDict
from PyQt5 import QtWidgets, QtCore, QtGui
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
from models.RFB_Net_vgg import build_net
import time
classes = ['aeroplane', 'ship', 'storage_tank', 'baseball_diamond', 'tennis_court', 'basketball_court',
'ground_track_field', 'harbor', 'bridge', 'vehicle']
class RFB_GUI(QtWidgets.QMainWindow):
def __init__(self):
super(RFB_GUI, self).__init__()
MyMessageBox(self)
self.setWindowTitle("RFB-GUI Demo Program")
self.resize(1280, 900)
self.setFocus()
self.file_item = QtWidgets.QAction('Open image', self)
self.file_item.setShortcut('Ctrl+O')
self.file_item.triggered.connect(self.select_file)
self.label = DragLabel("Please drag image here\nor\nPress Ctrl+O to select", self)
self.label.addAction(self.file_item)
self.setCentralWidget(self.label)
self.priorbox = PriorBox(self.cfg)
self.cuda = True
self.numclass = 21
self.net = build_net('test', self.input_size, self.numclass) # initialize detector
state_dict = torch.load(self.trained_model)
new_state_dict = OrderedDict()
for k, v in state_dict.items():
head = k[:7]
if head == 'module.':
name = k[7:]
else:
name = k
new_state_dict[name] = v
self.net.load_state_dict(new_state_dict)
self.net.eval()
if self.cuda:
self.net = self.net.cuda()
cudnn.benchmark = True
else:
self.net = self.net.cpu()
print('Finished loading model!')
def select_file(self):
file_path = QtWidgets.QFileDialog.getOpenFileName(self, 'Select image',
r'/home/zcy/data/NWPU_VHR-10_dataset/positive_image_set',
"Image files(*.bmp *.jpg *.pbm *.pgm *.png *.ppm *.xbm *.xpm)"
";;All files (*.*)")
# try:
self.detect(file_path[0], self)
# except Exception as e:
# QtWidgets.QMessageBox.information(self, "Alert", str(e))
def detect(self, file_name, object):
print(file_name)
start_time = time.time()
img = cv2.imread(file_name.strip())
if img is None:
QtWidgets.QMessageBox.information(self, "Alert", "Please select images")
return
scale = torch.Tensor([img.shape[1], img.shape[0],
img.shape[1], img.shape[0]])
detector = Detect(object.numclass, 0, object.cfg)
transform = BaseTransform(object.net.size, (123, 117, 104), (2, 0, 1))
with torch.no_grad():
x = transform(img).unsqueeze(0)
if object.cuda:
x = x.cuda()
scale = scale.cuda()
out = object.net(x)
with torch.no_grad():
priors = object.priorbox.forward()
if object.cuda:
priors = priors.cuda()
boxes, scores = detector.forward(out, priors)
boxes = boxes[0]
scores = scores[0]
boxes *= scale
boxes = boxes.cpu().numpy()
scores = scores.cpu().numpy()
result_set = []
for j in range(1, object.numclass):
max_ = max(scores[:, j])
inds = np.where(scores[:, j] > 0.2)[0] # conf > 0.6
if inds is None:
continue
c_bboxes = boxes[inds]
c_scores = scores[inds, j]
c_dets = np.hstack((c_bboxes, c_scores[:, np.newaxis])).astype(
np.float32, copy=False)
keep = object.nms_py(c_dets, 0.6)
c_dets = c_dets[keep, :]
c_bboxes = c_dets[:, :4]
for bbox in c_bboxes:
# Create a Rectangle patch
rect = patches.Rectangle((int(bbox[0]), int(bbox[1])), int(bbox[2]) - int(bbox[0]) + 1,
int(bbox[3]) - int(bbox[1]) + 1, linewidth=1, edgecolor='r')
result_set.append(str(rect))
cv2.rectangle(img, (int(bbox[0]), int(bbox[1])), (int(bbox[2]), int(bbox[3])), (255, 0, 0), 2)
cv2.imwrite("my_test.png", img)
end_time = time.time()
print(end_time - start_time)
img_data = QtGui.QPixmap("my_test.png")
height = object.height()
width = object.height() / img_data.height() * img_data.width()
img_data = img_data.scaled(width, height)
object.label.resize(width, height)
object.label.setPixmap(img_data)
self.setFocus()
def nms_py(self, dets, thresh):
    """Pure-Python/NumPy non-maximum suppression.

    Parameters
    ----------
    dets : np.ndarray
        (N, 5) array of [x1, y1, x2, y2, score] rows.
    thresh : float
        IoU threshold; a box overlapping a kept box by >= thresh is dropped.

    Returns
    -------
    list
        Indices of the kept detections, ordered by descending score.
    """
    x1 = dets[:, 0]
    y1 = dets[:, 1]
    x2 = dets[:, 2]
    y2 = dets[:, 3]
    scores = dets[:, 4]
    # +1 because box coordinates are treated as inclusive pixel indices.
    areas = (x2 - x1 + 1) * (y2 - y1 + 1)
    order = scores.argsort()[::-1]
    ndets = dets.shape[0]
    # Bug fix: np.int was deprecated and removed in NumPy 1.24;
    # the builtin int is the documented replacement.
    suppressed = np.zeros((ndets), dtype=int)
    keep = []
    for _i in range(ndets):
        i = order[_i]
        if suppressed[i] == 1:
            continue
        keep.append(i)
        ix1 = x1[i]
        iy1 = y1[i]
        ix2 = x2[i]
        iy2 = y2[i]
        iarea = areas[i]
        # Suppress every lower-scored box that overlaps box i too much.
        for _j in range(_i + 1, ndets):
            j = order[_j]
            if suppressed[j] == 1:
                continue
            xx1 = max(ix1, x1[j])
            yy1 = max(iy1, y1[j])
            xx2 = min(ix2, x2[j])
            yy2 = min(iy2, y2[j])
            w = max(0.0, xx2 - xx1 + 1)
            h = max(0.0, yy2 - yy1 + 1)
            inter = w * h
            ovr = inter / (iarea + areas[j] - inter)  # IoU
            if ovr >= thresh:
                suppressed[j] = 1
    return keep
class MyMessageBox(QMessageBox):
    """Modal dialog asking the user to pick the detector input size.

    Side effects: sets ``parent.input_size``, ``parent.cfg`` and
    ``parent.trained_model`` according to the clicked button, or exits
    the whole application when Cancel is pressed.
    """

    def __init__(self, parent):
        super().__init__()
        self.setWindowTitle('Input Size')
        self.setText("Please choose the input image size ")
        self.setFont(QtGui.QFont("Ubuntu Mono", 14))
        # Map each size button to the (input_size, cfg, weights) it selects.
        choices = {
            self.addButton(self.tr('300 x 300'), QMessageBox.ActionRole):
                (300, VOC_300, 'weights/RFB_vgg_NWPU_300.pth'),
            self.addButton(self.tr('512 x 512'), QMessageBox.ActionRole):
                (512, VOC_512, 'weights/RFB_vgg_NWPU_512.pth'),
        }
        cancel_button = self.addButton(' Cancel ', QMessageBox.ActionRole)
        self.exec_()
        clicked = self.clickedButton()
        if clicked in choices:
            parent.input_size, parent.cfg, parent.trained_model = choices[clicked]
        elif clicked == cancel_button:
            sys.exit()
class DragLabel(QLabel):
    """Label that accepts drag-and-dropped files and runs detection on them."""

    def __init__(self, text, parent):
        super().__init__(text, parent)
        self.parent = parent
        self.setAcceptDrops(True)
        self.setFont(QtGui.QFont("Ubuntu Mono", 30))
        self.setAlignment(Qt.AlignCenter)

    def dragEnterEvent(self, QDragEnterEvent):
        # Accept every drag so that dropEvent fires.
        QDragEnterEvent.accept()

    def dropEvent(self, QDropEvent):
        # The mime text is a "file://" URL; drop the 7-character scheme
        # prefix to recover the filesystem path.
        url_text = QDropEvent.mimeData().text()
        self.parent.detect(url_text[7:], self.parent)
if __name__ == '__main__':
    # Application entry point: start Qt, show the main window and run the
    # event loop until the window is closed.
    app = QtWidgets.QApplication(sys.argv)
    rfb_gui = RFB_GUI()
    rfb_gui.show()
    sys.exit(app.exec_())
|
dapianzi/pics | cats/models.py | <filename>cats/models.py<gh_stars>0
from django.db import models
from django.contrib.auth.models import User,GroupManager
from . import managers
# Create your models here.
class ViewLog(models.Model):
    """Per-user record of the last page viewed and when."""
    author = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
        # FK targets default to the id column; any other target field must
        # carry a unique index (username is unique on auth.User).
        to_field='username',
        db_column='author',
    )
    last_page = models.IntegerField(default=0)
    # db_index=True creates an index on this column.
    last_view = models.DateTimeField(db_index=True)

    def __str__(self):
        # Bug fix: __str__ must return a str; last_view is a datetime, so
        # the old `return self.last_view` raised TypeError.
        return str(self.last_view)

    class Meta:
        verbose_name = 'view log'
        verbose_name_plural = 'view log'
        # Explicit table name.
        db_table = 'cats_view_log'
class CatImgs(models.Model):
    """A cat picture with its moderation status and denormalized counters."""
    adate = models.DateTimeField(auto_now_add=True)
    # Content hash used to de-duplicate images.
    img_hash = models.CharField(default='', null=False, max_length=64, unique=True)
    img_src = models.CharField(default='', max_length=255)
    img_desc = models.CharField(default='', max_length=255)
    img_from = models.CharField(default='', max_length=255)
    # NOTE(review): BooleanField with an int default; 0 appears to mean
    # "visible" (ImgManager filters on img_status=0) — confirm.
    img_status = models.BooleanField(default=0, db_index=True)
    img_like = models.IntegerField(default=0)
    # relationship with comment
    comments = models.ManyToManyField(
        User,
        through='PicComments',
        through_fields=('img', 'author'),
        # Trailing '+' disables the reverse relation on User.
        related_name='img_comments+',
    )
    likes = models.ManyToManyField(
        User,
        through='PicLikes',
        through_fields=('img', 'user'),
        related_name='img_likes+',
    )
    # Custom manager that hides images whose img_status != 0.
    objects = managers.ImgManager()

    def __str__(self):
        return self.img_hash

    class Meta:
        verbose_name = 'cat imgs'
        verbose_name_plural = 'cat imgs'
        db_table = 'cat_imgs'
class PicComments(models.Model):
    """Through model for CatImgs.comments: one user comment on one image."""
    adate = models.DateTimeField(db_index=True, auto_now_add=True)
    author = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
        to_field='username',
        # used in <Queryset>
        related_name='comment_user',
    )
    img = models.ForeignKey(
        CatImgs,
        on_delete=models.CASCADE,
        related_name='comment_img'
    )
    content = models.TextField(default='')
    # Rating attached to the comment, e.g. 0.0-10.0 with one decimal.
    stars = models.DecimalField(default=0, max_digits=4, decimal_places=1)

    class Meta:
        verbose_name = 'pic comments'
        verbose_name_plural = 'cat imgs comments'
        db_table = 'cats_pic_comments'
class PicLikes(models.Model):
    """Through model for CatImgs.likes: a like/dislike event by one user."""
    # -1 = dislike, 0 = cancelled, 1 = like.
    IS_LIKE = (
        (-1, '不喜欢'),
        (0, '取消'),
        (1, '喜欢'),
    )
    adate = models.DateTimeField(db_index=True, auto_now_add=True)
    user = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
        to_field='username',
        related_name='like_author',
    )
    img = models.ForeignKey(
        CatImgs,
        on_delete=models.CASCADE,
        related_name='like_img'
    )
    is_like = models.IntegerField(choices=IS_LIKE)

    class Meta:
        verbose_name = 'cat imgs likes'
        verbose_name_plural = 'cat imgs likes'
        db_table = 'cats_pic_likes'
class PicStars(models.Model):
    """Denormalized per-image totals of stars and comment count."""
    # One row per image; shares the image's primary key.
    img = models.OneToOneField(
        CatImgs,
        on_delete=models.CASCADE,
        primary_key=True
    )
    stars = models.BigIntegerField(default=0)
    comments = models.BigIntegerField(default=0)

    class Meta:
        verbose_name = 'cat imgs stars'
        verbose_name_plural = 'cat imgs stars'
        db_table = 'cats_pic_stars'
dapianzi/pics | pics/utils.py | <filename>pics/utils.py
# coding=utf-8
import re
import json
from django.http import HttpResponse
def _ajax_return(status_code=0, msg='', data=None):
    """Serialize the standard AJAX payload shape into an HttpResponse."""
    payload = {
        'code': status_code,
        'msg': msg,
        'content': data,
    }
    return HttpResponse(json.dumps(payload))
def _ajax_success(data=None):
    """Shortcut for a success response: code 0, empty message."""
    return _ajax_return(status_code=0, msg='', data=data)
def _ajax_error(status_code, msg=''):
    """Shortcut for an error response: non-zero code, no content payload."""
    return _ajax_return(status_code=status_code, msg=msg)
def _is_doubtful(str):
return len(str)>50 or bool(re.search(r'\'|"|=|\\|\/', str))
|
dapianzi/pics | cats/migrations/0012_auto_20171226_1821.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-12-26 10:21
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated schema migration: creates the CatImgs, PicComments,
    PicLikes and PicStars tables and retargets ViewLog.author at the
    username field. Applied migrations should not be hand-edited."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('cats', '0011_auto_20171226_1745'),
    ]

    operations = [
        migrations.CreateModel(
            name='CatImgs',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('adate', models.DateTimeField(auto_now_add=True)),
                ('img_hash', models.CharField(default='', max_length=64, unique=True)),
                ('img_src', models.CharField(default='', max_length=255)),
                ('img_desc', models.CharField(default='', max_length=255)),
                ('img_from', models.CharField(default='', max_length=255)),
                ('img_status', models.BooleanField(db_index=True, default=0)),
                ('img_like', models.IntegerField(default=0)),
            ],
            options={
                'verbose_name': 'cat imgs',
                'verbose_name_plural': 'cat imgs',
                'db_table': 'cat_imgs',
            },
        ),
        migrations.CreateModel(
            name='PicComments',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('adate', models.DateTimeField(auto_now_add=True, db_index=True)),
                ('content', models.TextField(default='')),
                ('stars', models.DecimalField(decimal_places=1, default=0, max_digits=4)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL, to_field='username')),
                ('img_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cats.CatImgs')),
            ],
            options={
                'verbose_name': 'pic comments',
                'verbose_name_plural': 'cat imgs comments',
                'db_table': 'cats_pic_comments',
            },
        ),
        migrations.CreateModel(
            name='PicLikes',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('adate', models.DateTimeField(auto_now_add=True, db_index=True)),
                ('is_like', models.IntegerField(choices=[(-1, '不喜欢'), (0, '取消'), (1, '喜欢')])),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL, to_field='username')),
                ('img_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cats.CatImgs')),
            ],
            options={
                'verbose_name': 'cat imgs likes',
                'verbose_name_plural': 'cat imgs likes',
                'db_table': 'cats_pic_likes',
            },
        ),
        migrations.CreateModel(
            name='PicStars',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('stars', models.BigIntegerField(default=0)),
                ('comments', models.BigIntegerField(default=0)),
                ('img_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cats.CatImgs')),
            ],
            options={
                'verbose_name': 'cat imgs stars',
                'verbose_name_plural': 'cat imgs stars',
                'db_table': 'cats_pic_stars',
            },
        ),
        migrations.AlterField(
            model_name='viewlog',
            name='author',
            field=models.ForeignKey(db_column='author', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, to_field='username'),
        ),
    ]
|
dapianzi/pics | cats/urls.py | from django.conf.urls import url
from . import views
# URL routes for the cats app.
# NOTE(review): patterns without a trailing '$' (e.g. r'^signin') match any
# URL starting with that prefix — confirm this is intended.
urlpatterns = [
    url(r'^$', views.IndexView.as_view(), name='home'),
    url(r'^signin', views.SigninView.as_view(), name='signin'),
    url(r'^ajaxsignin', views.AjaxSignin.as_view(), name='ajaxsignin'),
    url(r'^signout', views.signout, name='signout'),
    url(r'^likes$', views.LikesView.as_view(), name='likes'),
    url(r'^comment$', views.CommentView.as_view(), name='comment'),
    url(r'^delete', views.delete, name='delete'),
    url(r'^more', views.MoreView.as_view(), name='more'),
]
|
dapianzi/pics | cats/migrations/0008_merge_20171226_1731.py | <filename>cats/migrations/0008_merge_20171226_1731.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-12-26 09:31
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated merge migration: reconciles the 0007 and 0004 branches
    of the cats migration history; performs no schema changes."""

    dependencies = [
        ('cats', '0007_delete_viewlog'),
        ('cats', '0004_auto_20171226_1721'),
    ]

    operations = [
    ]
|
dapianzi/pics | spider/views.py | # coding=utf-8
import os
import time
import json
from django.shortcuts import render, get_object_or_404
from django.views.generic import View
from django.db.models import ObjectDoesNotExist
from pics.utils import _ajax_error, _ajax_success, _is_doubtful
from . import models as spider_models
# Create your views here.
class IndexView(View):
    """
    首页搜索

    Search landing page: validates the query, blocks abusive IPs, throttles
    concurrent scrapy processes and (re)launches crawl tasks per keyword.
    """
    template_name = 'spider/index.html'
    ip = ''
    _DOUBTFUL_COUNT = 50   # suspicious searches before an IP is blacklisted
    _MAX_TASKS = 16        # max concurrent scrapy processes
    _MAX_RUNTIME = 1       # days before a pending/running task is stale
    _TASK_STATUS_EXPIRED = 3
    _TASK_STATUS_CACHING = 2
    _TASK_STATUS_RUNNING = 1
    _TASK_STATUS_PENDING = 0

    def get(self, request, *args, **kwargs):
        context = dict()
        context['title'] = 'What can I do for you?'
        context['types'] = spider_models.ContentType.objects.filter(active=spider_models.ContentType.ACTIVE[0][0])
        keyword = request.GET.get('keyword', '')
        # Bug fix: the default must be a string — the old int default 1
        # made type_id.isdigit() raise AttributeError when 'type' was absent.
        type_id = request.GET.get('type', '1')
        if len(keyword) >= 2:
            context['keyword'] = keyword
            context['type_id'] = int(type_id) if type_id.isdigit() else 0
            self.ip = request.META.get("REMOTE_ADDR", '')
            if not self._is_blocked():
                # If tasks number more than _MAX_TASKS, report too busy.
                running_tasks = self._running_scrapy()
                if running_tasks >= self._MAX_TASKS:
                    # too busy
                    context['is_busy'] = True
                    return render(request, self.template_name, context)
                # handle search task
                self._handle_search_task(keyword, type_id)
                context['get_result'] = True
            else:
                context['get_result'] = False
        elif keyword != '':
            context['get_result'] = False
            context['err_msg'] = '搜索关键词不能少于2个字哦'
        return render(request, self.template_name, context)

    def _handle_search_task(self, keyword, type_id):
        """Reuse a fresh existing task or expire it and launch a new one."""
        content_type = get_object_or_404(spider_models.ContentType, id=type_id)
        spider_task = spider_models.SpiderTask.objects.filter(
            keyword=keyword, content_type=content_type).order_by('-id')[:1]
        if spider_task.count() > 0:
            renew_task = False
            # if run_time is empty
            if spider_task[0].run_time is None:
                renew_task = True
            # Pending/running tasks are renewed once they exceed _MAX_RUNTIME days.
            elif spider_task[0].status == self._TASK_STATUS_PENDING or spider_task[0].status == self._TASK_STATUS_RUNNING:
                running_timestamp = time.mktime(spider_task[0].run_time.timetuple())
                running_expired = time.time() - self._MAX_RUNTIME * 86400
                if running_timestamp <= running_expired:
                    renew_task = True
            # Cached tasks expire after the content type's own expire_time.
            elif spider_task[0].status == self._TASK_STATUS_CACHING:
                running_timestamp = time.mktime(spider_task[0].run_time.timetuple())
                running_expired = time.time() - content_type.expire_time * 86400
                if running_timestamp <= running_expired:
                    renew_task = True
            else:
                renew_task = True
            if renew_task:
                st = spider_task[0]
                st.status = self._TASK_STATUS_EXPIRED
                st.save()
                self._recordSearch(keyword)
                # launch a task
                return self._launch_task(keyword, content_type)
        else:
            return self._launch_task(keyword, content_type)

    def _launch_task(self, keyword, content_type):
        """Create a SpiderTask row and spawn one scrapy process per spider."""
        # NOTE(review): run_time is auto_now_add, so the value passed here is
        # ignored by the database — confirm before removing.
        st = spider_models.SpiderTask.objects.create(
            keyword=keyword, content_type=content_type, status=self._TASK_STATUS_PENDING,
            run_time=time.time()
        )
        ret = []
        if st:
            spiders = spider_models.Spider.objects.filter(content_type=content_type)
            if spiders:
                # Security fix: keyword comes straight from the request and
                # was interpolated unquoted into a shell command; quote it so
                # it cannot inject shell syntax.
                import shlex
                safe_keyword = shlex.quote(keyword)
                for s in spiders:
                    f = os.popen('/var/www/shell/run_scrapy.sh %s %d %d %s' % (s.name, s.id, st.id, safe_keyword))
                    # f = os.popen('python -V')
                    ret.append(f.read())
        return ret

    def _running_scrapy(self):
        """Count currently running `scrapy crawl` processes."""
        with os.popen('ps -ef | grep "scrapy crawl" | grep -v "grep" | wc -l') as f:
            # Bug fix: wc output ends with '\n', so isdigit() was always
            # False and the count was reported as 0 — strip it first.
            rs = f.read().strip()
        # (The old unreachable `return self._MAX_TASKS` after this point was removed.)
        return int(rs) if rs.isdigit() else 0

    def _is_blocked(self):
        """Return True when the client IP is on the deny list."""
        try:
            block = spider_models.BlackList.objects.get(ip=self.ip, is_deny=1)
        except ObjectDoesNotExist:
            return False
        else:
            return True

    def _recordSearch(self, content):
        """Log the search and blacklist the IP after too many doubtful queries."""
        doubtful = _is_doubtful(content)
        spider_models.SearchRecord.objects.create(ip=self.ip, content=content, is_doubtful=doubtful)
        # consider to block ip
        if spider_models.SearchRecord.objects.filter(ip=self.ip, is_doubtful=True).count() >= self._DOUBTFUL_COUNT:
            spider_models.BlackList.objects.create(ip=self.ip)
class GetResult(View):
    """
    拉取结果

    AJAX endpoint that pages through the crawl results for a keyword.
    """

    def post(self, request, *args, **kwargs):
        keyword = request.POST.get('keyword', '')
        # Bug fix: defaults must be strings — the old int defaults made the
        # .isdigit() calls below raise AttributeError when the keys were absent.
        type_id = request.POST.get('type', '1')
        idx = request.POST.get('idx', '0')
        # valid int
        type_id = int(type_id) if type_id.isdigit() else 1
        idx = int(idx) if idx.isdigit() else 0
        page_limit = 20
        content_type = get_object_or_404(spider_models.ContentType, id=type_id)
        task = spider_models.SpiderTask.objects.filter(
            keyword=keyword, content_type=content_type).order_by('-id')[:1]
        if not task.exists():
            # No task yet for this keyword: empty page, status 1.
            return _ajax_success({
                'idx': 0,
                'status': 1,
                'counts': 0,
                'result': []
            })
        # Bug fix: pagination sliced [idx:page_limit], which returns an empty
        # page as soon as idx reaches page_limit; slice [idx:idx+page_limit].
        results = spider_models.Items.objects.filter(spider_info=task[0])[idx:idx + page_limit]
        # convert queryset rows (JSON strings) to a list of dicts
        ret = [json.loads(r.result) for r in results]
        return _ajax_success({
            'idx': idx + len(ret),
            # Bug fix: the expression was `0 if ... else 0` (both branches 0).
            # NOTE(review): mirroring the no-task branch above, 1 is taken to
            # mean "still pending/running, keep polling" — confirm with the
            # front-end contract.
            'status': 1 if task[0].status <= 1 else 0,
            'counts': len(ret),
            'result': ret
        })
def handle_process(request):
    """Placeholder endpoint for spider-process management; not implemented."""
    # task_id = request.POST.get('id', 0)
    pass
|
dapianzi/pics | cats/migrations/0002_auto_20171226_1710.py | <reponame>dapianzi/pics
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-12-26 09:10
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: renames ViewLog's verbose names and moves it to the
    explicit 'cats_view_log' table. Do not hand-edit applied migrations."""

    dependencies = [
        ('cats', '0001_initial'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='viewlog',
            options={'verbose_name': 'view log', 'verbose_name_plural': 'view log'},
        ),
        migrations.AlterModelTable(
            name='viewlog',
            table='cats_view_log',
        ),
    ]
|
dapianzi/pics | pics/middleware.py | import re
class UserAssignMiddleware(object):
    """Middleware that injects a default page title into template responses."""

    def __init__(self, get_response):
        self.get_response = get_response
        # One-time configuration and initialization.

    def __call__(self, request):
        # Code to be executed for each request before
        # the view (and later middleware) are called.
        response = self.get_response(request)
        # Code to be executed for each request/response after
        # the view is called. By now the response is rendered, so the
        # context data can no longer be changed here — that is done in
        # process_template_response() instead.
        return response

    def process_template_response(self, request, response):
        """Add a default title to GET template responses lacking one."""
        if request.method == 'GET':
            if 'title' not in response.context_data:
                # Bug fix: removed a leftover debug print of
                # request.__dict__.keys() that spammed stdout per request.
                response.context_data['title'] = 'Dapianzi hate cats'
        return response
class SetRemoteAddrFromXForwardedFor(object):
    """Middleware that rewrites REMOTE_ADDR from the X-Forwarded-For header."""

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        try:
            real_ip = request.META['HTTP_X_FORWARDED_FOR']
        except KeyError:
            # Header absent: leave REMOTE_ADDR untouched.
            pass
        else:
            # HTTP_X_FORWARDED_FOR can be a comma-separated list of IPs.
            # Take just the first one.
            # Bug fix: '.spilt()' was a typo for '.strip()' and raised
            # AttributeError whenever the header was present.
            real_ip = real_ip.split(",")[0].strip()
            # Only accept a plausible dotted-quad IPv4 value.
            if re.match(r'^(\d+)(\.\d+){3}$', real_ip):
                request.META['REMOTE_ADDR'] = real_ip
        response = self.get_response(request)
        return response
|
dapianzi/pics | spider/admin.py | <gh_stars>0
from django.contrib import admin
from django.http import HttpResponse, StreamingHttpResponse
from django.core import serializers
from spider.models import Spider, ContentType, SpiderTask, BlackList
# Register your models here.
@admin.register(ContentType)
class ContentTypeAdmin(admin.ModelAdmin):
    """Admin for content categories with bulk activate/discard and CSV export."""
    # Columns shown in the change list.
    list_display = ['name', 'max_item', 'expire_time', 'active']
    # Bulk actions available from the dropdown.
    actions = [
        'set_active',
        'set_discard',
        'export_csv',
    ]

    def set_active(self, request, queryset):
        # active=0 means "active" (see ContentType.ACTIVE).
        r = queryset.update(active=0)
        self.message_user(request, "%s个类别被激活" % r)
    # Action label shown in the admin dropdown.
    set_active.short_description = '激活'

    def set_discard(self, req, qys):
        r = qys.update(active=1)
        # Bug fix: the confirmation said "activated" (被激活) for the discard
        # action; it now says "disabled" (被禁用) to match what it does.
        self.message_user(req, "%s个类别被禁用" % r)
    set_discard.short_description = '禁用'

    def export_csv(self, request, queryset):
        """Export the selected rows as a GBK-charset CSV attachment."""
        # NOTE(review): .encode('gbk').decode('gbk') is a round-trip no-op
        # except that it raises on characters GBK cannot represent — confirm
        # whether that early failure is intended.
        response = StreamingHttpResponse('\n'.join([','.join(
            # Convert every value in the row to str.
            list(map(lambda x: str(x), x))
        ) for x in list(
            # queryset -> list of value tuples
            queryset.values_list('name', 'max_item', 'expire_time', 'active')
        )]).encode('gbk').decode('gbk'), charset='gbk', content_type='attachment/csv')
        response['Content-Disposition'] = 'attachment;filename="test.csv"'
        return response
    export_csv.short_description = '导出csv'
@admin.register(Spider)
class SpiderAdmin(admin.ModelAdmin):
    """Admin change list for the configured spiders."""
    list_display = ('name', 'c_name', 'domain', 'n_start', 'n_end', 'content_type')
@admin.register(SpiderTask)
class SpiderTaskAdmin(admin.ModelAdmin):
    """Admin change list for crawl tasks and their lifecycle timestamps."""
    list_display = ('keyword', 'content_type', 'status', 'run_time', 'finish_time', 'running')
@admin.register(BlackList)
class BlackListAdmin(admin.ModelAdmin):
    """Admin change list for the IP black/white list."""
    list_display = ('ip', 'time', 'is_deny')
|
dapianzi/pics | cats/managers.py | <filename>cats/managers.py<gh_stars>0
from django.db import models
# db manager
class ImgManager(models.Manager):
    '''
    custom img model manager

    Exposes only visible images (img_status=0) and raw-SQL helpers that
    join in like/comment/star counts.
    '''

    def get_queryset(self):
        # Hide unapproved/soft-deleted images everywhere this manager is used.
        return super(ImgManager, self).get_queryset().filter(img_status=0)

    def with_info(self, offset=0, limit=30):
        """Return up to `limit` visible images starting at `offset`, each
        annotated with n_likes, n_comments, n_stars and the average n_star."""
        from django.db import connection
        result_list = []
        with connection.cursor() as cursor:
            # int() hardens the %d interpolation against non-integer input.
            cursor.execute('''
                SELECT i.id,i.img_src,i.img_from,i.img_desc,IFNULL(l.likes,0)likes,
                IFNULL(s.comments,0)comments,IFNULL(s.stars,0)stars FROM cat_imgs i
                LEFT JOIN
                (SELECT img_id,COUNT(DISTINCT user_id) likes FROM cats_pic_likes WHERE is_like=1 GROUP BY img_id ) l
                ON i.id=l.img_id
                LEFT JOIN
                (SELECT img_id,comments,stars FROM cats_pic_stars) s
                ON i.id=s.img_id
                WHERE img_status=0 LIMIT %d,%d
                ''' % (int(offset), int(limit)))
            for row in cursor.fetchall():
                p = self.model(id=row[0], img_src=row[1], img_from=row[2], img_desc=row[3])
                p.n_likes = row[4]
                p.n_comments = row[5]
                p.n_stars = row[6]
                # Average rating; guard against division by zero comments.
                p.n_star = 0 if row[5] == 0 else row[6] // row[5]
                result_list.append(p)
        return result_list

    def only_likes(self):
        """Return raw rows for images that have at least one like.

        Bug fixes: the likes table name was misspelled (cats_pic_lisks),
        the stars subquery join was duplicated/unbalanced SQL, and the
        query result was never returned.
        """
        from django.db import connection
        with connection.cursor() as cursor:
            cursor.execute('''
                SELECT i.id,i.img_src,i.img_from,i.img_desc,l.likes,s.comments,s.stars FROM cat_imgs i
                LEFT JOIN
                (SELECT img_id,COUNT(DISTINCT user_id) likes FROM cats_pic_likes WHERE is_like=1 GROUP BY img_id ) l
                ON i.id=l.img_id
                LEFT JOIN
                (SELECT img_id,comments,stars FROM cats_pic_stars) s
                ON i.id=s.img_id
                WHERE likes>0
                ''')
            return cursor.fetchall()
dapianzi/pics | spider/urls.py | <filename>spider/urls.py
from django.conf.urls import url
from . import views
# URL routes for the spider app: search page and the AJAX result poller.
urlpatterns = [
    url(r'^$', views.IndexView.as_view(), name='home'),
    url(r'^result', views.GetResult.as_view(), name='get'),
]
|
dapianzi/pics | spider/models.py | <filename>spider/models.py
# coding=utf-8
from django.db import models
# Create your models here.
class Config(models.Model):
    """
    Configuration entries: a typed key/value store.
    """
    CONF_TYPE = (
        (1, 'int'),
        (2, 'str'),
        (3, 'float'),
        (4, 'json'),
    )
    conf_key = models.CharField('配置项', max_length=50, default='', unique=True, null=False)
    # NOTE(review): all three fields share the verbose_name '配置项' — the
    # type and value fields probably deserve distinct labels; confirm.
    conf_type = models.IntegerField('配置项', choices=CONF_TYPE)
    conf_value = models.TextField('配置项', max_length=1024, default='')

    def __str__(self):
        return self.conf_key
class Spider(models.Model):
    """
    Per-spider configuration: name, target domain and page range.
    """
    name = models.CharField('爬虫名称', max_length=50, default='', db_index=True)
    # Display (Chinese) name; shares the verbose_name with `name`.
    c_name = models.CharField('爬虫名称', max_length=50, default='')
    domain = models.CharField('爬虫域名', max_length=150, default='')
    n_start = models.IntegerField('起始页数', default=0)
    n_end = models.IntegerField('终止页数', default=0)
    content_type = models.ForeignKey(
        'ContentType',
        related_name='spider_content_type',
        on_delete=models.CASCADE,
    )

    def __str__(self):
        return '%s[%s]' % (self.c_name, self.name)
class ContentType(models.Model):
    """
    Configuration of a crawlable content category.
    """
    # 0 = active, 1 = discarded (views filter on ACTIVE[0][0]).
    ACTIVE = (
        (0, 'active'),
        (1, 'discard'),
    )
    name = models.CharField('内容类型', max_length=50, default='')
    max_item = models.IntegerField('每个爬虫最大抓取数', default=10)
    expire_time = models.IntegerField('过期时间(天)', default=30)
    # NOTE(review): a BooleanField with int choices is unusual; an
    # IntegerField (or plain BooleanField without choices) may be intended.
    active = models.BooleanField('是否废弃', choices=ACTIVE, db_index=True, default=0)

    def __str__(self):
        return self.name
class SpiderTask(models.Model):
    """
    A crawl task for one keyword/content-type pair.
    """
    # 0 = pending, 1 = running, 2 = caching, 3 = expired
    # (mirrors the _TASK_STATUS_* constants in spider.views.IndexView).
    STATUS = (
        (0, '初始化'),
        (1, '抓取中'),
        (2, '缓存中'),
        (3, '已过期'),
    )
    keyword = models.CharField('爬虫关键字', max_length=50, default='', db_index=True)
    content_type = models.ForeignKey(
        ContentType,
        related_name='task_content_type',
        on_delete=models.CASCADE,
    )
    status = models.IntegerField('任务状态', choices=STATUS, default=0, db_index=True)
    run_time = models.DateTimeField('爬虫开始时间', auto_now_add=True)
    finish_time = models.DateTimeField('爬虫结束时间', null=True)
    # Number of spider processes currently working on this task.
    running = models.IntegerField('运行数', default=0)

    def __str__(self):
        return self.keyword
class SpiderProcess(models.Model):
    """
    OS process bookkeeping for a running spider.
    """
    spider = models.ForeignKey(
        Spider,
        related_name='spider_process_name',
        on_delete=models.CASCADE
    )
    # OS process id; null while not running.
    pid = models.IntegerField('pid', null=True)
class SearchRecord(models.Model):
    """
    Log of a single search request, used for abuse detection.
    """
    time = models.DateTimeField('搜索时间', auto_now_add=True, null=False)
    ip = models.GenericIPAddressField('客户端IP', default='', db_index=True)
    content = models.CharField('搜索内容', max_length=50)
    # Set by pics.utils._is_doubtful(); counted toward blacklisting.
    is_doubtful = models.BooleanField('是否可疑', default=False, db_index=True)

    def __str__(self):
        return "[%s]%s - %s" % (self.time, self.ip, self.content)
class BlackList(models.Model):
    """
    IP black/white list consulted before handling a search.
    """
    # 1 = blacklisted, 2 = whitelisted.
    IS_DENY = (
        (1, '黑名单'),
        (2, '白名单'),
    )
    time = models.DateTimeField('添加时间', auto_now_add=True, null=False)
    ip = models.GenericIPAddressField('客户端IP', default='', db_index=True)
    # NOTE(review): a BooleanField cannot store the choice value 2; an
    # IntegerField is probably intended (views filter on is_deny=1) — confirm.
    is_deny = models.BooleanField('黑白名单', choices=IS_DENY, default=1)

    def __str__(self):
        return self.ip
class Items(models.Model):
    """
    A single crawled result item, stored as a JSON string in `result`.
    """
    time = models.DateTimeField('抓取时间', auto_now_add=True)
    spider_info = models.ForeignKey(
        SpiderTask,
        related_name='spider_info',
        on_delete=models.CASCADE,
    )
    spider = models.ForeignKey(
        Spider,
        related_name='spider_id',
        on_delete=models.CASCADE,
    )
    # JSON payload; decoded with json.loads() in spider.views.GetResult.
    result = models.TextField('抓取结果', default='', max_length=2048)
|
dapianzi/pics | spider/migrations/0001_initial.py | <gh_stars>0
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2018-01-11 11:18
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial schema for the spider app (BlackList, Config,
    ContentType, Items, SearchRecord, Spider, SpiderTask). Applied
    migrations should not be hand-edited."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='BlackList',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('time', models.DateTimeField(auto_now_add=True, verbose_name='添加时间')),
                ('ip', models.GenericIPAddressField(db_index=True, default='', verbose_name='客户端IP')),
                ('is_deny', models.BooleanField(choices=[(1, '黑名单'), (2, '白名单')], default=1, verbose_name='黑白名单')),
            ],
        ),
        migrations.CreateModel(
            name='Config',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('conf_key', models.CharField(default='', max_length=50, unique=True, verbose_name='配置项')),
                ('conf_type', models.IntegerField(choices=[(1, 'int'), (2, 'str'), (3, 'float'), (4, 'json')], verbose_name='配置项')),
                ('conf_value', models.TextField(default='', max_length=1024, verbose_name='配置项')),
            ],
        ),
        migrations.CreateModel(
            name='ContentType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(default='', max_length=50, verbose_name='内容类型')),
                ('max_item', models.IntegerField(default=10, verbose_name='每个爬虫最大抓取数')),
                ('expire_time', models.IntegerField(default=30, verbose_name='过期时间(天)')),
                ('active', models.BooleanField(choices=[(0, 'active'), (1, 'discard')], db_index=True, default=0, verbose_name='是否废弃')),
            ],
        ),
        migrations.CreateModel(
            name='Items',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('time', models.DateTimeField(auto_now_add=True, verbose_name='抓取时间')),
                ('result', models.TextField(default='', max_length=2048, verbose_name='抓取结果')),
            ],
        ),
        migrations.CreateModel(
            name='SearchRecord',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('time', models.DateTimeField(auto_now_add=True, verbose_name='搜索时间')),
                ('ip', models.GenericIPAddressField(db_index=True, default='', verbose_name='客户端IP')),
                ('content', models.CharField(max_length=50, verbose_name='搜索内容')),
                ('is_doubtful', models.BooleanField(db_index=True, default=False, verbose_name='是否可疑')),
            ],
        ),
        migrations.CreateModel(
            name='Spider',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(db_index=True, default='', max_length=50, verbose_name='爬虫名称')),
                ('c_name', models.CharField(default='', max_length=50, verbose_name='爬虫名称')),
                ('domain', models.CharField(default='', max_length=150, verbose_name='爬虫域名')),
                ('n_start', models.IntegerField(default=0, verbose_name='起始页数')),
                ('n_end', models.IntegerField(default=0, verbose_name='终止页数')),
                ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='spider_content_type', to='spider.ContentType')),
            ],
        ),
        migrations.CreateModel(
            name='SpiderTask',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('keyword', models.CharField(db_index=True, default='', max_length=50, verbose_name='爬虫关键字')),
                ('status', models.BooleanField(choices=[(0, '初始化'), (1, '抓取中'), (2, '缓存中'), (3, '已过期')], db_index=True, verbose_name='任务状态')),
                ('run_time', models.DateTimeField(auto_now_add=True, verbose_name='爬虫开始时间')),
                ('finish_time', models.DateTimeField(null=True, verbose_name='爬虫结束时间')),
                ('pid', models.IntegerField(null=True, verbose_name='pid')),
                ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='task_content_type', to='spider.ContentType')),
            ],
        ),
        migrations.AddField(
            model_name='items',
            name='spider',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='spider_id', to='spider.Spider'),
        ),
        migrations.AddField(
            model_name='items',
            name='spider_info',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='spider_info', to='spider.SpiderTask'),
        ),
    ]
|
dapianzi/pics | cats/migrations/0009_viewlog.py | <gh_stars>0
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-12-26 09:39
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: recreates the ViewLog model (after its deletion in
    0007) on the explicit 'cats_view_log' table. Do not hand-edit."""

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('cats', '0008_merge_20171226_1731'),
    ]

    operations = [
        migrations.CreateModel(
            name='ViewLog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('last_page', models.IntegerField(default=0)),
                ('last_view', models.DateTimeField(db_index=True)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='username+', related_query_name='author', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'view log',
                'verbose_name_plural': 'view log',
                'db_table': 'cats_view_log',
            },
        ),
    ]
|
dapianzi/pics | cats/forms.py | from django.forms import Form
class SigninForm(Form):
    """Sign-in form stub.

    NOTE(review): is_valid() unconditionally returns True, so no field
    validation ever runs — presumably a placeholder; confirm before relying
    on it for real authentication input.
    """
    error = ''

    def is_valid(self):
        return True

    def is_multipart(self):
        # The form never carries file uploads.
        return False
class SuggestForm(Form):
    """Suggestion form stub.

    NOTE(review): validation is stubbed out to always succeed — confirm.
    """
    def is_valid(self):
        return True
dapianzi/pics | cats/views.py | from django.views.generic.base import View,TemplateView,RedirectView
from django.shortcuts import render,redirect,get_object_or_404,get_list_or_404
from django.http import HttpResponse,Http404,HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.contrib.auth import authenticate,login,logout
from django.contrib.auth.decorators import login_required
import json
from django.contrib.auth.models import User
from . import models as Cat_models
from .forms import SigninForm,SuggestForm
from pics.utils import _ajax_error,_ajax_success
# Create your views here.
class IndexView(TemplateView):
    """Landing page listing the first 50 cat images with their stats."""
    template_name = 'cats/index.html'

    def get_context_data(self, **kwargs):
        context = super(IndexView, self).get_context_data(**kwargs)
        # with_info() annotates each image with like/comment/star counts.
        context['imgs'] = Cat_models.CatImgs.objects.with_info(0, 50)
        return context
class SigninView(View):
    """Classic form-based sign-in view."""
    form_class = SigninForm
    initial = {'title': '登录'}
    template_name = 'cats/signin.html'

    def get(self, request, *args, **kwargs):
        form = self.form_class()
        return render(request, self.template_name, {'form': form})

    def post(self, request, *args, **kwargs):
        form = self.form_class(request.POST)
        # Bug fix: the validity check was accidentally duplicated
        # (`if form.is_valid():` nested inside itself).
        if form.is_valid():
            username = request.POST.get('username', '')
            password = request.POST.get('password', '')
            user = authenticate(request, username=username, password=password)
            if user is not None:
                login(request, user)
                return HttpResponseRedirect(reverse('cats:home'))
        # Fall through: bad credentials (or invalid form) re-renders with an error.
        return render(request, self.template_name, {'error': '用户名或密码错误'})
class AjaxSignin(View):
    """AJAX sign-in endpoint returning JSON instead of redirects."""
    form_class = SigninForm
    template_name = 'cats/ajaxlogin.html'

    def get(self, request, *args, **kwargs):
        # Fix: removed an unused form instance that was created but never
        # passed to the template.
        return render(request, self.template_name, {'title': '请先登录'})

    def post(self, request, *args, **kwargs):
        form = self.form_class(request.POST)
        if form.is_valid():
            username = request.POST.get('username', '')
            password = request.POST.get('password', '')
            user = authenticate(request, username=username, password=password)
            if user is not None:
                login(request, user)
                return _ajax_success({'username': user.username, 'id': user.id})
        return _ajax_error(101, 'username or password is incorrect.')
def signout(request):
    """Log the current user out and redirect back to the home page."""
    logout(request)
    return HttpResponseRedirect(reverse('cats:home'))
class suggestView(View):
    """Suggestion form view.

    NOTE(review): form_class points at SigninForm and the sign-in template —
    this looks copy-pasted from SigninView; SuggestForm is probably intended.
    Confirm before changing.
    """
    form_class = SigninForm
    # Bug fix: get() referenced self.initial, but the class never defined
    # it, raising AttributeError on every GET request.
    initial = {}
    template_name = 'cats/signin.html'

    def get(self, request, *args, **kwargs):
        form = self.form_class(initial=self.initial)
        return render(request, self.template_name, {'form': form})

    def post(self, request, *args, **kwargs):
        form = self.form_class(request.POST)
        if form.is_valid():
            return HttpResponseRedirect('/')
        # Bug fix: an invalid POST previously fell off the end and returned
        # None (a 500 in Django); re-render the form instead.
        return render(request, self.template_name, {'form': form})
class LikesView(View):
    """Record a 'like' on an image by the signed-in user."""

    def post(self, request, *args, **kwargs):
        if not request.user.is_authenticated:
            # Anonymous users cannot like images.
            return HttpResponse(json.dumps({'status': -1, 'content': 'Invalid user!', 'code': 100}))
        # Robustness: 'id' may be absent or non-numeric.
        raw_id = request.POST.get('id', '0')
        img_id = int(raw_id) if str(raw_id).isdigit() else 0
        # Bug fix: the liking user was hard-coded to 'carl'; use the
        # authenticated request user instead.
        user = request.user
        # filter().first() instead of get(): an unknown id now yields the
        # clean error below rather than an uncaught DoesNotExist (500).
        img = Cat_models.CatImgs.objects.filter(id=img_id).first()
        if img:
            pic_like = Cat_models.PicLikes(img=img, user=user, is_like=1)
            pic_like.save()
            return _ajax_success(pic_like.id)
        else:
            return _ajax_error(200, 'Invalid img id!')
class CommentView(View):
    """Show the comment form for an image / accept a posted comment."""

    def get(self, request, *args, **kwargs):
        # Robustness: 'id' may be absent or non-numeric.
        raw_id = request.GET.get('id', '0')
        img_id = int(raw_id) if str(raw_id).isdigit() else 0
        if img_id > 0:
            # Bug fix: the existing-comment check was hard-coded to the user
            # 'carl'; check the current request user instead.
            is_comment = Cat_models.CatImgs.objects.get(id=img_id).comments.filter(
                username=request.user.username)
            if is_comment:
                return render(request, 'cats/comment.html', {'title': '已经评论过了'})
            else:
                return render(request, 'cats/comment.html', {'title': '添加评论'})
        else:
            # Bug fix: _ajax_error(status_code, msg) — the message string was
            # being passed as the status code.
            return _ajax_error(404, 'Not Found')

    def post(self, request, *args, **kwargs):
        # Placeholder until comment persistence is implemented.
        return _ajax_success('Hello, World!')
@login_required
def delete(RedirectView):
    # NOTE(review): this is declared as a function whose single argument is
    # named RedirectView, but its body only defines an inner post() that is
    # never called, and the view returns None — it cannot work as written.
    # It appears it was meant to be a class-based view; confirm the intent
    # before rewriting.
    def post(self, request, id, *args, **kwargs):
        ImgModel = Cat_models.CatImgs
        img = get_list_or_404(ImgModel, id=id)
class MoreView(View):
    """AJAX endpoint returning the next page of image metadata."""

    def post(self, request, *args, **kwargs):
        page = request.POST['n']
        records = Cat_models.CatImgs.objects.with_info(int(page), 30)
        payload = [
            {
                'id': record.id,
                'img_from': record.img_from,
                'img_desc': record.img_desc,
                'n_stars': record.n_stars,
                'n_likes': record.n_likes,
                'n_comments': record.n_comments,
            }
            for record in records
        ]
        return _ajax_success(payload)
|
mercuree/reapy | reapy/reascript_api/network/client.py | <gh_stars>0
from reapy.errors import DisconnectedClientError, DistError
from reapy.tools import json
from .socket import Socket
class Client(Socket):
    """
    Client side of the ``reapy`` dist API.

    Connects to the server running inside REAPER and forwards program
    execution requests to it.
    """

    def __init__(self, port):
        super(Client, self).__init__()
        self._connect(port)

    def _connect(self, port):
        # The server greets each new connection with the client's own
        # address, ASCII-encoded; keep it as an identifier.
        super(Client, self).connect(("localhost", port))
        self.address = self.recv(timeout=None).decode("ascii")

    def _get_result(self):
        raw = self.recv(timeout=None).decode()
        return json.loads(raw)

    def run_program(self, program, input):
        """
        Send a program to the server and return its output.

        Parameters
        ----------
        program : reapy.tools.Program
            Program to run.
        input : dict
            Input to the program.

        Returns
        -------
        result
            Program output.

        Raises
        ------
        DistError
            When an error occurs while the server runs the program; its
            traceback is sent back to the client and used to raise a
            DistError.
        """
        request = {"program": program.to_dict(), "input": input}
        self.send(json.dumps(request).encode())
        result = self._get_result()
        if result["type"] == "error":
            raise DistError(result["traceback"])
        if result["type"] == "result":
            return result["value"]
|
mercuree/reapy | reapy/tools/inside_reaper.py | import contextlib
import reapy
if not reapy.is_inside_reaper():
from . import dist_program
class InsideReaper(contextlib.ContextDecorator):
    """
    Context manager for efficient calls from outside REAPER.

    It can also be used as a function decorator.

    Examples
    --------
    Instead of running:

    >>> project = reapy.Project()
    >>> l = [project.bpm for i in range(1000)]

    which takes around 30 seconds, run:

    >>> project = reapy.Project()
    >>> with reapy.inside_reaper():
    ...     l = [project.bpm for i in range(1000)]
    ...

    which takes 0.1 seconds!

    Example usage as decorator:

    >>> @reapy.inside_reaper()
    ... def add_n_tracks(n):
    ...     for x in range(n):
    ...         reapy.Project().add_track()
    """

    def __enter__(self):
        # "HOLD" asks the in-REAPER server to serve this connection
        # synchronously until released, avoiding one round-trip per call.
        if not reapy.is_inside_reaper():
            dist_program.Program("HOLD").run()

    def __exit__(self, exc_type, exc_val, exc_tb):
        if not reapy.is_inside_reaper():
            dist_program.Program("RELEASE").run()
        # Never suppress exceptions raised inside the block.
        return False
|
mercuree/reapy | reapy/tools/dist_program.py | <gh_stars>0
"""Define distant Program class."""
import reapy
from reapy.errors import DisabledDistAPIError, DisabledDistAPIWarning
from . import program
# When running outside REAPER, set up the dist-API machinery: the web
# interface is queried for the reapy server port, and CLIENT then carries
# program-execution requests to the server running inside REAPER.
if not reapy.is_inside_reaper():
    try:
        from reapy.reascript_api.network import Client, WebInterface
        WEB_INTERFACE = WebInterface(reapy.config.WEB_INTERFACE_PORT)
        CLIENT = Client(WEB_INTERFACE.get_reapy_server_port())
    except DisabledDistAPIError:
        # Dist API is disabled in REAPER: warn instead of failing the import.
        import warnings
        warnings.warn(DisabledDistAPIWarning())
class Program(program.Program):
    """Program variant that runs remotely when imported outside REAPER."""

    @staticmethod
    def from_function(function_name):
        """Wrap the named API function as a plain callable.

        The returned callable forwards ``*args``/``**kwargs`` and returns
        the function's single result.
        """
        code = "result = {}(*args, **kwargs)".format(function_name)
        program = Program(code, "result")
        def g(*args, **kwargs):
            return program.run(args=args, kwargs=kwargs)[0]
        return g

    def run(self, **input):
        """Run locally inside REAPER, otherwise delegate to the dist client.

        NOTE(review): if the dist API was disabled at import time, CLIENT
        is never defined and this raises NameError -- confirm whether a
        clearer error is wanted.
        """
        if reapy.is_inside_reaper():
            return super(Program, self).run(**input)
        else:
            return CLIENT.run_program(self, input)
|
mercuree/reapy | setup.py | <reponame>mercuree/reapy
from setuptools import setup, find_packages
from os import path

here = path.abspath(path.dirname(__file__))

# Read the long description with an explicit encoding so the build does not
# depend on the platform default (e.g. cp1252 on Windows would fail on
# non-ASCII characters in README.md).
with open(path.join(here, "README.md"), encoding="utf-8") as f:
    long_description = f.read()

setup(
    name="python-reapy",
    version="0.3.0",
    description="A pythonic wrapper for REAPER's ReaScript Python API",
    long_description=long_description,
    long_description_content_type="text/markdown",
    author="<NAME>",
    author_email="<EMAIL>",
    license="MIT",
    classifiers=[
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3"
    ],
    keywords="REAPER DAW ReaScript API wrapper music audio",
    packages=find_packages(exclude=["docs"]),
    python_requires=">=3.0"
)
|
mercuree/reapy | reapy/core/track/send.py | import reapy
from reapy import reascript_api as RPR
from reapy.core import ReapyObject
from reapy.tools import Program
class Send(ReapyObject):
    """A send (or hardware output) of a REAPER track.

    Identified by the source track (``track_id``) plus its ``index``
    within that track's sends of the given ``type`` ("send" or
    "hardware").
    """

    _class_name = "Send"

    def __init__(self, track=None, index=0, track_id=None, type="send"):
        if track_id is None:
            message = "One of `track` or `track_id` must be specified."
            assert track is not None, message
            track_id = track.id
        self.index = index
        self.track_id = track_id
        self.type = type

    def _get_int_type(self):
        # Map the string type onto the integer category expected by the
        # ReaScript *TrackSend* functions.
        types = {
            "hardware": 1,
            "send": 0
        }
        int_type = types[self.type]
        return int_type

    @property
    def _kwargs(self):
        return {
            "index": self.index,
            "track_id": self.track_id,
            "type": self.type
        }

    def delete(self):
        """
        Delete send.
        """
        RPR.RemoveTrackSend(self.track_id, self._get_int_type(), self.index)

    def flip_phase(self):
        """
        Toggle whether phase is flipped.
        """
        # Run remotely as a single round-trip (read + toggle + write).
        code = """
        send.is_phase_flipped = not send.is_phase_flipped
        """
        Program(code).run(send=self)

    def get_info(self, param_name):
        # Thin wrapper over GetTrackSendInfo_Value for this send.
        value = RPR.GetTrackSendInfo_Value(
            self.track_id, self._get_int_type(), self.index, param_name
        )
        return value

    @property
    def is_mono(self):
        """
        Whether send is mono or stereo.

        :type: bool
        """
        is_mono = bool(self.get_info("B_MONO"))
        return is_mono

    @is_mono.setter
    def is_mono(self, mono):
        self.set_info("B_MONO", mono)

    @property
    def is_muted(self):
        """
        Whether send is muted.

        :type: bool
        """
        is_muted = bool(self.get_info("B_MUTE"))
        return is_muted

    @is_muted.setter
    def is_muted(self, is_muted):
        """
        Mute or unmute send.

        Parameters
        ----------
        is_muted : bool
            Whether to mute or unmute send.
        """
        self.set_info("B_MUTE", is_muted)

    @property
    def is_phase_flipped(self):
        """
        Whether send phase is flipped (i.e. signal multiplied by -1).

        :type: bool
        """
        is_phase_flipped = bool(self.get_info("B_PHASE"))
        return is_phase_flipped

    @is_phase_flipped.setter
    def is_phase_flipped(self, flipped):
        self.set_info("B_PHASE", flipped)

    def mute(self):
        """
        Mute send.
        """
        self.is_muted = True

    @property
    def pan(self):
        """
        Send pan (from -1=left to 1=right).

        :type: float
        """
        pan = self.get_info("D_PAN")
        return pan

    @pan.setter
    def pan(self, pan):
        """
        Set send pan.

        Parameters
        ----------
        pan : float
            New pan between -1 (left) and 1 (right).
        """
        self.set_info("D_PAN", pan)

    def set_info(self, param_name, value):
        # Thin wrapper over SetTrackSendInfo_Value for this send.
        RPR.SetTrackSendInfo_Value(
            self.track_id, self._get_int_type(), self.index, param_name, value
        )

    @property
    def source_track(self):
        """
        Source track.

        :type: Track
        """
        track = reapy.Track(self.track_id)
        return track

    def unmute(self):
        """
        Unmute send.
        """
        self.is_muted = False

    @property
    def volume(self):
        """
        Send volume.

        :type: float
        """
        volume = self.get_info("D_VOL")
        return volume

    @volume.setter
    def volume(self, volume):
        self.set_info("D_VOL", volume)
|
mercuree/reapy | reapy/reascripts/activate_reapy_server.py | <filename>reapy/reascripts/activate_reapy_server.py<gh_stars>0
"""
Activate ``reapy`` server.
Running this ReaScript from inside REAPER sets the ``reapy`` server
that receives and executes API calls requests from outside. It will
automatically be run when importing ``reapy`` from outside, if it is
enabled.
"""
import reapy
import os
import site
if reapy.is_inside_reaper():
from reapy import reascript_api as RPR
from reapy.reascript_api.network import Server
def main_loop():
    """Run one server iteration, then reschedule itself via REAPER defer."""
    # Get new connections
    SERVER.accept()
    # Process API call requests
    requests = SERVER.get_requests()
    results = SERVER.process_requests(requests)
    SERVER.send_results(results)
    # Run main_loop again
    # NOTE: RPR_defer is presumably injected into the global namespace by
    # REAPER's embedded Python (reaper_python) -- confirm; it is not the
    # ``RPR`` module imported above.
    RPR_defer("main_loop()")
def generate_api_module():
    """Write ``reapy_generated_api.py`` into the user site-packages dir.

    The generated module re-exports every ReaScript API function as a
    remotely-runnable ``Program`` wrapper, so code outside REAPER can
    call them transparently.
    """
    function_names = RPR.__all__
    sitepackages_dir = site.getusersitepackages()
    if not os.path.exists(sitepackages_dir):
        os.makedirs(sitepackages_dir, 0o770, False)
    filepath = os.path.join(sitepackages_dir, "reapy_generated_api.py")
    with open(filepath, "w") as file:
        # Header: import, blank line, then an explicit __all__ listing.
        header = [
            "from reapy.tools import Program",
            "",
            "__all__ = ["
        ]
        header.extend(" \"{}\",".format(name) for name in function_names)
        header.append("]\n\n")
        file.write("\n".join(header))
        # One Program wrapper per API function.
        template = "{name} = Program.from_function(\"RPR.{name}\")\n"
        for name in function_names:
            file.write(template.format(name=name))
def get_new_reapy_server():
    """Create the reapy Server and advertise its port via REAPER ext state."""
    server_port = reapy.config.REAPY_SERVER_PORT
    # Publish the port so clients outside REAPER can discover the server.
    reapy.set_ext_state("reapy", "server_port", server_port)
    server = Server(server_port)
    return server
if __name__ == "__main__":
    SERVER = get_new_reapy_server()
    generate_api_module()
    main_loop()
    # Clean up the advertised port when REAPER unloads the script.
    # NOTE: RPR_atexit is presumably provided globally by REAPER's embedded
    # Python, like RPR_defer above -- confirm.
    RPR_atexit("reapy.delete_ext_state('reapy', 'server_port')")
|
mercuree/reapy | reapy/tools/__init__.py | <gh_stars>0
"""Define tools such as Program and custom json module."""
import reapy
from .inside_reaper import InsideReaper
if not reapy.is_inside_reaper():
from .dist_program import Program
else:
from .program import Program
|
mercuree/reapy | reapy/core/item/item.py | <reponame>mercuree/reapy
import reapy
from reapy import reascript_api as RPR
from reapy.core import ReapyObject
from reapy.tools import Program
class Item(ReapyObject):
    """A REAPER media item, identified by its ReaScript item id."""

    _class_name = "Item"

    def __init__(self, id):
        self.id = id

    def __eq__(self, other):
        # Check the type *before* touching ``other.id`` so that comparing
        # with a non-Item (which may not have an ``id`` attribute) returns
        # False instead of raising AttributeError.
        return isinstance(other, Item) and self.id == other.id

    def __hash__(self):
        # Defining __eq__ disables the inherited __hash__; restore
        # hashability using the same key that equality is based on.
        return hash(self.id)

    @property
    def _args(self):
        return self.id,

    @property
    def active_take(self):
        """
        Return the active take of the item.

        Returns
        -------
        take : Take
            Active take of the item.
        """
        take = reapy.Take(RPR.GetActiveTake(self.id))
        return take

    def add_take(self):
        """
        Create and return a new take in item.

        Returns
        -------
        take : Take
            New take in item.
        """
        take_id = RPR.AddTakeToMediaItem(self.id)
        take = reapy.Take(take_id)
        return take

    def delete(self):
        """Delete item."""
        code = "RPR.DeleteTrackMediaItem(item.track.id, item.id)"
        Program(code).run(item=self)

    def get_info_value(self, param_name):
        value = RPR.GetMediaItemInfo_Value(self.id, param_name)
        return value

    def get_take(self, index):
        """
        Return index-th take of item.

        Parameters
        ----------
        index : int
            Take index.

        Returns
        -------
        take : Take
            index-th take of media item.
        """
        take_id = RPR.GetItemTake(self.id, index)
        take = reapy.Take(take_id)
        return take

    @property
    def is_selected(self):
        """
        Return whether item is selected.

        Returns
        -------
        is_selected : bool
            Whether item is selected.
        """
        is_selected = bool(RPR.IsMediaItemSelected(self.id))
        return is_selected

    @property
    def length(self):
        """
        Return item length in seconds.

        Returns
        -------
        length : float
            Item length in seconds.
        """
        param_name = "D_LENGTH"
        length = self.get_info_value(param_name)
        return length

    @length.setter
    def length(self, length):
        """
        Set item length.

        Parameters
        ----------
        length : float
            New item length in seconds.
        """
        RPR.SetMediaItemLength(self.id, length, True)

    @property
    def n_takes(self):
        """
        Return the number of takes of media item.

        Returns
        -------
        n_takes : int
            Number of takes of media item.
        """
        n_takes = RPR.GetMediaItemNumTakes(self.id)
        return n_takes

    @property
    def position(self):
        """
        Return item position in seconds.

        Returns
        -------
        position : float
            Item position in seconds.
        """
        position = self.get_info_value("D_POSITION")
        return position

    @position.setter
    def position(self, position):
        """
        Set media item position to `position`.

        Parameters
        ----------
        position : float
            New item position in seconds.
        """
        RPR.SetMediaItemPosition(self.id, position, False)

    @property
    def project(self):
        """
        Return item parent project.

        Returns
        -------
        project : Project
            Item parent project.
        """
        project_id = RPR.GetItemProjectContext(self.id)
        project = reapy.Project(project_id)
        return project

    def split(self, position):
        """
        Split item and return left and right parts.

        Parameters
        ----------
        position : float
            Split position in seconds.

        Returns
        -------
        left, right : Item
            Left and right parts of the split.
        """
        right_id = RPR.SplitMediaItem(self.id, position)
        left, right = self, Item(right_id)
        return left, right

    @property
    def takes(self):
        """
        Return list of all takes of media item.

        Returns
        -------
        takes : list of Take
            List of all takes of media item.
        """
        # Fetch all take ids in a single remote round-trip.
        code = """
        n_takes = RPR.GetMediaItemNumTakes(item_id)
        take_ids = [RPR.GetMediaItemTake(item_id, i) for i in range(n_takes)]
        """
        take_ids = Program(code, "take_ids").run(item_id=self.id)[0]
        takes = [reapy.Take(take_id) for take_id in take_ids]
        return takes

    @property
    def track(self):
        """
        Parent track of item.

        Set it by passing a track, or a track index.

        :type: Track

        Examples
        --------
        >>> track0, track1 = project.tracks[0:2]
        >>> item = track0.items[0]
        >>> item.track == track0
        True
        >>> item.track = track1  # Move to track 1
        >>> item.track = 0  # Move to track 0
        """
        track_id = RPR.GetMediaItemTrack(self.id)
        track = reapy.Track(track_id)
        return track

    @track.setter
    def track(self, track):
        if isinstance(track, int):
            track = reapy.Track(track, project=self.project)
        RPR.MoveMediaItemToTrack(self.id, track.id)

    def update(self):
        """Update item in REAPER interface."""
        RPR.UpdateItemInProject(self.id)
|
mercuree/reapy | reapy/reascript_api/network/server.py | """Define Server class."""
import reapy
from reapy import reascript_api as RPR
from reapy.tools import json
from .socket import Socket
import socket
import traceback
if reapy.is_inside_reaper():
from reapy.tools.program import Program
class Server(Socket):
    """
    Server part of the ``reapy`` dist API.

    It is instantiated inside REAPER. It receives and processes API
    call requests coming from the outside.
    """

    def __init__(self, port):
        super(Server, self).__init__()
        self.bind(("", port))
        self.listen()
        # address -> wrapped connection Socket
        self.connections = {}
        # Very short timeout: accept/recv are polled from REAPER's defer
        # loop and must never block the UI.
        self.settimeout(.0001)

    @Socket._non_blocking
    def _get_request(self, connection, address):
        # Returns None on timeout (via the _non_blocking wrapper).
        try:
            request = connection.recv()
            request = json.loads(request.decode())
        except (ConnectionAbortedError, ConnectionResetError):
            # Client has disconnected
            # Pretend client has nicely requested to disconnect
            code = "server.disconnect(address)"
            program = Program(code).to_dict()
            input = {"address": address, "server": self}
            request = {"program": program, "input": input}
        return request

    def _hold_connection(self, address):
        # Entered when a client sends the special "HOLD" program (see
        # InsideReaper): serve this one client synchronously until it
        # sends "RELEASE" (or disconnects).
        connection = self.connections[address]
        result = {"type": "result", "value": None}
        self._send_result(connection, result)
        request = self._get_request(connection, address)
        while request is None or request["program"][0] != "RELEASE":
            if request is None:
                # Timed out; poll again.
                request = self._get_request(connection, address)
                continue
            result = self._process_request(request, address)
            try:
                self._send_result(connection, result)
                request = self._get_request(connection, address)
            except (ConnectionAbortedError, ConnectionResetError):
                # request was to disconnect
                request = {"program": ["RELEASE"]}
        result = {"type": "result", "value": None}
        return result

    def _process_request(self, request, address):
        if request["program"][0] == "HOLD":
            return self._hold_connection(address)
        program = Program(*request["program"])
        result = {}
        # Make the API and reapy itself visible to the executed code.
        request["input"].update({"RPR": RPR, "reapy": reapy})
        try:
            result["value"] = program.run(**request["input"])
            result["type"] = "result"
        except Exception:
            # Errors are sent back to the client instead of raised in REAPER
            # (which would cause the server to crash).
            result["traceback"] = traceback.format_exc()
            result["type"] = "error"
        return result

    def _send_result(self, connection, result):
        result = json.dumps(result).encode()
        connection.send(result)

    @Socket._non_blocking
    def accept(self):
        connection, address = super(Server, self).accept()
        self.connections[address] = connection
        # Greet the client with its own address (used as an id client-side).
        connection.send("{}".format(address).encode("ascii"))

    def disconnect(self, address):
        connection = self.connections[address]
        connection.shutdown(socket.SHUT_RDWR)
        connection.close()
        del self.connections[address]

    def get_requests(self):
        # Poll every live connection once; skip those that timed out.
        requests = {}
        for address, connection in self.connections.items():
            request = self._get_request(connection, address)
            if request is not None:
                requests[address] = request
        return requests

    def process_requests(self, requests):
        results = {}
        for address, request in requests.items():
            result = self._process_request(request, address)
            results[address] = result
        return results

    def send_results(self, results):
        for address, result in results.items():
            try:
                connection = self.connections[address]
                self._send_result(connection, result)
            except (KeyError, BrokenPipeError):
                # Happens when the client requested to disconnect.
                # Nothing must be returned in that case.
                pass
|
mercuree/reapy | reapy/tools/json.py | """Encode and decode ``reapy`` objects as JSON."""
import importlib
import json
class ClassCache(dict):
    """Lazy name -> class map over ``reapy.core``, filled on first access."""

    # Cached reference to the reapy.core module (imported lazily).
    _core = None

    def __missing__(self, key):
        if self._core is None:
            # The import is here because otherwise there is an import loop
            # and to perform import just once.
            self._core = importlib.import_module("reapy.core")
        self[key] = getattr(self._core, key)
        return self[key]


# Module-level singleton used by object_hook below.
_CLASS_CACHE = ClassCache()
class ReapyEncoder(json.JSONEncoder):
    """JSON encoder that serializes any object exposing ``_to_dict()``."""

    def default(self, obj):
        to_dict = getattr(obj, "_to_dict", None)
        if to_dict is not None:
            return to_dict()
        # Fall back to the stock encoder (raises TypeError as usual).
        return json.JSONEncoder.default(self, obj)
def loads(s):
    """Deserialize *s*, rebuilding any encoded reapy objects on the way."""
    return json.loads(s, object_hook=object_hook)
def dumps(x):
    """Serialize *x* to JSON, encoding reapy objects via ReapyEncoder."""
    return json.dumps(x, cls=ReapyEncoder)
def object_hook(x):
    """Turn dicts tagged with ``"__reapy__"`` back into reapy core objects."""
    if "__reapy__" not in x:
        return x
    cls = _CLASS_CACHE[x["class"]]
    return cls(*x["args"], **x["kwargs"])
|
mercuree/reapy | reapy/tools/program.py | <gh_stars>0
"""
Define base Program class.
Notes
-----
Runing ``from reapy.tools import Program`` only imports this
``Program`` class if called from inside REAPER. If not, then the
subclass ``reapy.tools.dist_program.Program``, which overrides
``Program.run``, is imported.
"""
import reapy
from reapy import reascript_api as RPR
class Program:
    """An executable snippet of Python code with declared output variables."""

    def __init__(self, code, *output):
        """
        Build program.

        Parameters
        ----------
        code : str
            Code to execute. If all lines except leading empty ones share a
            constant indentation, that indentation is stripped (so docstring
            code works).
        output : iterable of str
            Names of the variables whose values are returned after running.
        """
        self._code = self.parse_code(code)
        self._output = tuple(output)

    def to_dict(self):
        """
        Return a serializable representation of the program.

        Returns
        -------
        rep : tuple
            ``(code, *output_names)``. A program with the same state can
            be rebuilt with ``Program(*rep)`` (this is what the dist
            client and server exchange).
        """
        return (self._code,) + self._output

    def parse_code(self, code):
        """
        Normalize *code*: expand tabs and dedent by the first line's indent.

        Parameters
        ----------
        code : str
            Code to be parsed.

        Returns
        -------
        code : str
            Parsed code.
        """
        code = code.replace("\t", " " * 4)
        lines = code.split("\n")
        # Drop leading empty lines so the first real line sets the indent.
        while lines[0] == "":
            lines.pop(0)
        indent = len(lines[0]) - len(lines[0].lstrip(" "))
        return "\n".join(line[indent:] for line in lines)

    def run(self, **input):
        """
        Execute the code with *input* as its global namespace.

        ``RPR`` and ``reapy`` are injected automatically. Returns the
        values of the declared output variables as a tuple.
        """
        input.update({"RPR": RPR, "reapy": reapy})
        exec(self._code, input)
        return tuple(input[name] for name in self._output)
|
mercuree/reapy | reapy/reascript_api/__init__.py | <reponame>mercuree/reapy<filename>reapy/reascript_api/__init__.py
import reapy
import sys

if reapy.is_inside_reaper():
    # Import functions without the useless starting "RPR_".
    import reaper_python as _RPR
    __all__ = [s[4:] for s in _RPR.__dict__ if s.startswith("RPR_")]
    for s in __all__:
        # Re-bind each RPR_-prefixed function under its short name in this
        # module's namespace.
        exec("{} = _RPR.__dict__['{}']".format(s, "RPR_" + s))
    # Import SWS functions.
    try:
        import sws_python as _SWS
        # Only the names SWS adds beyond the core API.
        sws_functions = set(_SWS.__dict__) - set(_RPR.__dict__)
        __all__ += list(sws_functions)
        for s in sws_functions:
            exec("from sws_python import {}".format(s))
    except ModuleNotFoundError:  # SWS is not installed
        pass
else:
    # Outside REAPER, fall back to the generated dist-API wrappers.
    from .dist_api import __all__
    from .dist_api import *
|
mercuree/reapy | reapy/reascript_api/dist_api.py | import reapy
if not reapy.is_inside_reaper():
try:
from reapy_generated_api import __all__
from reapy_generated_api import *
except ImportError: # Happens when ``reapy`` dist API is disabled
__all__ = []
|
mercuree/reapy | reapy/reascript_api/network/socket.py | import socket
class Socket:
    """
    Wrapped `socket` that can send and receive data of any length.

    Messages are length-prefixed: each payload is preceded by its size
    encoded as an 8-byte little-endian integer.
    """

    def __init__(self, s=None):
        self._socket = socket.socket() if s is None else s

    @staticmethod
    def _non_blocking(f):
        """
        Modify a socket method so that it returns `None` when time
        out is reached.
        """
        def g(*args, **kwargs):
            try:
                return f(*args, **kwargs)
            except socket.timeout:
                pass
        return g

    def accept(self, *args, **kwargs):
        # Wrap the accepted raw connection so it speaks the same protocol.
        connection, address = self._socket.accept(*args, **kwargs)
        connection = Socket(connection)
        return connection, address

    def bind(self, *args, **kwargs):
        return self._socket.bind(*args, **kwargs)

    def close(self, *args, **kwargs):
        return self._socket.close(*args, **kwargs)

    def connect(self, *args, **kwargs):
        return self._socket.connect(*args, **kwargs)

    def listen(self, *args, **kwargs):
        return self._socket.listen(*args, **kwargs)

    def recv(self, timeout=.0001):
        """
        Receive one length-prefixed message and return its payload.

        Raises
        ------
        ConnectionAbortedError
            If the peer closed the connection (zero-length prefix, or
            EOF in the middle of a message).
        """
        # First get data length
        self.settimeout(timeout)
        length = self._socket.recv(8)
        length = int.from_bytes(length, "little")
        if length == 0:
            raise ConnectionAbortedError
        # Then receive the payload. A single recv() may legally return
        # fewer bytes than requested (short read), so loop until the whole
        # message has arrived. The previous implementation issued one
        # recv() per chunk without checking the amount actually read and
        # could silently return a truncated payload.
        self.settimeout(None)
        chunks = []
        remaining = length
        while remaining:
            chunk = self._socket.recv(min(remaining, 65536))
            if not chunk:  # Peer closed mid-message.
                raise ConnectionAbortedError
            chunks.append(chunk)
            remaining -= len(chunk)
        return b"".join(chunks)

    def send(self, data):
        """
        Send data, prefixed with its length (8 bytes, little-endian).
        """
        # First send data length
        length = len(data).to_bytes(8, "little")
        self._socket.sendall(length)
        # Then send data
        self._socket.sendall(data)

    def settimeout(self, *args, **kwargs):
        return self._socket.settimeout(*args, **kwargs)

    def shutdown(self, *args, **kwargs):
        return self._socket.shutdown(*args, **kwargs)
|
mercuree/reapy | reapy/core/project/marker.py | import reapy
from reapy import reascript_api as RPR
from reapy.core import ReapyObject
from reapy.tools import Program
class Marker(ReapyObject):
    """A project marker, identified by its parent project id and marker index."""

    _class_name = "Marker"

    def __init__(
        self, parent_project=None, index=None, parent_project_id=None
    ):
        if parent_project_id is None:
            message = (
                "One of `parent_project` or `parent_project_id` must be "
                "specified."
            )
            assert parent_project is not None, message
            parent_project_id = parent_project.id
        self.project_id = parent_project_id
        self.index = index

    def _get_enum_index(self):
        """
        Return marker index as needed by RPR.EnumProjectMarkers2.
        """
        # The user-facing marker index is not the enumeration position;
        # scan the project's markers remotely (single round-trip) to find
        # our position in the enumeration.
        code = """
        index = [
            i for i, m in enumerate(project.markers)
            if m.index == marker.index
        ][0]
        """
        index = Program(code, "index").run(
            marker=self, project=reapy.Project(self.project_id)
        )[0]
        return index

    @property
    def _kwargs(self):
        return {
            "index": self.index, "parent_project_id": self.project_id
        }

    def delete(self):
        """
        Delete marker.
        """
        RPR.DeleteProjectMarker(self.project_id, self.index, False)

    @property
    def position(self):
        """
        Return marker position.

        Returns
        -------
        position : float
            Marker position in seconds.
        """
        code = """
        index = marker._get_enum_index()
        position = RPR.EnumProjectMarkers2(
            marker.project_id, index, 0, 0, 0, 0, 0
        )[4]
        """
        position = Program(code, "position").run(marker=self)[0]
        return position

    @position.setter
    def position(self, position):
        """
        Set marker position.

        Parameters
        ----------
        position : float
            Marker position in seconds.
        """
        RPR.SetProjectMarker2(
            self.project_id, self.index, False, position, 0, ""
        )
|
mercuree/reapy | reapy/core/fx/fx_param.py | <gh_stars>0
import reapy
import reapy.reascript_api as RPR
from reapy.core import ReapyObject, ReapyObjectList
from reapy.errors import DistError
from reapy.tools import Program
class FXParam(float):
    """FX parameter.

    Subclasses float: the instance *is* the parameter's current value,
    with bookkeeping attached for talking to the ReaScript API.
    """

    def __init__(self, value, parent_list, index, functions):
        # The float value itself is fixed in __new__; __init__ only
        # attaches the parent list, index and API-callback table.
        float.__init__(value)
        self.parent_list = parent_list
        self.index = index
        self.functions = functions

    def __new__(self, value, *args, **kwargs):
        # Extra constructor arguments are consumed by __init__.
        return float.__new__(self, value)

    def add_envelope(self):
        """
        Create envelope for the parameter and return it.

        Returns
        -------
        envelope : Envelope
            New envelope for the parameter.

        Notes
        -----
        If the envelope already exists, the function returns it.
        """
        parent_fx = self.parent_list.parent_fx
        parent = parent_fx.parent
        if isinstance(parent, reapy.Track):
            callback = RPR.GetFXEnvelope
        else:  # Then it is a Take
            callback = self.functions["GetEnvelope"]
        # Final argument True = create the envelope if missing.
        envelope = reapy.Envelope(parent, callback(
            parent.id, parent_fx.index, self.index, True
        ))
        return envelope

    @property
    def envelope(self):
        """
        Parameter envelope (or None if it doesn't exist).

        :type: Envelope or NoneType
        """
        parent_fx = self.parent_list.parent_fx
        parent = parent_fx.parent
        if isinstance(parent, reapy.Track):
            callback = RPR.GetFXEnvelope
        else:  # Then it is a Take
            callback = self.functions["GetEnvelope"]
        # Final argument False = do not create a missing envelope.
        envelope = reapy.Envelope(parent, callback(
            parent.id, parent_fx.index, self.index, False
        ))
        if not envelope._is_defined:
            envelope = None
        return envelope

    def format_value(self, value):
        """
        Return human readable string for value.

        It is the way ``value`` would be printed in REAPER GUI if it
        was the actual parameter value. Only works with FX that
        support Cockos VST extensions.

        Parameters
        ----------
        value : float
            Value to format.

        Returns
        -------
        formatted : str
            Formatted value.
        """
        parent_fx = self.parent_list.parent_fx
        parent = parent_fx.parent
        return self.functions["FormatParamValue"](
            parent.id, parent_fx.index, self.index, value, "", 2048
        )[5]

    @property
    def formatted(self):
        """
        Human readable string for parameter value.

        Only works with FX that support Cockos VST extensions.

        :type: str
        """
        parent_fx = self.parent_list.parent_fx
        parent = parent_fx.parent
        return self.functions["GetFormattedParamValue"](
            parent.id, parent_fx.index, self.index, "", 2048
        )[4]

    @property
    def name(self):
        """
        Parameter name.

        :type: str
        """
        parent_list = self.parent_list
        name = self.functions["GetParamName"](
            parent_list.parent_id, parent_list.fx_index, self.index, "", 2048
        )[4]
        return name

    @property
    def normalized(self):
        """
        Normalized FX parameter.

        Attribute can be set with a float, but be careful that since
        floats are immutable, this parameter won't have the right value
        anymore. See Examples below.

        :type: NormalizedFXParam

        Examples
        --------
        Say the parameter range is (0.0, 20.0).

        >>> param = fx.params[0]
        >>> param
        10.0
        >>> param.normalized
        0.5

        If you set the parameter like below, the parameter moves in
        REAPER, but the FXParam object you are using is not valid
        anymore.

        >>> param.normalized = 1
        >>> param, param.normalized
        10.0, 0.5

        You thus have to grab the updated FXParam from the FX like
        below.

        >>> param = fx.params[0]
        >>> param, param.normalized
        20.0, 1.0
        """
        min, max = self.range
        value = (self - min)/(max - min)
        return NormalizedFXParam(
            value, self.parent_list, self.index, self.functions
        )

    @normalized.setter
    def normalized(self, value):
        parent_fx = self.parent_list.parent_fx
        parent = parent_fx.parent
        # NOTE(review): other call sites pass ``parent_fx.index`` here;
        # ``parent_fx.id`` looks suspicious -- confirm FX exposes ``.id``.
        self.functions["SetParamNormalized"](
            parent.id, parent_fx.id, self.index, value
        )

    @property
    def range(self):
        """
        Parameter range.

        :type: float, float
        """
        parent_list = self.parent_list
        min, max = self.functions["GetParam"](
            parent_list.parent_id, parent_list.fx_index, self.index, 0, 0
        )[-2:]
        return min, max
class FXParamsList(ReapyObjectList):
    """
    Container class for a list of FX parameters.

    Parameters can be accessed by name or index.

    Examples
    --------
    >>> params_list = fx.params
    >>> params_list[0]  # Say this is "Dry Gain" parameter
    0.5
    >>> params_list["Dry Gain"]
    0.5
    >>> params_list["Dry Gain"] = 0.1
    >>> params_list[0]
    0.1
    """

    def __init__(
        self, parent_fx=None, parent_id=None, parent_fx_index=None
    ):
        if parent_fx is None:
            parent_fx = reapy.FX(parent_id=parent_id, index=parent_fx_index)
        self.parent_id = parent_fx.parent_id
        self.fx_index = parent_fx.index
        self.functions = parent_fx.functions

    def __getitem__(self, i):
        # Name lookup + bounds check + value fetch in one held connection.
        with reapy.inside_reaper():
            if isinstance(i, str):
                i = self._get_param_index(i)
            n_params = len(self)
            if i >= n_params:
                raise IndexError(
                    "{} has only {} params".format(self.parent_fx, n_params)
                )
            i = i % n_params  # Allows for negative values
            value = self.functions["GetParam"](
                self.parent_id, self.fx_index, i, 0, 0
            )[0]
        param = FXParam(value, self, i, self.functions)
        return param

    def __iter__(self):
        # Fetch all values in a single remote round-trip, then wrap.
        code = """
        values = [param_list.functions["GetParam"](
            param_list.parent_id, param_list.fx_index, i, 0, 0
        )[0] for i in range(len(param_list))]
        """
        values, = Program(code, "values").run(param_list=self)
        for i, value in enumerate(values):
            yield FXParam(value, self, i, self.functions)

    def __len__(self):
        length = self.parent_fx.n_params
        return length

    def __setitem__(self, i, value):
        with reapy.inside_reaper():
            if isinstance(i, str):
                i = self._get_param_index(i)
            n_params = len(self)
            if i >= n_params:
                raise IndexError(
                    "{} has only {} params".format(self.parent_fx, n_params)
                )
            i = i % n_params  # Allows for negative values
            self.functions["SetParam"](
                self.parent_id, self.fx_index, i, value
            )

    def _get_param_index(self, name):
        # Resolve a parameter name to its index remotely; a DistError from
        # the server (name not found) is surfaced as IndexError.
        code = """
        names = [param_list[i].name for i in range(len(param_list))]
        index = names.index(name)
        """
        try:
            index = Program(code, "index").run(
                name=name, param_list=self
            )[0]
            return index
        except DistError:
            raise IndexError(
                "{} has no param named {}".format(self.parent_fx, name)
            )

    @property
    def _kwargs(self):
        return {
            "parent_fx_index": self.fx_index, "parent_id": self.parent_id
        }

    @property
    def parent_fx(self):
        """
        Parent FX.

        :type: FX
        """
        fx = reapy.FX(parent_id=self.parent_id, index=self.fx_index)
        return fx
class NormalizedFXParam(FXParam):
    """
    Normalized FX parameter.

    Access it via FXParam.normalized.

    Examples
    --------
    >>> fx.params[0]
    0.0
    >>> fx.params[0].range
    (-2.0, 0.0)
    >>> fx.params[0].normalized
    1.0
    >>> fx.params[0].normalized.range
    (0.0, 1.0)
    """

    def format_value(self, value):
        """
        Return human readable string for value.

        It is the way ``value`` would be printed in REAPER GUI if it
        was the actual parameter value. Only works with FX that
        support Cockos VST extensions.

        Parameters
        ----------
        value : float
            Value to format.

        Returns
        -------
        formatted : str
            Formatted value.
        """
        parent_fx = self.parent_list.parent_fx
        parent = parent_fx.parent
        # Normalized variant of the base-class call.
        return self.functions["FormatParamValueNormalized"](
            parent.id, parent_fx.index, self.index, value, "", 2048
        )[5]

    @property
    def range(self):
        """
        Parameter range (always equal to (0.0, 1.0)).
        """
        return (0.0, 1.0)

    @property
    def raw(self):
        """
        Raw (i.e. unnormalized) parameter.

        :type: FXParam
        """
        return self.parent_list[self.index]
|
kevinxin90/bte_schema_web | src/app.py | <reponame>kevinxin90/bte_schema_web
import tornado.auth
import tornado.escape
import tornado.ioloop
import tornado.options
import tornado.web
import os.path
#from handlers.displayhandler import DisplayHandler
class HomeHandler(tornado.web.RequestHandler):
    """Serve the explorer landing page."""

    @tornado.web.addslash
    def get(self):
        self.render("about.html", messages=None)
"""
class MainHandler(tornado.web.RequestHandler):
@tornado.web.addslash
def get(self):
self.render("connect.html", messages=None)
class DiscoverHandler(tornado.web.RequestHandler):
@tornado.web.addslash
def get(self):
self.render("discover.html", messages=None)
class DisplayHandler1(tornado.web.RequestHandler):
@tornado.web.addslash
def get(self):
self.render("display.html", messages=None)
"""
class Application(tornado.web.Application):
    """Tornado application serving the explorer pages and static assets."""

    def __init__(self):
        base_dir = os.path.dirname(__file__)
        settings = {
            'debug': True,
            'template_path': os.path.join(base_dir, "templates"),
            'static_path': os.path.join(base_dir, "static"),
        }
        handlers = [
            (r"/explorer/?", HomeHandler),
            (r"/explorer/static/(.*)",
             tornado.web.StaticFileHandler,
             {'path': settings['static_path']}),
        ]
        tornado.web.Application.__init__(self, handlers, **settings)
"""
(r"/explorer/connect/?", MainHandler),
(r"/explorer/connect/static/(.*)",
tornado.web.StaticFileHandler,
{'path': settings['static_path']}),
(r"/explorer/discover/?", DiscoverHandler),
(r"/explorer/discover/static/(.*)",
tornado.web.StaticFileHandler,
{'path': settings['static_path']}),
(r"/explorer/display1/?", DisplayHandler1),
(r"/explorer/display1/static/(.*)",
tornado.web.StaticFileHandler,
{'path': settings['static_path']}),
(r"/explorer/display/?", DisplayHandler),
(r"/explorer/display/static/(.*)",
tornado.web.StaticFileHandler,
{'path': settings['static_path']}),
"""
def main():
    """Start the explorer web application on port 8853 (blocks forever)."""
    app = Application()
    app.listen(8853)
    # Enter the Tornado event loop; this call does not return.
    tornado.ioloop.IOLoop.instance().start()


if __name__ == "__main__":
    main()
|
kevinxin90/bte_schema_web | src/handlers/displayhandler.py | <reponame>kevinxin90/bte_schema_web
import tornado.web
import tornado.template
from networkx.readwrite import json_graph
import json
from biothings_explorer.registry import Registry
from biothings_explorer.connect import ConnectTwoConcepts
# Shared BioThings Explorer registry, built once at import time.
reg = Registry()
# Node "level" -> display color for the rendered graph.
# NOTE(review): 'rgba(255,168,7)' has only three components — looks like
# 'rgb(...)' was intended; confirm against the front-end before changing.
colors = {1: 'green', 2: 'red', 3: 'rgba(255,168,7)'}
class DisplayHandler(tornado.web.RequestHandler):
    """Builds and renders the connection graph between two biomedical concepts."""

    def post(self):
        """Pack the form fields into compact values and redirect to GET.

        Input and output are each encoded as "<class>.<id>.<value>" and passed
        as query parameters so the result page has a shareable URL.
        """
        input_cls = self.get_body_argument("input_cls")
        input_id = self.get_body_argument("input_id")
        # edge1/edge2 are parsed but currently unused (edges are passed as
        # None to ConnectTwoConcepts in get() below).
        edge1 = self.get_body_argument("edge1").split(',')
        edge2 = self.get_body_argument("edge2").split(',')
        input_val = self.get_body_argument("input_val")
        output_cls = self.get_body_argument("output_cls")
        output_id = self.get_body_argument("output_id")
        output_val = self.get_body_argument("output_val")
        _input = '.'.join([input_cls, input_id, input_val])
        _output = '.'.join([output_cls, output_id, output_val])
        self.set_status(302)
        self.redirect('/explorer/display?input=' + _input + '&output=' + _output)

    def get(self):
        """Run the two-concept query and render the graph, or an error message."""
        _input = self.get_query_argument('input')
        _output = self.get_query_argument('output')
        #_edge1 = self.get_query_argument('edge1')
        #_edge2 = self.get_query_argument('edge2')
        # Undo the "<class>.<id>.<value>" packing done by post().
        input_cls, input_id, input_val = _input.split('.')
        output_cls, output_id, output_val = _output.split('.')
        rest_input = {'type': input_cls,
                      'identifier': 'bts:' + input_id,
                      'values': input_val}
        # restructure output as a dict
        rest_output = {'type': output_cls,
                       'identifier': 'bts:' + output_id,
                       'values': output_val}
        ctc = ConnectTwoConcepts(rest_input, rest_output,
                                 edge1=None, edge2=None,
                                 registry=reg)
        ctc.connect()
        # if no results found, return error message
        try:
            res = json_graph.node_link_data(ctc.G)
        except AttributeError:
            self.clear()
            self.set_status(200)
            self.write("Unable to find any connections")
            self.finish()
            return
        # Rename link keys to the names the front-end graph library expects.
        links = res['links']
        new_links = []
        for _link in links:
            _link['from'] = _link.pop('source')
            _link['to'] = _link.pop('target')
            _link['font'] = {'align': 'middle'}
            _link['arrows'] = 'to'
            new_links.append(_link)
        res['links'] = new_links
        new_nodes = []
        for _node in res['nodes']:
            # Label: identifier without the "bts:" prefix, plus the node id.
            _node['label'] = _node['identifier'][4:] + ':' + str(_node['id'])
            _node['color'] = colors[_node['level']]
            if 'equivalent_ids' in _node:
                # Flatten the equivalent-id mapping into an HTML tooltip string.
                equ_ids = []
                for k, v in _node['equivalent_ids'].items():
                    if isinstance(v, list):
                        for _v in v:
                            equ_ids.append(k + ':' + str(_v))
                    else:
                        equ_ids.append(k + ":" + str(v))
                equ_ids = '<br>'.join(equ_ids)
                _node['equivalent_ids'] = equ_ids
            new_nodes.append(_node)
        res['nodes'] = new_nodes
        if res:
            self.clear()
            self.set_status(200)
            self.render("display.html", myvalue=json.dumps(res))
            return
|
Abhir1902/HackotberFest2021 | Instabot.py | from instabot import Bot
b = Bot()
choice = int(input("Login : 1 \nUpload image : 2 \nFollow : 3 \nSend message : 4\n"))
print("")
while(choice!=5):
if(choice==1):
p=input("Enter the username : ")
q=input("Enter the password : ")
b.login(username=p,password=q)
if(choice==2):
m = str(input("Enter the image location : "))
n=str(input("Write a caption : \n"))
b.upload_photo(m,caption=n)
if(choice==3):
t=str(input("Enter the username of the personality to be followed : "))
b.follow(t)
if(choice==4):
l=[]
n=int(input("Enter the number of people whom the message is needed to be sent : "))
while(n):
s=str(input("Enter the username to whom the message is to be sent"))
l.append(s)
n-=1
w=str(input("Enter the message needed to be sent : \n"))
b.send_message(w,l)
|
sienaiwun/Unity_TiledResource | Assets/image_generator.py | from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont
import os
image_dir_name = "images"
texture_scale = 1
image_name_prefix = "image_"
def to_scale_size(input):
return (int)(input*texture_scale)
texture_size = to_scale_size(512)
def image_char(char, image_size, font_size, outline_lenght, number_str):
    """Render *char* on a white square with a black border and save it as PNG.

    The image is written to ``<cwd>/<image_dir_name>/<number_str>.png``,
    creating the directory on demand.

    :param char: text to draw (e.g. a tile number).
    :param image_size: edge length of the square image, in pixels.
    :param font_size: point size for the Arial font.
    :param outline_lenght: width of the black frame kept around the white area.
    :param number_str: output file name, without the ``.png`` extension.
    """
    img = Image.new("RGB", (image_size, image_size), (0, 0, 0))
    draw = ImageDraw.Draw(img)
    # White interior; the uncovered black background forms the outline.
    draw.rectangle([(outline_lenght, outline_lenght),
                    (image_size - outline_lenght, image_size - outline_lenght)],
                   fill=(255, 255, 255))
    # Raw string: the original "C:\Windows\Fonts\Arial.ttf" relied on \W, \F
    # and \A not being recognised escape sequences, which is deprecated and
    # will eventually become a SyntaxError.
    font_path = r"C:\Windows\Fonts\Arial.ttf"
    font = ImageFont.truetype(font_path, font_size)
    draw.text((to_scale_size(5), to_scale_size(135)), char, (0, 0, 0), font=font)
    dir_name = os.path.join(os.getcwd(), image_dir_name)
    if not os.path.isdir(dir_name):
        os.mkdir(dir_name)
    img.save(os.path.join(dir_name, number_str + '.png'))
if __name__ == "__main__":
for i in xrange(1024):
number_str = '{:3d}'.format(i)
file_str = '{:03d}'.format(i)
image_char(number_str, image_size = texture_size, font_size = to_scale_size(300), outline_lenght =0, number_str = image_name_prefix+file_str)
|
sienaiwun/Unity_TiledResource | Assets/tiles_generator.py | from PIL import Image
import os
import math
import shutil
from image_generator import texture_size
tiles_dir_name = "StreamingAssets"
cache_dir_name_prefix = "cache"
table_Size = 32
padding_size = 0
dimension_size = texture_size
class PixelData:
    """Disk-backed square RGB pixel buffer, stored as one raw file per row.

    Each row of the *size* x *size* image lives in its own file named
    ``<name>_<row>`` under *path*; pixels are 3 bytes (R, G, B).
    """

    def __init__(self, size, path, name):
        # 3 components per pixel, one unsigned char each.
        self._component_count = 3
        self._component_size = 1 # in char
        self._pixel_size = self._component_count * self._component_size #in char
        self._size = size
        self._path = path
        self._filename = name
        file_size = self._size * self._pixel_size
        # Pre-create and size the backing file for every row.
        for row in xrange(self._size):
            data_file_name = self.get_file_path(row)
            if not os.path.exists(data_file_name):
                open(data_file_name, "w+")
            with open(data_file_name,"r+") as f:
                old_size = os.path.getsize(data_file_name)
                if old_size != file_size:
                    # NOTE(review): passes `size` (pixel count) where
                    # `file_size` (byte count) looks intended — verify.
                    self.__set_file_size(f, size)

    def __set_file_size(self, f, size):
        # Sparse-extend the file: seek to the last byte and write one NUL.
        f.seek(size - 1)
        f.write(b"\0")

    def __clamp(self, value, min_value, max_value):
        # Clamp *value* into [min_value, max_value].
        return max(min(value, max_value), min_value)

    def __block_copy(self, src, src_offset, dest, dest_offset, length):
        # Bounds-guarded element-by-element copy (arraycopy style).
        for i in xrange(length):
            if dest_offset + i < len(dest) and src_offset + i < len(src):
                dest[dest_offset + i] = src[src_offset + i]

    def get_file_path(self, row):
        """Return the path of the raw file backing *row* of this buffer."""
        file_path = self._path + os.path.sep + "{0}_{1}".format(self._filename, row)
        return file_path

    def set_pixels(self, x, y, block_width, block_height, colors):
        """Write a ``block_width`` x ``block_height`` block of *colors* at (x, y).

        *colors* is a flat sequence of byte values, row-major, 3 per pixel.
        """
        for row in xrange(block_height):
            data_file_name = self.get_file_path(row + y)
            if not os.path.exists(data_file_name):
                open(data_file_name, "w+")
            with open(data_file_name,"r+") as f:
                f.seek(x*self._pixel_size)
                begin = row * block_width * self._pixel_size
                end = begin + block_width * self._pixel_size
                write_byte = colors[begin:end]
                byte_array = bytearray(write_byte)
                f.write(byte_array)

    def shuffle_height(self, h, blockheight, mip):
        """Map row *h* to its vertically flipped position within its chunk."""
        # Python 2 integer division throughout.
        mode = blockheight /int(pow(2,mip))
        index = h/mode
        offset = h%mode
        chunk_num = blockheight/mode
        chunk_height = blockheight/chunk_num
        shuffleh = (chunk_num -1 - index)*chunk_height + offset
        return shuffleh

    def get_pixels(self, x, y, block_width, block_height,mip):
        """Read a block of pixels at (x, y), edge-clamped and row-shuffled.

        Returns a flat list of single-byte strings (3 per pixel).
        """
        pixels = [b'\x00']* block_width * block_height*self._pixel_size
        for h in xrange(block_height):
            # Clamp the source row into the buffer (edge replication).
            row = self.__clamp(y + h, 0, self._size - 1)
            read_data_file = self.get_file_path(row)
            with open(read_data_file, "rb") as f:
                # Rows are rearranged per-chunk before being stored.
                h = self.shuffle_height(h,block_height,mip)
                self.__get_pixels(x,block_width, f, pixels, h*block_width*self._pixel_size)
        return pixels

    def __get_pixels(self, x, block_width, f_reader, pixels, pixeloffset):
        # Clamp the requested horizontal span to the buffer, then replicate
        # the edge pixels into any out-of-range left/right padding.
        begin = self.__clamp(x,0, self._size -1)
        length = min(block_width + x-begin, self._size-begin)
        f_reader.seek(begin*self._pixel_size)
        buf_length = length * self._pixel_size
        buf = f_reader.read(buf_length)
        self.__block_copy(buf,0,pixels,pixeloffset+(begin-x)*self._pixel_size, buf_length)
        if x < 0:
            # Left padding: repeat the first pixel read.
            for i in xrange(begin-x):
                self.__block_copy(buf,0,pixels,pixeloffset + i*self._pixel_size,self._pixel_size)
        if length < block_width:
            # Right padding: repeat the last pixel read.
            for i in xrange(block_width- length):
                self.__block_copy(buf, buf_length - self._pixel_size, pixels, pixeloffset + (length+i)*self._pixel_size, self._pixel_size)
def get_pixel_data(mip_level):
    """Open (creating the cache directory on demand) the pixel store for *mip_level*."""
    cache_dir = os.getcwd() + os.path.sep + cache_dir_name_prefix
    if not os.path.isdir(cache_dir):
        os.mkdir(cache_dir)
    # Each mip level halves the full-resolution edge length.
    edge = (table_Size * dimension_size) >> mip_level
    return PixelData(edge, cache_dir, "mip_{0}".format(mip_level))
def data_from_image(img):
    """Return the image's pixels as one flat list of colour components.

    ``img.getdata()`` yields per-pixel tuples such as ``(r, g, b)``; the
    result concatenates every component in row-major order.
    """
    flat = []
    for pixel in img.getdata():
        flat.extend(pixel)
    return flat
def generate_mip0():
    """Compose all generated tile images into the mip-0 pixel cache."""
    from image_generator import texture_size, image_dir_name, image_name_prefix
    cur_location = os.getcwd()
    pixel_data = get_pixel_data(0)
    for row in xrange(table_Size):
        for col in xrange(table_Size):
            # Tiles are numbered row-major across the grid.
            number_str = '{:03d}'.format(row * table_Size + col)
            input_file_string = cur_location + os.path.sep + image_dir_name + os.path.sep + image_name_prefix + number_str + '.png'
            if not os.path.exists(input_file_string):
                # Missing tiles are skipped; the cache stays zeroed there.
                continue
            img = Image.open(input_file_string)
            pixels_data = data_from_image(img)
            img_width, img_height = img.size
            pixel_data.set_pixels(col*texture_size, row*texture_size,img_width, img_height, pixels_data)
    print("Generate Mip0Cache Done.")
def generate_mip(mip):
    """Downsample mip level *mip* - 1 into level *mip*, patch by patch."""
    input_data = get_pixel_data(mip-1)
    # NOTE: this local shadows the module-level output_data() function
    # (harmless here, but easy to misread).
    output_data = get_pixel_data(mip)
    patch_size = dimension_size
    double_patch_size = patch_size *2
    # Python 2 integer division.
    patch_count = output_data._size / patch_size
    for row in xrange(patch_count):
        for col in xrange(patch_count):
            input_pixel = input_data.get_pixels(col*double_patch_size,row*double_patch_size,double_patch_size,double_patch_size,0)
            input_img = Image.frombytes("RGB",(double_patch_size,double_patch_size),''.join(input_pixel))
            # Halve the patch with PIL's antialiased thumbnail resize.
            input_img.thumbnail((patch_size,patch_size))
            #input_img.show()
            output_img_data = data_from_image(input_img)
            output_data.set_pixels(col*patch_size,row*patch_size,patch_size,patch_size,output_img_data)
def output_data(mip):
print ("output mip"+str(mip))
img_data = get_pixel_data(mip)
size_with_padding = dimension_size + padding_size*2
page_count = img_data._size/ dimension_size
cur_location = os.getcwd()
dir_name = cur_location + os.path.sep + tiles_dir_name
for row in xrange(page_count):
for col in xrange(page_count):
pixel_data = img_data.get_pixels(
col * dimension_size - padding_size,
row * dimension_size - padding_size,
size_with_padding,
size_with_padding,
mip
)
output_img = Image.frombytes(mode = 'RGB',size = (size_with_padding,size_with_padding), data = ''.join(pixel_data))
img_file_name = "Tiles_MIP{2}_Y{1}_X{0}.png".format( col, row, mip)
output_img.save(dir_name + os.path.sep + img_file_name)
def tiles():
    """Full pipeline: wipe old output, rebuild every mip level, export tiles."""
    cur_location = os.getcwd()
    tile_dir_name = cur_location + os.path.sep + tiles_dir_name
    if os.path.isdir(tile_dir_name):
        shutil.rmtree(tile_dir_name)
    cache_dir_name = cur_location + os.path.sep + cache_dir_name_prefix
    if os.path.isdir(cache_dir_name):
        shutil.rmtree(cache_dir_name)
    os.mkdir(tile_dir_name)
    # log2(table_Size) halvings plus the base level.
    maxLevel = int(math.log(table_Size, 2)) + 1
    for mip in xrange(maxLevel):
        if mip == 0:
            generate_mip0()
        else:
            generate_mip(mip)
        output_data(mip)
if __name__ == "__main__":
'''img = Image.open("output/black.png")
size = img.size
raw = img.tobytes()
#b = bytes(raw, 'utf-8')
#st = str(b)
pixel_data = b'\xff' * 12
list1 = list(pixel_data)
list1[0] = b'\x00'
#list1[1] = b'\x00'
#list1[2] = b'\x00'
list1[3] = b'\x00'
list1[4] = b'\x00'
list1[5] = b'\x00'
st = ''.join(list1)
st2 = str(list1)
import io
output_img = Image.frombytes('RGB', size=(2, 2), data=str(list1))
#output_img = Image.open(io.BytesIO(pixel_data))
output_img.show()
'''
tiles() |
bedirhansaglam/PythonMachineLearning | PythonMachineLearning/NavieBayes.py | # -*- coding: utf-8 -*-
"""
Created on Tue Jan 02 00:11:04 2018
@author: Bedirhan
"""
def default_training_data():
    """Return the built-in (word, category) training pairs.

    25 Turkish sport-related words labelled "spor", followed by
    25 economy-related words labelled "ekonomi".
    """
    spor_words = [
        "futbol", "basketbol", "tenis", "voleybol", "gol", "puan", "sayı",
        "kaleci", "hakem", "skor", "galatasaray", "fenerbahçe", "beşiktaş",
        "bursaspor", "trabzonspor", "futbolcu", "mhk", "tff", "birincilig",
        "süperlig", "lider", "smaç", "ribaund", "faul", "ofsayt",
    ]
    ekonomi_words = [
        "para", "euro", "dolar", "tl", "altın", "alış", "satış", "çek",
        "senet", "çeyrek", "faiz", "hisse", "cari", "oran", "açık",
        "deflasyon", "enflasyon", "imf", "makro", "piyasa", "sermaye",
        "endeks", "yatırım", "yatırımcı", "tahvil",
    ]
    training_data = [(word, "spor") for word in spor_words]
    training_data += [(word, "ekonomi") for word in ekonomi_words]
    return training_data
def prior_probability(text, liste):
    """Fraction of training pairs in *liste* whose category equals *text*.

    Raises ZeroDivisionError for an empty list (same as the original).
    """
    matches = sum(1 for item in liste if item[1] == text)
    return float(matches) / len(liste)
def t_hesapla(text, category, liste):
    """Count occurrences of *text* among the words of *category* items, plus one.

    The +1 is add-one (Laplace) smoothing so unseen words never count zero.
    """
    count = 1
    for item in liste:
        if item[1] == category:
            count += item[0].split().count(text)
    return count
def p_hesapla(text, category, t_values):
    """Conditional probability of *text* given *category*.

    *t_values* holds (word, category, count) triples; the result is the
    word's count divided by the total count of all words in the category.
    """
    category_total = 0
    text_count = 0
    for word, cat, count in t_values:
        if cat != category:
            continue
        category_total += count
        if word == text:
            text_count = count
    return float(text_count) / category_total
def fit(liste):
    """Train the classifier: return (word, category, P(word|category)) triples."""
    # Collect the distinct categories in first-seen order.
    categories = []
    for _, label in liste:
        if label not in categories:
            categories.append(label)
    # Priors are computed for parity with the original implementation,
    # although only the conditional table is returned.
    priors = [(c, prior_probability(c, liste)) for c in categories]
    # Smoothed raw counts, then normalised conditional probabilities.
    t_values = [(item[0], c, t_hesapla(item[0], c, liste))
                for c in categories for item in liste]
    p_values = [(item[0], c, p_hesapla(item[0], c, t_values))
                for c in categories for item in liste]
    return p_values
def predict(cumle, p_val):
    """Classify sentence *cumle* with the naive Bayes table *p_val*.

    Multiplies the per-word conditional probabilities for every category and
    returns the category with the highest product.  Ties resolve to the
    category whose score was appended last, matching the original
    ``sorted(...)[-1]`` behaviour.

    Fixes: removed an unused ``import numpy as np``; replaced the Python-2-only
    ``print max_list`` statement with a call form valid on both 2 and 3;
    renamed the sort lambda's parameter, which shadowed the list it sorted.
    """
    categories = []
    for entry in p_val:
        if entry[1] not in categories:
            categories.append(entry[1])
    # Lower-case the sentence words to match the training vocabulary.
    words = [w.lower() for w in cumle.split()]
    scores = []
    for category in categories:
        product = 1
        for word in words:
            for table_word, table_cat, prob in p_val:
                if word == table_word and category == table_cat:
                    product *= float(prob)
        scores.append((category, product))
    ranked = sorted(scores, key=lambda pair: pair[1])
    print(ranked)
    return ranked[-1][0]
|
bedirhansaglam/PythonMachineLearning | PythonMachineLearning/tasarim.py | <reponame>bedirhansaglam/PythonMachineLearning
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'tasarim.ui'
#
# Created: Tue Jan 02 18:48:48 2018
# by: PyQt4 UI code generator 4.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# Compatibility shims emitted by pyuic4 for old/new PyQt4 string APIs.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # PyQt4 API v2 / Python 3: QString is gone; str is already unicode.
    def _fromUtf8(s):
        return s

try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Newer PyQt4: translate() no longer takes an encoding argument.
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName(_fromUtf8("Dialog"))
Dialog.resize(1280, 720)
Dialog.setMinimumSize(QtCore.QSize(1280, 720))
Dialog.setMaximumSize(QtCore.QSize(1280, 720))
Dialog.setStyleSheet(_fromUtf8("#Dialog{\n"
"background-color:white;}\n"
"QGraphicsView{\n"
"border:2px solid #FF895D}\n"
"QLabel{\n"
"color:#FF895D}\n"
"QGroupBox{\n"
"background-color: white;\n"
"text-align:center;}\n"
"QGroupBox::title {\n"
" subcontrol-origin: margin;\n"
" subcontrol-position: top center; /* position at the top center */\n"
" padding: 0 3px;\n"
" color: #FF895D;\n"
"}\n"
"QComboBox{\n"
"color:#FF895D;\n"
"background-color:#fff;\n"
"}\n"
"QTableWidget{\n"
"border:2px solid #FF895D;}"))
self.gb_main_menu = QtGui.QGroupBox(Dialog)
self.gb_main_menu.setGeometry(QtCore.QRect(0, 50, 200, 670))
font = QtGui.QFont()
font.setPointSize(10)
self.gb_main_menu.setFont(font)
self.gb_main_menu.setStyleSheet(_fromUtf8("QPushButton{\n"
"background-color: #FF895D;\n"
"color: #fff;\n"
"border-radius:1px;\n"
"text-align: left;\n"
"padding-left:15px\n"
"}\n"
"QPushButton:hover{\n"
"background-color:#1B435D;\n"
"color: #D5EEFF;\n"
"}\n"
"#gb_main_menu{\n"
"background-color:#FF895D}"))
self.gb_main_menu.setTitle(_fromUtf8(""))
self.gb_main_menu.setObjectName(_fromUtf8("gb_main_menu"))
self.pb_main_menu_1 = QtGui.QPushButton(self.gb_main_menu)
self.pb_main_menu_1.setGeometry(QtCore.QRect(2, 0, 195, 30))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.pb_main_menu_1.setFont(font)
self.pb_main_menu_1.setObjectName(_fromUtf8("pb_main_menu_1"))
self.pb_main_menu_2 = QtGui.QPushButton(self.gb_main_menu)
self.pb_main_menu_2.setGeometry(QtCore.QRect(2, 30, 195, 30))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.pb_main_menu_2.setFont(font)
self.pb_main_menu_2.setObjectName(_fromUtf8("pb_main_menu_2"))
self.pb_main_menu_3 = QtGui.QPushButton(self.gb_main_menu)
self.pb_main_menu_3.setGeometry(QtCore.QRect(2, 60, 195, 30))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.pb_main_menu_3.setFont(font)
self.pb_main_menu_3.setObjectName(_fromUtf8("pb_main_menu_3"))
self.pb_main_menu_4 = QtGui.QPushButton(self.gb_main_menu)
self.pb_main_menu_4.setGeometry(QtCore.QRect(2, 90, 195, 30))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.pb_main_menu_4.setFont(font)
self.pb_main_menu_4.setObjectName(_fromUtf8("pb_main_menu_4"))
self.pb_main_menu_5 = QtGui.QPushButton(self.gb_main_menu)
self.pb_main_menu_5.setGeometry(QtCore.QRect(2, 120, 195, 30))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.pb_main_menu_5.setFont(font)
self.pb_main_menu_5.setObjectName(_fromUtf8("pb_main_menu_5"))
self.pb_main_menu_6 = QtGui.QPushButton(self.gb_main_menu)
self.pb_main_menu_6.setGeometry(QtCore.QRect(2, 150, 195, 30))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.pb_main_menu_6.setFont(font)
self.pb_main_menu_6.setObjectName(_fromUtf8("pb_main_menu_6"))
self.pb_main_menu_7 = QtGui.QPushButton(self.gb_main_menu)
self.pb_main_menu_7.setGeometry(QtCore.QRect(2, 180, 195, 30))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.pb_main_menu_7.setFont(font)
self.pb_main_menu_7.setObjectName(_fromUtf8("pb_main_menu_7"))
self.pb_main_menu_8 = QtGui.QPushButton(self.gb_main_menu)
self.pb_main_menu_8.setGeometry(QtCore.QRect(2, 210, 195, 30))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.pb_main_menu_8.setFont(font)
self.pb_main_menu_8.setObjectName(_fromUtf8("pb_main_menu_8"))
self.pb_main_menu_9 = QtGui.QPushButton(self.gb_main_menu)
self.pb_main_menu_9.setGeometry(QtCore.QRect(2, 240, 195, 30))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.pb_main_menu_9.setFont(font)
self.pb_main_menu_9.setObjectName(_fromUtf8("pb_main_menu_9"))
self.gb_top_menu = QtGui.QGroupBox(Dialog)
self.gb_top_menu.setGeometry(QtCore.QRect(0, 0, 1280, 51))
self.gb_top_menu.setStyleSheet(_fromUtf8("#gb_top_menu{\n"
"background-color:#FF895D;\n"
"border:2px solid #FF895D; }"))
self.gb_top_menu.setTitle(_fromUtf8(""))
self.gb_top_menu.setObjectName(_fromUtf8("gb_top_menu"))
self.pb_hamburger_menu = QtGui.QPushButton(self.gb_top_menu)
self.pb_hamburger_menu.setGeometry(QtCore.QRect(0, 0, 48, 48))
self.pb_hamburger_menu.setCursor(QtGui.QCursor(QtCore.Qt.OpenHandCursor))
self.pb_hamburger_menu.setStyleSheet(_fromUtf8("#pb_hamburger_menu{\n"
"color: grey;\n"
" border-image: url(./icons/menu.png) 3 10 3 10;\n"
" border-top: 3px transparent;\n"
" border-bottom: 3px transparent;\n"
" border-right: 10px transparent;\n"
" border-left: 10px transparent;}\n"
"\n"
"#pb_hamburger_menu:hover{\n"
"color: grey;\n"
" border-image: url(./icons/menu_hover.png) 5 12 5 12;\n"
" border-top: 5px transparent;\n"
" border-bottom: 5px transparent;\n"
" border-right: 12px transparent;\n"
" border-left: 12px transparent;}\n"
""))
self.pb_hamburger_menu.setText(_fromUtf8(""))
self.pb_hamburger_menu.setObjectName(_fromUtf8("pb_hamburger_menu"))
self.gb_knn_sinif = QtGui.QGroupBox(Dialog)
self.gb_knn_sinif.setGeometry(QtCore.QRect(200, 50, 1080, 670))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.gb_knn_sinif.setFont(font)
self.gb_knn_sinif.setStyleSheet(_fromUtf8("#gb_knn_sinif{\n"
"background-color: white;\n"
"text-align:center;}\n"
"#gb_knn_sinif::title {\n"
" subcontrol-origin: margin;\n"
" subcontrol-position: top center; /* position at the top center */\n"
" padding: 0 3px;\n"
" color: #FF895D;\n"
"}\n"
""))
self.gb_knn_sinif.setObjectName(_fromUtf8("gb_knn_sinif"))
self.t1_gv_nokta = QtGui.QGraphicsView(self.gb_knn_sinif)
self.t1_gv_nokta.setGeometry(QtCore.QRect(10, 390, 425, 270))
self.t1_gv_nokta.setObjectName(_fromUtf8("t1_gv_nokta"))
self.label = QtGui.QLabel(self.gb_knn_sinif)
self.label.setGeometry(QtCore.QRect(20, 10, 121, 31))
font = QtGui.QFont()
font.setPointSize(15)
font.setBold(True)
font.setWeight(75)
self.label.setFont(font)
self.label.setObjectName(_fromUtf8("label"))
self.label_2 = QtGui.QLabel(self.gb_knn_sinif)
self.label_2.setGeometry(QtCore.QRect(510, 350, 101, 31))
font = QtGui.QFont()
font.setPointSize(15)
font.setBold(True)
font.setWeight(75)
self.label_2.setFont(font)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.t1_gv_veriseti = QtGui.QGraphicsView(self.gb_knn_sinif)
self.t1_gv_veriseti.setGeometry(QtCore.QRect(10, 50, 425, 270))
self.t1_gv_veriseti.setObjectName(_fromUtf8("t1_gv_veriseti"))
self.gb_new_point = QtGui.QGroupBox(self.gb_knn_sinif)
self.gb_new_point.setGeometry(QtCore.QRect(550, 30, 271, 301))
self.gb_new_point.setTitle(_fromUtf8(""))
self.gb_new_point.setObjectName(_fromUtf8("gb_new_point"))
self.t1_te_x = QtGui.QPlainTextEdit(self.gb_new_point)
self.t1_te_x.setGeometry(QtCore.QRect(190, 90, 51, 31))
self.t1_te_x.setObjectName(_fromUtf8("t1_te_x"))
self.label_3 = QtGui.QLabel(self.gb_new_point)
self.label_3.setGeometry(QtCore.QRect(20, 30, 161, 31))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_3.setFont(font)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.label_5 = QtGui.QLabel(self.gb_new_point)
self.label_5.setGeometry(QtCore.QRect(20, 130, 161, 31))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_5.setFont(font)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.t1_te_y = QtGui.QPlainTextEdit(self.gb_new_point)
self.t1_te_y.setGeometry(QtCore.QRect(190, 130, 51, 31))
self.t1_te_y.setObjectName(_fromUtf8("t1_te_y"))
self.label_4 = QtGui.QLabel(self.gb_new_point)
self.label_4.setGeometry(QtCore.QRect(20, 90, 171, 21))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_4.setFont(font)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.t1_te_k = QtGui.QPlainTextEdit(self.gb_new_point)
self.t1_te_k.setGeometry(QtCore.QRect(190, 30, 51, 31))
self.t1_te_k.setObjectName(_fromUtf8("t1_te_k"))
self.t1_pb_kumele = QtGui.QPushButton(self.gb_new_point)
self.t1_pb_kumele.setGeometry(QtCore.QRect(50, 180, 171, 91))
font = QtGui.QFont()
font.setPointSize(11)
self.t1_pb_kumele.setFont(font)
self.t1_pb_kumele.setStyleSheet(_fromUtf8("QPushButton{\n"
"background-color: #FF895D;\n"
"color: #fff;\n"
"border-radius:1px;\n"
"}\n"
"QPushButton:hover{\n"
"background-color:#1B435D;\n"
"color: #D5EEFF;\n"
"}"))
self.t1_pb_kumele.setObjectName(_fromUtf8("t1_pb_kumele"))
self.label_6 = QtGui.QLabel(self.gb_knn_sinif)
self.label_6.setGeometry(QtCore.QRect(10, 360, 231, 31))
font = QtGui.QFont()
font.setPointSize(15)
font.setBold(True)
font.setWeight(75)
self.label_6.setFont(font)
self.label_6.setObjectName(_fromUtf8("label_6"))
self.t1_gv_sonuc = QtGui.QGraphicsView(self.gb_knn_sinif)
self.t1_gv_sonuc.setGeometry(QtCore.QRect(510, 390, 425, 270))
self.t1_gv_sonuc.setObjectName(_fromUtf8("t1_gv_sonuc"))
self.gb_k_means = QtGui.QGroupBox(Dialog)
self.gb_k_means.setGeometry(QtCore.QRect(200, 50, 0, 0))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.gb_k_means.setFont(font)
self.gb_k_means.setStyleSheet(_fromUtf8("#gb_k_means{\n"
"background-color: white;\n"
"text-align:center;}\n"
"#gb_k_means::title {\n"
" subcontrol-origin: margin;\n"
" subcontrol-position: top center; /* position at the top center */\n"
" padding: 0 3px;\n"
" color: #FF895D;\n"
"}\n"
"QGroupBox{\n"
"background-color: white;\n"
"text-align:center;}\n"
"QGroupBox::title {\n"
" subcontrol-origin: margin;\n"
" subcontrol-position: top center; /* position at the top center */\n"
" padding: 0 3px;\n"
" color: #FF895D;\n"
"}\n"
"QPushButton{\n"
"background-color: #FF895D;\n"
"color: #fff;\n"
"border-radius:1px;\n"
"}\n"
"QPushButton:hover{\n"
"background-color:#1B435D;\n"
"color: #D5EEFF;\n"
"}\n"
"QComboBox{\n"
"color:#FF895D;\n"
"background-color:#fff;\n"
"}\n"
"QTableWidget{\n"
"border:2px solid #FF895D;}"))
self.gb_k_means.setObjectName(_fromUtf8("gb_k_means"))
self.t2_gv_sonuc = QtGui.QGraphicsView(self.gb_k_means)
self.t2_gv_sonuc.setGeometry(QtCore.QRect(640, 380, 420, 270))
self.t2_gv_sonuc.setObjectName(_fromUtf8("t2_gv_sonuc"))
self.t2_pb_kmeans = QtGui.QPushButton(self.gb_k_means)
self.t2_pb_kmeans.setGeometry(QtCore.QRect(510, 460, 101, 71))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.t2_pb_kmeans.setFont(font)
self.t2_pb_kmeans.setObjectName(_fromUtf8("t2_pb_kmeans"))
self.t2_pb_dataload = QtGui.QPushButton(self.gb_k_means)
self.t2_pb_dataload.setGeometry(QtCore.QRect(370, 270, 221, 41))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.t2_pb_dataload.setFont(font)
self.t2_pb_dataload.setObjectName(_fromUtf8("t2_pb_dataload"))
self.t2_gv_data = QtGui.QGraphicsView(self.gb_k_means)
self.t2_gv_data.setGeometry(QtCore.QRect(60, 380, 420, 270))
self.t2_gv_data.setObjectName(_fromUtf8("t2_gv_data"))
self.k_means_tbl_data = QtGui.QTableWidget(self.gb_k_means)
self.k_means_tbl_data.setGeometry(QtCore.QRect(50, 50, 291, 281))
self.k_means_tbl_data.setObjectName(_fromUtf8("k_means_tbl_data"))
self.k_means_tbl_data.setColumnCount(0)
self.k_means_tbl_data.setRowCount(0)
self.label_37 = QtGui.QLabel(self.gb_k_means)
self.label_37.setGeometry(QtCore.QRect(50, 35, 91, 16))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_37.setFont(font)
self.label_37.setObjectName(_fromUtf8("label_37"))
self.groupBox = QtGui.QGroupBox(self.gb_k_means)
self.groupBox.setGeometry(QtCore.QRect(370, 50, 221, 191))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.groupBox.setFont(font)
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.label_40 = QtGui.QLabel(self.groupBox)
self.label_40.setGeometry(QtCore.QRect(30, 50, 46, 13))
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.label_40.setFont(font)
self.label_40.setObjectName(_fromUtf8("label_40"))
self.kmeans_x = QtGui.QLineEdit(self.groupBox)
self.kmeans_x.setGeometry(QtCore.QRect(70, 50, 110, 20))
self.kmeans_x.setObjectName(_fromUtf8("kmeans_x"))
self.label_41 = QtGui.QLabel(self.groupBox)
self.label_41.setGeometry(QtCore.QRect(30, 80, 46, 13))
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.label_41.setFont(font)
self.label_41.setObjectName(_fromUtf8("label_41"))
self.kmeans_y = QtGui.QLineEdit(self.groupBox)
self.kmeans_y.setGeometry(QtCore.QRect(70, 80, 110, 20))
self.kmeans_y.setObjectName(_fromUtf8("kmeans_y"))
self.kmeans_etiket = QtGui.QComboBox(self.groupBox)
self.kmeans_etiket.setGeometry(QtCore.QRect(70, 110, 110, 20))
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.kmeans_etiket.setFont(font)
self.kmeans_etiket.setObjectName(_fromUtf8("kmeans_etiket"))
self.kmeans_etiket.addItem(_fromUtf8(""))
self.kmeans_etiket.addItem(_fromUtf8(""))
self.kmeans_pb_ekle = QtGui.QPushButton(self.groupBox)
self.kmeans_pb_ekle.setGeometry(QtCore.QRect(130, 150, 75, 23))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.kmeans_pb_ekle.setFont(font)
self.kmeans_pb_ekle.setObjectName(_fromUtf8("kmeans_pb_ekle"))
self.gb_rus_ros = QtGui.QGroupBox(Dialog)
self.gb_rus_ros.setGeometry(QtCore.QRect(200, 50, 0, 0))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.gb_rus_ros.setFont(font)
self.gb_rus_ros.setStyleSheet(_fromUtf8("#gb_rus_ros{\n"
"background-color: white;\n"
"text-align:center;}\n"
"#gb_rus_ros::title {\n"
" subcontrol-origin: margin;\n"
" subcontrol-position: top center; /* position at the top center */\n"
" padding: 0 3px;\n"
" color: #FF895D;\n"
"}\n"
"QPushButton{\n"
"background-color: #FF895D;\n"
"color: #fff;\n"
"border-radius:1px;\n"
"}\n"
"QPushButton:hover{\n"
"background-color:#1B435D;\n"
"color: #D5EEFF;\n"
"}"))
self.gb_rus_ros.setObjectName(_fromUtf8("gb_rus_ros"))
self.gb_rus_data_set = QtGui.QGroupBox(self.gb_rus_ros)
self.gb_rus_data_set.setGeometry(QtCore.QRect(10, 20, 1011, 321))
self.gb_rus_data_set.setStyleSheet(_fromUtf8("#gb_rus_data_set{\n"
"background-color: white;\n"
"text-align:center;}\n"
"#gb_rus_data_set::title {\n"
" subcontrol-origin: margin;\n"
" subcontrol-position: top center; /* position at the top center */\n"
" padding: 0 3px;\n"
" color: #FF895D;\n"
"}"))
self.gb_rus_data_set.setObjectName(_fromUtf8("gb_rus_data_set"))
self.rus_ros_gv_data = QtGui.QGraphicsView(self.gb_rus_data_set)
self.rus_ros_gv_data.setGeometry(QtCore.QRect(300, 20, 420, 270))
self.rus_ros_gv_data.setObjectName(_fromUtf8("rus_ros_gv_data"))
self.rus_ros_n_samples = QtGui.QLineEdit(self.gb_rus_data_set)
self.rus_ros_n_samples.setGeometry(QtCore.QRect(40, 70, 161, 20))
self.rus_ros_n_samples.setObjectName(_fromUtf8("rus_ros_n_samples"))
self.rus_ros_slider = QtGui.QSlider(self.gb_rus_data_set)
self.rus_ros_slider.setGeometry(QtCore.QRect(40, 120, 161, 22))
self.rus_ros_slider.setStyleSheet(_fromUtf8("#rus_ros_slider:groove:horizontall {\n"
" background: #FF895D;\n"
" position: absolute;\n"
" left: 1px; right: 1px;\n"
"}\n"
"#rus_ros_slider:handle:horizontall {\n"
" height: 10px;\n"
" background: #1B435D ;\n"
" margin: 0 4px; /* expand outside the groove */\n"
"}"))
self.rus_ros_slider.setOrientation(QtCore.Qt.Horizontal)
self.rus_ros_slider.setObjectName(_fromUtf8("rus_ros_slider"))
self.lbl_rus_ros_slider = QtGui.QLabel(self.gb_rus_data_set)
self.lbl_rus_ros_slider.setGeometry(QtCore.QRect(210, 125, 46, 13))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.lbl_rus_ros_slider.setFont(font)
self.lbl_rus_ros_slider.setObjectName(_fromUtf8("lbl_rus_ros_slider"))
self.label_38 = QtGui.QLabel(self.gb_rus_data_set)
self.label_38.setGeometry(QtCore.QRect(40, 50, 121, 16))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_38.setFont(font)
self.label_38.setObjectName(_fromUtf8("label_38"))
self.label_39 = QtGui.QLabel(self.gb_rus_data_set)
self.label_39.setGeometry(QtCore.QRect(40, 100, 121, 16))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_39.setFont(font)
self.label_39.setObjectName(_fromUtf8("label_39"))
self.rus_ros_pb_create_dataset = QtGui.QPushButton(self.gb_rus_data_set)
self.rus_ros_pb_create_dataset.setGeometry(QtCore.QRect(40, 170, 161, 23))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.rus_ros_pb_create_dataset.setFont(font)
self.rus_ros_pb_create_dataset.setObjectName(_fromUtf8("rus_ros_pb_create_dataset"))
self.rus_ros_gv_sonuc = QtGui.QGraphicsView(self.gb_rus_ros)
self.rus_ros_gv_sonuc.setGeometry(QtCore.QRect(310, 360, 420, 270))
self.rus_ros_gv_sonuc.setObjectName(_fromUtf8("rus_ros_gv_sonuc"))
self.radiobuton_rus = QtGui.QRadioButton(self.gb_rus_ros)
self.radiobuton_rus.setGeometry(QtCore.QRect(60, 420, 131, 17))
self.radiobuton_rus.setObjectName(_fromUtf8("radiobuton_rus"))
self.rus_ros_pb = QtGui.QPushButton(self.gb_rus_ros)
self.rus_ros_pb.setGeometry(QtCore.QRect(60, 510, 161, 23))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.rus_ros_pb.setFont(font)
self.rus_ros_pb.setObjectName(_fromUtf8("rus_ros_pb"))
self.radiobuton_ros = QtGui.QRadioButton(self.gb_rus_ros)
self.radiobuton_ros.setGeometry(QtCore.QRect(60, 460, 141, 17))
self.radiobuton_ros.setObjectName(_fromUtf8("radiobuton_ros"))
self.gb_knn_kume = QtGui.QGroupBox(Dialog)
self.gb_knn_kume.setGeometry(QtCore.QRect(200, 50, 0, 0))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.gb_knn_kume.setFont(font)
self.gb_knn_kume.setStyleSheet(_fromUtf8("#gb_knn_kume{\n"
"background-color: white;\n"
"text-align:center;}\n"
"#gb_knn_kume::title {\n"
" subcontrol-origin: margin;\n"
" subcontrol-position: top center; /* position at the top center */\n"
" padding: 0 3px;\n"
" color: #FF895D;\n"
"}\n"
"QGroupBox{\n"
"background-color: white;\n"
"text-align:center;}\n"
"QGroupBox::title {\n"
" subcontrol-origin: margin;\n"
" subcontrol-position: top center; /* position at the top center */\n"
" padding: 0 3px;\n"
" color: #FF895D;\n"
"}\n"
"QPushButton{\n"
"background-color: #FF895D;\n"
"color: #fff;\n"
"border-radius:1px;\n"
"}\n"
"QPushButton:hover{\n"
"background-color:#1B435D;\n"
"color: #D5EEFF;\n"
"}\n"
"QComboBox{\n"
"color:#FF895D;\n"
"background-color:#fff;\n"
"}\n"
"QTableWidget{\n"
"border:2px solid #FF895D;}"))
self.gb_knn_kume.setObjectName(_fromUtf8("gb_knn_kume"))
self.groupBox_2 = QtGui.QGroupBox(self.gb_knn_kume)
self.groupBox_2.setGeometry(QtCore.QRect(40, 50, 281, 181))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.groupBox_2.setFont(font)
self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
self.knn_cluster_max_range = QtGui.QLineEdit(self.groupBox_2)
self.knn_cluster_max_range.setGeometry(QtCore.QRect(120, 50, 113, 20))
font = QtGui.QFont()
font.setPointSize(10)
self.knn_cluster_max_range.setFont(font)
self.knn_cluster_max_range.setObjectName(_fromUtf8("knn_cluster_max_range"))
self.knn_cluster_count = QtGui.QLineEdit(self.groupBox_2)
self.knn_cluster_count.setGeometry(QtCore.QRect(120, 90, 113, 20))
font = QtGui.QFont()
font.setPointSize(10)
self.knn_cluster_count.setFont(font)
self.knn_cluster_count.setObjectName(_fromUtf8("knn_cluster_count"))
self.knn_cluster_pb_create_dataset = QtGui.QPushButton(self.groupBox_2)
self.knn_cluster_pb_create_dataset.setGeometry(QtCore.QRect(150, 120, 81, 31))
self.knn_cluster_pb_create_dataset.setObjectName(_fromUtf8("knn_cluster_pb_create_dataset"))
self.label_42 = QtGui.QLabel(self.groupBox_2)
self.label_42.setGeometry(QtCore.QRect(20, 50, 101, 16))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_42.setFont(font)
self.label_42.setObjectName(_fromUtf8("label_42"))
self.label_43 = QtGui.QLabel(self.groupBox_2)
self.label_43.setGeometry(QtCore.QRect(20, 90, 101, 16))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_43.setFont(font)
self.label_43.setObjectName(_fromUtf8("label_43"))
self.knn_cluster_data = QtGui.QGraphicsView(self.gb_knn_kume)
self.knn_cluster_data.setGeometry(QtCore.QRect(470, 50, 420, 270))
self.knn_cluster_data.setObjectName(_fromUtf8("knn_cluster_data"))
self.knn_cluster_result = QtGui.QGraphicsView(self.gb_knn_kume)
self.knn_cluster_result.setGeometry(QtCore.QRect(470, 370, 420, 270))
self.knn_cluster_result.setObjectName(_fromUtf8("knn_cluster_result"))
self.knn_pb_cluster = QtGui.QPushButton(self.gb_knn_kume)
self.knn_pb_cluster.setGeometry(QtCore.QRect(920, 100, 131, 51))
self.knn_pb_cluster.setObjectName(_fromUtf8("knn_pb_cluster"))
self.knn_cluster_cb = QtGui.QComboBox(self.gb_knn_kume)
self.knn_cluster_cb.setGeometry(QtCore.QRect(920, 60, 131, 22))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.knn_cluster_cb.setFont(font)
self.knn_cluster_cb.setObjectName(_fromUtf8("knn_cluster_cb"))
self.knn_cluster_cb.addItem(_fromUtf8(""))
self.knn_cluster_cb.addItem(_fromUtf8(""))
self.knn_cluster_cb.addItem(_fromUtf8(""))
self.label_44 = QtGui.QLabel(self.gb_knn_kume)
self.label_44.setGeometry(QtCore.QRect(470, 30, 151, 16))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_44.setFont(font)
self.label_44.setObjectName(_fromUtf8("label_44"))
self.label_45 = QtGui.QLabel(self.gb_knn_kume)
self.label_45.setGeometry(QtCore.QRect(480, 340, 131, 16))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_45.setFont(font)
self.label_45.setObjectName(_fromUtf8("label_45"))
self.knn_cluster_tbl = QtGui.QTableWidget(self.gb_knn_kume)
self.knn_cluster_tbl.setGeometry(QtCore.QRect(40, 270, 281, 381))
self.knn_cluster_tbl.setObjectName(_fromUtf8("knn_cluster_tbl"))
self.knn_cluster_tbl.setColumnCount(0)
self.knn_cluster_tbl.setRowCount(0)
self.label_46 = QtGui.QLabel(self.gb_knn_kume)
self.label_46.setGeometry(QtCore.QRect(40, 250, 161, 16))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_46.setFont(font)
self.label_46.setObjectName(_fromUtf8("label_46"))
self.gb_navie = QtGui.QGroupBox(Dialog)
self.gb_navie.setGeometry(QtCore.QRect(200, 50, 0, 0))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.gb_navie.setFont(font)
self.gb_navie.setStyleSheet(_fromUtf8("#gb_navie{\n"
"background-color: white;\n"
"text-align:center;}\n"
"#gb_navie::title {\n"
" subcontrol-origin: margin;\n"
" subcontrol-position: top center; /* position at the top center */\n"
" padding: 0 3px;\n"
" color: #FF895D;\n"
"}"))
self.gb_navie.setObjectName(_fromUtf8("gb_navie"))
self.gb_navie_create_dataset = QtGui.QGroupBox(self.gb_navie)
self.gb_navie_create_dataset.setGeometry(QtCore.QRect(10, 30, 361, 621))
self.gb_navie_create_dataset.setTitle(_fromUtf8(""))
self.gb_navie_create_dataset.setObjectName(_fromUtf8("gb_navie_create_dataset"))
self.tbl_navie_data_set = QtGui.QTableWidget(self.gb_navie_create_dataset)
self.tbl_navie_data_set.setGeometry(QtCore.QRect(20, 170, 301, 431))
font = QtGui.QFont()
font.setPointSize(12)
self.tbl_navie_data_set.setFont(font)
self.tbl_navie_data_set.setObjectName(_fromUtf8("tbl_navie_data_set"))
self.tbl_navie_data_set.setColumnCount(0)
self.tbl_navie_data_set.setRowCount(0)
self.groupBox_3 = QtGui.QGroupBox(self.gb_navie_create_dataset)
self.groupBox_3.setGeometry(QtCore.QRect(20, 10, 301, 151))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.groupBox_3.setFont(font)
self.groupBox_3.setObjectName(_fromUtf8("groupBox_3"))
self.le_navie_kelime = QtGui.QLineEdit(self.groupBox_3)
self.le_navie_kelime.setGeometry(QtCore.QRect(100, 30, 171, 20))
font = QtGui.QFont()
font.setPointSize(12)
self.le_navie_kelime.setFont(font)
self.le_navie_kelime.setObjectName(_fromUtf8("le_navie_kelime"))
self.label_59 = QtGui.QLabel(self.groupBox_3)
self.label_59.setGeometry(QtCore.QRect(20, 25, 61, 31))
font = QtGui.QFont()
font.setPointSize(11)
font.setBold(True)
font.setWeight(75)
self.label_59.setFont(font)
self.label_59.setObjectName(_fromUtf8("label_59"))
self.le_naive_kategori = QtGui.QLineEdit(self.groupBox_3)
self.le_naive_kategori.setGeometry(QtCore.QRect(100, 65, 171, 20))
font = QtGui.QFont()
font.setPointSize(12)
self.le_naive_kategori.setFont(font)
self.le_naive_kategori.setObjectName(_fromUtf8("le_naive_kategori"))
self.label_60 = QtGui.QLabel(self.groupBox_3)
self.label_60.setGeometry(QtCore.QRect(20, 60, 71, 31))
font = QtGui.QFont()
font.setPointSize(11)
font.setBold(True)
font.setWeight(75)
self.label_60.setFont(font)
self.label_60.setObjectName(_fromUtf8("label_60"))
self.pb_navie_veriekle = QtGui.QPushButton(self.groupBox_3)
self.pb_navie_veriekle.setGeometry(QtCore.QRect(180, 90, 91, 31))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.pb_navie_veriekle.setFont(font)
self.pb_navie_veriekle.setStyleSheet(_fromUtf8("QPushButton{\n"
"background-color: #FF895D;\n"
"color: #fff;\n"
"border-radius:1px;\n"
"}\n"
"QPushButton:hover{\n"
"background-color:#1B435D;\n"
"color: #D5EEFF;\n"
"}"))
self.pb_navie_veriekle.setObjectName(_fromUtf8("pb_navie_veriekle"))
self.groupBox_4 = QtGui.QGroupBox(self.gb_navie)
self.groupBox_4.setGeometry(QtCore.QRect(390, 20, 671, 631))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.groupBox_4.setFont(font)
self.groupBox_4.setObjectName(_fromUtf8("groupBox_4"))
self.label_61 = QtGui.QLabel(self.groupBox_4)
self.label_61.setGeometry(QtCore.QRect(10, 30, 151, 31))
font = QtGui.QFont()
font.setPointSize(11)
font.setBold(True)
font.setWeight(75)
self.label_61.setFont(font)
self.label_61.setObjectName(_fromUtf8("label_61"))
self.pb_navie_siniflandir = QtGui.QPushButton(self.groupBox_4)
self.pb_navie_siniflandir.setGeometry(QtCore.QRect(490, 390, 151, 41))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.pb_navie_siniflandir.setFont(font)
self.pb_navie_siniflandir.setStyleSheet(_fromUtf8("QPushButton{\n"
"background-color: #FF895D;\n"
"color: #fff;\n"
"border-radius:1px;\n"
"}\n"
"QPushButton:hover{\n"
"background-color:#1B435D;\n"
"color: #D5EEFF;\n"
"}"))
self.pb_navie_siniflandir.setObjectName(_fromUtf8("pb_navie_siniflandir"))
self.lbl_navie_sonuc = QtGui.QLabel(self.groupBox_4)
self.lbl_navie_sonuc.setGeometry(QtCore.QRect(160, 400, 141, 31))
font = QtGui.QFont()
font.setPointSize(20)
font.setBold(True)
font.setWeight(75)
self.lbl_navie_sonuc.setFont(font)
self.lbl_navie_sonuc.setText(_fromUtf8(""))
self.lbl_navie_sonuc.setObjectName(_fromUtf8("lbl_navie_sonuc"))
self.le_metin = QtGui.QPlainTextEdit(self.groupBox_4)
self.le_metin.setGeometry(QtCore.QRect(10, 60, 631, 321))
font = QtGui.QFont()
font.setPointSize(12)
self.le_metin.setFont(font)
self.le_metin.setObjectName(_fromUtf8("le_metin"))
self.lbl_navie_sonuc_2 = QtGui.QLabel(self.groupBox_4)
self.lbl_navie_sonuc_2.setGeometry(QtCore.QRect(20, 400, 141, 31))
font = QtGui.QFont()
font.setPointSize(20)
font.setBold(True)
font.setWeight(75)
self.lbl_navie_sonuc_2.setFont(font)
self.lbl_navie_sonuc_2.setObjectName(_fromUtf8("lbl_navie_sonuc_2"))
self.gb_parkinson = QtGui.QGroupBox(Dialog)
self.gb_parkinson.setGeometry(QtCore.QRect(200, 50, 0, 0))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.gb_parkinson.setFont(font)
self.gb_parkinson.setStyleSheet(_fromUtf8("#gb_parkinson{\n"
"background-color: white;\n"
"text-align:center;\n"
"background-attachment:scroll;}\n"
"#gb_parkinson::title {\n"
" subcontrol-origin: margin;\n"
" subcontrol-position: top center; /* position at the top center */\n"
" padding: 0 3px;\n"
" color: #FF895D;\n"
"}\n"
"QTableWidget {\n"
" selection-background-color: qlineargradient(x1: 0, y1: 0, x2: 0.5, y2: 0.5,\n"
" stop: 0 #FF92BB, stop: 1 white);\n"
" border:2px solid #FF895D;\n"
"}\n"
"\n"
"QTableWidget QTableCornerButton::section {\n"
" background: #FF895D;\n"
" border: 2px outset red;\n"
"}\n"
"QScrollArea{\n"
"background:white;}"))
self.gb_parkinson.setObjectName(_fromUtf8("gb_parkinson"))
self.scrollArea = QtGui.QScrollArea(self.gb_parkinson)
self.scrollArea.setGeometry(QtCore.QRect(0, 20, 1080, 2000))
self.scrollArea.setStyleSheet(_fromUtf8("QGroupBox{\n"
"background-color: white;\n"
"text-align:center;\n"
"background-attachment:scroll;}\n"
"QGroupBox::title {\n"
" subcontrol-origin: margin;\n"
" subcontrol-position: top center; /* position at the top center */\n"
" padding: 0 3px;\n"
" color: #FF895D;\n"
"}\n"
"QTableWidget {\n"
" selection-background-color: qlineargradient(x1: 0, y1: 0, x2: 0.5, y2: 0.5,\n"
" stop: 0 #FF92BB, stop: 1 white);\n"
" border:2px solid #FF895D;\n"
"}\n"
"\n"
"QTableWidget QTableCornerButton::section {\n"
" background: #FF895D;\n"
" border: 2px outset red;\n"
"}\n"
""))
self.scrollArea.setWidgetResizable(True)
self.scrollArea.setObjectName(_fromUtf8("scrollArea"))
self.scrollAreaWidgetContents = QtGui.QWidget()
self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 1078, 4000))
self.scrollAreaWidgetContents.setMinimumSize(QtCore.QSize(1078, 4000))
self.scrollAreaWidgetContents.setObjectName(_fromUtf8("scrollAreaWidgetContents"))
self.gb_sst = QtGui.QGroupBox(self.scrollAreaWidgetContents)
self.gb_sst.setGeometry(QtCore.QRect(0, 70, 1080, 575))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.gb_sst.setFont(font)
self.gb_sst.setObjectName(_fromUtf8("gb_sst"))
self.tbl_sst_x_train = QtGui.QTableWidget(self.gb_sst)
self.tbl_sst_x_train.setGeometry(QtCore.QRect(10, 60, 200, 500))
self.tbl_sst_x_train.setObjectName(_fromUtf8("tbl_sst_x_train"))
self.tbl_sst_x_train.setColumnCount(0)
self.tbl_sst_x_train.setRowCount(0)
self.label_7 = QtGui.QLabel(self.gb_sst)
self.label_7.setGeometry(QtCore.QRect(10, 40, 71, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_7.setFont(font)
self.label_7.setObjectName(_fromUtf8("label_7"))
self.tbl_sst_y_train = QtGui.QTableWidget(self.gb_sst)
self.tbl_sst_y_train.setGeometry(QtCore.QRect(230, 60, 100, 500))
self.tbl_sst_y_train.setObjectName(_fromUtf8("tbl_sst_y_train"))
self.tbl_sst_y_train.setColumnCount(0)
self.tbl_sst_y_train.setRowCount(0)
self.label_8 = QtGui.QLabel(self.gb_sst)
self.label_8.setGeometry(QtCore.QRect(230, 40, 71, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_8.setFont(font)
self.label_8.setObjectName(_fromUtf8("label_8"))
self.tbl_sst_x_test = QtGui.QTableWidget(self.gb_sst)
self.tbl_sst_x_test.setGeometry(QtCore.QRect(340, 60, 200, 500))
self.tbl_sst_x_test.setObjectName(_fromUtf8("tbl_sst_x_test"))
self.tbl_sst_x_test.setColumnCount(0)
self.tbl_sst_x_test.setRowCount(0)
self.label_9 = QtGui.QLabel(self.gb_sst)
self.label_9.setGeometry(QtCore.QRect(340, 40, 71, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_9.setFont(font)
self.label_9.setObjectName(_fromUtf8("label_9"))
self.tbl_sst_rf_cm = QtGui.QTableWidget(self.gb_sst)
self.tbl_sst_rf_cm.setGeometry(QtCore.QRect(570, 60, 200, 200))
self.tbl_sst_rf_cm.setObjectName(_fromUtf8("tbl_sst_rf_cm"))
self.tbl_sst_rf_cm.setColumnCount(0)
self.tbl_sst_rf_cm.setRowCount(0)
self.label_10 = QtGui.QLabel(self.gb_sst)
self.label_10.setGeometry(QtCore.QRect(570, 300, 191, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_10.setFont(font)
self.label_10.setObjectName(_fromUtf8("label_10"))
self.tbl_sst_gv_cm = QtGui.QTableWidget(self.gb_sst)
self.tbl_sst_gv_cm.setGeometry(QtCore.QRect(570, 320, 200, 200))
self.tbl_sst_gv_cm.setObjectName(_fromUtf8("tbl_sst_gv_cm"))
self.tbl_sst_gv_cm.setColumnCount(0)
self.tbl_sst_gv_cm.setRowCount(0)
self.label_11 = QtGui.QLabel(self.gb_sst)
self.label_11.setGeometry(QtCore.QRect(830, 40, 201, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_11.setFont(font)
self.label_11.setObjectName(_fromUtf8("label_11"))
self.label_12 = QtGui.QLabel(self.gb_sst)
self.label_12.setGeometry(QtCore.QRect(570, 40, 221, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_12.setFont(font)
self.label_12.setObjectName(_fromUtf8("label_12"))
self.label_13 = QtGui.QLabel(self.gb_sst)
self.label_13.setGeometry(QtCore.QRect(830, 300, 201, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_13.setFont(font)
self.label_13.setObjectName(_fromUtf8("label_13"))
self.lbl_sst_rf_as = QtGui.QLabel(self.gb_sst)
self.lbl_sst_rf_as.setGeometry(QtCore.QRect(830, 60, 201, 41))
font = QtGui.QFont()
font.setPointSize(20)
font.setBold(True)
font.setWeight(75)
self.lbl_sst_rf_as.setFont(font)
self.lbl_sst_rf_as.setText(_fromUtf8(""))
self.lbl_sst_rf_as.setObjectName(_fromUtf8("lbl_sst_rf_as"))
self.lbl_sst_gv_as = QtGui.QLabel(self.gb_sst)
self.lbl_sst_gv_as.setGeometry(QtCore.QRect(830, 330, 201, 41))
font = QtGui.QFont()
font.setPointSize(20)
font.setBold(True)
font.setWeight(75)
self.lbl_sst_gv_as.setFont(font)
self.lbl_sst_gv_as.setText(_fromUtf8(""))
self.lbl_sst_gv_as.setObjectName(_fromUtf8("lbl_sst_gv_as"))
self.gb_buttons = QtGui.QGroupBox(self.scrollAreaWidgetContents)
self.gb_buttons.setGeometry(QtCore.QRect(0, 0, 1080, 70))
self.gb_buttons.setContextMenuPolicy(QtCore.Qt.DefaultContextMenu)
self.gb_buttons.setObjectName(_fromUtf8("gb_buttons"))
self.pb_parkinson_veri_yukle = QtGui.QPushButton(self.gb_buttons)
self.pb_parkinson_veri_yukle.setGeometry(QtCore.QRect(30, 10, 48, 48))
self.pb_parkinson_veri_yukle.setCursor(QtGui.QCursor(QtCore.Qt.OpenHandCursor))
self.pb_parkinson_veri_yukle.setStyleSheet(_fromUtf8("#pb_parkinson_veri_yukle{\n"
"color: grey;\n"
" border-image: url(./icons/data_load.png) 3 10 3 10;\n"
" border-top: 3px transparent;\n"
" border-bottom: 3px transparent;\n"
" border-right: 10px transparent;\n"
" border-left: 10px transparent;}\n"
"\n"
"#pb_parkinson_veri_yukle:hover{\n"
"color: grey;\n"
" border-image: url(./icons/data_load_hover.png) 5 12 5 12;\n"
" border-top: 5px transparent;\n"
" border-bottom: 5px transparent;\n"
" border-right: 12px transparent;\n"
" border-left: 12px transparent;}\n"
""))
self.pb_parkinson_veri_yukle.setText(_fromUtf8(""))
self.pb_parkinson_veri_yukle.setObjectName(_fromUtf8("pb_parkinson_veri_yukle"))
self.pb_parkinson_class = QtGui.QPushButton(self.gb_buttons)
self.pb_parkinson_class.setGeometry(QtCore.QRect(120, 10, 48, 48))
self.pb_parkinson_class.setCursor(QtGui.QCursor(QtCore.Qt.OpenHandCursor))
self.pb_parkinson_class.setStyleSheet(_fromUtf8("#pb_parkinson_class{\n"
"color: grey;\n"
" border-image: url(./icons/class.png) 3 10 3 10;\n"
" border-top: 3px transparent;\n"
" border-bottom: 3px transparent;\n"
" border-right: 10px transparent;\n"
" border-left: 10px transparent;}\n"
"\n"
"#pb_parkinson_class:hover{\n"
"color: grey;\n"
" border-image: url(./icons/class_hover.png) 5 12 5 12;\n"
" border-top: 5px transparent;\n"
" border-bottom: 5px transparent;\n"
" border-right: 12px transparent;\n"
" border-left: 12px transparent;}\n"
""))
self.pb_parkinson_class.setText(_fromUtf8(""))
self.pb_parkinson_class.setObjectName(_fromUtf8("pb_parkinson_class"))
self.gb_dst = QtGui.QGroupBox(self.scrollAreaWidgetContents)
self.gb_dst.setGeometry(QtCore.QRect(0, 645, 1080, 575))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.gb_dst.setFont(font)
self.gb_dst.setObjectName(_fromUtf8("gb_dst"))
self.tbl_dst_rf_cm = QtGui.QTableWidget(self.gb_dst)
self.tbl_dst_rf_cm.setGeometry(QtCore.QRect(570, 60, 200, 200))
self.tbl_dst_rf_cm.setObjectName(_fromUtf8("tbl_dst_rf_cm"))
self.tbl_dst_rf_cm.setColumnCount(0)
self.tbl_dst_rf_cm.setRowCount(0)
self.tbl_dst_x_train = QtGui.QTableWidget(self.gb_dst)
self.tbl_dst_x_train.setGeometry(QtCore.QRect(10, 60, 200, 500))
self.tbl_dst_x_train.setObjectName(_fromUtf8("tbl_dst_x_train"))
self.tbl_dst_x_train.setColumnCount(0)
self.tbl_dst_x_train.setRowCount(0)
self.label_14 = QtGui.QLabel(self.gb_dst)
self.label_14.setGeometry(QtCore.QRect(830, 40, 201, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_14.setFont(font)
self.label_14.setObjectName(_fromUtf8("label_14"))
self.label_15 = QtGui.QLabel(self.gb_dst)
self.label_15.setGeometry(QtCore.QRect(230, 40, 71, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_15.setFont(font)
self.label_15.setObjectName(_fromUtf8("label_15"))
self.lbl_dst_rf_as = QtGui.QLabel(self.gb_dst)
self.lbl_dst_rf_as.setGeometry(QtCore.QRect(830, 60, 201, 41))
font = QtGui.QFont()
font.setPointSize(20)
font.setBold(True)
font.setWeight(75)
self.lbl_dst_rf_as.setFont(font)
self.lbl_dst_rf_as.setText(_fromUtf8(""))
self.lbl_dst_rf_as.setObjectName(_fromUtf8("lbl_dst_rf_as"))
self.lbl_dst_gv_as = QtGui.QLabel(self.gb_dst)
self.lbl_dst_gv_as.setGeometry(QtCore.QRect(830, 330, 201, 41))
font = QtGui.QFont()
font.setPointSize(20)
font.setBold(True)
font.setWeight(75)
self.lbl_dst_gv_as.setFont(font)
self.lbl_dst_gv_as.setText(_fromUtf8(""))
self.lbl_dst_gv_as.setObjectName(_fromUtf8("lbl_dst_gv_as"))
self.tbl_dst_y_train = QtGui.QTableWidget(self.gb_dst)
self.tbl_dst_y_train.setGeometry(QtCore.QRect(230, 60, 100, 500))
self.tbl_dst_y_train.setObjectName(_fromUtf8("tbl_dst_y_train"))
self.tbl_dst_y_train.setColumnCount(0)
self.tbl_dst_y_train.setRowCount(0)
self.label_16 = QtGui.QLabel(self.gb_dst)
self.label_16.setGeometry(QtCore.QRect(830, 300, 201, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_16.setFont(font)
self.label_16.setObjectName(_fromUtf8("label_16"))
self.tbl_dst_x_test = QtGui.QTableWidget(self.gb_dst)
self.tbl_dst_x_test.setGeometry(QtCore.QRect(340, 60, 200, 500))
self.tbl_dst_x_test.setObjectName(_fromUtf8("tbl_dst_x_test"))
self.tbl_dst_x_test.setColumnCount(0)
self.tbl_dst_x_test.setRowCount(0)
self.label_17 = QtGui.QLabel(self.gb_dst)
self.label_17.setGeometry(QtCore.QRect(340, 40, 71, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_17.setFont(font)
self.label_17.setObjectName(_fromUtf8("label_17"))
self.tbl_dst_gv_cm = QtGui.QTableWidget(self.gb_dst)
self.tbl_dst_gv_cm.setGeometry(QtCore.QRect(570, 320, 200, 200))
self.tbl_dst_gv_cm.setObjectName(_fromUtf8("tbl_dst_gv_cm"))
self.tbl_dst_gv_cm.setColumnCount(0)
self.tbl_dst_gv_cm.setRowCount(0)
self.label_18 = QtGui.QLabel(self.gb_dst)
self.label_18.setGeometry(QtCore.QRect(10, 40, 71, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_18.setFont(font)
self.label_18.setObjectName(_fromUtf8("label_18"))
self.label_19 = QtGui.QLabel(self.gb_dst)
self.label_19.setGeometry(QtCore.QRect(570, 40, 221, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_19.setFont(font)
self.label_19.setObjectName(_fromUtf8("label_19"))
self.label_20 = QtGui.QLabel(self.gb_dst)
self.label_20.setGeometry(QtCore.QRect(570, 300, 191, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_20.setFont(font)
self.label_20.setObjectName(_fromUtf8("label_20"))
self.gb_stcp = QtGui.QGroupBox(self.scrollAreaWidgetContents)
self.gb_stcp.setGeometry(QtCore.QRect(0, 1220, 1080, 575))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.gb_stcp.setFont(font)
self.gb_stcp.setObjectName(_fromUtf8("gb_stcp"))
self.tbl_stcp_x_train = QtGui.QTableWidget(self.gb_stcp)
self.tbl_stcp_x_train.setGeometry(QtCore.QRect(10, 60, 200, 500))
self.tbl_stcp_x_train.setObjectName(_fromUtf8("tbl_stcp_x_train"))
self.tbl_stcp_x_train.setColumnCount(0)
self.tbl_stcp_x_train.setRowCount(0)
self.label_21 = QtGui.QLabel(self.gb_stcp)
self.label_21.setGeometry(QtCore.QRect(10, 40, 71, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_21.setFont(font)
self.label_21.setObjectName(_fromUtf8("label_21"))
self.tbl_stcp_y_train = QtGui.QTableWidget(self.gb_stcp)
self.tbl_stcp_y_train.setGeometry(QtCore.QRect(230, 60, 100, 500))
self.tbl_stcp_y_train.setObjectName(_fromUtf8("tbl_stcp_y_train"))
self.tbl_stcp_y_train.setColumnCount(0)
self.tbl_stcp_y_train.setRowCount(0)
self.label_22 = QtGui.QLabel(self.gb_stcp)
self.label_22.setGeometry(QtCore.QRect(230, 40, 71, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_22.setFont(font)
self.label_22.setObjectName(_fromUtf8("label_22"))
self.tbl_stcp_x_test = QtGui.QTableWidget(self.gb_stcp)
self.tbl_stcp_x_test.setGeometry(QtCore.QRect(340, 60, 200, 500))
self.tbl_stcp_x_test.setObjectName(_fromUtf8("tbl_stcp_x_test"))
self.tbl_stcp_x_test.setColumnCount(0)
self.tbl_stcp_x_test.setRowCount(0)
self.label_23 = QtGui.QLabel(self.gb_stcp)
self.label_23.setGeometry(QtCore.QRect(340, 40, 71, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_23.setFont(font)
self.label_23.setObjectName(_fromUtf8("label_23"))
self.tbl_stcp_rf_cm = QtGui.QTableWidget(self.gb_stcp)
self.tbl_stcp_rf_cm.setGeometry(QtCore.QRect(570, 60, 200, 200))
self.tbl_stcp_rf_cm.setObjectName(_fromUtf8("tbl_stcp_rf_cm"))
self.tbl_stcp_rf_cm.setColumnCount(0)
self.tbl_stcp_rf_cm.setRowCount(0)
self.label_24 = QtGui.QLabel(self.gb_stcp)
self.label_24.setGeometry(QtCore.QRect(570, 300, 191, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_24.setFont(font)
self.label_24.setObjectName(_fromUtf8("label_24"))
self.tbl_stcp_gv_cm = QtGui.QTableWidget(self.gb_stcp)
self.tbl_stcp_gv_cm.setGeometry(QtCore.QRect(570, 320, 200, 200))
self.tbl_stcp_gv_cm.setObjectName(_fromUtf8("tbl_stcp_gv_cm"))
self.tbl_stcp_gv_cm.setColumnCount(0)
self.tbl_stcp_gv_cm.setRowCount(0)
self.label_25 = QtGui.QLabel(self.gb_stcp)
self.label_25.setGeometry(QtCore.QRect(830, 40, 201, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_25.setFont(font)
self.label_25.setObjectName(_fromUtf8("label_25"))
self.label_26 = QtGui.QLabel(self.gb_stcp)
self.label_26.setGeometry(QtCore.QRect(570, 40, 221, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_26.setFont(font)
self.label_26.setObjectName(_fromUtf8("label_26"))
self.label_27 = QtGui.QLabel(self.gb_stcp)
self.label_27.setGeometry(QtCore.QRect(830, 300, 201, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_27.setFont(font)
self.label_27.setObjectName(_fromUtf8("label_27"))
self.lbl_stcp_rf_as = QtGui.QLabel(self.gb_stcp)
self.lbl_stcp_rf_as.setGeometry(QtCore.QRect(830, 60, 201, 41))
font = QtGui.QFont()
font.setPointSize(20)
font.setBold(True)
font.setWeight(75)
self.lbl_stcp_rf_as.setFont(font)
self.lbl_stcp_rf_as.setText(_fromUtf8(""))
self.lbl_stcp_rf_as.setObjectName(_fromUtf8("lbl_stcp_rf_as"))
self.lbl_stcp_gv_as = QtGui.QLabel(self.gb_stcp)
self.lbl_stcp_gv_as.setGeometry(QtCore.QRect(830, 330, 201, 41))
font = QtGui.QFont()
font.setPointSize(20)
font.setBold(True)
font.setWeight(75)
self.lbl_stcp_gv_as.setFont(font)
self.lbl_stcp_gv_as.setText(_fromUtf8(""))
self.lbl_stcp_gv_as.setObjectName(_fromUtf8("lbl_stcp_gv_as"))
self.gb_all_data = QtGui.QGroupBox(self.scrollAreaWidgetContents)
self.gb_all_data.setGeometry(QtCore.QRect(0, 1860, 1080, 575))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.gb_all_data.setFont(font)
self.gb_all_data.setObjectName(_fromUtf8("gb_all_data"))
self.tbl_all_data = QtGui.QTableWidget(self.gb_all_data)
self.tbl_all_data.setGeometry(QtCore.QRect(10, 60, 200, 500))
self.tbl_all_data.setObjectName(_fromUtf8("tbl_all_data"))
self.tbl_all_data.setColumnCount(0)
self.tbl_all_data.setRowCount(0)
self.label_28 = QtGui.QLabel(self.gb_all_data)
self.label_28.setGeometry(QtCore.QRect(10, 40, 71, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_28.setFont(font)
self.label_28.setObjectName(_fromUtf8("label_28"))
self.tbl_x_train = QtGui.QTableWidget(self.gb_all_data)
self.tbl_x_train.setGeometry(QtCore.QRect(230, 60, 200, 500))
self.tbl_x_train.setObjectName(_fromUtf8("tbl_x_train"))
self.tbl_x_train.setColumnCount(0)
self.tbl_x_train.setRowCount(0)
self.label_29 = QtGui.QLabel(self.gb_all_data)
self.label_29.setGeometry(QtCore.QRect(230, 40, 71, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_29.setFont(font)
self.label_29.setObjectName(_fromUtf8("label_29"))
self.tbl_x_test = QtGui.QTableWidget(self.gb_all_data)
self.tbl_x_test.setGeometry(QtCore.QRect(440, 60, 200, 500))
self.tbl_x_test.setObjectName(_fromUtf8("tbl_x_test"))
self.tbl_x_test.setColumnCount(0)
self.tbl_x_test.setRowCount(0)
self.label_30 = QtGui.QLabel(self.gb_all_data)
self.label_30.setGeometry(QtCore.QRect(440, 40, 71, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_30.setFont(font)
self.label_30.setObjectName(_fromUtf8("label_30"))
self.tbl_all_data_rf_cm = QtGui.QTableWidget(self.gb_all_data)
self.tbl_all_data_rf_cm.setGeometry(QtCore.QRect(680, 60, 200, 200))
self.tbl_all_data_rf_cm.setObjectName(_fromUtf8("tbl_all_data_rf_cm"))
self.tbl_all_data_rf_cm.setColumnCount(0)
self.tbl_all_data_rf_cm.setRowCount(0)
self.label_31 = QtGui.QLabel(self.gb_all_data)
self.label_31.setGeometry(QtCore.QRect(680, 290, 111, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_31.setFont(font)
self.label_31.setObjectName(_fromUtf8("label_31"))
self.tbl_all_data_cd_cm = QtGui.QTableWidget(self.gb_all_data)
self.tbl_all_data_cd_cm.setGeometry(QtCore.QRect(680, 310, 200, 200))
self.tbl_all_data_cd_cm.setObjectName(_fromUtf8("tbl_all_data_cd_cm"))
self.tbl_all_data_cd_cm.setColumnCount(0)
self.tbl_all_data_cd_cm.setRowCount(0)
self.label_32 = QtGui.QLabel(self.gb_all_data)
self.label_32.setGeometry(QtCore.QRect(910, 40, 111, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_32.setFont(font)
self.label_32.setObjectName(_fromUtf8("label_32"))
self.label_33 = QtGui.QLabel(self.gb_all_data)
self.label_33.setGeometry(QtCore.QRect(680, 40, 181, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_33.setFont(font)
self.label_33.setObjectName(_fromUtf8("label_33"))
self.label_34 = QtGui.QLabel(self.gb_all_data)
self.label_34.setGeometry(QtCore.QRect(920, 300, 111, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_34.setFont(font)
self.label_34.setObjectName(_fromUtf8("label_34"))
self.lbl_all_data_rf_as = QtGui.QLabel(self.gb_all_data)
self.lbl_all_data_rf_as.setGeometry(QtCore.QRect(910, 60, 201, 41))
font = QtGui.QFont()
font.setPointSize(20)
font.setBold(True)
font.setWeight(75)
self.lbl_all_data_rf_as.setFont(font)
self.lbl_all_data_rf_as.setText(_fromUtf8(""))
self.lbl_all_data_rf_as.setObjectName(_fromUtf8("lbl_all_data_rf_as"))
self.lbl_all_data_gv_as = QtGui.QLabel(self.gb_all_data)
self.lbl_all_data_gv_as.setGeometry(QtCore.QRect(920, 320, 131, 41))
font = QtGui.QFont()
font.setPointSize(20)
font.setBold(True)
font.setWeight(75)
self.lbl_all_data_gv_as.setFont(font)
self.lbl_all_data_gv_as.setText(_fromUtf8(""))
self.lbl_all_data_gv_as.setObjectName(_fromUtf8("lbl_all_data_gv_as"))
self.label_35 = QtGui.QLabel(self.gb_all_data)
self.label_35.setGeometry(QtCore.QRect(820, 20, 221, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_35.setFont(font)
self.label_35.setObjectName(_fromUtf8("label_35"))
self.label_36 = QtGui.QLabel(self.gb_all_data)
self.label_36.setGeometry(QtCore.QRect(820, 270, 111, 16))
font = QtGui.QFont()
font.setPointSize(9)
font.setBold(True)
font.setWeight(75)
self.label_36.setFont(font)
self.label_36.setObjectName(_fromUtf8("label_36"))
self.gb_parkinson_bottom_menu = QtGui.QGroupBox(self.scrollAreaWidgetContents)
self.gb_parkinson_bottom_menu.setGeometry(QtCore.QRect(0, 1790, 1080, 70))
self.gb_parkinson_bottom_menu.setContextMenuPolicy(QtCore.Qt.DefaultContextMenu)
self.gb_parkinson_bottom_menu.setObjectName(_fromUtf8("gb_parkinson_bottom_menu"))
self.pb_parkinson_all_data = QtGui.QPushButton(self.gb_parkinson_bottom_menu)
self.pb_parkinson_all_data.setGeometry(QtCore.QRect(30, 10, 48, 48))
self.pb_parkinson_all_data.setCursor(QtGui.QCursor(QtCore.Qt.OpenHandCursor))
self.pb_parkinson_all_data.setStyleSheet(_fromUtf8("#pb_parkinson_all_data{\n"
"color: grey;\n"
" border-image: url(./icons/data_load.png) 3 10 3 10;\n"
" border-top: 3px transparent;\n"
" border-bottom: 3px transparent;\n"
" border-right: 10px transparent;\n"
" border-left: 10px transparent;}\n"
"\n"
"#pb_parkinson_all_data:hover{\n"
"color: grey;\n"
" border-image: url(./icons/data_load_hover.png) 5 12 5 12;\n"
" border-top: 5px transparent;\n"
" border-bottom: 5px transparent;\n"
" border-right: 12px transparent;\n"
" border-left: 12px transparent;}\n"
""))
self.pb_parkinson_all_data.setText(_fromUtf8(""))
self.pb_parkinson_all_data.setObjectName(_fromUtf8("pb_parkinson_all_data"))
self.pb_parkinson_class_2 = QtGui.QPushButton(self.gb_parkinson_bottom_menu)
self.pb_parkinson_class_2.setGeometry(QtCore.QRect(170, 10, 0, 0))
self.pb_parkinson_class_2.setCursor(QtGui.QCursor(QtCore.Qt.OpenHandCursor))
self.pb_parkinson_class_2.setStyleSheet(_fromUtf8("#pb_parkinson_class_2{\n"
"color: grey;\n"
" border-image: url(./icons/class.png) 3 10 3 10;\n"
" border-top: 3px transparent;\n"
" border-bottom: 3px transparent;\n"
" border-right: 10px transparent;\n"
" border-left: 10px transparent;}\n"
"\n"
"#pb_parkinson_class_2:hover{\n"
"color: grey;\n"
" border-image: url(./icons/class_hover.png) 5 12 5 12;\n"
" border-top: 5px transparent;\n"
" border-bottom: 5px transparent;\n"
" border-right: 12px transparent;\n"
" border-left: 12px transparent;}\n"
""))
self.pb_parkinson_class_2.setText(_fromUtf8(""))
self.pb_parkinson_class_2.setObjectName(_fromUtf8("pb_parkinson_class_2"))
self.pb_parkinson_split = QtGui.QPushButton(self.gb_parkinson_bottom_menu)
self.pb_parkinson_split.setGeometry(QtCore.QRect(100, 10, 0, 0))
self.pb_parkinson_split.setCursor(QtGui.QCursor(QtCore.Qt.OpenHandCursor))
self.pb_parkinson_split.setStyleSheet(_fromUtf8("#pb_parkinson_split{\n"
"color: grey;\n"
" border-image: url(./icons/split.png) 3 10 3 10;\n"
" border-top: 3px transparent;\n"
" border-bottom: 3px transparent;\n"
" border-right: 10px transparent;\n"
" border-left: 10px transparent;}\n"
"\n"
"#pb_parkinson_split:hover{\n"
"color: grey;\n"
" border-image: url(./icons/split_hover.png) 5 12 5 12;\n"
" border-top: 5px transparent;\n"
" border-bottom: 5px transparent;\n"
" border-right: 12px transparent;\n"
" border-left: 12px transparent;}\n"
""))
self.pb_parkinson_split.setText(_fromUtf8(""))
self.pb_parkinson_split.setObjectName(_fromUtf8("pb_parkinson_split"))
self.pb_parkinson_reload_split = QtGui.QPushButton(self.gb_parkinson_bottom_menu)
self.pb_parkinson_reload_split.setGeometry(QtCore.QRect(240, 10, 0, 0))
self.pb_parkinson_reload_split.setCursor(QtGui.QCursor(QtCore.Qt.OpenHandCursor))
self.pb_parkinson_reload_split.setStyleSheet(_fromUtf8("#pb_parkinson_reload_split{\n"
"color: grey;\n"
" border-image: url(./icons/reload.png) 3 10 3 10;\n"
" border-top: 3px transparent;\n"
" border-bottom: 3px transparent;\n"
" border-right: 10px transparent;\n"
" border-left: 10px transparent;}\n"
"\n"
"#pb_parkinson_reload_split:hover{\n"
"color: grey;\n"
" border-image: url(./icons/reload_hover.png) 5 12 5 12;\n"
" border-top: 5px transparent;\n"
" border-bottom: 5px transparent;\n"
" border-right: 12px transparent;\n"
" border-left: 12px transparent;}\n"
""))
self.pb_parkinson_reload_split.setText(_fromUtf8(""))
self.pb_parkinson_reload_split.setObjectName(_fromUtf8("pb_parkinson_reload_split"))
self.scrollArea.setWidget(self.scrollAreaWidgetContents)
self.gb_normalizasyon = QtGui.QGroupBox(Dialog)
self.gb_normalizasyon.setGeometry(QtCore.QRect(200, 50, 0, 0))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.gb_normalizasyon.setFont(font)
self.gb_normalizasyon.setObjectName(_fromUtf8("gb_normalizasyon"))
self.pb_normalizasyon_dataload = QtGui.QPushButton(self.gb_normalizasyon)
self.pb_normalizasyon_dataload.setGeometry(QtCore.QRect(330, 50, 48, 48))
self.pb_normalizasyon_dataload.setCursor(QtGui.QCursor(QtCore.Qt.OpenHandCursor))
self.pb_normalizasyon_dataload.setStyleSheet(_fromUtf8("#pb_normalizasyon_dataload{\n"
"color: grey;\n"
" border-image: url(./icons/data_load.png) 3 10 3 10;\n"
" border-top: 3px transparent;\n"
" border-bottom: 3px transparent;\n"
" border-right: 10px transparent;\n"
" border-left: 10px transparent;}\n"
"\n"
"#pb_normalizasyon_dataload:hover{\n"
"color: grey;\n"
" border-image: url(./icons/data_load_hover.png) 5 12 5 12;\n"
" border-top: 5px transparent;\n"
" border-bottom: 5px transparent;\n"
" border-right: 12px transparent;\n"
" border-left: 12px transparent;}\n"
""))
self.pb_normalizasyon_dataload.setText(_fromUtf8(""))
self.pb_normalizasyon_dataload.setObjectName(_fromUtf8("pb_normalizasyon_dataload"))
self.tbl_norm_data = QtGui.QTableWidget(self.gb_normalizasyon)
self.tbl_norm_data.setGeometry(QtCore.QRect(30, 100, 351, 481))
self.tbl_norm_data.setObjectName(_fromUtf8("tbl_norm_data"))
self.tbl_norm_data.setColumnCount(0)
self.tbl_norm_data.setRowCount(0)
self.tbl_norm_result = QtGui.QTableWidget(self.gb_normalizasyon)
self.tbl_norm_result.setGeometry(QtCore.QRect(650, 100, 351, 481))
self.tbl_norm_result.setObjectName(_fromUtf8("tbl_norm_result"))
self.tbl_norm_result.setColumnCount(0)
self.tbl_norm_result.setRowCount(0)
self.pb_normalize = QtGui.QPushButton(self.gb_normalizasyon)
self.pb_normalize.setGeometry(QtCore.QRect(450, 320, 121, 41))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.pb_normalize.setFont(font)
self.pb_normalize.setStyleSheet(_fromUtf8("QPushButton{\n"
"background-color: #FF895D;\n"
"color: #fff;\n"
"border-radius:1px;\n"
"}\n"
"QPushButton:hover{\n"
"background-color:#1B435D;\n"
"color: #D5EEFF;\n"
"}"))
self.pb_normalize.setObjectName(_fromUtf8("pb_normalize"))
self.gb_norm_options = QtGui.QGroupBox(self.gb_normalizasyon)
self.gb_norm_options.setGeometry(QtCore.QRect(420, 160, 181, 151))
self.gb_norm_options.setTitle(_fromUtf8(""))
self.gb_norm_options.setObjectName(_fromUtf8("gb_norm_options"))
self.rb_norm_minmax = QtGui.QRadioButton(self.gb_norm_options)
self.rb_norm_minmax.setGeometry(QtCore.QRect(40, 30, 82, 17))
self.rb_norm_minmax.setObjectName(_fromUtf8("rb_norm_minmax"))
self.rb_norm_zscore = QtGui.QRadioButton(self.gb_norm_options)
self.rb_norm_zscore.setGeometry(QtCore.QRect(40, 70, 82, 17))
self.rb_norm_zscore.setObjectName(_fromUtf8("rb_norm_zscore"))
self.rb_norm_median = QtGui.QRadioButton(self.gb_norm_options)
self.rb_norm_median.setGeometry(QtCore.QRect(40, 110, 82, 17))
self.rb_norm_median.setObjectName(_fromUtf8("rb_norm_median"))
self.pb_normalizasyon_datasave = QtGui.QPushButton(self.gb_normalizasyon)
self.pb_normalizasyon_datasave.setGeometry(QtCore.QRect(950, 50, 48, 48))
self.pb_normalizasyon_datasave.setCursor(QtGui.QCursor(QtCore.Qt.OpenHandCursor))
self.pb_normalizasyon_datasave.setStyleSheet(_fromUtf8("#pb_normalizasyon_datasave{\n"
"color: grey;\n"
" border-image: url(./icons/save.png) 3 10 3 10;\n"
" border-top: 3px transparent;\n"
" border-bottom: 3px transparent;\n"
" border-right: 10px transparent;\n"
" border-left: 10px transparent;}\n"
"\n"
"#pb_normalizasyon_datasave:hover{\n"
"color: grey;\n"
" border-image: url(./icons/save_hover.png) 5 12 5 12;\n"
" border-top: 5px transparent;\n"
" border-bottom: 5px transparent;\n"
" border-right: 12px transparent;\n"
" border-left: 12px transparent;}\n"
""))
self.pb_normalizasyon_datasave.setText(_fromUtf8(""))
self.pb_normalizasyon_datasave.setObjectName(_fromUtf8("pb_normalizasyon_datasave"))
self.gb_randomforest = QtGui.QGroupBox(Dialog)
self.gb_randomforest.setGeometry(QtCore.QRect(200, 50, 0, 0))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.gb_randomforest.setFont(font)
self.gb_randomforest.setObjectName(_fromUtf8("gb_randomforest"))
self.pb_random_forest_data_load = QtGui.QPushButton(self.gb_randomforest)
self.pb_random_forest_data_load.setGeometry(QtCore.QRect(260, 40, 48, 48))
self.pb_random_forest_data_load.setCursor(QtGui.QCursor(QtCore.Qt.OpenHandCursor))
self.pb_random_forest_data_load.setStyleSheet(_fromUtf8("#pb_random_forest_data_load{\n"
"color: grey;\n"
" border-image: url(./icons/data_load.png) 3 10 3 10;\n"
" border-top: 3px transparent;\n"
" border-bottom: 3px transparent;\n"
" border-right: 10px transparent;\n"
" border-left: 10px transparent;}\n"
"\n"
"#pb_random_forest_data_load:hover{\n"
"color: grey;\n"
" border-image: url(./icons/data_load_hover.png) 5 12 5 12;\n"
" border-top: 5px transparent;\n"
" border-bottom: 5px transparent;\n"
" border-right: 12px transparent;\n"
" border-left: 12px transparent;}\n"
""))
self.pb_random_forest_data_load.setText(_fromUtf8(""))
self.pb_random_forest_data_load.setObjectName(_fromUtf8("pb_random_forest_data_load"))
self.tbl_random_forest_data = QtGui.QTableWidget(self.gb_randomforest)
self.tbl_random_forest_data.setGeometry(QtCore.QRect(30, 90, 281, 501))
self.tbl_random_forest_data.setObjectName(_fromUtf8("tbl_random_forest_data"))
self.tbl_random_forest_data.setColumnCount(0)
self.tbl_random_forest_data.setRowCount(0)
self.tbl_random_forest_x_train = QtGui.QTableWidget(self.gb_randomforest)
self.tbl_random_forest_x_train.setGeometry(QtCore.QRect(330, 90, 231, 501))
self.tbl_random_forest_x_train.setObjectName(_fromUtf8("tbl_random_forest_x_train"))
self.tbl_random_forest_x_train.setColumnCount(0)
self.tbl_random_forest_x_train.setRowCount(0)
self.tbl_random_forest_x_test = QtGui.QTableWidget(self.gb_randomforest)
self.tbl_random_forest_x_test.setGeometry(QtCore.QRect(580, 90, 231, 501))
self.tbl_random_forest_x_test.setObjectName(_fromUtf8("tbl_random_forest_x_test"))
self.tbl_random_forest_x_test.setColumnCount(0)
self.tbl_random_forest_x_test.setRowCount(0)
self.tbl_random_forest_confusionm = QtGui.QTableWidget(self.gb_randomforest)
self.tbl_random_forest_confusionm.setGeometry(QtCore.QRect(840, 390, 221, 191))
self.tbl_random_forest_confusionm.setObjectName(_fromUtf8("tbl_random_forest_confusionm"))
self.tbl_random_forest_confusionm.setColumnCount(0)
self.tbl_random_forest_confusionm.setRowCount(0)
self.label_47 = QtGui.QLabel(self.gb_randomforest)
self.label_47.setGeometry(QtCore.QRect(840, 370, 131, 16))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_47.setFont(font)
self.label_47.setObjectName(_fromUtf8("label_47"))
self.label_48 = QtGui.QLabel(self.gb_randomforest)
self.label_48.setGeometry(QtCore.QRect(840, 320, 131, 16))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_48.setFont(font)
self.label_48.setObjectName(_fromUtf8("label_48"))
self.lbl_random_forest_accuraryscore = QtGui.QLabel(self.gb_randomforest)
self.lbl_random_forest_accuraryscore.setGeometry(QtCore.QRect(970, 320, 81, 16))
font = QtGui.QFont()
font.setPointSize(15)
font.setBold(True)
font.setWeight(75)
self.lbl_random_forest_accuraryscore.setFont(font)
self.lbl_random_forest_accuraryscore.setText(_fromUtf8(""))
self.lbl_random_forest_accuraryscore.setObjectName(_fromUtf8("lbl_random_forest_accuraryscore"))
self.pb_random_forest = QtGui.QPushButton(self.gb_randomforest)
self.pb_random_forest.setGeometry(QtCore.QRect(840, 250, 201, 41))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.pb_random_forest.setFont(font)
self.pb_random_forest.setStyleSheet(_fromUtf8("QPushButton{\n"
"background-color: #FF895D;\n"
"color: #fff;\n"
"border-radius:1px;\n"
"}\n"
"QPushButton:hover{\n"
"background-color:#1B435D;\n"
"color: #D5EEFF;\n"
"}"))
self.pb_random_forest.setObjectName(_fromUtf8("pb_random_forest"))
self.pb_random_forest_modelsave = QtGui.QPushButton(self.gb_randomforest)
self.pb_random_forest_modelsave.setGeometry(QtCore.QRect(1010, 590, 48, 48))
self.pb_random_forest_modelsave.setCursor(QtGui.QCursor(QtCore.Qt.OpenHandCursor))
self.pb_random_forest_modelsave.setStyleSheet(_fromUtf8("#pb_random_forest_modelsave{\n"
"color: grey;\n"
" border-image: url(./icons/save.png) 3 10 3 10;\n"
" border-top: 3px transparent;\n"
" border-bottom: 3px transparent;\n"
" border-right: 10px transparent;\n"
" border-left: 10px transparent;}\n"
"\n"
"#pb_random_forest_modelsave:hover{\n"
"color: grey;\n"
" border-image: url(./icons/save_hover.png) 5 12 5 12;\n"
" border-top: 5px transparent;\n"
" border-bottom: 5px transparent;\n"
" border-right: 12px transparent;\n"
" border-left: 12px transparent;}\n"
""))
self.pb_random_forest_modelsave.setText(_fromUtf8(""))
self.pb_random_forest_modelsave.setObjectName(_fromUtf8("pb_random_forest_modelsave"))
self.label_49 = QtGui.QLabel(self.gb_randomforest)
self.label_49.setGeometry(QtCore.QRect(30, 70, 131, 16))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_49.setFont(font)
self.label_49.setObjectName(_fromUtf8("label_49"))
self.label_50 = QtGui.QLabel(self.gb_randomforest)
self.label_50.setGeometry(QtCore.QRect(330, 70, 131, 16))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_50.setFont(font)
self.label_50.setObjectName(_fromUtf8("label_50"))
self.label_51 = QtGui.QLabel(self.gb_randomforest)
self.label_51.setGeometry(QtCore.QRect(580, 70, 131, 16))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_51.setFont(font)
self.label_51.setObjectName(_fromUtf8("label_51"))
self.pb_random_forest_tandt = QtGui.QPushButton(self.gb_randomforest)
self.pb_random_forest_tandt.setGeometry(QtCore.QRect(840, 192, 201, 41))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.pb_random_forest_tandt.setFont(font)
self.pb_random_forest_tandt.setStyleSheet(_fromUtf8("QPushButton{\n"
"background-color: #FF895D;\n"
"color: #fff;\n"
"border-radius:1px;\n"
"}\n"
"QPushButton:hover{\n"
"background-color:#1B435D;\n"
"color: #D5EEFF;\n"
"}"))
self.pb_random_forest_tandt.setObjectName(_fromUtf8("pb_random_forest_tandt"))
self.random_forest_slider = QtGui.QSlider(self.gb_randomforest)
self.random_forest_slider.setGeometry(QtCore.QRect(840, 150, 161, 22))
self.random_forest_slider.setStyleSheet(_fromUtf8("#random_forest_slider:groove:horizontall {\n"
" background: #FF895D;\n"
" position: absolute;\n"
" left: 1px; right: 1px;\n"
"}\n"
"#random_forest_slider:handle:horizontall {\n"
" height: 10px;\n"
" background: #1B435D ;\n"
" margin: 0 4px; /* expand outside the groove */\n"
"}"))
self.random_forest_slider.setOrientation(QtCore.Qt.Horizontal)
self.random_forest_slider.setObjectName(_fromUtf8("random_forest_slider"))
self.lbl_random_forest_slider = QtGui.QLabel(self.gb_randomforest)
self.lbl_random_forest_slider.setGeometry(QtCore.QRect(1010, 150, 46, 21))
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.lbl_random_forest_slider.setFont(font)
self.lbl_random_forest_slider.setObjectName(_fromUtf8("lbl_random_forest_slider"))
self.label_52 = QtGui.QLabel(self.gb_randomforest)
self.label_52.setGeometry(QtCore.QRect(840, 120, 161, 16))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_52.setFont(font)
self.label_52.setObjectName(_fromUtf8("label_52"))
self.gb_train_test = QtGui.QGroupBox(Dialog)
self.gb_train_test.setGeometry(QtCore.QRect(200, 50, 0, 0))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.gb_train_test.setFont(font)
self.gb_train_test.setObjectName(_fromUtf8("gb_train_test"))
self.pb_train_and_test = QtGui.QPushButton(self.gb_train_test)
self.pb_train_and_test.setGeometry(QtCore.QRect(30, 572, 201, 41))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.pb_train_and_test.setFont(font)
self.pb_train_and_test.setStyleSheet(_fromUtf8("QPushButton{\n"
"background-color: #FF895D;\n"
"color: #fff;\n"
"border-radius:1px;\n"
"}\n"
"QPushButton:hover{\n"
"background-color:#1B435D;\n"
"color: #D5EEFF;\n"
"}"))
self.pb_train_and_test.setObjectName(_fromUtf8("pb_train_and_test"))
self.tbl_train_test_data = QtGui.QTableWidget(self.gb_train_test)
self.tbl_train_test_data.setGeometry(QtCore.QRect(20, 60, 281, 421))
self.tbl_train_test_data.setObjectName(_fromUtf8("tbl_train_test_data"))
self.tbl_train_test_data.setColumnCount(0)
self.tbl_train_test_data.setRowCount(0)
self.tbl_train_test_x_train = QtGui.QTableWidget(self.gb_train_test)
self.tbl_train_test_x_train.setGeometry(QtCore.QRect(320, 60, 250, 600))
self.tbl_train_test_x_train.setObjectName(_fromUtf8("tbl_train_test_x_train"))
self.tbl_train_test_x_train.setColumnCount(0)
self.tbl_train_test_x_train.setRowCount(0)
self.lbl_train_test = QtGui.QLabel(self.gb_train_test)
self.lbl_train_test.setGeometry(QtCore.QRect(200, 530, 46, 21))
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.lbl_train_test.setFont(font)
self.lbl_train_test.setObjectName(_fromUtf8("lbl_train_test"))
self.label_53 = QtGui.QLabel(self.gb_train_test)
self.label_53.setGeometry(QtCore.QRect(20, 40, 131, 16))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_53.setFont(font)
self.label_53.setObjectName(_fromUtf8("label_53"))
self.test_and_train_slider = QtGui.QSlider(self.gb_train_test)
self.test_and_train_slider.setGeometry(QtCore.QRect(30, 530, 161, 22))
self.test_and_train_slider.setStyleSheet(_fromUtf8("#test_and_train_slider:groove:horizontall {\n"
" background: #FF895D;\n"
" position: absolute;\n"
" left: 1px; right: 1px;\n"
"}\n"
"#test_and_train_slider:handle:horizontall {\n"
" height: 10px;\n"
" background: #1B435D ;\n"
" margin: 0 4px; /* expand outside the groove */\n"
"}"))
self.test_and_train_slider.setPageStep(5)
self.test_and_train_slider.setOrientation(QtCore.Qt.Horizontal)
self.test_and_train_slider.setObjectName(_fromUtf8("test_and_train_slider"))
self.label_54 = QtGui.QLabel(self.gb_train_test)
self.label_54.setGeometry(QtCore.QRect(30, 500, 161, 16))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_54.setFont(font)
self.label_54.setObjectName(_fromUtf8("label_54"))
self.pb_train_test_data_load = QtGui.QPushButton(self.gb_train_test)
self.pb_train_test_data_load.setGeometry(QtCore.QRect(250, 10, 48, 48))
self.pb_train_test_data_load.setCursor(QtGui.QCursor(QtCore.Qt.OpenHandCursor))
self.pb_train_test_data_load.setStyleSheet(_fromUtf8("#pb_train_test_data_load{\n"
"color: grey;\n"
" border-image: url(./icons/data_load.png) 3 10 3 10;\n"
" border-top: 3px transparent;\n"
" border-bottom: 3px transparent;\n"
" border-right: 10px transparent;\n"
" border-left: 10px transparent;}\n"
"\n"
"#pb_train_test_data_load:hover{\n"
"color: grey;\n"
" border-image: url(./icons/data_load_hover.png) 5 12 5 12;\n"
" border-top: 5px transparent;\n"
" border-bottom: 5px transparent;\n"
" border-right: 12px transparent;\n"
" border-left: 12px transparent;}\n"
""))
self.pb_train_test_data_load.setText(_fromUtf8(""))
self.pb_train_test_data_load.setObjectName(_fromUtf8("pb_train_test_data_load"))
self.label_55 = QtGui.QLabel(self.gb_train_test)
self.label_55.setGeometry(QtCore.QRect(320, 40, 131, 16))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_55.setFont(font)
self.label_55.setObjectName(_fromUtf8("label_55"))
self.tbl_train_test_y_train = QtGui.QTableWidget(self.gb_train_test)
self.tbl_train_test_y_train.setGeometry(QtCore.QRect(580, 60, 100, 600))
self.tbl_train_test_y_train.setObjectName(_fromUtf8("tbl_train_test_y_train"))
self.tbl_train_test_y_train.setColumnCount(0)
self.tbl_train_test_y_train.setRowCount(0)
self.label_56 = QtGui.QLabel(self.gb_train_test)
self.label_56.setGeometry(QtCore.QRect(580, 40, 91, 16))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_56.setFont(font)
self.label_56.setObjectName(_fromUtf8("label_56"))
self.label_57 = QtGui.QLabel(self.gb_train_test)
self.label_57.setGeometry(QtCore.QRect(690, 40, 131, 16))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_57.setFont(font)
self.label_57.setObjectName(_fromUtf8("label_57"))
self.label_58 = QtGui.QLabel(self.gb_train_test)
self.label_58.setGeometry(QtCore.QRect(950, 40, 91, 16))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_58.setFont(font)
self.label_58.setObjectName(_fromUtf8("label_58"))
self.tbl_train_test_x_test = QtGui.QTableWidget(self.gb_train_test)
self.tbl_train_test_x_test.setGeometry(QtCore.QRect(690, 60, 250, 600))
self.tbl_train_test_x_test.setObjectName(_fromUtf8("tbl_train_test_x_test"))
self.tbl_train_test_x_test.setColumnCount(0)
self.tbl_train_test_x_test.setRowCount(0)
self.tbl_train_test_y_test = QtGui.QTableWidget(self.gb_train_test)
self.tbl_train_test_y_test.setGeometry(QtCore.QRect(950, 60, 100, 600))
self.tbl_train_test_y_test.setObjectName(_fromUtf8("tbl_train_test_y_test"))
self.tbl_train_test_y_test.setColumnCount(0)
self.tbl_train_test_y_test.setRowCount(0)
self.retranslateUi(Dialog)
QtCore.QMetaObject.connectSlotsByName(Dialog)
    def retranslateUi(self, Dialog):
        """Install all user-visible strings on the dialog's widgets.

        Generated by pyuic4 from a Qt Designer .ui file; every string is
        routed through _translate so Qt's translation machinery can localize
        it. Labels are a mix of Turkish and English. NOTE(review): if the
        .ui file is still the source of truth, regenerate instead of
        hand-editing this method.
        """
        Dialog.setWindowTitle(_translate("Dialog", "134410010 Bedirhan Sağlam Makine Öğrenmesi Ödev", None))
        # Main menu buttons (one per feature page).
        self.pb_main_menu_1.setText(_translate("Dialog", "KNN ile Sınıflandırma", None))
        self.pb_main_menu_2.setText(_translate("Dialog", "RUS Ve ROS Method", None))
        self.pb_main_menu_3.setText(_translate("Dialog", "K-Means ile Kümeleme", None))
        self.pb_main_menu_4.setText(_translate("Dialog", "KNN ile Kümeleme", None))
        self.pb_main_menu_5.setText(_translate("Dialog", "Navie Bayes", None))
        self.pb_main_menu_6.setText(_translate("Dialog", "Normalizasyon", None))
        self.pb_main_menu_7.setText(_translate("Dialog", "Random Forest", None))
        self.pb_main_menu_8.setText(_translate("Dialog", "Train and Test Split", None))
        self.pb_main_menu_9.setText(_translate("Dialog", "Parkinson", None))
        # KNN classification page.
        self.gb_knn_sinif.setTitle(_translate("Dialog", "KNN İLE SINIFLANDIRMA", None))
        self.label.setText(_translate("Dialog", "Veri Seti", None))
        self.label_2.setText(_translate("Dialog", "Sonuç :", None))
        self.label_3.setText(_translate("Dialog", "K değerini giriniz :", None))
        self.label_5.setText(_translate("Dialog", "y değerini giriniz :", None))
        self.label_4.setText(_translate("Dialog", "x değerini giriniz :", None))
        self.t1_pb_kumele.setText(_translate("Dialog", "SINIFLANDIR", None))
        self.label_6.setText(_translate("Dialog", "Girilen Yeni Nokta", None))
        # K-Means clustering page.
        self.gb_k_means.setTitle(_translate("Dialog", "K-Means ile Kümeleme", None))
        self.t2_pb_kmeans.setText(_translate("Dialog", "KÜMELE", None))
        self.t2_pb_dataload.setText(_translate("Dialog", "Verileri Yükle", None))
        self.label_37.setText(_translate("Dialog", "Veri Seti :", None))
        self.groupBox.setTitle(_translate("Dialog", "Verisetine Yeni Eleman Ekle", None))
        self.label_40.setText(_translate("Dialog", "X :", None))
        self.label_41.setText(_translate("Dialog", "Y :", None))
        self.kmeans_etiket.setItemText(0, _translate("Dialog", "c1", None))
        self.kmeans_etiket.setItemText(1, _translate("Dialog", "c2", None))
        self.kmeans_pb_ekle.setText(_translate("Dialog", "Ekle", None))
        # RUS/ROS resampling page.
        self.gb_rus_ros.setTitle(_translate("Dialog", "RUS ve ROS Method", None))
        self.gb_rus_data_set.setTitle(_translate("Dialog", "Dataset Oluştur", None))
        self.lbl_rus_ros_slider.setText(_translate("Dialog", "0", None))
        self.label_38.setText(_translate("Dialog", "Örnek Sayısı :", None))
        self.label_39.setText(_translate("Dialog", "Yüzde Dağılımı :", None))
        self.rus_ros_pb_create_dataset.setText(_translate("Dialog", "Veri Seti Oluştur", None))
        self.radiobuton_rus.setText(_translate("Dialog", "RUS", None))
        self.rus_ros_pb.setText(_translate("Dialog", "Uygula", None))
        self.radiobuton_ros.setText(_translate("Dialog", "ROS", None))
        # KNN clustering page.
        self.gb_knn_kume.setTitle(_translate("Dialog", "KNN ile Kümeleme", None))
        self.groupBox_2.setTitle(_translate("Dialog", "Veri Seti Oluştur", None))
        self.knn_cluster_pb_create_dataset.setText(_translate("Dialog", "Tamam", None))
        self.label_42.setText(_translate("Dialog", "Üst Sınır :", None))
        self.label_43.setText(_translate("Dialog", "Eleman sayısı :", None))
        self.knn_pb_cluster.setText(_translate("Dialog", "KÜMELE", None))
        self.knn_cluster_cb.setItemText(0, _translate("Dialog", "Öklid", None))
        self.knn_cluster_cb.setItemText(1, _translate("Dialog", "Manhattan", None))
        self.knn_cluster_cb.setItemText(2, _translate("Dialog", "Minkowski", None))
        self.label_44.setText(_translate("Dialog", "VERİ SETİ GRAFİĞİ :", None))
        self.label_45.setText(_translate("Dialog", "SONUÇ GRAFİĞİ :", None))
        self.label_46.setText(_translate("Dialog", "VERİ SETİ :", None))
        # Naive Bayes page ("Navie" spelling is preserved from the .ui file).
        self.gb_navie.setTitle(_translate("Dialog", "NAVİE BAYES", None))
        self.groupBox_3.setTitle(_translate("Dialog", "Yeni Kelime veya Kategori Ekle", None))
        self.label_59.setText(_translate("Dialog", "Kelime :", None))
        self.label_60.setText(_translate("Dialog", "Kategori :", None))
        self.pb_navie_veriekle.setText(_translate("Dialog", "Ekle", None))
        self.groupBox_4.setTitle(_translate("Dialog", "Metin Sınıflandırma", None))
        self.label_61.setText(_translate("Dialog", "Metin Giriniz :", None))
        self.pb_navie_siniflandir.setText(_translate("Dialog", "Sınıflandır", None))
        self.lbl_navie_sonuc_2.setText(_translate("Dialog", "Kategori :", None))
        # Parkinson page: SST / DST / STCP / ALL DATA result groups.
        self.gb_parkinson.setTitle(_translate("Dialog", "Parkinson", None))
        self.gb_sst.setTitle(_translate("Dialog", "SST", None))
        self.label_7.setText(_translate("Dialog", "X_Train :", None))
        self.label_8.setText(_translate("Dialog", "y_Train :", None))
        self.label_9.setText(_translate("Dialog", "X_Test :", None))
        self.label_10.setText(_translate("Dialog", "Decision Tree Confusion Matrix", None))
        self.label_11.setText(_translate("Dialog", "Random Forest Accuracy Score :", None))
        self.label_12.setText(_translate("Dialog", "Random Forest Confusion Matrix :", None))
        self.label_13.setText(_translate("Dialog", "Decision Tree Accuracy Score :", None))
        self.gb_buttons.setTitle(_translate("Dialog", "Top Menu", None))
        self.pb_parkinson_veri_yukle.setToolTip(_translate("Dialog", "Verileri Yükle", None))
        self.pb_parkinson_class.setToolTip(_translate("Dialog", "Verileri Sınıflandır", None))
        self.gb_dst.setTitle(_translate("Dialog", "DST", None))
        self.label_14.setText(_translate("Dialog", "Random Forest Accuracy Score :", None))
        self.label_15.setText(_translate("Dialog", "y_Train :", None))
        self.label_16.setText(_translate("Dialog", "Decision Tree Accuracy Score :", None))
        self.label_17.setText(_translate("Dialog", "X_Test :", None))
        self.label_18.setText(_translate("Dialog", "X_Train :", None))
        self.label_19.setText(_translate("Dialog", "Random Forest Confusion Matrix :", None))
        self.label_20.setText(_translate("Dialog", "Decision Tree Confusion Matrix", None))
        self.gb_stcp.setTitle(_translate("Dialog", "STCP", None))
        self.label_21.setText(_translate("Dialog", "X_Train :", None))
        self.label_22.setText(_translate("Dialog", "y_Train :", None))
        self.label_23.setText(_translate("Dialog", "X_Test :", None))
        self.label_24.setText(_translate("Dialog", "Decision Tree Confusion Matrix", None))
        self.label_25.setText(_translate("Dialog", "Random Forest Accuracy Score :", None))
        self.label_26.setText(_translate("Dialog", "Random Forest Confusion Matrix :", None))
        self.label_27.setText(_translate("Dialog", "Decision Tree Accuracy Score :", None))
        self.gb_all_data.setTitle(_translate("Dialog", "ALL DATA", None))
        self.label_28.setText(_translate("Dialog", "All Data", None))
        self.label_29.setText(_translate("Dialog", "X_Train :", None))
        self.label_30.setText(_translate("Dialog", "X_Test :", None))
        self.label_31.setText(_translate("Dialog", "Confusion Matrix :", None))
        self.label_32.setText(_translate("Dialog", "Accuracy Score :", None))
        self.label_33.setText(_translate("Dialog", "Confusion Matrix :", None))
        self.label_34.setText(_translate("Dialog", "Accuracy Score :", None))
        self.label_35.setText(_translate("Dialog", "Random Forest", None))
        self.label_36.setText(_translate("Dialog", "Decision Tree", None))
        self.gb_parkinson_bottom_menu.setTitle(_translate("Dialog", "Bottom Menu", None))
        self.pb_parkinson_all_data.setToolTip(_translate("Dialog", "Verileri Yükle", None))
        self.pb_parkinson_class_2.setToolTip(_translate("Dialog", "Verileri Sınıflandır", None))
        self.pb_parkinson_split.setToolTip(_translate("Dialog", "Verileri Ayır", None))
        self.pb_parkinson_reload_split.setToolTip(_translate("Dialog", "Yeniden Sınıflandır", None))
        # Normalization page.
        self.gb_normalizasyon.setTitle(_translate("Dialog", "Normalizasyon", None))
        self.pb_normalizasyon_dataload.setToolTip(_translate("Dialog", "Verileri Dosyadan Yükle", None))
        self.pb_normalize.setText(_translate("Dialog", "Uygula", None))
        self.rb_norm_minmax.setText(_translate("Dialog", "Min Max", None))
        self.rb_norm_zscore.setText(_translate("Dialog", "Z Score", None))
        self.rb_norm_median.setText(_translate("Dialog", "Medyan", None))
        self.pb_normalizasyon_datasave.setToolTip(_translate("Dialog", "Kaydet", None))
        # Random Forest page.
        self.gb_randomforest.setTitle(_translate("Dialog", "Random Forest", None))
        self.pb_random_forest_data_load.setToolTip(_translate("Dialog", "Verileri Dosyadan Yükle", None))
        self.label_47.setText(_translate("Dialog", "CONFUSİON MATRİX :", None))
        self.label_48.setText(_translate("Dialog", "ACCURARY SCORE :", None))
        self.pb_random_forest.setText(_translate("Dialog", "RANDOM FOREST", None))
        self.pb_random_forest_modelsave.setToolTip(_translate("Dialog", "Kaydet", None))
        self.label_49.setText(_translate("Dialog", "DATA:", None))
        self.label_50.setText(_translate("Dialog", "X Train:", None))
        self.label_51.setText(_translate("Dialog", "X Test:", None))
        self.pb_random_forest_tandt.setText(_translate("Dialog", "TRAİN AND TEST", None))
        self.lbl_random_forest_slider.setText(_translate("Dialog", "0", None))
        self.label_52.setText(_translate("Dialog", "TEST YÜZDESİ :", None))
        # Train/test split page.
        self.gb_train_test.setTitle(_translate("Dialog", "Train and Test Split", None))
        self.pb_train_and_test.setText(_translate("Dialog", "TRAİN AND TEST", None))
        self.lbl_train_test.setText(_translate("Dialog", "0", None))
        self.label_53.setText(_translate("Dialog", "DATA:", None))
        self.label_54.setText(_translate("Dialog", "TEST YÜZDESİ :", None))
        self.pb_train_test_data_load.setToolTip(_translate("Dialog", "Verileri Dosyadan Yükle", None))
        self.label_55.setText(_translate("Dialog", "X TRAİN :", None))
        self.label_56.setText(_translate("Dialog", "Y TRAİN :", None))
        self.label_57.setText(_translate("Dialog", "X TEST :", None))
        self.label_58.setText(_translate("Dialog", "Y TEST :", None))
|
bedirhansaglam/PythonMachineLearning | PythonMachineLearning/knn.py | # -*- coding: utf-8 -*-
"""
Created on Tue Dec 19 19:50:49 2017
@author: Bedirhan
"""
import matplotlib.pyplot as plt
def dataload_knn():
    """Return the hard-coded KNN demo dataset and save a scatter plot of it.

    Each sample is a tuple ``(x, y, label)`` where the label is either
    ``"kotu"`` (plotted as blue triangles) or ``"iyi"`` (red circles).

    Side effect: writes the plot to ``./resource/knn_data_set.png``
    (the directory must already exist).

    Returns:
        list[tuple[int, int, str]]: the ten fixed samples.
    """
    # List literal replaces the original run of ten .append() calls.
    veriListesi = [
        (2, 4, "kotu"),
        (3, 6, "iyi"),
        (4, 10, "kotu"),
        (3, 4, "iyi"),
        (5, 8, "kotu"),
        (6, 3, "iyi"),
        (7, 9, "iyi"),
        (9, 7, "kotu"),
        (11, 7, "kotu"),
        (10, 2, "kotu"),
    ]
    for x, y, sinif in veriListesi:
        # Marker style per class: blue triangles for "kotu", red circles otherwise.
        renk = 'b^' if sinif == "kotu" else 'ro'
        plt.plot(x, y, renk, markersize=10)
    plt.axis([0, 30, 0, 30])
    plt.title("Veri Seti")
    plt.savefig("./resource/knn_data_set.png")
    return veriListesi
bedirhansaglam/PythonMachineLearning | PythonMachineLearning/dialog_form.py | # -*- coding: utf-8 -*-
"""
Created on Wed Nov 29 18:12:45 2017
@author: Bedirhan
"""
import sys
from PyQt4 import QtGui
from Main import MainWindow
def main():
    """Create the Qt application, show the main window, and run the event loop."""
    application = QtGui.QApplication(sys.argv)
    window = MainWindow()
    window.show()
    return application.exec_()


if __name__ == "__main__":
    main()
bedirhansaglam/PythonMachineLearning | PythonMachineLearning/ParkinsonDataSet.py | # -*- coding: utf-8 -*-
"""
Created on Tue Dec 26 21:29:46 2017
@author: Bedirhan
"""
import os
import numpy as np
def saveData(filename, data):
    """Write *data* (a 2-D sequence of strings) to *filename* as CSV rows.

    Each row is joined with commas and terminated with a newline.

    Args:
        filename: destination path (overwritten).
        data: 2-D sequence of string fields.
    """
    db = np.array(data)
    # 'with' guarantees the buffer is flushed and the handle closed
    # (the original opened the file and never closed it).
    with open(filename, 'w') as f:
        for row in db:
            # equivalent to writing value+"," for every column but the last,
            # and value+"\n" for the last one
            f.write(",".join(row) + "\n")
def readDataFile(filename):
    """Read a comma-separated file into a 2-D numpy array of strings.

    Note: fields keep exactly what ``str.split(",")`` produced, so the last
    column of each line still carries its trailing newline — callers rely on
    that, so it is preserved here.

    Returns:
        numpy.ndarray: one row per line, one element per field.
    """
    rows = []
    # 'with' closes the handle (the original leaked it)
    with open(filename) as f:
        for line in f:
            rows.append(line.split(","))
    return np.array(rows)
def allData():
    """Load every Parkinson sample from both dataset folders.

    hw_dataset rows are labelled from their folder name (0 = control,
    1 = parkinson); new_dataset rows are all labelled 1.  For each
    ';'-separated row, columns 5 and 6 are dropped and the label is appended
    in place of column 6.

    Returns:
        list[list]: parsed rows (string fields plus the label).
    """
    Data = []
    # hw_dataset: the label comes from the folder name.
    work_dir = "./data/parkinson/hw_dataset"
    for folder_name in os.listdir(work_dir):
        folder = os.listdir(work_dir + "/" + folder_name)
        # NOTE(review): 'deger' stays unset if a folder is named neither
        # "control" nor "parkinson" — original code had the same assumption.
        if folder_name == "control":
            deger = 0
        elif folder_name == "parkinson":
            deger = 1
        for file_name in folder:
            # 'with' closes each sample file (the original leaked every handle)
            with open(work_dir + "/" + folder_name + "/" + file_name) as f:
                for row in f:
                    currentline = row.split(";")
                    temp = []
                    for i, column_value in enumerate(currentline):
                        if i != 5 and i != 6:
                            temp.append(column_value)
                        if i == 6:
                            temp.append(deger)
                    Data.append(temp)
    # new_dataset: every row is a parkinson sample (label 1).
    work_dir = "./data/parkinson/new_dataset"
    for folder_name in os.listdir(work_dir):
        folder = os.listdir(work_dir + "/" + folder_name)
        for file_name in folder:
            with open(work_dir + "/" + folder_name + "/" + file_name) as f:
                for row in f:
                    currentline = row.split(";")
                    temp = []
                    for i, column_value in enumerate(currentline):
                        if i != 5 and i != 6:
                            temp.append(column_value)
                        if i == 6:
                            temp.append(1)
                    Data.append(temp)
    return Data
def SST_Data():
    """Load hw_dataset rows whose test-id column (index 6) equals "0".

    Per the function name this is presumably the Static Spiral Test — TODO
    confirm against the dataset docs.  Label is 0 for the 'control' folder,
    1 for 'parkinson'; columns 5 and 6 are dropped and the label appended.

    Returns:
        list[list]: parsed rows (string fields plus the int label).
    """
    Data = []
    work_dir = "./data/parkinson/hw_dataset"
    for folder_name in os.listdir(work_dir):
        folder = os.listdir(work_dir + "/" + folder_name)
        if folder_name == "control":
            deger = 0
        elif folder_name == "parkinson":
            deger = 1
        for file_name in folder:
            # 'with' closes the sample file (the original leaked the handle)
            with open(work_dir + "/" + folder_name + "/" + file_name) as f:
                for row in f:
                    currentline = row.split(";")
                    temp = []
                    # keep only rows of test "0"
                    if currentline[6] == "0\n":
                        for i, column_value in enumerate(currentline):
                            if i != 5 and i != 6:
                                temp.append(column_value)
                            if i == 6:
                                temp.append(deger)
                        Data.append(temp)
    return Data
def DST_Data():
    """Load hw_dataset rows whose test-id column (index 6) equals "1".

    Per the function name this is presumably the Dynamic Spiral Test — TODO
    confirm.  Label is 0 for 'control', 1 for 'parkinson'; columns 5 and 6
    are dropped and the label appended.

    Returns:
        list[list]: parsed rows (string fields plus the int label).
    """
    Data = []
    work_dir = "./data/parkinson/hw_dataset"
    for folder_name in os.listdir(work_dir):
        folder = os.listdir(work_dir + "/" + folder_name)
        if folder_name == "control":
            deger = 0
        elif folder_name == "parkinson":
            deger = 1
        for file_name in folder:
            # 'with' closes the sample file (the original leaked the handle)
            with open(work_dir + "/" + folder_name + "/" + file_name) as f:
                for row in f:
                    currentline = row.split(";")
                    temp = []
                    # keep only rows of test "1"
                    if currentline[6] == "1\n":
                        for i, column_value in enumerate(currentline):
                            if i != 5 and i != 6:
                                temp.append(column_value)
                            if i == 6:
                                temp.append(deger)
                        Data.append(temp)
    return Data
def STCP_Data():
    """Load hw_dataset rows whose test-id column (index 6) equals "2".

    Label is 0 for 'control', 1 for 'parkinson'; columns 5 and 6 are dropped
    and the label appended.

    Returns:
        list[list]: parsed rows (string fields plus the int label).
    """
    Data = []
    work_dir = "./data/parkinson/hw_dataset"
    for folder_name in os.listdir(work_dir):
        folder = os.listdir(work_dir + "/" + folder_name)
        if folder_name == "control":
            deger = 0
        elif folder_name == "parkinson":
            deger = 1
        for file_name in folder:
            # 'with' closes the sample file (the original leaked the handle)
            with open(work_dir + "/" + folder_name + "/" + file_name) as f:
                for row in f:
                    currentline = row.split(";")
                    temp = []
                    # keep only rows of test "2"
                    if currentline[6] == "2\n":
                        for i, column_value in enumerate(currentline):
                            if i != 5 and i != 6:
                                temp.append(column_value)
                            if i == 6:
                                temp.append(deger)
                        Data.append(temp)
    return Data
def test_sst():
    """Load new_dataset rows with test id "0"; every row is labelled 1.

    Columns 5 and 6 are dropped; the constant label 1 is appended in place
    of column 6.

    Returns:
        list[list]: parsed rows (string fields plus the int label 1).
    """
    Data = []
    work_dir = "./data/parkinson/new_dataset"
    for folder_name in os.listdir(work_dir):
        folder = os.listdir(work_dir + "/" + folder_name)
        for file_name in folder:
            # 'with' closes the sample file (the original leaked the handle)
            with open(work_dir + "/" + folder_name + "/" + file_name) as f:
                for row in f:
                    currentline = row.split(";")
                    temp = []
                    if currentline[6] == "0\n":
                        for i, column_value in enumerate(currentline):
                            if i != 5 and i != 6:
                                temp.append(column_value)
                            if i == 6:
                                temp.append(1)
                        Data.append(temp)
    return Data
def test_dst():
    """Load new_dataset rows with test id "1"; every row is labelled 1.

    Columns 5 and 6 are dropped; the constant label 1 is appended in place
    of column 6.

    Returns:
        list[list]: parsed rows (string fields plus the int label 1).
    """
    Data = []
    work_dir = "./data/parkinson/new_dataset"
    for folder_name in os.listdir(work_dir):
        folder = os.listdir(work_dir + "/" + folder_name)
        for file_name in folder:
            # 'with' closes the sample file (the original leaked the handle)
            with open(work_dir + "/" + folder_name + "/" + file_name) as f:
                for row in f:
                    currentline = row.split(";")
                    temp = []
                    if currentline[6] == "1\n":
                        for i, column_value in enumerate(currentline):
                            if i != 5 and i != 6:
                                temp.append(column_value)
                            if i == 6:
                                temp.append(1)
                        Data.append(temp)
    return Data
def test_stcp():
    """Load new_dataset rows with test id "2"; every row is labelled 1.

    Columns 5 and 6 are dropped; the constant label 1 is appended in place
    of column 6.

    Returns:
        list[list]: parsed rows (string fields plus the int label 1).
    """
    Data = []
    work_dir = "./data/parkinson/new_dataset"
    for folder_name in os.listdir(work_dir):
        folder = os.listdir(work_dir + "/" + folder_name)
        for file_name in folder:
            # 'with' closes the sample file (the original leaked the handle)
            with open(work_dir + "/" + folder_name + "/" + file_name) as f:
                for row in f:
                    currentline = row.split(";")
                    temp = []
                    if currentline[6] == "2\n":
                        for i, column_value in enumerate(currentline):
                            if i != 5 and i != 6:
                                temp.append(column_value)
                            if i == 6:
                                temp.append(1)
                        Data.append(temp)
    return Data
bedirhansaglam/PythonMachineLearning | PythonMachineLearning/kmeans.py | # -*- coding: utf-8 -*-
from math import sqrt
from math import pow
from matplotlib import pyplot as plt
#rastgele etiketlenmiş verisetimiz mevcut
def data():
    """Return the fixed demo data set for 2-means: (x, y, initial_cluster) tuples."""
    points = [
        (4, 2, "c1"), (6, 4, "c2"), (5, 1, "c2"), (10, 6, "c1"),
        (11, 8, "c2"), (12, 10, "c2"), (9, 6, "c2"), (12, 7, "c1"),
        (15, 12, "c1"), (26, 7, "c2"), (3, 5, "c2"), (8, 9, "c2"),
        (1, 1, "c2"), (7, 8, "c1"), (1, 25, "c2"), (4, 22, "c2"),
    ]
    return points
def plot(liste):
    """Scatter-plot the labelled points ("c1" blue, others red) and save the
    figure to ./resource/kmeans_ilk.png."""
    plt.cla()
    for x, y, cluster in liste:
        marker = 'bo' if cluster == "c1" else 'ro'
        plt.plot(x, y, marker)
    plt.title("Veri Seti Ilk hali")
    plt.savefig("./resource/kmeans_ilk.png")
def hesapla(liste):
    """One iteration of 2-means clustering, recursing until assignments stabilise.

    Each element of *liste* is ``(x, y, cluster)`` with cluster "c1" or "c2".
    The two centroids are recomputed from the current labels, every point is
    reassigned to the nearer centroid (Euclidean distance), and if any label
    changed the function recurses with the relabelled list.  Once stable, the
    final clustering is plotted and saved to ./resource/kmeans_son.png.

    Returns nothing; the result is only the saved plot.
    NOTE(review): assumes both clusters are non-empty — an empty cluster
    would make the centroid division raise ZeroDivisionError.
    """
    # print("iterasyon")
    veriListesi=liste
    # per-cluster coordinate sums and member counts
    c1x=0
    c2x=0
    c1s=0
    c2s=0
    c1y=0
    c2y=0
    for m in veriListesi:
        if m[2]=="c1":
            c1s=c1s+1
            c1x=c1x+m[0]
            c1y=c1y+m[1]
        else :
            c2s=c2s+1
            c2x=c2x+m[0]
            c2y=c2y+m[1]
    # centroids of the two clusters (each stored as a one-element list)
    c1merkez=[]
    c2merkez=[]
    c1merkez.append((c1x/c1s, c1y/c1s))
    c2merkez.append((c2x/c2s,c2y/c2s))
    #print ("c1 merkez : ", c1merkez[0], "c2 merkez",c2merkez[0])
    # reassign every point to the nearer centroid; e1/e2 accumulate the
    # squared-error totals (only used by the commented-out diagnostics)
    yeniliste=[]
    c1uzaklik=0
    c2uzaklik=0
    #hataorani=0  (error-rate diagnostic, disabled)
    e1=0;
    e2=0;
    for m in veriListesi:
        c1uzaklik=sqrt(pow((int(m[0])-c1merkez[0][0]),2)+pow((int(m[1])-c1merkez[0][1]),2))
        c2uzaklik=sqrt(pow((int(m[0])-c2merkez[0][0]),2)+pow((int(m[1])-c2merkez[0][1]),2))
        e1=e1+pow((int(m[0])-c1merkez[0][0]),2)+pow((int(m[1])-c1merkez[0][1]),2)
        e2=e2+pow((int(m[0])-c2merkez[0][0]),2)+pow((int(m[1])-c2merkez[0][1]),2)
        if c1uzaklik>c2uzaklik:
            yeniliste.append("c2")
        else :
            yeniliste.append("c1")
    #print ("e1", e1,"e2",e2)
    #hataorani=e1+e2
    #print("Hata Orani:", hataorani)
    # rebuild the list with the new labels, counting how many changed
    degisim=0
    sonListe=[]
    for p in range(0,len(yeniliste)):
        if yeniliste[p]==veriListesi[p][2]:
            sonListe.append(veriListesi[p])
        else:
            sonListe.append((veriListesi[p][0],veriListesi[p][1],yeniliste[p]))
            degisim=degisim+1
    #for d in sonListe:
        #print (d[2])
    # recurse while labels keep changing; plot the stable clustering otherwise
    if degisim!=0:
        hesapla(sonListe)
    else:
        plt.cla()
        for d in sonListe:
            renk=''
            if d[2]=="c1":
                renk='bo'
            else:
                renk='ro'
            plt.plot(d[0],d[1],renk)
        plt.title("<NAME>")
        plt.savefig("./resource/kmeans_son.png")
|
bedirhansaglam/PythonMachineLearning | PythonMachineLearning/Normalizasyon.py | <reponame>bedirhansaglam/PythonMachineLearning
# -*- coding: utf-8 -*-
"""
Created on Sat Dec 30 21:59:11 2017
@author: Bedirhan
"""
import statistics
import numpy as np
def zscore(data):
    """Return *data* z-score normalised: (x - mean) / sample standard deviation."""
    mu = statistics.mean(data)
    sigma = statistics.stdev(data)
    return [(value - mu) / sigma for value in data]
def minmax(data):
    """Return *data* rescaled to [0, 1] via (x - min) / (max - min)."""
    lo = min(data)
    hi = max(data)
    return [(value - lo) / (hi - lo) for value in data]
def median(data):
    """Return *data* divided element-wise by its median."""
    mid = statistics.median(data)
    return [value / mid for value in data]
def normalizasyon(filename, currentIndex):
    """Read a comma-separated data file and normalise every feature column.

    Args:
        filename: path of the CSV-style data file.
        currentIndex: 0 = min-max, 1 = z-score, 2 = median normalisation.

    The last column is treated as the class label and passed through
    unchanged; because labels are strings, numpy stringifies the normalised
    floats in the returned array.

    Returns:
        numpy.ndarray: rows in the original order, normalised features first,
        label column last.
    """
    X = []
    # 'with' closes the file (the original leaked the handle).
    with open(filename) as f:
        for row in f:
            X.append(row.split(","))
    X = np.array(X)
    norm_veri = []
    # If the last line is shorter than the first (e.g. a trailing blank line
    # in iris.data), numpy built a 1-D object array; drop that last element.
    if len(X[0]) != len(X[len(X) - 1]):
        X = np.delete(X, [len(X) - 1])
    # Normalise each feature column (all but the last), collected column-wise.
    for a in range(0, len(X[0]) - 1):
        datalist = [float(X[b][a]) for b in range(0, len(X))]
        if currentIndex == 0:
            norm_veri.append(minmax(datalist))
        elif currentIndex == 1:
            norm_veri.append(zscore(datalist))
        elif currentIndex == 2:
            norm_veri.append(median(datalist))
    # Pass the label column through untouched (last field keeps its "\n").
    y_list = [y[len(y) - 1] for y in X]
    norm_veri.append(y_list)
    # Transpose back from column-per-entry to row-per-sample.
    norm_veri = np.array(norm_veri)
    norm_veri = np.transpose(norm_veri)
    return norm_veri
|
bedirhansaglam/PythonMachineLearning | PythonMachineLearning/KNNCluster.py | <filename>PythonMachineLearning/KNNCluster.py<gh_stars>1-10
# -*- coding: utf-8 -*-
"""
---- KNN Kümeleme -----
Rastgele dataset oluşturuyoruz , oluşturduğumuz noktaların (0,0) noktasına olan uzaklığını buluyoruz (Minkowski,<NAME>).
Daha sonra orijine en yakın noktayı ve en uzak noktayı belirliyoruz bu 2 nokta 2 grubun referans noktası oluyor.
Datasetteki bütün noktaların uzaklıkları referans noktalara göre hesaplanıyor ve 2 listede bu değerler tutuluyor.
Örn groupA=(dataset[1],uzaklık),(dataset[2],uzaklık)... groupB=(dataset[1],uzaklık),(dataset[2],uzaklık)...
Bu 2 listedeki uzaklık değerleri karşılaştırılıyor ve datasetteki nokta hangi referans noktasına daha yakınsa o gruba dahil ediliyor.
-----------------------
"""
from math import sqrt
from math import pow
import numpy as np
import matplotlib.pyplot as plt
def datasetOlustur(maxRange, count):
    """Create *count* random integer points in [0, maxRange]^2, plot them in
    green, and save the figure to ./resource/knn_kume_data.png.

    Returns:
        numpy.ndarray: shape (count, 2) array of points.
    """
    # np.random.random_integers() is deprecated and removed from modern
    # NumPy; randint with an exclusive high of maxRange+1 draws from the
    # same inclusive [0, maxRange] range.
    data = np.random.randint(0, maxRange + 1, count * 2).reshape((count, 2))
    plt.cla()
    for m in data:
        plt.plot(int(m[0]), int(m[1]), 'go')
    plt.show  # NOTE(review): missing () in the original — kept as a no-op
    plt.savefig("./resource/knn_kume_data.png")
    return data
def minkowski_liste(ref, verilistesi):
    """Minkowski-style (p=3, with the exponent approximated as 0.33) distance
    from *ref* to every point in *verilistesi*; returns a list of distances."""
    distances = []
    for point in verilistesi:
        cubes = pow(abs(ref[0] - point[0]), 3) + pow(abs(ref[1] - point[1]), 3)
        distances.append(pow(cubes, 0.33))
    return distances
def minkowski_sifir(verilistesi):
    """Minkowski-style (p=3, exponent 0.33) distance of each point from the
    origin.  Returns (distance, point) pairs sorted ascending by distance."""
    pairs = []
    for point in verilistesi:
        cubes = pow(abs(point[0] - 0), 3) + pow(abs(point[1] - 0), 3)
        pairs.append((pow(cubes, 0.33), point))
    return sorted(pairs, key=lambda pair: pair[0])
def manhattan_liste(ref, verilistesi):
    """Manhattan (L1) distance from *ref* to every point; returns a list."""
    return [abs(ref[0] - point[0]) + abs(ref[1] - point[1])
            for point in verilistesi]
def manhattan_sifir(verilistesi):
    """Manhattan distance of each point from the origin, as (distance, point)
    pairs sorted ascending by distance."""
    pairs = [((abs(point[0] - 0) + abs(point[1] - 0)), point)
             for point in verilistesi]
    return sorted(pairs, key=lambda pair: pair[0])
def oklid_liste(ref, verilistesi):
    """Euclidean (L2) distance from *ref* to every point; returns a list."""
    return [sqrt(pow(ref[0] - point[0], 2) + pow(ref[1] - point[1], 2))
            for point in verilistesi]
def oklid_sifir(verilistesi):
    """Euclidean distance of each point from the origin, as (distance, point)
    pairs sorted ascending by distance."""
    pairs = [(sqrt(pow(point[0] - 0, 2) + pow(point[1] - 0, 2)), point)
             for point in verilistesi]
    return sorted(pairs, key=lambda pair: pair[0])
def kumele(dataset, currentIndex):
    """Split *dataset* into two clusters and save the plot to ./resource/knn_kume.png.

    Args:
        dataset: sequence of 2-D points.
        currentIndex: metric selector — 0 = Euclidean, 1 = Manhattan,
            2 = Minkowski.

    The point nearest the origin and the point farthest from it become the
    two reference points; every point joins the cluster of the closer
    reference (ties go to cluster A), then both clusters are plotted.
    """
    if currentIndex == 0:
        uzakliksifir = oklid_sifir(dataset)
        liste_fn = oklid_liste
    elif currentIndex == 1:
        uzakliksifir = manhattan_sifir(dataset)
        liste_fn = manhattan_liste
    elif currentIndex == 2:
        uzakliksifir = minkowski_sifir(dataset)
        liste_fn = minkowski_liste
    a1 = []
    b1 = []
    a_ref = uzakliksifir[0][1]                       # closest to the origin
    b_ref = uzakliksifir[len(uzakliksifir) - 1][1]   # farthest from the origin
    # Distances of every point to both references.  The original wrapped this
    # in an extra 'for a in dataset:' loop, recomputing both lists N times and
    # appending each point N times; one pass gives the same clustering (and
    # the same plot) without the duplicates.
    a_list = liste_fn(a_ref, dataset)
    b_list = liste_fn(b_ref, dataset)
    for i in range(0, len(a_list)):
        if a_list[i] > b_list[i]:
            b1.append(dataset[i])
        else:
            a1.append(dataset[i])
    plt.cla()
    for m in a1:
        plt.plot(int(m[0]), int(m[1]), 'ro')
    for m in b1:
        plt.plot(int(m[0]), int(m[1]), 'bo')
    plt.show  # NOTE(review): missing () in the original — kept as a no-op
    plt.savefig("./resource/knn_kume.png")
|
bedirhansaglam/PythonMachineLearning | PythonMachineLearning/Main.py | <reponame>bedirhansaglam/PythonMachineLearning
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 29 18:13:38 2017
@author: Bedirhan
"""
from PyQt4 import QtGui
from PyQt4 import QtCore
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from PyQt4.QtGui import *
import matplotlib.pyplot as plt
import numpy
from PIL.ImageQt import ImageQt
from math import sqrt
from tasarim import Ui_Dialog
from sklearn.externals import joblib
import pickle
import kmeans
from knn import dataload_knn
from ParkinsonDataSet import SST_Data
from ParkinsonDataSet import DST_Data
from ParkinsonDataSet import STCP_Data
from ParkinsonDataSet import test_sst
from ParkinsonDataSet import test_dst
from ParkinsonDataSet import test_stcp
from ParkinsonDataSet import allData
import ParkinsonDataSet
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import accuracy_score
from sklearn.metrics import confusion_matrix
from sklearn.decomposition import PCA
from sklearn.datasets import make_classification
from imblearn.over_sampling import RandomOverSampler
from imblearn.under_sampling import RandomUnderSampler
import KNNCluster
import Normalizasyon
import NavieBayes
class MainWindow(QtGui.QMainWindow,Ui_Dialog):
def __init__(self):
QtGui.QMainWindow.__init__(self)
self.setupUi(self)
self.veriListesi=dataload_knn()
self.gb_main_menu.setGeometry(QtCore.QRect(0, 50, 0, 670))
self.pb_hamburger_menu.clicked.connect(self.main_menu_clicked)
self.sifirla()
self.gb_knn_sinif.setGeometry(QtCore.QRect(200,50,1080,670))
self.pb_main_menu_1.clicked.connect(self.mm_pb_knn)
self.pb_main_menu_2.clicked.connect(self.mm_pb_rus)
self.pb_main_menu_3.clicked.connect(self.mm_pb_kmeans)
self.pb_main_menu_4.clicked.connect(self.mm_pb_knn_kume)
self.pb_main_menu_5.clicked.connect(self.mm_pb_navie)
self.pb_main_menu_6.clicked.connect(self.mm_pb_normalizasyon)
self.pb_main_menu_7.clicked.connect(self.mm_pb_random_forest)
self.pb_main_menu_8.clicked.connect(self.mm_pb_train_test)
self.pb_main_menu_9.clicked.connect(self.mm_pb_parkinson)
#knn-kümeleme---b--
self.knn_cluster_pb_create_dataset.clicked.connect(self.knn_cluster_create_dataset)
self.knn_pb_cluster.clicked.connect(self.knn_cluster)
#knn kümelme --s--
#navie-text-class -b-
self.pb_navie_veriekle.clicked.connect(self.navie_item_add)
self.pb_navie_siniflandir.clicked.connect(self.navie_bayes_siniflandir)
#rus-ros butons----b--
self.rus_ros_pb_create_dataset.clicked.connect(self.rus_ros_dataSet_olustur)
self.rus_ros_pb.clicked.connect(self.rus_ros_uygula)
self.rus_ros_slider.valueChanged.connect(self.rus_ros_slider_changedValue)
#rus-ros butons----s--
#normalizasyon -b-
self.pb_normalizasyon_dataload.clicked.connect(self.norm_data_load)
self.pb_normalize.clicked.connect(self.normalize_uygula)
self.pb_normalizasyon_datasave.clicked.connect(self.normalize_data_save)
#normalizasyon -s-
#Random Forest -b-
self.pb_random_forest_data_load.clicked.connect(self.random_forest_data_load)
self.pb_random_forest_tandt.clicked.connect(self.random_forest_train_test)
self.random_forest_slider.valueChanged.connect(self.random_forest_slider_changedValue)
self.pb_random_forest.clicked.connect(self.random_forest_uygula)
self.pb_random_forest_modelsave.clicked.connect(self.random_forest_model_save)
#Random forest -s-
#Train & Test -B-
self.pb_train_test_data_load.clicked.connect(self.train_test_data_load)
self.pb_train_and_test.clicked.connect(self.train_test_uygula)
self.test_and_train_slider.valueChanged.connect(self.test_and_train_slider_changedValue)
#parkinson top menu --b-
self.pb_parkinson_veri_yukle.clicked.connect(self.parkinson_veri_yukle)
self.pb_parkinson_class.clicked.connect(self.parkinson_classfication)
#parkinson top menu --s-
#parkinson bottom menu --b-
self.pb_parkinson_all_data.clicked.connect(self.parkinson_all_data)
self.pb_parkinson_split.clicked.connect(self.parkinson_train_and_test)
self.pb_parkinson_class_2.clicked.connect(self.parkinson_classfication_2)
self.pb_parkinson_reload_split.clicked.connect(self.parkinson_reload)
#parkinson bottom menu --s-
self.form_load()
#============================================================================================================================================================
#---------Gorsel Showlar Baslangic ---------------------#
def main_menu_clicked(self):
if self.gb_main_menu.width()==0:
self.animation=QPropertyAnimation(self.gb_main_menu,"geometry")
self.animation.setDuration(500)
self.animation.setStartValue(QRect(0, 50, 0, 670))
self.animation.setEndValue(QRect(0, 50, 200, 670))
self.animation.start()
else:
self.animation=QPropertyAnimation(self.gb_main_menu,"geometry")
self.animation.setDuration(500)
self.animation.setStartValue(QRect(0, 50, 200,670))
self.animation.setEndValue(QRect(0, 50, 0, 670))
self.animation.start()
def mm_pb_knn(self):
self.sifirla()
self.animasyon_baslat(self.gb_knn_sinif,200,50,1080,670)
def mm_pb_rus(self):
self.sifirla()
self.animasyon_baslat(self.gb_rus_ros,200,50,1080,670)
def mm_pb_kmeans(self):
self.sifirla()
self.animasyon_baslat(self.gb_k_means,200,50,1080,670)
self.data_kmeans=kmeans.data()
self.verileri_tabloya_dok(self.data_kmeans,self.k_means_tbl_data)
def mm_pb_knn_kume(self):
self.sifirla()
self.animasyon_baslat(self.gb_knn_kume,200,50,1080,670)
def mm_pb_navie(self):
self.sifirla()
self.animasyon_baslat(self.gb_navie,200,50,1080,670)
self.navie_bayes_data_set=NavieBayes.default_training_data()
self.verileri_tabloya_dok(self.navie_bayes_data_set,self.tbl_navie_data_set)
def mm_pb_normalizasyon(self):
self.sifirla()
self.animasyon_baslat(self.gb_normalizasyon,200,50,1080,670)
def mm_pb_random_forest(self):
self.sifirla()
self.animasyon_baslat(self.gb_randomforest,200,50,1080,670)
def mm_pb_train_test(self):
self.sifirla()
self.animasyon_baslat(self.gb_train_test,200,50,1080,670)
def mm_pb_parkinson(self):
self.sifirla()
self.animasyon_baslat(self.gb_parkinson,200,50,1080,670)
def animasyon_baslat(self,animasyon,x,y,w,h):
if animasyon.width()==0 :
self.animation=QPropertyAnimation(animasyon,"geometry")
self.animation.setDuration(1000)
self.animation.setStartValue(QRect(x, y, 0, 0))
self.animation.setEndValue(QRect(x, y, w, h))
self.animation.start()
def buton_animasyon(self,animasyon,x,y,w,h):
if animasyon.width()==0:
self.animation=QPropertyAnimation(animasyon,"geometry")
self.animation.setDuration(500)
self.animation.setStartValue(QRect(x, y, 0, h))
self.animation.setEndValue(QRect(x, y, w, h))
self.animation.start()
def sifirla(self):
self.gb_rus_ros.setGeometry(QtCore.QRect(200,50,0,0))
self.gb_k_means.setGeometry(QtCore.QRect(200,50,0,0))
self.gb_knn_kume.setGeometry(QtCore.QRect(200,50,0,0))
self.gb_navie.setGeometry(QtCore.QRect(200,50,0,0))
self.gb_knn_sinif.setGeometry(QtCore.QRect(200,50,0,0))
self.gb_parkinson.setGeometry(QtCore.QRect(200,50,0,0))
self.gb_normalizasyon.setGeometry(QtCore.QRect(200,50,0,0))
self.gb_randomforest.setGeometry(QtCore.QRect(200,50,0,0))
self.gb_train_test.setGeometry(QtCore.QRect(200,50,0,0))
#---------Gorsel Showlar Son ---------------------#
#============================================================================================================================================================
#-------------Genel Fonksiyonlar --------- Baslangic ------------------------
def show_image(self,img_name,width,height):
pixMap=QtGui.QPixmap(img_name)
pixMap=pixMap.scaled(width-5,height-5)
pixItem=QtGui.QGraphicsPixmapItem(pixMap)
scene=QGraphicsScene()
scene.addItem(pixItem)
return scene
def show_pil_image(self,img,width,height):
show_image=ImageQt(img)
pixMap=QtGui.QPixmap.fromImage(show_image)
pixMap=pixMap.scaled(width-5,height-5)
pixItem=QtGui.QGraphicsPixmapItem(pixMap)
scene=QGraphicsScene()
scene.addItem(pixItem)
return scene
    def verileri_tabloya_dok(self,table_value,table_name):
        """Populate the QTableWidget *table_name* with the 2-D values in *table_value*.

        Every cell is stringified before insertion; the table is resized to
        match the data and any previous contents are cleared first.
        """
        num_rows=len(table_value) # number of rows in the incoming data
        num_column=len(table_value[0]) # number of columns, taken from the first row
        table_name.clear() # wipe any previous table contents
        table_name.setColumnCount(num_column) # resize the table to fit the data
        table_name.setRowCount(num_rows)
        for rowNumber,row in enumerate(table_value):# walk the data row by row
            for columnNumber in range(0,len(table_value[0])):# ...and column by column
                table_name.setItem(rowNumber,columnNumber,QtGui.QTableWidgetItem(str(row[columnNumber]))) # write each cell as text
def RFclassification(self,X_train,y_train,X_test):
self.rf_clf=RandomForestClassifier()
self.rf_clf.fit(X_train,y_train)
results=self.rf_clf.predict(X_test)
return results
def DTclassification(self,X_train,y_train,X_test):
clf=DecisionTreeClassifier()
clf.fit(X_train,y_train)
results=clf.predict(X_test)
return results
def saveData(self,filename,data):
db=numpy.array(data)
f=open(filename,'w')
for i,a in enumerate(db):
for p,j in enumerate(db[i]):
if p!=(len(db[i])-1):
f.write(j+",")
else:
f.write(j)
def form_load(self):
self.w,self.h=self.t1_gv_veriseti.width(),self.t1_gv_veriseti.height()
self.t1_gv_veriseti.setScene(self.show_image("./resource/knn_data_set.png",self.w,self.h))
#-------------Genel Fonksiyonlar --------- SON ------------------------
#============================================================================================================================================================
#------KNN Siniflandirma BASLANGİC-------------------------
    def yeni_nokta_knn(self,x,y):
        """Redraw the KNN data set with the not-yet-classified point (x, y)
        highlighted, and save the figure to ./resource/knn_new_point.png."""
        plt.cla()
        # existing samples: blue triangles for "kotu", red circles otherwise
        for m in self.veriListesi:
            renk=''
            if m[2]=="kotu":
                renk='b^'
            else:
                renk='ro'
            plt.plot(m[0],m[1],renk,markersize=10)
        # the candidate point, drawn larger and green so it stands out
        plt.plot(x,y,'gd',markersize=15)
        plt.savefig("./resource/knn_new_point.png")
    def KNN(self,k,x,y):
        """Classify point (x, y) by k-nearest-neighbour over self.veriListesi.

        The new point is labelled by majority vote of its k nearest samples
        (Euclidean distance), appended to self.veriListesi, and the updated
        data set is re-plotted to ./resource/knn_sonuc.png.
        """
        sonuc=0  # vote counter: +1 for "iyi", -1 for "kotu"
        plt.cla()
        self.uzakliklar=[]
        # distance of the new point to every existing sample, paired with its label
        for m in self.veriListesi:
            oklid=sqrt(pow(int(x)-int(m[0]),2)+pow(int(y)-int(m[1]),2))
            self.uzakliklar.append((oklid,m[2]))
        self.uzakliklar.sort()
        # majority vote over the k closest samples
        for i in range(0,int(k)):
            if self.uzakliklar[i][1]=="iyi":
                sonuc=sonuc+1
            else:
                sonuc=sonuc-1
        # positive total -> "iyi"; zero or negative (ties included) -> "kotu"
        if sonuc>0:
            self.veriListesi.append((int(x),int(y),"iyi"))
        else:
            self.veriListesi.append((int(x),int(y),"kotu"))
        # redraw the full data set, including the newly classified point
        for m in self.veriListesi:
            renk=''
            if m[2]=="kotu":
                renk='b^'
            else:
                renk='ro'
            plt.plot(m[0],m[1],renk,markersize=10)
        plt.savefig("./resource/knn_sonuc.png")
#------KNN siniflandirma SON-------------------------
#============================================================================================================================================================
#---------------RUS VE ROS ------ FONKSİYONLAR---- BASLANGİC ---------
def rus_ros_slider_changedValue(self):
deger=self.rus_ros_slider.value()
self.lbl_rus_ros_slider.setText(str(deger))
def rus_ros_generate_dataset(self,samples,w1,w2):
self.X_rus_ros, self.y_rus_ros = make_classification(n_classes=2, class_sep=2, weights=[w1, w2],
n_informative=3, n_redundant=1, flip_y=0,
n_features=20, n_clusters_per_class=1,
n_samples=int(samples), random_state=10)
self.pca = PCA(n_components=2)
self.X_vis= self.pca.fit_transform(self.X_rus_ros)
def rus_ros_plot(self,X,y,title):
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
plt.scatter(X[y==0, 0], X[y==0, 1],alpha=.5, label='Class #0',c="r")
plt.scatter(X[y==1, 0], X[y==1, 1],alpha=.5, label='Class #1')
# make nice plotting
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.get_xaxis().tick_bottom()
ax.get_yaxis().tick_left()
ax.spines['left'].set_position(('outward', 10))
ax.spines['bottom'].set_position(('outward', 10))
ax.set_xlim([-6, 6])
ax.set_ylim([-6, 6])
plt.title(title)
plt.legend()
plt.tight_layout()
plt.savefig("./resource/rus_ros_orijinal.png")
def rus_and_ros(self,X,y,X_vis,val):
if val==0: #rus
plt.cla()
rus = RandomUnderSampler(return_indices=True)
X_resampled, y_resampled, idx_resampled = rus.fit_sample(X, y)
X_res_vis = self.pca.transform(X_resampled)
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
idx_samples_removed = numpy.setdiff1d(numpy.arange(X_vis.shape[0]),
idx_resampled)
idx_class_0 = y_resampled == 0
plt.scatter(X_res_vis[idx_class_0, 0], X_res_vis[idx_class_0, 1],
alpha=.5, label='Class #0',c="r")
plt.scatter(X_res_vis[~idx_class_0, 0], X_res_vis[~idx_class_0, 1],
alpha=.5, label='Class #1')
plt.scatter(X_vis[idx_samples_removed, 0], X_vis[idx_samples_removed, 1],
alpha=.1, label='Removed samples',c="g")
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.get_xaxis().tick_bottom()
ax.get_yaxis().tick_left()
ax.spines['left'].set_position(('outward', 10))
ax.spines['bottom'].set_position(('outward', 10))
ax.set_xlim([-6, 6])
ax.set_ylim([-6, 6])
plt.title('RUS Method')
plt.legend()
plt.tight_layout()
plt.savefig("./resource/rus.png")
elif val==1: #ros
ros = RandomOverSampler()
X_resampled, y_resampled = ros.fit_sample(X, y)
X_res_vis = self.pca.transform(X_resampled)
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
plt.scatter(X_res_vis[y_resampled == 0, 0], X_res_vis[y_resampled == 0, 1],label="Class #0", alpha=.5,c='r')
plt.scatter(X_res_vis[y_resampled == 1, 0], X_res_vis[y_resampled == 1, 1],label="Class #1", alpha=.5)
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.get_xaxis().tick_bottom()
ax.get_yaxis().tick_left()
ax.spines['left'].set_position(('outward', 10))
ax.spines['bottom'].set_position(('outward', 10))
ax.set_xlim([-6, 6])
ax.set_ylim([-6, 6])
plt.title('ROS Method')
plt.legend()
plt.tight_layout()
plt.savefig("./resource/ros.png")
#---------------RUS VE ROS ------ FONKSİYONLAR---- SON ---------
#============================================================================================================================================================
#-------------NAVİE BAYES - BASLANGİC ---------------------------------------
def navie_item_add(self):
kelime=self.le_navie_kelime.text()
kategori=self.le_naive_kategori.text()
kelime=kelime.lower()
kategori=kategori.lower()
self.navie_bayes_data_set.append((kelime,kategori))
self.verileri_tabloya_dok(self.navie_bayes_data_set,self.tbl_navie_data_set)
self.le_navie_kelime.setText("")
def navie_bayes_siniflandir(self):
sonuc=NavieBayes.predict(self.le_metin.toPlainText(),NavieBayes.fit(self.navie_bayes_data_set))
sonuc=sonuc.upper()
self.lbl_navie_sonuc.setText(sonuc)
#-------------NAVİE BAYES - SON ---------------------------------------
#============================================================================================================================================================
#------------NORMALIZASYON BASLANGIC-----------------------
def norm_data_load(self):
self.fileName=unicode(QtGui.QFileDialog.getOpenFileName(self,u"Data Dosyası Seçin",".",u"(*.data *.pkl *.txt)"))
self.tbl_norm_data.clear()
self.tbl_norm_result.clear()
if self.fileName:
f = open(self.fileName)
self.norm_data=[]
for i,row in enumerate(f.readlines()):
currentline = row.split(",")
temp=[]
for column_value in currentline:
temp.append(column_value)
self.norm_data.append(temp)
self.norm_data=numpy.array(self.norm_data)
if len(self.norm_data[0])!=len(self.norm_data[len(self.norm_data)-1]): #eger son satir ile ilk satirin degeri esit degilse son satiri siliyoruz(iris datada son deger bos)
self.norm_data=numpy.delete(self.norm_data,[len(self.norm_data)-1])
self.verileri_tabloya_dok(self.norm_data,self.tbl_norm_data)
def normalize_data_save(self):
save_file_name=unicode(QtGui.QFileDialog.getSaveFileName(self,u"Dosyayı Kaydet",".",u"(*.data)"))
self.saveData(save_file_name,self.result)
def normalize_uygula(self):
if self.rb_norm_minmax.isChecked():
self.result=Normalizasyon.normalizasyon(self.fileName,0)
elif self.rb_norm_zscore.isChecked():
self.result=Normalizasyon.normalizasyon(self.fileName,1)
elif self.rb_norm_median.isChecked():
self.result=Normalizasyon.normalizasyon(self.fileName,2)
self.verileri_tabloya_dok(self.result,self.tbl_norm_result)
#------------NORMALIZASYON SON-----------------------
#============================================================================================================================================================
#----------- Random Forest Baslangic------------------
def random_forest_data_load(self):
self.tbl_random_forest_confusionm.clear()
self.lbl_random_forest_accuraryscore.setText("")
self.tbl_random_forest_data.clear()
self.tbl_random_forest_x_test.clear()
self.tbl_random_forest_x_train.clear()
self.fileName=unicode(QtGui.QFileDialog.getOpenFileName(self,u"Data Dosyası Seçin",".",u"(*.data *.pkl)"))
if self.fileName:
f = open(self.fileName)
self.rf_data=[]
for i,row in enumerate(f.readlines()):
currentline = row.split(",")
temp=[]
for column_value in currentline:
temp.append(column_value)
self.rf_data.append(temp)
self.rf_data=numpy.array(self.rf_data)
if len(self.rf_data[0])!=len(self.rf_data[len(self.rf_data)-1]): #eger son satir ile ilk satirin degeri esit degilse son satiri siliyoruz(iris datada son deger bos)
self.rf_data=numpy.delete(self.rf_data,[len(self.rf_data)-1])
self.verileri_tabloya_dok(self.rf_data,self.tbl_random_forest_data)
def random_forest_train_test(self):
self.tbl_random_forest_confusionm.clear()
self.lbl_random_forest_accuraryscore.setText("")
deger=self.random_forest_slider.value()
t_size=float(deger)/100
data=numpy.array(self.rf_data)
a=len(data[0])-1
X=data[:,:a]
y=data[:,a]
self.X_train_random_forest,self.X_test_random_forest,self.y_train_random_forest,self.y_test_random_forest=train_test_split(X,y,test_size=t_size)
self.verileri_tabloya_dok(self.X_train_random_forest,self.tbl_random_forest_x_train)
self.verileri_tabloya_dok(self.X_test_random_forest,self.tbl_random_forest_x_test)
def random_forest_uygula(self):
results=self.RFclassification(self.X_train_random_forest,self.y_train_random_forest,self.X_test_random_forest)
cm=confusion_matrix(self.y_test_random_forest,results)
rf_as=(round(accuracy_score(self.y_test_random_forest,results),2))*100
self.verileri_tabloya_dok(cm,self.tbl_random_forest_confusionm)
self.lbl_random_forest_accuraryscore.setText("%"+str(rf_as))
def random_forest_slider_changedValue(self):
deger=self.random_forest_slider.value()
self.lbl_random_forest_slider.setText(str(deger))
def random_forest_model_save(self):
save_file_name=unicode(QtGui.QFileDialog.getSaveFileName(self,u"Dosyayı Kaydet",".",u"(*.pkl)"))
joblib.dump(self.rf_clf,save_file_name)
#----------- Random Forest SON------------------------
#============================================================================================================================================================
#------------Train & Test Baslangic ---------------
def train_test_data_load(self):
self.tbl_train_test_x_train.clear()
self.tbl_train_test_x_test.clear()
self.tbl_train_test_y_train.clear()
self.tbl_train_test_y_test.clear()
self.fileName=unicode(QtGui.QFileDialog.getOpenFileName(self,u"Data Dosyası Seçin",".",u"(*.data *.pkl)"))
if self.fileName: #Eger dosya secilmis ise yap
f = open(self.fileName)
self.tt_data=[]
for i,row in enumerate(f.readlines()):
currentline = row.split(",")
temp=[]
for column_value in currentline:
temp.append(column_value)
self.tt_data.append(temp)
self.tt_data=numpy.array(self.tt_data)
if len(self.tt_data[0])!=len(self.tt_data[len(self.tt_data)-1]): #eger son satir ile ilk satirin degeri esit degilse son satiri siliyoruz(iris datada son deger bos)
self.tt_data=numpy.delete(self.tt_data,[len(self.tt_data)-1])
self.verileri_tabloya_dok(self.tt_data,self.tbl_train_test_data)
    def test_and_train_slider_changedValue(self):
        """Mirror the train/test split slider value into its label as '%N'."""
        val=self.test_and_train_slider.value()
        self.lbl_train_test.setText("%"+str(val))
def train_test_uygula(self):
val=self.test_and_train_slider.value()
t_size=float(val)/100
data=numpy.array(self.tt_data)
a=len(data[0])-1
X=data[:,:a]
y=data[:,a]
X_train_tt,X_test_tt,y_train_tt,y_test_tt=train_test_split(X,y,test_size=t_size)
self.verileri_tabloya_dok(X_train_tt,self.tbl_train_test_x_train)
self.verileri_tabloya_dok(X_test_tt,self.tbl_train_test_x_test)
self.verileri_tabloya_dok(y_train_tt,self.tbl_train_test_y_train)
self.verileri_tabloya_dok(y_test_tt,self.tbl_train_test_y_test)
#------------Train & Test SON ---------------
#============================================================================================================================================================
#-------Parkinson Verileri Global Degiskenlere Ataniyor ----Baslangic------
    def parkinson_veri(self):
        """Load the Parkinson SST/DST/STCP train and test files and store the
        feature/label splits on self.

        Each file has 5 feature columns followed by the label in column 5.
        """
        sst_d=ParkinsonDataSet.readDataFile("./data/parkinson/sst.data")
        dst_d=ParkinsonDataSet.readDataFile("./data/parkinson/dst.data")
        stcp_d=ParkinsonDataSet.readDataFile("./data/parkinson/stcp.data")
        t_sst=ParkinsonDataSet.readDataFile("./data/parkinson/sst_test.data")
        t_dst=ParkinsonDataSet.readDataFile("./data/parkinson/dst_test.data")
        t_stcp=ParkinsonDataSet.readDataFile("./data/parkinson/stcp_test.data")
        # In this section the data can also be pulled from the folder without
        # the .data extension (alternative loaders, kept for reference):
        # sst_d=numpy.array(SST_Data())
        # dst_d=numpy.array(DST_Data())
        # stcp_d=numpy.array(STCP_Data())
        # t_sst=numpy.array(test_sst())
        # t_dst=numpy.array(test_dst())
        # t_stcp=numpy.array(test_stcp())
        #SST
        self.X_test_sst=t_sst[:,:5]
        self.y_test_sst=t_sst[:,5]
        self.X_train_sst=sst_d[:,:5]
        self.y_train_sst=sst_d[:,5]
        #DST
        self.X_test_dst=t_dst[:,:5]
        self.y_test_dst=t_dst[:,5]
        self.X_train_dst=dst_d[:,:5]
        self.y_train_dst=dst_d[:,5]
        #STCP
        self.X_train_stcp=stcp_d[:,:5]
        self.y_train_stcp=stcp_d[:,5]
        self.X_test_stcp=t_stcp[:,:5]
        self.y_test_stcp=t_stcp[:,5]
#-------Parkinson Verileri Global Degiskenlere Ataniyor ----Son------
#============================================================================================================================================================
#-------------------- Button Click olaylari ---- Baslangic ----------------------------
#-----------------RUS VE ROS BUTON OLAYLARI BASLANGİC-----------------
#-------------RUS-ROS--- DATASET OLUSTUR --- BASLANGİC--------
    def rus_ros_dataSet_olustur(self):
        """Generate the imbalanced demo data set from the UI controls and plot
        it into the RUS/ROS graphics view."""
        samples=self.rus_ros_n_samples.text()
        # Slider controls the class imbalance; w1/w2 are complementary weights.
        val=self.rus_ros_slider.value()
        w1=(100-val)
        w1=float(w1)/100
        w2=1-w1
        # NOTE(review): `samples` is the raw string from the text box —
        # presumably rus_ros_generate_dataset converts it to int; confirm.
        self.rus_ros_generate_dataset(samples,w1,w2)
        self.rus_ros_plot(self.X_vis,self.y_rus_ros,"Orijinal Data Set")
        self.rr_w,self.rr_h=self.rus_ros_gv_data.width(),self.rus_ros_gv_data.height()
        self.rus_ros_gv_data.setScene(self.show_image("./resource/rus_ros_orijinal.png",self.rr_w,self.rr_h))
#-------------RUS-ROS--- DATASET OLUSTUR --- SON--------
#--------------------------RUS-ROS -UYGULA---------- BASLANGİC------
    def rus_ros_uygula(self):
        """Apply ROS (flag 1) or RUS (flag 0) depending on the checked radio
        button and display the resampled scatter plot."""
        w,h=self.rus_ros_gv_sonuc.width(),self.rus_ros_gv_sonuc.height()
        if self.radiobuton_ros.isChecked():
            self.rus_and_ros(self.X_rus_ros,self.y_rus_ros,self.X_vis,1)
            self.rus_ros_gv_sonuc.setScene(self.show_image("./resource/ros.png",w,h))
        if self.radiobuton_rus.isChecked():
            self.rus_and_ros(self.X_rus_ros,self.y_rus_ros,self.X_vis,0)
            self.rus_ros_gv_sonuc.setScene(self.show_image("./resource/rus.png",w,h))
#--------------------------RUS-ROS -UYGULA---------- SON------
#-----------------RUS VE ROS BUTON OLAYLARI SON-----------------
#-------Parkinson top menu -------- Baslangic -------------
    def parkinson_veri_yukle(self):
        """Load the Parkinson splits (via parkinson_veri) and render each
        SST/DST/STCP train/test array into its UI table."""
        self.parkinson_veri()
        #----SST-----
        self.verileri_tabloya_dok(self.X_train_sst,self.tbl_sst_x_train)
        self.verileri_tabloya_dok(self.y_train_sst,self.tbl_sst_y_train)
        self.verileri_tabloya_dok(self.X_test_sst,self.tbl_sst_x_test)
        #----DST-----
        self.verileri_tabloya_dok(self.X_train_dst,self.tbl_dst_x_train)
        self.verileri_tabloya_dok(self.y_train_dst,self.tbl_dst_y_train)
        self.verileri_tabloya_dok(self.X_test_dst,self.tbl_dst_x_test)
        #----STCP-----
        self.verileri_tabloya_dok(self.X_train_stcp,self.tbl_stcp_x_train)
        self.verileri_tabloya_dok(self.y_train_stcp,self.tbl_stcp_y_train)
        self.verileri_tabloya_dok(self.X_test_stcp,self.tbl_stcp_x_test)
def parkinson_classfication(self):
#----SST-----
sst_rf_result=self.RFclassification(self.X_train_sst,self.y_train_sst,self.X_test_sst)
sst_rf_cm=confusion_matrix(sst_rf_result,self.y_test_sst)
sst_rf_as=(round(accuracy_score(self.y_test_sst,sst_rf_result),2))*100
self.verileri_tabloya_dok(sst_rf_cm,self.tbl_sst_rf_cm)
self.lbl_sst_rf_as.setText("%"+str(sst_rf_as))
sst_dt_result=self.DTclassification(self.X_train_sst,self.y_train_sst,self.X_test_sst)
sst_dt_cm=confusion_matrix(sst_dt_result,self.y_test_sst)
sst_dt_as=(round(accuracy_score(self.y_test_sst,sst_dt_result),2))*100
self.verileri_tabloya_dok(sst_dt_cm,self.tbl_sst_gv_cm)
self.lbl_sst_gv_as.setText("%"+str(sst_dt_as))
#----DST-----
dst_rf_result=self.RFclassification(self.X_train_dst,self.y_train_dst,self.X_test_dst)
dst_rf_cm=confusion_matrix(dst_rf_result,self.y_test_dst)
dst_rf_as=(round(accuracy_score(self.y_test_dst,dst_rf_result),2))*100
self.verileri_tabloya_dok(dst_rf_cm,self.tbl_dst_rf_cm)
self.lbl_dst_rf_as.setText("%"+str(dst_rf_as))
dst_dt_result=self.DTclassification(self.X_train_dst,self.y_train_dst,self.X_test_dst)
dst_dt_cm=confusion_matrix(dst_dt_result,self.y_test_dst)
dst_dt_as=(round(accuracy_score(self.y_test_dst,dst_dt_result),2))*100
self.verileri_tabloya_dok(dst_dt_cm,self.tbl_dst_gv_cm)
self.lbl_dst_gv_as.setText("%"+str(dst_dt_as))
#----STCP-----
stcp_rf_result=self.RFclassification(self.X_train_stcp,self.y_train_stcp,self.X_test_stcp)
stcp_rf_cm=confusion_matrix(stcp_rf_result,self.y_test_stcp)
stcp_rf_as=(round(accuracy_score(self.y_test_stcp,stcp_rf_result),2))*100
self.verileri_tabloya_dok(stcp_rf_cm,self.tbl_stcp_rf_cm)
self.lbl_stcp_rf_as.setText("%"+str(stcp_rf_as))
stcp_dt_result=self.DTclassification(self.X_train_stcp,self.y_train_stcp,self.X_test_stcp)
stcp_dt_cm=confusion_matrix(stcp_dt_result,self.y_test_stcp)
stcp_dt_as=(round(accuracy_score(self.y_test_stcp,stcp_dt_result),2))*100
self.verileri_tabloya_dok(stcp_dt_cm,self.tbl_stcp_gv_cm)
self.lbl_stcp_gv_as.setText("%"+str(stcp_dt_as))
#-------Parkinson top menu -------- Son -------------
#-------Parkinson bottom menu ----- baslangic -------
    def parkinson_all_data(self):
        """Load the combined Parkinson data set, split off features/labels
        (5 feature columns + label column 5) and advance the wizard buttons."""
        all_data=ParkinsonDataSet.readDataFile("./data/parkinson/all.data")
        # all_data=numpy.array(allData())
        self.X=all_data[:,:5]
        self.y=all_data[:,5]
        self.verileri_tabloya_dok(all_data,self.tbl_all_data)
        # Step wizard: disable this step's button, enable and animate the next.
        self.pb_parkinson_all_data.setEnabled(False)
        self.pb_parkinson_split.setEnabled(True)
        self.buton_animasyon(self.pb_parkinson_split,100,10,48,48)
    def parkinson_train_and_test(self):
        """Split the full Parkinson data 75/25 into train/test, show the
        feature tables, and advance the wizard buttons."""
        self.X_train,self.X_test,self.y_train,self.y_test=train_test_split(self.X,self.y,test_size=0.25)
        self.verileri_tabloya_dok(self.X_train,self.tbl_x_train)
        self.verileri_tabloya_dok(self.X_test,self.tbl_x_test)
        self.pb_parkinson_split.setEnabled(False)
        self.pb_parkinson_class_2.setEnabled(True)
        self.buton_animasyon(self.pb_parkinson_class_2,170,10,48,48)
    def parkinson_classfication_2(self):
        """Run RF and DT on the all-data split, show both confusion matrices
        and accuracies, then advance the wizard buttons.

        NOTE(review): confusion_matrix is called with predictions first and
        y_true second here, the opposite of random_forest_uygula — this
        transposes the displayed matrix; confirm which order is intended.
        """
        #---Random Forest -------- B
        rf_results=self.RFclassification(self.X_train,self.y_train,self.X_test)
        rf_cm=confusion_matrix(rf_results,self.y_test)
        rf_as=(round(accuracy_score(self.y_test,rf_results),2))*100
        self.verileri_tabloya_dok(rf_cm,self.tbl_all_data_rf_cm)
        self.lbl_all_data_rf_as.setText("%"+str(rf_as))
        #---Random Forest -------- S
        #---------Decision Tree --------------B
        gs_results=self.DTclassification(self.X_train,self.y_train,self.X_test)
        gs_cm=confusion_matrix(gs_results,self.y_test)
        gs_as=(round(accuracy_score(self.y_test,gs_results),2))*100
        self.verileri_tabloya_dok(gs_cm,self.tbl_all_data_cd_cm)
        self.lbl_all_data_gv_as.setText("%"+str(gs_as))
        # #---------Decision Tree --------------B
        self.pb_parkinson_class_2.setEnabled(False)
        self.pb_parkinson_reload_split.setEnabled(True)
        self.buton_animasyon(self.pb_parkinson_reload_split,240,10,48,48)
    def parkinson_reload(self):
        """Reset the all-data wizard: clear result labels/tables and re-enable
        the split step."""
        self.pb_parkinson_reload_split.setEnabled(False)
        self.pb_parkinson_split.setEnabled(True)
        self.lbl_all_data_gv_as.setText("")
        self.lbl_all_data_rf_as.setText("")
        self.tbl_all_data_cd_cm.clear()
        self.tbl_all_data_rf_cm.clear()
        self.tbl_x_test.clear()
        self.tbl_x_train.clear()
#-------Parkinson bottom menu ----- son -------
#-----------KNN - KÜMELEME ----------- BASLANGİC---------
    def knn_cluster_create_dataset(self):
        """Create a random KNN-clustering data set from the range/count inputs,
        plot it, and list the points in the table."""
        maxRange=int(self.knn_cluster_max_range.text())
        count=int(self.knn_cluster_count.text())
        self.knn_c_data=KNNCluster.datasetOlustur(maxRange,count)
        self.w,self.h=self.knn_cluster_data.width(),self.knn_cluster_data.height()
        self.knn_cluster_data.setScene(self.show_image("./resource/knn_kume_data.png",self.w,self.h))
        self.verileri_tabloya_dok(self.knn_c_data,self.knn_cluster_tbl)
    def knn_cluster(self):
        """Cluster the generated points with the combo-box selection and show
        the resulting plot."""
        value=self.knn_cluster_cb.currentIndex()
        KNNCluster.kumele(self.knn_c_data,value)
        self.w,self.h=self.knn_cluster_result.width(),self.knn_cluster_result.height()
        self.knn_cluster_result.setScene(self.show_image("./resource/knn_kume.png",self.w,self.h))
#-----------KNN - KÜMELEME ----------- SON---------
@QtCore.pyqtSignature("bool")
def on_t1_pb_kumele_clicked(self):
if self.t1_te_k.toPlainText()!=None and self.t1_te_k.toPlainText()!="" and self.t1_te_x.toPlainText()!=None and self.t1_te_x.toPlainText()!="" and self.t1_te_y.toPlainText()!=None and self.t1_te_y.toPlainText()!="" :
self.wy,self.hy=self.t1_gv_nokta.width(),self.t1_gv_nokta.height()
self.yeni_nokta_knn(int(self.t1_te_x.toPlainText()),int(self.t1_te_y.toPlainText()))
self.t1_gv_nokta.setScene(self.show_image("./resource/knn_new_point.png",self.wy,self.hy))
self.KNN(int(self.t1_te_k.toPlainText()),int(self.t1_te_x.toPlainText()),int(self.t1_te_y.toPlainText()))
self.ws,self.hs=self.t1_gv_sonuc.width(),self.t1_gv_sonuc.height()
self.t1_gv_sonuc.setScene(self.show_image("./resource/knn_sonuc.png",self.ws,self.hs))
#--------- K means Butonlar - Baslangic -------------
    @QtCore.pyqtSignature("bool")
    def on_t2_pb_dataload_clicked(self):
        """Plot the current k-means points and show them in the data view."""
        kmeans.plot(self.data_kmeans)
        self.w,self.h=self.t2_gv_data.width(),self.t2_gv_data.height()
        self.t2_gv_data.setScene(self.show_image("./resource/kmeans_ilk.png",self.w,self.h))
    @QtCore.pyqtSignature("bool")
    def on_kmeans_pb_ekle_clicked(self):
        """Append the (x, y, label) point from the form to the k-means data
        and refresh the points table."""
        x=int(self.kmeans_x.text())
        y=int(self.kmeans_y.text())
        # NOTE(review): `etiket` is unbound if the combo box ever holds more
        # than the two expected entries (index > 1) — confirm the UI forbids it.
        if self.kmeans_etiket.currentIndex()==0:
            etiket="c1"
        elif self.kmeans_etiket.currentIndex()==1:
            etiket="c2"
        self.data_kmeans.append((x,y,etiket))
        self.verileri_tabloya_dok(self.data_kmeans,self.k_means_tbl_data)
    @QtCore.pyqtSignature("bool")
    def on_t2_pb_kmeans_clicked(self):
        """Run k-means on the collected points and show the result plot."""
        kmeans.hesapla(self.data_kmeans)
        self.w,self.h=self.t2_gv_sonuc.width(),self.t2_gv_sonuc.height()
        self.t2_gv_sonuc.setScene(self.show_image("./resource/kmeans_son.png",self.w,self.h))
#-------------------- Button Click olaylari ---- SON --------------------------
|
Ruila/PythonCrwalerMarathon_Day25 | scrapy_demo/spiders/ettoday.py | import scrapy
from scrapy_demo.items import ScrapyDemoItem
class EttodaySpider(scrapy.Spider):
    """Scrapes every paragraph text node from two fixed ETtoday articles."""
    name = 'ettoday'
    allowed_domains = ['www.ettoday.net']
    start_urls = ['https://www.ettoday.net/news/20201004/1824032.html','https://www.ettoday.net/news/20210120/1902773.html']
    def parse(self, response):
        """Yield one ScrapyDemoItem per <p> text node on the page."""
        for v in response.css('p::text'):
            # Fix: create a fresh item per paragraph — the original reused a
            # single mutable item across every yield, so buffered consumers
            # would all see the last paragraph.
            item = ScrapyDemoItem()
            item['text'] = v.extract()
            yield item
|
Omoshirokunai/holmes | ela.py | <filename>ela.py
"""
error level analysis
script that displays the output of ela on an image gotten from holmes.py
"""
from PIL import Image, ImageChops, ImageEnhance
import sys, os.path
import start_screen
import streamlit as st
def convert_to_ela_image(path, quality):
    """Return the Error-Level-Analysis image for the file at ``path``.

    The image is re-saved as JPEG at ``quality``, the pixel-wise difference
    against the original is taken, and its brightness is scaled so the
    maximum difference maps to 255.

    Note: the ``.resaved.jpg`` side file is left on disk (as before).
    """
    # Fix: os.path.splitext instead of split('.')[0], which truncated any
    # path containing a dot in a directory name or multi-dot filename.
    base, _ = os.path.splitext(path)
    resaved_filename = base + '.resaved.jpg'
    im = Image.open(path).convert('RGB')
    im.save(resaved_filename, 'JPEG', quality=quality)
    resaved_im = Image.open(resaved_filename)
    ela_im = ImageChops.difference(resaved_im, im)
    extrema = ela_im.getextrema()
    max_diff = max([ex[1] for ex in extrema])
    if max_diff == 0:
        max_diff = 1  # avoid division by zero for identical images
    scale = 255.0 / max_diff
    ela_im = ImageEnhance.Brightness(ela_im).enhance(scale)
    return ela_im
def app():
    """Streamlit ELA page: quality slider on the left, ELA image on the right."""
    try:
        col1, col2 = st.beta_columns((1,2))
        with col1:
            # JPEG re-save quality used for the difference image.
            value = st.slider("quality",1,255,95)
            p = convert_to_ela_image(start_screen.x,value)
            # p = convert_to_ela_image(metadata.im,value)
            extrema = p.getextrema()
            max_diff = max([ex[1] for ex in extrema])
            st.write("Maximum difference was %d" % (max_diff))
        with col2:
            st.image(p,use_column_width=True)
    except AttributeError:
        # start_screen.x is unset until the user picks an image.
        st.error("no image selected")
|
Omoshirokunai/holmes | quantization_table.py | """
Quantiztion table viewer
this is a script that extracts chrominance and luminace quantization tables of an image
then from a list of available tables we can deduce the software used to export the image this can come in handy in cases where
metadata has also been tampered with or is not available.
"""
import streamlit as st
import jpegio as jio
import start_screen
def app():
    """Streamlit page: show the JPEG luminance and chrominance quantization
    tables side by side (for fingerprinting the exporting software)."""
    try:
        jpeg = jio.read(start_screen.x)
        col1, col2 = st.beta_columns(2)
        # coef_array = jpeg.coef_arrays[0]
        with col1:
            quant_tbluminace = jpeg.quant_tables[0]
            st.write("luminance",quant_tbluminace)
        with col2:
            quant_tbcrominace = jpeg.quant_tables[1]
            st.write("Chrominace",quant_tbcrominace)
        st.write("Matches")
    except AttributeError:
        # start_screen.x is unset until the user picks an image.
        st.error("no image selected")
|
Omoshirokunai/holmes | start_screen.py | <filename>start_screen.py
import os
import easygui as g
import streamlit as st
from PIL import Image
currdir = os.getcwd()  # start the file dialog in the current working directory
title = 'Choose your image'  # NOTE(review): unused in this module — confirm before removing
def browseforimage():
    """Open a file dialog and return the chosen image path.

    Returns the path when it has a valid image extension (.png/.jpg/.bmp,
    case-insensitive), otherwise prints a message and returns None
    (including when the dialog is cancelled).
    """
    valid_images = (".png", ".jpg", ".bmp")
    try:
        filename = g.fileopenbox(title="pick image", filetypes=[['.png', ".jpg", ".bmp", "Images"]], default=currdir)
        # Fix: compare the real extension case-insensitively instead of the
        # raw last four characters of the path.
        if os.path.splitext(filename)[1].lower() in valid_images:
            print("file: %s" % filename)
            return filename
        raise Exception("input a valid image")
    except (TypeError, AttributeError):
        # Dialog cancelled: fileopenbox returned None.
        print("Please select a valid image")
    except Exception as e:
        print(e)
    # Note: the original's `while True` loop always broke after one pass, so
    # a single attempt (as here) preserves behavior without dead scaffolding.
def app():
    """Streamlit landing page: pick an image and preview it with its path."""
    st.title("Welcome :smiley_cat:")
    if(st.button("Open an Image")):
        # The chosen path is stored as a module-level global so the other
        # pages (ela/metadata/quantization) can read start_screen.x.
        global x ## lord forgive me for i have sinned
        x = browseforimage()
    try:
        img = Image.open(x)
        col1,col2 = st.beta_columns((1,2))
        with col1:
            # NOTE(review): use_column_height is not a documented st.image
            # kwarg — presumably ignored; confirm.
            st.image(img,use_column_width=True,use_column_height=True)
        with col2:
            st.write(x)
    except (NameError,AttributeError):
        # x undefined (nothing picked yet) or browseforimage returned None.
        st.info("Images must be in ('jpg','png', or '.bmp') formats")
|
Omoshirokunai/holmes | metadata.py | <filename>metadata.py
"""
metadata viewer
This script extracts the exif metadata from the image in main.py and displays it in a streamlit dataframe
"""
import streamlit as st
from skimage.io import imread
import pandas as pd
import exifview
import start_screen
def app():
    """Streamlit page: image preview next to a table of its EXIF metadata."""
    try:
        im = str(start_screen.x)
        img = imread(im)
        col1, col2 = st.beta_columns((1,2))
        col1.header("Image")
        col1.image(img, use_column_width=True)
        with col2:
            col2.header("Image Metadata")
            p = exifview.exif_meta(im)
            if p:
                df = pd.DataFrame(list(p.items()),columns = ['exif','values'])
                st.dataframe(df,900,500)
            else:
                st.error("Sorry No Exif Found :crying_cat_face:")
    except AttributeError:
        # start_screen.x is unset until the user picks an image.
        st.error("no image selected")
|
Omoshirokunai/holmes | exifview.py | <reponame>Omoshirokunai/holmes
import sys
from PIL import Image
from PIL.ExifTags import TAGS
"""
Get exif metadata of an image.
function that takes an image path as a str and returns a dictionary containing the image's exif metadata
Args:
filepath: Get metadata from this file.
Returns:
Extracted exif metadata
Raises:
IOError: File could not be read.
"""
def exif_meta(image):
    """Return a {tag_name: value} dict of the image's EXIF metadata.

    ``image`` is a filesystem path. Returns an empty dict when the file
    cannot be opened (the caller treats a falsy result as "no EXIF").
    Byte values are stringified so the result is safely displayable.
    """
    try:
        image = Image.open(image)
    except (AttributeError, IOError) as e:
        print(e)
        # Fix: the original fell through after the failed open and then
        # called .getexif() on the path string, raising AttributeError.
        return {}
    exif = image.getexif()
    meta = {}
    for tagID in exif:
        # Map the numeric tag ID to its EXIF name when known.
        tag = TAGS.get(tagID, tagID)
        data = exif.get(tagID)
        if isinstance(data, bytes):
            data = str(data)
        meta[tag] = data
    return meta
|
Omoshirokunai/holmes | holmes.py | """
holmes main script
This the main streamlit script that contains page navigation and image to be processed
"""
import metadata
import quantization_table
import streamlit as st
import ela
import os
import start_screen
import easygui as g
from skimage.io import imread
import pandas as pd
import exifview
# st.set_page_config(page_title='HOLMES', layout = "wide", initial_sidebar_state = 'auto')
# Page registry: sidebar label -> module exposing an app() entry point.
PAGES = {
    "welcome": start_screen,
    "EXIF data": metadata,
    "Quantization Tables":quantization_table,
    "Error Level Analysis": ela
}
st.sidebar.title('Navigation')
selection = st.sidebar.selectbox("Go to", list(PAGES.keys()),index=0)
page = PAGES[selection]
if st.sidebar.button("Open Image"):
    start_screen.x = start_screen.browseforimage()
# side bar to load pages
if PAGES[selection]:
    # NOTE(review): format("","") evaluates to "" — presumably meant as the
    # slider's format string; confirm.
    w = st.sidebar.slider("zoom",100,400,300,100,format("",""))
    try:
        img= imread(start_screen.x)
        if PAGES[selection] != start_screen:
            st.sidebar.image(img,width=w)
            st.title("{}".format(selection))
            st.write("\n")
    except Exception:
        # Fix: was a bare `except:`, which also swallowed KeyboardInterrupt
        # and SystemExit. Best-effort: no image selected yet just skips the
        # sidebar preview.
        pass
    finally:
        page.app()
|
shatiilrahman/Machine-Learning | _apachespark_online_installation_template.py | <filename>_apachespark_online_installation_template.py
# -*- coding: utf-8 -*-
"""_ApacheSpark_online_installation_Template
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1pht4oqcZBSGH9dXJe7VsykROTXyGWByi
"""
#Apache_Spark
# Colab/IPython cell: install Java 8, Spark 2.4.5 and findspark in the VM.
!apt-get install openjdk-8-jdk-headless -qq > /dev/null
!wget -q https://archive.apache.org/dist/spark/spark-2.4.5/spark-2.4.5-bin-hadoop2.6.tgz
!tar xvf spark-2.4.5-bin-hadoop2.6.tgz
!pip install -q findspark
import os
# Point findspark at the freshly unpacked Java/Spark installations.
os.environ["JAVA_HOME"] = "/usr/lib/jvm/java-8-openjdk-amd64"
os.environ["SPARK_HOME"] = "/content/spark-2.4.5-bin-hadoop2.6"
import findspark
findspark.init()
from pyspark.sql import SparkSession
spark = SparkSession.builder.getOrCreate()
from pyspark import SparkContext, SparkConf
from pyspark.sql import SQLContext, SparkSession
from pyspark.sql.types import StructType, StructField, DoubleType, IntegerType, StringType
# Local-mode Spark context using all available cores.
sc = SparkContext.getOrCreate(SparkConf().setMaster("local[*]"))
from pyspark.sql import SparkSession
spark = SparkSession \
    .builder \
    .getOrCreate()
#Total Code
# NOTE(review): this section duplicates the setup above verbatim —
# presumably kept as a single copy-paste template cell; confirm.
!apt-get install openjdk-8-jdk-headless -qq > /dev/null
!wget -q https://archive.apache.org/dist/spark/spark-2.4.5/spark-2.4.5-bin-hadoop2.6.tgz
!tar xvf spark-2.4.5-bin-hadoop2.6.tgz
!pip install -q findspark
import os
os.environ["JAVA_HOME"] = "/usr/lib/jvm/java-8-openjdk-amd64"
os.environ["SPARK_HOME"] = "/content/spark-2.4.5-bin-hadoop2.6"
import findspark
findspark.init()
from pyspark.sql import SparkSession
spark = SparkSession.builder.getOrCreate()
from pyspark import SparkContext, SparkConf
from pyspark.sql import SQLContext, SparkSession
from pyspark.sql.types import StructType, StructField, DoubleType, IntegerType, StringType
sc = SparkContext.getOrCreate(SparkConf().setMaster("local[*]"))
from pyspark.sql import SparkSession
spark = SparkSession \
    .builder \
    .getOrCreate()
#systemML
# Install the SystemML snapshot used by the course and verify its version.
!pip install https://github.com/IBM/coursera/blob/master/systemml-1.3.0-SNAPSHOT-python.tar.gz?raw=true
!mkdir -p /home/dsxuser/work/systemml
from systemml import MLContext, dml
ml = MLContext(spark)
ml.setConfigProperty("sysml.localtmpdir", "mkdir /home/dsxuser/work/systemml")
print(ml.version())
if not ml.version() == '1.3.0-SNAPSHOT':
    raise ValueError('please upgrade to SystemML 1.3.0, or restart your Kernel (Kernel->Restart & Clear Output)')
|
pkakhandiki277/DiagnoX | data_loader.py | <filename>data_loader.py
from glob import glob
import os
import xml.etree.ElementTree as ET
import random
import cv2
class Dataset(object):
    """Loads positive/negative image crops from PASCAL-VOC style XML
    annotations plus matching PNG images.

    Positive samples are the annotated bounding boxes whose class name is in
    ``positive_classes``; negative samples are random crops that avoid the
    positive boxes.
    """

    def __init__(self, xmls_path, images_path, positive_classes):
        self.xml_files = glob(os.path.join(xmls_path, "*.xml"))
        self.image_files = glob(os.path.join(images_path, "*.png"))
        # Map image basename (without extension) -> full path for lookup.
        self.image_path_map = {os.path.basename(p).replace(".png", ''): p for p in self.image_files}
        self.positive_classes = positive_classes

    def get_positive_data(self):
        """Return (labels, images) for every annotated positive box."""
        images = []
        labels = []
        for xml_fn in self.xml_files:
            info = self.get_info_from(xml_fn)
            base_fn = os.path.basename(xml_fn).replace(".xml", '')
            img_data = self.get_image_slices(self.image_path_map[base_fn], info)
            for i, img in enumerate(img_data):
                images.append(img)
                labels.append(info[i][0])
        return labels, images

    def get_negative_data(self, size=(300, 300)):
        """Return (labels, images): one random crop of ``size`` per annotated
        image that avoids the positive boxes; every label is 'negative'."""
        images = []
        labels = []
        for xml_fn in self.xml_files:
            info = self.get_info_from(xml_fn)
            if not info:
                continue
            positive_zones = [x[2:] for x in info]
            x1, y1, x2, y2 = self.calculate_negative_offsets(info[0][1], size, positive_zones)
            # Fix: removed the duplicated `base_fn = base_fn = ...` assignment.
            base_fn = os.path.basename(xml_fn).replace(".xml", '')
            img_slice = self.get_image_slices(self.image_path_map[base_fn], [['', '', x1, y1, x2, y2]])
            images.extend(img_slice)
            labels.append('negative')
        return labels, images

    def calculate_negative_offsets(self, img_size, slice_size, positive_zones):
        """Pick a random (x1, y1, x2, y2) crop that overlaps no positive zone.

        Fixes over the original:
        * the recursive retry's return value was discarded, so an overlapping
          crop was returned anyway — retries now take effect;
        * the y-axis separation test was inverted;
        * unbounded recursion replaced by a bounded retry loop.
        NOTE(review): the origin may fall up to a full dimension from the
        image edge (randint upper bound is inclusive of the dimension), so
        the crop can extend past the image; the short/empty slice is filtered
        downstream — confirm that is intended.
        """
        for _ in range(100):
            x1 = random.randint(0, img_size[0])
            y1 = random.randint(0, img_size[1])
            x2 = x1 + slice_size[0]
            y2 = y1 + slice_size[1]
            overlapping = False
            for pzone in positive_zones:
                if x1 > pzone[2] or pzone[0] > x2:
                    continue  # separated on x: no overlap with this zone
                if y1 > pzone[3] or pzone[1] > y2:
                    continue  # separated on y: no overlap with this zone
                overlapping = True
                break
            if not overlapping:
                return x1, y1, x2, y2
        # Give up after 100 attempts (e.g. zones cover the whole image) and
        # return the last candidate, as the original effectively did.
        return x1, y1, x2, y2

    def get_info_from(self, xml_filepath):
        """Parse one VOC XML file and return a list of
        (name, (width, height), xmin, ymin, xmax, ymax) for positive boxes."""
        tree = ET.parse(xml_filepath)
        root = tree.getroot()
        data = []
        size = root.find("size")
        # Fix: width/height were each read from the other's tag (swapped).
        width = int(size.find('width').text)
        height = int(size.find('height').text)
        for node in root.iterfind("object"):
            name = node.find('name').text
            bndbox = node.find("bndbox")
            xmin = bndbox.find("xmin").text
            xmax = bndbox.find("xmax").text
            ymax = bndbox.find("ymax").text
            ymin = bndbox.find("ymin").text
            if name in self.positive_classes:
                data.append((name, (width, height), int(xmin), int(ymin), int(xmax), int(ymax)))
        return data

    def get_image_slices(self, img_filepath, data):
        """Crop the image at every (…, x1, y1, x2, y2) entry of ``data``.
        Returns [] when the image cannot be read."""
        img = cv2.imread(img_filepath)
        if img is None:
            print("Cannot open {}".format(img_filepath))
            return []
        img_data = []
        for _, _, x1, y1, x2, y2 in data:
            img_data.append(img[int(y1):int(y2), int(x1):int(x2)])
        return img_data

    def load_data(self):
        """Return combined (labels, images): positives first, then negatives."""
        pos_labels, pos_images = self.get_positive_data()
        neg_labels, neg_images = self.get_negative_data()
        return pos_labels + neg_labels, pos_images + neg_images
|
pkakhandiki277/DiagnoX | train.py | from sklearn.externals import joblib
from sklearn.svm import LinearSVC
from hog import HOG
from data_loader import Dataset
import argparse
from skimage.transform import rescale, resize, downscale_local_mean
from skimage.color import rgb2gray
from sklearn.model_selection import train_test_split
from sklearn.metrics import confusion_matrix, accuracy_score
import matplotlib.pyplot as plt
import itertools
import numpy as np
#==========================================================================================================================================
#FILL THE FOLLOWING VARIABLES WITH YOUR DIRECTORY/INFO
# Path passed to Dataset() for both the XML annotations and the PNG images.
myDirectory = '[FILL THIS IN]'
def plot_confusion_matrix(cm, classes=['inflamed aorta', 'negative'],
                          normalize=False,
                          title='Confusion matrix',
                          cmap=plt.cm.Blues):
    """
    This function prints and plots the confusion matrix.
    Normalization can be applied by setting `normalize=True`.
    """
    if normalize:
        # Row-normalize so each true class sums to 1.
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
        print("Normalized confusion matrix")
    else:
        print('Confusion matrix, without normalization')
    print(cm)
    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title)
    plt.colorbar()
    tick_positions = np.arange(len(classes))
    plt.xticks(tick_positions, classes, rotation=45)
    plt.yticks(tick_positions, classes)
    cell_fmt = '.2f' if normalize else 'd'
    threshold = cm.max() / 2.
    n_rows, n_cols = cm.shape
    for row in range(n_rows):
        for col in range(n_cols):
            # White text on dark cells, black on light ones.
            cell_color = "white" if cm[row, col] > threshold else "black"
            plt.text(col, row, format(cm[row, col], cell_fmt),
                     horizontalalignment="center",
                     color=cell_color)
    plt.tight_layout()
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
# CLI: optional output path for the (not-yet-saved) model.
ap = argparse.ArgumentParser()
ap.add_argument("-m", "--model", default="svm.pickle",
                help="path to where the model will be stored")
args = vars(ap.parse_args())
print("Collecting annotations ...")
#CHANGE 'inflammed aorta' to the disease which you are working to diagnose
d = Dataset(myDirectory,
            myDirectory, ['inflamed aorta'])
labels, images = d.load_data()
print("Gathered {} image slices".format(len(images)))
data = []
labels_new = []
hog = HOG(orientations=19, pixelsPerCell=(8, 8),
          cellsPerBlock=(3, 3), transform=True)
for i, image in enumerate(images):
    if i % 100 == 0:
        print("Gathering features, {} of {}".format(i, len(images)))
    # Skip degenerate (empty) crops, resize to a fixed shape, and extract
    # a grayscale HOG feature vector per slice.
    if 0 not in image.shape:
        image_resized = resize(image, (291, 218), anti_aliasing=True)
        hist = hog.describe(rgb2gray(image_resized))
        data.append(hist)
        labels_new.append(labels[i])
X_train, X_test, y_train, y_test = train_test_split(data, labels_new, random_state=0)
print("Training on {} images".format(len(X_train)))
print("Testing on {} images".format(len(X_test)))
clf = LinearSVC()
clf.fit(X_train, y_train)
y_pred = clf.predict(X_test)
# Compute confusion matrix
cnf_matrix = confusion_matrix(y_test, y_pred)
np.set_printoptions(precision=2)
# Plot non-normalized confusion matrix
plt.figure()
plot_confusion_matrix(cnf_matrix,
                      title='Confusion matrix, without normalization')
plt.show()
print("Accuracy Score: {:.2f}".format(accuracy_score(y_test, y_pred)))
|
pkakhandiki277/DiagnoX | hog.py | <filename>hog.py
from skimage import feature
class HOG:
    """Thin wrapper around skimage.feature.hog with stored parameters."""

    def __init__(self, orientations=9, pixelsPerCell=(8, 8), cellsPerBlock=(3, 3), transform=False):
        # Fix: the original overwrote the cellsPerBlock argument with a
        # hard-coded (3, 3), silently ignoring the caller's value.
        self.orientations = orientations
        self.pixelsPerCell = pixelsPerCell
        self.cellsPerBlock = cellsPerBlock
        self.transform = transform

    def describe(self, image):
        """Return the HOG feature vector for a grayscale image.

        Fix: the original passed ``cellsPerBlock`` as ``pixels_per_cell``
        and never forwarded ``cells_per_block`` at all; both parameters are
        now wired to the correct keyword arguments.
        """
        hist = feature.hog(image, orientations=self.orientations,
                           pixels_per_cell=self.pixelsPerCell,
                           cells_per_block=self.cellsPerBlock,
                           transform_sqrt=self.transform, block_norm='L2-Hys')
        return hist
|
YuMurata/Tournament | Tournament/tournament.py | from enum import Enum, auto
from random import sample
import logging
import typing
from .player import TwoPlayer, PlayerList
class TournamentException(Exception):
    """Base class for all tournament errors."""
    pass
class RoundException(TournamentException):
    """Raised when a new round is started while players are still unpaired."""
    pass
class MatchException(TournamentException):
    """Raised when new_match() is called while a match is already pending."""
    pass
class CompeteException(TournamentException):
    """Raised when compete() is called without a pending match."""
    pass
class CompleteException(TournamentException):
    """Raised when the tournament is already over."""
    pass
class GameWin(Enum):
    """Outcome of a single match, passed to Tournament.compete()."""
    LEFT = auto()       # left player advances
    RIGHT = auto()      # right player advances
    BOTH_WIN = auto()   # both players advance (both score up)
    BOTH_LOSE = auto()  # neither player advances
class Tournament:
    """Knockout-style tournament over ``player_list``.

    Each round pairs the players in random order via new_match(); the caller
    reports each outcome with compete(GameWin).  Winners have their score
    doubled and advance.  The game completes when at most one player remains
    or when a round ends with as many players as it started with (e.g. every
    match was BOTH_WIN).
    """
    @classmethod
    def make_player_index_list(cls, player_num: int) -> (list, list):
        # Shuffled index list for the current round plus an empty next-round
        # list.  NOTE(review): __init__ duplicates this logic inline instead
        # of calling this helper — confirm whether it is still needed.
        current_player_index_list = list(range(player_num))
        current_player_index_list = \
            sample(current_player_index_list, player_num)
        next_player_index_list = []
        return current_player_index_list, next_player_index_list
    def __init__(self, player_list: PlayerList,
                 *, handler: logging.StreamHandler = None):
        # Namespaced logger; callers may attach their own handler.
        self.logger = logging.getLogger('Tournament')
        self.logger.setLevel(logging.INFO)
        if handler is not None:
            self.logger.addHandler(handler)
        self.player_list = player_list
        player_num = len(player_list)
        # Randomize play order for the first round.
        self.current_player_index_list = list(range(player_num))
        self.current_player_index_list = \
            sample(self.current_player_index_list, player_num)
        self.next_player_index_list = []
        # Player count at the start of the previous round, used by _new_round
        # to detect a round in which nobody was eliminated.
        self.old_player_num = player_num
        self.is_match = False      # True while a match awaits compete()
        self.is_complete = False   # True once the tournament is over
        self.round_count = 1
        self.match_count = 0
        self.logger.debug('init')
        self.logger.info(f'--- game start ---')
        self._log_start_round()
    def _log_start_round(self):
        # Log round number, the play order, and every player's current score.
        self.logger.info(f'start {self.round_count}th round')
        self.logger.info(
            f'--- current player index: {self.current_player_index_list} ---')
        score_list = [player.score for player in self.player_list]
        self.logger.info(f'--- score: {score_list} ---')
    def _new_round(self):
        """Promote the winners (plus any bye player) into a reshuffled round."""
        if len(self.current_player_index_list) >= 2:
            raise RoundException('invalid round')
        # A leftover unpaired player gets a bye: score up and advance.
        for index in self.current_player_index_list:
            self.player_list[index].score_up()
        self.next_player_index_list.extend(self.current_player_index_list)
        self.current_player_index_list = \
            sample(self.next_player_index_list,
                   len(self.next_player_index_list))
        self.next_player_index_list.clear()
        current_player_num = len(self.current_player_index_list)
        # Complete when nobody was eliminated last round, or when fewer than
        # two players remain.
        is_no_change_player_num = current_player_num == self.old_player_num
        is_no_player = current_player_num < 2
        self.is_complete = is_no_change_player_num or is_no_player
        self.old_player_num = len(self.current_player_index_list)
        self.round_count += 1
        self.match_count = 0
        self._log_start_round()
    def new_match(self) -> (bool, TwoPlayer):
        """Return (is_complete, (left, right)) for the next pairing.

        Returns (True, None) when starting the next round completes the game.
        """
        if self.is_match:
            raise MatchException('match is already ready')
        if self.is_complete:
            raise CompleteException('game is already over')
        self.logger.info(f'--- new match start ---')
        if len(self.current_player_index_list) >= 2:
            self.left_player_index = self.current_player_index_list.pop()
            self.right_player_index = self.current_player_index_list.pop()
            self.is_match = True
            self.match_count += 1
            left_player = \
                self.player_list[self.left_player_index]
            right_player = \
                self.player_list[self.right_player_index]
            self.logger.info(
                f'--- left player index: {self.left_player_index} ---')
            self.logger.info(
                f'--- right player index: {self.right_player_index} ---')
            return (False, (left_player, right_player))
        else:
            # Not enough players left in this round: roll over to the next
            # round and recurse (or report completion).
            self._new_round()
            if self.is_complete:
                return (True, None)
            else:
                return self.new_match()
    def compete(self, winner: GameWin) -> typing.NoReturn:
        """Record the pending match's outcome and advance the winner(s)."""
        if not self.is_match:
            raise CompeteException('match is not ready yet')
        if self.is_complete:
            raise CompleteException('game is already over')
        def _win(winner_index: int):
            # Winner's score doubles and the player advances to next round.
            self.player_list[winner_index].score_up()
            self.next_player_index_list.append(winner_index)
        if winner == GameWin.BOTH_WIN:
            _win(self.left_player_index)
            _win(self.right_player_index)
        elif winner == GameWin.LEFT:
            _win(self.left_player_index)
        elif winner == GameWin.RIGHT:
            _win(self.right_player_index)
        self.logger.info(f'--- winner: {winner.name} ---')
        self.is_match = False
        # The game ends immediately on a sole champion or when everyone lost.
        is_no_current_player = len(self.current_player_index_list) == 0
        is_only_one_winner = len(self.next_player_index_list) == 1
        is_championship = is_no_current_player and is_only_one_winner
        is_no_player = is_no_current_player and len(
            self.next_player_index_list) == 0
        if is_championship or is_no_player:
            self.is_complete = True
    @property
    def get_match_num(self):
        # Matches remaining: pairs left in this round plus the winners queued
        # for the next round.
        current_match_num = len(self.current_player_index_list)-1
        next_match_num = len(self.next_player_index_list)
        return current_match_num+next_match_num
|
YuMurata/Tournament | Tournament/player.py | <filename>Tournament/player.py
from abc import ABCMeta, abstractclassmethod, abstractmethod
import typing
class Player(metaclass=ABCMeta):
    '''
    Abstract tournament participant.

    Subclasses implement ``decode`` so the tournament can turn ``param``
    into something comparable/displayable.

    Attributes
    ----------
    param : typing.Any
        Opaque payload identifying/configuring the player.
    score : int
        Starts at 1 and doubles on every win (see ``score_up``).
    '''
    def __init__(self, param: typing.Any, score: int = 1):
        '''
        Parameters
        ----------
        param : Any
        score : int = 1
        '''
        self.param = param
        self.score = score

    # Fix: was @abstractclassmethod — deprecated, and decode takes ``self``
    # (it is an instance method, not a classmethod).
    @abstractmethod
    def decode(self) -> typing.Any:
        '''Decode ``param`` so that players can be compared.'''

    def score_up(self) -> None:
        # Fix: the annotation was typing.NoReturn, which means "never
        # returns"; this method simply returns None.
        self.score *= 2

    def to_dict(self) -> dict:
        '''Return a plain-dict view of the player state.'''
        return {'score': self.score, 'param': self.param}
# Convenience aliases used in tournament.py signatures.
TwoPlayer = typing.Tuple[Player, Player]
PlayerList = typing.List[Player]
|
YuMurata/Tournament | Tournament/__init__.py | from .tournament import Tournament, TournamentException
from .tournament import GameWin, CompleteException
from .player import Player, PlayerList
|
laulin/pihole-blacklist | get_top_1000000.py | from parser import get_list
import re
from pprint import pprint
from multiprocessing import Pool
import multiprocessing
def scrap_stuffgate(index):
    """Fetch one stuffgate top-sites page and return (index, domains).

    The index is returned alongside the scraped domain list so the caller
    can re-sort results produced out of order by the process pool.
    Retries the fetch until it succeeds (the original best-effort behavior).
    """
    url = "http://stuffgate.com/stuff/website/top-{}-sites".format(index)
    print(url)
    while True:
        try:
            page = get_list(url)
            break
        except Exception:
            # Fix: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit and made the retry loop
            # un-interruptible.
            pass
    domains = re.findall(r'<td><a href=.+?target=.+>(.+)</a></td>', page)
    return (index, domains)
if __name__ == "__main__":
    # 4 workers per CPU: the job is network-bound, not CPU-bound.
    p = Pool(multiprocessing.cpu_count()*4)
    # Pages come back out of order; (index, domains) tuples let us re-sort.
    output = p.map(scrap_stuffgate, range(1000, 1001000, 1000))
    output = sorted(output)
    output = map(lambda x: x[1], output)
    # Flatten the per-page lists into one ranked domain list.
    output = [item for sublist in output for item in sublist]
    with open("top_1000000.txt", "w") as f:
        f.write("\n".join(output))
laulin/pihole-blacklist | get_blacklists.py | import yaml
from pprint import pprint
from parser import Parser
from multiprocessing import Pool
from glob import glob
from pathlib import Path
def get_blacklist(name, parameters):
    """Fetch one configured blacklist and write it to
    blacklists/<name>.blacklist.txt."""
    print(name)
    domain_list = Parser(parameters["url"], parameters["format"]).parse()
    Path("blacklists").mkdir(parents=True, exist_ok=True)
    target = "blacklists/" + name + ".blacklist.txt"
    with open(target, "w") as out:
        out.write("\n".join(domain_list))
if __name__ == "__main__":
    with open("config.yml") as f:
        config = yaml.load(f, Loader=yaml.FullLoader)
    # Download every configured blacklist in parallel.
    p = Pool()
    p.starmap(get_blacklist, config.items())
    # Merge all downloaded lists plus the local overrides file.
    tmp = []
    for b in glob("blacklists/*.txt"):
        with open(b, "r") as input_file:
            data=input_file.read()
        lines = data.splitlines()
        tmp.extend(lines)
    with open("local.txt", "r") as input_file:
        data = input_file.read()
    lines = data.splitlines()
    tmp.extend(lines)
    # Deduplicate and sort before writing the combined blacklist.
    tmp = sorted(set(tmp))
    with open("blacklist.txt", "w") as output_file:
        data = "\n".join(tmp)
        output_file.write(data)
|
laulin/pihole-blacklist | parser.py | import re
import requests
import yaml
import tldextract
from urllib.parse import urlparse
from pprint import pprint
def get_list(url):
    """GET *url* and return the decoded response body as text."""
    return requests.get(url).text
class Parser:
    """Download a blacklist and extract the domains it contains.

    Parameters
    ----------
    url : str
        Location of the raw blacklist.
    list_type : str
        One of ``"ip domain"`` (hosts-file style ``IP<ws>domain`` lines),
        ``"domain"`` (one domain per line) or ``"url"`` (one URL per line).
    """

    def __init__(self, url, list_type):
        self._url = url
        self._list_type = list_type

    def parse(self, _get_list=get_list):
        """Fetch and parse the list; return a tuple of valid domains.

        ``_get_list`` is injectable for testing.

        Raises
        ------
        Exception
            If ``list_type`` is not one of the supported formats.
        """
        text = _get_list(self._url)
        text = self.sanitize(text)
        if self._list_type == "ip domain":
            raw_domains = self.extract_ip_domain(text)
            return self.filter_valid_domains(raw_domains)
        if self._list_type == "domain":
            raw_domains = self.extract_domain(text)
            return self.filter_valid_domains(raw_domains)
        if self._list_type == "url":
            raw_domains = self.extract_url(text)
            return self.filter_valid_domains(raw_domains)
        raise Exception("list_type is invalid")

    def sanitize(self, blacklist):
        """Strip '#' comments and collapse runs of blank lines."""
        # Raw strings used for all regex patterns (idiomatic; behaviour
        # unchanged for these patterns).
        output = re.sub(r"#.*?\n", "", blacklist)
        output = re.sub(r"\n+", "\n", output)
        return output

    def extract_ip_domain(self, blacklist):
        """Return the second column of ``IP<whitespace>domain`` lines."""
        return re.findall(r".+?[\t ]+(.+)", blacklist)

    def extract_domain(self, blacklist):
        """Return each non-empty line, leading whitespace stripped."""
        return re.findall(r"[\t ]*(.+)", blacklist)

    def extract_url(self, blacklist):
        """Return the host part of each URL line."""
        return [urlparse(url).netloc for url in blacklist.splitlines()]

    def filter_valid_domains(self, blacklist):
        """Keep only entries with a recognised public suffix (tldextract)."""
        def predicate(candidate):
            return tldextract.extract(candidate).suffix != ""
        return tuple(filter(predicate, blacklist))
|
junhuih/DFP-Project | twitter_comment.py | <filename>twitter_comment.py
# -*- coding: utf-8 -*-
"""
@author: <NAME>, <NAME>, <NAME>, <NAME>
"""
import requests
# credential
bearer_token = "REPLACE_WITH_YOUR_OWN"
# Get the top twitter comment from twitter through API
def get_twitter_comments(school_name):
    """Print up to the three most recent tweets mentioning *school_name*.

    Uses the Twitter v2 recent-search API with the module-level
    ``bearer_token``.  Prints a fallback message when no tweets are found
    and a generic error message when the request itself fails.
    """
    try:
        url = "https://api.twitter.com/2/tweets/search/recent?query="
        url = url + school_name + '&tweet.fields=created_at'
        headers = {"Authorization": "Bearer {}".format(bearer_token)}
        response = requests.request("GET", url, headers=headers)
        # NOTE(review): requests never returns None from a completed call;
        # the guard is kept for parity, but the handler below covers the
        # real "no data" cases.
        if response is None:
            print("No tweets for "+school_name)
        else:
            try:
                res_json = response.json()
                # Missing 'data' raises KeyError, fewer than three tweets
                # raises IndexError, a non-JSON body raises ValueError --
                # all mean "nothing to show" (matching prior behaviour of
                # printing whatever tweets exist before the failure).
                for tweet_index in range(3):
                    print(res_json['data'][tweet_index]['text'])
            except (KeyError, IndexError, ValueError):
                print("No tweets for "+school_name)
    except Exception:
        print("Error fetching twitter comments! Please check your internet!")
|
junhuih/DFP-Project | helpers.py | <reponame>junhuih/DFP-Project
# -*- coding: utf-8 -*-
"""
@author: <NAME>, <NAME>, <NAME>, <NAME>
"""
import pandas as pd
# USPS two-letter abbreviations for the 50 U.S. states (no DC/territories).
all_states = [
    "AL",
    "AK",
    "AZ",
    "AR",
    "CA",
    "CO",
    "CT",
    "DE",
    "FL",
    "GA",
    "HI",
    "ID",
    "IL",
    "IN",
    "IA",
    "KS",
    "KY",
    "LA",
    "ME",
    "MD",
    "MA",
    "MI",
    "MN",
    "MS",
    "MO",
    "MT",
    "NE",
    "NV",
    "NH",
    "NJ",
    "NM",
    "NY",
    "NC",
    "ND",
    "OH",
    "OK",
    "OR",
    "PA",
    "RI",
    "SC",
    "SD",
    "TN",
    "TX",
    "UT",
    "VT",
    "VA",
    "WA",
    "WV",
    "WI",
    "WY",
]
def get_input(maxInput):
    """Prompt until the user enters an integer in [1, maxInput]; return it.

    Non-numeric or out-of-range entries print an error and re-prompt.
    """
    while True:
        x = input()
        try:
            intX = int(x)
        except ValueError:
            # Fixed: catch only ValueError instead of a bare except.
            errorMessage(x)
            continue
        if 0 < intX <= maxInput:
            return intX
        errorMessage(x)
def get_states():
    """Prompt until the user enters a valid two-letter state code; return it."""
    while True:
        x = input()
        # Fixed: removed a try/bare-except around code that cannot raise
        # (string membership in a list never throws).
        if x in all_states:
            return x
        errorMessage(x)
def errorMessage(x):
    """Tell the user that *x* was not a valid input and to retry."""
    for line in (
        "==========================",
        "You've entered " + x + ", which is an invalid input.",
        "Please enter again!",
    ):
        print(line)
def exitMessage(x):
    """Print the goodbye banner.  *x* is unused; kept for interface parity."""
    print("==========================\nThank you for using college helper!")
def demoFunction():
    """Placeholder printed when a menu feature is not yet implemented."""
    banner = "=========================="
    print(banner)
    print("fill up demo function, program ends here")
    print(banner)
def read_final_data():
    """Load merged_data.xlsx, drop duplicate schools and normalise names.

    Returns the cleaned DataFrame with a fresh 0-based "Rank" column and
    school names reformatted as "<part> - <part>" around the first hyphen.
    """
    frame = pd.read_excel("merged_data.xlsx")
    frame = frame.drop_duplicates(
        subset=[
            "School Name",
            "20 Year Net ROI",
            "Total 4 Year Cost",
            "Graduation Rate",
            "Typical Years to Graduate",
        ]
    )
    frame["Rank"] = range(len(frame))

    def _normalise(name):
        # "A-B..." becomes "A - B" (anything after a second hyphen is
        # dropped, matching the original list-comprehension behaviour).
        if name.find("-") == -1:
            return name
        parts = name.split("-")
        return (parts[0] + " - " + parts[1]).strip()

    frame["School Name"] = [_normalise(s) for s in frame["School Name"]]
    return frame
|
junhuih/DFP-Project | average_stats.py | # -*- coding: utf-8 -*-
"""
@author: <NAME>, <NAME>, <NAME>, <NAME>
"""
import pandas as pd
import helpers as h
import numpy as np
# Full state/territory name -> USPS two-letter abbreviation.
us_state_to_abbrev = {
    "Alabama": "AL",
    "Alaska": "AK",
    "Arizona": "AZ",
    "Arkansas": "AR",
    "California": "CA",
    "Colorado": "CO",
    "Connecticut": "CT",
    "Delaware": "DE",
    "Florida": "FL",
    "Georgia": "GA",
    "Hawaii": "HI",
    "Idaho": "ID",
    "Illinois": "IL",
    "Indiana": "IN",
    "Iowa": "IA",
    "Kansas": "KS",
    "Kentucky": "KY",
    "Louisiana": "LA",
    "Maine": "ME",
    "Maryland": "MD",
    "Massachusetts": "MA",
    "Michigan": "MI",
    "Minnesota": "MN",
    "Mississippi": "MS",
    "Missouri": "MO",
    "Montana": "MT",
    "Nebraska": "NE",
    "Nevada": "NV",
    "New Hampshire": "NH",
    "New Jersey": "NJ",
    "New Mexico": "NM",
    "New York": "NY",
    "North Carolina": "NC",
    "North Dakota": "ND",
    "Ohio": "OH",
    "Oklahoma": "OK",
    "Oregon": "OR",
    "Pennsylvania": "PA",
    "Rhode Island": "RI",
    "South Carolina": "SC",
    "South Dakota": "SD",
    "Tennessee": "TN",
    "Texas": "TX",
    "Utah": "UT",
    "Vermont": "VT",
    "Virginia": "VA",
    "Washington": "WA",
    "West Virginia": "WV",
    "Wisconsin": "WI",
    "Wyoming": "WY",
    "District of Columbia": "DC",
    "American Samoa": "AS",
    "Guam": "GU",
    "Northern Mariana Islands": "MP",
    "Puerto Rico": "PR",
    "United States Minor Outlying Islands": "UM",
    "U.S. Virgin Islands": "VI",
}
# Invert the mapping: two-letter abbreviation -> full name.
abbrev_to_us_state = dict(map(reversed, us_state_to_abbrev.items()))
# Get the average statistics for a given state
def get_average_stats(dataframe=None):
    """Print mean ROI, cost, years-to-graduate and loan over all colleges.

    Parameters
    ----------
    dataframe : pandas.DataFrame, optional
        College data with currency columns formatted like "$1,234".
        Defaults to ``h.read_final_data()``.  Fixed: the default is now
        resolved at call time, so merely importing this module no longer
        reads the spreadsheet (the old ``dataframe=h.read_final_data()``
        default ran file I/O at import).
    """
    if dataframe is None:
        dataframe = h.read_final_data()
    parsed_rows = []
    for _, row in dataframe.iterrows():
        # Strip the leading "$" and thousands separators before parsing;
        # unparseable cells fall back to 0 (matching prior behaviour).
        roi = row["20 Year Net ROI"][1:].replace(",", "")
        cost = row["Total 4 Year Cost"][1:].replace(",", "")
        loan = row["Average Loan Amount"][1:].replace(",", "")
        try:
            roi = int(roi)
        except ValueError:
            roi = 0
        try:
            cost = int(cost)
        except ValueError:
            cost = 0
        try:
            graduate = int(row["Typical Years to Graduate"])
        except (ValueError, TypeError):
            graduate = 0
        try:
            loan = int(loan)
        except ValueError:
            loan = 0
        parsed_rows.append((roi, cost, graduate, loan))
    parsed = pd.DataFrame(parsed_rows)
    means = parsed.mean()
    print("==========================")
    print("Displaying average stats about all colleges")
    print(
        "%-30s" % "Average 20 Year Net ROI: "
        + str(np.round(means[0], 2))
    )
    print("%-30s" % "Total 4 Year Cost: " + str(np.round(means[1], 2)))
    print(
        "%-30s" % "Typical Years to Graduate: "
        + str(np.round(means[2], 2))
    )
    print("%-30s" % "Average Loan Amount: " + str(np.round(means[3], 2)))
    print()
# Funciton adapted from https://stackoverflow.com/questions/39742305/how-to-use-basemap-python-to-plot-us-with-50-states
# Draw the data based on the input, with regards to state and value
def draw_map(inputValue, title):
    """Choropleth of the lower-48 states coloured by *inputValue*.

    Parameters
    ----------
    inputValue : dict
        Maps full state names to a numeric value; every state name found
        in the st99_d00 shapefile (except DC and Puerto Rico, which are
        skipped) must be present.
    title : str
        Figure title.

    Requires the st99_d00 shapefile in the working directory and the
    third-party basemap toolkit; blocks on ``plt.show()``.
    """
    import numpy as np
    import matplotlib.pyplot as plt
    from mpl_toolkits.basemap import Basemap as Basemap
    from matplotlib.colors import rgb2hex
    from matplotlib.patches import Polygon
    from matplotlib.colors import Normalize
    from matplotlib.colorbar import ColorbarBase
    # Lambert Conformal map of lower 48 states.
    m = Basemap(
        llcrnrlon=-119,
        llcrnrlat=22,
        urcrnrlon=-64,
        urcrnrlat=49,
        projection="lcc",
        lat_1=33,
        lat_2=45,
        lon_0=-95,
    )
    shp_info = m.readshapefile("st99_d00", "states", drawbounds=True)
    # choose a color for each state based on population density.
    colors = {}
    statenames = []
    cmap = plt.cm.hot  # use 'hot' colormap
    # NOTE(review): vmax == vmin (all states equal) would divide by zero
    # in the normalisation below.
    vmin = min(inputValue.values())
    vmax = max(inputValue.values())  # set range.
    for shapedict in m.states_info:
        statename = shapedict["NAME"]
        # skip DC and Puerto Rico.
        if statename not in ["District of Columbia", "Puerto Rico"]:
            curValue = inputValue[statename]
            # calling colormap with value between 0 and 1 returns
            # rgba value. Invert color range (hot colors are high
            # population), take sqrt root to spread out colors more.
            colors[statename] = cmap(
                1 - np.sqrt((curValue - vmin) / (vmax - vmin))
            )[:4]
        # Append unconditionally so statenames stays index-aligned with
        # m.states below.
        statenames.append(statename)
    # cycle through state names, color each one.
    ax = plt.gca()  # get current axes instance
    for nshape, seg in enumerate(m.states):
        # skip DC and Puerto Rico.
        if statenames[nshape] not in ["District of Columbia", "Puerto Rico"]:
            color = rgb2hex(colors[statenames[nshape]])
            poly = Polygon(seg, facecolor=color, edgecolor=color)
            ax.add_patch(poly)
    plt.title(title)
    norm = Normalize(
        vmin=min(inputValue.values()), vmax=max(inputValue.values())
    )
    cax = plt.gcf().add_axes([0.27, 0.1, 0.5, 0.05])  # position
    cb = ColorbarBase(
        cax, cmap=cmap.reversed(), norm=norm, orientation="horizontal"
    )
    plt.show()
# Compute the return over interest and draw the map
def compute_roi_and_draw_map(dataframe=None):
    """Average the 20-year ROI per state and render it on the U.S. map.

    Fixed: the ``dataframe`` default is resolved at call time instead of
    import time, so importing this module no longer reads the spreadsheet.
    """
    if dataframe is None:
        dataframe = h.read_final_data()
    # Full state name -> (running sum, row count).
    totals = {name: (0, 0) for name in abbrev_to_us_state.values()}
    for _, row in dataframe.iterrows():
        value = row["20 Year Net ROI"][1:].replace(",", "")
        try:
            value = int(value)
        except ValueError:
            value = 0
        try:
            state = abbrev_to_us_state[(row["State"])]
            running, count = totals[state]
            totals[state] = (running + value, count + 1)
        except KeyError:
            # Unknown or missing state abbreviation: skip the row.
            pass
    average = {}
    for state, (running, count) in totals.items():
        # States with no colleges average to 0 instead of dividing by zero.
        average[state] = running / count if count else 0
    draw_map(average, "Average ROI By States ($)")
    print("Please refer to the map!")
# Compute the cost and draw the map
def compute_cost_and_draw_map(dataframe=None):
    """Average the 4-year cost per state and render it on the U.S. map.

    Fixed: the ``dataframe`` default is resolved at call time instead of
    import time, so importing this module no longer reads the spreadsheet.
    """
    if dataframe is None:
        dataframe = h.read_final_data()
    # Full state name -> (running sum, row count).
    totals = {name: (0, 0) for name in abbrev_to_us_state.values()}
    for _, row in dataframe.iterrows():
        value = row["Total 4 Year Cost"][1:].replace(",", "")
        try:
            value = int(value)
        except ValueError:
            value = 0
        try:
            state = abbrev_to_us_state[(row["State"])]
            running, count = totals[state]
            totals[state] = (running + value, count + 1)
        except KeyError:
            # Unknown or missing state abbreviation: skip the row.
            pass
    average = {}
    for state, (running, count) in totals.items():
        # States with no colleges average to 0 instead of dividing by zero.
        average[state] = running / count if count else 0
    draw_map(average, "Total 4 Year Cost ($)")
    print("Please refer to the map!")
# Compute the loan and draw the map
def compute_loan_and_draw_map(dataframe=None):
    """Average the loan amount per state and render it on the U.S. map.

    Fixed: the ``dataframe`` default is resolved at call time instead of
    import time, so importing this module no longer reads the spreadsheet.
    """
    if dataframe is None:
        dataframe = h.read_final_data()
    # Full state name -> (running sum, row count).
    totals = {name: (0, 0) for name in abbrev_to_us_state.values()}
    for _, row in dataframe.iterrows():
        value = row["Average Loan Amount"][1:].replace(",", "")
        try:
            value = int(value)
        except ValueError:
            value = 0
        try:
            state = abbrev_to_us_state[(row["State"])]
            running, count = totals[state]
            totals[state] = (running + value, count + 1)
        except KeyError:
            # Unknown or missing state abbreviation: skip the row.
            pass
    average = {}
    for state, (running, count) in totals.items():
        # States with no colleges average to 0 instead of dividing by zero.
        average[state] = running / count if count else 0
    draw_map(average, "Average Loan Amount ($)")
    print("Please refer to the map!")
if __name__ == "__main__":
    # Render all three per-state maps when run as a script.
    compute_roi_and_draw_map()
    compute_cost_and_draw_map()
    compute_loan_and_draw_map()
|
junhuih/DFP-Project | search_colleges.py | # -*- coding: utf-8 -*-
"""
@author: <NAME>, <NAME>, <NAME>, <NAME>
"""
import twitter_comment as tc
import fbi_crime_data as fbi
import pandas as pd
import matplotlib.pyplot as plt
import helpers as h
#######################Display the data#######################
def search_colleges(college, dataframe):
    """Case-insensitive substring search for *college*; print match details.

    For each school whose name contains *college*, prints location, cost
    and admissions stats (raw values from merged_data.xlsx, derived values
    from *dataframe*), fetches recent tweets via the twitter module, and
    -- when the school's state is known -- plots four FBI crime-trend
    charts for that state, stopping after the first such match.
    """
    college_name = dataframe["School Name"]
    # Reload the raw sheet for location/admissions columns that the
    # cleaned *dataframe* does not carry.
    dataset = pd.read_excel("merged_data.xlsx")
    dataset = dataset.fillna("missing")
    for i in college_name:
        if i.upper().find(college.upper()) != -1:
            print("******************" + i + "******************")
            print(
                "%-30s" % "State: "
                + dataset.loc[dataset["School Name"] == i, ["State"]].values[
                    0
                ][0]
            )
            print(
                "%-30s" % "City: "
                + dataset.loc[dataset["School Name"] == i, ["City"]].values[0][
                    0
                ]
            )
            print(
                "%-30s" % "20 Year Net ROI: "
                + dataframe.loc[
                    dataframe["School Name"] == i, ["20 Year Net ROI"]
                ].values[0][0]
            )
            print(
                "%-30s" % "Total 4 Year Cost: "
                + dataframe.loc[
                    dataframe["School Name"] == i, ["Total 4 Year Cost"]
                ].values[0][0]
            )
            print(
                "%-30s" % "Typical Years to Graduate: "
                + (
                    dataframe.loc[
                        dataframe["School Name"] == i,
                        ["Typical Years to Graduate"],
                    ].values[0][0]
                )
            )
            print(
                "%-30s" % "Average Loan Amount: "
                + str(
                    dataframe.loc[
                        dataframe["School Name"] == i, ["Average Loan Amount"]
                    ].values[0][0]
                )
            )
            print(
                "%-30s" % "Acceptance Rate: "
                + dataset.loc[
                    dataset["School Name"] == i, ["Acceptance Rate"]
                ].values[0][0]
            )
            print(
                "%-30s" % "SAT Range: "
                + dataset.loc[
                    dataset["School Name"] == i, ["SAT Range"]
                ].values[0][0]
            )
            print()
            print("The most recent Twitter Comments: ")
            # NOTE(review): "seaborn-white" was removed in matplotlib 3.6;
            # newer versions need "seaborn-v0_8-white" -- confirm target
            # matplotlib version.
            plt.style.use("seaborn-white")
            tc.get_twitter_comments(i)
            # "missing" means fillna replaced a NaN state: no crime data
            # can be plotted, so stop after this school.
            if (
                dataset.loc[dataset["School Name"] == i, ["State"]].values[0][
                    0
                ]
                == "missing"
            ):
                break
            else:
                plt.style.use("seaborn-white")
                # 2x2 grid: one subplot per crime category of interest.
                fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(8, 8))
                for x in range(2):
                    for y in range(2):
                        df = fbi.get_crime_data_of_interest(
                            x * 2 + y,
                            fbi.all_states.index(
                                dataset.loc[
                                    dataset["School Name"] == i, ["State"]
                                ].values[0][0]
                            ),
                        )
                        df = df.fillna(0)
                        if df.empty:
                            axes[x, y].plot()
                            axes[x, y].set_title(
                                "There's not enough data to plot"
                            )
                        else:
                            # Only plot the last decade of data.
                            df = df[df["data_year"] > 2009]
                            axes[x, y].plot(df.iloc[:, 1], df.iloc[:, 0])
                            axes[x, y].set_title(
                                fbi.categories_of_interest[x * 2 + y]
                                .replace("-", " ")
                                .title()
                            )
                        fig.tight_layout()
                        # axes[x, y].xticks(df['data_year'])
                        # axes[x, y].xticks(df['count'])
                plt.show()
                break
            # NOTE(review): unreachable -- both branches above break first.
            print("Crime data displayed by plot.")
        else:
            # NOTE(review): label-based lookup; assumes college_name keeps
            # a contiguous 0..n-1 index -- confirm after drop_duplicates.
            if i == college_name[len(college_name) - 1]:
                print(
                    "We can not find the "
                    + college
                    + ". Please check your input. "
                )
def search_colleges_wrapper():
    """Prompt for a college name and run the search against the merged data."""
    query = input("Please enter the college name you want to search: ")
    search_colleges(query, h.read_final_data())
if __name__ == "__main__":
    # Standalone entry point; mirrors search_colleges_wrapper().
    college = input("Please enter the college name you want to search: ")
    search_colleges(college, h.read_final_data())
|
junhuih/DFP-Project | fetch_all_data.py | # -*- coding: utf-8 -*-
"""
@author: <NAME>, <NAME>, <NAME>, <NAME>
"""
# import os
import time
import random
import requests
import numpy as np
import pandas as pd
from bs4 import BeautifulSoup
from selenium import webdriver
from urllib.request import urlopen
from pandas.core.frame import DataFrame
from user_agent import DESKTOP_USER_AGENTS
from selenium.common.exceptions import NoSuchElementException
import csv
edge_driver_path = (
"path/to/webdriver"
)
#######################ROI DATA#######################
# Crawl the ROI data with beautifulsoup
# Time Warning: about 10 minutes
def get_roi():
    """Scrape payscale.com's college-ROI table (198 pages) into output.xlsx.

    Each page contributes rows of rank, school name, 20-year ROI, 4-year
    cost, graduation rate, years-to-graduate and average loan amount.
    Time warning: roughly 10 minutes end to end.
    """
    school_name = []
    rank = []
    twenty_year_roi = []
    total_4_year_cost = []
    graduation_rate = []
    typical_years_to_graduate = []
    average_loan_amount = []
    for page in range(1, 199, 1):
        html = "https://www.payscale.com/college-roi/page/" + str(page)
        # get HTML
        html = urlopen(html)
        bsyc = BeautifulSoup(html.read(), "html.parser")
        # Debug copy of the parsed page; overwritten on every iteration.
        fout = open("payscale_temp.txt", "wt", encoding="utf-8")
        fout.write(str(bsyc))
        fout.close()
        # get lists
        # NOTE(review): positional navigation into the page structure;
        # breaks silently if payscale changes its markup.
        tc_table = list(bsyc.body.div.div)
        table = list(tc_table[1].children)
        body = table[4].tbody
        for i in body.children:
            li = list(
                i.find_all("span", {"class": "roi-grid__schoolname--text"})
            )
            for j in li:
                # Slice the school name out of the span's string form.
                school_name.append(str(j).split(">")[2][:-3])
        for i in body.children:
            li = list(i.find_all("span", {"class": "roi-grid__rank--text"}))
            for j in li:
                rank.append(str(j).split(">")[1][:-6])
        datas = []
        for i in body.children:
            li = list(i.find_all("span", {"class": "data-table__value"}))
            for j in li:
                datas.append(str(j).split(">")[1][:-6])
        # Each table row yields 7 values; positions 2-6 are the ones kept.
        for i in range(len(datas)):
            if i % 7 == 2:
                twenty_year_roi.append(datas[i])
            if i % 7 == 3:
                total_4_year_cost.append(datas[i])
            if i % 7 == 4:
                graduation_rate.append(datas[i])
            if i % 7 == 5:
                typical_years_to_graduate.append(datas[i])
            if i % 7 == 6:
                average_loan_amount.append(datas[i])
    # convert to dictionary
    dataf = {
        "Rank": rank,
        "School Name": school_name,
        "20 Year Net ROI": twenty_year_roi,
        "Total 4 Year Cost": total_4_year_cost,
        "Graduation Rate": graduation_rate,
        "Typical Years to Graduate": typical_years_to_graduate,
        "Average Loan Amount": average_loan_amount,
    }
    # Convert to dataFrame and output as Excel
    dataframe = DataFrame(dataf, index=rank)
    dataframe.to_excel("output.xlsx")
# read ROI data
# since the time to crawl is long, I stored them in an excel, and now I will read from the excel
def clean_roi():
    """Read the scraped output.xlsx, drop duplicate rows and tidy names.

    Returns the cleaned DataFrame with a 0-based "Rank" column and school
    names reformatted as "<part> - <part>" around the first hyphen.
    """
    roi = pd.read_excel("output.xlsx")
    dedupe_columns = [
        "School Name",
        "20 Year Net ROI",
        "Total 4 Year Cost",
        "Graduation Rate",
        "Typical Years to Graduate",
    ]
    roi = roi.drop_duplicates(subset=dedupe_columns)
    roi["Rank"] = range(len(roi))

    def _tidy(name):
        # "A-B..." becomes "A - B"; names without a hyphen pass through.
        if name.find("-") == -1:
            return name
        pieces = name.split("-")
        return (pieces[0] + " - " + pieces[1]).strip()

    roi["School Name"] = [_tidy(s) for s in roi["School Name"]]
    # os.remove("output.xlsx")  # kept disabled, as in the original
    return roi
#######################NICHE DATA#######################
# Crawl the NICHE SAT data with beautifulsoup
# Time Warning: about 10 minutes
def send_request(link):
    """GET *link* with a random desktop User-Agent after a short random delay.

    The 2-9 second sleep and rotating User-Agent reduce the chance of the
    scrape being throttled or blocked.  Returns the requests Response.
    """
    time.sleep(random.choice(range(2, 10)))
    headers = {"user-agent": random.choice(DESKTOP_USER_AGENTS)}
    res = requests.get(link, headers=headers)
    # Fixed: compare with "is None" (not "== None") and drop the duplicated,
    # unreachable second "return res" the original carried.
    if res is None:
        print("res is empty")
    return res
def get_niche():
    """Scrape niche.com's best-value-colleges ranking into cleaned_niche.csv.

    Walks all 81 result pages, skips sponsored result cards, and writes one
    row per school: name, city, state, acceptance rate, net price and SAT
    range.  Missing values are stored as the literal string "null".
    Time warning: roughly 10 minutes (rate-limited by send_request).
    """
    # create a list with the url of all 81 pages of the ranking
    allurl = ["https://www.niche.com/colleges/search/best-value-colleges/",]
    nexturl = "https://www.niche.com/colleges/search/best-value-colleges/?page="
    for i in range(2,82):
        allurl.append(nexturl + str(i))
    school_name = []
    fact = []
    location = []
    # iterate and scrape through each link
    for url in allurl:
        res = send_request(url)
        newsoup = BeautifulSoup(res.text, 'html.parser')
        # For each non-sponsored card, collect its name, fact spans and
        # location tagline.
        for div in newsoup.find_all("div", class_="card"):
            if len(div.find_all("div", class_="search-result__sponsered-bar")) == 0:
                school_name.append(div.find("h2", class_="search-result__title"))
                fact.append(div.find_all("span", class_="search-result-fact__value"))
                if len(div.find_all("li", class_="search-result-tagline__item")) != 0:
                    location.append(div.find_all("li", class_="search-result-tagline__item")[1])
                else:
                    location.append('null')
    # Extract the text between ">" and "<" of each tag's string form.
    cleaned_school_name = []
    for i in school_name:
        new = str(i).split('>')
        if (len(new) < 2):
            cleaned_school_name.append("null")
        else:
            cleaned_school_name.append(new[1].split('<')[0])
    cleaned_fact = []
    # Fixed: loop variable renamed -- the original shadowed the builtin
    # "list" here.
    for fact_spans in fact:
        temp = []
        for i in fact_spans:
            new = str(i).split('>')
            if (len(new) < 2):
                temp.append("null")
            else:
                temp.append(new[1].split('<')[0])
        cleaned_fact.append(temp)
    cleaned_location = []
    for i in location:
        new = str(i).split('>')
        if (len(new) < 2):
            cleaned_location.append("null")
        else:
            cleaned_location.append(new[1].split('<')[0])
    # Split "City, ST" taglines into separate columns.
    city = []
    state = []
    for i in cleaned_location:
        new = i.split(', ')
        if (len(new) < 2):
            city.append("null")
            state.append("null")
        else:
            city.append(new[0])
            state.append(new[1])
    Acceptance_Rate = []
    Net_Price = []
    SAT_Range = []
    for eachfact in cleaned_fact:
        if len(eachfact) > 0:
            Acceptance_Rate.append(eachfact[0])
            Net_Price.append(eachfact[1])
            # A third fact span, when present and non-trivial, is the SAT range.
            if len(eachfact) == 3:
                if len(eachfact[2]) > 1:
                    SAT_Range.append(eachfact[2])
                else:
                    SAT_Range.append("null")
            else:
                SAT_Range.append("null")
        else:
            Acceptance_Rate.append("null")
            Net_Price.append("null")
            SAT_Range.append("null")
    # Fixed: the output file is now closed even if writing raises.
    with open("cleaned_niche.csv", "w", newline="") as out:
        writer = csv.writer(out)
        # field names
        fields = ['School Name', 'City', 'State', 'Acceptance Rate', 'Net Price', 'SAT Range']
        writer.writerow(fields)
        for i in range(len(cleaned_school_name)):
            # Rows whose name could not be parsed are dropped.
            if cleaned_school_name[i] != "null":
                writer.writerow([cleaned_school_name[i], city[i], state[i], Acceptance_Rate[i], Net_Price[i], SAT_Range[i]])
#######################Merge the data#######################
def add_calculation_columns(merged_data):
    """Add "SAT Min", "SAT Max" and "Total 4 Year Cost (Integer)" columns.

    Mutates *merged_data* in place.  Unparseable or missing source values
    map to NaN.
    """
    def convert_currency_to_int(currency):
        # "$123,456" -> 123456; non-strings (NaN) or malformed values -> NaN.
        try:
            return int(currency.replace(",", "").replace("$", ""))
        except (AttributeError, ValueError):
            # Fixed: np.NaN was removed in NumPy 2.0; use np.nan.
            # Also narrowed the original bare except.
            return np.nan

    def get_sat_range_min(sat_range):
        # A float cell here is a NaN left by the merge.
        if type(sat_range) is float and np.isnan(sat_range):
            return np.nan
        return int(sat_range.split("-")[0])

    def get_sat_range_max(sat_range):
        if type(sat_range) is float and np.isnan(sat_range):
            return np.nan
        return int(sat_range.split("-")[1])

    merged_data["SAT Min"] = merged_data["SAT Range"].map(
        lambda cell: get_sat_range_min(cell)
    )
    merged_data["SAT Max"] = merged_data["SAT Range"].map(
        lambda cell: get_sat_range_max(cell)
    )
    merged_data["Total 4 Year Cost (Integer)"] = merged_data[
        "Total 4 Year Cost"
    ].map(lambda x: convert_currency_to_int(x))
def merge_data():
    """Join the payscale ROI data with the niche data into merged_data.xlsx."""
    roi = clean_roi()
    roi["Rank"] = roi["Rank"] + 1  # switch from 0-based to 1-based ranks
    niche = pd.read_csv("cleaned_niche.csv", encoding='ISO-8859-1')
    merged = pd.merge(roi, niche.drop_duplicates(), how="left", on="School Name")
    merged = merged.drop(labels="Unnamed: 0", axis=1)
    add_calculation_columns(merged)
    merged.to_excel("merged_data.xlsx")
##################### Combining Fetch ###############
def refresh_all_data():
    """Re-run every scraper and rebuild merged_data.xlsx from scratch."""
    for step in (get_roi, get_niche, merge_data):
        step()
    # get_careers_data() - uncomment if you have webdriver path set
    print("Success! All data is refreshed.")
#################### Best Colleges Data ####################
# this portion of the script is to scrape bestcollege.com
# for information on career data
def get_careers_data():
    """Scrape bestcolleges.com career pages into bestcolleges_careers.csv.

    Requires Microsoft Edge plus a matching webdriver binary at the
    module-level ``edge_driver_path``.  After saving the CSV it also
    rebuilds merged_data.xlsx via merge_data().
    """
    # Call your browser
    driver = webdriver.Edge(executable_path=edge_driver_path)
    # Get all career links
    driver.get("https://www.bestcolleges.com/careers/")
    career_anchors = driver.find_elements_by_css_selector(
        "div.swiper-slide a[data-wpel-link='internal']"
    )
    career_links = [career.get_attribute("href") for career in career_anchors]
    career_data_list = []
    # Loop through urls collected and collect data
    for career in career_links:
        driver.get(career)
        career_name = driver.find_element_by_css_selector(
            "section.hero h1"
        ).text
        career_info = driver.find_element_by_css_selector(
            "section.container.content>p:first-child"
        ).text
        # Optional sections: some career pages omit "why pursue" and/or
        # "advancing your career"; fall back to an empty string.
        try:
            why_career = driver.find_element_by_css_selector(
                'a[id^="why-pursue"]+h2+p'
            ).text
            why_career += (
                " "
                + driver.find_element_by_css_selector(
                    'a[id^="why-pursue"]+h2+p+div+p'
                ).text
            )
        except NoSuchElementException:
            why_career = ""
        try:
            how_to_start = driver.find_element_by_css_selector(
                "a#advancing-your-career+h2+p"
            ).text
            how_to_start += driver.find_element_by_css_selector(
                "a#advancing-your-career+h2+p+div+p"
            ).text
        except NoSuchElementException:
            how_to_start = ""
        career_data_list.append(
            {
                "career_name": career_name,
                "career_info": career_info,
                "why_career": why_career,
                "how_to_start": how_to_start,
            }
        )
    # Create a data frame of the data collected
    career_data = pd.DataFrame(
        career_data_list,
        columns=["career_name", "career_info", "why_career", "how_to_start"],
    )
    # Save the data frame created for future use.
    career_data.to_csv(
        "bestcolleges_careers.csv", index=False, encoding="utf-8"
    )
    driver.close()
    merge_data()
junhuih/DFP-Project | user_agent.py | <reponame>junhuih/DFP-Project
# *********************************************************************************#
# NOTICE:
# This file is not created by our group.
# It is a useful helper to simulate the user clicks when parsing through websites.
# *********************************************************************************#
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: acumming
# @Date: 2015-05-04 15:03:24
# @Last Modified by: kedparab
# @Last Modified time: 2015-11-10 15:52:28
# From https://techblog.willshouse.com/2012/01/03/most-common-user-agents/
# Last Updated: Mon, 04 May 2015 19:06:41 +0000
# Pool of real desktop browser User-Agent strings; callers pick one at
# random per request so scraping traffic looks organic.
DESKTOP_USER_AGENTS = [
    'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)',
    'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0)',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:36.0) Gecko/20100101 Firefox/36.0',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Gecko/20100101 Firefox/37.0',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:37.0) Gecko/20100101 Firefox/37.0',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:37.0) Gecko/20100101 Firefox/37.0',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10) AppleWebKit/600.1.25 (KHTML, like Gecko) Version/8.0 Safari/600.1.25',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.104 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/600.3.18 (KHTML, like Gecko) Version/8.0.3 Safari/600.3.18',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/600.3.18 (KHTML, like Gecko) Version/8.0.4 Safari/600.4.10',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/600.4.10 (KHTML, like Gecko) Version/8.0.4 Safari/600.4.10',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/600.5.17 (KHTML, like Gecko) Version/8.0.5 Safari/600.5.17',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.78.2 (KHTML, like Gecko) Version/6.1.6 Safari/537.78.2',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.4.10 (KHTML, like Gecko) Version/7.1.4 Safari/537.85.13',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.5.17 (KHTML, like Gecko) Version/7.1.5 Safari/537.85.14',
    'Mozilla/5.0 (Windows NT 5.1; rv:37.0) Gecko/20100101 Firefox/37.0',
    'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.101 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1; rv:37.0) Gecko/20100101 Firefox/37.0',
    'Mozilla/5.0 (Windows NT 6.1; Trident/7.0; rv:11.0) like Gecko',
    'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.101 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36 OPR/28.0.1750.51',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.89 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20100101 Firefox/31.0',
    'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:35.0) Gecko/20100101 Firefox/35.0',
    'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:36.0) Gecko/20100101 Firefox/36.0',
    'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0',
    'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko',
    'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.2; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0',
    'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.101 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:36.0) Gecko/20100101 Firefox/36.0',
    'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0',
    'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; rv:11.0) like Gecko',
    'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; rv:11.0) like Gecko',
    'Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:37.0) Gecko/20100101 Firefox/37.0',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.101 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/41.0.2272.76 Chrome/41.0.2272.76 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64; rv:31.0) Gecko/20100101 Firefox/31.0',
    'Mozilla/5.0 (X11; Linux x86_64; rv:31.0) Gecko/20100101 Firefox/31.0 Iceweasel/31.6.0',
    'Mozilla/5.0 (X11; Linux x86_64; rv:37.0) Gecko/20100101 Firefox/37.0',
    'Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:37.0) Gecko/20100101 Firefox/37.0',
    'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:36.0) Gecko/20100101 Firefox/36.0',
    'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:37.0) Gecko/20100101 Firefox/37.0',
]
{"status":200,"message":"Success","data":[]} |
junhuih/DFP-Project | bestcolleges_helper.py | <gh_stars>0
# -*- coding: utf-8 -*-
"""
@author: <NAME>, <NAME>, <NAME>, <NAME>
"""
# Importing required packages
import numpy as np
import pandas as pd
from fuzzywuzzy import fuzz
from fuzzywuzzy import process
# Declaring global variables
# Career dataset produced by the bestcolleges scraper; loaded once at
# import time and read by the lookup helpers below.
bestcolleges_data_path = "bestcolleges_careers.csv"
bestcolleges_data = pd.read_csv(bestcolleges_data_path)
# Print all the careers
def view_all_careers():
    """Print a 1-based numbered list of every career in the dataset."""
    print("We are ready to guide you on the following careers : ")
    names = bestcolleges_data.career_name.to_list()
    for position, name in enumerate(names, start=1):
        print(str(position) + ". " + name)
# Search and print all information for the given career
def view_career_info_by_name(searched_career):
    """Fuzzy-search the careers dataset and print details for the best match.

    Args:
        searched_career: free-text career name typed by the user.

    The info / why / how-to-start sections are printed only when the
    corresponding cell of the matched row is present (non-NaN).
    """
    # Partial-ratio fuzzy match against every known career name.
    closest_result = process.extractOne(
        searched_career,
        bestcolleges_data.career_name.to_list(),
        scorer=fuzz.partial_ratio,
    )
    # extractOne returns None when there are no choices; guard before
    # indexing (len(None) would raise).
    if closest_result:
        search_result_row = bestcolleges_data[
            bestcolleges_data.career_name == closest_result[0]
        ]
        career_name = search_result_row.career_name.iloc[0]
        print("Here is some guidance on " + career_name)
        # pd.notna is the reliable missing-value test.  The original
        # `x is not np.nan` was always True for a Series (identity of a
        # Series vs a float), and identity checks on NaN scalars are
        # unreliable, so NaN sections were printed as the text "nan".
        if pd.notna(search_result_row.career_info.iloc[0]):
            print("---- Info ----")
            print(search_result_row.career_info.iloc[0])
        if pd.notna(search_result_row.why_career.iloc[0]):
            print(
                "---- Why " + career_name + " ----"
            )
            print(search_result_row.why_career.iloc[0])
        if pd.notna(search_result_row.how_to_start.iloc[0]):
            print(
                "---- How to start on "
                + career_name
                + " ----"
            )
            print(search_result_row.how_to_start.iloc[0])
|
junhuih/DFP-Project | recommender_helper.py | <reponame>junhuih/DFP-Project<filename>recommender_helper.py
# -*- coding: utf-8 -*-
"""
@author: <NAME>, <NAME>, <NAME>, <NAME>
"""
import pandas as pd
# Pre-merged college dataset; the first column is used as the index.
merged_data_path = "merged_data.xlsx"
merged_data = pd.read_excel(merged_data_path, index_col=0)
def view_recommendations(preferred_state, sat_score, total_4_year_cost):
    """Print up to 10 colleges matching the user's state, SAT score and budget.

    Args:
        preferred_state: two-letter state code (e.g. "PA").
        sat_score: the user's SAT score; schools whose "SAT Min" is below
            this are kept.
        total_4_year_cost: maximum budget; schools costing up to 10% more
            are still shown.
    """
    in_range_college = merged_data[
        (merged_data["State"] == preferred_state)
        & (merged_data["SAT Min"] < sat_score)
        # & (merged_data['SAT Max'] > sat_score) - the more the
        # score the better?
        & (
            merged_data["Total 4 Year Cost (Integer)"]
            < (float(total_4_year_cost) * 1.1)
        )
    ]
    if len(in_range_college) > 0:
        print("Here are the schools that we recommend!:")
        result_count = 0
        for index, college in in_range_college.iterrows():
            result_count += 1
            if result_count > 10:
                print("\n^ Showing top 10 results! ^\n")
                break
            print("- - - - - - - - - - - - - - - - - - - - - - - - - -")
            # Positional access must go through .iloc: an integer [] lookup
            # on a Series means *label* lookup and the positional fallback
            # is deprecated/removed in modern pandas.
            print(f"20 Year Net ROI: {str(college.iloc[2]):>30}")
            print(f"College Name: {str(college.iloc[1]):>30}")
            print(f"Total 4 Year Cost: {str(college.iloc[3]):>30}")
            print(f"Graduation Rate: {str(college.iloc[4]):>30}")
            print(f"City: {str(college.iloc[7]):>30}")
            print(f"State: {str(college.iloc[8]):>30}")
            print(f"Acceptance Rate: {str(college.iloc[9]):>30}")
    else:
        print("\nConsider updating preferences to find better results!")
|
junhuih/DFP-Project | college_helper.py | # -*- coding: utf-8 -*-
"""
@author: <NAME>, <NAME>, <NAME>, <NAME>
"""
# This is our main program file that displays the menu for the user to use
import helpers as h
import search_colleges as sc
import average_stats as avg_stat
import recommender_helper as r_helper
import bestcolleges_helper as bs_helper
import fetch_all_data as data_fetcher
def college_helper():
    """Top-level interactive menu; dispatches to the selected feature."""
    for menu_line in (
        "Welcome to college helper!",
        "Please select the following prompt:",
        "1. See recommended college based on my preferences",
        "2. See top level stats about colleges",
        "3. Browse careers",
        "4. Search colleges",
        "5. Help",
        "6. Refresh all data - Takes 15 mins!",
        "7. Exit",
    ):
        print(menu_line)
    selection = h.get_input(7)
    if selection == 1:
        get_recommendation()
    elif selection == 2:
        view_general_data()
    elif selection == 3:
        browse_careers()
    elif selection == 4:
        sc.search_colleges_wrapper()
        print("\n==========================")
        college_helper()
    elif selection == 5:
        help_message()
    elif selection == 6:
        data_fetcher.refresh_all_data()
        college_helper()
    else:
        h.exitMessage(selection)
    return
# Gets recommendation for the client given his preferences
def get_recommendation():
    """Collect the user's preferences and show the matching colleges."""
    print("Please input your preferences:")
    print("What state would you prefer to study in: (ex. LA, PA) ")
    state = h.get_states()
    print("Your SAT Score:")
    score = h.get_input(1600)
    print("How much are you ready to pay for college: (in numbers)")
    budget = h.get_input(1000000)
    r_helper.view_recommendations(state, score, budget)
    print("\n==========================")
    college_helper()
# Show general data to the client about school across the US
def view_general_data():
    """Secondary menu: aggregate stats and per-state maps."""
    print("==========================")
    print("Viewing college by filters:")
    print("1. View the average stats of all states")
    print("2. View ROI by states")
    print("3. View total 4 year costs by states")
    print("4. View average loan amount by states")
    print("5. Go back to menu")
    # Dispatch table keeps the menu options next to their handlers.
    actions = {
        1: avg_stat.get_average_stats,
        2: avg_stat.compute_roi_and_draw_map,
        3: avg_stat.compute_cost_and_draw_map,
        4: avg_stat.compute_loan_and_draw_map,
    }
    selection = h.get_input(5)
    action = actions.get(selection)
    if action is not None:
        action()
        view_general_data()
    else:
        print("\n==========================")
        college_helper()
# I am sure you don't understand all the careers out there.
# This function allows you to search through careers and see
# how can one pursue that career
def browse_careers():
    """Career-browsing submenu: list all careers or look one up by name."""
    print(
        """How do you want to browse careers? (choose an option)
1. Show all careers:
2. View career information by name
3. Exit"""
    )
    selection = h.get_input(3)
    if selection == 1:
        bs_helper.view_all_careers()
        browse_careers()
    elif selection == 2:
        print("Career Name:")
        bs_helper.view_career_info_by_name(input())
        browse_careers()
    else:
        print("\n==========================")
        college_helper()
# Displays the helper string for the client
def help_message():
    """Print a short description of the tool, then return to the main menu."""
    separator = "=========================="
    print(separator)
    print("College helper is good to help you find colleges!")
    print(
        """
You can navigate through the menu and browse useful information!
The information would be valuable for you to find the college that matches the best with your preferences!
"""
    )
    print(separator)
    college_helper()
# Where everything begins!
if __name__ == "__main__":
college_helper()
|
snub-fighter/python-bitrue | bitrue/examples/trades_to_csv.py | <gh_stars>1-10
from bitrue.client import Client
import pandas as pd
if __name__ == '__main__':
    # Credentials are intentionally blank; fill in before running.
    client = Client(api_key='',
                    api_secret='',
                    )
    # Fetch the account's trade history and keep only the columns of interest.
    trades = client.get_my_trades()
    df = pd.DataFrame(trades)
    # NOTE(review): 'commissionAssert' looks like a typo for 'commissionAsset',
    # but it must match the API response field name exactly -- confirm against
    # the Bitrue response before changing it.
    df = df[['symbol','id','orderId','origClientOrderId','price','qty','commission','commissionAssert','time','isBuyer','isMaker','isBestMatch']]
    df.to_csv('bitrue_trades.csv', sep=',', encoding='utf-8')
|
snub-fighter/python-bitrue | bitrue/examples/check_open_orders.py | from bitrue.client import Client
from tabulate import tabulate
from pprint import pprint
if __name__ == '__main__':
    # Placeholder credentials; replace <KEY> before running.
    client = Client(api_key='<KEY>',
                    api_secret='<KEY>',
                    )
    # Fetch open orders for one symbol and render them as a table;
    # orient='h' prints one order per row (see the sample output below).
    open_orders = client.get_open_orders(symbol='XRPUSDT')
    #pprint(open_orders)
    order_formatted = client._order_format_print(open_orders, orient='h')
    print(order_formatted)
'''
Standard output
[{'clientOrderId': '',
'cummulativeQuoteQty': '0.0000000000000000',
'executedQty': '0.0000000000000000',
'icebergQty': '',
'isWorking': False,
'orderId': '53096850',
'origQty': '11.3000000000000000',
'price': '0.4470000000000000',
'side': 'SELL',
'status': 'NEW',
'stopPrice': '',
'symbol': 'XRPUSDT',
'time': 1559593125000,
'timeInForce': '',
'type': 'LIMIT',
'updateTime': 1559593126000}]
symbol orderId clientOrderId price origQty executedQty cummulativeQuoteQty status timeInForce type side stopPrice icebergQty time updateTime isWorking
-------- --------- --------------- ------- --------- ------------- --------------------- -------- ------------- ------ ------ ----------- ------------ ------------- ------------- -----------
XRPUSDT 53289178 2 250 0 0 NEW LIMIT SELL 1559664112000 1559664114000 False
XRPUSDT 53289160 2 127.5 0 0 NEW LIMIT SELL 1559664102000 1559664104000 False
''' |
snub-fighter/python-bitrue | setup.py | #!/usr/bin/env python3
import os
from setuptools import setup
# get key package details from bitrue/__version__.py
about = {} # type: ignore
here = os.path.abspath(os.path.dirname(__file__))
# Execute __version__.py into `about` so setup() can read the metadata
# without importing the (possibly not-yet-installed) package.
with open(os.path.join(here, 'bitrue', '__version__.py')) as f:
    exec(f.read(), about)
# package configuration - for reference see:
# https://setuptools.readthedocs.io/en/latest/setuptools.html#id9
# Package configuration - for reference see:
# https://setuptools.readthedocs.io/en/latest/setuptools.html#id9
setup(
    name=about['__title__'],
    description=about['__description__'],
    long_description_content_type='text/markdown',
    version=about['__version__'],
    author=about['__author__'],
    author_email=about['__author_email__'],
    url=about['__url__'],
    packages=['bitrue'],
    include_package_data=True,
    # ">=3.7.*" is an invalid PEP 440 specifier (".*" is only valid with
    # == / !=); setuptools warns on it and newer versions reject it.
    # ">=3.7" expresses the intended constraint correctly.
    python_requires=">=3.7",
    install_requires=['numpy', 'requests'],
    license=about['__license__'],
    zip_safe=False,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Programming Language :: Python :: 3.7',
    ],
    keywords='package development template'
)
|
snub-fighter/python-bitrue | bitrue/examples/full_trade_cycle.py | from bitrue.client import Client
from tabulate import tabulate
from pprint import pprint
import os
import time
if __name__ == '__main__':
    RECV_WINDOW = 6000000
    # Credentials are intentionally blank; fill in before running.
    client = Client(api_key='',
                    api_secret='',
                    )
    sell_price = 1.5
    sell_qty = 5
    buyback_price = .40
    # Reinvest the full USD proceeds of the sell into the buy-back.
    usdvalue = sell_price*sell_qty
    buyback_qty = usdvalue/buyback_price
    #Create intial sell order
    create_order = client.order_limit_sell(symbol='XRPUSDT', quantity=sell_qty, price=sell_price)
    pprint(create_order)
    orderId_Sell = create_order['orderId']
    print(type(orderId_Sell))
    # Poll once per second until the sell fills, then place the buy-back.
    while True:
        #check order status
        orderstatus = client.get_order(symbol='XRPUSDT', orderId=orderId_Sell,recvWindow=RECV_WINDOW)
        if orderstatus['status'] == 'FILLED':
            #buy back using gains
            buyback_order = client.order_limit_buy(symbol='XRPUSDT', quantity=buyback_qty, price=buyback_price)
            print(buyback_order)
            # Without this break the loop kept running after the fill and
            # re-submitted the same buy-back order every iteration.
            break
        else:
            os.system('cls' if os.name == 'nt' else 'clear') # clear screen
            print('OrderId: {} - still open'.format(orderId_Sell))
            time.sleep(1)
'''
{'clientOrderId': '',
'orderId': 53322247,
'symbol': 'XRPUSDT',
'transactTime': 1559674713334}
OrderId: 53322247 - still open
'''
|
snub-fighter/python-bitrue | bitrue/examples/historical_trades.py | from bitrue.client import Client
if __name__ == '__main__':
    # Credentials are intentionally blank; fill in before running.
    client = Client(api_key='',
                    api_secret='',
                    )
    # Fetch the account's historical trades and print them as a table
    # (see the sample output below).
    trades = client.get_my_trades()
    print(client._order_format_print(trades))
'''
symbol id orderId origClientOrderId price qty commission commissionAssert time isBuyer isMaker isBestMatch
-------- ------- --------- ------------------- ----------- ---------- ------------ ------------------ ------------- --------- --------- -------------
HOTXRP 1583958 53673021 0.004473 717 1559843532000 True True True
'''
|
snub-fighter/python-bitrue | bitrue/__version__.py | """
__version__.py
~~~~~~~~~~~~~~
Information about the current version of the py-package-template package.
"""
# Distribution metadata; read by setup.py via exec() rather than import so
# the values are available before the package is installed.
__title__ = 'bitrue-python'
__description__ = 'A python package to communicate with Bitrue API'
__version__ = '0.0.2'
__author__ = '<NAME>'
__author_email__ = '<EMAIL>'
__license__ = 'MIT'
__url__ = 'https://github.com/snub-fighter/python-bitrue'
|
tianhwu/microblog | upload.py | from flask import Flask, render_template, request
from flask_uploads import UploadSet, configure_uploads, DATA
#loads data manipulation
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import os
from datetime import datetime, timedelta
#loads mapping
import folium
from folium import plugins
from folium.plugins import HeatMap
from folium.plugins import MarkerCluster
from folium.plugins import HeatMapWithTime
app = Flask(__name__)
# Upload set for data files (csv etc.); uploads are stored under static/data.
loc_data = UploadSet('data', DATA)
#app.config['UPLOADED_DEFAULTS_DEST'] = 'static/img'
app.config['UPLOADED_DATA_DEST'] = 'static/data'
configure_uploads(app, loc_data)
@app.route('/upload', methods=['GET', 'POST'])
def upload():
    """Accept a CSV of (latitude, longitude, date) points and build an
    animated heat map saved to static/map.html.

    Returns the last day offset (as a string) on success, or renders the
    upload form on GET / missing file.
    """
    if request.method == 'POST' and 'data' in request.files:
        filename = loc_data.save(request.files['data'])
        df = pd.read_csv(app.config['UPLOADED_DATA_DEST'] + "/" + filename)
        # The upload is only needed in memory; drop the file immediately.
        os.remove(os.path.join(app.config['UPLOADED_DATA_DEST'], filename))
        # ensure our lat and long are numeric datatypes
        num_cols = ['latitude', 'longitude']
        df[num_cols] = df[num_cols].apply(pd.to_numeric)
        # ensure our date column is a datetime object
        dt_cols = ['date']
        df[dt_cols] = df[dt_cols].apply(pd.to_datetime)
        # Integer day offset of each row from the earliest date in the file.
        df = df.assign(days_delta=df.date - df.date.min())
        df[['days_int']] = (df[['days_delta']] / np.timedelta64(1, 'D')).astype(np.int64)
        heatmap = folium.Map(location=[40, 12], zoom_start=2.5)
        # One animation frame per day.  The upper bound is max + 1 so the
        # final day's points are included -- the original range(min, max)
        # silently dropped the last day.
        heat_data = [[[row['latitude'], row['longitude']]
                      for index, row in df[df['days_int'] == i].iterrows()]
                     for i in range(df.days_int.min(), df.days_int.max() + 1)]
        # plots a HeatMapWithTime graph
        hm = plugins.HeatMapWithTime(heat_data, auto_play=True, max_opacity=0.7)
        hm.add_to(heatmap)
        heatmap.save('static/map.html')
        return str(df.days_int.max())
    return render_template('upload.html')
@app.route('/map')
def createsMap():
    # Serves the pre-rendered heat map; /upload must have been hit first so
    # that static/map.html exists.
    return """
<h1>Please run the upload script before this</h1>
<iframe src="/static/map.html" width="1200" height="600" frameborder="0" allowfullscreen></iframe>
"""
if __name__ == '__main__':
app.run(debug=True)
|
tianhwu/microblog | simpleupload.py | from flask import Flask, render_template, request
from flask_uploads import UploadSet, configure_uploads, DATA
import pandas as pd
app = Flask(__name__)
loc_data = UploadSet('data', DATA)
#app.config['UPLOADED_DEFAULTS_DEST'] = 'static/img'
app.config['UPLOADED_DATA_DEST'] = 'static/img'
configure_uploads(app, loc_data)
@app.route('/upload', methods=['GET', 'POST'])
def upload():
    """On POST with a file, save it and echo the CSV's second column name;
    otherwise render the upload form."""
    is_post_with_file = request.method == 'POST' and 'data' in request.files
    if not is_post_with_file:
        return render_template('upload.html')
    saved_name = loc_data.save(request.files['data'])
    frame = pd.read_csv('static/img/' + saved_name)
    return list(frame)[1]
if __name__ == '__main__':
app.run(debug=True) |
tianhwu/microblog | hello.py | <gh_stars>0
from flask import Flask
from flask import request
from flask_uploads import UploadSet, configure_uploads, IMAGES
from flask import render_template
import pandas as pd
app = Flask(__name__)
@app.route('/')
def index():
    # Plain-text landing page placeholder.
    return "welcome to the index"
@app.route('/hayabusa')
def hello_world():
    # Smoke-test route; always returns the constant string "6".
    return str(2+4)
# Image upload set; uploaded photos are stored under static/img.
photos = UploadSet('photos', IMAGES)
app.config['UPLOADED_PHOTOS_DEST'] = 'static/img'
configure_uploads(app, photos)
@app.route('/upload', methods=['GET', 'POST'])
def upload():
    # On POST, persist the image and return its stored filename;
    # on GET (or missing file) just render the form.
    if request.method == 'POST' and 'photo' in request.files:
        filename = photos.save(request.files['photo'])
        return filename
    return render_template('upload.html')
@app.route('/uploadcsv', methods=['POST'])
def uploadcsv():
    # POST-only stub: always re-renders the upload form.
    return render_template('upload.html')
if __name__ == '__main__':
app.run(debug=True)
|
debrine/Bank-Reconciliation | BankReconciliation.py | <gh_stars>0
from tkinter import *
from tkinter import ttk
from tkinter import filedialog
from csv import reader
from openpyxl import load_workbook
import datetime
from openpyxl import Workbook
from openpyxl.utils import get_column_letter
from openpyxl.styles import Font, Border, Side
#global variables
# Selected input file paths, set by the file-dialog callbacks below;
# None until the user picks a file (reconcile() no-ops while either is None).
bank_statement_path = None
general_ledger_path = None
#solution found at https://stackoverflow.com/questions/44033894/removing-common-values-from-two-lists-in-python/44033987
def remove_values_from_list(the_list, val):
    """Return a new list with every occurrence of *val* removed.

    Args:
        the_list: list to filter.
        val: the value to drop from the list.

    Returns:
        A new list containing every element of ``the_list`` not equal to ``val``.
    """
    return list(filter(lambda item: item != val, the_list))
def sheet_setup(sheet):
    """Create and style the two-sided header for one report worksheet.

    Row 1 holds the section titles ("Bank Statement" over columns A-E,
    "General Ledger" over F-J); row 2 holds the five column labels for
    each side (Date / Source Num / Comment / Debit / Credit).

    Args:
        sheet: openpyxl worksheet which will have the header added to it.
    """
    sheet['A1'] = 'Bank Statement'
    sheet['F1'] = 'General Ledger'
    sheet['A2'] = 'Date'
    sheet['B2'] = 'Source Num'
    sheet['C2'] = 'Comment'
    sheet['D2'] = 'Debit'
    sheet['E2'] = 'Credit'
    sheet['F2'] = 'Date'
    sheet['G2'] = 'Source Num'
    sheet['H2'] = 'Comment'
    sheet['I2'] = 'Debit'
    sheet['J2'] = 'Credit'
    # Bold, underlined section titles.
    sheet['A1'].font = Font(size=14, underline="single", bold=True)
    sheet['F1'].font = Font(size=14, underline="single", bold=True)
    # Bottom border + bold on every label cell in row 2.
    row = sheet['A2':'J2']
    row = row[0]
    for cell in row:
        cell.border = Border(bottom=Side(border_style="thin"))
        cell.font = Font(bold=True)
    # E2 additionally gets a right border to visually split the two sides.
    sheet['E2'].border = Border(right=Side(border_style="thin"), bottom=Side(border_style='thin'))
def populate(sheet, bank_or_ledger, list):
    """Write a list of entry dicts into one side of a worksheet.

    Args:
        sheet: openpyxl worksheet to fill; data starts at row 3, under the
            header created by sheet_setup().
        bank_or_ledger: 'bank_statement' fills columns A-E; any other value
            fills the general-ledger columns F-J.
        list: entry dicts with keys date/source_num/comment/debit/credit.
    """
    if bank_or_ledger == 'bank_statement':
        cells = ['A', 'B', 'C', 'D', 'E']
    else:
        cells = ['F', 'G', 'H', 'I', 'J']
    # Column order mirrors the header labels written by sheet_setup().
    entry_order = ['date', 'source_num', 'comment', 'debit', 'credit']
    for i in range(3, len(list) + 3):
        for j in range(len(cells)):
            cell_index = cells[j] + str(i)
            sheet[cell_index] = list[i-3][entry_order[j]]
        # Keep the vertical divider between the two sides on every data row.
        e_cell = 'E' + str(i)
        sheet[e_cell].border = Border(right=Side(border_style='thin'))
#solution by velis at https://stackoverflow.com/questions/13197574/openpyxl-adjust-column-width-size
def resize_sheet_columns(sheet):
    """Auto-fit each column's width to its longest stringified cell value.

    Args:
        sheet: the excel worksheet that will have its columns adjusted.
    """
    # Track, per column letter, the longest cell value seen so far.
    dims = {}
    for row in sheet.rows:
        for cell in row:
            if cell.value:
                dims[cell.column_letter] = max((dims.get(cell.column_letter, 0), len(str(cell.value))))
    # +2 adds a little padding so text doesn't touch the column edge.
    for col, value in dims.items():
        sheet.column_dimensions[col].width = value + 2
def reconcile():
    '''Run the reconciliation when the Reconcile button is clicked.

    Reads the selected bank-statement CSV and general-ledger workbook,
    buckets entries into categories (matched cheques, Canada Helps, Paypal,
    e-transfer, unmatched), then writes a multi-sheet report named
    bank_rec_<today>.xlsx into the working directory.  Does nothing until
    both input files have been chosen.
    '''
    if (bank_statement_path == None or general_ledger_path == None):
        pass
    else:
        CSV = processCSV()
        excel = processExcel()
        ascending_dates(CSV)
        ascending_dates(excel)
        # Category buckets.  Each holds matched bank-statement rows, matched
        # general-ledger rows, and running totals as two-element lists:
        # index 0 = bank-statement total, index 1 = general-ledger total.
        entry_lists = {}
        entry_lists["matching_cheques"] = {}
        entry_lists["matching_cheques"]["bank_statements"] = []
        entry_lists["matching_cheques"]["general_ledger"] = []
        entry_lists["matching_cheques"]["total_credit"] = [0, 0]
        entry_lists["matching_cheques"]["total_debit"] = [0, 0]
        entry_lists["canada_helps"] = {}
        entry_lists["canada_helps"]["bank_statements"] = []
        entry_lists["canada_helps"]["general_ledger"] = []
        entry_lists["canada_helps"]["total_debit"] = [0, 0]
        entry_lists["paypal"] = {}
        entry_lists["paypal"]["bank_statements"] = []
        entry_lists["paypal"]["general_ledger"] = []
        entry_lists["paypal"]["total_credit"] = [0, 0]
        entry_lists["paypal"]["total_debit"] = [0, 0]
        entry_lists["etransfer"] = {}
        entry_lists["etransfer"]["bank_statements"] = []
        entry_lists["etransfer"]["general_ledger"] = []
        entry_lists["etransfer"]["total_credit"] = [0, 0]
        entry_lists["etransfer"]["total_debit"] = [0, 0]
        # Pass 1: classify bank-statement rows by their comment text, or try
        # to pair cheques with a ledger row sharing the same source number.
        # Consumed rows are replaced with 0 and stripped out after the loops.
        for i in range(len(CSV)):
            if 'canada help' in CSV[i]["comment"].lower():
                entry_lists["canada_helps"]["bank_statements"].append(CSV[i])
                entry_lists["canada_helps"]["total_debit"][0] += float(CSV[i]["debit"])
                CSV[i] = 0
            elif 'email money tran' in CSV[i]["comment"].lower():
                entry_lists["etransfer"]["bank_statements"].append(CSV[i])
                entry_lists["etransfer"]["total_debit"][0] += float(CSV[i]["debit"])
                entry_lists["etransfer"]["total_credit"][0] += float(CSV[i]["credit"])
                CSV[i] = 0
            elif 'paypal' in CSV[i]["comment"].lower() or 'pay pal' in CSV[i]["comment"].lower():
                entry_lists["paypal"]["bank_statements"].append(CSV[i])
                entry_lists["paypal"]["total_debit"][0] += float(CSV[i]["debit"])
                CSV[i] = 0
            elif CSV[i]["source_num"] != "":
                # Cheque: pair with the first unconsumed ledger row that has
                # the same source number.
                for j in range(len(excel)):
                    if excel[j] != 0:
                        if CSV[i]["source_num"] == excel[j]["source_num"]:
                            entry_lists["matching_cheques"]["bank_statements"].append(CSV[i])
                            entry_lists["matching_cheques"]["general_ledger"].append(excel[j])
                            entry_lists["matching_cheques"]["total_debit"][0] += float(CSV[i]["debit"])
                            entry_lists["matching_cheques"]["total_credit"][0] += float(CSV[i]["credit"])
                            entry_lists["matching_cheques"]["total_credit"][1] += float(excel[j]["credit"])
                            entry_lists["matching_cheques"]["total_debit"][1] += float(excel[j]["debit"])
                            CSV[i] = 0
                            excel[j] = 0
                            break
        # Pass 2: classify the remaining ledger rows.  Note Canada Helps is
        # keyed on the *source number* here, not the comment.
        for i in range(len(excel)):
            if excel[i] != 0:
                if 'canada' in excel[i]["source_num"].lower():
                    entry_lists["canada_helps"]["general_ledger"].append(excel[i])
                    entry_lists["canada_helps"]["total_debit"][1] += float(excel[i]["debit"])
                    excel[i] = 0
                elif 'etransfer' in excel[i]["comment"].lower() or 'e transfer' in excel[i]["comment"].lower():
                    entry_lists["etransfer"]["general_ledger"].append(excel[i])
                    entry_lists["etransfer"]["total_debit"][1] += float(excel[i]["debit"])
                    entry_lists["etransfer"]["total_credit"][1] += float(excel[i]["credit"])
                    excel[i] = 0
                elif 'paypal' in excel[i]["comment"].lower() or 'pay pal' in excel[i]["comment"].lower():
                    entry_lists["paypal"]["general_ledger"].append(excel[i])
                    entry_lists["paypal"]["total_credit"][1] += float(excel[i]["credit"])
                    entry_lists["paypal"]["total_debit"][1] += float(excel[i]["debit"])
                    excel[i] = 0
        # Whatever survived both passes is unmatched.
        CSV = remove_values_from_list(CSV, 0)
        excel = remove_values_from_list(excel, 0)
        # Build the report workbook: one sheet per category plus unmatched.
        work_book = Workbook()
        wb_filename = 'bank_rec_' + str(datetime.date.today()) + '.xlsx'
        print(wb_filename)
        matched_cheques = work_book.active
        matched_cheques.title = 'Matched Cheques'
        canada_helps = work_book.create_sheet(title="Canada Helps")
        paypal = work_book.create_sheet(title="Paypal")
        etransfer = work_book.create_sheet(title="Etransfer")
        unmatched_entries = work_book.create_sheet(title="Unmatched Sheets")
        sheets = []
        sheets.append(matched_cheques)
        sheets.append(canada_helps)
        sheets.append(paypal)
        sheets.append(etransfer)
        sheets.append(unmatched_entries)
        for sheet in sheets:
            sheet_setup(sheet)
        populate(matched_cheques, 'bank_statement', entry_lists["matching_cheques"]["bank_statements"])
        populate(matched_cheques, 'general_ledger', entry_lists["matching_cheques"]["general_ledger"])
        populate(canada_helps, 'bank_statement', entry_lists["canada_helps"]["bank_statements"])
        populate(canada_helps, 'general_ledger', entry_lists["canada_helps"]["general_ledger"])
        populate(paypal, 'bank_statement', entry_lists["paypal"]["bank_statements"])
        populate(paypal, 'general_ledger', entry_lists["paypal"]["general_ledger"])
        populate(etransfer, 'bank_statement', entry_lists["etransfer"]["bank_statements"])
        populate(etransfer, 'general_ledger', entry_lists["etransfer"]["general_ledger"])
        populate(unmatched_entries, 'bank_statement', CSV)
        populate(unmatched_entries, 'general_ledger', excel)
        for sheet in sheets:
            resize_sheet_columns(sheet)
        work_book.save(filename = wb_filename)
        #testing print statements
        '''
print("Matched cheques:\n")
for pair in entry_lists["matching_cheques"]["cheques"]:
print(f"bank statement: {pair[0]} \ngeneral ledger: {pair[1]}\n")
print(f"Matched Cheques: \nBank Deposits: credit: ${entry_lists['matching_cheques']['total_credit'][0]}", \
f"debit: ${entry_lists['matching_cheques']['total_debit'][0]}", \
f"\nGeneral Ledger: credit: ${entry_lists['matching_cheques']['total_credit'][1]}", \
f"debit: ${entry_lists['matching_cheques']['total_debit'][1]}\n")
print(f"Canada Helps:\nBank Statement Debits: ${entry_lists['canada_helps']['total_debit'][0]}", \
f"General Ledger Debits: ${entry_lists['canada_helps']['total_debit'][1]}\n")
print(f"Paypal: \nBank Deposits: credit: ${entry_lists['paypal']['total_credit'][0]}", \
f"debit: ${entry_lists['paypal']['total_debit'][0]}", \
f"\nGeneral Ledger: credit: ${entry_lists['paypal']['total_credit'][1]}", \
f"debit: ${entry_lists['paypal']['total_debit'][1]}\n")
print(f"E transfer: \nBank Deposits: credit: ${entry_lists['etransfer']['total_credit'][0]}", \
f"debit: ${entry_lists['etransfer']['total_debit'][0]}", \
f"\nGeneral Ledger: credit: ${entry_lists['etransfer']['total_credit'][1]}", \
f"debit: ${entry_lists['etransfer']['total_debit'][1]}\n")
'''
#found at https://stackoverflow.com/questions/8270092/remove-all-whitespace-in-a-string
def removeExtraSpaces(string):
    """Collapse all whitespace runs in *string* to single spaces, trimming both ends."""
    parts = string.split()
    return " ".join(parts)
def ascending_dates(list):
    """Ensure the entry list runs oldest-to-newest, reversing it in place if needed.

    Args:
        list: non-empty list of entry dicts whose "date" values are
            "YYYY-MM-DD" strings (as produced by processCSV/processExcel).
    """
    def _key(entry):
        # Compare the full date numerically.  The original compared only the
        # day-of-month, which mis-ordered any list spanning a month boundary
        # (e.g. Jan 31 followed by Feb 1 was treated as descending).
        year, month, day = entry["date"].split("-")
        return (int(year), int(month), int(day))

    if _key(list[0]) > _key(list[-1]):
        list.reverse()
def standardize_date_string(string):
    """Convert a bank-statement date like "05-Jun-19" to ISO "2019-06-05".

    Month names longer than three characters (e.g. "June", "January") are
    truncated to their three-letter abbreviation before lookup.  Years are
    assumed to be two digits in the 2000s.
    """
    day, month_name, year = string.split('-')
    if len(month_name) != 3:
        month_name = month_name[:3]
    months = {"Jan": "01", "Feb": "02", "Mar": "03", "Apr": "04",
              "May": "05", "Jun": "06", "Jul": "07", "Aug": "08",
              "Sep": "09", "Oct": "10", "Nov": "11", "Dec": "12"}
    return '20' + year + '-' + months[month_name] + '-' + day
def processCSV():
    """Parse the selected bank-statement CSV into a list of entry dicts.

    Uses the module-level bank_statement_path chosen via the file dialog.
    Expected column layout per row: [1]=date, [2]=comment, [3]=source num,
    [4]=credit, [5]=debit.

    Returns:
        List of dicts with keys date (ISO string), comment, source_num,
        credit and debit (all strings; empty amounts become '0').
    """
    formattedCSV = []
    with open(bank_statement_path, newline='') as csvfile:
        csv_reader = reader(csvfile)
        for row in csv_reader:
            entry = {}
            entry["date"] = standardize_date_string(row[1])
            entry["comment"] = removeExtraSpaces(row[2])
            entry["source_num"] = str(row[3]).strip()
            entry["credit"] = str(row[4]).strip()
            if entry["credit"] == '':
                entry["credit"] = '0'
            entry["debit"] = str(row[5]).strip()
            if entry["debit"] == '':
                entry["debit"] = '0'
            formattedCSV.append(entry)
    # The leftover debug prints of formattedCSV[0] were removed: they spammed
    # stdout and raised IndexError on an empty statement file.
    return formattedCSV
def processExcel():
    """Parse the selected Sage general-ledger .xlsx into a list of entry dicts.

    Uses the module-level general_ledger_path chosen via the file dialog.
    Rows are read from 'Sheet1'; the first five rows (report header) and
    the last two (totals footer) are skipped.

    Returns:
        List of dicts with keys date, comment, source_num, debit, credit
        (all strings; date is truncated to "YYYY-MM-DD").
    """
    workbook = load_workbook(filename=general_ledger_path, read_only=True)
    sheet = workbook['Sheet1']
    rows = list(sheet.rows)
    # Drop the report header (first 5 rows) and the totals footer (last 2).
    rows = rows[5:-2]
    formattedExcel = []
    for row in rows:
        data = []
        for cell in row:
            data.append(cell.value)
        entry = {}
        # Column layout: [2]=date, [3]=comment, [4]=source num,
        # [6]=debit, [7]=credit.
        entry["date"] = str(data[2])[0:10]
        entry["comment"] = str(data[3]).strip()
        entry["source_num"] = str(data[4]).strip()
        entry["debit"] = str(data[6]).strip()
        entry["credit"] = str(data[7]).strip()
        formattedExcel.append(entry)
    return formattedExcel
def select_bank_file():
    """Prompt for the bank-statement .csv and record its path and display name."""
    global bank_statement_path
    bank_statement_path = filedialog.askopenfilename(initialdir = "/",title = "Select Bank File",filetypes = ( ("csv files","*.csv"), ))
    # NOTE(review): bank_statement_name is created inside main(); unless it is
    # declared global there, this call raises NameError -- verify.
    bank_statement_name.set(bank_statement_path.split('/')[-1])
def select_sage_file():
    """Prompt for the Sage general-ledger .xlsx and record its path and display name."""
    global general_ledger_path
    general_ledger_path = filedialog.askopenfilename(initialdir = "/",title = "Select Sage File",filetypes = ( ("xlsx files","*.xlsx"), ))
    # NOTE(review): general_ledger_name is created inside main(); unless it is
    # declared global there, this call raises NameError -- verify.
    general_ledger_name.set(general_ledger_path.split('/')[-1])
def main():
    """Build the Tk window, wire up the file pickers, and run the event loop."""
    # These StringVars must be module-level: select_bank_file/select_sage_file
    # reference them from their button callbacks, and as plain locals of
    # main() those callbacks raised NameError as soon as a file was chosen.
    global bank_statement_name, general_ledger_name
    root = Tk()
    root.title("Bank Reconciliation")
    mainframe = ttk.Frame(root, padding="3 3 12 12")
    mainframe.grid(column=0, row=0, sticky=(N, W, E, S))
    root.columnconfigure(0, weight=1)
    root.rowconfigure(0, weight=1)
    bank_statement_name = StringVar()
    bank_statement_name.set('None')
    general_ledger_name = StringVar()
    general_ledger_name.set('None')
    ttk.Label(mainframe, text='Select Bank Statement:').grid(column=0, row=0, sticky=W, padx=(50, 15), pady=5)
    ttk.Button(mainframe, text='Choose .csv File', command=select_bank_file).grid(column=1, row=0, padx=(15,50), pady=5)
    ttk.Label(mainframe, text='Selected File:').grid(column=0, row=1, padx=(50, 15), pady=5, sticky=W)
    ttk.Label(mainframe, textvariable=bank_statement_name).grid(column=1, row=1, padx = 15, pady=5, sticky=W)
    ttk.Label(mainframe, text='Select General Ledger:').grid(column=2, row=0, sticky=W, padx=(50, 15), pady=5)
    ttk.Button(mainframe, text='Choose .xlsx File', command=select_sage_file).grid(column=3, row=0, padx=(15, 50), pady=5)
    ttk.Label(mainframe, text='Selected File:').grid(column=2, row=1, padx=(50,15), pady=5, sticky=W)
    ttk.Label(mainframe, textvariable=general_ledger_name).grid(column=3, row=1, padx=(15,50), pady=5, sticky=W)
    ttk.Button(mainframe, text='Reconcile', command=reconcile).grid(column=4, row=0, rowspan=2, padx=(50,15), pady=15)
    root.mainloop()
main() |
liualexiang/liualexiang.github.io | _posts/modify_front_matter.py | import os,re,codecs
def get_docs_list():
    """Recursively collect the paths of all Markdown files under the current directory.

    Returns:
        List of relative paths (str) for every *.md file found.
    """
    docs_list = []
    for path, subdirs, files in os.walk("."):
        for name in files:
            # endswith handles any filename; the original split(".") test
            # silently skipped files containing extra dots (e.g. "a.b.md").
            if name.endswith(".md"):
                docs_list.append(os.path.join(path, name))
    return docs_list
def get_doc_title(file):
    """Return the text of the first Markdown heading in *file*, or None if absent."""
    heading_pattern = re.compile(r"#+ (.*)\n")
    with open(file, "r+", encoding="utf-8") as f:
        for line in f.readlines():
            # A heading line is one or more '#', a space, then the title.
            if re.fullmatch("#+ .*\n", line):
                return heading_pattern.search(line).group(1)
def remove_front_matter(file):
    """Strip a '---' ... '---' YAML front-matter block from *file* in place.

    The pattern expects CRLF after the opening '---'; codecs.open does not
    translate newlines (unlike the built-in open in text mode), so CRLF
    content survives the round trip.
    """
    with codecs.open(file, "r+", encoding="utf-8") as f:
        content = f.read()
        # Region bounded by --- markers; \u4e00-\u9fa5 admits CJK characters
        # inside the block.  Raw string avoids the invalid '\s' escape
        # warning the original non-raw pattern produced.
        reg = re.compile(r"---\r\n[\sa-zA-Z:\u4e00-\u9fa50-9\-\+]*---")
        subcontent = re.sub(reg, "", content)
        f.seek(0)
        f.write(subcontent)
        # Without truncate(), a shorter result left stale bytes from the old
        # content dangling at the end of the file.
        f.truncate()
def add_front_matter(file):
    """Prepend a Jekyll front-matter block to *file*, titled from its first heading."""
    with open(file, "r+", encoding="utf-8") as f:
        content = f.read()
        title = get_doc_title(file)
        # Rewriting from position 0 is safe here: the new text (header +
        # original content) is strictly longer than the old content, so no
        # stale bytes can survive at the tail.
        f.seek(0)
        f.write("---\nauthor: liualexiang\ntitle: {title}\nlayout: article\ndate: 2021-01-01 00:00:00 +0800\n---\n".format(title= title) + content)
def do_add_front_matter(file):
    """Add front matter to *file* unless it already starts with a '---' line."""
    with open(file, "r+", encoding="utf-8") as f:
        # readline() is safe on an empty file, where the original
        # readlines()[0] raised IndexError.
        first_line = f.readline()
    if not re.match("-+", first_line):
        add_front_matter(file)
def traversal_remove_front_matter():
    """Strip front matter from every Markdown file under the current directory."""
    for doc_path in get_docs_list():
        remove_front_matter(doc_path)
def traversal_add_front_matter():
    """Ensure every Markdown file under the current directory has front matter."""
    for doc_path in get_docs_list():
        print(doc_path)
        do_add_front_matter(doc_path)
# if __name__ == "__main__":
# traversal_remove_front_matter()
if __name__ == "__main__":
traversal_add_front_matter() |
liualexiang/liualexiang.github.io | _posts/modify_md_name_with_time.py | import os
# Date prefix (YYYY-MM-DD) applied to every renamed Markdown file.
fileStartDate = "2021-01-01"
def update_md_file_name():
    """Prefix every *.md file under the current directory with fileStartDate.

    Renames e.g. "post.md" to "2021-01-01-post.md", recursing into
    subdirectories.
    """
    for path, subdirs, files in os.walk("."):
        for name in files:
            # endswith handles filenames with extra dots that the original
            # split(".") test skipped.
            if name.endswith(".md"):
                newName = fileStartDate + "-" + name
                print("old name is{name}, new name is{newName}".format(name=name, newName=newName))
                # Join with the walk directory: a bare filename only resolves
                # for files directly in the CWD and raised FileNotFoundError
                # for anything found in a subdirectory.
                os.rename(os.path.join(path, name), os.path.join(path, newName))
update_md_file_name() |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.