from flask import Flask, render_template, request, redirect
app = Flask(__name__)
@app.route('/')
def index():
    """Render the landing page."""
    return render_template("index.html")
@app.route('/checkout', methods=['POST'])
def checkout():
    """Process the fruit-order form and render the checkout summary.

    Reads the student's name/id and per-fruit quantities from the POSTed
    form, logs the charge, and renders ``checkout.html`` with the parsed
    quantities.
    """
    fname = request.form["first_name"]
    lname = request.form["last_name"]
    sid = request.form["student_id"]
    apple = request.form["apple"]
    strawberry = request.form["strawberry"]
    raspberry = request.form["raspberry"]
    # Bug fix: this was previously wrapped in a one-element list
    # ([int(a)+int(s)+int(r)]), so the log printed e.g. "[12] fruits".
    total = int(apple) + int(strawberry) + int(raspberry)
    print(request.form)  # debug: raw submitted form data
    print(f"Charging {fname} {lname} for {total} fruits")
    return render_template("checkout.html", fname=fname, lname=lname, sid=sid,
                           apple=int(apple), strawberry=int(strawberry),
                           raspberry=int(raspberry))
@app.route('/fruits')
def fruits():
    """Render the fruit-selection page."""
    return render_template("fruits.html")
if __name__ == "__main__":
    # Development server only; disable debug mode in production.
    # (Removed a stray trailing '|' artifact that broke the syntax.)
    app.run(debug=True)
class Person(object):
    """Wrap an integer and support ``+`` between two instances."""

    def __init__(self, num):
        self.num = num

    def __add__(self, other):
        # Operator overloading: combine the wrapped values into a new Person.
        combined = self.num + other.num
        return Person(combined)

    def __str__(self):
        return "num is " + str(self.num)
# Demo: operator overloading via __add__.
per1 = Person(1)
per2 = Person(2)
print(per1 + per2)  # equivalent to per1.__add__(per2)
print(per1.__add__(per2))
import numpy as np
import itertools
__version__ = "0.2.0"
def my_method(arg1, arg2):
    """Return ``arg1 + arg2`` (numeric sum or sequence concatenation)."""
    combined = arg1 + arg2
    return combined


my_method(5, 6)
def addition_simplified(*args):
    """Return the sum of all positional arguments (0 for no arguments)."""
    total = 0
    for value in args:
        total += value
    return total


print(addition_simplified(3, 5, 7, 12, 14, 55))
##
def what_are_kwargs(*args, **kwargs):
    """Demonstrate arbitrary positional/keyword arguments by printing them.

    ``args`` arrives as a tuple, ``kwargs`` as a dict; returns None.
    """
    print(args)
    print(kwargs)


# (Removed a stray trailing '|' artifact that broke the syntax of this call.)
what_are_kwargs(12, 34, 56, word="test")
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 4 15:44:28 2017
@author: zx621293
"""
def myFA(X, label_refine, label, n_components, max_iter=2):
    """Fit FactorAnalysis on ``X`` and scatter-plot the first components.

    Parameters
    ----------
    X : array of shape (n_samples, n_features)
    label_refine : per-sample class assignments; replaced with zeros when its
        length does not match ``X`` (no ground truth available).
    label : list of class names used for the legend.
    n_components : number of factors to extract.
    max_iter : maximum EM iterations for the estimator.

    Returns
    -------
    (X_fa, components) : the transformed data and the estimator's components_.

    NOTE(review): relies on module-level ``decomposition`` (sklearn), ``time``
    and ``plt`` being imported elsewhere in the file.
    """
    n = X.shape[0]
    if len(label_refine) != n:
        label_refine = [0] * n
        label = ['no ground truth']
        print('No ground truth provided in this dataset')
    # Bug fix: max_iter was previously hard-coded to 2, silently ignoring
    # the function's max_iter parameter.
    estimator = decomposition.FactorAnalysis(n_components=n_components,
                                             max_iter=max_iter)
    t0 = time()
    X_fa = estimator.fit_transform(X)
    t1 = time()
    plt.figure(figsize=(30, 10))
    plt.suptitle("Factor Analysis on dataset with accepted %i experiments, each with %i covariates. \nClasses: %s "
                 % (X.shape[0], X.shape[1], label), fontsize=24)
    k = len(label)
    for i in [1, 2]:
        plt.subplot(1, 2, i)
        # Bug fix: the subplot title claimed "FastICA" although this function
        # performs Factor Analysis.
        plt.title("Factor Analysis (%.2g sec)" % (t1 - t0))
        for j, lab in zip(np.linspace(0, k - 1, k), label):
            plt.scatter(X_fa[label_refine == j, np.mod(i, 2)],
                        X_fa[label_refine == j, np.mod(i, 2) + 1],
                        cmap=plt.cm.Spectral, label=lab)
        plt.xlabel("%i principal component" % (np.mod(i, 2) + 1), fontsize=14)
        plt.ylabel("%i principal component" % (np.mod(i, 2) + 2), fontsize=14)
        plt.legend(loc=1)
        plt.axis()
    plt.show()
    components = estimator.components_
    return X_fa, components
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
##################################################
# GNU Radio Python Flow Graph
# Title: Meteor M2 Receiver 4 USRP (QT)
# Author: Antoine CAILLOT
# Description: Demodulator of Meteor M2 satellites
# GNU Radio version: 3.7.13.5
##################################################
# Python 2 / GNU Radio 3.7 generated file (print statement, xrange below).
if __name__ == '__main__':
    import ctypes
    import sys
    if sys.platform.startswith('linux'):
        # Qt + GNU Radio use X11 from multiple threads; XInitThreads must be
        # called before any other Xlib call or the GUI can crash.
        try:
            x11 = ctypes.cdll.LoadLibrary('libX11.so')
            x11.XInitThreads()
        except:
            print "Warning: failed to XInitThreads()"

from PyQt4 import Qt
from PyQt4.QtCore import QObject, pyqtSlot
from datetime import datetime
from gnuradio import analog
from gnuradio import blocks
from gnuradio import digital
from gnuradio import eng_notation
from gnuradio import filter
from gnuradio import gr
from gnuradio import qtgui
from gnuradio import uhd
from gnuradio.eng_option import eng_option
from gnuradio.filter import firdes
from gnuradio.qtgui import Range, RangeWidget
from optparse import OptionParser
import os.path
import sip
import sys
import time
from gnuradio import qtgui
class top_block(gr.top_block, Qt.QWidget):
    """GRC-generated flowgraph: QPSK receiver/demodulator for Meteor-M2 LRPT.

    USRP -> frequency shift -> low-pass -> decimating resampler -> AGC ->
    RRC filter -> Costas loop -> M&M clock recovery -> soft decoder ->
    rail/quantize -> soft-symbol file sink; raw I/Q can optionally be
    recorded to a WAV file.

    NOTE: machine-generated Python 2 code (xrange, dict.iteritems); prefer
    regenerating from the .grc file over hand-editing this class.
    """

    def __init__(self):
        """Build the Qt GUI (tabs, sliders, sinks) and wire the DSP chain."""
        gr.top_block.__init__(self, "Meteor M2 Receiver 4 USRP (QT) ")
        Qt.QWidget.__init__(self)
        self.setWindowTitle("Meteor M2 Receiver 4 USRP (QT) ")
        qtgui.util.check_set_qss()
        try:
            self.setWindowIcon(Qt.QIcon.fromTheme('gnuradio-grc'))
        except:
            pass
        self.top_scroll_layout = Qt.QVBoxLayout()
        self.setLayout(self.top_scroll_layout)
        self.top_scroll = Qt.QScrollArea()
        self.top_scroll.setFrameStyle(Qt.QFrame.NoFrame)
        self.top_scroll_layout.addWidget(self.top_scroll)
        self.top_scroll.setWidgetResizable(True)
        self.top_widget = Qt.QWidget()
        self.top_scroll.setWidget(self.top_widget)
        self.top_layout = Qt.QVBoxLayout(self.top_widget)
        self.top_grid_layout = Qt.QGridLayout()
        self.top_layout.addLayout(self.top_grid_layout)
        self.settings = Qt.QSettings("GNU Radio", "top_block")
        self.restoreGeometry(self.settings.value("geometry").toByteArray())

        ##################################################
        # Variables
        ##################################################
        self.symbol_rate = symbol_rate = 72000
        self.signal_samp_rate = signal_samp_rate = 140e3
        self.path_to_save_dir = path_to_save_dir = os.path.expanduser("~/Desktop")
        self.decim_factor = decim_factor = 4
        self.usrp_samp_rate = usrp_samp_rate = signal_samp_rate*decim_factor
        self.samp_per_sec = samp_per_sec = (signal_samp_rate * 1.0) / (symbol_rate * 1.0)
        self.rx_freq_old = rx_freq_old = 137.1e6
        self.rx_freq = rx_freq = 137.1e6
        self.rf_gain = rf_gain = 20
        self.record = record = False
        self.pll_alpha = pll_alpha = 0.015
        self.file_path = file_path = path_to_save_dir + "/LRPT_" + datetime.now().strftime("%d%m%Y_%H%M")+".s"
        self.clock_alpha = clock_alpha = 0.001

        ##################################################
        # Blocks
        ##################################################
        self.qtgui_tab_widget_0 = Qt.QTabWidget()
        self.qtgui_tab_widget_0_widget_0 = Qt.QWidget()
        self.qtgui_tab_widget_0_layout_0 = Qt.QBoxLayout(Qt.QBoxLayout.TopToBottom, self.qtgui_tab_widget_0_widget_0)
        self.qtgui_tab_widget_0_grid_layout_0 = Qt.QGridLayout()
        self.qtgui_tab_widget_0_layout_0.addLayout(self.qtgui_tab_widget_0_grid_layout_0)
        self.qtgui_tab_widget_0.addTab(self.qtgui_tab_widget_0_widget_0, 'Reception')
        self.qtgui_tab_widget_0_widget_1 = Qt.QWidget()
        self.qtgui_tab_widget_0_layout_1 = Qt.QBoxLayout(Qt.QBoxLayout.TopToBottom, self.qtgui_tab_widget_0_widget_1)
        self.qtgui_tab_widget_0_grid_layout_1 = Qt.QGridLayout()
        self.qtgui_tab_widget_0_layout_1.addLayout(self.qtgui_tab_widget_0_grid_layout_1)
        self.qtgui_tab_widget_0.addTab(self.qtgui_tab_widget_0_widget_1, 'Demodulation')
        self.qtgui_tab_widget_0_widget_2 = Qt.QWidget()
        self.qtgui_tab_widget_0_layout_2 = Qt.QBoxLayout(Qt.QBoxLayout.TopToBottom, self.qtgui_tab_widget_0_widget_2)
        self.qtgui_tab_widget_0_grid_layout_2 = Qt.QGridLayout()
        self.qtgui_tab_widget_0_layout_2.addLayout(self.qtgui_tab_widget_0_grid_layout_2)
        self.qtgui_tab_widget_0.addTab(self.qtgui_tab_widget_0_widget_2, 'Decoding')
        self.top_grid_layout.addWidget(self.qtgui_tab_widget_0)
        # RX frequency combo box (Reception tab).
        self._rx_freq_options = (137.1e6, 137.9e6, 101.8e6, )
        self._rx_freq_labels = ('137.1 MHz', '137.9 MHz', 'Test FM', )
        self._rx_freq_tool_bar = Qt.QToolBar(self)
        self._rx_freq_tool_bar.addWidget(Qt.QLabel('RX Frequency'+": "))
        self._rx_freq_combo_box = Qt.QComboBox()
        self._rx_freq_tool_bar.addWidget(self._rx_freq_combo_box)
        for label in self._rx_freq_labels: self._rx_freq_combo_box.addItem(label)
        self._rx_freq_callback = lambda i: Qt.QMetaObject.invokeMethod(self._rx_freq_combo_box, "setCurrentIndex", Qt.Q_ARG("int", self._rx_freq_options.index(i)))
        self._rx_freq_callback(self.rx_freq)
        self._rx_freq_combo_box.currentIndexChanged.connect(
            lambda i: self.set_rx_freq(self._rx_freq_options[i]))
        self.qtgui_tab_widget_0_grid_layout_0.addWidget(self._rx_freq_tool_bar, 0, 2, 1, 1)
        for r in range(0, 1):
            self.qtgui_tab_widget_0_grid_layout_0.setRowStretch(r, 1)
        for c in range(2, 3):
            self.qtgui_tab_widget_0_grid_layout_0.setColumnStretch(c, 1)
        # RF gain slider (Reception tab).
        self._rf_gain_range = Range(0, 65, 5, 20, 200)
        self._rf_gain_win = RangeWidget(self._rf_gain_range, self.set_rf_gain, 'RF input gain', "counter_slider", float)
        self.qtgui_tab_widget_0_grid_layout_0.addWidget(self._rf_gain_win, 0, 0, 1, 2)
        for r in range(0, 1):
            self.qtgui_tab_widget_0_grid_layout_0.setRowStretch(r, 1)
        for c in range(0, 2):
            self.qtgui_tab_widget_0_grid_layout_0.setColumnStretch(c, 1)
        # Raw I/Q record checkbox (Reception tab).
        _record_check_box = Qt.QCheckBox('Raw I/Q Record')
        self._record_choices = {True: True, False: False}
        self._record_choices_inv = dict((v,k) for k,v in self._record_choices.iteritems())  # Python 2 iteritems
        self._record_callback = lambda i: Qt.QMetaObject.invokeMethod(_record_check_box, "setChecked", Qt.Q_ARG("bool", self._record_choices_inv[i]))
        self._record_callback(self.record)
        _record_check_box.stateChanged.connect(lambda i: self.set_record(self._record_choices[bool(i)]))
        self.qtgui_tab_widget_0_grid_layout_0.addWidget(_record_check_box, 0, 3, 1, 1)
        for r in range(0, 1):
            self.qtgui_tab_widget_0_grid_layout_0.setRowStretch(r, 1)
        for c in range(3, 4):
            self.qtgui_tab_widget_0_grid_layout_0.setColumnStretch(c, 1)
        # PLL / clock-recovery tuning sliders (Decoding tab).
        self._pll_alpha_range = Range(0.001, 0.100, 0.001, 0.015, 200)
        self._pll_alpha_win = RangeWidget(self._pll_alpha_range, self.set_pll_alpha, 'PLL alpha', "counter_slider", float)
        self.qtgui_tab_widget_0_grid_layout_2.addWidget(self._pll_alpha_win, 4, 0, 1, 1)
        for r in range(4, 5):
            self.qtgui_tab_widget_0_grid_layout_2.setRowStretch(r, 1)
        for c in range(0, 1):
            self.qtgui_tab_widget_0_grid_layout_2.setColumnStretch(c, 1)
        self._clock_alpha_range = Range(0.001, 0.01, 0.001, 0.001, 200)
        self._clock_alpha_win = RangeWidget(self._clock_alpha_range, self.set_clock_alpha, 'Clock alpha', "counter_slider", float)
        self.qtgui_tab_widget_0_grid_layout_2.addWidget(self._clock_alpha_win, 3, 0, 1, 1)
        for r in range(3, 4):
            self.qtgui_tab_widget_0_grid_layout_2.setRowStretch(r, 1)
        for c in range(0, 1):
            self.qtgui_tab_widget_0_grid_layout_2.setColumnStretch(c, 1)
        # DSP blocks.
        self.root_raised_cosine_filter_0 = filter.fir_filter_ccf(1, firdes.root_raised_cosine(
            1, signal_samp_rate, symbol_rate, 0.6, 361))
        self.rational_resampler_xxx_0 = filter.rational_resampler_ccc(
            interpolation=1,
            decimation=decim_factor,
            taps=None,
            fractional_bw=None,
        )
        self.qtgui_waterfall_sink_x_0 = qtgui.waterfall_sink_c(
            1024, #size
            firdes.WIN_BLACKMAN_hARRIS, #wintype
            rx_freq, #fc
            signal_samp_rate, #bw
            'Shifted signal', #name
            1 #number of inputs
        )
        self.qtgui_waterfall_sink_x_0.set_update_time(0.10)
        self.qtgui_waterfall_sink_x_0.enable_grid(True)
        self.qtgui_waterfall_sink_x_0.enable_axis_labels(True)
        if not True:
            self.qtgui_waterfall_sink_x_0.disable_legend()
        if "complex" == "float" or "complex" == "msg_float":
            self.qtgui_waterfall_sink_x_0.set_plot_pos_half(not True)
        labels = ['', '', '', '', '',
                  '', '', '', '', '']
        colors = [6, 0, 0, 0, 0,
                  0, 0, 0, 0, 0]
        alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
                  1.0, 1.0, 1.0, 1.0, 1.0]
        for i in xrange(1):
            if len(labels[i]) == 0:
                self.qtgui_waterfall_sink_x_0.set_line_label(i, "Data {0}".format(i))
            else:
                self.qtgui_waterfall_sink_x_0.set_line_label(i, labels[i])
            self.qtgui_waterfall_sink_x_0.set_color_map(i, colors[i])
            self.qtgui_waterfall_sink_x_0.set_line_alpha(i, alphas[i])
        self.qtgui_waterfall_sink_x_0.set_intensity_range(-100, -30)
        self._qtgui_waterfall_sink_x_0_win = sip.wrapinstance(self.qtgui_waterfall_sink_x_0.pyqwidget(), Qt.QWidget)
        self.qtgui_tab_widget_0_grid_layout_0.addWidget(self._qtgui_waterfall_sink_x_0_win, 2, 0, 1, 4)
        for r in range(2, 3):
            self.qtgui_tab_widget_0_grid_layout_0.setRowStretch(r, 1)
        for c in range(0, 4):
            self.qtgui_tab_widget_0_grid_layout_0.setColumnStretch(c, 1)
        self.qtgui_freq_sink_x_1 = qtgui.freq_sink_c(
            1024, #size
            firdes.WIN_BLACKMAN_hARRIS, #wintype
            rx_freq+signal_samp_rate, #fc
            usrp_samp_rate, #bw
            'Centered on RAW signal', #name
            3 #number of inputs
        )
        self.qtgui_freq_sink_x_1.set_update_time(0.10)
        self.qtgui_freq_sink_x_1.set_y_axis(-140, 0)
        self.qtgui_freq_sink_x_1.set_y_label('Relative Gain', 'dB')
        self.qtgui_freq_sink_x_1.set_trigger_mode(qtgui.TRIG_MODE_FREE, 0.0, 0, "")
        self.qtgui_freq_sink_x_1.enable_autoscale(False)
        self.qtgui_freq_sink_x_1.enable_grid(True)
        self.qtgui_freq_sink_x_1.set_fft_average(1.0)
        self.qtgui_freq_sink_x_1.enable_axis_labels(True)
        self.qtgui_freq_sink_x_1.enable_control_panel(True)
        if not True:
            self.qtgui_freq_sink_x_1.disable_legend()
        if "complex" == "float" or "complex" == "msg_float":
            self.qtgui_freq_sink_x_1.set_plot_pos_half(not True)
        labels = ['Filtered', 'Shifted', 'RAW', '', '',
                  '', '', '', '', '']
        widths = [1, 1, 1, 1, 1,
                  1, 1, 1, 1, 1]
        colors = ["blue", "red", "black", "black", "cyan",
                  "magenta", "yellow", "dark red", "dark green", "dark blue"]
        alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
                  1.0, 1.0, 1.0, 1.0, 1.0]
        for i in xrange(3):
            if len(labels[i]) == 0:
                self.qtgui_freq_sink_x_1.set_line_label(i, "Data {0}".format(i))
            else:
                self.qtgui_freq_sink_x_1.set_line_label(i, labels[i])
            self.qtgui_freq_sink_x_1.set_line_width(i, widths[i])
            self.qtgui_freq_sink_x_1.set_line_color(i, colors[i])
            self.qtgui_freq_sink_x_1.set_line_alpha(i, alphas[i])
        self._qtgui_freq_sink_x_1_win = sip.wrapinstance(self.qtgui_freq_sink_x_1.pyqwidget(), Qt.QWidget)
        self.qtgui_tab_widget_0_grid_layout_0.addWidget(self._qtgui_freq_sink_x_1_win, 1, 0, 1, 4)
        for r in range(1, 2):
            self.qtgui_tab_widget_0_grid_layout_0.setRowStretch(r, 1)
        for c in range(0, 4):
            self.qtgui_tab_widget_0_grid_layout_0.setColumnStretch(c, 1)
        self.qtgui_freq_sink_x_0 = qtgui.freq_sink_c(
            1024, #size
            firdes.WIN_BLACKMAN_hARRIS, #wintype
            rx_freq, #fc
            signal_samp_rate, #bw
            "", #name
            1 #number of inputs
        )
        self.qtgui_freq_sink_x_0.set_update_time(0.10)
        self.qtgui_freq_sink_x_0.set_y_axis(-140, 10)
        self.qtgui_freq_sink_x_0.set_y_label('Relative Gain', 'dB')
        self.qtgui_freq_sink_x_0.set_trigger_mode(qtgui.TRIG_MODE_FREE, 0.0, 0, "")
        self.qtgui_freq_sink_x_0.enable_autoscale(False)
        self.qtgui_freq_sink_x_0.enable_grid(False)
        self.qtgui_freq_sink_x_0.set_fft_average(1.0)
        self.qtgui_freq_sink_x_0.enable_axis_labels(True)
        self.qtgui_freq_sink_x_0.enable_control_panel(False)
        if not True:
            self.qtgui_freq_sink_x_0.disable_legend()
        if "complex" == "float" or "complex" == "msg_float":
            self.qtgui_freq_sink_x_0.set_plot_pos_half(not True)
        labels = ['', '', '', '', '',
                  '', '', '', '', '']
        widths = [1, 1, 1, 1, 1,
                  1, 1, 1, 1, 1]
        colors = ["blue", "red", "green", "black", "cyan",
                  "magenta", "yellow", "dark red", "dark green", "dark blue"]
        alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
                  1.0, 1.0, 1.0, 1.0, 1.0]
        for i in xrange(1):
            if len(labels[i]) == 0:
                self.qtgui_freq_sink_x_0.set_line_label(i, "Data {0}".format(i))
            else:
                self.qtgui_freq_sink_x_0.set_line_label(i, labels[i])
            self.qtgui_freq_sink_x_0.set_line_width(i, widths[i])
            self.qtgui_freq_sink_x_0.set_line_color(i, colors[i])
            self.qtgui_freq_sink_x_0.set_line_alpha(i, alphas[i])
        self._qtgui_freq_sink_x_0_win = sip.wrapinstance(self.qtgui_freq_sink_x_0.pyqwidget(), Qt.QWidget)
        self.qtgui_tab_widget_0_grid_layout_1.addWidget(self._qtgui_freq_sink_x_0_win, 0, 0, 1, 1)
        for r in range(0, 1):
            self.qtgui_tab_widget_0_grid_layout_1.setRowStretch(r, 1)
        for c in range(0, 1):
            self.qtgui_tab_widget_0_grid_layout_1.setColumnStretch(c, 1)
        self.qtgui_const_sink_x_0 = qtgui.const_sink_c(
            1024, #size
            "", #name
            1 #number of inputs
        )
        self.qtgui_const_sink_x_0.set_update_time(1.0 / symbol_rate)
        self.qtgui_const_sink_x_0.set_y_axis(-1, 1)
        self.qtgui_const_sink_x_0.set_x_axis(-1, 1)
        self.qtgui_const_sink_x_0.set_trigger_mode(qtgui.TRIG_MODE_FREE, qtgui.TRIG_SLOPE_POS, 0.0, 0, "")
        self.qtgui_const_sink_x_0.enable_autoscale(False)
        self.qtgui_const_sink_x_0.enable_grid(False)
        self.qtgui_const_sink_x_0.enable_axis_labels(True)
        if not True:
            self.qtgui_const_sink_x_0.disable_legend()
        labels = ['', '', '', '', '',
                  '', '', '', '', '']
        widths = [1, 1, 1, 1, 1,
                  1, 1, 1, 1, 1]
        colors = ["blue", "red", "red", "red", "red",
                  "red", "red", "red", "red", "red"]
        styles = [0, 0, 0, 0, 0,
                  0, 0, 0, 0, 0]
        markers = [0, 0, 0, 0, 0,
                   0, 0, 0, 0, 0]
        alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
                  1.0, 1.0, 1.0, 1.0, 1.0]
        for i in xrange(1):
            if len(labels[i]) == 0:
                self.qtgui_const_sink_x_0.set_line_label(i, "Data {0}".format(i))
            else:
                self.qtgui_const_sink_x_0.set_line_label(i, labels[i])
            self.qtgui_const_sink_x_0.set_line_width(i, widths[i])
            self.qtgui_const_sink_x_0.set_line_color(i, colors[i])
            self.qtgui_const_sink_x_0.set_line_style(i, styles[i])
            self.qtgui_const_sink_x_0.set_line_marker(i, markers[i])
            self.qtgui_const_sink_x_0.set_line_alpha(i, alphas[i])
        self._qtgui_const_sink_x_0_win = sip.wrapinstance(self.qtgui_const_sink_x_0.pyqwidget(), Qt.QWidget)
        self.qtgui_tab_widget_0_grid_layout_2.addWidget(self._qtgui_const_sink_x_0_win, 0, 0, 3, 1)
        for r in range(0, 3):
            self.qtgui_tab_widget_0_grid_layout_2.setRowStretch(r, 1)
        for c in range(0, 1):
            self.qtgui_tab_widget_0_grid_layout_2.setColumnStretch(c, 1)
        self.low_pass_filter_0 = filter.fir_filter_ccf(1, firdes.low_pass(
            2, usrp_samp_rate, signal_samp_rate, 25e3, firdes.WIN_HAMMING, 6.76))
        self.digital_costas_loop_cc_0 = digital.costas_loop_cc(pll_alpha, 4, False)
        self.digital_constellation_soft_decoder_cf_0 = digital.constellation_soft_decoder_cf(digital.constellation_calcdist(([-1-1j, -1+1j, 1+1j, 1-1j]), ([0, 1, 3, 2]), 4, 1).base())
        self.digital_clock_recovery_mm_xx_0 = digital.clock_recovery_mm_cc(samp_per_sec, clock_alpha**2/4.0, 0.5, clock_alpha, 0.005)
        self.blocks_wavfile_sink_1 = blocks.wavfile_sink(file_path+"_rawIQ.wav", 2, int(usrp_samp_rate/decim_factor), 8)
        self.blocks_mute_xx_0 = blocks.mute_cc(bool(record))
        self.blocks_multiply_xx_0 = blocks.multiply_vcc(1)
        self.blocks_float_to_char_0 = blocks.float_to_char(1, 127)
        self.blocks_file_sink_0 = blocks.file_sink(gr.sizeof_char*1, file_path, False)
        self.blocks_file_sink_0.set_unbuffered(False)
        self.blocks_complex_to_float_0 = blocks.complex_to_float(1)
        self.analog_sig_source_x_0 = analog.sig_source_c(usrp_samp_rate, analog.GR_COS_WAVE, signal_samp_rate, 1, 0)
        self.analog_rail_ff_0 = analog.rail_ff(-1, 1)
        self.analog_agc_xx_0 = analog.agc_cc(100e-3, 500e-3, 1.0)
        self.analog_agc_xx_0.set_max_gain(4e3)
        self.USRP = uhd.usrp_source(
            ",".join(("", "")),
            uhd.stream_args(
                cpu_format="fc32",
                args='peak=0.003906',
                channels=range(1),
            ),
        )
        self.USRP.set_samp_rate(usrp_samp_rate)
        self.USRP.set_center_freq(rx_freq+signal_samp_rate, 0)
        self.USRP.set_gain(rf_gain, 0)
        self.USRP.set_antenna('TX/RX', 0)
        self.USRP.set_bandwidth(usrp_samp_rate, 0)
        self.USRP.set_auto_dc_offset(True, 0)
        self.USRP.set_auto_iq_balance(True, 0)

        ##################################################
        # Connections
        ##################################################
        self.connect((self.USRP, 0), (self.blocks_multiply_xx_0, 0))
        self.connect((self.USRP, 0), (self.blocks_mute_xx_0, 0))
        self.connect((self.USRP, 0), (self.qtgui_freq_sink_x_1, 2))
        self.connect((self.analog_agc_xx_0, 0), (self.root_raised_cosine_filter_0, 0))
        self.connect((self.analog_rail_ff_0, 0), (self.blocks_float_to_char_0, 0))
        self.connect((self.analog_sig_source_x_0, 0), (self.blocks_multiply_xx_0, 1))
        self.connect((self.blocks_complex_to_float_0, 1), (self.blocks_wavfile_sink_1, 1))
        self.connect((self.blocks_complex_to_float_0, 0), (self.blocks_wavfile_sink_1, 0))
        self.connect((self.blocks_float_to_char_0, 0), (self.blocks_file_sink_0, 0))
        self.connect((self.blocks_multiply_xx_0, 0), (self.low_pass_filter_0, 0))
        self.connect((self.blocks_multiply_xx_0, 0), (self.qtgui_freq_sink_x_1, 1))
        self.connect((self.blocks_multiply_xx_0, 0), (self.qtgui_waterfall_sink_x_0, 0))
        self.connect((self.blocks_mute_xx_0, 0), (self.blocks_complex_to_float_0, 0))
        self.connect((self.digital_clock_recovery_mm_xx_0, 0), (self.digital_constellation_soft_decoder_cf_0, 0))
        self.connect((self.digital_clock_recovery_mm_xx_0, 0), (self.qtgui_const_sink_x_0, 0))
        self.connect((self.digital_constellation_soft_decoder_cf_0, 0), (self.analog_rail_ff_0, 0))
        self.connect((self.digital_costas_loop_cc_0, 0), (self.digital_clock_recovery_mm_xx_0, 0))
        self.connect((self.low_pass_filter_0, 0), (self.qtgui_freq_sink_x_1, 0))
        self.connect((self.low_pass_filter_0, 0), (self.rational_resampler_xxx_0, 0))
        self.connect((self.rational_resampler_xxx_0, 0), (self.analog_agc_xx_0, 0))
        self.connect((self.rational_resampler_xxx_0, 0), (self.qtgui_freq_sink_x_0, 0))
        self.connect((self.root_raised_cosine_filter_0, 0), (self.digital_costas_loop_cc_0, 0))

    def closeEvent(self, event):
        """Persist the window geometry before the widget closes."""
        self.settings = Qt.QSettings("GNU Radio", "top_block")
        self.settings.setValue("geometry", self.saveGeometry())
        event.accept()

    # --- GRC-generated variable accessors. Each setter also propagates the
    # --- new value to every block that depends on the variable.
    def get_symbol_rate(self):
        return self.symbol_rate

    def set_symbol_rate(self, symbol_rate):
        self.symbol_rate = symbol_rate
        self.set_samp_per_sec((self.signal_samp_rate * 1.0) / (self.symbol_rate * 1.0))
        self.root_raised_cosine_filter_0.set_taps(firdes.root_raised_cosine(1, self.signal_samp_rate, self.symbol_rate, 0.6, 361))
        self.qtgui_const_sink_x_0.set_update_time(1.0 / self.symbol_rate)

    def get_signal_samp_rate(self):
        return self.signal_samp_rate

    def set_signal_samp_rate(self, signal_samp_rate):
        self.signal_samp_rate = signal_samp_rate
        self.set_usrp_samp_rate(self.signal_samp_rate*self.decim_factor)
        self.set_samp_per_sec((self.signal_samp_rate * 1.0) / (self.symbol_rate * 1.0))
        self.root_raised_cosine_filter_0.set_taps(firdes.root_raised_cosine(1, self.signal_samp_rate, self.symbol_rate, 0.6, 361))
        self.qtgui_waterfall_sink_x_0.set_frequency_range(self.rx_freq, self.signal_samp_rate)
        self.qtgui_freq_sink_x_1.set_frequency_range(self.rx_freq+self.signal_samp_rate, self.usrp_samp_rate)
        self.qtgui_freq_sink_x_0.set_frequency_range(self.rx_freq, self.signal_samp_rate)
        self.low_pass_filter_0.set_taps(firdes.low_pass(2, self.usrp_samp_rate, self.signal_samp_rate, 25e3, firdes.WIN_HAMMING, 6.76))
        self.analog_sig_source_x_0.set_frequency(self.signal_samp_rate)
        self.USRP.set_center_freq(self.rx_freq+self.signal_samp_rate, 0)

    def get_path_to_save_dir(self):
        return self.path_to_save_dir

    def set_path_to_save_dir(self, path_to_save_dir):
        self.path_to_save_dir = path_to_save_dir
        self.set_file_path(self.path_to_save_dir + "/LRPT_" + datetime.now().strftime("%d%m%Y_%H%M")+".s")

    def get_decim_factor(self):
        return self.decim_factor

    def set_decim_factor(self, decim_factor):
        self.decim_factor = decim_factor
        self.set_usrp_samp_rate(self.signal_samp_rate*self.decim_factor)

    def get_usrp_samp_rate(self):
        return self.usrp_samp_rate

    def set_usrp_samp_rate(self, usrp_samp_rate):
        self.usrp_samp_rate = usrp_samp_rate
        self.qtgui_freq_sink_x_1.set_frequency_range(self.rx_freq+self.signal_samp_rate, self.usrp_samp_rate)
        self.low_pass_filter_0.set_taps(firdes.low_pass(2, self.usrp_samp_rate, self.signal_samp_rate, 25e3, firdes.WIN_HAMMING, 6.76))
        self.analog_sig_source_x_0.set_sampling_freq(self.usrp_samp_rate)
        self.USRP.set_samp_rate(self.usrp_samp_rate)
        self.USRP.set_bandwidth(self.usrp_samp_rate, 0)

    def get_samp_per_sec(self):
        return self.samp_per_sec

    def set_samp_per_sec(self, samp_per_sec):
        self.samp_per_sec = samp_per_sec
        self.digital_clock_recovery_mm_xx_0.set_omega(self.samp_per_sec)

    def get_rx_freq_old(self):
        return self.rx_freq_old

    def set_rx_freq_old(self, rx_freq_old):
        self.rx_freq_old = rx_freq_old

    def get_rx_freq(self):
        return self.rx_freq

    def set_rx_freq(self, rx_freq):
        self.rx_freq = rx_freq
        self._rx_freq_callback(self.rx_freq)
        self.qtgui_waterfall_sink_x_0.set_frequency_range(self.rx_freq, self.signal_samp_rate)
        self.qtgui_freq_sink_x_1.set_frequency_range(self.rx_freq+self.signal_samp_rate, self.usrp_samp_rate)
        self.qtgui_freq_sink_x_0.set_frequency_range(self.rx_freq, self.signal_samp_rate)
        self.USRP.set_center_freq(self.rx_freq+self.signal_samp_rate, 0)

    def get_rf_gain(self):
        return self.rf_gain

    def set_rf_gain(self, rf_gain):
        self.rf_gain = rf_gain
        self.USRP.set_gain(self.rf_gain, 0)

    def get_record(self):
        return self.record

    def set_record(self, record):
        self.record = record
        self._record_callback(self.record)
        self.blocks_mute_xx_0.set_mute(bool(self.record))

    def get_pll_alpha(self):
        return self.pll_alpha

    def set_pll_alpha(self, pll_alpha):
        self.pll_alpha = pll_alpha
        self.digital_costas_loop_cc_0.set_loop_bandwidth(self.pll_alpha)

    def get_file_path(self):
        return self.file_path

    def set_file_path(self, file_path):
        self.file_path = file_path
        self.blocks_wavfile_sink_1.open(self.file_path+"_rawIQ.wav")
        self.blocks_file_sink_0.open(self.file_path)

    def get_clock_alpha(self):
        return self.clock_alpha

    def set_clock_alpha(self, clock_alpha):
        self.clock_alpha = clock_alpha
        self.digital_clock_recovery_mm_xx_0.set_gain_omega(self.clock_alpha**2/4.0)
        self.digital_clock_recovery_mm_xx_0.set_gain_mu(self.clock_alpha)
def main(top_block_cls=top_block, options=None):
    """Qt entry point: build the flowgraph, start it, and run the event loop."""
    from distutils.version import StrictVersion
    if StrictVersion(Qt.qVersion()) >= StrictVersion("4.5.0"):
        style = gr.prefs().get_string('qtgui', 'style', 'raster')
        Qt.QApplication.setGraphicsSystem(style)
    qapp = Qt.QApplication(sys.argv)
    tb = top_block_cls()
    tb.start()
    tb.show()

    def quitting():
        # Stop and join the flowgraph before Qt tears the widgets down.
        tb.stop()
        tb.wait()
    qapp.connect(qapp, Qt.SIGNAL("aboutToQuit()"), quitting)
    qapp.exec_()


if __name__ == '__main__':
    main()
from airflow import DAG
from airflow.operators.python import PythonOperator, BranchPythonOperator
from airflow.operators.bash import BashOperator
import boto3
from dbfread import DBF
import pandas as pd
import os
from io import BytesIO, StringIO
from datetime import datetime, timedelta
# Configuring Spaces Client
# NOTE(review): region/endpoint/credentials are duplicated between the
# low-level client and the resource below — consider a shared config dict.
session = boto3.session.Session()
client = session.client('s3',
                        region_name='nyc3',  # Replace with correct region
                        endpoint_url='https://nyc3.digitaloceanspaces.com',  # Replace with correct endpoint
                        aws_access_key_id=os.getenv('SPACES_KEY'),  # Replace with access key
                        aws_secret_access_key=os.getenv('SPACES_SECRET'))  # Replace with secret key
s3 = session.resource('s3',
                      region_name='nyc3',  # Replace with correct region
                      endpoint_url='https://nyc3.digitaloceanspaces.com',  # Replace with correct endpoint
                      aws_access_key_id=os.getenv('SPACES_KEY'),  # Replace with access key
                      aws_secret_access_key=os.getenv('SPACES_SECRET'))  # Replace with secret key

# Default arguments applied to every task in the DAG below.
def_args = {
    'owner': 'airflow',
    'start_date': datetime(2021, 1, 1),
    'depends_on_past': False,
    'email': ['ivanedric@gmail.com'],  # Change to preferred email
    'email_on_failure': True,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=20)
}
with DAG('wf_data_dag',
default_args=def_args,
schedule_interval='@weekly', # '0 0 * * 0': Runs weekly on Sunday mornings
catchup=False,) as dag:
def _DBF_to_Sales_Reports(**kwargs):
''' Converts DBFs to CSV and uploads back to Ocean Spaces '''
# Read from DigitalOcean Space
sdet_obj = client.get_object(kwargs['Bucket'], kwargs['Key_SDET'])
sdet_dbf = DBF(BytesIO(sdet_obj['Body'].read()))
sdet_df = pd.DataFrame(iter(sdet_dbf))
sls_obj = client.get_object(kwargs['Bucket'], kwargs['Key_SLS'])
sls_dbf = DBF(BytesIO(sls_obj['Body'].read()))
sls_df = pd.DataFrame(iter(sls_dbf))
pages_obj = client.get_object(kwargs['Bucket'], kwargs['Key_PAGES'])
pages_dbf = DBF(BytesIO(pages_obj['Body'].read()), encoding = "ISO-8859-1")
pages_df = pd.DataFrame(iter(pages_dbf))
menu_obj = client.get_object(kwargs['Bucket'], kwargs['Key_MENU'])
menu_dbf = DBF(BytesIO(menu_obj['Body'].read()), encoding = "ISO-8859-1")
menu_df = pd.DataFrame(iter(menu_dbf))
pagetype_obj = client.get_object(kwargs['Bucket'], kwargs['Key_PAGETYPE'])
pagetype_dbf = DBF(BytesIO(pagetype_obj['Body'].read()))
pagetype_df = pd.DataFrame(iter(pagetype_dbf))
revcent_obj = client.get_object(kwargs['Bucket'], kwargs['Key_REVCENT'])
revcent_dbf = DBF(BytesIO(revcent_obj['Body'].read()))
revcent_df = pd.DataFrame(iter(revcent_dbf))
tipopag_obj = client.get_object(kwargs['Bucket'], kwargs['Key_TIPOPAG'])
tipopag_dbf = DBF(BytesIO(tipopag_obj['Body'].read()))
tipopag_df = pd.DataFrame(iter(tipopag_dbf))
# Write back to DigitalOcean Space
sdet_csv_buffer = StringIO()
sdet_df.to_csv(sdet_csv_buffer)
sdet_key = kwargs['Target_Key_SDET'] + '/SDET_' + str(datetime.today().strftime('%Y-%m-%d')) + '.csv' # NOTE: Edit this to fit desired bucket structure
client.put_object(kwargs['Bucket'], sdet_key, Body=sdet_csv_buffer.getvalue())
sls_csv_buffer = StringIO()
sls_df.to_csv(sls_csv_buffer)
sls_key = kwargs['Target_Key_SLS'] + '/SLS_' + str(datetime.today().strftime('%Y-%m-%d')) + '.csv' # NOTE: Edit this to fit desired bucket structure
client.put_object(kwargs['Bucket'], sls_key, Body=sls_csv_buffer.getvalue())
pages_csv_buffer = StringIO()
pages_df.to_csv(pages_csv_buffer)
pages_key = kwargs['Target_Key_PAGES'] + '/PAGES_' + str(datetime.today().strftime('%Y-%m-%d')) + '.csv' # NOTE: Edit this to fit desired bucket structure
client.put_object(kwargs['Bucket'], pages_key, Body=pages_csv_buffer.getvalue())
menu_csv_buffer = StringIO()
menu_df.to_csv(menu_csv_buffer)
menu_key = kwargs['Target_Key_MENU'] + '/MENU_' + str(datetime.today().strftime('%Y-%m-%d')) + '.csv' # NOTE: Edit this to fit desired bucket structure
client.put_object(kwargs['Bucket'], menu_key, Body=menu_csv_buffer.getvalue())
pagetype_csv_buffer = StringIO()
pagetype_df.to_csv(pagetype_csv_buffer)
pagetype_key = kwargs['Target_Key_PAGETYPE'] + '/PAGETYPE_' + str(datetime.today().strftime('%Y-%m-%d')) + '.csv' # NOTE: Edit this to fit desired bucket structure
client.put_object(kwargs['Bucket'], pagetype_key, Body=pagetype_csv_buffer.getvalue())
revcent_csv_buffer = StringIO()
revcent_df.to_csv(revcent_csv_buffer)
revcent_key = kwargs['Target_Key_REVCENT'] + '/REVCENT_' + str(datetime.today().strftime('%Y-%m-%d')) + '.csv' # NOTE: Edit this to fit desired bucket structure
client.put_object(kwargs['Bucket'], revcent_key, Body=revcent_csv_buffer.getvalue())
tipopag_csv_buffer = StringIO()
tipopag_df.to_csv(tipopag_csv_buffer)
tipopag_key = kwargs['Target_Key_TIPOPAG'] + '/TIPOPAG_' + str(datetime.today().strftime('%Y-%m-%d')) + '.csv' # NOTE: Edit this to fit desired bucket structure
client.put_object(kwargs['Bucket'], tipopag_key, Body=tipopag_csv_buffer.getvalue())
''' We can also delete the original DBF from Digital Ocean to save costs '''
# client.delete_objects(
# Bucket=kwargs['Bucket'],
# Delete={
# 'Objects': [
# {'Key': kwargs['Key_SDET']},
# {'Key': kwargs['Key_SLS']},
# {'Key': kwargs['Key_PAGES']},
# {'Key': kwargs['Key_MENU']},
# {'Key': kwargs['Key_PAGETYPE']},
# {'Key': kwargs['Key_REVCENT']},
# {'Key': kwargs['Key_TIPOPAG']}
# ]
# }
# )
''' Generating reports '''
grp_lookup_obj = s3.get_object(Bucket=kwargs['Bucket'], Key=kwargs['Key_GROUP_LOOKUP'])
grp_lookup_file = pd.read_excel(grp_lookup_obj['Body'], index_col='Unnamed: 0')
rc_lookup_obj = s3.get_object(Bucket=kwargs['Bucket'], Key=kwargs['Key_RC_LOOKUP'])
rc_lookup_file = pd.read_excel(rc_lookup_obj['Body'], index_col='Unnamed: 0')
# Group Sales
group_sales_sdet = sdet_df[['ORD_DATE', 'REF_NO']]
group_sales_sdet['SALES_VAT'] = sdet_df['RAW_PRICE'] - sdet_df['VAT_ADJ']
group_sales_df = pd.merge(group_sales_sdet, menu_df, how="left", on=["REF_NO"]) # Joining the MENU table with the SDET table
group_sales_df = group_sales_df[['ORD_DATE', 'REF_NO', 'DESCRIPT', 'PAGE_NUM', 'SALES_VAT']] # Only taking these columns
group_sales_df = pd.merge(group_sales_df, pages_df, how='left', on=['PAGE_NUM']) # Joining MENU + SDET with PAGES
group_sales_df = group_sales_df[['ORD_DATE', 'REF_NO', 'DESCRIPT', 'PAGE_NUM', 'PAGE_NAME', 'SALES_VAT']] # Only taking these columns
group_sales_df['Branch'] = kwargs['Branch']
# Revenue Center Sales
rc_sales_sls = sls_df[['DATE', 'BILL_NO', 'RECEIVED', 'REV_CENTER', 'TAXES']]
rc_sales_df = pd.merge(rc_sales_sls, revcent_df, how="left", on=["REV_CENTER"]) # Merging SLS and REVCENT
rc_sales_df = rc_sales_df[['DATE', 'BILL_NO', 'REV_CENTER', 'RC_NAME', 'RECEIVED', 'TAXES']]
rc_sales_df['Branch'] = kwargs['Branch']
# Item Sales by Group
sdet_menu = pd.merge(sdet_df, menu_df, on ='REF_NO',how ='left')
sdet_menu_cut = sdet_menu[["ORD_DATE","REF_NO","DESCRIPT","PAGE_NUM","RAW_PRICE","VAT_ADJ"]]
item_sales_long = pd.merge(sdet_menu_cut, pages_df, on ='PAGE_NUM',how ='left')
item_sales = item_sales_long[["ORD_DATE", "REF_NO","DESCRIPT","PAGE_NUM","PAGE_NAME","RAW_PRICE","VAT_ADJ"]]
item_sales["Sales + VAT"] = item_sales["RAW_PRICE"] - item_sales["VAT_ADJ"]
item_sales.rename(columns={'PAGE_NAME': 'Group', 'PAGE_NUM': 'Group Number'}, inplace=True)
pdf_grp_df = item_sales.groupby(["Group Number","Group"]).agg(
Sales=pd.NamedAgg(column="RAW_PRICE", aggfunc="sum"),
Taxes=pd.NamedAgg(column="VAT_ADJ", aggfunc="sum"),
Total = pd.NamedAgg(column="Sales + VAT", aggfunc="sum"))
pdf_grp_df['Branch'] = kwargs['Branch']
# Item Sales by Revenue Center
pdf_rc_df = rc_sales_df.groupby(["REV_CENTER", "RC_NAME"]).agg(
Trans=pd.NamedAgg(column="REV_CENTER", aggfunc="count"),
Cust=pd.NamedAgg(column="REV_CENTER", aggfunc="count"),
Sales=pd.NamedAgg(column="RECEIVED", aggfunc="sum"),
Taxes=pd.NamedAgg(column="TAXES", aggfunc="sum"))
pdf_rc_df['Total'] = pdf_rc_df['Taxes'] + pdf_rc_df['Sales']
pdf_rc_df['Trans Average'] = pdf_rc_df['Sales'] / pdf_rc_df['Trans']
pdf_rc_df['Customer Average'] = pdf_rc_df['Sales'] / pdf_rc_df['Cust']
pdf_rc_df['Branch'] = kwargs['Branch']
# VLOOKUP
grp_lookup_file.columns = ["PAGE_NAME", "Simple group"]
rc_lookup_file.drop(rc_lookup_file.columns[[2,3,4,5,6,7]], axis = 1, inplace = True)
rc_lookup_file.columns = ["RC_NAME", "Simple revcent"]
vlookup_rc = pd.merge(pdf_rc_df, rc_lookup_file, on ='RC_NAME',how ='left') # Merge Revenue Center with Sales Report
sumif_rc = vlookup_rc.groupby("Simple revcent")["Total"].sum() # Get the sum of sales per Revenue Center
sumif_rc = sumif_rc.to_frame()
sumif_rc['Branch'] = kwargs['Branch']
vlookup_grp = pd.merge(group_sales_df, grp_lookup_file, on ='PAGE_NAME',how ='left') # Merge Group lookup file with Sales Report
sumif_grp = vlookup_grp.groupby("Simple group")["SALES_VAT"].sum() # Get brand sales
sumif_grp = sumif_grp.to_frame()
sumif_grp['Branch'] = kwargs['Branch']
# Send back to DigitalOcean Space
group_sales_csv_buffer = StringIO()
group_sales_df.to_csv(group_sales_csv_buffer)
group_sales_key = kwargs['Target_Key_GROUP_SALES'] + '/GROUP_SALES_' + str(datetime.today().strftime('%Y-%m-%d')) + '.csv' # NOTE: Edit this to fit desired bucket structure
client.put_object(kwargs['Bucket'], group_sales_key, Body=group_sales_csv_buffer.getvalue())
rc_sales_csv_buffer = StringIO()
rc_sales_df.to_csv(rc_sales_csv_buffer)
rc_sales_key = kwargs['Target_Key_RC_SALES'] + '/RC_SALES_' + str(datetime.today().strftime('%Y-%m-%d')) + '.csv' # NOTE: Edit this to fit desired bucket structure
client.put_object(kwargs['Bucket'], rc_sales_key, Body=rc_sales_csv_buffer.getvalue())
pdf_grp_csv_buffer = StringIO()
pdf_grp_df.to_csv(pdf_grp_csv_buffer)
item_sales_group_key = kwargs['Target_Key_ITEM_SALES_GROUP'] + '/ITEM_SALES_GROUP_' + str(datetime.today().strftime('%Y-%m-%d')) + '.csv' # NOTE: Edit this to fit desired bucket structure
client.put_object(kwargs['Bucket'], item_sales_group_key, Body=pdf_grp_csv_buffer.getvalue())
pdf_rc_csv_buffer = StringIO()
pdf_rc_df.to_csv(pdf_rc_csv_buffer)
item_sales_rc_key = kwargs['Target_Key_ITEM_SALES_RC'] + '/ITEM_SALES_RC_' + str(datetime.today().strftime('%Y-%m-%d')) + '.csv' # NOTE: Edit this to fit desired bucket structure
client.put_object(kwargs['Bucket'], item_sales_rc_key, Body=pdf_rc_csv_buffer.getvalue())
sumif_rc_csv_buffer = StringIO()
sumif_rc.to_csv(sumif_rc_csv_buffer)
sumif_rc_key = kwargs['Target_Key_SUMIF_RC'] + '/SUMIF_RC_' + str(datetime.today().strftime('%Y-%m-%d')) + '.csv' # NOTE: Edit this to fit desired bucket structure
client.put_object(kwargs['Bucket'], sumif_rc_key, Body=sumif_rc_csv_buffer.getvalue())
sumif_grp_csv_buffer = StringIO()
sumif_grp.to_csv(sumif_grp_csv_buffer)
sumif_grp_key = kwargs['Target_Key_SUMIF_GRP'] + '/SUMIF_GRP_' + str(datetime.today().strftime('%Y-%m-%d')) + '.csv' # NOTE: Edit this to fit desired bucket structure
client.put_object(kwargs['Bucket'], sumif_grp_key, Body=sumif_grp_csv_buffer.getvalue())
# def _generate_reports(**kwargs):
# ''' Generates needed reports for one branch'''
# # Get CSVs from DigitalOcean Space
# sdet_obj = s3.get_object(Bucket=kwargs['Bucket'], Key=kwargs['Key_SDET'])
# sdet_df = pd.read_csv(sdet_obj['Body'], index_col='Unnamed: 0')
# sls_obj = s3.get_object(Bucket=kwargs['Bucket'], Key=kwargs['Key_SLS'])
# sls_df = pd.read_csv(sls_obj['Body'], index_col='Unnamed: 0')
# pages_obj = s3.get_object(Bucket=kwargs['Bucket'], Key=kwargs['Key_PAGES'])
# pages_df = pd.read_csv(pages_obj['Body'], index_col='Unnamed: 0')
# menu_obj = s3.get_object(Bucket=kwargs['Bucket'], Key=kwargs['Key_MENU'])
# menu_df = pd.read_csv(menu_obj['Body'], index_col='Unnamed: 0')
# revcent_obj = s3.get_object(Bucket=kwargs['Bucket'], Key=kwargs['Key_REVCENT'])
# revcent_df = pd.read_csv(revcent_obj['Body'], index_col='Unnamed: 0')
# # grp_lookup_obj = s3.get_object(Bucket=kwargs['Bucket'], Key=kwargs['Key_GROUP_LOOKUP'])
# grp_lookup_file = pd.read_excel(grp_lookup_obj['Body'], index_col='Unnamed: 0')
# rc_lookup_obj = s3.get_object(Bucket=kwargs['Bucket'], Key=kwargs['Key_RC_LOOKUP'])
# rc_lookup_file = pd.read_excel(rc_lookup_obj['Body'], index_col='Unnamed: 0')
# # Group Sales
# group_sales_sdet = sdet_df[['ORD_DATE', 'REF_NO']]
# group_sales_sdet['SALES_VAT'] = sdet_df['RAW_PRICE'] - sdet_df['VAT_ADJ']
# group_sales_df = pd.merge(group_sales_sdet, menu_df, how="left", on=["REF_NO"]) # Joining the MENU table with the SDET table
# group_sales_df = group_sales_df[['ORD_DATE', 'REF_NO', 'DESCRIPT', 'PAGE_NUM', 'SALES_VAT']] # Only taking these columns
# group_sales_df = pd.merge(group_sales_df, pages_df, how='left', on=['PAGE_NUM']) # Joining MENU + SDET with PAGES
# group_sales_df = group_sales_df[['ORD_DATE', 'REF_NO', 'DESCRIPT', 'PAGE_NUM', 'PAGE_NAME', 'SALES_VAT']] # Only taking these columns
# group_sales_df['Branch'] = kwargs['Branch']
# # Revenue Center Sales
# rc_sales_sls = sls_df[['DATE', 'BILL_NO', 'RECEIVED', 'REV_CENTER', 'TAXES']]
# rc_sales_df = pd.merge(rc_sales_sls, revcent_df, how="left", on=["REV_CENTER"]) # Merging SLS and REVCENT
# rc_sales_df = rc_sales_df[['DATE', 'BILL_NO', 'REV_CENTER', 'RC_NAME', 'RECEIVED', 'TAXES']]
# rc_sales_df['Branch'] = kwargs['Branch']
# # Item Sales by Group
# sdet_menu = pd.merge(sdet_df, menu_df, on ='REF_NO',how ='left')
# sdet_menu_cut = sdet_menu[["ORD_DATE","REF_NO","DESCRIPT","PAGE_NUM","RAW_PRICE","VAT_ADJ"]]
# item_sales_long = pd.merge(sdet_menu_cut, pages_df, on ='PAGE_NUM',how ='left')
# item_sales = item_sales_long[["ORD_DATE", "REF_NO","DESCRIPT","PAGE_NUM","PAGE_NAME","RAW_PRICE","VAT_ADJ"]]
# item_sales["Sales + VAT"] = item_sales["RAW_PRICE"] - item_sales["VAT_ADJ"]
# item_sales.rename(columns={'PAGE_NAME': 'Group', 'PAGE_NUM': 'Group Number'}, inplace=True)
# pdf_grp_df = item_sales.groupby(["Group Number","Group"]).agg(
# Sales=pd.NamedAgg(column="RAW_PRICE", aggfunc="sum"),
# Taxes=pd.NamedAgg(column="VAT_ADJ", aggfunc="sum"),
# Total = pd.NamedAgg(column="Sales + VAT", aggfunc="sum"))
# pdf_grp_df['Branch'] = kwargs['Branch']
# # Item Sales by Revenue Center
# pdf_rc_df = rc_sales_df.groupby(["REV_CENTER", "RC_NAME"]).agg(
# Trans=pd.NamedAgg(column="REV_CENTER", aggfunc="count"),
# Cust=pd.NamedAgg(column="REV_CENTER", aggfunc="count"),
# Sales=pd.NamedAgg(column="RECEIVED", aggfunc="sum"),
# Taxes=pd.NamedAgg(column="TAXES", aggfunc="sum"))
# pdf_rc_df['Total'] = pdf_rc_df['Taxes'] + pdf_rc_df['Sales']
# pdf_rc_df['Trans Average'] = pdf_rc_df['Sales'] / pdf_rc_df['Trans']
# pdf_rc_df['Customer Average'] = pdf_rc_df['Sales'] / pdf_rc_df['Cust']
# pdf_rc_df['Branch'] = kwargs['Branch']
# # VLOOKUP
# grp_lookup_file.columns = ["PAGE_NAME", "Simple group"]
# rc_lookup_file.drop(rc_lookup_file.columns[[2,3,4,5,6,7]], axis = 1, inplace = True)
# rc_lookup_file.columns = ["RC_NAME", "Simple revcent"]
# vlookup_rc = pd.merge(pdf_rc_df, rc_lookup_file, on ='RC_NAME',how ='left') # Merge Revenue Center with Sales Report
# sumif_rc = vlookup_rc.groupby("Simple revcent")["Total"].sum() # Get the sum of sales per Revenue Center
# sumif_rc = sumif_rc.to_frame()
# sumif_rc['Branch'] = kwargs['Branch']
# vlookup_grp = pd.merge(group_sales_df, grp_lookup_file, on ='PAGE_NAME',how ='left') # Merge Group lookup file with Sales Report
# sumif_grp = vlookup_grp.groupby("Simple group")["SALES_VAT"].sum() # Get brand sales
# sumif_grp = sumif_grp.to_frame()
# sumif_grp['Branch'] = kwargs['Branch']
# # Send back to DigitalOcean Space
# group_sales_csv_buffer = StringIO()
# group_sales_df.to_csv(group_sales_csv_buffer)
# group_sales_key = kwargs['Target_Key_GROUP_SALES'] + '/GROUP_SALES_' + str(datetime.today().strftime('%Y-%m-%d')) + '.csv' # NOTE: Edit this to fit desired bucket structure
# client.put_object(kwargs['Bucket'], group_sales_key, Body=group_sales_csv_buffer.getvalue())
# rc_sales_csv_buffer = StringIO()
# rc_sales_df.to_csv(rc_sales_csv_buffer)
# rc_sales_key = kwargs['Target_Key_RC_SALES'] + '/RC_SALES_' + str(datetime.today().strftime('%Y-%m-%d')) + '.csv' # NOTE: Edit this to fit desired bucket structure
# client.put_object(kwargs['Bucket'], rc_sales_key, Body=rc_sales_csv_buffer.getvalue())
# pdf_grp_csv_buffer = StringIO()
# pdf_grp_df.to_csv(pdf_grp_csv_buffer)
# item_sales_group_key = kwargs['Target_Key_ITEM_SALES_GROUP'] + '/ITEM_SALES_GROUP_' + str(datetime.today().strftime('%Y-%m-%d')) + '.csv' # NOTE: Edit this to fit desired bucket structure
# client.put_object(kwargs['Bucket'], item_sales_group_key, Body=pdf_grp_csv_buffer.getvalue())
# pdf_rc_csv_buffer = StringIO()
# pdf_rc_df.to_csv(pdf_rc_csv_buffer)
# item_sales_rc_key = kwargs['Target_Key_ITEM_SALES_RC'] + '/ITEM_SALES_RC_' + str(datetime.today().strftime('%Y-%m-%d')) + '.csv' # NOTE: Edit this to fit desired bucket structure
# client.put_object(kwargs['Bucket'], item_sales_rc_key, Body=pdf_rc_csv_buffer.getvalue())
# sumif_rc_csv_buffer = StringIO()
# sumif_rc.to_csv(sumif_rc_csv_buffer)
# sumif_rc_key = kwargs['Target_Key_SUMIF_RC'] + '/SUMIF_RC_' + str(datetime.today().strftime('%Y-%m-%d')) + '.csv' # NOTE: Edit this to fit desired bucket structure
# client.put_object(kwargs['Bucket'], sumif_rc_key, Body=sumif_rc_csv_buffer.getvalue())
# sumif_grp_csv_buffer = StringIO()
# sumif_grp.to_csv(sumif_grp_csv_buffer)
# sumif_grp_key = kwargs['Target_Key_SUMIF_GRP'] + '/SUMIF_GRP_' + str(datetime.today().strftime('%Y-%m-%d')) + '.csv' # NOTE: Edit this to fit desired bucket structure
# client.put_object(kwargs['Bucket'], sumif_grp_key, Body=sumif_grp_csv_buffer.getvalue())
def _merge_reports(**kwargs):
''' Merges reports '''
# TODO: Finish concatenation of branch files
# Read all branches from DigitalOcean Space
try:
# Merging for the item sales by group
lf_bgc_isg_obj = s3.get_object(Bucket=kwargs['Bucket'], Key=kwargs['Key_LF_BGC_ISG'])
lf_bgc_isg_df = pd.read_csv(lf_bgc_isg_obj['Body'])
wf_bgc_isg_obj = s3.get_object(Bucket=kwargs['Bucket'], Key=kwargs['Key_WF_BGC_ISG'])
wf_bgc_isg_df = pd.read_csv(wf_bgc_isg_obj['Body'])
wf_greenhills_isg_obj = s3.get_object(Bucket=kwargs['Bucket'], Key=kwargs['Key_WF_GREENHILLS_ISG'])
wf_greenhills_isg_df = pd.read_csv(wf_greenhills_isg_obj['Body'])
wf_podium_isg_obj = s3.get_object(Bucket=kwargs['Bucket'], Key=kwargs['Key_WF_PODIUM_ISG'])
wf_podium_isg_df = pd.read_csv(wf_podium_isg_obj['Body'])
wf_rada_isg_obj = s3.get_object(Bucket=kwargs['Bucket'], Key=kwargs['Key_WF_RADA_ISG'])
wf_rada_isg_df = pd.read_csv(wf_rada_isg_obj['Body'])
wf_rockwell_isg_obj = s3.get_object(Bucket=kwargs['Bucket'], Key=kwargs['Key_WF_ROCKWELL_ISG'])
wf_rockwell_isg_df = pd.read_csv(wf_rockwell_isg_obj['Body'])
wf_salcedo_isg_obj = s3.get_object(Bucket=kwargs['Bucket'], Key=kwargs['Key_WF_SALCEDO_ISG'])
wf_salcedo_isg_df = pd.read_csv(wf_salcedo_isg_obj['Body'])
wf_uptown_isg_obj = s3.get_object(Bucket=kwargs['Bucket'], Key=kwargs['Key_WF_UPTOWN_ISG'])
wf_uptown_isg_df = pd.read_csv(wf_uptown_isg_obj['Body'])
wfi_bgc_isg_obj = s3.get_object(Bucket=kwargs['Bucket'], Key=kwargs['Key_WFI_BGC_ISG'])
wfi_bgc_isg_df = pd.read_csv(wfi_bgc_isg_obj['Body'])
# Concat files
isg_frames = [lf_bgc_isg_df, wf_bgc_isg_df, wf_greenhills_isg_df, wf_podium_isg_df, wf_rada_isg_df, wf_rockwell_isg_df, wf_salcedo_isg_df, wf_uptown_isg_df, wfi_bgc_isg_df]
isg_master_df = pd.concat(isg_frames)
# Upload files back to DigitalOcean
isg_master_csv_buffer = StringIO()
isg_master_df.to_csv(isg_master_csv_buffer)
isg_key = kwargs['Target_Key_ISG'] + '/ISG_Master_' + str(datetime.today().strftime('%Y-%m-%d')) + '.csv' # NOTE: Edit this to fit desired bucket structure
client.put_object(kwargs['Bucket'], isg_key, Body=isg_master_csv_buffer.getvalue())
return success
except:
return failure
# Jobs
# NOTE: Must replace Buckets and Keys with correct Buckets + Keys
LF_BGC_DBF_to_Sales_Reports = PythonOperator(
task_id = 'LF_BGC_DBF_to_Sales_Reports',
python_callable = _DBF_to_Sales_Reports,
op_kwargs={'Bucket': 'name_of_bucket',
'Key_SDET': 'name_of_key',
'Key_SLS': 'name_of_key',
'Key_PAGES': 'name_of_key',
'Key_MENU': 'name_of_key',
'Key_PAGETYPE': 'name_of_key',
'Key_REVCENT': 'name_of_key',
'Key_TIPOPAG': 'name_of_key',
'Target_Key_SDET': 'name_of_key',
'Target_Key_SLS': 'name_of_key',
'Target_Key_PAGES': 'name_of_key',
'Target_Key_MENU': 'name_of_key',
'Target_Key_PAGETYPE': 'name_of_key',
'Target_Key_REVCENT': 'name_of_key',
'Target_Key_TIPOPAG': 'name_of_key',
'Key_GROUP_LOOKUP': 'name_of_group_lookup_file',
'Key_RC_LOOKUP': 'name_of_rc_lookup_file',
'Target_Key_GROUP_SALES': 'name_of_key',
'Target_Key_RC_SALES': 'name_of_key',
'Target_Key_ITEM_SALES_GROUP': 'name_of_key',
'Target_Key_ITEM_SALES_RC': 'name_of_key',
'Target_Key_SUMIF_RC': 'name_of_key',
'Target_Key_SUMIF_GRP': 'name_of_key',
'Branch': 'LF BGC'
}
)
WF_BGC_DBF_to_Sales_Reports = PythonOperator(
task_id = 'WF_BGC_DBF_to_Sales_Reports',
python_callable = _DBF_to_Sales_Reports,
op_kwargs={'Bucket': 'name_of_bucket',
'Key_SDET': 'name_of_key',
'Key_SLS': 'name_of_key',
'Key_PAGES': 'name_of_key',
'Key_MENU': 'name_of_key',
'Key_PAGETYPE': 'name_of_key',
'Key_REVCENT': 'name_of_key',
'Key_TIPOPAG': 'name_of_key',
'Target_Key_SDET': 'name_of_key',
'Target_Key_SLS': 'name_of_key',
'Target_Key_PAGES': 'name_of_key',
'Target_Key_MENU': 'name_of_key',
'Target_Key_PAGETYPE': 'name_of_key',
'Target_Key_REVCENT': 'name_of_key',
'Target_Key_TIPOPAG': 'name_of_key',
'Key_GROUP_LOOKUP': 'name_of_group_lookup_file',
'Key_RC_LOOKUP': 'name_of_rc_lookup_file',
'Target_Key_GROUP_SALES': 'name_of_key',
'Target_Key_RC_SALES': 'name_of_key',
'Target_Key_ITEM_SALES_GROUP': 'name_of_key',
'Target_Key_ITEM_SALES_RC': 'name_of_key',
'Target_Key_SUMIF_RC': 'name_of_key',
'Target_Key_SUMIF_GRP': 'name_of_key',
'Branch': 'WF BGC'},
)
WF_Greenhills_DBF_to_Sales_Reports = PythonOperator(
task_id = 'WF_Greenhills_DBF_to_Sales_Reports',
python_callable = _DBF_to_Sales_Reports,
op_kwargs={'Bucket': 'name_of_bucket',
'Key_SDET': 'name_of_key',
'Key_SLS': 'name_of_key',
'Key_PAGES': 'name_of_key',
'Key_MENU': 'name_of_key',
'Key_PAGETYPE': 'name_of_key',
'Key_REVCENT': 'name_of_key',
'Key_TIPOPAG': 'name_of_key',
'Target_Key_SDET': 'name_of_key',
'Target_Key_SLS': 'name_of_key',
'Target_Key_PAGES': 'name_of_key',
'Target_Key_MENU': 'name_of_key',
'Target_Key_PAGETYPE': 'name_of_key',
'Target_Key_REVCENT': 'name_of_key',
'Target_Key_TIPOPAG': 'name_of_key',
'Key_GROUP_LOOKUP': 'name_of_group_lookup_file',
'Key_RC_LOOKUP': 'name_of_rc_lookup_file',
'Target_Key_GROUP_SALES': 'name_of_key',
'Target_Key_RC_SALES': 'name_of_key',
'Target_Key_ITEM_SALES_GROUP': 'name_of_key',
'Target_Key_ITEM_SALES_RC': 'name_of_key',
'Target_Key_SUMIF_RC': 'name_of_key',
'Target_Key_SUMIF_GRP': 'name_of_key',
'Branch': 'WF_Greenhills'}
)
WF_Podium_DBF_to_Sales_Reports = PythonOperator(
task_id = 'WF_Podium_DBF_to_Sales_Reports',
python_callable = _DBF_to_Sales_Reports,
op_kwargs={'Bucket': 'name_of_bucket',
'Key_SDET': 'name_of_key',
'Key_SLS': 'name_of_key',
'Key_PAGES': 'name_of_key',
'Key_MENU': 'name_of_key',
'Key_PAGETYPE': 'name_of_key',
'Key_REVCENT': 'name_of_key',
'Key_TIPOPAG': 'name_of_key',
'Target_Key_SDET': 'name_of_key',
'Target_Key_SLS': 'name_of_key',
'Target_Key_PAGES': 'name_of_key',
'Target_Key_MENU': 'name_of_key',
'Target_Key_PAGETYPE': 'name_of_key',
'Target_Key_REVCENT': 'name_of_key',
'Target_Key_TIPOPAG': 'name_of_key',
'Key_GROUP_LOOKUP': 'name_of_group_lookup_file',
'Key_RC_LOOKUP': 'name_of_rc_lookup_file',
'Target_Key_GROUP_SALES': 'name_of_key',
'Target_Key_RC_SALES': 'name_of_key',
'Target_Key_ITEM_SALES_GROUP': 'name_of_key',
'Target_Key_ITEM_SALES_RC': 'name_of_key',
'Target_Key_SUMIF_RC': 'name_of_key',
'Target_Key_SUMIF_GRP': 'name_of_key',
'Branch': 'WF Podium'}
)
WF_Rada_DBF_to_Sales_Reports = PythonOperator(
task_id = 'WF_Rada_DBF_to_Sales_Reports',
python_callable = _DBF_to_Sales_Reports,
op_kwargs={'Bucket': 'name_of_bucket',
'Key_SDET': 'name_of_key',
'Key_SLS': 'name_of_key',
'Key_PAGES': 'name_of_key',
'Key_MENU': 'name_of_key',
'Key_PAGETYPE': 'name_of_key',
'Key_REVCENT': 'name_of_key',
'Key_TIPOPAG': 'name_of_key',
'Target_Key_SDET': 'name_of_key',
'Target_Key_SLS': 'name_of_key',
'Target_Key_PAGES': 'name_of_key',
'Target_Key_MENU': 'name_of_key',
'Target_Key_PAGETYPE': 'name_of_key',
'Target_Key_REVCENT': 'name_of_key',
'Target_Key_TIPOPAG': 'name_of_key',
'Key_GROUP_LOOKUP': 'name_of_group_lookup_file',
'Key_RC_LOOKUP': 'name_of_rc_lookup_file',
'Target_Key_GROUP_SALES': 'name_of_key',
'Target_Key_RC_SALES': 'name_of_key',
'Target_Key_ITEM_SALES_GROUP': 'name_of_key',
'Target_Key_ITEM_SALES_RC': 'name_of_key',
'Target_Key_SUMIF_RC': 'name_of_key',
'Target_Key_SUMIF_GRP': 'name_of_key',
'Branch': 'WF Rada'}
)
WF_Rockwell_DBF_to_Sales_Reports = PythonOperator(
task_id = 'WF_Rockwell_DBF_to_Sales_Reports',
python_callable = _DBF_to_Sales_Reports,
op_kwargs={'Bucket': 'name_of_bucket',
'Key_SDET': 'name_of_key',
'Key_SLS': 'name_of_key',
'Key_PAGES': 'name_of_key',
'Key_MENU': 'name_of_key',
'Key_PAGETYPE': 'name_of_key',
'Key_REVCENT': 'name_of_key',
'Key_TIPOPAG': 'name_of_key',
'Target_Key_SDET': 'name_of_key',
'Target_Key_SLS': 'name_of_key',
'Target_Key_PAGES': 'name_of_key',
'Target_Key_MENU': 'name_of_key',
'Target_Key_PAGETYPE': 'name_of_key',
'Target_Key_REVCENT': 'name_of_key',
'Target_Key_TIPOPAG': 'name_of_key',
'Key_GROUP_LOOKUP': 'name_of_group_lookup_file',
'Key_RC_LOOKUP': 'name_of_rc_lookup_file',
'Target_Key_GROUP_SALES': 'name_of_key',
'Target_Key_RC_SALES': 'name_of_key',
'Target_Key_ITEM_SALES_GROUP': 'name_of_key',
'Target_Key_ITEM_SALES_RC': 'name_of_key',
'Target_Key_SUMIF_RC': 'name_of_key',
'Target_Key_SUMIF_GRP': 'name_of_key',
'Branch': 'WF Rockwell'}
)
WF_Salcedo_DBF_to_Sales_Reports = PythonOperator(
task_id = 'WF_Salcedo_DBF_to_Sales_Reports',
python_callable = _DBF_to_Sales_Reports,
op_kwargs={'Bucket': 'name_of_bucket',
'Key_SDET': 'name_of_key',
'Key_SLS': 'name_of_key',
'Key_PAGES': 'name_of_key',
'Key_MENU': 'name_of_key',
'Key_PAGETYPE': 'name_of_key',
'Key_REVCENT': 'name_of_key',
'Key_TIPOPAG': 'name_of_key',
'Target_Key_SDET': 'name_of_key',
'Target_Key_SLS': 'name_of_key',
'Target_Key_PAGES': 'name_of_key',
'Target_Key_MENU': 'name_of_key',
'Target_Key_PAGETYPE': 'name_of_key',
'Target_Key_REVCENT': 'name_of_key',
'Target_Key_TIPOPAG': 'name_of_key',
'Key_GROUP_LOOKUP': 'name_of_group_lookup_file',
'Key_RC_LOOKUP': 'name_of_rc_lookup_file',
'Target_Key_GROUP_SALES': 'name_of_key',
'Target_Key_RC_SALES': 'name_of_key',
'Target_Key_ITEM_SALES_GROUP': 'name_of_key',
'Target_Key_ITEM_SALES_RC': 'name_of_key',
'Target_Key_SUMIF_RC': 'name_of_key',
'Target_Key_SUMIF_GRP': 'name_of_key',
'Branch': 'WF Salcedo'}
)
WF_Uptown_DBF_to_Sales_Reports = PythonOperator(
task_id = 'WF_Uptown_DBF_to_Sales_Reports',
python_callable = _DBF_to_Sales_Reports,
op_kwargs={'Bucket': 'name_of_bucket',
'Key_SDET': 'name_of_key',
'Key_SLS': 'name_of_key',
'Key_PAGES': 'name_of_key',
'Key_MENU': 'name_of_key',
'Key_PAGETYPE': 'name_of_key',
'Key_REVCENT': 'name_of_key',
'Key_TIPOPAG': 'name_of_key',
'Target_Key_SDET': 'name_of_key',
'Target_Key_SLS': 'name_of_key',
'Target_Key_PAGES': 'name_of_key',
'Target_Key_MENU': 'name_of_key',
'Target_Key_PAGETYPE': 'name_of_key',
'Target_Key_REVCENT': 'name_of_key',
'Target_Key_TIPOPAG': 'name_of_key',
'Key_GROUP_LOOKUP': 'name_of_group_lookup_file',
'Key_RC_LOOKUP': 'name_of_rc_lookup_file',
'Target_Key_GROUP_SALES': 'name_of_key',
'Target_Key_RC_SALES': 'name_of_key',
'Target_Key_ITEM_SALES_GROUP': 'name_of_key',
'Target_Key_ITEM_SALES_RC': 'name_of_key',
'Target_Key_SUMIF_RC': 'name_of_key',
'Target_Key_SUMIF_GRP': 'name_of_key',
'Branch': 'WF Uptown'}
)
WFI_BGC_DBF_to_Sales_Reports = PythonOperator(
task_id = 'WFI_BGC_DBF_to_Sales_Reports',
python_callable = _DBF_to_Sales_Reports,
op_kwargs={'Bucket': 'name_of_bucket',
'Key_SDET': 'name_of_key',
'Key_SLS': 'name_of_key',
'Key_PAGES': 'name_of_key',
'Key_MENU': 'name_of_key',
'Key_PAGETYPE': 'name_of_key',
'Key_REVCENT': 'name_of_key',
'Key_TIPOPAG': 'name_of_key',
'Target_Key_SDET': 'name_of_key',
'Target_Key_SLS': 'name_of_key',
'Target_Key_PAGES': 'name_of_key',
'Target_Key_MENU': 'name_of_key',
'Target_Key_PAGETYPE': 'name_of_key',
'Target_Key_REVCENT': 'name_of_key',
'Target_Key_TIPOPAG': 'name_of_key',
'Key_GROUP_LOOKUP': 'name_of_group_lookup_file',
'Key_RC_LOOKUP': 'name_of_rc_lookup_file',
'Target_Key_GROUP_SALES': 'name_of_key',
'Target_Key_RC_SALES': 'name_of_key',
'Target_Key_ITEM_SALES_GROUP': 'name_of_key',
'Target_Key_ITEM_SALES_RC': 'name_of_key',
'Target_Key_SUMIF_RC': 'name_of_key',
'Target_Key_SUMIF_GRP': 'name_of_key',
'Branch': 'WFI BGC'}
)
# Generates necessary reports
# LF_BGC_generate_reports = PythonOperator(
# task_id = 'LF_BGC_generate_reports',
# python_callable = _generate_reports,
# op_kwargs={'Bucket': 'name_of_bucket',
# 'Key_SDET': 'name_of_key',
# 'Key_SLS': 'name_of_key',
# 'Key_PAGES': 'name_of_key',
# 'Key_MENU': 'name_of_key',
# 'Key_PAGETYPE': 'name_of_key',
# 'Key_REVCENT': 'name_of_key',
# 'Key_TIPOPAG': 'name_of_key',
# 'Key_GROUP_LOOKUP': 'name_of_group_lookup_file',
# 'Key_RC_LOOKUP': 'name_of_rc_lookup_file',
# 'Target_Key_GROUP_SALES': 'name_of_key',
# 'Target_Key_RC_SALES': 'name_of_key',
# 'Target_Key_ITEM_SALES_GROUP': 'name_of_key',
# 'Target_Key_ITEM_SALES_RC': 'name_of_key',
# 'Target_Key_SUMIF_RC': 'name_of_key',
# 'Target_Key_SUMIF_GRP': 'name_of_key',
# 'Branch': 'LF BGC'
# }
# )
# WF_BGC_generate_reports = PythonOperator(
# task_id = 'WF_BGC_generate_reports',
# python_callable = _generate_reports,
# op_kwargs={'Bucket': 'name_of_bucket',
# 'Key_SDET': 'name_of_key',
# 'Key_SLS': 'name_of_key',
# 'Key_PAGES': 'name_of_key',
# 'Key_MENU': 'name_of_key',
# 'Key_PAGETYPE': 'name_of_key',
# 'Key_REVCENT': 'name_of_key',
# 'Key_TIPOPAG': 'name_of_key',
# 'Key_GROUP_LOOKUP': 'name_of_group_lookup_file',
# 'Key_RC_LOOKUP': 'name_of_rc_lookup_file',
# 'Target_Key_GROUP_SALES': 'name_of_key',
# 'Target_Key_RC_SALES': 'name_of_key',
# 'Target_Key_ITEM_SALES_GROUP': 'name_of_key',
# 'Target_Key_ITEM_SALES_RC': 'name_of_key',
# 'Target_Key_SUMIF_RC': 'name_of_key',
# 'Target_Key_SUMIF_GRP': 'name_of_key',
# 'Branch': 'WF BGC'
# }
# )
# WF_Greenhills_generate_reports = PythonOperator(
# task_id = 'WF_Greenhills_generate_reports',
# python_callable = _generate_reports,
# op_kwargs={'Bucket': 'name_of_bucket',
# 'Key_SDET': 'name_of_key',
# 'Key_SLS': 'name_of_key',
# 'Key_PAGES': 'name_of_key',
# 'Key_MENU': 'name_of_key',
# 'Key_PAGETYPE': 'name_of_key',
# 'Key_REVCENT': 'name_of_key',
# 'Key_TIPOPAG': 'name_of_key',
# 'Key_GROUP_LOOKUP': 'name_of_group_lookup_file',
# 'Key_RC_LOOKUP': 'name_of_rc_lookup_file',
# 'Target_Key_GROUP_SALES': 'name_of_key',
# 'Target_Key_RC_SALES': 'name_of_key',
# 'Target_Key_ITEM_SALES_GROUP': 'name_of_key',
# 'Target_Key_ITEM_SALES_RC': 'name_of_key',
# 'Target_Key_SUMIF_RC': 'name_of_key',
# 'Target_Key_SUMIF_GRP': 'name_of_key',
# 'Branch': 'WF Greenhills'
# }
# )
# WF_Podium_generate_reports = PythonOperator(
# task_id = 'WF_Podium_generate_reports',
# python_callable = _generate_reports,
# op_kwargs={'Bucket': 'name_of_bucket',
# 'Key_SDET': 'name_of_key',
# 'Key_SLS': 'name_of_key',
# 'Key_PAGES': 'name_of_key',
# 'Key_MENU': 'name_of_key',
# 'Key_PAGETYPE': 'name_of_key',
# 'Key_REVCENT': 'name_of_key',
# 'Key_TIPOPAG': 'name_of_key',
# 'Key_GROUP_LOOKUP': 'name_of_group_lookup_file',
# 'Key_RC_LOOKUP': 'name_of_rc_lookup_file',
# 'Target_Key_GROUP_SALES': 'name_of_key',
# 'Target_Key_RC_SALES': 'name_of_key',
# 'Target_Key_ITEM_SALES_GROUP': 'name_of_key',
# 'Target_Key_ITEM_SALES_RC': 'name_of_key',
# 'Target_Key_SUMIF_RC': 'name_of_key',
# 'Target_Key_SUMIF_GRP': 'name_of_key',
# 'Branch': 'WF Podium'
# }
# )
# WF_Rada_generate_reports = PythonOperator(
# task_id = 'WF_Rada_generate_reports',
# python_callable = _generate_reports,
# op_kwargs={'Bucket': 'name_of_bucket',
# 'Key_SDET': 'name_of_key',
# 'Key_SLS': 'name_of_key',
# 'Key_PAGES': 'name_of_key',
# 'Key_MENU': 'name_of_key',
# 'Key_PAGETYPE': 'name_of_key',
# 'Key_REVCENT': 'name_of_key',
# 'Key_TIPOPAG': 'name_of_key',
# 'Key_GROUP_LOOKUP': 'name_of_group_lookup_file',
# 'Key_RC_LOOKUP': 'name_of_rc_lookup_file',
# 'Target_Key_GROUP_SALES': 'name_of_key',
# 'Target_Key_RC_SALES': 'name_of_key',
# 'Target_Key_ITEM_SALES_GROUP': 'name_of_key',
# 'Target_Key_ITEM_SALES_RC': 'name_of_key',
# 'Target_Key_SUMIF_RC': 'name_of_key',
# 'Target_Key_SUMIF_GRP': 'name_of_key',
# 'Branch': 'WF Rada'
# }
# )
# WF_Rockwell_generate_reports = PythonOperator(
# task_id = 'WF_Rockwell_generate_reports',
# python_callable = _generate_reports,
# op_kwargs={'Bucket': 'name_of_bucket',
# 'Key_SDET': 'name_of_key',
# 'Key_SLS': 'name_of_key',
# 'Key_PAGES': 'name_of_key',
# 'Key_MENU': 'name_of_key',
# 'Key_PAGETYPE': 'name_of_key',
# 'Key_REVCENT': 'name_of_key',
# 'Key_TIPOPAG': 'name_of_key',
# 'Key_GROUP_LOOKUP': 'name_of_group_lookup_file',
# 'Key_RC_LOOKUP': 'name_of_rc_lookup_file',
# 'Target_Key_GROUP_SALES': 'name_of_key',
# 'Target_Key_RC_SALES': 'name_of_key',
# 'Target_Key_ITEM_SALES_GROUP': 'name_of_key',
# 'Target_Key_ITEM_SALES_RC': 'name_of_key',
# 'Target_Key_SUMIF_RC': 'name_of_key',
# 'Target_Key_SUMIF_GRP': 'name_of_key',
# 'Branch': 'WF Rockwell'
# }
# )
# WF_Salcedo_generate_reports = PythonOperator(
# task_id = 'WF_Salcedo_generate_reports',
# python_callable = _generate_reports,
# op_kwargs={'Bucket': 'name_of_bucket',
# 'Key_SDET': 'name_of_key',
# 'Key_SLS': 'name_of_key',
# 'Key_PAGES': 'name_of_key',
# 'Key_MENU': 'name_of_key',
# 'Key_PAGETYPE': 'name_of_key',
# 'Key_REVCENT': 'name_of_key',
# 'Key_TIPOPAG': 'name_of_key',
# 'Key_GROUP_LOOKUP': 'name_of_group_lookup_file',
# 'Key_RC_LOOKUP': 'name_of_rc_lookup_file',
# 'Target_Key_GROUP_SALES': 'name_of_key',
# 'Target_Key_RC_SALES': 'name_of_key',
# 'Target_Key_ITEM_SALES_GROUP': 'name_of_key',
# 'Target_Key_ITEM_SALES_RC': 'name_of_key',
# 'Target_Key_SUMIF_RC': 'name_of_key',
# 'Target_Key_SUMIF_GRP': 'name_of_key',
# 'Branch': 'WF Salcedo'
# }
# )
# WF_Uptown_generate_reports = PythonOperator(
# task_id = 'WF_Uptown_generate_reports',
# python_callable = _generate_reports,
# op_kwargs={'Bucket': 'name_of_bucket',
# 'Key_SDET': 'name_of_key',
# 'Key_SLS': 'name_of_key',
# 'Key_PAGES': 'name_of_key',
# 'Key_MENU': 'name_of_key',
# 'Key_PAGETYPE': 'name_of_key',
# 'Key_REVCENT': 'name_of_key',
# 'Key_TIPOPAG': 'name_of_key',
# 'Key_GROUP_LOOKUP': 'name_of_group_lookup_file',
# 'Key_RC_LOOKUP': 'name_of_rc_lookup_file',
# 'Target_Key_GROUP_SALES': 'name_of_key',
# 'Target_Key_RC_SALES': 'name_of_key',
# 'Target_Key_ITEM_SALES_GROUP': 'name_of_key',
# 'Target_Key_ITEM_SALES_RC': 'name_of_key',
# 'Target_Key_SUMIF_RC': 'name_of_key',
# 'Target_Key_SUMIF_GRP': 'name_of_key',
# 'Branch': 'WF Uptown'
# }
# )
# WFI_BGC_generate_reports = PythonOperator(
# task_id = 'WFI_BGC_generate_reports',
# python_callable = _generate_reports,
# op_kwargs={'Bucket': 'name_of_bucket',
# 'Key_SDET': 'name_of_key',
# 'Key_SLS': 'name_of_key',
# 'Key_PAGES': 'name_of_key',
# 'Key_MENU': 'name_of_key',
# 'Key_PAGETYPE': 'name_of_key',
# 'Key_REVCENT': 'name_of_key',
# 'Key_TIPOPAG': 'name_of_key',
# 'Key_GROUP_LOOKUP': 'name_of_group_lookup_file',
# 'Key_RC_LOOKUP': 'name_of_rc_lookup_file',
# 'Target_Key_GROUP_SALES': 'name_of_key',
# 'Target_Key_RC_SALES': 'name_of_key',
# 'Target_Key_ITEM_SALES_GROUP': 'name_of_key',
# 'Target_Key_ITEM_SALES_RC': 'name_of_key',
# 'Target_Key_SUMIF_RC': 'name_of_key',
# 'Target_Key_SUMIF_GRP': 'name_of_key',
# 'Branch': 'WFI BGC'
# }
# )
merge_reports = BranchPythonOperator(
task_id = 'merge_reports',
python_callable = _merge_reports,
op_kwargs={'Bucket': 'name_of_bucket',
'Key_LF_BGC_ISG': 'name_of_key',
'Key_WF_BGC_ISG': 'name_of_key',
'Key_WF_GREENHILLS_ISG': 'name_of_key',
'Key_WF_PODIUM_ISG': 'name_of_key',
'Key_WF_RADA_ISG': 'name_of_key',
'Key_WF_ROCKWELL_ISG': 'name_of_key',
'Key_WF_SALCEDO_ISG': 'name_of_key',
'Key_WF_UPTOWN_ISG': 'name_of_key',
'Key_WFI_BGC_ISG': 'name_of_key',
'Target_Key_ISG': 'name_of_key',
}
)
success = BashOperator(
task_id = 'success',
bash_command = "echo 'Pipeline success'"
)
failure = BashOperator(
task_id = 'failure',
bash_command = "echo 'Pipeline failed'"
)
    # All branch ingest tasks fan in to merge_reports, which then branches
    # to exactly one of success / failure.
    [LF_BGC_DBF_to_Sales_Reports,
     WF_BGC_DBF_to_Sales_Reports,
     WF_Greenhills_DBF_to_Sales_Reports,
     WF_Podium_DBF_to_Sales_Reports,
     WF_Rada_DBF_to_Sales_Reports,
     WF_Rockwell_DBF_to_Sales_Reports,
     WF_Salcedo_DBF_to_Sales_Reports,
     WF_Uptown_DBF_to_Sales_Reports,
     WFI_BGC_DBF_to_Sales_Reports] >> merge_reports >> [success, failure]
import pygame
import sys
from math import pi
import neurodot_present.present_lib as pl
# Demo script (Python 2 — note the bare `print exc` below): shows a static
# fixation cross, then four animated crosses moving for 8 seconds.
pl.DEBUG = False
if __name__ == "__main__":
    pygame.init()
    pygame.mouse.set_visible(False)
    try:
        # Static cross for the initial (non-animated) screen.
        FC = pl.FixationCross(color = 'black')
        # using polar coordinates and specified velocity
        aFC_left = pl.AnimatedFixationCross(use_polar_coords = True,
                                            position_initial = [-0.5, 0],
                                            velocity = [0, -pi],
                                            movement_duration = 8,
                                            color = 'black'
                                            )
        aFC_right = pl.AnimatedFixationCross(use_polar_coords = True,
                                             position_initial = [0.5, 0],
                                             velocity = [0, -pi],
                                             movement_duration = 8,
                                             color = 'blue'
                                             )
        # using cartesian coordinates and specified final position instead of velocity
        aFC_line_left = pl.AnimatedFixationCross(use_polar_coords = False,
                                                 position_initial = [-0.5, 0],
                                                 position_final = [0.5, 0],
                                                 movement_duration = 8,
                                                 color = 'green'
                                                 )
        # using cartesian coordinates and specified velocity
        aFC_line_right = pl.AnimatedFixationCross(use_polar_coords = False,
                                                  position_initial = [0.5, 0],
                                                  velocity = [-1.0/8.0, 0],
                                                  movement_duration = 8,
                                                  color = 'green'
                                                  )
        scr = pl.Screen(color = 'white', fixation_cross = FC)
        aSCR = pl.AnimatedScreen(screen_bgColor = 'white', constrain_aspect = True, sprite_list = [aFC_left, aFC_right, aFC_line_left, aFC_line_right])
        # 2 s static screen, then 10 s animation (sprites stop after 8 s).
        scr.run(duration = 2)
        aSCR.run(duration = 10)
    except pl.UserEscape as exc:
        # User pressed the escape key; report and shut down cleanly.
        print exc
    pygame.quit()
    sys.exit()
from time import sleep
from random import randint

# Jokenpô (rock-paper-scissors): the computer draws a random move, the player
# types one; indices map 0=Pedra, 1=Papel, 2=Tesoura.
itens = ('Pedra', 'Papel', 'Tesoura')
print('Suas opções: ')
print("""[ 0 ] PEDRA
[ 1 ] PAPEL
[ 2 ] TESOURA""")
computador = randint(0, 2)
jogador = int(input('Qual é a sua jogada? '))
# Fixed: validate before indexing `itens` — the original crashed with
# IndexError on an out-of-range choice and never reached "Opção inválida".
if jogador not in (0, 1, 2):
    print('Opção inválida')
else:
    print('JO')
    sleep(1)
    print('KEN')
    sleep(1)
    print('PO!!!')
    print('-=' * 11)
    print('Computador jogou {}'.format(itens[computador]))
    print('Jogador jogou {}'.format(itens[jogador]))
    print('-=' * 11)
    # Winning pairs: papel beats pedra, pedra beats tesoura, tesoura beats papel.
    if computador == 0 and jogador == 1 or computador == 2 and jogador == 0 or computador == 1 and jogador == 2:
        print('JOGADOR VENCE')
    elif jogador == 0 and computador == 1 or jogador == 2 and computador == 0 or jogador == 1 and computador == 2:
        print('COMPUTADOR VENCE')  # typo fix: was 'COPUTADOR'
    else:
        # Only remaining case after validation is a tie.
        print('EMPATE')
|
import os, hashlib, binascii
from flask import Flask, jsonify, request, render_template
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.exc import IntegrityError
from sqlalchemy import exc
from flask_migrate import Migrate
# Flask application + SQLite-backed SQLAlchemy session and migrations.
app = Flask(__name__)
# Absolute path to the DB file — computed but unused; the URI below is relative.
db_path = os.path.join(os.path.dirname(__file__), 'test.db')
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///./test.db'
db = SQLAlchemy(app)
migrate = Migrate(app, db)
class User(db.Model):
    """User account row.

    `password` holds a hex-encoded PBKDF2-HMAC-SHA512 digest (see the POST
    handler), never plaintext; `salt` holds the 16 random bytes fed to
    pbkdf2_hmac for this user.
    """
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(80), unique=True, nullable=False)
    password = db.Column(db.String(100), nullable=False)
    email = db.Column(db.String(120), unique=True, nullable=False)
    # NOTE(review): raw bytes stored in a String(16) column — SQLite accepts
    # this, but confirm it survives a move to a stricter backend.
    salt = db.Column(db.String(16), nullable= False)
    def __repr__(self):
        return '<User,{}>'.format(self.username)
# Create tables at import time if they do not exist yet.
db.create_all()
@app.route('/users/', methods=["GET", "POST", "DELETE"])
def users():
    """Create (POST), list (GET) or delete (DELETE) users.

    POST expects JSON {username, email, password}; the password is stored as
    a hex-encoded PBKDF2-HMAC-SHA512 digest with a per-user random salt.
    """
    if request.method == "POST":
        data = request.json
        if not all((data.get("username"), data.get("email"), data.get("password"))):
            return jsonify({"error": {"description" : "All fields are mandatory"}}), 400
        salt = os.urandom(16)
        psw = hashlib.pbkdf2_hmac('sha512', data.get("password").encode(), salt, 10000)
        u = User(username=data.get("username"), email=data.get("email"), password=binascii.hexlify(psw), salt=salt)
        db.session.add(u)
        try:
            db.session.commit()
        except IntegrityError as e:
            print(e)
            # Fixed: roll back the failed transaction so the session stays
            # usable for subsequent requests.
            db.session.rollback()
            return jsonify({"error": {"description":"User already exist"}}), 400
        else:
            return jsonify({"data":{"username": u.username, "email": u.email}}), 201
    elif request.method == "GET":
        users = User.query.all()
        # NOTE(review): this exposes password hashes in the listing — consider
        # dropping the field from the public response.
        result = [{"username": u.username, "email": u.email, "password": str(u.password)} for u in users]
        return jsonify({"data":result}), 200
    elif request.method == "DELETE":
        data = request.json
        username = data.get("username")
        usr = User.query.filter_by(username=username).first()
        if usr:
            db.session.delete(usr)
            try:
                db.session.commit()
                # Fixed: success payload belongs under "data", not "error"
                # (consistent with the POST and PUT handlers).
                return jsonify({"data":{"description": "User deleted"}}), 200
            except exc.SQLAlchemyError as e:
                print(e)
                db.session.rollback()
                return jsonify({"error":{"description": "Error while processing request"}}), 400
        else:
            return jsonify({"error":{"description": "User does not exist"}}), 400
@app.route('/users/<username>', methods=["PUT"])
def update_user(username):
    """Update a user's email and/or password.

    Fixed: the password is re-hashed with a fresh salt exactly as the POST
    handler does; the original stored the plaintext password, which broke
    subsequent authentication (users_auth compares PBKDF2 digests).
    """
    data = request.json
    email = data.get("email")
    password = data.get("password")
    usr = User.query.filter_by(username=username).first()
    if usr:
        if email:
            usr.email = email
        if password:
            salt = os.urandom(16)
            psw = hashlib.pbkdf2_hmac('sha512', password.encode(), salt, 10000)
            usr.password = binascii.hexlify(psw)
            usr.salt = salt
        try:
            db.session.commit()
            return jsonify({"data":{"description": "User updated"}}), 200
        except exc.SQLAlchemyError as e:
            print(e)
            # Keep the session usable after a failed commit.
            db.session.rollback()
            return jsonify({"error":{"description": "Error while processing request"}}), 400
    else:
        return jsonify({"error":{"description": "User does not exist"}}), 400
@app.route('/users/auth/', methods=["POST"])
def users_auth():
    """Authenticate by email + password; 200 with user data on success,
    400 on missing fields, 404 on bad credentials."""
    if request.method != "POST":
        # Route only accepts POST; mirror the original implicit None return.
        return None
    payload = request.json
    email = payload.get("email")
    password = payload.get("password")
    if not all((email, password)):
        return jsonify({"error": {"description": "Email and password are required"}}), 400
    account = User.query.filter_by(email=email).first()
    if account is None:
        return jsonify({"error":{"description": "Email or password are wrong"}}), 404
    # Recompute the PBKDF2 digest with the stored per-user salt and compare.
    digest = binascii.hexlify(hashlib.pbkdf2_hmac('sha512', password.encode(), account.salt, 10000))
    if digest != account.password:
        return jsonify({"error":{"description": "Email or password are wrong"}}), 404
    return jsonify({"data": {"username": account.username, "email": account.email}}), 200
# Fixed: the guard compared against "main"; the interpreter sets __name__ to
# "__main__", so the dev server was never started when run directly.
if __name__ == "__main__":
    app.run()
|
from django.contrib import admin
from . import models
# Expose the host models in the Django admin site with default ModelAdmin.
admin.site.register(models.Host)
admin.site.register(models.HostDetails)
# Register your models here.
|
from django.contrib import admin
from .models import Institution, Platform, OnlineCourse
# Expose the course catalogue models in the Django admin with default options.
admin.site.register(Institution)
admin.site.register(Platform)
admin.site.register(OnlineCourse)
|
from ClassyVCoderX import ClassyVCoder as ClassyCoder
from keras.datasets import mnist
from keras.utils import to_categorical
import matplotlib.pyplot as plt
import numpy as np
import os
from dataset_generatorX import dataset_generator
from make_parallel import make_parallel
# Training configuration for the ClassyCoder autoencoder+classifier.
CATEGORIES = 5
BATCH_SIZE = 64
EPOCH_SIZE = BATCH_SIZE*8 #must be a multiple of BATCH_SIZE ?!
EPOCHS = 40
# Model over 64x64 single-channel crops, plus its sample generator.
CC = ClassyCoder((64, 64, 1), CATEGORIES, True)
DG = dataset_generator(CC)
def fetch_data(training=True, samples = 128):
    # Returns (inputs, [reconstruction_targets, class_targets]): the
    # autoencoder head reconstructs X itself, the classifier head predicts Y.
    (X, Y) = DG.fetch(samples, training)
    return (X, [X, Y])
def fetch_data_generator(training=True, samples = 128):
    # Infinite wrapper around fetch_data for Keras fit_generator.
    while True:
        yield fetch_data(training, samples)
#(x_train, y_train) = fetch_data(1024, True)
# Python 2 script: bare `print` statements below are py2 syntax.
print "Training begins."
# Train the combined autoencoder + classifier from the streaming generator.
CC.classycoder.fit_generator(fetch_data_generator(True, BATCH_SIZE), EPOCH_SIZE,
                             epochs=EPOCHS,
                             use_multiprocessing=True,
                             workers=4,
                             max_queue_size=128,
                             validation_data=fetch_data_generator(False, BATCH_SIZE),
                             validation_steps=50 )
print "Training ends, testing begins."
(x_test, y_test) = fetch_data(False, 100)
# encode and decode some crops
# note that we take them from the *validation* set
encoded_imgs = CC.encoder.predict(x_test)
decoded_imgs = CC.decoder.predict(encoded_imgs)
classif_imgs = CC.featureclassifier.predict(encoded_imgs)
# y_test[1] holds the categorical class targets (y_test[0] is the input itself).
score = CC.featureclassifier.evaluate(encoded_imgs, y_test[1], verbose=0)
print('Classifier Accuracy:', score)
CC.save(os.path.join(CC.path(), "ClassyVCoderX.h5"))
print("weights saved")
(x_test, y_test) = fetch_data(False, 100)
# encode and decode some crops
# note that we take them from the *test* set
encoded_imgs = CC.encoder.predict(x_test)
decoded_imgs = CC.decoder.predict(encoded_imgs)
classif_imgs = CC.featureclassifier.predict(encoded_imgs)
n = 20 # how many crops we will display
# 4 rows: original, reconstruction, predicted class map, true class map.
plt.figure(figsize=(20, 4))
for i in range(n):
    # display original
    ax = plt.subplot(4, n, i + 1)
    plt.imshow(x_test[i].reshape(64, 64))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
    # display reconstruction
    ax = plt.subplot(4, n, i + 1 + n)
    plt.imshow(decoded_imgs[i].reshape(64, 64))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
    # display classification
    ax = plt.subplot(4, n, i + 1 + 2*n)
    plt.imshow(classif_imgs[i].reshape(1, 5))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
    # true classification
    ax = plt.subplot(4, n, i + 1 + 3*n)
    plt.imshow(y_test[1][i].reshape(1, 5))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
plt.show()
# Visualize each latent code as an 8x8 tile (assumes a 64-dim bottleneck —
# confirm against the ClassyCoder architecture).
n = 20
plt.figure(figsize=(20, 8))
for i in range(n):
    ax = plt.subplot(1, n, i + 1)
    plt.imshow(encoded_imgs[i].reshape(8,8).T)
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
plt.show()
|
#!/usr/bin/python3
'''
Author : Sonal Rashmi / Sanket
Date : 17/07/2020 / 08/01/2021
Description : It takes the IPD directory post successful completion of IPD run, generates a html report with Basic Alignment stats(using picard), Coverage of SARS-CoV2(using samtools), abundance plots (Overall and SARS-CoV2), Novel variant and Clade assessment
version : 2
'''
import os
import pandas as pd
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
from scipy.interpolate import make_interp_spline, BSpline
from globals import *
from gisaidvcfannotator import *
import subprocess
from markdown import markdown
from localblastn import *
from gisaidmetadataparser import *
from variantcladeassessment import *
'''
Input to the object: Output Directory of IPD
'''
class CustomError(Exception):
    """Exception wrapper carrying an arbitrary value.

    Fixed: the constructor was misspelled ``__int__``, so ``self.value`` was
    never assigned and ``__str__`` raised AttributeError when the exception
    was printed.
    """
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return repr(self.value)
class CoV2ReportGenerator(object):
    """Builds a SARS-CoV-2 QC/analysis report from an IPD output directory.

    Scans the directory for per-sample BAMs, count tables, featureCounts
    summaries and annotated VCFs, then produces alignment statistics,
    coverage plots, abundance plots, novel-variant and clade tables under
    ``<outdir>/cov2output/``.
    """
    def __init__(self, outdir=None):
        GlobalVar.initialize()
        if outdir:
            self.outdir=outdir
            # Per-sample file maps populated while scanning the directory.
            self.vcf_map={}
            self.countfile_map={}
            self.total_fragment_list=[]
            self.coverage_map={}
            self.summaryout_map={}
            self.contig_map={}
            self.cov2outdir=outdir+"cov2output/"
            if not os.path.exists(self.cov2outdir):
                os.mkdir(self.cov2outdir)
            for file in os.listdir(self.outdir):
                if file.endswith("_count.bam"):
                    bam=os.path.join(self.outdir, file)
                    bam_sample=os.path.basename(bam)
                    # NOTE(review): this key keeps the full path, unlike the
                    # basename keys used for count/vcf maps — confirm intent.
                    sample=bam.replace("_count.bam","")
                    summary_file_out=self.bam_summary_generation(bam)
                    if summary_file_out:
                        self.summaryout_map[sample]=summary_file_out
                    coverage_file_out=self.bam_coverage(bam)
                    if coverage_file_out:
                        self.coverage_map[sample]=coverage_file_out
                if file.endswith("_finalcounts.tsv"):
                    countfile=os.path.join(self.outdir, file)
                    countfile_sample=os.path.basename(countfile)
                    sample=countfile_sample.replace("_finalcounts.tsv","")
                    self.countfile_map[sample]=countfile
                if file.endswith("_featurecounts.tsv.summary"):
                    # Total assigned fragments per sample, used as the
                    # denominator for the relative-abundance plot.
                    featurecountsummary=os.path.join(self.outdir, file)
                    data=pd.read_csv(featurecountsummary,sep="\t",index_col = "Status")
                    data_t=pd.DataFrame.transpose(data)
                    total=data_t['Assigned'].to_list()[0]
                    self.total_fragment_list.append(total)
                    #print(self.total_fragment_list)
                if file.endswith("_final_annotated.vcf"):
                    vcf=os.path.join(self.outdir, file)
                    vcf_sample=os.path.basename(vcf)
                    sample=vcf_sample.replace("_final_annotated.vcf","")
                    self.vcf_map[sample]=vcf
            #where clade assignment happens
            #print(self.vcf_map)
            self.varcladeassessmentobj_=VariantCladeAssessment(self.vcf_map)
            #self.clade_assessment_obj=GisaidVcfAnnotator(self.vcf_map)
        else:
            print("Input directory %s not found!" %(outdir))
            sys.exit(0)
    def bam_summary_generation(self, bam=None):
        """Sort and index *bam*, then run picard CollectAlignmentSummaryMetrics.

        Returns the summary TSV path, or None on failure.
        """
        summary_file=None
        if bam:
            if os.path.isfile(bam):
                #sort bam
                sortedbam=bam.replace(".bam","_sorted.bam")
                cmd=GlobalVar.picard_+" SortSam INPUT="+ bam +" OUTPUT="+sortedbam+" SORT_ORDER=coordinate VALIDATION_STRINGENCY=SILENT TMP_DIR="+os.path.join(self.outdir, "tmp")
                cprocess=subprocess.run(cmd, shell=True, stdout=subprocess.DEVNULL)
                # Raises CalledProcessError on a non-zero exit status.
                cprocess.check_returncode()
                #bam indexing
                cmd=GlobalVar.picard_+" BuildBamIndex INPUT="+ sortedbam +" OUTPUT="+sortedbam+".bai VALIDATION_STRINGENCY=SILENT TMP_DIR="+os.path.join(self.outdir, "tmp")
                cprocess=subprocess.run(cmd, shell=True, stdout=subprocess.DEVNULL)
                cprocess.check_returncode()
                #picard summary
                summaryout=sortedbam.replace("_count_sorted.bam","_summary.tsv")
                cmd=GlobalVar.picard_+" CollectAlignmentSummaryMetrics R="+ GlobalVar.hspathofa_ +" I="+ sortedbam +" O="+summaryout
                cprocess=subprocess.run(cmd, shell=True, stdout=subprocess.DEVNULL)
                cprocess.check_returncode()
                if os.path.isfile(summaryout):
                    summary_file=summaryout
                else:
                    print("Error in generating summary file "+bam)
        return summary_file
    def bam_coverage(self, bam=None):
        """Compute per-base depth over the SARS-CoV-2 genome with samtools.

        Assumes bam_summary_generation() already produced the sorted BAM.
        Returns the coverage TSV path, or None on failure.
        """
        coverage_out=None
        if bam:
            if os.path.isfile(bam):
                #samtools depth
                sortedbam=bam.replace(".bam","_sorted.bam")
                coverage=sortedbam.replace("_count_sorted.bam","_coverage.tsv")
                # NC_045512.2 is the SARS-CoV-2 reference; -a reports all positions.
                cmd=GlobalVar.samtools_+" depth -r NC_045512.2:1-29903 -a "+ sortedbam + " -o "+coverage
                cprocess=subprocess.run(cmd, shell=True, stdout=subprocess.DEVNULL)
                cprocess.check_returncode()
                if os.path.isfile(coverage):
                    coverage_out=coverage
                else:
                    print("Error in generating coverage for "+bam)
        return coverage_out
    def basic_stats_tabulation_for_file(self,file_name,sample):
        """Parse one picard alignment-summary TSV into a one-column DataFrame
        (rows: Total/Aligned/Percent_Aligned reads and mean length)."""
        # NOTE(review): this handle is never closed and never used — the data
        # is re-read by read_csv below.
        data=open(file_name,'r')
        Pandasobj=pd.read_csv(file_name,skiprows=5,nrows=3,sep="\t",engine='python',header=1)
        PandasDf=pd.DataFrame(data=Pandasobj)
        col = ['TOTAL_READS','PF_READS_ALIGNED','PCT_PF_READS_ALIGNED','MEAN_READ_LENGTH']
        # Paired-end runs report 3 categories; keep FIRST_OF_PAIR only.
        if PandasDf["CATEGORY"].shape[0] == 3:
            PandasDf=PandasDf.loc[PandasDf["CATEGORY"] == "FIRST_OF_PAIR"]
            PandasDf=PandasDf.rename(index={2:0})
        elif PandasDf['CATEGORY'].shape[0] == 1:
            PandasDf=PandasDf.loc[PandasDf["CATEGORY"] == "UNPAIRED"]
        df = pd.DataFrame(PandasDf, columns=col)
        df['Percent_Aligned_Reads']=df['PCT_PF_READS_ALIGNED']*100
        df=df.drop(['PCT_PF_READS_ALIGNED'],axis=1)
        df=df.rename(columns={"TOTAL_READS":"Total_Reads","PF_READS_ALIGNED":"Aligned_Reads","Percent_Aligned_Reads":"Percent_Aligned_Reads","MEAN_READ_LENGTH":"Mean_Read_Length"})
        df=df[["Total_Reads","Aligned_Reads","Percent_Aligned_Reads","Mean_Read_Length"]]
        # Transpose so each sample becomes one column in the batch table.
        df_t = pd.DataFrame.transpose(df).rename(columns={0:os.path.basename(sample)})
        return df_t
    def get_basic_stats_tabulation_for_the_batch(self):
        """Merge all per-sample stat columns and write Basic_stats_summary.csv.

        Returns the CSV path (None if no summaries were collected).
        """
        output_file=None
        all_sample_df = pd.DataFrame()
        cnt=0
        for sample in self.summaryout_map:
            if self.summaryout_map[sample]:
                file=self.summaryout_map[sample]
                # NOTE(review): unused, unclosed handle (see above).
                data=open(file,'r')
                temp_df=self.basic_stats_tabulation_for_file(file,sample)
                if cnt == 0:
                    all_sample_df = temp_df
                else:
                    all_sample_df = all_sample_df.merge(temp_df, left_index=True, right_index=True)
                cnt=cnt+1
                output_file=self.cov2outdir+"Basic_stats_summary.csv"
                all_sample_df.to_csv(output_file)
        if os.path.isfile(output_file):
            print("Basic Stats summary tabulation generated!")
        else:
            print("Error in basic Stats summary output generation.")
        return output_file
    def get_coverage_plot(self):
        """Plot depth-of-coverage per sample; returns the list of PNG paths.

        Also appends sample/median-coverage rows to
        cov2_coverage_compilation.csv (opened in append mode — NOTE(review):
        the handle is never closed and re-runs accumulate rows).
        """
        image_file_list=[]
        coverage_out=None
        #coverage_out=open(os.path.join(self.cov2outdir,"cov2_coverage_compilation.csv"),'a+',encoding='UTF-8')
        coverage_out=open(self.cov2outdir+"cov2_coverage_compilation.csv",'a+',encoding='UTF-8')
        for sample in self.coverage_map:
            if self.coverage_map[sample]:
                x = []
                y = []
                table = pd.read_csv(self.coverage_map[sample] , sep='\t',header=None)
                alignment = pd.DataFrame(data=table)
                # Column 1: genome position, column 2: depth at that position.
                x=alignment[1]
                y=alignment[2]
                axes = plt.gca()
                axes.set_ylim([0,500])
                median=np.median(y)
                ss=sample.split("/")
                sample=ss[len(ss)-1]
                #base name obtained - change Nov 5,2020 - Sanket
                title_var= sample+" (Median Coverage = "+str(median)+" )"
                plt.plot(x,y,color="grey")
                plt.xlabel('Position in SARS CoV2 Genome')
                plt.ylabel('Depth of Coverage')
                plt.title(title_var, loc='center')
                plt.savefig(self.cov2outdir+sample+"_coverage.png")
                plt.close(fig=None)
                image_file_list.append(self.cov2outdir+sample+"_coverage.png")
                print(str(sample)+"\t"+str(median),file=coverage_out)
        print("Coverage output generated!!!")
        return image_file_list
    def get_abundance_plot(self):
        """Build the CoV2 FPKM bar plot and the human/pathogen/CoV2/unaligned
        composition stack plot; returns [fpkm_png, stack_png].

        Side effect: appends per-sample FPKM rows to cov2_fpkm_compilation.csv
        (append mode — accumulates across runs; handle never closed).
        """
        human=[]
        CoV2=[]
        pathogen=[]
        CoV2_fpkm=[]
        pathogen_all=[]
        sample_list=[]
        image_file_list=[]
        cov2_fpkm_out=open(self.cov2outdir+"cov2_fpkm_compilation.csv",'a+',encoding='UTF-8')
        for sample in self.countfile_map:
            file=self.countfile_map[sample]
            # NOTE(review): unused, unclosed handle.
            data=open(file,'r')
            table = pd.read_csv(file, sep='\t')
            count = pd.DataFrame(data=table)
            # Human features are Ensembl gene IDs (ENSG prefix).
            human_df=count[count['Feature'].str[0:4].isin(['ENSG'])].round(2)
            human.append(round(human_df['Fragments'].sum(),2))
            CoV2_df=count[count.ID == "NC_045512.2"]
            CoV2.append(round(CoV2_df["Fragments"],2).to_string(index=False))
            CoV2_fpkm.append(round(CoV2_df["FPKM"],2).to_string(index=False))
            # Everything that is not human counts as pathogen (incl. CoV2).
            pathogen_df=count[~count['Feature'].str[0:4].isin(['ENSG'])]
            pathogen_all.append(round(pathogen_df['Fragments'].sum(),2))
            sample_list.append(sample)
            print(str(sample)+"\t"+str(round(CoV2_df["FPKM"],2).to_string(index=False)),file=cov2_fpkm_out)
        print("Abundance Summary generated!")
        CoV2=list(map(float, CoV2))
        CoV2_fpkm=list(map(float, CoV2_fpkm))
        # NOTE(review): assumes total_fragment_list was filled in the same
        # directory-listing order as countfile_map — confirm.
        Total=self.total_fragment_list
        pathogen=[x1 - x2 for (x1, x2) in zip(pathogen_all, CoV2)]
        Unaligned=[round(x4 - (x1 + x2 + x3),2) for (x1, x2, x3, x4) in zip(human, pathogen, CoV2, Total)]
        #Plot FPKM
        CoV2_fpkm_log2=[]
        for i in CoV2_fpkm:
            # log2-transform positive FPKMs; keep zeros/negatives as-is.
            if i > 0:
                x=np.log2(i)
            else:
                x=i
            CoV2_fpkm_log2.append(x)
        sample=sample_list
        log2FPKM=CoV2_fpkm_log2
        #print(sample,log2FPKM)
        sample_pos = [i for i, _ in enumerate(sample)]
        fpkm_plot_file=self.cov2outdir+'CoV2_FPKM.png'
        plt.bar(sample_pos, log2FPKM, color='red', width=1)
        plt.xlabel('')
        plt.ylabel('CoV2 log2 FPKM')
        plt.title("SARS-CoV-2 quantification (FPKM)")
        plt.xticks(sample_pos, sample, rotation=90)
        plt.savefig(fpkm_plot_file, bbox_inches = 'tight', pad_inches = 0.1)
        #plt.savefig(fpkm_plot_file, bbox_inches = 'tight')
        plt.close(fig=None)
        print("FPKM plot is generated")
        #Plot Stack bar plot
        Sample = sample_list
        # Convert fragment counts to percentages of total assigned fragments.
        CoV2= np.array([(x/y)*100 for x, y in zip(map(float, CoV2), map(int, Total))])
        Human= np.array([(x/y)*100 for x, y in zip(map(float, human), map(int, Total))])
        Pathogen= np.array([(x/y)*100 for x, y in zip(map(float, pathogen), map(int, Total))])
        Unaligned= np.array([(x/y)*100 for x, y in zip(map(float, Unaligned), map(int, Total))])
        ind = [x for x, _ in enumerate(Sample)]
        #print(Sample)
        #print(CoV2)
        #print(Human)
        #print(Pathogen)
        #print(Unaligned)
        # Stacked bars bottom-to-top: CoV2, pathogen, human, unaligned.
        plt.bar(ind, Unaligned, width=0.5, label='Unaligned', color='blue', bottom=Human + Pathogen+ CoV2)
        plt.bar(ind, Human, width=0.5, label='Human', color='green', bottom=Pathogen + CoV2)
        plt.bar(ind, Pathogen, width=0.5, label='Pathogen', color='yellow', bottom=CoV2)
        plt.bar(ind, CoV2, width=0.5, label='CoV2', color='red')
        stack_plot_file=self.cov2outdir+'Samples_stackbar.png'
        plt.xticks(ind, Sample, rotation = 90)
        plt.ylabel("Relative Composition")
        plt.xlabel("")
        plt.legend(loc="upper right")
        plt.title("Sample Composition")
        plt.savefig(stack_plot_file, bbox_inches = 'tight', pad_inches = 0.1)
        plt.close(fig=None)
        print("Abundance stack plot is generated!")
        #print("Abundance stack plot is generated!")
        image_file_list=[fpkm_plot_file,stack_plot_file]
        return image_file_list
    def get_clade_assessment(self):
        """Run the variant-based clade assessment; writes the CSV and returns
        the DataFrame. Exits the process on assessment failure."""
        clade_df=pd.DataFrame()
        try:
            #clade_df=self.clade_assessment_obj.get_pandas_df_clade_assessment()
            clade_df=self.varcladeassessmentobj_.get_clade_assessment_data_frame()
        except Exception as e:
            print(e)
            print("Error in Clade Assessment")
            sys.exit(0)
        cov2_clade_out=self.cov2outdir+"Variant_based_clade_assessment.csv"
        clade_df.to_csv(cov2_clade_out)
        return clade_df
    def get_novel_variant(self):
        """Collect novel variants; writes Novel_variant.csv and returns the
        DataFrame (empty on failure — error is only printed)."""
        novel_var_df=pd.DataFrame()
        try:
            #novel_var_df=self.clade_assessment_obj.get_pandas_df_novel_variant()
            novel_var_df=self.varcladeassessmentobj_.get_novel_variant_data_frame()
        except:
            print("Error in Novel Variant")
        novel_var_out=self.cov2outdir+"Novel_variant.csv"
        novel_var_df.to_csv(novel_var_out)
        return novel_var_df
'''
input: output directory of IPD post successful run
'''
import argparse
def main():
    """CLI entry point: run the full report pipeline on an IPD output
    directory (-dir) and render Output.html + OutputReport.pdf."""
    parser = argparse.ArgumentParser(description='Generate an automated report for IPD analysed, SARS-CoV-2 sequenced samples')
    parser.add_argument('-dir',const=None, help="IPD output directory location", dest='inputdir')
    if len(sys.argv) < 2:
        parser.print_help()
        sys.exit(0)
    args=parser.parse_args()
    outdir=args.inputdir
    if not outdir.endswith("/"):
        outdir = outdir +"/"
    if os.path.exists(outdir):
        obj=CoV2ReportGenerator(outdir)
        out_file=obj.get_basic_stats_tabulation_for_the_batch()
        cov_plot_list=obj.get_coverage_plot()
        ab_plot_list=obj.get_abundance_plot()
        clade_df=obj.get_clade_assessment()
        novel_variant_df=obj.get_novel_variant()
        cov2outdir=outdir+"cov2output/"
        # Assemble the HTML report section by section via markdown().
        output_file = open(cov2outdir+"Output.html", "w", encoding="utf-8", errors="xmlcharrefreplace")
        header=markdown("#**IPD REPORT**")
        output_file.write(header)
        header1 = markdown("###**Sequence Statistics**")
        output_file.write(header1)
        df=pd.read_table(out_file, sep=',',index_col=0)
        html = df.round(2).to_html()
        output_file.write(html)
        header2 = markdown("###**Coverage Plot**")
        output_file.write(header2)
        # NOTE(review): the markdown image strings below are empty/blank in
        # this copy — the `![](path)` markup appears to have been lost;
        # restore from version control. Also note the ==2 case emits nothing.
        if len(cov_plot_list) > 2:
            for i,k in zip(cov_plot_list[0::2], cov_plot_list[1::2]):
                image=" "
                html_image = markdown(image)
                output_file.write(html_image)
        elif len(cov_plot_list) == 1:
            image=""
            html_image = markdown(image)
            output_file.write(html_image)
        #Commented to reason the image placement error
        #image2="
        #image2="
        image2=""
        image0=""
        header3 = markdown("###**Relative Abundance**")
        output_file.write(header3)
        html_image2 = markdown(image2)
        output_file.write(html_image2)
        #Added for testing
        html_image0 = markdown(image0)
        output_file.write(html_image0)
        header6 = markdown("###**Novel Variants**")
        output_file.write(header6)
        if not novel_variant_df.empty:
            html2 = novel_variant_df.round(2).to_html(index=False,justify='center')
        else:
            html2 = markdown("No novel variants in the sample.")
        output_file.write(html2)
        header5 = markdown("###**Variant Based Related Strains**")
        output_file.write(header5)
        if not clade_df.empty:
            html3 = clade_df.round(2).to_html(index=False,justify='center')
        else:
            html3 = markdown("No intersecting variants found. Clade cannot be determined.")
        output_file.write(html3)
        output_file.close()
        #HTML to PDF report
        #import pdfkit
        #pdfkit.from_file(os.path.join(cov2outdir,"Output.html"), os.path.join(cov2outdir,"OutputReport.pdf"))
        #2.0 uses wkhtmltopdf
        cmd= "wkhtmltopdf " + cov2outdir+"Output.html " +cov2outdir+"OutputReport.pdf"
        cprocess=subprocess.run(cmd, shell=True)
        cprocess.check_returncode()
if __name__ =="__main__":
    main()
|
import instabot

# NOTE(review): hard-coded credentials — load these from the environment or a
# secrets file instead of committing them to source control.
bot = instabot.Bot()
bot.login(username = "patagonian_review", password = "comandos1")
# (fixed: removed a stray no-op `1` expression statement left over from editing)

# Obtain the list of users of interest from the target hashtag feeds.
hashtags = ["Bariloche", "SanMartin", "Patagonia", "Sur"]
hashtag_users = []
for hashtag in hashtags:
    users_one_hashtag = bot.get_hashtag_users(hashtag)
    for users in users_one_hashtag:
        hashtag_users.append(users)
# The list of candidates to follow is in hashtag_users.
# Trial: follow 50 of them.
#bot.follow_users(hashtag_users[1:50])

# Gather all available profile information for a sample of those users.
# df = DataFrame(list(diccionario.items()) — convert to a DataFrame later, or
# keep working with plain dicts.
user_info_dic_list = []
for user in hashtag_users[1:50]:
    user_info = bot.get_user_info(user)
    user_info_dic_list.append(user_info)
ds = user_info_dic_list
# Pivot the list of per-user dicts into one dict of tuples keyed by field.
# NOTE(review): keys come from ds[1] (the second profile); this raises
# IndexError when fewer than two profiles were fetched — confirm intent.
d = {}
for k in ds[1].keys():
    # fixed: the original generator reused `d` as its loop variable, shadowing
    # the result dict being assigned into — renamed for clarity
    d[k] = tuple(info[k] for info in ds)
print(d)

# To upload a photo:
#bot.upload_photo(photo = "/home/santiago/Downloads/GPM.jpg",
#                 caption = "Who knows the name of this wonderful spot? /n #Patagonia")
bot.upload_photo(photo = "/home/santiago/Downloads/Gutierrez.jpg",
                 caption = "Lago Gutierrez! unforgettable place /n #Patagonia #l:398302889")
|
# KVM-based Discoverable Cloudlet (KD-Cloudlet)
# Copyright (c) 2015 Carnegie Mellon University.
# All Rights Reserved.
#
# THIS SOFTWARE IS PROVIDED "AS IS," WITH NO WARRANTIES WHATSOEVER. CARNEGIE MELLON UNIVERSITY EXPRESSLY DISCLAIMS TO THE FULLEST EXTENT PERMITTEDBY LAW ALL EXPRESS, IMPLIED, AND STATUTORY WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND NON-INFRINGEMENT OF PROPRIETARY RIGHTS.
#
# Released under a modified BSD license, please see license.txt for full terms.
# DM-0002138
#
# KD-Cloudlet includes and/or makes use of the following Third-Party Software subject to their own licenses:
# MiniMongo
# Copyright (c) 2010-2014, Steve Lacy
# All rights reserved. Released under BSD license.
# https://github.com/MiniMongo/minimongo/blob/master/LICENSE
#
# Bootstrap
# Copyright (c) 2011-2015 Twitter, Inc.
# Released under the MIT License
# https://github.com/twbs/bootstrap/blob/master/LICENSE
#
# jQuery JavaScript Library v1.11.0
# http://jquery.com/
# Includes Sizzle.js
# http://sizzlejs.com/
# Copyright 2005, 2014 jQuery Foundation, Inc. and other contributors
# Released under the MIT license
# http://jquery.org/license
__author__ = 'Sebastian'
import M2Crypto
import os
import binascii
import base64
import struct
# Default RSA parameters: 2048-bit modulus, standard public exponent F4.
DEFAULT_RSA_LENGTH_BITS = 2048
DEFAULT_RSA_PUB_EXPONENT = 65537
#############################################################################################
# Creates a key pair and stores it in the given locations.
#############################################################################################
def create_key_pair(private_key_file_path, public_key_file_path, key_length=DEFAULT_RSA_LENGTH_BITS, exponent=DEFAULT_RSA_PUB_EXPONENT):
    # Seed the random number generator with 1024 random bytes (8192 bits)
    M2Crypto.Rand.rand_seed(os.urandom(1024))
    print "Generating a 2048 bit private/public key pair..."
    keypair = M2Crypto.RSA.gen_key(key_length, exponent)
    # None cipher argument: presumably saves the private key unencrypted
    # (no passphrase) — confirm against the M2Crypto API.
    keypair.save_key(private_key_file_path, None)
    keypair.save_pub_key(public_key_file_path)
    #print "Public exponent: " + str(get_exponent(keypair))
    #print "Modulus: " + str(get_modulus(keypair))
#############################################################################################
# Returns the exponent as an int, assuming its size in bytes is written in 1 byte.
#############################################################################################
def get_exponent(keypair):
    # keypair.e is the DER-encoded public exponent; strip its 1-byte length prefix.
    return get_int_from_der(keypair.e, 1)
#############################################################################################
# Returns the modulus, asuming it is key whose length in bytes is written in 2 bytes.
# The extra one removed is the 00 added to pad).
#############################################################################################
def get_modulus(keypair):
    # keypair.n is DER-encoded: strip 2 length bytes + 1 zero padding byte.
    return get_int_from_der(keypair.n, 3)
#############################################################################################
# Converts a public RSA key from PEM format into adbkey.pub format.
# NOTE: this assumes that the ASB keys are all of 2048-bit length.
#############################################################################################
def convert_pub_rsa_to_adb(rsa_file_path, adb_file_path):
    print "Converting RSA public key file into adbkey.pub..."
    # Get values from the RSA public key.
    public_key = M2Crypto.RSA.load_pub_key(rsa_file_path)
    exponent = get_exponent(public_key)
    modulus = get_modulus(public_key)
    key_size_bits = DEFAULT_RSA_LENGTH_BITS
    # Calculate derived values.
    length = key_size_bits / 32 # Key size in words (4-byte chunks)
    # Montgomery parameter R^2 mod N, precomputed for the adb key format.
    r = pow(2, key_size_bits)
    rr = pow(r, 2) % modulus
    #print 'R: ' + str(pow(2, key_size_bits))
    #print 'RR: ' + hex(rr)
    # Transform length and exponent into byte arrays.
    length_array = int_to_bytes(length, endianness='little')
    exponent_array = int_to_bytes(exponent, endianness='little')
    #print 'Exp: ' + binascii.hexlify(exponent_array)
    # Move big integers into arrays.
    modulus_array = int_to_bytes(modulus, endianness='little')
    rr_array = int_to_bytes(rr, endianness='little')
    #print 'Mod Array: ' + binascii.hexlify(modulus_array)
    #print 'RR Array: ' + binascii.hexlify(rr_array)
    # Calculate remaining values.
    # n0inv = -1/N mod 2^32, another Montgomery precomputation.
    b = pow(2, 32)
    n0inv = b - mod_inverse(modulus, b)
    n0inv_array = int_to_bytes(n0inv, endianness='little')
    # Put all values in one byte array.
    data = length_array + n0inv_array + modulus_array + rr_array + exponent_array
    print 'Total size: ' + str(len(data))
    # Base64 encode the whole thing.
    b64_string = base64.b64encode(data)
    # Add the unused user.
    key_string = b64_string + ' unknown@unknown'
    # Store into file.
    with open(adb_file_path, "w") as text_file:
        text_file.write(key_string)
#############################################################################################
# Returns an integer from a DER value (which comes with its overhead/length).
#############################################################################################
def get_int_from_der(der_value, bytes_to_remove=0):
    # NOTE: Python 2 only — hexlify returns str here; under Python 3 it
    # returns bytes and lstrip('0') would raise TypeError.
    # Remove leading 0s, but ensure we have a valid hex representation (it may need 1 leading zero).
    hex = binascii.hexlify(der_value)  # NOTE(review): shadows the builtin `hex`
    stripped_hex = hex.lstrip('0')
    if len(stripped_hex) % 2 != 0:
        stripped_hex = '0' + stripped_hex
    #print stripped_hex
    # Remove the overhead (usually length).
    # Each byte is 2 hex digits, hence the factor of 2.
    value_hex = stripped_hex[2*bytes_to_remove:]
    #print value_hex
    # Return the int representation of this hex value.
    return int(value_hex, 16)
###################################################################################
# Packs a non-negative integer into a bytearray of 32-bit words.
###################################################################################
def int_to_bytes(val, endianness='big'):
    # Split the value into 32-bit words, least-significant word first.
    word_mask = 0xFFFFFFFF
    words = []
    remaining = val
    while remaining:
        words.append(remaining & word_mask)
        remaining >>= 32
    # A zero value still occupies one word.
    if not words:
        words = [val]
    # For big-endian output both the word order and the per-word byte order
    # flip; struct handles the bytes, we reverse the word list ourselves.
    if endianness == 'big':
        words.reverse()
        byte_order = '>'
    else:
        byte_order = '<'
    packer = struct.Struct(byte_order + 'I' * len(words))
    return bytearray(packer.pack(*words))
###################################################################################
# Modular multiplicative inverse via the extended Euclidean algorithm.
###################################################################################
def mod_inverse(x, p):
    """
    Calculate the modular inverse of x ( mod p )
    the modular inverse is a number such that:
    (inverse(x, p) * x) % p == 1
    you could think of this as: 1/x

    Fixed: the quotient used `/`, which is float division under Python 3 and
    silently corrupts results for large integers (e.g. RSA moduli); `//` is
    exact integer floor division and behaves identically under Python 2.
    Note: the result may be negative; callers reduce it mod p themselves.
    """
    inv1 = 1
    inv2 = 0
    while p != 1 and p != 0:
        inv1, inv2 = inv2, inv1 - inv2 * (x // p)
        x, p = p, x % p
    return inv2
|
# NOTE(review): `humanPlayer` and `ply` are expected to be defined earlier
# (e.g. in a notebook session or an earlier part of the file) — neither is
# defined in this chunk.
h = humanPlayer()
ply.eps=0  # presumably zero exploration so the trained player acts greedily — confirm
def human_game(ply, opp, dialog=True):
    """Play one liar's-dice-style game between ply (player 0) and opp
    (player 1); returns the terminal reward from env.callout."""
    #choose players from ['human', 'defaultOpp', 'opponent', 'qOpp', 'deepQopp']
    #ply is player 0, opp is player 1
    # Random starting player (0 or 1) chosen by the environment.
    env = Environment(np.random.choice(2))
    ply.player=0
    opp.player=1
    reward=0
    startingPlayer = env.beginner
    print('opponent hand was: ', opp.dice)
    def _dialog(ply='Player', dialog=dialog):
        # Print either the final reward or the latest bet, if dialog is on.
        if dialog:
            if reward: print('Reward: ', reward)
            else: print(ply, 'bet: ', env.state[-1])
    if startingPlayer==0:
        called, bet = ply.makeTurn(env)
        env.finishTurn(bet)
        _dialog()
    while True:
        # for debugging insert: print('current state :', env.state)
        #opponent turn:
        called, bet = opp.makeTurn(env)
        if called:
            reward = env.callout(player=1, ply=ply, opp=opp)
        else:
            env.finishTurn(bet)
            # Bet count of 8 exceeds the maximum — treat as an overbet.
            if env.state[-1][0]==8:
                reward = env.callout(player=0, ply=ply, opp=opp)
                if dialog:
                    print('Opponent overbet. pips: ', env.state[-1][1])
        _dialog(ply='Opponent')
        if reward:
            return reward
        #player turn:
        called, bet = ply.makeTurn(env)
        if called:
            reward = env.callout(player=0, ply=ply, opp=opp)
        else:
            env.finishTurn(bet)
            if env.state[-1][0]>=8:
                reward = env.callout(player=1, ply=ply, opp=opp)
                if dialog:
                    print('Player overbet. pips: ', env.state[-1][1])
        _dialog()
        if reward:
            return reward
    # NOTE(review): unreachable after the `while True` loop (every exit path
    # returns) — presumably intended to reveal the hand to a human player.
    if str(ply)=='human':
        print('opponent hand was: ', opp.dice)
# Reset both players' per-game state and play one human-vs-trained game.
h.reset()
ply.reset()
human_game(h, ply)
|
from ABC.NodeAST import NodeAST
from ABC.Instruction import Instruction
from ST.Exception import Exception
from ST.Type import TYPE, getTypeString
from ST.SymbolTable import SymbolTable
from Instructions.Break import Break
from Instructions.Return import Return
from Instructions.Continue import Continue
class For(Instruction):
    """AST node for a C-style ``for`` loop: initializer; condition; increment."""
    def __init__(self, declaration_instruction, expression, for_increment, instructions, line, column):
        self.declaration_instruction = declaration_instruction  # loop initializer (runs once)
        self.expression = expression  # loop condition; must evaluate to BOOLEAN
        self.instructions = instructions  # loop body (list of instructions); may be None
        self.for_increment = for_increment  # executed after every iteration
        self.line = line
        self.column = column
        # Array metadata flags used elsewhere in the interpreter framework.
        self.array = False
        self.arrayDim = 0
        self.arraySize = 0
        self.ReportSymbol = None  # last symbol-table report row for this loop
    def interpreter(self, tree, table):
        """
        Execute the loop.

        Creates a child scope for the loop variable, evaluates the
        condition before every pass, runs the body in a fresh scope, then
        runs the increment.  Returns an Exception value on semantic
        errors, the Return result when the body returns, None otherwise.
        """
        ambitfor = SymbolTable(table)  # scope owning the loop variable
        var_de = self.declaration_instruction.interpreter(tree, ambitfor)
        if isinstance(var_de, Exception): return var_de
        while True:
            expression = self.expression.interpreter(tree, ambitfor)
            if isinstance(expression, Exception): return expression
            if self.expression.type == TYPE.BOOLEAN:
                if expression == True: # condition holds: run one iteration
                    newTable2 = SymbolTable(ambitfor) # fresh environment per iteration
                    if self.instructions != None:
                        for instruction in self.instructions:
                            result = instruction.interpreter(tree, newTable2) # run one body instruction
                            # print(instruction)
                            if isinstance(result, Exception) :
                                # Non-fatal semantic error: report and keep executing.
                                tree.getException().append(result)
                                tree.updateConsole(result.toString())
                            if isinstance(result, Continue):
                                # NOTE(review): this runs the increment but does NOT
                                # break out of this instruction list, so statements
                                # after `continue` still execute — confirm intended
                                # semantics.
                                decintcrement = self.__increment(tree, ambitfor) #new ambit for cicle and ignored instruction afer continue
                            if isinstance(result, Break): return None
                            if isinstance(result, Return): return result
                    # Row for the symbol-table report UI.
                    self.ReportSymbol = ({ 'ID': self.declaration_instruction.id,
                                'TYPE': 'Funcion',
                                'TYPE2': getTypeString(self.declaration_instruction.type),
                                'ENTORNO': "Instruccion For",
                                'VALOR': var_de,
                                'LINE': self.line,
                                'COLUMN': self.column,
                                })
                else:
                    break
            else:
                return Exception("Semantico", "Tipo de dato no booleano en for.", self.line, self.column)
            decintcrement = self.__increment(tree, ambitfor)
            if isinstance(decintcrement, Exception): return decintcrement
    def __increment(self, tree, table):
        # Run the third for-clause (e.g. i++) in the loop's own scope.
        return self.for_increment.interpreter(tree, table)
    def getNode(self):
        """Build the AST-visualization node for this loop."""
        node = NodeAST("FOR")
        nodeCondition = NodeAST("PARAMETROS")
        nodeCondition.addChild(self.declaration_instruction.getNode())
        nodeCondition.addChild(self.expression.getNode())
        nodeCondition.addChild(self.for_increment.getNode())
        node.addChild(nodeCondition)
        nodeInstructions = NodeAST("INSTRUCCIONES")
        for instruction in self.instructions:
            nodeInstructions.addChild(instruction.getNode())
        node.addChild(nodeInstructions)
        return node
# Stock Bootstrap 5.3.0 stylesheets served from the jsDelivr CDN.
_BOOTSTRAP_BASE = "https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/"
BOOTSTRAP = _BOOTSTRAP_BASE + "bootstrap.min.css"
GRID = _BOOTSTRAP_BASE + "bootstrap-grid.min.css"

# Bootswatch 5.3.0 themes: drop-in Bootstrap builds with alternate looks.
_BOOTSWATCH_BASE = "https://cdn.jsdelivr.net/npm/bootswatch@5.3.0/dist/"
CERULEAN = f"{_BOOTSWATCH_BASE}cerulean/bootstrap.min.css"
COSMO = f"{_BOOTSWATCH_BASE}cosmo/bootstrap.min.css"
CYBORG = f"{_BOOTSWATCH_BASE}cyborg/bootstrap.min.css"
DARKLY = f"{_BOOTSWATCH_BASE}darkly/bootstrap.min.css"
FLATLY = f"{_BOOTSWATCH_BASE}flatly/bootstrap.min.css"
JOURNAL = f"{_BOOTSWATCH_BASE}journal/bootstrap.min.css"
LITERA = f"{_BOOTSWATCH_BASE}litera/bootstrap.min.css"
LUMEN = f"{_BOOTSWATCH_BASE}lumen/bootstrap.min.css"
LUX = f"{_BOOTSWATCH_BASE}lux/bootstrap.min.css"
MATERIA = f"{_BOOTSWATCH_BASE}materia/bootstrap.min.css"
MINTY = f"{_BOOTSWATCH_BASE}minty/bootstrap.min.css"
MORPH = f"{_BOOTSWATCH_BASE}morph/bootstrap.min.css"
PULSE = f"{_BOOTSWATCH_BASE}pulse/bootstrap.min.css"
QUARTZ = f"{_BOOTSWATCH_BASE}quartz/bootstrap.min.css"
SANDSTONE = f"{_BOOTSWATCH_BASE}sandstone/bootstrap.min.css"
SIMPLEX = f"{_BOOTSWATCH_BASE}simplex/bootstrap.min.css"
SKETCHY = f"{_BOOTSWATCH_BASE}sketchy/bootstrap.min.css"
SLATE = f"{_BOOTSWATCH_BASE}slate/bootstrap.min.css"
SOLAR = f"{_BOOTSWATCH_BASE}solar/bootstrap.min.css"
SPACELAB = f"{_BOOTSWATCH_BASE}spacelab/bootstrap.min.css"
SUPERHERO = f"{_BOOTSWATCH_BASE}superhero/bootstrap.min.css"
UNITED = f"{_BOOTSWATCH_BASE}united/bootstrap.min.css"
VAPOR = f"{_BOOTSWATCH_BASE}vapor/bootstrap.min.css"
YETI = f"{_BOOTSWATCH_BASE}yeti/bootstrap.min.css"
ZEPHYR = f"{_BOOTSWATCH_BASE}zephyr/bootstrap.min.css"
|
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.shortcuts import render, redirect
from .models import *
# Create your views here.
# Home page
def index(request):
    """Render the public landing (home) page."""
    return render(request, 'app/index.html')
# ******************************************* #
# #
# L O G I N #
# #
# ******************************************* #
# Faculty Login
def login_as_faculty(request):
    """
    Authenticate a faculty member by email and password.

    On success the user is logged in and redirected to the faculty
    portal; otherwise an error message is flashed and the login form is
    rendered again.

    :param request: a must have object, which make this
                    function a "view" and contains every
                    information sent by user
    :return: Render a response based on provided parameters
    """
    if request.method == "POST":
        credentials = {
            'username': request.POST['faculty_email'],
            'password': request.POST['faculty_password'],
        }
        user = authenticate(request, **credentials)
        if user is None:
            messages.error(request, "Invalid Email or Password")
        else:
            login(request, user)
            return redirect('faculty-portal')
    return render(request, 'app/faculty/registration/login.html')
# Student Login
def login_as_student(request):
    """
    Authenticate a student by registration number (composed from batch,
    program and number) and password.

    :param request: a must have object, which make this
                    function a "view" and contains every
                    information sent by user
    :return: Render a response based on provided parameters
    """
    context = {
        'program': Program.objects.all(),
        'batch': Batch.objects.all(),
    }
    if request.method == "POST":
        print("POST---------: ", request.POST)
        post = request.POST
        # Registration number is composed as <batch>-<program>-<number>.
        regno = "{}-{}-{}".format(
            post['student_batch'], post['student_program'], post['student_number'])
        user = authenticate(request, username=regno, password=post['student_password'])
        if user is None:
            messages.error(request, "Invalid Registration Number or Password")
        else:
            login(request, user)
            return redirect('student-portal')
    return render(request, 'app/students/registration/login.html', context)
# ******************************************* #
# #
# Registration #
# #
# ******************************************* #
# Faculty Registration
def signup_as_faculty(request):
    """
    Register faculty based on provided information, also log the faculty
    in if registration is successful.

    NOTE(review): ``authenticate`` only verifies credentials of an
    EXISTING user — it never creates one.  For a brand-new signup it
    returns None, so this view silently re-renders the form without
    saving anything; a ``create_user(...)`` call appears to be missing.
    Confirm intent.

    :param request: a must have object, which make this
                    function a "view" and contains every
                    information sent by user
    :return: Render a response based on provided parameters or
             redirect to portal if successfully registered
    """
    if request.method == "POST":
        print(request.POST)  # NOTE(review): debug print left in the view
        faculty_name = request.POST['faculty_name']
        faculty_email = request.POST['faculty_email']
        faculty_password = request.POST['faculty_password']
        faculty_password_confirm = request.POST['faculty_password_confirm']
        if faculty_password == faculty_password_confirm:
            user = authenticate(request, username=faculty_email, password=faculty_password)
            if user is not None:
                user.first_name = faculty_name
                user.is_faculty = True
                user.save()
                faculty = Faculty()
                faculty.user = user
                # NOTE(review): request.FILES["faculty_image"] raises
                # MultiValueDictKeyError when no file was uploaded;
                # request.FILES.get(...) would be safer.
                if request.FILES["faculty_image"]:
                    faculty.image = request.FILES["faculty_image"]
                faculty.save()
                login(request, user)
                return redirect('faculty-portal')
        else:
            # error about registration
            pass
    return render(request, 'app/faculty/registration/signup.html')
# Student Signup
def signup_as_student(request):
    """
    Register student based on provided information, also log them in if
    registration is successful.

    NOTE(review): ``authenticate`` only verifies credentials of an
    EXISTING user — it never creates one.  For a brand-new signup
    ``user`` is None and nothing is saved; a ``create_user(...)`` call
    appears to be missing.  Confirm intent.

    :param request: a must have object, which make this
                    function a "view" and contains every
                    information sent by user
    :return: Render a response based on provided parameters or
             redirect to portal if successfully registered
    """
    context = {
        'program': Program.objects.all(),
        'batch': Batch.objects.all(),
    }
    if request.method == "POST":
        print(request.POST)  # NOTE(review): debug print left in the view
        student_name = request.POST['student_name']
        student_batch = request.POST['student_batch']
        student_program = request.POST['student_program']
        student_number = request.POST['student_number']
        student_email = request.POST['student_email']
        student_password = request.POST['student_password']
        student_password_confirm = request.POST['student_password_confirm']
        # Registration number is composed as <batch>-<program>-<number>.
        regno = "{}-{}-{}".format(student_batch, student_program, student_number)
        if student_password == student_password_confirm:
            user = authenticate(request, username=regno, password=student_password)
            if user is not None:
                user.first_name = student_name
                user.email = student_email
                user.is_student = True
                user.save()
                student = Student()
                student.user = user
                student.batch = Batch.objects.get(pk=int(student_batch))
                student.program = Program.objects.get(pk=int(student_program))
                student.number = student_number
                # NOTE(review): request.FILES["student_image"] raises
                # MultiValueDictKeyError when no image was uploaded;
                # request.FILES.get(...) would be safer.
                if request.FILES["student_image"]:
                    student.image = request.FILES['student_image']
                student.save()
                login(request, user)
                return redirect('student-portal')
        else:
            # error about registration
            pass
    return render(request, 'app/students/registration/signup.html', context)
# Logout any user
def user_logout(request):
"""
Logout any user Faculty or Student
:param request:
:return: Redirect to Home Page
"""
logout(request)
return redirect('home')
# ******************************************* #
# #
# F A C U L T Y #
# #
# ******************************************* #
@login_required
def faculty_portal(request):
    """
    Render the faculty portal page after successful login or
    registration.

    :param request:
    :return: rendered faculty profile page
    """
    return render(request, 'app/faculty/profile.html')
@login_required
def faculty_notifications(request):
    """
    List notifications for the currently logged-in faculty member,
    unseen first, newest first.

    :param request:
    :return: rendered notifications page
    """
    context = {}
    if request.user.is_authenticated:
        try:
            faculty = request.user.faculty
            # `faculty_receiver` is presumably the reverse relation from
            # FacultyNotifications.receiver — TODO confirm against models.
            notifications = faculty.faculty_receiver.all().order_by('is_seen', '-created_at')
            context['notifications'] = notifications
        except FacultyNotifications.DoesNotExist:
            # NOTE(review): the queryset above cannot raise DoesNotExist; the
            # attribute access request.user.faculty raises Faculty.DoesNotExist
            # instead, which this clause would not catch — confirm.
            context['notifications'] = None
    return render(request, 'app/faculty/notifications.html', context)
@login_required
def mark_faculty_notification_read(request, pk):
    """
    Flag a single faculty notification as seen when "Mark Read" is
    clicked.

    NOTE: no ownership check is performed — any authenticated user who
    knows the pk can mark the notification read.

    :param request:
    :param pk: ID of Notification/Message
    :return: redirect back to the notifications list
    """
    note = FacultyNotifications.objects.get(pk=pk)
    note.is_seen = True
    note.save()
    return redirect('faculty-notifications')
def _set_faculty_flag(request, pk, attr, value, title=None, msg=None):
    """
    Shared implementation for the privilege-toggle views below: set one
    boolean field on a Faculty row and optionally notify that member.

    :param request: the incoming HttpRequest (sender is request.user.faculty)
    :param pk: primary key of the target Faculty
    :param attr: name of the boolean field to set (e.g. 'is_supervisor')
    :param value: True to grant, False to revoke
    :param title: notification title, or None to skip the notification
    :param msg: notification body (used only when title is not None)
    :return: redirect to the coordinator supervisors page
    """
    # All callers are @login_required, so this guard mirrors the original
    # defensive check rather than adding new behavior.
    if request.user.is_authenticated:
        faculty = Faculty.objects.get(pk=pk)
        setattr(faculty, attr, value)
        faculty.save()
        if title is not None:
            notify_faculty(title, msg, request.user.faculty, faculty)
    return redirect('coordinator-supervisors')
@login_required
def make_supervisor(request, pk):
    """Grant Supervisor privileges to faculty member ``pk``."""
    return _set_faculty_flag(
        request, pk, 'is_supervisor', True,
        "You have been given Supervisor's privileges",
        "Now, you can create groups for student, submit project or proposal requests.")
@login_required
def remove_supervisor(request, pk):
    """Revoke Supervisor privileges from faculty member ``pk``."""
    return _set_faculty_flag(
        request, pk, 'is_supervisor', False,
        "Your Supervisor's privileges has been removed",
        "You can no longer create student groups and manage projects")
@login_required
def make_coordinator(request, pk):
    """Grant Coordinator privileges to faculty member ``pk``."""
    return _set_faculty_flag(
        request, pk, 'is_coordinator', True,
        "You have been given Coordinator's privileges",
        "Now, you can manage faculty members and project requests.")
@login_required
def remove_coordinator(request, pk):
    """Revoke Coordinator privileges from faculty member ``pk``."""
    # Typo fix: the original title read "You Coordinator's ...".
    return _set_faculty_flag(
        request, pk, 'is_coordinator', False,
        "Your Coordinator's privileges has been removed",
        "You can no longer manage faculty or project requests")
@login_required
def make_committee_head(request, pk):
    """Grant Committee Head privileges to faculty member ``pk``."""
    return _set_faculty_flag(
        request, pk, 'is_committee_head', True,
        "You are Committee head now",
        "Now, you can review Projects, member's comments and give feedback to supervisors.")
@login_required
def remove_committee_head(request, pk):
    """Revoke Committee Head privileges from faculty member ``pk``."""
    return _set_faculty_flag(
        request, pk, 'is_committee_head', False,
        "Your Committee head privileges has been removed.",
        "You can't review comments or give feed back to supervisors")
@login_required
def make_committee_member(request, pk):
    """Grant Committee Member privileges to faculty member ``pk``."""
    return _set_faculty_flag(
        request, pk, 'is_committee_member', True,
        "You have been given Committee Member's privileges",
        "Now, you can give feedback to students and projects.")
@login_required
def remove_committee_member(request, pk):
    """Revoke Committee Member privileges from faculty member ``pk``."""
    return _set_faculty_flag(
        request, pk, 'is_committee_member', False,
        "Your Committee Member's privileges has been removed",
        "You can no longer give feedback to students and projects.")
@login_required
def make_evaluation_committee(request, pk):
    """Grant Evaluation Committee privileges to faculty member ``pk``."""
    return _set_faculty_flag(
        request, pk, 'is_evaluation_committee', True,
        "You have been given Evaluation Committee's privileges",
        "Now, you can accept or reject projects.")
@login_required
def remove_evaluation_committee(request, pk):
    """Revoke Evaluation Committee privileges from faculty member ``pk``."""
    return _set_faculty_flag(
        request, pk, 'is_evaluation_committee', False,
        "Your Evaluation Committee's privileges has been removed",
        "You can no longer accept or reject projects.")
@login_required
def activate_faculty(request, pk):
    """Activate/unblock faculty member ``pk`` (no notification sent)."""
    return _set_faculty_flag(request, pk, 'is_active', True)
@login_required
def deactivate_faculty(request, pk):
    """Deactivate/block faculty member ``pk`` (no notification sent)."""
    return _set_faculty_flag(request, pk, 'is_active', False)
@login_required
def coordinator_supervisors(request):
    """
    Display all faculty members under coordinator privileges.

    :param request:
    :return: rendered supervisors page listing every Faculty row
    """
    context = {
        'faculty': Faculty.objects.all()
    }
    return render(request, 'app/faculty/coordinator/supervisors.html', context)
@login_required
def coordinator_projects(request):
    """
    Display all projects under coordinator privileges.

    :param request:
    :return: rendered projects page listing every Project row
    """
    context = {
        'projects': Project.objects.all()
    }
    return render(request, 'app/faculty/coordinator/projects.html', context)
@login_required
def coordinator_forward_project(request, pk):
    """
    Forward a supervisor's project request to the evaluation committee
    under Coordinator privileges, notifying everyone involved.

    :param request:
    :param pk: primary key of the ProjectRequest to forward
    :return: redirect to the coordinator projects page
    """
    # .get(pk=pk) instead of .filter(pk=pk).get(): identical behavior
    # (same DoesNotExist / MultipleObjectsReturned), and consistent with
    # coordinator_reject_project below.
    project_request = ProjectRequest.objects.get(pk=pk)
    project_request.is_forwarded = True
    project_request.save()
    # Tell the supervisor their proposal has moved on.
    title = "Your project {} is under evaluation".format(project_request.project.title)
    msg = "Your project is forwarded to evaluation committee. You will be notified for further procedure or result"
    notify_faculty(title, msg, request.user.faculty, project_request.project.supervisor)
    # Ask every evaluation-committee member to review it.
    title = "{}'s project {} need evaluation".format(project_request.project.supervisor, project_request.project.title)
    msg = "{} has submitted proposal for '{}'. Kindly evaluate it as per required and act accordingly.".format(
        project_request.project.supervisor, project_request.project.title)
    for ec in Faculty.objects.filter(is_evaluation_committee=True).all():
        notify_faculty(title, msg, request.user.faculty, ec)
    return redirect('coordinator-projects')
@login_required
def coordinator_reject_project(request, pk):
    """
    Reject a project requested by a supervisor under Coordinator
    privileges.

    GET renders a confirmation form asking for a reason; POST records
    the rejection and notifies the supervisor.

    :param request:
    :param pk: primary key of the ProjectRequest being rejected
    :return: redirect (POST) or rendered rejection form (GET)
    """
    project_request = ProjectRequest.objects.get(pk=pk)
    if request.method == "POST":
        reason = request.POST["reason"]
        # Deactivate the request and record why it was turned down.
        project_request.is_active = False
        project_request.is_accepted = False
        project_request.reject_reason = reason
        project_request.save()
        title = "Your project {} was rejected by Coordinator".format(project_request.project.title)
        msg = "The reason for project rejection is: {}".format(reason)
        notify_faculty(title, msg, request.user.faculty, project_request.project.supervisor)
        return redirect("coordinator-projects")
    context = {"project": project_request.project}
    return render(request, 'app/faculty/coordinator/reject_project.html', context)
@login_required
def supervisor_projects(request):
    """
    Display all projects supervised by the logged-in faculty member.

    :param request:
    :return: rendered supervisor projects page
    """
    try:
        projects = Project.objects.filter(supervisor=request.user.faculty).all()
        context = {'projects': projects}
    except Project.DoesNotExist:
        # NOTE(review): .filter(...) never raises DoesNotExist (it returns an
        # empty queryset), so this branch looks unreachable; the call that CAN
        # raise here is request.user.faculty (Faculty.DoesNotExist) — confirm.
        context = {'projects': None}
    return render(request, 'app/faculty/supervisor/projects.html', context)
@login_required
def supervisor_new_project_request(request):
    """
    Submit a new project proposal under supervision privileges.

    On POST with an uploaded proposal file, creates the Project and an
    active ProjectRequest, then notifies every coordinator.

    :param request:
    :return: redirect to the supervisor's project list on success,
             otherwise the rendered new-project form
    """
    if request.user.is_authenticated:
        if request.user.faculty.is_supervisor:
            # NOTE(review): request.FILES['project_proposal'] raises
            # MultiValueDictKeyError when no file accompanies the POST;
            # request.FILES.get(...) would be safer.
            if request.method == "POST" and request.FILES['project_proposal']:
                project = Project()
                project.title = request.POST["project_title"]
                project.description = request.POST['project_description']
                project.proposal = request.FILES['project_proposal']
                project.supervisor = request.user.faculty
                project.save()
                # A fresh request starts un-reviewed and active.
                project_request = ProjectRequest()
                project_request.project = project
                project_request.is_accepted = False
                project_request.is_forwarded = False
                project_request.is_need_changes = False
                project_request.is_active = True
                project_request.save()
                # Let every coordinator know a proposal is waiting.
                title = "New Project Request from {}".format(request.user.first_name)
                msg = "{} has request approval of project '{}'.".format(request.user.first_name, project.title)
                for coordinator in Faculty.objects.filter(is_coordinator=True).all():
                    notify_faculty(title, msg, request.user.faculty, coordinator)
                messages.success(request, "Your request for '{}' has been created".format(project.title))
                return redirect("faculty-supervisor-projects")
    return render(request, 'app/faculty/supervisor/new_project.html')
@login_required
def supervisor_groups(request):
    """
    Display all groups under supervision privileges.

    NOTE: placeholder — group loading is not implemented yet.

    :param request:
    :return: rendered groups page (currently with no group data)
    """
    group = None
    context = {'group': group}
    return render(request, 'app/faculty/supervisor/groups.html', context)
@login_required
def supervisor_new_group(request):
    """
    Create a new student group under supervision privileges.

    NOTE: placeholder — project loading is not implemented yet.

    :param request:
    :return: rendered new-group page (currently with no project data)
    """
    projects = None
    context = {'projects': projects}
    return render(request, 'app/faculty/supervisor/new_group.html', context)
@login_required
def supervisor_resubmit_project(request, pk):
    """
    Resubmit a project request after changes were required, under
    supervision privileges.

    :param request:
    :param pk: primary key of the Project being resubmitted
    :return: redirect to the supervisor's projects on success, otherwise
             the rendered project form pre-filled with the project
    """
    project = Project.objects.get(pk=pk)
    if request.user.is_authenticated:
        if request.user.faculty.is_supervisor:
            if request.method == "POST":
                project.title = request.POST["project_title"]
                project.description = request.POST['project_description']
                # NOTE(review): raises MultiValueDictKeyError when no new
                # proposal file is attached — confirm the form requires one.
                project.proposal = request.FILES['project_proposal']
                project.save()
                # Re-activate the request and clear the change flag.
                project.projectrequest.is_active = True
                project.projectrequest.is_need_changes = False
                project.projectrequest.save()
                title = "{} is resubmitted".format(project.title)
                msg = "The project '{}' is resubmitted. Kindly review it and forward it or reject it accordingly" \
                    .format(project.title)
                for coordinator in Faculty.objects.filter(is_coordinator=True).all():
                    notify_faculty(title, msg, request.user.faculty, coordinator)
                return redirect("faculty-supervisor-projects")
    context = {'project': project}
    return render(request, 'app/faculty/supervisor/new_project.html', context)
@login_required
def evaluation_committee_proposals(request):
    """
    Display only forwarded project requests awaiting evaluation, under
    Evaluation Committee privileges.

    :param request:
    :return: rendered proposals page
    """
    context = {"projects": ProjectRequest.objects.filter(is_forwarded=True).all()}
    return render(request, 'app/faculty/evaluation_committee/proposals.html', context)
@login_required
def evaluation_committee_approve(request, pk):
    """
    Approve a forwarded project request under Evaluation Committee
    privileges.

    GET renders the approval form; POST records the approval with the
    committee's comment and notifies the supervisor plus all
    coordinators.

    :param request:
    :param pk: primary key of the ProjectRequest
    :return: redirect (POST) or rendered approval form (GET)
    """
    project_request = ProjectRequest.objects.get(pk=pk)
    context = {"project": project_request}
    if request.method == "POST":
        comment = request.POST["comment"]
        # Accepted: clear every other workflow flag.
        project_request.is_accepted = True
        project_request.is_active = True
        project_request.is_need_changes = False
        project_request.is_forwarded = False
        project_request.comments = comment
        project_request.save()
        title = "Your project is Approved"
        msg = "Your {} project is approved by Evaluation Committee".format(project_request.project.title)
        notify_faculty(title, msg, request.user.faculty, project_request.project.supervisor)
        title = "{}'s project '{}' was approved".format(project_request.project.supervisor,
                                                        project_request.project.title)
        msg = "{}'s project '{}' was approved by committee after evaluation process".format(
            project_request.project.supervisor,
            project_request.project.title)
        for co in Faculty.objects.filter(is_coordinator=True).all():
            notify_faculty(title, msg, request.user.faculty, co)
        return redirect('faculty-ec-proposals')
    return render(request, 'app/faculty/evaluation_committee/accept_project.html', context)
@login_required
def evaluation_committee_changes(request, pk):
    """
    Send a project request back to the supervisor with required changes,
    under Evaluation Committee privileges.

    :param request:
    :param pk: primary key of the ProjectRequest
    :return: redirect (POST) or rendered change-request form (GET)
    """
    project_request = ProjectRequest.objects.get(pk=pk)
    context = {"project": project_request}
    if request.method == "POST":
        comment = request.POST["changes"]
        project_request.is_accepted = False
        project_request.is_active = True
        project_request.is_need_changes = True
        project_request.is_forwarded = False
        project_request.changes_required = comment
        project_request.save()
        title = "Your project Need Changes"
        msg = "Your {} project need following changes: {}".format(project_request.project.title, comment)
        notify_faculty(title, msg, request.user.faculty, project_request.project.supervisor)
        # NOTE(review): this format string has one placeholder but two
        # arguments — the project-title argument is silently ignored.
        title = "{}'s project Need Changes".format(project_request.project.supervisor, project_request.project.title)
        msg = "{}'s project need following changes: {}".format(project_request.project.supervisor, comment)
        for co in Faculty.objects.filter(is_coordinator=True).all():
            notify_faculty(title, msg, request.user.faculty, co)
        return redirect('faculty-ec-proposals')
    return render(request, 'app/faculty/evaluation_committee/suggest_change.html', context)
def notify_faculty(title, text, sender, receiver):
    """
    Create and persist one FacultyNotifications record.

    :param title: Title of notification
    :param text: Message of notification
    :param sender: Sender of notification
    :param receiver: Receiver of notification
    :return:
    """
    FacultyNotifications(
        title=title,
        text=text,
        sender=sender,
        receiver=receiver,
    ).save()
# ******************************************* #
# #
# S T U D E N T S #
# #
# ******************************************* #
@login_required
def student_portal(request):
    """
    Render the student portal after successful login or registration.

    :param request:
    :return: rendered student base template
    """
    return render(request, 'app/students/base.html')
@login_required
def student_projects(request):
    """
    Display any project belonging to the student.

    NOTE: placeholder — project loading is not implemented yet.

    :param request:
    :return:
    """
    projects = None
    context = {'projects': projects}
    return render(request, 'app/students/projects.html', context)
@login_required
def student_group_members(request):
    """
    Display all group members related to the student's group.

    NOTE: placeholder — group loading is not implemented yet.

    :param request:
    :return:
    """
    group = None
    context = {'group': group}
    return render(request, 'app/students/group_members.html', context)
@login_required
def student_events(request):
    """
    Display all events related to the student's group.

    NOTE: placeholder — event loading is not implemented yet.

    :param request:
    :return:
    """
    events = None
    context = {'events': events}
    return render(request, 'app/students/events.html', context)
@login_required
def student_notifications(request):
    """
    Display all notifications related to the student.

    NOTE: placeholder — notification loading is not implemented yet.

    :param request:
    :return:
    """
    notifications = None
    context = {'notifications': notifications}
    return render(request, 'app/students/notifications.html', context)
def notify_student(title, text, sender, receiver):
    """
    Send a notification to the designated student recipient.

    BUG FIX: the original carried @login_required, but this is a plain
    helper whose first argument is a title, not a request — the auth
    decorator would treat ``title`` as the request object and fail at
    runtime.  Removing it also makes this consistent with
    notify_faculty above, which has no decorator.

    :param title: Title of notification
    :param text: Message of notification
    :param sender: Sender of notification
    :param receiver: Receiver of notification
    :return:
    """
    notification = StudentNotifications()
    notification.title = title
    notification.text = text
    notification.sender = sender
    notification.receiver = receiver
    notification.save()
|
import pandas as pd
def weekday_weekend_traffic_differences(df, week):
    """
    Calculates the difference between weekday and weekend traffic
    to approximate the number of commuters coming through the station.
    Args:
        df (DataFrame): A DataFrame for which the difference should be
            calculated.  Assumed columns: STATION, WDAY (numeric day of
            week, where > 4 presumably means weekend), WEEK, HOUR,
            LATITUDE, LONGITUDE, ENTRIES, EXITS and a reset-index column
            named "index" — TODO confirm against the caller.
        week: The WEEK value to select before aggregating.
    Returns:
        A New Dataframe with the entry and exit differences
        (Entry_diffs / Exit_diffs) per station
    """
    # Total entries/exits per station per day of the selected week.
    daily_entries = df.loc[df["WEEK"] == week].groupby(
        ["STATION", "WDAY", "LATITUDE", "LONGITUDE"]).sum().reset_index().sort_values(
        ["STATION", "WDAY"], ascending=False)
    daily_entries["WEEKEND"] = daily_entries["WDAY"] > 4
    # Average daily totals split by weekday vs weekend.
    average_daily_week_v_weekend_entries = daily_entries.groupby(
        ["STATION", "WEEKEND", "LATITUDE", "LONGITUDE"]).mean().reset_index().drop(
        ["WDAY", "index", "WEEK", "HOUR"], axis=1)
    # diff(periods=-1) subtracts the weekend row from the weekday row
    # within each station group.
    average_daily_week_v_weekend_entries["Entry_diffs"] = average_daily_week_v_weekend_entries.groupby(
        ["STATION", "LATITUDE", "LONGITUDE"]).diff(periods=-1)["ENTRIES"]
    average_daily_week_v_weekend_entries["Exit_diffs"] = average_daily_week_v_weekend_entries.groupby(
        ["STATION", "LATITUDE", "LONGITUDE"]).diff(periods=-1)["EXITS"]
    # Drop the NaN rows left by diff() and the raw columns.
    average_daily_week_v_weekend_entries = average_daily_week_v_weekend_entries.dropna(how='any')
    average_daily_week_v_weekend_entries = average_daily_week_v_weekend_entries.drop(["ENTRIES",
                                                                                      "WEEKEND", "EXITS"], axis=1)
    return average_daily_week_v_weekend_entries
# Convert a number of seconds (read from stdin) into days, hours,
# minutes and seconds.
segundos_str = input("Por Favor, entre com o número de segundos que deseja converter:")
total_segs = int(segundos_str)
# divmod returns (quotient, remainder) in a single step.
dias, resto = divmod(total_segs, 24 * 3600)
horas, resto = divmod(resto, 3600)
minutos, segs_restantes_final = divmod(resto, 60)
print(dias, "dias,", horas, "horas,", minutos, "minutos e", segs_restantes_final, "segundos.")
from .analysis import *
from .mean import *
from .stats import *
|
# run PyCharm as administrator
import subprocess
from subprocess import Popen, PIPE
import os
import wgetter
import time
from _base.base_actions import BaseActions
# # x = subprocess.Popen(['D:\\verismicCleanUp.cmd']) #work
# clean_up = subprocess.Popen(['C:\\VCMS\\verismicCleanUp.cmd']) #home
# # print clean_up
# time.sleep(120)
# download_agent = \
# wgetter.download('https://testteamtest.cloudmanagementsuite.com/WebService/api/v1/Downloads/vRepSetup.msi', outdir='C:\\VCMS')
# print download_agent
# time.sleep(20)
# install_agent = os.system('msiexec /i %s /qn' % 'C:\\VCMS\\vRepSetup-testteamtest.msi')
# print install_agent
# time.sleep(60)
class DownloadAndInstall(BaseActions):
    """Clean up, download and install the agent MSI on a Windows device.

    NOTE(review): paths and the download URL are hard-coded, waits are
    fixed sleeps with no exit-code checks, and the module comment says
    PyCharm must run as administrator — confirm these constraints.
    """
    # x = subprocess.Popen(['D:\\verismicCleanUp.cmd']) #work
    def clean_up_device(self):
        # shell=True runs the .cmd via cmd.exe; the Popen handle is not
        # waited on — the fixed 90 s sleep stands in for completion.
        subprocess.Popen(['C:\\VCMS\\verismicCleanUp.cmd'], shell = True) #home
        # subprocess.Popen(['runas', '/user:VKYV\igork', 'C:\\VCMS\\verismicCleanUp.cmd'], shell=True) # home
        time.sleep(90)
        self.logger.info("Clean_up on device is finished")
    def download_agent(self):
        # wgetter (third-party) fetches the MSI into C:\VCMS.
        installer = wgetter.download('https://testteamtest.cloudmanagementsuite.com/WebService/api/v1/Downloads/vRepSetup.msi', outdir='C:\\VCMS')
        # wgetter.download(str(url), outdir='C:\\VCMS')
        time.sleep(10)
        self.logger.info("Installation is downloaded" + str(installer))
    def install_agent(self):
        # os.system('msiexec /i %s /qn' % 'C:\\VCMS\\vRepSetup-testteamtest.msi')
        # Silent MSI install; os.system returns the msiexec exit status,
        # which is logged but not checked.
        install = os.system('msiexec /i %s /qn' % 'C:\\VCMS\\vRepSetup-testteamtest.msi')
        time.sleep(60)
        self.logger.info("Installation is finished" + str(install))
# class DownloadAndInstall:
#
# def clean_up_agent_on_device(self):
# x = subprocess.Popen(['D:\\verismicCleanUp.cmd'])
# print x
# time.sleep(60)
#
# def download_installer(self, filename = None, path = None):
# # This method works only if PyCharm is running with the admin cridentials (run PyCharm as administrator)
# file_name = wgetter.download('https://testteamtest.cloudmanagementsuite.com/WebService/api/v1/Downloads/vRepSetup.msi', outdir='D:\\')
#
# def install_package(self):
# os.system('msiexec /i %s /qn' % 'C:\\vRepSetup-testteamtest.msi')
# cmd = 'msiexec /i C:\\vRepSetup-testteamtest.msi /qn'
# p = subprocess.Popen(['runas', '/user:VKYV\igork', '', cmd], shell=True)
# print p
# p.poll()
# p.wait()
# cmd = 'ping google.com -c 3'
# PIPE = subprocess.PIPE
# p = subprocess.Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE,
# stderr=subprocess.STDOUT, close_fds=True)
# while True:
# s = p.stdout.readline()
# if not s: break
# print s,
# x = subprocess.Popen(['runas', '/user:VKYV\igork', '','C:\\Users\\igork.VKYV\\Desktop\\verismicCleanUp.cmd'], shell = True)
# print x
# cmd = 'msiexec /i %s /qb' %'D:\\ResponderSetup-testteamtest.msi'
# p = subprocess.call(['runas', '/user:VKYV\igork', '', cmd], shell = True)
# file_name = wgetter.download('https://testteamtest.cloudmanagementsuite.com/WebService/api/v1/Downloads/ResponderSetup.msi', outdir='C:\\')
# os.system('msiexec /i %s /qn' % 'C:\\vRepSetup-testteamtest.msi')
# x= os.system('Notepad.exe')
# print x
# p = subprocess.call('msiexec /i %s /qn' % ('C:\\vRepSetup-testteamdev.msi'), shell=True)
#run as administrator
# x = subprocess.Popen(['msiexec /i %s /qn' % 'C:\\vRepSetup-testteamtest.msi'], shell = True)
# x.poll()
# x.wait()
# z = subprocess.call(['runas', '/user:igork.VKYV', '','D:\\ResponderSetup-testteamtest.msi'])
# c = subprocess.Popen('runas / user:igork.VKYV' 'cmd.exe')
# y = subprocess.call(['runas/user:VKYV\igork', '','msiexec /i %s /qn' % 'D:\\vRepSetup-testteamtest.msi'], shell = True)
# x = subprocess.call(['runas', '/user:Administrator', 'ADMIN_PASS','vRepSetup-testteamdev.msi'])
# print "X is:", x
|
from django.shortcuts import render, redirect
from django.http import HttpResponse
from .forms import *
from .models import *
from .slice import *
# Create your views here.
def main(request):
    """Minimal landing view: respond with a static HTML heading."""
    body = "<h1>Hello</h1>"
    return HttpResponse(body)
def upload(request):
    """Handle Book uploads.

    On a valid POST, save the form and re-render the page with the saved
    instance; on GET (or invalid POST) show the form plus all Book objects.
    """
    if request.method == 'POST':
        form = BookForm(request.POST, request.FILES)
        if form.is_valid():
            form.save()
            obj = form.instance  # the Book instance just saved by form.save()
            print(type(obj.page))  # NOTE(review): debug print — consider removing
            return render(request, 'books/upload.html', {"obj": obj})
    else:
        form = BookForm()
    # reached on GET, or on POST with an invalid form (form keeps its errors)
    page = Book.objects.all()
    return render(request, 'books/upload.html', {"page": page, "form": form})
def view(request):
    """Render every Book under the 'view' key of the template context."""
    all_books = Book.objects.all()
    return render(request, 'books/view.html', {'view': all_books})
|
from .serialize import Serialize
from .serializemd import SerializeMd
from .hfml import SerializeHFML
from .foot_note import SerializeFootNote |
import sys
def get_ints():
    """Read one line from stdin and return an iterator over its integers."""
    line = sys.stdin.readline()
    return map(int, line.strip().split())
def get_string():
    """Read one line from stdin and return its whitespace-separated tokens."""
    raw = sys.stdin.readline()
    return raw.split()
# Demo driver: first stdin line -> list of ints, second -> list of tokens.
a = list(get_ints())
s = get_string()
print((a))  # NOTE(review): extra parentheses are redundant
print(list(s))  # s is already a list; list() just makes a shallow copy
# -*- coding: utf-8 -*-
import pytest
from schematics.datastructures import Context
from schematics.models import Model
from schematics.types import *
from schematics.exceptions import ConversionError, ValidationError, DataError
def test_ipv4_type():
    """A valid dotted quad validates; malformed addresses raise."""
    assert IPv4Type().validate('255.255.255.255')
    bad_addrs = ['1', '255.256.255.255', '255.255.255.2555']
    for bad in bad_addrs:
        with pytest.raises(ValidationError):
            IPv4Type().validate(bad)
def test_ipv6_type():
    """Well-formed IPv6 literals validate; malformed ones raise."""
    field = IPv6Type()
    good = (
        'fe80::223:6caf:fe76:c12d',
        '2001:14ba:ff:a000:223:6caf:fe76:c12d',
        '::255.255.255.255',
        '::1',
    )
    for candidate in good:
        field.validate(candidate)
    bad = (
        '',
        '::255.256.255.255',
        ':255.255.255.255',
        '2001:ff:a000:223:6caf:fe76:c12d',
        'fe80::223:6caff:fe76:c12d',
    )
    for candidate in bad:
        with pytest.raises(ValidationError):
            field.validate(candidate)
def test_ip_type():
    """IPAddressType accepts both IPv4 and IPv6 literals."""
    for addr in ('255.255.255.255', 'fe80::223:6caf:fe76:c12d'):
        assert IPAddressType().validate(addr)
def test_mac_type():
    """Accept common MAC notations, reject malformed ones, and check that
    mock() output and to_primitive() both round-trip through validate()."""
    # every supported notation: dashes, colons, bare hex, mixed, dotted
    addrs = [
        '00-00-00-00-00-00',
        '03:0F:25:B7:10:1E',
        '030F25B7104E',
        '030F25:B7104E',
        '030F25-B7104E',
        '030F.25B7.104E',
    ]
    for addr in addrs:
        assert MACAddressType().validate(addr)
    # wrong lengths, inconsistent separators, non-hex digits must all fail
    addrs = [
        '00-00-00-00-00',
        '00:00-00-00-00-00',
        '00:00-00-00-00-00',
        '030F25B7104',
        '030F25B7104Z',
        '30F25:B7104E',
        '030F2-B7104E',
        '030F:25B7.104E',
    ]
    for addr in addrs:
        with pytest.raises(ValidationError):
            MACAddressType().validate(addr)
    # a mocked value must itself be valid
    mock = MACAddressType(required=True).mock()
    assert MACAddressType().validate(mock)
    # to_primitive() output must stay valid
    s = MACAddressType().to_primitive(value='00-00-00-00-00-00')
    assert MACAddressType().validate(s)
def test_url_type_with_valid_urls():
    """URLs that a default URLType (and one with fqdn=False) must accept."""
    field = URLType()
    urls = [
        u'https://x.' + 'x' * 63 + '.com',
        u'https://123456789.' + ('x' * 59 + '.') * 4 + 'com',  # len = 253
        u'https://123456789.' + ('x' * 59 + '.') * 4 + 'com.',  # len = 253 + '.'
        u'https://example.fi',
        u'http://foo-bar.example.com',
        u'HTTP://example.com:80',
        u'http://-user:123:%:456(z)@example.com:80',
        u'http://example.com/a/b/../c+d/e;f/~jdoe/@?q(x=1;y=2)&r=0#yo!',
        u'http://example.com./a/',
        u'http://crème-brûlée.tld/menu/à%20la%20carte/',
        u'http://はじめよう.みんな',
        u'http://xn--p8j9a0d9c9a.xn--q9jyb4c',
        u'http://∫ç√œΩ@example.com/?µ=0.3&∂=0.1',
        u'http://user:123@127.0.0.1',
        u'http://127.0.0.1:99999/',
        u'http://127.0.0.1:99999/qweasd',
        u'http://[2001:4802:7901::e60a:1375:0:5]',
        u'http://[2001:4802:7901::e60a:1375:0:5]:99999',
    ]
    for url in urls:
        field.validate(url)
    # with fqdn=False, bare hosts / non-qualified names are allowed too
    field = URLType(fqdn=False)
    urls = [
        u'https://1',
        u'https://111.q2w',
        u'https://localhost',
    ]
    for url in urls:
        field.validate(url)
def test_url_type_with_invalid_url():
    """URLs the default (fqdn-requiring) URLType must reject."""
    field = URLType()
    urls = [
        u'https://1',
        u'https://111.q2w',
        u'https://localhost',
        u'http:example.com',
        u'https://example.f',
        u'https://example.fi0',
        u'ftp://example.com',
        u'https://x.' + 'x' * 64 + '.com',
        u'https://1234567890.' + ('x' * 59 + '.') * 4 + 'com',  # len = 254
        u'http://-foobar.example.com',
        u'http://qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq-.example.com',
        u'http://example.com../a/',
        u'http://ex..ample.com/a/',
        u'http://.example.com/a/',
        u'http://exam%70le.com/a/',
        u'http://example.com|/a/',
        u'http://example.com/a b/',
        u'http://foo_bar.example.com',
        u'http://xn--abcdäedfg.xn--q9jyb4c',  # ACE prefix + non-ASCII character
        u'http://example.com/a/\x7F',  # illegal ASCII character
        u'http://127.0.0.1:999999/',
        u'http://2001:4802:7901::e60a:1375:0:5',
    ]
    for url in urls:
        with pytest.raises(ValidationError):
            field.validate(url)
def test_url_type_with_unreachable_url():
    """With verify_exists=True a URL that cannot be reached must fail."""
    field = URLType(verify_exists=True)
    with pytest.raises(ValidationError):
        field.validate('http://127.0.0.1:99999/')
def test_email_type_with_valid_addresses():
    """Quoted local parts and ordinary addresses must all validate."""
    field = EmailType()
    good = (
        r'"()\\\<>[]:,;@!\"#$%&*+-/=?^_`{}|~.a"@example.org',
        u'"foo bar baz"@example.org',
        u'Z@foo.zz',
        u'123.qwe.asd@foo.bar.baz',
    )
    for address in good:
        field.validate(address)
def test_email_type_with_invalid_addresses():
    """Broken quoting, non-ASCII local parts and bare hosts must fail."""
    field = EmailType()
    bad = (
        r'"qweasd\"@example.org',
        u'"qwe"asd"@example.org',
        u'curaçao@example.org',
        u'foo@local',
    )
    for address in bad:
        with pytest.raises(ValidationError):
            field.validate(address)
|
import sys
input = sys.stdin.readline  # fast reader; intentionally shadows builtin input()

# Read n sizes and interleave small/large so neighbours alternate:
# output = low[0], high[0], low[1], high[1], ... (middle element last for odd n).
num = int(input())
# sort descending in one pass (reverse=True) instead of sorted(list(...))[::-1]
pineapples = sorted(map(int, input().split()), reverse=True)
low = pineapples[num // 2:]           # smaller half (still descending)
high = pineapples[:num // 2][::-1]    # larger half, re-reversed to ascending
new = []
for i in range(num // 2):
    new += [low[i], high[i]]
if num % 2 != 0:
    new.append(low[-1])               # odd n: leftover smallest element goes last
print(' '.join(map(str, new)))        # join consumes the map directly; no list() needed
|
# Copyright 2012 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
import io
import re
import subprocess
import sys
try:
from configparser import Error as ConfigParserError, RawConfigParser
except ImportError:
from ConfigParser import Error as ConfigParserError, RawConfigParser
from portage import _encodings, _unicode_encode, _unicode_decode
from portage.util import writemsg
def parse_desktop_entry(path):
	"""
	Parse the given file with RawConfigParser and return the
	result. This may raise an IOError from io.open(), or a
	ParsingError from RawConfigParser.
	"""
	parser = RawConfigParser()
	# use read_file/readfp in order to control decoding of unicode
	try:
		# Python >=3.2
		read_file = parser.read_file
	except AttributeError:
		# older Pythons only have the deprecated readfp
		read_file = parser.readfp
	# decode with the repo content encoding, replacing undecodable bytes
	with io.open(_unicode_encode(path,
		encoding=_encodings['fs'], errors='strict'),
		mode='r', encoding=_encodings['repo.content'],
		errors='replace') as f:
		content = f.read()
	# In Python 3.2, read_file does not support bytes in file names
	# (see bug #429544), so use StringIO to hide the file name.
	read_file(io.StringIO(content))
	return parser
# matches validator warnings that only restate a redundant value
_trivial_warnings = re.compile(r' looks redundant with value ')
# exact validator messages (after the "path: " prefix is stripped) to drop
_ignored_errors = (
	# Ignore error for emacs.desktop:
	# https://bugs.freedesktop.org/show_bug.cgi?id=35844#c6
	'error: (will be fatal in the future): value "TextEditor" in key "Categories" in group "Desktop Entry" requires another category to be present among the following categories: Utility',
)
def validate_desktop_entry(path):
	"""
	Run desktop-file-validate on *path* and return its output lines,
	with known-ignorable errors and trivial redundancy warnings removed.

	:param path: path of the .desktop file to validate
	:return: list of remaining warning/error lines (empty if clean)
	"""
	args = ["desktop-file-validate", path]
	if sys.hexversion < 0x3000000 or sys.hexversion >= 0x3020000:
		# Python 3.1 does not support bytes in Popen args.
		args = [_unicode_encode(x, errors='strict') for x in args]
	proc = subprocess.Popen(args,
		stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
	output_lines = _unicode_decode(proc.communicate()[0]).splitlines()
	proc.wait()
	if output_lines:
		filtered_output = []
		for line in output_lines:
			# line[len(path)+2:] strips the leading "<path>: " prefix
			# that desktop-file-validate prepends to each message
			if line[len(path)+2:] in _ignored_errors:
				continue
			filtered_output.append(line)
		output_lines = filtered_output
	if output_lines:
		# drop warnings that merely note a redundant value
		output_lines = [line for line in output_lines
			if _trivial_warnings.search(line) is None]
	return output_lines
if __name__ == "__main__":
	# CLI mode: validate every desktop file given on the command line
	for arg in sys.argv[1:]:
		for line in validate_desktop_entry(arg):
			writemsg(line + "\n", noiselevel=-1)
|
from .CurrencyExchangeRepository import CurrencyExchangeRepository
|
#!/usr/bin/env python3
import subprocess
# Launch a Celery worker for the 'socinsta' app on queues deneme1 and volta.
# With shell=True the command should be a single string, not a list: on POSIX
# only the first list element is treated as the shell command. The original
# list form worked only because the list had exactly one element.
subprocess.call('celery -A socinsta worker -l info -Q deneme1,volta', shell=True)
|
# Generated by Django 2.2.7 on 2019-12-26 13:12
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: alters the 'type' field choices on the
    notification model (comment_review / comment_reply / like_review /
    following)."""
    dependencies = [
        ('statics', '0008_auto_20191226_2211'),
    ]
    operations = [
        migrations.AlterField(
            model_name='notification',
            name='type',
            field=models.CharField(choices=[('comment_review', 'Comment Review'), ('comment_reply', 'Comment Reply'), ('like_review', 'Like Review'), ('following', 'Following')], max_length=255, verbose_name='알림 유형'),
        ),
    ]
|
начало:
рассчитать матрицу расстояний
цикл (пока кластеров > 2):
найти минимальное расстояние в матрице расстояний <=> расстояние между наиболее близкими кластерами
объединить выбранный кластер (с мин. расстоянием)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Simple example for a search & report tool using the pyILT2 library.
(c) 2018 Frank Roemer; see http://wgserve.de/pyilt2
Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
"""
from . import (properties, prop2abr, abr2prop, query, __version__)
import argparse
import datetime
import sys
import time
import threading
import os
import requests
# version of the search & report tool
__prgversion__ = '1.1'
__prgdescrpt__ = 'A search and report tool for the ILThermo v2.0 database from NIST (http://ilthermo.boulder.nist.gov).'
# ===============================================================================
# local classes and functions
# ===============================================================================
class Spinner:
    """A class providing a spinning cursor for cli tools."""
    busy = False   # True while the background thread should keep animating
    delay = 0.1    # seconds between animation frames
    @staticmethod
    def spinning_cursor():
        # endless generator cycling through the four spinner glyphs
        while 1:
            for cursor in '|/-\\': yield cursor
    def __init__(self, delay=None):
        self.spinner_generator = self.spinning_cursor()
        # accept any truthy numeric delay; float() raises on bad input
        if delay and float(delay): self.delay = delay
    def spinner_task(self):
        # worker loop: draw one frame, wait, erase it with a backspace
        while self.busy:
            sys.stdout.write(next(self.spinner_generator))
            sys.stdout.flush()
            time.sleep(self.delay)
            sys.stdout.write('\b')
            sys.stdout.flush()
    def start(self):
        """Start the spinner."""
        self.busy = True
        threading.Thread(target=self.spinner_task).start()
    def stop(self):
        """Stop the spinner."""
        self.busy = False
        # give the worker thread one frame to notice busy == False
        time.sleep(self.delay)
# create a single spinner object shared by the CLI helper functions below
spinner = Spinner()
def printPropAbbrList():
    """
    Print a two-column table to *stdout* mapping each physical-property
    abbreviation (as accepted by :func:`pyilt2.query`) to its full name.
    """
    print('{0:>6s} {1:s}'.format('Abbr.', 'Property'))
    print('------ -----------------------------------------')
    for abbr in sorted(abr2prop):
        print('{0:>6s} {1:s}'.format(abbr, abr2prop[abbr]))
def printResultTable(resObj):
    """
    Print a result table (similar to the web version) to *stdout*, like::
         # ref                  prop     np components(s)
        ---- -------------------- ------ ---- ----------------------------------------
           0 Krolikowska2012      dens     65 1-ethyl-3-methylimidazolium thiocyanate
           1 Klomfar2015a         dens     37 1-ethyl-3-methylimidazolium thiocyanate
           2 Freire2011           dens     18 1-ethyl-3-methylimidazolium thiocyanate
           3 Neves2013b           dens     18 1-ethyl-3-methylimidazolium thiocyanate

    :param resObj: query result to display, indexed 0..len-1
    """
    print('\n   # {0:20s} {1:6s} {2:>4s} {3:s}'.format('ref', 'prop', 'np', 'components(s)'))
    print('{0:s} {1:s} {2:s} {3:s} {4:s}'.format('-' * 4, '-' * 20, '-' * 6, '-' * 4, '-' * 40))
    for i in range(0, len(resObj)):
        r = resObj[i]
        # one row per reference: index, short citation, abbreviation, points, components
        print('{0:4d} {1:20s} {2:6s} {3:4d} {4:s}'.
              format( i, r.sref, prop2abr[r.prop], r.np, ' | '.join(r.listOfComp) ) )
def metaDataStr(datObj):
    """
    Returns the meta data of an :class:`pyilt2.dataset` object as a *string*, like::

        Property:
          Specific density
        Reference:
          "Densities, isobaric expansivities and isothermal compressibilities [...].",
          Krolikowska, M.; Hofman, T. (2012) Thermochim. Acta 530, 1-6.
        Component(s):
          1) 1-ethyl-3-methylimidazolium thiocyanate
        Method: Vibrating tube method
        Phase(s): Liquid
        Data columns:
          1) Temperature/K
          2) Pressure/kPa
          ...

    :param datObj: dataset object
    :type datObj: :class:`pyilt2.dataset`
    :return: meta data
    :rtype: str
    """
    # the property name is the part after the last ':' of the set title
    out = 'Property:\n  {0:s}\n'.format(datObj.setDict['title'].split(':')[-1].strip())
    out += 'Reference:\n'
    out += '  "{0:s}",\n'.format(datObj.setDict['ref']['title'])
    out += '  {0:s}\n'.format(datObj.setDict['ref']['full'])
    out += 'Component(s):\n'
    for i in range(0, datObj.numOfComp):
        out += '  {0:d}) {1:s}\n'.format(i+1, datObj.listOfComp[i])
    # method and solvent lines are optional: only emitted when present
    if datObj.setDict['expmeth']:
        out += 'Method: {0:s}\n'.format(datObj.setDict['expmeth'])
    out += 'Phase(s): {0:s}\n'.format(', '.join(datObj.setDict['phases']))
    if datObj.setDict['solvent']:
        out += 'Solvent: {0:s}\n'.format(datObj.setDict['solvent'])
    out += 'Data columns:\n'
    for i in range(0, len(datObj.headerList)):
        out += '  {0:d}) {1:s}\n'.format(i+1, datObj.headerList[i])
    out += 'Data points: {0:d}\n'.format(datObj.np)
    out += 'ILT2 setid:  {0:s}\n'.format(datObj.setid)
    return out
def writeReport(listOfDataSets, reportDir=None, resDOI=False, verbose=False):
    """
    Write a report folder containing one data file per dataset plus a
    ``report.txt`` with all meta data (and, optionally, resolved DOIs).

    :param listOfDataSets: datasets to dump
    :type listOfDataSets: list of :class:`pyilt2.dataset`
    :param reportDir: output folder; defaults to a timestamped name
    :param resDOI: try to resolve each citation to a DOI via Crossref
    :param verbose: print progress messages
    :return: name of the report folder
    :rtype: str
    """
    dtnow = datetime.datetime.now()
    if not reportDir:
        # NOTE(review): ':' in the default folder name is invalid on Windows
        reportDir = 'pyilt2report_' + dtnow.strftime("%Y-%m-%d_%H:%M:%S")
    os.mkdir(reportDir)
    if verbose:
        print('\nWrite report to folder: '+reportDir)
        print(' << report.txt')
    # context manager guarantees report.txt is closed even if a write fails
    with open(reportDir + '/report.txt', 'w') as rep:
        rep.write(dtnow.strftime("%d. %b. %Y (%H:%M:%S)") + '\n')
        rep.write('-' * 24 + '\n')
        for i, dataSet in enumerate(listOfDataSets):
            dataFile = 'ref{0:d}.dat'.format(i)
            # write data file
            dataSet.write(reportDir + '/' + dataFile)
            if verbose:
                print(' << {0:s} [{1:s}]'.format(dataFile, dataSet.setid))
            # write meta data to report file
            # (dropped a stray second .format() argument that was never used)
            rep.write('\nRef. #{0:d}\n'.format(i))
            rep.write('=' * 10 + '\n')
            rep.write( metaDataStr(dataSet) )
            if resDOI:
                if verbose:
                    print(' >> resolve DOI ... ', end='')
                    spinner.start()
                try:
                    (doi, url, score) = citation2doi(dataSet.fullcite)
                except Exception as e:
                    # DOI resolution is best effort: report the error, keep going
                    if verbose:
                        spinner.stop()
                    print('Error: {0:s}'.format(str(e)))
                else:
                    if verbose:
                        spinner.stop()
                        print('\b {0:s} (score: {1:f}) done!'.format(doi, score))
                    rep.write('DOI: {0:s} (score: {1:f})\n'.format(doi, score))
                    rep.write('URL: {0:s}\n'.format(url))
    return reportDir
def doicache(func):
    """ Decorator function for :func:`citation2doi` providing a cache. """
    memo = {}
    def func_wrapper(citation):
        key = hash(citation)
        if key not in memo:
            memo[key] = func(citation)
        return memo[key]
    # this we do for sphinx.autodoc!
    func_wrapper.__doc__ = func.__doc__
    return func_wrapper
@doicache
def citation2doi( citation ):
    """
    Resolves a citation string like the respective DOI ,URL and a score.
    Therefore we use Crossref's REST API: https://github.com/CrossRef/rest-api-doc

    .. code-block:: py

       >>> cite='Lennard-Jones, J. E. "Cohesion" Proc. Phys. Soc., 1931, 43, 461-482'
       >>> print( citation2doi(cite) )
       ('10.1088/0959-5309/43/5/301',
        'http://dx.doi.org/10.1088/0959-5309/43/5/301',
        69.865814)

    :param citation: citation in *natural* form
    :type citation: str
    :return: DOI, URL, score
    :rtype: tuple
    """
    url = 'https://api.crossref.org/works'
    payload = {'query.bibliographic': citation}
    r = requests.get(url, params=payload)
    # take the best-scoring match; raises (KeyError/IndexError) on empty results,
    # which callers catch and report
    r = r.json()['message']['items'][0]
    return ( r['DOI'], r['URL'], r['score'] )
def cliQuery(comp='', numOfComp=0, year='', author='', keywords='', prop='', verbose=True):
    """
    This is a wapper function for :func:`pyilt2.query` which is suitable for cli tools.
    It shows a spinning cursor while waiting for the answer from the web server and includes error handling.

    :param comp: Chemical formula (case-sensitive), CAS registry number, or name (part or full)
    :type comp: str
    :param numOfComp: Number of mixture components. Default '0' means *any* number.
    :type numOfComp: int
    :param year: Publication year
    :type year: str
    :param author: Author's last name
    :type author: str
    :param keywords: Keyword(s)
    :type keywords: str
    :param prop: Physical property by abbreviation. Default '' means *unspecified*.
    :type prop: str
    :param verbose: Show messages and spinning cursor while waiting.
    :type verbose: bool
    :return: result object
    :rtype: :class:`pyilt2.result`
    """
    resObj=None
    if verbose:
        print('Make query to NIST... ', end='')
        spinner.start()
    try:
        resObj = query(comp=comp,
                       numOfComp=numOfComp,
                       year=year,
                       author=author,
                       keywords=keywords,
                       prop=prop)
    # except Exception (not bare except) so Ctrl-C / SystemExit still propagate
    except Exception as e:
        if verbose:
            spinner.stop()
        print('Error: {0:s}'.format(str(e)))
        exit(1)
    else:
        if verbose:
            spinner.stop()
            print('\b done! ({0:d} hits)'.format(len(resObj)))
    return resObj
def getAllData(resObj, verbose=False):
    """
    Requests the data sets for all references of a :class:`pyilt2.result`
    object and returns them as a list.

    :param resObj: A result object
    :type resObj: :class:`pyilt2.result`
    :param verbose: Show messages and spinning cursor while waiting.
    :return: List of :class:`pyilt2.dataset` objects
    """
    dataSets = []
    if verbose:
        print('\nRequest data sets from NIST:')
    for i in range(0, len(resObj)):
        if verbose:
            print(' >> {0:s} [{1:s}] ... '.format(resObj[i].ref, resObj[i].setid), end='')
            spinner.start()
        try:
            dataSets.append(resObj[i].get())
        # except Exception (not bare except) so Ctrl-C / SystemExit still propagate
        except Exception as e:
            if verbose:
                spinner.stop()
            print('Error: {0:s}'.format(str(e)))
            exit(1)
        else:
            if verbose:
                spinner.stop()
                print('\b done!')
    return dataSets
def _getArgParser():
    """Argument parser for pyilt2report cli tool.

    :return: configured :class:`argparse.ArgumentParser`
    """
    parser = argparse.ArgumentParser(description=__prgdescrpt__,
                                     epilog="Type 'man pyilt2report' for more information.")
    # search criteria (mirror the keyword arguments of cliQuery)
    parser.add_argument('-c', type=str, metavar='str',
                        help='chemical formula, CAS registry number, or name (part or full)', default='')
    parser.add_argument('-n', type=int, metavar='0',
                        help='number of mixture components. Default: 0 = any number.', default=0)
    parser.add_argument('-y', type=str, metavar='2018',
                        help='publication year', default='')
    parser.add_argument('-a', type=str, metavar='name',
                        help='author’s last name', default='')
    parser.add_argument('-k', type=str, metavar='str',
                        help='keyword(s)', default='')
    parser.add_argument('-p', type=str, metavar='prop',
                        help='physical property by abbreviation.', default=None)
    # output / behaviour switches
    parser.add_argument('-o', '--out', type=str, metavar='dir',
                        help='result folder for output files', default=None)
    parser.add_argument('--doi',  action='store_true',
                        help='try to resolve DOI from citation (experimental!)', default=False)
    parser.add_argument('--auto', action='store_true',
                        help='dont ask if to proceed creating report', default=False)
    parser.add_argument('--props', action='store_true',
                        help='show properties abbreviations and exit', default=False)
    parser.add_argument('--version', action='version',
                        version="%(prog)s " + __prgversion__ + " (pyilt2 " + __version__ + ")")
    return parser
# ===============================================================================
# run
# ===============================================================================
def run():
    """CLI main entry point.

    Parses arguments, queries NIST, shows the hit list, optionally asks for
    confirmation, downloads all data sets and writes the report folder.
    """
    # get command line arguments
    parser = _getArgParser()
    args = parser.parse_args()
    # show properties abbreviations and exit (option: --props)
    if args.props:
        printPropAbbrList()
        exit(0)
    # check the 'phys. property' search option
    sprop = ''
    if args.p:
        if args.p not in list(abr2prop.keys()):
            print('Error! Invalid abbreviation "{0:s}" for physical property.'.format(args.p))
            exit(1)
        else:
            sprop = args.p
    # makes the request to the NIST database
    res = cliQuery(comp=args.c, numOfComp=args.n, year=args.y,
                   author=args.a, keywords=args.k, prop=sprop, verbose=True)
    # show results and ask if to proceed
    printResultTable(res)
    if not args.auto:
        # empty answer (plain Enter) counts as yes
        print('\nProceed? [Y]/n ', end='')
        answ = sys.stdin.readline().strip()
        if answ not in ['', 'y', 'Y']:
            print('Abort by user!')
            exit(1)
    # get full data sets for _all_ references
    dataSets = getAllData(res, verbose=True)
    # write report
    dname = writeReport(dataSets, verbose=True, resDOI=args.doi, reportDir=args.out)
    # print('\nReport written to ' + dname)
    print('pyilt2report finished!')
# Script entry point
if __name__ == "__main__":
    run()
import os
import uuid
from unittest.mock import patch
import fsspec
import pytest
from rubicon_ml import domain
from rubicon_ml.repository import LocalRepository
from rubicon_ml.repository.utils import slugify
def test_initialization():
    """A LocalRepository must use the 'file' protocol with a local filesystem."""
    repo = LocalRepository(root_dir="/local/root")
    assert repo.PROTOCOL == "file"
    assert type(repo.filesystem) == fsspec.implementations.local.LocalFileSystem
@patch("fsspec.implementations.local.LocalFileSystem.open")
@patch("fsspec.implementations.local.LocalFileSystem.mkdirs")
def test_persist_bytes(mock_mkdirs, mock_open):
    """_persist_bytes must create the parent dirs and open the target in 'wb' mode."""
    # was b"test data {uuid.uuid4()}" — the f-prefix was missing (and bytes
    # literals cannot be f-strings), so every run used the same literal
    # payload; build the unique payload via str.encode() instead
    bytes_data = f"test data {uuid.uuid4()}".encode()
    bytes_path = "/local/root/path/to/data"
    local_repo = LocalRepository(root_dir="/local/root")
    local_repo._persist_bytes(bytes_data, bytes_path)
    mock_mkdirs.assert_called_once_with(os.path.dirname(bytes_path), exist_ok=True)
    mock_open.assert_called_once_with(bytes_path, "wb")
@patch("fsspec.implementations.local.LocalFileSystem.open")
@patch("fsspec.implementations.local.LocalFileSystem.mkdirs")
def test_persist_domain(mock_mkdirs, mock_open):
    """_persist_domain must create the parent dirs and open metadata.json for text write."""
    project = domain.Project(f"Test Project {uuid.uuid4()}")
    project_metadata_path = f"/local/root/{slugify(project.name)}/metadata.json"
    local_repo = LocalRepository(root_dir="/local/root")
    local_repo._persist_domain(project, project_metadata_path)
    mock_mkdirs.assert_called_once_with(os.path.dirname(project_metadata_path), exist_ok=True)
    mock_open.assert_called_once_with(project_metadata_path, "w")
@patch("fsspec.implementations.local.LocalFileSystem.open")
@patch("fsspec.implementations.local.LocalFileSystem.mkdirs")
def test_persist_domain_throws_error(mock_mkdirs, mock_open):
    """A non-JSON-serializable description must raise before any filesystem call."""
    not_serializable = str  # a class object cannot be JSON-serialized
    project = domain.Project(f"Test Project {uuid.uuid4()}", description=not_serializable)
    project_metadata_path = f"/local/root/{slugify(project.name)}/metadata.json"
    local_repo = LocalRepository(root_dir="/local/root")
    with pytest.raises(TypeError):
        local_repo._persist_domain(project, project_metadata_path)
    mock_mkdirs.assert_not_called()
    mock_open.assert_not_called()
|
# -*- coding: utf-8 -*-
"""Application configuration.
Most configuration is set via environment variables.
For local development, use a .env file to set
environment variables.
"""
from environs import Env
from decimal import Decimal
env = Env()
env.read_env()  # load variables from a local .env file if present

# --- environment / debug ---
ENV = env.str("FLASK_ENV", default="production")
DEBUG = ENV == "development"
SITE_URL = env.str('SITE_URL', default='https://zfnd.org')
ADMIN_SITE_URL = env.str('ADMIN_SITE_URL', default='https://grants-admin.zfnd.org')
# --- database (E2E tests may use a separate database) ---
E2E_TESTING = env.str("E2E_TESTING", default=None)
E2E_DATABASE_URL = env.str("E2E_DATABASE_URL", default=None)
SQLALCHEMY_DATABASE_URI = E2E_DATABASE_URL if E2E_TESTING else env.str("DATABASE_URL")
SQLALCHEMY_ECHO = False  # True will print queries to log
QUEUES = ["default"]
SECRET_KEY = env.str("SECRET_KEY")
BCRYPT_LOG_ROUNDS = env.int("BCRYPT_LOG_ROUNDS", default=13)
DEBUG_TB_ENABLED = DEBUG
DEBUG_TB_INTERCEPT_REDIRECTS = False
CACHE_TYPE = "simple"  # Can be "memcached", "redis", etc.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# so backend session cookies are first-party
SESSION_COOKIE_DOMAIN = env.str('SESSION_COOKIE_DOMAIN', default=None)
CORS_DOMAINS = env.str('CORS_DOMAINS', default='*')
SESSION_COOKIE_SAMESITE = env.str('SESSION_COOKIE_SAMESITE', default='None')
# SameSite=None requires the Secure flag per browser rules
SESSION_COOKIE_SECURE = True if SESSION_COOKIE_SAMESITE == 'None' else False
# --- email ---
SENDGRID_API_KEY = env.str("SENDGRID_API_KEY", default="")
SENDGRID_DEFAULT_FROM = "noreply@grants.zfnd.org"
SENDGRID_DEFAULT_FROMNAME = "ZF Grants"
# --- error reporting ---
SENTRY_DSN = env.str("SENTRY_DSN", default=None)
SENTRY_RELEASE = env.str("SENTRY_RELEASE", default=None)
MAX_CONTENT_LENGTH = 5 * 1024 * 1024  # 5MB (limits file uploads, raises RequestEntityTooLarge)
# --- AWS / S3 (no defaults: missing values fail fast at startup) ---
AWS_ACCESS_KEY_ID = env.str("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = env.str("AWS_SECRET_ACCESS_KEY")
AWS_DEFAULT_REGION = env.str("AWS_DEFAULT_REGION")
S3_BUCKET = env.str("S3_BUCKET")
SECURITY_USER_IDENTITY_ATTRIBUTES = ['email_address']  # default is 'email'
SECURITY_PASSWORD_HASH = 'bcrypt'
SECURITY_PASSWORD_SALT = SECRET_KEY
# --- OAuth providers ---
GITHUB_CLIENT_ID = env.str("GITHUB_CLIENT_ID")
GITHUB_CLIENT_SECRET = env.str("GITHUB_CLIENT_SECRET")
TWITTER_CLIENT_ID = env.str("TWITTER_CLIENT_ID")
TWITTER_CLIENT_SECRET = env.str("TWITTER_CLIENT_SECRET")
LINKEDIN_CLIENT_ID = env.str("LINKEDIN_CLIENT_ID")
LINKEDIN_CLIENT_SECRET = env.str("LINKEDIN_CLIENT_SECRET")
# --- blockchain watcher service ---
BLOCKCHAIN_REST_API_URL = env.str("BLOCKCHAIN_REST_API_URL")
BLOCKCHAIN_API_SECRET = env.str("BLOCKCHAIN_API_SECRET")
STAGING_PASSWORD = env.str("STAGING_PASSWORD", default=None)
EXPLORER_URL = env.str("EXPLORER_URL", default="https://chain.so/tx/ZECTEST/<txid>")
# monetary amounts are parsed as Decimal to avoid float rounding
PROPOSAL_STAKING_AMOUNT = Decimal(env.str("PROPOSAL_STAKING_AMOUNT"))
PROPOSAL_TARGET_MAX = Decimal(env.str("PROPOSAL_TARGET_MAX"))
UI = {
    'NAME': 'ZF Grants',
    'PRIMARY': '#CF8A00',
    'SECONDARY': '#2D2A26',
}
|
# -*- mode: python ; coding: utf-8 -*-
import shutil, os
# PyInstaller spec: build a single-file, windowed executable and copy the
# runtime data files next to it. Analysis/PYZ/EXE are spec-file globals
# injected by PyInstaller when it executes this file.
block_cipher = None
a = Analysis(['run.py'],
             pathex=['.'],
             binaries=[],
             datas=[],
             hiddenimports=[],
             hookspath=[],
             runtime_hooks=[],
             excludes=[],
             win_no_prefer_redirects=False,
             win_private_assemblies=False,
             cipher=block_cipher,
             noarchive=False)
pyz = PYZ(a.pure, a.zipped_data,
          cipher=block_cipher)
exe = EXE(pyz,
          a.scripts,
          a.binaries,
          a.zipfiles,
          a.datas,
          [],
          name='AnnouncementsWindow',
          debug=False,
          bootloader_ignore_signals=False,
          strip=False,
          upx=True,
          upx_exclude=[],
          runtime_tmpdir=None,
          console=False, icon='Data\\favicon.ico')
# post-build: stage the app's data files into dist/
# NOTE(review): os.makedirs fails if dist/Data already exists from a prior build
os.makedirs('dist/Data')
shutil.copyfile('Data/favicon.ico', 'dist/Data/favicon.ico')
shutil.copyfile('filters.txt', 'dist/filters.txt')
shutil.copyfile('wordcolor.txt', 'dist/wordcolor.txt')
shutil.copyfile('Data/filters.dat', 'dist/Data/filters.dat')
shutil.copyfile('readme.md', 'dist/readme.md')
shutil.copyfile('readme.txt', 'dist/readme.txt')
shutil.copytree('Icons', 'dist/Icons')
shutil.copyfile('icon.cfg', 'dist/icon.cfg')
|
from django.contrib import admin
from .models import Ride
from .forms import RideAdminForm
# Register your models here.
class RideAdmin(admin.ModelAdmin):
    """Admin configuration for Ride; uses a custom form for add/change pages."""
    form = RideAdminForm
admin.site.register(Ride, RideAdmin)
|
import random
from board2 import Board
# Random playout demo: 20 plies on a 5x5 board. Before each move, print the
# current position and every reachable successor position.
b = Board().withSize(size=5)
for x in range(0, 20):
    moves = list(b.moves())
    print(repr(b))
    # 'move_type' instead of 'type' so the builtin type() is not shadowed
    for move_type, args in moves:
        print(repr(b.apply_move(move_type, *args)))
    move_type, args = random.choice(moves)
    b = b.apply_move(move_type, *args)
    print("")
|
# Tutorial script demonstrating Python set operations.
thisset = {"apple", "banana", "cherry"}
for x in thisset:
    print(x)
print("banana" in thisset)
anotherSet = {'a', 'b', 'c'}
anotherSet.add('d')
print(anotherSet)
# {'b', 'c', 'd', 'a'}
anotherSet.update(['slime', 'fruit'])
print(anotherSet)
# {'a', 'slime', 'c', 'd', 'fruit', 'b'}
anotherSet.remove('slime')  # remove() raises KeyError if the element is absent
print(anotherSet)
# {'a', 'c', 'd', 'fruit', 'b'}
anotherSet.discard('fruit')  # discard() is silent if the element is absent
print(anotherSet)
# {'a', 'c', 'd', 'b'}
anotherSet.clear()
print(anotherSet)
# set()
del anotherSet
# NOTE(review): the next line deliberately raises NameError (as the comment
# says), so the set-algebra examples below never execute when run as a script.
print(anotherSet)
# NameError: name 'anotherSet' is not defined
# Union
{1, 2, 3, 4, 5}.union({3, 4, 5, 6})  # {1, 2, 3, 4, 5, 6}
# Difference
{1, 2, 3, 4}.difference({2, 3, 5})  # {1, 4}
# Intersection
{1, 2, 3, 4, 5}.intersection({3, 4, 5, 6})  # {3, 4, 5}
# Superset check
{1, 2}.issuperset({1, 2, 3})  # False
# Subset check
{1, 2}.issubset({1, 2, 3})  # True
|
import os
def makedir():
    """Prompt for a folder name and create that directory."""
    folder = input("Please enter the folder name: ")
    os.mkdir(folder)
def deldir():
    """Prompt for a folder name and remove it (plus any empty parents)."""
    target = input("Please enter the folder name: ")
    os.removedirs(target)
def renamedir():
    """Prompt for the current and new folder names, then rename."""
    old_name = input("Please enter the present name of the folder:")
    new_name = input("Please enter the name to update: ")
    os.rename(old_name, new_name)
# Simple menu driver: dispatch to one of the directory helpers above.
print("Program is to Create Folder and Delete Folder")
askfor=input("Please type your choice:\t1 for Make Directory\t2 for Delete Directory\t3 for Rename Folder\n")
if askfor==("1"):
    makedir()
elif askfor==("2"):
    deldir()
elif askfor==("3"):
    renamedir()
else:
    print("Wrong Input")
|
def yo():
    """Print a two-line self-introduction."""
    for line in ("hi i am bhuvan", "who are you?"):
        print(line)
# Demo: greet, interject, greet again.
yo()
print("honey")
yo()
|
import dash_bootstrap_components as dbc
from dash import html
# A fixed-width Bootstrap card with title, subtitle, body text and two links
# (dash-bootstrap-components example layout).
card = dbc.Card(
    dbc.CardBody(
        [
            html.H4("Title", className="card-title"),
            html.H6("Card subtitle", className="card-subtitle"),
            html.P(
                "Some quick example text to build on the card title and make "
                "up the bulk of the card's content.",
                className="card-text",
            ),
            dbc.CardLink("Card link", href="#"),
            dbc.CardLink("External link", href="https://google.com"),
        ]
    ),
    style={"width": "18rem"},
)
|
# Copyright (C) 2020 FireEye, Inc. All Rights Reserved.
from speakeasy.struct import EmuStruct
import ctypes as ct
class POINT(EmuStruct):
    """Emulated Windows POINT structure (x/y screen coordinates)."""
    def __init__(self, ptr_size):
        super().__init__(ptr_size)
        self.x = ct.c_uint32
        self.y = ct.c_uint32
class RECT(EmuStruct):
    """Emulated Windows RECT structure (signed edge coordinates)."""
    def __init__(self, ptr_size):
        super().__init__(ptr_size)
        self.left = ct.c_int32
        self.top = ct.c_int32
        self.right = ct.c_int32
        self.bottom = ct.c_int32
class MONITORINFO(EmuStruct):
    """Emulated Windows MONITORINFO structure."""
    def __init__(self, ptr_size):
        super().__init__(ptr_size)
        self.cbSize = ct.c_uint32
        self.rcMonitor = RECT
        # NOTE(review): the Win32 field is spelled 'rcWork'; 'rcWORK' here may
        # be a casing inconsistency — confirm against callers before renaming.
        self.rcWORK = RECT
        self.dwFlags = ct.c_uint32
|
from unittest import TestCase
from app.core import species_repository
from app.utils.constants import meta_data_resource_id
from mock import patch, Mock
class RepositoryTestCase(TestCase):
    @patch("app.core.species_repository.get_data_from_ckan",
           return_value={"result": {"records": [{"resource_id": "resource123"}]}})
    def test_get_resource_id_by_name(self, get_data_from_ckan):
        """The first record's resource_id is returned for a matching name."""
        resource_id = species_repository.get_resource_id_by_name("fishes")
        self.assertEqual("resource123", resource_id)
    @patch("app.core.species_repository.get_data_from_ckan", return_value={"result": {"records": []}})
    def test_get_resource_id_by_name_should_return_none(self, get_data_from_ckan):
        """With no matching records the lookup yields None."""
        resource_id = species_repository.get_resource_id_by_name("fishes")
        self.assertEqual(None, resource_id)
    @patch("app.core.species_repository.get_data_from_ckan",
           return_value={"result": {"records": [{"resource_id": "resource123"}, {"resource_id": "resource1234"}]}})
    def test_get_parent_details(self, get):
        """get_parent_details returns the first matching record.

        NOTE(review): a later method in this class reuses this exact name,
        which shadows this definition so it never runs under unittest.
        """
        actual = species_repository.get_parent_details('Fish')
        self.assertEqual({"resource_id": "resource123"}, actual)
@patch("app.core.species_repository.get_data_from_ckan", return_value={"result": {"records": []}})
def test_get_parent_details(self, get):
actual = species_repository.get_parent_details('Fish')
self.assertEqual(None, actual)
    @patch("app.core.species_repository.get_data_from_ckan",
           return_value={"result": {"records": [{"resource_id": "resource123"}, {"resource_id": "resource1234"}]}})
    def test_get_home_page_data(self, get):
        """All records from CKAN are passed through unchanged."""
        expected = [{"resource_id": "resource123"}, {"resource_id": "resource1234"}]
        self.assertEqual(expected, species_repository.get_home_page_data())
    @patch("app.core.species_repository.get_data_from_ckan",
           return_value={"result": {"records": [{"resource_id": "resource123"}, {"resource_id": "resource1234"}]}})
    def test_getSpeciesDetail(self, get):
        """getSpeciesDetail returns the first record of the CKAN result."""
        self.assertEqual({"resource_id": "resource123"}, species_repository.getSpeciesDetail('', ''))
    def test_form_query_params(self):
        """form_species_query appends a quoted LIKE clause with a trailing %."""
        query = 'query WHERE species LIKE '
        query += "'species_one%'"
        self.assertEqual(query, species_repository.form_species_query("query", "species_one"))
    @patch("app.core.species_repository.get_data_from_ckan")
    def test_get_data_from_ckan(self, get_data_from_ckan):
        """NOTE(review): this patches the very function under test, so it only
        verifies the mock wiring, not any real behavior of get_data_from_ckan."""
        ckan_data = {"key": "success"}
        get_data_from_ckan.return_value = ckan_data
        call_parameter = "sql query"
        self.assertEqual(species_repository.get_data_from_ckan(call_parameter), ckan_data)
        get_data_from_ckan.assert_called_with(call_parameter)
    def test_form_sql_query_when_only_select_parameter_passed(self):
        """Without a condition, only the SELECT column list is rendered."""
        resource_id = "resource_id"
        expected = 'select id,name,desc from \"' + resource_id + '\"'
        actual = species_repository.form_sql_query(resource_id=resource_id, select_parameters=['id', 'name', 'desc'])
        self.assertEqual(actual, expected)
    def test_form_sql_query_when_condition_is_passed(self):
        """A condition dict is rendered as an AND-joined WHERE clause."""
        resource_id = "resource_id"
        expected = "select id,name,desc from \"" + resource_id + "\" where id=123 and name='aaa'"
        actual = species_repository.form_sql_query(resource_id=resource_id, select_parameters=['id', 'name', 'desc'],
                                                   condition={'id': 123, 'name': 'aaa'})
        self.assertEqual(actual, expected)
@patch("app.core.species_repository.form_sql_query")
def test_form_sql_query_with_meta_data_table(self, form_sql_query):
success_return_value = "success"
meta_resource_id = "meta_resource_id"
species_repository.meta_data_resource_id = meta_resource_id
form_sql_query.return_value = success_return_value
selected_parameters = "selected_parameters"
condition = "condition"
self.assertEqual(species_repository.form_sql_query_with_meta_data_table(select_parameters=selected_parameters,
condition=condition),
success_return_value)
form_sql_query.assert_called_once_with(resource_id=meta_resource_id, select_parameters=selected_parameters,
condition=condition)
def test_get_result_record(self):
data = {'result': {'records': 'success_value'}}
self.assertEqual(species_repository.get_result_record(data), 'success_value')
@patch("app.core.species_repository.form_sql_query",
return_value="select id, name, resource_id from resourceId123")
def test_form_sql_query_with_visual_table(self, form_sql_query):
result = species_repository.form_sql_query_with_visual_table(['id', 'name', 'resource_id'])
self.assertEqual("select id, name, resource_id from resourceId123", result)
@patch("app.core.species_repository.form_sql_query_with_visual_table")
@patch("app.core.species_repository.get_data_from_ckan")
@patch("app.core.species_repository.get_result_record")
def test_get_visual_data(self, get_result_record, get_data_from_ckan, form_sql_query_with_visual_table):
form_sql_query_with_visual_table.return_value = "select * from visual_table"
get_data_from_ckan.return_value = {"result":{"record":[{"id":1, "meta_data_id":2, "visual":"fishes"}]}}
get_result_record.return_value = [{"id":1, "meta_data_id":2, "visual":"fishes"}]
result = species_repository.get_visual_data(1)
self.assertEqual(result, ["fishes"])
@patch("app.core.species_repository.form_sql_query_with_visual_table")
@patch("app.core.species_repository.get_data_from_ckan")
@patch("app.core.species_repository.get_result_record")
def test_get_all_visual_data_files(self, get_result_record, get_data_from_ckan, form_sql_query_with_visual_table):
form_sql_query_with_visual_table.return_value = "select * from visual_table"
get_data_from_ckan.return_value = {"result":{"record":[{"id":1, "meta_data_id":2, "visual":"fishes"}]}}
get_result_record.return_value = [{"id":1, "meta_data_id":2, "visual":"fishes"}]
result = species_repository.get_all_visual_data_files()
self.assertEqual(result, ["fishes"])
@patch("app.core.species_repository.form_sql_query_with_visual_table")
@patch("app.core.species_repository.get_data_from_ckan")
@patch("app.core.species_repository.get_result_record")
def test_get_category_data(self, get_result_record, get_data_from_ckan, form_sql_query_with_visual_table):
form_sql_query_with_visual_table.return_value = "select * from metadata_table"
get_data_from_ckan.return_value = {"result":{"record":[{"id":1, "resource_id":2, "name":"fishes"}]}}
get_result_record.return_value = [{"id":1, "resource_id":2, "name":"fishes"}]
result = species_repository.get_category_data(0)
self.assertEqual(result, [{"id":1, "resource_id":2, "name":"fishes"}])
@patch("app.core.species_repository.get_resource_id_ckan")
@patch("app.core.species_repository.form_sql_query")
@patch("app.core.species_repository.get_data_from_ckan")
@patch("app.core.species_repository.get_result_record")
def test_get_species_data(self, get_result_record, get_data_from_ckan, form_sql_query, get_resource_id_ckan):
get_resource_id_ckan.return_value = 123
form_sql_query.return_value = "select species, kingdom, genus from species_table"
get_data_from_ckan.return_value = {"result":{"record":[{"species":"some", "kingdom":"any", "genus":"any"}]}}
get_result_record.return_value = [{"species":"some", "kingdom":"any", "genus":"any"}]
result = species_repository.get_species_data({"name":"fishes"}, 0)
self.assertEqual(result, [{"species":"some", "kingdom":"any", "genus":"any"}])
@patch("app.core.species_repository.validateAndExtractResult")
@patch("app.core.species_repository.get_data_from_ckan")
@patch("app.core.species_repository.form_sql_query_with_meta_data_table")
def test_get_resource_id_ckan(self, form_sql_query_with_meta_data_table, get_data_from_ckan, validateAndExtractResult):
form_sql_query_with_meta_data_table.return_value = "select * from metadata_table"
get_data_from_ckan.return_value = {"result":{"record":[{"id":1, "resource_id":2, "name":"fishes"}]}}
validateAndExtractResult.return_value = 2
result = species_repository.get_resource_id_ckan("fishes")
self.assertEqual(result, 2)
@patch("app.core.species_repository.form_sql_query")
@patch("app.core.species_repository.get_data_from_ckan")
@patch("app.core.species_repository.get_result_record")
def test_get_all_species_details(self, get_result_record, get_data_from_ckan, form_sql_query):
form_sql_query.return_value = "select * from metdata_table"
get_data_from_ckan.return_value = {"result":{"record":[{"id":1, "name":"any", "resource_id":"12334"}]}}
get_result_record.return_value = [{"id":1, "name":"any", "resource_id":"12334"}]
result = species_repository.get_all_species_details()
self.assertEqual(result, [{"id":1, "name":"any", "resource_id":"12334"}])
@patch("app.core.species_repository.form_sql_query")
@patch("app.core.species_repository.get_data_from_ckan")
@patch("app.core.species_repository.get_result_record")
def test_get_species_experts_data(self, get_result_record, get_data_from_ckan, form_sql_query):
form_sql_query.return_value = "select * from expert_table"
get_data_from_ckan.return_value = {"result":{"record":[{"id":1, "name":"any", "work_done":"12334"}]}}
get_result_record.return_value = [{"id":1, "name":"any", "work_done":"12334"}]
result = species_repository.get_species_experts_data(1)
self.assertEqual(result, [{"id":1, "name":"any", "work_done":"12334"}]) |
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import numpy as np
import math
from datetime import datetime, timezone, date, timedelta
from time import time
from astral import Astral
a = Astral()
location = a['Stockholm']  # geo location used for sunrise/sunset lookups
# Raw logger file: one "<timestamp> | | <cumulative movement counter>" row per sample.
data = pd.read_csv('bird_jan25jan16.txt', sep=" | | ", header=None, engine='python')
data.columns = ["date", "movement",]
# Parse timestamps as UTC and convert them to local Stockholm time.
data['date'] = pd.to_datetime(data['date']).dt.tz_localize('UTC').dt.tz_convert('Europe/Stockholm')
def old_to_datetime():
    """Legacy timestamp parser, superseded by the pd.to_datetime call above.

    Parses each ISO timestamp in the module-level ``data`` frame as UTC and
    converts it to local time; rows whose timestamp cannot be parsed are
    dropped from ``data``.
    """
    # BUG FIX: the original rebound ``data`` inside the function without a
    # ``global`` declaration, so reading ``data.date`` raised
    # UnboundLocalError. It also dropped rows by a positional index that
    # went stale after the first drop; a keep-mask avoids both problems.
    global data
    dates_list = []
    keep = []
    for raw in data.date:
        try:
            parsed = datetime.fromisoformat(raw).replace(tzinfo=timezone.utc).astimezone(tz=None)
            dates_list.append(parsed)
            keep.append(True)
        except ValueError:
            # Faulty formatting: drop the row instead of aborting.
            keep.append(False)
    data = data[keep]
    data['date'] = dates_list
data = data.dropna()
movements = data.movement  # NOTE(review): unused below; kept for compatibility
movement_inc = []
movement_list = []  # NOTE(review): never populated or read afterwards
prev = 70
prev_time = data['date'].iloc[0]
idx_2 = 0
# Convert the cumulative movement counter into per-sample increments, and
# drop samples where the bird was "fluttering" (more than 4 movements per
# minute between consecutive samples).
for row in data.movement:
    curr_time = data.iloc[idx_2, 0]
    # If movement value in the textfile is not zero
    if row != 0:
        mov_inc = row - prev
        if mov_inc < 4.0:
            movement_inc.append(mov_inc)
        # If movement increases with more than 4 between two samples, decide
        # by rate: keep the sample when it is at most 4 movements/minute,
        # otherwise treat it as fluttering and delete the row.
        else:
            time_difference = curr_time - prev_time
            time_difference = int(round(time_difference.total_seconds() / 60))
            try:
                mov_per_minute = mov_inc / time_difference
            except ZeroDivisionError:
                # Samples less than a minute apart: use the raw increment as the rate.
                mov_per_minute = mov_inc
            if mov_per_minute <= 4:
                movement_inc.append(float(mov_inc))
            else:
                data = data.drop(index=data.index[idx_2])
                idx_2 -= 1  # compensate for the removed row
        prev = row
    # If movement value in textfile restarts counting from 0
    else:
        prev = 0
        movement_inc.append(float(prev))
    idx_2 += 1
    prev_time = curr_time
data['movement'] = movement_inc
data.query('movement >= 0', inplace=True)  # negative increments stem from counter resets
data.set_index('date', drop=False, inplace=True, verify_integrity=True)
# Takes input for which dates to analyze.
# ------------------------------------------------------------------------
print("Choose which dates to anaylyse. Write a start date in format YYYY-MM-DD between 2015-01-25 and 2016-01-16.")
start_date = input()
print("Write a end date in format YYYY-MM-DD between 2015-01-25 and 2016-01-16.")
end_date = input()
# Slices dataframe based on dates to analyze.
# ------------------------------------------------------------------------
data = data.sort_index()
sliced_data = data.index.slice_locs(start=start_date, end=end_date, kind='getitem')
sliced_data = data.iloc[sliced_data[0]:sliced_data[1]]
dates = sliced_data.index.date
dates = np.unique(dates)  # one entry per calendar day in the slice
# Computes sunrise and sunset times for each selected day, rounded to the
# nearest whole hour (>= 30 minutes rounds up).
# ------------------------------------------------------------------------
sunrise_times = []
sunset_times = []
for date in dates:
    sun = location.sun(local=True, date=date)
    sunrise = sun['sunrise']
    sunrise = sunrise.replace(second=0, microsecond=0, minute=0) + timedelta(hours=sunrise.minute//30)
    sunrise_times.append(sunrise)
    sunset = sun['sunset']
    # BUG FIX: the original assigned the rounded value back to `sunrise`
    # (and rounded with sunrise's minutes), then appended the *unrounded*
    # sunset; round the sunset the same way as the sunrise.
    sunset = sunset.replace(second=0, microsecond=0, minute=0) + timedelta(hours=sunset.minute//30)
    sunset_times.append(sunset)
# Position of the dataframe row whose timestamp is nearest each
# sunrise/sunset time.
rise_index = [sliced_data.index.get_loc(t, method='nearest') for t in sunrise_times]
set_index = [sliced_data.index.get_loc(t, method='nearest') for t in sunset_times]
# Accumulators for the per-day daylight-movement frame built by movement_daylight().
date_newdf = []
mov_newdf = []
mov_while_light = pd.DataFrame(columns=['date', 'movement'])
# Creates a new dataframe containing datetime objects and movements for when the sun is up.
# ------------------------------------------------------------------------
def movement_daylight():
    """Fill the module-level ``mov_while_light`` frame with one row per day:
    the date and the total number of movements between sunrise and sunset.

    Reads the module-level ``rise_index``/``set_index`` positions and appends
    into ``date_newdf``/``mov_newdf``.
    NOTE(review): calling this twice appends duplicate rows — both
    plt_mov_dark() and plt_hour() invoke it; confirm this is intended.
    """
    for index_rise, index_set in zip(rise_index, set_index):
        # Sum the movement increments recorded while the sun was up.
        mov = sliced_data.iloc[index_rise:index_set].movement.sum()
        date = sliced_data.iloc[index_rise].date
        date = datetime(year=date.year, month=date.month, day=date.day)  # strip time-of-day
        date_newdf.append(date)
        mov_newdf.append(mov)
    mov_while_light['date'] = date_newdf
    mov_while_light['movement'] = mov_newdf
# Functions for plotting
# ------------------------------------------------------------------------
def plt_mov_dark():
    """Pie chart of daytime vs nighttime movement share in the sliced range."""
    movement_daylight()  # populate mov_while_light with per-day daylight totals
    total_mov = sliced_data.movement.sum()
    day_move = mov_while_light.movement.sum()
    night_move = total_mov - day_move  # everything not during daylight
    fig, ax = plt.subplots()
    ax.pie([day_move, night_move], colors=['#FFFF7F', '#7A7A7A'], labels=('Daytime movements', 'Nighttime movements'), autopct='%1.1f%%', shadow=True, startangle=90)
    ax.axis('equal')  # keep the pie circular
    plt.show()
def plt_mov_year():
    """Line plot of daily movement totals over the full recording period."""
    data_plt = data.resample('D').sum()  # one point per day
    fig, ax = plt.subplots()
    ax.plot(data_plt, color='black')
    ax.set_ylim(bottom=0)
    MFmt = mdates.DateFormatter('%b')
    #DFmt = mdates.DateFormatter('%d')
    ax.xaxis.set_major_formatter(MFmt)
    ax.xaxis.set_major_locator(mdates.MonthLocator())
    # ax.xaxis.set_minor_formatter(DFmt)
    ax.xaxis.set_minor_locator(mdates.DayLocator(interval=7))
    ax.xaxis.grid(True)
    # Highlighted May-July 2015 spans — presumably phases of the bird's
    # season; NOTE(review): confirm what each colored period marks.
    ax.axvspan(datetime(year=2015, month=5, day=1), datetime(year=2015, month=6, day=1), alpha=0.4, facecolor='yellow')
    ax.axvspan(datetime(year=2015, month=6, day=1), datetime(year=2015, month=6, day=16), alpha=0.4, facecolor='blue')
    ax.axvspan(datetime(year=2015, month=6, day=16), datetime(year=2015, month=7, day=8), alpha=0.4, facecolor='green')
    ax.set_title('Bird movements over year')
    ax.set_xlabel('Date and time')
    ax.set_ylabel('Movements')
    plt.show()
def plt_hour():
    """Hourly movement totals for the selected range, daylight shaded yellow."""
    # NOTE(review): mov_while_light is not used below, so this call may be
    # redundant (and it appends duplicate rows if plt_mov_dark already ran).
    movement_daylight()
    plt_mov = sliced_data.resample('H').sum()  # one bar point per hour
    fig, ax = plt.subplots()
    ax.plot(plt_mov, color='black')
    ax.set_ylim(bottom=0)
    DFmt = mdates.DateFormatter('%m/%d')
    HFmt = mdates.DateFormatter('%H')
    ax.xaxis.set_major_formatter(DFmt)
    ax.xaxis.set_minor_formatter(HFmt)
    ax.xaxis.set_major_locator(mdates.DayLocator())
    ax.xaxis.set_minor_locator(mdates.HourLocator(byhour=range(0,24,1), interval=3))
    ax.tick_params(pad=10, labelrotation=45)
    ax.xaxis.grid(True)
    ax.set_title('Analysis of bird movement on an hourly basis')
    ax.set_xlabel('Date and time')
    ax.set_ylabel('Movements')
    # Shade the daylight interval of each day.
    for rise, down in zip(sunrise_times, sunset_times):
        ax.axvspan(rise, down, alpha=0.5, facecolor='yellow')
    plt.show()
def plt_mov_sun_relation():
    """Hourly movement plot with daylight shading, ticked every 12 hours,
    to show how activity relates to sunrise and sunset."""
    test = sliced_data.resample('H').sum()
    HFmt = mdates.DateFormatter('%H')
    DFmt = mdates.DateFormatter('%m/%d')
    fig, ax = plt.subplots()
    ax.plot(test, color='black')
    ax.set_ylim(bottom=0)
    ax.xaxis.set_major_formatter(DFmt)
    ax.xaxis.set_minor_formatter(HFmt)
    ax.xaxis.set_major_locator(mdates.DayLocator())
    ax.xaxis.set_minor_locator(mdates.HourLocator(byhour=range(0,24,1), interval=12))
    ax.tick_params(pad=10, labelrotation=45)
    ax.xaxis.grid(True)
    ax.set_title('Relation between bird movement and sunrise/sunset')
    ax.set_xlabel('Date and time')
    ax.set_ylabel('Movements')
    # Shade the daylight interval of each day.
    for rise, down in zip(sunrise_times, sunset_times):
        ax.axvspan(rise, down, alpha=0.5, facecolor='yellow')
    plt.show()
# Render all four views; each plt.show() blocks until its window is closed.
plt_mov_dark()
plt_mov_year()
plt_hour()
plt_mov_sun_relation()
|
import uuid
from django.db import models
from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy as _
from ..common.models import Article
from ..utils import upload_and_rename
def upload_and_rename_category(instance, filename):
    """upload_to callback: store category cover images under '<name>-cover'."""
    return upload_and_rename(instance.name+'-cover', filename)
def upload_and_rename_case_detail(instance, filename):
    """upload_to callback: store case pictures under the instance's pk.

    NOTE(review): for CasePicture the pk is a UUID with a default, so it is
    populated even before the first save — confirm other users of this
    callback also have a pre-save pk.
    """
    return upload_and_rename(instance.pk, filename)
class Category(models.Model):
    """A case category ("案例类型") that Case rows belong to."""
    id = models.AutoField(primary_key=True)
    create_at = models.DateTimeField(_('创建时间'), auto_now_add=True)  # created timestamp
    update_at = models.DateTimeField(_('修改时间'), auto_now=True)  # last-modified timestamp
    name = models.CharField(_("名称"), max_length=80, unique=True)  # unique display name
    def __str__(self):
        return self.name
    class Meta:
        verbose_name = _('案例类型')
        verbose_name_plural = _('案例类型')
class Case(models.Model):
    """A showcase case ("案例") belonging to a Category."""
    id = models.AutoField(primary_key=True)
    create_at = models.DateTimeField(_('创建时间'), auto_now_add=True)  # created timestamp
    update_at = models.DateTimeField(_('修改时间'), auto_now=True)  # last-modified timestamp
    # SET_NULL keeps cases alive when their category is deleted.
    category = models.ForeignKey(Category, verbose_name=_('案例类型'), related_name='cases', on_delete=models.SET_NULL, null=True)
    name = models.CharField(_("名称"), max_length=80)
    short_description = models.TextField(_('简介'), null=True, blank=True)
    def __str__(self):
        # Shown in admin as "<category>-<name>".
        return '{}-{}'.format(self.category, self.name)
    class Meta:
        verbose_name = _('案例')
        verbose_name_plural = _('案例')
class CasePicture(models.Model):
    """A picture attached to a Case; can be flagged as cover or front-page item."""
    uuid = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    create_at = models.DateTimeField(_('创建时间'), auto_now_add=True)  # created timestamp
    update_at = models.DateTimeField(_('修改时间'), auto_now=True)  # last-modified timestamp
    # SET_NULL keeps pictures when their case is deleted.
    case = models.ForeignKey(Case, verbose_name='案例', related_name='images', on_delete=models.SET_NULL, null=True)
    image = models.ImageField(upload_to=upload_and_rename_case_detail, verbose_name=_('图片'))
    description = models.CharField(_('描述'), max_length=80, null=True, blank=True)
    featured = models.BooleanField(_('首页展示'), default=False)  # show on the home page
    cover = models.BooleanField(_('作为封面'), default=False)  # use as the case cover
    class Meta:
        verbose_name = _('案例图片')
        verbose_name_plural = _('案例图片')
    def image_tag(self):
        # Inline thumbnail for the admin list; image.url comes from the
        # storage backend (not user input), hence mark_safe is acceptable.
        return mark_safe('<img src="%s" width="150" height="150" />' % (self.image.url))
    image_tag.short_description = '图片预览'
class CaseDetailedDescription(models.Model):
    """Links a Case one-to-one with its long-form Article description."""
    id = models.AutoField(primary_key=True)
    case = models.OneToOneField(Case, on_delete=models.CASCADE)
    article = models.OneToOneField(Article, on_delete=models.CASCADE, null=True)
    class Meta:
        verbose_name = _('案例详情描述')
        verbose_name_plural = _('案例详情描述')
|
#!/usr/bin/env python3
from PyQt5 import QtWidgets
from PyQt5.QtWidgets import QMessageBox
import ManualFit_design
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
import pathlib
import sys
# Make the project root importable so the ADAS package resolves no matter
# where this script is launched from.
cwd = pathlib.Path(__file__).resolve()
ROOT = str(cwd.parent.parent.parent.resolve())
sys.path.append(ROOT)
import ADAS #import data, fit
ADAS.data._initADAS()  # load the ADAS tables once at startup
class ManualFit(QtWidgets.QMainWindow):
    """Window for interactively fitting ionization cross-sections to ADAS data.

    The user selects an element, charge state, temperature window and fitting
    method; ``doFit`` runs the fit and plots the result on an embedded
    matplotlib canvas, showing the fitted parameters in the side panel.
    """
    def __init__(self):
        """
        Constructor.
        """
        QtWidgets.QMainWindow.__init__(self)
        self.ui = ManualFit_design.Ui_ManualFit()
        self.ui.setupUi(self)
        self.element_I, self.element_T = None, None
        # Embed a matplotlib canvas in the placeholder widget from the .ui file.
        self.figure = Figure(tight_layout=True)
        self.canvas = FigureCanvas(self.figure)
        self.ax = self.figure.add_subplot(111)
        self.fitLayout = QtWidgets.QVBoxLayout(self.ui.widget)
        self.fitLayout.addWidget(self.canvas)
        # Fitting methods; the indices are interpreted by doFit().
        self.ui.cbMethod.addItem('Single cross-section')
        self.ui.cbMethod.addItem('Single cross-section, 3-parameter')
        self.ui.cbMethod.addItem('Double cross-sections')
        self.ui.cbMethod.setCurrentIndex(0)
        # Hide all parameter read-outs until the first fit completes.
        self.showParams({'C1': None, 'C2': None, 'DI1': None, 'DI2': None, 'betaStar': None, 'beta2': None})
        self.loadElements()
        self.bindEvents()
    def bindEvents(self):
        """
        Bind to control events.
        """
        self.ui.cbElements.currentIndexChanged.connect(self.elementSelected)
        self.ui.cbCS.currentIndexChanged.connect(self.chargeStateSelected)
        self.ui.hsTlower.valueChanged.connect(self.TlowerChanged)
        self.ui.hsTupper.valueChanged.connect(self.TupperChanged)
        self.ui.btnFit.clicked.connect(self.doFit)
    def drawSafe(self):
        """
        Redraw the canvas, reporting rendering errors in a dialog instead of
        crashing the application.
        """
        try:
            self.canvas.draw()
        except RuntimeError as e:
            msg = QMessageBox()
            msg.setIcon(QMessageBox.Critical)
            # BUG FIX: RuntimeError has no 'strerror' attribute (that is an
            # OSError field); show the exception's own message instead.
            msg.setText(str(e))
            msg.setWindowTitle('Runtime Error')
            msg.setStandardButtons(QMessageBox.Ok)
            msg.exec_()
    def loadElements(self):
        """
        Populate the element combobox from the ADAS database and select the
        first entry.
        """
        for e in ADAS.data.ELEMENTS.keys():
            self.ui.cbElements.addItem(e)
        self.ui.cbElements.setCurrentIndex(0)
        self.elementSelected()
    def elementSelected(self):
        """
        A new element has been selected in the element combobox.
        Loads its ionization data and refreshes the dependent controls.
        """
        el = self.ui.cbElements.currentText()
        self.element_I, self.element_Z, _, self.element_T, _ = ADAS.data.getIonizationData(el)
        # The temperature sliders index into the element's temperature grid.
        self.ui.hsTlower.setMaximum(self.element_T.size)
        self.ui.hsTupper.setMaximum(self.element_T.size)
        self.TlowerChanged()
        self.TupperChanged()
        self.updateChargeStates()
    def updateChargeStates(self):
        """
        Update the list of available charge states.
        """
        Z = int(self.element_Z)
        self.ui.cbCS.clear()
        for i in range(Z):
            self.ui.cbCS.addItem(str(i))
        self.ui.cbCS.setCurrentIndex(0)
        self.chargeStateSelected()
    def chargeStateSelected(self):
        """
        A new charge state has been selected in the charge state combobox.
        """
        pass
    def getElement(self):
        """
        Returns the name of the currently selected element.
        """
        return self.ui.cbElements.currentText()
    def getTlower(self):
        """
        Returns the currently selected lower temperature cut-off.
        """
        return self.element_T[self.ui.hsTlower.value()]
    def getTupper(self):
        """
        Returns the currently selected upper temperature cut-off.
        The upper slider counts from the top of the temperature grid.
        """
        return self.element_T[self.element_T.size - self.ui.hsTupper.value() - 1]
    def getZ0(self):
        """
        Returns the currently selected charge state.
        """
        return int(self.ui.cbCS.currentText())
    def TlowerChanged(self):
        """
        Lower temperature bound changed; refresh its label.
        """
        Tlower = self.getTlower()
        self.ui.lblTlower.setText('{:.3f} eV'.format(Tlower))
    def TupperChanged(self):
        """
        Upper temperature bound changed; refresh its label.
        """
        Tupper = self.getTupper()
        self.ui.lblTupper.setText('{:.3f} eV'.format(Tupper))
    def doFit(self):
        """
        Fit the cross section to the selected element/charge state,
        using the specified model on the chosen temperature interval.
        """
        species = self.getElement()
        Z0 = self.getZ0()
        T_lower = self.getTlower()
        T_upper = self.getTupper()
        if T_lower >= T_upper:
            QMessageBox.critical(self,
                "Invalid temperature range selected",
                "The lower temperature cut-off must be strictly less than the upper temperature cut-off.")
            return
        idx = self.ui.cbMethod.currentIndex()
        if idx == 0:
            method = 'single'
        elif idx == 1:
            method = 'single3p'
        elif idx == 2:
            method = 'double'
        else:
            QMessageBox.critical(self,
                "Unrecognized fitting method",
                "Unrecognized fitting method selected: '{}'.".format(self.ui.cbMethod.currentText()))
            # BUG FIX: without this return, 'method' would be unbound below
            # and the fit call would raise NameError.
            return
        _, _, params = ADAS.fit.fitKineticIonizationForSpecies(species, Z0=Z0, fittype=method, T_lower=T_lower, T_upper=T_upper)
        self.drawFit(params)
        self.showParams(params)
    def drawFit(self, params):
        """
        Plot the ADAS data and the fitted curve for the current charge state.
        """
        I_fit = ADAS.fit.evaluateAveragedCrossSection(T=self.element_T, **params)
        Z0 = self.getZ0()
        self.ax.clear()
        self.ax.loglog(self.element_T, self.element_I[Z0,:,0], 'k')
        self.ax.loglog(self.element_T, I_fit, 'r--')
        self.ax.loglog(self.element_T, I_fit, 'rx')
        Tmin, Tmax = self.element_T[0], self.element_T[-1]
        ymin, ymax = 1e-25, 1e-10
        # Mark the selected temperature window with vertical dashed lines.
        Tl, Tu = self.getTlower(), self.getTupper()
        self.ax.loglog([Tl, Tl], [ymin, ymax], 'c--')
        self.ax.loglog([Tu, Tu], [ymin, ymax], 'c--')
        self.ax.legend(['ADAS', 'Fit'])
        self.ax.set_title('{}$^{{{}+}}$'.format(self.getElement(), Z0))
        self.ax.set_xlim([Tmin, Tmax])
        self.ax.set_ylim([ymin, ymax])
        self.drawSafe()
    def showParams(self, params):
        """
        Visualize the resulting fit parameters; labels whose value is None
        are hidden.
        """
        def show(val, lbl1, lbl2):
            # Show/hide a label pair depending on whether the value exists.
            s = val is not None
            if s:
                lbl2.setText('{:.12f}'.format(val))
            lbl1.setVisible(s)
            lbl2.setVisible(s)
        show(params['C1'], self.ui.lblC1l, self.ui.lblC1)
        show(params['C2'], self.ui.lblC2l, self.ui.lblC2)
        show(params['DI1'], self.ui.lblDI1l, self.ui.lblDI1)
        show(params['DI2'], self.ui.lblDI2l, self.ui.lblDI2)
        show(params['betaStar'], self.ui.lblBetaStarl, self.ui.lblBetaStar)
        show(params['beta2'], self.ui.lblBeta2l, self.ui.lblBeta2)
if __name__ == '__main__':
    # Standard Qt bootstrap: create the application, show the window, and
    # propagate Qt's exit code to the shell.
    app = QtWidgets.QApplication(sys.argv)
    win = ManualFit()
    win.show()
    sys.exit(app.exec_())
|
import Tkinter as tk # use tk namespace for Tkinter items
import random
def main():
    """Interactive dice roller: roll one die per turn until the user quits."""
    # ASCII rows for each die face, printed between two border lines.
    faces = {
        1: [" O"],
        2: [" O O"],
        3: [" O ", " O O"],
        4: [" O O", " O O"],
        5: [" O O", " O ", " O O"],
        6: [" O O", " O O", " O O"],
    }
    print (" welcome to the dice rolling")
    print ("")
    end_program = 0
    # BUG FIX: the original condition `!= "q" or "Q" or ...` was always true
    # because the bare string literals are truthy; test membership instead so
    # any of the quit words actually ends the loop.
    while end_program not in ("q", "Q", "Quit", "quit"):
        print (" Please press enter to roll the dice or q to quit")
        raw_input()
        number = random.randint(1,6)
        print("-------")
        for row in faces[number]:
            print (row)
        print("-------")
        print(number)
        end_program = raw_input()
main()
|
# Prompt until the user supplies text that parses as an integer; `a` ends up
# holding the parsed int.
a = input('Введите целое число: ')
while not isinstance(a, int):
    try:
        a = int(a)
    # BUG FIX: the original bare `except:` also swallowed KeyboardInterrupt
    # and SystemExit; only a failed int() conversion should re-prompt.
    except ValueError:
        print('Вы ввели не целое число!')
        a = input('Введите целое число: ')
import requests

# Fetch the radio station's M3U playlist file and dump the raw bytes.
# BUG FIX: requests.get has no default timeout, so a stalled server would
# hang this script forever; bound the wait explicitly.
response = requests.get('http://stream.radiojar.com/kpkt2m0mb.m3u', timeout=10)
print(response.content)
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'daa_pc_form.py'
#
# Created by: PyQt5 UI code generator 5.11.3
#
# WARNING! All changes made in this file will be lost!
import sys
import os
import daa_ui1
import binascii
import socket
import json
import time
import traceback
from check_sum import *
from data_parser import *
from daa_frame import *
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWebEngineWidgets import *
from MyQGraphicsItem import *
server_ip1 = "192.168.1.2" #xhx lxg
server_ip2 = "192.168.1.3" #xct
request_port1 = 10101 #lxg
request_port2 = 10001 #xhx
request_port3 = 9998 #xct
class Communication_Server1_Thread(QThread):
    """UDP handshake/receive thread for server 1 (lxg, request port 10101).

    After a two-step handshake, emits ``signal_a("server1", datas)`` for
    every JSON payload received on the negotiated data port.
    """
    signal_a = pyqtSignal(str,list)
    def __init__(self):
        super(Communication_Server1_Thread, self).__init__()
        # socket_1 carries the handshake; socket_2 carries the data stream.
        self.socket_1 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.transmit_port = 0 # data port returned by the server (0 = not negotiated yet)
        self.socket_2 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    def run(self):
        '''
        Handshake step 1: the UI sends its display-geometry frame to the
        server to initiate the connection; the server responds with a new
        port to use for data transfer.
        '''
        frame = DAAFrame() # frame that initiates the connection
        frame.header = 0x4f3c #UI to Server
        frame.type = 0xa1
        # Screen is 1024x768. The commented body encodes an alternative layout
        # (regions 0102: 426x362, 0103: 426x302, 0201: 598x756, 0204: 408x324).
        # frame.body = 0x0400030001AA016A01AA012E025602F401980144.to_bytes(length=20, byteorder='little',signed=False)
        frame.body = 0x040003000000000000F800A000000000012000D4.to_bytes(length=20, byteorder='little',signed=False)
        frame.length = 29
        frame.checksum = checksum(list(frame.pack()[:-2]))
        self.socket_1.sendto(frame.pack(), (server_ip1, request_port1))
        print("UI向Server1发送第一帧数据!")
        while True:
            data, address = self.socket_1.recvfrom(2048)
            frame = DAAFrame()
            frame.unpack(data)
            if frame.header == 0x3c4f and frame.type == 0xa2: # server accepted; body holds the data port
                temp_port = frame.body.hex()
                low_2 = temp_port[-2:]
                high_2 = temp_port[0:2]
                temp_port = low_2+high_2 # swap high/low hex bytes (little-endian port)
                self.transmit_port = int(temp_port, 16)
                break
        if self.transmit_port:
            '''
            Handshake step 2: the UI confirms the connection on the newly
            negotiated port; the server then streams UI data back.
            '''
            # NOTE(review): binds to server_ip1 rather than a local/any
            # address — verify this process actually runs on 192.168.1.2.
            self.socket_2.bind((server_ip1, 10010))
            frame = DAAFrame() # connection-confirmation frame
            frame.header = 0x4f3c # UI to Server
            frame.type = 0xa3 # connection confirmation
            frame.body = b''
            frame.length = 9
            frame.checksum = checksum(list(frame.pack()[:-2]))
            self.socket_2.sendto(frame.pack(), (server_ip1, self.transmit_port))
            print("UI向Server1发送第二帧数据!")
            while True:
                time.sleep(1)  # poll at most once per second
                data, address = self.socket_2.recvfrom(4096)
                frame = DAAFrame()
                frame.unpack(data)
                if frame.header == 0x3c4f and frame.type == 0xa4: # data frame from server
                    datas = json.loads(frame.body.decode("utf-8"))
                    self.signal_a.emit("server1",datas)
class Communication_Server2_Thread(QThread):
    """UDP handshake/receive thread for server 2 (xhx, request port 10001).

    Same two-step handshake as server 1, but incoming datagrams are
    reassembled through DataParser before JSON decoding. Emits
    ``signal_a("server2", datas)`` for every payload.
    """
    signal_a = pyqtSignal(str,list)
    '''
    Handshake step 1: the UI sends its display-geometry frame to the server
    to initiate the connection; the server responds with a new data port.
    '''
    def __init__(self):
        super(Communication_Server2_Thread, self).__init__()
        self.socket_1 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.transmit_port = 0 # new port
        self.socket_2 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    def run(self):
        frame = DAAFrame() # frame that initiates the connection
        frame.header = 0x4f3c #UI to Server
        frame.type = 0xa1
        # frame.body = 0x0400030001AA016A01AA012E025602F401980144.to_bytes(length=20, byteorder='little',signed=False) # screen 1024x768
        frame.body = 0x040003000000000000F800A000000000012000D4.to_bytes(length=20, byteorder='little',signed=False) # screen 1024x768; altitude layer A 248x160, layer B 288x212
        frame.length = 29
        frame.checksum = checksum(list(frame.pack()[:-2]))
        self.socket_1.sendto(frame.pack(), (server_ip1, request_port2))
        print("UI向Server2发送第一帧数据!")
        while True:
            data, address = self.socket_1.recvfrom(2048)
            frame = DAAFrame()
            frame.unpack(data)
            if frame.header == 0x3c4f and frame.type == 0xa2: # server accepted; body holds the data port
                temp_port = frame.body.hex()
                low_2 = temp_port[-2:]
                high_2 = temp_port[0:2]
                temp_port = low_2+high_2 # swap high/low hex bytes (little-endian port)
                self.transmit_port = int(temp_port, 16)
                break
        if self.transmit_port:
            '''
            Handshake step 2: the UI confirms the connection on the newly
            negotiated port; the server then streams UI data back.
            '''
            # NOTE(review): binds to server_ip1 rather than a local/any
            # address — verify this process actually runs on 192.168.1.2.
            self.socket_2.bind((server_ip1, 10011))
            self.data_parser = DataParser()
            frame = DAAFrame() # connection-confirmation frame
            frame.header = 0x4f3c #UI to Server
            frame.type = 0xa3 # connection confirmation
            frame.body = b''
            frame.length = 9
            frame.checksum = checksum(list(frame.pack()[:-2]))
            self.socket_2.sendto(frame.pack(), (server_ip1, self.transmit_port))
            print("UI向Server2发送第二帧数据!")
            while True:
                data, address = self.socket_2.recvfrom(4096)
                # DataParser reassembles fragmented frames; returns None until
                # a complete frame is available.
                parsed = self.data_parser.accept(data)
                if not parsed:
                    continue
                frame_type, frame_body = parsed
                if frame_type == 0xa4: # data frame from server
                    datas = json.loads(frame_body.decode("utf-8"))
                    self.signal_a.emit("server2",datas)
class Communication_Server3_Thread(QThread):
    """UDP handshake/receive thread for server 3 (xct, request port 9998 on
    server_ip2). Emits ``signal_a("server3", datas)`` for every payload.
    """
    signal_a = pyqtSignal(str,list)
    '''
    Handshake step 1: the UI sends its display-geometry frame to the server
    to initiate the connection; the server responds with a new data port.
    '''
    def __init__(self):
        super(Communication_Server3_Thread, self).__init__()
        self.socket_1 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.transmit_port = 0 # new port
        self.socket_2 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    def run(self):
        frame = DAAFrame() # frame that initiates the connection
        frame.header = 0x4f3c #UI to Server
        frame.type = 0xa1
        frame.body = 0x040003000000000000F800A000000000012000D4.to_bytes(length=20, byteorder='little',signed=False) # screen 1024x768; altitude layer A 248x160, layer B 288x212
        # frame.body = 0x0400030001AA016A01AA012E025602F401980144.to_bytes(length=20, byteorder='little',signed=False)
        frame.length = 29
        frame.checksum = checksum(list(frame.pack()[:-2]))
        self.socket_1.sendto(frame.pack(), (server_ip2, request_port3))
        print("UI向Server3发送第一帧数据!")
        while True:
            data, address = self.socket_1.recvfrom(2048)
            frame = DAAFrame()
            frame.unpack(data)
            if frame.header == 0x3c4f and frame.type == 0xa2: # server accepted; body holds the data port
                temp_port = frame.body.hex()
                low_2 = temp_port[-2:]
                high_2 = temp_port[0:2]
                temp_port = low_2+high_2 # swap high/low hex bytes (little-endian port)
                self.transmit_port = int(temp_port, 16)
                break
        if self.transmit_port:
            '''
            Handshake step 2: the UI confirms the connection on the newly
            negotiated port; the server then streams UI data back.
            '''
            # NOTE(review): binds to server_ip1 but exchanges data with
            # server_ip2 — confirm the bind address is intentional.
            self.socket_2.bind((server_ip1, 10012))
            frame = DAAFrame() # connection-confirmation frame
            frame.header = 0x4f3c #UI to Server
            frame.type = 0xa3 # connection confirmation
            frame.body = b''
            # NOTE(review): servers 1/2 use length 9 for the same empty-body
            # confirmation frame — verify 10 is what server 3 expects.
            frame.length = 10
            frame.checksum = checksum(list(frame.pack()[:-2]))
            print("UI向Server3发送第二帧数据!")
            self.socket_2.sendto(frame.pack(), (server_ip2, self.transmit_port))
            while True:
                data, address = self.socket_2.recvfrom(2048)
                frame = DAAFrame()
                frame.unpack(data)
                if frame.header == 0x3c4f and frame.type == 0xa4: # data frame from server
                    datas = json.loads(frame.body.decode("utf-8"))
                    self.signal_a.emit("server3",datas)
class MainWindow(QMainWindow):
def __init__(self, parent=None):
super(MainWindow, self).__init__(parent)
self.setWindowFlags(Qt.FramelessWindowHint)
self.ui = daa_ui1.Ui_MainWindow()
self.ui.setupUi(self)
self.ui.frame.setStyleSheet("background:black")
self.ui.tabWidget.tabBar().hide() #隐藏
self.ui.btn_jump1.clicked.connect(self.update_tab1)
self.ui.btn_jump2.clicked.connect(self.update_tab2)
self.ui.btn_jump1.setStyleSheet("background:transparent")
self.ui.btn_jump2.setStyleSheet("background:transparent")
self.ui.frame_1.setStyleSheet("background:black")
self.ui.frame_2.setStyleSheet("background:black")
self.ui.frame_3.setStyleSheet("background:black")
self.ui.frame_4.setStyleSheet("background:black")
self.ui.frame_5.setStyleSheet("background:black")
self.ui.frame_6.setStyleSheet("background:black")
self.ui.frame_7.setStyleSheet("background:black")
self.ui.listWidget_1.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff) #禁用竖直滑动条
self.ui.listWidget_1.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff) #禁用水平滑动条
self.target_white_pix = QPixmap("pic/benji.png")
self.target_yellow_pix = QPixmap("pic/qtfeiji1.png") # 黄色
self.target_red_pix = QPixmap("pic/qtfeiji2.png") # 红色
self.target_green_pix = QPixmap("pic/qtfeiji3.png") # 绿色
self.ui.tab1_own_pix.setVisible(False)
self.ui.tab2_own_pix.setVisible(False)
self.old_ownship_id = ""
self.old_target_id = [""]*30
self.old_tab1_target_id_list = []
self.old_tab2_target_id_list = []
self.tab1_dict_temp = {} # key:flight_id value:target_index
self.tab2_dict_temp = {} # key:flight_id value:target_index
self.tab1_new_num = 0
self.tab2_new_num = 0
for i in range(1,6):
eval("self.ui.tab1_target" + str(i)+"_info.setVisible(False)")
eval("self.ui.tab1_target" + str(i) + "_pix.setVisible(False)")
eval("self.ui.tab2_target" + str(i) + "_info.setVisible(False)")
eval("self.ui.tab2_target" + str(i) + "_pix.setVisible(False)")
eval("self.ui.tab1_target" + str(i)+"_info").setStyleSheet("color:white;background-color:transparent;font-size:10px")
eval("self.ui.tab2_target" + str(i) + "_info").setStyleSheet("color:white;background-color:transparent;font-size:10px")
eval("self.ui.tab1_target" + str(i) + "_info").setGeometry(QRect(0, 0, 66, 60))
eval("self.ui.tab2_target" + str(i) + "_info").setGeometry(QRect(0, 0, 66, 60))
for i in range(0,500):#listWidget1创建500个刻度条 1个刻度条代表100m海拔高度
if i % 10 == 0:
label = QLabel("-"+str(50000 - i*100))
label.setFont(QFont("宋体", 8))
label.setStyleSheet("color:white;background-color:black")
label.setFixedSize(35, 10)
item = QListWidgetItem() # 创建QListWidgetItem对象
item.setSizeHint(QSize(45, 10)) # 设置QListWidgetItem大小
self.ui.listWidget_1.addItem(item) # 添加item
self.ui.listWidget_1.setItemWidget(item, label) # 为item设置widget
else:
label = QLabel("-") # 头像显示
label.setFont(QFont("宋体", 7))
label.setStyleSheet("color:white;background-color:black")
label.setFixedSize(10, 8)
item = QListWidgetItem() # 创建QListWidgetItem对象
item.setSizeHint(QSize(45, 8)) # 设置QListWidgetItem大小
self.ui.listWidget_1.addItem(item) # 添加item
self.ui.listWidget_1.setItemWidget(item, label) # 为item设置widget
self.ui.listWidget_1.setCurrentRow(500 - 390 + 16) # 刻度标移到39000
self.ui.listWidget_2.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff) #禁用竖直滑动条
self.ui.listWidget_2.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff) #禁用水平滑动条
for i in range(0,700):#listWidget2创建700个刻度条 1刻度条1飞行速度
if i % 10 == 0:
label = QLabel("-"+str(700 - i))
label.setFont(QFont("宋体", 8))
label.setStyleSheet("color:white;background-color:black")
label.setFixedSize(25, 10)
item = QListWidgetItem() # 创建QListWidgetItem对象
item.setSizeHint(QSize(45, 10)) # 设置QListWidgetItem大小
self.ui.listWidget_2.addItem(item) # 添加item
self.ui.listWidget_2.setItemWidget(item, label) # 为item设置widget
else:
label = QLabel("-") # 头像显示
label.setFont(QFont("宋体", 7))
label.setStyleSheet("color:white;background-color:black")
label.setFixedSize(10, 8)
item = QListWidgetItem() # 创建QListWidgetItem对象
item.setSizeHint(QSize(45, 8)) # 设置QListWidgetItem大小
self.ui.listWidget_2.addItem(item) # 添加item
self.ui.listWidget_2.setItemWidget(item, label) # 为item设置widget
self.ui.listWidget_2.setCurrentRow(700 - 486 + 17) # 刻度标移到486
url = os.getcwd() + '/map/map_a.html'
self.browser = QWebEngineView()
self.browser.load(QUrl.fromLocalFile(url))
pen_white = QPen(Qt.white)
pen_blue = QPen(Qt.blue)
pen_blue.setStyle(Qt.DashLine)
# tab1 地图模块及信息显示模块
self.ui.horizontalLayout_1.addWidget(self.browser)
self.ui.tabelWidget_1.horizontalHeader().setSectionResizeMode(QHeaderView.Stretch)
self.ui.tabelWidget_1.horizontalHeader().setStyleSheet("QHeaderView::section{background:black;color:white}")
self.ui.tabelWidget_1.verticalHeader().setVisible(False)
# tab1 罗盘区域 view2
self.ui.horizontalLayoutWidget_3.setGeometry(QRect(40, 30, 302, 302))
self.view_2 = QGraphicsView() #创建视图窗口
self.view_2.setRenderHint(QPainter.Antialiasing)
self.view_2.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.view_2.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.ui.horizontalLayout_2.addWidget(self.view_2)
self.view_2.setStyleSheet("background:transparent;border:0px")
self.view_2.setWindowFlags(Qt.FramelessWindowHint)
self.view_2.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.view_2.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.scene_2 = QGraphicsScene(self) #绘制画布
self.view_2.setScene(self.scene_2)
self.view_2.setStyleSheet("border:0px")
self.compass1_item = MyCompass1_Item() #罗盘
self.scene_2.addItem(self.compass1_item)
centerPos1 = self.compass1_item.boundingRect().center() #设置罗盘按中心点旋转
self.compass1_item.setTransformOriginPoint(centerPos1)
pixmap_ownship = QPixmap("pic/feiji1.png") #本机图标
self.ownship_item = self.scene_2.addPixmap(pixmap_ownship)
self.ownship_item.setPos(130, 130)
pixmap_chuizhixian = QPixmap("pic/chuizhixian1.png") #垂直条图标
self.chuizhixian_item = self.scene_2.addPixmap(pixmap_chuizhixian)
self.chuizhixian_item.setPos(140,0)
pixmap_xiaoyuan = QPixmap("pic/xiaoyuan.png") #小圆图标
self.xiaoyuan_item = self.scene_2.addPixmap(pixmap_xiaoyuan)
self.xiaoyuan_item.setPos(0, 0)
self.ARC_item = MyArc1_Item()
self.scene_2.addItem(self.ARC_item)
#tab2 UI
self.ui.btn1.setStyleSheet("QPushButton{border-image:url(button2.png)}")
self.ui.btn2.setStyleSheet("QPushButton{border-image:url(button2.png)}")
self.ui.btn3.setStyleSheet("QPushButton{border-image:url(button2.png)}")
#tab2 罗盘区域 view5
self.ui.horizontalLayoutWidget_5.setGeometry(QRect(80, 130, 484, 484))
self.view_5 = QGraphicsView() #创建视图窗口
self.view_5.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.view_5.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.ui.horizontalLayout_5.addWidget(self.view_5)
self.scene_5 = QGraphicsScene(self)
self.view_5.setScene(self.scene_5)
self.view_5.setStyleSheet("border:0px")
self.compass2_item = MyCompass2_Item()
self.scene_5.addItem(self.compass2_item)
self.ARC2_item = MyArc2_Item()
self.scene_5.addItem(self.ARC2_item)
pixmap_ownship = QPixmap("pic/feiji1.png") # 本机图标
self.ownship_item = self.scene_5.addPixmap(pixmap_ownship)
self.ownship_item.setPos(220, 220)
pixmap_chuizhixian = QPixmap("pic/chuizhixian1.png")
pixmap_chuizhixian1 = pixmap_chuizhixian.scaled(33, 483, aspectRatioMode=Qt.KeepAspectRatioByExpanding)
self.chuizhixian_item = self.scene_5.addPixmap(pixmap_chuizhixian1)
self.chuizhixian_item.setPos(223,-1)
pixmap_xiaoyuan = QPixmap("pic/luopan2.png") #小圆图标
self.xiaoyuan_item = self.scene_5.addPixmap(pixmap_xiaoyuan)
self.xiaoyuan_item.setPos(0, 0)
self.worker1 = Communication_Server1_Thread()
self.worker1.signal_a.connect(self.update_UI)
self.worker1.start()
self.worker2 = Communication_Server2_Thread()
self.worker2.signal_a.connect(self.update_UI)
self.worker2.start()
self.worker3 = Communication_Server3_Thread()
self.worker3.signal_a.connect(self.update_UI)
self.worker3.start()
def update_tab1(self):
    """Switch the main tab widget to the page at index 1.

    Slot for the tab-navigation button; all target/compass rendering is
    driven separately by update_UI, so this only changes the visible page.

    NOTE(review): the method name says "tab1" but index 1 (the second
    page) is selected, while update_tab2 selects index 0 — confirm the
    intended page ordering.
    """
    # Removed ~190 lines of commented-out prototype code (an earlier,
    # hard-coded version of the rendering now performed by update_UI).
    self.ui.tabWidget.setCurrentIndex(1)
def update_tab2(self):
    """Switch the main tab widget to the page at index 0.

    NOTE(review): the method name says "tab2" but index 0 (the first
    page) is selected, mirroring update_tab1 which selects index 1 —
    confirm the intended page ordering.
    """
    self.ui.tabWidget.setCurrentIndex(0)
def mouseMoveEvent(self, e: QMouseEvent):  # override: drag the frameless window
    """Move the window by the cursor delta while a left-button drag is active.

    The original wrapped the body in a bare try/except and printed a
    traceback: whenever the mouse moved without a prior left-button press,
    ``self._startPos`` was None and the subtraction raised TypeError,
    spamming the console on every move event. Guard explicitly instead.
    """
    if getattr(self, '_isTracking', False) and self._startPos is not None:
        self._endPos = e.pos() - self._startPos
        self.move(self.pos() + self._endPos)
def mousePressEvent(self, e: QMouseEvent):
    """Begin window-drag tracking when the left mouse button is pressed."""
    if e.button() != Qt.LeftButton:
        return
    self._isTracking = True
    self._startPos = QPoint(e.x(), e.y())
def mouseReleaseEvent(self, e: QMouseEvent):
    """End window-drag tracking when the left mouse button is released."""
    if e.button() != Qt.LeftButton:
        return
    self._isTracking = False
    self._startPos = None
    self._endPos = None
def update_UI(self,server,infos):
    """Dispatch one batch of display updates received from a server thread.

    Connected to ``signal_a`` of all three Communication_Server threads.

    Parameters:
        server: identifier of the worker thread that produced the data
                (only useful for debugging; unused here).
        infos:  list of dicts, each with an 'id' string selecting a UI
                element and a 'value' payload whose shape depends on the id.

    Any exception is caught at the end and printed so a malformed message
    cannot kill the GUI event loop.

    NOTE(review): target widgets/items are addressed by composed attribute
    names via eval()/exec() — presumably safe because the names come only
    from internal counters, but worth replacing with dict lookups.
    """
    # print("receive from: "+server)
    # print("Infos: "+ str(infos))
    # self.ui.tabelWidget_1.setRowCount(0)  # reset tabelWidget_1
    # self.ui.tabelWidget_1.clearContents()
    try:
        for item in infos:
            # server3
            if item['id'] == '010304':  # ownship/target altitude-layer display, 426*302
                target_index = 1  # target aircraft index
                for air_info in item['value']:
                    air_type = air_info['isSelf']  # 1 = ownship, 0 = target
                    if air_type:  # ownship is drawn white by default
                        self.ui.tab1_own_pix.setVisible(True)
                        x = air_info['x']
                        y = air_info['y']
                        self.ui.tab1_own_pix.setGeometry(x+180,y+140,25,10)  # (180, 140) is the origin
                    else:
                        x = air_info['x']
                        y = air_info['y']
                        x_geo = 180 + int(1.5*x)
                        if y >= 0:
                            y_geo = 130 - int(y/(160*2)*100)
                        else:
                            y_geo = 130 + int(y/(160*2)*100)
                        # only draw targets that fall inside the visible panel
                        if y_geo >= 45 and y_geo <= 245 and x_geo >= 40 and x_geo <= 350:
                            eval("self.ui.tab1_target" + str(target_index)+"_pix.setVisible(True)")
                            eval("self.ui.tab1_target" + str(target_index) + "_info.setVisible(True)")
                            air_color = air_info['color']
                            if air_color == '#FFFFFF':  # white
                                eval("self.ui.tab1_target" + str(target_index) + "_pix").setPixmap(QPixmap("pic/qtfeiji0.png"))
                                eval("self.ui.tab1_target" + str(target_index) + "_info").setStyleSheet(
                                    "color:white;background-color:transparent;font-size:10px")
                            if air_color == '#FFFF00':  # yellow
                                eval("self.ui.tab1_target" + str(target_index) + "_pix").setPixmap(QPixmap("pic/qtfeiji1.png"))
                                eval("self.ui.tab1_target" + str(target_index) + "_info").setStyleSheet("color:yellow;background-color:transparent;font-size:10px")
                            if air_color == '#FF0000':  # red
                                eval("self.ui.tab1_target" + str(target_index) + "_pix").setPixmap(QPixmap("pic/qtfeiji2.png"))
                                eval("self.ui.tab1_target" + str(target_index) + "_info").setStyleSheet(
                                    "color:red;background-color:transparent;font-size:10px")
                            if air_color == '#00FF00':  # green
                                eval("self.ui.tab1_target" + str(target_index) + "_pix").setPixmap(QPixmap("pic/qtfeiji3.png"))
                                eval("self.ui.tab1_target" + str(target_index) + "_info").setStyleSheet(
                                    "color:green;background-color:transparent;font-size:10px")
                            eval("self.ui.tab1_target" + str(target_index) + "_pix").setGeometry(x_geo, y_geo, 25, 10)
                            describe = air_info['describe']
                            eval("self.ui.tab1_target" + str(target_index) + "_info").setText(" "+describe)
                            eval("self.ui.tab1_target" + str(target_index) + "_info").setGeometry(x_geo-20,y_geo+10, 66, 20)
                        target_index += 1
            if item['id'] == '020301':  # ownship/target altitude-layer display, 408*324
                target_index = 1  # target aircraft index
                for air_info in item['value']:
                    air_type = air_info['isSelf']  # 1 = ownship, 0 = target
                    if air_type:  # ownship is drawn white by default
                        self.ui.tab2_own_pix.setVisible(True)
                        x = air_info['x']
                        y = air_info['y']
                        self.ui.tab2_own_pix.setGeometry(x + 205, y + 145, 25, 10)  # (205, 145) is the origin
                    else:
                        x = air_info['x']
                        y = air_info['y']
                        x_geo = 205 + int(x*145/380)
                        if y>=0:
                            y_geo = 145 - int(y*60/268)
                        else:
                            y_geo = 145 + int(y*60/268)
                        # only draw targets that fall inside the visible panel
                        if y_geo>=30 and y_geo<=275 and x_geo>=45 and x_geo<=355:
                            eval("self.ui.tab2_target" + str(target_index) + "_pix.setVisible(True)")
                            eval("self.ui.tab2_target" + str(target_index) + "_info.setVisible(True)")
                            air_color = air_info['color']
                            if air_color == '#FFFFFF':  # white
                                eval("self.ui.tab2_target" + str(target_index) + "_pix").setPixmap(
                                    QPixmap("pic/qtfeiji0.png"))
                                eval("self.ui.tab2_target" + str(target_index) + "_info").setStyleSheet(
                                    "color:white;background-color:transparent;font-size:10px")
                            if air_color == '#FFFF00':  # yellow
                                eval("self.ui.tab2_target" + str(target_index) + "_pix").setPixmap(
                                    QPixmap("pic/qtfeiji1.png"))
                                eval("self.ui.tab2_target" + str(target_index) + "_info").setStyleSheet(
                                    "color:yellow;background-color:transparent;font-size:10px")
                            if air_color == '#FF0000':  # red
                                eval("self.ui.tab2_target" + str(target_index) + "_pix").setPixmap(
                                    QPixmap("pic/qtfeiji2.png"))
                                eval("self.ui.tab2_target" + str(target_index) + "_info").setStyleSheet(
                                    "color:red;background-color:transparent;font-size:10px")
                            if air_color == '#00FF00':  # green
                                eval("self.ui.tab2_target" + str(target_index) + "_pix").setPixmap(
                                    QPixmap("pic/qtfeiji3.png"))
                                eval("self.ui.tab2_target" + str(target_index) + "_info").setStyleSheet(
                                    "color:green;background-color:transparent;font-size:10px")
                            eval("self.ui.tab2_target" + str(target_index) + "_pix").setGeometry(x_geo, y_geo, 25,10)
                            describe = air_info['describe']
                            eval("self.ui.tab2_target" + str(target_index) + "_info").setText(" " + describe)
                            eval("self.ui.tab2_target" + str(target_index) + "_info").setGeometry(x_geo-20, y_geo+10, 66,20)
                        target_index += 1
            # server1
            if item['id'] == '010101':  # tab-1 target aircraft list  done
                target_num = len(item['value']['content'])  # number of target aircraft
                self.ui.tabelWidget_1.setRowCount(target_num + 1)  # row count of tabelWidget_1
                ownship_id = item['value']['header'][0]
                ownship_alt = item['value']['header'][1]
                ownship_lon = item['value']['header'][2]
                ownship_lat = item['value']['header'][3]
                # ownship changed: rebuild the map overlays from scratch
                if self.old_ownship_id!= ownship_id:
                    self.browser.page().runJavaScript("remove_overlay();")
                    js_string_own_init = '''init_ownship(%f,%f,'%s');''' % (
                        float(ownship_lon), float(ownship_lat), ownship_id)
                    self.browser.page().runJavaScript(js_string_own_init)
                    self.old_ownship_id = ownship_id
                ownship_speed = item['value']['header'][4]
                ownship_v_speed = item['value']['header'][5]
                ownship_angle = item['value']['header'][6]
                ownship_id_item = QTableWidgetItem(ownship_id)
                ownship_id_item.setTextAlignment(Qt.AlignCenter | Qt.AlignVCenter)  # centered
                ownship_id_item.setForeground(QColor(255, 255, 255))  # white text
                ownship_alt_item = QTableWidgetItem(ownship_alt)
                ownship_alt_item.setTextAlignment(Qt.AlignCenter | Qt.AlignVCenter)
                ownship_alt_item.setForeground(QColor(255, 255, 255))
                ownship_lon_item = QTableWidgetItem(ownship_lon)
                ownship_lon_item.setTextAlignment(Qt.AlignCenter | Qt.AlignVCenter)
                ownship_lon_item.setForeground(QColor(255, 255, 255))
                ownship_lat_item = QTableWidgetItem(ownship_lat)
                ownship_lat_item.setTextAlignment(Qt.AlignCenter | Qt.AlignVCenter)
                ownship_lat_item.setForeground(QColor(255, 255, 255))
                ownship_speed_item = QTableWidgetItem(ownship_speed)
                ownship_speed_item.setTextAlignment(Qt.AlignCenter | Qt.AlignVCenter)
                ownship_speed_item.setForeground(QColor(255, 255, 255))
                ownship_v_speed_item = QTableWidgetItem(ownship_v_speed)
                ownship_v_speed_item.setTextAlignment(Qt.AlignCenter | Qt.AlignVCenter)
                ownship_v_speed_item.setForeground(QColor(255, 255, 255))
                ownship_angle_item = QTableWidgetItem(ownship_angle)
                ownship_angle_item.setTextAlignment(Qt.AlignCenter | Qt.AlignVCenter)
                ownship_angle_item.setForeground(QColor(255, 255, 255))
                self.ui.tabelWidget_1.setItem(0, 0, ownship_id_item)
                self.ui.tabelWidget_1.setItem(0, 1, ownship_alt_item)
                self.ui.tabelWidget_1.setItem(0, 2, ownship_lon_item)
                self.ui.tabelWidget_1.setItem(0, 3, ownship_lat_item)
                self.ui.tabelWidget_1.setItem(0, 4, ownship_speed_item)
                self.ui.tabelWidget_1.setItem(0, 5, ownship_v_speed_item)
                self.ui.tabelWidget_1.setItem(0, 6, ownship_angle_item)
                # target aircraft
                for index in range(0, target_num):
                    target_id = item['value']['content'][index][0]
                    target_alt = item['value']['content'][index][1]
                    target_lon = item['value']['content'][index][2]
                    target_lat = item['value']['content'][index][3]
                    if self.old_target_id[index] != target_id:
                        # self.browser.page().runJavaScript("remove_overlay();")
                        js_string_target_init = '''init_target(%d,%f,%f,'%s');''' % (
                            index + 1, float(target_lon), float(target_lat), target_id)
                        print(js_string_target_init)
                        self.browser.page().runJavaScript(js_string_target_init)
                        self.old_target_id[index] = target_id
                    target_speed = item['value']['content'][index][4]
                    target_v_speed = item['value']['content'][index][5]
                    target_angle = item['value']['content'][index][6]
                    target_id_item = QTableWidgetItem(target_id)
                    target_id_item.setTextAlignment(Qt.AlignCenter | Qt.AlignVCenter)  # centered
                    target_id_item.setForeground(QColor(255, 255, 255))  # white text
                    target_alt_item = QTableWidgetItem(target_alt)
                    target_alt_item.setTextAlignment(Qt.AlignCenter | Qt.AlignVCenter)
                    target_alt_item.setForeground(QColor(255, 255, 255))
                    target_lon_item = QTableWidgetItem(target_lon)
                    target_lon_item.setTextAlignment(Qt.AlignCenter | Qt.AlignVCenter)
                    target_lon_item.setForeground(QColor(255, 255, 255))
                    target_lat_item = QTableWidgetItem(target_lat)
                    target_lat_item.setTextAlignment(Qt.AlignCenter | Qt.AlignVCenter)
                    target_lat_item.setForeground(QColor(255, 255, 255))
                    target_speed_item = QTableWidgetItem(target_speed)
                    target_speed_item.setTextAlignment(Qt.AlignCenter | Qt.AlignVCenter)
                    target_speed_item.setForeground(QColor(255, 255, 255))
                    target_v_speed_item = QTableWidgetItem(target_v_speed)
                    target_v_speed_item.setTextAlignment(Qt.AlignCenter | Qt.AlignVCenter)
                    target_v_speed_item.setForeground(QColor(255, 255, 255))
                    target_angle_item = QTableWidgetItem(target_angle)
                    target_angle_item.setTextAlignment(Qt.AlignCenter | Qt.AlignVCenter)
                    target_angle_item.setForeground(QColor(255, 255, 255))
                    self.ui.tabelWidget_1.setItem(index + 1, 0, target_id_item)
                    self.ui.tabelWidget_1.setItem(index + 1, 1, target_alt_item)
                    self.ui.tabelWidget_1.setItem(index + 1, 2, target_lon_item)
                    self.ui.tabelWidget_1.setItem(index + 1, 3, target_lat_item)
                    self.ui.tabelWidget_1.setItem(index + 1, 4, target_speed_item)
                    self.ui.tabelWidget_1.setItem(index + 1, 5, target_v_speed_item)
                    self.ui.tabelWidget_1.setItem(index + 1, 6, target_angle_item)
            if item['id'] == '010501':  # collision-avoidance message  done
                self.ui.label_9.setText(item['value'])
            if item['id'] == '010301':  # altitude value on the scale  done
                self.ui.angle_kedu_txt.setText(str(int(item['value'])))
                self.ui.listWidget_1.setCurrentRow(516 - int(item['value'] / 100))
            if item['id'] == '010305':  # three altitude-layer labels  done
                self.ui.alt_label1.setText(item['value'][2])
                self.ui.alt_label2.setText(item['value'][1])
                self.ui.alt_label3.setText(item['value'][0])
            if item['id'] == '010306':  # alert bands and their colors
                for item_info in item['value']:
                    lowerBound = int(item_info['lowerBound'])
                    upperBound = int(item_info['upperBound'])
                    status = item_info['status']
                    if lowerBound < 0:
                        lowerBound = 0
                    if upperBound < 0:
                        upperBound = 0
                    # print(status, lowerBound, upperBound)
                    lower_index = int((50000 - lowerBound) / 100)
                    upper_index = int((50000 - upperBound) / 100)
                    if status == 0:  # 999999 gray
                        for i in range(upper_index, lower_index):
                            self.ui.listWidget_1.itemWidget(self.ui.listWidget_1.item(i)).setStyleSheet(
                                "color:white;background-color:gray")
                    if status == 1 or status == 2:  # ffff00 yellow
                        for i in range(upper_index, lower_index):
                            self.ui.listWidget_1.itemWidget(self.ui.listWidget_1.item(i)).setStyleSheet(
                                "color:white;background-color:#cccc33")
                    if status == 3:  # ff0000 red
                        for i in range(upper_index, lower_index):
                            self.ui.listWidget_1.itemWidget(self.ui.listWidget_1.item(i)).setStyleSheet(
                                "color:white;background-color:#ff0000")
                    if status == 4:  # 00ff00 green
                        for i in range(upper_index, lower_index):
                            self.ui.listWidget_1.itemWidget(self.ui.listWidget_1.item(i)).setStyleSheet(
                                "color:white;background-color:lightgreen")
            if item['id'] == '020201':  # RA/TA/CLC text  done
                self.ui.textBrowser.setText(item['value']['ra']['location1'])
                self.ui.textBrowser_2.setText(item['value']['ra']['location2'])
                self.ui.textBrowser_3.setText(item['value']['ra']['speed'])
            if item['id'] == '020302':  # five altitude-layer labels  done
                self.ui.alt_label1_2.setText(item['value'][4])
                self.ui.alt_label2_2.setText(item['value'][3])
                self.ui.alt_label3_2.setText(item['value'][2])
                self.ui.alt_label4.setText(item['value'][1])
                self.ui.alt_label5.setText(item['value'][0])
            # server2
            if item['id'] == '010201':  # rotation angle text  done
                self.ui.txt_angle.setText(str(int(item['value'])))
            if item['id'] == '010202':  # intruder list: draw intruders on the compass
                for flight_id in self.tab1_dict_temp:  # hide all known intruder items first
                    eval("self.tab1_target" + str(self.tab1_dict_temp[flight_id]) + "_item.setVisible(False)")
                for item_info in item['value']:
                    flight_id = item_info['flightID']
                    if flight_id not in self.old_tab1_target_id_list:
                        self.old_tab1_target_id_list.append(flight_id)
                        # create a scene item for the newly seen intruder
                        # (NOTE(review): it is only drawn from the next message on)
                        # self.tab1_target1_item = QGraphicsPixmapItem()
                        # self.tab1_target1_item.setVisible()
                        exec("self.tab1_target" + str(self.tab1_new_num + 1) + "_item = QGraphicsPixmapItem()")
                        exec("self.scene_2.addItem(self.tab1_target"+str(self.tab1_new_num+1)+"_item)")
                        self.tab1_dict_temp[flight_id] =self.tab1_new_num+1
                        self.tab1_new_num+=1
                    else:
                        x = item_info['x']
                        y = item_info['y']
                        angle = item_info['dir']
                        status = item_info['status']
                        x_view = 130 + 150 * x
                        y_view = 130 + 150 * y
                        y_view = 300 - y_view
                        # first check the intruder position against the 300*300 view
                        if x_view < 15 or y_view < 20 or x_view > 285 or y_view > 280:
                            pass
                        else:
                            eval("self.tab1_target" + str(self.tab1_dict_temp[flight_id]) + "_item.setVisible(True)")
                            if status == 0:  # 999999 gray
                                gray_pix = QPixmap("pic/target-gray.png")
                                eval("self.tab1_target" + str(self.tab1_dict_temp[flight_id]) + "_item.setPixmap(gray_pix)")
                                eval("self.tab1_target" + str(self.tab1_dict_temp[flight_id]) + "_item.setPos(x_view, y_view)")
                                eval("self.tab1_target" + str(self.tab1_dict_temp[flight_id]) + "_item.setRotation(angle*180/3.14)")
                            if status == 1 or status == 2:  # ffff00 yellow
                                yellow_pix = QPixmap("pic/target-yellow.png")
                                eval("self.tab1_target" + str(self.tab1_dict_temp[flight_id]) + "_item.setPixmap(yellow_pix)")
                                eval("self.tab1_target" + str(self.tab1_dict_temp[flight_id]) + "_item.setPos(x_view, y_view)")
                                eval("self.tab1_target" + str(self.tab1_dict_temp[flight_id]) + "_item.setRotation(angle*180/3.14)")
                            if status == 3:  # ff0000 red
                                red_pix = QPixmap("pic/target-red.png")
                                eval("self.tab1_target" + str(self.tab1_dict_temp[flight_id]) + "_item.setPixmap(red_pix)")
                                eval("self.tab1_target" + str(self.tab1_dict_temp[flight_id]) + "_item.setPos(x_view, y_view)")
                                eval("self.tab1_target" + str(self.tab1_dict_temp[flight_id]) + "_item.setRotation(angle*180/3.14)")
                            if status == 4:  # 00ff00 green
                                green_pix = QPixmap("pic/target-green.png")
                                eval("self.tab1_target" + str(self.tab1_dict_temp[flight_id]) + "_item.setPixmap(green_pix)")
                                eval("self.tab1_target" + str(self.tab1_dict_temp[flight_id]) + "_item.setPos(x_view, y_view)")
                                eval("self.tab1_target" + str(self.tab1_dict_temp[flight_id]) + "_item.setRotation(angle*180/3.14)")
            if item['id'] == '010203':  # compass arc drawing  done
                self.ARC_item.update_arc(item['value'])
            if item['id'] == '010204':  # compass rotation angle  done
                self.compass1_item.setAngle(360 - int(item['value']))
            if item['id'] == '010205':  # ownship ground speed  done
                self.ui.label_6.setText(str(int(item['value'])))
                self.ui.listWidget_2.setCurrentRow(700+17 - int(item['value']))  # move the scale marker
            if item['id'] == '010206':  # ground-speed alert bands and their colors
                for item_info in item['value']:
                    lowerBound = int(item_info['lowerBound'])
                    upperBound = int(item_info['upperBound'])
                    status = item_info['status']
                    # print("lowerBound:"+str(lowerBound))
                    # print("upperBound:" + str(upperBound))
                    # print("status:" + str(status))
                    lower_index = int(700 - lowerBound)
                    upper_index = int(700 - upperBound)
                    # print(lower_index)
                    # print(upper_index)
                    if status == 0:  # 999999 gray
                        for i in range(upper_index, lower_index + 1):
                            self.ui.listWidget_2.itemWidget(self.ui.listWidget_2.item(i)).setStyleSheet(
                                "color:white;background-color:gray")
                    if status == 1 or status == 2:  # ffff00 yellow
                        for i in range(upper_index, lower_index + 1):
                            self.ui.listWidget_2.itemWidget(self.ui.listWidget_2.item(i)).setStyleSheet(
                                "color:white;background-color:#cccc33")
                    if status == 3:  # ff0000 red
                        for i in range(upper_index, lower_index + 1):
                            self.ui.listWidget_2.itemWidget(self.ui.listWidget_2.item(i)).setStyleSheet(
                                "color:white;background-color:#ff0000")
                    if status == 4:  # 00ff00 green
                        for i in range(upper_index, lower_index + 1):
                            self.ui.listWidget_2.itemWidget(self.ui.listWidget_2.item(i)).setStyleSheet(
                                "color:white;background-color:lightgreen")
            if item['id'] == '020103':  # compass step size  done
                self.ui.label_step1.setText(str(item['value'] * 2))
                self.ui.label_step2.setText(str(item['value']))
                self.ui.label_step3.setText(str(item['value'] * -1))
                self.ui.label_step4.setText(str(item['value'] * -2))
                self.ui.label_step4_2.setText(str(item['value'] * 2))
                self.ui.label_step4_3.setText(str(item['value']))
                self.ui.label_step4_5.setText(str(item['value'] * -1))
                self.ui.label_step4_6.setText(str(item['value'] * -2))
                self.ui.label_step4_1.setText(str(item['value'] * 2))
                self.ui.label_step4_7.setText(str(item['value']))
                self.ui.label_step4_8.setText(str(item['value'] * -1))
                self.ui.label_step4_10.setText(str(item['value'] * -2))
            if item['id'] == '020104':  # aircraft info panel  done
                self.ui.label_11.setText(str(item['value']['location']))
                self.ui.label_12.setText(str(item['value']['description']))
            if item['id'] == '020105':  # compass arc drawing  done
                self.ARC2_item.update_arc(item['value'])
            if item['id'] == '020101':  # rotation angle  done
                self.ui.angle_txt.setText(str(int(item['value'])))
                self.compass2_item.setAngle(360 - int(item['value']))
            if item['id'] == '020102':  # intruder list: draw intruders on the compass
                for flight_id in self.tab2_dict_temp:  # hide all known intruder items first
                    eval("self.tab2_target" + str(self.tab2_dict_temp[flight_id]) + "_item.setVisible(False)")
                for item_info in item['value']:
                    flight_id = item_info['flightID']
                    if flight_id not in self.old_tab2_target_id_list:
                        self.old_tab2_target_id_list.append(flight_id)
                        # create a scene item for the newly seen intruder
                        exec("self.tab2_target" + str(self.tab2_new_num + 1) + "_item = QGraphicsPixmapItem()")
                        exec("self.scene_5.addItem(self.tab2_target" + str(self.tab2_new_num + 1) + "_item)")
                        self.tab2_dict_temp[flight_id] = self.tab2_new_num + 1
                        self.tab2_new_num += 1
                    else:
                        x = item_info['x']
                        y = item_info['y']
                        angle = item_info['dir']
                        status = item_info['status']
                        x_view = 220 + 240 * x
                        y_view = 220 + 240 * y
                        y_view = 480 - y_view
                        # first check the intruder position against the 480*480 view
                        if x_view < 15 or y_view < 15 or x_view > 465 or y_view > 465:
                            # eval("self.scene_5.removeItem(self.tab2_target" + str(self.tab2_dict_temp[flight_id]) + "_item)")
                            # print("delete tab2 target index: " + str(self.tab2_dict_temp[flight_id]))
                            pass
                        else:
                            eval("self.tab2_target" + str(self.tab2_dict_temp[flight_id]) + "_item.setVisible(True)")
                            if status == 0:  # 999999 gray
                                gray_pix = QPixmap("pic/target-gray.png")
                                eval("self.tab2_target" + str(self.tab2_dict_temp[flight_id]) + "_item.setPixmap(gray_pix)")
                                eval("self.tab2_target" + str(self.tab2_dict_temp[flight_id]) + "_item.setPos(x_view, y_view)")
                                eval("self.tab2_target" + str(self.tab2_dict_temp[flight_id]) + "_item.setRotation(angle*180/3.14)")
                            if status == 1 or status == 2:  # ffff00 yellow
                                yellow_pix = QPixmap("pic/target-yellow.png")
                                eval("self.tab2_target" + str(self.tab2_dict_temp[flight_id]) + "_item.setPixmap(yellow_pix)")
                                eval("self.tab2_target" + str(self.tab2_dict_temp[flight_id]) + "_item.setPos(x_view, y_view)")
                                eval("self.tab2_target" + str(self.tab2_dict_temp[flight_id]) + "_item.setRotation(angle*180/3.14)")
                            if status == 3:  # ff0000 red
                                red_pix = QPixmap("pic/target-red.png")
                                eval("self.tab2_target" + str(self.tab2_dict_temp[flight_id]) + "_item.setPixmap(red_pix)")
                                eval("self.tab2_target" + str(self.tab2_dict_temp[flight_id]) + "_item.setPos(x_view, y_view)")
                                eval("self.tab2_target" + str(self.tab2_dict_temp[flight_id]) + "_item.setRotation(angle*180/3.14)")
                            if status == 4:  # 00ff00 green
                                green_pix = QPixmap("pic/target-green.png")
                                eval("self.tab2_target" + str(self.tab2_dict_temp[flight_id]) + "_item.setPixmap(green_pix)")
                                eval("self.tab2_target" + str(self.tab2_dict_temp[flight_id]) + "_item.setPos(x_view, y_view)")
                                eval("self.tab2_target" + str(self.tab2_dict_temp[flight_id]) + "_item.setRotation(angle*180/3.14)")
            if item['id'] == '010401':  # draw aircraft on the map
                temp = 0
                # NOTE(review): the inner loop rebinds ``item`` (the outer loop
                # variable); harmless because the outer for reassigns it each
                # iteration, but confusing — rename if this code is reworked.
                for item in item['value']:
                    angle = item['dir']
                    id = item['flightID']
                    isself = item['isSelf']
                    if isself == True:  # draw the ownship
                        js_string_own_update = '''update_own_position(%f,%f,%d);''' % (
                            item['lon'], item['lat'], angle)
                        # print(js_string_own_update)
                        self.browser.page().runJavaScript(js_string_own_update)
                    else:  # draw an intruder
                        js_string_target_update = '''update_target_position(%d,%f,%f,%d);''' % (
                            temp + 1, item['lon'], item['lat'], angle)
                        # print(js_string_target_update)
                        self.browser.page().runJavaScript(js_string_target_update)
                        temp += 1
    except:
        # keep the GUI alive on any malformed message; just log the error
        traceback.print_exc()
if __name__ == '__main__':
    import sys
    # Enable crisp rendering on high-DPI displays before the app is created.
    QApplication.setAttribute(Qt.AA_EnableHighDpiScaling)
    application = QApplication(sys.argv)
    main_window = MainWindow()
    main_window.show()
    sys.exit(application.exec_())
|
from settings import CATEGORY_BASE
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from django.template.defaultfilters import slugify
import mptt
from taggit.managers import TaggableManager
from managers import TumbleItemManager
class Category(models.Model):
    """Hierarchical content category (registered as an MPTT tree below).

    ``denormalized_path`` caches the category's full URL path
    ("/<CATEGORY_BASE>/<ancestor-slugs>/<slug>/") so it does not have to
    be rebuilt per request; save() refreshes it and propagates the change
    to descendants and to the TumbleItems filed under this category.
    """

    # Fix: max_length must be an int, not a string ("200").
    category_name = models.CharField(max_length=200, verbose_name="Category Name")
    slug = models.SlugField(max_length=200, verbose_name="Slug Field")
    parent = models.ForeignKey('self', null=True, blank=True, related_name='children')
    denormalized_path = models.CharField(max_length=500, null=True, blank=True)

    def __unicode__(self):
        return self.category_name

    def save(self, *args, **kwargs):
        """Save the category, rebuilding cached paths where needed.

        Fix: accepts and forwards *args/**kwargs so standard save options
        (e.g. ``using=...``) still work through this override.
        """
        if not self.denormalized_path:
            # First save so the MPTT tree fields exist before get_ancestors().
            super(Category, self).save(*args, **kwargs)
            cat_path = self.get_ancestors()
            self.slug = slugify(self.category_name)
            if not self.get_ancestors():
                cat_path = "/%s/%s/" % (CATEGORY_BASE, self.slug)
            else:
                cat_path = [path.slug for path in cat_path]
                cat_path = "/".join(cat_path)
                cat_path = "/%s/%s/%s/" % (CATEGORY_BASE, cat_path, self.slug)
            self.denormalized_path = cat_path
        if self.get_descendant_count() > 0:
            # Propagate the (possibly changed) path down the subtree.
            for desc in self.get_descendants():
                if not desc.get_ancestors():
                    cat_path = "/%s/%s/" % (CATEGORY_BASE, desc.slug)
                else:
                    cat_path = desc.get_ancestors()
                    cat_path = [path.slug for path in cat_path]
                    cat_path = "/".join(cat_path)
                    cat_path = "/%s/%s/%s/" % (CATEGORY_BASE, cat_path, desc.slug)
                desc.denormalized_path = cat_path
                desc.save()
        try:
            # Refresh the cached full path on every item in this category.
            contents = TumbleItem.objects.filter(category=self)
            for content in contents:
                content.category = self
                content.denormalized_full_path = content._build_full_path()
                content.save()
        except Exception:
            # Fix: was a bare ``except:``. Still deliberately best-effort
            # (e.g. TumbleItem table may not exist yet), but no longer
            # swallows SystemExit/KeyboardInterrupt.
            # TODO: add real logging here.
            pass
        super(Category, self).save(*args, **kwargs)

    def get_absolute_url(self):
        return self.denormalized_path

    class Meta:
        verbose_name_plural = "Categories"
        verbose_name = "Category"
mptt.register(Category, order_insertion_by=['category_name'])
class TumbleItem(models.Model):
    """Base tumblelog content item; concrete types (e.g. Post) subclass it."""
    post_type = models.ForeignKey(ContentType)
    category = models.ForeignKey(Category)
    title = models.CharField(max_length=200)
    author = models.ForeignKey(User, blank=True, null=True)
    # BUG FIX: max_length values below were strings ("200", "500", "250");
    # Django requires integers.
    slug = models.SlugField(max_length=200, verbose_name="Url Slug")
    post_summary = models.TextField()
    created_at = models.DateField(auto_now_add=True)
    updated_at = models.DateField(auto_now=True)
    published_at = models.DateField()
    denormalized_tags = models.CharField(max_length=500, blank=True, null=True)
    denormalized_post_type = models.CharField(max_length=250, blank=True, null=True)
    denormalized_full_path = models.CharField(max_length=500, blank=True, null=True)
    objects = TumbleItemManager()
    tags = TaggableManager()

    def __unicode__(self):
        return self.title

    def get_tags(self):
        """Return the cached tag string as a list (split on "/")."""
        tags = self.denormalized_tags.split("/")
        return tags

    def get_post_type(self):
        return self.denormalized_post_type

    def full_path(self):
        return self.denormalized_full_path

    def get_absolute_url(self):
        return self.denormalized_full_path

    def _build_tags(self, tags):
        """Serialize tag names into one sorted, "/"-joined string.

        BUG FIX: the comma check previously tested the whole *tags* list
        (``u',' in tags``) instead of the individual tag, so tags that
        contain commas were never quoted.
        """
        names = []
        for tag in tags:
            if u',' in tag:
                names.append('"%s"' % tag)
            else:
                names.append(tag)
        tags = u'/'.join(sorted(names))
        return tags

    def _build_full_path(self):
        """Compose ancestor slugs + category slug + own slug into 'a/b/cat/slug.html'."""
        cat_path = self.category.get_ancestors().select_related()
        cat_path = [path.slug for path in cat_path]
        cat_path = "/".join(cat_path)
        cat_path = "%s/%s/%s.html" % (cat_path, str(self.category.slug).lower(), self.slug)
        return cat_path

    class Meta:
        ordering = ['-published_at']
class Post(TumbleItem):
    """A regular text post: a TumbleItem with a full body."""
    post_body = models.TextField()

    def __unicode__(self):
        return self.title

    class Meta:
        verbose_name_plural = "Posts"
        verbose_name = "Post"
class Link(models.Model):
    # Placeholder model -- fields not implemented yet.
    pass
TumbleItem.objects.follow_model(Post) |
# Attribute lookup tables for the 20 faces. Faces listed in the inline
# sets take the first value; all others take the alternative.
color = {
    'face_%d' % i: ('light' if i in (1, 4, 6, 9, 10, 12, 13, 14, 18, 20) else 'dark')
    for i in range(1, 21)
}
length = {
    'face_%d' % i: ('long' if i in (1, 2, 5, 6, 7, 9, 12, 13, 14, 16) else 'short')
    for i in range(1, 21)
}
from led_values_manager import LedValuesManager
def main():
    """Build the LED values manager and start its processing loop."""
    # TODO : @Kabroc you might have to test to modify ledValuesManager parameters
    # NOTE(review): 1920x1080 looks like a screen resolution and 10/20 like
    # LED grid counts -- confirm against LedValuesManager's signature.
    ledValuesManager = LedValuesManager(1920, 1080, 10, 20)
    ledValuesManager.run()


if __name__ == "__main__":
    main()
|
# pylint: disable=invalid-name
'''
Driver for creating all the SkewT diagrams needed for a specific input dataset.
'''
# pylint: disable=wrong-import-position, wrong-import-order
import matplotlib as mpl
mpl.use('Agg')
# pylint: enable=wrong-import-position, wrong-import-order
import argparse
import copy
import gc
import glob
from multiprocessing import Pool, Process
import os
import sys
import time
import zipfile
import matplotlib.pyplot as plt
import yaml
from adb_graphics.datahandler import gribfile
from adb_graphics.datahandler import gribdata
import adb_graphics.errors as errors
from adb_graphics.figures import maps
from adb_graphics.figures import skewt
import adb_graphics.utils as utils
AIRPORTS = 'static/Airports_locs.txt'
def create_skewt(cla, fhr, grib_path, workdir):
    ''' Generate arguments for parallel processing of Skew T graphics,
    and generate a pool of workers to complete the tasks.

    Input:
        cla        command line arguments Namespace object
        fhr        forecast hour
        grib_path  path to the input grib file for this hour
        workdir    output directory
    '''
    # Create the file object to load the contents
    gfile = gribfile.GribFile(grib_path)
    # One task per site; every worker shares the already-loaded grib contents.
    args = [(cla, fhr, gfile.contents, site, workdir) for site in cla.sites]
    print(f'Queueing {len(args)} Skew Ts')
    with Pool(processes=cla.nprocs) as pool:
        pool.starmap(parallel_skewt, args)
def create_maps(cla, fhr, gribfiles, workdir):
    ''' Generate arguments for parallel processing of plan-view maps and
    generate a pool of workers to complete the task.

    Raises NoGraphicsDefinitionForVariable when a requested variable/level
    has no entry in the specs file.
    '''
    # cla.images is a 2-list: [model name, {variable: [levels, ...]}]
    model = cla.images[0]
    for tile in cla.tiles:
        args = []
        for variable, levels in cla.images[1].items():
            for level in levels:
                # Load the spec for the current variable
                spec = cla.specs.get(variable, {}).get(level)
                if not spec:
                    msg = f'graphics: {variable} {level}'
                    raise errors.NoGraphicsDefinitionForVariable(msg)
                args.append((cla, fhr, gribfiles.contents, level, model, spec,
                             variable, workdir, tile))
        print(f'Queueing {len(args)} maps')
        # A fresh pool per tile; each worker draws one variable/level map.
        with Pool(processes=cla.nprocs) as pool:
            pool.starmap(parallel_maps, args)
def create_zip(png_files, zipf):
    ''' Create a zip file. Use a locking mechanism -- write a lock file to
    disk. Successfully zipped png files are deleted; on failure the write is
    retried up to `retry` times.

    NOTE(review): the exists-then-open lock is not atomic, so two processes
    can still race between the check and the open -- acceptable here because
    zip jobs for the same file are started sequentially; confirm if reused.
    '''
    lock_file = f'{zipf}._lock'
    retry = 2
    count = 0
    while True:
        if not os.path.exists(lock_file):
            fd = open(lock_file, 'w')
            print(f'Writing to zip file {zipf} for files like: {png_files[0][-10:]}')
            try:
                with zipfile.ZipFile(zipf, 'a', zipfile.ZIP_DEFLATED) as zfile:
                    for png_file in png_files:
                        if os.path.exists(png_file):
                            zfile.write(png_file, os.path.basename(png_file))
            except:  # pylint: disable=bare-except
                print(f'Error on writing zip file! {sys.exc_info()[0]}')
                count += 1
                if count >= retry:
                    # Give up after `retry` failures and surface the error.
                    raise
            else:
                # When zipping is successful, remove png_files
                for png_file in png_files:
                    if os.path.exists(png_file):
                        os.remove(png_file)
            finally:
                # Always release the lock, success or failure.
                fd.close()
                if os.path.exists(lock_file):
                    os.remove(lock_file)
            break
        # Wait before trying to obtain the lock on the file
        time.sleep(5)
def gather_gribfiles(cla, fhr, gribfiles):
    ''' Returns the appropriate gribfiles object for the type of graphics being
    generated -- whether it's for a single forecast time or all forecast lead
    times. '''
    # We already checked that the current file exists and is old enough, so
    # assume that the earlier ones are, too.
    filenames = {'01fcst': [], 'free_fcst': []}
    fcst_hours = [int(fhr)]
    if cla.all_leads and gribfiles is None:
        # First call with --all_leads: gather every hour up to fhr.
        fcst_hours = list(range(int(fhr) + 1))
    for fcst_hour in fcst_hours:
        filename = os.path.join(cla.data_root,
                                cla.file_tmpl.format(FCST_TIME=fcst_hour))
        # Hours 0-1 are grouped separately from the free forecast hours.
        if fcst_hour <= 1:
            filenames['01fcst'].append(filename)
        else:
            filenames['free_fcst'].append(filename)
    if gribfiles is None or not cla.all_leads:
        # Create a new GribFiles object, include all hours, or just this one,
        # depending on command line argument flag
        gribfiles = gribfile.GribFiles(
            coord_dims={'fcst_hr': fcst_hours},
            filenames=filenames,
            filetype=cla.file_type,
            model=cla.images[0],
            )
    else:
        # Append a single forecast hour to the existing GribFiles object.
        gribfiles.coord_dims.get('fcst_hr').append(fhr)
        gribfiles.append(filenames)
    return gribfiles
def generate_tile_list(arg_list):
    ''' Given the input arguments -- a list if the argument is provided, return
    the list. If no arg is provided, defaults to the full domain, and if 'all'
    is provided, the full domain, and all subdomains are plotted. '''
    if not arg_list:
        return ['full']
    # A single comma-separated token expands into a list of tile names.
    tiles = arg_list[0].split(',') if ',' in arg_list[0] else arg_list
    if 'all' not in tiles:
        return tiles
    # "all" means the full domain plus every defined subdomain, minus the
    # tiles that only apply to specific models.
    excluded = ('Anchorage', 'AKRange', 'Juneau') + ('AK', 'AKZoom', 'conus', 'HI')
    candidates = ['full'] + list(maps.TILE_DEFS.keys())
    return [tile for tile in candidates if tile not in excluded]
def load_images(arg):
    ''' Check that input image file exists, and that it contains the
    requested section. Return a 2-list (required by argparse) of the
    file path and dictionary of images to be created.
    '''
    # arg is a 2-list: [path to the YAML file, top-level section name].
    image_file, image_set = arg[0], arg[1]
    # Raises if the file is missing.
    image_file = utils.path_exists(image_file)
    with open(image_file, 'r') as fn:
        section = yaml.load(fn, Loader=yaml.Loader)[image_set]
    return [section.get('model'), section.get('variables')]
def load_sites(arg):
    ''' Check that the sites file exists, and return its contents. '''
    # path_exists raises when the file is missing.
    sites_path = utils.path_exists(arg)
    with open(sites_path, 'r') as sites_file:
        return sites_file.readlines()
def load_specs(arg):
    ''' Check to make sure arg file exists. Return its contents. '''
    spec_file = utils.path_exists(arg)  # raises when missing
    with open(spec_file, 'r') as fn:
        return yaml.load(fn, Loader=yaml.Loader)
def parse_args():
    ''' Set up argparse command line arguments, and return the Namespace
    containing the settings. '''
    parser = argparse.ArgumentParser(description='Script to drive the \
        creation of graphices files.')
    # Positional argument
    parser.add_argument(
        'graphic_type',
        choices=['maps', 'skewts'],
        help='The type of graphics to create.',
        )
    # Short args
    parser.add_argument(
        '-a',
        dest='data_age',
        default=3,
        help='Age in minutes required for data files to be complete. Default = 3',
        type=int,
        )
    parser.add_argument(
        '-d',
        dest='data_root',
        help='Cycle-independant data directory location.',
        required=True,
        type=utils.path_exists,
        )
    parser.add_argument(
        '-f',
        dest='fcst_hour',
        help='A list describing forecast hours. If one argument, \
            one fhr will be processed. If 2 or 3 arguments, a sequence \
            of forecast hours [start, stop, [increment]] will be \
            processed. If more than 3 arguments, the list is processed \
            as-is.',
        nargs='+',
        required=True,
        type=int,
        )
    parser.add_argument(
        '-m',
        default='Unnamed Experiment',
        dest='model_name',
        help='string to use in title of graphic.',
        type=str,
        )
    parser.add_argument(
        '-n',
        default=1,
        dest='nprocs',
        help='Number of processes to use for parallelization.',
        type=int,
        )
    parser.add_argument(
        '-o',
        dest='output_path',
        help='Directory location desired for the output graphics files.',
        required=True,
        )
    parser.add_argument(
        '-s',
        dest='start_time',
        help='Start time in YYYYMMDDHH format',
        required=True,
        type=utils.to_datetime,
        )
    parser.add_argument(
        '-w',
        dest='wait_time',
        default=10,
        help='Time in minutes to wait on data files to be available. Default = 10',
        type=int,
        )
    parser.add_argument(
        '-z',
        dest='zip_dir',
        help='Full path to zip directory.',
        )
    # Long args
    parser.add_argument(
        '--all_leads',
        action='store_true',
        help='Use --all_leads to accumulate all forecast lead times.',
        )
    parser.add_argument(
        '--file_tmpl',
        default='wrfnat_hrconus_{FCST_TIME:02d}.grib2',
        help='File naming convention',
        )
    parser.add_argument(
        '--file_type',
        choices=('nat', 'prs'),
        default='nat',
        help='Type of levels contained in grib file.',
        )
    # SkewT-specific args
    skewt_group = parser.add_argument_group('SkewT Arguments')
    skewt_group.add_argument(
        '--max_plev',
        help='Maximum pressure level to plot for profiles.',
        type=int,
        )
    skewt_group.add_argument(
        '--sites',
        help='Path to a sites file.',
        type=load_sites,
        )
    # Map-specific args
    map_group = parser.add_argument_group('Map Arguments')
    map_group.add_argument(
        '--images',
        help='Path to YAML config file specifying which \
            variables to map and the top-level section to use.',
        metavar=('[FILE,', 'SECTION]'),
        nargs=2,
        )
    map_group.add_argument(
        '--specs',
        default='adb_graphics/default_specs.yml',
        help='Path to the specs YAML file.',
        )
    map_group.add_argument(
        '--subh_freq',
        default=60,
        help='Sub-hourly frequency in minutes.',
        )
    map_group.add_argument(
        '--tiles',
        default=['full'],
        help='The domains to plot. Choose from any of those listed. Special ' \
            'choices: full is full model output domain, and all is the full domain, ' \
            'plus all of the sub domains. ' \
            f'Choices: {["full", "all", "conus", "AK"] + list(maps.TILE_DEFS.keys())}',
        nargs='+',
        )
    return parser.parse_args()
def parallel_maps(cla, fhr, ds, level, model, spec, variable, workdir,
                  tile='full'):
    # pylint: disable=too-many-arguments,too-many-locals
    '''
    Function that creates a single plan-view map. Can be used in
    parallel.

    Input:

        cla      command line arguments Namespace object
        fhr      forecast hour
        ds       xarray dataset from the grib file
        level    the vertical level of the variable to be plotted
                 corresponding to a key in the specs file
        model    model name: rap, hrrr, hrrre, rrfs, rtma
        spec     the dictionary of specifications for the given variable
                 and level
        variable the name of the variable section in the specs file
        workdir  output directory
        tile     name of the sub-domain to plot (default: full domain)
    '''
    # Object to be plotted on the map in filled contours.
    field = gribdata.fieldData(
        ds=ds,
        fhr=fhr,
        filetype=cla.file_type,
        level=level,
        model=model,
        short_name=variable,
        )
    # Touch the lazy property now so a missing grib record is caught early.
    try:
        field.field
    except errors.GribReadError:
        print(f'Cannot find grib2 variable for {variable} at {level}. Skipping.')
        return
    # Create a list of fieldData objects for each contour field requested
    # These will show up as line contours on the plot.
    contours = spec.get('contours')
    contour_fields = []
    if contours is not None:
        for contour, contour_kwargs in contours.items():
            # Spec keys may encode their own level as "var_lev"; otherwise
            # the contour uses the main field's level.
            if '_' in contour:
                var, lev = contour.split('_')
            else:
                var, lev = contour, level
            contour_fields.append(gribdata.fieldData(
                ds=ds,
                fhr=fhr,
                level=lev,
                model=model,
                contour_kwargs=contour_kwargs,
                short_name=var,
                ))
    # Create a list of fieldData objects for each hatched area requested
    hatches = spec.get('hatches')
    hatch_fields = []
    if hatches is not None:
        for hatch, hatch_kwargs in hatches.items():
            var, lev = hatch.split('_')
            hatch_fields.append(gribdata.fieldData(
                ds=ds,
                fhr=fhr,
                level=lev,
                model=model,
                contour_kwargs=hatch_kwargs,
                short_name=var,
                ))
    # Hawaii maps use a slightly larger canvas.
    if cla.model_name == "HRRR-HI":
        inches = 12.2
    else:
        inches = 10
    fig, ax = plt.subplots(1, 1, figsize=(inches, inches))
    # Generate a map object
    m = maps.Map(
        airport_fn=AIRPORTS,
        ax=ax,
        grid_info=field.grid_info(),
        model=model,
        tile=tile,
        )
    # Send all objects (map, field, contours, hatches) to a DataMap object
    dm = maps.DataMap(
        field=field,
        contour_fields=contour_fields,
        hatch_fields=hatch_fields,
        map_=m,
        model_name=cla.model_name,
        )
    # Draw the map
    dm.draw(show=True)
    # Build the output path; "ua" (upper air) levels carry no level suffix.
    png_suffix = level if level != 'ua' else ''
    png_file = f'{variable}_{tile}_{png_suffix}_f{fhr:03d}.png'
    png_file = png_file.replace("__", "_")
    png_path = os.path.join(workdir, png_file)
    print('*' * 120)
    print(f"Creating image file: {png_path}")
    print('*' * 120)
    # Save the png file to disk
    plt.savefig(
        png_path,
        bbox_inches='tight',
        dpi=72,
        format='png',
        orientation='landscape',
        pil_kwargs={'optimize': True},
        )
    # Aggressive cleanup: matplotlib keeps figures alive otherwise, which
    # leaks memory across the many maps created by each worker.
    fig.clear()
    # Clear the current axes.
    plt.cla()
    # Clear the current figure.
    plt.clf()
    # Closes all the figure windows.
    plt.close('all')
    del field
    del m
    gc.collect()
def parallel_skewt(cla, fhr, ds, site, workdir):
    '''
    Function that creates a single SkewT plot. Can be used in parallel.
    Input:

      cla        command line arguments Namespace object
      ds         the XArray dataset
      fhr        the forecast hour integer
      site       the string representation of the site from the sites file
      workdir    output directory
    '''
    skew = skewt.SkewTDiagram(
        ds=ds,
        fhr=fhr,
        filetype=cla.file_type,
        loc=site,
        max_plev=cla.max_plev,
        model_name=cla.model_name,
        )
    skew.create_diagram()
    # File name pattern: <code>_<number>_skewt_fNNN.png
    outfile = f"{skew.site_code}_{skew.site_num}_skewt_f{fhr:03d}.png"
    png_path = os.path.join(workdir, outfile)
    print('*' * 80)
    print(f"Creating image file: {png_path}")
    print('*' * 80)
    # pylint: disable=duplicate-code
    plt.savefig(
        png_path,
        bbox_inches='tight',
        dpi='figure',
        format='png',
        orientation='landscape',
        )
    # Close the figure to release memory in long-lived workers.
    plt.close()
@utils.timer
def graphics_driver(cla):
    '''
    Function that interprets the command line arguments to locate the input grib
    file, create the output directory, and call the graphic-specifc function.

    Input:

      cla         Namespace object containing command line arguments.
    '''
    # pylint: disable=too-many-branches, too-many-locals
    # Create an empty zip file
    if cla.zip_dir:
        zipfiles = {}
        tiles = cla.tiles if cla.graphic_type == "maps" else ['skewt']
        for tile in tiles:
            tile_zip_dir = os.path.join(cla.zip_dir, tile)
            tile_zip_file = os.path.join(tile_zip_dir, 'files.zip')
            print(f"checking for {tile_zip_file}")
            # Remove a stale zip from a previous run before appending.
            if os.path.isfile(tile_zip_file):
                os.remove(tile_zip_file)
                print(f"{tile_zip_file} found and removed")
            os.makedirs(tile_zip_dir, exist_ok=True)
            zipfiles[tile] = tile_zip_file
    fcst_hours = copy.deepcopy(cla.fcst_hour)
    # Initialize a timer used for killing the program
    timer_end = time.time()
    gribfiles = None
    # Allow this task to run concurrently with UPP by continuing to check for
    # new files as they become available.
    while fcst_hours:
        timer_sleep = time.time()
        for fhr in sorted(fcst_hours):
            grib_path = os.path.join(cla.data_root,
                                     cla.file_tmpl.format(FCST_TIME=fhr))
            # UPP is most likely done writing if it hasn't written in data_age
            # mins (default is 3 to address most CONUS-sized domains)
            if os.path.exists(grib_path) and utils.old_enough(cla.data_age, grib_path):
                fcst_hours.remove(fhr)
            else:
                # Try next forecast hour
                print(f'Cannot find {grib_path}')
                continue
            # Create the working directory
            workdir = os.path.join(cla.output_path,
                                   f"{utils.from_datetime(cla.start_time)}{fhr:02d}")
            os.makedirs(workdir, exist_ok=True)
            print((('-' * 80)+'\n') * 2)
            print()
            print(f'Graphics will be created for input file: {grib_path}')
            print(f'Output graphics directory: {workdir}')
            print()
            print((('-' * 80)+'\n') * 2)
            if cla.graphic_type == 'skewts':
                create_skewt(cla, fhr, grib_path, workdir)
            else:
                gribfiles = gather_gribfiles(cla, fhr, gribfiles)
                create_maps(cla,
                            fhr=fhr,
                            gribfiles=gribfiles,
                            workdir=workdir,
                            )
            # Zip png files and remove the originals in a subprocess
            if cla.zip_dir:
                for tile, zipf in zipfiles.items():
                    png_files = glob.glob(os.path.join(workdir, f'*_{tile}_*{fhr:02d}.png'))
                    zip_proc = Process(group=None,
                                       target=create_zip,
                                       args=(png_files, zipf),
                                       )
                    zip_proc.start()
                    zip_proc.join()
            # Keep track of last time we did something useful
            timer_end = time.time()
        # Give up trying to process remaining forecast hours after waiting
        # wait_time mins. This accounts for slower UPP processes. Default for
        # most CONUS-sized domains is 10 mins.
        if time.time() - timer_end > cla.wait_time * 60:
            print(f"Exiting with forecast hours remaining: {fcst_hours}")
            print((('-' * 80)+'\n') * 2)
            break
        # Wait for a bit if it's been < 2 minutes (about the length of time UPP
        # takes) since starting last loop
        if fcst_hours and time.time() - timer_sleep < 120:
            print(f"Waiting for a minute before forecast hours: {fcst_hours}")
            print((('-' * 80)+'\n') * 2)
            time.sleep(60)
if __name__ == '__main__':
    CLARGS = parse_args()
    # Expand [start, stop, [inc]] shorthand into an explicit hour list.
    CLARGS.fcst_hour = utils.fhr_list(CLARGS.fcst_hour)
    # Only need to load the default in memory if we're making maps.
    if CLARGS.graphic_type == 'maps':
        CLARGS.specs = load_specs(CLARGS.specs)
        CLARGS.images = load_images(CLARGS.images)
        CLARGS.tiles = generate_tile_list(CLARGS.tiles)
    print(f"Running script for {CLARGS.graphic_type} with args: ")
    print((('-' * 80)+'\n') * 2)
    # Dump the settings, skipping the two that are large parsed structures.
    for name, val in CLARGS.__dict__.items():
        if name not in ['specs', 'sites']:
            print(f"{name:>15s}: {val}")
    graphics_driver(CLARGS)
|
import util
import operator
from collections import defaultdict
# Backtest script: every 5th trading day, "buy" the day's best and worst
# intraday performer (vs SPY's calendar), then periodically value both
# portfolios with util.calcPortfolio and track the average change.
util.getCsv.csvdir = "historical"
spdf = util.getCsv("SPY")  # SPY provides the reference date index
#stocks = util.getStocks(ivv=True)
stocks = util.getStocks()
every = 5
bought_low = defaultdict(int)   # shares "bought" of each worst performer
bought_high = defaultdict(int)  # shares "bought" of each best performer
lowpaid = 0
highpaid = 0
# Per-ticker row offset between SPY's index and the ticker's own CSV.
offsets = defaultdict(lambda:None)
avgchangel = list()
avgchangeh = list()
avgchangels = list()
avgchangehs = list()
lastcount = len(spdf)-1
for idx in spdf.index:
    # Only trade every 5th row.
    if idx % 5:
        continue
    cdate = spdf.at[idx, "Date"]
    theday = dict()
    maxv = 0
    minv = 10
    highstock = None
    lowstock = None
    for astock in stocks:
        df = util.getCsv(astock)
        if df is None:
            print("astock: {}".format( astock))
            continue
        # Lazily compute this ticker's row offset relative to SPY dates.
        if offsets.get(astock) == None:
            dates = list(df["Date"])
            try:
                starti = dates.index(cdate)
                offsets[astock] = idx-starti
            except:
                continue
        off = offsets[astock]
        if off == None:
            continue
        myidx = idx - off
        try:
            # Intraday change ratio: Close / Open.
            change = round(df.at[myidx,"Close"]/df.at[myidx,"Open"],3)
        except:
            continue
        if change > maxv:
            highstock = astock
            maxv = change
        if change < minv:
            lowstock = astock
            minv = change
#        theday[astock] = change
#    sorted_x = sorted(theday.items(), key=operator.itemgetter(1))
#    print("highstock : {}".format( highstock ))
#    print("lowstock : {}".format( lowstock ))
#    lowstock = sorted_x[0][0]
    # Buy one share of the worst performer at the NEXT day's open.
    df = util.getCsv(lowstock)
    myidx = idx - offsets[lowstock]
    bought = round(df.at[myidx + 1,"Open"])
    bought_low[lowstock] += 1
#    print("bought_low: {}".format( bought_low))
    lowpaid += bought
#    highstock = sorted_x[-1][0]
    # Same for the best performer.
    df = util.getCsv(highstock)
    myidx = idx - offsets[highstock]
    bought = round(df.at[myidx + 1,"Open"])
    bought_high[highstock] += 1
#    print("bought_high: {}".format( bought_high))
    highpaid += bought
    # Every 160 rows (or at the end), value and reset both portfolios.
    if not idx % 160 and idx > 0 or idx == lastcount:
        try:
            lowb = util.calcPortfolio(bought_low, idx=cdate)
            highb = util.calcPortfolio(bought_high, idx=cdate)
        except:
            # NOTE(review): if valuation fails, stale lowb/highb from a
            # previous pass (or a NameError) is used below -- confirm intent.
            pass
        lowchange = lowb/lowpaid
        highchange = highb/ highpaid
        avgchangel.append(lowchange)
        avgchangeh.append(highchange)
        avgchangels.append(lowpaid)
        avgchangehs.append(highpaid)
        bought_low = defaultdict(int)
        bought_high = defaultdict(int)
        lowpaid = 0
        highpaid = 0
try:
    print("avgchangel: {}".format(util.formatDecimal(sum(avgchangel)/len(avgchangel))))
    print("avgchangeh: {}".format(util.formatDecimal(sum(avgchangeh)/len(avgchangeh))))
    print("avgchangels: {}".format(round(sum(avgchangels)/len(avgchangels))))
    print("avgchangehs: {}".format(round(sum(avgchangehs)/len(avgchangehs))))
except:
    pass
#print("bought_high: {}".format( bought_high))
#print("bought_low: {}".format( bought_low))
#print("lowpaid : {}".format( lowpaid ))
#print("highpaid : {}".format( highpaid ))
#    drop = row["Close"] / row["Open"]
|
from django.db import models
from django.utils import timezone
class Blog(models.Model):
    """A blog post; unpublished until publish() stamps published_date."""
    title = models.CharField(max_length=100, unique=True)
    slug = models.SlugField(max_length=100, unique=True)
    body = models.TextField()
    created_date = models.DateTimeField(default=timezone.now)
    # null until the post is published
    published_date = models.DateTimeField(blank=True, null=True)

    def __unicode__(self):
        return '%s' % self.title

    def publish(self):
        """Mark the post as published now and persist it."""
        self.published_date = timezone.now()
        self.save()

    # @permalink
    # def get_absolute_url(self):
    #     return ('view_blog_post', None, { 'slug': self.slug })
|
# -*- coding: utf-8 -*-
import logging
from custom.utils import get_user_cookie
logger = logging.getLogger(__name__)
class AccountMiddleware(object):
    """Middleware that attaches the logged-in account info to each request."""

    def process_request(self, request):
        """Set request.account from the login cookie, or an anonymous marker."""
        cookie_account = get_user_cookie(request)
        request.account = cookie_account if cookie_account is not None else {'is_login': False}
|
__author__ = 'Elisabetta Ronchieri'
import commands
import os
from tstorm.utils import utils
class Cat:
    """Run `cat <file>` and capture its status/output.

    NOTE(review): relies on the Python 2-only `commands` module; porting
    to Python 3 would require `subprocess`.
    """
    def __init__(self, fn='input-file'):
        self.ifn = fn
        self.cmd = {
            'name':'cat'}
        # Result dictionary returned by get_output().
        self.otpt = {
            'status':'',
            'otpt':''}

    def get_command(self):
        """Return the full shell command string, e.g. 'cat input-file'."""
        a = self.cmd['name'] + ' ' + self.ifn
        return a

    def run_command(self):
        """Run the command if `cat` exists; return (status, output) or ()."""
        a=()
        if utils.cmd_exist(self.cmd['name']):
            a=commands.getstatusoutput(self.get_command())
        return a

    def get_output(self):
        """Return {'status': 'PASS'|'FAILURE', 'otpt': command output or ''}."""
        a=self.run_command()
        if a[0] == 0:
            self.otpt['status'] = 'PASS'
            self.otpt['otpt'] = a[1]
        else:
            self.otpt['status'] = 'FAILURE'
        return self.otpt
class Rf:
    """Read a file's entire contents, reporting a PASS/FAILURE status."""

    def __init__(self, fn='input-file'):
        self.ifn = fn
        self.otpt = {
            'status':'',
            'otpt':''}

    def get_output(self):
        """Return {'status': 'PASS'|'FAILURE', 'otpt': file contents or ''}."""
        try:
            with open(self.ifn, 'r') as handle:
                self.otpt['otpt'] = handle.read()
        except IOError:
            self.otpt['status'] = 'FAILURE'
        else:
            self.otpt['status'] = 'PASS'
        return self.otpt
|
import json
import pandas as pd
# Generating table images as csv
train = pd.read_csv("./data/train.csv")
# One row per image: drop the per-segmentation columns, then de-duplicate.
images_table = train.drop(["EncodedPixels", "AttributesIds", "ClassId"], axis=1)
images_table["Group"] = "train"
images_table = images_table.drop_duplicates().reset_index(drop=True)
# Surrogate integer key for each unique image.
images_table["Id"] = images_table.index.values
images_table = images_table.loc[:, ["Id", "ImageId", "Height", "Width", "Group"]]
# Generating table attributes and categories as csv
with open('./data/label_descriptions.json', 'r') as file:
    label_desc = json.load(file)
# Generating table segmentation as csv
segm_table = train.loc[:, ["ImageId", "EncodedPixels", "ClassId", "AttributesIds"]]
# Replace the string ImageId with the surrogate Id from images_table.
segm_table = segm_table.merge(images_table[["Id", "ImageId"]], on="ImageId")
segm_table = segm_table.loc[:, ["Id", "EncodedPixels", "ClassId", "AttributesIds"]]
attributes_table = pd.DataFrame(label_desc["attributes"])
categories_table = pd.DataFrame(label_desc["categories"])
# Flag "detail" supercategories (garment parts, closures, decorations).
categories_table["detail"] = 0
categories_table.loc[categories_table["supercategory"].isin(
    ["garment parts", "closures", "decorations"]), "detail"] = 1
images_table.to_csv("./data/images_table.csv", index=None)
attributes_table.to_csv("./data/attributes_table.csv", index=None)
categories_table.to_csv("./data/categories_table.csv", index=None)
segm_table.to_csv("./data/segmentation_table.csv", index=None)
|
# Generated by Django 3.1.7 on 2021-03-12 21:36
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: adds the Benefits model, drops Plan.description and
    links Plan to a category (FK to core.benefits)."""

    dependencies = [
        ('core', '0006_auto_20210312_2124'),
    ]

    operations = [
        migrations.CreateModel(
            name='Benefits',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateField(auto_now_add=True, verbose_name='Criação')),
                ('modified', models.DateField(auto_now=True, verbose_name='Atualização')),
                ('active', models.BooleanField(default=True, verbose_name='Ativo?')),
                ('name', models.CharField(max_length=150, verbose_name='Benefícios')),
            ],
            options={
                'verbose_name': 'Benefício',
                'verbose_name_plural': 'Benefícios',
            },
        ),
        migrations.RemoveField(
            model_name='plan',
            name='description',
        ),
        migrations.AddField(
            model_name='plan',
            name='category',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='core.benefits'),
        ),
    ]
|
__author__ = "Narwhale"
# Decide whether a string is "liked": it must be all-uppercase, contain no
# doubled letters (AA), and fail the nested repeated-letter patterns below.
string = input()
lenth = len(string)
num = 1  # 1 -> 'Likes', 0 -> 'Dislikes'
if string.upper() != string:
    num = 0
if string.upper() == string:
    # Condition 1: detect adjacent duplicates like "AA"
    for i in range(lenth - 1):
        if string[i] == string[i + 1]:
            num = 0
            break
    # Condition 2: detect repeats at distance >= 2, like "ABACADA"
    for i in range(lenth - 3):
        print(string[i])
        x1 = string.find(string[i], i + 2)
        print('x1:',x1)
        if x1 == -1:
            continue
        else:
            # Condition 3: a letter between the pair repeats after it ("THETXH")
            for j in range((i + 1), x1):
                print(string[j])
                x2 = string.find(string[j], (x1 + 1))
                print('x2:',x2)
                if x2 > 0:
                    num = 0
                    break
if num == 0:
    print('Dislikes')
else:
    print('Likes')
|
from .auth import *
from .contest import *
|
import os
import time
from collections import Counter
from multiprocessing.dummy import Pool as ThreadPool, Process, Manager
class Uploader(object):
    """Upload a list of files with a thread pool, reporting progress messages
    through a (multiprocessing Manager) queue.

    is_active stays True until the background pool finishes.
    """

    def __init__(self, file_list, n_treads, q):
        assert isinstance(file_list, (list, tuple))
        assert isinstance(n_treads, int)
        assert n_treads >= 0
        # BUG FIX: the original `assert type(q) is Manager().Queue` compared
        # the queue's type against the unbound Queue *factory method*, which
        # is never a type -- the assertion always failed. Duck-type instead.
        assert hasattr(q, 'put')
        self.is_active = True
        self.file_list = file_list
        self.n_treads = n_treads
        self.q = q

    def _upload_file(self, file):
        """Upload one file; put a status message on the queue, return bool."""
        try:
            """ code uploader to FTP"""
            time.sleep(2)
        except Exception as e:
            self.q.put("Error! File {} is not loaded.\n{}".format(file, e))
            return False
        # BUG FIX: the success message and `return True` previously lived in
        # a `finally` block, which overrode the except-branch's `return
        # False` and reported success even when the upload failed.
        self.q.put("Success! File {} is uploaded to the server.".format(file))
        return True

    def _upload_pool(self):
        """Run all uploads in a thread pool, then report the success tally."""
        pool = ThreadPool(processes=self.n_treads)
        res = pool.map(self._upload_file, self.file_list)
        n_res = len(res)
        res_dict = Counter(res)
        succes = res_dict[True]
        self.q.put("Uploaded {}/{}".format(succes, n_res))
        self.is_active = False

    def start(self):
        """Kick off the upload pool in a background process."""
        proc = Process(target=self._upload_pool)
        proc.start()
if __name__ == '__main__':
    q = Manager().Queue()
    # Upload everything under ./test_data relative to the working directory.
    files_list = [os.path.join(os.getcwd(), "test_data", i) for i in os.listdir("./test_data")]
    uploader = Uploader(files_list, 2, q)
    uploader.start()
    # Poll progress messages until the pool marks itself inactive.
    while uploader.is_active:
        progress = q.get()
        print(progress)
|
#!/usr/bin/python3
import time
import smbus
# I2C bus number and accelerometer register map.
# NOTE(review): register layout matches an MMA845x-style accelerometer at
# address 0x1D -- confirm against the actual part's datasheet.
DEVICE_BUS = 1
DEVICE_ADDRESS = 0x1D
DATA_REG = 0x00       # status byte + X/Y/Z sample registers
CTRL_REG = 0x2A       # control register (standby/active/sleep bits)
DATA_CFG_REG = 0x0E   # data configuration (range)
SLEEP_CNT_REG = 0x29  # auto-sleep counter
class Command:
    """Pairs a human-readable description with a callable action."""

    def __init__(self, desc, func):
        self.desc = desc
        self.func = func

    def execute(self, bus):
        """Invoke the stored action, passing along the I2C bus handle."""
        self.func(bus)
def command_help(commands):
    """Print one line per available command: its key and its description."""
    print("Available commands:")
    for name, command in commands.items():
        print("{} - {}".format(name, command.desc))
def standby(bus):
    # Clear CTRL_REG -> standby mode (typically required before config changes).
    bus.write_byte_data(DEVICE_ADDRESS, CTRL_REG, 0x00)


def activate(bus):
    # Set CTRL_REG bit 0 -> active (measuring) mode.
    bus.write_byte_data(DEVICE_ADDRESS, CTRL_REG, 0x01)


def sleep(bus):
    # NOTE(review): writes 0x02 to CTRL_REG -- presumed sleep mode; confirm
    # the bit meaning against the accelerometer datasheet.
    bus.write_byte_data(DEVICE_ADDRESS, CTRL_REG, 0x02)
def read_data(bus):
    """Read one accelerometer sample; print and return it as {'x','y','z'}."""
    # 7 bytes from register 0x00: status byte, then X/Y/Z as MSB/LSB pairs.
    data = bus.read_i2c_block_data(DEVICE_ADDRESS, 0x00, 7)
    # Parse data
    # Each axis is a left-justified 12-bit value ((MSB<<8|LSB)/16); values
    # above 2047 are negative in two's complement, hence the -4096.
    x = (data[1] * 256 + data[2]) / 16
    if x > 2047:
        x -= 4096
    y = (data[3] * 256 + data[4]) / 16
    if y > 2047:
        y -= 4096
    z = (data[5] * 256 + data[6]) / 16
    if z > 2047:
        z -= 4096
    print("X: " + str(x) + ", Y: " + str(y) + ", Z: " + str(z))
    return {'x': x, 'y': y, 'z': z}
def read_loop(bus):
    """Poll and print samples at ~10 Hz forever (Ctrl-C to stop)."""
    while True:
        read_data(bus)
        time.sleep(0.1)
def main():
    """Interactive REPL: configure the sensor, then dispatch user commands."""
    # Prepare I2C bus
    bus = smbus.SMBus(DEVICE_BUS)
    bus.write_byte_data(DEVICE_ADDRESS, DATA_CFG_REG, 0x00)
    commands = {'r': Command('Read data line', read_data),
                '0': Command('Standby Mode', standby),
                '1': Command('Active Mode', activate),
                '2': Command('Sleep Mode', sleep),
                'l': Command('Read Loop', read_loop),
                'h': Command('Help', command_help)}
    command_help(commands)
    while True:
        cmd = input("Command: ")
        if cmd not in commands:
            print("Invalid command")
            continue
        # 'h' is special-cased: command_help takes the commands dict, not the bus.
        if cmd == 'h':
            commands[cmd].execute(commands)
        else:
            commands[cmd].execute(bus)


main()
|
import sys
import subprocess
def bail(msg):
    """Shorthand for raising a generic Exception carrying *msg*."""
    raise Exception(msg)
class CallException(Exception):
    """Raised when a subprocess exits non-zero.

    Carries the failing command plus its stdout, stderr and exit code.
    """

    def __init__(self, args):
        self.args = args
        # Unpack the 4-tuple into named attributes for convenient access.
        self.cmd, self.stdout, self.stderr, self.code = args

    def __str__(self):
        return repr(self.args)
def dumperror(e):
    """Pretty-print a CallException: command, captured output and exit code."""
    print("Error calling '{}':".format(e.cmd))
    print("Stdout:", e.stdout)
    # stderr content goes to this process's stderr stream.
    print("Stderr:", e.stderr, file=sys.stderr)
    print("Return:", e.code)
def call(cmd, newlines=True, sets=False, exceptions=True, combine=False, stdout=True, stderr=True, pipe=False, **kwargs):
    """
    Simple Popen wrapper returning stdout, stderr, returncode, command
    newlines=Toggles universal_newlines
    sets=True will wrap stdout/stderr in set()
    exceptions=True will raise an exception on !0 process exit
    pipe=True captures stdout/stderr via PIPE and returns them; when False,
        output goes straight to this process's streams and (None, None)
        is returned
    combine=True will combine stderr with stdout
    stdout=False will pipe stdout to /dev/null
    stderr=False will pipe stderr to /dev/null
    """
    # Stream selection per flag: /dev/null when disabled, PIPE when
    # capturing, otherwise inherit this process's stdout/stderr.
    with subprocess.Popen(cmd, stdin=subprocess.DEVNULL, stdout=subprocess.DEVNULL if not stdout else subprocess.PIPE if pipe else sys.stdout, stderr=subprocess.DEVNULL if not stderr else subprocess.STDOUT if combine else subprocess.PIPE if pipe else sys.stderr, universal_newlines=newlines, **kwargs) as process:
        if pipe:
            try:
                stdout, stderr = process.communicate()
            except KeyboardInterrupt:
                # NOTE(review): after an interrupt, stdout/stderr keep their
                # truthy flag values rather than captured output -- confirm.
                print("Interrupt!")
        else:
            stdout, stderr = None, None
        process.wait()
        code = process.returncode
        if exceptions and code:
            raise CallException((cmd, stdout, stderr, code))
        else:
            if sets:
                # Split captured text on whitespace into sets for easy diffing.
                return set(stdout.split()) if stdout else set(), set(stderr.split()) if stderr else set(), code, cmd
            else:
                return stdout, stderr, code, cmd
|
import sys

# First command-line argument (a string -- cast with int(n) if numeric).
n = sys.argv[1]
from .KFold import KFold
from .holdout import holdout

# BUG FIX: __all__ previously listed "kfold", a name that does not exist in
# this package (the class is imported as "KFold"), so `from package import *`
# raised AttributeError. Export the actual public names.
__all__ = ["KFold", "holdout"]
|
from django.db import models
from django.db.models.base import Model
# Create your models here.
class ToDoWork(models.Model):
    """A single work-related to-do entry (free-text message)."""
    msg = models.CharField(max_length=200,blank=True)
class ToDoProject(models.Model):
    """A single project-related to-do entry (free-text message)."""
    msg = models.CharField(max_length=200,blank=True)
class ToDoHobby(models.Model):
    """A single hobby-related to-do entry (free-text message)."""
    msg = models.CharField(max_length=200,blank=True)
#!/usr/bin/python
import sys
#compute index of mutated position in aligned sequence
def compute_conservation(file,residue_start,index,wild_type):
count_pos = 0
pos = 0
handle = open(file,"r")
lines = iter(handle.readlines())
for line in lines:
if(line.startswith('>')):
continue
else:
for word in line.split():
if(residue_start > len(word)):
count_pos = 0
print(count_pos)
for i in range(0,len(word),1):
if(word[i] != '-'):
count_pos +=1
if(count_pos == residue_start+index-residue_start):
pos = i
print(word[i])
break
else:
print(residue_start)
print(index)
count_pos = 0
if(residue_start < 0):
chain_res = index - abs(residue_start)#+residue_start #+ abs(residue_start) + abs(residue_start) -1
elif (residue_start == 1):
chain_res= index+residue_start-1
else:
chain_res= index+residue_start-1
for i in range(0,len(word),1):
if(word[i] != '-'):
print(word[i])
count_pos +=1
if(count_pos == chain_res):
pos = i
print(pos)
print("ACID:" + word[i])
if(word[i] == wild_type):
print('Prvy pokus OK')
break
else:
count_pos = 0
chain_res = index + residue_start - residue_start
for i in range(0, len(word), 1):
if (word[i] != '-'):
print(word[i])
count_pos += 1
if (count_pos == chain_res):
pos = i
print(pos)
print("ACID:" + word[i])
if (word[i] == wild_type):
print('Druhy pokus OK')
return pos
else:
print('Neuspech')
break
print(pos)
return pos
compute_conservation(sys.argv[1],int(sys.argv[2]),int(sys.argv[3]),sys.argv[4]) |
# -*- coding: utf-8 -*-
"""
Salt engine for intercepting state jobs and forwarding to the Architect.
"""
# Import python libs
from __future__ import absolute_import
import logging
from architect_client.libarchitect import ArchitectClient
# Import salt libs
import salt.utils.event
logger = logging.getLogger(__name__)
def start(project='default',
          host='127.0.0.1',
          port=8181,
          username=None,
          password=None):
    '''
    Listen to state jobs events and forward state functions and node info
    '''
    # Salt functions whose completed jobs are forwarded to the Architect.
    state_functions = ['state.sls', 'state.apply', 'state.highstate']
    model_functions = ['architect.node_info']
    class_tag = 'architect/minion/classify'
    # Pick the event bus that matches the role this engine runs under.
    if __opts__['__role'] == 'master':
        bus = salt.utils.event.get_master_event(__opts__,
                                                __opts__['sock_dir'],
                                                listen=True)
    else:
        bus = salt.utils.event.get_event(
            'minion',
            transport=__opts__['transport'],
            opts=__opts__,
            sock_dir=__opts__['sock_dir'],
            listen=True)
    logger.info('Architect Engine initialised')
    # Block forever, classifying each incoming event by function name / tag.
    while True:
        event = bus.get_event()
        if not event:
            continue
        fun = event.get('fun', None)
        if fun in state_functions:
            # Dry runs (test=true) are not pushed to the Architect.
            lowered_args = [arg.lower() for arg in event.get('fun_args', [])]
            if 'test=true' not in lowered_args:
                output = ArchitectClient().push_event(event)
                logger.info("Sent Architect state function {}".format(output))
        if fun in model_functions:
            output = ArchitectClient().push_node_info({event['id']: event['return']})
            logger.info("Sent Architect node info function {}".format(output))
        if event.get('tag', None) == class_tag:
            output = ArchitectClient().classify_node({
                'name': event['id'],
                'data': event['data']
            })
            logger.info("Sent Architect node classification {}".format(output))
|
# -*- coding: utf-8 -*-
import inject
import logging
import json
'''
from actions.chat import SendEventToClients
from actions.status import GetStatus
from actions.groups import ListGroups, FindGroup, FindMembers, RemoveMembers, AddMembers, UpdateGroup, CreateGroup
from actions.systems import ListSystems
from actions.laboralInsertion import PersistLaboralInsertion, FindLaboralInsertion, CreateLanguages,PersistLanguage, DeleteLanguage, FindLanguage, ListLanguages, CreateDegrees, PersistDegree, DeleteDegree, FindDegree, ListDegree, AcceptTermsAndConditions, CheckTermsAndConditions
'''
from actions.laboralInsertion import PersistLaboralInsertion, FindLaboralInsertion, CreateLanguages, PersistLanguage, DeleteLanguage, FindLanguage, ListLanguages, CreateDegrees, PersistDegree, DeleteDegree, FindDegree, ListDegree, AcceptTermsAndConditions, CheckTermsAndConditions, PersistLaboralInsertionCV, FindLaboralInsertionCV, GetLaboralInsertionData
# from autobahn.twisted.websocket import WebSocketServerProtocol
# from autobahn.twisted.websocket import WebSocketServerFactory
# from twisted.python import log
# from twisted.internet import reactor
from autobahn.asyncio.websocket import WebSocketServerProtocol
from autobahn.asyncio.websocket import WebSocketServerFactory
import asyncio
from asyncio import coroutine
# http://code.activestate.com/recipes/439358-twisted-from-blocking-functions-to-deferred-functi/
# from twisted.internet.threads import deferToThread
# deferred = deferToThread.__get__
from model.config import Config
from model.utils import DateTimeEncoder
from model.session import Session
from model.exceptions import *
''' actions del core '''
from actions.login.password import ChangePassword, ResetPassword
from actions.users.users import UpdateUser, FindUser, ListUsers
from actions.users.mail import ListMails, PersistMail, ConfirmMail, RemoveMail
from actions.requests.requests import CreateAccountRequest, ResendAccountRequest, ConfirmAccountRequest, ListAccountRequests, ApproveAccountRequest, RemoveAccountRequest, RejectAccountRequest
''' sistemas '''
from actions.systems.assistance.assistance import GetAssistanceData, GetAssistanceStatus, GetAssistanceStatusByUsers, GetFailsByDate, GetFailsByFilter, GetSchedules, NewSchedule, DeleteSchedule, GetPosition, UpdatePosition
from actions.systems.assistance.logs import GetAssistanceLogs
from actions.systems.assistance.justifications import GetJustifications, GetJustificationStock, GetJustificationRequests, GetJustificationRequestsToManage, GetJustificationRequestsByDate, RequestJustification, RequestJustificationRange, UpdateJustificationRequestStatus, GetSpecialJustifications, RequestGeneralJustification, GetGeneralJustificationRequests, DeleteGeneralJustificationRequest, RequestGeneralJustificationRange, GetJustificationsByUser, UpdateJustificationStock
from actions.systems.assistance.overtime import GetOvertimeRequests, GetOvertimeRequestsToManage, RequestOvertime, UpdateOvertimeRequestStatus
''' firmware asistencia '''
# from actions.systems.assistance.firmware import FirmwareDeviceAnnounce, FirmwareSyncUser, FirmwareSyncLogs
from actions.systems.students.students import CreateStudent, FindStudent, PersistStudent, FindAllStudents
from actions.systems.tutors.tutors import PersistTutorData, ListTutorData
from actions.systems.ntdomain.domain import PersistDomain, DeleteDomain, FindDomain
from actions.systems.mail.mail import PersistInstitutionalMail, DeleteInstitutionalMail, FindInstitutionalMail
from actions.systems.offices.offices import GetOffices, GetUserOfficeRoles, GetUserInOfficesByRole, GetOfficesByUserRole, GetOfficesUsers, DeleteOfficeRole, AddOfficeRole, PersistOfficeRole, PersistOffice, RemoveUserFromOffice, AddUserToOffices, GetRolesAdmin
from actions.systems.issue.issue import NewRequest, GetIssuesByUser, DeleteIssue, UpdateIssueData
''' aca se definen las acciones a ser manejadas por el server de websocket '''
# Action instances the websocket server can handle; _dispatch() offers each
# incoming message to these in order until one reports it as managed.
# NOTE(review): GetOffices() is instantiated twice (offices group and the
# assistance group) — confirm whether the duplicate is intentional.
actions = [
    PersistLaboralInsertion(), FindLaboralInsertion(), CreateLanguages(), PersistLanguage(), DeleteLanguage(), FindLanguage(), ListLanguages(), CreateDegrees(), PersistDegree(), DeleteDegree(), FindDegree(), ListDegree(), AcceptTermsAndConditions(), CheckTermsAndConditions(), PersistLaboralInsertionCV(), FindLaboralInsertionCV(), GetLaboralInsertionData(),
    ChangePassword(), ResetPassword(),
    ListUsers(), UpdateUser(), FindUser(), ListMails(), PersistMail(), ConfirmMail(), RemoveMail(),
    PersistDomain(), DeleteDomain(), FindDomain(),
    PersistInstitutionalMail(), DeleteInstitutionalMail(), FindInstitutionalMail(),
    CreateStudent(), FindStudent(), PersistStudent(), FindAllStudents(),
    PersistTutorData(), ListTutorData(),
    GetOffices(), GetUserOfficeRoles(), GetUserInOfficesByRole(), GetOfficesByUserRole(), GetOfficesUsers(), DeleteOfficeRole(), AddOfficeRole(), PersistOfficeRole(), PersistOffice(), RemoveUserFromOffice(), AddUserToOffices(), GetRolesAdmin(),
    # FirmwareDeviceAnnounce(), FirmwareSyncUser(), FirmwareSyncLogs(),
    GetAssistanceLogs(), GetAssistanceData(), GetSchedules(), NewSchedule(), DeleteSchedule(), GetPosition(), UpdatePosition(), GetFailsByFilter(), GetFailsByDate(), GetAssistanceStatus(), GetAssistanceStatusByUsers(), GetOffices(), GetJustifications(), GetJustificationsByUser(), GetJustificationStock(), UpdateJustificationStock(), GetJustificationRequests(), GetJustificationRequestsToManage(), GetJustificationRequestsByDate(), RequestJustification(), RequestJustificationRange(), UpdateJustificationRequestStatus(),GetSpecialJustifications(), RequestGeneralJustification(), GetGeneralJustificationRequests(), DeleteGeneralJustificationRequest(), RequestGeneralJustificationRange(),
    GetOvertimeRequests(), GetOvertimeRequestsToManage(), RequestOvertime(), UpdateOvertimeRequestStatus(),
    CreateAccountRequest(), ResendAccountRequest(), ConfirmAccountRequest(), ListAccountRequests(), ApproveAccountRequest(), RemoveAccountRequest(), RejectAccountRequest(),
    NewRequest(), GetIssuesByUser(), DeleteIssue(), UpdateIssueData(),
]
"""
''' la transformo en un deferred para que sea procesada en otro thread '''
@deferred
def dispatch(protocol,message):
protocol._dispatch(message)
''' esto es necesario en funcion para usar .callFromThread '''
def sendMessage(protocol,message):
protocol.sendMessage(message,False)
"""
class ActionsServerProtocol(WebSocketServerProtocol):
    """WebSocket protocol that decodes JSON action messages and dispatches
    them to the registered `actions`, running handlers on an executor thread
    so the asyncio loop stays responsive."""
    # Session manager, injected by the `inject` container.
    session = inject.attr(Session)
    def _encodeMessage(self, msg):
        # Serialize to JSON (DateTimeEncoder handles datetimes) and UTF-8
        # encode; only short messages are logged to keep the log readable.
        jmsg = json.dumps(msg, ensure_ascii=False, cls=DateTimeEncoder)
        if (len(jmsg) < 1024):
            logging.debug(jmsg)
        ejmsg = jmsg.encode('utf-8')
        return ejmsg
    def _sendEncodedMessage(self, msg):
        # NOTE(review): super(WebSocketServerProtocol, self) skips
        # WebSocketServerProtocol itself in the MRO (one level beyond the
        # usual super() call) — confirm this resolves to autobahn's real
        # sendMessage implementation as intended.
        sm = super(WebSocketServerProtocol, self).sendMessage
        # Handlers run on executor threads, so the actual send must be
        # marshalled back onto the event loop thread.
        loop = asyncio.get_event_loop()
        loop.call_soon_threadsafe(sm, msg)
        # super(WebSocketServerProtocol,self).sendMessage(msg,False)
        # reactor.callFromThread(sendMessage, super(WebSocketServerProtocol, self), msg)
    def sendException(self, e):
        # Report an exception that occurred before a message id was known.
        msg = {'type': 'Exception', 'name': e.__class__.__name__}
        self.sendMessage(msg)
    def sendError(self, msg, e):
        # Report a handler failure, echoing the originating message id.
        mmsg = {'id': msg['id'], 'error': e.__class__.__name__}
        self.sendMessage(mmsg)
    def sendMessage(self, msg):
        # Encode then hand off to the loop-safe sender.
        ejmsg = self._encodeMessage(msg)
        self._sendEncodedMessage(ejmsg)
    def broadcast(self, msg):
        # Encode once and let the factory fan the bytes out to all clients.
        msg = self._encodeMessage(msg)
        self.factory.broadcast(msg)
    def _dispatch(self, message):
        # Offer the message to each registered action until one handles it;
        # runs on an executor thread (see onMessage).
        try:
            managed = False
            for action in actions:
                managed = action.handleAction(self, message)
                if managed:
                    break
        except Exception as e:
            logging.exception(e)
            self.sendError(message, e)
    @coroutine
    def onMessage(self, payload, isBinary):
        # Validate the incoming frame, refresh the session if present, then
        # dispatch on a worker thread so blocking handlers don't stall the loop.
        try:
            if isBinary:
                """ por ahora no manejo mensajes binarios """
                # (binary messages are not handled for now)
                return
            msg = payload.decode('utf-8')
            if len(msg) < 1024:
                logging.debug('cliente -> server {}'.format(msg))
            message = json.loads(msg)
            # Every request must carry an action name and a correlation id.
            if 'action' not in message:
                raise MalformedMessage()
            if 'id' not in message:
                raise MalformedMessage()
            if 'session' in message:
                sid = message['session']
                self.session.touch(sid)
            loop = asyncio.get_event_loop()
            yield from loop.run_in_executor(None, self._dispatch, message)
        except Exception as e:
            logging.exception(e)
            self.sendException(e)
    def onConnect(self, cr):
        # Log the connection request; returning None accepts any subprotocol.
        logging.debug('cliente conectado')
        logging.debug(cr)
        return None
    def onOpen(self):
        # Handshake complete: register with the factory for broadcasts.
        logging.debug('conexión establecida')
        self.factory.register(self)
    def onClose(self, wasClean, code, reason):
        logging.debug('cliente desconectado {0}, {1}, {2}'.format(wasClean, code, reason))
    def connectionLost(self, reason):
        # Drop the client from the broadcast list once the TCP link dies.
        WebSocketServerProtocol.connectionLost(self, reason)
        self.factory.unregister(self)
class BroadcastServerFactory(WebSocketServerFactory):
    """WebSocket server factory that tracks connected protocols so an
    encoded message can be broadcast to every client."""

    def __init__(self, debug=False, debugCodePaths=False):
        super().__init__(debug=debug, debugCodePaths=debugCodePaths)
        # Connected protocol instances, in registration order.
        self.clients = []

    def register(self, client):
        """Begin tracking *client*; duplicates are ignored."""
        if client in self.clients:
            return
        logging.debug("registered client {}".format(client.peer))
        self.clients.append(client)

    def unregister(self, client):
        """Stop tracking *client*; unknown clients are a no-op."""
        if client not in self.clients:
            return
        logging.debug("unregistered client {}".format(client.peer))
        self.clients.remove(client)

    def broadcast(self, msg):
        """Send the already-encoded *msg* to every registered client."""
        logging.debug("broadcasting message '{}' ..".format(msg))
        for proto in self.clients:
            proto._sendEncodedMessage(msg)
            logging.debug("message sent to {}".format(proto.peer))
def getLoop():
    """Create the websocket server from configuration and return the
    (event loop, server, factory) triple."""
    config = inject.instance(Config)
    factory = BroadcastServerFactory()
    factory.protocol = ActionsServerProtocol
    event_loop = asyncio.get_event_loop()
    bind_ip = config.configs['server_ip']
    bind_port = int(config.configs['server_port'])
    server = event_loop.run_until_complete(
        event_loop.create_server(factory, bind_ip, bind_port))
    return (event_loop, server, factory)
"""
def getPort():
config = inject.instance(Config)
log.startLogging(sys.stdout)
factory = BroadcastServerFactory()
factory.protocol = ActionsServerProtocol
factory.protocol = ActionsServerProtocol
port = reactor.listenTCP(int(config.configs['server_port']), factory=factory, interface=config.configs['server_ip'])
return (reactor,port,factory)
"""
|
from CallBackOperator import CallBackOperator
from DeltaCPClient import DeltaCPClient
from abc import abstractmethod
from SignalSendingPackage.SignalVisualizer import SignalVisualizer
from threading import Thread
from SignalSendingPackage.SignalTimer import SignalTimer
from SignalGenerationPackage.SignalData import SignalData
from LoggersConfig import loggers
from SignalSendingPackage.SendingLogger import SendingLogger
from time import sleep, time
import copy
class SignalSendingOperator(CallBackOperator):
    """Base operator that streams a generated signal to the Delta frequency
    drive and mirrors progress on the plot widget.

    Concrete subclasses implement ExecuteSending() and get_signal_length().
    Sending runs on a background thread; Start/Pause/Resume/Stop buttons of
    the signal window are wired to this operator in ConnectCallBack().
    """
    def __init__(self, signal_main_window, plot_widget, DebugMode=True):
        super().__init__(signal_main_window, model=None, value_range=None)
        # The sending widgets live on the signal-generation window
        # (user_interface), so widget (class) names may differ between
        # windows — they are re-bound through the accessor methods below.
        self.signal_main_window = signal_main_window
        self.plot_widget = plot_widget
        self.DebugMode = DebugMode
        # Below: parameters shared by both sending strategies — the
        # ahead-of-time scheduling approach and the more naive one.
        self.TimeStamp = 0
        self.ValueToSend = 0
        self.Timer = SignalTimer(interval=1.0, function=self.TestTimer)
        self.DeltaCPClient = DeltaCPClient()
        self.SendingLogger = SendingLogger()
        self.SetFreqs = []
        self.FunctionWasCalled = False
        self.SendingThreadWasLaunched = False
        self.SignalVisualizerConstructed = False
        self.SendingOnPause = False
        self.SendingStopped = False
        self.EndlessSendingEnabled = False
        self.CycleSendingEnabled = True # preselected in the UI
        self.CycleFinishedSuccessfully = False
        self.CycleRestarted = False
        self.IsFirstCycle = True
        self.SendingThread = None
        self.SignalVisualizer = None
        self.PointsIterator = 0 # Just Counter to iterate over [x, y] arrays of SignalData
        self.CycleGap = 0.01 # Seconds to wait before sending the next cycle (continuous sending).
        self.CommandExecutionTime = 0.0 # Some time is spent executing a command (pushing the
        # frequency to the drive, refreshing the plot).  This parameter must
        # be tuned so command execution starts that much earlier, offsetting
        # the execution latency.
        self.tasks_queue = None
        self.task_queue_thread = None # Background thread that monitors the task queue.
        self.wait_to_finish = False
        self.task_queue_thread_started = False
        self.lag_portion = 0 # Each command of the next cycle is issued lag_portion earlier, compensating accumulated delays.
        self.start_sending_time = 0
        self.cycle_counter = 0
        self.point_arr = None # Points to send.  Initially a copy of SignalData.point_array_with_requests;
        # recomputed on every cycle.
    @abstractmethod
    def ExecuteSending(self, Time):
        """Send the signal points; implemented by concrete operators."""
        pass
    def get_log_filename_lineedit(self):
        # Line edit holding the log file name (widget names vary per window).
        return self.signal_main_window.get_log_filename_lineedit()
    def get_start_button(self):
        return self.signal_main_window.get_start_button()
    def get_pause_radio_button(self):
        return self.signal_main_window.get_pause_radio_button()
    def get_resume_radio_button(self):
        return self.signal_main_window.get_resume_radio_button()
    def get_stop_button(self):
        return self.signal_main_window.get_stop_button()
    def get_endless_send_radiobutton(self):
        return self.signal_main_window.get_endless_send_radiobutton()
    def get_cycles_number_widget(self):
        return self.signal_main_window.get_cycles_number_widget()
    def get_cycle_send_radiobutton(self):
        return self.signal_main_window.get_cycle_send_radiobutton()
    # overridden
    def ConnectCallBack(self):
        """Wire the window's sending widgets to this operator's handlers."""
        # Accessor methods are used because widget names may differ
        # between the various signal windows.
        StartButton = self.get_start_button()
        PauseRadioButton = self.get_pause_radio_button()
        ResumeRadioButton = self.get_resume_radio_button()
        StopButton = self.get_stop_button()
        EndlessSendCheckbox = self.get_endless_send_radiobutton()
        # Endless sending (EndlessSendCheckbox) and cycle-count sending
        # (CyclesNumberSpinBox) must be mutually exclusive, so mutually
        # exclusive handlers are connected.
        EndlessSendCheckbox.toggled.connect(lambda: self.EnableSendingRegime()) # Which mode: endless sending
        # or a fixed number of cycles
        StartButton.clicked.connect(self.StartSendingSignal)
        PauseRadioButton.toggled.connect(self.PauseSending)
        ResumeRadioButton.toggled.connect(self.ResumeSending)
        StopButton.clicked.connect(self.StopSendingSignal)
    def EnableSendingRegime(self):
        """Mirror the endless/cycle radio buttons into the mode flags."""
        EndlessSendradioButton = self.get_endless_send_radiobutton()
        CycleSendradioButton = self.get_cycle_send_radiobutton()
        endless_selected = EndlessSendradioButton.isChecked()
        cycle_send_selected = CycleSendradioButton.isChecked()
        self.EndlessSendingEnabled = endless_selected
        self.CycleSendingEnabled = cycle_send_selected
    def PauseSending(self):
        """Pause sending and keep the pause/resume buttons consistent."""
        if self.window.PauseSendingradioButton.isChecked():
            loggers['Application'].info('Sending Paused')
            loggers['SignalSending'].info('Sending Paused')
            self.SendingOnPause = True
            self.window.ResumeSendingradioButton.setChecked(False)
        else:
            self.window.ResumeSendingradioButton.setChecked(True)
    def ResumeSending(self):
        """Resume sending and keep the pause/resume buttons consistent."""
        if self.window.ResumeSendingradioButton.isChecked():
            # NOTE(review): these log lines say 'Sending Paused' although
            # this is the resume path — looks like a copy-paste slip in the
            # log text; the messages are runtime strings, left unchanged.
            loggers['Application'].info('Sending Paused')
            loggers['SignalSending'].info('Sending Paused')
            self.SendingOnPause = False
            self.window.PauseSendingradioButton.setChecked(False)
        else:
            self.window.PauseSendingradioButton.setChecked(True)
    def StopSendingSignal(self):
        """Stop sending: wind down worker threads, zero the drive, reset UI
        state, redraw the original signal and save the log."""
        try:
            self.SendingStopped = True
            if self.task_queue_thread is not None:
                self.wait_to_finish = True
                self.task_queue_thread.join()
                self.wait_to_finish = False
                self.task_queue_thread_started = False # The monitor thread was joined and is dead, so
                # mark it as not started so it is recreated on the next "Start Sending".
            if self.tasks_queue is not None:
                with self.tasks_queue.mutex:
                    self.tasks_queue.queue.clear()
            self.SetFreqs.clear()
            self.DeltaCPClient.SetFrequency(0)
            self.DeltaCPClient.SendStop()
            self.IsFirstCycle = True
            self.lag_portion = 0 # reset accumulated time lag
            current_cycle_display = self.signal_main_window.get_LCD_display()
            current_cycle_display.display(0) # reset the current-cycle display back to zero
            loggers['Debug'].debug(f'Stopping sending thread')
            if not (self.SendingThread is None):
                self.SendingThread.join()
                self.SendingThread = None
            # Redraw the original signal on the plot
            self.SignalVisualizer.ResetPlot()
            # Save the log file
            self.SaveLog()
        # NOTE(review): bare except swallows every error (prints to stdout
        # only) — consider narrowing and logging via `loggers`.
        except:
            import sys
            print(sys.exc_info())
    def SaveLog(self):
        """Persist the sending log to the .xlsx file named in the UI."""
        log_lineedit = self.get_log_filename_lineedit()
        log_filename = log_lineedit.text()
        self.SendingLogger.output_filename = log_filename + '.xlsx'
        self.SendingLogger.save_database()
    def TestTimer(self):
        """Timer tick: poll the drive's current frequency, plot it and, for
        real points, push self.ValueToSend to the drive."""
        # If self.ValueToSend is None this is a "dummy point": do not set it
        # on the drive, only poll the current frequency and plot it.
        # Polling happens in every case, hence it sits outside the if/else.
        if self.DebugMode:
            CurrentFreq = 0
        else:
            CurrentFreq = self.DeltaCPClient.RequestCurrentFrequency()
        if not self.SendingStopped:
            self.SignalVisualizer.UpdateCurrentFrequency(self.TimeStamp, CurrentFreq)
            if self.ValueToSend is None:
                loggers['Debug'].debug(f'SignalSendingOperator: TestTimer: Request current freq')
                loggers['SignalSending'].info(f'Current frequency = {CurrentFreq} Hz')
            else:
                loggers['Debug'].debug(f'TestTimer: ValueToSend = {self.ValueToSend}')
                # While the window is open, keep visualizing and sending
                value_to_send = int(self.ValueToSend * 100) # cast to int, otherwise pymodbus raises an error
                self.DeltaCPClient.SetFrequency(value_to_send)
                self.SignalVisualizer.UpdateSetFrequency(self.TimeStamp, self.ValueToSend)
        self.FunctionWasCalled = True
    def RestartSignalIterator(self):
        # Rewind the point iterator to the start of the signal.
        self.PointsIterator = 0
    def RestartVisualization(self, TimeArray):
        self.SignalVisualizer.Restart(TimeArray)
    def LaunchSendingThread(self):
        # Run the sending loop off the UI thread.
        self.SendingThread = Thread(target=self.ThreadFunc)
        self.SendingThread.start()
    def StartSendingSignal(self):
        """Start (or restart) the background sending thread."""
        current_cycle_display = self.signal_main_window.get_LCD_display()
        current_cycle_display.display(1) # reset the current-cycle display
        if self.SendingThread is None:
            self.SendingStopped = False # The flag must be cleared, otherwise the sequence
            # "close visualization -> press Stop -> press Start" misbehaves.
            loggers['Debug'].debug(f'Launching thread, thread is None')
            if not self.SignalVisualizerConstructed:
                self.SignalVisualizer = SignalVisualizer(self.plot_widget)
            self.DeltaCPClient.SendStart()
            self.LaunchSendingThread()
        else:
            if not self.SendingThread.is_alive():
                loggers['Debug'].debug(f'Launching thread, thread is not alive')
                self.SignalVisualizer.Restart(TimeArray=[])
                self.RestartSignalIterator()
                self.SendingStopped = False # this flag has to be cleared
                self.LaunchSendingThread()
            else:
                loggers['Debug'].debug(f'Prev sending thread is executing, cant launch one')
    def ThreadFunc(self):
        """Worker loop: send the first cycle, then keep restarting cycles
        until stopped or the configured cycle count is reached."""
        self.Timer = SignalTimer(interval=0.1, function=self.TestTimer)
        self.point_arr = copy.deepcopy(SignalData.point_array_with_requests)
        updated_x = SignalData.x.copy()
        self.SignalVisualizer.RefreshData(SignalData.x, SignalData.y)
        self.ExecuteSending(self.point_arr)
        self.cycle_counter = 0
        cycle_number_widget = self.signal_main_window.get_cycles_number_widget()
        current_cycle_display = self.signal_main_window.get_LCD_display()
        while True:
            if self.SendingStopped == True:
                self.SendingStopped = False # Reset the flag
                current_cycle_display.display(0)
                return
            if self.CycleFinishedSuccessfully:
                self.CycleFinishedSuccessfully = False
                self.cycle_counter += 1
                if self.EndlessSendingEnabled:
                    current_cycle_display.display(self.cycle_counter + 1)
                    self.RestartSending(updated_x)
                if self.CycleSendingEnabled:
                    cycles_to_perform = cycle_number_widget.value()
                    if self.cycle_counter >= cycles_to_perform:
                        return
                    else:
                        current_cycle_display.display(self.cycle_counter + 1)
                        self.RestartSending(updated_x)
    @abstractmethod
    def get_signal_length(self):
        """Return the duration of one signal cycle; subclass-specific."""
        pass
    def RestartSending(self, updated_x):
        """Shift the signal by one period, measure the accumulated lag and
        launch the next cycle."""
        upd_val = SignalData.x[-1]
        self.update_time_stamps(upd_val)
        updated_x = self.update_array(updated_x, upd_val)
        # restarting points Iterator, Visualisation and Sending Thread
        self.PointsIterator = 0
        self.SignalVisualizer.Restart(updated_x) # SignalVisualizer draws X, Y without the request points
        self.CycleRestarted = True
        # How far behind schedule the previous cycles have fallen.
        dt_diff = (time() - self.start_sending_time) - ((self.cycle_counter) * self.get_signal_length())
        self.SendingLogger.log_cycle_dt_delay(dt_diff)
        if dt_diff > 0:
            # Spread the lag over the commands of the next cycle.
            self.lag_portion = dt_diff / (len(SignalData.point_array_with_requests) - 2)
            loggers['Debug'].debug(f'lag portion = {self.lag_portion}')
        self.ExecuteSending(self.point_arr)
    def update_time_stamps(self, upd_val):
        # Shift every pending point's timestamp into the next period.
        for p in self.point_arr:
            p.x += upd_val
    @staticmethod
    def update_array(arr, upd_val):
        # Shift each element of *arr* by *upd_val* in place and return it.
        for i in range(len(arr)):
            arr[i] += upd_val
        return arr
    def Restart(self, Time):
        """Shift *Time* into the next period and re-run the sending pass."""
        self.CycleFinishedSuccessfully = False
        upd_val = SignalData.x[-1]
        for i in range(len(Time)):
            Time[i] += upd_val + SignalData.dx[i]
        self.RestartSignalIterator()
        self.RestartVisualization(Time)
        self.ExecuteSending(Time)
    def PresetFrequency(self, value, x_coord):
        """Set the initial frequency on the drive before a run starts and
        wait (outside debug mode) until the drive reports it."""
        # Before starting, if the frequency is non-zero, pre-set the
        # required initial frequency on the drive.
        # Cast to int, otherwise pymodbus raises an error
        value_to_send = int(value * 100)
        self.DeltaCPClient.SetFrequency(value_to_send)
        if not self.DebugMode:
            self.RequestFreqUntilEqual(value)
        self.SignalVisualizer.UpdateSetFrequency(x_coord, value)
    def RequestFreqUntilEqual(self, value):
        """Poll the drive until its frequency matches *value*, re-issuing
        the set command when the reading stalls short of the target."""
        accuracy = 0.05
        dt_to_wait = 1.7 # How long to wait before polling the frequency again
        # to compare it with the preset value
        requests_limit = 4 # Setting the frequency (SetFrequency) may not succeed on the
        # first try; after this many polls, issue the set command again.
        requests_number = 0
        prev_freq = None
        value_to_send = int(value * 100)
        while True:
            sleep(dt_to_wait)
            current_freq = self.DeltaCPClient.RequestCurrentFrequency(DebugMode=self.DebugMode)
            # loggers['Debug'].debug(f'RequestFreqUntilEqual: F_current = {current_freq} Hz')
            if not (current_freq is None) and abs(current_freq - value) <= accuracy:
                return # The frequency has been reached within *accuracy*
            requests_number += 1
            if requests_number == requests_limit:
                requests_number = 0
                # If a large frequency (say 30 Hz) is preset and the ramp-up
                # is slow, the set command may have succeeded while the drive
                # is still accelerating.  Re-issuing the command only makes
                # sense when current_freq == prev_freq.
                if (prev_freq is not None) and (current_freq is not None):
                    loggers['Debug'].debug(f'Waiting limit reached: F_prev = {prev_freq}, F_current = {current_freq}')
                    if abs(prev_freq - current_freq) <= accuracy: # i.e. the previous reading equals the current one — set again
                        loggers['Debug'].debug(f'RequestFreqUntilEqual: Retrying to set frequency')
                        self.DeltaCPClient.SetFrequency(value_to_send)
            prev_freq = current_freq
            # loggers['Debug'].debug(f'RequestFreqUntilEqual: F_prev = {prev_freq} Hz')
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.action_chains import ActionChains
from random import randint
import os, time, sys
from main.page.base import BasePage
import subprocess
class peoplePage(BasePage):
# locator variable tab
_tab_personal_profile_loc = (By.XPATH, "//*[@id='content-container']/div[5]/div/div[2]/ul/li[1]/a")
_tab_address_list_loc = (By.XPATH, "//*[@id='content-container']/div[5]/div/div[2]/ul/li[2]/a")
_tab_bank_accounts_loc = (By.XPATH, "//*[@id='content-container']/div[5]/div/div[2]/ul/li[3]/a")
_tab_notification_loc = (By.XPATH, "//*[@id='content-container']/div[5]/div/div[2]/ul/li[4]/a")
_tab_privacy_settings_loc = (By.XPATH, "//*[@id='content-container']/div[5]/div/div[2]/ul/li[5]/a")
#--
#locator tab 1 personal profile
_name_loc = (By.ID, "full-name")
_birthday_date_dd_loc = (By.XPATH, "//select[@name='bday_dd']/option")
_birthday_date_mm_loc = (By.XPATH, "//select[@name='bday_mm']/option")
_birthday_date_yy_loc = (By.XPATH, "//select[@name='bday_yy']/option")
_gender_male_loc = (By.ID, "gender-male")
_gender_female_loc = (By.ID, "gender-female")
_hobbies_loc = (By.ID, "hobbies")
_messenger_loc = (By.ID, "messenger")
_password_loc = (By.XPATH, "//*[@id='form-edit-profile']/div[8]/div[2]/div/input")
_submit_personal_profile_loc = (By.XPATH, '//*[@id="form-edit-profile"]/div[9]/button')
#--
#locator tab 1 edit photo
_upload_image_loc = (By.ID, 'pickfiles')
#--
# locator tab 1 edit password
_edit_password_loc = (By.XPATH, '//*[@id="img-profile"]/div[2]/button')
_old_password_loc = (By.ID, "oldpassword")
_new_password_loc = (By.ID, "newpassword")
_confirmation_password_loc = (By.ID, "confpassword")
_save_password_loc = (By.XPATH, '//*[@id="edit-contact"]/div[4]/button[2]')
#--
#locator tab 2 address list
_add_new_address_loc = (By.XPATH, "//*[@id='content-container']/div[5]/div/div[2]/div/div[1]/div/div[2]/small/a")
#--
#instance tab 2 add new address
_address_as_loc = (By.ID, "addr_name")
_receiver_name_loc = (By.ID, "receiver_name")
_address_loc = (By.ID, "alamat")
_postal_code_loc = (By.ID, "postal_code")
_province_loc = (By.XPATH, "//select[@id='provinsi']/option")
_regency_loc = (By.XPATH, "//select[@id='kota']/option")
_district_loc = (By.XPATH, "//select[@id='kec']/option")
_phone_address_loc = (By.ID, "kontak")
_password_address_loc = (By.ID, "usr_pwd")
_submit_new_address_loc = (By.XPATH, '//*[@id="add-address"]/div[9]/button[2]')
#--
#instance tab 2 edit address
_link_edit_loc = (By.CSS_SELECTOR, "a.edit-address")
_submit_edit_address_loc = (By.XPATH, '//*[@id="edit-address"]/div[10]/button[2]')
_link_delete_loc = (By.CSS_SELECTOR, "a.delete-address")
_submit_delete_address_loc = (By.XPATH, '//*[@id="delete-address"]/div[2]/button[2]')
_link_set_default_loc = (By.CSS_SELECTOR, "a.set-default")
_submit_set_default_address_loc = (By.XPATH, '//*[@id="set-default-address"]/div[2]/button[2]')
#--
#instance tab 2 sorting address
_search_address_loc = (By.ID, 'siteSearchBox')
_button_search_loc = (By.XPATH, '//*[@id="siteSearchSubmit"]')
_sorting_address_loc = (By.ID, 'address-order-by')
#--
#instance tab 3 bank accounts list
_add_bank_account_loc = (By.XPATH, '//*[@id="content-container"]/div[5]/div/div[2]/div/div[1]/small/a')
#--
#instance tab 3 bank accoounts add new bank accounts
_account_name_loc = (By.ID, "acc_name")
_account_no_loc = (By.ID, "acc_no")
_choose_bank_loc = (By.ID, "choose-bank")
_branch_name_loc = (By.ID, "nama-cabang")
_send_otp_loc = (By.ID, "sent-verification-code")
_password_bank_loc = (By.ID, "usr_pwd")
_submit_new_bank_account_loc = (By.XPATH, "//*[@id='add-bank-acc']/div[7]/button[2]")
_input_bank_loc = (By.ID, "input-bank")
_button_search_bank_loc = (By.XPATH, "//*[@id='add-bank']/div[1]/button")
_radio_choose_bank_loc = (By.ID, "nama-bank-sel1")
#--
#instance tab 3 edit bank accounts
_link_edit_bank_loc = (By.CSS_SELECTOR, "a.edit-bank-acc")
_link_edit_bank_list_loc = (By.ID, "edit-bank")
_link_delete_bank_loc = (By.CSS_SELECTOR, "a.delete-bank-acc")
_submit_delete_bank_loc = (By.XPATH, '//*[@id="delete-address"]/div[2]/button[2]')
_link_set_default_bank_loc = (By.CSS_SELECTOR, "a.set-default-acc")
_submit_set_default_bank_loc = (By.XPATH, '//*[@id="set-default-bank-acc"]/div[2]/button[2]')
_submit_edit_bank_account_loc = (By.XPATH, '//*[@id="edit-bank-acc"]/div[7]/button[2]')
#--
#instance tab 4 notification
_notice_newsletter_loc = (By.ID, 'f-notice-news-letter')
_notice_review_loc = (By.ID, 'f-notice-review')
_notice_talk_loc = (By.ID, 'f-notice-talk-product')
_notice_pm_loc = (By.ID, 'f-notice-pm')
_notice_pm_admin_loc = (By.ID, 'f-notice-pm-from-admin')
_button_save_notification_loc = (By.XPATH, '//*[@id="frm_notification"]/button')
#--
#instance tab 5 privacy settings
_setting_birthdate_loc = (By.XPATH, "//select[@id='setting_flag_birthdate']/option")
_setting_email_loc = (By.XPATH, "//select[@id='setting_flag_email']/option")
_button_save_privacy_settings_loc = (By.XPATH, '//*[@id="frm_privacy"]/button')
#--
# current page
_pl = 'people/' + '2395339'
#--
def open(self, site=""):
self._open(site, self._pl)
def go_to_edit_people_page(self):
self.driver.find_element(By.XPATH,
"//*[@id='content-container']/div[5]/div[1]/div/div/div/div[2]/div/div/a").click()
def go_to_personal_profile_tab(self):
self.driver.find_element(*self._tab_personal_profile_loc).click()
def go_to_address_list_tab(self):
self.driver.find_element(*self._tab_address_list_loc).click()
def go_to_bank_accounts_tab(self):
self.driver.find_element(*self._tab_bank_accounts_loc).click()
def go_to_notification_tab(self):
self.driver.find_element(*self._tab_notification_loc).click()
def go_to_privacy_settings_tab(self):
self.driver.find_element(*self._tab_privacy_settings_loc).click()
def edit_personal_profile(self):
try:
self.driver.find_element(*self._name_loc).click()
self.choose_date_of_birth()
self.driver.find_element(*self._gender_male_loc).click()
self.driver.find_element(*self._hobbies_loc).clear()
self.driver.find_element(*self._hobbies_loc).send_keys("Sepakbola")
self.driver.find_element(*self._messenger_loc).clear()
self.driver.find_element(*self._messenger_loc).send_keys("muhajirin.imam")
self.driver.find_element(*self._password_loc).clear()
self.driver.find_element(*self._password_loc).send_keys("imtokopedia91")
self.driver.find_element(*self._submit_personal_profile_loc).click()
except Exception as inst:
print(inst)
def choose_date_of_birth(self):
try:
time.sleep(1)
self.driver.execute_script("document.getElementsByName('bday_dd')[0].style.display='block'")
self.driver.execute_script(
"document.getElementsByClassName('span2 selectBox-dropdown')[0].style.display='none'")
list_bday_dd = self.driver.find_elements(*self._birthday_date_dd_loc)
i = randint(1, len(list_bday_dd))
list_bday_dd[i].click()
time.sleep(1)
self.driver.execute_script("document.getElementsByName('bday_mm')[0].style.display='block'")
self.driver.execute_script(
"document.getElementsByClassName('span4 selectBox-dropdown')[0].style.display='none'")
list_bday_mm = self.driver.find_elements(*self._birthday_date_mm_loc)
i = randint(1, len(list_bday_mm))
list_bday_mm[i].click()
time.sleep(1)
self.driver.execute_script("document.getElementsByName('bday_yy')[0].style.display='block'")
self.driver.execute_script(
"document.getElementsByClassName('span3 selectBox-dropdown')[0].style.display='none'")
list_bday_yy = self.driver.find_elements(*self._birthday_date_yy_loc)
i = randint(1, len(list_bday_yy))
list_bday_yy[i].click()
except Exception as inst:
print(inst)
    def edit_password(self):
        """Change the account password via the edit-password form.

        Old and new password are the same fixed test value, so the account
        state is unchanged after a successful run. Best-effort: failures are
        printed and swallowed.
        """
        try:
            time.sleep(5)
            self.driver.find_element(*self._edit_password_loc).click()
            time.sleep(5)
            self.driver.find_element(*self._old_password_loc).send_keys("imtokopedia91")
            self.driver.find_element(*self._new_password_loc).send_keys("imtokopedia91")
            self.driver.find_element(*self._confirmation_password_loc).send_keys("imtokopedia91")
            self.driver.find_element(*self._save_password_loc).click()
            time.sleep(2)
        except Exception as inst:
            print(inst)
    def edit_photo(self):
        """Upload a profile photo.

        The native file dialog cannot be driven by Selenium, so a prebuilt
        AutoIt executable fills it in — Windows-only, and the path is
        hard-coded. Best-effort: failures are printed and swallowed.
        """
        try:
            time.sleep(3)
            self.driver.find_element(*self._upload_image_loc).click()
            time.sleep(2)
            subprocess.Popen(r"C:\autoit\upload-image.exe")
            time.sleep(2)
        except Exception as inst:
            print(inst)
    def add_new_address(self):
        """Create a new shipping address with fixed test data.

        Province, regency and district are picked at random by helper
        methods. Best-effort: failures are printed and swallowed.
        """
        try:
            time.sleep(2)
            self.go_to_address_list_tab()
            self.driver.find_element(*self._add_new_address_loc).click()
            self.driver.find_element(*self._address_as_loc).send_keys("Alamat Rumah")
            self.driver.find_element(*self._receiver_name_loc).send_keys("Imam Muhajirin")
            self.driver.find_element(*self._address_loc).send_keys(
                "Graha Handaya Unit R, S, T Jalan Raya Perjuangan No 12A, Kebon Jeruk, Jakarta Barat 11530")
            self.driver.find_element(*self._postal_code_loc).send_keys("11530")
            # Cascading region selects: each choice loads the options of the next.
            self.choose_province()
            self.choose_regency()
            self.choose_district()
            self.driver.find_element(*self._phone_address_loc).send_keys("085640226509")
            self.driver.find_element(*self._submit_new_address_loc).click()
            time.sleep(3)
        except Exception as inst:
            print(inst)
def choose_province(self):
try:
time.sleep(1)
list_province = self.driver.find_elements(*self._province_loc)
i = randint(1, len(list_province))
list_province[i].click()
except Exception as inst:
print(inst)
def choose_regency(self):
try:
time.sleep(1)
list_regency = self.driver.find_elements(*self._regency_loc)
i = randint(1, len(list_regency))
list_regency[i].click()
except Exception as inst:
print(inst)
def choose_district(self):
try:
time.sleep(1)
list_district = self.driver.find_elements(*self._district_loc)
i = randint(1, len(list_district))
list_district[i].click()
except Exception as inst:
print(inst)
    def edit_address(self):
        """Edit the first address in the list, refilling every field with
        fixed test data.

        Unlike :meth:`add_new_address`, saving an edit also requires the
        account password. Best-effort: failures are printed and swallowed.
        """
        try:
            time.sleep(2)
            self.go_to_address_list_tab()
            self.driver.find_element(*self._link_edit_loc).click()
            self.driver.find_element(*self._address_as_loc).clear()
            self.driver.find_element(*self._address_as_loc).send_keys("Alamat Rumah")
            self.driver.find_element(*self._receiver_name_loc).clear()
            self.driver.find_element(*self._receiver_name_loc).send_keys("Imam Muhajirin")
            self.driver.find_element(*self._address_loc).clear()
            self.driver.find_element(*self._address_loc).send_keys(
                "Graha Handaya Unit R, S, T Jalan Raya Perjuangan No 12A, Kebon Jeruk, Jakarta Barat 11530")
            self.driver.find_element(*self._postal_code_loc).clear()
            self.driver.find_element(*self._postal_code_loc).send_keys("11530")
            # Cascading region selects: each choice loads the options of the next.
            self.choose_province()
            self.choose_regency()
            self.choose_district()
            self.driver.find_element(*self._phone_address_loc).clear()
            self.driver.find_element(*self._phone_address_loc).send_keys("085640226509")
            self.driver.find_element(*self._password_address_loc).clear()
            self.driver.find_element(*self._password_address_loc).send_keys("imtokopedia91")
            self.driver.find_element(*self._submit_edit_address_loc).click()
            time.sleep(3)
        except Exception as inst:
            print(inst)
def delete_address(self):
try:
time.sleep(2)
self.go_to_address_list_tab()
self.driver.find_element(*self._link_delete_loc).click()
self.driver.find_element(*self._submit_delete_address_loc).click()
except Exception as inst:
print(inst)
def set_default_address(self):
try:
time.sleep(2)
self.go_to_address_list_tab()
self.driver.find_element(*self._link_set_default_loc).click()
self.driver.find_element(*self._submit_set_default_address_loc).click()
except Exception as inst:
print(inst)
def action_address(self, flag, N):
try:
print("Action " + str(flag) + " " + str(N) + " kali.")
i = 0
while (i < N):
if (flag == "add"):
print("Tambah Address ke-" + str(i + 1))
self.add_new_address()
if (flag == "edit"):
print("Edit Address ke-" + str(i + 1))
self.edit_address()
if (flag == "delete"):
self.delete_address()
print("Hapus Address ke-" + str(i + 1))
if (flag == "default"):
self.set_default_address()
print("Set Default Address ke-" + str(i + 1))
self.driver.refresh()
i += 1
except Exception as inst:
print(inst)
def search_address(self):
try:
time.sleep(2)
self.go_to_address_list_tab()
self.driver.find_element(*self._search_address_loc).send_keys("SADIS")
self.driver.find_element(*self._button_search_loc).click()
time.sleep(5)
except Exception as inst:
print(inst)
def choose_sorting(self):
try:
time.sleep(2)
self.go_to_address_list_tab()
self.driver.execute_script("document.getElementById('address-order-by').style.display = '';")
list_sorting = self.driver.find_elements(By.XPATH, "//select[@id='address-order-by']/option")
i = randint(1, len(list_sorting))
list_sorting[i].click()
except Exception as inst:
print(inst)
    def add_bank_account(self):
        """Register a new bank account (BCA) with fixed test data.

        After requesting the OTP the script sleeps 30 seconds so a human can
        type the code received out-of-band. Best-effort: failures are printed
        and swallowed.
        """
        try:
            time.sleep(2)
            self.go_to_bank_accounts_tab()
            self.driver.find_element(*self._add_bank_account_loc).click()
            self.driver.find_element(*self._account_name_loc).send_keys("Imam Muhajirin")
            self.driver.find_element(*self._account_no_loc).send_keys("00123456789")
            # Bank is picked through a search popup, not a plain select.
            self.driver.find_element(*self._choose_bank_loc).click()
            self.driver.find_element(*self._input_bank_loc).send_keys("BCA")
            self.driver.find_element(*self._button_search_bank_loc).click()
            self.driver.find_element(*self._radio_choose_bank_loc).click()
            self.driver.find_element(*self._branch_name_loc).send_keys("Kebon Jeruk")
            self.driver.find_element(*self._send_otp_loc).click()
            # Manual step: wait for the tester to enter the OTP code.
            time.sleep(30)
            self.driver.find_element(*self._password_bank_loc).send_keys("imtokopedia91")
            self.driver.find_element(*self._submit_new_bank_account_loc).click()
        except Exception as inst:
            print(inst)
    def edit_bank_account(self):
        """Edit the first bank account in the list, refilling every field.

        Mirrors :meth:`add_bank_account`, including the 30-second pause for
        manual OTP entry. Best-effort: failures are printed and swallowed.
        """
        try:
            time.sleep(2)
            self.go_to_bank_accounts_tab()
            self.driver.find_element(*self._link_edit_bank_loc).click()
            self.driver.find_element(*self._account_name_loc).clear()
            self.driver.find_element(*self._account_name_loc).send_keys("Imam Muhajirin")
            self.driver.find_element(*self._account_no_loc).clear()
            self.driver.find_element(*self._account_no_loc).send_keys("98765432100")
            # Bank is picked through a search popup, not a plain select.
            self.driver.find_element(*self._link_edit_bank_list_loc).click()
            self.driver.find_element(*self._input_bank_loc).send_keys("BCA")
            self.driver.find_element(*self._button_search_bank_loc).click()
            self.driver.find_element(*self._radio_choose_bank_loc).click()
            self.driver.find_element(*self._branch_name_loc).clear()
            self.driver.find_element(*self._branch_name_loc).send_keys("Kedoya")
            self.driver.find_element(*self._send_otp_loc).click()
            # Manual step: wait for the tester to enter the OTP code.
            time.sleep(30)
            self.driver.find_element(*self._password_bank_loc).clear()
            self.driver.find_element(*self._password_bank_loc).send_keys("imtokopedia91")
            self.driver.find_element(*self._submit_edit_bank_account_loc).click()
        except Exception as inst:
            print(inst)
def delete_bank_account(self):
try:
time.sleep(2)
self.go_to_bank_accounts_tab()
self.driver.find_element(*self._link_delete_bank_loc).click()
self.driver.find_element(*self._submit_delete_bank_loc).click()
except Exception as inst:
print(inst)
def set_default_bank_account(self):
try:
time.sleep(2)
self.go_to_bank_accounts_tab()
self.driver.find_element(*self._link_set_default_bank_loc).click()
self.driver.find_element(*self._submit_set_default_bank_loc).click()
except Exception as inst:
print(inst)
def action_bank_account(self, flag, N):
try:
print("Action " + str(flag) + " " + str(N) + " kali.")
i = 0
while (i < N):
if (flag == "add"):
print("Tambah Bank Account ke-" + str(i + 1))
self.add_bank_account()
if (flag == "edit"):
print("Edit Bank Account ke-" + str(i + 1))
self.edit_bank_account()
if (flag == "delete"):
self.delete_bank_account()
print("Hapus Bank Account ke-" + str(i + 1))
if (flag == "default"):
self.set_default_bank_account()
print("Set Default Bank Account ke-" + str(i + 1))
self.driver.refresh()
i += 1
except Exception as inst:
print(inst)
def set_notification(self):
try:
time.sleep(2)
self.go_to_notification_tab()
i = 1
while (i < 2):
self.driver.find_element(*self._notice_newsletter_loc).click()
self.driver.find_element(*self._notice_review_loc).click()
self.driver.find_element(*self._notice_talk_loc).click()
self.driver.find_element(*self._notice_pm_loc).click()
self.driver.find_element(*self._notice_pm_admin_loc).click()
i += 1
self.driver.find_element(*self._button_save_notification_loc).click()
except Exception as inst:
print(inst)
def set_privacy_settings(self):
try:
time.sleep(2)
self.go_to_privacy_settings_tab()
self.set_birthdate_settings()
self.set_email_settings()
self.driver.find_element(*self._button_save_privacy_settings_loc).click()
except Exception as inst:
print(inst)
def set_email_settings(self):
try:
time.sleep(2)
self.driver.execute_script("document.getElementById('setting_flag_email').style.display = 'block';")
list_select = self.driver.find_elements(*self._setting_email_loc)
list_select[1].click()
except Exception as inst:
print(inst)
def get_people_ID(self):
url = self.driver.current_url
print (url)
ID = url.strip('https://www.tokopedia.com/people/')
return(ID)
|
# Train a small regression MLP on the California housing dataset and plot
# the training curves.
# Fix: pandas and matplotlib were imported mid-script; all imports are now
# grouped at the top (PEP 8), behavior unchanged.
from sklearn.datasets import fetch_california_housing
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from tensorflow import keras
import pandas as pd
import matplotlib.pyplot as plt

# Load the data and split it into train / validation / test sets.
housing = fetch_california_housing()
X_train_full, X_test, y_train_full, y_test = train_test_split(housing.data, housing.target)
X_train, X_valid, y_train, y_valid = train_test_split(X_train_full, y_train_full)

# Standardize features; the scaler is fit on the training set only to avoid
# leaking validation/test statistics.
scaler = StandardScaler()
X_train = scaler.fit_transform(X_train)
X_valid = scaler.transform(X_valid)
X_test = scaler.transform(X_test)

# Single-hidden-layer MLP regressor trained with plain SGD on MSE.
model = keras.models.Sequential([
    keras.layers.Dense(10, activation="relu", input_shape=X_train.shape[1:]),
    keras.layers.Dense(1)])
model.compile(loss="mean_squared_error", optimizer='sgd')
history = model.fit(X_train, y_train, epochs=20,
                    validation_data=(X_valid, y_valid))
mse_test = model.evaluate(X_test, y_test)

# Sanity-check predictions on a few held-out rows.
X_new = X_test[:3]
y_pred = model.predict(X_new)

# Plot the per-epoch loss curves recorded by fit().
pd.DataFrame(history.history).plot(figsize=(8,5))
plt.grid(True)
plt.gca().set_ylim(0,1)
plt.show()
|
"""
Every page should contain following sections
1) all the locators
2) function to verify the current page is right one
3) function to make sure page is loaded
4) all the actions can be done in this page as functions
"""
from irobot.libraries.iSelenium import iSelenium
from robot.libraries.BuiltIn import BuiltIn
from terminal_commissioning_wizard_page import terminal_commissioning_wizard_page
class terminal_dashboard_page(object):
    """Page object for the terminal dashboard page.

    Holds the page's locators and the actions available from it.
    """
    def __init__(self):
        # all the locators will be defined under init
        self.lnk_admin = "link:Administration"
        self.lnk_commissioning = "link:Commissioning"
        self.lnk_commissioning_wizard = "link:Commissioning Wizard"
    def wait_for_page_to_load(self):
        """Return True once the Commissioning link is visible, False on timeout."""
        try:
            iSelenium().wait_until_element_is_visible(self.lnk_commissioning, timeout=20)
            return True
        # Fix: a bare `except:` also swallowed SystemExit/KeyboardInterrupt;
        # narrowed to Exception.
        except Exception:
            return False
    def goto_commissioning_wizard_page(self):
        """Navigate Commissioning -> Commissioning Wizard.

        Returns True if the wizard page finished loading.
        """
        BuiltIn().sleep(10)
        iSelenium().wait_until_element_is_enabled(self.lnk_commissioning)
        iSelenium().click_link(self.lnk_commissioning)
        iSelenium().wait_until_element_is_enabled(self.lnk_commissioning_wizard)
        iSelenium().click_link(self.lnk_commissioning_wizard)
        return self.wait_for_page_to_load()
class Solution:
    # @param num, a list of integer
    # @return an integer
    def findMin(self, num):
        """Return the minimum element of a rotated sorted array.

        The array is sorted ascending and then rotated, so the first element
        that is smaller than num[0] is the rotation point, i.e. the minimum;
        if no such element exists the array is not rotated and num[0] is the
        minimum. Raises IndexError on an empty list (as before).

        Fixes: removed the unused `l`/`r`/`size` locals left over from an
        abandoned binary-search approach, and replaced Python-2-only `xrange`
        with `range` so the method runs on both Python 2 and 3.
        """
        first = num[0]
        for i in range(1, len(num)):
            if first > num[i]:
                return num[i]
        return first
# Quick manual check of Solution.findMin.
# Fix: `print s.findMin(...)` is Python-2-only syntax; the call form works
# on both Python 2 and 3 for a single argument.
s = Solution()
print(s.findMin([1,2,3]))
|
# -*- coding:utf-8 -*-
from django.db import models
import datetime
import django.utils.timezone as timezone
# Create your models here.
class News(models.Model):
    """A news article with a view counter and publish timestamp."""
    id = models.AutoField(primary_key=True)
    title = models.CharField(max_length=30) # title
    content = models.TextField(max_length=800) # body text
    times = models.IntegerField(default=0) # view count
    author = models.CharField(max_length=20) # publisher
    updatetime =models.DateTimeField()
    def __unicode__(self):
        return self.title
    def getday(self):
        # Month-day part of the timestamp, e.g. '09-04'.
        return datetime.datetime.strftime(self.updatetime,'%m-%d')
    def getyear(self):
        # Four-digit year of the timestamp.
        return datetime.datetime.strftime(self.updatetime,'%Y')
    def getdate(self):
        # Full date as 'YYYY/MM/DD'.
        return datetime.datetime.strftime(self.updatetime,'%Y/%m/%d')
class Notice(models.Model):
    """A site notice/announcement; same shape as News."""
    id = models.AutoField(primary_key=True)
    title = models.CharField(max_length=30) # title
    content = models.TextField(max_length=800) # body text
    times = models.IntegerField(default=0) # view count
    author = models.CharField(max_length=20) # publisher
    updatetime =models.DateTimeField()
    def __unicode__(self):
        return self.title
    def getday(self):
        # Month-day part of the timestamp, e.g. '09-04'.
        return datetime.datetime.strftime(self.updatetime,'%m-%d')
    def getyear(self):
        # Four-digit year of the timestamp.
        return datetime.datetime.strftime(self.updatetime,'%Y')
    def getdate(self):
        # Full date as 'YYYY/MM/DD'.
        return datetime.datetime.strftime(self.updatetime,'%Y/%m/%d')
class Recruit(models.Model):
    """A job opening posted on the site."""
    id = models.AutoField(primary_key=True)
    position = models.CharField(max_length=30) # job title
    description = models.TextField() # job description
    ask = models.TextField() # requirements
    updatetime =models.DateTimeField()
    def __unicode__(self):
        return self.position
class Industry(models.Model):
    """An enterprise's registration profile, including uploaded documents."""
    id = models.AutoField(primary_key=True)
    companyname = models.CharField(max_length=50,unique=True) # company name
    addr = models.CharField(max_length=50) # company location
    companyInd = models.CharField(max_length=20) # industry the company is in
    nature = models.CharField(max_length=20) # ownership type of the company
    product = models.CharField(max_length=20) # main products
    area = models.CharField(max_length=20) # product sales regions
    users = models.IntegerField() # number of employees
    outputvalue = models.IntegerField() # last year's output value
    profit = models.IntegerField() # last year's net profit
    status = models.CharField(max_length=20) # standing within the industry
    license = models.FileField(upload_to='./upload') # business license upload
    bankfile = models.FileField(upload_to='./upload') # bank account approval upload
    reportfile = models.FileField(upload_to='./upload') # last year's audited statements upload
    updatetime = models.DateTimeField(default=timezone.now)
    def __unicode__(self):
        return self.companyname
class ProdIndustry(models.Model):
    """A product offered by an Industry (company), with its trade terms."""
    id = models.AutoField(primary_key=True)
    companyid = models.ForeignKey(Industry) # FK to Industry (one-to-many)
    indust = models.CharField(max_length=20) # industry sector
    product = models.CharField(max_length=20) # product name
    mode = models.CharField(max_length=20) # trade mode
    quality = models.CharField(max_length=50) # quality specifications
    place = models.CharField(max_length=20) # place of origin
    delivplace = models.CharField(max_length=20) # place of delivery
    unitsandpack = models.CharField(max_length=20) # quantity unit and packaging
    unit = models.CharField(max_length=20) # pricing unit
    proportion = models.CharField(max_length=20) # deposit percentage
    inspection = models.FileField(upload_to='./upload') # quality inspection report upload
    placeProve = models.FileField(upload_to='./upload') # certificate of origin upload
    xczlicense = models.FileField(upload_to='./upload') # production license upload
    otherfile = models.FileField(upload_to='./upload') # other supporting documents upload
    updatetime = models.DateTimeField()
class Company(models.Model):
    """A company account with its contact details."""
    id = models.AutoField(primary_key=True)
    companyname = models.CharField(max_length=50,unique=True) # company name
    companyaddr = models.CharField(max_length=50) # address
    telphone = models.CharField(max_length=20) # telephone
    fax = models.CharField(max_length=50) # fax
    mobilePhone = models.CharField(max_length=20) # mobile phone
    updatetime = models.DateTimeField() #
    def __unicode__(self):
        return self.companyname
class Users(models.Model):
    """A user account belonging to a Company.

    NOTE(review): the password is stored as plain CharField — presumably
    unhashed; confirm and consider Django's auth hashing.
    """
    id = models.AutoField(primary_key=True)
    companyid = models.ForeignKey(Company) # FK to Company (one-to-many)
    username = models.CharField(max_length=30,unique=True) # user name
    password = models.CharField(max_length=30) # password
    telphone = models.CharField(max_length=20) # telephone
    fax = models.CharField(max_length=50) # fax
    mobilePhone = models.CharField(max_length=20) # mobile phone
    email = models.EmailField(max_length=50) # email
    updatetime = models.DateTimeField()
    def __unicode__(self):
        return self.username
class Order(models.Model):
    """A posted buy/sell order and its fill state."""
    id = models.AutoField(primary_key=True)
    username = models.CharField(max_length=30) # name of the posting user
    product = models.CharField(max_length=20) # product being traded
    byorsell = models.IntegerField() # 0:buy 1:sell
    price = models.DecimalField(max_digits=8,decimal_places=2) # order price
    place = models.CharField(max_length=20) # place of origin
    delivplace = models.CharField(max_length=20) # place of delivery
    pay = models.CharField(max_length=20) # payment method
    quality = models.CharField(max_length=50) # quality standard
    delivmethod = models.CharField(max_length=20) # pickup method
    putnumb = models.IntegerField() # quantity posted
    dealnumb = models.IntegerField(default=0) # quantity filled
    deposit = models.IntegerField() # deposit (percentage); multiply by 100 in code
    state = models.IntegerField() # fill state: 0 open, 1 partially filled, 2 fully filled, 3 expired
    delvDate =models.DateField() # delivery date
    livetime = models.DateTimeField() # expiry time
    updatetime = models.DateTimeField()
    def __unicode__(self):
        return self.product
    def getday(self):
        # Date of the last update as 'YYYY-MM-DD'.
        return datetime.datetime.strftime(self.updatetime,'%Y-%m-%d')
    def gettime(self):
        # Time of the last update as 'HH:MM'.
        return datetime.datetime.strftime(self.updatetime,'%H:%M')
    def getbyorsell(self):
        # CSS class for the buy/sell badge in templates.
        if self.byorsell == 0:
            return 'purchase_img'
        else:
            return 'sale_img'
    def getsurplus(self):
        # Remaining unfilled quantity.
        return self.putnumb - self.dealnumb
    def getdeal(self):
        # CSS class marking a fully filled order in templates.
        if self.dealnumb == self.putnumb:
            return 'all_bargin'
        else:
            return ''
class Trade(models.Model):
    """A fill matching two orders (the posted order and the taking order)."""
    id = models.AutoField(primary_key=True)
    orderid = models.ForeignKey(Order,related_name='orderid') # posted order (FK to Order, one-to-many)
    tradeid = models.ForeignKey(Order,related_name='tradeid') # taking order (FK to Order, one-to-many)
    product = models.CharField(max_length=20) # product traded
    dealnum = models.IntegerField() # quantity filled
    updatetime = models.DateTimeField()
    def __unicode__(self):
        return self.product
class Product1(models.Model):
    """Top-level product category."""
    id = models.AutoField(primary_key=True)
    prokey = models.CharField(max_length=20) # category key
    productname = models.CharField(max_length=20) # category name
    updatetime = models.DateTimeField()
    def __unicode__(self):
        return self.productname
class Product2(models.Model):
    """Second-level product category under a Product1."""
    id = models.AutoField(primary_key=True)
    orderid = models.ForeignKey(Product1) # parent category (FK to Product1, one-to-many)
    prokey = models.CharField(max_length=20) # category key
    productname = models.CharField(max_length=20) # category name
    updatetime = models.DateTimeField()
    def __unicode__(self):
        return self.productname
class Modes(models.Model):
    """Lookup table of trade modes."""
    id = models.AutoField(primary_key=True)
    modekey = models.CharField(max_length=20) # trade-mode key
    mode = models.CharField(max_length=20) # trade mode
    updatetime = models.DateTimeField()
    def __unicode__(self):
        return self.mode
class Places(models.Model):
    """Lookup table of places of origin."""
    id = models.AutoField(primary_key=True)
    plackey = models.CharField(max_length=20) # origin key
    place = models.CharField(max_length=20) # place of origin
    updatetime = models.DateTimeField()
    def __unicode__(self):
        return self.place
class Delivery(models.Model):
    """Lookup table of delivery places."""
    id = models.AutoField(primary_key=True)
    delivkey = models.CharField(max_length=20) # delivery-place key
    delivplace = models.CharField(max_length=20) # place of delivery
    updatetime = models.DateTimeField()
    def __unicode__(self):
        return self.delivplace
class Delivme(models.Model):
    """Lookup table of pickup methods."""
    id = models.AutoField(primary_key=True)
    delivmvkey = models.CharField(max_length=20) # pickup-method key
    delivmv = models.CharField(max_length=20) # pickup method
    updatetime = models.DateTimeField()
    def __unicode__(self):
        return self.delivmv
class Pay(models.Model):
    """Lookup table of payment methods."""
    id = models.AutoField(primary_key=True)
    paykey = models.CharField(max_length=20) # payment-method key
    pay = models.CharField(max_length=20) # payment method
    updatetime = models.DateTimeField()
    def __unicode__(self):
        return self.pay
class Quality(models.Model):
    """Lookup table of quality standards."""
    id = models.AutoField(primary_key=True)
    qualkey = models.CharField(max_length=20) # quality-standard key
    qual = models.CharField(max_length=20) # quality standard
    updatetime = models.DateTimeField()
    def __unicode__(self):
        return self.qual
class Deposit(models.Model):
    """Lookup table of deposit-percentage options."""
    id = models.AutoField(primary_key=True)
    deposkey = models.CharField(max_length=20) # deposit key
    deposvalue = models.IntegerField() # deposit value
    depos = models.CharField(max_length=20) # deposit display label
    updatetime = models.DateTimeField()
    def __unicode__(self):
        return self.depos
|
import viz
# Configuration constants for the Vizard VR smell/touch experiment.
headTrackingActive = False
# Key commands: maps logical actions to Vizard key codes.
KEYS = { 'forward'	: viz.KEY_UP
		,'back' 	: viz.KEY_DOWN
		,'left' 	: viz.KEY_LEFT
		,'right'	: viz.KEY_RIGHT
		,'reset'	: 'r'
		,'camera'	: 'c'
		,'help'		: ' '
		}
# Initial hand positions and tracker offsets (meters, world coordinates).
initRHpos = [1.0615520477294922, 0.74419105052948, -1.2517306804656982]
rhLeapOffset = [-0.154, 0.121871, -0.0263254]
initLHpos = [0.5220695734024048, 0.74419105052948, -1.2517306804656982]
lhLeapOffset = [0.5051565766334534, 0.6588385105133057, -1.2056124210357666]
#initUserPos = [0.7755154967308044, -0.5069681406021118, -1.5151630640029907]
initUserPos = [-0.8098157048225403, -0.08894657343626022, 1.2626420259475708]
# Supported HMDs; hmdUsed is set elsewhere at runtime.
HMD_TYPE = {'RIFT' : 0, 'VIVE' : 1}
hmdUsed = None
# Absolute model paths (machine-specific).
PPT1_roomModelPath = 'D:/github repos/smellStuff/resources/labroom.osgb'
lhEmptyModelPath = 'D:/github repos/smellStuff/resources/leftHandOpen.osgb'
lhDonutModelPath = 'D:/github repos/smellStuff/resources/LeftHandwDonut.osgb'
rhEmptyModelPath = 'D:/github repos/smellStuff/resources/rightHandOpen.osgb'
rhDonutModelPath = 'D:/github repos/smellStuff/resources/RightHandwDonut.osgb'
# Room model scale and placement.
PPT1Scale = 0.7
PPT1pos = [0.0, 0, 0.0]
#PPT1pos = [-0.4,0,-1.4]
# VRPN tracking: plugin handle, PPT server host, and virtual tracker IDs.
vrpn = viz.add('vrpn7.dle')
PPT_HOSTNAME = '171.64.33.43'
headVirtualID = 1
donutVirtualID = 2
rightHandVirtualID = 3
leftHandVirtualID = 4
wristToHandPositionOffset = [-0.019646525382995605, 0.12299728393554688, 0.0752265453338623]
#wristToHandPositionOffset = [0.0, 0.0, 0.0]
# InterSense orientation tracker ports for each hand.
isensePortForLeftHand = 5
isensePortForRightHand = 6
#donutScale = 1/20.0
donutScale = 0.33
handScale = .4
#data collection constants
DATA_COLLECTION_FOLDER_FOR_SMELL = 'smellData/'
# Experimental condition codes: [Yes/No]Smell + [Yes/No]Touch.
SMELL_TOUCH_CONDITION_ID = 'YSYT'
NO_SMELL_TOUCH_CONDITION_ID = 'NSYT'
SMELL_NO_TOUCH_CONDITION_ID = 'YSNT'
NO_SMELL_NO_TOUCH_CONDITION_ID = 'NSNT'
ID_CHARACTER_LENGTH = 4
ID_FILENAME = 'smellData.csv'
COUNTER_FIELD_NAMES = ['ID', 'totalTime', 'totalDistanceFromHandToDonut']
NUMBER_OF_DIGITS_FOR_ID = 4
|
#!/usr/bin/env python3
#
# dump a given file's tagging as json
import re
import sys
import json
import struct
import binascii
from helpers import dissect_file, intervals_from_text, interval_tree_to_hierarchy, FinterNode, finter_type_to_struct_fmt
# we'll augment the default node type with the ability to produce a python
# data structure serializable to json
class JsonNode(FinterNode):
    """FinterNode subclass that can serialize its subtree to a plain Python
    data structure suitable for json.dumps()."""
    def __init__(self, begin, end, type_, comment):
        super().__init__(begin, end, type_, comment)
        # generate a name based on our comment, eg:
        #   comment                                              name
        #   -------                                              ----
        #   "public_key_size=0x408"                              "public_key_size"
        #   "tag=0x4 AVB_DESCRIPTOR_TAG_CHAIN_PARTITION"         "tag"
        #   "data[1128]"                                         "data"
        self.name = re.split('[^a-zA-Z0-9_]', self.comment)[0]
        # File handle used by data_structify() to read the tagged bytes;
        # must be supplied via set_fp() before serialization.
        self.fp = None
    def set_fp(self, fp):
        """Attach the open file object *fp* to this node and its whole subtree."""
        self.fp = fp
        for child in self.children:
            child.set_fp(fp)
    def data_structify(self):
        """Return a json-serializable structure for this node's subtree."""
        # if we have child children, do not emit our "value" (the bytes we tag)
        # instead, return a dict with named children
        if self.children:
            # get the requested names of each child
            names = [ch.name for ch in self.children]
            # resolve conflicts by appending a numeric distinguisher:
            # duplicate names become name0, name1, name2, ...
            suffix = {}
            for ch in self.children:
                name = ch.name
                if names.count(name) > 1:
                    if name in suffix:
                        ch.name = f'{ch.name}{suffix[ch.name]}'
                        suffix[name] += 1
                    else:
                        ch.name = f'{ch.name}0'
                        suffix[name] = 1
            # make the data structure recursively
            return { ch.name : ch.data_structify() for ch in self.children }
        # we have no children: emit our tagged bytes in serialized form
        else:
            self.fp.seek(self.begin)
            data = self.fp.read(self.end - self.begin)
            match self.type_:
                case 'raw': return binascii.hexlify(data).decode('utf-8')
                # NOTE(review): this emits the *string* 'null', not JSON null
                # (Python None) — presumably intentional; confirm.
                case 'none': return 'null'
                case _:
                    fmt = finter_type_to_struct_fmt(self.type_)
                    return struct.unpack(fmt, data)[0]
if __name__ == '__main__':
    # Usage: dump the tagging of <file> as JSON to stdout.
    if len(sys.argv) < 2:
        print('ERROR: missing file parameter')
        print('usage: %s <file>' % sys.argv[0])
        sys.exit(-1)
    fpath = sys.argv[1]

    # Dissect the file into an interval tree, then build a JsonNode hierarchy.
    # Fixes: removed the unused `sorted_children` variable and the dead
    # `if 0:` debug branch that called an undefined `graph()`; the file is
    # now opened via `fpath` instead of re-reading sys.argv.
    interval_tree = dissect_file(fpath)
    root = interval_tree_to_hierarchy(interval_tree, JsonNode)

    with open(fpath, 'rb') as fp:
        root.set_fp(fp)
        ds = root.data_structify()
        print(json.dumps(ds, indent=4))
|
from flask import request
from chineseviewer.tts import Tts
from chineseviewer import app
@app.route('/post/speak', methods=['POST'])
def create_speak():
    """POST /post/speak: synthesize the form field 'item' as Mandarin speech
    and return the audio as a base64 string.

    NOTE(review): request.form.get('item') is None when the field is missing —
    presumably Tts handles or raises on that; confirm.
    """
    item = request.form.get('item')
    return Tts(item, lang='zh-cn').to_base64()
|
from django.shortcuts import render, get_object_or_404, redirect
from django.http import HttpResponse
from cms.models import Article
from cms.forms import ArticleForm, UrlForm
from classifier.classifier import Classifier # @UnresolvedImport
def article_list(request):
    """List all articles, ordered by id."""
    articles = Article.objects.all().order_by('id')
    return render(request,
                  'cms/article_list.html',  # template to render
                  {'articles': articles})   # context passed to the template
def article_edit(request, article_id=None):
    """Edit an existing article, or classify and register a new one.

    With an article_id: show/process an ArticleForm for that article.
    Without one: show/process a UrlForm; on submit, the URL is classified
    and saved, and a confirmation page is rendered.
    """
    if article_id:  # article_id given (editing an existing article)
        article = get_object_or_404(Article, pk=article_id)
        if request.method == 'POST':
            form = ArticleForm(request.POST, instance=article)
            if form.is_valid():  # validate the form
                article = form.save(commit=False)
                article.save()
                return redirect('cms:article_list')
        else:  # GET request
            form = ArticleForm(instance=article)
    else:  # no article_id (adding a new article)
        article = Article()
        if request.method == 'POST':
            form = UrlForm(request.POST)
            if form.is_valid():  # validate the form
                url = form.cleaned_data['article_url']
                title, url, category = Classifier().classify(
                    url)  # classify the URL and return its title/url/category
                Classifier().save_article(title, url, category)
                return render(request, 'cms/article_new.html',
                              dict(title=title, url=url, category=category))
        # GET request — first visit via the "add" button: hand a fresh
        # UrlForm to the template.
        else:
            form = UrlForm()
    return render(request, 'cms/article_edit.html',
                  dict(form=form, article_id=article_id))
def article_del(request, article_id):
    """Delete the article with the given id and return to the list view."""
    article = get_object_or_404(Article, pk=article_id)
    article.delete()
    return redirect('cms:article_list')
|
"""Assignment 2: Modelling CS Education research paper data
=== CSC148 Winter 2019 ===
This code is provided solely for the personal and private use of
students taking the CSC148 course at the University of Toronto.
Copying for purposes other than this use is expressly prohibited.
All forms of distribution of this code, whether as given or with
any changes, are expressly prohibited.
All of the files in this directory and all subdirectories are:
Copyright (c) 2019 Bogdan Simion, David Liu, Diane Horton, Jacqueline Smith
=== Module Description ===
This module contains a new class, PaperTree, which is used to model data on
publications in a particular area of Computer Science Education research.
This data is adapted from a dataset presented at SIGCSE 2019.
You can find the full dataset here: https://www.brettbecker.com/sigcse2019/
Although this data is very different from filesystem data, it is still
hierarchical. This means we are able to model it using a TMTree subclass,
and we can then run it through our treemap visualisation tool to get a nice
interactive graphical representation of this data.
"""
import csv
from typing import List, Dict
from tm_trees import TMTree
# Filename for the dataset
DATA_FILE = 'cs1_papers.csv'
class PaperTree(TMTree):
    """A tree representation of Computer Science Education research paper data.
    === Private Attributes ===
    _authors:
        The author(s) of the paper represented by this tree.
    _doi:
        The digital object identifier (DOI) used to identify the specific
        paper represented by this tree.
    === Inherited Attributes ===
    rect:
        The pygame rectangle representing this node in the treemap
        visualization.
    data_size:
        The size of the data represented by this tree.
    _colour:
        The RGB colour value of the root of this tree.
    _name:
        The root value of this tree, or None if this tree is empty.
    _subtrees:
        The subtrees of this tree.
    _parent_tree:
        The parent tree of this tree; i.e., the tree that contains this tree
        as a subtree, or None if this tree is not part of a larger tree.
    _expanded:
        Whether or not this tree is considered expanded for visualization.
    === Representation Invariants ===
    - All TMTree RIs are inherited.
    """
    _authors: str
    _doi: str
    _by_year: bool
    _all_paper: bool
    def __init__(self, name: str, subtrees: List[TMTree], authors: str = '',
                 doi: str = '', citations: int = 0, by_year: bool = True,
                 all_papers: bool = False) -> None:
        """Initialize a new PaperTree with the given <name> and <subtrees>,
        <authors> and <doi>, and with <citations> as the size of the data.
        If <all_papers> is True, then this tree is to be the root of the paper
        tree. In that case, load data about papers from DATA_FILE to build the
        tree, ignoring the <subtrees> argument.
        If <all_papers> is False, Do NOT load new data.
        <by_year> indicates whether or not the first level of subtrees should be
        the years, followed by each category, subcategory, and so on. If
        <by_year> is False, then the year in the dataset is simply ignored.
        """
        # Refactor: the original duplicated the init call and all attribute
        # assignments across both branches; only the subtree source differs.
        if all_papers is True:
            subtrees = _build_tree_from_dict(
                _load_papers_to_dict(DATA_FILE, by_year), by_year)
        TMTree.__init__(self, name, subtrees, citations)
        self._authors = authors
        self._doi = doi
        self._by_year = by_year
        self._all_paper = all_papers
    def get_separator(self) -> str:
        """Return the string used to separate names in the string
        representation of a path from the tree root to this tree.
        """
        return '\\'
    def get_suffix(self) -> str:
        """Return the string used at the end of the string representation of
        a path from the tree root to this tree.
        """
        # A leaf is an individual paper; any internal node is a category.
        if len(self._subtrees) == 0:
            return ' (paper)'
        else:
            return ' (category)'
def _load_papers_to_dict(dataset: str, by_year: bool = True) -> Dict:
    """Return a nested dictionary of the data read from the papers dataset file.
    If <by_year>, then use years as the roots of the subtrees of the root of
    the whole tree. Otherwise, ignore years and use categories only.

    Fixes:
    - the ``dataset`` parameter was annotated ``csv`` (the module); it is a
      file path string;
    - ``item.strip(' ')`` discarded its result (str.strip returns a new
      string), so cells were never actually stripped — they now are;
    - removed the redundant ``csvfile.close()`` inside the ``with`` block.
    """
    data = {}
    with open(dataset, newline='') as csvfile:
        data_reader = csv.reader(csvfile, delimiter=',')
        next(data_reader)  # skip the header row
        for line in data_reader:
            # Columns: authors, title, year, categories, doi, citations.
            line = [item.strip(' ') for item in line]
            year = line[2]
            if by_year is True and year not in data:
                data[year] = {}
            categories = line[3].split(': ')
            check = _subcategories(data, year, categories, by_year)
            current_category = _add_category(data, year, categories, check,
                                             by_year)
            current_category['papers'].append({'authors': line[0],
                                               'name': line[1], 'doi': line[4],
                                               'citations': int(line[5])})
    return data
def _subcategories(overall_data: Dict, yr: str, lst: List, by_year: bool) -> \
int:
"""Return the depth up until categories do not exist in nested dictionary
<overall_data>.
"""
if by_year is True:
dic = overall_data[yr]
else:
dic = overall_data
pos = 0
while pos < len(lst):
sub = lst[pos]
if sub not in dic.keys():
return pos
dic = dic[sub]
pos += 1
return pos
def _add_category(overall_data: Dict, yr: str, lst: List, index: int,
by_year: bool) -> Dict:
"""Add non-existing categories as nested dictionaries into an existing
category that is an dictionary and return dictionary of deepest depth.
"""
curr_d = 0
if by_year is True:
curr = overall_data[yr]
else:
curr = overall_data
while curr_d != index:
curr = curr[lst[curr_d]]
curr_d += 1
while curr_d != len(lst):
curr[lst[curr_d]] = {'papers': []}
curr = curr[lst[curr_d]]
curr_d += 1
return curr
def _build_tree_from_dict(nested_dict: Dict, year: bool) -> List[PaperTree]:
    """Return a list of trees from the nested dictionary <nested_dict>.
    """
    trees = []
    for key, value in nested_dict.items():
        if isinstance(value, list):
            # A 'papers' entry: one leaf tree per paper (empty list adds none).
            for paper in value:
                trees.append(PaperTree(paper['name'], [], paper['authors'],
                                       paper['doi'], paper['citations'], year))
        elif isinstance(value, dict):
            # A category entry: recurse and wrap the children in one subtree.
            children = _build_tree_from_dict(value, year)
            trees.append(PaperTree(key, children))
    return trees
if __name__ == '__main__':
    import python_ta
    # Course-provided static checker; the config mirrors the assignment spec.
    python_ta.check_all(config={
        'allowed-import-modules': ['python_ta', 'typing', 'csv', 'tm_trees'],
        'allowed-io': ['_load_papers_to_dict'],
        'max-args': 8
    })
|
import torch.utils.data as data
from PIL import Image
import torchvision.transforms as transforms
import glob
import numpy as np
import torch
def open_image(path, size=224):
    """Open the image at <path> as RGB and return a centered square crop.

    Args:
        path: filesystem path to the image file.
        size: side length of the square crop in pixels (default 224,
              matching the original hard-coded behaviour).

    Returns:
        A PIL.Image of dimensions size x size.
    """
    img = Image.open(path).convert('RGB')
    width, height = img.size
    # Crop box is (left, upper, right, lower), centered on the image.
    return img.crop(
        (
            (width - size) / 2,
            (height - size) / 2,
            (width + size) / 2,
            (height + size) / 2
        )
    )
def get_dataframe_row_by_id(df, id, test_mode):
    """Return the row of <df> with index label <id> as a torch.FloatTensor.

    In test mode there are no targets, so a dummy scalar 0 is returned.
    """
    if test_mode:
        # TODO can we do better than this?
        return 0
    # Fixed: DataFrame.as_matrix() was deprecated in pandas 0.23 and removed
    # in 1.0; to_numpy() is the supported, behaviourally identical replacement.
    row = df.loc[id].to_numpy().astype(np.float32)
    return torch.FloatTensor(row)
def get_image_id(file_path):
    """Return the numeric image id encoded in the basename of <file_path>
    (e.g. 'data/train/145689.jpg' -> 145689)."""
    # Basename = text after the final '/'; id = text before the final '.'.
    basename = file_path.rsplit('/', 1)[1]
    stem = basename.rsplit('.', 1)[0]
    return int(stem)
def make_dataset(dir, target_dataframes, test_mode):
    """Pair every .jpg under <dir> with its target row.

    Returns (images_targets, images_paths) where images_targets is a list of
    (PIL image, target tensor) tuples, one per image file.
    """
    images_paths = glob.glob(dir + "/*.jpg")
    # TODO
    if test_mode:
        print("Using test mode")
    images_targets = []
    for image_path in images_paths:
        image = open_image(image_path)
        target = get_dataframe_row_by_id(
            target_dataframes, get_image_id(image_path), test_mode)
        images_targets.append((image, target))
    return images_targets, images_paths
def get_images_ids_from_image_paths(images_paths):
    """Map every path in <images_paths> to its numeric image id."""
    return list(map(get_image_id, images_paths))
class ImageDataset(data.Dataset):
    """Dataset of center-cropped RGB images paired with their target rows."""

    def __init__(self, root, target_dataframes, test_mode):
        pairs, paths = make_dataset(root, target_dataframes, test_mode)
        if not pairs:
            raise RuntimeError("Found 0 images in subfolders of: " + root)
        # ImageNet normalisation constants.
        self.image_transform = transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
        ])
        self.root = root
        self.images_targets = pairs
        # Maps dataset index -> numeric image id, e.g. images_idx_to_id[5] --> 145689 (.jpg)
        self.images_idx_to_id = get_images_ids_from_image_paths(paths)

    def __getitem__(self, index):
        image, target = self.images_targets[index]
        return self.image_transform(image), target

    def __len__(self):
        return len(self.images_targets)
from xing.xachartdata import Chartdata
from xing.xasession import Session
from xing.xareal import RealManager,Real
# Fixed: Queue was used below but never imported (NameError at runtime).
try:
    from queue import Queue  # Python 3
except ImportError:
    from Queue import Queue  # Python 2
server = {
    "address" :"hts.ebestsec.co.kr", # server address
    "port" : 20001, # server port
    "type" : 0 # server type
}
# NOTE(review): credentials are hard-coded in source; move them to environment
# variables or a config file kept out of version control.
user = {
    "id" : "songdh10", # user id
    "passwd" : "gusdl57", # password
    "account_passwd" : "2121", # account password
    "certificate_passwd" : "gusdlsla57" # certificate password
}
session = Session()
session.login(server, user)
# Real-time subscription for order/execution events (SC1), buffered in a Queue.
real = Real("SC1", ("eventid", "ordxctptncode", "ordmktcode", "ordptncode", "mgmtbrnno",
                    "accno1","Isuno", "Isunm", "ordno", "orgordno", "execno",
                    "ordqty", "ordprc", "execqty", "execprc", "ordtrxptncode",
                    "secbalqty", "avrpchsprc", "pchsant"), Queue(100))
# Subscribe to 10-level order-book quotes (H1_) for the listed tickers.
manager= RealManager()
manager.addTask("H1_", ("shcode", "hottime","totofferrem", "totbidrem",
                        "offerho1", "bidho1", "offerrem1", "bidrem1",
                        "offerho2", "bidho2", "offerrem2", "bidrem2",
                        "offerho3", "bidho3", "offerrem3", "bidrem3",
                        "offerho4", "bidho4", "offerrem4", "bidrem4",
                        "offerho5", "bidho5", "offerrem5", "bidrem5",
                        "offerho6", "bidho6", "offerrem6", "bidrem6",
                        "offerho7", "bidho7", "offerrem7", "bidrem7",
                        "offerho8", "bidho8", "offerrem8", "bidrem8",
                        "offerho9", "bidho9", "offerrem9", "bidrem9",
                        "offerho10", "bidho10", "offerrem10", "bidrem10"
                        ), 100).addTarget(["005930","035420"])
print(manager.getQueue("H1_"))
from setuptools import setup

# Read __version__ from the package's version module without importing the
# package itself (importing would require its runtime dependencies).
exec(open('saltpeter/version.py').read())

setup(name='saltpeter',
      version=__version__,
      description='Distributed cron based on salt',
      url='http://github.com/syscollective/saltpeter',
      author='Marin Vintila',
      author_email='marin.vintila@syscollective.com',
      license='MIT',
      packages=['saltpeter'],
      # Installs a `saltpeter` console command that calls saltpeter.main:main.
      entry_points = {
          'console_scripts': ['saltpeter=saltpeter.main:main'],
      },
      install_requires=[
          'salt',
          'crontab',
          'pyyaml',
          'tornado',
          'elasticsearch',
          'opensearch-py',
      ],
      zip_safe=False)
|
import random
import cal_time
# Build a shuffled list of 0..99 as test input for the sort below.
ls = list(range(100))
random.shuffle(ls)
# NOTE(review): this immediately overwrites the shuffled list with a tiny
# hard-coded case — presumably leftover from debugging; confirm intent.
ls=[2,1,1]
def p(ls, left, right):
    """Partition ls[left..right] in place around the pivot ls[left] and
    return the pivot's final index.

    NOTE(review): this is a swap-based variant that uses `if` (not the
    usual `while`) before each unconditional swap, so correctness depends
    on the exact statement order; verify against a reference partition
    before modifying anything here.
    """
    tmp = ls[left]  # pivot value
    while left < right:
        if right > left and ls[right] >= tmp:
            right -= 1
        ls[left], ls[right] = ls[right], ls[left]
        if left < right and ls[left] <= tmp:
            left += 1
        ls[left], ls[right] = ls[right], ls[left]
    return left
def quick_sort(ls, left, right):
    """Recursively quicksort ls[left..right] in place using partition p()."""
    if left >= right:
        return
    pivot_index = p(ls, left, right)
    quick_sort(ls, left, pivot_index - 1)
    quick_sort(ls, pivot_index + 1, right)
# @cal_time.run_time
def main():
    # NOTE(review): only the last two elements are sorted here
    # (left = len-2, right = len-1); a full sort would be
    # quick_sort(ls, 0, len(ls) - 1) — confirm whether this is intended.
    quick_sort(ls, len(ls)-2, len(ls) - 1)
    print(ls)
main()
|
import socket, re, itertools, ssl
from time import sleep
from os import strerror
from multiprocessing import Pool, Lock, active_children
# Process-shared lock guarding appends to the output file from worker processes.
global lock
lock = Lock()
class BruteUser(object):
    """Probes a WordPress-style /author/<username>/ URL via a raw HEAD
    request to test whether <username> exists; hits are appended to the
    shared output file under `lock`."""

    def __init__(self,username):
        self.username = username

    def run(self):
        """Run one probe.

        Returns 0 on a conclusive result (user found, or 404), 1 when the
        caller should back off and retry (server error / odd response).
        """
        self.donothing = 0
        self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # NOTE(review): ssl.wrap_socket is deprecated; prefer
        # ssl.SSLContext().wrap_socket in a modernisation pass.
        if (secure == 1): self.ssocket = ssl.wrap_socket(self.s)
        else: self.ssocket = self.s
        # Keep retrying the connection until it succeeds.
        self.connmsg = self.ssocket.connect_ex((host,port))
        while (self.connmsg != 0):
            print("ERROR:\t%s" % strerror(self.connmsg))
            sleep(2.1)
            self.connmsg = self.ssocket.connect_ex((host,port))
        self.ssocket.send("HEAD "+path+"author/"+self.username+"/ HTTP/1.1\r\nHost: "+host+"\r\n\r\n")
        # Read at least 20 bytes of the status line before classifying.
        self.chunk = self.ssocket.recv(20)
        while (len(self.chunk) < 20):
            sleep(1)
            self.chunk += self.ssocket.recv(20)
        self.ssocket.shutdown(socket.SHUT_RDWR)
        self.ssocket.close()
        if (self.chunk.find("200 OK") > 0):
            print("Valid user found:\t%s" % self.username)
            lock.acquire()
            f = open(userout,"a")
            f.write(self.username+"\n")
            f.close()
            lock.release()
        elif (self.chunk.find("500") > 0):
            print("500 Internal Server Error seen, you may be sending too fast!")
            return 1
        elif (self.chunk.find("404") > 0): self.donothing = 1
        else:
            # Fixed: was str(chunk) — `chunk` is undefined here (NameError);
            # the received buffer lives in self.chunk.
            print("Irregular server response seen.\n%s" % str(self.chunk))
            return 1
        return 0
def worker(users):
    """Probe each candidate username, retrying a probe until it concludes."""
    for candidate in users:
        candidate = str(candidate.strip("\n"))
        while BruteUser(candidate).run() != 0:
            sleep(1)
def grouper(iterable, n, fillvalue=None):
    """Yield successive lists of up to <n> items from <iterable>.

    The final chunk may be shorter than <n>; <fillvalue> is accepted for
    API compatibility but unused (no padding is performed).
    """
    it = iter(iterable)
    # iter(callable, sentinel) stops at the first empty chunk.  Fixed: the
    # original passed gen.next, which only exists on Python 2; this lambda
    # form behaves identically on both Python 2 and 3.
    return iter(lambda: list(itertools.islice(it, n)), [])
def bruteUser(userlist,psize,hosti,pathi,porti,securei,userfound):
    """Bruteforce usernames against host <hosti> using a pool of <psize>
    worker processes; valid names are written to <userfound>.

    Sets module-level globals (host, port, secure, userout, path) that the
    worker processes read.
    """
    global host
    host = hosti
    global port
    port = porti
    global secure
    secure = securei
    global userout
    userout = userfound
    global path
    path = pathi
    # Truncate the output file before starting.
    f = open(userout,'w').close()
    usersize = len(userlist)
    # manage pool
    # NOTE(review): `usersize / psize` relies on Python 2 integer division;
    # on Python 3 this yields a float chunk size — confirm target version.
    if (usersize <= psize): chunksize = 1
    else: chunksize = ((usersize / psize) + (usersize % psize))
    print("Userlist size: %d\tChunk size: %d\tPool size: %d" % (usersize,chunksize,psize))
    print("Bruteforcing usernames")
    pool = Pool(processes=psize)
    # NOTE(review): itertools.izip is Python-2-only, and izip() of a single
    # iterable yields 1-tuples, so each map_async receives a tuple holding
    # one chunk list — verify this fan-out is what was intended.
    for chunk in itertools.izip(grouper(userlist,chunksize)): pool.map_async(worker,chunk)
    pool.close()
    try:
        while(len(active_children()) > 0): # poll until all workers finish
            sleep(2)
            ignore = active_children()
    except KeyboardInterrupt: exit('CTRL^C caught, exiting...\n\n')
    print("Username bruteforce complete")
|
import os
import requests
from datetime import *
# Configuration from the environment.
# NOTE(review): APP_ID uses os.environ[...] (raises KeyError when missing)
# while the others use .get() (silently None) — confirm which failure mode
# is wanted and make them consistent.
APP_ID = os.environ["APP_ID"]
API_KEY = os.environ.get("APP_KEY")
SHEET_ENDPOINT = os.environ.get("SHEET_ENDPOINT")
TOKEN = os.environ.get("TOKEN")
USERNAME = os.environ.get("USERNAME")
PASSWORD = os.environ.get("PASSWORD")
# Bearer/token header for the Sheety (Google Sheets) API.
sheet_headers = {
    "Authorization": TOKEN
}
# Nutritionix natural-language exercise endpoint.
exercise_endpoint = "https://trackapi.nutritionix.com/v2/natural/exercise"
exercise_headers = {
    "x-app-id": APP_ID,
    "x-app-key": API_KEY,
}
# Ask the user to describe their workout in free text.
exercise_data = {
    "query": input("Tell me which exercises you did: "),
}
response = requests.post(url=exercise_endpoint, headers=exercise_headers, json=exercise_data)
response.raise_for_status()
# Put this exercise data to Google Sheets: one row per parsed exercise.
exercise_data = response.json()["exercises"]
today_date = datetime.now().strftime("%d/%m/%Y")
today_time = datetime.now().strftime("%H:%M:%S")
for exercise in exercise_data:
    sheet_inputs = {
        "workout": {
            "date": today_date,
            "time": today_time,
            "exercise": exercise["name"].title(),
            "calories": exercise["nf_calories"],
            "duration": exercise["duration_min"],
        }
    }
    sheet_response = requests.post(SHEET_ENDPOINT, json=sheet_inputs, headers=sheet_headers)
    print(sheet_response.text)
# Type-conversion demo script: numeric, string, sequence and dict conversions.

# int -> float
integer = 5
print(integer)
f = float(integer)
print(f)
# float -> int truncates toward zero.
float_1 = 10.6
integer_1 = int(float_1)
print(float_1)
print(integer_1)
float_2 = -1 * float_1
integer_2 = int(float_2)
print(float_2)
print(integer_2)
# Strings convert only when their content is a valid number.
string_value = '2.5'
float_of_string = float(string_value)
print(string_value)
print(float_of_string)
# Vice versa: number -> string always works.
integer_3 = 25
string_of_integer = str(integer_3)
print(integer_3)
print(string_of_integer)
# But a non-numeric string raises ValueError (shown here as a message only).
string = '1p'
print("ERROR: integer_of_string = int(string)")
print(string)
# Converting between sequence types.
list_1 = [1, 2, 3]
print(list_1)
set_1 = set(list_1)
print(set_1)
tuple_1 = tuple(set_1)
print(tuple_1)
tuple_2 = tuple(list_1)
print(tuple_2)
# A string is a sequence of characters.
string_3 = 'hello'
list_3 = list(string_3)
print(string_3)
print(list_3)
# dict() accepts a sequence of 2-item sequences (key/value pairs).
list_a = [1, 2]
list_b = [3, 4]
list_ab = [list_a, list_b]
print(list_a, list_b, list_ab)
dict_ab = dict(list_ab)
print(dict_ab)
tuple_a = (5, 6)
tuple_b = (7, 8)
list_t = [tuple_a, tuple_b]
print(list_t)
dict_lt = dict(list_t)
print(dict_lt)
|
import os
import sys
import subprocess
import shutil
sys.path.insert(0, 'scripts')
sys.path.insert(0, os.path.join("tools", "trees"))
sys.path.insert(0, os.path.join("tools", "families"))
import experiments as exp
import create_random_tree
def simulate_msa(msa_path, tree_path, sites, outputdir):
    """Run seq-gen on <tree_path> to simulate an MSA of <sites> sites under
    the GTR model, writing the alignment to <msa_path>.

    <outputdir> is accepted for interface compatibility but unused here.
    """
    command = [
        exp.seq_gen_exec,
        "-l", str(sites),   # alignment length
        "-m", "GTR",        # substitution model
        "-of",              # FASTA output
        tree_path,
    ]
    with open(msa_path, "w") as writer:
        subprocess.check_call(command, stdout=writer)
    print("Simulated MSA saved in " + msa_path)
def simulate(taxa, sites, outputdir):
    """Create <outputdir>, simulate a random tree with <taxa> leaves, save it,
    then simulate an MSA of <sites> sites along that tree."""
    os.mkdir(outputdir)
    random_tree = create_random_tree.create_random_tree_taxa_number(taxa)
    tree_path = os.path.join(outputdir, "simulated_tree.newick")
    msa_path = os.path.join(outputdir, "simulated_msa.fasta")
    random_tree.write(outfile = tree_path, format = 1)
    print("Simulated tree saved in " + tree_path)
    simulate_msa(msa_path, tree_path, sites, outputdir)
if (__name__ == "__main__"):
    if (len(sys.argv) != 4):
        # Fixed: the usage string lacked a space between the script name and
        # its arguments, printing e.g. "...simulate.pytaxa sites output_dir".
        print("Syntax: python " + os.path.basename(__file__) + " taxa sites output_dir")
        sys.exit(1)
    taxa = int(sys.argv[1])
    sites = int(sys.argv[2])
    outputdir = sys.argv[3]
    simulate(taxa, sites, outputdir)
|
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import logging
from dataclasses import dataclass
from typing import Iterator
from pants.core.util_rules import system_binaries
from pants.core.util_rules.system_binaries import UnzipBinary
from pants.engine.fs import Digest, MergeDigests, RemovePrefix
from pants.engine.process import Process, ProcessResult
from pants.engine.rules import Get, MultiGet, collect_rules, rule
from pants.engine.target import CoarsenedTargets
from pants.jvm.compile import ClasspathEntry, ClasspathEntryRequest, ClasspathEntryRequestFactory
from pants.jvm.compile import rules as jvm_compile_rules
from pants.jvm.resolve.key import CoursierResolveKey
from pants.util.logging import LogLevel
logger = logging.getLogger(__name__)
@dataclass(frozen=True)
class Classpath:
    """A transitive classpath which is sufficient to launch the target(s) it was generated for.

    There are two primary ways to consume a Classpath:
        1. Using the `(root_)immutable_inputs` methods, which produce the argument to
           `Process.immutable_input_digests` and adapted CLI args for use with that argument.
        2. Using the `digests` and `(root_)args` methods, which can be merged to produce the
           argument to `Process.input_digest` and CLI args for use with a digest.

    The first approach should be preferred, because it allows for symlinking of inputs. If
    possible, the latter method should be removed when consumers have migrated.

    This classpath is guaranteed to contain only JAR files.
    """

    entries: tuple[ClasspathEntry, ...]
    resolve: CoursierResolveKey

    def args(self, *, prefix: str = "") -> Iterator[str]:
        """All transitive filenames for this Classpath."""
        closure = ClasspathEntry.closure(self.entries)
        return ClasspathEntry.args(closure, prefix=prefix)

    def root_args(self, *, prefix: str = "") -> Iterator[str]:
        """The root filenames for this Classpath."""
        return ClasspathEntry.args(self.entries, prefix=prefix)

    def digests(self) -> Iterator[Digest]:
        """All transitive Digests for this Classpath."""
        closure = ClasspathEntry.closure(self.entries)
        return (entry.digest for entry in closure)

    def immutable_inputs(self, *, prefix: str = "") -> Iterator[tuple[str, Digest]]:
        """Returns (relpath, Digest) tuples for use with `Process.immutable_input_digests`."""
        closure = ClasspathEntry.closure(self.entries)
        return ClasspathEntry.immutable_inputs(closure, prefix=prefix)

    def immutable_inputs_args(self, *, prefix: str = "") -> Iterator[str]:
        """Returns relative filenames for the given entries to be used as immutable_inputs."""
        closure = ClasspathEntry.closure(self.entries)
        return ClasspathEntry.immutable_inputs_args(closure, prefix=prefix)

    def root_immutable_inputs(self, *, prefix: str = "") -> Iterator[tuple[str, Digest]]:
        """Returns root (relpath, Digest) tuples for use with `Process.immutable_input_digests`."""
        return ClasspathEntry.immutable_inputs(self.entries, prefix=prefix)

    def root_immutable_inputs_args(self, *, prefix: str = "") -> Iterator[str]:
        """Returns root relative filenames for the given entries to be used as immutable_inputs."""
        return ClasspathEntry.immutable_inputs_args(self.entries, prefix=prefix)
@rule
async def classpath(
    coarsened_targets: CoarsenedTargets,
    classpath_entry_request: ClasspathEntryRequestFactory,
) -> Classpath:
    """Build a `Classpath` covering all of the given coarsened targets."""
    # Compute a single shared resolve for all of the roots, which will validate that they
    # are compatible with one another.
    resolve = await Get(CoursierResolveKey, CoarsenedTargets, coarsened_targets)
    # Then request classpath entries for each root, concurrently.
    classpath_entries = await MultiGet(
        Get(
            ClasspathEntry,
            ClasspathEntryRequest,
            classpath_entry_request.for_targets(component=t, resolve=resolve, root=True),
        )
        for t in coarsened_targets
    )
    return Classpath(classpath_entries, resolve)
@dataclass(frozen=True)
class LooseClassfiles:
    """The contents of a classpath entry as loose classfiles.

    Note that `ClasspathEntry` and `Classpath` both guarantee that they contain JAR files, and so
    creating loose classfiles from them involves extracting their entry.
    """

    # Digest of the extracted (unzipped) classfile tree.
    digest: Digest
@rule
async def loose_classfiles(
    classpath_entry: ClasspathEntry, unzip_binary: UnzipBinary
) -> LooseClassfiles:
    """Extract every jar of a classpath entry into one merged digest of loose classfiles."""
    dest_dir = "dest"
    # Unzip each jar in its own sandboxed process, concurrently.
    process_results = await MultiGet(
        Get(
            ProcessResult,
            Process(
                argv=[
                    unzip_binary.path,
                    "-d",
                    dest_dir,
                    filename,
                ],
                output_directories=(dest_dir,),
                # Fixed: the description interpolated nothing; name the jar
                # being extracted so process logs are meaningful.
                description=f"Extract {filename}",
                immutable_input_digests=dict(ClasspathEntry.immutable_inputs([classpath_entry])),
                level=LogLevel.TRACE,
            ),
        )
        for filename in ClasspathEntry.immutable_inputs_args([classpath_entry])
    )
    # Merge the per-jar outputs and strip the temporary destination prefix.
    merged_digest = await Get(Digest, MergeDigests(pr.output_digest for pr in process_results))
    return LooseClassfiles(await Get(Digest, RemovePrefix(merged_digest, dest_dir)))
def rules():
    """This module's rules plus its system-binaries and jvm-compile dependencies."""
    return [*collect_rules(), *system_binaries.rules(), *jvm_compile_rules()]
|
import numpy as np
import pandas as pd
import pickle
import json
# Input: HanDeSeT debate CSV; output: adjacency list of per-motion agreement edges.
path = '../Data/HanDeSeT1.csv'
output_path = '../Data/adj_list.json'
df = pd.read_csv(path)
df1 = df.drop_duplicates()
# Report row counts before/after de-duplication (df1 is otherwise unused).
print(len(df), len(df1))
print('Your grace')
# (Exploratory lookups by motion text / speaker name removed; see git history.)
adj_lists = [{}, {}]
# One empty adjacency dict per speaker name.
adj_list = dict(map(lambda x: (x, {}), df['name'].values))
# Rows grouped two ways: by party affiliation and by motion title.
groups = {
    'party_basis': {},
    'motion_basis': {}
}
# Populating the groups: party_basis maps party -> speaker names;
# motion_basis maps motion title -> (speaker name, manual-speech polarity).
for id, row in df.iterrows():
    party = row['party affiliation']
    title = row['motion']
    name = row['name']
    if not party in groups['party_basis']:
        groups['party_basis'][party] = []
    groups['party_basis'][party].append(name)
    if not title in groups['motion_basis']:
        groups['motion_basis'][title] = []
    groups['motion_basis'][title].append((name, row['manual speech']))
# For every motion, connect each ordered pair of distinct speakers with a
# signed edge: +1 when their manual-speech polarities agree, -1 otherwise.
pos, neg, s = 0, 0, 0
for title, title_group in groups['motion_basis'].items():
    l = len(title_group)
    # s accumulates the expected number of ordered pairs across all motions.
    s+=l*(l-1)
    curr = 0
    for name1, pol1 in title_group:
        for name2, pol2 in title_group:
            if name1 == name2:
                continue
            curr+=1
            if title not in adj_list[name1]:
                adj_list[name1][title] = {}
            # pol is 1 when polarities match, 0 otherwise; edge weight is +/-1.
            # NOTE(review): only the name1 -> name2 direction is stored; the
            # symmetric assignment is intentionally commented out upstream.
            pol = 1-(int(pol1)^int(pol2))
            adj_list[name1][title][name2] = 2*pol - 1
            if 2*pol-1 == -1:
                neg+=1
            else :
                pos+=1
    # Debug path: a speaker appearing twice under one motion (name1 == name2
    # pairs skipped) makes curr fall short of l*(l-1); dump diagnostics.
    if curr!=l*(l-1):
        mm = {}
        for a,b in title_group:
            if a not in mm:
                mm[a] = list()
            mm[a].append(b)
        print(title)
        print('Hello')
        print(curr, l, l*(l-1))
        print('*'*20)
        print(title_group)
        print('*'*20)
        print(sorted(mm.items(), reverse = True))
        print('*'*20)
        print(np.unique(title_group, return_counts = True))
        for a,b in mm.items():
            if len(b)>1 and a=='David Mowat':
                x = df.loc[(df['name'] == a )& (df['motion'] == title)]
                print(x.values)
        # Interactive pause so the diagnostics can be inspected.
        a = input('Lets go ?')
print('Positives->', pos, '| Negatives->', neg, 'Tot: ', s, pos+neg)
# Persisting the adjacency list is currently disabled:
# with open(output_path, 'w') as f:
#     json.dump(adj_list, f)
|
# Linear search: print the index of the first element of `l` equal to `a`.
l = [8,3,2,7,6,7]
a = 7
for b, c in enumerate(l):
    if c == a:
        print(b)
        break
|
from rest_framework import serializers
from django.contrib.auth import get_user_model
from . import models
from artuium_server.statics import models as statics_models
User = get_user_model()
class ProfileSerializer(serializers.ModelSerializer):
    """Serializes a user profile, adding fields relative to the requesting viewer."""

    is_me = serializers.SerializerMethodField()
    is_following = serializers.SerializerMethodField()
    following_friends_count = serializers.SerializerMethodField()

    class Meta:
        model = models.User
        fields = ['id', 'nickname', 'profile_image', 'background_image', 'following_count', 'follower_count', 'is_me', 'is_following', 'following_friends_count', 'like_exhibition_count', 'like_artwork_count', 'like_review_count', 'push_token', 'initial']

    def get_is_me(self, obj):
        """True when the requesting user is <obj> itself (False without a request)."""
        if 'request' not in self.context:
            return False
        return self.context['request'].user.id == obj.id

    def get_is_following(self, obj):
        """True when the requesting user follows <obj> (False without a request)."""
        if 'request' not in self.context:
            return False
        request = self.context['request']
        follows = statics_models.Follow.objects.filter(following = request.user, follower = obj)
        return follows.count() > 0

    def get_following_friends_count(self, obj):
        """Number of <obj>'s followers whom the requesting user also follows."""
        if 'request' not in self.context:
            return 0
        user = self.context['request'].user
        followed_ids = statics_models.Follow.objects.filter(following = user).values_list('follower__id', flat = True)
        mutual = statics_models.Follow.objects.filter(follower = obj, following__id__in = followed_ids)
        return mutual.count()
from basetest import BaseTest
from pages.advertpage import AdvertPage
from pages.authpage import AuthPage
from pages.mainpage import MainPage
class TestLogic(BaseTest):
    """End-to-end flows for placing an advertisement as a new or existing user."""

    def close_noisy_window(self):
        # Dismiss the SMS-verification popup that can obscure the page.
        self.page.close_sms_verification()

    def registrate_new_user(self, user_info):
        """Register a new account from the auth page."""
        self.page = AuthPage(self.driver)
        self.page.registrate_new_user(user_info)

    def login(self, user_info):
        """Log an existing user in from the auth page."""
        self.page = AuthPage(self.driver)
        self.page.login(user_info)

    def make_advertisement(self, advertisement_info):
        """Fill and submit the advertisement form, asserting the success message."""
        self.close_noisy_window()
        self.page = AdvertPage(self.driver)
        self.page.place_advertisement(advertisement_info)
        self.assertTrue(self.page.is_message_show())

    def make_advertisement_for_new_user(self, advertisement_info, user_info, mail):
        """Register via the ad flow, activate the account by email link, then post an ad."""
        self.page = MainPage(self.driver)
        self.page.make_advertisement()
        # Being redirected to /account means authentication is required.
        if "account" in self.driver.current_url:
            self.registrate_new_user(user_info)
            # get link from email and activate account
            self.driver.get(mail.get_link_from_email_by_number(1))
            self.page.make_advertisement()
        self.make_advertisement(advertisement_info)

    def make_advertisement_for_existing_user(self, advertisement_info, user_info):
        """Log in via the ad flow when prompted, then post an ad."""
        self.page = MainPage(self.driver)
        self.page.make_advertisement()
        if "account" in self.driver.current_url:
            self.login(user_info)
        self.make_advertisement(advertisement_info)
|
from torch import nn
import torch
import numpy as np
import torch.utils.data as Data
from torch.utils.data import DataLoader
import torch.optim as optim
from torch.autograd import Variable
from matplotlib import pyplot as plt
# Training hyper-parameters.
batch_size = 32
lr = 0.001  # SGD learning rate
dr = 0.0001  # weight decay (L2 regularisation) passed to the optimiser
num_epoch = 15
mode = True  # True: train on the full dataset; False: train on 90%, validate on 10%
class Model(nn.Module):
    """4-layer strided CNN classifier with 19 output classes.

    Each conv halves the spatial size; for 64x64 single-channel input the
    path is 64 -> 32 -> 16 -> 8 -> 4, matching the 128*4*4 classifier input.
    """

    def __init__(self, bn=False):
        # bn: when True, insert BatchNorm2d after the conv layers.
        super(Model, self).__init__()
        if not bn:
            self. conv_layers = nn.Sequential(
                nn.Conv2d(1, 16, kernel_size=4, stride=2, padding=1), # 32*32
                nn.ReLU(inplace=True),
                nn.Conv2d(16, 32, kernel_size=4, stride=2, padding=1), # 16*16
                nn.ReLU(inplace=True),
                nn.Conv2d(32, 64, kernel_size=4, stride=2, padding=1), # 8*8
                nn.ReLU(inplace=True),
                nn.Conv2d(64, 128, kernel_size=4, stride=2, padding=1), # 4*4
                nn.ReLU(inplace=True)
            )
        else:
            # NOTE(review): unlike the earlier layers, the final conv here has
            # no BatchNorm2d(128) before its ReLU — confirm this asymmetry is
            # intentional.
            self.conv_layers = nn.Sequential(
                nn.Conv2d(1, 16, kernel_size=4, stride=2, padding=1), # 32*32
                nn.BatchNorm2d(16),
                nn.ReLU(inplace=True),
                nn.Conv2d(16, 32, kernel_size=4, stride=2, padding=1), # 16*16
                nn.BatchNorm2d(32),
                nn.ReLU(inplace=True),
                nn.Conv2d(32, 64, kernel_size=4, stride=2, padding=1), # 8*8
                nn.BatchNorm2d(64),
                nn.ReLU(inplace=True),
                nn.Conv2d(64, 128, kernel_size=4, stride=2, padding=1), # 4*4
                nn.ReLU(inplace=True)
            )
        # NOTE(review): the trailing ReLU clamps the logits to be non-negative
        # before CrossEntropyLoss — unusual for a classifier head; confirm.
        self.fc_layers = nn.Sequential(
            nn.Linear(128*4*4, 128),
            nn.Dropout(),
            nn.ReLU(inplace=True),
            nn.Linear(128, 19),
            nn.ReLU(inplace=True)
        )

    def forward(self, x):
        # Convolutional features, flattened per sample, then the MLP head.
        x = self.conv_layers(x)
        x = x.view(x.size(0), -1)
        x = self.fc_layers(x)
        return x
class TrainData(Data.Dataset):
    """Dataset wrapper around tensors pre-saved on disk.

    flag=True loads the full training set ('train_data.pth'/'train_label.pth');
    flag=False loads the 90% training split ('tr_data.pth'/'tr_label.pth').
    """

    def __init__(self, flag=True):
        if flag:
            # unsqueeze(1) adds the channel dimension -> (N, 1, H, W).
            self.data = torch.load('train_data.pth').unsqueeze(1).numpy()
            self.label = torch.load('train_label.pth').unsqueeze(1).numpy()
            self.len = self.data.shape[0]
        else:
            self.data = torch.load('tr_data.pth').unsqueeze(1).numpy()
            self.label = torch.load('tr_label.pth').unsqueeze(1).numpy()
            self.len = self.data.shape[0]

    def __getitem__(self, index):
        # Returns (sample, label) for one index, as numpy arrays.
        return self.data[index], self.label[index]

    def __len__(self):
        return self.len

    def get_data(self):
        # Full data tensor (used for whole-set accuracy evaluation).
        return torch.from_numpy(self.data)

    def get_label(self):
        # Full label tensor.
        return torch.from_numpy(self.label)
def train():
    """Train the CNN for num_epoch epochs and save its weights.

    Returns (model, per-epoch total losses, per-epoch training accuracies).
    Uses the module-level hyper-parameters (batch_size, lr, dr, num_epoch, mode).
    """
    model = Model()
    model.train()
    # Xavier-initialise the conv weights and zero the biases.
    for m in model.modules():
        if isinstance(m, nn.Conv2d):
            nn.init.xavier_normal_(m.weight.data)
            m.bias.data.fill_(0)
    # Fixed: inputs were moved to the GPU below but the model never was,
    # which raises a device-mismatch error whenever CUDA is available.
    if torch.cuda.is_available():
        model = model.cuda()
    dataset = TrainData(mode)
    criterion = nn.CrossEntropyLoss()
    train_loader = DataLoader(dataset, batch_size, shuffle=True)
    optimizer = optim.SGD(model.parameters(), lr, momentum=0.9, weight_decay=dr)
    loss_all = []
    acc_all = []
    for epoch in range(num_epoch):
        model.train()
        total_loss = 0
        for i, (data, label) in enumerate(train_loader):
            if torch.cuda.is_available():
                data = Variable(data).cuda()
                label = Variable(label).cuda()
            else:
                data = Variable(data)
                label = Variable(label)
            pred = model(data).float()
            # Fixed: .long() keeps the label on its current device, whereas
            # .type(torch.LongTensor) silently moved CUDA labels back to CPU.
            loss = criterion(pred, label.long().squeeze())
            total_loss += loss.data.item()
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            if (i+1) % 100 == 0:
                print(
                    'Epoch [%d/%d], Iter [%d/%d], Average Loss: %.4f'
                    % (epoch, num_epoch, i, len(train_loader), total_loss /float(batch_size * (i + 1))))
        # Epoch accuracy over the full training set.
        # NOTE(review): this forward pass runs on the entire dataset at once;
        # for large datasets it should be batched and wrapped in no_grad().
        class_truth = dataset.get_label().squeeze()
        eval_inputs = dataset.get_data()
        if torch.cuda.is_available():
            eval_inputs = eval_inputs.cuda()
        class_pred = model(eval_inputs).argmax(1).cpu()
        num = torch.nonzero(class_truth - class_pred).size(0)
        print('acc for this epoch is:%.4f' % (1 - num/class_truth.size(0)))
        loss_all.append(total_loss)
        acc_all.append(1 - num/class_truth.size(0))
    if mode:
        torch.save(model.state_dict(), 'model.pth')
    else:
        torch.save(model.state_dict(), 'val_model.pth')
    return model, loss_all, acc_all
if __name__ =='__main__':
    # Train, then plot per-epoch loss and accuracy curves.
    model, loss_all, acc_all = train()
    x = np.arange(0, num_epoch)
    plt.title("loss for each epoch")
    plt.xlabel("epoch")
    plt.ylabel("loss")
    plt.plot(x, loss_all)
    plt.show()
    plt.title("acc for each epoch")
    plt.xlabel("epoch")
    plt.ylabel("acc")
    plt.plot(x, acc_all)
    plt.show()
|
from django.contrib import admin
from sms_controll.models import Kierowca, Zlecenie, Zlecenie2, FirmaFaktura, Firma, Uzytkownik, Trasa #import modelu Kierowca
# Register all sms_controll models with the default Django admin site so they
# can be browsed and edited at /admin.
admin.site.register(FirmaFaktura)
admin.site.register(Zlecenie)
admin.site.register(Zlecenie2)
admin.site.register(Trasa)
admin.site.register(Kierowca)
admin.site.register(Firma)
admin.site.register(Uzytkownik)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.