text
stringlengths 8
6.05M
|
|---|
# Read two integers, add fixed offsets (10 and 20), and report the results.
num1 = int(input('Type your first number: '))
num2 = int(input('Type your second number: '))
print(f'Your numbers are: {num1} and {num2}')
num1 += 10
num2 += 20
print('After adding 10 to the first number and 20 to the second number, you get:', end=' ')
print(f'{num1} and {num2}')
print(f'Adding both numbers together yields {num1 + num2}')
|
import os
import math
import time
import datetime
import cv2
import image_processing
import error_log
import session_log
import headsup
import db_query
DEFAULT_STACK = 22
def search_current_stack(screen_area, stack_collection, db):
    """Capture the current stack area and match it against known stack templates.

    Saves a fresh screenshot of the stack region, then template-matches the
    last stored screen against every image in ``stack_collection``.

    Returns the matched integer stack value, or ``DEFAULT_STACK`` when no
    template matches or when capture/recognition fails.
    """
    try:
        image_name = str(math.floor(time.time())) + ".png"
        folder_name = "images/" + str(datetime.datetime.now().date())
        save_stack_image(screen_area, image_name, folder_name, db)
        stack_area = db_query.get_stack_area(screen_area, db)
        for item in db_query.get_last_screen(stack_area, db):
            img_rgb = cv2.imread(item['image_path'], 0)  # 0 = grayscale read
            for value in stack_collection:
                if image_processing.cv_data_template(value['image_path'], img_rgb) > 0:
                    return int(value['stack_value'])
        return DEFAULT_STACK
    except Exception as e:
        error_log.error_log('searchCurrentStack', str(e))
        print(e)
        # Fix: previously fell off the except and returned None, which crashed
        # callers that compare the result with ints (e.g. compare_bank_with_available_stack).
        return DEFAULT_STACK
def search_opponent_stack(screen_area, opponent_area, stack_collection, db):
    """Capture the opponent's stack region and template-match it.

    Returns the matched integer stack value, or ``DEFAULT_STACK`` when no
    template matches or when capture/recognition fails.
    """
    try:
        folder_name = 'images/' + str(datetime.datetime.now().date())
        save_opponent_stack_image(screen_area, folder_name, opponent_area, db)
        stack_area = db_query.get_opponent_stack_area(screen_area, db)
        for item in db_query.get_last_screen(stack_area, db):
            img_rgb = cv2.imread(item['image_path'], 0)  # 0 = grayscale read
            for value in stack_collection:
                if image_processing.cv_data_template(value['image_path'], img_rgb) > 0:
                    return int(value['stack_value'])
        return DEFAULT_STACK
    except Exception as e:
        error_log.error_log('searchOpponentStack', str(e))
        print(e)
        # Fix: return the default instead of an implicit None so callers
        # performing int comparisons keep working after a failure.
        return DEFAULT_STACK
def save_stack_image(screen_area, image_name, folder_name, db):
    """Capture screenshot(s) of the hero stack region into ``folder_name``.

    Errors are logged under 'saveStackImage' and swallowed (best effort).
    """
    try:
        # Hoisted: the stack area is loop-invariant but was previously looked
        # up in the database twice per iteration.
        stack_area = db_query.get_stack_area(screen_area, db)
        image_path = os.path.join(folder_name, str(stack_area), image_name)
        for val in db_query.get_stack_data(stack_area, db):
            image_processing.imaging(val['x_coordinate'], val['y_coordinate'],
                                     val['width'], val['height'], image_path,
                                     str(val['screen_area']), db)
    except Exception as e:
        error_log.error_log('saveStackImage', str(e))
        print(e)
def save_opponent_stack_image(screen_area, folder_name, opponent_area, db):
    """Capture one screenshot per opponent stack region.

    Each capture gets a unique, increasing epoch-based ``<ts>.png`` name.
    Fix: added the try/except + error_log pattern every sibling save_*
    function in this module uses (this one previously let exceptions
    propagate), and replaced manual '/' concatenation with os.path.join.
    """
    try:
        image_name = int(math.floor(time.time()))
        for val in db_query.get_opponent_stack_data(screen_area, opponent_area, db):
            image_path = os.path.join(folder_name, str(val['screen_area']),
                                      str(image_name) + ".png")
            image_processing.imaging(val['x_coordinate'], val['y_coordinate'],
                                     val['width'], val['height'], image_path,
                                     str(val['screen_area']), db)
            image_name += 1  # keep names unique across multiple regions
    except Exception as e:
        error_log.error_log('saveOpponentStackImage', str(e))
        print(e)
def save_allin_stack_image(screen_area, db):
    """Capture screenshot(s) of the all-in stack region.

    Errors are logged under 'saveAllinStackImage' and swallowed (best effort).
    """
    try:
        image_name = str(math.floor(time.time())) + ".png"
        folder_name = 'images/' + str(datetime.datetime.now().date())
        # Hoisted: the all-in area was previously re-queried on every loop
        # iteration even though it never changes.
        allin_area = db_query.get_allin_stack_area(screen_area, db)
        image_path = os.path.join(folder_name, str(allin_area), image_name)
        for val in db_query.get_stack_data(allin_area, db):
            image_processing.imaging(val['x_coordinate'], val['y_coordinate'],
                                     val['width'], val['height'], image_path,
                                     val['screen_area'], db)
    except Exception as e:
        error_log.error_log('saveAllinStackImage', str(e))
        print(e)
def search_allin_stack(screen_area, db):
    """Recognise the stack size shown in the all-in region.

    Returns the matched integer value, or ``DEFAULT_STACK`` when nothing
    matches or when capture/recognition fails.
    """
    try:
        save_allin_stack_image(screen_area, db)
        allin_area = db_query.get_allin_stack_area(screen_area, db)
        for item in db_query.get_last_screen(allin_area, db):
            img_rgb = cv2.imread(item['image_path'], 0)  # 0 = grayscale read
            for value in db_query.get_allin_stack_images(db):
                if image_processing.cv_data_template(value['image_path'], img_rgb) > 0:
                    return int(value['stack_value'])
        return DEFAULT_STACK
    except Exception as e:
        error_log.error_log('searchAllinStack', str(e))
        print(e)
        # Fix: return the default instead of an implicit None so callers
        # performing int comparisons keep working after a failure.
        return DEFAULT_STACK
def save_bank_stack_image(screen_area, db):
    """Capture screenshot(s) of the bank (pot) stack region.

    Errors are logged under 'saveBankStackImage' and swallowed (best effort).
    """
    try:
        image_name = str(math.floor(time.time())) + ".png"
        folder_name = 'images/' + str(datetime.datetime.now().date())
        # Hoisted: the bank area was previously re-queried on every loop
        # iteration even though it never changes.
        bank_area = db_query.get_bank_stack_area(screen_area, db)
        image_path = os.path.join(folder_name, str(bank_area), image_name)
        for val in db_query.get_stack_data(bank_area, db):
            image_processing.imaging(val['x_coordinate'], val['y_coordinate'],
                                     val['width'], val['height'], image_path,
                                     val['screen_area'], db)
    except Exception as e:
        # Fix: was logged under the copy-pasted tag 'saveAllinStackImage',
        # misattributing bank-capture failures to the all-in capture.
        error_log.error_log('saveBankStackImage', str(e))
        print(e)
def search_bank_stack(screen_area, db):
    """Recognise the stack size shown in the bank (pot) region.

    Returns the matched integer value, or ``DEFAULT_STACK`` when nothing
    matches or when capture/recognition fails.
    """
    try:
        save_bank_stack_image(screen_area, db)
        bank_area = db_query.get_bank_stack_area(screen_area, db)
        for item in db_query.get_last_screen(bank_area, db):
            img_rgb = cv2.imread(item['image_path'], 0)  # 0 = grayscale read
            for value in db_query.get_bank_stack_images(db):
                if image_processing.cv_data_template(value['image_path'], img_rgb) > 0:
                    return int(value['stack_value'])
        return DEFAULT_STACK
    except Exception as e:
        error_log.error_log('searchBankStack', str(e))
        print(e)
        # Fix: return the default instead of an implicit None so callers such
        # as compare_bank_with_available_stack can still compare with ints.
        return DEFAULT_STACK
def compare_bank_with_available_stack(screen_area, stack_collection, db):
    """Choose an action by comparing our stack with the bank (pot) size.

    Returns 'turn_cbet' when our stack covers the bank, 'push' otherwise.
    """
    available = search_current_stack(screen_area, stack_collection, db)
    bank = search_bank_stack(screen_area, db)
    return 'turn_cbet' if available >= bank else 'push'
def convert_stack(stack):
    """Bucket a raw stack size into the nearest strategy tier.

    Tiers: >= 22 -> 22, 18-21 -> 21, 14-17 -> 17, 11-13 -> 13, 8-10 -> 10.
    Values below 8 are returned unchanged.

    Fix: chained comparisons replace ``stack in range(a, b)``, which only
    matched exact integers — a non-integer stack (e.g. 19.5) previously fell
    through every bucket and was returned raw.
    """
    if stack >= 22:
        return 22
    if stack >= 18:
        return 21
    if stack >= 14:
        return 17
    if stack >= 11:
        return 13
    if stack >= 8:
        return 10
    return stack
def get_actual_game_data(screen_area, stack_collection, db):
    """Refresh the session log with the effective stack and heads-up flag.

    Combines the last session-log row with freshly recognised opponent data
    and returns the converted (bucketed) stack size.
    """
    row = session_log.get_last_row_from_log_session(screen_area, db)
    position = row[0]['current_position']
    hand = row[0]['hand']
    # opponent_data is [is_headsup_flag, effective_stack].
    opponent_data = processing_opponent_data(screen_area, stack_collection, db)
    is_headsup = opponent_data[0]
    # On the button the situation is never treated as heads-up.
    if position == 'button':
        is_headsup = 0
    stack = opponent_data[1]
    stack = convert_stack(stack)
    # A hand string longer than 4 characters is taken to mean postflop play —
    # presumably board cards appended to the hole cards; TODO confirm.
    if len(hand) > 4:
        session_log.update_is_headsup_postflop(str(screen_area), is_headsup, db)
    # NOTE(review): source indentation was ambiguous; this assumes the stack
    # update runs on every call, not only postflop — confirm against callers.
    session_log.update_current_stack_log_session(str(screen_area), str(stack), db)
    return stack
def processing_opponent_data(screen_area, stack_collection, db):
    """Return ``[is_headsup_flag, effective_stack]`` for the current table.

    The effective stack is our own recognised stack, capped by the largest
    opponent stack when that opponent has less than we do.
    """
    hero_stack = search_current_stack(screen_area, stack_collection, db)
    opponent_info = headsup.search_opponent_card(screen_area, db, stack_collection)
    result = [opponent_info[0]]  # first element is the heads-up flag
    opponent_stacks = opponent_info[1:]
    if opponent_stacks:
        ranked = sorted(opponent_stacks, reverse=True)
        # 666 is the sentinel for an all-in opponent: replace it with the
        # recognised all-in amount before taking the maximum.
        if int(ranked[0]) == 666:
            ranked[0] = search_allin_stack(screen_area, db)
        biggest = max(ranked)
        if int(biggest) < int(hero_stack):
            hero_stack = biggest
    result.append(hero_stack)
    return result
|
"""
Создайте словарь:
{"city": "Москва", "temperature": "20"}
Выведите на экран значение ключа city
Уменьшите значение "temperature" на 5
Выведите на экран весь словарь
Проверьте, есть ли в словаре ключ country
Выведите значение по-умолчанию "Россия" для ключа country
Добавьте в словарь элемент date со значением '27.05.2019'
Выведите на экран длину словаря
"""
weather={
'city':'Москва',
'temperature':20
}
print (weather ['city'])
weather ['temperature']-=5
print (weather)
print (weather .get ('country'))
print (weather .get ('country', 'Россия'))
weather ['date']='27.05.2019'
print (weather)
print (len (weather))
|
from flask import Flask, session, render_template, request, redirect, g, url_for
from flask import Blueprint
from werkzeug.utils import secure_filename
import os #operating system
from google.cloud import bigquery
import socket
from flask import flash
import sys
import datetime
from google.cloud import datastore
from google.cloud import bigquery
from google.cloud import storage
from google.oauth2 import service_account
from google.auth.transport.requests import AuthorizedSession
# Pick the file-server root: the SMB share from a workstation, the local
# mount when running on the "contentobi" host itself (tuple-index-by-bool idiom).
fileserver_baseroute = ("//192.168.20.87", "/media")[socket.gethostname()=="contentobi"]
cargue_tempus_api = Blueprint('cargue_tempus_api', __name__, static_folder='static',template_folder='templates')
app = Flask(__name__)
# NOTE(review): the credential query below runs at import time — module import
# blocks on BigQuery and fails if the service is unreachable.
client = bigquery.Client()
QUERY = (
    'SELECT string_field_0, string_field_1 FROM `contento-bi.unificadas.modulo_logueo` where string_field_2 = "Unificadas" ') #WHERE ipdial_code = "intcob-unisabaneta"
query_job = client.query(QUERY)
rows = query_job.result()
data = ""
# Only the LAST result row survives this loop: username/password end up
# holding the final credential pair, which /login checks against.
# NOTE(review): if the query returns zero rows, username/password are never
# defined and /login raises NameError — confirm the table is always populated.
for row in rows:
    username = row.string_field_0
    password = row.string_field_1
@cargue_tempus_api.route('/inicio', methods=['GET', 'POST'])
def inicio():
    """Render the Tempus landing page."""
    return render_template('main_page_tempus.html')
@cargue_tempus_api.route('/login', methods=['GET', 'POST'])
def login():
    """Validate submitted credentials against the BigQuery-loaded pair.

    On success, mark the session as logged in and render the module page.
    Fix: the error message was previously assigned to a local variable and
    silently dropped — it is now flashed so the user sees why login failed.
    """
    if request.method == 'POST':
        if request.form['username'] != username:
            flash('Invalid username')
        elif request.form['password'] != password:
            flash('Invalid password')
        else:
            session['logged_in'] = True
            return render_template('tempus_Modulo.html')
    return render_template('main_page_tempus.html')
@cargue_tempus_api.route('/logout')
def logout():
    """Clear the logged-in flag (if any) and return to the landing page."""
    session.pop('logged_in', None)
    return render_template('main_page_tempus.html')
############################################################################################################
@cargue_tempus_api.route('/redireccion_archivo_prejuridico', methods=['GET', 'POST'])
def upload_file_prejuridico():
    """Upload a file (form field 'file') into the Tempus/Adeinco1 share folder."""
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Adeinco1'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only names carrying a whitelisted extension.
        return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # Fix: dropped the dead `file = 'file'` assignment — it was
        # overwritten before first use. Local renamed to `upload` for clarity.
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        upload = request.files['file']
        if upload.filename == '':
            # The browser submits an empty part when no file was chosen.
            flash('No selected file')
            return redirect(request.url)
        if upload and allowed_file(upload.filename):
            filename = secure_filename(upload.filename)
            upload.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_prejuridico',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
@cargue_tempus_api.route('/redireccion_archivo_demograficos', methods=['GET', 'POST'])
def upload_file_demograficos():
    """Upload the demographics file (form field 'file_demograficos') to Tempus/Adeinco1.

    The file is always stored under the hard-coded name '20200924.csv'.
    """
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Adeinco1'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only names carrying a whitelisted extension.
        return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # Fix: dropped the dead `file = 'file_demograficos'` assignment and the
        # secure_filename result that was computed and immediately discarded.
        if 'file_demograficos' not in request.files:
            flash('No file part')
            return redirect(request.url)
        upload = request.files['file_demograficos']
        if upload.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if upload and allowed_file(upload.filename):
            # NOTE(review): destination name is hard-coded — confirm the fixed
            # date '20200924' is intentional.
            filename = '20200924.csv'
            upload.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_demograficos',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
#######################################################################################################
############################################################################################################
@cargue_tempus_api.route('/redireccion_archivo_asignacion', methods=['GET', 'POST'])
def upload_file_asignacion():
    """Upload a file (form field 'file') into the Tempus/Adeinco2 share folder."""
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Adeinco2'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only names carrying a whitelisted extension.
        return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # Fix: dropped the dead `file = 'file'` assignment — overwritten before use.
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        upload = request.files['file']
        if upload.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if upload and allowed_file(upload.filename):
            filename = secure_filename(upload.filename)
            upload.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_asignacion',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
@cargue_tempus_api.route('/redireccion_archivo_asignacion2', methods=['GET', 'POST'])
def upload_file_asignacion2():
    """Upload the assignment file (form field 'file_asignacion') to Tempus/Adeinco2.

    The file is always stored under the hard-coded name '20200924.csv'.
    """
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Adeinco2'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only names carrying a whitelisted extension.
        return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # Fix: dropped the dead `file = 'file_asignacion'` assignment and the
        # secure_filename result that was computed and immediately discarded.
        if 'file_asignacion' not in request.files:
            flash('No file part')
            return redirect(request.url)
        upload = request.files['file_asignacion']
        if upload.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if upload and allowed_file(upload.filename):
            # NOTE(review): destination name is hard-coded — confirm the fixed
            # date '20200924' is intentional.
            filename = '20200924.csv'
            upload.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_asignacion2',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
#######################################################################################################
############################################################################################################
@cargue_tempus_api.route('/redireccion_archivo_agaval', methods=['GET', 'POST'])
def upload_file_agaval():
    """Upload a file (form field 'file') into the Tempus/Agaval share folder."""
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Agaval'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only names carrying a whitelisted extension.
        return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # Fix: dropped the dead `file = 'file'` assignment — overwritten before use.
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        upload = request.files['file']
        if upload.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if upload and allowed_file(upload.filename):
            filename = secure_filename(upload.filename)
            upload.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_agaval',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
@cargue_tempus_api.route('/redireccion_archivo_Agavalc', methods=['GET', 'POST'])
def upload_file_Agavalc():
    """Upload the Agaval file (form field 'file_agaval') to Tempus/Agaval.

    The file is always stored under the hard-coded name '20200924.csv'.
    """
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Agaval'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only names carrying a whitelisted extension.
        return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # Fix: dropped the dead `file = 'file_agaval'` assignment and the
        # secure_filename result that was computed and immediately discarded.
        if 'file_agaval' not in request.files:
            flash('No file part')
            return redirect(request.url)
        upload = request.files['file_agaval']
        if upload.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if upload and allowed_file(upload.filename):
            # NOTE(review): destination name is hard-coded — confirm the fixed
            # date '20200924' is intentional.
            filename = '20200924.csv'
            upload.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_Agavalc',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
#######################################################################################################
############################################################################################################
@cargue_tempus_api.route('/redireccion_archivo_agaval2', methods=['GET', 'POST'])
def upload_file_agaval2():
    """Upload a file (form field 'file') into the Tempus/Agaval2 share folder."""
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Agaval2'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only names carrying a whitelisted extension.
        return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # Fix: dropped the dead `file = 'file'` assignment — overwritten before use.
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        upload = request.files['file']
        if upload.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if upload and allowed_file(upload.filename):
            filename = secure_filename(upload.filename)
            upload.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_agaval2',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
@cargue_tempus_api.route('/redireccion_archivo_Agavalc2', methods=['GET', 'POST'])
def upload_file_Agavalc2():
    """Upload the Agaval2 file (form field 'file_agaval2') to Tempus/Agaval2.

    The file is always stored under the hard-coded name '20200924.csv'.
    """
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Agaval2'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only names carrying a whitelisted extension.
        return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # Fix: dropped the dead `file = 'file_agaval2'` assignment and the
        # secure_filename result that was computed and immediately discarded.
        if 'file_agaval2' not in request.files:
            flash('No file part')
            return redirect(request.url)
        upload = request.files['file_agaval2']
        if upload.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if upload and allowed_file(upload.filename):
            # NOTE(review): destination name is hard-coded — confirm the fixed
            # date '20200924' is intentional.
            filename = '20200924.csv'
            upload.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_Agavalc2',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
#######################################################################################################
############################################################################################################
@cargue_tempus_api.route('/redireccion_archivo_aval1', methods=['GET', 'POST'])
def upload_file_aval1():
    """Upload a file (form field 'file') into the Tempus/Aval1 share folder."""
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Aval1'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only names carrying a whitelisted extension.
        return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # Fix: dropped the dead `file = 'file'` assignment — overwritten before use.
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        upload = request.files['file']
        if upload.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if upload and allowed_file(upload.filename):
            filename = secure_filename(upload.filename)
            upload.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_aval1',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
@cargue_tempus_api.route('/redireccion_archivo_Avalc', methods=['GET', 'POST'])
def upload_file_Avalc():
    """Upload the Aval file (form field 'file_aval') to Tempus/Aval1.

    The file is always stored under the hard-coded name '20200924.csv'.
    """
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Aval1'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only names carrying a whitelisted extension.
        return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # Fix: dropped the dead `file = 'file_aval'` assignment and the
        # secure_filename result that was computed and immediately discarded.
        if 'file_aval' not in request.files:
            flash('No file part')
            return redirect(request.url)
        upload = request.files['file_aval']
        if upload.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if upload and allowed_file(upload.filename):
            # NOTE(review): destination name is hard-coded — confirm the fixed
            # date '20200924' is intentional.
            filename = '20200924.csv'
            upload.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_Avalc',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
#######################################################################################################
############################################################################################################
@cargue_tempus_api.route('/redireccion_archivo_aval2', methods=['GET', 'POST'])
def upload_file_aval2():
    """Upload a file (form field 'file') into the Tempus/Aval2 share folder."""
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Aval2'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only names carrying a whitelisted extension.
        return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # Fix: dropped the dead `file = 'file'` assignment — overwritten before use.
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        upload = request.files['file']
        if upload.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if upload and allowed_file(upload.filename):
            filename = secure_filename(upload.filename)
            upload.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_aval2',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
@cargue_tempus_api.route('/redireccion_archivo_Avalc2', methods=['GET', 'POST'])
def upload_file_Avalc2():
    """Upload the Aval2 file (form field 'file_aval2') to Tempus/Aval2.

    The file is always stored under the hard-coded name '20200924.csv'.
    """
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Aval2'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only names carrying a whitelisted extension.
        return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # Fix: dropped the dead `file = 'file_aval2'` assignment and the
        # secure_filename result that was computed and immediately discarded.
        if 'file_aval2' not in request.files:
            flash('No file part')
            return redirect(request.url)
        upload = request.files['file_aval2']
        if upload.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if upload and allowed_file(upload.filename):
            # NOTE(review): destination name is hard-coded — confirm the fixed
            # date '20200924' is intentional.
            filename = '20200924.csv'
            upload.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_Avalc2',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
#######################################################################################################
############################################################################################################
@cargue_tempus_api.route('/redireccion_archivo_codigos', methods=['GET', 'POST'])
def upload_file_codigos():
    """Upload a file (form field 'file') into the Tempus/Cod share folder."""
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Cod'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only names carrying a whitelisted extension.
        return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # Fix: dropped the dead `file = 'file'` assignment — overwritten before use.
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        upload = request.files['file']
        if upload.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if upload and allowed_file(upload.filename):
            filename = secure_filename(upload.filename)
            upload.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_codigos',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
@cargue_tempus_api.route('/redireccion_archivo_codigosc', methods=['GET', 'POST'])
def upload_file_codigosc():
    """Upload the codes file (form field 'file_cod') to Tempus/Cod.

    The file is always stored under the hard-coded name '20200924.csv'.
    """
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Cod'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only names carrying a whitelisted extension.
        return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # Fix: dropped the dead `file = 'file_cod'` assignment and the
        # secure_filename result that was computed and immediately discarded.
        if 'file_cod' not in request.files:
            flash('No file part')
            return redirect(request.url)
        upload = request.files['file_cod']
        if upload.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if upload and allowed_file(upload.filename):
            # NOTE(review): destination name is hard-coded — confirm the fixed
            # date '20200924' is intentional.
            filename = '20200924.csv'
            upload.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_codigosc',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
#######################################################################################################
############################################################################################################
@cargue_tempus_api.route('/redireccion_archivo_estrategia', methods=['GET', 'POST'])
def upload_file_estrategia():
    """Upload a file (form field 'file') into the Tempus/Estrategia share folder."""
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Estrategia'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only names carrying a whitelisted extension.
        return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # Fix: dropped the dead `file = 'file'` assignment — overwritten before use.
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        upload = request.files['file']
        if upload.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if upload and allowed_file(upload.filename):
            filename = secure_filename(upload.filename)
            upload.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_estrategia',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
@cargue_tempus_api.route('/redireccion_archivo_estrategiac', methods=['GET', 'POST'])
def upload_file_estrategiac():
    """Handle upload of the 'estrategia' file under a fixed name.

    POST: reads the upload from the 'file_estrategia' form field and stores it
    on the file server under Tempus/Estrategia as '20200924.csv'.
    GET (or a rejected extension): returns the status string.
    """
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Estrategia'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only filenames whose extension is in the whitelist above.
        return '.' in filename and \
            filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # The field name must match the 'name' attribute of the HTML input.
        if 'file_estrategia' not in request.files:
            flash('No file part')
            return redirect(request.url)
        file = request.files['file_estrategia']
        # The browser submits an empty filename when no file was selected.
        if file.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if file and allowed_file(file.filename):
            # The upload is always stored under a fixed name; the client's
            # filename is only validated, never used.
            filename = '20200924' + '.csv'
            file.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_estrategiac',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
#######################################################################################################
############################################################################################################
@cargue_tempus_api.route('/redireccion_archivo_historico', methods=['GET', 'POST'])
def upload_file_historico():
    """Handle upload of the 'historico' file.

    POST: reads the upload from the 'file' form field and stores it on the
    file server under Tempus/Historico with its sanitized original name.
    GET (or a rejected extension): returns the status string.
    """
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Historico'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only filenames whose extension is in the whitelist above.
        return '.' in filename and \
            filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # The field name must match the 'name' attribute of the HTML input.
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        file = request.files['file']
        # The browser submits an empty filename when no file was selected.
        if file.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if file and allowed_file(file.filename):
            filename = secure_filename(file.filename)
            file.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_historico',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
@cargue_tempus_api.route('/redireccion_archivo_historicoc', methods=['GET', 'POST'])
def upload_file_historicoc():
    """Handle upload of the 'historico' file under a fixed name.

    POST: reads the upload from the 'file_historico' form field and stores it
    on the file server under Tempus/Historico as '20200924.csv'.
    GET (or a rejected extension): returns the status string.
    """
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Historico'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only filenames whose extension is in the whitelist above.
        return '.' in filename and \
            filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # The field name must match the 'name' attribute of the HTML input.
        if 'file_historico' not in request.files:
            flash('No file part')
            return redirect(request.url)
        file = request.files['file_historico']
        # The browser submits an empty filename when no file was selected.
        if file.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if file and allowed_file(file.filename):
            # The upload is always stored under a fixed name; the client's
            # filename is only validated, never used.
            filename = '20200924' + '.csv'
            file.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_historicoc',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
#######################################################################################################
############################################################################################################
@cargue_tempus_api.route('/redireccion_archivo_telefonos', methods=['GET', 'POST'])
def upload_file_telefonos():
    """Handle upload of the 'telefonos' file.

    POST: reads the upload from the 'file' form field and stores it on the
    file server under Tempus/Lineatel with its sanitized original name.
    GET (or a rejected extension): returns the status string.
    """
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Lineatel'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only filenames whose extension is in the whitelist above.
        return '.' in filename and \
            filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # The field name must match the 'name' attribute of the HTML input.
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        file = request.files['file']
        # The browser submits an empty filename when no file was selected.
        if file.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if file and allowed_file(file.filename):
            filename = secure_filename(file.filename)
            file.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_telefonos',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
@cargue_tempus_api.route('/redireccion_archivo_telefonosc', methods=['GET', 'POST'])
def upload_file_telefonosc():
    """Handle upload of the 'telefonos' file under a fixed name.

    POST: reads the upload from the 'file_telefonos' form field and stores it
    on the file server under Tempus/Lineatel as '20200924.csv'.
    GET (or a rejected extension): returns the status string.
    """
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Lineatel'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only filenames whose extension is in the whitelist above.
        return '.' in filename and \
            filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # The field name must match the 'name' attribute of the HTML input.
        if 'file_telefonos' not in request.files:
            flash('No file part')
            return redirect(request.url)
        file = request.files['file_telefonos']
        # The browser submits an empty filename when no file was selected.
        if file.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if file and allowed_file(file.filename):
            # The upload is always stored under a fixed name; the client's
            # filename is only validated, never used.
            filename = '20200924' + '.csv'
            file.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_telefonosc',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
#######################################################################################################
############################################################################################################
@cargue_tempus_api.route('/redireccion_archivo_prejul', methods=['GET', 'POST'])
def upload_file_prejul():
    """Handle upload of the 'prejul' file.

    POST: reads the upload from the 'file' form field and stores it on the
    file server under Tempus/Prejul with its sanitized original name.
    GET (or a rejected extension): returns the status string.
    """
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Prejul'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only filenames whose extension is in the whitelist above.
        return '.' in filename and \
            filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # The field name must match the 'name' attribute of the HTML input.
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        file = request.files['file']
        # The browser submits an empty filename when no file was selected.
        if file.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if file and allowed_file(file.filename):
            filename = secure_filename(file.filename)
            file.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_prejul',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
@cargue_tempus_api.route('/redireccion_archivo_prejulc', methods=['GET', 'POST'])
def upload_file_prejulc():
    """Handle upload of the 'prejul' file under a fixed name.

    POST: reads the upload from the 'file_prejul' form field and stores it on
    the file server under Tempus/Prejul as '20200924.csv'.
    GET (or a rejected extension): returns the status string.
    """
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Prejul'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only filenames whose extension is in the whitelist above.
        return '.' in filename and \
            filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # The field name must match the 'name' attribute of the HTML input.
        if 'file_prejul' not in request.files:
            flash('No file part')
            return redirect(request.url)
        file = request.files['file_prejul']
        # The browser submits an empty filename when no file was selected.
        if file.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if file and allowed_file(file.filename):
            # The upload is always stored under a fixed name; the client's
            # filename is only validated, never used.
            filename = '20200924' + '.csv'
            file.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_prejulc',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
#######################################################################################################
############################################################################################################
@cargue_tempus_api.route('/redireccion_archivo_prejurleo', methods=['GET', 'POST'])
def upload_file_prejurleo():
    """Handle upload of the 'prejuridico' file.

    POST: reads the upload from the 'file' form field and stores it on the
    file server under Tempus/Prejuridico with its sanitized original name.
    GET (or a rejected extension): returns the status string.
    """
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Prejuridico'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only filenames whose extension is in the whitelist above.
        return '.' in filename and \
            filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # The field name must match the 'name' attribute of the HTML input.
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        file = request.files['file']
        # The browser submits an empty filename when no file was selected.
        if file.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if file and allowed_file(file.filename):
            filename = secure_filename(file.filename)
            file.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_prejurleo',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
@cargue_tempus_api.route('/redireccion_archivo_prejuleoc', methods=['GET', 'POST'])
def upload_file_prejuleoc():
    """Handle upload of the 'prejuridico' file under a fixed name.

    POST: reads the upload from the 'file_prejuleo' form field and stores it
    on the file server under Tempus/Prejuridico as '20200924.csv'.
    GET (or a rejected extension): returns the status string.
    """
    UPLOAD_FOLDER = '/BI_Archivos/GOOGLE/Tempus/Prejuridico'
    ruta_completa = fileserver_baseroute + UPLOAD_FOLDER
    app.config['ruta_completa'] = ruta_completa
    ALLOWED_EXTENSIONS = {'csv', 'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}

    def allowed_file(filename):
        # Accept only filenames whose extension is in the whitelist above.
        return '.' in filename and \
            filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

    if request.method == 'POST':
        # The field name must match the 'name' attribute of the HTML input.
        if 'file_prejuleo' not in request.files:
            flash('No file part')
            return redirect(request.url)
        file = request.files['file_prejuleo']
        # The browser submits an empty filename when no file was selected.
        if file.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if file and allowed_file(file.filename):
            # The upload is always stored under a fixed name; the client's
            # filename is only validated, never used.
            filename = '20200924' + '.csv'
            file.save(os.path.join(app.config['ruta_completa'], filename))
            return redirect(url_for('cargue_tempus_api.upload_file_prejuleoc',
                                    filename='20200924'))
    return "Fichero movido exitosamente"
#######################################################################################################
|
from tkinter import *

root = Tk()
root.title('Simple Calculator')

# Display entry at the top of the window.
e = Entry(root, width=35, borderwidth=5)
e.grid(row=0, column=0, columnspan=3, padx=10, pady=10)


def button_click(number):
    """Append the clicked digit to the display."""
    currentNumber = e.get()
    e.delete(0, END)
    e.insert(0, str(currentNumber) + str(number))


def _begin_operation(operator):
    """Remember the first operand and the pending operator, then clear the display.

    Shared by add/subtract/multiply/divide, which previously duplicated this
    code verbatim.
    """
    global firstNumber
    global sign
    firstNumber = int(e.get())
    sign = operator
    e.delete(0, END)


def add(operator):
    _begin_operation(operator)


def subtract(operator):
    _begin_operation(operator)


def multiply(operator):
    _begin_operation(operator)


def divide(operator):
    _begin_operation(operator)


def equal():
    """Apply the pending operator to the stored and the current operand."""
    global firstNumber
    global sign
    secondNumber = int(e.get())
    e.delete(0, END)
    # NOTE(review): '/' uses true division and raises ZeroDivisionError when
    # the second operand is 0 — confirm whether that is acceptable.
    if sign == '+':
        e.insert(0, firstNumber + secondNumber)
    elif sign == '-':
        e.insert(0, firstNumber - secondNumber)
    elif sign == '/':
        e.insert(0, firstNumber / secondNumber)
    elif sign == '*':
        e.insert(0, firstNumber * secondNumber)
    firstNumber = 0
    sign = []


def clear():
    """Erase the display."""
    e.delete(0, END)


# Define buttons
button_1 = Button(root, text='1', padx = 40, pady = 20, command=lambda : button_click(1))
button_2 = Button(root, text='2', padx = 40, pady = 20, command=lambda : button_click(2))
button_3 = Button(root, text='3', padx = 40, pady = 20, command=lambda : button_click(3))
button_4 = Button(root, text='4', padx = 40, pady = 20, command=lambda : button_click(4))
button_5 = Button(root, text='5', padx = 40, pady = 20, command=lambda : button_click(5))
button_6 = Button(root, text='6', padx = 40, pady = 20, command=lambda : button_click(6))
button_7 = Button(root, text='7', padx = 40, pady = 20, command=lambda : button_click(7))
button_8 = Button(root, text='8', padx = 40, pady = 20, command=lambda : button_click(8))
button_9 = Button(root, text='9', padx = 40, pady = 20, command=lambda : button_click(9))
button_0 = Button(root, text='0', padx = 40, pady = 20, command=lambda : button_click(0))
button_add = Button(root, text='+', padx = 39, pady = 20, command=lambda : add('+'))
button_equal = Button(root, text='=', padx = 86, pady = 20, command=equal)
button_clear = Button(root, text='Clear', padx = 77, pady = 20, command=clear)
button_sub = Button(root, text='-', padx = 39, pady = 20, command=lambda : subtract('-'))
button_mul = Button(root, text='*', padx = 39, pady = 20, command=lambda : multiply('*'))
button_div = Button(root, text='/', padx = 39, pady = 20, command=lambda : divide('/'))

# Put buttons on GUI
button_1.grid(row=3, column=0)
button_2.grid(row=3, column=1)
button_3.grid(row=3, column=2)
button_4.grid(row=2, column=0)
button_5.grid(row=2, column=1)
button_6.grid(row=2, column=2)
button_7.grid(row=1, column=0)
button_8.grid(row=1, column=1)
button_9.grid(row=1, column=2)
button_0.grid(row=4, column=0)
button_add.grid(row=5, column=0)
button_equal.grid(row=5, column=1, columnspan=2)
button_clear.grid(row=4, column=1, columnspan=2)
button_sub.grid(row=6, column=0)
button_mul.grid(row=6, column=1)
button_div.grid(row=6, column=2)

# running the main loop.
root.mainloop()
|
def main():
    """Print a 10x10 multiplication table with right-aligned 4-wide cells."""
    for row in range(1, 11):
        for col in range(1, 11):
            print(f"{row * col:4d}", end="")
        print()


main()
|
# -*- encoding:utf-8 -*-
# __author__=='Gan'
# Sort a linked list using insertion sort.
# Definition for singly-linked list.
class ListNode(object):
    """A singly-linked list node holding a value and a pointer to its successor."""

    def __init__(self, x):
        self.val = x      # payload value
        self.next = None  # successor node, or None at the tail
class Solution(object):
    def insertionSortList(self, head):
        """Sort a singly linked list in ascending order using insertion sort.

        :type head: ListNode
        :rtype: ListNode (head of the sorted list)
        """
        if not (head and head.next): # None or one node, don't sort.
            return head
        sorted_node = head
        # Early exit: if the list is already strictly increasing, skip sorting.
        while sorted_node.next and sorted_node.next.val > sorted_node.val: # If Linked List have been sorted,don't sort
            sorted_node = sorted_node.next
        if not sorted_node.next:
            return head
        helper = ListNode(0) # helper.next will be the head.
        cur = head # the current node needs to be sorted.
        pre = helper # the current node will insert between pre and pre.next.
        while cur:
            next_ = cur.next # next sort node.
            # Advance pre from the dummy head to the insertion point.
            while pre.next and pre.next.val <= cur.val:
                pre = pre.next
            cur.next = pre.next
            pre.next = cur
            pre = helper # restart the scan from the dummy head for the next node
            cur = next_
        return helper.next
    def print_list(self, head):
        # Sort the list, then print one value per line. Returns None.
        head = self.insertionSortList(head)
        while head:
            print(head.val)
            head = head.next
if __name__ == '__main__':
    # Build the list 1 -> 4 -> 2 -> 3 -> 5 and print it in sorted order.
    head = ListNode(1)
    tail = head
    for value in (4, 2, 3, 5):
        tail.next = ListNode(value)
        tail = tail.next
    print(Solution().print_list(head))
|
# Exercise 2: Using the contact list from Exercise 1 write a python function that prints
# all people whose name begins with a specific character. Your function will take two
# parameters – the character and the dictionary
def dict_print(in_dict, letter):
    """Print every key of *in_dict* whose first character matches *letter*,
    case-insensitively."""
    wanted = letter.lower()
    for name in in_dict:
        if name[0].lower() == wanted:
            print(name)
# Sample contact book: name -> [phone number, apartment].
contacts = {
    'Michael': ['01-9448493', 'A1'],
    'Siobhan': ['01-9448342', 'A2'],
    'Merry': ['01-9448494', 'A1'],
    'Philip': ['01-94484293', 'B1'],
}
# Prompt the user for a letter and print all matching contact names.
dict_print(contacts, input("Enter a letter"))
|
# Values can be given as a range with a start, an end and a step;
# here we count down from 10 to 1.
for x in reversed(range(1, 11)):
    print(x)
|
from django.test import TestCase
from .models import Feature
from django.contrib.auth.models import User
class TestFeatureModel(TestCase):
    """Unit tests for the Feature model."""

    def test_status_defaults_to_open(self):
        # A freshly saved feature should keep its name and default to status 1.
        author = User.objects.create_user(username='test_user', password='password')
        new_feature = Feature(featureName='Test Feature', author=author)
        new_feature.save()
        self.assertEqual(new_feature.featureName, 'Test Feature')
        self.assertEqual(new_feature.status, 1)

    def test_feature_as_a_string(self):
        # __str__ should render as the feature's name.
        self.assertEqual('Test Feature', str(Feature(featureName='Test Feature')))
|
from collections import defaultdict
import re
# Per-month totals, category-key legend and per-category totals.
months = defaultdict(int)
keys = {}
keyValues = defaultdict(int)

# Read the expense log. Fix: use a context manager so the file handle is
# closed (the original opened it and never closed it).
with open('expenses/2020 Expenses.txt') as expense_file:
    expenses = [i.rstrip('\n') for i in expense_file.readlines()]

title = expenses[0]
month = None
schoolAmount = 0

# The legend starts on line 4 and runs until the first blank line; each
# legend row maps a short key (first token) to a category name (last token).
for i in expenses[3:expenses[3:].index('')+3]:
    yes = i.split()
    keys[yes[0]] = yes[-1]

for i in expenses[1:]:
    line = i.strip().lower()
    if re.search('^[a-z]+$', line):
        # A line of only letters is a month header.
        month = line
    else:
        lineBreak = [i.lower() for i in line.split()]
        # Expense rows start with a bullet; token 1 is '$amount', the last
        # token is the category key.
        if len(lineBreak) > 0 and (lineBreak[0] == '*' or lineBreak[0] == '•'):
            months[month] += float(lineBreak[1][1:])
            keyValues[lineBreak[-1]] += float(lineBreak[1][1:])
            if 'rent' in lineBreak or 'school' in lineBreak:
                schoolAmount += float(lineBreak[1][1:])

total = sum(months.values())

print(f'\n{title}')
print('---------------------------\n')
for k, v in months.items():
    print(f'{k.title():15}${v:.2f}')
print('\n---------------------------')
print('Categories\n')
for k, v in keyValues.items():
    print(f'{keys[k].title():14} - ${round(v, 2)}')
print('\n---------------------------')
print('%-14s $%.2f' % ('School Expenses: ', schoolAmount))
print('%-14s $%.2f' % ('Total without School:', total - schoolAmount))
print('%-14s $%.2f' % ('Total: ', total))
|
print('hello')  # NOTE(review): looks like leftover debug output — consider removing
class City:
    """A node in the road map: a named city plus its outgoing connections."""

    def __init__(self, name):
        self.name = name
        self.numConnections = 0   # number of outgoing connections
        self.childCities = []     # directly reachable City objects

    def addconnection(self, City, distance):
        """Record a one-way connection to *City*.

        *distance* is accepted for API compatibility but not stored; the
        search algorithms in this file only use connectivity.
        """
        self.childCities.append(City)
        self.numConnections += 1  # fix: this counter was declared but never updated
class Map:
    """The Romania road map (AIMA), with uninformed searches from Arad to Bucharest."""
    def __init__(self):
        self.initCities()
        self.addConnections()
    def initCities(self):
        """Create one City instance per city on the map."""
        self.Oradea = City("Oradea")
        self.Zerind = City("Zerind")
        self.Arad = City("Arad")
        self.Timisoara = City("Timisoara")
        self.Lugoj = City("Lugoj")
        self.Mehadia = City("Mehadia")
        self.Drobeta = City("Drobeta")
        self.Craiova = City("Craiova")
        self.Rimnicu = City("Rimnicu")
        self.Sibiu = City("Sibiu")
        self.Pitesi = City("Pitesi")
        self.Fagaras = City("Fagaras")
        self.Bucharest = City("Bucharest")
        self.Giurgiu = City("Giurgiu")
        self.Hirsova = City("Hirsova")
        self.Eforie = City("Eforie")
        self.Urziceni = City("Urziceni")
        self.Vaslui = City("Vaslui")
        self.Iasi = City("Iasi")
        self.Neamt = City("Neamt")
    def addConnections(self):
        """Wire up the road connections (each road is added in both directions)."""
        self.Oradea.addconnection(self.Zerind, 71)
        self.Oradea.addconnection(self.Sibiu, 151)
        self.Zerind.addconnection(self.Oradea, 71)
        self.Zerind.addconnection(self.Arad, 75)
        self.Arad.addconnection(self.Sibiu, 140)
        self.Arad.addconnection(self.Zerind, 75)
        self.Arad.addconnection(self.Timisoara, 118)
        self.Timisoara.addconnection(self.Arad, 118)
        self.Timisoara.addconnection(self.Lugoj, 111)
        self.Lugoj.addconnection(self.Timisoara, 111)
        self.Lugoj.addconnection(self.Mehadia, 70)
        self.Mehadia.addconnection(self.Drobeta, 75)
        self.Mehadia.addconnection(self.Lugoj, 70)
        self.Drobeta.addconnection(self.Mehadia, 75)
        self.Drobeta.addconnection(self.Craiova, 120)
        self.Craiova.addconnection(self.Drobeta, 120)
        self.Craiova.addconnection(self.Rimnicu, 146)
        self.Craiova.addconnection(self.Pitesi, 120)
        self.Rimnicu.addconnection(self.Craiova, 146)
        self.Rimnicu.addconnection(self.Sibiu, 80)
        self.Rimnicu.addconnection(self.Pitesi, 97)
        self.Sibiu.addconnection(self.Arad, 140)
        self.Sibiu.addconnection(self.Oradea, 151)
        self.Sibiu.addconnection(self.Fagaras, 99)
        self.Sibiu.addconnection(self.Rimnicu, 80)
        self.Pitesi.addconnection(self.Craiova, 120)
        self.Pitesi.addconnection(self.Rimnicu, 97)
        self.Pitesi.addconnection(self.Bucharest, 101)
        self.Fagaras.addconnection(self.Sibiu, 99)
        self.Fagaras.addconnection(self.Bucharest, 211)
        self.Bucharest.addconnection(self.Fagaras, 211)
        self.Bucharest.addconnection(self.Pitesi, 101)
        self.Bucharest.addconnection(self.Giurgiu, 90)
        self.Bucharest.addconnection(self.Urziceni, 85)
        self.Giurgiu.addconnection(self.Bucharest, 90)
        self.Urziceni.addconnection(self.Hirsova, 98)
        self.Urziceni.addconnection(self.Bucharest, 85)
        self.Urziceni.addconnection(self.Vaslui, 142)
        self.Hirsova.addconnection(self.Eforie, 86)
        self.Hirsova.addconnection(self.Urziceni, 98)
        self.Eforie.addconnection(self.Hirsova, 86)
        self.Vaslui.addconnection(self.Urziceni, 142)
        self.Vaslui.addconnection(self.Iasi, 92)
        self.Iasi.addconnection(self.Vaslui, 92)
        self.Iasi.addconnection(self.Neamt, 87)
        self.Neamt.addconnection(self.Iasi, 87)
    def DFS(self) -> list:
        """Depth-first search from Arad; returns the list of visited City nodes."""
        path = []
        self._DFS(self.Arad, path)
        return path
    #DFS recursive helper
    def _DFS(self, currentCity:City, path:list):
        """Recursive DFS; appends every visited city to *path*, returns True once
        Bucharest is reached."""
        path.append(currentCity)
        if(currentCity == self.Bucharest):
            #stops the recurison
            return True
        for child in currentCity.childCities:
            #Stops path circling
            if(child not in path):
                found = self._DFS(child, path)
                if(found):
                    return True
        #return False because bucharest hasn't been found
        return False
    def BFS(self):
        """Breadth-first search from Arad; returns the visited cities up to
        and including Bucharest (or False if unreachable)."""
        #toVisit is being used as a queue
        toVisit = []
        toVisit.append(self.Arad)
        path = self._BFS(toVisit)
        return path
    def _BFS(self,toVisit:list):
        # New cities are inserted at index 0 and removed with pop() from the
        # end, so the list behaves as a FIFO queue.
        path = []
        while(len(toVisit)>0):
            city = toVisit.pop()
            path.append(city)
            if(city == self.Bucharest):
                return path
            for child in city.childCities:
                if((child not in toVisit) and (child not in path)):
                    toVisit.insert(0, child)
        return False
    def IDS(self):
        """Iterative-deepening search: repeated depth-limited DFS with a
        growing depth limit; returns the concatenated visit trace."""
        path = []
        limit = 0
        found = False
        while(not found):
            #subpath used to get tracking of overall path when increment depthlimit
            subpath = []
            found, subpath = self.DLS(self.Arad, limit, subpath)
            path.extend(subpath)
            limit += 1
        return path
    def DLS(self, currentCity, limit, subpath):
        """Depth-limited DFS; returns (found, cities visited so far)."""
        subpath.append(currentCity)
        if(currentCity == self.Bucharest):
            return True, subpath
        if(limit == 0):
            return False, subpath
        for child in currentCity.childCities:
            if(child not in subpath):
                found, subpath = self.DLS(child, limit-1, subpath)
                if(found):
                    return True, subpath
        return False, subpath
    #just used to print city names rather than object mem addresses
    def print(self, path:list):
        """Print the name of each city in *path*, one per line."""
        for city in path:
            print(city.name)
# Build the map and demonstrate each uninformed search strategy.
# Fix: renamed the variable from 'map' to avoid shadowing the builtin.
romania_map = Map()
print("----------DFS PATH------------")
romania_map.print(romania_map.DFS())
print("----------BFS PATH------------")
romania_map.print(romania_map.BFS())
print("----------IDS PATH------------")
romania_map.print(romania_map.IDS())
print("------------------------------")
|
from django.urls import path
from . import views
# URL routes for this app; earlier experimental routes are kept below,
# commented out, for reference.
urlpatterns = [
    # path('insert_data',views.insert_data,name='insert_data'),
    # path('register',views.register,name='register'),
    # path('sendSimpleEmail',views.sendSimpleEmail,name='sendSimpleEmail'),
    # path('home',views.login,name='login'),
    path('main',views.main,name='main'),
    path('home',views.home,name='home'),
]
|
class RxDrug:
    """A prescription drug with its known drug interactions.

    interaction_list is either a list of drug names or the wildcard string
    "*", meaning the drug interacts with every other drug.
    """

    def __init__(self, name, rx_ID):
        self.name = name
        self.rx_ID = rx_ID
        self.interaction_list = []
        # Fix: define description here so it exists even if set_description
        # is never called.
        self.description = ""

    def add_interaction(self, other_drug):
        """Register *other_drug* as an interaction; "*" marks interact-with-all."""
        if other_drug == "*":
            self.interaction_list = other_drug
        elif other_drug not in self.interaction_list:
            self.interaction_list.append(other_drug)

    def check_interaction(self, other_drugs):
        """Return the subset of *other_drugs* that this drug interacts with."""
        if self.interaction_list == "*":
            return other_drugs
        return [drug for drug in other_drugs if drug in self.interaction_list]

    def set_description(self, description):
        """Attach a human-readable description to the drug."""
        self.description = description

    def __str__(self):
        return 'This prescription is for {n}'.format(n=self.name)
def drugs_list():
    '''
    Name: drugs_list
    Parameter: none
    Return: dict mapping drug name -> RxDrug built from 'rxcui_drugs.txt'
    (pipe-separated rows: name|rx_ID|description|comma-separated interactions)
    '''
    drugs = {}
    with open('rxcui_drugs.txt') as drug_file:
        for line in drug_file.readlines():
            # Split each pipe-separated record.
            fields = line.replace('\n', '').split('|')
            # Build the RxDrug and attach its description and interactions.
            rx = RxDrug(fields[0], fields[1])
            rx.set_description(fields[2])
            for other_drug in fields[3].split(','):
                rx.add_interaction(other_drug)
            drugs[rx.name] = rx
    return drugs
|
import logging
from page_object.common_fun import Common
from page_object.desired_caps import appium_desired
from selenium.webdriver.common.by import By
class LoginView(Common):
    """Page object for the app's login screen."""

    # Locators for the login form widgets.
    username_type = (By.ID, 'com.tal.kaoyan:id/login_email_edittext')
    password_type = (By.ID, 'com.tal.kaoyan:id/login_password_edittext')
    loginBtn = (By.ID, 'com.tal.kaoyan:id/login_login_btn')

    def login_action(self, username, password):
        """Dismiss any startup popups, fill in the credentials and tap login."""
        self.check_cancelBtn()
        self.check_skipBtn()
        logging.info('============login_action==============')
        logging.info('username is:%s' % username)
        self.driver.find_element(*self.username_type).send_keys(username)
        logging.info('password is:%s' % password)
        self.driver.find_element(*self.password_type).send_keys(password)
        logging.info('click loginBtn')
        self.driver.find_element(*self.loginBtn).click()
        logging.info('login finished!')
if __name__ == '__main__':
    # Manual smoke test: start an Appium session and perform a login.
    driver = appium_desired()
    l = LoginView(driver)
    l.login_action('自学网2018','zxw2018')
|
#!/usr/bin/env python
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
#
# # Authors informations
#
# @author: HUC Stéphane
# @email: <devs@stephane-huc.net>
# @url: http://stephane-huc.net
#
# @license : BSD "Simplified" 2 clauses
#
''' Worker '''
import time
class Worker():
    '''A queue-backed worker that reports progress as (proportion, message) tuples.

    Fix: converted Python-2-only ``print`` statements to function calls so the
    class runs on both Python 2 and 3.
    '''
    def __init__(self, queue):
        print('The worker start!')
        self.queue = queue

    def go(self):
        '''Run the fake workload, posting progress roughly every half second.'''
        print("The worker launch some work: ")
        # counting from 0 to 9
        for i in range(10):
            proportion = (float(i+1))/10
            self.queue.put((proportion, 'working...'))
            time.sleep(0.5)

    def ended(self):
        '''Signal completion on the queue with a (1.0, 'finished') tuple.'''
        self.queue.put((1.0, 'finished'))
        print('The worker finish!')
|
# create decision tree
import math
import pandas as pd
class DecisionTree:
    """ID3-style decision-tree node over a pandas DataFrame whose last column
    is the binary target with values "Yes"/"No"."""
    # assigning data set to every node
    def __init__(self, data_set):
        self.data_set = data_set
        self.visited_features = []  # features already used higher up the tree
        self.feature = [i for i in data_set.keys()]
        self.target = self.feature[-1]  # the target column is the last one
        self.feature = self.feature[:-1]
        self.child = {}  # attribute value -> child DecisionTree (set by node_split)
        self.entropy = self.data_entropy(data_set)
        self.split_feature = ""  # feature this node splits on (set by node_split)
        self.determined_class = ""  # leaf label, set once entropy reaches 0
    # differentiate visited and non-visited features
    def visit(self):
        """Drop already-used features; mark this node a leaf if it is pure."""
        self.feature = list(filter(lambda x: x not in self.visited_features, self.feature))
        if self.entropy == 0:
            self.determined_class = str(self.data_set[self.target].unique()[0])
    # calculate average feature entropy
    def avg_feature_entropy(self, feature):
        """Return the weighted average entropy after splitting on *feature*."""
        feature_class = self.data_set[feature].unique()
        total_count = self.data_set.shape[0]
        feature_entropy = 0.0
        for i in feature_class:
            new_data = self.data_set[self.data_set[feature] == i]
            total = new_data.shape[0]
            feature_class_entropy = self.data_entropy(new_data)
            feature_entropy += (total / total_count) * feature_class_entropy
        return feature_entropy
    # calculate average data set entropy
    def data_entropy(self, data_set):
        """Return the Shannon entropy of the Yes/No target in *data_set*."""
        values_count = data_set[self.target].value_counts()
        yes_value = 0 if "Yes" not in values_count else values_count["Yes"]
        no_value = 0 if "No" not in values_count else values_count["No"]
        total_count = data_set.shape[0]
        data_entropy = 0.0
        if yes_value != 0:
            frac_y = yes_value / total_count
            data_entropy += -frac_y * math.log2(frac_y)
        if no_value != 0:
            frac_n = no_value / total_count
            data_entropy += -frac_n * math.log2(frac_n)
        return data_entropy
    # calculate info gain of a feature
    def info_gain(self, feature):
        """Return the information gain of splitting this node on *feature*."""
        avg_feature_entropy = self.avg_feature_entropy(feature)
        return self.entropy - avg_feature_entropy
    # splitting according to maximum information gain
    def node_split(self):
        """Split on the highest-gain feature; return value -> child subtree."""
        if len(self.feature) < 1 or self.entropy == 0:
            return {}
        max_info_gain = 0
        split_feature = self.feature[0]
        for i in self.feature:
            info_gain = self.info_gain(i)
            if info_gain > max_info_gain:
                split_feature = i
                max_info_gain = info_gain
        self.split_feature = split_feature
        attribute_list = self.data_set[split_feature].unique()
        child = {}
        for i in attribute_list:
            data = self.data_set[self.data_set[split_feature] == i]
            newnode = DecisionTree(data)
            newnode.visited_features = self.visited_features[:]
            newnode.visited_features.append(split_feature)
            newnode.visit()
            child[i] = newnode
        return child
def test_decision_tree(test, root):
if root.determined_class != "":
return root.determined_class
else:
split_feature = root.split_feature
feature_class = root.data_set[split_feature].unique()
for i in test:
if i in feature_class:
return test_decision_tree(test, root.child[i])
return
# reading input file (last column of train_data.csv is the target class)
df = pd.read_csv("train_data.csv", index_col="day")
# collect feature/target column names
features = list(df.keys())
target = features[-1]
features.remove(target)
# creating root node
root = DecisionTree(df)
# create a queue with root node for level order traversal
queue = [root]
# level order traversal: split every impure node
while len(queue) > 0:
    node = queue[0]
    queue.pop(0)
    if node.entropy == 0:
        continue  # pure leaf: nothing to split
    node.child = node.node_split()
    for i in node.child.keys():
        queue.append(node.child[i])
# testing of decision tree created
# read test file
test = pd.read_csv("test_data.csv")
# read test output file
test_output = pd.read_csv("test_output.csv")
# set initial values
correct_output = 0
total_test = test.shape[0]
# to collect predicted values
predicted_output = []
# test the decision tree
for t in test.values:
    predicted_output.append(test_decision_tree(t, root))
# count how many predictions match the expected output
k = 0
for i in test_output.values:
    if i == predicted_output[k]:
        correct_output += 1
    k +=1
# BUG FIX: the original labelled this value "ERROR IN PREDICTION", but
# correct_output / total_test * 100 is the percentage of CORRECT
# predictions, i.e. the accuracy.
print("ACCURACY IN PREDICTION: ", correct_output / total_test * 100, "%")
|
import json
import os
import sys
import re
from http.server import BaseHTTPRequestHandler, HTTPServer
from os import curdir, sep
from urllib.parse import unquote
import active_passive_files
import search
from index import do_index, load_from_source
from spider import run_spider
hostName = "localhost"
hostPort = 9000  # port the search UI actually listens on (see HTTPServer below)
PORT_NUMBER = 8080  # NOTE(review): appears unused in this file -- possibly leftover
class myHandler(BaseHTTPRequestHandler):
    """HTTP front-end for the search engine.

    Routes handled by do_GET: /info, /login, /admin (password protected),
    /command (admin actions), /send (search queries), plus static assets
    and .txt document views with query-term highlighting.
    """
    last_query = ""
    def is_ascii(self, s):
        # True when every character of s is 7-bit ASCII.
        return all(ord(c) < 128 for c in s)
    def cleaning2(self, text):
        # Normalize text for matching: lower-case, strip URL-like tokens,
        # and keep only alphabetic words separated by single spaces.
        text = re.sub(r'\b(?:(?:https?|ftp)://)?\w[\w-]*(?:\.[\w-]+)+\S*', ' ', text.lower())
        words = re.findall(r'[a-z]+', text)
        return ' '.join(words)
    # search_array = []
    # have_parameters = False
    # Handler for the POST requests
    def do_POST(self):
        # POSTs to /send are accepted but ignored; searching runs over GET.
        if self.path == "/send":
            return
    # Handler for the GET requests
    def do_GET(self):
        docs_path = os.path.abspath("./docs")
        data_path = os.path.abspath("./data")
        # info page
        if self.path.startswith("/info"):
            self.send_response(200)
            self.send_header('Content-type', 'text/html')
            self.end_headers()
            # NOTE(review): this handle is never closed
            f = open(curdir + sep + "/includes/info.html")
            self.wfile.write(f.read().encode("utf-8"))
            self.wfile.write('</div>'.encode("utf-8"))
        # login page
        if self.path.startswith("/login"):
            # strip "/login?password=" (16 chars) -- assumes that exact query shape; TODO confirm
            password = self.path[16:]
            if password == "1234":
                # correct password: fall through to the /admin branch below
                self.path = "/admin"
            else:
                self.send_response(200)
                self.send_header('Content-type', 'text/html')
                self.end_headers()
                f = open(curdir + sep + "/includes/login.html")
                self.wfile.write(f.read().encode("utf-8"))
                f.close()
                # a non-empty wrong password gets an error banner
                if self.path[16:] != "":
                    self.wfile.write('<h4 style="color: red"> Wrong password!</h4>'.encode("utf-8"))
                self.wfile.write('</div>'.encode("utf-8"))
                return
        # admin panel
        if self.path.startswith("/admin"):
            self.send_response(200)
            self.send_header('Content-type', 'text/html')
            self.end_headers()
            f = open(curdir + sep + "/includes/admin_panel.html")
            self.wfile.write(f.read().encode("utf-8"))
            f.close()
            # per-document active/passive flags: truthy value == active
            f = open(data_path + "/active_passive_list.json", 'r')
            list_of_files = json.load(f)
            acitveCount='<span style="color:darkgreen;">Active : ' + str(sum(list_of_files[f] for f in list_of_files))+'</span>'
            inacitveCount='<span style="color:darkred;">Inactive : ' + str(sum(not list_of_files[f] for f in list_of_files))+'</span>'
            self.wfile.write(('<p>Total '+str(len(list_of_files))+' files. '+acitveCount+' '+inacitveCount+'</p>').encode("utf-8"))
            self.wfile.write('<table class="table table-sm"><thead><tr><th>Status</th><th>Document Name</th><th class="text-right">Actions</th></tr></thead><tbody>'.encode("utf-8"))
            # one table row per document with an activate/deactivate button
            for file in list_of_files:
                button=''
                rowClass=''
                status=''
                if list_of_files[file]:
                    button = '<input type="hidden" name="execute" value="deactivate+' + file + '"/>' + '<input class="btn btn-sm btn-danger" type="submit" value="Deactivate" />'
                    status="Active"
                else:
                    button ='<input type="hidden" name="execute" value="activate+' + file + '"/>' + '<input class="btn btn-sm btn-success" type="submit" value="Activate" />'
                    rowClass="table-danger"
                    status="Inactive"
                self.wfile.write(('<tr class="'+rowClass+'"><td>'+status+'</td><td>'+file+'</td><td class="text-right"><form action="/command">'+button+'</form></td>'+'</tr>').encode("utf-8"))
            self.wfile.write('</tbody></table>'.encode("utf-8"))
            self.wfile.write('</div>'.encode("utf-8"))
            return
        # commands
        if self.path.startswith("/command"):
            # strip "/command?execute=" (17 chars)
            command = self.path[17:]
            if command.startswith('deactivate') or command.startswith('activate'):
                if command.startswith('deactivate'):
                    # offset 13 = len("deactivate") + len("%2B"); assumes the '+'
                    # arrived URL-encoded -- TODO confirm encoding
                    command = command[13:]
                    command = command.replace("+", " ")
                    active_passive_files.ActivePassive.deactivate_file(command)
                if command.startswith('activate'):
                    # offset 11 = len("activate") + len("%2B")
                    command = command[11:]
                    command = command.replace("+", " ")
                    active_passive_files.ActivePassive.activate_file(command)
                # redirect back to the admin panel after toggling a file
                self.send_response(301)
                self.send_header('Location','http://localhost:9000/admin')
                self.end_headers()
                return
            self.send_response(200)
            self.send_header('Content-type', 'text/html')
            self.end_headers()
            # load template
            f = open(curdir + sep + "/includes/all_data.html")
            self.wfile.write(f.read().encode("utf-8"))
            back = '<a href="/admin" class="btn btn-link">Back to Admin panel</a>'
            # rebuild the whole index from scratch
            if command == 'forceindex':
                do_index()
                self.wfile.write(('<div class="jumbotron"><h1 class="display-3">Force index done</h1><a href="/admin" class="btn btn-primary btn-lg">Back to Admin panel</a>').encode("utf-8"))
                self.wfile.write('</div>'.encode("utf-8"))
                return
            # import documents from ./source and reindex
            if command == 'load':
                self.wfile.write(back.encode("utf-8"))
                # load files from source
                source_path = os.path.abspath("./source")
                # Check what files in source
                self.wfile.write('<h2>All files found in source folder:</h2>'.encode("utf-8"))
                self.wfile.write('<table class="table table-sm"><tbody>'.encode("utf-8"))
                for doc_file in os.listdir(source_path):
                    if doc_file.startswith("."):
                        continue
                    self.wfile.write(('<tr><td>' + doc_file + '</td></tr>').encode("utf-8"))
                self.wfile.write('</tbody></table>'.encode("utf-8"))
                load_from_source()
                self.wfile.write('<h2 style=" color:green">Loaded and reindexed!</h2>'.encode("utf-8"))
            # run the crawler, then list what it fetched
            if command == 'crawl':
                run_spider()
                crawl_path = os.path.abspath("./source")
                self.wfile.write('<h2>All files fetched by Crawler</h2>'.encode("utf-8"))
                self.wfile.write('<table class="table table-sm"><tbody>'.encode("utf-8"))
                for doc_file in os.listdir(crawl_path):
                    if doc_file.startswith("."): continue
                    self.wfile.write(('<tr><td>' + doc_file + '</td></tr>').encode("utf-8"))
                self.wfile.write('</tbody></table>'.encode("utf-8"))
                self.wfile.write(back.encode("utf-8"))
                self.wfile.write('<div style="clear:both"></div>'.encode("utf-8"))
                self.wfile.write('</div>'.encode("utf-8"))
            if command == 'crawl':
                # restart this process after a crawl; 'no_index' skips re-indexing on boot
                argv=sys.argv
                argv.append('no_index')
                os.execv(sys.executable, ['python3'] + argv)
            return
        # search queries: /send?search=<terms>
        if self.path.startswith("/send"):
            search_params = self.path
            # decode percent-encoded parentheses used by boolean queries
            search_params = search_params.replace("%28", "(")
            search_params = search_params.replace("%29", ")")
            # strip "/send?search=" (13 chars)
            search_params = search_params[13:]
            search_params = search_params.split("+")
            query = ""
            for val in search_params:
                if val != '':
                    query = query + val + " "
            query = query[:-1]
            # response implementation
            self.send_response(200)
            self.send_header('Content-type', 'text/html')
            self.send_header('query', query)
            self.end_headers()
            query_results, query_time = search.search(query)
            # render page template
            f = open(curdir + sep + "/includes/index.html")
            self.wfile.write(f.read().encode("utf-8"))
            # print query for user
            var = '<div class="data"><span>Searched for: ' + unquote(query) + '</span><br>'
            self.wfile.write(var.encode("utf-8"))
            var = '<span>Search time: ' + str(query_time) + '</span><br>'
            self.wfile.write(var.encode("utf-8"))
            var = '<span>Results: ' + str(query_results.__len__()) + '</span></div>'
            self.wfile.write(var.encode("utf-8"))
            # NO results found
            if not query_results:
                # render data
                to_wright = '<h1>No Results for searched "' + unquote(query) + '"</h1>'
                self.wfile.write(to_wright.encode("utf-8"))
                self.wfile.write('</div>'.encode("utf-8"))
                return
            first_line = ""
            second_line = ""
            # check for inactive files
            filename = "active_passive_list.json"
            try:
                f = open(curdir + "/data/" + filename, 'r')
                if f:
                    list_of_files = json.load(f)
                    # NOTE(review): removing from query_results while iterating it can
                    # skip entries when two inactive files are adjacent -- verify.
                    for doc_file in query_results:
                        if list_of_files[os.path.basename(doc_file[1])]:
                            continue
                        else:
                            query_results.remove(doc_file)
                f.close()
            except IOError:
                print("Open file error")
            # results found
            # try to correct indexed values
            # render each hit with a ~2-line snippet of its first words
            for i in reversed(query_results):
                file_name = i[1]
                try:
                    arr = []
                    print(docs_path + file_name)
                    with open(os.path.join(docs_path, file_name), 'rb') as f:
                        contents = f.read()
                        decoded_string = contents.decode("utf-8", "replace")
                    # accumulate at least 20 cleaned words for the snippet
                    while arr.__len__() < 20:
                        arr += self.cleaning2(decoded_string).split(" ")
                    count = 0
                    first_line = ""
                    second_line = ""
                    for word in arr:
                        if count <= 9:
                            first_line += word + " "
                        if 9 < count < 18:
                            second_line += word + " "
                        count += 1
                    first_line = '<div class="st">' + first_line + "\n" + second_line + '</div>' + '</div>'
                    f.close()
                except IOError:
                    print("Open file error")
                var = '"' + file_name + '"'
                to_wright = '<div class ="rendered_links_and_text">' + '<a class="rendered_links" href=' + var + ">" + file_name[
                    :-4] + "</a>"
                self.wfile.write(to_wright.encode("utf-8"))
                self.wfile.write(first_line.encode("utf-8"))
                f.close()
            self.wfile.write('</div>'.encode("utf-8"))
            return
        if self.path == "/":
            # bare root -> serve the search UI
            self.path = "/includes/index.html"
            pass
        try:
            # Check the file extension required and
            # set the right mime type
            sendReply = False
            finalFile = False  # .txt documents get the highlighted reader view
            icon = False
            if self.path.endswith(".txt"):
                mimetype = 'text/html'
                sendReply = True
                finalFile = True
            if self.path.endswith(".html"):
                mimetype = 'text/html'
                sendReply = True
            if self.path.endswith(".jpg"):
                mimetype = 'image/jpg'
                sendReply = True
            if self.path.endswith(".gif"):
                mimetype = 'image/gif'
                sendReply = True
            if self.path.endswith(".js"):
                mimetype = 'application/javascript'
                sendReply = True
            if self.path.endswith(".css"):
                mimetype = 'text/css'
                self.path = "/includes" + self.path
                sendReply = True
            if self.path.endswith(".ico"):
                mimetype = 'image/x-icon'
                self.path = "/includes" + self.path
                icon = True
                sendReply = True
            if sendReply:
                # Open the static file requested and send it
                self.send_response(200)
                self.send_header('Content-type', mimetype)
                self.end_headers()
                if icon:
                    f = open(curdir + sep + self.path, 'rb')
                    self.wfile.write(f.read())
                    return
                elif finalFile:
                    # document reader: render the .txt with the query terms highlighted
                    self.wfile.write('<div class="container"><div class="row"><div class="col-12">'.encode("utf-8"))
                    file_path = self.path
                    file_path = file_path.replace("%20", " ")
                    f = open(docs_path + sep + file_path, 'rb')
                    # print to page text
                    with open(docs_path + "/" + file_path, 'rb') as f:
                        contents = f.read()
                        decoded_string = contents.decode("utf-8", "replace")
                    # keep only ASCII characters for display
                    cleaned_string = ""
                    for char in decoded_string:
                        if self.is_ascii(char):
                            cleaned_string = cleaned_string + char
                    # recover the originating search query from the Referer header
                    var = self.headers
                    query = ""
                    for val in var:
                        if val == "Referer":
                            query = var[val]
                    # strip "http://localhost:9000/send?search=" (34 chars)
                    saved_query_for_back_to_search = query[34:]
                    saved_query_for_back_to_search = saved_query_for_back_to_search.replace("%2B", " ")
                    saved_query_for_back_to_search = saved_query_for_back_to_search.replace("%28", "(")
                    saved_query_for_back_to_search = saved_query_for_back_to_search.replace("%29", ")")
                    saved_query_for_back_to_search = saved_query_for_back_to_search.replace("%22", '"')
                    is_wildcard = False
                    if "*" in query:
                        is_wildcard = True
                    query = query[34:]
                    query = query.replace("*", '')
                    search_params = query.replace("%28", "(")
                    search_params = search_params.replace("%29", ")")
                    search_params = search_params.split("+")
                    query = ""
                    for val in search_params:
                        if val != '':
                            query = query + val + " "
                    query = query[:-1]
                    query = query.replace("(", "")
                    query = query.replace(")", "")
                    query = query.split(" ")
                    set_query = set(query)
                    # drop boolean operators; keep only the search literals
                    query_literals = set_query - {"AND", "OR", "NOT", ")", "(", ""}
                    words = cleaned_string.split(" ")
                    # load template
                    f = open(curdir + sep + "/includes/all_data.html")
                    self.wfile.write(f.read().encode("utf-8"))
                    # file name
                    file_path = file_path[1:]
                    var = '<h1>' + file_path[:-4] + '</h1>'
                    self.wfile.write(var.encode("utf-8"))
                    self.wfile.write('<p>'.encode("utf-8"))
                    # normalized literals used for highlight matching
                    cleaned_query_literals = {''}
                    cleaned_query_literals.pop()
                    for word in query_literals:
                        word = self.cleaning2(word)
                        cleaned_query_literals.add(word)
                    # file text
                    for word in words:
                        if self.cleaning2(word) in cleaned_query_literals \
                                or (is_wildcard and word.startswith(tuple(cleaned_query_literals))):
                            big_word = '<b><u style="background-color:yellow;">' + word + '</u></b>' + " "
                            self.wfile.write(big_word.encode("utf-8"))
                        else:
                            word = word + " "
                            self.wfile.write(word.encode("utf-8"))
                    self.wfile.write('</p>'.encode("utf-8"))
                    var = '</div></div><div class="row align-items-end"><div class="col-2"><a class="btn btn-primary" href="http://localhost:9000/send?search=' + saved_query_for_back_to_search + '">Back to search<a></div></div>'
                    self.wfile.write(var.encode("utf-8"))
                    self.wfile.write('</div>'.encode("utf-8"))
                    f.close()
                else:
                    # plain static asset (html/js/images)
                    f = open(curdir + sep + self.path)
                    self.wfile.write(f.read().encode("utf-8"))
                    self.wfile.write('</div>'.encode("utf-8"))
            return
        except IOError:
            self.send_error(404, 'File Not Found: %s' % self.path)
try:
    # Create a web server and define the handler to manage the requests
    server = HTTPServer((hostName, hostPort), myHandler)
    # perform index
    # (re)build the search index unless an extra CLI argument (e.g. 'no_index') was passed
    if len(sys.argv)<=1:
        do_index()
    # Wait forever for incoming http requests
    print('Started http server on port ', hostPort)
    server.serve_forever()
except KeyboardInterrupt:
    # Ctrl+C: close the listening socket cleanly
    print('^C received, shutting down the web server')
    server.socket.close()
|
'''
Flask-Notepad
this application store your memo using Flask
'''
# -*- coding: utf-8 -*-
from os import path
import click
from flask import redirect, url_for
from flask_login import LoginManager
from app import create_app
from model.tables import User, DB
from model.login_user_model import LoginUser
# Build the Flask app from the on-disk config and wire up flask-login.
APP = create_app(path.dirname(__file__) + '/config/config.cfg')
LOGIN_MANAGER = LoginManager()
LOGIN_MANAGER.init_app(APP)
@LOGIN_MANAGER.user_loader
def user_loader(user_id):
    '''
    flask-login callback: resolve the session's user id (stored as a string)
    to a LoginUser row, or None when no such user exists.
    '''
    return LoginUser.query.get(int(user_id))
@LOGIN_MANAGER.unauthorized_handler
def unauthorized():
    ''' flask-login callback: no session -- redirect anonymous
    visitors to the login/top page.
    '''
    return redirect(url_for('login_controller.top'))
@APP.cli.command()
def initdb():
    ''' flask shell command:
    drop and re-create all tables (destructive -- existing data is lost).
    '''
    click.echo('start init db')
    click.echo('drop all tables')
    DB.drop_all()
    click.echo('create all tables')
    DB.create_all()
    click.echo('finish')
@APP.cli.command()
@click.argument('email')
@click.argument('password')
def useradd(email, password):
    ''' flask shell command:
    insert a new row into the user table; the password is hashed before storage.
    '''
    # NOTE(review): this echoes the plaintext password to the console --
    # consider removing it from the log line.
    click.echo('email ' + email + ', password ' + password)
    user = User()
    user.email = email
    user.hash_password(password)
    DB.session.add(user)
    DB.session.commit()
    click.echo('finish')
|
from django.conf.urls import url
from django.urls import path, include
from django.views.generic import TemplateView
from post_app.views import *
from post_app import views
# URL routes for post_app: list/create views plus per-post like/comment endpoints.
# NOTE(review): django.conf.urls.url() is deprecated and removed in Django 4;
# these regex patterns would need re_path() on modern Django.
urlpatterns = [
    path('', views.TextList.as_view()),
    path('create/', views.create_post, name="create_post"),
    path('texts/', views.texts, name='texts'),
    path('links/', views.links, name='links'),
    path('files/', views.files, name='files'),
    path('text/', TextView.as_view(), name='add_post_text'),
    path('file/', FileView.as_view(), name='add_post_file'),
    path('link/', LinkView.as_view(), name='add_post_link'),
    url('(texts|files|links)/(?P<post_id>[0-9]+)', views.post, name='post'),
    url("(?P<post_id>[0-9]+)/(?P<type>[a-zA-Z]+)/likes", views.like, name='add_like'),
    url("(?P<post_id>[0-9]+)/(?P<type>[a-zA-Z]+)/comments", CommentView.as_view(), name='add_comment'),
    path('my_posts/', views.my_posts, name='my_posts'),
]
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import os

# Libraries to install. BUG FIX: "mathplotlib" was a typo -- the PyPI package
# is "matplotlib".
libs = {"matplotlib", "pandas", "openpyxl"}
failed = False
for lib in libs:
    # BUG FIX: the original ran os.system("pip install" + lib) with no space,
    # producing commands like "pip installpandas". Also, os.system() does not
    # raise on command failure, so the original try/except never triggered;
    # check the exit status instead.
    if os.system("pip install " + lib) != 0:
        failed = True
if failed:
    print("Failed pip install")
else:
    print("Successful")
|
#!/usr/bin/env python
# Copyright (c) 2014 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Ensure that ninja includes the .pdb as an output file from linking.
"""
import TestGyp
import os
import sys
# This gyp regression test only applies to the MSVC toolchain.
if sys.platform == 'win32':
    test = TestGyp.TestGyp(formats=['ninja'])
    CHDIR = 'linker-flags'
    test.run_gyp('pdb-output.gyp', chdir=CHDIR)
    # Note, building the pdbs rather than ALL or gyp target.
    test.build('pdb-output.gyp', 'output_exe.pdb', chdir=CHDIR)
    test.build('pdb-output.gyp', 'output_dll.pdb', chdir=CHDIR)
    def FindFile(pdb):
        # True when the named .pdb exists in the build output directory.
        full_path = test.built_file_path(pdb, chdir=CHDIR)
        return os.path.isfile(full_path)
    # Both the exe's and the dll's pdb must be produced as link outputs.
    if not FindFile('output_exe.pdb'):
        test.fail_test()
    if not FindFile('output_dll.pdb'):
        test.fail_test()
    test.pass_test()
|
import json
import pathlib
from typing import Any, Callable, List, Optional, Tuple
from urllib.parse import urlparse
from PIL import Image
from .utils import download_and_extract_archive, verify_str_arg
from .vision import VisionDataset
class CLEVRClassification(VisionDataset):
    """`CLEVR <https://cs.stanford.edu/people/jcjohns/clevr/>`_ classification dataset.

    Each sample is an image labelled with the number of objects in its scene.

    Args:
        root (string): Root directory of dataset where directory ``root/clevr`` exists or will be saved to if download is
            set to True.
        split (string, optional): The dataset split, supports ``"train"`` (default), ``"val"``, or ``"test"``.
        transform (callable, optional): A function/transform that takes in an PIL image and returns a transformed
            version. E.g, ``transforms.RandomCrop``
        target_transform (callable, optional): A function/transform that takes in the target and transforms it.
        download (bool, optional): If true, downloads the dataset from the internet and puts it in root directory. If
            dataset is already downloaded, it is not downloaded again.
    """

    _URL = "https://dl.fbaipublicfiles.com/clevr/CLEVR_v1.0.zip"
    _MD5 = "b11922020e72d0cd9154779b2d3d07d2"

    def __init__(
        self,
        root: str,
        split: str = "train",
        transform: Optional[Callable] = None,
        target_transform: Optional[Callable] = None,
        download: bool = False,
    ) -> None:
        # Validate the split before doing any other work so bad arguments fail fast.
        self._split = verify_str_arg(split, "split", ("train", "val", "test"))
        super().__init__(root, transform=transform, target_transform=target_transform)
        self._base_folder = pathlib.Path(self.root) / "clevr"
        # e.g. <root>/clevr/CLEVR_v1.0 (archive stem)
        self._data_folder = self._base_folder / pathlib.Path(urlparse(self._URL).path).stem
        if download:
            self._download()
        if not self._check_exists():
            raise RuntimeError("Dataset not found or corrupted. You can use download=True to download it")
        self._image_files = sorted(self._data_folder.joinpath("images", self._split).glob("*"))
        self._labels: List[Optional[int]]
        if self._split == "test":
            # No scene annotations are distributed for the test split.
            self._labels = [None] * len(self._image_files)
        else:
            scenes_path = self._data_folder / "scenes" / f"CLEVR_{self._split}_scenes.json"
            with open(scenes_path) as fh:
                scenes = json.load(fh)
            # map image filename -> number of objects in its scene
            objects_per_image = {s["image_filename"]: len(s["objects"]) for s in scenes["scenes"]}
            self._labels = [objects_per_image[f.name] for f in self._image_files]

    def __len__(self) -> int:
        return len(self._image_files)

    def __getitem__(self, idx: int) -> Tuple[Any, Any]:
        label = self._labels[idx]
        image = Image.open(self._image_files[idx]).convert("RGB")
        if self.transform:
            image = self.transform(image)
        if self.target_transform:
            label = self.target_transform(label)
        return image, label

    def _check_exists(self) -> bool:
        folder = self._data_folder
        return folder.exists() and folder.is_dir()

    def _download(self) -> None:
        # Skip the (large) download when the extracted folder is already present.
        if not self._check_exists():
            download_and_extract_archive(self._URL, str(self._base_folder), md5=self._MD5)

    def extra_repr(self) -> str:
        return f"split={self._split}"
|
from pipeline.compilers import CompilerBase
from django.core.files.base import ContentFile
from django.utils.encoding import smart_str
import scss
import os
def add_to_scss_path(path):
    """Append *path* to scss.LOAD_PATHS (a comma-separated string) unless present."""
    existing = scss.LOAD_PATHS.split(',')  # split so membership can be checked per-path
    if path in existing:
        return
    existing.append(path)
    scss.LOAD_PATHS = ','.join(existing)
class CompassCompiler(CompilerBase):
    """django-pipeline compiler that turns .scss/.sass files into CSS via pyScss."""
    output_extension = 'css'

    def match_file(self, filename):
        # Handle both SCSS and the indented SASS syntax.
        return filename.endswith(('.scss', '.sass'))

    def compile_file(self, content, path, force=False, outdated=False):
        # add the current path of the parsed file to enable the local @import
        add_to_scss_path(os.path.dirname(path))
        if force or outdated:
            self.save_file(path, scss.Scss().compile(None, content))

    def save_file(self, path, content):
        # BUG FIX: the original used open(path, 'w').write(...), leaking the
        # file handle; a context manager guarantees it is flushed and closed.
        with open(path, 'w') as out:
            return out.write(smart_str(content))
# setup scss load path: register <project root>/pipeline_compass/compass so
# @import of the bundled Compass framework resolves.
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
scss_path = os.path.join(root, 'pipeline_compass', 'compass')
add_to_scss_path(scss_path)
|
import sys

def _is_prime(num):
    """Return True when *num* is prime (trial division up to sqrt(num))."""
    # BUG FIX: the original started its divisor loop at j = 1 and only flagged
    # divisors with j > 1, so 0 and negative numbers fell through the loop
    # untouched and were counted as prime.
    if num < 2:
        return False
    j = 2
    while j * j <= num:
        if num % j == 0:
            return False
        j += 1
    return True

# Read N and then N integers; print how many of them are prime.
r = sys.stdin.readline
N = int(r())
arr = list(map(int, r().split()))
print(sum(1 for value in arr if _is_prime(value)))
|
# import sys
# import time
# sys.path.append('../vehicle')
# import odrive_manager
#
# #odrv0 = odrive_manager.OdriveManager(path=path, serial_number=serial_number).find_odrive()
# odrv0 = odrive_manager.OdriveManager(path='/dev/ttyACM0', serial_number='336B31643536').find_odrive()
#
# while(True):
# print(odrv0.get_adc_voltage(3))
# time.sleep(0.2)
#!/usr/bin/env python3
"""
Example usage of the ODrive python library to monitor and control ODrive devices
"""
from __future__ import print_function
import odrive
from odrive.enums import *
import time
import math
def steinhart_temperature_C(r, Ro=10000.0, To=25.0, beta=3900.0):
    """Convert a thermistor resistance to Celsius via the beta (simplified
    Steinhart-Hart) equation.

    r    -- measured resistance in ohms
    Ro   -- nominal resistance at the reference temperature To (ohms)
    To   -- reference temperature in Celsius
    beta -- thermistor beta coefficient in kelvin
    """
    # 1/T = ln(r/Ro)/beta + 1/To (temperatures in kelvin)
    inverse_kelvin = math.log(r / Ro) / beta + 1.0 / (To + 273.15)
    return 1.0 / inverse_kelvin - 273.15
# Find a connected ODrive (this will block until you connect one)
print("finding an odrive...")
odrv0 = odrive.find_any()
# To read a value, simply read the property
print("Bus voltage is " + str(odrv0.vbus_voltage) + "V")
# Poll ADC pin 3 forever and report the thermistor temperature.
# NOTE(review): the resistance formula assumes a 10k pull-up on a 3.3 V
# rail (divider: R = 10k / (3.3/V)) -- confirm against the actual wiring.
while(True):
    value = odrv0.get_adc_voltage(3)
    R = 10000 / (3.3/value)
    print('Voltage: {}, Thermistor resistance: {} ohms, Temperature {}'.format(value, R, steinhart_temperature_C(R)))
    time.sleep(0.2)
|
#!/usr/bin/env python
'''
Encodes the parse tree for a functional expression and provides relevant
utilities to build and evaluate the parse tree from an input string.
'''
__author__ = 'Aditya Viswanathan'
__email__ = 'aditya@adityaviswanathan.com'
from parse_tree_node import ParseTreeNode, ParseTreeNodeType
class ParseTree(object):
    '''
    Parse tree over a functional expression string such as ``f(a, g(b), c)``.
    '''
    TOKEN_ARG_START = '('
    TOKEN_ARG_END = ')'
    TOKEN_ARG_DELIMITER = ','

    def __init__(self, input_str, traversers=None):
        '''
        BUG FIX: *traversers* previously defaulted to a mutable list literal,
        which Python shares across every ParseTree constructed without the
        argument; default to None and create a fresh list per instance.
        '''
        self.input = input_str
        self.traversers = [] if traversers is None else traversers
        self.root = None

    @staticmethod
    def evaluate_trees(trees):
        '''
        Evaluate each tree in *trees*, returning the scalar results in order.
        '''
        vals = []
        for tree in trees:
            vals.append(tree.evaluate_tree(is_list=False))
        return vals

    def evaluate_tree(self, is_list=False):
        '''
        Recursively evaluates the ParseTree, lazily building it first.
        When is_list is False, unwraps the single-element result.
        '''
        if self.root is None:
            self.build_tree()
        self.root.set_is_list(is_list)
        if is_list:
            return self.root.evaluate()
        return self.root.evaluate()[0]

    def build_tree(self):
        '''
        Builds a ParseTree over self.input by doing a linear scan of the input
        string and mutating the ParseTree in-place.
        '''
        stutter = 0  # start index of the token currently being scanned
        self.root = curr = None
        for index, c in enumerate(self.input):
            if c == ParseTree.TOKEN_ARG_START:
                # text preceding '(' names a FUNCTION node
                func = ParseTreeNode(self.input[stutter:index].strip(),
                                     ParseTreeNodeType.FUNCTION,
                                     self.traversers,
                                     curr)
                if self.root is None:
                    self.root = func
                else:
                    curr.children.append(func)
                curr = func
                stutter = index + 1
            elif c == ParseTree.TOKEN_ARG_END:
                if stutter != index and self.input[stutter:index].strip():
                    # Arg before delimiter must have been CONSTANT.
                    arg = ParseTreeNode(self.input[stutter:index].strip(),
                                        ParseTreeNodeType.CONSTANT,
                                        self.traversers,
                                        curr)
                    curr.children.append(arg)
                # closing paren pops back to the parent function
                curr = curr.parent
                stutter = index + 1
            elif c == ParseTree.TOKEN_ARG_DELIMITER:
                if stutter != index:
                    # Arg before delimiter must have been CONSTANT.
                    arg = ParseTreeNode(self.input[stutter:index].strip(),
                                        ParseTreeNodeType.CONSTANT,
                                        self.traversers,
                                        curr)
                    curr.children.append(arg)
                stutter = index + 1
|
#!/usr/bin/python3
from sys import argv, exit

# Validate the command-line argument for the N-queens solver: exactly one
# argument, a number, and at least 4.
if len(argv) == 2:
    number = argv[1]
    if (number.isdigit()):
        number = int(number)
        if number < 4:
            print("N must be at least 4")
            exit(1)
    else:
        print("N must be a number")
        exit(1)
    # Plan (translated from the original Spanish comment):
    # 1. Place a queen at 0,0.
    # 2. Place the next queen at 1,0 (column + 1).
    # 3. Check whether it is under attack along diagonals, rows or columns.
    # 4. If valid, confirm the position, place the third queen and continue the
    #    procedure until every queen sits on an allowed square. Save the list of
    #    solutions and proceed to move the first queen when possible.
    # 5. If not valid (under attack): is there room on the board to move the queen?
    #    5.a Yes -> move the second queen and re-check.
    #    5.b No  -> can the previous queen be moved?
    # 6.a It cannot move -> there are no more solutions.
    # 6.b It can move    -> place the queen at the new position and re-check.
else:
    # BUG FIX: the original printed argv[1] here, which raises IndexError when
    # the script is run with no arguments, and its usage string literal was
    # unterminated (a SyntaxError that prevented the whole file from running).
    print("Usage: nqueens N")
    exit(1)
|
#!/usr/bin/env python3
import os
import pandas as pd
import numpy as np
from sklearn.feature_extraction import text
from sklearn.metrics.pairwise import cosine_similarity
# Load the TED transcript corpus; derive a short title from each talk's URL slug.
transcripts = pd.read_csv("transcripts.csv")
transcripts['title']=transcripts['url'].map(lambda x:x.split("/")[-1])
def analyzeScripts():
    """Build the TF-IDF matrix over all transcripts and its cosine-similarity matrix.

    BUG FIX: the original called generateUnigramMatrix(), which is not defined
    anywhere in this module (NameError at runtime); the defined helper is
    generateUnigram().
    """
    scripts = getScripts()
    vectorMatrix = generateTFIDMatrix(scripts)
    unigramMatrix = generateUnigram(vectorMatrix)
    return vectorMatrix, unigramMatrix
def getScripts():
    """Return every transcript's text as a plain Python list."""
    return list(transcripts['transcript'])
def generateTFIDMatrix(scripts):
    """Fit a TF-IDF vectorizer (English stop words) over *scripts* and return the document-term matrix."""
    vectorizer = text.TfidfVectorizer(input=scripts, stop_words="english")
    return vectorizer.fit_transform(scripts)
def generateUnigram(tfidMatrix):
    """Pairwise cosine similarity between all document vectors."""
    similarity = cosine_similarity(tfidMatrix)
    return similarity
def getSimilarArticles(articleText):
    """Return a comma-joined list of the 4 stored talks most similar to *articleText*.

    BUG FIX: argsort() yields POSITIONAL indices, but the original used
    label-based .loc; once any row has been dropped from *transcripts* (as in
    the __main__ block below) labels and positions diverge, so .loc can raise
    KeyError or select the wrong titles. Use .iloc for positional lookup.
    """
    allScripts = getScripts()
    allScripts.append(articleText)
    tfdiMatrix = generateTFIDMatrix(allScripts)
    unigram = generateUnigram(tfdiMatrix)
    # last row of the similarity matrix is the query article itself; take its
    # 4 nearest neighbours, excluding the final entry (itself, similarity 1.0)
    return ",".join(transcripts['title'].iloc[unigram[-1].argsort()[-5:-1]])
if __name__ == "__main__":
    # Smoke test: hold out talk #5 and check which stored talks it matches.
    allScripts = getScripts()
    testText = allScripts.pop(5)
    transcripts.drop(5, inplace=True)
    #tfidfGenerator = text.TfidfVectorizer(input= allScripts, stop_words= "english")
    #matrix = tfidfGenerator.fit_transform(allScripts)
    #print(matrix.shape)
    #print(generateTFIDMatrix(allScripts).shape)
    #print(generateUnigram(matrix).shape)
    print(getSimilarArticles(testText))
|
from django.contrib import messages
from django.contrib.auth import authenticate
from django.contrib.auth import login as auth_login, logout as auth_logout
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
from django.forms.util import ErrorList
from django.http import HttpResponse, HttpResponseRedirect
from django.template import loader, RequestContext
import hashlib
import logging
import random, string
from web.forms.account import *
from web.models import Category
@login_required()
def index(request):
    """Account settings page.

    Handles three POSTed forms, selected by the submitted 'action' field:
    'password' (change password), 'username' (change display name) and
    'delete_account' (delete the account after a typed confirmation).
    """
    # Unbound forms for the initial render; re-bound below when POSTed.
    password_form = ChangePasswordForm(error_class=PlainErrorList)
    username_form = ChangeUsernameForm(error_class=PlainErrorList)
    delete_account_form = DeleteAccountForm(error_class=PlainErrorList)
    if request.method == 'POST':
        if request.POST.get('action') == 'password':
            password_form = ChangePasswordForm(request.POST, error_class=PlainErrorList)
            # the new password must be typed identically twice
            if password_form.is_valid() and password_form.cleaned_data['new_password'] == password_form.cleaned_data['new_password_confirm']:
                # re-authenticate with the current password before changing it
                if not authenticate(username=request.user.username, password=password_form.cleaned_data['current_password']):
                    messages.warning(request, 'Please supply your current password')
                else:
                    user = User.objects.get(pk=request.user.id)
                    if user:
                        user.set_password(password_form.cleaned_data['new_password'])
                        user.save()
                        messages.success(request, 'Your password has been changed.')
                        return HttpResponseRedirect(reverse('account'))
            else:
                messages.warning(request, 'Could not change your password. Make sure you type the same password twice in the form below')
        elif request.POST.get('action') == 'username':
            username_form = ChangeUsernameForm(request.POST, error_class=PlainErrorList)
            if username_form.is_valid():
                user = User.objects.get(pk=request.user.id)
                # reject display names already taken by another account
                list_with_username = User.objects.filter(username=username_form.cleaned_data['new_username'])
                if len(list_with_username) > 0:
                    messages.warning(request, 'The display name %s is currently in use. Please choose a different display name.' % username_form.cleaned_data['new_username'])
                if user and len(list_with_username) == 0:
                    user.username = username_form.cleaned_data['new_username']
                    user.save()
                    messages.success(request, 'Your display name has been changed.')
                    return HttpResponseRedirect(reverse('account'))
            else:
                messages.warning(request, 'Could not change your display name.')
        elif request.POST.get('action') == 'delete_account':
            delete_account_form = DeleteAccountForm(request.POST, error_class=PlainErrorList)
            if delete_account_form.is_valid():
                user = User.objects.get(pk=request.user.id)
                # deletion requires typing the literal confirmation string 'KnoAtom'
                if user and delete_account_form.cleaned_data['confirmation'] == 'KnoAtom':
                    user.delete()
                    auth_logout(request)
                    messages.success(request, 'Your account has been deleted. Thank for your time! --KnoAtom Staff')
                    return HttpResponseRedirect(reverse('home'))
                else:
                    messages.warning(request, 'The confirmation was not correct, or we could not find your account. Sorry, try again.')
            else:
                messages.warning(request, 'We could not delete your account.')
    t = loader.get_template('account/index.html')
    c = RequestContext(request, {
        'breadcrumbs': [{'url': reverse('home'), 'title': 'Home'}, {'url':reverse('account'), 'title': 'Account'}],
        'password_form': password_form,
        'username_form': username_form,
        'delete_account_form': delete_account_form,
        'parent_categories': Category.objects.filter(parent=None),
    })
    return HttpResponse(t.render(c))
def forgot_password(request):
    """Password-reset view: generate a random password and email it to the user.

    Responds identically whether or not the email is on file, so the form
    cannot be used to probe for registered addresses.
    """
    # NOTE(review): request.user.is_authenticated() is a method call on old
    # Django; it became a property in Django >= 1.10.
    if request.user.is_authenticated():
        return HttpResponseRedirect(reverse('home'))
    if request.method == 'POST':
        form = ForgotPasswordForm(request.POST, error_class=PlainErrorList)
        if form.is_valid():
            user_check = User.objects.filter(email=form.cleaned_data['email'])
            if user_check.count() == 1:
                user = User.objects.get(email=form.cleaned_data['email'])
                if user:
                    logging.debug('Changing password for %s' % user)
                    # 10-char random alphanumeric replacement password
                    new_password = ''.join(random.choice(string.ascii_uppercase + string.digits + string.ascii_lowercase) for x in range(10))
                    send_mail('KnoAtom Password Reset', 'You requested to reset your password at knoatom.eecs.umich.edu. Here is your new password: ' + new_password + '\n\nIf you did not request this change, contact us immediatly.\n\n-- The Management', 'knoatom-webmaster@umich.edu', [user.email, 'knoatom-webmaster@umich.edu'])
                    user.set_password(new_password)
                    user.save()
                    # NOTE(review): this logs the plaintext password; 'random'
                    # is also not a cryptographic source -- 'secrets' would be.
                    logging.debug('Successfully changed password for %s: %s' % (user, new_password))
            messages.success(request, 'If we have your email on file, you should expect a password reset within a couple minutes to appear in your inbox.')
            return HttpResponseRedirect(reverse('login'))
    else:
        form = ForgotPasswordForm(error_class=PlainErrorList)
    t = loader.get_template('account/forgot_password.html')
    c = RequestContext(request, {
        'breadcrumbs': [{'url': reverse('home'), 'title': 'Home'}, {'url':reverse('login'), 'title': 'Login'}],
        'login_form': form,
        'parent_categories': Category.objects.filter(parent=None),
    })
    return HttpResponse(t.render(c))
def login(request):
    """Authenticate a user by email + password.

    On success redirects to the form's `redirect` target (captured from
    `?next=`) or to home; inactive accounts are bounced back to the login
    page with a warning.
    """
    if request.user.is_authenticated():
        return HttpResponseRedirect(reverse('home'))
    if request.method == 'POST':
        form = LoginForm(request.POST, error_class=PlainErrorList)
        if form.is_valid():
            # SECURITY: never log the submitted plaintext password.
            logging.debug('Trying to log in %s' % form.cleaned_data['email'])
            users = User.objects.filter(email=form.cleaned_data['email'].strip())
            if users.count() == 1:
                u = users[0]
                # The account's username (not email) is what authenticate() needs.
                user = authenticate(username=u.username, password=form.cleaned_data['password'])
                if user is not None:
                    logging.debug('Trying to log in user %s' % user)
                    if user.is_active == 0:
                        messages.warning(request, 'Please activate your account before you log in. Contact knoatom-webmaster@umich.edu if you need further assistance.')
                        return HttpResponseRedirect(reverse('login'))
                    auth_login(request, user)
                    # Honour a post-login redirect captured from ?next.
                    if form.cleaned_data['redirect']:
                        return HttpResponseRedirect(form.cleaned_data['redirect'])
                    return HttpResponseRedirect(reverse('home'))
            logging.debug('Could not find account %s' % form.cleaned_data['email'])
            messages.warning(request, 'Could not authenticate you. Try again.')
    else:
        form = LoginForm(initial={'redirect': request.GET.get('next', None),}, error_class=PlainErrorList)
    t = loader.get_template('account/login.html')
    c = RequestContext(request, {
        'breadcrumbs': [{'url': reverse('home'), 'title': 'Home'}, {'url':reverse('login'), 'title': 'Login'}],
        'login_form': form,
        'parent_categories': Category.objects.filter(parent=None),
    })
    return HttpResponse(t.render(c))
@login_required()
def logout(request):
    """Log the current user out and redirect to the login page."""
    auth_logout(request)
    return HttpResponseRedirect(reverse('login'))
def register(request):
    """Create a new, inactive account and email a validation link.

    The email address doubles as the username. The validation token is
    md5(email + date_joined with fractional seconds stripped); validate()
    must compute the same digest for the link to work.
    """
    if request.user.is_authenticated():
        return HttpResponseRedirect(reverse('home'))
    if request.method == 'POST':
        form = RegisterForm(request.POST, error_class=PlainErrorList)
        if form.is_valid():
            email_search = User.objects.filter(email=form.cleaned_data['email'])
            if len(email_search) > 0:
                messages.warning(request, 'Could not register you. Email is already registered.')
            if form.cleaned_data['password'] != form.cleaned_data['password_confirmation']:
                messages.warning(request, 'Passwords did not match. Please try again.')
            if len(email_search) == 0 and form.cleaned_data['password'] == form.cleaned_data['password_confirmation']:
                # create_user(username, email, password): email used for both.
                user = User.objects.create_user(form.cleaned_data['email'], form.cleaned_data['email'], form.cleaned_data['password']);
                user.first_name = form.cleaned_data['firstname']
                user.last_name = form.cleaned_data['lastname']
                # Account stays inactive until the emailed link is visited.
                user.is_active = False
                user.save()
                # Token = md5 of email + join timestamp without microseconds
                # (the .split('.')[0] drops the fractional part).
                m = hashlib.md5()
                m.update(user.email + str(user.date_joined).split('.')[0])
                send_mail('KnoAtom Registration', 'You have successfully registered at knoatom.eecs.umich.edu with the username ' + user.username + '. Please validate your account by going to ' + request.build_absolute_uri('validate') + '?email=' + user.email + '&validation=' + m.hexdigest() + ' . If you did not process this registration, please contact us as soon as possible.\n\n-- The Management', 'knoatom-webmaster@umich.edu', [user.email])
                messages.success(request, 'You have been registered. Please login to continue.')
                return HttpResponseRedirect(reverse('login'))
            # Reached only when one of the checks above already warned.
            messages.warning(request, 'Could not register you. Try again.')
    else:
        form = RegisterForm(error_class=PlainErrorList)
    t = loader.get_template('account/register.html')
    c = RequestContext(request, {
        'breadcrumbs': [{'url': reverse('home'), 'title': 'Home'}, {'url':reverse('register'), 'title': 'Register'}],
        'register_form': form,
        'parent_categories': Category.objects.filter(parent=None),
    })
    return HttpResponse(t.render(c))
def validate(request):
    """Activate an account from the emailed validation link.

    Expects ?email=...&validation=<token>, where the token is the
    md5(email + date_joined with fractional seconds stripped) digest
    generated in register().
    """
    if request.user.is_authenticated():
        messages.warning(request, 'You are already confirmed.')
        return HttpResponseRedirect(reverse('home'))
    if request.GET.get('validation', None) and request.GET.get('email', None):
        # filter().first() avoids a 500 (DoesNotExist) on unknown emails.
        user = User.objects.filter(email=request.GET.get('email')).first()
        m = hashlib.md5()
        if user:
            # BUGFIX: must mirror register(), which strips the fractional
            # seconds from date_joined before hashing; hashing the full
            # timestamp made every link with microseconds fail validation.
            m.update(user.email + str(user.date_joined).split('.')[0])
        if user and m.hexdigest() == request.GET.get('validation'):
            user.is_active = True
            user.save()
            messages.success(request, 'Thank you for validating your email!')
            return HttpResponseRedirect(reverse('account'))
        else:
            messages.warning(request, 'There was an error processing your validation.')
            return HttpResponseRedirect(reverse('login'))
    messages.warning(request, 'Your reached a page in an invalid manner.')
    return HttpResponseRedirect(reverse('home'))
|
from sklearn.naive_bayes import MultinomialNB
from sklearn.pipeline import make_pipeline
from sklearn.model_selection import train_test_split
from sklearn.model_selection import cross_val_score
from sklearn import metrics
import numpy as np
import pandas as pd
import jieba
from sklearn.feature_extraction.text import CountVectorizer
from snownlp import SnowNLP
# 清洗数据,通过jieba分词
def word_clean(mytext):
    """Tokenise Chinese text with jieba and rejoin the tokens with spaces."""
    tokens = jieba.lcut(mytext)
    return ' '.join(tokens)
def get_sentiment(text):
    """Binary SnowNLP sentiment: 1 when the positive probability exceeds 0.5."""
    score = SnowNLP(text).sentiments
    return int(score > 0.5)
# 使用贝叶斯预测分类
def naive_bayes(data_path, categories):
    """Train and evaluate a Multinomial Naive Bayes sentiment classifier,
    then compare it against SnowNLP's built-in sentiment scorer.

    Args:
        data_path: path to a headerless CSV whose single column holds
            "<score>\\t<comment>" rows.
        categories: class display names for the reports; when exactly two
            are given, scores are binarised (score > 2 -> 1, else 0).
    """
    df = pd.read_csv(data_path, header=None, names=['score_comment'])
    data_unique = df.drop_duplicates()
    split_df = pd.DataFrame(data_unique.score_comment.str.split('\t').tolist(), columns=["score", "comment"])
    # .copy() prevents pandas' SettingWithCopyWarning when the
    # 'cutted_comment' column is added below.
    x = split_df[['comment']].copy()
    x['cutted_comment'] = x.comment.apply(word_clean)
    if len(categories) == 2:
        # Binarise: scores above 2 are positive (1), the rest negative (0).
        split_df['score'] = split_df['score'].map(lambda s: 1 if int(s[0]) > 2 else 0)
    y = split_df.score
    # 80/20 train/test split with a fixed seed for reproducibility.
    x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=20)
    nb = MultinomialNB()
    # Load stop words; 'with' guarantees the file handle is closed.
    with open('data/stopwords.txt', encoding='utf-8') as f:
        stopwords = [line.strip() for line in f]
    max_df = 0.8  # drop terms appearing in >80% of documents (too common)
    min_df = 3    # drop terms appearing in <3 documents (too rare)
    vect = CountVectorizer(max_df=max_df, min_df=min_df, stop_words=frozenset(stopwords))
    # Chain vectorizer and classifier into one pipeline.
    pipe = make_pipeline(vect, nb)
    # 5-fold cross-validated accuracy on the training split.
    cross_result = cross_val_score(pipe, x_train.cutted_comment, y_train, cv=5, scoring='accuracy').mean()
    print('交叉验证的准确率:' + str(cross_result))
    # Fit on the full training split and predict the held-out test split.
    pipe.fit(x_train.cutted_comment, y_train)
    y_pred = pipe.predict(x_test.cutted_comment)
    print("Precision, Recall and F1-Score=====")
    print(metrics.classification_report(y_test, y_pred, target_names=categories))
    print("Confusion Matrix...")
    cm = metrics.confusion_matrix(y_test, y_pred)
    print(cm)
    print('\nPython自带情感分析方法结果:')
    # Baseline: SnowNLP's built-in sentiment score thresholded at 0.5.
    y_pred_snownlp = [get_sentiment(sentence) for sentence in x_test.cutted_comment]
    msg = 'Test Acc: {0:>7.4%}'
    # Accuracy: count positions where the true label equals the prediction.
    sum_equal = sum(label == pred for label, pred in zip(y_test, y_pred_snownlp))
    print(msg.format(sum_equal / len(y_pred_snownlp)))
    print("Precision, Recall and F1-Score=====")
    print(metrics.classification_report(y_test, y_pred_snownlp, target_names=categories))
    print("Confusion Matrix...")
    print(metrics.confusion_matrix(y_test, y_pred_snownlp))
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Author: Spencer Caplan
# Department of Linguistics, University of Pennsylvania
# Contact: spcaplan@sas.upenn.edu
import sys, math, os, subprocess, glob, operator, collections
reload(sys)
sys.setdefaultencoding('utf-8')
import unicodedata
from unicodedata import normalize
#phoneStringSearch = 'sIs'
# Phone substring that must appear at the END of a word's phone string.
phoneStringSearch = 'sEs'
searchLength = len(phoneStringSearch)
# phones -> orthographic word, for every lexicon entry read in
lexiconDict = {}
# word -> frequency (the CELEX Cob column, kept as a string)
freqDict = {}
# phones -> word, restricted to entries ending in phoneStringSearch
searchMatchDict = {}
# CELEX column layout of the input CSV:
# IdNum, Head, Cob, PronCnt, PronStatus, PhonStrsDISC, PhonCVBr, PhonSylBCLX
# 7, aback, 59, 1, P, @-'b{k, [V][CVC], [@][b&k]
def removeStressAndBreaks(inputWord):
    """Strip syllable-break and stress markers (-, ', ") from a DISC string."""
    cleanedWord = inputWord
    for marker in ('-', '\'', '\"'):
        cleanedWord = cleanedWord.replace(marker, '')
    return cleanedWord
def readInLexicon(celexSource):
    """Populate the module-level lexicon dicts from a CELEX-style CSV.

    Rows with at least 8 comma-separated fields contribute
    phones -> word (lexiconDict) and word -> frequency (freqDict)
    entries; words whose cleaned phone string ends in phoneStringSearch
    are also recorded in searchMatchDict.
    """
    with open(celexSource, 'r') as celexFile:
        for currLine in celexFile:
            currTokens = currLine.split(',')
            if len(currTokens) < 8:
                continue
            currWord = currTokens[1]
            currFreq = currTokens[2]
            currPhones = removeStressAndBreaks(currTokens[5])
            lexiconDict[currPhones] = currWord
            freqDict[currWord] = currFreq
            if currPhones.endswith(phoneStringSearch):
                searchMatchDict[currPhones] = currWord
##
## Main method block
##
# Usage: script.py <celex_csv>
# Loads the lexicon, then reports every entry whose cleaned phone string
# ends in `phoneStringSearch`.  (Python 2 print statements.)
if __name__=="__main__":
    # reading in epl.csv
    celexFileSource = sys.argv[1]
    #print celexFileSource
    readInLexicon(celexFileSource)
    print 'Num words:' + str(len(lexiconDict.keys()))
    print 'Searching for: ' + phoneStringSearch
    print 'Num matches: ' + str(len(searchMatchDict)) + '\n'
    # Dump each match as "word, phones, frequency".
    for currMatch in searchMatchDict.keys():
        currWord = searchMatchDict[currMatch]
        currFreq = freqDict[currWord]
        toPrint = currWord + ', ' + currMatch + ', ' + currFreq
        print toPrint
|
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from pants.jvm.resolve.jvm_tool import JvmToolBase
from pants.option.option_types import BoolOption, SkipOption
from pants.util.strutil import softwrap
class GoogleJavaFormatSubsystem(JvmToolBase):
    """Pants subsystem configuring the Google Java Format JVM tool."""
    options_scope = "google-java-format"
    name = "Google Java Format"
    help = "Google Java Format (https://github.com/google/google-java-format)"
    # Tool version and Maven coordinates; {version} is interpolated from
    # default_version (or a user override).
    default_version = "1.13.0"
    default_artifacts = ("com.google.googlejavaformat:google-java-format:{version}",)
    # Packaged lockfile used when the user has not generated their own.
    default_lockfile_resource = (
        "pants.backend.java.lint.google_java_format",
        "google_java_format.default.lockfile.txt",
    )
    # Lets users skip this tool for the `fmt` and `lint` goals.
    skip = SkipOption("fmt", "lint")
    aosp = BoolOption(
        default=False,
        help=softwrap(
            """
            Use AOSP style instead of Google Style (4-space indentation).
            ("AOSP" is the Android Open Source Project.)
            """
        ),
    )
|
def owl_pic(text):
    """Build a symmetrical owl portrait around the face ''0v0''.

    The left plumage keeps only the characters 8WTYUIOAHXVM from the
    uppercased input (in order); the right plumage mirrors the left.
    """
    feathers = "8WTYUIOAHXVM"
    wing = "".join(ch for ch in text.upper() if ch in feathers)
    return "{}{}{}".format(wing, "''0v0''", wing[::-1])
'''
To pass the series of gates guarded by the owls, Kenneth needs to present them
each with a highly realistic portrait of one. Unfortunately, he is absolutely
rubbish at drawing, and needs some code to return a brand new portrait with
a moment's notice.
All owl heads look like this:
''0v0''
Such beautiful eyes! However, they differ in their plumage, which is always
symmetrical, eg.:
VVHVAV''0v0''VAVHVV
or
YYAYAH8XH''0v0''HX8HAYAYY
So Kenneth needs a method that will take a garble of text generated by mashing
at his keyboard (numbers and letters, but he knows how to avoid punctuation etc.)
for a bit and give him a symmetrical owl with a lovely little face, with a truly
symmetrical plumage made of uppercase letters and numbers.
(To be clear, the acceptable characters for the plumage are 8,W,T,Y,U,I,O,A,H,X,V and M.)
'''
|
def salgan(numero):
    """Print the three-line chant; the last line depends on `numero`.

    The first two lines are always "revienten" and the empty suffix;
    the closer is "paquetes" when numero == 1, otherwise "idiotas".
    """
    prefix = "salgan al sol, "
    words = ["revienten", "", "idiotas", "paquetes"]
    for word in words[:2]:
        print(prefix + word)
    closer = words[3] if numero == 1 else words[2]
    print(prefix + closer)
|
# Copyright 2020 Pulser Development Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from collections import defaultdict
from dataclasses import dataclass, field
from itertools import chain, combinations
from typing import Any, Optional, Union, cast
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.axes import Axes
from matplotlib.figure import Figure
from scipy.interpolate import CubicSpline
import pulser
from pulser import Register, Register3D
from pulser.channels.base_channel import Channel
from pulser.pulse import Pulse
from pulser.register.base_register import BaseRegister
from pulser.sampler.sampler import sample
from pulser.sampler.samples import ChannelSamples, SequenceSamples
from pulser.waveforms import InterpolatedWaveform
# Color scheme: one color per curve, in CURVES_ORDER.
COLORS = ["darkgreen", "indigo", "#c75000"]
# Drawing order of the per-channel curves (indexes COLORS and LABELS).
CURVES_ORDER = ("amplitude", "detuning", "phase")
# Per-channel figure height ratio, keyed by how many axes the channel shows.
SIZE_PER_WIDTH = {1: 3, 2: 4, 3: 5}
# Y-axis labels, in CURVES_ORDER.
LABELS = [
    r"$\Omega$ (rad/µs)",
    r"$\delta$ (rad/µs)",
    r"$\varphi$ / 2π",
]
class EOMSegment:
    """The class to mark an EOM slot."""

    def __init__(self, ti: int | None = None, tf: int | None = None) -> None:
        """Class is defined from its start and end value."""
        self.ti = ti
        self.tf = tf
        # Fixed drawing style for all EOM markers.
        self.color = "steelblue"
        self.alpha = 0.3

    @property
    def isempty(self) -> bool:
        """Defines if the class is empty."""
        return self.ti is None or self.tf is None

    @property
    def nvspan(self) -> int:
        """Defines the number of points in the slot."""
        return cast(int, self.tf) - cast(int, self.ti)

    def draw(self, ax: Axes) -> None:
        """Draws a rectangle between the start and end value."""
        if not self.isempty:
            ax.axvspan(
                self.ti,
                self.tf,
                color=self.color,
                alpha=self.alpha,
                # Keep the shading behind the plotted curves.
                zorder=-100,
            )

    def smooth_draw(self, ax: Axes, decreasing: bool = False) -> None:
        """Draws a rectangle with an increasing/decreasing opacity."""
        if not self.isempty:
            # One thin span per point, with alpha ramping linearly:
            # 0 -> full alpha when increasing, full -> 0 when decreasing.
            for i in range(self.nvspan):
                ax.axvspan(
                    cast(int, self.ti) + i,
                    cast(int, self.ti) + i + 1,
                    facecolor=self.color,
                    alpha=self.alpha
                    * (
                        decreasing + (-1) ** decreasing * (i + 1) / self.nvspan
                    ),
                    zorder=-100,
                )
            # NOTE(review): Axes.axvline takes ymin/ymax as axes *fractions*
            # in [0, 1]; passing data-coordinate values from get_ylim() here
            # looks unintended — confirm against matplotlib docs.
            ax.axvline(
                self.tf if decreasing else self.ti,
                ax.get_ylim()[0],
                ax.get_ylim()[1],
                color=self.color,
                alpha=self.alpha / 2.0,
            )
@dataclass
class ChannelDrawContent:
    """The contents for drawing a single channel."""

    # Channel samples to plot (already extended to the shown duration).
    samples: ChannelSamples
    # Maps "initial" or a (ti, tf) window to the qubits targeted then.
    target: dict[Union[str, tuple[int, int]], Any]
    eom_intervals: list[EOMSegment]
    eom_start_buffers: list[EOMSegment]
    eom_end_buffers: list[EOMSegment]
    # Interpolation points per quantity ("amplitude"/"detuning");
    # filled in afterwards by draw_sequence().
    interp_pts: dict[str, list[list[float]]] = field(default_factory=dict)

    def __post_init__(self) -> None:
        # Phase is drawn in turns (units of 2*pi), not radians.
        self.samples.phase = self.samples.phase / (2 * np.pi)
        # Curve name -> ChannelSamples attribute holding its data.
        self._samples_from_curves = {
            "amplitude": "amp",
            "detuning": "det",
            "phase": "phase",
        }
        # Amplitude is always drawn; the others are enabled on demand.
        self.curves_on = {"amplitude": True, "detuning": False, "phase": False}

    @property
    def n_axes_on(self) -> int:
        """The number of axes to draw for this channel."""
        return sum(self.curves_on.values())

    def get_input_curves(self) -> list[np.ndarray]:
        """The samples for the curves, as programmed."""
        return self._give_curves_from_samples(self.samples)

    def get_output_curves(self, ch_obj: Channel) -> list[np.ndarray]:
        """The modulated samples for the curves."""
        mod_samples = self.samples.modulate(ch_obj)
        return self._give_curves_from_samples(mod_samples)

    def interpolate_curves(
        self, curves: list[np.ndarray], sampling_rate: float
    ) -> list[np.ndarray]:
        """The curves with a fractional sampling rate."""
        # Subsample at `sampling_rate`, then rebuild a full-resolution
        # curve via a cubic spline through the kept points.
        indices = np.linspace(
            0,
            self.samples.duration,
            num=int(sampling_rate * self.samples.duration),
            endpoint=False,
            dtype=int,
        )
        sampled_curves = [curve[indices] for curve in curves]
        t = np.arange(self.samples.duration)
        return [CubicSpline(indices, sc)(t) for sc in sampled_curves]

    def curves_on_indices(self) -> list[int]:
        """The indices of the curves to draw."""
        return [i for i, qty in enumerate(CURVES_ORDER) if self.curves_on[qty]]

    def _give_curves_from_samples(
        self, samples: ChannelSamples
    ) -> list[np.ndarray]:
        # Curves returned in CURVES_ORDER: amplitude, detuning, phase.
        return [
            getattr(samples, self._samples_from_curves[qty])
            for qty in CURVES_ORDER
        ]
def gather_data(
    sampled_seq: SequenceSamples, shown_duration: Optional[int] = None
) -> dict:
    """Collects the whole sequence data for plotting.

    Args:
        sampled_seq: The samples of a sequence of operations on a device.
        shown_duration: If present, is the total duration to be shown in
            the X axis.

    Returns:
        The data to plot: one ChannelDrawContent per channel, plus the
        "total_duration" key and, when present, the "measurement" basis.
    """
    # The minimum time axis length is 100 ns
    total_duration = max(sampled_seq.max_duration, 100, shown_duration or 100)
    data: dict[str, Any] = {}
    for ch, ch_samples in sampled_seq.channel_samples.items():
        target: dict[Union[str, tuple[int, int]], Any] = {}
        # Extracting the EOM Buffers
        eom_intervals = [
            EOMSegment(eom_interval[0], eom_interval[1])
            for eom_interval in ch_samples.get_eom_mode_intervals()
        ]
        # Last eom interval is extended if eom mode not disabled at the end
        if (
            len(eom_intervals) > 0
            and ch_samples.duration == eom_intervals[-1].tf
        ):
            eom_intervals[-1].tf = total_duration
        # sampling the channel schedule
        extended_samples = ch_samples.extend_duration(total_duration)
        eom_start_buffers = [
            EOMSegment(eom_interval[0], eom_interval[1])
            for eom_interval in ch_samples.eom_start_buffers
        ]
        eom_end_buffers = [
            EOMSegment(eom_interval[0], eom_interval[1])
            for eom_interval in ch_samples.eom_end_buffers
        ]
        for time_slot in ch_samples.target_time_slots:
            # ti == -1 marks the initial target declaration.
            if time_slot.ti == -1:
                target["initial"] = time_slot.targets
                continue
            target[(time_slot.ti, time_slot.tf - 1)] = time_slot.targets
        # Store everything
        data[ch] = ChannelDrawContent(
            extended_samples,
            target,
            eom_intervals,
            eom_start_buffers,
            eom_end_buffers,
        )
    if sampled_seq._measurement is not None:
        data["measurement"] = sampled_seq._measurement
    data["total_duration"] = total_duration
    return data
def _draw_channel_content(
    sampled_seq: SequenceSamples,
    register: Optional[BaseRegister] = None,
    sampling_rate: Optional[float] = None,
    draw_phase_area: bool = False,
    draw_phase_shifts: bool = False,
    draw_input: bool = True,
    draw_modulation: bool = False,
    draw_phase_curve: bool = False,
    shown_duration: Optional[int] = None,
) -> tuple[Figure | None, Figure, Any, dict]:
    """Draws samples of a sequence.

    Args:
        sampled_seq: The input samples of a sequence of operations.
        register: If present, draw the register before the pulse
            sequence, with a visual indication (square halo) around the qubits
            masked by the SLM.
        sampling_rate: Sampling rate of the effective pulse used by
            the solver. If present, plots the effective pulse alongside the
            input pulse.
        draw_phase_area: Whether phase and area values need to be shown
            as text on the plot, defaults to False. If `draw_phase_curve=True`,
            phase values are ommited.
        draw_phase_shifts: Whether phase shift and reference information
            should be added to the plot, defaults to False.
        draw_input: Draws the programmed pulses on the channels, defaults
            to True.
        draw_modulation: Draws the expected channel output, defaults to
            False. If the channel does not have a defined 'mod_bandwidth', this
            is skipped unless 'draw_input=False'.
        draw_phase_curve: Draws the changes in phase in its own curve (ignored
            if the phase doesn't change throughout the channel).
        shown_duration: Total duration to be shown in the X axis.

    Returns:
        The register figure (None when no register is given), the sequence
        figure, the per-channel axes, and the gathered plot data.
    """

    def phase_str(phi: float) -> str:
        """Formats a phase value for printing."""
        value = (((phi + np.pi) % (2 * np.pi)) - np.pi) / np.pi
        if value == -1:
            return r"$\pi$"
        elif value == 0:
            return "0"  # pragma: no cover - just for safety
        else:
            return rf"{value:.2g}$\pi$"

    n_channels = len(sampled_seq.channels)
    if not n_channels:
        raise RuntimeError("Can't draw an empty sequence.")
    data = gather_data(sampled_seq, shown_duration)
    total_duration = data["total_duration"]
    # Long sequences (>10 µs) are drawn on a µs-scaled time axis.
    time_scale = 1e3 if total_duration > 1e4 else 1
    # Enable the extra curves only when they carry non-zero data.
    for ch in sampled_seq.channels:
        if np.count_nonzero(data[ch].samples.det) > 0:
            data[ch].curves_on["detuning"] = True
        if draw_phase_curve and np.count_nonzero(data[ch].samples.phase) > 0:
            data[ch].curves_on["phase"] = True
    # Boxes for qubit and phase text
    q_box = dict(boxstyle="round", facecolor="orange")
    ph_box = dict(boxstyle="round", facecolor="ghostwhite")
    area_ph_box = dict(boxstyle="round", facecolor="ghostwhite", alpha=0.7)
    slm_box = dict(boxstyle="round", alpha=0.4, facecolor="grey", hatch="//")
    eom_box = dict(boxstyle="round", facecolor="lightsteelblue")
    # Draw masked register
    if register:
        pos = np.array(register._coords)
        if isinstance(register, Register3D):
            # 3D registers are shown as three 2D plane projections.
            labels = "xyz"
            fig_reg, axes_reg = register._initialize_fig_axes_projection(
                pos,
                blockade_radius=35,
                draw_half_radius=True,
            )
            fig_reg.get_layout_engine().set(w_pad=6.5)
            for ax_reg, (ix, iy) in zip(
                axes_reg, combinations(np.arange(3), 2)
            ):
                register._draw_2D(
                    ax=ax_reg,
                    pos=pos,
                    ids=register._ids,
                    plane=(ix, iy),
                    masked_qubits=sampled_seq._slm_mask.targets,
                )
                ax_reg.set_title(
                    "Masked register projected onto\n the "
                    + labels[ix]
                    + labels[iy]
                    + "-plane"
                )
        elif isinstance(register, Register):
            fig_reg, ax_reg = register._initialize_fig_axes(
                pos,
                blockade_radius=35,
                draw_half_radius=True,
            )
            register._draw_2D(
                ax=ax_reg,
                pos=pos,
                ids=register._ids,
                masked_qubits=sampled_seq._slm_mask.targets,
            )
            ax_reg.set_title("Masked register", pad=10)
    # One grid row per channel, sized by how many curves it shows.
    ratios = [
        SIZE_PER_WIDTH[data[ch].n_axes_on] for ch in sampled_seq.channels
    ]
    fig = plt.figure(
        constrained_layout=False,
        figsize=(20, sum(ratios)),
    )
    gs = fig.add_gridspec(n_channels, 1, hspace=0.075, height_ratios=ratios)
    ch_axes = {}
    for i, (ch, gs_) in enumerate(zip(sampled_seq.channels, gs)):
        # Invisible outer axes: used only to carry the channel label.
        ax = fig.add_subplot(gs_)
        for side in ("top", "bottom", "left", "right"):
            ax.spines[side].set_color("none")
        ax.tick_params(
            labelcolor="w", top=False, bottom=False, left=False, right=False
        )
        ax.set_ylabel(ch, labelpad=40, fontsize=18)
        subgs = gs_.subgridspec(data[ch].n_axes_on, 1, hspace=0.0)
        ch_axes[ch] = [
            fig.add_subplot(subgs[i, :]) for i in range(data[ch].n_axes_on)
        ]
        for j, ax in enumerate(ch_axes[ch]):
            ax.axvline(0, linestyle="--", linewidth=0.5, color="grey")
            if j > 0:
                ax.spines["top"].set_visible(False)
            if j < len(ch_axes[ch]) - 1:
                ax.spines["bottom"].set_visible(False)
            # Only the bottom-most axis of the last channel gets x labels.
            if i < n_channels - 1 or j < len(ch_axes[ch]) - 1:
                ax.tick_params(
                    axis="x",
                    which="both",
                    bottom=True,
                    top=False,
                    labelbottom=False,
                    direction="in",
                )
            else:
                unit = "ns" if time_scale == 1 else r"$\mu s$"
                ax.set_xlabel(f"t ({unit})", fontsize=12)
    # The time axis of all channels is the same
    t = np.arange(total_duration) / time_scale
    final_t = t[-1]
    t_min = -final_t * 0.03
    t_max = final_t * 1.05
    for ch, axes in ch_axes.items():
        ch_data = data[ch]
        ch_obj = sampled_seq._ch_objs[ch]
        ch_eom_intervals = data[ch].eom_intervals
        ch_eom_start_buffers = data[ch].eom_start_buffers
        ch_eom_end_buffers = data[ch].eom_end_buffers
        basis = ch_obj.basis
        ys = ch_data.get_input_curves()
        ys_mod = [()] * 3
        yseff = [()] * 3
        draw_output = draw_modulation and (
            ch_obj.mod_bandwidth or not draw_input
        )
        if draw_output:
            ys_mod = ch_data.get_output_curves(ch_obj)
        if sampling_rate:
            curves = ys_mod if draw_output else ys
            yseff = ch_data.interpolate_curves(curves, sampling_rate)
        # Pool all curve variants to compute common axis limits.
        ref_ys = [
            list(chain.from_iterable(all_ys))
            for all_ys in zip(ys, ys_mod, yseff)
        ]
        max_amp = np.max(ref_ys[0])
        max_amp = 1 if max_amp == 0 else max_amp
        amp_top = max_amp * 1.2
        amp_bottom = min(0.0, *ref_ys[0])
        # Makes sure that [-1, 1] range is always represented
        det_max = max(*ref_ys[1], 1)
        det_min = min(*ref_ys[1], -1)
        det_range = det_max - det_min
        det_top = det_max + det_range * 0.15
        det_bottom = det_min - det_range * 0.05
        ax_lims = [
            (amp_bottom, amp_top),
            (det_bottom, det_top),
            (min(0.0, *ref_ys[2]), max(1.1, *ref_ys[2])),
        ]
        ax_lims = [ax_lims[i] for i in ch_data.curves_on_indices()]
        for ax, ylim in zip(axes, ax_lims):
            ax.set_xlim(t_min, t_max)
            ax.set_ylim(*ylim)
        # Plot each enabled curve (input, effective and/or modulated).
        for i, ax in zip(ch_data.curves_on_indices(), axes):
            if draw_input:
                ax.plot(t, ys[i], color=COLORS[i], linewidth=0.8)
            if sampling_rate:
                ax.plot(
                    t,
                    yseff[i],
                    color=COLORS[i],
                    linewidth=0.8,
                )
                ax.fill_between(t, 0, yseff[i], color=COLORS[i], alpha=0.3)
            elif draw_input:
                ax.fill_between(t, 0, ys[i], color=COLORS[i], alpha=0.3)
            if draw_output:
                if not sampling_rate:
                    ax.fill_between(
                        t,
                        0,
                        ys_mod[i][:total_duration],
                        color=COLORS[i],
                        alpha=0.3,
                        hatch="////",
                    )
                else:
                    ax.plot(
                        t,
                        ys_mod[i][:total_duration],
                        color=COLORS[i],
                        linestyle="dotted",
                    )
            special_kwargs = dict(labelpad=10) if i == 0 else {}
            ax.set_ylabel(LABELS[i], fontsize=14, **special_kwargs)
        # Annotate each pulse slot with its area (and optionally phase).
        if draw_phase_area:
            top = False  # Variable to track position of box, top or center.
            print_phase = not draw_phase_curve and any(
                np.any(ch_data.samples.phase[slot.ti : slot.tf] != 0)
                for slot in ch_data.samples.slots
            )
            for slot in ch_data.samples.slots:
                if sampling_rate:
                    area_val = (
                        np.sum(yseff[0][slot.ti : slot.tf]) * 1e-3 / np.pi
                    )
                else:
                    area_val = (
                        np.sum(ch_data.samples.amp[slot.ti : slot.tf])
                        * 1e-3
                        / np.pi
                    )
                phase_val = ch_data.samples.phase[slot.tf - 1]
                x_plot = (slot.ti + slot.tf) / 2 / time_scale
                target_slot_tf_list = [
                    target_slot.tf
                    for target_slot in sampled_seq.channel_samples[
                        ch
                    ].target_time_slots
                ]
                # Alternate box height so neighbouring labels don't overlap.
                if slot.ti in target_slot_tf_list or not top:
                    y_plot = np.max(ch_data.samples.amp[slot.ti : slot.tf]) / 2
                    top = True  # Next box at the top.
                elif top:
                    y_plot = np.max(ch_data.samples.amp[slot.ti : slot.tf])
                    top = False  # Next box at the center.
                area_fmt = (
                    r"A: $\pi$"
                    if round(area_val, 2) == 1
                    else rf"A: {area_val:.2g}$\pi$"
                )
                if not print_phase:
                    txt = area_fmt
                else:
                    phase_fmt = rf"$\phi$: {phase_str(phase_val)}"
                    txt = "\n".join([phase_fmt, area_fmt])
                axes[0].text(
                    x_plot,
                    y_plot,
                    txt,
                    fontsize=10,
                    ha="center",
                    va="center",
                    bbox=area_ph_box,
                )
        # Annotate targeted qubits and (optionally) phase references.
        target_regions = []  # [[start1, [targets1], end1],...]
        for coords in ch_data.target:
            targets = list(ch_data.target[coords])
            tgt_strs = [str(q) for q in targets]
            tgt_txt_y = max_amp * 1.1 - 0.25 * (len(targets) - 1)
            tgt_str = "\n".join(tgt_strs)
            if coords == "initial":
                x = t_min + final_t * 0.005
                target_regions.append([0, targets])
                if ch_obj.addressing == "Global":
                    axes[0].text(
                        x,
                        amp_top * 0.98,
                        "GLOBAL",
                        fontsize=13,
                        rotation=90,
                        ha="left",
                        va="top",
                        bbox=q_box,
                    )
                else:
                    axes[0].text(
                        x,
                        tgt_txt_y,
                        tgt_str,
                        fontsize=12,
                        ha="left",
                        bbox=q_box,
                    )
                phase = sampled_seq._basis_ref[basis][targets[0]].phase[0]
                if phase and draw_phase_shifts:
                    msg = r"$\phi=$" + phase_str(phase)
                    axes[0].text(
                        0,
                        max_amp * 1.1,
                        msg,
                        ha="left",
                        fontsize=12,
                        bbox=ph_box,
                    )
            else:
                ti, tf = np.array(coords) / time_scale
                target_regions[-1].append(ti)  # Closing previous regions
                target_regions.append(
                    [tf + 1 / time_scale, targets]
                )  # New one
                phase = sampled_seq._basis_ref[basis][targets[0]].phase[
                    tf * time_scale + 1
                ]
                # Grey out the retargeting window on every axis.
                for ax in axes:
                    ax.axvspan(ti, tf, alpha=0.4, color="grey", hatch="//")
                axes[0].text(
                    tf + final_t * 5e-3,
                    tgt_txt_y,
                    tgt_str,
                    ha="left",
                    fontsize=12,
                    bbox=q_box,
                )
                if phase and draw_phase_shifts:
                    msg = r"$\phi=$" + phase_str(phase)
                    wrd_len = len(max(tgt_strs, key=len))
                    x = tf + final_t * 0.01 * (wrd_len + 1)
                    axes[0].text(
                        x,
                        max_amp * 1.1,
                        msg,
                        ha="left",
                        fontsize=12,
                        bbox=ph_box,
                    )
        # Terminate the last open regions
        if target_regions:
            target_regions[-1].append(final_t)
        for start, targets_, end in (
            target_regions if draw_phase_shifts else []
        ):
            start = cast(float, start)
            targets_ = cast(list, targets_)
            end = cast(float, end)
            # All targets have the same ref, so we pick
            q = targets_[0]
            ref = sampled_seq._basis_ref[basis][q].phase
            if end != total_duration - 1 or "measurement" in data:
                end += 1 / time_scale
            for t_, delta in ref.changes(start, end, time_scale=time_scale):
                conf = dict(linestyle="--", linewidth=1.5, color="black")
                for ax in axes:
                    ax.axvline(t_, **conf)
                msg = "\u27F2 " + phase_str(delta)
                axes[0].text(
                    t_ - final_t * 8e-3,
                    max_amp * 1.1,
                    msg,
                    ha="right",
                    fontsize=14,
                    bbox=ph_box,
                )
        # Draw the EOM intervals
        for ch_eom_start_buffer, ch_eom_interval, ch_eom_end_buffer in zip(
            ch_eom_start_buffers, ch_eom_intervals, ch_eom_end_buffers
        ):
            for ax in axes:
                ch_eom_start_buffer.smooth_draw(ax, decreasing=False)
                ch_eom_interval.draw(ax)
                ch_eom_end_buffer.smooth_draw(ax, decreasing=True)
            # Label at the start of the buffer (or interval if no buffer).
            tgt_txt_x = ch_eom_start_buffer.ti or ch_eom_interval.ti
            tgt_txt_y = axes[0].get_ylim()[1]
            axes[0].text(
                tgt_txt_x,
                tgt_txt_y,
                "EOM",
                fontsize=12,
                ha="left",
                va="top",
                bbox=eom_box,
            )
        # Draw the SLM mask
        if sampled_seq._slm_mask.targets and sampled_seq._slm_mask.end:
            tf_m = sampled_seq._slm_mask.end
            for ax in axes:
                ax.axvspan(0, tf_m, color="black", alpha=0.1, zorder=-100)
            tgt_strs = [str(q) for q in sampled_seq._slm_mask.targets]
            tgt_txt_x = final_t * 0.005
            tgt_txt_y = axes[-1].get_ylim()[0]
            tgt_str = "\n".join(tgt_strs)
            axes[-1].text(
                tgt_txt_x,
                tgt_txt_y,
                tgt_str,
                fontsize=12,
                ha="left",
                bbox=slm_box,
            )
        # Mark the measurement window at the end of the time axis.
        hline_kwargs = dict(linestyle="-", linewidth=0.5, color="grey")
        if "measurement" in data:
            msg = f"Basis: {data['measurement']}"
            if len(axes) == 1:
                mid_ax = axes[0]
                mid_point = (amp_top + amp_bottom) / 2
                fontsize = 12
            else:
                mid_ax = axes[-1]
                mid_point = (
                    ax_lims[-1][1]
                    if len(axes) == 2
                    else ax_lims[-1][0] + sum(ax_lims[-1]) * 1.5
                )
                fontsize = 14
            for ax in axes:
                ax.axvspan(final_t, t_max, color="midnightblue", alpha=1)
            mid_ax.text(
                final_t * 1.025,
                mid_point,
                msg,
                ha="center",
                va="center",
                fontsize=fontsize,
                color="white",
                rotation=90,
            )
            hline_kwargs["xmax"] = 0.95
        # Separator lines between stacked axes of the same channel.
        for i, ax in enumerate(axes):
            if i > 0:
                ax.axhline(ax_lims[i][1], **hline_kwargs)
            if ax_lims[i][0] < 0:
                ax.axhline(0, **hline_kwargs)
    return (fig_reg if register else None, fig, ch_axes, data)
def draw_samples(
    sampled_seq: SequenceSamples,
    register: Optional[BaseRegister] = None,
    sampling_rate: Optional[float] = None,
    draw_phase_area: bool = False,
    draw_phase_shifts: bool = False,
    draw_phase_curve: bool = False,
) -> tuple[Figure | None, Figure]:
    """Draws a SequenceSamples.

    Args:
        sampled_seq: The input samples of a sequence of operations.
        register: If present, draw the register before the pulse
            sequence samples, with a visual indication (square halo)
            around the qubits masked by the SLM.
        sampling_rate: Sampling rate of the effective pulse used by
            the solver. If present, plots the effective pulse alongside the
            input pulse.
        draw_phase_area: Whether phase and area values need to be shown
            as text on the plot, defaults to False. If `draw_phase_curve=True`,
            phase values are ommited.
        draw_phase_shifts: Whether phase shift and reference information
            should be added to the plot, defaults to False.
        draw_phase_curve: Draws the changes in phase in its own curve (ignored
            if the phase doesn't change throughout the channel).

    Returns:
        The register figure (or None) and the samples figure.
    """
    # Show up to the latest slot end across all channels (None when there
    # are no channels, letting the drawing helper pick a default).
    last_slot_ends = [
        samples.slots[-1].tf
        for samples in sampled_seq.channel_samples.values()
    ]
    shown_duration = max(last_slot_ends, default=None)
    fig_reg, fig, _, _ = _draw_channel_content(
        sampled_seq,
        register,
        sampling_rate,
        draw_phase_area,
        draw_phase_shifts,
        draw_input=True,
        draw_modulation=False,
        draw_phase_curve=draw_phase_curve,
        shown_duration=shown_duration,
    )
    return (fig_reg, fig)
def draw_sequence(
    seq: pulser.sequence.Sequence,
    sampling_rate: Optional[float] = None,
    draw_phase_area: bool = False,
    draw_interp_pts: bool = True,
    draw_phase_shifts: bool = False,
    draw_register: bool = False,
    draw_input: bool = True,
    draw_modulation: bool = False,
    draw_phase_curve: bool = False,
) -> tuple[Figure | None, Figure]:
    """Draws the entire sequence.

    Args:
        seq: The input sequence of operations on a device.
        sampling_rate: Sampling rate of the effective pulse used by
            the solver. If present, plots the effective pulse alongside the
            input pulse.
        draw_phase_area: Whether phase and area values need to be shown
            as text on the plot, defaults to False. If `draw_phase_curve=True`,
            phase values are ommited.
        draw_interp_pts: When the sequence has pulses with waveforms of
            type InterpolatedWaveform, draws the points of interpolation on
            top of the respective waveforms (defaults to True).
        draw_phase_shifts: Whether phase shift and reference information
            should be added to the plot, defaults to False.
        draw_register: Whether to draw the register before the pulse
            sequence, with a visual indication (square halo) around the qubits
            masked by the SLM, defaults to False.
        draw_input: Draws the programmed pulses on the channels, defaults
            to True.
        draw_modulation: Draws the expected channel output, defaults to
            False. If the channel does not have a defined 'mod_bandwidth', this
            is skipped unless 'draw_input=False'.
        draw_phase_curve: Draws the changes in phase in its own curve (ignored
            if the phase doesn't change throughout the channel).

    Returns:
        The register figure (None unless draw_register is True) and the
        sequence figure.
    """
    # Sample the sequence and get the data to plot
    shown_duration = seq.get_duration(include_fall_time=draw_modulation)
    sampled_seq = sample(seq)
    (fig_reg, fig, ch_axes, data) = _draw_channel_content(
        sampled_seq,
        seq.register if draw_register else None,
        sampling_rate,
        draw_phase_area,
        draw_phase_shifts,
        draw_input,
        draw_modulation,
        draw_phase_curve,
        shown_duration,
    )
    # Gather additional data for sequence specific drawing
    for ch, sch in seq._schedule.items():
        interp_pts: defaultdict[str, list[list[float]]] = defaultdict(list)
        for slot in sch:
            # Skip the initial declaration and non-pulse slots.
            if slot.ti == -1 or slot.type in ["target", "delay"]:
                continue
            pulse = cast(Pulse, slot.type)
            for wf_type in ["amplitude", "detuning"]:
                wf = getattr(pulse, wf_type)
                if isinstance(wf, InterpolatedWaveform):
                    pts = wf.data_points
                    # Shift interpolation times to absolute sequence time.
                    pts[:, 0] += slot.ti
                    interp_pts[wf_type] += pts.tolist()
        if interp_pts:
            data[ch].interp_pts = dict(interp_pts)
    # Scatter the interpolation points over the matching curves.
    for ch, axes in ch_axes.items():
        ch_data = data[ch]
        if draw_interp_pts:
            for qty in ("amplitude", "detuning"):
                if qty in ch_data.interp_pts and ch_data.curves_on[qty]:
                    ind = CURVES_ORDER.index(qty)
                    pts = np.array(ch_data.interp_pts[qty])
                    axes[ind].scatter(pts[:, 0], pts[:, 1], color=COLORS[ind])
    return (fig_reg, fig)
|
from django.conf.urls import url
from . import views
# URL routing table for this app.
# NOTE(review): 'post_response/' and 'post_contact/' have no leading '^'
# anchor, so they match any path that ends with that suffix — confirm
# this is intentional before tightening.
urlpatterns = [
    url(r'^$', views.index),  # landing page
    url(r'post_response/$', views.post_response, name="post_response"),
    # NOTE(review): the '?' makes only the final character optional
    # ('action' or 'actio') — presumably '^action.*$' was intended; verify.
    url(r'^action?.*$', views.action),
    url(r'^thanks?.*$', views.thanks),
    url(r'^404$', views.error),  # explicit error page route
    url(r'post_contact/$', views.post_contact, name="post_contact")
]
|
from scipy.ndimage.morphology import binary_erosion
import numpy as np
def getQueryCount(ui, uc, qid, mm=0):
    """For every id in ``qid``, return the count stored for that id in the
    parallel arrays ``ui`` (unique ids) / ``uc`` (counts).

    Ids at or below the ignore value ``mm`` are skipped when building the
    lookup table, and query ids with no match resolve to ``mm``.  Uses a
    dense offset-indexed table to stay memory efficient.
    """
    if len(qid) == 0:
        return []
    valid = ui > mm
    lo = ui[valid].min()
    hi = max(ui.max(), qid.max())
    # Dense table indexed by (id - lo); unset slots keep the ignore value.
    table = mm * np.ones(1 + int(hi - lo), uc.dtype)
    table[ui[valid] - lo] = uc[valid]
    out = mm * np.ones(qid.shape, uc.dtype)
    in_range = np.logical_and(qid >= lo, qid <= hi)
    out[in_range] = table[qid[in_range] - lo]
    return out
def getSphericity(seg):
    """Compute a sphericity score for every segment id in the labelled
    volume ``seg`` simultaneously (https://en.wikipedia.org/wiki/Sphericity).

    The surface area of each segment is approximated by the number of
    voxels removed by a single binary erosion.  Segments whose surface
    estimate is zero (and the background id 0) receive the sentinel -1.
    Returns (segment ids, sphericity per id, volume per id).
    """
    eroded_mask = binary_erosion(seg > 0, iterations=1)
    seg_ids, seg_vol = np.unique(seg, return_counts=True)
    inner_ids, inner_vol = np.unique(eroded_mask * seg, return_counts=True)
    # Surface voxels = voxels stripped off by one erosion step.
    surface = seg_vol - getQueryCount(inner_ids, inner_vol, seg_ids)
    surface[seg_ids == 0] = 0  # background gets no score
    sphericity = - np.ones(seg_vol.shape)
    pos = surface > 0
    sphericity[pos] = (np.pi**(1./3) * ((6 * seg_vol[pos])**(2./3))) / surface[pos]
    return seg_ids, sphericity, seg_vol
|
import requests
import json
from business import Business, Review
from privatekey import api_key
class ApiError(Exception):
    """Raised when a Places API request does not succeed."""

    def __init__(self, value):
        # Keep the offending message/payload around for callers to inspect.
        self.value = value

    def __str__(self):
        return "{!r}".format(self.value)
# Takes in a query parameter, which should be a string with the address or place name
# Returns a business object with information about name, location, and place_id
# Takes in a query parameter, which should be a string with the address or place name
# Returns a business object with information about name, location, and place_id
def get_business_info(query):
    """Look up a place by free-text *query* via the Places "find place from
    text" endpoint and return a populated Business.

    Raises:
        ApiError: on a non-200 response, or when the API returns no
            candidates (which also covers the exhausted-daily-quota case
            the old TODO asked about).
    """
    api_url = "https://maps.googleapis.com/maps/api/place/findplacefromtext/json"
    # Let requests build the query string so user input is URL-encoded
    # (the old manual concatenation broke on spaces, '&', '#', etc.).
    params = {
        "input": query,
        "inputtype": "textquery",
        "key": api_key,
        "fields": "name,formatted_address,place_id",
    }
    response = requests.get(api_url, params=params)
    if response.status_code != 200:
        raise ApiError("An error with status code {} occurred" .format(response.status_code))
    json_response = response.json()  # store API info in business object
    print(json_response)
    candidates = json_response.get("candidates") or []
    if not candidates:
        # No match / quota exhausted: fail loudly instead of IndexError.
        raise ApiError("No candidates returned for query {!r}".format(query))
    business = Business()
    business.name = candidates[0]["name"]
    business.location = candidates[0]["formatted_address"]
    business.place_id = candidates[0]["place_id"]
    return business
# Takes in place_id parameter and returns a URL to the specified place in google maps
# "https://www.google.com/maps/search/?api=1&query=Google&query_place_id=ChIJN1t_tDeuEmsRUsoyG83frY4"
# Note: Maps URLs have a character limit of 2048 characters
# TODO: Add validation for character limit
def get_maps_url(place_id):
    """Return a Google Maps search URL pointing at *place_id*.

    Note: Maps URLs are limited to 2048 characters; length validation is
    still a TODO.
    """
    query = ""  # temp query, add as param later
    base = "https://www.google.com/maps/search/?api=1"
    return base + "&query=" + query + "&query_place_id=" + place_id
# Temporary data for testing (since maps API has a daily request quota of about 5)
business = Business()
business.name = "200 Waterloo Ave"
business.location = "200 Waterloo Ave, Guelph, ON N1H 3J5, Canada"
business.place_id = "ChIJn_pSxreaK4gRBaqeg1MbXa8"
# business = get_business_info("200 Waterloo Ave")
print(get_maps_url(business.place_id))
# NOTE(review): this empty-string lookup overwrites the stubbed business
# above and spends one of the limited API calls — presumably left in for
# manual testing; confirm before shipping.
business = get_business_info("")
print(business.name + "\n" + business.location + "\n" + business.place_id)
|
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
import sys
def init():
    """One-time OpenGL setup: white background, blue 3-px points and a 2-D
    orthographic projection spanning (0,0)-(600,600)."""
    glClearColor(1.0,1.0,1.0,0.0)   # white clear colour
    glColor3f(0.0,0.0,1.0)          # draw in blue
    glPointSize(3.0)
    glMatrixMode(GL_PROJECTION)
    glLoadIdentity()
    gluOrtho2D(0.0,600.0,0.0,600.0) # window coords map 1:1 to world coords
def drawcircle(r, xc, yc):
    """Rasterise a circle of radius ``r`` centred at (``xc``, ``yc``) with
    the midpoint (Bresenham) circle algorithm, plotting all eight
    symmetric octant points per step.

    Fixes over the previous version:
    * the two decision-parameter updates were independent ``if`` blocks,
      so one iteration could apply both (double-incrementing ``x``); the
      midpoint algorithm requires them to be mutually exclusive;
    * the seed pixel was drawn at absolute (0, r) instead of relative to
      the circle centre — the four cardinal points are now drawn around
      (xc, yc).
    """
    x, y = 0, r
    pk = 3 - 2 * r  # initial decision parameter
    # Cardinal points (the x == 0 case, never produced by the loop below).
    setpixel(xc, yc + r)
    setpixel(xc, yc - r)
    setpixel(xc + r, yc)
    setpixel(xc - r, yc)
    while x < y:
        if pk < 0:
            # Midpoint inside the circle: keep y.
            pk = pk + 4 * x + 6
        else:
            # Midpoint outside: step y inward.
            pk = pk + 4 * (x - y) + 10
            y = y - 1
        x = x + 1
        setpixel(xc + x, yc + y)
        setpixel(xc + x, yc - y)
        setpixel(xc - x, yc + y)
        setpixel(xc - x, yc - y)
        setpixel(xc + y, yc + x)
        setpixel(xc + y, yc - x)
        setpixel(xc - y, yc + x)
        setpixel(xc - y, yc - x)
def setpixel(x,y):
    """Plot a single point at window coordinates (x, y).

    NOTE(review): issuing glBegin/glEnd/glFlush per pixel is very slow for
    large radii — batching points into one glBegin block would be much
    faster; confirm before changing.
    """
    glBegin(GL_POINTS)
    glVertex2f(x,y)
    glEnd()
    glFlush()
|
import torch
import torch.nn as nn
from src.layers import *
class Generator(nn.Module):
    """1-D convolutional generator: gated input convolution, two
    downsampling stages, six residual blocks, two upsampling stages and a
    final output convolution.  Attribute names are kept stable so saved
    state dicts remain loadable."""

    def __init__(self):
        super(Generator, self).__init__()
        # Input stage: parallel content conv + gate conv (GLU-style gating).
        self.conv1 = nn.Conv1d(in_channels=128, out_channels=128,
                               kernel_size=15, stride=1, padding=7)
        self.conv1_gated = nn.Conv1d(in_channels=128, out_channels=128,
                                     kernel_size=15, stride=1, padding=7)
        # Two downsampling stages (128 -> 256 channels).
        self.downsample1 = DownSample_Gen(128, 6, 2, 256)
        self.downsample2 = DownSample_Gen(256, 6, 2, 256)
        # Six residual blocks at the bottleneck resolution.
        self.resblock1 = ResBlock(256, 3, 3, 512, 256, 1, 1)
        self.resblock2 = ResBlock(256, 3, 3, 512, 256, 1, 1)
        self.resblock3 = ResBlock(256, 3, 3, 512, 256, 1, 1)
        self.resblock4 = ResBlock(256, 3, 3, 512, 256, 1, 1)
        self.resblock5 = ResBlock(256, 3, 3, 512, 256, 1, 1)
        self.resblock6 = ResBlock(256, 3, 3, 512, 256, 1, 1)
        # Two upsampling stages back to 128 channels.
        self.upsample1 = UpSample(256, 5, 1, 512, 2)
        self.upsample2 = UpSample(256, 5, 1, 256, 2)
        self.conv2 = nn.Conv1d(in_channels=128, out_channels=128,
                               kernel_size=15, stride=1, padding=7)

    def forward(self, input):
        # Gated linear unit on the input stage.
        x = self.conv1(input) * torch.sigmoid(self.conv1_gated(input))
        x = self.downsample2(self.downsample1(x))
        for block in (self.resblock1, self.resblock2, self.resblock3,
                      self.resblock4, self.resblock5, self.resblock6):
            x = block(x)
        x = self.upsample2(self.upsample1(x))
        return self.conv2(x)
|
#i pledge my honor that i have abided by the Stevens Honor System - Rachel Flynn
import os
import csv
import matplotlib.pyplot as plt
def get_csv_file_path_list():
    """Return the full path of every file in the healthcare-sector CSV
    folder (hard-coded, shipped in the project zipfile)."""
    csv_dir = 'C:/Users/rayf1/Desktop/CS110project' #in zipfile
    return [csv_dir + '/' + name for name in os.listdir(csv_dir)]
def get_csv_file_path_list_2():
    """Return the full path of every file in the communications-sector CSV
    folder (hard-coded, shipped in the project zipfile)."""
    csv_dir_2 = 'C:/Users/rayf1/Desktop/communicationsector' #in zipfile
    return [csv_dir_2 + '/' + name for name in os.listdir(csv_dir_2)]
def get_csv_file_path_list_3():
    """Return the full path of every file in the industrial-sector CSV
    folder (hard-coded, shipped in the project zipfile)."""
    csv_dir_3 = 'C:/Users/rayf1/Desktop/industrialsector' #in zipfile
    return [csv_dir_3 + '/' + name for name in os.listdir(csv_dir_3)]
def get_ticker_from_file_path(file_path):
    """Extract the ticker symbol from a path like '.../AAPL.csv'."""
    file_name = file_path.rsplit('/', 1)[-1]
    return file_name[:-4]  # drop the 4-character '.csv' extension
def get_date_list_and_price_list_from_csv(csv_file_path):
    """Read a Yahoo-style price CSV and return ([dates], [close prices]).

    Column 0 is the date and column 4 the closing price; the first row is
    treated as a header and skipped.
    """
    date_list, close_list = [], []
    with open(csv_file_path) as csv_file:
        rows = csv.reader(csv_file)
        next(rows, None)  # skip the header row (no-op on an empty file)
        for row in rows:
            date_list.append(row[0])
            close_list.append(float(row[4]))
    return date_list, close_list
def graph_x_list_y_list(x_list, y_list, ticker):
    """Add one line (labelled *ticker*) to the current pyplot figure; the
    figure is decorated and shown later by update_and_show_graph_*."""
    plt.plot(x_list, y_list, label=ticker)
def update_and_show_graph_1():
    """Decorate and display the accumulated healthcare-sector figure."""
    # Thin the date axis to every 10th tick so the labels stay readable.
    axes = plt.gca()
    axes.set_xticks(axes.get_xticks()[::10])
    plt.xticks(rotation=90)
    plt.title('Stock Performance in the Healthcare Sector from 12.02.2019 to 11.27.2020')
    plt.xlabel('Date')
    plt.ylabel('Closing Price')
    plt.grid()
    # Park the legend outside the axes on the right.
    plt.legend(loc='center left', bbox_to_anchor=(1.0, 0.5))
    plt.show()
def update_and_show_graph_2():
    """Decorate and display the accumulated communications-sector figure."""
    # Thin the date axis to every 10th tick so the labels stay readable.
    axes = plt.gca()
    axes.set_xticks(axes.get_xticks()[::10])
    plt.xticks(rotation=90)
    plt.title('Stock Performance in the Communications Sector from 12.02.2019 to 11.27.2020')
    plt.xlabel('Date')
    plt.ylabel('Closing Price')
    plt.grid()
    # Park the legend outside the axes on the right.
    plt.legend(loc='center left', bbox_to_anchor=(1.0, 0.5))
    plt.show()
def update_and_show_graph_3():
    """Decorate and display the accumulated industrial-sector figure."""
    # Thin the date axis to every 10th tick so the labels stay readable.
    axes = plt.gca()
    axes.set_xticks(axes.get_xticks()[::10])
    plt.xticks(rotation=90)
    plt.title('Stock Performance in the Industrial Sector from 12.02.2019 to 11.27.2020')
    plt.xlabel('Date')
    plt.ylabel('Closing Price')
    plt.grid()
    # Park the legend outside the axes on the right.
    plt.legend(loc='center left', bbox_to_anchor=(1.0, 0.5))
    plt.show()
def main():
    """Prompt for a sector, plot every CSV in that sector's folder, then
    show the decorated figure.  Non-numeric input prints an error instead
    of crashing.

    The three copy-pasted plotting loops of the original are collapsed
    into one loop driven by a choice -> (path getter, figure finisher)
    dispatch table; behaviour for each menu choice is unchanged.
    """
    sectors = {
        1: (get_csv_file_path_list, update_and_show_graph_1),
        2: (get_csv_file_path_list_2, update_and_show_graph_2),
        3: (get_csv_file_path_list_3, update_and_show_graph_3),
    }
    try:
        print("""
        For Healthcare Sector, Please Enter 1
        For Communications Sector, Please Enter 2
        For Industrial Sector, Please Enter 3""")
        print()
        askquestion = int(input("Enter Number: "))
        if askquestion in sectors:
            get_paths, show_graph = sectors[askquestion]
            for csv_file_path in get_paths():
                ticker = get_ticker_from_file_path(csv_file_path)
                date_list, close_list = get_date_list_and_price_list_from_csv(csv_file_path)
                graph_x_list_y_list(date_list, close_list, ticker)
            show_graph()
        else:
            print("Error:Invalid Number")
    except ValueError:
        print("Error:Non-numerical character")
main()
|
# Simple click counter: a label shows the count and changes colour on
# every button press.
import tkinter
import random
window = tkinter.Tk()
window.title("My window")
window.geometry("600x500")
# Counter display; its text holds the current count as a string.
label = tkinter.Label(text="0", fg="black", bg="red", font="Arial 22")
label.place(x=25, y=25)
def random_colors():
    """Recolour the counter label with a randomly chosen background."""
    palette = ["red", "green", "blue", "gray"]
    label.configure(bg=random.choice(palette))
def count():
    """Button handler: recolour the label and bump the displayed counter."""
    random_colors()
    current = int(label["text"])
    label["text"] = str(current + 1)
# Button wired to count(); the caption "кнопка" is Russian for "button".
button = tkinter.Button(text="кнопка", command=count)
button.place(x=25, y=100)
# Enter the Tk event loop (blocks until the window is closed).
window.mainloop()
|
# Build a child -> parent orbit map from lines of the form "INNER)OUTER"
# (Advent of Code 2019 day 6 style input).
with open('input.txt', 'r') as f:
    data = [line.strip('\n') for line in f]
mp = {}
for orbit in data:
    inner, outer = orbit.split(')')
    # Each body orbits exactly one other; keep the first mapping seen.
    if outer not in mp:
        mp[outer] = inner
def count_orbits(key):
    """Count direct + indirect orbits of *key* by walking the parent chain
    in the module-level map ``mp`` until the root is reached."""
    parent = mp[key]
    depth = 1
    while parent in mp:
        parent = mp[parent]
        depth += 1
    return depth
# Part 1: total direct + indirect orbits summed over every body.
counter = 0
for k in mp:
    counter += count_orbits(k)
print(f'Part 1: {counter}')
# Part 2
def get_path(key):
    """Return the chain of ancestors of *key*, closest first; the root
    itself (which has no parent in ``mp``) is excluded."""
    ancestors = []
    node = mp[key]
    while node in mp:
        ancestors.append(node)
        node = mp[node]
    return ancestors
path1 = get_path('YOU')
path2 = get_path('SAN')
# Common ancestors form a shared suffix of both ancestor chains; trimming
# that suffix leaves the hops from each start to the first common node.
inter = set(path1).intersection(path2)
# NOTE(review): if inter were empty, [:-0] would drop everything and the
# answer would be 0 — assumes both paths share a root; confirm for inputs.
path_length = len(path1[:-len(inter)]) + len(path2[:-len(inter)])
print(f'Part 2: {path_length}')
|
from model.model import Cnn
from keras_segmentation.train import find_latest_checkpoint
import os
import json
from keras_segmentation.models import model_from_name
import cv2
import numpy as np
def constrastLimit(image):
    """Enhance the contrast of a BGR image by histogram-equalising only the
    luma (Y) channel in YCrCb space, leaving chroma untouched."""
    ycrcb = cv2.cvtColor(image, cv2.COLOR_BGR2YCrCb)
    luma, cr, cb = cv2.split(ycrcb)
    luma = cv2.equalizeHist(luma)
    equalized = cv2.merge([luma, cr, cb])
    return cv2.cvtColor(equalized, cv2.COLOR_YCrCb2BGR)
def model_from_checkpoint_path(checkpoints_path):
    """Rebuild a keras-segmentation model from ``<checkpoints_path>_config.json``
    and load its most recent weights.

    Fix: the config file is now read inside a ``with`` block instead of a
    bare ``open(...).read()``, so the handle is closed deterministically.
    """
    assert (os.path.isfile(checkpoints_path + "_config.json")), "Checkpoint not found."
    with open(checkpoints_path + "_config.json", "r") as config_file:
        model_config = json.loads(config_file.read())
    latest_weights = find_latest_checkpoint(checkpoints_path)
    assert (not latest_weights is None), "Checkpoint not found."
    # model_from_name maps the class name stored in the config to a factory.
    model = model_from_name[model_config['model_class']](model_config['n_classes'],
                                                         input_height=model_config['input_height'],
                                                         input_width=model_config['input_width'])
    print("loaded weights ", latest_weights)
    model.load_weights(latest_weights)
    return model
# Build the classifier and load pre-trained weights.
model = Cnn.build(width=256, height=256, depth=3, classes=5)
# model = model_from_checkpoint_path("path_to_checkpoints")
model.load_weights("Proba1.14")
cap = cv2.VideoCapture('Video/Stop2.mp4')
# Segment the clip frame by frame until the stream ends.
while cap.isOpened():
    ret, image = cap.read()
    if not ret:
        break
    if image is None:
        break
    # Rotate 270 degrees — presumably the clip was recorded in portrait;
    # confirm against the source video.
    im = np.rot90(image, k=3)
    image = np.rot90(image, k=3)
    out = model.predict_segmentation(
        inp=im,
        out_fname="output.png"
    )
    # Display the mask (re-read from disk) alongside the rotated frame.
    t = cv2.imread("output.png")
    cv2.imshow("a", t)
    cv2.imshow("image", image)
    cv2.waitKey(100)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-16 08:39
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: creates the TestPlatformIntroduce table
    (explicit integer primary key, platform name, optional description
    and a URL link)."""
    dependencies = [
        ('KawsWebEnter', '0003_testplanversion'),
    ]
    operations = [
        migrations.CreateModel(
            name='TestPlatformIntroduce',
            fields=[
                ('id', models.IntegerField(primary_key=True, serialize=False)),
                ('platformname', models.CharField(blank=True, max_length=50)),
                ('platformintroduce', models.CharField(blank=True, max_length=255, null=True)),
                ('Urllink', models.CharField(blank=True, max_length=50)),
            ],
        ),
    ]
|
机器学习分类:
有监督机器学习:给定数据集和标签X-y,训练模型,预测输出;y代表类别时是一个分类任务,y代表连续变量时是一个回归任务。
无监督机器学习:不关心有没有标签y,只是挖掘数据集X的一些内在规律。
强化学习:机器在环境(environment)中学习到策略(strategy),按策略选择一个动作(action)让对应的回报(reward)最大。
======================算法汇总===========================
----线性回归算法:
from sklearn.linear_model import *
Ridge() #岭回归
Lasso() #最小绝对值收缩和选择算法,俗称套索算法
MultiTaskLasso() #多任务LASSO回归算法
LassoLars() #LARS套索算法
ElasticNet() #弹性网眼算法
MultiTaskElasticNet() #多任务弹性网眼算法
OrthogonalMatchingPursuit() #正交匹配追踪(OMP)算法
BayesianRidge() #贝叶斯岭回归算法
ARDRegression() #ARD自相关回归算法
LogisticRegression() #逻辑回归算法
SGDClassifier() #SGD随机梯度下降算法
Lars() #最小角回归算法
Perceptron() #感知器算法
PassiveAggressiveClassifier() #PA被动感知算法
RANSACRegressor() #鲁棒回归算法
HuberRegressor() #Huber回归算法
TheilSenRegressor() #Theil-Sen回归算法
PolynomialFeatures() #多项式函数回归算法
LinearRegression() #最小二乘法线性回归算法
->example:
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
x_train,x_test,y_train,y_test = train_test_split(x,y,random_state=1,test_size=0.4)
estimator = LogisticRegression()
estimator.fit(x_train, y_train)
y_pred = estimator.predict(x_test.values)
---朴素贝叶斯算法:
# Naive Bayesian:基于假定--给定目标值时,属性之间相互条件独立
from sklearn.naive_bayes import *
MultinomialNB(alpha=1.0,fit_prior=True,class_prior=None) #多项式朴素贝叶斯算法
GaussianNB([priors]) #高斯朴素贝叶斯算法
BernoulliNB() #伯努利朴素贝叶斯算法
->example:
estimator = MultinomialNB(alpha=0.01)
estimator.fit(x_train, y_train)
y_pred = estimator.predict(x_test.values)
---KNN近邻算法
from sklearn.neighbors import *
KNeighborsClassifier(n_neighbors=2,weights='distance') #KNN近邻分类算法
KNeighborsRegressor() #K近邻回归算法
RadiusNeighborsClassifier(n_neighbors=2,radius=100) #半径邻分类算法
NearestNeighbors() #最近邻居算法
NearestCentroid() #最近质心算法
LSHForest() #局部敏感哈希森林算法
->example:
estimator = KNeighborsClassifier()
estimator.fit(x_train, y_train)
y_pred = estimator.predict(x_test.values)
---随机森林算法:
from sklearn.ensemble import *
RandomForestClassifier() # 随机森林算法
BaggingClassifier() # Bagging装袋算法
ExtraTreesClassifier() # 完全随机树算法
AdaBoostClassifier()
AdaBoostRegressor() # Adaboost 迭代算法,针对同一训练集训练弱分类器,然后集合构成一个强分类器
GradientBoostingClassifier() # GBDT(Gradient Boosting Decision Tree)迭代决策树算法,一种基于决策树的分类回归算法
GradientBoostingRegressor() # 梯度回归算法
VotingClassifier() #投票算法
->example:
estimator = RandomForestClassifier(n_estimators=8)
estimator.fit(x_train,y_train)
y_pred = estimator.predict(x_test.values)
---决策树算法:
from sklearn.tree import *
DecisionTreeClassifier() #决策树算法
ExtraTreeClassifier() #完全随机树算法
ExtraTreeRegressor() #完全随机树回归算法
export_graphviz(decision_tree) #用于输出决策树图形
->example:
estimator=DecisionTreeClassifier()
estimator.fit(x_train, y_train)
y_pred = estimator.predict(x_test.values)
---支持向量机算法:
from sklearn.svm import *
SVC() # 支持向量机算法
LinearSVC() #线性向量算法
NuSVC() # Nu支持向量算法
SVR() #Epsilon-SVR 支持向量回归算法
NuSVR() # Nu支持SVR向量算法
OneClassSVM() #一类支持微量机异常检测算法
l1_min_c() #用于返回边界参数
->example:
estimator = SVC(kernel='rbf',probability=True)
estimator.fit(x_train,y_train)
y_pred = estimator.predict(x_test.values)
---SVM-cross向量机交叉算法:
def svm_cross(x_train,y_train):
estimator = SVC(kernel='rbf',probability=True)
param_grid = {'C':[1e-3,1e-2,1e-1,1,10,100,1000],'gamma':[0.001,0.0001]}
grid_search = GridSearchCV(estimator,param_grid,n_jobs=1,verbose=1)
grid_search.fit(x_train,y_train)
best_parameters = grid_search.best_estimator_.get_params()
# for para,val in best_parameters.items():
# print(para,val)
estimator = SVC(kernel='rbf',C=best_parameters['C'],gamma=best_parameters['gamma'],probability=True)
estimator.fit(x_train,y_train)
return estimator
---神经网络算法:
from sklearn.neural_network import *
BernoulliRBM() #伯努利受限波尔兹曼机神经网络算法,简称RBM算法
MLPClassifier() #多层感知器神经网络算法,简称MLP算法
MLPRegressor() #多层感知器神经网络回归算法,简称MLP回归算法
->example:
estimator = MLPClassifier(solver='lbfgs',alpha=1e-5,hidden_layer_sizes=(5,2),random_state=1)
estimator.fit(x_train, y_train)
y_pred = estimator.predict(x_test.values)
----效果评估函数:
import numpy as np
import pandas as pd
from sklearn import metrics
def acc_evaluate(df,error_rate=5,debug_mode=True):
df['y_abs_error'] = np.abs(df['y_test']-df['y_pred'])
df['y_test_div'] = df['y_test']
df.loc[df['y_test']==0,'y_test_div']= 0.00001
df['y_error_rate'] = (df['y_abs_error']/df['y_test_div'])*100
df_acc = df[df['y_error_rate']<error_rate]
pred_accuracy = len(df_acc['y_pred'])/len(df['y_test'])
if debug_mode:
y_test,y_pred = df['y_test'],df['y_pred']
mae = metrics.mean_absolute_error(y_test, y_pred)
mse = metrics.mean_squared_error(y_test, y_pred)
rmse = np.sqrt(mse)
print('pred_accuracy:%0.2f \n MAE:%0.2f \n MSE:%0.2f \n RMSE:%0.2f' %(pred_accuracy,mae,mse,rmse))
---模型持久化:sklearn版pickle
from sklearn.externals import joblib
joblib.dump(estimator, 'estimator.pkl')
estimator = joblib.load('estimator.pkl')
---交叉验证:
from sklearn import cross_validation
scores = cross_validation.cross_val_predict(estimator, x_train,y_train,\
cv=10,scoring='mean_squared_error/accuracy')
from sklearn import metrics
# 度量准确率
metrics.accuracy_score(y_true, y_pred)
# 度量查准率P
metrics.precision_score(y_true, y_pred)
# 度量召回率R
metrics.recall_score(y_true, y_pred)
# F1 score
metrics.f1_score(y_true, y_pred) # F1Score = 2PR/(P+R)
# 混淆矩阵
metrics.confusion_matrix(y_true, y_pred)
# 其他分类信息
metrics.classification_report(y_true, y_pred)
---特征筛选、支持度评级:
from sklearn.feature_selection import RFE
def feature_selection(estimator,X,y):
selector = RFE(estimator)
selector.fit(X, y)
return pd.DataFrame({'support':selector.support_,'ranking':selector.ranking_},index='')
---降维:
from sklearn.decomposition import PCA
pca_2n = PCA(n_components=2) #降维只保留两个特征序列
x_train = pca_2n.fit_transform(x_train)
---聚类:
from sklearn.cluster import KMeans
kmean = KMeans(n_clusters=2)
kmean.fit(X)
kmean.predict(X)
============================sklearn===============================
---多项式与线性回归:
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
def polynomial_model(degree=1): #degree为多项式阶数
polynomial_features = PolynomialFeatures(degree=degree,include_bias=False)
linear_regression = LinearRegression()
#一个Pipeline可以包含多个处理节点,除了最后一个节点只需fit()方法外,其他节点都必须实现fit()和transform()方法。
pipeline = Pipeline([('polynomial_features',polynomial_features),\
('linear_regression',linear_regression)])
return pipeline
---学习曲线:
from sklearn.model_selection import learning_curve
from sklearn.model_selection import ShuffleSplit
cv = ShuffleSplit(n_splits=10,test_size=0.2,random_state=0) #随机从数据集中分配出训练样本和交叉验证样本,计算10次交叉验证数据集的分数
def plot_learning_curve(estimator,title,X,y,ylim=None,cv=None,\
n_jobs=1,train_sizes=np.linspace(0.1,1.0, 5)):
plt.title(title)
if ylim is not None:
plt.ylim(*ylim)
plt.xlabel('Training Examples')
plt.ylabel('Score')
train_sizes,train_scores,test_scores = learning_curve(estimator, X, y,\
cv=cv,n_jobs=n_jobs,train_sizes=train_sizes) # key step
train_scores_mean = np.mean(train_scores,axis=1)
train_scores_std = np.std(train_scores,axis=1)
test_scores_mean = np.mean(test_scores,axis=1)
test_scores_std = np.std(test_scores,axis=1)
plt.grid()
plt.fill_between(train_sizes,train_scores_mean-train_scores_std,\
train_scores_mean+train_scores_std,alpha=0.1,color='r')
plt.fill_between(train_sizes,test_scores_mean-test_scores_std,\
test_scores_mean+test_scores_std,alpha=0.1,color='g')
plt.plot(train_sizes,train_scores_mean,'o-',color='r',label='Training score')
plt.plot(train_sizes,test_scores_mean,'o-',color='g',label='Cross-Validation score')
plt.legend(loc='best')
return plt
==========K近邻算法=========
---K近邻分类:
from sklearn.datasets import make_blobs
centers = [[-2,2],[2,2],[0,4]]
#生成60个在以centers为中心点的周围分布的数据集,数据点分布的松散度为0.6标准差。
X,y=make_blobs(n_samples=60,centers=centers,random_state=0,cluster_std=0.60)
plt.figure(figsize=(6,3),dpi=144)
c = np.array(centers)
plt.scatter(X[:,0],X[:,1],c=y,s=20,cmap='cool')
plt.scatter(c[:,0],c[:,1],s=20,marker='^',c='orange')
from sklearn.neighbors import KNeighborsClassifier
k=5
clf = KNeighborsClassifier(n_neighbors=k)
clf.fit(X,y)
X_sample =np.array([[0,2]])
y_sample = clf.predict(X_sample)
neighbors = clf.kneighbors(X_sample,return_distance=False)#将样本周围距离最近的5个点取出来,取出来的点是训练集里的索引
plt.figure(figsize=(6,3),dpi=200)
plt.scatter(X[:,0],X[:,1],c=y,s=20,cmap='cool')
plt.scatter(c[:,0],c[:,1],s=20,marker='^',c='orange')
plt.scatter(X_sample[:,0],X_sample[:,1],marker='x',c=y_sample,s=100,cmap='cool')
for i in neighbors[0]:
plt.plot([X[i][0],X_sample[0][0]],[X[i][1],X_sample[0][1]],'k--',linewidth=0.6)
---K近邻回归:
n_dots =40
X=5*np.random.rand(n_dots,1)
y = np.cos(X).ravel()
y+=0.2*np.random.rand(n_dots)-0.1 #添加噪声
from sklearn.neighbors import KNeighborsRegressor
k = 5
knn = KNeighborsRegressor(k)
knn.fit(X,y)
T = np.linspace(0,5,500)[:,np.newaxis]
y_pred = knn.predict(T)
knn.score(X,y)
plt.figure(figsize=(6,3),dpi=200)
plt.scatter(X,y,c='g',label='data',s=20)
plt.plot(T,y_pred,c='k',label='prediction',lw=3)
plt.legend(loc='upper right')
plt.axis('tight')
plt.title('KNeighborsRegressor(k=%i)'%k)
plt.show()
---多个模型比较:
#多次随机分配训练数据集和交叉验证数据集,然后求模型准确性评分的平均值。
#sklearn提供了KFold和cross_val_score方法来实现。
from sklearn.model_selection import KFold
from sklearn.model_selection import cross_val_score
kfold = KFold(n_splits=10) #把数据集分为10份,其中1份作为交叉验证数据集来计算模型准确性,剩余9份为训练集
cv_result = cross_val_score(estimator, X,Y,cv=kfold) #cross_val_score总共会计算10次
cv_result.mean()
---特征选择:
sklearn.feature_selection.chi2() #计算卡方值
sklearn.feature_selection.f_classif() #计算F值
from sklearn.feature_selection import SelectKBest
selector = SelectKBest(k=2) #选择相关性最大的两个特征,默认使用F值检验
X_new = selector.fit_transform(X,Y) #重新选择的特征
=========线性回归算法============
#准备数据-波士顿房价数据
from sklearn.datasets import load_boston
boston = load_boston()
X = boston.data
y = boston.target
#模型训练
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
import time
X_train,X_test,y_train,y_test=train_test_split(X,y,test_size=0.2,random_state=3)
model = LinearRegression(normalize=True) #normalize=Ture数据归一化
start = time.clock()
model.fit(X_train,y_train)
train_score = model.score(X_train,y_train)
cv_score = model.score(X_test,y_test)
print('elaspe:%.6f;\ntrain_score:%0.6f;\ncv_score:%.6f'%(time.clock()-start,train_score,cv_score))
#模型优化-欠拟合-增加模型复杂度-创建多项式模型函数
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error
def polynomial_model(degree=1):
polynomial_features = PolynomialFeatures(degree=degree,include_bias=False)
linear_regression = LinearRegression(normalize=True)
pipeline = Pipeline([('polynomial_features',polynomial_features),('linear_regression',linear_regression)])
return pipeline
degrees = [1,2,3]
results = []
for deg in degrees:
model = polynomial_model(deg)
start = time.clock()
model.fit(X_train,y_train)
train_score = model.score(X_train,y_train)
cv_score = model.score(X_test,y_test)
mse = mean_squared_error(y_train,model.predict(X_train))
results.append({'degree':deg,'elapse':time.clock()-start,'train_score':train_score,'cv_score':cv_score,'mse':mse})
for r in results:
print('degree:%i;------\n elaspe:%.6f;\n train_score:%0.6f;\n cv_score:%.6f;\n mse:%.6f'%\
(r['degree'],r['elapse'],r['train_score'],r['cv_score'],r['mse']))
#画学习曲线
from matplotlib.figure import SubplotParams
from sklearn.model_selection import ShuffleSplit
cv = ShuffleSplit(n_splits=10,test_size=0.2,random_state=0)
plt.figure(figsize=(12,2.5),dpi=200,subplotpars=SubplotParams(hspace=0.3))
for i in range(len(degrees)):
plt.subplot(1,3,i+1)
plot_learning_curve(polynomial_model(degrees[i]),'Learning Curve(degree=%i)'%degrees[i],X,y,ylim=(0.01,1.01),cv=cv)
plt.show()
=====逻辑回归算法======
|
import unittest
import os
from visual_microphone.sound import Sound
class TestVisualMicrophone(unittest.TestCase):
    """Smoke test for visual_microphone.sound.Sound."""
    def setUp(self):
        # No shared fixtures are needed yet.
        pass
    def test_sound_file_created(self):
        # Writing 1000 constant samples should produce 'sounds.wav' on disk.
        s = Sound()
        for i in range(0, 1000):
            s.write(0.4)
        self.assertTrue(os.path.isfile('sounds.wav'))
if __name__ == "__main__":
    unittest.main()
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, with_statement
from tempfile import mkdtemp
from cuisine import dir_attribs as attributes
from cuisine import dir_ensure as ensure
from cuisine import dir_exists as exists
from cuisine import file_attribs_get as attributes_get
from cuisine import file_is_link as is_link
from revolver import core
def temp_local():
    """Create a temporary directory on the local machine and return its path."""
    local_path = mkdtemp()
    return local_path
def temp(mode=None, owner=None, group=None):
    """Create a temporary directory on the remote host (via ``mktemp``),
    apply the given mode/owner/group attributes and return its path."""
    path = core.run('mktemp --directory').stdout
    attributes(path, mode=mode, owner=owner, group=group)
    return path
def remove(location, recursive=False, force=True):
    """Delete *location* on the remote host with ``rm``.

    The legacy ``cond and a or b`` idiom is replaced with conditional
    expressions (same behaviour, no falsy-value pitfall).
    NOTE(review): *location* is interpolated unquoted into a shell
    command — confirm callers never pass untrusted paths.
    """
    recursive = '-r' if recursive else ''
    force = '-f' if force else ''
    core.run('rm %s %s %s' % (force, recursive, location))
def create(path, recursive=False, mode=None, owner=None, group=None):
    """Create *path* on the remote host with ``mkdir`` and apply attributes.

    If the directory already exists nothing happens — note that in that
    case mode/owner/group are NOT re-applied (preserved behaviour).  The
    legacy ``cond and a or b`` idiom is replaced with a conditional
    expression.
    """
    recursive = '-p' if recursive else ''
    if exists(path):
        return
    core.run('mkdir %s %s' % (recursive, path))
    attributes(path, mode=mode, owner=owner, group=group)
|
import ast
import datetime
import os
# returns a list with [child selection, game result, number of moves, total time of game, child_selection,...]
# returns a list with [child selection, game result, number of moves, total time of game, child_selection,...]
def get_headers():
    """Build the CSV header row: subject_id followed, for the 'pre' and
    'post' games, by selection/result/time columns for trials 0-9."""
    headers = ["subject_id"]
    for game in ('pre', 'post'):
        for trial in range(10):
            headers.extend([
                game + '_selection_' + str(trial),
                game + '_result_' + str(trial),
                game + '_time_' + str(trial),
            ])
    return headers
def analyze_result(filename, pathname='./processed_data/'):
    """Parse one subject's log file into a flat row for CSV export.

    Each log line carries a dict literal after a 6-character prefix; for
    every 'down' action the selected object, its outcome (obj[4]) and the
    elapsed time since the game started are recorded.  A gap of more than
    300 seconds marks the switch from the 'pre' game to the 'post' game.
    Returns [subject_id, pre_selection_0, pre_result_0, pre_time_0, ...]
    with 'Null' placeholders for trials that never happened.
    """
    b_start = False
    result_list = []
    data = {'pre': {}, 'post': {}}
    current_game = 'pre'
    #init an empty dictionary:
    for game in ["pre","post"]:
        for x in range (0,10):
            data[game]['selection'+str(x)]='Null'
            data[game]['result'+str(x)]='Null'
            data[game]['time' + str(x)] = 'Null'
    with open(os.path.join(pathname,filename), 'r') as fp:
        i=0
        for line in fp:
            # The first 6 characters are a fixed prefix; the rest is a
            # Python dict literal — literal_eval keeps this safe.
            raw_dic = ast.literal_eval(line[6:])
            action = raw_dic['action']
            obj = raw_dic['obj']
            if (b_start == False):
                # First line of the file defines the game's start time.
                start_time = datetime.datetime.strptime(raw_dic['time'], '%Y_%m_%d_%H_%M_%S_%f')
                b_start = True
            if (action == 'down'):
                # obj[0] is the trial index; presumably obj[4] holds the
                # trial outcome — confirm against the logging code.
                index = str(obj[0])
                end_time = datetime.datetime.strptime(raw_dic['time'], '%Y_%m_%d_%H_%M_%S_%f')
                total_time = (end_time - start_time).total_seconds()
                if (total_time>300): #indicating switch to post
                    current_game = 'post'
                    start_time = end_time
                data[current_game]['selection'+index] = obj
                data[current_game]['result'+index] = obj[4]
                data[current_game]['time' + index] = total_time
                i = i + 1
    #generate result_list from data dictionary:
    # Subject id is whatever is left of the filename after stripping the
    # fixed prefix and extension.
    subject_id = filename.replace('bag_mindset_test','')
    subject_id = subject_id.replace('.txt','')
    result_list.append(subject_id)
    for game in ["pre","post"]:
        for x in range (0,10):
            result_list.append (data[game]['selection'+str(x)])
            result_list.append (data[game]['result'+str(x)])
            result_list.append(data[game]['time' + str(x)])
    return result_list
#result = analyze_result('bag_mindset_test31.txt', pathname='./processed_data/txt/')
#print result
|
import os
import sys
# Run the weight-extraction script once for every background sample
# listed (one file name per line) in BkgFileNames.dat.
with open("BkgFileNames.dat") as fs:
    bkgnames = [line.strip() for line in fs]
for a in bkgnames:
    os.system("./GetWeightsBkg.sh %s %d" % (str(a), 1))
#command="./GetWeightsBkg.sh %s %d"%("WZTo3LNu.root",4.42965)
#command="./GetWeightsBkg.sh %s %d"%(fileName,xsec)
#command="./GetWeightsBkg.sh WZTo3LNu.root 4.42965"
#os.system(command)
|
from __future__ import division
import numpy as np
import numpy.random as npr
from svae.hmm import hmm_inference
from svae.util import allclose
### parameter makers
def make_hmm_natparam(num_states, T):
    """Draw a random HMM natural-parameter triple: log initial weights of
    shape (num_states,), the log of a row-stochastic transition matrix of
    shape (num_states, num_states), and log node potentials of shape
    (T, num_states).  RNG draw order matches the original for seeded
    reproducibility."""
    init_param = np.log(npr.rand(num_states))
    transitions = npr.rand(num_states, num_states)
    transitions = transitions / np.sum(transitions, axis=1, keepdims=True)
    pair_param = np.log(transitions)
    node_potentials = np.log(npr.rand(T, num_states))
    return init_param, pair_param, node_potentials
### check expected statistics code
def check_hmm_estep(natparam):
    """Assert that the fast and slow E-step implementations agree on both
    the variational lower bound and the expected statistics."""
    vlb1, stats1 = hmm_inference.hmm_estep(natparam)
    vlb2, stats2 = hmm_inference.hmm_estep_slow(natparam)
    assert np.isclose(vlb1, vlb2)
    assert allclose(stats1, stats2)
def test_hmm_estep():
    """Nose-style generator test: three random problem sizes.

    Fix: ``xrange`` (Python 2 only) replaced with ``range``, which behaves
    identically for this tiny loop and keeps the test runnable on Python 3.
    """
    npr.seed(0)
    for _ in range(3):
        yield check_hmm_estep, make_hmm_natparam(npr.randint(2,5), npr.randint(5,10))
|
import environement
import numpy as np
import time
def check(ave, st):
    """Greedily replay a learned Q-table in the Pac-Man-style grid world.

    ave -- number of evaluation episodes to average over
    st  -- which table to load: 'Q' (Q-learning) or 'S' (SARSA)
    Returns (mean dots eaten, mean steps, mean reward) over the episodes.
    NOTE(review): any other value of ``st`` leaves ``name`` unbound and
    raises NameError at np.load — confirm callers only pass 'Q'/'S'.
    """
    if st == 'Q':
        name = 'Q-Table.npy'
    elif st == 'S':
        name = 'Q-Table-sarsa.npy'
    evn2 = environement.Evironment_PaMaCup()
    evn2.reset()
    sl_dot2 = evn2.sldot            # dots present at the start of an episode
    max_step2 = (evn2.r * evn2.c)   # step budget: one per grid cell
    # Presumably index 1 of the saved array holds the table — confirm
    # against the training script.
    qtable2 = np.load(name)[1]
    total_dot = []
    total_step = []
    total_reward = []
    for episode2 in range(ave):
        state2 = evn2.reset()
        evn2.view()
        lstep2 = 0
        ldot2 = 0
        rewards2 = 0
        done2 = False
        for step2 in range(max_step2):
            time.sleep(0.5)  # slow down so the rendered board is watchable
            action2 = np.argmax(qtable2[state2, :])  # greedy action choice
            new_state2, reward2, done2, info2 = evn2.step(action2)
            evn2.view()
            rewards2 += reward2
            lstep2 = step2
            ldot2 = info2
            if done2:
                break
            state2 = new_state2
        total_dot.append(sl_dot2 - ldot2)  # dots eaten this episode
        total_step.append(lstep2)
        total_reward.append(rewards2)
    print('Max dot: ', max(total_dot), ' Min dot: ', min(total_dot))
    return sum(total_dot)/ave, sum(total_step)/ave, sum(total_reward)/ave
check(1, 'S')
|
from PyQt4 import QtGui
from PyQt4.uic.properties import QtCore
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QtGui import *
from PyQt4.QtGui import *
from PyQt4 import QtGui
from PyQt4 import QtCore
from PyQt4 import QtCore, QtGui
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from PyQt4.QtGui import *
import numpy as np
import matplotlib.pyplot as plt
import random
import os
import sys
import math
from sklearn.cross_validation import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import accuracy_score
from makine import Ui_Dialog
class MainWindow(QtGui.QMainWindow, Ui_Dialog):
    def __init__(self):
        """Build the main window, load the Designer UI and connect every
        tab's buttons to their handler methods."""
        QtGui.QMainWindow.__init__(self)
        self.setupUi(self)
        print "__init__"
        # RUS / ROS resampling tab
        self.RR_veriYukle1_btn.clicked.connect(self.veri1)
        self.RR_veriYukle2_btn.clicked.connect(self.veri2)
        self.RR_RUS_btn.clicked.connect(self.rus)
        self.RR_ROS_btn.clicked.connect(self.ros)
        self.RR_RveriOlustur1_btn.clicked.connect(self.olustur1_2)
        self.RR_RveriOlustur2_btn.clicked.connect(self.olustur2_2)
        # KNN tab
        self.KNN_veriYukle_btn.clicked.connect(self.knn_veri_ekle)
        self.KNN_Kumeleme_btn.clicked.connect(self.knn_veri_kumele)
        self.KNN_YeniveriYukle_btn.clicked.connect(self.knn_veri_ekle_kumele)
        # K-means tab
        self.Kmeans_veriYukle_btn.clicked.connect(self.veriYukleKmeans)
        self.Kmeans_Kumele_btn.clicked.connect(self.kumelemeKmeans)
        # Train/test split tab
        self.TT_upload_btn.clicked.connect(self.tum_veri_ekle)
        self.TT_uygula_btn.clicked.connect(self.bol_veri_tt)
        # Random forest tab
        self.RanForest_uygula_btn.clicked.connect(self.RandomForest)
        # Normalisation tab
        self.Norm_btn_veriYukle.clicked.connect(self.dosyaYuklenormalize)
        self.Norm_btn_MinMax.clicked.connect(self.normalize_MinMax)
        self.Norm_btn_Zscore.clicked.connect(self.normalize_Z_Score)
        self.Norm_btn_Median.clicked.connect(self.normalizeMedian)
        # Miscellaneous buttons (Parkinson data set and related actions)
        self.pushButton_6.clicked.connect(self.parkisyonveri)
        self.pushButton.clicked.connect(self.sstbtn)
        self.pushButton_5.clicked.connect(self.RandomFogren)
        self.pushButton_3.clicked.connect(self.dstbtn)
        self.pushButton_2.clicked.connect(self.stcpbtn)
        self.pushButton_4.clicked.connect(self.butunbtn)
#************************************* RUS - ROS TAB *********************************************************
Y=[]
X=[]
    def olustur1_2(self):
        """Generate the first synthetic 2-D data set: N uniformly random
        points in the value range read from the UI, then refresh the view.

        NOTE(review): points are stored as '%.2f'-formatted STRINGS, not
        floats — the plotting code appears to rely on this; confirm before
        changing the representation."""
        self.X=[]
        for i in range(int(self.RR_miktar1_lineEd.text())):
            x="{:.2f}".format(random.uniform(float(self.RR_Varalik1_1_lineEd.text()), float(self.RR_Varalik1_2_lineEd.text())))
            y="{:.2f}".format(random.uniform(float(self.RR_Varalik1_1_lineEd.text()), float(self.RR_Varalik1_2_lineEd.text())))
            self.X.append([x,y])
        self.tablo1_goster()
    def olustur2_2(self):
        """Generate the second synthetic 2-D data set: N uniformly random
        points (stored as '%.2f' strings) in the UI-supplied range, then
        refresh the view."""
        self.Y=[]
        for i in range(int(self.RR_miktar2_lineEd.text())):
            x="{:.2f}".format(random.uniform(float(self.RR_Varalik2_1_lineEd.text()), float(self.RR_Varalik2_2_lineEd.text())))
            y="{:.2f}".format(random.uniform(float(self.RR_Varalik2_1_lineEd.text()), float(self.RR_Varalik2_2_lineEd.text())))
            self.Y.append([x,y])
        self.tablo2_goster()
    def ros(self):
        """Plot the two point sets after randomly sampling the larger one down.

        NOTE(review): despite the name "ros" (over-sampling), random.sample()
        here *under*-samples the majority set to the minority's size — no
        minority over-sampling takes place. Confirm against the RUS method.
        """
        ros=[]
        if(len(self.X)>len(self.Y)):
            # X is larger: draw len(Y) samples from X, plot pairs together.
            ros=random.sample(self.X, len(self.Y))
            for i in range(len(ros)):
                plt.plot(self.Y[i][0], self.Y[i][1], "r", markersize = 9,marker = ".",alpha=0.2)
                plt.plot(ros[i][0], ros[i][1], "g", markersize = 5,marker = "o",alpha=0.2)
        else:
            # Y is larger (or equal): sample Y down to len(X).
            ros=random.sample(self.Y, len(self.X))
            for i in range(len(ros)):
                plt.plot(ros[i][0], ros[i][1], "r", markersize = 9,marker = ".",alpha=0.2)
                plt.plot(self.X[i][0], self.X[i][1], "g", markersize = 5,marker = "o",alpha=0.2)
        # Persist the figure, then mirror it inside the tab's graphics view.
        plt.savefig('./sonuclar/Ros.png')
        plt.show()
        w,h=self.RR_ROS_GV.width()-5,self.RR_ROS_GV.height()-5
        self.RR_ROS_GV.setScene(self.show_image('./sonuclar/Ros.png',w,h))
    def rus(self):
        """Plot both point sets together, repeating the smaller one via modulo indexing.

        NOTE(review): despite the name "rus" (under-sampling), nothing is
        removed here — every point of the larger set is drawn and the smaller
        set is cycled with i % len. Confirm this is the intended behaviour.
        """
        adet = 0
        # Iterate as many times as the larger set has points.
        if (len(self.X) > len(self.Y)):
            adet = len(self.X)
        else:
            adet = len(self.Y)
        for i in range(adet):
            plt.plot(self.Y[i % len(self.Y)][0], self.Y[i % len(self.Y)][1], "r", markersize=9, marker=".", alpha=0.2)
            plt.plot(self.X[i % len(self.X)][0], self.X[i % len(self.X)][1], "g", markersize=5, marker="o", alpha=0.2)
        # Persist the figure, then mirror it inside the tab's graphics view.
        plt.savefig('./sonuclar/Rus.png')
        plt.show()
        w, h = self.RR_RUS_GV.width() - 5, self.RR_RUS_GV.height() - 5
        self.RR_RUS_GV.setScene(self.show_image('./sonuclar/Rus.png', w, h))
    def tablo1_goster(self):
        """Plot all generated points (set 1 green, set 2 red) and list set 1 in its table."""
        if(len(self.Y)>0):
            for i in range(len(self.X)):
                plt.plot(self.X[i][0], self.X[i][1], "g",markersize = 5,marker = "o",alpha=0.2)
            for i in range(len(self.Y)):
                plt.plot(self.Y[i][0], self.Y[i][1], "r",markersize = 9,marker = ".",alpha=0.2)
        else:
            # Set 2 not generated yet: plot set 1 only.
            for i in range(len(self.X)):
                plt.plot(self.X[i][0], self.X[i][1], "g",markersize = 5,marker = "o",alpha=0.2)
        # Persist the figure, then mirror it inside the tab's graphics view.
        plt.savefig('./sonuclar/tumveriler.png')
        plt.show()
        w,h=self.RR_TUM_VERILER_GV.width()-5,self.RR_TUM_VERILER_GV.height()-5
        self.RR_TUM_VERILER_GV.setScene(self.show_image('./sonuclar/tumveriler.png',w,h))
        # Fill the set-1 table with the x/y string pairs.
        self.RR_1_tabelW.setColumnCount(2)
        self.RR_1_tabelW.setRowCount(len(self.X)) ##set number of rows
        for rowNumber,row in enumerate(self.X):
            self.RR_1_tabelW.setItem(rowNumber, 0, QtGui.QTableWidgetItem(str(row[0])))
            self.RR_1_tabelW.setItem(rowNumber, 1, QtGui.QTableWidgetItem(str(row[1])))
    def tablo2_goster(self):
        """Plot all generated points and list set 2 in its table."""
        if(len(self.X)>0):
            for i in range(len(self.X)):
                plt.plot(self.X[i][0], self.X[i][1], "g",markersize = 5,marker = "o",alpha=0.2)
            for i in range(len(self.Y)):
                plt.plot(self.Y[i][0], self.Y[i][1], "r",markersize = 9,marker = ".",alpha=0.2)
        else:
            # NOTE(review): this set-2-only branch plots in green ("g") while the
            # branch above draws set 2 in red — confirm the colour is intended.
            for i in range(len(self.Y)):
                plt.plot(self.Y[i][0], self.Y[i][1], "g",markersize = 9,marker = ".",alpha=0.2)
        # Persist the figure, then mirror it inside the tab's graphics view.
        plt.savefig('./sonuclar/tumveriler.png')
        plt.show()
        w,h=self.RR_TUM_VERILER_GV.width()-5,self.RR_TUM_VERILER_GV.height()-5
        self.RR_TUM_VERILER_GV.setScene(self.show_image('./sonuclar/tumveriler.png',w,h))
        # Fill the set-2 table with the x/y string pairs.
        self.RR_2_tabelW.setColumnCount(2)
        self.RR_2_tabelW.setRowCount(len(self.Y)) ##set number of rows
        for rowNumber,row in enumerate(self.Y):
            self.RR_2_tabelW.setItem(rowNumber, 0, QtGui.QTableWidgetItem(str(row[0])))
            self.RR_2_tabelW.setItem(rowNumber, 1, QtGui.QTableWidgetItem(str(row[1])))
def show_image(self, img_name,width,height):
pixMap = QtGui.QPixmap(img_name)
pixMap=pixMap.scaled(width,height)
pixItem = QtGui.QGraphicsPixmapItem(pixMap)
scene2 = QGraphicsScene()
scene2.addItem(pixItem)
return scene2
def veri1(self):
self.veriyolu1 = unicode(QtGui.QFileDialog.getOpenFileName(self, "Duzenlenecek dosyayi secin", ".", "Resim dosyalari (*.*)"))
f = open(self.veriyolu1)
self.X=[]
for i,row in enumerate(f.readlines()):
currentline = row.split(",")
temp=[]
for column_value in currentline:
temp.append(column_value)
self.X.append(temp)
self.tablo1_goster()
def veri2(self):
self.veriyolu2 = unicode(QtGui.QFileDialog.getOpenFileName(self,"Duzenlenecek dosyayi secin", ".", "Resim dosyalari (*.*)"))
f = open(self.veriyolu2)
self.Y=[]
for i,row in enumerate(f.readlines()):
currentline = row.split(",")
temp=[]
for column_value in currentline:
temp.append(column_value)
self.Y.append(temp)
self.tablo2_goster()
# ********************************** RUS - ROS TAB END *********************************************************
# ********************************** KNN TAB BEGINNING *********************************************************
Knn = []
def knn_veri_ekle(self):
self.fileNameKNN = unicode(
QtGui.QFileDialog.getOpenFileName(self,"Duzenlenecek dosyayi secin", ".", "Resim dosyalari (*.*)"))
f = open(self.fileNameKNN)
self.Knn = []
for i, row in enumerate(f.readlines()):
currentline = row.split(",")
temp = []
for column_value in currentline:
temp.append(column_value)
self.Knn.append(temp)
self.KNN_tablo_goster()
    def KNN_tablo_goster(self):
        """Plot the loaded KNN points, show the figure in the graphics view and list them in the table."""
        if (len(self.Knn) > 0):
            for i in range(len(self.Knn)):
                plt.plot(self.Knn[i][0], self.Knn[i][1], "g", markersize=5, marker="o")
        # Persist the figure, then mirror it inside the tab's graphics view.
        plt.savefig('./sonuclar/knn/tumveriler.png')
        plt.show()
        w, h = self.KNN_veriekle_GV.width() - 5, self.KNN_veriekle_GV.height() - 5
        self.KNN_veriekle_GV.setScene(self.show_image('./sonuclar/knn/tumveriler.png', w, h))
        # Fill the table with the x/y string pairs.
        self.KNN_1_tabWidget.setColumnCount(2)
        self.KNN_1_tabWidget.setRowCount(len(self.Knn)) ##set number of rows
        for rowNumber, row in enumerate(self.Knn):
            self.KNN_1_tabWidget.setItem(rowNumber, 0, QtGui.QTableWidgetItem(str(row[0])))
            self.KNN_1_tabWidget.setItem(rowNumber, 1, QtGui.QTableWidgetItem(str(row[1])))
    def knn_veri_kumele(self):
        """Cluster the loaded points into two groups, plot them and remember the labels.

        NOTE(review): despite the KNN naming this is Lloyd's k-means with a
        hard-coded k of 2, seeded by the parity of each point's x coordinate.
        The final assignment is stored in self.kumeye for knn_veri_ekle_kumele().
        """
        X = self.Knn
        kumeyeri = []
        kumesayisi = 2
        # Initial assignment: x coordinate modulo the cluster count.
        for i in range(len(X)):
            kumeyeri.append(int(X[i][0]) % kumesayisi)
        devammi = True
        while (devammi):
            # Accumulate [sum_x, sum_y, count] per cluster, then divide -> centroids.
            merkezler = []
            for i in range(kumesayisi):
                merkezler.append([0, 0, 0])
            for i in range(len(X)):
                merkezler[kumeyeri[i]] = [float(merkezler[kumeyeri[i]][0]) + float(X[i][0]),
                                          float(merkezler[kumeyeri[i]][1]) + float(X[i][1]),
                                          int(merkezler[kumeyeri[i]][2]) + 1]
            for i in range(len(merkezler)):
                merkezler[i] = [float(merkezler[i][0]) / float(merkezler[i][2]), (merkezler[i][1] / merkezler[i][2]),
                                int(merkezler[i][2])]
            kumeyeriyeni = []
            for i in range(len(X)):
                kumeyeriyeni.append(0)
            # Reassign every point to the nearer of the two centroids (Euclidean).
            for i in range(len(X)):
                deger = [0, 0]
                deger = [math.sqrt(math.pow(abs(float(X[i][0]) - float(merkezler[0][0])), 2) + math.pow(
                    abs(float(X[i][1]) - float(merkezler[0][1])), 2)), 0]
                if (deger[0] > math.sqrt(math.pow(abs(float(X[i][0]) - float(merkezler[1][0])), 2) + math.pow(
                        abs(float(X[i][1]) - float(merkezler[1][1])), 2))):
                    deger = [math.sqrt(math.pow(abs(float(X[i][0]) - float(merkezler[1][0])), 2) + math.pow(
                        abs(float(X[i][1]) - float(merkezler[1][1])), 2)), 1]
                kumeyeriyeni[i] = deger[1]
            # Converged when no assignment changed.
            if (kumeyeriyeni == kumeyeri):
                devammi = False
            else:
                kumeyeri = kumeyeriyeni
        colors = ["g.", "r.", "b.", "y.", "c.", "m."]
        for i in range(len(X)):
            plt.plot(X[i][0], X[i][1], colors[kumeyeri[i]], markersize=10)
        for i in range(len(merkezler)):
            plt.scatter(int(merkezler[i][0]), int(merkezler[i][1]), marker="x", s=70, linewidths=3, zorder=10,
                        c="black")
        # Persist the figure, then mirror it inside the tab's graphics view.
        plt.savefig('./sonuclar/knn/Knn.png')
        plt.show()
        w, h = self.KNN_sonuc_GV.width() - 5, self.KNN_sonuc_GV.height() - 5
        self.KNN_sonuc_GV.setScene(self.show_image('./sonuclar/knn/Knn.png', w, h))
        # Remember the final assignment for the k-NN classification step.
        self.kumeye = kumeyeri
def knn_veri_ekle_kumele(self):
ekle = [int(self.KNN_X_lineEd.text()), int(self.KNN_Y_lineEd.text())]
X = self.Knn
merkezler = []
kumeyeri = self.kumeye
colors = ["g.", "r.", "b.", "y.", "c.", "m."]
plt.plot(ekle[0], ekle[1], colors[5], markersize=20)
for i in range(len(X)):
plt.plot(X[i][0], X[i][1], colors[kumeyeri[i]], markersize=10)
for i in range(len(merkezler)):
plt.scatter(int(merkezler[i][0]), int(merkezler[i][1]), marker="x", s=70, linewidths=3, zorder=10,
c="black")
plt.show()
degerler = []
for i in range(len(X)):
degerler.append(
math.sqrt(math.pow(abs(float(X[i][0]) - ekle[0]), 2) + math.pow(abs(float(X[i][1]) - ekle[0]), 2)))
bak = [[max(degerler), 0], [max(degerler), 0], [max(degerler), 0], [max(degerler), 0], [max(degerler), 0]]
for i in range(len(degerler)):
for j in range(len(bak)):
if (degerler[i] < bak[j][0]):
if (degerler[i] != bak[j][0]):
bak[j] = [degerler[i], i]
break
sifir = 0
bir = 0
for a in range(len(bak)):
if (kumeyeri[bak[a][1]] == 1):
bir = bir + 1
else:
sifir = sifir + 1
renk = 0
if (bir > sifir):
renk = 1
plt.plot(ekle[0], ekle[1], colors[renk], markersize=20)
for i in range(len(X)):
plt.plot(X[i][0], X[i][1], colors[kumeyeri[i]], markersize=10)
for i in range(len(merkezler)):
plt.scatter(int(merkezler[i][0]), int(merkezler[i][1]), marker="x", s=70, linewidths=3, zorder=10,
c="black")
plt.savefig('./sonuclar/knn/Knn-1.png')
plt.show()
w, h = self.KNN_sonuc_GV.width() - 5, self.KNN_sonuc_GV.height() - 5
self.KNN_sonuc_GV.setScene(self.show_image('./sonuclar/knn/Knn-1.png', w, h))
# ********************************************* KNN TAB END *********************************************************
# ********************************** K - MEANS TAB BEGINNING *********************************************************
colors = ["g.","r.","b.","y.", " c."]
kumesayisi=3
kumeyeri=[]
XKmeans=[]
def veriYukleKmeans(self):
f = open('./veriler/veri.txt')
X=[]
for i,row in enumerate(f.readlines()):
currentline = row.split(",")
temp=[]
for column_value in currentline:
temp.append(column_value)
X.append(temp)
for i in range(len(X)):
self.kumeyeri.append(int(X[i][0])%self.kumesayisi)
self.XKmeans=X
self.Kmeans_tablo.setColumnCount(2)
self.Kmeans_tablo.setRowCount(len(self.XKmeans)) ##set number of rows
for rowNumber, row in enumerate(self.XKmeans):
self.Kmeans_tablo.setItem(rowNumber, 0, QtGui.QTableWidgetItem(str(row[0])))
self.Kmeans_tablo.setItem(rowNumber, 1, QtGui.QTableWidgetItem(str(row[1])))
    def kumelemeKmeans(self):
        """Run Lloyd's k-means (k = self.kumesayisi, effectively 3 — the
        reassignment below hard-codes three centroids) on self.XKmeans and plot the result."""
        X=self.XKmeans
        devammi=True
        while(devammi):
            # Accumulate [sum_x, sum_y, count] per cluster, then divide -> centroids.
            merkezler=[]
            for i in range(self.kumesayisi):
                merkezler.append([0,0,0])
            for i in range(len(X)):
                merkezler[self.kumeyeri[i]]=[float(merkezler[self.kumeyeri[i]][0])+float(X[i][0]),float(merkezler[self.kumeyeri[i]][1])+float(X[i][1]),int(merkezler[self.kumeyeri[i]][2])+1]
            for i in range(len(merkezler)):
                merkezler[i]=[float(merkezler[i][0])/float(merkezler[i][2]),(merkezler[i][1]/merkezler[i][2]),int(merkezler[i][2])]
            kumeyeriyeni=[]
            print merkezler
            for i in range(len(X)):
                kumeyeriyeni.append(0)
            # Reassign each point to the nearest of the three centroids (Euclidean).
            for i in range(len(X)):
                deger=[0,0]
                deger=[ math.sqrt(math.pow(abs(float(X[i][0])- float(merkezler[0][0])),2)+math.pow(abs(float(X[i][1])- float(merkezler[0][1])),2)) , 0]
                if(deger[0]> math.sqrt(math.pow(abs(float(X[i][0])- float(merkezler[1][0])),2)+math.pow(abs(float(X[i][1])- float(merkezler[1][1])),2))):
                    deger=[math.sqrt(math.pow(abs(float(X[i][0])- float(merkezler[1][0])),2)+math.pow(abs(float(X[i][1])- float(merkezler[1][1])),2)),1]
                if(deger[0]> math.sqrt(math.pow(abs(float(X[i][0])- float(merkezler[2][0])),2)+math.pow(abs(float(X[i][1])- float(merkezler[2][1])),2))):
                    deger=[math.sqrt(math.pow(abs(float(X[i][0])- float(merkezler[2][0])),2)+math.pow(abs(float(X[i][1])- float(merkezler[2][1])),2)),2]
                kumeyeriyeni[i]=deger[1]
            # Converged when no assignment changed.
            if(kumeyeriyeni==self.kumeyeri):
                devammi=False
            else:
                self.kumeyeri=kumeyeriyeni
        for i in range(len(X)):
            plt.plot(X[i][0], X[i][1], self.colors[self.kumeyeri[i]], markersize = 10)
        for i in range(len(merkezler)):
            plt.scatter(int(merkezler[i][0]),int(merkezler[i][1]), marker = "x", s=70, linewidths = 3, zorder = 10,c="Black")
        # Persist the figure, then mirror it inside the tab's graphics view.
        plt.savefig('./sonuclar/kmeans/Kmeans.png')
        plt.show()
        w, h = self.Kmeans_Kumele_GV.width() - 5, self.Kmeans_Kumele_GV.height() - 5
        self.Kmeans_Kumele_GV.setScene(self.show_image('./sonuclar/kmeans/Kmeans.png', w, h))
# ********************************** K - MEANS TAB END *********************************************************
# ********************************** Navie Bayes TAB BEGIGINING *********************************************************
    @QtCore.pyqtSignature("bool")
    def on_NavieBay_btn_clicked(self):
        """Classify the sentence in self.lineEdit as 'spor' or 'ekonomi' with naive Bayes.

        Trains on the small hard-coded corpus below using Laplace (add-one)
        smoothing, prints the intermediate probabilities, and writes the
        verdict to label_14.
        """
        kelime=str(self.lineEdit.text())
        # Count rows of *kume* whose column *index* equals *kelime*.
        def aranacak(kume, kelime, index):
            counter=0
            for i in range(len(kume)):
                if kume[i][index]==kelime:
                    counter+=1
            return counter
        # Count occurrences of *kelime* in the flat list *kume*.
        def aranacak_2(kume, kelime):
            counter=0
            for i in range(len(kume)):
                if kume[i]==kelime:
                    counter+=1
            return counter
        # Build the vocabulary: every distinct word across all sentences,
        # with punctuation in *silinecek* stripped first.
        def kelimeler(kume):
            kelimeler=[]
            silinecek="!@#$.?,"
            for i in range(len(kume)):
                cumle=kume[i][0]
                for char in silinecek:
                    cumle=cumle.replace(char,"")
                parca=cumle.split(' ')
                for c in parca:
                    if aranacak_2(kelimeler, c)==0:
                        kelimeler.append(c)
            return kelimeler
        # Total occurrences of *kelime* inside sentences labelled *kumeci*.
        def arama(kume,kumeci,kelime):
            counter=0
            for i in range(len(kume)):
                if kume[i][1]==kumeci and kume[i][0].count(kelime)>0:
                    counter+=kume[i][0].count(kelime)
            return counter
        # Hard-coded training corpus: [sentence, class label].
        data=[["top, futbol, kondisyon, antrenman.","spor"],
              ["saha futbol fitness voleybol basketbol.","spor"],
              ["penalti, ofsayt,sut,tac, masa tenisi","spor"],
              ["ceza sahasi ,kale, top.","spor"],
              ["enflasyon, deflasyon, komisyon, sermaye, indeks","ekonomi"],
              ["lira, kar, zarar, altin, faiz, hisse","ekonomi"],
              ["bonus , piyasa, euro, tl, para, hesap","ekonomi"],
              ["finans, dolar, gelir","ekonomi"]]
        # Class priors.
        countspor=aranacak(data,"spor",1)
        countekonom=aranacak(data,"ekonomi",1)
        print("ekonomi adet:"+str(countekonom)+" spor Adet:"+str(countspor))
        sporagirlik=float(countspor)/(float(countspor)+float(countekonom))
        ekonomagirlik=float(countekonom)/(float(countspor)+float(countekonom))
        print("spor :"+str(sporagirlik)+" ekonomi :"+str(ekonomagirlik))
        kelimeci=kelimeler(data)
        print(kelimeler(data))
        # Per-word likelihoods for 'spor' with add-one smoothing.
        sportoplam=0
        spordeger=[]
        for i in kelimeci:
            sportoplam+=(arama(data,"spor",i)+1)
        for i in range(len(kelimeci)):
            deger=float(arama(data,"spor",kelimeci[i])+1)/float(sportoplam)
            spordeger.append(deger)
            print(str(kelimeci[i])+" icin "+str(deger))
        # Per-word likelihoods for 'ekonomi' with add-one smoothing.
        ekonomtoplam=0
        ekonomdeger=[]
        for i in kelimeci:
            ekonomtoplam+=(arama(data,"ekonomi",i)+1)
        for i in range(len(kelimeci)):
            deger=float(arama(data,"ekonomi",kelimeci[i])+1)/float(ekonomtoplam)
            ekonomdeger.append(deger)
            print(str(kelimeci[i])+" icin "+str(deger))
        # Multiply the likelihoods of the query words found in the vocabulary.
        c_kelime=kelime.split(" ")
        print(c_kelime)
        sporcarpim=1
        for i in c_kelime:
            for x in range(len(kelimeci)):
                if kelimeci[x]==i:
                    sporcarpim*=spordeger[x]
        ekonomcarpim=1
        for i in c_kelime:
            for x in range(len(kelimeci)):
                if kelimeci[x]==i:
                    ekonomcarpim*=ekonomdeger[x]
        # Posterior (unnormalised): likelihood product times the class prior.
        sporsonuc=sporcarpim*sporagirlik
        ekonomsonuc=ekonomcarpim*ekonomagirlik
        print("spor cumle oran:"+str(sporcarpim)+" Oran:"+str(sporsonuc))
        print("ekonomi cumle oran:"+str(ekonomcarpim)+" Oran:"+str(ekonomsonuc))
        if sporsonuc<ekonomsonuc:
            print("Kelime ekonomi")
            self.label_14.setText("Kelime Ekonomi")
        if sporsonuc>ekonomsonuc:
            print("Kelime spor")
            self.label_14.setText("Kelime spor")
        # No query word matched the vocabulary at all.
        if sporcarpim==1 and ekonomcarpim==1:
            self.label_14.setText("Kelime yok")
# ********************************** Navie Bayes TAB END *********************************************************
# ********************************** NORMALiZASTON TAB BEGIGINING **************************************************
def dosyaYuklenormalize(self):
f = open('./veriler/diabetes.data')
X=[]
for i,row in enumerate(f.readlines()):
currentline = row.split(",")
temp=[]
for column_value in currentline:
temp.append(column_value)
X.append(temp)
X=np.array(X)
self.X=X[:,:8]
self.y=X[:,8]
self.veriYukle(self.X,self.y,self.Norm_tablo_veriYukle)
def veriYukle(self,X,y,tablonormalize):
num_rows=len(X)
tablonormalize.clear()
tablonormalize.setColumnCount(8)
tablonormalize.setRowCount(num_rows) ##set number of rows
for rowNumber,row in enumerate(X):
#row[1].encode("utf-8")
tablonormalize.setItem(rowNumber, 0, QtGui.QTableWidgetItem(str(row[0])))
tablonormalize.setItem(rowNumber, 1, QtGui.QTableWidgetItem(str(row[1])))
tablonormalize.setItem(rowNumber, 2, QtGui.QTableWidgetItem(str(row[2])))
tablonormalize.setItem(rowNumber, 3, QtGui.QTableWidgetItem(str(row[3])))
tablonormalize.setItem(rowNumber, 4, QtGui.QTableWidgetItem(str(row[4])))
tablonormalize.setItem(rowNumber, 5, QtGui.QTableWidgetItem(str(row[5])))
tablonormalize.setItem(rowNumber, 6, QtGui.QTableWidgetItem(str(row[6])))
tablonormalize.setItem(rowNumber, 7, QtGui.QTableWidgetItem(str(row[7])))
for rowNumber,row in enumerate(y):
tablonormalize.setItem(rowNumber, 8, QtGui.QTableWidgetItem(str(row)))
    def normalize_MinMax(self):
        """Min-max-normalize each of the 8 feature columns in place and show the result.

        NOTE(review): self.X holds strings, so max()/min() compare
        lexicographically before float() is applied — verify numeric intent.
        A column with max == min would divide by zero.
        """
        for s in range(0,8):
            first_column=self.X[:,s]
            max_value=float(max(first_column))
            min_value=float(min(first_column))
            print "max value:",max_value," min value:",min_value
            num_rows=len(self.X)
            for i,value in enumerate(first_column):
                # (x - min) / (max - min), rounded to 2 decimals.
                normalize_value=((float(value)-min_value)/(max_value-min_value))
                first_column[i]=round(normalize_value,2)
            self.Norm_tablo_MinMax.setColumnCount(8)
            self.Norm_tablo_MinMax.setRowCount(num_rows)
            for rowNumber,row in enumerate(first_column):
                self.Norm_tablo_MinMax.setItem(rowNumber, s, QtGui.QTableWidgetItem(str(row)))
def normalize_Z_Score(self):
for s in range(0,8):
colm= np.array(self.X[:,s]).astype(np.float)
ui=np.mean(colm)
ai=np.std(colm)
print"Aritmetik ortalama:",ui," standart sapma:",ai
num_rows=len(self.X)
for i,value in enumerate(colm):
normalize_zscor=float(value)-ui/ai
colm[i]=float(round(normalize_zscor,3))
self.Norm_tablo_Zscore.setColumnCount(8)
self.Norm_tablo_Zscore.setRowCount(num_rows)
for rowNumber,row in enumerate(colm):
self.Norm_tablo_Zscore.setItem(rowNumber, s, QtGui.QTableWidgetItem(str(row)))
def normalizeMedian(self):
for s in range(0,8):
column=np.array(self.X[:,s]).astype(np.float)
med=np.median(column)
print "Medyan: ",med
num_rows=len(self.X)
for i,value in enumerate(column):
normalize_medyan=float(value)/med
column[i]=float(round(normalize_medyan,3))
self.Norm_tablo_Median.setColumnCount(8)
self.Norm_tablo_Median.setRowCount(num_rows)
for rowNumber,row in enumerate(column):
self.Norm_tablo_Median.setItem(rowNumber, s, QtGui.QTableWidgetItem(str(row)))
# ********************************** NORMALiZASTON TAB END **************************************************
# ********************************** RANDOM FOREST TAB BEGIGINING **************************************************
def RandomForest(self):
clf = RandomForestClassifier(max_depth=2, random_state=0)
clf.fit(self.X_train, self.y_train)
results=clf.predict(self.X_test)
self.label_17.setText(str(round(accuracy_score(self.y_test, results)*100,2)))
# ********************************** RANDOM FOREST TAB END *********************************************************
# ***************************************** TRAIN - TEST TAB *****************************************************
def tum_veri_ekle(self):
f = open('./veriler/veri.data')
X = []
self.Xveri = []
self.Yveri = []
for i, row in enumerate(f.readlines()):
currentline = row.split(",")
temp = []
for column_value in currentline:
temp.append(column_value)
X.append(temp)
self.Xveri.append(temp[:8])
self.Yveri.append(temp[8])
self.TT_tummu_Table.clear()
self.TT_tummu_Table.setColumnCount(9)
self.TT_tummu_Table.setRowCount(len(X)) ##set number of rows
for rowNumber, row in enumerate(X):
self.TT_tummu_Table.setItem(rowNumber, 0, QtGui.QTableWidgetItem(str(row[0])))
self.TT_tummu_Table.setItem(rowNumber, 1, QtGui.QTableWidgetItem(str(row[1])))
self.TT_tummu_Table.setItem(rowNumber, 2, QtGui.QTableWidgetItem(str(row[2])))
self.TT_tummu_Table.setItem(rowNumber, 3, QtGui.QTableWidgetItem(str(row[3])))
self.TT_tummu_Table.setItem(rowNumber, 4, QtGui.QTableWidgetItem(str(row[4])))
self.TT_tummu_Table.setItem(rowNumber, 5, QtGui.QTableWidgetItem(str(row[5])))
self.TT_tummu_Table.setItem(rowNumber, 6, QtGui.QTableWidgetItem(str(row[6])))
self.TT_tummu_Table.setItem(rowNumber, 7, QtGui.QTableWidgetItem(str(row[7])))
self.TT_tummu_Table.setItem(rowNumber, 8, QtGui.QTableWidgetItem(str(row[8])))
def bol_veri_tt(self):
yuzde=float(float(self.TT_comboBox.currentText())/100)
self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(self.Xveri, self.Yveri, test_size=yuzde, random_state=42)
self.TT_train_Table.clear()
self.TT_train_Table.setColumnCount(8)
self.TT_train_Table.setRowCount(len(self.X_train)) ##set number of rows TT_train_Table
for rowNumber, row in enumerate(self.X_train):
self.TT_train_Table.setItem(rowNumber, 0, QtGui.QTableWidgetItem(str(row[0])))
self.TT_train_Table.setItem(rowNumber, 1, QtGui.QTableWidgetItem(str(row[1])))
self.TT_train_Table.setItem(rowNumber, 2, QtGui.QTableWidgetItem(str(row[2])))
self.TT_train_Table.setItem(rowNumber, 3, QtGui.QTableWidgetItem(str(row[3])))
self.TT_train_Table.setItem(rowNumber, 4, QtGui.QTableWidgetItem(str(row[4])))
self.TT_train_Table.setItem(rowNumber, 5, QtGui.QTableWidgetItem(str(row[5])))
self.TT_train_Table.setItem(rowNumber, 6, QtGui.QTableWidgetItem(str(row[6])))
self.TT_train_Table.setItem(rowNumber, 7, QtGui.QTableWidgetItem(str(row[7])))
self.TT_test_Table.clear()
self.TT_test_Table.setColumnCount(8)
self.TT_test_Table.setRowCount(len(self.X_test)) ##set number of rows
for rowNumber, row in enumerate(self.X_test):
self.TT_test_Table.setItem(rowNumber, 0, QtGui.QTableWidgetItem(str(row[0])))
self.TT_test_Table.setItem(rowNumber, 1, QtGui.QTableWidgetItem(str(row[1])))
self.TT_test_Table.setItem(rowNumber, 2, QtGui.QTableWidgetItem(str(row[2])))
self.TT_test_Table.setItem(rowNumber, 3, QtGui.QTableWidgetItem(str(row[3])))
self.TT_test_Table.setItem(rowNumber, 4, QtGui.QTableWidgetItem(str(row[4])))
self.TT_test_Table.setItem(rowNumber, 5, QtGui.QTableWidgetItem(str(row[5])))
self.TT_test_Table.setItem(rowNumber, 6, QtGui.QTableWidgetItem(str(row[6])))
self.TT_test_Table.setItem(rowNumber, 7, QtGui.QTableWidgetItem(str(row[7])))
# ***************************************** TRAIN - TEST TAB END *****************************************************
# ***************************************** PARKINSON TAB *****************************************************
    def butunbtn(self):
        # Use the combined (all tasks) Parkinson data set, split 70/30 train/test.
        self.X_trainp, self.X_tesptp, self.y_trainp, self.y_testp =train_test_split(self.Topalamx,self.Topalamy , test_size=0.30, random_state=42)
def dstbtn(self):
self.X_trainp, self.X_tesptp, self.y_trainp, self.y_testp =self.DSTx, self.DSTxn,self.DSTy,self.DSTyn
def stcpbtn(self):
self.X_trainp, self.X_tesptp, self.y_trainp, self.y_testp =self.STCPx, self.STCPxn,self.STCPy,self.STCPyn
def RandomFogren(self):
clf = RandomForestClassifier(max_depth=None, random_state=0)
clf.fit(self.X_trainp,self.y_trainp)
results=clf.predict(self.X_tesptp)
self.label_64.setText("Basari Yuzdesi :"+str(float("{0:.2f}".format(accuracy_score(self.y_testp,results)*100))))
def sstbtn(self):
self.X_trainp, self.X_tesptp, self.y_trainp, self.y_testp=self.SSTx, self.SSTxn,self.SSTy,self.SSTyn
def parkisyonveri(self):
pathparkinson="./hw_dataset/parkinson/"
pathcontrol="./hw_dataset/control/"
pathparkinsonnew="./new_dataset/parkinson/"
SST=[] #0
DST=[] #1
STCP=[] #2
SST_train=[] #0
DST_train=[] #1
STCP_train=[] #2
SST_test=[] #0
DST_test=[] #1
STCP_test=[] #2
dosyalar=os.listdir(pathparkinson)
for dosya in dosyalar:
f = open(pathparkinson+dosya)
for i,row in enumerate(f.readlines()):
currentline = row.split(";")
temp=[]
for column_value in currentline:
temp.append(column_value)
if(int(temp[len(temp)-1])==0):
temp.remove(temp[len(temp)-1])
SST_train.append(temp[:6])
SST_test.append(1)
temp.append(1)
SST.append(temp)
elif(int(temp[len(temp)-1])==1):
temp.remove(temp[len(temp)-1])
DST_train.append(temp[:6])
DST_test.append(1)
temp.append(1)
DST.append(temp)
elif(int(temp[len(temp)-1])==2):
temp.remove(temp[len(temp)-1])
STCP_train.append(temp[:6])
STCP_test.append(1)
temp.append(1)
STCP.append(temp)
dosyalar=os.listdir(pathcontrol)
for dosya in dosyalar:
f = open(pathcontrol+dosya)
for i,row in enumerate(f.readlines()):
currentline = row.split(";")
temp=[]
for column_value in currentline:
temp.append(column_value)
if(int(temp[len(temp)-1])==0):
temp.remove(temp[len(temp)-1])
SST_train.append(temp[:6])
SST_test.append(0)
temp.append(0)
SST.append(temp)
elif(int(temp[len(temp)-1])==1):
temp.remove(temp[len(temp)-1])
DST_train.append(temp[:6])
DST_test.append(0)
temp.append(0)
DST.append(temp)
elif(int(temp[len(temp)-1])==2):
temp.remove(temp[len(temp)-1])
STCP_train.append(temp[:6])
STCP_test.append(0)
temp.append(0)
STCP.append(temp)
SST_new=[] #0
DST_new=[] #1
STCP_new=[] #2
SST_train_new=[] #0
DST_train_new=[] #1
STCP_train_new=[] #2
SST_test_new=[] #0
DST_test_new=[] #1
STCP_test_new=[] #2
dosyalar=os.listdir(pathparkinsonnew)
for dosya in dosyalar:
f = open(pathparkinsonnew+dosya)
for i,row in enumerate(f.readlines()):
currentline = row.split(";")
temp=[]
for column_value in currentline:
temp.append(column_value)
if(int(temp[len(temp)-1])==0):
temp.remove(temp[len(temp)-1])
SST_train_new.append(temp[:6])
SST_test_new.append(1)
temp.append(1)
SST_new.append(temp)
elif(int(temp[len(temp)-1])==1):
temp.remove(temp[len(temp)-1])
DST_train_new.append(temp[:6])
DST_test_new.append(1)
temp.append(1)
DST_new.append(temp)
elif(int(temp[len(temp)-1])==2):
temp.remove(temp[len(temp)-1])
STCP_train_new.append(temp[:6])
STCP_test_new.append(1)
temp.append(1)
STCP_new.append(temp)
self.SSTx=SST_train
self.SSTy=SST_test
self.SSTxn=SST_train_new
self.SSTyn=SST_test_new
self.DSTx=DST_train
self.DSTy=DST_test
self.DSTxn=DST_train_new
self.DSTyn=DST_test_new
self.STCPx=STCP_train
self.STCPy=STCP_test
self.STCPxn=STCP_train_new
self.STCPyn=STCP_test_new
toplam_train=[]
toplam_test=[]
toplam_train.extend(SST_train)
toplam_test.extend(STCP_test)
toplam_train.extend(STCP_train_new)
toplam_test.extend(SST_test_new)
toplam_train.extend(DST_train)
toplam_test.extend(DST_test)
toplam_train.extend(DST_train_new)
toplam_test.extend(DST_test_new)
toplam_train.extend(STCP_train)
toplam_test.extend(SST_test)
toplam_train.extend(SST_train_new)
toplam_test.extend(STCP_test_new)
self.Topalamx=toplam_train
self.Topalamy=toplam_test
self.tableWidget_1_9.clear()
self.tableWidget_1_9.setColumnCount(7)
self.tableWidget_1_9.setRowCount(len(SST_train)) ##set number of rows
for rowNumber,row in enumerate(SST_train):
self.tableWidget_1_9.setItem(rowNumber, 0, QtGui.QTableWidgetItem(str(row[0])))
self.tableWidget_1_9.setItem(rowNumber, 1, QtGui.QTableWidgetItem(str(row[1])))
self.tableWidget_1_9.setItem(rowNumber, 2, QtGui.QTableWidgetItem(str(row[2])))
self.tableWidget_1_9.setItem(rowNumber, 3, QtGui.QTableWidgetItem(str(row[3])))
self.tableWidget_1_9.setItem(rowNumber, 4, QtGui.QTableWidgetItem(str(row[4])))
self.tableWidget_1_9.setItem(rowNumber, 5, QtGui.QTableWidgetItem(str(row[5])))
self.tableWidget_1_9.setItem(rowNumber, 6, QtGui.QTableWidgetItem(str(SST_test[rowNumber])))
self.tableWidget_1_10.clear()
self.tableWidget_1_10.setColumnCount(7)
self.tableWidget_1_10.setRowCount(len(DST_train)) ##set number of rows
for rowNumber,row in enumerate(DST_train):
self.tableWidget_1_10.setItem(rowNumber, 0, QtGui.QTableWidgetItem(str(row[0])))
self.tableWidget_1_10.setItem(rowNumber, 1, QtGui.QTableWidgetItem(str(row[1])))
self.tableWidget_1_10.setItem(rowNumber, 2, QtGui.QTableWidgetItem(str(row[2])))
self.tableWidget_1_10.setItem(rowNumber, 3, QtGui.QTableWidgetItem(str(row[3])))
self.tableWidget_1_10.setItem(rowNumber, 4, QtGui.QTableWidgetItem(str(row[4])))
self.tableWidget_1_10.setItem(rowNumber, 5, QtGui.QTableWidgetItem(str(row[5])))
self.tableWidget_1_10.setItem(rowNumber, 6, QtGui.QTableWidgetItem(str(DST_test[rowNumber])))
self.tableWidget_1_14.clear()
self.tableWidget_1_14.setColumnCount(7)
self.tableWidget_1_14.setRowCount(len(STCP_train_new)) ##set number of rows
for rowNumber,row in enumerate(STCP_train_new):
self.tableWidget_1_14.setItem(rowNumber, 0, QtGui.QTableWidgetItem(str(row[0])))
self.tableWidget_1_14.setItem(rowNumber, 1, QtGui.QTableWidgetItem(str(row[1])))
self.tableWidget_1_14.setItem(rowNumber, 2, QtGui.QTableWidgetItem(str(row[2])))
self.tableWidget_1_14.setItem(rowNumber, 3, QtGui.QTableWidgetItem(str(row[3])))
self.tableWidget_1_14.setItem(rowNumber, 4, QtGui.QTableWidgetItem(str(row[4])))
self.tableWidget_1_14.setItem(rowNumber, 5, QtGui.QTableWidgetItem(str(row[5])))
self.tableWidget_1_14.setItem(rowNumber, 6, QtGui.QTableWidgetItem(str(STCP_test_new[rowNumber])))
self.tableWidget_1_12.clear()
self.tableWidget_1_12.setColumnCount(7)
self.tableWidget_1_12.setRowCount(len(SST_train_new)) ##set number of rows
for rowNumber,row in enumerate(SST_train_new):
self.tableWidget_1_12.setItem(rowNumber, 0, QtGui.QTableWidgetItem(str(row[0])))
self.tableWidget_1_12.setItem(rowNumber, 1, QtGui.QTableWidgetItem(str(row[1])))
self.tableWidget_1_12.setItem(rowNumber, 2, QtGui.QTableWidgetItem(str(row[2])))
self.tableWidget_1_12.setItem(rowNumber, 3, QtGui.QTableWidgetItem(str(row[3])))
self.tableWidget_1_12.setItem(rowNumber, 4, QtGui.QTableWidgetItem(str(row[4])))
self.tableWidget_1_12.setItem(rowNumber, 5, QtGui.QTableWidgetItem(str(row[5])))
self.tableWidget_1_12.setItem(rowNumber, 6, QtGui.QTableWidgetItem(str(SST_test_new[rowNumber])))
self.tableWidget_1_13.clear()
self.tableWidget_1_13.setColumnCount(7)
self.tableWidget_1_13.setRowCount(len(DST_train_new)) ##set number of rows
for rowNumber,row in enumerate(DST_train_new):
self.tableWidget_1_13.setItem(rowNumber, 0, QtGui.QTableWidgetItem(str(row[0])))
self.tableWidget_1_13.setItem(rowNumber, 1, QtGui.QTableWidgetItem(str(row[1])))
self.tableWidget_1_13.setItem(rowNumber, 2, QtGui.QTableWidgetItem(str(row[2])))
self.tableWidget_1_13.setItem(rowNumber, 3, QtGui.QTableWidgetItem(str(row[3])))
self.tableWidget_1_13.setItem(rowNumber, 4, QtGui.QTableWidgetItem(str(row[4])))
self.tableWidget_1_13.setItem(rowNumber, 5, QtGui.QTableWidgetItem(str(row[5])))
self.tableWidget_1_13.setItem(rowNumber, 6, QtGui.QTableWidgetItem(str(DST_test_new[rowNumber])))
self.tableWidget_1_11.clear()
self.tableWidget_1_11.setColumnCount(7)
self.tableWidget_1_11.setRowCount(len(STCP_train)) ##set number of rows
for rowNumber,row in enumerate(STCP_train):
self.tableWidget_1_11.setItem(rowNumber, 0, QtGui.QTableWidgetItem(str(row[0])))
self.tableWidget_1_11.setItem(rowNumber, 1, QtGui.QTableWidgetItem(str(row[1])))
self.tableWidget_1_11.setItem(rowNumber, 2, QtGui.QTableWidgetItem(str(row[2])))
self.tableWidget_1_11.setItem(rowNumber, 3, QtGui.QTableWidgetItem(str(row[3])))
self.tableWidget_1_11.setItem(rowNumber, 4, QtGui.QTableWidgetItem(str(row[4])))
self.tableWidget_1_11.setItem(rowNumber, 5, QtGui.QTableWidgetItem(str(row[5])))
self.tableWidget_1_11.setItem(rowNumber, 6, QtGui.QTableWidgetItem(str(STCP_test[rowNumber])))
print "okuma tamamlandi"
# ***************************************** PARKINSON TAB END *****************************************************
|
# -*- coding: utf-8 -*-
__license__ = """
This file is part of **janitoo** project https://github.com/bibi21000/janitoo.
License : GPL(v3)
**janitoo** is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
**janitoo** is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with janitoo. If not, see http://www.gnu.org/licenses.
"""
__copyright__ = "Copyright © 2013-2014-2015-2016 Sébastien GALLET aka bibi21000"
__author__ = 'Sébastien GALLET aka bibi21000'
__email__ = 'bibi21000@gmail.com'
import sys, os
import shutil
import mock
from janitoo_nosetests import JNTTBase
from janitoo.dhcp import HeartbeatMessage
from janitoo.utils import json_dumps
from janitoo.utils import HADD_SEP
from janitoo.utils import TOPIC_NODES, TOPIC_NODES_REPLY
from janitoo.utils import TOPIC_BROADCAST_REPLY
from janitoo.utils import TOPIC_VALUES_USER, TOPIC_VALUES_CONFIG, TOPIC_VALUES_SYSTEM
import janitoo_packaging.packaging
class JNTTPackaging(JNTTBase):
    """Base test case for checking janitoo package metadata."""

    setuppy = 'setup'
    directory = '.'

    def setUp(self):
        JNTTBase.setUp(self)

    def tearDown(self):
        JNTTBase.tearDown(self)

    def create_package(self):
        """Build a Package object for the configured setup.py and directory."""
        return janitoo_packaging.packaging.Package(setuppy=self.setuppy, directory=self.directory)

    def assertComponent(self, component):
        """Assert that *component* is among the package's declared components."""
        self.assertTrue(component in self.create_package().get_janitoo_components())

    def assertThread(self, thread):
        """Assert that *thread* is among the package's declared threads."""
        self.assertTrue(thread in self.create_package().get_janitoo_threads())

    def assertValue(self, value):
        """Assert that *value* is among the package's declared values."""
        self.assertTrue(value in self.create_package().get_janitoo_values())
class JNTTPackagingCommon(object):
    """Mixin with tests shared by all package test cases."""

    def test_001_check_package(self):
        """The package must check out with no warnings and no errors."""
        package = self.create_package()
        warnings, errors = package.check_package()
        self.assertEqual(warnings, [])
        self.assertEqual(errors, [])
|
from django.shortcuts import render
def search(request):
    """Render the search page.

    NOTE(review): passing ``locals()`` as the template context is fragile —
    it exposes every local variable (currently just ``request``). An
    explicit context dict would be safer; left unchanged to avoid altering
    what the template receives.
    """
    return render(request, 'search/search.html', locals())
|
# Things you should be able to do.
animals = ['cat', 'dog', 'fish', 'gorilla', 'baloonicorn', 'monkey', 'cheese']
# Materialized as a list so it behaves identically under Python 2 and 3
# (Python 3's range() is a lazy object, not a list).
numbers = list(range(1, 10))
# Write a function that takes a list and returns a new list with only the odd numbers.
def all_odd(some_list):
    """Return a new list containing only the odd numbers from some_list.

    Bug fix: the original tested the parity of the *index* (i % 2 == 1),
    so it returned the elements at odd positions, not the odd values.
    """
    return [n for n in some_list if n % 2 == 1]
print all_odd(numbers)
# Write a function that takes a list and returns a new list with only the even numbers.
def all_even(some_list):
    """Return a new list containing only the even numbers from some_list.

    Bug fix: the original tested the parity of the *index* (i % 2 == 0),
    so it returned the elements at even positions, not the even values.
    """
    return [n for n in some_list if n % 2 == 0]
print all_even(numbers)
# Write a function that takes a list of strings and returns a new list with all strings of length 4 or greater.
def long_words(word_list):
    """Return the words from word_list whose length is 4 or more.

    Rewritten as a comprehension instead of indexing via range(len(...)).
    """
    return [word for word in word_list if len(word) >= 4]
print long_words(animals)
# Write a function that finds the smallest element in a list of integers and returns it.
def smallest(some_list):
    """Return the smallest element of some_list.

    Raises IndexError on an empty list (same as the original).
    Iterates the values directly instead of indexing via range(len(...)).
    """
    minimum = some_list[0]
    for n in some_list:
        if n < minimum:
            minimum = n
    return minimum
print smallest(numbers)
# Write a function that finds the largest element in a list of integers and returns it.
def largest(some_list):
    """Return the largest element of some_list.

    Raises IndexError on an empty list (same as the original).
    Iterates the values directly instead of indexing via range(len(...)).
    """
    maximum = some_list[0]
    for n in some_list:
        if n > maximum:
            maximum = n
    return maximum
print largest(numbers)
# Write a function that takes a list of numbers and returns a new list of all those numbers divided by two.
def halvesies(some_list):
    """Return a new list with every number in some_list divided by two.

    Uses the same ``/`` operator as the original, so division semantics
    (true division on Python 3) are unchanged.
    """
    return [n / 2 for n in some_list]
print halvesies(numbers)
# Write a function that takes a list of words and returns a list of all the lengths of those words.
def word_lengths(word_list):
    """Return a list of the length of each word in word_list."""
    return [len(word) for word in word_list]
print word_lengths(animals)
# Write a function (using iteration) that sums all the numbers in a list.
def sum_numbers(numbers):
    """Return the sum of all numbers in the list (0 for an empty list)."""
    total = 0
    # Iterate values directly instead of indexing via range(len(...)).
    for n in numbers:
        total += n
    return total
print sum_numbers(numbers)
# Write a function that multiplies all the numbers in a list together.
def mult_numbers(numbers):
    """Return the product of all numbers in the list (1 for an empty list)."""
    product = 1
    # Iterate values directly instead of indexing via range(len(...)).
    for n in numbers:
        product *= n
    return product
print mult_numbers(numbers)
# Write a function that joins all the strings in a list together (without using the join method) and returns a single string.
def join_strings(string_list):
    """Concatenate all strings in string_list into one string.

    The exercise forbids str.join, so this uses repeated concatenation
    (quadratic for large inputs, fine for the exercise); iterates the
    values directly instead of indexing via range(len(...)).
    """
    big_string = ""
    for s in string_list:
        big_string += s
    return big_string
print join_strings(animals)
# Write a function that takes a list of integers and returns the average (without using the avg method)
def average(numbers):
    """Return the average of the numbers in the list.

    Raises ZeroDivisionError on an empty list (same as the original,
    where the counter stayed 0). The original's counter was just
    len(numbers), so sum/len is equivalent.
    """
    return sum(numbers) / len(numbers)
print average(numbers)
|
# This method deliberately raises an exception that propagates unhandled to the caller.
def third_method(base_num):
    """Always raise, demonstrating an exception escaping to the caller.

    Args:
        base_num: unused; kept for interface compatibility.

    Raises:
        Exception: always (fixed typo 'hanppened' -> 'happened' in the message).
    """
    raise Exception('An error happened when generating the class id!!!')
|
from .dt import (
DatetimeDescription,
dtloc2pos,
)
from .generators import (
CustomCurve,
CustomTimedCurve,
timeseries,
)
from .search import (
closest,
previous,
)
from .tracking import trackedfunc
# matplotlib is an optional dependency: the plotting helpers are exported
# only when it is importable; otherwise they are silently unavailable.
try:
    import matplotlib
except ModuleNotFoundError:
    pass
else:
    from .plotting import (
        decorate,
        DFT_FONT_PARAMS,
        DFT_PARAMS,
        MultiPlotIterator,
        PlotDesigner,
        plotiter,
    )
|
# This program says hello
print("Hello")
print("how many money you have?")
money_amount = input()
print("The number of money is " + money_amount)
|
from flask import Flask, render_template, request, session
from datetime import datetime
from flask_sqlalchemy import SQLAlchemy
from flask_mail import Mail
from werkzeug.utils import secure_filename, redirect
import os
import random
import json
app = Flask(__name__)
# Deployment parameters (upload paths, DB URL, mail credentials) come
# from config.json, loaded once at import time.
with open('config.json') as c:
    params = json.load(c)["params"]
# NOTE(review): hard-coded secret key — should come from config/env, not code.
app.secret_key = "super-secret-key"
app.config['cv_path'] = params['cv_path']
app.config['dp_path'] = params['dp_path']
app.config['SQLALCHEMY_DATABASE_URI'] = params['local_url']
# Gmail SMTP over SSL; credentials are taken from config.json.
app.config.update(
    MAIL_SERVER="smtp.gmail.com",
    MAIL_PORT="465",
    MAIL_USE_SSL=True,
    MAIL_USERNAME=params['email'],
    MAIL_PASSWORD=params['password']
)
mail = Mail(app)
db = SQLAlchemy(app)
class Feedback(db.Model):
    """Visitor feedback left via the contact page.

    status stays 'N' until an admin replies (set to 'Y' in the display view).
    """
    sno = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(40), unique=False, nullable=False)
    Email_Id = db.Column(db.String(40), unique=False, nullable=False)
    Contact_no = db.Column(db.String(40), unique=False, nullable=False)
    Feedback = db.Column(db.String(40), unique=False, nullable=False)
    # Bug fix: pass the callable (datetime.now, not datetime.now()) so the
    # default is evaluated per row at insert time, not once at import time.
    Date = db.Column(db.String(40), unique=False, nullable=True, default=datetime.now)
    status = db.Column(db.String(40), unique=False, nullable=False, default='N')
class Enquiry(db.Model):
    """A visitor enquiry submitted via the enquiry page.

    status stays 'N' until an admin replies (set to 'Y' in the display view).
    """
    sno = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(40), unique=False, nullable=False)
    cno = db.Column(db.String(20), unique=False, nullable=False)
    email = db.Column(db.String(20), unique=False, nullable=False)
    gen = db.Column(db.String(6), unique=False, nullable=False)
    city = db.Column(db.String(30), unique=False, nullable=False)
    enquiry = db.Column(db.String(100), unique=False, nullable=False)
    # Bug fix: pass the callable so the default is evaluated per insert,
    # not once at import time.
    date = db.Column(db.String(20), unique=False, nullable=True, default=datetime.now)
    status = db.Column(db.String(1), unique=False, nullable=False, default='N')
class Career(db.Model):
    """A job application submitted via the career page (CV stored on disk).

    status stays 'N' until an admin replies (set to 'Y' in the display view).
    """
    sno = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(30), unique=False, nullable=False)
    email = db.Column(db.String(40), unique=False, nullable=False)
    contact = db.Column(db.String(40), unique=False, nullable=False)
    gender = db.Column(db.String(8), unique=False, nullable=False)
    address = db.Column(db.String(40), unique=False, nullable=False)
    # File name of the uploaded CV under app.config['cv_path'].
    cv = db.Column(db.String(40), unique=False, nullable=False)
    city = db.Column(db.String(8), unique=False, nullable=False)
    # Bug fix: pass the callable so the default is evaluated per insert,
    # not once at import time.
    date = db.Column(db.String(40), unique=False, nullable=True, default=datetime.now)
    status = db.Column(db.String(40), unique=False, nullable=False, default='N')
class Course(db.Model):
    """A course offered on the site, managed from the admin course view."""
    course_code = db.Column(db.Integer, primary_key=True)
    course_name = db.Column(db.String(40), unique=False, nullable=False)
    fees = db.Column(db.String(40), unique=False, nullable=False)
    duration = db.Column(db.String(40), unique=False, nullable=False)
    career = db.Column(db.String(40), unique=False, nullable=False)
    module = db.Column(db.String(40), unique=False, nullable=False)
class Stu_course(db.Model):
    """A course a student is enrolled in (copied from Course in the
    application-confirmation flow; sno matches the student's Member/Login sno)."""
    sno = db.Column(db.Integer,primary_key=True)
    ccode = db.Column(db.String(40), unique=False, nullable=False)
    cname = db.Column(db.String(40), unique=False, nullable=False)
    email = db.Column(db.String(40), unique=False, nullable=False)
    fee = db.Column(db.String(40), unique=False, nullable=False)
    duration = db.Column(db.String(40), unique=False, nullable=False)
    career = db.Column(db.String(40), unique=False, nullable=False)
    module = db.Column(db.String(40), unique=False, nullable=False)
class Application(db.Model):
    """A pending admission application; on confirmation it is converted
    into Member + Login + Stu_course rows and deleted (application view)."""
    sno = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(30), unique=False, nullable=False)
    email = db.Column(db.String(40), unique=False, nullable=False)
    contact = db.Column(db.String(40), unique=False, nullable=False)
    gender = db.Column(db.String(8), unique=False, nullable=False)
    course = db.Column(db.String(40), unique=False, nullable=False)
    date = db.Column(db.String(8), unique=False, nullable=False)
    bank = db.Column(db.String(40), unique=False, nullable=False)
    ddno = db.Column(db.String(40), unique=False, nullable=False)
class Member(db.Model):
    """An enrolled student, created from an Application on confirmation.

    status is toggled 'active'/'inactive' from the admin status view.
    """
    sno = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(30), unique=False, nullable=False)
    email = db.Column(db.String(40), unique=False, nullable=False)
    contact = db.Column(db.String(40), unique=False, nullable=False)
    gender = db.Column(db.String(8), unique=False, nullable=False)
    course = db.Column(db.String(40), unique=False, nullable=False)
    date = db.Column(db.String(8), unique=False, nullable=False)
    bank = db.Column(db.String(40), unique=False, nullable=False)
    ddno = db.Column(db.String(40), unique=False, nullable=False)
    # Profile-picture file name under app.config['dp_path'].
    image = db.Column(db.String(40), unique=False, nullable=True)
    address = db.Column(db.String(100), unique=False, nullable=True)
    status = db.Column(db.String(40), unique=False, nullable=False, default='active')
class Login(db.Model):
    """Login credentials; status 'S' = student (default), 'A' = admin
    (checked in the login view). NOTE(review): passwords are stored in
    plain text — they should be hashed."""
    sno = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(40), unique=False, nullable=False)
    user_id = db.Column(db.String(40), unique=False, nullable=False)
    password = db.Column(db.String(40), unique=False, nullable=False)
    status = db.Column(db.String(8), unique=False, nullable=False, default='S')
class Profile(db.Model):
    """A user profile record.

    NOTE(review): not referenced by any of the routes visible in this
    file (the profile views use Member) — confirm whether it is still used.
    """
    sno = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(40), unique=False, nullable=False)
    email = db.Column(db.String(40), unique=False, nullable=False)
    contact = db.Column(db.String(40), unique=False, nullable=False)
    address = db.Column(db.String(40), unique=False, nullable=False)
    image= db.Column(db.String(40), unique=False, nullable=False)
class Questions(db.Model):
    """A multiple-choice question belonging to a course's question bank."""
    sno = db.Column(db.Integer, primary_key=True)
    ques = db.Column(db.String(1000), unique=False, nullable=False)
    option1 = db.Column(db.String(20), unique=False, nullable=False)
    option2 = db.Column(db.String(20), unique=False, nullable=False)
    option3 = db.Column(db.String(6), unique=False, nullable=False)
    option4 = db.Column(db.String(30), unique=False, nullable=False)
    correct = db.Column(db.String(100), unique=False, nullable=False)
    course = db.Column(db.String(30), unique=False, nullable=False)
class Result(db.Model):
    """A stored test result; ``id`` is the logged-in user's sno
    (session['id'], set from Login.sno at login)."""
    sno = db.Column(db.Integer, primary_key=True)
    id = db.Column(db.Integer, unique=False, nullable=False)
    attempted = db.Column(db.String(200), unique=False, nullable=False)
    total = db.Column(db.String(20), unique=False, nullable=False)
    correct = db.Column(db.String(20), unique=False, nullable=False)
    wrong = db.Column(db.String(6), unique=False, nullable=False)
    unanswered = db.Column(db.String(30), unique=False, nullable=False)
    grade = db.Column(db.String(100), unique=False, nullable=False)
    percent = db.Column(db.String(30), unique=False, nullable=False)
    result = db.Column(db.String(30), unique=False, nullable=False)
    course_code = db.Column(db.String(20), unique=False, nullable=False)
    course_name = db.Column(db.String(30), unique=False, nullable=False)
def get_password(n):
    """Return a random password of n characters.

    The alphabet deliberately omits easily-confused glyphs (0/O, 1/l/i).

    Bug fix: the original used random.randint(0, len(s)) whose upper bound
    is inclusive, so it could raise IndexError (~1 in 55 per character).
    Also avoids shadowing the builtin ``str``.
    NOTE(review): for real credentials prefer the ``secrets`` module.
    """
    alphabet = "qwertyupkjhgfdsazxcvbnmQWERTYUPLKJHGFDSAZXCVBNM23456789"
    return ''.join(random.choice(alphabet) for _ in range(n))
@app.route('/')
def home():
    """Landing page: list every course."""
    course_rows = Course.query.all()
    return render_template('index.html', data=course_rows, pid=0)
@app.route('/about')
def about():
    """About page."""
    # Dropped the unused local ``msg`` and a stray semicolon.
    return render_template('about.html', pid=3)
@app.route('/contact', methods=['GET', 'POST'])
def contact():
    """Contact page: on POST, store the visitor's feedback.

    Fix: the success message was computed but never handed to the
    template; it is now passed as ``msg`` so the user sees confirmation.
    """
    msg = ''
    pid = 4
    if request.method == 'POST':
        name = request.form.get('name')
        email = request.form.get('email')
        contact = request.form.get('cno')
        feed = request.form.get('fb')
        sql = Feedback(name=name, Email_Id=email, Contact_no=contact, Feedback=feed)
        db.session.add(sql)
        db.session.commit()
        msg = 'Feedback saved successfully'
    return render_template('contact.html', pid=pid, msg=msg)
@app.route('/profile', methods=['GET', 'POST'])
def profile():
    """View/update the logged-in member's profile (admin template).

    GET: load the Member row for session['id'] and display it.
    POST: update name/email/contact/address and, if a valid image was
    uploaded, the profile picture.
    NOTE(review): on an invalid upload ``fname`` stays "" and
    ``data.image`` is overwritten with "" below, losing the existing picture.
    NOTE(review): near-duplicate of stu_profile(); consider a shared helper.
    """
    name = ""
    email = ""
    cno = ""
    add=""
    img=""
    msg=""
    fname=""
    if request.method == 'POST':
        sno=session['id']
        print(sno)
        data=Member.query.filter_by(sno=sno).first()
        name = request.form.get('name')
        email = request.form.get('email')
        cno = request.form.get('contact')
        add = request.form.get('add')
        file = request.files['image']
        filename = secure_filename(file.filename)
        ext = os.path.splitext(filename)[1]
        if ext == ".png" or ext == ".jpeg" or ext == ".jpg":
            # Picture is named after the next sno and saved under dp_path.
            fname = secure_filename(str(int(data.sno) + 1) + ext)
            data.image=fname
            db.session.commit()
            file.save(os.path.join(app.config['dp_path'], fname))
        else:
            msg = "Please upload a valid image"
        data.name=name
        data.email=email
        data.contact=cno
        data.address=add
        data.image=fname
        db.session.commit()
        msg="Profile Updated successfully"
    else:
        sno = session['id']
        data = Member.query.filter_by(sno=sno).first()
        name = data.name
        email = data.email
        cno = data.contact
        add = data.address
        fname=data.image
    li=[sno,name,email,cno,add,fname]
    return render_template('user/admin/profile.html',li=li)
@app.route('/courses')
def courses():
    """Public course-list page."""
    return render_template('course.html', pid=7, data=Course.query.all())
@app.route('/enquiry', methods=['GET', 'POST'])
def enquiry():
    """Enquiry page: on POST, store the visitor's enquiry.

    Fix: the success message was computed but never handed to the
    template; it is now passed as ``msg``. Also removed a stray semicolon.
    """
    msg = ''
    pid = 5
    if request.method == 'POST':
        name = request.form.get('name')
        contact = request.form.get('cno')
        email = request.form.get('email')
        gender = request.form.get('gen')
        city = request.form.get('city')
        enq = request.form.get('enquiry')
        sql = Enquiry(name=name, cno=contact, gen=gender, city=city, enquiry=enq, email=email)
        db.session.add(sql)
        db.session.commit()
        msg = 'Enquiry saved successfully'
    return render_template('enquiry.html', pid=pid, msg=msg)
@app.route('/career', methods=['GET', 'POST'])
def career():
    """Career page: on POST, store a job application with an uploaded CV.

    The CV file name is derived from the next Career sno. A duplicate
    email or a non pdf/doc/docx upload is rejected with a message.
    NOTE(review): ``gen`` is never filled from the submitted ``gender``,
    so the re-rendered form always shows an empty gender field.
    NOTE(review): the row is committed before file.save(); a failed save
    leaves a DB row pointing at a missing file.
    """
    name = ""
    email = ""
    cno = ""
    gen = ""
    msg = ""
    pid = 6
    if request.method == 'POST':
        msg = "Your career details saved"
        name = request.form.get('name')
        cno = request.form.get('cno')
        gender = request.form.get('gen')
        email = request.form.get('email')
        add = request.form.get('add')
        city = request.form.get('city')
        file = request.files['cv']
        filename = secure_filename(file.filename)
        ext = os.path.splitext(filename)[1]
        if ext == ".pdf" or ext == ".doc" or ext == ".docx":
            data = Career.query.filter_by(email=email).first()
            if not data:
                # CV name = (highest existing sno + 1) + extension.
                data = Career.query.order_by(Career.sno.desc()).first()
                if not data:
                    fname = secure_filename("1" + ext)
                else:
                    fname = secure_filename(str(int(data.sno) + 1) + ext)
                entry = Career(name=name, email=email, contact=cno, gender=gender, address=add, city=city,
                               cv=fname)
                db.session.add(entry)
                db.session.commit()
                print(fname)
                file.save(os.path.join(app.config['cv_path'], fname))
                name = ""
                email = ""
                cno = ""
                gen = ""
                msg = "Career Detail Successfully Saved"
            else:
                msg = 'Email-Id already exist'
        else:
            msg = "Please upload a valid CV File"
    li1 = [name, email, cno, gen, msg]
    return render_template('career.html', li=li1, pid=pid)
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Handle both registration ('RegSubmit') and login ('Login') posts.

    Registration stores an Application row and mails a confirmation.
    Login checks Login.user_id/password and routes to the admin or
    student dashboard depending on Login.status ('A' = admin).
    NOTE(review): passwords are compared in plain text — hash them.
    NOTE(review): if no Member row matches the email, ``data.image``
    below raises AttributeError on None.
    """
    name = ""
    email = ""
    cno = ""
    gen = ""
    msg = ""
    pid = 0;
    if request.method == 'POST' and 'RegSubmit' in request.form:
        name = request.form.get('name')
        cno = request.form.get('cno')
        gender = request.form.get('gen')
        email = request.form.get('email')
        course = request.form.get('course')
        bank = request.form.get('bank')
        dd = request.form.get('dd')
        date = request.form.get('date')
        pid = 0
        data = Application.query.filter_by(email=email).first()
        if not data:
            entry = Application(name=name, email=email, contact=cno, gender=gender, course=course, date=date, bank=bank,
                                ddno=dd)
            db.session.add(entry)
            db.session.commit()
            mail.send_message('Message from ' + "eLearning",
                              sender=params['email'],
                              recipients=[email],
                              body="Hello " + name + "\nYour details are received. We contact you soon ")
            msg = "Your details saved"
        else:
            msg = "Already Registered"
        pid = 1
    elif request.method == 'POST' and 'Login' in request.form:
        email = request.form.get('email')
        password = request.form.get('pass')
        data = Login.query.filter_by(user_id=email).first()
        if not data:
            msg = "Email does not exist"
            pid = 2
        else:
            if data.password == password:
                session['name'] = data.name
                session['uid'] = data.user_id
                session['id'] = data.sno
                status=data.status
                data = Member.query.filter_by(email=email).first()
                session['img'] = data.image
                if status=='A':
                    return render_template('user/admin/index.html')
                else :
                    return render_template('user/student/index.html')
            else:
                msg = "Invalid password"
                pid = 2
    data = Course.query.all()
    return render_template('index.html', msg=msg, pid=pid, data=data)
# admin
@app.route('/admin')
def admin():
    """Admin dashboard."""
    # Dropped the unused local ``msg``.
    return render_template('user/admin/index.html')
@app.route('/course/<string:code>,<string:cmd>', methods=['GET', 'POST'])
def course(code, cmd):
    """Admin course CRUD. ``cmd``: '1' add, '2' view, '3' edit, '4' delete.

    The previous code/name are parked in the session ('cc'/'cn') while a
    course is being viewed/edited, so the edit POST can detect whether the
    code or name actually changed before checking for duplicates.
    NOTE(review): 'suceesfully' typo in the success message (runtime
    string; left unchanged here).
    """
    cc = ''
    cn = ''
    mod = ''
    fee = ''
    dur = ''
    car = ''
    msg = ''
    if request.method == 'POST':
        cc = request.form.get('cc')
        cn = request.form.get('cn')
        fee = request.form.get('fee')
        dur = request.form.get('dur')
        car = request.form.get('car')
        mod = request.form.get('mod')
        if cmd == '1':
            # Add: reject duplicate course code, then duplicate name.
            data = Course.query.filter_by(course_code=cc).first()
            if not data:
                data = Course.query.filter_by(course_name=cn).first()
                if not data:
                    sql = Course(course_code=cc, course_name=cn, fees=fee, duration=dur, career=car, module=mod)
                    db.session.add(sql)
                    db.session.commit()
                    msg = 'Course Added suceesfully'
                    cc = ''
                    cn = ''
                    mod = ''
                    fee = ''
                    dur = ''
                    car = ''
                else:
                    msg = 'Course Name already exist'
            else:
                msg = 'Course Code already exist'
        elif cmd == '3':
            # Edit: only check uniqueness for fields that were changed.
            flag = 1
            if session['cc'] != cc:
                find = Course.query.filter_by(course_code=cc).first()
                if find:
                    flag = 0
                    msg = "Course Code already exist"
            elif session['cn'] != cn:
                find = Course.query.filter_by(course_name=cn).first()
                if find:
                    flag = 0
                    msg = "Course Name already exist"
            if flag == 1:
                # code = session['cc']
                data = Course.query.filter_by(course_code=code).first()
                data.course_code = cc
                data.course_name = cn
                data.duration = dur
                data.fees = fee
                data.module = mod
                data.career = car
                db.session.commit()
                session.pop("cc")
                session.pop("cn")
                cmd = '0'
                code = '0'
        elif cmd == '4':
            rs = Course.query.filter_by(course_code=code).first()
            db.session.delete(rs)
            db.session.commit()
            cmd = '0'
            code = '0'
    if code != '':
        if cmd == '2' or cmd == '3' or cmd == '4':
            # Load the selected course into the form fields.
            code = code
            val = Course.query.filter_by(course_code=code).first()
            cc = val.course_code
            cn = val.course_name
            fee = val.fees
            dur = val.duration
            mod = val.module
            car = val.career
            session['cc'] = cc
            session['cn'] = cn
    data = Course.query.all()
    li = [cc, cn, fee, dur, mod, car, code, cmd, msg]
    return render_template('user/admin/course.html', data=data, li=li)
@app.route('/application/<string:sno>', methods=["GET", "POST"])
def application(sno):
    """Admin view of pending applications.

    POST 'Confirm': create Login (random password), Member and Stu_course
    rows from the Application, then delete it.
    POST 'Delete': discard the application. Otherwise, sno != '0' shows
    one application's details.
    NOTE(review): the Member lookup below runs with ``email`` still "",
    so it checks for a member with an empty email — confirm the intent.
    """
    name = ''
    email = ''
    contact = ''
    gender = ''
    course = ''
    date = ''
    bank = ''
    ddno = ''
    img=''
    add=''
    msg = ''
    sn = 0
    if request.method == 'POST' and 'Confirm' in request.form:
        # New account sno = highest existing Login sno + 1.
        data = Login.query.order_by(Login.sno.desc()).first()
        if not data:
            sn = 1
        else:
            sn = int(data.sno) + 1
        upass = get_password(6)
        data = Application.query.filter_by(sno=sno).first()
        entry = Login(sno=sn, name=data.name, user_id=data.email, password=upass, status='S')
        db.session.add(entry)
        db.session.commit()
        data = Member.query.filter_by(email=email).first()
        if not data:
            data = Application.query.filter_by(sno=sno).first()
            cc=data.course
            email = data.email
            entry = Member(sno=sn,name=data.name, email=data.email, contact=data.contact, gender=data.gender,
                           course=data.course, date=data.date, bank=data.bank, ddno=data.ddno,image=img,address=add)
            db.session.add(entry)
            db.session.commit()
            db.session.delete(data)
            db.session.commit()
            data = Course.query.filter_by(course_code=cc).first()
            entry = Stu_course(sno=sn,ccode=cc,email=email,cname=data.course_name,fee=data.fees,duration=data.duration,module=data.module,career=data.career)
            db.session.add(entry)
            db.session.commit()
            # mail.send_message('Message from ' + "eLearning",
            #                   sender=params['email'],
            #                   recipients=[data.email_id],
            #                   body="Hello " + data.name + "\nYour details are received. We contact you soon ")
        sno = '0'
    elif request.method == 'POST' and 'Delete' in request.form:
        data = Application.query.filter_by(sno=sno).first()
        db.session.delete(data)
        db.session.commit()
        sno = '0'
    elif sno != '0':
        data = Application.query.filter_by(sno=sno).first()
        sno = data.sno
        name = data.name
        email = data.email
        contact = data.contact
        gender = data.gender
        course = data.course
        date = data.date
        bank = data.bank
        ddno = data.ddno
    data = Application.query.all()
    li = [sno, name, email, contact, gender, course, date, bank, ddno]
    return render_template('user/admin/application.html', data=data, li=li)
@app.route('/status/<string:sno>,<string:cmd>', methods=["GET", "POST"])
def status(cmd, sno):
    """Admin member-status view: activate/deactivate/delete members.

    cmd '0' lists active members, '1' lists inactive ones; '2'/'3' load a
    single member's details into the form. De/activation sends a mail.
    NOTE(review): if cmd ends up outside '0'/'1', ``data`` is a single
    Member (or unbound) while the template presumably expects a list.
    """
    name = ''
    email = ''
    contact = ''
    gender = ''
    course = ''
    date = ''
    bank = ''
    ddno = ''
    msg = ''
    if request.method == 'POST':
        if 'Inactive' in request.form:
            data = Member.query.filter_by(sno=sno).first()
            data.status = 'inactive'
            db.session.commit()
            mail.send_message('Message from ' + 'eLearning',
                              sender=params['email'],
                              recipients=[data.email],
                              body="Hello " + data.name + "\nYour User Id " + data.email + " deactivated.")
            cmd = '0'
        elif 'Active' in request.form:
            data = Member.query.filter_by(sno=sno).first()
            data.status = 'active'
            db.session.commit()
            mail.send_message('Message from ' + 'eLearning',
                              sender=params['email'],
                              recipients=[data.email],
                              body="Hello " + data.name + "\nYour User Id " + data.email + " activated.")
            sno = '0'
            cmd = '0'
        elif 'Delete' in request.form:
            data = Member.query.filter_by(sno=sno).first()
            db.session.delete(data)
            db.session.commit()
            sno = '0'
            cmd = '0'
    if cmd == '2' or cmd == '3':
        data = Member.query.filter_by(sno=sno).first()
        sno = data.sno
        name = data.name
        email = data.email
        contact = data.contact
        gender = data.gender
        course = data.course
        date = data.date
        bank = data.bank
        ddno = data.ddno
        status = data.status
    if cmd == '0':
        data = Member.query.filter_by(status='active').all()
    elif cmd == '1':
        data = Member.query.filter_by(status='inactive').all()
    li = [cmd, name, email, contact, gender, course, date, bank, ddno, sno]
    return render_template('user/admin/statuss.html', data=data, li=li)
@app.route('/display/<string:cmd>,<string:sno>', methods=["GET", "POST"])
def display(cmd, sno):
    """Admin view of feedback / enquiries / career applications.

    cmd encodes both entity and mode: 1/2/3 list feedback/enquiry/career,
    4/5/6 show one record, 7/8/9 open the reply form (the entity is then
    remembered in session['cmd'] for the reply POST). Replying mails the
    text to the record's address and marks it status 'Y'.
    """
    name = ''
    email = ''
    contact = ''
    gender = ''
    course = ''
    date = ''
    feedback = ''
    enquiry = ''
    city = ''
    address = ''
    cv = ''
    msg = ''
    status = ''
    if request.method == 'POST' and 'Reply' in request.form:
        # session['cmd'] tells which table the reply belongs to.
        if session['cmd'] == '7':
            data = Feedback.query.filter_by(sno=sno).first()
            cmd = '1'
        elif session['cmd'] == '8':
            data = Enquiry.query.filter_by(sno=sno).first()
            cmd = '2'
        elif session['cmd'] == '9':
            data = Career.query.filter_by(sno=sno).first()
            cmd = '3'
        email = request.form.get('email')
        name = request.form.get('name')
        reply = request.form.get('reply')
        mail.send_message('Message from ' + "eLearning",
                          sender=params['email'],
                          recipients=[email],
                          body="Hello " + name + "\n " + reply)
        data.status = 'Y'
        db.session.commit()
        sno = '0'
    if sno == '0':
        if cmd == '1':
            data = Feedback.query.all()
        elif cmd == '2':
            data = Enquiry.query.all()
        else:
            data = Career.query.all()
    else:
        if cmd == '4' or cmd == '7':
            data = Feedback.query.filter_by(sno=sno).first()
            sno = data.sno
            name = data.name
            email = data.Email_Id
            contact = data.Contact_no
            feedback = data.Feedback
            date = data.Date
            status = data.status
            if cmd == '7':
                session['cmd'] = '7'
        elif cmd == '5' or cmd == '8':
            data = Enquiry.query.filter_by(sno=sno).first()
            sno = data.sno
            name = data.name
            email = data.email
            contact = data.cno
            enquiry = data.enquiry
            date = data.date
            city = data.city
            gender = data.gen
            status = data.status
            if cmd == '8':
                session['cmd'] = '8'
        elif cmd == '6' or cmd == '9':
            data = Career.query.filter_by(sno=sno).first()
            sno = data.sno
            name = data.name
            email = data.email
            contact = data.contact
            address = data.address
            date = data.date
            city = data.city
            gender = data.gender
            cv=data.cv
            status = data.status
            if cmd == '9':
                session['cmd'] = '9'
    li = [cmd, sno, name, email, contact, gender, feedback, city, enquiry, address, cv, date, status]
    return render_template('user/admin/display.html', data=data, li=li)
@app.route('/ques/<string:code>', methods=['GET', 'POST'])
def ques(code):
    """Remember the chosen course code, then open its question manager.

    Fix: the redirect target contained stray '+' characters
    ('/question/+0+,0'), which made question() receive sno='+0+'
    instead of '0'.
    """
    session['code'] = code
    return redirect('/question/0,0')
@app.route('/question/<string:sno>,<string:cmd>', methods=['GET', 'POST'])
def question(sno, cmd):
    """Admin question-bank CRUD for the course in session['code'].

    cmd: '1' add, '2' view (parks sno/cmd in the session for the
    follow-up), '3' update, '4' delete.
    NOTE(review): the final ``elif cmd == '2' or '3' or '4'`` branch can
    only match '2' — '3' and '4' are caught by the earlier branches.
    NOTE(review): 'suceesfully' typo in the success message (runtime
    string; left unchanged here).
    """
    ques = ''
    opt1 = ''
    opt2 = ''
    opt3 = ''
    opt4 = ''
    ans = ''
    cc=''
    msg = ''
    code=session['code']
    if request.method == 'POST':
        sno = request.form.get('sno')
        ques = request.form.get('ques')
        opt1 = request.form.get('o1')
        opt2 = request.form.get('o2')
        opt3 = request.form.get('o3')
        opt4 = request.form.get('o4')
        ans = request.form.get('ans')
        cc = request.form.get('course')
    if cmd == '1':
        data = Questions.query.filter_by(course=code).first()
        sql = Questions(ques=ques, option1=opt1, option2=opt2, option3=opt3, option4=opt4,correct=ans,course=code)
        db.session.add(sql)
        db.session.commit()
        msg = 'Question Added suceesfully'
        ques = ''
        opt1 = ''
        opt2 = ''
        opt3 = ''
        opt4 = ''
        ans = ''
    elif cmd == '3':
        sno=session['sno']
        cmd = session['cmd']
        data = Questions.query.filter_by(sno=sno).first()
        print(sno)
        data.ques = ques
        data.option1 = opt1
        data.option2 = opt2
        data.option3 = opt3
        data.option4 = opt4
        data.correct = ans
        db.session.commit()
        cmd = '0'
        # return redirect('/question/'+session['code']+',0')
    elif cmd == '4':
        sno = session['sno']
        cmd = session['cmd']
        rs = Questions.query.filter_by(sno=sno).first()
        db.session.delete(rs)
        db.session.commit()
        cmd = '0'
    elif cmd == '2' or cmd == '3' or cmd == '4':
        data = Questions.query.filter_by(sno=sno).first()
        sno=data.sno
        code=data.course
        ques=data.ques
        opt1=data.option1
        opt2=data.option2
        opt3=data.option3
        opt4=data.option4
        ans=data.correct
        cc=data.course
        session['cmd']=cmd
        session['sno']=sno
    data = Questions.query.filter_by(course=code).all()
    print(data)
    li = [ques, opt1, opt2, opt3, opt4, ans, cmd, msg, cc, code]
    return render_template('user/admin/question.html', data=data, li=li)
@app.route('/change_pass', methods=['GET', 'POST'])
def change_pass():
    """Change the logged-in user's password (admin template).

    Requires the current password to match and the two new-password
    fields to agree.
    NOTE(review): plain-text password comparison/storage — hash instead.
    NOTE(review): near-duplicate of stu_change_pass(); consider a helper.
    """
    password=''
    cpass=''
    npass=''
    msg=''
    if request.method == 'POST':
        sno=session['id']
        data=Login.query.filter_by(sno=sno).first()
        password=request.form.get('pass')
        npass=request.form.get('npass')
        cpass=request.form.get('cpass')
        if password!=data.password:
            msg="Invalid password"
        elif cpass!=npass:
            msg="Password do not match"
        else:
            data.password=npass
            db.session.commit()
            password = ''
            cpass = ''
            npass = ''
            msg="Password changed successfully"
    li=[password,npass,cpass,msg]
    return render_template('user/admin/change_pass.html',li=li)
@app.route('/student')
def student():
    """Student dashboard."""
    return render_template('user/student/index.html')
@app.route('/stu_profile', methods=['GET', 'POST'])
def stu_profile():
    """View/update the logged-in student's profile.

    Mirrors profile() but renders the student template.
    NOTE(review): on an invalid upload ``fname`` stays "" and
    ``data.image`` is overwritten with "" below, losing the existing picture.
    """
    name = ""
    email = ""
    cno = ""
    add=""
    img=""
    msg=""
    fname=""
    if request.method == 'POST':
        sno=session['id']
        print(sno)
        data=Member.query.filter_by(sno=sno).first()
        name = request.form.get('name')
        email = request.form.get('email')
        cno = request.form.get('contact')
        add = request.form.get('add')
        file = request.files['image']
        filename = secure_filename(file.filename)
        ext = os.path.splitext(filename)[1]
        if ext == ".png" or ext == ".jpeg" or ext == ".jpg":
            # Picture is named after the next sno and saved under dp_path.
            fname = secure_filename(str(int(data.sno) + 1) + ext)
            data.image=fname
            db.session.commit()
            file.save(os.path.join(app.config['dp_path'], fname))
        else:
            msg = "Please upload a valid image"
        data.name=name
        data.email=email
        data.contact=cno
        data.address=add
        data.image=fname
        db.session.commit()
        msg="Profile Updated successfully"
    else:
        sno = session['id']
        data = Member.query.filter_by(sno=sno).first()
        print(sno)
        name = data.name
        email = data.email
        cno = data.contact
        add = data.address
        fname=data.image
    li=[sno,name,email,cno,add,fname]
    return render_template('user/student/stu_profile.html',li=li)
@app.route('/stu_change_pass', methods=['GET', 'POST'])
def stu_change_pass():
    """Change the logged-in student's password.

    Mirrors change_pass() but renders the student template.
    NOTE(review): plain-text password comparison/storage — hash instead.
    """
    password=''
    cpass=''
    npass=''
    msg=''
    if request.method == 'POST':
        sno=session['id']
        data=Login.query.filter_by(sno=sno).first()
        password=request.form.get('pass')
        npass=request.form.get('npass')
        cpass=request.form.get('cpass')
        if password!=data.password:
            msg="Invalid password"
        elif cpass!=npass:
            msg="Password do not match"
        else:
            data.password=npass
            db.session.commit()
            password = ''
            cpass = ''
            npass = ''
            msg="Password changed successfully"
    li=[password,npass,cpass,msg]
    return render_template('user/student/change_pass.html',li=li)
@app.route('/stu_course/<string:cmd>,<string:code>', methods=['GET', 'POST'])
def stu_course(cmd,code):
    """Student course view: cmd '0' lists enrolments, '1' shows one course.

    NOTE(review): for any other cmd, ``data`` is never assigned and the
    final render raises NameError (HTTP 500).
    """
    sno=session['id']
    cc = ''
    cn = ''
    mod = ''
    fee = ''
    dur = ''
    car = ''
    msg = ''
    if cmd=='0':
        data = Stu_course.query.filter_by(sno=sno).all()
    elif cmd=='1':
        data = Stu_course.query.filter_by(sno=sno,ccode=code).first()
        cc=code
        cn=data.cname
        fee=data.fee
        dur=data.duration
        car=data.career
        mod=data.module
    li=[cc,cn,fee,dur,mod,car,cmd]
    return render_template('user/student/course.html', li=li,data=data)
@app.route('/study/<string:code>', methods=['GET', 'POST'])
def study(code):
    """Open the study material for the chosen course code.

    NOTE(review): any code other than 'OJ'/'DBMS' falls through and
    returns None, which Flask turns into an error — add a fallback.
    """
    session['code']=code
    if code=='OJ':
        return redirect('/java/0')
    elif code=='DBMS':
        return redirect('/dbms/0')
@app.route('/java/<string:cmd>', methods=['GET', 'POST'])
def java(cmd):
    """Serve the requested page of the Java course material.

    Unknown ``cmd`` values fall through and return None, exactly as the
    original if/elif chain did.
    """
    pages = {
        '0': 'index',
        '1': 'intro',
        '2': 'syntax',
        '3': 'variable',
        '4': 'datatype',
    }
    page = pages.get(cmd)
    if page is not None:
        return render_template('user/student/course/java/' + page + '.html')
@app.route('/dbms/<string:cmd>', methods=['GET', 'POST'])
def dbms(cmd):
    """DBMS course material.

    NOTE(review): renders the *java* index template — presumably a
    copy-paste placeholder; confirm. Any cmd other than '0' returns None.
    """
    if cmd=='0':
        return render_template('user/student/course/java/index.html')
@app.route('/test/<string:code>', methods=['GET', 'POST'])
def test(code):
    """Run a 10-question MCQ test for a course and store the Result.

    GET (only for code 'OJ'): sample 10 random questions and render them.
    POST: grade the submitted answers (wrong answers cost 0.25), store a
    Result row and show the result page.
    NOTE(review): the first result/total/percent computations below are
    dead code — they are recomputed a few lines later.
    NOTE(review): GET with any code other than 'OJ' returns None (500).
    """
    if request.method == 'POST':
        ql = list(request.form.keys())
        questions = Questions.query.filter(Questions.sno.in_(ql)).all()
        print(questions)
        ca = 0
        wa = 0
        ua = 0
        # ca = correct, wa = wrong, ua = unanswered ('nil').
        for q in questions:
            if q.correct == request.form.get(str(q.sno)):
                ca = ca + 1
            elif request.form.get(str(q.sno)) == 'nil':
                ua = ua + 1
            else:
                wa = wa + 1
        result = ""
        total = 10 - ua - wa * .25
        percent = total * 10
        result = ""
        grade = ""
        attempted=ca+wa
        total = ca-wa*.25
        percent = total * 10
        if percent >= 90:
            grade = 'A'
        elif percent >= 80:
            grade = 'B'
        elif percent >= 70:
            grade = 'C'
        elif percent >= 60:
            grade = 'D'
        elif percent >= 50:
            grade = 'E'
        else:
            grade = 'F'
        if grade == 'F':
            result = 'FAIL'
        else:
            result = 'PASS'
        data = Course.query.filter_by(course_code=code).first();
        sql = Result(id=session['id'],attempted=attempted,total=total,correct=ca,wrong=wa,unanswered=ua,grade=grade,percent=percent,result=result,course_code=code,course_name=data.course_name)
        db.session.add(sql)
        db.session.commit()
        li = [ca, wa, ua, total, percent, grade, result]
        return render_template('user/student/result.html', li=li)
    elif code=='OJ':
        questions = Questions.query.filter_by(course=code).all()
        questions = random.sample(questions, k=10)
        return render_template('user/student/test.html',data=questions)
@app.route('/result/<string:cmd>,<string:sno>', methods=['GET', 'POST'])
def result(cmd,sno):
    """Show the logged-in student's test results.

    cmd '0' lists all of the student's results; otherwise one Result row
    (by sno) is loaded into the detail fields.
    """
    id=session['id']
    code=''
    course=''
    total=''
    percent=''
    correct=''
    wrong=''
    res=''
    grade=''
    if cmd=='0':
        data = Result.query.filter_by(id=id).all()
    else:
        data = Result.query.filter_by(sno=sno).first()
        print(data)
        code=data.course_code
        course=data.course_name
        correct=data.correct
        wrong=data.wrong
        res=data.result
        grade=data.grade
        total=data.total
    li=[cmd,code,course,total,correct,wrong,res,grade]
    return render_template('user/student/test_results.html',data=data,li=li)
@app.route('/video/<string:code>', methods=['GET', 'POST'])
def video(code):
    """Open the video page for the chosen course code.

    NOTE(review): any code other than 'OJ'/'c' returns None (500); the
    'c/0' redirect target is relative — confirm it resolves as intended.
    """
    session['code']=code
    if code=='OJ':
        return render_template('user/student/course/java/video.html')
    elif code=='c':
        return redirect ('c/0')
if __name__ == '__main__':
    # NOTE(review): debug=True exposes the Werkzeug debugger — never
    # enable it in production.
    app.run(debug=True)
|
from django.urls import path
from .views import HomeView
from . import views
app_name = 'core'
# URL patterns for the core app (tutor and student areas).
urlpatterns = [
    path('', HomeView, name='home'),
    path('tutor/profile', views.TutorProfileView, name='tutorprofile'),
    path('tutor/addlisting', views.AddListingView, name='addlisting'),
    path('tutor/listings', views.ViewListingsView, name='viewlisting'),
    path('tutor/deletelisting/<listingid>', views.DeleteListingView, name='deletelisting'),
    path('tutor/editlisting/<listingid>', views.EditListingView, name='editlisting'),
    # NOTE(review): the name 'studentclassrequests' is used twice (here and
    # for the student route below); reverse() can only resolve one of them.
    # Renaming would break existing {% url %} usages, so it is only flagged.
    path('tutor/classrequests/', views.TutorClassRequestView, name='studentclassrequests'),
    path('student/profile', views.StudentProfileView, name='studentprofile'),
    path('student/classrequests/', views.StudentClassRequestView, name='studentclassrequests'),
    path('tutor/<tutorid>', views.TutorDetailView, name='tutordetail'),
]
|
import unittest
from katas.kyu_7.alternate_square_sum import alternate_sq_sum
class AlternateSquareSumTestCase(unittest.TestCase):
    """Checks alternate_sq_sum against precomputed expected totals.

    Judging by the expected values (e.g. [11, 12, 13, 14, 15] -> 379 =
    11 + 12**2 + 13 + 14**2 + 15), the kata appears to add elements at even
    0-based indices as-is and square elements at odd indices — confirm
    against katas.kyu_7.alternate_square_sum.
    """

    def test_equals(self):
        self.assertEqual(alternate_sq_sum([11, 12, 13, 14, 15]), 379)

    def test_equals_2(self):
        self.assertEqual(alternate_sq_sum([11, 5, 6, 11, 11, 8, 8, 13]), 415)

    def test_equals_3(self):
        self.assertEqual(alternate_sq_sum(
            [5, 15, 9, 12, 13, 16, 13, 7, 5, 7, 7, 15, 8, 6, 13]), 1057)

    def test_equals_4(self):
        self.assertEqual(alternate_sq_sum([16, 10, 9, 12, 7, 11, 9, 8]), 470)

    def test_equals_5(self):
        self.assertEqual(alternate_sq_sum([5, 8, 15, 6, 12, 8, 10]), 206)

    def test_equals_6(self):
        self.assertEqual(alternate_sq_sum(
            [7, 15, 16, 15, 16, 12, 5, 10, 6, 6, 13, 14]), 989)

    def test_equals_7(self):
        self.assertEqual(alternate_sq_sum(
            [13, 10, 11, 15, 9, 6, 6, 14, 7, 5, 6, 13, 14]), 817)

    def test_equals_8(self):
        self.assertEqual(alternate_sq_sum(
            [15, 10, 8, 10, 6, 7, 8, 8, 13, 14, 13, 8, 6]), 642)

    def test_equals_9(self):
        self.assertEqual(alternate_sq_sum(
            [8, 5, 10, 13, 10, 6, 8, 5, 7, 9]), 379)

    def test_equals_10(self):
        self.assertEqual(alternate_sq_sum(
            [11, 5, 13, 6, 8, 14, 7, 7, 15, 16, 7, 9, 9, 15, 11, 7]), 998)

    def test_equals_11(self):
        self.assertEqual(alternate_sq_sum([11, 8, 14, 5, 7, 15]), 346)
|
import numpy as np
import pandas as pd
import pytest
from prereise.gather.hydrodata.eia.helpers import scale_profile
def test_scale_profile_argument_type():
    """scale_profile rejects wrongly-typed arguments with TypeError."""
    bad_args = (
        (pd.DataFrame(), [1] * 12),            # profile must be a Series
        (pd.Series(dtype=np.float64), set([1] * 12)),  # weights must be a list
    )
    for profile, weights in bad_args:
        with pytest.raises(TypeError):
            scale_profile(profile, weights)
def test_scale_profile_argument_value():
    """scale_profile rejects well-typed but invalid values with ValueError."""
    # Case 1: a monthly-resolution profile instead of an hourly one.
    monthly_profile = pd.Series(
        [1] * 12, index=pd.date_range("2016-01-01", periods=12, freq="MS")
    )
    # Case 2: an hourly profile but only 6 monthly targets instead of 12.
    hourly_profile = pd.Series(
        [1] * 366 * 24,
        index=pd.date_range("2016-01-01", periods=366 * 24, freq="H"),
    )
    bad_args = (
        (monthly_profile, [1] * 12),
        (hourly_profile, [1] * 6),
    )
    for profile, weights in bad_args:
        with pytest.raises(ValueError):
            scale_profile(profile, weights)
def test_scale_profile():
    """Monthly sums of the scaled hourly profile match the target totals."""
    hourly_profile = pd.Series(
        [1] * 366 * 24,
        index=pd.date_range("2016-01-01", periods=366 * 24, freq="H"),
    )
    monthly_targets = list(range(1, 13))
    scaled = scale_profile(hourly_profile, monthly_targets)
    monthly_sums = scaled.resample("MS").sum().values
    for got, want in zip(monthly_sums, monthly_targets):
        assert round(got) == want
|
from common.run_method import RunMethod
import allure
@allure.step("极数据/查询课时费")
def classFeeRating_queryClassFeeRating_get(params=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/查询课时费"
url = f"/service-statistics-metrics/classFeeRating/queryClassFeeRating"
res = RunMethod.run_request("GET", url, params=params, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/查询课时费评级")
def classFeeRating_queryRating_get(params=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/查询课时费评级"
url = f"/service-statistics-metrics/classFeeRating/queryRating"
res = RunMethod.run_request("GET", url, params=params, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/修改课时费评级")
def classFeeRating_updateRating_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/修改课时费评级"
url = f"/service-statistics-metrics/classFeeRating/updateRating"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/查询带班量和额外课时量详情")
def classFeeRating_queryClassAndDurationDetails_get(params=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/查询带班量和额外课时量详情"
url = f"/service-statistics-metrics/classFeeRating/queryClassAndDurationDetails"
res = RunMethod.run_request("GET", url, params=params, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极数据/验证用户密码")
def classFeeRating_authByPassWord_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极数据/验证用户密码"
url = f"/service-statistics-metrics/classFeeRating/authByPassWord"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
|
class Config:
    """Run configuration for the domain-adaptation experiment."""

    def __init__(self):
        # --- data ---
        self.image_size = (256, 256)   # input image size (height, width)
        self.classes = 31              # number of target classes
        # --- optimisation ---
        self.batch_size = 16
        self.epochs = 3
        # --- model / domains ---
        self.base_model = 'vgg'        # backbone: 'vgg' or 'resnet'
        self.src_domain_name = 'amazon'
        self.tgt_domain_name = 'dslr'
        # --- hardware ---
        self.is_cuda = False           # set True to run on GPU
config = Config()  # module-level singleton imported by the training scripts

if __name__ == "__main__":
    # Quick sanity check: dump the full configuration when run directly.
    print(config.__dict__)
|
# ref : https://ai-inter1.com/python-stock_scraping/
# ref: stoop.com
""" toyota
https://stooq.com/q/d/?s=7203.jp&i=d&d1=20190601&d2=20200522&l=3
s=7203.jp:銘柄コード
d1=20190401:検索開始日付
d2=20190920:検索終了日付
l=3:ページ数
topix : https://stooq.com/t/?i=581
"""
import pandas as pd
import datetime as dt
import matplotlib.pyplot as plt
import sys
import glob
import csv
import os
def get_topix_30():
    """Scrape the TOPIX Core 30 constituents table from stooq.com.

    Saves the table as STOOQ/<YYYYMMDD>_topix30.txt and returns it as a
    DataFrame.
    """
    url = r'https://stooq.com/t/?i=581'
    data = pd.read_html(url,header=0)
    # Table 1 holds the listing; slice 5:-1 strips header/footer rows.
    topix_30_list = data[1][5:-1]
    topix_30_list.to_csv('STOOQ/'+dt.date.today().strftime('%Y%m%d')+'_topix30.txt')
    return topix_30_list
def get_nikkei_225():
    """Scrape all Nikkei 225 constituents (3 pages) from stooq.com.

    Saves the combined table as STOOQ/<YYYYMMDD>_nikkei225.txt and returns
    it as a DataFrame.
    """
    urlOrg = r'https://stooq.com/t/?i=589&v=0&l='
    pages = []
    for i in range(1, 4):
        data = pd.read_html(urlOrg + str(i))
        # Table 1 holds the listing; slice 5:-1 strips header/footer rows.
        pages.append(data[1][5:-1])
    # BUG FIX: DataFrame.append() was removed in pandas 2.0; pd.concat is
    # the supported way to stack the per-page tables.
    nikkei_225 = pd.concat(pages)
    print(nikkei_225.head())
    print(nikkei_225.tail())
    nikkei_225.to_csv('STOOQ/'+dt.date.today().strftime('%Y%m%d')+'_nikkei225.txt')
    return nikkei_225
def get_japan_code():
    """Scrape the full Japanese listed-company table (38 pages) from stooq.com.

    Saves the combined table as STOOQ/<YYYYMMDD>_japancode.txt and returns
    it as a DataFrame.
    """
    urlOrg = r'https://stooq.com/t/?i=519&v=0&l='
    pages = []
    for i in range(1, 39):
        data = pd.read_html(urlOrg + str(i))
        # Table 1 holds the listing; slice 5:-1 strips header/footer rows.
        pages.append(data[1][5:-1])
    # BUG FIX: DataFrame.append() was removed in pandas 2.0; use pd.concat.
    japan_code = pd.concat(pages)
    print(japan_code.head())
    print(japan_code.tail())
    # BUG FIX: previously saved as "..._nikkei225.txt", silently clobbering
    # the dump produced by get_nikkei_225().
    japan_code.to_csv('STOOQ/'+dt.date.today().strftime('%Y%m%d')+'_japancode.txt')
    return japan_code
def get_comp_stock_data(code, start=None, end=None):
    """Download paged daily OHLCV history for one code from stooq.com.

    Args:
        code: securities code, e.g. '7203' (Toyota).
        start, end: 'YYYYMMDD' strings. When start is None, defaults to the
            window from one year ago until today (and end is overwritten).

    Returns:
        (DataFrame, start, end) with the raw concatenated table rows.
    """
    if start is None:
        # Default window: the same calendar day one year back, until today.
        today = dt.date.today()
        last_year = str(int(today.year) - 1)
        month = str(today.month).rjust(2, '0')
        day = str(today.day).rjust(2, '0')
        start = last_year + month + day
        end = today.strftime('%Y%m%d')
    print(start)
    print(end)
    url_1 = r'https://stooq.com/q/d/?s='
    url_2 = '.jp&i=d&d1={start}&d2={end}&l='.format(start=start, end=end)
    url_3 = url_1 + str(code) + url_2
    data = pd.read_html(url_3 + '1', header=0)
    print(data[0].head(10))
    print(data[0].tail())
    # Table 1 holds the quotes; slice 5:-1 strips header/footer rows.
    pages = [data[1][5:-1]]
    for i in range(2, 12):
        data = pd.read_html(url_3 + str(i))
        pages.append(data[1][5:-1])
        # The final (partial) page ends with row number 1.0 — stop paging.
        if float(data[1]["No."][5:-1].tail(1)) == 1.0:
            break
    # BUG FIX: DataFrame.append() was removed in pandas 2.0; use pd.concat.
    df_stock = pd.concat(pages)
    return df_stock, start, end
def get_current_stock_data(code):
    """Fetch the latest quote snapshot rows for one code from stooq.com.

    Returns a small DataFrame assembled from table 1 of the quote page:
    rows 4:5 (last price), 6:8 (open / previous close), 11:12 (min / max).
    """
    url_1 = r'https://stooq.com/q/g/?s='
    url_2 = '.jp'
    url = url_1 + str(code) + url_2
    data = pd.read_html(url, header=0)
    # BUG FIX: DataFrame.append() was removed in pandas 2.0; assemble the
    # selected rows with a single pd.concat instead.
    df_stock = pd.concat([
        data[1][4:5],    # LAST PRICE
        data[1][6:8],    # OPEN / PREV close
        data[1][11:12],  # min / max
    ])
    return df_stock
def create_current_stock_data_sql():
    """Build STOOQ/ins.sql with one INSERT per saved *_current_data.csv.

    Each CSV (written by get_current_stock_data) is flattened into a single
    row: [code, last price, open, prev close, min, max, ...] and emitted as
    "insert into latest_stock select '...' FROM DUAL;".
    """
    path = r'STOOQ/'
    data = []
    csv_files = glob.glob(path+'*_current_data.csv')
    for csv_file in csv_files:
        filename = os.path.basename(csv_file)
        with open(csv_file,encoding='utf-8') as f:
            ff = csv.reader(f)
            rowdata = []
            # The securities code is encoded in the file name.
            rowdata.append(filename.replace('_current_data.csv',''))
            for row in ff:
                if row[0] != '':
                    if row[0] == '4':
                        # Last-price row: strip the thousands separator '\'.
                        rowdata.append(row[2].replace('\\',''))
                    elif row[0] == '11':
                        # Min/max row comes as "min / max" — split into two fields.
                        spdata = row[2].split(' / ')
                        rowdata.append(spdata[0])
                        rowdata.append(spdata[1])
                    else:
                        rowdata.append(row[2])
            # print(rowdata)
            data.append(rowdata)
    with open(r'STOOQ/ins.sql','w',encoding='utf-8') as f:
        for data1 in data:
            # NOTE(review): values are spliced into the SQL text verbatim;
            # fine for locally-generated files, unsafe for untrusted input.
            sql1 = "insert into latest_stock select '"
            var = "','".join(data1)
            sql2 = "' FROM DUAL;"
            f.write(sql1+var+sql2+'\n')
def update_data(df_stock):
    """Clean a raw stooq daily-price table in place and return it.

    Steps: drop rows with missing Volume, parse the 'Date' column
    ('1 Apr 2019' style) into a new 'Date2' datetime index, and remove the
    'No.' page-row-number column.

    Note: mutates (and returns) the DataFrame passed in.
    """
    df_stock.dropna(subset=['Volume'], axis=0, inplace=True)
    parsed_dates = [dt.datetime.strptime(text, "%d %b %Y") for text in df_stock["Date"]]
    df_stock["Date2"] = parsed_dates
    # Use the parsed dates as the index.
    df_stock.set_index("Date2", inplace=True)
    df_stock.drop(['No.'], axis=1, inplace=True)
    return df_stock
if __name__ == '__main__':
    start=None
    end=None
    while True:
        # Interactive menu (prompt text is Japanese). Choices:
        # 1: fetch TOPIX 30, 2: fetch Nikkei 225, 3: fetch history for one
        # code, 4: plot, 5: fetch all JP codes, 6: latest quotes for a fixed
        # watch list, 0: build SQL from saved quotes, 9: quit.
        indata=input('数値で入力してください\n1:TOPIX30情報取得\n2:NIKKEI情報取得\n\
3:特定コード株価取得\n4:グラフ出力\n5:日本証券コード情報取得\n6:最新株価\n9:終了\n---> ')
        if indata == '1':
            topix_30=get_topix_30()
            print(topix_30.loc[:,['Symbol','Name']])
            # sys.exit()
        elif indata == '2':
            nikkei_225=get_nikkei_225()
            print(nikkei_225.loc[:,['Symbol','Name']])
        elif indata == '3':
            # Prompts (Japanese): securities code, start date, end date.
            code=input('コードを入力してください')
            start=input('始点日を入力してください')
            end=input('終点日を入力してください')
            df_stock,start,end=get_comp_stock_data(code,start,end)
            df_stock=update_data(df_stock)
            df_stock.to_csv('STOOQ/{code}_{start}_{end}.csv'.format(code=code,start=start,end=end))
        elif indata == '4':
            # Plot the chosen column (Close, y-axis) against the index (x-axis).
            # NOTE(review): requires a prior option-3 run; df_stock is
            # undefined otherwise.
            df_stock["Close"].astype(float).plot(title='Stock Price',grid=True)
            plt.show()
        elif indata == '5':
            japancode=get_japan_code()
            # print(japancode.loc[:,['Symbol','Name']])
            print('finish')
        elif indata == '6':
            # code=input('コードを入力してください')
            # Hard-coded watch list of securities codes.
            codes = ['3779','9423','4777','4316','4425','2338','2459','6090','3760','2164','7049','7836','3496','3991','2479','6578','3645','4124','3939','9424']
            for code in codes:
                res = get_current_stock_data(code)
                print(res)
                res.to_csv('STOOQ/'+code+'_current_data.csv')
        elif indata == '9':
            print('終了します')
            sys.exit()
        elif indata == '0':
            create_current_stock_data_sql()

# Author's pandas notes (no runtime effect).
"""
カラム名変更
pd.rename(columns, 任意の引数)
列削除
pd.dropna(削除する列, axis = 1, その他任意の引数)
df_stockdrop(['No.'],axis=1,inplace=True)
文字列を日付に
datetime. strptime(文字列、日付の書式)
次にstrptime()を元に列Dateに格納されている日付を示した文字列を日付型に変換し、元のDataFrameに対して列Date2を追加して日付型の値を格納します。
列Dateに格納されている日付が、1 Apr 2019の書式ですので、strptime()の引数には、"%d %b %Y"を指定しています。
"""
|
"""Copyright 2008 Orbitz WorldWide
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License."""
import logging

# Python 2.6's logging module lacks NullHandler; provide a local stub so
# library loggers never warn about missing handlers.
try:
    from logging import NullHandler
except ImportError as ie:  # py2.6
    from logging import Handler

    class NullHandler(Handler):
        """No-op handler: silently drop every record."""
        def emit(self, record):
            pass

# Custom log levels above CRITICAL for Graphite's dedicated channels.
logging.EXCEPTION = 60
logging.addLevelName(logging.EXCEPTION,"EXCEPTION")
logging.CACHE = 70
logging.addLevelName(logging.CACHE,"CACHE")
# TO-DO: removed unused code
class GraphiteLogger:
    """Facade over three named loggers: INFO, EXCEPTION and CACHE."""

    def __init__(self):
        # Each channel gets its own named logger; only INFO pins a level.
        self.infoLogger = self._config_logger('INFO', level=logging.INFO)
        self.exceptionLogger = self._config_logger('EXCEPTION')
        self.cacheLogger = self._config_logger('CACHE')

    @staticmethod
    def _config_logger(name, level=None):
        """Return logger `name` with a NullHandler attached; set `level` if given."""
        configured = logging.getLogger(name)
        if level is not None:
            configured.setLevel(level)
        configured.addHandler(NullHandler())
        return configured

    def info(self, msg, *args, **kwargs):
        """Log on the INFO channel."""
        return self.infoLogger.info(msg, *args, **kwargs)

    def exception(self, msg="Exception Caught", **kwargs):
        """Log the active exception (with traceback) on the EXCEPTION channel."""
        return self.exceptionLogger.exception(msg, **kwargs)

    def cache(self, msg, *args, **kwargs):
        """Log on the CACHE channel at the custom logging.CACHE level."""
        return self.cacheLogger.log(logging.CACHE, msg, *args, **kwargs)
log = GraphiteLogger() # import-shared logger instance
|
source = open("test.txt", 'r')
w_0 = open("test_0.txt", "w")
w_1 = open("test_1.txt", "w")
for x in source:
if int(x.split(" ")[-1]) == 0:
w_0.write(x)
elif int(x.split(" ")[-1]) == 1:
w_1.write(x)
|
# Implementing Different Layers
# ---------------------------------------
#
# We will illustrate how to use different types
# of layers in TensorFlow
#
# The layers of interest are:
# (1) Convolutional Layer
# (2) Activation Layer
# (3) Max-Pool Layer
# (4) Fully Connected Layer
#
# We will generate two different data sets for this
# script, a 1-D data set (row of data) and
# a 2-D data set (similar to picture)
import tensorflow as tf
import matplotlib.pyplot as plt
import csv
import os
import random
import numpy as np
import random
from tensorflow.python.framework import ops
# NOTE(review): this script targets the TensorFlow 1.x graph API
# (tf.Session / tf.placeholder / tf.set_random_seed); it will not run
# unmodified on TF 2.x.
ops.reset_default_graph()
# ---------------------------------------------------|
# -------------------1D-data-------------------------|
# ---------------------------------------------------|
# Create graph session (the reset above is repeated here — harmless).
ops.reset_default_graph()
sess = tf.Session()
# parameters for the run
data_size = 25
conv_size = 5
maxpool_size = 5
stride_size = 1
# ensure reproducibility
seed = 13
np.random.seed(seed)
tf.set_random_seed(seed)
# Generate 1D data: 25 samples from a standard normal distribution.
data_1d = np.random.normal(size=data_size)
# Placeholder for the 1-D input signal.
x_input_1d = tf.placeholder(dtype=tf.float32, shape=[data_size])
# --------Convolution--------
def conv_layer_1d(input_1d, my_filter, stride):
    """Convolve a 1-D signal by lifting it into conv2d's 4-D layout.

    tf.nn.conv2d only accepts [batch, width, height, channels]; the 1-D
    input is reshaped to [1, 1, len, 1], convolved with VALID padding, and
    the singleton dimensions are squeezed away again.
    """
    lifted_4d = tf.expand_dims(tf.expand_dims(tf.expand_dims(input_1d, 0), 0), 3)
    # The stride applies along the data (height) axis only; e.g. a stride
    # of 2 would be strides=[1, 1, 2, 1].
    convolved = tf.nn.conv2d(lifted_4d, filter=my_filter,
                             strides=[1, 1, stride, 1], padding="VALID")
    return tf.squeeze(convolved)
# Create filter for convolution: [1, conv_size, 1, 1] to match the lifted
# 4-D input layout.
my_filter = tf.Variable(tf.random_normal(shape=[1, conv_size, 1, 1]))
# Create convolution layer
my_convolution_output = conv_layer_1d(x_input_1d, my_filter, stride=stride_size)
# --------Activation--------
def activation(input_1d):
    """Element-wise ReLU non-linearity."""
    return (tf.nn.relu(input_1d))
# Create activation layer
my_activation_output = activation(my_convolution_output)
# --------Max Pool--------
def max_pool(input_1d, width, stride):
    """Max-pool a 1-D signal through the 4-D max_pool op.

    The signal is lifted to [1, 1, len, 1], pooled over a window of
    `width` along the data axis with the given stride (VALID padding),
    then squeezed back to 1-D.
    """
    lifted_4d = tf.expand_dims(tf.expand_dims(tf.expand_dims(input_1d, 0), 0), 3)
    pooled = tf.nn.max_pool(lifted_4d,
                            ksize=[1, 1, width, 1],
                            strides=[1, 1, stride, 1],
                            padding='VALID')
    return tf.squeeze(pooled)
my_maxpool_output = max_pool(my_activation_output, width=maxpool_size, stride=stride_size)
# --------Fully Connected--------
def fully_connected(input_layer, num_outputs):
    """Dense layer for a 1-D input: y = x·W + b, squeezed back to 1-D.

    NOTE(review): W and b are plain random tensors rather than
    tf.Variables, so they are re-sampled on every session run — fine for
    this demo, but they could not be trained as written.
    """
    # Weight shape = [len(input), num_outputs], taken from the live shape.
    weight_shape = tf.squeeze(tf.stack([tf.shape(input_layer), [num_outputs]]))
    weight = tf.random_normal(weight_shape, stddev=0.1)
    bias = tf.random_normal(shape=[num_outputs])
    # Promote the 1-D input to a row vector for matmul.
    row_vector = tf.expand_dims(input_layer, 0)
    dense = tf.add(tf.matmul(row_vector, weight), bias)
    return tf.squeeze(dense)
my_full_output = fully_connected(my_maxpool_output, 5)
# Run graph: initialize variables, then evaluate each layer on data_1d
# and print its shape and output.
# Initialize Variables
init = tf.global_variables_initializer()
sess.run(init)
feed_dict = {x_input_1d: data_1d}
print('>>>> 1D Data <<<<')
# Convolution Output
print('Input = array of length %d'%(x_input_1d.shape.as_list()[0]))
print('Convolution w/ filter, length = %d, stride size = %d, results in an array of length %d:'%
      (conv_size, stride_size, my_convolution_output.shape.as_list()[0]))
print(sess.run(my_convolution_output, feed_dict=feed_dict))
# Activation Output
print('\nInput = above array of length %d'%(my_convolution_output.shape.as_list()[0]))
print('ReLU element wise returns an array of length %d:'%(my_activation_output.shape.as_list()[0]))
print(sess.run(my_activation_output, feed_dict=feed_dict))
# Max Pool Output
print('\nInput = above array of length %d'%(my_activation_output.shape.as_list()[0]))
print('MaxPool, window length = %d, stride size = %d, results in the array of length %d'%
      (maxpool_size, stride_size, my_maxpool_output.shape.as_list()[0]))
print(sess.run(my_maxpool_output, feed_dict=feed_dict))
# Fully Connected Output
print('\nInput = above array of length %d'%(my_maxpool_output.shape.as_list()[0]))
print('Fully connected layer on all 4 rows with %d outputs:'%
      (my_full_output.shape.as_list()[0]))
print(sess.run(my_full_output, feed_dict=feed_dict))
# ---------------------------------------------------|
# -------------------2D-data-------------------------|
# ---------------------------------------------------|
# Reset Graph: start a fresh graph and session for the 2-D demo.
ops.reset_default_graph()
sess = tf.Session()
# parameters for the run
row_size = 10
col_size = 10
conv_size = 2
conv_stride_size = 2
maxpool_size = 2
maxpool_stride_size = 1
# ensure reproducibility
seed = 13
np.random.seed(seed)
tf.set_random_seed(seed)
# Generate 2D data: a 10x10 standard-normal "image".
data_size = [row_size, col_size]
data_2d = np.random.normal(size=data_size)
# --------Placeholder--------
x_input_2d = tf.placeholder(dtype=tf.float32, shape=data_size)
# Convolution
def conv_layer_2d(input_2d, my_filter, stride_size):
    """2-D convolution over a single-channel image.

    conv2d needs [batch, width, height, channels]; the 2-D input becomes
    [1, H, W, 1], is convolved with the same stride on both spatial axes
    (VALID padding), and is squeezed back to 2-D.
    """
    lifted_4d = tf.expand_dims(tf.expand_dims(input_2d, 0), 3)
    # Equal stride on both spatial dimensions — note the difference from
    # the 1-D version, where only one axis strides.
    convolved = tf.nn.conv2d(lifted_4d, filter=my_filter,
                             strides=[1, stride_size, stride_size, 1],
                             padding="VALID")
    return tf.squeeze(convolved)
# Create Convolutional Filter: conv_size x conv_size, single in/out channel.
my_filter = tf.Variable(tf.random_normal(shape=[conv_size, conv_size, 1, 1]))
# Create Convolutional Layer
my_convolution_output = conv_layer_2d(x_input_2d, my_filter, stride_size=conv_stride_size)
# --------Activation--------
def activation(input_1d):
    """Element-wise ReLU non-linearity (works for any tensor rank)."""
    return (tf.nn.relu(input_1d))
# Create Activation Layer
my_activation_output = activation(my_convolution_output)
# --------Max Pool--------
def max_pool(input_2d, width, height, stride):
    """Max-pool a 2-D input with a (height x width) window, equal strides.

    The input is lifted to [1, H, W, 1], pooled with VALID padding, and
    squeezed back to 2-D.
    """
    lifted_4d = tf.expand_dims(tf.expand_dims(input_2d, 0), 3)
    pooled = tf.nn.max_pool(lifted_4d,
                            ksize=[1, height, width, 1],
                            strides=[1, stride, stride, 1],
                            padding='VALID')
    return tf.squeeze(pooled)
# Create Max-Pool Layer: square window, stride 1.
my_maxpool_output = max_pool(my_activation_output,
                             width=maxpool_size, height=maxpool_size, stride=maxpool_stride_size)
# --------Fully Connected--------
def fully_connected(input_layer, num_outputs):
    """Dense layer over a 2-D input: flatten, then y = flat·W + b.

    NOTE(review): as in the 1-D version, W and b are re-sampled on every
    run because they are not tf.Variables.
    """
    # Flatten the W x H input to a 1-D vector of length W*H.
    flat_input = tf.reshape(input_layer, [-1])
    # Weight shape = [W*H, num_outputs], taken from the flattened length.
    weight_shape = tf.squeeze(tf.stack([tf.shape(flat_input), [num_outputs]]))
    weight = tf.random_normal(weight_shape, stddev=0.1)
    bias = tf.random_normal(shape=[num_outputs])
    # Promote to a row vector for matmul, then squeeze the result.
    row_vector = tf.expand_dims(flat_input, 0)
    dense = tf.add(tf.matmul(row_vector, weight), bias)
    return tf.squeeze(dense)
# Create Fully Connected Layer
my_full_output = fully_connected(my_maxpool_output, 5)
# Run graph: initialize variables, then evaluate each layer on data_2d
# and print its shape and output.
# Initialize Variables
init = tf.global_variables_initializer()
sess.run(init)
feed_dict = {x_input_2d: data_2d}
print('\n>>>> 2D Data <<<<')
# Convolution Output
print('Input = %s array'%(x_input_2d.shape.as_list()))
print('%s Convolution, stride size = [%d, %d] , results in the %s array'%
      (my_filter.get_shape().as_list()[:2], conv_stride_size, conv_stride_size, my_convolution_output.shape.as_list()))
print(sess.run(my_convolution_output, feed_dict=feed_dict))
# Activation Output
print('\nInput = the above %s array'%(my_convolution_output.shape.as_list()))
print('ReLU element wise returns the %s array'%(my_activation_output.shape.as_list()))
print(sess.run(my_activation_output, feed_dict=feed_dict))
# Max Pool Output
print('\nInput = the above %s array'%(my_activation_output.shape.as_list()))
print('MaxPool, stride size = [%d, %d], results in %s array'%
      (maxpool_stride_size, maxpool_stride_size, my_maxpool_output.shape.as_list()))
print(sess.run(my_maxpool_output, feed_dict=feed_dict))
# Fully Connected Output
print('\nInput = the above %s array'%(my_maxpool_output.shape.as_list()))
print('Fully connected layer on all %d rows results in %s outputs:'%
      (my_maxpool_output.shape.as_list()[0], my_full_output.shape.as_list()[0]))
print(sess.run(my_full_output, feed_dict=feed_dict))
|
from django.db.models import Count
from django.shortcuts import get_object_or_404, render
from .models import Article, ArticleStatuses, NewsCategory
def home(request, category_id=None, slug=None):
    """News listing page.

    Staff see published and draft articles; everyone else sees only
    published ones. An optional category narrows the queryset. AJAX
    requests (X-Is-Ajax header) receive only the article-list fragment.
    """
    category = None
    if category_id:
        category = get_object_or_404(NewsCategory, id=category_id)

    # Build the status/category filters once instead of maintaining four
    # near-identical query branches (previous duplication).
    filters = {}
    if request.user.is_staff:
        filters["status__in"] = (ArticleStatuses.published, ArticleStatuses.draft)
    else:
        filters["status"] = ArticleStatuses.published
    if category:
        filters["category"] = category
    articles = Article.objects.filter(**filters).translated()

    template = "news/home.html"
    if request.headers.get("X-Is-Ajax"):
        # Partial render for in-page (AJAX) updates.
        template = "news/includes/articles.html"
    return render(
        request,
        template,
        {
            "articles": articles,
            "current_category": category,
            # Only offer categories that actually contain articles.
            "categories": NewsCategory.objects.annotate(
                article_count=Count("articles")
            ).filter(article_count__gt=0),
        },
    )
def details(request, id, slug):
    """Single-article page; staff may additionally view drafts."""
    if request.user.is_staff:
        allowed = {"status__in": (ArticleStatuses.published, ArticleStatuses.draft)}
    else:
        allowed = {"status": ArticleStatuses.published}
    article = get_object_or_404(Article, id=id, **allowed)
    # Let the CMS toolbar know which object is being displayed.
    if hasattr(request, "toolbar"):
        request.toolbar.set_object(article)
    context = {"article": article, "meta": article.as_meta()}
    return render(request, "news/details.html", context)
|
# Generated by Django 3.0.5 on 2020-05-05 15:39
from django.db import migrations, models
class Migration(migrations.Migration):
    """Replace Student.afterSchool with a many-to-many AfterSchoolActivity model."""

    dependencies = [
        ('manageClasses', '0001_initial'),
    ]

    operations = [
        # Drop the old scalar field from Student...
        migrations.RemoveField(
            model_name='student',
            name='afterSchool',
        ),
        # ...and model activities as their own table linked to both
        # students and teachers.
        migrations.CreateModel(
            name='AfterSchoolActivity',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=250)),
                ('students', models.ManyToManyField(to='manageClasses.Student')),
                ('teachers', models.ManyToManyField(to='manageClasses.Teacher')),
            ],
        ),
    ]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 19 10:12:18 2018
@author: siva
"""
# creating dataset for voice acitivity detection
from extract_features import extract_features
import numpy as np
import os
from os.path import dirname, abspath, join
import scipy.io.wavfile as wav
import pandas as pd
import pickle
DATA_FOLDER = join(dirname(dirname(abspath(__file__))), 'data', 'testwav','0713')
# Analysis window in seconds (multiplied by the sampling rate below).
WINDOW_LENGTH = 1
# Frame length passed to extract_features — presumably milliseconds; TODO confirm.
FRAME_LENGTH = 25
# Column order of the output DataFrame: 16 acoustic features plus the
# clip type, file name and window number.
feature_name = "RMS,SE,ZCR,LEFR,SF,SF_std,SRF,SRF_std,SC,SC_std,BW,BW_std,NWPD,NWPD_std,RSE,RSE_std,type,name,number".split(
    ",")
features_dict = {feature: [] for feature in feature_name}
for root, dirs, files in os.walk(DATA_FOLDER):
    for audio in files:
        # Only process files whose names mark their class.
        if "noise" in audio or "music" in audio or "speech" in audio or "audio" in audio:
            print("****************************")
            print("reading:", audio)
            sampling_rate, sig = wav.read(join(root, audio))
            print("sampling rate:", sampling_rate, "signal length", len(sig))
            index = 0
            number = 0
            # Slide a non-overlapping WINDOW_LENGTH-second window over the signal.
            while index + (sampling_rate * WINDOW_LENGTH) < len(sig):
                sample = sig[index:(index + (sampling_rate * WINDOW_LENGTH))]
                ef = extract_features(sample, FRAME_LENGTH, sampling_rate)
                rms, se, zcr, lefr, sf, srf, sc, bd, nwpd, rse = ef.return_()
                # Scalar features are stored directly; per-frame features are
                # summarized by their mean and standard deviation.
                features_dict["RMS"].append(rms)
                features_dict["SE"].append(se)
                features_dict["ZCR"].append(zcr)
                features_dict["LEFR"].append(lefr)
                features_dict["SF"].append(np.mean(sf))
                features_dict["SF_std"].append(np.std(sf))
                features_dict["SC"].append(np.mean(sc))
                features_dict["SC_std"].append(np.std(sc))
                features_dict["SRF"].append(np.mean(srf))
                features_dict["SRF_std"].append(np.std(srf))
                features_dict["BW"].append(np.mean(bd))
                features_dict["BW_std"].append(np.std(bd))
                features_dict["NWPD"].append(np.mean(nwpd))
                features_dict["NWPD_std"].append(np.std(nwpd))
                features_dict["RSE"].append(np.mean(rse))
                features_dict["RSE_std"].append(np.std(rse))
                # Class label is the file-name prefix before the first '-'.
                features_dict["type"].append(audio.split("-")[0])
                features_dict["name"].append(audio)
                features_dict["number"].append(number)
                number += 1
                index += sampling_rate * WINDOW_LENGTH
features_df = pd.DataFrame.from_dict(features_dict)
# Fix the column order, then persist the dataset.
features_df = features_df[feature_name]
with open(join(DATA_FOLDER, "features_df_1s.pickle"), "wb") as file:
    pickle.dump(features_df, file)
|
class Person:
    """Simple record holding a person's first and last name."""

    def __init__(self, firstname, lastname):
        """Store the given names as public attributes."""
        self.firstname = firstname
        self.lastname = lastname
new_person = Person("Vivek","Khimani")
new_person.firstname = "Vivko"
print(new_person.firstname)
|
import os
from sv2.helpers import run_checkers
summary = "Check if coredumps are enabled"
class CoreDump:
    """Checker that flags hosts where core dumps are enabled."""

    def __init__(self, report):
        # Issues found by the checker methods are collected on this report.
        self._report = report

    def core_dump_enabled(self):
        """Report an issue when the core-dump size limit is not 0."""
        # `ulimit -c` prints the soft core-file size limit; "0" means disabled.
        # BUG FIX: close the popen pipe via a context manager, and fix the
        # typos in the user-facing message ("recomended" / "leakeage").
        with os.popen("ulimit -c") as limit:
            if limit.read() != "0\n":
                self._report.new_issue(
                    "It's recommended to disable core dumps to avoid information leakage")
def run(report, opts):
    """Framework entry point: run every CoreDump checker against `report`."""
    c = CoreDump(report)
    run_checkers(c, opts)
def makes_sense(report):
    """This check applies to every host, so it always makes sense to run."""
    return True
|
# Which starting number, under one million, produces the longest chain?
# Comments Section:
# - Straight forward algorithm using a cache
def collatz(x):
    """Return the next term of the Collatz sequence after x.

    Even -> x // 2, odd -> 3x + 1; returns 1 for x == 1 (fixed point).
    """
    if x % 2 == 0:
        # BUG FIX: use floor division so the sequence stays in integers;
        # `x / 2` produced floats in Python 3, leaking float keys into the
        # caller's cache.
        return x // 2
    if x == 1:
        return 1
    # Odd step: 3x + 1.
    return 1 + 3 * x
def problem14():
    """Project Euler 14: find the start < 1,000,000 with the longest
    Collatz chain.

    Chain lengths are memoized in `cache`, keyed by starting value, where
    cache[n] is the number of Collatz steps from n down to 1.
    """
    cache = {}
    maxn = 0
    maxc = 0
    for start in range(2, 1000001):
        current = start
        steps = 0
        while current != 1:
            if current in cache:
                # Known suffix: add its memoized length and stop walking.
                steps += cache[current]
                break
            current = collatz(current)
            steps += 1
        cache[start] = steps
        if steps > maxc:
            maxc = steps
            maxn = start
    return ("The biggest number is " + str(maxn) + " with a chain of " + str(maxc))
|
"""Treadmill exceptions and utility functions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import six
_LOGGER = logging.getLogger(__name__)
class TreadmillError(Exception):
    """Root of the Treadmill exception hierarchy."""

    __slots__ = ()

    def __init__(self, msg):
        # Normalize to text so .message/str() are always unicode (py2/py3).
        super(TreadmillError, self).__init__(six.text_type(msg))

    @property
    def message(self):
        """Return the error message (the first constructor argument)."""
        return self.args[0]

    def __str__(self):
        return self.message
class InvalidInputError(TreadmillError):
    """Non-fatal error raised for malformed or incorrect input."""

    __slots__ = ('source',)

    def __init__(self, source, msg):
        super(InvalidInputError, self).__init__(msg=msg)
        # Origin of the bad input (kept for the caller's diagnostics).
        self.source = source
class ContainerSetupError(TreadmillError):
    """Fatal error: the container environment could not be set up."""

    __slots__ = ('reason',)

    def __init__(self, msg, reason=None):
        super(ContainerSetupError, self).__init__(msg=msg)
        # Machine-readable failure category; defaults to 'unknown'.
        self.reason = 'unknown' if reason is None else reason
class NodeSetupError(TreadmillError):
    """Fatal error, indicating problem initializing the node environment."""
    __slots__ = ()


class LocalFileNotFoundError(TreadmillError):
    """Thrown if the file cannot be found on the host."""
    __slots__ = ()


class NotFoundError(TreadmillError):
    """Thrown in REST API when a resource is not found."""
    __slots__ = ()


class FoundError(TreadmillError):
    """Thrown in REST API when a resource is found (conflict on create)."""
    __slots__ = ()


class QuotaExceededError(TreadmillError):
    """Thrown if quota is exceeded."""
    __slots__ = ()
|
# History of board snapshots consumed by the visualization front-end.
assignments = []

# Row / column labels used to name every box: 'A1' .. 'I9'.
rows = 'ABCDEFGHI'
cols = '123456789'
def cross(a, b):
    """Return the cross product of two label strings.

    Args:
        a: iterable of row labels, e.g. 'ABCDEFGHI'.
        b: iterable of column labels, e.g. '123456789'.

    Returns:
        list: concatenated pairs in row-major order, e.g. ['A1', ..., 'I9'].
    """
    result = []
    for row in a:
        for col in b:
            result.append(row + col)
    return result
# All 81 box names.
boxes = cross(rows, cols)

# The 9 rows, 9 columns, 9 3x3 squares and the two main diagonals — the
# diagonal units make this a *diagonal* sudoku variant.
row_units = [cross(r, cols) for r in rows]
column_units = [cross(rows, c) for c in cols]
square_units = [cross(rs, cs) for rs in ('ABC','DEF','GHI') for cs in ('123','456','789')]
diagonal_units = [['A1','B2','C3','D4','E5','F6','G7','H8','I9'],['A9','B8','C7','D6','E5','F4','G3','H2','I1']]
unitlist = row_units + column_units + square_units + diagonal_units
# units[s]: every unit containing box s; peers[s]: all boxes sharing a unit with s.
units = dict((s, [u for u in unitlist if s in u]) for s in boxes)
peers = dict((s, set(sum(units[s],[]))-set([s])) for s in boxes)
def assign_value(values, box, value):
    """Set values[box] = value, recording solved boxes for visualization.

    Whenever a box narrows to a single digit, a snapshot of the whole grid
    is appended to the module-level `assignments` history (used by the
    visualizer).

    Args:
        values(dict): box name -> candidate string.
        box(string): key of the box being updated.
        value(string): new candidate string for that box.

    Returns:
        dict: the same `values` mapping, updated in place.
    """
    solved = len(value) == 1
    values[box] = value
    if solved:
        # Box solved: snapshot the grid for playback.
        assignments.append(values.copy())
    return values
def naked_twins(values):
    """Eliminate values using the naked twins strategy.
    Args:
        values(dict): a dictionary of the form {'box_name': '123456789', ...}
    Returns:
        the values dictionary with the naked twins eliminated from peers.
    """
    # Find all instances of naked twins
    # Eliminate the naked twins as possibilities for their peers
    #
    # NOTE(review): this implementation looks fragile — `found` is reset
    # inside the row_units loop (so only the last iteration's value reaches
    # the `if not found` checks), the `break` exits only the square_units
    # scan, and `same_unit` keeps just the last unit discovered. Confirm
    # against the unit tests before relying on it.
    twin_numbers = ''  # a string that stores the two values of naked_twins boxes
    twin_box = ''  # a copy of the key of a box which has exactly two values
    twin_box2 = ''  # a copy of the key of a box which has the same values as the box found before
    same_unit = []  # a copy of the two boxes' same unit
    for box in values:  # in that loop, go through all boxes in the soduku
        same_unit = []  # for each loop, reset the value of same_unit
        if len(values[box]) == 2:  # if found a box with two values, record its key and value
            twin_numbers = values[box]
            twin_box = box
            for box in peers[twin_box]:  # search the box's peers to see if there are another box that has the same values, if so record the key
                if values[box] == twin_numbers:
                    twin_box2 = box
                    # search for the two boxes' same unit, then copy the unit to a new list
                    for i in range(len(row_units)):  # search row_units
                        found = False
                        if box in row_units[i] and twin_box in row_units[i]:
                            same_unit = row_units[i]
                            found = True
                    if not found:
                        for i in range(len(column_units)):  # search column_units
                            if box in column_units[i] and twin_box in column_units[i]:
                                same_unit = column_units[i]
                                found = True
                    if not found:
                        for i in range(len(diagonal_units)):  # search diagonal_units
                            if box in diagonal_units[i] and twin_box in diagonal_units[i]:
                                same_unit = diagonal_units[i]
                                found = True
                    if not found:
                        for i in range(len(square_units)):  # search square_units
                            if box in square_units[i] and twin_box in square_units[i]:
                                same_unit = square_units[i]
                                break  # break the second loop and start eliminating
        if same_unit:  # make sure if there are two boxes with the same two values and their same unit had been correctly copied
            for i in same_unit:  # for every box in the same unit except twins, remove the twin's two values
                if i == twin_box or i == twin_box2:  # skip the two boxes
                    continue
                assign_value(values, i, values[i].replace(twin_numbers[0], ''))  # remove the first of the two values
                assign_value(values, i, values[i].replace(twin_numbers[1],''))  # remove the second of the two values
    return values
def grid_values(grid):
    """
    Convert a puzzle string into a {box: candidates} dictionary.

    Args:
        grid(string): 81 characters; a digit 1-9 for a filled box, '.' for an
            empty one (any other characters are ignored).
    Returns:
        dict mapping each box label (e.g. 'A1') to its candidates: the given
        digit, or '123456789' when the box is empty.
    """
    digits = '123456789'
    cells = [digits if ch == '.' else ch
             for ch in grid
             if ch == '.' or ch in digits]
    assert len(cells) == 81
    return dict(zip(boxes, cells))
def display(values):
    """
    Print the sudoku as a 2-D grid with 3x3 separator lines.

    Args:
        values(dict): The sudoku in dictionary form
    """
    width = 1 + max(len(values[s]) for s in boxes)
    line = '+'.join(['-' * (width * 3)] * 3)
    for r in rows:
        print(''.join(values[r + c].center(width) + ('|' if c in '36' else '')
                      for c in cols))
        if r in 'CF':
            print(line)
    # Trailing blank line.  The original ended with a bare `print`, which in
    # Python 3 is just a reference to the function and prints nothing.
    print()
def eliminate(values):
    '''
    Remove each solved box's digit from the candidates of all of its peers.

    Args:
        values(dict): a dictionary of the form {'box_name': '123456789', ...}
    Return:
        values(dict): an updated dictionary
    '''
    # Snapshot the solved boxes first so mutation below cannot affect the scan.
    for solved_box in [b for b in values.keys() if len(values[b]) == 1]:
        digit = values[solved_box]
        for peer in peers[solved_box]:
            assign_value(values, peer, values[peer].replace(digit, ''))
    return values
def only_choice(values):
    '''
    Assign a digit to a box when it is the only box in some unit that can
    still hold that digit.

    Args:
        values(dict): a dictionary of the form {'box_name': '123456789', ...}
    Return:
        values(dict): an updated dictionary
    '''
    for unit in unitlist:
        for digit in '123456789':
            candidates = [b for b in unit if digit in values[b]]
            if len(candidates) == 1:
                assign_value(values, candidates[0], digit)
    return values
def reduce_puzzle(values):
    '''
    Repeatedly apply eliminate, only_choice and naked_twins until the grid
    stops changing.

    Args:
        values(dict): a dictionary of the form {'box_name': '123456789', ...}
    Return:
        values(dict): an updated dictionary, or False when some box ends up
        with zero candidates (a contradiction).
    '''
    # Fix: the original computed an unused `solved_values` list on every call.
    stalled = False
    while not stalled:
        # Progress is measured by the number of solved (single-candidate) boxes.
        solved_before = len([box for box in values.keys() if len(values[box]) == 1])
        values = eliminate(values)
        values = only_choice(values)
        values = naked_twins(values)
        solved_after = len([box for box in values.keys() if len(values[box]) == 1])
        stalled = solved_before == solved_after
        # A box with no candidates left means the puzzle is unsolvable.
        if len([box for box in values.keys() if len(values[box]) == 0]):
            return False
    return values
def search(values):
    '''
    Depth-first search: reduce the puzzle with constraint propagation, then
    branch on the unfilled box with the fewest remaining candidates.

    Args:
        values(dict): a dictionary of the form {'box_name': '123456789', ...}
    Return:
        values(dict): a solved grid, False on contradiction, or None when no
        branch succeeds.
    '''
    values = reduce_puzzle(values)
    if values is False:
        return False  # contradiction found during propagation
    if all(len(values[box]) == 1 for box in boxes):
        return values  # solved
    # Pick the unfilled box with the fewest candidates (min by count, then label).
    _, target = min((len(values[box]), box) for box in boxes if len(values[box]) > 1)
    # Try each candidate on a copy of the grid; return the first solution found.
    for candidate in values[target]:
        branch = values.copy()
        assign_value(branch, target, candidate)
        solved = search(branch)
        if solved:
            return solved
def solve(grid):
    """
    Find the solution to a Sudoku grid.

    Args:
        grid(string): a string representing a sudoku grid.
            Example: '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'
    Returns:
        The dictionary representation of the final sudoku grid. False if no
        solution exists.
    """
    result = search(grid_values(grid))
    return result if result else False
if __name__ == '__main__':
    diag_sudoku_grid = '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'
    display(solve(diag_sudoku_grid))
    try:
        from visualize import visualize_assignments
        visualize_assignments(assignments)
    except SystemExit:
        pass
    except Exception:
        # Fix: the original bare `except:` also swallowed KeyboardInterrupt;
        # Exception keeps the best-effort behaviour for pygame problems only.
        print('We could not visualize your board due to a pygame issue. Not a problem! It is not a requirement.')
|
#!/usr/bin/env python3
# Marble game: place marbles clockwise in a circle; every 23rd marble scores
# (banked plus the marble 7 positions back, which is removed).
import re
from blist import blist

with open('input.txt') as f:
    numbers = [int(token) for token in re.findall(r'\d+', f.read())]

player_count, last_marble = numbers[0], numbers[1]
scores = [0] * player_count
circle = blist([0])
position = 0
for marble in range(1, last_marble + 1):
    if marble % 23 == 0:
        # Scoring turn: step 7 back, bank this marble plus the removed one.
        position = (position - 7 + len(circle)) % len(circle)
        player = (marble - 1) % len(scores)
        scores[player] += marble + circle.pop(position)
    else:
        position = (position + 2) % len(circle)
        circle.insert(position, marble)
print ('Day 9 result:', max(scores))
|
# encoding: utf-8
from tastypie.paginator import *
|
# Bind an integer literal and report its runtime type.
x = 12
print(type(x))
|
# Generated by Django 2.2.10 on 2020-03-04 16:09
import datetime
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adjust `account` field defaults: pinned `dob` default and the user FK."""

    dependencies = [
        ('account', '0008_auto_20200304_0453'),
    ]

    operations = [
        migrations.AlterField(
            model_name='account',
            name='dob',
            # NOTE(review): this default is a fixed timestamp captured at
            # makemigrations time, not a callable -- every new row defaults to
            # 2020-03-04. Probably meant datetime.date.today; confirm the model.
            field=models.DateField(default=datetime.datetime(2020, 3, 4, 16, 9, 29, 333692)),
        ),
        migrations.AlterField(
            model_name='account',
            name='user',
            # Fix: on_delete must be a callable such as models.CASCADE; the
            # string 'CASCADE' raises TypeError when Django collects related
            # objects for deletion.
            field=models.ForeignKey(on_delete=models.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
    ]
|
import numpy as np
import networkx as nx
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torchvision
from torch.autograd import Variable
from torch.utils import data
#graphs = generate_graphs(num_of_nodes, num_of_graphs)
# Load the precomputed dataset: `graphs` is a stack of adjacency matrices and
# `labels` holds one row of targets per graph (two columns -- see training loop).
graphs = np.load("graphs_updated.npy")
labels = np.load("labels.npy")
num_of_graphs = graphs.shape[0]  # number of graphs in the stack
num_of_nodes = graphs.shape[1]   # vertices per graph
def compress_graphs(graphs):
    """Flatten adjacency matrices to their strict upper triangles.

    Undirected graphs without self loops have symmetric adjacency matrices
    with a zero diagonal, so the strict upper triangle carries all the
    information.  Works on a single (verts, verts) matrix or on a
    (n_graphs, verts, verts) stack.

    Args:
        graphs : adjacency matrix / hypermatrix; dim 1 is the vertex count.
    Returns:
        Array with the (verts choose 2) off-diagonal entries (per graph).
    """
    n_verts = graphs.shape[1]
    rows, cols = np.triu_indices(n_verts, 1)
    return graphs.T[rows, cols].T
# compress 2d adjacency matrix to 1d array, prepare input
# 190 = C(20, 2): strict-upper-triangle length for 20-node graphs.
compressed_graph = np.zeros((graphs.shape[0], 190))
for i in range(0, graphs.shape[0]):
    compressed_graph[i] = compress_graphs(graphs[i])

# 80/20 train/validation split of the compressed features and their labels.
graphs_train, graphs_test, train_labels, val_labels = \
    train_test_split(compressed_graph, labels, test_size=0.20, random_state=42)

batch_size = 64
# DataLoader settings shared by the training and validation generators.
params = {'batch_size': batch_size,
          'shuffle': True,
          'num_workers': 6}
max_epochs = 100
class Dataset(data.Dataset):
    'Characterizes a dataset for PyTorch'

    def __init__(self, list_IDs, labels):
        'Initialization'
        # NOTE(review): `list_IDs` is passed the *feature rows* themselves
        # (graphs_train / graphs_test) but is never read below -- __len__ and
        # __getitem__ use the module-level `compressed_graph`, `labels` and
        # `num_of_graphs`, so train and validation loaders draw from the same,
        # un-split data.  Verify whether that is intended.
        self.labels = labels
        self.list_IDs = list_IDs

    def __len__(self):
        'Denotes the total number of samples'
        # NOTE(review): returns the global dataset size, not
        # len(self.list_IDs) -- indices span every graph.
        return num_of_graphs

    def __getitem__(self, index):
        'Generates one sample of data'
        # Select sample
        ID = index
        # Load data and get label -- reads the module-level compressed_graph,
        # not the split stored on this instance (see NOTE in __init__).
        X = Variable(torch.from_numpy(compressed_graph[ID]))
        y = self.labels[ID]
        return X, y
# Generators
# NOTE(review): both loaders receive the *full* `labels` array, and Dataset
# ignores its first argument (see Dataset) -- training and validation batches
# are effectively drawn from the same underlying data.
training_set = Dataset(graphs_train, labels)
training_generator = data.DataLoader(training_set, **params)
validation_set = Dataset(graphs_test, labels)
validation_generator = data.DataLoader(validation_set, **params)
class CompressMatrixNetwork(torch.nn.Module):
    """Two-headed MLP over compressed (190-entry) adjacency vectors.

    A shared trunk (fc1 -> fc3) feeds two independent heads that each predict
    one node index: fc4 -> fc6 and fc5 -> fc7.
    """

    def __init__(self):
        super().__init__()
        self.fc1 = nn.Linear(190, 380)
        self.fc3 = nn.Linear(380, 85)
        self.fc4 = nn.Linear(85, 40)            # head-1 hidden layer
        self.fc5 = nn.Linear(85, 40)            # head-2 hidden layer
        self.fc6 = nn.Linear(40, num_of_nodes)  # head-1 output
        self.fc7 = nn.Linear(40, num_of_nodes)  # head-2 output

    def forward(self, x):
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc3(x))
        head1 = self.fc6(F.relu(self.fc4(x)))
        head2 = self.fc7(F.relu(self.fc5(x)))
        # Fix: return raw logits.  The training loop applies F.cross_entropy,
        # which performs log_softmax internally; the previous F.softmax here
        # meant softmax was applied twice, flattening the loss gradients.
        # Argmax-based predictions are unchanged because softmax is monotonic.
        return head1, head2
model = CompressMatrixNetwork()
# Re-randomise every layer's weights for a clean start.
for module in model.children():
    module.reset_parameters()
if torch.cuda.is_available():
    model.cuda()
optimizer = torch.optim.SGD(model.parameters(), lr=0.001)
optimizer.zero_grad()
info = []  # per-epoch rows: [epoch, train_loss, val_loss, train_acc, val_acc]
# Loop over epochs
for epoch in range(max_epochs):
    epoch_train_acc, correct_count = 0, 0
    epoch_val_acc = 0
    epoch_train_loss = 0
    epoch_val_loss = 0
    # Training
    for local_batch, local_labels in training_generator:
        # Transfer to GPU
        if torch.cuda.is_available():
            local_batch, local_labels = local_batch.cuda(), local_labels.cuda()
        model.train()
        label_batch = local_labels.long()
        input_batch = local_batch.float()
        output_batch = model(input_batch)
        # Label columns 0 and 1 hold the targets of the two heads; the total
        # loss is the sum of the two per-head cross entropies.
        train_loss = F.cross_entropy(output_batch[0], label_batch[:, 0]) \
            + F.cross_entropy(output_batch[1], label_batch[:, 1])
        # NOTE(review): accumulating the loss *tensor* (not .item()) keeps
        # every batch's autograd graph alive until the epoch ends -- confirm
        # the memory cost is acceptable.
        epoch_train_loss += train_loss
        pred1 = output_batch[0].data.max(1)[1]
        pred2 = output_batch[1].data.max(1)[1]
        # A sample counts as correct only when BOTH heads are right.
        matches = (label_batch[:, 0] == pred1) & (label_batch[:, 1] == pred2)
        epoch_train_acc += matches.float().mean()
        correct_count += matches.sum()
        optimizer.zero_grad()
        train_loss.backward()
        optimizer.step()
    # NOTE(review): these divide by batch_size, not by the number of batches,
    # so the reported per-epoch "loss"/"acc" are scaled by a constant factor
    # unless the loader happens to yield exactly batch_size batches. Verify.
    epoch_train_loss = epoch_train_loss / batch_size
    epoch_train_acc = epoch_train_acc / batch_size
    # Validation
    with torch.no_grad():
        for local_batch, local_labels in validation_generator:
            # Transfer to GPU
            # NOTE(review): unconditional .cuda() here crashes on CPU-only
            # machines, unlike the guarded transfer in the training loop.
            local_batch, local_labels = local_batch.cuda(), local_labels.cuda()
            model.eval()
            label_batch = local_labels.long()
            input_batch = local_batch.float()
            output_batch = model(input_batch)
            val_loss = F.cross_entropy(output_batch[0], label_batch[:, 0]) \
                + F.cross_entropy(output_batch[1], label_batch[:, 1])
            epoch_val_loss += val_loss
            pred1 = output_batch[0].data.max(1)[1]
            pred2 = output_batch[1].data.max(1)[1]
            matches = (label_batch[:, 0] == pred1) & (label_batch[:, 1] == pred2)
            epoch_val_acc += matches.float().mean()
    epoch_val_loss = epoch_val_loss / batch_size
    epoch_val_acc = epoch_val_acc / batch_size
    info.append([epoch, epoch_train_loss, epoch_val_loss, epoch_train_acc, epoch_val_acc])
    print('epoch {:5d}: Obtained a training acc of {:.3f}.'.format(epoch, epoch_train_acc))
    print('epoch {:5d}: Obtained a validation acc of {:.3f}.'.format(epoch, epoch_val_acc))
|
"""
Write a python lambda expression for calculating simple interest.
If simple interest is greater than 1000, display as “Platinum Member”, otherwise “Gold Member”.
Use the below formula to calculate the simple interest.
simple_interest=(principal_amount*duration in years*rate_of_interest)/100
Test your code by using the given sample inputs.
Verify your code by using the second sample input (highlighted) given below:
+-------------------------+-----------------+
| Sample Input | Expected Output |
+-------------------------+-----------------+
| principal_amount = 2000 | |
| duration = 2 | Gold Member |
| rate_of_interest = 10 | |
+-------------------------+-----------------+
| principal_amount = 4000 | |
| duration = 12 | |
| rate_of_interest = 13 | |
+-------------------------+-----------------+
"""
#PF-Exer-39
#This verification is based on string match.
principal_amount = 4000
duration = 12
rate_of_interest = 13

# Simple interest: (principal * years * rate) / 100.
simple_interest = lambda principal, years, rate: (principal * years * rate) / 100

membership = ("Platinum Member"
              if simple_interest(principal_amount, duration, rate_of_interest) > 1000
              else "Gold Member")
print(membership)
|
import unittest
from katas.kyu_8.add_more_item_to_list import AddExtra
class AddExtraTestCase(unittest.TestCase):
    """AddExtra must grow any input list by exactly one element."""

    def test_something(self):
        # A two-element input comes back with three elements.
        result = AddExtra([1, 2])
        self.assertEqual(len(result), 3)

    def test_equals_2(self):
        # Even an empty list gains one element.
        result = AddExtra([])
        self.assertEqual(len(result), 1)
|
#multidimensional list
import random
import math

# Build a 10x10 grid of zeros.  The comprehension gives each row its own list,
# avoiding the shared-row trap of [[0] * 10] * 10.
multiDlist = [[0] * 10 for _ in range(10)]
multiDlist[0][1] = 10
print(multiDlist[1][1])
|
import os
import datetime, re
from flask import Flask, render_template, redirect, request
from flask_sqlalchemy import SQLAlchemy
# Build an absolute sqlite URI next to this file so the app works from any CWD.
project_dir = os.path.dirname(os.path.abspath(__file__))
database_file = "sqlite:///{}".format(os.path.join(project_dir, "contactdatabase.db"))

app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = database_file
# Disable SQLAlchemy's modification-tracking signals (saves memory).
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
###################### Database - Table ##############################
def slugify(s):
    """Lower-case *s* and collapse every run of non-word characters to '-'.

    Args:
        s (str): arbitrary text, e.g. a title.
    Returns:
        str: a URL-friendly slug, e.g. 'Hello World' -> 'hello-world'.
    """
    # Fix: raw string -- '[^\w]+' as a plain literal relies on an invalid
    # escape sequence that newer Pythons warn about.
    return re.sub(r'[^\w]+', '-', s).lower()
class Contact(db.Model):
    """A single address-book entry persisted via SQLAlchemy."""
    id = db.Column(db.Integer, primary_key=True)
    fname = db.Column(db.String(100))  # first name
    lname = db.Column(db.String(100))  # last name
    age = db.Column(db.Integer)
    email = db.Column(db.String(100))
    phone = db.Column(db.String(100))
    description = db.Column(db.Text)
    # Set once at insert time (callable default, evaluated per row).
    created_timestamp = db.Column(db.DateTime, default=datetime.datetime.now)
    # Refreshed automatically on every UPDATE via onupdate.
    modified_timestamp = db.Column(
        db.DateTime,
        default=datetime.datetime.now,
        onupdate=datetime.datetime.now)

    def __repr__(self):
        return '<Contact: %s>' % self.id
###################### Routes & Views ##############################
@app.route('/', methods=["GET", "POST"])
def index():
    """List all contacts; on a POST with form data, create a new contact."""
    if request.form:
        try:
            contact = Contact(fname=request.form.get("fname"), lname=request.form.get("lname"), age=request.form.get("age"), phone=request.form.get("phone"), email=request.form.get("email"))
            db.session.add(contact)
            db.session.commit()
        except Exception as e:
            # Fix: roll back so the session stays usable after a failed insert.
            db.session.rollback()
            # Fix: message said "book" -- this app stores contacts.
            print("Failed to add contact")
            print(e)
    contacts = Contact.query.all()
    return render_template("index.html", contacts=contacts)
@app.route('/edit')
def edit():
    """Placeholder: confirms the /edit route is wired up."""
    placeholder = '<h1> Edit page is working </h1>'
    return placeholder
@app.route('/show')
def show():
    """Placeholder: confirms the /show route is wired up."""
    placeholder = '<h1> Show page is working </h1>'
    return placeholder
if __name__ == "__main__":
    # Development server only: debug=True enables the reloader and debugger.
    app.run(debug = True)
|
# Empty dictionary
dic = {}

# user_1 = input('Enter your name :')
user_1 = 'Ankit'
# user_2 = input('Enter your name :')
user_2 = 'Anna'
# user_3 = input('Enter your name :')
user_3 = 'Ankita'
# user_4 = input('Enter your name :')
user_4 = 'Anmol'

# user_lang_1 = input('Enter your Favourite Programing Language :')
user_lang_1 = 'Python'
user_lang_2 = 'C++'
user_lang_3 = 'Java'
user_lang_4 = 'PHP'

# Update: map each user to their favourite language, in input order.
for name, language in zip((user_1, user_2, user_3, user_4),
                          (user_lang_1, user_lang_2, user_lang_3, user_lang_4)):
    dic[name] = language
print(dic)

# dic[user_4] = input('Type the Value: ')
dic.update(Tako='Tila')
print(dic)
|
import os
from multiprocessing import Process, Queue
from sklearn.neighbors import NearestNeighbors
import pandas as pd
import seaborn as sns
# CODA needs this environment variable before `import coda` so it can locate
# the Aeolus product definitions.
# NOTE(review): os.putenv does not update os.environ, and libraries reading
# os.environ may not see it -- os.environ['CODA_DEFINITION'] = ... is safer.
os.putenv('CODA_DEFINITION', '/home/mmueller/hiwi/aeolus/')
import coda
from numpy import vstack, zeros
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
# NOTE(review): selecting the Agg (headless) backend *after* importing pyplot
# can be too late on older matplotlib versions -- move before the pyplot
# import if plots misbehave on a headless host.
matplotlib.use("Agg")
import pyproj
import geopandas as gpd
from shapely.ops import nearest_points
from shapely.geometry import LineString
import sys, os
from radarlidar_analysis.RadarLidarWindSpeed import RadarLidarWindSpeed
from datetime import datetime, time, timedelta
import tarfile
import math
from sklearn.neighbors import KDTree
import xarray as xr
def create_gdf(df, x='lat', y='lon'):
    """Wrap a DataFrame in a GeoDataFrame with WGS84 point geometry.

    Note the argument order: points_from_xy(longitude, latitude), hence
    df[y] (lon) before df[x] (lat).
    """
    # NOTE(review): crs={'init': 'EPSG:4326'} is the deprecated pyproj<2 form;
    # recent geopandas expects crs='EPSG:4326'. Confirm installed versions.
    return gpd.GeoDataFrame(df, geometry=gpd.points_from_xy(df[y], df[x]), crs={'init':'EPSG:4326'})
def readToGDF(product, target, measurementDatetime):
    """Read one wind channel from an opened Aeolus L2B product into a GeoDataFrame.

    Args:
        product: handle returned by coda.open() on a .DBL file.
        target: 'rayleigh' or 'mie' -- selects which dataset paths to fetch.
        measurementDatetime: datetime stamped onto every row.
    Returns:
        GeoDataFrame with per-wind-result lat/lon/alt, HLOS wind speed and
        error estimate [m/s], LOS azimuth and validity flag.
    """
    # NOTE(review): any other `target` value leaves the locals below unbound
    # and raises NameError at the conversion step.
    if target == "rayleigh":
        latitude = coda.fetch(product, 'rayleigh_geolocation', -1, 'windresult_geolocation/latitude_cog')
        longitude = coda.fetch(product, 'rayleigh_geolocation', -1, 'windresult_geolocation/longitude_cog')
        altitude = coda.fetch(product, 'rayleigh_geolocation', -1, 'windresult_geolocation/altitude_vcog')
        Velocity = coda.fetch(product, 'rayleigh_hloswind', -1, 'windresult/rayleigh_wind_velocity')
        error = coda.fetch(product, 'rayleigh_wind_prod_conf_data', -1, 'rayleigh_wind_qc/hlos_error_estimate')
        Validity = coda.fetch(product, 'rayleigh_hloswind', -1, 'windresult/validity_flag')
        resultId = coda.fetch(product, 'rayleigh_profile', -1, 'l2b_wind_profiles/wind_result_id_number')
        time = coda.fetch(product, 'rayleigh_profile', -1, 'Start_of_Obs_DateTime')
        orbit = coda.fetch(product, 'rayleigh_geolocation', -1, 'windresult_geolocation/altitude_vcog')
        azimuth = coda.fetch(product, 'rayleigh_geolocation', -1, 'windresult_geolocation/los_azimuth')
    elif target == 'mie':
        latitude = coda.fetch(product, 'mie_geolocation', -1, 'windresult_geolocation/latitude_cog')
        longitude = coda.fetch(product, 'mie_geolocation', -1, 'windresult_geolocation/longitude_cog')
        altitude = coda.fetch(product, 'mie_geolocation', -1, 'windresult_geolocation/altitude_vcog')
        Velocity = coda.fetch(product, 'mie_hloswind', -1, 'windresult/mie_wind_velocity')
        error = coda.fetch(product, 'mie_wind_prod_conf_data', -1, 'Mie_Wind_QC/hlos_error_estimate')
        Validity = coda.fetch(product, 'mie_hloswind', -1, 'windresult/validity_flag')
        resultId = coda.fetch(product, 'mie_profile', -1, 'l2b_wind_profiles/wind_result_id_number')
        time = coda.fetch(product, 'mie_profile', -1, 'Start_of_Obs_DateTime')
        orbit = coda.fetch(product, 'mie_geolocation', -1, 'windresult_geolocation/altitude_vcog')
        azimuth = coda.fetch(product, 'mie_geolocation', -1, 'windresult_geolocation/los_azimuth')
    # Scale by 0.01 -- presumably cm/s to m/s; TODO confirm against the
    # product specification.
    Velocity = Velocity*0.01
    error = error*0.01
    # NOTE(review): resultId, time and orbit are fetched but never used, and
    # 'orbit' re-fetches altitude_vcog -- likely copy/paste leftovers.
    df = pd.DataFrame(data={
        'measurementDatetime': measurementDatetime,
        'alt': altitude,
        'lat': latitude,
        'lon': longitude,
        'speed': Velocity,
        'azimuth': azimuth,
        'validity': Validity,
        'error': error
    })
    gdf = create_gdf(df)
    #gdf = gdf[gdf.lat != 0.0]
    #print(gdf.validity)
    #gdf = gdf[gdf.validity == 1.0]
    return gdf
def joyceNN(gdf):
    """Keep only rows within 0.5 degrees of the JOYCE site (50.90 N, 6.41 E)."""
    coords = np.column_stack((gdf.lat.to_list(), gdf.lon.to_list()))
    tree = KDTree(coords)
    joyce_site = np.array([[50.90, 6.41]])
    hits = tree.query_radius(joyce_site, r=0.5)  # 3082
    # nearest_ind = tree.query_radius(joyce, r=50)#3058
    return gdf.iloc[hits[0].tolist()]
def getMeasurementTime(filename):
    """Extract the observation date from an Aeolus product file name.

    The basename encodes the sensing start as YYYYMMDD at character offsets
    19-26.  The clock time is pinned to the 05:30 overpass (orbit 3082);
    for orbit 3058 it would be 17:20.
    """
    year = int(filename[19:23])
    month = int(filename[23:25])
    day = int(filename[25:27])
    overpass_date = datetime(year, month, day)
    return overpass_date.replace(hour=5, minute=30, second=0, microsecond=0)  # 3082
def getObservationData(measurementDatetime, aolusHlosAngle):
    """Load JOYCE radar/lidar wind profiles for the measurement day and
    project the horizontal wind onto the Aeolus line of sight (HLOS).

    Args:
        measurementDatetime: Aeolus overpass time (its date and hour are used).
        aolusHlosAngle: Aeolus LOS azimuth [deg] to project onto.
    Returns:
        DataFrame with columns 'speed' (HLOS-projected wind) and 'alt'.
    """
    end = measurementDatetime.replace(hour=23, minute=59, second=0, microsecond=0)
    begin = measurementDatetime.replace(hour=0, minute=0, second=0, microsecond=0)
    analysis = RadarLidarWindSpeed(begin, end)
    analysis.importDataset()
    analysis.calculateSpeedFusion()
    analysis.calculateDirectionFusion()
    # Flatten the index levels into columns so 'time' can be re-indexed below.
    analysis.dataframe.reset_index(level=0, inplace=True)
    analysis.dataframe.reset_index(level=0, inplace=True)
    # Keep only the overpass hour (HH:00 .. HH:00).
    time_begin = measurementDatetime.strftime("%H")+":00"
    time_end = measurementDatetime.strftime("%H")+":00"
    analysis.dataframe = analysis.dataframe.set_index('time')
    resultAnalysis = analysis.dataframe.between_time(time_begin, time_end)  # TODO: urgently change/automate this
    #resultAnalysis = analysis.dataframe.loc[analysis.dataframe.time == measurementDatetime]
    alt_observation = resultAnalysis.height.to_list()
    speed_observation = resultAnalysis.speedFusion.to_list()
    direction = resultAnalysis.directionFusion.to_list()
    # Project onto the HLOS direction: |v| * cos(azimuth difference).
    speed_joyce_hlos = []
    for i in range(len(direction)):
        difference = aolusHlosAngle-direction[i]
        rad = math.radians(difference)
        speed = np.absolute(speed_observation[i])*math.cos(rad)
        speed_joyce_hlos.append(speed)
    df = pd.DataFrame(data={
        'speed': speed_joyce_hlos,
        'alt': alt_observation
    })
    return(df)
def getICONdata(dt, aolusHlosAngle):
    """Load the ICON meteogram for Juelich (00 UTC run of day *dt*) and
    project the model wind onto the Aeolus HLOS azimuth.

    Args:
        dt: date of the model run.
        aolusHlosAngle: Aeolus LOS azimuth [deg].
    Returns:
        DataFrame with HLOS-projected 'speed' per model level height 'alt'.
    """
    filename = "meteogram.iglo.h."+dt.strftime("%Y%m%d")+"00.nc"
    path = "/data/mod/icon_op/iglo/site/"+dt.strftime("%Y/%m/")+filename
    ds = xr.open_dataset(path)
    # Locate the Juelich station and the U/V wind variables in the file.
    nStation = ds.station_name.values.tolist().index(b'Juelich')
    nU = ds.var_name.values.tolist().index(b'U')
    nV = ds.var_name.values.tolist().index(b'V')
    # time=4 picks a fixed forecast step.  NOTE(review): hard-coded -- tie it
    # to the overpass hour if other orbits are processed.
    height = ds.sel(nstations=nStation, nvars = [nU],nsfcvars=[],time=4)['heights'].values.flatten().tolist()
    u = ds.sel(nstations=nStation, nvars = [nU],nsfcvars=[],time=4)['values'].values.flatten().tolist()
    v = ds.sel(nstations=nStation, nvars = [nV],nsfcvars=[],time=4)['values'].values.flatten().tolist()
    # Wind speed and direction from the U/V components.
    speed_icon = []
    direction = []
    for i in range(len(u)):
        speed_icon.append(np.sqrt(u[i]**2 + v[i]**2))
        direction.append(math.degrees(np.arctan2(v[i], u[i])))
    # Project onto the HLOS direction: |v| * cos(azimuth difference).
    speed_icon_hlos = []
    for i in range(len(direction)):
        difference = aolusHlosAngle - direction[i]
        rad = math.radians(difference)
        speed = speed_icon[i]*math.cos(rad)
        speed_icon_hlos.append(speed)
    df = pd.DataFrame(data={
        'speed': speed_icon_hlos,
        'alt': height
    })
    return(df)
def readFile(path, list):
    """For each .TGZ archive name in *list*: extract it under *path*, read the
    Rayleigh and Mie wind products, select wind results near JOYCE, and save a
    comparison plot of Aeolus vs. ground-based JOYCE winds.

    NOTE(review): the parameter name `list` shadows the builtin -- rename
    (e.g. to file_names) once callers allow.
    """
    for filename in list:
        filename = filename[:-4]  # strip the '.TGZ' extension
        print(filename)
        tf = tarfile.open(path+filename+".TGZ", "r:gz")
        tf.extractall(path)
        tf.close()
        sys.path.append(path)
        try:
            #get Data
            measurementDatetime = getMeasurementTime(filename)
            print(path+filename+".DBL")
            product = coda.open(path+filename+".DBL")
            rayleighGdf = readToGDF(product,'rayleigh',measurementDatetime)
            mieGdf = readToGDF(product,'mie',measurementDatetime)
            # Keep only wind results flagged valid.
            rayleighGdf = rayleighGdf.loc[rayleighGdf.validity == 1.0]
            #rayleighGdf = rayleighGdf.loc[rayleighGdf.speed < 50]
            #rayleighGdf = rayleighGdf.loc[rayleighGdf.speed > -50]
            #rayleighGdf = rayleighGdf.loc[rayleighGdf.error < 7.0]
            mieGdf = mieGdf.loc[mieGdf.validity == 1.0]
            #mieGdf = mieGdf.loc[mieGdf.speed < 50]
            #mieGdf = mieGdf.loc[mieGdf.speed > -50]
            #mieGdf = mieGdf.loc[mieGdf.error < 5.0]
            # Clean up the extracted product files.
            os.remove(path+filename+".DBL")
            os.remove(path+filename+".HDR")
            # Restrict to wind results near the JOYCE site.
            rayleighGdf = joyceNN(rayleighGdf)
            mieGdf = joyceNN(mieGdf)
            # Mean LOS azimuth of the selected Rayleigh results defines the
            # projection direction for the ground data.
            aeolus_hlos_angle = rayleighGdf.azimuth.mean()
            observationDf = getObservationData(measurementDatetime,aeolus_hlos_angle )
            #Plot
            fig = plt.figure(figsize=(20,10))
            plt.title("AEOLUS: Wind-Speed "+measurementDatetime.strftime("%Y-%m-%d"))
            ax = plt.axes()
            sns.scatterplot(x = 'speed', y = 'alt', data = rayleighGdf,ax=ax, label="Aeolus Rayleigh")
            sns.scatterplot(x = 'speed', y = 'alt', data = mieGdf,ax=ax, label="Aeolus Mie")
            sns.scatterplot(x = 'speed', y = 'alt', data = observationDf,ax=ax, label="JOYCE")
            ax.set_xlabel("horizontal windspeed [m/s]")
            ax.set_ylabel("height AGL [m]")
            ax.legend()
            #plt.xlim([-50, 50])
            filename=path+'plots/'+measurementDatetime.strftime("%Y-%m-%d")+'.png'
            plt.savefig(filename,dpi=150)
            plt.show()
            plt.close()
        except Exception as e:
            # Best-effort batch: log the failure and continue with the next file.
            print("- error -")
            print(e)
def runParallel(path):
    """Process every Aeolus archive under *path* with four worker processes,
    each taking an interleaved quarter of the file list.

    Args:
        path: directory containing the .TGZ archives.

    Fix: the parameter was immediately overwritten with the hard-coded
    '/work/marcus_mueller/aeolus/3058/' path, making the argument dead; the
    unused Queue (and the builtin-shadowing loop name `list`) are gone too.
    """
    file_names = os.listdir(path)
    # Four interleaved chunks, one per worker process.
    tasks = [file_names[offset::4] for offset in range(4)]
    processes = [Process(target=readFile, args=(path, chunk)) for chunk in tasks]
    for proc in processes:
        proc.start()
    for proc in processes:
        proc.join()
def runSinle(path, filename):
    """Process a single archive under *path*.

    (Misspelled name kept: callers use it.)
    """
    readFile(path, [filename])
if __name__ == "__main__":
    # Process one hand-picked orbit-3082 archive; switch to runParallel for bulk runs.
    runSinle('/work/marcus_mueller/aeolus/3082/', 'AE_OPER_ALD_U_N_2B_20201215T054247_20201215T071323_0001.TGZ')
    #runParallel('/work/marcus_mueller/aeolus/3058/')
|
# Generated by Django 3.2.7 on 2021-09-15 04:05
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema: about / contact / information tables for the site app."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='about',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=80)),
                ('sub_title', models.TextField(max_length=150)),
                ('description', models.TextField(max_length=1000)),
            ],
        ),
        migrations.CreateModel(
            name='contact',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('email', models.EmailField(max_length=254)),
                ('phone', models.CharField(max_length=13)),
                ('subject', models.TextField(max_length=80)),
                ('content', models.TextField(max_length=900)),
                # Set once when the row is created.
                ('timestamp', models.DateTimeField(auto_now_add=True)),
            ],
        ),
        migrations.CreateModel(
            name='information',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('location', models.CharField(max_length=80)),
                ('email', models.EmailField(max_length=30)),
                ('email2', models.EmailField(max_length=30)),
                ('call', models.CharField(max_length=15)),
                ('call2', models.CharField(max_length=15)),
            ],
        ),
    ]
|
#!/usr/bin/env python
#from functions import print_f, fun
from sys import argv
import re
import numpy as np
from molecules import Cluster
def run_argparse():
    """Parse the command-line options that control MM-region pruning.

    Fix: '-angular'/'--angular' and '-polar'/'--polar' were each registered
    twice with the same dest; the later duplicate '--polar' silently reset the
    default from the documented '1' to '0'.  Each option now has one
    definition accepting both spellings, with the documented defaults.

    Returns:
        argparse.Namespace with qm/mm file names, cutoffs and the multipole /
        polarizability column counts.
    """
    import argparse
    A1 = argparse.ArgumentParser()
    A1.add_argument('-d', action="store_true", default=False)
    A1.add_argument('-qm', dest='qm',
            default= 'qm.xyz',
            help = 'file to read qm region, default [qm.xyz]')
    A1.add_argument('-mm', dest='mm',
            default= 'mm.xyz',
            help = 'file to read mm region, default [mm.xyz]')
    A1.add_argument('-outfile',
            default= "new_mm",
            help = "File to write the new mm_region to, default new_mmX.pro, X is cutoff")
    # NOTE(review): help text says "default 15 A" but the default is 5.0 -- confirm.
    A1.add_argument('-region_cutoff', dest='region_cutoff',
            type = float,
            default= 5.0,
            help = 'Cutoff for which atoms to include in qm/mm border, default 15 A')
    A1.add_argument('-cutoff', dest='cutoff',
            type = float,
            default= 1.2,
            help = 'Cutoff for which atoms to remove from qm/mm border, default 1.2 A')
    A1.add_argument('-xyz_files', action = 'store_true',
            default = False )
    A1.add_argument('-groups', action = 'store_true',
            default = False )
    A1.add_argument('-waters', type = float)
    # How many columns to read from mm_region depends on these numbers.
    A1.add_argument('-angular', '--angular', type = str,
            choices = ['0', '1', '2', '3'],
            default = '0',
            help = 'multipole moment number, default 0')
    A1.add_argument('-polar', '--polar', type = str,
            choices = ['0', '1', '2'],
            default = '1',
            help = 'polarizability number, default 1')
    A1.add_argument('-hyper', type = str,
            choices = ['0', '1', '2'],
            default = '1',
            help = 'polarizability number, default 1')
    A1.add_argument('--charge', type = str,
            default = '0',
            choices = ['0', '1'],)
    args = A1.parse_args( argv[1:])
    return args
class Props(object):
    def __init__(self):
        """
        Class that holds loprop properties; update later for full
        compatibility.  Right now only point charge and point polarizability.
        """
        # Fix: the original assigned to plain locals (charge = 0.0, ...), so
        # the attributes were never created and __str__ raised AttributeError.
        self.charge = 0.0
        self.angular = 0.0
        self.polar = 0.0
        # NOTE(review): the rest of this file sets capitalised .Charge/.Polar
        # on Props instances dynamically -- consider unifying the spelling.

    def __str__(self):
        return " ".join(map( str , [self.charge, self.polar ] ))
class Atom:
    def __init__(self, line, args, qm = False):
        """
        Build an atom from one coordinate-file line: a label, x, y, z and,
        depending on args.angular / args.polar, optional charge, multipole and
        polarizability columns.

        label: unique atom string, e.g. '63H-V-HA'.
        x, y, z: float coordinates.
        Props: per-atom MM properties (charge, polarizability, ...).
        CloseAtoms: neighbouring atoms; starts empty, filled later by Cluster.
        """
        self.CloseAtoms = []
        self.Props = Props()
        # Column counts implied by the requested angular / polar levels
        # (see real_l / real_a).
        l = int(real_l(args.angular))
        a = int(real_a(args.polar))
        line = line.split()
        #Exit if given q, l and a don't agree with supplied MM file
        # NOTE(review): despite the comment this only prints -- it does not
        # exit or raise, and parsing continues on the malformed line.
        if (4 + l + a) != len(line):
            print "Wrong supplied combination of --charge, --angular and --polar"
        if qm:
            return
        #If q = l = a = 0, read only xyz
        n = False  # NOTE(review): unused leftover.
        #This is xyz
        # NOTE(review): the three branches below are independent `if`s, not
        # elif -- e.g. when l == 0 and a == 0 all three run, and the last one
        # indexes line[5], which may not exist. Confirm intended combinations.
        if ( l+a ) == 0:
            self.label = line[0]
            self.x, self.y, self.z = map(float, [line[1], line[2], line[3]])
        #This is the charges
        if l == 0:
            self.label = line[0]
            self.x, self.y, self.z = map(float, [line[1], line[2], line[3]])
            self.Props.Charge = float( line[4] )
        #This is charges and polarizabilities
        if a == 0:
            self.label = line[0]
            self.x, self.y, self.z = map(float, [line[1], line[2], line[3]])
            self.Props.Charge = float( line[4] )
            self.Props.Polar = float( line[5] )
#
##Charges and multipoles
#        elif (a) == 0:
#            n = Atom( atom = i[0],
#                    xyz = [i[1], i[2], i[3]],
#                    charge = i[4],
#                    angular = i[5:5+l+1]
#                    )
##All included
#        else:
#            n = Atom( atom = i[0],
#                    xyz = [i[1], i[2], i[3]],
#                    charge = i[4],
#                    angular = i[5:5+l+1],
#                    polar = i[5+l+1:5+l+a+1]
#                    )
        # Element type: letter after the leading residue number (e.g. '63H...'
        # -> 'H'); fall back to the full label when the pattern does not match.
        try:
            self.atype = re.compile(r'^\d+([A-Z]{1})').match( line[0] ).group(1)
        except AttributeError:
            self.atype = line[0]

    def __eq__(self, other):
        # Equality means exact coordinate match (cf. Same() for a tolerant test).
        # NOTE(review): __ne__ is not defined, so under Python 2 `!=` does NOT
        # use this method and falls back to default comparison.
        if (self.x == other.x) and (self.y == other.y) and (self.z == other.z):
            return True
        else:
            return False

    def __str__(self):
        return self.label +' '+ str(self.x) + str(self.y) + str(self.z) +' '+str(self.Props.Charge) + ' CloseAtoms: %s'%" ".join(map(lambda x: \
                x.label, self.CloseAtoms))

    def Same(self,other):
        """
        Returns true if coordinates of self and other Matches
        Threshhold of 0.1 Angstrom
        """
        if self.Dist(other) < 0.1:
            return True
        else:
            return False

    def HasCloseAtom(self,other):
        """
        returns true if self and other are CloseAtoms.
        """
        if other in self.CloseAtoms:
            return True
        else:
            return False

    def AddCloseAtom(self, other):
        """
        Add other as a neighbour unless it is this very atom.
        """
        if self != other:
            self.CloseAtoms.append(other)

    def RemoveCloseAtom(self, other):
        """
        Remove other from the neighbour list if present.
        """
        if other in self.CloseAtoms:
            self.CloseAtoms.remove(other)

    def Dist(self,other):
        # Euclidean distance between the two atoms.
        r=np.sqrt((self.x - other.x)**2 +
                (self.y - other.y )**2 +
                (self.z - other.z )**2 )
        return r
class Cluster:
    """A named collection of Atoms with neighbour bookkeeping.

    NOTE(review): this shadows the Cluster imported from `molecules` at the
    top of the file -- confirm which one callers are meant to get.
    """
    # Row/column index per element symbol into Dist_matrix.
    atom_index = {'H':0, 'C':1, 'N':2, 'O':3, 'S':4}
    # Pairwise closeness thresholds, indexed via atom_index.
    # NOTE(review): presumably Angstrom bond-distance cutoffs -- confirm.
    Dist_matrix = [
            [ 0.0, 1.2 , 1.2 , 1.2, 1.2 ],
            [ 1.2, 1.6 , 1.75, 1.7, 1.7 ],
            [ 1.2, 1.75, 0.0 , 1.7, 2.5 ],
            [ 1.2, 1.7 , 1.7 , 0.0, 2.5 ],
            [ 1.2, 1.7 , 2.5 , 2.5, 0.0 ]]

    def __init__(self, label):
        self.label = label
        self.Atomlist = []

    def __len__(self):
        return len(self.Atomlist)

    def __getitem__(self, val):
        return self.Atomlist[val]

    def AddAtom(self, atom):
        """
        If atom doesn't exist, then add it
        """
        # NOTE(review): membership via `in` uses Atom.__eq__ (exact coordinate
        # match), not the 0.1-Angstrom tolerance used by ExistsAtom.
        if atom in self:
            return
        self.Atomlist.append( atom )

    def ExistsAtom(self, atom):
        """
        Search Atomlist for atom, if it exists, return True
        """
        for i in self.Atomlist:
            if i.Same(atom):
                return True
        return False

    def UpdateCloseAtoms(self, MM_C ):
        # Mark every pair of atoms in this cluster as neighbours when their
        # distance is below the element-pair threshold AND the partner is
        # also present in the MM cluster.
        for i in self.Atomlist:
            for j in self.Atomlist:
                if j in i.CloseAtoms:
                    continue
                r = self.Dist_matrix[ self.atom_index[ i.atype ]][ self.atom_index[ j.atype ]]
                if i.Dist(j) < r and j in MM_C:
                    i.AddCloseAtom(j)

    def RemoveAtom(self, atom, args):
        """
        Removes the Atom that has coordinates Matching x, y and z,
        but before removing, passes on it's properties evenly to
        CloseAtoms, and removes itself from their neighbour lists.
        If this is connected to a hydrogen, first take hydrogen properties to
        itself and remove the hydrogen; that way group properties are
        conserved instead of atom properties.
        """
        #Check if Atom exists in Atomlist
        if not self.ExistsAtom( atom ):
            return
        # Isolated atom: nothing to transfer, just drop it.
        if len( atom.CloseAtoms) == 0:
            self.Atomlist.remove(atom)
            return
        # Per-neighbour share of the removed atom's properties.
        try:
            q = atom.Props.Charge / len(atom.CloseAtoms)
            a = atom.Props.Polar / len(atom.CloseAtoms)
        except ZeroDivisionError:
            # NOTE(review): unreachable -- the len == 0 case returned above.
            print "ERROR Tried to remove atom with no CloseAtoms, try increasing region cutoff!"
            exit(10)
        if args.groups:
            # Group mode: fold the whole group's properties into the single
            # farthest neighbour ("keep"), removing the rest from the cluster.
            dic = {}
            d = []
            for i in atom.CloseAtoms:
                r = atom.Dist(i)
                dic[r] = i
                d.append( r )
            keep = dic[ max(d) ]
            atom.RemoveCloseAtom( keep )
            tmp = []
            for i in atom.CloseAtoms:
                atom.Props.Charge += i.Props.Charge
                atom.Props.Polar += i.Props.Polar
                tmp.append( i )
            for i in tmp:
                self.Atomlist.remove( i )
                atom.RemoveCloseAtom( i )
            keep.Props.Charge += atom.Props.Charge
            keep.Props.Polar += atom.Props.Polar
            self.Atomlist.remove( atom )
        else:
            # Atom mode: spread charge/polarizability evenly over neighbours.
            tmp = []
            for i in atom.CloseAtoms:
                i.Props.Charge += q
                i.Props.Polar += a
                tmp.append(i)
            for i in tmp:
                atom.RemoveCloseAtom( i )
            self.Atomlist.remove( atom )
def real_l(l):
    """
    Return (as a string) the number of angular-momentum columns to read for
    the given multipole level: 0 -> 0, 1 -> 3, 2 -> 6, 3 -> 9.

    Raises ValueError for any other level.
    """
    columns = {'0': '0', '1': '3', '2': '6', '3': '9'}
    key = str(l)
    if key in columns:
        return columns[key]
    print('BUG discovered, angular momentum number is out of bound')
    raise ValueError
def real_a(a):
    """
    Return (as a string) the number of polarizability columns to read for the
    given level: 0 -> 0, 1 (isotropic) -> 1, 2 -> 6.

    Raises ValueError for any other level.
    """
    columns = {'0': '0', '1': '1', '2': '6'}
    key = str(a)
    if key in columns:
        return columns[key]
    print('BUG discovered, polarizability number is out of bound')
    raise ValueError
def main():
"""
Main function for given input of MM and QM file, along with cutoff threshhold,
reorders the MM file to only include mm sites close to QM within a given distance.
Purpose is to study the automatic variation of the QM size given a fixed pdb
crystal structure. For covalently bonded systems in QM/MM.
"""
#Read options from terminal, list of options available with graph -h
args = run_argparse()
#Define a cluster class for QM, MM and QMMM regions
QM_C = Cluster( "QM" )
MM_C = Cluster( "MM")
QMMM_C = Cluster( "QMMM")
#Search pattern that matches a coordinate entry that has an arbitrary label for atom
pat = re.compile(r'^\s*\w+\S*\s+-*\d+\.\d*\s+-*\d+\.\d*\s+-*\d+\.\d*')
#Store all atoms from QM region
for i in open(args.qm).readlines():
if pat.match(i):
QM_C.AddAtom( Atom( i, args, qm= True ))
#Store all atoms in mm region that do not have identical coordinates to any atom in the QM region
for i in open(args.mm).readlines():
if pat.match(i):
#Add all atoms to MM region by default, unless if its water and water cutoff
if args.waters:
if i.split()[0].split('-')[1] == 'T3':
if QM_C.Atomlist[7].Dist( Atom( i, args) ) > args.waters:
continue
mm_atom = Atom( i, args)
MM_C.AddAtom( mm_atom )
for j in QM_C:
r = j.Dist( mm_atom )
if r < 0.1:
#Remove MM atoms without invoking property transfer methods for obvious QM atoms
MM_C.Atomlist.remove( mm_atom )
if r < args.region_cutoff:
#Atoms in a region within bond capping region placed in QM/MM region
QMMM_C.AddAtom( mm_atom )
print "Done storing MM and QMMM clusters"
print "QM atoms: %d\nMM atoms: %d\nQMMM atoms: %d\n" %(len(QM_C), len(MM_C), len(QMMM_C))
print "---------------------------\nUpdating close atoms in QMMM:"
QMMM_C.UpdateCloseAtoms( MM_C )
print "Done updating"
print "Information on QMMM cluster:"
for i in QMMM_C:
if i.label == "62C-X1-C":
print i
#Now time for elimination of atoms which are poisonous.
#Calculate distance between each atom in the boarder region and remove it
#if it is too cloose to qm as defined by cutoff
#print_f.pre_elimination(qm_region, qm_mm_region, args.cutoff)
print "---------------------------\nBegin elimination\n\n"
for i in QMMM_C:
for j in QM_C:
r = i.Dist(j)
if r < float(args.cutoff):
#If this atom still exists in mm region, it should be removed, and its properties transfered defiend by args implemented in RemoveAtom method
MM_C.RemoveAtom( i, args )
continue
print "-----------------------------\nPost elimination:"
print "Lenght of MM: %d\nlength of QMMM: %d" %(len(MM_C), len(QMMM_C))
f = open(args.outfile + "_" + str(args.cutoff) + '.pro', 'w')
f.write('AA\n')
f.write('%d %d %d %d\n'%tuple([len(MM_C),int(args.angular),int(args.charge),len(args.polar) ] ))
for i in MM_C:
f.write("{0:10s} {1:10f} {2:10f} {3:10f} {4:10f} {5:10f}\n".format( i.label, i.x, i.y, i.z, i.Props.Charge, i.Props.Polar))
f.close()
pat = re.compile(r'(^\d+)')
f = open(args.outfile + "_" + str(args.cutoff) + '.pot', 'w')
f.write('AA\n')
f.write('%d %d %d %d\n'%tuple([len(MM_C),int(args.angular),int(args.charge),len(args.polar) ] ))
for i in MM_C:
f.write("{0:10s} {1:10f} {2:10f} {3:10f} {4:10f} {5:10f}\n".format( pat.match( i.label).group(1) , i.x, i.y, i.z, i.Props.Charge, i.Props.Polar))
f.close()
if args.xyz_files:
f = open( args.outfile + "_QM_MM.xyz", 'w')
f.write( str(len( QM_C) + len(MM_C)) + "\n\n" )
for i in QM_C:
f.write("{0:15s}{1:10f}{2:10f}{3:10f}\n".format( i.atype, i.x, i.y, i.z))
for i in MM_C:
f.write("{0:15s}{1:10f}{2:10f}{3:10f}\n".format( i.atype, i.x, i.y, i.z))
f.close()
f = open( args.outfile + "_QM_QMMM.xyz", 'w')
f.write( str(len( QMMM_C)+ len(QM_C)) + "\n\n" )
for i in QMMM_C:
f.write("{0:15s}{1:10f}{2:10f}{3:10f}\n".format( i.atype, i.x, i.y, i.z))
for i in QM_C:
f.write("{0:15s}{1:10f}{2:10f}{3:10f}\n".format( i.atype, i.x, i.y, i.z))
f.close()
# Script entry point.
if __name__ == '__main__':
    main()
|
import discord
import asyncio
from discord.ext.commands import Bot
from discord.ext import commands
import logging
logging.basicConfig(level=logging.INFO) # Helps with debugging issues
Client = discord.Client()
client = commands.Bot(command_prefix="@") #put command prefix in these quotes
@client.event # Prints conformation message
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------') # Connection conformation
users = {}
userid = {}
handles = []
@client.event # Bot command
async def on_message(message):
if message.content.startswith("@connect"): #Put whatever you want to have to type to trigger the command
user = message.author
args = message.content.split(" ")
if len(args[1]) < 50:
handle = args[1]
else:
handle = "default"
await client.send_message(user, "Username too long. Connected as default.")
await client.send_message(message.author,"You have connected to the server using handle " + handle)
try:
for i in handles:
handles.remove(userid[user])
except:
pass
users[handle] = user
userid[user] = handle
handles.append(handle)
print(handles)
if message.content.startswith("@say"):
args = message.content.split(" ")
args.pop(0)
author = message.author
h = userid[author]
for i in handles:
await client.send_message(users[i], h + " : " + " ".join(args))
if message.content.startswith("@disconnect"):
auth = message.author
ho = userid[auth]
users[ho] = ""
userid[auth] = ""
handles.remove(ho)
await client.send_message(auth, "You have now disconnected.")
if message.content.startswith("@online"):
auth = message.author
for i in handles:
await client.send_message(auth, i)
client.run("") # Put bot token in here
|
# Join the digit strings "1".."5" with '.' separators and print the result.
list1 = [str(digit) for digit in range(1, 6)]
str2 = ".".join(list1)
print(str2)
|
#Written by Roy Talman 16/8/2021
# for more support contact roytalman@gmail.com
from __future__ import unicode_literals
import numpy as np
import pandas as pd
import os
import youtube_dl
from glob import glob
import librosa
import matplotlib.pyplot as plt
import pickle
import sys
CurrentFolder = os.getcwd()
# Finel relevant segment folder
Audio_segment_Folder = CurrentFolder + '/Audio_Segment_hard/'
# labels destination folder:
LabelDest = CurrentFolder + '/Audio_Segment_hard_labels/'
# name of desired classes
Classes = sys.argv[1:]
print("List of classes to download:")
print(sys.argv[1:])
# output data sample rate
Out_SR = 20000
# maximum recordings to down load
MaxRecNum = 100 # maximum number of records
if os.path.exists(Audio_segment_Folder) == 0:
os.mkdir(Audio_segment_Folder)
if os.path.exists(LabelDest) == 0:
os.mkdir(LabelDest)
if not os.path.exists(os.getcwd()+'/mid_to_display_name.csv'):
tsv_file='mid_to_display_name.tsv'
csv_table=pd.read_table(tsv_file,sep='\t')
csv_table.to_csv('mid_to_display_name.csv',index=False)
tsv_file='audioset_train_strong.tsv'
csv_table=pd.read_table(tsv_file,sep='\t')
csv_table.to_csv('audioset_train_strong.csv',index=False)
# read data csv
TrainSet = pd.read_csv('./audioset_train_strong.csv', error_bad_lines=False)
Names = list(TrainSet[list(TrainSet.columns)[0]])
# set youtube read settings
ydl_opts = {'forcetitl|e': True,
'format': 'bestaudio/best',
'postprocessors': [{
'key': 'FFmpegExtractAudio',
'preferredcodec': 'mp3',
'preferredquality': '192',
}],
}
ColumnsList = list(TrainSet.columns)
# names of classes and their code-names dictionary:
Names = pd.read_csv('./mid_to_display_name.csv',header=None)
#p
DictClass = {}
DictClassInv = {}
for i,j in zip(Names[0],Names[1]):
DictClass[i]= j
DictClassInv[j] = i
OldRecName = ' '
ClassesColumnsList = list(TrainSet.columns)
TrainSetList = list(TrainSet)
Id_List = []
IndRec = 0
for recName in list(set(TrainSet[ColumnsList[0]])):
RecIndex = [i for i, x in enumerate(TrainSet[ColumnsList[0]]) if x == recName]
ClassMapKey = list(TrainSet[ColumnsList[3]][RecIndex])
ClassNameRec = [DictClass[i] for i in list(ClassMapKey)]
# dowbload only if one of the words in desired classes is in current rec
if len(set(ClassNameRec) & set(Classes))> 0 :
recName1 = '_'.join(recName.split('_')[:-1])
StartTime = int(recName.split('_')[-1]) / 1000
with youtube_dl.YoutubeDL(ydl_opts) as ydl:
#try:
ydl.download(['http://www.youtube.com/watch?v=' + recName1])
Files = glob(CurrentFolder + '/*.mp3')
y, sr = librosa.load(CurrentFolder + '/' + Files[0].split('/')[-1], sr=Out_SR)
Y_seg = y[int(StartTime * sr):int((StartTime + 10) * sr)]
librosa.output.write_wav(Audio_segment_Folder + Files[0].split('/')[-1], Y_seg, sr)
os.remove(CurrentFolder + '/' + Files[0].split('/')[-1])
Id_List.append(i)
IndRec += 1
# save label:
DictOut = {}
for i, className in enumerate(ClassNameRec):
if className in DictOut.keys():
DictOut[className].append(TrainSet[ColumnsList[1]][RecIndex[i]])
DictOut[className].append(TrainSet[ColumnsList[2]][RecIndex[i]])
else:
DictOut[className] = [TrainSet[ColumnsList[1]][RecIndex[i]], TrainSet[ColumnsList[2]][RecIndex[i]]]
with open(LabelDest + Files[0].split('/')[-1] + '.pickle', 'wb') as handle:
pickle.dump(DictOut, handle, protocol=pickle.HIGHEST_PROTOCOL)
print("Good recordings downloaded: {}".format(IndRec))
#except:
# print('Bad File')
OldRecName = recName
|
def selectionsort(arr):
len_arr = len(arr)
for i in range(len_arr-1):
min_index=i
for j in range(i+1,len_arr):
if arr[min_index]>arr[j]:
min_index = j
arr[i],arr[min_index] = arr[min_index],arr[i]
arr= [72,50,10,44,8,20,100]
selectionsort(arr)
print(arr)
|
#!/usr/bin/env python
import time
from math import atan, degrees, sqrt
import numpy as np
from copy import copy
# in mm
WHEEL_RADIUS = 40
ROBOT_RADIUS = 120
ROBOT_CIRCUMFERENCE = 2 * np.pi * ROBOT_RADIUS
# for 360 degrees
DISTANCE_PER_FLIP = 0.2 * np.pi * WHEEL_RADIUS
HALL_SENSOR_FLIPS = ROBOT_CIRCUMFERENCE / DISTANCE_PER_FLIP
# number of degrees the robot turns when hall sensor flips once
TURN_ANGLE_PER_FLIP = 360 / HALL_SENSOR_FLIPS
world_size = 100.0
_landmarks = [[50.0, 50.0], [world_size, world_size], [0.0, world_size], [world_size, 0.0]]
def _gaussian(mu, sigma, x):
# Given a x position, mean mu and std sigma of Gaussian, calculates the probability
# Note
# mu: estimated distance by each particle's position, (map and _landmarks are known)
# x: measured distance by the robot
return np.exp(- ((mu - x) ** 2) / (sigma ** 2) / 2.0) / np.sqrt(2.0 * np.pi * (sigma ** 2))
def evaluation(robot, particles):
# gives the mean error
sum = 0.0;
for p in particles: # calculate mean error
dx = (p.x - robot.x + (world_size/2.0)) % world_size - (world_size/2.0)
dy = (p.y - robot.y + (world_size/2.0)) % world_size - (world_size/2.0)
err = np.sqrt(dx * dx + dy * dy)
sum += err
return sum / float(len(particles))
class _Robot(object):
def __init__(self): # variable
# random.random() returns random number in [0.0, 1.0)
self.x = np.random.rand() * world_size # first random position
self.y = np.random.rand() * world_size
self.orientation = np.random.rand() * 2.0 * np.pi
self.forward_noise = 0.0
self.turn_noise = 0.0
self.sense_noise = 0.0
def set(self, new_x, new_y, new_orientation): # set the orginal position and orientation
# if new_x < 0 or new_x >= world_size:
# raise ValueError('X coordinate out of bound')
# if new_y < 0 or new_y >= world_size:
# raise ValueError('Y coordinate out of bound')
# if new_orientation < 0 or new_orientation >= 2 * np.pi:
# raise ValueError('Orientation must be in [0..2pi]')
self.x = float(new_x)
self.y = float(new_y)
self.orientation = float(new_orientation)
def set_noise(self, new_f_noise, new_t_noise, new_s_noise):
# possibility to set noise parameters
# this is often useful in particle filters
self.forward_noise = float(new_f_noise)
self.turn_noise = float(new_t_noise)
self.sense_noise = float(new_s_noise)
def sense(self):
Z = []
for i in range(len(_landmarks)):
# given its position x, y, measure the distance to each landmark
dist = np.sqrt((self.x - _landmarks[i][0]) ** 2 + (self.y - _landmarks[i][1]) ** 2)
# superimpose a _gaussian noise on ideal measurement
dist += np.random.randn() * self.sense_noise
Z.append(dist)
return Z
def move(self, turn, forward):
if forward < 0:
raise ValueError('Robot cant move backwards')
# turn, and add Gaussian noise to the turning command
orientation = self.orientation + float(turn) + np.random.randn() * self.turn_noise
orientation %= 2 * np.pi # make sure: 0=< orientation <=2*pi
# move, and add Gaussian noise to the motion command
dist = float(forward) + np.random.randn() * self.forward_noise
x = self.x + (np.cos(orientation) * dist)
y = self.y + (np.sin(orientation) * dist)
x %= world_size # make sure: 0=< position <= world_size
y %= world_size
# set the new location x, y back to the member variables x y of the class
self.set(x, y, orientation)
return None
def measurement_prob(self, measurement):
# calculates how likely a measurement should be
prob = 1.0;
for i in range(len(_landmarks)):
# calculate probability of being in a location given a measurement
dist = np.sqrt((self.x - _landmarks[i][0]) ** 2 + (self.y - _landmarks[i][1]) ** 2)
prob *= _gaussian(dist, self.sense_noise, measurement[i])
# the final probability is the multiplication of each measurement probability
return prob
def __repr__(self):
# return a printable pose of the robot
return '[x=%.5s y=%.5s orient=%.5s]' % (str(self.x), str(self.y), str(self.orientation))
def _rouletteWheelSampling(particles, weights):
index = range(len(particles))
new_particles = []
for x in np.random.choice(index, replace=True, p=weights, size=len(particles)):
new_particles.append(copy(particles[x]))
return new_particles
def _resampling(particles, w):
N = len(particles)
beta = 0
index = 0
w_max = max(w)
p_temp = []
for i in range(N):
beta = beta + 2.0 * w_max * np.random.rand() # if add same number, it is a circle
while beta > w[index]:
beta = beta - w[index]
index = (index + 1) % N
selectedParticle = copy(particles[index]) # if changes, change them all so have to copy
p_temp.append(selectedParticle) # if beta<w[index], this indexed particle is selected
return p_temp
def _stochasticUniversalSampling(particles, weights):
N = len(particles)
beta = np.random.rand() / N
index = 0
p_temp = []
for i in range(N):
beta += 1.0 / N
while beta > weights[index]:
beta = beta - weights[index]
index = (index + 1) % N
selectedParticle = copy(particles[index])
p_temp.append(selectedParticle) # if beta<w[index], this indexed particle is selected
return p_temp
class _ParticleFilterStateEstimator(object):
def __init__(self, noParticles): # noParticles == object?
self.particles = []
for _ in range(noParticles):
# create a list of particles with random initial state, and noise levels 0.1, 0.1, 5.0
particle = _Robot()
particle.set_noise(0.1, 0.1, 5.0)
self.particles.append(particle) # robot is a class, how to append
def actionUpdate(self, action):
# insert code here
for p in self.particles:
p.move(*action)
def measurementUpdate(self, measurement):
# given each particle's position, what is the probability of being at that position given Z measurement
weights = []
for p in self.particles:
weight = p.measurement_prob(measurement)
weights.append(weight)
# weight w is proportional to p(z|x)= p(z|x1)*p(z|x2)*....
# quiz: normalize w (total probability), enter code below
weights = np.array(weights)
weights = weights / np.sum(weights)
# quiz: apply resampling based on importance sampling below
self.particles = _stochasticUniversalSampling(self.particles, weights)
class Toddler:
__version = '2018a'
def __init__(self, IO):
print('[Toddler] I am toddler {} of group 13'.format(Toddler.__version))
self.camera = IO.camera.initCamera('pi', 'low')
self.getInputs = IO.interface_kit.getInputs
self.getSensors = IO.interface_kit.getSensors
self.mc = IO.motor_control
self.sc = IO.servo_control
# motor board ports
self._left_wheel_port = 3
self._right_wheel_port = 4
self._light_bulb_port = 5
# interface board ports
# sensors
self._left_light_sensor_port = 2
self._right_light_sensor_port = 1
self._sonar_sensor_port = 0
self._ir_left_sensor_port = 4
self._ir_right_sensor_port = 5
# inputs
# self._left_whisker_port = 1
# self._right_whisker_port = 0
self._hall_sensor_port = 7
self._over_poi = False
self._pointing_to_satellite = False
# fixed on arena
self._sat_x = (4.25 + 0.10) * 100 * 10
self._sat_y = (2.15 + 0.36) * 100 * 10
self._sat_height = 2.95 * 100 * 10
self._robot_height = 20 * 10
self.sc.engage()
self.sc.setPosition(0)
self.robot = _Robot()
self.estimator = _ParticleFilterStateEstimator(1500)
self._exploring = False
self._steps = 0
self._hall_value = self.getSensors()[self._hall_sensor_port]
self._step_length = 0.2 * np.pi * WHEEL_RADIUS
def _avoid_obstacle(self):
'''
Ensures the robot does not run into obstacles; uses whisker sensors to sense obstacle in front, turns away
from it and continues straight.cat to
'''
sensor_values = self.getSensors()
right_ir_val = sensor_values[self._ir_right_sensor_port]
front_ir_val = sensor_values[self._ir_left_sensor_port]
sonar_val = sensor_values[self._sonar_sensor_port]
# if sonar_val <= 30:
# print('Met obstacle')
# if left_ir_val > right_ir_val:
# self._turn('right', 10)
# else:
# self._turn('left', 10)
# self._keep_moving_forward()
# print('right - ', right_ir_val)
# print('front - ', front_ir_val)
if front_ir_val > 160:
print('Met obstacle')
# turn
if right_ir_val > 200:
self._turn('left', 10)
else:
self._turn('right', 10)
self._keep_moving_forward()
#
# # using sonar sensor
# sonar_val = sensor_values[self._sonar_sensor_port]
# input_values = self.getInputs()
# whisker_vals = [input_values[self._left_whisker_port], input_values[self._right_whisker_port]]
# if any(whisker_vals):
# print('Whisker hit an obstacle')
# self.mc.setMotor(self._left_wheel_port, -80)
# self.mc.setMotor(self._right_wheel_port, -80)
# time.sleep(1)
# left_ir_val = sensor_values[self._ir_left_sensor_port]
# right_ir_val = sensor_values[self._ir_right_sensor_port]
# if left_ir_val > right_ir_val:
# print('Turning right')
# self.mc.setMotor(self._left_wheel_port, 80)
# self.mc.setMotor(self._right_wheel_port, -80)
# else:
# print('Turning left')
# self.mc.setMotor(self._left_wheel_port, -80)
# self.mc.setMotor(self._right_wheel_port, 80)
# if sonar_val <= 30:
# left_ir_val = sensor_values[self._ir_left_sensor_port]
# right_ir_val = sensor_values[self._ir_right_sensor_port]
# if left_ir_val > right_ir_val:
# print('Turning right')
# self.mc.setMotor(self._left_wheel_port, 80)
# self.mc.setMotor(self._right_wheel_port, -80)
# else:
# print('Turning left')
# self.mc.setMotor(self._left_wheel_port, -80)
# self.mc.setMotor(self._right_wheel_port, 80)
# time.sleep(0.5)
# else:
# self.mc.setMotor(self._left_wheel_port, 80)
# self.mc.setMotor(self._right_wheel_port, 80)
def _stop_on_poi(self):
'''
Stops over the POI, which is a reflective surface.
When the input from the light sensor changes enough, assume that the robot is over a reflective surface and
stop. The problem is to quantify 'enough'.
'''
# if not self._over_poi:
# turn on light bulb
# self.mc.setMotor(self._light_bulb_port, 100)
sensor_values = self.getSensors()
light_sensor_value = (sensor_values[self._left_light_sensor_port] + sensor_values[self._right_light_sensor_port]) / 2
# change to more robust code
# TODO: detect light sensor value over POI
if light_sensor_value > 180:
self._over_poi = True
self.mc.stopMotors()
self.mc.setMotor(self._light_bulb_port, 100)
# else:
# self.mc.setMotor(self._left_wheel_port, 80)
# self.mc.setMotor(self._right_wheel_port, 80)
# else:
# pass
def _point_to_satellite(self, x, y, orientation):
'''
Points the antenna of the robot to the satellite, given that the robot is in the point (x, y) at given
orientation (starting from 0 degrees at x-axis); involves 2 steps -
1. Turn main body to satellite
2. Lift antenna to point to satellite
'''
# we need this flag only for demo: 1
# if not self._pointing_to_satellite:
x = abs(x)
y = abs(y)
if y < self._sat_y:
print('Using formula 1')
tan_phi = (self._sat_x - x) / (self._sat_y - y)
phi = degrees(atan(tan_phi))
angle_to_turn = 180 - orientation - phi
else:
print('Using formula 2')
tan_phi = (y - self._sat_y) / (self._sat_x - x)
phi = degrees(atan(tan_phi))
angle_to_turn = (90 - phi) - orientation
# assuming the robot turns 360 degrees in 4.3 seconds
# duration = (4.3 / 360) * abs(angle_to_turn)
print('The main body should turn : ', angle_to_turn)
# print(duration)
if angle_to_turn < 0:
# turn left
print('Turning left')
self._turn('left', abs(angle_to_turn))
# self.mc.setMotor(self._left_wheel_port, -80)
# self.mc.setMotor(self._right_wheel_port, 80)
# time.sleep(duration)
else:
# turn right
print('Turning right')
self._turn('right', angle_to_turn)
# self.mc.setMotor(self._left_wheel_port, 80)
# self.mc.setMotor(self._right_wheel_port, -80)
# time.sleep(duration)
self.mc.stopMotors()
self.mc.setMotor(self._light_bulb_port, 100)
# lift antenna
l1 = sqrt((self._sat_x - x)**2 + (self._sat_y - y - 40)**2)
tan_theta = (self._sat_height - self._robot_height) / l1
theta = degrees(atan(tan_theta))
self.sc.engage()
self.sc.setPosition(theta)
self._pointing_to_satellite = True
time.sleep(30)
# lower antenna
self.sc.engage()
self.sc.setPosition(0)
# turn back
if angle_to_turn < 0:
# turn right
print('Turning right')
self._turn('right', angle_to_turn)
else:
# turn left
print('Turning left')
self._turn('left', angle_to_turn)
def _process_poi(self, distance_moved, sprint):
self.mc.stopMotors()
self.mc.setMotor(self._light_bulb_port, 100)
# TODO: landmarks
self._localize(distance_moved, sprint)
self._point_to_satellite(self.robot.x, self.robot.y, self.robot.orientation)
# move over the rest of the POI
# self.mc.setMotor(self._light_bulb_port, 0)
self._move(250, sprint, False)
self.mc.setMotor(self._light_bulb_port, 100)
def _localize(self, distance_moved, sprint):
# print('Faking localization')
if sprint == 1:
x = self.robot.x
y = self.robot.y + distance_moved
elif sprint == 2:
x = self.robot.x - distance_moved
y = self.robot.y
elif sprint == 3:
x = self.robot.x
y = self.robot.y - distance_moved
elif sprint == 4:
x = self.robot.x + distance_moved
y = self.robot.y
elif sprint == 5:
x = self.robot.x
y = self.robot.y + distance_moved
elif sprint == 6:
x = self.robot.x - distance_moved
y = self.robot.y
orientation = self._convert_sprint_to_orientation(sprint)
self.robot.set(x, y, orientation)
return (x, y, self._convert_sprint_to_orientation(sprint))
# # args - landmark to be used for this region
# for each in range(steps):
# self.robot.move(heading, steplength)
# self.estimator.actionUpdate([heading, steplength])
# measurement = self.robot.sense()
# self.estimator.measurementUpdate(measurement)
# self._steps = 0
# # TODO: pick one particle from filter??
# # return 100, 200, 60
# # TODO; impl localization
# curr_x, curr_y, curr_orientation = None
def _keep_moving_forward(self):
self.mc.setMotor(self._left_wheel_port, 80)
self.mc.setMotor(self._right_wheel_port, 80)
def _keep_moving_backward(self):
self.mc.setMotor(self._left_wheel_port, -80)
self.mc.setMotor(self._right_wheel_port, -80)
def _keep_turning_right(self):
self.mc.setMotor(self._left_wheel_port, 80)
self.mc.setMotor(self._right_wheel_port, -80)
def _keep_turning_left(self):
self.mc.setMotor(self._left_wheel_port, -80)
self.mc.setMotor(self._right_wheel_port, 80)
def _turn(self, direction, angle):
self.mc.setMotor(self._light_bulb_port, 100)
print('Turning direction - ', direction)
print('Turning angle - ', angle)
flips = 0
flips_required = angle / TURN_ANGLE_PER_FLIP
print(flips_required, ' flips are required')
if direction == 'right':
self._keep_turning_right()
else:
self._keep_turning_left()
while flips < flips_required:
# time.sleep(0.13)
time.sleep(0.1630)
# time.sleep(0.08)
input_values = self.getInputs()
hall_sensor_val = input_values[self._hall_sensor_port]
# print('--------------------------------------------------------------------------------------')
# print('hall_sensor_val - ', hall_sensor_val)
# print('old hall sensor val - ', self._hall_value)
if hall_sensor_val != self._hall_value:
# print('There was a flip')
flips = flips + 1
# print(flips)
self._hall_value = hall_sensor_val
# print('I have turned')
# time.sleep(5)
def _move(self, distance, sprint, detect_poi):
'''
Moves forward for distance given
'''
print('Moving forward in sprint - ', sprint)
self.mc.setMotor(self._light_bulb_port, 100)
# impl should include scanning for poi and obstacles and localiszation on POI
self._keep_moving_forward()
flips = 0
flips_required = distance/ DISTANCE_PER_FLIP
print(flips_required, ' flips are required')
while flips < flips_required:
self._avoid_obstacle()
time.sleep(0.13)
if detect_poi:
sensor_values = self.getSensors()
light_sensor_value = (sensor_values[self._left_light_sensor_port] + sensor_values[
self._right_light_sensor_port])
if light_sensor_value > 15:
# over POI
distance_covered_so_far = flips * DISTANCE_PER_FLIP
self._process_poi(distance_covered_so_far, sprint)
distance_left = distance - distance_covered_so_far - 250
self._move(distance_left, sprint, True)
input_values = self.getInputs()
hall_sensor_val = input_values[self._hall_sensor_port]
if hall_sensor_val != self._hall_value:
# print('There was a flip')
flips = flips + 1
# print(flips)
self._hall_value = hall_sensor_val
print('I have moved')
self.mc.stopMotors()
self.mc.setMotor(self._light_bulb_port, 100)
return (flips_required * DISTANCE_PER_FLIP)
def _convert_sprint_to_orientation(self, sprint):
if sprint == 1:
orientation = 0
elif sprint == 2:
orientation = 270
elif sprint == 3:
orientation = 180
elif sprint == 4:
orientation = 90
elif sprint == 5:
orientation = 0
elif sprint == 6:
orientation = 270
return orientation
def control(self):
'''
Being run in an infinite loop by sandbox
'''
x = 3840
y = - 1231
facing = 270
self.mc.setMotor(self._light_bulb_port, 100)
# sensor_values = self.getSensors()
# print('Interface kit - getSensors() --> ', sensor_values)
# print('Sonar - ', sensor_values[self._sonar_sensor_port])
# print('IR right sensor value - ', sensor_values[self._ir_right_sensor_port])
# print('IR front val - ', sensor_values[self._ir_left_sensor_port])
# print('----------------------------------------------------------------------------')
# left_light_sensor = sensor_values[self._left_light_sensor_port]
# right_light_sensor = sensor_values[self._right_light_sensor_port]
# print('Left light sensor - ', left_light_sensor)
# print('Right light sensor - ', right_light_sensor)
# # print('Interface kit - getInputs() --> ', input_values)
#
################################## BEGIN FAIR DRAFT FOR NAVIGATION ############################################
if not self._exploring:
# start exploring
# set to initial position and orientation
self.robot.set(x, y, facing)
self.robot.set_noise(0.0, 0.0, 0.0)
# turn to point to zero degrees
if facing < 180 or facing == 180:
# turn left
self._turn('left', facing)
else:
# turn right
self._turn('right', (360 - facing))
self._exploring = True
# turn on light bulb
self.mc.setMotor(self._light_bulb_port, 100)
# moving within region: 1
distance_moved = self._move(1350, 1, True)
print('At end of sprint-1')
# localize
self._localize(distance_moved, 1)
self._turn('left', 90)
# moving within region: 2
distance_moved = self._move(2900, 2, True)
# localize
self._localize(distance_moved, 2)
self._turn('left', 90)
# moving within region: 3
distance_moved = self._move(1800, 3, True)
# localize
self._localize(distance_moved, 3)
self._turn('left', 90)
# moving within region: 4
distance_moved = self._move(1800, 4, True)
# localize
self._localize(distance_moved, 4)
self._turn('left', 90)
# moving from region: 4 to region: 5
distance_moved = self._move(600, 5, True)
# localize
self._localize(distance_moved, 5)
self._turn('left', 90)
# moving within region: 5 and go higher on region: 3
distance_moved = self._move(2200, 6, True)
# localize
self._localize(distance_moved, 6)
self._turn('left', 90)
# moving through upper layer of region: 3
distance_moved = self._move(1200, 7, True)
# localize
self._localize(distance_moved, 7)
# self._localize()
self._turn('left', 90)
# go back to homebase
# TODO: calculate theta; low priority
theta = None
self._turn('left', theta)
# TODO: calculte distance to homebase; low priority
distance_moved = self._move(4000)
#
time.sleep(0.05)
# self.mc.setMotor(self._light_bulb_port, 100)
##################################### END FAIR DRAFT FOR NAVIGATION ###########################################
# self._point_to_satellite(x, y, facing)
# self.sc.engage()
# self.sc.setPosition(0)
# self.sc.engage()
# self.sc.setPosition(90)
# print('Begin impl of control')
# self._move(500)
# self._turn('right', 90)
# self.mc.stopMotors()
# print('I have moved')
# self.mc.setMotor(self._light_bulb_port, 100)
# time.sleep(3)
# self._move(1000, 3, False)
# self.mc.setMotor(self._light_bulb_port, 100)
# sensors = self.getSensors()
# print('left - ', sensors[self._left_light_sensor_port])
# print('right - ', sensors[self._right_light_sensor_port])
# print('8888888888888888888888888888888')
# orig_val = sensors[self._hall_sensor_port]
# print(orig_val)
# start = time.time()
# flips = 0
# while flips == 0:
# new_val = self.getSensors()[self._hall_sensor_port]
# if new_val != orig_val:
# print('Flipping')
# flips = 1
# end = time.time()
# print(end - start)
# self.mc.stopMotors()
# self.mc.setMotor(self._light_bulb_port, 100)
def vision(self):
image = self.camera.getFrame()
self.camera.imshow('Camera', image)
|
import numpy as np
x = np.load('x_data.npy')
y = np.load('y_data.npy')
print(x)
print(y)
print(x.shape)
|
from __future__ import annotations
import os
from hypothesis import given
from hypothesis.strategies import integers
from typing import Tuple
from tm_trees import TMTree, FileSystemTree
# Path to the sample directory containing 10 files.
# NOTE(review): the empty string suggests the fixture path still needs to be
# filled in locally — FileSystemTree is built on it at import time.
EXAMPLE_PATH_10_FILES = ''
tree_10_file = FileSystemTree(EXAMPLE_PATH_10_FILES)
def is_valid_colour(colour: Tuple[int, int, int]) -> bool:
"""Return True iff <colour> is a valid colour. That is, if all of its
values are between 0 and 255, inclusive.
"""
for i in range(3):
if not 0 <= colour[i] <= 255:
return False
return True
def test_10_files_folder_initializer() -> None:
"""test a folder with 10 files"""
assert tree_10_file._name == 'Test_10_files'
for j in tree_10_file._subtrees:
assert j._subtrees == []
print(j.data_size)
assert tree_10_file._parent_tree is None
i = 0
for k in tree_10_file._subtrees:
i += k.data_size
assert tree_10_file.data_size == i
assert is_valid_colour(tree_10_file._colour)
def test_10_files_folder_image() -> None:
tree_10_file.update_rectangles((0, 0, 200, 100))
rects = tree_10_file.get_rectangles()
assert len(rects) == 1
tree_10_file.expand()
tree_10_file.update_rectangles((0, 0, 200, 100))
rects_1 = tree_10_file.get_rectangles()
assert len(rects_1) == 10
width = 0
for i in tree_10_file._subtrees:
assert i.rect[0] == width
width += 20
assert i.rect[1] == 0
assert i.rect[2] == 20
assert i.rect[3] == 100
# Fixture: an empty folder.
EXAMPLE_PATH_EMPTY = '/Test_empty_folder'
tree_empty_file = FileSystemTree(EXAMPLE_PATH_EMPTY)
def test_tree_empty_file_initializer() -> None:
    """Test that an empty folder initialises with no subtrees and size 0."""
    assert tree_empty_file._name == 'Test_empty_folder'
    assert tree_empty_file._subtrees == []
    assert tree_empty_file._parent_tree is None
    assert tree_empty_file.data_size == 0
    assert is_valid_colour(tree_empty_file._colour)
def test_tree_empty_file_image() -> None:
    """An empty folder renders as one rectangle whether expanded or not."""
    tree_empty_file.update_rectangles((0, 0, 200, 100))
    rects = tree_empty_file.get_rectangles()
    assert len(rects) == 1
    # Expanding a leafless folder must not change the rendering.
    tree_empty_file.expand()
    tree_empty_file.update_rectangles((0, 0, 200, 100))
    rects_1 = tree_empty_file.get_rectangles()
    assert len(rects_1) == 1
EXAMPLE_PATH_5_HEIGHT = '/Test_tree_height_5'
tree_5_height = FileSystemTree(EXAMPLE_PATH_5_HEIGHT)
def test_tree_5_height_initializer() -> None:
"""test a folder with 10 files"""
assert tree_5_height._name == 'Test_tree_height_5'
assert len(tree_5_height._subtrees) == 3
assert tree_5_height._parent_tree is None
assert is_valid_colour(tree_5_height._colour)
def test_tree_5_height_image() -> None:
    """Test layout, resizing, moving and collapsing on a tree of height 5.

    All numeric expectations are unchanged; the only code fix is using
    ``is False`` instead of ``== False`` (PEP 8 / flake8 E712).
    """
    tree_5_height.update_rectangles((0, 0, 400, 200))
    rects = tree_5_height.get_rectangles()
    # Collapsed root covers the whole target area.
    assert len(rects) == 1
    assert rects[0][0] == (0, 0, 400, 200)
    tree_5_height.expand()
    tree_5_height.update_rectangles((0, 0, 400, 200))
    rects_1 = tree_5_height.get_rectangles()
    assert len(rects_1) == 3
    a = tree_5_height._subtrees[0]
    b = tree_5_height._subtrees[1]
    c = tree_5_height._subtrees[2]
    # Children tile the area left-to-right, proportional to data_size.
    assert a.rect == (0, 0, 171, 200)
    assert b.rect == (171, 0, 129, 200)
    assert c.rect == (300, 0, 100, 200)
    a.expand()
    d = a._subtrees[0]
    assert d.rect == (0, 0, 171, 30)
    e = a._subtrees[1]
    assert e.rect == (0, 30, 171, 30)
    f = a._subtrees[2]
    assert f.rect == (0, 60, 171, 117)
    g = a._subtrees[3]
    assert g.rect == (0, 177, 171, 12)
    h = a._subtrees[4]
    assert h.rect == (0, 189, 171, 11)
    # Size changes adjust data_size by the given fraction.
    g.change_size(0.01)
    assert g.data_size == 53523
    h.change_size(-0.01)
    assert h.data_size == 40135
    # Moving g under f transfers it between subtree lists.
    g.move(f)
    assert len(f._subtrees) == 6
    assert len(a._subtrees) == 4
    tree_5_height.update_data_sizes()
    assert a.data_size == 856660
    assert f.data_size == 557746
    # collapse_all from any node collapses the entire tree.
    g.collapse_all()
    assert tree_5_height._expanded is False
    for i in tree_5_height._subtrees:
        assert i._expanded is False
        for j in i._subtrees:
            assert j._expanded is False
            for k in j._subtrees:
                assert k._expanded is False
                for l in k._subtrees:
                    assert l._expanded is False
                    for m in l._subtrees:
                        assert m._expanded is False
# Allow running this test module directly via pytest.
if __name__ == '__main__':
    import pytest
    pytest.main(['TMTree_tests.py'])
|
from pymol import cmd
def goto(pathname):
    """Change the working directory to a named shortcut location.

    pathname: one of the shortcut keys below; any unknown name falls
    back to the current directory ('.').
    """
    import os  # was missing entirely: os.chdir raised NameError

    # Raw strings: '\U' in "D:\Users\..." is an invalid escape under
    # Python 3 (SyntaxError) and fragile even where it happens to parse.
    path_select = {
        "drive": r"D:\Users\Brahm Yachnin\Documents\Google Drive",
        "design": r"D:\Users\Brahm Yachnin\Documents\Google Drive\design",
        "pdbs": r"D:\Users\Brahm Yachnin\Documents\Google Drive\PDB Files",
        "dropbox": r"D:\Users\Brahm Yachnin\Documents\Dropbox",
    }
    target = path_select.get(pathname, ".")
    os.chdir(target)
    # print() calls are valid in both Python 2 and 3; the original
    # Python-2 print statements are a SyntaxError under Python 3.
    print("Changing to the following directory:")
    print(target)
cmd.extend("goto",goto)
|
"""
Convert the raw sequence and the labels to hdf5 data/arrays for faster batch reading.
Split data into training, test and validation set. Save training and test set in same file.
Will store a .h5 file with the labels and sequences and a coord file per test/valid and train set
"""
# from __future__ import absolute_import
# from __future__ import division
# from __future__ import print_function
import numpy as np
import h5py
import argparse
import sys
from operator import itemgetter
# Define arguments -------------------------------------------------------------
def _str2bool(value):
    """Parse a command-line boolean string ('true'/'false', case-insensitive).

    argparse's ``type=bool`` is a trap: ``bool('False')`` is True because
    any non-empty string is truthy, so ``--store_bool False`` could never
    switch the flag off. This converter keeps the ``--store_bool VALUE``
    call syntax but parses the value correctly.
    """
    if isinstance(value, bool):
        return value
    if value.lower() in ('true', 't', 'yes', 'y', '1'):
        return True
    if value.lower() in ('false', 'f', 'no', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError("Boolean value expected, got %r." % value)

parser = argparse.ArgumentParser(
    description="""Take a raw sequence and a labels bed like file and encode and store
        both as numpy arrays. Split up into training, test and validation samples.""")
parser.add_argument('in_file', type=str,
    help='Five column file. [chr start end comma separated IDs to split and raw sequence].')
parser.add_argument('--split_mode', dest='split_mode', default='random', choices=['random', 'chr'],
    help="""Specify how to split up the data into training, test
        and validation set. [chr] - select chromosomes from which
        features are attributed to the different sets. [random]
        - split the features by random sampling. Needs --chr_test
        --chr_valid or --frac_test --frac_valid declared respectively.
        Default = [random]""")
parser.add_argument('--frac_test', type=float, dest='frac_test', default=0.05,
    help='Fraction of total set to sample into test set. (Float < 1.0)')
parser.add_argument('--frac_valid', type=float, dest='frac_valid', default=0.05,
    help='Fraction of total set to sample into validation set. (Float < 1.0)')
parser.add_argument('--chr_test', nargs='+', dest='chr_test', default='chr20',
    help="""Select one or more space separated chromosomes (chr1 chr2 chr3) to use
        as test chromosomes. Default = chr20 Only if split_mode = 'chr' """)
parser.add_argument('--chr_valid', nargs='+', dest='chr_valid', default='chr21',
    help="""Select one or more space separated chromosomes (chr1 chr2 chr3) to use
        as validation chromosomes. Default = chr21 Only if split_mode = 'chr' """)
parser.add_argument('--save_prefix', dest='save_prefix', default='./data_set',
    help='Prefix to store the training/ test and validation sets. Default = ./data_set')
parser.add_argument('--seed', dest='seed', type=int, default=1234,
    help='Random seed for sampling.')
parser.add_argument('--trim_seq', dest='trim_seq', type=int, default=0,
    help='Number of bp to trim the sequence from both ends (default 0).')
parser.add_argument('--num_classes', dest='num_classes', type=int, default=936,
    help='Specify number of classes.')
parser.add_argument('--store_bool', dest='store_bool', type=_str2bool, default=False,
    help='Indicate if to store the 1-hot encoded sequence and labels as bool dtype (convert in train script etc.).')

# Parse arguments
args = parser.parse_args()
if args.store_bool:
    print("Storing as boolean ...")
# Helper get hotcoded sequence
def get_hot_coded_seq(sequence):
    """Convert an ACGT string into a (len(sequence), 4) one-hot numpy array.

    Columns are ordered A, C, G, T. Any other character (e.g. 'N' or a
    lower-case base) leaves its row all-zero, matching the original
    if/elif fall-through behaviour.
    """
    base_to_column = {'A': 0, 'C': 1, 'G': 2, 'T': 3}
    hotsequence = np.zeros((len(sequence), 4))
    for row, base in enumerate(sequence):
        column = base_to_column.get(base)
        if column is not None:
            hotsequence[row, column] = 1
    return hotsequence
print("\n# === Creating a Training, Test and Validation Set from provided input === #")

# Set seed for random sampling -------------------------------------------------
np.random.seed(args.seed)

# init binary representations --------------------------------------------------
# Initialize classes structure to store
if args.num_classes < 2:
    sys.exit("No distinct classes specified: %s !" % args.num_classes)
num_ids = args.num_classes  # get number of unique ids
print("\nNumber of distinct labels: " + str(num_ids))

# make a look-up dictionary with a binary (one-hot) label per id
# NOTE: np.bool was removed in NumPy 1.24; np.bool_ is the supported spelling.
bin_look_up = {}
for i in range(num_ids):
    if args.store_bool:
        bin_look_up[i] = np.zeros((num_ids), dtype=np.bool_)
        bin_look_up[i][i] = True
    else:
        bin_look_up[i] = np.zeros((num_ids), dtype=np.uint8)
        bin_look_up[i][i] = 1

# test print some bin_look_up lines
# (bounded by num_ids: printing entries 0..5 unconditionally raised
# KeyError whenever 2 <= num_classes < 6)
print("Test Print first bin_look_up lines:")
for i in range(min(6, num_ids)):
    print(bin_look_up[i][0:6])

# Read in data -----------------------------------------------------------------
# init single empty binary label array for access later
if args.store_bool:
    label_bin_init = np.zeros(num_ids, dtype=np.bool_)
else:
    label_bin_init = np.zeros(num_ids, dtype=np.uint8)
print("\nReading lines ...")
# First pass: collect coordinates, and one-hot encode the first sequence
# to learn the sequence length/shape used later for the hdf5 datasets.
with open(args.in_file, "r") as f:
    chroms = []
    start = []
    stop = []
    label = []
    for line_number, raw_line in enumerate(f):
        fields = raw_line.rstrip().split("\t")
        chroms.append(fields[0])
        start.append(fields[1])
        stop.append(fields[2])
        # get first sequence to estimate length and format
        if line_number == 0:
            temp_seq = fields[4]
            # trim if desired
            if args.trim_seq > 0:
                temp_seq = temp_seq[args.trim_seq:-args.trim_seq]
            temp_seq = get_hot_coded_seq(temp_seq)
# Sample Test/ Validation and Training set according to selected mode -----------
input_rows = np.array(range(len(chroms)))  # array of input row indices to sample from once

# if to split based on fractions randomly from all chromosomes
if args.split_mode == 'random':
    print("\nSampling randomly across chromosomes.")
    to_sample_test = round(len(input_rows) * args.frac_test)  # get fractions
    to_sample_valid = round(len(input_rows) * args.frac_valid)
    to_sample_train = len(input_rows) - to_sample_test - to_sample_valid
    print("%s Test cases\n%s Validation cases\n%s Training cases left." %
          (int(to_sample_test), int(to_sample_valid), int(to_sample_train)))
    # sample test+valid in one draw (no overlap), then carve it up
    tmp_sampled = np.random.choice(input_rows, size=int(to_sample_test + to_sample_valid), replace=False)
    test_rows = tmp_sampled[range(int(to_sample_test))]
    test_rows = np.sort(test_rows)
    valid_rows = tmp_sampled[range(int(to_sample_test), int(to_sample_test + to_sample_valid))]
    valid_rows = np.sort(valid_rows)
    # prune remaining training cases
    training_rows = np.delete(input_rows, tmp_sampled)
elif args.split_mode == 'chr':
    # typo fixes in user-facing messages: "specifc"/"validatipon"
    print("\nSetting specific chromosomes as test and validation set:")
    print("Using %s as Test, %s as Validation and the remaining as Training cases" % (args.chr_test, args.chr_valid))
    test_rows = []
    valid_rows = []
    # match row numbers against chromosomes
    for i in range(len(chroms)):
        if chroms[i] in args.chr_test:
            test_rows.append(i)
        if chroms[i] in args.chr_valid:
            valid_rows.append(i)
    # prune remaining training cases
    training_rows = np.delete(input_rows, (test_rows + valid_rows))
    print("%s Test cases \n%s Validation cases\n%s Training cases left." %
          (int(len(test_rows)), int(len(valid_rows)), int(len(training_rows))))
    test_rows = input_rows[test_rows,]
    valid_rows = input_rows[valid_rows,]

print("\nSampled into sets ...")
# write training/test/validation set coords ------------------------------------
print("\nStoring Coordinates ...")
# 'with' guarantees the coordinate files are flushed and closed; the
# originals were opened and never closed (resource leak / possible data
# loss on interpreter exit).
with open(args.save_prefix + "_training_coords.bed", "w") as write_train_coords:
    for tr in training_rows:
        write_train_coords.write("%s\t%s\t%s\n" % (chroms[tr], start[tr], stop[tr]))
with open(args.save_prefix + "_test_coords.bed", "w") as write_test_coords:
    for tr in test_rows:
        write_test_coords.write("%s\t%s\t%s\n" % (chroms[tr], start[tr], stop[tr]))
with open(args.save_prefix + "_validation_coords.bed", "w") as write_valid_coords:
    for tr in valid_rows:
        write_valid_coords.write("%s\t%s\t%s\n" % (chroms[tr], start[tr], stop[tr]))
# Initialize training and validation data in hdf5 files ------------------------
print("\nInitializing hdf5 Storage Files ...")

# dtype fixes/notes:
# - 'ui8' is not a valid numpy/h5py dtype string; 'uint8' is the intended type.
# - the label dataset is now 'training_labels' in BOTH branches (the non-bool
#   branch previously created 'train_labels', inconsistent with the bool
#   branch and with the 'training_seqs'/'training_coords' naming).
dset_dtype = 'b' if args.store_bool else 'uint8'

train_h5f = h5py.File(args.save_prefix + "_training_data.h5", 'w')
set_train_seq = train_h5f.create_dataset('training_seqs', (training_rows.shape[0], temp_seq.shape[0], temp_seq.shape[1]), dtype=dset_dtype)
set_test_seq = train_h5f.create_dataset('test_seqs', (test_rows.shape[0], temp_seq.shape[0], temp_seq.shape[1]), dtype=dset_dtype)
set_train_label = train_h5f.create_dataset('training_labels', (training_rows.shape[0], num_ids), dtype=dset_dtype)
set_test_label = train_h5f.create_dataset('test_labels', (test_rows.shape[0], num_ids), dtype=dset_dtype)

# Validation
valid_h5f = h5py.File(args.save_prefix + "_validation_data.h5", 'w')
set_valid_seq = valid_h5f.create_dataset('validation_seqs', (valid_rows.shape[0], temp_seq.shape[0], temp_seq.shape[1]), dtype=dset_dtype)
set_valid_label = valid_h5f.create_dataset('validation_labels', (valid_rows.shape[0], num_ids), dtype=dset_dtype)
# Last run through file get, convert and store sequence
print("\nRunning through raw file again, converting sequences and store in sets ...")

# Sets give O(1) membership checks per input line; the original
# 'i in test_rows[:]' copied the whole array AND scanned it linearly
# for every single line.
test_row_set = set(int(r) for r in test_rows)
valid_row_set = set(int(r) for r in valid_rows)

with open(args.in_file, "r") as f:
    # per-set write positions into the pre-sized hdf5 datasets
    test_i = 0
    valid_i = 0
    train_i = 0
    skip_count = 0
    seq_length = 0  # initialized so the summary print below is safe on empty input
    for i, l in enumerate(f):
        fields = l.rstrip().split("\t")
        # get label ids and sum them up into one binary class vector
        label = np.array(fields[3].split(","), dtype='i')
        label_bin = label_bin_init
        for j in range(len(label)):
            label_bin = label_bin + bin_look_up[label[j]]
        # get sequence
        seq = fields[4]
        if args.trim_seq > 0:
            seq = seq[args.trim_seq:-args.trim_seq]
        # first sequence fixes the expected length
        if i == 0:
            seq_length = len(seq)
            print("Converting and storing sequences of length %s bp." % (seq_length))
        # skip sequences shorter than expected (shape mismatch with datasets)
        if len(seq) < seq_length:
            skip_count = skip_count + 1
            continue
        # convert to one hot coded
        seq = get_hot_coded_seq(seq)
        # route the row to its hdf5 dataset, labels alongside
        if i in test_row_set:
            set_test_seq[test_i,] = seq
            set_test_label[test_i,] = label_bin
            test_i += 1
        elif i in valid_row_set:
            set_valid_seq[valid_i,] = seq
            set_valid_label[valid_i,] = label_bin
            valid_i += 1
        else:
            set_train_seq[train_i,] = seq
            set_train_label[train_i,] = label_bin
            train_i += 1
        if i % 10000 == 0:
            print('Written lines ... %s' % (i))

print("Skipped %s elements with sequence length != %s" % (skip_count, seq_length))

# Close
train_h5f.close()
valid_h5f.close()

print("\nSaved the data.\n")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.