text
stringlengths 8
6.05M
|
|---|
import serial
import sqlite3
import psutil as ps  # NOTE(review): unused here; kept in case other code relies on it
import time

# Arduino -> SQLite datalogger: polls the serial port every two seconds for a
# space-separated five-field record and inserts it into sensor_data.

try:
    dbConn = sqlite3.connect('datalogger.db')
except sqlite3.Error:
    print("could not connect to database")
    raise  # BUG FIX: without a connection, the cursor below would NameError

# open a cursor to the database
cursor = dbConn.cursor()

device = 'COM18'  # this will have to be changed to the serial port you are using
try:
    print("Trying...", device)
    arduino = serial.Serial(device, 9600)
except serial.SerialException:
    print("Failed to connect on", device)
    raise  # BUG FIX: without a port, the read loop below would NameError

while True:
    try:
        time.sleep(2)
        # BUG FIX: readline() returns bytes on Python 3; decode before
        # splitting (bytes.split(" ") raises TypeError).
        data = arduino.readline().decode('utf-8', errors='replace').strip()
        print(data)
        pieces = data.split(" ")  # split the record into its five fields
        # Insert the data into the database.
        try:
            cursor.execute(
                "INSERT INTO sensor_data VALUES (?,?,?,?,?)",
                (pieces[0], pieces[1], pieces[2], pieces[3], pieces[4]))
            dbConn.commit()  # commit the insert
        except sqlite3.IntegrityError:
            print("failed to insert data")
    except (serial.SerialException, IndexError, UnicodeDecodeError):
        # Narrowed from a bare except: keeps KeyboardInterrupt working.
        print("Failed to get data from Arduino!")
|
# -*- coding: utf-8 -*-
import urllib, hashlib
from flask import Blueprint, render_template, request, redirect, url_for
from pygit2 import Repository
from pygit2 import init_repository
repo = Blueprint('repo', __name__)


@repo.route('/repositories/new', methods = ['GET'])
def new():
    """Render the repository-creation form with the user's gravatar."""
    email = "jian.baij@gmail.com"
    default = "http://en.gravatar.com/favicon.ico"
    size = 80
    # NOTE(review): hashlib.md5() needs bytes on Python 3 and
    # urllib.urlencode only exists on Python 2 -- this module appears to
    # target Python 2; confirm before porting.
    img_url = "http://www.gravatar.com/avatar/" + hashlib.md5(email.lower()).hexdigest() + "?"
    img_url += urllib.urlencode({'d':default, 's': str(size)})
    return render_template("repo/new.html", img_url=img_url, username = 'baijian')


@repo.route('/repositories', methods = ['GET','POST'])
def repositories():
    """Create a bare git repository for the (hard-coded) user on POST."""
    if request.method == 'POST':
        username = 'baijian'
        reponame = request.form['reponame']
        repodesc = request.form['repodesc']
        # Second argument True -> create a *bare* repository.
        init_repository('/home/git/repositories/' + username + '/' + reponame, True)
        return redirect(url_for('.index', name=username, reponame=reponame))
    # NOTE(review): GET falls through and returns None, which Flask rejects;
    # a repository-list page is probably intended here.


@repo.route('/<name>/<reponame>', methods = ['GET'])
def index(name, reponame):
    """Show a repository's landing page (empty-repository aware)."""
    repo = Repository('/home/git/repositories/' + name + '/' + reponame)
    return render_template("repo/index.html", empty=repo.is_empty)


# BUG FIX (four handlers below): Flask passes URL variables as keyword
# arguments, so the original zero-argument signatures raised TypeError on
# every request. The parameters are now accepted (currently unused stubs).
@repo.route('/<name>/<reponame>/commits/<branch>', methods = ['GET'])
def commits(name, reponame, branch):
    return render_template("")


@repo.route('/<name>/<reponame>/commit/<sha>', methods = ['GET'])
def commit(name, reponame, sha):
    return render_template('')


@repo.route('/<name>/<reponame>/tree/<branch>/<treename>', methods = ['GET'])
def tree(name, reponame, branch, treename):
    return render_template('')


@repo.route('/<name>/<reponame>/blob/<branch>/<blobname>', methods = ['GET'])
def blob(name, reponame, branch, blobname):
    return render_template('')
|
# Car-rental cost calculator: R$60.00 per day plus R$0.15 per kilometre.
dias = int(input('Quantos dias alugados? '))
km = int(input('Quantos Km rodados? '))
print('-' * 25)
custo_diarias = dias * 60
custo_km = km * 0.15
print("Preço por dias: R${:.2f}".format(custo_diarias))
print("Preço por Km: R${:.2f}".format(custo_km))
print("Total: R${:.2f}".format(custo_diarias + custo_km))
print('-' * 25)
|
from flask import Flask, render_template, request, flash, redirect, url_for,send_file, make_response
import os
from canvasapi import Canvas
from werkzeug.utils import secure_filename
import requests
from water import water
import asyncio
# Base URL of the Canvas instance that all API calls target.
API_URL = "https://canvas.oregonstate.edu/"
# Canvas API key
# SECURITY(review): a live-looking API token is committed to source control.
# It should be revoked and loaded from an environment variable instead.
API_KEY = "1002~m1ShsxLu5bZY6SbSd5KlXjN9ejluixXwRFVYDvVQhGjIMx46dLJqS81NfZtCeTRJ"
app = Flask(__name__,template_folder='templates')
# File extensions uploads are allowed to carry.
ALLOWED_EXTENSIONS = {'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}


def allowed_file(filename):
    """Return True when *filename* carries a whitelisted extension."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1]
    return extension.lower() in ALLOWED_EXTENSIONS
@app.before_request
def log_request():
    """Before-request hook; header logging is currently disabled."""
    # app.logger.debug("Request Headers %s", request.headers)
    return None  # returning None lets normal request handling continue
@app.route('/upload',methods=["GET","POST"])
def getHomework():
    """LTI launch endpoint: renders the assignment list for a course.

    Reads LTI POST fields (client id, logo URL, person/role/institution)
    and extracts the course id from the membership-service URL.
    """
    form = request.form
    clientID = form['custom_clientid']
    logo_url = form['custom_logo_url']
    # NOTE(review): Canvas() is given the LTI custom_clientid where an API
    # key is normally expected -- confirm this is intended.
    canvas = Canvas(API_URL, clientID)
    user_data = {"Name":form['lis_person_name_full'],
                 "Role":form['roles'],
                 "Institution":form['tool_consumer_instance_name']
                 }
    import re
    # The course id is taken as the first run of 7+ digits in the URL.
    regex = r"\d{7,}"
    matches = re.findall(regex, form['custom_membership_service_url'], re.MULTILINE)
    course_id = matches[0]
    print(course_id)
    course = canvas.get_course(course_id)
    assigments=course.get_assignments()
    assList=[{"name":item.name,"id":item.id} for item in assigments]
    # return str(clientID)
    # `course` is rebound from the Canvas object to the template payload.
    course = {"Title":course.name,"Logo":logo_url}
    datas = {"ass":assList,"course":course,"user":user_data}
    return render_template("assList.html",data = datas)
@app.route('/clientID/<clientID>/course/<courseID>',methods=["GET","POST"])
def getHomeworkwiCourse(clientID=None,courseID=None):
    """List a course's assignments given an explicit client id and course
    id in the URL (manual variant of getHomework)."""
    canvas = Canvas(API_URL, clientID)
    course = canvas.get_course(courseID)
    assigments=course.get_assignments()
    assNameList=[item.name for item in assigments]
    assIDList=[item.id for item in assigments]
    print(assNameList)
    # return str(clientID)
    # NOTE(review): this passes a different context shape to assList.html
    # than getHomework does -- confirm the template handles both.
    return render_template("assList.html",len=len(assNameList),Ass=assNameList,AssID=assIDList,title=course.name)
@app.route('/submit', methods=['POST'])
def upload_file():
    """Receive an uploaded homework file, save it, and watermark it.

    Expects multipart form-data with a 'file' part plus name/title/inst/id
    fields consumed by the watermarker. Falls through to a bare upload
    form when the file part is rejected.
    """
    print('received!!')
    print(request.form)
    if request.method == 'POST':
        # check if the post request has the file part
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        file = request.files['file']
        # if user does not select file, browser also
        # submit an empty part without filename
        if file.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if file and allowed_file(file.filename):
            filename = secure_filename(file.filename)
            file_dir = os.path.join(app.config['UPLOAD_FOLDER'], filename)
            print("Upload: ",file_dir)
            # file.save() returns None -- this print both saves and logs.
            print("File Save: ",file.save(file_dir))
            f = request.form
            info = { 'name':f['name'],\
                     'title':f['title'],\
                     'inst':f['inst'],\
                     'id':f['id']}
            # Apply the watermark. NOTE(review): the result `d` is unused.
            w = water(file_dir,'./logo.png','./config.ini',info)
            d = w.do()
            return make_response("Uploaded",200)
    # Reached when the extension check fails (or on non-POST, which the
    # route does not actually allow): show a minimal upload form.
    return '''
<!doctype html>
<title>Upload new File</title>
<h1>Upload new File</h1>
<form method=post enctype=multipart/form-data>
<input type=file name=file>
<input type=submit value=Upload>
</form>
'''
from flask import send_from_directory
@app.route('/uploads/<filename>')
def uploaded_file(filename):
    """Serve a previously uploaded file as a download attachment."""
    # BUG/SECURITY FIX: the original built a safe response with
    # send_from_directory, discarded it, and then returned send_file on a
    # raw os.path.join of the user-supplied filename -- which permits
    # '..' path traversal. Return the safe response instead.
    return send_from_directory(app.config['UPLOAD_FOLDER'], filename,
                               as_attachment=True)
@app.route("/download/<fileName>",methods=["POST","GET"])
def downloadFile(fileName):
    """Download *fileName* from the upload folder as an attachment."""
    # SECURITY FIX: send_from_directory rejects the '..' path traversal
    # that the original os.path.join + send_file combination allowed.
    return send_from_directory(app.config['UPLOAD_FOLDER'], fileName,
                               as_attachment=True)
@app.route("/try",methods=["POST","GET"])
def test():
    """Debug endpoint: issues an OAuth2 authorization request against
    Canvas and echoes the response body back to the caller."""
    print(request.form)
    # NOTE(review): API_KEY is substituted where an OAuth client_id is
    # expected -- confirm; this also leaks the key into URLs and logs.
    url = "https://canvas.oregonstate.edu/login/oauth2/auth?client_id={0}&response_type=code&redirect_uri=https://example.com/oauth_complete".format(API_KEY)
    # headers = {"Authorization":"Bearer"+request.form['oauth_signature']}
    res = requests.get(url)
    print(res.text)
    return res.text
if __name__ == "__main__":
    # Development-only configuration: debug/test flags on, local uploads.
    app.config['ENV'] = 'development'
    app.config['DEBUG'] = True
    app.config['TESTING'] = True
    app.config['JSON_AS_ASCII'] = False
    app.config['UPLOAD_FOLDER']="static/uploadStack/"
    # SECURITY(review): hard-coded secret key; load from the environment
    # for anything beyond local development.
    app.secret_key = 'super secret key'
    app.config['SESSION_TYPE'] = 'filesystem'
    # ssl_context='adhoc' generates a throwaway self-signed certificate.
    # NOTE(review): port is passed as a string -- an int is conventional.
    app.run(host='0.0.0.0', port='2333',ssl_context='adhoc')
|
#
# This file is part of LUNA.
#
# Copyright (c) 2020 Great Scott Gadgets <info@greatscottgadgets.com>
# SPDX-License-Identifier: BSD-3-Clause
""" Link Management Packet (LMP) -related gateware. """
from amaranth import *
from usb_protocol.types.superspeed import HeaderPacketType, LinkManagementPacketSubtype
from ..link.header import HeaderQueue, HeaderPacket
class PortCapabilityHeaderPacket(HeaderPacket):
    """ Port Capability LMP layout; advertised during link bringup
    [USB3.2r1: 8.4.5]. Field widths are in bits; each DW is one 32-bit
    header-packet word. """
    DW0_LAYOUT = [
        ('type', 5),
        ('subtype', 4),
        ('link_speed', 7),
        ('reserved', 16)
    ]
    DW1_LAYOUT = [
        ('num_hp_buffers', 8),
        ('reserved_0', 8),
        ('supports_downstream', 1),
        ('supports_upstream', 1),
        ('reserved_1', 2),
        ('tiebreaker', 4),
        ('reserved_2', 8),
    ]
class PortConfigurationHeaderPacket(HeaderPacket):
    """ Port Configuration LMP layout: the host's link-speed assignment.
    Field widths are in bits. """
    DW0_LAYOUT = [
        ('type', 5),
        ('subtype', 4),
        ('link_speed', 7),
        ('reserved', 16)
    ]
class PortConfigurationResponseHeaderPacket(HeaderPacket):
    """ Port Configuration Response LMP layout: accept/reject reply to a
    Port Configuration request. Field widths are in bits. """
    DW0_LAYOUT = [
        ('type', 5),
        ('subtype', 4),
        ('response_code', 7),
        ('reserved', 16)
    ]
class LinkManagementPacketHandler(Elaboratable):
    """ Gateware that handles Link Management Packets.

    Attributes
    -----------
    header_sink: HeaderQueue(), input stream
        Stream that brings up header packets for handling.
    header_source: HeaderQueue(), output stream
        Stream that accepts header packets for generation.
    link_ready: Signal(), input
        Should be asserted once our link is ready; used to trigger advertising.
    """

    # Link speed constants.
    LINK_SPEED_5GBPS = 1

    # Constants.
    CONFIGURATION_ACCEPTED = 1
    CONFIGURATION_REJECTED = 2

    def __init__(self):
        #
        # I/O port
        #
        self.header_sink = HeaderQueue()
        self.header_source = HeaderQueue()

        # Status / control.
        self.usb_reset = Signal()
        self.link_ready = Signal()

    def elaborate(self, platform):
        m = Module()

        header_sink = self.header_sink
        header_source = self.header_source

        #
        # Pending "tasks" for our transmitter.
        #
        # Non-zero while a Port Configuration Response still needs to be
        # sent; holds the accept/reject response code.
        pending_configuration_result = Signal(2)

        #
        # LMP transmitter.
        #
        def send_packet_response(response_type, **fields):
            """ Helper that allows us to easily define a packet-send state."""

            # Create a response packet, and mark ourselves as sending it.
            response = response_type()
            m.d.comb += [
                header_source.valid .eq(1),
                header_source.header .eq(response),
                response.type .eq(HeaderPacketType.LINK_MANAGEMENT)
            ]

            # Next, fill in each of the fields:
            for field, value in fields.items():
                m.d.comb += response[field].eq(value)

        def handle_resets():
            """ Helper that brings down the link on USB reset. """
            with m.If(self.usb_reset):
                m.next = "LINK_DOWN"

        with m.FSM(domain="ss"):

            # LINK_DOWN -- our link is not yet ready to exchange packets; we'll wait until
            # it's come up to the point where we can exchange header packets.
            with m.State("LINK_DOWN"):
                # Once our link is ready, we're ready to start link bringup.
                with m.If(self.link_ready):
                    m.next = "SEND_CAPABILITIES"
                with m.Else():
                    # Any queued response is stale once the link is down.
                    m.d.ss += pending_configuration_result.eq(0)

            # SEND_CAPABILITIES -- our link has come up; and we're now ready to advertise our link
            # capabilities to the other side of our link [USB3.2r1: 8.4.5].
            with m.State("SEND_CAPABILITIES"):
                handle_resets()
                send_packet_response(PortCapabilityHeaderPacket,
                    subtype = LinkManagementPacketSubtype.PORT_CAPABILITY,
                    link_speed = self.LINK_SPEED_5GBPS,
                    # We're required by specification to support exactly four buffers.
                    num_hp_buffers = 4,
                    # For now, we only can operate as an upstream device.
                    supports_upstream = 1
                )

                # Continue to drive our packet until it's accepted by the link layer.
                with m.If(header_source.ready):
                    m.next = "DISPATCH_COMMANDS"

            # DISPATCH_COMMANDS -- we'll wait for a command to be queued, and then send it.
            with m.State("DISPATCH_COMMANDS"):
                handle_resets()

                # If we have a pending configuration result, send it!
                with m.If(pending_configuration_result):
                    m.next = "SEND_PORT_CONFIGURATION_RESPONSE"

            # SEND_CONFIGURATION_RESPONSE -- we're sending a Port Configuration Response,
            # typically as a result of receiving a Port Configuration Request packet.
            with m.State("SEND_PORT_CONFIGURATION_RESPONSE"):
                handle_resets()
                send_packet_response(PortConfigurationResponseHeaderPacket,
                    subtype = LinkManagementPacketSubtype.PORT_CONFIGURATION_RESPONSE,
                    response_code = pending_configuration_result
                )

                # Continue to drive our packet until it's accepted by the link layer.
                with m.If(header_source.ready):
                    m.d.ss += pending_configuration_result.eq(0)
                    m.next = "DISPATCH_COMMANDS"

        #
        # LMP receiver.
        #
        # We'll handle all link management packets.
        new_packet = header_sink.valid
        is_for_us = header_sink.get_type() == HeaderPacketType.LINK_MANAGEMENT
        with m.If(new_packet & is_for_us):

            # Accept the packet from the physical layer, so its buffer will be freed
            # on the next clock cycle.
            m.d.comb += header_sink.ready.eq(1)

            # We'll handle link management packets based on their subtype
            # (bits 5..8 of the first header word).
            subtype = header_sink.header.dw0[5:9]
            with m.Switch(subtype):

                # As an upstream-only Gen1 port, there's not much we need to do with
                # capability advertisements. For now, we'll mostly ignore them.
                with m.Case(LinkManagementPacketSubtype.PORT_CAPABILITY):
                    pass

                # If we receive a PORT_CONFIGURATION request, then our host is assigning
                # us a configuration.
                with m.Case(LinkManagementPacketSubtype.PORT_CONFIGURATION):
                    # Reinterpret the raw header through the configuration layout.
                    configuration = PortConfigurationHeaderPacket()
                    m.d.comb += configuration.eq(header_sink.header)

                    # For now, we only support Gen1 / 5Gbps, so we'll accept only links
                    # with that speed selected.
                    with m.If(configuration.link_speed == self.LINK_SPEED_5GBPS):
                        m.d.ss += pending_configuration_result.eq(self.CONFIGURATION_ACCEPTED)
                    with m.Else():
                        m.d.ss += pending_configuration_result.eq(self.CONFIGURATION_REJECTED)

                # TODO: handle any invalid packet types?
                with m.Default():
                    pass

        return m
|
import requests

# Fetch one day (1440 minutes) of listings for channel 80004 from the
# TVGuide mobile REST API and print the channel portion of the payload.
LISTINGS_URL = "http://mobilelistings.tvguide.com/Listingsweb/ws/rest/schedules/80004.null/start/1517214600/duration/1440?ChannelFields=Name&ScheduleFields=ProgramId%2CEndTime%2CStartTime%2CTitle%2C&formattype=json"

response = requests.get(LISTINGS_URL)
data = response.json()
print(data)
channel = data[0]['Channel']
print(channel)
|
class Serializer():
    """Abstract base for string/file (de)serializers.

    Subclasses implement parse() (object -> str) and unparse()
    (str -> object); this base class supplies the file plumbing.
    The filename value "default" is a sentinel meaning "use strings".
    """

    # ----------------------- SERIALIZATION ----------------------
    def serialize(self, obj, filename="default"):
        """Serialize *obj* to *filename*, or return the string when no
        real filename is given."""
        if filename != "default":
            return self.dump(obj, filename)
        else:
            return self.dumps(obj)

    # serializing a python object to a file (comment fixed: this is the
    # file variant; it also returns the written string)
    @classmethod
    def dump(cls, obj, filename):
        str_data = cls.parse(obj)
        # BUG FIX: use a context manager so the handle is closed even if
        # write() raises (the original leaked it on error).
        with open(filename, mode='w', encoding='UTF-8') as opened_file:
            opened_file.write(str_data)
        return str_data

    # serializing a python object to a string
    @classmethod
    def dumps(cls, obj):
        return cls.parse(obj)

    # parsing a python object to a string
    # expected to be overridden by a derived class
    def parse(obj):
        # NotImplementedError is the idiomatic (and still Exception-
        # compatible) signal for an abstract method.
        raise NotImplementedError("parse is not implemented for abstract Serializer")

    # ----------------------- DESERIALIZATION ----------------------
    def deserialize(self, str_data, filename="default"):
        """Deserialize from *filename* when given, else from *str_data*."""
        if filename != "default":
            return self.load(filename)
        else:
            return self.loads(str_data)

    # deserializing a python object from a file
    @classmethod
    def load(cls, filename):
        with open(filename, mode='r', encoding='UTF-8') as opened_file:
            str_data = opened_file.read()
        return cls.unparse(str_data)

    # deserializing a python object from a string
    @classmethod
    def loads(cls, str_data):
        return cls.unparse(str_data)

    # unparsing a string to a python object
    # expected to be overridden by a derived class
    def unparse(str_data):
        raise NotImplementedError("unparse is not implemented for abstract Serializer")
|
import os
from urllib import request, parse
import time
from cluster import knn_detect,get_file_name
import copy
import shutil
# client_id / client_secret are the AK / SK obtained from the Baidu console.
# Fetch an OAuth access token.
def get_token():
    """Request an access token from the Baidu AIP OAuth endpoint."""
    import json
    client_id = 'j6qXAsKVzYtqoGGvX6tLoI15'
    client_secret ='IpyFTwYKgsc5j9SkqmDXRnnsCiVV9IfQ'
    host = 'https://aip.baidubce.com/oauth/2.0/token?grant_type=client_credentials&client_id=%s&client_secret=%s'%(client_id, client_secret)
    req = request.Request(host)
    req.add_header('Content-Type', 'application/json; charset=UTF-8')
    try:
        response = request.urlopen(req)
    except ConnectionResetError:
        print("connectting error")
        # BUG FIX: the original retried recursively but discarded the
        # result and fell through to the unbound `response` -> NameError.
        return get_token()
    # Decode the body and parse it.
    content = bytes.decode(response.read())
    # SECURITY FIX: parse the server response with json.loads instead of
    # eval() on server-controlled text (the original also chopped the
    # last character; json.loads tolerates trailing whitespace).
    content = json.loads(content)
    return content['access_token']
# Build the request body: read both image files, base64-encode them, and
# wrap them in the JSON payload expected by the face-match API.
def imgdata(file1path, file2path):
    """Return the UTF-8 encoded JSON body for face/v3/match: image one is
    the LIVE capture, image two the IDCARD reference."""
    import base64
    import json
    # BUG FIX: context managers close the files (originals were leaked).
    with open(file1path, 'rb') as f1:
        pic1 = base64.b64encode(f1.read())
    with open(file2path, 'rb') as f2:
        pic2 = base64.b64encode(f2.read())
    image1 = str(pic1, 'utf-8')
    image2 = str(pic2, 'utf-8')
    params =json.dumps(
        [{"image": image1, "image_type": "BASE64", "face_type": "LIVE", "quality_control": "LOW"},
         {"image": image2, "image_type": "BASE64", "face_type": "IDCARD", "quality_control": "LOW"}]).encode(encoding='utf-8')
    return params
# Submit the two images for comparison and return the similarity score.
def img(file1path, file2path):
    """POST both images to the Baidu face-match API and return the score,
    or 0 (after deleting file1) when the response carries no result."""
    import json
    time_start = time.time()
    token = get_token()
    # Face detect API (unused):
    # url = 'https://aip.baidubce.com/rest/2.0/face/v2/detect?access_token='+token
    # Face compare API:
    url = 'https://aip.baidubce.com/rest/2.0/face/v3/match?access_token=' + token
    params = imgdata(file1path, file2path)
    req = request.Request(url, data=params)
    req.add_header('Content-Type', 'application/json; charset=UTF-8')
    try:
        response = request.urlopen(req)
    except Exception:
        # BUG FIX: the original retried recursively but discarded the
        # result and then used the unbound `response` -> NameError.
        return img(file1path, file2path)
    content = bytes.decode(response.read())
    # SECURITY FIX: json.loads instead of eval() on server-controlled
    # text (eval also chokes on JSON's null/true/false).
    content_dict = json.loads(content)
    if content_dict.get('result') is None:
        # No result: treat the capture as unusable and delete it.
        if os.path.exists(file1path):
            os.remove(file1path)
        return 0
    time_end = time.time()
    time_c = time_end - time_start
    print('time cost:', time_c, 's')
    score = content_dict['result']['score']
    if score > 85:
        print('照片相似度:' + str(score) + ',同一个人')
    else:
        print( '照片相似度:' + str(score) + ',不是同一个人')
    return score
def get_name(path_filenames, labels, id):
    """Return the filenames whose cluster label equals *id*."""
    matches = []
    for index, label in enumerate(labels):
        if label == id:
            matches.append(path_filenames[index])
    return matches
if __name__ == '__main__':
    # For every device directory: cluster its captured face images, then
    # compare the reference photos and each cluster representative via the
    # Baidu API. Directories/images scoring below 85 are deleted.
    error = []
    path_dir = '/media/zs/LinkData/machine_1028/Struct_KeyPoint_1018/4A/4A#175-176@18'
    nums = 4  # number of clusters requested from knn_detect
    n = 0
    for dirs in os.listdir(path_dir):
        start_time = time.time()
        try:
            path_filenames = get_file_name(path_dir + '/' + dirs + '/capture_images/2019-10-19')
        except FileNotFoundError:
            continue
        if not path_filenames:
            continue
        try:
            labels, cluster_centers = knn_detect(path_filenames, nums)
        except ValueError:
            continue
        # NOTE(review): picks the two reference photos by os.listdir()
        # index -- listdir order is arbitrary; confirm this is safe.
        tmp = os.listdir(path_dir + '/' + dirs)
        file2path = tmp[2]
        file1path = tmp[1]
        try:
            res = img(path_dir + '/' + dirs + '/'+file1path, path_dir + '/' + dirs + '/'+file2path)
            n += 1
            print(n, path_dir + '/' + dirs + '/'+file1path, path_dir + '/' + dirs + '/'+file2path)
            if res < 85:
                # Reference photos don't match: drop the whole directory.
                shutil.rmtree(path_dir + '/' + dirs)
                continue
        except NameError:
            error.append(path_dir + '/' + dirs)
            shutil.rmtree(path_dir + '/' + dirs)
            continue
        # Check one representative image from each cluster against the
        # ID reference; delete the whole cluster when it doesn't match.
        for i in range(nums):
            file_list = get_name(path_filenames, labels, i)
            file3path = file_list[0]
            # time.sleep(0.8)
            try:
                res = img(file3path, path_dir + '/' + dirs + '/'+file2path)
                if res < 85 or res is None:
                    for img_file in file_list:
                        if os.path.exists(img_file):
                            os.remove(img_file)
                        else:
                            pass
            except NameError:
                os.remove(file3path)
                continue
        end_time = time.time()
        time_c = end_time - start_time
        print('time cost:', time_c, 's')
    # Persist the list of directories that failed with NameError.
    file = open('error1.txt', 'w')
    file.write(str(error))
    file.close()
    list_remain = copy.deepcopy(error)
|
from carbon_black.endpoints.base_endpoint import Endpoint
class Stats(Endpoint):
    """Aggregates transaction counts and duplicate tickers from every
    configured nostradamus endpoint."""

    def __init__(self) -> None:
        super().__init__()

    def get(self) -> dict:
        """Return per-endpoint stats: total Transaction rows plus the
        tickers that occur more than once.

        Returns the single stats dict when only one endpoint is
        configured, otherwise the full list.
        """
        all_results = []
        for data_item in self.config['nostradamus']:
            db_result = self.query(
                data_item['api_endpoint'], "SELECT COUNT(transaction_id) FROM Transaction;")
            db_dups = self.query(
                data_item['api_endpoint'], "SELECT ticker, COUNT(ticker) AS dups FROM Transaction GROUP BY ticker HAVING (dups > 1);")
            dups = [{'ticker': row[0], 'count': row[1]} for row in db_dups]
            all_results.append({
                'api_endpoint': data_item['api_endpoint'],
                'total_items': db_result[0][0],
                'duplicate_items': dups
            })
        # BUG FIX: the original compared the *list* to 1 (`all_results == 1`),
        # which is never true, so the single-endpoint unwrap never happened.
        return all_results[0] if len(all_results) == 1 else all_results
|
# General Imports
import logging
import os
import database
import time
import threading
import sys
import ssl
from struct import unpack
# Socket Imports
import socket
import tqdm
import pickle
import csv
from config import SOCK
# MQTT Imports
import signal
import json
from time import sleep
import subprocess
import paho.mqtt.client as mqtt
import threading
from _thread import *
import hashlib
from config import MQT
'''
# Initialize Logging
logging.basicConfig( filemode='a',
format='%(asctime)s,%(msecs)d %(name)s %(levelname)s %(message)s',
datefmt='%Y-%m-%d, %H:%M:%S',
level=logging.INFO) # Global logging configuration
logger = logging.getLogger("SERVER") # Logger for this module
output_file_handler = logging.FileHandler("server.log")
stdout_handler = logging.StreamHandler(sys.stdout)
logger.addHandler(output_file_handler)
logger.addHandler(stdout_handler)
'''
# --- AWS Logging: timestamped records to the root logger at INFO level.
logging.basicConfig( format='%(asctime)s,%(msecs)d %(name)s %(levelname)s %(message)s',
                     datefmt='%Y-%m-%d, %H:%M:%S',
                     level=logging.INFO)
logger = logging.getLogger()


def lambda_handler(event, context):
    """AWS Lambda entry point: logs the environment and the triggering
    event. Returns nothing (no response body)."""
    logger.info('## ENVIRONMENT VARIABLES')
    logger.info(os.environ)
    logger.info('## EVENT')
    logger.info(event)
def launch_socket():
    """Run a mutual-TLS TCP server that receives length-prefixed payloads
    and writes them to the database (one thread per client)."""
    # device's IP address / TLS material, all taken from config.SOCK
    SERVER_HOST = SOCK.SERVER_HOST
    SERVER_PORT = SOCK.SERVER_PORT
    SERVER_CERT = SOCK.SERVER_CERT
    SERVER_KEY = SOCK.SERVER_KEY
    CLIENT_CERT = SOCK.CLIENT_CERT
    # Mutual TLS: clients must present a certificate we can verify.
    context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
    context.verify_mode = ssl.CERT_REQUIRED
    context.load_cert_chain(certfile=SERVER_CERT, keyfile=SERVER_KEY)
    context.load_verify_locations(cafile=CLIENT_CERT)
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)  # Create a TCP socket Object
    s.bind((SERVER_HOST, SERVER_PORT))  # Bind The Socket To Our Server Port
    s.listen(5)  # Listen, allow 5 pending requests
    logger.info('SOCKET - '+ f"[*] Listening as {SERVER_HOST}:{SERVER_PORT}")
    BUFFER_SIZE = 4096  # Receive 4096 Bytes In Each Transmission
    SEPARATOR = "<SEPARATOR>"

    def multi_threaded_client(conn):
        """Handle one client: read '<name><SEPARATOR><size>', then an
        8-byte big-endian length-prefixed payload; store it, ack, exit."""
        while True:
            received = conn.recv(BUFFER_SIZE).decode()
            filename, filesize = received.split(SEPARATOR)
            filename = os.path.basename(filename)
            filesize = int(filesize)
            progress = tqdm.tqdm(range(filesize), f"Receiving (unknown)", unit="B", unit_scale=True, unit_divisor=BUFFER_SIZE)
            try:
                # 8-byte big-endian length prefix for the real payload.
                bs = conn.recv(8)
                (length,) = unpack('>Q', bs)
                buffer = b""
                while len(buffer) < length:
                    to_read = length - len(buffer)
                    buffer += conn.recv(
                        4096 if to_read > 4096 else to_read)
                    # NOTE(review): update() is fed the *cumulative* size each
                    # pass, so the bar over-counts -- confirm before trusting it.
                    progress.update(len(buffer))
                progress.close()
                assert len(b'\00') == 1
                conn.sendall(b'\00')  # single-byte acknowledgement
            finally:
                conn.shutdown(socket.SHUT_WR)
                logger.info("SOCKET - Client Socket Shutdown")
                conn.close()
            try:
                buffer = buffer.decode("utf-8")
            except:
                # Binary payloads are pickled. NOTE(review): unpickling
                # network data is unsafe unless clients are fully trusted
                # (mutual TLS mitigates, but does not eliminate, this).
                buffer = pickle.loads(buffer)
            database.writeToDatabase(buffer)  # TESTING
            logger.info("SOCKET - Submitted To Database")
            sys.exit()

    def wrappedSocket():
        """Accept loop: TLS-wrap each client and hand it to a thread."""
        while True:
            client_socket, address = s.accept()
            logger.info('SOCKET - ' + f"[+] {address} is connected.")
            try:
                conn = context.wrap_socket(client_socket, server_side=True)
                #logger.info("SSL established. Peer: {}".format(conn.getpeercert()))
                start_new_thread(multi_threaded_client, (conn,))
            except:
                # TLS handshake failed -- client had no valid certificate.
                logger.error('Unauthorised Access Attempt')

    # NOTE(review): because this guard sits *inside* launch_socket(), the
    # accept loop only runs when the module is executed as a script; when
    # imported, launch_socket() sets everything up and then returns.
    if __name__ == "__main__":
        wrappedSocket()
def launch_mqtt():
    """Connect a paho MQTT client to the broker and dispatch incoming
    messages (device registrations) to the database."""
    # Global Variables
    BROKER_HOST = MQT.BROKER_HOST
    BROKER_PORT = MQT.BROKER_PORT
    CLIENT_ID = MQT.CLIENT_ID
    TOPIC = MQT.TOPIC
    DATA_BLOCK_SIZE = 2000
    process = None
    client = None  # MQTT client instance. See init_mqtt()
    logger.info("MQTT - Creating MQTT Instance")

    def switch(msg):
        # Dispatch on topic: device-registration messages go to the DB.
        msg_dec = msg.payload.decode("utf-8")  # Writes the decoded msg to an object
        msg_top = msg.topic
        if msg_top == 'cycle/init':
            database.addDeviceToDB(msg_dec)

    # --- MQTT Related Functions and Callbacks --------------------------------------------------------------
    def on_connect( client, user_data, flags, connection_result_code):
        if connection_result_code == 0:
            logger.info("MQTT - Connected to Broker")
        else:
            logger.error("MQTT - Failed to connect to Broker: " + mqtt.connack_string(connection_result_code))
        # (Re)subscribe on every connect so reconnects keep the subscription.
        client.subscribe(TOPIC)

    def on_disconnect( client, user_data, disconnection_result_code):
        logger.error("MQTT - Disconnected from Broker")

    def on_message( client, user_data, msg):  # Callback called when a message is received on a subscribed topic.
        logger.debug("MQTT - Received message for topic {}: {}".format( msg.topic, msg.payload))
        switch(msg)

    def on_publish(client, user_data, connection_result_code):
        logger.info("MQTT - Message Published")
        pass

    def main():
        # NOTE(review): `global client` binds a *module-level* name, not the
        # launch_mqtt-local `client` above -- confirm which is intended.
        global client
        # Our MQTT Client. See PAHO documentation for all configurable options.
        # "clean_session=True" means we don"t want Broker to retain QoS 1 and 2 messages
        # for us when we"re offline. You"ll see the "{"session present": 0}" logged when
        # connected.
        logger.info("MQTT - Initialising Client")
        client = mqtt.Client(
            client_id=CLIENT_ID,
            clean_session=False)
        # Route Paho logging to Python logging.
        client.enable_logger()
        # Setup callbacks
        client.on_connect = on_connect
        client.on_disconnect = on_disconnect
        client.on_message = on_message
        client.on_publish = on_publish
        # Connect to Broker, retrying every 5 seconds until it succeeds.
        while True:
            try:
                client.connect(BROKER_HOST, BROKER_PORT)
                break
            except:
                logger.error("MQTT - Failed to connect to broker. Retrying...")
            finally:
                time.sleep(5)
        # Background network loop; main() returns immediately afterwards.
        client.loop_start()

    # NOTE(review): as in launch_socket(), this inner guard means the MQTT
    # client only starts when the module runs as a script.
    if __name__ == "__main__":
        main()
        logger.info("MQTT - Listening for messages on topic '" + str(TOPIC))
if __name__ == "__main__":
    # Run the TLS socket server in its own thread; the MQTT listener is
    # currently disabled (commented out).
    t1 = threading.Thread(target=launch_socket)
    #t2 = threading.Thread(target=launch_mqtt)
    t1.start()
    # t2.start()
|
class Account(object):
    """A trading cash account that applies the bound stock's commission to
    every credit/debit and records the balance after each operation."""

    def __init__(self, init_value):
        self.init_value = init_value
        self.value = self.init_value
        # Balance histories both start from the opening balance.
        self.credit_list = [self.value]
        self.debit_list = [self.value]
        self.last_credit = self.value
        self.last_debit = self.value

    def bind_stock(self, stockobj):
        """Attach the stock object whose commission applies to trades."""
        self.stockobj = stockobj

    def credit(self, value):
        '''add money to account'''
        net_gain = value - self.stockobj.commission
        self.value = self.value + net_gain
        self.credit_list.append(self.value)
        self.last_credit = self.value

    def debit(self, value):
        '''take money from account'''
        net_cost = value + self.stockobj.commission
        self.value = self.value - net_cost
        self.debit_list.append(self.value)
        self.last_debit = self.value
|
'''
imputationflask.model
-------------------
Database model
'''
from datetime import datetime
from flask_sqlalchemy import SQLAlchemy
HASH_STRING_SIZE = 128
COMMENT_STRING_SIZE = 1000
EMAIL_STRING_SIZE = 254
db = SQLAlchemy()
class Comment(db.Model):
    """A user-submitted comment, optionally with a contact email."""
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    email = db.Column(db.String(EMAIL_STRING_SIZE), nullable=True)
    comment = db.Column(db.String(COMMENT_STRING_SIZE), nullable=False)
    # BUG FIX: pass the *callable* datetime.now, not datetime.now() -- the
    # original evaluated once at import, stamping every row with the
    # process start time.
    timestamp = db.Column(db.TIMESTAMP(timezone=False),
                          nullable=False, default=datetime.now)
    # BUG FIX: ForeignKey takes a table.column name; flask-sqlalchemy
    # derives the table name 'user' from the User class.
    user_hash_id = db.Column(db.String(HASH_STRING_SIZE),
                             db.ForeignKey('user.hash_id'), nullable=False)
    user = db.relationship('User', backref=db.backref('comment', lazy=True))

    def __repr__(self):
        return f'<Comment {self.id}>'
class CensusImputationRequest(db.Model):
    """Join record of a user requesting a census imputation."""
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    # BUG FIX: callable default so each row gets its own timestamp.
    timestamp = db.Column(db.TIMESTAMP(timezone=False),
                          nullable=False, default=datetime.now)
    # BUG FIX: reference the generated table names ('user',
    # 'census_imputation'), not the Python class names.
    user_hash_id = db.Column(db.String(HASH_STRING_SIZE),
                             db.ForeignKey('user.hash_id'), nullable=False)
    imputation_hash_id = db.Column(db.String(HASH_STRING_SIZE), db.ForeignKey(
        'census_imputation.hash_id'), nullable=False)
    # BUG FIX: the original reused the backref name 'comment', which
    # collides with Comment.user's backref and breaks mapper configuration.
    user = db.relationship('User', backref=db.backref('imputation_requests', lazy=True))
    imputation = db.relationship('CensusImputation', backref=db.backref(
        'censusimputationrequest', lazy=True))

    def __repr__(self):
        return f'<Imputation request {self.id}>'
class User(db.Model):
    """A site visitor, identified by a hash of their fingerprint."""
    hash_id = db.Column(db.String(HASH_STRING_SIZE), primary_key=True)
    # Raw browser-fingerprint payload (JSON); presumably the source of
    # hash_id -- confirm against the code that creates users.
    fingerprint = db.Column(db.JSON)

    def __repr__(self):
        return f'<User with hash {self.hash_id}>'
class CensusImputation(db.Model):
    """A stored imputation run: input and output payloads keyed by hash."""
    hash_id = db.Column(db.String(HASH_STRING_SIZE), primary_key=True)
    input_object = db.Column(db.JSON)
    output_object = db.Column(db.JSON)

    def __repr__(self):
        return f'<Census imputation with hash {self.hash_id}>'
|
import time
from tempmail import TempMail
from html.parser import HTMLParser
class MyHTMLParser(HTMLParser):
    """HTML parser that collects the href value of every anchor tag fed
    to it, in document order, into `self.result`."""

    def __init__(self):
        super().__init__()
        self.result = []

    def handle_starttag(self, tag, attrs):
        """Record the href attribute of each <a> tag as it is seen."""
        if tag != "a":
            return
        for attr_name, attr_value in attrs:
            if attr_name == "href":
                self.result.append(attr_value)
# https://pypi.python.org/pypi/temp-mail
class TempEmail(object):
    """Disposable-mailbox helper built on the temp-mail service."""

    def __init__(self):
        self.tm = TempMail()
        # Freshly generated throwaway address to receive mail on.
        self.email = self.tm.get_email_address()

    # this method will wait the needed for us email
    # if you send number_of_email=3 , this method will
    # wait a third email and return link from it
    def get_link_from_email_by_number(self, number_of_email, number_of_link=1):
        """Poll the mailbox until *number_of_email* messages are present,
        then return one href from the newest message's body.

        Polls every 5 seconds; returns None after 10 minutes of waiting.
        NOTE(review): result[number_of_link] with the default of 1 yields
        the SECOND href found in the mail -- confirm the off-by-one is
        intentional.
        """
        delay = 5
        start_time = time.time()
        while True:
            time.sleep(delay)
            mail = self.tm.get_mailbox(self.email)
            current_time = time.time()
            end_time = current_time - start_time
            # if the letter doesnt come in 10 minutes function will return None
            if end_time > 600:
                print('Message hasn`t came more than 10 minutes')
                return None
            if isinstance(mail, list) and len(mail) == number_of_email:
                # Re-fetch and take the awaited (most recent) message.
                mail = self.tm.get_mailbox(self.email)[number_of_email - 1]
                html = mail.get('mail_text_only')
                parser = MyHTMLParser()
                parser.feed(html)
                return parser.result[number_of_link]
|
from rest_framework import generics, status
from rest_framework.permissions import IsAuthenticatedOrReadOnly
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework_jwt.authentication import (JSONWebTokenAuthentication,
get_authorization_header)
from rest_framework_jwt.settings import api_settings
from .models import Artist, Song, User
from .serializers import (ArtistDetailSerializer, ArtistSerializer,
SongSerializer, UserCreateSerializer, UserSerializer)
class SongList(generics.ListCreateAPIView):
    """List all songs; JWT-authenticated users may create new ones."""
    queryset = Song.objects.all()
    serializer_class = SongSerializer
    permission_classes = (IsAuthenticatedOrReadOnly, )
    authentication_classes = (JSONWebTokenAuthentication, )


class SongDetail(generics.RetrieveAPIView):
    """Retrieve a single song by primary key (read-only)."""
    queryset = Song.objects.all()
    serializer_class = SongSerializer


class ArtistList(generics.ListCreateAPIView):
    """List all artists; JWT-authenticated users may create new ones."""
    queryset = Artist.objects.all()
    serializer_class = ArtistSerializer
    permission_classes = (IsAuthenticatedOrReadOnly, )
    authentication_classes = (JSONWebTokenAuthentication, )


class ArtistDetail(generics.RetrieveAPIView):
    """Retrieve a single artist using the detail serializer."""
    queryset = Artist.objects.all()
    serializer_class = ArtistDetailSerializer


class UserList(generics.ListAPIView):
    """List all users (read-only)."""
    queryset = User.objects.all()
    serializer_class = UserSerializer
class UserDetail(generics.RetrieveUpdateAPIView):
    """Retrieve any user; allow users to update only their own record."""
    queryset = User.objects.all()
    serializer_class = UserSerializer
    permission_classes = (IsAuthenticatedOrReadOnly, )
    authentication_classes = (JSONWebTokenAuthentication, )

    def update(self, request, *args, **kwargs):
        """Reject with 401 unless the JWT's user_id matches the record
        being modified."""
        # print(jwt_decode_handler(get_authorization_header(request).split()[1].decode('utf-8')).get('user_id'))
        instance = self.get_object()
        jwt_decode_handler = api_settings.JWT_DECODE_HANDLER
        # Token is the second element of the "Bearer <token>" header.
        jwt = get_authorization_header(request).split()[1].decode('utf-8')
        user = jwt_decode_handler(jwt)
        user_id = user.get('user_id')
        if instance.pk != user_id:
            return Response({'message': 'Unauthorized'}, status=status.HTTP_401_UNAUTHORIZED)
        return super().update(request, *args, **kwargs)
class UserCreate(generics.CreateAPIView):
    """Register a new user and return a JWT alongside the user data, so
    the client is logged in immediately after signup."""
    model = User
    serializer_class = UserCreateSerializer

    def create(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        if serializer.is_valid():
            self.perform_create(serializer)
            headers = self.get_success_headers(serializer.data)
            # Re-fetch the created user to build the token payload.
            user = User.objects.get(username=request.data['username'])
            jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER
            jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER
            payload = jwt_payload_handler(user)
            token = jwt_encode_handler(payload)
            temp = {'token': token, 'user': UserSerializer(user).data}
            return Response(temp, status=status.HTTP_201_CREATED, headers=headers)
        return Response({'message': serializer.errors}, status=status.HTTP_400_BAD_REQUEST)
class ReturnUser(APIView):
    """Resolve a JWT (POSTed as 'token') back to its serialized user."""

    def post(self, request):
        jwt_decode_handler = api_settings.JWT_DECODE_HANDLER
        payload = jwt_decode_handler(request.data['token'])
        user = User.objects.get(pk=payload['user_id'])
        serializer = UserSerializer(user)
        return Response(serializer.data)
class UserSongsList(generics.ListAPIView):
    """List the songs belonging to the user given by the URL pk."""
    serializer_class = SongSerializer

    def get_queryset(self):
        pk = self.kwargs['pk']
        return Song.objects.filter(user__pk=pk)


class ArtistSongsList(generics.ListAPIView):
    """List the songs by the artist given by the URL pk."""
    serializer_class = SongSerializer

    def get_queryset(self):
        pk = self.kwargs['pk']
        return Song.objects.filter(artist__pk=pk)
class Search(APIView):
    """Case-insensitive substring search over artist names and song
    titles, returned together under 'result'."""

    def get(self, request, format=None):
        # ROBUSTNESS FIX: default to '' so a request without ?q= matches
        # everything instead of performing an icontains=None lookup.
        q = request.query_params.get('q', '')
        artists = Artist.objects.filter(name__icontains=q).all()
        songs = Song.objects.filter(title__icontains=q).all()
        return Response({'result': {
            'artists': ArtistSerializer(artists, many=True).data,
            'songs': SongSerializer(songs, many=True).data
        }}, status=status.HTTP_200_OK)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
last mod 7/17/19
Determines the 3m x 3m tiles that are most important to apply object detection on.
This can be used to speed up an object detector, at the cost of lowered accuracy
because of missed detections (but that's the reason to carefully choose tiles to miss).
At the moment, this effect is only simulated (detections outside the tiles are
not used by the tracker).
currently not a modular function, a.k.a. based on the specific object parameterization
from singleIntegrator.py
"""
import numpy as np
from config import grnd2checkgrid, grndstart, grndstep, grndlen
from singleIntegrator import soPositionDistribution
from occupancygrid import mapNormal2Subgrid
""" determines whether tile with tracked object will be checked
set so that well-tracked (steady-state) object is as important as unviewable tile
or tile adjacent to border
"""
_steadystatedistentropy = .9
_maxdistentropy = 3.
_existentropymultiplier = .1/.25 * _maxdistentropy/_steadystatedistentropy
def objectEntropy(obj, existprob):
    """Score how valuable it is to re-observe this tracked object.

    Combines positional uncertainty (clipped sum of the state std devs at
    the variance indices, normalized against the steady-state value) with
    existence uncertainty (largest when existprob is near one half).
    """
    std_sum = np.sqrt(obj[[6, 13, 20, 27, 34]]).sum()
    dist_term = min(max(std_sum, 0.), _maxdistentropy) * (.05 / _steadystatedistentropy)
    exist_term = (1 - existprob) * existprob * _existentropymultiplier
    return existprob * dist_term + exist_term
def subselectDetector(objects, objecthypweights, occupancy, visibility, empty, ntiles):
    """Pick the ``ntiles`` highest-value grid tiles to run the detector on.

    Scores each tile by its occupancy prior plus the uncertainty of any
    tracked object whose position distribution overlaps it, zeroes tiles
    that are unviewable or known-empty, then returns a boolean grid of the
    selected tiles (known-empty tiles are always marked as "detected").
    """
    tilescores = occupancy.copy()
    for objidx in range(len(objects)):
        obj = objects[objidx]
        # obj[42] is the object's existence probability; weight by hypothesis weight
        objectexistprob = obj[42] * objecthypweights[objidx]
        if objectexistprob < 1e-3: continue
        objuncertainty = objectEntropy(obj, objectexistprob)
        positiondist = soPositionDistribution(obj)
        # paint the object's position distribution onto a local subgrid
        subgridloc, occupysubgrid = mapNormal2Subgrid(positiondist,
                    grndstart, grndstep, grndlen, subsize=2)
        subgridend = subgridloc + occupysubgrid.shape
        tilescores[subgridloc[0]:subgridend[0],
                   subgridloc[1]:subgridend[1]] += occupysubgrid * objuncertainty
    tilescores *= visibility # no point in checking undetectable tiles
    tilescores[empty] = 0 # always "detect" empty tiles
    # tiles from the check grid that are already known to be empty
    emptytiles = grnd2checkgrid[np.where(empty[grnd2checkgrid[:,0],
                                               grnd2checkgrid[:,1]])[0]]
    # rank candidate tiles by score; keep at most ntiles with positive score
    tiles2detect = np.argsort(tilescores[grnd2checkgrid[:,0], grnd2checkgrid[:,1]])
    ntilespossible = sum(tilescores[grnd2checkgrid[:,0],grnd2checkgrid[:,1]]>0)
    tiles2detect = grnd2checkgrid[tiles2detect[-min(ntiles, ntilespossible):]]
    tiles2detect = np.append(tiles2detect, emptytiles, axis=0)
    # scatter detected tiles to binary grid
    tiles2detectgrid = np.zeros(grndlen, dtype=bool)
    tiles2detectgrid[tiles2detect[:,0], tiles2detect[:,1]] = 1
    return tiles2detectgrid
|
class Solution(object):
    def minimumTotal(self, triangle):
        """Return the minimum top-to-bottom path sum of a triangle.

        Works bottom-up in place: each cell becomes the cheapest cost of
        reaching the bottom row from that cell, so the answer ends up at
        the apex. Note: mutates ``triangle``.

        Fixes: the original raised IndexError for an empty triangle
        (``return triangle[0][0]`` on ``[]``); now returns 0 for ``None``
        or ``[]``. Also replaces the verbose comparison with ``min``.
        """
        if not triangle:  # covers both None and []
            return 0
        for k in range(len(triangle) - 2, -1, -1):
            for j in range(len(triangle[k])):
                triangle[k][j] += min(triangle[k + 1][j], triangle[k + 1][j + 1])
        return triangle[0][0]
solu = Solution()
# Smoke checks against known triangles (expected: 11, 5, 2, 0).
print( solu.minimumTotal([[2], [3,4], [6,5,7], [4,1,8,3]]) )
print( solu.minimumTotal([[2], [3,4]]) )
print( solu.minimumTotal([[2]]) )
# NOTE(review): the empty-triangle case raised IndexError in the original
# implementation of minimumTotal — verify the fix is in before relying on it.
print( solu.minimumTotal([]) )
|
import sys
import time
import Queue as q
from core import DefaultProcess
class Discoverer(DefaultProcess):
    """Periodically runs each collector's discovery and queues the results.

    Collectors are polled in a loop; once a collector's ``next_discovery``
    deadline has passed, ``do_discovery()`` is invoked and the returned
    item is pushed onto the shared queue.

    Fix: removed the dead ``else: item = None`` branch — the ``None`` item
    was never appended to ``items``, so the assignment had no effect.
    """

    def __init__(self,
                 name='Discoverer',
                 queue=None,
                 shutdown=None,
                 config=None,
                 collectors=None):
        DefaultProcess.__init__(self,
                                name=name,
                                shutdown=shutdown,
                                config=config)
        self.queue = queue            # shared output queue for discovered items
        self.collectors = collectors  # objects providing do_discovery()/discovery_interval

    def shutdown(self):
        self.log.info('Stopping')
        sys.exit()

    def run(self):
        self.log.info('Starting')
        # None compares smaller than any number in Python 2, so every
        # collector fires on the first pass through the loop.
        for collector in self.collectors:
            collector.next_discovery = None
        while True:
            self.check_shutdown()
            now = time.time()
            items = []
            for collector in self.collectors:
                if collector.next_discovery < now:
                    items.append(collector.do_discovery())
                    collector.next_discovery = now + collector.discovery_interval
            for item in items:
                try:
                    self.log.debug('Putting item to queue: %s' % item)
                    # NOTE(review): put() blocks by default; Full is only raised
                    # for non-blocking/timeout puts — confirm intent.
                    self.queue.put(item)
                except q.Full:
                    self.log.warning('Queue is full')
            time.sleep(5)
|
from tkinter import *
from PIL import Image, ImageTk
import numpy as np
import cv2
import os
import shutil
from run_all_models import run_all
from test import ESR_gan
import os.path as osp
import glob
import cv2
import numpy as np
import torch
import RRDBNet_arch as arch
#orig1.jpeg lady with hat
#orig2.jpeg scenery
#orig3.jpg food
#orig4.jpg dog
#orig5.jpg eiffel tower
#orig6.jpg city
sets = "set4"
orig = "orig4.jpg"
class Region_Coords(object):
    """Tk window that lets the user brush a region and pick a style.

    The class-level mutables are read back by ``__main__`` after
    ``mainloop()`` exits: ``arr`` collects the brushed (x, y) points and
    ``curr_image`` the style path active during each brush motion.
    """
    DEFAULT_COLOR = 'black'
    File="images/content-images/"+orig  # content image shown on the canvas
    arr=[]          # brushed coordinates; mutated via self.arr.append (class-level)
    curr_image=[1]  # style path per brushed point; consumed by __main__
    def __init__(self):
        self.root = Tk()
        #self.File = "orig.jpeg" #"C:/Users/Priyanka C/AnacondaProjects/CMPT 726 Project/orig.jpeg"
        self.img = ImageTk.PhotoImage(Image.open(self.File))
        Label(self.root, text = 'Style Brush', font =('Verdana', 15)).grid(column=3, columnspan=2, row=0)
        # One thumbnail button per pre-rendered style output (1..8).
        self.photo1 = Image.open("images/output-images/"+sets+"/1-horses.jpg")
        self.photo1 = self.photo1.resize((40,40), Image.ANTIALIAS)
        self.photo1 = ImageTk.PhotoImage(self.photo1)
        self.first_style = Button(self.root, text='horses', image=self.photo1, compound=TOP, command=self.use_style_one)
        self.first_style.grid(row=1, column=0, padx=0, pady=0)
        self.photo2 = Image.open("images/output-images/"+sets+"/2-trees.jpg")
        self.photo2 = self.photo2.resize((40,40), Image.ANTIALIAS)
        self.photo2 = ImageTk.PhotoImage(self.photo2)
        self.second_style = Button(self.root, text='trees', image=self.photo2, compound=TOP, command=self.use_style_two)
        self.second_style.grid(row=1, column=1)
        self.photo3 = Image.open("images/output-images/"+sets+"/3-blue_trees.jpg")
        self.photo3 = self.photo3.resize((40,40), Image.ANTIALIAS)
        self.photo3 = ImageTk.PhotoImage(self.photo3)
        self.third_style = Button(self.root, text='blue trees', image=self.photo3, compound=TOP, command=self.use_style_three)
        self.third_style.grid(row=1, column=2)
        self.photo4 = Image.open("images/output-images/"+sets+"/4-sick_child.jpg")
        self.photo4 = self.photo4.resize((40,40), Image.ANTIALIAS)
        self.photo4 = ImageTk.PhotoImage(self.photo4)
        self.fourth_style = Button(self.root, text='sick child', image=self.photo4, compound=TOP, command=self.use_style_four)
        self.fourth_style.grid(row=1, column=3)
        self.photo5 = Image.open("images/output-images/"+sets+"/5-candy.jpg")
        self.photo5 = self.photo5.resize((40,40), Image.ANTIALIAS)
        self.photo5 = ImageTk.PhotoImage(self.photo5)
        self.fifth_style = Button(self.root, text='candy', image=self.photo5, compound=TOP, command=self.use_style_five)
        self.fifth_style.grid(row=1, column=4)
        self.photo6 = Image.open("images/output-images/"+sets+"/6-mosaic.jpg")
        self.photo6 = self.photo6.resize((40,40), Image.ANTIALIAS)
        self.photo6 = ImageTk.PhotoImage(self.photo6)
        self.sixth_style = Button(self.root, text='mosaic', image=self.photo6, compound=TOP, command=self.use_style_six)
        self.sixth_style.grid(row=1, column=5)
        self.photo7 = Image.open("images/output-images/"+sets+"/7-udnie.jpg")
        self.photo7 = self.photo7.resize((40,40), Image.ANTIALIAS)
        self.photo7 = ImageTk.PhotoImage(self.photo7)
        self.seventh_style = Button(self.root, text='udnie', image=self.photo7, compound=TOP, command=self.use_style_seven)
        self.seventh_style.grid(row=1, column=6)
        self.photo8 = Image.open("images/output-images/"+sets+"/8-rain_princess.jpg")
        self.photo8 = self.photo8.resize((40,40), Image.ANTIALIAS)
        self.photo8 = ImageTk.PhotoImage(self.photo8)
        self.eighth_style = Button(self.root, text='rain princess', image=self.photo8, compound=TOP, command=self.use_style_eight)
        self.eighth_style.grid(row=1, column=7)
        # Drawing canvas sized to the content image.
        self.c = Canvas(self.root, bg='white', width=self.img.width(), height=self.img.height())
        self.c.grid(row=2, columnspan=8)
        self.c.create_image(0,0,image=self.img,anchor="nw")
        self.setup()
        # Blocks until the window is closed; __main__ resumes afterwards.
        self.root.mainloop()
    def setup(self):
        """Initialise brush state and wire up mouse bindings."""
        self.old_x = None
        self.old_y = None
        self.eraser_on = False
        self.active_button = self.first_style
        self.activate_button(self.first_style, "images/output-images/"+sets+"/1-horses.jpg")
        self.style = "images/output-images/"+sets+"/1-horses.jpg"
        self.c.bind('<B1-Motion>', self.brushybrushy)
        self.c.bind('<ButtonRelease-1>', self.reset)
    def use_style_one(self):
        self.activate_button(self.first_style, "images/output-images/"+sets+"/1-horses.jpg")
    def use_style_two(self):
        self.activate_button(self.second_style, "images/output-images/"+sets+"/2-trees.jpg")
    def use_style_three(self):
        self.activate_button(self.third_style, "images/output-images/"+sets+"/3-blue_trees.jpg")
    def use_style_four(self):
        self.activate_button(self.fourth_style, "images/output-images/"+sets+"/4-sick_child.jpg")
    def use_style_five(self):
        self.activate_button(self.fifth_style, "images/output-images/"+sets+"/5-candy.jpg")
    def use_style_six(self):
        self.activate_button(self.sixth_style, "images/output-images/"+sets+"/6-mosaic.jpg")
    def use_style_seven(self):
        self.activate_button(self.seventh_style, "images/output-images/"+sets+"/7-udnie.jpg")
    def use_style_eight(self):
        self.activate_button(self.eighth_style, "images/output-images/"+sets+"/8-rain_princess.jpg")
    def activate_button(self, some_button, curr_style):
        """Visually depress the chosen style button and remember its path."""
        self.active_button.config(relief=RAISED)
        some_button.config(relief=SUNKEN)
        self.active_button = some_button
        self.style = curr_style
    def brushybrushy(self, event):
        """Mouse-drag handler: draw the stroke and record point + active style."""
        #self.c.create_line(0, 0, 30, 30, width=5.0, fill='blue', capstyle=ROUND, smooth=TRUE, splinesteps=36)
        if self.old_x and self.old_y:
            self.c.create_line(self.old_x, self.old_y, event.x, event.y,
                               width=3, fill=self.DEFAULT_COLOR,
                               capstyle=ROUND, smooth=TRUE, splinesteps=36)
        self.old_x = event.x
        self.old_y = event.y
        # append mutates the CLASS-level lists, so __main__ sees the data
        self.arr.append([event.x, event.y])
        self.curr_image.append(self.style)
        #print(self.arr)
        #print("")
    def reset(self, event):
        """Button-release handler: end the current stroke."""
        self.old_x, self.old_y = None, None
        # NOTE(review): these assignments create INSTANCE attributes that
        # shadow the class-level arr/curr_image — the class-level lists
        # (read by __main__) are NOT cleared here. Confirm this is intended.
        self.arr=[]
        self.curr_image=[]
        print("stop")
def free_form_roi(ip, roi_corners):
    """Load the image at ``ip`` and build a filled-polygon mask over it.

    Returns ``(image, mask)`` where ``mask`` is white (255 per channel)
    inside ``roi_corners`` and black elsewhere.
    """
    image = cv2.imread(ip, -1)
    mask = np.zeros(image.shape, dtype=np.uint8)
    fill_color = (255,) * image.shape[2]
    cv2.fillPoly(mask, roi_corners, fill_color)
    return image, mask
def save_mask(image, mask, style):
    """Feather the ROI mask with a heavy Gaussian blur and save it.

    Blurring in place gives the styled region a soft boundary when it is
    later composited over the content image.
    """
    #image_not = cv2.bitwise_not(image) #or image from where we want to get that roi
    #cv2.imwrite(style, image_not)
    ksize = [25, 25]  # kernel dimensions; GaussianBlur requires odd sizes
    for axis in (0, 1):
        if ksize[axis] % 2 == 0:
            ksize[axis] += 1
    cv2.GaussianBlur(mask, (ksize[0], ksize[1]), 111, dst=mask)
    cv2.imwrite("images/output-images/"+sets+"/mask.jpg", mask)
def combine_pil(ip, style):
    """Alpha-blend the styled image over the content image using the mask.

    The saved mask (0..255) is normalized to 0..1 and used as a per-pixel
    blend weight; the composite is written to the set's ``final.jpg``.
    """
    content = np.array(Image.open(ip))
    target_size = content.shape[1::-1]  # (width, height) order for PIL
    styled = np.array(Image.open(style).resize(target_size, Image.BILINEAR))
    weights = np.array(Image.open("images/output-images/"+sets+"/mask.jpg").resize(target_size, Image.BILINEAR)) / 255
    blended = styled * weights + content * (1 - weights)
    Image.fromarray(blended.astype(np.uint8)).save("images/output-images/"+sets+"/final.jpg")
class Show_Res(object):
    """Tk window that shows the composited result and asks whether to continue.

    ``choice`` is class-level so __main__ can read the answer ('y'/'n')
    after mainloop() exits.
    """
    File="images/output-images/"+sets+"/final.jpg"  # composite produced by combine_pil
    choice=['y']  # answers appended by the buttons; last element is the latest
    def __init__(self):
        self.root = Tk()
        self.img = ImageTk.PhotoImage(Image.open(self.File))
        Label(self.root, text = 'Your final result!', font =('Verdana', 15)).grid(column=2, columnspan=4, row=0)
        Label(self.root, text = 'Do you want to continue?', font =('Verdana', 10)).grid(column=2, columnspan=4, row=1)
        self.yes_button = Button(self.root, text='YES', command=self.ans_yes)
        self.yes_button.grid(row=2, column=2, padx=0, pady=0)
        self.no_button = Button(self.root, text='NO', command=self.ans_no)
        self.no_button.grid(row=2, column=5, padx=0, pady=0)
        self.c = Canvas(self.root, bg='white', width=self.img.width(), height=self.img.height())
        self.c.grid(row=3, columnspan=8)
        self.c.create_image(0,0,image=self.img,anchor="nw")
        self.active_button = self.yes_button
        self.activate_button(self.yes_button)
        # Blocks until the window is closed; __main__ then reads Show_Res.choice.
        self.root.mainloop()
    def ans_yes(self):
        self.activate_button(self.yes_button)
        self.choice.append('y')
        #print("they chose yes")
    def ans_no(self):
        self.activate_button(self.no_button)
        self.choice.append('n')
        #print("they chose no :(")
    def activate_button(self, some_button):
        """Visually depress the chosen button."""
        self.active_button.config(relief=RAISED)
        some_button.config(relief=SUNKEN)
        self.active_button = some_button
if __name__ == '__main__':
    #Comment next line out to not run model
    run_all(orig, sets)
    again = "y"
    prog_run = 1
    # Repeat brush -> mask -> blend -> review until the user answers 'n'.
    while again=="y" or again=="Y":
        Region_Coords()
        #print("From main",Region_Coords.arr)
        style = Region_Coords.curr_image
        #print("From main", style)
        style = style[len(style)-1]
        coord = Region_Coords.arr
        coord=np.array([coord])
        # NOTE(review): with coord shaped (1, N, 2) this swaps the first two
        # POINTS along axis 1, not the x/y columns; the intended column swap
        # would be coord[:,:,[0,1]] = coord[:,:,[1,0]] — confirm.
        coord[:,[0,1]]=coord[:,[1,0]]
        # First pass brushes on the original content; later passes on the
        # previous composite so edits accumulate.
        if prog_run==1:
            ip = "images/content-images/"+orig
        else:
            ip = "images/output-images/"+sets+"/final.jpg"
        image, mask = free_form_roi(ip, coord)
        #combine(image, mask)
        save_mask(image, mask, style)
        combine_pil(ip, style)
        Show_Res()
        choice = Show_Res.choice
        #print("From main", choice)
        choice = choice[len(choice)-1]
        again = choice #input("Do you want to run it again? y/n ")
        # Reset the class-level state for the next iteration.
        Show_Res.choice=['y']
        Region_Coords.arr=[]
        # NOTE(review): 'style' is not a class attribute that is read anywhere;
        # this probably meant Region_Coords.curr_image=[1] — confirm.
        Region_Coords.style=[]
        Region_Coords.File="images/output-images/"+sets+"/final.jpg"
        prog_run+=1
    # Archive the composite and run super-resolution on it.
    if not os.path.exists('images/output-images/'+sets+'/final'):
        os.mkdir('images/output-images/'+sets+'/final')
    shutil.move("images/output-images/"+sets+"/final.jpg", 'images/output-images/'+sets+'/final/final.jpg')
    ESR_gan(sets)
    print("THANK YOU")
|
from openpyxl import Workbook
from openpyxl import load_workbook
from zlib import crc32
import sys
import glob
import logging
import xml.etree.ElementTree as ET
def GetCrc32(filename):
    """Return the CRC-32 checksum of the file's entire contents."""
    with open(filename, 'rb') as fh:
        contents = fh.read()
    return crc32(contents)
def strnset(s, ch, n):
    """Replace everything from index ``n`` onward with ``ch``.

    e.g. strnset('pkg.uxz', '.xml', -4) -> 'pkg.xml'.
    (First parameter renamed from ``str`` to stop shadowing the builtin.)
    """
    return s[:n] + ch
#log setting
LOG_FORMAT = "%(asctime)s - %(levelname)s - %(message)s"
logging.basicConfig(filename='info.log', level=logging.DEBUG, format=LOG_FORMAT)
# if len(sys.argv) < 2:
#     print('You must enter the file')
#     exit(1)
# elif len(sys.argv) > 2:
#     print('Only one file is permitted')
#     exit(1)
#filename = sys.argv[1]
#search all files and store in an array
# Collect every package artifact one directory level down.
logging.info("Start search all files in Packages.")
listFile = []
listuxz = glob.glob("./*/*.uxz")
listbin = glob.glob("./*/*.bin")
listtgz = glob.glob("./*/*.tgz")
listexe = glob.glob("./*/*.exe")
listFile.extend(listuxz)
listFile.extend(listbin)
listFile.extend(listtgz)
listFile.extend(listexe)
logging.info("Finish searching.")
#list sort
logging.info("Sorting the list...")
sortFile = sorted(listFile)
#Create sheet for excel
wb = Workbook()
ws = wb.active
ws.title = "platform"
# NOTE(review): ws1..ws4 are created but never written to — confirm they
# are placeholders for future per-OS breakdowns.
ws1 = wb.create_sheet("win2019")
ws2 = wb.create_sheet("win2016")
ws3 = wb.create_sheet("rhel7")
ws4 = wb.create_sheet("suse12")
#Check all packages by crc32
# For each package: compute its CRC-32, read the expected CRC from the
# sibling .xml metadata file, and write both into the "platform" sheet.
row = 1
for n in sortFile:
    crcpkg = format(GetCrc32(n), 'x')
    print('{:s} {:8} {:s}'.format( n,' crc32: ', crcpkg))
    logging.info('{:s} {:8} {:s}'.format( n,' crc32: ', crcpkg))
    # Derive the metadata path by swapping the 4-char extension for .xml.
    tmpxml = strnset(n,".xml",-4)
    tree = ET.parse(tmpxml)
    # print(tree.getroot())
    root = tree.getroot()
    crcxml = root.findall(".//*[@NAME='crc']/VALUE")
    for tmp in crcxml:
        # print(tmp.text)
        ws.cell(column=4, row=row, value=tmp.text)
    ws.cell(column=1, row=row, value=n)
    ws.cell(column=2, row=row, value=crcpkg)
    ws.cell(column=3, row=row, value=tmpxml)
    row = row + 1
wb.save("compareCRC.xlsx")
logging.info("Mission Completed!")
# result = 'crc32.txt'
# f = open ('./' + result,'w')
# for n in sortFile:
# str = n;
# crc = format(getCrc32(n), 'x')
# print('{:s} {:8} {:x}'.format( n,' crc32: ', getCrc32(n)))
# f.write(str + ' page_crc32: ' + crc + '\n')
# f.close()
|
import math

import smbus
# Power management registers
power_mgmt_1 = 0x6b
power_mgmt_2 = 0x6c
def read_byte(adr):
    """Read one byte from device register ``adr`` (module-level bus/address)."""
    return bus.read_byte_data(address, adr)
def read_word(adr):
    """Read a big-endian 16-bit unsigned value starting at register ``adr``."""
    msb = bus.read_byte_data(address, adr)
    lsb = bus.read_byte_data(address, adr + 1)
    return (msb << 8) + lsb
def read_word_2c(adr):
    """Read a 16-bit register and interpret it as two's-complement signed."""
    raw = read_word(adr)
    return raw - 0x10000 if raw >= 0x8000 else raw
def dist(a, b):
    """Euclidean length of the 2-vector (a, b).

    Uses math.hypot, which also avoids overflow/underflow in the squares.
    (Also fixes a latent NameError: the file used ``math`` without importing it.)
    """
    return math.hypot(a, b)
def get_y_rotation(x, y, z):
    """Rotation about the y axis, in degrees, from accelerometer components."""
    return -math.degrees(math.atan2(x, dist(y, z)))
def get_x_rotation(x, y, z):
    """Rotation about the x axis, in degrees, from accelerometer components."""
    return math.degrees(math.atan2(y, dist(x, z)))
def GetScaledAccelValues():
    """Read the three accelerometer axes and return them scaled to g units.

    Returns [x, y, z]. On any read failure the raw values are assumed to
    be 0 (best-effort behaviour preserved from the original).
    """
    try:
        # Grab Accelerometer Data
        accel_xout = read_word_2c(0x3b)
        accel_yout = read_word_2c(0x3d)
        accel_zout = read_word_2c(0x3f)
    except Exception:  # narrowed from bare except: keep KeyboardInterrupt/SystemExit alive
        print("** Read failed - assume 0 accel")
        accel_xout = 0
        accel_yout = 0
        accel_zout = 0
    # 16384 LSB/g at +/-2g; AFS_SEL=3 (configured below) gives +/-16g, hence *8.
    ScaledAccel = [accel_xout / 16384.0 * 8, accel_yout / 16384.0 * 8, accel_zout / 16384.0 * 8]
    return ScaledAccel
# Start talking to accelerometer - standard I2C stuff.
bus = smbus.SMBus(1) # SMBus(1) for Revision 2 boards; Revision 1 uses SMBus(0)
address = 0x68 # This is the address value read via the i2cdetect command
# Now wake the 6050 up as it starts in sleep mode
bus.write_byte_data(address, power_mgmt_1, 0)
# Write the setup to the accelerometer - value of 3 in AFS_SEL gives accel range of 16g. The register to use is 1C (28 decimal)
bus.write_byte_data(address, 0x1C, 0b00011000)
# Adjust sensitivity of accelerometer to maximum of 16g.
''' Not using gyro yet. - Not sure if I need/want it.
print( "gyro data")
print( "---------")
gyro_xout = read_word_2c(0x43)
gyro_yout = read_word_2c(0x45)
gyro_zout = read_word_2c(0x47)
print ("gyro_xout: ", gyro_xout, " scaled: ", (gyro_xout / 131))
print ("gyro_yout: ", gyro_yout, " scaled: ", (gyro_yout / 131))
print ("gyro_zout: ", gyro_zout, " scaled: ", (gyro_zout / 131))
print ("accelerometer data")
print ("------------------")
'''
|
#!/usr/bin/env python
from main.page.desktop_v3.purchase.pe_tx_payment_base import *
from selenium.webdriver.common.by import By
class TransactionListPage(TxPaymentBasePage):
    """Page object for the buyer's transaction/order-list page."""
    _page = "tx_order_list.pl"
    #LOCATORS
    #Search Invoice
    _search_invoice_bar_loc = (By.CSS_SELECTOR, 'div.row-fluid form#form-filter input.input-medium')
    _search_invoice_button_loc = (By.CSS_SELECTOR, 'div.row-fluid form#form-filter button.pull-left')
    _search_invoice_status_loc = (By.CSS_SELECTOR, 'div.row-fluid form#form-filter a.selectBox-dropdown')
    #Button Sembunyikan/Tampilkan Semua (hide/show all)
    _collapse_show_all_loc = (By.CSS_SELECTOR, 'a#collapse_show_all span#colapse_show_open')
    #Invoice Link
    _order_invoice_loc = (By.XPATH, '/html/body/div[1]/div[5]/div/div[2]/div[3]/div[1]/div/div/table/tbody/tr[1]/td[2]/a/b')
    #Snapshot product
    _snapshot_product_loc = (By.XPATH, '/html/body/div[1]/div[5]/div/div[2]/div[3]/div[1]/div/div/table/tbody/tr[2]/td/table/tbody/tr[3]/td[1]/span[2]/a')
    _last_order_loc = (By.XPATH, "//*[@class='list-box-content']/table")
    #Action
    def open(self, site=""):
        """Navigate the driver to this page on the given site."""
        self._open(site, self._page)
    def get_last_inv(self):
        """Return the invoice number text of the most recent order row."""
        last_order = self.driver.find_element(*self._last_order_loc)
        # The first <tr>'s id anchors the XPath to that specific order row.
        id_order = last_order.find_element(By.TAG_NAME, "tr").get_attribute("id")
        self.inv = self.driver.find_element(By.XPATH, "//*[@id='"+ id_order +"']/td[2]/a/b")
        return self.inv.text
|
"""
=================
Typenames
=================
[User]
* GraphUser
[Hashtag]
* GraphHashtag
[Post]
* GraphSidecar -> combination of videos or/and images
- GraphImage
- GraphVideo
[Story]
* GraphReel -> user story
* GraphHighlightReel -> user's story highlights
* GraphMASReel -> hashtag story
- GraphStoryImage
- GraphStoryVideo
"""
from instascrape.utils import get_biggest_media
class Container:
    """'Adapter' holding the properties of an Image / Video node
    (from Stories and Posts).

    Arguments:
        data: dictionary of the node as returned by Instagram

    Fields:
        typename: (see module docstring)
        thumbnail: url to the thumbnail
        size: x y dimensions of the media
        video_duration: only for 'GraphStoryVideo' and 'GraphVideo', None otherwise
        src: biggest in size source url
    """

    _MEDIA_TYPENAMES = ("GraphImage", "GraphStoryImage", "GraphVideo", "GraphStoryVideo")

    def __init__(self, data: dict):
        self.data = data

    def __repr__(self):
        return "<Container({0})>".format(self.typename)

    @property
    def typename(self) -> str:
        """One of [GraphImage, GraphVideo, GraphStoryImage, GraphStoryVideo]."""
        return self.data["__typename"]

    @property
    def thumbnail(self) -> str:
        """Thumbnail url; falls back to the display url, then empty string."""
        preview = self.data.get("thumbnail_src")
        return preview if preview else self.data.get("display_url", "")

    @property
    def size(self) -> dict:
        """Width and height of Instagram displayed media."""
        return self.data["dimensions"]

    @property
    def video_duration(self) -> float or None:
        """Duration in seconds; only present for GraphStoryVideo / GraphVideo."""
        return self.data.get("video_duration")

    @property
    def src(self) -> str:
        """Source url of the biggest available rendition of the media."""
        assert self.typename in self._MEDIA_TYPENAMES, "Invalid typename {0}".format(self.typename)
        if "Image" in self.typename:
            return get_biggest_media(self.data["display_resources"])["src"]
        renditions = self.data.get("video_resources", [])
        if renditions:
            return get_biggest_media(renditions)["src"]
        # no rendition list: fall back to the bare url (no width/height info)
        return self.data["video_url"]
def container(typename: str, data: dict) -> list:
    """Factory: wrap Instagram node data into a list of Container objects."""
    single_media = ("GraphImage", "GraphStoryImage", "GraphVideo", "GraphStoryVideo")
    if typename in single_media:
        return [Container(data)]
    # multi-item types: unwrap to the list of child nodes first
    if typename in ("GraphReel", "GraphMASReel", "GraphHighlightReel"):
        nodes = data["items"]
    elif typename == "GraphSidecar":
        nodes = [edge["node"] for edge in data["edge_sidecar_to_children"]["edges"]]
    else:
        nodes = data
    return [Container(node) for node in nodes]
|
#!/usr/bin/env python
# encoding: utf-8
# @author: Zhipeng Ye
# @contact: Zhipeng.ye19@xjtlu.edu.cn
# @file: filter_gram2.py
# @time: 2020-01-16 18:43
# @desc:
import os
import codecs
import sys
sys.stdout = codecs.getwriter('utf-8')(sys.stdout.detach())
def verifyContent(words, word_set):
    """Return True iff every word in ``words`` is present in ``word_set``.

    (Rewritten with ``all`` — short-circuits on the first miss, same as
    the original loop.)
    """
    return all(word in word_set for word in words)
if __name__ == "__main__":
word_set =set()
with open('/Data_SSD/zhipengye/zhipengye/data/filter_rule/WordSegDict.small',encoding='utf-8') as file:
for line in file:
word_set.add(line.strip())
content_list = []
with open('/Data_SSD/zhipengye/zhipengye/data/gram2/gram2_count',encoding='utf-8') as file:
for line in file:
segments = line.split('\t')
words = segments[0].split(' ')
flag = verifyContent(words, word_set)
if flag:
content_list.append(line.strip())
if len(content_list) >= 1000000:
with open('/Data_SSD/zhipengye/zhipengye/data/gram2/filtered_gram2_count1', 'a', encoding='utf-8') as file:
file.write('\n'.join(content_list))
content_list = []
print('1000000 rows have been processed!')
with open('/Data_SSD/zhipengye/zhipengye/data/gram2/filtered_gram2_count1', 'a',encoding='utf-8') as file:
file.write('\n'.join(content_list))
|
import random
from tkinter import Tk, Canvas
def create_board(width, height):
    """Create a height x width grid of None (None = covered, un-counted cell).

    Rewritten as a comprehension; each row is an independent list.
    """
    return [[None] * width for _ in range(height)]
def bury_mines(gameboard, n):
    """Randomly place n mines (cells set to '-1'), never twice in one cell."""
    height, width = len(gameboard), len(gameboard[0])
    placed = 0
    while placed < n:
        r = random.randint(0, height - 1)
        c = random.randint(0, width - 1)
        if gameboard[r][c] == '-1':
            continue  # already a mine here; redraw
        gameboard[r][c] = '-1'
        placed += 1
def get_minecount(gameboard, y, x):
    """Count mines in the (up to 3x3) neighborhood of (y, x), itself included."""
    height, width = len(gameboard), len(gameboard[0])
    count = 0
    for row in range(max(y - 1, 0), min(y + 2, height)):
        for col in range(max(x - 1, 0), min(x + 2, width)):
            if gameboard[row][col] == '-1':
                count += 1
    return count
def uncover_board(gameboard, y, x):
    """Uncover cell (y, x); flood-fill outward through zero-count cells.

    Already-uncovered cells (numbers) and mines ('-1') are left untouched.
    Fixes the original's redundant ``if cell != None: ... elif cell == None:``
    double test and the non-idiomatic ``!= None`` comparisons.
    """
    if gameboard[y][x] is not None:  # number or mine: nothing to do
        return
    count = get_minecount(gameboard, y, x)
    gameboard[y][x] = count
    if count > 0:
        return
    # zero neighbors: recursively uncover the whole neighborhood
    height, width = len(gameboard), len(gameboard[0])
    for row in range(max(y - 1, 0), min(y + 2, height)):
        for col in range(max(x - 1, 0), min(x + 2, width)):
            uncover_board(gameboard, row, col)
def check_won(gameboard):
    """True once no covered (None) cells remain.

    Mines are stored as '-1' (never None), so the player wins when every
    non-mine cell has been uncovered. Rewritten with ``all``.
    """
    return all(None not in row for row in gameboard)
def run():
    """Build an 8x8 board with 7 mines and run the Tk game loop."""
    width = 8
    height = 8
    # NOTE(review): create_board's signature is (width, height) but it is
    # called here as (height, width) — harmless while both are 8, but will
    # transpose the board if they ever differ. Confirm.
    board = create_board(height, width)
    bury_mines(board, 7)
    root = Tk()
    root.wm_title ("Minesweeper")
    heightpxls = 50 * height  # 50 px per cell
    widthpxls = 50 * width
    canvas = Canvas(master=root, height=heightpxls,width = widthpxls)
    canvas.pack()
    def handle_click(event):
        # Map the pixel click to board coordinates, then uncover.
        x = event.x // 50
        y = event.y // 50
        uncover_board(board, y, x)
        print(y,x)
        if board[y][x] == '-1':
            # Clicked a mine: freeze input and show the loss screen.
            canvas.unbind("<Button-1>")
            canvas.create_rectangle ((x*50), (y*50), ((x*50)+50), ((y*50)+50), fill="red", outline="white")
            canvas.create_text ((heightpxls//2),(widthpxls//2),font="arial 36", text = "YOU LOSE")
        elif check_won(board) == True:
            display_board(board, canvas)
            canvas.unbind("<Button-1>")
            canvas.create_text ((heightpxls//2),(widthpxls//2),font="arial 36", text = "YOU WIN")
        else:
            display_board(board, canvas)
    canvas.bind("<Button-1>", handle_click)
    display_board(board, canvas)
    root.mainloop()
#widthpxls = canvas.winfo_width()
#heightpxls = canvas.winfo_height()
def display_board(board, canvas):
    """Redraw every cell: uncovered counts in light grey, everything else grey.

    Covered cells (None) and mines ('-1') are drawn identically so mines
    stay hidden until clicked.
    """
    row_length = len(board[0])
    for i in range(0,len(board)):
        for n in range (0, row_length):
            x1 = (n * 50 ) #+ 10
            y1 = (i * 50) #+ 10
            # an int 0..8 means the cell has been uncovered
            if board[i][n] in range (0, 9):
                canvas.create_rectangle(x1, y1, (x1 +50), (y1 + 50), fill= "light grey", outline = "white")
                canvas.create_text((x1 + 25),(y1 + 25),font="arial 20", text=str(get_minecount (board, i, n)))
            elif board[i][n] == None:
                canvas.create_rectangle(x1, y1, (x1 +50), (y1 + 50), fill= "grey", outline = "white")
            else:
                # mines ('-1') deliberately rendered the same as covered cells
                canvas.create_rectangle(x1, y1, (x1 +50), (y1 + 50), fill= "grey", outline = "white")
run()  # start the game immediately when the script is executed
|
# Read a product code X (expected 1..5) and a quantity Y from one input line,
# then print the total price for Y units of product X.
X, Y = input().split()
X = int(X)
Y = int(Y)
# Unit prices indexed by product code 1..5 (hence the X-1 below).
V = [4.00, 4.50, 5.00, 2.00, 1.50]
print('Total: R$ {:.2f}'.format(V[X-1]*Y))
|
#
# Copyright 2008-2009, Blue Dynamics Alliance, Austria - http://bluedynamics.com
#
# GNU General Public Licence Version 2 or later
__author__ = """Robert Niederreiter <rnix@squarewave.at>"""
__docformat__ = 'plaintext'
import logging
logger = logging.getLogger('IntelliDateTime')
logger.info('Installing Product')
# Re-export the widget/field so consumers can import them from the package root.
from widget import IntelliDateTimeWidget
from field import IntelliDateTimeField
# Register the skins directory with CMF so Zope picks up the product skins.
from Products.CMFCore.DirectoryView import registerDirectory
from config import GLOBALS
registerDirectory('skins', GLOBALS)
|
#
# This file is part of LUNA.
#
# Copyright (c) 2020 Great Scott Gadgets <info@greatscottgadgets.com>
# SPDX-License-Identifier: BSD-3-Clause
""" Endpoint interfaces for working with streams.
The endpoint interfaces in this module provide endpoint interfaces suitable for
connecting streams to USB endpoints.
"""
from amaranth import Elaboratable, Module, Signal
from ..endpoint import EndpointInterface
from ...stream import StreamInterface, USBOutStreamBoundaryDetector
from ..transfer import USBInTransferManager
from ....memory import TransactionalizedFIFO
class USBStreamInEndpoint(Elaboratable):
    """ Endpoint interface that transmits a simple data stream to a host.
    This interface is suitable for a single bulk or interrupt endpoint.
    This endpoint interface will automatically generate ZLPs when a stream packet would end without
    a short data packet. If the stream's ``last`` signal is tied to zero, then a continuous stream of
    maximum-length-packets will be sent with no inserted ZLPs.
    The ``flush`` input may be asserted to cause all pending data to be transmitted as soon as
    possible. When ``flush`` is asserted, packets of varying length will be sent as needed, according
    to the data available.
    This implementation is double buffered; and can store a single packet's worth of data while transmitting
    a second packet.
    Attributes
    ----------
    stream: StreamInterface, input stream
        Full-featured stream interface that carries the data we'll transmit to the host.
    flush: Signal(), input
        Assert to cause all pending data to be transmitted as soon as possible.
    discard: Signal(), input
        Assert to cause all pending data to be discarded.
    interface: EndpointInterface
        Communications link to our USB device.
    Parameters
    ----------
    endpoint_number: int
        The endpoint number (not address) this endpoint should respond to.
    max_packet_size: int
        The maximum packet size for this endpoint. Should match the wMaxPacketSize provided in the
        USB endpoint descriptor.
    """
    def __init__(self, *, endpoint_number, max_packet_size):
        self._endpoint_number = endpoint_number
        self._max_packet_size = max_packet_size
        #
        # I/O port
        #
        self.stream       = StreamInterface()
        self.interface    = EndpointInterface()
        self.flush        = Signal()
        self.discard      = Signal()
    def elaborate(self, platform):
        m = Module()
        interface = self.interface
        # Create our transfer manager, which will be used to sequence packet transfers for our stream.
        m.submodules.tx_manager = tx_manager = USBInTransferManager(self._max_packet_size)
        m.d.comb += [
            # Always generate ZLPs; in order to pass along when stream packets terminate.
            tx_manager.generate_zlps   .eq(1),
            # We want to handle packets only that target our endpoint number.
            tx_manager.active          .eq(interface.tokenizer.endpoint == self._endpoint_number),
            # Connect up our transfer manager to our input stream, flush and discard control...
            tx_manager.transfer_stream .stream_eq(self.stream),
            tx_manager.flush           .eq(self.flush),
            tx_manager.discard         .eq(self.discard),
            # ... and our output stream (the transfer manager also drives the DATA0/DATA1 toggle)...
            interface.tx               .stream_eq(tx_manager.packet_stream),
            interface.tx_pid_toggle    .eq(tx_manager.data_pid),
            # ... and connect through our token/handshake signals.
            interface.tokenizer        .connect(tx_manager.tokenizer),
            tx_manager.handshakes_out  .connect(interface.handshakes_out),
            interface.handshakes_in    .connect(tx_manager.handshakes_in)
        ]
        return m
class USBMultibyteStreamInEndpoint(Elaboratable):
    """ Endpoint interface that transmits a simple data stream to a host.
    This interface is suitable for a single bulk or interrupt endpoint.
    This variant accepts streams with payload sizes that are a multiple of one byte; data is always
    transmitted to the host in little-endian byte order.
    This endpoint interface will automatically generate ZLPs when a stream packet would end without
    a short data packet. If the stream's ``last`` signal is tied to zero, then a continuous stream of
    maximum-length-packets will be sent with no inserted ZLPs.
    This implementation is double buffered; and can store a single packets worth of data while transmitting
    a second packet.
    Attributes
    ----------
    stream: StreamInterface, input stream
        Full-featured stream interface that carries the data we'll transmit to the host.
    interface: EndpointInterface
        Communications link to our USB device.
    Parameters
    ----------
    byte_width: int
        The number of bytes to be accepted at once.
    endpoint_number: int
        The endpoint number (not address) this endpoint should respond to.
    max_packet_size: int
        The maximum packet size for this endpoint. Should match the wMaxPacketSize provided in the
        USB endpoint descriptor.
    """
    def __init__(self, *, byte_width, endpoint_number, max_packet_size):
        # Width of the input stream, in bytes; each accepted word is serialized LSB-first.
        self._byte_width = byte_width
        self._endpoint_number = endpoint_number
        self._max_packet_size = max_packet_size
        #
        # I/O port
        #
        # Input stream is byte_width bytes wide; it is narrowed to single bytes internally.
        self.stream = StreamInterface(payload_width=byte_width * 8)
        self.interface = EndpointInterface()
    def elaborate(self, platform):
        m = Module()
        # Create our core, single-byte-wide endpoint, and attach it directly to our interface.
        m.submodules.stream_ep = stream_ep = USBStreamInEndpoint(
            endpoint_number=self._endpoint_number,
            max_packet_size=self._max_packet_size
        )
        # NOTE(review): this rebinds the child's `interface` attribute wholesale instead of
        # connecting individual signals; assumes the assignment lands before the child elaborates.
        stream_ep.interface = self.interface
        # Create semantic aliases for byte-wise and word-wise streams;
        # so the code below reads more clearly.
        byte_stream = stream_ep.stream
        word_stream = self.stream
        # We'll put each word to be sent through an shift register
        # that shifts out words a byte at a time.
        data_shift = Signal.like(word_stream.payload)
        # Latched versions of our first and last signals.
        first_latched = Signal()
        last_latched = Signal()
        # Count how many bytes we have left to send.
        bytes_to_send = Signal(range(0, self._byte_width + 1))
        # Always provide our inner transmitter with the least byte of our shift register.
        m.d.comb += byte_stream.payload.eq(data_shift[0:8])
        with m.FSM(domain="usb"):
            # IDLE: transmitter is waiting for input
            with m.State("IDLE"):
                m.d.comb += word_stream.ready.eq(1)
                # Once we get a send request, fill in our shift register, and start shifting.
                with m.If(word_stream.valid):
                    m.d.usb += [
                        data_shift .eq(word_stream.payload),
                        first_latched .eq(word_stream.first),
                        last_latched .eq(word_stream.last),
                        bytes_to_send .eq(self._byte_width - 1),
                    ]
                    m.next = "TRANSMIT"
            # TRANSMIT: actively send each of the bytes of our word
            with m.State("TRANSMIT"):
                m.d.comb += byte_stream.valid.eq(1)
                # Once the byte-stream is accepting our input...
                with m.If(byte_stream.ready):
                    # The counter starts at byte_width - 1 and counts down to 0.
                    is_first_byte = (bytes_to_send == self._byte_width - 1)
                    is_last_byte = (bytes_to_send == 0)
                    # Pass through our First and Last signals, but only on the first and
                    # last bytes of our word, respectively.
                    m.d.comb += [
                        byte_stream.first .eq(first_latched & is_first_byte),
                        byte_stream.last .eq(last_latched & is_last_byte)
                    ]
                    # ... if we have bytes left to send, move to the next one.
                    with m.If(bytes_to_send > 0):
                        m.d.usb += [
                            bytes_to_send .eq(bytes_to_send - 1),
                            data_shift .eq(data_shift[8:]),
                        ]
                    # Otherwise, complete the frame.
                    with m.Else():
                        m.d.comb += word_stream.ready.eq(1)
                        # If we still have data to send, move to the next byte...
                        # (reload the shift register directly, skipping the IDLE hop).
                        with m.If(self.stream.valid):
                            m.d.usb += [
                                data_shift .eq(word_stream.payload),
                                first_latched .eq(word_stream.first),
                                last_latched .eq(word_stream.last),
                                bytes_to_send .eq(self._byte_width - 1),
                            ]
                        # ... otherwise, move to our idle state.
                        with m.Else():
                            m.next = "IDLE"
        return m
class USBStreamOutEndpoint(Elaboratable):
    """ Endpoint interface that receives data from the host, and produces a simple data stream.
    This interface is suitable for a single bulk or interrupt endpoint.
    Attributes
    ----------
    stream: StreamInterface, output stream
        Full-featured stream interface that carries the data we've received from the host.
    interface: EndpointInterface
        Communications link to our USB device.
    Parameters
    ----------
    endpoint_number: int
        The endpoint number (not address) this endpoint should respond to.
    max_packet_size: int
        The maximum packet size for this endpoint. If there isn't `max_packet_size` space in
        the endpoint buffer, this endpoint will NAK (or participate in the PING protocol.)
    buffer_size: int, optional
        The total amount of data we'll keep in the buffer; typically two max-packet-sizes or more.
        Defaults to twice the maximum packet size.
    """
    def __init__(self, *, endpoint_number, max_packet_size, buffer_size=None):
        self._endpoint_number = endpoint_number
        self._max_packet_size = max_packet_size
        # NOTE(review): docstring says the default is twice the maximum packet size, but the
        # actual default is one byte less than that -- confirm this is intentional.
        self._buffer_size = buffer_size if (buffer_size is not None) else (self._max_packet_size * 2 - 1)
        #
        # I/O port
        #
        self.stream = StreamInterface()
        self.interface = EndpointInterface()
    def elaborate(self, platform):
        m = Module()
        stream = self.stream
        interface = self.interface
        tokenizer = interface.tokenizer
        #
        # Internal state.
        #
        # Stores the data toggle value we expect.
        expected_data_toggle = Signal()
        # Stores whether we've had a receive overflow.
        overflow = Signal()
        # Stores a count of received bytes in the current packet.
        rx_cnt = Signal(range(self._max_packet_size))
        # Stores whether we're in the middle of a transfer.
        transfer_active = Signal()
        #
        # Receiver logic.
        #
        # Create a version of our receive stream that has added `first` and `last` signals, which we'll use
        # internally as our main stream.
        m.submodules.boundary_detector = boundary_detector = USBOutStreamBoundaryDetector()
        m.d.comb += [
            interface.rx .stream_eq(boundary_detector.unprocessed_stream),
            boundary_detector.complete_in .eq(interface.rx_complete),
            boundary_detector.invalid_in .eq(interface.rx_invalid),
        ]
        rx = boundary_detector.processed_stream
        rx_first = boundary_detector.first
        rx_last = boundary_detector.last
        # Create a Rx FIFO.
        # Entries are 10 bits wide: 8 data bits plus `last` (bit 8) and `first` (bit 9) flags.
        m.submodules.fifo = fifo = TransactionalizedFIFO(width=10, depth=self._buffer_size, name="rx_fifo", domain="usb")
        #
        # Create some basic conditionals that will help us make decisions.
        #
        endpoint_number_matches = (tokenizer.endpoint == self._endpoint_number)
        targeting_endpoint = endpoint_number_matches & tokenizer.is_out
        expected_pid_match = (interface.rx_pid_toggle == expected_data_toggle)
        sufficient_space = (fifo.space_available >= self._max_packet_size)
        ping_response_requested = endpoint_number_matches & tokenizer.is_ping & tokenizer.ready_for_response
        data_response_requested = targeting_endpoint & tokenizer.is_out & interface.rx_ready_for_response
        okay_to_receive = targeting_endpoint & expected_pid_match
        data_is_lost = okay_to_receive & rx.next & rx.valid & fifo.full
        data_accepted = okay_to_receive & ~data_is_lost & ~overflow
        should_skip = targeting_endpoint & ~expected_pid_match
        # High once the byte counter reaches a full max-size packet (counter starts at 0).
        full_packet = rx_cnt == self._max_packet_size - 1
        m.d.comb += [
            # We'll always populate our FIFO directly from the receive stream; but we'll also include our
            # "short packet detected" signal, as this indicates that we're detecting the last byte of a transfer.
            fifo.write_data[0:8] .eq(rx.payload),
            fifo.write_data[8] .eq(rx_last & ~full_packet),
            fifo.write_data[9] .eq(rx_first & ~transfer_active),
            fifo.write_en .eq(okay_to_receive & rx.next & rx.valid & ~fifo.full),
            # We'll keep data if our packet finishes with a valid CRC and no overflow; and discard it otherwise.
            fifo.write_commit .eq(targeting_endpoint & boundary_detector.complete_out & ~overflow),
            fifo.write_discard .eq(targeting_endpoint & (boundary_detector.invalid_out | (boundary_detector.complete_out & overflow))),
            # We'll ACK each packet if it's received correctly; _or_ if we skipped the packet
            # due to a PID sequence mismatch. If we get a PID sequence mismatch, we assume that
            # we missed a previous ACK from the host; and ACK without accepting data [USB 2.0: 8.6.3].
            interface.handshakes_out.ack .eq(
                (data_response_requested & data_accepted) |
                (ping_response_requested & sufficient_space) |
                (data_response_requested & should_skip)
            ),
            # We'll NAK any time we want to accept a packet, but we don't have enough room.
            interface.handshakes_out.nak .eq(
                (data_response_requested & ~data_accepted & ~should_skip) |
                (ping_response_requested & ~sufficient_space)
            ),
            # Our stream data always comes directly out of the FIFO; and is valid
            # whenever our FIFO actually has data for us to read.
            stream.valid .eq(~fifo.empty),
            stream.payload .eq(fifo.read_data[0:8]),
            # Our `last` bit comes directly from the FIFO; and we know a `first` bit immediately
            # follows a `last` one.
            stream.last .eq(fifo.read_data[8]),
            stream.first .eq(fifo.read_data[9]),
            # Move to the next byte in the FIFO whenever our stream is advanced.
            fifo.read_en .eq(stream.ready),
            fifo.read_commit .eq(1)
        ]
        # Count bytes in packet.
        with m.If(fifo.write_en):
            m.d.usb += rx_cnt.eq(rx_cnt + 1)
        # Set the transfer active flag depending on whether this is a full packet.
        with m.If(rx_last):
            m.d.usb += transfer_active.eq(full_packet)
        # We'll set the overflow flag if we're receiving data we don't have room for.
        with m.If(data_is_lost):
            m.d.usb += overflow.eq(1)
        # We'll clear the overflow flag and byte counter when the packet is done.
        with m.Elif(fifo.write_commit | fifo.write_discard):
            m.d.usb += overflow.eq(0)
            m.d.usb += rx_cnt.eq(0)
        # We'll toggle our DATA PID each time we issue an ACK to the host [USB 2.0: 8.6.2].
        with m.If(data_response_requested & data_accepted):
            m.d.usb += expected_data_toggle.eq(~expected_data_toggle)
        return m
|
import admincommands, usercommands, supercommands, re, actions, spelling
# DEVELOPER: https://github.com/undefinedvalue0103/nullcore-1.0/
# Shared runtime objects, injected by the bot loader before init() is called.
vk = None
config = None
logging = None
utils = None
# `root` is referenced in init() and handle() but was never declared here;
# declare it like the other injected globals to avoid a NameError if the
# loader fails to inject it.
root = None
def init():
    """Propagate the shared runtime objects into every command sub-module."""
    submodules = (usercommands, admincommands, supercommands, actions, spelling)
    # Same assignment order as before: one attribute at a time, across all modules.
    shared = (
        ('vk', vk),
        ('config', config),
        ('utils', utils),
        ('logging', logging),
        ('root', root),
    )
    for attr_name, attr_value in shared:
        for submodule in submodules:
            setattr(submodule, attr_name, attr_value)
    logging.log('$FY[$FRCommands$FY] $FGInitialized')
# Maps a command-prefix character to the command category it selects.
comm_types = {
    '!': 'admin',
    '/': 'user',
    '$': 'super'
}
# User-facing response strings (Russian) and shared format constants.
localization = {
    'indev': 'Команда в разработке',
    'invalid_args': 'Неверные аргументы',
    'denied': 'Доступ запрещен',
    'not_permitted': 'Недостаточно прав',
    'format_time': '%H:%M:%S',
    '': 'NULL'
}
def parse_args(string):
    """Tokenize an argument string, keeping "quoted" sections as single tokens.

    Returns [] when the quotes in `string` are unbalanced.
    """
    if string.count('"') % 2:
        return []
    segments = re.split('"*"', string)
    args = []
    # `parity` advances only for processed segments: odd positions are the
    # quoted sections, even positions are split on spaces.
    parity = 0
    for segment in segments:
        if segment in (' ', ''):
            continue
        if parity % 2:
            args.append(segment)
        else:
            args.extend(token for token in segment.split(' ') if token)
        parity += 1
    return args
def check_alias(commandname):
    """Resolve a command alias to its canonical command name (lowercased)."""
    lowered = commandname.lower()
    for canonical, aliases in config.get('command_aliases').items():
        if lowered in aliases:
            return canonical
    # Not an alias: the name itself is canonical.
    return lowered
def parse_arguments(text):
    """Split raw message text into (ok, type, name, arg-line, arg-list)."""
    if not text:
        return (False, 'Too short', None, None, None)
    if text[0] not in '!/$':
        return (False, 'Invalid prefix', None, None, None)
    words = text.split(' ')
    comm_type = comm_types[text[0]]
    # Strip the prefix character before resolving aliases.
    comm_name = check_alias(words[0][1:])
    comm_argl = ' '.join(words[1:]) if len(words) >= 2 else ''
    return True, comm_type, comm_name, comm_argl, parse_args(comm_argl)
def check_permission(user_id, permission):
    """Return True when `user_id` holds `permission` (or 'full_access').

    The configured developer always has every permission. Any lookup
    failure (missing permissions entry, bad config value) denies access.
    """
    try:
        if user_id == config.get('developer'):
            return True
        # Look up the comma-separated permission list once (the original
        # fetched it twice) and test both the named permission and the
        # full_access wildcard against it.
        granted = config.get('permissions.%s' % user_id).split(',')
        return permission.lower() in granted or 'full_access' in granted
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; missing entries still mean "denied".
        return False
def handle(message):
    """Route an incoming message to the matching handler and return its response.

    Precedence: chat service actions first, then the spelling module,
    then prefixed commands ('!' admin, '/' user, '$' super).
    Returns the response text, or None when nothing matched.
    """
    # Chat service actions (invites, kicks, etc.) take priority over text handling.
    if 'action' in message:
        logging.log('$FY[$FRCommands$FY][$FRACTION$FY][$FR%7s$FY][$FB%9s$FY]%s $FG$BB%s'%(
            message['id'],
            message['user_id'],
            '[$FC%3s$FY]'%message['chat_id'] if 'chat_id' in message else '',
            message['action']))
        response = actions.handle(message)
        logging.log('$FY[$FRCommands$FY][$FR%7s$FY]$FBResponse: $FM%s'%(message['id'], repr(response)))
        return response
    # The spelling module may respond to plain text with no command prefix.
    response = spelling.handle(message['body'], message)
    if response not in ['', None]:
        logging.log('$FY[$FRCommands$FY][$FMEXEC$FY][$FR%7s$FY][$FMSPELLING$FY][$FB%9s$FY]%s $B_$FC%s '%(
            message['id'],
            message['user_id'],
            '[$FC%3s$FY]'%message['chat_id'] if 'chat_id' in message else '',
            message['body']))
        logging.log('$FY[$FRCommands$FY][$FMEXEC$FY][$FR%7s$FY]$FBResponse: $FM%s'%(message['id'], repr(response)))
        return response
    response = None
    # Parse the message body into command type/name/argument-line/argument-list.
    (succ, typ, nam, argl, arg) = parse_arguments(message['body'])
    if typ in comm_types.values():
        # NOTE(review): assumes `root` was injected by the loader before handling.
        root.handle_cmd(typ)
    if typ == 'admin':
        # Admin commands require the permission named after the command itself.
        if not check_permission(message['user_id'], nam):
            logging.log('$FY[$FRCommands$FY][$FMEXEC$FY][$FR%7s$FY][$FRADMIN$FY][$FB%9s$FY]%s $FG$BB%s $B_$FC%s $BY$FRDENIED$B_'%(
                message['id'],
                message['user_id'],
                '[$FC%3s$FY]'%message['chat_id'] if 'chat_id' in message else '',
                nam,
                argl))
            return localization['not_permitted']
        logging.log('$FY[$FRCommands$FY][$FMEXEC$FY][$FR%7s$FY][$FRADMIN$FY][$FB%9s$FY]%s $FG$BB%s $B_$FC%s '%(
            message['id'],
            message['user_id'],
            '[$FC%3s$FY]'%message['chat_id'] if 'chat_id' in message else '',
            nam,
            argl))
        response = admincommands.handle(nam, arg, message, argl)
        logging.log('$FY[$FRCommands$FY][$FMEXEC$FY][$FR%7s$FY]$FBResponse: $FM%s'%(message['id'], repr(response)))
        return response
    elif typ == 'user':
        # User commands require no permission check.
        logging.log('$FY[$FRCommands$FY][$FMEXEC$FY][$FR%7s$FY][$FGUSER $FY][$FB%9s$FY]%s $FG$BB%s $B_$FC%s'%(
            message['id'],
            message['user_id'],
            '[$FC%3s$FY]'%message['chat_id'] if 'chat_id' in message else '',
            nam,
            argl))
        response = usercommands.handle(nam, arg, message, argl)
        logging.log('$FY[$FRCommands$FY][$FMEXEC$FY][$FR%7s$FY]$FBResponse: $FM%s'%(message['id'], repr(response)))
        return response
    elif typ == 'super':
        # Super commands require the full_access permission.
        response = localization['indev']
        if not check_permission(message['user_id'], 'full_access'):
            logging.log('$FY[$FRCommands$FY][$FMEXEC$FY][$FR%7s$FY][$FRADMIN$FY][$FB%9s$FY]%s $FG$BB%s $B_$FC%s $BY$FRDENIED$B_'%(
                message['id'],
                message['user_id'],
                '[$FC%3s$FY]'%message['chat_id'] if 'chat_id' in message else '',
                nam,
                argl))
            return localization['not_permitted']
        logging.log('$FY[$FRCommands$FY][$FMEXEC$FY][$FR%7s$FY][$FRSUPER$FY][$FB%9s$FY]%s $FG$BB%s $B_$FC%s '%(
            message['id'],
            message['user_id'],
            '[$FC%3s$FY]'%message['chat_id'] if 'chat_id' in message else '',
            nam,
            argl))
        response = supercommands.handle(nam, arg, message, argl)
        logging.log('$FY[$FRCommands$FY][$FMEXEC$FY][$FR%7s$FY]$FBResponse: $FM%s'%(message['id'], repr(response)))
        return response
|
# Arrays: Left Rotation
# Cracking the Coding Interview Challenge
# https://www.hackerrank.com/challenges/ctci-array-left-rotation
def array_left_rotation(a, n, k):
    """Return a copy of `a` rotated left by `k` positions.

    Parameters
    ----------
    a : list
        The array to rotate.
    n : int
        Declared length of `a` (kept for interface compatibility; unused).
    k : int
        Number of left rotations; values >= len(a) wrap around
        (the original indexed out of bounds in that case).
    """
    if not a:
        return []
    # Normalize k so k >= len(a) and k == len(a) both behave correctly.
    k %= len(a)
    # A left rotation is just the tail followed by the head.
    return a[k:] + a[:k]
# -------------- Provided --------------
# Read "n k" (array length and rotation count) from stdin.
n, k = map(int, input().strip().split(' '))
# Read the n space-separated array elements.
a = list(map(int, input().strip().split(' ')))
answer = array_left_rotation(a, n, k);
# Print the rotated array, space-separated.
print(*answer, sep=' ')
|
def SumSquares(lst):
    """ sum_squares == PEP8 (forced PascalCase by Codewars) --
    recursively sum the squares of every number in an arbitrarily
    nested list; a bare number is simply squared. """
    try:
        items = iter(lst)
    except TypeError:
        # Not iterable: treat it as a leaf number.
        return lst ** 2
    return sum(SumSquares(item) for item in items)
|
# coding our association rules algorithm into python notes
'''
our algorithm essentially manipulates our tables T and C with counts
how do we store these tables?
we do not want to store all of the pairs if we don't have to (keeps Table T lean)
we want our table to be sparse (many entries will be 0 and not need to be stored)
dictionary is a great way to store this data:
- the key will be a pair of letters
- the value will be the count of this pair
- we will only create an entry if there is at least one item pair
- otherwise we can assume that a missing entry is implicitly 0
- this is a clever way to exploit sparsity!
'''
'''
defaultDictionary: updating values for an existing key
if the key does not already exist, updating it raises an error
this means we must always write code that checks whether the key exists first
'''
# Demonstration: plain dicts raise KeyError when updating a missing key.
D = {'existing_key': 5}  # dictionary with one key-value pair
D['existing_key'] += 1  # will increment D to existing_key == 6
try:
    D['key_key'] += 1  # error: key does not exist
except KeyError:
    # The original left this line unguarded, so the whole notes module
    # crashed here and nothing after it ever ran; keep the demonstration
    # but let execution continue.
    pass
'''
The second attempt causes an error because 'new-key' is not yet a member of the
dictionary. So, a more correct approach would be to do the following:
'''
# LBYL pattern: explicitly check for the key before every update.
D = {'existing-key': 5} # Dictionary with one key-value pair
if 'existing-key' not in D:
    D['existing-key'] = 0
D['existing-key'] += 1
# Same check makes updating a brand-new key safe.
if 'new-key' not in D:
    D['new-key'] = 0
D['new-key'] += 1
'''
defaultDictionary hides the check!
we need to supply a 'factory function' to build the base dictionary
'''
from collections import defaultdict

# defaultdict(int) supplies int() == 0 for any missing key, so no
# explicit existence check is needed before updating.
D2 = defaultdict(int)    # empty dictionary
D2['existing-key'] = 5   # create one key-value pair
D2['existing-key'] += 1  # update -> 6
D2['new-key'] += 1       # missing key auto-initialised to 0, then incremented
print(D2)
|
import math
from torch.optim.lr_scheduler import _LRScheduler
from torch.optim.optimizer import Optimizer
class CyclicLR(_LRScheduler):
    """Sets the learning rate of each parameter group according to
    cyclical learning rate policy (CLR). The policy cycles the learning
    rate between two boundaries with a constant frequency, as detailed in
    the paper `Cyclical Learning Rates for Training Neural Networks`_.
    The distance between the two boundaries can be scaled on a per-iteration
    or per-cycle basis.
    Cyclical learning rate policy changes the learning rate after every batch.
    `step` should be called after a batch has been used for training.
    This class has three built-in policies, as put forth in the paper:
    "triangular":
        A basic triangular cycle w/ no amplitude scaling.
    "triangular2":
        A basic triangular cycle that scales initial amplitude by half each cycle.
    "exp_range":
        A cycle that scales initial amplitude by gamma**(cycle iterations) at each
        cycle iteration.
    This implementation was adapted from the github repo: `bckenstler/CLR`_
    Args:
        optimizer (Optimizer): Wrapped optimizer.
        base_lr (float or list): Initial learning rate which is the
            lower boundary in the cycle for each parameter group.
        max_lr (float or list): Upper learning rate boundaries in the cycle
            for each parameter group. Functionally,
            it defines the cycle amplitude (max_lr - base_lr).
            The lr at any cycle is the sum of base_lr
            and some scaling of the amplitude; therefore
            max_lr may not actually be reached depending on
            scaling function.
        step_size_up (int): Number of training iterations in the
            increasing half of a cycle. Default: 2000
        step_size_down (int): Number of training iterations in the
            decreasing half of a cycle. If step_size_down is None,
            it is set to step_size_up. Default: None
        mode (str): One of {triangular, triangular2, exp_range}.
            Values correspond to policies detailed above.
            If scale_fn is not None, this argument is ignored.
            Default: 'triangular'
        gamma (float): Constant in 'exp_range' scaling function:
            gamma**(cycle iterations)
            Default: 1.0
        scale_fn (function): Custom scaling policy defined by a single
            argument lambda function, where
            0 <= scale_fn(x) <= 1 for all x >= 0.
            If specified, then 'mode' is ignored.
            Default: None
        scale_mode (str): {'cycle', 'iterations'}.
            Defines whether scale_fn is evaluated on
            cycle number or cycle iterations (training
            iterations since start of cycle).
            Default: 'cycle'
        cycle_momentum (bool): If ``True``, momentum is cycled inversely
            to learning rate between 'base_momentum' and 'max_momentum'.
            Default: True
        base_momentum (float or list): Initial momentum which is the
            lower boundary in the cycle for each parameter group.
            Default: 0.8
        max_momentum (float or list): Upper momentum boundaries in the cycle
            for each parameter group. Functionally,
            it defines the cycle amplitude (max_momentum - base_momentum).
            The momentum at any cycle is the difference of max_momentum
            and some scaling of the amplitude; therefore
            base_momentum may not actually be reached depending on
            scaling function. Default: 0.9
        last_epoch (int): The index of the last batch. This parameter is used when
            resuming a training job. Since `step()` should be invoked after each
            batch instead of after each epoch, this number represents the total
            number of *batches* computed, not the total number of epochs computed.
            When last_epoch=-1, the schedule is started from the beginning.
            Default: -1
    Example:
        >>> optimizer = torch.optim.SGD(model.parameters(), lr=0.1, momentum=0.9)
        >>> scheduler = CyclicLR(optimizer, base_lr=0.01, max_lr=0.1)
        >>> data_loader = torch.utils.data.DataLoader(...)
        >>> for epoch in range(10):
        >>>     for batch in data_loader:
        >>>         train_batch(...)
        >>>         scheduler.step()
    .. _Cyclical Learning Rates for Training Neural Networks: https://arxiv.org/abs/1506.01186
    .. _bckenstler/CLR: https://github.com/bckenstler/CLR
    """
    def __init__(
        self,
        optimizer,
        base_lr,
        max_lr,
        step_size_up=2000,
        step_size_down=None,
        mode="triangular",
        gamma=1.0,
        scale_fn=None,
        scale_mode="cycle",
        cycle_momentum=True,
        base_momentum=0.8,
        max_momentum=0.9,
        last_epoch=-1,
    ):
        if not isinstance(optimizer, Optimizer):
            raise TypeError("{} is not an Optimizer".format(type(optimizer).__name__))
        self.optimizer = optimizer
        # NOTE(review): `base_lrs` is local here; `self.base_lrs` read in get_lr()
        # is expected to be populated by the `_LRScheduler` base class -- confirm.
        base_lrs = self._format_param("base_lr", optimizer, base_lr)
        if last_epoch == -1:
            # Fresh run (not resuming): seed each group's lr with its base_lr.
            for lr, group in zip(base_lrs, optimizer.param_groups):
                group["lr"] = lr
        self.max_lrs = self._format_param("max_lr", optimizer, max_lr)
        step_size_up = float(step_size_up)
        step_size_down = (
            float(step_size_down) if step_size_down is not None else step_size_up
        )
        # Total iterations per cycle, and the fraction spent in the rising half.
        self.total_size = step_size_up + step_size_down
        self.step_ratio = step_size_up / self.total_size
        if mode not in ["triangular", "triangular2", "exp_range"] and scale_fn is None:
            raise ValueError("mode is invalid and scale_fn is None")
        self.mode = mode
        self.gamma = gamma
        if scale_fn is None:
            # Select the built-in scaling policy for the requested mode.
            if self.mode == "triangular":
                self.scale_fn = self._triangular_scale_fn
                self.scale_mode = "cycle"
            elif self.mode == "triangular2":
                self.scale_fn = self._triangular2_scale_fn
                self.scale_mode = "cycle"
            elif self.mode == "exp_range":
                self.scale_fn = self._exp_range_scale_fn
                self.scale_mode = "iterations"
        else:
            # A user-supplied scale_fn overrides `mode` entirely.
            self.scale_fn = scale_fn
            self.scale_mode = scale_mode
        self.cycle_momentum = cycle_momentum
        if cycle_momentum:
            if "momentum" not in optimizer.defaults:
                raise ValueError(
                    "optimizer must support momentum with `cycle_momentum` option enabled"
                )
            base_momentums = self._format_param("base_momentum", optimizer, base_momentum)
            if last_epoch == -1:
                for momentum, group in zip(base_momentums, optimizer.param_groups):
                    group["momentum"] = momentum
            self.base_momentums = list(
                map(lambda group: group["momentum"], optimizer.param_groups)
            )
            self.max_momentums = self._format_param("max_momentum", optimizer, max_momentum)
        super(CyclicLR, self).__init__(optimizer, last_epoch)
    def _format_param(self, name, optimizer, param):
        """Return correctly formatted lr/momentum for each param group."""
        if isinstance(param, (list, tuple)):
            if len(param) != len(optimizer.param_groups):
                raise ValueError(
                    "expected {} values for {}, got {}".format(
                        len(optimizer.param_groups), name, len(param)
                    )
                )
            return param
        else:
            # Scalar: broadcast the same value to every param group.
            return [param] * len(optimizer.param_groups)
    def _triangular_scale_fn(self, x):
        # 'triangular': constant amplitude for every cycle.
        return 1.0
    def _triangular2_scale_fn(self, x):
        # 'triangular2': amplitude halves every cycle (x is the cycle number).
        return 1 / (2.0 ** (x - 1))
    def _exp_range_scale_fn(self, x):
        # 'exp_range': amplitude decays by gamma**x (x is the iteration count).
        return self.gamma ** (x)
    def get_lr(self):
        """Calculates the learning rate at batch index. This function treats
        `self.last_epoch` as the last batch index.
        If `self.cycle_momentum` is ``True``, this function has a side effect of
        updating the optimizer's momentum.
        """
        # Which cycle we're in (1-based), and the position within it (0 <= x < 1).
        cycle = math.floor(1 + self.last_epoch / self.total_size)
        x = 1.0 + self.last_epoch / self.total_size - cycle
        if x <= self.step_ratio:
            # Rising half of the cycle: scale factor goes 0 -> 1.
            scale_factor = x / self.step_ratio
        else:
            # Falling half: scale factor goes 1 -> 0.
            scale_factor = (x - 1) / (self.step_ratio - 1)
        lrs = []
        for base_lr, max_lr in zip(self.base_lrs, self.max_lrs):
            base_height = (max_lr - base_lr) * scale_factor
            if self.scale_mode == "cycle":
                lr = base_lr + base_height * self.scale_fn(cycle)
            else:
                lr = base_lr + base_height * self.scale_fn(self.last_epoch)
            lrs.append(lr)
        if self.cycle_momentum:
            # Momentum cycles inversely to the learning rate.
            momentums = []
            for base_momentum, max_momentum in zip(self.base_momentums, self.max_momentums):
                base_height = (max_momentum - base_momentum) * scale_factor
                if self.scale_mode == "cycle":
                    momentum = max_momentum - base_height * self.scale_fn(cycle)
                else:
                    momentum = max_momentum - base_height * self.scale_fn(self.last_epoch)
                momentums.append(momentum)
            # Side effect: write the cycled momentum back into the optimizer.
            for param_group, momentum in zip(self.optimizer.param_groups, momentums):
                param_group["momentum"] = momentum
        return lrs
|
from utils.function.setup import *
from utils.lib.user_data import *
from main.activity.desktop_v3.activity_login import *
from main.activity.desktop_v3.activity_logout import *
from main.activity.desktop_v3.activity_user_settings import *
import unittest
class TestBank(unittest.TestCase):
    """Bank-account management scenarios (add/edit/delete/default) on desktop v3.

    Each test logs in as `user8`, performs one bank-settings action, and
    logs out again; the shared login/activity setup lives in
    `_start_session` instead of being repeated in every test.
    """

    # Target deployment the tests run against.
    _site = "live"

    # Fixture data for the bank forms.
    # NOTE(review): the attribute name shadows the `dict` builtin inside this
    # class body; kept as-is for backward compatibility with external users.
    dict = {
        "add_acc_name": "bank add",
        "add_acc_numb": "85851111",
        "add_bank_name": "BANK CENTRAL ASIA",
        "add_bank_branch": "cihampelas",
        "edit_acc_name": "bank edit",
        "edit_acc_numb": "7475555",
        "edit_bank_name": "BANK CENTRAL ASIA",
        "edit_bank_branch": "cibinong"
    }

    def setUp(self):
        # (Removed the unused `test_driver = ""` local from the original.)
        self.driver = tsetup("firefox")
        self.flag = 0

    def _start_session(self):
        """Log in as user8 and return (driver, bank_activity, logout_activity, password)."""
        driver = self.driver
        self.user = user8
        email = self.user['email']
        pwd = self.user['password']
        # Object Activity
        loginValidate = loginActivity()
        logoutValidate = logoutActivity()
        bank = settingBankActivity()
        loginValidate.do_login(driver, self.user, email, pwd, self._site)
        return driver, bank, logoutValidate, pwd

    def test_add_bank(self):
        print("> ::TEST ADD BANK::")
        print("===================")
        driver, addBank, logoutValidate, pwd = self._start_session()
        addBank.bank_add(driver, self.dict['add_acc_name'], self.dict['add_acc_numb'], self.dict['add_bank_name'], self.dict['add_bank_branch'], pwd)
        logoutValidate.do_logout(driver, self._site)

    def test_edit_bank(self):
        print("> ::TEST EDIT BANK::")
        print("====================")
        driver, editBank, logoutValidate, pwd = self._start_session()
        editBank.bank_edit(driver, self.dict['edit_acc_name'], self.dict['edit_acc_numb'], self.dict['edit_bank_name'], self.dict['edit_bank_branch'], pwd)
        logoutValidate.do_logout(driver, self._site)

    def test_delete_bank(self):
        print("> ::TEST DELETE BANK::")
        print("======================")
        driver, deleteBank, logoutValidate, pwd = self._start_session()
        deleteBank.bank_delete(driver)
        logoutValidate.do_logout(driver, self._site)

    def test_default_bank(self):
        print("> ::TEST DEFAULT BANK::")
        print("=======================")
        driver, defaultBank, logoutValidate, pwd = self._start_session()
        defaultBank.bank_default(driver)
        logoutValidate.do_logout(driver, self._site)

    def tearDown(self):
        print("> ::Testing has done, the browser window will be closed soon::")
        self.driver.quit()
if __name__ == '__main__':
    # Run all TestBank cases; warnings='ignore' silences Python warnings
    # emitted during the run.
    unittest.main(warnings='ignore')
|
#-*-coding=utf-8-*-
"""FirstDjango URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin

from . import view, testdb, search, search_post
# URL routes for the demo project. Patterns are matched top to bottom;
# the form-handling views pair a GET page with a POST endpoint.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    url(r'^$',view.FirstView),
    url(r'first',view.FirstView),
    url(r'^insert$',testdb.insert),
    url(r'^get$',testdb.search),
    url(r'^search_form$',search.search_form),
    #url(r'^search',search.search_get),
    #url(r'^post$',search_post.form_post),
    url(r'^search_post$',search_post.form_post2),
    # Does not work (original note: "无法执行" -- "cannot be executed").
    url(r'^dbtest/$',view.dbshow),
    url(r'^books/$',view.books),
    url(r'^date/$',view.showdate),
    url(r'^ajax_list/$', view.ajax_list, name='ajax-list'),
    url(r'^ajax_dict/$', view.ajax_dict, name='ajax-dict'),
    # NOTE(review): the class [A,B] also matches a literal comma --
    # probably [AB] was intended; confirm before changing.
    url(r'^[A,B]',view.show_string),
    url(r'^header/$',view.getAgent),
    url(r'^notfound/$',view.notfound),
    url(r'^time/$',view.time_show),
    # Captures a 1-2 digit value and passes it to the view.
    url(r'^mytime/plug/(\d{1,2})$',view.time_show2),
    url(r'^current$',view.current_date),
    url(r'^filter$',view.filter_usage),
    url(r'^filter2$',view.filter_usage2),
    #url(r'^admin/', include('django.contrib.admin.urls')),
]
|
from enum import Enum
from typing import Optional
import numpy as np
from pydantic import PrivateAttr, validator
from napari.utils.color import ColorArray
from napari.utils.colormaps.colorbars import make_colorbar
from napari.utils.events import EventedModel
from napari.utils.events.custom_types import Array
from napari.utils.translations import trans
class ColormapInterpolationMode(str, Enum):
    """INTERPOLATION: Interpolation mode for colormaps.
    Selects an interpolation mode for the colormap.
    * linear: colors are defined by linear interpolation between
      colors of neighboring control points.
    * zero: colors are defined by the value of the color in the
      bin between neighboring control points.
    """

    LINEAR = 'linear'
    ZERO = 'zero'
class Colormap(EventedModel):
    """Colormap that relates intensity values to colors.
    Attributes
    ----------
    colors : array, shape (N, 4)
        Data used in the colormap.
    name : str
        Name of the colormap.
    display_name : str
        Display name of the colormap.
    controls : array, shape (N,) or (N+1,)
        Control points of the colormap.
    interpolation : str
        Colormap interpolation mode, either 'linear' or
        'zero'. If 'linear', ncontrols = ncolors (one
        color per control point). If 'zero', ncontrols
        = ncolors+1 (one color per bin).
    """

    # fields
    colors: ColorArray
    name: str = 'custom'
    _display_name: Optional[str] = PrivateAttr(None)
    interpolation: ColormapInterpolationMode = ColormapInterpolationMode.LINEAR
    controls: Array[np.float32, (-1,)] = None
    def __init__(
        self, colors, display_name: Optional[str] = None, **data
    ) -> None:
        # Default the display name to the colormap name when not given.
        if display_name is None:
            display_name = data.get('name', 'custom')
        super().__init__(colors=colors, **data)
        # Private attributes must be set after the pydantic __init__.
        self._display_name = display_name
    # controls validator must be called even if None for correct initialization
    @validator('controls', pre=True, always=True, allow_reuse=True)
    def _check_controls(cls, v, values):
        # If no control points provided generate defaults
        # ('zero' interpolation needs one extra control point per bin edge).
        if v is None or len(v) == 0:
            n_controls = len(values['colors']) + int(
                values['interpolation'] == ColormapInterpolationMode.ZERO
            )
            return np.linspace(0, 1, n_controls, dtype=np.float32)
        # Check control end points are correct
        if v[0] != 0 or (len(v) > 1 and v[-1] != 1):
            raise ValueError(
                trans._(
                    'Control points must start with 0.0 and end with 1.0. Got {start_control_point} and {end_control_point}',
                    deferred=True,
                    start_control_point=v[0],
                    end_control_point=v[-1],
                )
            )
        # Check control points are sorted correctly
        if not np.array_equal(v, sorted(v)):
            raise ValueError(
                trans._(
                    'Control points need to be sorted in ascending order',
                    deferred=True,
                )
            )
        # Check number of control points is correct
        n_controls_target = len(values['colors']) + int(
            values['interpolation'] == ColormapInterpolationMode.ZERO
        )
        n_controls = len(v)
        if n_controls != n_controls_target:
            raise ValueError(
                trans._(
                    'Wrong number of control points provided. Expected {n_controls_target}, got {n_controls}',
                    deferred=True,
                    n_controls_target=n_controls_target,
                    n_controls=n_controls,
                )
            )
        return v
    def __iter__(self):
        # Unpacks as (colors, controls, interpolation).
        yield from (self.colors, self.controls, self.interpolation)
    def map(self, values):
        """Map scalar intensity values to RGBA colors using this colormap."""
        values = np.atleast_1d(values)
        if self.interpolation == ColormapInterpolationMode.LINEAR:
            # One color per control point
            # Interpolate each RGBA channel independently.
            cols = [
                np.interp(values, self.controls, self.colors[:, i])
                for i in range(4)
            ]
            cols = np.stack(cols, axis=1)
        elif self.interpolation == ColormapInterpolationMode.ZERO:
            # One color per bin
            # Colors beyond max clipped to final bin
            indices = np.clip(
                np.searchsorted(self.controls, values, side="right") - 1,
                0,
                len(self.colors) - 1,
            )
            cols = self.colors[indices.astype(np.int32)]
        else:
            raise ValueError(
                trans._(
                    'Unrecognized Colormap Interpolation Mode',
                    deferred=True,
                )
            )
        return cols
    @property
    def colorbar(self):
        # Rendered colorbar image for this colormap.
        return make_colorbar(self)
|
from django.conf.urls import url
from django.contrib import admin
from blog import views
# Make sure every installed app's admin module is loaded.
admin.autodiscover()
# Blog routes: front page, tag and search filters, category listings,
# individual posts, plus read-progress and "like" endpoints.
urlpatterns = [
    url(r'^$', views.writing, name='writing'),
    url(r'^tags/(?P<tags>.+)/$', views.tags, name='tags'),
    url(r'^search/$', views.search, name='search'),
    url(r'^(?P<category>\w+)/$', views.category, name='category'),
    url(r'^(?P<category>\w+)/(?P<slug>[-\w]+)/$', views.post, name='post'),
    url(r'^(?P<category>\w+)/(?P<slug>[-\w]+)/(?P<percent>[\d]+)/$',
        views.read_post, name='read_post'),
    url(r'^(?P<category>\w+)/(?P<slug>[-\w]+)/not_bad/$',
        views.liked_post, name='liked_post'),
]
|
import unittest
from idstools import maps
class SignatureMapTestCase(unittest.TestCase):
    """Tests for idstools.maps.SignatureMap loading of gen-msg/sid-msg maps."""

    def test_load_generator_map(self):
        """Entries from a gen-msg.map are retrievable by (gid, sid)."""
        sigmap = maps.SignatureMap()
        # Context manager so the fixture file handle is always closed
        # (the original leaked the handle from a bare open()).
        with open("tests/gen-msg.map") as fixture:
            sigmap.load_generator_map(fixture)

        sig = sigmap.get(1, 1)
        self.assertIsNotNone(sig)
        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(1, sig["gid"])
        self.assertEqual(1, sig["sid"])
        self.assertEqual("snort general alert", sig["msg"])

        sig = sigmap.get(139, 1)
        self.assertIsNotNone(sig)
        self.assertEqual(139, sig["gid"])
        self.assertEqual(1, sig["sid"])
        self.assertEqual(
            "sensitive_data: sensitive data global threshold exceeded",
            sig["msg"])

    def test_load_signature_map(self):
        """Basic and multi-reference signatures load from a sid-msg.map."""
        sigmap = maps.SignatureMap()
        with open("tests/sid-msg.map") as fixture:
            sigmap.load_signature_map(fixture)

        # Get a basic signature.
        sig = sigmap.get(1, 2000356)
        self.assertIsNotNone(sig)
        self.assertEqual(1, sig["gid"])
        self.assertEqual(2000356, sig["sid"])
        self.assertEqual("ET POLICY IRC connection", sig["msg"])
        self.assertEqual(len(sig["ref"]), 1)
        self.assertEqual("url,doc.emergingthreats.net/2000356", sig["ref"][0])

        # Try again but with a gid of 3.
        self.assertEqual(sig, sigmap.get(3, 2000356))

        # This signature has multiple refs (leftover debug print removed).
        sig = sigmap.get(1, 2000373)
        self.assertEqual(3, len(sig["ref"]))

    def test_load_signature_v2_map(self):
        """V2 sid-msg.map rows carry classification and priority fields."""
        sigmap = maps.SignatureMap()
        with open("tests/sid-msg-v2.map") as fixture:
            sigmap.load_signature_map(fixture)

        sig = sigmap.get(1, 2495)
        self.assertEqual(1, sig["gid"])
        self.assertEqual(2495, sig["sid"])
        self.assertEqual("misc-attack", sig["classification"])
        self.assertEqual(0, sig["priority"])
        self.assertEqual(
            "GPL NETBIOS SMB DCEPRC ORPCThis request flood attempt",
            sig["msg"])
        self.assertEqual(4, len(sig["ref"]))
|
def get_sign(x):
    """Return the leading '+' or '-' of *x*, or None when there is none.

    Unlike the original, an empty string returns None instead of raising
    IndexError; for unsigned tokens the implicit None is now explicit.
    """
    if x and x[0] in '+-':
        return x[0]
    return None
arr = ['в', '5', 'часов', '17', 'минут', 'температура', 'воздуха', 'была', '+5', 'градусов']

# Zero-pad every numeric token (keeping any leading sign) to two digits
# and wrap it in quote tokens, e.g. '5' -> '"', '05', '"'.
i = 0
while i < len(arr):
    token = arr[i]
    sign = token[0] if token[0] in '+-' else None
    digits = token[1:] if sign else token
    if digits.isdigit():
        padded = (sign or '') + digits.zfill(2)
        arr[i:i + 1] = ['"', padded, '"']
        i += 2  # skip past the inserted quote and the padded number
    i += 1
print(" ".join(arr))
|
from django.contrib import admin
from .models import Game, Score, RoundScore, Player
# Expose the score-keeping models in the Django admin interface.
admin.site.register(Player)
admin.site.register(Game)
admin.site.register(Score)
admin.site.register(RoundScore)
|
# __all__ = [
# 'main',
# 'processPrefixes',
# 'processProperties',
# 'processInfosheet',
# 'processDictionaryMapping',
# 'processCodebook',
# 'processTimeline',
# 'processData',
# 'sdd2setl'
# ]
#from .sdd2rdf import *
from .sdd2setl import sdd2setl, sdd2setl_main
from .sddmarkup import sddmarkup_main
|
""" script to check, if there can be a delayed signal without a neutron in the events of user_atmoNC_.root:
To get a delayed signal, there should be at least one neutron in the event. But to check this, around 1000 events
with no neutron from user_atmoNC_.root must be analyzed and checked for a delayed signal.
"""
import datetime
import NC_background_functions
import numpy as np
from matplotlib import pyplot as plt
# get the date and time, when the script was run:
date = datetime.datetime.now()
now = date.strftime("%Y-%m-%d %H:%M")
# set the path of the input files (filename must be 'user_atmoNC_{}.root'):
input_path = "/local/scratch1/pipc51/astro/blum/detsim_output_data/"
# set path, where results should be saved:
output_path = "/home/astro/blum/juno/atmoNC/data_NC/output_neutron_cut/"
# set the number of the first file and number of the last file that should be read:
start_number = 21
stop_number = 24
# number of entries in the input files:
Number_entries_input = 100
# Set SAVE_HITTIME and SAVE_TXT flag:
SAVE_HITTIME = True
SAVE_TXT = True
# Set minimum and maximum hittime in ns (define time window, where delayed signal should lie):
min_hittime = 10000
max_hittime = 1000000
# Set threshold of number of PE per bin for possible delayed signal (bin-width = 5 ns):
threshold = 30
# set threshold2 of number of PEs per bin (signal peak is summed as long as nPE is above threshold2):
threshold2 = 1
# Set bin-width of hittime histograms in ns:
binwidth = 5.0
# min and max number of PE for delayed energy cut (from OLD_check_delayed_energy.py):
min_PE_delayed = 2805.53
max_PE_delayed = 3731.04
# preallocate variables:
# number of total events, that are analyzed:
number_events = 0
# number of events with at least one neutron:
number_neutron = 0
# number of events without neutron:
number_no_neutron = 0
# number of events without neutron and without possible delayed signal:
number_no_delayed = 0
# number of events without neutron, but with delayed signal (agree with time and delayed energy cut):
number_delayed = 0
# number of events without neutron, but with possible delayed signal (agree with time but NOT with energy cut):
number_possible_delayed = 0
# number of events without neutron, but with possible second delayed signal (agree only with time cut) after one
# delayed or possible delayed cut:
number_possible_second_delayed = 0
# loop over files:
for file_number in range(start_number, stop_number+1, 1):
# path to file:
input_file = "user_atmoNC_{0:d}.root".format(file_number)
print("Start reading {0} ...".format(input_file))
# analyze file with function check_neutron_cut():
num_events, num_neutron, num_no_neutron, num_no_delayed, num_delayed, num_pos_delayed, num_pos_second_delayed = \
NC_background_functions.check_neutron_cut(input_path, file_number, output_path, min_hittime, max_hittime,
threshold, threshold2, binwidth, min_PE_delayed, max_PE_delayed,
Number_entries_input, SAVE_HITTIME)
# add variables:
number_events = number_events + num_events
number_neutron = number_neutron + num_neutron
number_no_neutron = number_no_neutron + num_no_neutron
number_no_delayed = number_no_delayed + num_no_delayed
number_delayed = number_delayed + num_delayed
number_possible_delayed = number_possible_delayed + num_pos_delayed
number_possible_second_delayed = number_possible_second_delayed + num_pos_second_delayed
print("\nnumber_events = {0}".format(number_events))
print("\nnumber_neutron = {0}".format(number_neutron))
print("\nnumber_no_neutron = {0}".format(number_no_neutron))
print("\nnumber_no_delayed = {0}".format(number_no_delayed))
print("\nnumber_delayed = {0}".format(number_delayed))
print("\nnumber_possible_delayed = {0}".format(number_possible_delayed))
print("\nnumber_possible_second_delayed = {0}".format(number_possible_second_delayed))
# calculate neutron cut efficiency in percent (I lost number_delayed possible delayed signals when I only look
# at event with at least 1 neutron):
efficiency = float(number_delayed) / float(number_events) * 100
if SAVE_TXT:
# save numbers from above to txt file:
np.savetxt(output_path + "result_neutron_cut_atmoNC_{0:d}_to_{1:d}.txt".format(start_number, stop_number),
np.array([number_events, number_neutron, number_no_neutron, number_no_delayed, number_delayed,
number_possible_delayed, number_possible_second_delayed, efficiency]),
fmt='%.3f',
header="Results of script check_neutron_cut.py (only 20inch PMTs) ({0}):\n"
"Input-files: user_atmoNC_{1:d}.root to user_atmoNC_{2:d}.root;\n"
"Time window: min_hittime = {3:d} ns, max_hittime = {4:d} ns;\n"
"Threshold (nPE per bin, bin-width = {6:0.1f} ns) = {5:d};\n"
"Threshold2 (nPE per bin, define integration of pulse) = {7:d};\n"
"Delayed energy cut: min. PE = {8:0.2f}, max. PE = {9:0.2f}.\n"
"Results:\n"
"\n"
"Total number of analyzed events;\n"
"Number of events with at least 1 neutron;\n"
"Number of events without a neutron;\n"
"Number of events without neutron and without possible delayed signal;\n"
"Number of events without neutron, but with delayed signal (agrees with time and delayed energy "
"cut);\n"
"Number of events without neutron, but with possible delayed signal (agrees with time but NOT "
"with energy cut);\n"
"Number of events without neutron, but with possible second delayed signal (agree only with time "
"cut) after one delayed or possible delayed cut;\n"
"Cut efficiency in percent (number_delayed/number_events):"
.format(now, start_number, stop_number, min_hittime, max_hittime, threshold, binwidth, threshold2,
min_PE_delayed, max_PE_delayed))
|
from data_load import data_list
query=input("query: ")
query = query.strip(" ").split()
query = list(set(query))
if ("or" in query) and ("and" not in query):
query.remove("or")
print("Performing OR search for: ", query)
for i, quote in enumerate(data_list):
for word in query:
if (word in quote):
index1 = quote.index(word)
print("Found:", i, "...", quote[index1:index1+50],"...")
elif ("and" in query) and ("or" not in query):
query.remove("and")
print("Performing AND search for: ", query)
for i,quote in enumerate(data_list):
if (query[0] in quote) and (query[1] in quote):
print("Found:", i, "...", quote[:500],"...")
elif(len(query)==1):
for i,quote in enumerate(data_list):
found_at = quote.find(query[0])
if( found_at > 0):
print("Found:", i, "..."+quote[found_at:found_at+50], "...")
elif ('and' not in query) and ("or" not in query):
for i,quote in enumerate(data_list):
if (query[0] in quote) and (query[1] in quote):
print("Found:", i, "...", quote[:500],"...")
else :
query.remove("and")
query.remove("or")
for i,quote in enumerate(data_list):
if (query[0] in quote) and (query[1] in quote):
print("Found:", i, "...", quote[:500],"...")
|
from torch import einsum
from backpack.extensions.firstorder.base import FirstOrderModuleExtension
class BatchL2Linear(FirstOrderModuleExtension):
    """Per-sample squared L2 gradient norms for Linear-layer parameters."""

    def __init__(self):
        # Register which parameters this extension can handle.
        super().__init__(params=["bias", "weight"])

    def bias(self, ext, module, g_inp, g_out, backproped):
        # Sum the squared output gradient over the feature axis (dim 1),
        # leaving one scalar per sample in the batch.
        return (g_out[0] ** 2).sum(1)

    def weight(self, ext, module, g_inp, g_out, backproped):
        # ||g_out outer input||^2 per sample, computed without materializing
        # the per-sample outer-product gradient.
        return einsum("ni,nj->n", (g_out[0] ** 2, module.input0 ** 2))
|
import os
import sys
import glob
import numpy as np
def extract_likelihoods(results_dir, verbose=True):
    """Aggregate per-family likelihoods from the '*.stats' files in results_dir.

    Each stats file's first line is "<best_tree_number> <ll_before> <ll_after>".
    A family counts as improved when its gain (ll_after - ll_before) exceeds 0.1.

    Returns a tuple (total_ll_before, total_ll_after).
    """
    ll1 = 0.0
    ll2 = 0.0
    improve = 0
    treated = 0
    diffs = []
    maxdiff = 0.0
    maxfam = ""
    for stats in glob.glob(os.path.join(results_dir, "*.stats")):
        # Close each file deterministically (the original leaked the handle);
        # only the first line is meaningful.
        with open(stats) as handle:
            line = handle.readlines()[0]
        sp = line.split()
        famll1 = float(sp[1])
        famll2 = float(sp[2])
        ll1 += famll1
        ll2 += famll2
        treated += 1
        diff = famll2 - famll1
        if diff > 0.1:
            if verbose:
                print(os.path.basename(stats) + " " + str(diff))
            diffs.append(diff)
            improve += 1
            if diff > maxdiff:
                maxdiff = diff
                maxfam = os.path.basename(stats)
    if verbose:
        print("Initial ll= " + str(ll1))
        print("Final ll= " + str(ll2))
        print("Diff = " + str(ll2 - ll1))
        # Guard the summary statistics: max() raises and np.mean warns on an
        # empty list when no family improved.
        if diffs:
            print("Av diff = " + str(np.mean(diffs)))
            print("Median diff = " + str(np.median(diffs)))
            print("Max diff = " + str(maxdiff))
            print("Max diff = " + str(max(diffs)))
            print("Max diff for family " + maxfam)
        print("Improved families: " + str(improve) + "/" + str(treated))
    return (ll1, ll2)
if __name__ == "__main__":
if (len(sys.argv) < 2):
print("syntax: python " + os.path.basename(__file__) + " results_dir")
sys.exit(1)
results_dir = sys.argv[1]
extract_likelihoods(results_dir)
|
from flask import Blueprint, flash, request, redirect, url_for, render_template
from flask import json
from labcheckin.models import Seat, Student, Transaction
from labcheckin import db
from labcheckin.utilities import parse_card, utc2local
from datetime import datetime
from labcheckin.models import Seat
# Blueprint holding all lab check-in views; registered by the app factory.
main = Blueprint(
    "main",
    __name__
)
@main.route("/")
def index():
current_student_ids = db.session.query(
Transaction.student_id
).filter_by(out_time=None)
# get students currently in lab from list of student_ids
current_students = Student.query.filter(
Student.student_id.in_(current_student_ids)).all()
current_swipe_numbers = [s.swipe_number for s in current_students]
seats = Seat.query.all()
seat_statuses = {}
seat_types = set([seat.type for seat in seats])
for seat_type in seat_types:
for seat in seats:
if seat.type == seat_type:
seat_statuses.setdefault(seat_type, [])
seat_statuses[seat_type].append(seat)
'''
seat_statuses['Booth'] = [B1, B2, B3]
seat_statuses['WIN'] = [WIN1, WIN2]
'''
return render_template(
"main/index.html.j2",
current_students=current_students,
current_swipe_numbers=current_swipe_numbers,
get_student_seat=get_student_seat,
seat_statuses=seat_statuses,
)
@main.route("/add", methods=["GET", "POST"])
def add_student():
if request.method == "GET":
return render_template("main/add_student.html.j2")
else:
name = request.form.get("fullName")
studentID = request.form.get("studentID")
raw_swipe_input = request.form.get("swipeNum")
email = request.form.get("email")
swipeNum = parse_card(raw_swipe_input)
new_student = Student(
student_id=studentID,
full_name=name,
swipe_number=swipeNum,
email=email
)
db.session.add(new_student)
db.session.commit()
flash("Student added successfully", "success")
return redirect(url_for("main.index"))
@main.route("/validate_card/<card_input>", methods=["GET"])
def validate_card(card_input):
print(card_input)
swipe_number = parse_card(card_input + "?")
seatTypes = get_available_seat_types()
print(seatTypes)
if swipe_number:
return json.dumps({
"swipe_number": swipe_number,
"seatTypes": seatTypes
})
return json.dumps({})
@main.route("/create_transaction", methods=["POST"])
def create_transaction():
swipe_number = request.form.get("swipe_number")
student = Student.query.filter_by(
swipe_number=swipe_number).first()
if student:
last_t = Transaction.query.filter_by(
student=student).order_by(Transaction.in_time.desc()).first()
# if the transaction and the out_time does not exist update it with time out
if last_t and not last_t.out_time:
seat = last_t.seat
seat.status = "Needs Cleaning"
currentTime = datetime.now()
last_t.out_time = currentTime
db.session.commit()
# otherwise create a new transaction
else:
seatType = request.form.get("options")
if seatType:
seat = get_next_available(seatType)
flash(f"{seat.label} has been assigned", "success")
seat.status = "In Use"
t = Transaction(student_id=student.student_id, seat_id=seat.id)
db.session.add(t)
db.session.commit()
else:
flash("You must select a seat type", "danger")
else:
flash("No student found", "danger")
return redirect(url_for("main.index"))
@main.route("/end_transaction/<seat_label>", methods=["GET"])
def end_transaction(seat_label: str):
seat = Seat.query.filter_by(label=seat_label).first()
last_t = Transaction.query.filter_by(seat=seat).order_by(
Transaction.in_time.desc()).first()
seat.status = "Needs Cleaning"
currentTime = datetime.now()
last_t.out_time = currentTime
db.session.commit()
return redirect(url_for("main.index"))
@main.route("/seat_cleaned/<seat_label>", methods=["GET"])
def seat_cleaned(seat_label: str):
seat = Seat.query.filter_by(label=seat_label).first()
seat.status = "Open"
db.session.commit()
return redirect(url_for("main.index"))
def get_available_seat_types():
    """Return the distinct types of seats currently marked Open."""
    open_seats = Seat.query.filter_by(status="Open").all()
    # Deduplicate via a set comprehension: ["WIN", "WIN", "MAC"] -> unique
    # types (order unspecified, as with the original list(set(...))).
    return list({seat.type for seat in open_seats})
def get_student_seat(student):
    """Return the seat from the student's most recent transaction."""
    latest = (
        Transaction.query.filter_by(student=student)
        .order_by(Transaction.in_time.desc())
        .first()
    )
    return latest.seat
def get_next_available(seat_type: str) -> Seat:
    """Return the first Open seat of *seat_type* by fixed priority, or None."""
    # Hand-tuned assignment order per seat type.
    priorities = {
        "PC": (
            "WIN1", "WIN12", "WIN9", "WIN7", "WIN5", "WIN11", "WIN2", "WIN8", "WIN4", "WIN6"
        ),
        "MAC": (
            "MAC14", "MAC10", "MAC13"
        ),
        "BOOTH": (
            "B1", "B5", "B3", "B2", "B4"
        ),
        "SCANNER": (
            "SCN3",
        )
    }
    for label in priorities[seat_type.upper()]:
        candidate = Seat.query.filter_by(label=label).first()
        if candidate.status == "Open":
            return candidate
    return None
|
from django.urls import path
from . import views
from django.conf import settings
from django.conf.urls.static import static
# Shop routes: home page plus product and category detail views.
urlpatterns = [
    path('', views.home, name='home'),
    path('products/<pk>/', views.ProductDetailView.as_view(), name='product-detail'),
    path('products/categories/<pk>/', views.CategoryDetailView.as_view(), name='list-by-category'),
]
# Serve uploaded media through Django only during development.
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
#
# @lc app=leetcode.cn id=116 lang=python3
#
# [116] 填充每个节点的下一个右侧节点指针
#
# @lc code=start
"""
# Definition for a Node.
class Node:
def __init__(self, val: int = 0, left: 'Node' = None, right: 'Node' = None, next: 'Node' = None):
self.val = val
self.left = left
self.right = right
self.next = next
"""
class Solution:
    def connect(self, root: 'Node') -> 'Node':
        """Populate each node's `next` pointer with its right-hand neighbor.

        Pre-order recursion on a perfect binary tree: a helper links two
        adjacent nodes, then recurses on same-parent and cross-parent pairs.
        """
        if root and root.left:
            self.connectTwoNodes(root.left, root.right)
        return root

    def connectTwoNodes(self, node1, node2):
        """Link node1 -> node2, then link their children pairwise."""
        if not node1:
            return node1
        # Pre-order action: wire the two adjacent nodes together.
        node1.next = node2
        # Pairs that share a parent.
        self.connectTwoNodes(node1.left, node1.right)
        self.connectTwoNodes(node2.left, node2.right)
        # The pair that straddles the two parents.
        self.connectTwoNodes(node1.right, node2.left)
# @lc code=end
|
import sys
from PyQt5.QtWidgets import QDialog, QApplication, QWidget, QMainWindow, QTableWidgetItem, QAbstractItemView, QDesktopWidget
from PyQt5.QtGui import QColor
from template import Ui_main_window
import get_data
from subprocess import call
class game_viewer(QMainWindow):
    """Main window: lists streamable games and launches an Ace player on
    double-click of a channel cell.

    Fixes: integer/string identity comparisons (`is 2`, `is 5`,
    `text() is not ""`) replaced with equality — the string identity test was
    effectively always true for runtime strings, defeating the empty-search
    guard. The duplicated row-building code in jogos()/procura_jogo() is
    factored into _append_game_row().
    """

    def __init__(self, parent=None):
        super(game_viewer, self).__init__(parent=parent)
        # Set up the user interface from Designer.
        self.ui = Ui_main_window()
        self.ui.setupUi(self)
        self.ui.todos_jogos.clicked.connect(self.jogos)
        self.ui.procurar_jogo.clicked.connect(self.procura_jogo)
        self.data_json = get_data.get_data()
        self.ui.table_jogos.setEditTriggers(QAbstractItemView.NoEditTriggers)
        self.ui.table_jogos.cellDoubleClicked.connect(self.ligar_jogo)
        self.link_to_ace = ""  # link for the ace streamer
        for day in self.data_json["days_to_Select"]:
            self.ui.select_days.addItem(day)
        self.ui.select_days.currentTextChanged.connect(self.jogos)
        self.jogos()

    def _selected_date(self):
        # Date string for the combo box's current selection.
        return self.data_json["days_to_Select"][self.ui.select_days.currentIndex()]

    def _append_game_row(self, game):
        """Append one table row describing *game* (a dict from data_json)."""
        rowPos = self.ui.table_jogos.rowCount()
        self.ui.table_jogos.insertRow(rowPos)
        self.ui.table_jogos.setItem(rowPos, 0, QTableWidgetItem(game["time"]))
        self.ui.table_jogos.setItem(rowPos, 1, QTableWidgetItem(game["type"]))
        self.ui.table_jogos.setItem(rowPos, 2, QTableWidgetItem(game["league"]))
        self.ui.table_jogos.setItem(rowPos, 3, QTableWidgetItem(game["players"]))
        # Language column: Portuguese broadcasts are highlighted in yellow.
        if "language" in game:
            ling = QTableWidgetItem(game["language"])
            if game["language"] == "[POR]":
                ling.setBackground(QColor(255, 255, 0))
            self.ui.table_jogos.setItem(rowPos, 4, ling)
        else:
            self.ui.table_jogos.setItem(rowPos, 4, QTableWidgetItem("UnKnown"))
        # Channel columns 5/6: green when a channel exists, red "None" otherwise.
        if "Channels" in game:
            channels = game["Channels"]
            if len(channels) == 2:  # was `is 2`: identity test on an int
                for col, channel in zip((5, 6), channels):
                    canal = QTableWidgetItem("CANAL " + str(channel[1]))
                    canal.setBackground(QColor(0, 255, 0))
                    self.ui.table_jogos.setItem(rowPos, col, canal)
            else:
                canal1 = QTableWidgetItem("CANAL " + str(channels[0][1]))
                canal1.setBackground(QColor(0, 255, 0))
                canal2 = QTableWidgetItem("None")
                canal2.setBackground(QColor(255, 0, 0))
                self.ui.table_jogos.setItem(rowPos, 5, canal1)
                self.ui.table_jogos.setItem(rowPos, 6, canal2)
        else:
            for col in (5, 6):
                canal = QTableWidgetItem("None")
                canal.setBackground(QColor(255, 0, 0))
                self.ui.table_jogos.setItem(rowPos, col, canal)

    def jogos(self):
        """Repopulate the table with every game on the selected day."""
        self.ui.table_jogos.setRowCount(0)
        for key in self.data_json["games"]:
            game = self.data_json["games"][key]
            if game["date"] == self._selected_date():
                self._append_game_row(game)
        self.ui.table_jogos.resizeColumnsToContents()

    def procura_jogo(self):
        """Filter the table to the selected day's games matching the search text."""
        wanted = self.ui.procurar_jogo_texto.text()
        # was `is not ""`: an identity test that is always true for strings
        # built at runtime, so empty searches were never rejected.
        if wanted != "":
            self.ui.table_jogos.setRowCount(0)
            for key in self.data_json["games"]:
                game = self.data_json["games"][key]
                if game["date"] == self._selected_date() and wanted.upper() in game["players"]:
                    self._append_game_row(game)
            self.ui.table_jogos.resizeColumnsToContents()

    def ligar_jogo(self):
        """Resolve the stream of the double-clicked channel cell and launch the player."""
        col = self.ui.table_jogos.currentItem().column()
        if col == 5 or col == 6:  # was `is 5` / `is 6`: identity tests on ints
            row = self.ui.table_jogos.currentItem().row()
            text = self.ui.table_jogos.item(row, col).text()
            if not (text == "None"):
                # NOTE(review): keys like "game<n>" are assumed to align with
                # the visible row order — confirm against get_data's output.
                channel_index = 0 if col == 5 else 1
                url = self.data_json["games"]["game" + str(row + 1)]["Channels"][channel_index][0]
                self.link_to_ace = get_data.get_link_to_ace_streamer(self.data_json["session"], url)
                call(["ace_player", self.link_to_ace])
if __name__ == "__main__":
app = QApplication(sys.argv)
window = game_viewer()
window.show()
sys.exit(app.exec_())
|
import os
import sys
import re
class Shift:
    """One guard shift: a date label, a guard id and a per-minute sleep map."""

    def __init__(self, day, guard):
        self.day = day
        self.guard = guard
        # One slot per minute of the midnight hour: 0 = awake, 1 = asleep.
        self.onDuty = [0 for _ in range(60)]
def setup():
    """Prompt for an input filename and read its lines into global fileData.

    Fixes: the original evaluated os.path.getsize() before checking that the
    file exists (raising OSError on a missing file), and its bare `exit` was
    a no-op name reference, so execution continued and later crashed with
    NameError on the unset fileHandle.
    """
    global fileHandle, fileData
    filename = input("Enter an input file name: ")
    path = "./%s" % filename
    # Existence must be checked first: getsize raises on a missing path.
    if not (os.path.isfile(path) and os.path.getsize(path) > 0):
        print("File doesn't exist or is empty.")
        sys.exit(1)
    fileHandle = open(path, "r")
    fileData = list()
    for entry in fileHandle:
        fileData.append(entry)
    fileHandle.close()
def printSchedule():
    """Pretty-print every shift in `schedule` as a 60-character minute map."""
    print ()
    print ("Date ID Minute")
    print (" 000000000011111111112222222222333333333344444444445555555555")
    print (" 012345678901234567890123456789012345678901234567890123456789")
    # Loop variable renamed: the original shadowed the Shift class.
    for shift in schedule:
        duty_map = ''.join(str(minute) for minute in shift.onDuty)
        print ("{0} #{1} {2}".format(shift.day, str(shift.guard).zfill(4), duty_map))
    print ()
def buildSchedule():
    """Parse the sorted fileData log into the global `schedule` (list of Shift).

    Assumes fileData is chronologically sorted, so each guard's
    "begins shift" record precedes its "falls asleep"/"wakes up" records.
    NOTE(review): lastTimeChange is referenced before assignment if a
    "wakes up" record ever precedes a "falls asleep" — relies on sorted input.
    """
    global schedule
    schedule = []
    newShift = None
    entries = 0
    for entry in fileData:
        entries += 1
        # Extract data (e.g. [ 1518- 11-05 00:55 ] wakes up)
        match = re.search('^.[\\d]+-([\\d]+)-([\\d]+) ([\\d]+):([\\d]+). (.*)$', entry)
        month = int(match.group(1))
        day = int(match.group(2))
        hour = int(match.group(3))
        minute = int(match.group(4))
        text = match.group(5)
        if text == "wakes up":
            # Mark minutes from the last "falls asleep" up to now as asleep.
            for i in range(lastTimeChange, minute):
                #newShift.onDuty[i] = 'z'
                newShift.onDuty[i] = 1 # 0 awake, 1 asleep
            # If we've reached the last shift
            if entries == len(fileData):
                schedule.append(newShift)
        elif text == "falls asleep":
            lastTimeChange = minute
        else: # Set up new shift
            # Add the last completed shift to the schedule first
            if newShift != None:
                schedule.append(newShift)
            match = re.search('Guard #([\\d]+) begins shift', text)
            guard = int(match.group(1))
            # Shift the day by 1 if the shift starts before 00:00
            if hour != 0:
                day += 1
            date = "{0}-{1}".format(str(month).zfill(2), str(day).zfill(2))
            newShift = Shift(date, guard)
def processShifts():
    """Report the (guard, minute) pair with the highest sleep count.

    Fixes: loop variables no longer shadow the Shift class or the builtin
    `min`, and the report message typo "slept the more" is corrected.
    """
    sleepiestGuard = 0
    sleepiestMinute = 0
    sleepiestSum = 0
    sumOfGuardShifts = {} # { key=guard : value=sum of minutes in shift)
    # Zip up each guard's shifts into one per-minute sleep-count summary.
    for shift in schedule:
        if shift.guard not in sumOfGuardShifts:
            sumOfGuardShifts[shift.guard] = shift.onDuty
        else:
            sumOfGuardShifts[shift.guard] = [
                sum(pair) for pair in zip(shift.onDuty, sumOfGuardShifts[shift.guard])
            ]
    # Scan every minute for the single highest count across all guards.
    for minute in range(60):
        for (guard, counts) in sumOfGuardShifts.items():
            if counts[minute] > sleepiestSum:
                sleepiestGuard = guard
                sleepiestSum = counts[minute]
                sleepiestMinute = minute
    print ("Guard", sleepiestGuard, "slept the most during minute", sleepiestMinute, "for {0} shifts.".format(sleepiestSum))
    print ("Checksum:", int(sleepiestGuard * sleepiestMinute))
# Read the log, sort entries chronologically, then build and report the schedule.
setup()
fileData.sort()
buildSchedule()
printSchedule()
processShifts()
|
import numpy as np
from keras.applications.resnet50 import ResNet50
from keras.preprocessing import image
from keras.applications.resnet50 import preprocess_input
from keras.models import Model
from PIL import Image as PIL_Image
from pelops.features.feature_producer import FeatureProducer
# Use global so we only load the resnet model once
# TODO: find a better way to do this
resnet_model = None  # lazily initialized in ResNet50FeatureProducer.__init__
class ResNet50FeatureProducer(FeatureProducer):
    """FeatureProducer that encodes image chips with ResNet50 activations.

    Features are the 2048-d output of the 'flatten_1' layer of an
    ImageNet-pretrained ResNet50, shared across instances via the
    module-level resnet_model cache.
    """

    def __init__(self, chip_producer):
        # Module-level cache: instantiate the network only once per process.
        global resnet_model
        super().__init__(chip_producer)
        if resnet_model is None:
            # include_top needs to be True for this to work
            base_model = ResNet50(weights='imagenet', include_top=True)
            resnet_model = Model(input=base_model.input,
                                 output=base_model.get_layer('flatten_1').output)
        self.resnet_model = resnet_model

    @staticmethod
    def preprocess_image(img, x_dim=224, y_dim=224):
        """Resize *img* to the network's input size and apply ResNet preprocessing."""
        if img.size != (x_dim, y_dim):
            img = img.resize((x_dim,y_dim), PIL_Image.BICUBIC)
        x = image.img_to_array(img)
        # Add a batch axis: the model expects (1, H, W, C).
        x = np.expand_dims(x, axis=0)
        x = preprocess_input(x)
        return x

    def produce_features(self, chip):
        """Return the ResNet50 feature vector for one chip."""
        pil_image = self.get_image(chip)
        preprocessed_image = self.preprocess_image(pil_image)
        image_features = self.resnet_model.predict(preprocessed_image)
        return image_features

    def set_variables(self):
        # Dimensionality of the 'flatten_1' layer output.
        self.feat_size = 2048
|
#!usr/bin/env python
# -*- coding: utf-8 -*-
"""
Entries as classes
"""
from src.DB.Model import Model
class Page(Model):
    """Cached page content record."""
    table = 'page_content' # table name is page_content
    fields = ['id', 'url', 'content']
class Url(Model):
    """Record of a URL that has already been cached."""
    table = 'cached_url' # table name is cached_url (original comment wrongly said page_content)
    fields = ['url']
|
"""
Faça um Programa que leia três números e mostre-os em ordem decrescente.
"""
def obter_numero_inteiro(msg):
    """Prompt the user with *msg* and return the typed value as an int."""
    return int(input(msg))
def obter_numero_maior(numero_1, numero_2, numero_3):
    """Return the largest of the three numbers."""
    return max(numero_1, numero_2, numero_3)
def obter_numero_medio(numero_1, numero_2, numero_3):
    """Return the middle value among the three numbers."""
    return sorted((numero_1, numero_2, numero_3))[1]
def obter_numero_menor(numero_1, numero_2, numero_3):
    """Return the smallest of the three numbers."""
    return min(numero_1, numero_2, numero_3)
def ordenar_em_ordem_decrescente(numero_1, numero_2, numero_3):
    """Return the three numbers as a list in descending order."""
    return sorted([numero_1, numero_2, numero_3], reverse=True)
def ordena_tres_numeros_em_ordem_decrescente():
    """Read three integers from the user and print them in descending order."""
    prompts = ('Informe o primeiro número: ',
               'Informe o segundo número: ',
               'Informe o terceiro número: ')
    valores = [obter_numero_inteiro(prompt) for prompt in prompts]
    numeros_ordenados = ordenar_em_ordem_decrescente(*valores)
    print('Os números em ordem decrescente:', numeros_ordenados)
if __name__ == '__main__':
    # Program banner followed by the interactive sorting routine.
    print('+-----------------------------------------------------+')
    print('| Programa: Escreve três números em ordem decrescente |')
    print('+-----------------------------------------------------+')
    ordena_tres_numeros_em_ordem_decrescente()
|
#!/usr/bin/env python
# coding: utf-8
import requests
import json
import argparse
# Obtain an access_token used for authentication.
def get_access_token(client_secret, client_id):
    """Fetch an OAuth2 client-credentials token from the Databaker open API.

    Returns the token string, or None when the HTTP request fails (the
    error is printed).
    """
    grant_type = "client_credentials"
    url = "https://openapi.data-baker.com/oauth/2.0/token?grant_type={}&client_secret={}&client_id={}"\
        .format(grant_type, client_secret, client_id)
    try:
        response = requests.post(url)
        response.raise_for_status()
    except Exception as e:
        print(e)
        return
    else:
        access_token = json.loads(response.text).get('access_token')
        return access_token
# Fetch the recognized text.
def get_text(file, headers):
    """POST raw audio bytes to the Databaker ASR endpoint; return the transcript."""
    response = requests.post("https://asr.data-baker.com/asr/api?", data=file, headers=headers)
    payload = json.loads(response.text)
    # A code other than 20000 signals an error; dump the raw response.
    if payload.get("code") != 20000:
        print(response.text)
    return payload.get("text")
# Parse command-line arguments.
def get_args():
    """Build the argument parser for the ASR demo and parse sys.argv.

    Credentials and the audio file path are mandatory; the audio
    parameters fall back to 16 kHz wav with punctuation enabled.
    """
    parser = argparse.ArgumentParser(description='ASR')
    parser.add_argument('-client_secret', type=str, required=True)
    parser.add_argument('-client_id', type=str, required=True)
    parser.add_argument('-file_path', type=str, required=True)
    parser.add_argument('--audio_format', type=str, default='wav')
    parser.add_argument('--sample_rate', type=str, default='16000')
    parser.add_argument('--add_pct', type=str, default='true')
    args = parser.parse_args()
    return args
if __name__ == '__main__':
    args = get_args()
    # Obtain the access token for authentication.
    client_secret = args.client_secret
    client_id = args.client_id
    access_token = get_access_token(client_secret, client_id)
    # Read the audio file as raw bytes.
    with open(args.file_path, 'rb') as f:
        file = f.read()
    # Fill in the header information.
    audio_format = args.audio_format
    sample_rate = args.sample_rate
    add_pct = args.add_pct
    headers = {'access_token': access_token, 'audio_format': audio_format, 'sample_rate': sample_rate,
               'add_pct': add_pct}
    text = get_text(file, headers)
    print(text)
|
import sys

# Grid filename: first CLI argument, or prompt on stdin when absent.
if len(sys.argv) == 1:
    print('Input filename:')
    f = str(sys.stdin.readline()).strip()
else:
    f = sys.argv[1]
# One string per grid row, whitespace trimmed.
data = [line.strip() for line in open(f)]
def step(d, mx, my, xl, yl, x=0, y=0, c=0):
    """Count trees ('#') hit while descending grid *d* on slope (mx, my).

    d      : list of equal-length row strings; the pattern repeats
             horizontally, so x wraps modulo xl.
    mx, my : horizontal / vertical step per move.
    xl, yl : row width and number of rows.
    x, y, c: starting position and running count (kept so the original
             recursive call signature remains valid).

    Rewritten iteratively: the original tail recursion recursed once per
    row and could hit Python's recursion limit on large grids.
    """
    while True:
        x = (x + mx) % xl
        y = y + my
        if d[y][x] == '#':
            c += 1
        if y == yl - 1:
            return c
'''
Solution 1
'''
width, height = len(data[0]), len(data)
ans = step(data, 3, 1, width, height)
print('Solution 1:', ans)
'''
Solution 2
'''
# Product of tree counts over all required slopes.
slopes = [(1, 1), (3, 1), (5, 1), (7, 1), (1, 2)]
ans = 1
for dx, dy in slopes:
    ans *= step(data, dx, dy, width, height)
print('Solution 2:', ans)
|
# Aggregate "name amount" lines from stdin until the word "total" appears,
# then report each resource's total in alphabetical order.
resources = {}
while True:
    tokens = input().lower().split()
    if tokens[0] == 'total':
        break
    name, amount = tokens[0], tokens[1]
    resources[name] = resources.get(name, 0) + int(amount)
for name in sorted(resources):
    print(name, resources[name])
|
# -*- coding: utf-8 -*-
"""
Created on Sun Dec 17 19:33:38 2017
Contains the Agent class and functions of it
@author: paula
"""
import random
class Agent():
    """A grid agent that wanders a 100x100 torus, eats from the environment
    and averages its store with nearby agents.

    Note: the class-level ``agents``/``environment`` lists are always
    shadowed by the instance attributes set in ``__init__``; they are kept
    only for backward compatibility.
    """
    #x = random.randint()
    #y = random.randint()
    agents = []
    environment = []
    def __init__(self, environment, agents):
        """environment: 2D list of food values; agents: list of all agents."""
        self.x = random.randint(0,99)
        self.y = random.randint(0,99)
        self.environment = environment
        self.agents = agents
        self.store = 0  # food gathered so far
    def set_x(self, x):
        self.x = x
    def set_y(self, y):
        self.y = y
    def get_x(self):
        return self.x
    def get_y(self):
        return self.y
    def move(self):
        """Take one random diagonal-ish step; coordinates wrap (mod 100)
        so no agent goes out of bounds."""
        if random.random() < 0.5:
            self.y = (self.y + 1) % 100
        else:
            self.y = (self.y - 1) % 100
        if random.random() < 0.5:
            self.x = (self.x + 1) % 100
        else:
            self.x = (self.x - 1) % 100
    def eat(self):
        """Consume 10 units from the current cell when it holds more than 10."""
        if self.environment[self.y][self.x] > 10:
            self.environment[self.y][self.x] -= 10
            self.store += 10
    def share_with_neighbours(self, neighborhood):
        """Average stores with every agent within *neighborhood* distance.

        Note: when ``self.agents`` contains this agent itself, the agent also
        "shares" with itself, which is a no-op on the average.
        """
        for agent in (self.agents):
            distance = self.distance_between(agent)
            if distance <= neighborhood:
                # Renamed from `sum`, which shadowed the builtin.
                average = (self.store + agent.store) / 2
                self.store = average
                agent.store = average
    def distance_between(self, agent):
        """Euclidean distance to *agent* (Pythagoras)."""
        return (((self.x - agent.x)**2) + ((self.y - agent.y)**2))**0.5
|
def gameStart() :
    """Show the main menu and return the chosen game mode.

    Returns the bot difficulty string from singlePlayer() for option 1,
    or '2Player' for option 2.  Option 3 shows the how-to-play screens
    and then falls through back to this menu.
    """
    while True :
        print ('What do you want to play?')
        print ('1.) Single player')
        print ('2.) 2 Player')
        print ('3.) How to play')
        print ('Please choose number do you want')
        number = input()
        if number == '1' :
            bot = singlePlayer()
            # 'Back' means the user backed out of the single-player menu;
            # redisplay the main menu in that case.
            if bot != 'Back' :
                return bot
        elif number == '2':
            return '2Player'
        elif number == '3':
            # The original had a useless `loop == True` comparison (a no-op)
            # after this call; the working behavior — returning to the
            # menu after the help screens — is kept.
            howToPlay()
def singlePlayer() :
    """Single-player submenu: returns 'Easy', 'Medium' or 'Back'."""
    print ('## Single player ##')
    print ('1.) Easy bot')
    print ('2.) Medium bot')
    print ('3.) Back to the menu')
    # Map menu input to the returned mode string.
    choices = {'1': 'Easy', '2': 'Medium', '3': 'Back'}
    while True :
        print ('Please choose number do you want')
        number = input()
        if number in choices :
            return choices[number]
def howToPlay () :
    """Interactive help menu.

    Shows character descriptions, winning rules or gameplay instructions
    depending on the number entered; any other input returns to the caller.
    """
    loop = False
    while (loop == False) :
        print ('## How to play ##')
        print ('In the Quest Master game Will divide the gameplay into 3 parts')
        print ('1.) Character')
        print ('2.) Winning rules')
        print ('3.) How to play')
        print ('4.) Back to the menu')
        print ('Please choose the number you want to learn more')
        number = input()
        if number == '1' :
            # Character roster and starting positions.
            print ('## Character ##')
            print ('** All characters have normal attack skill is use MP 2 for At 2 except for swordman **')
            print ('1.) ( Priest ), Priest will have HP starting at 26.\n The skill is to use MP 2 for HP+2.\n It will start at position P on the map (O14).')
            print ('2.) ( Swordman ), Swordman will have HP starting at 23.\n The skill is to use MP 2 for At 3 and skill use MP 3 for At 4.\n It will start at the position S on the map (C14)')
            print ('3.) ( Magician ), Magician will have HP starting at 24.\n There is a skill when defending a fight. If starting a fight with MP 0, will increase MP by 2.\n It starting at M position on the map (B3)')
            print ('4.) ( Robots ), The robot will have HP starting at 25.\n The skill is to use MP 1 for Shield 2.\n It will start at the F position on the map (H7)')
            print ('5.) ( Demon ), The demon will have HP starting at 23.\n The skill is to use MP 3 for At 2 and HP+1.\n t will start at position D on the map (O3)')
            print ('Press Enter for back to the menu')
            enter = input()
        elif number == '2' :
            # The three possible win conditions.
            print ('## Winning rules ##')
            print ('1.) Players have HP more than 40 ')
            print ('2.) The last remaining player')
            print ('3.) Players who enter all 5 of the castles')
            print ('Press Enter for back to the menu')
            enter = input()
        elif number == '3' :
            # Turn structure and battle rules.
            print ('## How to play ##')
            print ('1.) Players who roll two dice And then choose to walk or draw cards according to the face of the dice you get,\n such as walking 3 and a sword. Players choose to walk 3 slots or battle')
            print ('2.) When a player falls to a different location (Place), the player must follow the specified details on that location')
            print ('3.) In the case of falling into the Item ( I ), players get 1 Item card')
            print ('4.) In the case of falling into the Event ( E ), the player draws an Event card and acts as described by the card')
            print ('5.) In case of falling into the Battle ( B ), players must choose to battle or not')
            print ('6.) In case of falling into the Warp ( W ), players can travel to other Warp spots')
            print ()
            print ('## In the battle ##')
            print ('When battle both of them must roll the dice to get their MP. The MP is used to use the Item card or the character skill.')
            print ('* Which the battle will start from *')
            print ('1.) The challenger will attack first by getting At+2 for free and can use the card only At card and AD card.\n then use the character skill and then end turn.')
            print ('2.) The other person will be able to turn, defend and attack.\n Which can use any card Then use the character skill and then end turn')
            print ('3.) The challenger will get a defense turn, cannot attack and can only use Df card and AD card,\n use the character skill and then end turn.')
            print ('4.) It will conclude the battle and continue the game.')
            print ('Press Enter for back to the menu')
            enter = input()
        else :
            # Any other input exits the help menu.
            loop = True
|
import numpy as np
np.random.seed(1234)
#Original class given by the paper
class Driving(object):
    """Multi-lane driving simulator (class given by the original paper).

    The agent occupies one lane on a road of length ``road_length``; cars
    and cats spawn at the far end with probabilities ``p_car``/``p_cat``
    and move toward the agent every step.  ``step`` returns
    (state, reward, done).
    """
    def __init__(self, num_lanes=5, p_car=0.16, p_cat=0.09, sim_len=300, ishuman_n=False, ishuman_p=False):
        """
        num_lanes : number of lanes on the road.
        p_car, p_cat : per-step spawn probability for cars / cats.
        sim_len   : episode length in steps.
        ishuman_n : "driving and avoiding" human reward (penalize cat hits).
        ishuman_p : "driving and rescuing" human reward (reward cat hits).
        """
        self.num_lanes = num_lanes
        self.road_length = 8
        self.car_speed = 1
        self.cat_speed = 3
        self.actions = range(3)  # 0 = straight, 1 = right, 2 = left
        self.p_car = p_car
        self.p_cat = p_cat
        self.sim_len = sim_len
        self.ishuman_n = ishuman_n
        self.ishuman_p = ishuman_p
    def reset(self):
        """Start a new episode; returns the initial state tuple."""
        self.lane = 2
        self.timestamp = 0
        self.done = False
        self.num_collision = 0
        self.num_hit_cat = 0
        self.cars = {}
        self.cats = {}
        for lane in range(self.num_lanes):
            self.cars[lane] = []
            self.cats[lane] = []
        # the state shows the positions of the first cat and car in adjacent lanes
        self.state_generator()
        return self.state
    def checker(self, lane):
        """Append to the state the nearest car and cat position in *lane*
        (-1 when the lane holds none of that object)."""
        if len(self.cars[lane]) == 0:
            self.state += (-1,)
        else:
            self.state += (self.cars[lane][0],)
        if len(self.cats[lane]) == 0:
            self.state += (-1,)
        else:
            self.state += (self.cats[lane][0],)
    def state_generator(self):
        """Build the state tuple: current lane plus (car, cat) info for the
        current, left and right lanes; -2 marks a lane that does not exist."""
        self.state = (self.lane,)
        self.checker(self.lane)
        if self.lane > 0:
            self.checker(self.lane-1)
        else:
            self.state += (-2, -2)
        if self.lane < self.num_lanes-1:
            self.checker(self.lane+1)
        else:
            self.state += (-2, -2)
    def clip(self, x):
        """Clamp a lane index into [0, num_lanes-1]."""
        return min(max(x, 0), self.num_lanes-1)
    def step(self, action):
        """Advance one step with *action* (0 straight, 1 right, 2 left).

        Returns (state, reward, done).  Raises AssertionError on an invalid
        action (kept for backward compatibility with existing callers).
        """
        self.timestamp += 1
        if action not in self.actions:
            raise AssertionError
        if action == 1:
            next_lane = self.clip(self.lane + 1)
        elif action == 2:
            next_lane = self.clip(self.lane - 1)
        else:
            next_lane = self.lane
        # Move all traffic toward the agent.
        for lane in range(self.num_lanes):
            self.cats[lane] = [pos - self.cat_speed for pos in self.cats[lane]]
            self.cars[lane] = [pos - self.car_speed for pos in self.cars[lane]]
        cat_hit = 0
        car_hit = 0
        if self.lane != next_lane:
            # Changing lanes: collisions may occur in both lanes involved.
            for cat in self.cats[self.lane] + self.cats[next_lane]:
                if cat <= 0: cat_hit += 1
            for car in self.cars[self.lane] + self.cars[next_lane]:
                if car <= 0: car_hit += 1
            self.lane = next_lane
        else:
            # Staying put: only the current lane matters.
            for cat in self.cats[self.lane]:
                if cat <= 0: cat_hit += 1
            for car in self.cars[self.lane]:
                if car <= 0: car_hit += 1
        # Drop objects that left the grid.
        for lane in range(self.num_lanes):
            self.cats[lane] = [pos for pos in self.cats[lane] if pos > 0]
            self.cars[lane] = [pos for pos in self.cars[lane] if pos > 0]
        # Spawn new cars/cats at the far end of a random lane.
        # BUG FIX: the spawn lane was drawn with np.random.randint(5),
        # hard-coding 5 lanes and ignoring the num_lanes argument.
        if np.random.rand() < self.p_car:
            self.cars[np.random.randint(self.num_lanes)].append(self.road_length)
        if np.random.rand() < self.p_cat:
            self.cats[np.random.randint(self.num_lanes)].append(self.road_length)
        if self.ishuman_n: # human policy "Driving and avoiding": negative reward for hitting the cat
            reward = -20 * cat_hit + -1 * car_hit + 0.5 * (action == 0)
        elif self.ishuman_p: # human policy "Driving and Rescuing": positive reward for crossing the cat
            reward = 20 * cat_hit + -1 * car_hit + 0.5 * (action == 0)
        else:
            reward = -20 * car_hit + 0.5 * (action == 0) # Classic agent, bigger penalty on the car hitting
        self.num_collision += car_hit
        self.num_hit_cat += cat_hit
        if self.timestamp >= self.sim_len:
            self.done = True
        self.state_generator()
        return self.state, reward, self.done
    def log(self):
        """Return (total car collisions, total cat hits) for the episode."""
        return self.num_collision, self.num_hit_cat
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__version__ = '1.0.1'
""" Error classes used for database models"""
class UserEmailError(AttributeError):
    """Raised when a user registers an e-mail address that is already taken."""
    def __init__(self, email, *args, **kwargs):
        self.message = "E-mail: {} is already taken!".format(email)
        # BUG FIX: the original called super().__init__(args, kwargs),
        # passing the raw tuple and dict as two positional arguments, so the
        # built message never reached the exception and str(exc) showed
        # "((), {})".  Forward the message (plus any extra positionals).
        super().__init__(self.message, *args)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 16-12-4 下午8:52
# @Author : sadscv
# @File : chunkFreatures.py
import nltk
from chunkFreatures import npchunk_features_ultimate as \
npchunk_features
class ConsecutiveNPChunkTagger(nltk.TaggerI):
    """Uses a tagger as a chunker: IOB chunk labels are treated as tags
    that a classifier predicts token by token."""
    def __init__(self, train_sents):
        """Train a maximum-entropy classifier on per-token feature sets.

        :param train_sents: sentences of ((word, pos), chunk) pairs.
        """
        train_set = []
        for tagged_sent in train_sents:
            # Strip the chunk labels; features only see the (word, pos) pairs.
            untagged_sent = nltk.tag.untag(tagged_sent)
            # Chunk labels assigned so far in this sentence.
            history = []
            for i, (word, tag) in enumerate(tagged_sent):
                train_set.append(
                    (npchunk_features(untagged_sent, i, history), tag))
                history.append(tag)
        # Maximum-entropy classifier trained via the external 'megam' backend.
        self.classifier = nltk.MaxentClassifier.train(
            train_set, algorithm='megam', trace=3)
    def tag(self, sentence):
        """Label a (word, pos) sentence.

        :return: ((word, pos), chunk) pairs, as an iterator.
        """
        # Chunk labels predicted so far; each prediction conditions the next.
        history = []
        for i, word in enumerate(sentence):
            featureset = npchunk_features(sentence, i, history)
            history.append(self.classifier.classify(featureset))
        return zip(sentence, history)
class ConsecutiveNPChunker(nltk.ChunkParserI):
    """Chunk parser backed by a trained ConsecutiveNPChunkTagger."""
    def __init__(self, train_sents):
        """Train the underlying tagger.

        :param train_sents: chunk trees used as training data.
        """
        # Flatten each tree to conll (word, pos, chunk) triples and regroup
        # them as ((word, pos), chunk) pairs for the tagger.
        tagged_sents = [
            [((w, t), c) for (w, t, c) in nltk.chunk.tree2conlltags(sent)]
            for sent in train_sents
        ]
        self.tagger = ConsecutiveNPChunkTagger(tagged_sents)
    def parse(self, sentence):
        """Chunk a (word, pos) sentence.

        :return: the chunked sentence as an nltk tree.
        """
        tagged = self.tagger.tag(sentence)
        conlltags = [(w, t, c) for ((w, t), c) in tagged]
        return nltk.chunk.conlltags2tree(conlltags)
|
from django.db import models
import datetime
class Region(models.Model):
    """A geographic region; groups Ciudad rows."""
    nombre = models.CharField(max_length=100)
    def __str__(self):
        return self.nombre
# Location hierarchy: Region > Ciudad > Direccion.
class Ciudad(models.Model):
    """A city belonging to a Region."""
    nombre = models.CharField(max_length=100)
    region = models.ForeignKey(Region, on_delete=models.CASCADE, blank=True, null=True)
    def __str__(self):
        return self.nombre
class Direccion(models.Model):
    """A street address (street, number, optional apartment) in a Ciudad."""
    calle = models.CharField(max_length=100)
    numero = models.IntegerField(default=0)
    departamento = models.CharField(max_length=10, null=True, blank=True)
    ciudad = models.ForeignKey(Ciudad, on_delete=models.CASCADE, blank=True, null=True)
class Lector(models.Model):
    """A reader (customer) with optional contact details and address."""
    nombre = models.CharField(max_length=100)
    sexo = models.CharField(max_length=15)
    correo = models.EmailField(null=True, blank= True)
    direccion = models.ForeignKey(Direccion, on_delete=models.CASCADE, blank=True, null=True)
    def __str__(self):
        return self.nombre
class Dealer(models.Model):
    """A book dealer account.

    NOTE(review): `contrasena` appears to hold a password in a plain
    CharField — confirm it is hashed before storage.
    """
    nombre = models.CharField(max_length=100)
    fono = models.CharField(max_length=13)
    correo = models.EmailField(default='')
    contrasena = models.CharField(max_length=100)
    direccion = models.ForeignKey(Direccion, on_delete=models.CASCADE, null = True, blank=True)
    def __str__(self):
        return self.nombre
class Pedido(models.Model):
    """An order placed with a Dealer; `fecha` defaults to creation time
    (callable default, evaluated per row)."""
    fecha = models.DateTimeField(default=datetime.datetime.now)
    total = models.IntegerField(default=0)
    estado = models.CharField(max_length=15)
    dealer = models.ForeignKey(Dealer, on_delete=models.CASCADE, blank=True, null=True)
class Editorial(models.Model):
    """A publisher account.

    NOTE(review): `contrasena` appears to hold a password in a plain
    CharField — confirm it is hashed before storage.
    """
    nombre = models.CharField(max_length=50)
    correo = models.EmailField(default='')
    contrasena = models.CharField(max_length=100)
    direccion = models.ForeignKey(Direccion, on_delete=models.CASCADE, blank=True, null=True)
    def __str__(self):
        return self.nombre
class Autor(models.Model):
    """A book author."""
    nombre = models.CharField(max_length=100)
    def __str__(self):
        return self.nombre
class Genero(models.Model):
    """A book genre; `ventas` accumulates sales for the genre."""
    tipo = models.CharField(max_length = 50, unique=True)
    ventas = models.IntegerField(default=0)
    def __str__(self):
        return self.tipo
class Libro(models.Model):
    """A book in the catalogue, linked to publisher, author and genre.

    NOTE(review): `fecha` is a DateField with a datetime.datetime.now
    default — a date callable (datetime.date.today) would be the usual
    choice; confirm intent before changing, as it affects stored values.
    """
    titulo = models.CharField(max_length=100)
    ano = models.IntegerField(default=0)
    precio = models.IntegerField(default=0)
    cantidad = models.IntegerField(default=0)
    descripcion = models.TextField(default='')
    imagenURL = models.TextField(default='')
    fecha = models.DateField(default=datetime.datetime.now)
    venta = models.IntegerField(default=0)
    editorial = models.ForeignKey(Editorial, on_delete=models.CASCADE, blank=True, null=True)
    autor = models.ForeignKey(Autor, on_delete=models.CASCADE, blank=True, null=True)
    genero = models.ForeignKey(Genero, on_delete=models.CASCADE, blank=True, null=True)
    def __str__(self):
        return self.titulo
class Rating(models.Model):
    """A reader's rating of a dealer, stored as a percentage."""
    porcentaje = models.FloatField()
    lector = models.ForeignKey(Lector, on_delete=models.CASCADE, blank=True, null=True)
    dealer = models.ForeignKey(Dealer, on_delete=models.CASCADE, blank=True, null=True)
class Dealer_Catalogo(models.Model):
    """Join table: which books a dealer offers."""
    dealer = models.ForeignKey(Dealer, on_delete=models.CASCADE, blank=True, null=True)
    libro = models.ForeignKey(Libro, on_delete=models.CASCADE, blank=True, null=True)
class Pedido_Libro(models.Model):
    """Join table: which books belong to an order."""
    pedido = models.ForeignKey(Pedido, on_delete=models.CASCADE, blank=True, null=True)
    libro = models.ForeignKey(Libro, on_delete=models.CASCADE, blank=True, null=True)
class Pedido_Lector(models.Model):
    """Join table: which reader placed an order."""
    pedido = models.ForeignKey(Pedido, on_delete=models.CASCADE, blank=True, null=True)
    lector = models.ForeignKey(Lector, on_delete=models.CASCADE, blank=True, null=True)
|
from Character import Character
from data import features
class FighterClass(Character):
    """Fighter specialisation of Character.

    Auto-attacks with the first skill in its skill book when it has enough
    energy, falling back to a plain weapon hit otherwise.
    """
    def __init__(self, params):
        super().__init__(params)
        # self.stamina = params['stamina']
        # self.combat_skill = params['combat_skill']
    def fight_auto_attack(self, enemy):
        """Perform one automatic attack on *enemy*; returns the damage dealt.

        Flow: if energy is below the skill's cost, try an energy potion.
        If no potion remains and energy is still too low, land a plain
        10-damage weapon hit; if energy suffices (possibly after the
        potion), use the skill instead.
        NOTE(review): if a potion exists but does not restore enough energy
        for the skill, neither branch fires and 0 damage is dealt this
        turn — confirm this is intended.
        """
        damage = 0
        if self.energy < features["skill"][self.skill_book[0]]["energy_cost"]:
            self.use_potion('energypotion')
        if self.potions['energypotion'] < 1 and \
           self.energy < features["skill"][self.skill_book[0]]["energy_cost"]:
            print(f'{self.name} {self.weapon["message"]} {enemy.name}')
            damage = 10
            # Never drop the enemy below zero lives.
            if enemy.lives >= damage:
                enemy.lives -= damage
            else:
                enemy.lives = 0
        if self.energy >= features["skill"][self.skill_book[0]]["energy_cost"]:
            print(f'{self.name} {features["skill"][self.skill_book[0]]["message"] } {enemy.name}')
            # Skill damage scales with the fighter's own remaining lives.
            damage = (features['skill'][self.skill_book[0]]['dmg'] * self.lives) // 100
            if enemy.lives >= damage:
                enemy.lives -= damage
            else:
                enemy.lives = 0
            self.energy -= features['skill'][self.skill_book[0]]['energy_cost']
        return damage
|
#!/usr/bin/python3
import os
import sys

# Expect exactly one argument: a repo-manifest XML file.  For each project
# entry, create a matching bare git repository.
if len(sys.argv) == 1:
    print('错误!请传入 xml 文件')
elif len(sys.argv) > 2:
    print('错误!传入参数太多')
else:
    print('传入的文件是 %s' % sys.argv[1])
    with open(sys.argv[1], 'r') as fin:
        # readline() returns '' only at end of file.
        for linestr in iter(fin.readline, ''):
            # Only lines carrying a name attribute and a project/path hint.
            if (('name=' in linestr) or ('name =' in linestr)) and (('project' in linestr) or ('path' in linestr)):
                # Extract the value between 'name="' and the next '"'.
                marker = 'name="'
                start = linestr.index(marker) + len(marker)
                end = linestr.index('"', start)
                gitprojstr = linestr[start:end]
                # Create the bare repository named after the project.
                cmd = 'git init --bare %s.git' % gitprojstr
                print(cmd)
                os.system(cmd)
|
import os
from kivy.app import App
from kivy.uix.label import Label
from kivy.uix.behaviors import ButtonBehavior
from kivy.uix.image import Image
from db.sqlite3_connect import select_data, insert_data
from custom_gestures import gesture_nd as gesture
from utils.common import num_of_word_to_study
class ImageButton(ButtonBehavior, Image):
    """An Image widget that also behaves like a button (clickable image)."""
    pass
class InfoPage(gesture.GestureBox):
    """Main study page of the vocabulary app.

    Shows one word at a time from the `today` queue and reschedules it
    according to the Know / Indistinct / Unknown buttons.
    NOTE(review): SQL statements throughout this class are built with %
    string formatting; fine for trusted local data, but parameterized
    queries would be safer if word text can contain quotes.
    """
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Load the first word to study.
        self.read_random_word()
        self.set_name()
        # Word ids due for spaced-repetition review (filled lazily).
        self.review_res = []
    def know_button_press(self):
        """Handler for the Know button."""
        # Remove the word the user already knows and record it as known.
        self.delete_today_word()
        self.insert_known_word()
        self.read_random_word()
        # self.__init__()
    def indistinct_button_press(self):
        """Handler for the Indistinct button."""
        # Re-queue the current word twice so it reappears more often.
        self.insert_unknown_word()
        self.insert_unknown_word()
        self.read_random_word()
        # self.__init__()
    def unknown_button_press(self):
        """Handler for the Unknown button."""
        # Re-queue the current word three times.
        self.insert_unknown_word()
        self.insert_unknown_word()
        self.insert_unknown_word()
        self.read_random_word()
        # self.__init__()
    def read_random_word(self):
        """Pick one random queued word and show it, or show the
        appropriate empty-state message when the queue is exhausted."""
        sql = "SELECT * FROM word WHERE id = (SELECT word_id FROM today ORDER BY RANDOM() limit 1)"
        rows = select_data(sql)
        if len(rows) == 0:
            # Clear all widgets first.
            self.ids.main_box.clear_widgets()
            # Then add the widgets for the empty-queue state.
            # self.ids.main_box.add_widget(self.ids.personal_box)
            word_to_study = num_of_word_to_study()
            if word_to_study == 0:
                # Words past their forgetting-curve checkpoint, per add_times.
                sql = "SELECT word_id FROM known WHERE id NOT IN(SELECT id FROM known WHERE (create_date > date('now','-1 day') AND add_times >= 0) OR (create_date > date('now','-3 day') AND add_times >= 1) OR (create_date > date('now','-7 day') AND add_times >= 2) OR (create_date > date('now','-14 day') AND add_times >= 3) OR (create_date > date('now','-30 day') AND add_times >= 4) OR (create_date > date('now','-60 day') AND add_times >= 5) OR (create_date > date('now','-150 day') AND add_times >= 6) OR (create_date > date('now','-365 day') AND add_times >= 7) OR (create_date > date('now','-770 day') AND add_times >= 8))"
                self.review_res = select_data(sql)
                if not len(self.review_res) > 0:
                    self.ids.main_box.add_widget(Label(text='[ref=review word]点击这段文字来开始复习达到遗忘临界点的单词!\n下拉刷新![/ref]',
                                                       markup=True, color=(0, 0, 0, 1), halign='center', font_name="site_packages/DroidSansFallback.ttf", on_ref_press=self.review_word))
                else:
                    self.ids.main_box.add_widget(Label(text='今日学习任务完成!', color=(0, 0, 0, 1), font_name="site_packages/DroidSansFallback.ttf"))
            else:
                self.ids.main_box.add_widget(Label(text='[ref=add word]还需添加 %s 个单词!\n下拉刷新![/ref]' % word_to_study,
                                                   markup=True, color=(0, 0, 0, 1), font_name="site_packages/DroidSansFallback.ttf",
                                                   on_ref_press=self.press_word_to_study_label))
        else:
            # Hide the three labels and the button row until the user taps.
            self.ids.main_box.clear_widgets(children=[self.ids.three_labels_box, self.ids.box_button_anchor])
            # Any tap reveals the details (see anywhere_touch_down).
            self.ids.main_box.bind(on_touch_down=self.anywhere_touch_down)
            self.ids.word_to_study.text = rows[0][1]
            self.ids.phonetic.text = rows[0][3]
    def show_detail_word(self):
        """Reveal the hidden parts of the current word (meaning, examples)."""
        word = self.ids.word_to_study.text
        sql = "SELECT * FROM word WHERE word = '%s'" % word
        rows = select_data(sql)
        self.ids.main_box.clear_widgets(children=[self.ids.box_button_anchor, self.ids.three_labels_box])
        self.ids.main_box.add_widget(self.ids.three_labels_box)
        self.ids.main_box.add_widget(self.ids.box_button_anchor)
        from utils.common import sub_str_len
        self.ids.word_to_study.text = rows[0][1]
        self.ids.explain_word.text = sub_str_len(rows[0][2].strip(), 3)
        self.ids.phonetic.text = rows[0][3]
        self.ids.examples_en.text = rows[0][4]
        self.ids.examples_cn.text = rows[0][5]
    def insert_unknown_word(self):
        """Insert one copy of the current word into the today queue."""
        word = self.ids.word_to_study.text
        sql = "INSERT INTO today (word_id) SELECT id FROM word WHERE word = '%s'" % word
        insert_data(sql)
    def delete_today_word(self):
        """Delete one copy of the current word from the today queue."""
        word = self.ids.word_to_study.text
        sql = "DELETE FROM today WHERE id = (SELECT id FROM today WHERE word_id = (SELECT id FROM word WHERE word = '%s') LIMIT 1)" % word
        insert_data(sql)
    def insert_known_word(self):
        """Record the current word in the known table (once per word)."""
        import datetime
        now = datetime.datetime.now().strftime("%Y-%m-%d")
        word = self.ids.word_to_study.text
        sql = "INSERT INTO known(word_id,create_date) SELECT (SELECT id FROM word WHERE word = '%s'),'%s' WHERE NOT EXISTS(SELECT 1 FROM known WHERE word_id = (SELECT id FROM word WHERE word = '%s'))" % (word, now, word)
        print(sql)
        insert_data(sql)
    def play_word(self):
        """Play the word's MP3 file, dispatching on the platform."""
        word = self.ids.word_to_study.text
        storage_path = 'mp3/%s.mp3' % word
        if os.name == 'nt':
            self.window_play_word(storage_path)
        elif os.name == 'posix':
            self.unix_play_word(storage_path)
    @staticmethod
    def window_play_word(storage_path):
        # Desktop playback via kivy's audio loader.
        from kivy.core.audio import SoundLoader
        sound = SoundLoader.load(storage_path)
        sound.play()
    @staticmethod
    def unix_play_word(storage_path):
        # Android playback via Pyjnius (requires Pyjnius installed).
        from jnius import autoclass
        MediaPlayer = autoclass('android.media.MediaPlayer')
        player = MediaPlayer()
        if player.isPlaying():
            player.stop()
            player.reset()
        try:
            player.setDataSource(storage_path)
            player.prepare()
            player.start()
        except:
            player.reset()
    def review_word(self, a, b):
        """Re-queue every word due for review and bump its review count."""
        # sql = "SELECT word_id FROM known WHERE id NOT IN(SELECT id FROM known WHERE (create_date > date('now','-1 day') AND add_times >= 0) OR (create_date > date('now','-3 day') AND add_times >= 1) OR (create_date > date('now','-7 day') AND add_times >= 2) OR (create_date > date('now','-14 day') AND add_times >= 3) OR (create_date > date('now','-30 day') AND add_times >= 4) OR (create_date > date('now','-60 day') AND add_times >= 5) OR (create_date > date('now','-150 day') AND add_times >= 6) OR (create_date > date('now','-365 day') AND add_times >= 7) OR (create_date > date('now','-770 day') AND add_times >= 8))"
        # res = select_data(sql)
        for word_id_tuple in self.review_res:
            word_id = word_id_tuple[0]
            # Insert the word (if not already queued) and bump add_times.
            insert_sql = "INSERT INTO today(word_id) SELECT '%d' WHERE NOT EXISTS(SELECT 1 FROM today WHERE word_id = '%d')" % (word_id, word_id)
            update_sql = "UPDATE known SET add_times = add_times + 1 WHERE word_id = '%d'" % word_id
            insert_data(insert_sql)
            insert_data(update_sql)
    def refresh_button(self):
        """Rebuild the page (pull-to-refresh handler)."""
        # Clear everything first.
        self.ids.main_box.clear_widgets()
        # Re-add the widgets the page needs.
        # self.ids.main_box.add_widget(self.ids.personal_box)
        self.ids.main_box.add_widget(self.ids.word_phonetic_box)
        # Then load a fresh random word.
        self.read_random_word()
    def anywhere_touch_down(self, instance, args):
        """Runs on any tap while the word details are hidden: unbind
        itself, play the audio and reveal the details."""
        print("info.py anywhere_touch_down is running")
        self.ids.main_box.unbind(on_touch_down=self.anywhere_touch_down)
        self.play_word()
        self.show_detail_word()
    def set_name(self):
        """Populate the slide-menu name label from the user table."""
        sql = 'SELECT name FROM user WHERE id=1'
        res = select_data(sql)
        self.ids.slide_name_label.text = res[0][0]
    @staticmethod
    def press_word_to_study_label(instance, args):
        App.get_running_app().screen_manager.transition.direction = 'left'
        App.get_running_app().screen_manager.current = 'AddWord'
    @staticmethod
    def me_slide_button():
        App.get_running_app().screen_manager.transition.direction = 'left'
        App.get_running_app().screen_manager.current = 'Me'
    @staticmethod
    def add_word_slide_button():
        App.get_running_app().screen_manager.transition.direction = 'left'
        App.get_running_app().screen_manager.current = 'AddWord'
    @staticmethod
    def review_slide_button():
        App.get_running_app().screen_manager.transition.direction = 'left'
        App.get_running_app().screen_manager.current = 'Review'
    @staticmethod
    def upload_slide_button():
        App.get_running_app().screen_manager.transition.direction = 'left'
        App.get_running_app().screen_manager.current = 'Upload'
    @staticmethod
    def help_slide_button():
        App.get_running_app().screen_manager.transition.direction = 'left'
        App.get_running_app().screen_manager.current = 'Help'
|
import os
import threading
import socket
import sys
import struct
import time
lock = threading.Lock()
class clientreceiver(threading.Thread):
    """Waits for the ACK of one packet, retransmitting on timeout or on a
    bad/mismatched ACK (stop-and-wait ARQ, client side).

    Rewritten as a loop: the original mutual recursion between run() and
    retransmit() grew the stack by one frame per retry and could raise
    RecursionError on a lossy link.
    """
    def __init__(self, hostname, port, clientsocket, packet, seqnum):
        threading.Thread.__init__(self)
        self.port = port
        self.hostname = hostname
        self.clientsocket = clientsocket
        self.packet = packet          # raw packet to retransmit if needed
        self.sequencenum = seqnum     # sequence number we expect ACKed
        self.start()
    def retransmit(self):
        """Resend the packet and wait for its ACK (kept for API
        compatibility with the original class)."""
        self.clientsocket.sendto(self.packet, (self.hostname, socket.htons(self.port)))
        self.run()
    def run(self):
        # NOTE(review): the destination port is byte-swapped with htons()
        # before sendto(); this matches clientsender, but sendto() normally
        # expects a host-order port — confirm against the server side.
        while True:
            try:
                self.clientsocket.settimeout(0.2)
                ackrxd, serveraddr = self.clientsocket.recvfrom(4096)
            except socket.timeout:
                # No ACK in time: retransmit and keep waiting.
                self.clientsocket.sendto(self.packet, (self.hostname, socket.htons(self.port)))
                continue
            # ACK layout: 4-byte sequence number, 2-byte zero field,
            # 2-byte ACK identifier (43690 == 0xAAAA).
            sequenceno = int(struct.unpack("=I", ackrxd[0:4])[0])
            paddingbits = int(struct.unpack("=H", ackrxd[4:6])[0])
            ackidentifier = int(struct.unpack("=H", ackrxd[6:])[0])
            if paddingbits == 0 and ackidentifier == 43690 and self.sequencenum == sequenceno:
                # Correct ACK received; 32-bit sequence numbers wrap to 0.
                if sequenceno == 4294967295:
                    sequenceno = 0
                return
            # Not an ACK, or wrong sequence number: retransmit and retry.
            self.clientsocket.sendto(self.packet, (self.hostname, socket.htons(self.port)))
class clientsender(threading.Thread):
    """Reads a file in MSS-sized chunks and sends each chunk as one UDP
    packet, waiting for its ACK (via clientreceiver) before the next."""
    def __init__(self, hostname, port, filename, MSS, clientsocket):
        threading.Thread.__init__(self)
        self.port = port
        self.hostname = hostname
        self.clientsocket = clientsocket
        self.filename = filename
        self.MSS = MSS  # maximum segment size (payload bytes per packet)
        self.start()
    def dochecksum(self, filesend):
        """16-bit internet-style checksum over the payload string.

        NOTE(review): the carry fold (`sumcarrynew`) is only assigned inside
        the `while tempdata >> 16` loop, so whenever the byte sum fits in 16
        bits this returns 0xFFFF regardless of the data — the checksum then
        does not protect those packets.  The server presumably computes the
        same value, so fixing only the client would break the protocol;
        coordinate any fix with the receiver side.
        """
        sumcarrynew = 0
        tempdata=0
        i=0
        n = len(filesend) % 2
        # Sum byte pairs little-endian (low byte first).
        for i in range(0, len(filesend)-n, 2):
            tempdata += ord(filesend[i]) + (ord(filesend[i + 1]) << 8)
        if n:
            tempdata+=ord(filesend[i+1])
        while tempdata >> 16:
            #sumcarry = sumcarry + tempdata
            sumcarrynew = (tempdata & 0xffff) + (tempdata >> 16)
            break
        return ~sumcarrynew & 0xffff
    def makepacket(self, filesend, seqnum):
        """Build one data packet: 4-byte sequence number, 2-byte checksum,
        2-byte data identifier (21845 == 0x5555), then the payload."""
        checksum = 0
        indicator = 0
        data = filesend.encode('ISO-8859-1','ignore')
        filesend = data.decode('ISO-8859-1','ignore')
        # encoding and packing since python 3.X accepts only byte like objects in sendto()
        # = is for native standardized byte ordering
        # I = unsigned int(32 bit) H= unsigned short (16 bits)
        sequence = struct.pack('=I', seqnum)
        checksum = struct.pack('=H', self.dochecksum(filesend))
        indicator = struct.pack('=H', 21845)
        packet = sequence + checksum + indicator + filesend.encode('ISO-8859-1', 'ignore')
        return packet
    def rdt_send(self):
        """Send the whole file with stop-and-wait reliability, then a
        terminator packet ("00000end111111")."""
        file = open(self.filename, 'r')
        filebyte = True
        addbytes = ""
        sequencenum = 0
        count = 0
        # Accumulate one character at a time until a full MSS chunk is ready.
        while filebyte:
            filebyte = file.read(1)
            addbytes += filebyte
            if len(addbytes) == self.MSS or (not filebyte):
                lock.acquire()
                # Pad the final short chunk with spaces up to MSS.
                while(len(addbytes)<self.MSS):
                    addbytes+=" "
                packet = self.makepacket(addbytes, sequencenum)
                self.clientsocket.sendto(packet, (self.hostname, socket.htons(self.port)))
                # Block until the ACK for this packet arrives.
                ackrxvr = clientreceiver(self.hostname, self.port, self.clientsocket, packet, sequencenum)
                ackrxvr.join()
                lock.release()
                # 32-bit sequence numbers wrap to 0.
                if (sequencenum == 4294967295):
                    sequencenum = 0
                else:
                    sequencenum += 1
                addbytes = ""
        # End-of-transfer marker; sent without waiting for an ACK.
        addbytes = "00000end111111"
        lock.acquire()
        packet = self.makepacket(addbytes, sequencenum)
        self.clientsocket.sendto(packet, (self.hostname, socket.htons(self.port)))
        lock.release()
    def run(self):
        """Time the transfer, report it, then linger briefly so late ACKs
        are not delivered to a closed thread."""
        starttime = time.time()
        self.rdt_send()
        endtime = time.time()
        totaltime = endtime - starttime
        print("Total time for host %s is %.4f sec" % (self.hostname, totaltime))
        time.sleep(10)
time.sleep(10)
def main():
    """Parse CLI args and launch one sender thread per destination host.

    Usage: script host [host ...] port filename MSS
    """
    hosts = sys.argv[1:-3]
    port = int(sys.argv[-3])
    filename = sys.argv[-2]
    mss = int(sys.argv[-1])
    # One shared UDP socket, bound locally to the same port number.
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.bind(('', port))
    for host in hosts:
        clientsender(host, port, filename, mss, sock)
if __name__=='__main__':
    main()
|
from django.conf.urls import url
from ad_hoc_scripts.ad_hoc_scripts import *
from api_views import *
# URL routes for the news/signals API.  Django matches top-to-bottom and
# these patterns are unanchored at the end (no '$'), so order matters.
urlpatterns = [
    url(r'^getnews', GetNewsRecordView.as_view(),name="get_news_results"),
    url(r'^updatenewstactical', UpdateNewsView.as_view(),name="update_news_view"),
    # NOTE(review): 'scorecalcultaions' is misspelled, but it is the public
    # route; renaming it would break existing clients.
    url(r'^scorecalcultaions', ScoreCalculationsView.as_view(),name="score_calculations_view"),
    url(r'^updateintensitybyexecutive', UpdateIntensityView.as_view(),name="update_intersity_score_view"),
    url(r'^getsignalslist', SiganlListView.as_view(),name="signal_list_view"),
    url(r'^newscount', StateWiseNewsCount.as_view(),name="news_count_state_wise"),
    url(r'^get_news_records_for_tactical', GetNewsRecordsForTactical.as_view(),name="news_records_tactical"),
    url(r'^get_news_details_based_on_news_id', GetNewsDetailsBasedOnNewsid.as_view(),name="get_news_details_based_on_news_id"),
    url(r'^get_news_based_on_filter', GetNewsBasedOnFilter.as_view(),name="get_news_based_on_filter_view"),
    url(r'^update_verify_news', UpdateVerifyNews.as_view(),name="update_news_as_verify"),
    url(r'^data_bricks_job_call', DataBrickJobCallView.as_view(),name="data_bricks_job_call_view"),
    url(r'^validate_multiple_news', UpdateMultipleNews.as_view(),name="validate_multiple_news_view"),
    url(r'^update_state_to_unprocessed', UpdateStateToUnprocessed.as_view(),name="update_state_to_unprocessed_view"),
    url(r'^signal_oi_score_calculations', CalculateSignalOiScore.as_view(), name='signal-oi-score-view'),
    url(r'^executive_profiles_list', ExecutiveProfileList.as_view(), name='executive-profile-list-view'),
    url(r'^iris1_company_list', Iris1MasterCompanyList.as_view(), name='iris1-company-list-view'),
    # ad-hoc urls export_records_state_9,
    url(r'^convert_state_2_to_9_by_query', ConvertStateTo9ByQuery.as_view(),name="convert_2_state_to_9"),
    url(r'^get_records_state_9', GetRecordsState9.as_view(),name="get_records_state_9"),
    url(r'^export_records_state_9', ExpoortRecordsofState9.as_view(),name="export_state_9_records"),
    url(r'^update_signal_names_by_sheet', UpdateSignalsByCSV.as_view(),name="update_signal_names_by_sheet"),
]
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '.\FindIDWindow.ui'
#
# Created by: PyQt5 UI code generator 5.15.4
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_IDFindWindow(object):
    """pyuic5-generated UI for the stored-ID picker dialog.

    NOTE: this class is machine-generated from FindIDWindow.ui; manual
    edits are lost when pyuic5 is re-run.
    """
    def setupUi(self, IDFindWindow):
        # Dialog: a list of stored IDs above a Select/Delete/Close button row.
        IDFindWindow.setObjectName("IDFindWindow")
        IDFindWindow.resize(387, 411)
        self.gridLayout = QtWidgets.QGridLayout(IDFindWindow)
        self.gridLayout.setObjectName("gridLayout")
        self.listWidgetIDs = QtWidgets.QListWidget(IDFindWindow)
        self.listWidgetIDs.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
        self.listWidgetIDs.setTextElideMode(QtCore.Qt.ElideMiddle)
        self.listWidgetIDs.setObjectName("listWidgetIDs")
        self.gridLayout.addWidget(self.listWidgetIDs, 0, 0, 1, 1)
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.pushButtonSelectID = QtWidgets.QPushButton(IDFindWindow)
        self.pushButtonSelectID.setObjectName("pushButtonSelectID")
        self.horizontalLayout.addWidget(self.pushButtonSelectID)
        self.pushButtonDeleteID = QtWidgets.QPushButton(IDFindWindow)
        self.pushButtonDeleteID.setObjectName("pushButtonDeleteID")
        self.horizontalLayout.addWidget(self.pushButtonDeleteID)
        self.pushButtonClose = QtWidgets.QPushButton(IDFindWindow)
        self.pushButtonClose.setObjectName("pushButtonClose")
        self.horizontalLayout.addWidget(self.pushButtonClose)
        self.gridLayout.addLayout(self.horizontalLayout, 1, 0, 1, 1)
        self.retranslateUi(IDFindWindow)
        QtCore.QMetaObject.connectSlotsByName(IDFindWindow)
    def retranslateUi(self, IDFindWindow):
        # Button captions are Korean: 선택=Select, 삭제=Delete, 닫기=Close.
        _translate = QtCore.QCoreApplication.translate
        IDFindWindow.setWindowTitle(_translate("IDFindWindow", "Stored IDs"))
        self.pushButtonSelectID.setText(_translate("IDFindWindow", "선택"))
        self.pushButtonDeleteID.setText(_translate("IDFindWindow", "삭제"))
        self.pushButtonClose.setText(_translate("IDFindWindow", "닫기"))
if __name__ == "__main__":
    # Manual smoke test: show the generated dialog standalone.
    import sys
    app = QtWidgets.QApplication(sys.argv)
    IDFindWindow = QtWidgets.QDialog()
    ui = Ui_IDFindWindow()
    ui.setupUi(IDFindWindow)
    IDFindWindow.show()
    # exec_() blocks until the dialog closes; its code becomes the exit status.
    sys.exit(app.exec_())
|
from tkinter import *
# Minimal Tk demo: one window containing two stacked labels.
root = Tk()
first_label = Label(root, text='Write some text here!')
second_label = Label(root, text='Something more...')
# pack() stacks widgets top-to-bottom; grid(row=..., column=...) would also work.
first_label.pack()
second_label.pack()
# Hand control to the Tk event loop until the window is closed.
root.mainloop()
|
import configparser
import io
import json
import os
import re
import threading
import zipfile
from pathlib import Path
from wsgiref.util import FileWrapper
import pymysql
from datetime import datetime
from django.core import serializers
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
from django.shortcuts import render
from django.http import HttpResponse, JsonResponse
from DataModel.AnalysisData import AnalysisData
from DataModel.LoadAllData import LoadAllData
from DataModel.LoadIndustryData import LoadIndustryData
from DataModel.LoadSingleData import LoadSingleData
from DataModel.models import LoadDataStatus, AnalysisDataStatus, Role, Resource, User
import django.utils.timezone as timezone
# Serializes directory creation across concurrent request-handler threads.
lock = threading.Lock()
# Base working directory. NOTE(review): CreateTableAndInsertBaseData assigns
# this name locally without a `global` statement, so this module-level value
# is never actually updated -- confirm intent.
globalwkdir=''
# Per-type cache of load-status rows: {'orgdata'|'industry'|'cleandata': [...]}.
globaldatadict={}
# Cached database table names, populated by QueryData via `show tables`.
globaltablenames=[]
def QueryExportStatus(request):
    """Return the cached 'cleandata' export entries, or an empty list if the
    cache has not been populated yet."""
    cached = globaldatadict.get('cleandata')
    if cached is None:
        return JsonResponse({"result": {"status": '200', 'data': []}})
    # Return a shallow copy so callers never mutate the cache itself.
    return JsonResponse({"result": {"status": '200', 'data': list(cached)}})
def CheckCleanExist(request):
    """Check whether the posted clean-table name already exists in the cached
    table list.

    Returns '201' with the matching name(s) when taken, '200' when free,
    '500' on unexpected errors.
    """
    try:
        cleantablename = request.POST['cleantablename']
        matches = [name for name in globaltablenames if name == cleantablename]
        if matches:
            # Preserve the original trailing-comma format ("name,").
            joined = "".join(m + "," for m in matches)
            return JsonResponse({"status": '201', "data": joined})
        return JsonResponse({"status": '200'})
    except Exception as ex:
        print("Exception: %s" % ex)
        return JsonResponse({"result": {"status": '500', "msg": "内部异常"}})
def ExportData2Tables(requset):
    """Merge three base tables plus an industry table into one clean table.

    POST fields: basetable1/2/3, industrytable, cleantable. Refuses to start
    while another 'cleandata' load is running (status '0', returns '201'),
    records progress in LoadDataStatus and delegates to LoadAllData.
    (The parameter is spelled `requset` upstream; kept for compatibility.)
    """
    datatablename1 = requset.POST['basetable1']
    datatablename2 = requset.POST['basetable2']
    datatablename3 = requset.POST['basetable3']
    industrytablename = requset.POST['industrytable']
    cleantable = requset.POST['cleantable']
    tabletype = 'cleandata'
    # Initialize up front: the original raised UnboundLocalError in the
    # except-branch when the failure happened before the row was created.
    loaddatastatus = None
    try:
        loadDataStatuslist = LoadDataStatus.objects.filter(status='0', type=tabletype)
        if len(loadDataStatuslist) > 0:
            loadDataStatuslistres = serializers.serialize("json", loadDataStatuslist)
            return JsonResponse({"result": {"status": '201', "data": loadDataStatuslistres}})
        loaddatastatus = LoadDataStatus(type=tabletype, dirpath=cleantable, status='0', updatetime=timezone.now())
        loaddatastatus.save()
        LoadAllData(datatablename3, datatablename2, datatablename1,
                    industrytablename, cleantable, loaddatastatus.id)
        print("-------end----------")
        return JsonResponse({"result": {"status": '200'}})
    except Exception as ex:
        print("Exception: %s" % ex)
        if loaddatastatus is not None and loaddatastatus.id is not None:
            LoadDataStatus.objects.filter(id=loaddatastatus.id).update(status='1', exceptionmsgs=ex, updatetime=timezone.now())
        return JsonResponse({"result": {"status": '500', "msg": "内部异常"}})
def testroute(request, m):
    """Debug view: echo the captured URL segment into the test template.

    The context keys are kept exactly as the existing runoob.html template
    expects them.
    """
    print(m)
    return render(request, 'runoob.html', {'fuck': m, 'name': 'picture'})
def testupload(request):
    """Receive an uploaded file and store it under <datadir>/<foldername>/.

    POST field: foldername; FILES: file. Directory creation is guarded by
    the module lock so concurrent uploads do not race on makedirs.
    """
    print("--------------------------")
    foldername = request.POST.get('foldername')
    file = request.FILES.get('file')
    config = configparser.ConfigParser()
    config.read('loadsingledataproperties.conf')
    currentdir = os.getcwd()
    basedata = config['loaddatadir']['datadir']
    currentbasedir = currentdir + "\\" + basedata + "\\" + foldername + "\\"
    # try/finally: the original leaked the lock when makedirs raised,
    # deadlocking every subsequent upload.
    lock.acquire()
    try:
        if not os.path.exists(currentbasedir):
            os.makedirs(currentbasedir)
            print("------------目录创建成功!")
        currentfile = currentbasedir + file.name
    finally:
        lock.release()
    # `with` guarantees the handle is closed even if a chunk write fails.
    with open(currentfile, 'wb+') as f:
        for chunk in file.chunks():
            f.write(chunk)
    return JsonResponse({"result": 'ok'})
def QueryByData(request):
    """Return cached load-status rows whose folder name contains `tablename`.

    POST fields: type ('orgdata'/'industry'/'cleandata') and tablename.
    Populates the per-type cache in `globaldatadict` on first use, then
    filters it by substring match on the stored folder name.
    """
    type = request.POST['type']
    tablename = request.POST['tablename']
    if globaldatadict.get(type) is None:
        # Cache miss: build the per-type list once from the database.
        loadDataStatus = LoadDataStatus.objects.filter(type=type, status='1')
        datadictlist = []
        for tmp in loadDataStatus:
            datadict = {}
            datadict['id'] = tmp.id
            datadict['createtime'] = tmp.createtime
            # dirpath ends with a trailing backslash, so the folder name is
            # the second-to-last backslash-separated component.
            parts = tmp.dirpath.split("\\")
            datadict['dirpath'] = parts[len(parts) - 2]
            datadictlist.append(datadict)
        globaldatadict[type] = datadictlist
    chooselist = globaldatadict[type]
    # re.escape: the original compiled raw user input, so names containing
    # regex metacharacters (e.g. '(') crashed with re.error.
    regex_start = re.compile(".*?" + re.escape(tablename) + ".*?")
    reschooselist = []
    for tmp in chooselist:
        if re.match(regex_start, tmp['dirpath']) is not None:
            reschooselist.append(tmp)
    return JsonResponse({"result": {"status": '200', 'data': reschooselist}})
def _status_rows_to_cache(loadstatus_qs):
    """Serialize LoadDataStatus rows into {id, createtime, dirpath} dicts,
    where 'dirpath' is the folder name extracted from the stored path."""
    rows = json.loads(serializers.serialize("json", loadstatus_qs))
    cached = []
    for row in rows:
        entry = {}
        entry['id'] = row['pk']
        entry['createtime'] = row["fields"]['createtime']
        # dirpath ends with '\', so the folder name is the second-to-last part.
        parts = row['fields']['dirpath'].split("\\")
        entry['dirpath'] = parts[len(parts) - 2]
        cached.append(entry)
    return cached
def QueryData(request):
    """Refresh the module-level caches: per-type load-status entries in
    `globaldatadict` and the database table names in `globaltablenames`.

    BUG FIX: the original 'cleandata' branch reused a stale `filename`
    variable from the 'industry' loop for every entry; the shared helper now
    derives each folder name from its own row.
    """
    loadOgrdataDataStatus = LoadDataStatus.objects.filter(type='orgdata', status='1')
    if len(loadOgrdataDataStatus) > 0:
        globaldatadict['orgdata'] = _status_rows_to_cache(loadOgrdataDataStatus)
    # NOTE(review): 'industry'/'cleandata' filter on status='0' (in progress)
    # while 'orgdata' uses status='1' (done) -- confirm the asymmetry is intended.
    loadIndustryDataStatus = LoadDataStatus.objects.filter(type='industry', status='0')
    if len(loadIndustryDataStatus) > 0:
        globaldatadict['industry'] = _status_rows_to_cache(loadIndustryDataStatus)
    loadCleanDataStatus = LoadDataStatus.objects.filter(type='cleandata', status='0')
    if len(loadCleanDataStatus) > 0:
        globaldatadict['cleandata'] = _status_rows_to_cache(loadCleanDataStatus)
    print("---------------query tables---------------------")
    config = configparser.ConfigParser()
    config.read('loadsingledataproperties.conf')
    host = config['db']['host']
    user = config['db']['user']
    password = config['db']['password']
    database = config['db']['database']
    port = int(config['db']['port'])
    # Credentials are no longer echoed to the console (the original printed
    # the database password).
    db = pymysql.connect(host=host, user=user,
                         password=password, database=database, port=port)
    try:
        cursor = db.cursor()
        cursor.execute("show tables;")
        for row in cursor.fetchall():
            globaltablenames.append(row[0])
        cursor.close()
    finally:
        db.close()  # the original leaked the connection
    return JsonResponse({"result": {"status": '200'}})
# Thin template-serving views: each simply renders its page.
def renderQueryAnalysisStatusHtml(request):
    """Render the analysis-status table page."""
    return render(request, 'queryanalysisstatustable.html')
def renderExportLayHtml(request):
    """Render the export-parameters input page."""
    return render(request, 'exportlayinput.html')
def renderShowStaticsDetail(request):
    """Render the statistics detail page."""
    return render(request, 'showstaticsdetail.html')
def renderBaseHtml(request):
    """Render the base-data file-upload page."""
    return render(request, 'fileinput.html')
def renderAnalysisHtml(request):
    """Render the statistics overview page."""
    return render(request, 'showstatics.html')
def renderIndustryHtml(request):
    """Render the industry-data upload page."""
    return render(request, 'industryinput.html')
def renderQueryHtml(request):
    """Render the load-status table page."""
    return render(request, 'querystatustable.html')
def renderindexcompare(request):
    """Render the statistics overview page (same template as renderAnalysisHtml)."""
    return render(request, 'showstatics.html')
def renderinsureanalysislayhtml(request):
    """Render the insurance-analysis layout page."""
    return render(request, 'insureanalysislay.html')
def QueryBaseDataDir(request):
    """Check whether an upload folder named `foldername` already exists.

    Returns '202' plus the list of existing top-level folders on a name
    clash, '200' when the name is free, '500' on unexpected errors.
    """
    foldername = request.POST.get('foldername')
    config = configparser.ConfigParser()
    config.read('loadsingledataproperties.conf')
    currentdir = os.getcwd()
    basedata = config['loaddatadir']['datadir']
    querycurrentbasedir = currentdir + "\\" + basedata + "\\"
    try:
        # next(os.walk(...)) yields only the top level (the original broke
        # out of the loop after the first iteration anyway). The default
        # keeps `subdirs` defined when the base directory does not exist --
        # the original left `dirs` unbound in that case -- and we no longer
        # shadow the builtin `dir`.
        _, subdirs, _ = next(os.walk(querycurrentbasedir), (querycurrentbasedir, [], []))
        if foldername in subdirs:
            return JsonResponse({"result": {"status": '202', "data": subdirs}})
        return JsonResponse({"result": {"status": '200'}})
    except Exception as ex:
        print("Exception: %s" % ex)
        return JsonResponse({"result": {"status": '500', "msg": "内部异常"}})
def QueryBaseDataStatus(request):
    """Report whether a load of the posted type is still in progress.

    Returns '201' plus the serialized in-progress rows when one is running,
    '200' otherwise, '500' on unexpected errors.
    """
    tabletype = request.POST.get('type')
    try:
        pending = LoadDataStatus.objects.filter(status='0', type=tabletype)
        if len(pending) == 0:
            return JsonResponse({"result": {"status": '200'}})
        serialized = serializers.serialize("json", pending)
        return JsonResponse({"result": {"status": '201', "data": serialized}})
    except Exception as ex:
        print("Exception: %s " % ex)
        return JsonResponse({"result": {"status": '500', "msg": "内部异常"}})
def EditStatus(request):
    """Mark the LoadDataStatus row with the posted id as finished (status '1')."""
    try:
        print("------------edit-------------------")
        record_id = request.POST.get('id')
        LoadDataStatus.objects.filter(id=record_id).update(status='1')
        return JsonResponse({"result": {"status": '200'}})
    except Exception as ex:
        print("Exception-----------------: %s" % ex)
        return JsonResponse({"result": {"status": '500'}})
def deleteTable(tablename):
    """Drop `tablename` from the configured MySQL database.

    Connection settings come from loadsingledataproperties.conf.
    WARNING: identifiers cannot be bound as query parameters, so `tablename`
    is interpolated into the DDL statement -- callers must pass trusted
    names only (here it arrives from DeleteStatus's POST data).
    """
    config = configparser.ConfigParser()
    config.read('loadsingledataproperties.conf')
    host = config['db']['host']
    user = config['db']['user']
    password = config['db']['password']
    database = config['db']['database']
    port = int(config['db']['port'])
    # Credentials are no longer printed (the original logged the password).
    db = pymysql.connect(host=host, user=user,
                         password=password, database=database, port=port)
    try:
        cur = db.cursor()
        droptablesql = "drop table if exists " + tablename + ";"
        print("droptablesql:-----------", droptablesql)
        cur.execute(droptablesql)
        cur.close()
    finally:
        db.close()  # the original left the connection open
def deleteDir(dirname):
    """Recursively delete <datadir>/<dirname>/ under the working directory,
    silently doing nothing when the directory does not exist."""
    import shutil  # local import: the module import block is outside this function
    config = configparser.ConfigParser()
    config.read('loadsingledataproperties.conf')
    currentdir = os.getcwd()
    basedata = config['loaddatadir']['datadir']
    currentbasedir = currentdir + "\\" + basedata + "\\" + dirname + "\\"
    # shutil.rmtree replaces the original hand-rolled os.walk delete loop.
    if Path(currentbasedir).is_dir():
        shutil.rmtree(currentbasedir)
def DeleteStatus(request):
    """Delete a loaded dataset: drop its table, remove its upload folder,
    then mark the LoadDataStatus row as deleted (status '2').

    POST fields: filename (the table name) and id (the status row id).
    Each failing stage records the exception on the row and returns '500'
    with a stage-specific message.
    """
    try:
        print("------------delete table-------------------")
        tablename = request.POST.get('filename')
        deleteTable(tablename)
    except Exception as ex:
        print("------------delete-------------------")
        # NOTE: `id` shadows the builtin; left unchanged in this doc pass.
        id = request.POST.get('id')
        LoadDataStatus.objects.filter(id=id).update(status='2', exceptionmsgs=ex, updatetime=timezone.now())
        print("Exception-----------------: %s" % ex)
        return JsonResponse({"result": {"status": '500', 'msg': "表删除出现异常"}})
    try:
        print("------------delete firdir-------------------")
        # The upload folder shares its name with the table.
        deleteDir(tablename)
    except Exception as ex:
        print("------------delete-------------------")
        id = request.POST.get('id')
        LoadDataStatus.objects.filter(id=id).update(status='2', exceptionmsgs=ex, updatetime=timezone.now())
        print("Exception-----------------: %s" % ex)
        return JsonResponse({"result": {"status": '500', 'msg': "文件夹删除出现异常"}})
    print("------------delete-------------------")
    # Both stages succeeded: mark the row deleted without an exception message.
    id = request.POST.get('id')
    LoadDataStatus.objects.filter(id=id).update(status='2', updatetime=timezone.now())
    return JsonResponse({"result": {"status": '200'}})
def QueryAnysisStatus(request):
    """Paginated search over AnalysisDataStatus rows.

    GET params: page, pageSize, analysistablename, industry, status,
    createtime/updatetime ('YYYY-MM-DD', matched by calendar day).
    Returns {'total': N, 'rows': [...]}; an invalid/empty page returns {}.
    """
    print("------------------ query page ---------------------")
    page = request.GET.get('page')
    pageSize = int(request.GET.get('pageSize'))
    cleantablename = request.GET.get('analysistablename')
    industry = request.GET.get('industry')
    status = request.GET.get('status')
    createtime = request.GET.get('createtime')
    updatetime = request.GET.get('updatetime')
    # Build the ORM filter kwargs only from the params that were supplied.
    queryparam = {}
    if cleantablename is not None and len(cleantablename.strip()) != 0:
        queryparam['cleantablename'] = cleantablename
    if industry is not None and len(industry.strip()) != 0:
        # BUG FIX: the original assigned the builtin `type` here, so the
        # industry filter never matched the requested value.
        queryparam['industrytablename'] = industry
    if status is not None and len(status.strip()) != 0:
        queryparam['status'] = status
    if createtime is not None and len(createtime.strip()) != 0:
        # Match the whole calendar day of the creation date.
        created = datetime.strptime(createtime.strip(), '%Y-%m-%d').date()
        queryparam['createtime__year'] = created.year
        queryparam['createtime__month'] = created.month
        queryparam['createtime__day'] = created.day
    if updatetime is not None and len(updatetime.strip()) != 0:
        updated = datetime.strptime(updatetime.strip(), '%Y-%m-%d').date()
        queryparam['updatetime__year'] = updated.year
        queryparam['updatetime__month'] = updated.month
        queryparam['updatetime__day'] = updated.day
    if not queryparam:
        loadDataStatuslist = AnalysisDataStatus.objects.all()
    else:
        loadDataStatuslist = AnalysisDataStatus.objects.filter(**queryparam)
    response = {}
    paginator = Paginator(loadDataStatuslist, pageSize)
    total = paginator.count
    try:
        loadDataStatus = paginator.page(page)
        rows = []
        for tmp in loadDataStatus.object_list:
            rows.append({'id': tmp.id, 'cleantablename': tmp.cleantablename,
                         'industrytablename': tmp.industrytablename,
                         'dirpath': tmp.dirpath, 'status': tmp.status,
                         'createtime': tmp.createtime.strftime('%Y-%m-%d %H:%M:%S'),
                         'updatetime': tmp.updatetime.strftime('%Y-%m-%d %H:%M:%S'),
                         'exceptionmsgs': tmp.exceptionmsgs})
        return JsonResponse({'total': total, 'rows': rows})
    except PageNotAnInteger as ex:
        print("PageNotAnInteger------------: %s " % ex)
        return JsonResponse(response)
    except EmptyPage as ex:
        print("EmptyPage-----------------: %s" % ex)
        return JsonResponse(response)
def QueryStatusPage(request):
    """Paginated search over LoadDataStatus rows.

    GET params: page, pageSize, dirpath (folder name under the configured
    data directory), type, status, createtime/updatetime ('YYYY-MM-DD',
    matched by calendar day). Returns {'total': N, 'rows': [...]}; an
    invalid or empty page returns {}.
    """
    print("------------------ query page ---------------------")
    config = configparser.ConfigParser()
    config.read('loadsingledataproperties.conf')
    currentdir = os.getcwd()
    basedata = config['loaddatadir']['datadir']
    page = request.GET.get('page')
    pageSize = int(request.GET.get('pageSize'))
    dirpath = request.GET.get('dirpath')
    type = request.GET.get('type')
    status = request.GET.get('status')
    createtime = request.GET.get('createtime')
    updatetime = request.GET.get('updatetime')
    # Build the ORM filter kwargs only from the params that were supplied.
    queryparam={}
    if dirpath is not None and len(dirpath.strip())!=0:
        # Stored dirpath is the absolute path including a trailing backslash.
        queryparam['dirpath'] = currentdir + "\\" + basedata + "\\" + dirpath + "\\"
    if type is not None and len(type.strip())!=0:
        queryparam['type'] = type
    if status is not None and len(status.strip())!=0:
        queryparam['status'] = status
    if createtime is not None and len(createtime.strip())!=0:
        # Match the whole calendar day of the creation date.
        createtimedatetime=datetime.strptime(createtime.strip(),'%Y-%m-%d')
        createtimeyear=createtimedatetime.date().year
        createtimemonth = createtimedatetime.date().month
        createtimeday = createtimedatetime.date().day
        queryparam['createtime__year'] = createtimeyear
        queryparam['createtime__month'] = createtimemonth
        queryparam['createtime__day'] = createtimeday
    if updatetime is not None and len(updatetime.strip())!=0:
        # Same day-granularity match for the update date.
        updatetimedatetime = datetime.strptime(updatetime.strip(), '%Y-%m-%d')
        updatetimeyear = updatetimedatetime.date().year
        updatetimemonth = updatetimedatetime.date().month
        updatetimeday = updatetimedatetime.date().day
        queryparam['updatetime__year'] = updatetimeyear
        queryparam['updatetime__month'] = updatetimemonth
        queryparam['updatetime__day'] = updatetimeday
    if queryparam is None or queryparam=={}:
        loadDataStatuslist = LoadDataStatus.objects.all()
    else:
        loadDataStatuslist = LoadDataStatus.objects.filter(**queryparam)
    response = {}
    paginator = Paginator(loadDataStatuslist, pageSize)
    total = paginator.count
    try:
        loadDataStatus = paginator.page(page)
        rows = []
        for tmp in loadDataStatus.object_list:
            # print({'id': tmp.id, 'type': tmp.type, 'status': tmp.status,
            #        'createtime': tmp.createtime,
            #        'updatetime': tmp.updatetime,
            #        'dirpath': tmp.dirpath})
            rows.append({'id': tmp.id, 'type': tmp.type, 'status': tmp.status,
                         'createtime': tmp.createtime.strftime('%Y-%m-%d %H:%M:%S'),
                         'updatetime': tmp.updatetime.strftime('%Y-%m-%d %H:%M:%S'),
                         'dirpath': tmp.dirpath})
            print(tmp)
        # data = serializers.serialize("json", loadDataStatus.object_list)
        # jsonarray=json.loads(data)
        # rows=[]
        # for tmp in jsonarray:
        return JsonResponse({'total': total, 'rows': rows})
    except PageNotAnInteger as ex:
        print("PageNotAnInteger------------: %s " % ex)
        return JsonResponse(response)
    except EmptyPage as ex:
        print("EmptyPage-----------------: %s" % ex)
        return JsonResponse(response)
def CreateTableAndInsertIndustry(request):
    """Load an uploaded industry-mapping file into a new database table.

    POST fields: foldername, tablename, tabletype, filename. Refuses to run
    while another load of the same type is still in progress (returns '201'
    with the pending rows), otherwise records a LoadDataStatus row and runs
    LoadIndustryData.
    """
    print("------------------------------industrydata--------------------------")
    print(request)
    foldername = request.POST.get('foldername')
    tablename = request.POST.get('tablename')
    tabletype = request.POST.get('tabletype')
    filename = request.POST.get('filename')
    config = configparser.ConfigParser()
    config.read('loadsingledataproperties.conf')
    currentdir = os.getcwd()
    basedata = config['loaddatadir']['datadir']
    loaddatastatus = None
    try:
        # status: 0 -- in progress, 1 -- done, 2 -- deleted
        loadDataStatuslist = LoadDataStatus.objects.filter(status='0', type=tabletype)
        if len(loadDataStatuslist) > 0:
            loadDataStatuslistres = serializers.serialize("json", loadDataStatuslist)
            return JsonResponse({"result": {"status": '201', "data": loadDataStatuslistres}})
        currentbasedir = currentdir + "\\" + basedata + "\\" + foldername + "\\"
        loaddatastatus = LoadDataStatus(type=tabletype, dirpath=currentbasedir, status='0')
        loaddatastatus.save()
        print("loaddatastatus save--------------------------")
        LoadIndustryData(currentbasedir, tablename, filename)
        LoadDataStatus.objects.filter(id=loaddatastatus.id).update(status='1', updatetime=timezone.now())
        return JsonResponse({"result": {"status": '200'}})
    except Exception as ex:
        print("Exception: %s" % ex)
        if loaddatastatus is not None and loaddatastatus.id is not None:
            # NOTE(review): the failure path also writes status '1' (done),
            # only distinguishable by exceptionmsgs -- confirm a dedicated
            # error status wasn't intended.
            LoadDataStatus.objects.filter(id=loaddatastatus.id).update(status='1', exceptionmsgs=ex,
                                                                       updatetime=timezone.now())
        return JsonResponse({"result": {"status": '500', "msg": "内部异常"}})
def CreateTableAndInsertBaseData(request):
    """Load an uploaded base-data folder into a new database table.

    POST fields: foldername, tablename, tabletype, filename. Refuses to run
    while another load of the same type is in progress (returns '201' with
    the pending rows), tracks progress in LoadDataStatus and delegates the
    heavy lifting to LoadSingleData.
    """
    # Declare the module global: the original plain assignment created a
    # dead local and never updated module-level `globalwkdir`.
    global globalwkdir
    print("------------------------------basedata--------------------------")
    print(request)
    foldername = request.POST.get('foldername')
    tablename = request.POST.get('tablename')
    tabletype = request.POST.get('tabletype')
    filename = request.POST.get('filename')
    config = configparser.ConfigParser()
    config.read('loadsingledataproperties.conf')
    currentdir = os.getcwd()
    basedata = config['loaddatadir']['datadir']
    loaddatastatus = None
    try:
        globalwkdir = currentdir + "\\" + basedata + "\\"
        # Refuse to start while a load of the same type is still running.
        loadDataStatuslist = LoadDataStatus.objects.filter(status='0', type=tabletype)
        if len(loadDataStatuslist) > 0:
            loadDataStatuslistres = serializers.serialize("json", loadDataStatuslist)
            return JsonResponse({"result": {"status": '201', "data": loadDataStatuslistres}})
        currentbasedir = currentdir + "\\" + basedata + "\\" + foldername + "\\"
        loaddatastatus = LoadDataStatus(type=tabletype, dirpath=currentbasedir, status='0')
        loaddatastatus.save()
        LoadSingleData(currentbasedir, tablename, loaddatastatus.id)
        print("-------end----------")
        return JsonResponse({"result": {"status": '200'}})
    except Exception as ex:
        print("Exception: %s" % ex)
        if loaddatastatus is not None and loaddatastatus.id is not None:
            # NOTE(review): failure path writes status '1' (matches the
            # sibling loaders) with the exception message attached.
            LoadDataStatus.objects.filter(id=loaddatastatus.id).update(status='1', exceptionmsgs=ex, updatetime=timezone.now())
        return JsonResponse({"result": {"status": '500', "msg": "内部异常"}})
def AnalysisProc(cleantablename, industrytable, currentdir):
    """Run the clean-table x industry-table analysis and track its status.

    Rejects the request when the same pair is already being analysed or has
    already completed; otherwise creates the output directory, records an
    AnalysisDataStatus row, runs AnalysisData and marks the row done.
    """
    record = None  # the saved status row; stays None until created
    try:
        running = AnalysisDataStatus.objects.filter(status='0', cleantablename=cleantablename,
                                                    industrytablename=industrytable)
        if len(running) > 0:
            return JsonResponse({"result": {"status": '500', "msg": "存在相同的量两个表数据在分析"}})
        finished = AnalysisDataStatus.objects.filter(status='1', cleantablename=cleantablename,
                                                     industrytablename=industrytable)
        if len(finished) > 0:
            return JsonResponse({"result": {"status": '500', "msg": "已经存在两个表数据分析结果"}})
        # try/finally: the original leaked the lock when makedirs raised.
        lock.acquire()
        try:
            if not os.path.exists(currentdir):
                os.makedirs(currentdir)
                print("------------目录创建成功!")
        finally:
            lock.release()
        record = AnalysisDataStatus(cleantablename=cleantablename, industrytablename=industrytable,
                                    dirpath=(cleantablename + industrytable), status='0')
        record.save()
        AnalysisData((cleantablename + industrytable), currentdir, cleantablename, industrytable, record.id)
        AnalysisDataStatus.objects.filter(id=record.id).update(status='1', updatetime=timezone.now())
        return JsonResponse({"result": {"status": '200'}})
    except Exception as ex:
        print("Exception: %s" % ex)
        # Only the saved row has an .id; the original reused the queryset
        # variable here and could raise again inside the handler.
        if record is not None and record.id is not None:
            AnalysisDataStatus.objects.filter(id=record.id).update(status='2', exceptionmsgs=ex,
                                                                   updatetime=timezone.now())
        return JsonResponse({"result": {"status": '500', "msg": "内部异常"}})
def AnysisData(request):
    """Kick off analysis of a clean table against an industry table.

    POST fields: cleantablename, industrytable, taskid. Delegates the actual
    work and status tracking to AnalysisProc.
    """
    cleantablename = request.POST['cleantablename']
    industrytable = request.POST['industrytable']
    taskid = request.POST['taskid']
    config = configparser.ConfigParser()
    config.read('loadsingledataproperties.conf')
    basedata = config['analysisdir']['datadir']
    # Output directory: <cwd>\<analysisdir>\<cleantable><industrytable>\
    outdir = "{}\\{}\\{}{}\\".format(os.getcwd(), basedata, cleantablename, industrytable)
    return AnalysisProc(cleantablename, industrytable, outdir)
def DownLoadAnalysisFile(request):
    """Zip every file directly under the analysis output directory and
    stream the archive back as a dated attachment.

    GET params: cleantablename, industrytable, dirpath (only dirpath selects
    the directory).
    """
    cleantablename = request.GET['cleantablename']
    industrytable = request.GET['industrytable']
    dirpath = request.GET['dirpath']
    config = configparser.ConfigParser()
    config.read('loadsingledataproperties.conf')
    currentdir = os.getcwd()
    basedata = config['analysisdir']['datadir']
    currentdir = currentdir + "\\" + basedata + "\\" + dirpath + "\\"
    if not os.path.exists(currentdir):
        # The original fell off the end and returned None, which Django
        # turns into an opaque 500; report the error explicitly instead.
        return JsonResponse({"result": {"status": '500', "msg": "内部异常"}})
    buffer = io.BytesIO()
    # `with` closes the archive even on error; `zf` avoids shadowing zip().
    with zipfile.ZipFile(buffer, 'w') as zf:
        for root, dirs, files in os.walk(currentdir):
            for file in files:
                # NOTE(review): entries are written from `currentdir + file`,
                # so files in subdirectories would not resolve -- confirm the
                # output directory is always flat.
                tmpexcelpath = currentdir + file
                zf.write(tmpexcelpath, file)
    buffer.seek(0)  # rewind, otherwise the client receives a corrupt zip
    # FileWrapper streams the buffer chunk by chunk instead of one big read.
    wrapper = FileWrapper(buffer)
    response = HttpResponse(wrapper)
    response['Content-Type'] = 'application/octet-stream'
    response['Content-Disposition'] = 'attachment; filename={}.zip'.format(datetime.now().strftime("%Y-%m-%d"))
    return response
def ShowAllData(request):
    """Return overview chart data for a finished analysis.

    POST fields: cleantablename, industrytable, dirpath. Builds
    echarts-style {name, value} lists for high-value industries (mvi),
    high-penetration industries (mgi) and the product-name list (prolist).
    """
    try:
        cleantablename = request.POST['cleantablename']
        industrytable = request.POST['industrytable']
        dirpath = request.POST['dirpath']
        querydata = AnalysisDataStatus.objects.filter(cleantablename=cleantablename,
                                                      industrytablename=industrytable, status=1).first()
        resultdict = {}
        # High-value industries.
        mvi = json.loads(querydata.mvimsg)
        resultdict['mvi'] = [{'name': k, 'value': v} for k, v in mvi.items()]
        resultdict['mvilegendlist'] = list(mvi.keys())
        # High-penetration industries.
        mgi = json.loads(querydata.mgimsg)
        resultdict['mgi'] = [{'name': k, 'value': v} for k, v in mgi.items()]
        # BUG FIX: the original assigned mvilegendlist here, so the mgi
        # chart displayed the mvi legend labels.
        resultdict['mgilegendlist'] = list(mgi.keys())
        # Product names for the detail drill-down.
        mvp = json.loads(querydata.mvpmsg)
        resultdict['prolist'] = list(mvp.keys())
        print(resultdict)
        return JsonResponse({"result": {"status": '200', "data": resultdict}})
    except Exception as ex:
        print("Exception: %s" % ex)
        return JsonResponse({"result": {"status": '500', "msg": "内部异常"}})
def ShowDetailData(request):
    """Return per-product chart data for a finished analysis.

    POST fields: cleantablename, industrytable, dirpath, proname. Builds
    echarts-style {name, value} lists plus legends for the product's
    high-value (mvp) and high-penetration (mgp) industries.
    """
    try:
        cleantablename = request.POST['cleantablename']
        industrytable = request.POST['industrytable']
        dirpath = request.POST['dirpath']
        proname = request.POST['proname']
        querydata = AnalysisDataStatus.objects.filter(cleantablename=cleantablename,
                                                      industrytablename=industrytable, status=1).first()
        resultdict = {}
        # High-value industries for this product; the first entry of the
        # stored mapping is skipped (as in the original pos==0 logic).
        mvp = json.loads(querydata.mvpmsg)
        mvpdict = []
        mvplegendlist = []
        for idx, (label, amount) in enumerate(mvp.get(proname).items()):
            if idx == 0:
                continue
            mvplegendlist.append(label)
            mvpdict.append({'name': label, 'value': amount})
        resultdict['mvp'] = mvpdict
        resultdict['mvplegendlist'] = mvplegendlist
        # High-penetration industries for this product, same skip rule.
        mgp = json.loads(querydata.mgpmsg)
        mgpdict = []
        mgplegendlist = []
        for idx, (label, share) in enumerate(mgp.get(proname).items()):
            if idx == 0:
                continue
            mgplegendlist.append(label)
            mgpdict.append({'name': label, 'value': share})
        resultdict['mgp'] = mgpdict
        resultdict['mgplegendlist'] = mgplegendlist
        print(resultdict)
        return JsonResponse({"result": {"status": '200', "data": resultdict}})
    except Exception as ex:
        print("Exception: %s" % ex)
        return JsonResponse({"result": {"status": '500', "msg": "内部异常"}})
def GetMenu(request):
    """Build the navigation-menu tree for a user from their roles' resources.

    POST fields: username (read but unused below) and usermobile. Groups
    every accessible Resource under its parent resource and returns
    [{id, name, content: [{id, name, url}, ...]}, ...] plus the user's
    real name.
    """
    username = request.POST['username']
    usermobile = request.POST['usermobile']
    try:
        userinfo = User.objects.filter(usermobile=usermobile).first()
        rolelist = Role.objects.filter(usermobile=usermobile).all()
        # Maps parent-resource id -> list of child menu entries.
        resourcedict = {}
        for tmprole in rolelist:
            resource = Resource.objects.filter(roleid=tmprole.roleid).all()
            for tmpresource in resource:
                if resourcedict.get(tmpresource.resourceparentid) is None:
                    # First child seen for this parent: start its list.
                    tmplist = []
                    tmpresourcedict = {}
                    tmpresourcedict['id'] = tmpresource.resourceid
                    tmpresourcedict['name'] = tmpresource.resourcename
                    tmpresourcedict['url'] = tmpresource.resourceurl
                    tmplist.append(tmpresourcedict)
                    resourcedict[tmpresource.resourceparentid] = tmplist
                else:
                    tmpresourcedict = {}
                    tmpresourcedict['id'] = tmpresource.resourceid
                    tmpresourcedict['name'] = tmpresource.resourcename
                    tmpresourcedict['url'] = tmpresource.resourceurl
                    tmpresultlist = resourcedict.get(tmpresource.resourceparentid)
                    # Skip duplicates granted through multiple roles.
                    flag = False
                    for tmpdict in tmpresultlist:
                        if tmpdict.get('id') == tmpresource.resourceid:
                            flag = True
                            break
                    if flag is True:
                        continue
                    tmpresultlist.append(tmpresourcedict)
        resultlist = []
        for k, v in resourcedict.items():
            # Look up the parent's display name for the top-level entry.
            tmpresultdict = {}
            tmpparentresource = Resource.objects.filter(resourceparentid=k).first()
            tmpresultdict['id'] = tmpparentresource.resourceparentid
            tmpresultdict['name'] = tmpparentresource.resourceparentname
            tmpresultdict['content'] = v
            resultlist.append(tmpresultdict)
        return JsonResponse({"result": {"status": '200', "data": resultlist, 'username': userinfo.realname}})
    except Exception as ex:
        print("Exception: %s" % ex)
        return JsonResponse({"result": {"status": '500', "msg": "内部异常"}})
|
from django.shortcuts import render, reverse, HttpResponseRedirect
from .forms import CustomUserCreationForm
from django.contrib.auth import authenticate, login, logout
def sign_up(request):
    """Handle user registration.

    POST: validate the submitted CustomUserCreationForm, save the new user
    and redirect to the login page; on validation failure re-render the
    bound form so its errors display. GET (or any other method): render a
    blank form.
    """
    if request.method != 'POST':
        # Not a submission: show an empty registration form.
        return render(request, 'users/signup.html', {'form': CustomUserCreationForm()})
    form = CustomUserCreationForm(request.POST)
    if form.is_valid():
        form.save()
        return HttpResponseRedirect(reverse('users:login'))
    # Invalid submission: fall through with the bound (error-carrying) form.
    return render(request, 'users/signup.html', {'form': form})
def login_user(request):
    """Authenticate and log in a user, redirecting home on success.

    On GET, or when the credentials are rejected, the login page is
    rendered again.
    """
    if request.method == "POST":
        submitted_username = request.POST['username']
        submitted_password = request.POST['password']
        user = authenticate(request,
                            username=submitted_username,
                            password=submitted_password)
        if user is not None:
            login(request, user)
            return HttpResponseRedirect(reverse('movies:home'))
    # GET request, or authentication failed: show the login form again.
    return render(request, 'users/login.html')
def logout_user(request):
    """Log the current user out and render the goodbye page."""
    logout(request)
    return render(request, 'users/logout.html')
|
from odoo import models, fields
class SaleReport(models.Model):
    """Extend sale.report with the originating sale contract."""
    _inherit = 'sale.report'
    contract_id = fields.Many2one('sale.contract', 'Contract', readonly=True)
    def _query(self, with_clause='', fields=None, groupby='', from_clause=''):
        """Inject the contract column and its GROUP BY term into the
        sale.report SQL built by the parent implementation.

        BUG FIX: the original used a mutable default argument (fields={}),
        a dict shared across every call made without an explicit `fields`.
        """
        if fields is None:
            fields = {}
        fields['contract_id'] = ", s.contract_id AS contract_id"
        groupby += ', s.contract_id'
        return super(SaleReport, self)._query(with_clause, fields, groupby, from_clause)
|
""" Harry Potter Sorting Hat Quiz by Ryan Smith.
This project is for my class COP 1500 Intro to Computer Science.
The purpose of this project is to have a demonstration of the knowledge
I accumulated during this semester.
Sources: Descriptions of each House Source:
https://harrypotter.fandom.com/wiki/Hogwarts_Houses
Quiz questions: Taken from the official Harry Potter Sorting Hat
Quiz on Pottermore.com (Site now down)
__author__ = Ryan Smith """
# Cite: http://www.newthinktank.com/2016/07/learn-program-9/
# # Taught me how to use classes
class House:
    """
    This class creates each house the user can be sorted into
    Attributes:
        name (str): house name
        score (int): user's score
        descript (lis): description of the house
        percent (float): user's percentage of answers corresponding with
        this house
    """
    def __init__(self, name="", score=0, descript=None, percent=0.0):
        """
        The constructor for the class House
        :param name: house name
        :param score: user's score
        :param descript: description of the house
        :param percent: user's percentage of answers corresponding with
        this house
        """
        # None default avoids the shared-mutable-default-list pitfall.
        if descript is None:
            descript = []
        self.name = name
        self.score = score
        self.descript = descript
        self.percent = percent
    # Cite: http://www.newthinktank.com/2016/07/learn-program-5/
    # This tutorial taught me how to use functions in python
    @staticmethod
    def run_quiz():
        """
        Asks the quiz questions and interpret user input
        :return: integer associated with a specific house
            (a/1 -> 1, b/2 -> 2, c/3 -> 3, d/4 -> 4)
        """
        # Cite: http://www.newthinktank.com/2016/06/learn-program-3/ # This
        # tutorial taught me how to use a while loop to check for proper input
        while True:  # Loop checking for valid input
            try:
                user_input = input()
                if len(user_input) != 1:
                    print("Invalid Input. Try Again.")
                    continue
                elif ('a' <= user_input <= 'd') or ('A' <= user_input <= 'D'):
                    # Letter answers a-d map directly onto 1-4.
                    if user_input in ('A', 'a'):
                        return 1
                    elif user_input in ('B', 'b'):
                        return 2
                    elif user_input in ('C', 'c'):
                        return 3
                    else:
                        return 4
                # Reached only for a single non-letter character: try a digit.
                user_input = int(user_input)
                if user_input > 4 or user_input < 1:
                    print("Invalid Input. Try Again.")
                    continue
                return user_input
            except ValueError:
                # int() failed on a non-numeric character.
                print("Invalid Input. Try Again.")
    @staticmethod
    def do_sorting(gryffindor_score, slytherin_score, ravenclaw_score,
                   hufflepuff_score):
        """
        Sort user into correct house
        :param gryffindor_score: # of time user answered gryffindor
        :param slytherin_score: # of time user answered slytherin
        :param ravenclaw_score: # of time user answered ravenclaw
        :param hufflepuff_score: # of time user answered hufflepuff
        :return: result of quiz -- 0-3 for a clear winner (positional),
            5-8 when the user manually broke a tie
        """
        house = House()
        unsorted_results = [gryffindor_score, slytherin_score, ravenclaw_score,
                            hufflepuff_score]
        sorted_results = sorted(unsorted_results, reverse=True)
        # Every branch returns, so the loop body runs at most once.
        while True:
            if sorted_results[0] != sorted_results[1]:
                # Unique maximum: report which position holds it.
                if unsorted_results[0] == sorted_results[0]:
                    return 0
                elif unsorted_results[1] == sorted_results[0]:
                    return 1
                elif unsorted_results[2] == sorted_results[0]:
                    return 2
                else:
                    return 3
            else:
                # Tie for first place: show percentages, let the user choose.
                print("You tied two for two or more houses. Choose Which "
                      "House you wish to belong to! "
                      "\n1. Gryffindor\n2. Slytherin\n3. Ravenclaw\n4. "
                      "Hufflepuff")
                print("Gryffindor: {:.2f}%".format
                      (find_percentage(gryffindor_score, slytherin_score,
                                       ravenclaw_score, hufflepuff_score)))
                print("Slytherin: {:.2f}%".format
                      (find_percentage(slytherin_score, gryffindor_score,
                                       ravenclaw_score, hufflepuff_score)))
                print("Ravenclaw: {:.2f}%".format
                      (find_percentage(ravenclaw_score, slytherin_score,
                                       gryffindor_score, hufflepuff_score)))
                print("Hufflepuff: {:.2f}%".format
                      (find_percentage(hufflepuff_score, slytherin_score,
                                       ravenclaw_score, gryffindor_score)))
                tiebreaker_input = house.run_quiz()
                # Offset by 4 so the caller can tell a tiebreak from a win.
                if tiebreaker_input == 1:
                    return 5
                elif tiebreaker_input == 2:
                    return 6
                elif tiebreaker_input == 3:
                    return 7
                else:
                    return 8
def find_percentage(house_1, house_2, house_3, house_4):
    """
    Calculates each house's percentage.
    :param house_1: The house whose percentage is being calculated
    :param house_2: used to calculate percentage
    :param house_3: used to calculate percentage
    :param house_4: used to calculate percentage
    :return: house_1's share of the combined score as a percentage
        (0.0 when no answers have been recorded yet)
    """
    total = house_1 + house_2 + house_3 + house_4
    if total == 0:
        # Robustness fix: avoid ZeroDivisionError when every score is 0.
        return 0.0
    return (house_1 / total) * 100
def _read_lines(filename):
    """Return the file's lines with trailing newlines stripped.

    Bug fix: the original opened each file inside a list comprehension and
    never closed the handle; ``with`` guarantees the file is released.
    Cite: https://qiita.com/visualskyrim/items/1922429a07ca5f974467 showed
    how to read the file without \\n at the end of each line.
    """
    with open(filename) as file_handle:
        return [line.rstrip('\n') for line in file_handle]


def main():
    """
    The main function that runs the program
    """
    # Takes questions from external .txt file and stores it in a list
    quiz_questions = _read_lines("HP_Sort_Questions.txt")
    # The house descriptions were taken from
    # https://harrypotter.fandom.com/wiki/Hogwarts_Houses
    house_descriptions = _read_lines("House_Desc.txt")
    # Contains Intro and Instructions
    intro = _read_lines("Intro_Instructions.txt")
    # Create each house with its slice of the descriptions file
    gryffindor = House("Gryffindor", descript=house_descriptions[0:8])
    slytherin = House("Slytherin", descript=house_descriptions[9:17])
    ravenclaw = House("Ravenclaw", descript=house_descriptions[19:26])
    hufflepuff = House("Hufflepuff", descript=house_descriptions[28:38])
    house = House()
    sort_loop = True
    for i in intro:  # prints intro
        print(i)
    print()  # Formatting
    # Print each question line; every 5th line ends a question block, at
    # which point an answer is read and the matching house is tallied.
    for i in range(0, len(quiz_questions)):
        print(quiz_questions[i])
        if i % 5 == 0 and i != 0:
            answer = house.run_quiz()
            if answer == 1:
                gryffindor.score += 1
            elif answer == 2:
                slytherin.score += 1
            elif answer == 3:
                ravenclaw.score += 1
            else:
                hufflepuff.score += 1
            print()
    while sort_loop:
        sort_results = house.do_sorting(gryffindor.score, slytherin.score,
                                        ravenclaw.score, hufflepuff.score)
        if sort_results > 4:  # If it was tied this adds to the house's score
            if sort_results == 5:
                gryffindor.score += 1
            elif sort_results == 6:
                slytherin.score += 1
            elif sort_results == 7:
                ravenclaw.score += 1
            else:
                hufflepuff.score += 1
        else:
            if sort_results == 0:
                print("You were sorted into {}!".format(gryffindor.name), "\n")
                print(*gryffindor.descript, sep="\n")  # * unpacks the list
                sort_loop = False
            elif sort_results == 1:
                print("You were sorted into {}!".format(slytherin.name), "\n")
                print(*slytherin.descript, sep="\n")
                sort_loop = False
            elif sort_results == 2:
                print("You were sorted into {}!".format(ravenclaw.name), "\n")
                print(*ravenclaw.descript, sep="\n")
                sort_loop = False
            else:
                print("You were sorted into {}!".format(hufflepuff.name), "\n")
                print(*hufflepuff.descript, sep="\n")
                sort_loop = False
    # Final percentages across all four houses
    gryffindor.percent = find_percentage(gryffindor.score, slytherin.score,
                                         ravenclaw.score, hufflepuff.score)
    slytherin.percent = find_percentage(slytherin.score, gryffindor.score,
                                        ravenclaw.score, hufflepuff.score)
    ravenclaw.percent = find_percentage(ravenclaw.score, slytherin.score,
                                        gryffindor.score, hufflepuff.score)
    hufflepuff.percent = find_percentage(hufflepuff.score, slytherin.score,
                                         ravenclaw.score, gryffindor.score)
    print("\nFinal Results:")
    print(gryffindor.name, "{:.2f}%".format(gryffindor.percent))
    print(slytherin.name, "{:.2f}%".format(slytherin.percent))
    print(ravenclaw.name, "{:.2f}%".format(ravenclaw.percent))
    print(hufflepuff.name, "{:.2f}%".format(hufflepuff.percent))


main()
|
class Solution:
    def maximumSafenessFactor(self, grid: List[List[int]]) -> int:
        """Return the maximum safeness factor of a path from (0, 0) to the
        bottom-right cell, where safeness is the minimum distance to any
        thief cell (grid value 1) along the path.

        Strategy: a multi-source BFS from every thief gives each cell's
        distance to the nearest thief; a binary search over the candidate
        answer then checks reachability restricted to sufficiently-safe
        cells.

        Consistency fix: the original mixed ``collections.deque()`` with a
        bare ``deque()`` -- one of the two names is necessarily unbound
        depending on which import form the file uses.
        """
        rows, cols = len(grid), len(grid[0])
        # Multi-source BFS: dist[r][c] = distance to the nearest thief.
        dist = [[float('inf')] * cols for _ in range(rows)]
        queue = deque()
        for r in range(rows):
            for c in range(cols):
                if grid[r][c] == 1:
                    queue.append((r, c))
                    dist[r][c] = 0
        while queue:
            r, c = queue.popleft()
            for dr, dc in ((0, 1), (0, -1), (1, 0), (-1, 0)):
                nr, nc = r + dr, c + dc
                if 0 <= nr < rows and 0 <= nc < cols and dist[nr][nc] > dist[r][c] + 1:
                    dist[nr][nc] = dist[r][c] + 1
                    queue.append((nr, nc))

        def reachable(target):
            """BFS restricted to cells with dist >= target."""
            if dist[0][0] < target:
                return False
            seen = [[False] * cols for _ in range(rows)]
            frontier = deque([(0, 0)])
            seen[0][0] = True
            while frontier:
                r, c = frontier.popleft()
                if (r, c) == (rows - 1, cols - 1):
                    return True
                for dr, dc in ((0, 1), (0, -1), (1, 0), (-1, 0)):
                    nr, nc = r + dr, c + dc
                    if (0 <= nr < rows and 0 <= nc < cols
                            and not seen[nr][nc] and dist[nr][nc] >= target):
                        seen[nr][nc] = True
                        frontier.append((nr, nc))
            return False

        # Binary search for the largest feasible safeness value.
        lo, hi = 0, rows
        while lo <= hi:
            mid = lo + (hi - lo) // 2
            if reachable(mid):
                lo = mid + 1
            else:
                hi = mid - 1
        return hi
|
import json
import sys
from devtools import debug
def handle(event, context) -> dict:
    """
    Escalates or de-escalates depending on the incoming event type.
    For more details on the event object format, refer to our reporting docs:
    https://docs.symops.com/docs/reporting
    """
    print("Got event:")
    debug(event)
    try:
        message = update_user(resolve_user(event), event)
    except Exception as exc:
        # Surface the failure in the errors list with an empty body.
        return {"body": {}, "errors": [str(exc)]}
    return {"body": {"message": message}, "errors": []}
def resolve_user(event) -> str:
    """
    Placeholder to take the requesting user and resolve to the right
    user id for the system you're escalating the user in.
    """
    actors = event["run"]["actors"]
    return actors["request"]["username"]
def update_user(username, event) -> str:
    """
    Placeholder to handle updating the given user based on the event type
    """
    event_type = event["event"]["type"]
    if event_type not in ("escalate", "deescalate"):
        raise RuntimeError(f"Unsupported event type: {event_type}")
    verb = "Escalating" if event_type == "escalate" else "Deescalating"
    return f"{verb} user: {username}"
def resolve_local_json(arg) -> str:
    """Find the right test json file based on the arg"""
    payload_files = {"-d": "deescalate.json", "-e": "escalate.json"}
    if arg not in payload_files:
        raise RuntimeError(f"Specify either -e or -d, you supplied: {arg}")
    return f"../test/{payload_files[arg]}"
def run_local() -> dict:
    """
    This lets you test your function code locally, with an escalate or
    deescalate payload (in the ../test) directory.
    $ python handler.py [-e | -d]
    """
    arg = sys.argv[1] if len(sys.argv) > 1 else None
    with open(resolve_local_json(arg), "r") as payload:
        event = json.load(payload)
    # Empty dict stands in for the lambda context object.
    return handle(event, {})
if __name__ == "__main__":
    # Local entry point: run the handler against a test payload and
    # pretty-print the result.
    result = run_local()
    debug(result)
|
from django.shortcuts import render, redirect
from django.contrib import messages
from django.core.urlresolvers import reverse
from .forms import RegisterForm, LoginForm
from .models import *
from ..home.models import *
import stripe, datetime
# Create your views here.
def index(request):
    """Landing page: logged-in users go to the dashboard; everyone else
    sees the combined register/login page."""
    if 'user' in request.session:
        return redirect(reverse('home:dashboard'))
    context = {
        'register': RegisterForm(),
        'login': LoginForm()
    }
    return render(request, 'users/index.html', context)
def register(request):
    """Register a new user: create a Stripe customer, persist the user,
    and start a session. Any form errors are flashed and the visitor is
    sent back to the index page.

    Security fix: the original did ``print form.cleaned_data['stripe_token']``
    before the Stripe call -- writing payment tokens to stdout/logs; the
    debug print has been removed.
    """
    user = None
    if request.method == "POST":
        if 'agreement' not in request.POST:
            messages.error(request, "You must agree to the Terms of Service")
            return redirect(reverse('users:index'))
        else:
            form = RegisterForm(request.POST)
            if form.is_valid():
                customer = stripe.Customer.create(
                    description = form.cleaned_data['email'],
                    email = form.cleaned_data['email'],
                    card = form.cleaned_data['stripe_token'],
                )
                user = User(
                    first_name = form.cleaned_data['first_name'],
                    last_name = form.cleaned_data['last_name'],
                    email = form.cleaned_data['email'],
                    last_4_digits = form.cleaned_data['last_4_digits'],
                    stripe_id = customer.id
                )
                user.set_password(form.cleaned_data['password1'])
                try:
                    user.save()
                except IntegrityError:
                    # NOTE(review): Django's Form API is add_error(field, msg);
                    # addError looks like a custom helper -- confirm it exists
                    # on RegisterForm.
                    form.addError(user.email + ' is already a member')
                else:
                    request.session['user'] = {
                        'id': user.id,
                        'first_name': user.first_name
                    }
                    messages.success(request, "Thanks for registering!")
                    return redirect(reverse('home:dashboard'))
            # Flash every validation error before bouncing back.
            for error in form.errors:
                messages.error(request, form.errors[error])
    return redirect(reverse('users:index'))
def login(request):
    """Log a user in via the custom manager's login(); on failure flash
    each returned error and go back to the index page.

    NOTE(review): a plain GET request falls through every branch and the
    view returns None, which Django rejects -- confirm whether a GET
    handler or redirect was intended.
    """
    if request.method == "POST":
        response = User.objects.login(request.POST)
        # The manager returns a list of error strings on failure, or the
        # User instance on success (inferred from the branches below).
        if type(response) is list:
            for error in response:
                messages.error(request, error)
            return redirect(reverse('users:index'))
        else:
            request.session['user'] = {
                'id': response.id,
                'first_name': response.first_name
            }
            return redirect(reverse('home:dashboard'))
def logout(request):
    """Drop the session user (if any) and return to the public index."""
    if 'user' in request.session:
        request.session.pop('user')
    return redirect(reverse('home:index'))
def show_user(request, user_id):
    """Show a user's profile with current and past meal orders.

    Only the user themselves or an admin may view the page.
    """
    if 'user' not in request.session:
        return redirect(reverse('users:index'))
    user=User.objects.get(id=request.session['user']['id'])
    # Python 2 debug print of the admin flag.
    print user.admin
    if int(request.session['user']['id']) != int(user_id) and user.admin != True:
        messages.error(request, "Cannot access another user's information")
        return redirect(reverse('home:dashboard'))
    today = datetime.datetime.now().date()
    # Meals joined through the meal_orders relation, newest first.
    user_orders = Meal.objects.filter(meal_orders__user__id=user_id).order_by('-live_date')
    past_orders = Meal.objects.filter(meal_orders__user__id=user_id, live_date__lt=today)
    context = {
        'user':user,
        'current_orders': user_orders.filter(live_date__gte=today),
        'past_orders': [{'meal': meal, 'rating': meal.meal_ratings.filter(user__id=user_id)} for meal in past_orders]
    }
    return render(request, 'users/show.html', context)
def show_order(request, meal_id):
    """Show a single meal order plus its message thread.

    A regular user sees their own messages plus those from user 24 (an
    admin account, by the branches below); user 24 sees every message for
    the meal.

    Bug fix: a leftover Python-2 debug print referenced the undefined name
    ``user_id`` and raised NameError whenever the access-denied branch
    executed; it has been removed.
    """
    if 'user' not in request.session:
        return redirect(reverse('users:index'))
    this_meal = Meal.objects.get(id=meal_id)
    this_meal_users = User.objects.filter(user_orders__meal=this_meal)
    # NOTE(review): if the session user neither ordered this meal nor is
    # user 24, this_meal_user is never bound and the check below raises --
    # confirm the intended access rules.
    for user in this_meal_users:
        if request.session['user']['id'] == user.id:
            this_meal_user = user
            specific_messages = Message.objects.filter(user=this_meal_user) | Message.objects.filter(user=24)
            this_meal_messages = specific_messages.filter(meal=this_meal).order_by('-created_at')
        elif request.session['user']['id'] == 24:
            this_meal_user = User.objects.get(id=24)
            this_meal_messages = Message.objects.filter(meal=this_meal).order_by('-created_at')
    if int(request.session['user']['id']) != int(this_meal_user.id):
        messages.error(request, "Cannot access another user's information")
        return redirect(reverse('users:show_user',kwargs={'user_id':request.session['user']['id']}))
    context={
        'user': this_meal_user,
        'meal': this_meal,
        'messages': this_meal_messages
    }
    return render(request, 'users/order.html', context)
def edit_card(request):
    """Store a new card token on the logged-in user's record.

    NOTE(review): the raw stripe token is saved directly on the User model
    and never sent to Stripe here -- confirm that is intentional.
    """
    if 'user' not in request.session:
        return redirect(reverse('users:index'))
    if request.method == "POST":
        customer = User.objects.get(id=request.session['user']['id'])
        customer.card = request.POST['stripe_token']
        customer.save()
    return redirect(reverse('home:dashboard'))
def create_feedback(request):
    """Attach a feedback message from the logged-in user to a meal."""
    if request.method == "POST":
        message = request.POST['message']
        meal_id = request.POST['meal_id']
        try:
            this_meal = Meal.objects.get(id=meal_id)
            this_user = User.objects.get(id=request.session['user']['id'])
            this_order = Meal_Order.objects.filter(meal=this_meal,user=this_user)
            # Reduce the queryset to its last element.
            for order in this_order.all():
                this_order = order
            # Python 2 debug print of the matched order id.
            print this_order.id
            Message.objects.create(user=this_user,message=message,meal=this_meal)
            messages.success(request, "Thank you for your feedback!")
        except:
            # NOTE(review): bare except hides real errors (missing session,
            # bad id) -- consider catching specific exceptions.
            messages.error(request, "Cannot have feedback for this meal")
    # NOTE(review): this_meal is only bound inside the POST branch; a GET
    # request would raise here -- confirm only POSTs reach this view.
    return redirect(reverse('users:show_order',kwargs={'meal_id':this_meal.id}))
|
#!/bin/python3
import math
import os
import random
import re
import sys
# Complete the repeatedString function below.
def repeatedString(s, n):
    """Count occurrences of 'a' in the first n characters of s repeated
    infinitely.

    Bug fix: the original used ``int(n / len(s))``; true division goes
    through a float, which loses precision for very large n (the counts
    can exceed 2**53). Floor division keeps the arithmetic exact.
    """
    count = (n // len(s)) * s.count("a")   # whole copies of s in the prefix
    resto = n % len(s)                     # length of the trailing partial copy
    count += s[0:resto].count("a")
    return count
if __name__ == '__main__':
    # Fixed sample input: "aba" repeated to length 10 contains 7 'a's.
    s = "aba"
    n = 10
    result = repeatedString(s, n)
    print(result)
|
# Python 2 script: for an input string, report whether ANY character is
# alphanumeric / alphabetic / a digit / lowercase / uppercase.
value = raw_input()
print any([c.isalnum() for c in value])
print any([c.isalpha() for c in value])
print any([c.isdigit() for c in value])
print any([c.islower() for c in value])
print any([c.isupper() for c in value])
|
"""
Demo: annotating a matplotlib plot with an arrow (``plt.annotate``).
"""
import matplotlib.pyplot as plt
import numpy as np
x = np.arange(-10, 11, 1)
plt.plot(x, x**2, 'r--')
# Method: plt.annotate()
plt.annotate('this is bottom', xy=(0, 1), xytext=(0, 20),
             arrowprops=dict(facecolor='r', headlength=5, headwidth=10, width=5))
# xy sets the arrow-tip position, xytext the text position, and arrowprops
# configures the arrow: facecolor is the arrow colour, headlength the length
# of the arrow head, width the shaft width, headwidth the head width.
plt.show()
|
"""
Given a directed graph, design an algorithm to find out whether there is a route between two nodes
"""
def route_between_nodes_using_dfs(graph, start, end):
    """Depth-first search: return True iff a route exists from start to end.

    Bug fixes vs. the original:
    - removed a leftover ``import pdb; pdb.set_trace()`` debugger breakpoint;
    - the recursive result was discarded (``dfs(neigh)`` with no return),
      so the function returned None even when a route existed.
    """
    visited = set()

    def dfs(node):
        if node == end:
            return True
        visited.add(node)
        for neigh in graph.get(node, []):
            # Propagate a successful find up the recursion.
            if neigh not in visited and dfs(neigh):
                return True
        return False

    return dfs(start)
# def dfs_with_stack(node):
# if node == end:
# return True
# stack = [node]
# visited.add(node)
# while stack:
# s = stack.pop(-1)
# for neigh in graph.get(s, []):
# if neigh == end:
# return True
# if neigh not in visited:
# stack.append(neigh)
# visited.add(neigh)
# return False
# return dfs_with_stack(start)
def route_between_nodes_using_bfs(graph, start, end):
    """Breadth-first search: return True iff a route exists from start to end."""
    if start == end:
        return True
    seen = {start}
    frontier = [start]
    while frontier:
        node = frontier.pop(0)
        for succ in graph.get(node, []):
            if succ in seen:
                continue
            if succ == end:
                return True
            seen.add(succ)
            frontier.append(succ)
    return False
# Sample directed graph (adjacency list); node 5 appears only as a target.
graph = {
    0: [1, 5],
    1: [3],
    2: [1],
    3: [2],
    4: [1, 3]
}
# Exercise the DFS variant; the BFS and remaining cases are kept for
# manual experimentation.
print(route_between_nodes_using_dfs(graph, 0, 3))
# print(route_between_nodes_using_bfs(graph, 0, 3))
# print(route_between_nodes_using_dfs(graph, 0, 5))
# print(route_between_nodes_using_bfs(graph, 0, 5))
# print(route_between_nodes_using_dfs(graph, 1, 5))
# print(route_between_nodes_using_bfs(graph, 1, 5))
|
# see http://effbot.org/zone/simple-top-down-parsing.htm
# NOTE: Python 2 code (print statement, generator .next()).
import sys
import re
if 1:
    # Token classes for a Pratt (top-down operator precedence) parser.
    # lbp = left binding power; nud = null denotation (prefix position);
    # led = left denotation (infix position).
    class literal_token:
        def __init__(self, value):
            self.value = value
        def nud(self):
            return self
        def __repr__(self):
            return "(literal %s)" % self.value
    class operator_add_token:
        lbp = 10
        def nud(self):
            # Unary plus: bind tightly (rbp 100) to the next expression.
            self.first = expression(100)
            self.second = None
            return self
        def led(self, left):
            self.first = left
            self.second = expression(10)
            return self
        def __repr__(self):
            return "(add %s %s)" % (self.first, self.second)
    class operator_sub_token:
        lbp = 10
        def nud(self):
            # Unary minus, same binding as unary plus.
            self.first = expression(100)
            self.second = None
            return self
        def led(self, left):
            self.first = left
            self.second = expression(10)
            return self
        def __repr__(self):
            return "(sub %s %s)" % (self.first, self.second)
    class operator_mul_token:
        # Higher lbp than add/sub gives '*' the tighter precedence.
        lbp = 20
        def led(self, left):
            self.first = left
            self.second = expression(20)
            return self
        def __str__(self):
            return "(mul %s %s)" % (self.first, self.second)
    class end_token:
        # Sentinel token: lbp 0 terminates the expression loop.
        lbp = 0
def tokenize(program):
    # NOTE(review): operator_div_token and operator_pow_token are yielded
    # below but are not defined anywhere in this chunk -- input containing
    # '/' or '**' would raise NameError.
    for number, operator in re.findall("\s*(?:(\d+)|(\*\*|.))", program):
        if number:
            yield literal_token(int(number))
        elif operator == "+":
            yield operator_add_token()
        elif operator == "-":
            yield operator_sub_token()
        elif operator == "*":
            yield operator_mul_token()
        elif operator == "/":
            yield operator_div_token()
        elif operator == "**":
            yield operator_pow_token()
        else:
            raise SyntaxError("unknown operator: %r" % operator)
    yield end_token()
def expression(rbp=0):
    # Core Pratt loop: consume a prefix token, then fold in infix tokens
    # while their binding power exceeds rbp. Uses the module-global
    # 'token' / 'next' set up by parse() (note: 'next' shadows the builtin).
    global token
    t = token
    token = next()
    left = t.nud()
    while rbp < token.lbp:
        t = token
        token = next()
        left = t.led(left)
    return left
def parse(program):
    global token, next
    # Python 2: bind the generator's .next method for the expression loop.
    next = tokenize(program).next
    token = next()
    print program, "->", expression()
parse("1")
parse("+1")
parse("-1")
parse("1+2")
parse("1+2+3")
parse("1+2*3")
parse("1*2+3")
# Generated by Django 2.2.6 on 2020-01-31 03:59
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: drop the unique_together constraint on
    the consumer model (set to the empty set)."""
    dependencies = [
        ('consumers', '0011_consumer_consumer_id'),
    ]
    operations = [
        migrations.AlterUniqueTogether(
            name='consumer',
            unique_together=set(),
        ),
    ]
|
import os
class SoundPlayer:
    """Plays notification tones by shelling out to mpg321.

    Playback is fire-and-forget: each command is backgrounded with a
    trailing '&' so the caller is never blocked.

    Refactor: the three play methods duplicated the same os.system call;
    the shared logic now lives in a single private helper.
    """

    def __init__(self):
        # Absolute paths to the tone assets on the device.
        self.pause_file = "/home/pi/spotify_dj/Assets/brute-force.mp3"
        self.skip_file = "/home/pi/spotify_dj/Assets/SkipTone.mp3"
        self.boot_file = "/home/pi/spotify_dj/Assets/bootup.mp3"

    def _play(self, path):
        """Launch mpg321 on the given file in the background."""
        os.system("mpg321 " + path + " &")

    def play_pause_tone(self):
        self._play(self.pause_file)

    def play_skip_tone(self):
        self._play(self.skip_file)

    def play_boot_tone(self):
        self._play(self.boot_file)
|
import time
import numpy as np
from State import State
from constants import move_action_to_deviation, Action
import random
class Oracle:
    """Scripted agent that greedily moves blocks toward goal positions,
    rendering each step in a BlockWorld window."""
    def __init__(self, window_width, window_height, step_size, goal_config, init_state):
        self.window_width = window_width
        self.window_height = window_height
        self.step_size = step_size
        self.goal_config = goal_config
        self.init_state = init_state
    def get_available_actions(self, state, block_idx) -> list:
        """Return the move actions that keep the block inside the window."""
        position = state.get_position(block_idx)
        return [action for action in move_action_to_deviation if 0 <= position[0] + move_action_to_deviation[action][0] < self.window_width and 0 <= position[1] + move_action_to_deviation[action][1] < self.window_height]
    @staticmethod
    def get_oracle_best_action(state: State, block_idx, order=True):
        """Pick the next greedy action for block_idx.

        ``order`` selects the axis preference (horizontal-first when True,
        vertical-first when False); the returned tuple is
        (action, order flag to use on the next call).
        """
        if state.goal_reached():
            return Action.DROP, order
        curr_position = state.get_position(block_idx)
        goal_position = state.get_goal_position(block_idx)
        if tuple(curr_position) == tuple(goal_position):
            return Action.DROP, False
        # Is some block parked on this block's goal cell?
        is_goal_blocked = any([tuple(goal_position) == tuple(block_position) for block_position in state.block_positions])
        # Is this block parked on another block's goal cell?
        am_blocking_goal = any([tuple(goal_position) == tuple(curr_position) for goal_position in state.goal_positions]) and not tuple(curr_position) == tuple(state.goal_positions[block_idx])
        is_left_good = state.is_action_good(Action.MOVE_LEFT, block_idx)
        is_right_good = state.is_action_good(Action.MOVE_RIGHT, block_idx)
        is_down_good = state.is_action_good(Action.MOVE_DOWN, block_idx)
        is_up_good = state.is_action_good(Action.MOVE_UP, block_idx)
        is_goal_to_the_left = goal_position[0] < curr_position[0]
        is_goal_to_the_right = goal_position[0] > curr_position[0]
        is_goal_to_the_top = goal_position[1] < curr_position[1]
        is_goal_to_the_bottom = goal_position[1] > curr_position[1]
        if order:
            # Horizontal moves preferred first.
            if is_left_good and is_goal_to_the_left:
                return Action.MOVE_LEFT, order
            elif is_right_good and is_goal_to_the_right:
                return Action.MOVE_RIGHT, order
            elif is_down_good and is_goal_to_the_bottom:
                return Action.MOVE_DOWN, order
            elif is_up_good and is_goal_to_the_top:
                return Action.MOVE_UP, order
        else:
            # Vertical moves preferred first.
            if is_down_good and is_goal_to_the_bottom:
                return Action.MOVE_DOWN, order
            elif is_up_good and is_goal_to_the_top:
                return Action.MOVE_UP, order
            elif is_left_good and is_goal_to_the_left:
                return Action.MOVE_LEFT, order
            elif is_right_good and is_goal_to_the_right:
                return Action.MOVE_RIGHT, order
        # No greedy move helped: fall back to the merely-allowed moves.
        allowed_actions = []
        is_left_allowed = state.is_action_allowed(Action.MOVE_LEFT, block_idx)
        is_right_allowed = state.is_action_allowed(Action.MOVE_RIGHT, block_idx)
        is_down_allowed = state.is_action_allowed(Action.MOVE_DOWN, block_idx)
        is_up_allowed = state.is_action_allowed(Action.MOVE_UP, block_idx)
        if is_left_allowed: allowed_actions.append(Action.MOVE_LEFT)
        if is_right_allowed: allowed_actions.append(Action.MOVE_RIGHT)
        if is_down_allowed: allowed_actions.append(Action.MOVE_DOWN)
        if is_up_allowed: allowed_actions.append(Action.MOVE_UP)
        if am_blocking_goal and allowed_actions:
            # Step aside randomly and flip the axis preference.
            return random.choice(allowed_actions), not order
        if is_goal_blocked:
            # NOTE(review): redundant -- the fallthrough below returns the
            # identical value.
            return Action.DROP, not order
        return Action.DROP, not order
    @staticmethod
    def get_next_state(state, action, block_idx):
        """Apply one action to (a copy of) the state and return the result."""
        if action == Action.PICK:
            new_state = state.copy()
            new_state.select(block_idx)
        elif action == Action.DROP:
            new_state = state.copy()
            new_state.deselect()
        else:
            new_state = state.get_next_state(block_idx, action)
        return new_state
    def run(self):
        """Drive every block to its goal while rendering each step.

        NOTE(review): several names used here are not defined in this
        module chunk -- BlockWorld is never imported, and neither
        self.get_best_action nor Oracle.get_goal_position exists here
        (get_oracle_best_action is the likely intent). Confirm against the
        rest of the project before relying on this method.
        """
        curr_state = self.init_state
        n = curr_state.block_count
        # bring all blocks together
        actions = []
        block_count = curr_state.block_count
        curr_state.set_goal_positions(Oracle.get_goal_position(curr_state, self.goal_config, self.step_size))
        action = None
        block_world = BlockWorld(self.window_width, self.window_height, num_blocks=block_count, num_stacks=1, block_size=self.step_size)
        b_w_g_c = self.goal_config.copy()
        # NOTE(review): reverse() mutates the temporary list returned by
        # tolist(), which is then discarded -- this line has no effect.
        b_w_g_c.tolist().reverse()
        block_world.create_goal([b_w_g_c])
        block_world.pre_render()
        block_world.update_all_block_states(curr_state)
        block_world.render()
        flip_order = True
        while not curr_state.goal_reached():
            actions_taken = []
            # Alternate the block visiting order between passes.
            if flip_order:
                this_range = range(block_count)
            else:
                this_range = range(block_count - 1, -1, -1)
            for block_idx in this_range:
                if curr_state.get_position(block_idx) != curr_state.goal_positions[block_idx]:
                    actions.append((Action.PICK, block_idx))
                    while action != Action.DROP:
                        time.sleep(0.5)
                        block_world.pre_render()
                        action, flip_order = self.get_best_action(curr_state, block_idx, flip_order)
                        if action:
                            actions_taken.append(action)
                            print(block_idx, curr_state, action)
                            actions.append(action)
                            curr_state = Oracle.get_next_state(curr_state, action, block_idx)
                            block_world.update_all_block_states(curr_state)
                            block_world.render()
                        else:
                            break
                    action = None
            if len(actions_taken) == 3 and not curr_state.goal_reached():
                print("STUCK", actions_taken)
                # find all blocks which arent in their goal_pos
                # NOTE(review): the comment above says "aren't", but the
                # filter keeps blocks that ARE at their goal -- confirm.
                conflicting_blocks = [idx for idx in range(curr_state.block_count) if curr_state.get_position(idx) == curr_state.goal_positions[idx]]
                # break
        time.sleep(2)
        print(actions)
if __name__ == '__main__':
    # Run the oracle on 10 random goal permutations of a 2-block world
    # (300x300 window, 50px steps, fixed initial block positions).
    block_count = 2
    for _ in range(10):
        oracle = Oracle(300, 300, 50, np.random.permutation(block_count), State([(50, 150), (250, 50)], None, None))
        oracle.run()
|
from fid import fid
from kid import kid_kid, kid_is
if __name__ == "__main__":
    # CLI driver computing IS / FID / KID between the training sets and
    # generated output directories.
    from optparse import OptionParser
    parser = OptionParser()
    # Bug fix: the --metric help text was a copy/paste of the batch-size
    # description.
    parser.add_option("-m", "--metric", dest="metric", default="all",
                      help="Metric to compute: all, fid, is or kid",
                      type=str)
    parser.add_option("--p1", "--path1", dest="path1", default=None,
                      help="Path to directory containing the real images")
    parser.add_option("--p2", "--path2", dest="path2", default=None,
                      help="Path to directory containing the generated images")
    parser.add_option("-b", "--batch-size", dest="batch_size", default=1,
                      help="Set batch size to use for InceptionV3 network",
                      type=int)
    print ('------------------------options-------------------------')
    options, _ = parser.parse_args()
    train_A_path = 'dataset/selfie2anime_64_64/trainA'
    train_B_path = 'dataset/selfie2anime_64_64/trainB'
    # The metric choices are mutually exclusive, so chain them with elif
    # (also dropped a leftover "here" debug print).
    if options.metric == 'all':
        print ('calculating is now...')
        print ('is score trainA vs output_B2A:', kid_is(options.path1, 16))
        print ('is score trainB vs output_A2B:', kid_is(options.path2, 16))
        print ('calculating fid now...')
        print ('fid score trainA vs output_B2A:', fid(train_A_path, options.path1, 8))
        print ('fid score trainB vs output_A2B:', fid(train_B_path, options.path2, 8))
        print ('calculating kid now...')
        print ('kid score trainA vs output_B2A:', kid_kid(train_A_path, options.path1, 16))
        print ('kid score trainB vs output_A2B:', kid_kid(train_B_path, options.path2, 16))
    elif options.metric == 'fid':
        print ('calculating fid now...')
        print ('fid score trainA vs output_B2A:', fid(train_A_path, options.path1, 8))
        print ('fid score trainB vs output_A2B:', fid(train_B_path, options.path2, 8))
    elif options.metric == 'is':
        print ('calculating is now...')
        print ('is score trainA vs output_B2A:', kid_is(options.path1, 16))
        print ('is score trainB vs output_A2B:', kid_is(options.path2, 16))
    elif options.metric == 'kid':
        print ('calculating kid now...')
        print ('kid score trainA vs output_B2A:', kid_kid(train_A_path, options.path1, 16))
        print ('kid score trainB vs output_A2B:', kid_kid(train_B_path, options.path2, 16))
|
# Evaluate y = 4*(x-3)^6 - 7*(x-3)^3 + 2 for a user-supplied x.
x=float(input("x= "))
y=4*(pow((x-3),6))-7*(pow((x-3),3))+2
print(y)
|
#!/usr/bin/env python3
import dadi
import dadi.NLopt_mod
import nlopt
def three_epoch_noF(params, ns, pts):
    """
    params = (nuB,nuF,TB,TF)
    ns = (n1,)
    nuB: Ratio of bottleneck population size to ancient pop size
    nuF: Ratio of contemporary to ancient pop size
    TB: Length of bottleneck (in units of 2*Na generations)
    TF: Time since bottleneck recovery (in units of 2*Na generations)
    n1: Number of samples in resulting Spectrum
    pts: Number of grid points to use in integration.
    """
    nuB,nuF,TB,TF = params
    xx = dadi.Numerics.default_grid(pts)
    # Equilibrium ancestral population.
    phi = dadi.PhiManip.phi_1D(xx)
    # Bottleneck epoch, then the recovery epoch.
    phi = dadi.Integration.one_pop(phi, xx, TB, nuB)
    phi = dadi.Integration.one_pop(phi, xx, TF, nuF)
    # Project phi down to the sample frequency spectrum.
    fs = dadi.Spectrum.from_phi(phi, ns, (xx,))
    return fs
if __name__ == '__main__':
    # Read in
    data = dadi.Spectrum.from_file("cabbage.fs")
    data_f = data.fold()
    ns = data.sample_sizes
    # Extrapolation grid sizes.
    pts_l = [100,110,120]
    func = three_epoch_noF
    # Bounds and starting point for (nuB, nuF, TB, TF).
    upper_bound = [50.0,50.0,50.0,50.0]
    lower_bound = [1e-3,1e-3,1e-3,1e-3]
    p0 = [1.0,1.0,0.5,0.5]
    func_ex = dadi.Numerics.make_extrap_log_func(func)
    # Random perturbation so repeated runs start from different points.
    p0 = dadi.Misc.perturb_params(p0,fold=1,upper_bound=upper_bound,lower_bound=lower_bound)
    print("\n\n**** Starting NLopt optimization ****\n\n")
    popt,LLopt,result = dadi.NLopt_mod.opt(p0, data_f, func_ex, pts_l,
                                           lower_bound=lower_bound, upper_bound=upper_bound,
                                           algorithm=nlopt.LN_BOBYQA)
    model = func_ex(popt, ns, pts_l)
    model_f = model.fold()
    # Folded-fit log-likelihood and optimal theta scaling.
    ll_optF = dadi.Inference.ll_multinom(model_f, data_f)
    thetaF = dadi.Inference.optimal_sfs_scaling(model_f, data_f)
    # Append one CSV row: the fitted params then log-likelihood and theta.
    with open("cabbage_fits_3epoch_noF.csv", 'a') as f1_out:
        for p in range(len(popt)):
            print("{}".format(popt[p]), ",", end='', sep='', file=f1_out)
        print(ll_optF, ",", thetaF, sep='', file=f1_out)
|
"""Partial derivatives for the SELU activation function."""
from torch import exp, le, ones_like, zeros_like
from backpack.core.derivatives.elementwise import ElementwiseDerivatives
class SELUDerivatives(ElementwiseDerivatives):
    """Implement first- and second-order partial derivatives of SELU."""

    # Standard SELU constants (Klambauer et al.).
    alpha = 1.6732632423543772848170429916717
    scale = 1.0507009873554804934193349852946

    def hessian_is_zero(self):
        """`SELU''(x) != 0`."""
        return False

    def df(self, module, g_inp, g_out):
        """First SELU derivative: `SELU'(x) = scale if x > 0 else scale*alpha*e^x`."""
        x = module.input0
        negative = le(x, 0)
        deriv = self.scale * ones_like(x)
        # Overwrite the non-positive entries with the exponential branch.
        deriv[negative] = self.scale * self.alpha * exp(x[negative])
        return deriv

    def d2f(self, module, g_inp, g_out):
        """Second SELU derivative: `SELU''(x) = 0 if x > 0 else scale*alpha*e^x`."""
        x = module.input0
        negative = le(x, 0)
        second = zeros_like(x)
        second[negative] = self.scale * self.alpha * exp(x[negative])
        return second
|
#-*-coding:utf-8-*-
"""
"Created by: Li Zhen
"Created at: 2019/4/1 8:48
"Description: TODO
"""
import torch
from torch.nn import Linear, Module, MSELoss
from torch.optim import SGD
import numpy as np
import pandas as pd
import matplotlib
import matplotlib.pyplot as plt
import seaborn as sns
print(torch.__version__)
#%%
# Plot the ideal line y = 5x + 7.
x = np.linspace(0, 20, 500)
y = 5 * x + 7
plt.figure()
plt.plot(x, y)
plt.show()
#%%
# Synthetic data: y = 5x + 7 plus Gaussian noise.
x = np.random.rand(256)
noise = np.random.randn(256) / 4
y = x * 5 + 7 + noise
df = pd.DataFrame()
df['x'] = x
df['y'] = y
# plt.figure()
sns.lmplot(x='x', y='y', data=df)
#%%
# Fit a single-feature linear model with plain SGD + MSE loss.
model = Linear(1, 1)
criterion = MSELoss()
optim = SGD(model.parameters(), lr=0.01)
epoches = 30000
[w, b] = model.parameters()
x_train = x.reshape(-1, 1).astype('float32')
y_train = y.reshape(-1, 1).astype('float32')
for i in range(epoches):
    i += 1
    inputs = torch.from_numpy(x_train)
    labels = torch.from_numpy(y_train)
    outputs = model(inputs)
    optim.zero_grad()
    loss = criterion(outputs, labels)
    loss.backward()
    optim.step()
    if (i %100 == 0):
        print('epoche {}, loss {}'.format(i, loss.data.item()))
# Learned weight and bias should approach 5 and 7.
print(w.detach().item(), b.detach().item())
#%%
# Visualize the fit against the training data.
with torch.no_grad():
    predicted = model(torch.from_numpy(x_train)).data.numpy()
plt.figure()
plt.plot(x_train, y_train, 'go', label='data', alpha=0.3)
plt.plot(x_train, predicted, 'k', label='predited', alpha=1)
plt.legend()
plt.show()
|
"""
Support for broadlink remote control of a media device.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.broadlink/
"""
import asyncio
from base64 import b64decode
import binascii
import logging
import socket
from math import copysign
from datetime import datetime, timedelta
import voluptuous as vol
from homeassistant.components.media_player import (
MediaPlayerDevice, PLATFORM_SCHEMA)
from homeassistant.components.media_player.const import (
MEDIA_TYPE_CHANNEL, SUPPORT_NEXT_TRACK,
SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK, SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_STEP, SUPPORT_VOLUME_SET, SUPPORT_SELECT_SOUND_MODE)
from homeassistant.const import (
CONF_COMMAND_OFF, CONF_COMMAND_ON, CONF_HOST, CONF_MAC, CONF_NAME,
CONF_PORT, CONF_TIMEOUT, STATE_OFF, STATE_ON)
from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.config_validation as cv
# pip requirements pulled in by home assistant for this platform.
REQUIREMENTS = [
    'broadlink==0.9.0',
    'irgen==0.1.0',
]
DOMAIN = 'broadlink'
# Defaults used when the matching option is omitted from configuration.
DEFAULT_NAME = "Broadlink IR Media Player"
DEFAULT_TIMEOUT = 10
DEFAULT_DELAY = 0.5
DEFAULT_PORT = 80
# Configuration keys for the individual IR command entries.
CONF_VOLUME_UP = 'volume_up'
CONF_VOLUME_DOWN = 'volume_down'
CONF_VOLUME_MUTE = 'volume_mute'
CONF_VOLUME_MUTE_ON = 'volume_mute_on'
CONF_VOLUME_MUTE_OFF = 'volume_mute_off'
CONF_NEXT_TRACK = 'next_track'
CONF_PREVIOUS_TRACK = 'previous_track'
CONF_SOURCES = 'sources'
CONF_CHANNELS = 'channels'
CONF_DIGITS = 'digits'
CONF_SOUND_MODES = 'sound_modes'
# Keys nested inside the 'volume_set' section (see VOLUME_SCHEMA_SET).
CONF_VOLUME_LEVELS = 'levels'
CONF_VOLUME_STEP = 'step'
CONF_VOLUME_MAX = 'max'
CONF_VOLUME_MIN = 'min'
CONF_VOLUME_SET = 'volume_set'
CONF_VOLUME_TIMEOUT = 'timeout'
CONF_VOLUME_RESTORE = 'restore'
# Keys of a single normalized command dict.
CONF_CODE = 'code'
CONF_DELAY = 'delay'
_LOGGER = logging.getLogger(__name__)
def convert_list_to_hex(data):
    """Convert a 4-element irgen protocol description to a broadlink b64 code.

    Raises voluptuous Invalid when the list does not have exactly 4 entries,
    so it can be used directly inside a schema.
    """
    if len(data) != 4:
        raise vol.Invalid('Invalid length of list')
    import irgen
    raw_signal = irgen.gen_raw_general(*data)
    encoded = irgen.gen_broadlink_base64_from_raw(raw_signal)
    _LOGGER.debug("%s converted to: %s", data, encoded)
    return encoded
def convert_code_to_command(data):
    """Wrap a bare code string into a full command dict with no delay."""
    command = {CONF_CODE: data}
    command[CONF_DELAY] = None
    return command
# A code is either a raw b64/hex string or a 4-element irgen description
# that gets converted to one.
CODE_SCHEMA = vol.Schema(
    vol.Any(
        vol.All(
            list,
            convert_list_to_hex,
        ),
        cv.string
    )
)
# A command is a {code, delay} dict; a bare code is normalized into one.
COMMAND_SCHEMA = vol.Schema(
    vol.Any(
        {vol.Required(CONF_CODE): CODE_SCHEMA,
         vol.Optional(CONF_DELAY, default=0.0): float},
        vol.All(
            CODE_SCHEMA,
            convert_code_to_command
        )
    )
)
# Channel-number entry requires one command per decimal digit.
DIGITS_SCHEMA = vol.Schema({
    vol.Required('0'): COMMAND_SCHEMA,
    vol.Required('1'): COMMAND_SCHEMA,
    vol.Required('2'): COMMAND_SCHEMA,
    vol.Required('3'): COMMAND_SCHEMA,
    vol.Required('4'): COMMAND_SCHEMA,
    vol.Required('5'): COMMAND_SCHEMA,
    vol.Required('6'): COMMAND_SCHEMA,
    vol.Required('7'): COMMAND_SCHEMA,
    vol.Required('8'): COMMAND_SCHEMA,
    vol.Required('9'): COMMAND_SCHEMA,
})
# Free-form name -> command mapping (sources, sound modes).
ENTRY_SCHEMA = vol.Schema({str: COMMAND_SCHEMA})
# Device volume level -> command that jumps straight to that level.
VOLUME_LEVELS_SCHEMA = vol.Schema({float: COMMAND_SCHEMA})
VOLUME_SCHEMA_SET = vol.Schema({
    vol.Optional(CONF_VOLUME_RESTORE): float,
    vol.Required(CONF_VOLUME_MAX): float,
    vol.Required(CONF_VOLUME_MIN): float,
    vol.Required(CONF_VOLUME_LEVELS): VOLUME_LEVELS_SCHEMA,
    vol.Required(CONF_VOLUME_STEP): float,
    vol.Optional(CONF_VOLUME_TIMEOUT): float,
})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_HOST): cv.string,
    vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.positive_int,
    vol.Required(CONF_MAC): cv.string,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
    vol.Optional(CONF_COMMAND_ON): COMMAND_SCHEMA,
    vol.Optional(CONF_COMMAND_OFF): COMMAND_SCHEMA,
    vol.Optional(CONF_VOLUME_SET): VOLUME_SCHEMA_SET,
    vol.Optional(CONF_VOLUME_UP): COMMAND_SCHEMA,
    vol.Optional(CONF_VOLUME_DOWN): COMMAND_SCHEMA,
    vol.Optional(CONF_VOLUME_MUTE): COMMAND_SCHEMA,
    vol.Optional(CONF_VOLUME_MUTE_ON): COMMAND_SCHEMA,
    vol.Optional(CONF_VOLUME_MUTE_OFF): COMMAND_SCHEMA,
    vol.Optional(CONF_NEXT_TRACK): COMMAND_SCHEMA,
    vol.Optional(CONF_PREVIOUS_TRACK): COMMAND_SCHEMA,
    vol.Optional(CONF_SOURCES, default={}): ENTRY_SCHEMA,
    vol.Optional(CONF_SOUND_MODES, default={}): ENTRY_SCHEMA,
    vol.Optional(CONF_DIGITS): DIGITS_SCHEMA,
})
# Config entry -> media player support flag implied by its presence.
SUPPORT_MAPPING = [
    (CONF_COMMAND_ON, SUPPORT_TURN_ON),
    (CONF_COMMAND_OFF, SUPPORT_TURN_OFF),
    (CONF_VOLUME_UP, SUPPORT_VOLUME_STEP),
    (CONF_VOLUME_DOWN, SUPPORT_VOLUME_STEP),
    (CONF_VOLUME_MUTE, SUPPORT_VOLUME_MUTE),
    (CONF_NEXT_TRACK, SUPPORT_NEXT_TRACK),
    (CONF_PREVIOUS_TRACK, SUPPORT_PREVIOUS_TRACK),
]
async def async_setup_platform(hass,
                               config,
                               async_add_devices,
                               discovery_info=None):
    """Set up platform."""
    import broadlink

    address = (config.get(CONF_HOST),
               config.get(CONF_PORT))
    link = broadlink.rm(
        address,
        get_broadlink_mac(config.get(CONF_MAC)),
        None)
    # Authenticate once up-front; a timeout means the device is unreachable,
    # so ask home assistant to retry the platform later.
    try:
        await hass.async_add_job(link.auth)
    except socket.timeout:
        _LOGGER.warning("Timeout trying to authenticate to broadlink")
        raise PlatformNotReady
    async_add_devices([BroadlinkRM(link, config)])
def get_supported_by_config(config):
    """Calculate support flags based on available configuration entries."""
    support = 0
    # Simple one-to-one mappings: the key's mere presence enables the flag.
    for conf_key, flag in SUPPORT_MAPPING:
        if conf_key in config:
            support |= flag
    # These options only count when they hold a non-empty value.
    truthy_mappings = (
        (CONF_SOURCES, SUPPORT_SELECT_SOURCE),
        (CONF_SOUND_MODES, SUPPORT_SELECT_SOUND_MODE),
        (CONF_DIGITS, SUPPORT_PLAY_MEDIA),
        (CONF_VOLUME_SET, SUPPORT_VOLUME_SET),
    )
    for conf_key, flag in truthy_mappings:
        if config.get(conf_key):
            support |= flag
    return support
def get_broadlink_mac(mac: str):
    """Convert a colon-separated mac string into its raw bytes."""
    stripped = mac.replace(':', '')
    return binascii.unhexlify(stripped.encode())
def convert_volume_to_device(config_volume_set, volume):
    """Map a 0..1 volume fraction onto the device's native volume range."""
    low = config_volume_set[CONF_VOLUME_MIN]
    high = config_volume_set[CONF_VOLUME_MAX]
    return low + volume * (high - low)
class BroadlinkRM(MediaPlayerDevice):
    """Media player driven by one-way broadlink IR commands.

    Because IR control is fire-and-forget, power/source/volume/mute state
    is tracked optimistically in this object and may drift from the real
    device.
    """

    def __init__(self, link, config):
        """Initialize device."""
        super().__init__()
        self._support = get_supported_by_config(config)
        self._config = config
        self._link = link
        self._state = STATE_OFF
        self._source = None
        self._sound_mode = None
        self._muted = None
        self._volume_level = None
        self._lock = asyncio.Lock()
        # Start far enough in the past that the first volume command is
        # never throttled by the volume timeout.
        self._volume_timestamp = datetime.now() + timedelta(seconds=-100)
        self._volume_calls = 0
        self._volume_step = None
        self._volume_levels = None
        self._volume_restore = None
        if CONF_VOLUME_SET in config:
            volume_set = config[CONF_VOLUME_SET]
            scale = (volume_set[CONF_VOLUME_MAX] -
                     volume_set[CONF_VOLUME_MIN])
            offset = volume_set[CONF_VOLUME_MIN]
            # Normalize device volume units onto the 0..1 range used by
            # home assistant.
            self._volume_step = volume_set[CONF_VOLUME_STEP] / scale
            self._volume_levels = {
                (level - offset) / scale: code
                for level, code in volume_set[CONF_VOLUME_LEVELS].items()
            }
            _LOGGER.debug("Converted step %f, volumes: %s",
                          self._volume_step, self._volume_levels)
            if CONF_VOLUME_RESTORE in volume_set:
                self._volume_restore = (
                    (volume_set[CONF_VOLUME_RESTORE] - offset) / scale
                )

    async def send(self, command):
        """Send b64 encoded command to device."""
        if command is None:
            raise Exception('No command defined!')
        packet = b64decode(command[CONF_CODE])
        await self.hass.async_add_job(self._link.send_data, packet)
        if command[CONF_DELAY]:
            await asyncio.sleep(command[CONF_DELAY])

    async def send_volume(self, code):
        """Send a volume code and stamp the time of the last volume press.

        NOTE(review): when the volume timeout has elapsed the code is sent
        twice — presumably the first press only wakes the device's volume
        overlay. Confirm against the target device.
        """
        if await self._volume_timeout():
            await self.send(code)
        await self.send(code)
        self._volume_timestamp = datetime.now()

    @property
    def name(self):
        """Return the name of the controlled device."""
        return self._config.get(CONF_NAME)

    @property
    def state(self):
        """Return the state of the device."""
        return self._state

    @property
    def supported_features(self):
        """Flag media player features that are supported."""
        return self._support

    async def async_turn_on(self):
        """Turn on media player."""
        async with self._lock:
            await self.send(self._config.get(CONF_COMMAND_ON))
            self._state = STATE_ON
        # Fixed: a configured restore level of 0.0 was previously skipped
        # by the truthiness test.
        if self._volume_restore is not None:
            await self.async_set_volume_level(self._volume_restore)

    async def async_turn_off(self):
        """Turn off media player."""
        async with self._lock:
            await self.send(self._config.get(CONF_COMMAND_OFF))
            self._state = STATE_OFF

    async def async_volume_up(self):
        """Volume up media player."""
        async with self._lock:
            await self.send_volume(self._config.get(CONF_VOLUME_UP))
            # Fixed: this used to test `CONF_VOLUME_STEP in self._config`,
            # but 'step' only exists nested inside the volume_set section,
            # so the tracked level was never updated.
            if self._volume_step is not None and \
                    self._volume_level is not None:
                self._volume_level += self._volume_step

    async def async_volume_down(self):
        """Volume down media player."""
        async with self._lock:
            await self.send_volume(self._config.get(CONF_VOLUME_DOWN))
            # Fixed: same nested-key test as async_volume_up.
            if self._volume_step is not None and \
                    self._volume_level is not None:
                self._volume_level -= self._volume_step

    async def async_mute_volume(self, mute):
        """Send mute command.

        Uses dedicated on/off codes when configured; otherwise falls back
        to a single toggle code (whose resulting state is unknown, so
        self._muted is left untouched in that branch).
        """
        async with self._lock:
            if mute and CONF_VOLUME_MUTE_ON in self._config:
                await self.send(self._config.get(CONF_VOLUME_MUTE_ON))
                self._muted = True
            elif not mute and CONF_VOLUME_MUTE_OFF in self._config:
                await self.send(self._config.get(CONF_VOLUME_MUTE_OFF))
                self._muted = False
            else:
                await self.send(self._config.get(CONF_VOLUME_MUTE))

    async def async_media_next_track(self):
        """Send next track command."""
        async with self._lock:
            await self.send(self._config.get(CONF_NEXT_TRACK))

    async def async_media_previous_track(self):
        """Send the previous track command."""
        async with self._lock:
            await self.send(self._config.get(CONF_PREVIOUS_TRACK))

    async def async_select_source(self, source):
        """Select a specific source."""
        async with self._lock:
            await self.send(self._config.get(CONF_SOURCES)[source])
            self._source = source
            # switching input invalidates the tracked sound mode
            self._sound_mode = None

    async def async_select_sound_mode(self, sound_mode):
        """Select a specific sound mode."""
        async with self._lock:
            await self.send(self._config.get(CONF_SOUND_MODES)[sound_mode])
            self._sound_mode = sound_mode

    async def async_play_media(self, media_type, media_id, **kwargs):
        """Switch to a specific channel by sending its digits one by one."""
        if media_type != MEDIA_TYPE_CHANNEL:
            _LOGGER.error('Unsupported media type %s', media_type)
            return
        # validates media_id is a positive integer string; raises otherwise
        cv.positive_int(media_id)
        async with self._lock:
            for digit in media_id:
                await self.send(self._config.get(CONF_DIGITS).get(digit))

    async def async_set_volume_level(self, volume):
        """Set volume level, range 0..1.

        Jumps to the nearest known level (configured preset or the last
        tracked level), then steps the rest of the way. A newer call
        supersedes an in-flight one via the _volume_calls counter.
        """
        if CONF_VOLUME_SET not in self._config:
            raise NotImplementedError()
        config = self._config[CONF_VOLUME_SET]
        self._volume_calls += 1
        volume_calls = self._volume_calls
        async with self._lock:
            if self._volume_calls != volume_calls:
                _LOGGER.debug('Aborted volume change early')
                # Fixed: without this return the superseded volume change
                # fell through and ran anyway.
                return

            def items():
                # current (tracked) level first, with no jump code needed
                if self._volume_level:
                    yield self._volume_level, None
                yield from self._volume_levels.items()

            base_level, base_code = min(
                items(),
                key=lambda kv: abs(volume - kv[0]))
            steps = int(round((volume - base_level) / self._volume_step))
            if steps > 0:
                code = self._config.get(CONF_VOLUME_UP)
            else:
                code = self._config.get(CONF_VOLUME_DOWN)
            target = base_level + self._volume_step * steps
            _LOGGER.debug('Volume base %f(%f) target %f(%f) steps %f',
                          base_level,
                          convert_volume_to_device(config, base_level),
                          target,
                          convert_volume_to_device(config, target),
                          steps)
            # lie and say we are at volume, while
            # changing to keep gui happy
            self._volume_level = target
            if base_code:
                await self.send(base_code)
                self._volume_timestamp = datetime.now()
            for step in range(abs(steps)):
                await self.send_volume(code)
                if self._volume_calls != volume_calls:
                    _LOGGER.debug('Aborted volume change')
                    # set correct level on abort
                    self._volume_level = base_level + (
                        self._volume_step * copysign(step + 1, steps))
                    break
            _LOGGER.debug('Volume level %f(%f)',
                          self._volume_level,
                          convert_volume_to_device(config, self._volume_level))

    async def _volume_timeout(self):
        """Wait out the configured gap between volume presses.

        Returns True when the timeout had (or has now) elapsed, i.e. the
        caller should treat the next press as a fresh interaction.
        """
        if CONF_VOLUME_TIMEOUT not in self._config[CONF_VOLUME_SET]:
            return False
        timeout = self._config[CONF_VOLUME_SET][CONF_VOLUME_TIMEOUT]
        delay = (datetime.now() - self._volume_timestamp).total_seconds()
        remain = timeout - delay
        if remain > 0.0:
            if remain < 0.5:
                # close enough: sleep off the remainder instead of a resend
                _LOGGER.debug("Volume timeout %f", remain)
                await asyncio.sleep(remain)
                return True
            else:
                return False
        else:
            _LOGGER.debug("Volume timeout %f", remain)
            return True

    @property
    def media_content_type(self):
        """Return content type currently active."""
        return MEDIA_TYPE_CHANNEL

    @property
    def source(self):
        """Return the current input source."""
        return self._source

    @property
    def source_list(self):
        """List of available input sources."""
        return list(self._config.get(CONF_SOURCES).keys())

    @property
    def sound_mode(self):
        """Name of the current sound mode."""
        return self._sound_mode

    @property
    def sound_mode_list(self):
        """List of available sound modes."""
        return list(self._config.get(CONF_SOUND_MODES).keys())

    @property
    def is_volume_muted(self):
        """Boolean if volume is currently muted."""
        return self._muted

    @property
    def volume_level(self):
        """Tracked volume level (0..1), or None when unknown."""
        return self._volume_level

    @property
    def media_title(self):
        """Title of current playing media."""
        return self._source
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-08-14 14:42
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import django_extensions.db.fields
class Migration(migrations.Migration):
    """Initial schema for the seasons app: Season and PlayerSeason.

    NOTE: auto-generated and (likely) already applied — do not edit the
    operations; create a new migration for any schema change.
    """
    initial = True
    dependencies = [
        ('teams', '0001_initial'),
        ('players', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='PlayerSeason',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
                ('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
                ('votes_win', models.PositiveIntegerField(default=0, editable=False, verbose_name='votes win')),
                ('votes_tie', models.PositiveIntegerField(default=0, editable=False, verbose_name='votes tie')),
                ('rating_mu', models.FloatField(default=25.0, verbose_name='Rating MU')),
                ('rating_sigma', models.FloatField(default=8.333333333333334, verbose_name='Rating SIGMA')),
                ('pts', models.FloatField(verbose_name='PTS')),
                ('reb', models.FloatField(verbose_name='REB')),
                ('ast', models.FloatField(verbose_name='AST')),
                ('stl', models.FloatField(verbose_name='STL')),
                ('blk', models.FloatField(verbose_name='BLK')),
                ('fg_pct', models.FloatField(verbose_name='FG%')),
                ('fg3_pct', models.FloatField(verbose_name='3P%')),
                ('ft_pct', models.FloatField(verbose_name='FT%')),
                ('ROSTERSTATUS', models.PositiveSmallIntegerField(verbose_name='ROSTERSTATUS')),
                ('GAMES_PLAYED_FLAG', models.CharField(max_length=8, verbose_name='GAMES_PLAYED_FLAG')),
                ('player', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='seasons', to='players.Player', verbose_name='player')),
            ],
            options={
                'verbose_name_plural': 'player seasons',
                'ordering': ['-season', '-rating_mu', 'rating_sigma'],
                'verbose_name': 'player season',
            },
        ),
        migrations.CreateModel(
            name='Season',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
                ('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
                ('abbr', models.CharField(max_length=16, verbose_name='season')),
            ],
            options={
                'verbose_name_plural': 'seasons',
                'ordering': ['-abbr'],
                'verbose_name': 'season',
            },
        ),
        # season/team FKs added after creation to break the circular dependency
        migrations.AddField(
            model_name='playerseason',
            name='season',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='players_seasons', to='seasons.Season', verbose_name='season'),
        ),
        migrations.AddField(
            model_name='playerseason',
            name='team',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='players_seasons', to='teams.Team', verbose_name='team'),
        ),
        migrations.AlterUniqueTogether(
            name='playerseason',
            unique_together=set([('player', 'season')]),
        ),
    ]
|
# Use the QuickSelect Algorithm to find the k-th largest element of an array
import random
def swap(arr, i, j):
    """Exchange the elements at positions i and j of arr in place."""
    tmp = arr[i]
    arr[i] = arr[j]
    arr[j] = tmp
def kthlargest(arr, k):
    """Return the k-th largest element of arr via quickselect.

    Expected O(n) time; mutates arr in place while partitioning.
    """
    n = len(arr)
    target = n - k  # index the k-th largest occupies in ascending order

    def partition(start, end):
        """Lomuto partition of arr[start:end+1] around a random pivot.

        Returns the pivot's final index: everything left of it is smaller,
        everything right of it is >= pivot.
        """
        ind = random.randint(start, end)
        arr[start], arr[ind] = arr[ind], arr[start]
        pivot = arr[start]
        divider = start + 1
        for explorer in range(start + 1, end + 1):
            if arr[explorer] < pivot:
                arr[explorer], arr[divider] = arr[divider], arr[explorer]
                divider += 1
        divider -= 1
        arr[start], arr[divider] = arr[divider], arr[start]
        return divider

    start, end = 0, n - 1
    divider = partition(start, end)
    while divider != target:
        # The pivot at `divider` is in its final position, so exclude it.
        # (The original kept it in range, which still converged but wasted
        # work; its inner loop also had two identical if/else branches.)
        if divider < target:
            start = divider + 1
        else:
            end = divider - 1
        divider = partition(start, end)
    return arr[divider]
#print(kthlargest([3, 4, 1, 5, 7,8 , 0, 0], 4))
def sortByResidueClass(arr, n):
    """Rearrange arr in place so elements are grouped by value % n.

    NOTE(review): looks experimental/unfinished — the inner
    `if max(dividers) > explorer` test guards the same `explorer += 1`
    as its implicit fall-through would, the invariant comment below is
    aspirational, and the function prints debug state. Confirm intended
    behavior before relying on it.
    """
    # dividers[i]: boundary index for residue class i (classes 0..n-2;
    # class n-1 is whatever remains to the right)
    dividers = [0]*(n-1)
    explorer = 0
    # everything before arr[dividers[i]] will be of residue class i or smaller
    while explorer < len(arr):
        if arr[explorer] % n < n-1:
            # move the element back to its class boundary, then shift every
            # boundary at or above its class one slot right
            swap(arr, dividers[arr[explorer] % n], explorer)
            for i in range(arr[explorer] % n, n-1):
                dividers[i] += 1
            # only advance once the element now at `explorer` is placed
            if max(dividers) > explorer:
                explorer += 1
        else:
            explorer += 1
    print(dividers)
    print(arr)
    return arr
#print(sortByResidueClass(arr = [3, 0, 4, 1, 5, 2], n = 4))
import heapq
def kthlargestheap(arr, k):
    """Return the k-th largest element of arr using a size-k min-heap.

    O(n log k) time, O(k) extra space. The heap holds the k largest
    elements seen so far; its minimum is the answer.
    """
    heap = arr[:k]
    heapq.heapify(heap)  # min heap
    for i in range(k, len(arr)):
        # Fixed: the original compared/pushed arr[k] on every iteration
        # instead of the current element arr[i].
        if arr[i] > heap[0]:
            # push the new element and drop the smallest in one O(log k) op
            heapq.heapreplace(heap, arr[i])
    return heap[0]
#print(kthlargest([3, 4, 1, 5, 7,8 , 0, 0], 4))
def hoare(arr):
    """Partition arr in place around its last element (Hoare-style scan).

    After the call the pivot (the original last element) is at its final
    sorted position: nothing to its left is larger and nothing to its
    right is smaller. No-op for arrays with fewer than two elements.

    Fixed: the original loop compared arr[l] with arr[r] and did nothing
    in the else branch, so neither pointer ever advanced — an infinite
    loop for any array of three or more elements.
    """
    if len(arr) < 2:
        return
    pivot = arr[-1]
    l, r = 0, len(arr) - 2
    while l <= r:
        # advance past elements already on the correct side of the pivot
        while l <= r and arr[l] < pivot:
            l += 1
        while l <= r and arr[r] > pivot:
            r -= 1
        if l <= r:
            arr[l], arr[r] = arr[r], arr[l]
            l += 1
            r -= 1
    # l is now the first slot of the >= pivot region; drop the pivot there.
    arr[l], arr[len(arr) - 1] = arr[len(arr) - 1], arr[l]
|
#import time
class Fremoga:
    """A user profile: a username, a fixed starting age and connections."""

    def __init__(self, name):
        self.name = name
        self.age = 13        # default age; born() only validates, never stores
        self.connect = []    # connections; callers append Fremoga objects or names

    def born(self, age):
        """Print whether a user of the given age may use the site (13+)."""
        if age < 13:
            print("This is a big kid website. Go play outside.")
        else:
            print("Welcome!")

    def getname(self):
        """Return this profile's username."""
        return self.name

    def addconnect(self, otherUser):
        """Append otherUser to this profile's connections."""
        self.connect.append(otherUser)

    def printconnect(self):
        """Print every stored connection, one per line."""
        for i in self.connect:
            print(i)

    def checkconnect(self, againstThis, a):
        """Warn when againstThis already appears among the connections.

        NOTE(review): stored connections are usually Fremoga objects while
        againstThis is raw input text, so the equality test presumably
        never matches — confirm intended types. Parameter `a` is unused
        but kept for interface compatibility.
        """
        for i in self.connect:
            if i == againstThis:
                print("Sorry, they are already in your connections")

    def getconnect(self):
        """Return the (mutable) list of connections."""
        return self.connect


class FremogaNetwork:
    """Registry of every Fremoga profile created through the network."""

    def __init__(self):
        self.user = []   # list of Fremoga instances

    def addUser(self, username):
        """Create a profile for username and register it."""
        self.user.append(Fremoga(username))  # list of profiles

    def checkUser(self, username):
        """Print the profile whose username matches the given input."""
        # Fixed: this loop iterated the undefined global `user`, raising
        # NameError; it must scan this network's own user list.
        for i in self.user:
            if i.getname() == username:
                print(i)
def listChoice():
    """Print the main menu of available actions."""
    menu = (
        "Would you like to:",
        "a) add a user",
        "b) add connections",
        "c) delete conection",
        "d) print connections",
        "e) quit program",
        "Write the letter",
    )
    for line in menu:
        print(line)
def main():
    """Interactive console driver: create a profile, then run one menu action.

    NOTE(review): several apparent gaps — the menu runs only once (no loop),
    `Database`/`Tester` are mostly unused, and choice "b" connects the raw
    name strings rather than the created Fremoga objects. Confirm before
    changing behavior.
    """
    Database = FremogaNetwork()
    Tester = Fremoga("Testing")
    print ("Welcome to Fremoga! What is your name?")
    M_name = input()
    MainUser = Fremoga(M_name)
    # NOTE(review): addUser creates a *separate* Fremoga for this name, so
    # Database does not hold MainUser itself.
    Database.addUser(M_name)
    print ("Nice to meet you ", M_name, "What is your age?")
    aging = int (input())
    MainUser.born (aging)
    listChoice()
    choice = input()
    if choice == "a":
        # add a user, then optionally befriend them
        print ("Enter a username:")
        one_name = input()
        User1 = Fremoga(one_name)
        Database.addUser(one_name)
        print ("You have now added", one_name, ". Would you like to become friends?")
        print("Type: 'yes' or 'no'")
        IsFriend = input()
        if IsFriend == 'yes':
            User1.addconnect(MainUser)
            MainUser.addconnect(User1)
            print (M_name, "and", one_name, "are now friends!")
        elif IsFriend == 'no':
            listChoice()
        else:
            print ("Please type that again correctly.")
            IsFriend = input()
    elif choice == "b":
        # connect two (newly created) users to each other
        print ("Enter the two users you would like to connect:")
        print ("User 1:")
        marcone = input ()
        User2 = Fremoga(marcone)
        print ("User 2:")
        marctwo = input()
        User3 = Fremoga(marctwo)
        # NOTE(review): connects the name strings, not the User objects.
        User2.addconnect(marctwo)
        User3.addconnect(marcone)
        User2.printconnect()
    # elif choice == "c":
    elif choice == "d":
        # printing connections is not implemented yet
        print ("Whose connections would you like to check?")
        checkMe = input()
        #Database.get
    # elif choice == "e":
    else:
        print("Oh no! That is not an input! Please input a letter.")
main()
|
# -*- coding: utf-8 -*-
from project_name import *
# Local development settings overriding the project defaults.
# NOTE(review): not suitable for production — DEBUG on, hard-coded paths
# and database credentials below.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# LOGGING = {
#     'version': 1,
#     'disable_existing_loggers': False,
#     'filters': {
#         'require_debug_false': {
#             '()': 'django.utils.log.RequireDebugFalse'
#         }
#     },
#     'handlers': {
#         'mail_admins': {
#             'level': 'ERROR',
#             'filters': ['require_debug_false'],
#             'class': 'django.utils.log.AdminEmailHandler'
#         }
#     },
#     'loggers': {
#         'django.request': {
#             'handlers': ['mail_admins'],
#             'level': 'ERROR',
#             'propagate': True,
#         },
#     }
# }
# Console-only logging for development; the project's own logger is the
# most verbose.
LOGGING = {
    'loggers': {
        '': {
            'handlers': ['console'],  # can be: null, console, mail_admin
            'level': 'WARNING',
        },
        '%s' % PROJECT_INSTANCE_NAME: {
            'handlers': ['console'],  # can be: null, console, mail_admin
            'level': 'DEBUG',
        },
        'django.request': {
            'handlers': ['console'],  # can be: null, console, mail_admin
            #'filters': ['require_debug_false'],  # means when debug set to false do logging
            'level': 'WARNING',
        },
        'django.db.backends': {  # For performance reasons, SQL logging is only enabled when settings.DEBUG is set to True
            'handlers': ['console'],  # can be: null, console, mail_admin
            'level': 'WARNING',
        },
    }
}
# Developer-machine absolute path; adjust per checkout.
BASEPATH = '/home/alireza/PycharmProjects/%s/' % PROJECT_INSTANCE_NAME
STATIC_ROOT = BASEPATH + 'static/'
SITE_URL = 'http://127.0.0.1:8000/'
LOGIN_URL = '/accounts/login/'
TEMPLATE_DIRS = (  # in here JUST import django admin templates
    '/usr/lib/python2.7/site-packages/django/contrib/admin/templates/',
)
SERVE_STATIC_FILES = True
# NOTE(review): credentials are hard-coded for local MySQL only — never
# reuse in production.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': '%s' % PROJECT_INSTANCE_NAME,
        'USER': 'root',
        'PASSWORD': '1234',
        'HOST': '',
        'PORT': '',
    }
}
ADMINS = (
    # ('Sadeghi', 'sadeghi@arsh.co'),
)
|
import Image
class Resize:
    """Thumbnail a fixed source image and rescale point coordinates."""

    def __init__(self, max_scale, height):
        self.max_scale_ = max_scale  # full-scale value of the input range
        self.height_ = height        # output height the range maps onto

    def image(self, name, width_resize):
        """Thumbnail the source picture "fon.jpg" and save it under name."""
        picture = Image.open("fon.jpg")
        picture.thumbnail((width_resize, width_resize))
        picture.save(name)

    def coordinates(self, point):
        """Map point from the 0..max_scale range onto 0..height."""
        ratio = float(point) / self.max_scale_
        return ratio * self.height_
__author__ = 'lisgein'
|
'''
get specific number(--num) images from a folder(--input_dir), rename these images(--prefix), turn them to destination folder(save_dir)
'''
import sys
from glob import glob
import os
import argparse
import scipy.misc
# Command-line options: where to read images from, where to write the
# renamed copies, which slice of the listing to process, and the new
# filename prefix. --given_path (a csv-ish list of paths) overrides
# --input_dir when supplied.
parser = argparse.ArgumentParser()
parser.add_argument('--input_dir', type=str, default='', help='')
parser.add_argument('--save_dir', type=str, default='', help='')
parser.add_argument('--start', type=int, default=0, help='start index')
parser.add_argument('--num', type=int, default=100, help='')
parser.add_argument('--prefix', type=str, default='image_', help='')
parser.add_argument('--given_path', type=str, default='None', help='when given images path, ignore input_dir')
args = parser.parse_args()
def main():
    """Copy up to --num images (from --start) into --save_dir, renamed
    as <prefix><index>.<ext>.

    NOTE(review): scipy.misc.imread/imsave were removed in SciPy 1.2 —
    confirm the pinned scipy version or port to imageio. Re-encoding via
    imread/imsave may also alter image bytes versus a plain file copy.
    """
    save_dir = args.save_dir
    if not os.path.exists(save_dir):
        os.mkdir(save_dir)
    input_dir = args.input_dir
    if args.given_path == 'None':
        # list every file in the input directory
        if not os.path.exists(input_dir):
            print('[*]input dir not exsit!')
            sys.exit()
        data_list = glob(os.path.join(input_dir, '*'))
    else:
        # explicit list: first comma-separated field of each line is a path
        f = open(args.given_path,'r')
        data = f.readlines()
        data_list = [item.split(',')[0] for item in data]
        f.close()
    total = len(data_list)
    # clamp the request to however many files actually exist
    num = args.num if args.num < total else total
    start = args.start
    for i in range(start,start+num):
        img_path = data_list[i]
        img = scipy.misc.imread(img_path)
        base_name = os.path.basename(img_path)
        img_type = base_name.split('.')[-1]  # keep the original extension
        rename = args.prefix + str(i) + '.' + img_type
        des_path = os.path.join(save_dir, rename)
        scipy.misc.imsave(des_path,img)
# Script entry point.
if __name__ == '__main__':
    main()
|
import time
class Solution:
    """Count primes below n using a sieve of Eratosthenes."""

    def countPrimes(self, n):
        """Return the number of primes strictly less than n."""
        if n<2:
            return 0
        # flags[i] stays 1 while i is still presumed prime
        flags = [1] * n
        flags[0] = 0
        flags[1] = 0
        # only sieve up to sqrt(n); larger factors are already covered
        for p in range(2, int(n ** 0.5) + 1):
            if flags[p]:
                # strike every multiple of p starting at p*p in one slice
                flags[p ** 2:n:p] = [0] * ((n - 1 - p ** 2) // p + 1)
        return sum(flags)
# NOTE(review): n is ~5e7, so this allocates a 50M-entry list (hundreds
# of MB) and takes seconds — a stress run, not a quick check.
a=Solution()
b=a.countPrimes(49997999)
print(b)
|
from django.db import models
class Vmhost(models.Model):
    """A virtualization host (hypervisor) machine."""
    name=models.CharField(max_length=200)
    virtType=models.CharField(max_length=200)
class Guest(models.Model):
    """A guest VM running on a Vmhost."""
    # NOTE(review): ForeignKey without on_delete is pre-Django-2.0 syntax —
    # confirm the project's Django version before adding on_delete.
    vmhost= models.ForeignKey(Vmhost)
    name=models.CharField(max_length=200)
    currCpu=models.CharField(max_length=200)
    currMemory=models.CharField(max_length=200)
# Create your models here.
|
# -*- coding: utf-8 -*-
""" Classe definissant une delivery (mission) caracterisee par :"""
#- son nom
#- la position de destination
#- l'id du drone auquel est affecte la mission courante
#- l'id du stock d'ou vient le colis
#- l'id du colis
#- le statut de la mission
# Etat de la livraison
import time
class Delivery:
    """A delivery (mission): a packet to carry along a path, with a status."""

    # Class-level counter used to hand out unique, increasing ids.
    __id = 0

    # Mission status values.
    NOT_STARTED = 0
    STARTED = 1
    ABORTED = 2
    FINISHED = 3

    def __init__(self, name, packet, path):
        self.name = name
        self.status = Delivery.NOT_STARTED
        # Fixed: the counter was never incremented, so every delivery
        # previously received id == 1.
        Delivery.__id += 1
        self.id = Delivery.__id
        self.packet = packet
        self.path = path
        self.timestamp = time.time()  # creation time, epoch seconds
|
from django.db import models
from djutil.models import TimeStampedModel
from edtech.models.mixins import DefaultPermissions
from edtech.models.test_series import TestSeries
from edtech.models.topic import Topic
class Question(TimeStampedModel, DefaultPermissions):
    """A test question belonging to a topic and, optionally, a test series."""
    description = models.TextField()
    diagram = models.CharField(max_length=500, blank=True)
    difficulty_level = models.IntegerField(default=1, blank=True)
    marks = models.FloatField(default=10)
    # NOTE(review): ForeignKey without on_delete implies pre-Django-2.0 —
    # confirm the pinned Django version.
    topic = models.ForeignKey(Topic)
    test_series = models.ForeignKey(TestSeries, null=True, related_name='questions')
    hints = models.TextField(null=True, blank=True)
    def __unicode__(self):
        # Python 2 string representation (model predates __str__/Python 3).
        return self.description
|
"""
红黑树查询,红黑树真的不好理解啊
"""
"""
红黑树的五条性质:
1)任何一个节点非红即黑;
2)树的根为黑色;
3)叶子节点为黑色(注意:红黑树的所有叶子节点都指的是Nil节点);
4)任何两个父子节点不可能同时为红色;
5)任何节点到其所有分枝叶子的简单路径上的黑节点个数相同;
红黑树通过上述五条性质,保证整棵树的黑色节点数量平衡,使得红黑树是一个红黑平衡树,尽可能平衡的二叉树的搜索速度是非常快的。
上亿条数据,通过简单的几十次对比就能找到需要的数据,确实非常厉害了。
"""
class Entity(object):
    """Data entity: a toy key/value record used as tree payload."""

    def __init__(self, key, value):
        self.key = key
        self.value = value
# First, define the red-black tree node.
class RBNode():
    """A red/black tree node with parent and child links."""

    def __init__(self,key,value=None,color="R"):
        """
        :param key: search key
        :param color: new nodes default to red
        :param value: payload stored alongside the key
        """
        self.value = value
        self.key = key
        self.color = color
        self.left = None
        self.right =None
        self.parent = None

    def ifBlack(self):
        """Return True when this node is black."""
        return self.color == "B"

    def setBlack(self):
        self.color = "B"

    def setRed(self):
        self.color = "R"

    def printSelf(self,node):
        """Pre-order dump of (key, colour) starting at node."""
        print(node.key,node.color)
        if node.left is not None:
            self.printSelf(node.left)
        if node.right is not None:
            self.printSelf(node.right)


class RBTree():
    """
    Red-black tree. The five invariants:
    1) every node is red or black;
    2) the root is black;
    3) every leaf (Nil/empty node) is black;
    4) both children of a red node are black (no two consecutive reds);
    5) every root-to-leaf path holds the same number of black nodes.
    """

    def __init__(self):
        self.root = None

    # Flip the colour of a node.
    def changeColor(self,node):
        node.color = "R" if node.color == "B" else "B"

    # Left and right rotations.
    def leftRotate(self,node):
        """
        A left rotation does three things:
        1. move the right child's left subtree over to node.right,
           re-parenting it onto node when non-empty
        2. make node the left child of its old right child
        3. hook the old right child onto node's parent (or make it the root)
        """
        parentNode = node.parent
        rightNode = node.right
        #step1:
        node.right = rightNode.left
        if node.right:
            # Fixed: this used to re-assign rightNode.parent, leaving the
            # transplanted subtree still pointing at its old parent.
            node.right.parent = node
        #step2:
        rightNode.left = node
        node.parent = rightNode
        #step3:
        rightNode.parent = parentNode
        if parentNode:
            if parentNode.left == node:
                parentNode.left = rightNode
            else:
                parentNode.right = rightNode
        else:
            self.root = rightNode

    def rightRotate(self,node):
        """
        A right rotation mirrors the same three steps:
        1. move the left child's right subtree over to node.left,
           re-parenting it onto node when non-empty
        2. make node the right child of its old left child
        3. hook the old left child onto node's parent (or make it the root)
        """
        parentNode = node.parent
        leftNode = node.left
        # step1:
        node.left = leftNode.right
        if node.left:
            node.left.parent = node
        #step2:
        leftNode.right = node
        node.parent = leftNode
        #step3:
        leftNode.parent = parentNode
        if parentNode:
            if parentNode.left == node:
                parentNode.left = leftNode
            else:
                parentNode.right = leftNode
        else:
            self.root = leftNode

    def treeSearch(self,key):
        """
        Searching a red-black tree is a plain BST lookup: walk down,
        comparing keys. Returns the node, or None when absent.
        """
        if not self.root:
            return None
        else:
            point = self.root
            while point != None:
                if point.key == key:
                    return point
                elif point.key > key:
                    point = point.left
                else:
                    point = point.right
            return None

    def insert(self,key,value=None):
        # Case 1: empty tree — insert directly and colour the root black.
        if not self.root:
            self.root = RBNode(key=key,value=value,color="B")
        else:
            currentpoint = self.root
            nextPoint = currentpoint
            while nextPoint != None:
                if nextPoint.key < key:
                    if nextPoint.right == None:
                        currentpoint = nextPoint
                    nextPoint = nextPoint.right
                elif nextPoint.key > key:
                    if nextPoint.left == None:
                        currentpoint = nextPoint
                    nextPoint = nextPoint.left
                else:
                    # Case 2: key already present — just replace the value,
                    # no new node is needed.
                    # Fixed: without this return the loop never terminated
                    # on duplicate keys.
                    nextPoint.value = value
                    return
            # Insert a fresh (red) node below currentpoint.
            newNode = RBNode(key,value)
            newNode.parent = currentpoint
            if currentpoint.key < key:
                currentpoint.right = newNode
            else:
                currentpoint.left = newNode
            self.rbBalance(newNode)

    # After inserting node, restore the red-black invariants around it.
    def rbBalance(self,node):
        parent = node.parent
        if parent == None:
            # node is the root: repaint it black and stop.
            node.color = "B"
            return
        if parent.color == "R":
            # Red parent: the uncle's colour decides recolour vs. rotate.
            if node.parent == node.parent.parent.left:
                uncle = node.parent.parent.right
            else:
                uncle = node.parent.parent.left
            if uncle and uncle.color == "R":
                # Red uncle: recolour and continue fixing at the grandparent.
                node.parent.color = "B"
                uncle.color = "B"
                uncle.parent.color = "R"
                self.rbBalance(uncle.parent)
            elif parent.parent.left == parent and (not uncle or uncle.color == "B"):
                if parent.left == node:
                    # LL case: recolour, then rotate the grandparent right.
                    parent.color = "B"
                    parent.parent.color = "R"
                    self.rightRotate(parent.parent)
                elif parent.right == node:
                    # LR case: rotate the parent left into the LL shape.
                    self.leftRotate(parent)
                    self.rbBalance(parent)
            elif parent.parent.right == parent and (not uncle or uncle.color == "B"):
                if parent.right == node:
                    # RR case: recolour, then rotate the grandparent left.
                    parent.color = "B"
                    parent.parent.color = "R"
                    self.leftRotate(parent.parent)
                elif parent.left == node:
                    # RL case: rotate the parent right into the RR shape.
                    # Fixed: this used to left-rotate, which never resolved
                    # the red-red violation in the mirrored case.
                    self.rightRotate(parent)
                    self.rbBalance(parent)

    def deleteNode(self,key):
        """
        Find the node to delete; conceptually its successor's key/value are
        swapped in and the successor is removed instead.
        NOTE(review): unfinished stub — it only detaches a leaf that has a
        parent and performs no rebalancing; internal nodes and the root
        are not handled.
        """
        targetNode = self.treeSearch(key=key)
        if targetNode:
            if not targetNode.left and not targetNode.right:
                if targetNode.parent.left == targetNode:
                    targetNode.parent.left = None
                elif targetNode.parent.right == targetNode:
                    targetNode.parent.right = None
if __name__ == '__main__':
    # Smoke test: build a small tree, dump it pre-order, then look up a
    # stored value.
    rbTree = RBTree()
    rbTree.insert(1)
    rbTree.insert(2)
    rbTree.insert(4)
    rbTree.insert(3)
    rbTree.insert(5)
    rbTree.insert(6,value=123)
    rbTree.insert(7)
    rbTree.root.printSelf(rbTree.root)
    res = rbTree.treeSearch(6)
    print(res.value)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.