# pylint: disable=missing-function-docstring, missing-module-docstring
import numpy as np
from numpy.random import randint, uniform
from modules import types
from pyccel.epyccel import epyccel
def test_int_default(language):
f1 = types.test_int_default
f2 = epyccel(f1, language = language)
a = randint(low = -1e9, high = 0, dtype = int) # negative
b = randint(low = 0, high = 1e9, dtype = int) # positive
    assert f1(a) == f2(a) # add type comparison when https://github.com/pyccel/pyccel/issues/735 is solved
assert f1(b) == f2(b)
def test_int64(language):
f1 = types.test_int64
f2 = epyccel(f1, language = language)
a = randint(low = -1e9, high = 0, dtype = np.int64) # negative
b = randint(low = 0, high = 1e9, dtype = np.int64) # positive
assert f1(a) == f2(a)
assert f1(b) == f2(b)
def test_int32(language):
f1 = types.test_int32
f2 = epyccel(f1, language = language)
a = randint(low = -1e9, high = 0, dtype = np.int32) # negative
b = randint(low = 0, high = 1e9, dtype = np.int32) # positive
assert f1(a) == f2(a)
assert f1(b) == f2(b)
def test_int16(language):
f1 = types.test_int16
f2 = epyccel(f1, language = language)
a = randint(low = -32768, high = 0, dtype = np.int16) # negative
b = randint(low = 0, high = 32767, dtype = np.int16) # positive
assert f1(a) == f2(a)
assert f1(b) == f2(b)
def test_int8(language):
f1 = types.test_int8
f2 = epyccel(f1, language = language)
a = randint(low = -128, high = 0, dtype = np.int8) # negative
b = randint(low = 0, high = 127, dtype = np.int8) # positive
assert f1(a) == f2(a)
assert f1(b) == f2(b)
def test_real_default(language):
f1 = types.test_real_default
f2 = epyccel(f1, language = language)
    a = uniform() * 1e9 # positive
    b = uniform() * -1e9 # negative
assert f1(a) == f2(a)
assert f1(b) == f2(b)
def test_float32(language):
f1 = types.test_float32
f2 = epyccel(f1, language = language)
    a = np.float32(uniform() * 1e9) # positive
    b = np.float32(uniform() * -1e9) # negative
assert f1(a) == f2(a)
assert f1(b) == f2(b)
def test_float64(language):
f1 = types.test_float64
f2 = epyccel(f1, language = language)
    a = np.float64(uniform() * 1e9) # positive
    b = np.float64(uniform() * -1e9) # negative
assert f1(a) == f2(a)
assert f1(b) == f2(b)
def test_complex_default(language):
f1 = types.test_complex_default
f2 = epyccel(f1, language = language)
a = complex(uniform() * -1e9, uniform() * 1e9)
b = complex(uniform() * 1e9, uniform() * -1e9)
assert f1(a) == f2(a)
assert f1(b) == f2(b)
def test_complex64(language):
f1 = types.test_complex64
f2 = epyccel(f1, language = language)
a = complex(uniform() * -1e9, uniform() * 1e9)
b = complex(uniform() * 1e9, uniform() * -1e9)
a = np.complex64(a)
b = np.complex64(b)
assert f1(a) == f2(a)
assert f1(b) == f2(b)
def test_complex128(language):
f1 = types.test_complex128
f2 = epyccel(f1, language = language)
a = complex(uniform() * -1e9, uniform() * 1e9)
b = complex(uniform() * 1e9, uniform() * -1e9)
a = np.complex128(a)
b = np.complex128(b)
assert f1(a) == f2(a)
assert f1(b) == f2(b)
def test_bool(language):
f1 = types.test_bool
f2 = epyccel(f1, language = language)
assert f1(True) == f2(True)
assert f1(False) == f2(False)
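# For reference, a hypothetical sketch of the functions in `modules/types.py`
# that these tests wrap (the real module ships with the test suite). Pyccel
# infers the low-level type from the annotation, e.g.:
#
#     def test_int_default(a : int):
#         return a
#
#     def test_float32(a : 'float32'):
#         return a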
|
from flask import Flask, render_template, request, redirect, session, jsonify
from flask.helpers import make_response
import random
import smtplib
import mysql.connector
import os
from werkzeug.utils import secure_filename
app = Flask(__name__)
app.secret_key = os.urandom(10)
mydb = mysql.connector.connect(
host="localhost",
user="root",
password="tharun@159",
database="restaurant",
)
mycursor=mydb.cursor()
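# Note: most queries below interpolate user input with str.format(), which is
# open to SQL injection. A safer pattern (a sketch using the same
# mysql.connector cursor, as the login queries below already do) passes the
# values separately:
#
#     mycursor.execute("SELECT * FROM customers WHERE CustomerId = %s",
#                      (customer_id,))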
@app.route('/')
def home():
return render_template('home.html')
@app.route('/adminlogin')
def adminlogin():
return render_template('adminlogin.html')
@app.route('/login')
def login():
return render_template('login.html')
@app.route('/logout')
def logout():
session.clear()
return redirect('/')
Email=''
@app.route('/login_validation', methods=['POST'])
def login_validation():
admin=request.form.get('Administrator')
if not admin:
global Email
Email = request.form.get('Email')
password = request.form.get('password')
mycursor.execute('SELECT * FROM customers WHERE Email=%s AND Password=%s',(Email,password))
customers = mycursor.fetchall()
if len(customers)>0:
session['CustomerId'] = customers[0][0]
session['Customername']=customers[0][1]
return redirect('/')
else:
return render_template('login.html', error_message="Invalid Email Id or Password")
else:
Email = request.form.get('Email')
password = request.form.get('password')
mycursor.execute('SELECT * FROM employee WHERE emailid=%s AND password=%s',(Email,password))
emps = mycursor.fetchall()
if len(emps)>0:
session['CustomerId'] = emps[0][0]
session['Customername']=emps[0][1]
session['admin']=True
return redirect('/')
else:
return render_template('login.html', error_message="Invalid Email Id or Password")
@app.route('/register')
def register():
return render_template('register.html')
@app.route('/addemployee')
def addemployee():
return render_template('addemployee.html')
@app.route('/menu')
def menu():
mycursor.execute("""select * from foodCategory""")
foodCategories = mycursor.fetchall()
mycursor.execute("""select * from menu""")
menu = mycursor.fetchall()
print(menu)
return render_template('menu.html', foodCategories=foodCategories, menu=menu)
@app.route('/menuchange',methods=['POST','GET'])
def menuchange():
mycursor.execute("""select max(`DIshID`) from `menu`""")
m=mycursor.fetchall()
mx=m[0][0]+1
mycursor.execute('select * from foodCategory')
cat=mycursor.fetchall()
rem=request.form.get('removeit')
add=request.form.get('AddIt')
chit=request.form.get('changeit')
adon=request.form.get('adon')
if adon!=None:
return render_template('change.html',dish=None,mx=mx,cat=cat)
elif rem!=None:
mycursor.execute("""update `menu` set `active`=0 where DishID='{}'""".format(int(rem)))
mydb.commit()
return redirect('/menu')
elif add!=None:
mycursor.execute("""update `menu` set `active`=1 where DishID='{}'""".format(int(add)))
mydb.commit()
return redirect('/menu')
elif chit!=None:
# print(chit)
mycursor.execute("""select * from `menu` where `DishID`='{}'""".format(int(chit)))
k=mycursor.fetchall()
dish=k[0]
print(dish)
return render_template('change.html',dish=dish,mx=mx,cat=cat)
else:
return redirect('/menu')
@app.route('/changeconts',methods=["POST","GET"])
def changeconts():
dishId=request.form.get("dishid")
file = request.files['image']
if file:
filename=str(dishId+".png")
fileloc='static/img/menu/'
print(filename)
file.save(os.path.join(fileloc, filename))
vn=request.form.get('vn')
title=request.form.get('title')
price=request.form.get('price')
description=request.form.get('description')
CategoryId=request.form.get('CategoryId')
    mycursor.execute('select max(DishID) from menu')
m=mycursor.fetchall()
mx=m[0][0]+1
if not (int(dishId)==mx) :
mycursor.execute("""update `menu` set `DishName`='{}' , `CategoryId`='{}' , `Price`='{}' , `Veg_NonVeg`='{}' , `Description`='{}' where `DishID`='{}'""".format(title,CategoryId,price,vn,description,dishId))
mydb.commit()
else:
mycursor.execute("""insert into `menu`(`DishID`,`DishName`,`CategoryId`,`Price`,`Veg_NonVeg`,`Description`,`active`) value('{}','{}','{}','{}','{}','{}',1)""".format(dishId,title,CategoryId,price,vn,description))
mydb.commit()
return redirect('/menu')
@app.route('/admininfo')
def admininfo():
mycursor.execute("""select * from employee where employeeID='{}'""".format(session['CustomerId']))
empinfodframe=mycursor.fetchall()
empinfo=empinfodframe[0]
mycursor.execute("""select * from `Reservations` where `Date`>(Select CURRENT_DATE()) order by `Date` asc""")
reservations=mycursor.fetchall()
mycursor.execute("""select `RestaurantWallet` from `Restaurant`""")
RestaurantWallet=mycursor.fetchall()
RestaurantWallet=RestaurantWallet[0][0]
mycursor.execute("""select * from `orders` order by `OrderId` desc""")
orderstaken = mycursor.fetchall()
orderdetails={}
delivery_info={}
delivery_agent_info={}
for i in orderstaken:
mycursor.execute("""select `menu`.`DishName`,`orderDetails`.`Quantity`,(`menu`.`Price`)*(`orderDetails`.`Quantity`) from (`orderDetails` inner join `menu` on `orderDetails`.`DishId`=`menu`.`DishID` ) where `orderDetails`.`DetailsId`= '{}'""".format(i[4]))
orderdetails[i[4]]=mycursor.fetchall()
mycursor.execute("""select * from deliveries where OrderId='{}'""".format(i[0]))
delivery_info[i[0]] = mycursor.fetchall()
mycursor.execute("""select `AgentName`, `MobileNum` from `delivery_agents` where `AgentId`='{}'""".format(delivery_info[i[0]][0][0]))
delivery_agent_info[i[0]] = mycursor.fetchall()
return render_template('admininfo.html', empinfo=empinfo,reservations=reservations, orderstaken=orderstaken,orderdetails=orderdetails, delivery_info=delivery_info, delivery_agent_info=delivery_agent_info, RestaurantWallet=RestaurantWallet)
@app.route('/add_to_cart', methods=['POST'])
def add_to_cart():
if 'CustomerId' in session:
print(session['CustomerId'])
addDishId = request.form.get('addDishId')
mycursor.execute("""insert into `cart` (`CustomerId`, `DishId`, `Quantity`) values ('{}', '{}', '{}')""".format(session['CustomerId'], addDishId, 1))
mydb.commit()
return redirect('/menu')
else:
        return render_template('login.html', message='Please log in first to add food to your cart')
@app.route('/cart')
def cart():
if 'CustomerId' in session:
mycursor.execute("""select Address from customers where CustomerId='{}'""".format(session['CustomerId']))
myAddress = mycursor.fetchall()
print(myAddress)
mycursor.execute("""select menu.DishName, menu.Price, cart.Quantity, menu.DishId from (cart inner join menu) where cart.CustomerId='{}' and cart.DishId=menu.DishId""".format(session['CustomerId']))
cartDishes = mycursor.fetchall()
emptyCart = False
if (cartDishes==[]):
emptyCart = True
subTotalBill = 0
for dish in cartDishes:
subTotalBill += dish[1]*dish[2]
return render_template('cart.html', emptyCart=emptyCart, cartDishes=cartDishes, subTotalBill=subTotalBill, totalBill = subTotalBill+30, myAddress=myAddress[0][0])
else:
        return render_template('login.html', message='Please log in first to view your cart')
payment_otp = 0
payment_method = ''
@app.route('/place_order', methods=['GET','POST'])
def place_order():
dish_id_delete = request.form.get("remove_itemid")
changeAddress = request.form.get("changeAddress")
makePayment = request.form.get("makePayment")
global payment_method
payment_method = request.form.get("payment_method")
if payment_method != None:
if payment_method == "no_option":
return render_template("payment.html", error_message="Select a valid payment method")
elif payment_method == "cod":
return redirect('/place_order_continued')
elif payment_method == "ewallet":
mycursor.execute("""select menu.DishId, menu.Price, cart.Quantity, menu.DishName from (cart inner join menu) where cart.CustomerId='{}' and cart.DishId=menu.DishId""".format(session['CustomerId']))
cartDishes = mycursor.fetchall()
# print(cartDishes)
totalBill = 30
for dish in cartDishes:
totalBill += dish[1]*dish[2]
mycursor.execute("""select Wallet,Email from customers where CustomerId='{}'""".format(session['CustomerId']))
balance = mycursor.fetchall()
if balance[0][0]>=totalBill:
s = smtplib.SMTP('smtp.gmail.com', 587)
s.starttls()
s.login("restaurants1233@gmail.com", "restrest")
global payment_otp
payment_otp=random.randint(10000,99999)
message = "Your OTP for the payment of INR {} from your E-Wallet".format(totalBill) + " is "+str(payment_otp)
s.sendmail("restuarants1233@gmail.com",balance[0][1], message)
s.quit()
return render_template('wallet.html', totalBill=totalBill, balance=balance[0][0])
else:
return render_template('payment.html', error_message="Insufficient balance in your E-Wallet", balance_message="Balance: INR {}".format(balance[0][0]), bill_message="Total Bill: INR {}".format(totalBill))
elif dish_id_delete != None:
print(dish_id_delete)
mycursor.execute("""delete from `cart` where `CustomerId`='{}' and `DishId`='{}'""".format(session['CustomerId'],dish_id_delete))
mydb.commit()
return redirect("/cart")
elif changeAddress != None:
return redirect("/update_profile")
elif makePayment != None:
mycursor.execute("""select menu.DishId, menu.Price, cart.Quantity, menu.DishName from (cart inner join menu) where cart.CustomerId='{}' and cart.DishId=menu.DishId""".format(session['CustomerId']))
cartDishes = mycursor.fetchall()
# print(cartDishes)
for dish in cartDishes:
dish_quantity = request.form.get(dish[3])
print(dish_quantity)
mycursor.execute("""update `cart` set `Quantity` = '{}' where `DishId` = '{}'""".format(dish_quantity, dish[0]))
mydb.commit()
mycursor.execute("""select * from delivery_agents where AgentStatus='Available'""")
availableAgents = mycursor.fetchall()
print(availableAgents)
if len(availableAgents) == 0:
return render_template('orderFinished.html', message="Sorry, all of our delivery agents are busy right now...Please try again after some time", message_code="Agents_Unavailable")
else:
return render_template('payment.html')
@app.route('/payment_otp_authentication', methods=["POST", "GET"])
def payment_otp_authentication():
entered_otp=request.form.get('entered_otp')
entered_otp=int(entered_otp)
if entered_otp != payment_otp :
error_message="Invalid OTP - Payment failed. Please try again!"
return render_template('payment.html',error_message=error_message)
else:
mycursor.execute("""select menu.DishId, menu.Price, cart.Quantity, menu.DishName from (cart inner join menu) where cart.CustomerId='{}' and cart.DishId=menu.DishId""".format(session['CustomerId']))
cartDishes = mycursor.fetchall()
totalBill = 30
for dish in cartDishes:
totalBill += dish[1]*dish[2]
mycursor.execute("""update customers set Wallet = Wallet - {} where CustomerId='{}'""".format(totalBill, session['CustomerId']))
mydb.commit()
mycursor.execute("""update restaurant set RestaurantWallet = RestaurantWallet + {}""".format(totalBill))
mydb.commit()
return redirect('/place_order_continued')
@app.route('/place_order_continued', methods=["POST", "GET"])
def place_order_continued():
#adding data into orderDetails
mycursor.execute("""select max(DetailsId) from orderDetails""")
maxDetailsIdList = mycursor.fetchall()
maxDetailsId = maxDetailsIdList[0][0]
if maxDetailsId==None:
addDetailsId = 1
else:
addDetailsId = maxDetailsId + 1
# print(maxDetailsId)
# print(addDetailsId)
mycursor.execute("""select menu.DishId, menu.Price, cart.Quantity, menu.DishName from (cart inner join menu) where cart.CustomerId='{}' and cart.DishId=menu.DishId""".format(session['CustomerId']))
cartDishes = mycursor.fetchall()
# print(cartDishes)
totalBill = 30
for dish in cartDishes:
mycursor.execute("""insert into `orderDetails` (`DetailsId`, `DishId`, `Quantity`) values ('{}', '{}', '{}')""".format(addDetailsId, dish[0], dish[2]))
mydb.commit()
totalBill += dish[1]*dish[2]
#adding data into orders
mycursor.execute("""select CURRENT_TIME""")
time = mycursor.fetchall()
TIME = time[0][0]
mycursor.execute("""select CURRENT_DATE""")
date = mycursor.fetchall()
DATE = date[0][0]
# print(addDateTime)
# print(type(addDateTime))
mycursor.execute("""insert into `orders` (`Date`,`Time`, `CustomerId`, `DetailsId`, `BillAmount`) values ('{}','{}', '{}', '{}', '{}')""".format(DATE,TIME , session['CustomerId'], addDetailsId, totalBill))
mydb.commit()
#deleting from cart
mycursor.execute("""delete from `cart` where `CustomerId`='{}'""".format(session['CustomerId']))
mydb.commit()
#getting orderId
mycursor.execute("""select max(OrderId) from orders""")
orderId = mycursor.fetchall()
orderId = orderId[0][0]
#assigning deliver agent to this order
mycursor.execute("""select * from delivery_agents where AgentStatus='Available'""")
availableAgents = mycursor.fetchall()
print(availableAgents)
minOrders = 100000
for agent in availableAgents:
if agent[8] < minOrders:
minOrders = agent[8]
mycursor.execute("""select * from delivery_agents where AgentStatus='Available' and NumberOfOrders='{}'""".format(minOrders))
minOrderAgents = mycursor.fetchall()
assignedAgent = minOrderAgents[0]
#changing status of delivery agent
mycursor.execute("""update delivery_agents set AgentStatus='NotAvailable', NumberOfOrders = NumberOfOrders+1 where AgentId='{}'""".format(assignedAgent[0]))
mydb.commit()
payment_status = ''
if payment_method=='cod':
payment_status = 'Cash-On-Delivery'
elif payment_method=='ewallet':
payment_status = 'Paid'
mycursor.execute("""insert into `deliveries` (`AgentId`, `OrderId`, `DeliveryStatus`, `PaymentStatus`) values ('{}', '{}', 'Preparing', '{}')""".format(assignedAgent[0], orderId, payment_status))
mydb.commit()
return render_template('orderFinished.html', message="Hurray!! The order has been placed, your delicious food is enroute",message_code="Successful")
@app.route('/myorders',methods=["POST","GET"])
def myorders():
# print("hello")
if 'CustomerId' in session:
mycursor.execute("""select * from `orders` where `CustomerId`= '{}' order by `OrderId` desc""".format(session['CustomerId']))
orderstaken = mycursor.fetchall()
orderdetails={}
delivery_info={}
delivery_agent_info={}
# statusreport={}
for i in orderstaken:
mycursor.execute("""select `menu`.`DishName`,`orderDetails`.`Quantity`,(`menu`.`Price`)*(`orderDetails`.`Quantity`) from (`orderDetails` inner join `menu` on `orderDetails`.`DishId`=`menu`.`DishID` ) where `orderDetails`.`DetailsId`= '{}'""".format(i[4]))
orderdetails[i[4]]=mycursor.fetchall()
mycursor.execute("""select * from deliveries where OrderId='{}'""".format(i[0]))
delivery_info[i[0]] = mycursor.fetchall()
mycursor.execute("""select `AgentName`, `MobileNum` from `delivery_agents` where `AgentId`='{}'""".format(delivery_info[i[0]][0][0]))
delivery_agent_info[i[0]] = mycursor.fetchall()
return render_template('myOrders.html', orderstaken=orderstaken,orderdetails=orderdetails, delivery_info=delivery_info, delivery_agent_info=delivery_agent_info)
# mycursor.execute("""select * from `Delivery` where `orderID`= '{}'""".format(i[0]))
# deliverydetails=mycursor.fetchall()
# statusreport[i[0]]=deliverydetails[0][3]
# Noorder = False
# if (orderstaken==[]):
# Noorder = True
# return render_template('myOrders.html', orderstaken=orderstaken,orderdetails=orderdetails,statusreport=statusreport,deliverydetails=deliverydetails)
else:
        return render_template('login.html', message='Please log in first to see your orders')
@app.route('/reservations')
def reservations():
if 'CustomerId' in session:
mycursor.execute("""SELECT DATE_ADD( CURRENT_DATE(), interval 1 day)""")
tomdate=mycursor.fetchall()
mycursor.execute("""select distinct Capacity from tables""")
capacities = mycursor.fetchall()
# print(capacities)
return render_template ('reservations.html', capacities=capacities, slot_table={},tomdate=tomdate)
else:
        return render_template('login.html', message='Please log in first to reserve a table')
@app.route('/check_availability', methods=['GET','POST'])
def check():
capacity = request.form.get('capacity')
print(capacity)
date=request.form.get('date')
global select_date
select_date=date
mycursor.execute("""select tables.TableId, timeslots.Slot from (tables cross join timeslots)
where tables.TableId not in (select TableId from Reservations where Slot = timeslots.Slot and Date='{}') and tables.Capacity='{}' ;""".format(date,capacity))
availability = mycursor.fetchall()
print(availability)
mycursor.execute("""SELECT DATE_ADD( CURRENT_DATE(), interval 1 day)""")
tomdate=mycursor.fetchall()
slot_table = {}
for pair in availability:
slot_table[pair[1]] = pair[0]
return render_template('reservations.html', slot_table=slot_table,tomdate=tomdate)
@app.route('/make_reservation', methods=['POST', 'GET'])
def make_reservation():
reservedSlotTable = request.form.get('slotTable')
suggesition=request.form.get('suggesitions')
# print(reservedSlotTable)
[reservedSlot, reservedTable] = reservedSlotTable.split('+')
global select_date
mycursor.execute("""insert into `Reservations` (`TableId`,`Date` ,`Slot`,`suggestions`) values ('{}','{}','{}','{}')""".format(reservedTable,select_date,reservedSlot,suggesition))
mydb.commit()
mycursor.execute("""select `ReservationId` from Reservations where TableId='{}'and Date='{}'and Slot='{}'""".format(reservedTable,select_date,reservedSlot))
reservationid=mycursor.fetchall()
mycursor.execute("""SELECT DATE_ADD( CURRENT_DATE(), interval 1 day)""")
tomdate=mycursor.fetchall()
    return render_template('reservations.html', tomdate=tomdate, message="Table {} will be reserved for you at {} on {}. Your Reservation ID is {}.".format(reservedTable, reservedSlot, select_date, reservationid[0][0]), slot_table={})
addName=''
addMobileNum=''
addEmail=''
addPassword=''
addAddress=''
addMarketing=''
addworkslot=''
addaddress=''
designation=''
emp=None
n=0
@app.route('/add_user', methods=['POST'])
def add_user():
global emp
emp=request.form.get('admin')
global addName
addName= request.form.get('reg_name')
global addMobileNum
addMobileNum = request.form.get('reg_MobileNum')
global addEmail
addEmail= request.form.get('reg_Email')
global addPassword
addPassword= request.form.get('reg_Password')
global addAddress
addAddress = request.form.get('reg_Address')
global addMarketing
addMarketing = request.form.get('marketing')
global addworkslot
addworkslot= request.form.get('workslot')
global addaddress
addaddress= request.form.get('address')
global designation
designation=request.form.get('designation')
print(addPassword)
print(addEmail)
if len(addMobileNum)!=10 or (not addMobileNum.isdigit()) :
if emp is not None:
return render_template('addemployee.html', error_message='Enter a valid mobile number')
else:
return render_template('register.html', error_message='Enter a valid mobile number')
if emp is not None:
mycursor.execute ("""SELECT * FROM employee WHERE emailid='{}'""".format(addEmail))
customers = mycursor.fetchall()
if len(customers)>0:
            return render_template('addemployee.html', message='This email address is already registered. Log in to proceed')
else:
mycursor.execute ("""SELECT * FROM customers WHERE Email='{}'""".format(addEmail))
customers = mycursor.fetchall()
if len(customers)>0:
            return render_template('register.html', message='This email address is already registered. Log in to proceed')
s = smtplib.SMTP('smtp.gmail.com', 587)
s.starttls()
s.login("restaurants1233@gmail.com", "restrest")
global n
n=random.randint(10000,99999)
message = "Your OTP is "+str(n)
s.sendmail("restuarants1233@gmail.com",addEmail, message)
print(n)
s.quit()
return render_template('otp.html')
@app.route('/otp_authentication', methods=['POST'])
def otp1():
x=request.form.get('otp')
print(x)
print(n)
y=int(x)
    global addName, addEmail, addPassword
if y!=n :
error_message="Invalid OTP"
return render_template('otp.html',error_message=error_message)
if emp is not None:
mycursor.execute("""insert into `employee` (`employeename`, `phonenumber`, `emailid`, `password`,`address`,`workslot`,`Designation`) values('{}', '{}', '{}', '{}','{}','{}','{}')""".format(addName, addMobileNum, addEmail, addPassword,addaddress,addworkslot,designation))
mydb.commit()
return render_template('login.html', after_reg_message="Account created successfully! Login to proceed")
else:
mycursor.execute("""insert into `customers` (`CustomerName`, `MobileNum`, `Email`, `Password`, `Address`, `Marketing`) values('{}', '{}', '{}', '{}', '{}', '{}')""".format(addName, addMobileNum, addEmail, addPassword, addAddress, addMarketing))
mydb.commit()
return render_template('login.html', after_reg_message="Account created successfully! Login to proceed")
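# Passwords are stored and compared in plain text above. A hedged sketch of
# hashing with werkzeug (already a dependency of this app):
#
#     from werkzeug.security import generate_password_hash, check_password_hash
#     hashed = generate_password_hash(addPassword)           # store this
#     ok = check_password_hash(hashed, submitted_password)   # check at login
#
# Likewise, the Gmail credentials hardcoded above would be safer read from
# the environment, e.g. os.environ.get("SMTP_PASSWORD").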
@app.route('/userhomepage')
def profile():
mycursor.execute("""select * from `customers` where `CustomerId`= '{}'""".format(session['CustomerId']))
userdetails=mycursor.fetchall()
print(userdetails)
mycursor.execute("""select count(OrderID), sum(BillAmount) from `orders` where `CustomerId`= '{}'""".format(session['CustomerId']))
details=mycursor.fetchall()
print(details)
return render_template('profile.html',userdetails=userdetails,details=details)
@app.route('/delivery_agent_login', methods=['GET', 'POST'])
def delivery_agent_login():
return render_template('delivery_agent_login.html')
@app.route('/delivery_agent_login_validation', methods=['GET', 'POST'])
def delivery_agent_login_validation():
Email = request.form.get('Email')
password = request.form.get('password')
mycursor.execute('SELECT * FROM delivery_agents WHERE Email=%s AND Password=%s',(Email,password))
agents = mycursor.fetchall()
if len(agents)>0:
session['AgentId'] = agents[0][0]
return redirect('/deliveries')
else:
return render_template('delivery_agent_login.html', error_message="Invalid Email Id or Password")
@app.route('/deliveries', methods=['GET', 'POST'])
def deliveries():
mycursor.execute("""select * from deliveries where AgentId = '{}' order by `OrderId` desc""".format(session['AgentId']))
deliveries = mycursor.fetchall()
orderdetails={}
customer_info={}
assignedOrders={}
currentOrder=False
for delivery in deliveries:
if delivery[2]!="Delivered":
currentOrder=True
mycursor.execute("""select * from `orders` where `OrderId`= '{}'""".format(delivery[1]))
order = mycursor.fetchall()
assignedOrders[delivery[1]] = order[0]
detailsId = order[0][4]
customerId = order[0][3]
mycursor.execute("""select `menu`.`DishName`,`orderDetails`.`Quantity`,(`menu`.`Price`)*(`orderDetails`.`Quantity`) from (`orderDetails` inner join `menu` on `orderDetails`.`DishId`=`menu`.`DishID` ) where `orderDetails`.`DetailsId`= '{}'""".format(detailsId))
ODetails = mycursor.fetchall()
orderdetails[delivery[1]] = ODetails
mycursor.execute("""select `CustomerName`, `MobileNum`, `Address` from `customers` where `CustomerId` = '{}'""".format(customerId))
CInfo = mycursor.fetchall()
customer_info[delivery[1]] = CInfo[0]
return render_template('delivery.html', currentOrder=currentOrder, deliveries=deliveries, assignedOrders=assignedOrders, orderdetails=orderdetails, customer_info=customer_info)
@app.route('/order_status_change', methods=['GET', 'POST'])
def order_status_change():
preparing_to_enroute = request.form.get("preparing_to_enroute")
enroute_to_delivered = request.form.get("enroute_to_delivered")
if preparing_to_enroute != None and (enroute_to_delivered==None):
mycursor.execute("""update `deliveries` set `DeliveryStatus` = "Enroute" where `OrderId` = '{}'""".format(preparing_to_enroute))
mydb.commit()
elif enroute_to_delivered != None and (preparing_to_enroute == None):
mycursor.execute("""select CURRENT_TIME""")
time = mycursor.fetchall()
TIME = time[0][0]
mycursor.execute("""update `deliveries` set `DeliveryStatus` = "Delivered", `PaymentStatus` = "Paid", `DeliveredTime` = '{}' where `OrderId` = '{}'""".format(TIME, enroute_to_delivered))
mydb.commit()
mycursor.execute("""update `delivery_agents` set `AgentStatus` = "Available" where AgentId = '{}'""".format(session['AgentId']))
mydb.commit()
return redirect('/deliveries')
@app.route('/customersInfo', methods=['GET', 'POST'])
def customersInfo():
mycursor.execute("""select `CustomerId`, `CustomerName`, `Email`, `MobileNum`, `Wallet` from `Customers`""")
customers = mycursor.fetchall()
return render_template('customers_info.html', customers=customers)
@app.route('/update_customer_wallet', methods=['GET', 'POST'])
def update_customer_wallet():
CustomerId = request.form.get("CustomerId")
Amount = request.form.get("Amount")
mycursor.execute("""update `customers` set Wallet = Wallet + {} where CustomerId = '{}'""".format(Amount, CustomerId))
mydb.commit()
return redirect('/customersInfo')
@app.route('/update_profile', methods=['GET', 'POST'])
def update_profile():
mycursor.execute("""select * from `customers` where `CustomerId`= '{}'""".format(session['CustomerId']))
userdetails=mycursor.fetchall()
print(userdetails)
return render_template("update_profile.html", userdetails=userdetails)
@app.route('/update_profile_database', methods=['GET', 'POST'])
def update_profile_database():
newAddress = request.form.get('Address')
print(newAddress)
if newAddress != "":
mycursor.execute("""update `customers` set `Address` = '{}' where `CustomerId` = '{}'""".format(newAddress, session['CustomerId']))
mydb.commit()
return redirect('/userhomepage')
if __name__ == '__main__':
app.run(debug=True)
|
"""
Kuramoto-type model.
- Sinusoidal coupling
- accelerated with numba
"""
import numpy as np
import math
import numba
def njit_wrapper(use_numba, *args, **kwargs):
    '''
    Wrapper for the numba njit decorator.
    If :param use_numba: is True, apply @njit with the given args and kwargs;
    if False, the decorator returns the function unchanged.
    :return: a decorator
    '''
    def decorator(func):
        if not use_numba:
            # Return the function unchanged, not decorated.
            return func
        return numba.njit(func, *args, **kwargs)
    return decorator
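# Usage sketch: the same function, compiled or left as plain Python.
#
#     @njit_wrapper(use_numba=True)
#     def add(a, b):
#         return a + b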
def define_right_side_of_ODE_kuramoto(neighbours_indices, omega_vec, sin_str, use_numba=True):
'''
:param use_numba: if True, accelerate code with numba (5-50 times faster);
requires the same number of neighbours for every oscillator
:param omega_vec: vector of frequencies
:return:
'''
N = len(neighbours_indices)
if use_numba:
neighbours_indices = np.array(neighbours_indices) # numba doesn't like list of lists
# neighbours_indices = [tuple(x) for x in neighbours_indices] # -> tested; does not work
@njit_wrapper(use_numba)
def q_glob(t, phi):
"""
:param neighbours_indices: list of neighbours for each oscillator
:param omega: vector of frequencies rad/s
:param sin_str: coupling strength K/m in rad/s, float
:param phi: vector of phases, corresponding to each of the cilia
:param t: vector of time
:return: right side of differential equation as a function of t and phi
"""
q = np.zeros(N)
for i in range(N):
neighbours = neighbours_indices[i]
coupling = 0
for neighbour_index in neighbours:
coupling += sin_str * math.sin(phi[i] - phi[neighbour_index])
q[i] = omega_vec[i] - coupling
return q
return q_glob
if __name__ == '__main__':
import carpet.lattice.triangular as lattice
import carpet.visualize as vis
import matplotlib.pyplot as plt
import carpet, time
a = 18 # [um]
period = 31.25 # [ms] period
freq = 2 * np.pi / period
nx = 3
ny = 4 # must be even
N = nx * ny
tol = 10 ** -6 # solver tolerance
coords, lattice_ids = lattice.get_nodes_and_ids(nx, ny, a) # get cilia (nodes) coordinates
N1, T1 = lattice.get_neighbours_list(coords, nx, ny, a) # get list of neighbours and relative positions
sin_str = 0.001 * freq
right_side_of_ODE = define_right_side_of_ODE_kuramoto(N1, freq * np.ones(N), sin_str, use_numba=False)
solve_cycle = carpet.define_solve_cycle(right_side_of_ODE, 2 * period, phi_global_func=carpet.get_mean_phase)
# Solve
phi0 = np.zeros([len(coords)]) # initial condition
phi0[5] += 3 # perturb
sol = solve_cycle(phi0, tol)
start = time.time() # track CPU time
solution = solve_cycle(phi0, tol)
time_spent = time.time() - start
print("Time spent", time_spent)
# Get result
phi1 = solution.y.T[-1]
print("Change in phase after one cycle:", phi1 - phi0 - 2 * np.pi)
# Visualize
vals = (phi1 - phi0 - 2 * np.pi)
plt.title("How much phases changed after one cycle")
vis.plot_nodes(coords, phi=vals, vmin=vals.min(), vmax=vals.max(),cmap='jet')
vis.plt.show()
# Plot as a function of time
ys = sol.y[5]
ts = sol.t
plt.plot(ts, ys - freq * ts, 'o')
plt.show()
|
import torch
import struct
import sys
import os
from utils.torch_utils import select_device
# Initialize
device = select_device('cpu')
pt_file = sys.argv[1]
# Load model
model = torch.load(pt_file, map_location=device)['model'].float() # load to FP32
model.to(device).eval()
# Write the state dict in the .wts text format: the first line is the number
# of tensors; each following line is "<name> <count>" plus one hex-encoded
# big-endian FP32 value per element.
with open('../models/' + os.path.splitext(os.path.basename(pt_file))[0] + '.wts', 'w') as f:
    f.write('{}\n'.format(len(model.state_dict().keys())))
    for k, v in model.state_dict().items():
        vr = v.reshape(-1).cpu().numpy()
        f.write('{} {} '.format(k, len(vr)))
        for vv in vr:
            f.write(' ')
            f.write(struct.pack('>f', float(vv)).hex())
        f.write('\n')
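# For reference, a minimal reader for the .wts layout written above (a count
# line, then one "<name> <count> <hex>..." line per tensor). A verification
# sketch, not part of the original export script.
def load_wts(path):
    weights = {}
    with open(path) as wf:
        count = int(wf.readline())
        for _ in range(count):
            parts = wf.readline().split()
            name, length = parts[0], int(parts[1])
            values = [struct.unpack('>f', bytes.fromhex(h))[0]
                      for h in parts[2:2 + length]]
            weights[name] = values
    return weights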
|
from bottle import route, run, request

# Mapping from controller input names to button/axis indices.
controller_values = {"a": 0, "b": 1, "x": 2, "y": 3,
                     "r_x": 4, "r_y": 5, "l_x": 6, "l_y": 7}
comments = []

@route('/controller')
def get_comments():
    return all_comments() + '''
<form action="/controller" method="post">
a: <input name="a" type="button" />
Name: <input name="name" type="text" />
Comment: <input name="comment" type="text" />
<input value="Submit" type="submit" />
</form>
'''

@route('/controller', method="POST")
def post_comment():
    comment = request.forms.get('comment')
    name = request.forms.get('name')
    comments.append([comment, name])
    return get_comments()

def all_comments():
    formatted = ""
    for comment in comments:
        formatted += "<p>" + comment[0] + " - " + comment[1] + "</p>"
    return formatted

run(host='localhost', port=8080)
|
from typing import List
from lxml import objectify
from lxml.objectify import Element
from testplan.importers import ImportedResult, ResultImporter
from testplan.importers.base import T, ThreePhaseFileImporter
from testplan.importers.suitesresults import SuitesResult
from testplan.report import (
TestGroupReport,
TestReport,
ReportCategories,
TestCaseReport,
RuntimeStatus,
)
from testplan.testing.multitest.entries.assertions import RawAssertion
from testplan.testing.multitest.entries.schemas.base import registry
class GTestImportedResult(SuitesResult):
REPORT_CATEGORY = ReportCategories.GTEST
class GTestResultImporter(ThreePhaseFileImporter[Element]):
def _read_data(self, path) -> T:
"""
Parse XML report generated by Google test and return the root node.
XML report should be compatible with xUnit format.
:return: Root node of parsed raw test data
        :rtype: ``lxml.objectify.ObjectifiedElement``
"""
with open(path) as report_file:
return objectify.parse(report_file).getroot()
def _process_data(self, data: T) -> List[TestGroupReport]:
"""
XML output contains entries for skipped testcases
as well, which are not included in the report.
"""
result: List[TestGroupReport] = []
for suite in data.getchildren():
suite_name = suite.attrib["name"]
suite_report = TestGroupReport(
name=suite_name,
uid=suite_name,
category=ReportCategories.TESTSUITE,
)
suite_has_run = False
for testcase in suite.getchildren():
testcase_name = testcase.attrib["name"]
testcase_report = TestCaseReport(
name=testcase_name, uid=testcase_name
)
if not testcase.getchildren():
assertion_obj = RawAssertion(
description="Passed",
content="Testcase {} passed".format(testcase_name),
passed=True,
)
testcase_report.append(registry.serialize(assertion_obj))
else:
for entry in testcase.getchildren():
assertion_obj = RawAssertion(
description=entry.tag,
content=entry.text,
passed=entry.tag != "failure",
)
testcase_report.append(
registry.serialize(assertion_obj)
)
testcase_report.runtime_status = RuntimeStatus.FINISHED
if testcase.attrib["status"] != "notrun":
suite_report.append(testcase_report)
suite_has_run = True
if suite_has_run:
result.append(suite_report)
return result
def _create_result(
self, raw_data: T, processed_data: List[TestGroupReport]
) -> ImportedResult:
return GTestImportedResult(
name=self.name,
results=processed_data,
description=self.description,
)
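# Illustrative input for _read_data/_process_data (hypothetical report; the
# element and attribute names mirror what the parsing code above reads):
SAMPLE_GTEST_XML = """
<testsuites>
  <testsuite name="SquareTest">
    <testcase name="HandlesZero" status="run"/>
    <testcase name="HandlesNegative" status="run">
      <failure>expected 4, got -4</failure>
    </testcase>
    <testcase name="SkippedCase" status="notrun"/>
  </testsuite>
</testsuites>
"""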
|
from logging import getLogger
from logging import FileHandler
from argparse import ArgumentParser
from waitress import serve
from paste.translogger import TransLogger
from app import create_app
if __name__ == "__main__":
parser = ArgumentParser(description="서버 런처")
parser.add_argument("--set-port", metavar="PORT",
help="서버를 작동 시킬 포트 번호를 지정합니다.",
action="store", type=int, default=21212)
args = parser.parse_args()
logger = getLogger("wsgi")
logger.addHandler(FileHandler("wsgi.log"))
serve(app=TransLogger(create_app(), setup_console_handler=False), port=args.set_port, _quiet=True)
|
import unittest
from serendipity.set_and_map.singly_linked_list_map import Map
class MapTestCase(unittest.TestCase):
def setUp(self):
self.map = Map()
def test_map(self):
self.map.add("a", 65)
self.assertEqual(self.map.get_size(), 1)
self.assertFalse(self.map.is_empty())
self.assertTrue(self.map.contains("a"))
self.map.set("a", 66)
self.assertTrue(self.map.contains("a"))
self.map.add("b", 23)
self.map.add("c", 22)
self.assertEqual(self.map.remove("b"), 23)
self.assertIsNone(self.map.remove("n"))
self.assertEqual(self.map.get("a"), 66)
|
"""
Created by: Rob Mulla
Sep 7
IEEE Fraud Detection Model
- Add back ids
- Add V Features
"""
import numpy as np # linear algebra
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
import os
import sys
import matplotlib.pylab as plt
from sklearn.model_selection import KFold
from datetime import datetime
import time
import logging
from sklearn.metrics import roc_auc_score
from catboost import CatBoostClassifier, Pool
from timeit import default_timer as timer
start = timer()
##################
# PARAMETERS
###################
run_id = "{:%m%d_%H%M}".format(datetime.now())
KERNEL_RUN = False
MODEL_NUMBER = os.path.basename(__file__).split('.')[0]
if KERNEL_RUN:
    # Kaggle-kernel input paths (leftover from an earlier project; unused
    # when KERNEL_RUN is False)
    INPUT_DIR = '../input/champs-scalar-coupling/'
    FE_DIR = '../input/molecule-fe024/'
    FOLDS_DIR = '../input/champs-3fold-ids/'
TARGET = "isFraud"
N_ESTIMATORS = 100000
N_META_ESTIMATORS = 500000
LEARNING_RATE = 0.1
VERBOSE = 1000
EARLY_STOPPING_ROUNDS = 500
RANDOM_STATE = 529
N_THREADS = 48
DEPTH = 7
N_FOLDS = 5
MODEL_TYPE = "catboost"
#####################
## SETUP LOGGER
#####################
def get_logger():
"""
credits to: https://www.kaggle.com/ogrellier/user-level-lightgbm-lb-1-4480
"""
os.environ["TZ"] = "US/Eastern"
time.tzset()
FORMAT = "[%(levelname)s]%(asctime)s:%(name)s:%(message)s"
logging.basicConfig(format=FORMAT)
logger = logging.getLogger("main")
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler(sys.stdout)
fhandler = logging.FileHandler(f'../logs/{MODEL_NUMBER}_{run_id}.log')
formatter = logging.Formatter(FORMAT)
handler.setFormatter(formatter)
# logger.addHandler(handler)
logger.addHandler(fhandler)
return logger
logger = get_logger()
logger.info(f'Running for Model Number {MODEL_NUMBER}')
##################
# PARAMETERS
###################
if MODEL_TYPE == 'xgboost':
EVAL_METRIC = "AUC"
elif MODEL_TYPE == 'lgbm':
EVAL_METRIC = 'AUC'
elif MODEL_TYPE == 'catboost':
EVAL_METRIC = "AUC"
##################
# TRACKING FUNCTION
###################
def update_tracking(run_id, field, value, csv_file="../tracking/tracking.csv",
                    integer=False, digits=None, drop_incomplete_rows=False):
"""
Function to update the tracking CSV with information about the model
"""
try:
df = pd.read_csv(csv_file, index_col=[0])
except FileNotFoundError:
df = pd.DataFrame()
if integer:
value = round(value)
elif digits is not None:
value = round(value, digits)
if drop_incomplete_rows:
df = df.loc[~df['AUC'].isna()]
df.loc[run_id, field] = value # Model number is index
df.to_csv(csv_file)
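# Each call upserts one cell of the tracking sheet, keyed by run_id (row)
# and field (column), e.g. (hypothetical values):
#
#     update_tracking("0907_1200", "AUC", 0.94312, digits=4)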
update_tracking(run_id, "model_number", MODEL_NUMBER, drop_incomplete_rows=True)
update_tracking(run_id, "n_estimators", N_ESTIMATORS)
update_tracking(run_id, "early_stopping_rounds", EARLY_STOPPING_ROUNDS)
update_tracking(run_id, "random_state", RANDOM_STATE)
update_tracking(run_id, "n_threads", N_THREADS)
update_tracking(run_id, "learning_rate", LEARNING_RATE)
update_tracking(run_id, "n_fold", N_FOLDS)
update_tracking(run_id, "model_type", MODEL_TYPE)
update_tracking(run_id, "eval_metric", EVAL_METRIC)
#####################
# PREPARE MODEL DATA
#####################
folds = KFold(n_splits=N_FOLDS, shuffle=True, random_state=RANDOM_STATE)  # random_state requires shuffle=True in recent scikit-learn
logger.info('Loading Data...')
train_df = pd.read_parquet('../input/train.parquet')
test_df = pd.read_parquet('../input/test.parquet')
logger.info('Done loading Data...')
###########
# FEATURES
###########
FEATURES = ['TransactionAmt', 'ProductCD',
'card1', 'card2', 'card3',
'card4', 'card5', 'card6',
'id_12', 'id_13', 'id_14',
'id_15', 'id_16', 'id_17',
'id_18', 'id_19', 'id_20',
'id_21',
'id_22',
'id_23',
'id_24',
'id_25',
'id_26',
'id_27',
'id_28',
'id_29',
'id_30', 'id_31',
'id_32',
'id_33',
'id_34',
'id_35',
'id_36', 'id_37', 'id_38',
'DeviceType', 'DeviceInfo',
'M4','P_emaildomain',
'R_emaildomain',
'addr1', 'addr2',
'M1', 'M2', 'M3', 'M5', 'M6', 'M7', 'M8', 'M9',
'C1', 'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'C8',
'C9', 'C10', 'C11', 'C12', 'C13', 'C14',
'D1', 'D2', 'D3', 'D4', 'D5', 'D6', 'D7', 'D8', 'D9',
'D10', 'D11', 'D12', 'D13', 'D14', 'D15',
'V1', 'V2', 'V3', 'V4', 'V5', 'V6', 'V7', 'V8', 'V9', 'V10', 'V11', 'V12', 'V13',
'V14', 'V15', 'V16', 'V17', 'V18', 'V19', 'V20', 'V21', 'V22', 'V23', 'V24', 'V25',
'V26', 'V27', 'V28', 'V29', 'V30', 'V31', 'V32', 'V33', 'V34', 'V35', 'V36', 'V37',
'V38', 'V39', 'V40', 'V41', 'V42', 'V43', 'V44', 'V45', 'V46', 'V47', 'V48', 'V49',
'V50', 'V51', 'V52', 'V53', 'V54', 'V55', 'V56', 'V57', 'V58', 'V59', 'V60', 'V61',
'V62', 'V63', 'V64', 'V65', 'V66', 'V67', 'V68', 'V69', 'V70', 'V71', 'V72', 'V73',
'V74', 'V75', 'V76', 'V77', 'V78', 'V79', 'V80', 'V81', 'V82', 'V83', 'V84', 'V85',
'V86', 'V87', 'V88', 'V89', 'V90', 'V91', 'V92', 'V93', 'V94', 'V95', 'V96', 'V97',
'V98', 'V99', 'V100', 'V101', 'V102', 'V103', 'V104', 'V105', 'V106', 'V107', 'V108',
'V109', 'V110', 'V111', 'V112', 'V113', 'V114', 'V115', 'V116', 'V117', 'V118', 'V119',
'V120', 'V121', 'V122', 'V123', 'V124', 'V125', 'V126', 'V127', 'V128', 'V129', 'V130',
'V131', 'V132', 'V133', 'V134', 'V135', 'V136', 'V137', 'V138', 'V139', 'V140', 'V141',
'V142', 'V143', 'V144', 'V145', 'V146', 'V147', 'V148', 'V149', 'V150', 'V151', 'V152',
'V153', 'V154', 'V155', 'V156', 'V157', 'V158', 'V159', 'V160', 'V161', 'V162', 'V163',
'V164', 'V165', 'V166', 'V167', 'V168', 'V169', 'V170', 'V171', 'V172', 'V173', 'V174',
'V175', 'V176', 'V177', 'V178', 'V179', 'V180', 'V181', 'V182', 'V183', 'V184', 'V185',
'V186', 'V187', 'V188', 'V189', 'V190', 'V191', 'V192', 'V193', 'V194', 'V195', 'V196',
'V197', 'V198', 'V199', 'V200', 'V201', 'V202', 'V203', 'V204', 'V205', 'V206', 'V207',
'V208', 'V209', 'V210', 'V211', 'V212', 'V213', 'V214', 'V215', 'V216', 'V217', 'V218',
'V219', 'V220', 'V221', 'V222', 'V223', 'V224', 'V225', 'V226', 'V227', 'V228', 'V229',
'V230', 'V231', 'V232', 'V233', 'V234', 'V235', 'V236', 'V237', 'V238', 'V239', 'V240',
'V241', 'V242', 'V243', 'V244', 'V245', 'V246', 'V247', 'V248', 'V249', 'V250', 'V251',
'V252', 'V253', 'V254', 'V255', 'V256', 'V257', 'V258', 'V259', 'V260', 'V261', 'V262',
'V263', 'V264', 'V265', 'V266', 'V267', 'V268', 'V269', 'V270', 'V271', 'V272', 'V273',
'V274', 'V275', 'V276', 'V277', 'V278', 'V279', 'V280', 'V281', 'V282', 'V283', 'V284',
'V285', 'V286', 'V287', 'V288', 'V289', 'V290', 'V291', 'V292', 'V293', 'V294', 'V295',
'V296', 'V297', 'V298', 'V299', 'V300', 'V301', 'V302', 'V303', 'V304', 'V305', 'V306',
'V307', 'V308', 'V309', 'V310', 'V311', 'V312', 'V313', 'V314', 'V315', 'V316', 'V317',
'V318', 'V319', 'V320', 'V321', 'V322', 'V323', 'V324', 'V325', 'V326', 'V327', 'V328',
'V329', 'V330', 'V331', 'V332', 'V333', 'V334', 'V335', 'V336', 'V337', 'V338', 'V339',
'dist1', 'dist2']
CAT_FEATURES = ['ProductCD', 'card4', 'card6',
'id_12', 'id_13', 'id_14',
'id_15', 'id_16', 'id_17',
'id_18', 'id_19', 'id_20',
'id_21',
'id_22',
'id_23',
'id_24',
'id_25',
'id_26',
'id_27',
'id_28',
'id_29',
'id_30', 'id_31',
'id_32',
'id_33',
'id_34',
'id_35',
'id_36', 'id_37', 'id_38',
'DeviceType', 'DeviceInfo',
'M4','P_emaildomain',
'R_emaildomain', 'addr1', 'addr2',
'M1', 'M2', 'M3', 'M5', 'M6', 'M7', 'M8', 'M9']
X = train_df[FEATURES]
y = train_df[TARGET]
X_test = test_df[FEATURES]
X = X.fillna(-9999)
X_test = X_test.fillna(-9999)
logger.info('Running with features...')
logger.info(FEATURES)
logger.info(f'Target is {TARGET}')
update_tracking(run_id, "n_features", len(FEATURES), integer=True)
################################
# Dataframes for storing results
#################################
feature_importance = pd.DataFrame()
oof = np.zeros(len(X))
pred = np.zeros(len(X_test))
oof_df = train_df[['isFraud']].copy()
oof_df['oof'] = np.nan
oof_df['fold'] = np.nan
scores = []
best_iterations = []
for fold_n, (train_idx, valid_idx) in enumerate(folds.split(X, y)):
X_train = X.iloc[train_idx]
y_train = y.iloc[train_idx]
X_valid = X.iloc[valid_idx]
y_valid = y.iloc[valid_idx]
if MODEL_TYPE == "catboost":
train_dataset = Pool(data=X_train, label=y_train, cat_features=CAT_FEATURES)
valid_dataset = Pool(data=X_valid, label=y_valid, cat_features=CAT_FEATURES)
test_dataset = Pool(data=X_test, cat_features=CAT_FEATURES)
model = CatBoostClassifier(
iterations=N_ESTIMATORS,
learning_rate=LEARNING_RATE,
depth=DEPTH,
eval_metric=EVAL_METRIC,
verbose=VERBOSE,
random_state=RANDOM_STATE,
thread_count=N_THREADS,
task_type="GPU")
model.fit(
train_dataset,
eval_set=valid_dataset,
early_stopping_rounds=EARLY_STOPPING_ROUNDS,
)
y_pred_valid = model.predict_proba(valid_dataset)[:,1]
y_pred = model.predict_proba(test_dataset)[:,1]
fold_importance = pd.DataFrame()
fold_importance["feature"] = model.feature_names_
fold_importance["importance"] = model.get_feature_importance()
fold_importance["fold"] = fold_n + 1
feature_importance = pd.concat([feature_importance, fold_importance],
axis=0)
best_iteration = model.best_iteration_
best_iterations.append(best_iteration)
fold_score = roc_auc_score(y_valid, y_pred_valid)
scores.append(fold_score)
update_tracking(run_id, "AUC_f{}".format(fold_n + 1),
fold_score,
integer=False,)
    logger.info('Fold {} of {} AUC score: {:.4f}. Best iteration {}'.format(fold_n + 1,
N_FOLDS,
fold_score,
best_iteration))
oof_df.iloc[valid_idx, oof_df.columns.get_loc('oof')] = y_pred_valid.reshape(-1)
oof_df.iloc[valid_idx, oof_df.columns.get_loc('fold')] = fold_n + 1
pred += y_pred
update_tracking(run_id, 'avg_best_iteration',
np.mean(best_iterations),
integer=True)
###############
# Store Results
###############
pred /= N_FOLDS
score = np.mean(scores)
sub = pd.read_csv('../input/sample_submission.csv')
sub['isFraud'] = pred
sub.to_csv(f'../sub/sub_{MODEL_NUMBER}_{run_id}_{score:.4f}.csv', index=False)
oof_df.to_csv(f'../oof/oof_{MODEL_NUMBER}_{run_id}_{score:.4f}.csv')
logger.info('CV mean AUC score: {:.4f}, std: {:.4f}.'.format(np.mean(scores),
np.std(scores)))
total_score = roc_auc_score(oof_df['isFraud'], oof_df['oof'])
feature_importance.to_csv(f'../fi/fi_{MODEL_NUMBER}_{run_id}_{score:.4f}.csv')
update_tracking(run_id, "AUC",
total_score,
integer=False,)
logger.info('OOF AUC Score: {:.4f}'.format(total_score))
end = timer()
update_tracking(run_id, "training_time", (end - start), integer=True)
logger.info('Done!')
|
n = int(input("Enter an integer: "))
result =n+(n*n)+(n*n*n)
print(result)
|
from __future__ import unicode_literals
import frappe
def execute():
frappe.delete_doc("DocType", "Process Payroll")
|
from .get import Req
class COVID19():
def __init__(self):
self.req = Req()
def get_prefectures_data(self, pref_name=None):
"""
都道府県別の統計データを取得
"""
data = {}
for pref in self.req.get_prefectures():
data[pref['name_ja']] = {'id': pref['id']}
for item in ['cases', 'deaths', 'pcr']:
data[pref['name_ja']][item] = {
'count': pref[item],
'last_updated': pref['last_updated'][f'{item}_date']
}
if pref_name:
if pref_name in data:
data = data[pref_name]
else:
data = {}
return data
def get_total_data(self):
"""
日本全体での統計データを取得
"""
return self.req.get_total()
def get_history_data(self, date=None):
"""
今までの日付別統計データを取得
dateはYYYYmmddで指定
"""
data = self.req.get_history()
if date:
data2 = {}
for item in data:
if str(item['date']) == date:
data2 = item
if data2:
data = data2
else:
data = {}
return data
def get_predict_data(self, date=None):
"""
機械学習により今後30日間の日付別予測データを取得
dateはYYYYmmddで指定
"""
data = self.req.get_predict()
if date:
data2 = {}
for item in data:
if str(item['date']) == date:
data2 = item
if data2:
data = data2
else:
data = {}
return data
def get_positives_data(self, pref):
"""
都道府県を指定して感染者データを取得
prefは日本語名 例: 東京
"""
if pref == '東京':
pref += '都'
elif pref in ['大阪', '京都']:
pref += '府'
else:
pref += '県'
return self.req.get_positives(pref)
def get_positives_count_last_days(self, pref, days=0):
"""
都道府県を指定して感染者数データを取得
prefは日本語名 例: 東京
"""
data = {}
for item in self.get_positives_data(pref):
announce_date = item['announcement_date']
if announce_date not in data:
data[announce_date] = 0
data[announce_date] += 1
return {day: data[day] for day in list(data)[-days:]}
def get_statistics_data(self):
"""
感染者の年齢別統計データを取得
prefは日本語名 例: 東京都
"""
return self.req.get_statistics()
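# Usage sketch (assumes Req wraps a public Japan COVID-19 API, as above):
#
#     api = COVID19()
#     api.get_total_data()
#     api.get_positives_count_last_days('東京', days=7)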
|
import pandas as pd
import numpy as np
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from resources import SOPLS
Y_df = pd.read_table('./data/D.txt', index_col=0)
Y = Y_df.values
X1_df = pd.read_table('./data/A.txt', index_col=0)
X1 = X1_df.values
X2_df = pd.read_table('./data/B.txt', index_col=0)
X2 = X2_df.values
X3_df = pd.read_table('./data/C.txt', index_col=0)
X3 = X3_df.values
X = np.hstack([X1, X2, X3])
blocks = np.hstack([np.ones(X1.shape[1]),np.ones(X2.shape[1])*2,np.ones(X3.shape[1])*3])
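# `blocks` tags each column of the stacked X with its source block
# (1 -> X1, 2 -> X2, 3 -> X3); ncomp=[5, 3, 7] is assumed to set the number
# of PLS components fitted per block, in that order.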
mlf = make_pipeline(SOPLS.SOPLS(blocks=blocks, ncomp=[5,3,7], max_comp=10, wide_data=True))
mlf.fit(X,Y)
mlf.predict(X)
mlf2 = make_pipeline(StandardScaler(),SOPLS.SOPLS(blocks=blocks, ncomp=[5,3,7], max_comp=10, wide_data=True))
mlf2.fit(X,Y)
print(mlf2.predict(X))
|
from Tkinter import *
from p2_game import Game, State
import first_bot as red_bot
import first_bot as blue_bot
BOTS = {'red': red_bot, 'blue': blue_bot}
def display(state):
canvas.delete(ALL)
square_width = min(int(canvas['width']),int(canvas['height']))
step = square_width/state.game.width
r = int(step/10.0)
w = int(step/15.0)
def make_callback(move):
def callback(event):
if state.whos_turn == 'red' and RED_AI.get():
print "Give the red guy a chance to think!"
return
if state.whos_turn == 'blue' and BLUE_AI.get():
print "The blue lady needs more time to think!"
return
make_move(state, move)
return callback
for i,j in state.game.h_lines:
x = (i+0.5)*step
y = (j+0.5)*step
if (i,j) in state.h_line_owners:
owner = state.h_line_owners[(i,j)]
canvas.create_line(x,y,x+step,y,width=w)
else:
line = canvas.create_line(x,y,x+step,y,width=w,dash=(w,w),fill=state.whos_turn)
canvas.tag_bind(line,"<Button-1>",make_callback(('h',(i,j))))
for i,j in state.game.v_lines:
x = (i+0.5)*step
y = (j+0.5)*step
if (i,j) in state.v_line_owners:
owner = state.v_line_owners[(i,j)]
canvas.create_line(x,y,x,y+step,width=w)
else:
line = canvas.create_line(x,y,x,y+step,width=w,dash=(w,w),fill=state.whos_turn)
canvas.tag_bind(line,"<Button-1>",make_callback(('v',(i,j))))
for i,j in state.game.boxes:
x = (i+0.5)*step
y = (j+0.5)*step
if (i,j) in state.box_owners:
owner = state.box_owners[(i,j)]
canvas.create_rectangle(x+r,y+r,x+step-r,y+step-r,fill=owner)
for i,j in state.game.dots:
x = (i+0.5)*step
y = (j+0.5)*step
canvas.create_oval(x-r,y-r,x+r,y+r,fill='black')
if not state.is_terminal():
if state.whos_turn == 'red' and RED_AI.get():
think(state)
elif state.whos_turn == 'blue' and BLUE_AI.get():
think(state)
def make_move(state, move):
moves = state.get_moves()
if move in moves:
UNDO_STACK.append(state)
next_state = state.copy()
next_state.apply_move(move)
display(next_state)
else:
print move, "not in legal moves!"
def think(state):
import threading
class ThinkingThread(threading.Thread):
def run(self):
def quip(line):
AI_THOUGHTS.set(line)
move = BOTS[state.whos_turn].think(state.copy(), quip)
make_move(state, move)
AI_THOUGHTS.set("")
ThinkingThread().start()
def restart():
game = Game(4)
initial_state = State(game)
UNDO_STACK[:] = [initial_state]
display(initial_state)
def undo():
if len(UNDO_STACK) > 1:
UNDO_STACK.pop()
display(UNDO_STACK[-1])
master = Tk()
UNDO_STACK = []
RED_AI = IntVar(master)
BLUE_AI = IntVar(master)
AI_THOUGHTS = StringVar(master)
master.title("Dots and Boxes")
w = 600
h = 600
toolbar = Frame(master, width=w, height=h+20)
toolbar.pack(side=BOTTOM)
undo_btn = Button(toolbar, text="Undo", command=undo)
undo_btn.pack(side=LEFT)
restart_btn = Button(toolbar, text="Restart", command=restart)
restart_btn.pack(side=LEFT)
red_ai_btn = Checkbutton(toolbar, text="Red AI", variable=RED_AI)
red_ai_btn.pack(side=LEFT)
blue_ai_btn = Checkbutton(toolbar, text="Blue AI", variable=BLUE_AI)
blue_ai_btn.pack(side=LEFT)
ai_thoughts_ent = Entry(toolbar, textvariable=AI_THOUGHTS, state=DISABLED, width=50)
ai_thoughts_ent.pack(side=LEFT)
canvas = Canvas(master, width=w, height=h)
canvas.pack(side=RIGHT)
restart()
mainloop()
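# Hedged sketch of the bot interface this GUI assumes: first_bot (imported
# above as red_bot/blue_bot) exposes a function like
#
#     def think(state, quip):
#         quip("picking the first legal move")
#         return state.get_moves()[0]
#
# where quip() streams a status line into the AI_THOUGHTS entry box.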
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import fcntl
import os
import queue
import re
import select
import subprocess
import threading
import time
from . import message_item
from silk.node.base_node import BaseNode
class MessageSystemCallItem(message_item.MessageItemBase):
"""Class to encapsulate a system call into the message queue.
"""
def __init__(self, action, cmd, expect, timeout, field, refresh=0, exact_match: bool = False):
super(MessageSystemCallItem, self).__init__()
self.action = action
self.cmd = cmd
self.expect = expect
self.timeout = timeout
self.field = field
self.refresh = refresh
self.exact_match = exact_match
def log_match_failure(self, response):
self.parent.log_error("Worker failed to match expected output.")
self.parent.log_error("Expected: \"%s\"" % self.expect)
self.parent.log_error("Actual Output:")
for line in response.splitlines():
self.parent.log_error(line)
self._delegates.set_error("{0} not found for cmd:{1}!".format(self.expect, self.action))
def log_response_failure(self):
self.parent.log_error("Worker failed to execute command.")
self.parent.log_error("Fork failed when trying to start subprocess.")
self._delegates.set_error("Command \"%s\" not executed" % self.cmd)
def store_groupdict_match(self, match):
match_dict = match.groupdict()
for key in self.field:
self.parent.store_data(match_dict[key], key)
def invoke(self, parent):
"""
Consumer thread for serializing and asynchronously handling command inputs and expected returns.
Make system calls using the _make_system_call method.
"""
self.parent = parent
if self.expect is None:
self.expect = ""
self.parent.log_debug("Dequeuing command \"%s\"" % self.cmd)
response = None
if self.cmd is not None:
response = self.parent._make_system_call(self.action, self.cmd, self.timeout)
if response is None:
self.log_response_failure()
return
if self.exact_match:
response = response.rstrip()
if response != self.expect:
self.log_match_failure(response)
return
if type(self.field) is str:
self.parent.store_data(response, self.field)
else:
match = re.search(self.expect, response)
if match is None:
self.log_match_failure(response)
return
if type(self.field) is str:
self.parent.store_data(match.group(), self.field)
elif type(self.field) is list:
self.store_groupdict_match(match)
class SystemCallManager(object):
def __init__(self):
self.__message_queue = queue.Queue()
self.__event_lock = threading.Lock()
self.__worker_thread = threading.Thread(target=self.__worker_run, name="thread-" + self._name)
self.__worker_thread.daemon = True
self.__worker_thread.start()
def make_system_call_async(self, action, command, expect, timeout, field=None, exact_match: bool = False):
"""Post a command, timeout, and expect value to a queue for the consumer thread.
"""
self.log_info("Enqueuing command \"%s\"" % command)
        item = MessageSystemCallItem(action, command, expect, timeout, field, exact_match=exact_match)  # keyword arg so exact_match does not land in the refresh slot
with self.__event_lock:
self.set_all_clear(False)
self.__message_queue.put_nowait(item)
self.log_debug("Message enqueued")
def make_function_call_async(self, function, *args):
"""Enqueue a Python function to be called on the worker thread.
"""
self.log_info("Enqueueing function %s with args %s" % (function, args))
item = message_item.MessageCallableItem(function, args)
with self.__event_lock:
self.set_all_clear(False)
self.__message_queue.put_nowait(item)
self.log_debug("Message enqueued")
def _make_system_call(self, action, command, timeout):
"""Generic method for making a system call with timeout.
"""
log_line = "Making system call for %s" % action
self.log_debug(log_line)
self.log_debug(command)
try:
proc = subprocess.Popen(command, bufsize=0, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
except Exception as error:
self.log_error("Failed to start subprocess: %s" % error)
self.log_error("\tCommand: %s" % command)
return None
flags = fcntl.fcntl(proc.stdout, fcntl.F_GETFL)
fcntl.fcntl(proc.stdout, fcntl.F_SETFL, flags | os.O_NONBLOCK)
stdout = ""
curr_line = ""
t_start = time.time()
while True:
            if proc.poll() is not None:
                # The process has exited (any return code); stop polling stdout.
                break
try:
# Poll proc.stdout, no write list, no exception list, timeout=1s
# If proc.stdout is closed, this will throw an exception
# select.select is portable between OS X and Ubuntu.
poll_list = select.select([proc.stdout], [], [], 1)
# Check the length of the read list to see if there is new data
if len(poll_list[0]) > 0:
curr_line += proc.stdout.read(1).decode("utf-8")
except Exception as err:
print("EXCEPTION:{}".format(err))
break
if len(curr_line) > 0 and curr_line[-1] == "\n" and not curr_line.isspace():
self.log_debug("[stdout] %s" % (curr_line.rstrip()))
stdout += curr_line
curr_line = ""
if time.time() - t_start > timeout:
try:
proc.kill()
except OSError:
pass
break
try:
this_stdout = ""
while True:
try:
new_char = proc.stdout.read(1)
if not new_char:
break
this_stdout += new_char.decode("utf-8")
except Exception as err:
print("Exception: {}".format(err))
break
if this_stdout:
this_stdout = curr_line + this_stdout
for line in this_stdout.splitlines():
self.log_debug("[stdout] %s" % (line.rstrip()))
stdout += this_stdout
except ValueError:
pass
return stdout
def __clear_message_queue(self):
"""Remove all pending messages in queue.
"""
try:
while True:
self.__message_queue.get_nowait()
except queue.Empty:
pass
def __set_error(self, msg):
"""Post error msg and clear message queue.
"""
self.log_error("set_error: {0}".format(msg))
self.post_error(msg)
self.__clear_message_queue()
def __worker_run(self):
"""
Consumer thread for serializing and asynchronously handling command inputs and expected returns.
Serialize requests to make system calls.
Make system calls using the _make_system_call method.
"""
while True:
self.__event_lock.acquire()
self.set_all_clear(self.__message_queue.empty())
self.__event_lock.release()
item = self.__message_queue.get()
error_handler = lambda me, error_str: me.__set_error(error_str)
delegates = message_item.MessageItemDelegates(self, None, None, error_handler)
item.set_delegates(delegates)
item.invoke(self)
class TemporarySystemCallManager(SystemCallManager, BaseNode):
"""Class that can be used to make simple system calls with timeouts and logging functionality.
"""
def __init__(self, name="TemporarySystemCallManager"):
BaseNode.__init__(self, name)
SystemCallManager.__init__(self)
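# Usage sketch (illustrative, not part of the original file): the async methods
# queue work for the consumer thread; the command, expect pattern and timeout
# below are example values. Assumes BaseNode supplies the log_*, set_all_clear
# and post_error methods used above.
#
#   mgr = TemporarySystemCallManager()
#   mgr.make_system_call_async("check-uptime", "uptime", expect=r"load average", timeout=5)
#   mgr.make_function_call_async(print, "ran on the worker thread")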
|
import numpy as np
from PIL import Image
from solvers import Solver
from image_op import get_derivatives
class OpticFlow(object):
def __init__(self):
self.solver = Solver()
def __call__(self, *args, **kwargs):
return self.compute_flow(*args, **kwargs)
def visualize(self, u, v):
""" Computes RGB image visualizing the flow vectors """
max_mag = np.amax(np.sqrt(u ** 2 + v ** 2))
u = u / max_mag
v = v / max_mag
# angle in [0, 2*pi); arctan2 handles the quadrant corrections and the u == 0 case directly
angle = np.mod(np.arctan2(v, u), 2. * np.pi)
r = np.zeros_like(u, dtype = float)
g = np.zeros_like(u, dtype = float)
b = np.zeros_like(u, dtype = float)
mag = np.minimum(np.sqrt(u ** 2 + v ** 2), 1.)
# Red-Blue Case
case = (angle >= 0.0) * (angle < 0.25 * np.pi)
a = angle / (0.25 * np.pi)
r = np.where(case, a * 255. + (1 - a) * 255., r)
b = np.where(case, a * 255. + (1 - a) * 0., b)
case = (angle >= 0.25 * np.pi) * (angle < 0.5 * np.pi)
a = (angle - 0.25 * np.pi) / (0.25 * np.pi)
r = np.where(case, a * 64. + (1 - a) * 255., r)
g = np.where(case, a * 64. + (1 - a) * 0., g)
b = np.where(case, a * 255. + (1 - a) * 255., b)
# Blue-Green Case
case = (angle >= 0.5 * np.pi) * (angle < 0.75 * np.pi)
a = (angle - 0.5 * np.pi) / (0.25 * np.pi)
r = np.where(case, a * 0. + (1 - a) * 64., r)
g = np.where(case, a * 255. + (1 - a) * 64., g)
b = np.where(case, a * 255. + (1 - a) * 255., b)
case = (angle >= 0.75 * np.pi) * (angle < np.pi)
a = (angle - 0.75 * np.pi) / (0.25 * np.pi)
g = np.where(case, a * 255. + (1 - a) * 255., g)
b = np.where(case, a * 0. + (1 - a) * 255., b)
# Green-Yellow Case
case = (angle >= np.pi) * (angle < 1.5 * np.pi)
a = (angle - np.pi) / (0.5 * np.pi)
r = np.where(case, a * 255. + (1 - a) * 0., r)
g = np.where(case, a * 255. + (1 - a) * 255., g)
# Yellow-Red Case
case = (angle >= 1.5 * np.pi) * (angle < 2. * np.pi)
a = (angle - 1.5 * np.pi) / (0.5 * np.pi)
r = np.where(case, a * 255. + (1 - a) * 255., r)
g = np.where(case, a * 0. + (1 - a) * 255., g)
r = np.minimum(np.maximum(r * mag, 0.0), 255.)
g = np.minimum(np.maximum(g * mag, 0.0), 255.)
b = np.minimum(np.maximum(b * mag, 0.0), 255.)
flow_img = np.stack([r, g, b], axis = -1).astype(np.uint8)
return flow_img
def compute_flow(self, f1, f2, solver = 'explicit', alpha = 500, lambd = 4, tau = 0.2, maxiter = 10000, grid_steps = [2, 4, 8]):
r, c = f1.shape[:2]
u = np.zeros((r, c))
v = np.zeros((r, c)) # separate arrays: "u = v = ..." would alias the same buffer
fx, fy, ft = get_derivatives(f1, f2)
print('Alpha:', alpha)
print('Lambda:', lambd)
print('tau:', tau)
if solver == 'explicit':
for it in range(maxiter):
print('Iteration-', it)
u, v = self.solver.explicit_solver(fx, fy, ft, u, v, alpha = alpha, lambd = lambd, tau = tau)
mag = np.sqrt(u ** 2 + v ** 2)
print('Max: {0:.2f} Min: {1:.2f} Mean: {2:.2f} std: {3:.2f}'.format(np.amax(mag), np.amin(mag), np.mean(mag), np.std(mag)))
else:
u, v = self.solver.multi_grid_solver(f1, f2, grid_steps = grid_steps, alpha = alpha)
mag = np.sqrt(u ** 2 + v ** 2)
print('Max mag: {0:.2f} Mean mag: {1:.2f}'.format(np.amax(mag), np.mean(mag)))
vis = self.visualize(u, v)
return vis
def main():
kmax = 100 # Number of iterations (we are interested in steady state of the diffusion-reaction system)
alpha = 500 # Regularization Parameter (should be large enough to weight smoothness terms which have small magnitude)
tau = 0.2 # Step size (For implicit scheme, can choose arbitrarily large, for explicit scheme <=0.25)
lambd = 0
frame1_path = input('Enter first image: ')
frame2_path = input('Enter second image: ')
frame1 = Image.open(frame1_path)
frame2 = Image.open(frame2_path)
f1 = np.array(frame1, dtype = float) # np.float is deprecated; use the builtin float
f2 = np.array(frame2, dtype = float)
optic_flow = OpticFlow()
vis = optic_flow.compute_flow(f1, f2, alpha = alpha, lambd = lambd, tau = tau, maxiter = kmax)
vis = Image.fromarray(vis)
vis.save('flow_visual.jpg')
vis.show()
if __name__ == '__main__':
main()
|
from IOTHubActuator import excutor as ex
def generate_content(actions, room):
content = "based on the current situation, following plan was made:\n"
count = 0
for action in actions:
count += 1
if action == 'heater_On':
content += str(count) + ". " + "please turn on heater\n"
ex.heater_On()
room.heater.status = 1
if action == 'heater_Off':
content += str(count) + ". " + "please turn off heater\n"
ex.heater_Off()
room.heater.status = 0
if action == 'cooler_On':
content += str(count) + ". " + "please turn on the air conditioner\n"
ex.cooler_On()
room.cooler.status = 1
if action == 'cooler_Off':
content += str(count) + ". " + "please turn off the air conditioner\n"
ex.cooler_Off()
room.cooler.status = 0
if action == 'open_Windows':
content += str(count) + ". " + "please open the windows\n"
ex.open_Windows()
room.window.status = 1
if action == 'close_Windows':
content += str(count) + ". " + "please close the windows\n"
ex.close_Windows()
room.window.status = 0
if action == 'nothing':
ex.nothing()
if action == 'refresh_air':
content += str(count) + ". " + "please open the windows between lectures (recommended)\n"
ex.refresh_air()
room.window.status = 1
if action == 'dimLightsup':
content += str(count) + ". " + "please turn up the lights a little\n"
ex.dimLightsup()
room.light.status = 1
if action == 'dimLightsdown':
content += str(count) + ". " + "please dim the lights\n"
ex.dimLightsdown()
room.light.status = 1
if action == 'fullLights':
content += str(count) + ". " + "please turn on the lights\n"
ex.fullLights()
room.light.status = 2
if action == 'lightsOff':
content += str(count) + ". " + "please turn off the lights\n"
ex.lightsOff()
room.light.status = 0
if action == 'curtain_up':
content += str(count) + ". " + "please open the curtain\n"
ex.curtain_up()
room.curtain.status = 0
if action == 'curtain_down':
content += str(count) + ". " + "please close the curtain\n"
ex.curtain_down()
room.curtain.status = 1
if action == 'unlockRoom':
content += str(count) + ". " + "please unlock the lecture room\n"
ex.unlockRoom()
room.door.status = 0
if action == 'lockDoor':
content += str(count) + ". " + "please lock the lecture room\n"
ex.lockRoom()
room.door.status = 1
return content
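# Usage sketch (illustrative): `room` is expected to expose heater/cooler/window/
# light/curtain/door attributes, each with a writable `status` field, as used above.
#
#   message = generate_content(['heater_On', 'curtain_up'], room)
#   print(message)  # numbered instructions for the actions that were executed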
|
# -*- coding: utf-8 -*-
from unittest import mock
from typing import Any, Dict, Tuple, Text, Optional
from zerver.lib.test_classes import ZulipTestCase
from zerver.models import UserProfile, Recipient, get_display_recipient
class TestEmbeddedBotMessaging(ZulipTestCase):
def setUp(self):
# type: () -> None
self.user_profile = self.example_user("othello")
self.bot_profile = self.create_test_bot('embedded-bot@zulip.testserver', self.user_profile, 'Embedded bot',
'embedded', UserProfile.EMBEDDED_BOT, service_name='helloworld')
def test_pm_to_embedded_bot(self):
# type: () -> None
self.send_personal_message(self.user_profile.email, self.bot_profile.email,
content="help")
last_message = self.get_last_message()
self.assertEqual(last_message.content, "beep boop")
self.assertEqual(last_message.sender_id, self.bot_profile.id)
display_recipient = get_display_recipient(last_message.recipient)
# The next two lines fail mypy because display_recipient is of type Union[Text, List[Dict[str, Any]]].
# In this case, we know that display_recipient will be of type List[Dict[str, Any]];
# otherwise this test errors, which is the desired behavior anyway.
self.assert_length(display_recipient, 1) # type: ignore
self.assertEqual(display_recipient[0]['email'], self.user_profile.email) # type: ignore
def test_stream_message_to_embedded_bot(self):
# type: () -> None
self.send_stream_message(self.user_profile.email, "Denmark",
content="@**{}** foo".format(self.bot_profile.full_name),
topic_name="bar")
last_message = self.get_last_message()
self.assertEqual(last_message.content, "beep boop")
self.assertEqual(last_message.sender_id, self.bot_profile.id)
self.assertEqual(last_message.subject, "bar")
display_recipient = get_display_recipient(last_message.recipient)
self.assertEqual(display_recipient, "Denmark")
def test_stream_message_not_to_embedded_bot(self):
# type: () -> None
self.send_stream_message(self.user_profile.email, "Denmark",
content="foo", topic_name="bar")
last_message = self.get_last_message()
self.assertEqual(last_message.content, "foo")
class TestEmbeddedBotFailures(ZulipTestCase):
@mock.patch("logging.error")
def test_invalid_embedded_bot_service(self, logging_error_mock):
# type: (mock.Mock) -> None
user_profile = self.example_user("othello")
bot_profile = self.create_test_bot('embedded-bot@zulip.testserver', user_profile, 'Embedded bot',
'embedded', UserProfile.EMBEDDED_BOT, service_name='nonexistent_service')
mention_bot_message = "@**{}** foo".format(bot_profile.full_name)
self.send_stream_message(user_profile.email, "Denmark",
content=mention_bot_message,
topic_name="bar")
last_message = self.get_last_message()
self.assertEqual(last_message.content, mention_bot_message)
|
#@+leo-ver=5-thin
#@+node:edream.110203113231.921: * @file examples/redefine_put.py
"""Redefine the "put" and "put_nl" methods"""
#@@language python
#@@tabwidth -4
import leo.core.leoGlobals as g
__version__ = "1.4"
#@+others
#@+node:ekr.20111104210837.9690: ** init
def init():
'''Return True if the plugin has loaded successfully.'''
ok = not g.app.unitTesting
# Not for unit testing: overrides core methods.
if ok:
g.registerHandler("start2", onStart)
g.plugin_signon(__name__)
return ok
#@+node:edream.110203113231.922: ** onStart
# This code illustrates how to redefine _any_ method of Leo.
# Python makes this almost too easy :-)
def onStart(tag, keywords):
'''redefine methods when Leo starts.'''
c = keywords.get('c')
if c:
log = c.frame.log
# Replace frame.put with newPut.
g.funcToMethod(newPut, log, "put")
# Replace frame.putnl with newPutNl.
g.funcToMethod(newPutNl, log, "putnl")
#@+node:edream.110203113231.923: ** newPut and newPutNl
# Contrived examples of how to redefine frame.put and frame.putnl
# Same as frame.put except converts everything to upper case.
def newPut(self, s, color="black"):
g.pr("newPut", s, newline=False)
if g.app.quitting > 0: return
s = s.upper()
t = self.logCtrl
if t:
t.insert("end", s)
t.see("end")
else:
g.pr(s, newline=False)
# Same as frame.putnl except writes two newlines.
def newPutNl(self):
g.pr("newPutNl")
if g.app.quitting > 0: return
t = self.logCtrl
if t:
t.insert("end", "\n\n")
t.see("end")
else:
g.pr('')
#@-others
#@-leo
|
"""Script to run the calibrator"""
from __future__ import print_function
import logging
from simtk import unit
from protons import AmberCalibrationSystem
from protons import log
log.setLevel(logging.INFO)
settings = dict()
settings["temperature"] = 300.0 * unit.kelvin
settings["timestep"] = 1.0 * unit.femtosecond
settings["pressure"] = 1.0 * unit.atmospheres
settings["collision_rate"] = 9.1 / unit.picoseconds
settings["pH"] = 7.4
settings["solvent"] = "implicit"
settings["perturbations_per_trial"] = 0
settings["platform_name"] = "CUDA"
datapoints = dict(HIP=[], HID=[], HIE=[],idx=[])
aac = AmberCalibrationSystem("hip", settings, minimize=True, guess_free_energy=[0.0, 0.0, 0.0])
print(aac.target_weights)
window = 1000
for i,x in enumerate(aac.sams_till_converged(threshold=1.e-6, mc_every=100, gk_every=1, window=window, scheme='global'), start=1):
datapoints['HIP'].append(x[0])
datapoints['HID'].append(x[1])
datapoints['HIE'].append(x[2])
datapoints['idx'].append(i)
print(aac.sams_sampler.naccepted / aac.sams_sampler.nattempted)
from protons import plot_sams_trace
plot_sams_trace(datapoints["HID"], title="His-Delta Tautomer", ylabel="beta * zeta_2", window=window, filename="hid-calibrated.png")
plot_sams_trace(datapoints["HIE"], window=window,title="His-Eps Tautomer", ylabel="beta * zeta_3", filename="hie-calibrated.png")
|
import os
from fabric.api import env
from fabric.api import task
from fabric.operations import put, sudo
import docker
import openvpn
import varkite
import dns
import puppet
import strongswan
env.user = 'ubuntu'
env.key_filename = '~/.ssh/kite-dev.pem'
env.use_ssh_config = True
|
"""
Shows basic usage of the Gmail API.
Lists the user's Gmail labels.
"""
from __future__ import print_function
from apiclient.discovery import build
from httplib2 import Http
from oauth2client import file, client, tools
# Setup the Gmail API
SCOPES = 'https://www.googleapis.com/auth/gmail.readonly'
store = file.Storage('token.json')
creds = store.get()
if not creds or creds.invalid:
flow = client.flow_from_clientsecrets('credentials.json', SCOPES)
creds = tools.run_flow(flow, store)
service = build('gmail', 'v1', http=creds.authorize(Http()))
# Call the Gmail API
results = service.users().labels().list(userId='me').execute()
labels = results.get('labels', [])
if not labels:
print('No labels found.')
else:
print('Labels:')
for label in labels:
print(label['name'])
|
import torch
from torch.nn.functional import interpolate
from torch.nn import Softmax
from loderunner.tokens import TOKEN_DOWNSAMPLING_HIERARCHY as HIERARCHY
def special_loderunner_downsampling(num_scales, scales, image, token_list):
"""
Special downsampling method designed for Lode Runner token-based levels
(adapted from the Super Mario Bros. version of this method).
num_scales : number of scales the image is scaled down to.
scales : downsampling scales. Should be a list of tuples (scale_x, scale_y) of length num_scales.
image : original level to be scaled down. Expects a torch tensor.
token_list : list of tokens appearing in the image, in order of the image's channels.
"""
scaled_list = []
for sc in range(num_scales):
scale_v = scales[sc][0]
scale_h = scales[sc][1]
# Initial downscaling of one-hot level tensor is normal bilinear scaling
bil_scaled = interpolate(image, (int(image.shape[-2] * scale_v), int(image.shape[-1] * scale_h)),
mode='bilinear', align_corners=False)
# Init output level
img_scaled = torch.zeros_like(bil_scaled)
for x in range(bil_scaled.shape[-2]):
for y in range(bil_scaled.shape[-1]):
curr_h = 0
curr_tokens = [tok for tok in token_list if bil_scaled[:, token_list.index(tok), x, y] > 0]
for h in range(len(HIERARCHY)): # find out which hierarchy group we're in
for token in HIERARCHY[h].keys():
if token in curr_tokens:
curr_h = h
for t in range(bil_scaled.shape[-3]):
if not (token_list[t] in HIERARCHY[curr_h].keys()):
# if this token is not on the correct hierarchy group, set to 0
img_scaled[:, t, x, y] = 0
else:
# if it is, keep original value
img_scaled[:, t, x, y] = bil_scaled[:, t, x, y]
# Adjust level to look more like the generator output through a Softmax function.
img_scaled[:, :, x, y] = Softmax(dim=1)(30*img_scaled[:, :, x, y])
scaled_list.append(img_scaled)
scaled_list.reverse()
return scaled_list
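# Example call (illustrative shapes): `level` would be a one-hot torch tensor of
# shape (1, len(token_list), H, W); each scale tuple is (scale_vertical, scale_horizontal).
#
#   scales = [(0.5, 0.5), (0.75, 0.75)]
#   pyramid = special_loderunner_downsampling(len(scales), scales, level, token_list)
#   # note: the returned list is reversed relative to the order of `scales`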
|
from __future__ import annotations
import abc
import typing as ty
class BaseJSONParser(abc.ABC):
"""
A namespace that unifies JSON serialization and deserialization
methods under one protocol. Implementations are used to
decode/encode JSON responses from VK.
Implementations for some of the JSON libraries can be
found in [json_parsers.py](../json_parsers.py)
"""
@staticmethod
@abc.abstractmethod
def dumps(data: ty.Dict[str, ty.Any]) -> ty.Union[str, bytes]:
"""
Method that serializes JSON into a string
Args:
data: the value to serialize (only dictionaries are passed)
Returns:
A JSON string
"""
@staticmethod
@abc.abstractmethod
def loads(string: ty.Union[str, bytes]) -> ty.Dict[str, ty.Any]:
"""
Method that deserializes JSON from a string
Args:
string: a JSON string
Returns:
The dictionary encoded by the JSON object in the string
"""
|
# Copyright 2021 Google Research. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for effnetv2_model."""
from absl import logging
from absl.testing import parameterized
import tensorflow.compat.v1 as tf
from brain_automl.efficientnetv2 import effnetv2_model
class EffNetV2ModelTest(tf.test.TestCase, parameterized.TestCase):
@parameterized.named_parameters(
("v1_b0", "efficientnet-b0", 5330564),
("v1_b1", "efficientnet-b1", 7856232),
("v1_b2", "efficientnet-b2", 9177562),
("v1_b3", "efficientnet-b3", 12314268),
("v1_b4", "efficientnet-b4", 19466816),
("v1_b5", "efficientnet-b5", 30562520),
("v1_b6", "efficientnet-b6", 43265136),
)
def test_effnetv1(self, model_name, expected_params):
images = tf.zeros((1, 224, 224, 3), dtype=tf.float32)
model = effnetv2_model.EffNetV2Model(model_name)
_ = model(images)
self.assertEqual(model.count_params(), expected_params)
@parameterized.named_parameters(
("v1-b0", "efficientnetv2-b0", 7200312),
("v1-b1", "efficientnetv2-b1", 8212124),
("v1-b2", "efficientnetv2-b2", 10178374),
("v1-b3", "efficientnetv2-b3", 14467622),
("s", "efficientnetv2-s", 21612360),
("m", "efficientnetv2-m", 54431388),
("l", "efficientnetv2-l", 119027848),
("xl", "efficientnetv2-xl", 208896832),
)
def test_effnetv2(self, model_name, expected_params):
images = tf.zeros((10, 224, 224, 3), dtype=tf.float32)
model = effnetv2_model.EffNetV2Model(model_name)
_ = model(images)
self.assertEqual(model.count_params(), expected_params)
if __name__ == "__main__":
logging.set_verbosity(logging.WARNING)
tf.test.main()
|
# -*- coding: UTF-8 -*-
def fib():
a, b = 0, 1
while True:
yield b
a, b = b, a + b
if __name__ == '__main__':
fib_gen = fib()
print(fib_gen)
print(next(fib_gen))
print(next(fib_gen))
print(next(fib_gen))
print(next(fib_gen))
|
import numpy as np
import os
seedList = np.random.randint(40000000, size=10)
for i in seedList:
inFile = open('script/templateScript.scr', 'r')
outFile = open('script/freija_'+str(i)+'.scr', 'w')
inFileStr=inFile.read()
inFileStr=inFileStr.replace("rat -l /data/liggins/Myjob.log /data/liggins/SimpleBatch/mac/Myjob.mac","rat -s "+str(i)+" -l /data/snoplus/liggins/year1/memoryLeaks/logs/freija_"+str(i)+".log /data/snoplus/liggins/year1/memoryLeaks/macs/FRscript.mac") # This is the standard line
# inFileStr=inFileStr.replace("rat -l /data/liggins/Myjob.log /data/liggins/SimpleBatch/mac/Myjob.mac","rat -s "+str(i)+" -l /data/snoplus/liggins/year1/memoryLeaks/logs/freija_"+str(i)+"_with_Jacks_mod.log /data/snoplus/liggins/year1/memoryLeaks/macs/FRscript.mac") # This is the line with Jack's mod
outFile.write(inFileStr)
inFile.close()
outFile.close()
os.system("qsub -cwd -q snoplusSL6 script/freija_"+str(i)+".scr")
os.system("rm script/freija_"+str(i)+".scr")
|
# Copyright (c) 2019-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
import difflib
import subprocess
from pathlib import Path
import uuid
import os
folder_path = Path(__file__).parent
JAVA_PATH = folder_path.joinpath("resources/java_evosuite_tests")
PYTHON_PATH = folder_path.joinpath("resources/expected_python_translations")
CPP_PATH = folder_path.joinpath("resources/expected_cpp_translations")
def translation_testing(examples_list, translator, should_apply_black=False):
for input_test, expected_translation in examples_list:
actual = translator.translate(input_test)
if should_apply_black:
actual = apply_black(actual)
diff_tester(expected_translation, actual)
def apply_black(code: str):
filepath = f"/tmp/python_code_{uuid.uuid4()}.py"
with open(filepath, "w") as tmp_file:
tmp_file.write(code)
subprocess.run(
f"black {filepath}", shell=True,
)
with open(filepath, "r") as tmp_file_in:
output = tmp_file_in.read()
os.remove(filepath)
return output
def diff_tester(expected, res, split="\n"):
expected = split.join([x.rstrip() for x in expected.split(split)])
res = split.join([x.rstrip() for x in res.split(split)])
d = difflib.Differ()
if expected != res:
print("Expected:")
print(expected)
print("#" * 50)
print("Got:")
print(res)
print("#" * 50)
diff = d.compare(expected.split(split), res.split(split))
for line in diff:
print(line)
assert expected == res
def read_inputs(filename, target_lang):
java_path = JAVA_PATH.joinpath(filename).with_suffix(".java").absolute()
with open(java_path, "r") as java_file:
input_test = java_file.read()
if target_lang == "python":
with open(
PYTHON_PATH.joinpath(filename).with_suffix(".py"), "r"
) as python_file:
expected_translation = python_file.read()
elif target_lang == "cpp":
with open(CPP_PATH.joinpath(filename).with_suffix(".cpp"), "r") as python_file:
expected_translation = python_file.read()
else:
raise ValueError(f"target_lang {target_lang} not supported")
return input_test, expected_translation
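# Example (illustrative): diff_tester passes silently when the two strings match
# and prints a line-by-line diff before failing the assert when they do not.
#
#   diff_tester("def f():\n    return 1", "def f():\n    return 1")  # passes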
|
# encoding: UTF-8
from vnzaif import TradeApi, DataApi
|
import logging
import os
import requests
logger = logging.getLogger("GW: GlasswallService")
class GlasswallService:
@staticmethod
def get_file(file, filename):
file = file + "/" + filename
try:
return {"file": (filename, open(file, "rb"))}
except Exception:
logger.error(f"unable to get file {file}")
return False
@staticmethod
def rebuild(filename, file, mode):
files = GlasswallService.get_file(file, filename)
if not files:
return None
output = False
try:
rebuild_api = os.environ.get('rebuild_api', None)
response = requests.post(rebuild_api, files=files, data={'mode': mode})
except Exception as ex:
logger.error(str(ex))
else:
output = response
if '"message": "failed"' in str(response.content):
output = None
return output
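# Usage sketch (illustrative values): requires the `rebuild_api` environment
# variable to point at the rebuild endpoint used above.
#
#   response = GlasswallService.rebuild("report.pdf", "/tmp/uploads", mode="default")
#   if response is None:
#       ...  # the engine reported failure or the file could not be read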
|
from typing import Optional
from sqlmodel import Field, SQLModel
from datetime import datetime
class Calendar(SQLModel, table=True):
"""Create an SQLModel for a calendar"""
id: Optional[int] = Field(default=None, primary_key=True)
date: datetime
year_number: int
year_name: str
quarter_number: int
quarter_name: str
month_number: int
month_name: str
week_number: int
week_name: str
week_day_number: int
week_day_name: str
__table_args__ = {"schema": "app_db"}
|
import torch, re, os
import numpy as np
from transformers import BertTokenizer
from typing import List, Tuple
from dotenv import load_dotenv
nn_tags = ['B-CONTROLEEXTERNO',
'B-LOCAL',
'B-ORGANIZACAO',
'B-PESSOA',
'B-TEMPO',
'B-VALOR',
'I-CONTROLEEXTERNO',
'I-LOCAL',
'I-ORGANIZACAO',
'I-PESSOA',
'I-TEMPO',
'I-VALOR',
'O',
'PAD']
class EntitySource(object):
def get_entities_dict(self, words:List[str], ents:List[str]) -> dict:
ent_dict = {}
for w, e in zip(words, ents):
if e not in ent_dict.keys():
ent_dict[e] = []
ent_dict[e].append(w)
return ent_dict
class NerNeuralNet(EntitySource):
def __init__(self, tokenizer='neuralmind/bert-base-portuguese-cased'):
load_dotenv()
self.model = torch.load(os.getenv('MODEL_PATH'))
self.tokenizer = BertTokenizer.from_pretrained(tokenizer, do_lower_case=False)
def get_entities_lists(self, msg:str) -> Tuple[List[str], List[str]]:
tokenized_sentence = self.tokenizer.encode(msg)
input_ids = torch.tensor([tokenized_sentence]).cuda()
with torch.no_grad():
output = self.model(input_ids)
label_indices = np.argmax(output[0].to('cpu').numpy(), axis=2)
tokens = self.tokenizer.convert_ids_to_tokens(input_ids.to('cpu').numpy()[0])
new_tokens, new_labels = [], []
for token, label_idx in zip(tokens, label_indices[0]):
if token.startswith("##"):
new_tokens[-1] = new_tokens[-1] + token[2:]
else:
new_labels.append(nn_tags[label_idx])
new_tokens.append(token)
zip_result = list(zip(new_labels, new_tokens))
words = []
ents = []
for e in [r for r in zip_result if r[0] != 'O']:
symb, t_ent = e[0].split('-')
word = e[1]
if symb == 'B':
words.append(word)
ents.append(t_ent)
else:
words[-1] += ' ' + word
return words, ents
def get_entities_dict(self, msg:str) -> dict:
words, ents = self.get_entities_lists(msg)
return super(NerNeuralNet, self).get_entities_dict(words, ents)
class NerRuleExtractor(EntitySource):
def __init__(self):
self.cpf_pattern = r'\b([0-9]{3}\.?[0-9]{3}\.?[0-9]{3}\-?[0-9]{2})\b'
self.cnpj_pattern = r'\b([0-9]{2}\.?[0-9]{3}\.?[0-9]{3}\/?[0-9]{4}\-?[0-9]{2})\b'
self.email_pattern = r'\b[a-z0-9]+[\._]?[a-z0-9]+[@]\w+[.]\w{2,3}\b'
def get_entities_lists(self, msg:str) -> Tuple[List[str], List[str]]:
emails = re.findall(self.email_pattern, msg)
cpfs = re.findall(self.cpf_pattern, msg)
cnpjs = re.findall(self.cnpj_pattern, msg)
words_ents = [(e,'EMAIL') for e in emails] + [(c,'CPF') for c in cpfs] + [(c,'CNPJ') for c in cnpjs]
words = []
ents = []
for w,e in words_ents:
words.append(w)
ents.append(e)
return words, ents
def get_entities_dict(self, msg:str) -> dict:
words, ents = self.get_entities_lists(msg)
return super(NerRuleExtractor, self).get_entities_dict(words, ents)
class EntityExtractor(object):
def __init__(self):
self.ner_nn = NerNeuralNet()
self.ner_rule = NerRuleExtractor()
def get_entities_dict(self, msg:str) -> dict:
dict_nn = self.ner_nn.get_entities_dict(msg)
dict_rule = self.ner_rule.get_entities_dict(msg)
dict_nn.update(dict_rule)
return dict_nn
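# Usage sketch (illustrative): NerNeuralNet above needs MODEL_PATH set in the
# environment (loaded via dotenv) and a CUDA device for the .cuda() call to work.
#
#   extractor = EntityExtractor()
#   entities = extractor.get_entities_dict("CPF 123.456.789-09, contato: joao@mail.com")
#   # e.g. {'CPF': ['123.456.789-09'], 'EMAIL': ['joao@mail.com'], ...}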
|
# generated by datamodel-codegen:
# filename: json_pointer.yaml
# timestamp: 2019-07-26T00:00:00+00:00
from __future__ import annotations
from typing import Optional
from pydantic import BaseModel
class TestNestedNested(BaseModel):
test_nested_nested_string: Optional[str] = None
class TestNested(BaseModel):
test_string: Optional[str] = None
nested_nested: Optional[TestNestedNested] = None
class Test(TestNested):
pass
class Foo(Test):
pass
|
from django.urls import re_path
from oscar.core.application import OscarDashboardConfig
class WFRSDashboardConfig(OscarDashboardConfig):
name = "wellsfargo.dashboard"
label = "wellsfargo_dashboard"
namespace = "wellsfargo_dashboard"
default_permissions = [
"is_staff",
]
def get_urls(self):
from .views import (
FinancingPlanListView,
FinancingPlanCreateView,
FinancingPlanUpdateView,
FinancingPlanDeleteView,
FinancingPlanBenefitListView,
FinancingPlanBenefitCreateView,
FinancingPlanBenefitUpdateView,
FinancingPlanBenefitDeleteView,
CreditApplicationListView,
CreditApplicationDetailView,
TransferMetadataListView,
TransferMetadataDetailView,
PreQualificationListView,
PreQualificationDetailView,
SDKApplicationListView,
)
urlpatterns = [
re_path(
r"^plans/$", FinancingPlanListView.as_view(), name="wfrs-plan-list"
),
re_path(
r"^plans/new/$",
FinancingPlanCreateView.as_view(),
name="wfrs-plan-create",
),
re_path(
r"^plans/(?P<pk>[0-9]+)/edit/$",
FinancingPlanUpdateView.as_view(),
name="wfrs-plan-edit",
),
re_path(
r"^plans/(?P<pk>[0-9]+)/delete/$",
FinancingPlanDeleteView.as_view(),
name="wfrs-plan-delete",
),
re_path(
r"^benefits/$",
FinancingPlanBenefitListView.as_view(),
name="wfrs-benefit-list",
),
re_path(
r"^benefits/new/$",
FinancingPlanBenefitCreateView.as_view(),
name="wfrs-benefit-create",
),
re_path(
r"^benefits/(?P<pk>[0-9]+)/edit/$",
FinancingPlanBenefitUpdateView.as_view(),
name="wfrs-benefit-edit",
),
re_path(
r"^benefits/(?P<pk>[0-9]+)/delete/$",
FinancingPlanBenefitDeleteView.as_view(),
name="wfrs-benefit-delete",
),
re_path(
r"^applications/$",
CreditApplicationListView.as_view(),
name="wfrs-application-list",
),
re_path(
r"^applications/(?P<pk>[0-9]+)/$",
CreditApplicationDetailView.as_view(),
name="wfrs-application-detail",
),
re_path(
r"^transfers/$",
TransferMetadataListView.as_view(),
name="wfrs-transfer-list",
),
re_path(
r"^transfers/(?P<merchant_reference>[A-Za-z0-9\-]+)/$",
TransferMetadataDetailView.as_view(),
name="wfrs-transfer-detail",
),
re_path(
r"^prequal-requests/$",
PreQualificationListView.as_view(),
name="wfrs-prequal-list",
),
re_path(
r"^prequal-requests/(?P<uuid>[A-Za-z0-9\-]+)/$",
PreQualificationDetailView.as_view(),
name="wfrs-prequal-detail",
),
re_path(
r"^sdk-applications/$",
SDKApplicationListView.as_view(),
name="wfrs-sdk-application-list",
),
]
return self.post_process_urls(urlpatterns)
|
#!/usr/bin/env python
#coding:utf-8
# Author: mozman --<mozman@gmx.at>
# Purpose: element factory
# Created: 15.10.2010
# Copyright (C) 2010, Manfred Moitzi
# License: MIT License
from svgwrite import container
from svgwrite import shapes
from svgwrite import path
from svgwrite import image
from svgwrite import text
from svgwrite import gradients
from svgwrite import pattern
from svgwrite import masking
from svgwrite import animate
from svgwrite import filters
from svgwrite import solidcolor
factoryelements = {
'g': container.Group,
'svg': container.SVG,
'defs': container.Defs,
'symbol': container.Symbol,
'marker': container.Marker,
'use': container.Use,
'a': container.Hyperlink,
'script': container.Script,
'style': container.Style,
'line': shapes.Line,
'rect': shapes.Rect,
'circle': shapes.Circle,
'ellipse': shapes.Ellipse,
'polyline': shapes.Polyline,
'polygon': shapes.Polygon,
'path': path.Path,
'image': image.Image,
'text': text.Text,
'tspan': text.TSpan,
'tref': text.TRef,
'textPath': text.TextPath,
'textArea': text.TextArea,
'linearGradient': gradients.LinearGradient,
'radialGradient': gradients.RadialGradient,
'pattern': pattern.Pattern,
'solidColor': solidcolor.SolidColor,
'clipPath': masking.ClipPath,
'mask': masking.Mask,
'animate': animate.Animate,
'set': animate.Set,
'animateColor': animate.AnimateColor,
'animateMotion': animate.AnimateMotion,
'animateTransform': animate.AnimateTransform,
'filter': filters.Filter,
}
class ElementBuilder(object):
def __init__(self, cls, factory):
self.cls = cls
self.factory = factory
def __call__(self, *args, **kwargs):
# inject creator object - inherit _parameter from factory
kwargs['factory'] = self.factory
# create an object of type 'cls'
return self.cls(*args, **kwargs)
class ElementFactory(object):
def __getattr__(self, name):
if name in factoryelements:
return ElementBuilder(factoryelements[name], self)
else:
raise AttributeError("'%s' has no attribute '%s'" % (self.__class__.__name__, name))
|
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 29 17:02:02 2017
A collection of functions for plotting synthetic interferograms
@author: eemeg
"""
#%%
def xticks_every_nmonths(ax_to_update, day0_date, time_values, include_tick_labels,
major_ticks_n_months = 3, minor_ticks_n_months = 1):
"""Given an axes, update the xticks so the major ones are the 1st of every n months (e.g. if every 3, would be: jan/april/july/october).
Inputs:
ax_to_update | matplotlib axes | the axes to update.
day0_date | string | in form yyyymmdd
time_values | rank 1 array | cumulative temporal baselines, e.g. np.array([6,18, 30, 36, 48])
include_tick_labels | boolean | if True, tick labels are added to the ticks.
major_ticks_n_months, minor_ticks_n_months | int | x ticks are every n months, e.g. 2, 3, 4, 6, 12 (yearly). Unusual spacings (e.g. every 5) are not tested.
Returns:
updates axes
History:
2021_09_27 | MEG | Written
2022_02_17 | MEG | Modified so monthly spacings other than every 3 months can be used.
"""
import numpy as np
import datetime as dt
import copy
from dateutil.relativedelta import relativedelta # add 3 months and check not after end
import matplotlib.pyplot as plt
def create_tick_labels_every_nmonths(day0_date_dt, dayend_date_dt, n_months = 1):
""" Given a spacing of every n_months, get the dates and days since the first date for ticks every n_months.
e.g. every month, every 6 months.
Inputs:
day0_date_dt | datetime | date of x = 0 on axis.
dayend_date_dt | datetime | date of last x value.
n_months | int | frequency of ticks.
Returns:
ticks | dict | contains datetimes : datetimes for each tick
yyyymmdd : strings to use as labels in form yyyy/mm/dd
n_day : day number of tick.
History:
2022_03_29 | MEG | Written
"""
# 1: find first tick date (the first of the jan/ april/jul /oct)
date_tick0 = copy.deepcopy(day0_date_dt) # version that can be modified as we iterate through.
while not ( (date_tick0.day) == 1 and (date_tick0.month in (np.arange(0, 12, n_months) + 1))): # i.e. while it's not both the 1st of the month and a month on the n-month grid (e.g. jan/apr/jul/oct for n_months = 3)
date_tick0 += dt.timedelta(1) # then add one day and keep going.
# 2: get all the other first of the quarters as datetimes (ie keep adding n months until we're gone past the day end date)
ticks = {'datetimes' : [date_tick0],
'yyyymmdd' : [],
'n_day' : []}
while ticks['datetimes'][-1] < (dayend_date_dt - relativedelta(months=+n_months)): # while we haven't gone past the last date (subtract n_months so we don't go one jump too far)
ticks['datetimes'].append(ticks['datetimes'][-1] + relativedelta(months=+n_months)) # append the next date which is n_months more.
# 3: work out what day number each first of the quarter is.
for tick_dt in ticks['datetimes']: # loop along the list of datetimes (which are each tick)
ticks['yyyymmdd'].append(dt.datetime.strftime(tick_dt, "%Y/%m/%d")) # as a string for the tick label (the / separators make it more readable)
ticks['n_day'].append((tick_dt - day0_date_dt).days)
return ticks
xtick_label_angle = 315 # this angle will read from top left to bottom right (i.e. at a diagonal)
tick_labels_days = ax_to_update.get_xticks().tolist() # get the current tick labels
day0_date_dt = dt.datetime.strptime(day0_date, "%Y%m%d") # convert the day0 date (date of day number 0) to a datetime.
dayend_date_dt = day0_date_dt + dt.timedelta(int(time_values[-1])) # the last time value is the number of days we have, so add this to day0 to get the end.
ticks_major = create_tick_labels_every_nmonths(day0_date_dt, dayend_date_dt, n_months = major_ticks_n_months)
ticks_minor = create_tick_labels_every_nmonths(day0_date_dt, dayend_date_dt, n_months = minor_ticks_n_months) # these are used as the minor ticks every month.
# 4: Update the figure.
ax_to_update.set_xticks(ticks_major['n_day']) # apply major tick labels to the figure
ax_to_update.set_xticks(ticks_minor['n_day'], minor = True) # apply major tick labels to the figure
if include_tick_labels:
ax_to_update.set_xticklabels(ticks_major['yyyymmdd'], rotation = xtick_label_angle, ha = 'left') # update tick labels, and rotate
plt.subplots_adjust(bottom=0.15)
ax_to_update.set_xlabel('Date')
else:
ax_to_update.set_xticklabels([]) # remove any tick lables if they aren't to be used.
# add vertical lines every year.
for major_tick_n, datetime_majortick in enumerate(ticks_major['datetimes']):
if datetime_majortick.month == 1: # if it's the january tick (i.e the 1st of the year)
ax_to_update.axvline(x = ticks_major['n_day'][major_tick_n], color='k', alpha=0.1, linestyle='--')
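# Example call (illustrative): major ticks every 3 months and minor ticks every
# month, on an axes whose x values are days since 2021_01_01:
#
#   xticks_every_nmonths(ax, "20210101", np.array([0, 30, 90, 180, 365]), True)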
#%%
def plot_points_interest(r3_data, points_interest, baselines_cs, acq_dates, title = '', ylabel = 'm',
ylims = None):
""" Given rank 3 data of incremental interferograms (e.g. n_images x height x width) and some points of interest (e.g. an xy pair), plot the cumulative time
series for those points (i.e. as r3 is incremental, summing is done in the function). Also information is required (baselines and acq_dates) for the x axis of the plot.
Inputs:
r3_data | rank 3 array (masked array support?) | incremental interferograms, rank 3 (n_images x height x width)
points_interest | dict | maps a point name (e.g. 'reference' or 'deforming') to a tuple of its x and y position.
baselines_cs | rank 1 array | cumulative temporal baselines in days.
acq_dates | string of YYYYMMDD | date of each radar acquisition.
title | string | figure and window title.
ylabel | string | units of r3_data (e.g. m, mm, cm, rad etc. )
Returns:
Figure
History:
2021_09_22 | MEG | Added to package.
"""
import numpy as np
import matplotlib.pyplot as plt
from datetime import datetime, timedelta
from scipy import optimize
def test_func(x, a, b, c):
""" a sets amplitude, b sets frequency, c sets gradient of linear term
"""
return c * x + (a * np.sin((2*np.pi *(1/b) * x)))
f, ax = plt.subplots(figsize = (10,6))
f.canvas.manager.set_window_title(title)
ax.set_title(title)
ax.grid(True)
ax.set_xlabel('Time (days)')
ax.set_ylabel(ylabel)
ax.axhline(0,c = 'k')
for key, value in points_interest.items():
ax.scatter(baselines_cs, np.cumsum(r3_data[:,value[1], value[0]]), label = key) # plot each of the points.
try:
params, params_covariance = optimize.curve_fit(test_func, baselines_cs, np.cumsum(r3_data[:,points_interest['highlands'][1],
points_interest['highlands'][0]]), p0=[15, 365, 0.01]) # p0 is first guess at abc parameters for sinusoid (ie. 365 means suggesting it has an annual period)
y_highlands_predict = test_func(baselines_cs, params[0], params[1], params[2]) # predict points of line.
ax.plot(baselines_cs, y_highlands_predict, c='k', label = 'Sinusoid + linear') # plot the line of best fit.
except Exception:
print("Failed to find a highlands point to fit a line of best fit to, but continuing anyway.")
ax.legend()
if ylims is not None:
ax.set_ylim(bottom = ylims['bottom'], top = ylims['top'])
start, end = ax.get_xlim()
ax.xaxis.set_ticks(np.arange(start, end, 180))
xticks_dayn = ax.get_xticks()
xticks_date = []
day0_date = datetime.strptime(acq_dates[0], '%Y%m%d')
for xtick_dayn in xticks_dayn:
xtick_date = day0_date + timedelta(days = float(xtick_dayn)) # add the number of days to the original date
xticks_date.append(xtick_date.strftime('%Y_%m_%d'))
ax.set_xticklabels(xticks_date, rotation = 'vertical')
f.subplots_adjust(bottom=0.2)
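# Example call (illustrative): points are (x, y) pixel tuples; including a
# 'highlands' key enables the sinusoid + linear fit plotted above.
#
#   plot_points_interest(ifgs_r3, {'highlands': (40, 12), 'reference': (5, 5)},
#                        baselines_cs, acq_dates, title='cumulative time series')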
#%%
def BssResultComparison(S_synth, tc_synth, S_pca, tc_pca, S_ica, tc_ica, pixel_mask, title):
""" A function to plot the results of PCA and ICA against the synthesised sources
Inputs:
S_synth | rank 2 array | synthesised sources images as rows (e.g. 2 x 5886)
tc_synth | rank 2 array | synthesised time courses as columns (e.g. 19 x 2)
S_pca | PCA sources
tc_pca | PCA time courses
S_ica | ica sources
tc_ica | ica time courses
pixel_mask | rank 2 pixel mask | used to convert column arrays to 2d masked arrays (ie for ifgs)
title | string | figure name
"""
import numpy as np
import numpy.ma as ma
import matplotlib.pyplot as plt
import matplotlib
import matplotlib.gridspec as gridspec
def source_row(sources, tcs, grid):
""" Given a grid object, plot up to 6 spatial maps and time courses in a 2x6 grid
"""
def linegraph(sig, ax):
""" signal is a 1xt row vector """
times = sig.size
if times > 20:
times = 20
a = np.arange(times)
ax.plot(a,sig[:20], color='k')
ax.axhline(y=0, color='k', alpha=0.4)
ax.set_xticks([])
ax.set_yticks([])
#ax.set_aspect(1)
def ifg(sig, ax):
""" signal is a 1xt row vector """
from small_plot_functions import col_to_ma
#ax.imshow(col_to_ma(sig, pixel_mask), cmap = matplotlib.cm.coolwarm, vmin = -1, vmax = 1)
ax.imshow(col_to_ma(sig, pixel_mask), cmap = matplotlib.cm.coolwarm)
ax.set_xticks([])
ax.set_yticks([])
grid_inner = gridspec.GridSpecFromSubplotSpec(2, 6, subplot_spec=grid, wspace=0.0, hspace=0.0)
for j in np.arange(0,6):
if j < np.size(sources, axis= 0):
ax_ifg = plt.subplot(grid_inner[0, j])
ax_line = plt.subplot(grid_inner[1, j])
ifg(sources[j,:], ax_ifg)
linegraph(tcs[:,j], ax_line)
fig_extraSources_comps = plt.figure(title, figsize=(8,8))
grid_rows = gridspec.GridSpec(3, 1)
source_row(S_synth, tc_synth, grid_rows[0])
source_row(S_pca, tc_pca, grid_rows[1])
source_row(S_ica, tc_ica, grid_rows[2])
fig_extraSources_comps.tight_layout(rect =[0.05,0,1,1])
fig_extraSources_comps.text(0.05, 0.88, 'Sources', fontsize=12, rotation = 90, horizontalalignment='center')
fig_extraSources_comps.text(0.05, 0.55, 'sPCA', fontsize=12, rotation = 90, horizontalalignment='center')
fig_extraSources_comps.text(0.05, 0.24, 'sICA', fontsize=12, rotation = 90, horizontalalignment='center')
#%% ifg plot
def ifg_plot_V2(ifgs, pixel_mask, cols, title, colorbar=False, shared=True):
"""
Function to plot a time series of ifg
ifgs | pxt matrix of ifgs as columns (p pixels, t times)
pixel_mask | mask to turn spatial maps back to regular gridded masked arrays
cols | number of columns for ifg plot to have.
colorbar | 1 or 0 | 1 add colorbar to each ifg, 0 add one for whole figure (if shared is set to 1, too)
shared | 1 or 0 | 1 and all ifgs share the same colour scale, 0 and don't
DEPENDENCIES:
2017/02/17 | modified to use masked arrays that are given as vectors by ifgs, but can be converted back to
masked arrays using the pixel mask
2017/05/08 | option to add a colorbar to each ifg
2017/07/03 | option to make all ifgs share the same scale
2017/07/03 | function from stack exchange to make 0 of ifgs plot as white.
2017/07/06 | fix bug for colourbars that start at 0 and go negative (reverse of above problem)
2017/08/09 | update so that colourbar is cropped for skewed data (ie -1 to +10 won't have -1 as dark red as +10 is a dark blue)
2017/10/05 | fix a bug in how the colorbars are plotted
2017/10/05 | switch to v2
2017/10/06 | fix a bug in how the colormaps are done when not shared
2017/10/11 | remove lines redundant after change to remappedColorMap
"""
import numpy as np
import numpy.ma as ma
import matplotlib
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import AxesGrid
from small_plot_functions import remappedColorMap
# colour map stuff
ifg_colours = plt.get_cmap('coolwarm')
if shared:
cmap_mid = 1 - np.max(ifgs)/(np.max(ifgs) + abs(np.min(ifgs))) # get the ratio of the data that 0 lies at (eg if data is -15 to 5, ratio is 0.75)
print('Cmap centre: ' + str(cmap_mid))
if cmap_mid > 0.5:
ifg_colours_cent = remappedColorMap(ifg_colours, start=0.0, midpoint=cmap_mid, stop=(0.5 + (1-cmap_mid)), name='shiftedcmap')
else:
ifg_colours_cent = remappedColorMap(ifg_colours, start=(0.5 - cmap_mid), midpoint=cmap_mid, stop=1, name='shiftedcmap')
#data stuff - convert the ifgs from columns to a list of masked arrays
ifgs_ma = []
for i in range(np.size(ifgs,1)):
ifgs_ma.append(ma.array(np.zeros(pixel_mask.shape), mask = pixel_mask ))
ifgs_ma[i].unshare_mask()
ifgs_ma[i][~ifgs_ma[i].mask] = ifgs[:,i].ravel()
# the plot
pixels, times = ifgs.shape
rows = int(np.ceil(times/float(cols)))
f, (ax_all) = plt.subplots(rows, cols, figsize=(14,4))
f.suptitle(title, fontsize=14)
ax_all_1d = np.ndarray.flatten(ax_all)
for temp_ax in ax_all_1d: # strip ticks from every subplot
temp_ax.set_yticks([])
temp_ax.set_xticks([])
# loop through plotting ifgs
for i in range(times):
if shared:
im = ax_all_1d[i].imshow(ifgs_ma[i], cmap = ifg_colours_cent, vmin=np.min(ifgs), vmax=np.max(ifgs)) # either plot with shared colours
else:
cmap_mid = 1 - np.max(ifgs[:,i])/(np.max(ifgs[:,i]) + abs(np.min(ifgs[:,i]))) # get cmap mid for each ifg
ifg_cols_local = remappedColorMap(ifg_colours, start=0.0, midpoint=cmap_mid, stop=1, name='ifg_cols_local') # remappedColorMap function now includes stuff from below
im = ax_all_1d[i].imshow(ifgs_ma[i], cmap = ifg_cols_local) # or just own scale
if colorbar == 1: # and if own scale, might want a colorbar on each plot
f.colorbar(im, ax=ax_all_1d[i])
#shared colorbar
if colorbar == 1 and shared == 1: # if colormap is shared, only one colorbar needed at edge
f.subplots_adjust(right=0.8)
cbar_ax = f.add_axes([0.95, 0.15, 0.02, 0.7])
f.colorbar(im, cax=cbar_ax)
f.tight_layout()
# remove any unused axes
ax_list = np.ravel(ax_all) # all axes as list
ax_list = ax_list[times:] # ones we want to delete
for axes in ax_list: # loop through deleting them
f.delaxes(axes)
f.tight_layout()
#%%
def component_plot(spatial_map, pixel_mask, timecourse, shape, title, shared = 0, temporal_baselines = None):
"""
Input:
spatial map | pxc matrix of c component maps (p pixels)
pixel_mask | mask to turn spatial maps back to regular gridded masked arrays
timecourse | cxt matrix of c time courses (t long)
shape | tuple | the shape of the grid that the spatial maps are reshaped to
shared | 0 or 1 | if 1, spatial maps share colorbar and time courses shared vertical axis
temporal_baselines | x axis values for time courses. Useful if some data are missing (ie the odd 24 day ifgs in a time series of mainly 12 day)
2017/02/17 | modified to use masked arrays that are given as vectors by spatial map, but can be converted back to
masked arrays using the pixel mask
2017/05/12 | shared scales as described in 'shared'
2017/05/15 | remove shared colorbar for spatial maps
2017/10/16 | remove limit on the number of components to plot (was 5)
2017/12/06 | Add a colorbar if the plots are shared, add an option for the time courses to be done in days
2017/12/?? | add the option to pass temporal baselines to the function
"""
import numpy as np
import numpy.ma as ma
import matplotlib.pyplot as plt
import matplotlib
from small_plot_functions import remappedColorMap
def linegraph(sig, ax, temporal_baselines = None):
""" signal is a 1xt row vector """
if temporal_baselines is None:
times = sig.size
a = np.arange(times)
else:
a = temporal_baselines
ax.plot(a,sig,marker='o', color='k')
ax.axhline(y=0, color='k', alpha=0.4)
# colour map stuff
ifg_colours = plt.get_cmap('coolwarm')
cmap_mid = 1 - np.max(spatial_map)/(np.max(spatial_map) + abs(np.min(spatial_map))) # get the ratio of the data that 0 lies at (eg if data is -15 to 5, ratio is 0.75)
if cmap_mid < (1/257): # this is a fudge so that a plot starting at 0 doesn't include the negative colours for the smallest values
ifg_colours_cent = remappedColorMap(ifg_colours, start=0.5, midpoint=0.5, stop=1.0, name='shiftedcmap')
else:
ifg_colours_cent = remappedColorMap(ifg_colours, start=0.0, midpoint=cmap_mid, stop=1.0, name='shiftedcmap')
#make a list of ifgs as masked arrays (and not column vectors)
spatial_maps_ma = []
for i in range(np.size(spatial_map,1)):
spatial_maps_ma.append(ma.array(np.zeros(pixel_mask.shape), mask = pixel_mask ))
spatial_maps_ma[i].unshare_mask()
spatial_maps_ma[i][~spatial_maps_ma[i].mask] = spatial_map[:,i].ravel()
n_sources = spatial_map.shape[1]
f, (ax_all) = plt.subplots(2, n_sources, figsize=(15,7))
f.suptitle(title, fontsize=14)
f.canvas.manager.set_window_title(title)
for i in range(n_sources):
im = ax_all[0,i].imshow(spatial_maps_ma[i], cmap = ifg_colours_cent, vmin = np.min(spatial_map), vmax = np.max(spatial_map))
ax_all[0,i].set_xticks([])
ax_all[0,i].set_yticks([])
for i in range(n_sources):
linegraph(timecourse[i,:], ax_all[1,i], temporal_baselines)
if temporal_baselines is not None:
ax_all[1,i].set_xlabel('Days')
if shared ==1:
ax_all[1,i].set_ylim([np.min(timecourse) , np.max(timecourse)])
if shared == 1:
f.tight_layout(rect=[0, 0, 0.94, 1])
cax = f.add_axes([0.94, 0.6, 0.01, 0.3])
f.colorbar(im, cax=cax, orientation='vertical')
|
from maha.expressions import EXPRESSION_SPACE, EXPRESSION_SPACE_OR_NONE
from maha.parsers.templates import Value
from maha.rexy import Expression, non_capturing_group
from ..common import ALL_ALEF, SUM_SUFFIX, TWO_SUFFIX
TEN_SUFFIX = f"{EXPRESSION_SPACE_OR_NONE}[تط]?[اع]?شر?[ةه]?"
TEH_OPTIONAL_SUFFIX = "[ةه]?"
EXPRESSION_OF_FASILA = Expression(
EXPRESSION_SPACE + "فاصل" + TEH_OPTIONAL_SUFFIX + EXPRESSION_SPACE
)
three_prefix = "[ثت]لا[ثت]"
four_prefix = "[أا]ربع"
five_prefix = "خمس"
six_prefix = "ست"
seven_prefix = "سبع"
eight_prefix = "[تث]ما?ني?"
nine_prefix = "تسع"
ten_prefix = "عشر"
ZERO = Value(0, "صفر")
ONE = Value(1, "وا?حد" + TEH_OPTIONAL_SUFFIX)
TWO = Value(2, "[إا][ثت]نت?[اي]ن")
THREE = Value(3, three_prefix + TEH_OPTIONAL_SUFFIX)
FOUR = Value(4, four_prefix + TEH_OPTIONAL_SUFFIX)
FIVE = Value(5, five_prefix + TEH_OPTIONAL_SUFFIX)
SIX = Value(6, six_prefix + TEH_OPTIONAL_SUFFIX)
SEVEN = Value(7, seven_prefix + TEH_OPTIONAL_SUFFIX)
EIGHT = Value(8, eight_prefix + TEH_OPTIONAL_SUFFIX)
NINE = Value(9, nine_prefix + TEH_OPTIONAL_SUFFIX)
TEN = Value(10, ten_prefix + TEH_OPTIONAL_SUFFIX)
ELEVEN = Value(11, f"{ALL_ALEF}?حد[اى]?" + TEN_SUFFIX)
TWELVE = Value(
12,
non_capturing_group(
f"{ALL_ALEF}[طت]نا?" + TEN_SUFFIX,
f"{ALL_ALEF}[ثت]نت?[اىي]ن?" + TEN_SUFFIX,
),
)
THIRTEEN = Value(13, "[ثت]لا?[ثت]" + TEH_OPTIONAL_SUFFIX + TEN_SUFFIX)
FOURTEEN = Value(14, FOUR + TEN_SUFFIX)
FIFTEEN = Value(15, FIVE + TEN_SUFFIX)
SIXTEEN = Value(16, SIX + TEN_SUFFIX)
SEVENTEEN = Value(17, SEVEN + TEN_SUFFIX)
EIGHTEEN = Value(18, "[تث]ما?ني?" + TEH_OPTIONAL_SUFFIX + TEN_SUFFIX)
NINETEEN = Value(19, NINE + TEN_SUFFIX)
TWENTY = Value(20, ten_prefix + SUM_SUFFIX)
THIRTY = Value(30, three_prefix + SUM_SUFFIX)
FORTY = Value(40, four_prefix + SUM_SUFFIX)
FIFTY = Value(50, five_prefix + SUM_SUFFIX)
SIXTY = Value(60, six_prefix + SUM_SUFFIX)
SEVENTY = Value(70, seven_prefix + SUM_SUFFIX)
EIGHTY = Value(80, eight_prefix + SUM_SUFFIX)
NINETY = Value(90, nine_prefix + SUM_SUFFIX)
ONE_HUNDRED = Value(100, "ما?[يئ][ةه]")
TWO_HUNDREDS = Value(200, "م[يئ]ت" + TWO_SUFFIX)
THREE_HUNDREDS = Value(300, THREE + EXPRESSION_SPACE_OR_NONE + ONE_HUNDRED)
FOUR_HUNDREDS = Value(400, FOUR + EXPRESSION_SPACE_OR_NONE + ONE_HUNDRED)
FIVE_HUNDREDS = Value(500, FIVE + EXPRESSION_SPACE_OR_NONE + ONE_HUNDRED)
SIX_HUNDREDS = Value(600, SIX + EXPRESSION_SPACE_OR_NONE + ONE_HUNDRED)
SEVEN_HUNDREDS = Value(700, SEVEN + EXPRESSION_SPACE_OR_NONE + ONE_HUNDRED)
EIGHT_HUNDREDS = Value(800, EIGHT + EXPRESSION_SPACE_OR_NONE + ONE_HUNDRED)
NINE_HUNDREDS = Value(900, NINE + EXPRESSION_SPACE_OR_NONE + ONE_HUNDRED)
SEVERAL_HUNDREDS = Value(100, "م[يئ]ات")
ONE_THOUSAND = Value(1000, "[أا]لف")
TWO_THOUSANDS = Value(2000, ONE_THOUSAND + TWO_SUFFIX)
SEVERAL_THOUSANDS = Value(
1000, non_capturing_group(f"{ALL_ALEF}ل[او]ف", f"{ALL_ALEF}لفات")
)
ONE_MILLION = Value(1000000, "مليون")
TWO_MILLIONS = Value(2000000, ONE_MILLION + TWO_SUFFIX)
SEVERAL_MILLIONS = Value(1000000, "ملايين")
ONE_BILLION = Value(1000000000, non_capturing_group("بليون", "مليار"))
TWO_BILLIONS = Value(2000000000, ONE_BILLION + TWO_SUFFIX)
SEVERAL_BILLIONS = Value(1000000000, non_capturing_group("بلايين", "مليارات"))
ONE_TRILLION = Value(1000000000000, "تري?ليون")
TWO_TRILLIONS = Value(2000000000000, ONE_TRILLION + TWO_SUFFIX)
SEVERAL_TRILLIONS = Value(1000000000000, ONE_TRILLION + "ات")
|
import sys, os
def main():
try:
with open(sys.argv[1], 'rb') as f:
for offset in range(0, os.path.getsize(sys.argv[1]), 16):
data = f.read(16)
print('{:08X} | {:47} | {}'.format(offset, hex_bytes(data), printable(data)))
except (IndexError, OSError):
print('Usage: {} <filename>'.format(os.path.basename(sys.argv[0])))
# helper lambdas renamed so they no longer shadow the built-ins hex() and str()
hex_bytes = lambda data: ' '.join('{:02X}'.format(i) for i in data)
printable = lambda data: ''.join(31 < i < 127 and chr(i) or '.' for i in data)
if __name__ == '__main__':
main()
|
# Save-Humanity-Hackerrank-problem-Solution
#Solution
#Python 3.
#!/bin/python3
import math
import os
import random
import re
import sys
#
# Complete the 'virusIndices' function below.
#
# The function accepts following parameters:
# 1. STRING p
# 2. STRING v
#
def small_match(w1, w2):
# Direct scan: succeed (return 1) iff w1 and w2 differ in at most one position.
counter = 0
for i in range(len(w1)):
if w1[i] != w2[i]:
counter += 1
if counter > 1:
return 0
return 1
def match(w1, w2):
# Divide and conquer: split both strings in half. Equal halves contribute no
# mismatches, so at most one half may differ; recurse into that half only.
length = len(w1)
if length < 10:
return small_match(w1, w2)
w11 = w1[:length // 2]
w12 = w1[length // 2:]
w21 = w2[:length // 2]
w22 = w2[length // 2:]
s1 = (w11 == w21)
s2 = (w12 == w22)
if s1 and s2:
return True
elif s1 and not s2:
return match(w12, w22)
elif not s1 and s2:
return match(w11, w21)
else:
return False
def virusIndices(p, v):
res = ''
if len(v) > len(p):
return "No Match!"
else:
for i in range(len(p) - len(v) + 1):
temp = p[i:i + len(v)]
flag = match(temp, v)
if flag:
res += str(i) + ' '
if len(res) == 0:
return "No Match!"
else:
return res.strip()
if __name__ == '__main__':
t = int(input().strip())
for t_itr in range(t):
first_multiple_input = input().rstrip().split()
p = first_multiple_input[0]
v = first_multiple_input[1]
print(virusIndices(p, v))
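# Worked example (illustrative): for p = "abcabc" and v = "abd", the windows at
# indices 0 and 3 ("abc") each differ from v in exactly one position, so
# virusIndices("abcabc", "abd") returns "0 3".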
|
from pybb.models import Forum, Topic, Post
from zinnia.models import Entry
from django_messages.models import Message # 190414 GT: added custom activity type private-message
from commons.models import UserProfile, Folder, FolderDocument, Project, ProjectMember, Repo, OER, OerEvaluation, LearningPath, PathNode
xapi_namespaces = {
'as': 'activitystrea.ms',
}
verbs = ['Accept', 'Approve', 'Bookmark', 'Create', 'Delete', 'Edit', 'Play', 'Reject', 'Search', 'Send', 'Submit', 'View']
xapi_verbs = {
'Accept': {'id': 'http://activitystrea.ms/schema/1.0/accept',
'display': {'en-US': 'accepted', 'it-IT': 'ha accettato' }},
'Approve': {'id': 'http://activitystrea.ms/schema/1.0/approve',
'display': {'en-US': 'approved', 'it-IT': 'ha approvato' }},
'Bookmark': {'id': 'http://id.tincanapi.com/verb/bookmarked',
'display': {'en-US': 'bookmarked', 'it-IT': 'ha creato un segnalibro per' }},
'Create': {'id': 'http://activitystrea.ms/schema/1.0/create',
'display': {'en-US': 'created', 'it-IT': 'ha creato' }},
'Delete': {'id': 'http://activitystrea.ms/schema/1.0/delete',
'display': {'en-US': 'deleted', 'it-IT': 'ha cancellato' }},
'Edit': {'id': 'http://curatr3.com/define/verb/edited',
'display': {'en-US': 'edited', 'it-IT': 'ha editato' }},
'Play': {'id': 'http://activitystrea.ms/schema/1.0/play',
'display': {'en-US': 'played', 'it-IT': 'ha interagito con' }},
'Reject': {'id': 'http://activitystrea.ms/schema/1.0/reject',
'display': {'en-US': 'rejected', 'it-IT': 'ha rifiutato' }},
'Search': {'id': 'http://activitystrea.ms/schema/1.0/search',
'display': {'en-US': 'searched', 'it-IT': 'ha cercato' }},
'Send': {'id': 'http://activitystrea.ms/schema/1.0/send',
'display': {'en-US': 'sent', 'it-IT': 'ha inviato' }},
'Submit': {'id': 'http://activitystrea.ms/schema/1.0/submit',
'display': {'en-US': 'submitted', 'it-IT': 'ha sottoposto' }},
'View': {'id': 'http://id.tincanapi.com/verb/viewed',
'display': {'en-US': 'viewed', 'it-IT': 'ha visto' }},
}
xapi_activities = {
UserProfile.__name__: {
'type': 'http://id.tincanapi.com/activitytype/user-profile',
},
Folder.__name__: {
'type': 'http://activitystrea.ms/schema/1.0/collection',
},
FolderDocument.__name__: {
'type': 'http://activitystrea.ms/schema/1.0/file',
},
Project.__name__: {
'type': 'http://activitystrea.ms/schema/1.0/group',
},
ProjectMember.__name__: {
'type': 'http://commonspaces.eu/activitytype/membership',
},
Forum.__name__: {
'type': 'http://id.tincanapi.com/activitytype/discussion',
},
Topic.__name__: {
'type': 'http://id.tincanapi.com/activitytype/forum-topic',
},
Post.__name__: {
'type': 'http://id.tincanapi.com/activitytype/forum-reply',
},
Repo.__name__: {
'type': 'http://activitystrea.ms/schema/1.0/collection',
},
OER.__name__: {
'type': 'http://id.tincanapi.com/activitytype/resource',
},
OerEvaluation.__name__: {
'type': 'http://activitystrea.ms/schema/1.0/review',
},
LearningPath.__name__: {
'type': 'http://id.tincanapi.com/activitytype/playlist',
},
PathNode.__name__: {
'type': 'http://adlnet.gov/expapi/activities/module',
},
Entry.__name__: {
'type': 'http://activitystrea.ms/schema/1.0/article',
},
Message.__name__: {
'type': 'http://commonspaces.eu/activitytype/private-message',
},
'Webpage': {
'type': 'http://activitystrea.ms/schema/1.0/page',
},
}
xapi_contexts = {
Project.__name__: '',
LearningPath.__name__: '',
}
|
from .router import CMSRouter
__all__ = ["CMSRouter"]
|
# coding=utf-8
# Copyright (C) 2010-2013 Claudio Guarnieri.
# Copyright (C) 2014-2016 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
import collections
import datetime
import json
import sys
import time
class Instance(object):
"""Machine Learning (clustering) for Cuckoo."""
LABEL_SIGNIFICANCE_COUNT = 5
POSITIVE_RATE = 2 * LABEL_SIGNIFICANCE_COUNT
def __init__(self):
self.json_path = ""
self.bin_path = ""
self.name = ""
self.report = None
self.total = None
self.positives = None
self.scans = None
self.label = None
self.features = {}
self.basic_features = {}
def load_json(self, json_file, name="unknown"):
"""Load JSON formatted malware report. It can handle both a path to
JSON file and a dictionary object."""
if isinstance(json_file, str):
self.json_path = json_file
with open(json_file, "r") as malware_report:
try:
self.report = json.load(malware_report)
except ValueError, error:
print >> sys.stderr, "Could not load file;", \
malware_report, "is not a valid JSON file."
print >> sys.stderr, "Exception: %s" % str(error)
sys.exit(1)
elif isinstance(json_file, dict):
self.report = json_file
else:
# Unknown binary format
print >> sys.stderr, "Could not load the data *", json, "* is of " \
"unknown type: ", type(json), "."
self.name = name
        # Get total and positives
        self.total = self.report.get("virustotal", {}).get("total")
        self.positives = self.report.get("virustotal", {}).get("positives")
        # Pull all VT normalised results
        self.scans = self.report.get("virustotal", {}).get("scans")
def label_sample(self, external_labels=None, label_type="family"):
"""Generate label for the loaded sample. You can use platform, cve, metatype, type, and family (default)."""
merged_labels = []
if external_labels is None and self.scans is not None:
for vendor in self.scans:
merged_labels += self.scans[vendor]["normalized"][label_type]
elif external_labels is not None and self.scans is None:
merged_labels = external_labels
if not merged_labels:
self.label = "none"
return
# Get most common label if it has more hits than set threshold
labels_frequency = collections.Counter(merged_labels)
top_label, top_label_count = labels_frequency.most_common(1)[0]
if top_label_count >= self.LABEL_SIGNIFICANCE_COUNT:
# self.positives >= self.POSITIVE_RATE:
            self.label = top_label.encode("ascii", "ignore").decode("ascii")
else:
self.label = "none"
def update(self, element, location):
"""Insert `element` at given `location`."""
element_to_update = self.report
for l in location[:-1]:
etu = element_to_update.get(l)
if etu is None:
element_to_update[l] = {}
element_to_update = element_to_update.get(l)
else:
element_to_update = etu
element_to_update[location[-1]] = element
def save_json(self, root_dir):
"""Save JSON stored in the class to a file."""
with open(root_dir+self.name, "w") as j_file:
json.dump(self.report, j_file)
def extract_features(self):
"""Extract features of the loaded sample."""
self.extract_features_static()
self.extract_features_dynamic()
def extract_features_static(self):
"""Extract static features of the loaded sample."""
self.feature_static_metadata()
self.feature_static_signature()
self.feature_static_heuristic()
self.feature_static_packer()
self.feature_static_pef()
self.feature_static_imports()
def extract_features_dynamic(self):
"""Extract dynamic features of the loaded sample."""
self.feature_dynamic_imports()
self.feature_dynamic_filesystem()
self.feature_dynamic_network()
self.feature_dynamic_registry()
self.feature_dynamic_windowsapi()
def feature_static_metadata(self):
"""Create features form extracted binary metadata."""
# Get binary size
self.features["size"] = \
self.report.get("target", {}).get("file", {}).get("size")
# Get binary timestamp in the UNIX timestamp format
str_dt = self.report.get("static", {}).get("pe_timestamp")
ts = None
if str_dt is not None:
dt = datetime.datetime.strptime(str_dt, "%Y-%m-%d %H:%M:%S")
ts = int(time.mktime(dt.timetuple()))
self.features["timestamp"] = ts
# ExifTool output
et_tokens = ["FileDescription", "OriginalFilename"]
for token in et_tokens:
self.features[token] = None
for attr in self.report.get("static", {}).get("pe_versioninfo", []):
attr_name = attr.get("name")
if attr_name in et_tokens:
self.features[attr_name] = attr.get("value")
# Magic byte
self.features["magic_byte"] = \
self.report.get("target", {}).get("file", {}).get("type")
def feature_static_signature(self):
"""Create features form binary signature check."""
# Check availability of digital signature
self.features["signed"] = \
bool(self.report.get("static", {}).get("signature", []))
# ExifTool output
et_tokens = ["Comments", "ProductName", "LegalCopyright", \
"InternalName", "CompanyName"]
for token in et_tokens:
self.features[token] = None
for attr in self.report.get("static", {}).get("pe_versioninfo", []):
attr_name = attr.get("name")
if attr_name in et_tokens:
self.features[attr_name] = attr.get("value")
def feature_static_heuristic(self):
"""Create features form results return by heuristic tools.
**Not available for current JSON content.**"""
pass
def feature_static_packer(self):
"""Create feature from information returned by packer/cryptor
detectors."""
self.features["packer"] = \
self.report.get("static", {}).get("peid_signatures", None)
def feature_static_pef(self):
"""Create features from information derived form portable executable
format."""
# Get resource languages
self.features["languages"] = []
for d in self.report.get("static", {}).get("pe_resources", []):
lang = d.get("language", False)
if lang:
if lang.startswith("LANG_"):
lang = lang[5:]
else:
lang = lang
if lang not in self.features["languages"]:
self.features["languages"].append(lang)
sublang = d.get("sublanguage", False)
if sublang:
if sublang.startswith("SUBLANG_"):
sublang = sublang[8:]
else:
sublang = sublang
if sublang not in self.features["languages"]:
self.features["languages"].append(sublang)
# Section and resource attributes
self.features["section_attrs"] = {}
for d in self.report.get("static", {}).get("pe_sections", []):
n = d.get("name")
e = d.get("entropy")
            if n and e is not None:
self.features["section_attrs"][n] = e
self.features["resource_attrs"] = {}
for d in self.report.get("static", {}).get("pe_resources", []):
n = d.get("name")
f = d.get("filetype")
if n and f:
self.features["resource_attrs"][n] = f
def feature_static_imports(self):
"""Extract features from static imports like referenced library
functions."""
self.features["static_imports"] = {}
# Static libraries import count
self.features["static_imports"]["count"] = \
self.report.get("static", {}).get("imported_dll_count", None)
# Get all imported libraries
for d in self.report.get("static", {}).get("pe_imports", []):
ddl_name = d.get("dll")
if not ddl_name:
continue
self.features["static_imports"][ddl_name] = []
for i in d.get("imports", []):
ref = i.get("name")
if ref is not None:
self.features["static_imports"][ddl_name].append(ref)
def feature_dynamic_imports(self):
"""Extract features from dynamic imports, mutexes, and processes."""
# Get mutexes
self.features["mutex"] = \
self.report.get("behavior", {}).get("summary", {}).get("mutex")
# Get processes names
self.features["processes"] = []
for p in self.report.get("behavior", {}).get("processes", []):
p_name = p.get("process_name")
if p_name and p_name not in self.features["processes"]:
self.features["processes"].append(p_name)
# Get dynamically loaded library names
self.features["dynamic_imports"] = \
self.report.get("behavior", {}).get("summary", {})\
.get("dll_loaded", [])
def feature_dynamic_filesystem(self):
"""Extract features from filesystem operations."""
def flatten_list(structured):
"""Flatten nested list."""
flat = []
for i in structured:
flat += i
return flat
# Get file operations and their number
self.features["file_read"] = \
self.report.get("behavior", {}).get("summary", {})\
.get("file_read", [])
self.features["files_read"] = len(self.features["file_read"])
self.features["file_written"] = \
self.report.get("behavior", {}).get("summary", {})\
.get("file_written", [])
self.features["files_written"] = len(self.features["file_written"])
self.features["file_deleted"] = \
self.report.get("behavior", {}).get("summary", {})\
.get("file_deleted", [])
self.features["files_deleted"] = len(self.features["file_deleted"])
self.features["file_copied"] = flatten_list(\
self.report.get("behavior", {}).get("summary", {})\
.get("file_copied", [])
)
self.features["files_copied"] = len(\
self.report.get("behavior", {}).get("summary", {})\
.get("file_copied", [])
)
self.features["file_renamed"] = flatten_list(\
self.report.get("behavior", {}).get("summary", {})\
.get("file_moved", [])
)
self.features["files_renamed"] = len(self.features["file_renamed"])
# Get other file operations numbers
self.features["files_opened"] = len(
self.report.get("behavior", {}).get("summary", {})\
.get("file_opened", [])
)
self.features["files_exists"] = len(
self.report.get("behavior", {}).get("summary", {})\
.get("file_exists", [])
)
self.features["files_failed"] = len(
self.report.get("behavior", {}).get("summary", {})\
.get("file_failed", [])
)
# Get total number of unique touched files
file_operations = \
self.report.get("behavior", {}).get("summary", {})\
.get("file_read", []) + \
self.report.get("behavior", {}).get("summary", {})\
.get("file_written", []) + \
self.report.get("behavior", {}).get("summary", {})\
.get("file_deleted", []) + \
flatten_list(self.report.get("behavior", {}).get("summary", {})\
.get("file_copied", [])) + \
flatten_list(self.report.get("behavior", {}).get("summary", {})\
.get("file_moved", [])) + \
self.report.get("behavior", {}).get("summary", {})\
.get("file_recreated", []) + \
self.report.get("behavior", {}).get("summary", {})\
.get("file_opened", []) + \
self.report.get("behavior", {}).get("summary", {})\
.get("file_exists", []) + \
self.report.get("behavior", {}).get("summary", {})\
.get("file_failed", [])
# remove duplicates
self.features["files_operations"] = len(list(set(file_operations)))
def feature_dynamic_network(self):
"""Extract features from network operations."""
# Get TCP IP addresses
self.features["tcp"] = []
for c in self.report.get("network", {}).get("tcp", []):
c_dst = c.get("dst")
if c_dst and c_dst not in self.features["tcp"]:
self.features["tcp"].append(c_dst)
# Get UDP IPs
self.features["udp"] = []
for c in self.report.get("network", {}).get("udp", []):
c_dst = c.get("dst")
if c_dst and c_dst not in self.features["udp"]:
self.features["udp"].append(c_dst)
# Get DNS queries and responses
self.features["dns"] = {}
for c in self.report.get("network", {}).get("dns", []):
request = c.get("request")
if request:
self.features["dns"][request] = []
else:
continue
answers = c.get("answers", [])
for a in answers:
a_type = a.get("type")
a_data = a.get("data")
if a_type == "A" and a_data:
self.features["dns"][request].append(a_data)
# Get HTTP requests: method, host, port, path
self.features["http"] = {}
for c in self.report.get("network", {}).get("http", []):
c_data = c.get("data")
if c_data:
self.features["http"][c_data] = {}
else:
continue
c_method = c.get("method")
if c_method:
self.features["http"][c_data]["method"] = c_method
c_host = c.get("host")
if c_host:
self.features["http"][c_data]["host"] = c_host
c_port = c.get("port")
if c_port:
self.features["http"][c_data]["port"] = c_port
def feature_dynamic_registry(self):
"""Extract features from registry operations."""
# Registry written
self.features["regkey_written"] = \
self.report.get("behavior", {}).get("summary", {})\
.get("regkey_written", [])
# Registry delete
self.features["regkey_deleted"] = \
self.report.get("behavior", {}).get("summary", {})\
.get("regkey_deleted", [])
def feature_dynamic_windowsapi(self):
"""Extract features from Windows API calls sequence."""
self.features["api_stats"] = {}
apistats = self.report.get("behavior", {}).get("apistats", {})
for d in apistats:
for e in apistats[d]:
if e in self.features["api_stats"]:
self.features["api_stats"][e] += apistats[d][e]
else:
self.features["api_stats"][e] = apistats[d][e]
def extract_basic_features(self):
"""Extract very basic set of features from *signatures* JSON field.
These are extracted characteristics of the binary by cuckoo sandbox."""
if self.basic_features:
self.basic_features = {}
for s in self.report.get("signatures", []):
name = s.get("name", "")
description = s.get("description", "")
if name:
self.basic_features[name] = description
continue
if description:
self.basic_features[hash(description)] = description
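# A minimal usage sketch (not part of the original module), assuming a
# Cuckoo report with a "virustotal" section at the hypothetical path
# "report.json".
if __name__ == "__main__":
    instance = Instance()
    instance.load_json("report.json", name="sample-1")
    instance.label_sample()        # majority-vote family label from VT scans
    instance.extract_features()    # static + dynamic feature extraction
    print(instance.label, len(instance.features))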
|
# coding:utf-8
'''
@author = super_fazai
@File : ip_pools.py
@Time : 2018/7/13 18:31
@connect : superonesfazai@gmail.com
'''
import requests
import gc
from random import randint
__all__ = [
'MyIpPools',
]
class MyIpPools(object):
def __init__(self, high_conceal=False):
        '''
        :param high_conceal: whether to use only high-anonymity proxies
        '''
super(MyIpPools, self).__init__()
self.high_conceal = high_conceal
def get_proxy_ip_from_ip_pool(self):
        '''
        Fetch proxy IPs from the proxy ip pool.
        :return: dict like {'http': ['http://183.136.218.253:80', ...]}
        '''
if self.high_conceal:
            base_url = 'http://127.0.0.1:8000/?types=0'  # types: 0 = high anonymity | 1 = anonymous | 2 = transparent
else:
base_url = 'http://127.0.0.1:8000'
try:
result = requests.get(base_url).json()
except Exception as e:
print(e)
return {'http': None}
result_ip_list = {}
result_ip_list['http'] = []
for item in result:
if item[2] > 7:
tmp_url = 'http://' + str(item[0]) + ':' + str(item[1])
result_ip_list['http'].append(tmp_url)
else:
delete_url = 'http://127.0.0.1:8000/delete?ip='
delete_info = requests.get(delete_url + item[0])
# pprint(result_ip_list)
return result_ip_list
def _get_random_proxy_ip(self):
        '''
        Randomly pick one proxy ip, formatted like 'http://175.6.2.174:8088'.
        :return:
        '''
ip_list = self.get_proxy_ip_from_ip_pool().get('http')
try:
if isinstance(ip_list, list):
                proxy_ip = ip_list[randint(0, len(ip_list) - 1)]  # pick a random proxy ip
else:
raise TypeError
except Exception:
            print('Failed to get a random ip from the pool... falling back to the local ip for crawling!')
proxy_ip = False
return proxy_ip
def __del__(self):
gc.collect()
# _ = MyIpPools()
# print(_._get_random_proxy_ip().replace('http://', ''))
|
import sys
def usage():
    print("accu_times.py <input.txt>")
def main():
if len(sys.argv) < 2:
usage()
sys.exit(0)
cnt = 0
acctime = 0.0
with open(sys.argv[1], "r") as f:
for l in f:
sl = l.split()
if len(sl) > 2:
try:
t = float(sl[1])
acctime += t
cnt += 1
print "%d, %.8f" % (cnt, acctime)
except:
pass
if __name__ == "__main__":
main()
|
# Generated by Django 3.0.6 on 2020-05-30 12:57
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('posting', '0003_auto_20200530_1216'),
]
operations = [
migrations.AlterField(
model_name='my_post',
name='my_post_date',
field=models.DateTimeField(default=datetime.datetime(2020, 5, 30, 15, 57, 7, 109421)),
),
]
|
from Treap import Treap
from math import log
class IKS:
def __init__(self):
self.treap = None
self.n = [0, 0]
@staticmethod
def KSThresholdForPValue(pvalue, N):
'''Threshold for KS Test given a p-value
Args:
pval (float): p-value.
N (int): the size of the samples.
Returns:
Threshold t to compare groups 0 and 1. The null-hypothesis is discarded if KS() > t.
'''
        ca = (-0.5 * log(pvalue)) ** 0.5
        return ca * (2.0 * N / N ** 2) ** 0.5
@staticmethod
def CAForPValue(pvalue):
'''ca for KS Test given a p-value
Args:
pval (float): p-value.
Returns:
Threshold the "ca" that can be used to compute a threshold for KS().
'''
return (-0.5 * log(pvalue)) ** 0.5
def KS(self):
'''Kolmogorov-Smirnov statistic. Both groups must have the same number of observations.
Returns:
The KS statistic D.
'''
assert(self.n[0] == self.n[1])
N = self.n[0]
if N == 0:
return 0
return max(self.treap.max_value, -self.treap.min_value) / N
def Kuiper(self):
'''Kuiper statistic. Both groups must have the same number of observations.
Returns:
The Kuiper statistic.
'''
assert(self.n[0] == self.n[1])
N = self.n[0]
if N == 0:
return 0
return (self.treap.max_value - self.treap.min_value) / N
def Add(self, obs, group):
'''Insert new observation into one of the groups.
Args:
            obs: the value of the observation. Tip: a tuple (actual value, random value) is recommended when there is overlap between groups or if values are not guaranteed to be mostly unique.
group (int): which group the observation belongs to. Must be either 0 or 1.
'''
group = 0 if group == 2 else group
assert(group == 0 or group == 1)
key = (obs, group)
self.n[group] += 1
left, left_g, right, val = None, None, None, None
left, right = Treap.SplitKeepRight(self.treap, key)
left, left_g = Treap.SplitGreatest(left)
val = 0 if left_g is None else left_g.value
left = Treap.Merge(left, left_g)
right = Treap.Merge(Treap(key, val), right)
Treap.SumAll(right, 1 if group == 0 else -1)
self.treap = Treap.Merge(left, right)
def Remove(self, obs, group):
'''Remove observation from one of the groups.
Args:
            obs: the value of the observation. Must be identical to a previously inserted observation (including the random element of a tuple, if this was the case).
group (int): which group the observation belongs to. Must be either 0 or 1.
'''
group = 0 if group == 2 else group
assert(group == 0 or group == 1)
key = (obs, group)
self.n[group] -= 1
left, right, right_l = None, None, None
left, right = Treap.SplitKeepRight(self.treap, key)
right_l, right = Treap.SplitSmallest(right)
if right_l is not None and right_l.key == key:
Treap.SumAll(right, -1 if group == 0 else 1)
else:
right = Treap.Merge(right_l, right)
self.treap = Treap.Merge(left, right)
def Test(self, ca = 1.95):
        '''Test whether the reference and sliding windows follow different probability distributions according to the KS test.
Args:
ca: ca is a parameter used to calculate the threshold for the Kolmogorov-Smirnov statistic. The default value corresponds to a p-value of 0.001. Use IKS.CAForPValue to obtain an appropriate ca.
Returns:
True if we **reject** the null-hypothesis that states that both windows have the same distribution. In other words, we can consider that the windows have now different distributions.
'''
ca = ca or 1.95
n = self.n[0]
return self.KS() > ca * (2 * n / n ** 2) ** 0.5
IKS.AddObservation = IKS.Add
IKS.RemoveObservation = IKS.Remove
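# A minimal usage sketch (not part of the original module): stream paired
# observations into the two windows, then run the incremental KS test.
if __name__ == "__main__":
    from random import random
    iks = IKS()
    for _ in range(200):
        # (value, tie-breaker) tuples keep keys unique, as Add() recommends
        iks.AddObservation((random(), random()), 0)
        iks.AddObservation((random(), random()), 1)
    print(iks.KS(), iks.Test(IKS.CAForPValue(0.001)))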
|
from web3 import Web3
import pandas as pd
import json
from tqdm import tqdm
curve_factory_addr = '0x0000000022D53366457F9d5E68Ec105046FC4383'
curve_factory_ropsten_addr = '0xc35DADB65012eC5796536bD9864eD8773aBc74C4'
with open('solidity/curve_abi.json') as file:
    curve_factory_abi = json.load(file)
w3 = Web3(Web3.HTTPProvider('https://mainnet.infura.io/v3/24596c5339fb414787e72d493fc6fd24'))
sushi_factory = w3.eth.contract(address=curve_factory_addr, abi=curve_factory_abi)
print(sushi_factory.all_functions())  # contract.functions is not callable; all_functions() lists the ABI's functions
|
import logging
from django.db.models.fields import TextField
from wagtail.admin.edit_handlers import FieldPanel
from wagtail.admin.edit_handlers import StreamFieldPanel
from wagtail.core import blocks
from wagtail.core.fields import RichTextField
from wagtail.core.fields import StreamField
from core.models import DiscoverUniBasePage
logger = logging.getLogger(__name__)
STUDENT_SATISFACTION_KEY = 'student_satisfaction'
ENTRY_INFO_KEY = 'entry_information'
AFTER_ONE_YEAR_KEY = 'after_one_year'
EARNINGS_AFTER_COURSE_KEY = 'earnings_after_the_course'
EMPLOYMENT_AFTER_COURSE_KEY = 'employment_after_the_course'
ACCREDITATION_KEY = 'professional_accreditation'
# New accordion section ('Graduate Perceptions') added; various sections of this file are affected.
# TODO: In order for the new accordion section to be rendered in the UI, an instance of the corresponding
# accordion panel must be created in the Wagtail admin site (http://0.0.0.0:8000/admin/);
# go to Pages > Home > Course Details > 'ACCORDIONS' section > Add (+ icon) > add a panel of the new type.
# Then Save Draft, then Publish.
GRADUATE_PERCEPTIONS_KEY = 'graduate_perceptions'
LINKS_TO_THE_INSTITUTION_WEBSITE_KEY = 'links_to_the_institution_website'
class AccordionPanel(blocks.StructBlock):
heading = blocks.CharBlock(required=False)
class SatisfactionDataSet(blocks.StructValue):
@staticmethod
def data_set():
return STUDENT_SATISFACTION_KEY
class EntryInfoDataSet(blocks.StructValue):
@staticmethod
def data_set():
return ENTRY_INFO_KEY
class AfterOneYearDataSet(blocks.StructValue):
@staticmethod
def data_set():
return AFTER_ONE_YEAR_KEY
class EarningsAfterCourseDataSet(blocks.StructValue):
@staticmethod
def data_set():
return EARNINGS_AFTER_COURSE_KEY
class EmploymentAfterCourseDataSet(blocks.StructValue):
@staticmethod
def data_set():
return EMPLOYMENT_AFTER_COURSE_KEY
class AccreditationDataSet(blocks.StructValue):
@staticmethod
def data_set():
return ACCREDITATION_KEY
class GraduatePerceptionsDataSet(blocks.StructValue):
@staticmethod
def data_set():
return GRADUATE_PERCEPTIONS_KEY
class LinksToTheInstitutionWebsiteDataSet(blocks.StructValue):
@staticmethod
def data_set():
return LINKS_TO_THE_INSTITUTION_WEBSITE_KEY
class SatisfactionBlock(AccordionPanel):
lead_text = blocks.CharBlock(required=False)
intro_body = blocks.RichTextBlock(blank=True)
teaching_stats_header = blocks.CharBlock(required=False)
learning_opportunities_stats_header = blocks.CharBlock(required=False)
assessment_stats_header = blocks.CharBlock(required=False)
support_stats_header = blocks.CharBlock(required=False)
organisation_stats_header = blocks.CharBlock(required=False)
learning_resources_stats_header = blocks.CharBlock(required=False)
learning_community_stats_header = blocks.CharBlock(required=False)
student_voice_stats_header = blocks.CharBlock(required=False)
nhs_placement_stats_header = blocks.CharBlock(required=False)
data_source = blocks.RichTextBlock(blank=True)
class Meta:
value_class = SatisfactionDataSet
class EntryInformationBlock(AccordionPanel):
qualification_heading = blocks.CharBlock(required=False)
qualification_intro = blocks.CharBlock(required=False)
qualification_label_explanation_heading = blocks.CharBlock(required=False)
qualification_label_explanation_body = blocks.RichTextBlock(blank=True)
qualification_data_source = blocks.RichTextBlock(blank=True)
tariffs_heading = blocks.CharBlock(required=False)
tariffs_intro = blocks.CharBlock(required=False)
tariffs_data_source = blocks.RichTextBlock(blank=True)
class Meta:
value_class = EntryInfoDataSet
class AfterOneYearBlock(AccordionPanel):
section_heading = blocks.CharBlock(required=False)
intro = blocks.CharBlock(required=False)
lead = blocks.CharBlock(required=False)
label_explanation_heading = blocks.CharBlock(required=False)
label_explanation_body = blocks.RichTextBlock(blank=True)
data_source = blocks.RichTextBlock(blank=True)
class Meta:
value_class = AfterOneYearDataSet
class EarningsAfterCourseBlock(AccordionPanel):
section_heading = blocks.CharBlock(required=False)
intro = blocks.RichTextBlock(blank=True)
average_earnings_inst_heading = blocks.RichTextBlock(blank=True)
institution_graduates_heading = blocks.RichTextBlock(blank=True)
after_fifteen_months_earnings_heading = blocks.CharBlock(required=False)
after_fifteen_months_range_explanation = blocks.RichTextBlock(blank=True)
after_fifteen_months_respondents_explanation = blocks.RichTextBlock(blank=True)
after_fifteen_months_no_of_graduates_explanation = blocks.RichTextBlock(blank=True)
after_fifteen_months_data_source = blocks.RichTextBlock(blank=True)
leo_respondents_explanation = blocks.RichTextBlock(blank=True)
after_three_years_earnings_heading = blocks.CharBlock(required=False)
after_five_years_earnings_heading = blocks.CharBlock(required=False)
after_three_five_years_data_source = blocks.RichTextBlock(blank=True)
average_earnings_sector_heading = blocks.RichTextBlock(blank=True)
# respondents_live_in_explanation_go = blocks.RichTextBlock(blank=True)
# respondents_live_in_explanation_leo = blocks.RichTextBlock(blank=True)
respondents_live_in_explanation = blocks.RichTextBlock(blank=True)
class Meta:
value_class = EarningsAfterCourseDataSet
class EmploymentAfterCourseBlock(AccordionPanel):
six_month_employment_lead = blocks.CharBlock(required=False)
six_month_employment_data_source = blocks.RichTextBlock(blank=True)
section_heading = blocks.RichTextBlock(required=False)
intro = blocks.CharBlock(blank=True)
six_month_employment_roles_heading = blocks.CharBlock(required=False)
six_month_employment_roles_label_explanation_heading = blocks.CharBlock(required=False)
six_month_employment_roles_data_source = blocks.RichTextBlock(blank=True)
occupation_types_label_explanation_heading = blocks.CharBlock(required=False)
occupation_types_label_explanation_body = blocks.RichTextBlock(blank=True)
class Meta:
value_class = EmploymentAfterCourseDataSet
class AccreditationBlock(AccordionPanel):
section_heading = blocks.CharBlock(required=False)
class Meta:
value_class = AccreditationDataSet
class GraduatePerceptionsBlock(AccordionPanel):
lead_text = blocks.CharBlock(required=False)
intro_body = blocks.RichTextBlock(blank=True)
perception_of_work_heading = blocks.CharBlock(required=False)
data_source = blocks.RichTextBlock(blank=True)
usefulness_explanation_heading = blocks.CharBlock(required=False)
usefulness_explanation = blocks.RichTextBlock(blank=True)
meaningfulness_explanation_heading = blocks.CharBlock(required=False)
meaningfulness_explanation = blocks.RichTextBlock(blank=True)
future_explanation_heading = blocks.CharBlock(required=False)
future_explanation = blocks.RichTextBlock(blank=True)
class Meta:
value_class = GraduatePerceptionsDataSet
class LinksToTheInstitutionWebsiteBlock(AccordionPanel):
course_information_on_website_header = blocks.RichTextBlock(blank=True)
class Meta:
value_class = LinksToTheInstitutionWebsiteDataSet
class CourseDetailPage(DiscoverUniBasePage):
accordions = StreamField([
('satisfaction_panel', SatisfactionBlock(required=True, icon='collapse-down')),
('entry_information_panel', EntryInformationBlock(required=True, icon='collapse-down')),
('after_one_year_panel', AfterOneYearBlock(required=True, icon='collapse-down')),
('accreditation_panel', AccreditationBlock(required=True, icon='collapse-down')),
('earningsafter_course_panel', EarningsAfterCourseBlock(required=True, icon='collapse-down')),
('employment_after_course_panel', EmploymentAfterCourseBlock(required=True, icon='collapse-down')),
('graduate_perceptions_panel', GraduatePerceptionsBlock(required=True, icon='collapse-down')),
('links_to_the_institution_website_panel',
LinksToTheInstitutionWebsiteBlock(required=True, icon='collapse-down'))
])
uni_site_links_header = TextField(blank=True)
content_panels = DiscoverUniBasePage.content_panels + [
StreamFieldPanel('accordions'),
FieldPanel('uni_site_links_header'),
]
class CourseComparisonPage(DiscoverUniBasePage):
heading = TextField(blank=True)
lead = TextField(blank=True)
remove_text = RichTextField(blank=True)
save_text = RichTextField(blank=True)
compare_heading = TextField(blank=True)
accordions = StreamField([
('satisfaction_panel', SatisfactionBlock(required=True, icon='collapse-down')),
('entry_information_panel', EntryInformationBlock(required=True, icon='collapse-down')),
('after_one_year_panel', AfterOneYearBlock(required=True, icon='collapse-down')),
('accreditation_panel', AccreditationBlock(required=True, icon='collapse-down')),
('earningsafter_course_panel', EarningsAfterCourseBlock(required=True, icon='collapse-down')),
('employment_after_course_panel', EmploymentAfterCourseBlock(required=True, icon='collapse-down')),
('graduate_perceptions_panel', GraduatePerceptionsBlock(required=True, icon='collapse-down')),
('links_to_the_institution_website_panel',
LinksToTheInstitutionWebsiteBlock(required=True, icon='collapse-down'))
])
content_panels = DiscoverUniBasePage.content_panels + [
FieldPanel('heading'),
FieldPanel('lead'),
FieldPanel('remove_text'),
FieldPanel('save_text'),
FieldPanel('compare_heading'),
StreamFieldPanel('accordions'),
]
class CourseManagePage(DiscoverUniBasePage):
heading = TextField(blank=True)
lead = TextField(blank=True)
save_text = RichTextField(blank=True)
compare_text = RichTextField(blank=True)
none_selected_text = RichTextField(blank=True)
one_selected_text = RichTextField(blank=True)
content_panels = DiscoverUniBasePage.content_panels + [
FieldPanel('heading'),
FieldPanel('lead'),
FieldPanel('save_text'),
FieldPanel('compare_text'),
FieldPanel('none_selected_text'),
FieldPanel('one_selected_text'),
]
|
import os
import psycopg2
import json
import csv
import time
ELEPHANT_HOST = os.getenv("ELEPHANT_HOST")
ELEPHANT_NAME = os.getenv("ELEPHANT_NAME")
ELEPHANT_PASSWORD = os.getenv("ELEPHANT_PASSWORD")
print(ELEPHANT_HOST)
dsn = "host={} dbname={} user={} password={}".format(ELEPHANT_HOST, ELEPHANT_NAME, ELEPHANT_NAME, ELEPHANT_PASSWORD)
def test_connection():
"""Pārbauda pieslēgumu datubāzei
Returns:
string -- tekstu ar datubāzes versiju
"""
# saformatē pieslēgšanās parametrus
# dsn = "host={} dbname={} user={} password={}".format(ELEPHANT_HOST, ELEPHANT_NAME, ELEPHANT_NAME, ELEPHANT_PASSWORD)
# izveido pieslēgumu
conn = psycopg2.connect(dsn)
# izveido kursoru
cur = conn.cursor()
# aizsūta kursoram SQL vaicājumu
cur.execute("SELECT version();")
# pieprasa no kursora atbildi
record = cur.fetchone()
result = "You are connected to - " + str(record)
# aizver kursoru
cur.close()
# aizver peislēgumu daubāzei
conn.close()
return result
def ierakstit1(parametri):
    """Insert a link (url, title, review, author) and return the new row id."""
conn = psycopg2.connect(dsn)
cur = conn.cursor()
sql="""INSERT INTO saites (url,nosaukums,atsauksme,autors)
VALUES ({}) RETURNING id;"""
cur.execute(sql.format(parametri))
jaunais_id=cur.fetchone()[0]
conn.commit()
cur.close()
conn.close()
return jaunais_id
def ierakstit2(tags, saite):
    """Insert a tag-to-link relation and return a connection check."""
conn = psycopg2.connect(dsn)
cur = conn.cursor()
sql="""INSERT INTO tagi_saites (tag_id,saite_id)
VALUES (%s,%s) RETURNING saraksta_id;"""
cur.execute(sql,(tags,saite))
conn.commit()
cur.close()
conn.close()
return test_connection()
def nolasit(parametri=0):
    """Read data: 0 = all links with their tags, 1 = categories, 2 = tags;
    any other value is executed as a ready-made query string."""
conn = psycopg2.connect(dsn)
cur = conn.cursor()
    if parametri == 0:  # reads absolutely all the data
kverijs='''SELECT id, url, nosaukums, atsauksme, autors, tag_name, tagi.tag_id, seciba, kategorija FROM saites LEFT JOIN tagi_saites ON saites.id=tagi_saites.saite_id LEFT JOIN tagi ON tagi_saites.tag_id=tagi.tag_id ORDER BY id DESC, kategorija ASC, tagi.tag_id ASC '''
cur.execute(kverijs)
r = [dict((cur.description[i][0], value) \
for i, value in enumerate(row)) for row in cur.fetchall()]
elif parametri == 1:
kverijs='''SELECT name FROM kategorijas '''
cur.execute(kverijs)
r = [dict((cur.description[i][0], value) \
for i, value in enumerate(row)) for row in cur.fetchall()]
elif parametri == 2:
kverijs='''SELECT * FROM tagi ORDER BY kategorija ASC, seciba ASC'''
cur.execute(kverijs)
r = [dict((cur.description[i][0], value) \
for i, value in enumerate(row)) for row in cur.fetchall()]
    else:  # filtered data is handled here
cur.execute(parametri)
r = [dict((cur.description[i][0], value) \
for i, value in enumerate(row)) for row in cur.fetchall()]
return r
def tekstapstrade(teksts, ietvars, saraksts):
    """Build the SQL query for a text search: teksts is the search string,
    ietvars picks the column ('1' = title, '2' = review, otherwise author),
    and saraksts is a list of tag ids that must all match.
    NOTE: values are interpolated directly into the SQL string."""
print(teksts,ietvars,saraksts)
if len(saraksts) == 0:
jaunaiskverijsvidus = ""
else:
jaunaiskverijsvidus = """WHERE EXISTS (SELECT 1 FROM tagi_saites WHERE (tagi_saites.tag_id = {} AND tagi_saites.saite_id=id)
""".format(saraksts[0])
for tags in saraksts[1:]:
jaunaiskverijsvidus +="""
AND EXISTS (SELECT 1 FROM tagi_saites WHERE tagi_saites.tag_id = {} AND tagi_saites.saite_id=id)
""".format(tags)
jaunaiskverijsvidus += " ) "
if teksts == "":
if len(saraksts) == 0:
jaunaiskverijs = 0
else:
jaunaiskverijssakums = """
SELECT id, url, nosaukums, atsauksme, autors, tag_name, tagi.seciba, tagi.tag_id, kategorija
FROM
(SELECT * FROM saites """
jaunaiskverijsbeigas = """
) AS a
LEFT JOIN tagi_saites ON a.id=tagi_saites.saite_id LEFT JOIN tagi ON tagi_saites.tag_id = tagi.tag_id
ORDER BY id DESC, kategorija ASC, tagi.seciba ASC
"""
jaunaiskverijs = jaunaiskverijssakums + jaunaiskverijsvidus + jaunaiskverijsbeigas
print(jaunaiskverijs)
else:
jaunaiskverijssakums = """
SELECT * FROM (
SELECT id, url, nosaukums, atsauksme, autors, tag_name, tagi.tag_id, tagi.seciba, kategorija
FROM
(SELECT * FROM saites """
jaunaiskverijsbeigas = """
) AS a
LEFT JOIN tagi_saites ON a.id=tagi_saites.saite_id LEFT JOIN tagi ON tagi_saites.tag_id = tagi.tag_id
ORDER BY id DESC, kategorija ASC, tagi.seciba ASC) AS tabula WHERE
"""
if ietvars == '1':
jaunaiskverijsbeigas += """tabula.nosaukums LIKE '%{}%' """.format(teksts)
elif ietvars == '2':
jaunaiskverijsbeigas += """tabula.atsauksme LIKE '%{}%' """.format(teksts)
else:
jaunaiskverijsbeigas += """tabula.autors LIKE '%{}%' """.format(teksts)
jaunaiskverijs = jaunaiskverijssakums + jaunaiskverijsvidus + jaunaiskverijsbeigas
return jaunaiskverijs
def dzest(id):
    """Delete a link and its tag relations by link id."""
conn = psycopg2.connect(dsn)
cur = conn.cursor()
kverijs = """DELETE FROM tagi_saites WHERE saite_id = {};
DELETE FROM saites WHERE id = {};""".format(id, id)
cur.execute(kverijs)
conn.commit()
cur.close()
conn.close()
return kverijs
def saisuSaraksts():
    """Return a list of all stored link URLs."""
conn = psycopg2.connect(dsn)
cur = conn.cursor()
kverijs = """SELECT url FROM saites"""
cur.execute(kverijs)
saraksts=[r[0] for r in cur.fetchall()]
conn.commit()
cur.close()
conn.close()
return saraksts
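# A minimal sketch (not part of the original module) showing what
# tekstapstrade() produces; the tag ids 3 and 5 are hypothetical.
if __name__ == "__main__":
    # search for "django" in the title, filtered to links tagged 3 and 5
    print(tekstapstrade("django", '1', [3, 5]))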
|
from Common.hive_connection import HiveConnection
import time
from Common.helper import format_two_point_time, sql_to_string
class Interpolation:
def __init__(self, config):
self.config = config
self.hc = HiveConnection()
def calculate_interpolation(self):
self.convert_cdr_to_array_format()
self.create_trip_format()
self.create_trip_24hr_padding()
self.create_poi_relocation()
self.create_route_interpolation()
self.export_to_csv()
def convert_cdr_to_array_format(self):
provider_prefix = self.config.provider_prefix
cursor = self.hc.cursor
print('########## CREATE CDR BY UID ARRAY FORMAT TABLE ##########')
timer = time.time()
print('Checking and dropping {provider_prefix}_cdr_by_uid table if existing.'
.format(provider_prefix=provider_prefix))
cursor.execute('DROP TABLE IF EXISTS {provider_prefix}_cdr_by_uid'
.format(provider_prefix=provider_prefix))
print('Checked and dropped {provider_prefix}_cdr_by_uid table if existing. '
'Elapsed time: {time} seconds'
.format(provider_prefix=provider_prefix, time=format_two_point_time(timer, time.time())))
timer = time.time()
print('Creating {provider_prefix}_cdr_by_uid table'
.format(provider_prefix=provider_prefix))
raw_sql = sql_to_string('interpolation/create_cdr_by_uid.sql')
query = raw_sql.format(provider_prefix=provider_prefix)
cursor.execute(query)
print('Created {provider_prefix}_cdr_by_uid table. Elapsed time: {time} seconds'
.format(provider_prefix=provider_prefix, time=format_two_point_time(timer, time.time())))
timer = time.time()
raw_sql = sql_to_string('interpolation/insert_cdr_by_uid.sql')
print('Inserting into {provider_prefix}_cdr_by_uid table'
.format(provider_prefix=provider_prefix))
query = raw_sql.format(provider_prefix=provider_prefix, max_size_cdr_by_uid=self.config.max_size_cdr_by_uid)
cursor.execute(query)
print('Inserted into {provider_prefix}_cdr_by_uid table. Elapsed time: {time} seconds'
.format(provider_prefix=provider_prefix, time=format_two_point_time(timer, time.time())))
print('########## FINISHED CREATING CDR BY UID TABLE ##########')
def create_trip_format(self):
provider_prefix = self.config.provider_prefix
cursor = self.hc.cursor
print('########## CREATE CDR BY UID ARRAY TRIP FORMAT TABLE ##########')
timer = time.time()
print('Checking and dropping {provider_prefix}_cdr_by_uid_trip table if existing.'
.format(provider_prefix=provider_prefix))
cursor.execute('DROP TABLE IF EXISTS {provider_prefix}_cdr_by_uid_trip'
.format(provider_prefix=provider_prefix))
print('Checked and dropped {provider_prefix}_cdr_by_uid_trip table if existing. '
'Elapsed time: {time} seconds'
.format(provider_prefix=provider_prefix, time=format_two_point_time(timer, time.time())))
timer = time.time()
print('Creating {provider_prefix}_cdr_by_uid_trip table'
.format(provider_prefix=provider_prefix))
raw_sql = sql_to_string('interpolation/create_trip_format.sql')
query = raw_sql.format(provider_prefix=provider_prefix)
cursor.execute(query)
print('Created {provider_prefix}_cdr_by_uid_trip table. Elapsed time: {time} seconds'
.format(provider_prefix=provider_prefix, time=format_two_point_time(timer, time.time())))
timer = time.time()
raw_sql = sql_to_string('interpolation/insert_trip_format.sql')
print('Inserting into {provider_prefix}_cdr_by_uid_trip table'
.format(provider_prefix=provider_prefix))
query = raw_sql.format(provider_prefix=provider_prefix)
cursor.execute(query)
print('Inserted into {provider_prefix}_cdr_by_uid_trip table. Elapsed time: {time} seconds'
.format(provider_prefix=provider_prefix, time=format_two_point_time(timer, time.time())))
print('########## FINISHED CREATING CDR BY UID TRIP FORMAT TABLE ##########')
def create_trip_24hr_padding(self):
provider_prefix = self.config.provider_prefix
cursor = self.hc.cursor
print('########## CREATE TRIP 24 HR PADDING TABLE ##########')
timer = time.time()
print('Checking and dropping {provider_prefix}_cdr_by_uid_trip_organized_array_apd table if existing.'
.format(provider_prefix=provider_prefix))
cursor.execute('DROP TABLE IF EXISTS {provider_prefix}_cdr_by_uid_trip_organized_array_apd'
.format(provider_prefix=provider_prefix))
print('Checked and dropped {provider_prefix}_cdr_by_uid_trip_organized_array_apd table if existing. '
'Elapsed time: {time} seconds'
.format(provider_prefix=provider_prefix, time=format_two_point_time(timer, time.time())))
timer = time.time()
print('Creating {provider_prefix}_cdr_by_uid_trip_organized_array_apd table'
.format(provider_prefix=provider_prefix))
raw_sql = sql_to_string('interpolation/create_trip_24_hr_padding.sql')
query = raw_sql.format(provider_prefix=provider_prefix)
cursor.execute(query)
print('Created {provider_prefix}_cdr_by_uid_trip_organized_array_apd table. Elapsed time: {time} seconds'
.format(provider_prefix=provider_prefix, time=format_two_point_time(timer, time.time())))
timer = time.time()
raw_sql = sql_to_string('interpolation/insert_trip_24_hr_padding.sql')
print('Inserting into {provider_prefix}_cdr_by_uid_trip_organized_array_apd table'
.format(provider_prefix=provider_prefix))
query = raw_sql.format(provider_prefix=provider_prefix)
cursor.execute(query)
print('Inserted into {provider_prefix}_cdr_by_uid_trip_organized_array_apd table. Elapsed time: {time} seconds'
.format(provider_prefix=provider_prefix, time=format_two_point_time(timer, time.time())))
print('########## FINISHED TRIP 24 HR PADDING TABLE ##########')
def create_poi_relocation(self):
provider_prefix = self.config.provider_prefix
cursor = self.hc.cursor
print('########## CREATE POI RELOCATION TABLE ##########')
timer = time.time()
print('Checking and dropping {provider_prefix}_cdr_by_uid_trip_realloc_array_apd table if existing.'
.format(provider_prefix=provider_prefix))
cursor.execute('DROP TABLE IF EXISTS {provider_prefix}_cdr_by_uid_trip_realloc_array_apd'
.format(provider_prefix=provider_prefix))
print('Checked and dropped {provider_prefix}_cdr_by_uid_trip_realloc_array_apd table if existing. '
'Elapsed time: {time} seconds'
.format(provider_prefix=provider_prefix, time=format_two_point_time(timer, time.time())))
timer = time.time()
print('Creating {provider_prefix}_cdr_by_uid_trip_realloc_array_apd table'
.format(provider_prefix=provider_prefix))
raw_sql = sql_to_string('interpolation/create_poi_relocation.sql')
query = raw_sql.format(provider_prefix=provider_prefix)
cursor.execute(query)
print('Created {provider_prefix}_cdr_by_uid_trip_realloc_array_apd table. Elapsed time: {time} seconds'
.format(provider_prefix=provider_prefix, time=format_two_point_time(timer, time.time())))
timer = time.time()
raw_sql = sql_to_string('interpolation/insert_poi_relocation.sql')
print('Inserting into {provider_prefix}_cdr_by_uid_trip_realloc_array_apd table'
.format(provider_prefix=provider_prefix))
query = raw_sql.format(provider_prefix=provider_prefix,
poi=self.config.interpolation_poi_file_location.split('/')[-1])
cursor.execute(query)
print('Inserted into {provider_prefix}_cdr_by_uid_trip_realloc_array_apd table. Elapsed time: {time} seconds'
.format(provider_prefix=provider_prefix, time=format_two_point_time(timer, time.time())))
print('########## FINISHED CREATING POI RELOCATION TABLE ##########')
def create_route_interpolation(self):
provider_prefix = self.config.provider_prefix
cursor = self.hc.cursor
print('########## CREATE ROUTE INTERPOLATION TABLE ##########')
timer = time.time()
print('Checking and dropping {provider_prefix}_cdr_by_uid_trip_routing_array_apd table if existing.'
.format(provider_prefix=provider_prefix))
cursor.execute('DROP TABLE IF EXISTS {provider_prefix}_cdr_by_uid_trip_routing_array_apd'
.format(provider_prefix=provider_prefix))
print('Checked and dropped {provider_prefix}_cdr_by_uid_trip_routing_array_apd table if existing. '
'Elapsed time: {time} seconds'
.format(provider_prefix=provider_prefix, time=format_two_point_time(timer, time.time())))
timer = time.time()
print('Creating {provider_prefix}_cdr_by_uid_trip_routing_array_apd table'
.format(provider_prefix=provider_prefix))
raw_sql = sql_to_string('interpolation/create_route_interpolation.sql')
query = raw_sql.format(provider_prefix=provider_prefix)
cursor.execute(query)
print('Created {provider_prefix}_cdr_by_uid_trip_routing_array_apd table. Elapsed time: {time} seconds'
.format(provider_prefix=provider_prefix, time=format_two_point_time(timer, time.time())))
timer = time.time()
raw_sql = sql_to_string('interpolation/insert_route_interpolation.sql')
print('Inserting into {provider_prefix}_cdr_by_uid_trip_routing_array_apd table'
.format(provider_prefix=provider_prefix))
query = raw_sql.format(provider_prefix=provider_prefix,
max_size_interpolation=self.config.max_size_interpolation,
osm=self.config.interpolation_osm_file_location.split('/')[-1],
voronoi=self.config.interpolation_voronoi_file_location.split('/')[-1])
cursor.execute(query)
print('Inserted into {provider_prefix}_cdr_by_uid_trip_routing_array_apd table. Elapsed time: {time} seconds'
.format(provider_prefix=provider_prefix, time=format_two_point_time(timer, time.time())))
print('########## FINISHED ROUTE INTERPOLATION TABLE ##########')
def export_to_csv(self):
provider_prefix = self.config.provider_prefix
cursor = self.hc.cursor
print('########## Exporting route interpolation to CSV ##########')
timer = time.time()
raw_sql = sql_to_string('interpolation/export_to_gps_format.sql')
query = raw_sql.format(provider_prefix=provider_prefix)
cursor.execute(query)
print('Exported to CSV. Elapsed time: {time} seconds'
.format(provider_prefix=provider_prefix, time=format_two_point_time(timer, time.time())))
print('########## FINISHED EXPORTING, FILE LOCATED IN /tmp/hive/cdr_interpolation ##########')
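# A minimal usage sketch (not part of the original module); the _DemoConfig
# attributes are hypothetical stand-ins for the real pipeline config, and
# the run only works against a live HiveConnection.
if __name__ == '__main__':
    class _DemoConfig:
        provider_prefix = 'demo'
        max_size_cdr_by_uid = 1000000
        max_size_interpolation = 1000000
        interpolation_poi_file_location = '/tmp/poi.csv'
        interpolation_osm_file_location = '/tmp/osm.csv'
        interpolation_voronoi_file_location = '/tmp/voronoi.csv'
    Interpolation(_DemoConfig()).calculate_interpolation()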
|
from __future__ import annotations
import copy
def counting_sort(array: list[float], exponent: int) -> list[float]:
length = len(array)
output = [0] * (length)
count = [0] * (10)
for index in range(0, length):
_index = int(array[index] // exponent)
count[_index % 10] += 1
for index in range(1, 10):
count[index] += count[index - 1]
index = length - 1
while index >= 0:
_index = int(array[index] // exponent)
output[count[_index % 10] - 1] = array[index]
count[_index % 10] -= 1
index -= 1
return output
def radix_sort(array: list[float]) -> list[float]:
    _array = copy.deepcopy(array)
    maximum = max(_array)
    exponent = 1
    while maximum // exponent > 0:
        # sort the working copy (not the original input) so earlier digit
        # passes are preserved, and loop until the most significant digit
        _array = counting_sort(_array, exponent)
        exponent *= 10
    return _array
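# A quick sanity check (not part of the original module).
if __name__ == "__main__":
    assert radix_sort([170, 45, 75, 90, 802, 24, 2, 66]) == \
        [2, 24, 45, 66, 75, 90, 170, 802]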
|
import json
import requests
import time
from .exceptions import *
from .version import __version__
FORDONSPOSITION_URL = 'https://api.sl.se/fordonspositioner/GetData?' \
'type={}&pp=false&cacheControl={}'
TRAFIKLAB_URL = 'https://api.sl.se/api2/'
SI2_URL = TRAFIKLAB_URL + 'deviations.json?key={}&siteid={}&lineNumber={}'
TL2_URL = TRAFIKLAB_URL + 'trafficsituation.json?key={}'
RI4_URL = TRAFIKLAB_URL + 'realtimedeparturesV4.json?key={}&siteid={}' \
'&timeWindow={}'
PU1_URL = TRAFIKLAB_URL + 'typeahead.json?key={}&searchstring={}' \
'&stationsonly=False&maxresults=25'
TP3_URL = TRAFIKLAB_URL + 'TravelplannerV3_1/trip.json?key={}&originExtId={}' \
'&destExtId={}&originCoordLat={}' \
'&originCoordLong={}&destCoordLat={}' \
'&destCoordLong={}'
USER_AGENT = "pyHASL/"+__version__
class fpapi(object):
def __init__(self, timeout=None):
self._timeout = timeout
def version(self):
return __version__
def request(self, type):
if type not in ('PT', 'RB', 'TVB', 'SB', 'LB',
'SpvC', 'TB1', 'TB2', 'TB3'):
raise HASL_Error(-1, "Traffic type is not valid",
"Must be one of 'PT','RB','TVB','SB',"
"'LB','SpvC','TB1','TB2','TB3'")
try:
request = requests.get(FORDONSPOSITION_URL.format(type,
time.time()),
headers={"User-agent": USER_AGENT},
allow_redirects=True,
timeout=self._timeout)
except Exception as e:
            raise HASL_HTTP_Error(997, "An HTTP error occurred", repr(e))
response = json.loads(request.json())
result = []
for trip in response['Trips']:
result.append(trip)
return result
class haslapi(object):
def __init__(self, timeout=None):
self._timeout = timeout
def version(self):
return __version__
def _get(self, url):
api_errors = {
            1001: 'API key is over quota',
1002: 'API key is invalid',
}
try:
resp = requests.get(url,
headers={"User-agent": USER_AGENT},
allow_redirects=True,
timeout=self._timeout)
except Exception as e:
            raise HASL_HTTP_Error(997, "An HTTP error occurred", repr(e))
try:
jsonResponse = resp.json()
except Exception as e:
            raise HASL_API_Error(998, "A parsing error occurred", repr(e))
if not jsonResponse:
raise HASL_Error(999, "Internal error", "jsonResponse is empty")
if 'StatusCode' in jsonResponse:
if jsonResponse['StatusCode'] == 0:
return jsonResponse
apiErrorText = api_errors.get(jsonResponse['StatusCode'])
if apiErrorText:
raise HASL_API_Error(jsonResponse['StatusCode'],
apiErrorText,
jsonResponse['Message'])
else:
raise HASL_API_Error(jsonResponse['StatusCode'],
"Unknown API-response code encountered",
jsonResponse['Message'])
elif 'Trip' in jsonResponse:
return jsonResponse
elif 'Sites' in jsonResponse:
return jsonResponse
else:
raise HASL_Error(-100, "ResponseType is not known")
class pu1api(haslapi):
def __init__(self, api_token, timeout=None):
super().__init__(timeout)
self._api_token = api_token
def request(self, searchstring):
return self._get(PU1_URL.format(self._api_token, searchstring))
class tp3api(haslapi):
def __init__(self, api_token, timeout=None):
super().__init__(timeout)
self._api_token = api_token
def request(self, origin, destination, orgLat, orgLong, destLat, destLong):
return self._get(TP3_URL.format(self._api_token, origin, destination,
orgLat, orgLong, destLat, destLong))
class ri4api(haslapi):
def __init__(self, api_token, siteid, window, timeout=None):
super().__init__(timeout)
self._api_token = api_token
self._siteid = siteid
self._window = window
def request(self):
return self._get(RI4_URL.format(self._api_token,
self._siteid, self._window))
class si2api(haslapi):
def __init__(self, api_token, siteid, lines, timeout=None):
super().__init__(timeout)
self._api_token = api_token
self._siteid = siteid
self._lines = lines
def request(self):
return self._get(SI2_URL.format(self._api_token,
self._siteid, self._lines))
class tl2api(haslapi):
def __init__(self, api_token, timeout=None):
super().__init__(timeout)
self._api_token = api_token
def request(self):
return self._get(TL2_URL.format(self._api_token))
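# A minimal usage sketch (not part of the original module); the API key and
# site id below are hypothetical placeholders.
if __name__ == "__main__":
    departures = ri4api("YOUR-API-KEY", 9192, 60)
    print(departures.request())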
|
"""Main module."""
import logging
import sys
from loguru import logger
def main(verbose=0):
"""Main function"""
# Console script for pyargcbr
if verbose > 0:
logger.remove()
logger.add(sys.stderr, level="DEBUG")
else:
logger.remove()
logger.add(sys.stderr, level="INFO")
logging.getLogger("aiohttp").setLevel(logging.WARNING)
logging.getLogger("aioopenssl").setLevel(logging.WARNING)
logging.getLogger("aiosasl").setLevel(logging.WARNING)
logging.getLogger("asyncio").setLevel(logging.WARNING)
logging.getLogger("spade").setLevel(logging.WARNING)
if verbose > 2:
logging.getLogger("spade").setLevel(logging.INFO)
if verbose > 3:
logging.getLogger("aioxmpp").setLevel(logging.INFO)
else:
logging.getLogger("aioxmpp").setLevel(logging.WARNING)
if __name__ == "__main__":
main()
|
# -*- coding: ascii -*-
import numpy as np
class GTD:
def __init__(self, initial_x):
assert (len(initial_x.shape) == 1)
n = len(initial_x)
self._last_x = np.copy(initial_x)
self.e = np.zeros(n)
self.h = np.zeros(n)
self.w = np.zeros(n)
def predict(self, x):
"""Return the current prediction for a given set of features x."""
return np.dot(self.w, x)
def update(self, reward, gamma, x, alpha, eta, lambda_, rho=1):
delta = reward + gamma * self.predict(x) - self.predict(self._last_x)
self.w += alpha * (delta * self.e - gamma *
(1 - lambda_) * x * np.dot(self.e, self.h))
self.h += alpha * eta * (
delta * self.e - np.dot(self.h, self._last_x) * self._last_x)
self.e *= lambda_ * gamma
self.e += x
self.e *= rho
np.copyto(self._last_x, x)
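# A minimal usage sketch (not part of the original module): one TD update
# over random 8-dimensional features; the step sizes are hypothetical.
if __name__ == "__main__":
    features = np.random.rand(8)
    agent = GTD(features)
    next_features = np.random.rand(8)
    agent.update(reward=1.0, gamma=0.9, x=next_features,
                 alpha=0.1, eta=0.01, lambda_=0.8)
    print(agent.predict(next_features))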
|
from django.db import models
from .base_model import BaseModel
from .site import Site
class ExternalSite(BaseModel):
    # on_delete matches the implicit CASCADE default of Django < 2.0
    site = models.ForeignKey(Site, on_delete=models.CASCADE)
url = models.URLField('URL')
title = models.CharField(max_length=128)
class Meta:
app_label = 'sculpture'
|
from django.shortcuts import render, redirect
from django.contrib.contenttypes.models import ContentType
from django.urls import reverse
from django.http import JsonResponse
from .models import Comment
from .forms import CommentForm
def update_comment(request):
    '''
    # Previous implementation, kept here for reference:
    referer = request.META.get('HTTP_REFERER', reverse('home'))
    # validate the input
    if not request.user.is_authenticated:
        return render(request, 'error.html', {'message': 'user is not logged in', 'redirect_to': referer})
    text = request.POST.get('text', '').strip()
    if text == '':
        return render(request, 'error.html', {'message': 'comment text is empty', 'redirect_to': referer})
    try:
        content_type = request.POST.get('content_type', '')
        object_id = int(request.POST.get('object_id', ''))
        #print(content_type)
        #print(object_id)
        model_obj = ContentType.objects.get(model=content_type).model_class().objects.get(pk=object_id)
        #model_obj = model_class.objects.get(pk=object_id)
    except Exception as e:
        #print(e)
        return render(request, 'error.html', {'message': 'comment target does not exist', 'redirect_to': referer})
    # checks passed; save the data
    comment = Comment()
    comment.user = request.user
    comment.text = text
    comment.content_object = model_obj
    comment.save()
    return redirect(referer)
    '''
referer = request.META.get('HTTP_REFERER', reverse('home'))
comment_form = CommentForm(request.POST, user=request.user)
data = {}
if comment_form.is_valid():
comment = Comment()
comment.user = comment_form.cleaned_data['user']
comment.text = comment_form.cleaned_data['text']
comment.content_object = comment_form.cleaned_data['content_object']
parent = comment_form.cleaned_data['parent']
        if parent is not None:
            comment.root = parent.root if parent.root is not None else parent
comment.parent = parent
comment.reply_to = parent.user
comment.save()
data['status'] = 'SUCCESS'
data['username'] = comment.user.get_nickname_or_username()
data['comment_time'] = comment.comment_time.strftime('%Y-%m-%d %H:%M')
data['text'] = comment.text
data['content_type'] = ContentType.objects.get_for_model(comment).model
        if parent is not None:
data['reply_to'] = comment.reply_to.get_nickname_or_username()
else:
data['reply_to'] = ''
data['pk'] = comment.pk
        data['root_pk'] = comment.root.pk if comment.root is not None else ''
else:
#return render(request, 'error.html', {'message': comment_form.errors, 'redirect_to': referer})
data['status'] = 'ERROR'
data['message'] = list(comment_form.errors.values())[0][0]
return JsonResponse(data) #redirect(referer)
|
# -*- coding: utf-8 -*-
"""
@author: Adam Eaton
Contains functions to retrieve and store data for the three Cryptocurrencies.
Doesn't run as part of the main application but instead runs in its own instance.
"""
import Slack_Notify as SN
import time
import requests
import csv
BTC_Ticker_Add = "https://www.okcoin.com/api/v1/ticker.do?symbol=btc_usd"
BTC_Depth_Add = "https://www.okcoin.com/api/v1/depth.do?symbol=btc_usd&size=60"
ETH_Ticker_Add = "https://www.okcoin.com/api/v1/ticker.do?symbol=eth_usd"
ETH_Depth_Add = "https://www.okcoin.com/api/v1/depth.do?symbol=eth_usd&size=60"
LTC_Ticker_Add = "https://www.okcoin.com/api/v1/ticker.do?symbol=ltc_usd"
LTC_Depth_Add = "https://www.okcoin.com/api/v1/depth.do?symbol=ltc_usd&size=60"
currency_Add = "https://api.fixer.io/latest?base=USD"
def run_queries():
btc_data = BTC_query()
eth_data = ETH_query()
ltc_data = LTC_query()
btc_str = str(btc_data)
eth_str = str(eth_data)
ltc_str = str(ltc_data)
if(btc_str[:1] == "[" and eth_str[:1] == "[" and ltc_str[:1] == "[" ):
with open(r'data/btc_data.csv', 'a', newline = '') as btc:
writer = csv.writer(btc)
writer.writerow(btc_data)
with open(r'data/eth_data.csv', 'a', newline = '') as eth:
writer = csv.writer(eth)
writer.writerow(eth_data)
with open(r'data/ltc_data.csv', 'a', newline = '') as ltc:
writer = csv.writer(ltc)
writer.writerow(ltc_data)
print("Tick")
else:
SN.send_notification("Data_Collection.py - run_queries() -- ")
return
# Possibly look into re-implementing later on; as of now (18/01/18) it's providing inaccurate results
"""
def currency_query(val_usd):
currency_response = requests.get(currency_Add).json()
currency_value = currency_response['rates']['EUR']
return float(val_usd) * float(currency_value)
"""
def BTC_query():
BTC_Ticker = requests.get(BTC_Ticker_Add).json()
BTC_Depth = requests.get(BTC_Depth_Add).json()
BTC_Price_USD = float(BTC_Ticker['ticker']['last'])
# BTC_Price_EUR = currency_query(BTC_Price_USD)
BTC_Date = BTC_Ticker['date']
BTC_vBid = sum([bid[1] for bid in BTC_Depth['bids']])
BTC_vAsk = sum([ask[1] for ask in BTC_Depth['asks']])
values = [BTC_Date, BTC_Price_USD, BTC_vBid, BTC_vAsk]
return values
def ETH_query():
ETH_Ticker = requests.get(ETH_Ticker_Add).json()
ETH_Depth = requests.get(ETH_Depth_Add).json()
ETH_Price_USD = float(ETH_Ticker['ticker']['last'])
#ETH_Price_EUR = currency_query(ETH_Price_USD)
ETH_Date = ETH_Ticker['date']
ETH_vBid = sum([bid[1] for bid in ETH_Depth['bids']])
ETH_vAsk = sum([ask[1] for ask in ETH_Depth['asks']])
values = [ETH_Date, ETH_Price_USD, ETH_vBid, ETH_vAsk]
return values
def LTC_query():
LTC_Ticker = requests.get(LTC_Ticker_Add).json()
LTC_Depth = requests.get(LTC_Depth_Add).json()
LTC_Price_USD = float(LTC_Ticker['ticker']['last'])
#LTC_Price_EUR = currency_query(LTC_Price_USD)
LTC_Date = LTC_Ticker['date']
LTC_vBid = sum([bid[1] for bid in LTC_Depth['bids']])
LTC_vAsk = sum([ask[1] for ask in LTC_Depth['asks']])
values = [LTC_Date, LTC_Price_USD, LTC_vBid, LTC_vAsk]
return values
def main():
start_time = time.time()
while True:
try:
run_queries()
time.sleep(20.0 - ((time.time() - start_time) % 20.0))
        except Exception:
SN.send_notification("Data_Collection.py - run_queries() -- ")
pass
if __name__ == '__main__':
main()
|
import struct
from typing import List, Tuple, BinaryIO
def main(infiles: List[str], outfile: str, n: int) -> None:
size_x, size_y = None, None
with open(outfile, 'wb') as ntf:
for i in range(n):
print(f'processing file {i+1}/{n}')
with open(infiles[i], 'rb') as jxf:
if i == 0:
print(f'=> reading and parsing .jxf header')
jxf_header = jxf.read(56)
ntf_header, size_x, size_y = construct_ntf_header(jxf_header, n)
print(f'=> matrix dimensions: {size_x=}, {size_y=}')
print('=> writing .ntf header')
ntf.write(ntf_header)
# write one row of data at a time
jxf.seek(56)
print(
'=> converting big endian .jxf data to little endian,',
'writing little endian data to .ntf file'
)
for row in range(size_y):
data = jxf.read(size_x * 4)
ntf.seek(128 + 4 * (row*size_x*n + i*size_x))
data = convert_block_to_little_endian(data, size_x)
ntf.write(data)
def construct_ntf_data(jxf: BinaryIO, size_x: int, size_y: int) -> bytearray:
# 32bits = 4 bytes
jxf.seek(56)
big_endian_bytes = jxf.read(size_x * size_y * 4)
data = []
for i in range(size_x * size_y):
data += itob(btoi(big_endian_bytes[4*i:4*i+4]))
return bytearray(data)
def construct_ntf_header(jxf_header: bytes, n: int) -> Tuple[bytearray, int, int]:
# See below link for .jxf file header spec:
# https://cycling74.com/sdk/max-sdk-8.2.0/chapter_jit_jxf.html#chapter_jit_jxf_api
assert jxf_header[36:40] == b'FL32', 'matrix is not in float32 format'
assert btoi(jxf_header[40:44]) == 1, 'matrix does not have planes == 1'
assert btoi(jxf_header[44:48]) == 2, 'matrix is not 2-dimensional'
assert btoi(jxf_header[48:52]) == btoi(jxf_header[52:56]), 'matrix rows != cols'
size_x = btoi(jxf_header[48:52])
size_y = btoi(jxf_header[52:56])
# ntf file header
bytes_arr = [
*itob(0), # 0x00 ByteOrder: 1 = little endian
*itob(1), # 0x04 VersionNumber: 1 = Reaktor 5+?
*itob(1), # 0x08 ArrayFormat: 1 = Float32Bits
*itob(1), # 0x0C ???, default = 1
*itob(size_x * n), # 0x10 dx: X size (horizontal)
*itob(size_y), # 0x14 dy: Y size (vertical)
*ftob(0), # 0x18 Min: Value Property, default = 0.0
*ftob(1), # 0x1C Max: Value Property, default = 1.0
*ftob(0), # 0x20 StepSize: Value Property, default = 0.0
*ftob(0), # 0x24 Default: Value Property, default = 0.0
*itob(0), # 0x28 DisplayFormat
*itob(0), # 0x2C DefaultValueColor
*itob(0), # 0x30 MinValueColor
*itob(0), # 0x34 MaxValueColor
*itob(0), # 0x38 X-Units: 0=Index, 1=[0...1], 2=ms, 3=tempo ticks
*ftob(48000), # 0x3C X-SamplesPerSecond
*ftob(120), # 0x40 X-BPM: default = 120.0
*ftob(1), # 0x44 X-SamplesPerTick: default = 1.0
*itob(24), # 0x48 X-TicksPerBeat: default = 24
*itob(4), # 0x4C X-BeatsPerBar: default = 4
*ftob(0), # 0x50 X-Offset: default = 0.0
*ftob(1), # 0x54 X-CustomRange: default = 1.0
*ftob(1), # 0x58 X-CustomRatio: default = 1.0
*itob(0), # 0x5C Y-Units: 0=Index, 1=[0...1]
*ftob(48), # 0x60 Y-SamplesPerSecond, default = 48.0
*ftob(120), # 0x64 Y-BPM: default = 120.0
*ftob(1), # 0x68 Y-SamplesPerTick: default = 1.0
*itob(24), # 0x6C Y-TicksPerBeat: default = 24
*itob(4), # 0x70 Y-BeatsPerBar: default = 4
*ftob(0), # 0x74 Y-Offset: default = 0.0
*ftob(1), # 0x78 Y-CustomRange: default = 1.0
*ftob(1), # 0x7C Y-CustomRatio: default = 1.0
]
return bytearray(bytes_arr), size_x, size_y
def convert_block_to_little_endian(
big_endian_bytes: bytes,
size_in_bytes: int,
) -> bytearray:
data = bytearray(big_endian_bytes)
for i in range(size_in_bytes):
big = data[4*i : 4*(i+1)]
little = btoi(big).to_bytes(4, byteorder='little')
data[4*i : 4*(i+1)] = little
return data
def ftob(f: float) -> bytes:
    return struct.pack('<f', f)
def itob(i: int) -> List[int]:
    return [*i.to_bytes(4, byteorder='little')]
def btoi(b: bytes) -> int:
return int.from_bytes(b, byteorder='big')
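# A minimal reader for sanity-checking a generated header (a sketch based on
# the NTF layout documented in the string at the end of this file):
def read_ntf_header(path: str) -> dict:
    with open(path, 'rb') as f:
        header = f.read(24)
    byte_order, version, array_format, _, dx, dy = struct.unpack('<6i', header)
    return {'byte_order': byte_order, 'version': version,
            'array_format': array_format, 'dx': dx, 'dy': dy}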
if __name__ == '__main__':
import sys
infiles = sys.argv[1:]
n = len(infiles)
if n == 0:
print(f"Usage: {__file__} <jxf_file1> ...")
sys.exit(-1)
elif n == 1:
outfile = f'{infiles[0].rpartition(".")[0]}.ntf'
else:
outfile = f'{infiles[0].rpartition(".")[0].rpartition("_")[0]}.ntf'
main(infiles, outfile, n)
"""
https://web.archive.org/web/20160407101527/http://www.semaforte.com/reaktor/files/NTF-Layout.htm
//----------------------------- NTF - file format ------------------------
// int : 32-bit signed Integer
// float : 32-bit IEEE Floating Point
Adr Type Parameter Comment
==== ===== ================= ==========================================
0x00 int ByteOrder // INTEL (little endian) = 0, MOTOROLA = 1
int VersionNumber // Reaktor 6 = 1
int ArrayFormat // Undefined = 0, Float32Bits = 1
int <unknown> // always 1?
0x10 int dx // X size (horizontal)
int dy // Y size (vertical)
float Min // Value Properties
float Max // Value Properties
0x20 float StepSize // Value Properties, generally 0
float Default // Value Properties, generally 0
int DisplayFormat // 0 = Numeric, 1 = Midi Note, 2 = %
int DefaultValueColor // generally 0
0x30 int MinValueColor // generally 0
int MaxValueColor // generally 0
int X-Units // 0 = Index, 1 = [0...1], 2 = milliseconds, 3 = tempo ticks
float X-SamplesPerSecond // e.g. 48000.0
0x40 float X-BPM // e.g. 120.0
float X-SamplesPerTick // generally 1.0
int X-TicksPerBeat // generally 24
int X-BeatsPerBar // generally 4
0x50 float X-Offset // generally 0.0
float X-CustomRange // generally 1.0
float X-CustomRatio // generally 1.0
int Y-Units // 0 = Index, 1 = [0...1]
0x60 float Y-SamplesPerSecond // generally 48.0
float Y-BPM // generally 120.0
float Y-SamplesPerTick // generally 1.0
int Y-TicksPerBeat // generally 24
0x70 int Y-BeatsPerBar // generally 4
float Y-Offset // generally 0.0
float Y-CustomRange // generally 1.0
float Y-CustomRatio // generally 1.0
// Start of table data stored as Float-32's. Data is saved by row.
// So...First row = (y0, x0), (y0,x1), (y0,x2)...(y1, x0), (y1,x1), (y1,x2)…etc
// for (int y = 0; y < dy; y++) // vertical
// {
// for (int x = 0; x < dx; x++) // horizontal
// {
// Value[y][x]
// }
// }
0x80 float Value[0][0]
float Value[0][1]
float Value[0][2]
...
float Value[0][dx-1]
float Value[1][0]
0x90 float Value[1][1]
float Value[1][2]
...
float Value[1][dx-1]
float Value[2][0]
0xA0 float Value[2][1]
float Value[2][2]
...
float Value[2][dx-1]
...
float Value[dy-1][0]
float Value[dy-1][1]
float Value[dy-1][2]
...
float Value[dy-1][dx-1]
// for questions please contact -> julian.ringel@native-instruments.de
"""
|
from os import system
import threading
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from datetime import datetime as dt
import time
extract_thread = None
display_thread = None
info = {}
check = 0
def extract_data(pid, tid, refreshRate):
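    # Sample per-thread stats with top in batch mode (-b) at the given refresh
    # interval, filter to the watched thread id, and append to <pid>_<tid>.txt.
    # The parsing below assumes top is configured so the last column is the
    # last-used core (the P field, enabled via the toprc configuration).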
system("top -H -p "+pid+" -d "+refreshRate+" -b | grep "+tid+" >> "+pid+"_"+tid+".txt")
def process_data(pid,tid,refreshRate):
global info, check
while (check):
f = open(pid+"_"+tid+".txt")
data = f.readlines()
data = [i.strip() for i in data[:len(data)-1]]
        times = [i.split()[-2] for i in data]  # TIME+ column
        cores = [int(i.split()[-1]) for i in data]  # last-used core (P column)
        df = {"cores":pd.Series(np.array(cores)),"time":pd.Series(np.array(times))}
df = pd.DataFrame(df)
freq_change = {}
start = df['time'][0]
for i in range(1,len(df)):
if df['cores'][i-1]!=df['cores'][i]:
freq_change[(start,df['time'][i])] = df['cores'][i-1]
start = df['time'][i]
start_time = [dt.strptime(i[0],"%M:%S.%f") for i in list(freq_change.keys())]
end_time = [dt.strptime(i[1],"%M:%S.%f") for i in list(freq_change.keys())]
deltas = [(end_time[i]-start_time[i]).microseconds for i in range(len(start_time))]
core_time = {"core":pd.Series(np.array(list(freq_change.values()))),"time":pd.Series(np.array(deltas))}
core_time = pd.DataFrame(core_time)
        core_time['time'] = core_time['time'] / 1000  # convert microseconds to milliseconds
print(core_time)
for i in range(4):
if i in info:
for j in range(len(core_time)):
if i==core_time['core'][j]:
info[i] += core_time['time'][j]
else:
info[i] = 0
for j in range(len(core_time)):
for i in range(4):
if i==core_time['core'][j]:
info[i] += core_time['time'][j]
print(info)
display_data(pid,tid,refreshRate)
def display_data(pid,tid,refreshRate):
global info
x = np.arange(len(info.keys()))
plt.bar(x, np.array(list(info.values())), color = 'blue')
plt.xlabel("Core IDs",fontsize=10)
plt.ylabel("Count in milliseconds",fontsize=10)
plt.title("Process ID: "+pid+"\nThread ID: "+tid+"\nRefresh Rate: "+refreshRate+" seconds")
plt.draw()
plt.pause(0.1)
def start(pid, tid, refreshRate):
global check, extract_thread, display_thread
check = 1
extract_thread = threading.Thread(target=extract_data, args = (pid,tid,refreshRate))
display_thread = threading.Thread(target=process_data, args = (pid,tid,refreshRate))
extract_thread.start()
display_thread.start()
def stop():
global check, extract_thread, display_thread
check = 0
plt.close()
extract_thread.join()
display_thread.join()
|
from .base import *
# Production environment settings
DEBUG = False
SECRET_KEY = 'd+h0io02_a8=g#v9&4adwkbbtkn!usqd3fhn9xf(g7a-ln32al'
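# (Hard-coding SECRET_KEY in version control is risky; a common alternative is
# to read it from the environment instead, e.g.:
# SECRET_KEY = os.environ['DJANGO_SECRET_KEY'])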
SITE_HOST = 'shoor.xyz'
FRONT_HOST = SITE_HOST
ALLOWED_HOSTS = [SITE_HOST]  # with DEBUG = False, requests are rejected unless the host is listed here
# GitHub login
GITHUB_APP_KEY = 'xxxx'
GITHUB_APP_SECRET = 'xxxxxx'
|
import socket #needed for client/server communications
import requests #needed to query CelesTrak
import threading #needed for multi-threading
from queue import Queue #needed for shared queue
import time #needed for sleep() function
from tqdm import tqdm #Added for loading bar
from datetime import datetime #Needed for checking launch day
from skyfield.api import Loader,load,Topos,EarthSatellite, wgs84
from skyfield.timelib import Time
import matplotlib.pyplot as plt #needed for matplotlib
from mpl_toolkits.basemap import Basemap #needed for projecting globe graphic
import numpy as np
import multiprocessing
import os
print("STARTING PROGRAM")
#Server overhead
HOST = '127.0.0.1'
PORT = 10001
#Skyfield overhead
data = load('de421.bsp') #skyfield intuits that it needs to download this file; see "Ephemeris download links"
ts = load.timescale()
earth = data['earth']
#Database lock
#dbLOCK = threading.Lock()
dbLOCK = multiprocessing.Lock()
#TODO
#implement multi-threaded calls to celestrak
#https://timber.io/blog/multiprocessing-vs-multithreading-in-python-what-you-need-to-know/
#Implement function that returns text of color
#Colors
#{'#bcbd22', '#d62728', '#17becf', '#7f7f7f', '#ff7f0e', '#2ca02c', '#1f77b4', '#e377c2', '#9467bd', '#8c564b'}
def getColor(code):
colorcode = {
'#bcbd22' : "YELLOW", #Sick yellow
'#d62728' : "RED", #Red
'#17becf' : "CYAN", #Teal
'#7f7f7f' : "GREY", #Grey
'#ff7f0e' : "ORANGE", #Orange
'#2ca02c' : "GREEN", #Forest Green
'#1f77b4' : "BLUE", #Blue
'#e377c2' : "PINK", #Pink
'#9467bd' : "PURPLE", #Purple
'#8c564b' : "BROWN" #Brown
}
return colorcode[code]
#Function for calculating the distance between 2 points given their latitudes/longtitudes (haversine)
def haversine(lat1,lon1,lat2,lon2):
lon1,lat1,lon2,lat2 = map(np.radians, [lon1,lat1,lon2,lat2])
dlon = lon2 - lon1
dlat = lat2 - lat1
a = np.sin(dlat/2.0)**2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon/2.0)**2
c = 2 * np.arcsin(np.sqrt(a))
km = 6367 * c
return km
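# e.g. haversine(34.74, -120.58, 28.40, -80.61) -- Vandenberg to Cape
# Canaveral -- comes out to roughly 3.8e3 km.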
def siteSelect(s):
    #LATITUDE, LONGITUDE, SITENAME
sites = {
"1" : (34.7373367,-120.5843126, "Vandenberg"), #Vandenberg
"2" : (28.396837,-80.605659, "Cape Canaveral"), #Cape Canaveral
"3" : (19.614492,110.951133, "Wengchang Space Launch Site"),
"4" : (45.965000,63.305000, "Baikonur Cosmodrome"),
"5" : (13.719939,80.230425, "Satish Dhawan Space Centre")
}
    return sites[s]
def processRequest(myr, mmo, mday, mhour, mmin, msite, morbit, conn):
print("[+] Processing request")
"""
yr = 2022
month = 12
day = 15
hour = 13
minute = 15
slat = 28.396837
slong = -80.605659
siteName = "Cape Canaveral"
LEO = False
MEO = False
"""
#conn.sendall("TEST1\r\nTEST2\r\n\r\n".encode())
tracker= []
#tracker = multiprocessing.Manager().list()
yr = int(myr)
month = int(mmo)
day = int(mday)
hour = int(mhour)
minute = int(mmin)
slat,slong,siteName = siteSelect(msite)
LEO = False
MEO = False
if morbit == "LEO":
LEO = True
if morbit == "MEO":
MEO = True
#Setup the figure
fig = plt.figure(figsize=(10,8))
m = Basemap(projection='cyl', llcrnrlat=(slat-10), urcrnrlat=(slat+10), llcrnrlon=(slong-10), urcrnrlon=(slong+10),)
m.shadedrelief()
#Plot the launch site
plt.scatter(slong,slat)
plt.annotate(siteName, (slong,slat))
for tle in tqdm(dic.values()):
name = tle[0]
L1 = tle[1]
L2 = tle[2]
#Evaluate the debris path
#time = ts.utc(yr, month, day, hour, range(minute,minute+10))
time = ts.utc(yr, month, day, hour, minute, range(0,360,20)) #plot by 20sec increments
################
#SPEED
################
satl = EarthSatellite(L1,L2)
satlloc = satl.at(time)
sub = satlloc.subpoint()
lon = sub.longitude.degrees
lat = sub.latitude.degrees
breaks = np.where(np.abs(lon[1:]-lon[:-1]) > 30) #don't plot wrap-around
lon, lat = lon[:-1], lat[:-1]
lon[breaks] = np.nan
#Scrub ground tracks that do not appear within our mappable window
        #Check the first longitude
if np.isnan(lon[0]) or lon[0] < m.llcrnrlon or lon[0] > m.urcrnrlon:
end = lon[len(lon)-1]
            #Check the last longitude
if np.isnan(end) or end < m.llcrnrlon or end > m.urcrnrlon:
#If both fall outside of our boundary, don't plot it
continue
#Do the same with latitudes
if np.isnan(lat[0]) or lat[0] < m.llcrnrlat or lat[0] > m.urcrnrlat:
end = lat[len(lat)-1]
if np.isnan(end) or end < m.llcrnrlat or end > m.urcrnrlat:
continue
##################
#SPEED
##################
#satl = EarthSatellite(L1,L2)
#satlloc = satl.at(time)
        satl_alt = satlloc.distance().km - 6371 #Get satellite altitude by subtracting Earth's mean radius (km)
        #Scrub satellites that stay above the destination altitude band
        #(the original `satl_alt.all() > 2000` compared a boolean to 2000 and never fired)
        if LEO and (satl_alt > 2000).all():
            continue
        if MEO and (satl_alt > 36786).all():
            continue
#Calculate distance between ground plot and launch site using haversine formula
distances = haversine(lat,lon,slat,slong)
np.seterr(all = "ignore")
closest_km = np.nanmin(distances)
        if np.isnan(closest_km): #I need to suppress the RuntimeWarning message at some point
continue
idx_closest_km = np.nanargmin(distances)
#timestamp = str(yr) + "-" + str(month) + "-" + str(day) + " " + str(hour) + ":" + str(minute+idx_closest_km)
mins = minute + (idx_closest_km * 20 // 60)
secs = (idx_closest_km * 20) % 60
timestamp = str(yr) + "-" + str(month) + "-" + str(day) + " " + str(hour) + ":" + str(mins) + ":" + str(secs)
#Matplotlib is not threadsafe
#Have threads push plot arguments to a queue, then plot in unified process
p = plt.plot(lon,lat, label=name)
color = getColor(p[-1].get_color())
tracker.append((name,closest_km,timestamp,color, satl_alt[idx_closest_km]))
    #Lock acquisition/release is handled by the caller in the server loop below
sortedTracker = sorted(tracker, key = lambda x: x[1])
dist = 0
idx = 0
msg = ""
msg += "The following near misses will occur:\r\n"
    while idx < len(sortedTracker) and dist < 200:
        name = sortedTracker[idx][0]
        closest = str(round(sortedTracker[idx][1], 2))
        time = sortedTracker[idx][2]
        color = sortedTracker[idx][3]
        altitude = str(round(sortedTracker[idx][4], 2))
        msg += name + " passes within " + closest + "km of the launchsite at " + time + " with altitude " + altitude + ". Plot color: " + color + "\r\n"
        idx += 1
        dist = sortedTracker[idx][1] if idx < len(sortedTracker) else float('inf')
msg += "\r\n"
conn.sendall(msg.encode())
plt.show()
#implement client/server model
#Identifies the various lists managed by Celestrak
celestrak_lists = ['active','geo','amateur','analyst','argos','beidou','2012-044','cosmos-2251-debris','cubesat','dmc','resource','education','engineering','1999-025','galileo','geodetic','globalstar','glo-ops','gnss','goes','gorizont','gps-ops','2019-006','intelsat','iridium','iridium-33-debris','iridium-NEXT','tle-new','military','molniya','nnss','noaa','oneweb','orbcomm','other','other-comm','planet','radar','raduga','musson','sbas','satnogs','sarsat','ses','science','stations','spire','starlink','tdrss','weather']
#dic = {}
dic = multiprocessing.Manager().dict()
#This is a queue that is populated with elements from celestrak_lists periodically for threads to action
#listQ = Queue()
#listQlock = threading.Lock()
#cond = threading.Condition() #Condition object
def updateDic(celestrak_element):
item = celestrak_element
req = requests.get("http://celestrak.com/NORAD/elements/" + item + ".txt")
if req.status_code == 200:
tle = req.text.splitlines()
#Process the response from celestrak, updating the
for idx,line in enumerate(tqdm(tle)):
if idx%3 == 0:
name = line.strip() #Removes trailing white space
L1 = tle[idx+1]
L2 = tle[idx+2]
catnum = L2.split()[1]
                if catnum in dic:
                    #Existing entry: update only if the TLE lines changed
                    if L1 != dic[catnum][1] or L2 != dic[catnum][2]:
                        dic[catnum] = (name, L1, L2)
                    else:
                        continue
                else:
                    #New entry
                    dic[catnum] = (name, L1, L2)
else:
print("[-] " + item + " not found!")
#Populate listQ
#for satlist in celestrak_lists:
# listQ.put(satlist)
print()
print("##############")
print()
def updateDatabase():
dbLOCK.acquire()
print("[+] LOCK ACQUIRED")
for trak in celestrak_lists:
updateDic(trak)
dbLOCK.release()
print("[+] Update Finished, releasing lock")
#Updates the dic data structure every 120 seconds (probably could afford to wait longer...)
def testDB():
while True:
updateDatabase()
time.sleep(120)
#db_thread = threading.Timer(10.0, updateDatabase).start()
#db_thread = threading.Thread(target=testDB).start() #Faster than multiprocessing, but creates errors with matplotlib (which is not threadsafe)
db_thread = multiprocessing.Process(target=testDB)
db_thread.start()
print("STARTING SERVER")
s_time = datetime.now()
while True:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind((HOST,PORT))
s.listen()
conn, addr = s.accept()
msg = ""
with conn:
print('Connected by', addr)
while True:
data = conn.recv(1024)
msg += data.decode()
#if data.decode() == "Hello!":
#if "\r\n\r\n" in msg:
#print(msg)
#print("END")
#if not data:
if "\r\n" in data.decode():
print((datetime.now() - s_time).total_seconds())
print(msg)
client_msg = msg.split(" ")
myr = client_msg[0]
mmonth = client_msg[1]
mday = client_msg[2]
mhour = client_msg[3]
mmin = client_msg[4]
msite = client_msg[5]
morbit = client_msg[6]
print(siteSelect(msite))
#Try to process request (in case received mid-update)
dbLOCK.acquire()
processRequest(myr, mmonth, mday, mhour, mmin, msite, morbit, conn)
dbLOCK.release()
break
#conn.sendall(data)
#conn.send(data)
|
from functools import partial
from unittest.mock import patch
import pytest
from pkgdev.scripts import run
from snakeoil.contexts import chdir
from snakeoil.osutils import pjoin
class TestPkgdevManifestParseArgs:
def test_non_repo_cwd(self, capsys, tool):
with pytest.raises(SystemExit) as excinfo:
tool.parse_args(['manifest'])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
assert err.strip() == 'pkgdev manifest: error: not in ebuild repo'
def test_repo_cwd(self, repo, capsys, tool):
repo.create_ebuild('cat/pkg-0')
with chdir(repo.location):
options, _ = tool.parse_args(['manifest'])
matches = [x.cpvstr for x in repo.itermatch(options.restriction)]
assert matches == ['cat/pkg-0']
def test_dir_target(self, repo, capsys, tool):
repo.create_ebuild('cat/pkg-0')
with chdir(repo.location):
options, _ = tool.parse_args(['manifest', pjoin(repo.location, 'cat')])
matches = [x.cpvstr for x in repo.itermatch(options.restriction)]
assert matches == ['cat/pkg-0']
def test_ebuild_target(self, repo, capsys, tool):
path = repo.create_ebuild('cat/pkg-0')
with chdir(repo.location):
options, _ = tool.parse_args(['manifest', path])
matches = [x.cpvstr for x in repo.itermatch(options.restriction)]
assert matches == ['cat/pkg-0']
def test_atom_target(self, repo, capsys, tool):
repo.create_ebuild('cat/pkg-0')
with chdir(repo.location):
options, _ = tool.parse_args(['manifest', 'cat/pkg'])
matches = [x.cpvstr for x in repo.itermatch(options.restriction)]
assert matches == ['cat/pkg-0']
def test_non_repo_dir_target(self, tmp_path, repo, capsys, tool):
with pytest.raises(SystemExit) as excinfo, \
chdir(repo.location):
tool.parse_args(['manifest', str(tmp_path)])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
assert err.startswith("pkgdev manifest: error: 'fake' repo doesn't contain:")
def test_invalid_atom_target(self, repo, capsys, tool):
with pytest.raises(SystemExit) as excinfo, \
chdir(repo.location):
tool.parse_args(['manifest', '=cat/pkg'])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
assert err.startswith("pkgdev manifest: error: invalid atom: '=cat/pkg'")
class TestPkgdevManifest:
script = partial(run, 'pkgdev')
@pytest.fixture(autouse=True)
def _setup(self):
self.args = ['pkgdev', 'manifest']
def test_good_manifest(self, capsys, repo):
repo.create_ebuild('cat/pkg-0')
with patch('sys.argv', self.args), \
pytest.raises(SystemExit) as excinfo, \
chdir(repo.location):
self.script()
assert excinfo.value.code == 0
out, err = capsys.readouterr()
assert out == err == ''
def test_bad_manifest(self, capsys, repo):
repo.create_ebuild('cat/pkg-0')
repo.create_ebuild('cat/pkg-1', eapi='-1')
with patch('sys.argv', self.args), \
pytest.raises(SystemExit) as excinfo, \
chdir(repo.location):
self.script()
assert excinfo.value.code == 1
out, err = capsys.readouterr()
assert not err
assert out == " * cat/pkg-1: invalid EAPI '-1'\n"
|
from django.db import models, transaction
from django.utils.crypto import get_random_string
from ckeditor_uploader.fields import RichTextUploadingField
from django.core.validators import RegexValidator
from datetime import timedelta
from django.utils import timezone
from django import forms as django_forms
from collections import namedtuple, defaultdict
import re
from django.core.exceptions import ValidationError
import select2.fields
from heltour.tournament import signals
import logging
from django.contrib.auth.models import User
from django.contrib.postgres.fields.jsonb import JSONField
from django.contrib.sites.models import Site
from django_comments.models import Comment
from heltour import settings
import reversion
logger = logging.getLogger(__name__)
# Helper function to find an item in a list by its properties
def find(lst, **prop_values):
for k, v in list(prop_values.items()):
lst = [obj for obj in lst if getnestedattr(obj, k) == v]
return next(iter(lst), None)
def getnestedattr(obj, k):
for k2 in k.split('__'):
if obj is None:
return None
obj = getattr(obj, k2)
return obj
def abs_url(url):
site = Site.objects.get_current().domain
return '%s://%s%s' % (settings.LINK_PROTOCOL, site, url)
def add_system_comment(obj, text, user_name='System'):
Comment.objects.create(content_object=obj, site=Site.objects.get_current(), user_name=user_name,
comment=text, submit_date=timezone.now(), is_public=True)
# Represents a positive number in increments of 0.5 (0, 0.5, 1, etc.)
class ScoreField(models.PositiveIntegerField):
def from_db_value(self, value, expression, connection, context):
if value is None:
return None
return value / 2.0
def get_db_prep_value(self, value, connection, prepared=False):
if value is None:
return None
return int(value * 2)
def to_python(self, value):
if value is None or value == '':
return None
return float(value)
def formfield(self, **kwargs):
defaults = {'widget': django_forms.TextInput(attrs={'class': 'vIntegerField'}), 'initial': self.default}
defaults.update(kwargs)
return django_forms.FloatField(**defaults)
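# e.g. a ScoreField value of 1.5 is stored in the database as the integer 3
# and converted back to 1.5 when loaded.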
#-------------------------------------------------------------------------------
class _BaseModel(models.Model):
date_created = models.DateTimeField(auto_now_add=True)
date_modified = models.DateTimeField(auto_now=True)
class Meta:
abstract = True
THEME_OPTIONS = (
('blue', 'Blue'),
('green', 'Green'),
('red', 'Red'),
('yellow', 'Yellow'),
)
RATING_TYPE_OPTIONS = (
('classical', 'Classical'),
('rapid', 'Rapid'),
('chess960', 'Chess 960'),
('blitz', 'Blitz'),
)
COMPETITOR_TYPE_OPTIONS = (
('team', 'Team'),
('individual', 'Individual'),
)
PAIRING_TYPE_OPTIONS = (
('swiss-dutch', 'Swiss Tournament: Dutch Algorithm'),
('swiss-dutch-baku-accel', 'Swiss Tournament: Dutch Algorithm + Baku Acceleration'),
)
#-------------------------------------------------------------------------------
class League(_BaseModel):
name = models.CharField(max_length=255, unique=True)
tag = models.SlugField(unique=True, help_text='The league will be accessible at /{league_tag}/')
description = models.CharField(max_length=1023, blank=True)
theme = models.CharField(max_length=32, choices=THEME_OPTIONS)
display_order = models.PositiveIntegerField(default=0)
time_control = models.CharField(max_length=32, blank=True)
rating_type = models.CharField(max_length=32, choices=RATING_TYPE_OPTIONS)
competitor_type = models.CharField(max_length=32, choices=COMPETITOR_TYPE_OPTIONS)
pairing_type = models.CharField(max_length=32, choices=PAIRING_TYPE_OPTIONS)
is_active = models.BooleanField(default=True)
is_default = models.BooleanField(default=False)
enable_notifications = models.BooleanField(default=False)
class Meta:
permissions = (
('view_dashboard', 'Can view dashboard'),
)
def time_control_initial(self):
parts = self.time_control.split('+')
if len(parts) != 2:
return None
return int(parts[0]) * 60
def time_control_increment(self):
parts = self.time_control.split('+')
if len(parts) != 2:
return None
return int(parts[1])
def time_control_total(self):
initial = self.time_control_initial()
increment = self.time_control_increment() or 0
if not initial:
return None
expected_moves = 60
return initial + increment * expected_moves
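    # e.g. time_control '90+30' gives time_control_initial() == 5400 (90*60),
    # time_control_increment() == 30, and time_control_total() == 5400 + 30*60 = 7200.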
def get_leaguesetting(self):
try:
return self.leaguesetting
except LeagueSetting.DoesNotExist:
return LeagueSetting.objects.create(league=self)
def __str__(self):
return self.name
class LeagueSetting(_BaseModel):
league = models.OneToOneField(League)
contact_period = models.DurationField(default=timedelta(hours=48))
notify_for_comments = models.BooleanField(default=True)
notify_for_latereg_and_withdraw = models.BooleanField(default=True)
notify_for_forfeits = models.BooleanField(default=True)
notify_for_registrations = models.BooleanField(default=True)
notify_for_pre_season_registrations = models.BooleanField(default=False)
close_registration_at_last_round = models.BooleanField(default=True)
warning_for_late_response = models.BooleanField(default=True)
carry_over_red_cards_as_yellow = models.BooleanField(default=True)
limit_game_nominations_to_participants = models.BooleanField(default=True)
max_game_nominations_per_user = models.PositiveIntegerField(default=3)
def __str__(self):
return '%s Settings' % self.league
PLAYOFF_OPTIONS = (
(0, 'None'),
(1, 'Finals'),
(2, 'Semi-Finals'),
(3, 'Quarter-Finals'),
)
#-------------------------------------------------------------------------------
class Season(_BaseModel):
league = models.ForeignKey(League)
name = models.CharField(max_length=255)
tag = models.SlugField(help_text='The season will be accessible at /{league_tag}/season/{season_tag}/')
start_date = models.DateTimeField(blank=True, null=True)
rounds = models.PositiveIntegerField()
round_duration = models.DurationField(default=timedelta(days=7))
boards = models.PositiveIntegerField(blank=True, null=True)
playoffs = models.PositiveIntegerField(default=0, choices=PLAYOFF_OPTIONS)
is_active = models.BooleanField(default=False)
is_completed = models.BooleanField(default=False)
registration_open = models.BooleanField(default=False)
nominations_open = models.BooleanField(default=False)
class Meta:
unique_together = (('league', 'name'), ('league', 'tag'))
permissions = (
('manage_players', 'Can manage players'),
('review_nominated_games', 'Can review nominated games'),
)
ordering = ['league__name', '-name']
def __init__(self, *args, **kwargs):
super(Season, self).__init__(*args, **kwargs)
self.initial_rounds = self.rounds
self.initial_round_duration = self.round_duration
self.initial_start_date = self.start_date
self.initial_is_completed = self.is_completed
def last_season_alternates(self):
last_season = Season.objects.filter(league=self.league, start_date__lt=self.start_date) \
.order_by('-start_date').first()
last_season_alts = Alternate.objects.filter(season_player__season=last_season) \
.select_related('season_player__player').nocache()
return {alt.season_player.player for alt in last_season_alts}
def export_players(self):
last_season_alts = self.last_season_alternates()
def extract(sp):
info = {
'name': sp.player.lichess_username,
'rating': sp.player.rating_for(self.league),
'has_20_games': not sp.player.provisional_for(self.league),
'in_slack': bool(sp.player.slack_user_id),
'account_status': sp.player.account_status,
'date_created': None,
'friends': None,
'avoid': None,
'prefers_alt': False,
'previous_season_alternate': sp.player in last_season_alts
}
reg = sp.registration
if reg is not None:
info.update({
'date_created': reg.date_created.isoformat(),
'peak_classical_rating': reg.peak_classical_rating,
'friends': reg.friends,
'avoid': reg.avoid,
'prefers_alt': reg.alternate_preference == 'alternate',
})
return info
season_players = (self.seasonplayer_set
.filter(is_active=True)
.select_related('player', 'registration')
.nocache())
return [extract(sp) for sp in season_players]
def clean(self):
if self.league_id and self.league.competitor_type == 'team' and self.boards is None:
raise ValidationError('Boards must be specified for a team season')
def save(self, *args, **kwargs):
# TODO: Add validation to prevent changes after a certain point
new_obj = self.pk is None
rounds_changed = self.pk is None or self.rounds != self.initial_rounds
round_duration_changed = self.pk is None or self.round_duration != self.initial_round_duration
start_date_changed = self.pk is None or self.start_date != self.initial_start_date
is_completed_changed = self.pk is None or self.is_completed != self.initial_is_completed
if self.is_completed and self.registration_open:
self.registration_open = False
super(Season, self).save(*args, **kwargs)
if rounds_changed or round_duration_changed or start_date_changed:
date = self.start_date
for round_num in range(1, self.rounds + 1):
next_date = date + self.round_duration if date is not None else None
Round.objects.update_or_create(season=self, number=round_num, defaults={'start_date': date, 'end_date': next_date})
date = next_date
if new_obj:
# Create a default set of prizes. This may need to be modified in the future
SeasonPrize.objects.create(season=self, rank=1)
SeasonPrize.objects.create(season=self, rank=2)
SeasonPrize.objects.create(season=self, rank=3)
if self.league.competitor_type != 'team':
SeasonPrize.objects.create(season=self, max_rating=1600, rank=1)
if is_completed_changed and self.is_completed:
# Remove out of date prizes
SeasonPrizeWinner.objects.filter(season_prize__season=self).delete()
# Award prizes
if self.league.competitor_type == 'team':
team_scores = sorted(TeamScore.objects.filter(team__season=self).select_related('team').nocache(), reverse=True)
for prize in self.seasonprize_set.filter(max_rating=None):
if prize.rank <= len(team_scores):
# Award a prize to each team member
for member in team_scores[prize.rank - 1].team.teammember_set.all():
SeasonPrizeWinner.objects.create(season_prize=prize, player=member.player)
else:
player_scores = sorted(LonePlayerScore.objects.filter(season_player__season=self).select_related('season_player__player').nocache(), key=lambda s: s.final_standings_sort_key(), reverse=True)
for prize in self.seasonprize_set.all():
eligible_players = [s.season_player.player for s in player_scores if prize.max_rating is None or s.season_player.seed_rating < prize.max_rating]
if prize.rank <= len(eligible_players):
SeasonPrizeWinner.objects.create(season_prize=prize, player=eligible_players[prize.rank - 1])
def calculate_scores(self):
if self.league.competitor_type == 'team':
self._calculate_team_scores()
else:
self._calculate_lone_scores()
def _calculate_team_scores(self):
# Note: The scores are calculated in a particular way to allow easy adding of new tiebreaks
score_dict = {}
last_round = None
for round_ in self.round_set.filter(is_completed=True).order_by('number'):
round_pairings = round_.teampairing_set.all()
for team in Team.objects.filter(season=self):
white_pairing = find(round_pairings, white_team_id=team.id)
black_pairing = find(round_pairings, black_team_id=team.id)
is_playoffs = round_.number > self.rounds - self.playoffs
def increment_score(round_opponent, round_points, round_opponent_points, round_wins):
playoff_score, match_count, match_points, game_points, games_won, _, _, _, _ = score_dict[(team.pk, last_round.number)] if last_round is not None else (0, 0, 0, 0, 0, 0, 0, None, 0)
round_match_points = 0
if round_opponent is None:
if not is_playoffs:
# Bye
match_points += 1
game_points += self.boards / 2
else:
if is_playoffs:
if round_points > round_opponent_points:
playoff_score += 2 ** (self.rounds - round_.number)
# TODO: Handle ties/tiebreaks somehow?
else:
match_count += 1
if round_points > round_opponent_points:
round_match_points = 2
elif round_points == round_opponent_points:
round_match_points = 1
match_points += round_match_points
game_points += round_points
games_won += round_wins
score_dict[(team.pk, round_.number)] = _TeamScoreState(playoff_score, match_count, match_points, game_points, games_won, round_match_points, round_points, round_opponent, round_opponent_points)
if white_pairing is not None:
increment_score(white_pairing.black_team_id, white_pairing.white_points, white_pairing.black_points, white_pairing.white_wins)
elif black_pairing is not None:
increment_score(black_pairing.white_team_id, black_pairing.black_points, black_pairing.white_points, black_pairing.black_wins)
else:
increment_score(None, 0, 0, 0)
last_round = round_
# Precalculate groups of tied teams for the tiebreaks
tied_team_map = defaultdict(set)
for team in Team.objects.filter(season=self):
score_state = score_dict[(team.pk, last_round.number)]
tied_team_map[(score_state.match_points, score_state.game_points)].add(team.pk)
team_scores = TeamScore.objects.filter(team__season=self)
for score in team_scores:
if last_round is None:
score.playoff_score = 0
score.match_count = 0
score.match_points = 0
score.game_points = 0
score.head_to_head = 0
score.games_won = 0
score.sb_score = 0
else:
score_state = score_dict[(score.team_id, last_round.number)]
score.playoff_score = score_state.playoff_score
score.match_count = score_state.match_count
score.match_points = score_state.match_points
score.game_points = score_state.game_points
score.games_won = score_state.games_won
# Tiebreak calculations
tied_team_set = tied_team_map[(score_state.match_points, score_state.game_points)]
score.head_to_head = 0
score.sb_score = 0
for round_number in range(1, last_round.number + 1):
round_state = score_dict[(score.team_id, round_number)]
opponent = round_state.round_opponent
if opponent is not None:
if round_state.round_match_points == 2:
score.sb_score += score_dict[(round_state.round_opponent, last_round.number)].match_points
elif round_state.round_match_points == 1:
score.sb_score += score_dict[(round_state.round_opponent, last_round.number)].match_points / 2.0
if opponent in tied_team_set:
score.head_to_head += round_state.round_match_points
score.save()
def _calculate_lone_scores(self):
season_players = SeasonPlayer.objects.filter(season=self).select_related('loneplayerscore').nocache()
seed_rating_dict = {sp.player_id: sp.seed_rating for sp in season_players}
score_dict = {}
last_round = None
for round_ in self.round_set.filter(is_completed=True).order_by('number'):
pairings = round_.loneplayerpairing_set.all().nocache()
byes = PlayerBye.objects.filter(round=round_)
for sp in season_players:
white_pairing = find(pairings, white_id=sp.player_id)
black_pairing = find(pairings, black_id=sp.player_id)
bye = find(byes, player_id=sp.player_id)
def increment_score(round_opponent, round_score, round_played):
total, mm_total, cumul, perf, _, _ = score_dict[(sp.player_id, last_round.number)] if last_round is not None else (0, 0, 0, PerfRatingCalc(), None, False)
total += round_score
cumul += total
if round_played:
mm_total += round_score
opp_rating = seed_rating_dict.get(round_opponent, None)
if opp_rating is not None:
perf.add_game(round_score, opp_rating)
else:
# Special cases for unplayed games
mm_total += 0.5
cumul -= round_score
score_dict[(sp.player_id, round_.number)] = _LoneScoreState(total, mm_total, cumul, perf, round_opponent, round_played)
if white_pairing is not None:
increment_score(white_pairing.black_id, white_pairing.white_score() or 0, white_pairing.game_played())
elif black_pairing is not None:
increment_score(black_pairing.white_id, black_pairing.black_score() or 0, black_pairing.game_played())
elif bye is not None:
increment_score(None, bye.score(), False)
else:
increment_score(None, 0, False)
last_round = round_
player_scores = [sp.get_loneplayerscore() for sp in season_players]
for score in player_scores:
player_id = score.season_player.player_id
if last_round is None:
score.points = 0
score.tiebreak1 = 0
score.tiebreak2 = 0
score.tiebreak3 = 0
score.tiebreak4 = 0
else:
score_state = score_dict[(score.season_player.player_id, last_round.number)]
score.points = score_state.total
# Tiebreak calculations
opponent_scores = []
opponent_cumuls = []
for round_number in range(1, last_round.number + 1):
round_state = score_dict[(player_id, round_number)]
if round_state.round_played and round_state.round_opponent is not None:
opponent_scores.append(score_dict[(round_state.round_opponent, last_round.number)].mm_total)
opponent_cumuls.append(score_dict[(round_state.round_opponent, last_round.number)].cumul)
else:
opponent_scores.append(0)
opponent_scores.sort()
# TB1: Modified Median
median_scores = opponent_scores
skip = 2 if last_round.number >= 9 else 1
if score.points <= last_round.number / 2.0:
median_scores = median_scores[:-skip]
if score.points >= last_round.number / 2.0:
median_scores = median_scores[skip:]
score.tiebreak1 = sum(median_scores)
# TB2: Solkoff
score.tiebreak2 = sum(opponent_scores)
# TB3: Cumulative
score.tiebreak3 = score_state.cumul
# TB4: Cumulative opponent
score.tiebreak4 = sum(opponent_cumuls)
# Performance rating
score.perf_rating = score_state.perf.calculate()
score.save()
def is_started(self):
return self.start_date is not None and self.start_date < timezone.now()
def end_date(self):
last_round = self.round_set.filter(number=self.rounds).first()
if last_round is not None:
return last_round.end_date
return None
def board_number_list(self):
if self.boards is None:
raise Exception('Tried to get board list but season.boards is None')
return [n for n in range(1, self.boards + 1)]
def alternates_manager_enabled(self):
if not hasattr(self.league, 'alternatesmanagersetting'):
return False
return self.league.alternatesmanagersetting.is_active
def alternates_manager_setting(self):
if not hasattr(self.league, 'alternatesmanagersetting'):
return None
return self.league.alternatesmanagersetting
def section_list(self):
if not hasattr(self, 'section'):
return [self]
return Season.objects.filter(section__section_group_id=self.section.section_group_id).order_by('section__order')
def section_group_name(self):
if not hasattr(self, 'section'):
return self.name
return self.section.section_group.name
@classmethod
def get_registration_season(cls, league, season=None):
if season is not None and season.registration_open:
return season
else:
return cls.objects.filter(league=league, registration_open=True).order_by('-start_date').first()
@property
def pairings(self):
return (PlayerPairing.objects.filter(teamplayerpairing__team_pairing__round__season=self)
| PlayerPairing.objects.filter(loneplayerpairing__round__season=self)).nocache()
def __str__(self):
return self.name
_TeamScoreState = namedtuple('_TeamScoreState', 'playoff_score, match_count, match_points, game_points, games_won, round_match_points, round_points, round_opponent, round_opponent_points')
_LoneScoreState = namedtuple('_LoneScoreState', 'total, mm_total, cumul, perf, round_opponent, round_played')
# From https://www.fide.com/component/handbook/?id=174&view=article
# Used for performance rating calculations
fide_dp_lookup = [
    -800, -677, -589, -538, -501, -470, -444, -422, -401, -383, -366, -351, -336, -322, -309, -296, -284, -273, -262, -251,
    -240, -230, -220, -211, -202, -193, -184, -175, -166, -158, -149, -141, -133, -125, -117, -110, -102, -95, -87, -80, -72,
    -65, -57, -50, -43, -36, -29, -21, -14, -7, 0, 7, 14, 21, 29, 36, 43, 50, 57, 65, 72, 80, 87, 95, 102, 110, 117, 125, 133,
    141, 149, 158, 166, 175, 184, 193, 202, 211, 220, 230, 240, 251, 262, 273, 284, 296, 309, 322, 336, 351, 366, 383, 401,
    422, 444, 470, 501, 538, 589, 677, 800]
def get_fide_dp(score, total):
# Turn the score into a number from 0-100 (0 = 0%, 100 = 100%)
lookup_index = max(min(int(round(100.0 * score / total)), 100), 0)
# Use that number to get a rating difference from the FIDE lookup table
return fide_dp_lookup[lookup_index]
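# e.g. get_fide_dp(7, 10) -> lookup_index 70 -> +149 rating points.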
class PerfRatingCalc():
def __init__(self):
self._score = 0
self._game_count = 0
self._opponent_ratings = []
def merge(self, other):
self._score += other._score
self._game_count += other._game_count
self._opponent_ratings += other._opponent_ratings
def add_game(self, score, opponent_rating):
self._score += score
self._game_count += 1
self._opponent_ratings.append(opponent_rating)
def calculate(self):
if self._game_count < 5:
return None
average_opp_rating = int(round(sum(self._opponent_ratings) / float(self._game_count)))
dp = get_fide_dp(self._score, self._game_count)
return average_opp_rating + dp
def debug(self):
return '%.1f / %d [%s]' % (self._score, self._game_count, ', '.join((str(r) for r in self._opponent_ratings)))
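# e.g. scoring 3.5/5 against all 1800-rated opposition: average_opp_rating is
# 1800 and get_fide_dp(3.5, 5) hits the 70% entry (+149), so calculate()
# returns 1949.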
#-------------------------------------------------------------------------------
class Round(_BaseModel):
season = models.ForeignKey(Season)
number = models.PositiveIntegerField(verbose_name='round number')
start_date = models.DateTimeField(blank=True, null=True)
end_date = models.DateTimeField(blank=True, null=True)
publish_pairings = models.BooleanField(default=False)
is_completed = models.BooleanField(default=False)
class Meta:
permissions = (
('generate_pairings', 'Can generate and review pairings'),
)
def __init__(self, *args, **kwargs):
super(Round, self).__init__(*args, **kwargs)
self.initial_is_completed = self.is_completed
self.initial_publish_pairings = self.publish_pairings
def save(self, *args, **kwargs):
        is_completed_changed = (self.pk is None and self.is_completed) or self.is_completed != self.initial_is_completed
        publish_pairings_changed = (self.pk is None and self.publish_pairings) or self.publish_pairings != self.initial_publish_pairings
super(Round, self).save(*args, **kwargs)
if is_completed_changed:
self.season.calculate_scores()
if publish_pairings_changed and self.publish_pairings and not self.is_completed:
signals.do_pairings_published.send(Round, round_id=self.pk)
@property
def pairings(self):
return (PlayerPairing.objects.filter(teamplayerpairing__team_pairing__round=self)
| PlayerPairing.objects.filter(loneplayerpairing__round=self)).nocache()
def pairing_for(self, player):
pairings = self.pairings
return (pairings.filter(white=player) | pairings.filter(black=player)).first()
def __str__(self):
return "%s - Round %d" % (self.season, self.number)
#-------------------------------------------------------------------------------
class SectionGroup(_BaseModel):
league = models.ForeignKey(League)
name = models.CharField(max_length=255)
def __str__(self):
return self.name
#-------------------------------------------------------------------------------
class Section(_BaseModel):
season = models.OneToOneField(Season)
section_group = models.ForeignKey(SectionGroup)
name = models.CharField(max_length=255, verbose_name='section name')
order = models.PositiveIntegerField()
min_rating = models.PositiveIntegerField(blank=True, null=True)
max_rating = models.PositiveIntegerField(blank=True, null=True)
def clean(self):
if self.season and self.section_group and self.season.league_id != self.section_group.league_id:
raise ValidationError('Season and section group leagues must match')
def is_eligible(self, player):
rating = player.rating_for(self.season.league)
if self.min_rating is not None and (rating is None or rating < self.min_rating):
return False
if self.max_rating is not None and (rating is None or rating >= self.max_rating):
return False
return True
def __str__(self):
return '%s - %s' % (self.name, self.section_group.name)
username_validator = RegexValidator(r'^[\w-]+$')
ACCOUNT_STATUS_OPTIONS = (
('normal', 'Normal'),
('engine', 'Engine'),
('booster', 'Booster'),
('closed', 'Closed'),
)
#-------------------------------------------------------------------------------
class Player(_BaseModel):
# TODO: we should find out the real restrictions on a lichess username and
# duplicate them here.
# Note: a case-insensitive unique index for lichess_username is added via migration to the DB
user = models.OneToOneField(User, blank=True, null=True)
lichess_username = models.CharField(max_length=255, validators=[username_validator])
rating = models.PositiveIntegerField(blank=True, null=True)
games_played = models.PositiveIntegerField(blank=True, null=True)
email = models.CharField(max_length=255, blank=True)
is_active = models.BooleanField(default=True)
slack_user_id = models.CharField(max_length=255, blank=True)
timezone_offset = models.DurationField(blank=True, null=True)
account_status = models.CharField(default='normal', max_length=31, choices=ACCOUNT_STATUS_OPTIONS)
profile = JSONField(blank=True, null=True)
def player_rating_display(self, league=None):
return self.rating_for(league)
@property
def pairings(self):
return (self.pairings_as_white.all() | self.pairings_as_black.all()).nocache()
class Meta:
ordering = ['lichess_username']
permissions = (
('change_player_details', 'Can change player details'),
('invite_to_slack', 'Can invite to slack'),
('link_slack', 'Can manually link slack accounts'),
)
def __init__(self, *args, **kwargs):
super(Player, self).__init__(*args, **kwargs)
self.initial_account_status = self.account_status
def save(self, *args, **kwargs):
        if self.user is not None:
            self.lichess_username = self.user.username
account_status_changed = self.pk and self.account_status != self.initial_account_status
super(Player, self).save(*args, **kwargs)
if account_status_changed:
signals.player_account_status_changed.send(Player, instance=self, old_value=self.initial_account_status, new_value=self.account_status)
def update_profile(self, user_meta):
self.profile = user_meta
classical = user_meta['perfs'].get('classical')
if classical is not None:
self.rating = classical['rating']
self.games_played = classical['games']
is_engine = user_meta.get('engine', False)
is_booster = user_meta.get('booster', False)
is_closed = user_meta.get('disabled', False)
self.account_status = 'closed' if is_closed else 'engine' if is_engine else 'booster' if is_booster else 'normal'
self.save()
@classmethod
def get_or_create(cls, user):
player, _ = Player.objects.get_or_create(user=user)
return player
def link_slack_account(self, slack_user_id):
if self.slack_user_id == slack_user_id:
# No change needed
return False
with reversion.create_revision():
reversion.set_comment('Link slack account')
self.slack_user_id = slack_user_id
self.save()
signals.slack_account_linked.send(sender=Player, player=self, slack_user_id=slack_user_id)
return True
def is_available_for(self, round_):
return not PlayerAvailability.objects.filter(round=round_, player=self, is_available=False).exists()
def rating_for(self, league):
if league:
if self.profile is None:
return None
return self.profile['perfs'].get(league.rating_type, {}).get('rating')
return self.rating
def games_played_for(self, league):
if league:
if self.profile is None:
return None
return self.profile['perfs'].get(league.rating_type, {}).get('games')
return self.games_played # classical
def provisional_for(self, league):
if self.profile is None:
return True
perf = self.profile['perfs'].get(league.rating_type)
if perf is None:
return True
return perf.get('prov', False)
@property
def timezone_str(self):
        if self.timezone_offset is None:
return '?'
seconds = self.timezone_offset.total_seconds()
sign = '-' if seconds < 0 else '+'
        hours = abs(seconds) // 3600
        minutes = (abs(seconds) % 3600) // 60
return 'UTC%s%02d:%02d' % (sign, hours, minutes)
def get_season_prizes(self, league):
return SeasonPrize.objects \
.filter(season__league=league, seasonprizewinner__player=self) \
.order_by('rank', '-season')
def __str__(self):
if self.rating is None:
return self.lichess_username
else:
return "%s (%d)" % (self.lichess_username, self.rating)
def __lt__(self, other):
return self.lichess_username.lower() < other.lichess_username.lower()
#-------------------------------------------------------------------------------
class PlayerSetting(_BaseModel):
player = models.OneToOneField(Player)
dark_mode = models.BooleanField(default=False)
#-------------------------------------------------------------------------------
class LeagueModerator(_BaseModel):
league = models.ForeignKey(League)
player = select2.fields.ForeignKey(Player, ajax=True, search_field='lichess_username')
is_active = models.BooleanField(default=True)
send_contact_emails = models.BooleanField(default=True)
class Meta:
unique_together = ('league', 'player')
def __str__(self):
return "%s - %s" % (self.league, self.player.lichess_username)
ROUND_CHANGE_OPTIONS = (
('register', 'Register'),
('withdraw', 'Withdraw'),
('half-point-bye', 'Half-Point Bye'),
)
#-------------------------------------------------------------------------------
class PlayerLateRegistration(_BaseModel):
round = models.ForeignKey(Round)
player = select2.fields.ForeignKey(Player, ajax=True, search_field='lichess_username')
retroactive_byes = models.PositiveIntegerField(default=0)
late_join_points = ScoreField(default=0)
class Meta:
unique_together = ('round', 'player')
def perform_registration(self):
with transaction.atomic():
# Set the SeasonPlayer as active
sp, _ = SeasonPlayer.objects.get_or_create(season=self.round.season, player=self.player)
sp.is_active = True
if sp.seed_rating is None:
sp.seed_rating = self.player.rating_for(self.round.season.league)
sp.save()
# Create any retroactive byes (but don't overwrite existing byes/pairings)
rounds = self.round.season.round_set.all()
for i in range(self.retroactive_byes):
round_number = self.round.number - i - 1
round_ = find(rounds, number=round_number)
pairings = round_.loneplayerpairing_set.filter(white=self.player) | round_.loneplayerpairing_set.filter(black=self.player)
byes = round_.playerbye_set.filter(player=self.player)
if pairings.count() == 0 and byes.count() == 0:
PlayerBye.objects.create(round=round_, player=self.player, type='half-point-bye')
# Set the late-join points
score = sp.get_loneplayerscore()
score.late_join_points = max(score.late_join_points, self.late_join_points)
score.save()
def save(self, *args, **kwargs):
super(PlayerLateRegistration, self).save(*args, **kwargs)
if self.round.publish_pairings and not self.round.is_completed:
self.perform_registration()
def clean(self):
if self.round_id and self.round.season.league.competitor_type == 'team':
raise ValidationError('Player late registrations can only be created for lone leagues')
def __str__(self):
return "%s - %s" % (self.round, self.player)
#-------------------------------------------------------------------------------
class PlayerWithdrawal(_BaseModel):
round = models.ForeignKey(Round)
player = select2.fields.ForeignKey(Player, ajax=True, search_field='lichess_username')
class Meta:
unique_together = ('round', 'player')
def perform_withdrawal(self):
with transaction.atomic():
# Set the SeasonPlayer as inactive
sp, _ = SeasonPlayer.objects.get_or_create(season=self.round.season, player=self.player)
sp.is_active = False
sp.save()
# Delete pairings and give opponents byes
for pairing in self.round.loneplayerpairing_set.filter(white=self.player):
PlayerBye.objects.create(round=self.round, player=pairing.black, type='full-point-pairing-bye')
pairing.delete()
for pairing in self.round.loneplayerpairing_set.filter(black=self.player):
PlayerBye.objects.create(round=self.round, player=pairing.white, type='full-point-pairing-bye')
pairing.delete()
def save(self, *args, **kwargs):
super(PlayerWithdrawal, self).save(*args, **kwargs)
if self.round.publish_pairings and not self.round.is_completed:
self.perform_withdrawal()
def clean(self):
if self.round_id and self.round.season.league.competitor_type == 'team':
raise ValidationError('Player withdrawals can only be created for lone leagues')
def __str__(self):
return "%s - %s" % (self.round, self.player)
BYE_TYPE_OPTIONS = (
('full-point-pairing-bye', 'Full-Point Bye (Pairing)'),
('full-point-bye', 'Full-Point Bye'),
('half-point-bye', 'Half-Point Bye'),
('zero-point-bye', 'Zero-Point Bye'),
)
#-------------------------------------------------------------------------------
class PlayerBye(_BaseModel):
round = models.ForeignKey(Round)
player = select2.fields.ForeignKey(Player, ajax=True, search_field='lichess_username')
type = models.CharField(max_length=31, choices=BYE_TYPE_OPTIONS)
player_rank = models.PositiveIntegerField(blank=True, null=True)
player_rating = models.PositiveIntegerField(blank=True, null=True)
class Meta:
unique_together = ('round', 'player')
def __init__(self, *args, **kwargs):
super(PlayerBye, self).__init__(*args, **kwargs)
self.initial_round_id = self.round_id
self.initial_player_id = self.player_id
self.initial_type = self.type
def player_rating_display(self, league=None):
if self.player_rating is not None:
return self.player_rating
else:
if league is None:
league = self.round.season.league
return self.player.rating_for(league)
def refresh_rank(self, rank_dict=None):
        if rank_dict is None:
rank_dict = lone_player_pairing_rank_dict(self.round.season)
self.player_rank = rank_dict.get(self.player_id, None)
def score(self):
if self.type == 'full-point-bye' or self.type == 'full-point-pairing-bye':
return 1
elif self.type == 'half-point-bye':
return 0.5
else:
return 0
def __str__(self):
return "%s - %s" % (self.player, self.get_type_display())
def save(self, *args, **kwargs):
round_changed = self.pk is None or self.round_id != self.initial_round_id
player_changed = self.pk is None or self.player_id != self.initial_player_id
type_changed = self.pk is None or self.type != self.initial_type
if (round_changed or player_changed) and self.round.publish_pairings:
if not self.round.is_completed:
self.refresh_rank()
else:
self.player_rank = None
if player_changed:
self.player_rating = None
super(PlayerBye, self).save(*args, **kwargs)
if (round_changed or player_changed or type_changed) and self.round.is_completed:
self.round.season.calculate_scores()
def delete(self, *args, **kwargs):
round_ = self.round
super(PlayerBye, self).delete(*args, **kwargs)
if round_.is_completed:
round_.season.calculate_scores()
def clean(self):
if self.round_id and self.round.season.league.competitor_type == 'team':
raise ValidationError('Player byes can only be created for lone leagues')
#-------------------------------------------------------------------------------
class Team(_BaseModel):
season = models.ForeignKey(Season)
number = models.PositiveIntegerField(verbose_name='team number')
name = models.CharField(max_length=255, verbose_name='team name')
slack_channel = models.CharField(max_length=255, blank=True)
is_active = models.BooleanField(default=True)
seed_rating = models.PositiveIntegerField(blank=True, null=True)
class Meta:
unique_together = (('season', 'number'), ('season', 'name'))
def get_teamscore(self):
try:
return self.teamscore
except TeamScore.DoesNotExist:
return TeamScore.objects.create(team=self)
def boards(self):
team_members = self.teammember_set.all()
return [(n, find(team_members, board_number=n)) for n in Season.objects.get(pk=self.season_id).board_number_list()]
def average_rating(self, expected_rating=False):
n = 0
total = 0.0
for _, board in self.boards():
if board is not None:
if expected_rating:
rating = board.expected_rating()
else:
rating = board.player.rating_for(self.season.league)
if rating is not None:
n += 1
total += rating
return total / n if n > 0 else None
def get_mean(self, expected_rating=False):
return self.average_rating(expected_rating)
def captain(self):
return self.teammember_set.filter(is_captain=True).first()
def get_teampairing(self, round_):
return (round_.teampairing_set.filter(white_team=self) | round_.teampairing_set.filter(black_team=self)).first()
def get_opponent(self, round_):
team_pairing = self.get_teampairing(round_)
if team_pairing is None:
return None
if team_pairing.white_team != self:
return team_pairing.white_team
if team_pairing.black_team != self:
return team_pairing.black_team
return None
@property
def pairings(self):
return self.pairings_as_white.all() | self.pairings_as_black.all()
def __str__(self):
return "%s - %s" % (self.season, self.name)
BOARD_NUMBER_OPTIONS = (
(1, '1'),
(2, '2'),
(3, '3'),
(4, '4'),
(5, '5'),
(6, '6'),
(7, '7'),
(8, '8'),
)
#-------------------------------------------------------------------------------
class TeamMember(_BaseModel):
team = models.ForeignKey(Team)
player = select2.fields.ForeignKey(Player, ajax=True, search_field='lichess_username')
board_number = models.PositiveIntegerField(choices=BOARD_NUMBER_OPTIONS)
is_captain = models.BooleanField(default=False)
is_vice_captain = models.BooleanField(default=False)
player_rating = models.PositiveIntegerField(null=True, blank=True)
class Meta:
unique_together = ('team', 'board_number')
def __init__(self, *args, **kwargs):
super(TeamMember, self).__init__(*args, **kwargs)
self.initial_player_id = self.player_id
def player_rating_display(self, league=None):
if self.player_rating is not None:
return self.player_rating
else:
if league is None:
league = self.team.season.league
return self.player.rating_for(league)
def expected_rating(self):
try:
sp = SeasonPlayer.objects.get(season=self.team.season, player=self.player)
return sp.expected_rating(self.team.season.league)
except SeasonPlayer.DoesNotExist:
return None
def save(self, *args, **kwargs):
player_changed = self.pk is None or self.player_id != self.initial_player_id
if player_changed:
self.player_rating = None
super(TeamMember, self).save(*args, **kwargs)
# A little trick here to add a corresponding entry to the team model's history when using reversion
self.team.save()
def delete(self, *args, **kwargs):
super(TeamMember, self).delete(*args, **kwargs)
self.team.save()
def clean(self):
if self.team_id and self.player_id and not SeasonPlayer.objects.filter(season=self.team.season, player=self.player).exists():
raise ValidationError('Team member must be a player in the season')
def __str__(self):
return "%s%s" % (self.player, ' (C)' if self.is_captain else ' (V)' if self.is_vice_captain else '')
#-------------------------------------------------------------------------------
class TeamScore(_BaseModel):
team = models.OneToOneField(Team)
match_count = models.PositiveIntegerField(default=0)
match_points = models.PositiveIntegerField(default=0)
game_points = ScoreField(default=0)
playoff_score = models.PositiveIntegerField(default=0)
head_to_head = models.PositiveIntegerField(default=0)
games_won = models.PositiveIntegerField(default=0)
sb_score = ScoreField(default=0)
def match_points_display(self):
return str(self.match_points)
def game_points_display(self):
return "%g" % self.game_points
def pairing_sort_key(self):
return (self.playoff_score, self.match_points, self.game_points, self.head_to_head, self.games_won, self.sb_score, self.team.seed_rating)
def round_scores(self):
white_pairings = self.team.pairings_as_white.all()
black_pairings = self.team.pairings_as_black.all()
for round_ in Round.objects.filter(season_id=self.team.season_id).order_by('number'):
            if not round_.is_completed:
yield None, None, None
continue
points = None
opp_points = None
white_pairing = find(white_pairings, round_id=round_.id)
black_pairing = find(black_pairings, round_id=round_.id)
if white_pairing is not None:
points = white_pairing.white_points
opp_points = white_pairing.black_points
if black_pairing is not None:
points = black_pairing.black_points
opp_points = black_pairing.white_points
yield points, opp_points, round_.number
def cross_scores(self, sorted_teams=None):
if sorted_teams is None:
sorted_teams = Team.objects.filter(season_id=self.team.season_id).order_by('number')
white_pairings = self.team.pairings_as_white.all()
black_pairings = self.team.pairings_as_black.all()
for other_team in sorted_teams:
white_pairing = find(white_pairings, black_team_id=other_team.pk)
black_pairing = find(black_pairings, white_team_id=other_team.pk)
points = None
opp_points = None
round_num = None
if white_pairing is not None and white_pairing.round.is_completed:
points = white_pairing.white_points
opp_points = white_pairing.black_points
round_num = white_pairing.round.number
if black_pairing is not None and black_pairing.round.is_completed:
points = black_pairing.black_points
opp_points = black_pairing.white_points
round_num = black_pairing.round.number
yield other_team.number, points, opp_points, round_num
def __str__(self):
return "%s" % (self.team)
def __lt__(self, other):
return (self.playoff_score, self.match_points, self.game_points, self.head_to_head, self.games_won, self.sb_score) < \
(other.playoff_score, other.match_points, other.game_points, other.head_to_head, other.games_won, other.sb_score)
#-------------------------------------------------------------------------------
class TeamPairing(_BaseModel):
white_team = models.ForeignKey(Team, related_name="pairings_as_white")
black_team = models.ForeignKey(Team, related_name="pairings_as_black")
round = models.ForeignKey(Round)
pairing_order = models.PositiveIntegerField()
white_points = ScoreField(default=0)
white_wins = models.PositiveIntegerField(default=0)
black_points = ScoreField(default=0)
black_wins = models.PositiveIntegerField(default=0)
class Meta:
unique_together = ('white_team', 'black_team', 'round')
def __init__(self, *args, **kwargs):
super(TeamPairing, self).__init__(*args, **kwargs)
self.initial_white_points = self.white_points
self.initial_black_points = self.black_points
def save(self, *args, **kwargs):
points_changed = self.pk is None or self.white_points != self.initial_white_points or self.black_points != self.initial_black_points
super(TeamPairing, self).save(*args, **kwargs)
if points_changed and self.round.is_completed:
self.round.season.calculate_scores()
    def clean(self):
        if self.white_team_id and self.black_team_id and \
                (self.white_team.season != self.round.season or self.black_team.season != self.round.season):
            raise ValidationError('Round and team seasons must match')
def refresh_points(self):
self.white_points = 0
self.black_points = 0
self.white_wins = 0
self.black_wins = 0
for pairing in self.teamplayerpairing_set.all().nocache():
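            # Parity convention: on odd boards the white team's player has the
            # white pieces; on even boards the colors are swapped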
if pairing.board_number % 2 == 1:
self.white_points += pairing.white_score() or 0
self.black_points += pairing.black_score() or 0
if pairing.white_score() == 1:
self.white_wins += 1
if pairing.black_score() == 1:
self.black_wins += 1
else:
self.white_points += pairing.black_score() or 0
self.black_points += pairing.white_score() or 0
if pairing.black_score() == 1:
self.white_wins += 1
if pairing.white_score() == 1:
self.black_wins += 1
def white_points_display(self):
return "%g" % self.white_points
def black_points_display(self):
return "%g" % self.black_points
def season_name(self):
return "%s" % self.round.season.name
def round_number(self):
return "%d" % self.round.number
def white_team_name(self):
return "%s" % self.white_team.name
def black_team_name(self):
return "%s" % self.black_team.name
def __str__(self):
return "%s - %s - %s" % (self.round, self.white_team.name, self.black_team.name)
# Game link structure:
# 1. (Optional) http/s prefix
# 2. (Optional) Subdomain, e.g. "en."
# 3. "lichess.org/"
# 4. The gameid (8 chars)
# 5. (Optional) Extended id for games in progress (4 chars)
# 6. (Optional) Any junk at the end, e.g. "/black", etc.
game_link_regex = re.compile(r'^(https?://)?([a-z]+\.)?lichess\.org/([A-Za-z0-9]{8})([A-Za-z0-9]{4})?([/#\?].*)?$')
game_link_validator = RegexValidator(game_link_regex)
def get_gameid_from_gamelink(gamelink):
if gamelink is None or gamelink == '':
return None
match = game_link_regex.match(gamelink)
if match is None:
return None
return match.group(3)
def get_gamelink_from_gameid(gameid):
return 'https://en.lichess.org/%s' % gameid
def normalize_gamelink(gamelink):
if gamelink == '':
return gamelink, True
gameid = get_gameid_from_gamelink(gamelink)
if gameid is None:
return gamelink, False
return get_gamelink_from_gameid(gameid), True
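# A minimal illustrative check of the helpers above (the game ids are made up;
# they only need to match the regex shape). Call manually if desired:
def _gamelink_examples():
    assert get_gameid_from_gamelink('https://en.lichess.org/AbCd1234/black') == 'AbCd1234'
    assert get_gameid_from_gamelink('not a game link') is None
    assert normalize_gamelink('lichess.org/AbCd1234') == ('https://en.lichess.org/AbCd1234', True)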
RESULT_OPTIONS = (
('1-0', '1-0'),
('1/2-1/2', '\u00BD-\u00BD'),
('0-1', '0-1'),
('1X-0F', '1X-0F'),
('1/2Z-1/2Z', '\u00BDZ-\u00BDZ'),
('0F-1X', '0F-1X'),
('0F-0F', '0F-0F'),
)
TV_STATE_OPTIONS = (
('default', 'Default'),
('hide', 'Hide'),
)
#-------------------------------------------------------------------------------
class PlayerPairing(_BaseModel):
white = select2.fields.ForeignKey(Player, ajax=True, search_field='lichess_username', blank=True, null=True, related_name="pairings_as_white")
black = select2.fields.ForeignKey(Player, ajax=True, search_field='lichess_username', blank=True, null=True, related_name="pairings_as_black")
white_rating = models.PositiveIntegerField(blank=True, null=True)
black_rating = models.PositiveIntegerField(blank=True, null=True)
result = models.CharField(max_length=16, blank=True, choices=RESULT_OPTIONS)
game_link = models.URLField(max_length=1024, blank=True, validators=[game_link_validator])
scheduled_time = models.DateTimeField(blank=True, null=True)
colors_reversed = models.BooleanField(default=False)
tv_state = models.CharField(max_length=31, default='default', choices=TV_STATE_OPTIONS)
def __init__(self, *args, **kwargs):
super(PlayerPairing, self).__init__(*args, **kwargs)
self.initial_result = '' if self.pk is None else self.result
self.initial_white_id = None if self.pk is None else self.white_id
self.initial_black_id = None if self.pk is None else self.black_id
self.initial_game_link = '' if self.pk is None else self.game_link
self.initial_scheduled_time = None if self.pk is None else self.scheduled_time
def white_rating_display(self, league=None):
if self.white_rating is not None:
return self.white_rating
elif self.white is not None:
if league is None:
round_ = self.get_round()
if round_ is not None:
league = round_.season.league
return self.white.rating_for(league)
else:
return None
def black_rating_display(self, league=None):
if self.black_rating is not None:
return self.black_rating
elif self.black is not None:
if league is None:
round_ = self.get_round()
if round_ is not None:
league = round_.season.league
return self.black.rating_for(league)
else:
return None
def white_display(self):
if not self.white:
return '?'
if self.white_rating:
return '%s (%d)' % (self.white.lichess_username, self.white_rating)
else:
return self.white
def black_display(self):
if not self.black:
return '?'
if self.black_rating:
return '%s (%d)' % (self.black.lichess_username, self.black_rating)
else:
return self.black
def white_score(self):
if self.result == '1-0' or self.result == '1X-0F':
return 1 if not self.colors_reversed else 0
elif self.result == '0-1' or self.result == '0F-1X' or self.result == '0F-0F':
return 0 if not self.colors_reversed else 1
elif self.result == '1/2-1/2' or self.result == '1/2Z-1/2Z':
return 0.5
return None
def black_score(self):
if self.result == '0-1' or self.result == '0F-1X':
return 1 if not self.colors_reversed else 0
elif self.result == '1-0' or self.result == '1X-0F' or self.result == '0F-0F':
return 0 if not self.colors_reversed else 1
elif self.result == '1/2-1/2' or self.result == '1/2Z-1/2Z':
return 0.5
return None
def result_display(self):
if not self.result:
return ''
result = self.result.replace('1/2', '\u00BD')
if self.colors_reversed:
result += '*'
return result
def game_played(self):
return self.result in ('1-0', '1/2-1/2', '0-1')
def game_id(self):
return get_gameid_from_gamelink(self.game_link)
def get_round(self):
if hasattr(self, 'teamplayerpairing'):
return self.teamplayerpairing.team_pairing.round
if hasattr(self, 'loneplayerpairing'):
return self.loneplayerpairing.round
return None
def get_player_presence(self, player):
presence = self.playerpresence_set.filter(player=player).first()
if not presence:
presence = PlayerPresence.objects.create(pairing=self, player=player, round=self.get_round())
return presence
def __str__(self):
return "%s - %s" % (self.white_display(), self.black_display())
def save(self, *args, **kwargs):
result_changed = self.result != self.initial_result
white_changed = self.white_id != self.initial_white_id
black_changed = self.black_id != self.initial_black_id
game_link_changed = self.game_link != self.initial_game_link
scheduled_time_changed = self.scheduled_time != self.initial_scheduled_time
if game_link_changed:
self.game_link, _ = normalize_gamelink(self.game_link)
self.tv_state = 'default'
if white_changed or black_changed or game_link_changed:
self.white_rating = None
self.black_rating = None
super(PlayerPairing, self).save(*args, **kwargs)
if hasattr(self, 'teamplayerpairing') and result_changed:
self.teamplayerpairing.team_pairing.refresh_points()
self.teamplayerpairing.team_pairing.save()
if hasattr(self, 'loneplayerpairing'):
lpp = LonePlayerPairing.objects.nocache().get(pk=self.loneplayerpairing.pk)
if result_changed and lpp.round.is_completed:
lpp.round.season.calculate_scores()
# If the players for a PlayerPairing in the current round are edited, then we can update the player ranks
if (white_changed or black_changed) and lpp.round.publish_pairings and not lpp.round.is_completed:
lpp.refresh_ranks()
lpp.save()
# If the players for a PlayerPairing in a previous round are edited, then the player ranks will be out of
# date but we can't recalculate them
if white_changed and lpp.round.is_completed:
lpp.white_rank = None
lpp.save()
if black_changed and lpp.round.is_completed:
lpp.black_rank = None
lpp.save()
if result_changed and (result_is_forfeit(self.result) or result_is_forfeit(self.initial_result)):
signals.pairing_forfeit_changed.send(sender=self.__class__, instance=self)
# Update scheduled notifications based on the scheduled time
if scheduled_time_changed:
league = self.get_round().season.league
# Calling the save method triggers the logic to recreate notifications
white_setting = PlayerNotificationSetting.get_or_default(player_id=self.white_id, type='before_game_time', league=league)
white_setting.save()
black_setting = PlayerNotificationSetting.get_or_default(player_id=self.black_id, type='before_game_time', league=league)
black_setting.save()
if white_changed and self.initial_white_id:
old_white_setting = PlayerNotificationSetting.get_or_default(player_id=self.initial_white_id, type='before_game_time', league=league)
old_white_setting.save()
if black_changed and self.initial_black_id:
old_black_setting = PlayerNotificationSetting.get_or_default(player_id=self.initial_black_id, type='before_game_time', league=league)
old_black_setting.save()
def delete(self, *args, **kwargs):
team_pairing = None
round_ = None
if hasattr(self, 'teamplayerpairing'):
team_pairing = self.teamplayerpairing.team_pairing
if hasattr(self, 'loneplayerpairing'):
lpp = LonePlayerPairing.objects.nocache().get(pk=self.loneplayerpairing.pk)
if lpp.round.is_completed:
round_ = lpp.round
super(PlayerPairing, self).delete(*args, **kwargs)
        if team_pairing is not None:
            # Use the reference captured before deletion; the reverse relation on
            # self is no longer reliable after delete()
            team_pairing.refresh_points()
            team_pairing.save()
if round_ is not None:
round_.season.calculate_scores()
def result_is_forfeit(result):
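    # X = forfeit win, F = forfeit loss, Z = scheduling draw (cf. RESULT_OPTIONS)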
return result.endswith(('X', 'F', 'Z'))
#-------------------------------------------------------------------------------
class TeamPlayerPairing(PlayerPairing):
team_pairing = models.ForeignKey(TeamPairing)
board_number = models.PositiveIntegerField(choices=BOARD_NUMBER_OPTIONS)
class Meta:
unique_together = ('team_pairing', 'board_number')
def white_team(self):
return self.team_pairing.white_team if self.board_number % 2 == 1 else self.team_pairing.black_team
def black_team(self):
return self.team_pairing.black_team if self.board_number % 2 == 1 else self.team_pairing.white_team
def white_team_player(self):
return self.white if self.board_number % 2 == 1 else self.black
def black_team_player(self):
return self.black if self.board_number % 2 == 1 else self.white
def white_team_rating(self, league=None):
return self.white_rating_display(league) if self.board_number % 2 == 1 else self.black_rating_display(league)
def black_team_rating(self, league=None):
return self.black_rating_display(league) if self.board_number % 2 == 1 else self.white_rating_display(league)
def white_team_color(self):
return 'white' if self.board_number % 2 == 1 else 'black'
def black_team_color(self):
return 'black' if self.board_number % 2 == 1 else 'white'
def white_team_score(self):
return self.white_score() if self.board_number % 2 == 1 else self.black_score()
def black_team_score(self):
return self.black_score() if self.board_number % 2 == 1 else self.white_score()
def white_team_match_score(self):
return self.team_pairing.white_points if self.board_number % 2 == 1 else self.team_pairing.black_points
def black_team_match_score(self):
return self.team_pairing.black_points if self.board_number % 2 == 1 else self.team_pairing.white_points
def white_team_name(self):
return "%s" % self.white_team().name
def black_team_name(self):
return "%s" % self.black_team().name
def season_name(self):
return "%s" % self.team_pairing.round.season.name
def round_number(self):
return "%d" % self.team_pairing.round.number
#-------------------------------------------------------------------------------
class LonePlayerPairing(PlayerPairing):
round = models.ForeignKey(Round)
pairing_order = models.PositiveIntegerField()
white_rank = models.PositiveIntegerField(blank=True, null=True)
black_rank = models.PositiveIntegerField(blank=True, null=True)
def refresh_ranks(self, rank_dict=None):
        if rank_dict is None:
rank_dict = lone_player_pairing_rank_dict(self.round.season)
self.white_rank = rank_dict.get(self.white_id, None)
self.black_rank = rank_dict.get(self.black_id, None)
REGISTRATION_STATUS_OPTIONS = (
('pending', 'Pending'),
('approved', 'Approved'),
('rejected', 'Rejected'),
)
PREVIOUS_SEASON_ALTERNATE_OPTIONS = (
('alternate', 'Yes, I was an alternate at the end of the last season.'),
('alternate_to_full_time', 'Yes, but I was able to find a consistent team (did not simply fill in for a week or two).'),
('full_time', 'No, I was not an alternate for the last season. I played the season.'),
('new', 'No, I was not an alternate for the last season. I am a new member / I took last season off.'),
)
ALTERNATE_PREFERENCE_OPTIONS = (
('alternate', 'Alternate'),
('full_time', 'Full Time'),
)
#-------------------------------------------------------------------------------
class Registration(_BaseModel):
season = models.ForeignKey(Season)
user = models.ForeignKey(User, blank=True, null=True)
status = models.CharField(max_length=255, choices=REGISTRATION_STATUS_OPTIONS)
status_changed_by = models.CharField(blank=True, max_length=255)
status_changed_date = models.DateTimeField(blank=True, null=True)
lichess_username = models.CharField(max_length=255, validators=[username_validator])
slack_username = models.CharField(max_length=255, blank=True)
email = models.EmailField(max_length=255)
classical_rating = models.PositiveIntegerField(verbose_name='rating')
peak_classical_rating = models.PositiveIntegerField(blank=True, null=True, verbose_name='peak rating')
has_played_20_games = models.BooleanField()
already_in_slack_group = models.BooleanField()
previous_season_alternate = models.CharField(blank=True, max_length=255, choices=PREVIOUS_SEASON_ALTERNATE_OPTIONS)
can_commit = models.BooleanField()
friends = models.CharField(blank=True, max_length=1023)
avoid = models.CharField(blank=True, max_length=1023)
agreed_to_rules = models.BooleanField()
alternate_preference = models.CharField(blank=True, max_length=255, choices=ALTERNATE_PREFERENCE_OPTIONS)
section_preference = models.ForeignKey(Section, on_delete=models.SET_NULL, blank=True, null=True)
weeks_unavailable = models.CharField(blank=True, max_length=255)
validation_ok = models.NullBooleanField(blank=True, null=True, default=None)
validation_warning = models.BooleanField(default=False)
    def save(self, *args, **kwargs):
        if self.user_id:
            self.lichess_username = self.user.username
        super(Registration, self).save(*args, **kwargs)
def __str__(self):
return "%s" % (self.lichess_username)
def previous_registrations(self):
return Registration.objects.filter(user=self.user, date_created__lt=self.date_created)
def other_seasons(self):
return SeasonPlayer.objects.filter(player__user=self.user).exclude(season=self.season)
def player(self):
return Player.objects.get(user=self.user)
@classmethod
def can_register(cls, user, season):
if not season or not season.registration_open:
return False
return not cls.was_rejected(user, season)
@classmethod
def was_rejected(cls, user, season):
reg = cls.get_latest_registration(user, season)
return reg and reg.status == 'rejected'
@classmethod
def get_latest_registration(cls, user, season):
return (cls.objects
.filter(user=user, season=season)
.order_by('-date_created')
.first())
@classmethod
def is_registered(cls, user, season):
return cls.objects.filter(user=user, season=season).exists()
#-------------------------------------------------------------------------------
class SeasonPlayer(_BaseModel):
season = models.ForeignKey(Season)
player = select2.fields.ForeignKey(Player, ajax=True, search_field='lichess_username')
registration = models.ForeignKey(Registration, on_delete=models.SET_NULL, blank=True, null=True)
is_active = models.BooleanField(default=True)
games_missed = models.PositiveIntegerField(default=0)
unresponsive = models.BooleanField(default=False)
seed_rating = models.PositiveIntegerField(blank=True, null=True)
final_rating = models.PositiveIntegerField(blank=True, null=True)
class Meta:
unique_together = ('season', 'player')
def __init__(self, *args, **kwargs):
super(SeasonPlayer, self).__init__(*args, **kwargs)
self.initial_unresponsive = self.unresponsive
self.initial_player_id = self.player_id
def player_rating_display(self, league=None):
if self.final_rating is not None:
return self.final_rating
else:
if league is None:
league = self.season.league
return self.player.rating_for(league)
def save(self, *args, **kwargs):
unresponsive_changed = self.pk is None or self.unresponsive != self.initial_unresponsive
player_changed = self.pk is None or self.player_id != self.initial_player_id
        if player_changed:
            # Clear the cached rating when the player changes; SeasonPlayer has no
            # player_rating field, so the cached value here is final_rating
            self.final_rating = None
if unresponsive_changed and self.unresponsive and hasattr(self, 'alternate'):
alt = self.alternate
current_date = timezone.now()
if alt.priority_date_override is None or alt.priority_date_override < current_date:
alt.priority_date_override = current_date
alt.save()
super(SeasonPlayer, self).save(*args, **kwargs)
def expected_rating(self, league=None):
rating = self.player.rating_for(league)
if rating is None:
return None
if self.registration is not None:
peak = max(self.registration.peak_classical_rating or 0, rating)
return (rating + peak) / 2
return rating
def seed_rating_display(self, league=None):
if self.seed_rating is not None:
return self.seed_rating
else:
if league is None:
league = self.season.league
return self.player.rating_for(league)
@property
def card_color(self):
if self.games_missed >= 2:
return 'red'
elif self.games_missed == 1:
return 'yellow'
else:
return None
def get_loneplayerscore(self):
try:
return self.loneplayerscore
except LonePlayerScore.DoesNotExist:
return LonePlayerScore.objects.create(season_player=self)
def __str__(self):
return "%s - %s" % (self.season, self.player)
#-------------------------------------------------------------------------------
class LonePlayerScore(_BaseModel):
season_player = models.OneToOneField(SeasonPlayer)
points = ScoreField(default=0)
late_join_points = ScoreField(default=0)
tiebreak1 = ScoreField(default=0)
tiebreak2 = ScoreField(default=0)
tiebreak3 = ScoreField(default=0)
tiebreak4 = ScoreField(default=0)
acceleration_group = models.PositiveIntegerField(default=0)
perf_rating = models.PositiveIntegerField(blank=True, null=True)
def round_scores(self, rounds, player_number_dict, white_pairings_dict, black_pairings_dict, byes_dict, include_current=False):
white_pairings = white_pairings_dict.get(self.season_player.player, [])
black_pairings = black_pairings_dict.get(self.season_player.player, [])
byes = byes_dict.get(self.season_player.player, [])
cumul_score = 0.0
for round_ in rounds:
if not round_.is_completed and (not include_current or not round_.publish_pairings):
yield (None, None, None, None)
continue
result_type = None
opponent = None
color = None
white_pairing = find(white_pairings, round_id=round_.id)
black_pairing = find(black_pairings, round_id=round_.id)
bye = find(byes, round_id=round_.id)
if white_pairing is not None and white_pairing.black is not None:
opponent = white_pairing.black
score = white_pairing.white_score()
if white_pairing.game_played() or score is None:
# Normal result
color = 'W'
result_type = 'W' if score == 1 else 'D' if score == 0.5 else 'L' if score == 0 else 'F'
else:
# Special result
result_type = 'X' if score == 1 else 'Z' if score == 0.5 else 'F' if score == 0 else ''
elif black_pairing is not None and black_pairing.white is not None:
opponent = black_pairing.white
score = black_pairing.black_score()
if black_pairing.game_played() or score is None:
# Normal result
color = 'B'
result_type = 'W' if score == 1 else 'D' if score == 0.5 else 'L' if score == 0 else 'F'
else:
# Special result
result_type = 'X' if score == 1 else 'Z' if score == 0.5 else 'F' if score == 0 else ''
elif bye is not None:
score = bye.score()
result_type = 'B' if score == 1 else 'H' if score == 0.5 else 'U'
else:
score = 0
result_type = 'U'
if score is not None:
cumul_score += score
yield (result_type, player_number_dict.get(opponent, 0), color, cumul_score)
def pairing_points(self):
return self.points + self.late_join_points
def pairing_points_display(self):
return "%.1f" % (self.points + self.late_join_points)
def final_standings_points_display(self):
return "%.1f" % self.points
def late_join_points_display(self):
return "%.1f" % self.late_join_points
def tiebreak1_display(self):
return "%g" % self.tiebreak1
def tiebreak2_display(self):
return "%g" % self.tiebreak2
def tiebreak3_display(self):
return "%g" % self.tiebreak3
def tiebreak4_display(self):
return "%g" % self.tiebreak4
def pairing_sort_key(self):
return (self.points + self.late_join_points, self.season_player.player_rating_display() or 0)
def intermediate_standings_sort_key(self):
return (self.points + self.late_join_points, self.tiebreak1, self.tiebreak2, self.tiebreak3, self.tiebreak4, self.season_player.player_rating_display() or 0)
def final_standings_sort_key(self):
return (self.points, self.tiebreak1, self.tiebreak2, self.tiebreak3, self.tiebreak4, self.season_player.player_rating_display() or 0)
def __str__(self):
return "%s" % (self.season_player)
def lone_player_pairing_rank_dict(season):
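    # Rank all players in the season by pairing sort key, best first (1-based)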
raw_player_scores = LonePlayerScore.objects.filter(season_player__season=season) \
.select_related('season_player__season__league', 'season_player__player').nocache()
player_scores = list(enumerate(sorted(raw_player_scores, key=lambda s: s.pairing_sort_key(), reverse=True), 1))
return {p.season_player.player_id: n for n, p in player_scores}
#-------------------------------------------------------------------------------
class PlayerAvailability(_BaseModel):
round = models.ForeignKey(Round)
player = select2.fields.ForeignKey(Player, ajax=True, search_field='lichess_username')
is_available = models.BooleanField(default=True)
class Meta:
verbose_name_plural = 'player availabilities'
def __str__(self):
return "%s" % self.player
ALTERNATE_STATUS_OPTIONS = (
('waiting', 'Waiting'),
('contacted', 'Contacted'),
('accepted', 'Accepted'),
('declined', 'Declined'),
('unresponsive', 'Unresponsive'),
)
#-------------------------------------------------------------------------------
class Alternate(_BaseModel):
season_player = models.OneToOneField(SeasonPlayer)
board_number = models.PositiveIntegerField(choices=BOARD_NUMBER_OPTIONS)
priority_date_override = models.DateTimeField(null=True, blank=True)
status = models.CharField(blank=True, default='waiting', max_length=31, choices=ALTERNATE_STATUS_OPTIONS)
last_contact_date = models.DateTimeField(null=True, blank=True)
player_rating = models.PositiveIntegerField(null=True, blank=True)
def __init__(self, *args, **kwargs):
super(Alternate, self).__init__(*args, **kwargs)
self.initial_season_player_id = self.season_player_id
self.initial_status = self.status
def player_rating_display(self, league=None):
if self.player_rating is not None:
return self.player_rating
else:
if league is None:
league = self.season_player.season.league
return self.season_player.player.rating_for(league)
def save(self, *args, **kwargs):
season_player_changed = self.pk is None or self.season_player_id != self.initial_season_player_id
status_changed = self.pk is None or self.status != self.initial_status
if season_player_changed:
self.player_rating = None
if status_changed and self.status == 'unresponsive':
current_date = timezone.now()
if self.priority_date_override is None or self.priority_date_override < current_date:
self.priority_date_override = current_date
super(Alternate, self).save(*args, **kwargs)
def update_board_number(self):
season = self.season_player.season
player = self.season_player.player
buckets = AlternateBucket.objects.filter(season=season)
if len(buckets) == season.boards and player.rating_for(season.league) is not None:
for b in buckets:
if b.contains(player.rating_for(season.league)):
self.board_number = b.board_number
self.save()
def priority_date(self):
return self.priority_date_and_reason()[0]
def priority_date_and_reason(self):
if self.priority_date_override is not None:
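            # Tuple comparison: the later datetime wins and carries its reason along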
return max((self.priority_date_override, 'Was unresponsive'), self._priority_date_without_override())
return self._priority_date_without_override()
def _priority_date_without_override(self):
most_recent_assign = AlternateAssignment.objects.filter(team__season_id=self.season_player.season_id, player_id=self.season_player.player_id) \
.order_by('-round__start_date').first()
if most_recent_assign is not None:
round_date = most_recent_assign.round.end_date
if round_date is not None:
return (round_date, 'Assigned game')
if self.season_player.registration is not None:
return (self.season_player.registration.date_created, 'Registered')
return (self.date_created, 'Made alternate')
def __str__(self):
return "%s" % self.season_player
def __lt__(self, other):
return self.priority_date() < other.priority_date()
#-------------------------------------------------------------------------------
class AlternateAssignment(_BaseModel):
round = models.ForeignKey(Round)
team = models.ForeignKey(Team)
board_number = models.PositiveIntegerField(choices=BOARD_NUMBER_OPTIONS)
player = select2.fields.ForeignKey(Player, ajax=True, search_field='lichess_username')
replaced_player = select2.fields.ForeignKey(Player, ajax=True, search_field='lichess_username', null=True, blank=True, on_delete=models.SET_NULL, related_name='alternate_replacements')
class Meta:
unique_together = ('round', 'team', 'board_number')
def __init__(self, *args, **kwargs):
super(AlternateAssignment, self).__init__(*args, **kwargs)
self.initial_player_id = self.player_id
self.initial_team_id = self.team_id
self.initial_board_number = self.board_number
def clean(self):
if self.round_id and self.team_id and self.round.season_id != self.team.season_id:
raise ValidationError('Round and team seasons must match')
if self.team_id and self.player_id and not SeasonPlayer.objects.filter(season=self.team.season, player=self.player).exists():
raise ValidationError('Assigned player must be a player in the season')
def save(self, *args, **kwargs):
if self.replaced_player is None:
tm = TeamMember.objects.filter(team=self.team, board_number=self.board_number).first()
if tm is not None:
self.replaced_player = tm.player
super(AlternateAssignment, self).save(*args, **kwargs)
# Find and update any current pairings
white_pairing = self.team.pairings_as_white.filter(round=self.round).first()
if white_pairing is not None:
pairing = white_pairing.teamplayerpairing_set.filter(board_number=self.board_number).nocache().first()
if pairing is not None:
if self.board_number % 2 == 1:
pairing.white = self.player
else:
pairing.black = self.player
pairing.save()
black_pairing = self.team.pairings_as_black.filter(round=self.round).first()
if black_pairing is not None:
pairing = black_pairing.teamplayerpairing_set.filter(board_number=self.board_number).nocache().first()
if pairing is not None:
if self.board_number % 2 == 1:
pairing.black = self.player
else:
pairing.white = self.player
pairing.save()
def __str__(self):
return "%s - %s - Board %d" % (self.round, self.team.name, self.board_number)
#-------------------------------------------------------------------------------
class AlternateBucket(_BaseModel):
season = models.ForeignKey(Season)
board_number = models.PositiveIntegerField(choices=BOARD_NUMBER_OPTIONS)
min_rating = models.PositiveIntegerField(null=True, blank=True)
max_rating = models.PositiveIntegerField(null=True, blank=True)
class Meta:
unique_together = ('season', 'board_number')
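    # A bucket is the half-open rating interval (min_rating, max_rating];
    # a None bound means unbounded on that side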
def contains(self, rating):
if rating is None:
return self.min_rating is None
return (self.min_rating is None or rating > self.min_rating) and (self.max_rating is None or rating <= self.max_rating)
def __str__(self):
return "Board %d (%s, %s]" % (self.board_number, self.min_rating, self.max_rating)
def create_api_token():
return get_random_string(length=32)
ALTERNATE_SEARCH_STATUS_OPTIONS = (
('started', 'Started'),
('all_contacted', 'All alternates contacted'),
('completed', 'Completed'),
('cancelled', 'Cancelled'),
('failed', 'Failed'),
)
#-------------------------------------------------------------------------------
class AlternateSearch(_BaseModel):
round = models.ForeignKey(Round)
team = models.ForeignKey(Team)
board_number = models.PositiveIntegerField(choices=BOARD_NUMBER_OPTIONS)
is_active = models.BooleanField(default=True)
status = models.CharField(blank=True, max_length=31, choices=ALTERNATE_SEARCH_STATUS_OPTIONS)
last_alternate_contact_date = models.DateTimeField(blank=True, null=True)
class Meta:
unique_together = ('round', 'team', 'board_number')
def clean(self):
if self.round_id and self.team_id and self.round.season_id != self.team.season_id:
raise ValidationError('Round and team seasons must match')
def still_needs_alternate(self):
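        # Two cases: once pairings are published, inspect the live pairing;
        # otherwise check the would-be player's availability directly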
if self.round.publish_pairings:
team_pairing = self.team.get_teampairing(self.round)
player_pairing = TeamPlayerPairing.objects.filter(team_pairing=team_pairing, board_number=self.board_number, result='', game_link='').nocache().first()
return player_pairing is not None and \
(player_pairing.white_team() == self.team and (not player_pairing.white or not player_pairing.white.is_available_for(self.round)) or \
player_pairing.black_team() == self.team and (not player_pairing.black or not player_pairing.black.is_available_for(self.round)))
else:
player = None
aa = AlternateAssignment.objects.filter(round=self.round, team=self.team, board_number=self.board_number).first()
if aa is not None:
player = aa.player
else:
team_member = TeamMember.objects.filter(team=self.team, board_number=self.board_number).first()
if team_member is not None:
player = team_member.player
return player is not None and not player.is_available_for(self.round)
def __str__(self):
return "%s - %s - Board %d" % (self.round, self.team.name, self.board_number)
#-------------------------------------------------------------------------------
class AlternatesManagerSetting(_BaseModel):
league = models.OneToOneField(League)
is_active = models.BooleanField(default=True)
contact_interval = models.DurationField(default=timedelta(hours=8), help_text='How long before the next alternate will be contacted during the round.')
unresponsive_interval = models.DurationField(default=timedelta(hours=24), help_text='How long after being contacted until an alternate will be marked as unresponsive.')
rating_flex = models.PositiveIntegerField(default=0, help_text='How far out of a board\'s rating range an alternate can be if it helps alternate balance.')
    contact_before_round_start = models.BooleanField(default=True, help_text='Whether to search for alternates before the pairings are published. Has no effect for round 1.')
    contact_offset_before_round_start = models.DurationField(default=timedelta(hours=48), help_text='How long before the round starts we should start searching for alternates. Also ends the previous round\'s searches early.')
contact_interval_before_round_start = models.DurationField(default=timedelta(hours=12), help_text='How long before the next alternate will be contacted, if the round hasn\'t started yet.')
def clean(self):
if self.league_id and self.league.competitor_type != 'team':
raise ValidationError('Alternates manager settings can only be created for team leagues')
def __str__(self):
return "%s" % (self.league)
#-------------------------------------------------------------------------------
class SeasonPrize(_BaseModel):
season = models.ForeignKey(Season)
rank = models.PositiveIntegerField()
max_rating = models.PositiveIntegerField(null=True, blank=True)
class Meta:
unique_together = ('season', 'rank', 'max_rating')
def __str__(self):
if self.max_rating is not None:
return '%s - U%d #%d' % (self.season, self.max_rating, self.rank)
else:
return '%s - #%d' % (self.season, self.rank)
#-------------------------------------------------------------------------------
class SeasonPrizeWinner(_BaseModel):
season_prize = models.ForeignKey(SeasonPrize)
player = select2.fields.ForeignKey(Player, ajax=True, search_field='lichess_username')
class Meta:
unique_together = ('season_prize', 'player')
def __str__(self):
return '%s - %s' % (self.season_prize, self.player)
#-------------------------------------------------------------------------------
class GameNomination(_BaseModel):
season = models.ForeignKey(Season)
nominating_player = select2.fields.ForeignKey(Player, ajax=True, search_field='lichess_username')
game_link = models.URLField(validators=[game_link_validator])
pairing = models.ForeignKey(PlayerPairing, blank=True, null=True, on_delete=models.SET_NULL)
def __str__(self):
return '%s - %s' % (self.season, self.nominating_player)
#-------------------------------------------------------------------------------
class GameSelection(_BaseModel):
season = models.ForeignKey(Season)
game_link = models.URLField(validators=[game_link_validator])
pairing = models.ForeignKey(PlayerPairing, blank=True, null=True, on_delete=models.SET_NULL)
class Meta:
unique_together = ('season', 'game_link')
def __str__(self):
return '%s - %s' % (self.season, self.game_link)
class AvailableTime(_BaseModel):
league = models.ForeignKey(League)
player = select2.fields.ForeignKey(Player, ajax=True, search_field='lichess_username')
time = models.DateTimeField()
#-------------------------------------------------------------------------------
class NavItem(_BaseModel):
league = models.ForeignKey(League)
parent = models.ForeignKey('self', blank=True, null=True)
order = models.PositiveIntegerField()
text = models.CharField(max_length=255)
path = models.CharField(max_length=1023, blank=True)
league_relative = models.BooleanField(default=False)
season_relative = models.BooleanField(default=False)
append_separator = models.BooleanField(default=False)
def __str__(self):
return '%s - %s' % (self.league, self.text)
#-------------------------------------------------------------------------------
class ApiKey(_BaseModel):
name = models.CharField(max_length=255, unique=True)
secret_token = models.CharField(max_length=255, unique=True, default=create_api_token)
def __str__(self):
return self.name
#-------------------------------------------------------------------------------
class PrivateUrlAuth(_BaseModel):
# Note: Could separate the one-time-URL and timed-auth portions into separate models at some point in the future
authenticated_user = models.CharField(max_length=255, validators=[username_validator])
secret_token = models.CharField(max_length=255, unique=True, default=create_api_token)
expires = models.DateTimeField()
used = models.BooleanField(default=False)
def is_expired(self):
return self.expires < timezone.now()
def __str__(self):
return self.authenticated_user
#-------------------------------------------------------------------------------
class LoginToken(_BaseModel):
lichess_username = models.CharField(max_length=255, blank=True, validators=[username_validator])
username_hint = models.CharField(max_length=255, blank=True)
slack_user_id = models.CharField(max_length=255, blank=True)
secret_token = models.CharField(max_length=255, unique=True, default=create_api_token)
mail_id = models.CharField(max_length=255, blank=True)
source_ip = models.GenericIPAddressField(null=True, blank=True)
expires = models.DateTimeField()
used = models.BooleanField(default=False)
def is_expired(self):
return self.expires < timezone.now()
def __str__(self):
return self.lichess_username or self.slack_user_id
#-------------------------------------------------------------------------------
class Document(_BaseModel):
name = models.CharField(max_length=255)
content = RichTextUploadingField()
allow_editors = models.BooleanField(default=False, verbose_name='Allow designated editors')
owner = select2.fields.ForeignKey(User, ajax=True, search_field='username', limit_choices_to=models.Q(is_staff=True))
def owned_by(self, user):
return self.owner == user
def __str__(self):
return self.name
LEAGUE_DOCUMENT_TYPES = (
('faq', 'FAQ'),
('rules', 'Rules'),
('intro', 'Intro'),
('slack-welcome', 'Slack Welcome'),
)
#-------------------------------------------------------------------------------
class LeagueDocument(_BaseModel):
league = models.ForeignKey(League)
document = models.OneToOneField(Document)
tag = models.SlugField(help_text='The document will be accessible at /{league_tag}/document/{document_tag}/')
type = models.CharField(blank=True, max_length=255, choices=LEAGUE_DOCUMENT_TYPES)
class Meta:
unique_together = ('league', 'tag')
def clean(self):
        if SeasonDocument.objects.filter(document_id=self.document_id).exists():
raise ValidationError('Document already belongs to a season')
def __str__(self):
return self.document.name
SEASON_DOCUMENT_TYPES = (
('links', 'Links'),
)
#-------------------------------------------------------------------------------
class SeasonDocument(_BaseModel):
season = models.ForeignKey(Season)
document = models.OneToOneField(Document)
tag = models.SlugField(help_text='The document will be accessible at /{league_tag}/season/{season_tag}/document/{document_tag}/')
type = models.CharField(blank=True, max_length=255, choices=SEASON_DOCUMENT_TYPES)
class Meta:
unique_together = ('season', 'tag')
def clean(self):
        if LeagueDocument.objects.filter(document_id=self.document_id).exists():
raise ValidationError('Document already belongs to a league')
def __str__(self):
return self.document.name
LEAGUE_CHANNEL_TYPES = (
('mod', 'Mods'),
('captains', 'Captains'),
('scheduling', 'Scheduling'),
)
#-------------------------------------------------------------------------------
class LeagueChannel(_BaseModel):
    league = models.ForeignKey(League)
type = models.CharField(max_length=255, choices=LEAGUE_CHANNEL_TYPES)
slack_channel = models.CharField(max_length=255)
slack_channel_id = models.CharField(max_length=255, blank=True)
send_messages = models.BooleanField(default=True)
class Meta:
unique_together = ('league', 'slack_channel', 'type')
def channel_link(self):
if not self.slack_channel_id:
return self.slack_channel
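        # Builds a Slack link like '<#C12345678|channel-name>' from '#channel-name'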
return '<%s%s|%s>' % (self.slack_channel[0], self.slack_channel_id, self.slack_channel[1:])
def __str__(self):
return '%s - %s' % (self.league, self.get_type_display())
SCHEDULED_EVENT_TYPES = (
('notify_mods_unscheduled', 'Notify mods of unscheduled games'),
('notify_mods_no_result', 'Notify mods of games without results'),
('notify_mods_pending_regs', 'Notify mods of pending registrations'),
('start_round_transition', 'Start round transition'),
('notify_players_unscheduled', 'Notify players of unscheduled games'),
('notify_players_game_time', 'Notify players of their game time'),
('automod_unresponsive', 'Auto-mod unresponsive players'),
('automod_noshow', 'Auto-mod no-shows'),
)
SCHEDULED_EVENT_RELATIVE_TO = (
('round_start', 'Round start'),
('round_end', 'Round end'),
('game_scheduled_time', 'Game scheduled time'),
)
#-------------------------------------------------------------------------------
class ScheduledEvent(_BaseModel):
league = models.ForeignKey(League, blank=True, null=True)
season = models.ForeignKey(Season, blank=True, null=True)
type = models.CharField(max_length=255, choices=SCHEDULED_EVENT_TYPES)
offset = models.DurationField()
relative_to = models.CharField(max_length=255, choices=SCHEDULED_EVENT_RELATIVE_TO)
last_run = models.DateTimeField(blank=True, null=True)
def __str__(self):
return '%s' % (self.get_type_display())
def run(self, obj):
self.last_run = timezone.now()
self.save()
if self.type == 'notify_mods_unscheduled' and isinstance(obj, Round):
signals.notify_mods_unscheduled.send(sender=self.__class__, round_=obj)
elif self.type == 'notify_mods_no_result' and isinstance(obj, Round):
signals.notify_mods_no_result.send(sender=self.__class__, round_=obj)
elif self.type == 'notify_mods_pending_regs' and isinstance(obj, Round):
signals.notify_mods_pending_regs.send(sender=self.__class__, round_=obj)
elif self.type == 'start_round_transition' and isinstance(obj, Round):
signals.do_round_transition.send(sender=self.__class__, round_id=obj.pk)
elif self.type == 'notify_players_unscheduled' and isinstance(obj, Round):
signals.notify_players_unscheduled.send(sender=self.__class__, round_=obj)
elif self.type == 'notify_players_game_time' and isinstance(obj, PlayerPairing):
signals.notify_players_game_time.send(sender=self.__class__, pairing=obj)
elif self.type == 'automod_unresponsive' and isinstance(obj, Round):
signals.automod_unresponsive.send(sender=self.__class__, round_=obj)
elif self.type == 'automod_noshow' and isinstance(obj, PlayerPairing):
signals.automod_noshow.send(sender=self.__class__, pairing=obj)
def clean(self):
if self.league_id and self.season_id and self.season.league != self.league:
raise ValidationError('League and season must be compatible')
PLAYER_NOTIFICATION_TYPES = (
('round_started', 'Round started'),
('before_game_time', 'Before game time'),
('game_time', 'Game time'),
('unscheduled_game', 'Unscheduled game'),
('game_warning', 'Game warning'),
('alternate_needed', 'Alternate needed'),
)
#-------------------------------------------------------------------------------
class PlayerNotificationSetting(_BaseModel):
player = select2.fields.ForeignKey(Player, ajax=True, search_field='lichess_username')
type = models.CharField(max_length=255, choices=PLAYER_NOTIFICATION_TYPES)
league = models.ForeignKey(League)
offset = models.DurationField(blank=True, null=True)
enable_lichess_mail = models.BooleanField()
enable_slack_im = models.BooleanField()
enable_slack_mpim = models.BooleanField()
class Meta:
unique_together = ('player', 'type', 'league')
def __str__(self):
return '%s - %s' % (self.player, self.get_type_display())
def save(self, *args, **kwargs):
super(PlayerNotificationSetting, self).save(*args, **kwargs)
if self.type == 'before_game_time':
# Rebuild scheduled notifications based on offset
self.schedulednotification_set.all().delete()
upcoming_pairings = self.player.pairings.filter(scheduled_time__gt=timezone.now())
upcoming_pairings = upcoming_pairings.filter(teamplayerpairing__team_pairing__round__season__league=self.league) | \
upcoming_pairings.filter(loneplayerpairing__round__season__league=self.league)
for p in upcoming_pairings:
notification_time = p.scheduled_time - self.offset
ScheduledNotification.objects.create(setting=self, pairing=p, notification_time=notification_time)
@classmethod
def get_or_default(cls, **kwargs):
obj = PlayerNotificationSetting.objects.filter(**kwargs).first()
if obj is not None:
return obj
# Return (but don't create) the default setting based on the type
obj = PlayerNotificationSetting(**kwargs)
type_ = kwargs.get('type')
if type_ == 'before_game_time' and obj.offset is not None:
del kwargs['offset']
has_other_offset = PlayerNotificationSetting.objects.filter(**kwargs).exists()
if has_other_offset or obj.offset != timedelta(minutes=60):
# Non-default offset, so leave everything disabled
return obj
obj.enable_lichess_mail = type_ in ('round_started', 'game_warning', 'alternate_needed')
obj.enable_slack_im = type_ in ('round_started', 'before_game_time', 'game_time', 'unscheduled_game', 'alternate_needed')
obj.enable_slack_mpim = type_ in ('round_started', 'before_game_time', 'game_time', 'unscheduled_game')
if type_ == 'before_game_time':
obj.offset = timedelta(minutes=60)
return obj
def clean(self):
if self.type in ('before_game_time',):
if self.offset is None:
raise ValidationError('Offset is required for this type')
else:
if self.offset is not None:
raise ValidationError('Offset is not applicable for this type')
#-------------------------------------------------------------------------------
class PlayerPresence(_BaseModel):
player = models.ForeignKey(Player)
pairing = models.ForeignKey(PlayerPairing)
round = models.ForeignKey(Round)
first_msg_time = models.DateTimeField(null=True, blank=True)
last_msg_time = models.DateTimeField(null=True, blank=True)
online_for_game = models.BooleanField(default=False)
def __str__(self):
return '%s' % (self.player)
PLAYER_WARNING_TYPE_OPTIONS = (
('unresponsive', 'unresponsive'),
('card_unresponsive', 'card for unresponsive'),
('card_noshow', 'card for no-show'),
)
#-------------------------------------------------------------------------------
class PlayerWarning(_BaseModel):
round = models.ForeignKey(Round, null=True, blank=True)
player = select2.fields.ForeignKey(Player, ajax=True, search_field='lichess_username')
type = models.CharField(max_length=255, choices=PLAYER_WARNING_TYPE_OPTIONS)
class Meta:
unique_together = ('round', 'player', 'type')
def __str__(self):
return '%s - %s' % (self.player.lichess_username, self.get_type_display())
#-------------------------------------------------------------------------------
class ScheduledNotification(_BaseModel):
setting = models.ForeignKey(PlayerNotificationSetting)
pairing = models.ForeignKey(PlayerPairing)
notification_time = models.DateTimeField()
def __str__(self):
return '%s' % (self.setting)
def save(self, *args, **kwargs):
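        # Never persist a notification time that is already in the past; clean
        # up a stale row instead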
if self.notification_time < timezone.now():
if self.pk:
self.delete()
else:
super(ScheduledNotification, self).save(*args, **kwargs)
def run(self):
try:
if self.setting.type == 'before_game_time':
pairing = PlayerPairing.objects.nocache().get(pk=self.pairing_id)
if pairing.scheduled_time is not None:
signals.before_game_time.send(sender=self.__class__, player=self.setting.player, pairing=pairing, offset=self.setting.offset)
except Exception:
logger.exception('Error running scheduled notification')
self.delete()
def clean(self):
if self.setting.offset is None:
raise ValidationError('Setting must have an offset')
#-------------------------------------------------------------------------------
class FcmSub(_BaseModel):
slack_user_id = models.CharField(max_length=31)
reg_id = models.CharField(max_length=4096, unique=True)
MOD_REQUEST_STATUS_OPTIONS = (
('pending', 'Pending'),
('approved', 'Approved'),
('rejected', 'Rejected'),
)
MOD_REQUEST_TYPE_OPTIONS = (
('withdraw', 'Withdraw'),
('reregister', 'Re-register'),
('appeal_late_response', 'Appeal late response'),
('appeal_noshow', 'Appeal no-show'),
('appeal_draw_scheduling', 'Appeal scheduling draw'),
('claim_win_noshow', 'Claim a forfeit win (no-show)'),
('claim_win_effort', 'Claim a forfeit win (insufficient effort)'),
('claim_draw_scheduling', 'Claim a scheduling draw'),
('claim_loss', 'Claim a forfeit loss'),
('request_continuation', 'Request continuation'),
)
# A plain string literal won't work as a Django signal sender since it will have a unique object reference
# By using a common dict we can make sure we're working with the same object (using `intern` would also work)
# This also has the advantage that typos will create a KeyError instead of silently failing
MOD_REQUEST_SENDER = { a: a for a, _ in MOD_REQUEST_TYPE_OPTIONS }
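# Usage sketch (on_withdraw_approved is a hypothetical receiver, not defined here):
#   signals.mod_request_approved.connect(on_withdraw_approved,
#                                        sender=MOD_REQUEST_SENDER['withdraw'])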
#-------------------------------------------------------------------------------
class ModRequest(_BaseModel):
season = models.ForeignKey(Season)
round = models.ForeignKey(Round, null=True, blank=True)
pairing = models.ForeignKey(PlayerPairing, null=True, blank=True)
requester = select2.fields.ForeignKey(Player, ajax=True, search_field='lichess_username')
type = models.CharField(max_length=255, choices=MOD_REQUEST_TYPE_OPTIONS)
status = models.CharField(max_length=31, choices=MOD_REQUEST_STATUS_OPTIONS)
status_changed_by = models.CharField(blank=True, max_length=255)
status_changed_date = models.DateTimeField(blank=True, null=True)
notes = models.TextField(blank=True)
# TODO: Multiple screenshot support?
screenshot = models.ImageField(upload_to='screenshots/%Y/%m/%d/', null=True, blank=True)
response = models.TextField(blank=True)
def approve(self, user='System', response=''):
self.status = 'approved'
self.status_changed_by = user
self.status_changed_date = timezone.now()
self.response = response
self.save()
signals.mod_request_approved.send(sender=MOD_REQUEST_SENDER[self.type], instance=self)
def reject(self, user='System', response=''):
self.status = 'rejected'
self.status_changed_by = user
self.status_changed_date = timezone.now()
self.response = response
self.save()
signals.mod_request_rejected.send(sender=MOD_REQUEST_SENDER[self.type], instance=self, response=response)
def clean(self):
pass
# TODO: This validation isn't working because type is not populated in the form.
# if not self.screenshot and self.type in ('appeal_late_response', 'claim_win_noshow', 'claim_win_effort', 'claim_draw_scheduling'):
# raise ValidationError('Screenshot is required')
def __str__(self):
return '%s - %s' % (self.requester.lichess_username, self.get_type_display())
|
class Solution(object):
def maxProfit(self, prices):
"""
:type prices: List[int]
:rtype: int
"""
if len(prices) <= 1:
return 0
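        # State machine with cooldown:
        #   rest[i] - holding nothing, free to buy on day i+1
        #   hold[i] - holding a share at the end of day i
        #   sold[i] - sold on day i, so day i+1 is a forced cooldown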
rest = [0] * len(prices)
hold = [0] * len(prices)
hold[0] = -prices[0]
sold = [0] * len(prices)
for i in range(1, len(prices)):
rest[i] = max(rest[i - 1], sold[i - 1])
hold[i] = max(hold[i - 1], rest[i - 1] - prices[i])
sold[i] = hold[i - 1] + prices[i]
return max(rest[-1], sold[-1])
# With space compression
class Solution2(object):
def maxProfit(self, prices):
"""
:type prices: List[int]
:rtype: int
"""
if len(prices) <= 1:
return 0
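        # Same recurrence as Solution above, compressed to O(1) space by keeping
        # only the previous day's rest/hold/sold values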
rest = 0
hold = -prices[0]
sold = 0
for i in range(1, len(prices)):
old_sold = sold
sold = hold + prices[i]
hold = max(hold, rest - prices[i])
rest = max(rest, old_sold)
return max(rest, sold)
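# Quick sanity check (the input/output pair is the classic example for this
# problem: buy at 1, sell at 2, cooldown, buy at 0, sell at 2 for a profit of 3):
if __name__ == '__main__':
    assert Solution().maxProfit([1, 2, 3, 0, 2]) == 3
    assert Solution2().maxProfit([1, 2, 3, 0, 2]) == 3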
|
from django_task.task_command import TaskCommand
class Command(TaskCommand):
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument('num_beans', type=int)
def handle(self, *args, **options):
from tasks.models import CountBeansTask
self.run_task(CountBeansTask, **options)
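# Usage sketch (assumes this file is saved as management/commands/count_beans.py;
# the command name below is an assumption derived from that filename):
#   python manage.py count_beans 1000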
|
# Copyright © 2022 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests to assure the business-addresses end-point.
Test-Suite to ensure that naics endpoints are working as expected.
"""
from http import HTTPStatus
from legal_api.services.authz import BASIC_USER
from tests.unit.services.utils import create_header
def test_search_naics_using_search_term_with_results(session, client, jwt):
"""Assert that search results are returned when searching with search term.
Note: we are able to hard code a search term and verify specific values because the test data will always be
the same. This test search term provides a quick way of testing that many paths of the search logic are
working correctly. A bit overloaded, but a quick way to uncover any issues with the NAICS search.
"""
# test
    rv = client.get('/api/v2/naics?search_term=roast',
headers=create_header(jwt, [BASIC_USER], 'user'))
# check
assert rv.status_code == HTTPStatus.OK
assert 'results' in rv.json
results = rv.json['results']
assert len(results) == 7
# verify elements are filtered correctly
    results_with_elements = [result for result in results if len(result['naicsElements']) > 0]
    assert len(results_with_elements) == 7
results_with_3_elements = [result for result in results if len(result['naicsElements']) == 3]
assert len(results_with_3_elements) == 2
def test_exact_match_search_naics(session, client, jwt):
"""Assert that search results are returned when searching with exact search term."""
# test
    rv = client.get('/api/v2/naics?search_term=chocolate%20confectionery%20manufacturing',
headers=create_header(jwt, [BASIC_USER], 'user'))
# check
assert rv.status_code == HTTPStatus.OK
assert 'results' in rv.json
results = rv.json['results']
assert len(results) == 1
assert len(results[0]['naicsElements']) == 10
def test_non_exact_match_search_naics(session, client, jwt):
"""Assert that search results are returned when searching with non-exact search term."""
# test
    rv = client.get('/api/v2/naics?search_term=confectionery%20chocolate',
headers=create_header(jwt, [BASIC_USER], 'user'))
# check
assert rv.status_code == HTTPStatus.OK
assert 'results' in rv.json
results = rv.json['results']
assert len(results) == 3
# verify elements are filtered correctly
results_with_7_elements = [result for result in results if len(result['naicsElements']) == 7]
assert len(results_with_7_elements) == 1
results_with_2_elements = [result for result in results if len(result['naicsElements']) == 2]
assert len(results_with_2_elements) == 1
results_with_4_elements = [result for result in results if len(result['naicsElements']) == 4]
assert len(results_with_4_elements) == 1
def test_search_naics_using_code_with_result(session, client, jwt):
"""Assert that search result is returned when searching with valid naics code."""
# test
    rv = client.get('/api/v2/naics?search_term=311911',
headers=create_header(jwt, [BASIC_USER], 'user'))
# check
assert rv.status_code == HTTPStatus.OK
assert 'results' in rv.json
results = rv.json['results']
assert len(results) == 1
assert len(results[0]["naicsElements"]) == 3
def test_search_naics_using_code_with_no_result(session, client, jwt):
"""Assert that no search result is returned when searching with non-existent naics code."""
# test
    rv = client.get('/api/v2/naics?search_term=999999',
headers=create_header(jwt, [BASIC_USER], 'user'))
# check
assert rv.status_code == HTTPStatus.OK
assert 'results' in rv.json
results = rv.json['results']
assert len(results) == 0
def test_search_naics_no_results(session, client, jwt):
"""Assert that 200 is returned with no results."""
# test
    rv = client.get('/api/v2/naics?search_term=jaklsjdf',
headers=create_header(jwt, [BASIC_USER], 'user'))
# check
assert rv.status_code == HTTPStatus.OK
assert 'results' in rv.json
results = rv.json['results']
assert len(results) == 0
def test_search_naics_with_no_search_term_param(session, client, jwt):
"""Assert that hitting naics endpoint with no search_term query param returns 400 and correct error message."""
# test
    rv = client.get('/api/v2/naics',
headers=create_header(jwt, [BASIC_USER], 'user'))
# check
assert rv.status_code == HTTPStatus.BAD_REQUEST
assert 'message' in rv.json
assert 'search_term query parameter is required.' in rv.json['message']
def test_search_naics_with_no_value_for_search_term_param(session, client, jwt):
"""Assert that hitting naics endpoint with no value for search_term query param returns 400 and correct error message."""
# test
    rv = client.get('/api/v2/naics?search_term',
headers=create_header(jwt, [BASIC_USER], 'user'))
# check
assert rv.status_code == HTTPStatus.BAD_REQUEST
assert 'message' in rv.json
assert 'search_term query parameter is required.' in rv.json['message']
def test_search_naics_with_search_term_param_too_short(session, client, jwt):
"""Assert that hitting naics endpoint with search_term query param with value of less than 3 characters
returns 400 and correct error message."""
# test
    rv = client.get('/api/v2/naics?search_term=ab',
headers=create_header(jwt, [BASIC_USER], 'user'))
# check
assert rv.status_code == HTTPStatus.BAD_REQUEST
assert 'message' in rv.json
assert 'search_term cannot be less than 3 characters.' in rv.json['message']
def test_get_naics_code_by_code(session, client, jwt):
"""Assert that naics code can be retrieved using code."""
# setup
naics_code = '311911'
# test
rv = client.get(f'/api/v2/naics/{naics_code}',
headers=create_header(jwt, [BASIC_USER], 'user'))
# check
assert rv.status_code == HTTPStatus.OK
assert 'code' in rv.json
assert rv.json['code'] == naics_code
assert 'classDefinition' in rv.json
assert 'classTitle' in rv.json
assert 'year' in rv.json
assert 'naicsElements' in rv.json
assert len(rv.json['naicsElements']) == 3
def test_get_naics_code_by_key(session, client, jwt):
"""Assert that naics code can be retrieved using key."""
# setup
naics_code = '311911'
naics_key = 'd2fca3f1-f391-49a7-8b67-00381b569612'
# test
rv = client.get(f'/api/v2/naics/{naics_key}',
headers=create_header(jwt, [BASIC_USER], 'user'))
# check
assert rv.status_code == HTTPStatus.OK
assert 'code' in rv.json
assert rv.json['code'] == naics_code
assert 'naicsKey' in rv.json
assert rv.json['naicsKey'] == naics_key
assert 'classDefinition' in rv.json
assert 'classTitle' in rv.json
assert 'year' in rv.json
assert 'naicsElements' in rv.json
assert len(rv.json['naicsElements']) == 3
def test_get_naics_code_invalid_code_or_key_format(session, client, jwt):
"""Assert that retrieving naics code with invalid code format returns 400."""
# setup
naics_code = '311aaa'
# test
rv = client.get(f'/api/v2/naics/{naics_code}',
headers=create_header(jwt, [BASIC_USER], 'user'))
# check
assert rv.status_code == HTTPStatus.BAD_REQUEST
assert 'message' in rv.json
assert rv.json['message'] == 'Invalid NAICS code(6 digits) or naics key(uuid v4) format.'
def test_get_naics_code_not_found(session, client, jwt):
"""Assert that retrieving naics code returns 404 when not found."""
# setup
naics_code = '999999'
# test
rv = client.get(f'/api/v2/naics/{naics_code}',
headers=create_header(jwt, [BASIC_USER], 'user'))
# check
assert rv.status_code == HTTPStatus.NOT_FOUND
assert 'message' in rv.json
assert rv.json['message'] == 'NAICS code not found.'
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from . import models
# Register your models here.
class CustomUserAdmin(UserAdmin):
    # Renamed from UserAdmin to avoid shadowing the imported class.
    pass
admin.site.register(models.User, CustomUserAdmin)
|
from a10sdk.common.A10BaseClass import A10BaseClass
class ManagementAddress(A10BaseClass):
"""Class Description::
Configure lldp management address.
Class management-address supports CRUD Operations and inherits from `common/A10BaseClass`.
    This class is the `"PARENT"` class for this module.
:param ipv6_addr_list: {"minItems": 1, "items": {"type": "ipv6-addr"}, "uniqueItems": true, "array": [{"required": ["ipv6"], "properties": {"interface-ipv6": {"type": "object", "properties": {"ipv6-ve": {"description": "configure lldp management-address interface ve. help-val lldp management-address interface port number", "minimum": 2, "type": "number", "maximum": 4094, "format": "number"}, "ipv6-eth": {"type": "number", "description": "configure lldp management-address interface ethernet. help-val lldp management-address interface port number", "format": "interface"}, "ipv6-mgmt": {"default": 0, "type": "number", "description": "configure lldp management-address interface management", "format": "flag"}}}, "ipv6": {"optional": false, "type": "string", "description": "Configure lldp management-address, subtype is ipv6. help-val lldp management-address ipv6 address", "format": "ipv6-address"}}}], "type": "array", "$ref": "https://axapi.a10networks.com/axapi/v3/lldp/management-address/ipv6-addr/{ipv6}"}
:param ipv4_addr_list: {"minItems": 1, "items": {"type": "ipv4-addr"}, "uniqueItems": true, "array": [{"required": ["ipv4"], "properties": {"ipv4": {"optional": false, "type": "string", "description": "Configure lldp management-address, subtype is ipv4. help-val lldp management-address ipv4 address", "format": "ipv4-address"}, "interface-ipv4": {"type": "object", "properties": {"ipv4-eth": {"type": "number", "description": "configure lldp management-address interface ethernet. help-val lldp management-address interface port number", "format": "interface"}, "ipv4-mgmt": {"default": 0, "type": "number", "description": "configure lldp management-address interface management", "format": "flag"}, "ipv4-ve": {"description": "configure lldp management-address interface ve. help-val lldp management-address interface port number", "minimum": 2, "type": "number", "maximum": 4094, "format": "number"}}}}}], "type": "array", "$ref": "https://axapi.a10networks.com/axapi/v3/lldp/management-address/ipv4-addr/{ipv4}"}
:param dns_list: {"minItems": 1, "items": {"type": "dns"}, "uniqueItems": true, "array": [{"required": ["dns"], "properties": {"interface": {"type": "object", "properties": {"ethernet": {"not-list": ["ve", "management"], "type": "number", "description": "configure lldp management-address interface ethernet. help-val lldp management-address interface port number", "format": "interface"}, "management": {"default": 0, "not-list": ["ethernet", "ve"], "type": "number", "description": "configure lldp management-address interface management", "format": "flag"}, "ve": {"description": "configure lldp management-address interface management. help-val lldp management-address interface port number", "format": "number", "not-list": ["ethernet", "management"], "maximum": 4094, "minimum": 2, "type": "number"}}}, "dns": {"description": "Configure lldp management-address, subtype is dns. help-val lldp management-address dns address", "format": "string", "minLength": 1, "optional": false, "maxLength": 31, "type": "string"}}}], "type": "array", "$ref": "https://axapi.a10networks.com/axapi/v3/lldp/management-address/dns/{dns}"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/lldp/management-address`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required=[]
self.b_key = "management-address"
self.a10_url="/axapi/v3/lldp/management-address"
self.DeviceProxy = ""
self.ipv6_addr_list = []
self.ipv4_addr_list = []
self.dns_list = []
        for key, value in kwargs.items():
            setattr(self, key, value)
|
GENOMES_DIR='/home/cmb-panasas2/skchoudh/genomes'
OUT_DIR = '/staging/as/skchoudh/rna/Apr_04_2013_MSI1_RiboSeq/fastq'
SRC_DIR = '/home/cmb-panasas2/skchoudh/github_projects/ribo-seq-snakemake/scripts'
RAWDATA_DIR ='/home/cmb-06/as/skchoudh/dna/Apr_04_2013_MSI1_RiboSeq/fastq/'
GENOME_BUILD = 'hg38'
GENOME_FASTA = GENOMES_DIR + '/' + GENOME_BUILD + '/fasta/'+ GENOME_BUILD+ '.fa'
STAR_INDEX = GENOMES_DIR + '/' + GENOME_BUILD + '/star_annotated'
GTF = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + 'gencode.v25.annotation.gtf'
GENE_NAMES = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + GENOME_BUILD+'_gene_names_stripped.tsv'
GENE_LENGTHS = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + 'gencode.v25.coding_lengths.tsv' #+ GENOME_BUILD+'_gene_lengths.tsv'
DESIGN_FILE = RAWDATA_DIR + '/' + 'design.txt'
HTSEQ_STRANDED = 'yes'
FEATURECOUNTS_S = '-s 1'
GENE_BED = GENOMES_DIR + '/' + GENOME_BUILD + '/annotation/' + 'gencode.24.genes.bed' #+ GENOME_BUILD+'_gene_lengths.tsv'
FEATURECOUNTS_T='CDS'
HTSEQ_MODE='union'
|
# pylint: disable=W0212
# It is fine to access protected members for test purposes.
#
# $Filename$
# $Authors$
# Last Changed: $Date$ $Committer$ $Revision-Id$
#
# Copyright (c) 2003-2011, German Aerospace Center (DLR)
# All rights reserved.
#
#
#Redistribution and use in source and binary forms, with or without
#modification, are permitted provided that the following conditions are
#met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of the German Aerospace Center nor the names of
# its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
#LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
#A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
#OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
#SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
#LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
#DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
#THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
#(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
#OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Contains tests of the TSM factory.
"""
import unittest
from datafinder.persistence.adapters.tsm import factory
from datafinder.persistence.adapters.tsm.data.adapter import DataTsmAdapter
from datafinder.persistence.common.configuration import BaseConfiguration
__version__ = "$Revision-Id:$"
class FileSystemTestCase(unittest.TestCase):
""" Tests cases of the TSM factory. """
def testBasicProcedures(self):
tsmFileSystem = factory.FileSystem(BaseConfiguration("tsm://host.de/basePath"))
self.assertTrue(isinstance(tsmFileSystem.createDataStorer("/logical/Identifier"), DataTsmAdapter))
tsmFileSystem.release()
credentials = {"username": "me", "password": "secret"}
tsmFileSystem.updateCredentials(credentials)
        self.assertEqual(tsmFileSystem._configuration.username, "me")
        self.assertEqual(tsmFileSystem._configuration.password, "secret")
|
import time
from threading import Thread
def sleeper(i):
    print("thread %d sleeps for 5 seconds" % i)
    time.sleep(5)
    print("thread %d woke up" % i)
for i in range(10):
t = Thread(target=sleeper, args=(i,))
t.start()
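# A minimal variant sketch (not in the original): if the script should wait
# for every worker before exiting, keep the Thread objects and join them.
#
#   threads = [Thread(target=sleeper, args=(i,)) for i in range(10)]
#   for t in threads:
#       t.start()
#   for t in threads:
#       t.join()
#   print("all threads woke up")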
|
"""Module to run minimal flask app to be used for tests.
"""
from flask import Flask
APP = Flask(__name__)
@APP.route('/')
def hello_world():
return 'Hello, World!'
@APP.route('/foo')
def foo():
return 'bar'
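# A quick smoke test with Flask's built-in test client (illustrative):
#
#   with APP.test_client() as client:
#       assert client.get('/').data == b'Hello, World!'
#       assert client.get('/foo').data == b'bar'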
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Feature Pyramid Networks.
Feature Pyramid Networks were proposed in:
[1] Tsung-Yi Lin, Piotr Dollar, Ross Girshick, Kaiming He, Bharath Hariharan,
    and Serge Belongie
    Feature Pyramid Networks for Object Detection. CVPR 2017.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import itertools
import logging
import tensorflow as tf
from tensorflow.python.keras import backend
from . import nn_ops
from ..ops import spatial_transform_ops
from ..utils.efficientdet_utils import get_feat_sizes, activation_fn
from xl_tensorflow.utils import hparams_config
@tf.keras.utils.register_keras_serializable(package='Text')
class WeightedAdd(tf.keras.layers.Layer):
    def __init__(self, epsilon=1e-4, activation="relu", **kwargs):
        """Weighted feature fusion layer.
        Args:
            epsilon: small constant added to the weight sum for numerical stability.
            activation: "relu" (fast normalized fusion) or "softmax" (attention fusion).
            **kwargs: forwarded to the base Layer.
        """
        super(WeightedAdd, self).__init__(**kwargs)
        self.epsilon = epsilon
        # keep the string so get_config() can round-trip the layer
        self.activation_name = activation
        self.activation = tf.nn.softmax if activation == "softmax" else tf.nn.relu
def build(self, input_shape):
num_in = len(input_shape)
self.w = self.add_weight(name=self.name,
shape=(num_in,),
initializer=tf.keras.initializers.constant(1 / num_in),
trainable=True,
dtype=tf.float32)
    def call(self, inputs, **kwargs):
        w = self.activation(self.w)
        # Normalize by the sum of the activated weights (the original summed the
        # raw weights, which is inconsistent with the fast-fusion formulation).
        weights_sum = tf.reduce_sum(w)
        x = tf.reduce_sum([(w[i] * inputs[i]) / (weights_sum + self.epsilon) for i in range(len(inputs))], axis=0)
        return x
def compute_output_shape(self, input_shape):
return input_shape[0]
    def get_config(self):
        config = super(WeightedAdd, self).get_config()
        config.update({
            'epsilon': self.epsilon,
            'activation': self.activation_name
        })
        return config
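# A minimal usage sketch (illustrative shapes): fuse two same-shaped feature
# maps with fast normalized weights.
#
#   a = tf.random.normal([2, 16, 16, 8])
#   b = tf.random.normal([2, 16, 16, 8])
#   fused = WeightedAdd(activation="relu")([a, b])  # same shape as each input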
class BiFpn(object):
"""BiFeature pyramid networks.
1、去掉training_bn参数
2、以keras网络层为主,部分tf.nn层
todo 把bifpn放到yolo种
"""
def __init__(self,
min_level=3,
max_level=7,
):
"""FPN initialization function.
Args:
min_level: `int` minimum level in FPN output feature maps.
max_level: `int` maximum level in FPN output feature maps.
"""
self._min_level = min_level
self._max_level = max_level
def get_fpn_config(self, fpn_name, min_level, max_level, weight_method):
"""Get fpn related configuration."""
if not fpn_name:
fpn_name = 'bifpn_fa'
name_to_config = {
'bifpn_sum': self.bifpn_sum_config(),
'bifpn_fa': self.bifpn_fa_config(),
'bifpn_dyn': self.bifpn_dynamic_config(min_level, max_level, weight_method)
}
return name_to_config[fpn_name]
def fuse_features(self, nodes, weight_method):
"""Fuse features from different resolutions and return a weighted sum.
Args:
nodes: a list of tensorflow features at different levels
weight_method: feature fusion method. One of:
- "attn" - Softmax weighted fusion
- "fastattn" - Fast normalzied feature fusion
- "sum" - a sum of inputs
Returns:
A tensor denoting the fused feature.
"""
dtype = nodes[0].dtype
if weight_method == 'attn':
new_node = WeightedAdd(activation="softmax")(nodes)
elif weight_method == 'fastattn':
new_node = WeightedAdd(activation="relu")(nodes)
elif weight_method == 'sum':
new_node = tf.add_n(nodes)
else:
raise ValueError(
'unknown weight_method {}'.format(weight_method))
return new_node
def build_bifpn_layer(self, feats, feat_sizes, params):
"""Builds a feature pyramid given previous feature pyramid and config."""
p = params # use p to denote the network config.
        if p.fpn.fpn_config:
            fpn_config = p.fpn.fpn_config  # was p.fpn_config; the config lives on p.fpn
else:
fpn_config = self.get_fpn_config(p.fpn.fpn_name, p.architecture.min_level, p.architecture.max_level,
p.fpn.fpn_weight_method)
num_output_connections = [0 for _ in feats]
for i, fnode in enumerate(fpn_config.nodes):
with tf.name_scope('fnode{}'.format(i)):
logging.info('fnode %d : %s', i, fnode)
new_node_height = feat_sizes[fnode['feat_level']]['height']
new_node_width = feat_sizes[fnode['feat_level']]['width']
nodes = []
for idx, input_offset in enumerate(fnode['inputs_offsets']):
input_node = feats[input_offset]
num_output_connections[input_offset] += 1
input_node = spatial_transform_ops.resample_feature_map(
input_node, '{}_{}_{}'.format(idx, input_offset, len(feats)),
new_node_height, new_node_width, p.fpn.fpn_feat_dims,
p.fpn.apply_bn_for_resampling, p.is_training_bn,
p.fpn.conv_after_downsample,
p.fpn.use_native_resize_op,
p.fpn.pooling_type,
use_tpu=p.use_tpu,
data_format=params.data_format)
nodes.append(input_node)
new_node = self.fuse_features(nodes, fpn_config.weight_method)
with tf.name_scope('op_after_combine{}'.format(len(feats))):
if not p.fpn.conv_bn_act_pattern:
new_node = activation_fn(new_node, p.act_type)
if p.fpn.use_separable_conv:
conv_op = functools.partial(
tf.keras.layers.SeparableConv2D, depth_multiplier=1)
else:
conv_op = tf.keras.layers.Conv2D
new_node = conv_op(
filters=p.fpn.fpn_feat_dims,
kernel_size=(3, 3),
padding='same',
use_bias=not p.fpn.conv_bn_act_pattern,
data_format=params.data_format)(new_node)
                    # apply the activation separately from the conv (conv-bn-act pattern split)
act_type = None if not p.fpn.conv_bn_act_pattern else p.act_type
new_node = tf.keras.layers.BatchNormalization(
axis=1 if params.data_format == "channels_first" else -1,
momentum=p.norm_activation.batch_norm_momentum,
epsilon=p.norm_activation.batch_norm_epsilon)(new_node)
if act_type:
new_node = activation_fn(new_node, act_type)
feats.append(new_node)
num_output_connections.append(0)
output_feats = {}
for l in range(p.architecture.min_level, p.architecture.max_level + 1):
for i, fnode in enumerate(reversed(fpn_config.nodes)):
if fnode['feat_level'] == l:
output_feats[l] = feats[-1 - i]
break
return output_feats
def bifpn_sum_config(self):
"""BiFPN config with sum."""
p = hparams_config.Config()
p.nodes = [
{'feat_level': 6, 'inputs_offsets': [3, 4]},
{'feat_level': 5, 'inputs_offsets': [2, 5]},
{'feat_level': 4, 'inputs_offsets': [1, 6]},
{'feat_level': 3, 'inputs_offsets': [0, 7]},
{'feat_level': 4, 'inputs_offsets': [1, 7, 8]},
{'feat_level': 5, 'inputs_offsets': [2, 6, 9]},
{'feat_level': 6, 'inputs_offsets': [3, 5, 10]},
{'feat_level': 7, 'inputs_offsets': [4, 11]},
]
p.weight_method = 'sum'
return p
def bifpn_fa_config(self):
"""BiFPN config with fast weighted sum."""
p = self.bifpn_sum_config()
p.weight_method = 'fastattn'
return p
def bifpn_dynamic_config(self, min_level, max_level, weight_method):
"""A dynamic bifpn config that can adapt to different min/max levels."""
p = hparams_config.Config()
p.weight_method = weight_method or 'fastattn'
num_levels = max_level - min_level + 1
node_ids = {min_level + i: [i] for i in range(num_levels)}
level_last_id = lambda level: node_ids[level][-1]
level_all_ids = lambda level: node_ids[level]
id_cnt = itertools.count(num_levels)
p.nodes = []
for i in range(max_level - 1, min_level - 1, -1):
# top-down path.
p.nodes.append({
'feat_level': i,
'inputs_offsets': [level_last_id(i), level_last_id(i + 1)]
})
node_ids[i].append(next(id_cnt))
for i in range(min_level + 1, max_level + 1):
# bottom-up path.
p.nodes.append({
'feat_level': i,
'inputs_offsets': level_all_ids(i) + [level_last_id(i - 1)]
})
node_ids[i].append(next(id_cnt))
return p
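    # A worked trace (derived from the loop above): for min_level=3,
    # max_level=5 the generated nodes are
    #   {'feat_level': 4, 'inputs_offsets': [1, 2]}      # top-down
    #   {'feat_level': 3, 'inputs_offsets': [0, 3]}
    #   {'feat_level': 4, 'inputs_offsets': [1, 3, 4]}   # bottom-up
    #   {'feat_level': 5, 'inputs_offsets': [2, 5]}
    # where offsets 0..2 are the backbone features P3..P5 and later offsets
    # are the fusion nodes, numbered in creation order.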
def __call__(self, multilevel_features, params):
"""Returns the FPN features for a given multilevel features.
Args:
multilevel_features: a `dict` containing `int` keys for continuous feature
levels, e.g., [2, 3, 4, 5]. The values are corresponding features with
        shape [batch_size, height_l, width_l, num_filters].
      params: the config object carrying the FPN and parser hyperparameters.
Returns:
a `dict` containing `int` keys for continuous feature levels
[min_level, min_level + 1, ..., max_level]. The values are corresponding
FPN features with shape [batch_size, height_l, width_l, fpn_feat_dims].
"""
        # step 1: Build additional input features that are not from the backbone (i.e. levels 6 and 7).
feats = []
# with tf.name_scope('bifpn'):
with backend.get_graph().as_default(), tf.name_scope('bifpn'):
for level in range(self._min_level, self._max_level + 1):
if level in multilevel_features.keys():
feats.append(multilevel_features[level])
else:
                    h_id, w_id = (1, 2)  # channels-first is not supported; data_format must be "channels_last"
feats.append(
spatial_transform_ops.resample_feature_map(
feats[-1],
name='p%d' % level,
target_height=(feats[-1].shape[h_id] - 1) // 2 + 1,
target_width=(feats[-1].shape[w_id] - 1) // 2 + 1,
target_num_channels=params.fpn.fpn_feat_dims,
apply_bn=params.fpn.apply_bn_for_resampling,
is_training=params.is_training_bn,
conv_after_downsample=params.fpn.conv_after_downsample,
use_native_resize_op=params.fpn.use_native_resize_op,
pooling_type=params.fpn.pooling_type,
use_tpu=False,
data_format="channels_last"
))
feat_sizes = get_feat_sizes(params.efficientdet_parser.output_size[0], self._max_level)
with tf.name_scope("bifpn_cells"):
for rep in range(params.fpn.fpn_cell_repeats):
logging.info('building cell %d', rep)
new_feats = self.build_bifpn_layer(feats, feat_sizes, params)
feats = [
new_feats[level]
for level in range(
self._min_level, self._max_level + 1)
]
return new_feats
|
#!/usr/bin/env python2
# coding: UTF-8
import rospy
import math
from consai2_msgs.msg import Referee, DecodedReferee, BallInfo
from consai2_receiver_proto.referee_pb2 import SSL_Referee
from geometry_msgs.msg import Pose2D
REFEREE_TEXT = {
0 : "HALT", 1 : "STOP", 3 : "FORCE_START",
11 : "OUR_KICKOFF_PREPARATION", 12 : "OUR_KICKOFF_START",
13 : "OUR_PENALTY_PREPARATION", 14 : "OUR_PENALTY_START",
15 : "OUR_DIRECT_FREE", 16 : "OUR_INDIRECT_FREE",
17 : "OUR_TIMEOUT", 18 : "OUR_GOAL", 19 : "OUR_BALL_PLACEMENT",
21 : "THEIR_KICKOFF_PREPARATION", 22 : "THEIR_KICKOFF_START",
23 : "THEIR_PENALTY_PREPARATION", 24 : "THEIR_PENALTY_START",
25 : "THEIR_DIRECT_FREE", 26 : "THEIR_INDIRECT_FREE",
27 : "THEIR_TIMEOUT", 28 : "THEIR_GOAL", 29 : "THEIR_BALL_PLACEMENT",
}
REFEREE_ID = {v:k for k, v in REFEREE_TEXT.items()}
class RefereeWrapper(object):
def __init__(self):
self._OUR_COLOR = rospy.get_param('consai2_description/our_color', 'blue')
self._sub_ball_info = rospy.Subscriber(
'vision_wrapper/ball_info',
BallInfo,
self._callback_ball_info)
self._sub_referee = rospy.Subscriber(
'referee_receiver/raw_referee',
Referee,
self._callback_referee)
self._pub_decoded_referee = rospy.Publisher(
'~decoded_referee', DecodedReferee, queue_size=1)
self._DECODE_ID = {
"OUR" : 10, "THEIR" : 20,
"HALT" : 0, "STOP" : 1, "FORCE_START" : 3, # 定数の代わりに定義
"KICKOFF_PREPARATION" : 1, "KICKOFF_START" : 2,
"PENALTY_PREPARATION" : 3, "PENALTY_START" : 4,
"DIRECT_FREE" : 5, "INDIRECT_FREE" : 6,
"TIMEOUT" : 7, "GOAL" : 8, "BALL_PLACEMENT" : 9,
}
self._ID_BLUE = self._DECODE_ID["OUR"]
self._ID_YELLOW = self._DECODE_ID["THEIR"]
if self._OUR_COLOR != 'blue':
self._ID_BLUE = self._DECODE_ID["THEIR"]
self._ID_YELLOW = self._DECODE_ID["OUR"]
self._INPLAY_DISTANCE = 0.05 # meter
self._SPEED_LIMIT_OF_ROBOT = 1.5 # meters / sec
self._SPEED_LIMIT_OF_BALL = 6.5 # meters / sec
self._KEEP_OUT_RADIUS_FROM_BALL = 0.5 # meters
self._KEEP_OUT_DISTANCE_FROM_THEIR_DEFENSE_AREA = 0.2 # meters
self._NO_LIMIT = -1
self._prev_referee = Referee()
self._prev_decoded_msg = DecodedReferee()
self._ball_pose = Pose2D()
self._stationary_ball_pose = Pose2D()
self._game_is_inplay = False
def _callback_ball_info(self, msg):
self._ball_pose = msg.pose
def _callback_referee(self, msg):
        # Interpret the referee data according to our team color
decoded_msg = self._decode_referee(msg)
self._pub_decoded_referee.publish(decoded_msg)
        # The previous command is needed to interpret NORMAL_START
self._prev_referee = msg
self._prev_decoded_msg = decoded_msg
def _decode_referee_id(self, referee_command):
decoded_id = 0
        # HALT, STOP and FORCE_START do not depend on team color
if referee_command == SSL_Referee.HALT:
decoded_id = SSL_Referee.HALT
elif referee_command == SSL_Referee.STOP:
decoded_id = SSL_Referee.STOP
elif referee_command == SSL_Referee.FORCE_START:
decoded_id = SSL_Referee.FORCE_START
elif referee_command == SSL_Referee.NORMAL_START:
            # Compare with the previous command so the ID is not incremented repeatedly
if self._prev_referee.command != SSL_Referee.NORMAL_START:
                # The START ID is the PREPARATION ID plus one
decoded_id = self._prev_decoded_msg.referee_id + 1
else:
decoded_id = self._prev_decoded_msg.referee_id
else:
            # Interpret the command according to our team color
if referee_command == SSL_Referee.PREPARE_KICKOFF_YELLOW:
decoded_id = self._ID_YELLOW + self._DECODE_ID["KICKOFF_PREPARATION"]
if referee_command == SSL_Referee.PREPARE_KICKOFF_BLUE:
decoded_id = self._ID_BLUE + self._DECODE_ID["KICKOFF_PREPARATION"]
if referee_command == SSL_Referee.PREPARE_PENALTY_YELLOW:
decoded_id = self._ID_YELLOW + self._DECODE_ID["PENALTY_PREPARATION"]
if referee_command == SSL_Referee.PREPARE_PENALTY_BLUE:
decoded_id = self._ID_BLUE + self._DECODE_ID["PENALTY_PREPARATION"]
if referee_command == SSL_Referee.DIRECT_FREE_YELLOW:
decoded_id = self._ID_YELLOW + self._DECODE_ID["DIRECT_FREE"]
if referee_command == SSL_Referee.DIRECT_FREE_BLUE:
decoded_id = self._ID_BLUE + self._DECODE_ID["DIRECT_FREE"]
if referee_command == SSL_Referee.INDIRECT_FREE_YELLOW:
decoded_id = self._ID_YELLOW + self._DECODE_ID["INDIRECT_FREE"]
if referee_command == SSL_Referee.INDIRECT_FREE_BLUE:
decoded_id = self._ID_BLUE + self._DECODE_ID["INDIRECT_FREE"]
if referee_command == SSL_Referee.TIMEOUT_YELLOW:
decoded_id = self._ID_YELLOW + self._DECODE_ID["TIMEOUT"]
if referee_command == SSL_Referee.TIMEOUT_BLUE:
decoded_id = self._ID_BLUE + self._DECODE_ID["TIMEOUT"]
if referee_command == SSL_Referee.GOAL_YELLOW:
decoded_id = self._ID_YELLOW + self._DECODE_ID["GOAL"]
if referee_command == SSL_Referee.GOAL_BLUE:
decoded_id = self._ID_BLUE + self._DECODE_ID["GOAL"]
if referee_command == SSL_Referee.BALL_PLACEMENT_YELLOW:
decoded_id = self._ID_YELLOW + self._DECODE_ID["BALL_PLACEMENT"]
if referee_command == SSL_Referee.BALL_PLACEMENT_BLUE:
decoded_id = self._ID_BLUE + self._DECODE_ID["BALL_PLACEMENT"]
return decoded_id
def _decode_referee(self, msg):
decoded_msg = DecodedReferee()
decoded_msg.referee_id = self._decode_referee_id(msg.command)
decoded_msg.referee_text = REFEREE_TEXT.get(decoded_msg.referee_id, 'INVALID_COMMAND')
decoded_msg.placement_position = msg.designated_position
# Decode restrictions
if decoded_msg.referee_id == self._DECODE_ID["HALT"] \
or decoded_msg.referee_id == self._DECODE_ID["OUR"] + self._DECODE_ID["GOAL"] \
or decoded_msg.referee_id == self._DECODE_ID["THEIR"] + self._DECODE_ID["GOAL"]:
# Reference : Rule 2019, 5.1.2 Halt
decoded_msg.can_move_robot = False
decoded_msg.speed_limit_of_robot = self._NO_LIMIT
decoded_msg.can_kick_ball = False
decoded_msg.can_enter_their_side = False
decoded_msg.can_enter_center_circle = False
decoded_msg.keep_out_radius_from_ball = self._NO_LIMIT
decoded_msg.keep_out_distance_from_their_defense_area = self._NO_LIMIT
elif decoded_msg.referee_id == self._DECODE_ID["STOP"]:
# Reference : Rule 2019, 5.1.1 Stop
decoded_msg.can_move_robot = True
decoded_msg.speed_limit_of_robot = self._SPEED_LIMIT_OF_ROBOT
decoded_msg.can_kick_ball = False
decoded_msg.can_enter_their_side = True
decoded_msg.can_enter_center_circle = True
decoded_msg.keep_out_radius_from_ball = self._KEEP_OUT_RADIUS_FROM_BALL
decoded_msg.keep_out_distance_from_their_defense_area = \
self._KEEP_OUT_DISTANCE_FROM_THEIR_DEFENSE_AREA
elif decoded_msg.referee_id == self._DECODE_ID["FORCE_START"]:
# Reference : Rule 2019, 5.3.5 Force Start
# Reference : Rule 2019, 8.1.6 Ball Speed
decoded_msg.can_move_robot = True
decoded_msg.speed_limit_of_robot = self._NO_LIMIT
decoded_msg.can_kick_ball = True
decoded_msg.can_enter_their_side = True
decoded_msg.can_enter_center_circle = True
decoded_msg.keep_out_radius_from_ball = self._NO_LIMIT
decoded_msg.keep_out_distance_from_their_defense_area = self._NO_LIMIT
elif decoded_msg.referee_id == self._DECODE_ID["OUR"] + self._DECODE_ID["KICKOFF_PREPARATION"]:
# Reference : Rule 2019, 5.3.2 Kick-Off
decoded_msg.can_move_robot = True
decoded_msg.speed_limit_of_robot = self._NO_LIMIT
decoded_msg.can_kick_ball = False
decoded_msg.can_enter_their_side = False
decoded_msg.can_enter_center_circle = True
decoded_msg.keep_out_radius_from_ball = 0 # No limit but robot do not touch the ball
decoded_msg.keep_out_distance_from_their_defense_area = \
self._KEEP_OUT_DISTANCE_FROM_THEIR_DEFENSE_AREA
elif decoded_msg.referee_id == self._DECODE_ID["OUR"] + self._DECODE_ID["KICKOFF_START"]:
# Reference : Rule 2019, 5.3.1 Normal Start
# Reference : Rule 2019, 5.3.2 Kick-Off
decoded_msg.can_move_robot = True
decoded_msg.speed_limit_of_robot = self._NO_LIMIT
decoded_msg.can_kick_ball = True
decoded_msg.can_enter_their_side = False
decoded_msg.can_enter_center_circle = True
decoded_msg.keep_out_radius_from_ball = self._NO_LIMIT
decoded_msg.keep_out_distance_from_their_defense_area = \
self._KEEP_OUT_DISTANCE_FROM_THEIR_DEFENSE_AREA
elif decoded_msg.referee_id == self._DECODE_ID["OUR"] + self._DECODE_ID["PENALTY_PREPARATION"]:
# Reference : Rule 2019, 5.3.6 Penalty Kick
decoded_msg.can_move_robot = True
decoded_msg.speed_limit_of_robot = self._NO_LIMIT
decoded_msg.can_kick_ball = False
decoded_msg.can_enter_their_side = True
decoded_msg.can_enter_center_circle = True
decoded_msg.keep_out_radius_from_ball = 0 # No limit but robot do not touch the ball
decoded_msg.keep_out_distance_from_their_defense_area = self._NO_LIMIT
elif decoded_msg.referee_id == self._DECODE_ID["OUR"] + self._DECODE_ID["PENALTY_START"]:
# Reference : Rule 2019, 5.3.1 Normal Start
# Reference : Rule 2019, 5.3.6 Penalty Kick
decoded_msg.can_move_robot = True
decoded_msg.speed_limit_of_robot = self._NO_LIMIT
decoded_msg.can_kick_ball = True
decoded_msg.can_enter_their_side = True
decoded_msg.can_enter_center_circle = True
decoded_msg.keep_out_radius_from_ball = self._NO_LIMIT
decoded_msg.keep_out_distance_from_their_defense_area = self._NO_LIMIT
elif decoded_msg.referee_id == self._DECODE_ID["OUR"] + self._DECODE_ID["DIRECT_FREE"] \
or decoded_msg.referee_id == self._DECODE_ID["OUR"] + self._DECODE_ID["INDIRECT_FREE"]:
# Reference : Rule 2019, 5.3.3 Direct Free Kick
            # Reference : Rule 2019, 5.3.4 Indirect Free Kick
decoded_msg.can_move_robot = True
decoded_msg.speed_limit_of_robot = self._NO_LIMIT
decoded_msg.can_kick_ball = True
decoded_msg.can_enter_their_side = True
decoded_msg.can_enter_center_circle = True
decoded_msg.keep_out_radius_from_ball = self._NO_LIMIT
decoded_msg.keep_out_distance_from_their_defense_area = \
self._KEEP_OUT_DISTANCE_FROM_THEIR_DEFENSE_AREA
elif decoded_msg.referee_id == self._DECODE_ID["OUR"] + self._DECODE_ID["BALL_PLACEMENT"]:
# Reference : Rule 2019, 5.2 Ball Placement
# Reference : Rule 2019, 8.2.8 Robot Stop Speed
decoded_msg.can_move_robot = True
decoded_msg.speed_limit_of_robot = self._NO_LIMIT
decoded_msg.can_kick_ball = True
decoded_msg.can_enter_their_side = True
decoded_msg.can_enter_center_circle = True
decoded_msg.keep_out_radius_from_ball = self._NO_LIMIT
decoded_msg.keep_out_distance_from_their_defense_area = \
self._KEEP_OUT_DISTANCE_FROM_THEIR_DEFENSE_AREA
elif decoded_msg.referee_id == self._DECODE_ID["THEIR"] + self._DECODE_ID["KICKOFF_PREPARATION"] \
or decoded_msg.referee_id == self._DECODE_ID["THEIR"] + self._DECODE_ID["KICKOFF_START"]:
# Reference : Rule 2019, 5.3.2 Kick-Off
decoded_msg.can_move_robot = True
decoded_msg.speed_limit_of_robot = self._NO_LIMIT
decoded_msg.can_kick_ball = False
decoded_msg.can_enter_their_side = False
decoded_msg.can_enter_center_circle = False
decoded_msg.keep_out_radius_from_ball = self._KEEP_OUT_RADIUS_FROM_BALL
decoded_msg.keep_out_distance_from_their_defense_area = \
self._KEEP_OUT_DISTANCE_FROM_THEIR_DEFENSE_AREA
elif decoded_msg.referee_id == self._DECODE_ID["THEIR"] + self._DECODE_ID["PENALTY_PREPARATION"] \
or decoded_msg.referee_id == self._DECODE_ID["THEIR"] + self._DECODE_ID["PENALTY_START"]:
# Reference : Rule 2019, 5.3.6 Penalty Kick
decoded_msg.can_move_robot = True
decoded_msg.speed_limit_of_robot = self._NO_LIMIT
decoded_msg.can_kick_ball = False
decoded_msg.can_enter_their_side = True
decoded_msg.can_enter_center_circle = True
decoded_msg.keep_out_radius_from_ball = self._KEEP_OUT_RADIUS_FROM_BALL
decoded_msg.keep_out_distance_from_their_defense_area = \
self._KEEP_OUT_DISTANCE_FROM_THEIR_DEFENSE_AREA
elif decoded_msg.referee_id == self._DECODE_ID["THEIR"] + self._DECODE_ID["DIRECT_FREE"] \
or decoded_msg.referee_id == self._DECODE_ID["THEIR"] + self._DECODE_ID["INDIRECT_FREE"] \
or decoded_msg.referee_id == self._DECODE_ID["THEIR"] + self._DECODE_ID["BALL_PLACEMENT"]:
# Reference : Rule 2019, 5.3.3 Direct Free Kick
            # Reference : Rule 2019, 5.3.4 Indirect Free Kick
# Reference : Rule 2019, 8.2.3 Ball Placement Interference
decoded_msg.can_move_robot = True
decoded_msg.speed_limit_of_robot = self._NO_LIMIT
decoded_msg.can_kick_ball = False
decoded_msg.can_enter_their_side = True
decoded_msg.can_enter_center_circle = True
decoded_msg.keep_out_radius_from_ball = self._KEEP_OUT_RADIUS_FROM_BALL
decoded_msg.keep_out_distance_from_their_defense_area = \
self._KEEP_OUT_DISTANCE_FROM_THEIR_DEFENSE_AREA
elif decoded_msg.referee_id == self._DECODE_ID["OUR"] + self._DECODE_ID["TIMEOUT"] \
or decoded_msg.referee_id == self._DECODE_ID["THEIR"] + self._DECODE_ID["TIMEOUT"]:
# Reference : Rule 2019, 4.4.2 Timeouts
# No limitations
decoded_msg.can_move_robot = True
decoded_msg.speed_limit_of_robot = self._NO_LIMIT
decoded_msg.can_kick_ball = True
decoded_msg.can_enter_their_side = True
decoded_msg.can_enter_center_circle = True
decoded_msg.keep_out_radius_from_ball = self._NO_LIMIT
decoded_msg.keep_out_distance_from_their_defense_area = self._NO_LIMIT
else:
decoded_msg.can_move_robot = False
decoded_msg.speed_limit_of_robot = self._NO_LIMIT
decoded_msg.can_kick_ball = False
decoded_msg.can_enter_their_side = False
decoded_msg.can_enter_center_circle = False
decoded_msg.keep_out_radius_from_ball = self._NO_LIMIT
decoded_msg.keep_out_distance_from_their_defense_area = self._NO_LIMIT
# Consider inplay
# Reference : Rule 2019, 8.1.3 Double Touch
# Reference : Rule 2019, A.1 Ball In And Out Of Play
if decoded_msg.referee_id == self._DECODE_ID["STOP"] \
or decoded_msg.referee_id == self._DECODE_ID["OUR"] + self._DECODE_ID["KICKOFF_PREPARATION"] \
or decoded_msg.referee_id == self._DECODE_ID["THEIR"] + self._DECODE_ID["KICKOFF_PREPARATION"] \
or decoded_msg.referee_id == self._DECODE_ID["OUR"] + self._DECODE_ID["PENALTY_PREPARATION"] \
or decoded_msg.referee_id == self._DECODE_ID["THEIR"] + self._DECODE_ID["PENALTY_PREPARATION"] \
or decoded_msg.referee_id == self._DECODE_ID["OUR"] + self._DECODE_ID["BALL_PLACEMENT"] \
or decoded_msg.referee_id == self._DECODE_ID["THEIR"] + self._DECODE_ID["BALL_PLACEMENT"]:
self._stationary_ball_pose = self._ball_pose
self._game_is_inplay = False
elif decoded_msg.referee_id == self._DECODE_ID["OUR"] + self._DECODE_ID["KICKOFF_START"] \
or decoded_msg.referee_id == self._DECODE_ID["OUR"] + self._DECODE_ID["PENALTY_START"] \
or decoded_msg.referee_id == self._DECODE_ID["OUR"] + self._DECODE_ID["DIRECT_FREE"] \
or decoded_msg.referee_id == self._DECODE_ID["OUR"] + self._DECODE_ID["INDIRECT_FREE"] \
or decoded_msg.referee_id == self._DECODE_ID["THEIR"] + self._DECODE_ID["KICKOFF_START"] \
or decoded_msg.referee_id == self._DECODE_ID["THEIR"] + self._DECODE_ID["PENALTY_START"] \
or decoded_msg.referee_id == self._DECODE_ID["THEIR"] + self._DECODE_ID["DIRECT_FREE"] \
or decoded_msg.referee_id == self._DECODE_ID["THEIR"] + self._DECODE_ID["INDIRECT_FREE"]:
            # Check whether the ball has moved from its stationary position
if self._game_is_inplay is False:
diff_pose = Pose2D()
diff_pose.x = self._ball_pose.x - self._stationary_ball_pose.x
diff_pose.y = self._ball_pose.y - self._stationary_ball_pose.y
move_distance = math.hypot(diff_pose.x, diff_pose.y)
if move_distance > self._INPLAY_DISTANCE:
self._game_is_inplay = True
            # Lift the action restrictions while the ball is in play
if self._game_is_inplay is True:
decoded_msg.can_move_robot = True
decoded_msg.speed_limit_of_robot = self._NO_LIMIT
decoded_msg.can_kick_ball = True
decoded_msg.can_enter_their_side = True
decoded_msg.can_enter_center_circle = True
decoded_msg.keep_out_radius_from_ball = self._NO_LIMIT
decoded_msg.keep_out_distance_from_their_defense_area = self._NO_LIMIT
decoded_msg.referee_text += "(INPLAY)"
return decoded_msg
def main():
rospy.init_node('referee_wrapper')
wrapper = RefereeWrapper()
rospy.spin()
if __name__ == '__main__':
main()
|
# Copyright (c) 2016-2017, Jani Nikula <jani@nikula.org>
# Licensed under the terms of BSD 2-Clause, see LICENSE for details.
"""
Alternative docstring syntax
============================
This module abstracts different compatibility options converting different
syntaxes into 'native' reST ones.
"""
import re
# Basic Javadoc/Doxygen/kernel-doc import
#
# FIXME: try to preserve whitespace better
def javadoc(comment):
"""Basic javadoc conversion to reStructuredText"""
# @param
comment = re.sub(r"(?m)^([ \t]*)@param([ \t]+)([a-zA-Z0-9_]+|\.\.\.)([ \t]+)",
"\n\\1:param\\2\\3:\\4", comment)
# @param[direction]
comment = re.sub(r"(?m)^([ \t]*)@param\[([^]]*)\]([ \t]+)([a-zA-Z0-9_]+|\.\.\.)([ \t]+)",
"\n\\1:param\\3\\4: *(\\2)* \\5", comment)
# @return
comment = re.sub(r"(?m)^([ \t]*)@returns?([ \t]+|$)",
"\n\\1:return:\\2", comment)
# @code/@endcode blocks. Works if the code is indented.
comment = re.sub(r"(?m)^([ \t]*)@code([ \t]+|$)",
"\n::\n", comment)
comment = re.sub(r"(?m)^([ \t]*)@endcode([ \t]+|$)",
"\n", comment)
# Ignore @brief.
comment = re.sub(r"(?m)^([ \t]*)@brief[ \t]+", "\n\\1", comment)
# Ignore groups
comment = re.sub(r"(?m)^([ \t]*)@(defgroup|addtogroup)[ \t]+[a-zA-Z0-9_]+[ \t]*",
"\n\\1", comment)
comment = re.sub(r"(?m)^([ \t]*)@(ingroup|{|}).*", "\n", comment)
return comment
def javadoc_liberal(comment):
"""Liberal javadoc conversion to reStructuredText"""
comment = javadoc(comment)
# Liberal conversion of any @tags, will fail for @code etc. but don't
# care.
comment = re.sub(r"(?m)^([ \t]*)@([a-zA-Z0-9_]+)([ \t]+)",
"\n\\1:\\2:\\3", comment)
return comment
def kerneldoc(comment):
"""Basic kernel-doc conversion to reStructuredText"""
comment = re.sub(r"(?m)^([ \t]*)@(returns?|RETURNS?):([ \t]+|$)",
"\n\\1:return:\\3", comment)
comment = re.sub(r"(?m)^([ \t]*)@([a-zA-Z0-9_]+|\.\.\.):([ \t]+)",
"\n\\1:param \\2:\\3", comment)
return comment
def convert(comment, **options):
"""Convert documentation from a supported syntax into reST."""
transform = options.get('transform')
transformations = {
'javadoc-basic': javadoc,
'javadoc-liberal': javadoc_liberal,
'kernel-doc': kerneldoc,
}
if transform in transformations:
comment = transformations[transform](comment)
return comment
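# A minimal usage sketch (hypothetical input):
#
#   comment = "@brief Frobnicate.\n@param foo the widget\n@return 0 on success"
#   print(convert(comment, transform='javadoc-basic'))
#
# which yields reST field lists such as ":param foo: the widget" and
# ":return: 0 on success".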
|
# -*- coding: utf-8 -*-
""" gtorch_utils/segmentation/metrics/neg_pred_val """
import torch
from gtorch_utils.constants import EPSILON
from gtorch_utils.segmentation.confusion_matrix import ConfusionMatrixMGR
def npv(input_, target, per_channel=False):
"""
Calculates and returns the average negative predictive value of the provided masks
NPV = \frac{TN}{TN+FN}
Args:
        input_ <torch.Tensor>: predicted masks [batch_size, channels, ...]
target <torch.Tensor>: ground truth masks [batch_size, channels, ...]
per_channel <bool>: Whether or not return values per channel
Returns:
avg_npv <torch.Tensor>
"""
assert isinstance(input_, torch.Tensor), type(input_)
assert isinstance(target, torch.Tensor), type(target)
assert isinstance(per_channel, bool), type(per_channel)
# TODO: find an efficient way of implementing the special case when
# both the prediction and the ground truth are white
mgr = ConfusionMatrixMGR(input_, target)
tn = mgr.true_negatives
if per_channel:
result = tn / (tn + mgr.false_negatives + EPSILON)
return result.sum(0) / input_.size(0)
tn = tn.sum(1)
result = tn / (tn + mgr.false_negatives.sum(1) + EPSILON)
return result.sum() / input_.size(0)
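# A minimal usage sketch (illustrative; assumes binary masks shaped
# [batch_size, channels, H, W]):
#
#   preds = torch.randint(0, 2, (4, 1, 8, 8)).float()
#   gts = torch.randint(0, 2, (4, 1, 8, 8)).float()
#   npv(preds, gts)                    # batch-averaged scalar
#   npv(preds, gts, per_channel=True)  # one value per channel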
|
from xml.etree.ElementTree import Element, SubElement, tostring
from mittab.apps.tab.models import School, Team, Judge, Room, Round
DEBATE_ID_PREFIX = "D"
ROOM_ID_PREFIX = "V"
SPEAKER_ID_PREFIX = "S"
TEAM_ID_PREFIX = "T"
JUDGE_ID_PREFIX = "A"
SCHOOL_ID_PREFIX = "I"
SPEAKER_STATUS_PREFIX = "SC"
class ArchiveExporter:
def __init__(self, name):
self.name = name
self.root = None
def export_tournament(self):
self.root = Element("tournament", {"name": self.name, "style": "apda"})
self.add_rounds()
self.add_participants()
self.add_schools()
self.add_rooms()
self.add_categories()
return tostring(self.root)
def add_rounds(self):
pos = ["pm", "lo", "mg", "mo"]
qs = Round.objects.all().order_by("round_number").prefetch_related(
"judges", "roundstats_set")
cur_round = None
r_tag = None
for debate in qs:
if debate.round_number != cur_round:
r_tag = SubElement(self.root, "round", {
"name": "Round %s" % debate.round_number
})
cur_round = debate.round_number
adjs = " ".join(
[JUDGE_ID_PREFIX + str(j.id) for j in debate.judges.all()])
d_tag = SubElement(r_tag, "debate", {
"id": DEBATE_ID_PREFIX + str(debate.id),
"chair": JUDGE_ID_PREFIX + str(debate.chair_id),
"adjudicators": adjs,
"venue": ROOM_ID_PREFIX + str(debate.room_id)
})
gov_tag = SubElement(d_tag, "side", {
"team": TEAM_ID_PREFIX + str(debate.gov_team_id)
})
opp_tag = SubElement(d_tag, "side", {
"team": TEAM_ID_PREFIX + str(debate.opp_team_id)
})
if debate.victor == Round.GOV_VIA_FORFEIT or \
debate.victor == Round.ALL_DROP:
opp_tag.set("forfeit", "true")
elif debate.victor == Round.OPP_VIA_FORFEIT or \
debate.victor == Round.ALL_DROP:
gov_tag.set("forfeit", "true")
team_points = [0, 0] # Gov, Opp
stats = sorted(list(debate.roundstats_set.all().values()),
key=lambda x: pos.index(x["debater_role"]))
if debate.victor == Round.GOV or debate.victor == Round.OPP:
for i, speech in enumerate(stats):
side_tag = gov_tag if i % 2 == 0 else opp_tag
team_points[i % 2] += speech["speaks"]
speech_tag = SubElement(side_tag, "speech", {
"speaker": SPEAKER_ID_PREFIX + str(speech["debater_id"]),
"reply": "false"
})
ballot = SubElement(speech_tag, "ballot", {
"adjudicators": adjs,
"rank": str(int(speech["ranks"])),
"ignored": "false"
})
ballot.text = str(speech["speaks"])
gov_win = [Round.GOV, Round.GOV_VIA_FORFEIT, Round.ALL_WIN]
gov_rank = "1" if debate.victor in gov_win else "2"
gov_ballot = Element("ballot", {
"adjudicators": adjs, "rank": gov_rank})
gov_ballot.text = str(team_points[0])
gov_tag.insert(0, gov_ballot) # Ballot must be first in sequence
opp_win = [Round.OPP, Round.OPP_VIA_FORFEIT, Round.ALL_WIN]
opp_rank = "1" if debate.victor in opp_win else "2"
opp_ballot = Element("ballot", {
"adjudicators": adjs, "rank": opp_rank})
opp_ballot.text = str(team_points[1])
opp_tag.insert(0, opp_ballot)
def add_participants(self):
participants_tag = SubElement(self.root, "participants")
for team in Team.objects.all().prefetch_related("debaters"):
team_tag = SubElement(participants_tag, "team", {
"id": TEAM_ID_PREFIX + str(team.id),
"name": team.name
})
if team.team_code is not None:
team_tag.set("code", team.team_code)
institutions = SCHOOL_ID_PREFIX + str(team.school_id)
if team.hybrid_school_id is not None:
institutions += " " + SCHOOL_ID_PREFIX + str(team.hybrid_school_id)
for debater in team.debaters.all():
debater_tag = SubElement(team_tag, "speaker", {
"id": SPEAKER_ID_PREFIX + str(debater.id),
"categories": SPEAKER_STATUS_PREFIX + str(debater.novice_status),
"institutions": institutions
})
debater_tag.text = debater.name
for judge in Judge.objects.all().prefetch_related("schools"):
SubElement(participants_tag, "adjudicator", {
"id": JUDGE_ID_PREFIX + str(judge.id),
"name": judge.name,
"score": str(judge.rank),
"institutions": " ".join(
[SCHOOL_ID_PREFIX + str(s.id) for s in judge.schools.all()])
})
def add_schools(self):
for school in School.objects.all():
school_tag = SubElement(self.root, "institution", {
"id": SCHOOL_ID_PREFIX + str(school.id),
"reference": school.name
})
school_tag.text = school.name
def add_rooms(self):
for room in Room.objects.all():
room_tag = SubElement(self.root, "venue", {
"id": ROOM_ID_PREFIX + str(room.id),
"score": str(room.rank)
})
room_tag.text = room.name
def add_categories(self):
for i, name in enumerate(["Varsity", "Novice"]):
sc_tag = SubElement(self.root, "speaker-category", {
"id": SPEAKER_STATUS_PREFIX + str(i)
})
sc_tag.text = name
|
valores = []
for linha in range(0, 3):
    for coluna in range(0, 3):
        valores.append(int(input(f'Enter a value for [{linha + 1}, {coluna + 1}]: ')))
cont = 0
print('')
for linha in range(0, 3):
    for coluna in range(0, 3):
        print(f'[ {valores[cont]} ]', end=' ')
        cont += 1
    print('')
print('')
|
import numpy as np
class Vocab:
EMPTY_CHAR = ' '
POS_CODE2HUMAN = {
'COMP': 'наречие',
'GRND': 'деепричастие',
'PRED': 'предикатив',
'INTJ': 'междометие',
'PRTF': 'причастие',
'ADJS': 'краткое прилагательное',
'PRTS': 'глагол (сов. форма)',
'INFN': 'инфинитив (глагол)',
'CONJ': 'союз',
'PRCL': 'частицы',
'ADVB': 'наречие',
'NPRO': 'местоимение',
'ADJF': 'прилагательное',
'PREP': 'предлог',
'VERB': 'глагол',
'NOUN': 'существительное'
}
def __init__(self, data_reader):
self._data_reader = data_reader
self._char2index = {}
self._index2char = []
self._part2index = {}
self._index2part = []
self._char_freq_threshold = 0
self._skipped_chars = []
self._loaded = False
def _feed_char(self, char):
if char in self._skipped_chars:
return
if char not in self._char2index:
index = len(self._char2index)
self._char2index[char] = index
self._index2char.append(char)
def _feed_speech_part(self, speech_part):
if speech_part not in self._part2index:
index = len(self._part2index)
self._part2index[speech_part] = index
self._index2part.append(speech_part)
def _load_initial_chars(self):
self._feed_char(self.EMPTY_CHAR)
self._feed_char('{')
self._feed_char('}')
def _load_initial_parts(self):
self._feed_speech_part('UNKNOWN')
def _load_chars(self):
uniq_chars = self._data_reader.get_uniq_chars()
self._load_initial_chars()
for c in sorted(uniq_chars):
self._feed_char(c)
def _load_speech_parts(self):
uniq_speech_parts = self._data_reader.get_uniq_speech_parts()
self._load_initial_parts()
for part in sorted(uniq_speech_parts):
self._feed_speech_part(part)
def _calculate_char_freq_threshold(self):
_char, max_freq = self._data_reader.get_chars_freq()[-1]
        self._char_freq_threshold = np.ceil(max_freq * 0.01)  # threshold: 1% of the highest char frequency
def _find_low_freq_chars(self):
self._calculate_char_freq_threshold()
for char, freq in self._data_reader.get_chars_freq():
if freq <= self._char_freq_threshold:
self._skipped_chars.append(char)
else:
break
def load(self):
self._find_low_freq_chars()
self._load_chars()
self._load_speech_parts()
self._loaded = True
    def char_to_index(self, char):
        if not self._loaded:
            raise RuntimeError('chars not loaded')
        # explicit None check: `.get(char) or default` would misfire for a valid index of 0
        index = self._char2index.get(char)
        return index if index is not None else self._char2index[self.EMPTY_CHAR]
def part_to_index(self, speech_part):
        if not self._loaded:
            raise RuntimeError('chars not loaded')
return self._part2index[speech_part]
def index_to_speech_part(self, index):
        if not self._loaded:
            raise RuntimeError('chars not loaded')
return self._index2part[index]
def indices_to_speech_part(self, indices):
        if not self._loaded:
            raise RuntimeError('chars not loaded')
return np.array(self._index2part)[indices]
def index_to_speech_part_human(self, index):
pos_code = self.index_to_speech_part(index)
return self.POS_CODE2HUMAN[pos_code]
def char_vocab_size(self):
return len(self._index2char)
def part_vocab_size(self):
return len(self._index2part)
if __name__ == '__main__':
from data_reader import OpenCorporaReader
from download_data import OPEN_CORPORA_DEST_FILE
data_reader = OpenCorporaReader(OPEN_CORPORA_DEST_FILE)
data_reader.load()
vocab = Vocab(data_reader)
vocab.load()
print('skipped chars threshold', vocab._char_freq_threshold)
print('skipped chars', vocab._skipped_chars)
print('vocab', vocab._char2index)
|
from interface import cabeçalho
def arquivo_yes(nome):
"""
-> Funcao para verificar se já existe um arquiv txt gerado
recebe como parametro o nome do arquivo
"""
try:
a = open(nome, 'rt')
a.close()
except FileNotFoundError:
return False
else:
return True
def criar_arq(nome):
"""
-> Em caso de não haver arquivo gerado, é criado um novo arquivo
"""
try:
a = open(nome, 'wt+')
a.close()
    except:
        print('There was an error creating the file')
    else:
        print(f'File {nome} created successfully!')
def ler_arquivo(nome):
"""
-> funcao para ler e mostra o arquivo txt, que matem os dados de usuarios cadastrados
"""
try:
a = open(nome, 'rt')
    except:
        print('Error reading the file')
    else:
        cabeçalho('REGISTERED PEOPLE')
for linha in a:
dado = linha.split(';')
dado[1] = dado[1].replace('\n', '')
            print(f'{dado[0]:<30}{dado[1]:>3} years')
finally:
a.close()
def cadastrar(arq, nome = 'Desconhecido', idade = 0):
""""
-> função para cadastrar um novo usuario no arquivo txt
"""
try:
a = open(arq,'at')
    except:
        print('There was an error opening the file!')
else:
try:
a.write(f'{nome};{idade}\n')
        except:
            print('Error adding the new record')
        else:
            print('New record saved successfully')
a.close()
|
"""
Forked from https://github.com/ecometrica/django-hashedfilenamestorage
Changes:
- Storage defined as a usual class directly
- Added segments param to define filename segmentation rule
"""
import os
import sys
import random
import hashlib
from django.core.files import File
from .unique import UniqueNameFileSystemStorage
class HashedNameFileSystemStorage(UniqueNameFileSystemStorage):
segments = None
overwrite_existing = False
def __init__(self, segments=None, *args, **kwargs):
"""
segments param: tuple value (seglength, segcount), seglength * segcount
should be less than 40 (sha1 len), usually used (1,2) or (2,2);
add prefix to filename with segcount segments by seglen chars
example: (2,2) => 1234567890... -> 12/34/1234567890,
(1,2) => 1234567890... -> 1/2/341234567890.
"""
        if (segments and len(segments) == 2 and
                segments[0] * segments[1] <= 40):
            self.segments = segments
super().__init__(*args, **kwargs)
def get_unique_available_name(self, name, max_length=None,
content=None, chunk_size=None):
dirname, basename = os.path.split(name)
ext = os.path.splitext(basename)[1].lower()
root = (self._compute_hash_by_name(name) if self.uniquify_names else
self._compute_hash_by_content(content=content,
chunk_size=chunk_size))
root = self._segments(root)
return os.path.join(dirname, '%s%s' % (root, ext,))
def _segments(self, value):
segments = None
if self.segments:
slen, scnt = self.segments
segments = [value[i:i+slen] for i in range(0, slen*scnt, slen)]
segments = '/'.join(i for i in segments if i)
        return '%s/%s' % (segments, value) if segments else value
def _compute_hash_by_name(self, name):
# generate hash from filename and some random value
name = '{}#{:.20f}'.format(name, random.random())
hasher = hashlib.sha1()
hasher.update(name.encode(sys.getfilesystemencoding(), 'ignore'))
return hasher.hexdigest()
def _compute_hash_by_content(self, content, chunk_size=None):
if chunk_size is None:
chunk_size = getattr(content, 'DEFAULT_CHUNK_SIZE',
File.DEFAULT_CHUNK_SIZE)
hasher = hashlib.sha1()
cursor = content.tell()
content.seek(0)
try:
while True:
data = content.read(chunk_size)
if not data:
break
hasher.update(data)
return hasher.hexdigest()
finally:
content.seek(cursor)
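# A minimal illustration of the segmentation rule (hypothetical hash value):
#
#   storage = HashedNameFileSystemStorage(segments=(2, 2))
#   storage._segments('abcdef0123')  # -> 'ab/cd/abcdef0123'
#
# Spreading hashed names across subdirectories keeps any single directory
# from accumulating too many files.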
|
from pycroft.model.facilities import Room
def refill_room_data(form, room):
if room:
form.building.data = room.building
levels = Room.q.filter_by(building_id=room.building.id).order_by(Room.level)\
.distinct()
form.level.choices = [(entry.level, str(entry.level)) for entry in
levels]
form.level.data = room.level
rooms = Room.q.filter_by(
building_id=room.building.id,
level=room.level
).order_by(Room.number).distinct()
form.room_number.choices = [(entry.number, str(entry.number))
for entry in rooms]
form.room_number.data = room.number
|
import pandas as pd
import os
import argparse
import sys
def index_ccds_ids_by_uniprot(ccds_to_uniprot_df):
# trim the trailing version information (-1, -2, etc.) since we don't have versioning in the PTM files
ccds_to_uniprot_df['UniProtKB_short'] = ccds_to_uniprot_df['UniProtKB'].apply(lambda u: u.split('-')[0])
# use the new trimmed UniProtKB column for grouping
return ccds_to_uniprot_df.groupby('UniProtKB_short')["#ccds"].apply(set).to_dict()
def index_enst_by_ccds_ids(ccds_to_sequence_df):
filtered = ccds_to_sequence_df[ccds_to_sequence_df['nucleotide_ID'].str.contains("ENST")]
grouped = filtered.groupby("#ccds")
return grouped["nucleotide_ID"].apply(set).to_dict()
def find_enst_by_ccds(ccds_id, ccds_to_enst_dict):
try:
return ccds_to_enst_dict[ccds_id]
except KeyError:
return 'NA'
def find_enst_by_uniprot(uniprot_id, ccds_to_uniprot_dict, ccds_to_enst_dict):
try:
flat_enst_id_list = []
# add all into a flat list
for enst_ids in map(lambda ccds_id: find_enst_by_ccds(ccds_id, ccds_to_enst_dict),
ccds_to_uniprot_dict[uniprot_id]):
for enst_id in enst_ids:
flat_enst_id_list.append(enst_id)
# remove duplicates (if any)
flat_enst_id_set = set(flat_enst_id_list)
# remove NA
flat_enst_id_set = set(filter(lambda enst_id: enst_id != 'NA', flat_enst_id_set))
return flat_enst_id_set
except KeyError:
return set([])
# Convert a comma (or colon, or semicolon) separated PubMedID column into a proper set of strings
def parse_pubmed_ids(pubmed_ids):
    # clean up & split by possible delimiters
    pubmed_id_list = str(pubmed_ids).replace('doi:', '').replace(':', ';').replace(",", ";").split(';')
    # strip whitespace and filter out empty/invalid string values
    pubmed_id_list = [pubmed_id.strip() for pubmed_id in pubmed_id_list if pubmed_id.strip()]
    # return a set of strings (or an empty set)
    return set(pubmed_id_list)
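# Worked example (illustrative): parse_pubmed_ids('123, 456;doi:789') strips
# the 'doi:' prefix, splits on the delimiters, trims whitespace, and returns
# {'123', '456', '789'}.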
# for each ptm file map UniprotKB to ENST, and add a new EnsemblTranscript column
def add_enst_column_to_ptm_files(ccds_to_uniprot_dict, ccds_to_enst_dict, ptm_input_dir):
frames = []
# read and process all files under the directory
for ptm_file in os.listdir(ptm_input_dir):
ptm_df = pd.read_csv(f'{ptm_input_dir}/{ptm_file}',
sep='\t',
names=["uniprot_entry", "uniprot_accession", "position", "type", "pubmed_ids", "sequence"])
# parse PubMed ids
ptm_df['pubmed_ids'] = ptm_df.apply(lambda row: parse_pubmed_ids(row["pubmed_ids"]), axis=1)
    # add Ensembl transcript info
ptm_df['ensembl_transcript_ids'] = ptm_df.apply(
lambda row: find_enst_by_uniprot(row["uniprot_accession"], ccds_to_uniprot_dict, ccds_to_enst_dict),
axis=1)
frames.append(ptm_df)
# combine all frames and output a single PTM file
pd.concat(frames).to_json(sys.stdout, orient='records', lines=True)
def main(ccds_to_uniprot, ccds_to_sequence, ptm_input_dir):
# parse ccds mapping files
ccds_to_uniprot_df = pd.read_csv(ccds_to_uniprot, sep='\t')
ccds_to_sequence_df = pd.read_csv(ccds_to_sequence, sep='\t')
# create dictionaries
ccds_to_uniprot_dict = index_ccds_ids_by_uniprot(ccds_to_uniprot_df)
ccds_to_enst_dict = index_enst_by_ccds_ids(ccds_to_sequence_df)
# add ENST to PTM files
add_enst_column_to_ptm_files(ccds_to_uniprot_dict, ccds_to_enst_dict, ptm_input_dir)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("ccds_to_uniprot",
help="common_input/CCDS2UniProtKB.current.txt")
parser.add_argument("ccds_to_sequence",
help="common_input/CCDS2Sequence.current.txt")
parser.add_argument("ptm_input_dir",
help="ptm/input")
args = parser.parse_args()
main(args.ccds_to_uniprot, args.ccds_to_sequence, args.ptm_input_dir)
|
import boto3
import time
from boto3.session import Session
import sys, traceback
import json
import requests
from botocore.config import Config
import logging
from utils.innovation_sbx_helpers import *
import inspect
SUCCESS = "SUCCESS"
FAILED = "FAILED"
config = Config(
retries = {
'max_attempts': 10,
'mode': 'standard'
}
)
logger = logging.getLogger()
logger.setLevel(logging.INFO)
def accept_resource_share_sbx(credentials):
client = boto3.client('ram',
aws_access_key_id=credentials['AccessKeyId'],
aws_secret_access_key=credentials['SecretAccessKey'],
aws_session_token=credentials['SessionToken'],
region_name=boto3.session.Session().region_name+"",
config=config
)
# Check if resource share invitation is auto-accepted
try:
resource_shares = client.get_resource_shares(resourceOwner='OTHER-ACCOUNTS')
if len(resource_shares["resourceShares"]) > 0:
for r in resource_shares["resourceShares"]:
if r["name"] == "ISTGWShareAppStream" and r["status"] == "ACTIVE":
return
except Exception as e:
message = {'MESSAGE': 'Exception while getting resource shares in the Sandbox Account','FILE': __file__.split('/')[-1],
'METHOD': inspect.stack()[0][3], 'EXCEPTION': str(e), 'TRACE': traceback.format_exc()}
logger.exception(message)
raise
rs_arn = None
try:
response = client.get_resource_share_invitations()
logger.info(response)
if len(response['resourceShareInvitations']) != 0:
for r in response['resourceShareInvitations']:
if r["resourceShareName"] == "ISTGWShareAppStream":
rs_arn = r["resourceShareInvitationArn"]
break
except Exception as e:
        message = {'MESSAGE': 'Exception occurred while fetching TGW resource share invitation in the Sandbox Account',
'FILE': __file__.split('/')[-1], 'METHOD': inspect.stack()[0][3], 'EXCEPTION': str(e), 'TRACE': traceback.format_exc()}
logger.exception(message)
raise
if rs_arn is None:
        message = {'MESSAGE': 'No resource share invitations found', 'FILE': __file__.split('/')[-1],
                   'METHOD': inspect.stack()[0][3]}
logger.exception(message)
raise Exception("No resource share invitations found")
try:
accept_inv = client.accept_resource_share_invitation(
resourceShareInvitationArn=rs_arn,
clientToken='xyz_abcd9991'
)
except Exception as e:
message = {'MESSAGE': 'Unable to accept TGW resource share invitation in the Sandbox Account', 'FILE': __file__.split('/')[-1],
'METHOD': inspect.stack()[0][3], 'EXCEPTION': str(e), 'TRACE': traceback.format_exc()}
logger.exception(message)
raise
return
def create(event, context):
logger.info("Accepting Resource Share for Transit Gateway")
logger.info(event)
try:
props = event["ResourceProperties"]
sbx = props['Sandbox_Account_ID']
credentials_sbx = assume_role(sbx)
accept_resource_share_sbx(credentials_sbx)
responseData = {
"Message": "TGW resource sharing accepted"
}
send(event, context, SUCCESS, responseData, "Accept_TGW_Resource_Share")
except Exception as e:
message = {'MESSAGE': 'Exception occurred while attempting to accept TGW resource share',
'FILE': __file__.split('/')[-1], 'METHOD': inspect.stack()[0][3], 'EXCEPTION': str(e), 'TRACE': traceback.format_exc()}
logger.exception(message)
errorResponseData = {
"Message": "Resource Sharing Failed"
}
send(event, context, FAILED, errorResponseData, "Accept_TGW_Resource_Share")
def main(event, context):
if event['RequestType'] == 'Create':
create(event, context)
return
elif event['RequestType'] == 'Update':
responseData = {"message": "No updates were made"}
send(event, context, SUCCESS, responseData, "Accept_TGW_Resource_Share")
return
elif event['RequestType'] == 'Delete':
responseData = {"message":"No deletes were made."}
send(event, context, SUCCESS, responseData, "Accept_TGW_Resource_Share")
return
else:
responseData = {"message": "Unsupported operation"}
send(event, context, FAILED, responseData, "Accept_TGW_Resource_Share")
|
from functools import wraps
from flask import jsonify, request
from flask_jwt_extended import get_jwt_identity
from sqlalchemy import or_, literal, func
from sqlalchemy.orm import aliased
from model.Database import DBSession
from model.models import *
def get_nearby_station(place, session):
return session.query(Station) \
.join(District, Station.district_id == District.district_id) \
.join(City, District.city_id == City.city_id) \
.join(Province, Province.province_id == City.province_id) \
.filter(or_(District.district_name.like("%" + place + "%"),
City.city_name.like("%" + place + "%"),
Province.province_name.like("%" + place + "%")),
Station.available == True)
def get_interval_list(train_name, session, allow_unavailable=False):
first_id = session.query(Interval.interval_id) \
.join(Train, Train.train_id == Interval.train_id) \
.filter(Train.train_name == train_name, Interval.prev_id == None,
or_(literal(allow_unavailable), Interval.available == True)) \
.first() \
.interval_id
cte = session.query(Interval, literal(1).label('interval_no')) \
.filter(Interval.interval_id == first_id, or_(literal(allow_unavailable), Interval.available == True)) \
.cte(name='cte', recursive=True)
cte_alias = aliased(cte, name='c')
i_alias = aliased(Interval, name='i')
cte = cte.union_all(
session.query(i_alias, cte_alias.c.interval_no + 1)
.filter(i_alias.interval_id == cte_alias.c.next_id,
or_(literal(allow_unavailable), i_alias.available == True))
)
return cte
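# Example (assuming a configured DBSession); walks train 'G1024' interval by interval:
#   session = DBSession()
#   cte = get_interval_list('G1024', session)
#   rows = session.query(cte).order_by(cte.c.interval_no).all()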
def fuzzy_query(dep_place, arv_place, dg_only, session):
dep_train_info = session.query(Interval.train_id, Interval.dep_station) \
.join(Station, Interval.dep_station == Station.station_id) \
.filter(Station.station_name.like(dep_place),
Station.available == True,
Interval.available == True) \
.subquery()
arv_train_info = session.query(Interval.train_id, Interval.arv_station) \
.join(Station, Interval.arv_station == Station.station_id) \
.filter(Station.station_name.like(arv_place),
Station.available == True,
Interval.available == True) \
.subquery()
raw_train_info = session.query(Interval.train_id, Train.train_name,
func.min(Interval.interval_id).label('first_interval'),
func.max(Interval.interval_id).label('last_interval')) \
.join(Train, Train.train_id == Interval.train_id) \
.join(dep_train_info, Interval.train_id == dep_train_info.c.train_id) \
.join(arv_train_info, Interval.train_id == arv_train_info.c.train_id) \
.filter(or_(Interval.dep_station == dep_train_info.c.dep_station,
Interval.arv_station == arv_train_info.c.arv_station),
Train.available == True) \
.group_by(Interval.train_id, Train.train_name) \
.subquery()
dep_i = aliased(Interval, name='dep_i')
arv_i = aliased(Interval, name='arv_i')
dep_s = aliased(Station, name='dep_s')
arv_s = aliased(Station, name='arv_s')
train_info_list = session.query(raw_train_info.c.train_name,
raw_train_info.c.first_interval, raw_train_info.c.last_interval,
dep_s.station_name.label('dep_station'),
func.cast(dep_i.dep_datetime, String).label('dep_time'),
arv_s.station_name.label('arv_station'),
func.cast(arv_i.arv_datetime, String).label('arv_time')) \
.join(dep_i, dep_i.interval_id == raw_train_info.c.first_interval) \
.join(arv_i, arv_i.interval_id == raw_train_info.c.last_interval) \
.join(dep_s, dep_s.station_id == dep_i.dep_station) \
.join(arv_s, arv_s.station_id == arv_i.arv_station) \
.filter(dep_s.station_name.like(dep_place), arv_s.station_name.like(arv_place),
dep_s.available == True, arv_s.available == True,
dep_i.available == True, arv_i.available == True) \
.order_by(dep_i.dep_datetime) \
.all()
return list(filter(lambda x: x['train_name'][0] in 'DG' if dg_only else True,
map(lambda x: dict(zip(x.keys(), x)), train_info_list)))
def check_admin(fun):
    @wraps(fun)
    def wrapper(*args, **kwargs):
        session = DBSession()
        user_id = get_jwt_identity()
        is_admin = session.query(User.is_admin).filter(User.user_id == user_id).first()
        session.close()
        if is_admin[0]:
            return fun(*args, **kwargs)
        else:
            return jsonify(code=10, error='This user has no administrator privileges and cannot perform admin operations')
    return wrapper
def check_not_empty(*req_args):
    def decorator(fun):
        @wraps(fun)
        def wrapper(*args, **kwargs):
            for arg in req_args:
                if not request.args.get(arg):
                    return jsonify(code=21, error='Request arguments must not be empty')
            return fun(*args, **kwargs)
        return wrapper
    return decorator
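# Hypothetical usage, stacking both decorators on a JWT-protected route:
#   @app.route('/api/admin/station')
#   @jwt_required()
#   @check_admin
#   @check_not_empty('station_name')
#   def add_station(): ...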
|
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/hyperxai01/can-autoware/src/socketcan_interface/include;/usr/include".split(';') if "/home/hyperxai01/can-autoware/src/socketcan_interface/include;/usr/include" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-lsocketcan_interface_string;/usr/lib/x86_64-linux-gnu/libboost_chrono.so;/usr/lib/x86_64-linux-gnu/libboost_system.so;/usr/lib/x86_64-linux-gnu/libboost_thread.so;/usr/lib/x86_64-linux-gnu/libboost_date_time.so;/usr/lib/x86_64-linux-gnu/libboost_atomic.so;/usr/lib/x86_64-linux-gnu/libpthread.so".split(';') if "-lsocketcan_interface_string;/usr/lib/x86_64-linux-gnu/libboost_chrono.so;/usr/lib/x86_64-linux-gnu/libboost_system.so;/usr/lib/x86_64-linux-gnu/libboost_thread.so;/usr/lib/x86_64-linux-gnu/libboost_date_time.so;/usr/lib/x86_64-linux-gnu/libboost_atomic.so;/usr/lib/x86_64-linux-gnu/libpthread.so" != "" else []
PROJECT_NAME = "socketcan_interface"
PROJECT_SPACE_DIR = "/home/hyperxai01/can-autoware/devel"
PROJECT_VERSION = "0.7.11"
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AlipayOpenMiniMiniappFavoriteextDeleteModel(object):
def __init__(self):
self._biz_type = None
self._extend_info = None
self._mini_app_id = None
self._principal_biz_type = None
self._principal_ids = None
self._principal_type = None
self._user_id = None
@property
def biz_type(self):
return self._biz_type
@biz_type.setter
def biz_type(self, value):
self._biz_type = value
@property
def extend_info(self):
return self._extend_info
@extend_info.setter
def extend_info(self, value):
self._extend_info = value
@property
def mini_app_id(self):
return self._mini_app_id
@mini_app_id.setter
def mini_app_id(self, value):
self._mini_app_id = value
@property
def principal_biz_type(self):
return self._principal_biz_type
@principal_biz_type.setter
def principal_biz_type(self, value):
self._principal_biz_type = value
@property
def principal_ids(self):
return self._principal_ids
@principal_ids.setter
def principal_ids(self, value):
if isinstance(value, list):
self._principal_ids = list()
for i in value:
self._principal_ids.append(i)
@property
def principal_type(self):
return self._principal_type
@principal_type.setter
def principal_type(self, value):
self._principal_type = value
@property
def user_id(self):
return self._user_id
@user_id.setter
def user_id(self, value):
self._user_id = value
def to_alipay_dict(self):
params = dict()
if self.biz_type:
if hasattr(self.biz_type, 'to_alipay_dict'):
params['biz_type'] = self.biz_type.to_alipay_dict()
else:
params['biz_type'] = self.biz_type
if self.extend_info:
if hasattr(self.extend_info, 'to_alipay_dict'):
params['extend_info'] = self.extend_info.to_alipay_dict()
else:
params['extend_info'] = self.extend_info
if self.mini_app_id:
if hasattr(self.mini_app_id, 'to_alipay_dict'):
params['mini_app_id'] = self.mini_app_id.to_alipay_dict()
else:
params['mini_app_id'] = self.mini_app_id
if self.principal_biz_type:
if hasattr(self.principal_biz_type, 'to_alipay_dict'):
params['principal_biz_type'] = self.principal_biz_type.to_alipay_dict()
else:
params['principal_biz_type'] = self.principal_biz_type
if self.principal_ids:
if isinstance(self.principal_ids, list):
for i in range(0, len(self.principal_ids)):
element = self.principal_ids[i]
if hasattr(element, 'to_alipay_dict'):
self.principal_ids[i] = element.to_alipay_dict()
if hasattr(self.principal_ids, 'to_alipay_dict'):
params['principal_ids'] = self.principal_ids.to_alipay_dict()
else:
params['principal_ids'] = self.principal_ids
if self.principal_type:
if hasattr(self.principal_type, 'to_alipay_dict'):
params['principal_type'] = self.principal_type.to_alipay_dict()
else:
params['principal_type'] = self.principal_type
if self.user_id:
if hasattr(self.user_id, 'to_alipay_dict'):
params['user_id'] = self.user_id.to_alipay_dict()
else:
params['user_id'] = self.user_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayOpenMiniMiniappFavoriteextDeleteModel()
if 'biz_type' in d:
o.biz_type = d['biz_type']
if 'extend_info' in d:
o.extend_info = d['extend_info']
if 'mini_app_id' in d:
o.mini_app_id = d['mini_app_id']
if 'principal_biz_type' in d:
o.principal_biz_type = d['principal_biz_type']
if 'principal_ids' in d:
o.principal_ids = d['principal_ids']
if 'principal_type' in d:
o.principal_type = d['principal_type']
if 'user_id' in d:
o.user_id = d['user_id']
return o
|
"""
Community Data
--------------
Module for managing Input data for AAEM
"""
from pandas import read_csv, DataFrame
import yaml
import os.path
import numpy as np
from importlib import import_module
from aaem.defaults import base_order
from defaults import base_structure, base_comments
from diagnostics import Diagnostics
from aaem.components import comp_lib, comp_order
## read in config IO helpers (the import is split across two lines to keep them short)
from aaem.config_IO import read_config, merge_configs, save_config
from aaem.config_IO import validate_dict
class CommunityData (object):
"""This class manages the input data for the an AAEM model instance
"""
def __init__ (self, community_config, global_config = None, diag = None,
scalers = {'diesel price':1.0, 'diesel price adder':0},
intertie_config = None):
"""This class manages the input data for the an AAEM model instance
Parameters
----------
community_config: path to yaml file or dict
            the config dictionary, or the path to a yaml file to load it from.
            It should match the format to be validated for the model to run,
            unless global_config is also provided, in which case all config
            values not provided here must have values in global_config
global_config: path to yaml file or dict, optional
Optional second config file of values that can be applied to many
model instances
diag: Diagnostics, optional
AAEM Diagnostics object for tracking messages from model
scalers: Dict, optional
Scalers to change model behaviour. The keys 'diesel price' and
'diesel price adder' are used in CommunityData, and will be multiplied
or added to the diesel prices respectively. This will carry over into
            the electric non-fuel prices.
Attributes
----------
data: Dict
Configuration data for model instance
diagnostics:Diagnostics
AAEM Diagnostics object for tracking messages from model
intertie: str, or None
            status to track intertie placement, 'parent', 'child', or None
intertie_data: CommunityData
config data for the entire intertie if community is a
child community
"""
self.diagnostics = diag
        if diag is None:
self.diagnostics = Diagnostics()
if type(community_config) is dict and type(global_config) is dict:
self.data = merge_configs(self.load_structure(), global_config)
self.data = merge_configs(self.data, community_config)
else:
self.data = self.load_config(community_config, global_config)
valid, reason = self.validate_config()
if not valid:
            raise StandardError('INVALID CONFIG FILE: ' + reason)
self.intertie = None
intertie = self.get_item('community', 'intertie')
if type (intertie) is list:
if self.get_item('community', 'model as intertie') :
self.intertie = 'parent'
else:
self.intertie = 'child'
self.intertie_data = None
## load if community is part of an intertie but not the intertie
        ## itself, i.e. load info for Bethel or Oscarville,
## but not Bethel_intertie
if not self.intertie is None and \
not self.get_item('community', 'model as intertie'):
self.diagnostics.add_note(
'Community Data',
'Attempting to find intertie data'
)
## is the intertie_config a dict of file, also use same globals
if type(intertie_config) is dict \
or (type(intertie_config) is str \
and os.path.isfile(intertie_config)):
self.diagnostics.add_note(
'Community Data',
'using provided intertie_config argument'
)
self.intertie_data = CommunityData(
intertie_config,
global_config,
self.diagnostics,
scalers
)
## try to find the file
elif os.path.isfile(community_config):
rt_path = os.path.split(community_config)
it_file = \
self.get_item('community','intertie')[0].\
replace(' ','_').replace("'",'')\
+ '_intertie.yaml'
it_file = os.path.join(rt_path[0], it_file)
if os.path.isfile(it_file):
self.diagnostics.add_note(
'Community Data',
                    'Found intertie data at ' + it_file
)
self.intertie_data = CommunityData(
it_file,
global_config,
self.diagnostics,
scalers
)
else:
self.diagnostics.add_note(
'Community Data',
                    'Could not find intertie data; leaving it as None'
)
                ## self.intertie_data = None was initialized above
        ## component specific plugins
for comp in comp_order:
config = import_module("aaem.components." + comp_lib[comp]).config
try:
plugins = config.plugins
except AttributeError:
continue
for plugin in plugins:
plugin(self, community_config, global_config, scalers)
convert = self.data['community']['diesel prices']
convert.index = [int(y) for y in convert.index]
convert.index.name = 'year'
convert = self.data['community']['electric prices']
convert.index = [int(y) for y in convert.index]
convert.index.name = 'year'
# modify diesel prices and electric prices
self.apply_scalers(scalers)
self.check_auto_disable_conditions ()
#~ self.load_construction_multipliers(construction_multipliers)
def apply_scalers(self, scalers):
"""apply scalers to inupt variables
Parameters
----------
scalers: Dict
Scalers to change model behaviour. The keys 'diesel price' and
'diesel price adder' are used in CommunityData, and will be multiplied
or added to the diesel prices respectively. This will carry over into
            the electric non-fuel prices.
"""
## diesel
        if scalers['diesel price'] != 1 or scalers['diesel price adder'] != 0:
self.diagnostics.add_note(
'Community Data',
                'Adjusting diesel and electric prices'
)
else:
return
self.data['community']['diesel prices'] = \
self.data['community']['diesel prices'] * scalers['diesel price'] +\
scalers['diesel price adder']
## electric
percent_diesel = \
float(self.data['community']['percent diesel generation']) / 100.0
efficiency = \
float(self.data['community']['diesel generation efficiency'])
adder = percent_diesel * \
self.data['community']['diesel prices'] / efficiency
self.data['community']['electric prices'] = \
float(self.data['community']['electric non-fuel price']) + adder
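        ## Worked example (assuming efficiency is in kWh/gallon): with 80%
        ## diesel generation, $3.00/gal diesel, and 12 kWh/gal efficiency,
        ## the adder is .8 * 3.00 / 12 = $0.20/kWh on top of the non-fuel price.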
def check_auto_disable_conditions (self):
"""
check for any auto disable conditions and disable those components
"""
# no conditions at this time
pass
#~ st = self.get_item('Water & Wastewater Efficiency',"data").ix["assumption type used"]
#~ if st.values[0] == "UNKNOWN":
#~ self.set_item('Water & Wastewater Efficiency',"enabled", False)
#~ self.diagnostics.add_error("Community Data",
#~ ("(Checking Inputs) Water Wastewater system type unknown."
#~ " Fixing by disabling Wastewater component at runtime"))
def modify_diesel_prices(self,
scalers = {'diesel price':1.0, 'diesel price adder':0}):
pass
def modify_non_fuel_electricity_prices (self, N_slope_price = .15):
"""
calculate the electricity price
pre:
community: diesel generation efficiency should be a numeric type
> 0
community: elec non-fuel cost: is a floating point dollar value
community: diesel prices: is a diesel projections object.
post:
community: electric prices: is a data frame of dollar
values indexed by year
"""
# TODO: 1 is 100% need to change to a calculation
pass
def validate_config(self):
"""
validate a config library
pre:
lib should be a dictionary object
post:
returns true if lib is a valid config object; otherwise false
"""
return validate_dict(self.data, self.load_structure())
def load_structure (self):
"""
"""
try:
return self.structure
except AttributeError:
pass
structure = base_structure
for comp in comp_lib:
module = \
import_module('aaem.components.' + comp_lib[comp] + '.config')
structure = merge_configs(structure, module.structure)
self.structure = structure
return structure
def load_config (self, community_file, global_file = None):
"""
loads the input files and creates the model input object
pre:
community_file, and defaults should be .yaml files
post:
            self.data is usable
"""
community = read_config(community_file)
global_ = {}
if not global_file is None:
global_ = read_config(global_file)
return merge_configs(
merge_configs(
self.load_structure(),
global_
),
community
)
def get_item (self, section, key):
"""
get an item
pre:
self.data exists
section is a config section, and key is a key in said section
post:
returns an item
"""
return self.data[section][key]
def get_section (self, section):
"""
        gets a section
pre:
self.data exists
section is a config section
post:
returns a section library
"""
return self.data[section]
def set_item (self, section, key, data):
"""
set an item
pre:
self.data exists
section is a config section, and key is a key in said section, and
data is the type that make sense there.
post:
self.data[section][key] is data
"""
self.data[section][key] = data
def save (self, fname):
"""
save the inputs used
pre:
            self.data exists, fname is the path to a file
        post:
a valid .yaml config file is created
"""
## save work around
import copy
copy = copy.deepcopy(self.data)
comment = "config used"
#~ conf, orders, comments, defaults = \
#~ config.get_config(config.non_component_config_sections)
#~ for comp in comp_lib:
#~ cfg = import_module("aaem.components." + comp_lib[comp]+ '.config')
#~ order = list(cfg.yaml_order) + \
#~ list(set(cfg.yaml_order) ^ set(cfg.yaml.keys()))
#~ orders[comp] = order
#~ comments[comp] = cfg.yaml_comments
#~ section_order = config.non_component_config_sections + comp_order
s_order = ['community'] + comp_order
i_order = {'community': base_order}
comments = base_comments
for comp in comp_lib:
module = import_module('aaem.components.' + comp_lib[comp])
i_order[comp] = module.config.order
comments[comp] = module.config.comments
save_config(fname,copy,
comments = comments,
s_order = s_order,
i_orders = i_order,
                    header = 'configuration used to generate these results'
)
del copy
#~ self.data = copy
#~ return comment + text
|
"""
===============================
11. Noise covariance estimation
===============================
Covariance matrices are computed and saved.
"""
import itertools
import logging
import mne
from mne.parallel import parallel_func
from mne_bids import BIDSPath
import config
from config import gen_log_message, on_error, failsafe_run
logger = logging.getLogger('mne-bids-pipeline')
def compute_cov_from_epochs(subject, session, tmin, tmax):
bids_path = BIDSPath(subject=subject,
session=session,
task=config.get_task(),
acquisition=config.acq,
run=None,
processing=config.proc,
recording=config.rec,
space=config.space,
extension='.fif',
datatype=config.get_datatype(),
root=config.get_deriv_root(),
check=False)
processing = None
if config.spatial_filter is not None:
processing = 'clean'
epo_fname = bids_path.copy().update(processing=processing, suffix='epo')
cov_fname = bids_path.copy().update(suffix='cov')
msg = (f"Computing regularized covariance based on epochs' baseline "
f"periods. Input: {epo_fname}, Output: {cov_fname}")
logger.info(gen_log_message(message=msg, step=11, subject=subject,
session=session))
epochs = mne.read_epochs(epo_fname, preload=True)
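    # 'shrunk' applies shrinkage regularization to the empirical covariance;
    # rank='info' infers the effective data rank from the measurement info
    # (important when projections or ICA have reduced the rank).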
cov = mne.compute_covariance(epochs, tmin=tmin, tmax=tmax, method='shrunk',
rank='info')
cov.save(cov_fname)
def compute_cov_from_empty_room(subject, session):
bids_path = BIDSPath(subject=subject,
session=session,
task=config.get_task(),
acquisition=config.acq,
run=None,
recording=config.rec,
space=config.space,
extension='.fif',
datatype=config.get_datatype(),
root=config.get_deriv_root(),
check=False)
raw_er_fname = bids_path.copy().update(processing='filt', task='noise',
suffix='raw')
cov_fname = bids_path.copy().update(suffix='cov')
msg = (f'Computing regularized covariance based on empty-room recording. '
f'Input: {raw_er_fname}, Output: {cov_fname}')
logger.info(gen_log_message(message=msg, step=11, subject=subject,
session=session))
raw_er = mne.io.read_raw_fif(raw_er_fname, preload=True)
cov = mne.compute_raw_covariance(raw_er, method='shrunk', rank='info')
cov.save(cov_fname)
@failsafe_run(on_error=on_error)
def run_covariance(subject, session=None):
if config.noise_cov == 'emptyroom' and 'eeg' not in config.ch_types:
compute_cov_from_empty_room(subject=subject, session=session)
else:
tmin, tmax = config.noise_cov
compute_cov_from_epochs(subject=subject, session=session, tmin=tmin,
tmax=tmax)
def main():
"""Run cov."""
msg = 'Running Step 11: Estimate noise covariance'
logger.info(gen_log_message(step=11, message=msg))
if not config.run_source_estimation:
msg = ' … skipping: run_source_estimation is set to False.'
logger.info(gen_log_message(step=11, message=msg))
return
parallel, run_func, _ = parallel_func(run_covariance, n_jobs=config.N_JOBS)
parallel(run_func(subject, session) for subject, session in
itertools.product(config.get_subjects(), config.get_sessions()))
msg = 'Completed Step 11: Estimate noise covariance'
logger.info(gen_log_message(step=11, message=msg))
if __name__ == '__main__':
main()
|
from sounding_selection.point import Point
class Vertex(Point):
""" A Vertex is an extension of Class Point and takes (x,y) attributes plus an elevation."""
def __init__(self, x, y, z):
Point.__init__(self, x, y)
self.__field_values = [z] # Default: vertex has one field value and can be extended
def get_z(self):
return self.__field_values[0]
def set_z(self, z):
self.__field_values[0] = z
    def get_c(self, pos):
        if pos in (0, 1):
            return super().get_c(pos)
        else:
            # field values are stored from coordinate position 2 onwards
            # (z lives at index 0), so offset the index accordingly
            return self.__field_values[pos - 2]
    def set_c(self, pos, c):
        if pos in (0, 1):
            super().set_c(pos, c)
        else:
            try:
                self.__field_values[pos - 2] = c
            except IndexError:
                # instead of raising an exception, append the field value to the end of the array
                self.__field_values.append(c)
def get_fields_num(self):
return len(self.__field_values)
def __str__(self):
return "%s,%s,%s" % (self.get_x(), self.get_y(), self.get_z())
|
from time import sleep
import picamera
import picamera.array
import numpy as np
from camera_base import Camera
from shared import get_image_publisher
class CameraPicamera(Camera):
def __init__(self, cap_fps: int = 10, resolution=(2592, 1944), run=True, **kwargs) -> None:
super().__init__(**kwargs)
self.fps = cap_fps
self.res = resolution
self.buffer = None
self.camera = picamera.PiCamera()
self.configure(self.camera, self.res, self.fps)
self.output = picamera.array.PiRGBArray(self.camera)
if run:
self.start()
@staticmethod
def configure(camera, res, fps):
camera.resolution = res
camera.framerate = fps
# camera.sharpness = 0
# camera.contrast = 0
# camera.brightness = 50
# camera.saturation = 0
# camera.ISO = 0
# camera.video_stabilization = False
# camera.exposure_compensation = 0
# camera.exposure_mode = 'auto'
# camera.meter_mode = 'average'
# camera.awb_mode = 'auto'
# camera.image_effect = 'none'
# camera.color_effects = None
# camera.rotation = 0
camera.hflip = True
camera.vflip = True
# camera.crop = (0.0, 0.0, 1.0, 1.0)
#
# for i in range(10):
# if camera.analog_gain > 1:
# break
# sleep(0.1)
#
# # Now fix the values
# camera.shutter_speed = camera.exposure_speed # 10**6//90#
# camera.exposure_mode = 'off'
# g = camera.awb_gains
# camera.awb_mode = 'off'
# camera.awb_gains = g
def step(self):
self.camera.capture(self.output, 'rgb')
if self.buffer is None:
self.buffer = get_image_publisher("shm://camera", self.output.array.shape, np.uint8)
print(self.output.array.shape)
self.buffer[:] = self.output.array
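        # reset the stream so the next capture overwrites from the start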
self.output.truncate(0)
if __name__ == '__main__':
camera_worker = CameraPicamera()
|
import imageio
from txt_img_aug import TxtImgAug
from pathlib import Path
import sys
imagePath = "testimage.png"
seed = None
padding = "WHITE"
resize_height = None
resize_width = None
for arg in sys.argv:
name_val = arg.split("=")
if name_val[0].strip() == "image":
imagePath = name_val[1].strip()
elif name_val[0].strip() == "seed":
seed = int(name_val[1])
elif name_val[0].strip() == "padding":
padding = name_val[1].strip() if name_val[1].strip() == "BLACK" else "WHITE"
elif name_val[0].strip() =="height":
resize_height = int(name_val[1])
elif name_val[0].strip() == "width":
resize_width = int(name_val[1])
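# Example invocation (hypothetical script name):
#   python page_deformation.py image=page.png seed=42 padding=BLACK height=1024 width=768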
image = imageio.imread(imagePath)
print("using image: {}".format(imagePath))
print("padding colour: {}".format(padding))
print("resize to height: {}".format(resize_height))
print("resize to width: {}".format(resize_width))
tia = TxtImgAug(seed)
outputPath = "output/page_deformation"
Path(outputPath).mkdir(parents=True, exist_ok=True)
for no in range(0,50):
deformed = tia.elastic_defromation(image)
deformed = tia.scale(deformed, resize_width, resize_height)
deformed = tia.pad(deformed, resize_width, resize_height, padding)
imageio.imwrite("{}/testimage{}.png".format(outputPath, no), deformed)
|
import itertools
import argparse
import random
def sample_data(input_file, output_file, num_tokens, seed):
"""Sample lines from input_file until the number of tokens
exceeds num_tokens or the file is exhausted.
Save sampled lines to output_file.
Args:
input_file (str): input file path
output_file (str): output file path
        num_tokens (int): threshold on number of tokens
        seed (int): random seed used to shuffle the line order
    """
random.seed(seed)
with open(input_file) as f_in:
lines = f_in.readlines()
N = len(lines)
idxs = random.sample(range(N), N)
tokens = 0
with open(output_file, "w") as f_out:
for idx in idxs:
line = lines[idx]
tokens += len(line.split())
f_out.write(line)
if tokens > num_tokens:
break
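# Example: sample roughly 1M tokens from a (hypothetical) corpus file
#   sample_data('wiki40b-txt/en.valid', 'wiki40b-txt-sampled/en.valid', 1_000_000, seed=1)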
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--input_prefix", default="wiki40b-txt")
parser.add_argument("--output_prefix", default="wiki40b-txt-sampled")
parser.add_argument("--num_train_tokens", type=int, default=20_000_000)
parser.add_argument("--num_test_tokens", type=int, default=1_000_000)
parser.add_argument("--lang_code_list")
parser.add_argument("--ext_list", default="train,test,valid")
parser.add_argument("--seed", type=int, default=1)
args = parser.parse_args()
lang_code_list = args.lang_code_list.split(",")
ext_list = args.ext_list.split(",")
for lang_code, ext in itertools.product(lang_code_list, ext_list):
input_file = f"{args.input_prefix}/{lang_code}.{ext}"
output_file = f"{args.output_prefix}/{lang_code}.{ext}"
if ext == "train":
sample_data(input_file, output_file, args.num_train_tokens, args.seed)
else:
sample_data(input_file, output_file, args.num_test_tokens, args.seed)
|
// const isPP = (n) => {
// let res = []
// for(let i = 1 ; i <= n ; i++){
// for(let j = 2 ; j < n ; j ++){
// if((i ** j) === n){
// res.push(i);
// res.push(j);
// }
// }
// }
// if(res.length > 0){
// return res
// }
// return null;
// }
// console.log(isPP(5))
const isPP = (n) => {
    let i = 2;
    let max = Math.floor(n / 2);
    while(i <= max){
        let j = 2;
        while(Math.pow(i, j) <= n){
            if(n === Math.pow(i, j)){
                return [i, j]
            } else {
                j += 1;
            }
        }
        // advance the base only after every exponent for it has been checked
        i += 1;
    }
    return null
}
console.log(isPP(5))
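// Expected results: isPP(8) -> [2, 3], isPP(4) -> [2, 2], isPP(5) -> null
console.log(isPP(8))
console.log(isPP(4))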
|
from flask_restful import Resource
from tools.utilitys import openFile
class Single(Resource):
def get(self):
single = openFile("single-podcast")
return {"singlePodcast": single}
class SingleByTingkat(Resource):
def get(self, tingkat1):
single = openFile("single-podcast")
return {f"{tingkat1}": single[tingkat1]}
class SingleByTwoTingkat(Resource):
def get(self, tingkat1, tingkat2):
single = openFile("single-podcast")
return {f"{tingkat2}": single[tingkat1][0][tingkat2]}
|
n1 = float(input('Enter the first number: '))
n2 = float(input('Enter the second number: '))
n3 = float(input('Enter the third number: '))
# nl = [n1, n2, n3]
# print(f'The largest number is \033[1;32m{max(nl)}\033[m \nAnd the smallest is \033[31;1m{min(nl)}\033[m')
maior = n2
if n1 > n2 and n1 > n3:
maior = n1
if n3 > n2 and n3 > n1:
maior = n3
menor = n1
if n2 < n1 and n2 < n3:
menor = n2
if n3 < n1 and n3 < n2:
menor = n3
print(f'The largest number is \033[32;1m{maior}\033[m \nAnd the smallest is \033[31;1m{menor}\033[m')
|
from __future__ import absolute_import, unicode_literals
import tempfile
import uuid
from django.test import override_settings
from django.urls import reverse
from django.utils.translation import ugettext as _
from PIL import Image
from rest_framework import status
from rest_framework.test import APITestCase
from brouwers.users.tests.factories import UserFactory
from ...models import Boxart, ModelKit
from ..factories import BoxartFactory, BrandFactory, ScaleFactory
@override_settings(MEDIA_ROOT=tempfile.mkdtemp())
class BoxartTests(APITestCase):
def test_upload_endpoint(self):
user = UserFactory.create()
self.client.force_login(user, backend='django.contrib.auth.backends.ModelBackend')
url = reverse('api:boxart-list')
# create an image
image = Image.new('RGB', (10, 10), 'green')
tmp_file = tempfile.NamedTemporaryFile(suffix='.jpg')
image.save(tmp_file, format='JPEG')
with open(tmp_file.name, 'rb') as image:
response = self.client.post(url, {'image': image}, format='multipart')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
boxart = Boxart.objects.get()
self.assertEqual(response.data, {
'uuid': boxart.uuid,
'image': 'http://testserver{}'.format(boxart.image.url),
'success': True,
})
|
# Retipy - Retinal Image Processing on Python
# Copyright (C) 2017 Alejandro Valdes
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""tests for landmarks endpoint module"""
import json
import sys
from retipy.retina import Retina
from retipyserver import app
from unittest import TestCase
class TestLandmarksEndpoint(TestCase):
_resources = 'retipy/resources/images/'
_image_file_name = 'img02.png'
_image_path = _resources + _image_file_name
def setUp(self):
self.image = Retina(None, self._image_path).original_base64
self.app = app.test_client()
def test_classification_no_success(self):
response = self.app.post("/retipy/landmarks/classification")
self.assertEqual(json.loads(response.get_data().decode(sys.getdefaultencoding())), {'success': False})
|
import simplejson
import os
from github import Github, GithubException
from IPython.display import display, HTML
class ActivitySet(object):
def __init__(self, user, repo, activitySet_id, cname=None, prefLabel="", altLabel="",
description="", preamble="", scoringLogic={},
allow=['skipped', 'dontKnow'], shuffle=False):
self.user = user
self.repo = repo
self.cname = cname
g = Github(os.environ['GH_TOKEN'])
gh_repo = g.get_repo("{user}/{repo}".format(user=self.user, repo=self.repo))
self.gh_repo = gh_repo
self.data = {
"@context":["https://www.repronim.org/schema-standardization/contexts/generic.jsonld"],
"@type": "https://www.repronim.org/schema-standardization/schemas/ActivitySet.jsonld",
"schema:schemaVersion": "0.0.1",
"schema:version": "0.0.1",
}
self.data['@id'] = activitySet_id
self.data["skos:prefLabel"] = prefLabel
self.data["skos:altLabel"] = altLabel
self.data["schema:description"] = description
self.data['variableMap'] = []
self.data["ui"] = dict(shuffle=shuffle, allow=allow, order=[], visibility={}, activity_display_name={})
self.extra_context = {
'@context': {
"@version": 1.1,
}
}
self.previewURL = None
self.mindloggerURL = None
def addImage(self):
raise NotImplementedError("""
TODO: point to an image on the computer and push it to the github repo.
then take the URL and add it to the "schema:image" property of self.data
""")
def addAbout(self):
raise NotImplementedError("""
TODO: point to a markdown file on the computer and push it to the github repo
then take the URL and add it to the "schema:about" property of self.data
""")
def toJSON(self):
return simplejson.dumps(self.data)
def addActivity(self, activity, displayName, visibility=True):
# TODO: make sure item is of type Activity
activity_url = activity.postActivity()
# TODO: make sure the item isn't already in the variableMap
self.data['variableMap'].append({
"variableName": activity.data['@id'],
"isAbout": activity.data['@id'],
})
self.data['ui']['visibility'][activity.data['@id']] = visibility
self.data['ui']['activity_display_name'][activity.data['@id']] = displayName
# TODO: make sure the item isn't already in the context
self.extra_context['@context'][activity.data['@id']] = {
'@id': activity_url,
'@type': '@id',
}
# TODO: make sure the item isn't already in the list
self.data['ui']['order'].append(activity.data['@id'])
def postActivitySetContext(self):
"""
"""
fid = self.data['@id']
try:
self.gh_repo.create_file("/activitySets/{}/{}_context.jsonld".format(fid, fid),
"updated {}/{}_context".format(fid, fid),
simplejson.dumps(self.extra_context),
branch="master")
except GithubException:
filen = self.gh_repo.get_file_contents("/activitySets/{}/{}_context.jsonld".format(fid, fid))
self.gh_repo.update_file("/activitySets/{}/{}_context.jsonld".format(fid, fid),
"updated {}/{}_context".format(fid, fid), simplejson.dumps(self.extra_context),
filen.sha)
if not self.cname:
url = "https://{user}.github.io/{repo}/activitySets/{fid}/{fid}_context.jsonld".format(user=self.user,
repo=self.repo,
fid=fid)
else:
url = "https://{cname}/{repo}/activitySets/{fid}/{fid}_context.jsonld".format(
cname=self.cname,
repo=self.repo,
fid=fid)
return url
def postActivitySet(self):
# 1. post the extra context
context_url = self.postActivitySetContext()
# 2. update self.data.context with URL
self.data['@context'].append(context_url)
# 3. post self.data into the activities folder
fid = self.data['@id']
try:
self.gh_repo.create_file("/activitySets/{}/{}_schema.jsonld".format(fid, fid),
"updated {}/{}".format(fid, fid), self.toJSON(),
branch="master")
except GithubException:
filen = self.gh_repo.get_file_contents("/activitySets/{}/{}_schema.jsonld".format(fid,fid))
self.gh_repo.update_file("/activitySets/{}/{}_schema.jsonld".format(fid, fid),
"updated {}/{}".format(fid,fid), self.toJSON(),
filen.sha)
if not self.cname:
url = "https://{user}.github.io/{repo}/activitySets/{fid}/{fid}_schema.jsonld".format(user=self.user,
repo=self.repo,
fid=fid)
else:
url = "https://{cname}/{repo}/activitySets/{fid}/{fid}_schema.jsonld".format(cname=self.cname,
repo=self.repo,
fid=fid)
self.previewURL = "https://schema-ui.anisha.pizza/#/activities/0?url={}".format(url)
self.mindloggerURL = "https://web.mindlogger.org/#/?inviteURL={}".format(url)
return url
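    # Hypothetical usage, assuming an Activity object `act` built with this library:
    #   aset = ActivitySet('myuser', 'myrepo', 'demo_set', prefLabel='Demo')
    #   aset.addActivity(act, 'Demo Activity')
    #   url = aset.postActivitySet()  # pushes context + schema, returns the public URL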
def preview(self):
return HTML("""
<p style="margin-bottom: 2em;">
Preview your activity set at <a target="_blank" href="{url}">{url}</a>
</p>
<p>
<b>Turn OFF your browser cache if things aren't updating</b>
</p>
""".format(url=self.previewURL))
def mindlogger(self):
return HTML("""
<p style="margin-bottom: 2em;">
Invite yourself to your applet on Mindlogger at <a target="_blank" href="{url}">{url}</a>
</p>
<p>
<b>Turn OFF your browser cache if things aren't updating</b>
</p>
""".format(url=self.mindloggerURL))
|
import logging
import os
import shutil
import subprocess
import sys
import textwrap
from pathlib import Path
from typing import Dict, List, Optional, Sequence, Tuple, Union
from pipx.animate import show_cursor
from pipx.constants import WINDOWS
logger = logging.getLogger(__name__)
class PipxError(Exception):
def __init__(self, message: str, wrap_message: bool = True):
if wrap_message:
super().__init__(pipx_wrap(message))
else:
super().__init__(message)
def rmdir(path: Path) -> None:
logger.info(f"removing directory {path}")
try:
if WINDOWS:
os.system(f'rmdir /S /Q "{str(path)}"')
else:
shutil.rmtree(path)
except FileNotFoundError:
pass
def mkdir(path: Path) -> None:
if path.is_dir():
return
logger.info(f"creating directory {path}")
path.mkdir(parents=True, exist_ok=True)
def get_pypackage_bin_path(binary_name: str) -> Path:
return (
Path("__pypackages__")
/ (str(sys.version_info.major) + "." + str(sys.version_info.minor))
/ "lib"
/ "bin"
/ binary_name
)
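# e.g. get_pypackage_bin_path("black") on CPython 3.10 -> __pypackages__/3.10/lib/bin/black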
def run_pypackage_bin(bin_path: Path, args: List[str]) -> None:
def _get_env() -> Dict[str, str]:
env = dict(os.environ)
env["PYTHONPATH"] = os.path.pathsep.join(
[".", str(bin_path.parent.parent)]
+ os.getenv("PYTHONPATH", "").split(os.path.pathsep)
)
return env
exec_app([str(bin_path.resolve())] + args, env=_get_env())
if WINDOWS:
def get_venv_paths(root: Path) -> Tuple[Path, Path]:
bin_path = root / "Scripts"
python_path = bin_path / "python.exe"
return bin_path, python_path
else:
def get_venv_paths(root: Path) -> Tuple[Path, Path]:
bin_path = root / "bin"
python_path = bin_path / "python"
return bin_path, python_path
def get_site_packages(python: Path) -> Path:
output = run_subprocess(
[python, "-c", "import sysconfig; print(sysconfig.get_path('purelib'))"],
capture_stderr=False,
).stdout
path = Path(output.strip())
path.mkdir(parents=True, exist_ok=True)
return path
def _fix_subprocess_env(env: Dict[str, str]) -> Dict[str, str]:
# Remove PYTHONPATH because some platforms (macOS with Homebrew) add pipx
# directories to it, and can make it appear to venvs as though pipx
# dependencies are in the venv path (#233)
# Remove __PYVENV_LAUNCHER__ because it can cause the wrong python binary
# to be used (#334)
env_blocklist = ["PYTHONPATH", "__PYVENV_LAUNCHER__"]
for env_to_remove in env_blocklist:
env.pop(env_to_remove, None)
env["PIP_DISABLE_PIP_VERSION_CHECK"] = "1"
# Make sure that Python writes output in UTF-8
env["PYTHONIOENCODING"] = "utf-8"
# Make sure we install package to venv, not userbase dir
env["PIP_USER"] = "0"
return env
def run_subprocess(
cmd: Sequence[Union[str, Path]],
capture_stdout: bool = True,
capture_stderr: bool = True,
log_cmd_str: Optional[str] = None,
) -> subprocess.CompletedProcess:
"""Run arbitrary command as subprocess, capturing stderr and stout"""
env = dict(os.environ)
env = _fix_subprocess_env(env)
if log_cmd_str is None:
log_cmd_str = " ".join(str(c) for c in cmd)
logger.info(f"running {log_cmd_str}")
# windows cannot take Path objects, only strings
cmd_str_list = [str(c) for c in cmd]
completed_process = subprocess.run(
cmd_str_list,
env=env,
stdout=subprocess.PIPE if capture_stdout else None,
stderr=subprocess.PIPE if capture_stderr else None,
encoding="utf-8",
universal_newlines=True,
)
if capture_stdout:
logger.debug(f"stdout: {completed_process.stdout}".rstrip())
if capture_stderr:
logger.debug(f"stderr: {completed_process.stderr}".rstrip())
logger.debug(f"returncode: {completed_process.returncode}")
return completed_process
def subprocess_post_check(
completed_process: subprocess.CompletedProcess, raise_error: bool = True
) -> None:
if completed_process.returncode:
if completed_process.stdout is not None:
print(completed_process.stdout, file=sys.stdout, end="")
if completed_process.stderr is not None:
print(completed_process.stderr, file=sys.stderr, end="")
if raise_error:
raise PipxError(
f"{' '.join([str(x) for x in completed_process.args])!r} failed"
)
else:
logger.info(f"{' '.join(completed_process.args)!r} failed")
def exec_app(cmd: Sequence[Union[str, Path]], env: Optional[Dict[str, str]] = None) -> None:
"""Run command, do not return
    POSIX: replace current process with command using os.exec*()
Windows: Use subprocess and sys.exit() to run command
"""
if env is None:
env = dict(os.environ)
env = _fix_subprocess_env(env)
# make sure we show cursor again before handing over control
show_cursor()
sys.stderr.flush()
logger.info("exec_app: " + " ".join([str(c) for c in cmd]))
if WINDOWS:
sys.exit(
subprocess.run(
cmd,
env=env,
stdout=None,
stderr=None,
encoding="utf-8",
universal_newlines=True,
).returncode
)
else:
os.execvpe(str(cmd[0]), [str(x) for x in cmd], env)
def full_package_description(package: str, package_spec: str) -> str:
if package == package_spec:
return package
else:
return f"{package} from spec {package_spec!r}"
def pipx_wrap(
text: str, subsequent_indent: str = "", keep_newlines: bool = False
) -> str:
"""Dedent, strip, wrap to shell width. Don't break on hyphens, only spaces"""
minimum_width = 40
width = max(shutil.get_terminal_size((80, 40)).columns, minimum_width) - 2
text = textwrap.dedent(text).strip()
if keep_newlines:
return "\n".join(
[
textwrap.fill(
line,
width=width,
subsequent_indent=subsequent_indent,
break_on_hyphens=False,
)
for line in text.splitlines()
]
)
else:
return textwrap.fill(
text,
width=width,
subsequent_indent=subsequent_indent,
break_on_hyphens=False,
)
|