text
stringlengths 8
6.05M
|
|---|
# Using functions to clean data.
# Complex cleaning:
#   - extract a number from a string
#   - perform a transformation on the extracted number
# 1. .apply(): df.apply(np.mean, axis=0) -- axis=0 operates column-wise,
#    axis=1 operates row-wise.
import pandas as pd

# Raw string: a plain literal with backslashes is fragile on Windows paths
# (sequences like '\t' or '\n' would be silently interpreted as escapes).
tips = pd.read_csv(r'E:\csvdhf5xlsxurlallfiles/tips.csv')
print(tips)
print(tips['sex'])
print(tips.columns)
print(tips.iloc[230])

import matplotlib.pyplot as plt

# Line plot and histogram of the bill amounts.
plt.plot(tips['total_bill'], color='yellow')
plt.hist(tips['total_bill'], bins=20, color='green')
plt.show()
|
from flask import Flask, request, jsonify, json, render_template
from fetch_data import extract_data
import pandas as pd
import sys
app = Flask(__name__)
@app.route('/')
def home():
    """Serve the landing page."""
    return render_template("index.html")
@app.route('/commodity')
def commodity():
    """Return price data plus mean/variance for one commodity over a date range.

    Query params: commodity_type ('gold' or 'silver'), start_date, end_date
    (ISO 'YYYY-MM-DD' strings).  Responds 422 when params are missing or the
    commodity type is unknown.
    """
    # MultiDict.get() returns None for missing keys, replacing the
    # 'in'-check ternaries of the original.
    start_date = request.args.get("start_date")
    end_date = request.args.get("end_date")
    commodity_type = request.args.get("commodity_type")
    if commodity_type is None or start_date is None or end_date is None:
        return "You must provide commodity_type, start_date and end_date params", 422
    if commodity_type not in ("gold", "silver"):
        # Previously an unknown type fell through to an empty DataFrame and
        # raised KeyError on 'Date' below; reject it explicitly instead.
        return "You must provide commodity_type, start_date and end_date params", 422
    commodity = extract_data(commodity_type)
    # Normalize dates to 'YYYY-MM-DD' strings so lexicographic comparison
    # matches chronological order for the inclusive range mask.
    converted_date = pd.to_datetime(commodity['Date']).dt.date.astype(str)
    mask = (converted_date >= start_date) & (converted_date <= end_date)
    commodity = commodity.loc[mask]
    # set index column as Date, for formatting requirements of the exercise
    commodity = commodity.set_index("Date")
    data = json.loads(commodity.to_json())["Price"]
    return jsonify(
        data=data,
        mean=commodity.mean()["Price"],
        variance=commodity.var()["Price"])
if __name__ == '__main__':
    # Listen on all interfaces; debug=True enables the reloader/debugger.
    app.run(debug=True, host='0.0.0.0', port="8080")
|
"""
Given a number n, write a program to find the sum of the largest prime factors of each of nine consecutive numbers starting from n.
g(n) = f(n) + f(n+1) + f(n+2) + f(n+3) + f(n+4) + f(n+5) + f(n+6) + f(n+7) + f(n+8)
where, g(n) is the sum and f(n) is the largest prime factor of n
For example,
g(10)=f(10)+f(11)+f(12)+f(13)+f(14)+f(15)+f(16)+f(17)+f(18)
=5 + 11 + 3 + 13 + 7 + 5 + 2 + 17 + 3
=66
"""
#PF-Assgn-42
def find_factors(num):
    """Return all factors of num from 2 up to and including num itself."""
    return [divisor for divisor in range(2, num + 1) if num % divisor == 0]
def is_prime(num, i):
    """Return True if num has no divisor in the range 2..i.

    Called with i == num // 2 this is a primality test.  The original was
    recursive (depth i), so a large num raised RecursionError; this iterative
    form checks the same divisors in the same order.
    """
    while i > 1:
        if num % i == 0:
            return False
        i -= 1
    # Reached i <= 1 without finding a divisor.
    return True
def find_largest_prime_factor(list_of_factors):
    """Return the largest prime value in list_of_factors, or 1 if none is prime.

    Sorts list_of_factors in place, largest first (as the original did).
    """
    list_of_factors.sort(reverse=True)
    # Walk the factors from largest to smallest; the first prime hit wins.
    for candidate in list_of_factors:
        if is_prime(candidate, int(candidate / 2)):
            return candidate
    return 1
def find_f(num):
    """Return the largest prime factor of num (num itself when num is prime).

    Bug fix: the original called is_prime(num, num / 2), passing a float.
    For any odd prime p, p % (p / 2) == 0.0, so the fast primality check
    always failed and every number fell through to the slow factorization
    path.  Integer floor division restores the intended check.
    """
    if is_prime(num, num // 2):
        return num
    return find_largest_prime_factor(find_factors(num))
def find_g(num):
    """Return the sum of the largest prime factors of num, num+1, ..., num+8.

    Uses the sum() builtin directly; the original accumulated into a local
    named 'sum', shadowing the builtin.
    """
    return sum(find_f(i) for i in range(num, num + 9))

# Note: Invoke function(s) from other function(s), wherever applicable.
print(find_g(10))
|
import serial
import csv
import datetime
import time

# Log Arduino sensor frames to data.csv.  The sketch sends a frame as five
# lines: the sentinel "1999" followed by temperature, level, humidity and
# light readings.
arduino = serial.Serial('/dev/ttyACM1', 9600)
print("inicia recepción de datos serial")
frame_count = 0
while True:  # 'while 1' replaced with the idiomatic form
    now = datetime.datetime.now()
    if arduino.in_waiting > 0:
        time.sleep(1)
        # str(bytes) yields "b'...\r\n'"; [2:-5] strips the b' prefix and
        # the trailing \r\n' -- assumes lines always end in CRLF (TODO confirm).
        line = str(arduino.readline())[2:-5]
        if line == "1999":  # frame-start sentinel
            print("recepcion")
            print(frame_count)
            frame_count += 1
            temp = str(arduino.readline())[2:-5]
            lvl_f = str(arduino.readline())[2:-5]
            hum = str(arduino.readline())[2:-5]
            luz = str(arduino.readline())[2:-5]
            now = now.strftime("%M.%S")
            # newline='' is required by the csv module to avoid blank rows on
            # Windows; the unused 'row = []' of the original is gone.
            with open('data.csv', 'a', newline='') as csv_file:
                writer = csv.writer(csv_file)
                writer.writerow([temp, lvl_f, hum, luz, now])
|
#!/usr/bin/env python3.2
# ctypes bindings for a subset of the libpcap C API.
import ctypes
from ctypes.util import find_library

# find_library() expects the base library name WITHOUT the 'lib' prefix, so
# the original find_library("libpcap") always returned None and the
# hard-coded "libpcap.so" fallback was the only path ever taken.
_pcap_path = find_library("pcap")
if _pcap_path is None:
    pcap = ctypes.cdll.LoadLibrary("libpcap.so")
else:
    pcap = ctypes.cdll.LoadLibrary(_pcap_path)

# void pcap_close(pcap_t *);
pcap_close = pcap.pcap_close
# char *pcap_lookupdev(char *errbuf);
pcap_lookupdev = pcap.pcap_lookupdev
pcap_lookupdev.restype = ctypes.c_char_p
# int pcap_lookupnet(const char *dev, bpf_u_int32 *net, bpf_u_int32 *mask, char *errbuf);
pcap_lookupnet = pcap.pcap_lookupnet
# pcap_t *pcap_open_live(const char *device, int snaplen, int promisc, int to_ms, char *errbuf);
pcap_open_live = pcap.pcap_open_live
# int pcap_compile(pcap_t *p, struct bpf_program *fp, const char *str, int optimize, bpf_u_int32 netmask);
pcap_compile = pcap.pcap_compile
# int pcap_setfilter(pcap_t *p, struct bpf_program *fp);
pcap_setfilter = pcap.pcap_setfilter
# const u_char *pcap_next(pcap_t *p, struct pcap_pkthdr *h);
# (the original bound pcap_next twice; once is enough)
pcap_next = pcap.pcap_next
# char *pcap_geterr(pcap_t *);
pcap_geterr = pcap.pcap_geterr
pcap_geterr.restype = ctypes.c_char_p
# int pcap_loop(pcap_t *p, int cnt, pcap_handler callback, u_char *user);
pcap_loop = pcap.pcap_loop
# int pcap_stats(pcap_t *, struct pcap_stat *);
pcap_stats = pcap.pcap_stats
# int pcap_set_buffer_size(pcap_t *, int);
pcap_set_buffer_size = pcap.pcap_set_buffer_size
# int pcap_set_promisc(pcap_t *, int);
pcap_set_promisc = pcap.pcap_set_promisc
# int pcap_set_timeout(pcap_t *, int);
pcap_set_timeout = pcap.pcap_set_timeout
# int pcap_next_ex(pcap_t *p, struct pcap_pkthdr **pkt_header, const u_char **pkt_data);
pcap_next_ex = pcap.pcap_next_ex

# Savefile (pcap file) handling.
# FILE *pcap_dump_file(pcap_dumper_t *p);
pcap_dump_file = pcap.pcap_dump_file
pcap_dump_file.restype = ctypes.c_void_p
# pcap_t *pcap_open_offline(const char *fname, char *errbuf);
pcap_open_offline = pcap.pcap_open_offline
pcap_open_offline.restype = ctypes.c_void_p
# pcap_t *pcap_fopen_offline(FILE *, char *);
pcap_fopen_offline = pcap.pcap_fopen_offline
pcap_fopen_offline.restype = ctypes.c_void_p
# pcap_t *pcap_open_dead(int linktype, int snaplen);
pcap_open_dead = pcap.pcap_open_dead
pcap_open_dead.restype = ctypes.c_void_p
# pcap_dumper_t *pcap_dump_open(pcap_t *p, const char *fname);
pcap_dump_open = pcap.pcap_dump_open
pcap_dump_open.restype = ctypes.c_void_p
# void pcap_dump(u_char *user, struct pcap_pkthdr *h, u_char *sp);
pcap_dump = pcap.pcap_dump
# void pcap_dump_close(pcap_dumper_t *p);
pcap_dump_close = pcap.pcap_dump_close
|
from django.urls import path
from django.views.decorators.csrf import csrf_exempt

from . import views

# Routes for the SKU search API.  CSRF checks are disabled because these
# endpoints are called by non-form clients.
urlpatterns = [
    path('autocomplete', csrf_exempt(views.complete_query), name='autocomplete search query'),
    path('data', csrf_exempt(views.get_sku_data), name='get all information related to sku'),
]
|
import cv2
import imutils
import numpy as np
from sklearn.metrics import pairwise
bg = None
#-------------------------------------------------------------------------------
# Funcion - Para encontrar el promedio sobre el fondo
#-------------------------------------------------------------------------------
def run_avg(image, aWeight):
    """Fold a frame into the running weighted average of the background."""
    global bg
    # First frame seen: seed the background model with a float copy and stop.
    if bg is None:
        bg = image.copy().astype("float")
        return
    # Otherwise accumulate the new frame into the background model.
    cv2.accumulateWeighted(image, bg, aWeight)
#-------------------------------------------------------------------------------
# Funcion - Para segmentar la región de la mano en la imagen
#-------------------------------------------------------------------------------
def segment(image, threshold=25):
    """Segment the hand region: return (thresholded mask, largest contour),
    or None when no contour is found."""
    global bg
    # absolute difference between the learned background and the current frame
    diff = cv2.absdiff(bg.astype("uint8"), image)
    # threshold the diff image so the foreground (hand) becomes white
    thresholded = cv2.threshold(diff,
                                threshold,
                                255,
                                cv2.THRESH_BINARY)[1]
    # contours of the thresholded image
    # NOTE(review): the 3-value findContours return is OpenCV 3.x-specific;
    # OpenCV 2.x/4.x return 2 values -- confirm the installed version.
    (_, cnts, _) = cv2.findContours(thresholded.copy(),
                                    cv2.RETR_EXTERNAL,
                                    cv2.CHAIN_APPROX_SIMPLE)
    # no contours detected -> no hand in the frame
    if len(cnts) == 0:
        return
    else:
        # the biggest contour by area is assumed to be the hand
        segmented = max(cnts, key=cv2.contourArea)
        return (thresholded, segmented)
#-------------------------------------------------------------------------------
# Funcion - Para contar el número de dedos en la región segmentada de la mano
#-------------------------------------------------------------------------------
from sklearn.metrics import pairwise
def count(thresholded, segmented):
    """Count extended fingers in the segmented hand region.

    thresholded: binary hand mask; segmented: largest hand contour.
    Counts the contours crossing a palm-centred circle that look like
    fingers (above the wrist line, short arc length).
    """
    # convex hull of the segmented hand region
    chull = cv2.convexHull(segmented)
    # most extreme points of the hull (top, bottom, left, right)
    extreme_top = tuple(chull[chull[:, :, 1].argmin()][0])
    extreme_bottom = tuple(chull[chull[:, :, 1].argmax()][0])
    extreme_left = tuple(chull[chull[:, :, 0].argmin()][0])
    extreme_right = tuple(chull[chull[:, :, 0].argmax()][0])
    # centre of the palm, midway between the extremes
    cX = (extreme_left[0] + extreme_right[0]) / 2
    cY = (extreme_top[1] + extreme_bottom[1]) / 2
    # maximum euclidean distance between the palm centre and the hull extremes
    distance = pairwise.euclidean_distances([(cX, cY)], Y=[extreme_left, extreme_right, extreme_top, extreme_bottom])[0]
    maximum_distance = distance[distance.argmax()]
    # circle radius at 80% of that maximum distance
    radius = int(0.8 * maximum_distance)
    # circumference of that circle
    circumference = (2 * np.pi * radius)
    # circular ROI that should contain palm and fingers
    circular_roi = np.zeros(thresholded.shape[:2], dtype="uint8")
    # draw the circular ROI (1-pixel-wide white circle)
    circular_roi = circular_roi  # noqa -- see next call, draws in place
    cv2.circle(circular_roi, (int(cX), int(cY)), radius, 255, 1)
    # bitwise AND between the thresholded hand and the circle mask: keeps only
    # the segments where fingers/wrist cross the circle
    circular_roi = cv2.bitwise_and(thresholded, thresholded, mask=circular_roi)
    # contours of those crossing segments
    # NOTE(review): 3-value findContours return is OpenCV 3.x-specific --
    # OpenCV 2.x/4.x return 2 values; confirm the installed version.
    (_, cnts, _) = cv2.findContours(circular_roi.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
    # finger counter
    count = 0
    # inspect every crossing segment
    for c in cnts:
        # bounding box of the contour
        (x, y, w, h) = cv2.boundingRect(c)
        # count it as a finger only if:
        # 1. it is not the wrist (contour lies above 1.25 * palm-centre y)
        # 2. its point count does not exceed 25% of the circle circumference
        if ((cY + (cY * 0.25)) > (y + h)) and ((circumference * 0.25) > c.shape[0]):
            count += 1
    return count
#-------------------------------------------------------------------------------
# Función Main
#-------------------------------------------------------------------------------
if __name__ == "__main__":
    # running-average weight for the background model
    accumWeight = 0.5
    # reference to the webcam
    camera = cv2.VideoCapture(0)
    # region-of-interest coordinates (the box the hand must stay inside)
    top, right, bottom, left = 10, 350, 225, 590
    # frame counter (the first 30 frames are used for calibration)
    num_frames = 0
    # calibration flag
    calibrated = False
    # loop until interrupted
    while(True):
        # grab the current frame
        (grabbed, frame) = camera.read()
        # resize the frame
        frame = imutils.resize(frame, width=700)
        # flip so the view is not mirrored
        frame = cv2.flip(frame, 1)
        # clone the frame so we can draw on it
        clone = frame.copy()
        # frame dimensions
        (height, width) = frame.shape[:2]
        # extract the ROI
        roi = frame[top:bottom, right:left]
        # grayscale + blur the ROI
        gray = cv2.cvtColor(roi, cv2.COLOR_BGR2GRAY)
        gray = cv2.GaussianBlur(gray, (7, 7), 0)
        # first 30 frames: keep accumulating the background model
        if num_frames < 30:
            run_avg(gray, accumWeight)
            if num_frames == 1:
                print ("[STATUS] please wait! calibrating...")
            elif num_frames == 29:
                print ("[STATUS] calibration successfull...")
        else:
            # segment the hand region
            hand = segment(gray)
            # proceed only if a hand was segmented
            if hand is not None:
                # unpack the thresholded image and the segmented region
                (thresholded, segmented) = hand
                # draw the segmented region on the clone (offset back to frame coords)
                cv2.drawContours(clone, [segmented + (right, top)], -1, (0, 0, 255))
                # count the fingers and overlay the number
                fingers = count(thresholded, segmented)
                cv2.putText(clone,str(fingers), (70, 45), cv2.FONT_HERSHEY_SIMPLEX, 1, (0,0,255), 2)
                cv2.imshow("Thesholded", thresholded)
        # draw the ROI rectangle
        cv2.rectangle(clone, (left, top), (right, bottom), (0,255,0), 2)
        # advance the frame counter
        num_frames += 1
        # show the annotated frame
        cv2.imshow("Video Feed", clone)
        # check for a keypress
        keypress = cv2.waitKey(1) & 0xFF
        # quit on "q"
        if keypress == ord("q"):
            break
    # release resources
    camera.release()
    cv2.destroyAllWindows()
|
"""Define tests, sanity checks, and evaluation"""
from .image_folder_dataset_tests import test_image_folder_dataset
from .transform_tests import (
test_rescale_transform,
test_compute_image_mean_and_std
)
from .dataloader_tests import test_dataloader
from .eval_utils import save_pickle
|
# Generated by Django 2.1.4 on 2019-07-26 02:22
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the CareerType model and switch MBTIAnswerType.choice to
    CASCADE deletion."""

    dependencies = [
        ('career_test', '0003_auto_20190723_1505'),
    ]
    operations = [
        # New lookup table for test-result types (name + content text).
        migrations.CreateModel(
            name='CareerType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('type_name', models.CharField(max_length=20, verbose_name='结果类型')),
                ('type_content', models.CharField(max_length=500, verbose_name='结果内容')),
            ],
        ),
        # Deleting a Choice now cascades to its dependent rows.
        migrations.AlterField(
            model_name='mbtianwsertype',
            name='choice',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='career_test.Choice'),
        ),
    ]
|
def findOffByOne(lines):
    """Find the first pair of lines differing at exactly one position and
    return their common characters (the shared string with that position
    dropped).  Returns '' when no such pair exists."""
    width = len(lines[0].strip())
    for pos, first in enumerate(lines):
        for second in lines[pos + 1:]:
            mismatches = 0
            where = -1
            for col in range(width):
                if first[col] != second[col]:
                    mismatches += 1
                    if mismatches > 1:
                        break  # more than one difference: give up early
                    where = col
            if mismatches == 1:
                # Rebuild the line without the single differing column.
                return ''.join(ch for col, ch in enumerate(first[:width])
                               if col != where)
    return ''
# Read the puzzle input and report the answer.  Context manager replaces the
# manual open/close pair, guaranteeing the file is closed on error.
with open('input.txt', 'r') as f:
    lines = f.readlines()
answer = findOffByOne(lines)
print(answer)
|
#!/usr/bin/env python
import webapp2
import jinja2
import os
from utilities import *
from google.appengine.api import users
import datetime
import base64
from objects.usermeta import UserMeta
from objects.player import Player
from objects.game import Game
from google.appengine.ext import db
import urllib
import json
jinja_environment = jinja2.Environment(loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
jinja_environment.globals['logout']=users.create_logout_url('/')
jinja_environment.globals['page']='home'
class LoginHandler(webapp2.RequestHandler):
    """Facebook-token login: verifies the token against the Graph API and
    issues an 'fb_user' auth cookie, or shows signup for unknown users."""
    def get(self):
        # Token arrives as ?fbtoken=...; without it there is nothing to verify.
        access_token=self.request.get('fbtoken')
        if not access_token:
            self.redirect("/")
            return
        # Ask the Graph API who owns this token (blocking HTTP; Python 2 urllib).
        profile=json.load(urllib.urlopen("https://graph.facebook.com/me?"+
            urllib.urlencode(dict(access_token=access_token))))
        id=profile["id"]
        user=UserMeta.all().filter("fb_id =",id).get()
        if user:
            # Known user: mint a fresh random verifier, store it server-side,
            # and hand it back as a 30-day cookie of the form "fb_id:verifier".
            rand_string=base64.urlsafe_b64encode(os.urandom(32))
            if not user.auth_verify:
                user.auth_verify=[]
            user.auth_verify.append(rand_string)
            self.response.set_cookie("fb_user",id+":"+rand_string,expires=datetime.datetime.now()+datetime.timedelta(days=30));
            user.access_token=access_token
            save_user(user)
            if not self.request.get('app'):
                self.redirect("/")
            else:
                # Native-app flow receives the credential as JSON, no redirect.
                self.response.out.write(json.dumps({"auth":id+":"+rand_string}))
            return
        else:
            if self.request.get('app'):
                self.response.out.write("Error")
                return
            # Unknown Facebook user: pre-fill the signup form from the profile.
            template_values={'first_name':profile["first_name"],'surname':profile["last_name"],'fb_id':id,'access_token':access_token}
            template=jinja_environment.get_template('templates/signup.html')
            self.response.out.write(template.render(template_values))
class LogoutHandler(webapp2.RequestHandler):
    """Clear the session: Google logout for Google users; cookie/verifier
    removal for Facebook-token users."""
    def get(self):
        if users.get_current_user():
            self.redirect(users.create_logout_url('/'))
        else:
            # Facebook-style session: drop this browser's verifier server-side.
            try:
                auth_verify=self.request.cookies.get("fb_user").split(':',1)[1]
                user=get_meta()
                if user:
                    user.auth_verify.remove(auth_verify)
                    save_user(user)
            except (AttributeError, IndexError, ValueError):
                # Missing cookie (None.split -> AttributeError), malformed
                # cookie (IndexError), or verifier not on record (ValueError).
                # The original bare 'except:' swallowed *everything*; logout
                # stays best-effort but only for these expected failures.
                pass
            # Expire the cookie immediately either way.
            self.response.set_cookie("fb_user", "",expires=datetime.datetime.now())
            self.redirect("/")
class SignupHandler(webapp2.RequestHandler):
    """Signup form (GET) and account creation (POST) for both Google-auth
    and Facebook-token users."""
    def get(self):
        user = users.get_current_user()
        if not user:
            # Require a Google login before showing the form.
            self.redirect(users.create_login_url(self.request.uri))
        user_meta = get_meta()
        if user_meta:
            # Already signed up -- skip straight to team selection.
            self.redirect('/team/select')
        template_values={'email':user.email()}
        template=jinja_environment.get_template('templates/signup.html')
        self.response.out.write(template.render(template_values))
    def post(self):
        if get_meta():
            self.redirect("/team/select")
        current_user = users.get_current_user()
        fb_id=None
        auth_verify=[]
        if not current_user:
            # No Google session: must be a Facebook signup carrying fb_id.
            fb_id=self.request.get('fb_id')
            if not fb_id:
                self.redirect(users.create_login_url(self.request.uri))
            else:
                # Issue the "fb_id:verifier" cookie for 30 days.
                rand_string=base64.urlsafe_b64encode(os.urandom(32))
                auth_verify.append(rand_string)
                self.response.set_cookie("fb_user",fb_id+":"+rand_string,expires=datetime.datetime.now()+datetime.timedelta(days=30))
        uid=None
        if current_user:
            uid=current_user.user_id()
        # NOTE(review): redirect() does not stop execution, so the UserMeta
        # below is created and saved even on the early-redirect paths above --
        # confirm whether that is intended.
        user_meta = UserMeta(first_name=self.request.get('first_name'),
            surname=self.request.get('surname'),
            team_name=self.request.get('team_name'),
            user_id=uid,fb_id=fb_id,access_token=self.request.get('access_token'),auth_verify=auth_verify)
        save_user(user_meta)
        self.redirect('/team/select')
class LandingHandler(webapp2.RequestHandler):
    """Home page: login/marketing page for anonymous visitors; dashboard with
    the previous round's score for signed-up users."""
    def get(self):
        user_meta=get_meta()
        if not user_meta:
            if users.get_current_user():
                # Logged in with Google but no profile yet -> finish signup.
                return self.redirect("/signup")
            template_values={'FACEBOOK_APP_ID':FACEBOOK_APP_ID,'login':users.create_login_url("/"),'dev_server':dev_server}
            template=jinja_environment.get_template('templates/index.html')
            self.response.out.write(template.render(template_values))
            return
        round=current_round()
        # Show the score from the previous round; wraps below 0 to round 5
        # (presumably a 6-round season -- confirm against current_round()).
        last_round=round-1
        if last_round < 0:
            last_round=5
        last_game=Game.all().filter('round =',last_round).get()
        last_team=get_team(game=last_game)
        round_score=0
        if last_team:
            round_score=last_team.total_score
        template_values={'user_meta':user_meta,'round_score':round_score}
        # Mobile browsers get a dedicated template.
        if check_mobile():
            template=jinja_environment.get_template('templates/mobile/home.html')
        else:
            template=jinja_environment.get_template('templates/home.html')
        self.response.out.write(template.render(template_values))
app = webapp2.WSGIApplication([('/signup',SignupHandler),('/', LandingHandler),('/login',LoginHandler),('/logout',LogoutHandler)],debug=True)
|
"""
User urls
"""
from django.conf import settings
from django.urls import path
from rest_framework.generics import RetrieveAPIView, ListAPIView, UpdateAPIView
from . import views
from .models import User
from .serializers import QuickUserSerializer, OwnProfileSerializer
from .views import OwnProfileView
# Route table for the user API.
urlpatterns = [
    # --- session management ---
    path(
        'login',
        views.Login.as_view(),
        name="login"
    ),
    path(
        'logout',
        views.Logout.as_view(),
        name="logout"
    ),
    # --- account creation ---
    path(
        'create',
        views.CreateUser.as_view(),
        name="create_user"
    ),
    path(
        'item/dailychance',
        views.DailyChance.as_view(),
        name="set_dailyChance"
    ),
    # --- profile views (read, then owner-edit) ---
    path(
        'profile/<str:username>',
        views.ProfileView.as_view(),
        name="profile_view"
    ),
    path(
        'profile/edit/<str:username>',
        OwnProfileView.as_view(),
        name="profile_edit_view"
    ),
    # Generic DRF list/detail endpoints configured inline rather than in views.
    path('view/all', ListAPIView.as_view(
        authentication_classes=settings.AUTH_CLASSES,
        permission_classes=settings.PERM_CLASSES,
        serializer_class=QuickUserSerializer,
        queryset=User.objects.all(),
    )),
    path('view/<str:username>', RetrieveAPIView.as_view(
        authentication_classes=settings.AUTH_CLASSES,
        permission_classes=settings.PERM_CLASSES,
        serializer_class=QuickUserSerializer,
        lookup_field="username",
        queryset=User.objects.all(),
    )),
    # --- misc: validation, activation, search, colors ---
    path('validate', views.ValidateUserData.as_view(), name="validate_username"),
    path(
        'activate/<uidb64>/<token>',
        views.Activate.as_view(), name='activate'
    ),
    path('search', views.UserSearch.as_view(), name="user_search"),
    path('color/<str:username>', views.UserColorView.as_view(), name="user_colors"),
]
|
def get_score(arr):
    """Score a sequence of Tetris line clears.

    arr: iterable of lines cleared per piece (values 1-4; anything else is
    ignored, matching the original if/elif chain).  Base awards are
    40/100/300/1200 for 1/2/3/4 lines and scale linearly with the level;
    every 10 cumulative cleared lines the level goes up by one.

    The original duplicated the level-up bookkeeping in four branches;
    this folds it into one table-driven path with identical results
    (each original counter held initial_value * level).
    """
    base_points = {1: 40, 2: 100, 3: 300, 4: 1200}
    level = 1
    lines_cleared = 0
    points = 0
    for cleared in arr:
        if cleared not in base_points:
            continue
        # Award points at the *current* level, then account the lines.
        points += base_points[cleared] * level
        lines_cleared += cleared
        if lines_cleared >= 10:
            level += 1
            lines_cleared %= 10
    return points
|
"""
Model implementation in PyNN by Vitor Chaud, Andrew Davison and Padraig Gleeson (August 2013).
This is a re-implementation of the models descirbed in the following references to reproduce Fig. 1 of Izhikevich (2004)
Original implementation references:
Izhikevich E.M. (2004) Which Model to Use for Cortical Spiking Neurons?
IEEE Transactions on Neural Networks, 15:1063-1070 (special issue on temporal coding)
Izhikevich E.M. (2003) Simple Model of Spiking Neurons.
IEEE Transactions on Neural Networks, 14:1569- 1572
http://www.izhikevich.org/publications/whichmod.htm
"""
#############################################
##
## VERSION 0.1 - Using PyNN 0.8
##
#############################################
from pyNN.random import RandomDistribution, NumpyRNG
from pyNN.utility import get_script_args, Timer, ProgressBar, init_logging, normalized_filename
import matplotlib.pyplot as plt
import numpy as np
# Load the requested simulator backend (e.g. neuron, nest, brian) by name.
simulator_name = get_script_args(1)[0]
exec("from pyNN.%s import *" % simulator_name)
print("\n")
# Fixed: this line was a Python 2 print *statement* ('print "..." % ...'),
# a SyntaxError under Python 3 even though the rest of the file uses print().
print("Starting PyNN with simulator: %s" % simulator_name)
timer = Timer()
# Integration time step (ms) shared by every sub-plot section below.
globalTimeStep = 0.01
# v represents the membrane potential of the neuron
# u represents a membrane recovery variable
# Synaptic currents or injected dc-currents are delivered via the variable I.
# Dimensionless parameters
# The parameter a describes the time scale of the recovery variable u
# The parameter b describes the sensitivity of the recovery variable
# u to the subthreshold fluctuations of the membrane potential v.
# The parameter c describes the after-spike reset value of the membrane
# potential v caused by the fast high-threshold K+ conductances.
# The parameter d describes after-spike reset of the recovery variable
# u caused by slow high-threshold Na+ and K+ conductances.
#############################################
## Sub-plot A: Tonic spiking
#############################################
timeStep = globalTimeStep
setup(timestep=timeStep, min_delay=0.5)
# Izhikevich parameters for tonic spiking (Izhikevich 2004, Fig. 1A).
a = 0.02
b = 0.2
c = -65.0
d = 6.0
I = 0
# Start at rest; u is initialised from v as u = b * v.
v_init = -70
u_init = b * v_init
neuronParameters = {
    'a': a,
    'b': b,
    'c': c,
    'd': d,
    'i_offset': I
}
initialValues = {'u': u_init, 'v': v_init}
cell_type = Izhikevich(**neuronParameters)
neuron = create(cell_type)
neuron.initialize(**initialValues)
neuron.record('v')
# 10 ms with no input, then a constant current step for the remaining 90 ms.
run(10)
neuron.set(i_offset = 14)
run(90)
data = neuron.get_data().segments[0]
plt.ion()
fig = plt.figure(1, facecolor='white')
ax1 = fig.add_subplot(5, 4, 1)
# Hide axes and spines: the panel mimics the bare traces of the original figure.
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
ax1.spines['left'].set_color('None')
ax1.spines['right'].set_color('None')
ax1.spines['bottom'].set_color('None')
ax1.spines['top'].set_color('None')
ax1.set_title('(A) Tonic spiking')
vm = data.filter(name='v')[0]
# Overlay the membrane trace with a schematic of the input-current step.
plt.plot(vm.times, vm, [0, 10, 10, 100],[-90, -90,-80, -80]);
plt.show(block=False)
fig.canvas.draw()
#############################################
## Sub-plot B: Phasic spiking
#############################################
timeStep = globalTimeStep
setup(timestep=timeStep, min_delay=0.5)
a = 0.02
b = 0.25
c = -65.0
d = 6.0
I = 0
v_init = -64
u_init = b * v_init
neuronParameters = {
'a': a,
'b': b,
'c': c,
'd': d,
'i_offset': I
}
initialValues = {'u': u_init, 'v': v_init}
cell_type = Izhikevich(**neuronParameters)
neuron = create(cell_type)
neuron.initialize(**initialValues)
neuron.record('v')
run(20)
neuron.set(i_offset = 0.5)
run(180)
data = neuron.get_data().segments[0]
plt.ion()
fig = plt.figure(1, facecolor='white')
ax1 = fig.add_subplot(5, 4, 2)
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
ax1.spines['left'].set_color('None')
ax1.spines['right'].set_color('None')
ax1.spines['bottom'].set_color('None')
ax1.spines['top'].set_color('None')
ax1.set_title('(B) Phasic spiking')
vm = data.filter(name='v')[0]
plt.plot(vm.times, vm, [0, 20, 20, 200],[-90, -90,-80, -80]);
plt.show(block=False)
fig.canvas.draw()
#############################################
## Sub-plot C: Tonic bursting
#############################################
timeStep = globalTimeStep
setup(timestep=timeStep, min_delay=0.5)
a = 0.02
b = 0.2
c = -50.0
d = 2.0
I = 0
v_init = -70.0
u_init = b * v_init
neuronParameters = {
'a': a,
'b': b,
'c': c,
'd': d,
'i_offset': I
}
initialValues = {'u': u_init, 'v': v_init}
cell_type = Izhikevich(**neuronParameters)
neuron = create(cell_type)
neuron.initialize(**initialValues)
neuron.record('v')
run(22)
neuron.set(i_offset = 15.)
run(198)
data = neuron.get_data().segments[0]
plt.ion()
fig = plt.figure(1, facecolor='white')
ax1 = fig.add_subplot(5, 4, 3)
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
ax1.spines['left'].set_color('None')
ax1.spines['right'].set_color('None')
ax1.spines['bottom'].set_color('None')
ax1.spines['top'].set_color('None')
ax1.set_title('(C) Tonic bursting')
vm = data.filter(name='v')[0]
plt.plot(vm.times, vm, [0, 22, 22, 220],[-90, -90,-80, -80]);
plt.show(block=False)
fig.canvas.draw()
#############################################
## Sub-plot D: Phasic bursting
#############################################
timeStep = globalTimeStep
setup(timestep=timeStep, min_delay=0.5)
a = 0.02
b = 0.25
c = -55.0
d = 0.05
I = 0
v_init = -64.0
u_init = b * v_init
neuronParameters = {
'a': a,
'b': b,
'c': c,
'd': d,
'i_offset': I
}
initialValues = {'u': u_init, 'v': v_init}
cell_type = Izhikevich(**neuronParameters)
neuron = create(cell_type)
neuron.initialize(**initialValues)
neuron.record('v')
run(20)
neuron.set(i_offset = 0.6)
run(180)
data = neuron.get_data().segments[0]
plt.ion()
fig = plt.figure(1, facecolor='white')
ax1 = fig.add_subplot(5, 4, 4)
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
ax1.spines['left'].set_color('None')
ax1.spines['right'].set_color('None')
ax1.spines['bottom'].set_color('None')
ax1.spines['top'].set_color('None')
ax1.set_title('(D) Phasic bursting')
vm = data.filter(name='v')[0]
plt.plot(vm.times, vm, [0, 20, 20, 200],[-90, -90,-80, -80]);
plt.show(block=False)
fig.canvas.draw()
#############################################
## Sub-plot E: Mixed mode
#############################################
timeStep = globalTimeStep
setup(timestep=timeStep, min_delay=0.5)
a = 0.02
b = 0.2
c = -55.0
d = 4.0
I = 0
v_init = -70.0
u_init = b * v_init
neuronParameters = {
'a': a,
'b': b,
'c': c,
'd': d,
'i_offset': I
}
initialValues = {'u': u_init, 'v': v_init}
cell_type = Izhikevich(**neuronParameters)
neuron = create(cell_type)
neuron.initialize(**initialValues)
neuron.record('v')
run(16)
neuron.set(i_offset = 10.0)
run(160 - 16)
data = neuron.get_data().segments[0]
plt.ion()
fig = plt.figure(1, facecolor='white')
ax1 = fig.add_subplot(5, 4, 5)
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
ax1.spines['left'].set_color('None')
ax1.spines['right'].set_color('None')
ax1.spines['bottom'].set_color('None')
ax1.spines['top'].set_color('None')
ax1.set_title('(E) Mixed mode')
vm = data.filter(name='v')[0]
plt.plot(vm.times, vm, [0, 16, 16, 160],[-90, -90,-80, -80]);
plt.show(block=False)
fig.canvas.draw()
#######################################################
## Sub-plot F: Spike Frequency Adaptation (SFA)
#######################################################
timeStep = globalTimeStep
setup(timestep=timeStep, min_delay=0.5)
a = 0.01
b = 0.2
c = -65.0
d = 8.0
I = 0
v_init = -70.0
u_init = b * v_init
neuronParameters = {
'a': a,
'b': b,
'c': c,
'd': d,
'i_offset': I
}
initialValues = {'u': u_init, 'v': v_init}
cell_type = Izhikevich(**neuronParameters)
neuron = create(cell_type)
neuron.initialize(**initialValues)
neuron.record('v')
run(8.5)
neuron.set(i_offset = 30.0)
run(85 - 8.5)
data = neuron.get_data().segments[0]
plt.ion()
fig = plt.figure(1, facecolor='white')
ax1 = fig.add_subplot(5, 4, 6)
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
ax1.spines['left'].set_color('None')
ax1.spines['right'].set_color('None')
ax1.spines['bottom'].set_color('None')
ax1.spines['top'].set_color('None')
ax1.set_title('(F) SFA')
vm = data.filter(name='v')[0]
plt.plot(vm.times, vm, [0, 8.5, 8.5, 85],[-90, -90,-80, -80]);
plt.show(block=False)
fig.canvas.draw()
############################################
## Sub-plot G: Class 1 excitable
############################################
'''
Note eqn for this cell is:
V = V + tau*(0.04*V^2+4.1*V+108-u+I);
as opposed to
V = V + tau*(0.04*V^2+5*V+140-u+I);
in figure1.m
'''
timeStep = globalTimeStep
setup(timestep=timeStep, min_delay=0.5)
a = 0.02
b = 0.2
c = -65.0
d = 6.0
I = 0
v_init = -70.0
u_init = b * v_init
neuronParameters = {
'a': a,
'b': b,
'c': c,
'd': d,
'i_offset': I
}
initialValues = {'u': u_init, 'v': v_init}
cell_type = Izhikevich(**neuronParameters)
neuron = create(cell_type)
neuron.initialize(**initialValues)
neuron.record('v')
totalTimes = np.zeros(0)
totalAmps = np.zeros(0)
times = np.linspace(0.0, 30.0, int(1 + (30.0 - 0.0) / timeStep))
amps = np.linspace(0.0, 0.0, int(1 + (30.0 - 0.0) / timeStep))
totalTimes = np.append(totalTimes, times)
totalAmps = np.append(totalAmps, amps)
injectedCurrent = StepCurrentSource(times=times, amplitudes=amps)
injectedCurrent.inject_into(neuron)
times = np.linspace(30 + timeStep, 300, int((300 - 30) / timeStep))
amps = np.linspace(0.075 * timeStep, 0.075 * (300 - 30), int((300 - 30) / timeStep))
totalTimes = np.append(totalTimes, times)
totalAmps = np.append(totalAmps, amps)
injectedCurrent = StepCurrentSource(times=times, amplitudes=amps)
injectedCurrent.inject_into(neuron)
run(300)
data = neuron.get_data().segments[0]
plt.ion()
fig = plt.figure(1, facecolor='white')
ax1 = fig.add_subplot(5, 4, 7)
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
ax1.spines['left'].set_color('None')
ax1.spines['right'].set_color('None')
ax1.spines['bottom'].set_color('None')
ax1.spines['top'].set_color('None')
plt.xlim((0.0, 300.0))
plt.ylim((-95.0, 30.0))
ax1.set_title('(G) Class 1 excitable')
vm = data.filter(name='v')[0]
plt.plot(vm.times, vm, [0, 30, 300, 300],[-90, -90, -70, -90])
plt.show(block=False)
fig.canvas.draw()
############################################
## Sub-plot H: Class 2 excitable
############################################
timeStep = globalTimeStep
setup(timestep=timeStep, min_delay=0.5)
a = 0.2
b = 0.26
c = -65.0
d = 0.0
I = -0.5
v_init = -64.0
u_init = b * v_init
neuronParameters = {
'a': a,
'b': b,
'c': c,
'd': d,
'i_offset': I
}
initialValues = {'u': u_init, 'v': v_init}
cell_type = Izhikevich(**neuronParameters)
neuron = create(cell_type)
neuron.initialize(**initialValues)
neuron.record('v')
totalTimes = np.zeros(0)
totalAmps = np.zeros(0)
times = np.linspace(0.0, 30.0, int(1 + (30.0 - 0.0) / timeStep))
amps = np.linspace(-0.5, -0.5, int(1 + (30.0 - 0.0) / timeStep))
totalTimes = np.append(totalTimes, times)
totalAmps = np.append(totalAmps, amps)
times = np.linspace(30 + timeStep, 300, int((300 - 30) / timeStep))
amps = np.linspace(-0.5 + 0.015 * timeStep, -0.5 + 0.015 * (300 - 30), int((300 - 30) / timeStep))
totalTimes = np.append(totalTimes, times)
totalAmps = np.append(totalAmps, amps)
injectedCurrent = StepCurrentSource(times=totalTimes, amplitudes=totalAmps)
injectedCurrent.inject_into(neuron)
run(300)
data = neuron.get_data().segments[0]
plt.ion()
fig = plt.figure(1, facecolor='white')
ax1 = fig.add_subplot(5, 4, 8)
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
ax1.spines['left'].set_color('None')
ax1.spines['right'].set_color('None')
ax1.spines['bottom'].set_color('None')
ax1.spines['top'].set_color('None')
plt.xlim((0.0, 300.0))
plt.ylim((-95.0, 30.0))
ax1.set_title('(H) Class 2 excitable')
vm = data.filter(name='v')[0]
plt.plot(vm.times, vm, [0, 30, 300, 300],[-90, -90,-70, -90]);
plt.show(block=False)
fig.canvas.draw()
#########################################
## Sub-plot I: Spike latency
#########################################
timeStep = globalTimeStep
setup(timestep=timeStep, min_delay=0.5)
a = 0.02
b = 0.2
c = -65.0
d = 6.0
I = 0
v_init = -70.0
u_init = b * v_init
neuronParameters = {
'a': a,
'b': b,
'c': c,
'd': d,
'i_offset': I
}
initialValues = {'u': u_init, 'v': v_init}
cell_type = Izhikevich(**neuronParameters)
neuron = create(cell_type)
neuron.initialize(**initialValues)
neuron.record('v')
run(10)
# neuron.set(i_offset = 7.04)
neuron.set(i_offset = 6.71)
run(3)
neuron.set(i_offset = 0.0)
run(100 - 13)
data = neuron.get_data().segments[0]
plt.ion()
fig = plt.figure(1, facecolor='white')
ax1 = fig.add_subplot(5, 4, 9)
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
ax1.spines['left'].set_color('None')
ax1.spines['right'].set_color('None')
ax1.spines['bottom'].set_color('None')
ax1.spines['top'].set_color('None')
ax1.set_title('(I) Spike latency')
vm = data.filter(name='v')[0]
plt.plot(vm.times, vm, [0, 10, 10, 13, 13, 100],[-90, -90, -80, -80, -90, -90]);
plt.show(block=False)
fig.canvas.draw()
#################################################
## Sub-plot J: Subthreshold oscillation
#################################################
timeStep = globalTimeStep
setup(timestep=timeStep, min_delay=0.5)
a = 0.05
b = 0.26
c = -60.0
d = 0.0
I = 0
v_init = -62.0
u_init = b * v_init
neuronParameters = {
'a': a,
'b': b,
'c': c,
'd': d,
'i_offset': I
}
initialValues = {'u': u_init, 'v': v_init}
cell_type = Izhikevich(**neuronParameters)
neuron = create(cell_type)
neuron.initialize(**initialValues)
neuron.record('v')
run(20)
neuron.set(i_offset = 2.0)
run(5)
neuron.set(i_offset = 0.0)
run(200 - 25)
data = neuron.get_data().segments[0]
plt.ion()
fig = plt.figure(1, facecolor='white')
ax1 = fig.add_subplot(5, 4, 10)
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
ax1.spines['left'].set_color('None')
ax1.spines['right'].set_color('None')
ax1.spines['bottom'].set_color('None')
ax1.spines['top'].set_color('None')
ax1.set_title('(J) Subthreshold oscillation')
vm = data.filter(name='v')[0]
plt.plot(vm.times, vm, [0, 20, 20, 25, 25, 200],[-90, -90, -80, -80, -90, -90]);
plt.show(block=False)
fig.canvas.draw()
####################################
## Sub-plot K: Resonator
####################################
timeStep = globalTimeStep
setup(timestep=timeStep, min_delay=0.5)
a = 0.1
b = 0.26
c = -60.0
d = -1.0
I = 0
v_init = -62.0
u_init = b * v_init
T1=400/10;
T2=T1+20;
T3 = 0.7*400;
T4 = T3+40;
neuronParameters = {
'a': a,
'b': b,
'c': c,
'd': d,
'i_offset': I
}
initialValues = {'u': u_init, 'v': v_init}
cell_type = Izhikevich(**neuronParameters)
neuron = create(cell_type)
neuron.initialize(**initialValues)
neuron.record('v')
simTime = T1
run(simTime)
simulatedTime = simTime
neuron.set(i_offset = 0.65)
simTime = 4
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 0.0)
simTime = T2 - simulatedTime
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 0.65)
simTime = 4
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 0.0)
simTime = T3 - simulatedTime
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 0.65)
simTime = 4
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 0.0)
simTime = T4 - simulatedTime
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 0.65)
simTime = 4
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 0.0)
simTime = 400 - simulatedTime
run(simTime)
simulatedTime = simulatedTime + simTime
data = neuron.get_data().segments[0]
plt.ion()
fig = plt.figure(1, facecolor='white')
ax1 = fig.add_subplot(5, 4, 11)
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
ax1.spines['left'].set_color('None')
ax1.spines['right'].set_color('None')
ax1.spines['bottom'].set_color('None')
ax1.spines['top'].set_color('None')
ax1.set_title('(K) Resonator')
vm = data.filter(name='v')[0]
plt.plot(vm.times, vm, [0, T1, T1, (T1+8), (T1+8), T2, T2, (T2+8), (T2+8), T3, T3, (T3+8), (T3+8), T4, T4, (T4+8), (T4+8), 400], [-90, -90, -80, -80, -90, -90, -80, -80, -90, -90, -80, -80, -90, -90, -80, -80, -90, -90]);
plt.show(block=False)
fig.canvas.draw()
####################################
## Sub-plot L: Integrator
####################################
'''
Note eqn for this cell is:
V = V + tau*(0.04*V^2+4.1*V+108-u+I);
as opposed to
V = V + tau*(0.04*V^2+5*V+140-u+I);
in figure1.m
'''
timeStep = globalTimeStep
setup(timestep=timeStep, min_delay=0.5)
a = 0.02
b = -0.1
c = -55.0
d = 6.0
I = 0
v_init = -60.0
u_init = b * v_init
T1=100/11;
T2=T1+5;
T3 = 0.7*100;
T4 = T3+10;
neuronParameters = {
'a': a,
'b': b,
'c': c,
'd': d,
'i_offset': I
}
initialValues = {'u': u_init, 'v': v_init}
cell_type = Izhikevich(**neuronParameters)
neuron = create(cell_type)
neuron.initialize(**initialValues)
neuron.record('v')
simTime = T1
run(simTime)
simulatedTime = simTime
neuron.set(i_offset = 9)
simTime = 2
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 0.0)
simTime = T2 - simulatedTime
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 9)
simTime = 2
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 0.0)
simTime = T3 - simulatedTime
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 9)
simTime = 2
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 0.0)
simTime = T4 - simulatedTime
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 9)
simTime = 2
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 0.0)
simTime = 100 - simulatedTime
run(simTime)
simulatedTime = simulatedTime + simTime
data = neuron.get_data().segments[0]
plt.ion()
fig = plt.figure(1, facecolor='white')
ax1 = fig.add_subplot(5, 4, 12)
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
ax1.spines['left'].set_color('None')
ax1.spines['right'].set_color('None')
ax1.spines['bottom'].set_color('None')
ax1.spines['top'].set_color('None')
ax1.set_title('(L) Integrator')
vm = data.filter(name='v')[0]
plt.plot(vm.times, vm, [0, T1, T1, (T1+2), (T1+2), T2, T2, (T2+2), (T2+2), T3, T3, (T3+2), (T3+2), T4, T4, (T4+2), (T4+2), 100], [-90, -90, -80, -80, -90, -90, -80, -80, -90, -90, -80, -80, -90, -90, -80, -80, -90, -90]);
plt.show(block=False)
fig.canvas.draw()
######################################
## Sub-plot M: Rebound spike
######################################
timeStep = globalTimeStep
setup(timestep=timeStep, min_delay=0.5)
a = 0.03
b = 0.25
c = -60.0
d = 4.0
I = 0
v_init = -64.0
u_init = b * v_init
neuronParameters = {
'a': a,
'b': b,
'c': c,
'd': d,
'i_offset': I
}
initialValues = {'u': u_init, 'v': v_init}
cell_type = Izhikevich(**neuronParameters)
neuron = create(cell_type)
neuron.initialize(**initialValues)
neuron.record('v')
run(20)
neuron.set(i_offset = -15.0)
run(5)
neuron.set(i_offset = 0.0)
run(200 - 25)
data = neuron.get_data().segments[0]
plt.ion()
fig = plt.figure(1, facecolor='white')
ax1 = fig.add_subplot(5, 4, 13)
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
ax1.spines['left'].set_color('None')
ax1.spines['right'].set_color('None')
ax1.spines['bottom'].set_color('None')
ax1.spines['top'].set_color('None')
ax1.set_title('(M) Rebound spike')
vm = data.filter(name='v')[0]
plt.plot(vm.times, vm, [0, 20, 20, 25, 25, 200],[-85, -85, -90, -90, -85, -85]);
plt.show(block=False)
fig.canvas.draw()
######################################
## Sub-plot N: Rebound burst
######################################
timeStep = globalTimeStep
setup(timestep=timeStep, min_delay=0.5)
a = 0.03
b = 0.25
c = -52.0
d = 0.0
I = 0
v_init = -64.0
u_init = b * v_init
neuronParameters = {
'a': a,
'b': b,
'c': c,
'd': d,
'i_offset': I
}
initialValues = {'u': u_init, 'v': v_init}
cell_type = Izhikevich(**neuronParameters)
neuron = create(cell_type)
neuron.initialize(**initialValues)
neuron.record('v')
run(20)
neuron.set(i_offset = -15.0)
run(5)
neuron.set(i_offset = 0.0)
run(200 - 25)
data = neuron.get_data().segments[0]
plt.ion()
fig = plt.figure(1, facecolor='white')
ax1 = fig.add_subplot(5, 4, 14)
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
ax1.spines['left'].set_color('None')
ax1.spines['right'].set_color('None')
ax1.spines['bottom'].set_color('None')
ax1.spines['top'].set_color('None')
ax1.set_title('(N) Rebound burst')
vm = data.filter(name='v')[0]
plt.plot(vm.times, vm, [0, 20, 20, 25, 25, 200],[-85, -85, -90, -90, -85, -85]);
plt.show(block=False)
fig.canvas.draw()
###############################################
## Sub-plot O: Threshold variability
###############################################
timeStep = globalTimeStep
setup(timestep=timeStep, min_delay=0.5)
a = 0.03
b = 0.25
c = -60.0
d = 4.0
I = 0
v_init = -64.0
u_init = b * v_init
neuronParameters = {
'a': a,
'b': b,
'c': c,
'd': d,
'i_offset': I
}
initialValues = {'u': u_init, 'v': v_init}
cell_type = Izhikevich(**neuronParameters)
neuron = create(cell_type)
neuron.initialize(**initialValues)
neuron.record('v')
simTime = 10
run(simTime)
simulatedTime = simTime
neuron.set(i_offset = 1.0)
simTime = 15 - simulatedTime
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 0.0)
simTime = 70 - simulatedTime
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = -6.0)
simTime = 75 - simulatedTime
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 0.0)
simTime = 80 - simulatedTime
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 1.0)
simTime = 85 - simulatedTime
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 0.0)
simTime = 100 - simulatedTime
run(simTime)
data = neuron.get_data().segments[0]
plt.ion()
fig = plt.figure(1, facecolor='white')
ax1 = fig.add_subplot(5, 4, 15)
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
ax1.spines['left'].set_color('None')
ax1.spines['right'].set_color('None')
ax1.spines['bottom'].set_color('None')
ax1.spines['top'].set_color('None')
ax1.set_title('(O) Threshold variability')
vm = data.filter(name='v')[0]
plt.plot(vm.times, vm, [0, 10, 10, 15, 15, 70, 70, 75, 75, 80, 80, 85, 85, 100],[-85, -85, -80 , -80 , -85 , -85, -90, -90, -85, -85, -80 , -80 , -85, -85]);
plt.show(block=False)
fig.canvas.draw()
######################################
## Sub-plot P: Bistability
######################################
timeStep = globalTimeStep
setup(timestep=timeStep, min_delay=0.5)
a = 0.1
b = 0.26
c = -60.0
d = 0.0
I = 0.24
v_init = -61.0
u_init = b * v_init
neuronParameters = {
'a': a,
'b': b,
'c': c,
'd': d,
'i_offset': I
}
initialValues = {'u': u_init, 'v': v_init}
cell_type = Izhikevich(**neuronParameters)
neuron = create(cell_type)
neuron.initialize(**initialValues)
neuron.record('v')
simTime = 300.0/8
run(simTime)
simulatedTime = simTime
neuron.set(i_offset = 1.24)
simTime = 5
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 0.24)
# simTime = 216 - simulatedTime
simTime = 208 - simulatedTime
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 1.24)
simTime = 5
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 0.24)
simTime = 300 - simulatedTime
run(simTime)
simulatedTime = simulatedTime + simTime
data = neuron.get_data().segments[0]
plt.ion()
fig = plt.figure(1, facecolor='white')
ax1 = fig.add_subplot(5, 4, 16)
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
ax1.spines['left'].set_color('None')
ax1.spines['right'].set_color('None')
ax1.spines['bottom'].set_color('None')
ax1.spines['top'].set_color('None')
ax1.set_title('(P) Bistability')
vm = data.filter(name='v')[0]
plt.plot(vm.times, vm, [0, 300.0/8, 300.0/8, (300.0/8 + 5), (300.0/8 + 5), 216, 216, 221, 221, 300],[-90, -90, -80, -80, -90, -90, -80, -80, -90, -90]);
plt.show(block=False)
fig.canvas.draw()
#####################################################
## Sub-plot Q: Depolarizing after-potential
#####################################################
timeStep = globalTimeStep
setup(timestep=timeStep, min_delay=0.5)
a = 1.0
b = 0.18
c = -60.0
d = -21.0
I = 0.0
v_init = -70.0
u_init = b * v_init
neuronParameters = {
'a': a,
'b': b,
'c': c,
'd': d,
'i_offset': I
}
initialValues = {'u': u_init, 'v': v_init}
cell_type = Izhikevich(**neuronParameters)
neuron = create(cell_type)
neuron.initialize(**initialValues)
neuron.record('v')
simTime = 9
run(simTime)
simulatedTime = simTime
neuron.set(i_offset = 20.0)
simTime = 2
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 0.0)
simTime = 50 - simulatedTime
run(simTime)
simulatedTime = simulatedTime + simTime
data = neuron.get_data().segments[0]
plt.ion()
fig = plt.figure(1, facecolor='white')
ax1 = fig.add_subplot(5, 4, 17)
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
ax1.spines['left'].set_color('None')
ax1.spines['right'].set_color('None')
ax1.spines['bottom'].set_color('None')
ax1.spines['top'].set_color('None')
ax1.set_title('(Q) DAP')
vm = data.filter(name='v')[0]
plt.plot(vm.times, vm, [0, 9, 9, 11, 11, 50],[-90, -90, -80, -80, -90, -90]);
plt.show(block=False)
fig.canvas.draw()
#####################################################
## Sub-plot R: Accomodation
#####################################################
timeStep = globalTimeStep
setup(timestep=timeStep, min_delay=0.5)
a = 0.02
b = 1.0
c = -55.0
d = 4.0
I = 0.0
v_init = -65.0
u_init = -16.0
neuronParameters = {
'a': a,
'b': b,
'c': c,
'd': d,
'i_offset': I
}
initialValues = {'u': u_init, 'v': v_init}
cell_type = Izhikevich(**neuronParameters)
neuron = create(cell_type)
neuron.initialize(**initialValues)
neuron.record('v')
totalTimes = np.zeros(0)
totalAmps = np.zeros(0)
times = np.linspace(0.0, 200.0, int(1 + (200.0 - 0.0) / timeStep))
amps = np.linspace(0.0, 8.0, int(1 + (200.0 - 0.0) / timeStep))
totalTimes = np.append(totalTimes, times)
totalAmps = np.append(totalAmps, amps)
times = np.linspace(200 + timeStep, 300, int((300 - 200) / timeStep))
amps = np.linspace(0.0, 0.0, int((300 - 200) / timeStep))
totalTimes = np.append(totalTimes, times)
totalAmps = np.append(totalAmps, amps)
times = np.linspace(300 + timeStep, 312.5, int((312.5 - 300) / timeStep))
amps = np.linspace(0.0, 4.0, int((312.5 - 300) / timeStep))
totalTimes = np.append(totalTimes, times)
totalAmps = np.append(totalAmps, amps)
times = np.linspace(312.5 + timeStep, 400, int((400 - 312.5) / timeStep))
amps = np.linspace(0.0, 0.0, int((400 - 312.5) / timeStep))
totalTimes = np.append(totalTimes, times)
totalAmps = np.append(totalAmps, amps)
injectedCurrent = StepCurrentSource(times=totalTimes, amplitudes=totalAmps)
injectedCurrent.inject_into(neuron)
run(400.0)
data = neuron.get_data().segments[0]
plt.ion()
fig = plt.figure(1, facecolor='white')
ax1 = fig.add_subplot(5, 4, 18)
#plt.xlabel("Time (ms)")
#plt.ylabel("Vm (mV)")
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
ax1.spines['left'].set_color('None')
ax1.spines['right'].set_color('None')
ax1.spines['bottom'].set_color('None')
ax1.spines['top'].set_color('None')
plt.xlim((0.0, 400.0))
plt.ylim((-95.0, 30.0))
ax1.set_title('(R) Accomodation')
vm = data.filter(name='v')[0]
plt.plot(vm.times, vm, totalTimes,1.5 * totalAmps - 90);
plt.show(block=False)
fig.canvas.draw()
#####################################################
## Sub-plot S: Inhibition-induced spiking
#####################################################
timeStep = globalTimeStep
setup(timestep=timeStep, min_delay=0.5)
a = -0.02
b = -1.0
c = -60.0
d = 8.0
I = 80.0
v_init = -63.8
u_init = b * v_init
neuronParameters = {
'a': a,
'b': b,
'c': c,
'd': d,
'i_offset': I
}
initialValues = {'u': u_init, 'v': v_init}
cell_type = Izhikevich(**neuronParameters)
neuron = create(cell_type)
neuron.initialize(**initialValues)
neuron.record('v')
simTime = 50
run(simTime)
simulatedTime = simTime
neuron.set(i_offset = 75.0)
simTime = 220 - simulatedTime
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 80.0)
simTime = 350 - simulatedTime
run(simTime)
simulatedTime = simulatedTime + simTime
data = neuron.get_data().segments[0]
plt.ion()
fig = plt.figure(1, facecolor='white')
ax1 = fig.add_subplot(5, 4, 19)
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
ax1.spines['left'].set_color('None')
ax1.spines['right'].set_color('None')
ax1.spines['bottom'].set_color('None')
ax1.spines['top'].set_color('None')
ax1.set_title('(S) Inhibition-induced spiking')
vm = data.filter(name='v')[0]
plt.plot(vm.times, vm, [0, 50, 50, 250, 250, 350],[-80, -80, -90, -90, -80, -80]);
plt.show(block=False)
fig.canvas.draw()
#####################################################
## Sub-plot T: Inhibition-induced bursting
#####################################################
timeStep = globalTimeStep
setup(timestep=timeStep, min_delay=0.5)
'''
Modifying parameter d from -2.0 to -0.7 in order to reproduce Fig. 1
'''
a = -0.026
b = -1.0
c = -45.0
d = -0.7
I = 80.0
v_init = -63.8
u_init = b * v_init
neuronParameters = {
'a': a,
'b': b,
'c': c,
'd': d,
'i_offset': I
}
initialValues = {'u': u_init, 'v': v_init}
cell_type = Izhikevich(**neuronParameters)
neuron = create(cell_type)
neuron.initialize(**initialValues)
neuron.record('v')
simTime = 50
run(simTime)
simulatedTime = simTime
neuron.set(i_offset = 75.0)
simTime = 250 - simulatedTime
run(simTime)
simulatedTime = simulatedTime + simTime
neuron.set(i_offset = 80.0)
simTime = 350 - simulatedTime
run(simTime)
simulatedTime = simulatedTime + simTime
data = neuron.get_data().segments[0]
plt.ion()
fig = plt.figure(1, facecolor='white')
ax1 = fig.add_subplot(5, 4, 20)
#plt.xlabel("Time (ms)")
#plt.ylabel("Vm (mV)")
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
ax1.spines['left'].set_color('None')
ax1.spines['right'].set_color('None')
ax1.spines['bottom'].set_color('None')
ax1.spines['top'].set_color('None')
ax1.set_title('(T) Inhibition-induced bursting')
vm = data.filter(name='v')[0]
plt.plot(vm.times, vm, [0, 50, 50, 250, 250, 350],[-80, -80, -90, -90, -80, -80]);
plt.show(block=False)
fig.canvas.draw()
raw_input("Simulation finished... Press enter to exit...")
|
# Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import re
from textwrap import dedent
from pants.backend.project_info.list_targets import ListSubsystem, list_targets
from pants.engine.addresses import Address, Addresses
from pants.engine.target import DescriptionField, Target, UnexpandedTargets
from pants.testutil.option_util import create_goal_subsystem, create_options_bootstrapper
from pants.testutil.rule_runner import MockGet, mock_console, run_rule_with_mocks
class MockTarget(Target):
alias = "tgt"
core_fields = (DescriptionField,)
def run_goal(targets: list[MockTarget], *, show_documented: bool = False) -> tuple[str, str]:
with mock_console(create_options_bootstrapper()) as (console, stdio_reader):
run_rule_with_mocks(
list_targets,
rule_args=[
Addresses(tgt.address for tgt in targets),
create_goal_subsystem(
ListSubsystem,
sep="\\n",
output_file=None,
documented=show_documented,
),
console,
],
mock_gets=[
MockGet(
output_type=UnexpandedTargets,
input_types=(Addresses,),
mock=lambda _: UnexpandedTargets(targets),
)
],
)
return stdio_reader.get_stdout(), stdio_reader.get_stderr()
def test_list_normal() -> None:
# Note that these are unsorted and that we include generated targets.
addresses = (
Address("", target_name="t2"),
Address("", target_name="t1"),
Address("", target_name="gen", relative_file_path="f.ext"),
Address("", target_name="gen", generated_name="foo"),
)
stdout, _ = run_goal([MockTarget({}, addr) for addr in addresses])
assert stdout == dedent(
"""\
//:gen#foo
//:t1
//:t2
//f.ext:gen
"""
)
def test_no_targets_warns() -> None:
_, stderr = run_goal([])
assert re.search("WARN.* No targets", stderr)
def test_list_documented() -> None:
stdout, _ = run_goal(
[
MockTarget(
{DescriptionField.alias: "Description of a target.\n\tThis target is the best."},
Address("", target_name="described"),
),
MockTarget({}, Address("", target_name="not_described")),
],
show_documented=True,
)
assert stdout == dedent(
"""\
//:described
Description of a target.
\tThis target is the best.
"""
)
|
from ABC.NodeAST import NodeAST
from ABC.Instruction import Instruction
from ST.Exception import Exception
from ST.SymbolTable import SymbolTable
from Instructions.Break import Break
from Instructions.Function import Function
from Instructions.Continue import Continue
class Main(Instruction):
def __init__(self, instructions, line, column):
self.instructions = instructions
self.line = line
self.column = column
self.ReportSymbol = None
def interpreter(self, tree, table):
ambitMain = SymbolTable(table)
for instruction in self.instructions:
value = instruction.interpreter(tree, ambitMain)
if isinstance(value, Function):
value = instruction.interpreter(tree, ambitMain)
self.ReportSymbol = instruction.ReportSymbol
if isinstance(value, Exception):
tree.getException().append(value)
tree.updateConsole(value.toString())
if isinstance(value, Break):
errBreak = Exception("Semantico", "Sentencia Break no va dentro del main", instruction.line, instruction.column)
tree.getException().append(errBreak)
tree.updateConsole(errBreak.toString())
if isinstance(value, Continue):
errContinue = Exception("Semantico", "Sentencia Continue no va dentro del main", instruction.line, instruction.column)
tree.getException().append(errContinue)
tree.updateConsole(errContinue.toString())
def getNode(self):
node = NodeAST("MAIN")
nodeInstructions = NodeAST("INSTRUCCIONES")
for instruction in self.instructions:
nodeInstructions.addChild(instruction.getNode())
node.addChild(nodeInstructions)
return node
|
import torch.nn as nn
import torch
def conv3x3(in_channels, out_channels, stride=1):
"""
3x3卷积层,并且隐藏了3x3卷积输入输出维度相同的条件
:param in_channels:输入的通道数
:param out_channels:输出通道数
:param stride:卷积步长
:return:创建好的3x3卷积
"""
return nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=stride, bias=False, padding=1)
def conv1x1(in_channels, out_channels, stride=1):
return nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=stride, bias=False)
def deconv3x3(in_channels, out_channels, stride=1, kernel_size=3):
return nn.ConvTranspose2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride,
padding=int((kernel_size - 1) / 2), output_padding=stride - 1, bias=False)
def deconv1x1(in_channels, out_channels, stride=1, kernel_size=1):
return nn.ConvTranspose2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride,
padding=int((kernel_size - 1) / 2), output_padding=stride - 1, bias=False)
class BasicBlock(nn.Module):
def __init__(self, in_channels, out_channels, stride=1, downsample=None):
"""
:param in_channels:输入通道数量
:param out_channels:输出通道数量
:param stride:步长
"""
super(BasicBlock, self).__init__()
self.conv1 = conv3x3(in_channels, out_channels, stride)
self.bn1 = nn.BatchNorm2d(out_channels)
self.relu = nn.ReLU(inplace=True) # inplace参数用来指示是否覆盖原变量,可用来减少内存占用
self.conv2 = conv3x3(out_channels, out_channels)
self.bn2 = nn.BatchNorm2d(out_channels)
self.stride = stride
self.downsample = downsample
def forward(self, x):
# identity = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
identity = self.downsample(x)
# out += identity
out = self.relu(out)
return out
class ResNet(nn.Module):
def __init__(self, layers=[2, 2, 2, 2], num_class=10):
"""
:param layers:定义每个layer的block数量,默认与Assignment 1中对应
:param num_class:最终分类的类数
:param groups:分组卷积组数
"""
super(ResNet, self).__init__()
self.in_channels = 64
# 这段代码过后特征图已经为原来的1/4
self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False)
self.bn1 = nn.BatchNorm2d(64)
self.relu = nn.ReLU(inplace=True) # inplace参数用来指示是否覆盖原变量,可用来减少内存占用
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1, return_indices=True)
self.conv2_x = self._make_layer(64, layers[0])
self.conv3_x = self._make_layer(128, layers[1], stride=2)
self.conv4_x = self._make_layer(256, layers[2], stride=2)
self.conv5_x = self._make_layer(512, layers[3], stride=2)
# self.conv5_x_0_conv1 = conv3x3(256, 512, stride=2)
# self.conv5_x_0_bn1 = nn.BatchNorm2d(512)
# self.conv5_x_0_conv2 = conv3x3(512,512)
# self.conv5_x_0_bn2 = nn.BatchNorm2d(512)
#
# self.conv5_x_1_conv1 = conv3x3(512, 512, stride=2)
# self.conv5_x_1_bn1 = nn.BatchNorm2d(512)
# self.conv5_x_1_conv2 = conv3x3(512, 512)
# self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
# self.fc = nn.Linear(512, num_class)
def _make_layer(self, out_channels, blocks, stride=1):
"""
用于创建Resnet的层,比如con1,con2-x,con3-x...
:param block:指定创建的block类型
:param out_channels:输出通道数
:param blocks:block个数
:param stride:步长大小默认为1
:return:
"""
downsample = None
# if stride != 1 or self.in_channels != out_channels:
# downsample = nn.Sequential(
# conv1x1(self.in_channels, out_channels, stride),
# nn.BatchNorm2d(out_channels),
# )
#
layers = []
layers.append(BasicBlock(self.in_channels, out_channels, stride, downsample))
self.in_channels = out_channels
for _ in range(1, blocks):
layers.append(BasicBlock(self.in_channels, out_channels))
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x, _ = self.maxpool(x)
x = self.conv2_x(x)
x = self.conv3_x(x)
x = self.conv4_x(x)
x = self.conv5_x(x)
return x
class deconvBasicBlock(nn.Module):
def __init__(self, in_channels, out_channels, stride=1):
super(deconvBasicBlock, self).__init__()
self.bn2 = nn.BatchNorm2d(in_channels)
self.conv2 = deconv3x3(in_channels, in_channels)
self.relu = nn.ReLU(inplace=True)
self.bn1 = nn.BatchNorm2d(in_channels)
self.conv1 = deconv3x3(in_channels, out_channels, stride)
self.stride = stride
def forward(self, feature):
identity = feature
# print("indetity size:", identity.size())
out = self.bn2(feature)
out = self.conv2(out)
out = self.relu(out)
out = self.bn1(out)
out = self.conv1(out)
# print("out size:", out.size())
# out -= identity
out = self.relu(out)
return out
class deoconvResNet(nn.Module):
# pool = nn.MaxPool2d(kernel_size=3,stride=2,padding=1,return_indices=True)
def __init__(self, layers=[2, 2, 2, 2]):
"""
:param layers:定义每个layer的block数量,默认与Assignment 1中对应
:param num_class:最终分类的类数
:param groups:分组卷积组数
"""
super(deoconvResNet, self).__init__()
self.in_channels = 512
self.conv5_x = self._make_layer(256, layers[3], stride=2)
self.conv4_x = self._make_layer(128, layers[2], stride=2)
self.conv3_x = self._make_layer(64, layers[1], stride=2)
self.conv2_x = self._make_layer(64, layers[0])
self.maxunpool = nn.MaxUnpool2d(kernel_size=3, stride=2, padding=1)
self.relu = nn.ReLU(inplace=True) # inplace参数用来指示是否覆盖原变量,可用来减少内存占用
self.bn1 = nn.BatchNorm2d(64)
self.conv1 = nn.ConvTranspose2d(64, 3, kernel_size=7, stride=2, padding=3, bias=False)
def _make_layer(self, out_channels, blocks, stride=1):
"""
用于创建Resnet的层,比如con1,con2-x,con3-x...
:param block:指定创建的block类型
:param out_channels:输出通道数
:param blocks:block个数
:param stride:步长大小默认为1
:return:
"""
layers = []
layers.append(deconvBasicBlock(self.in_channels, self.in_channels, stride))
for _ in range(1, blocks):
layers.append(deconvBasicBlock(self.in_channels, out_channels))
self.in_channels = out_channels
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv5_x(x)
x = self.conv4_x(x)
x = self.conv3_x(x)
x = self.conv2_x(x)
x = self.maxunpool(x)
x = self.relu(x)
x = self.bn1(x)
x = self.conv1(x)
return x
class deconv_layer1(nn.Module):
def __init__(self):
super(deconv_layer1, self).__init__()
self.deconv1 = nn.ConvTranspose2d(64, 3, kernel_size=7, stride=2, padding=3, bias=False)
def forward(self,x):
return self.deconv1(x)
def resnet18_without_fc():
model = ResNet()
return model
def deconv_resnet18_without_fc():
model = deoconvResNet()
return model
|
# -*- coding: utf-8 -*-
import numpy as np
import gc
import re
import csv
import codecs
import matplotlib
import matplotlib.pyplot as plt
from decimal import *
import time as time_linger
import copy
import time as tmm
def get_dic(a, b):
a_2_b = {}
last_index = 0
len_b = len(b)
min_b = b[0]
for i in range(0, len(a)):
if a[i] >= min_b:
break
a = a[i:]
# print a
for m in a:
# print last_index
if last_index == (len_b - 1):
a_2_b[m] = b[len_b - 1]
for n in range(last_index, len_b):
if b[n] > m:
# print b[n]
a_2_b[m] = b[n - 1]
last_index = n - 1
break
elif b[n] == m:
a_2_b[m] = b[n]
last_index = n
break
else:
last_index = n
return a_2_b
try:
filenamelist = open("x.txt", 'rw')
except Exception:
print "x.txt open failed"
filenames = filenamelist.readlines()
if filenamelist:
filenamelist.close()
for i in filenames:
i = i.replace('\n', '')
file_read_time = i
file_write = i.replace('other.txt', 'debug.csv')
file_write = file_write.replace('bak_new_data/', 'bak_new_data/new/')
print file_read_time, file_write
# continue
try:
fil2 = codecs.open(file_write, "w")
# fil6 = codecs.open("channel_ssid_time.csv", "w", 'utf_8_sig')
write_record = csv.writer(fil2)
# write_ssid = csv.writer(fil6)
except Exception:
print "tranningdata open failed"
exit()
try:
fil1 = open(file_read_time, "r")
except Exception:
print "6666 or retranstime open failed."
last_time = tmm.time()
lines = fil1.readlines()
print tmm.time() - last_time, "1"
last_time = tmm.time()
count_iw = 0
count_drop = 0
count_beacon = 0
tmp_last = -1
queue = {}
survey = {}
leng = len(lines)
for j in range(0, leng):
item = lines[j]
# print item
length = len(item) - 1
tmp = int(item[0])
# print item, "before"
item = item[3:length]
item = re.split(", ", item)
item[0] = int(item[0])
length = len(item)
# print item
if tmp == 2:
# for i in range(4, length):
# item[i] = int(item[i])
count_drop += 1
elif tmp == 3:
# for i in range(3, length - 1):
# item[i] = int(item[i])
# try:
# item[length - 1] = float(item[length - 1])
# except Exception:
# pass
count_iw += 1
elif tmp == 4:
for i in range(2, length):
item[i] = int(item[i])
queue[item[0]] = item + [tmp]
elif tmp == 5:
# for i in range(2, 6):
# item[i] = int(item[i])
if tmp_last != 5:
count_beacon = 0
count_beacon += 1
elif tmp == 6:
for i in range(2, length):
item[i] = int(item[i])
item = item + [count_drop, count_iw, count_beacon]
count_iw = 0
survey[item[0]] = item + [tmp]
# print item
else:
print "fuck"
# print item
tmp_last = tmp
# if tmp in (4, 6):
# item = item + [tmp]
# # print item
# processed_lines.append(item)
# print lines[j]
print tmm.time() - last_time, "2"
last_time = tmm.time()
survey_keys = survey.keys()
survey_keys = sorted(survey_keys, reverse=True)
len_tmp = len(survey_keys)
j_found = 1
print tmm.time() - last_time, "3"
last_time = tmm.time()
for j in range(0, len_tmp):
found = False
# print survey_keys[j]
# if j % 10000 == 0:
# print j, len_tmp
m = j_found
while m < len_tmp:
if (survey_keys[j] - survey_keys[m]) < 10000:
m += 1
else:
j_found = m
found = True
break
if found is True:
t1 = survey[survey_keys[j]]
t2 = survey[survey_keys[j_found]]
dura = t1[2] - t2[2]
dura = float(dura)
if dura == 0:
continue
ll = (3, 4, 5, 6, 7, 10)
for k in ll:
t1[k] = float(t1[k] - t2[k]) / dura
t1[k] = round(t1[k], 6)
survey[survey_keys[j]] = t1
# print survey[survey_keys[j]]
print tmm.time() - last_time, "4"
last_time = tmm.time()
queue_keys = queue.keys()
queue_keys = sorted(queue_keys, reverse=True)
len_tmp = len(queue_keys)
j_found = 1
for j in range(0, len_tmp):
found = False
# print queue_keys[j]
m = j_found
while m < len_tmp:
if (queue_keys[j] - queue_keys[m]) < 10000:
m += 1
else:
j_found = m
found = True
break
if found is True:
t1 = queue[queue_keys[j]]
t2 = queue[queue_keys[j_found]]
dura = t1[0] - t2[0]
dura = float(dura)
if dura == 0:
continue
ll = (3, 4, 7, 8)
for k in ll:
t1[k] = float(t1[k] - t2[k]) / dura
t1[k] = round(t1[k], 6)
queue[queue_keys[j]] = t1
# print queue[queue_keys[j]]
# print tmm.time() - last_time, "2"
# last_time = tmm.time()
# processed_lines = sorted(processed_lines)
# print tmm.time() - last_time, "3"
# last_time = tmm.time()
# # for i in lines:
# write_record.writerows(processed_lines)
# print tmm.time() - last_time, "4"
print tmm.time() - last_time, "5"
last_time = tmm.time()
survey_keys = survey.keys()
queue_keys = queue.keys()
survey_keys = sorted(survey_keys)
queue_keys = sorted(queue_keys)
survey_2_queue = get_dic(survey_keys, queue_keys)
survey_keys = survey_2_queue.keys()
survey_keys = sorted(survey_keys)
print "6"
for key in survey_keys:
qk = survey_2_queue[key]
ttmp = queue[qk]
(ttime, mac_addr, queue_id, bytes1, packets, qlen,
backlog, drops, requeues, overlimits, category) = ttmp
ttmp = []
exit()
tlist = sorted(queue.values() + survey.values())
write_record.writerows(tlist)
print tmm.time() - last_time, "6"
last_time = tmm.time()
if fil2:
fil2.close()
if fil1:
fil1.close()
del lines
gc.collect()
gc.collect()
# 1502998137819124, 04:a1:51:96:ca:83, 0, 3602983303, 2552333, 0, 15969,
# 8801, 7075, 0
|
"""Transport handlers."""
from django.db.models import signals
from django.dispatch import receiver
from modoboa.core import signals as core_signals
from . import backends, models, postfix_maps
@receiver(core_signals.register_postfix_maps)
def register_postfix_maps(sender, **kwargs):
    """Expose this app's postfix map classes to the modoboa core."""
    maps = [postfix_maps.TransportMap]
    return maps
@receiver(signals.pre_save, sender=models.Transport)
def serialize_transport_settings(sender, instance, **kwargs):
    """Let the transport's backend serialize its settings before saving."""
    backend = backends.manager.get_backend(instance.service)
    if not backend:
        # no backend registered for this service: save the instance untouched
        return
    backend.serialize(instance)
|
# First idea was a difference array + coordinate compression.
# The hint reveals the height values are tiny, so heights can be enumerated directly,
# then binary-search over the widths (bisect_left beats a hand-written search).
class Solution:
    def countRectangles(self, rectangles: List[List[int]], points: List[List[int]]) -> List[int]:
        """For each point (px, py), count rectangles (w, h) with w >= px and h >= py.

        Heights are bounded by 100, so widths are bucketed per height and each
        bucket sorted once; every query then scans at most 101 heights and
        binary-searches each bucket: O((n + 101 * q) log n) overall.
        """
        widths_by_height = [[] for _ in range(101)]
        for w, h in rectangles:
            widths_by_height[h].append(w)
        for bucket in widths_by_height:
            bucket.sort()
        res = []
        for px, py in points:
            cnt = 0
            for h in range(py, 101):
                bucket = widths_by_height[h]
                # first index with width >= px; everything after it counts
                cnt += len(bucket) - bisect_left(bucket, px)
            res.append(cnt)
        return res
|
# Read a string and print how many vowels it contains.
palavra = input()
# casefold() so uppercase vowels are counted too (the original missed them);
# accented vowels are still not counted -- TODO confirm whether they should be.
contador = 0
for letra in palavra.casefold():
    if letra in 'aeiou':
        contador += 1
print(contador)
|
import random
import numpy as np
import torch
from torchvision import transforms as T
from torchvision.transforms import functional as F
def pad_if_smaller(img, size, fill=0):
    """Pad *img* on the right/bottom so both sides are at least *size* long."""
    if min(img.size) >= size:
        return img
    width, height = img.size
    pad_right = max(size - width, 0)
    pad_bottom = max(size - height, 0)
    return F.pad(img, (0, 0, pad_right, pad_bottom), fill=fill)
class Compose:
    """Chain several (image, target) transforms into a single callable."""

    def __init__(self, transforms):
        self.transforms = transforms

    def __call__(self, image, target):
        # apply each transform in order, threading both values through
        for transform in self.transforms:
            image, target = transform(image, target)
        return image, target
class RandomResize:
    """Resize both tensors to a size drawn uniformly from [min_size, max_size]."""

    def __init__(self, min_size, max_size=None):
        self.min_size = min_size
        # a single argument means a fixed size
        self.max_size = min_size if max_size is None else max_size

    def __call__(self, image, target):
        size = random.randint(self.min_size, self.max_size)
        image = F.resize(image, size, antialias=True)
        # nearest-neighbour keeps label values intact for the target
        target = F.resize(target, size, interpolation=T.InterpolationMode.NEAREST)
        return image, target
class RandomHorizontalFlip:
    """Flip image and target left-right with probability *flip_prob*."""

    def __init__(self, flip_prob):
        self.flip_prob = flip_prob

    def __call__(self, image, target):
        if random.random() >= self.flip_prob:
            return image, target
        return F.hflip(image), F.hflip(target)
class RandomCrop:
    """Randomly crop a square of side *size* from image and target."""

    def __init__(self, size):
        self.size = size

    def __call__(self, image, target):
        # pad first so a full-size crop always fits; the target is filled
        # with 255 -- presumably the ignore index, TODO confirm
        image = pad_if_smaller(image, self.size)
        target = pad_if_smaller(target, self.size, fill=255)
        params = T.RandomCrop.get_params(image, (self.size, self.size))
        return F.crop(image, *params), F.crop(target, *params)
class CenterCrop:
    """Center-crop image and target to *size*."""

    def __init__(self, size):
        self.size = size

    def __call__(self, image, target):
        return F.center_crop(image, self.size), F.center_crop(target, self.size)
class PILToTensor:
    """Convert the PIL image to a tensor; the target becomes an int64 tensor."""

    def __call__(self, image, target):
        converted = F.pil_to_tensor(image)
        labels = torch.as_tensor(np.array(target), dtype=torch.int64)
        return converted, labels
class ToDtype:
    """Cast the image to *dtype*; optionally rescale values while casting."""

    def __init__(self, dtype, scale=False):
        self.dtype = dtype
        self.scale = scale

    def __call__(self, image, target):
        if self.scale:
            # convert_image_dtype rescales the value range along with the cast
            return F.convert_image_dtype(image, self.dtype), target
        return image.to(dtype=self.dtype), target
class Normalize:
    """Normalize the image with the given mean/std; target passes through."""

    def __init__(self, mean, std):
        self.mean = mean
        self.std = std

    def __call__(self, image, target):
        normalized = F.normalize(image, mean=self.mean, std=self.std)
        return normalized, target
|
"""TF1 eager-mode linear regression, fit to y = x by hand-rolled gradient descent."""
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
import tensorflow as tf
# Switch from graph mode to eager execution (TF1-era API).
tf.enable_eager_execution()
# x_data and y_data are identical, so the ideal fit is W=1, b=0.
x_data = [1, 2, 3, 4, 5]
y_data = [1, 2, 3, 4, 5]
# Arbitrary initial parameter values.
W = tf.Variable(2.9)
b = tf.Variable(0.5)
# Hypothesis: a linear model.
hypothesis = W * x_data + b
# tf.reduce_mean() averages away one dimension.
# tf.square() squares its argument.
cost = tf.reduce_mean(tf.square(hypothesis - y_data))
learning_rate = 0.01
# Minimize cost(W, b) with gradient descent (walk down the slope).
# GradientTape records how the variables (W, b) are used in the forward pass.
# The loop prints W and b every 10 steps to show them converging.
for i in range(100):
    with tf.GradientTape() as tape:
        hypothesis = W * x_data + b
        cost = tf.reduce_mean(tf.square(hypothesis - y_data))
    # tape.gradient returns the gradients (derivatives) of cost w.r.t. W and b.
    W_grad, b_grad = tape.gradient(cost, [W, b])
    # A.assign_sub(B) : A -= B
    # learning_rate scales how much of the gradient is applied per step.
    W.assign_sub(learning_rate * W_grad)
    b.assign_sub(learning_rate * b_grad)
    if i% 10 == 0:
        print("{:5}|{:10.4f}|{:10.4}|{:10.6f}".format(i, W.numpy(),b.numpy(), cost))
print()
# predict at x = 5 and x = 2.5
print(W * 5 + b)
print(W * 2.5 + b)
|
import logging
from flask import Blueprint
from flask import flash
from flask import redirect
from flask import render_template
from flask import request
from flask import url_for
from flask_login import current_user, login_required
from sqlalchemy import asc
from waitlist.utility import outgate
from waitlist.base import db
from waitlist.blueprints.settings import add_menu_entry
from waitlist.permissions import perm_manager
from waitlist.storage.database import Ban, Whitelist, Character, CharacterTypes
from waitlist.utility.eve_id_utils import get_character_by_name, get_char_corp_all_name_by_id_and_type
from waitlist.utility.utils import get_info_from_ban
from flask_babel import lazy_gettext, gettext
from waitlist.utility.outgate.exceptions import ApiException
# Blueprint for the ban/whitelist settings pages.
bp = Blueprint('bans', __name__)
logger = logging.getLogger(__name__)
# Permissions used by the views below.
perm_manager.define_permission('bans_edit')
perm_manager.define_permission('bans_edit_multiple')
perm_manager.define_permission('bans_custom_name')
perm_manager.define_permission('bans_custom_reason')
# Cached permission handles checked when a line carries its own admin/reason.
perm_custom_name = perm_manager.get_permission('bans_custom_name')
perm_custom_reason = perm_manager.get_permission('bans_custom_reason')
@bp.route("/", methods=["GET"])
@login_required
@perm_manager.require('bans_edit')
def bans():
db_bans = db.session.query(Ban).all()
return render_template("settings/bans.html", bans=db_bans)
@bp.route("/bans_change", methods=["POST"])
@login_required
@perm_manager.require('bans_edit_multiple')
def bans_change():
action = request.form['change'] # ban, unban
target = request.form['target'] # name of target
reason = ''
if action == "ban":
reason = request.form['reason'] # reason for ban
targets = target.split("\n")
try:
# pre-cache names for a faster api to not hit request limit
names_to_cache = []
for line in targets:
line = line.strip()
ban_name, _, ban_admin = get_info_from_ban(line)
names_to_cache.append(ban_name)
if ban_admin is not None:
names_to_cache.append(ban_admin)
if action == "ban":
for target in targets:
target = target.strip()
ban_name, ban_reason, ban_admin = get_info_from_ban(target)
if ban_reason is None:
ban_reason = reason
if ban_admin is None:
ban_admin = current_user.get_eve_name()
logger.info("Banning %s for %s by %s as %s.", ban_name, ban_reason, ban_admin, current_user.username)
ban_id, ban_type = outgate.character.get_char_corp_all_id_by_name(ban_name)
admin_char = get_character_by_name(ban_admin)
if ban_id is None:
logger.error("Did not find ban target %s", ban_name)
flash(gettext("Could not find Character %(ban_name)s",
ban_name=ban_name), "danger")
continue
ban_name = get_char_corp_all_name_by_id_and_type(ban_id, CharacterTypes[ban_type])
admin_id = admin_char.get_eve_id()
if ban_id is None or admin_id is None:
logger.error("Failed to correctly parse: %", target)
flash(gettext("Failed to correctly parse %(target)s",
target=target), "danger")
continue
# check if ban already there
if db.session.query(Ban).filter(Ban.id == ban_id).count() == 0:
# ban him
new_ban = Ban()
new_ban.id = ban_id
new_ban.reason = ban_reason
new_ban.admin = admin_id
new_ban.targetType = CharacterTypes[ban_type]
new_ban.name = ban_name
db.session.add(new_ban)
db.session.commit()
except ApiException as e:
flash(gettext("Could not execute action, ApiException %(ex)s", ex=e),
'danger')
return redirect(url_for(".bans"))
@bp.route("/bans_change_single", methods=["POST"])
@login_required
@perm_manager.require('bans_edit')
def bans_change_single():
try:
action = request.form['change'] # ban, unban
target = request.form['target'] # name of target
target = target.strip()
ban_admin = current_user.get_eve_name()
if action == "ban":
reason = request.form['reason'] # reason for ban
ban_id, ban_type = outgate.character.get_char_corp_all_id_by_name(target)
admin_char = get_character_by_name(ban_admin)
logger.info("Banning %s for %s as %s.", ban_name, reason, current_user.username)
if ban_id is None:
logger.error("Did not find ban target %s", ban_name)
flash(gettext("Could not find Character %(name)s", name=target),
"danger")
return
admin_id = admin_char.get_eve_id()
if ban_id is None or admin_id is None:
logger.error("Failed to correctly parse: %", target)
flash(gettext("Failed to correctly parse %(target)s",
target=target),
"danger")
return
ban_name = get_char_corp_all_name_by_id_and_type(ban_id, CharacterTypes[ban_type])
# check if ban already there
if db.session.query(Ban).filter(Ban.id == eve_id).count() == 0:
# ban him
new_ban = Ban()
new_ban.id = ban_id
new_ban.reason = reason
new_ban.admin = admin_id
new_ban.targetType = CharacterTypes[ban_type]
new_ban.name = ban_name
db.session.add(new_ban)
db.session.commit()
elif action == "unban":
ban_id = int(target)
logger.info("%s is unbanning %s", current_user.username, target)
if eve_id is None:
flash(gettext("Character/Corp/Alliance %(target)s does not exist!", target=target), 'danger')
else:
# check that there is a ban
if db.session.query(Ban).filter(Ban.id == ban_id ).count() > 0:
db.session.query(Ban).filter(Ban.id == ban_id).delete()
db.session.commit()
except ApiException as e:
flash(gettext("Could not execute action, ApiException %(ex)s", ex=e),
'danger')
return redirect(url_for(".bans"))
@bp.route("/bans_unban", methods=["POST"])
@login_required
@perm_manager.require('bans_edit')
def bans_unban_single():
target = request.form['target'] # name of target
target = target.strip()
logger.info("%s is unbanning %s", current_user.username, target)
try:
eve_id = int(target)
if eve_id is None:
flash(gettext("Character/Corp/Alliance %(target)s does not exist!",
target=target), 'danger')
else:
# check that there is a ban
if db.session.query(Ban).filter(Ban.id == eve_id).count() > 0:
db.session.query(Ban).filter(Ban.id == eve_id).delete()
db.session.commit()
except ApiException as e:
flash(gettext("Could not execute action, ApiException %(ex)s", ex=e),
'danger')
return redirect(url_for(".bans"))
@bp.route("/whitelist", methods=["GET"])
@login_required
@perm_manager.require('bans_edit')
def whitelist():
whitelistings = db.session.query(Whitelist).all()
return render_template("settings/whitelist.html", wl=whitelistings)
@bp.route("/whitelist_change", methods=["POST"])
@login_required
@perm_manager.require('bans_edit_multiple')
def whitelist_change():
action: str = request.form['change'] # whitelist, unwhitelist
target: str = request.form['target'] # name of target
reason = ''
if action == "whitelist":
reason: str = request.form['reason'] # reason for whitelist
targets = target.split("\n")
try:
if action == "whitelist":
for target in targets:
whitelist_by_name(target, reason)
except ApiException as e:
flash(gettext("Could not execute action, ApiException %(ex)s", ex=e),
'danger')
return redirect(url_for(".whitelist"))
def whitelist_by_name(whitelist_info, reason=""):
    """Whitelist a character/corp/alliance.

    The free-floating module-level docstring that used to sit above this
    function (and documented a nonexistent ``ban_info`` parameter) is
    folded in here.

    :param whitelist_info: an eve character name, or a line copied from the
        in-game chat window ("<name> - <reason> - <admin>" ban format)
    :param reason: fallback reason used when the line carries none or the
        current user lacks the custom-reason permission
    """
    target = whitelist_info.strip()
    wl_name, wl_reason, wl_admin = get_info_from_ban(target)
    if wl_reason is None or not perm_custom_reason.can():
        wl_reason = reason
    if wl_admin is None or not perm_custom_name.can():
        wl_admin = current_user.get_eve_name()
    logger.info("Whitelisting %s for %s by %s as %s.", wl_name, wl_reason, wl_admin, current_user.username)
    eve_id, ban_type = outgate.character.get_char_corp_all_id_by_name(wl_name)
    admin_char = get_character_by_name(wl_admin)
    if eve_id is None:
        logger.error("Did not find whitelist target %s", wl_name)
        flash(gettext("Could not find Character %(wl_name)s for whitelisting",
                      wl_name=wl_name), "danger")
        return
    admin_id = admin_char.get_eve_id()
    if admin_id is None:
        # BUGFIX: format string was "parse: %", an invalid logging
        # placeholder that broke the log call itself
        logger.error("Failed to correctly parse: %s", target)
        flash(gettext("Failed to correctly parse %(target)s", target=target),
              "danger")
        return
    target_name = get_char_corp_all_name_by_id_and_type(eve_id, CharacterTypes[ban_type])
    # only add an entry if this id is not whitelisted yet
    if db.session.query(Whitelist).filter(Whitelist.characterID == eve_id).count() == 0:
        new_whitelist = Whitelist()
        new_whitelist.characterID = eve_id
        new_whitelist.reason = wl_reason
        # NOTE(review): Ban stores the admin *id* while this stores the
        # Character object -- confirm Whitelist.admin expects an object.
        new_whitelist.admin = admin_char
        new_whitelist.targetType = CharacterTypes[ban_type]
        new_whitelist.name = target_name
        db.session.add(new_whitelist)
        db.session.commit()
def unwhitelist_by_id(eve_id: int) -> None:
    """Delete the whitelist entry for *eve_id*, if one exists."""
    entry_query = db.session.query(Whitelist).filter(Whitelist.characterID == eve_id)
    if entry_query.count() > 0:
        entry_query.delete()
        db.session.commit()
@bp.route("/whitelist_change_single", methods=["POST"])
@login_required
@perm_manager.require('bans_edit')
def whitelist_change_single():
action = request.form['change'] # whitelist, unwhitelist
target = request.form['target'] # name of target
target = target.strip()
try:
if action == "whitelist":
reason = request.form['reason'] # reason for ban
whitelist_by_name(target, reason)
elif action == "unwhitelist":
target = int(target)
unwhitelist_by_id(target)
except ApiException as e:
flash(gettext("Could not execute action, ApiException %(ex)s", ex=e),
'danger')
return redirect(url_for(".whitelist"))
@bp.route("/whitelist_unlist", methods=["POST"])
@login_required
@perm_manager.require('bans_edit')
def whitelist_unlist():
target = request.form['target'] # name of target
target = target.strip()
try:
target = int(target)
unwhitelist_by_id(target)
except ApiException as e:
flash(gettext("Could not execute action, ApiException %(ex)s", ex=e),
'danger')
return redirect(url_for(".whitelist"))
# Register both pages in the settings menu, gated on the bans_edit permission.
add_menu_entry('bans.bans', lazy_gettext('Bans'), perm_manager.get_permission('bans_edit').can)
add_menu_entry('bans.whitelist', lazy_gettext('Whitelist'), perm_manager.get_permission('bans_edit').can)
|
import cPickle as pickle
import pb_Models as Models
import lasagne
import theano
import numpy
import os
from learnedactivations import BatchNormalizationLayer
cur_dir = os.path.dirname(os.path.realpath(__file__))
def set_batchnorm_params(nn_model, eparams_filename):
    """Load pickled batch-norm statistics and push them into the model.

    nn_model - lasagne network (output layer)
    eparams_filename - pickle file holding mean/variance pairs, in layer order
    """
    with open(eparams_filename, 'rb') as f:
        params = pickle.load(f)
    ind = 0
    for layer in lasagne.layers.get_all_layers(nn_model):
        if not isinstance(layer, BatchNormalizationLayer):
            continue
        # each BN layer consumes two consecutive entries: mean, then variance
        layer.mean_inference.set_value(params[ind])
        layer.variance_inference.set_value(params[ind + 1])
        ind += 2
def describe_network(output_layer):
    """ Given a lasagne model, print to screen a text description of it """
    all_layers = lasagne.layers.get_all_layers(output_layer)
    weights = lasagne.layers.get_all_param_values(output_layer)
    # keep every other param value: presumably the W of each (W, b)
    # pair -- TODO confirm against the layer param ordering
    weights = weights[0::2]
    ind = 0
    for layer in all_layers:
        # turn "<class 'lasagne.layers....DenseLayer'>" into "DenseLayer"
        ss = str(type(layer))
        ss = ss.split(' ')[1]
        ss = ss.replace("'",'')
        ss = ss.replace(">",'')
        ss = ss.split('.')[-1]
        tot = "ind" + ' ' + str(ind) + ' ' + ss + ' ' + str(lasagne.layers.get_output_shape(layer))
        if hasattr(layer,'nonlinearity'):
            # extract a short nonlinearity name from its repr
            nonlin = str(layer.nonlinearity)
            if 'object' in nonlin:
                nonlin = nonlin.split(' ')[0].split('<lasagne.nonlinearities.')[1]
            else:
                nonlin = nonlin.split(' ')[1]
            tot = tot + ' ' + nonlin
        cur_params = layer.get_params()
        # append the next weight shape for layers that own parameters
        if len(cur_params) != 0 and len(weights) != 0:
            tot = tot + ' ws=' + str(weights[0].shape)
            weights.pop(0)
        print tot
        ind = ind+1
def get_descriptors(nn_model, theano_func, patches, batch_size, patch_size):
    """ Given a lasgne model and patches, return the calculated descriptors as
    outputed by the DNN
    nn_model - lasagne DNN model, as generated by 'get_net_and_funcs'
    theano_func - a theano function used to extrat the descriptors, as
    generated by 'get_net_and_funcs'
    patches - a <h, w, patch_size, patch_size> tensor
    batch_size - the batch size
    patch_size - the patch size
    Returns:
    a <h, w, 1, desc_size> tensor containing the descriptors per pixel """
    h,w = patches.shape[:2]
    # flatten the spatial grid into a batch of single-channel patches
    patches = patches.reshape(-1, 1, patch_size, patch_size)
    # Python 2 integer division: number of *full* batches
    num_batches = patches.shape[0] / batch_size
    descs = []
    # +1 iteration so the final partial batch is processed too
    for b in xrange(num_batches+1):
        if b % 200 == 0:
            print 'finished processing', b,' batches out of', num_batches
        min_slice = b* batch_size
        max_slice = min((b+1) * batch_size, patches.shape[0])
        if min_slice == max_slice:
            # no leftover patches for the extra iteration
            break
        batch_slice = slice(min_slice, max_slice)
        cur_patches = patches[batch_slice]
        cur_descs = theano_func(cur_patches)[0]
        descs.append(cur_descs.squeeze())
    res = numpy.vstack(descs)
    # back to a per-pixel layout: <h, w, 1, desc_size>
    res = res.reshape(h, w, 1, -1)
    return res
def get_net_and_funcs(net_name, patch_size, batch_size, weights_filename, eparams_filename):
    """ Creates a Lasagne network + theano function given a model name
    net_name - one of the supported models for KITTI2012, KITTI2015 and MPI-Sintel
    batch_size - the batch size
    weights_filename - the network's weights filename
    eparams_filename - a file containing the BN parameters
    Returns:
    a Lasagne network and a theano function to create descriptors """
    print 'Creating NN', net_name
    # look up the model constructor by name in the registry
    nn_model = Models.all_models[net_name](patch_size, batch_size)
    print 'Describing network:'
    describe_network(nn_model)
    print 'Loading and setting weights from', weights_filename
    with open(weights_filename, 'rb') as f:
        weights = pickle.load(f)
    lasagne.layers.set_all_param_values(nn_model, weights)
    print 'Loading and setting batch_norm params from', eparams_filename
    set_batchnorm_params(nn_model, eparams_filename)
    # symbolic input batch; deterministic=True gives an inference-mode pass
    Xb = theano.tensor.tensor4('x')
    NN_output = lasagne.layers.get_output(nn_model, Xb, deterministic=True)
    theano_func = theano.function(
        inputs = [Xb],
        outputs = [NN_output],
        )
    return nn_model, theano_func
|
from .Commands import *
from .CmdPatterns import *
from .CommandManager import CommandManager
|
from urllib.request import urlopen
# HTTPError/URLError are defined in urllib.error; importing HTTPError from
# urllib.request only worked because request re-exports it.
from urllib.error import HTTPError, URLError
from bs4 import BeautifulSoup

try:
    html = urlopen("http://www.pythonscraping.com/pages/error.html")
except HTTPError as e:
    # server returned an HTTP error status
    print(e)
except URLError as e:
    # DNS failure / refused connection previously crashed the script
    print(e)
else:
    bsobj = BeautifulSoup(html.read(), "html.parser")
    print(bsobj.h1)
|
"""Treadmill runtime framework.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import errno
import glob
import logging
import os
import random
import socket
import stat
import tarfile
import six
from treadmill import appcfg
from treadmill import exc
from treadmill import fs
from treadmill import utils
from treadmill import plugin_manager
from treadmill.appcfg import abort as app_abort
from treadmill.appcfg import manifest as app_manifest
# Name of the saved app manifest file inside a container directory.
STATE_JSON = 'state.json'
_LOGGER = logging.getLogger(__name__)
# Total size cap for the archives directory.
_ARCHIVE_LIMIT = utils.size_to_bytes('1G')
# Entry-point namespace that runtime plugins register under.
_RUNTIME_NAMESPACE = 'treadmill.runtime'
if os.name == 'posix':
    # Disable C0413: should be placed at the top of the module.
    from treadmill import iptables # pylint: disable=c0413
    PORT_SPAN = iptables.PORT_SPAN
    PROD_PORT_LOW = iptables.PROD_PORT_LOW
    PROD_PORT_HIGH = iptables.PROD_PORT_HIGH
    NONPROD_PORT_LOW = iptables.NONPROD_PORT_LOW
    NONPROD_PORT_HIGH = iptables.NONPROD_PORT_HIGH
else:
    # non-posix platforms: hard-coded ranges mirroring the iptables constants
    PORT_SPAN = 8192
    PROD_PORT_LOW = 32768
    PROD_PORT_HIGH = PROD_PORT_LOW + PORT_SPAN - 1
    NONPROD_PORT_LOW = PROD_PORT_LOW + PORT_SPAN
    NONPROD_PORT_HIGH = NONPROD_PORT_LOW + PORT_SPAN - 1
def get_runtime_cls(runtime_name):
    """Look up the runtime class registered under *runtime_name*.

    Re-raises KeyError (after logging) when no such plugin exists.
    """
    try:
        return plugin_manager.load(_RUNTIME_NAMESPACE, runtime_name)
    except KeyError:
        _LOGGER.error('Runtime not supported: %s', runtime_name)
        raise
def get_runtime(runtime_name, tm_env, container_dir, param=None):
    """Instantiate the runtime implementation registered as *runtime_name*."""
    cls = get_runtime_cls(runtime_name)
    return cls(tm_env, container_dir, param)
def load_app(container_dir, app_json=STATE_JSON):
    """Load the app manifest from *container_dir* and freeze it to an object.

    Returns None when the manifest file does not exist; any other I/O error
    is re-raised.
    """
    path = os.path.join(container_dir, app_json)
    try:
        manifest = app_manifest.read(path)
        _LOGGER.debug('Manifest: %r', manifest)
        return utils.to_obj(manifest)
    except IOError as err:
        if err.errno != errno.ENOENT:
            raise
    _LOGGER.critical('Manifest file does not exist: %r', path)
    return None
def save_app(manifest, container_dir, app_json=STATE_JSON):
    """Persist *manifest* (with allocated vip and ports) and freeze it.

    Returns the manifest as a namedtuple-style object.
    """
    state_file = os.path.join(container_dir, app_json)

    def _dump(f):
        # stream the JSON out through the safe-write helper
        f.writelines(utils.json_genencode(manifest))

    fs.write_safe(state_file, _dump, mode='w', permission=0o644)
    if os.name == 'posix':
        # make the state file world readable
        os.chmod(
            state_file,
            stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH
        )
    # Freeze the app data into a namedtuple object
    return utils.to_obj(manifest)
def _allocate_sockets(environment, host_ip, sock_type, count):
    """Return a list of `count` socket bound to an ephemeral port.
    """
    # TODO: this should probably be abstracted away
    if environment == 'prod':
        port_pool = six.moves.range(PROD_PORT_LOW, PROD_PORT_HIGH + 1)
    else:
        port_pool = six.moves.range(NONPROD_PORT_LOW, NONPROD_PORT_HIGH + 1)
    # shuffle the whole range so bind attempts spread across the span
    port_pool = random.sample(port_pool, PORT_SPAN)
    # socket objects are closed on GC so we need to return
    # them and expect the caller to keep them around while needed
    sockets = []
    for real_port in port_pool:
        if len(sockets) == count:
            break
        socket_ = socket.socket(socket.AF_INET, sock_type)
        try:
            socket_.bind((host_ip, real_port))
            if sock_type == socket.SOCK_STREAM:
                # NOTE(review): SO_REUSEADDR is set *after* bind(); it usually
                # must be set before bind to have its customary effect --
                # confirm whether this ordering is intentional.
                socket_.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                socket_.listen(0)
        except socket.error as err:
            if err.errno == errno.EADDRINUSE:
                # port already taken: try the next candidate
                continue
            raise
        if six.PY3:
            # We want the sockets to survive an execv
            socket_.set_inheritable(True)
        sockets.append(socket_)
    else:
        # for/else: the pool was exhausted before `count` sockets were bound
        raise exc.ContainerSetupError('{0} < {1}'.format(len(sockets), count),
                                      app_abort.AbortedReason.PORTS)
    return sockets
def _allocate_network_ports_proto(host_ip, manifest, proto, so_type):
    """Allocate ports for named and unnamed endpoints given protocol.

    Mutates *manifest* in place: sets each endpoint's ``real_port`` (and
    ``port`` when it was 0) and replaces the ephemeral port count with the
    list of allocated port numbers.  Returns the bound sockets.
    """
    # how many unnamed (ephemeral) ports this protocol needs
    ephemeral_count = manifest['ephemeral_ports'].get(proto, 0)
    endpoints = [ep for ep in manifest['endpoints']
                 if ep.get('proto', 'tcp') == proto]
    endpoints_count = len(endpoints)
    # one socket per named endpoint plus the requested ephemerals
    sockets = _allocate_sockets(
        manifest['environment'],
        host_ip,
        so_type,
        endpoints_count + ephemeral_count
    )
    for idx, endpoint in enumerate(endpoints):
        sock = sockets[idx]
        endpoint['real_port'] = sock.getsockname()[1]
        # Specifying port 0 tells appmgr that application wants to
        # have same numeric port value in the container and in
        # the public interface.
        #
        # This is needed for applications that advertise ports they
        # listen on to other members of the app/cluster.
        if endpoint['port'] == 0:
            endpoint['port'] = endpoint['real_port']
    # Ephemeral port are the rest of the ports
    manifest['ephemeral_ports'][proto] = [
        sock.getsockname()[1]
        for sock in sockets[endpoints_count:]
    ]
    return sockets
def allocate_network_ports(host_ip, manifest):
    """Allocate ports for named and unnamed endpoints.

    :returns:
        ``list`` of bound sockets
    """
    sockets = _allocate_network_ports_proto(
        host_ip, manifest, 'tcp', socket.SOCK_STREAM)
    sockets += _allocate_network_ports_proto(
        host_ip, manifest, 'udp', socket.SOCK_DGRAM)
    return sockets
def _cleanup_archive_dir(tm_env):
    """Delete oldest archives until the directory is under the size limit."""
    entries = []
    total_size = 0
    for path in glob.glob(os.path.join(tm_env.archives_dir, '*')):
        info = os.stat(path)
        total_size += info.st_size
        entries.append((info.st_mtime, info.st_size, path))
    if total_size <= _ARCHIVE_LIMIT:
        _LOGGER.info('Archive directory below threshold: %s', total_size)
        return
    _LOGGER.info('Archive directory above threshold: %s gt %s',
                 total_size, _ARCHIVE_LIMIT)
    # delete oldest first (sorted by mtime) until back under the limit
    for mtime, size, path in sorted(entries):
        if total_size <= _ARCHIVE_LIMIT:
            break
        total_size -= size
        _LOGGER.info('Unlink old archive %s: ctime: %s, size: %s',
                     path, mtime, size)
        fs.rm_safe(path)
def archive_logs(tm_env, name, container_dir):
    """Archive latest sys and services logs."""
    # trim old archives first so the directory stays under its size cap
    _cleanup_archive_dir(tm_env)
    sys_archive_name = os.path.join(tm_env.archives_dir, name + '.sys.tar.gz')
    app_archive_name = os.path.join(tm_env.archives_dir, name + '.app.tar.gz')
    def _add(archive, filename):
        """Safely add file to archive."""
        try:
            # store paths relative to the container directory
            archive.add(filename, filename[len(container_dir) + 1:])
        except OSError as err:
            if err.errno == errno.ENOENT:
                _LOGGER.warning('File not found: %s', filename)
            else:
                raise
    # system archive: per-service "current" logs, rrd metrics, configs
    with tarfile.open(sys_archive_name, 'w:gz') as f:
        logs = glob.glob(
            os.path.join(container_dir, 'sys', '*', 'data', 'log', 'current'))
        for log in logs:
            _add(f, log)
        metrics = glob.glob(os.path.join(container_dir, '*.rrd'))
        for metric in metrics:
            _add(f, metric)
        yml_cfgs = glob.glob(os.path.join(container_dir, '*.yml'))
        json_cfgs = glob.glob(os.path.join(container_dir, '*.json'))
        for cfg in yml_cfgs + json_cfgs:
            _add(f, cfg)
        _add(f, os.path.join(container_dir, 'log', 'current'))
    # application archive: per-application-service "current" logs
    with tarfile.open(app_archive_name, 'w:gz') as f:
        logs = glob.glob(
            os.path.join(container_dir, 'services', '*', 'data', 'log',
                         'current'))
        for log in logs:
            _add(f, log)
|
"""Production settings (Heroku deployment), layered over the base settings."""
import os
import dj_database_url
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration
from . import *
DEBUG = False
# NOTE(review): a fallback SECRET_KEY committed to source defeats the env
# override -- rotate this key and fail hard when SECRET_KEY is unset.
SECRET_KEY = os.environ.get('SECRET_KEY', '+xz#p9=p*ahiz4l0pnp(lyhb^6gxe^7i^$=#$uj&(bs(v6cg=_')
ALLOWED_HOSTS = [
    'trombinoscoop-2.herokuapp.com',
]
# Serve static files through WhiteNoise.
MIDDLEWARE += ['whitenoise.middleware.WhiteNoiseMiddleware',]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# Database config comes from the DATABASE_URL environment variable.
DATABASES['default'].update(dj_database_url.config())
# Error reporting to Sentry.
sentry_sdk.init(
    dsn="https://1199978d2b974d85b7523150181a2a7c@sentry.io/1291046",
    integrations=[DjangoIntegration()]
)
INSTALLED_APPS += ['django_extensions',]
|
import numpy as np
import pandas as pd
import os
import matplotlib.pyplot as plt
from datetime import datetime
# Load the OAR job sample and derive duration/timestamp columns.
df_data=pd.read_csv("data/oar/job_oar_sample.csv")
# NOTE(review): start - stop yields a *negative* duration; stop - start was
# probably intended.  The next line is also an exact duplicate of this one.
df_data["real_time"]=df_data["start_time"]-df_data["stop_time"]
df_data["real_time"]=df_data["start_time"]-df_data["stop_time"]
df_data["start_time_minute"]=[datetime.fromtimestamp(int(df_data["start_time"][i]))for i in range(len(df_data))]
df_data["stop_time_minute"]=[datetime.fromtimestamp(int(df_data["stop_time"][i]))for i in range(len(df_data))]
# NOTE(review): this is a reference, not a copy -- df_data_old aliases df_data.
df_data_old = df_data
#print(df_data.columns)
# NOTE(review): the groupby result is discarded, so this line has no effect.
df_data.groupby('job_id',as_index=False)
#print(df_data)
for jb in df_data.job_id.unique():
    # NOTE(review): chained indexing assigns into a temporary copy, so this
    # write is silently lost (pandas SettingWithCopy); use .loc instead.
    df_data[df_data['job_id']==jb]["job_user"] = df_data_old[df_data_old['job_id']==jb].iloc[0]
dir= "data/RAPL"
file_list = os.listdir(dir)
data_list=[]
i=0
# Read at most one RAPL csv (i < 1 guard), scale the energy counters, and
# drop non-positive values and >3-sigma outliers.
for files in file_list :
    if i < 1 :
        df=pd.read_csv(dir+"/"+files,sep=",")
        #df["timestamp_minute"] = [datetime.fromtimestamp(int(df["timestamp_minute"][i])*60)for i in range(len(df))]
        df["pp0/package1"] = df["pp0/package1"]/pow(10,7)
        df["pp0/package2"] = df["pp0/package2"]/pow(10,7)
        #df=df[np.abs(df["pp0/package1"] - df["pp0/package1"].mean()) <= (3*df["pp0/package1"].std())]
        #df=df[np.abs(df["pp0/package2"] - df["pp0/package2"].mean()) <= (3*df["pp0/package2"].std())]
        df=df[df["pp0/package1"]>0]
        df=df[df["pp0/package2"]>0]
        df=df[np.abs(df["pp0/package1"] - df["pp0/package1"].mean()) <= (3*df["pp0/package1"].std())]
        df=df[np.abs(df["pp0/package2"] - df["pp0/package2"].mean()) <= (3*df["pp0/package2"].std())]
        df=df.sort_values(by="timestamp_minute",ascending=True)
        data_list.append(df)
    i=i+1
df_energy=pd.concat(data_list,axis=0)
hostnames = df_energy.hostname.unique()
print(df_energy)
# Plot both CPU package power series per host, on a contiguous minute axis
# with missing samples filled with 0.
for host in hostnames :
    sub_df=df_energy[df_energy["hostname"]==host].reset_index().sort_values(by="timestamp_minute")
    print(sub_df.timestamp_minute.max())
    print(sub_df.timestamp_minute.min())
    sub_df = sub_df.set_index("timestamp_minute")
    sub_df=sub_df.reindex(np.arange(sub_df.index.min(), sub_df.index.max() + 1,1)).fillna(0)
    #df["timestamp_minute"] = [datetime.fromtimestamp(int(df["timestamp_minute"][i])*60)for i in range(len(df))]
    plt.plot(sub_df.index,sub_df["pp0/package1"],label="premier groupe de cpu")
    plt.plot(sub_df.index,sub_df["pp0/package2"],label="deuxième groupe de cpu")
    sub_data_df=df_data[df_data["host"]==host]
    print("================================")
    print(sub_df)
    print(sub_df.describe())
    print("==================================")
    plt.legend()
    plt.title(host)
    plt.show()
|
from django.contrib import admin
from django.urls import path
from django.conf.urls import include, url
from mycontacts import views
# NOTE(review): both patterns are unanchored, so 'accname.*' matches the
# substring anywhere in the path -- consider '^accname' / '^number'.
urlpatterns=[
    url(r'accname.*',views.accname,name='accname'),
    url(r'number.*',views.number,name='number'),
]
|
from spack import *
# Spack package for the DIRE parton shower (built against pythia8).
# No class docstring on purpose: Spack treats it as the package description.
class Dire(Package):
    url = "https://dire.gitlab.io/Downloads/DIRE-2.002.tar.gz"

    version('2.002', sha256='7fba480bee785ddacd76446190df766d74e61a3c5969f362b8deace7d3fed8c1')

    depends_on('pythia8')

    def install(self, spec, prefix):
        # Configure against the pythia8 dependency, then build and install.
        configure('--prefix=%s' % prefix,
                  '--with-pythia8=%s' % spec['pythia8'].prefix,
                  '--enable-shared')
        make('VERBOSE=1')
        # dire-config emits "-Wl,-rpath <dir>"; rewrite the space to the
        # comma form so downstream link lines stay intact.
        filter_file('-Wl,-rpath ', '-Wl,-rpath,', 'bin/dire-config')
        make('install')
|
'''
Task:
Your job here is to create a function that will take three parameters,
fmt, nbr and start, and create an array of nbr elements formatted according
to fmt with the starting index start. fmt will have <index_no> inserted at
various locations; this is where the file index number goes in each file.
Description of edge cases:
If nbr is less than or equal to 0, or not whole, return an empty array.
If fmt does not contain '<index_no>', just return an array with nbr elements
that are all equal to fmt.
If start is not an integer, return an empty array.
What each parameter looks like:
type(frm) #=> str
: "text_to_stay_constant_from_file_to_file <index_no>"
type(nbr) #=> int
: number_of_files
type(start) #=> int
: index_no_of_first_file
type(name_file(frm, nbr, start)) #=> list
Some examples:
name_file("IMG <index_no>", 4, 1)
#=> ["IMG 1", "IMG 2", "IMG 3", "IMG 4"])
name_file("image #<index_no>.jpg", 3, 7)
#=> ["image #7.jpg", "image #8.jpg", "image #9.jpg"]
name_file("#<index_no> #<index_no>", 3, -2)
#=> ["#-2 #-2", "#-1 #-1", "#0 #0"]
'''
def name_file(fmt, nbr, start):
    """Generate *nbr* file names from template *fmt*, numbering from *start*.

    Every occurrence of '<index_no>' in *fmt* is replaced by the running
    index.  Returns [] when *nbr* is not a positive whole number or *start*
    is not an integer.  (The original raised TypeError on whole-float nbr --
    range(3.0) -- and on non-numeric start, instead of returning [], and
    accepted whole-float start, producing e.g. "IMG 2.0".)
    """
    if not isinstance(start, int):
        return []
    whole = isinstance(nbr, int) or (isinstance(nbr, float) and nbr.is_integer())
    if not whole or nbr <= 0:
        return []
    return [fmt.replace('<index_no>', str(start + i)) for i in range(int(nbr))]

# example: name_file('<file_no> number <index_no>', 2, -1)
# returns ["<file_no> number -1", "<file_no> number 0"]
|
def reversearr1(arr):
    """Return a new list holding arr's elements in reverse order."""
    revarr = [0] * len(arr)
    last = len(arr) - 1
    for i, value in enumerate(arr):
        revarr[last - i] = value
    return revarr

# time complexity: O(N)
# space complexity: O(N)
def reversearr(arr):
    """Reverse arr in place by swapping the ends toward the middle; returns arr."""
    lo, hi = 0, len(arr) - 1
    while lo < hi:
        # tuple assignment swaps without a temp variable
        arr[lo], arr[hi] = arr[hi], arr[lo]
        lo += 1
        hi -= 1
    return arr
def swap(arr, s, e):
    """Exchange arr[s] and arr[e] in place."""
    arr[s], arr[e] = arr[e], arr[s]

# time complexity: O(N/2) => O(N)  (for the reversal loop that calls this)
# space complexity: O(1)
# Demo: both implementations print [5, 4, 3, 2, 1].
print(reversearr([1, 2, 3, 4, 5]))
print(reversearr1([1, 2, 3, 4, 5]))
|
class Tweet:
    """Plain data holder for a tweet and its sentiment score."""

    def __init__(self, id, date, text, score):
        # `id` shadows the builtin, but renaming it would change the
        # constructor's keyword interface for existing callers.
        self.id, self.date, self.text, self.score = id, date, text, score
|
from __future__ import division
import math
import re
import matplotlib.pyplot as plt
import numpy as np
from scipy.io import wavfile
from scipy.signal import butter, lfilter
#----------------------------------------------------------------------------------------------------------------------#
def moving_average(interval, window_size):
    """Smooth *interval* with a length-*window_size* box filter ('same'-length output)."""
    kernel = np.full(int(window_size), 1.0 / float(window_size))
    return np.convolve(interval, kernel, 'same')
#----------------------------------------------------------------------------------------------------------------------#
def butter_bandpass(lowcut, highcut, fs, order=5):
    """Design a Butterworth band-pass filter; returns the (b, a) coefficients."""
    nyquist = 0.5 * fs
    # butter() expects band edges normalized to the Nyquist frequency
    band = [lowcut / nyquist, highcut / nyquist]
    return butter(order, band, btype='band')
#----------------------------------------------------------------------------------------------------------------------#
def butter_bandpass_filter(data, lowcut, highcut, fs, order=5):
    """Band-pass *data* between lowcut and highcut Hz.

    Designs the filter with butter_bandpass and applies it causally
    (zero initial state) with scipy.signal.lfilter.
    """
    numer, denom = butter_bandpass(lowcut, highcut, fs, order=order)
    return lfilter(numer, denom, data)
#----------------------------------------------------------------------------------------------------------------------#
def filter_bank(o_data, low_pass, high_pass, fs, order_of_filter, window_dur, hop_dur):
    """Band-pass *o_data* to [low_pass, high_pass] Hz and compute its
    frame-wise short-term energy.

    window_dur / hop_dur are in milliseconds.  Returns a tuple of
    (normalised short-term energy list, zero-padded filtered signal).
    """
    filtered = butter_bandpass_filter(o_data, low_pass, high_pass, fs, order_of_filter)
    win_len = int(window_dur * fs * 0.001)           # window length in samples
    hop_len = int(hop_dur * fs * 0.001)              # hop length in samples
    window = np.hanning(win_len)                     # Hanning analysis window
    n_frames = int(math.ceil(len(filtered) / (float(hop_len))))
    # Pad with zeros so the final frame can be taken at full length.
    filtered = np.concatenate((filtered, np.zeros(win_len)))
    energies = []
    for f in range(n_frames):
        frame = filtered[f * hop_len:f * hop_len + win_len] * window
        energies.append(sum(frame ** 2))             # short-term energy of this frame
    peak_energy = max(energies)
    energies = [e / peak_energy for e in energies]   # normalise curve to [0, 1]
    return energies, filtered
#----------------------------------------------------------------------------------------------------------------------#
# ---- Configuration: source audio, TextGrid annotation paths, frame parameters ----
file_no = '17'
audio_file ='F:\Projects\Active Projects\Project Intern_IITB\Vowel Evaluation PE V6\Analyze\Vowel_Evaluation_V6_Test_7\\' + file_no + '.wav'
textgridFA = 'F:\Projects\Active Projects\Project Intern_IITB\Vowel Evaluation PE V6\Analyze\Vowel_Evaluation_V6_Test_7\\' + file_no + 'FA.TextGrid'
textgridPE = 'F:\Projects\Active Projects\Project Intern_IITB\Vowel Evaluation PE V6\Analyze\Vowel_Evaluation_V6_Test_7\\' + file_no + 'PE.TextGrid'
window_dur=50  # analysis window duration in ms
hop_dur = 7  # hop between frames in ms
threshold_smooth = 120  # minimum number of frames before smoothing is applied
#----------------------------------------------------------------------------------------------------------------------#
# st_energy_0: normalised short-term energy curve of the RAW (unfiltered) signal.
fs, data0 = wavfile.read(audio_file) # Reading data from wav file in an array
data0 = data0 / float(2 ** 15) # Normalizing it to [-1,1] range from [-2^15,2^15]
window_size = int(window_dur * fs * 0.001) # Converting window length to samples
hop_size = int(hop_dur * fs * 0.001) # Converting hop length to samples
window_type = np.hanning(window_size) # Window type: Hanning (by default)
no_frames = int(math.ceil(len(data0) / (float(hop_size)))) # Determining the number of frames
zero_array = np.zeros(window_size) # Appending appropriate number of zeros
data0 = np.concatenate((data0, zero_array))
length = len(data0) # Finding length of the actual data
st_energy_0 = []
for i in range(no_frames): # Calculating frame wise short term energy
    frame = data0[i * hop_size:i * hop_size + window_size] * window_type # Multiplying each frame with a hamming window
    st_energy_0.append(sum(frame ** 2)) # Calculating the short term energy
max_st_energy = max(st_energy_0) # Maximum value of Short term energy curve
for i in range(no_frames):
    st_energy_0[i] = st_energy_0[i] / max_st_energy # Normalizing the curve
#----------------------------------------------------------------------------------------------------------------------#
# st_energy_1: same energy computation, but on a 300-2500 Hz band-passed copy.
fs, data1 = wavfile.read(audio_file) # Reading data from wav file in an array
data1 = data1 / float(2 ** 15) # Normalizing it to [-1,1] range from [-2^15,2^15]
data1 = butter_bandpass_filter(data1, 300, 2500, fs, order=6)
window_size = int(window_dur * fs * 0.001) # Converting window length to samples
hop_size = int(hop_dur * fs * 0.001) # Converting hop length to samples
window_type = np.hanning(window_size) # Window type: Hanning (by default)
no_frames = int(math.ceil(len(data1) / (float(hop_size)))) # Determining the number of frames
zero_array = np.zeros(window_size) # Appending appropriate number of zeros
data1 = np.concatenate((data1, zero_array))
st_energy_1 = []
for i in range(no_frames): # Calculating frame wise short term energy
    frame = data1[i * hop_size:i * hop_size + window_size] * window_type # Multiplying each frame with a hamming window
    st_energy_1.append(sum(frame ** 2)) # Calculating the short term energy
max_st_energy = max(st_energy_1) # Maximum value of Short term energy curve
for i in range(no_frames):
    st_energy_1[i] = st_energy_1[i] / max_st_energy # Normalizing the curve
#----------------------------------------------------------------------------------------------------------------------#
# st_energy_2: band-passed energy curve actually used for peak picking, plus a
# background-noise energy estimate taken from the first 800 samples.
fs, data2 = wavfile.read(audio_file) # Reading data from wav file in an array
data2 = data2 / float(2 ** 15) # Normalizing it to [-1,1] range from [-2^15,2^15]
data2 = butter_bandpass_filter(data2, 300, 2500, fs, order=6)
window_size = int(window_dur * fs * 0.001) # Converting window length to samples
hop_size = int(hop_dur * fs * 0.001) # Converting hop length to samples
window_type = np.hanning(window_size) # Window type: Hanning (by default)
no_frames = int(math.ceil(len(data2) / (float(hop_size)))) # Determining the number of frames
zero_array = np.zeros(window_size) # Appending appropriate number of zeros
data2 = np.concatenate((data2, zero_array))
x_values = np.arange(0, len(data2), 1) / float(fs)
st_energy_2 = []
for i in range(no_frames): # Calculating frame wise short term energy
    frame = data2[i * hop_size:i * hop_size + window_size] * window_type # Multiplying each frame with a hamming window
    st_energy_2.append(sum(frame ** 2)) # Calculating the short term energy
max_st_energy = max(st_energy_2) # Maximum value of Short term energy curve
for i in range(no_frames):
    st_energy_2[i] = st_energy_2[i] / max_st_energy # Normalizing the curve
noise_energy = 0 # Initializing noise energy
energy = [0] * length # Initializing list energy
for bit in range(length):
    energy[bit] = data2[bit] * data2[bit] # Squaring each point of the data to calculate noise energy
for ne in range(0, 800):
    noise_energy += energy[ne] # Taking the first 800 samples of the original sound file
noise_energy /= 800 # Averaging the square of the first 800 noise samples
#----------------------------------------------------------------------------------------------------------------------#
# Smooth the energy curve with a 20-point moving average, but only when the
# curve is long enough (>= threshold_smooth frames).
if len(st_energy_2) < threshold_smooth:
    st_energy_2 = st_energy_2
else:
    st_energy_2 = moving_average(st_energy_2, 20)
#----------------------------------------------------------------------------------------------------------------------#
# Mark every local maximum of the smoothed curve: `peak` holds the energy at
# each peak position and 0 elsewhere (same length as st_energy_2).
peak = [] # Initializing list
count_of_peaks = 0 # Initializing no of peaks
for p in range(len(st_energy_2)):
    if p == 0: # First element
        if st_energy_2[p] > st_energy_2[p + 1]: # If the first element is greater than the succeeding element it is a peak.
            peak.append(st_energy_2[p]) # Append the energy level of the peak
            count_of_peaks += 1 # Increment count
        else:
            peak.append(0) # Else append a zero
    elif p == len(st_energy_2) - 1: # Last element
        if st_energy_2[p] > st_energy_2[p - 1]: # If the last element is greater than the preceding element it is a peak.
            peak.append(st_energy_2[p]) # Append the energy level of the peak
            count_of_peaks += 1 # Increment count
        else:
            peak.append(0) # Else append a zero
    else: # All the other elements
        if st_energy_2[p] > st_energy_2[p + 1] and st_energy_2[p] > st_energy_2[p - 1]: # If the element is greater than the element preceding and succeeding it, it is a peak.
            peak.append(st_energy_2[p]) # Append the energy level of the peak
            count_of_peaks += 1 # Increment count
        else:
            peak.append(0) # Else append a zero
#----------------------------------------------------------------------------------------------------------------------#
# Adaptive threshold built from noise energy and the mean peak height.
threshold = 0.01 + 0.04 * (noise_energy + (sum(peak) / count_of_peaks)) # The threshold which eliminates minor peaks.
#----------------------------------------------------------------------------------------------------------------------#
# Keep only peaks above the threshold; remember their frame indices.
count_of_peaks_threshold = 0
peak_threshold = []
location_peak = []
for p in range(len(peak)):
    if threshold < peak[p]: # If the peak value is greater than the threshold
        peak_threshold.append(peak[p]) # Append the energy level to a new list
        count_of_peaks_threshold += 1 # Increment count
        location_peak.append(p) # Make note of the location of the peak
    else:
        peak_threshold.append(0) # Else append zero
#----------------------------------------------------------------------------------------------------------------------#
# Mark every local minimum (valley) of the smoothed curve, mirroring the
# peak-detection pass above.
valley = []
count_of_valleys = 0
location_valley = []
for p in range(len(st_energy_2)):
    if p == 0: # For the first element
        if st_energy_2[p] < st_energy_2[p + 1]: # If the first element is lesser than the succeeding element
            valley.append(st_energy_2[p]) # Append the energy level of the valley
            count_of_valleys += 1 # Increment the count
            location_valley.append(p) # Make note of the position of the valley
        else:
            valley.append(0) # Else append zero
    elif p == len(st_energy_2) - 1: # For the last element
        if st_energy_2[p] < st_energy_2[p - 1]: # If the last element is lesser than the preceding element
            valley.append(st_energy_2[p]) # Append the energy level of the valley
            count_of_valleys += 1 # Increment the count
            location_valley.append(p) # Make note of the position of the valley
        else:
            valley.append(0) # Else append zero
    else:
        if st_energy_2[p] < st_energy_2[p + 1] and st_energy_2[p] < st_energy_2[p - 1]: # If the element is lesser than the element preceding and succeeding it
            valley.append(st_energy_2[p]) # Append the energy level of the valley
            count_of_valleys += 1 # Increment the count
            location_valley.append(p) # Make note of the position of the valley
        else:
            valley.append(0) # Else append zero
#----------------------------------------------------------------------------------------------------------------------#
# Pair every retained peak with the valley on each side of it; `ripple` holds
# (left valley, peak, right valley) triples, flattened.
location = location_peak + location_valley # Combing the list of the location of all the peaks and valleys
location.sort() # Sorting it so that each peak has a valley to it's left and right
ripple_valley = []
ripple_peak = []
ripple = []
# What we need is only the valleys to the left and right of the peak. The other valleys are not important
for k in range(len(location_peak)):
    q = location.index(location_peak[k]) # Extracting the location of the peak
    if location_peak[k] == len(peak) - 1: # If the peak is the last element of the short term energy curve
        ripple.append(location[q - 1]) # The location of the valley before the last peak is added
        ripple_valley.append(location[q - 1]) # The location of the valley before the last peak is added
        ripple.append(location[q]) # The location of the peak is added
        ripple_peak.append(location[q]) # The location of the peak is added
        ripple.append(location[q - 1]) # The location of the valley before the last peak is added, as there is no valley after it
        ripple_valley.append(location[q - 1]) # The location of the valley before the last peak is added, as there is no valley after it
    elif location_peak[k] == 0: # If the peak is the first element of the short term energy curve
        ripple.append(location[q + 1]) # The location of the valley after the first peak is added
        ripple_valley.append(location[q + 1]) # The location of the valley after the first peak is added
        ripple.append(location[q]) # The location of the peak is added
        ripple_peak.append(location[q]) # The location of the peak is added
        ripple.append(location[q + 1]) # The location of the valley after the first peak is added, as there is no valley after it
        ripple_valley.append(location[q + 1]) # The location of the valley after the first peak is added, as there is no valley after it
    else: # For every other element
        ripple.append(location[q - 1]) # The location of the valley before the peak is added
        ripple_valley.append(location[q - 1]) # The location of the valley before the peak is added
        ripple.append(location[q]) # The location of the peak is added
        ripple_peak.append(location[q]) # The location of the peak is added
        ripple.append(location[q + 1]) # The location of the valley after the peak is added
        ripple_valley.append(location[q + 1]) # The location of the valley after the peak is added
#----------------------------------------------------------------------------------------------------------------------#
# ripple_value[k] = (peak - right valley) / (peak - left valley): the ratio of
# the fall after a peak to the rise before it.
value_valley =[]
for i in range(len(ripple_valley)):
    value_valley.append(st_energy_2[ripple_valley[i]])
ripple_value = []
for k in range(1, len(ripple), 3):
    ripple_value.append((st_energy_2[ripple[k]] - st_energy_2[ripple[k + 1]]) / (st_energy_2[ripple[k]] - st_energy_2[ripple[k - 1]]))
#----------------------------------------------------------------------------------------------------------------------#
# Prune spurious peaks: merge peak pairs closer than 20 frames (keeping the
# taller one when the ripple ratios indicate a split peak) and drop isolated
# peaks whose ripple ratio exceeds 3.
# NOTE(review): `ripple_value.index(ripple_value[k])` finds the FIRST index of
# that value — identical to `k` only when all ripple values are distinct.
loc = []
for k in range(len(ripple_value)):
    loc.append(location_peak[ripple_value.index(ripple_value[k])])
for k in range(len(ripple_value)):
    if k != len(ripple_value) - 1:
        if location_peak[ripple_value.index(ripple_value[k + 1])] - location_peak[ripple_value.index(ripple_value[k])] < 20:
            if ripple_value[k] > 3.0 and ripple_value[k + 1] < 1.4 or ripple_value[k] > 1.02 and ripple_value[k + 1] < 0.3:
                v1 = st_energy_2[location_peak[ripple_value.index(ripple_value[k])]]
                v2 = st_energy_2[location_peak[ripple_value.index(ripple_value[k + 1])]]
                if v1 >= v2:
                    loc.remove(location_peak[ripple_value.index(ripple_value[k + 1])])
                else:
                    loc.remove(location_peak[ripple_value.index(ripple_value[k])])
    else:
        if ripple_value[k] > 3.0:
            loc.remove(location_peak[ripple_value.index(ripple_value[k])])
#----------------------------------------------------------------------------------------------------------------------#
# Rebuild peak_threshold so that only the retained vowel-centre peaks (the
# frame indices kept in `loc`) carry their energy value; all other frames are
# zeroed.
peak_threshold[:] = []
for j in range(no_frames):
    if j in loc:
        # The original indexed st_energy_2[loc[loc.index(j)]], but
        # loc[loc.index(j)] is simply j — index directly.
        peak_threshold.append(st_energy_2[j])
        # Parenthesised, single-argument print: produces identical output
        # under Python 2 (where the original bare `print` statement was a
        # Python-3 syntax error) and Python 3.
        print('Peak location: %s  Peak value: %s' % (j, st_energy_2[j]))
    else:
        peak_threshold.append(0)
#----------------------------------------------------------------------------------------------------------------------#
text_grid_1 = open(textgridFA, 'r') # Open the FA TextGrid
text_grid_2 = open(textgridPE, 'r') # Open the TextGrid created by the script
data_1 = text_grid_1.read() # Read and assign the content of the FA TextGrid to data_1
data_2 = text_grid_2.read() # Read and assign the content of the created TextGrid to data_2
time_1 = [] # Creating an empty list to record time
time_2 = []
counter = 0
#----------------------------------------------------------------------------------------------------------------------#
# Parse syllable start/end times and labels from the forced-alignment TextGrid
# by slicing FIXED character offsets around each 'text = "' match.
# NOTE(review): this assumes the exact numeric field widths these TextGrid
# files happen to use — fragile against any formatting change; verify against
# the generating tool before reuse.
for m in re.finditer('text = "', data_1):
    if data_1[m.start() - 33] == '=':
        time_1.append(float(
            data_1[m.start() - 32] + data_1[m.start() - 31] + data_1[m.start() - 30] + data_1[m.start() - 29] +
            data_1[m.start() - 28] + data_1[m.start() - 27] + data_1[m.start() - 26]))
        time_1.append(float(
            data_1[m.start() - 13] + data_1[m.start() - 12] + data_1[m.start() - 11] + data_1[m.start() - 10] +
            data_1[m.start() - 9] + data_1[m.start() - 8] + data_1[m.start() - 7] + data_1[m.start() - 6] +
            data_1[m.start() - 5]))
    else:
        time_1.append(float(
            data_1[m.start() - 33] + data_1[m.start() - 32] + data_1[m.start() - 31] + data_1[m.start() - 30] +
            data_1[m.start() - 29] + data_1[m.start() - 28] + data_1[m.start() - 27] + data_1[m.start() - 26]))
        time_1.append(float(
            data_1[m.start() - 13] + data_1[m.start() - 12] + data_1[m.start() - 11] + data_1[m.start() - 10] +
            data_1[m.start() - 9] + data_1[m.start() - 8] + data_1[m.start() - 7] + data_1[m.start() - 6] +
            data_1[m.start() - 5]))
#----------------------------------------------------------------------------------------------------------------------#
    # Label text of 1-3 characters immediately after 'text = "'; time_1 is a
    # flat list of (start, end, label, counter) groups of four.
    if data_1[m.start() + 9] == '"':
        time_1.append((data_1[m.start() + 8]))
    elif data_1[m.start() + 10] == '"':
        time_1.append((data_1[m.start() + 8] + data_1[m.start() + 9]))
    else:
        time_1.append((data_1[m.start() + 8] + data_1[m.start() + 9] + data_1[m.start() + 10]))
    time_1.append(counter)
    counter += 1
#----------------------------------------------------------------------------------------------------------------------#
# Parse vowel interval (start, end) pairs from the script-generated TextGrid,
# again via fixed character offsets around each '"Vowel"' label.
for m in re.finditer('"Vowel"', data_2):
    time_2.append(float(
        data_2[m.start() - 34] + data_2[m.start() - 33] + data_2[m.start() - 32] + data_2[m.start() - 31] +
        data_2[m.start() - 30] + data_2[m.start() - 29]))
    time_2.append(float(
        data_2[m.start() - 17] + data_2[m.start() - 16] + data_2[m.start() - 15] + data_2[m.start() - 14] +
        data_2[m.start() - 13] + data_2[m.start() - 12]))
#----------------------------------------------------------------------------------------------------------------------#
# Seven-band filter bank (Bark-like band edges): energy curve per band.
fs, data_f = wavfile.read(audio_file) # Reading data from wav file in an array
data_f = data_f / float(2 ** 15) # Normalizing it to [-1,1] range from [-2^15,2^15]
st_energy_1f, f_data_1 = filter_bank(data_f, 200, 400, fs, 6, window_dur, hop_dur)
st_energy_2f, f_data_2 = filter_bank(data_f, 400, 630, fs, 6, window_dur, hop_dur)
st_energy_3f, f_data_3 = filter_bank(data_f, 630, 920, fs, 6, window_dur, hop_dur)
st_energy_4f, f_data_4 = filter_bank(data_f, 920, 1270, fs, 6, window_dur, hop_dur)
st_energy_5f, f_data_5 = filter_bank(data_f, 1270, 1720, fs, 6, window_dur, hop_dur)
st_energy_6f, f_data_6 = filter_bank(data_f, 1720, 2320, fs, 6, window_dur, hop_dur)
st_energy_7f, f_data_7 = filter_bank(data_f, 2320, 3200, fs, 6, window_dur, hop_dur)
#---------------------------------------------------------------------------------------------------------------------#
# Three-panel figure: (1) waveform with syllable/vowel boundaries,
# (2) energy curves with detected peaks/valleys, (3) filter-bank energies.
plt.subplot(311)
plt.plot(x_values, data2) # The Original Data
plt.xlim(0,x_values[-1]) # Limiting it to fixed range for representational purposes
for j in range(0, len(time_1), 4):
    plt.vlines(time_1[j], min(data2)+0.30*min(data2), max(data2), 'black') # Syllable Boundaries
for j in range(2, len(time_1), 4):
    plt.text(time_1[j - 2], min(data2)+0.28*min(data2), time_1[j], fontsize=15, color='green', rotation=0) # Syllable Labels
for j in range(len(time_2)):
    plt.vlines(time_2[j], min(data2), max(data2), 'red') # Vowel Boundaries
for j in range(0, len(time_2), 2):
    plt.text(time_2[j], max(data2), 'Vowel', fontsize=12, color='red') # Vowel Label
for j in range(0,len(time_2),2): # Bounding arrows for Vowel
    plt.arrow(time_2[j], max(data2), (time_2[j + 1] - time_2[j])-0.01, 0, head_width=0.005, head_length=0.01,color='red')
    plt.arrow(time_2[j+1], max(data2), -(time_2[j + 1] - time_2[j]) + 0.01, 0, head_width=0.005, head_length=0.01,color='red')
for j in range(0,len(time_1),4): # Bounding arrows for Syllable
    plt.arrow(time_1[j], min(data2)+0.30*min(data2), (time_1[j + 1] - time_1[j])-0.01, 0, head_width=0.005, head_length=0.01)
    plt.arrow(time_1[j+1], min(data2)+0.30*min(data2), -(time_1[j + 1] - time_1[j]) + 0.01, 0, head_width=0.005, head_length=0.01)
plt.xlabel('Time (In seconds)')
plt.ylabel('Amplitude')
plt.title('Sound Waveform', color='blue')
plt.subplot(312)
plt.plot(st_energy_0, 'red')
plt.plot(st_energy_1, 'black')
plt.plot(st_energy_2, 'blue')
for i in range(len(location_peak)):
    plt.scatter(location_peak[i], st_energy_2[location_peak[i]], color='red', label='Peak')
plt.scatter(ripple_valley, value_valley, color='green', label='Valley')
for j in range(len(location_peak)):
    plt.text(location_peak[j], st_energy_2[location_peak[j]], str(round(ripple_value[j], 2)))
for j in range(len(loc)):
    plt.vlines(loc[j], min(st_energy_2), max(st_energy_2), 'black') # Vowel Centres
plt.xlim(0, len(st_energy_2)) # Limiting it to fixed range for representational purposes
plt.subplot(313)
plt.plot(st_energy_1f, 'red', label='[200-400]')
plt.plot(st_energy_2f, 'orange', label='[400-630]')
plt.plot(st_energy_3f, 'yellow', label='[630-920]')
plt.plot(st_energy_4f, 'green', label='[920-1270]')
plt.plot(st_energy_5f, 'blue', label='[1270-1720]')
plt.plot(st_energy_6f, 'indigo', label='[1720-2320]', ls='dotted')
plt.plot(st_energy_7f, 'violet', label='[2320-3200]', ls='dashed')
plt.xlim(0, len(st_energy_1f))
# plt.legend()
plt.xlabel('No. of frames')
plt.ylabel('Normalised Magnitude')
plt.title('Short Term Energy')
plt.show()
|
import sys
import random
import pygame
from pygame.locals import *
import loadcard
import popup
import AI
class game():
    def __init__(self, playernum, difficulty):
        """Create the game window and initial per-player state.

        playernum  -- number of players (human is player 0, rest are AI)
        difficulty -- AI level: 1 uses basicplay, 2 uses advancedplay
        """
        self.playernum = playernum
        self.difficulty = difficulty
        self.background = pygame.image.load('./img/default.png')
        self.screen = pygame.display.set_mode((800, 700))
        self.screen.blit(self.background, (-100, -70))
        # Lookup tables used by set_deck when building the 108-card deck.
        self.color = {1:'RED', 2:'YELLOW', 3:'GREEN', 4:'BLUE', 5:'BLACK'}
        self.skill = {11:'_SKILL_0', 12:'_SKILL_1', 13:'_SKILL_2', 14:'_SKILL_3', 15:'_SKILL_4'}
        self.card_deck = []
        self.player = [[0] for i in range (0, self.playernum)]  # one hand (list of card names) per player
        self.waste_group = pygame.sprite.RenderPlain()  # sprites of played cards
        self.rotate = 0  # play direction: 0 = forward, 1 = reversed
        self.uno = 0
        pygame.display.update()
def text_format(self, message, textFont, textSize, textColor):
newFont = pygame.font.SysFont(textFont, textSize)
newText = newFont.render(message, K_0, textColor)
return newText
def set_deck(self):
for color_idx in range(1,5):
card = self.color[color_idx]
now_card = card + '_0'
self.card_deck.append(now_card)
for card_number in range(1, 10):
now_card = card + "_" + str(card_number)
iterate = 0
while iterate != 2:
self.card_deck.append(now_card)
iterate += 1
for card_number in range(11, 14):
now_card = card + self.skill[card_number]
iterate = 0
while iterate != 2:
self.card_deck.append(now_card)
iterate += 1
card = 'BLACK'
for card_number in range(14, 16):
now_card = card + self.skill[card_number]
iterate = 0
while iterate != 4:
self.card_deck.append(now_card)
iterate += 1
random.shuffle(self.card_deck)
    def set_window(self):
        """Deal the opening hands and lay out the table.

        Builds a fresh shuffled deck, deals seven cards to every player,
        creates the sprite groups for the deck and each hand (AI hands are
        shown face-down and rotated to their table edge), animates the
        cards into their final positions, plays the deal sound and draws
        the initial window.
        """
        self.set_deck()
        # Deal seven cards to each player.
        for player in range(0, self.playernum):
            card = []
            for number in range(0, 7):
                temp = self.card_deck.pop(number)
                card.append(temp)
            self.player[player] = card
        deck = loadcard.Card('BACK', (350, 300))
        self.deck_group = pygame.sprite.RenderPlain(deck)
        player_deck = self.player[0]
        init_card = []
        for item in player_deck:
            cards = loadcard.Card(item, (400, 300))
            init_card.append(cards)
        # Build per-player sprite lists; AI cards are face-down ('BACK')
        # and rotated to face their side of the table.
        for i in range(0, len(self.player)):
            player_deck = self.player[i]
            if i == 0:
                user_card = []
                for item in player_deck:
                    cards = loadcard.Card(item, (400, 300))
                    user_card.append(cards)
            elif i == 1:
                self.com1_card = []
                for item in player_deck:
                    cards = loadcard.Card('BACK', (400, 300))
                    cards.rotation(180)
                    self.com1_card.append(cards)
            elif i == 2:
                self.com2_card = []
                for item in player_deck:
                    cards = loadcard.Card('BACK', (400, 300))
                    cards.rotation(270)
                    self.com2_card.append(cards)
            else:
                self.com3_card = []
                for item in player_deck:
                    cards = loadcard.Card('BACK', (400, 300))
                    cards.rotation(90)
                    self.com3_card.append(cards)
        # Animation loop: keep updating card positions until every hand has
        # reached its target layout (one "setting" flag per player).
        setting = True
        settinguser = 1; settingcom1 = 1; settingcom3 = 1; settingcom2 = 1
        if self.playernum == 3:
            settingcom3 = 0
        if self.playernum == 2:
            settingcom3 = 0
            settingcom2 = 0
        while setting:
            for event in pygame.event.get():
                if event.type == QUIT:
                    pygame.quit()
                    sys.exit()
            i = 0
            temp_list = []
            for item in user_card:
                item.update((200+70*i, 500))
                temp_list.append(item)
                i +=1
            self.user_group = pygame.sprite.RenderPlain(*temp_list)
            self.lastcard0 = temp_list[-1].getposition()
            if self.lastcard0 == (200+70*(len(temp_list)-1), 500):
                settinguser = 0
            i = 0
            temp_list = []
            setting = True
            for item in self.com1_card:
                item.update((270+40*i, 100))
                temp_list.append(item)
                i +=1
            self.com1_group = pygame.sprite.RenderPlain(*temp_list)
            self.lastcard1 = temp_list[-1].getposition()
            if self.lastcard1 == (270+40*(len(temp_list)-1), 100):
                settingcom1 = 0
            if self.playernum >= 3:
                i = 0
                temp_list = []
                setting = True
                for item in self.com2_card:
                    item.update((80, 170+40*i))
                    temp_list.append(item)
                    i +=1
                self.com2_group = pygame.sprite.RenderPlain(*temp_list)
                self.lastcard2 = temp_list[-1].getposition()
                if self.lastcard2 == (80, 170+40*(len(temp_list)-1)):
                    settingcom2 = 0
            if self.playernum == 4:
                i = 0
                temp_list = []
                setting = True
                for item in self.com3_card:
                    item.update((710, 170+40*i))
                    temp_list.append(item)
                    i +=1
                self.com3_group = pygame.sprite.RenderPlain(*temp_list)
                self.lastcard3 = temp_list[-1].getposition()
                if self.lastcard3 == (710, 170+40*(len(temp_list)-1)):
                    settingcom3 = 0
            if settinguser == 0 and settingcom1 == 0 and settingcom2 == 0 and settingcom3 == 0:
                setting = False
        # Deal sound effect, once per dealt card.
        pygame.mixer.pre_init(44100, -16, 1, 512)
        pygame.init()
        card = pygame.mixer.Sound('./sound/card.wav')
        for i in range(0,7):
            card.play()
        self.printwindow()
        pygame.display.update()
    def next_turn(self, now_turn):
        """Clear *now_turn*'s name-label highlight (repaint it black) and
        return the index of the player whose turn comes next
        (delegates to get_next_player)."""
        if now_turn == 0:
            user_text = self.text_format("ME", 'Berlin Sans FB', 30, (0,0,0))
            self.screen.blit(user_text, (165, 420))
        elif now_turn == 1:
            com1_text = self.text_format("COM1", 'Berlin Sans FB', 30, (0,0,0))
            self.screen.blit(com1_text, (235, 18))
        elif now_turn == 2:
            com2_text = self.text_format("COM2", 'Berlin Sans FB', 30, (0,0,0))
            self.screen.blit(com2_text, (45, 100))
        elif now_turn == 3:
            com3_text = self.text_format("COM3", 'Berlin Sans FB', 30, (0,0,0))
            self.screen.blit(com3_text, (675, 100))
        temp = self.get_next_player(now_turn)
        return temp
def get_next_player(self, now_turn):
if self.rotate==0 and now_turn + 1 == self.playernum: return 0
elif self.rotate==1 and now_turn - 1 < 0: return self.playernum-1
else:
if self.rotate == 0: return now_turn + 1
elif self.rotate == 1: return now_turn - 1
return 0
    def select_player(self, now_turn):
        """Highlight *now_turn*'s name label in yellow to mark the active
        player, then refresh the display."""
        if now_turn == 0:
            user_text = self.text_format("ME", 'Berlin Sans FB', 30, (255,242,0))
            self.screen.blit(user_text, (165, 420))
        elif now_turn == 1:
            com1_text = self.text_format("COM1", 'Berlin Sans FB', 30, (255,242,0))
            self.screen.blit(com1_text, (235, 18))
        elif now_turn == 2:
            com2_text = self.text_format("COM2", 'Berlin Sans FB', 30, (255,242,0))
            self.screen.blit(com2_text, (45, 100))
        else:
            com3_text = self.text_format("COM3", 'Berlin Sans FB', 30, (255,242,0))
            self.screen.blit(com3_text, (675, 100))
        pygame.display.update()
    def printwindow(self):
        """Redraw the whole table: background, draw deck, every player's
        hand sprites, all name labels (in black) and the waste pile.
        Does not call pygame.display.update(); callers do that."""
        self.screen.blit(self.background, (-100, -70))
        self.deck_group.draw(self.screen)
        self.user_group.draw(self.screen)
        self.com1_group.draw(self.screen)
        if self.playernum >= 3:
            self.com2_group.draw(self.screen)
            com2_text = self.text_format("COM2", 'Berlin Sans FB', 30, (0,0,0))
            self.screen.blit(com2_text, (45, 100))
        if self.playernum == 4:
            self.com3_group.draw(self.screen)
            com3_text = self.text_format("COM3", 'Berlin Sans FB', 30, (0,0,0))
            self.screen.blit(com3_text, (675, 100))
        user_text = self.text_format("ME", 'Berlin Sans FB', 30, (0,0,0))
        self.screen.blit(user_text, (165, 420))
        com1_text = self.text_format("COM1", 'Berlin Sans FB', 30, (0,0,0))
        self.screen.blit(com1_text, (235, 18))
        self.waste_group.draw(self.screen)
def check_card(self, sprite):
if len(self.waste_card) == 0:
return True
else:
name = sprite.get_name()
name = name.split('_')
w_name = self.waste_card[-1]
w_name = w_name.split('_')
if w_name[0] == 'BLACK' : return True
if name[0] == 'BLACK' : return True
if len(name)<3 or len(w_name)<3:
if w_name[0] == name[0]: return True
if len(name)>1 and len(w_name)>1:
if w_name[1] == name[1]: return True
else:
if w_name[0] == name[0]: return True
if w_name[2] == name[2] : return True
return False
    def card_skill(self, sprite):
        """Apply the effect of a just-played card, if it is a skill card.

        SKILL_0 -- skip: the next player loses their turn.
        SKILL_1 -- reverse: flips self.rotate (acts as a skip with 2 players).
        SKILL_2 -- draw two: next player draws 2 cards and is skipped.
        SKILL_3 -- colour change: the human picks via popup, an AI picks
                   its most frequent hand colour.
        SKILL_4 -- draw four + colour change.
        Number cards fall through with no effect.  Always returns True.
        """
        name = sprite.get_name()
        name = name.split('_')
        if name[1] == 'SKILL':
            if name[2] == '0':
                pygame.time.wait(500)
                self.now_turn = self.next_turn(self.now_turn)
            elif name[2] == '1':
                if self.playernum == 2:
                    # With two players a reverse behaves like a skip.
                    pygame.time.wait(500)
                    self.now_turn = self.next_turn(self.now_turn)
                else:
                    if self.rotate == 0 : self.rotate = 1
                    else : self.rotate = 0
            elif name[2] == '2':
                pygame.time.wait(500)
                self.give_card(2)
                self.now_turn = self.next_turn(self.now_turn)
            elif name[2] == '3':
                pygame.mixer.pre_init(44100, -16, 1, 512)
                pygame.init()
                select = pygame.mixer.Sound('./sound/select.wav')
                select.play()
                if self.now_turn == 0:
                    self.pick_color()
                elif self.now_turn == 1:
                    pygame.time.wait(500)
                    self.most_num_color(self.player[1])
                elif self.now_turn == 2:
                    pygame.time.wait(500)
                    self.most_num_color(self.player[2])
                elif self.now_turn == 3:
                    pygame.time.wait(500)
                    self.most_num_color(self.player[3])
            elif name[2] == '4':
                pygame.mixer.pre_init(44100, -16, 1, 512)
                pygame.init()
                select = pygame.mixer.Sound('./sound/select.wav')
                select.play()
                self.give_card(4)
                if self.now_turn == 0:
                    self.pick_color()
                elif self.now_turn == 1:
                    pygame.time.wait(500)
                    self.most_num_color(self.player[1])
                elif self.now_turn == 2:
                    pygame.time.wait(500)
                    self.most_num_color(self.player[2])
                elif self.now_turn == 3:
                    pygame.time.wait(500)
                    self.most_num_color(self.player[3])
        return True
    def most_num_color(self, card_deck):
        """AI colour choice after a wild card: count the colours in
        *card_deck* (an AI hand), pick the most frequent one (ties go to
        the first of RED/YELLOW/GREEN/BLUE), push a bare colour entry onto
        the waste pile and redraw the table."""
        r = 0; y = 0; g = 0; b = 0;
        for item in card_deck:
            card = item.split('_')
            if card[0] == 'RED': r += 1
            if card[0] == 'YELLOW': y += 1
            if card[0] == 'GREEN': g += 1
            if card[0] == 'BLUE': b += 1
        a = [r, y, g, b]
        index = a.index(max(a))
        if index == 0 : temp_name = 'RED'
        if index == 1 : temp_name = 'YELLOW'
        if index == 2 : temp_name = 'GREEN'
        if index == 3 : temp_name = 'BLUE'
        temp = loadcard.Card(temp_name, (430, 300))
        self.waste_card.append(temp_name)
        self.waste_group.add(temp)
        self.printwindow()
    def pick_color(self):
        """Show the colour-pick popup after the human plays a wild card.

        Blocks until the player clicks one of the four colour buttons,
        then pushes the chosen bare colour name onto the waste pile and
        redraws the table.  Returns 0.
        """
        color_popup = popup.Popup('pickcolor', (400, 300))
        popup_group = pygame.sprite.RenderPlain(color_popup)
        red = popup.Popup('RED', (306, 320))
        yellow = popup.Popup('YELLOW', (368, 320))
        green = popup.Popup('GREEN', (432, 320))
        blue = popup.Popup('BLUE', (494, 320))
        colors = [red, yellow, green, blue]
        color_group = pygame.sprite.RenderPlain(*colors)
        loop = True
        while loop:
            popup_group.draw(self.screen)
            color_group.draw(self.screen)
            pygame.display.update()
            for event in pygame.event.get():
                if event.type == QUIT:
                    pygame.quit()
                    sys.exit()
                if event.type == MOUSEBUTTONUP:
                    mouse_pos = pygame.mouse.get_pos()
                    for sprite in color_group:
                        if sprite.get_rect().collidepoint(mouse_pos):
                            temp_name = sprite.get_name()
                            temp = loadcard.Card(temp_name, (430, 300))
                            self.waste_card.append(temp_name)
                            self.waste_group.add(temp)
                            self.printwindow()
                            loop = False
        return 0
def give_card(self, card_num):
dest_player = self.get_next_player(self.now_turn)
for i in range(0, card_num):
self.get_from_deck(dest_player)
    def restart(self):
        """Show the end-of-round overlay and wait for a replay request.

        Displays "YOU WIN!" with the win sound when the human's hand is
        empty, otherwise "YOU LOSE!" with the lose sound, then blocks
        until the player presses SPACE (which calls startgame) or closes
        the window (which exits).
        """
        pygame.mixer.pre_init(44100, -16, 1, 512)
        pygame.init()
        win = pygame.mixer.Sound('./sound/win.wav')
        lose = pygame.mixer.Sound('./sound/lose.wav')
        pygame.draw.rect(self.screen, (255, 51, 0), pygame.Rect(200, 200, 400, 200))
        pygame.draw.rect(self.screen, (255, 180, 0), pygame.Rect(210, 210, 380, 180))
        if len(self.user_group) == 0:
            win.play()
            close_text = self.text_format("YOU WIN!", 'Berlin Sans FB', 80, (255,51,0))
            press_text = self.text_format("Press SPACE to REPLAY", 'Berlin Sans FB', 35, (255,51,0))
            self.screen.blit(close_text, (230, 220))
        else:
            lose.play()
            close_text = self.text_format("YOU LOSE!", 'Berlin Sans FB', 80, (255,51,0))
            press_text = self.text_format("Press SPACE to REPLAY", 'Berlin Sans FB', 35, (255,51,0))
            self.screen.blit(close_text, (212, 220))
        self.screen.blit(press_text, (228, 330))
        pygame.display.update()
        while True:
            for event in pygame.event.get():
                if event.type == QUIT:
                    pygame.quit()
                    sys.exit()
                if event.type == KEYDOWN:
                    if event.key == K_SPACE:
                        self.startgame()
                        return
        return 0
    def startgame(self):
        """Reset the deck, hands, waste pile and play direction, re-deal
        via set_window and enter the main playgame loop."""
        self.card_deck.clear()
        self.player = [[0] for i in range (0, self.playernum)]
        self.waste_group = pygame.sprite.RenderPlain()
        self.rotate = 0
        self.set_window()
        self.playgame()
def playgame(self):
self.now_turn = 0
self.waste_card = []
while True:
if len(self.user_group) ==0:
self.restart()
return
elif self.playernum == 4:
if len(self.player[1]) == 0 or len(self.player[2]) == 0 or len(self.player[2]) == 0:
self.restart()
return
elif self.playernum == 3:
if len(self.player[1]) == 0 or len(self.player[2]) == 0:
self.restart()
return
elif self.playernum == 2:
if len(self.player[1]) == 0:
self.restart()
return
if len(self.card_deck) == 0:
self.set_deck()
self.select_player(self.now_turn)
if self.now_turn == 1:
self.select_player(self.now_turn)
pygame.time.wait(700)
ai = AI.AI(2, self.player[1], self.waste_card)
if self.difficulty == 1:
temp = ai.basicplay()
elif self.difficulty == 2:
next = self.get_next_player(self.now_turn)
if next == 0 : next_ = self.user_group
else : next_ = self.player[next]
temp = ai.advancedplay(next_)
if temp == 0 or temp == None:
self.get_from_deck(1)
self.printwindow()
self.now_turn = self.next_turn(self.now_turn)
pygame.display.update()
else:
pygame.mixer.pre_init(44100, -16, 1, 512)
pygame.init()
card = pygame.mixer.Sound('./sound/deal_card.wav')
for sprite in self.com1_group:
if sprite.getposition() == self.lastcard1:
self.com1_group.remove(sprite)
self.player[1].remove(temp)
self.set_lastcard(self.lastcard1, (0,0))
card.play()
self.waste_card.append(temp)
t_card = loadcard.Card(temp, (430, 300))
self.waste_group.add(t_card)
self.printwindow()
pygame.display.update()
self.card_skill(t_card)
self.printwindow()
self.now_turn = self.next_turn(self.now_turn)
pygame.display.update()
elif self.now_turn == 2:
self.select_player(self.now_turn)
pygame.time.wait(700)
ai = AI.AI(3, self.player[2], self.waste_card)
if self.difficulty == 1:
temp = ai.basicplay()
elif self.difficulty == 2:
next = self.get_next_player(self.now_turn)
if next == 0 : next_ = self.user_group
else : next_ = self.player[next]
temp = ai.advancedplay(next_)
if temp == 0 or temp == None:
self.get_from_deck(2)
self.printwindow()
self.now_turn = self.next_turn(self.now_turn)
pygame.display.update()
else:
pygame.mixer.pre_init(44100, -16, 1, 512)
pygame.init()
card = pygame.mixer.Sound('./sound/deal_card.wav')
for sprite in self.com2_group:
if sprite.getposition() == self.lastcard2:
self.com2_group.remove(sprite)
self.player[2].remove(temp)
self.set_lastcard(self.lastcard2, (0,0))
card.play()
self.waste_card.append(temp)
t_card = loadcard.Card(temp, (430, 300))
self.waste_group.add(t_card)
self.printwindow()
pygame.display.update()
self.card_skill(t_card)
self.printwindow()
self.now_turn = self.next_turn(self.now_turn)
pygame.display.update()
elif self.now_turn == 3:
self.select_player(self.now_turn)
pygame.time.wait(700)
ai = AI.AI(4, self.player[3], self.waste_card)
if self.difficulty == 1:
temp = ai.basicplay()
elif self.difficulty == 2:
next = self.get_next_player(self.now_turn)
if next == 0 : next_ = self.user_group
else : next_ = self.player[next]
temp = ai.advancedplay(next_)
if temp == 0 or temp == None:
self.get_from_deck(3)
self.printwindow()
self.now_turn = self.next_turn(self.now_turn)
pygame.display.update()
else:
pygame.mixer.pre_init(44100, -16, 1, 512)
pygame.init()
card = pygame.mixer.Sound('./sound/deal_card.wav')
for sprite in self.com3_group:
if sprite.getposition() == self.lastcard3:
self.com3_group.remove(sprite)
self.player[3].remove(temp)
self.set_lastcard(self.lastcard3, (0,0))
card.play()
self.waste_card.append(temp)
t_card = loadcard.Card(temp, (430, 300))
self.waste_group.add(t_card)
self.printwindow()
pygame.display.update()
self.card_skill(t_card)
self.printwindow()
print("computer lastcard", self.lastcard3)
self.now_turn = self.next_turn(self.now_turn)
pygame.display.update()
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
if event.type == KEYDOWN:
if event.key == K_ESCAPE:
return
if event.type == MOUSEBUTTONUP:
if self.now_turn == 0:
self.select_player(self.now_turn)
mouse_pos = pygame.mouse.get_pos()
for sprite in self.user_group:
if sprite.get_rect().collidepoint(mouse_pos):
if self.check_card(sprite):
pygame.mixer.pre_init(44100, -16, 1, 512)
pygame.init()
card = pygame.mixer.Sound('./sound/deal_card.wav')
self.user_group.remove(sprite)
for temp in self.user_group:
temp.move(sprite.getposition())
sprite.setposition(430, 300)
card.play()
self.put_waste_group(sprite)
self.card_skill(sprite)
self.now_turn = self.next_turn(self.now_turn)
break
for sprite in self.deck_group:
if sprite.get_rect().collidepoint(mouse_pos):
self.get_from_deck(self.now_turn)
self.now_turn = self.next_turn(self.now_turn)
break
pygame.display.update()
def get_from_deck(self, now_turn):
pygame.mixer.pre_init(44100, -16, 1, 512)
pygame.init()
deck = pygame.mixer.Sound('./sound/from_deck.wav')
item = self.card_deck.pop(0)
deck.play()
if now_turn == 0:
temp = loadcard.Card(item, (400, 300))
current_pos = self.lastcard0
if current_pos[0]>=620:
y = current_pos[1]+80
x = 200
else:
y = current_pos[1]
x = current_pos[0]+70
temp.setposition(x, y)
self.lastcard0 = (x, y)
self.user_group.add(temp)
elif now_turn == 1:
temp = loadcard.Card('BACK', (350, 300))
temp.rotation(180)
current_pos = self.lastcard1
if current_pos[0]>=510:
y = current_pos[1]+40
x = 270
else:
y = current_pos[1]
x = current_pos[0]+40
temp.setposition(x, y)
self.lastcard1 = (x, y)
self.com1_group.add(temp)
self.player[1].append(item)
elif now_turn == 2:
temp = loadcard.Card('BACK', (350, 300))
current_pos = self.lastcard2
temp.rotation(90)
if current_pos[1]>=410:
y = 170
x = current_pos[0]+40
else:
y = current_pos[1]+40
x = current_pos[0]
temp.setposition(x, y)
self.lastcard2 = (x, y)
self.com2_group.add(temp)
self.player[2].append(item)
elif now_turn == 3:
temp = loadcard.Card('BACK', (350, 300))
current_pos = self.lastcard3
temp.rotation(270)
if current_pos[1]>=410:
y = 170
x = current_pos[0]+40
else:
y = current_pos[1]+40
x = current_pos[0]
temp.setposition(x, y)
self.lastcard3 = (x, y)
self.com3_group.add(temp)
self.player[3].append(item)
self.printwindow()
def set_lastcard(self, lastcard, compare_pos):
x = lastcard[0]
y = lastcard[1]
i_x = compare_pos[0]
i_y = compare_pos[1]
if self.now_turn == 0:
if x >= i_x+60 and y == i_y:
x -= 70
elif y > i_y:
if x <= 200:
x = 620
y = y - 80
else:
x -=70
self.lastcard0 = (x, y)
elif self.now_turn == 1:
if y > 100 and x == 270:
y -= 40
x = 510
else:
x -= 40
self.lastcard1 = (x, y)
elif self.now_turn == 2:
if x > 80 and y == 170:
x -= 40
y = 410
else:
y -= 40
self.lastcard2 = (x, y)
elif self.now_turn == 3:
if x > 710 and y == 170:
x -= 40
y = 410
else:
y -= 40
self.lastcard3 = (x, y)
def put_waste_group(self, sprite):
self.waste_group.add(sprite)
self.waste_card.append(sprite.get_name())
self.set_lastcard(self.lastcard0, sprite.getposition())
self.printwindow()
|
# URL routes for the IIP API endpoints.
from django.urls import path, re_path
from .apis import *
urlpatterns = [
    # Create a new IIP record.
    path('iips/add', AddIipApi.as_view(), name='iip_add'),
    # List records between two YYYYMM months, e.g. iips/list/start=201901&end=201912
    # (the named groups `start`/`end` each match a 19xx/20xx year plus month 01-12).
    re_path(r'^iips/list/(?:start=(?P<start>(?:19|20)\d{2}(0[1-9]|1[012])))&(?:end=(?P<end>(?:19|20)\d{2}(0[1-9]|1[012])))$', IipListApi.as_view(), name='iip_list'),
    # NOTE(review): the path converter is named `cpi_id` on these IIP routes --
    # presumably carried over from a CPI urls module; the view signatures must
    # use the same keyword, so verify before renaming to `iip_id`.
    path('iips/update/<int:cpi_id>', UpdateIipApi.as_view(), name='iip_update'),
    path('iips/delete/<int:cpi_id>', DeleteIipApi.as_view(), name='iip_delete'),
]
|
# -*- coding: utf-8 -*-
'''
Copyright of DasPy:
Author - Xujun Han (Forschungszentrum Jülich, Germany)
x.han@fz-juelich.de, xujunhan@gmail.com
DasPy was funded by:
1. Forschungszentrum Jülich, Agrosphere (IBG 3), Jülich, Germany
2. Cold and Arid Regions Environmental and Engineering Research Institute, Chinese Academy of Sciences, Lanzhou, PR China
3. Centre for High-Performance Scientific Computing in Terrestrial Systems: HPSC TerrSys, Geoverbund ABC/J, Jülich, Germany
Please include the following references related to DasPy:
1. Han, X., Li, X., He, G., Kumbhar, P., Montzka, C., Kollet, S., Miyoshi, T., Rosolem, R., Zhang, Y., Vereecken, H., and Franssen, H. J. H.: DasPy 1.0 – the Open Source Multivariate Land Data Assimilation Framework in combination with the Community Land Model 4.5, Geosci. Model Dev. Discuss., 8, 7395-7444, 2015.
2. Han, X., Franssen, H. J. H., Rosolem, R., Jin, R., Li, X., and Vereecken, H.: Correction of systematic model forcing bias of CLM using assimilation of cosmic-ray Neutrons and land surface temperature: a study in the Heihe Catchment, China, Hydrology and Earth System Sciences, 19, 615-629, 2015a.
3. Han, X., Franssen, H. J. H., Montzka, C., and Vereecken, H.: Soil moisture and soil properties estimation in the Community Land Model with synthetic brightness temperature observations, Water Resour Res, 50, 6081-6105, 2014a.
4. Han, X., Franssen, H. J. H., Li, X., Zhang, Y. L., Montzka, C., and Vereecken, H.: Joint Assimilation of Surface Temperature and L-Band Microwave Brightness Temperature in Land Data Assimilation, Vadose Zone J, 12, 0, 2013.
'''
import os, sys, time, datetime, calendar, subprocess, string, signal, socket, imp
import numpy
def Write_seq_maps(seq_maps_file_name, DAS_Data_Path, Row_Numbers_String, Col_Numbers_String, Region_Name):
seq_maps_file = open(seq_maps_file_name,'w')
seq_maps_file.write("##################################################################\n")
seq_maps_file.write("#\n")
seq_maps_file.write("# seq_maps.rc\n")
seq_maps_file.write("#\n")
seq_maps_file.write("# This is a resource file which lists the names of mapping\n")
seq_maps_file.write("# weight files to use in a sequential CCSM run (mapname).\n")
seq_maps_file.write("# You can also set when data is rearranged in the mapping (maptype).\n")
seq_maps_file.write("#\n")
seq_maps_file.write("# This file is read during the map_model2model_init calls.\n")
seq_maps_file.write("#\n")
seq_maps_file.write("# For maptype: X = Rearrange the input so that the output\n")
seq_maps_file.write("# is on the correct processor.\n")
seq_maps_file.write("# Y = Rearrange the output and sum partial outputs\n")
seq_maps_file.write("# if necessary\n")
seq_maps_file.write("#\n")
seq_maps_file.write("# NOTE: For bfb on different processor counts, set all maptypes to X.\n")
seq_maps_file.write("################################################################## \n")
seq_maps_file.write("atm2ice_fmapname: "+repr(DAS_Data_Path+"/SysModel/CLM/tools/map_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_TO_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_aave.nc")+"\n")
seq_maps_file.write("atm2ice_fmaptype: 'X'\n")
seq_maps_file.write("atm2ice_smapname: "+repr(DAS_Data_Path+"/SysModel/CLM/tools/map_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_TO_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_blin.nc")+"\n")
seq_maps_file.write("atm2ice_smaptype: 'X'\n")
seq_maps_file.write("atm2ice_vmapname: "+repr(DAS_Data_Path+"/SysModel/CLM/tools/map_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_TO_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_patc.nc")+"\n")
seq_maps_file.write("atm2ice_vmaptype: 'X'\n")
seq_maps_file.write("atm2lnd_fmapname: 'idmap'\n")
seq_maps_file.write("atm2lnd_fmaptype: 'X'\n")
seq_maps_file.write("atm2lnd_smapname: 'idmap'\n")
seq_maps_file.write("atm2lnd_smaptype: 'X'\n")
seq_maps_file.write("atm2ocn_fmapname: "+repr(DAS_Data_Path+"/SysModel/CLM/tools/map_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_TO_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_aave.nc")+"\n")
seq_maps_file.write("atm2ocn_fmaptype: 'X'\n")
seq_maps_file.write("atm2ocn_smapname: "+repr(DAS_Data_Path+"/SysModel/CLM/tools/map_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_TO_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_blin.nc")+"\n")
seq_maps_file.write("atm2ocn_smaptype: 'X'\n")
seq_maps_file.write("atm2ocn_vmapname: "+repr(DAS_Data_Path+"/SysModel/CLM/tools/map_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_TO_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_patc.nc")+"\n")
seq_maps_file.write("atm2ocn_vmaptype: 'X'\n")
seq_maps_file.write("atm2wav_smapname: 'idmap'\n")
seq_maps_file.write("atm2wav_smaptype: 'Y'\n")
seq_maps_file.write("ice2atm_fmapname: "+repr(DAS_Data_Path+"/SysModel/CLM/tools/map_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_TO_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_aave.nc")+"\n")
seq_maps_file.write("ice2atm_fmaptype: 'Y'\n")
seq_maps_file.write("ice2atm_smapname: "+repr(DAS_Data_Path+"/SysModel/CLM/tools/map_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_TO_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_aave.nc")+"\n")
seq_maps_file.write("ice2atm_smaptype: 'Y'\n")
seq_maps_file.write("ice2wav_smapname: 'idmap'\n")
seq_maps_file.write("ice2wav_smaptype: 'Y'\n")
seq_maps_file.write("lnd2atm_fmapname: 'idmap'\n")
seq_maps_file.write("lnd2atm_fmaptype: 'Y'\n")
seq_maps_file.write("lnd2atm_smapname: 'idmap'\n")
seq_maps_file.write("lnd2atm_smaptype: 'Y'\n")
seq_maps_file.write("lnd2rof_fmapname: "+repr(DAS_Data_Path+"/SysModel/CLM/tools/map_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_TO_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_aave.nc")+"\n")
seq_maps_file.write("lnd2rof_fmaptype: 'X'\n")
seq_maps_file.write("ocn2atm_fmapname: "+repr(DAS_Data_Path+"/SysModel/CLM/tools/map_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_TO_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_aave.nc")+"\n")
seq_maps_file.write("ocn2atm_fmaptype: 'Y'\n")
seq_maps_file.write("ocn2atm_smapname: "+repr(DAS_Data_Path+"/SysModel/CLM/tools/map_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_TO_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_aave.nc")+"\n")
seq_maps_file.write("ocn2atm_smaptype: 'Y'\n")
seq_maps_file.write("ocn2wav_smapname: 'idmap'\n")
seq_maps_file.write("ocn2wav_smaptype: 'Y'\n")
seq_maps_file.write("rof2lnd_fmapname: "+repr(DAS_Data_Path+"/SysModel/CLM/tools/map_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_TO_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_aave.nc")+"\n")
seq_maps_file.write("rof2lnd_smapname: "+repr(DAS_Data_Path+"/SysModel/CLM/tools/map_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_TO_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_aave.nc")+"\n")
seq_maps_file.write("rof2lnd_smaptype: 'Y'\n")
seq_maps_file.write("rof2ocn_fmapname: "+repr(DAS_Data_Path+"/SysModel/CLM/tools/map_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_TO_"+Row_Numbers_String+"x"+Col_Numbers_String+"_"+Region_Name+"_aave.nc")+"\n")
seq_maps_file.write("rof2ocn_fmaptype: 'Y'\n")
seq_maps_file.write("rof2ocn_rmapname: ' '\n")
seq_maps_file.write("rof2ocn_rmaptype: 'Y'\n")
seq_maps_file.write("wav2ocn_smapname: 'idmap'\n")
seq_maps_file.write("wav2ocn_smaptype: 'X'\n")
seq_maps_file.write("/\n")
seq_maps_file.close()
def Write_datm_atm_in(datm_atm_in_file_name, datm_streams_txt_file_name_rad, datm_streams_txt_file_name_prec, datm_streams_txt_file_name_tair, presaero_stream_txt_file_name, domain_file_path,domain_name, rdirc_name,align_year,first_year,last_year):
datm_atm_in_file = open(datm_atm_in_file_name,'w')
datm_atm_in_file.write("&shr_strdata_nml\n")
datm_atm_in_file.write(" dataMode = 'CLMNCEP'\n")
datm_atm_in_file.write(" domainFile = "+repr(domain_file_path+domain_name)+ "\n")
datm_atm_in_file.write(" dtlimit = 1000000000000,1000000000000,1000000000,1000000000000,1000000000000,1000000000000,1000000000000,1000000000000\n")
datm_atm_in_file.write(" fillalgo = 'copy','copy','copy','copy'\n")
datm_atm_in_file.write(" fillmask = 'nomask','nomask','nomask','nomask'\n")
datm_atm_in_file.write(" mapalgo = 'bilinear','bilinear','bilinear','bilinear'\n")
datm_atm_in_file.write(" mapmask = 'nomask','nomask','nomask','nomask'\n")
datm_atm_in_file.write(" streams = "+repr(datm_streams_txt_file_name_rad + " " +align_year+" "+first_year+" "+last_year+" ")+","+"\n")
datm_atm_in_file.write(" "+repr(datm_streams_txt_file_name_prec + " " +align_year+" "+first_year+" "+last_year+" ")+","+"\n")
datm_atm_in_file.write(" "+repr(datm_streams_txt_file_name_tair + " " +align_year+" "+first_year+" "+last_year+" ")+","+"\n")
datm_atm_in_file.write(" "+repr(presaero_stream_txt_file_name +" 1 1 1")+"\n")
datm_atm_in_file.write(" taxMode = 'cycle','cycle','cycle','cycle'\n")
datm_atm_in_file.write(" tintalgo = 'coszen','nearest','linear','linear'\n")
datm_atm_in_file.write(" vectors = 'null'\n")
datm_atm_in_file.write("/\n")
datm_atm_in_file.close()
def Write_datm_streams_txt_rad(datm_streams_txt_file_name_rad, Def_SpinUp, domain_file_path,domain_name, rdirc_name,forcing_file_path,start_ymd,stop_ymd):
    """Write the datm stream description file for the shortwave radiation forcing.

    Emits the stream's domain section plus one ``YYYY_MM_DD_rad.nc`` entry for
    every day of the forcing period.  Replaces the Python-2-only
    ``string.atoi`` date parsing with ``datetime.date`` arithmetic and drops
    the unused ``Day_String``/``OutputDate`` locals of the original.

    Parameters
    ----------
    datm_streams_txt_file_name_rad : str -- output file path.
    Def_SpinUp, rdirc_name, start_ymd, stop_ymd : unused; kept for interface
        compatibility with the other stream writers (see NOTE below).
    domain_file_path, domain_name : str -- location of the CLM domain file.
    forcing_file_path : str -- directory holding the daily ``*_rad.nc`` files.
    """
    # NOTE(review): the forcing period is deliberately pinned to
    # 2015-12-01 .. 2017-01-31 instead of using start_ymd/stop_ymd
    # (pre-existing behavior, preserved here) -- confirm before re-enabling.
    first_day = datetime.date(2015, 12, 1)
    last_day = datetime.date(2017, 1, 31)
    stream = open(datm_streams_txt_file_name_rad, 'w')
    stream.write("      <dataSource>\n")
    stream.write("            GENERIC\n")
    stream.write("      </dataSource>\n")
    stream.write("      <domainInfo>\n")
    stream.write("         <variableNames>\n")
    stream.write("            time   time\n")
    stream.write("            xc     lon\n")
    stream.write("            yc     lat\n")
    stream.write("            area   area\n")
    stream.write("            mask   mask\n")
    stream.write("         </variableNames>\n")
    stream.write("         <filePath>\n")
    stream.write("            " + domain_file_path + "\n")
    stream.write("         </filePath>\n")
    stream.write("         <fileNames>\n")
    stream.write("            " + domain_name + "\n")
    stream.write("         </fileNames>\n")
    stream.write("      </domainInfo>\n")
    stream.write("      <fieldInfo>\n")
    stream.write("         <variableNames>\n")
    # FLDS (lwdn) is intentionally not streamed here.
    stream.write("            FSDS   swdn\n")
    stream.write("         </variableNames>\n")
    stream.write("         <filePath>\n")
    stream.write("            " + forcing_file_path + "\n")
    stream.write("         </filePath>\n")
    stream.write("         <fileNames>\n")
    # One forcing file per day, named YYYY_MM_DD_rad.nc (zero-padded).
    day = first_day
    one_day = datetime.timedelta(days=1)
    while day <= last_day:
        stream.write("            %04d_%02d_%02d_rad.nc\n" % (day.year, day.month, day.day))
        day += one_day
    stream.write("         </fileNames>\n")
    stream.write("         <offset>\n")
    stream.write("            0\n")
    stream.write("         </offset>\n")
    stream.write("      </fieldInfo>\n")
    stream.write("\n")
    stream.close()
def Write_datm_streams_txt_prec(datm_streams_txt_file_name_prec, Def_SpinUp, domain_file_path,domain_name, rdirc_name,forcing_file_path,start_ymd,stop_ymd):
    """Write the datm stream description file for the precipitation forcing.

    Emits the stream's domain section plus one ``YYYY_MM_DD_tp.nc`` entry for
    every day of the forcing period.  Replaces the Python-2-only
    ``string.atoi`` date parsing with ``datetime.date`` arithmetic and drops
    the unused ``Day_String``/``OutputDate`` locals of the original.

    Parameters
    ----------
    datm_streams_txt_file_name_prec : str -- output file path.
    Def_SpinUp, rdirc_name, start_ymd, stop_ymd : unused; kept for interface
        compatibility with the other stream writers (see NOTE below).
    domain_file_path, domain_name : str -- location of the CLM domain file.
    forcing_file_path : str -- directory holding the daily ``*_tp.nc`` files.
    """
    # NOTE(review): the forcing period is deliberately pinned to
    # 2015-12-01 .. 2017-01-31 instead of using start_ymd/stop_ymd
    # (pre-existing behavior, preserved here) -- confirm before re-enabling.
    first_day = datetime.date(2015, 12, 1)
    last_day = datetime.date(2017, 1, 31)
    stream = open(datm_streams_txt_file_name_prec, 'w')
    stream.write("      <dataSource>\n")
    stream.write("            GENERIC\n")
    stream.write("      </dataSource>\n")
    stream.write("      <domainInfo>\n")
    stream.write("         <variableNames>\n")
    stream.write("            time   time\n")
    stream.write("            xc     lon\n")
    stream.write("            yc     lat\n")
    stream.write("            area   area\n")
    stream.write("            mask   mask\n")
    stream.write("         </variableNames>\n")
    stream.write("         <filePath>\n")
    stream.write("            " + domain_file_path + "\n")
    stream.write("         </filePath>\n")
    stream.write("         <fileNames>\n")
    stream.write("            " + domain_name + "\n")
    stream.write("         </fileNames>\n")
    stream.write("      </domainInfo>\n")
    stream.write("      <fieldInfo>\n")
    stream.write("         <variableNames>\n")
    stream.write("            PRECTmms   precn\n")
    stream.write("         </variableNames>\n")
    stream.write("         <filePath>\n")
    stream.write("            " + forcing_file_path + "\n")
    stream.write("         </filePath>\n")
    stream.write("         <fileNames>\n")
    # One forcing file per day, named YYYY_MM_DD_tp.nc (zero-padded).
    day = first_day
    one_day = datetime.timedelta(days=1)
    while day <= last_day:
        stream.write("            %04d_%02d_%02d_tp.nc\n" % (day.year, day.month, day.day))
        day += one_day
    stream.write("         </fileNames>\n")
    stream.write("         <offset>\n")
    stream.write("            0\n")
    stream.write("         </offset>\n")
    stream.write("      </fieldInfo>\n")
    stream.write("\n")
    stream.close()
def Write_datm_streams_txt_tair(datm_streams_txt_file_name_tair, Def_SpinUp, domain_file_path, domain_name, rdirc_name,forcing_file_path,start_ymd,stop_ymd):
    """Write the datm stream description file for the near-surface state forcing.

    Streams air temperature, wind, surface pressure and relative humidity from
    one ``YYYY_MM_DD_tair.nc`` file per day of the forcing period.  Replaces
    the Python-2-only ``string.atoi`` date parsing with ``datetime.date``
    arithmetic and drops the unused ``Day_String``/``OutputDate`` locals.

    Parameters
    ----------
    datm_streams_txt_file_name_tair : str -- output file path.
    Def_SpinUp, rdirc_name, start_ymd, stop_ymd : unused; kept for interface
        compatibility with the other stream writers (see NOTE below).
    domain_file_path, domain_name : str -- location of the CLM domain file.
    forcing_file_path : str -- directory holding the daily ``*_tair.nc`` files.
    """
    # NOTE(review): the forcing period is deliberately pinned to
    # 2015-12-01 .. 2017-01-31 instead of using start_ymd/stop_ymd
    # (pre-existing behavior, preserved here) -- confirm before re-enabling.
    first_day = datetime.date(2015, 12, 1)
    last_day = datetime.date(2017, 1, 31)
    stream = open(datm_streams_txt_file_name_tair, 'w')
    stream.write("      <dataSource>\n")
    stream.write("            GENERIC\n")
    stream.write("      </dataSource>\n")
    stream.write("      <domainInfo>\n")
    stream.write("         <variableNames>\n")
    stream.write("            time   time\n")
    stream.write("            xc     lon\n")
    stream.write("            yc     lat\n")
    stream.write("            area   area\n")
    stream.write("            mask   mask\n")
    stream.write("         </variableNames>\n")
    stream.write("         <filePath>\n")
    stream.write("            " + domain_file_path + "\n")
    stream.write("         </filePath>\n")
    stream.write("         <fileNames>\n")
    stream.write("            " + domain_name + "\n")
    stream.write("         </fileNames>\n")
    stream.write("      </domainInfo>\n")
    stream.write("      <fieldInfo>\n")
    stream.write("         <variableNames>\n")
    stream.write("            TBOT   tbot\n")
    stream.write("            WIND   wind\n")
    stream.write("            PSRF   pbot\n")
    stream.write("            RH     rh\n")
    stream.write("         </variableNames>\n")
    stream.write("         <filePath>\n")
    stream.write("            " + forcing_file_path + "\n")
    stream.write("         </filePath>\n")
    stream.write("         <fileNames>\n")
    # One forcing file per day, named YYYY_MM_DD_tair.nc (zero-padded).
    day = first_day
    one_day = datetime.timedelta(days=1)
    while day <= last_day:
        stream.write("            %04d_%02d_%02d_tair.nc\n" % (day.year, day.month, day.day))
        day += one_day
    stream.write("         </fileNames>\n")
    stream.write("         <offset>\n")
    stream.write("            0\n")
    stream.write("         </offset>\n")
    stream.write("      </fieldInfo>\n")
    stream.write("\n")
    stream.close()
def Write_presaero_stream_txt(presaero_stream_txt_file_name,aero_file_path,aero_file_name):
    """Write the prescribed-aerosol deposition stream description file.

    Maps the aerosol file's deposition variables (black/organic carbon and the
    four dust size bins, wet and dry) onto the datm stream field names.

    Parameters
    ----------
    presaero_stream_txt_file_name : str -- output file path.
    aero_file_path, aero_file_name : str -- location of the aerosol deposition
        file (used for both the domain and the field data).
    """
    stream = open(presaero_stream_txt_file_name, 'w')
    stream.write("      <dataSource>\n")
    stream.write("            GENERIC\n")
    stream.write("      </dataSource>\n")
    stream.write("      <domainInfo>\n")
    stream.write("         <variableNames>\n")
    stream.write("            time   time\n")
    stream.write("            lon    lon\n")
    stream.write("            lat    lat\n")
    stream.write("            area   area\n")
    stream.write("            mask   mask\n")
    stream.write("         </variableNames>\n")
    stream.write("         <filePath>\n")
    stream.write("            " + aero_file_path + "\n")
    stream.write("         </filePath>\n")
    stream.write("         <fileNames>\n")
    stream.write("            " + aero_file_name + "\n")
    stream.write("         </fileNames>\n")
    stream.write("      </domainInfo>\n")
    stream.write("      <fieldInfo>\n")
    stream.write("         <variableNames>\n")
    stream.write("            BCDEPWET   bcphiwet\n")
    stream.write("            BCPHODRY   bcphodry\n")
    stream.write("            BCPHIDRY   bcphidry\n")
    stream.write("            OCDEPWET   ocphiwet\n")
    stream.write("            OCPHIDRY   ocphidry\n")
    stream.write("            OCPHODRY   ocphodry\n")
    stream.write("            DSTX01WD   dstwet1\n")
    stream.write("            DSTX01DD   dstdry1\n")
    stream.write("            DSTX02WD   dstwet2\n")
    stream.write("            DSTX02DD   dstdry2\n")
    stream.write("            DSTX03WD   dstwet3\n")
    stream.write("            DSTX03DD   dstdry3\n")
    # BUGFIX: dust bin 4 had wet/dry swapped (WD -> dstdry4, DD -> dstwet4);
    # the *WD variable is the wet-deposition field, matching bins 1-3.
    stream.write("            DSTX04WD   dstwet4\n")
    stream.write("            DSTX04DD   dstdry4\n")
    stream.write("         </variableNames>\n")
    stream.write("         <filePath>\n")
    stream.write("            " + aero_file_path + "\n")
    stream.write("         </filePath>\n")
    # BUGFIX: a duplicate <offset> block used to precede <fileNames>; a
    # fieldInfo section takes a single offset, written once after the file
    # list (matching the other stream writers in this module).
    stream.write("         <fileNames>\n")
    stream.write("            " + aero_file_name + "\n")
    stream.write("         </fileNames>\n")
    stream.write("         <offset>\n")
    stream.write("            0\n")
    stream.write("         </offset>\n")
    stream.write("      </fieldInfo>\n")
    stream.write("\n")
    stream.close()
def Write_drv_in(Def_PP, Model_Driver, Def_CESM_Multi_Instance,Ensemble_Number,num_processors,case_name,hostname,orb_iyear,start_type,username,
atm_cpl_dt,lnd_cpl_dt,ocn_cpl_dt,ice_cpl_dt,glc_cpl_dt, rof_cpl_dt, wav_cpl_dt,
end_restart,restart_option,start_tod,start_ymd,stop_tod,stop_ymd,ntasks_CLM,rootpe_CLM,nthreads_CLM):
drv_in_file = open("drv_in",'w')
drv_in_file.write("&seq_cplflds_inparm\n")
drv_in_file.write(" flds_co2_dmsa = .false.\n")
drv_in_file.write(" flds_co2a = .false.\n")
drv_in_file.write(" flds_co2b = .false.\n")
drv_in_file.write(" flds_co2c = .false.\n")
drv_in_file.write(" glc_nec = 0\n")
drv_in_file.write("/\n")
drv_in_file.write("&seq_cplflds_userspec\n")
drv_in_file.write(" cplflds_custom = ''\n")
drv_in_file.write("/\n")
drv_in_file.write("&seq_infodata_inparm\n")
drv_in_file.write(" aoflux_grid = 'ocn'\n")
drv_in_file.write(" bfbflag = .false.\n")
drv_in_file.write(" brnch_retain_casename = .false.\n")
drv_in_file.write(" budget_ann = 1\n")
drv_in_file.write(" budget_daily = 0\n")
drv_in_file.write(" budget_inst = 0\n")
drv_in_file.write(" budget_ltann = 1\n")
drv_in_file.write(" budget_ltend = 0\n")
drv_in_file.write(" budget_month = 1\n")
drv_in_file.write(" case_desc = 'UNSET'\n")
drv_in_file.write(" case_name = "+case_name+"\n")
drv_in_file.write(" cpl_cdf64 = .true.\n")
drv_in_file.write(" cpl_decomp = 0\n")
drv_in_file.write(" do_budgets = .false.\n")
drv_in_file.write(" do_histinit = .false.\n")
drv_in_file.write(" drv_threading = .false.\n")
drv_in_file.write(" eps_aarea = 9.0e-07\n")
drv_in_file.write(" eps_agrid = 1.0e-12\n")
drv_in_file.write(" eps_amask = 1.0e-13\n")
drv_in_file.write(" eps_frac = 1.0e-02\n")
drv_in_file.write(" eps_oarea = 1.0e-01\n")
drv_in_file.write(" eps_ogrid = 1.0e-02\n")
drv_in_file.write(" eps_omask = 1.0e-06\n")
drv_in_file.write(" flux_albav = .false.\n")
drv_in_file.write(" flux_epbal = 'off'\n")
drv_in_file.write(" histaux_a2x = .false.\n")
drv_in_file.write(" histaux_a2x24hr = .false.\n")
drv_in_file.write(" histaux_a2x3hr = .false.\n")
drv_in_file.write(" histaux_a2x3hrp = .false.\n")
drv_in_file.write(" histaux_l2x = .false.\n")
drv_in_file.write(" histaux_r2x = .false.\n")
drv_in_file.write(" histaux_s2x1yr = .false.\n")
drv_in_file.write(" hostname = "+hostname+"\n")
drv_in_file.write(" info_debug = 1\n")
drv_in_file.write(" mct_usealltoall = .false.\n")
drv_in_file.write(" mct_usevector = .false.\n")
drv_in_file.write(" model_version = 'cesm1_2_1'\n")
drv_in_file.write(" ocean_tight_coupling = .false.\n")
drv_in_file.write(" orb_iyear = "+orb_iyear+"\n")
drv_in_file.write(" orb_iyear_align = "+orb_iyear+"\n")
drv_in_file.write(" orb_mode = 'fixed_year'\n")
drv_in_file.write(" run_barriers = .false.\n")
drv_in_file.write(" samegrid_al = .true.\n")
drv_in_file.write(" samegrid_ao = .false.\n")
drv_in_file.write(" samegrid_aw = .false.\n")
drv_in_file.write(" samegrid_ow = .false.\n")
drv_in_file.write(" samegrid_ro = .false.\n")
drv_in_file.write(" shr_map_dopole = .true.\n")
# 23/03/2016 dominik
#if start_ymd == "20100101":
drv_in_file.write(" start_type = 'continue'\n")
#else:
#drv_in_file.write(" start_type = "+start_type+"\n")
drv_in_file.write(" tchkpt_dir = './timing/checkpoints'\n")
drv_in_file.write(" timing_dir = './timing'\n")
drv_in_file.write(" username = "+username+"\n")
drv_in_file.write(" vect_map = 'cart3d'\n")
drv_in_file.write("/\n")
drv_in_file.write("&seq_timemgr_inparm\n")
drv_in_file.write(" atm_cpl_dt = "+str(atm_cpl_dt)+"\n")
drv_in_file.write(" calendar = 'GREGORIAN'\n")
#drv_in_file.write(" calendar = 'NO_LEAP'\n")
drv_in_file.write(" end_restart = "+end_restart+"\n")
drv_in_file.write(" glc_cpl_dt = "+str(glc_cpl_dt)+"\n")
drv_in_file.write(" histavg_n = -999\n")
drv_in_file.write(" histavg_option = 'never'\n")
drv_in_file.write(" histavg_ymd = -999\n")
drv_in_file.write(" history_n = -999\n")
drv_in_file.write(" history_option = 'never'\n")
drv_in_file.write(" history_ymd = -999\n")
drv_in_file.write(" ice_cpl_dt = "+str(ice_cpl_dt)+"\n")
drv_in_file.write(" lnd_cpl_dt = "+str(lnd_cpl_dt)+"\n")
drv_in_file.write(" ocn_cpl_dt = "+str(ocn_cpl_dt)+"\n")
drv_in_file.write(" rof_cpl_dt = "+str(rof_cpl_dt)+"\n")
drv_in_file.write(" start_tod = "+start_tod+"\n")
drv_in_file.write(" start_ymd = "+start_ymd+"\n")
drv_in_file.write(" stop_option = 'date'\n")
drv_in_file.write(" stop_tod = "+stop_tod+"\n")
drv_in_file.write(" stop_ymd = "+stop_ymd+"\n")
drv_in_file.write(" end_restart = "+end_restart+"\n")
drv_in_file.write(" tprof_n = -999\n")
drv_in_file.write(" tprof_option = 'never'\n")
drv_in_file.write(" tprof_ymd = -999\n")
drv_in_file.write(" wav_cpl_dt = "+str(wav_cpl_dt)+"\n")
drv_in_file.write("/\n")
drv_in_file.write("&ccsm_pes\n")
drv_in_file.write(" atm_layout = 'concurrent'\n")
drv_in_file.write(" atm_ntasks = "+str(int(ntasks_CLM[0]))+"\n")
drv_in_file.write(" atm_nthreads = "+str(int(nthreads_CLM))+"\n")
drv_in_file.write(" atm_pestride = 1\n")
drv_in_file.write(" atm_rootpe = "+str(int(rootpe_CLM[0]))+"\n")
drv_in_file.write(" lnd_layout = 'concurrent'\n")
drv_in_file.write(" lnd_ntasks = "+str(int(ntasks_CLM[1]))+"\n")
drv_in_file.write(" lnd_nthreads = "+str(int(nthreads_CLM))+"\n")
drv_in_file.write(" lnd_pestride = 1\n")
drv_in_file.write(" lnd_rootpe = "+str(int(rootpe_CLM[1]))+"\n")
drv_in_file.write(" cpl_ntasks = "+str(int(ntasks_CLM[2]))+"\n")
drv_in_file.write(" cpl_nthreads = "+str(int(nthreads_CLM))+"\n")
drv_in_file.write(" cpl_pestride = 1\n")
drv_in_file.write(" cpl_rootpe = "+str(int(rootpe_CLM[2]))+"\n")
drv_in_file.write(" glc_layout = 'concurrent'\n")
drv_in_file.write(" glc_ntasks = "+str(int(ntasks_CLM[3]))+"\n")
drv_in_file.write(" glc_nthreads = "+str(int(nthreads_CLM))+"\n")
drv_in_file.write(" glc_pestride = 1\n")
drv_in_file.write(" glc_rootpe = "+str(int(rootpe_CLM[3]))+"\n")
drv_in_file.write(" ice_layout = 'concurrent'\n")
drv_in_file.write(" ice_ntasks = "+str(int(ntasks_CLM[4]))+"\n")
drv_in_file.write(" ice_nthreads = "+str(int(nthreads_CLM))+"\n")
drv_in_file.write(" ice_pestride = 1\n")
drv_in_file.write(" ice_rootpe = "+str(int(rootpe_CLM[4]))+"\n")
drv_in_file.write(" ocn_layout = 'concurrent'\n")
drv_in_file.write(" ocn_ntasks = "+str(int(ntasks_CLM[5]))+"\n")
drv_in_file.write(" ocn_nthreads = "+str(int(nthreads_CLM))+"\n")
drv_in_file.write(" ocn_pestride = 1\n")
drv_in_file.write(" ocn_rootpe = "+str(int(rootpe_CLM[5]))+"\n")
drv_in_file.write(" rof_layout = 'concurrent'\n")
drv_in_file.write(" rof_ntasks = "+str(int(ntasks_CLM[6]))+"\n")
drv_in_file.write(" rof_nthreads = "+str(int(nthreads_CLM))+"\n")
drv_in_file.write(" rof_pestride = 1\n")
drv_in_file.write(" rof_rootpe = "+str(int(rootpe_CLM[6]))+"\n")
drv_in_file.write(" wav_layout = 'concurrent'\n")
drv_in_file.write(" wav_ntasks = "+str(int(ntasks_CLM[7]))+"\n")
drv_in_file.write(" wav_nthreads = "+str(int(nthreads_CLM))+"\n")
drv_in_file.write(" wav_pestride = 1\n")
drv_in_file.write(" wav_rootpe = "+str(int(rootpe_CLM[7]))+"\n")
drv_in_file.write("/\n")
drv_in_file.write("&prof_inparm\n")
drv_in_file.write(" profile_barrier = .false.\n")
drv_in_file.write(" profile_depth_limit = 12\n")
drv_in_file.write(" profile_detail_limit = 0\n")
drv_in_file.write(" profile_disable = .false.\n")
drv_in_file.write(" profile_global_stats = .false.\n")
drv_in_file.write(" profile_single_file = .false.\n")
if Def_PP == 2 or Def_CESM_Multi_Instance:
drv_in_file.write(" profile_timer = 1\n")
else:
drv_in_file.write(" profile_timer = 1\n")
drv_in_file.write("/\n")
drv_in_file.write("&pio_default_inparm\n")
drv_in_file.write(" pio_async_interface = .false.\n")
if Def_PP or Def_CESM_Multi_Instance:
drv_in_file.write(" pio_blocksize = -1\n")
drv_in_file.write(" pio_buffer_size_limit = -1\n")
drv_in_file.write(" pio_debug_level = 0\n")
drv_in_file.write(" pio_numiotasks = 1\n") # only 1 works for netcdf4c
drv_in_file.write(" pio_root = 1\n")
drv_in_file.write(" pio_stride = 1\n") # only 1 works for netcdf4c
drv_in_file.write(" pio_typename = 'netcdf4c'\n")
else:
drv_in_file.write(" pio_blocksize = -1\n")
drv_in_file.write(" pio_buffer_size_limit = -1\n")
drv_in_file.write(" pio_debug_level = 0\n")
drv_in_file.write(" pio_numiotasks = -1\n")
drv_in_file.write(" pio_root = 1\n")
drv_in_file.write(" pio_stride = -1\n")
drv_in_file.write(" pio_typename = 'netcdf'\n")
drv_in_file.write("/\n")
drv_in_file.close()
def Write_45_drv_flds_in(drv_flds_in_file_name, megan_factors_file_path, megan_factors_file_name):
    """Write the CLM4.5 'drv_flds_in' driver-fields namelist file.

    Emits an empty &drydep_inparm group and a &megan_emis_nl group that
    points at the MEGAN emission-factors file and lists the emitted species.

    Arguments:
        drv_flds_in_file_name   -- output namelist file name
        megan_factors_file_path -- directory of the MEGAN factors file
        megan_factors_file_name -- file name of the MEGAN factors file
    """
    with open(drv_flds_in_file_name, 'w') as drv_flds_in_file:
        drv_flds_in_file.write("&drydep_inparm\n")
        drv_flds_in_file.write("/\n")
        # BUG FIX: the group header previously lacked a trailing newline,
        # fusing "&megan_emis_nl" with the megan_factors_file entry and
        # producing an unparsable Fortran namelist.
        drv_flds_in_file.write("&megan_emis_nl\n")
        drv_flds_in_file.write(" megan_factors_file = "+repr(megan_factors_file_path+megan_factors_file_name)+ "\n")
        drv_flds_in_file.write(" megan_specifier = 'ISOP = isoprene', 'C10H16 = pinene_a + carene_3 + thujene_a', 'CH3OH = methanol', 'C2H5OH = ethanol',")
        # BUG FIX: a newline was also missing before the closing "/", which
        # glued "...acetone'" and "/" onto one line.
        drv_flds_in_file.write(" 'CH2O = formaldehyde', 'CH3CHO = acetaldehyde', 'CH3COOH = acetic_acid', 'CH3COCH3 = acetone'\n")
        drv_flds_in_file.write("/\n")
def Write_rof_in(rof_in_file_name, frivinp_rtm_path, frivinp_rtm_name):
    """Write the river-runoff (RTM) 'rof_in' namelist file.

    The runoff model is disabled (rtm_mode = 'NULL'); the river-input path
    arguments are kept for interface compatibility but are intentionally
    unused in this configuration.

    Arguments:
        rof_in_file_name -- output namelist file name
        frivinp_rtm_path -- unused (kept for call-site compatibility)
        frivinp_rtm_name -- unused (kept for call-site compatibility)
    """
    # Cleanup: removed a large block of commented-out legacy rtm_inparm
    # settings (finidat_rtm, flood_mode, rtm_tstep, ...) that predated the
    # NULL-mode configuration.
    with open(rof_in_file_name, 'w') as rof_in_file:
        rof_in_file.write("&rtm_inparm\n")
        rof_in_file.write(" rtm_effvel = 'ACTIVE'\n")
        rof_in_file.write(" rtm_mode = 'NULL'\n")
        rof_in_file.write("/\n")
def Write_lnd_in(Run_Dir, lnd_in_file_name,Model_Driver,dtime,rtm_nsteps,domain_file_lnd_path, domain_name, rdirc_name,fatmgrid_name,fatmlndfrc_name,fglcmask_name,finidat_name,\
                 flndtopo_name,fndepdat_name,fpftcon_name,frivinp_rtm_name,fsnowaging_name,fsnowoptics_name,fsurdat_name, popd_streams_name, light_streams_name,\
                 wrtdia,hist_nhtfrq,hist_mfilt,hist_crtinic,hist_dov2xy,hist_ndens,hist_type1d_pertape,hist_empty_htapes,hist_avgflag_pertape,hist_fincl1,hist_fexcl1,first_year):
    """Write the CLM land-model 'lnd_in' namelist file.

    Emits the &clm_inparm group (surface/initial datasets, history-tape
    settings) plus the nitrogen-deposition, population-density, lightning,
    hydrology and (for CN/spinup runs) CH4 groups.  Several groups are only
    populated when Model_Driver selects a CN/BGC/spinup configuration;
    otherwise they are written empty.  Unused parameters (e.g. wrtdia,
    rtm_nsteps, several dataset names) are kept for call-site compatibility.
    """
    lnd_in_file = open(lnd_in_file_name,'w')
    lnd_in_file.write("&clm_inparm\n")
    lnd_in_file.write(" albice = 0.60,0.40\n")
    lnd_in_file.write(" co2_ppmv = 367.0\n")
    lnd_in_file.write(" co2_type = 'constant'\n")
    lnd_in_file.write(" create_crop_landunit = .false.\n")
    lnd_in_file.write(" dtime = "+str(dtime)+"\n")
    #lnd_in_file.write(" fatmgrid_name = "+fatmgrid_name+"\n")
    lnd_in_file.write(" fatmlndfrc = "+repr(domain_file_lnd_path+fatmlndfrc_name)+"\n")
    #lnd_in_file.write(" fglcmask_name = "+fglcmask_name+"\n")
    #if finidat_name == "":
    # lnd_in_file.write(" finidat = ''\n")
    #else:
    # lnd_in_file.write(" finidat = "+repr(Run_Dir+finidat_name)+"\n")
    #lnd_in_file.write(" flndtopo = "+repr(Run_Dir+flndtopo_name)+"\n")
    # Dataset paths are resolved relative to the run directory.
    lnd_in_file.write(" fpftcon = "+repr(Run_Dir+fpftcon_name)+"\n")
    lnd_in_file.write(" fsnowaging = "+repr(Run_Dir+fsnowaging_name)+"\n")
    lnd_in_file.write(" fsnowoptics = "+repr(Run_Dir+fsnowoptics_name)+"\n")
    lnd_in_file.write(" fsurdat = "+repr(Run_Dir+fsurdat_name)+"\n")
    lnd_in_file.write(" maxpatch_glcmec = 0\n")
    if Model_Driver == "CLM_45":
        lnd_in_file.write(" more_vertlayers = .false.\n")
    lnd_in_file.write(" nsegspc = 1\n") # default is 20, but for parflow it should be 1, then the decomposition is right to the row number
    # History tapes: the caller's settings apply to tape 1; tapes 2 and 3
    # are hard-wired daily-average tapes (see hist_fincl2/3 below).
    lnd_in_file.write(" hist_nhtfrq = " + str(hist_nhtfrq) + ", -24, -24" + "\n")
    lnd_in_file.write(" hist_mfilt = " + str(hist_mfilt) + ", 30, 30" + "\n")
    #lnd_in_file.write(" hist_crtinic = " + hist_crtinic + "\n")
    # changed by dominik 06042016
    #lnd_in_file.write(" hist_dov2xy = "+hist_dov2xy+"\n")
    # NOTE(review): the next two entries lack the leading space used by every
    # other entry in this group; presumably harmless to the namelist parser,
    # but inconsistent -- confirm before normalizing.
    lnd_in_file.write("hist_dov2xy = .true., .true., .false.\n")
    lnd_in_file.write("hist_type1d_pertape = 'PFTS', 'PFTS', 'PFTS'\n")
    lnd_in_file.write(" hist_ndens = " + str(hist_ndens) + "\n")
    #lnd_in_file.write(" hist_type1d_pertape = " + hist_type1d_pertape + "\n")
    lnd_in_file.write(" hist_empty_htapes = " + hist_empty_htapes + "\n")
    lnd_in_file.write(" hist_avgflag_pertape = " + hist_avgflag_pertape + ", 'A', 'A'" + "\n")
    lnd_in_file.write(" hist_fincl1 = " + hist_fincl1 + "\n")
    lnd_in_file.write(" hist_fexcl1 = " + hist_fexcl1 + "\n")
    lnd_in_file.write(" outnc_large_files = .true.\n")
    # 20-03.2016 dominik
    lnd_in_file.write(" hist_fincl2 = 'H2OSOI'" + "\n")
    # 06-04.2016 dominik
    lnd_in_file.write(" hist_fincl3 = 'TLAI'" + "\n")
    # Accelerated-decomposition spinup state: 1 = spinning up, 0 = normal.
    if Model_Driver == "CLM_BGC_SpinUp":
        lnd_in_file.write(" spinup_state = 1\n")
    else:
        if Model_Driver == "CLM_BGC":
            lnd_in_file.write(" spinup_state = 0\n")
    if Model_Driver == "CLM_CN":
        lnd_in_file.write(" suplnitro = 'PROG_CROP_ONLY'\n")
    lnd_in_file.write(" urban_hac = 'ON'\n")
    lnd_in_file.write(" urban_traffic = .false.\n")
    lnd_in_file.write("/\n")
    # Nitrogen-deposition stream: populated only for CN / ad-spinup runs.
    lnd_in_file.write("&ndepdyn_nml\n")
    if Model_Driver == "CLM_CN" or Model_Driver == "cesm_ad_spinup":
        lnd_in_file.write(" ndepmapalgo = 'bilinear'\n")
        lnd_in_file.write(" stream_fldfilename_ndep = "+repr(Run_Dir+fndepdat_name)+"\n")
        lnd_in_file.write(" stream_year_first_ndep = 1850\n")
        lnd_in_file.write(" stream_year_last_ndep = 2005\n")
    lnd_in_file.write("/\n")
    # Population-density stream (fire model forcing): CN / ad-spinup only.
    lnd_in_file.write("&popd_streams\n")
    if Model_Driver == "CLM_CN" or Model_Driver == "cesm_ad_spinup":
        lnd_in_file.write(" popdensmapalgo = 'bilinear'\n")
        lnd_in_file.write(" stream_fldfilename_popdens = "+repr(Run_Dir+popd_streams_name)+"\n")
        lnd_in_file.write(" stream_year_first_popdens = 1850\n")
        lnd_in_file.write(" stream_year_last_popdens = 2010\n")
    lnd_in_file.write("/\n")
    # Lightning stream (fire model forcing): CN / ad-spinup only.
    lnd_in_file.write("&light_streams\n")
    if Model_Driver == "CLM_CN" or Model_Driver == "cesm_ad_spinup":
        lnd_in_file.write(" lightngmapalgo = 'bilinear'\n")
        lnd_in_file.write(" stream_fldfilename_lightng = "+repr(Run_Dir+light_streams_name)+"\n")
        lnd_in_file.write(" stream_year_first_lightng = 0001\n")
        lnd_in_file.write(" stream_year_last_lightng = 0001\n")
    lnd_in_file.write("/\n")
    lnd_in_file.write("&clm_hydrology1_inparm\n")
    lnd_in_file.write(" oldfflag = 0\n") #Use old snow cover fraction from Niu et al. 2007
    lnd_in_file.write("/\n")
    lnd_in_file.write("&clm_soilhydrology_inparm\n")
    lnd_in_file.write(" h2osfcflag = 0\n") #If surface water is active or not
    lnd_in_file.write(" origflag = 0\n") #Use original CLM4 soil hydraulic properties
    lnd_in_file.write("/\n")
    if Model_Driver == "CLM_CN" or Model_Driver == "cesm_ad_spinup":
        lnd_in_file.write(" &ch4par_in\n")
        lnd_in_file.write(" fin_use_fsat = .true.\n")
        lnd_in_file.write(" /\n")
    # Trailing provenance footer mimicking CLM's build-namelist output.
    lnd_in_file.write("#!--------------------------------------------------------------------------------------------------------------------------\n")
    lnd_in_file.write("#! lnd_in:: Comment:\n")
    lnd_in_file.write("#! This namelist was created using the following command-line:\n")
    lnd_in_file.write("#! /lustrefs/lzhpc84/Library/cesm1_2_0/models/lnd/clm/bld/CLM build-namelist -infile /lustrefs/lzhpc84/Library/cesm1_2_0/scripts/sp_clm_ens_2/Buildconf/clmconf/cesm_namelist -csmdata /lustrefs/lzhpc84/DAS_Data/SysModel/CLM/inputdata -inputdata /lustrefs/lzhpc84/Library/cesm1_2_0/scripts/sp_clm_ens_2/Buildconf/clm.input_data_list -ignore_ic_year -namelist &clm_inparm start_ymd = 00010101 / -use_case 2000_control -res 1.9x2.5 -clm_start_type startup -clm_startfile I2000CN_f19_g16_c100503.clm2.r.0001-01-01-00000.nc -l_ncpl 48 -lnd_frac /lustrefs/lzhpc84/DAS_Data/SysModel/CLM/inputdata/share/domains/domain.lnd.fv1.9x2.5_gx1v6.090206.nc -glc_nec 0 -co2_ppmv 367.0 -co2_type constant -config /lustrefs/lzhpc84/Library/cesm1_2_0/scripts/sp_clm_ens_2/Buildconf/clmconf/config_cache.xml\n")
    lnd_in_file.write("#! For help on options use: /lustrefs/lzhpc84/Library/cesm1_2_0/models/lnd/clm/bld/CLM build-namelist -help\n")
    lnd_in_file.write("#!--------------------------------------------------------------------------------------------------------------------------\n")
    lnd_in_file.close()
def Write_Config_Files(datm_in_file_name,datm_atm_in_file_name,atm_modelio_file_name,cpl_modelio_file_name,glc_modelio_file_name,ice_modelio_file_name,lnd_modelio_file_name,ocn_modelio_file_name,rof_modelio_file_name,
                       wav_modelio_file_name, logfile_atm, logfile_cpl, logfile_glc, logfile_ice, logfile_lnd, logfile_ocn, logfile_rof, logfile_wav, Run_Dir):
    """Write 'datm_in' plus one '<comp>_modelio' namelist per CESM component.

    Each modelio file points the component's log file at Run_Dir and sets
    all component-level PIO overrides to -99 ("use driver defaults").  The
    ./timing/checkpoints directory is created in the current working
    directory if missing.

    Arguments:
        datm_in_file_name     -- output file name for the datm namelist
        datm_atm_in_file_name -- name written into datm_in as 'atm_in'
        *_modelio_file_name   -- output file name per component modelio file
        logfile_*             -- log file name per component
        Run_Dir               -- directory written as the log output dir (diro)
    """
    def _write_modelio(file_name, logfile, pio_root):
        # Write one component's modelio namelist (identical layout for all
        # components; only the log file name and pio_root differ).
        modelio_file = open(file_name, 'w')
        modelio_file.write('&modelio\n')
        modelio_file.write(' diri = "."\n')
        modelio_file.write(' diro = '+repr(Run_Dir)+'\n')
        modelio_file.write(' logfile = '+repr(logfile)+'\n')
        modelio_file.write('/\n')
        modelio_file.write('&pio_inparm\n')
        modelio_file.write(' pio_numiotasks = -99\n')
        modelio_file.write(' pio_root = '+str(pio_root)+'\n')
        modelio_file.write(' pio_stride = -99\n')
        modelio_file.write(' pio_typename = "nothing"\n')
        modelio_file.write('/\n')
        modelio_file.close()
    if not os.path.exists("timing/checkpoints"):
        os.makedirs("timing/checkpoints")
    datm_in = open(datm_in_file_name,'w')
    datm_in.write("&datm_nml\n")
    datm_in.write(" atm_in = "+repr(datm_atm_in_file_name)+"\n")
    datm_in.write(" decomp = '1d'\n")
    datm_in.write(" iradsw = 1\n")
    datm_in.write(" presaero = .true.\n")
    datm_in.write(" restfilm = 'undefined'\n")
    datm_in.write(" restfils = 'undefined'\n")
    datm_in.write(" /\n")
    datm_in.close()
    # NOTE(review): the original code used pio_root = 0 for ocn but -99 for
    # every other component; that asymmetry is preserved deliberately.
    for modelio_name, logfile, pio_root in (
            (atm_modelio_file_name, logfile_atm, -99),
            (cpl_modelio_file_name, logfile_cpl, -99),
            (glc_modelio_file_name, logfile_glc, -99),
            (ice_modelio_file_name, logfile_ice, -99),
            (lnd_modelio_file_name, logfile_lnd, -99),
            (ocn_modelio_file_name, logfile_ocn, 0),
            (rof_modelio_file_name, logfile_rof, -99),
            (wav_modelio_file_name, logfile_wav, -99)):
        _write_modelio(modelio_name, logfile, pio_root)
def Call_CLM_3D(Def_First_Run,Def_CESM_Multi_Instance, Run_Dir_Home, Run_Dir_Multi_Instance, Run_Dir, Run_Dir_Array, Ensemble_Number, num_processors, DasPy_Path, Model_Path, Model_Driver, Def_SpinUp, Def_PP, Def_Print,align_year,first_year,last_year,\
domain_file_path,Forcing_File_Path, Forcing_File_Path_Array, domain_file_lnd_path, domain_name, rdirc_name, aero_file_path,aero_file_name, megan_factors_file_path, megan_factors_file_name,frivinp_rtm_path,frivinp_rtm_name, \
case_name,hostname,orb_iyear_ad,start_type,username,
atm_cpl_dt,lnd_cpl_dt,ocn_cpl_dt,ice_cpl_dt,glc_cpl_dt,rof_cpl_dt, wav_cpl_dt,
end_restart,restart_option,start_tod,start_ymd,stop_tod,stop_ymd,ntasks_CLM,rootpe_CLM,nthreads_CLM,\
dtime,rtm_nsteps,fatmgrid_name,fatmlndfrc_name,fglcmask_name, finidat_name, flndtopo_name,fndepdat_name,fpftcon_name,fsnowaging_name,fsnowoptics_name,fsurdat_name, popd_streams_name, light_streams_name,\
wrtdia,hist_nhtfrq,hist_mfilt,hist_crtinic,hist_dov2xy,hist_ndens,hist_type1d_pertape,hist_empty_htapes,hist_avgflag_pertape, hist_fincl1,hist_fexcl1,\
Region_Name, Stop_Year, Stop_Month, Stop_Day, stop_tod_string, seq_maps_file_name, Row_Numbers_String, Col_Numbers_String, DAS_Data_Path,
COUP_OAS_PFL, CESM_Init_Flag, fcomm, fcomm_null, fcomm_rank):
if Def_Print:
print "fcomm_rank",fcomm_rank
print "Run_Dir",Run_Dir
if Def_CESM_Multi_Instance == 1:
history_file_name = Run_Dir_Multi_Instance + Region_Name + '.clm2_0001' + '.h0.' + Stop_Year + '-' + Stop_Month + '-' + Stop_Day + '-' + stop_tod_string + '.nc'
if Ensemble_Number == 1:
os.chdir(Run_Dir_Home)
Run_Dir = Run_Dir_Home
datm_atm_in_file_name = "datm_atm_in"
datm_streams_txt_file_name_rad = "datm.streams.rad.txt"
datm_streams_txt_file_name_prec = "datm.streams.prec.txt"
datm_streams_txt_file_name_tair = "datm.streams.tair.txt"
presaero_stream_txt_file_name = "presaero.stream.txt"
if fcomm_rank == 0:
Write_datm_atm_in(datm_streams_txt_file_name_rad, datm_streams_txt_file_name_precip, datm_streams_txt_file_name_tair, presaero_stream_txt_file_name, domain_file_path,domain_name, rdirc_name,align_year,first_year,last_year)
Forcing_File_Path = Forcing_File_Path_Array[0]
Write_datm_streams_txt_rad(datm_streams_txt_file_name_rad, Def_SpinUp, domain_file_path,domain_name, rdirc_name, Forcing_File_Path, start_ymd, stop_ymd)
Write_datm_streams_txt_prec(datm_streams_txt_file_name_prec, Def_SpinUp, domain_file_path,domain_name, rdirc_name, Forcing_File_Path, start_ymd, stop_ymd)
Write_datm_streams_txt_tair(datm_streams_txt_file_name_tair, Def_SpinUp, domain_file_path,domain_name, rdirc_name, Forcing_File_Path, start_ymd, stop_ymd)
Write_presaero_stream_txt(presaero_stream_txt_file_name,aero_file_path,aero_file_name)
lnd_in_file_name = "lnd_in"
Write_lnd_in(Run_Dir, lnd_in_file_name,Model_Driver,dtime,rtm_nsteps,domain_file_lnd_path, domain_name, rdirc_name, fatmgrid_name,fatmlndfrc_name,fglcmask_name,finidat_name,\
flndtopo_name,fndepdat_name,fpftcon_name,frivinp_rtm_name,fsnowaging_name,fsnowoptics_name,fsurdat_name, popd_streams_name, light_streams_name,\
wrtdia,hist_nhtfrq,hist_mfilt,hist_crtinic,hist_dov2xy,hist_ndens,hist_type1d_pertape,hist_empty_htapes,hist_avgflag_pertape,hist_fincl1,hist_fexcl1,first_year)
rof_in_file_name = "rof_in"
Write_rof_in(rof_in_file_name,frivinp_rtm_path,frivinp_rtm_name)
drv_flds_in_file_name = "drv_flds_in"
Write_45_drv_flds_in(drv_flds_in_file_name, megan_factors_file_path, megan_factors_file_name)
if Def_First_Run:
datm_in_file_name = "datm_in"
atm_modelio_file_name = "atm_modelio.nml"
cpl_modelio_file_name = "cpl_modelio.nml"
glc_modelio_file_name = "glc_modelio.nml"
ice_modelio_file_name = "ice_modelio.nml"
lnd_modelio_file_name = "lnd_modelio.nml"
ocn_modelio_file_name = "ocn_modelio.nml"
rof_modelio_file_name = "rof_modelio.nml"
wav_modelio_file_name = "wav_modelio.nml"
logfile_atm = "atm.log"
logfile_cpl = "cpl.log"
logfile_glc = "glc.log"
logfile_ice = "ice.log"
logfile_lnd = "lnd.log"
logfile_ocn = "ocn.log"
logfile_rof = "rof.log"
logfile_wav = "wav.log"
Run_Dir = Run_Dir_Home
Write_Config_Files(datm_in_file_name,datm_atm_in_file_name,atm_modelio_file_name,cpl_modelio_file_name,glc_modelio_file_name,ice_modelio_file_name,lnd_modelio_file_name,ocn_modelio_file_name,rof_modelio_file_name,
wav_modelio_file_name, logfile_atm, logfile_cpl, logfile_glc, logfile_ice, logfile_lnd, logfile_ocn, logfile_rof, logfile_wav, Run_Dir)
elif Ensemble_Number > 1:
os.chdir(Run_Dir_Multi_Instance)
if fcomm_rank == 0:
for Ens_Index in range(Ensemble_Number):
Run_Dir = Run_Dir_Array[Ens_Index]
Ens_Index_String = str(Ens_Index+1).zfill(4)
datm_atm_in_file_name = "datm_atm_in_"+Ens_Index_String
datm_streams_txt_file_name_rad = "datm.streams.rad.txt_"+Ens_Index_String
datm_streams_txt_file_name_prec = "datm.streams.prec.txt_"+Ens_Index_String
datm_streams_txt_file_name_tair = "datm.streams.tair.txt_"+Ens_Index_String
presaero_stream_txt_file_name = "presaero.stream.txt_"+Ens_Index_String
Write_datm_atm_in(datm_atm_in_file_name, datm_streams_txt_file_name_rad, datm_streams_txt_file_name_prec, datm_streams_txt_file_name_tair, presaero_stream_txt_file_name, domain_file_path,domain_name, rdirc_name,align_year,first_year,last_year)
Forcing_File_Path = Forcing_File_Path_Array[Ens_Index]
Write_datm_streams_txt_rad(datm_streams_txt_file_name_rad, Def_SpinUp, domain_file_path,domain_name, rdirc_name, Forcing_File_Path, start_ymd, stop_ymd)
Write_datm_streams_txt_prec(datm_streams_txt_file_name_prec, Def_SpinUp, domain_file_path,domain_name, rdirc_name, Forcing_File_Path, start_ymd, stop_ymd)
erite_datm_streams_txt_tair(datm_streams_txt_file_name_tair, Def_SpinUp, domain_file_path,domain_name, rdirc_name, Forcing_File_Path, start_ymd, stop_ymd)
Write_presaero_stream_txt(presaero_stream_txt_file_name,aero_file_path,aero_file_name)
lnd_in_file_name = "lnd_in_"+Ens_Index_String
Write_lnd_in(Run_Dir, lnd_in_file_name,Model_Driver,dtime,rtm_nsteps,domain_file_lnd_path, domain_name, rdirc_name, fatmgrid_name,fatmlndfrc_name,fglcmask_name,finidat_name,\
flndtopo_name,fndepdat_name,fpftcon_name,frivinp_rtm_name,fsnowaging_name,fsnowoptics_name,fsurdat_name, popd_streams_name, light_streams_name,\
wrtdia,hist_nhtfrq,hist_mfilt,hist_crtinic,hist_dov2xy,hist_ndens,hist_type1d_pertape,hist_empty_htapes,hist_avgflag_pertape,hist_fincl1,hist_fexcl1,first_year)
rof_in_file_name = "rof_in_"+Ens_Index_String
Write_rof_in(rof_in_file_name,frivinp_rtm_path,frivinp_rtm_name)
if Def_First_Run:
datm_in_file_name = "datm_in_"+Ens_Index_String
atm_modelio_file_name = "atm_modelio.nml_"+Ens_Index_String
cpl_modelio_file_name = "cpl_modelio.nml"
glc_modelio_file_name = "glc_modelio.nml_"+Ens_Index_String
ice_modelio_file_name = "ice_modelio.nml_"+Ens_Index_String
lnd_modelio_file_name = "lnd_modelio.nml_"+Ens_Index_String
ocn_modelio_file_name = "ocn_modelio.nml_"+Ens_Index_String
rof_modelio_file_name = "rof_modelio.nml_"+Ens_Index_String
wav_modelio_file_name = "wav_modelio.nml"
logfile_atm = "atm.log"
logfile_cpl = "cpl.log"
logfile_glc = "glc.log"
logfile_ice = "ice.log"
logfile_lnd = "lnd.log"
logfile_ocn = "ocn.log"
logfile_rof = "rof.log"
logfile_wav = "wav.log"
Write_Config_Files(datm_in_file_name,datm_atm_in_file_name,atm_modelio_file_name,cpl_modelio_file_name,glc_modelio_file_name,ice_modelio_file_name,lnd_modelio_file_name,ocn_modelio_file_name,rof_modelio_file_name,
wav_modelio_file_name, logfile_atm, logfile_cpl, logfile_glc, logfile_ice, logfile_lnd, logfile_ocn, logfile_rof, logfile_wav, Run_Dir)
else:
history_file_name = Run_Dir + Region_Name + '.clm2' + '.h0.' + Stop_Year + '-' + Stop_Month + '-' + Stop_Day + '-' + stop_tod_string + '.nc'
os.chdir(Run_Dir)
datm_atm_in_file_name = "datm_atm_in"
datm_streams_txt_file_name_rad = "datm.streams.rad.txt"
datm_streams_txt_file_name_prec = "datm.streams.prec.txt"
datm_streams_txt_file_name_tair = "datm.streams.tair.txt"
presaero_stream_txt_file_name = "presaero.stream.txt"
if fcomm_rank == 0:
Write_datm_atm_in(datm_atm_in_file_name, datm_streams_txt_file_name_rad, datm_streams_txt_file_name_prec, datm_streams_txt_file_name_tair, presaero_stream_txt_file_name, domain_file_path,domain_name, rdirc_name,align_year,first_year,last_year)
Write_datm_streams_txt_rad(datm_streams_txt_file_name_rad, Def_SpinUp, domain_file_path,domain_name, rdirc_name, Forcing_File_Path, start_ymd, stop_ymd)
Write_datm_streams_txt_prec(datm_streams_txt_file_name_prec, Def_SpinUp, domain_file_path,domain_name, rdirc_name, Forcing_File_Path, start_ymd, stop_ymd)
Write_datm_streams_txt_tair(datm_streams_txt_file_name_tair, Def_SpinUp, domain_file_path,domain_name, rdirc_name, Forcing_File_Path, start_ymd, stop_ymd)
Write_presaero_stream_txt(presaero_stream_txt_file_name,aero_file_path,aero_file_name)
lnd_in_file_name = "lnd_in"
Write_lnd_in(Run_Dir, lnd_in_file_name,Model_Driver,dtime,rtm_nsteps,domain_file_lnd_path, domain_name, rdirc_name, fatmgrid_name,fatmlndfrc_name,fglcmask_name,finidat_name,\
flndtopo_name,fndepdat_name,fpftcon_name,frivinp_rtm_name,fsnowaging_name,fsnowoptics_name,fsurdat_name, popd_streams_name, light_streams_name,\
wrtdia,hist_nhtfrq,hist_mfilt,hist_crtinic,hist_dov2xy,hist_ndens,hist_type1d_pertape,hist_empty_htapes,hist_avgflag_pertape,hist_fincl1,hist_fexcl1,first_year)
if Def_First_Run:
rof_in_file_name = "rof_in"
Write_rof_in(rof_in_file_name,frivinp_rtm_path,frivinp_rtm_name)
datm_in_file_name = "datm_in"
atm_modelio_file_name = "atm_modelio.nml"
cpl_modelio_file_name = "cpl_modelio.nml"
glc_modelio_file_name = "glc_modelio.nml"
ice_modelio_file_name = "ice_modelio.nml"
lnd_modelio_file_name = "lnd_modelio.nml"
ocn_modelio_file_name = "ocn_modelio.nml"
rof_modelio_file_name = "rof_modelio.nml"
wav_modelio_file_name = "wav_modelio.nml"
logfile_atm = "atm.log"
logfile_cpl = "cpl.log"
logfile_glc = "glc.log"
logfile_ice = "ice.log"
logfile_lnd = "lnd.log"
logfile_ocn = "ocn.log"
logfile_rof = "rof.log"
logfile_wav = "wav.log"
Write_Config_Files(datm_in_file_name,datm_atm_in_file_name,atm_modelio_file_name,cpl_modelio_file_name,glc_modelio_file_name,ice_modelio_file_name,lnd_modelio_file_name,ocn_modelio_file_name,rof_modelio_file_name,
wav_modelio_file_name, logfile_atm, logfile_cpl, logfile_glc, logfile_ice, logfile_lnd, logfile_ocn, logfile_rof, logfile_wav, Run_Dir)
if Def_First_Run:
Write_seq_maps(seq_maps_file_name, DAS_Data_Path, Row_Numbers_String, Col_Numbers_String, Region_Name)
if fcomm_rank == 0:
Write_drv_in(Def_PP, Model_Driver,Def_CESM_Multi_Instance,Ensemble_Number,num_processors,case_name,hostname,orb_iyear_ad,start_type,username,
atm_cpl_dt,lnd_cpl_dt,ocn_cpl_dt,ice_cpl_dt,glc_cpl_dt,rof_cpl_dt, wav_cpl_dt,
end_restart,restart_option,start_tod,start_ymd,stop_tod,stop_ymd,ntasks_CLM,rootpe_CLM,nthreads_CLM)
if os.path.exists(history_file_name):
os.remove(history_file_name)
if Def_PP == 2:
fcomm.barrier()
fcomm.Barrier()
CLM_Output = open("CLM_Output.txt","w")
subprocess.call(Model_Path, stdout=CLM_Output, stderr=CLM_Output, shell=True)
CLM_Output.close()
os.chdir(DasPy_Path)
return
|
import os
import os.path as osp
import mmcv
from glob import glob
from annotation_loader import parse_tables_from_xml
img_exts = [".bmp", ".jpg", ".jpeg", ".png", ".tiff"]
#https://mmdetection.readthedocs.io/en/latest/2_new_data_model.html?highlight=coco#coco-annotation-format
#or https://github.com/open-mmlab/mmdetection/tree/master/tools/convert_datasets
def convert_icdar2019_to_coco(ann_files_path, out_file, img_prefix):
    """Convert ICDAR2019 table ground truth into a COCO-format annotation file.

    Scans ``img_prefix`` for image files, loads the matching
    ``<image_id>.xml`` ground truth from ``ann_files_path`` and dumps a
    COCO-style dict (images / annotations / categories) to ``out_file``.
    """
    category_ids = {name: i for i, name in enumerate(['table', 'cell'])}
    image_entries = []
    annotation_entries = []
    next_ann_id = 0
    image_names = [osp.basename(path)
                   for path in glob(osp.join(img_prefix, '*.*'))
                   if osp.splitext(path.lower())[1] in img_exts]
    for image_index, image_name in enumerate(mmcv.track_iter_progress(image_names)):
        stem = osp.splitext(osp.basename(image_name))[0]
        #filename = f'{stem}.jpg' # TODO check this vs img_path
        img_path = osp.join(img_prefix, image_name)
        img_height, img_width = mmcv.imread(img_path).shape[:2]
        image_entries.append(dict(
            id=image_index,
            file_name=image_name,
            height=img_height,
            width=img_width))
        # Ground truth lives in a sibling XML file named after the image stem.
        xml_path = f'{ann_files_path}/{stem}.xml'
        for table in parse_tables_from_xml(xml_path):
            box = table.bbox
            annotation_entries.append(dict(
                image_id=image_index,
                id=next_ann_id,
                category_id=category_ids['table'],
                bbox=box,
                area=box[2] * box[3],
                segmentation=[table.bounds],
                iscrowd=0))
            next_ann_id += 1
    coco_format_json = dict(
        images=image_entries,
        annotations=annotation_entries,
        categories=[{'id': category_ids[name], 'name': name}
                    for name in category_ids])
    os.makedirs(osp.dirname(out_file), exist_ok=True)
    mmcv.dump(coco_format_json, out_file)
def restructure_ICDAR2019_dataset(root, out_dir, track="TRACKA", year="2014"):
    """Restructure an ICDAR2019 table dataset into a COCO-style layout.

    Converts the train and val ground-truth XML files into COCO JSON under
    ``<out_dir>/annotations`` and moves the split images into
    ``<out_dir>/train<year>`` and ``<out_dir>/val<year>``.  A ``logs``
    directory is also created under ``out_dir``.

    Args:
        root: dataset root containing ``training/<track>/ground_truth``,
            ``test/<track>`` and ``test_ground_truth/<track>``.
        out_dir: destination directory for the restructured dataset.
        track: ICDAR2019 competition track subdirectory name.
        year: suffix used for the COCO annotation/image folder names.
    """
    def _prepare_split(split, ann_files_path, img_prefix, img_dest):
        # Convert one split's annotations to COCO and move its images.
        print(ann_files_path)
        print(img_prefix)
        out_annotations = osp.join(out_dir, "annotations", f"instances_{split}{year}.json")
        print(out_annotations)
        print(f"Converting {split} annotations...")
        convert_icdar2019_to_coco(ann_files_path=ann_files_path,
                                  out_file=out_annotations,
                                  img_prefix=img_prefix)
        print(f"Moving {split} images...")
        os.makedirs(img_dest, exist_ok=True)
        img_list = [fn for fn in glob(osp.join(img_prefix, '*.*'))
                    if osp.splitext(fn.lower())[1] in img_exts]
        for fn in mmcv.track_iter_progress(img_list):
            os.rename(fn, osp.join(img_dest, osp.basename(fn)))
    # Training split: images and ground-truth XML share the same directory.
    _prepare_split("train",
                   osp.join(root, "training", track, "ground_truth"),
                   osp.join(root, "training", track, "ground_truth"),
                   osp.join(out_dir, f"train{year}"))
    # Validation split uses the competition test set plus its ground truth.
    _prepare_split("val",
                   osp.join(root, "test_ground_truth", track),
                   osp.join(root, "test", track),
                   osp.join(out_dir, f"val{year}"))
    os.makedirs(osp.join(out_dir, "logs"), exist_ok=True)
|
# -*- coding: utf-8 -*-
"""
Created on Tue Sep 17 10:55:08 2019
@author: Vipin
"""
from sklearn.ensemble import RandomForestClassifier
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score,confusion_matrix,f1_score ,precision_score
import seaborn as sns
# Train and evaluate a RandomForest intrusion-detection model on each
# CICIDS-style capture file, recording per-file feature importances and
# precision / accuracy / F1 scores, plus a confusion-matrix heatmap per file.
fnames = ['ddos', 'wednesday', 'infiltration', 'portscan', 'tuesday', 'webattack', 'friday']
features_imp = pd.DataFrame(columns=['Features', 'ddos', 'wednesday', 'infiltration', 'portscan', 'tuesday', 'webattack', 'friday'])
#fnames=['webattack']
output_values = pd.DataFrame(columns=['Features', 'ddos', 'wednesday', 'infiltration', 'portscan', 'tuesday', 'webattack', 'friday'])
# BUG FIX: 'flag' was read inside the loop before ever being assigned,
# raising NameError on the first iteration; initialize it here.
flag = 1
for fname in fnames:
    data_total = pd.read_csv("Data/" + fname + ".csv", sep=",")
    # normalize column names: strip, spaces->underscores, '/s'->'_per_sec', lower-case
    data_total.columns = data_total.columns.str.strip().str.replace(' ', '_').str.replace("/s", '_per_sec').str.lower()
    data_total["flow_bytes_per_sec"] = data_total.flow_bytes_per_sec.astype(float)
    data_total["flow_packets_per_sec"] = data_total.flow_packets_per_sec.astype(float)
    # drop rows with infinite or missing rate values
    data_total = data_total.replace(np.inf, np.nan)
    data_total = data_total.dropna()
    label = data_total['label']
    # factorize(sort=True) gives stable integer codes + the label names
    Y, labels = label.factorize(sort=True)
    data_total = data_total.drop('label', axis=1)
    if (flag == 1):
        features_imp['Features'] = data_total.columns.values
        output_values['Features'] = ['Precision', 'Accuracy', 'F1_score']
        flag = 0
    seed = 123
    Xtrain, Xtest, Ytrain, Ytest = train_test_split(data_total, Y, test_size=0.2, random_state=seed)
    model = RandomForestClassifier(n_estimators=10, random_state=seed)
    # BUG FIX: the model was fit on (Xtest, Ytest) and then evaluated on the
    # same test split, which trivially inflates every metric; fit on the
    # training split instead.
    model.fit(Xtrain, Ytrain)
    feature_imp = model.feature_importances_
    features_imp[fname] = feature_imp
    #print(clf.feature_importances_)
    Ypred = model.predict(Xtest)
    conf_mat = confusion_matrix(Ytest, Ypred)
    plt.figure(figsize=(10, 10))
    sns.heatmap(conf_mat, xticklabels=labels, yticklabels=labels, annot=True, fmt="d");
    plt.title("Confusion matrix " + fname + " attacks")
    plt.ylabel('True class')
    plt.xlabel('Predicted class')
    precision = precision_score(Ytest, Ypred, average='micro')
    print("Precision is ", precision)
    f1 = f1_score(Ytest, Ypred, average='micro')
    print("F1 Score ", f1)
    accuracy = accuracy_score(Ytest, Ypred)
    print("Accuracy is ", accuracy)
    output = [precision, accuracy, f1]
    output_values[fname] = output  # Individual precision, accuracy and f1 score stored here
    #pickle.dump(model,open('portscan_rf.sav','wb'))
    #plt.show()
    plt.savefig(fname + "_rf.png")
|
# Generated by Django 3.0.1 on 2020-01-04 13:30
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add TimeSlot.associated_type: 0 = task, 1 = event, 2 = routine."""

    dependencies = [
        ('tasks', '0029_auto_20200103_1357'),
    ]

    operations = [
        migrations.AddField(
            model_name='timeslot',
            name='associated_type',
            field=models.IntegerField(choices=[(0, 'task'), (1, 'event'), (2, 'routine')], default=0, verbose_name='type'),
            # default=0 only backfills existing rows during the migration;
            # preserve_default=False drops it from the field afterwards
            preserve_default=False,
        ),
    ]
|
from alarm import alarm
from config import data

# Module-wide configuration/state object; rebinds the imported name to a
# live Data instance shared by priority().
data = data.Data()
def priority():
    """Prefix the current alarm text with the keyword-3a flag when the
    keyword occurs in it, then persist the result via alarm.write_alarm()."""
    text = str(data.alarm)
    prefixed = data.keyword_3a_flag + text if data.keyword_3a in text else text
    alarm.write_alarm(prefixed)
|
def calculate():
    """Append the N//2 largest digits of the global ``parts`` to ``score``.

    Repeatedly takes the current maximum, records it, and overwrites that
    slot with -99 so it cannot be selected again.  When N is odd, the
    original code additionally appends parts[0] after the loop (kept as-is).

    NOTE(review): the original body had the selection statements outside an
    empty ``for i in range(N//2):`` loop, which is a SyntaxError; moving
    them inside the loop is the only reading consistent with the driver
    below -- confirm against the intended algorithm.

    Uses module globals: N (int), parts (list[int], mutated), score (list,
    appended to).
    """
    for _ in range(N // 2):
        best = max(parts)
        score.append(best)
        parts[parts.index(best)] = -99
    if N % 2 == 1:
        score.append(parts[0])
# Driver: reads T test cases from stdin; each case is a count N followed by
# a string of N digits.  The score is accumulated through the module-level
# globals that calculate() operates on.
T = int(input())
for case_no in range(T):
    N = int(input())
    digit_str = input()
    # globals read/mutated by calculate(); names must stay exactly these
    parts = [int(digit_str[k]) for k in range(N)]
    score = [0]
    calculate()
    result = sum(score)
    print("Case #", case_no + 1, ": ", result)
|
"""
2 usages:
a. To smooth noisy detection results
b. To accelerate the whole procedure by merely using predict with some frames
rather than detecting per frame
"""
import cv2
import numpy as np
import math
from config import Configs
from video_helper import VideoHelper
from tracker import kcftracker
# First Trial:
# 6 element in a state: [xc, yc, vx, vy, w, h]
# 4 element in a measure: [zxc, zyc, zw, zh]
class KcfFilter(object):
    """Wraps a KCF tracker behind a Kalman-filter-like correct/predict API.

    The state/measure sizes are historical, from the first Kalman trial:
    state [xc, yc, vx, vy, w, h], measurement [zxc, zyc, zw, zh].
    Bounding boxes cross the API as [x_left, x_right, y_up, y_bottom].
    """

    def __init__(self, video_helper, frame):
        self.first_run = True
        self.dynamParamsSize = 6
        self.measureParamsSize = 4
        self.kcf = kcftracker.KCFTracker(True, True, True)

    def correct(self, bbx, frame):
        """Re-seed the tracker with a detection box and return its box.

        ``bbx`` arrives as [x_left, x_right, y_up, y_bottom]; the tracker
        expects a centre/size measurement [xc, yc, w, h].
        """
        width = bbx[1] - bbx[0] + 1
        height = bbx[3] - bbx[2] + 1
        centre_x = int(bbx[0] + width / 2)
        centre_y = int(bbx[2] + height / 2)
        measurement = np.array([[centre_x, centre_y, width, height]], dtype=np.float32).T
        # first_run is never cleared (historical choice), so every detection
        # re-initialises the tracking window before updating.
        if self.first_run is True:
            self.kcf.init(measurement, frame)
        tracked = self.kcf.update(frame)
        self.velocity = np.array([tracked[2], tracked[3]])
        return self.get_bbx_from_kcf_form(tracked)

    def get_predicted_bbx(self, frame):
        """Advance the tracker on *frame* and return the predicted box."""
        return self.get_bbx_from_kcf_form(self.kcf.update(frame))

    def get_bbx_from_kcf_form(self, kcf_form):
        """Convert [xc, yc, w, h] back to [x_left, x_right, y_up, y_bottom]."""
        centre_x = kcf_form[0]
        centre_y = kcf_form[1]
        half_w = kcf_form[2] / 2.0
        half_h = kcf_form[3] / 2.0
        return [
            math.ceil(centre_x - half_w),
            math.ceil(centre_x + half_w) - 1,
            math.ceil(centre_y - half_h),
            math.ceil(centre_y + half_h) - 1,
        ]
|
# coding:utf-8
import time
import ihelper
import iglobal
def __workspace_match_status(text, status):
    """Return *status* when any of its configured search strings occurs in
    *text*; otherwise return 0 (no match)."""
    patterns = iglobal.GIT_STATUS_PATTEN[status]
    if any(pattern in text for pattern in patterns):
        return status
    return 0
# Module-level smoke test: dump every configured git-status pattern set.
# NOTE: Python 2 syntax ('print' statement) -- this module predates Python 3.
__status = 0
out = 'this is'
for s_code, patterns in iglobal.GIT_STATUS_PATTEN.items():
    print patterns
|
from back.Model import Model
from body.RsvppsFileParser import RsvppsFileParser
from body.Validator import Validator
from front.Timer import Timer
class Controller:
    """Mediates between the RSVP-player front end, the word Model and the
    playback Timer.

    Keeps per-source reading positions, the current words-per-minute rate,
    the word being displayed and any user-visible error message.  Model
    exceptions are translated into the Controller's own exception types so
    the front end only depends on this class.
    """

    def __init__(self):
        self.__model = Model()
        self.__timer = Timer()
        self.__validator = Validator()
        self.__wpm = None
        # source filename -> index of the word currently shown (-1 = greeting)
        self.__source_to_index = {}
        self.__key_to_action = {'escape': self.__do_escape,
                                'shift': self.__do_shift,
                                'up' : self.__do_up,
                                'down' : self.__do_down,
                                'left' : self.__do_left,
                                'right': self.__do_right}
        self.__word = None
        self.__error_message = None
        # defaults
        self.__default_dict = {"wpm": 250,
                               "dem": None,
                               "ds": "example.txt",
                               "zw": "Space - Start/Stop"}
        # user settings file overrides the hard-coded defaults when present
        self.__default_dict = RsvppsFileParser.parse(
            self.__default_dict,
            ".rsvp-player-settings/default.rsvpps"
        )
        self.__default_word = self.__default_dict["zw"]
        self.__source = self.__default_dict["ds"]
        self.set_source(self.get_source())
        self.set_wpm(int(self.__default_dict["wpm"]))
        self.__default_em = self.__default_dict["dem"]
        # variables for front feedback
        self.set_word(self.__default_word)
        self.set_em(self.__default_em)
        self.__player_indicator = False

    #
    # Private
    #
    def __get_word(self):
        """Fetch the word at the current index of the current source,
        translating Model exceptions into Controller exceptions."""
        try:
            return self.__model.get_word(
                self.__source_to_index[self.get_source()])
        except Model.EndOfSourceException as exception:
            raise Controller.EndOfSourceException from exception
        except Model.StartOfSourceException as exception:
            raise Controller.StartOfSourceException from exception
        except Model.GreetingException as exception:
            raise Controller.GreetingException from exception

    def __get_orp(self, word_len):
        """Return the Optimal Recognition Point (index of the highlighted
        letter) for a word of length *word_len*."""
        if word_len == 1:
            orp = 0
        elif word_len < 6:
            orp = 1
        elif word_len < 10:
            orp = 2
        elif word_len < 14:
            orp = 3
        else:
            orp = 4
        return orp

    #
    # Public
    #
    def start_playing(self):
        """Start the word timer at the current wpm (no-op when wpm is 0/None)."""
        if not self.get_wpm():
            return
        self.set_pi(True)
        self.__timer.start(self.get_wpm(), self.get_next_word)

    def stop_playing(self):
        """Pause playback and clear the playing indicator."""
        self.set_pi(False)
        if self.__timer.is_not_deleted():
            self.__timer.stop()

    def go_to_start(self):
        """Rewind the current source to the greeting word and stop playback."""
        self.set_word(self.get_default_word())
        # -1 means "before the first word", i.e. the greeting position
        self.__source_to_index[self.get_source()] = -1
        self.stop_playing()
        self.__timer.delete()

    def change_source(self, source):
        """Switch to *source*, preserving the play/pause state across the
        switch; invalid names leave the previous source active."""
        if not source or self.__source == source:
            self.set_em(self.__default_dict['dem'])
            return
        pi_copy = self.get_pi()
        try:
            self.stop_playing()
            self.set_source(source)
            self.set_em(None)
            self.set_em(self.__default_dict['dem'])
        except (Controller.WrongSourceNameException, Validator.ValidationException):
            self.set_pi(pi_copy)
        if self.get_pi():
            self.start_playing()

    def change_speed(self, new_wpm):
        """Set a new wpm; restarts the timer when running, starts/stops it on
        0 <-> non-zero transitions, ignores invalid values."""
        try:
            self.set_wpm(int(new_wpm))
            if self.__timer.is_active():
                self.start_playing()
        except Controller.StartTimerException:
            if self.__timer.is_not_deleted() and self.get_pi():
                self.start_playing()
        except Controller.StopTimerException:
            self.stop_playing()
        except Validator.ValidationException as ignored:
            pass

    def get_next_word(self):
        """Advance one word; at the end of the source, stay on the last word
        and stop playback."""
        self.__source_to_index[self.get_source()] += 1
        try:
            self.set_word(self.__get_word())
        except Controller.EndOfSourceException:
            self.__source_to_index[self.get_source()] -= 1
            self.set_word(self.__get_word())
            self.stop_playing()

    def get_previous_word(self):
        """Step one word back; before the first word, show the greeting."""
        self.__source_to_index[self.get_source()] -= 1
        try:
            self.set_word(self.__get_word())
        except Controller.StartOfSourceException:
            self.__source_to_index[self.get_source()] += 1
            self.set_word(self.__get_word())
        except Controller.GreetingException:
            self.__source_to_index[self.get_source()] = -1
            self.set_word(self.get_default_word())

    def error_happened(self):
        """True when an error message is currently set."""
        return self.get_em() is not None

    def get_splitted_word(self):
        """Split the current word around its ORP letter as
        [prefix, highlighted_letter, suffix]."""
        word_len = len(self.get_word())
        orp = self.__get_orp(word_len)
        before = self.get_word()[:orp]
        red_symbol = self.get_word()[orp]
        after = self.get_word()[orp + 1:]
        return [before, red_symbol, after]

    def get_progress(self):
        """Reading progress through the current source as a 0..1 fraction."""
        idx = max(self.__source_to_index[self.__source], 0)
        return idx / (self.__model.get_cnt_words() - 1)

    #
    # Getters - Setters
    #
    def get_word(self):
        return self.__word

    def set_word(self, value):
        self.__word = value

    ####################################
    def get_default_word(self):
        return self.__default_word

    def set_default_word(self, value):
        self.__default_word = value

    ####################################
    def get_pi(self):
        # pi = "player indicator": True while playback is running
        return self.__player_indicator

    def set_pi(self, value: bool):
        self.__player_indicator = value

    ####################################
    def get_em(self):
        # em = "error message" shown by the front end
        return self.__error_message

    def set_em(self, value):
        if not value:
            # a single space keeps the front-end label height stable
            self.__error_message = " "
            return
        self.__error_message = "Error: " + value

    ####################################
    def get_wpm(self):
        return self.__wpm

    def set_wpm(self, wpm):
        """Validate and store the wpm; raises Start/StopTimerException on
        0 <-> non-zero transitions AFTER storing the new value (finally)."""
        self.__validator.validate('wpm', wpm)
        try:
            if wpm == 0 and self.__wpm is not None:
                raise Controller.StopTimerException()
            elif wpm > 0 and self.__wpm == 0:
                raise Controller.StartTimerException()
        finally:
            self.__wpm = wpm

    ####################################
    def get_source(self):
        return self.__source

    def get_source_cropped(self):
        """Source filename without its directory path."""
        return self.get_source().split('/')[-1]

    def set_source(self, source):
        """Validate *source*, load it into the Model and restore (or create)
        its reading position; sets an error message and re-raises on failure."""
        try:
            self.__validator.validate("filename", source)
            self.__model.set_source(source)
            self.__source = source
            if not source in self.__source_to_index.keys():
                self.__source_to_index[source] = -1
                self.set_word(self.get_default_word())
            else:
                self.set_word(self.__get_word())
        except Model.SourceFileException as exception:
            self.set_em("file do not exists")
            raise Controller.WrongSourceNameException from exception
        except Controller.StartOfSourceException as exception:
            self.set_word(self.get_default_word())
        except Validator.ValidationException as exception:
            self.set_em(str(exception))
            raise exception

    #
    # Key press events
    #
    def react_on_key_press(self, key):
        """Dispatch a key name ('escape', 'shift', 'up', ...) to its handler."""
        self.__key_to_action[key]()

    def __do_escape(self):
        # escape toggles play/pause
        if self.get_pi():
            self.stop_playing()
        else:
            self.start_playing()

    def __do_shift(self):
        self.go_to_start()

    def __do_up(self):
        self.change_speed(self.get_wpm() + 10)

    def __do_down(self):
        self.change_speed(self.get_wpm() - 10)

    def __do_left(self):
        self.get_previous_word()

    def __do_right(self):
        self.get_next_word()

    #
    # Exceptions
    #
    class WrongSourceNameException(Exception):
        pass

    class StartOfSourceException(Exception):
        pass

    class EndOfSourceException(Exception):
        pass

    class InvalidValidationObjectTypeException(Exception):
        pass

    class StopTimerException(Exception):
        pass

    class StartTimerException(Exception):
        pass

    class GreetingException(Exception):
        pass
|
import os
import logging
from tqdm import tqdm
from torch.autograd import Variable
from torchvision.utils import save_image
import torch.nn.functional as F
import torch
import utils
import scipy.io as io
# Default tensor factory for this module: GPU floats when CUDA is
# available, CPU floats otherwise.
Tensor = torch.cuda.FloatTensor if torch.cuda.is_available() else torch.FloatTensor
def visualize_training_generator(generator, fig_path, cuda=False, n_row=4, n_col=4):
    """Save an n_row x n_col grid of generated samples to *fig_path*.

    Wavelength sweeps along columns and angle along rows, both normalized
    to [-1, 1].  The generator is put back into train mode before return.
    """
    generator.eval()
    wavelengths = torch.linspace(-1, 1, n_col).view(1, n_col).repeat(n_row, 1).view(-1, 1)
    angles = torch.linspace(-1, 1, n_row).view(n_row, 1).repeat(1, n_col).view(-1, 1)
    labels = torch.cat([wavelengths, angles], -1).type(Tensor)
    imgs, _ = sample_images(generator, labels, cuda)
    # mirror-pad vertically so each 1-D pattern renders as a square tile
    imgs = F.pad(imgs, (0, 0, 0, imgs.size(2) - 1), mode='reflect')
    save_image(imgs, fig_path, n_row)
    generator.train()
def sample_images(generator, labels, cuda=False):
    """Draw a noise batch matching *labels* and run the generator.

    Returns (generator_output, noise); the noise is returned so callers can
    reproduce the exact samples later.
    """
    batch = labels.size(0)
    if cuda:
        z = Variable(torch.cuda.FloatTensor(batch, generator.noise_dim).normal_())
        z.cuda()
    else:
        z = Variable(torch.randn(batch, generator.noise_dim))
    return generator(labels, z), z
def evaluate(generator, wavelengths, angles, num_imgs, params):
    """Generate *num_imgs* patterns for every (wavelength, angle) pair and
    dump the images plus the noise that produced them to .mat files under
    ``params.output_dir/outputs``."""
    generator.eval()
    for wavelength in wavelengths:
        for angle in angles:
            # normalize the conditioning labels exactly as during training
            w = (wavelength - params.wc) / params.wspan
            theta = (angle - params.ac) / params.aspan
            labels = Tensor([w, theta]).repeat(num_imgs, 1)
            images, noise = sample_images(generator, labels, params.cuda)
            filename = 'ccGAN_imgs_Si_w' + str(wavelength) + '_' + str(angle) + 'deg.mat'
            mdict = {
                'wavelength': wavelength,
                'angle': angle,
                'imgs': torch.squeeze(images).cpu().detach().numpy(),
                'noise': noise.data.cpu().numpy(),
            }
            file_path = os.path.join(params.output_dir, 'outputs', filename)
            io.savemat(file_path, mdict=mdict)
            logging.info('wavelength = ' + str(wavelength) + ' is done. \n')
def compute_gradient_penalty(D, real_samples, fake_samples, labels, cuda=False):
    """Calculates the gradient penalty loss for WGAN GP."""
    batch = real_samples.size(0)
    # per-sample random mixing weight for the real/fake interpolation
    alpha = torch.rand(batch, 1, 1, 1).type(Tensor)
    interpolates = (alpha * real_samples + (1 - alpha) * fake_samples).type(Tensor).requires_grad_(True)
    d_interpolates = D(interpolates, labels)
    # gradient of the critic output w.r.t. the interpolated inputs
    grad_target = Variable(Tensor(batch, 1).fill_(1.0), requires_grad=False)
    gradients = torch.autograd.grad(
        outputs=d_interpolates,
        inputs=interpolates,
        grad_outputs=grad_target,
        create_graph=True,
        retain_graph=True,
        only_inputs=True,
    )[0]
    flat = gradients.view(batch, -1)
    # penalize deviation of the per-sample gradient norm from 1
    return ((flat.norm(2, dim=1) - 1) ** 2).mean()
def train(models, optimizers, dataloader, params):
    """WGAN-GP training loop.

    Args:
        models: (generator, discriminator) pair.
        optimizers: (optimizer_G, optimizer_D).
        dataloader: yields (real_imgs, labels) batches; iterated repeatedly
            until ``params.numIter`` total steps have run.
        params: config namespace; uses numIter, cuda, noise_dims,
            lambda_gp, n_critic, output_dir.

    Returns:
        (gen_loss_history, dis_loss_history): per-step loss tensors.  The
        generator history is padded by repeating each loss n_critic times.
    """
    generator, discriminator = models
    optimizer_G, optimizer_D = optimizers
    generator.train()
    discriminator.train()
    gen_loss_history = []
    dis_loss_history = []
    with tqdm(total=params.numIter) as t:
        it = 0
        while True:
            for i, (real_imgs, labels) in enumerate(dataloader):
                it +=1
                if it > params.numIter:
                    # iteration budget exhausted: checkpoint and return
                    model_dir = os.path.join(params.output_dir, 'model')
                    utils.save_checkpoint({'iter': it,
                                           'gen_state_dict': generator.state_dict(),
                                           'dis_state_dict': discriminator.state_dict(),
                                           'optim_G': optimizer_G.state_dict(),
                                           'optim_D': optimizer_D.state_dict()},
                                          checkpoint=model_dir)
                    return (gen_loss_history, dis_loss_history)
                # move to GPU if available
                if params.cuda:
                    real_imgs, labels = real_imgs.cuda(), labels.cuda()
                # convert to torch Variables
                # (shadows the module-level Tensor name for this function)
                Tensor = torch.cuda.FloatTensor if params.cuda else torch.FloatTensor
                real_imgs, labels = Variable(real_imgs.type(Tensor)), Variable(labels.type(Tensor))
                # ---------------------
                #  Train Discriminator
                # ---------------------
                optimizer_D.zero_grad()
                # Sample noise as generator input
                z = Variable(torch.randn(labels.size(0), params.noise_dims).type(Tensor))
                #if params.cuda:
                #    z.cuda()
                # Generate a batch of images
                fake_imgs = generator(labels ,z)
                # Real images
                real_validity = discriminator(real_imgs, labels)
                # Fake images
                fake_validity = discriminator(fake_imgs, labels)
                gradient_penalty = compute_gradient_penalty(discriminator, real_imgs.data, fake_imgs.data, labels.data, params.cuda)
                # Adversarial loss
                d_loss = -torch.mean(real_validity) + torch.mean(fake_validity) + params.lambda_gp * gradient_penalty
                d_loss.backward()
                optimizer_D.step()
                dis_loss_history.append(d_loss.data)
                # -----------------
                #  Train Generator
                # -----------------
                optimizer_G.zero_grad()
                # Train the generator every n_critic steps
                if it % params.n_critic == 0:
                    # Generate a batch of images
                    fake_imgs = generator(labels, z)
                    # Loss measures generator's ability to fool the discriminator
                    # Train on fake images
                    fake_validity = discriminator(fake_imgs, labels)
                    g_loss = -torch.mean(fake_validity)
                    g_loss.backward()
                    optimizer_G.step()
                    gen_loss_history += [g_loss.data] * params.n_critic
                    #t.set_postfix(loss='{:05.3f}'.format(g_loss.data))
                    #t.update()
                if it % 250 == 0:
                    # NOTE(review): assumes n_critic divides 250 so g_loss is
                    # bound by the time this logs -- confirm configuration.
                    logging.info('Generator loss: %f' % g_loss.data)
                    logging.info('Discriminator loss: %f' % d_loss.data)
                    fig_path = os.path.join(params.output_dir, 'figures', 'iter{}.png'.format(it))
                    visualize_training_generator(generator, fig_path, params.cuda)
                t.set_postfix(loss='{:05.3f}'.format(g_loss.data))
                t.update()
|
import math

# Print the number of decimal digits of 2**1000000.
# BUG FIX: the original `str(2 ** 1000000)` raises ValueError on
# Python 3.11+ (int->str conversion is capped at 4300 digits by default),
# and `x.count('') - 1` is just an obscure spelling of len(x).
# digits = floor(e * log10(2)) + 1; the float value ~301029.99566 is about
# 3e4 away from the nearest integer relative to its ~1e-10 error, so
# floor() is safe here.
exponent = 1000000
digit_count = math.floor(exponent * math.log10(2)) + 1
print(digit_count)
|
"""
.. module:: DataPreprocessing
DataPreprocessing
*************
:Description: DataPreprocessing
:Authors: bejar
:Version:
:Created on: 09/03/2015 8:35
"""
__author__ = 'bejar'
import numpy as np
import mne
from mne.io import read_raw_bti
import scipy.io
import logging
from config.paths import smaqepath, datapath
# Subject lists per clinical group: controls, compensated, decompensated.
con = ['con2/1', 'con3/2', 'con4/1', 'con5/1', 'con6/1', 'con7/1', 'con8/1', 'con9/1', 'con10/1']
comp = ['com1esq1/1', 'com2esq10/1', 'com3esq11/1', 'com4esq12/1', 'com5esq13/1', 'com6esq14/1', 'com7esq15/1',
        'com8esq18/1', 'com9esq19/1', 'com12esq23/1', 'com13esq24/1']
desc = ['des1esq2/1', 'des3esq4/1', 'des4esq5/2', 'des5esq6/1', 'des6esq7/1', 'des7esq8/1', 'des10esq17/1']

# Frequency bands as (name, low_hz, high_hz).
lband = [('alpha', 8, 13), ('beta', 13, 30), ('gamma-l', 30, 60),
         ('gamma-h', 60, 200), ('theta', 4, 8), ('delta', 1, 4)]

# Subject id -> class label (0 control, 1 compensated, 2 decompensated).
clasif = {
    'con2/1': 0, 'con3/2': 0, 'con4/1': 0, 'con5/1': 0, 'con6/1': 0, 'con7/1': 0, 'con8/1': 0, 'con9/1': 0,
    'con10/1': 0,
    'com1esq1/1': 1, 'com2esq10/1': 1, 'com3esq11/1': 1, 'com4esq12/1': 1, 'com5esq13/1': 1, 'com6esq14/1': 1,
    'com7esq15/1': 1, 'com8esq18/1': 1, 'com9esq19/1': 1, 'com13esq24/1': 1,
    'des1esq2/1': 2, 'des3esq4/1': 2, 'des4esq5/2': 2, 'des5esq6/1': 2, 'des6esq7/1': 2, 'des7esq8/1': 2,
    'des10esq17/1': 2, 'com12esq23/1': 2
}

# (subfolder, subject list) pairs driving the main loop below.
paths = [('control/', con), ('compensados/', comp), ('descompensados/', desc)]

#mne.set_log_level('WARNING')
logger = logging.getLogger('log')
console = logging.StreamHandler()
logging.getLogger('log').addHandler(console)

# print a.ch_names
# print len(a)
# print a[0][0]
# print len(a[0][0])
# print len(a[0][1])
# print a.info

# For every subject: read the raw BTi recording, extract all MEG channels
# and save them as a compressed .mat file under DataT3/All.
for pre, con in paths:
    for ind in con:
        logger.info('Individual %s', ind)
        indf = pre + ind
        a = read_raw_bti(smaqepath + indf + '/e,rfhp1.0Hz', verbose=False)
        for band, lf, hf in lband:
            # NOTE(review): `fa` is never used below and band_pass_filter is
            # called without lf/hf, so the saved data appears to be the raw
            # (unfiltered) signal and the band loop has no effect -- confirm
            # against the intended pipeline before relying on this output.
            fa = mne.filter.band_pass_filter(a)
        nchannels = len([cn for cn in a.info['ch_names'] if 'MEG' in cn])
        data = np.zeros((nchannels, len(a)))
        lcnames = []
        ic = 0
        for i, cn in enumerate(a.ch_names):
            if 'MEG' in cn:
                #print cn
                lcnames.append(cn)
                data[ic] = a[i][0][0]
                ic += 1
        filedata = {'channels': lcnames, 'data': data}
        nfile = ind.split('/')[0]
        scipy.io.savemat(datapath + 'DataT3/All/' + 'T3-' + nfile, filedata, do_compression=True)
|
import sys
import os
import socket
from PyQt5.QtCore import QTimer, pyqtSlot
from PyQt5.QtWidgets import *
from PyQt5.uic import loadUi
import threading
# Module-level socket shared between the GUI methods and the receive thread.
client = None
class MainWindow(QWidget):
    """Qt chat-client window.

    Connects the shared module-level socket to a server on the local host,
    sends text typed in the input box, and polls received messages into the
    display widget on a 100 ms timer (Qt widgets must not be touched from
    the receiver thread).
    """

    def __init__(self, client):
        self.client = client
        super().__init__()
        self.activate = True      # receive-loop enable flag
        self.msg_ls = []          # chat transcript lines rendered by update_msg()
        loadUi('form.ui',self)
        self.setWindowTitle('Client')
        self.snd_btn.clicked.connect(self.onsendcl)
        self.timer = QTimer()
        self.timer.timeout.connect(self.update_msg)
        self.timer.start(100)
        self.Start_client_btn.clicked.connect(self.start_server)
        self.show()

    def onsendcl(self):
        """Send the text-box contents to the server and echo them locally."""
        self.temp_snd_msg = self.send_text.text()
        global client
        client.send(self.temp_snd_msg.encode())
        self.msg_ls.append(f'You : {self.temp_snd_msg} \n')
        self.send_text.setText("")

    def start_server(self):
        """Connect the shared socket to this host's IP and start receiving."""
        port = 23447
        host = socket.gethostname()
        ip = socket.gethostbyname(host)
        self.client.connect((str(ip), port))
        self.msg_ls.append('client is connected to server \n')
        # self.msg_ls.append(f"Connected to server\n")
        self.recieve_message(self.activate)

    def update_msg(self):
        """Timer slot: re-render the whole transcript into the text widget."""
        if len(self.msg_ls) != 0:
            self.result = ''
            for i in self.msg_ls:
                self.result += i
            self.send_rec_msg.setText(self.result)

    def recieve_message(self, activate):
        """Spawn a thread that appends incoming messages to the transcript
        until the server sends the 'END' sentinel, then closes the socket."""
        def rec(self):
            global client
            while (not activate == False):
                self.msg = client.recv(1024).decode()
                if self.msg == 'END':
                    client.close()
                    break
                self.msg_ls.append(f'client : {self.msg} \n')
            print("server stopped")
        rec_thread = threading.Thread(target=rec, args=[self])
        rec_thread.start()
# Create the shared TCP socket, then hand it to the window and start the
# Qt event loop.
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
app = QApplication([])
window = MainWindow(client)
app.exec_()
|
#Ceci est un projet scolaire pour mettre en place une base de données pour les projets de location de voitures
#importing--------------------------------------------------
from flask import Flask, render_template, request, session, redirect, url_for
from flaskext.mysql import MySQL
import pymysql
#app configurations------------------------------------------
# Flask application and MySQL connector setup.
# NOTE(review): the secret key and an empty root password are hard-coded in
# source control -- should come from environment/config in production.
app = Flask(__name__)
app.secret_key = 'hamza ait bourhim'
mysql = MySQL()
#MYSQL configurations----------------------------------------
app.config['MYSQL_DATABASE_HOST'] = 'localhost'
app.config['MYSQL_DATABASE_USER'] = 'root'
app.config['MYSQL_DATABASE_PASSWORD'] = ''
app.config['MYSQL_DATABASE_DB'] = 'projet'
mysql.init_app(app)
#login-------------------------------------------------------
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Authenticate an admin against the `admins` table and open a session."""
    #connection
    conn = mysql.connect()
    cursor = conn.cursor(pymysql.cursors.DictCursor)
    if 'loggedin' in session:
        return redirect(url_for('home'))
    msg = ''
    if request.method == 'POST' and 'username' in request.form and 'password' in request.form:
        username = request.form['username']
        password = request.form['password']
        #check if account exists
        # NOTE(review): passwords are stored/compared in plain text; a hash
        # (e.g. werkzeug.security) should be used -- schema change required.
        cursor.execute('SELECT * FROM admins WHERE username = %s AND password = %s', (username, password))
        #fetch the account
        account = cursor.fetchone()
        #if account exists
        if account:
            #create session
            session['loggedin'] = True
            session['id'] = account['id']
            session['firstname'] = account['firstname']
            return redirect(url_for('home'))
        else:
            msg = 'Nom ou Mot de passe incorrect!'
    return render_template('login.html' , msg=msg)
#logout-------------------------------------------------------
@app.route('/logout')
def logout():
    """Drop the admin session keys and send the user back to the login page."""
    for key in ('loggedin', 'id', 'firstname'):
        session.pop(key, None)
    return redirect(url_for('login'))
#team-------------------------------------------------------
@app.route('/team')
def team():
    """Render the static team presentation page."""
    return render_template('team.html')
#home-------------------------------------------------------
@app.route('/')
def home():
    """Dashboard: loads every table (admins, cars joined with their types,
    clients, bookings, current rentals) and renders home.html; redirects to
    login when no session is open."""
    if 'loggedin' in session:
        #connection
        conn = mysql.connect()
        cursor = conn.cursor(pymysql.cursors.DictCursor)
        #fetching tables
        cursor.execute('SELECT * FROM admins ORDER BY `id` ')
        admins = cursor.fetchall()
        cursor.execute('(SELECT * FROM vehicules INNER JOIN types_vehicules ON vehicules.idtype = types_vehicules.idtype ) ORDER BY `types_vehicules`.`idtype`')
        cars = cursor.fetchall()
        cursor.execute('SELECT * FROM types_vehicules ORDER BY `idtype` ')
        types = cursor.fetchall()
        # min/max type ids are used by the template for id navigation
        cursor.execute('SELECT MAX(idtype) FROM types_vehicules')
        maximum = int(cursor.fetchall()[0]['MAX(idtype)'])
        cursor.execute('SELECT MIN(idtype) FROM types_vehicules')
        minimum = int(cursor.fetchall()[0]['MIN(idtype)'])
        cursor.execute('SELECT * FROM clients ')
        clients = cursor.fetchall()
        cursor.execute('SELECT * FROM reservations ORDER BY `dateretour` ')
        bookings = cursor.fetchall()
        cursor.execute('SELECT * FROM locations_courantes ')
        rentals = cursor.fetchall()
        cursor.close()
        conn.close()
        return render_template('home.html',admins = admins, cars = cars, types = types, maximum = maximum, minimum = minimum, clients = clients, bookings = bookings, rentals = rentals)
    return redirect(url_for('login'))
#admins-------------------------------------------------------
#add admin----------------------------------------------------
@app.route('/addadmin', methods = ['POST'])
def addadmin():
    """Insert a new admin row; the id is allocated as MAX(id)+1."""
    msg = ''
    if request.method == 'POST':
        try:
            conn = mysql.connect()
            cursor = conn.cursor()
            # NOTE(review): MAX(id)+1 is race-prone under concurrent inserts;
            # an AUTO_INCREMENT column would be safer (schema change needed).
            cursor.execute('SELECT MAX(id) FROM admins')
            idmax = cursor.fetchall()
            id = int(idmax[0][0]) + 1
            username = request.form['username']
            firstname = request.form['firstname']
            lastname = request.form['lastname']
            email = request.form['email']
            password = request.form['password']
            cursor.execute("INSERT INTO `admins` (`id`, `username`, `firstname`, `lastname`, `email`, `password`) VALUES (%s, %s, %s, %s, %s, %s)", ( id, username, firstname, lastname, email, password))
            conn.commit()
        except Exception:
            # broad catch: any failure (duplicate username, DB error) only
            # sets a generic user-facing message
            msg = "changer le nom d'utilisateur"
    return redirect(url_for('home' , msg=msg))
#edit admin----------------------------------------------------
@app.route('/editadmin', methods = ['POST'])
def editadmin():
    """Update the profile of the currently logged-in admin (id from session)."""
    msg = ''
    if request.method == 'POST':
        try:
            conn = mysql.connect()
            cursor = conn.cursor()
            myid = session['id']
            username = request.form['username']
            firstname = request.form['firstname']
            lastname = request.form['lastname']
            email = request.form['email']
            password = request.form['password']
            cursor.execute("UPDATE `admins` SET `id` = %s, `username` = %s, `firstname` = %s, `lastname` = %s, `email` = %s, `password` = %s WHERE `id` = %s ", ( myid, username, firstname, lastname, email, password, myid))
            conn.commit()
        except Exception:
            # broad catch: failures only surface as a generic message
            msg = "il y a une erreur"
    return redirect(url_for('home' , msg=msg))
#delete admin----------------------------------------------------
@app.route('/deleteadmin', methods = ['POST'])
def deleteadmin():
    """Delete the logged-in admin's own account, then log out."""
    msg = ''
    if request.method == 'POST':
        try:
            conn = mysql.connect()
            cursor = conn.cursor()
            myid = session['id']
            cursor.execute(" DELETE FROM `admins` WHERE `id` = %s", ( myid))
            conn.commit()
            # successful self-deletion invalidates the session immediately
            return redirect(url_for('logout'))
        except Exception:
            msg = 'il y a une erreur'
    return redirect(url_for('home' , msg=msg))
#cars---------------------------------------------------------
#add car------------------------------------------------------
@app.route('/addcar', methods = ['POST'])
def addcar():
    """Register a vehicle (plate + type id), marked available by default."""
    msg = ''
    if request.method == 'POST':
        try:
            conn = mysql.connect()
            cursor = conn.cursor()
            matricule = request.form['matricule']
            idtype = request.form['idtype']
            cursor.execute("INSERT INTO `vehicules` (`matricule`, `idtype`, `disponible`) VALUES (%s, %s, TRUE)", ( matricule, idtype))
            conn.commit()
        except Exception:
            # most likely a duplicate plate (primary key violation)
            msg = 'changer le matricule'
    return redirect(url_for('home' , msg=msg))
#delete car----------------------------------------------------
@app.route('/deletecar', methods = ['POST'])
def deletecar():
    """Remove a vehicle by its plate number."""
    msg = ''
    if request.method == 'POST':
        try:
            conn = mysql.connect()
            cursor = conn.cursor()
            matricule = request.form['matricule']
            cursor.execute(" DELETE FROM `vehicules` WHERE `matricule` = %s", (matricule))
            conn.commit()
        except Exception:
            msg = "il y a une erreur"
    return redirect(url_for('home' , msg=msg))
#types---------------------------------------------------------
#add type------------------------------------------------------
@app.route('/addtype', methods = ['POST'])
def addtype():
    """Create a vehicle type; id is MAX(idtype)+1 and the user-entered
    'OUI'/'NON' air-conditioning value is mapped to TRUE/FALSE."""
    msg = ''
    if request.method == 'POST':
        try:
            conn = mysql.connect()
            cursor = conn.cursor()
            # NOTE(review): MAX(idtype)+1 is race-prone like the admin id
            cursor.execute('SELECT MAX(idtype) FROM types_vehicules')
            idmax = cursor.fetchall()
            idtype = int(idmax[0][0]) + 1
            marque = request.form['marque']
            modele = request.form['modele']
            carburant = request.form['carburant']
            couleur = request.form['couleur']
            prix = request.form['prix']
            climatisation = request.form['climatisation']
            if climatisation.upper() == 'OUI':
                climatisation = 'TRUE'
            else:
                climatisation = 'FALSE'
            cursor.execute("INSERT INTO `types_vehicules` (`idtype`, `marque`, `modele`, `carburant`, `couleur`, `climatisation`, `prix`) VALUES (%s, %s, %s, %s, %s, %s, %s)", (idtype, marque, modele, carburant, couleur, climatisation, prix))
            conn.commit()
        except Exception:
            msg = 'il y a une erreur'
    return redirect(url_for('home' , msg=msg))
#edit type------------------------------------------------------
@app.route('/edittype', methods = ['POST'])
def edittype():
    """Update a vehicle type identified by idtype; same 'OUI'/'NON' to
    TRUE/FALSE mapping as addtype."""
    msg = ''
    if request.method == 'POST':
        try:
            conn = mysql.connect()
            cursor = conn.cursor()
            idtype = request.form['idtype']
            marque = request.form['marque']
            modele = request.form['modele']
            carburant = request.form['carburant']
            couleur = request.form['couleur']
            prix = request.form['prix']
            climatisation = request.form['climatisation']
            if climatisation.upper() == 'OUI':
                climatisation = 'TRUE'
            else:
                climatisation = 'FALSE'
            cursor.execute("UPDATE `types_vehicules` SET `marque` = %s, `modele` = %s, `carburant` = %s, `couleur` = %s, `climatisation` = %s, `prix` = %s WHERE `idtype` = %s ", (marque, modele, carburant, couleur, climatisation, prix, idtype))
            conn.commit()
        except Exception:
            msg = 'il y a une erreur'
    return redirect(url_for('home' , msg=msg))
#delete type----------------------------------------------------
@app.route('/deletetype', methods = ['POST'])
def deletetype():
    """Remove a vehicle type by its id."""
    msg = ''
    if request.method == 'POST':
        try:
            conn = mysql.connect()
            cursor = conn.cursor()
            idtype = request.form['idtype']
            cursor.execute(" DELETE FROM `types_vehicules` WHERE `idtype` = %s", (idtype))
            conn.commit()
        except Exception:
            msg = "il y a une erreur"
    return redirect(url_for('home' , msg=msg))
#clients---------------------------------------------------------
#add client------------------------------------------------------
@app.route('/addclient', methods = ['POST'])
def addclient():
    """Register a client; cin (national id) is the primary key."""
    msg = ''
    if request.method == 'POST':
        try:
            conn = mysql.connect()
            cursor = conn.cursor()
            cin = request.form['cin']
            motdepasse = request.form['motdepasse']
            permis = request.form['permis']
            prenom = request.form['prenom']
            nom = request.form['nom']
            datenaissance = request.form['datenaissance']
            telephone = request.form['telephone']
            adresse = request.form['adresse']
            # NOTE(review): client passwords are stored in plain text
            cursor.execute("INSERT INTO `clients` (`cin`, `motdepasse`, `permis`, `prenom`, `nom`, `datenaissance`, `telephone`, `adresse`) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)", (cin, motdepasse, permis, prenom, nom, datenaissance, telephone, adresse))
            conn.commit()
        except Exception:
            msg = 'il y a une erreur'
    return redirect(url_for('home' , msg=msg))
#edit client------------------------------------------------------
@app.route('/editclient', methods = ['POST'])
def editclient():
    """Update a client's record, identified by cin."""
    msg = ''
    if request.method == 'POST':
        try:
            conn = mysql.connect()
            cursor = conn.cursor()
            cin = request.form['cin']
            motdepasse = request.form['motdepasse']
            permis = request.form['permis']
            prenom = request.form['prenom']
            nom = request.form['nom']
            datenaissance = request.form['datenaissance']
            telephone = request.form['telephone']
            adresse = request.form['adresse']
            cursor.execute("UPDATE `clients` SET `motdepasse` = %s, `permis` = %s, `prenom` = %s, `nom` = %s, `datenaissance` = %s, `telephone` = %s , `adresse` = %s WHERE `cin` = %s ", (motdepasse, permis, prenom, nom, datenaissance, telephone, adresse, cin))
            conn.commit()
        except Exception:
            msg = 'il y a une erreur'
    return redirect(url_for('home' , msg=msg))
#delete client----------------------------------------------------
@app.route('/deleteclient', methods = ['POST'])
def deleteclient():
    """Remove a client by cin."""
    msg = ''
    if request.method == 'POST':
        try:
            conn = mysql.connect()
            cursor = conn.cursor()
            cin = request.form['cin']
            cursor.execute(" DELETE FROM `clients` WHERE `cin` = %s", (cin))
            conn.commit()
        except Exception:
            msg = "il y a une erreur"
    return redirect(url_for('home' , msg=msg))
#bookings---------------------------------------------------------
#add booking------------------------------------------------------
@app.route('/addbooking', methods = ['POST'])
def addbooking():
    """Create a reservation: id is MAX(idreservation)+1, the total price is
    duration * the type's daily price, and the booking starts neither
    accepted nor seen."""
    msg = ''
    if request.method == 'POST':
        try:
            conn = mysql.connect()
            cursor = conn.cursor()
            cursor.execute('SELECT MAX(idreservation) FROM reservations')
            idmax = cursor.fetchall()
            idreservation = int(idmax[0][0]) + 1
            cin = request.form['cin']
            idtype = request.form['idtype']
            datedepart = request.form['datedepart']
            dateretour = request.form['dateretour']
            duree = request.form['duree']
            acceptee = 'FALSE'
            vue = 'FALSE'
            # price per day comes from the requested vehicle type
            cursor.execute('SELECT prix FROM types_vehicules WHERE idtype = %s',(idtype))
            prix = cursor.fetchall()
            total = int(duree) * int(prix[0][0])
            cursor.execute("INSERT INTO `reservations` (`idreservation`, `cin`, `idtype`, `datedepart`, `dateretour`, `duree`, `total`, `acceptee`, `vue`) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)", (idreservation, cin, idtype, datedepart, dateretour, duree, total, acceptee, vue))
            conn.commit()
        except Exception:
            msg = 'il y a une erreur'
    return redirect(url_for('home' , msg=msg))
#accept booking------------------------------------------------------
@app.route('/acceptbooking', methods = ['POST'])
def acceptbooking():
    """Accept a booking: mark the reservation accepted/seen, create the current
    rental row (unpaid), and mark the assigned vehicle unavailable."""
    msg = ''
    if request.method == 'POST':
        try:
            conn = mysql.connect()
            cursor = conn.cursor()
            idreservation = request.form['idreservation']
            matricule = request.form['matricule']
            payee = 'FALSE'
            # FIX: scalar parameters must be passed as 1-tuples, (x,) not (x).
            cursor.execute("UPDATE `reservations` SET `acceptee` = TRUE, `vue` = TRUE WHERE `idreservation` = %s ", (idreservation,))
            conn.commit()
            cursor.execute("INSERT INTO `locations_courantes` (`idreservation`, `matricule`, `payee`) VALUES (%s, %s, %s)", (idreservation, matricule, payee))
            conn.commit()
            cursor.execute("UPDATE `vehicules` SET `disponible` = FALSE WHERE `matricule` = %s ", (matricule,))
            conn.commit()
        except Exception:
            msg = 'il y a une erreur'
    return redirect(url_for('home' , msg=msg))
#refuse booking----------------------------------------------------
@app.route('/refusebooking', methods = ['POST'])
def refusebooking():
    """Refuse a booking: only mark the reservation as seen (acceptee stays FALSE)."""
    msg = ''
    if request.method == 'POST':
        try:
            conn = mysql.connect()
            cursor = conn.cursor()
            idreservation = request.form['idreservation']
            # FIX: (idreservation) was a parenthesised scalar; use a real 1-tuple.
            cursor.execute("UPDATE `reservations` SET `vue` = TRUE WHERE `idreservation` = %s ", (idreservation,))
            conn.commit()
        except Exception:
            msg = "il y a une erreur"
    return redirect(url_for('home' , msg=msg))
#rentals---------------------------------------------------------
#paid rental------------------------------------------------------
@app.route('/paidrental', methods = ['POST'])
def paidrental():
    """Mark a current rental as paid (payee = TRUE)."""
    msg = ''
    if request.method == 'POST':
        try:
            conn = mysql.connect()
            cursor = conn.cursor()
            idreservation = request.form['idreservation']
            # FIX: (idreservation) was a parenthesised scalar; use a real 1-tuple.
            cursor.execute("UPDATE `locations_courantes` SET `payee` = TRUE WHERE `idreservation` = %s ", (idreservation,))
            conn.commit()
        except Exception:
            msg = 'il y a une erreur'
    return redirect(url_for('home' , msg=msg))
#delete rental----------------------------------------------------
@app.route('/deleterental', methods = ['POST'])
def deleterental():
    """End a rental: make its vehicle available again, then delete the rental row."""
    msg = ''
    if request.method == 'POST':
        try:
            conn = mysql.connect()
            cursor = conn.cursor()
            idreservation = request.form['idreservation']
            # FIX throughout: DB-API parameters must be a sequence, (x,) not (x).
            cursor.execute('SELECT matricule FROM locations_courantes WHERE idreservation = %s', (idreservation,))
            matrcl = cursor.fetchall()
            # IndexError here (unknown idreservation) is caught by the handler below
            matricule = matrcl[0][0]
            cursor.execute("UPDATE `vehicules` SET `disponible` = TRUE WHERE `matricule` = %s ", (matricule,))
            conn.commit()
            cursor.execute(" DELETE FROM `locations_courantes` WHERE `idreservation` = %s", (idreservation,))
            conn.commit()
        except Exception:
            msg = "il y a une erreur"
    return redirect(url_for('home' , msg=msg))
#run the app--------------------------------------------------
if __name__ == "__main__":
    # Debug mode enables the reloader and in-browser tracebacks; do not use in production.
    app.run(debug=True)
|
import pytest
import warnings
import neworder as no
warnings.filterwarnings(action='ignore', category=RuntimeWarning, message=r't=')
def test_basics() -> None:
    """Smoke-test the module's public attributes and the polymorphic log()."""
    for attribute in ("verbose", "checked", "__version__"):
        assert hasattr(no, attribute)
    # log() must accept any printable object without raising
    no.log("testing")
    no.log(1)
    no.log(no)
    no.log([1, 2, 3])
    no.log((1, 2, 3))
    no.log({1: 2, 3:4})
def test_submodules() -> None:
    """The mpi, stats and df submodules must all be present as attributes."""
    for submodule in ("mpi", "stats", "df"):
        assert hasattr(no, submodule)
def test_dummy_model() -> None:
    """A minimal model with a null timeline must run to completion."""
    class NoOpModel(no.Model):
        """Does nothing on each step; exists only to exercise no.run()."""
        def __init__(self) -> None:
            super().__init__(no.NoTimeline(), no.MonteCarlo.deterministic_identical_stream)
        def step(self) -> None:
            pass
        def finalise(self) -> None:
            pass
    assert no.run(NoOpModel())
@pytest.mark.filterwarnings("ignore:check()")
def test_check_flag() -> None:
    """no.run honours the checked() flag: a failing check() aborts the run."""
    class AlwaysFailingModel(no.Model):
        """Reports an inconsistent state from check() on purpose."""
        def __init__(self) -> None:
            super().__init__(no.NoTimeline(), no.MonteCarlo.deterministic_identical_stream)
        def step(self) -> None:
            pass
        def check(self) -> bool:
            return False
    # with checking enabled, the failing check() makes the run fail...
    assert not no.run(AlwaysFailingModel())
    no.checked(False)
    # ...and with checking disabled the same model succeeds
    assert no.run(AlwaysFailingModel())
def test_mpi() -> None:
    """neworder's rank/size must agree with mpi4py, or be 0/1 when serial."""
    try:
        import mpi4py.MPI as mpi  # type: ignore[import]
        expected_rank = mpi.COMM_WORLD.Get_rank()
        expected_size = mpi.COMM_WORLD.Get_size()
    except Exception:
        # no mpi4py available: the module is expected to fall back to serial mode
        expected_rank = 0
        expected_size = 1
    assert no.mpi.rank() == expected_rank
    assert no.mpi.size() == expected_size
|
"""Copyright David Donahue 2017. When run, checks to see if user is ready for next event; texts user asking if they are ready for next event.
Extends duration of current event if user texts back implying they are not finished, or marks the current event as complete if user is finished. Basically, keeps track
of user progress through adventure. As user progresses, texts may include helpful guidance as to address of next event, or start time of next event, etc."""
import MySQLdb
import datetime
import time
from twilio.rest import Client
#
# print(message.sid)
# SECURITY NOTE(review): Twilio credentials are hard-coded in source control.
# Move them to environment variables / a secrets store and rotate these values.
account_sid = 'AC534ccef182c5e4b4efbbc315a44bbed3'
auth_token = 'e505be28ef55d8fa15f158e6af95774b'
client = Client(account_sid, auth_token)
def send_user_reminder_of_next_events_if_necessary(cursor):
    """For every active adventure, text the user a reminder for each not-yet-started
    event that begins within the next 15 minutes, then mark that event STARTED.

    NOTE(review): this is Python 2 code (print statements). `db.commit()` relies
    on the module-level `db` connection created below this function - confirm
    before moving or importing this function elsewhere.
    """
    current_time = datetime.datetime.now()  # .strftime("%Y-%m-%d %H:%M:%S")
    #current_time = datetime.datetime.strptime("2017-05-06 15:50:00", "%Y-%m-%d %H:%M:%S")
    print current_time
    # Find current adventure
    active_adventure_sql = "SELECT * FROM ADVENTURES WHERE ACTIVE = 1"
    cursor.execute(active_adventure_sql)
    for active_adventure in cursor.fetchall():
        # column positions assumed: 0 = id, 2 = name - confirm against schema
        adventure_id = active_adventure[0]
        adventure_name = active_adventure[2]
        # Get list of events
        # NOTE(review): SQL built with %-interpolation; the values come from our
        # own DB rows here, but parameterised queries would still be safer.
        active_event_sql = "SELECT * FROM EVENTS WHERE ADVENTURE_ID = %s AND STARTED = 0" % (adventure_id)
        cursor.execute(active_event_sql)
        active_events = cursor.fetchall()
        # Determine if next non-started event is within 15 minutes away,
        # if so, shoot the user a reminder text with details of the event
        for active_event in active_events:
            event_time = active_event[2]
            event_id = active_event[0]
            time_to_event = event_time - current_time;
            print time_to_event
            total_seconds = time_to_event.total_seconds()
            # only remind for events strictly in the future and at most 15 min away
            if total_seconds < 15 * 60 and total_seconds > 0:
                print event_id
                print "The next event is 15 minutes away!"
                print active_event[6]
                # recipient and sender numbers are hard-coded - single-user prototype
                message = client.messages.create(
                    to="+19788669891",
                    from_="+16176525131",
                    body="%s: The next event is 15 minutes away! If you would like to to postpone the next event by 15 minutes, reply with 'postpone'. If you want us to call an uber, replay with 'uber'." % adventure_name
                )
                # flag the event so we never remind about it twice
                event_started_sql = "UPDATE EVENTS SET STARTED=1 WHERE ID = %s" % (event_id)
                print event_started_sql
                cursor.execute(event_started_sql)
                db.commit()
# NOTE(review): module-level side effects - connects to MySQL (credentials
# hard-coded) and sends any due reminders immediately when the script runs.
db = MySQLdb.connect("localhost", "testuser", "test123", "GO_VENTR_DB")
cursor = db.cursor()
send_user_reminder_of_next_events_if_necessary(cursor)
|
"""
The import emulation subsystem for windows.
"""
import ntdll
import kernel32
import secur32
import rpcrt4
import advapi32
import msvcrt
import user32
import gdi32
import ole32
import msvcr71
import ws2_32
import wsock32
import wininet
#oleaut32
#shlwapi
#shell32
|
#================================================================
#Author : Max R. Berrios Cruz
#Date: Jun 28, 2013
#Email: max.berrios@upr.edu
#Version:
#================================================================
import sys
from src.parser.input_output import i_o
from src.main.interface.main import main
from src.parser.parser import parser
def Start(values):
    """Build the main workflow object from parsed option values and run it."""
    workflow_runner = main(values)
    workflow_runner.workflow()
def readTerminal():
    """Collect command-line arguments (echoed, then reversed), parse them and
    launch the workflow; on any failure, display the tool's help text."""
    try:
        options = []
        for i in sys.argv[1:]:
            print(i)
            options.append(i)
        # the parser expects the arguments in reverse order
        options.reverse()
        optionsParser = parser()
        optionsParser.optionsListener(options)
        Start(optionsParser.get_values())
    # FIX: a bare `except:` also swallowed SystemExit and KeyboardInterrupt,
    # so Ctrl-C and sys.exit() were silently turned into the help screen.
    except Exception:
        i_o().Help()
if __name__ == "__main__":
    # entry point: parse argv and start the workflow
    readTerminal()
|
def Singleton(theClass):
    """Decorator for a class to make a singleton out of it.

    One instance is kept per distinct (class, positional args, keyword args)
    combination used for __init__.
    """
    classInstances = {}
    def getInstance(*args, **kwargs):
        """Create, or just return, the one and only class instance for these
        constructor arguments."""
        # FIX: str(kwargs) depended on keyword order, so f(a=1, b=2) and
        # f(b=2, a=1) created two different "singletons". Sorting the items
        # makes equivalent keyword sets map to the same cache key.
        key = (theClass, args, str(sorted(kwargs.items())))
        if key not in classInstances:
            classInstances[key] = theClass(*args, **kwargs)
        return classInstances[key]
    return getInstance
|
# import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from sklearn import model_selection
from sklearn.ensemble import RandomForestClassifier as RFC
from sklearn.model_selection import GridSearchCV as GS
from sklearn.metrics import accuracy_score, log_loss
import os
import sys
# Machine epsilon: used to keep predicted probabilities strictly inside (0, 1).
eps = sys.float_info.epsilon
# Dataset paths come from environment variables.
# NOTE(review): os.getenv returns None when unset, which makes read_csv fail -
# confirm TRAINING/TESTING/PREDICTING are exported before running.
training_data = pd.read_csv(os.getenv('TRAINING'), header=0)
tournament_data = pd.read_csv(os.getenv('TESTING'), header=0)
# feature columns are identified by the substring 'feature' in their name
features = [f for f in list(training_data) if 'feature' in f]
# splitting my arrays in ratio of 30:70 percent
features_train, features_test, labels_train, labels_test = model_selection.train_test_split(training_data[features], training_data['target'], test_size=0.3, random_state=0)
# parameters
parameters = {
    'n_estimators': [ 20,25 ],
    'random_state': [ 0 ],
    'max_features': [ 2 ],
    'min_samples_leaf': [150,200,250]
}
# implementing my classifier
model = RFC(n_jobs=-1)
grid = GS(estimator=model, param_grid=parameters)
grid.fit(features_train, labels_train)
# Calculate the logloss of the model
prob_predictions_class_test = grid.predict(features_test)
prob_predictions_test = grid.predict_proba(features_test)
logloss = log_loss(labels_test,prob_predictions_test)
accuracy = accuracy_score(labels_test, prob_predictions_class_test, normalize=True,sample_weight=None)
# predict class probabilities for the tourney set
prob_predictions_tourney = grid.predict_proba(tournament_data[features])
t_id = tournament_data['id']
# keep only the probability of the positive class
results = prob_predictions_tourney[:, 1]
results_df = pd.DataFrame(data={'probability':results})
# clip into the open interval (0, 1) before writing the submission file
joined = pd.DataFrame(t_id).join(np.clip(results_df, 0.0 + eps, 1.0 - eps))
joined.to_csv(os.getenv('PREDICTING'), index=False, float_format='%.16f')
|
def main():
    """Copy a user-chosen text file to After.txt with every line upper-cased."""
    fname = input("enter file name: ")
    # context managers guarantee both files are closed even if an error occurs
    with open(fname, "r") as infile, open("After.txt", "w") as outfile:
        # iterate the file lazily instead of loading it all with readlines()
        for line in infile:
            print(line.upper(), file=outfile, end="")

if __name__ == "__main__":
    # FIX: guard the call so importing this module does not prompt for input
    main()
|
from wordcloud import WordCloud
import pandas as pd
def show_wordcloud():
    """Render results/3gram_tfidf.csv (term, tf-idf score rows) as a word-cloud
    PNG written to results/phrases_cloud.png."""
    print('showing wordcloud')
    bag = pd.read_csv('results/3gram_tfidf.csv')
    # build the term -> weight mapping in one comprehension instead of a manual loop
    frequencies = {term: rank for term, rank in bag.values}
    wordcloud = WordCloud()
    wordcloud.generate_from_frequencies(frequencies=frequencies)
    # plt.imshow(wordcloud, interpolation="bilinear")
    wordcloud.to_file("results/phrases_cloud.png")
# show_wordcloud()
|
#!/usr/bin/env python3
import json
from lib.pos import Pos
from lib.zone_manager import ZoneManager
# Load the region bounds, build the zone tree, and print some statistics.
with open("../config/region_1.json", "r") as fp:
    region_1_prop = json.load(fp)
# FIX: the redundant fp.close() after the `with` block was removed -
# the context manager already closed the file.
test = ZoneManager(region_1_prop["locationBounds"])
tree = test.tree
print("-"*120)
print("Ave depth: {:04.2f}".format(tree.average_depth()))
print("Max depth: {}".format(tree.max_depth()))
print("Leaf nodes: {}".format(len(tree)))
print("="*120)
print("Look out for that tree!")
tree.show_tree()
print("-"*120)
print("Golden block's zone is:")
print("{!r}".format(tree.get_zone(Pos("-1441 2 -1441"))))
|
import pandas as pd
from Funkcje.WczytywanieDanych.loadNormalFiles import loadNormalFilesWithoutHeader, loadNormalFilesWithHeader
from Funkcje.WczytywanieDanych.removeSymbolicValue import removeSymbolicValue
def otwieraniePlikow(dane):
    """Load a dataset chosen by file-name substring, then strip symbolic values
    from the datasets known to contain them.

    Parameters
    ----------
    dane : str
        Path/name of the data file; loader and delimiter are chosen by substring match.

    Returns
    -------
    pandas.DataFrame
        The loaded (and, where needed, cleaned) data.
    """
    # datasets without a header row, mapped to their field delimiter
    bez_naglowka = (
        ('3D_spatial_network1.csv', ','),
        ('cluster_data.csv', ','),
        ('data_feature.csv', ','),
        ('supervision_cluster.csv', ','),
        ('xaaS.dat', ' '),
        ('xadS.dat', ' '),
    )
    # datasets with a header row but a semicolon separator
    naglowek_srednik = ('Absenteeism_at_work.csv', '2018-04-2019-06-web-control.csv', 'QCM3.csv')
    # datasets that contain symbolic (non-numeric) values to strip afterwards
    dane_symboliczne = (
        'Frogs_MFCC.csv', 'iBeacon_RSSI_LabeledS.csv', 'online_shoppers_intention.csv',
        'Sales_Transactions_Dataset_Weekly.csv', 'SCADI.csv', 'xaaS.dat', 'xadS.dat',
        '2018-04-2019-06-web-control.csv', 'a3_va3SD.csv', 'buddymove_holidayiq.csv',
        'CC GENERAL.csv', 'Data_Cortex_Nuclear.xls', 'iris.data',
    )
    daneDF = None
    for nazwa, separator in bez_naglowka:
        if nazwa in dane:
            daneDF = loadNormalFilesWithoutHeader(dane, separator)
            break
    if daneDF is None:
        if 'Data_Cortex_Nuclear.xls' in dane:
            daneDF = pd.read_excel(dane)
        elif any(nazwa in dane for nazwa in naglowek_srednik):
            daneDF = loadNormalFilesWithHeader(dane, ';')
        else:
            # default: comma-separated file with a header row
            daneDF = loadNormalFilesWithHeader(dane, ',')
    if any(nazwa in dane for nazwa in dane_symboliczne):
        danePrzygotowane = removeSymbolicValue(daneDF, dane)
    else:
        danePrzygotowane = daneDF
    return danePrzygotowane
|
from Bio import SeqIO
import pinetree as pt
import sys
import os
import datetime
import argparse
import multiprocessing
import random
import copy
import csv
# Host cell volume passed to pt.Model (presumably litres - confirm against pinetree docs)
CELL_VOLUME = 1.1e-15
PHI10_BIND = 1.82e7  # Binding constant for phi10
# Regulatory features skipped entirely during genome registration
IGNORE_REGULATORY = ["E. coli promoter E[6]",
                     "T7 promoter phiOR",
                     "T7 promoter phiOL",
                     "E. coli promoter A0 (leftward)"]
### Some important proteins
#
#  gp1 is T7 RNA Polymerase
#  gp3.5 is T7 lysozyme
#  gp0.7 is a protein kinase that phosphorylates E. coli RNA polymerase
#New 'CDS' feature version
# CDS features skipped entirely during genome registration
IGNORE_CDS = ["gp10B",
              "gp5.5-5.7",
              "gp4.1",
              "gp4B",
              "gp0.6A",
              "gp0.6B",
              "gp0.5",
              "gp0.4"]
# Optimal E. coli codons
OPT_CODONS_E_COLI = {'A': ['GCT'],
                     'R': ['CGT', 'CGC'],
                     'N': ['AAC'],
                     'D': ['GAC'],
                     'C': ['TGC'],
                     'Q': ['CAG'],
                     'E': ['GAA'],
                     'G': ['GGT', 'GGC'],
                     'H': ['CAC'],
                     'I': ['ATC'],
                     'L': ['CTG'],
                     'F': ['TTC'],
                     'P': ['CCG'],
                     'S': ['TCT', 'TCC'],
                     'T': ['ACT', 'ACC'],
                     'Y': ['TAC'],
                     'V': ['GTT', 'GTA']}
# RNAse information
# RNase E parameters used when transcript degradation is enabled
RNase_E = {"speed": 20,
           "rate": 1e-5,
           "footprint": 10}
RNase_III = {"rate": 1e-2}  # This is the default for the RNAse_table values if sites arent explicitly listed
RNAse_Table = {"R6.8": 0}  # RNAse site R0.0 doesn't exist, its just an example. To ignore a site, set to 0
class Logger:
    '''Sends pretty colors to the console and also logs console to file'''

    def __init__(self, log_output = "", verbose = False):
        # ANSI escape sequences used for console colouring
        self.colors = {'normal': "\u001b[0m",
                       'warn': '\u001b[31m'}
        self.verbose = verbose
        self.log_output = log_output
        if not self.log_output:
            return
        # normalise Windows separators and make sure the parent directory exists
        self.log_output = self.log_output.replace("\\", "/")
        parent_dir = "/".join(self.log_output.split("/")[:-1])
        if parent_dir != '' and not os.path.exists(parent_dir):
            os.makedirs(parent_dir)
        # a bare directory or prefix gets the default file name appended
        if self.log_output.endswith(("/", ".")):
            self.log_output = f"{self.log_output}pinetree.log"
        # truncate any previous log file
        with open(self.log_output, "w"):
            pass

    def _send_to_log(self, text):
        """Append one line to the log file, if file logging is enabled."""
        if not self.log_output:
            return
        with open(self.log_output, 'a') as handle:
            handle.write(text + '\n')

    def normal(self, text):
        """Print *text* in the default colour and record it as [NORMAL]."""
        plain = self.colors['normal']
        print(f"{plain}{text}{plain}")
        self._send_to_log(f"[NORMAL] {text}")

    def warn(self, text):
        """Print *text* in red with a 'Warning:' prefix and record it as [WARNING]."""
        print(f"{self.colors['warn']}Warning: {text}{self.colors['normal']}")
        self._send_to_log(f"[WARNING] {text}")

    def log(self, text):
        """Record *text* as [LOG]; echo it to the console only in verbose mode."""
        if self.verbose:
            plain = self.colors['normal']
            print(f"{plain}{text}{plain}")
        self._send_to_log(f"[LOG] {text}")
def get_promoter_interactions(name):
    '''
    Calculate promoter binding strengths. The relative strengths defined here
    come from 2012 Covert, et al paper.
    '''
    # ignored promoters yield None, matching the original early return
    if name in IGNORE_REGULATORY:
        return
    strengths = {}
    # E. coli promoters bind host polymerase (plain and phosphorylated forms)
    for promoter in ("E. coli promoter A1", "E. coli promoter A2",
                     "E. coli promoter A3"):
        strengths[promoter] = {'ecolipol': 10e4,
                               'ecolipol-p': 3e4}
    for promoter in ("E. coli B promoter", "E. coli C promoter"):
        strengths[promoter] = {'ecolipol': 1e4,
                               'ecolipol-p': 0.3e4}
    # T7 promoter groups: (promoter names, gp1 factor, gp3.5 factor) relative
    # to the phi10 binding constant
    phi_groups = [
        (("T7 promoter phi1.1A", "T7 promoter phi1.1B", "T7 promoter phi1.3",
          "T7 promoter phi1.5", "T7 promoter phi1.6"), 0.01, 0.01 * 0.5),
        (("T7 promoter phi2.5", "T7 promoter phi3.8", "T7 promoter phi4c",
          "T7 promoter phi4.3", "T7 promoter phi4.7"), 0.01, 0.01 * 0.5),
        (("T7 promoter phi6.5",), 0.05, 0.05),
        (("T7 promoter phi9",), 0.2, 0.2),
        (("T7 promoter phi10",), 1.0, 1.0),
        (("T7 promoter phi13", "T7 promoter phi17"), 0.1, 0.1),
    ]
    for promoters, gp1_factor, gp35_factor in phi_groups:
        for promoter in promoters:
            strengths[promoter] = {'gp1': PHI10_BIND * gp1_factor,
                                   'gp3.5': PHI10_BIND * gp35_factor}
    if name not in strengths:
        raise ValueError(
            "Promoter strength for {0} not assigned.".format(name))
    return strengths[name]
def get_terminator_interactions(name):
    '''
    Get terminator efficiencies.
    '''
    # efficiency per polymerase species for each known terminator
    efficiencies = {
        "E. coli transcription terminator TE": {'ecolipol': 1.0,
                                                'ecolipol-p': 1.0,
                                                'gp1': 0.0,
                                                'gp1+gp3.5': 0.0},
        "T7 transcription terminator Tphi": {'gp1': 0.85,
                                             'gp1+gp3.5': 0.85},
    }
    # NOTE(review): unknown terminators fall back to the literal key 'name',
    # which looks suspicious (possibly {name: 0.0} was intended) - kept as-is
    # to preserve behaviour; confirm intent before changing.
    return efficiencies.get(name, {'name': 0.0})
def compute_cds_weights(record, feature, factor, weights):
    """Fill per-genome-position weights for one CDS.

    Positions whose codon is an optimal E. coli codon for the translated
    residue receive *factor*; positions of residues with no optimal-codon
    entry receive 1; positions of non-optimal codons are left untouched.
    The (mutated) weights list is also returned.
    """
    nuc_seq = feature.location.extract(record).seq
    aa_seq = feature.qualifiers["translation"][0]
    weight_sum = 0
    for index in range(len(nuc_seq)):
        aa_index = index // 3
        # nucleotides beyond the translated protein (e.g. the stop codon) are skipped
        if aa_index >= len(aa_seq):
            continue
        codon_start = aa_index * 3
        codon = nuc_seq[codon_start:codon_start + 3]
        genome_index = feature.location.start + index
        residue = aa_seq[aa_index]
        optimal_codons = OPT_CODONS_E_COLI.get(residue)
        if optimal_codons is not None:
            if codon in optimal_codons:
                weights[genome_index] = factor
                weight_sum += factor
        else:
            weights[genome_index] = 1
            weight_sum += 1
    return weights
def normalize_weights(weights):
    """Normalise CDS weights to a mean of 1 over coding (non-zero) positions.

    Zero-weight positions (non-CDS) are replaced by 1 after normalisation.
    FIX: empty or all-zero input previously raised ZeroDivisionError; it now
    returns a list of ones of the same length.
    """
    # Average over all CDSs, which will have non-zero weights
    non_zero = sum(1 for w in weights if w != 0)
    if non_zero == 0:
        return [1] * len(weights)
    mean_weight = sum(weights) / non_zero
    # Replace non-CDS weights with 1
    return [w / mean_weight if w != 0 else 1 for w in weights]
def phage_model(input, output=None, time=1500, verbose=True, seed=None, multiplicity=1, use_rnases=False):
    """Run a pinetree gene-expression simulation of a T7 phage genome.

    Parameters (as used below):
      input        -- path to a GenBank file; only the first record is simulated.
      output       -- prefix for the *.counts.tsv / *.log / *.features.csv files;
                      derived from `input` when omitted.
      time         -- simulation time limit in seconds (passed to sim.simulate).
      verbose      -- forwarded to Logger (controls console echo of log() calls).
      seed         -- RNG seed; a random one is drawn when None.
      multiplicity -- number of phage genome copies to register (MOI).
      use_rnases   -- whether RNase sites / transcript degradation are simulated.
    """
    # NOTE(review): `0 > seed > 2147483647` can never be True (chained
    # comparison), so this range validation never fires - likely meant
    # `not 0 <= seed <= 2147483647`.
    if (seed != None) and (0 > seed > 2147483647):
        raise ValueError(f"Seed must be between 0 and 2147483647. You used '{seed}'.")
    sim = pt.Model(cell_volume=CELL_VOLUME)
    if not output:
        output = ".".join(input.split(".")[:-1])
    # Make the directory for output if it doesnt exist
    output_dir = output.replace("\\", "/")
    output_dir = "/".join(output_dir.split("/")[:-1])
    if output_dir != '' and not os.path.exists(output_dir):
        os.makedirs(output_dir)
    # Log relevant information
    if output[-1] == "/" or output[-1] == ".":
        log_output = f"{output}pinetree.log"
    else:
        log_output = f"{output}.log"
    logger = Logger(log_output=f"{log_output}", verbose=verbose)
    # Use just the first record
    all_records = list(SeqIO.parse(input, "genbank"))
    if len(all_records)>1:
        logger.normal("Ignored extra sequence records in input file.")
    record=all_records[0]
    genome_length = len(record.seq)
    start_time = datetime.datetime.utcnow()
    logger.normal("[Pinetree] Pinetree T7 Genome Simulation")
    logger.normal("barricklab/igem2020 Fork")
    # Try and find a git repo and log its last commit
    if os.path.exists(".git/refs/heads/master"):
        git_master_path = ".git/refs/heads/master"
    elif os.path.exists("../.git/refs/heads/master"):
        git_master_path = "../.git/refs/heads/master"
    else:
        git_master_path = ""
    if git_master_path:
        with open(git_master_path, 'r') as file:
            commit_hash = file.readline().strip()
        logger.normal(f"Last commit: {commit_hash}")
    logger.normal(f"Script and simulation started at {start_time} UTC")
    # --- Feature Acquisition and validation VVV
    feature_dict = {}
    for i, feature in enumerate(record.features):  # Acquisition
        # GenBank positions are 0-based here; +1 converts to 1-based start
        start = feature.location.start.position + 1
        stop = feature.location.end.position
        name = ''
        feature_type = ''
        interactions = None
        skip = False
        source_feature = feature
        rate = 0
        if 'name' in feature.qualifiers:
            name = feature.qualifiers["name"][0]
        elif "note" in feature.qualifiers:
            name = feature.qualifiers["note"][0]
        if feature.type == "regulatory":
            if "promoter" in feature.qualifiers["regulatory_class"]:
                # widen very short promoters to a 35-bp footprint
                length = stop - start
                if length < 35:
                    start = start - 35
                interactions = get_promoter_interactions(name)
                feature_type = "promoter"
            if "terminator" in feature.qualifiers["regulatory_class"]:
                interactions = get_terminator_interactions(name)
                feature_type = "terminator"
        elif feature.type == "CDS":
            feature_type = "cds"
        elif feature.type == "misc_structure":
            feature_type = "misc"
            if "rnase" in name.lower():
                feature_type = "rnase_site"
                # look up an explicit per-site rate; 0 means "ignore this site"
                for site_name in RNAse_Table.keys():
                    if site_name == name.split(" ")[-1] or site_name == name:
                        rate = RNAse_Table[site_name]
                        if rate == 0:
                            skip = True
        else:
            feature_type = None
        if feature_type:
            feature_dict[i] = {"start": start,
                               "stop": stop,
                               "name": name,
                               "type": feature_type,
                               "interactions": interactions,
                               "skip": skip,
                               "source_feature": source_feature,
                               "rate": rate
                               }
    # TODO: Add more feature validation
    if use_rnases == False:
        logger.normal("Not considering RNase activity (use flag -r to consider)")
    else:
        logger.normal(f"Considering RNase activity.")
    for feature in feature_dict.items():  # Validation
        feature = feature[1]
        if feature['skip']:
            continue
        if feature['name'] in IGNORE_REGULATORY or feature['name'] in IGNORE_CDS:
            feature['skip'] = True
            logger.log(f"Ignored feature {feature['name']} ({feature['start']} - {feature['stop']})")
            continue
        if 'pseudo' in feature['source_feature'].qualifiers.keys():
            logger.warn(f"Found {feature['name']} with flag 'pseudo'. Ignoring.")
            feature['skip'] = True
            continue
        if feature['stop'] - feature['start'] < 50 and feature['type'] in ['gene', 'cds']:
            logger.warn(f"Found {feature['type'], feature['name']} that is tiny! ({feature['start']} - {feature['stop']})")
        if feature['type'] == "rnase_site" and use_rnases == False:
            feature['skip'] = True
            continue
        if feature['type'] == "rnase_site" and feature['rate'] == 0:
            logger.log(f"{feature['name']} has no explicit binding rate. Defaulting.")
            feature['rate'] = RNase_III["rate"]
    # -- Feature Acquisition Validation ^^^
    # -- Set up masks
    mask_interactions = ["gp1", "gp1+gp3.5", "ecolipol", "ecolipol-p", "ecolipol-2", "ecolipol-2-p"]
    logger.normal("Implemented masks and weighting")
    # -- Set up masks
    # -- Add Features to Sim VVV
    phage_genomes = {}
    # register one genome copy per infection (multiplicity of infection)
    for infection in range(0, multiplicity):
        weights = [0.0] * len(record.seq)
        if use_rnases:
            phage_genomes[infection] = pt.Genome(name=f"phage_{infection}", length=genome_length,
                                                 transcript_degradation_rate_ext=RNase_E['rate'],
                                                 rnase_speed=RNase_E['speed'],
                                                 rnase_footprint=RNase_E['footprint'])
        else:
            phage_genomes[infection] = pt.Genome(name=f"phage_{infection}", length=genome_length)
        output_feature_dict = dict()
        for feature in feature_dict.items():
            feature_contents = feature[1]
            if feature_contents['skip']:
                continue
            elif feature_contents['type'] == "promoter":
                phage_genomes[infection].add_promoter(feature_contents['name'], feature_contents['start'], feature_contents['stop'], feature_contents['interactions'])
                logger.log(f"Added promoter feature: {feature_contents['name']}, Start: {feature_contents['start']}, Stop: {feature_contents['stop']}")
                output_feature_dict[copy.copy(feature[0])] = copy.deepcopy(feature[1])
            elif feature_contents['type'] == "terminator":
                phage_genomes[infection].add_terminator(feature_contents['name'], feature_contents['start'], feature_contents['stop'], feature_contents['interactions'])
                logger.log(f"Added terminator feature: {feature_contents['name']}, Start: {feature_contents['start']}, Stop: {feature_contents['stop']}")
                output_feature_dict[copy.copy(feature[0])] = copy.deepcopy(feature[1])
            elif feature_contents['type'] == "cds":
                # ribosome binding site assumed to be the 30 bp upstream of the CDS
                phage_genomes[infection].add_gene(name=feature_contents['name'], start=feature_contents['start'], stop=feature_contents['stop'],
                                                  rbs_start=feature_contents['start'] - 30, rbs_stop=feature_contents['start'], rbs_strength=1e7)
                weights = compute_cds_weights(record, feature_contents['source_feature'], 1.0, weights)
                logger.log(f"Added CDS feature: {feature_contents['name']}, Start: {feature_contents['start']}, Stop: {feature_contents['stop']}")
                output_feature_dict[copy.copy(feature[0])] = copy.deepcopy(feature[1])
            elif feature_contents['type'] == "rnase_site":
                phage_genomes[infection].add_rnase_site(name=feature_contents['name'], start=feature_contents['start'], stop=feature_contents['stop']+10, rate=feature_contents['rate'])
                logger.log(f"Added RNase site: {feature_contents['name']}, Start: {feature_contents['start']}, Stop: {feature_contents['stop']}")
                output_feature_dict[copy.copy(feature[0])] = copy.deepcopy(feature[1])
            else:
                continue
        # first 500 bp are masked: only the listed polymerases may enter
        phage_genomes[infection].add_mask(500, mask_interactions)
        norm_weights = normalize_weights(weights)
        phage_genomes[infection].add_weights(norm_weights)
        sim.register_genome(phage_genomes[infection])
        logger.log(f"Registered phage genome #{infection+1}")
    # -- Add Featues to Sim ^^^
    # -- Output Features to CSV VVVVV
    out_columns = ['name', 'type', 'start', 'end', 'strength']
    out_data = []
    for feature in feature_dict.items():
        feature_contents = feature[1]
        if feature_contents['skip'] == True:
            continue
        feature_contents['strength'] = None
        if feature_contents['type'] == "promoter":
            # report one representative binding strength per promoter
            promoter_interaction_result = get_promoter_interactions(feature_contents['name'])
            if 'ecolipol' in promoter_interaction_result.keys():
                feature_contents['strength'] = promoter_interaction_result['ecolipol']
            elif 'gp1' in promoter_interaction_result.keys():
                feature_contents['strength'] = promoter_interaction_result['gp1']
            else:
                feature_contents['strength'] = 0
        elif feature_contents['type'] == "rnase_site":
            feature_contents['strength'] = feature_contents['rate']
        out_data.append({'name': feature_contents['name'],
                         'type': feature_contents['type'],
                         'start': feature_contents['start'],
                         'end': feature_contents['stop'],
                         'strength': feature_contents['strength']})
    if output[-1] == "/" or output[-1] == ".":
        out_features_filename = f"{output}features.csv"
    else:
        out_features_filename = f"{output}.features.csv"
    with open(out_features_filename, 'w') as csv_file_object:
        writer = csv.DictWriter(csv_file_object, fieldnames=out_columns)
        writer.writeheader()
        for contents in out_data:
            writer.writerow(contents)
    # -- Output Features to CSV ^^^^^
    logger.normal("Registered genome features")
    mask_interactions = ["gp1", "gp1+gp3.5",
                         "ecolipol", "ecolipol-p", "ecolipol-2", "ecolipol-2-p"]
    # polymerases/ribosomes: (name, footprint, speed, copy number) - confirm
    # argument meaning against the pinetree API
    sim.add_polymerase("gp1", 35, 230, 0)
    sim.add_polymerase("gp1+gp3.5", 35, 230, 0)
    sim.add_polymerase("ecolipol", 35, 45, 0)
    sim.add_polymerase("ecolipol-p", 35, 45, 0)
    sim.add_polymerase("ecolipol-2", 35, 45, 0)
    sim.add_polymerase("ecolipol-2-p", 35, 45, 0)
    sim.add_ribosome(30, 30, 0)
    sim.add_species("bound_ribosome", 10000)
    sim.add_species("bound_ecolipol", 1800)
    sim.add_species("bound_ecolipol_p", 0)
    sim.add_species("ecoli_genome", 0)
    sim.add_species("ecoli_transcript", 0)
    # host-cell background reactions (rates from the model this fork is based on)
    sim.add_reaction(1e6, ["ecoli_transcript", "__ribosome"], [
        "bound_ribosome"])
    sim.add_reaction(0.04, ["bound_ribosome"], [
        "__ribosome", "ecoli_transcript"])
    sim.add_reaction(0.001925, ["ecoli_transcript"], ["degraded_transcript"])
    sim.add_reaction(1e7, ["ecolipol", "ecoli_genome"], ["bound_ecolipol"])
    sim.add_reaction(
        0.3e7, ["ecolipol-p", "ecoli_genome"], ["bound_ecolipol_p"])
    sim.add_reaction(0.04, ["bound_ecolipol"], [
        "ecolipol", "ecoli_genome", "ecoli_transcript"])
    sim.add_reaction(0.04, ["bound_ecolipol_p"], [
        "ecolipol-p", "ecoli_genome", "ecoli_transcript"])
    # gp0.7 phosphorylates host polymerase; gp2 inhibits it; gp3.5 binds gp1
    sim.add_reaction(3.8e7, ["gp0.7", "ecolipol"],
                     ["ecolipol-p", "gp0.7"])
    sim.add_reaction(3.8e7, ["gp0.7", "ecolipol+gp2"],
                     ["ecolipol+gp2-p", "gp0.7"])
    sim.add_reaction(3.8e7, ["gp2", "ecolipol"], ["ecolipol+gp2"])
    sim.add_reaction(3.8e7, ["gp2", "ecolipol-p"], ["ecolipol+gp2-p"])
    sim.add_reaction(1.1, ["ecolipol+gp2-p"], ["gp2", "ecolipol-p"])
    sim.add_reaction(1.1, ["ecolipol+gp2"], ["gp2", "ecolipol"])
    sim.add_reaction(3.8e9, ["gp3.5", "gp1"], ["gp1+gp3.5"])
    sim.add_reaction(3.5, ["gp1+gp3.5"], ["gp3.5", "gp1"])
    logger.normal("Registered reactions")
    logger.normal("Running simulation")
    if not seed:
        seed = random.randint(0, 2147483647)
    sim.seed(seed)
    logger.normal(f"Random seed was set to {seed}")
    if output[-1] == "/" or output[-1] == ".":
        sim_output = f"{output}phage.counts.tsv"
    else:
        sim_output = f"{output}.counts.tsv"
    # -- Running the actual sim. VVVV
    # Note: Multiprocessing necessary for working keyboard interrupts.
    try:
        sim_process = multiprocessing.Process(target=sim.simulate, kwargs={'time_limit': time,
                                                                           'time_step': 5,
                                                                           'output': sim_output
                                                                           })
        sim_process.start()
        sim_process.join()
    except KeyboardInterrupt:
        sim_process.terminate()
        # recover how far the simulation got from the last line of the counts file
        with open(sim_output, 'r') as outfile:
            for line in outfile:
                pass
            last_file_line = line
        interrupt_time = str(last_file_line).split("\t")[0]
        logger.warn(f'Received keyboard interruption. Simulation reached time {interrupt_time}')
        finish_time = datetime.datetime.utcnow()
        run_time = (finish_time - start_time).total_seconds()
        logger.normal(f"Simulation interrupted after {run_time / 60} minutes.")
        exit(0)
    except TypeError:
        # fall back to running in-process when multiprocessing cannot pickle the target
        logger.warn("There was a problem with multiprocessing. Keyboard Interrupts won't work, but the simulation should still run.")
        sim.simulate(time_limit= time, time_step=5, output=sim_output)
    finish_time = datetime.datetime.utcnow()
    run_time = (finish_time-start_time).total_seconds()
    logger.normal(f"Simulation completed in {run_time/60} minutes.")
    # -- Running the actual sim. ^^^^
if __name__ == "__main__":
    # Command-line entry point for the T7 phage simulation.
    # FIX: the old try/except AttributeError blocks were dead code - argparse
    # always sets every declared `dest` - and the hard-coded input/output
    # variables were unconditionally overwritten; both removed.
    parser = argparse.ArgumentParser(description='Perform simulation of T7 phage gene expression')
    parser.add_argument('-i',
                        action='store',
                        metavar="input.gb",
                        dest='i',
                        required=True,
                        type=str,
                        help="input file in fasta format (REQUIRED)")
    parser.add_argument('-o',
                        action='store',
                        metavar="output-prefix",
                        dest='o',
                        required=False,
                        type=str,
                        help="prefix of *.counts.tsv and *.log output files (default: derived from the input file name)")
    parser.add_argument('-t',
                        action='store',
                        metavar="seconds",
                        dest='t',
                        required=False,
                        type=int,
                        help="Duration of simulation in seconds (default: 1500)")
    parser.add_argument('-s',
                        action='store',
                        metavar="seed",
                        dest='s',
                        required=False,
                        type=int,
                        help="Randomness seed")
    parser.add_argument('-m',
                        action='store',
                        metavar="MOI",
                        dest='m',
                        required=False,
                        type=int,
                        help="Multiplicity of Infection (default: 1)")
    parser.add_argument('-r',
                        action='store_true',
                        dest='r',
                        required=False,
                        help="Option to use RNases")
    options = parser.parse_args()
    input_genome = options.i
    # falsy values fall back to defaults, preserving the original behaviour
    # (e.g. `-t 0` and `-m 0` are coerced to 1500 and 1, `-s 0` to a random seed)
    output_path = options.o or ".".join(input_genome.split(".")[:-1])
    time = options.t or 1500
    multiplicity = options.m or 1
    seed = options.s or None
    use_rnases = options.r
    if not os.path.exists(input_genome):
        print(f"Could not find file {input_genome}")
        exit(1)
    phage_model(input_genome, output_path, time, verbose=False, seed=seed, multiplicity=multiplicity, use_rnases=use_rnases)
|
import numpy as np
from . import DistributionFunction as DistFunc
from . DistributionFunction import DistributionFunction
# BOUNDARY CONDITIONS (WHEN f_re IS DISABLED)
# (NOTE: These are kept for backwards compatibility. You
# should _really_ use 'DistributionFunction.XXX' instead)
# Re-exported boundary-condition flags from the DistributionFunction module.
BC_F_0 = DistFunc.BC_F_0
BC_PHI_CONST = DistFunc.BC_PHI_CONST
BC_DPHI_CONST = DistFunc.BC_DPHI_CONST
# Interpolation methods for advection term in kinetic equation
# (re-exported so callers can keep using the old module-level names).
AD_INTERP_CENTRED = DistFunc.AD_INTERP_CENTRED
AD_INTERP_UPWIND = DistFunc.AD_INTERP_UPWIND
AD_INTERP_UPWIND_2ND_ORDER = DistFunc.AD_INTERP_UPWIND_2ND_ORDER
AD_INTERP_DOWNWIND = DistFunc.AD_INTERP_DOWNWIND
AD_INTERP_QUICK = DistFunc.AD_INTERP_QUICK
AD_INTERP_SMART = DistFunc.AD_INTERP_SMART
AD_INTERP_MUSCL = DistFunc.AD_INTERP_MUSCL
AD_INTERP_OSPRE = DistFunc.AD_INTERP_OSPRE
AD_INTERP_TCDF = DistFunc.AD_INTERP_TCDF
AD_INTERP_JACOBIAN_LINEAR = DistFunc.AD_INTERP_JACOBIAN_LINEAR
AD_INTERP_JACOBIAN_FULL = DistFunc.AD_INTERP_JACOBIAN_FULL
AD_INTERP_JACOBIAN_UPWIND = DistFunc.AD_INTERP_JACOBIAN_UPWIND
# Modes for interpreting 'pThreshold', the momentum boundary separating the
# hot region from the cold bulk (used with collfreq_mode FULL).
# NOTE(review): exact unit/smoothing semantics of each mode are defined in
# the kernel, not visible here -- confirm before documenting further.
HOT_REGION_P_MODE_MC = 1
HOT_REGION_P_MODE_THERMAL = 2
HOT_REGION_P_MODE_THERMAL_SMOOTH = 3
# Particle source (S_particle) models.
PARTICLE_SOURCE_ZERO = 1
PARTICLE_SOURCE_IMPLICIT = 2
PARTICLE_SOURCE_EXPLICIT = 3
# Momentum-space shape of the particle source term.
PARTICLE_SOURCE_SHAPE_MAXWELLIAN = 1
PARTICLE_SOURCE_SHAPE_DELTA = 2
# Distribution mode options for f_hot.
F_HOT_DIST_MODE_NONREL = 1
class HotElectronDistribution(DistributionFunction):
    """
    Settings object for the hot-electron distribution function 'f_hot'.

    Extends DistributionFunction with the options that only apply to the
    hot region: the hot/cold momentum threshold, the particle source model,
    and the distribution mode.
    """

    def __init__(self, settings,
                 fhot=None, initr=None, initp=None, initxi=None,
                 initppar=None, initpperp=None,
                 rn0=None, n0=None, rT0=None, T0=None, bc=BC_PHI_CONST,
                 ad_int_r =AD_INTERP_CENTRED,
                 ad_int_p1=AD_INTERP_CENTRED,
                 ad_int_p2=AD_INTERP_CENTRED,
                 ad_jac_r =AD_INTERP_JACOBIAN_FULL,
                 ad_jac_p1=AD_INTERP_JACOBIAN_FULL,
                 ad_jac_p2=AD_INTERP_JACOBIAN_FULL,
                 fluxlimiterdamping=1.0,
                 dist_mode = F_HOT_DIST_MODE_NONREL,
                 pThreshold=7, pThresholdMode=HOT_REGION_P_MODE_THERMAL,
                 particleSource=PARTICLE_SOURCE_EXPLICIT,
                 particleSourceShape=PARTICLE_SOURCE_SHAPE_MAXWELLIAN):
        """
        Constructor. Forwards the generic distribution-function settings to
        the base class and stores the f_hot-specific options.
        """
        # Generic distribution-function settings are handled by the parent.
        super().__init__(settings=settings, name='f_hot', grid=settings.hottailgrid,
                         f=fhot, initr=initr, initp=initp, initxi=initxi, initppar=initppar,
                         initpperp=initpperp, rn0=rn0, n0=n0, rT0=rT0, T0=T0,
                         bc=bc, ad_int_r=ad_int_r, ad_int_p1=ad_int_p1,
                         ad_int_p2=ad_int_p2, ad_jac_r=ad_jac_r, ad_jac_p1=ad_jac_p1,
                         ad_jac_p2=ad_jac_p2, fluxlimiterdamping=fluxlimiterdamping)
        # f_hot-specific options; routed through the setters so the
        # assignment logic lives in exactly one place.
        self.dist_mode = dist_mode
        self.setHotRegionThreshold(pThreshold=pThreshold, pMode=pThresholdMode)
        self.setParticleSource(particleSource=particleSource, shape=particleSourceShape)

    def setHotRegionThreshold(self, pThreshold=7, pMode=HOT_REGION_P_MODE_THERMAL):
        """
        Defines the momentum boundary 'pThreshold' that separates 'cold'
        from 'hot' electrons when using collfreq_mode FULL.
        """
        self.pThreshold = pThreshold
        self.pThresholdMode = pMode

    def setParticleSource(self, particleSource=PARTICLE_SOURCE_EXPLICIT, shape=PARTICLE_SOURCE_SHAPE_MAXWELLIAN):
        """
        Chooses the S_particle model used with collfreq_mode FULL, which is
        designed to force the density moment of f_hot to n_cold+n_hot.

        ZERO: The particle source is disabled and set to zero
        EXPLICIT/IMPLICIT: Two in principle equivalent models, but can be more or less stable in different situations.
        """
        self.particleSource = particleSource
        self.particleSourceShape = shape

    def fromdict(self, data):
        """
        Load data for this object from the given dictionary.
        """
        super().fromdict(data)
        try:
            self.dist_mode = data['dist_mode']
        except KeyError:
            pass
        # pThresholdMode is read only when pThreshold is present, mirroring
        # the paired way todict() emits them.
        if 'pThreshold' in data:
            self.pThreshold = data['pThreshold']
            self.pThresholdMode = data['pThresholdMode']
        try:
            self.particleSource = data['particleSource']
        except KeyError:
            pass
        try:
            self.particleSourceShape = data['particleSourceShape']
        except KeyError:
            pass

    def todict(self):
        """
        Returns a Python dictionary containing all settings of
        this HotElectronDistribution object.
        """
        data = super().todict()
        data['dist_mode'] = self.dist_mode
        # Threshold / particle-source settings only apply when the
        # hot-tail grid is enabled.
        if not self.grid.enabled:
            return data
        data['pThreshold'] = self.pThreshold
        data['pThresholdMode'] = self.pThresholdMode
        data['particleSource'] = self.particleSource
        data['particleSourceShape'] = self.particleSourceShape
        return data
|
# Generated by Django 3.2.3 on 2021-05-24 15:36
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Adds the 'category' foreign key to the existing Beverage model.
    initial = True
    dependencies = [
        # Both apps' initial migrations must be applied before the FK exists.
        ('categories', '0001_initial'),
        ('beverages', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='beverage',
            name='category',
            # null=True keeps existing rows valid; PROTECT blocks deleting a
            # category that still has beverages; related_name enables
            # category.beverages reverse lookups.
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='beverages', to='categories.category'),
        ),
    ]
|
from django.db import models
# Create your models here.
# Models define the structure of database tables.
# Each Item row stores a title, a description, and the number in stock.
# Admins are allowed to create, edit, or delete items (to-do list style).
# The Item class below defines:
#   - title: a character field limited to 200 characters
#   - description: a text field, since the length is not known in advance
#   - amount: an integer field that accepts only whole numbers
class Item(models.Model):
    """A stock item that admins can create, edit, or delete."""

    # Short name of the item (max 200 characters).
    title = models.CharField(max_length=200)
    # Free-form description; TextField because the length is unbounded.
    description = models.TextField()
    # Number of units in stock (whole numbers only).
    amount = models.IntegerField()

    def __str__(self):
        """Display the title in the admin and shell instead of 'Item object (N)'."""
        return self.title
|
# coding: utf-8
# Standard Python libraries
from pathlib import Path
from typing import Optional, Union
# http://www.numpy.org/
import numpy as np
import numpy.typing as npt
# https://github.com/usnistgov/DataModelDict
from DataModelDict import DataModelDict as DM
from yabadaba import load_query
# Local imports
from . import CalculationSubset
from ..tools import aslist
from ..input import boolean, value
class Dislocation(CalculationSubset):
    """Handles calculation terms for dislocation parameters"""

############################# Core properties #################################

    def __init__(self,
                 parent,
                 prefix: str = '',
                 templateheader: Optional[str] = None,
                 templatedescription: Optional[str] = None):
        """
        Initializes a calculation record subset object.

        Parameters
        ----------
        parent : iprPy.calculation.Calculation
            The parent calculation object that the subset object is part of.
            This allows for the subset methods to access parameters set to the
            calculation itself or other subsets.
        prefix : str, optional
            An optional prefix to add to metadata field names to allow for
            differentiating between multiple subsets of the same style within
            a single record
        templateheader : str, optional
            An alternate header to use in the template file for the subset.
        templatedescription : str, optional
            An alternate description of the subset for the templatedoc.
        """
        super().__init__(parent, prefix=prefix, templateheader=templateheader,
                         templatedescription=templatedescription)

        # Defect-definition parameters (all unset until loaded).
        self.param_file = None
        self.key = None
        self.id = None
        self.slip_hkl = None
        self.ξ_uvw = None
        self.burgers = None
        self.m = None
        self.n = None
        # System-construction parameters.
        self.shift = None
        self.shiftscale = False
        self.shiftindex = 0
        self.sizemults = [1,1,1]
        self.amin = 0.0
        self.bmin = 0.0
        self.cmin = 0.0
        self.family = None
        # Cached file content / loaded model (private).
        self.__content = None
        self.__model = None

############################## Class attributes ################################

    @property
    def param_file(self) -> Optional[Path]:
        """Path or None: The path to the dislocation parameter file"""
        return self.__param_file

    @param_file.setter
    def param_file(self, val: Union[str, Path, None]):
        if val is None:
            self.__param_file = None
        else:
            self.__param_file = Path(val)

    @property
    def key(self) -> Optional[str]:
        """str or None: UUID key of the dislocation parameter set"""
        return self.__key

    @key.setter
    def key(self, val: Optional[str]):
        if val is None:
            self.__key = None
        else:
            self.__key = str(val)

    @property
    def id(self) -> Optional[str]:
        """str or None: id of the dislocation parameter set"""
        return self.__id

    @id.setter
    def id(self, val: Optional[str]):
        if val is None:
            self.__id = None
        else:
            self.__id = str(val)

    @property
    def slip_hkl(self) -> Optional[list]:
        """list or None: The crystallographic (hkl) or (hkil) slip plane"""
        return self.__slip_hkl

    @slip_hkl.setter
    def slip_hkl(self, val: Optional[npt.ArrayLike]):
        if val is None:
            self.__slip_hkl = None
        else:
            if isinstance(val, str):
                val = np.array(val.strip().split(), dtype=float)
            else:
                val = np.asarray(val, dtype=float)
            assert val.shape == (3,) or val.shape == (4,)
            # NOTE(review): stored as a plain list, unlike burgers/m/n which
            # keep ndarrays.  Behavior kept for serialization compatibility.
            self.__slip_hkl = val.tolist()

    @property
    def ξ_uvw(self) -> Optional[list]:
        """list or None: The crystallographic [uvw] or [uvtw] line direction"""
        return self.__ξ_uvw

    @ξ_uvw.setter
    def ξ_uvw(self, val: Optional[npt.ArrayLike]):
        if val is None:
            self.__ξ_uvw = None
        else:
            if isinstance(val, str):
                val = np.array(val.strip().split(), dtype=float)
            else:
                val = np.asarray(val, dtype=float)
            assert val.shape == (3,) or val.shape == (4,)
            # NOTE(review): stored as a plain list, like slip_hkl above.
            self.__ξ_uvw = val.tolist()

    @property
    def burgers(self) -> Optional[np.ndarray]:
        """numpy.ndarray or None: The crystallographic [uvw] or [uvtw] Burgers vector"""
        return self.__burgers

    @burgers.setter
    def burgers(self, val: Optional[npt.ArrayLike]):
        if val is None:
            self.__burgers = None
        else:
            if isinstance(val, str):
                val = np.array(val.strip().split(), dtype=float)
            else:
                val = np.asarray(val, dtype=float)
            assert val.shape == (3,) or val.shape == (4,)
            self.__burgers = val

    @property
    def m(self) -> Optional[np.ndarray]:
        """numpy.ndarray or None: The Cartesian vector that corresponds to the dislocation solution's m-axis"""
        return self.__m

    @m.setter
    def m(self, val: Optional[npt.ArrayLike]):
        if val is None:
            self.__m = None
        else:
            if isinstance(val, str):
                val = np.array(val.strip().split(), dtype=float)
            else:
                val = np.asarray(val, dtype=float)
            # Must be a unit vector parallel to one of the Cartesian axes.
            assert val.shape == (3,)
            assert np.isclose(val[0], 1.0) or np.isclose(val[1], 1.0) or np.isclose(val[2], 1.0)
            assert np.isclose(np.linalg.norm(val), 1.0)
            self.__m = val

    @property
    def n(self) -> Optional[np.ndarray]:
        """numpy.ndarray or None: The Cartesian vector that corresponds to the dislocation solution's n-axis"""
        return self.__n

    @n.setter
    def n(self, val: Optional[npt.ArrayLike]):
        if val is None:
            self.__n = None
        else:
            if isinstance(val, str):
                val = np.array(val.strip().split(), dtype=float)
            else:
                val = np.asarray(val, dtype=float)
            # Must be a unit vector parallel to one of the Cartesian axes.
            assert val.shape == (3,)
            assert np.isclose(val[0], 1.0) or np.isclose(val[1], 1.0) or np.isclose(val[2], 1.0)
            assert np.isclose(np.linalg.norm(val), 1.0)
            self.__n = val

    @property
    def shift(self) -> Optional[np.ndarray]:
        """numpy.ndarray or None: The rigid body shift to position the dislocation solution relative to the atomic configuration"""
        return self.__shift

    @shift.setter
    def shift(self, val: Optional[npt.ArrayLike]):
        if val is None:
            self.__shift = None
        else:
            if isinstance(val, str):
                val = np.array(val.strip().split(), dtype=float)
            else:
                val = np.asarray(val, dtype=float)
            assert val.shape[0] == 3
            self.__shift = val

    @property
    def shiftscale(self) -> bool:
        """bool: Indicates if shift is absolute Cartesian or scaled relative to rcell"""
        return self.__shiftscale

    @shiftscale.setter
    def shiftscale(self, val: bool):
        self.__shiftscale = boolean(val)

    @property
    def shiftindex(self) -> Optional[int]:
        """int or None: The index of the pre-determined shifts values to use for shift"""
        return self.__shiftindex

    @shiftindex.setter
    def shiftindex(self, val: Optional[int]):
        if val is None:
            self.__shiftindex = None
        else:
            self.__shiftindex = int(val)

    @staticmethod
    def _normalize_mults(val: Union[int, list, tuple], name: str) -> tuple:
        """
        Normalizes a size-multiplier value into a validated (negative or zero,
        positive or zero) pair.  Shared by the a_mults, b_mults and c_mults
        setters, which previously triplicated this logic.

        Parameters
        ----------
        val : int, list or tuple
            Either a single non-zero int (expanded to (0, val) or (val, 0))
            or a pair (non-positive, non-negative) that is not (0, 0).
        name : str
            The attribute name, used in error messages.

        Returns
        -------
        tuple
            The validated two-element multiplier pair.

        Raises
        ------
        ValueError
            If the value(s) do not describe a non-empty multiplier range.
        """
        val = aslist(val)
        if len(val) == 1:
            val[0] = int(val[0])
            if val[0] > 0:
                val = [0, val[0]]
            # Add 0 after if val is negative
            elif val[0] < 0:
                val = [val[0], 0]
            else:
                raise ValueError(f'{name} values cannot both be 0')
        elif len(val) == 2:
            val[0] = int(val[0])
            val[1] = int(val[1])
            if val[0] > 0:
                raise ValueError(f'First {name} value must be <= 0')
            if val[1] < 0:
                raise ValueError(f'Second {name} value must be >= 0')
            if val[0] == val[1]:
                raise ValueError(f'{name} values cannot both be 0')
        else:
            # Previously len > 2 silently fell through unvalidated.
            raise ValueError(f'{name} must be given as 1 or 2 values')
        return tuple(val)

    @property
    def a_mults(self) -> tuple:
        """tuple: Size multipliers for the rotated a box vector"""
        return self.__a_mults

    @a_mults.setter
    def a_mults(self, val: Union[int, list, tuple]):
        self.__a_mults = self._normalize_mults(val, 'a_mults')

    @property
    def b_mults(self) -> tuple:
        """tuple: Size multipliers for the rotated b box vector"""
        return self.__b_mults

    @b_mults.setter
    def b_mults(self, val: Union[int, list, tuple]):
        self.__b_mults = self._normalize_mults(val, 'b_mults')

    @property
    def c_mults(self) -> tuple:
        """tuple: Size multipliers for the rotated c box vector"""
        return self.__c_mults

    @c_mults.setter
    def c_mults(self, val: Union[int, list, tuple]):
        self.__c_mults = self._normalize_mults(val, 'c_mults')

    @property
    def sizemults(self) -> tuple:
        """tuple: All three sets of size multipliers"""
        return (self.a_mults, self.b_mults, self.c_mults)

    @sizemults.setter
    def sizemults(self, val: Union[list, tuple]):
        if len(val) == 3:
            self.a_mults = val[0]
            self.b_mults = val[1]
            self.c_mults = val[2]
        elif len(val) == 6:
            self.a_mults = val[0:2]
            self.b_mults = val[2:4]
            self.c_mults = val[4:6]
        else:
            raise ValueError('len of sizemults must be 3 or 6')

    @property
    def amin(self) -> float:
        """float: Minimum distance allowed along the a box vector direction"""
        return self.__amin

    @amin.setter
    def amin(self, val: float):
        self.__amin = float(val)

    @property
    def bmin(self) -> float:
        """float: Minimum distance allowed along the b box vector direction"""
        return self.__bmin

    @bmin.setter
    def bmin(self, val: float):
        self.__bmin = float(val)

    @property
    def cmin(self) -> float:
        """float: Minimum distance allowed along the c box vector direction"""
        return self.__cmin

    @cmin.setter
    def cmin(self, val: float):
        self.__cmin = float(val)

    @property
    def family(self) -> Optional[str]:
        """str or None: The prototype or reference crystal the dislocation parameter set is for"""
        return self.__family

    @family.setter
    def family(self, val: Optional[str]):
        if val is None:
            self.__family = None
        else:
            self.__family = str(val)

    def set_values(self, **kwargs):
        """
        Allows for multiple class attribute values to be updated at once.

        Parameters
        ----------
        param_file : str, optional
            The path to a file that fully defines the input parameters for
            a specific defect type.
        key : str, optional
            The UUID4 unique key associated with the defect parameter set.
        id : str, optional
            The unique id associated with the defect parameter set.
        slip_hkl : str or array-like object, optional
            The Miller (hkl) slip plane.
        ξ_uvw : str or array-like object, optional
            The Miller [uvw] line direction.
        burgers : str or array-like object, optional
            The Miller Burgers vector
        m : str or array-like object, optional
            The Cartesian unit vector to align with the dislocation solution's
            m coordinate vector (perpendicular to n and ξ).
        n : str or array-like object, optional
            The Cartesian unit vector to align with the dislocation solution's
            n coordinate vector (slip plane normal).
        shift : str or array-like object, optional
            A rigid body shift to apply to all atoms.
        shiftscale : bool, optional
            Indicates if shift is absolute Cartesian or scaled relative to the
            rotated cell's box parameters
        shiftindex : int, optional
            If given, the shift will automatically be selected to position the
            slip plane halfway between atomic planes.  Different values select
            different neighboring atomic planes.
        sizemults : str or array-like object, optional
            The system size multipliers.
        amin : float, optional
            A minimum width for the box's a vector direction.  The sizemults
            will be modified to ensure this as needed.
        bmin : float, optional
            A minimum width for the box's b vector direction.  The sizemults
            will be modified to ensure this as needed.
        cmin : float, optional
            A minimum width for the box's c vector direction.  The sizemults
            will be modified to ensure this as needed.
        family : str or None, optional
            The system's family identifier that the defect is defined for.
        """
        # Only attributes explicitly given in kwargs are modified; each
        # assignment goes through the corresponding property setter.
        for attr in ('param_file', 'key', 'id', 'slip_hkl', 'ξ_uvw', 'burgers',
                     'm', 'n', 'shift', 'shiftscale', 'shiftindex', 'sizemults',
                     'amin', 'bmin', 'cmin', 'family'):
            if attr in kwargs:
                setattr(self, attr, kwargs[attr])

####################### Parameter file interactions ###########################

    def _template_init(self,
                       templateheader: Optional[str] = None,
                       templatedescription: Optional[str] = None):
        """
        Sets the template header and description values.

        Parameters
        ----------
        templateheader : str, optional
            An alternate header to use in the template file for the subset.
        templatedescription : str, optional
            An alternate description of the subset for the templatedoc.
        """
        # Set default template header
        if templateheader is None:
            templateheader = 'Dislocation'
        # Set default template description
        if templatedescription is None:
            templatedescription = ' '.join([
                "Specifies the parameter set that defines a dislocation type",
                "and how to orient it relative to the atomic system."])
        super()._template_init(templateheader, templatedescription)

    @property
    def templatekeys(self) -> dict:
        """dict : The subset-specific input keys and their descriptions."""
        return {
            'dislocation_file': ' '.join([
                "The path to a dislocation record file that collects the",
                "parameters for a specific dislocation type."]),
            'dislocation_slip_hkl': ' '.join([
                "The Miller (hkl) slip plane for the dislocation given as three",
                "space-delimited integers."]),
            'dislocation_ξ_uvw': ' '.join([
                "The Miller [uvw] line vector direction for the dislocation given",
                "as three space-delimited integers. The angle between burgers and",
                "ξ_uvw determines the dislocation's character."]),
            'dislocation_burgers': ' '.join([
                "The Miller Burgers vector for the dislocation given as three",
                "space-delimited floats."]),
            'dislocation_m': ' '.join([
                "The Cartesian vector of the final system that the dislocation",
                "solution's m vector (in-plane, perpendicular to ξ) should align",
                # BUG FIX: a missing comma here previously fused these two
                # strings into "...Limited to beingparallel..." in help text.
                "with. Given as three space-delimited numbers. Limited to being",
                "parallel to one of the three Cartesian axes."]),
            'dislocation_n': ' '.join([
                "The Cartesian vector of the final system that the dislocation",
                "solution's n vector (slip plane normal) should align",
                # BUG FIX: same missing-comma string fusion as dislocation_m.
                "with. Given as three space-delimited numbers. Limited to being",
                "parallel to one of the three Cartesian axes."]),
            'dislocation_shift': ' '.join([
                "A rigid body shift to apply to the atoms in the system after it",
                "has been rotated to the correct orientation. This controls where",
                "the dislocation is placed relative to the atomic positions as the",
                "dislocation line is always inserted at coordinates (0,0) for the",
                "two Cartesian axes aligned with m and n.  Specified as three",
                "floating point numbers."]),
            'dislocation_shiftscale': ' '.join([
                "boolean indicating if the dislocation_shift value is a Cartesian",
                "vector (False, default) or if it is scaled relative to the rotated cell's",
                "box parameters prior to applying sizemults."]),
            'dislocation_shiftindex': ' '.join([
                "An integer that if given will result in a shift being automatically",
                "determined and used such that the dislocation's slip plane will be",
                "positioned halfway between two atomic planes.  Changing the integer",
                "value changes which set of planes the slip plane is positioned between.",
                "Note that shiftindex values only shift atoms in the slip plane normal",
                "direction and therefore may not be the ideal positions for some",
                "dislocation cores."]),
            'sizemults': ' '.join([
                "Multiplication parameters to construct a supercell from the rotated",
                "system.  Limited to three values for dislocation generation.",
                "Values must be even for the two box vectors not aligned with the",
                "dislocation line.  The system will be replicated equally in the",
                "positive and negative directions for those two box vectors."]),
            'amin': ' '.join([
                "Specifies a minimum width in length units that the resulting",
                "system's a box vector must have.  The associated sizemult value",
                "will be increased if necessary to ensure this. Default value is 0.0."]),
            'bmin': ' '.join([
                "Specifies a minimum width in length units that the resulting",
                "system's b box vector must have.  The associated sizemult value",
                "will be increased if necessary to ensure this. Default value is 0.0."]),
            'cmin': ' '.join([
                "Specifies a minimum width in length units that the resulting",
                "system's c box vector must have.  The associated sizemult value",
                "will be increased if necessary to ensure this. Default value is 0.0."]),
        }

    @property
    def preparekeys(self) -> list:
        """
        list : The input keys (without prefix) used when preparing a calculation.
        Typically, this is templatekeys plus *_content keys so prepare can access
        content before it exists in the calc folders being prepared.
        """
        return list(self.templatekeys.keys()) + [
            'dislocation_family',
            'dislocation_content',
        ]

    @property
    def interpretkeys(self) -> list:
        """
        list : The input keys (without prefix) accessed when interpreting the
        calculation input file.  Typically, this is preparekeys plus any extra
        keys used or generated when processing the inputs.
        """
        return self.preparekeys + [
            'dislocation_model',
        ]

    @property
    def multikeys(self) -> list:
        """
        list: Calculation subset key sets that can have multiple values during prepare.
        """
        # Define key set for system size parameters
        sizekeys = ['sizemults', 'amin', 'bmin', 'cmin']
        # Define key set for defect parameters as the remainder
        defectkeys = [key for key in self.preparekeys if key not in sizekeys]
        # Add prefixes and return
        return [
            self._pre(sizekeys),
            self._pre(defectkeys)
        ]

    def load_parameters(self, input_dict: dict):
        """
        Interprets calculation parameters.

        Parameters
        ----------
        input_dict : dict
            Dictionary containing input parameter key-value pairs.
        """
        # Set default keynames
        keymap = self.keymap
        # Extract input values and assign default values
        self.param_file = input_dict.get(keymap['dislocation_file'], None)
        self.__content = input_dict.get(keymap['dislocation_content'], None)
        # Replace defect model with defect content if given
        param_file = self.param_file
        if self.__content is not None:
            param_file = self.__content
        # Extract parameters from a file
        if param_file is not None:
            # Verify competing parameters are not defined
            for key in ('dislocation_slip_hkl',
                        'dislocation_ξ_uvw',
                        'dislocation_burgers',
                        'dislocation_m',
                        'dislocation_n',
                        'dislocation_shift',
                        'dislocation_shiftscale',
                        'dislocation_shiftindex'):
                if keymap[key] in input_dict:
                    raise ValueError(f"{keymap[key]} and {keymap['dislocation_file']} cannot both be supplied")
            # Load defect model
            self.__model = model = DM(param_file).find('dislocation')
            # Extract parameter values from defect model
            self.key = model['key']
            self.id = model['id']
            self.family = model['system-family']
            self.slip_hkl = model['calculation-parameter']['slip_hkl']
            self.ξ_uvw = model['calculation-parameter']['ξ_uvw']
            self.burgers = model['calculation-parameter']['burgers']
            self.m = model['calculation-parameter']['m']
            self.n = model['calculation-parameter']['n']
            self.shift = model['calculation-parameter'].get('shift', None)
            self.shiftindex = model['calculation-parameter'].get('shiftindex', None)
            self.shiftscale = boolean(model['calculation-parameter'].get('shiftscale', False))
        # Set parameter values directly
        else:
            self.__model = None
            self.key = None
            self.id = None
            self.family = self.parent.system.family
            self.slip_hkl = input_dict[keymap['dislocation_slip_hkl']]
            self.ξ_uvw = input_dict[keymap['dislocation_ξ_uvw']]
            self.burgers = input_dict[keymap['dislocation_burgers']]
            self.m = input_dict.get(keymap['dislocation_m'], '0 1 0')
            self.n = input_dict.get(keymap['dislocation_n'], '0 0 1')
            self.shift = input_dict.get(keymap['dislocation_shift'], None)
            self.shiftscale = boolean(input_dict.get(keymap['dislocation_shiftscale'], False))
            self.shiftindex = input_dict.get(keymap['dislocation_shiftindex'], None)
        # Check defect parameters
        if not np.isclose(self.m.dot(self.n), 0.0):
            raise ValueError("dislocation_m and dislocation_n must be orthogonal")
        # Set default values for fault system manipulations
        sizemults = input_dict.get(keymap['sizemults'], '1 1 1')
        self.sizemults = np.array(sizemults.strip().split(), dtype=int)
        self.amin = value(input_dict, keymap['amin'], default_term='0.0 angstrom',
                          default_unit=self.parent.units.length_unit)
        self.bmin = value(input_dict, keymap['bmin'], default_term='0.0 angstrom',
                          default_unit=self.parent.units.length_unit)
        self.cmin = value(input_dict, keymap['cmin'], default_term='0.0 angstrom',
                          default_unit=self.parent.units.length_unit)

########################### Data model interactions ###########################

    @property
    def modelroot(self) -> str:
        """str : The root element name for the subset terms."""
        baseroot = 'dislocation'
        return f'{self.modelprefix}{baseroot}'

    def load_model(self, model: DM):
        """Loads subset attributes from an existing model."""
        disl = model[self.modelroot]
        self.__model = None
        self.__param_file = None
        self.key = disl['key']
        self.id = disl['id']
        self.family = disl['system-family']
        cp = disl['calculation-parameter']
        self.slip_hkl = cp['slip_hkl']
        self.ξ_uvw = cp['ξ_uvw']
        self.burgers = cp['burgers']
        self.m = cp['m']
        self.n = cp['n']
        if 'shift' in cp:
            self.shift = cp['shift']
        if 'shiftindex' in cp:
            self.shiftindex = cp['shiftindex']
        self.shiftscale = cp['shiftscale']
        run_params = model['calculation']['run-parameter']
        self.a_mults = run_params[f'{self.modelprefix}size-multipliers']['a']
        self.b_mults = run_params[f'{self.modelprefix}size-multipliers']['b']
        self.c_mults = run_params[f'{self.modelprefix}size-multipliers']['c']

    def build_model(self,
                    model: DM,
                    **kwargs):
        """
        Adds the subset model to the parent model.

        Parameters
        ----------
        model : DataModelDict.DataModelDict
            The record content (after root element) to add content to.
        kwargs : any
            Any options to pass on to dict_insert that specify where the subset
            content gets added to in the parent model.
        """
        # Save defect parameters
        model[self.modelroot] = disl = DM()
        disl['key'] = self.key
        disl['id'] = self.id
        # Extra descriptive fields are only available when a parameter file
        # was loaded.
        if self.__model is not None:
            disl['character'] = self.__model['character']
            disl['Burgers-vector'] = self.__model['Burgers-vector']
            disl['slip-plane'] = self.__model['slip-plane']
            disl['line-direction'] = self.__model['line-direction']
        disl['system-family'] = self.family
        disl['calculation-parameter'] = cp = DM()
        cp['slip_hkl'] = f'{self.slip_hkl[0]} {self.slip_hkl[1]} {self.slip_hkl[2]}'
        cp['ξ_uvw'] = f'{self.ξ_uvw[0]} {self.ξ_uvw[1]} {self.ξ_uvw[2]}'
        cp['burgers'] = f'{self.burgers[0]} {self.burgers[1]} {self.burgers[2]}'
        cp['m'] = f'{self.m[0]} {self.m[1]} {self.m[2]}'
        cp['n'] = f'{self.n[0]} {self.n[1]} {self.n[2]}'
        if self.shift is not None:
            cp['shift'] = f'{self.shift[0]} {self.shift[1]} {self.shift[2]}'
        if self.shiftindex is not None:
            cp['shiftindex'] = str(self.shiftindex)
        cp['shiftscale'] = str(self.shiftscale)
        # Build paths if needed
        if 'calculation' not in model:
            model['calculation'] = DM()
        if 'run-parameter' not in model['calculation']:
            model['calculation']['run-parameter'] = DM()
        run_params = model['calculation']['run-parameter']
        run_params[f'{self.modelprefix}size-multipliers'] = DM()
        run_params[f'{self.modelprefix}size-multipliers']['a'] = list(self.a_mults)
        run_params[f'{self.modelprefix}size-multipliers']['b'] = list(self.b_mults)
        run_params[f'{self.modelprefix}size-multipliers']['c'] = list(self.c_mults)

    @property
    def queries(self) -> dict:
        """dict: Query objects and their associated parameter names."""
        root = f'{self.parent.modelroot}.{self.modelroot}'
        runparampath = f'{self.parent.modelroot}.calculation.run-parameter.{self.modelprefix}'
        return {
            'dislocation_id': load_query(
                style='str_match',
                name=f'{self.prefix}dislocation_id',
                path=f'{root}.id',
                description='search by dislocation parameter set id'),
            'dislocation_key': load_query(
                style='str_match',
                name=f'{self.prefix}dislocation_key',
                path=f'{root}.key',
                description='search by dislocation parameter set UUID key'),
            'dislocation_family': load_query(
                style='str_match',
                name=f'{self.prefix}dislocation_family',
                path=f'{root}.system-family',
                description='search by crystal prototype that the dislocation parameter set is for'),
            'a_mult1': load_query(
                style='int_match',
                name=f'{self.prefix}a_mult1',
                path=f'{runparampath}size-multipliers.a.0',
                description='search by lower a_mult value'),
            'a_mult2': load_query(
                style='int_match',
                name=f'{self.prefix}a_mult2',
                path=f'{runparampath}size-multipliers.a.1',
                description='search by upper a_mult value'),
            'b_mult1': load_query(
                style='int_match',
                name=f'{self.prefix}b_mult1',
                path=f'{runparampath}size-multipliers.b.0',
                description='search by lower b_mult value'),
            'b_mult2': load_query(
                style='int_match',
                name=f'{self.prefix}b_mult2',
                path=f'{runparampath}size-multipliers.b.1',
                description='search by upper b_mult value'),
            'c_mult1': load_query(
                style='int_match',
                name=f'{self.prefix}c_mult1',
                path=f'{runparampath}size-multipliers.c.0',
                description='search by lower c_mult value'),
            'c_mult2': load_query(
                style='int_match',
                name=f'{self.prefix}c_mult2',
                path=f'{runparampath}size-multipliers.c.1',
                description='search by upper c_mult value'),
        }

########################## Metadata interactions ##############################

    def metadata(self, meta: dict):
        """
        Converts the structured content to a simpler dictionary.

        Parameters
        ----------
        meta : dict
            The dictionary to add the subset content to
        """
        prefix = self.prefix
        meta[f'{prefix}dislocation_key'] = self.key
        meta[f'{prefix}dislocation_id'] = self.id
        # BUG FIX: was 'stackingfault_family' (copy-paste from the stacking
        # fault subset); queries and preparekeys expect 'dislocation_family'.
        meta[f'{prefix}dislocation_family'] = self.family
        meta[f'{prefix}dislocation_slip_hkl'] = self.slip_hkl
        meta[f'{prefix}dislocation_ξ_uvw'] = self.ξ_uvw
        meta[f'{prefix}dislocation_burgers'] = self.burgers
        meta[f'{prefix}dislocation_m'] = self.m
        meta[f'{prefix}dislocation_n'] = self.n
        meta[f'{prefix}dislocation_shift'] = self.shift
        meta[f'{prefix}dislocation_shiftscale'] = self.shiftscale
        meta[f'{prefix}dislocation_shiftindex'] = self.shiftindex
        meta[f'{prefix}a_mult1'] = self.a_mults[0]
        meta[f'{prefix}a_mult2'] = self.a_mults[1]
        meta[f'{prefix}b_mult1'] = self.b_mults[0]
        meta[f'{prefix}b_mult2'] = self.b_mults[1]
        meta[f'{prefix}c_mult1'] = self.c_mults[0]
        meta[f'{prefix}c_mult2'] = self.c_mults[1]

########################### Calculation interactions ##########################

    def calc_inputs(self, input_dict: dict):
        """
        Generates calculation function input parameters based on the values
        assigned to attributes of the subset.

        Parameters
        ----------
        input_dict : dict
            The dictionary of input parameters to add subset terms to.
        """
        input_dict['burgers'] = self.burgers
        input_dict['ξ_uvw'] = self.ξ_uvw
        input_dict['slip_hkl'] = self.slip_hkl
        input_dict['m'] = self.m
        input_dict['n'] = self.n
        # Total replications along each axis = upper bound minus lower bound.
        a_mult = self.a_mults[1] - self.a_mults[0]
        b_mult = self.b_mults[1] - self.b_mults[0]
        c_mult = self.c_mults[1] - self.c_mults[0]
        input_dict['sizemults'] = [a_mult, b_mult, c_mult]
        input_dict['amin'] = self.amin
        input_dict['bmin'] = self.bmin
        input_dict['cmin'] = self.cmin
        input_dict['shift'] = self.shift
        input_dict['shiftscale'] = self.shiftscale
        input_dict['shiftindex'] = self.shiftindex
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui_msgbox.ui'
#
# Created by: PyQt5 UI code generator 5.9.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_msgbox(object):
    """pyuic5-generated layout for the message-inbox ("消息信箱") dialog.

    Widgets: three column headers (sender / time / content), five message
    rows (send_N / time_N / content_N) and a bottom pagination bar with
    prev/next buttons plus page-count labels.

    NOTE: auto-generated from ui_msgbox.ui — regenerate with pyuic5 instead
    of editing by hand (see the header warning).
    """

    def setupUi(self, msgbox):
        """Create, position and style every child widget of *msgbox*."""
        msgbox.setObjectName("msgbox")
        msgbox.resize(982, 678)
        msgbox.setStyleSheet("background:white;\n"
                             "")
        # ---- pagination labels (bottom bar, y=590) ----
        self.page = QtWidgets.QLabel(msgbox)
        self.page.setGeometry(QtCore.QRect(570, 590, 61, 40))
        self.page.setStyleSheet("font: 75 16pt \"Heiti SC\";")
        self.page.setText("")
        self.page.setAlignment(QtCore.Qt.AlignCenter)
        self.page.setObjectName("page")
        self.page_2 = QtWidgets.QLabel(msgbox)
        self.page_2.setGeometry(QtCore.QRect(420, 590, 121, 40))
        self.page_2.setStyleSheet("font: 75 16pt \"Heiti SC\";")
        self.page_2.setAlignment(QtCore.Qt.AlignCenter)
        self.page_2.setObjectName("page_2")
        self.page3 = QtWidgets.QLabel(msgbox)
        self.page3.setGeometry(QtCore.QRect(300, 590, 41, 40))
        self.page3.setStyleSheet("font: 75 16pt \"Heiti SC\";")
        self.page3.setAlignment(QtCore.Qt.AlignCenter)
        self.page3.setObjectName("page3")
        self.page_5 = QtWidgets.QLabel(msgbox)
        self.page_5.setGeometry(QtCore.QRect(650, 590, 51, 40))
        self.page_5.setStyleSheet("font: 75 16pt \"Heiti SC\";")
        self.page_5.setAlignment(QtCore.Qt.AlignCenter)
        self.page_5.setObjectName("page_5")
        # ---- previous/next page buttons ----
        self.last_page = QtWidgets.QPushButton(msgbox)
        self.last_page.setGeometry(QtCore.QRect(200, 590, 75, 40))
        self.last_page.setStyleSheet("\n"
                                     "QPushButton#last_page\n"
                                     "{\n"
                                     "    font:24pt,black;\n"
                                     "    background-color:white;\n"
                                     "    border-radius:5px;\n"
                                     "}\n"
                                     "\n"
                                     "QPushButton#last_page:hover\n"
                                     "{\n"
                                     "    font:24pt,rgb(0, 172, 230);\n"
                                     "    background-color:white;\n"
                                     "}\n"
                                     "\n"
                                     "QPushButton#last_page:pressed\n"
                                     "{\n"
                                     "    font:24pt,rgb(0, 172, 230);\n"
                                     "    background-color:white;\n"
                                     "    padding-left:3px;\n"
                                     "    padding-top:3px;\n"
                                     "}\n"
                                     "\n"
                                     "")
        self.last_page.setObjectName("last_page")
        self.next_page = QtWidgets.QPushButton(msgbox)
        self.next_page.setGeometry(QtCore.QRect(720, 590, 75, 40))
        self.next_page.setStyleSheet("\n"
                                     "QPushButton#next_page\n"
                                     "{\n"
                                     "    font:24pt,black;\n"
                                     "    background-color:white;\n"
                                     "    border-radius:5px;\n"
                                     "}\n"
                                     "\n"
                                     "QPushButton#next_page:hover\n"
                                     "{\n"
                                     "    font:24pt,rgb(0, 172, 230);\n"
                                     "    background-color:white;\n"
                                     "}\n"
                                     "\n"
                                     "QPushButton#next_page:pressed\n"
                                     "{\n"
                                     "    font:24pt,rgb(0, 172, 230);\n"
                                     "    background-color:white;\n"
                                     "    padding-left:3px;\n"
                                     "    padding-top:3px;\n"
                                     "}\n"
                                     "\n"
                                     "")
        self.next_page.setObjectName("next_page")
        self.msg_num = QtWidgets.QLabel(msgbox)
        self.msg_num.setGeometry(QtCore.QRect(360, 590, 61, 40))
        self.msg_num.setStyleSheet("font: 75 16pt \"Heiti SC\";")
        self.msg_num.setText("")
        self.msg_num.setAlignment(QtCore.Qt.AlignCenter)
        self.msg_num.setObjectName("msg_num")
        # ---- column headers (y=50) ----
        self.time_head = QtWidgets.QLabel(msgbox)
        self.time_head.setGeometry(QtCore.QRect(280, 50, 220, 20))
        self.time_head.setStyleSheet("font: 75 20pt \"Heiti SC\";")
        self.time_head.setAlignment(QtCore.Qt.AlignCenter)
        self.time_head.setObjectName("time_head")
        self.send_head = QtWidgets.QLabel(msgbox)
        self.send_head.setGeometry(QtCore.QRect(40, 50, 190, 20))
        self.send_head.setStyleSheet("font: 75 20pt \"Heiti SC\";")
        self.send_head.setAlignment(QtCore.Qt.AlignCenter)
        self.send_head.setObjectName("send_head")
        self.shop_owner_head = QtWidgets.QLabel(msgbox)
        self.shop_owner_head.setGeometry(QtCore.QRect(520, 50, 440, 20))
        self.shop_owner_head.setStyleSheet("font: 75 20pt \"Heiti SC\";")
        self.shop_owner_head.setAlignment(QtCore.Qt.AlignCenter)
        self.shop_owner_head.setObjectName("shop_owner_head")
        # ---- message row 1 (y=90) ----
        self.send_1 = QtWidgets.QLabel(msgbox)
        self.send_1.setGeometry(QtCore.QRect(40, 90, 190, 51))
        self.send_1.setStyleSheet("font: 14pt \"Heiti SC\";\n"
                                  "\n"
                                  "\n"
                                  "")
        self.send_1.setText("")
        self.send_1.setAlignment(QtCore.Qt.AlignCenter)
        self.send_1.setObjectName("send_1")
        self.time_1 = QtWidgets.QLabel(msgbox)
        self.time_1.setGeometry(QtCore.QRect(280, 90, 220, 51))
        self.time_1.setStyleSheet("font: 14pt \"Heiti SC\";\n"
                                  "\n"
                                  "\n"
                                  "")
        self.time_1.setText("")
        self.time_1.setAlignment(QtCore.Qt.AlignCenter)
        self.time_1.setObjectName("time_1")
        self.content_1 = QtWidgets.QLabel(msgbox)
        self.content_1.setGeometry(QtCore.QRect(520, 90, 440, 51))
        self.content_1.setStyleSheet("font: 14pt \"Heiti SC\";\n"
                                     "\n"
                                     "\n"
                                     "")
        self.content_1.setText("")
        self.content_1.setAlignment(QtCore.Qt.AlignCenter)
        self.content_1.setWordWrap(True)
        self.content_1.setObjectName("content_1")
        # ---- message row 2 (y=200) ----
        self.time_2 = QtWidgets.QLabel(msgbox)
        self.time_2.setGeometry(QtCore.QRect(280, 200, 220, 51))
        self.time_2.setStyleSheet("font: 14pt \"Heiti SC\";\n"
                                  "\n"
                                  "\n"
                                  "")
        self.time_2.setText("")
        self.time_2.setAlignment(QtCore.Qt.AlignCenter)
        self.time_2.setObjectName("time_2")
        self.content_2 = QtWidgets.QLabel(msgbox)
        self.content_2.setGeometry(QtCore.QRect(520, 200, 440, 51))
        self.content_2.setStyleSheet("font: 14pt \"Heiti SC\";\n"
                                     "\n"
                                     "\n"
                                     "")
        self.content_2.setText("")
        self.content_2.setAlignment(QtCore.Qt.AlignCenter)
        self.content_2.setWordWrap(True)
        self.content_2.setObjectName("content_2")
        self.send_2 = QtWidgets.QLabel(msgbox)
        self.send_2.setGeometry(QtCore.QRect(40, 200, 190, 51))
        self.send_2.setStyleSheet("font: 14pt \"Heiti SC\";\n"
                                  "\n"
                                  "\n"
                                  "")
        self.send_2.setText("")
        self.send_2.setAlignment(QtCore.Qt.AlignCenter)
        self.send_2.setObjectName("send_2")
        # ---- message row 3 (y=310) ----
        self.time_3 = QtWidgets.QLabel(msgbox)
        self.time_3.setGeometry(QtCore.QRect(280, 310, 220, 51))
        self.time_3.setStyleSheet("font: 14pt \"Heiti SC\";\n"
                                  "\n"
                                  "\n"
                                  "")
        self.time_3.setText("")
        self.time_3.setAlignment(QtCore.Qt.AlignCenter)
        self.time_3.setObjectName("time_3")
        self.content_3 = QtWidgets.QLabel(msgbox)
        self.content_3.setGeometry(QtCore.QRect(520, 310, 440, 51))
        self.content_3.setStyleSheet("font: 14pt \"Heiti SC\";\n"
                                     "\n"
                                     "\n"
                                     "")
        self.content_3.setText("")
        self.content_3.setAlignment(QtCore.Qt.AlignCenter)
        self.content_3.setWordWrap(True)
        self.content_3.setObjectName("content_3")
        self.send_3 = QtWidgets.QLabel(msgbox)
        self.send_3.setGeometry(QtCore.QRect(40, 310, 190, 51))
        self.send_3.setStyleSheet("font: 14pt \"Heiti SC\";\n"
                                  "\n"
                                  "\n"
                                  "")
        self.send_3.setText("")
        self.send_3.setAlignment(QtCore.Qt.AlignCenter)
        self.send_3.setObjectName("send_3")
        # ---- message row 5 (y=510; generated before row 4) ----
        self.content_5 = QtWidgets.QLabel(msgbox)
        self.content_5.setGeometry(QtCore.QRect(520, 510, 440, 51))
        self.content_5.setStyleSheet("font: 14pt \"Heiti SC\";\n"
                                     "\n"
                                     "\n"
                                     "")
        self.content_5.setText("")
        self.content_5.setAlignment(QtCore.Qt.AlignCenter)
        self.content_5.setWordWrap(True)
        self.content_5.setObjectName("content_5")
        self.time_5 = QtWidgets.QLabel(msgbox)
        self.time_5.setGeometry(QtCore.QRect(280, 510, 220, 51))
        self.time_5.setStyleSheet("font: 14pt \"Heiti SC\";\n"
                                  "\n"
                                  "\n"
                                  "")
        self.time_5.setText("")
        self.time_5.setAlignment(QtCore.Qt.AlignCenter)
        self.time_5.setObjectName("time_5")
        self.send_5 = QtWidgets.QLabel(msgbox)
        self.send_5.setGeometry(QtCore.QRect(40, 510, 190, 51))
        self.send_5.setStyleSheet("font: 14pt \"Heiti SC\";\n"
                                  "\n"
                                  "\n"
                                  "")
        self.send_5.setText("")
        self.send_5.setAlignment(QtCore.Qt.AlignCenter)
        self.send_5.setObjectName("send_5")
        # ---- message row 4 (y=420) ----
        self.content_4 = QtWidgets.QLabel(msgbox)
        self.content_4.setGeometry(QtCore.QRect(520, 420, 440, 51))
        self.content_4.setStyleSheet("font: 14pt \"Heiti SC\";\n"
                                     "\n"
                                     "\n"
                                     "")
        self.content_4.setText("")
        self.content_4.setAlignment(QtCore.Qt.AlignCenter)
        self.content_4.setWordWrap(True)
        self.content_4.setObjectName("content_4")
        self.time_4 = QtWidgets.QLabel(msgbox)
        self.time_4.setGeometry(QtCore.QRect(280, 420, 220, 51))
        self.time_4.setStyleSheet("font: 14pt \"Heiti SC\";\n"
                                  "\n"
                                  "\n"
                                  "")
        self.time_4.setText("")
        self.time_4.setAlignment(QtCore.Qt.AlignCenter)
        self.time_4.setObjectName("time_4")
        self.send_4 = QtWidgets.QLabel(msgbox)
        self.send_4.setGeometry(QtCore.QRect(40, 420, 190, 51))
        self.send_4.setStyleSheet("font: 14pt \"Heiti SC\";\n"
                                  "\n"
                                  "\n"
                                  "")
        self.send_4.setText("")
        self.send_4.setAlignment(QtCore.Qt.AlignCenter)
        self.send_4.setObjectName("send_4")
        self.retranslateUi(msgbox)
        QtCore.QMetaObject.connectSlotsByName(msgbox)

    def retranslateUi(self, msgbox):
        """Install the user-visible (Chinese) strings on the static widgets."""
        _translate = QtCore.QCoreApplication.translate
        msgbox.setWindowTitle(_translate("msgbox", "消息信箱"))
        self.page_2.setText(_translate("msgbox", "条消息 当前是第"))
        self.page3.setText(_translate("msgbox", "共"))
        self.page_5.setText(_translate("msgbox", "页"))
        self.last_page.setText(_translate("msgbox", "←"))
        self.next_page.setText(_translate("msgbox", "→"))
        self.time_head.setText(_translate("msgbox", "时间"))
        self.send_head.setText(_translate("msgbox", "发送人"))
        self.shop_owner_head.setText(_translate("msgbox", "内容"))
|
# Solve the following recurrence relation:
#   a_{n+2} + 9*a_n = 0,  a0 = a1 = 1.
# Note: enter cos(π) as cos(pi).
# Worked solution (WolframAlpha) — kept as a comment; the bare URL below was
# previously uncommented and made this file a SyntaxError:
# http://www.wolframalpha.com/input/?i=0%3Da%28n%2B2%29%2B9*a%28n%29%2C+a%280%29%3D1%2C+a%281%29%3D1
|
from datetime import datetime
from django.db import models
# Create your models here.
class Empresas(models.Model):
    """A company whose stock tickers (Acao) are tracked."""
    # Company display name.
    nome = models.CharField(max_length= 30)
    def __str__(self):
        return self.nome
class Acao(models.Model):
    """A stock ticker belonging to a company."""
    sigla = models.CharField(max_length=10)
    # NOTE(review): related_name='empresa' names the *reverse* accessor on
    # Empresas — confirm that is intended (it reads oddly from that side).
    empresa = models.ForeignKey(Empresas, on_delete=models.CASCADE, related_name='empresa')
    # Pass the callable (no parentheses): datetime.now() was evaluated once at
    # import time, freezing that moment as the default for every new row.
    data = models.DateTimeField(default=datetime.now)

    class Meta:  # fixed: was lowercase `class meta`, which Django ignores
        ordering = ['-data']
class Cotacao(models.Model):
    """A price quotation for a ticker on a given date."""
    # Pass the callable (no parentheses): datetime.now() was evaluated once at
    # import time, freezing that moment as the default for every new row.
    data = models.DateField(default=datetime.now)
    acao = models.ForeignKey(Acao, on_delete=models.CASCADE, related_name='acao')
    valor = models.FloatField(null=False)

    class Meta:  # fixed: was lowercase `class meta`, which Django ignores
        ordering =['-data']
|
from requests import Request, Session
from requests.exceptions import ConnectionError, Timeout, TooManyRedirects
import json
import telebot
# CoinMarketCap "latest listings" endpoint: first 500 coins, quoted in USD.
url = 'https://pro-api.coinmarketcap.com/v1/cryptocurrency/listings/latest'
parameters = {
  'start':'1',
  'limit':'500',
  'convert':'USD'
}
headers = {
  'Accepts': 'application/json',
  # Placeholder — substitute a real CoinMarketCap API key before running.
  'X-CMC_PRO_API_KEY': 'API_KEY',
}
session = Session()
session.headers.update(headers)
try:
  response = session.get(url, params=parameters)
  data = json.loads(response.text)
except (ConnectionError, Timeout, TooManyRedirects) as e:
  print(e)
  # NOTE(review): on failure `data` is never assigned, so the later
  # `data['data']` raises NameError — confirm this is acceptable.
def creat_price_lists(coins, index_valute):
    """Return the 'quote' dict of every listing whose symbol matches.

    Parameters
    ----------
    coins : list of dict
        CoinMarketCap listing records (renamed from `list`, which shadowed
        the builtin; the only caller passes it positionally).
    index_valute : str
        Upper-case ticker symbol, e.g. 'BTC'.

    Returns
    -------
    list of dict
        The 'quote' sub-dicts of matching coins (empty when none match).
    """
    # The original iterated each coin's keys only to test/append once and
    # break — equivalent to a single membership test per coin.
    return [coin['quote'] for coin in coins if coin['symbol'] == index_valute]
def creat_ptice_dict(quotes):
    """Flatten CMC 'quote' entries into a Russian-labelled summary dict.

    Parameters
    ----------
    quotes : list of dict
        Each item maps a currency code (e.g. 'USD') to a stats dict with
        'price', 'percent_change_24h', 'percent_change_30d', 'market_cap'.
        (Renamed from `list`, which shadowed the builtin.)

    Returns
    -------
    dict
        Rounded price/changes/market-cap keyed by Russian labels; the values
        of the last non-empty entry win. Empty dict for empty input.
    """
    price_dict = {}
    for quote in quotes:
        for values in quote.values():
            if not values:
                continue  # original skipped empty stat dicts implicitly
            # The original re-assigned these same four keys once per item of
            # `values` — the innermost loop was redundant.
            price_dict['Цена'] = round(values['price'],5)
            price_dict['Изменение за последние сутки'] = round(values['percent_change_24h'], 2)
            price_dict['Изменение за месяц'] = round(values['percent_change_30d'], 2)
            price_dict['Капитализация'] = round(values['market_cap'])
    return price_dict
# Snapshot of all listing records fetched at startup; handlers read from it.
coins = (data['data'])
# Placeholder — substitute the real Telegram bot token.
bot = telebot.TeleBot('API_KEY')
@bot.message_handler(commands=['start', 'help'])
def send_welcome(message):
    # Greeting (Russian): asks for a crypto ticker such as 'btc'.
    # NOTE(review): "индеск" looks like a typo for "индекс" in the
    # user-facing string — confirm before changing runtime text.
    bot.reply_to(message, "Введите индеск криптовалюты, например 'btc'")
@bot.message_handler(func=lambda message: True)
def echo_all(message):
    """Reply with price statistics for the ticker given in the message text.

    Falls back to an "unknown ticker" reply when the symbol is not in the
    cached listings (the stats dict is then empty and .format raises).
    """
    valute = message.text.upper()
    price = creat_price_lists(coins, valute)
    try:
        price_dict = creat_ptice_dict(price)
        bot.reply_to(message, 'Цена : {} USD\n'
                              'Изменение за последние сутки : {} %\n'
                              'Изменение за месяц : {} %\n'
                              'Капитализация : {} USD'
                     .format(price_dict['Цена'],
                             price_dict['Изменение за последние сутки'],
                             price_dict['Изменение за месяц'],
                             price_dict['Капитализация']))
    except Exception:  # fixed: was a bare `except:` (also caught SystemExit)
        # Fixed user-facing typo: "ще раз" -> "ещё раз".
        bot.reply_to(message, 'Такого индекса не существует, попробуйте ещё раз.')
bot.polling()
|
from __future__ import unicode_literals
import re
import urllib
from django.db import models
from django.contrib.auth.models import User
from django.utils.safestring import mark_safe
import bleach
# Create your models here.
class Chirp(models.Model):
    """A 140-character micro-post with likes, rechirps, replies and @mentions.

    NOTE: legacy Python 2 / Django < 2 code (`urllib.quote`, ForeignKey
    without on_delete) — kept as-is for compatibility.
    """
    content = models.CharField(max_length=140)
    timestamp = models.DateTimeField(auto_now_add=True)
    user = models.ForeignKey(User)
    like = models.ManyToManyField(User, related_name='likes', blank=True)
    # True when this chirp is a re-post of someone else's chirp.
    rechirp_status = models.BooleanField(default = False)
    origin_chirp_user = models.ForeignKey(User, blank = True, null=True, related_name="ori_chirp_by")
    # Reply threading: the chirp this one answers, None for top-level posts.
    parent = models.ForeignKey("self", null=True, blank=True)

    def __str__(self):
        return self.content[:140]

    def save(self, *args, **kwargs):
        """Save the chirp, then notify every @mentioned user."""
        super(Chirp, self).save(*args, **kwargs)
        # Imported here to avoid a circular import at module load time.
        from notifications.models import new_notification
        mentions = self.get_mentions()
        if mentions:
            verb = "mentioned you"
            new_notification(
                origin_user = self.user.username,
                affected_users = mentions,
                verb=verb,
                target=self,
            )

    def children(self):
        """Return direct replies to this chirp, newest first."""
        return Chirp.objects.filter(parent=self).order_by("-timestamp")

    @property
    def is_parent(self):
        """True when this chirp is a top-level post (not a reply)."""
        return self.parent is None  # fixed: was `== None` via an if/else

    @property
    def reply_count(self):
        """Number of direct replies to this chirp."""
        return Chirp.objects.filter(parent=self).count()

    def get_mentions(self):
        """Return User objects for every @username present in the content.

        Unknown usernames are skipped (with a debug print).
        """
        text = self.content
        pattern = re.compile(r'[@](\w+)')
        mentions = pattern.finditer(text)
        mention_list = []
        for mention in mentions:
            username = mention.group()[1:]  # strip the leading '@'
            try:
                mention_list.append(User.objects.get(username=username))
            except User.DoesNotExist:
                print ("Shame")
        return mention_list

    def html_tags_edit(self):
        """Render content as safe HTML with #hashtags/@mentions linked to search."""
        text = self.content
        attrs = {
            '*': ['class'],
            'a': ['href', 'rel'],
            'img': ['alt', 'src'],
        }
        try:
            final_text = ""
            # Fixed regex: was r'[#,@](\w+)' — the comma inside the character
            # class also turned ",word" into a search link.
            pat = re.compile(r'[#@](\w+)')
            hashtags = pat.finditer(text)
            i = 0
            for hashtag in hashtags:
                search_query = "\'" + "/search?search=" + urllib.quote(hashtag.group()) + "\'"
                final_text += (text[i:hashtag.span()[0]] + "<a href=" + search_query + ">" + hashtag.group() + "</a>")
                i = hashtag.span()[1]
            final_text += (text[i:])
            if final_text == "":
                # No content at all: sanitise and linkify the raw text.
                text = bleach.clean(text, tags=['img', 'a'], attributes=attrs, strip=True)
                text = bleach.linkify(text)
                return mark_safe(text)
            else:
                final_text = bleach.clean(final_text, tags=['img', 'a'], attributes=attrs, strip=True)
                final_text = bleach.linkify(final_text)
                return mark_safe(final_text)
        except Exception:  # fixed: was a bare `except:`
            # Best-effort fallback: raw text (auto-escaped by the template).
            return text
|
import json
import os

from ipywidgets import DOMWidget, Output, Widget, register, widget_serialization
from ipywidgets.widgets.trait_types import InstanceDict
from traitlets import Unicode, Int, List, Instance, Bool, validate, TraitError
from traitlets.utils.bunch import Bunch

from .options import *
from ._version import EXTENSION_VERSION
# Bundled catalogue of public reference genomes, keyed by genome id.
PUBLIC_GENOMES_FILE = os.path.join(os.path.dirname(__file__), 'public_genomes.json')
# Use a context manager: the original `json.load(open(...))` leaked the
# file handle until garbage collection.
with open(PUBLIC_GENOMES_FILE, 'r') as _genomes_fh:
    PUBLIC_GENOMES = Bunch({v['id']: v for v in json.load(_genomes_fh)})
@register
class IgvBrowser(DOMWidget):
    """An IGV browser widget."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Route custom frontend messages (e.g. JSON dumps) to our handler.
        self.on_msg(self._custom_message_handler)

    # Class-level Output widget shared by the @out.capture() handlers below.
    out = Output()

    _view_name = Unicode('IgvBrowser').tag(sync=True)
    _model_name = Unicode('IgvModel').tag(sync=True)
    _view_module = Unicode('jupyter-igv').tag(sync=True)
    _model_module = Unicode('jupyter-igv').tag(sync=True)
    _view_module_version = Unicode(EXTENSION_VERSION).tag(sync=True)
    _model_module_version = Unicode(EXTENSION_VERSION).tag(sync=True)
    # Widget-specific property.
    # Widget properties are defined as traitlets. Any property tagged with `sync=True`
    # is automatically synced to the frontend *any* time it changes in Python.
    # It is synced back to Python from the frontend *any* time the model is touched.
    genome = InstanceDict(ReferenceGenome).tag(sync=True, **widget_serialization)
    tracks = List(InstanceDict(Track)).tag(sync=True, **widget_serialization)
    doubleClickDelay = Int(default_value=500).tag(sync=True)
    flanking = Int(default_value=1000).tag(sync=True)
    genomeList = Unicode(allow_none=True).tag(sync=True, **widget_serialization) # optional URL
    locus = (Unicode() | List(Unicode())).tag(sync=True, **widget_serialization)
    minimumBases = Int(default_value=40).tag(sync=True)
    # NOTE(review): traitlets expects `default_value=`, not `default=` —
    # confirm this keyword is actually honoured here.
    queryParametersSupported = Bool(default=False).tag(sync=True)
    # NOTE(review): this trait is clobbered by the `search` *method* defined
    # below (same class namespace) — confirm which one is intended.
    search = InstanceDict(SearchService, allow_none=True).tag(sync=True, **widget_serialization)
    showAllChromosomes = Bool(default_value=True).tag(sync=True)
    showAllChromosomeWidget = Bool(default_value=True).tag(sync=True)
    showNavigation = Bool(default_value=True).tag(sync=True)
    showSVGButton = Bool(default_value=False).tag(sync=True)
    showRuler = Bool(default_value=True).tag(sync=True)
    showCenterGuide = Bool(default_value=False).tag(sync=True)
    # trackDefaults = # missing documentation
    roi = List(InstanceDict(AnnotationTrack)).tag(sync=True, **widget_serialization) # regions of interest
    oauthToken = Unicode(allow_none = True).tag(sync=True)
    apiKey = Unicode(allow_none = True).tag(sync=True)
    clientId = Unicode(allow_none = True).tag(sync=True)

    def add_track(self, track):
        """Append *track* to the synced track list."""
        # List subscript does not work for empty List, so handling this case manually.
        if len(self.tracks) == 0:
            self.tracks = [track]
        else:
            self.tracks = self.tracks[:] + [track]

    def remove_track(self, track):
        """Remove every track equal to *track* from the synced track list."""
        self.tracks = [t for t in self.tracks if t != track]

    def add_roi(self, roi):
        """Append a region-of-interest annotation track."""
        # List subscript does not work for empty List, so handling this case manually.
        if len(self.roi) == 0:
            self.roi = [roi]
        else:
            self.roi = self.roi[:] + [roi]

    def remove_all_roi(self):
        """Clear all regions of interest."""
        self.roi = []

    def search(self, symbol):
        """Ask the frontend to navigate to *symbol* (gene/locus search)."""
        self.send({"type": "search", "symbol": symbol})
        print("Search completed. Check the widget instance for results.")

    def dump_json(self):
        """Ask the frontend to dump the current browser configuration."""
        print("Dumping browser configuration to browser.out")
        self.send({"type": "dump_json"})

    @out.capture()
    def _custom_message_handler(self, _, content, buffers):
        # Dispatch on the frontend-provided event name.
        if content.get('event', '') == 'return_json':
            self._return_json_handler(content)

    @out.capture()
    def _return_json_handler(self, content):
        # Print the dumped configuration into the captured Output widget.
        print (content['json'])
|
from pandac.PandaModules import *
from pirates.world.WorldCreatorBase import WorldCreatorBase
from direct.directnotify.DirectNotifyGlobal import directNotify
from pirates.world.DistributedIslandAI import DistributedIslandAI
from pirates.world.DistributedOceanGridAI import DistributedOceanGridAI
from pirates.instance.DistributedInstanceWorldAI import DistributedInstanceWorldAI
import WorldGlobals
class WorldManagerAI(WorldCreatorBase):
    """AI-side world creator: spawns the instance world and ocean grid
    when island objects are loaded from the world file."""
    notify = directNotify.newCategory('WorldManagerAI')

    def __init__(self, air, worldFile=None, gameZone=WorldGlobals.ISLAND_GRID_STARTING_ZONE):
        WorldCreatorBase.__init__(self, air, worldFile)
        self.air = air            # AI repository
        self.world = None         # DistributedInstanceWorldAI, set on first Island
        self.ocean = None         # DistributedOceanGridAI, set on first Island
        self.gameZone = gameZone  # zone id used for all generated objects

    def isObjectInCurrentGamePhase(self, obj):
        # Stub: any non-null object is considered in the current phase.
        if not obj:
            return False
        return True

    def loadObject(self, object, parent, parentUid, objKey, dynamic, parentIsObj = False, fileName = None, actualParentObj = None):
        """Delegate to the base loader; for 'Island' objects additionally
        generate the distributed world and ocean grid in self.gameZone.

        NOTE(review): world/ocean are re-created for *every* Island object —
        confirm that is intended rather than create-once.
        """
        objType = WorldCreatorBase.loadObject(self, object, parent, parentUid, objKey, dynamic, parentIsObj, fileName, actualParentObj)
        if objType == 'Island':
            self.world = DistributedInstanceWorldAI(self.air)
            self.world.generateWithRequired(zoneId=self.gameZone)
            self.world.generateIslands(object['Visual']['Model'], object['Name'], objKey, object['Undockable'], self.gameZone)
            self.ocean = DistributedOceanGridAI(self.air)
            self.ocean.generateWithRequired(zoneId=self.gameZone)
|
import numpy

# Read two whitespace-separated integer vectors from stdin.
vec_a = numpy.array(input().split(), int)
vec_b = numpy.array(input().split(), int)

# inner(): dot product of the two vectors; outer(): their outer-product matrix.
print(numpy.inner(vec_a, vec_b))
print(numpy.outer(vec_a, vec_b))
|
"""flowsheet_control_test.py
* This contains tests for results instance
Joshua Boverhof, Lawrence Berekeley National Lab, 2018
John Eslick, Carnegie Mellon University, 2014
See LICENSE.md for license and copyright details.
"""
import io
import json
import logging
import time
import uuid
import urllib.request
from shutil import copyfile
from botocore.stub import Stubber
import os
# Repository root (tests are expected to run from the checkout directory).
TOP_LEVEL_DIR = os.path.abspath(os.curdir)
# Must be set BEFORE importing flowsheet — presumably read at import time;
# TODO confirm against the flowsheet module.
os.environ['FOQUS_SERVICE_WORKING_DIR'] = '/tmp/foqus_test'
from .. import flowsheet
try:
    from unittest.mock import MagicMock,PropertyMock,patch
except ImportError:
    # Python 2 fallback: standalone `mock` package.
    from mock import MagicMock,patch
# Canned EC2 instance user-data, normally served by the metadata endpoint.
INSTANCE_USERDATA_JSON = b'''{"FOQUS-Update-Topic-Arn":"arn:aws:sns:us-east-1:387057575688:FOQUS-Update-Topic",
"FOQUS-Message-Topic-Arn":"arn:aws:sns:us-east-1:387057575688:FOQUS-Message-Topic",
"FOQUS-Job-Queue-Url":"https://sqs.us-east-1.amazonaws.com/387057575688/FOQUS-Gateway-FOQUSJobSubmitQueue-XPNWLF4Q38FD",
"FOQUS-Simulation-Bucket-Name":"foqussimulationdevelopment1562016460",
"FOQUS-DynamoDB-Table":"FOQUS_Table"
}'''
def test_floqus_aws_config():
    """get_instance() parses user-data from the (mocked) EC2 metadata URL."""
    output = io.BytesIO(INSTANCE_USERDATA_JSON)
    # Patch urlopen module-wide so get_instance() reads our canned user-data.
    urllib.request.urlopen = MagicMock(return_value=output)
    config = flowsheet.FOQUSAWSConfig()
    config.get_instance()
def test_flowsheet_control():
    """Smoke test: FlowsheetControl constructs from a pre-seeded config."""
    # NOTE(review): `output` is unused here — looks like copy/paste residue.
    output = io.BytesIO(INSTANCE_USERDATA_JSON)
    # Seed the config singleton directly instead of hitting the metadata URL.
    flowsheet.FOQUSAWSConfig._inst = flowsheet.FOQUSAWSConfig()
    flowsheet.FOQUSAWSConfig._inst._d = json.loads(INSTANCE_USERDATA_JSON)
    fc = flowsheet.FlowsheetControl()
def test_flowsheet_control_run():
    """End-to-end run() with DB, SQS and job-download collaborators mocked out.

    run_foqus is replaced with a stub that stops the control loop after the
    first job, so fc.run() terminates.
    """
    # NOTE(review): `output` is unused here — looks like copy/paste residue.
    output = io.BytesIO(INSTANCE_USERDATA_JSON)
    # Seed the config singleton directly instead of hitting the metadata URL.
    flowsheet.FOQUSAWSConfig._inst = flowsheet.FOQUSAWSConfig()
    flowsheet.FOQUSAWSConfig._inst._d = json.loads(INSTANCE_USERDATA_JSON)
    # Silence all TurbineLiteDB side effects.
    flowsheet.TurbineLiteDB.consumer_register = MagicMock(return_value=None)
    flowsheet.TurbineLiteDB.add_message = MagicMock(return_value=None)
    flowsheet.TurbineLiteDB.job_change_status = MagicMock(return_value=None)
    flowsheet.TurbineLiteDB.consumer_keepalive = MagicMock(return_value=None)
    # pop_job: downloads simulation file into working dir
    tp = ('testuser', dict(Id=str(uuid.uuid4()), Simulation='test'))
    flowsheet.FlowsheetControl.pop_job = MagicMock(return_value=tp)
    # Stage the simulation/vfile inputs pop_job would normally download.
    orig_simulation_file_path = os.path.abspath(
        os.path.join(TOP_LEVEL_DIR,
            'examples/tutorial_files/Flowsheets/Tutorial_1/Simple_flow.foqus'
        )
    )
    sfile,rfile,vfile,ofile = flowsheet.getfilenames(tp[1]['Id'])
    copyfile(orig_simulation_file_path, sfile)
    with open(vfile, 'w') as fd:
        fd.write("{}")
    flowsheet.FlowsheetControl._delete_sqs_job = MagicMock(return_value=None)
    fc = flowsheet.FlowsheetControl()
    # Replace the DynamoDB client with a (non-functional) stubber and mock
    # the two calls run() makes on it.
    stubber = Stubber(fc._dynamodb)
    fc._dynamodb = stubber
    #_describe_table_response = {}
    #expected_params = dict(TableName=fc._dynamodb_table_name)
    #stubber.add_response('describe_table', _describe_table_response, expected_params)
    #stubber.activate()
    # stubber doesn't WORK.
    stubber.describe_table = MagicMock(return_value=None)
    stubber.get_item = MagicMock(return_value=dict(
        Item={'Id':'hi', 'Simulation':'test'}))
    def _run_foqus(self, db, job_desc):
        # Stop the control loop after the first (mocked) job.
        fc.stop()
    flowsheet.FlowsheetControl.run_foqus = _run_foqus
    fc.run()
|
class Node:
    """A singly-linked-list node holding `data` and a `ref` to the next node."""
    def __init__(self, data):
        self.data = data
        self.ref = None   # next node; None marks the end of the list


class Linked_list:
    """A minimal singly linked list with append and print-traversal."""
    def __init__(self):
        self.head = None  # first node; None means the list is empty

    def traverse(self):
        """Print each node's data in order, or a notice when empty."""
        if self.head is None:
            print("Linked list is empty")
        else:
            n = self.head
            while n is not None:
                print(n.data)
                n = n.ref

    def add_node(self, data):
        """Append *data* at the tail (O(n): walks to the last node)."""
        new_node = Node(data)
        if self.head is None:  # fixed: was `== None`
            self.head = new_node
        else:
            n = self.head
            while n.ref is not None:
                n = n.ref
            n.ref = new_node
# Build a two-node demo list and print its contents.
demo_list = Linked_list()
for name in ("siva", "kumar"):
    demo_list.add_node(name)
demo_list.traverse()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import re
from datetime import datetime
# Legacy patterns from an earlier dispatcher-log format, kept for reference:
#ori_reg = re.compile(r"Orig: (0x\w{4})")
#cmd_reg = re.compile(r"Cmd: (0x\w{4})")
#par_reg = re.compile(r"Param: (.+)$")
#lin_reg = re.compile(r"^\[Dispatcher\]")
# Line that starts a multiplexing record.
lin_reg = re.compile(r"^fp2_pay_i_multiplexing")
# Captures the single pay_i character from a record line.
pay_i_reg = re.compile(r"pay_i=(\w{1})")
# First word character of a state line.
pay_state_reg = re.compile(r"(\w)")
# Follow-up state lines begin with "pay_".
lin_reg2= re.compile(r"^pay_")
# NOTE(review): apparently unused — the __main__ block assigns a global
# named `cmd_names` instead; confirm before removing.
cmd_name = {}
def parse_reg(reg):
    """Extract the pay_i value from a multiplexing log line.

    Parameters
    ----------
    reg : str
        One log line (caller usually strips the trailing CRLF).

    Returns
    -------
    str
        The captured pay_i character, or '' when the line has no pay_i field.
        (Replaces the original bare try/except-pass; commented-out legacy
        orig/cmd/param handling removed.)
    """
    match = pay_i_reg.search(reg)
    return match.group(1) if match else ''
def parse_reg2(reg):
    """Extract the pay state character from a pay_* log line.

    Parameters
    ----------
    reg : str
        One log line.

    Returns
    -------
    str
        The first word character of the line, or '' when there is none.
        (Replaces the original bare try/except-pass; commented-out legacy
        handling removed.)
    """
    match = pay_state_reg.search(reg)
    return match.group(1) if match else ''
def load_cmd(filename):
    """Load command names from *filename* into an {id: name} dict.

    Only lines containing 'referencia' are parsed: the id is everything
    before the first comma, the name everything after ': ' (minus the
    trailing newline — lines are assumed newline-terminated).

    Returns None when *filename* is empty/falsy.
    (Fixes: `filename is ''` identity test on a literal; file handle is now
    closed on every path via a context manager.)
    """
    if not filename:
        return None
    cmds = {}
    with open(filename) as fin:
        for line in fin:
            if 'referencia' in line:
                _id = line[0:line.find(',')]
                _name = line[line.find(':')+2:-1]
                cmds[_id] = _name
    return cmds
def main(in_file, out_file):
    """Convert a multiplexing log into CSV rows 'linea,pay_i,pay_state'.

    Two-state scanner:
      state 0 -- looking for an fp2_pay_i_multiplexing record line;
      state 1 -- looking for its pay_* state line; if another record line
                 arrives first, the open row is closed with 'pay_i_no_state'.

    Fixes: `out_file is ''` identity test on a literal; the debug print is
    parenthesized (same output on Python 2, also valid on Python 3).
    """
    if not out_file:
        return
    print("hola mundo\n")  # debug marker, kept from the original
    fin = open(in_file, 'r')
    fout = open(out_file, 'w')
    header = 'linea,pay_i,pay_state'
    fout.write(header+'\n')
    n_line = 0
    state = 0
    for line in fin:
        if state == 0:  # looking for a record line
            if lin_reg.search(line):
                # Found one: emit 'line_no,pay_i,' and await its state line.
                line = parse_reg(line[:-2])
                fout.write('{0},{1},'.format(n_line, line))
                state = 1
        if state == 1:  # looking for the record's state line (if any)
            if lin_reg2.search(line):
                fout.write('{0}'.format(line))
                state = 0
            if lin_reg.search(line):
                # A second record before any state line: close the first row
                # with a placeholder, open the new one.
                line = parse_reg(line[:-2])
                fout.write('pay_i_no_state\n{0},{1},'.format(n_line, line))
                state = 1
        n_line += 1
    fin.close()
    fout.close()
if __name__ == '__main__':
    # CLI: positional input log; optional -o output CSV and -c names file.
    parser = argparse.ArgumentParser(prog='Parse commands')
    parser.add_argument('input', help='Input file')
    parser.add_argument('-o','--output', help='Output file', default='')
    parser.add_argument('-c','--cmd_names',help='Cmd names file', default='')
    args = parser.parse_args()
    in_file = args.input
    out_file = args.output
    cmd_file = args.cmd_names
    # Global; referenced only by commented-out code in the parse helpers.
    cmd_names = load_cmd(cmd_file)
    main(in_file, out_file)
|
#!/usr/bin/env python
import sys,re,time,argparse
def main(args):
    """Run best_alignment over the parsed CLI args, logging wall-clock times."""
    sys.stdout.write("Start analysis: " + time.strftime("%a,%d %b %Y %H:%M:%S") + "\n")
    sys.stdout.flush()
    best_alignment(args.input,args.output)
    sys.stdout.write("Finish analysis: " + time.strftime("%a,%d %b %Y %H:%M:%S") + "\n")
    sys.stdout.flush()
def best_alignment(input_gpd,output_gpd):
    """Pick at most one best alignment per read from a sorted gpd file.

    Input must be pre-sorted with `sort -k1,1 -k15,15n -k16,16n` (read id,
    then read-error, then ref-error) so a read's best alignment comes first.
    A uniquely-mapped read is emitted with flag 'U'; a multi-mapped read is
    emitted with flag 'M' only when its first alignment strictly beats the
    second on read-sequence error (or, failing that, on reference error) —
    ties are dropped.  Python 2 code (`print >>`).

    NOTE(review): the final read group is never flushed after the loop, so
    the last read in the file is silently dropped — confirm and fix upstream.
    """
    head = 1
    for line in input_gpd: # sort by -k1,1 -k15,15n -k16,16n
        if head:
            # First line: initialise the per-read accumulators.
            read_id = line.strip().split("\t")[0]
            error_read,error_ref = [float(i) for i in line.strip().split("\t")[-2:]]
            dic_read_line,dic_read_error_read,dic_read_error_ref = {},{},{}
            dic_read_line[read_id],dic_read_error_read[read_id],dic_read_error_ref[read_id] = [],[],[]
            dic_read_line[read_id].append(line.strip())
            dic_read_error_read[read_id].append(error_read)
            dic_read_error_ref[read_id].append(error_ref)
            head -= 1
            continue
        if line.strip().split("\t")[0] != read_id:
            # New read id: decide and emit the previous read's best alignment.
            if len(dic_read_line[read_id]) == 1: # uniquely map
                print >>output_gpd, dic_read_line[read_id][0] + "\tU"
            else:
                if dic_read_error_read[read_id][0] < dic_read_error_read[read_id][1]: # error in read sequence
                    print >>output_gpd, dic_read_line[read_id][0] + "\tM"
                else:
                    if dic_read_error_ref[read_id][0] < dic_read_error_ref[read_id][1]: # error in ref sequence
                        print >>output_gpd, dic_read_line[read_id][0] + "\tM"
            # Reset the accumulators for the new read.
            read_id = line.strip().split("\t")[0]
            error_read,error_ref = [float(i) for i in line.strip().split("\t")[-2:]]
            dic_read_line,dic_read_error_read,dic_read_error_ref = {},{},{}
            dic_read_line[read_id],dic_read_error_read[read_id],dic_read_error_ref[read_id] = [],[],[]
            dic_read_line[read_id].append(line.strip())
            dic_read_error_read[read_id].append(error_read)
            dic_read_error_ref[read_id].append(error_ref)
        else:
            # Same read: accumulate this alignment.
            dic_read_line[read_id].append(line.strip())
            dic_read_error_read[read_id].append(error_read)
            dic_read_error_ref[read_id].append(error_ref)
    input_gpd.close()
    output_gpd.close()
def do_inputs():
    """Build and parse the CLI arguments; returns the argparse namespace."""
    # Reference description of the 17-column output format (not printed).
    output_gpd_format = '''
    1. read id
    2. read id
    3. chromosome id
    4. strand
    5. start site of alignment
    6. end site of alignment
    7. MAPQ
    8. number of nucleotides that are softly-clipped by aligner; left_right
    9. exon count
    10. exon start set
    11. exon end set
    12. sam flag
    13. error in read sequence
    14. error in reference genome sequence
    15. error rate in read sequence
    16. error rate in reference genome sequence
    17. unique ('U') or multiple 'M' alignment'''
    parser = argparse.ArgumentParser(description="Function: get best one alignment for each read",formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-i','--input',type=argparse.FileType('r'),required=True,help="Input: long read gpd fiel generated by 'py_isoseqpse_sam2gpd_pacbio.py', then must be 'sort -k1,1 -k15,15n -k16,16n'")
    parser.add_argument('-o','--output',type=argparse.FileType('w'),required=True,help="Output: gpd file with best alignment (at most one) for each read")
    args = parser.parse_args()
    return args
if __name__=="__main__":
    # Parse CLI args, then run the analysis.
    args = do_inputs()
    main(args)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, with_statement
from revolver import contextmanager as ctx
from revolver import file, text
def ensure(lines):
    """Ensure each line in *lines* is present in /etc/sudoers.

    Runs under sudo; missing lines are appended via text.ensure_line.
    """
    with ctx.sudo():
        file.update('/etc/sudoers', lambda _: text.ensure_line(_, *lines))
|
import os
# Directory containing this settings module.
BASE_DIRS = os.path.dirname(__file__)
# Server start-up options (port to listen on).
options = {
    "port": 8000
}
# Application settings — presumably passed to a Tornado Application
# (static/template paths, debug switch); confirm against the caller.
settings = {
    "static_path": os.path.join(BASE_DIRS, "static"),
    "template_path": os.path.join(BASE_DIRS, "templates"),
    "debug": False
}
|
from ..FeatureExtractor import FeatureExtractor
from common_functions import ChiSquare
class chi2extractor(FeatureExtractor,ChiSquare):
    """Feature extractor: chi-square sum of the flux data against the
    constant model supplied by the 'dc' extractor."""
    active = True
    extname = 'chi2' #extractor's name
    def extract(self):
        # 'dc' extractor provides the constant level the flux is compared to.
        dc = self.fetch_extr('dc')
        chisquare = self.chi_square_sum(self.flux_data,lambda x: dc,x=self.time_data,rms=self.rms_data)
        return chisquare
|
from enum import Enum
import bleach
import markdown
import requests
from babel.dates import get_timezone_name
from django.contrib.auth.models import AbstractUser
from django.contrib.gis.db.models import PointField
from django.db import models
from django.utils.safestring import mark_safe
from django.utils.text import Truncator
from pytz import timezone
from sentry_sdk import add_breadcrumb
from timezonefinder import TimezoneFinder
tf = TimezoneFinder()
class EventType(Enum):
    """Event categories; the 4-char member name is stored in Event.kind."""
    SOCI = "Social"
    MEET = "Meeting"
    WORK = "Work"
    MAPE = "Map Event"
    CONF = "Conference"
class Event(models.Model):
name = models.CharField(max_length=200)
start = models.DateTimeField()
end = models.DateTimeField(blank=True, null=True)
whole_day = models.BooleanField(default=False)
timezone = models.CharField(max_length=100, blank=True, null=True)
location_name = models.CharField(max_length=50, blank=True, null=True)
location = PointField(blank=True, null=True)
location_address = models.JSONField(blank=True, null=True)
link = models.URLField(blank=True, null=True)
kind = models.CharField(max_length=4, choices=[(x.name, x.value) for x in EventType])
description = models.TextField(
blank=True,
null=True,
help_text=mark_safe(
'Tell people what the event is about and what they can expect. You may use <a href="https://daringfireball.net/projects/markdown/syntax" target="_blank">Markdown</a> in this field.'
),
)
cancelled = models.BooleanField(default=False)
hidden = models.BooleanField(default=False)
def save(self, *args, **kwargs):
if self.location:
self.geocode_location()
super().save(*args, **kwargs)
def geocode_location(self):
nr = requests.get(
"https://nominatim.openstreetmap.org/reverse",
params={"format": "jsonv2", "lat": self.location.y, "lon": self.location.x, "accept-language": "en"},
)
self.location_address = nr.json().get("address", None)
if self.location_address is None:
add_breadcrumb(category="nominatim", level="error", data=nr.json())
@property
def location_text(self):
if not self.location_address:
return None
addr = self.location_address
return ", ".join(
filter(
lambda x: x is not None,
[addr.get("village"), addr.get("town"), addr.get("city"), addr.get("state"), addr.get("country")],
)
)
@property
def location_detailed_addr(self):
# TODO: improve
if not self.location_address:
return None
addr = self.location_address
return ", ".join(
filter(
lambda x: x is not None,
[
self.location_name,
addr.get("house_number"),
addr.get("road"),
addr.get("suburb"),
addr.get("village"),
addr.get("city"),
addr.get("state"),
addr.get("country"),
],
)
)
@property
def start_localized(self):
tz = timezone(self.timezone)
return self.start.astimezone(tz)
@property
def end_localized(self):
if not self.end:
return None
tz = timezone(self.timezone)
return self.end.astimezone(tz)
@property
def tz_name(self):
return get_timezone_name(self.start_localized)
@property
def year_month(self):
l = self.start_localized
return (l.year, l.month)
@property
def short_description_without_markup(self) -> str:
if not self.description:
return ""
max_words = 15
cleaned = bleach.clean(markdown.markdown(self.description), tags=[], strip=True)
return Truncator(cleaned).words(max_words)
@property
def originally_created_by(self) -> "User":
return self.log.order_by("created_at").first().created_by
    class Meta:
        # Index on "end" to speed up queries filtering on past/upcoming events.
        indexes = (models.Index(fields=("end",)),)
class AnswerType(Enum):
    """Kinds of answers a participation question can accept.

    Member names (4 chars) are stored in ParticipationQuestion.answer_type;
    values are the human-readable labels shown as choices.
    """
    TEXT = "Text Field"  # free-form text
    CHOI = "Choice"      # one of the question's ParticipationQuestionChoice rows
    BOOL = "Boolean"     # yes/no
class ParticipationQuestion(models.Model):
    """A question event organisers ask would-be participants."""
    # SET_NULL keeps questions (and their answers) around if the event goes away.
    event = models.ForeignKey("Event", null=True, on_delete=models.SET_NULL, related_name="questions")
    question_text = models.CharField(max_length=200)
    # Stores the 4-char AnswerType member name (TEXT/CHOI/BOOL).
    answer_type = models.CharField(max_length=4, choices=[(x.name, x.value) for x in AnswerType])
    mandatory = models.BooleanField(default=True)
    class Meta:
        ordering = ("event", "id")
class ParticipationQuestionChoice(models.Model):
    """One selectable option for a choice-type participation question."""
    question = models.ForeignKey(ParticipationQuestion, related_name="choices", on_delete=models.CASCADE)
    text = models.CharField(max_length=200)
    class Meta:
        # Stable display order: by question, then insertion order.
        ordering = ("question", "id")
class EventParticipation(models.Model):
    """Marks a user as participating in an event (at most once per pair)."""
    event = models.ForeignKey("Event", null=True, on_delete=models.SET_NULL, related_name="participation")
    user = models.ForeignKey("User", null=True, on_delete=models.SET_NULL)
    added_on = models.DateTimeField(auto_now_add=True, null=True)
    class Meta:
        # NOTE(review): unique_together is the legacy spelling; a
        # UniqueConstraint (as used by ParticipationAnswer) is the modern form.
        unique_together = ["event", "user"]
class ParticipationAnswer(models.Model):
    """A user's answer to one participation question (one answer per user)."""
    question = models.ForeignKey(ParticipationQuestion, on_delete=models.CASCADE, related_name="answers")
    user = models.ForeignKey("User", null=True, on_delete=models.SET_NULL)
    # All answer types are serialized to text (see AnswerType).
    answer = models.CharField(max_length=200)
    class Meta:
        constraints = (models.UniqueConstraint(fields=("question", "user"), name="unique_question_answer"),)
class EventLog(models.Model):
    """Append-only audit trail of changes made to an event."""
    event = models.ForeignKey("Event", related_name="log", on_delete=models.CASCADE)
    data = models.JSONField()  # arbitrary change payload
    created_by = models.ForeignKey("User", null=True, on_delete=models.SET_NULL)
    created_at = models.DateTimeField(auto_now_add=True)
class User(AbstractUser):
    """Site user, optionally backed by an OpenStreetMap account."""
    id = models.AutoField(primary_key=True)
    osm_id = models.IntegerField(null=True)  # set for users who sign in via OSM OAuth
    name = models.CharField(max_length=255)
    home_location = PointField(blank=True, null=True)
    is_moderator = models.BooleanField(default=False)
    def home_timezone(self):
        """IANA timezone name at the user's home location, or None if unset."""
        if not self.home_location:
            return None
        return tf.timezone_at(lng=self.home_location.x, lat=self.home_location.y)
    def save(self, *args, **kwargs):
        # Derive a default username when none was provided.
        if not self.username:
            if self.osm_id:
                self.username = "osm_" + str(self.osm_id)
            else:
                # NOTE(review): for a brand-new row self.id is still None here
                # (the PK is assigned inside super().save()), so this stores
                # the literal string "None" — confirm and fix upstream.
                self.username = str(self.id)
        super().save(*args, **kwargs)
|
#!/usr/bin/env python2.7
import sys, os, zipfile
def parse(fobj):
    """Extract the /files-relative .py path from a submission HTML file.

    Scans each line for the PythonAnywhere editor URL, strips a few simple
    HTML tags and %20 escapes, and returns the path portion after "files".
    Raises on a malformed URL or when no URL is present at all.
    """
    baseurl = "https://www.pythonanywhere.com/user"
    for raw_line in fobj.readlines():
        cleaned = raw_line
        for tag in ("<span>", "</span>", "<p>", "</p>", "<br>"):
            cleaned = cleaned.replace(tag, '')
        cleaned = cleaned.replace("%20", " ")
        if baseurl not in cleaned:
            continue
        tail = cleaned.partition(baseurl)[-1]
        tail = tail.partition('.py')[0] + ".py"
        tail = tail.partition("files")[-1]
        if not tail or '>' in tail or '<' in tail:
            raise Exception("url is messed up: %s" % tail)
        return tail
    raise Exception("Could not find url in submission")
def process(zipf):
    """Walk a submissions zip and emit bash scripts that copy each file.

    Archive members are named "<folder>_<user>_...": the folder becomes the
    assignment name and the user the student id. For each member the
    submitted PythonAnywhere URL is parsed and two `cp` lines (one per
    grader home directory) are accumulated. Python 2 only (print statement,
    file() builtin).
    """
    output = ""
    count = 0  # number of successfully parsed submissions (currently unused)
    with zipfile.ZipFile(zipf) as myzip:
        for f in myzip.namelist():
            parts = f.split("_")
            # Folder names may contain URL-encoded spaces; drop spaces entirely.
            folder = parts[0].replace("%20", " ").replace(" ", "")
            user = parts[1]
            try:
                url = parse(myzip.open(f))
            except Exception as err:
                # Report the failing student and keep going with the rest.
                print user, err
                # raise err
                continue
            newname = folder + "/" + user+'.py'  # NOTE(review): unused
            output += 'cp "%s" /home/csomerlot/ERE335/Grading/%s/%s.py\r\n' % (url, folder, user)
            output += 'cp "%s" /home/mdinakar/ERE335/Grading/%s/%s.py\r\n' % (url, folder, user)
            count +=1
    # NOTE(review): `folder` here is whatever the *last* loop iteration set,
    # but `output` contains cp lines for every folder — confirm the zip only
    # ever holds one assignment folder.
    with file('get%sfiles'%folder, 'w') as bashs:
        bashs.write("#!/bin/bash\r\n\r\n")
        bashs.write("mkdir /home/csomerlot/ERE335/Grading/%s\r\n" % folder)
        bashs.write("mkdir /home/mdinakar/ERE335/Grading/%s\r\n\r\n" % folder)
        bashs.write(output)
if __name__ == '__main__':
    # Expects exactly one argument: the downloaded submissions zip file.
    if len(sys.argv) != 2:
        print "Usage: %s zipfile" % sys.argv[0]
    else:
        print
        process(sys.argv[1])
|
# Lists are Python's equivalent of arrays.
name_List = ['George', 'John', 'Lina', 'Alex', 'Mars', 'Duke']
print(name_List)
# append() adds a single element to the end of the list.
name_List.append('Tim')
print(name_List)
# insert(0, 'Coco') places 'Coco' at index 0.
name_List.insert(0, 'Coco')
print(name_List)
# Concatenation with + builds a brand-new list from both operands.
name_List_2 = ['王菲', '梁朝伟', '谢霆锋', '张学友']
name = name_List + name_List_2
print(name)
# extend() appends the second list's items onto the first, in place.
name_List.extend(name_List_2)
print(name_List)
# Removing elements:
# pop() with no argument removes the last element.
name_List.pop()
print(name_List)
# remove() deletes the first occurrence of the given value.
name.remove('George')
print(name)
# del removes the element at the given index.
del name[0]
print(name)
# Updating elements: assignment by index replaces position 0.
name[0] = 'Shirley'
# Membership tests use `in` / `not in`.
if 'Angler' not in name_List:
    print('Name available..')
|
from sys import float_info
#https://www.interviewcake.com/question/python/stock-price
#brute force: compare every timestamp with every later one and keep the
#best difference; sum(1..n-1) comparisons = n(n-1)/2 = O(n^2).
#get_max_profit2 below is the O(n) greedy version.
def get_max_profit(prices):
    """Best single buy/sell profit, brute force O(n^2).

    Compares every buy time with every later sell time. The running best
    was previously named `max`, shadowing the builtin — renamed.
    May be negative when prices only fall; -sys.float_info.max is returned
    for inputs with fewer than two prices (unchanged behaviour).
    """
    best_profit = -float_info.max
    for buy_time in range(len(prices)):
        for sell_price in prices[buy_time + 1:]:
            profit = sell_price - prices[buy_time]
            if profit > best_profit:
                best_profit = profit
    return best_profit
#real solution was greedy...
def get_max_profit2(prices):
    """Best single buy/sell profit in one greedy pass, O(n).

    Bug fix: the original body read the module-level global
    stock_prices_yesterday everywhere, silently ignoring its `prices`
    argument.
    """
    min_price = prices[0]
    # Seed with the first possible trade so an all-declining series yields
    # a (negative) best profit instead of 0.
    max_profit = prices[1] - prices[0]
    for index, current_price in enumerate(prices):
        # We can't sell at time 0 — we must buy first. Without this skip,
        # buying and selling at time 0 would wrongly floor the answer at 0.
        if index == 0:
            continue
        # Profit if we bought at the cheapest price so far and sold now.
        potential_profit = current_price - min_price
        max_profit = max(max_profit, potential_profit)
        # Track the cheapest buy-in seen so far.
        min_price = min(min_price, current_price)
    return max_profit
# The second assignment overwrites the first: only the declining series is
# actually passed in.
stock_prices_yesterday = [10, 7, 5, 8, 11, 9]
stock_prices_yesterday = [10, 9, 8, 7, 6, 5]
print (get_max_profit2(stock_prices_yesterday))
# prints -1 for the declining series (buy at 10, sell at 9); the first
# list would have returned 6 (buy at $5, sell at $11).
###############################################################################
#repeated-subtraction division: O(num/den) iterations, not O(1)
def divide(num,den):
    """Truncating integer division of num by den via repeated subtraction.

    Fixes two defects: the result previously started at 1 with a strict
    `num-den > 0` test, returning the *ceiling* for inexact divisions
    (divide(9, 2) gave 5); and a zero divisor with a nonzero numerator
    looped forever (num - 0 never shrinks). Truncates toward zero.
    Raises ZeroDivisionError for a zero divisor, except 0/0 which stays 0
    for backward compatibility with get_products_of_all_ints_except_at_index.
    """
    if num == 0:
        return 0
    if den == 0:
        raise ZeroDivisionError("integer division by zero")
    # Result is negative exactly when the operand signs differ.
    sign = -1 if (num < 0) != (den < 0) else 1
    num = abs(num)
    den = abs(den)
    result = 0
    while num >= den:
        num -= den
        result += 1
    return sign*result
#brute force is n^2: for each index, re-multiply the other n-1 values
#this two-pass prefix/suffix version is O(n) and uses no division, which
#also fixes inputs containing a zero: the divide-based original returned
#[0, 0, 0, 0] for [1, 0, 3, 4] instead of the correct [0, 12, 0, 0]
def get_products_of_all_ints_except_at_index(vals):
    """Return a list where slot i holds the product of all vals except vals[i]."""
    length = len(vals)
    result = [1] * length
    # Pass 1: result[i] = product of everything before index i.
    running = 1
    for i in range(length):
        result[i] = running
        running *= vals[i]
    # Pass 2: fold in the product of everything after index i.
    running = 1
    for i in range(length - 1, -1, -1):
        result[i] *= running
        running *= vals[i]
    return result
# quick demo (now prints [0, 12, 0, 0])
print(get_products_of_all_ints_except_at_index([1,0,3,4]))
################################################################################################################
#brute force
# O(n^3)
def highest_product_1(input):
    """Highest product of any three ints, checked exhaustively.

    Bug fix: the running best used to start at 0, which is wrong whenever
    every triple product is negative (e.g. an all-negative input returned
    0). Starts from the first real product instead; returns None for
    fewer than three values. (`input` shadows the builtin but is kept for
    interface compatibility.)
    """
    best = None
    for i in range(len(input)):
        for j in range(i + 1, len(input)):
            for k in range(j + 1, len(input)):
                product = input[i] * input[j] * input[k]
                if best is None or product > best:
                    best = product
    return best
#sorting
#O(nlogn)
def highest_product_2(input):
    """Highest product of any three ints via sorting.

    Bug fix: taking just the top three after sorting misses the case where
    two large negatives times the largest positive win (the original
    returned 6 instead of 300 for [-10, -10, 1, 3, 2]); compare both
    candidates.
    """
    ordered = sorted(input)
    top_three = ordered[-1] * ordered[-2] * ordered[-3]
    two_lowest_times_highest = ordered[0] * ordered[1] * ordered[-1]
    return max(top_three, two_lowest_times_highest)
#divide and conquer - split the list in 3 parts recursively, find max in each, recursively
#not O(n)
#123456789
#123 456 789
#1 2 3 4 5 6 7 8 9
#this kind of solution will not work
#Greedy
#this is a maximization problem, so a single greedy pass works if we track
#enough state: the extreme values AND the extreme pair products seen so far
#total O(n), O(1) space
def highest_product_3(input):
    """Highest product of any three ints in one pass.

    Bug fix: the previous version only kept the three largest values, so
    two very negative numbers times a positive could never win (it
    returned 6 instead of 300 for [-10, -10, 1, 3, 2]). It also shadowed
    the `list` builtin.
    """
    highest = max(input[0], input[1])
    lowest = min(input[0], input[1])
    highest_product_of_2 = input[0] * input[1]
    lowest_product_of_2 = input[0] * input[1]
    highest_product_of_3 = input[0] * input[1] * input[2]
    for current in input[2:]:
        # Order matters: combine `current` with pair products computed from
        # *previous* values before folding it into those pair products.
        highest_product_of_3 = max(highest_product_of_3,
                                   current * highest_product_of_2,
                                   current * lowest_product_of_2)
        highest_product_of_2 = max(highest_product_of_2, current * highest, current * lowest)
        lowest_product_of_2 = min(lowest_product_of_2, current * highest, current * lowest)
        highest = max(highest, current)
        lowest = min(lowest, current)
    return highest_product_of_3
# Expected highest product: (-10) * (-10) * 3 = 300.
print(highest_product_1([-10,-10,1,3,2]))
|
import tensorflow as tf
from tensorflow.keras.layers import Input, Dense
from tensorflow.keras.models import Sequential
from dask.distributed import Client, LocalCluster
from sklearn.metrics import mean_squared_error
import numpy as np
def main():
    """Grid-search layer/neuron counts by training models in parallel on a
    local Dask cluster; prints the per-configuration training MSEs.
    """
    # One thread per worker so each TF training run gets its own process.
    cluster = LocalCluster(n_workers=4, threads_per_worker=1)
    client = Client(cluster)
    print("started cluster")
    # Hyperparameter grid: every (layers, neurons) pair becomes one task.
    num_layers = [2, 3, 4]
    num_neurons = [20, 40, 60]
    futures = []
    for l in num_layers:
        for n in num_neurons:
            futures.append(client.submit(train_random_model, l, n))
    # gather() blocks until all nine runs finish; results keep submit order.
    results = client.gather(futures)
    print(results)
    client.close()
    return
def train_random_model(num_layers, num_neurons):
    """Train a small MLP to approximate the product of its 5 inputs.

    Builds `num_layers` hidden Dense layers of `num_neurons` units each,
    fits on freshly generated random data, and returns the training-set MSE.
    """
    print(num_layers, num_neurons)
    n_inputs = 5
    examples = 10000
    x = np.random.normal(size=(examples, n_inputs)).astype(np.float32)
    y = x.prod(axis=1)
    mod = Sequential()
    mod.add(Input(shape=(n_inputs,)))
    for _ in range(num_layers):
        # Bug fix: the hidden layers had no activation, so the whole stack
        # collapsed to a single linear map — unable to model a product, and
        # making depth/width comparisons meaningless.
        mod.add(Dense(num_neurons, activation="relu"))
    mod.add(Dense(1))
    mod.compile(optimizer="adam", loss="mse")
    mod.fit(x, y, epochs=30)
    preds = mod.predict(x)
    return mean_squared_error(y, preds)
if __name__ == "__main__":
    # Only spin up the Dask cluster when run as a script, not on import.
    main()
|
import tempfile
import boto3
from django.conf import settings
class S3Wrapper:
    """Thin convenience wrapper around a single S3 bucket."""
    def __init__(self, bucket_name):
        """Create a boto3 S3 resource with the project credentials and bind the bucket."""
        s3_resource = boto3.resource(
            "s3",
            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
        )
        self.bucket = s3_resource.Bucket(bucket_name)
    def get_file(self, filepath):
        """Download `filepath` from the bucket into a named temp file.

        Returns the open NamedTemporaryFile; the underlying file is deleted
        automatically when the returned handle is closed or garbage-collected.
        """
        downloaded = tempfile.NamedTemporaryFile()
        self.bucket.download_file(filepath, downloaded.name)
        return downloaded
|
from django.db import models
from django.conf import settings
from django.contrib.auth.base_user import AbstractBaseUser, BaseUserManager
from rest_framework.authtoken.models import Token
from django.dispatch import receiver
from django.db.models.signals import post_save
import datetime
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
# Create your models here.
class AccountManager(BaseUserManager):
    """Manager for the custom email-based User model."""
    def create_user(self, email ,password, **extra_fields):
        """Create and save a user; both email and password are mandatory."""
        if not email:
            raise ValueError("Users must have an email address")
        if not password:
            raise ValueError("Users must enter password")
        user = self.model(
            email = self.normalize_email(email),  # lowercases the domain part
            **extra_fields
        )
        user.set_password(password)  # stores a hash, never the plain text
        user.is_active = True
        user.save(using=self._db)
        return user
    def create_staffuser(self, email, password):
        """Create a user with staff (admin-site) access."""
        user = self.create_user(email,password=password)
        user.is_staff = True
        # Second save: flags are set after the initial create_user() save.
        user.is_active = True
        user.save(using=self._db)
        return user
    def create_superuser(self, email, username, password):
        """Create a user with every permission flag set (manage.py createsuperuser)."""
        user = self.create_user(
            email = self.normalize_email(email),
            password=password,
            username=username
        )
        user.is_admin = True
        user.is_staff = True
        user.is_superuser = True
        user.is_active = True
        user.save(using=self._db)
        return user
class User(AbstractBaseUser):
    """Custom user that logs in with email instead of username."""
    # NOTE(review): misnamed — these are user roles, not semesters, and the
    # tuple is not referenced by any field in this class; confirm usage.
    SEMESTER_CHOICES = (
        ("1", "Student"),
        ("2", "Tutor"),
        ("3", "Admin"),)
    email = models.EmailField(verbose_name="email", max_length=60, unique=True)
    # NOTE(review): username is declared as an EmailField — confirm intent.
    username = models.EmailField(max_length=30, unique=True)
    date_joined = models.DateTimeField(verbose_name='date_joined', auto_now_add=True)
    # NOTE(review): auto_now_add means this is only set once at creation and
    # never updated on subsequent logins.
    last_login = models.DateTimeField(verbose_name='last_login', auto_now_add=True)
    is_admin = models.BooleanField(default=False)
    is_active = models.BooleanField(default=True)
    is_staff = models.BooleanField(default=False)
    is_superuser = models.BooleanField(default=False)
    first_name = models.CharField(max_length=60)
    last_name = models.CharField(max_length=60)
    objects = AccountManager()
    USERNAME_FIELD = "email"  # email is the login identifier
    REQUIRED_FIELDS = ["username"]  # extra prompt for createsuperuser
    def __str__(self):
        return self.email
    def has_perm(self, perm, obj=None):
        # Admins have every permission; others have none.
        return self.is_admin
    def has_module_perms(self, app_label):
        # All users can see all app modules in the admin.
        return True
    class Meta:
        verbose_name = _('user')
        verbose_name_plural = _('users')
    def set_username_as_email(self):
        # NOTE(review): despite the name, this only *returns* the email; it
        # never assigns self.username.
        username = self.email
        return username
    def get_full_name(self):
        # Only the first name is included, despite the method name.
        full_name = "{0}".format(self.first_name)
        return full_name.strip()
    def get_short_name(self):
        return self.first_name
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_auth_token(sender, instance=None, created=False, **kwargs):
    """Create a Token instance for any User instance created.

    post_save fires on every save; the `created` guard restricts token
    creation to the initial insert, and get_or_create keeps it idempotent.
    """
    if created:
        Token.objects.get_or_create(user=instance)
class Product(models.Model):
    """A purchasable item shown in the catalogue."""
    name = models.CharField(max_length=255, null=False, blank=False)
    price= models.DecimalField(max_digits=10, decimal_places=2,default=0)
    image = models.ImageField(upload_to ='uploads/')
    description = models.CharField(max_length=255, null=True, blank=True)
class Review(models.Model):
    """A user's comment and 0-5 star rating of a product."""
    RATING_CHOICES = (
        ("0", "None"),
        ("1", "Very Poor"),
        ("2", "Bad"),
        ("3", "Average"),
        ("4", "Good"),
        ("5", "Excellent"),)
    # NOTE(review): related_name='product' puts a reverse accessor called
    # .product on Product itself — confusing; confirm before renaming
    # (requires updating every caller).
    product_item = models.ForeignKey(Product, models.CASCADE, related_name='product')
    sender = models.ForeignKey(User, models.CASCADE, related_name='reviewsender')
    comment = models.CharField(max_length=255)
    # Rating stored as the string key of RATING_CHOICES, "0" meaning unrated.
    rating = models.CharField(max_length = 20, choices = RATING_CHOICES, default = '0')
    timestamp = models.DateTimeField(default=timezone.now)
    # Flag for reviews already folded into aggregate stats — confirm usage.
    calculated_review = models.BooleanField(default=False)
class ActivityLog(models.Model):
    """Audit entry recording a user action on a product."""
    product_item = models.ForeignKey(Product, models.CASCADE, related_name='activity_product')
    user = models.ForeignKey(User, models.CASCADE, related_name='user_activity')
    event = models.CharField(max_length=255, default="review added")
    date = models.DateTimeField(auto_now_add=True)
class RatingEvaluationString(models.Model):
    """Stored textual evaluation of a product's reviews."""
    product_item = models.ForeignKey(Product, models.CASCADE, related_name='activity_product_string')
    string_review = models.CharField(max_length=255)
    # NOTE(review): field name has a typo ("fromat" -> "format"); renaming
    # needs a migration plus updates to every caller, so it is only flagged.
    fromat_string_review = models.CharField(max_length=255)
    event = models.CharField(max_length=255, default="review string added")
    date = models.DateTimeField(auto_now_add=True)
|
from collections import Counter
class Solution(object):
    def frequency_sort(self, s):
        """
        :type s: str
        :rtype: str

        Return the characters of `s` ordered by descending frequency.
        Equal counts keep first-appearance order, because sorted() is
        stable over Counter's insertion order. Removed a leftover debug
        print of the sorted counter.
        """
        if not s:
            return ""
        s_counter = Counter(s)
        # Sort the (char, count) pairs, most frequent first.
        counter_sort = sorted(s_counter.items(), key=lambda x: x[1], reverse=True)
        freq_sort = ""
        for char, count in counter_sort:
            freq_sort += char * count
        return freq_sort
        # the join-based one-liner was measurably slower on leetcode:
        # return "".join(char[0]*char[1] for char in counter_sort)
# Quick manual check: 'e' appears twice in "tree", so it leads the output.
s = "tree"
obj = Solution()
result = obj.frequency_sort(s)
print(result)
|
"""
Tests of functions to interpolate across geography.
"""
import numpy as np
from numpy.testing import assert_allclose
import pytest
import solar_energy
@pytest.fixture
def setup_linear():
    """Fixture: 3x3 bilinear surface z[i, j] = i * j plus four query points.

    Returns (x, y, z, z_expected) where z_expected holds the exact
    bilinear value at each (x, y) query point.
    """
    x_grid = np.arange(3)
    y_grid = np.arange(3)
    z = np.ones((len(x_grid), len(y_grid)))
    # Outer product: z[i, j] = x_grid[i] * y_grid[j].
    z = (z*x_grid).T*y_grid
    x = np.array([0.5, 0.5, 1.5, 1.5])
    y = np.array([0.5, 1.5, 0.5, 1.5])
    z_expected = np.array([0.25, 0.75, 0.75, 2.25])
    return x, y, z, z_expected
def test_bilinear_returns_nodes_at_nodes():
    """Interpolating a flat surface exactly at grid nodes returns the node values."""
    xs = np.arange(10.)
    ys = np.arange(10.)
    flat_surface = np.ones((10, 10))
    interpolated = solar_energy.bilinear_interpolation(xs, ys, flat_surface)
    assert_allclose(interpolated, np.ones(len(xs)))
def test_bilinear_returns_normal(setup_linear):
    """Bilinear interpolation reproduces exact values on a bilinear surface.

    Bug fix: the fixture is now injected by pytest as an argument; calling
    setup_linear() directly raises "Fixtures are not meant to be called
    directly" under modern pytest.
    """
    x, y, z, z_expected = setup_linear
    z_interpolated = solar_energy.bilinear_interpolation(x, y, z)
    assert_allclose(z_interpolated, z_expected)
def test_bilinear_x_at_nodes():
    """Query points whose x coordinate sits exactly on a grid line."""
    grid_x = np.arange(3)
    grid_y = np.arange(3)
    surface = np.ones((len(grid_x), len(grid_y)))
    # Outer product: surface[i, j] = grid_x[i] * grid_y[j].
    surface = (surface*grid_x).T*grid_y
    query_x = np.array([0., 1., 1., 2.])
    query_y = np.array([0.5, 1.5, 0.5, 1.5])
    expected = np.array([0., 1.5, 0.5, 3.])
    interpolated = solar_energy.bilinear_interpolation(query_x, query_y, surface)
    assert_allclose(interpolated, expected)
def test_spline_interpolation_flat_input():
    """Spline interpolation of a constant surface returns that constant."""
    xs = np.arange(10)
    ys = np.arange(10)
    flat_surface = np.ones((10, 10))
    expected = np.ones_like(xs)
    interpolated = solar_energy.spline_interpolation(xs, ys, flat_surface)
    assert_allclose(interpolated, expected)
def test_spline_interpolation_linear(setup_linear):
    """A degree-1 spline matches exact bilinear values on a bilinear surface.

    Bug fix: the fixture is now injected by pytest as an argument; calling
    setup_linear() directly raises "Fixtures are not meant to be called
    directly" under modern pytest.
    """
    x, y, z, z_expected = setup_linear
    z_interpolated = solar_energy.spline_interpolation(x, y, z, kx=1, ky=1)
    assert_allclose(z_interpolated, z_expected)
|
# coding: utf-8
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
class SummaryClientClientItem(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.

    Per-client protocol statistics summary item. Each swagger attribute is
    exposed as a property backed by a private slot; `_class` and `_in` keep
    a leading underscore to avoid clashing with the `class` keyword and `in`
    operator, and their backing slots are name-mangled (see __init__).
    """
    def __init__(self):
        """
        SummaryClientClientItem - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name
                                  and the value is attribute type.
        :param dict attributeMap: The key is attribute name
                                  and the value is json key in definition.
        """
        self.swagger_types = {
            '_class': 'str',
            '_in': 'float',
            'in_avg': 'float',
            'in_max': 'float',
            'in_min': 'float',
            'local_addr': 'str',
            'local_name': 'str',
            'node': 'int',
            'num_operations': 'int',
            'operation_rate': 'float',
            'out': 'float',
            'out_avg': 'float',
            'out_max': 'float',
            'out_min': 'float',
            'protocol': 'str',
            'remote_addr': 'str',
            'remote_name': 'str',
            'time': 'int',
            'time_avg': 'float',
            'time_max': 'float',
            'time_min': 'float',
            'user': 'GroupsGroupMember'
        }
        self.attribute_map = {
            '_class': 'class',
            '_in': 'in',
            'in_avg': 'in_avg',
            'in_max': 'in_max',
            'in_min': 'in_min',
            'local_addr': 'local_addr',
            'local_name': 'local_name',
            'node': 'node',
            'num_operations': 'num_operations',
            'operation_rate': 'operation_rate',
            'out': 'out',
            'out_avg': 'out_avg',
            'out_max': 'out_max',
            'out_min': 'out_min',
            'protocol': 'protocol',
            'remote_addr': 'remote_addr',
            'remote_name': 'remote_name',
            'time': 'time',
            'time_avg': 'time_avg',
            'time_max': 'time_max',
            'time_min': 'time_min',
            'user': 'user'
        }
        # The double-underscore slots are name-mangled to
        # _SummaryClientClientItem__class/__in; external access goes through
        # the _class/_in properties below.
        self.__class = None
        self.__in = None
        self._in_avg = None
        self._in_max = None
        self._in_min = None
        self._local_addr = None
        self._local_name = None
        self._node = None
        self._num_operations = None
        self._operation_rate = None
        self._out = None
        self._out_avg = None
        self._out_max = None
        self._out_min = None
        self._protocol = None
        self._remote_addr = None
        self._remote_name = None
        self._time = None
        self._time_avg = None
        self._time_max = None
        self._time_min = None
        self._user = None
    @property
    def _class(self):
        """
        Gets the _class of this SummaryClientClientItem.
        The class of the operation.

        :return: The _class of this SummaryClientClientItem.
        :rtype: str
        """
        return self.__class
    @_class.setter
    def _class(self, _class):
        """
        Sets the _class of this SummaryClientClientItem.
        The class of the operation.

        :param _class: The _class of this SummaryClientClientItem.
        :type: str
        """
        self.__class = _class
    @property
    def _in(self):
        """
        Gets the _in of this SummaryClientClientItem.
        Rate of input (in bytes/second) for an operation since the last time isi statistics collected the data.

        :return: The _in of this SummaryClientClientItem.
        :rtype: float
        """
        return self.__in
    @_in.setter
    def _in(self, _in):
        """
        Sets the _in of this SummaryClientClientItem.
        Rate of input (in bytes/second) for an operation since the last time isi statistics collected the data.

        :param _in: The _in of this SummaryClientClientItem.
        :type: float
        """
        self.__in = _in
    @property
    def in_avg(self):
        """
        Gets the in_avg of this SummaryClientClientItem.
        Average input (received) bytes for an operation, in bytes.

        :return: The in_avg of this SummaryClientClientItem.
        :rtype: float
        """
        return self._in_avg
    @in_avg.setter
    def in_avg(self, in_avg):
        """
        Sets the in_avg of this SummaryClientClientItem.
        Average input (received) bytes for an operation, in bytes.

        :param in_avg: The in_avg of this SummaryClientClientItem.
        :type: float
        """
        self._in_avg = in_avg
    @property
    def in_max(self):
        """
        Gets the in_max of this SummaryClientClientItem.
        Maximum input (received) bytes for an operation, in bytes.

        :return: The in_max of this SummaryClientClientItem.
        :rtype: float
        """
        return self._in_max
    @in_max.setter
    def in_max(self, in_max):
        """
        Sets the in_max of this SummaryClientClientItem.
        Maximum input (received) bytes for an operation, in bytes.

        :param in_max: The in_max of this SummaryClientClientItem.
        :type: float
        """
        self._in_max = in_max
    @property
    def in_min(self):
        """
        Gets the in_min of this SummaryClientClientItem.
        Minimum input (received) bytes for an operation, in bytes.

        :return: The in_min of this SummaryClientClientItem.
        :rtype: float
        """
        return self._in_min
    @in_min.setter
    def in_min(self, in_min):
        """
        Sets the in_min of this SummaryClientClientItem.
        Minimum input (received) bytes for an operation, in bytes.

        :param in_min: The in_min of this SummaryClientClientItem.
        :type: float
        """
        self._in_min = in_min
    @property
    def local_addr(self):
        """
        Gets the local_addr of this SummaryClientClientItem.
        The IP address (in dotted-quad form) of the host receiving the operation request.

        :return: The local_addr of this SummaryClientClientItem.
        :rtype: str
        """
        return self._local_addr
    @local_addr.setter
    def local_addr(self, local_addr):
        """
        Sets the local_addr of this SummaryClientClientItem.
        The IP address (in dotted-quad form) of the host receiving the operation request.

        :param local_addr: The local_addr of this SummaryClientClientItem.
        :type: str
        """
        self._local_addr = local_addr
    @property
    def local_name(self):
        """
        Gets the local_name of this SummaryClientClientItem.
        The resolved text name of the LocalAddr, if resolution can be performed.

        :return: The local_name of this SummaryClientClientItem.
        :rtype: str
        """
        return self._local_name
    @local_name.setter
    def local_name(self, local_name):
        """
        Sets the local_name of this SummaryClientClientItem.
        The resolved text name of the LocalAddr, if resolution can be performed.

        :param local_name: The local_name of this SummaryClientClientItem.
        :type: str
        """
        self._local_name = local_name
    @property
    def node(self):
        """
        Gets the node of this SummaryClientClientItem.
        The node on which the operation was performed.

        :return: The node of this SummaryClientClientItem.
        :rtype: int
        """
        return self._node
    @node.setter
    def node(self, node):
        """
        Sets the node of this SummaryClientClientItem.
        The node on which the operation was performed.

        :param node: The node of this SummaryClientClientItem.
        :type: int
        """
        self._node = node
    @property
    def num_operations(self):
        """
        Gets the num_operations of this SummaryClientClientItem.
        The number of times an operation has been performed.

        :return: The num_operations of this SummaryClientClientItem.
        :rtype: int
        """
        return self._num_operations
    @num_operations.setter
    def num_operations(self, num_operations):
        """
        Sets the num_operations of this SummaryClientClientItem.
        The number of times an operation has been performed.

        :param num_operations: The num_operations of this SummaryClientClientItem.
        :type: int
        """
        self._num_operations = num_operations
    @property
    def operation_rate(self):
        """
        Gets the operation_rate of this SummaryClientClientItem.
        The rate (in ops/second) at which an operation has been performed.

        :return: The operation_rate of this SummaryClientClientItem.
        :rtype: float
        """
        return self._operation_rate
    @operation_rate.setter
    def operation_rate(self, operation_rate):
        """
        Sets the operation_rate of this SummaryClientClientItem.
        The rate (in ops/second) at which an operation has been performed.

        :param operation_rate: The operation_rate of this SummaryClientClientItem.
        :type: float
        """
        self._operation_rate = operation_rate
    @property
    def out(self):
        """
        Gets the out of this SummaryClientClientItem.
        Rate of output (in bytes/second) for an operation since the last time isi statistics collected the data.

        :return: The out of this SummaryClientClientItem.
        :rtype: float
        """
        return self._out
    @out.setter
    def out(self, out):
        """
        Sets the out of this SummaryClientClientItem.
        Rate of output (in bytes/second) for an operation since the last time isi statistics collected the data.

        :param out: The out of this SummaryClientClientItem.
        :type: float
        """
        self._out = out
    @property
    def out_avg(self):
        """
        Gets the out_avg of this SummaryClientClientItem.
        Average output (sent) bytes for an operation, in bytes.

        :return: The out_avg of this SummaryClientClientItem.
        :rtype: float
        """
        return self._out_avg
    @out_avg.setter
    def out_avg(self, out_avg):
        """
        Sets the out_avg of this SummaryClientClientItem.
        Average output (sent) bytes for an operation, in bytes.

        :param out_avg: The out_avg of this SummaryClientClientItem.
        :type: float
        """
        self._out_avg = out_avg
    @property
    def out_max(self):
        """
        Gets the out_max of this SummaryClientClientItem.
        Maximum output (sent) bytes for an operation, in bytes.

        :return: The out_max of this SummaryClientClientItem.
        :rtype: float
        """
        return self._out_max
    @out_max.setter
    def out_max(self, out_max):
        """
        Sets the out_max of this SummaryClientClientItem.
        Maximum output (sent) bytes for an operation, in bytes.

        :param out_max: The out_max of this SummaryClientClientItem.
        :type: float
        """
        self._out_max = out_max
    @property
    def out_min(self):
        """
        Gets the out_min of this SummaryClientClientItem.
        Minimum output (sent) bytes for an operation, in bytes.

        :return: The out_min of this SummaryClientClientItem.
        :rtype: float
        """
        return self._out_min
    @out_min.setter
    def out_min(self, out_min):
        """
        Sets the out_min of this SummaryClientClientItem.
        Minimum output (sent) bytes for an operation, in bytes.

        :param out_min: The out_min of this SummaryClientClientItem.
        :type: float
        """
        self._out_min = out_min
    @property
    def protocol(self):
        """
        Gets the protocol of this SummaryClientClientItem.
        The protocol of the operation.

        :return: The protocol of this SummaryClientClientItem.
        :rtype: str
        """
        return self._protocol
    @protocol.setter
    def protocol(self, protocol):
        """
        Sets the protocol of this SummaryClientClientItem.
        The protocol of the operation.

        :param protocol: The protocol of this SummaryClientClientItem.
        :type: str
        """
        self._protocol = protocol
    @property
    def remote_addr(self):
        """
        Gets the remote_addr of this SummaryClientClientItem.
        The IP address (in dotted-quad form) of the host sending the operation request.

        :return: The remote_addr of this SummaryClientClientItem.
        :rtype: str
        """
        return self._remote_addr
    @remote_addr.setter
    def remote_addr(self, remote_addr):
        """
        Sets the remote_addr of this SummaryClientClientItem.
        The IP address (in dotted-quad form) of the host sending the operation request.

        :param remote_addr: The remote_addr of this SummaryClientClientItem.
        :type: str
        """
        self._remote_addr = remote_addr
    @property
    def remote_name(self):
        """
        Gets the remote_name of this SummaryClientClientItem.
        The resolved text name of the RemoteAddr, if resolution can be performed.

        :return: The remote_name of this SummaryClientClientItem.
        :rtype: str
        """
        return self._remote_name
    @remote_name.setter
    def remote_name(self, remote_name):
        """
        Sets the remote_name of this SummaryClientClientItem.
        The resolved text name of the RemoteAddr, if resolution can be performed.

        :param remote_name: The remote_name of this SummaryClientClientItem.
        :type: str
        """
        self._remote_name = remote_name
    @property
    def time(self):
        """
        Gets the time of this SummaryClientClientItem.
        Unix Epoch time in seconds of the request.

        :return: The time of this SummaryClientClientItem.
        :rtype: int
        """
        return self._time
    @time.setter
    def time(self, time):
        """
        Sets the time of this SummaryClientClientItem.
        Unix Epoch time in seconds of the request.

        :param time: The time of this SummaryClientClientItem.
        :type: int
        """
        self._time = time
    @property
    def time_avg(self):
        """
        Gets the time_avg of this SummaryClientClientItem.
        The average elapsed time (in microseconds) taken to complete an operation.

        :return: The time_avg of this SummaryClientClientItem.
        :rtype: float
        """
        return self._time_avg
    @time_avg.setter
    def time_avg(self, time_avg):
        """
        Sets the time_avg of this SummaryClientClientItem.
        The average elapsed time (in microseconds) taken to complete an operation.

        :param time_avg: The time_avg of this SummaryClientClientItem.
        :type: float
        """
        self._time_avg = time_avg
    @property
    def time_max(self):
        """
        Gets the time_max of this SummaryClientClientItem.
        The maximum elapsed time (in microseconds) taken to complete an operation.

        :return: The time_max of this SummaryClientClientItem.
        :rtype: float
        """
        return self._time_max
    @time_max.setter
    def time_max(self, time_max):
        """
        Sets the time_max of this SummaryClientClientItem.
        The maximum elapsed time (in microseconds) taken to complete an operation.

        :param time_max: The time_max of this SummaryClientClientItem.
        :type: float
        """
        self._time_max = time_max
    @property
    def time_min(self):
        """
        Gets the time_min of this SummaryClientClientItem.
        The minimum elapsed time (in microseconds) taken to complete an operation.

        :return: The time_min of this SummaryClientClientItem.
        :rtype: float
        """
        return self._time_min
    @time_min.setter
    def time_min(self, time_min):
        """
        Sets the time_min of this SummaryClientClientItem.
        The minimum elapsed time (in microseconds) taken to complete an operation.

        :param time_min: The time_min of this SummaryClientClientItem.
        :type: float
        """
        self._time_min = time_min
    @property
    def user(self):
        """
        Gets the user of this SummaryClientClientItem.
        User issuing the operation.

        :return: The user of this SummaryClientClientItem.
        :rtype: GroupsGroupMember
        """
        return self._user
    @user.setter
    def user(self, user):
        """
        Sets the user of this SummaryClientClientItem.
        User issuing the operation.

        :param user: The user of this SummaryClientClientItem.
        :type: GroupsGroupMember
        """
        self._user = user
    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        # getattr(self, attr) goes through the properties above, so the
        # mangled backing names for _class/_in are irrelevant here.
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            else:
                result[attr] = value
        return result
    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())
    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()
    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        # NOTE(review): no isinstance check — an unrelated object with the
        # same __dict__ compares equal, and objects without a __dict__
        # (e.g. ints) raise AttributeError instead of returning False.
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
|
import pytest
from time import sleep
from pages.signuppage import SignUpPage
from webdriver_manager.chrome import ChromeDriverManager
from info.info import TestData
from selenium import webdriver
class TestSignUPage(object):
    """Selenium UI tests for the sign-up page.

    Each test drives a real Chrome browser. The registration tests are
    currently skipped and use fixed sleep()s rather than explicit waits —
    presumably to leave time for manual steps; confirm before re-enabling.
    """
    @pytest.fixture()
    def setup(self):
        # Fresh browser per test; quit() after the yield is the teardown.
        self.driver = webdriver.Chrome(ChromeDriverManager().install())
        self.signupPage = SignUpPage(self.driver)
        self.signupPage.driver.maximize_window()
        yield
        self.signupPage.driver.quit()
    # Business rules under test, kept as informal note strings:
    """Age must be 13 or older"""
    """Password must have 12 chars (letters) at least"""
    """No field can be blank, all required"""
    @pytest.mark.skip
    def test_register1(self, setup):
        """Happy path: valid data should land on the check-your-email page."""
        print("\nAll info correct")
        self.signupPage.register("Abdelrahman", "Tarek", "21", "abdelrahman-tarek@outlook.com", "qwertyuiopas123")
        # NOTE(review): fixed 30s/10s sleeps make these slow and flaky;
        # consider WebDriverWait with an expected condition.
        sleep(30)
        self.signupPage.click_signup_button()
        sleep(10)
        assert self.signupPage.page_url() == TestData.CHECK_EMAIL_URL
    @pytest.mark.skip
    def test_register2(self, setup):
        """Age below the minimum (13) must stay on the sign-up page."""
        print("\nAge below 13")
        self.signupPage.register("George", "Joseph", "12", "george.joseph2896@gmail.com", "georgejoseph12345")
        sleep(30)
        self.signupPage.click_signup_button()
        sleep(10)
        assert self.signupPage.page_url() == TestData.SIGNUP_URL
    @pytest.mark.skip
    def test_register3(self, setup):
        """Boundary case: age exactly 13 is accepted."""
        print("\nAge is 13 exactly")
        self.signupPage.register("George", "Joseph", "13", "george.joseph2896@gmail.com", "georgejoseph12345")
        sleep(50)
        self.signupPage.click_signup_button()
        sleep(10)
        assert self.signupPage.page_url() == TestData.CHECK_EMAIL_URL
    @pytest.mark.skip
    def test_register4(self, setup):
        """Registering with an already-used email is rejected."""
        print("\nAlredy used email")  # (sic) typo is in the log text only
        self.signupPage.register("George", "Joseph", "20", "george.joseph2896@gmail.com", "georgejoseph12345")
        sleep(30)
        self.signupPage.click_signup_button()
        sleep(10)
        assert self.signupPage.page_url() == TestData.SIGNUP_URL
    @pytest.mark.skip
    def test_register5(self, setup):
        """A password shorter than 12 characters is rejected."""
        print("\nPassword less than 12 chars")
        self.signupPage.register("George", "Joseph", "12", "george_eight@hotmail.com", "george12")
        sleep(30)
        self.signupPage.click_signup_button()
        sleep(10)
        assert self.signupPage.page_url() == TestData.SIGNUP_URL
    @pytest.mark.skip
    def test_register6(self, setup):
        """An all-blank submission is rejected."""
        print("\nBlank fields")
        self.signupPage.register("", "", "", "", "")
        sleep(30)
        self.signupPage.click_signup_button()
        sleep(10)
        assert self.signupPage.page_url() == TestData.SIGNUP_URL
    def test_terms_link(self, setup):
        """Terms link opens the Terms of Service in a new tab."""
        # NOTE(review): when the element is never clickable these link tests
        # silently pass without asserting anything.
        if self.signupPage.element_clickable(self.signupPage.TERMS_LINK, 2):
            self.signupPage.click_terms_link()
            # The link opens a new tab; switch to it before reading the URL.
            self.driver.switch_to.window(self.driver.window_handles[1])
            sleep(5)
            assert self.signupPage.page_url() == TestData.TERMS_URL
            print("\nOpens Terms of services page in a new tab")
    def test_privacy_link(self, setup):
        """Privacy link opens the Privacy Policy in a new tab."""
        if self.signupPage.element_clickable(self.signupPage.PRIVACY_LINK, 2):
            self.signupPage.click_privacy_link()
            self.driver.switch_to.window(self.driver.window_handles[1])
            sleep(5)
            assert self.signupPage.page_url() == TestData.PRIVACY_URL
            print("\nOpens Privacy Policy page in a new tab")
    def test_help_link(self, setup):
        """Help link opens the Help page in a new tab."""
        if self.signupPage.element_clickable(self.signupPage.HELP_LINK, 2):
            self.signupPage.click_help_link()
            self.driver.switch_to.window(self.driver.window_handles[1])
            sleep(5)
            assert self.signupPage.page_url() == TestData.HELP_URL
            print("\nOpens Help page in a new tab")
    def test_login_link(self, setup):
        """Login link navigates to the sign-in page in the same tab."""
        if self.signupPage.element_clickable(self.signupPage.LOGIN_LINK, 2):
            self.signupPage.click_login()
            sleep(5)
            assert self.signupPage.page_url() in TestData.LOGIN_URL
            print("\nOpens sign in page")
|
# -*- coding:utf-8 -*-
# Member name -> [instagram_id, group, team, number].
# NOTE(review): element meanings are inferred from the values themselves —
# the first entry is an Instagram account id, the second a group tag
# (SNH/BEJ/GNZ/HKT), the third a team label; the meaning of the trailing
# number is not evident from this file — confirm before relying on it.
name_dic = {u'刘增艳': ['fueiru_aki', 'SNH', 'XII', 5], u'陈音': ['chole_yin1205', 'SNH', 'XII', 5],
            u'孙珊': ['superss0211', 'BEJ', 'B', 6], u'洪珮雲': ['realf_airy', 'SNH', 'XII', 5],
            u'费沁源': ['nemo_fqy', 'SNH', 'XII', 5], u'林忆宁': ['lyn_erika', 'SNH', 'X', 6],
            u'万丽娜': ['wannanana27', 'SNH', 'NII', 2], u'宫脇咲良': ['39saku_chan', 'HKT', '', ''],
            u'易嘉爱': ['oneaddtwo', 'SNH', 'NII', 2], u'邱欣怡': ['00_wanwan', 'SNH', 'SII', 1],
            u'宋昕冉': ['camilla.sxr', 'SNH', 'X', 4], u'鞠婧祎': ['kikuchanj', 'SNH', 'NII', 2],
            u'杨惠婷': ['saltgrapefruit', 'SNH', 'HII', 3], u'黄婷婷': ['kotetehtt', 'SNH', 'NII', 2],
            u'赵嘉敏': ['savokiiiii', 'SNH', 'SII', 1], u'张语格': ['zhangyugedeshengrishi0511', 'SNH', 'SII', 1],
            u'向芸': ['sharenbabyoo', 'GNZ', 'G', 7], u'刘嘉怡': ['mikarin7770719', 'GNZ', 'Z', 7],
            u'冯薪朵': ['nanashi_ike', 'SNH', 'NII', 2], u'莫寒': ['momo_0v0', 'SNH', 'SII', 1],
            u'许杨玉琢': ['eliwa925', 'SNH', 'HII', 3], u'龚诗淇': ['17_pace', 'SNH', 'NII', 2],
            u'林楠': ['ssii_lll', 'SNH', 'HII', 3], u'袁雨桢': ['junni_0110', 'SNH', 'SII', 2],
            u'曾艾佳': ['liontsang_iris', 'GNZ', 'G', 5], u'冯晓菲': ['wwwwwhhhhhhhhh', 'SNH', 'X', 4],
            u'王晓佳': ['skygrassaaa', 'SNH', 'X', 4], u'李钊': ['minegishimomoko', 'SNH', 'X', 4],
            u'汪束': ['flfeilan', 'SNH', 'X', 4], u'杨冰怡': ['suiyyybysui', 'SNH', 'X', 4],
            u'冯思佳': ['kamisamaforever16', 'BEJ', 'E', 6], u'李清扬': ['kumamaovo', 'SNH', 'X', 4],
            u'张嘉予': ['muuuu_cheryl', 'SNH', 'X', 6], u'袁航': ['yoyh_rich', 'SNH', 'HII', 5],
            u'刘胜男': ['milk_seagull', 'BEJ', 'E', 6], u'顼凘炀': ['xiaoxingxing322', 'BEJ', 'E', 6],
            u'张怡': ['yokolizi', 'SNH', 'XII', 5], u'马玉灵': ['mylllllll', 'BEJ', 'E', 6],
            u'罗雪丽': ['sherry_23333', 'BEJ', 'E', 6], u'李想': [r'llixxiang_2.555', 'BEJ', 'E', 6],
            u'李诗彦': ['lsysy_sy', 'BEJ', 'E', 6], u'陈韫凌': ['cyllling_1213', 'SNH', 'XII', 5],
            u'李晶': ['xykxdlj', 'SNH', 'X', 4], u'张昕': ['kimberleyyxi', 'SNH', 'HII', 3],
            u'陈琳': ['lynn_chenlinn', 'SNH', 'X', 4], u'赵粤': ['akira1995', 'SNH', 'NII', 2],
            u'李艺彤': ['whitehairpin', 'SNH', 'NII', 2], u'李宇琪': ['yuqi_mao', 'SNH', 'SII', 1],
            u'许佳琪': ['hellokiki77', 'SNH', 'SII', 1], u'陆婷': ['kxxlisalisa', 'SNH', 'NII', 2],
            u'孙芮': ['ssssssssssr_', 'SNH', 'SII', 2], u'戴萌': ['diamooonddd', 'SNH', 'SII', 1],
            u'何晓玉': ['h.x.y_1031', 'SNH', 'NII', 2], u'孔肖吟': ['kgxxxxxxy', 'SNH', 'SII', 1],
            u'钱蓓婷': ['_mmmmmoney', 'SNH', 'SII', 1], u'董艳芸': ['cloud_yuki', 'SNH', 'NII', 2],
            u'陈佳莹': ['adding93', 'SNH', 'NII', 2], u'陈问言': ['yannis_cwy', 'SNH', 'NII', 2],
            u'陈观慧': ['abccefggg', 'SNH', 'SII', 1], u'温晶婕': ['skyla_charlotte', 'SNH', 'SII', 2],
            u'罗兰': ['compusrola', 'SNH', 'NII', 2], u'徐晗': ['kamexuuuh_', 'SNH', 'HII', 3],
            u'王璐': ['lucy_96520', 'SNH', 'HII', 3], u'刘佩鑫': ['unaka_', 'SNH', 'HII', 3],
            u'袁丹妮': ['_danni_koala', 'SNH', 'HII', 3], u'徐伊人': ['xuxuxuyiryiryir', 'SNH', 'HII', 3],
            u'刘炅然': ['monster09.02', 'SNH', 'HII', 3], u'孙歆文': ['news_sxw', 'SNH', 'X', 4],
            u'张丹三': ['tansan_3', 'SNH', 'X', 4],
            }
# Instagram endpoints used by the scraper.
BASE_URL = 'https://www.instagram.com/'
LOGIN_URL = BASE_URL + 'accounts/login/ajax/'
LOGOUT_URL = BASE_URL + 'accounts/logout/'
MEDIA_URL = BASE_URL + '{0}/media'
# Desktop Chrome user-agent string sent with every request.
USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) " \
             "Chrome/56.0.2924.87 Safari/537.36"
|
"""
Uzrakstiet programmu Python, lai pārbaudītu,
vai vairākiem ievadītajiem mainīgajiem ir vienāda vērtība.
"""
"""
a = 30
b = 40
c = 50
# method 1
if a == 10 or b == 10 or c == 10:
print("True")
else:
print("False")
# method 2
if 10 in (a, b, c):
print("True")
else:
print("False")
# method 3
if 10 in {a, b, c}:
print("True")
else:
print("False")
"""
"""Kļūdu labojums:"""
a=float(input("Ievadi pirmo skaitli: "))
b=float(input("Ievadi otro skaitli: "))
c=float(input("Ievadi trešo skaitli: "))
if a==b==c:
print("Skaitļu vērtība ir vienāda!")
else:
print("Skaitļu bērtība nav vienāda!")
"""Pamainīju formulu"""
|
# Read four integers into a tuple and report:
#   * how many times the number 9 appears,
#   * the (1-based) position of the first 3, if present,
#   * which of the values are even.
t = (int(input('Digite um número: ')),
     int(input('Digite um número: ')),
     int(input('Digite um número: ')),
     int(input('Digite um número: ')))
# Fixed typo in the output message: "aparecu" -> "apareceu".
print(f'O número 9 apareceu {t.count(9)} vezes.')
if 3 in t:
    print(f'O primeiro número 3 está na posição {t.index(3) + 1}')
else:
    print('Não possui número 3.')
print('os números pares foram: ', end='')
for valor in t:
    if valor % 2 == 0:
        print(valor, end=' ')
|
def clean(data):
    """
    Clean a results DataFrame and normalise its time strings.

    * Drops rows whose ``Result`` is missing (``None``).
    * Drops the useless ``Unnamed: 8`` column.
    * Normalises odd time formats in ``Result``: 'h' and '-' become ':',
      'est' and 'P.' markers are removed, whitespace is stripped.

    Parameters:
        data: DataFrame with at least 'Result' and 'Unnamed: 8' columns.

    Returns:
        A new DataFrame with cleaned 'Result' values.
    """
    data['Result'] = data['Result'].astype(str)
    data = data[data['Result'] != 'None']
    data = data.drop(['Unnamed: 8'], axis=1)
    # regex=False pins literal replacement: under the older pandas default
    # (regex=True) 'P.' matched 'P' plus ANY following character, and the
    # default itself changed in pandas 2.0 — be explicit and version-stable.
    data['Result'] = data['Result'].str.replace('h', ':', regex=False)
    data['Result'] = data['Result'].str.replace('est', '', regex=False)
    data['Result'] = data['Result'].str.replace('-', ':', regex=False)
    data['Result'] = data['Result'].str.replace('P.', '', regex=False)
    data['Result'] = data['Result'].str.strip()
    return data
|
# -*- coding: utf-8 -*-
'''
Created on Sep 6, 2012
@author: YuqiChou
'''
from db.page import DEFAULT_PAGE_SIZE
def global_list_per_page(context):
    """Template context processor exposing the default page size."""
    return dict(GLOBAL_LIST_PER_PAGE=DEFAULT_PAGE_SIZE)
|
from bottle import run, default_app, template, request, response
from bs4 import BeautifulSoup
import requests
import urllib.parse
import json
# WSGI application object used by the route decorators below.
APP = default_app()
# Form payload anime1.me expects before it will serve an episode page.
PssWd = {'acpwd-pass': 'anime1.me'}
def drive(d):
    """
    Resolve a Google Drive file id to its name and direct-download URL.

    Parameters:
        d: Google Drive file id string.

    Returns:
        ``(fileName, downloadUrl)`` on success, or ``''`` on any failure
        (the empty-string failure value is kept for existing callers).
    """
    BaseUrl = 'https://drive.google.com/uc?export=download&id='
    url = BaseUrl + d
    try:
        r = requests.post(url).text
        # The response's last line is a JSON document describing the file.
        j = json.loads(r.split('\n').pop())
        fileName = j['fileName']
        downloadUrl = j['downloadUrl']
    except Exception:
        # Intentionally broad (best-effort scraping), but narrowed from a
        # bare `except:` which also swallowed SystemExit/KeyboardInterrupt.
        return ''
    return (fileName, downloadUrl)
def anone(u):
    """
    Resolve an anime1.me episode id to ``(fileName, downloadUrl)``.

    Parameters:
        u: numeric episode id as a string; non-numeric input yields ''.

    Returns:
        ``(fileName, downloadUrl)`` on success, ``''`` on any failure.
    """
    url = 'https://anime1.me/' + u
    try:
        int(u)  # reject non-numeric ids early (ValueError -> '')
        page = requests.post(url, data=PssWd).text
        soup = BeautifulSoup(page, 'lxml')
        for iframe in soup.find_all('iframe'):
            src = iframe['src']
            if 'https://drive.google.com/file/d/' in src:
                # Extract the Drive file id between '/d/' and the next '/'.
                b = src.find('/d/') + 3
                e = src.find('/', b)
                return drive(src[b: e])
            if 'https://p.anime1.me/?' in src:
                player = requests.get(src).text
                # The player page embeds the Drive id in a '|'-separated
                # blob; Drive ids start with '0B'.
                b = player.find('|0B') + 1
                e = player.find('|', b)
                return drive(player[b: e])
    except Exception:
        # Intentionally broad (best-effort scraping), but narrowed from a
        # bare `except:` which also swallowed SystemExit/KeyboardInterrupt.
        return ''
    return ''
# Dispatch table: form "method" value -> resolver function.
Method = {
    'drive_id': drive,
    'anime1_url': anone,
}
@APP.route('/')
def index():
    """Render the landing page with an empty result field."""
    return template('index', r='')
@APP.route('/', method='post')
def embed():
    """Dispatch the submitted form to the chosen resolver and render the result."""
    method_name = request.forms.get('method')
    payload = request.forms.get('context')
    return template('index', r=Method[method_name](payload))
@APP.route('/e.m3u8')
def redirect():
    """Issue a 302 redirect to the URL-decoded target from query param `u`."""
    target = urllib.parse.unquote(request.query.u)
    response.status = 302
    response.set_header('Location', target)
# Run the development server when executed directly (not under a WSGI host).
if __name__ == '__main__':
    run(application=APP)
|
# coding: utf-8
import functools
import os
import json
import jinja2
import bottle
class BaseApp(bottle.Bottle):
    """Bottle application base class with JSON-loadable config and Jinja2 templating."""

    # Lazily-created Jinja2 environment (name-mangled to _BaseApp__jinja2_env).
    __jinja2_env = None
    # Backing store for the `config` property.
    _config = None
    # Let exceptions propagate instead of Bottle's catch-all error page.
    catchall = False
    # Fallback used by the `config` property before anything is assigned.
    DEFAULT_CONFIG = {}

    def __init__(self, config=None):
        # NOTE(review): bottle.Bottle.__init__ is deliberately not called;
        # the Bottle internals are re-created by hand — confirm this stays
        # in sync with the bottle version in use.
        self.routes = []
        self.router = bottle.Router()
        self.resources = bottle.ResourceManager()
        self.error_handler = {}
        self.plugins = []
        self.config = config

    @property
    def _jinja2_env(self):
        """Jinja2 environment, created on first access and cached."""
        if self.__jinja2_env is None:
            self.__jinja2_env = jinja2.Environment(
                loader=jinja2.PackageLoader("lglass.web", "templates"))
            # Expose the module-level URL helpers as template filters.
            self.__jinja2_env.filters["obj_urlize"] = obj_urlize
            self.__jinja2_env.filters["obj_deurlize"] = obj_deurlize
        return self.__jinja2_env

    @_jinja2_env.setter
    def _jinja2_env(self, new_value):
        # Allow tests/subclasses to inject a pre-built environment.
        self.__jinja2_env = new_value

    @property
    def config(self):
        """Application config; falls back to DEFAULT_CONFIG on first read."""
        if self._config is None:
            self.config = self.DEFAULT_CONFIG
        return self._config

    @config.setter
    def config(self, new_value):
        # Accept a JSON string, a readable file-like object, or a plain value.
        if isinstance(new_value, str):
            self._config = json.loads(new_value)
        elif hasattr(new_value, "read"):
            self._config = json.load(new_value)
        else:
            self._config = new_value

    def render_template(self, tpl, **kwargs):
        """Render template `tpl` with `kwargs` as the template context."""
        return self._jinja2_env.get_template(tpl).render(kwargs)

    @property
    def request(self):
        """Convenience accessor for the current bottle request object."""
        return bottle.request
def obj_urlize(str):
    """Make an object name URL-safe by replacing '/' with '_'."""
    # NOTE(review): parameter shadows the builtin `str`; rename once callers
    # are confirmed to pass it positionally only.
    return str.replace("/", "_")
def obj_deurlize(str):
    """Inverse of obj_urlize: restore '/' from '_'."""
    # NOTE(review): parameter shadows the builtin `str`; rename once callers
    # are confirmed to pass it positionally only.
    return str.replace("_", "/")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.