text stringlengths 8 6.05M |
|---|
Python 3.9.0 (tags/v3.9.0:9cf6752, Oct 5 2020, 15:34:40) [MSC v.1927 64 bit (AMD64)] on win32
Type "help", "copyright", "credits" or "license()" for more information.
>>> import turtle
>>> t=turtle.Turtle()
>>> t.pensize(5)
>>> t.shape("fish")
Traceback (most recent call last):
File "<pyshell#3>", line 1, in <module>
t.shape("fish")
File "C:\Users\ACER\AppData\Local\Programs\Python\Python39\lib\turtle.py", line 2777, in shape
raise TurtleGraphicsError("There is no shape named %s" % name)
turtle.TurtleGraphicsError: There is no shape named fish
>>> t.shape("pen")
Traceback (most recent call last):
File "<pyshell#4>", line 1, in <module>
t.shape("pen")
File "C:\Users\ACER\AppData\Local\Programs\Python\Python39\lib\turtle.py", line 2777, in shape
raise TurtleGraphicsError("There is no shape named %s" % name)
turtle.TurtleGraphicsError: There is no shape named pen
>>> t.shape("turtle")
>>> t.color("red")
>>> t.fillcolor("grey")
>>> t.begin_fill()
>>> t.forward(150)
>>> t.left(90)
>>> t.forward(150)
>>> t.forward(150)
>>> t.left(90)
>>> t.forward(150)
>>> t.left(90)
>>> t.forward(300)
>>> t.end_fill()
>>> |
import os
def helpPath():
    """Return the absolute path of the directory containing this file,
    which is where the help files are installed.
    """
    return os.path.dirname(os.path.abspath(__file__))
|
'''
The following JSON template shows what is sent as the payload:
{
"serialNumber": "GXXXXXXXXXXXXXXXXX",
"batteryVoltage": "xxmV",
"clickType": "SINGLE" | "DOUBLE" | "LONG"
}
A "LONG" clickType is sent if the first press lasts longer than 1.5 seconds.
"SINGLE" and "DOUBLE" clickType payloads are sent for short clicks.
For more documentation, follow the link below.
http://docs.aws.amazon.com/iot/latest/developerguide/iot-lambda-rule.html
'''
from __future__ import print_function
import boto3
import json
import logging
import os
import urllib2
# Module-level objects are created once per Lambda container and reused
# across warm invocations.
logger = logging.getLogger()
logger.setLevel(logging.INFO)
sns = boto3.client('sns')
ses = boto3.client('ses')
# Destination contacts come from the Lambda environment configuration;
# a missing variable raises KeyError at import time (fail fast).
phone_number = os.environ['phone_number']
email_address = os.environ['email_address']
# Check whether email is verified. Only verified emails are allowed to send emails to or from.
def check_email(email):
    """Return True when *email* is verified with SES.

    If the address is unknown or not yet verified, kick off an SES
    verification mail and return False (the caller should skip sending).
    """
    result = ses.get_identity_verification_attributes(Identities=[email])
    attr = result['VerificationAttributes']
    if (email not in attr or attr[email]['VerificationStatus'] != 'Success'):
        # FIX: use the configured module logger; the original called
        # logging.info on the root logger, bypassing the level set above.
        logger.info('Verification email sent. Please verify it.')
        ses.verify_email_identity(EmailAddress=email)
        return False
    return True
## Main treatment
def lambda_handler(event, context):
    """AWS Lambda entry point for an IoT button press.

    E-mails the full event to the configured address (when verified),
    then relays the subject line as an SMS through the Free Mobile
    gateway for two users.  ``context`` is the standard Lambda context
    object and is unused.
    """
    # FIX: removed an unused "import subprocess" the original did here.
    logger.info('Received event: ' + json.dumps(event))
    subject = 'Appui sur le bouton de la maison xxxxxxxxx. Mode %s.'% (event['clickType'])
    body_text = 'Here is the full event: %s' % json.dumps(event)
    ## In every case, send a mail (self-addressed).
    if not check_email(email_address):
        # FIX: use the module logger, consistent with the rest of the file.
        logger.error('Email is not verified')
    else:
        ses.send_email(Source=email_address,
                       Destination={'ToAddresses': [email_address]},
                       Message={'Subject': {'Data': subject}, 'Body': {'Text': {'Data': body_text}}})
        logger.info('Email has been sent')
    # An SNS escalation on clickType == DOUBLE existed here but was
    # disabled; SMS now always goes through the Free Mobile gateway.
    # NOTE(review): 'subject' is concatenated into the URL without
    # percent-encoding (spaces/accents); confirm the gateway tolerates
    # raw text before changing behaviour.
    # Send to user1
    urllib2.urlopen('https://smsapi.free-mobile.fr/sendmsg?user=xxxxxxxxx&pass=xxxxxxxxx&msg=' + subject).read()
    # Send to user2
    urllib2.urlopen('https://smsapi.free-mobile.fr/sendmsg?user=xxxxxxxxx&pass=xxxxxxxxx&msg=' + subject).read()
    logger.info('SMS has been sent')
|
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 19 00:34:09 2020
@author: anwar
"""
def get_stats(class_list):
    """Return a new list where each [name, grades] entry of *class_list*
    is extended to [name, grades, average_grade].
    """
    new_stats = []
    for elt in class_list:
        # BUG FIX: the original referenced the undefined name 'elf' here,
        # raising NameError on every call; 'elt' is the loop variable.
        new_stats.append([elt[0], elt[1], avg(elt[1])])
    return new_stats

def avg(grades):
    """Arithmetic mean of a non-empty sequence of grades."""
    return sum(grades)/len(grades)
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import urllib2
from bs4 import BeautifulSoup
import re
# Read one URL per line and parse each page with BeautifulSoup.
# NOTE(review): the file handle is never closed; a 'with open(...)'
# block would be safer.
f = open("/Users/zhoufengting/Desktop/possession1.txt","r")
lines = f.readlines()
for line in lines:
    # NOTE(review): each line still carries its trailing newline — the
    # URL probably needs line.strip() before fetching; confirm against
    # the data file.
    url = line
    request = urllib2.urlopen(url)
    response = request.read()
    soup = BeautifulSoup(response,"html.parser")
    # NOTE(review): find_all is referenced but never called — 'time'
    # ends up bound to the method object; likely missing arguments,
    # e.g. soup.find_all('time'). Script appears unfinished.
    time = soup.find_all
|
"""
MFreq w/ Atom Counter & f(Col_)
"""
import numpy as np
import scipy.stats as sts
import matplotlib.pyplot as plt
import scipy.constants as sc
import scipy.special as scp
import timeit
start = timeit.default_timer()  # wall-clock start for the run-time report

# --- Simulation parameters (SI units unless noted) ---
ymot = 0.004 # Radius of Col Atoms
G = 38.11e6 # Gamma (natural linewidth, rad/s) — see constants block below
Xi_D = 8.6   # base detuning in units of G
Xi_d = 0.15  # detuning increment in units of G
E0 = 280     # laser electric-field amplitude
Da = Xi_D*G # Original detuning
d = Xi_d*G # Increment of Detuning
print(Da,d)
aa = 0.15 # Coil Radius
s = 0.11 # Coil Separation
Curr = 0.6805 # Current (16G/cm @ 0.6805)
z0 = 0.24 # Position of MOT centre
h = 0.00002 # Step Size (s) for the RK4 integrator
Natoms = 150  # number of simulated atoms
Nsteps = 410  # integration steps per atom
Col_zPos = 0      # collimator position along z
Col_Gap = 0.002   # collimator hole diameter
'''If ^ > ymot then simulation is unphysical'''
J = 10e20                 # source flux scale
Q = J*(Col_Gap/2)**2      # flux through the collimator hole
print('E0, Da, d is {}, {}, {}'.format(E0,Xi_D,Xi_d))
# The seven laser detunings: Da plus six equal increments of d.
Db,Dc,Dd,De,Df,Dg = Da+d, Da+2*d, Da+3*d, Da+4*d, Da+5*d, Da+6*d
# NOTE(review): xx appears unused in the visible code.
xx = np.linspace(0.000001, 1, 100)
def y_vgen(n):
    """Evenly spaced transverse (y) velocities spanning [-vy, vy],
    where vy is the module-level transverse capture bound.
    """
    return np.linspace(-vy, vy, n)
def ygen(n):
    """Evenly spaced starting y-coordinates across the collimator gap
    (module-level Col_Gap).
    """
    half_gap = Col_Gap / 2
    return np.linspace(-half_gap, half_gap, n)
def Jgen(T,n,M):
    """Draw n speeds from a Maxwell-Boltzmann distribution.

    T -- temperature in kelvin
    n -- number of samples
    M -- particle mass in kg
    """
    # BUG FIX: the original immediately overwrote T with 273+20,
    # silently ignoring the caller's temperature; the parameter is
    # now honoured.
    MB=sts.maxwell
    kb=1.38e-23  # Boltzmann constant (J/K)
    a=abs(np.sqrt((kb*T)/(M)))  # Maxwell scale parameter
    vt=MB.rvs(loc=0, scale=a, size=n, random_state=None)
    return vt
Vn = 70   # lower bound of the linear speed distribution (m/s)
Vx = 260  # upper bound of the linear speed distribution (m/s)
def vgen(n):
    """Evenly spaced longitudinal speeds between the module-level
    bounds Vn and Vx.
    """
    return np.linspace(Vn, Vx, n)
def zgen(n):
    """All atoms start at the source: return n zeros (z-coordinates)."""
    return np.zeros(n)
def yrand(n):
    """n uniform random y-positions spanning (-Col_Gap/2, Col_Gap/2)."""
    centered = np.random.random(n) - 0.5
    return centered * Col_Gap
def MBrand(n):
    """Draw n speeds for 87Rb at 300 K by inverse survival-function
    sampling of the Maxwell-Boltzmann distribution.
    """
    uniforms = np.random.rand(n)          # one RNG draw, as in the original
    mass = 87 * sc.proton_mass            # mass of 87Rb (kg)
    mb_scale = abs(np.sqrt((sc.Boltzmann * 300) / mass))
    return sts.maxwell.isf(uniforms, scale=mb_scale)
#v_ = 0.5*(max(MBrand(Natoms))+min(MBrand(Natoms))) #Mean Velocity
v_ = np.mean(MBrand(Natoms))  # mean thermal speed of a sampled ensemble
# Transverse-velocity capture bound from the source/MOT geometry.
vy = ((ymot-Col_Gap/2)*v_)/(z0**2+(ymot-Col_Gap/2)**2)**0.5
def Vyrand(n):
    """n uniform random transverse velocities in (-vy, vy)."""
    centered = np.random.random(n) - 0.5
    return centered * vy * 2
#q = Q*(1-np.cos(np.arctan((vy/v_))))
#print('q , Q =',q,Q)
# The MagLeak coil-field model below is intentionally disabled (kept as a
# dead string); the dv() force equation also has its Zeeman terms
# commented out to match.
'''
def MagLeak(z, z0, Curr):
#Mag Field from AntiHlmHltz coils (of center z0 [ >0 ]) that leaks into our slower
x = s/2
ZZ = -z+z0
zz = -ZZ
A,B = ZZ/aa, x/aa
Q = B**2+(1+A)**2
k = (4*A/Q)**0.5
B0 = Curr*sc.mu_0/(2*aa)
K = scp.ellipk(k**2)
E = scp.ellipe(k**2)
Br = 2*B0*(x/ZZ)/(np.pi*Q**0.5)*(E*(1+A**2+B**2)/(Q-4*A)-K)
Bro = np.nan_to_num(Br)
#
A_ = zz/aa
Q_ = B**2+(1+A_)**2
k_ = (4*A_/Q_)**0.5
K_ = scp.ellipk(k_**2)
E_ = scp.ellipe(k_**2)
Br_ = -2*B0*(x/zz)/(np.pi*Q_**0.5)*(E_*(1+A_**2+B**2)/(Q_-4*A_)-K_)
Br_o = np.nan_to_num(Br_)
return Br_o + Bro
'''
def RK4step(ti,zi,vi,h,dv,dz):
    """Advance (z, v) by one classical 4th-order Runge-Kutta step.

    ti -- current time, h -- step size
    dz(t, z, v) -- derivative of z; dv(t, z, v) -- derivative of v
    Returns the updated (z, v) pair.
    """
    # Stage 1: slopes at the start of the interval.
    kz1 = dz(ti, zi, vi)
    kv1 = dv(ti, zi, vi)
    # Stages 2 and 3: slopes at the midpoint, using successive estimates.
    kz2 = dz(ti + h/2, zi + (h/2)*kz1, vi + (h/2)*kv1)
    kv2 = dv(ti + h/2, zi + (h/2)*kz1, vi + (h/2)*kv1)
    kz3 = dz(ti + h/2, zi + (h/2)*kz2, vi + (h/2)*kv2)
    kv3 = dv(ti + h/2, zi + (h/2)*kz2, vi + (h/2)*kv2)
    # Stage 4: slopes at the end of the interval.
    kz4 = dz(ti + h, zi + h*kz3, vi + h*kv3)
    kv4 = dv(ti + h, zi + h*kz3, vi + h*kv3)
    # Weighted average of the four slope estimates.
    z_next = zi + (h/6.0)*(kz1 + 2.0*kz2 + 2.0*kz3 + kz4)
    v_next = vi + (h/6.0)*(kv1 + 2.0*kv2 + 2.0*kv3 + kv4)
    return z_next, v_next
""" Physical & Atomic Constants """
kb = sc.Boltzmann # Boltzmann Constant
mu0 = sc.mu_0 # Vacc Permtivity
muB = 9.2740099*10**-24 # Borh Magnetron
hbar = sc.hbar # hbar
c = sc.c # speed of light
pi = np.pi # pi
u = sc.proton_mass # Proton Mass
M = 87*u # Mass of 87Rb
wab = 2*pi*384.23e12 # Freq of transation
#G = 38.11e6 # Gamma / Rate of SpE
dip = 3.485e-29 # dipole moment
q =1
''' Variable Dance '''
Rabi = dip*E0/hbar # Rabi Frequency
IoIs = 2*Rabi**2/G**2
print('IoIs',IoIs) # Intensity / Saturation Intensity
IrE = c*8.85e-12/2*E0**2/10000 # Intensity (This /10000 makes it W/cm^2)
w = wab - Dd # Average Freq of colliding photon
Lambda = 2*pi*c/w # Avg Wavelength
k = 2*pi/Lambda # Average wavenumber of a momentum transfering photon
print(k,'k')
print(Rabi)
print((wab-Da)**2/G**2)
def dv4y(t,z,v):
    """Transverse acceleration: the slower applies no force along y,
    so the RK4 integrator sees a zero derivative for vy.
    """
    return 0
def dv(t,z,v):
    """Acceleration on an atom at position z moving at speed v from a
    seven-frequency, one-dimensional slower beam.

    Each frequency contributes a Doppler-shifted Lorentzian scattering
    term; the sum rhoaa is converted to an acceleration via the photon
    momentum hbar*k and scattering rate G.  The magnetic-field (Zeeman)
    corrections are commented out, matching the disabled MagLeak model.
    t is accepted for the RK4 interface but unused.
    """
    # Absolute laser frequencies for the seven detunings Da..Dg.
    w_a = wab - Da
    w_b = wab - Db
    w_c = wab - Dc
    w_d = wab - Dd
    w_e = wab - De
    w_f = wab - Df
    w_g = wab - Dg
    # Per-beam wavenumber-like factors (frequency / 2*pi*c); the
    # commented tails are the disabled Zeeman shifts.
    Oa = w_a/(2*pi*c)#-muB*MagLeak(z, z0, Curr)/hbar
    Ob = w_b/(2*pi*c)#-muB*MagLeak(z, z0, Curr)/hbar
    Oc = w_c/(2*pi*c)#-muB*MagLeak(z, z0, Curr)/hbar
    Od = w_d/(2*pi*c)#-muB*MagLeak(z, z0, Curr)/hbar
    Oe = w_e/(2*pi*c)#-muB*MagLeak(z, z0, Curr)/hbar
    Of = w_f/(2*pi*c)#-muB*MagLeak(z, z0, Curr)/hbar
    Og = w_g/(2*pi*c)#-muB*MagLeak(z, z0, Curr)/hbar
    # Lorentzian denominator coefficients per beam:
    # c1 = constant term, c2 = Doppler cross-term, c3 = v^2 term.
    c1a = 1+IoIs+4*Da**2/G**2
    c2a = Oa*8*Da/G**2
    c1b = 1+IoIs+4*Db**2/G**2
    c2b = Ob*8*Db/G**2
    c1c = 1+IoIs+4*Dc**2/G**2
    c2c = Oc*8*Dc/G**2
    c1d = 1+IoIs+4*Dd**2/G**2
    c2d = Od*8*Dd/G**2
    c1e = 1+IoIs+4*De**2/G**2
    c2e = Oe*8*De/G**2
    c1f = 1+IoIs+4*Df**2/G**2
    c2f = Of*8*Df/G**2
    c1g = 1+IoIs+4*Dg**2/G**2
    c2g = Og*8*Dg/G**2
    c3a = 4*Oa**2/G**2
    # print(c1g,c2g,c3a)
    c3b = 4*Ob**2/G**2
    c3c = 4*Oc**2/G**2
    c3d = 4*Od**2/G**2
    c3e = 4*Oe**2/G**2
    c3f = 4*Of**2/G**2
    c3g = 4*Og**2/G**2
    # Sum of the seven excited-state population terms (negative sign:
    # the force opposes the atom's motion toward the beam).
    rhoaa = -(IoIs/2)*(1/(c1a-c2a*v+c3a*v**2)+1/(c1b-c2b*v+c3b*v**2)+1/(c1c-c2c*v+c3c*v**2)+1/(c1d-c2d*v+c3d*v**2)+1/(c1e-c2e*v+c3e*v**2)+1/(c1f-c2f*v+c3f*v**2)+1/(c1g-c2g*v+c3g*v**2))
    # Convert scattering to acceleration: photon momentum * rate / mass.
    return rhoaa*hbar*k*G/M
# Sanity check: print the force at three representative speeds.
print('F', dv(2,2,100))
print('F', dv(2,2,300))
print('F', dv(2,2,500))
def dz(t,z,v):
    """Kinematic relation dz/dt = v (t and z accepted for the RK4
    interface but unused).
    """
    return v
plt.close('all')
# Two stacked panels sharing the z axis: trajectories (top) and
# velocity-vs-position (bottom).
fig = plt.figure()
ax1 = plt.subplot2grid((2,1), (0,0))#, rowspan=2)
ax2 = plt.subplot2grid((2,1), (1,0),sharex=ax1)
""" s"""
# Initial conditions for every atom.
zlin=zgen(Natoms)
yran = yrand(Natoms)
Vyran = Vyrand(Natoms)
vran = MBrand(Natoms)
print('begining Loop 1')
zs,vs,ts=[],[],[]
ys,yvs=[],[]
"""this loop goes through all the atoms we've got and applies the force dv to them for a number of steps, Nsteps"""
for j in range(Natoms):
    vi = vran[j]
    zi = zlin[j]
    yvi= Vyran[j]
    yi = yran[j]
    for i in range(Nsteps):
        ti=h*i
        # Record the state *before* stepping so row j holds Nsteps samples.
        zs.append(zi)
        vs.append(vi)
        ts.append(ti)
        ys.append(yi)
        yvs.append(yvi)
        # Longitudinal motion feels the slower force; transverse motion
        # is force-free (dv4y returns 0).
        z1,v1 = RK4step(ti,zi,vi,h,dv,dz)
        y1,yv1 = RK4step(ti,yi,yvi,h,dv4y,dz)
        yvi = yv1
        yi = y1
        zi = z1
        vi = v1
print('done Loop 1')
# Reshape the flat histories into (atom, step) arrays.
Y = np.reshape(ys, (Natoms,Nsteps))
V = np.reshape(vs, (Natoms,Nsteps))
Z = np.reshape(zs, (Natoms,Nsteps))
tt = np.array(ts)
thet = np.split(tt, Natoms)[1]  # one atom's time axis
#Top, Thicc = 0.002, 0.003
#ax1.bar(Col_zPos, Top, Thicc, bottom= Col_Gap/2, color='k')
#ax1.bar(Col_zPos,-Top, Thicc, bottom=-Col_Gap/2, color='k')
#print(Y, Y.shape)
#ax2.subplots_adjust(hspace=0)
# Capture test per atom per step: inside the axial MOT window, within the
# MOT radius transversely, and slower than the capture velocity capv.
z_ , z__ = z0 - ymot, z0 + ymot   # axial window around the MOT centre
y_ = 0.01
capV, capv = 50,15                # outer / inner velocity bounds (m/s)
print('begin Loop 2')
# PERF FIX: build a Python list and convert once at the end — the
# original grew a NumPy array with np.append inside the double loop,
# which copies the whole array every iteration (O(n^2)).
n_list = []
for j in range(Natoms):
    for i in range(Nsteps):
        if (z_ < Z[j][i] < z__ and abs(Y[j][i]) < ymot and abs(V[j][i]) < capv):
            n_list.append(2)   # flag: captured at this step
        else:
            n_list.append(0)
# float dtype matches what np.append on an empty array produced.
n_ = np.asarray(n_list, dtype=float)
print('done Loop 2')
N = np.reshape(n_, (Natoms, Nsteps))
#print(n_)
#print(N)
print('begin loop 3')
# Count atoms that were captured at any step (any 2-flag in their row).
N0 = 0
for j in range(Natoms):
    # np.any short-circuits in C instead of scanning every step with a
    # Python-level flag as the original inner loop did.
    if np.any(N[j] == 2):
        N0 += 1
print('Number Captured =', N0)
stop = timeit.default_timer()
print('Run Time =',round(stop - start, 3),'sec')
# One trajectory (top) and one phase-space trace (bottom) per atom,
# coloured along a blue-green gradient by atom index.
for i in range(Natoms):
    'A plot for each of the Natoms particles'
    th = 0.5
    col = (0.1, float(i/(Natoms+1)+0.0001), 1-float(i/(Natoms+5)+0.0001))
    ax1.plot(Z[i],Y[i],linewidth=th, color = col)
    ax2.plot(Z[i],V[i],linewidth=th, color = col)
# MOT capture region and coil-position markers on the trajectory panel.
ax1.axhspan(-ymot/2,ymot/2, alpha=0.05, color='green')
ax1.axvspan(z0-0.01,z0+0.01, alpha=0.05, color='purple')
ax1.axvline(x = z0 - aa, color = 'k', linestyle='dotted')
ax1.axvline(x = z0, color = 'k', linestyle='dashed')
ax1.axvline(x = z0-0.01, color = 'r',linewidth=3)
ax1.axvline(x = z0+0.01, color = 'r',linewidth=3)
ax1.axhline(y = ymot/2, color = 'r',linewidth=3)
ax1.axhline(y = -ymot/2, color = 'r',linewidth=3)
ax1.set_ylim(top = 2*ymot, bottom = -2*ymot)
# ax1.axvline(x = wab-Da, color = 'r')
# ax1.axvline(x = wab-Db, color = 'r')
# ax1.axvline(x = wab-Dc, color = 'r')
# ax1.axvline(x = wab-Dd, color = 'r')
# ax1.axvline(x = wab-De, color = 'r')
# ax1.axvline(x = wab-Df, color = 'r')
# ax1.axvline(x = wab-Dg, color = 'r')
# ax1.axvline(x = wab, color = 'b')
# Same markers plus velocity capture bands on the phase-space panel.
ax2.axvspan(z0-0.01,z0+0.01, alpha=0.05, color='purple')
ax2.axhspan(-capV,capV, alpha=0.05, color='b')
ax2.axhspan(-capv,capv, alpha=0.05, color='red')
ax2.axvline(x = z0 - aa, color = 'k', linestyle='dotted')
ax2.axvline(x = z0, color = 'k', linestyle='dashed')
ax2.axvline(x = z0-0.01, color = 'r',linewidth=3)
ax2.axvline(x = z0+0.01, color = 'r',linewidth=3)
ax2.axhline(y = capv, color = 'r',linewidth=3)
ax2.axhline(y = -capv, color = 'r',linewidth=3)
ax2.set_xlim(left=0, right=z0+1*aa)
ax2.set_ylim(top=500, bottom=-20)
fig.subplots_adjust(hspace=0) # Makes the plots that share the
# # same x axis sit flush on top of each other
ax1.set_ylabel("$y$ $coordinate$ / $m$", size = 19)
ax2.set_ylabel("$z$ $coordinate$ / $ms$`'", size = 19)
#ax3.set_title('Multi-Frequency Slowing Simulation: $\it{7}$ $\it{Frequencies}$, $\it{MOT}$ $\it{Magnetic}$ $\it{Field}$', size=17)
#ax1.set_title('Multi-Frequency: 7 Frequencies & Capture Number ', size=17)
ax2.set_xlabel('$z$ $coordinate$ / $m$ (0 = atom source)', size = 19)
#ax1.set_yticks(np.arange(-0.002, 0.002, step=0.0005))
q_pc = q*N0/Natoms  # captured fraction
#Q_pc = q_pc/(1-np.cos(np.arctan(vy/v_)))
print('Total Flux % =', q_pc/Q * 100)
from datetime import date
today = date.today()
d4 = today.strftime("%d-%b-%Y")  # run date stamped onto the figure
#print("d4 =", d4)
#ax3.legend(title=' {}nIntensity = {}W/cm2nDetuning = {} w/ Increment {}MHznE0 = {} no. atoms = {} nLength of Tube = {}cmnMag Field Gradient = {}G/cm'.format(d4,round(IrE, 3),Da/1000000,d/1000000,E0,nj, round((z0-aa)*100,3),round(Grad*1000000,2), loc=2, prop={'size': 18}))
# Annotation boxes summarising the run parameters and capture results.
textstr = ' {}\nIntensity = {}mW/cm2\nDetuning = {}\n w/ Increment {}MHz\nE0 = {} no. atoms = {} \nLength of Tube = {}cm\nMag Field Gradient = {}G/cm\nLoading Rate* = {}'.format(d4,round(IrE*1000, 3),Da/1000000,d/1000000,E0,Natoms, round((z0-aa)*100,3),'Na', Q*N0/Natoms)#round(Grad*1000000,2))
bigstring = 'Hole Diameter = {} mm\nS/Atoms Captured = {} %\n Total Flux = {}%%%'
ax2.text(z0+0.05, 100, textstr, fontsize=14)
ax2.text(z0-0.18, 150,bigstring.format(Col_Gap*1000,round((N0/Natoms)*100,3), round(q_pc/Q*1000000,3)),fontweight='bold',fontsize=18)
stop = timeit.default_timer()
# Console summary of the run.
print('Velocity Range = [{},{}]'.format(round(min(vran),1),round(max(vran),1)))
print('# Particles = {}'.format(Natoms))
print('Beam Intensity = {}W/cm^2'.format(round(IrE, 3)))
print('Run Time =',round(stop - start, 3),'sec')
print('vy = {}m/s'.format(vy))
print('Flux ')
plt.show()
|
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from textwrap import dedent
import pytest
from pants.backend.helm.resolve import fetch
from pants.backend.helm.resolve.artifacts import HelmArtifact, ResolvedHelmArtifact
from pants.backend.helm.resolve.fetch import FetchedHelmArtifact, FetchHelmArtifactRequest
from pants.backend.helm.target_types import HelmArtifactTarget
from pants.backend.helm.target_types import rules as target_types_rules
from pants.backend.helm.util_rules import tool
from pants.core.util_rules import config_files, external_tool
from pants.engine import process
from pants.engine.addresses import Address
from pants.engine.rules import QueryRule
from pants.testutil.rule_runner import RuleRunner
@pytest.fixture
def rule_runner() -> RuleRunner:
    """A RuleRunner wired with the Helm fetch/resolve rule set used by
    the tests in this module.
    """
    all_rules = [
        *config_files.rules(),
        *external_tool.rules(),
        *fetch.rules(),
        *tool.rules(),
        *process.rules(),
        *target_types_rules(),
        QueryRule(ResolvedHelmArtifact, (HelmArtifact,)),
        QueryRule(FetchedHelmArtifact, (FetchHelmArtifactRequest,)),
    ]
    return RuleRunner(target_types=[HelmArtifactTarget], rules=all_rules)
def test_fetch_single_artifact(rule_runner: RuleRunner) -> None:
    """Fetching a declared helm_artifact yields a chart snapshot whose
    resolved artifact matches the independently resolved one.
    """
    rule_runner.write_files(
        {
            "3rdparty/helm/BUILD": dedent(
                """\
                helm_artifact(
                  name="prometheus-stack",
                  repository="https://prometheus-community.github.io/helm-charts",
                  artifact="kube-prometheus-stack",
                  version="^27.2.0"
                )
                """
            ),
        }
    )
    target = rule_runner.get_target(Address("3rdparty/helm", target_name="prometheus-stack"))
    # Resolve the artifact directly, then fetch it, and check both agree.
    expected_resolved_artifact = rule_runner.request(
        ResolvedHelmArtifact, [HelmArtifact.from_target(target)]
    )
    fetched_artifact = rule_runner.request(
        FetchedHelmArtifact,
        [
            FetchHelmArtifactRequest.from_target(
                target, description_of_origin="the test `test_fetch_single_artifact`"
            )
        ],
    )
    # Every fetched chart must contain its Chart.yaml manifest.
    assert "Chart.yaml" in fetched_artifact.snapshot.files
    assert fetched_artifact.artifact == expected_resolved_artifact
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import socket
from PyQt4 import QtGui, QtCore
from django_packer import *
from django_frame_main import *
class FormWidget(QtGui.QWidget):
    """PyQt4 debug console for a Django-backed controller server.

    Offers a connect bar (IP/port), read/update forms for a controller's
    ON/OFF state, and four toggle buttons mirroring the server's log
    categories.  Written for Python 2 (old-style except/print syntax).
    The protocol helpers (read_log_state, update_log_state,
    read_controller_state, update_controller_state, DjangoFrameMain,
    SUC, FAI, ON, OFF) come from the star-imports at the top of the file.
    """
    def __init__(self, parent):
        """Initialise the widget; '' doubles as the "not connected"
        sentinel for both the socket and the frame receiver.
        """
        super(FormWidget, self).__init__(parent)
        self.django_simulator = ''
        self.dj_reciver = ''
        self.initUI()
    def initUI(self):
        """Build the three rows: connection bar, controller read/update
        forms, and log-toggle buttons with their state labels.
        """
        self.v_layout = QtGui.QVBoxLayout(self)
        # --- row 1: connection controls ---
        self.lbl_ip = QtGui.QLabel(self)
        self.lbl_ip.move(60, 40)
        self.lbl_ip.setText('IP')
        self.lbl_port = QtGui.QLabel(self)
        self.lbl_port.move(60, 200)
        self.lbl_port.setText('PORT')
        self.edit_ip = QtGui.QLineEdit(self)
        self.edit_ip.setText('10.18.50.66')
        self.edit_ip.move(60, 100)
        self.edit_port = QtGui.QLineEdit(self)
        self.edit_port.setText('9001')
        self.edit_port.move(60, 250)
        self.btn_connect = QtGui.QPushButton("connect")
        self.btn_connect.setCheckable(True)
        self.btn_connect.clicked[bool].connect(self.onConnect)
        self.h_layout1 = QtGui.QHBoxLayout(self)
        self.h_layout1.addWidget(self.lbl_ip)
        self.h_layout1.addWidget(self.edit_ip)
        self.h_layout1.addWidget(self.lbl_port)
        self.h_layout1.addWidget(self.edit_port)
        self.h_layout1.addWidget(self.btn_connect)
        # --- row 2, left: read a controller's state ---
        self.controller_read_lbl_1 = QtGui.QLabel(self)
        self.controller_read_lbl_2 = QtGui.QLabel(self)
        self.controller_read_lbl_3 = QtGui.QLabel(self)
        self.controller_read_lbl_1.setText('Room ID')
        self.controller_read_lbl_2.setText('Controller ID')
        self.controller_read_lbl_3.setText('State')
        self.controller_read_txt_1 = QtGui.QLineEdit(self)
        self.controller_read_txt_2 = QtGui.QLineEdit(self)
        self.controller_read_txt_3 = QtGui.QLineEdit(self)
        self.controller_read_btn_1 = QtGui.QPushButton(self)
        self.controller_read_btn_1.setText('Read State')
        self.controller_read_btn_1.clicked.connect(self.onReadController)
        self.controller_read_formlayout = QtGui.QFormLayout(self)
        self.controller_read_formlayout.addRow(self.controller_read_lbl_1, self.controller_read_txt_1)
        self.controller_read_formlayout.addRow(self.controller_read_lbl_2, self.controller_read_txt_2)
        self.controller_read_formlayout.addRow(self.controller_read_lbl_3, self.controller_read_txt_3)
        self.controller_read_formlayout.addRow(self.controller_read_btn_1)
        # --- row 2, right: update a controller's state ---
        self.controller_update_lbl_1 = QtGui.QLabel(self)
        self.controller_update_lbl_2 = QtGui.QLabel(self)
        self.controller_update_lbl_3 = QtGui.QLabel(self)
        self.controller_update_lbl_1.setText('Room ID')
        self.controller_update_lbl_2.setText('Controller ID')
        self.controller_update_lbl_3.setText('State')
        self.controller_update_txt_1 = QtGui.QLineEdit(self)
        self.controller_update_txt_2 = QtGui.QLineEdit(self)
        self.controller_update_txt_3 = QtGui.QLineEdit(self)
        self.controller_update_btn_1 = QtGui.QPushButton(self)
        self.controller_update_btn_1.setText('Update State')
        self.controller_update_btn_1.clicked.connect(self.onUpdateController)
        self.controller_update_formlayout = QtGui.QFormLayout(self)
        self.controller_update_formlayout.addRow(self.controller_update_lbl_1, self.controller_update_txt_1)
        self.controller_update_formlayout.addRow(self.controller_update_lbl_2, self.controller_update_txt_2)
        self.controller_update_formlayout.addRow(self.controller_update_lbl_3, self.controller_update_txt_3)
        self.controller_update_formlayout.addRow(self.controller_update_btn_1)
        self.h_layout2 = QtGui.QHBoxLayout(self)
        self.h_layout2.addLayout(self.controller_read_formlayout)
        self.h_layout2.addLayout(self.controller_update_formlayout)
        # --- row 3: log category toggles + their ON/OFF labels ---
        self.log_control_btn_err = QtGui.QPushButton(self)
        self.log_control_btn_cmc = QtGui.QPushButton(self)
        self.log_control_btn_wrk = QtGui.QPushButton(self)
        self.log_control_btn_dbg = QtGui.QPushButton(self)
        self.log_control_btn_err.setCheckable(True)
        self.log_control_btn_cmc.setCheckable(True)
        self.log_control_btn_wrk.setCheckable(True)
        self.log_control_btn_dbg.setCheckable(True)
        self.log_control_btn_err.setText('error')
        self.log_control_btn_cmc.setText('communication')
        self.log_control_btn_wrk.setText('work')
        self.log_control_btn_dbg.setText('debug')
        # All four buttons share one handler; it dispatches on sender().
        self.log_control_btn_err.clicked[bool].connect(self.onUpdateLog)
        self.log_control_btn_cmc.clicked[bool].connect(self.onUpdateLog)
        self.log_control_btn_dbg.clicked[bool].connect(self.onUpdateLog)
        self.log_control_btn_wrk.clicked[bool].connect(self.onUpdateLog)
        self.log_control_lbl_err = QtGui.QLabel(self)
        self.log_control_lbl_cmc = QtGui.QLabel(self)
        self.log_control_lbl_wrk = QtGui.QLabel(self)
        self.log_control_lbl_dbg = QtGui.QLabel(self)
        self.log_btn_v_layout = QtGui.QVBoxLayout(self)
        self.log_btn_v_layout.addWidget(self.log_control_btn_err)
        self.log_btn_v_layout.addWidget(self.log_control_btn_cmc)
        self.log_btn_v_layout.addWidget(self.log_control_btn_wrk)
        self.log_btn_v_layout.addWidget(self.log_control_btn_dbg)
        self.log_lbl_v_layout = QtGui.QVBoxLayout(self)
        self.log_lbl_v_layout.addWidget(self.log_control_lbl_err)
        self.log_lbl_v_layout.addWidget(self.log_control_lbl_cmc)
        self.log_lbl_v_layout.addWidget(self.log_control_lbl_wrk)
        self.log_lbl_v_layout.addWidget(self.log_control_lbl_dbg)
        # self.log_formlayout = QtGui.QFormLayout(self)
        # self.log_formlayout.addRow(self.log_control_btn_err, self.log_control_lbl_err)
        # self.log_formlayout.addRow(self.log_control_btn_cmc, self.log_control_lbl_cmc)
        # self.log_formlayout.addRow(self.log_control_btn_wrk, self.log_control_lbl_wrk)
        # self.log_formlayout.addRow(self.log_control_btn_dbg, self.log_control_lbl_dbg)
        self.h_layout3 = QtGui.QHBoxLayout(self)
        self.h_layout3.addLayout(self.log_btn_v_layout)
        self.h_layout3.addLayout(self.log_lbl_v_layout)
        self.v_layout.addLayout(self.h_layout1)
        self.v_layout.addLayout(self.h_layout2)
        self.v_layout.addLayout(self.h_layout3)
        # self.v_layout.addLayout(self.log_formlayout)
    def onConnect(self, pressed):
        """Toggle the TCP connection: connect and sync log-button state
        when pressed, shut the socket down and reset sentinels otherwise.
        """
        if pressed:
            try:
                self.ip = self.edit_ip.text()
                self.port = int(self.edit_port.text())
                self.django_simulator = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                self.django_simulator.connect((self.ip, self.port))
                # The button text doubles as the connection-state flag
                # checked by the other handlers.
                self.btn_connect.setText('Connected')
                self.dj_reciver = DjangoFrameMain(self.django_simulator)
                self.init_log_btn()
            except socket.error, e:
                self.btn_connect.setText('Unconnected')
                print e
        else:
            self.django_simulator.shutdown(socket.SHUT_RDWR)
            self.django_simulator.close()
            self.btn_connect.setText('Closed')
            self.django_simulator = ''
            self.dj_reciver = ''
    # def onChanged(self, text):
    #     self.ip = self.edit_ip.text()
    #     self.port = int(self.edit_port.text())
    def init_log_btn(self):
        """Fetch the server's current log switches and mirror them onto
        the four toggle buttons and their ON/OFF labels.
        """
        main_frame = read_log_state('all')
        self.django_simulator.send(main_frame)
        json_inst = self.dj_reciver.main_receivor()
        if json_inst['code'] == SUC:
            if json_inst['data']['communication'] == 'on':
                self.log_control_btn_cmc.setChecked(True)
                self.log_control_lbl_cmc.setText('ON')
            else:
                self.log_control_btn_cmc.setChecked(False)
                self.log_control_lbl_cmc.setText('OFF')
            if json_inst['data']['error'] == 'on':
                self.log_control_btn_err.setChecked(True)
                self.log_control_lbl_err.setText('ON')
            else:
                self.log_control_btn_err.setChecked(False)
                self.log_control_lbl_err.setText('OFF')
            if json_inst['data']['debug'] == 'on':
                self.log_control_btn_dbg.setChecked(True)
                self.log_control_lbl_dbg.setText('ON')
            else:
                self.log_control_btn_dbg.setChecked(False)
                self.log_control_lbl_dbg.setText('OFF')
            if json_inst['data']['work'] == 'on':
                self.log_control_btn_wrk.setChecked(True)
                self.log_control_lbl_wrk.setText('ON')
            else:
                self.log_control_btn_wrk.setChecked(False)
                self.log_control_lbl_wrk.setText('OFF')
    def onReadController(self):
        """Query the server for one controller's state and show ON/OFF
        in the third read-form field (also used for error messages).
        """
        room_id = self.controller_read_txt_1.text()
        controller_id = self.controller_read_txt_2.text()
        if room_id == '':
            self.controller_read_txt_1.setText('room id can not be null')
            return 0
        if controller_id == '':
            self.controller_read_txt_2.setText('controller id can not be null')
            return 0
        if self.btn_connect.text() == 'Connected':
            self.django_simulator.send(read_controller_state(int(room_id), int(controller_id)))
            dj_reciver = DjangoFrameMain(self.django_simulator)
            json_inst = dj_reciver.main_receivor()
            if json_inst['data']['state'] == ON:
                self.controller_read_txt_3.setText('ON')
            else:
                self.controller_read_txt_3.setText('OFF')
        else:
            self.controller_read_txt_3.setText('Not connected yet')
    def onUpdateController(self):
        """Send a new ON/OFF state for one controller; the third
        update-form field reports validation errors and the result.
        """
        room_id = self.controller_update_txt_1.text()
        controller_id = self.controller_update_txt_2.text()
        state = self.controller_update_txt_3.text()
        if room_id == '':
            self.controller_update_txt_1.setText('room id can not be null')
            return 0
        if controller_id == '':
            self.controller_update_txt_2.setText('controller id can not be null')
            return 0
        if state != 'ON' and state != 'OFF':
            self.controller_update_txt_3.setText('state should be [ON, OFF]')
            return 0
        if self.btn_connect.text() == 'Connected':
            state = self.django_simulator.send(update_controller_state(int(room_id), int(controller_id),str(state)))
            dj_reciver = DjangoFrameMain(self.django_simulator)
            json_inst = dj_reciver.main_receivor()
            if json_inst['code'] == 0:
                self.controller_update_txt_3.setText('update succeed')
            else:
                self.controller_update_txt_3.setText('update failed')
        else:
            self.controller_update_txt_3.setText('Not connected yet')
    def onUpdateLog(self, pressed):
        """Shared handler for the four log toggles.

        Dispatches on sender(); for each button, sends the matching
        update_log_state frame and updates the label, reverting the
        button's checked state when the request fails or when there is
        no connection.
        """
        sender = self.sender()
        if sender is self.log_control_btn_err:
            if self.log_control_btn_err.isChecked():
                if self.django_simulator == '':
                    self.log_control_lbl_err.setText('OFF \t Sorry not connected to server!')
                    self.log_control_btn_err.setChecked(False)
                    return FAI
                main_frame = update_log_state('error', ON)
                self.django_simulator.send(main_frame)
                json_inst = self.dj_reciver.main_receivor()
                if json_inst['code'] == SUC:
                    self.log_control_lbl_err.setText('ON')
                else:
                    self.log_control_btn_err.setChecked(False)
            else:
                if self.django_simulator == '':
                    self.log_control_lbl_err.setText('ON \t Sorry not connected to server!')
                    self.log_control_btn_err.setChecked(True)
                    return FAI
                main_frame = update_log_state('error', OFF)
                self.django_simulator.send(main_frame)
                json_inst = self.dj_reciver.main_receivor()
                if json_inst['code'] == SUC:
                    self.log_control_lbl_err.setText('OFF')
                else:
                    self.log_control_btn_err.setChecked(True)
            pass
        elif sender is self.log_control_btn_cmc:
            if self.log_control_btn_cmc.isChecked():
                if self.django_simulator == '':
                    self.log_control_lbl_cmc.setText('OFF \t Sorry not connected to server!')
                    self.log_control_btn_cmc.setChecked(False)
                    return FAI
                main_frame = update_log_state('communication', ON)
                self.django_simulator.send(main_frame)
                json_inst = self.dj_reciver.main_receivor()
                if json_inst['code'] == SUC:
                    self.log_control_lbl_cmc.setText('ON')
                else:
                    self.log_control_btn_cmc.setChecked(False)
            else:
                if self.django_simulator == '':
                    self.log_control_lbl_cmc.setText('ON \t Sorry not connected to server!')
                    self.log_control_btn_cmc.setChecked(True)
                    return FAI
                main_frame = update_log_state('communication', OFF)
                self.django_simulator.send(main_frame)
                json_inst = self.dj_reciver.main_receivor()
                if json_inst['code'] == SUC:
                    self.log_control_lbl_cmc.setText('OFF')
                else:
                    self.log_control_btn_cmc.setChecked(True)
        elif sender is self.log_control_btn_wrk:
            if self.log_control_btn_wrk.isChecked():
                if self.django_simulator == '':
                    self.log_control_lbl_wrk.setText('OFF \t Sorry not connected to server!')
                    self.log_control_btn_wrk.setChecked(False)
                    return FAI
                main_frame = update_log_state('work', ON)
                self.django_simulator.send(main_frame)
                json_inst = self.dj_reciver.main_receivor()
                if json_inst['code'] == SUC:
                    self.log_control_lbl_wrk.setText('ON')
                else:
                    self.log_control_btn_wrk.setChecked(False)
            else:
                if self.django_simulator == '':
                    self.log_control_lbl_wrk.setText('ON \t Sorry not connected to server!')
                    self.log_control_btn_wrk.setChecked(True)
                    return FAI
                main_frame = update_log_state('work', OFF)
                self.django_simulator.send(main_frame)
                json_inst = self.dj_reciver.main_receivor()
                if json_inst['code'] == SUC:
                    self.log_control_lbl_wrk.setText('OFF')
                else:
                    self.log_control_btn_wrk.setChecked(True)
        elif sender is self.log_control_btn_dbg:
            if self.log_control_btn_dbg.isChecked():
                if self.django_simulator == '':
                    self.log_control_lbl_dbg.setText('OFF \t Sorry not connected to server!')
                    self.log_control_btn_dbg.setChecked(False)
                    return FAI
                main_frame = update_log_state('debug', ON)
                self.django_simulator.send(main_frame)
                json_inst = self.dj_reciver.main_receivor()
                if json_inst['code'] == SUC:
                    self.log_control_lbl_dbg.setText('ON')
                else:
                    self.log_control_btn_dbg.setChecked(False)
            else:
                if self.django_simulator == '':
                    self.log_control_lbl_dbg.setText('ON \t Sorry not connected to server!')
                    self.log_control_btn_dbg.setChecked(True)
                    return FAI
                main_frame = update_log_state('debug', OFF)
                self.django_simulator.send(main_frame)
                json_inst = self.dj_reciver.main_receivor()
                if json_inst['code'] == SUC:
                    self.log_control_lbl_dbg.setText('OFF')
                else:
                    self.log_control_btn_dbg.setChecked(True)
from django.shortcuts import render
from django.views.generic import View
import fredboardChords as gs
import random
class ChordsPage(View):
    """Render a single CM7 drop2 (inversion 1, string set 1) chord diagram."""
    def get(self, request, *args, **kwargs):
        diagram = gs.create_svg('CM7', 'drop2_inv1_strS1', 1)
        markup = ''.join(diagram.create())
        context = {
            'svg': markup,
            'title': 'C M7',
            'subtitle': 'drop2 - inv1 - strS1',
        }
        return render(request, "chords.html", context)
class ArpegiosPage(View):
    """Render the eight CM7 drop2 arpeggio diagrams: inversions 1-4 on
    string set 1 followed by inversions 1-4 on string set 3.
    """
    def get(self, request, *args, **kwargs):
        # Same chord and colour for every diagram; only inversion and
        # string set vary, so build the eight SVGs in a loop instead of
        # the original add1..add8 copy-paste (same order preserved).
        svg_parts = []
        for string_set in ('strS1', 'strS3'):
            for inversion in range(1, 5):
                shape = 'drop2_inv{}_{}'.format(inversion, string_set)
                diagram = gs.create_svg('CM7', shape, 3)
                svg_parts.append(''.join(diagram.create()))
        svg1 = ''.join(svg_parts)
        title = 'C M7'
        subtitle = 'drop2 - inv1 - strS1'
        return render(request, "chords.html", {'svg': svg1, 'title': title, 'subtitle': subtitle})
class AjaxChord(View):
    """AJAX endpoint: build one chord SVG from the POSTed note, shape,
    chord type, inversion and string set.
    """
    def post(self, request, *args, **kwargs):
        color = random.randrange(0, 2)  # pick one of two colour schemes at random
        print(request.POST)
        # Each parameter falls back to the CM7 drop2/inv1/strS1 default.
        x1 = request.POST.get('note', 'C')
        x2 = request.POST.get('shape', 'M7')
        x3 = request.POST.get('type_chord', 'drop2')
        x4 = request.POST.get('inv', 'inv1')
        x5 = request.POST.get('string', 'strS1')
        print(x1 + x2, x3 + '_' + x4 + '_' + x5)
        add1 = gs.create_svg(x1 + x2, x3 + '_' + x4 + '_' + x5, color)
        add1 = add1.create()
        svg3 = ''.join(add1)
        title = x1 + ' ' + x2
        subtitle = x3 + ' - ' + x4 + ' - ' + x5
        # FIX: corrected the 'recieved' typo in this log message.
        print('POST request was received on AjaxChord')
        return render(request, 'chords.html', {'svg': svg3, 'title': title, 'subtitle': subtitle})
|
from django.conf import settings
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.urls import reverse
from tos.models import TermsOfService, UserAgreement, has_user_agreed_latest_tos
class TestViews(TestCase):
    """Integration tests for the TOS agreement flow and the login views."""

    def setUp(self):
        """Create one user who agreed to the active TOS and one who did not."""
        # User that has agreed to TOS
        self.user1 = get_user_model().objects.create_user('user1', 'user1@example.com', 'user1pass')
        # User that has not yet agreed to TOS
        self.user2 = get_user_model().objects.create_user('user2', 'user2@example.com', 'user2pass')
        self.tos1 = TermsOfService.objects.create(
            content="first edition of the terms of service",
            active=True
        )
        self.tos2 = TermsOfService.objects.create(
            content="second edition of the terms of service",
            active=False
        )
        self.login_url = getattr(settings, 'LOGIN_URL', '/login/')
        UserAgreement.objects.create(
            terms_of_service=self.tos1,
            user=self.user1
        )

    def test_login(self):
        """ Make sure we didn't break the authentication system
        This assumes that login urls are named 'login'
        """
        self.assertTrue(has_user_agreed_latest_tos(self.user1))
        login = self.client.login(username='user1', password='user1pass')
        # Modernized: assertTrue replaces the long-deprecated failUnless alias.
        self.assertTrue(login, 'Could not log in')
        self.assertTrue(has_user_agreed_latest_tos(self.user1))

    def test_user_agrees_multiple_times(self):
        """Repeated accepts must not create duplicate UserAgreement rows."""
        login_response = self.client.post(reverse('login'), {
            'username': 'user2',
            'password': 'user2pass',
        })
        self.assertTrue(login_response)
        response = self.client.post(reverse('tos_check_tos'), {'accept': 'accept'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(UserAgreement.objects.filter(user=self.user2).count(), 1)
        response = self.client.post(reverse('tos_check_tos'), {'accept': 'accept'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(UserAgreement.objects.filter(user=self.user2).count(), 1)
        response = self.client.post(reverse('tos_check_tos'), {'accept': 'accept'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(UserAgreement.objects.filter(user=self.user2).count(), 1)

    def test_need_agreement(self):
        """ user2 tries to login and then has to go and agree to terms"""
        self.assertFalse(has_user_agreed_latest_tos(self.user2))
        response = self.client.post(self.login_url, dict(username='user2', password='user2pass'))
        self.assertContains(response, "first edition of the terms of service")
        self.assertFalse(has_user_agreed_latest_tos(self.user2))

    def test_do_not_need_agreement(self):
        """ user2 tries to login and has already agreed"""
        self.assertTrue(has_user_agreed_latest_tos(self.user1))
        response = self.client.post(self.login_url, dict(username='user1',
                                                         password='user1pass'))
        self.assertEqual(302, response.status_code)

    def test_redirect_security(self):
        """ redirect to outside url not allowed, should redirect to login url"""
        response = self.client.post(self.login_url, dict(username='user1',
                                                         password='user1pass', next='http://example.com'))
        self.assertEqual(302, response.status_code)
        self.assertIn(settings.LOGIN_REDIRECT_URL, response.url)

    def test_need_to_log_in(self):
        """ GET to login url shows login template."""
        response = self.client.get(self.login_url)
        self.assertContains(response, "Dummy login template.")

    def test_root_tos_view(self):
        """The /tos/ page shows the currently active edition."""
        response = self.client.get('/tos/')
        self.assertIn(b'first edition of the terms of service', response.content)

    def test_reject_agreement(self):
        """Rejecting the TOS leaves the user un-agreed."""
        self.assertFalse(has_user_agreed_latest_tos(self.user2))
        response = self.client.post(self.login_url, dict(username='user2', password='user2pass'))
        self.assertContains(response, "first edition of the terms of service")
        url = reverse('tos_check_tos')
        response = self.client.post(url, {'accept': 'reject'})
        self.assertFalse(has_user_agreed_latest_tos(self.user2))

    def test_accept_agreement(self):
        """Accepting the TOS marks the user as agreed."""
        self.assertFalse(has_user_agreed_latest_tos(self.user2))
        response = self.client.post(self.login_url, dict(username='user2', password='user2pass'))
        self.assertContains(response, "first edition of the terms of service")
        self.assertFalse(has_user_agreed_latest_tos(self.user2))
        url = reverse('tos_check_tos')
        response = self.client.post(url, {'accept': 'accept'})
        self.assertTrue(has_user_agreed_latest_tos(self.user2))

    def test_bump_new_agreement(self):
        """Activating a new TOS edition forces user1 to agree again."""
        # Change the tos
        self.tos2.active = True
        self.tos2.save()
        # is user1 agreed now?
        self.assertFalse(has_user_agreed_latest_tos(self.user1))
        # user1 agrees again
        response = self.client.post(self.login_url, dict(username='user1', password='user1pass'))
        self.assertContains(response, "second edition of the terms of service")
        # Bug fix: this previously asserted on user2, but this test exercises
        # user1's re-agreement flow.
        self.assertFalse(has_user_agreed_latest_tos(self.user1))
        url = reverse('tos_check_tos')
        response = self.client.post(url, {'accept': 'accept'})
        self.assertTrue(has_user_agreed_latest_tos(self.user1))
|
"""
Examen Parcial 4
Carrillo Medina Alexis Adrian (CMAA)
Nombre del programa: Parcial4.py
"""
#----- Seccion de bibliotecas
import numpy as np
import matplotlib.pyplot as plt
# scipy es utilizado unicamente para la comprobacion
import scipy.integrate as integrate
#----- Codigo
# La validacion se encuentra en el metodo main
#---------- Metodos auxiliares -----------
def exacSolution(x):
    """Exact solution y(x) = 2*e**x - x - 1 of the ODE used in exercise 1.1."""
    return 2 * np.exp(x) - (x + 1)
def random(N, seed=0.128258):
    """Generate N pseudo-random numbers in [0, 1).

    Uses a Lehmer (multiplicative linear congruential) generator with the
    Park-Miller "minimal standard" constants: multiplier 7**5 = 16807 and
    the Mersenne prime modulus 2**31 - 1.

    Bug fix: the modulus was written ``2*31-1`` (= 61) instead of
    ``2**31-1`` (= 2147483647), which destroyed the generator's period
    and uniformity. Same fix applies to the normalizing denominator.

    :param N: number of values to generate
    :param seed: starting state of the recurrence
    :return: list of N floats in [0, 1)
    """
    M = 2 ** 31 - 1   # Mersenne prime modulus (was erroneously 2*31-1)
    A = 7 ** 5        # Park-Miller multiplier, 16807
    fx = [seed]
    x = []
    for i in range(1, N + 1):
        # Linear congruential step
        fx.append((A * fx[i - 1]) % M)
        x.append(fx[i] / M)
    return x
def random2(N, seed=0.128258):
    """Generate uniform pseudo-random numbers via the chaotic logistic map.

    The invariant distribution of the logistic map x -> 4x(1-x) is
    Beta(1/2, 1/2); if X ~ Beta(1/2, 1/2) then
    Y = (2/pi) * arcsin(sqrt(X)) ~ Uniform(0, 1).

    :return: numpy array of N+1 values in [0, 1] (the seed is included).
    """
    orbit = [seed]
    for _ in range(N):
        prev = orbit[-1]
        orbit.append(4 * prev * (1 - prev))
    # Transform the Beta(1/2,1/2)-distributed orbit to Uniform(0,1).
    return (2 / np.pi) * np.arcsin(np.sqrt(orbit))
def fRandom(x, y, z):
    """Integrand for exercise 3.1: f(x, y, z) = x * y * sin(y * z)."""
    return np.sin(y * z) * x * y
#---------- 1 ----------------------------
#---------- 1.1 --------------------------
def difFinitas1D(a, N, xmax):
    """Forward finite-difference (explicit Euler) march for y' = y + t.

    :param a: initial condition y(0)
    :param N: number of steps in the discretization
    :param xmax: right end of the interval [0, xmax]
    :return: (t, y) arrays of length N+1 with the grid and the approximation
    """
    h = float(xmax) / N          # uniform step size
    y = np.zeros(N + 1)
    t = np.zeros(N + 1)
    y[0] = a                     # initial condition
    t[0] = 0
    # Forward substitution: y_{k+1} = (1+h) y_k + h t_k
    for k in range(N):
        y[k + 1] = (h + 1) * y[k] + h * t[k]
        t[k + 1] = t[k] + h
    # Plotting is done by the caller (main), not here.
    return t, y
#---------- 2 ----------------------------
#---------- 2.1 --------------------------
def aproxTrapecioCompuesto(f, n, a, b):
    """Composite trapezoid rule for the integral of f over [a, b].

    :param f: integrand (callable of one float)
    :param n: number of subintervals
    :param a: lower integration limit
    :param b: upper integration limit
    :return: approximation (h/2) * [f(a) + 2*sum(interior) + f(b)]
    """
    h = (b - a) / n
    total = f(a)                 # endpoint contribution
    xi = a
    interior = 0
    # Accumulate the n-1 interior nodes.
    for _ in range(1, n):
        xi += h
        interior += f(xi)
    total += 2 * interior + f(b)
    return (h / 2) * total
#---------- 3 ----------------------------
#---------- 3.1 --------------------------
def integralMonteCarlo1(f, N, a, b, c, d, e, g, seed=0.1578):
    """Monte Carlo estimate of the triple integral of f over a box.

    Samples come from the linear congruential generator `random`.

    NOTE(review): all three coordinate streams are generated with the SAME
    seed, so x, y and z are perfectly correlated before scaling — confirm
    this is intended; independent seeds would give an unbiased estimator.

    :param f: integrand f(x, y, z)
    :param N: number of sample points
    :param (a,b)x(c,d)x(e,g): integration box
    :param seed: generator seed
    :return: (volume / N) * sum of f at the sampled points
    """
    vol = (b - a) * (d - c) * (g - e)
    xs = random(N, seed)
    ys = random(N, seed)
    zs = random(N, seed)
    acc = 0
    for u, v, w in zip(xs, ys, zs):
        # Map the unit-cube sample into the integration box.
        acc += f(u * (b - a) + a, v * (d - c) + c, w * (g - e) + e)
    return (vol / N) * acc
def integralMonteCarlo2(f, N, a, b, c, d, e, g, seed=0.1578):
    """Monte Carlo estimate of the triple integral of f over a box.

    Samples come from the logistic-map generator `random2` (which returns
    N+1 values; only the first N are consumed here via zip/range pairing).

    NOTE(review): as in integralMonteCarlo1, the three streams share one
    seed and are therefore fully correlated — confirm intent.

    :param f: integrand f(x, y, z)
    :param N: number of sample points
    :param (a,b)x(c,d)x(e,g): integration box
    :param seed: generator seed
    :return: (volume / N) * sum of f at the sampled points
    """
    vol = (b - a) * (d - c) * (g - e)
    xs = random2(N, seed)
    ys = random2(N, seed)
    zs = random2(N, seed)
    acc = 0
    for i in range(N):
        # Map the unit-cube sample into the integration box.
        acc += f(xs[i] * (b - a) + a, ys[i] * (d - c) + c, zs[i] * (g - e) + e)
    return (vol / N) * acc
def integralMonteCarlo(f, N, a, b, c, d, e, g):
    """Monte Carlo estimate of the triple integral of f over a box,
    using numpy's uniform generator as the reference implementation.

    :param f: integrand f(x, y, z)
    :param N: number of sample points
    :param (a,b)x(c,d)x(e,g): integration box
    :return: (volume / N) * sum of f at the sampled points
    """
    vol = (b - a) * (d - c) * (g - e)
    # Draw the three coordinate streams in the same order as before
    # (x, then y, then z) so results are reproducible under a fixed seed.
    xs = np.random.uniform(size=N)
    ys = np.random.uniform(size=N)
    zs = np.random.uniform(size=N)
    acc = 0
    for u, v, w in zip(xs, ys, zs):
        acc += f(u * (b - a) + a, v * (d - c) + c, w * (g - e) + e)
    return (vol / N) * acc
#---------- Metodo Main ------------------
def main():
    """Run the validations and demos for exercises 1.1, 2.1 and 3.1.

    Side effects: opens a blocking matplotlib window and prints results
    (output strings are in Spanish, as in the original assignment).
    """
    # Validation 1.1: finite differences vs hand-computed values
    example=[1,1.2,1.48,1.856,2.3472,2.97664]
    t,y=difFinitas1D(1,5,1)
    for i in range(len(y)):
        assert y[i]==example[i]
    # Demo parameters
    a=1
    N=10
    xmax=1
    # Plot: approximation (red) vs exact solution (blue)
    t,y=difFinitas1D(a,N,xmax)
    plt.plot(t,y,color="red",label="Aproximacion")
    plt.plot(np.linspace(0,xmax,N+1),exacSolution(np.linspace(0,xmax,N+1)),color="blue",label="Funcion real")
    plt.legend(shadow=True)
    plt.show()  # blocks until the window is closed
    # Printed solution for 1.1
    print("\nEjercicio 1.1")
    print("\nDerivadas")
    print(y)
    print("")
    # Validation 2.1: trapezoid rule vs scipy.integrate.quad
    aproxSc,error=integrate.quad(lambda x:np.cos(x),-2,3)
    aproxTrapecio=aproxTrapecioCompuesto(np.cos,100,-2,3)
    assert np.round(aproxSc,2)==np.round(aproxTrapecio,2)
    print("Ejercicio 2.1 \n")
    print("f(x)=cos(x) \n")
    print("Solucion 'exacta'")
    print(aproxSc)
    print("\nSolucion aproximada")
    print(aproxTrapecio)
    print("")
    # Validation 3.1: the two custom Monte Carlo generators should roughly
    # agree (tolerance 1); seeds are themselves random draws.
    int1=integralMonteCarlo1(fRandom,10000,1,3,0,np.pi,0,np.pi/3,np.random.uniform())
    int2=integralMonteCarlo2(fRandom,10000,1,3,0,np.pi,0,np.pi/3,np.random.uniform())
    assert abs(int1-int2) < 1
    print("Ejercicio 3.1 \n")
    print("f(x,y,z)=x*y*sin(y*z)\n")
    print("Integral por Generador lineal congruencial")
    print(int1)
    print("\nIntegral por Mapeo logistico")
    print(int2)
    print("\nIntegral por Numpy")
    print(integralMonteCarlo(fRandom,10000,1,3,0,np.pi,0,np.pi/3))
    print("")

if __name__=='__main__':
    main()
# Generated by Django 3.1.1 on 2020-10-10 11:35
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the Permission model with CRUD flags."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Permission',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Audit timestamps
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('name', models.TextField(max_length=2000, null=True)),
                # Per-permission CRUD capability flags
                ('create', models.BooleanField(default=False)),
                ('read', models.BooleanField(default=False)),
                ('update', models.BooleanField(default=False)),
                ('delete', models.BooleanField(default=False)),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
|
"""
剑指 Offer 10-2. 青蛙跳台阶问题
一只青蛙一次可以跳上1级台阶,也可以跳上2级台阶。求该青蛙跳上一个 n 级的台阶总共有多少种跳法。
"""
from functools import lru_cache
# 显然也是个递归,这个可以反着想,比方说上第n级台阶,其实就是上第n-1个台阶的方法种类再迈一步,其实就是n-1个台阶那么多种,或者n-2个台阶那么多种类再上一步。
# 递归的东西应该仔细思考一下,再简单也要思考一下,细思极恐。
@lru_cache()
def numWays(n):
    """Count the distinct ways a frog can climb n steps, hopping 1 or 2.

    Fibonacci-style recurrence ways(n) = ways(n-1) + ways(n-2), with
    ways(0) = ways(1) = 1, evaluated iteratively.
    """
    if n < 2:
        return 1
    prev, curr = 1, 1
    for _ in range(n - 1):
        prev, curr = curr, prev + curr
    return curr
|
#!/usr/bin/env python
# detections_refinement.py: Node for online refinement of detections (BirdNet 1 only)
import sys
import rospy
import datetime
import math
import numpy as np
from numpy.linalg import inv
import cv2
import tf
import message_filters
from sensor_msgs.msg import Image, CameraInfo, PointCloud2
from perception_msgs.msg import ObstacleList
from geometry_msgs.msg import Point
from cv_bridge import CvBridge, CvBridgeError
from birdview_detection_refiner import BirdviewDetectionRefiner
from perception_msgs._classes import CLASSES, CLASS_COLOR, CLASS_THRESHOLDS
# Publisher for the refined detections; queue_size=1 keeps only the newest frame.
refined_pub = rospy.Publisher('/refined_obstacle_list', ObstacleList, queue_size=1)
bvres = 0.05        # BEV cell size [m]; overwritten from ROS params in main()
lidar_h = 1.73 # TODO Change to use TF
only_front = False  # whether only the front half of the BEV is processed
# Running totals for the average-latency printout in callback()
count = 0
ms = 0
# Re-publishes the synchronized lidar cloud alongside the refined list.
velo_pub = rospy.Publisher('velo_sync', PointCloud2, queue_size=1)
def callback(obstaclelist, bird_view, bird_ground, velo_cloud):
    """Refine one time-synchronized set of detections and republish it.

    Args (delivered together by the TimeSynchronizer in main):
        obstaclelist: perception_msgs/ObstacleList with raw detections.
        bird_view: sensor_msgs/Image, bird's-eye-view image (bgr8).
        bird_ground: sensor_msgs/Image, ground estimate (32FC1).
        velo_cloud: sensor_msgs/PointCloud2, matching lidar cloud.

    Publishes the refined list on /refined_obstacle_list, re-publishes the
    cloud on velo_sync, and prints a running average processing time.
    """
    global count, ms, velo_pub
    a = datetime.datetime.now()  # start of per-frame timing
    bridge = CvBridge()
    bv_image = []
    bv_ground = []
    try:
        bv_image = bridge.imgmsg_to_cv2(bird_view, "bgr8")
        bv_ground = bridge.imgmsg_to_cv2(bird_ground, "32FC1")
    except CvBridgeError as e:
        print(e)
        sys.exit(-1)  # unrecoverable: image conversion failed
    # Init BV Detection Refiner
    bv_refiner = BirdviewDetectionRefiner(bv_image, bv_ground, bvres, lidar_h, only_front)
    refined_list = ObstacleList()
    refined_list.header = obstaclelist.header
    refined_list.header.frame_id = 'velodyne'
    for obj in obstaclelist.obstacles:
        # Filter detections with a very low score
        if obj.score < 0.1: # Remove highly unlikely detections
            continue
        try:
            # Refine detection and append to list; failures skip the object
            # instead of dropping the whole frame.
            bv_refiner.refine_detection(obj)
            refined_list.obstacles.append(obj)
        except Exception as e:
            rospy.logerr(e)
    refined_pub.publish(refined_list)
    velo_pub.publish(velo_cloud)
    b = datetime.datetime.now()
    delta = b - a
    # NOTE(review): only .microseconds is accumulated, ignoring whole
    # seconds — the average is wrong for frames slower than 1 s.
    ms += delta.microseconds
    count += 1
    print 'average ms: {}'.format(ms / count / 1000.0)
def main(args):
    """Node entry point: read parameters, wire the synchronized topics, spin.

    :param args: command-line arguments (forwarded from sys.argv, unused here)
    """
    global bvres, lidar_tf_frame, camera_tf_frame, max_height, only_front
    # Initializes and cleanup ros node
    rospy.init_node('detections_refinement', anonymous=True)
    rospy.loginfo("[detections_refinement] Ready")
    bvres = rospy.get_param("~cell_size", 0.05)      # BEV cell size [m]
    max_height = rospy.get_param("~max_height", 3.0)
    obstacles_topic = rospy.get_param("~obstacles_list", '/obstacle_list')
    birdview_topic = rospy.get_param("~bird_view", '/bird_view')
    birdground_topic = rospy.get_param("~bird_ground", '/bird_ground')
    lidar_tf_frame = rospy.get_param("~lidar_tf_frame", 'velodyne')
    camera_tf_frame = rospy.get_param("~camera_tf_frame", 'stereo_camera')
    only_front = rospy.get_param("~only_front", False)
    print 'Using only front part of BEV: {}'.format(only_front)
    velo_topic = '/velodyne_points'
    obstacle_sub = message_filters.Subscriber(obstacles_topic, ObstacleList)
    birdview_sub = message_filters.Subscriber(birdview_topic, Image)
    birdground_sub = message_filters.Subscriber(birdground_topic, Image)
    velo_sub = message_filters.Subscriber(velo_topic, PointCloud2)
    # Exact-time sync: all four messages must carry identical timestamps.
    ts = message_filters.TimeSynchronizer([obstacle_sub, birdview_sub, birdground_sub, velo_sub], 10)
    ts.registerCallback(callback)
    rospy.spin()

if __name__ == '__main__':
    main(sys.argv)
import math
def perfect_num(start, end):
    """Print the sum of all perfect squares in [start, end] and the smallest
    one, or -1 if the interval contains no perfect square."""
    lo = int(math.sqrt(start))
    hi = int(math.sqrt(end)) + 1
    squares = [k * k for k in range(lo, hi) if start <= k * k <= end]
    if not squares:
        print(-1)
        return
    print(sum(squares))
    print(squares[0])
# Read the inclusive range [M, N] from stdin (one integer per line)
# and report its perfect squares.
M, N = int(input()), int(input())
perfect_num(M, N)
|
# Display two fixed lines of output (placeholder/demo script).
print("New Python File")
print("edit")
|
# individual network settings for each actor + critic pair
# see networkforall for details
from networkforall import Actor, Critic
from utilities import hard_update, gumbel_softmax, onehot_from_logits
from torch.optim import Adam
import torch
import numpy as np
# add OU noise for exploration
from OUNoise import OUNoise
# Run on GPU when available, otherwise fall back to CPU.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
class DDPGAgent:
    """One actor-critic pair (plus target copies) used by a multi-agent
    DDPG setup. Networks come from networkforall; exploration noise from
    OUNoise."""

    def __init__(self, in_actor, actor_fc1_units, actor_fc2_units, out_actor,
                 in_critic, critic_fc1_units, critic_fc2_units, lr_actor, lr_critic,
                 weight_decay_actor, weight_decay_critic):
        """Build actor/critic networks, their target copies, the OU noise
        process and the two Adam optimizers.

        in_actor / in_critic: input sizes; *_fc*_units: hidden layer sizes;
        out_actor: action dimension; lr_* / weight_decay_*: optimizer params.
        """
        # NOTE(review): DDPGAgent has no base class, so this resolves to
        # object.__init__ — harmless but redundant.
        super(DDPGAgent, self).__init__()
        self.actor = Actor(in_actor, actor_fc1_units, actor_fc2_units, out_actor).to(device)
        self.critic = Critic(in_critic, critic_fc1_units, critic_fc2_units, 1).to(device)
        self.target_actor = Actor(in_actor, actor_fc1_units, actor_fc2_units, out_actor).to(device)
        self.target_critic = Critic(in_critic, critic_fc1_units, critic_fc2_units, 1).to(device)
        # Target networks are only ever evaluated, never trained directly.
        self.target_actor.eval()
        self.target_critic.eval()
        self.noise = OUNoise(out_actor)
        # initialize targets same as original networks
        hard_update(self.target_actor, self.actor)
        hard_update(self.target_critic, self.critic)
        self.actor_optimizer = Adam(self.actor.parameters(), lr=lr_actor, weight_decay=weight_decay_actor)
        self.critic_optimizer = Adam(self.critic.parameters(), lr=lr_critic, weight_decay=weight_decay_critic)

    def act(self, state):
        """Return a noisy action for `state` (numpy array), clipped to [-1, 1]."""
        state = torch.from_numpy(state).float().to(device)
        # Evaluate deterministically, then restore training mode.
        self.actor.eval()
        with torch.no_grad():
            action = self.actor(state)
        self.actor.train()
        # add exploration noise to the action
        action += self.noise.noise()
        return np.clip(action.cpu().data.numpy(), -1, 1)

    def target_act(self, state):
        """Return the target actor's action for `state` (a tensor), no noise."""
        with torch.no_grad():
            action = self.target_actor(state)
        return action
|
#!/usr/bin/env python
# coding: utf-8

# Exercise 1: create and print a 1-d array of 21 decimal numbers from 5 to 10,
# then calculate and print its min, max and average.

import numpy as np

my_array = np.linspace(5, 10, 21)
print(my_array)

# Bug fix: the notebook cells evaluated these expressions bare; converted to
# a script they produced no output, so wrap them in print().
print(my_array.min(axis=0))
print(my_array.max(axis=0))
print(my_array.mean(axis=0))

# Exercise 2: a 27x3 array of consecutive integers, reshaped to 9x9.
A = np.arange(81).reshape(27, 3)
print(A)

# Bug fix: B is now obtained by reshaping A itself, as the exercise asks
# ("Change the shape of the array A"), rather than rebuilding from arange
# (the values are identical either way).
B = A.reshape(9, 9)
print(B)

# Exercise 3: add element-wise the identity array of matching size to B.
identity = np.identity(9)
print(identity + B)

# Exercise 4: multiply dot-style the array obtained from the summation with B.
# Bug fix: the original multiplied the bare identity instead of the sum.
dot_product = np.dot(identity + B, B)
print(dot_product)
|
import kvt
import torch
import torch.nn as nn
import torch.nn.functional as F
def dice_loss(input, target):
    """Mean soft-Dice loss between sigmoid(input) and a binary target.

    4-D input is treated as (B, C, H, W) with one Dice term per
    (sample, channel); 3-D input as (B, H, W) with one term per sample.
    Returns a scalar tensor: mean over rows of
    1 - (2*intersection + 1) / (|p| + |t| + 1).
    """
    smooth = 1.0
    probs = torch.sigmoid(input)
    if probs.dim() == 4:
        rows = probs.size(0) * probs.size(1)   # one row per (sample, channel)
    else:
        assert probs.dim() == 3
        rows = probs.size(0)                   # one row per sample
    p = probs.view(rows, -1)
    t = target.view(rows, -1)
    inter = (p * t).sum(dim=1)
    denom = p.sum(dim=1) + t.sum(dim=1) + smooth
    return (1 - (2.0 * inter + smooth) / denom).mean()
class DiceLoss(nn.Module):
    """nn.Module wrapper delegating to the functional `dice_loss`."""

    def __init__(self):
        super().__init__()

    def forward(self, input, target):
        """Return the soft-Dice loss for this batch."""
        return dice_loss(input, target)
|
import os, shutil, glob
def recursive_copy_files(source_path, destination_path, override=False):
    """
    Recursive copies files from source to destination directory.
    :param source_path: source directory
    :param destination_path: destination directory
    :param override: if True all files will be overridden otherwise skip if file exists
    :return: count of copied files
    Source: https://stackoverflow.com/questions/3397752/copy-multiple-files-in-python
    """
    files_count = 0
    if not os.path.exists(destination_path):
        # makedirs (vs mkdir) also creates missing intermediate directories.
        os.makedirs(destination_path)
    # os.path.join keeps the glob pattern portable across path separators.
    for item in glob.glob(os.path.join(source_path, '*')):
        # Bug fix: item.split('/')[-1] broke on Windows paths; use the
        # portable basename instead.
        target = os.path.join(destination_path, os.path.basename(item))
        if os.path.isdir(item):
            files_count += recursive_copy_files(source_path=item, destination_path=target, override=override)
        else:
            if not os.path.exists(target) or override:
                shutil.copyfile(item, target)
                files_count += 1
    return files_count
import camera

if __name__ == "__main__":
    # Grab a single frame via the project-local camera module.
    camera.capture_single_image()
|
import random
from collections import defaultdict
from math import sqrt

import numpy as np
import tensorflow as tf
from tensorflow import set_random_seed

from base.DeepRecommender import DeepRecommender
set_random_seed(2)
class LightGCN(DeepRecommender):
    """LightGCN-style recommender: layer-wise neighborhood propagation over
    the user-item interaction graph, trained with BPR-like pairwise loss
    (TensorFlow 1.x graph API)."""

    def __init__(self,conf,trainingSet=None,testSet=None,fold='[1]'):
        super(LightGCN, self).__init__(conf,trainingSet,testSet,fold)

    def initModel(self):
        """Build the propagation graph and the embedding lookup ops."""
        super(LightGCN, self).initModel()
        self.negativeCount = 5  # negative samples drawn per positive pair
        # userListen[user][track] = play count for that (user, track) pair
        self.userListen = defaultdict(dict)
        for entry in self.data.trainingData:
            if entry['track'] not in self.userListen[entry['user']]:
                self.userListen[entry['user']][entry['track']] = 0
            self.userListen[entry['user']][entry['track']] += 1
        print('training...')
        # Stack user and item embeddings into one (m+n, d) matrix.
        ego_embeddings = tf.concat([self.U, self.V], axis=0)
        # Symmetric adjacency indices: user->item and item->user edges.
        indices = [[self.data.getId(item['user'], 'user'), self.m + self.data.getId(item['track'], 'track')] for item in self.data.trainingData]
        indices += [[self.m + self.data.getId(item['track'], 'track'), self.data.getId(item['user'], 'user')] for item in self.data.trainingData]
        # values = [float(self.userListen[item['user']][item['track']]) / sqrt(len(self.data.userRecord[item['user']])) / sqrt(len(self.data.trackRecord[item['track']])) for item in self.data.trainingData]*2
        # Edge weights = raw play counts (duplicated for both edge directions).
        values = [float(self.userListen[item['user']][item['track']]) for item in self.data.trainingData]*2
        norm_adj = tf.SparseTensor(indices=indices, values=values, dense_shape=[self.m+self.n, self.m+self.n])
        self.n_layers = 3
        all_embeddings = [ego_embeddings]
        for k in range(self.n_layers):
            # One propagation step: aggregate weighted neighbor embeddings.
            ego_embeddings = tf.sparse_tensor_dense_matmul(norm_adj, ego_embeddings)
            # normalize the distribution of embeddings.
            norm_embeddings = tf.nn.l2_normalize(ego_embeddings, axis=1)
            all_embeddings += [norm_embeddings]
        # Sum (not mean) over layers, then split back into user/item parts.
        all_embeddings = tf.reduce_sum(all_embeddings, axis=0)
        self.multi_user_embeddings, self.multi_item_embeddings = tf.split(all_embeddings, [self.m, self.n], 0)
        self.neg_idx = tf.placeholder(tf.int32, name="neg_holder")
        self.neg_item_embedding = tf.nn.embedding_lookup(self.multi_item_embeddings, self.neg_idx)
        self.u_embedding = tf.nn.embedding_lookup(self.multi_user_embeddings, self.u_idx)
        self.v_embedding = tf.nn.embedding_lookup(self.multi_item_embeddings, self.v_idx)
        # Scores of every item for the fed user(s); used by predict().
        self.test = tf.reduce_sum(tf.multiply(self.u_embedding, self.multi_item_embeddings), 1)

    def next_batch_pairwise(self):
        """Yield (user, positive item, negative item) index batches.

        For each positive pair, negativeCount random unlistened items are
        sampled with rejection.
        """
        batch_id = 0
        while batch_id < self.train_size:
            if batch_id + self.batch_size <= self.train_size:
                users = [self.data.trainingData[idx]['user'] for idx in range(batch_id, self.batch_size + batch_id)]
                items = [self.data.trainingData[idx]['track'] for idx in range(batch_id, self.batch_size + batch_id)]
                batch_id += self.batch_size
            else:
                users = [self.data.trainingData[idx]['user'] for idx in range(batch_id, self.train_size)]
                items = [self.data.trainingData[idx]['track'] for idx in range(batch_id, self.train_size)]
                batch_id = self.train_size
            u_idx, i_idx, j_idx = [], [], []
            for i, user in enumerate(users):
                for j in range(self.negativeCount):
                    item_j = random.randint(0, self.n-1)
                    # Rejection sampling: keep drawing until the item is one
                    # the user has not listened to.
                    while self.data.id2name['track'][item_j] in self.userListen[user]:
                        item_j = random.randint(0, self.n-1)
                    u_idx.append(self.data.getId(user, 'user'))
                    i_idx.append(self.data.getId(items[i], 'track'))
                    j_idx.append(item_j)
            yield u_idx, i_idx, j_idx

    def buildModel(self):
        """Define the BPR loss and run the Adam training loop."""
        # BPR margin: positive score minus negative score.
        y = tf.reduce_sum(tf.multiply(self.u_embedding, self.v_embedding), 1) \
            - tf.reduce_sum(tf.multiply(self.u_embedding, self.neg_item_embedding), 1)
        loss = -tf.reduce_sum(tf.log(tf.sigmoid(y))) + self.regU * (tf.nn.l2_loss(self.u_embedding) +
                                                                    tf.nn.l2_loss(self.v_embedding) +
                                                                    tf.nn.l2_loss(self.neg_item_embedding))
        opt = tf.train.AdamOptimizer(self.lRate)
        train = opt.minimize(loss)
        init = tf.global_variables_initializer()
        self.sess.run(init)
        for iteration in range(self.maxIter):
            for n, batch in enumerate(self.next_batch_pairwise()):
                user_idx, i_idx, j_idx = batch
                _, l = self.sess.run([train, loss],
                                     feed_dict={self.u_idx: user_idx, self.neg_idx: j_idx, self.v_idx: i_idx})
                print('training:', iteration + 1, 'batch', n, 'loss:', l)

    def predict(self, u):
        'invoked to rank all the items for the user'
        if self.data.contains(u, 'user'):
            uid = self.data.name2id['user'][u]
            return self.sess.run(self.test, feed_dict={self.u_idx: [uid]})
        else:
            # Cold-start fallback; requires numpy (np) at module level.
            # NOTE(review): self.normalized_U / normalized_V are not defined
            # in this class — presumably set by DeepRecommender; confirm.
            uid = self.data.getId(u,'user')
            return np.divide(self.V.dot(self.U[uid]), self.normalized_U[uid]*self.normalized_V)
|
import sys
from PyQt5.QtWidgets import QApplication, QWidget, QSlider, QPushButton, QLabel
from PyQt5.QtWidgets import QFileDialog
from PyQt5.QtGui import QPixmap, QColor, QPainter
from PIL import Image
class Example(QWidget):
    """Widget that draws a smiley face whose size and mood grow with a
    vertical slider ("growth of good mood")."""

    def __init__(self):
        super().__init__()
        self.initUI()

    def initUI(self):
        """Create the window and the 0-100 vertical slider."""
        self.setGeometry(500, 200, 500, 500)
        self.setWindowTitle('Рост хорошего настроения')
        # Painting is suppressed until the slider first changes.
        self.do_paint = False
        self.sld = QSlider(self)
        self.sld.move(25, 50)
        self.sld.resize(25, 400)
        self.sld.setMaximum(100)
        self.sld.setMinimum(0)
        self.sld.setValue(0)
        self.sld.valueChanged.connect(self.paint)

    def paint(self):
        """Slider handler: enable drawing and request a repaint."""
        self.do_paint = True
        self.repaint()

    def paintEvent(self, event):
        """Draw the face scaled by the slider value v (0..100): head, two
        eyes, and a mouth that morphs from frown to smile as v grows."""
        if self.do_paint:
            v = self.sld.value()
            qp = QPainter()
            qp.begin(self)
            qp.setPen(QColor(255, 0, 0))
            # NOTE(review): drawEllipse is passed float sizes (3.75 * v);
            # PyQt5's int overload may raise TypeError — confirm on the
            # target PyQt version.
            qp.drawEllipse(100, 62, 3.75 * v, 3.75 * v)
            qp.drawEllipse(100 + 3.75 // 3 * v, 62 + 3.75 // 2 * v, 0.5 * v, 0.5 * v)
            qp.drawEllipse(100 + 3.75 // 3 * v + 1.25 * v, 62 + 3.75 // 2 * v, 0.5 * v, 0.5 * v)
            # Mouth shape depends on the mood level (angles are in 1/16 deg).
            if v < 25:
                qp.drawArc(100 + 3.75 * v // 2 - v, 62 + 2.5 * v, 2 * v, v, 0, 3000)
            elif 25 <= v <= 50:
                qp.drawLine(100 + 0.75 * v, 62 + 2.82 * v, 100 + 3 * v, 62 + 2.82 * v)
            elif 50 <= v <= 75:
                qp.drawArc(100 + 3.75 * v // 2 - 0.5 * v, 62 + 2 * v, 2 * v, v, -1500, 2000)
            else:
                qp.drawArc(100 + 3.75 * v // 2 - v, 62 + 2 * v, 2 * v, v, -2900, 3000)
            qp.end()
if __name__ == '__main__':
    # Standard Qt bootstrap: create the app, show the widget, enter the loop.
    app = QApplication(sys.argv)
    ex = Example()
    ex.show()
    sys.exit(app.exec())
|
# This is on the handle_file branch.
# Python 2 script: average the numbers in a file named on the command line.

import sys

# Must have at least one value.
if len(sys.argv) == 1:
    print 'Error: No arguments given.'
    exit()

# Sum the numbers read from the file (one number per line).
# NOTE(review): the file handle is never closed; sum shadows the builtin.
n = 0
sum = 0
for num in open(sys.argv[1]):
    sum += float(num)
    n += 1
print sum / n
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-11 07:37
from __future__ import unicode_literals
from django.db import migrations
from elections.constants import ELECTION_TYPES
def add_initial_election_types(apps, schema_editor):
    """Seed ElectionType and ElectionSubType rows from ELECTION_TYPES.

    Idempotent: uses update_or_create so re-running refreshes names.
    """
    type_model = apps.get_model("elections", "ElectionType")
    subtype_model = apps.get_model("elections", "ElectionSubType")
    for code, spec in ELECTION_TYPES.items():
        parent, _ = type_model.objects.update_or_create(
            election_type=code, defaults={"name": spec["name"]}
        )
        for sub in spec["subtypes"]:
            subtype_model.objects.update_or_create(
                election_type=parent,
                election_subtype=sub["election_subtype"],
                defaults={"name": sub["name"]},
            )
def remove_initial_election_types(apps, schema_editor):
    """Reverse migration: delete the ElectionType rows seeded above."""
    type_model = apps.get_model("elections", "ElectionType")
    seeded = type_model.objects.filter(election_type__in=list(ELECTION_TYPES))
    seeded.delete()
class Migration(migrations.Migration):
    """Data migration seeding the election type/subtype tables (reversible)."""

    dependencies = [("elections", "0002_auto_20161011_0741")]

    operations = [
        migrations.RunPython(
            add_initial_election_types, remove_initial_election_types
        )
    ]
|
def oddTuples(aTup):
    """Return a tuple containing every other element of aTup, starting at
    index 0.

    :param aTup: a tuple
    :return: tuple of aTup[0], aTup[2], aTup[4], ...
    """
    # Extended slicing handles empty and single-element tuples naturally;
    # the unused `b` and `length` locals were removed.
    return aTup[::2]
# Demo (Python 2 print statement): expected output ('I', 'a', 'tuple').
Tup = ('I', 'am', 'a', 'test', 'tuple')
print oddTuples(Tup)
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""End-to-end test for horizontal resharding workflow."""
import re
# "unittest" is used indirectly by importing "worker", but pylint does
# not grasp this.
# Import it explicitly to make pylint happy and stop it complaining about
# setUpModule, tearDownModule and the missing module docstring.
import unittest # pylint: disable=unused-import
import utils
import worker
def setUpModule():
    """Start the worker test environment plus a vtctld instance.

    If anything fails to start, tear everything back down before
    re-raising so a half-started module doesn't leak processes.
    """
    try:
        worker.setUpModule()
        utils.Vtctld().start()
    except:  # bare except is deliberate: clean up on ANY error, then re-raise
        tearDownModule()
        raise
def tearDownModule():
    """Tear down the environment created by worker.setUpModule()."""
    worker.tearDownModule()
class TestHorizontalReshardingWorkflow(worker.TestBaseSplitClone):
  """End-to-end test for horizontal resharding workflow.

  This test reuses worker.py, which sets up the environment.
  """
  KEYSPACE = 'test_keyspace'

  def test_successful_resharding(self):
    """Reshard from 1 to 2 shards by running the workflow."""
    worker_proc, _, worker_rpc_port = utils.run_vtworker_bg(
        ['--cell', 'test_nj', '--use_v3_resharding_mode=false'], auto_log=True)
    vtworker_endpoint = 'localhost:%d' % worker_rpc_port
    stdout = utils.run_vtctl(['WorkflowCreate', 'horizontal_resharding',
                              '-keyspace=test_keyspace',
                              '-vtworkers=%s' % vtworker_endpoint,
                              '-enable_approvals=false'],
                             auto_log=True)
    # WorkflowCreate prints "uuid: <uuid>"; capture it to wait on below.
    workflow_uuid = re.match(r'^uuid: (.*)$', stdout[0]).group(1)
    utils.pause('Now is a good time to look at vtctld UI at: '
                '%s, workflow uuid=%s' % (utils.vtctld.port, workflow_uuid))
    utils.run_vtctl(['WorkflowWait', workflow_uuid])
    self.verify()
    utils.kill_sub_process(worker_proc, soft=True)

  def verify(self):
    """Check data placement and query-service state after the reshard."""
    self.assert_shard_data_equal(0, worker.shard_master,
                                 worker.shard_0_tablets.replica)
    self.assert_shard_data_equal(1, worker.shard_master,
                                 worker.shard_1_tablets.replica)
    # Verify effect of MigrateServedTypes. Dest shards are serving now.
    utils.check_srv_keyspace('test_nj', self.KEYSPACE,
                             'Partitions(master): -80 80-\n'
                             'Partitions(rdonly): -80 80-\n'
                             'Partitions(replica): -80 80-\n')
    # source shard: query service must be disabled after MigrateServedTypes.
    source_shards = [worker.shard_rdonly1,
                     worker.shard_replica,
                     worker.shard_master]
    for shard in source_shards:
      utils.check_tablet_query_service(
          self, shard, serving=False, tablet_control_disabled=True)
    # dest shard -80, 80-: query service must be enabled
    # after MigrateServedTypes.
    dest_shards = [worker.shard_0_rdonly1,
                   worker.shard_0_replica,
                   worker.shard_0_master,
                   worker.shard_1_rdonly1,
                   worker.shard_1_replica,
                   worker.shard_1_master]
    for shard in dest_shards:
      utils.check_tablet_query_service(
          self,
          shard,
          serving=True,
          tablet_control_disabled=False)
if __name__ == '__main__':
  # Delegate to the shared runner, reusing worker.py's CLI options.
  utils.main(test_options=worker.add_test_options)
|
#!/bin/python3
import math
import os
import random
import re
import sys
# Complete the countApplesAndOranges function below.
def countApplesAndOranges(s, t, a, b, apples, oranges):
    """Print how many apples and oranges land on the house segment [s, t].

    a and b are the apple/orange tree positions; apples and oranges are
    the fall distances relative to each tree. Output: two lines, the
    apple count then the orange count.
    """
    # Landing position = tree position + fall distance; count inclusive hits.
    apple_hits = sum(1 for fall in apples if s <= a + fall <= t)
    orange_hits = sum(1 for fall in oranges if s <= b + fall <= t)
    print(apple_hits)
    print(orange_hits)
def detectionAppleOrOrangeByLimit(arr, lowerLimit, upperLimit):
    """Return the elements of arr lying inside [lowerLimit, upperLimit]."""
    return [value for value in arr if lowerLimit <= value <= upperLimit]
def calculateTheArr(arr, val):
    """Return a new list with val added to every element of arr."""
    return [element + val for element in arr]
if __name__ == '__main__':
    # Read: s t (house span), a b (tree positions), m n (fruit counts),
    # then the two lists of fall distances.
    st = input().split()
    s = int(st[0])
    t = int(st[1])
    ab = input().split()
    a = int(ab[0])
    b = int(ab[1])
    mn = input().split()
    m = int(mn[0])
    n = int(mn[1])  # NOTE(review): m and n are read but never used
    apples = list(map(int, input().rstrip().split()))
    oranges = list(map(int, input().rstrip().split()))
    countApplesAndOranges(s, t, a, b, apples, oranges)
|
import time

# Interactive demo: check whether a user-supplied number is in a fixed list.
list_1=[1, 5, 8, 3]
x=int(input("Please enter the number you want to check in the list :"))
time.sleep(1)  # purely cosmetic pause
print("Checking ... ")
time.sleep(1)
if (x in list_1):
    print(x,"is there in the list")
else:
    print(x,"is not there in the list")
|
#!/usr/bin/env python
import os
top = '.'      # waf: project top-level directory
out = 'build'  # waf: build output directory
def options(opt):
    """waf options step: register --double and load the platform compiler tool."""
    opt.add_option('--double', action='store_true', default=False, help='Double precision instead of float')
    if os.name != 'nt':
        opt.load('compiler_c')
    else:
        opt.load('msvc')
def configure(conf):
    """waf configure step: persist the precision choice and load the compiler."""
    conf.env.DOUBLE = conf.options.double
    if os.name != 'nt':
        conf.load('compiler_c')
    else:
        conf.load('msvc')
def build(ctx):
    """waf build step: compile the SPTK objects and link the vclib shared lib.

    -fPIC is needed for shared objects on non-Windows toolchains; -DDOUBLE
    switches the SPTK build to double precision.
    """
    _cflg = ' '.join(['' if os.name == 'nt' else '-fPIC', '-DDOUBLE' if ctx.env.DOUBLE else ''])
    _defs = '' if os.name != 'nt' else 'vclib.def'
    ctx.objects(source='SPTK/bin/fft/_fft.c SPTK/lib/getmem.c SPTK/lib/movem.c SPTK/lib/fillz.c SPTK/bin/mgcep/_mgcep.c SPTK/bin/mlsadf/_mlsadf.c SPTK/bin/b2mc/_b2mc.c SPTK/bin/gc2gc/_gc2gc.c SPTK/bin/gnorm/_gnorm.c SPTK/bin/ifftr/_ifftr.c SPTK/bin/fftr/_fftr.c SPTK/bin/ifft/_ifft.c SPTK/bin/ignorm/_ignorm.c SPTK/bin/mc2b/_mc2b.c SPTK/lib/theq.c SPTK/bin/mgc2sp/_mgc2sp.c SPTK/bin/c2sp/_c2sp.c SPTK/bin/mgc2mgc/_mgc2mgc.c SPTK/bin/freqt/_freqt.c', includes='SPTK/include', target='sptk', features='c', cflags=_cflg)
    # Bug fix: 'clags' was a typo for 'cflags', so the shared library was
    # silently built without the PIC/precision flags.
    ctx.shlib(source='VCLib.c', includes='SPTK/include ../include', use='sptk', target='vclib_{}'.format('double' if ctx.env.DOUBLE else 'float'), features='c', cflags=_cflg, defs=_defs)
|
from django.apps import AppConfig
class AndelaSocialsConfig(AppConfig):
    """Django application configuration for the andela_socials app."""
    name = 'andela_socials'
|
"""Kerberos related CLI tools.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import click
from treadmill import cli
def init():
    """Return top level command handler."""
    @click.group(cls=cli.make_commands(__name__))
    @click.pass_context
    def run(_ctxp):
        """Manage Kerberos tickets."""
    # The decorated group is the CLI entry point handed back to the caller.
    return run
|
import numpy as np
import pandas as pd
from modules import metrics
# a. Regressão Linear Univariada - Método Analítico
class LRAnalyticalMethod():
    """Univariate linear regression solved with the closed-form (analytical) method."""

    def __init__(self):
        pass

    def fit(self, X, y):
        """Estimate intercept (b_0) and slope (b_1) from the training data."""
        n = len(X)
        x_bar, y_bar = np.mean(X), np.mean(y)
        # Centered cross products from the normal equations.
        s_xy = (np.sum(y * X)) - (n * y_bar * x_bar)
        s_xx = (np.sum(X * X)) - (n * x_bar * x_bar)
        slope = s_xy / s_xx
        self.b_1 = slope
        self.b_0 = y_bar - (slope * x_bar)

    def predict(self, X):
        """Return the fitted values b_0 + b_1 * X."""
        return self.b_0 + self.b_1 * X

    def coef_(self):
        """Return [intercept, slope]."""
        return [self.b_0, self.b_1]
# b. Regressão Linear Univariada - Gradiente Descendente
class LRGradientDescent():
    """Univariate linear regression fitted by batch gradient descent."""

    def __init__(self):
        pass

    def fit(self, X, y, epochs, learning_rate):
        """Run `epochs` full-batch update steps, recording the MSE per epoch."""
        b_0, b_1 = 0, 0          # both coefficients start at zero
        n = len(X)               # number of observations
        cost = np.zeros(epochs)
        for epoch in range(epochs):
            y_pred = b_0 + b_1 * X
            residual = y - y_pred
            # Gradients of the mean squared error w.r.t. each coefficient.
            grad_0 = (1/n) * sum(residual)
            grad_1 = (1/n) * sum(residual * X)
            b_0 = b_0 + learning_rate * grad_0
            b_1 = b_1 + learning_rate * grad_1
            cost[epoch] = metrics.MSE(y, y_pred)
        self.b_0 = b_0
        self.b_1 = b_1
        self.cost = cost

    def predict(self, X):
        """Return the fitted values b_0 + b_1 * X."""
        return self.b_0 + self.b_1 * X

    def coef_(self):
        """Return [intercept, slope]."""
        return [self.b_0, self.b_1]

    def cost_history(self):
        """Per-epoch MSE, useful for choosing a good number of epochs."""
        return self.cost
# c. Regressão Linear Multivariada - Método Analítico
class MLRAnalyticalMethod():
    """Multivariate linear regression via the normal equations (pseudo-inverse)."""

    def __init__(self):
        pass

    def fit(self, X, y):
        """Solve B = (X'X)^+ X'y using a design matrix with an intercept column."""
        design = np.c_[np.ones(X.shape[0]), X]
        self.B = np.linalg.pinv(design.T @ design) @ design.T @ y

    def predict(self, X):
        """Return fitted values for X (intercept column added automatically)."""
        design = np.c_[np.ones(X.shape[0]), X]
        return design @ self.B

    def coef_(self):
        """Return the coefficient vector, intercept first."""
        return self.B
# d. Regressão Linear Multivariada - Gradiente Descendente
class MLRGradientDescent():
    """Multivariate linear regression fitted by batch gradient descent."""

    def __init__(self):
        pass

    def fit(self, X, y, epochs, learning_rate):
        """Run `epochs` full-batch update steps on the coefficient vector B."""
        n, p = X.shape[0], X.shape[1]      # samples, features
        design = np.c_[np.ones(n), X]      # prepend the intercept column
        cost = np.zeros(epochs)
        B = np.zeros(p + 1)
        for epoch in range(epochs):
            y_pred = design.dot(B)
            # Gradient of the MSE with respect to B.
            grad = (1/n) * (design.T.dot(y_pred - y))
            B = B - learning_rate * grad
            cost[epoch] = metrics.MSE(y, y_pred)
        self.B = B
        self.cost = cost

    def predict(self, X):
        """Return fitted values for X (intercept column added automatically)."""
        design = np.c_[np.ones(X.shape[0]), X]
        return design.dot(self.B)

    def coef_(self):
        """Return the coefficient vector, intercept first."""
        return self.B

    def cost_history(self):
        """Per-epoch MSE recorded during fitting."""
        return self.cost
# e. Regressão Linear Multivariada - Gradiente Descendente Estocástico
class MLRStochasticGradientDescent():
    """Multivariate linear regression fitted by stochastic gradient descent."""

    def __init__(self):
        pass

    def fit(self, X, y, epochs, learning_rate):
        """Update B once per sample, visiting samples in a fresh random order each epoch."""
        n = X.shape[0]  # number of samples
        p = X.shape[1]  # number of features
        X_ = np.c_[np.ones(n), X]  # prepend intercept column
        cost = np.zeros(epochs)
        B = np.zeros(p + 1)
        for e in range(epochs):
            count = 0.0
            # Shuffle the visiting order so updates are not biased by row order.
            random_permutation = np.random.permutation(n)
            for Xi, yi in zip(X_[random_permutation], y[random_permutation]):
                y_pred = Xi.dot(B)
                B = B - learning_rate * (Xi.T.dot(y_pred - yi))
                # NOTE(review): MSE is computed against the *scalar* prediction
                # of the current sample vs the full y vector, then summed over
                # samples — confirm this is the intended cost bookkeeping.
                count += metrics.MSE(y, y_pred)
            cost[e] = count
        self.B = B
        self.cost = cost

    def predict(self, X):
        """Return fitted values for X (intercept column added automatically)."""
        n = X.shape[0]
        X_ = np.c_[np.ones(n), X]
        return X_.dot(self.B)

    def coef_(self):
        """Return the coefficient vector, intercept first."""
        return self.B

    def cost_history(self):
        """Per-epoch accumulated cost recorded during fitting."""
        return self.cost
# f. Regressão Quadrática usando Regressão Múltipla
class QuadraticLN():
    """Quadratic regression: square the inputs and delegate to MLRAnalyticalMethod."""

    def __init__(self):
        pass

    def fit(self, X, y):
        """Fit a linear model on the squared features."""
        self.MLR = MLRAnalyticalMethod()
        transformed = X**2
        self.MLR.fit(transformed, y)

    def predict(self, X):
        """Predict using the squared features."""
        transformed = X**2
        return self.MLR.predict(transformed)

    def coef_(self):
        """Return the coefficients of the underlying linear model."""
        return self.MLR.coef_()
# g. Regressão Cúbica usando Regressão Múltipla
class CubicLN():
    """Cubic regression: cube the inputs and delegate to MLRAnalyticalMethod."""

    def __init__(self):
        pass

    def fit(self, X, y):
        """Fit a linear model on the cubed features."""
        self.MLR = MLRAnalyticalMethod()
        transformed = X**3
        self.MLR.fit(transformed, y)

    def predict(self, X):
        """Predict using the cubed features."""
        transformed = X**3
        return self.MLR.predict(transformed)

    def coef_(self):
        """Return the coefficients of the underlying linear model."""
        return self.MLR.coef_()
# h. Regressão Linear Regularizada Multivariada - Gradiente Descendente
class MLRRegularized():
    """Regularized (ridge-style) multivariate linear regression via gradient descent."""

    def __init__(self):
        pass

    def fit(self, X, y, epochs, learning_rate, lamb):
        """Gradient descent with an L2 term weighted by `lamb`; the intercept is tracked separately so it is not regularized."""
        n = X.shape[0]  # number of samples
        p = X.shape[1]  # number of features
        X_ = np.c_[np.ones(n), X]  # prepend intercept column
        cost = np.zeros(epochs)
        B = np.zeros(p + 1)
        b_0 = 0  # unregularized intercept, updated on its own
        for e in range(epochs):
            y_pred = X_.dot(B)
            D_0 = (1/n) * sum(y - y_pred)
            b_0 = b_0 + learning_rate * D_0
            # NOTE(review): for a standard L2 penalty the regularization term is
            # usually *added* to the gradient (+ lamb/n * B); here it is
            # subtracted — verify the intended sign.
            D = ((1/n) * X_.T.dot(y_pred - y)) - (lamb/n * B)
            B = B - learning_rate * D
            cost[e] = metrics.MSE(y, y_pred)
        # The separately-tracked intercept overwrites the regularized B[0].
        B[0] = b_0
        self.B = B
        self.cost = cost

    def predict(self, X):
        """Return fitted values for X (intercept column added automatically)."""
        n = X.shape[0]
        X_ = np.c_[np.ones(n), X]
        return X_.dot(self.B)

    def coef_(self):
        """Return the coefficient vector, intercept first."""
        return self.B

    def cost_history(self):
        """Per-epoch MSE recorded during fitting."""
        return self.cost
# exceptions.py
class DataFormatError(Exception):
    """Raised when a time series data file or header file violates the required format."""

    def __init__(self, value):
        # Keep the payload on `value` (not Exception.args) for compatibility.
        self.value = value

    def __str__(self):
        return "%s" % (self.value,)
class TimeSeriesFileNameError(Exception):
    """Raised when the data files and the header file's list of file names disagree."""

    def __init__(self, value):
        # Keep the payload on `value` (not Exception.args) for compatibility.
        self.value = value

    def __str__(self):
        return "%s" % (self.value,)
|
from django.apps import AppConfig
class BooksliceConfig(AppConfig):
    """Django application configuration for the bookSlice app."""
    name = 'bookSlice'
|
'''
About: -Recieve family data in CSV format, save data about each family member and output a print version of the family tree
-Names can be added individually as well
Family Tree Print Output:
Parent
Child (Parent to Sub-Child)
Sub-Child
Child
Note: doesn't handle duplicate names, could generate unique ID's for this
'''
import datetime
import pandas as pd
#########################
#Get family data from csv file
#########################
def getFamilyData(famTree, path = r'Family_Members.csv'):
    """Populate *famTree* with Person entries read from a CSV file.

    Expected columns: Name, Date of Birth, Date of Death, Relation,
    Parent Name; dates are formatted 'day/month/year'.
    """
    df = pd.read_csv(path, sep=',', skiprows=1)  # first row holds headers
    for index, row in df.iterrows():
        name = row["Name"].split()
        dateOfBirth = list(map(int, row["Date of Birth"].split("/")))
        if not pd.isna(row["Date of Death"]):
            # BUG FIX: previously re-parsed "Date of Birth" here, so every
            # deceased member's death date silently equaled their birth date.
            dateOfDeath = list(map(int, row["Date of Death"].split("/")))
        else:
            dateOfDeath = [0, 0, 0]  # sentinel meaning "still alive"
        famTree.addMember(Person(name[0], name[1], dateOfBirth, dateOfDeath))
        if row["Relation"].lower() == "parent":
            famTree.addParent(famTree.getMemeber(row["Name"]))
        if row["Relation"].lower() == "children":
            famTree.addChild(famTree.getMemeber(row["Name"]), famTree.getMemeber(row["Parent Name"]))
class FamilyTree:
    """Holds family members plus parent->children relations and prints the tree."""

    def __init__(self):
        # BUG FIX: these were class attributes, so every FamilyTree instance
        # shared a single tree and member list; they are now per-instance.
        self.tree = dict()     # member -> list of that member's children
        self.treeList = []     # all registered members
        self.indexOldest = 0
        self.headPerson = ""   # oldest person, the root of the printed tree

    def addMember(self, newMember):
        """Register a member without linking it into the tree yet."""
        self.treeList.append(newMember)

    def getMemeber(self, findName):
        """Return the member whose str() equals findName, or None.

        (Original misspelled name kept for backward compatibility;
        see getMember for the correctly-spelled alias.)
        """
        for person in self.treeList:
            if findName == str(person):
                return person
        return None

    def getMember(self, findName):
        """Correctly-spelled alias for getMemeber."""
        return self.getMemeber(findName)

    def addChild(self, child, parent):
        """Attach child under parent, creating the parent's entry if needed."""
        if parent in self.tree:
            self.tree[parent].append(child)
        else:
            self.tree[parent] = [child]

    def addParent(self, parent):
        """Make parent the root of the tree (with no children yet)."""
        self.headPerson = parent
        self.tree[parent] = []

    def printSubTree(self, parent, tabs=""):
        """Recursively print parent and its descendants, indenting each level."""
        print(tabs + str(parent))
        for child in self.tree[parent]:
            if child in self.tree:
                self.printSubTree(child, tabs + " ")
            else:
                print(tabs + " " + str(child))

    def printFullTree(self):
        """Print the whole tree starting from the head person."""
        self.printSubTree(self.headPerson)
class Person:
    """A family member with a name and (day, month, year) birth/death dates."""

    def __init__(self, fname, lname, dateofbirth, dateofdeath=(0, 0, 0)):
        # BUG FIX: the default death date was a shared mutable list; a tuple
        # default avoids accidental cross-instance aliasing. (0, 0, 0) is the
        # sentinel meaning "still alive".
        self.fname = fname
        self.lname = lname
        self.birthDay, self.birthMonth, self.birthYear = dateofbirth
        self.deathDay, self.deathMonth, self.deathYear = dateofdeath

    def __str__(self):
        return " ".join([self.fname, self.lname])

    def getID(self):
        # NOTE(review): self.ID is never assigned anywhere in this file, so
        # calling this raises AttributeError; kept for interface
        # compatibility — TODO: generate unique IDs (see module docstring).
        return self.ID

    def dateOfBirth(self):
        """Return the birth date formatted as 'day/month/year'."""
        return '/'.join(str(i) for i in [self.birthDay, self.birthMonth, self.birthYear])

    def dateOfDeath(self):
        """Return the death date, or a message if the person is alive."""
        if self.deathDay != 0:
            return '/'.join(str(i) for i in [self.deathDay, self.deathMonth, self.deathYear])
        else:
            return self.__str__() + " is not dead"

    def addDeathDate(self, day, month, year):
        """Record the death date."""
        self.deathDay, self.deathMonth, self.deathYear = day, month, year

    def checkIfDead(self):
        """True if the recorded death date lies in the past."""
        d1 = datetime.datetime(self.deathYear, self.deathMonth, self.deathDay)
        return d1 < datetime.datetime.today()
if __name__ == "__main__":
    # Load the family from Family_Members.csv, print one branch, then the
    # whole tree.
    famTree = FamilyTree()
    getFamilyData(famTree)
    famTree.printSubTree(famTree.getMemeber("Ted Bing"))  # print sub tree only
    print("\n \n")
    famTree.printFullTree()
|
import datetime
from functools import reduce
from django.contrib.auth.views import login as contrib_login
from django.shortcuts import render, redirect, get_object_or_404
from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
from django.db.models import Sum, Case, When, F, Q, IntegerField, ExpressionWrapper
from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from rank import DenseRank, UpperRank, Rank
from django.utils.timezone import get_default_timezone
from .models import *
def error_page(request):
    """Render the generic course error page."""
    context = {'request': request}
    return render(request, 'course/error_page.html', context)
def index(request):
    """Send visitors from the site root straight to the login page."""
    return redirect('/login/')
def login(request, **kwargs):
    """Show the login page, or send an already-authenticated user to /classes/."""
    if request.user.is_authenticated():
        return redirect('/classes/')
    else:
        # Tell the login template whether outgoing e-mail (password reset,
        # etc.) is configured for this deployment.
        email_info = {
            'is_email_configured': settings.EMAIL_HOST != ''
        }
        return contrib_login(request, extra_context=email_info, **kwargs)
@login_required(login_url='/login/')
def classes(request):
    """List the user's classes, or jump straight in when there is only one.

    Students see classes they are enrolled in; instructors see classes they
    teach; any other authenticated user sees an empty list.
    """
    today = datetime.date.today()  # NOTE(review): unused here — the filter helpers compute their own "today"
    if hasattr(request.user, 'student'):
        student = request.user.student
        all_classes = CourseClass.objects.filter(
            enrollment__student=student
        )
    elif hasattr(request.user, 'instructor'):
        instructor = request.user.instructor
        all_classes = CourseClass.objects.filter(
            classinstructor__instructor=instructor
        )
    else:
        all_classes = CourseClass.objects.none()
    if len(all_classes) == 1:
        # Exactly one class: skip the chooser and go directly to it.
        course_class = all_classes[0]
        return redirect('/%s/%s/' %(course_class.course.code, course_class.code))
    else:
        return render(
            request,
            'course/classes.html',
            {
                'past_classes': filter_past_classes(all_classes),
                'current_classes': filter_current_classes(all_classes),
                'future_classes': filter_future_classes(all_classes),
            }
        )
@login_required(login_url='/login/')
def home(request, course_code, class_code):
    """Class home page: ranking, posts and widgets.

    Resolves the viewer first as a student (via enrollment), then as an
    instructor; raises 404 when neither applies. Students only see
    published, non-draft posts.
    """
    try:
        enrollment = get_enrollment(request.user.id, course_code, class_code)
        course_class = enrollment.course_class
        student_id = enrollment.student_id
    except ObjectDoesNotExist:
        try:
            class_instructor = get_class_instructor(request, course_code, class_code)
            course_class = class_instructor.course_class
            student_id = None  # instructors have no own ranking entry
        except ObjectDoesNotExist:
            raise Http404("Student/instructor not found")
    ranking = get_ranking_data(course_class, course_class.ranking_size)
    posts = Post.objects.filter(
        course_class=course_class
    ).order_by(
        '-is_pinned_to_the_top', '-post_datetime'
    )
    widgets = Widget.objects.filter(
        course_class=course_class
    ).order_by('order')
    if hasattr(request.user, 'student'):
        # Hide scheduled (future) and draft posts from students.
        posts = posts.filter(
            post_datetime__lte=datetime.datetime.now(),
            is_draft=False,
        )
    return render(
        request,
        'course/class.html',
        {
            'active_tab': 'home',
            'course_class': course_class,
            'ranking_size': course_class.ranking_size,
            'ranking': ranking,
            'student_id': student_id,
            'posts': posts,
            'widgets': widgets
        }
    )
@login_required(login_url='/login/')
def assignments (request, course_code, class_code, student_id=None):
    """Assignments page, for a student's own grades or an instructor's view.

    Students see their own enrollment; instructors additionally get the
    class roster and may select a student via `student_id`.
    """
    try:
        enrollment = get_enrollment(request.user.id, course_code, class_code)
        course_class = enrollment.course_class
        students_data = None  # students never see the roster
    except ObjectDoesNotExist:
        try:
            class_instructor = get_class_instructor(request, course_code, class_code)
            course_class = class_instructor.course_class
            students_data = get_students_data(course_class)
            try:
                # Instructor may be inspecting a specific student's grades.
                enrollment = Enrollment.objects.get(student_id=student_id, course_class=course_class)
            except ObjectDoesNotExist:
                enrollment = None
        except ObjectDoesNotExist:
            raise Http404("Student/instructor not found")
    return render(
        request,
        'course/assignments.html',
        {
            'active_tab': 'assignments',
            'course_class': course_class,
            'enrollment': enrollment,
            'assignment_items_data': get_assignments_data(enrollment),
            'achievements_data': get_achievements_data(enrollment),
            'students_data': students_data,
            'student_id': student_id
        }
    )
def get_enrollment(student_user_id, course_code, class_code):
    """Look up the Enrollment linking a student (by user id) to a class.

    Raises ObjectDoesNotExist when any link in the chain is missing.
    """
    student = Student.objects.get(user_id=student_user_id)
    course = Course.objects.get(code=course_code)
    klass = CourseClass.objects.get(course=course, code=class_code)
    return Enrollment.objects.get(student=student, course_class=klass)
def get_class_instructor(request, course_code, class_code):
    """Look up the ClassInstructor linking the request's user to a class.

    Raises ObjectDoesNotExist when any link in the chain is missing.
    """
    instructor = Instructor.objects.get(user_id=request.user.id)
    course = Course.objects.get(code=course_code)
    klass = CourseClass.objects.get(course=course, code=class_code)
    return ClassInstructor.objects.get(instructor=instructor, course_class=klass)
def filter_past_classes(query):
    """Classes that have already ended, most recently ended first."""
    today = datetime.date.today()
    ordering = ('-end_date', 'course__name', 'code')
    return query.filter(end_date__lt=today).order_by(*ordering).all()
def filter_current_classes(query):
    """Classes currently running (today within start..end), earliest start first."""
    today = datetime.date.today()
    ordering = ('start_date', 'course__name', 'code')
    return query.filter(start_date__lte=today, end_date__gte=today).order_by(*ordering).all()
def filter_future_classes(query):
    """Classes that have not started yet, earliest start first."""
    today = datetime.date.today()
    ordering = ('start_date', 'course__name', 'code')
    return query.filter(start_date__gt=today).order_by(*ordering).all()
def get_ranking_data(course_class, ranking_size):
    """Return the top `ranking_size` students of a class by total score.

    Per-grade contribution: 0 when canceled, the raw score when the task has
    no points, otherwise score * points. Ties are broken alphabetically.
    """
    ranking = Grade.objects.values(
        'enrollment__student__id'
    ).annotate(
        total = Sum(
            Case(
                When(is_canceled=True, then=0),
                When(assignment_task__points=None, then=F('score')),
                default=F('score') * F('assignment_task__points'),
                output_field=IntegerField()
            )
        ),
        full_name = F('enrollment__student__full_name'),
        student_id = F('enrollment__student__id'),
    ).annotate(
        # this "dense_rank" was throwing an error sometimes, randomly
        # it was not finding the previous "total" annotation
        # so I put it in another "annotate" to respect the dependency
        dense_rank = Rank('total'),
    ).filter(
        enrollment__course_class = course_class
    ).order_by('-total', 'full_name')[:ranking_size]
    # print(ranking.query)
    return ranking
def get_students_data(course_class):
    """id/full_name pairs of every student enrolled in course_class, A-Z."""
    students = Student.objects.values('id', 'full_name')
    return students.filter(enrollment__course_class=course_class).order_by('full_name')
def get_assignments_data(enrollment):
    """Build per-assignment display data (tasks, totals, percentage) for one enrollment.

    Returns None when enrollment is None (e.g. an instructor with no student
    selected). Assignments with no tasks, and optional assignments with no
    completed task, are skipped.
    """
    if enrollment is None:
        return None
    points_data = []
    assignments = enrollment.course_class.course.assignment_set.order_by('id').all()
    for assignment in assignments:
        tasks_data = get_tasks_data(assignment, enrollment)
        if not tasks_data:
            continue
        # A task counts as "completed" once it has any grade points recorded.
        is_there_any_task_completed = any(
            task['grade_points'] is not None for task in tasks_data
        )
        if assignment.is_optional and not is_there_any_task_completed:
            continue
        are_all_tasks_optional = all(task['is_optional'] for task in tasks_data)
        if are_all_tasks_optional:
            # No mandatory tasks means there is no meaningful points total.
            total_task_points = None
        else:
            total_task_points = sum(
                task['task_points'] for task in tasks_data
                if not task['is_optional'] and task['task_points'] is not None
            )
        if is_there_any_task_completed:
            # Canceled grades are excluded from the student's total.
            total_grade_points = sum(
                task['grade_points'] for task in tasks_data
                if task['grade_points'] is not None and not task['grade_is_canceled']
            )
        else:
            total_grade_points = None
        if total_grade_points is None or total_task_points is None or total_task_points == 0:
            total_grade_percentage = None
        else:
            total_grade_percentage = round(total_grade_points / total_task_points * 100)
        points_data.append({
            'name': assignment.name,
            'description': assignment.description,
            'tasks': tasks_data,
            'total_task_points': total_task_points,
            'total_grade_points': total_grade_points,
            'total_grade_percentage': total_grade_percentage,
        })
    return points_data
def get_tasks_data(assignment, enrollment):
    """Per-task display data (points, optional flag, grade info) for one assignment."""
    tasks_data = []
    for assignment_task in assignment.ordered_assignment_tasks(enrollment.course_class):
        grade = assignment_task.grade_set.all().filter(enrollment=enrollment).first()
        entry = {
            'name': assignment_task.task.name,
            'task_points': assignment_task.points,
            'is_optional': assignment_task.is_optional,
        }
        if grade is None:
            # No grade recorded yet for this student and task.
            entry['grade_percentage'] = None
            entry['grade_points'] = None
            entry['grade_is_canceled'] = False
        else:
            # Percentage only makes sense when the task carries points.
            entry['grade_percentage'] = round(grade.score * 100) if assignment_task.points is not None else None
            entry['grade_points'] = grade.points
            entry['grade_is_canceled'] = grade.is_canceled
        tasks_data.append(entry)
    return tasks_data
def get_achievements_data(enrollment):
    """Build the badge/achievement display data for one enrollment.

    Returns None when enrollment is None. Unearned badges show a question
    mark icon and, unless the badge allows it, hidden name/description.
    """
    if enrollment is None:
        return None
    achievements_data = []
    for class_badge in enrollment.course_class.classbadge_set.order_by('id').all():
        achievement = class_badge.achievement_set.filter(enrollment=enrollment).first()
        percentage = achievement.percentage if achievement is not None else 0
        earned = percentage == 1
        entry = {
            'percentage': percentage,
            'percentage_integer': int(percentage * 100),
            'show_progress': class_badge.show_progress,
        }
        entry['icon'] = class_badge.badge.icon_url if earned else '/static/course/question-mark.svg'
        if class_badge.show_info_before_completion or earned:
            entry['name'] = class_badge.badge.name
            entry['description'] = class_badge.description
        else:
            entry['name'] = "???"
            entry['description'] = _("(description will show up when you earn this badge)")
        achievements_data.append(entry)
    return achievements_data
import re
# Count how many copies of a number can be assembled from the digits of A,
# treating 6 and 9 as interchangeable (a digit card can be flipped).
A = input()
A = A.replace('6', '9')
N = []
for i in range(ord('0'), ord('9')):  # digits '0'..'8' ('9' handled below)
    N.append(len(re.findall(chr(i), A)))
# Two 6/9 cards cover one needed 6-or-9, hence the division by two.
N.append(len(re.findall('9', A)) / 2)
N.sort(reverse=True)
if N[0] % 1 == 0:
    print(int(N[0]))
else:
    # A fractional 6/9 count needs rounding up; int() truncates, so adding 1
    # to x.5 yields the ceiling.
    print(int(N[0]+1))
# Done
|
""" importing modules and functions from flask
"""
from flask import Flask, render_template, request, session, make_response, redirect
import os #for cryptographic functions
from models.bucketlist import Bucketlist
from models.user import User
import uuid
app = Flask(__name__)
Users = {}
app.secret_key = os.urandom(20)
bucketlists_dict= {}
@app.route('/register/', methods=['GET','POST'])
def register():
    """Render the registration form (GET) or create a new account (POST)."""
    if request.method == 'GET':
        return render_template('register.html')
    if request.method == 'POST':
        username = request.form.get('username')
        email = request.form.get('email')
        password = request.form.get('password')
        Users[email] = [username, email, password]
        print (Users)
        if Users[email]:
            # BUG FIX: previously returned a (body, status) tuple whose
            # "status" was the success message string, which is not a valid
            # HTTP status; pass the message to the template instead.
            return render_template('index.html', message="Account created successfully")
        else:
            return "Problem was encountered creating account"
@app.route('/login/', methods=['GET', 'POST'])
def index():
    """Render the main/login page (GET) or authenticate the user (POST)."""
    if request.method == 'GET':
        return render_template('index.html')
    if request.method == 'POST':
        email = request.form['email']
        password = request.form['password']
        print(email, password, "User entered data")
        try:
            valid_email = Users[email][1]
            valid_pass = Users[email][-1]
        except KeyError:
            # Unknown account: treat as a failed login below.
            valid_email = valid_pass = None
        if email == valid_email and password == valid_pass:
            session['email'] = email
            return redirect('/create_bucket/')
        # BUG FIX: a wrong password previously fell through and returned
        # None, producing a server error instead of re-showing the form.
        error = "Login was not successful"
        return render_template("index.html", error=error)
@app.route('/create_bucket/', methods=['GET', 'POST'])
def bucketcreate_view():
    """Show the bucketlist page (GET) or create a new bucketlist (POST)."""
    if request.method == 'GET':
        return render_template('bucketcreate_view.html')
    if request.method == 'POST':
        bucket_name = request.form.get('name')
        print(bucket_name, "bucketlist name test")
        # Every bucketlist gets a time-based unique id.
        bucketlist_id = str(uuid.uuid1())
        bucketlist_object = Bucketlist(bucketlist_id, bucket_name)
        # Bucketlists are grouped per logged-in user, keyed by session email.
        if session['email'] in bucketlists_dict:
            bucketlists_dict[session['email']].append(bucketlist_object)
        else:
            bucketlists_dict[session['email']] = []
            bucketlists_dict[session['email']].append(bucketlist_object)
        print(bucketlists_dict[session['email']], "testing bucketlist")
        # name_exist = bucketlist[bucket_name]
        # print (name_exist)
        # if bucket_name != name_exist:
        # bucketlist[bucket_name] = bucket_name
        return render_template('bucketcreate_view.html', buckets = bucketlists_dict[session['email']])
        # else:
        # error = "name already exists"
        # return error
@app.route('/edit_bucket/<bucketlist_id>', methods=['POST'])
def edit_bucket(bucketlist_id):
    """Rename the bucketlist with the given id for the logged-in user."""
    if request.method == 'POST':
        name_given = request.form.get('Edit_name')
        print (name_given , "This is edited bucket name")
        # Find the matching bucket in this user's list and rename it in place.
        for bucket in bucketlists_dict[session['email']]:
            if bucketlist_id == bucket.bucketlist_id:
                bucket.bucket_name = name_given
        return render_template('bucketcreate_view.html', buckets = bucketlists_dict[session['email']])
        # try:
        # bucketlist[bucket] = [name_given]
        # return render_template('bucketcreate_view.html', buckets = bucketlist)
        # except KeyError:
        # error = "name already exists"
        # return render_template('bucketcreate_view.html', error = error)
@app.route('/delete_bucket/<bucketlist_id>', methods=['POST'])
def delete(bucketlist_id):
    """Remove the bucketlist with the given id from the logged-in user's list."""
    user_buckets = bucketlists_dict[session['email']]
    for bucket in user_buckets:
        if bucket.bucketlist_id == bucketlist_id:
            user_buckets.remove(bucket)
    return render_template(
        'bucketcreate_view.html', buckets = user_buckets
    )
@app.route('/activities/', methods=['GET'])
def bucketlist_activities():
    """Render the bucketlist activities page."""
    if request.method == 'GET':
        return render_template('bucketlist_activities.html')
if __name__ == "__main__":
app.run(host='127.0.0.1', port='80')
|
#!/usr/bin/env python
"""
Check that all functions are documented.
"""
import os
import sys
ERROR = False  # flipped to True by error(); drives the process exit code
# Repository root: two directories above this script.
ROOT = os.path.dirname(os.path.dirname(os.path.abspath((__file__))))
def error(message):
    """Print *message* and flag that at least one error occurred."""
    global ERROR
    ERROR = True
    print(message)
def all_functions():
    """Collect the names of all functions defined under src/.

    Skips Property.jl and utils.jl, strips the "Base." prefix from extended
    Base methods, and ignores underscore-prefixed (private) functions.
    """
    functions = []
    for (root, _, pathes) in os.walk(os.path.join(ROOT, "src")):
        for path in pathes:
            if path in ["Property.jl", "utils.jl"]:
                continue
            with open(os.path.join(root, path)) as fd:
                for line in fd:
                    line = line.lstrip()
                    if line.startswith("function"):
                        # Function name is the token after "function", up to "(".
                        name = line.split()[1].split("(")[0]
                        if name.startswith("Base."):
                            functions.append(name[5:])  # drop the "Base." prefix
                        elif name.startswith("_"):
                            continue  # private helper, not expected in docs
                        else:
                            functions.append(name)
    return functions
def usage_in_doc():
    """Collect function names referenced by autofunction directives in doc/reference."""
    usages = []
    for (root, _, pathes) in os.walk(os.path.join(ROOT, "doc", "reference")):
        for path in pathes:
            with open(os.path.join(root, path)) as fd:
                kind = ""  # NOTE(review): assigned but never read — dead variable?
                func = ""
                for line in fd:
                    if line.startswith(".."):
                        # Directive line: the last token is the function name.
                        func = line.split()[-1]
                    # NOTE(review): this check is not nested under the ".."
                    # test, and a line without ':' would raise IndexError —
                    # verify against the documentation file format.
                    if line.split(":")[1] == "autofunction":
                        usages.append(func)
    return usages
if __name__ == '__main__':
    # Cross-check defined functions against documented ones in both
    # directions, then exit non-zero if any mismatch was reported.
    functions = all_functions()
    docs = usage_in_doc()
    for function in functions:
        if function not in docs:
            error("missing documentation for {}".format(function))
    for function in docs:
        if function not in functions:
            error("documentation for non-existing {}".format(function))
    if ERROR:
        sys.exit(1)
|
"scope.py"
a = 1
n = 1
def f(n):
print 'In f, a =', a, 'and n =', n, vars()
f(10)
print vars()
|
class SystemItem:
    """Base class for filesystem entries: a name plus the parent's name."""

    def __init__(self, name, parent):
        self.name, self.parent = name, parent
class Directory(SystemItem):
    """A directory that records child entry names and creates sub-entries."""

    def __init__(self, name, parent):
        super().__init__(name, parent)
        self.children = []

    def mkdir(self, name):
        """Record *name* as a child and return a new Directory under this one."""
        subdir = Directory(name, self.name)
        self.children.append(name)
        return subdir

    def touch(self, name):
        """Record *name* as a child and return a new File under this one."""
        new_file = File(name, self.name)
        self.children.append(name)
        return new_file
class File(SystemItem):
    """A file entry holding its text content (empty when created)."""

    def __init__(self, name, parent):
        super().__init__(name, parent)
        self.content = ''
# Demo: build a tiny in-memory filesystem and print its pieces.
root = Directory('/', None)
home = root.mkdir('/home')
test_file = home.touch('/home/test.txt')
test_file.content = 'This is the first test file'
test_file2 = home.touch('/home/test2.html')
documents = home.mkdir('/home/documents')
print(root.name)
print(home.name)
print(home.parent)
print(home.children)
print(test_file.name)
print(test_file.parent)
print(test_file.content)
|
# You cannot alter the data in a tuple (tuples are immutable).
daysOfTheWeek=("Monday","Tuesday","Wednesday","Thursday","Fri","Sat","Sun")
print(type(daysOfTheWeek))
print(len(daysOfTheWeek))
print(daysOfTheWeek[4])
# count() matches whole elements, not characters inside them, so this is 0.
print(daysOfTheWeek.count('y'))
print(daysOfTheWeek[-6])
# A bare comma creates a tuple even without parentheses.
man= " Daset",67
print(type(man))
|
#!/usr/bin/env python
# encoding: utf-8
"""
app.py
Created by yang.zhou on 2012-08-28.
Copyright (c) 2012 zhouyang.me. All rights reserved.
"""
import logging
import os.path
import json
import tornado
import pylibmc
#import motor
import tornado.options
from tornado import httpserver
from tornado import ioloop
from tornado.web import Application
from tornado.options import define, options
from mongoengine import connect
from core.base.route import route
from core.base.models import Settings
from core.utils import session
from core.utils.configuration import get_config
from core.modules.accounts import *
from core.modules.page import *
from core.modules.admin import *
from core.modules.api import *
from core.modules.messaging import *
from core import uimodules
# URL handlers collected via the @route decorator across the modules above.
handlers = [] + route.routes()

class Engine(Application):
    """Tornado application wired with sessions, memcached and OAuth keys."""

    def __init__(self, opts, profiles):
        """init server, add route to server"""
        settings = dict(
            template_path = os.path.join(os.path.dirname(__file__), "core/tpl"),
            static_path = os.path.join(os.path.dirname(__file__), "core/static"),
            cookie_name = opts.server_name,
            cookie_secret = opts.cookie_secret,
            xsrf_cookie = True,
            session_secret = opts.session_secret,
            session_timeout = int(opts.session_timeout),
            memcached_address = ["127.0.0.1:11211"],
            login_url = "/accounts/login",
            ui_modules = uimodules,
            debug = bool(opts.debug),
            # OAuth application keys for the supported providers.
            weibo_key = opts.weibo_key,
            weibo_secret = opts.weibo_secret,
            tencent_key = opts.tencent_key,
            tencent_secret = opts.tencent_secret,
            douban_key = opts.douban_key,
            douban_secret = opts.douban_secret,
            opts = opts,
            profiles = profiles
        )
        Application.__init__(self, handlers, **settings)
        # Sessions live in memcached, signed with the session secret.
        self.session_manager = session.SessionManager(settings["session_secret"],
                                                      settings["memcached_address"],
                                                      settings["session_timeout"])
        self.mc = pylibmc.Client(["127.0.0.1"])
        #mongodb_host = "mongodb://%s:%s@%s:%s/%s" %(opts.db_username,
        #    opts.db_password, opts.db_host, opts.db_port, opts.db_name)
        #self.asyncdb = motor.MotorConnection(mongodb_host).open_sync()

    def update_profiles(self):
        """Reload the cached site profile settings from the database."""
        self.settings["profiles"] = Settings.settings()
def main():
    """start engine"""
    define("port", default=9999, help="Server port", type=int)
    define("conf", default="app.conf", help="Config file path", type=str)
    define("status", default="dev", help="Server status use dev or live", type=str)
    tornado.options.parse_command_line()
    if os.path.exists(options.conf):
        print 'Loading', options.conf
    else:
        exit("No config file at %s" %options.conf)
    configs = get_config(options.conf)
    # Pick the config section matching --status (dev or live).
    opts = getattr(configs, options.status)
    connect(opts.db_name, host=opts.db_host, username=opts.db_username, password=opts.db_password)
    profiles = Settings.settings()
    http_server = httpserver.HTTPServer(Engine(opts, profiles))
    http_server.listen(options.port)
    # Load translation catalogs before starting the IO loop.
    tornado.locale.load_translations(os.path.join(os.path.dirname(__file__), "core/trans"))
    ioloop.IOLoop.instance().start()

if __name__ == '__main__':
    main()
# Generated by Django 2.2.1 on 2019-06-16 22:58
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: add a OneToOne 'profil' link to Prof.

    Generated by Django 2.2.1; applied migrations should not be hand-edited.
    """

    dependencies = [
        ('questi', '0004_auto_20190617_0054'),
    ]

    operations = [
        migrations.AddField(
            model_name='prof',
            name='profil',
            # NOTE(review): default=False is an unusual default for a
            # OneToOneField (only used to backfill existing rows) — verify.
            field=models.OneToOneField(default=False, on_delete=django.db.models.deletion.CASCADE, to='questi.Profil'),
        ),
    ]
|
import wx
import cv2
import pymysql
from model.dbconnect import *
from props.InputProp import *
from form.MainList import *
from form.ConnectDialog import *
from form.FlexList import *
from form.EditForm import *
from MenuForm import * |
# Read a mark and report whether it is even or odd.
mark = int(input("Enter your mark: "))
remainder = mark % 2
print(remainder)
if remainder == 0:
    print("Even!")
else:
    print("Odd!")
"""
Copyright 1999 Illinois Institute of Technology
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL ILLINOIS INSTITUTE OF TECHNOLOGY BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Illinois Institute
of Technology shall not be used in advertising or otherwise to promote
the sale, use or other dealings in this Software without prior written
authorization from Illinois Institute of Technology.
"""
import configparser
import unittest
import time
import sys
import subprocess
from threading import Thread
import os
import os.path
sys.path.append('..')
from musclex.ui.ui_launcherform import *
from musclex import __version__
from musclex.utils.exception_handler import handlers
from musclex.utils.zip_download import download_zip_pickles
from musclex.tests.module_test import *
from musclex.tests.musclex_tester import MuscleXGlobalTester
from musclex.tests.environment_tester import EnvironmentTester
# Install the platform-specific top-level exception handler, when one exists.
if sys.platform in handlers:
    sys.excepthook = handlers[sys.platform]
class LauncherForm(QWidget):
"""
Qt class definition for the GUI launcher
"""
programs = ['xv', 'eq', 'qf', 'pt', 'di', 'ddf', 'aise', 'aime'] # 'dc',
def __init__(self):
super(QWidget, self).__init__()
# Set up the user interface from Designer.
self.ui = Ui_LauncherForm()
self.ui.setupUi(self)
self.setWindowTitle("MuscleX Launcher v" + __version__)
self.td = None
# Set up popup message box
popupMsg = QMessageBox()
popupMsg.setWindowTitle('Note')
popupMsg.setTextFormat(Qt.RichText)
popupMsg.setText(
"""Please help us impove our program by reporting exceptions or bugs to
<a href="https://www.github.com/biocatiit/musclex/issues">
https://www.github.com/biocatiit/musclex/issues</a>.""")
popupMsg.setInformativeText(
"""When reporting, besides complete error logs, we hope you can also provide"""
"""the information of your platfrom and images you're processing. """)
popupMsg.setIcon(QMessageBox.Information)
popupMsg.setCheckBox(QCheckBox("Do not show this again.", self))
pmlayout = popupMsg.layout()
pmlayout.addItem(QSpacerItem(756, 0), pmlayout.rowCount(), 0, 1, pmlayout.columnCount())
self.popupMsg = popupMsg
# Make some local initializations.
self.program_idx = 0
self.ui.runButton.clicked.connect(self.launch)
self.ui.testButton.clicked.connect(self.test)
self.ui.stackedWidget.currentChanged['int'].connect(self.select)
# Read the config file
config = configparser.RawConfigParser()
config.optionxform = lambda option: option
ininame = os.path.join(os.path.expanduser('~'), 'musclex.ini')
print('Config file at ' + ininame)
if os.path.exists(ininame):
config.read(ininame)
if 'Launcher' in config and 'ShowMessage' in config['Launcher']:
self.popupMsg.checkBox().setChecked(not config['Launcher'].getboolean('ShowMessage'))
else:
open(ininame, 'a').close()
self.config = config
self.ininame = ininame
if getattr(sys, 'frozen', False):
self.test_path = os.path.join(os.path.dirname(sys._MEIPASS), "musclex", "test_logs", "test.log")
self.release_path = os.path.join(os.path.dirname(sys._MEIPASS), "musclex", "test_logs", "release.log")
elif __file__:
self.test_path = os.path.join(os.path.dirname(__file__),
"tests", "test_logs", "test.log")
self.release_path = os.path.join(os.path.dirname(__file__),
"tests", "test_logs", "release.log")
QApplication.processEvents()
    def select(self, idx):
        """
        Select a module to launch.

        :param idx: index of the page currently shown in the stacked widget;
            stored so launch() knows which program to start.
        """
        self.program_idx = idx
    def launch(self):
        """
        Launch the selected module as a separate process.
        """
        prog = LauncherForm.programs[self.program_idx]
        try:
            # Prefer the 'musclex-main' helper sitting next to this
            # executable (installed / frozen layout).
            path = os.path.dirname(sys.argv[0])
            path = '.' if path == '' else path
            subprocess.Popen([os.path.join(path, 'musclex-main'), prog],
                             shell=(sys.platform == 'win32'))
        except IOError:
            # Fall back to a 'musclex' entry point found on the PATH.
            subprocess.Popen(['musclex', prog], shell=(sys.platform == 'win32'))
    def test(self):
        """
        Open the test dialog.
        """
        # Keep a reference on self so the dialog isn't garbage collected,
        # then bring it to the front.
        self.td = TestDialog()
        self.td.show()
        self.td.activateWindow()
        self.td.raise_()
    def keyReleaseEvent(self, event):
        """
        Key release event: pressing Return launches the selected module.
        """
        if event.key() == Qt.Key_Return:
            self.launch()
def closeEvent(self, event):
"""
Close event
"""
if not self.popupMsg.checkBox().isChecked():
self.popupMsg.exec_()
if self.popupMsg.checkBox().isChecked():
if 'Launcher' not in self.config:
self.config['Launcher'] = {}
if 'ShowMessage' not in self.config['Launcher']:
self.config['Launcher']['ShowMessage'] = str(1)
self.config['Launcher']['ShowMessage'] = str(0)
with open(self.ininame, 'w') as configfile:
self.config.write(configfile)
    @staticmethod
    def main():
        """
        Main function for the launcher.

        Creates the QApplication, shows the launcher window and enters the
        Qt event loop; the process exits with the loop's return code.
        """
        app = QApplication(sys.argv)
        window = LauncherForm()
        window.show()
        sys.exit(app.exec_())
class TestDialog(QDialog):
    """
    Qt Class definition for the TestDialog window.

    Lets the user run the MuscleX test suites (summary, detailed,
    environment, GPU) and browse the resulting log files.
    """
    def __init__(self):
        # NOTE(review): the original called super(QWidget, self).__init__(),
        # which skips QDialog's own initializer in the MRO; initialize the
        # dialog base class properly.
        super(TestDialog, self).__init__()
        # Fixed path to the test logs: next to the unpacked bundle for
        # frozen (PyInstaller) builds, under tests/test_logs otherwise.
        if getattr(sys, 'frozen', False):
            log_dir = os.path.join(os.path.dirname(sys._MEIPASS), "musclex", "test_logs")
        else:
            log_dir = os.path.join(os.path.dirname(__file__), "tests", "test_logs")
        self.test_path = os.path.join(log_dir, "test.log")
        self.release_path = os.path.join(log_dir, "release.log")
        # Colors used when reporting pass/fail in the results pane.
        self.green = QColor(0, 150, 0)
        self.red = QColor(150, 0, 0)
        self.black = QColor(0, 0, 0)
        self.initUI()

    def initUI(self):
        """
        Build the button column, progress bar and results pane.
        """
        self.testDialogLayout = QVBoxLayout()
        self.runSummaryTestsButton = QPushButton('Run MuscleX Global Summary Tests')
        self.runDetailedTestsButton = QPushButton('Run MuscleX Detailed Implementation Tests')
        self.runEnvironmentTestButton = QPushButton('Run Environment Test')
        self.runGPUTestButton = QPushButton('Run GPU Test')
        self.showLatestTestButton = QPushButton('Show Latest Test Results')
        self.showReleaseButton = QPushButton('Show Release Results')
        self.progressBar = QProgressBar(self)
        self.progressBar.setGeometry(0, 0, 300, 25)
        self.progressBar.setMaximum(100)
        self.testDialogLayout.addWidget(self.runSummaryTestsButton)
        # The detailed-test button exists but is intentionally not shown.
        # self.testDialogLayout.addWidget(self.runDetailedTestsButton)
        self.testDialogLayout.addWidget(self.runEnvironmentTestButton)
        self.testDialogLayout.addWidget(self.runGPUTestButton)
        self.testDialogLayout.addWidget(self.showLatestTestButton)
        self.testDialogLayout.addWidget(self.showReleaseButton)
        self.testDialogLayout.addWidget(self.progressBar)
        self.runSummaryTestsButton.clicked.connect(self.runSummaryTestsButtonClicked)
        self.runDetailedTestsButton.clicked.connect(self.runDetailedTestsButtonClicked)
        self.runEnvironmentTestButton.clicked.connect(self.runEnvTestButtonClicked)
        self.runGPUTestButton.clicked.connect(self.runGPUTestButtonClicked)
        self.showLatestTestButton.clicked.connect(self.showLatestTestButtonClicked)
        self.showReleaseButton.clicked.connect(self.showReleaseButtonClicked)
        self.setLayout(self.testDialogLayout)
        self.resize(700, 500)
        self.detail = QTextEdit()
        self.detail.setReadOnly(True)
        self.detail.setFontWeight(100)
        if os.path.exists(self.test_path):
            self.detail.insertPlainText("Module tests have already been run.\nPress 'Run Tests' to run the module tests again.")
            self.detail.insertPlainText(f"\n\nTest results:\n{'-'*80}{self.get_latest_test()}{'-'*80}\nSee the log at {self.test_path} for more info.\n")
        else:
            self.detail.insertPlainText("No test logs found. Running tests for the first time..\n")
        self.testDialogLayout.addWidget(self.detail)
        QApplication.processEvents()
        self.detail.setFontWeight(50)
        self.detail.moveCursor(QTextCursor.Start)
        QApplication.processEvents()

    def runSummaryTestsButtonClicked(self):
        """
        Triggered when the Global summary test button is clicked.
        """
        self.run_summary_test()

    def runDetailedTestsButtonClicked(self):
        """
        Triggered when the Detailed implementation test button is clicked.
        """
        self.run_download_pickles()
        self.run_detailed_test()

    def runEnvTestButtonClicked(self):
        """
        Run Environment Tests.
        """
        self.progressBar.reset()
        self.detail.moveCursor(QTextCursor.End)
        self.detail.setFontWeight(100)
        self.detail.insertPlainText("\nRunning environment tests of MuscleX modules.\nThis will take a few seconds...")
        QApplication.processEvents()
        suite = unittest.TestSuite()
        suite.addTest(EnvironmentTester("testEnvironment"))
        runner = unittest.TextTestRunner()
        proc = Thread(target=runner.run, args=(suite,))
        proc.start()
        self.progressBar.setValue(0)
        QApplication.processEvents()
        # Keep the UI responsive while the worker thread runs.
        while proc.is_alive():
            time.sleep(0.5)
            self.detail.moveCursor(QTextCursor.End)
            self.detail.insertPlainText(".")
            QApplication.processEvents()
        self.progressBar.setValue(100)
        QApplication.processEvents()
        self.detail.moveCursor(QTextCursor.End)
        self.detail.setFontWeight(100)
        self.detail.insertPlainText("\nEnvironment tests complete.")
        QApplication.processEvents()
        test_results = self.get_latest_test()
        # The environment test writes 'warning' lines when something is off.
        if test_results.find('warning') != -1:
            self.detail.setTextColor(self.red)
            self.detail.insertPlainText("\nSome tests failed -- see below for details.\n")
        else:
            self.detail.setTextColor(self.green)
            self.detail.insertPlainText("\nAll tests passed -- see below for details.\n")
        QApplication.processEvents()
        self.detail.setTextColor(self.black)
        self.detail.setFontWeight(50)
        self.detail.insertPlainText(f"Test results:\n{'-'*80}{test_results}{'-'*80}")
        QApplication.processEvents()
        proc.join()

    @staticmethod
    def _section_passed(results, header):
        """Return True if the log text following *header* starts with 'pass'."""
        parts = results.split(header)
        return len(parts) >= 2 and parts[1][1:5] == 'pass'

    def runGPUTestButtonClicked(self):
        """
        Run GPU Tests from unittest.
        """
        self.progressBar.reset()
        self.detail.moveCursor(QTextCursor.End)
        QApplication.processEvents()
        suite = unittest.TestSuite()
        suite.addTest(MuscleXTest("testOpenCLDevice"))
        suite.addTest(MuscleXTest("testGPUIntegratePyFAI"))
        unittest.TextTestRunner().run(suite)
        self.detail.setFontWeight(100)
        self.detail.insertPlainText("GPU tests complete.\n")
        self.detail.moveCursor(QTextCursor.NoMove)
        QApplication.processEvents()
        test_results = self.get_latest_test()
        # GPU acceleration needs both the OpenCL device probe and the pyFAI
        # integration test to pass.
        pass_test = (self._section_passed(test_results, 'OpenCL GPU Device Test:')
                     and self._section_passed(test_results, 'pyFAI Integration Test:'))
        if pass_test:
            self.detail.setTextColor(self.green)
            self.detail.insertPlainText("Tests Passed -- GPU acceleration is available.\n")
        else:
            self.detail.setTextColor(self.red)
            self.detail.insertPlainText("Tests failed -- GPU acceleration is not available.\n")
        QApplication.processEvents()
        self.detail.setTextColor(self.black)
        self.detail.setFontWeight(50)
        self.detail.insertPlainText(f"Test results:\n{'-'*80}{test_results}{'-'*80}")
        self.progressBar.setValue(100)
        QApplication.processEvents()

    def _show_log(self, title, body):
        """Append a bold *title* and a dash-delimited log *body* to the pane."""
        self.detail.moveCursor(QTextCursor.End)
        QApplication.processEvents()
        self.detail.setFontWeight(100)
        self.detail.insertPlainText(title)
        self.detail.moveCursor(QTextCursor.End)
        QApplication.processEvents()
        self.detail.setFontWeight(50)
        self.detail.insertPlainText(f"{'-'*80}{body}{'-'*80}")
        QApplication.processEvents()

    def showLatestTestButtonClicked(self):
        """
        Triggered when the Show latest test button is clicked.
        """
        self._show_log("\nLatest test results:\n", self.get_latest_test())

    def showReleaseButtonClicked(self):
        """
        Triggered when the Show release button is clicked.
        """
        self._show_log("\nLatest release results:\n", self.get_release_results())

    def run_download_pickles(self):
        """
        Download and unzip the pickle fixtures used by the detailed tests.
        """
        self.progressBar.reset()
        self.detail.moveCursor(QTextCursor.End)
        self.detail.setFontWeight(100)
        self.detail.insertPlainText("\nDownloading and unzipping pickle files for testing.\nThis could take a few minutes...")
        QApplication.processEvents()
        download_zip_pickles(os.path.dirname(__file__))
        self.progressBar.setValue(100)
        QApplication.processEvents()
        self.detail.moveCursor(QTextCursor.End)
        self.detail.insertPlainText("\nDone.\n")
        QApplication.processEvents()

    def _read_log_lines(self):
        """Return the test log as a list of lines, or None if absent.

        The original code opened the log on every polling iteration without
        closing it, leaking file handles.
        """
        if not os.path.exists(self.test_path):
            return None
        with open(self.test_path, 'r') as logfile:
            return logfile.readlines()

    def _run_suite_monitored(self, suite, ntests, label):
        """Run *suite* on a worker thread, advancing the progress bar each
        time the test log grows.  Returns the (still joinable) thread."""
        self.progressBar.reset()
        runner = unittest.TextTestRunner()
        proc = Thread(target=runner.run, args=(suite,))
        proc.start()
        prev_data = self._read_log_lines()
        self.detail.moveCursor(QTextCursor.End)
        self.detail.setFontWeight(100)
        self.detail.insertPlainText(label)
        QApplication.processEvents()
        progress = 0
        test_number = 0
        while progress < 100 and proc.is_alive():
            time.sleep(0.5)
            self.detail.moveCursor(QTextCursor.End)
            self.detail.insertPlainText(".")
            QApplication.processEvents()
            curr_data = self._read_log_lines()
            if curr_data is not None and curr_data != prev_data:
                test_number += 1
                progress += 100 / ntests
                self.progressBar.setValue(int(progress))
                QApplication.processEvents()
                self.detail.moveCursor(QTextCursor.End)
                self.detail.insertPlainText(f"\nFinished test {test_number} out of {ntests}.\n")
                QApplication.processEvents()
                prev_data = curr_data
        self.progressBar.setValue(100)
        QApplication.processEvents()
        self.detail.moveCursor(QTextCursor.End)
        self.detail.setFontWeight(100)
        self.detail.insertPlainText("\nModule tests complete.")
        QApplication.processEvents()
        return proc

    def _report_outcome(self, failed):
        """Print a colored one-line pass/fail verdict."""
        if failed:
            self.detail.setTextColor(self.red)
            self.detail.insertPlainText("\nSome tests failed -- see below for details.\n")
        else:
            self.detail.setTextColor(self.green)
            self.detail.insertPlainText("\nAll tests passed -- see below for details.\n")

    def run_summary_test(self):
        """
        Run the gross result testing in a worker thread while monitoring
        progress from the log in the GUI thread.
        """
        ntests = 13  # progress slots (matches the original pacing)
        suite = unittest.TestSuite()
        for test_name in ("testHeadlessMarEquator", "testHeadlessEigerEquator",
                          "testHeadlessPilatusEquator", "testHeadlessMarQuadrantFolder",
                          "testHeadlessEigerQuadrantFolder", "testHeadlessPilatusQuadrantFolder",
                          "testHeadlessMarDiffraction", "testHeadlessEigerDiffraction",
                          "testHeadlessPilatusDiffraction", "testHeadlessMarProjectionTraces",
                          "testHeadlessEigerProjectionTraces", "testHeadlessPilatusProjectionTraces"):
            suite.addTest(MuscleXGlobalTester(test_name))
        proc = self._run_suite_monitored(
            suite, ntests,
            "\nRunning summary tests of MuscleX modules.\nThis could take a few minutes...")
        test_results = self.get_latest_test()
        summary = test_results.split('Summary of Test Results')
        if len(summary) >= 2:
            self._report_outcome(summary[1].find('fail') != -1)
        QApplication.processEvents()
        self.detail.setTextColor(self.black)
        self.detail.setFontWeight(50)
        if len(summary) >= 2:
            self.detail.insertPlainText(f"\nTest results:\n{'-'*80}{summary[1]}{'-'*80}\nSee the log at {self.test_path} for more info.")
        QApplication.processEvents()
        proc.join()

    def run_detailed_test(self):
        """
        Run the unittest suite in a worker thread while monitoring progress
        from the log in the GUI thread.
        """
        ntests = 8  # progress slots (matches the original pacing)
        suite = unittest.TestSuite()
        for test_name in ("testEquatorImage", "testQuadrantFolder",
                          "testDiffractionCentroids", "testProjectionTraces",
                          "testScanningDiffraction", "testHDFRead",
                          "testOpenCLDevice"):
            suite.addTest(MuscleXTest(test_name))
        # suite.addTest(MuscleXTest("testGPUIntegratePyFAI"))  # not working with pyinstaller
        proc = self._run_suite_monitored(
            suite, ntests,
            "\nRunning detailed tests of MuscleX modules.\nThis could take a few minutes...")
        test_results = self.get_latest_test()
        summary = test_results.split('Summary of Test Results')
        if len(summary) >= 2:
            self._report_outcome(summary[1].find('fail') != -1)
        QApplication.processEvents()
        self.detail.setTextColor(self.black)
        self.detail.setFontWeight(50)
        self.detail.insertPlainText(f"\nTest results:\n{'-'*80}{test_results}{'-'*80}\nSee the log at {self.test_path} for more info.")
        QApplication.processEvents()
        proc.join()

    @staticmethod
    def _last_section(path):
        """Return the last non-empty '-'*80-delimited section of *path*.

        Closes the file (the original leaked the handle) and no longer
        raises IndexError when the log has fewer than 12 sections.
        """
        if not os.path.exists(path):
            return ""
        with open(path, 'r') as logfile:
            sections = logfile.read().split('-' * 80)
        for section in reversed(sections[-12:]):
            if section != '\n':
                return section
        return ""

    def get_latest_test(self):
        """
        Return the last test run recorded in the test log ('' if none).
        """
        return self._last_section(self.test_path)

    def get_release_results(self):
        """
        Return the last release results recorded in the release log ('' if none).
        """
        return self._last_section(self.release_path)
if __name__ == "__main__":
    # Allow running the launcher directly as a script.
    LauncherForm.main()
|
import networkx as nx
import csv
import random
class node_preprocess:
    """Graph loading plus k-core / k-peak decomposition helpers.

    NOTE: several methods hard-wire 16 core levels; mountainDict is only
    initialized by getGraph(), so call getGraph() before the mountain
    methods.
    """
    def __init__(self, fp, G):
        # fp: path to a comma-separated edge list; G: networkx graph to fill.
        self.fp = fp
        self.G = G

    def getGraph(self):
        """Read the CSV edge list into self.G and init the mountain dict."""
        with open(self.fp) as file:
            csv_file = csv.reader(file, delimiter=',')
            for row in csv_file:
                self.G.add_edge(row[0], row[1])
        # node -> (k, difference) bookkeeping used by the mountain methods.
        self.mountainDict = {node: (0, 0) for node in self.G.nodes()}
        return self.G

    def reducedByPercent(self, percent):
        """Remove a uniform random *percent* fraction of the nodes."""
        removed_num = int(nx.number_of_nodes(self.G) * percent)
        nodes = list(self.G.nodes())
        # random.sample draws without replacement, replacing the original
        # rejection-sampling loop.
        self.G.remove_nodes_from(random.sample(nodes, removed_num))

    def reduceEdgeByPercent(self, percent):
        """Remove a uniform random *percent* fraction of the edges."""
        removed_num = int(nx.number_of_edges(self.G) * percent)
        edges = list(self.G.edges())
        self.G.remove_edges_from(random.sample(edges, removed_num))

    def k_core(self, limit):
        """Per-level core membership test without removing nodes.

        Returns (k_core, edge_core): for each core < limit, the nodes whose
        degree minus the number of low-degree neighbours is >= core, plus
        the incident edges of those nodes.
        """
        G = self.G
        k_core = {}
        edge_core = {}
        for core in range(limit):
            k_core[core] = []
            edge_core[core] = []
            for node in G:
                degree = G.degree(node)
                # Materialize: G.neighbors() is a one-shot iterator; the
                # original exhausted it in the comprehension, so the edge
                # loop below never ran.
                neighbors = list(G.neighbors(node))
                low = len([x for x in neighbors if G.degree(x) < core])
                if degree - low >= core:
                    k_core[core].append(node)
                    for node2 in neighbors:
                        # Was append(node, node2) -> TypeError; store a pair.
                        edge_core[core].append((node, node2))
        return k_core, edge_core

    def k_core2(self, limit, G):
        """Standard iterative k-core: for each k < limit, peel nodes of
        degree < k from a working copy and record the survivors."""
        G_local = G.copy()
        k_core = {}
        for core in range(limit):
            self.clean_up(G_local, core)
            k_core[core] = list(G_local.nodes())
        return k_core

    def clean_up(self, G, k):
        """Repeatedly delete nodes of degree < k until none remain.

        Iterative (the original recursed, risking RecursionError on large
        graphs); same fixed point.
        """
        while True:
            removed = [node for node in G if G.degree(node) < k]
            if not removed:
                return
            G.remove_nodes_from(removed)

    def k_peak(self, limit):
        """Approximate per-level k-peak membership (see k_peak2 for the
        exact onion decomposition).  Hard-wired to 16 core levels."""
        G = self.G.copy()
        k_core = self.k_core2(16, G)
        k_peak = {}
        for peak in range(limit - 1):
            k_peak[peak] = []
            core_above = set(k_core[peak + 1])  # O(1) membership tests
            for node in G:
                degree = G.degree(node)
                length = len([x for x in G.neighbors(node)
                              if G.degree(x) < peak or x in core_above])
                if degree - length > peak:
                    k_peak[peak].append(node)
        k_peak[15] = k_core[15]
        return k_peak

    def k_peak2(self):
        """Exact k-peak (onion) decomposition: repeatedly take the highest
        non-empty core of what's left and strip it off."""
        G = self.G.copy()
        k_peak = {}
        while len(G.nodes()) > 0:
            k_core = self.k_core2(16, G.copy())
            top = max(x for x in k_core if k_core[x] != [])
            k_peak.setdefault(top, [])
            k_peak[top] += k_core[top]
            G.remove_nodes_from(k_core[top])
        return k_peak

    def getEdgeList(self, node):
        """Return *node*'s neighbours as a list."""
        return list(self.G.neighbors(node))

    def coreRemoveK(self, k, peak):
        """Compare core numbers after removing nodes of peak >= k versus
        peak >= k+1, updating mountainDict with the largest drop seen.

        Returns the list of affected nodes (the original built this list
        but never returned it).
        """
        G = self.G.copy()
        G2 = self.G.copy()
        G.remove_nodes_from([x for x in peak.keys() if int(peak[x]) >= k])
        coreList = self.getCodeNum(G)
        G2.remove_nodes_from([x for x in peak.keys() if int(peak[x]) >= k + 1])
        coreList2 = self.getCodeNum(G2)
        return_list = []
        for x in coreList:
            if x not in coreList2:
                difference = coreList[x]
            elif coreList[x] < coreList2[x]:
                difference = coreList2[x] - coreList[x]
            else:
                continue
            if difference > self.mountainDict.get(x)[1]:
                self.mountainDict[x] = (k, difference)
            return_list.append(x)
        return return_list

    def k_mountain_helper(self, k, peak, G):
        """One step of the k-mountain computation on *G*: record in
        mountainDict how much each node's core number drops when nodes of
        peak >= k are removed, then return the reduced graph."""
        G1 = G.copy()
        G2 = G.copy()
        G2.remove_nodes_from([x for x in peak.keys() if int(peak[x]) >= k])
        coreList = self.getCodeNum(G1)
        coreList2 = self.getCodeNum(G2)
        for x in coreList:
            if x not in coreList2:
                difference = coreList[x]
            elif coreList[x] > coreList2[x]:
                difference = coreList[x] - coreList2[x]
            else:
                continue
            if difference > self.mountainDict.get(x)[1]:
                self.mountainDict[x] = (k, difference)
        return G2

    def getCodeNum(self, G):
        """Map node -> core number (the highest k at which it survives)."""
        k_core = self.k_core2(16, G.copy())
        return {j: i for i in k_core for j in k_core[i]}

    def getPeakNum(self):
        """Map node -> peak number from the exact decomposition."""
        k_peak = self.k_peak2()
        return {j: i for i in k_peak for j in k_peak[i]}
"""
G = nx.Graph()
Gc = G.copy()
pre = node_preprocess('./grad_edges.txt', Gc)
pre.getGraph()
k_peak = pre.k_peak2()
""" |
from .sbvat import SBVAT
from .obvat import OBVAT |
'''
Created on Oct 9, 2012
@author: christian
'''
import os
import re
import tempfile
import numpy as np
__all__ = ['MarkerFile']
def fix_ptb_eeg_events(raw):
    """Fix events from a vmrk file recorded with psychtoolbox/stim tracker.

    Parameters
    ----------
    raw : RawBrainVision
        MNE-Python object with psychtoolbox events; modified in place via
        set_brainvision_events().
    """
    events = raw.get_brainvision_events()
    durations_all_one = np.all(events[:, 1] == 1)
    if not durations_all_one:
        raise ValueError("Not KIT psychtoolbox input data (not all durations are 1)")
    # Trigger codes arrive inverted; flip them back at 8-bit width.
    events[:, 2] = np.invert(events[:, 2].astype(np.uint8))
    # Stretch each duration up to the next trigger onset.
    events[:-1, 1] = np.diff(events[:, 0])
    # Discard events whose (fixed) code is zero.
    keep = np.nonzero(events[:, 2])[0]
    raw.set_brainvision_events(events[keep])
class MarkerFile:
    """Parse a MEG160 marker (HPI) text export into coordinates.

    Attributes
    ----------
    points : np.array
        array with shape point by coordinate (x, y, z)
    path : str
        path to the temporary file containing the simplified marker file for
        input to mne_kit2fiff
    """
    # Pattern for "Marker N: MEG:x= ..., y= ..., z= ..." lines (by Tal).
    _marker_re = re.compile(
        r'Marker \d: MEG:x= *([\.\-0-9]+), y= *([\.\-0-9]+), z= *([\.\-0-9]+)')

    def __init__(self, path):
        """
        path : str
            Path to marker avg file (saved as text form MEG160).
        """
        self.src_path = path
        # Close the source file deterministically (the original leaked it).
        with open(path) as fid:
            str_points = self._marker_re.findall(fid.read())
        txt = '\n'.join(map('\t'.join, str_points))
        self.points = np.array(str_points, dtype=float)
        fd, self.path = tempfile.mkstemp(suffix='hpi', text=True)
        with os.fdopen(fd, 'w') as f:
            f.write(txt)

    def __del__(self):
        # Best effort: the temp file may already be gone at interpreter exit.
        try:
            os.remove(self.path)
        except OSError:
            pass

    def __repr__(self):
        return 'MarkerFile(%r)' % self.src_path

    def plt(self, marker='+k'):
        """Shorthand for plot_mpl()."""
        self.plot_mpl(marker=marker)

    def plot_mpl(self, marker='+k', ax=None, title=True):
        "returns: axes object with 3d plot"
        import matplotlib.pyplot as plt
        if ax is None:
            fig = plt.figure()
            # add_subplot works on both old and new matplotlib;
            # fig.gca(projection=...) was removed in matplotlib 3.6.
            ax = fig.add_subplot(111, projection='3d')
        ax.plot(self.points[:, 0], self.points[:, 1], self.points[:, 2], marker)
        for i, (x, y, z) in enumerate(self.points):
            ax.text(x, y, z, str(i))
        # Pad the axes limits by 1 unit around the data.
        xmin, ymin, zmin = self.points.min(0) - 1
        xmax, ymax, zmax = self.points.max(0) + 1
        ax.set_xlim3d(xmin, xmax)
        ax.set_ylim3d(ymin, ymax)
        ax.set_zlim3d(zmin, zmax)
        if title:
            if title is True:
                title = os.path.basename(self.src_path)
            ax.set_title(str(title))
        return ax
|
operation = raw_input("Would you like to add, subtract, mutiply, or divide?")
number1 = int(raw_input("Enter the first number"))
number2 = int(raw_input("Enter the second number"))
if operation == "add":
answer = number1 + number2
elif operation == "subtract":
answer = number1 - number2
elif operation == "multiply":
answer = number1 * number2
elif operation == "divide":
answer = float(number1)/float(number2)
print answer
|
#!/usr/bin/python3
from flask import Flask, request, jsonify
from flask_restful import Resource, Api, reqparse
from dbconnection import connect
from Queries import ALL_BOOKS, SPECIFIC_BOOK, INSERT_BOOK, UPDATE_BOOK
from Queries import DELETE_BOOK, ALL_AUTHORS, SPECIFIC_AUTHOR
# Request parser for POST /books: all three fields are mandatory and a
# missing/invalid field produces an automatic 400 response.
book_post = reqparse.RequestParser()
book_post.add_argument('book_name', type=str,
                       help='Name of book is required', required=True)
book_post.add_argument('description', type=str,
                       help='Description of book is required', required=True)
book_post.add_argument('author_id', type=int,
                       help='Author id is required', required=True)

# Flask application and its Flask-RESTful wrapper.
app = Flask(__name__)
api = Api(app)
class Books(Resource):
    """Collection endpoint for /books."""

    def get(self):
        """Return every book as {'Books': [row-dict, ...]}."""
        # connect to database
        conn = connect()
        query = conn.execute(ALL_BOOKS)
        # Zip the column names with each fetched row.
        return {'Books': [dict(zip(tuple(query.keys()), row))
                          for row in query.cursor]}

    def post(self):
        """Insert a new book; the referenced author must already exist."""
        book_post.parse_args()  # rejects missing/invalid fields with a 400
        conn = connect()
        book_name = request.json['book_name']
        description = request.json['description']
        author_id = request.json['author_id']
        # NOTE(review): queries are built with str.format and are therefore
        # SQL-injection prone; the statements in Queries.py should be
        # parameterized instead.
        author_exists = len(conn.execute(SPECIFIC_AUTHOR.format(author_id))
                            .cursor.fetchall())
        if author_exists:
            try:
                conn.execute(INSERT_BOOK.format(book_name, description,
                                                author_id))
                return {'status': 'inserted'}
            except Exception:
                # Was a bare except; narrowed so SystemExit/KeyboardInterrupt
                # are not swallowed.  Behaviour for DB errors is unchanged.
                return {'status': 'duplicate authors not allowed'}
        return {'status': " Author not present in the table"}
class Book_Id(Resource):
    """Single-book endpoint for /books/<book_id>."""

    def get(self, book_id):
        """Return one book as {'Book': [row-dict]}."""
        conn = connect()
        query = conn.execute(SPECIFIC_BOOK.format(book_id))
        result = {'Book': [dict(zip(tuple(query.keys()), row))
                           for row in query.cursor]}
        return jsonify(result)

    def put(self, book_id):
        """Update a book; the target author must already exist."""
        conn = connect()
        book_name = request.json['book_name']
        description = request.json['description']
        author_id = request.json['author_id']
        # NOTE(review): str.format-built SQL is injection-prone; the
        # statements in Queries.py should be parameterized.
        author_exists = len(conn.execute(SPECIFIC_AUTHOR.format(author_id))
                            .cursor.fetchall())
        if author_exists:
            conn.execute(UPDATE_BOOK.format(book_name, description,
                                            author_id, book_id))
            return jsonify({'status': 'updated'})
        return {'status': "Author doesn't exist"}

    # Additional
    def delete(self, book_id):
        """Delete a book, reporting 'error' on any database failure."""
        try:
            conn = connect()
            conn.execute(DELETE_BOOK.format(book_id))
            return jsonify({'status': 'deleted'})
        except Exception:  # was a bare except; don't swallow SystemExit etc.
            return jsonify({'status': 'error'})
class Authors(Resource):
    """Read-only collection endpoint for /authors."""

    def get(self):
        # Fetch every author row and zip the column names with the values.
        conn = connect()
        query = conn.execute(ALL_AUTHORS)
        result = {'Authors': [dict(zip(tuple(query.keys()), i))
                              for i in query.cursor]}
        return jsonify(result)
# URL routing for the REST resources.
api.add_resource(Books, '/books')  # Route_1
api.add_resource(Book_Id, '/books/<book_id>')  # Route_2
api.add_resource(Authors, '/authors')  # Route_3

if __name__ == '__main__':
    # Development server entry point.
    app.run()
import json
import boto3
import datetime
# DynamoDB table that stores one item per uploaded audio file.
dynamodb = boto3.resource('dynamodb')
some_table = dynamodb.Table('audio-details')
def lambda_handler(event, context):
    """Store audio-file metadata (user, time, S3 location) in DynamoDB.

    Expects event keys: 'userId', 'bucket', 'file'.
    Returns a 200 response with a confirmation body.
    """
    # Take a single timestamp: the original called datetime.now() once per
    # component, so the pieces could straddle a clock tick.
    now = datetime.datetime.now()
    time_stamp = (str(now.year) + str(now.month) + str(now.day)
                  + str(now.hour) + str(now.minute) + str(now.second))
    # NOTE(review): components are not zero-padded, so e.g. 1:11 and 11:1
    # can collide; strftime('%Y%m%d%H%M%S') would be unambiguous but would
    # change the stored key format.
    some_table.put_item(
        Item={
            "id-time": str(event["userId"]) + time_stamp,
            "UserId": str(event["userId"]),
            "time": str(time_stamp),
            "bucket": str(event['bucket']),
            "file": str(event['file'])
        }
    )
    return {
        'statusCode': 200,
        'body': json.dumps('audio details have been stored')
    }
|
import pyglet

# Minimal pyglet demo: a window that redraws a "hello" label every frame.
window = pyglet.window.Window()
label = pyglet.text.Label("hello")


@window.event
def on_draw():
    # Called by pyglet whenever the window needs repainting.
    window.clear()
    label.draw()


pyglet.app.run()
import pickle
import bson
# Locations of the pickled/BSON objects produced by the WebScrapper step,
# relative to this module's working directory.
ARMORS_PATH = '../WebScrapper/obj/armors/'
WEAPONS_PATH = '../WebScrapper/obj/weapons/'
MONSTERS_PATH = '../WebScrapper/obj/monsters/'
DECORATIONS_PATH = '../WebScrapper/obj/decorations/'
SKILLS_PATH = '../WebScrapper/obj/skills/'
ITEMS_PATH = '../WebScrapper/obj/items/'
def read_armor_files(base_path=None):
    """Load the scraped armor pickles.

    Parameters
    ----------
    base_path : str, optional
        Directory containing 'id_list.p' plus one '<id>.p' per armor;
        defaults to ARMORS_PATH.

    Returns
    -------
    tuple
        (list of armor objects, list of armor ids).
    """
    if base_path is None:
        base_path = ARMORS_PATH
    # Context managers close the files even if unpickling raises.
    with open(base_path + 'id_list.p', 'rb') as id_file:
        id_list = pickle.load(id_file, encoding='unicode')
    armor_item_list = []
    for item in id_list:
        with open(base_path + str(item) + '.p', 'rb') as item_file:
            armor_item_list.append(pickle.load(item_file, encoding='unicode'))
    return (armor_item_list, id_list)
def read_skills_file(base_path=None):
    """Load the scraped skill pickles.

    Parameters
    ----------
    base_path : str, optional
        Directory containing 'id_list.p' plus one '<id>.p' per skill;
        defaults to SKILLS_PATH.

    Returns
    -------
    tuple
        (list of skill objects, list of skill ids).
    """
    if base_path is None:
        base_path = SKILLS_PATH
    with open(base_path + 'id_list.p', 'rb') as id_file:
        id_list = pickle.load(id_file, encoding='unicode')
    skill_list = []
    for skill in id_list:
        with open(base_path + str(skill) + '.p', 'rb') as skill_file:
            skill_list.append(pickle.load(skill_file, encoding='unicode'))
    for skill_data in skill_list:
        print(skill_data)  # debug output kept from the original
    return (skill_list, id_list)
def read_name_id_mapping(path='../WebScrapper/obj/name_id_map.p'):
    """Load the pickled name -> id mapping.

    Parameters
    ----------
    path : str, optional
        Pickle file to read; defaults to the scrapper's output location.
    """
    # Context manager closes the file even if unpickling raises.
    with open(path, 'rb') as f:
        return pickle.load(f, encoding='unicode')
def read_items_file():
    """Load every item BSON blob listed in the items id dictionary.

    Returns
    -------
    list
        One decoded dict per item id in id_dict['ids'].
    """
    with open(ITEMS_PATH + 'id_dict.bson', 'rb') as id_file:
        id_dict = bson.loads(id_file.read())
    item_list = []
    for item in id_dict['ids']:
        with open(ITEMS_PATH + str(item) + '.bson', 'rb') as item_file:
            item_list.append(bson.loads(item_file.read()))
    return item_list
def read_weapon_file():
    """Load and decode the weapon id dictionary BSON blob."""
    # Context manager closes the file even if decoding raises.
    with open(WEAPONS_PATH + 'id_dict.bson', 'rb') as id_file:
        return bson.loads(id_file.read())
def read_decoration_file():
    """Load every decoration BSON blob listed in the id dictionary.

    Returns
    -------
    list
        One decoded dict per decoration id in id_dict['ids'].
    """
    with open(DECORATIONS_PATH + 'id_dict.bson', 'rb') as id_file:
        id_dict = bson.loads(id_file.read())
    decorations_list = []
    for dec in id_dict['ids']:
        with open(DECORATIONS_PATH + str(dec) + '.bson', 'rb') as dec_file:
            decorations_list.append(bson.loads(dec_file.read()))
    return decorations_list
class SelectionSort(object):
    """In-place selection sort."""

    def sort(self, data):
        """Sort *data* ascending in place and return it.

        Raises
        ------
        TypeError
            If data is None.
        """
        if data is None:
            raise TypeError('Dados não podem ser None')
        if len(data) < 2:
            # Empty or single-element sequences are already sorted.
            # (The old `data == []: return False` branch was unreachable:
            # the length check above already returned.)
            return data
        self.max_num(data)
        return data

    def max_num(self, data):
        """Selection-sort *data* in place (name kept for compatibility).

        Repeatedly swaps the minimum of the unsorted tail into position;
        the debug print of the whole list on every pass has been removed.
        """
        n = len(data)
        for i in range(n):
            min_idx = i
            for j in range(i + 1, n):
                if data[j] < data[min_idx]:
                    min_idx = j
            data[i], data[min_idx] = data[min_idx], data[i]
import numpy as np
# Step parameters for the 1-D toy environment.
dt = 1.
sigma = 0.05
num_steps = 5
R = 1.  # 0.25  -- action scale


def dynamics(state, action, rng):
    """Advance the 1-D random walk one step.

    Parameters
    ----------
    state : float
        Current scalar state.
    action : float
        Applied action; scaled by R.
    rng : np.random.RandomState
        Noise source.  The original ignored this argument and drew from the
        global np.random, defeating the reproducibility the parameter
        provides (callers pass a RandomState -- see random_act_generator's
        rng.rand()).

    Returns
    -------
    tuple
        (next_state, reward): reward is 70 below 1., 500 above 5., else 0.
    """
    nxt_state = state + action * R
    nxt_state += rng.randn() * 0.01
    r = 0
    if nxt_state < 1.:
        r = 70
    elif nxt_state > 5.:
        r = 500
    return nxt_state, r
def random_act_generator(state, rng):
    """Sample a uniform random action in [-0.1, 0.9); *state* is ignored."""
    offset = 0.1
    return rng.rand() - offset
def terminal_estimator(leaf, discount, rng):
    """Crude leaf-value estimate: 1% of the leaf's scalar state.

    *discount* and *rng* are unused here (a commented-out random-rollout
    estimator previously lived in this function).
    """
    scale = 0.01
    return leaf.state * scale
|
# -*- coding: utf-8 -*-
from .database import Database as db
from .command import Command, CommandError, authorise
from .context import Context
from .client import bot, ratelimit
from . import language
from .utils import get_next_arg
from . import converters
from .interaction import (ButtonController, BasePager, NamedPage, NamedPager,
Icon, Lister)
from .alias import get_alias, toggle_alias
from .botban import (get_guild_botbans, get_user_botbans, is_botbanned,
toggle_botban)
from . import toggle
from .toggle import (get_guild_toggles, is_toggled, toggle_elements,
enable_elements, disable_elements, CommandToggle)
from . import authority
# this actually uses the framework, so it needs to go last
from . import default_converters
from . import default_commands
from .default_commands import Help # Decorator definitions.
|
from django.db import models
# Create your models here.
class Ingredient(models.Model):
    """An ingredient that recipes reference through Proportion."""
    # Display name of the ingredient.
    title = models.CharField(max_length=256)

    def __str__(self):
        return self.title

    def __unicode__(self):
        # Python 2 compatibility (legacy Django).  The stray trailing
        # `pass` after the methods was dead code and has been removed.
        return self.title
class Measure(models.Model):
    """A unit of measure (cup, gram, ...) used by Proportion."""
    # Display name of the unit.
    title = models.CharField(max_length=256)

    def __str__(self):
        return self.title

    def __unicode__(self):
        # Python 2 compatibility (legacy Django).  The stray trailing
        # `pass` after the methods was dead code and has been removed.
        return self.title
class Recipe(models.Model):
    """A recipe; its ingredient amounts live in related Proportion rows."""
    title = models.CharField(max_length=512)
    description = models.TextField()
    # Optional photo, stored under MEDIA_ROOT/recipe/.
    image = models.ImageField(upload_to='recipe', null=True, blank=True)

    def __str__(self):
        return self.title

    def __unicode__(self):
        # Python 2 compatibility (legacy Django).  The stray trailing
        # `pass` after the methods was dead code and has been removed.
        return self.title
class Proportion(models.Model):
    """How much of one ingredient (in one measure) a recipe needs."""
    # NOTE(review): ForeignKey without on_delete implies Django < 2.0 --
    # confirm before upgrading the project.
    ingredient = models.ForeignKey(Ingredient)
    measure = models.ForeignKey(Measure)
    # Amount of `measure` units of `ingredient`.
    value = models.PositiveIntegerField()
    recipe = models.ForeignKey(to=Recipe, related_name="proportions", null=False)

    def __str__(self):
        # e.g. "2 cup of flour".  The stray trailing `pass` was dead code
        # and has been removed.
        return '%d %s of %s' % (self.value, self.measure, self.ingredient)
|
# Generated by Django 2.2.10 on 2020-02-15 01:45
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated by Django 2.2.10: renames two Confession fields,
    # hunter -> author and url -> body.

    dependencies = [
        ('confession', '0001_initial'),
    ]

    operations = [
        migrations.RenameField(
            model_name='confession',
            old_name='hunter',
            new_name='author',
        ),
        migrations.RenameField(
            model_name='confession',
            old_name='url',
            new_name='body',
        ),
    ]
|
"""Helper functions for Jinja2 templates.
This file is loaded by jingo and must be named helpers.py
"""
from django.conf import settings
from django.utils.six.moves.urllib_parse import urlencode
from django_jinja import library
from jinja2 import contextfunction, Markup
from ..views import can_create, can_refresh
@library.global_function
@contextfunction
def add_filter_to_current_url(context, name, value):
    """Return the current URL with the query filter *name*=*value* applied.

    If *name* is already present its value is replaced in place (preserving
    parameter order); otherwise the pair is appended at the end.
    """
    pairs = []
    replaced = False
    for key, current in context['request'].GET.items():
        if key == name:
            pairs.append((name, value))
            replaced = True
        else:
            pairs.append((key, current))
    if not replaced:
        pairs.append((name, value))
    return context['request'].path + '?' + urlencode(pairs)
@library.global_function
@contextfunction
def drop_filter_from_current_url(context, name):
    """Return the current path with query parameter `name` removed."""
    request = context['request']
    remaining = [(key, value)
                 for key, value in request.GET.items()
                 if key != name]
    if not remaining:
        return request.path
    return request.path + '?' + urlencode(remaining)
def page_list(page_obj):
    """Determine list of pages for pagination control.

    The goals are:
    - If less than 8 pages, show them all
    - Show three numbers around current page
    - Always have 7 entries (don't move click targets)

    Return is a list like [1, None, 5, 6, 7, None, 66], where None marks
    an ellipsis ("...") slot. In separate function to facilitate unit
    testing.
    """
    paginator = page_obj.paginator
    current = page_obj.number
    end = paginator.num_pages
    if end <= 7:
        # Wrap in list() for a consistent return type: since Django 1.9
        # page_range is a range object, while the branches below build
        # lists.
        return list(paginator.page_range)
    if current <= 4:
        # Near the start: run of leading pages, ellipsis, last page.
        pages = list(range(1, max(6, current + 2))) + [None, end]
    elif current >= (end - 3):
        # Near the end: first page, ellipsis, run of trailing pages.
        pages = [1, None] + list(range(end - 4, end + 1))
    else:
        # Middle: first page, ellipsis, current +/- 1, ellipsis, last.
        pages = [1, None] + list(range(current - 1, current + 2)) + [None, end]
    return pages
@library.global_function
@contextfunction
def pagination_control(context, page_obj):
    """Add a bootstrap-style pagination control.

    The basic pagination control is:
    << - previous page
    1, 2 - First two pages
    ... - Break
    n-1, n, n+1 - Current page and context
    ... - Break
    total-1, total - End pages
    >> - Next page

    This breaks down when the current page is low or high, or the total
    number of pages is low. So, done as a function.
    """
    # Single page: no control at all.
    if not page_obj.has_other_pages():
        return ''

    # "Previous" arrow: link when available, disabled span otherwise.
    if page_obj.has_previous():
        prev_page = page_obj.previous_page_number()
        if prev_page == 1:
            # Page 1 is the canonical URL without a ?page= parameter.
            prev_url = drop_filter_from_current_url(context, 'page')
        else:
            prev_url = add_filter_to_current_url(context, 'page', prev_page)
        previous_nav = (
            '<li><a href="{prev_url}" aria-label="Previous">'
            '<span aria-hidden="true">&laquo;</span></a></li>'
        ).format(prev_url=prev_url)
    else:
        previous_nav = (
            '<li class="disabled"><span aria-hidden="true">&laquo;</span>'
            '</li>')

    # Numbered page links; None entries from page_list become "..." slots.
    pages = page_list(page_obj)
    page_navs = []
    current = page_obj.number
    for page in pages:
        if page is None:
            page_navs.append(
                '<li class="disabled"><span aria-hidden="true">&hellip;</span>'
                '</li>')
            continue
        if page == current:
            active = ' class="active"'
        else:
            active = ''
        if page == 1:
            page_url = drop_filter_from_current_url(context, 'page')
        else:
            page_url = add_filter_to_current_url(context, 'page', page)
        page_navs.append(
            '<li{active}><a href="{page_url}">'
            '{page}</a></li>'.format(
                active=active, page_url=page_url, page=page))
    page_nav = '\n    '.join(page_navs)

    # "Next" arrow, mirroring the previous-arrow logic.
    if page_obj.has_next():
        next_page = page_obj.next_page_number()
        next_url = add_filter_to_current_url(context, 'page', next_page)
        next_nav = """<li>
      <a href="{next_url}" aria-label="Next">
        <span aria-hidden="true">&raquo;</span>
      </a>
    </li>""".format(next_url=next_url)
    else:
        next_nav = """\
    <li class="disabled"><span aria-hidden="true">&raquo;</span></li>"""

    # Markup() marks the assembled HTML as safe for Jinja2 autoescaping.
    return Markup("""\
<nav>
  <ul class="pagination">
    {previous_nav}
    {page_nav}
    {next_nav}
  </ul>
</nav>
""".format(previous_nav=previous_nav, page_nav=page_nav, next_nav=next_nav))
# Thin template-global wrappers around the view-layer permission helpers,
# so templates can ask about MDN import permissions directly.

@library.global_function
@contextfunction
def can_create_mdn_import(context, user):
    return can_create(user)


@library.global_function
@contextfunction
def can_refresh_mdn_import(context, user):
    return can_refresh(user)


@library.global_function
@contextfunction
def can_reparse_mdn_import(context, user):
    # Reparsing is additionally gated by the MDN_SHOW_REPARSE setting.
    return settings.MDN_SHOW_REPARSE and can_create(user)


@library.global_function
@contextfunction
def can_commit_mdn_import(context, user):
    # Committing uses the same permission as creating.
    return can_create(user)
|
from django.contrib.auth import get_user_model
from rest_framework import authentication
# Resolved once at import time to the project's configured user model.
User = get_user_model()


class DevAuthentication(authentication.BasicAuthentication):
    """Development-only DRF authentication: every request is authenticated
    as a random existing user. Never enable this in production."""

    def authenticate(self, request):
        qs = User.objects.all()
        # Truthiness of a queryset executes it; empty DB -> no auth.
        if qs:
            # order_by('?') selects a random user on each request.
            user = qs.order_by('?').first()
            return (user, None)
        else:
            print('MUST HAVE A USER IN THE DATABASE TO USE REACT FOR DEVELOPMENT')
            return None
|
import datetime
from datetime import datetime
class longFrame(object):
    """Scratch/experiment class around an M-Bus style "long frame".

    The methods are exploratory debug code: they print intermediate
    values while experimenting with bit/BCD encodings of the current
    date and time. NOTE(review): convert() and markus() disagree in
    places (see inline notes) — confirm intended encoding before reuse.
    """

    def __init__(self):
        # Frame delimiter bytes and header fields.
        self._start = 0x68  # start byte
        self._stop = 0x16   # stop byte
        self._L = 0x00      # length field
        self._C = 0x00      # control field
        self._A = 0x00      # address field
        self._CI = 0x00     # control-information field

    def setField(self, L, C, A, CI):
        # Assign all header fields in one call.
        self._L = L
        self._C = C
        self._A = A
        self._CI = CI

    def convert(self):
        """Experiment: pack current date/time fields into bit patterns."""
        # datetime object containing current date and time
        now = datetime.now()
        print("now =", now)
        # dd/mm/YY H:M:S
        dt_string = now.strftime("%d/%m/%Y %H:%M:%S")
        print("date and time =", dt_string)
        y = datetime.now().year
        m = datetime.now().month
        d = datetime.now().day
        h = datetime.now().hour
        M = datetime.now().minute
        print(y,m,d,h,M)
        # Set high bit on the minute value. (Shadows builtin min.)
        min = M | 0x80
        print('Min',min, hex(min),hex(M))
        # NOTE(review): masks with 0x08 here, but markus() ORs with 0x80
        # — one of the two is probably a typo; confirm intended encoding.
        hour = h & 0x08
        print('hour',hour, (hex(hour)))
        # Keep low 5 bits of the day (1..31 fits in 5 bits).
        day = d & 0x1F
        print('day', hex(day))
        mon = m
        print('Month',hex(mon))
        # Split year into century and two-digit year.
        num1 = y//100
        num2 = y %100
        print(num1, num2)
        decimal_string = str(y)
        digits = [int(c) for c in decimal_string]
        print(digits)
        digits = [20,16]
        # 4-bit binary strings per digit pair (BCD-like).
        zero_padded_BCD_digits = [format(d,'04b') for d in digits]
        print(zero_padded_BCD_digits)
        x = zero_padded_BCD_digits[0]
        print(type(x),x)
        # NOTE(review): int(x) parses the '0100'-style string as DECIMAL;
        # int(x, 2) may have been intended here.
        xx = int(x)
        yy = xx & 0xE0
        print(hex(yy))

    def markus(self):
        """Alternative experiment for the same date/time packing."""
        now = datetime.now()
        print("now =", now)
        # dd/mm/YY H:M:S
        dt_string = now.strftime("%d/%m/%Y %H:%M:%S")
        print("date and time =", dt_string)
        y = datetime.now().year
        m = datetime.now().month
        d = datetime.now().day
        h = datetime.now().hour
        M = datetime.now().minute
        print(y,m,d,h,M)
        min = M | 0x80
        print('min, U16', min, hex(min),hex(M))
        print('int1', hex(min))
        hour = h | 0x80
        print('hour',hour, (hex(hour)))
        day = d & 0x1F
        print('day', hex(day))
        print('int2',hex(hour))
        # Pack a hard-coded year 2020 with month/day into two bytes.
        y = 2020
        hh = y %100
        # h = 20
        print(type(hh),bin(hh))
        hh = hh << 5
        #
        print('year',bin(hh))
        highYear = ((hh & 0xF00) >> 4)
        print('highYear',hex(highYear))
        int4 = highYear | m
        print('int4',hex(int4))
        lowYear = (hh & 0xff)
        int3 = lowYear | d
        print('int3', hex(int3))

    def bytTest(self):
        """Experiment with bytes/bytearray concatenation and indexing."""
        data = b''
        y = bytes([0xe5])
        print('y',type(y),y)
        x = bytearray(y)
        print(type(x), type(x[0]))
        data += x
        print(data)
        # Indexing a bytearray yields an int, so compare against 0xe5.
        if x[0] == 0xe5:
            print('t')
        else:
            print('gg')
if __name__ == '__main__':
    # Manual smoke test: only the bytes experiment is currently enabled.
    x = longFrame()
    # x.convert()
    #x.markus()
    x.bytTest()
#!/usr/bin/env python
from beta import beta_reduce
# 1.11 Chapter Exercises
# Normal Form or diverge?
# Lambda-calculus terms, one exercise per non-empty line; raw strings so
# backslash-lambda notation survives.
exercises_b = r"""
(\x.xxx)
(\z.zz)(\y.yy)
(\x.xxx)z
"""
# Beta Reduce
# example 6 modified so that first term doesn't shadow free var
# example 7 modified so that first term doesn't shadow free var
exercises_c = r"""
(\a.(\b.(\c.cba)))zz(\w.(\v.w))
(\x.(\y.xyy))(\a.a)b
(\y.y)(\x.xx)(\z.zq)
(\z.z)(\z.zz)(\z.zy)
(\x.(\y.xyy))(\y.y)y
(\m.mm)(\b.ba)c
(\x.(\y.(\m.xm(ym))))(\x.z)(\x.a)
"""
def do_exercises(exersise_text):
    """Beta-reduce each non-empty line of the exercise text, printing a
    question header (Q1:, Q2:, ...) before each reduction."""
    non_empty = (line for line in exersise_text.split('\n') if line)
    for number, expression in enumerate(non_empty, start=1):
        print('Q{}:'.format(number))
        beta_reduce(expression)
# Run both exercise sets when the module is executed.
print('1.11 - "Normal Form or diverge?" exercises\n')
do_exercises(exercises_b)
print('1.11 - "Beta Reduce" exercises\n')
do_exercises(exercises_c)
|
import cv2
import numpy as np
import matplotlib.pyplot as plt
# Hough-transform straight-line detection demo.
img = cv2.imread('./images/test_hough.png')
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)  # grayscale image
# Run Canny edge detection (thresholds 50/200).
edges = cv2.Canny(gray, 50, 200)
plt.subplot(121)
plt.imshow(edges, 'gray')
plt.xticks([])
plt.yticks([])
# Hough transform line detection: 1px rho step, 1-degree theta step,
# accumulator threshold 160 votes.
lines = cv2.HoughLines(edges, 1, np.pi / 180, 160)
lines1 = lines[:, 0, :]
# Each detected line is (rho, theta) in normal form; convert to two
# far-apart endpoints (+/-1000 px along the line) for drawing.
for rho, theta in lines1[:]:
    a = np.cos(theta)
    b = np.sin(theta)
    x0 = a * rho
    y0 = b * rho
    x1 = int(x0 + 1000 * (-b))
    y1 = int(y0 + 1000 * (a))
    x2 = int(x0 - 1000 * (-b))
    y2 = int(y0 - 1000 * (a))
    cv2.line(img, (x1, y1), (x2, y2), (255, 0, 0), 1)
plt.subplot(122)
plt.imshow(img, )
plt.xticks([])
plt.yticks([])
plt.show()
|
import time
import socket
CLIENT_PORT = 10001  #Do not change this, hardcoded in the client.py file
server_address = 0
# List of (ip, socket) pairs for every connected remote client.
clientSockets = []


# Will make remote client with ip 'ip' start sending data
def start(ip, session_id):
    """Open a TCP connection to the client at `ip` and send it the
    session id; skipped if a connection to that ip already exists."""
    global CLIENT_PORT
    global clientSockets
    setup_flag = 1
    #Check if connection aldready have been established to ip
    for clients in clientSockets:
        if (clients[0] == ip):
            print('A connection is already established to that ip...')
            setup_flag = 0
    try:
        if(setup_flag == 1):
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            server_address = (str(ip), CLIENT_PORT)
            sock.connect(server_address)
            #Send session id to client
            sock.send(str(session_id).encode())
            clientSockets.append((ip, sock))
    except Exception as e:
        # NOTE(review): the exception itself is discarded; consider
        # logging `e` for diagnosis.
        print ('Something went wrong while creating the socket...')
# Will stop remote client script from running
# Will stop remote client script from running
def stop(ip):
    """Send 'stop' to the client at `ip`, close its socket, and remove
    it from clientSockets; prints a message when the ip is unknown."""
    global server_address
    global clientSockets
    found = 0
    i = 0
    for clients in clientSockets:
        if (clients[0] == ip):
            try:
                sock = clients[1]
                data = 'stop'
                sock.send(data.encode())
                sock.close()
            except Exception as e:
                print('Something went wrong while trying to close that socket...')
                print('Client is probably down, removing socket...')
            finally:
                # Removes the entry even when the send/close failed.
                # NOTE(review): popping while iterating the same list, and
                # `i` is only incremented after the loop, so pop(i) is
                # effectively pop(0) — only correct when the matching
                # entry is first. Confirm intended behaviour.
                clientSockets.pop(i)
            found = 1
    if(found == 0):
        print('IP not found...')
        i+=1
# Will print 'ping' on client computer
# Will print 'ping' on client computer
def ping():
    """Send 'ping' to the first connected client.

    NOTE(review): raises IndexError when clientSockets is empty.
    """
    global clientSockets
    data = 'ping'
    sock = clientSockets[0][1]
    sock.send(data.encode())
|
# 异常检测
import matplotlib.pyplot as plt
import seaborn as sns
sns.set(context='notebook', style="white", palette=sns.color_palette("RdBu"))
import numpy as np
import pandas as pd
import scipy.io as sio
from scipy import stats
from sklearn.model_selection import train_test_split
from sklearn.metrics import f1_score, classification_report
# Load dataset 1 (server latency/throughput) and split the labelled
# portion into validation and test halves.
mat = sio.loadmat("./data/ex8data1.mat")
print(mat.keys())
X = mat.get("X")
Xval, Xtest, yval, ytest = train_test_split(mat.get("Xval"), mat.get("yval").ravel(), test_size=0.5)
# Scatter plot of the unlabelled training data.
sns.regplot(x="Latency", y="Throughput", data=pd.DataFrame(X, columns=['Latency', "Throughput"]),
            fit_reg=False,
            scatter_kws={'s': 20, 'alpha': 0.5})
plt.show()
# Fit a multivariate Gaussian: sample mean and full covariance.
mu = X.mean(axis=0)
print("mens:", mu, '\n')
cov = np.cov(X.T)
print("cov:", cov)
# np.dstack(np.mgrid[0:3, 0:3])
multi_normal = stats.multivariate_normal(mu, cov)
# Contour plot of the fitted density over a 0..30 grid.
x, y = np.mgrid[0:30:0.01, 0:30:0.01]
pos = np.dstack((x, y))
fig, ax = plt.subplots()
ax.contourf(x, y, multi_normal.pdf(pos), cmap='Blues')
plt.show()
def select_threshold(X, Xval, yval):
    """Pick the anomaly threshold that maximises F1 on validation data.

    Fits a multivariate Gaussian to X, evaluates its density on Xval,
    then scans 10000 candidate thresholds between the min and max
    density. Returns (best_epsilon, best_f1).
    """
    gaussian = stats.multivariate_normal(X.mean(axis=0), np.cov(X.T))
    densities = gaussian.pdf(Xval)
    candidates = np.linspace(np.min(densities), np.max(densities), num=10000)
    scores = [f1_score(yval, (densities <= e).astype('int'))
              for e in candidates]
    best = np.argmax(scores)
    return candidates[best], scores[best]
# Choose the best threshold on dataset 1 and report it.
e, fs = select_threshold(X, Xval, yval)
print('Best epsilon: {}\nBest F-score on validation data: {}'.format(e, fs))
def predict(X, Xval, e, Xtest, ytest):
    """Refit the Gaussian on train+validation data, flag test points with
    density <= e as anomalies (label 1), print a classification report,
    and return (fitted_distribution, predictions)."""
    Xdata = np.concatenate((X, Xval), axis=0)
    mu = Xdata.mean(axis=0)
    cov = np.cov(Xdata.T)
    multi_normal = stats.multivariate_normal(mu, cov)
    pval = multi_normal.pdf(Xtest)
    y_pred = (pval <= e).astype('int')
    print(classification_report(ytest, y_pred))
    return multi_normal, y_pred
# Evaluate on the dataset-1 test half and overlay results on the
# density contours, marking predicted anomalies with 'x'.
multi_normal, y_pred = predict(X, Xval, e, Xtest, ytest)
data = pd.DataFrame(Xtest, columns=['Latency', 'Throughput'])
data['y_pred'] = y_pred
x, y = np.mgrid[0:30:0.01, 0:30:0.01]
pos = np.dstack((x, y))
fig, ax = plt.subplots()
ax.contourf(x, y, multi_normal.pdf(pos), cmap='Blues')
sns.regplot(x="Latency", y='Throughput',
            data=data,
            fit_reg=False,
            ax=ax,
            scatter_kws={'s': 10, 'alpha': 0.4})
anomaly_data = data[data['y_pred'] == 1]
ax.scatter(anomaly_data['Latency'], anomaly_data['Throughput'],
           marker='x', s=50)
plt.show()
# Repeat the pipeline on the high-dimensional dataset 2.
mat = sio.loadmat("./data/ex8data2.mat")
X = mat.get("X")
Xval, Xtest, yval, ytest = train_test_split(mat.get("Xval"),
                                            mat.get("yval").ravel(),
                                            test_size=0.5)
e, fs = select_threshold(X, Xval, yval)
print('Best epsilon: {}\nBest F-score on validation data: {}'.format(e, fs))
multi_normal, y_pred = predict(X, Xval, e, Xtest, ytest)
print("find {} anomlies".format(y_pred.sum()))
|
# Remove duplicate values from `array`, keeping the first occurrence of
# each value, then print the result.
array = [-2, 4, -3, 4, 6, 6, 3, -2]

# BUG FIX: the previous O(n^2) pairwise scan appended the SAME index more
# than once when a value occurred three or more times, so the later
# pop() calls crashed with IndexError (e.g. for [1, 1, 1]). Tracking
# already-seen values records each duplicate index exactly once.
indexes_for_poping = []
seen = set()
for i, value in enumerate(array):
    if value in seen:
        indexes_for_poping.append(i)
    else:
        seen.add(value)

# Pop from the back so earlier indexes stay valid.
for i in reversed(indexes_for_poping):
    array.pop(i)
print('New array is ', array)
|
import tensorflow as tf
# Minimal gradient-descent demo fitting y = w * x with TensorFlow 2.
x = [1, 2, 3]
y = [1, 2, 3]
learning_rate = 0.1
w = tf.Variable(10.)
# NOTE(review): b is declared but never used in the model below.
b = tf.Variable(10.)
for i in range(10):
    # GradientDescentOptimizer -> GradientTape
    with tf.GradientTape() as tape:
        hx = w * x
        cost = tf.reduce_mean(tf.square(hx - y))
    # Differentiate the cost function with respect to w.
    dw = tape.gradient(cost, w)
    # w = w - learning_rate * gradient
    w.assign_sub(learning_rate * dw)
    print(i, cost.numpy())
# w should approach 1.0 after the updates.
print(w.numpy())
|
import model.ChakeList as mo
import json
data = {'sex':1,'age':2,'alchol':1.1}
def test(data):
print(data['sex']+data['age'])
return data
test(data)
|
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 7 20:19:36 2018
@author: user
Replace string data in a file (prints before/after contents).
"""
# Read: filename, string to replace, replacement string.
f_name = input()
str_old = input()
str_new = input()
with open(f_name,"r",encoding="utf-8") as fd:
    data=fd.read()
print("=== Before the replacement")
print(data)
# Replace every occurrence in memory; the file itself is not rewritten.
data=data.replace(str_old,str_new)
print("=== After the replacement")
print(data)
# @see https://adventofcode.com/2015/day/9
import re
from itertools import permutations
def parse(s: str):
    """Parse a line like 'London to Dublin = 464' into
    (('London', 'Dublin'), 464)."""
    match = re.match(r'([a-zA-Z]+) to ([a-zA-Z]+) = ([\d]+)', s.strip())
    origin, destination, distance = match.group(1, 2, 3)
    return (origin, destination), int(distance)
def find_dist(a: str, b: str, c: dict):
    """Distance between a and b from table c, trying both key orders."""
    try:
        return c[(a, b)]
    except KeyError:
        return c[(b, a)]
def calc_route_dist(r: list, c: dict):
    """Total distance of route r (a sequence of locations) using the
    pairwise distance table c."""
    total = 0
    for here, there in zip(r, r[1:]):
        total += find_dist(here, there, c)
    return total
def calc_all_route_dists(d: dict):
    """Total distance of every distinct route visiting all locations.

    Locations are gathered from the keys of distance table d; each
    permutation is a candidate route. Mirror-image routes (a->b->c vs
    c->b->a) cover the same distance, so only the lexicographically
    smaller of each pair is kept.
    """
    locs = set()
    for a, b in d.keys():
        locs.add(a)
        locs.add(b)
    return [calc_route_dist(route, d)
            for route in permutations(locs, len(locs))
            if route <= route[::-1]]
# Load the puzzle input: one '<A> to <B> = <dist>' line per pair.
with open('day9_input.txt', 'r') as f:
    data = dict()
    for l in f:
        k, d = parse(l)
        data[k] = d
# Sorted route lengths: shortest first (part 1), longest last (part 2).
routes = calc_all_route_dists(data)
routes.sort()
print('------------ PART 01 -------------')
print('Distance of the shortest route:', routes[0])
print('\n------------ PART 02 -------------')
print('Distance of the longest route:', routes[-1])
|
from emails import send_simple_message
if __name__ == '__main__':
    # Manual smoke test: send a hello-world email via the emails helper.
    send_simple_message('eldalai@gmail.com', 'MegaChess', 'hola mundo')
|
# Exercício 10.5 - Livro
class Televisao:
    """TV with a channel that wraps around within [minimo, maximo].

    avanca() steps the channel up (wrapping to minimo past maximo);
    volta() steps it down (wrapping to maximo below minimo).
    """

    def __init__(self, min=2, max=14):
        self.ligada = False
        self.canal = min
        self.minimo = min
        self.maximo = max
        self.tamanho = 0
        self.marca = ''

    def avanca(self):
        """Advance to the next channel, wrapping to the minimum."""
        print('Muda canal!')
        can_step_up = self.minimo <= self.canal < self.maximo
        self.canal = self.canal + 1 if can_step_up else self.minimo

    def volta(self):
        """Go back to the previous channel, wrapping to the maximum."""
        print('Muda canal!')
        can_step_down = self.minimo < self.canal <= self.maximo
        self.canal = self.canal - 1 if can_step_down else self.maximo
# Demo: two TVs with different channel ranges.
tv1 = Televisao(min=1, max=10)
print(f'TV1 mínimo: {tv1.minimo} máximo: {tv1.maximo}')
tv2 = Televisao(min=2, max=20)
print(f'TV2 mínimo: {tv2.minimo} máximo: {tv2.maximo}')
|
def wrapper_decorator(func):
    """Decorator that wraps func's string return value in an HTML
    paragraph (<p>...</p>)."""
    def wrapper(*args, **kwargs):
        # BUG FIX: was `**wkargs` — a typo that raised NameError on
        # every call of the decorated function.
        return u"<p>" + func(*args, **kwargs) + u"</p>"
    return wrapper
|
from rest_framework.generics import GenericAPIView, ListAPIView, CreateAPIView
from rest_framework.permissions import IsAuthenticated
from .models import Level, LevelPackage
from .serializers import LevelDetailedRetrieveSerializer, PackageSimpleRetrieveSerializer, UserPackageDetailSerializer, \
UserPackageCreateSerializer
from rest_framework.response import Response
import hashlib
import json
from rest_framework import status
def get_latest_hash():
    """MD5 hex digest of the serialized full level list.

    Used as a cheap change marker for client cache validation, not for
    security purposes.
    """
    all_levels = Level.objects.all()
    data = LevelDetailedRetrieveSerializer(all_levels, many=True).data
    data_hash = hashlib.md5(json.dumps(data).encode('utf-8'))
    return str(data_hash.hexdigest())
class ListLevelsView(GenericAPIView):
    """GET: all levels (ordered by id) plus an MD5 hash of the payload,
    so clients can later check freshness via IsUpToDate."""

    def get_queryset(self):
        # Not used; GET builds its own queryset below.
        pass

    def get(self, request):
        all_levels = Level.objects.all().order_by('id')
        data = LevelDetailedRetrieveSerializer(all_levels, many=True).data
        data_hash = hashlib.md5(json.dumps(data).encode('utf-8'))
        response = {'hash': str(data_hash.hexdigest()), 'levels': data}
        return Response(response)
class ListPacksView(ListAPIView):
    """GET: all level packages (simple representation); public."""
    serializer_class = PackageSimpleRetrieveSerializer
    queryset = LevelPackage.objects.all()


class ListUserPackage(ListAPIView):
    """GET: the authenticated user's packages.

    NOTE(review): `pur` is presumably the user's purchased/owned
    packages relation — verify against the UserProfile model.
    """
    permission_classes = (IsAuthenticated, )
    serializer_class = UserPackageDetailSerializer

    def get_queryset(self):
        return self.request.user.user_profile.pur.all()


class AddUserPack(CreateAPIView):
    """POST: attach a package to the authenticated user's profile; the
    profile is passed to the serializer via context."""
    permission_classes = (IsAuthenticated, )
    serializer_class = UserPackageCreateSerializer

    def get_serializer_context(self):
        return {'user_profile': self.request.user.user_profile}
class IsUpToDate(GenericAPIView):
    """GET ?hash=<md5>: report whether the client's cached level data is
    still current (its hash matches the server's latest level hash)."""

    def get_queryset(self):
        # Not used; GET needs no queryset.
        pass

    def get(self, request):
        # Renamed local from `hash` to avoid shadowing the builtin.
        client_hash = request.GET.get('hash', None)
        if not client_hash:
            return Response({
                'Error': 'No Hash'
            }, status=status.HTTP_400_BAD_REQUEST)
        return Response({'is_up_to_date': client_hash == get_latest_hash()})
|
# Generated by Django 3.0.3 on 2020-05-10 06:07
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make User.phone unique (and cap it at 20 characters)."""

    dependencies = [
        ('users', '0003_user_phone'),
    ]

    operations = [
        migrations.AlterField(
            model_name='user',
            name='phone',
            field=models.CharField(max_length=20, unique=True),
        ),
    ]
|
# Interactive adder: repeatedly read two integers and print their sum
# until the user enters 'q'.
# NOTE(review): the prompt says "two words" but the loop expects numbers.
msg="please input two words:(Enter 'q' to exit)"
while True:
    print(msg)
    try:
        first=input("please input your first number?\n")
        if first =='q':
            break
        first=int(first)
        second=input("please input your second number?\n")
        if second =='q':
            break
        second=int(second)
    except ValueError:
        # Non-numeric input: report and restart the loop.
        print("Your input was wrong!")
    else:
        print("The total result is:"+str(first+second))
|
import numpy as np
from glumpy import app, gloo, gl, glm
# GLSL sources for screen-space thick-line rendering (machine-generated
# style; the heavy parenthesisation suggests transpiled code). The
# vertex shader offsets each vertex perpendicular to the line in screen
# space, handling caps and miter joins; the fragment shader antialiases
# the line edges by distance.
vertex=""" uniform vec2 viewport;
uniform mat4 model, view, projection;
uniform float antialias, thickness, linelength;
attribute vec3 prev, curr, next;
attribute vec2 uv;
varying vec2 v_uv;
varying vec3 v_normal;
varying float v_thickness;
void main (){
vec4 NDC_prev = ((projection)*(view)*(model)*(vec4(prev.xyz, (1.e+0f))));
vec4 NDC_curr = ((projection)*(view)*(model)*(vec4(curr.xyz, (1.e+0f))));
vec4 NDC_next = ((projection)*(view)*(model)*(vec4(next.xyz, (1.e+0f))));
vec2 screen_prev = ((viewport)*(((((((NDC_prev.xy)/(NDC_prev.w)))+((1.e+0f))))/((2.e+0f)))));
vec2 screen_curr = ((viewport)*(((((((NDC_curr.xy)/(NDC_curr.w)))+((1.e+0f))))/((2.e+0f)))));
vec2 screen_next = ((viewport)*(((((((NDC_next.xy)/(NDC_next.w)))+((1.e+0f))))/((2.e+0f)))));
vec4 normal = ((model)*(vec4(curr.xyz, (1.e+0f))));
v_normal=normal.xyz;
if ( normal.z<0 ) {
v_thickness=((thickness)/((2.e+0f)));
} else {
v_thickness=((((thickness)*(((pow(normal.z, (5.e-1f)))+(1)))))/((2.e+0f)));
};
float w = ((((thickness)/((2.e+0f))))+(antialias));
vec2 position ;
vec2 t0 = normalize(((screen_curr.xy)-(screen_prev.xy)));
vec2 n0 = vec2(-t0.y, t0.x);
vec2 t1 = normalize(((screen_next.xy)-(screen_curr.xy)));
vec2 n1 = vec2(-t1.y, t1.x);
v_uv=vec2(uv.x, ((uv.y)*(w)));
if ( (prev.xy)==(curr.xy) ) {
v_uv.x=-w;
position=((screen_curr.xy)+(((-w)*(t1)))+(((uv.y)*(w)*(n1))));
} else {
if ( (curr.xy)==(next.xy) ) {
v_uv.x=((w)+(linelength));
position=((screen_curr.xy)+(((w)*(t0)))+(((uv.y)*(w)*(n0))));
} else {
vec2 miter = normalize(((n0)+(n1)));
float dy = ((w)/(max(dot(miter, n1), (1.e+0f))));
position=((screen_curr.xy)+(((dy)*(uv.y)*(miter))));
}
}
gl_Position=vec4((((((((2.e+0f))*(position)))/(viewport)))-((1.e+0f))), ((NDC_curr.z)/(NDC_curr.w)), (1.e+0f));
}"""
fragment=""" uniform float antialias, thickness, linelength;
varying vec2 v_uv;
varying float v_thickness;
varying vec3 v_normal;
void main (){
float d = (0.0e+0f);
float w = ((((v_thickness)/((2.e+0f))))-(antialias));
vec3 color = vec3((0.0e+0f), (0.0e+0f), (0.0e+0f));
if ( v_normal.z<0 ) {
color=(((7.5e-1f))*(vec3(pow(abs(v_normal.z), (5.e-1f)))));
};
if ( v_uv.x<0 ) {
d=((length(v_uv))-(w));
} else {
if ( (linelength)<=(v_uv.x) ) {
d=((distance(v_uv, vec2(linelength, 0)))-(w));
} else {
d=((abs(v_uv.y))-(w));
}
}
if ( d<0 ) {
gl_FragColor=vec4(color, (1.e+0f));
} else {
d=((d)/(antialias));
gl_FragColor=vec4(color, exp(((-d)*(d))));
};
}"""
# GLFW backend, full-HD window with a white clear color.
app.use("glfw")
window=app.Window(1920, 1080, color=(1,1,1,1,))
def bake(P, closed=False):
    """Prepare per-vertex attribute arrays for the thick-line shader.

    P is an (n, 3) array of polyline points. Returns (V_prev, V_curr,
    V_next, UV, total_length): V_* are views giving each vertex its
    previous/current/next point (duplicated along axis 1 for the two
    sides of the line) and UV carries (arc length, +/-1 side).
    """
    epsilon=(1.000000013351432e-10)
    n=len(P)
    # For a closed path whose endpoints do not already coincide, repeat
    # the first point at the end.
    if ( ((closed) and (((epsilon)<(((((P[0])-(P[-1])))**(2)).sum())))) ):
        P=np.append(P, P[0])
        P=P.reshape(((n)+(1)), 3)
        n=((n)+(1))
    # Two extra rows so every vertex has a previous and next neighbour.
    V=np.zeros((((1)+(n)+(1)),2,3,), dtype=np.float32)
    UV=np.zeros((n,2,2,), dtype=np.float32)
    # Overlapping views into V; filling V_curr also fills the shifted
    # prev/next views.
    V_prev, V_curr, V_next=(V[:-2],V[1:-1],V[2:],)
    V_curr[...,0]=P[:,np.newaxis,0]
    V_curr[...,1]=P[:,np.newaxis,1]
    V_curr[...,2]=P[:,np.newaxis,2]
    # Cumulative arc length along the polyline (per segment).
    L=np.cumsum(np.sqrt(((((P[1:])-(P[:-1])))**(2)).sum(axis=-1))).reshape(((n)-(1)), 1)
    UV[1:,:,0]=L
    UV[...,1]=(1,-1,)
    # Pad the neighbour rows: wrap around when closed, clamp otherwise.
    if ( closed ):
        V[0]=V[-3]
        V[-1]=V[2]
    else:
        V[0]=V[1]
        V[-1]=V[-2]
    return (V_prev,V_curr,V_next,UV,L[-1],)
# Build a spherical spiral of n points (20 turns over the sphere) and
# upload it, with baked neighbour/UV attributes, to the shader program.
n=2048
TT=np.linspace(0, ((20)*(2)*(np.pi)), n, dtype=np.float32)
R=np.linspace((1.0000000149011612e-1), ((np.pi)-((1.0000000149011612e-1))), n, dtype=np.float32)
X=((np.cos(TT))*(np.sin(R)))
Y=((np.sin(TT))*(np.sin(R)))
Z=np.cos(R)
P=np.dstack((X,Y,Z,)).squeeze()
V_prev, V_curr, V_next, UV, length=bake(P)
segments=gloo.Program(vertex, fragment)
segments["prev"]=V_prev
segments["curr"]=V_curr
segments["next"]=V_next
segments["uv"]=UV
segments["thickness"]=(1.5e+1)
segments["antialias"]=(1.4999999999999997e+0)
segments["linelength"]=length
segments["model"]=np.eye(4, dtype=np.float32)
segments["view"]=glm.translation(0, 0, -5)
# Animation state: rotation angles and an oscillating camera offset.
phi=0
theta=0
pos=0
rate=2300
posdir=(((1.e+0))/(rate))
@window.event
def on_resize(width, height):
    # Keep the projection matrix and shader viewport in sync with the
    # window size (30-degree FOV, near 2, far 100).
    segments["projection"]=glm.perspective((3.e+1), ((width)/(float(height))), (2.e+0), (1.e+2))
    segments["viewport"]=(width,height,)
@window.event
def on_init():
    # Depth testing so nearer line segments occlude farther ones.
    gl.glEnable(gl.GL_DEPTH_TEST)
@window.event
def on_draw(dt):
    # Per frame: clear, draw the line strip, then advance the rotation
    # angles and move the camera back and forth along x.
    global phi, theta, duration, pos, posdir, rate
    window.clear()
    segments.draw(gl.GL_TRIANGLE_STRIP)
    theta=((theta)+((1.0000000149011612e-1)))
    phi=((phi)+((2.0000000298023224e-1)))
    model=np.eye(4, dtype=np.float32)
    glm.rotate(model, theta, 0, 1, 0)
    glm.rotate(model, phi, 1, 0, 0)
    pos=((pos)+(((10)*(posdir))))
    # Reverse direction once pos passes the rate*posdir bound on either side.
    if ( ((((((0)<(posdir))) and (((((rate)*(posdir)))<(pos))))) or (((((posdir)<(0))) and (((pos)<(((rate)*(posdir)))))))) ):
        posdir=((posdir)*(-1))
    segments["view"]=glm.translation(pos, 0, -5)
    segments["model"]=model
# Uncapped frame rate.
app.run(framerate=0)
# -*- coding: utf-8 -*-
from typing import List
class Solution:
    def kidsWithCandies(self, candies: List[int], extraCandies: int) -> List[bool]:
        """Return, for each kid, whether giving them all extraCandies
        would bring them to (at least) the current maximum."""
        # Hoist max() out of the comprehension: previously it was
        # recomputed for every kid, making the method O(n^2).
        top = max(candies)
        return [candy + extraCandies >= top for candy in candies]
if __name__ == "__main__":
    # Self-check against the LeetCode examples.
    solution = Solution()
    assert [True, True, True, False, True] == solution.kidsWithCandies(
        [2, 3, 5, 1, 3], 3
    )
    assert [True, False, False, False, False] == solution.kidsWithCandies(
        [4, 2, 1, 1, 2], 1
    )
    assert [True, False, True] == solution.kidsWithCandies([12, 1, 12], 10)
|
import math
from typing import cast, Iterator, List, Optional, Sized, Union
import torch
import torch.distributed as dist
from torch.utils.data import Sampler
from torchvision.datasets.video_utils import VideoClips
class DistributedSampler(Sampler):
    """
    Extension of DistributedSampler, as discussed in
    https://github.com/pytorch/pytorch/issues/23430

    Shards a dataset across `num_replicas` ranks in contiguous groups of
    `group_size` indices, padding with wrapped-around indices so every
    rank receives the same number of samples.

    Example:
        dataset: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
        num_replicas: 4
        shuffle: False
        when group_size = 1
                RANK    |  shard_dataset
            =========================
            rank_0   |  [0, 4, 8, 12]
            rank_1   |  [1, 5, 9, 13]
            rank_2   |  [2, 6, 10, 0]
            rank_3   |  [3, 7, 11, 1]
        when group_size = 2
                RANK    |  shard_dataset
            =========================
            rank_0   |  [0, 1, 8, 9]
            rank_1   |  [2, 3, 10, 11]
            rank_2   |  [4, 5, 12, 13]
            rank_3   |  [6, 7, 0, 1]
    """

    def __init__(
        self,
        dataset: Sized,
        num_replicas: Optional[int] = None,
        rank: Optional[int] = None,
        shuffle: bool = False,
        group_size: int = 1,
    ) -> None:
        # Fall back to the process-group world size / rank when not given.
        if num_replicas is None:
            if not dist.is_available():
                raise RuntimeError("Requires distributed package to be available")
            num_replicas = dist.get_world_size()
        if rank is None:
            if not dist.is_available():
                raise RuntimeError("Requires distributed package to be available")
            rank = dist.get_rank()
        # Groups must tile the dataset exactly.
        if len(dataset) % group_size != 0:
            raise ValueError(
                f"dataset length must be a multiplier of group size dataset length: {len(dataset)}, group size: {group_size}"
            )
        self.dataset = dataset
        self.group_size = group_size
        self.num_replicas = num_replicas
        self.rank = rank
        self.epoch = 0
        # Per-rank sample count, rounded up so all ranks are equal.
        dataset_group_length = len(dataset) // group_size
        self.num_group_samples = int(math.ceil(dataset_group_length * 1.0 / self.num_replicas))
        self.num_samples = self.num_group_samples * group_size
        self.total_size = self.num_samples * self.num_replicas
        self.shuffle = shuffle

    def __iter__(self) -> Iterator[int]:
        # deterministically shuffle based on epoch
        g = torch.Generator()
        g.manual_seed(self.epoch)
        indices: Union[torch.Tensor, List[int]]
        if self.shuffle:
            indices = torch.randperm(len(self.dataset), generator=g).tolist()
        else:
            indices = list(range(len(self.dataset)))

        # add extra samples to make it evenly divisible
        indices += indices[: (self.total_size - len(indices))]
        assert len(indices) == self.total_size

        # Reshape into groups, then take every num_replicas-th group
        # starting at this rank (round-robin over groups).
        total_group_size = self.total_size // self.group_size
        indices = torch.reshape(torch.LongTensor(indices), (total_group_size, self.group_size))

        # subsample
        indices = indices[self.rank : total_group_size : self.num_replicas, :]
        indices = torch.reshape(indices, (-1,)).tolist()
        assert len(indices) == self.num_samples

        # When wrapping another Sampler, map positions through it.
        if isinstance(self.dataset, Sampler):
            orig_indices = list(iter(self.dataset))
            indices = [orig_indices[i] for i in indices]

        return iter(indices)

    def __len__(self) -> int:
        return self.num_samples

    def set_epoch(self, epoch: int) -> None:
        # Changing the epoch reseeds the shuffle for the next __iter__.
        self.epoch = epoch
class UniformClipSampler(Sampler):
    """
    Sample `num_video_clips_per_video` clips for each video, equally spaced.
    When number of unique clips in the video is fewer than num_video_clips_per_video,
    repeat the clips until `num_video_clips_per_video` clips are collected

    Args:
        video_clips (VideoClips): video clips to sample from
        num_clips_per_video (int): number of clips to be sampled per video
    """

    def __init__(self, video_clips: VideoClips, num_clips_per_video: int) -> None:
        if not isinstance(video_clips, VideoClips):
            raise TypeError(f"Expected video_clips to be an instance of VideoClips, got {type(video_clips)}")
        self.video_clips = video_clips
        self.num_clips_per_video = num_clips_per_video

    def __iter__(self) -> Iterator[int]:
        idxs = []
        # `s` tracks the global clip index offset of the current video.
        s = 0
        # select num_clips_per_video for each video, uniformly spaced
        for c in self.video_clips.clips:
            length = len(c)
            if length == 0:
                # corner case where video decoding fails
                continue

            # linspace + floor repeats clips when the video has fewer
            # unique clips than requested.
            sampled = torch.linspace(s, s + length - 1, steps=self.num_clips_per_video).floor().to(torch.int64)
            s += length
            idxs.append(sampled)
        return iter(cast(List[int], torch.cat(idxs).tolist()))

    def __len__(self) -> int:
        # Decodable videos each contribute exactly num_clips_per_video.
        return sum(self.num_clips_per_video for c in self.video_clips.clips if len(c) > 0)
class RandomClipSampler(Sampler):
    """
    Samples at most `max_video_clips_per_video` clips for each video randomly

    Args:
        video_clips (VideoClips): video clips to sample from
        max_clips_per_video (int): maximum number of clips to be sampled per video
    """

    def __init__(self, video_clips: VideoClips, max_clips_per_video: int) -> None:
        if not isinstance(video_clips, VideoClips):
            raise TypeError(f"Expected video_clips to be an instance of VideoClips, got {type(video_clips)}")
        self.video_clips = video_clips
        self.max_clips_per_video = max_clips_per_video

    def __iter__(self) -> Iterator[int]:
        idxs = []
        # `s` tracks the global clip index offset of the current video.
        s = 0
        # select at most max_clips_per_video for each video, randomly
        for c in self.video_clips.clips:
            length = len(c)
            size = min(length, self.max_clips_per_video)
            sampled = torch.randperm(length)[:size] + s
            s += length
            idxs.append(sampled)
        idxs_ = torch.cat(idxs)
        # shuffle all clips randomly
        perm = torch.randperm(len(idxs_))
        return iter(idxs_[perm].tolist())

    def __len__(self) -> int:
        return sum(min(len(c), self.max_clips_per_video) for c in self.video_clips.clips)
|
#!/usr/bin/env python
from __future__ import print_function
import rospy
# import gazebo_msgs.msg
from gazebo_msgs.srv import GetJointProperties
from gazebo_msgs.srv import ApplyJointEffort
import os
#This function will send the joint values with /gazebo/apply_joint_effort
#This function will send the joint values with /gazebo/apply_joint_effort
class PD_Controller:
    """Simple PD joint controller that applies efforts through the
    Gazebo /gazebo/apply_joint_effort service and logs the desired and
    current positions to text files for plotting."""

    def __init__(self, joint_name, Kp, Kd):
        self.error = 0
        self.old_error = 0
        self.time = rospy.Time.now()
        # Sentinel "far away" value until PD() records a real position.
        self.current = 10000000000
        self.joint_name = joint_name
        self.Kp = Kp  # proportional gain
        self.Kd = Kd  # derivative gain

    def send_joint_efforts(self, effort):
        # Apply the effort to this joint for 0.1 s.
        apply_effort = rospy.ServiceProxy("/gazebo/apply_joint_effort", ApplyJointEffort)
        apply_effort (self.joint_name, effort, rospy.Time(),rospy.Duration(0.1))
        #message fields: "joint_name: 'joint2', effort: 10.0, start_time: secs: 0 nsecs: 0,duration: secs: 10 nsecs: 0"

    def PD(self, current, desired):
        """One PD step: compute effort from position error and its delta,
        log positions, and send the effort to Gazebo."""
        catkin_pkg = os.getenv('ROS_PACKAGE_PATH').split(':')
        catkin_pkg = str(catkin_pkg[0])
        self.current = current
        # Ku = Kp/0.6
        # Tu = 0.01
        # Kd = (3*Ku*Tu)/40
        #Without gravity
        # Kp = 25
        # Kd = 10
        self.error = (desired - current)
        desired_file = open(catkin_pkg + "/rbe_proj/src/desired.txt", "a")
        desired_file.write(str(desired) + '\n')
        desired_file.close()
        # NOTE(review): `current_file = desired_file = open(...)` aliases
        # both names to the same handle — probably meant to assign only
        # current_file.
        current_file = desired_file = open(catkin_pkg + "/rbe_proj/src/current.txt", "a")
        current_file.write(str(current) + '\n')
        current_file.close()
        # Derivative term uses the raw error delta (no division by dt).
        delta_error = self.error- self.old_error
        if(self.joint_name == 'joint_6'):
            # Constant offset, presumably gravity compensation — confirm.
            calculated_effort = (self.Kp * self.error) + (self.Kd * delta_error) - 9.8
        else:
            calculated_effort = (self.Kp * self.error) + (self.Kd * delta_error)
        print('Joint :' + str(self.joint_name) + 'at ' + str(self.current))
        self.send_joint_efforts(calculated_effort)
        self.old_error = self.error
        # self.osc_per = rospy.Time.now() - self.time
#must implement a service to get a reference position for the last joint and get it to go there
#This part will obtain the joint positions from gazebo
#should be a subscriber
if __name__ == '__main__':
    # Truncate the position log files from any previous run.
    catkin_pkg = os.getenv('ROS_PACKAGE_PATH').split(':')
    catkin_pkg = str(catkin_pkg[0])
    desired_file = open(catkin_pkg + "/rbe_proj/src/desired.txt", "r+")
    current_file = open(catkin_pkg + "/rbe_proj/src/current.txt", "r+")
    desired_file.truncate(0)
    desired_file.close()
    current_file.truncate(0)
    current_file.close()
    rospy.init_node('joint_controller')
    # NOTE(review): position_reached is never set True and the break is
    # commented out, so this loop runs until the node is killed.
    position_reached = False
    pd_controller1 = PD_Controller('joint_2', 10, 120)
    pd_controller2 = PD_Controller('joint_5', 10, 40)
    pd_controller3 = PD_Controller('joint_6', 15, 20)
    while position_reached == False:
        # Query current joint positions from Gazebo.
        joint_2properties = rospy.ServiceProxy('/gazebo/get_joint_properties',GetJointProperties)
        joint_5properties = rospy.ServiceProxy('/gazebo/get_joint_properties',GetJointProperties)
        joint_6properties = rospy.ServiceProxy('/gazebo/get_joint_properties',GetJointProperties)
        current_joint_2properties = joint_2properties('joint_2')
        current_joint_5properties = joint_5properties('joint_5')
        current_joint_6properties = joint_6properties('joint_6')
        current_joint_2position = current_joint_2properties.position[0]
        current_joint_5position = current_joint_5properties.position[0]
        current_joint_6position = current_joint_6properties.position[0]
        # print(current_joint_position)
        desired_joint_2position = 0.8
        desired_joint_5position = 0.8
        desired_joint_6position = 0.2
        pd_controller1.PD(current_joint_2position, desired_joint_2position)
        pd_controller2.PD(current_joint_5position, desired_joint_5position)
        pd_controller3.PD(current_joint_6position, desired_joint_6position)
        # NOTE(review): condition requires error > 0.005 AND being within
        # +/-0.005 of the target — these look contradictory; verify the
        # intended convergence test.
        if((pd_controller2.error > 0.005)and pd_controller2.current > desired_joint_5position-.005 and pd_controller2.current < desired_joint_5position+ .005):
            print('Position reached!')
            #break
        else:
            print("Not there")
        rospy.sleep(0.1)
|
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.views import generic
from django.utils import timezone
from ..models import Question
class DetailView(generic.DetailView):
    """Poll question detail page; hides questions published in the future."""
    model = Question
    template_name = 'polls/detail.html'
    # Name under which the template accesses the object.
    context_object_name = 'question'
    # URL keyword argument name declared in the path().
    pk_url_kwarg = 'question_id'

    def get_object(self, queryset=None):
        requested_question = get_object_or_404(self.model, id=self.kwargs['question_id'])
        # Treat not-yet-published questions as nonexistent.
        if requested_question.pub_date > timezone.now():
            raise Http404
        else:
            return requested_question
|
#! -*- coding:utf8 -*-
import os
import sys
import json
reload(sys)
sys.setdefaultencoding("utf-8")
from gensqlalorm.config import get_db_config
from db_connect import DBConnectionPool
from db_executor import DBExecutor
db_connection_pool = None
db_executor = None
def init():
    """Lazily create the module-level DB connection pool and executor singletons."""
    global db_connection_pool
    global db_executor
    if db_connection_pool is None:
        db_connection_pool = DBConnectionPool(get_db_config())
    if db_executor is None:
        db_executor = DBExecutor(db_connection_pool)
def show_create_table(project_name, table_name):
    """
    Parse `SHOW CREATE TABLE` output into a list of column descriptions:
    [{
        "column": 'xxxx',
        "type": 'bigint(20)',
        "comment": 'xxxxx'
    }]
    """
    init()
    sql_r = db_executor.exec_sql(project_name, "show create table %s;" % table_name)
    # Row 0, column 1 of the result holds the full CREATE TABLE statement.
    table_create_sql = sql_r[0][1]
    result = []
    table_create_sql_lines = table_create_sql.split("\n")
    for table_create_sql_line in table_create_sql_lines:
        table_create_sql_line = table_create_sql_line.strip()
        # Column definition lines look like:  `name` type ... COMMENT 'text',
        if table_create_sql_line.startswith("`") and table_create_sql_line.endswith(","):
            one = {}
            one["column"] = table_create_sql_line.split("` ")[0].split("`")[1]
            one["type"] = table_create_sql_line.split("` ")[1].split(" ")[0]
            # NOTE(review): fragile -- breaks if the comment itself contains a
            # comma or quote; assumes a simple COMMENT 'text' suffix.
            one["comment"] = table_create_sql_line.split("COMMENT ")[1].replace(",", "").replace("'", "") if "COMMENT" in table_create_sql_line else ""
            result.append(one)
    return result
def desc_table(project_name, table_name):
    """
    Describe a table's columns via `DESC`. Returns:
    [
        {
            "name": 'id',
            "type": 'bigint(20)',
            "can_null": 'yes',
            "key_type": 'pri',
            "default_value": None,
            "extra": "auto_increment"
        }
    ]
    """
    init()
    sql_r = db_executor.exec_sql(project_name, "desc %s;" % table_name)
    result = []
    for sql_r_one in sql_r:
        # Lower-case every textual field; None (e.g. a NULL default) passes through.
        result.append({
            "name": sql_r_one[0].lower() if sql_r_one[0] is not None else None,
            "type": sql_r_one[1].lower() if sql_r_one[1] is not None else None,
            "can_null": sql_r_one[2].lower() if sql_r_one[2] is not None else None,
            "key_type": sql_r_one[3].lower() if sql_r_one[3] is not None else None,
            "default_value": sql_r_one[4].lower() if sql_r_one[4] is not None else None,
            "extra": sql_r_one[5].lower() if sql_r_one[5] is not None else None,
        })
    return result
def show_tables(project_name):
    """Return all table names in the project's database, e.g.
    ['table_name_1', 'table_name_2'].
    """
    init()
    rows = db_executor.exec_sql(project_name, "show tables;")
    if not rows:
        return []
    return [row[0] for row in rows]
if __name__ == "__main__":
    # Python 2 print statements -- this module predates Python 3 (see reload(sys) above).
    print json.dumps(show_tables("TestDB_2"))
    print json.dumps(desc_table("PayPrivilege", "gc_test_shard_table_2"))
|
# Exact half-up rounding via Decimal (binary floats alone can misround).
from decimal import Decimal, ROUND_HALF_UP
num = 123.456
digit = 0.1
# Quantize to the exponent of `digit` (0.1 -> one decimal place).
round_num = Decimal(str(num)).quantize(Decimal(str(digit)), rounding=ROUND_HALF_UP)
print(round_num)
# 自作の四捨五入関数
import math
def my_round(num, digit):
    """Round `num` half away from zero to `digit` decimal places."""
    factor = 10 ** digit
    sign = math.copysign(1, num)
    magnitude = sign * num  # |num|; the sign is restored on return
    # (2x + 1) // 2 == floor(x + 0.5): integer half-up rounding of the scaled value.
    return (magnitude * factor * 2 + 1) // 2 / factor * sign
num = -123.456
print(my_round(num, 1))  # -123.5: half rounded away from zero
# -*- coding: utf-8 -*-
'''测试LEGB搜索规则, local --enclosed -- global -- built in '''
# Define a global `str` -- deliberately shadows the built-in str() for the LEGB demo.
str = 'global str'
#定义测试的嵌套函数
def outer():
    """LEGB demo: the innermost (local) binding of `str` is the one printed."""
    str = 'outer str'  # enclosing-scope binding, shadows the global

    def inner():
        str = 'inner str'  # local binding, shadows enclosing and global
        print(str)

    inner()  # prints 'inner str'
# Call the outer function; prints 'inner str' (the local binding wins).
outer()
# analysis of a simple investment strategy on historical stock market data
import numpy as np
import matplotlib.pyplot as plt
import scipy.optimize as opt
# Historical S&P 500 series: columns are year, CPI, index value.
date, cpi, val = np.loadtxt('sp500.dat', unpack=True)
# normalize stock market values for convenience
val /= 90.0
# Exponential baseline ("fair value") curve fitted over time since 1871.
val0 = 1.32*np.exp(0.06395*(date - 1871))
# Allocation lookup grid: value/baseline ratio -> fraction held in stocks.
ratio = np.arange(0.3, 2.1, 0.05)
strategy = [1.0] * len(ratio)
interval = [(0.0,1.0)] * len(ratio)
# forward simulation of investment strategy
def invest(strategy):
    """Simulate `strategy` over the whole price series and return the
    NEGATIVE final worth, so a minimizer maximizes worth."""
    worth = 1.0
    for step in range(1, len(val)):
        rel_value = val[step-1]/val0[step-1]
        alloc = np.interp(rel_value, ratio, strategy)
        # un-invested fraction stays flat; invested fraction tracks the index
        worth = worth*(1.0 - alloc) + worth*alloc*val[step]/val[step-1]
    return -worth
def invest_plot(strategy):
    """Return the full worth trajectory (one entry per date) for `strategy`."""
    worth = [1.0]*len(val)
    for step in range(1, len(val)):
        rel_value = val[step-1]/val0[step-1]
        alloc = np.interp(rel_value, ratio, strategy)
        worth[step] = worth[step-1]*(1.0 - alloc) + worth[step-1]*alloc*val[step]/val[step-1]
    return worth
# Baseline (always fully invested), optimized, and "safe" strategies.
invest0 = invest(strategy)
scan0 = invest_plot(strategy)
res = opt.minimize(invest,strategy,bounds=interval)
optimal_strategy = res.x
scan = invest_plot(optimal_strategy)
safe_strategy = [1.0] * len(ratio)
# BUG FIX: the original `for (i,strat) in enumerate(safe_strategy): strat = 0.0`
# only rebound the loop variable and never modified the list. Assign by index
# so allocations are actually zeroed when the market is >1.5x the baseline.
for i in range(len(ratio)):
    if ratio[i] > 1.5:
        safe_strategy[i] = 0.0
scan2 = invest_plot(safe_strategy)
# Top panel: optimized allocation curve; bottom: worth trajectories (log scale).
fig, (ax1,ax2) = plt.subplots(2,1,tight_layout=True, figsize=(6.0,6.0))
ax1.plot(ratio,optimal_strategy,color='blue')
ax1.set_xlabel('value of risk asset relative to baseline')
ax1.set_ylabel('allocation')
ax2.semilogy(date,val0,color='black')
ax2.semilogy(date,scan0,color='red')
ax2.semilogy(date,scan,color='blue')
#ax2.semilogy(date,scan2,color='green')
ax2.set_xlabel('year')
ax2.set_ylabel('worth')
plt.savefig('invest.pdf', bbox_inches='tight', pad_inches=0.01)
import os
import pickle
HOST = "0.0.0.0"  # bind/connect on all interfaces
class PATHS:
    """Filesystem locations of the TLS certificate/key resources,
    resolved relative to this module's directory."""
    _HERE = os.path.dirname(__file__)
    SERVER_CERT = os.path.join(_HERE, 'resources/server.crt')
    SERVER_KEY = os.path.join(_HERE, 'resources/server.key')
    CLIENT_CERT = os.path.join(_HERE, 'resources/client.crt')
class DATABASE:
    """Connection settings for the detections RDS instance.

    SECURITY(review): real-looking credentials are hard-coded in source.
    Move USER/PASSWORD to environment variables or a secrets manager and
    rotate the leaked password.
    """
    HOST = "detections.cdtm4kvpi8en.eu-west-2.rds.amazonaws.com"
    DATABASE = "detections"
    USER = "BPJ00SSG00"
    PASSWORD = "!:*N~ZM48>n!`aZA"
class MQT:
    """MQTT broker settings; all topics are subscribed at QoS 2."""
    BROKER_HOST = HOST
    BROKER_PORT = 1884
    CLIENT_ID = "Server"
    TOPIC = [("cycle/init", 2), ("cycle/zones", 2), ("cycle/gps", 2)]
class SOCK:
    """TLS socket server settings (certificates taken from PATHS)."""
    SERVER_HOST = HOST
    SERVER_PORT = 5001
    SERVER_CERT = PATHS.SERVER_CERT
    SERVER_KEY = PATHS.SERVER_KEY
    CLIENT_CERT = PATHS.CLIENT_CERT
|
import numpy as np
import cv2
from queue import MyQueue
import util
import logging
class Engine():
    """Watches a 'goal camera' stream for a burst of motion and, when one is
    detected, saves the frames around the event (from both streams) as videos.

    NOTE(review): the frame-diff/contour thresholds below are tuned constants;
    behaviour beyond what the code shows is unverified.
    """
    def __init__(self,capForGoal,capForRecording):
        # capForGoal: stream analysed for the goal event.
        # capForRecording: second stream saved alongside it.
        self.capGoal = capForGoal
        self.capRec = capForRecording
        self.limit = 5*20 # 5 means before sec
        # Ring buffers of frames before/after the detected event, per stream.
        self.storePrev = MyQueue(limit=self.limit)
        self.storeNext = MyQueue(limit=self.limit)
        self.storePrevRec = MyQueue(limit=self.limit)
        self.storeNextRec = MyQueue(limit=self.limit)
        self.isStartNext = False  # True once an event has been detected
        self.framenum = 0
        self.fpsGoal = capForGoal.get(cv2.CAP_PROP_FPS)
        self.fpsRec = capForRecording.get(cv2.CAP_PROP_FPS)
        logging.basicConfig(filename='app.log', filemode='w', format='%(name)s - %(levelname)s - %(message)s')
        pass
    def saveVideo(self):
        """Concatenate before+after buffers, write goal.avi / rec.avi, reset."""
        self.isStartNext = False
        #todo
        self.storePrev.queue.extend(self.storeNext.queue)
        image_array_goal = self.storePrev.queue
        self.storePrevRec.queue.extend(self.storeNextRec.queue)
        image_array_rec = self.storePrevRec.queue
        util.saveImageAsVideo(image_array_goal,"goal.avi",fps=self.fpsGoal)
        util.saveImageAsVideo(image_array_rec,"rec.avi",fps=self.fpsRec)
        # Clear all buffers for the next event.
        self.storeNext.queue.clear()
        self.storePrev.queue.clear()
        self.storeNextRec.queue.clear()
        self.storePrevRec.queue.clear()
        self.framenum = 0
        pass
    def run(self):
        """Main loop: read both streams in sync, detect the event, buffer and save."""
        framepos = 0
        capture = self.capGoal
        captureRec = self.capRec
        res,frame = capture.read()
        res,frameRec = captureRec.read()
        oldframe = None
        diff = None
        while(res):
            res,frame = capture.read()
            #sync two video based on frameRate
            frameNumRec = capture.get(cv2.CAP_PROP_POS_FRAMES) * self.fpsRec//self.fpsGoal
            captureRec.set(cv2.CAP_PROP_POS_FRAMES,frameNumRec)
            res,frameRec = captureRec.read()
            if self.isStartNext == True:
                # Event already seen: accumulate "after" frames until the limit.
                if frame is None:
                    self.saveVideo()
                    continue
                self.storeNext.insertItem(frame.copy())
                if(frameRec is not None):
                    self.storeNextRec.insertItem(frameRec.copy())
                if (len(self.storeNext.getAllItems())> self.limit):
                    self.saveVideo()
                    continue
            else:
                # No event yet: keep a rolling window of "before" frames.
                if frame is None:
                    break
                self.storePrev.insertItem(frame.copy())
                self.storePrevRec.insertItem(frameRec.copy())
            framepos += 1
            # Red-ish colour mask (resImg is computed but not used further).
            hsv_frame = cv2.cvtColor(frame,cv2.COLOR_BGR2HSV)
            lower_red = np.array([70,30,70])
            upper_red = np.array([230,255,255])
            mask = cv2.inRange(hsv_frame,lower_red,upper_red)
            resImg = cv2.bitwise_and(frame,frame,mask=mask)
            sign = 0
            if oldframe is None:
                oldframe = frame
                pass
            else:
                # Blurred grayscale frame difference -> binary motion mask.
                gray_frame = cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY)
                gray_frame = cv2.GaussianBlur(gray_frame,(15,15),0)
                gray_old = cv2.cvtColor(oldframe,cv2.COLOR_BGR2GRAY)
                gray_old = cv2.GaussianBlur(gray_old,(15,15),0)
                Conv_hsv_Gray = cv2.subtract(gray_frame,gray_old)
                # color the mask red
                blur = cv2.GaussianBlur(Conv_hsv_Gray,(5,5),0)
                ret, th = cv2.threshold(blur, 30, 255,cv2.THRESH_BINARY)
                #deliation
                kernel = np.ones((5,5),np.uint8)
                dilation = cv2.dilate(th,kernel,iterations = 1)
                # gray_blurred = cv2.blur(dilation, (3, 3))
                #detecting circle
                contours,hierarchy = cv2.findContours(dilation,cv2.RETR_LIST,cv2.CHAIN_APPROX_SIMPLE)
                cnt = contours
                sign = 0
                if(cnt is not None):
                    # Count mid-sized moving blobs (circle area roughly 500..10000 px).
                    for i in cnt:
                        (x,y),radius = cv2.minEnclosingCircle(i)
                        center = (int(x),int(y))
                        radius = int(radius)
                        if(3.14*radius*radius<10000 and 3.14*radius*radius>500):
                            rect = cv2.boundingRect(i)
                            sign += 1
                # if(sign>20):
                #     cv2.circle(frame,center,radius,(0,255,0),2)
                # cv2.imshow("diff",dilation)
            oldframe = frame
            if(sign>15):
                # Enough moving blobs: flag the event and start the "after" buffer.
                h,w,_ = frame.shape
                cv2.putText(frame,"Goal...",(w//2,h//2),cv2.FONT_HERSHEY_SIMPLEX,2,(255,0,0),2,cv2.LINE_AA)
                self.isStartNext = True
                self.framenum = framepos
            cv2.imshow("Image",frame)
            cv2.imshow("ImageRec",frameRec)
            # 'q' quits, 'n' skips to the next frame.
            if cv2.waitKey(20)&0xFF == ord('q'):
                break
            elif cv2.waitKey(20)& 0xFF == ord('n'):
                continue
            else:
                pass
        if(self.isStartNext == True):
            self.saveVideo()
# Open the goal-analysis stream (skipping ahead) and the recording stream.
capForGoal = cv2.VideoCapture('testVideos/1goal.mp4')
capForGoal.set(cv2.CAP_PROP_POS_FRAMES,700)
capForRecording = cv2.VideoCapture('testVideos/1.mp4')
# capForRecording.set(cv2.CAP_PROP_POS_FRAMES,700)
engine = Engine(capForGoal,capForRecording)
try:
    engine.run()
except Exception:
    # FIX: was a bare `except:` with logging.warning -- that also swallowed
    # SystemExit/KeyboardInterrupt and discarded the traceback.
    # logging.exception records the full stack trace with the message.
    logging.exception("engine failed")
|
# coding=utf-8
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Nothing in this file should be used outside of service/central
from knack.util import CLIError
from requests import Response
from knack.log import logging
from azext_iot import constants
from azext_iot.common import auth
def get_headers(token, cmd, has_json_payload=False):
    """Build the HTTP headers for an IoT Central request.

    When no token is supplied, acquires an AAD bearer token via the CLI
    context. Adds a JSON Content-Type only when a payload will be sent.
    """
    if not token:
        aad_token = auth.get_aad_token(cmd, resource="https://apps.azureiotcentral.com")
        token = "Bearer {}".format(aad_token["accessToken"])
    headers = {"Authorization": token, "User-Agent": constants.USER_AGENT}
    if has_json_payload:
        headers["Content-Type"] = "application/json"
    return headers
def try_extract_result(response: Response):
    """Return the parsed JSON body of *response*.

    Returns {"result": "success"} for 201/204 (success with no content).
    Raises CLIError when the body cannot be parsed or carries an "error" key.
    """
    # 201 and 204 response codes indicate success
    # with no content, hence attempting to retrieve content will fail
    if response.status_code in [201, 204]:
        return {"result": "success"}
    try:
        body = response.json()
    except Exception as err:
        # FIX: was a bare `except:` (also caught SystemExit/KeyboardInterrupt)
        # and dropped the original parse error; chain it for debuggability.
        raise CLIError("Error parsing response body") from err
    if "error" in body:
        raise CLIError(body["error"])
    return body
def log_response_debug(response: Response, logger: logging.Logger):
    """Debug-log the status code, URL and headers of *response*."""
    # Lazy %-style arguments: the strings are only rendered when DEBUG is
    # enabled, unlike the previous eager str.format calls.
    logger.debug("Response status code: %s", response.status_code)
    logger.debug("Response url: %s", response.url)
    logger.debug("Response headers: %s", response.headers)
|
# Read a repetition count from stdin and greet that many times.
y=int(input())
for _ in range(y):
    print("Hello")
|
import urllib.request as req
# Fetch the BBC news front page using a desktop-browser User-Agent.
url='https://www.bbc.com/news'
request=req.Request(url, headers={'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36'})
with req.urlopen(request) as response:
    data=response.read().decode()
#print(data)
import bs4
root=bs4.BeautifulSoup(data,'html.parser')
# Main headline. NOTE(review): the class names are BBC-specific and may change.
titles=root.find('h3',class_='gs-c-promo-heading__title gel-paragon-bold nw-o-link-split__text')
print(titles.string)
# Recover the headline's link via the anchor whose text matches it.
titlelinks=root.find('a', string=titles.string)
print('https://bbc.com'+titlelinks['href'])
subtitles=root.find_all('h3',class_='gs-c-promo-heading__title gel-pica-bold nw-o-link-split__text')
for subtitle in subtitles:
    ss=subtitle.string
    links=root.find_all('a', string=ss)
    n=len(links)
    if n ==0:
        # headline without a matching anchor: print the text only
        print(ss)
    elif n >= 1:
        # print the text plus the first matching link
        print(ss)
        print('https://bbc.com'+links[0]['href'])
|
from setuptools import setup, find_packages
setup(
    # NOTE(review): "matchingmarkeets" looks like a typo of "matchingmarkets",
    # but renaming would change the published package identity -- flagging only.
    name="matchingmarkeets",
    version="0.1.0",
    license='BSD-3',
    description='Matching Market Simulations',
    author='Matt Ranger',
    url='https://github.com/QuantEcon/MatchingMarkets.py',
    packages=find_packages(),
    keywords=['graph', 'network', 'matching'],
    package_data={
        # If any package contains *.txt or *.rst files, include them:
        '': ['*.md', '*.txt', '*.rst']
    },
    install_requires=[
        'matplotlib',
        'networkx',
        'numpy',
        'pandas',
        'scipy',
    ],
)
from rest_framework.response import Response
from .serializers import StudentSerializer
from .models import Student
from rest_framework import viewsets
class StudentView(viewsets.ViewSet):
    """Read-only endpoint listing every Student.

    BUG FIX: the base class is ``viewsets.ViewSet`` (capital S in "Set");
    ``viewsets.Viewset`` does not exist and raised AttributeError at import.
    """
    def list(self, request):
        """GET handler: return all Student rows, serialized."""
        queryset = Student.objects.all()
        serializer = StudentSerializer(queryset, many=True)
        return Response(serializer.data)
"""
*********************************************************************
This file is part of:
The Acorn Project
https://wwww.twistedfields.com/research
*********************************************************************
Copyright (c) 2019-2021 Taylor Alexander, Twisted Fields LLC
Copyright (c) 2021 The Acorn Project contributors (cf. AUTHORS.md).
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*********************************************************************
"""
import redis
import time
import pickle
from scipy.interpolate import CubicSpline
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import matplotlib.colors as mp_colors
import sys
import utm
import scipy
import geomdl.fitting as fitting
from geomdl.visualization import VisMPL
# import open3d
import math
import random
from scipy.interpolate import splprep, splev
sys.path.append('../vehicle')
from remote_control_process import EnergySegment
import spline_lib
import gps_tools
import matplotlib.path as path
_SMOOTH_MULTIPLIER = 0.00000000001
# r = redis.Redis(
#     host='acornserver.local',
#     port=6379)
# Local redis instance holding recorded GPS rows/paths.
r = redis.Redis(
    host='0.0.0.0',
    port=6379)
_ROW_POINTS_CUT_OFF = 8
# EnergySegment fields, kept here for reference:
# self.sequence_num = sequence_num
# self.time_stamp = end_gps.time_stamp
# self.start_gps = start_gps
# self.end_gps = end_gps
# self.duration = end_gps.time_stamp - start_gps.time_stamp
# self.distance_sum = distance_sum
# self.meters_per_second = distance_sum / self.duration
# self.watt_seconds_per_meter = total_watt_seconds/distance_sum
# self.height_change = end_gps.height_m - start_gps.height_m
# self.avg_watts = avg_watts
colorlist = ["#0000FF", "#00FF00", "#FF0066"]
idx = 0
orig_x = []
orig_y = []
colors = []
path_cuts = [(0,0), (23,0), (0,48)]
final_path = []
print("%%%%%%%%%%%%%%%%%%%%%%%%")
from area import area
_SQUARE_METERS_PER_ACRE = 4046.86
poly_path = None
row = None
# Grab the first stored row whose key mentions "parking_row".
for key in r.scan_iter():
    if "parking_row" in str(key):
        row = pickle.loads(r.get(key))
        # row.reverse()
        break
projection_distance_meters = 1.0
projected_points = []
rows_in_polygon = []
# Estimate the row heading, then build a parallel row offset 1 m to the side.
heading_1 = gps_tools.get_heading(row[10], row[0])
new_row = gps_tools.offset_row(row, projection_distance_meters, heading_1 + 90, copy_data=True, make_dict=True)
print("%%%%%%%%%%%")
# for point in row:
#     print(point)
for point in new_row:
    print(point)
# sys.exit()
from remote_control_process import NavigationParameters, PathControlValues, PathSection, Direction
#self.default_navigation_parameters = NavigationParameters(travel_speed=0.0, path_following_direction=Direction.BACKWARD, vehicle_travel_direction=Direction.FORWARD, loop_path=True)
#self.default_navigation_parameters = NavigationParameters(travel_speed=0.0, path_following_direction=Direction.FORWARD, vehicle_travel_direction=Direction.BACKWARD, loop_path=True)
forward_navigation_parameters = NavigationParameters(travel_speed=0.0, path_following_direction=Direction.FORWARD, vehicle_travel_direction=Direction.FORWARD, loop_path=False)
connector_navigation_parameters = NavigationParameters(travel_speed=0.0, path_following_direction=Direction.EITHER, vehicle_travel_direction=Direction.FORWARD, loop_path=False)
#self.default_navigation_parameters = NavigationParameters(travel_speed=0.0, path_following_direction=Direction.FORWARD, vehicle_travel_direction=Direction.FORWARD, loop_path=True)
#self.default_navigation_parameters = NavigationParameters(travel_speed=0.0, path_following_direction=Direction.BACKWARD, vehicle_travel_direction=Direction.BACKWARD, loop_path=True)
_MAXIMUM_ALLOWED_DISTANCE_METERS = 8
_MAXIMUM_ALLOWED_ANGLE_ERROR_DEGREES = 140
# PD-style gains for path following and for turns (identical values here).
path_control_vals = PathControlValues(angular_p=0.9, lateral_p=-0.25, angular_d=0.3, lateral_d=-0.05)
turn_control_vals = PathControlValues(angular_p=0.9, lateral_p=-0.25, angular_d=0.3, lateral_d=-0.05)
nav_path = PathSection(points=[],
                       control_values=path_control_vals,
                       navigation_parameters=forward_navigation_parameters,
                       max_dist=_MAXIMUM_ALLOWED_DISTANCE_METERS,
                       max_angle=_MAXIMUM_ALLOWED_ANGLE_ERROR_DEGREES,
                       end_dist=1.0,
                       end_angle=45)
print(len(row))
# sys.exit()
# Three passes: original row, the offset row, original row again ("three point" test).
subject_rows = [row[2:8], new_row[2:8], row[2:8]]
starting_direction = -1
# rows_in_polygon_save = gps_tools.chain_rows(subject_rows, row[0], "three_pt", forward_navigation_parameters, connector_navigation_parameters, turn_control_vals, nav_path, asdict=False)
rows_in_polygon = gps_tools.chain_rows(subject_rows, row[0], starting_direction, "three_pt", forward_navigation_parameters, connector_navigation_parameters, turn_control_vals, nav_path, asdict=True)
# Drop the final segment -- presumably a trailing connector; confirm.
rows_in_polygon = rows_in_polygon[:-1]
print("%%%%%%%%%%%%")
print("%%%%%%%%%%%%")
print("%%%%%%%%%%%%")
print("%%%%%%%%%%%%")
print("%%%%%%%%%%%%")
for single_row in rows_in_polygon:
    print(len(single_row.points))
# for point in single_row.points:
#     print(point)
#
# Publish the chained test path, then stop -- everything below is dead code.
r.set('twistedfields:gpspath:three_pt_test:key', pickle.dumps(rows_in_polygon))
sys.exit()
# NOTE(review): UNREACHABLE -- sys.exit() above always fires before this
# visualization code, and the `import open3d` at the top of the file is
# commented out, so this would raise NameError if ever reached.
min_x = 0
first_x = 0
min_y = 0
first_y = 0
mesh_array = []
colors = [[1,0,0],[0,1,0],[0,0,1]]
count = 0
lat_lon_tracks = []
# Render each track as a string of small boxes (big box marks the first point).
for track in rows_in_polygon:
    if count < len(colors):
        row_color = colors[count]
    else:
        row_color = [random.random(), random.random(), random.random()]
    count += 1
    track_lat_lon = []
    # track = track[3:-4]
    if len(track.points) > 2:
        big_point = True
        for point in track.points:
            if big_point:
                mesh_box = open3d.geometry.TriangleMesh.create_box(width=1.8, height=1.8, depth=1.8)
                big_point = False
            else:
                mesh_box = open3d.geometry.TriangleMesh.create_box(width=0.8, height=0.8, depth=0.8)
            mesh_box.compute_vertex_normals()
            mesh_box.paint_uniform_color(row_color)
            # Crude local projection: scale lat/lon and subtract fixed offsets.
            translation = [point["lat"]* 100000 - 3735387, point["lon"] * 100000 + 12233156, 0]
            print(translation)
            #print("{} {}".format(point["lat"] + min_x + first_x, point["lon"] + min_y + first_y))
            #latlon_point = utm.to_latlon(point["lat"] + min_x + first_x, point["lon"] + min_y + first_y, ut_zone[0], ut_zone[1])
            #print(latlon_point)
            #track_lat_lon.append(latlon_point)
            mesh_box.translate(translation)
            mesh_array.append(mesh_box)
#lat_lon_tracks.append(track_lat_lon)
pcd = open3d.geometry.PointCloud()
# np_points = np.random.rand(100, 3)
# print(np.array(point_cloud))
# From numpy to Open3D
# pcd.points = open3d.utility.Vector3dVector(gps_mesh.pcd)
# # pcd.points = open3d.utility.Vector3dVector(gps_mesh.slice_points)
#
mesh_frame = open3d.geometry.TriangleMesh.create_coordinate_frame(
    size=10, origin=[0, 0, 0])
#
# mesh_array.append(pcd)
# mesh_array.append(mesh_frame)
open3d.visualization.draw_geometries(mesh_array)
#print(row_list[row_key])
# for row_key in sorted(row_list.keys()):
# print(str(row_key))
# # print(row_key[0])
# obj = {'type':'Polygon','coordinates':[[[-180,-90],[-180,90],[180,90],[180,-90],[-180,-90]]]}
# area(obj)
# sys.exit()
#
# lat_lon_tracks = pickle.load(open('lat_lon_tracks_3.pickle', 'rb'))
#
# # print(lat_lon_tracks)
# tracknum = 1
# track_set_complete = []
#
# for track in lat_lon_tracks:
# # print(len(track))
# if len(track) > 60:
# print(tracknum)
# next_track = []
# for point in track:
# #point = gps_tools.GpsPoint(point[0], point[1])
# point = {'lat':point[0], 'lon':point[1]}
# print(point)
# next_track.append(point)
# track_set_complete.append(point)
# print(next_track)
# # r.set('twistedfields:gpspath:autogen_1_row_{}:key'.format(tracknum), pickle.dumps(next_track))
# tracknum += 1
# print("#################")
#
# # r.set('twistedfields:gpspath:autogen_1_complete:key', pickle.dumps(track_set_complete))
|
from Engine.Player.player import Player
from Engine.Elements.bag import Bag
from Engine.Elements.board import Board
from Engine.Elements.center import Center
from Engine.Elements.discard import Discard
from Engine.Elements.factory import Factory
# Type alias: a player count is a plain int.
PlayerCount = int
# Initial tile bag: 20 tiles of each of the 5 colours (keyed 0-4).
default_bag = {
    0: 20,
    1: 20,
    2: 20,
    3: 20,
    4: 20
}
class Game:
    """One game for 2-4 players: owns the bag, discard, center, factories
    and players, and sequences the per-player turns."""
    i = 0  # index of the player whose turn it is

    def __init__(self, n: PlayerCount):
        """Set up a game for n players; raises ValueError unless n is 2, 3 or 4."""
        self.num_players = n
        self.bag: Bag
        self.discard: Discard
        self.factories: list[Factory]
        self.players: list[Player]
        # Factory count scales with the player count.
        if n == 2:
            num_factories = 5
        elif n == 3:
            num_factories = 7
        elif n == 4:
            num_factories = 9
        else:
            raise ValueError
        self.bag = Bag(list(default_bag.keys()), list(default_bag.values()))
        self.discard = Discard(self.bag)
        self.center = Center()
        self.factories = []
        for i in range(0, num_factories):
            # BUG FIX: was `self.factories += Factory(self.center)`, which
            # tries to iterate a Factory instance instead of appending it.
            self.factories.append(Factory(self.center))
        self.players = []
        for i in range(0, n):
            player = Player(i, Board(), self.factories, )
            self.players.append(player)
        # Give every player a view of all the other players.
        for i in range(0, n):
            opponents: list[Player] = self.players.copy()
            opponents.pop(i)
            self.players[i].set_opponents(opponents)
        self.starting_player: Player = self.players[0]

    def fill_factories(self):
        """Refill every factory from the bag and hand out the starting marker."""
        self.center.has_starting_tile = True
        for factory in self.factories:
            self.check_bag()
            factory.fill_factory(self.bag)
        for player in self.players:
            if player.has_starting_marker:
                self.starting_player = player
                player.has_starting_marker = False

    def check_bag(self):
        """Top the bag up from the discard pile when it runs low."""
        # BUG FIX: was `self.bag.count == 0`, which compared the bound method
        # itself to 0 (always False). Call it, consistent with count() below.
        if self.bag.count() == 0:
            self.bag.add_bag(self.discard)
        if self.bag.count() < 4:
            tiles = self.discard.draw_tiles(4 - self.bag.count())
            for tile in tiles:
                self.bag.add_tile(tile)

    def set_starting_player(self):
        """Rotate the player list so the starting player is first, reset turn index."""
        idx = self.players.index(self.starting_player)
        for _ in range(0, idx):
            self.players.append(self.players.pop(0))
        self.i = 0
        return

    def player_request(self):
        """Return (current player index, that player's observable state)."""
        # provide state to agent
        return self.i, self.players[self.i].state()

    def player_action(self, args):
        """Apply the current player's move and advance the turn.

        Returns False on an invalid move, otherwise
        (True, new state, current score estimate, end-game condition met).
        """
        # NOTE(review): placeholder move -- substitute with real argument parsing.
        success = self.players[self.i].make_choice(Factory(Center()), 0, 0)
        if not success:
            return False
        self.i = (self.i + 1) % self.num_players
        if self.no_tiles_remain():
            # Round over: score/reset players and set up the next round.
            for player in self.players:
                player.end_turn_reset()
                if player.has_starting_marker:
                    self.starting_player = player
                    player.has_starting_marker = False
            self.center.has_starting_tile = True
            self.fill_factories()
            self.set_starting_player()
        state = self.players[self.i].state()
        score = self.players[self.i].score
        end_game = self.end_game_cond_met()
        # return True, new state, current score estimate, end game condition met
        return True, state, score, end_game

    def end_game_cond_met(self):
        """True when any player has triggered the end-game condition."""
        return any([player.end_game_condition_met() for player in self.players])

    def no_tiles_remain(self):
        """True when every factory and the center are empty."""
        for factory in self.factories:
            if not factory.is_empty():
                return False
        if self.center.is_empty():
            return True
        return False
|
#!/usr/bin/env python3
import sys
import re
from itertools import product
from util.aoc import file_to_day
from util.input import load_data
def main(test=False):
    """AoC 2021 day 17: probe launch simulation.

    Part 1: the highest y reached by any launch velocity that hits the target.
    Part 2: the number of distinct launch velocities that hit it.
    """
    data = load_data(file_to_day(__file__), test)[0]
    # Pull the two "a..b" ranges (x first, then y) out of the input line.
    r = re.compile(r"((-?[\d]+)..(-?[\d]+))")
    m = r.findall(data)
    sx, ex, sy, ey = tuple(
        int(x) for x in (m[0][1], m[0][2], m[1][1], m[1][2])
    )
    # Candidate velocities: vx can't overshoot ex on the first step;
    # vy is bounded by |sy| (a faster upward shot falls through the target).
    vectors = product(range(ex + 1), range(sy, abs(sy) + 1))
    max_y = 0
    ok = set()
    for x, y in vectors:
        px, py = 0, 0
        ys = [py]
        vx, vy = x, y
        # Step until the probe passes the target to the right or below.
        while px <= ex and py >= sy:
            px += vx
            py += vy
            ys.append(py)
            if sx <= px <= ex and sy <= py <= ey:
                ok.add((x, y))
                max_y = max(max_y, *ys)
                break
            vx = max(0, vx - 1)  # drag: vx decays toward 0
            vy -= 1              # gravity
    print("2021:17:1 =", max_y)
    print("2021:17:2 =", len(ok))
if __name__ == "__main__":
    # Pass "test" on the command line to run against the sample input.
    test = len(sys.argv) > 1 and sys.argv[1] == "test"
    main(test)
|
class Solution:
    def pivotArray(self, nums: List[int], pivot: int) -> List[int]:
        """Partition nums into (< pivot, == pivot, > pivot) sections,
        keeping the relative order inside each section stable."""
        below = [n for n in nums if n < pivot]
        at_pivot = [n for n in nums if n == pivot]
        above = [n for n in nums if n > pivot]
        return below + at_pivot + above
import matplotlib.pyplot as plt
from matplotlib.patches import Ellipse
import numpy as np
import math
color = [(1, 0, 0), (0, 1, 0), (0, 1, 1), (0, 0, 1)] # colors for plots (one RGB tuple per Gaussian)
def get_data(filename):
    """Load the whitespace-delimited dataset dataSets/<filename>.txt as a numpy array."""
    return np.loadtxt(f"dataSets/{filename}.txt")
def plot_data(text=""):
    """Scatter-plot the data coloured by responsibilities, overlay one
    covariance ellipse per Gaussian, and save the figure (reads module globals)."""
    # plot colored points
    ax = plt.subplot(111, aspect='equal')
    # NOTE(review): range(N_len-1) skips the final data point -- it matches
    # the (N_len-1)-row shape of `a`, but looks like an off-by-one; confirm.
    for val in range(N_len-1):
        # change color of points depending on diff to circles color(i) = ri1 blue + ri2 red + ri3 green + ri4 yellow
        point_color = 0.0
        for dimension in range(1, M+1):
            point_color += np.dot(a[val][dimension], color[dimension-1])
        # point color sometimes > 1 / < 0 -- clamp each channel into [0, 1]
        for icol in range(len(point_color)):
            if point_color[icol-1] > 1.0:
                point_color[icol-1] = 1.0
            elif point_color[icol-1] < 0.0:
                point_color[icol-1] = 0.0
        # plot all the points with their colors
        plt.plot(data[val][0], data[val][1], 'ro', color=point_color, markersize=2, marker="x")
    # The eigenvectors determine the orientation of the ellipse, and the eigenvalues determine how elogonated it is.
    # TODO may cause some issues: sort eigenvectors and eigenvalues
    for circle in range(0, M):
        lambda_, v = np.linalg.eig(sigma[circle+1])
        angle = np.rad2deg(np.arccos(v[0, 0]/np.linalg.norm(v[0])))
        ell = Ellipse(xy=mu[circle+1], width=lambda_[0]*5, height=lambda_[1]*5, angle=angle, color=color[circle])
        ell.set_facecolor('none')
        ax.add_artist(ell)
    plt.title("EM Algorithm Iteration: " + text + str(iteration) + " with Start in corner")
    # plt.show()
    plt.savefig(fname=("EM_" + text + "Iteration_" + str(iteration)))
    plt.close()
def plot_log():
    """Plot the per-component log-likelihood curves over all iterations
    (reads module-level `logPlot`) and save the figure."""
    # and plot the log-likelihood for every iteration $t_i=1:30$.
    # which is simply the sum of log of probability density function of Gaussian distribution.
    for i in range(M):
        plt.plot(logPlot[:, i], color=color[i])
    plt.title("Log Likelihood")
    plt.xlabel('Iterations')
    # plt.show()
    plt.savefig(fname="4_loglikelihood")
    plt.close()
# calculate the Gaussian distribution
def gauss(gx, gmu, gsigma):
    """Multivariate normal density N(gx | gmu, gsigma).

    BUG FIX: the normalization constant requires the determinant |Sigma|
    (np.linalg.det), not the Frobenius norm (np.linalg.norm) used before.
    Also derives the dimensionality from gmu instead of the module-global
    `dim`, which generalizes the function without changing its interface.
    """
    diff = np.asarray(gx) - np.asarray(gmu)
    d = diff.shape[0]
    # Mahalanobis quadratic form; a singular gsigma still raises LinAlgError here.
    exponent = -0.5 * diff.dot(np.linalg.inv(gsigma)).dot(diff)
    norm_const = (2 * math.pi) ** (d / 2) * math.sqrt(np.linalg.det(gsigma))
    return math.exp(exponent) / norm_const
def log_likelihood():
    """Return an (M+1)-vector whose entry j is log(sum_x N(x | mu_j, sigma_j)).

    NOTE(review): the standard EM log-likelihood is
    sum_x log(sum_j pi_j * N(x | mu_j, sigma_j)); this per-component variant
    is what the plot consumes -- confirm intent. Entry 0 stays 0 (unused).
    """
    logL = np.zeros(M+1)
    for circle in range(1, M+1):
        L = 0
        for x in data:
            L += gauss(gx=x, gmu=mu[circle], gsigma=sigma[circle])
        logL[circle] = math.log(L)
    return logL
# set start params:
data = get_data("gmm")
dim = 2 # no of dimensions
M = 4 # no of Gaussians
N_len = len(data) # no. data points
# a[n][j]: responsibility of component j for point n (column 0 unused).
a = np.zeros((N_len-1, M+1))
# Uniform mixing weights; index 0 unused.
pi = np.ones(M+1) * 1 / M
pi[0] = 0
N = np.zeros(M+1)
# mu = np.random.rand(5, 2) * 3 # don't want to write j-1 all the time
mu = np.array(((0.0, 0.0), (-2.0, -1.0), (3.0, -1.0), (-2.0, 5.0), (3.0, 5.0))) # don't want to write j-1 all the time
sigma = [0*np.eye(dim), np.eye(dim), np.eye(dim), np.eye(dim), np.eye(dim)] # e 5x2x2
logPlot = np.zeros((30, 4))
for iteration in range(1, 31): # 30 iterations
    if iteration == 1:
        plot_data(text="Before_")
    # e-step
    # compute responsibilities
    # Compute the posterior distribution for each mixture component and for all data points
    for n in range(0, N_len-1): # for every data point
        for j in range(1, M+1): # for every Gaussian distribution
            # normalizer: total weighted density of point n under all components
            div_sum = 0.0
            for i in range(1, M+1):
                div_sum += pi[i]*gauss(gx=data[n], gmu=mu[i], gsigma=sigma[i])
            a[n][j] = pi[j]*gauss(gx=data[n], gmu=mu[j], gsigma=sigma[j]) / div_sum
    # m step
    # Compute the new parameters using weighted estimates
    for j in range(1, M+1): # for every Gaussian distribution
        # calc N_j -- effective number of points assigned to component j
        N[j] = 0.0
        for nj in range(0, N_len-1):
            N[j] += a[nj][j]
        # calc mu_j -- responsibility-weighted mean
        # NOTE(review): this inner loop starts at 1, skipping data[0] -- confirm.
        mu[j] = np.zeros(2, float)
        for nmu in range(1, N_len-1):
            mu[j] = mu[j] + (1/N[j])*a[nmu][j]*data[nmu]
        # calc sigma_j -- responsibility-weighted outer products around mu_j
        sigma[j] = 0
        for nsi in range(0, N_len-1):
            # TODO root?? square??
            sigma[j] += 1/N[j]*a[nsi][j] * (data[nsi]-mu[j])[:, np.newaxis] * (data[nsi]-mu[j])
        # calc pi_j -- updated mixing weight
        pi[j] = N[j]/N_len
    # 1 nice plot should plot nice
    if iteration in [1, 3, 5, 10, 30]:
        plot_data()
    # get data for log likelihood (drop the unused 0th entry)
    logPlot[iteration-1] = np.delete(log_likelihood(), 0)
# plot log likelyhood
plot_log()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.