blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
69
| license_type
stringclasses 2
values | repo_name
stringlengths 5
118
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
63
| visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 2.91k
686M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 213
values | src_encoding
stringclasses 30
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 2
10.3M
| extension
stringclasses 246
values | content
stringlengths 2
10.3M
| authors
listlengths 1
1
| author_id
stringlengths 0
212
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4dbb490667e510006a9631a51f82564848df9b9f
|
18c699d61a7ead0c8d29efdddacd71468c227221
|
/pyspark-distinct-to-drop-duplicates.py
|
154bc0ea92950a02fb50fc46c18ec406f57704fb
|
[] |
no_license
|
devs-93/Saprk-Common-Operation
|
7847f009dca3466cd5a793bb81f1468e7ef6698b
|
b9ed874dcc8d059622bc63ef942925b1198c906d
|
refs/heads/main
| 2023-08-28T09:39:15.355336
| 2021-11-12T10:19:21
| 2021-11-12T10:19:21
| 427,312,696
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,152
|
py
|
import pyspark
from pyspark.sql import SparkSession
from pyspark.sql.functions import expr

spark = SparkSession.builder.appName('SparkByExamples.com').getOrCreate()

# Sample employee records: (name, department, salary). The first two rows are
# deliberate duplicates so distinct()/dropDuplicates() have something to remove.
employee_rows = [
    ("James1", "Sales1", 3000),
    ("James1", "Sales1", 3000),
    ("Michael", "Sales", 4600),
    ("Robert", "Sales", 4100),
    ("Maria", "Finance", 3000),
    ("James", "Sales", 3000),
    ("Scott", "Finance", 3300),
    ("Jen", "Finance", 3900),
    ("Jeff", "Marketing", 3000),
    ("Kumar", "Marketing", 2000),
    ("Saif", "Sales", 4100),
]
schema_cols = ["employee_name", "department", "salary"]
emp_df = spark.createDataFrame(data=employee_rows, schema=schema_cols)
emp_df.printSchema()
emp_df.show(truncate=False)

# distinct(): drop fully duplicated rows.
unique_df = emp_df.distinct()
print("Distinct count: " + str(unique_df.count()))
unique_df.show(truncate=False)

# dropDuplicates() with no arguments behaves exactly like distinct().
deduped_df = emp_df.dropDuplicates()
print("Distinct count: " + str(deduped_df.count()))
deduped_df.show(truncate=False)

# dropDuplicates(cols): keep one row per (department, salary) combination.
dept_salary_df = emp_df.dropDuplicates(["department", "salary"])
print("Distinct count of department salary : " + str(dept_salary_df.count()))
dept_salary_df.show(truncate=False)
|
[
"noreply@github.com"
] |
devs-93.noreply@github.com
|
ce51e5dbc2d819e139f9eb444bd8fc36f2ad298a
|
fee21a0de0a7e04d4cea385b9403fa9ba3109fc7
|
/量化交易/天勤量化/4-Demo.py
|
6a6d78c1d931802e3cc1b139b240dfd674b9ae98
|
[
"MIT"
] |
permissive
|
veritastry/trainee
|
2e9123fe0dfb87e4dacf8de3eb9c53d5ff68281b
|
eb9f4be00e80fddd0ab3d3e6ea9a20c55f5bcab8
|
refs/heads/master
| 2023-02-17T20:44:39.660480
| 2021-01-18T14:29:33
| 2021-01-18T14:29:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,224
|
py
|
from tqsdk import TqApi, TargetPosTask
'''
价差回归
当近月-远月的价差大于200时做空近月,做多远月
当价差小于150时平仓
'''
# Spread mean-reversion strategy:
#   when (near-month - deferred-month) price spread > 200: short the near
#   contract and go long the deferred contract;
#   when the spread < 150: flatten both positions.
api = TqApi()
quote_near = api.get_quote("SHFE.rb1810")  # near-month rebar contract
quote_deferred = api.get_quote("SHFE.rb1901")  # deferred-month rebar contract
# Target-position task for rb1810: drives the rb1810 position toward the requested target volume.
target_pos_near = TargetPosTask(api, "SHFE.rb1810")
# Target-position task for rb1901: drives the rb1901 position toward the requested target volume.
target_pos_deferred = TargetPosTask(api, "SHFE.rb1901")
while True:
    api.wait_update()  # block until any subscribed market data changes
    if api.is_changing(quote_near) or api.is_changing(quote_deferred):
        spread = quote_near.last_price - quote_deferred.last_price
        print("当前价差:", spread)
        if spread > 200:
            print("目标持仓: 空近月,多远月")
            # Positive target volume = long, negative = short, 0 = flat.
            target_pos_near.set_target_volume(-1)
            target_pos_deferred.set_target_volume(1)
        elif spread < 150:
            print("目标持仓: 空仓")
            target_pos_near.set_target_volume(0)
            target_pos_deferred.set_target_volume(0)
|
[
"Lincoln@usa.com"
] |
Lincoln@usa.com
|
dabffd515b7dd2a0abf3bf15380ace94082f2145
|
ed2a234be16e5ac95496cd959b531542a087faf6
|
/Functions Advanced - Exercise/10. Keyword Arguments Length.py
|
1b03e732297da99ed3703c06b09f393e7c4587db
|
[] |
no_license
|
Spas52/Python_Advanced
|
efc73eda5d10707f1f1a7407cc697448a985f014
|
7082c8947abba9b348f8372f68d0fc10ffa57fc1
|
refs/heads/main
| 2023-06-04T13:05:46.394482
| 2021-06-24T00:01:37
| 2021-06-24T00:01:37
| 379,756,494
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 52
|
py
|
def kwargs_length(**kwargs):
    """Return the number of keyword arguments received."""
    keyword_count = len(kwargs)
    return keyword_count
|
[
"noreply@github.com"
] |
Spas52.noreply@github.com
|
f86b93be73c5c731fcb859c52d812a7e36b71d4e
|
62bb0a92dd45198769e9ffa16eeb468039db7486
|
/PM/p3dx_mover/nodes/Circumnavigation_old.py
|
df1d0a1bf19da62c445a4435160590d73dd31fba
|
[] |
no_license
|
softelli/dissertacao-mestrado
|
1080e2f0891b8c47e3d30e9fecab1a0fa5840857
|
f94031ba4d9781cc55706ae8fbc383cbe103ef98
|
refs/heads/master
| 2020-12-31T07:59:08.487364
| 2019-05-03T13:00:29
| 2019-05-03T13:00:29
| 51,708,733
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,168
|
py
|
from __future__ import division
from LocalObject import LocalObject
import math
import random
import numpy as np
#static parameters
from ParametersServer import staticParameters
sp = staticParameters
class circum:
    """Circumnavigation behaviour for a P3-DX robot.

    Converts the detected distances/angles to a beacon, the nearest
    neighbouring robot and an "alien" into linear/angular velocity
    commands.  All tuning constants come from the module-level ``sp``
    (staticParameters).

    Fix: printCoef() mixed three Python 2 ``print`` statements with
    ``print()`` calls, which made the whole file a SyntaxError under
    Python 3; they are now function calls (identical output).
    """
    def __init__(self):
        # force magnitude
        self.force = 1.0
        # weight the coefficients exert on the robot
        self.coefficienteForce = 1.0
        # orientation coefficient
        self.orientationCoef = 1.0
        # interference coefficient
        self.interferenceCoef = 1.0
        # proximity coefficient
        self.proximityCoef = 1.0
        # forward (advance) coefficient
        self.forwardCoef = 1.0
        # conversion coefficient
        self.conversionCoef = 0.0
        # linear-velocity coefficient
        self.linearCoef = 1.0
        # radius to the beacon, as measured
        self.obtainedRadiusToBeacon = 0.0
        # angle to the beacon, as measured
        self.obtainedAngleToBeacon = 0.0
        # angle to the neighbouring robot, as measured
        self.obtainedAngleToRobot = 0.0
        # distance to the neighbouring robot, as measured
        self.obtainedDistanceToRobot = 0.0
        # distance to the alien, as measured
        self.obtainedDistanceToAlien = 0.0
        # angle to the alien, as measured
        self.obtainedAngleToAlien = 0.0
        # closest angle seen so far
        self.closerAngle = 0.0
        # relative difference between desired and obtained radius
        self.relativeDiffInRadius = 0.0
        # angular coefficient
        self.angularCoef = 0.1
        # new angular control term (see aCtrl)
        self.angularControl = 0.0
        # new linear control term
        self.linearControl = 0.0
        # sensor cone dimensions
        self.sensorRadius = sp.sensor_cone_radius
        self.sensorAngle = sp.sensor_cone_angle
        # superseded by the linear_velocity/angular_velocity parameters
        self.maxLinearVelocity = sp.max_linear_velocity
        self.linearVelocity = sp.min_linear_velocity
        self.angularVelocity = sp.init_angular_velocity
        # right wheel tangential/linear velocity (m/s)
        self.rightLinearVelocity = 0.0
        # left wheel tangential/linear velocity (m/s)
        self.leftLinearVelocity = 0.0
        # right wheel rotation (rev/s)
        self.rightWheelRotation = 0.0
        # left wheel rotation (rev/s)
        self.leftWheelRotation = 0.0
        # detection flags
        self.hasBeacon = False
        self.hasRobot = False
        self.hasAlien = False
    def rDiR(self, dRtB, oRtB):
        """Relative difference between desired (dRtB) and obtained (oRtB) beacon radius."""
        return (dRtB - oRtB) / dRtB
    def lC(self, pC, fC, iC):
        """Linear-velocity coefficient from proximity (pC), forward (fC) and interference (iC)."""
        return pC + 2.0 * fC * iC
    def cC(self, oAtB, dAtB):
        """Conversion coefficient from obtained (oAtB) vs desired (dAtB) beacon angle."""
        return 1.0 - (oAtB / dAtB)
    def aC(self, cF, rDiR, cC, iC):
        """Angular coefficient (increment for the angular velocity).

        cF: coefficienteForce, rDiR: relativeDiffInRadius,
        cC: conversionCoef, iC: interferenceCoef.
        NOTE(review): despite the original "divide by zero prevent" note,
        iC == 0 still raises ZeroDivisionError here -- confirm intent.
        """
        return cF * rDiR - cC/iC
    def oC(self, oAtR, Sa):
        """Orientation coefficient: 1 at oAtR == 0, 0 at oAtR == +/-Sa/2."""
        return 1 - 2 * ((oAtR**2)**0.5)/Sa
    def aV(self, aV, aC):
        """New angular velocity: current aV incremented by aC, clamped to sp limits."""
        aV += aC
        aV = self.limit(sp.min_angular_velocity, aV, sp.max_angular_velocity)
        return aV
    def iC(self, oRtB, dRtB):
        """Interference coefficient: |oRtB - dRtB| / dRtB (always non-negative)."""
        return ((oRtB - dRtB)**2)**0.5 / dRtB
    def pC(self, oDtR, mDbR, sR):
        """Proximity coefficient: 1.0 at the minimum robot distance (mDbR), 0.0 at sensor range (sR)."""
        return 1 - ((oDtR - mDbR)/(sR - mDbR))**2
    def fC(self, oDtR, dDtR, sR, mDbR):
        """Forward coefficient: 0.0 at the desired distance, up to 1.0 at sensor range, -1.0 at the minimum distance."""
        return (oDtR - dDtR) / (sR - dDtR + mDbR)
    def limit(self, min_value, value, max_value):
        """Clamp value into [min_value, max_value]."""
        if value > max_value:
            return max_value
        if value < min_value:
            return min_value
        return value
    def pAc(self, sensor_angle, obtained_angle, desired_angle):
        """Proportional angular difference in [-1, 1] relative to the sensor cone."""
        #min_angle = desired_angle - (sensor_angle / 2 - desired_angle)
        min_angle = 2 * desired_angle - sensor_angle / 2
        max_angle = sensor_angle / 2
        a = 0.0
        if obtained_angle < min_angle:
            a = -1.0
        elif obtained_angle > max_angle:
            a = 1.0
        else:
            a = 2 * (obtained_angle - min_angle) / (max_angle - min_angle) - 1
        return a
    def pRc(self, sensor_radius, obtained_radius, desired_radius):
        """Proportional radial difference, normalized by the sensor radius."""
        r = (obtained_radius - desired_radius) / sensor_radius
        return r
    def aCtrl(self, sensor_angle, obtained_angle, desired_angle, sensor_radius, obtained_radius, desired_radius):
        """Angular control term: pAc + pRc, or 0.0 when no radius was obtained."""
        if obtained_radius == 0.0:
            return 0.0
        pac = self.pAc(sensor_angle, obtained_angle, desired_angle)
        prc = self.pRc(sensor_radius, obtained_radius, desired_radius)
        return pac + prc
    def printCoef(self):
        """Dump coefficients, velocities and measurements (debug/monitoring).

        The three separator lines were Python 2 print statements; they are
        now print() calls, consistent with the rest of the method.
        """
        print("Coef. de interferencia (iC): %6.2f" % (self.interferenceCoef))
        print("Coef. de conversao (cC): %6.2f" % (self.conversionCoef))
        print("Coef. de orientacao (oC): %6.2f" % (self.orientationCoef))
        print("Coef. de proximidade (pC): %6.2f" % (self.proximityCoef))
        print("Coef. de avanco (fC): %6.2f" % (self.forwardCoef))
        print("Coef. de vel linear (lC): %6.2f" % (self.linearCoef))
        print("Coef. de vel angular (aC): %6.2f" % (self.angularCoef))
        print("Angular Control aCtrl: %6.2f" % (self.angularControl))
        print("---------------------------")
        print("Velocidade Linear (vL): %6.2f m/s" % (self.linearVelocity))
        print("Velocidade Angular (vA): %6.2f rad/s" % (self.angularVelocity))
        print("Velocidade Tang Direita : %6.2f m/s" % (self.rightLinearVelocity))
        print("Rotacao Roda Direita : %6.2f rad/s" % (self.rightWheelRotation))
        print("Velocidade Tang Esquerda : %6.2f m/s" % (self.leftLinearVelocity))
        print("Rotacao Roda Esquerda : %6.2f rad/s" % (self.leftWheelRotation))
        print("---------------------------")
        print("Angulo ao Beacon (aB): %6.2f" % (self.obtainedAngleToBeacon))
        print("Raio do Sensor (Sr): %6.2f" % (sp.sensor_cone_radius))
        print("Raio desejado (dR): %6.2f" % (sp.desired_radius))
        print("Raio obtido (oR): %6.2f" % (self.obtainedRadiusToBeacon))
        print("Diferenca entre raios (rD): %6.2f" % (self.relativeDiffInRadius))
        print("---------------------------")
        print("Angulo ao Robot (aR): %6.2f" % (self.obtainedAngleToRobot))
        print("Min Distancia entre Rob(mD): %6.2f" % (sp.min_distance_to_robot))
        print("Distancia desejada (dD): %6.2f" % (sp.desired_distance_to_robot))
        print("Distancia obtida (oD): %6.2f" % (self.obtainedDistanceToRobot))
    def updateDetectedObjects(self, detectedBeaconDist, detectedRobotDist, detectedAlienDist):
        """Refresh detection flags and measurements.

        A non-positive ``.linear`` on a detection means "not detected" and
        resets the corresponding flag and measurements to zero.
        """
        if detectedBeaconDist.linear > 0.0:
            self.hasBeacon = True
            self.obtainedRadiusToBeacon = detectedBeaconDist.linear
            self.obtainedAngleToBeacon = detectedBeaconDist.angular
        else:
            self.hasBeacon = False
            self.obtainedRadiusToBeacon = 0.0
            self.obtainedAngleToBeacon = 0.0
        if detectedRobotDist.linear > 0.0:
            self.hasRobot = True
            self.obtainedDistanceToRobot = detectedRobotDist.linear
            self.obtainedAngleToRobot = detectedRobotDist.angular
        else:
            self.hasRobot = False
            self.obtainedDistanceToRobot = 0.0
            self.obtainedAngleToRobot = 0.0
        if detectedAlienDist.linear > 0.0:
            self.hasAlien = True
            self.obtainedDistanceToAlien = detectedAlienDist.linear
            self.obtainedAngleToAlien = detectedAlienDist.angular
        else:
            self.hasAlien = False
            self.obtainedDistanceToAlien = 0.0
            self.obtainedAngleToAlien = 0.0
    def process(self, myVelocities, beaconCoord, robotCoord, alienCoord):
        """Compute and return the commanded linear/angular velocities.

        Updates every coefficient from the current detections, fills
        ``myVelocities`` in place, refreshes the wheel monitoring values
        and prints the debug dump.
        """
        # refresh detection flags and measurements
        self.updateDetectedObjects(beaconCoord, robotCoord, alienCoord)
        # relative difference between radii
        self.relativeDiffInRadius = self.rDiR(sp.desired_radius, self.obtainedRadiusToBeacon)
        # interference coefficient (no beacon -> 1.0)
        self.interferenceCoef = self.iC(self.obtainedRadiusToBeacon, sp.desired_radius)
        # conversion coefficient (no beacon -> 1.0)
        self.conversionCoef = self.cC(self.obtainedAngleToBeacon, sp.desired_angle_to_beacon)
        # orientation coefficient
        self.orientationCoef = self.oC(self.obtainedAngleToRobot, sp.sensor_cone_angle)
        # proximity coefficient
        self.proximityCoef = self.pC(self.obtainedDistanceToRobot, sp.min_distance_to_robot, sp.sensor_cone_radius)
        # forward coefficient
        self.forwardCoef = self.fC(self.obtainedDistanceToRobot, sp.desired_distance_to_robot, sp.sensor_cone_radius, sp.min_distance_to_robot)
        # linear-velocity coefficient
        self.linearCoef = self.lC(self.proximityCoef, self.forwardCoef, self.interferenceCoef)
        # angular-velocity coefficient
        self.angularCoef = self.aC(self.coefficienteForce, self.relativeDiffInRadius, self.conversionCoef, self.interferenceCoef)
        # new angular control (replaces the older aV()-based update)
        self.angularControl = self.aCtrl(sp.sensor_cone_angle, self.obtainedAngleToBeacon, sp.desired_angle_to_beacon, sp.sensor_cone_radius, self.obtainedRadiusToBeacon, sp.desired_radius)
        # new linear control
        self.linearControl = 0.0
        # fixed test value for result evaluation
        self.linearVelocity = 0.5 # m/s
        # http://143.106.148.168:9080/Cursos/IA368W/parte1.pdf pag 33
        self.angularVelocity = self.linearVelocity / sp.desired_radius + self.angularControl # rad/s
        myVelocities.angular = self.angularVelocity
        myVelocities.linear = self.linearVelocity
        # wheel rotations / tangential velocities (monitoring only)
        self.rightLinearVelocity = self.angularVelocity + self.linearVelocity
        # NOTE(review): left = -2*v - right looks odd for a differential drive -- confirm sign
        self.leftLinearVelocity = - 2 * self.linearVelocity - self.rightLinearVelocity
        self.rightWheelRotation = self.rightLinearVelocity / (sp.wheel_diameter * np.pi)
        self.leftWheelRotation = self.leftLinearVelocity / (sp.wheel_diameter * np.pi)
        self.printCoef()
        return myVelocities
|
[
"softelli@gmail.com"
] |
softelli@gmail.com
|
16011c0ebe4ae0b5330d83fc1d4a9a63f5e4b0a1
|
437a0f81f161438bba3554f440364b965fc3f432
|
/tests/unit/types/test_document.py
|
57d2ae8e6082e82b1cab460573f4c8fb735dc581
|
[
"Apache-2.0"
] |
permissive
|
ApurvaMisra/jina
|
dbbe2873771eafbbdf429c9dd717e26733496d49
|
1ecf2d74179f29f196a964f6d779b1a32bf78e7c
|
refs/heads/master
| 2023-01-24T12:46:27.030417
| 2020-12-03T17:53:41
| 2020-12-03T17:53:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,070
|
py
|
import numpy as np
import pytest
from google.protobuf.json_format import MessageToDict
from jina import NdArray, Request
from jina.proto.jina_pb2 import DocumentProto
from jina.types.document import Document, BadDocID
from tests import random_docs
@pytest.mark.parametrize('field', ['blob', 'embedding'])
def test_ndarray_get_set(field):
    """Setting `blob`/`embedding` accepts a raw ndarray, an NdArray, or its proto."""
    a = Document()
    b = np.random.random([10, 10])
    setattr(a, field, b)
    np.testing.assert_equal(getattr(a, field), b)
    # an NdArray wrapper is unwrapped to the same value
    b = np.random.random([10, 10])
    c = NdArray()
    c.value = b
    setattr(a, field, c)
    np.testing.assert_equal(getattr(a, field), b)
    # the raw protobuf message is accepted as well
    b = np.random.random([10, 10])
    c = NdArray()
    c.value = b
    setattr(a, field, c.proto)
    np.testing.assert_equal(getattr(a, field), b)
def test_doc_update_fields():
    """`set_attrs` updates multiple document fields in a single call."""
    a = Document()
    b = np.random.random([10, 10])
    c = {'tags': 'string', 'tag-tag': {'tags': 123.45}}
    d = [12, 34, 56]
    e = 'text-mod'
    w = 2.0
    a.set_attrs(embedding=b, tags=c, location=d, modality=e, weight=w)
    np.testing.assert_equal(a.embedding, b)
    assert list(a.location) == d
    assert a.modality == e
    assert MessageToDict(a.tags) == c
    assert a.weight == w
def test_granularity_get_set():
    """`granularity` round-trips through its setter and getter."""
    d = Document()
    d.granularity = 1
    assert d.granularity == 1
def test_uri_get_set():
    """Setting `uri` infers the mime type; an invalid URI raises ValueError."""
    a = Document()
    a.uri = 'https://abc.com/a.jpg'
    assert a.uri == 'https://abc.com/a.jpg'
    assert a.mime_type == 'image/jpeg'
    with pytest.raises(ValueError):
        a.uri = 'abcdefg'
def test_set_get_mime():
    """Mime-type setter normalizes 'jpg', 'jpeg' and '.jpg' to 'image/jpeg'."""
    a = Document()
    a.mime_type = 'jpg'
    assert a.mime_type == 'image/jpeg'
    b = Document()
    b.mime_type = 'jpeg'
    assert b.mime_type == 'image/jpeg'
    c = Document()
    c.mime_type = '.jpg'
    assert c.mime_type == 'image/jpeg'
def test_no_copy_construct():
    """With copy=False the Document is a view: edits propagate both ways."""
    a = DocumentProto()
    b = Document(a, copy=False)
    a.id = '1' * 16
    assert b.id == '1' * 16
    b.id = '2' * 16
    assert a.id == '2' * 16
def test_copy_construct():
    """With copy=True the Document owns an independent copy of the proto."""
    a = DocumentProto()
    b = Document(a, copy=True)
    a.id = '1' * 16
    assert b.id != '1' * 16
    b.id = '2' * 16
    assert a.id == '1' * 16
def test_bad_good_doc_id():
    """A non-hex id raises BadDocID; 16-char hex ids are accepted."""
    b = Document()
    with pytest.raises(BadDocID):
        b.id = 'hello'
    b.id = 'abcd' * 4
    b.id = 'de09' * 4
    b.id = 'af54' * 4
    b.id = 'abcdef0123456789'
def test_id_context():
    """Inside the context the id is empty; it is assigned after content is set and the context exits."""
    with Document() as d:
        assert not d.id
        d.buffer = b'123'
    assert d.id
def test_doc_content():
    """`content` mirrors whichever of text/blob was set last; starts as None."""
    d = Document()
    assert d.content is None
    d.text = 'abc'
    assert d.content == 'abc'
    c = np.random.random([10, 10])
    d.blob = c
    np.testing.assert_equal(d.content, c)
    d.buffer = b'123'
    assert d.buffer == b'123'
def test_request_docs_mutable_iterator():
    """To test the weak reference work in docs

    Mutations made through the iterator must be visible on re-iteration,
    in the serialized proto, and via previously saved references.
    """
    r = Request()
    r.request_type = 'index'
    for d in random_docs(10):
        r.docs.append(d)
    for idx, d in enumerate(r.docs):
        assert isinstance(d, Document)
        d.text = f'look I changed it! {idx}'
    # iterate it again should see the change
    doc_pointers = []
    for idx, d in enumerate(r.docs):
        assert isinstance(d, Document)
        assert d.text == f'look I changed it! {idx}'
        doc_pointers.append(d)
    # pb-lize it should see the change
    rpb = r.as_pb_object
    for idx, d in enumerate(rpb.index.docs):
        assert isinstance(d, DocumentProto)
        assert d.text == f'look I changed it! {idx}'
    # change again by following the pointers
    for d in doc_pointers:
        d.text = 'now i change it back'
    # iterate it again should see the change
    for idx, d in enumerate(rpb.index.docs):
        assert isinstance(d, DocumentProto)
        assert d.text == 'now i change it back'
def test_request_docs_chunks_mutable_iterator():
    """Test if weak reference work in nested docs

    Same contract as the flat-docs test above, but the mutations target
    chunk-level (nested) documents.
    """
    r = Request()
    r.request_type = 'index'
    for d in random_docs(10):
        r.docs.append(d)
    for d in r.docs:
        assert isinstance(d, Document)
        for idx, c in enumerate(d.chunks):
            assert isinstance(d, Document)
            c.text = f'look I changed it! {idx}'
    # iterate it again should see the change
    doc_pointers = []
    for d in r.docs:
        assert isinstance(d, Document)
        for idx, c in enumerate(d.chunks):
            assert c.text == f'look I changed it! {idx}'
            doc_pointers.append(c)
    # pb-lize it should see the change
    rpb = r.as_pb_object
    for d in rpb.index.docs:
        assert isinstance(d, DocumentProto)
        for idx, c in enumerate(d.chunks):
            assert isinstance(c, DocumentProto)
            assert c.text == f'look I changed it! {idx}'
    # change again by following the pointers
    for d in doc_pointers:
        d.text = 'now i change it back'
    # iterate it again should see the change
    for d in rpb.index.docs:
        assert isinstance(d, DocumentProto)
        for c in d.chunks:
            assert c.text == 'now i change it back'
|
[
"noreply@github.com"
] |
ApurvaMisra.noreply@github.com
|
8fac5461e02a1eeb7aab7205d72609f915b89331
|
59a593f54ab281b6d270d18a62ac03a902687a67
|
/DocumentSimilarity.py
|
795b58721f49d3684c1dd12ca1406f097c85f826
|
[] |
no_license
|
ROHITHKUMARN/Document-Similarity
|
7e873543b48146eb1120032fc2f16dda16dcfb00
|
980b07e0528209fd911f828334db60c793736e1b
|
refs/heads/master
| 2021-05-16T09:56:14.287673
| 2017-02-24T06:03:58
| 2017-02-24T06:03:58
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,357
|
py
|
import sys
import os
import math
from math import*
from decimal import Decimal
from sklearn.decomposition import PCA
import numpy as np
# Input file (argv[1]): one document per line, words separated by spaces.
f = open(os.path.abspath(sys.argv[1]))
document = []  # raw input lines
vocabulary = []  # tokenized documents
uniqueWords = []  # global vocabulary, in first-seen order
vector = []  # per-document raw term counts
tf = []  # per-document term frequencies
dict_idf = {}  # word -> inverse document frequency
idf = []
d = []  # per-document tf-idf vectors (one list per document)
manhattan = []
euclidean = []
supremum = []
cos_sim =[]
pca_euclidean_dist = []
for line in f:
    document.append(line)
for i in document:
    vocabulary.append(i.rstrip().split(" "))
# collect the unique words across all documents
for words in vocabulary:
    for w in words:
        if w not in uniqueWords:
            uniqueWords.append(w)
print(len(uniqueWords))
# raw term counts per document
for words in vocabulary:
    dict_words = {}
    for w in words:
        dict_words[w] = dict_words.get(w, 0) + 1
    vector.append(dict_words.copy())
doc_num = 0
# term frequency = count / number of words in the document
for count in vector:
    d_tf = {}
    for wrd in count:
        d_tf[wrd] = float(count.get(wrd))/float(len(vocabulary[doc_num]))
    tf.append(d_tf.copy())
    doc_num += 1
# idf = ln(number of documents / document frequency of the word)
for wrd_idf in uniqueWords:
    c = 0
    for doc in vocabulary:
        if wrd_idf in doc:
            c += 1
    dict_idf[wrd_idf] = math.log(len(vocabulary)/c)
# tf-idf vector per document, components ordered by dict_idf's keys
for t in tf:
    temp = []
    for wrd in dict_idf.keys():
        temp.append(dict_idf.get(wrd)*t.get(wrd,0))
    d.append(temp)
### Minkowski Distance ###
def root(number, h):
    """Return the h-th root of `number`, rounded to 4 decimal places."""
    root_value = 1 / float(h)
    return round(number ** root_value, 4)

def minkowski_distance(x, y, h):
    """Minkowski distance of order `h` between equal-length vectors x and y.

    h=1 gives Manhattan distance, h=2 Euclidean.  The result is rounded
    to 4 decimal places by `root`.
    """
    # accumulate into `total` -- the original used a local named `sum`,
    # shadowing the builtin
    total = 0
    for a, b in zip(x, y):
        total += pow(abs(a - b), h)
    return float(root(total, h))
#(a). Manhattan distance, h =1
# `query` is the tf-idf vector of the LAST document in the input file;
# every distance below is measured against it.
query = d[len(d)-1]
def manhattan_distance(d):
    # Appends (1-based document index, L1 distance to query) to the
    # module-level `manhattan` list, then returns it sorted by distance.
    count = 0
    for i in d:
        count += 1
        manhattan.append((count, minkowski_distance(i, query, 1)))
    return(sorted(manhattan, key=lambda x: x[1]))
result = [x[0] for x in manhattan_distance(d)[0:5]]  # indices of the 5 closest docs
print(' '.join(map(str, result)))
def euclidean_distance(d):
    # Same pattern as manhattan_distance but with h=2 (L2 distance),
    # accumulating into the module-level `euclidean` list.
    count = 0
    for i in d:
        count += 1
        euclidean.append((count, minkowski_distance(i, query, 2)))
    return(sorted(euclidean, key=lambda x: x[1]))
result = [x[0] for x in euclidean_distance(d)[0:5]]
print(' '.join(map(str, result)))
max_dist = 0  # NOTE(review): never read -- supremum_distance uses its own local max_dist
def supremum_distance(d):
    # Chebyshev (L-infinity) distance to `query`: maximum per-component
    # absolute difference, rounded to 4 decimals, accumulated into the
    # module-level `supremum` list.
    doc_num = 0
    for i in d:
        max_dist = 0
        doc_num += 1
        for count in range(len(i)):
            max_dist = round(max(max_dist, abs(i[count] - query[count])),4)
        supremum.append((doc_num, max_dist))
    return(sorted(supremum, key=lambda x: x[1]))
result = [x[0] for x in supremum_distance(d)[0:5]]
print(' '.join(map(str, result)))
def cosine_similarity(d):
    # cosine = (i . query) / (|i| * |query|), rounded to 4 decimals.
    # Results are sorted descending since a larger similarity means closer.
    doc_num = 0
    for i in d:
        n = []  # element-wise products (dot-product terms)
        docmnt = []  # squared components of i
        q = []  # squared components of query
        doc_num += 1
        for k in range(len(i)):
            n.append(i[k]*query[k])
            docmnt.append(i[k]*i[k])
            q.append(query[k]*query[k])
        numerator = sum(n)
        denominator = root(sum(docmnt),2)*root(sum(q),2)
        c_sim = round(float(numerator/denominator),4)
        cos_sim.append((doc_num, c_sim))
    return(sorted(cos_sim,key = lambda x: x[1],reverse=True))
result = [x[0] for x in cosine_similarity(d)[0:5]]
print(' '.join(map(str, result)))
### PCA ###
# Project the tf-idf vectors to 2 principal components, then rank documents
# by Euclidean distance to the projected query (the last row).
pca = PCA(n_components = 2)
principal_components = pca.fit_transform(d)
#### Euclidean Distance of Two projected Vectors ###
def pca_euclidean_distance(principal_components):
    # Accumulates (1-based doc index, L2 distance to the projected query)
    # into the module-level `pca_euclidean_dist` list, sorted by distance.
    count = 0
    for i in principal_components:
        count += 1
        pca_euclidean_dist.append((count, minkowski_distance(i, principal_components[len(principal_components)-1], 2)))
    return(sorted(pca_euclidean_dist, key=lambda x: x[1]))
result = [x[0] for x in pca_euclidean_distance(principal_components)[0:5]]
print(' '.join(map(str, result)))
|
[
"pallavitiagi@gmail.com"
] |
pallavitiagi@gmail.com
|
d520b650baaa41da31c71fb1fde1bdb7eff97fb3
|
329146e5d07a34acffe08c8d138a12c03f27474c
|
/server/node_modules/sqlite3/build/config.gypi
|
f6a1f154c98ef8b08335e4590d8008801edede6a
|
[
"BSD-3-Clause"
] |
permissive
|
ZhenyiZhang/full-stack-graphQL
|
a60430fae1af3a9fa925786baf35536a713faba0
|
0cad30c44df82329b533487a38a56eb1db4ef3c2
|
refs/heads/main
| 2023-01-15T21:29:39.885086
| 2020-11-26T22:37:09
| 2020-11-26T22:37:09
| 316,108,361
| 0
| 0
| null | 2020-11-26T22:37:10
| 2020-11-26T02:59:09
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 6,206
|
gypi
|
# Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"asan": 0,
"build_v8_with_gn": "false",
"coverage": "false",
"dcheck_always_on": 0,
"debug_nghttp2": "false",
"debug_node": "false",
"enable_lto": "false",
"enable_pgo_generate": "false",
"enable_pgo_use": "false",
"error_on_warn": "false",
"force_dynamic_crt": 0,
"host_arch": "x64",
"icu_data_in": "../../deps/icu-tmp/icudt67l.dat",
"icu_endianness": "l",
"icu_gyp_path": "tools/icu/icu-generic.gyp",
"icu_path": "deps/icu-small",
"icu_small": "false",
"icu_ver_major": "67",
"is_debug": 0,
"llvm_version": "11.0",
"napi_build_version": "0",
"node_byteorder": "little",
"node_debug_lib": "false",
"node_enable_d8": "false",
"node_install_npm": "true",
"node_module_version": 83,
"node_no_browser_globals": "false",
"node_prefix": "/usr/local",
"node_release_urlbase": "https://nodejs.org/download/release/",
"node_shared": "false",
"node_shared_brotli": "false",
"node_shared_cares": "false",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_nghttp2": "false",
"node_shared_openssl": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_target_type": "executable",
"node_use_bundled_v8": "true",
"node_use_dtrace": "true",
"node_use_etw": "false",
"node_use_node_code_cache": "true",
"node_use_node_snapshot": "true",
"node_use_openssl": "true",
"node_use_v8_platform": "true",
"node_with_ltcg": "false",
"node_without_node_options": "false",
"openssl_fips": "",
"openssl_is_fips": "false",
"ossfuzz": "false",
"shlib_suffix": "83.dylib",
"target_arch": "x64",
"v8_enable_31bit_smis_on_64bit_arch": 0,
"v8_enable_gdbjit": 0,
"v8_enable_i18n_support": 1,
"v8_enable_inspector": 1,
"v8_enable_lite_mode": 0,
"v8_enable_object_print": 1,
"v8_enable_pointer_compression": 0,
"v8_no_strict_aliasing": 1,
"v8_optimized_debug": 1,
"v8_promise_internal_field_count": 1,
"v8_random_seed": 0,
"v8_trace_maps": 0,
"v8_use_siphash": 1,
"want_separate_host_toolset": 0,
"xcode_version": "11.0",
"nodedir": "/Users/zhenyizhang/Library/Caches/node-gyp/14.15.0",
"standalone_static_library": 1,
"fallback_to_build": "true",
"module": "/Users/zhenyizhang/Documents/Project.nosync/fullstack-tutorial/start/server/node_modules/sqlite3/lib/binding/node-v83-darwin-x64/node_sqlite3.node",
"module_name": "node_sqlite3",
"module_path": "/Users/zhenyizhang/Documents/Project.nosync/fullstack-tutorial/start/server/node_modules/sqlite3/lib/binding/node-v83-darwin-x64",
"napi_version": "7",
"node_abi_napi": "napi",
"node_napi_label": "node-v83",
"save_dev": "",
"legacy_bundling": "",
"dry_run": "",
"viewer": "man",
"only": "",
"commit_hooks": "true",
"browser": "",
"also": "",
"sign_git_commit": "",
"rollback": "true",
"usage": "",
"audit": "true",
"globalignorefile": "/usr/local/etc/npmignore",
"shell": "/bin/bash",
"maxsockets": "50",
"init_author_url": "",
"shrinkwrap": "true",
"parseable": "",
"metrics_registry": "https://registry.npmjs.org/",
"timing": "",
"init_license": "ISC",
"if_present": "",
"sign_git_tag": "",
"init_author_email": "",
"cache_max": "Infinity",
"preid": "",
"long": "",
"local_address": "",
"git_tag_version": "true",
"cert": "",
"registry": "https://registry.npmjs.org/",
"fetch_retries": "2",
"versions": "",
"message": "%s",
"key": "",
"globalconfig": "/usr/local/etc/npmrc",
"prefer_online": "",
"logs_max": "10",
"always_auth": "",
"global_style": "",
"cache_lock_retries": "10",
"update_notifier": "true",
"heading": "npm",
"audit_level": "low",
"searchlimit": "20",
"read_only": "",
"offline": "",
"fetch_retry_mintimeout": "10000",
"json": "",
"access": "",
"allow_same_version": "",
"https_proxy": "",
"engine_strict": "",
"description": "true",
"userconfig": "/Users/zhenyizhang/.npmrc",
"init_module": "/Users/zhenyizhang/.npm-init.js",
"cidr": "",
"user": "",
"node_version": "14.15.0",
"save": "true",
"ignore_prepublish": "",
"editor": "vi",
"auth_type": "legacy",
"tag": "latest",
"script_shell": "",
"progress": "true",
"global": "",
"before": "",
"searchstaleness": "900",
"optional": "true",
"ham_it_up": "",
"save_prod": "",
"force": "",
"bin_links": "true",
"searchopts": "",
"node_gyp": "/usr/local/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js",
"depth": "Infinity",
"sso_poll_frequency": "500",
"rebuild_bundle": "true",
"unicode": "true",
"fetch_retry_maxtimeout": "60000",
"tag_version_prefix": "v",
"strict_ssl": "true",
"sso_type": "oauth",
"scripts_prepend_node_path": "warn-only",
"save_prefix": "^",
"ca": "",
"save_exact": "",
"group": "20",
"fetch_retry_factor": "10",
"dev": "",
"version": "",
"prefer_offline": "",
"cache_lock_stale": "60000",
"otp": "",
"cache_min": "10",
"searchexclude": "",
"cache": "/Users/zhenyizhang/.npm",
"color": "true",
"package_lock": "true",
"package_lock_only": "",
"fund": "true",
"save_optional": "",
"ignore_scripts": "",
"user_agent": "npm/6.14.8 node/v14.15.0 darwin x64",
"cache_lock_wait": "10000",
"production": "",
"send_metrics": "",
"save_bundle": "",
"umask": "0022",
"node_options": "",
"init_version": "1.0.0",
"init_author_name": "",
"git": "git",
"scope": "",
"unsafe_perm": "true",
"tmp": "/var/folders/rw/571n_5x10vldfszl19p_gbl00000gn/T",
"onload_script": "",
"prefix": "/usr/local",
"link": "",
"format_package_lock": "true"
}
}
|
[
"brucezhangpro@hotmail.com"
] |
brucezhangpro@hotmail.com
|
dab29d3a9ef3247026fdb1d378f83a86706a7b3c
|
98ad28779c97cd0f2e566e32530c7075b73c4ed7
|
/gen_env.py
|
33f631e0b5a4440ad9aeea62e93b4d1dc21eb034
|
[
"MIT"
] |
permissive
|
jgearheart/f5-azure-saca
|
a45d46862e76ad7f85965a1f5d9a5f7dddcd4f61
|
56c6e01b955a3622800d9293b46977d6d5456b62
|
refs/heads/master
| 2021-04-06T06:34:24.424002
| 2018-03-02T02:28:00
| 2018-03-02T02:28:00
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,566
|
py
|
#!/usr/bin/env python
import requests
import json
import sys
import os
import re
import random
is_regkey = re.compile("([A-Z]{5}-[A-Z]{5}-[A-Z]{5}-[A-Z]{5}-[A-Z]{7})",re.M)
is_valid_dnsname = re.compile("^[a-z][a-z0-9-]{1,61}[a-z0-9]$")
session = requests.Session()
headers = {'user-agent':'f5-gen-env/0.1','Metadata':'true'}
METADATA_URL="http://169.254.169.254/metadata/instance?api-version=2017-08-01"
output = {}
try:
request = session.get(METADATA_URL,headers=headers)
data = json.loads(request.text)
output['resource_group'] = data['compute']['resourceGroupName']
output['location'] = data['compute']['location']
output['subscription_id'] = data['compute']['subscriptionId']
except requests.exceptions.ConnectionError:
#print "Please run on Azure Linux JumpBox"
#sys.exit(1)
output['resource_group'] = os.environ.get('AZURE_RESOURCE_GROUP','')
output['subscription_id'] = os.environ.get('AZURE_SUBSCRIPTION_ID','')
output['location'] = os.environ.get('location','')
pass
try:
sp = json.load(open('sp.json'))
output['client_id'] = sp["appId"]
output['client_secret'] = sp["password"]
output["tenant_id"] = sp["tenant"]
except:
output['client_id'] = ''
output['client_secret'] = ''
output["tenant_id"] = ''
pass
try:
key_text = open('keys.txt').read()
keys = is_regkey.findall(key_text)
output['key1'] = ''
output['key2'] = ''
output['key3'] = ''
output['key4'] = ''
for x in range(len(keys)):
output['key%s' %(x+1)] = keys[x]
except:
output['key1'] = ''
output['key2'] = ''
output['key3'] = ''
output['key4'] = ''
pass
output['f5_username'] = os.environ.get('USER','')
output['f5_password'] = os.environ.get('f5_password','')
shortname = output['resource_group'].lower()
if shortname.endswith("_rg"):
shortname = shortname[:-3]
if "_" in shortname:
shortname = shortname.replace('_','-')
if not is_valid_dnsname.match(shortname):
shortname = "f5-" + str(int(random.random() * 1000))
output['shortname'] = shortname
output['use_oms'] = 'False'
if os.path.exists('.use_oms'):
output['use_oms'] = 'True'
if os.path.exists('.password.txt'):
output['f5_password'] = "`base64 --decode .password.txt`"
TEMPLATE="""export AZURE_SUBSCRIPTION_ID="%(subscription_id)s"
export AZURE_CLIENT_ID="%(client_id)s"
export AZURE_SECRET="%(client_secret)s"
export AZURE_TENANT="%(tenant_id)s"
export AZURE_RESOURCE_GROUP="%(resource_group)s"
export AZURE_RESOURCE_GROUPS="${AZURE_RESOURCE_GROUP}_F5_External,${AZURE_RESOURCE_GROUP}_F5_Internal"
export location="%(location)s"
export f5_unique_short_name="%(shortname)sext"
export f5_unique_short_name2="%(shortname)sint"
export f5_license_key_1="%(key1)s"
export f5_license_key_2="%(key2)s"
export f5_license_key_3="%(key3)s"
export f5_license_key_4="%(key4)s"
export f5_username="%(f5_username)s"
export f5_password="%(f5_password)s"
export use_oms="%(use_oms)s"
export F5_VALIDATE_CERTS=no
loc=$(curl -H metadata:true "http://169.254.169.254/metadata/instance?api-version=2017-08-01" --stderr /dev/null |jq .compute.location)
echo $loc | grep -i -E "(gov|dod)" > /dev/null;
#echo $?
if [ $? == 0 ]
then
export is_gov=1;
else
export is_gov=0;
fi
if [ $is_gov == 1 ]
then
az cloud set -n AzureUSGovernment;
export AZURE_CLOUD_ENVIRONMENT="AzureUSGovernment";
fi
which az
az login \
--service-principal \
-u "$AZURE_CLIENT_ID" \
-p "$AZURE_SECRET" \
--tenant "$AZURE_TENANT"
az account set -s $AZURE_SUBSCRIPTION_ID
"""
print TEMPLATE %(output)
|
[
"eric.chen@f5.com"
] |
eric.chen@f5.com
|
23e8eaf1eb2c77ceaad174fae6cf8fcf18768993
|
cac5f68c601f9f834aa2b0de9fb00d22e1f80239
|
/floyd_test.py
|
64106a310c07591944d87c67cd8b23302fe08b30
|
[] |
no_license
|
imosk72/graph_python_task
|
f915cdc246b02c084730d00a771247bfab5f6880
|
0581774741ad766c97f47fbbcd8e9eb5aa473438
|
refs/heads/master
| 2023-04-30T23:44:13.639760
| 2021-05-19T09:25:59
| 2021-05-19T09:25:59
| 364,354,462
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 432
|
py
|
import generators
import measurement
import weighted_graph
def measure_floyd():
f = open("floyd.txt", "w")
for i in range(1, 100):
print(i)
f.write(str(i) + " " + "%.6f" % measurement.test(20, weighted_graph.WeightedGraph(i, generators.generate_graph(i, True, True), True).floyd) + "\n")
f.close()
if __name__ == "__main__":
#measure_floyd()
measurement.find_cubic_approximation("floyd.txt")
|
[
"imoskovchenko72@gmail.com"
] |
imoskovchenko72@gmail.com
|
7deca9544c4ccb6f9be27d733f45ecdc1a27b058
|
861bbf5978790bf721bb643516a5a484a8c31b38
|
/knn_risk_predictor.py
|
8fd4b0d2cf132068503f27d32bc0e401d9dfe0a0
|
[] |
no_license
|
ericgao1997/CMPT-353-Final
|
a1cd5d47e6e4aa3ec19558348d3a87db4cf3d460
|
2b0e3b398cb0057ad75f81c0cc0039ee3ed7087f
|
refs/heads/master
| 2023-02-19T12:41:53.661689
| 2021-01-05T18:51:26
| 2021-01-05T18:51:26
| 320,167,838
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,550
|
py
|
import numpy as np
import pandas as pd
from sklearn.neighbors import KNeighborsRegressor
from scipy import stats
import sys
neigh = KNeighborsRegressor(n_neighbors=10, weights='distance')
wider_neigh = KNeighborsRegressor(n_neighbors=3, weights='distance')
def risk_calc(row):
base_risk = row['medical'] + row['food']*0.8 + row['gathering']*0.5 + row['transport']*0.5 + row['notable']*0.2
if base_risk == 0:
return 0
# t_total = row['medical'] + row['food'] + row['gathering']+ row['transport'] + row['notable']
t_total = 1
return base_risk**t_total
def irl_risk(model):
irl_cases = pd.read_csv("data/confirmed_cases.csv")
irl_cases['risk'] = model.predict(irl_cases)
irl_cases.to_csv("out/irl_risk.csv",index=False)
return irl_cases['risk']
def main(data_file):
data = pd.read_csv(data_file,index_col=0)
data['risk'] = data.apply(lambda row: risk_calc(row),axis=1)
data['risk'] = data.apply(lambda x: x['risk']/data['risk'].max(), axis=1)
print (data)
targets = data[(data['risk']==0) & (data['tag_count']>0)]
data.to_csv('out/initial_risks.csv',index = False)
known = data[~data.isin(targets)]
known = known[known['lat'].notna()]
print(len(known))
print(len(targets))
X = known[['lat','lon']]
y = known['risk']
neigh.fit(X,y)
print (targets)
X_t = targets[['lat','lon']]
targets['risk'] = neigh.predict(X_t)
# * neigh.predict_proba(targets[['lat','lon']])
indv_models = pd.concat([known,targets])
indv_models.to_csv('out/checked_risks.csv',index = False)
# print (indv_models)
new_targets = indv_models[(indv_models['risk']==0)]
known_2 = indv_models[~indv_models.isin(new_targets)]
known_2 = known_2[known_2['lat'].notna()]
wider_neigh.fit(known_2[['lat','lon']],known_2['risk'])
new_targets['risk'] = wider_neigh.predict(new_targets[['lat','lon']])
overall_risks = pd.concat([known_2,new_targets])
overall_risks.to_csv('out/smart_risks.csv',index = False)
irl_risks = irl_risk(wider_neigh)
print (overall_risks)
# targets = data[ (data['food']==False) | (data['medical']==False) | (data['gathering']==False) | (data['transport']==False) | (data['notable']==False) ]
# targets = targets[targets['tag_count']>0]
# Validate our smartest model
print(stats.ttest_ind(overall_risks['risk']**0.5,irl_risks**0.5))
print(stats.mannwhitneyu(overall_risks['risk'],irl_risks))
if __name__=='__main__':
data_file = sys.argv[1]
main(data_file,)
|
[
"31947627+ericgao1997@users.noreply.github.com"
] |
31947627+ericgao1997@users.noreply.github.com
|
52793a05086193090d0b2d2851abe1075600a7d7
|
649417ac89aa4917eeecf00ad7aa2d9ddaa15bf6
|
/PhaseMatchingBiphotonFWM.py
|
b4c39e2c3123be323950df49fef38a7e65ef84ab
|
[] |
no_license
|
damienbonneau/sources
|
70bb514e384571f922b044306f6dfd81ac459bed
|
60d0aa605bbd6f9e6ea30e4a369d12dd4ed1a83b
|
refs/heads/master
| 2021-01-20T21:15:45.454573
| 2016-08-04T18:01:49
| 2016-08-04T18:01:49
| 64,950,261
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 54,609
|
py
|
# -*- coding: utf-8 -*-
from numpy import *
import matplotlib as mpl
from matplotlib import cm,colors
import matplotlib.pyplot as plt
from matplotlib.ticker import MaxNLocator
from scipy.optimize import leastsq
import os,time
# -----------------------------------------------------------------------------#
# Plot functions
# -----------------------------------------------------------------------------#
# Lattice: bidimensional numpy array, example : lattice = ones((size, size), dtype=float )
# extent: axis extent for each axis [begin_x,end_x,begin_y,end_y]
def plotcolormap(lattice,extent,fname = None):
fig = plt.figure()
map1=colors.LinearSegmentedColormap.from_list('bla',['#000000','#FF0000','#FFFF00'])
begin_x,end_x,begin_y,end_y = extent
aspect = (end_x - begin_x )/(end_y - begin_y)
plt.imshow(lattice, map1,vmin = 0, interpolation='nearest',extent=extent,aspect = aspect)
plt.gca().xaxis.set_major_locator( MaxNLocator(nbins = 7, prune = 'lower') )
plt.gca().yaxis.set_major_locator( MaxNLocator(nbins = 6) )
#cbar = plt.colorbar()
#cbar.locator = MaxNLocator( nbins = 6)
# vmin=0,vmax = 1,
if fname is None:
plt.show()
else:
plt.savefig(fname)
plt.close()
def plot(plots):
for x,y,style in plots:
plt.plot(x, y, style) # x, y, 'k--',
plt.grid(True)
plt.title('')
plt.xlabel('')
plt.ylabel('')
plt.show()
def plotcolormapphase(lattice,extent):
fig = plt.figure()
map1=colors.LinearSegmentedColormap.from_list('bla',['#0000FF','#000000','#FF0000'])
plt.imshow(lattice, map1,vmin = -pi,vmax = pi, interpolation='nearest',extent=extent)
# vmin=0,vmax = 1,
plt.show()
# -----------------------------------------------------------------------------#
# MISC FUNCTIONS (helpers for classes)
# -----------------------------------------------------------------------------#
def funcpeak(lbda,lbda0):
T = 1.*10**(-9)
signu = 0.441/T
siglbda = signu/(c*10**6)*(lbda0)**2
return sqrt(1./(sqrt(2*pi)*siglbda) * exp(-(lbda-lbda0)**2/(2*siglbda**2)))
"""
input state as a 2D matrix
!! the input state is not given as a density matrix
it's a pure state given in a matrix
"""
def schmidtnumber(state):
N,M = state.shape
ror=zeros((N,N)) # reduced density matrix
for l in xrange(N):
for n in xrange(N):
for p in xrange(N):
ror[l,n]+=state[p,l]*state[p,n]
ror2 = dot(ror,ror)
# compute the trace of ror2
tmp = 0
for k in xrange(N):
tmp+= ror2[k,k]
schn = 1.0/tmp
return schn
def parse_extent(line):
l1 = line.split(":")[1]
l2 = l1.split(",")[0]
swlmin,swlmax = l2.split("-")
wlmin,wlmax = float(swlmin),float(swlmax)
return wlmin,wlmax
def parse_biphoton_data(line):
l1 = line.replace("\n","")
ls = l1.split(" ")
res = []
for e in ls:
res.append(float(e))
return array(res)
# -----------------------------------------------------------------------------#
# CONSTANTS
# -----------------------------------------------------------------------------#
I = 1.0j
HPLANCK = 6.626068*10**(-34) #m2 kg / s
HBAR = HPLANCK/(2*pi)
EPSILON0 = 8.85418782*10**(-12)#m-3 kg-1 s4 A2 or C.V-1.M-1
c = 299792458.0 # CLIGHT = 299792458. # m/s
n2_Si = 6.3* 10**(-18) # m2/W (Semicond. Sci. Technol. 23 (2008) 064007 (9pp))
# -----------------------------------------------------------------------------#
# CLASS Waveguide
# -----------------------------------------------------------------------------#
# Init (width, height):
# * Take the width and height of the waveguide cross section as parameters
# * Loads a file containing lbda vs neff
# * fits a dispersion curve to the data loaded
# This class has methods to obtain the effective index, the group index, and wave number when given a wavelength
#
class Waveguide(object):
def __init__(self,width,height):
self.rootname = "waveguide_data_noslab"
self.width = width
self.height = height
s = "%dx%d" % (width,height)
files = os.listdir(self.rootname)
for fname in files:
if fname.find(s) >=0:
self.__load__(fname)
self.__fit__()
# We fix the FWM effective area that we calculate using the overlap between the four fields
self.Aeff = 0.03 # um^2
def __load__(self,fname):
path = self.rootname+"\\"+fname
f = open(path)
line = f.readline()
lbdas = []
neffs = []
while(len(line))>0:
splitted = line.split("\t")
lbda,neff = splitted[0:2]
line = f.readline()
if lbda>0:
lbdas.append(float(lbda))
neffs.append(float(neff))
self.lbdas = array(lbdas)
self.neffs = array(neffs)
return
def __fit__(self):
p0 = [1,0,0,0]
plsqwl2n = leastsq(self.__residuals__, p0, args=(self.neffs, self.lbdas))
self.pwl2n = plsqwl2n[0] # wavelength to neff
#print self.p
def __func__(self,p,x):
d,c,b,a = p
return a*x**3+b*x**2+c*x+d
def __residuals__(self,p,y, x):
err = y-self.__func__(p,x)
return err
def getneff(self,lbda):
return self.__func__(self.pwl2n,lbda)
# lbda in um
def wl2kv(self,a_lbda):
return 2*pi*self.getneff(a_lbda)/(a_lbda) # the kvector z component is returned in um-1
def kv2wl(self,a_kv):
pass # not as easy ...
def plotneff(self):
x = arange(min(self.lbdas),max(self.lbdas),0.1)
plots = [(self.lbdas,self.neffs,"-"),(x,self.getneff(x),"-")]
plot(plots)
def getng(self,lbda):
lbda_step = 0.00001
lbda1 = lbda - lbda_step
lbda2 = lbda + lbda_step
neff1 = self.getneff(lbda1)
neff2 = self.getneff(lbda2)
neff = self.getneff(lbda)
ng = neff -lbda*(neff2-neff1)/(2*lbda_step)
return ng
# -----------------------------------------------------------------------------#
# CLASS FWM_Simu
# -----------------------------------------------------------------------------#
# This class calculates the joint spectral distribution obtained for a straight
# waveguide with a given set of parameters
# Init (
# * Waveguide cross section
# * Waveguide length (Meters)
# * Pump power (Watts)
# * Pump wavelength (um)
# * Pulse duration (Seconds)
# * Repetition rate (Hz)
# )
#
# computeJS: Does the simulation
#
class FWM_Simu(object):
def __init__(self,wg = Waveguide(550,220),
length = 0.03, # 0.03 ->3cm
pumppower = 0.1*10**-3,pumpwl = 1.55,pulseduration=1.*10**(-12),reprate = 40*10**6, N= 200
):
self.T = pulseduration # in seconds
self.wg = wg # waveguide crosssection (Waveguide object)
self.length = length # Propagation length in the waveguide
self.L = length
self.pumppower = pumppower # in W
#self.gamma = 3*10**2 # W^-1 m^-1 ; non linear coeff IEEE JOURNAL OF SELECTED TOPICS IN QUANTUM ELECTRONICS, VOL. 16, NO. 1, JANUARY/FEBRUARY 2010
self.lbda_p = pumpwl
#self.pumpenvelop(self.lbda_p)
self.pumpenvelop(pumpwl) # computes siglbda
self.gamma = 460. # 2*pi/(self.lbda_p*10**(-6))*n2_Si/(self.wg.Aeff*10**(-12)) #W-1 M-1
#print "Gamma", self.gamma
self.reprate = reprate # Hz
self.Epulse = self.pumppower/self.reprate #Energy per pulse in J
self.N = N
def setPumpwl(self,x):
self.lbda_p = x
def setPulseDuration(self,x):
self.T = x
self.pumpenvelop(self.lbda_p)
# Define phase matching function
def getdk(self,p1,p2,lbda_p1,lbda_p2,lbda_s,lbda_i):
kp1,kp2,ki,ks = map(self.wg.wl2kv,[lbda_p1,lbda_p2,lbda_i,lbda_s])
ga = self.gamma*10**(-6) # to put gamma in um
dk = kp1+kp2-ks-ki-ga*(p1+p2) # When putting gamma, the phase matching bandwidth changes dramatically
return dk
# **************
# Pump envelopes
# **************
def pumpenvelop(self,lbda):
return self.gaussppumpenvelop(lbda) #self.gaussppumpenvelop(lbda)
#return self.rectpumpenvelop(lbda) #self.gaussppumpenvelop(lbda)
def toplotCWGain(self,lbda_s = arange(1.5,1.6,0.0001)):
lbda_i = 1./(2/self.lbda_p-1/lbda_s)
a_dk = self.getdk(0,0,self.lbda_p,self.lbda_p,lbda_s,lbda_i) # um-1
a_phasematching = sinc(self.length*10**6/2*a_dk)
return a_phasematching**2
def gausspulsedpumpenvelop(self,lbda,dlbda = 0.4*10**(-4)):
return self.gaussppumpenvelop(lbda) *(sin(2*pi*(lbda)/dlbda))**2# From laser textbook
def rectpumpenvelop(self,lbda):
signu = 0.441/self.T # self.linewidth #0.441/sigma_t # From laser book, in Hz
sigomega = 2*pi*signu
lbda0 = self.lbda_p
siglbda = signu/(c*10**6)*(lbda0)**2
w = sqrt(2*pi)*siglbda
self.siglbda = siglbda
a = 1/sqrt(w)
lbda_min = lbda0-w/2
lbda_max = lbda0+w/2
#print "lbdas", lbda_min,lbda_max
step = w / 400
self.pumprange = arange(lbda_min,lbda_max,step)
#print "min ", lbda_min,lbda[0]
#print "max ", lbda_max,lbda[-1]
output = (lbda>=lbda_min)*(lbda<=lbda_max)*a
#if type(lbda) == type(zeros(5)):
# print min(lbda),lbda_min,lbda_max,max(lbda)," ---> ", output.sum()
return output
def gaussppumpenvelop(self,lbda):
lbda0 = self.lbda_p
k0,k = map(self.wg.wl2kv,[lbda0,lbda])
signu = 0.441/self.T # self.linewidth #0.441/sigma_t # From laser book, in Hz
sigomega = 2*pi*signu
siglbda = signu/(c*10**6)*(lbda0)**2
ng = self.wg.getng(lbda0)
sigk = siglbda/(lbda0)**2*2*pi*ng
self.siglbda = siglbda
omega0 = 2*pi*c/lbda0
omega = 2*pi*c/lbda
#return exp(-2*log(2)*((lbda0-lbda)*10**-6)**2/(siglbda**2)) # From laser textbook
return sqrt(1./(sqrt(2*pi)*siglbda) * exp(-(lbda-lbda0)**2/(2*siglbda**2))) # this gauss envelop is on lambda which is probably not very physical ...
#return sqrt(1./(sqrt(2*pi)*sigomega) * exp(-(omega-omega0)**2/(2*sigomega**2)))*sqrt(2*pi*c)/lbda
# Rectangular pulse in the temporal domain
# lbda in um
# T : pulse length [S]
def sincpumpenvelop(self,lbda):
T = self.T
om = 2*pi*c/(lbda*10**-6)
om0 = 2*pi*c/(self.lbda_p*10**(-6))
dom = om - om0
#return sinc(dom*T/2) * sqrt(T/(2*pi)) # this normalization works when integrating over omega
# *** WARNING, in python, sinc(x) = sin(pi*x)/(pi*x) which is already normalized to one ! ***
return sinc(dom*T/2) * sqrt(T*pi*c*10**6/(lbda**2)) # c in um/s, lbda in um, T in s; this normalization is for lambda
# **************
#
# **************
# This provides the range of lbdas which should be used to accurately span the pump
def updatepumprange(self):
print "Get pump range ..."
lbda_p = self.lbda_p
lbda_step= 0.00000001 # step for finding the pump range
P = 0.
targetfraction = 0.95
deltalbda = 0.5*10**(-6) # initialize deltalbda at 1pm
while (P<targetfraction):
deltalbda = 2*deltalbda
lbdas = arange(lbda_p-deltalbda,lbda_p+deltalbda,lbda_step)
#print P
P=(self.pumpenvelop(lbdas)*self.pumpenvelop(lbdas).conjugate()).sum()*lbda_step
print P
print P
N = 400
step = (lbdas[-1]-lbdas[0])/N # Step for the returned pump range
res = arange(lbdas[0],lbdas[-1],step)
#print "Size of pump lbdas" ,lbdas.size
#print self.pumpenvelop(lbda_p)
print "Pump range : (um)",lbdas[0],lbdas[-1]
self.pumprange = res
return res
def setRangeJS(self,lbda_s_min,lbda_s_max,lbda_i_min,lbda_i_max):
self.lbda_s_min = lbda_s_min
self.lbda_s_max = lbda_s_max
self.lbda_i_min = lbda_i_min
self.lbda_i_max = lbda_i_max
self.extent = [x*1000 for x in [self.lbda_i_min,self.lbda_i_max,self.lbda_s_min,self.lbda_s_max]] # um to nm
print self.extent
def setRangeScanResonance(self,lbda_s_min,lbda_s_max):
# Get the range for signal centered on the resonance
lsm,lsM = lbda_s_min,lbda_s_max
# Get the range for idler using rough energy conservation
lp = self.lbda_p
lp_min = min(self.pumprange)
lp_max = max(self.pumprange)
lim = 1./(2./lp_min - 1./lsM)
liM = 1./(2./lp_max - 1./lsm)
print "avg_pumps", (lim+lsm)/2,(liM+lsM)/2
#print "%.2f %.2f ; %.2f %.2f (pm)" % (lsm*10**6,lsM*10**6,lim*10**6,liM*10**6)
print lsm,lsM,lim,liM
self.setRangeJS(lsm,lsM,lim,liM)
def computeJS_old(self,begin=1.545,end=1.555): # begin=1.545,end=1.555,step=0.0001
#size = int((end-begin)/step)
size = self.N
step = (end-begin) / self.N
P = self.pumppower
L = self.length
lattice = ones((size, size), dtype=float )
phases = ones((size, size), dtype=float )
for i in xrange(size):
print i
lbda_i = i*step+begin
for j in xrange(size):
lbda_s = j*step+begin
a_lbda_p1 = self.pumprange
a_lbda_p2 = 1./(1/lbda_s+1/lbda_i-1/a_lbda_p1)
a_p1 = P*self.pumpenvelop(a_lbda_p1) # pump amplitude 1
a_p2 = P*self.pumpenvelop(a_lbda_p2) # pump amplitude 2
a_dk = self.getdk(a_p1,a_p2,a_lbda_p1,a_lbda_p2,lbda_s,lbda_i)
a_phasematching = 1
a_expi = 1
#a_phasematching = sinc(L/2*a_dk)
a_expi = exp(I*L/2*a_dk)
a_res = a_phasematching*a_expi*a_p1*a_p2
res = a_res.sum()*a_res.size*step
lattice[i,size-1-j]= sqrt(abs(res.real**2+res.imag**2)) #res res #
phases[i,size-1-j] = angle(res)
#N = sqrt((lattice*conjugate(lattice)).max())
#lattice = lattice/N
self.lattice = lattice
self.phases = phases
self.extent = [begin*1000,end*1000,begin*1000,end*1000]
Z = lattice.sum()# sqrt(abs(lattice*conjugate(lattice)).sum())
self.normlattice = sqrt(abs(lattice/Z))
# Override these methods to add custom filters on signal and idler arm
def filter_idler(self,lbda):
return ones(lbda.size)
def filter_signal(self,lbda):
return ones(lbda.size)
def getPurityAfterFilter(self):
Ni = self.Ni
Ns = self.Ns
# Apply custom filters:
m_filter_signal =zeros((Ni,Ns))
m_filter_idler =zeros((Ni,Ns))
for i in arange(Ni):
m_filter_signal[i,:] = self.filter_signal(self.a_lbda_s)
for j in arange(Ns):
m_filter_idler[:,j] = self.filter_idler(self.a_lbda_i)
lattice = self.normlattice*m_filter_signal*m_filter_idler
# Multiply by the appropriate missing constants
Z = lattice.sum()# sqrt(abs(lattice*conjugate(lattice)).sum())
normlattice = sqrt(abs(lattice/Z))
self.normlattice_unfiltered = self.normlattice[:,:] # Save the previous matrix
self.normlattice = normlattice # assign the new filtered matrix
purity = self.computeHeraldedPhotonPurity() # computes the purity after filtering
self.normlattice = self.normlattice_unfiltered # restore the previous matrix
return purity
def computeJS(self):
P = self.pumppower
L = self.L # Cavity length
N = self.N
lbda_s_min = self.lbda_s_min
lbda_s_max = self.lbda_s_max
lbda_i_min = self.lbda_i_min
lbda_i_max = self.lbda_i_max
step_i = (lbda_i_max-lbda_i_min)/N
step_s = (lbda_s_max-lbda_s_min)/N
a_lbda_i = arange(lbda_i_min,lbda_i_max,step_i)[0:N]
a_lbda_s = arange(lbda_s_min,lbda_s_max,step_s)[0:N]
self.a_lbda_i = a_lbda_i
self.a_lbda_s = a_lbda_s
Ni = a_lbda_i.size
Ns = a_lbda_s.size
print Ni, Ns
self.Ni = Ni
self.Ns = Ns
self.step_i = step_i
self.step_s = step_s
rangepump = self.pumprange
M = rangepump.size
dlbda_pump = (rangepump.max()-rangepump.min())/M
lattice = zeros((Ni,Ns))
a_lbda_p1 = rangepump
a_p1 = self.pumpenvelop(a_lbda_p1) # pump amplitude 1
ng = self.wg.getng(self.lbda_p)
print "Steps" ,step_i,step_s
#dbgpm = 0.
pumpmax = self.pumpenvelop(self.lbda_p)
phases = zeros((Ni,Ns))
print max(a_p1)
for j in xrange(Ns):
#rint j
lbda_s = a_lbda_s[j] # lbda_s_min+j*step_s
for i in xrange(Ni):
lbda_i = a_lbda_i[i] # lbda_i_min+i*step_i
a_lbda_p2 = 1./(1./lbda_s+1./lbda_i-1./a_lbda_p1)
a_p2 = self.pumpenvelop(a_lbda_p2) # pump amplitude 2
#print a_lbda_p2[0],a_lbda_p2[-1]," ---> ", a_p2.sum()
#print max(a_p2)
# In order to save computation time we can take a_pm = 1. for small cavities
a_dk = 1.
a_pm = 1.
#a_dk = self.getdk(P*a_p1*conjugate(a_p1),P*a_p2*conjugate(a_p2),a_lbda_p1,a_lbda_p2,lbda_s,lbda_i)
#a_pm = sinc(L/2*a_dk/pi) # the L will be added later in the global constant
a_res = a_p1*a_p2*a_pm
a_res = a_res * a_lbda_p2/a_lbda_p1
# Multiply by the dlambda;
# The pump function is i um^(-1/2), dlbda_pump is in um
a_res = a_res*dlbda_pump
res = a_res.sum() # unitless
#res = res
# Multiply by the dlambda
# Since the formula was derived for domega, we have to remember that domega = -2*pi*c/lbda**2 * dlbda
lattice[i,Ns-1-j]= abs(res.real**2+res.imag**2)* (step_i/(lbda_i**2)) * (step_s/(lbda_s**2))
#print angle(res)
phases[i,Ns-1-j] = angle(res)
# Check what should be the proper formula which keeps the joint spectral amplitude instead of joint spectral probability distribution
# Apply custom filters:
# m_filter_signal =zeros((Ni,Ns))
# m_filter_idler =zeros((Ni,Ns))
# for i in arange(Ni):
# m_filter_signal[i,:] = self.filter_signal(a_lbda_s)
# for j in arange(Ns):
# m_filter_idler[:,j] = self.filter_idler(a_lbda_i)
# lattice = lattice*m_filter_signal*m_filter_idler
# Multiply by the appropriate missing constants
lattice = lattice*(c*self.Epulse*self.gamma*(self.L))**2/(2*pi**2) #/ (2*pi*ng)
Z = lattice.sum()# sqrt(abs(lattice*conjugate(lattice)).sum())
self.normlattice = sqrt(abs(lattice/Z))
self.lattice = lattice
self.phases = phases
def plotBiphoton(self,fname = None):
plotcolormap(self.lattice,self.extent,fname)
def __g__(self,i,j):
#return (self.normlattice[i,:]*conjugate(self.normlattice[j,:])).sum()
return (self.normlattice[i,:]*exp(I*self.phases[i,:])*conjugate(self.normlattice[j,:]*exp(I*self.phases[j,:]))).sum()
def __g_nophase__(self,i,j):
return (self.normlattice[i,:]*conjugate(self.normlattice[j,:])).sum()
def __G_nophase__(self,i,j,k,l):
return self.__g_nophase__(i,j)*self.__g_nophase__(k,l)
vectg = vectorize(__g__)
def __G__(self,i,j,k,l):
return self.__g__(i,j)*self.__g__(k,l)
vectG = vectorize(__G__)
vectG_nophase = vectorize(__G_nophase__)
# Purity = Tr(ro**2)
def computenaivepurity(self):
lattice = sqrt(self.normlattice)
N = self.N
P = 0
for n in xrange(self.N):
for m in xrange(self.N):
P+= (lattice[:,n]*conjugate(lattice[:,m])).sum()*(lattice[:,m]*conjugate(lattice[:,n])).sum()
self.purity = abs(P)
self.schn = 1./P
return P
# Computes the probability of getting coincidences between two heralded photons from different sources
def computePcoincfrom2photons(self):
lattice = sqrt(self.normlattice)
#print "State Norm:", abs(lattice*conjugate(lattice)).sum() # equivalent to the trace
print "Computing proba coincidence"
N = self.N
omega1 = zeros((N,N))
omega2 = zeros((N,N))
for i in range(N):
omega1[:,i]= arange(N)
omega2[i,:]= arange(N)
Gnnmm = self.vectG(self,omega1,omega1,omega2,omega2)
Gnmmn = self.vectG(self,omega1,omega2,omega2,omega1)
print "Gnnmm: ",Gnnmm.sum()
print "Gnmmn: ",Gnmmn.sum()
Pcoinc = 0.5*(Gnnmm.sum()-Gnmmn.sum()) # See proof in my labbook from 2012 (27/01/2012)
print "Pcoinc: ",Pcoinc
print "Visibility: ", 1.-Pcoinc/0.5
self.visibility= 1.-Pcoinc/0.5
return 1.-Pcoinc/0.5
def computeHeraldedPhotonPurity(self):
#self.computePcoincfrom2photons()
lattice = self.normlattice
N = self.N
omega1 = zeros((N,N))
omega2 = zeros((N,N))
for i in range(N):
omega1[:,i]= arange(N)
omega2[i,:]= arange(N)
#print "State Norm:", abs(lattice*conjugate(lattice)).sum() # equivalent to the trace
purity = self.vectG(self,omega1,omega2,omega2,omega1).sum()
#purity2 = self.vectG_nophase(self,omega1,omega2,omega2,omega1).sum()
# print "Purity: ", purity,purity2
self.purity = abs(purity)
self.schn = 1/purity
"""
print "Computing heralded photon purity"
N = self.N
omega1 = zeros((N,N))
omega2 = zeros((N,N))
for i in range(N):
omega1[:,i]= arange(N)
omega2[i,:]= arange(N)
x = self.vectg(self,arange(N),arange(N))
print "Tr_ro1: ",x.sum()
g12 = self.vectg(self,omega1,omega2)
purity = (g12*g12).sum() # no dot product here, the formula (g12*g12).sum() provides exactly the trace over
# the reduced density matrix squared.
#print schn, schmidtnumber(lattice)
"""
return abs(purity)
###
# -----------------------------------------------------------------------------#
# CLASS FWM_RingSimu
# -----------------------------------------------------------------------------#
# This class calculates the joint spectral distribution obtained in a ring
# resonator for a given set of parameters
# Init (
# * Waveguide cross section
# * Waveguide length (Meters)
# * Pump power (Watts)
# * Pump wavelength (um)
# * Pulse duration (Seconds)
# * Repetition rate (Hz)
# * N: grid sampling (the JSA is stored in a NxN grid)
# * r: ring coupling (r = 1 means no coupling, while r = 0 means full coupling)
# * tau: round trip transmission which accounts for the loss in the ring resonator
# )
#
# setPumpToClosestRes(lambda) : Sets the pump to the closest resonance to the given wavelength
# setRangeScanResonance(p) : Sets the resonance to be used for collecting the idler photon. p is the resonance number.
# p = 0 is the same as the pump resonance
# p = +1 or -1 are the next nearest resonance to the pump
# p = +M or -M ....
#
# plotcavityresponse() : Shows the transmission spectrum of the cavity
# computeJS() : Does the simulation
#
# applycavity(lambda) : This is the function which applies the cavity. By default, this function applies a ring resonator.
# Different cavities can however be used.
# save(filename) : Saves the result of the simulation including all the parameters, the full state, and the derived parameters such as the Schmidt number
#
class FWM_RingSimu(FWM_Simu):
def __init__(self,wg = Waveguide(550,220),
length = 80., # um
pumppower = 45.*10**-3,pumpwl = 1.55,pulseduration=1.*10**(-12),N = 200,r = 0.98,tau = 1.0): # 300*10**3 -> 300 kHz linewidth
FWM_Simu.__init__(self,wg = wg,
length = length, # 0.03 ->3cm
pumppower = pumppower,pumpwl = pumpwl,pulseduration=pulseduration)
self.lbda_p = pumpwl # in um # We take the cavity resonance wavelength equal to the pump central wavelength
self.mpump = -1 # resonance number closest to the pump
# Ring parameters
self.L = length # Length of the ring in um
self.r = r
self.tau = tau # tau = 1. -> No loss
#self.tau = self.r # critical coupling
self.N = N
self.lattice = zeros((N,N))
# For loading purpose : Params
self.purity = -1
self.schn = -1
self.geneeff = -1
self.setters = {"Purity" : self.__setPurity__,
"Schmidt number" : self.__setSchn__,
"r" : self.__setr__,
"Nb pairs per pulse" : self.__setgeneeff__,
"Pulse duration (ps)" : self.__setT__ ,
"N" : self.__setN__,
}
self.resonancenumber = 0 # Resonance scanned for signal
# Setters when loading
def __setPurity__(self,x):
self.purity = x
def __setSchn__(self,x):
self.schn = x
def __setr__(self,x):
self.r = x
def __setgeneeff__(self,x):
self.geneeff = x
def __setT__(self,x):
self.T = x
def __setN__(self,x):
self.N = x
self.lattice = zeros((x,x))
self.phases = zeros((x,x))
def setTau(self,x):
self.tau = x
def setr(self,x):
self.r = x
def setL(self,L):
self.L = L
def ring(self,lbda):
k = self.wg.wl2kv(lbda)
t = sqrt(1-self.r**2)
tau = self.tau
r = self.r
return I*t/(1-tau*r*exp(I*k*self.L))
def cavity_transmission(self,lbda):
t = sqrt(1-self.r**2)
return self.r+I*t*self.ring(lbda)
# Override these methods to add custom filters on signal and idler arm
def filter_idler(self,lbda):
return ones(lbda.size)
def filter_signal(self,lbda):
return ones(lbda.size)
# If using two coupled rings
def set_r2(self,r2 = 0.999):
self.r2 = r2
def CROW2(self,lbda):
k = self.wg.wl2kv(lbda)
r2 = self.r2
t2 = sqrt(1-r2**2)
r1 = self.r
t1 = sqrt(1-r1**2)
tau = self.tau
L1 = self.L
L2 = L1
g1 = tau*exp(I*L1*k)
g2 = tau*exp(I*L2*k)
return I*t1*(r2-g2)/(1-r2*g2+r1*g1*(g2-r2))
def applycavity(self,lbda):
return self.ring(lbda)
# Returns the closest cavity resonance for a given lambda and the resonance number
def getClosestCavityRes(self,lbda):
m = round(self.wg.wl2kv(lbda)*self.L/(2*pi))
kp0 = m*2*pi/self.L # target pump propagation constant
# The problem is now to get lbda0 from kp0
# We start approximating the neff of lbda0 using the one of lambda
neff = self.wg.getneff(lbda)
# Using a scipy optimize method could be more robust and faster than the following code
lbda0 = 2*pi*neff/kp0
print lbda0
lbdastep = 1*10**(-7) * sign(lbda0-lbda)
kp = self.wg.wl2kv(lbda0)
err = (kp-kp0)/kp0
while(abs(err)>0.0000001):
lbda0 += lbdastep
kp = self.wg.wl2kv(lbda0)
newerr = (kp-kp0)/kp0
if newerr**2>err**2:
lbdastep = lbdastep*(-1)
err = newerr
return lbda0,m
# Centers the pump on the closest cavity resonance
def setPumpToClosestRes(self,lbda):
self.lbda_p,self.mpump = self.getClosestCavityRes(lbda)
print "Pump is set at %.7f um" % self.lbda_p
# Get the range to scan for signal for the nth resonance with respect to the pump
# Rq : The pump should have been set such that mpump has a meaningful value
def getSignalRange(self,n):
FWHM = (1-self.r*self.tau)*self.lbda_p**2/(self.wg.getng(self.lbda_p)*sqrt(2)*pi*self.L)
print "FWHM (um) : ",FWHM
fullrange = 5*FWHM #
wlFSR = self.lbda_p**2/(self.L*self.wg.getng(self.lbda_p)) # FSR in lambda
print "FSR (um) : ",wlFSR
lbda_s,m = self.getClosestCavityRes(self.lbda_p+n*wlFSR)
print "Resonance (um) : ",lbda_s
return lbda_s-fullrange/2,lbda_s+fullrange/2
def plotcavityresponse(self,albda = arange(1.5477-0.01,1.5477+0.01,0.0000001)):
cavity = self.applycavity(albda)*self.applycavity(albda).conjugate()
pump = self.pumpenvelop(albda)**2
lbda_i,m_i = self.getClosestCavityRes(1.548)
lbda_s = 1./(2./self.lbda_p-1./lbda_i)
signal_wl = funcpeak(albda,lbda_s)
idler_wl = funcpeak(albda,lbda_i)
plot([(albda,cavity,"-"),
(albda,pump/pump.max()*cavity.max(),"-"),
(albda,signal_wl/signal_wl.max()*cavity.max(),"r-"),
(albda,idler_wl/idler_wl.max()*cavity.max(),"r-")
]) # Plot the pump normalised wrt the biggest field enhancement
def setRangeJS(self,lbda_s_min,lbda_s_max,lbda_i_min,lbda_i_max):
    """Record the signal and idler wavelength windows used by computeJS()."""
    self.lbda_s_min, self.lbda_s_max = lbda_s_min, lbda_s_max
    self.lbda_i_min, self.lbda_i_max = lbda_i_min, lbda_i_max
def setRangeScanResonance(self,m):
    """Set the joint-spectrum scan windows for the mth resonance pair."""
    # Get the range for signal centered on the resonance
    lsm,lsM = self.getSignalRange(m)
    self.resonancenumber = m
    # Get the range for idler using rough energy conservation
    # (2/lp = 1/ls + 1/li): the max of one maps to the min of the other.
    lp = self.lbda_p
    lim = 1./(2./lp - 1./lsM)
    liM = 1./(2./lp - 1./lsm)
    #print "%.2f %.2f ; %.2f %.2f (pm)" % (lsm*10**6,lsM*10**6,lim*10**6,liM*10**6)
    print lsm,lsM,lim,liM
    self.setRangeJS(lsm,lsM,lim,liM)
def updatepumprange(self):
    """Compute and store the wavelength grid on which the pump is sampled.

    Grows a window around lbda_p until it contains 95% of the pump power,
    then clips it to the cavity-resonance region and resamples on a
    400-point grid. Stores the grid in self.pumprange and returns it.
    """
    print "Get pump range ..."
    lbda_p = self.lbda_p
    print lbda_p
    lbda_step= 0.00000001 # step for finding the pump range
    P = 0.
    targetfraction = 0.95
    deltalbda = 0.5*10**(-6) # initialize deltalbda at 1pm
    # Double the half-width until the integrated pump power reaches 95%.
    while (P<targetfraction):
        deltalbda = 2*deltalbda
        lbdas = arange(lbda_p-deltalbda,lbda_p+deltalbda,lbda_step)
        #print P
        P=(self.pumpenvelop(lbdas)*self.pumpenvelop(lbdas).conjugate()).sum()*lbda_step
        print P
    print P
    N = 400
    # get cavity range
    # If the pump is broader than the cavity, then we should chop the pump to the cavity region such that the grid is fine enough in the cavity
    # If the pump is narrower than the cavity, then keep pump range
    lsm,lsM = self.getSignalRange(0)
    rl = lsM-lsm
    lsm = lsm-rl/2
    lsM = lsM+rl/2
    lbdamax = min(lbdas[-1],lsM)
    lbdamin = max(lbdas[0],lsm)
    step = (lbdamax-lbdamin)/N # Step for the returned pump range
    res = arange(lbdamin,lbdamax,step)
    #print "Size of pump lbdas" ,lbdas.size
    #print self.pumpenvelop(lbda_p)
    self.pumprange = res
    print "Pump range : (um)",lbdas[0],lbdas[-1]
    return res
def getjointproba(self):
    """Return the normalised joint spectral lattice as-is."""
    return self.normlattice
def getjointprobascaled(self):
    """Return the joint lattice rescaled so that its maximum is 1."""
    peak = self.normlattice.max()
    return self.normlattice / peak
def computeJS(self): # begin=1.545,end=1.555,step=0.0001
    """Compute the joint spectrum of the generated photon pairs.

    Builds an N x N lattice of pair-generation probability over the
    signal/idler windows set by setRangeJS(), integrating over the pump
    grid (self.pumprange) for each (signal, idler) wavelength pair.
    Stores:
        self.lattice      raw joint probability distribution
        self.normlattice  sqrt of the normalised distribution (JS amplitude)
        self.phases       phase of the joint amplitude
        self.probapair    pair probability per pulse (two-mode squeezer approx.)
        self.beta2_pulsed / self.beta2_CW  analytic model comparisons
    """
    print self.wg.getng(self.lbda_p)
    P = self.pumppower
    L = self.L # Cavity length
    N = self.N
    lbda_s_min = self.lbda_s_min
    lbda_s_max = self.lbda_s_max
    lbda_i_min = self.lbda_i_min
    lbda_i_max = self.lbda_i_max
    step_i = (lbda_i_max-lbda_i_min)/N
    step_s = (lbda_s_max-lbda_s_min)/N
    a_lbda_i = arange(lbda_i_min,lbda_i_max,step_i)[0:N]
    a_lbda_s = arange(lbda_s_min,lbda_s_max,step_s)[0:N]
    Ni = a_lbda_i.size
    Ns = a_lbda_s.size
    print Ni, Ns
    Ni = N
    Ns = N
    self.step_i = step_i
    self.step_s = step_s
    rangepump = self.pumprange
    M = rangepump.size
    dlbda_pump = (rangepump.max()-rangepump.min())/M
    lattice = zeros((Ni,Ns))
    # Quantities that only depend on the first pump wavelength are
    # precomputed once outside the double loop.
    a_lbda_p1 = rangepump
    cav_resp_p1 = self.applycavity(a_lbda_p1)
    a_p1 = self.pumpenvelop(a_lbda_p1) # pump amplitude 1
    ng = self.wg.getng(self.lbda_p)
    print "Steps" ,step_i,step_s
    #dbgpm = 0.
    pumpmax = self.pumpenvelop(self.lbda_p)
    phases = zeros((Ni,Ns))
    for j in xrange(Ns):
        print j
        lbda_s = a_lbda_s[j] # lbda_s_min+j*step_s
        cav_resp_s = self.applycavity(lbda_s)
        for i in xrange(Ni):
            lbda_i = a_lbda_i[i] # lbda_i_min+i*step_i
            # Second pump wavelength from energy conservation.
            a_lbda_p2 = 1./(1./lbda_s+1./lbda_i-1./a_lbda_p1)
            a_p2 = self.pumpenvelop(a_lbda_p2) # pump amplitude 2
            # In order to save computation time we can take a_pm = 1. for small cavities
            a_dk = self.getdk(P*a_p1*conjugate(a_p1),P*a_p2*conjugate(a_p2),a_lbda_p1,a_lbda_p2,lbda_s,lbda_i)
            a_pm = sinc(L/2*a_dk/pi) # the L will be added later in the global constant
            #a_pm = 1.
            a_res = a_p1*a_p2*a_pm*cav_resp_p1*self.applycavity(a_lbda_p2)* self.applycavity(lbda_i)*cav_resp_s #
            a_res = a_res * a_lbda_p2/a_lbda_p1
            # Multiply by the dlambda;
            # The pump function is i um^(-1/2), dlbda_pump is in um
            a_res = a_res*dlbda_pump
            res = a_res.sum() # unitless
            #res = res
            # Multiply by the dlambda
            # Since the formula was derived for domega, we have to remember that domega = -2*pi*c/lbda**2 * dlbda
            lattice[i,Ns-1-j]= abs(res.real**2+res.imag**2)* (step_i/(lbda_i**2)) * (step_s/(lbda_s**2))
            #print angle(res)
            phases[i,Ns-1-j] = angle(res)
    # Check what should be the proper formula which keeps the joint spectral amplitude instead of joint spectral probability distribution
    # Apply custom filters:
    # m_filter_signal =zeros((Ni,Ns))
    # m_filter_idler =zeros((Ni,Ns))
    # for i in arange(Ni):
    #     m_filter_signal[i,:] = self.filter_signal(a_lbda_s)
    # for j in arange(Ns):
    #     m_filter_idler[:,j] = self.filter_idler(a_lbda_i)
    # lattice = lattice*m_filter_signal*m_filter_idler
    # Multiply by the appropriate missing constants
    lattice = lattice*(c*self.Epulse*self.gamma*(self.L))**2/(2*pi**2) #/ (2*pi*ng)
    Z = lattice.sum()# sqrt(abs(lattice*conjugate(lattice)).sum())
    self.normlattice = sqrt(abs(lattice/Z))
    self.lattice = lattice
    self.phases = phases
    xi = 2*lattice.sum()
    xi = tanh(sqrt(xi))**2 # Approximation valid in the case of two-mode squeezer
    self.probapair = xi * (1-xi)
    # Theory calculation for CW regime for comparison
    vg = c/self.wg.getng(self.lbda_p)
    print "Epulse (nJ) ", self.Epulse*10**9
    print "gamma W-1,m-1", self.gamma
    print "L (um)", L
    print "T (ps)", self.T*10**12
    print "vg %e" % vg
    print "r : %.4f" % self.r
    print "tau : %.4f" % self.tau
    print "Siglbda : %.5f" % (self.siglbda)
    #deltalbda = self.siglbda*sqrt(2*pi) # Such that the approx rectangular pulse results matches the gaussian def
    #beta2_pulsed = (self.Epulse*self.gamma*c)**2/(32*ng**4*pi**6)*self.lbda_p**4/(L**2*deltalbda**2)*(1-self.r**2)**4/(1-self.tau*self.r)**4
    xi = (self.Epulse*self.gamma*c)**2/(32*ng**4*pi**2)*self.lbda_p**4*pumpmax**4/(L**2)*(1-self.r**2)**4/(1-self.tau*self.r)**4
    xi = tanh(sqrt(xi))**2
    beta2_pulsed = xi * (1-xi)
    #beta2_pulsed = (self.Epulse*self.T*self.gamma/(L*10**(-6)))**2*vg**4/16.*(1-self.r**2)**4/(1-self.tau*self.r)**4
    xi = self.gamma**2*self.pumppower**2*(L*10**(-6))/8 * vg*self.T*(1-self.r**2)**4/(1-self.r*self.tau)**7
    xi = tanh(sqrt(xi))**2
    beta2_CW = xi * (1-xi)
    # We multiply the lattice by a factor of two since we only integrate over half of Phi(k1,k2) and we should account for the other symmetrical half
    print "Nb pairs per pulse:",self.probapair
    print "Flat pulse model:", beta2_pulsed
    print "CW model:", beta2_CW
    lbda_i0 = (lbda_i_max+lbda_i_min)/2
    lbda_s0 = (lbda_s_max+lbda_s_min)/2
    self.extent = list(array([lbda_i_min-lbda_i0,lbda_i_max-lbda_i0,lbda_s_min-lbda_s0,lbda_s_max-lbda_s0])*1000) # Check where should go i and s
    self.beta2_pulsed = beta2_pulsed
    self.beta2_CW = beta2_CW
def getPhases(self):
    """Return the phase lattice computed by computeJS()."""
    return self.phases
def getAverageSpectra(self):
    """Return the marginal spectra: column-wise and row-wise sums of the
    normalised joint lattice."""
    lattice = self.normlattice
    return lattice.sum(axis=0), lattice.sum(axis=1)
def save(self,directory="resonances_toshiba"):
    """Write the simulation parameters and joint spectrum to a text file.

    Returns the file name written. NOTE(review): the "data\\..." paths use
    Windows separators; not portable to POSIX systems.
    """
    timestamp = time.strftime("%m%d_%H%M",time.localtime(time.time()))
    # Create repository if it does not exist
    if not os.path.exists("data\\%s" % directory):
        os.makedirs("data\\%s" % directory)
    fname = "data\\%s\\simu_%s_r=%.3f_tau=%.3f_%.2fps_res=%d.txt" % (directory,timestamp,self.r,self.tau,self.T * 10**12,self.resonancenumber)
    # Header
    fw = open(fname,"w")
    fw.write("#Laser parameters\n")
    fw.write("%s : %.3f\n" % ("Pulse duration (ps)",self.T*10**12))
    fw.write("%s : %.4f\n" % ("Pump power avg (mW)",self.pumppower*1000))
    fw.write("%s : %.3f\n" % ("Repetition rate(MHz)",self.reprate/(10**6)))
    fw.write("%s : %.18e\n" % ("Energy per pulse (uJ)",self.Epulse*1000000))
    fw.write("%s : %.6f\n" % ("Pump wavelength (um)",self.lbda_p))
    fw.write("\n#Waveguide parameters\n")
    fw.write("%s : %.3f\n" % ("Width (nm)",self.wg.width))
    fw.write("%s : %.3f\n" % ("Height (nm)",self.wg.height))
    fw.write("%s : %.3f\n" % ("Aeff (um^2)",self.wg.Aeff))
    fw.write("%s : %.3f\n" % ("gamma (W-1 m-1)",self.gamma))
    fw.write("\n#Ring parameters\n")
    fw.write("%s : %.3f\n" % ("Cavity length (um)",self.L))
    fw.write("%s : %.5f\n" % ("Tau",self.tau))
    fw.write("%s : %.5f\n" % ("r",self.r))
    fw.write("\n#BiPhoton state properties\n")
    fw.write("%s : %.5f\n" % ("Nb pairs per pulse",self.probapair))
    fw.write("%s : %.5f\n" % ("Flat pulse model",self.beta2_pulsed))
    fw.write("%s : %.5f\n" % ("CW model",self.beta2_CW))
    self.computeHeraldedPhotonPurity()
    #self.computePcoincfrom2photons()
    #fw.write("%s : %.6f\n" % ("Visibility from two heralded sources",self.visibility))
    fw.write("%s : %.6f\n" % ("Schmidt number",abs(self.schn)))
    fw.write("%s : %.6f\n" % ("Purity",abs(1/self.schn)))
    # Theory calculation for CW regime for comparison
    vg = c/self.wg.getng(self.lbda_p)
    beta2 = self.gamma**2*(self.Epulse/self.T)**2*(self.L*10**(-6))/8 * vg*self.T*(1-self.r**2)**4/(1-self.r)**7
    fw.write("%s : %.5f\n" % ("Nb pairs(analytical CW)",beta2))
    fw.write("\n")
    fw.write("N=%d\n" % self.N)
    fw.write("Resonance number : %d\n" % self.resonancenumber)
    fw.write("\n#Scan range\n")
    fw.write("%s : %.6e - %.6e, %.6e\n" % ("idl min, idl max, step (um)",self.lbda_i_min,self.lbda_i_max,self.step_i))
    fw.write("%s : %.6e - %.6e, %.6e\n" % ("sig min, sig max, step (um)",self.lbda_s_min,self.lbda_s_max,self.step_s))
    fw.write("\n#Raw data Biphoton distribution\n")
    # Saves the joint spectrum
    for j in xrange(self.N):
        line = " ".join(("%.18e" % x) for x in self.lattice[:,self.N-1-j])
        fw.write(line+"\n")
    fw.write("\n#Raw data Biphoton phase distribution\n")
    # Saves the joint spectrum
    for j in xrange(self.N):
        line = " ".join(("%.18e" % x) for x in self.phases[:,self.N-1-j])
        fw.write(line+"\n")
    fw.close()
    return fname
def load(self,fname):
    """Restore a simulation previously written by save().

    Parses the 'name : value' header pairs through self.setters and
    reloads the raw biphoton amplitude and phase lattices.
    """
    print "Loading %s ..." % fname
    f = open(fname,"r")
    line = f.readline()
    while (len(line)>0):
        if line.startswith("#Scan range"):
            # Load the extent of the wavelength for signal and idler
            line = f.readline() # Readline for the idler
            self.lbda_i_min,self.lbda_i_max = parse_extent(line)
            line = f.readline() # Readline for the signal
            self.lbda_s_min,self.lbda_s_max = parse_extent(line)
            self.extent = [self.lbda_i_min,self.lbda_i_max,self.lbda_s_min,self.lbda_s_max] # Check where should go i and s
        if line.startswith("#Raw data Biphoton distribution"):
            # Load the biphoton distribution
            for j in xrange(self.N):
                line = f.readline()
                self.lattice[:,self.N-1-j] = parse_biphoton_data(line)
        if line.startswith("#Raw data Biphoton phase distribution"):
            # Load the biphoton phase distribution
            for j in xrange(self.N):
                line = f.readline()
                self.phases[:,self.N-1-j] = parse_biphoton_data(line)
        if line.find("#")>=0:
            # NOTE(review): l1 is computed but never used afterwards.
            l1 = line.split("#")[0]
        if line.find(":")>=0:
            line = line.replace("\n","")
            name,value = line.split(" : ")
            # Dispatch numeric header values to their setter by name.
            if name in self.setters.keys():
                self.setters[name](float(value))
        elif line.startswith("N="):
            name,value = line.split("=")
            self.setters[name](int(value))
        line = f.readline()
    # Renormalise the reloaded lattice, as computeJS() does.
    Z = self.lattice.sum()# sqrt(abs(lattice*conjugate(lattice)).sum())
    self.normlattice = sqrt(abs(self.lattice/Z))
    f.close()
class CustomPump():
    """Measured pump pulse spectrum loaded from a CSV file and fitted.

    The fit model is a Gaussian multiplied by a rational fraction to
    capture the distortion of the measured spectrum.
    """
    def __init__(self,fname="G2 Straight Transmission.csv"):
        self.rootname = "."
        self.__load__(fname)
        self.__fit__()
    def __load__(self,fname):
        """Read (wavelength, amplitude) pairs from the CSV file `fname`."""
        path = os.path.join(self.rootname,fname)
        f = open(path)
        line = f.readline()
        lbdas = []
        amplitudes = []
        # Skip the 30-line CSV header.
        for i in arange(30):
            line = f.readline()
        while(len(line))>0:
            splitted = line.split(",")
            lbda,amplitude = splitted[0:2]
            line = f.readline()
            # NOTE(review): `lbda` is still a string here; in Python 2
            # str > int is always True, so this check never filters rows.
            if lbda>0:
                lbdas.append(float(lbda)/1000) # nm -> um
                amplitudes.append(float(amplitude))
        self.lbdas = array(lbdas)
        self.amplitudes = array(amplitudes)
        self.amplitudes = self.amplitudes/self.amplitudes.sum() # Normalise
        # Pump wavelength = location of the spectrum maximum.
        self.lbda_p = self.lbdas[self.amplitudes.argmax()]
    def __fit__(self):
        """Least-squares fit of the model to the measured spectrum."""
        # Gaussian multiplied by rational fraction to account for distorsion
        a = (10**3)
        b = (10**3)
        c = (10**3)**1.5
        d = 10
        e = 1
        f = 1
        sig = 1.0*10**(-3) # um
        p0 = [self.lbda_p,sig,a,b,c,d,e,f]
        plsq = leastsq(self.__residuals__, p0, args=(self.amplitudes, self.lbdas))
        self.p = plsq[0]
        print self.p
    # p : parameters
    # lbdas : wavelengths
    def __func__(self,p,lbdas):
        """Model: Gaussian envelope times a rational fraction of dlbda."""
        lbda0,sig,a,b,c,d,e,f = p
        dlbdas = lbdas-lbda0
        res = exp(-dlbdas**2/(2*sig**2))*(a*dlbdas+f/(b*dlbdas**3+c*dlbdas**2+d*dlbdas+e))
        return res
    def __residuals__(self,p,y, x):
        # Residuals for scipy.optimize.leastsq.
        err = y-self.__func__(p,x)
        return err
    def getPulse(self,lbda):
        """Evaluate the fitted pulse spectrum at wavelength(s) `lbda`."""
        return self.__func__(self.p,lbda)
    def plotres(self):
        """Return plot tuples comparing measured points with the fit."""
        lbda1,lbda2 = min(self.lbdas),max(self.lbdas)
        x = arange(lbda1,lbda2,0.000001)
        #self.p = (A,r,tau)
        plots = [(self.lbdas,self.amplitudes,"ro"),(x,self.getPulse(x),"k-")] # (neff0 self.lbdas,self.Iouts,"ro"),
        #plot(plots)
        print self.lbda_p
        return plots
# Fit ring when seeded by a pulse laser from which we know the shape
class RingPulsed():
    """Fit of a ring resonator transmission measured under pulsed seeding.

    The model multiplies an all-pass ring response by the known pump
    spectrum (`pumpfunc`) and a rational-fraction distortion term.
    """
    def __init__(self,R,Lc,fname,pumpfunc):
        self.R = R # radius (um)
        self.Lc = Lc # coupling length (um)
        self.L = 2*(pi*R + Lc) # Total length
        #FSR = 1.5556-1.5477 # um
        self.neff0 = 4.14330 #4.143277 # Starting effective group index 4.1434
        self.pumpfunc = pumpfunc
        self.rootname = "."
        self.__load__(fname)
        self.__fit__()
    def __load__(self,fname):
        """Read (wavelength, amplitude) pairs from the CSV file `fname`."""
        path = os.path.join(self.rootname,fname)
        f = open(path)
        line = f.readline()
        lbdas = []
        amplitudes = []
        # Skip the 30-line CSV header.
        for i in arange(30):
            line = f.readline()
        while(len(line))>0:
            splitted = line.split(",")
            lbda,amplitude = splitted[0:2]
            line = f.readline()
            # NOTE(review): `lbda` is a string here; in Python 2 the test
            # str > int is always True, so this never filters rows.
            if lbda>0:
                lbdas.append(float(lbda)/1000) # nm -> um
                amplitudes.append(float(amplitude))
        self.lbdas = array(lbdas)
        self.amplitudes = array(amplitudes)
        self.amplitudes = self.amplitudes/self.amplitudes.sum() # Normalise
        # A resonance dip: take the minimum of the transmission.
        self.lbda_p = self.lbdas[self.amplitudes.argmin()]
        # adjust the neff0 guess so the dip sits exactly on a mode number
        m = int(self.neff0*self.L/self.lbda_p)
        self.neff0 = m*self.lbda_p/self.L
    def __fit__(self):
        """Least-squares fit of the ring+pump model to the measurement."""
        a = b = c = d=e=f=0.000000000000001
        p0 = [max(self.amplitudes),0.9,0.9,self.neff0,a,b,c,d,e,f]
        plsq = leastsq(self.__residuals__, p0, args=(self.amplitudes, self.lbdas))
        self.p = plsq[0]
        print self.p
    # p : parameters
    # lbdas : wavelengths
    def __func__(self,p,lbdas):
        """All-pass ring response times pump spectrum times distortion."""
        A,r,tau,neff,a,b,c,d,e,f = p
        dlbdas = lbdas-self.lbda_p
        #neff = self.neff0
        L = self.L
        phi = 2*pi*L*neff/lbdas
        r2 = r**2
        tau2 = tau**2
        K = 2*r*tau*cos(phi)
        res = A*(r2+tau2-K)/(1+r2*tau2-K) * self.pumpfunc(lbdas) * (a+b*dlbdas+c*dlbdas**3)/(d+e*dlbdas+f*dlbdas**3)
        return res
    def ringResponse(self,p,lbdas):
        """Same model as __func__ but with the pump replaced by its peak
        value (shows the bare ring response on the same scale)."""
        A,r,tau,neff,a,b,c,d,e,f = p
        dlbdas = lbdas-self.lbda_p
        #neff = self.neff0
        L = self.L
        phi = 2*pi*L*neff/lbdas
        r2 = r**2
        tau2 = tau**2
        K = 2*r*tau*cos(phi)
        res = A*(r2+tau2-K)/(1+r2*tau2-K) * (a+b*dlbdas+c*dlbdas**3)/(d+e*dlbdas+f*dlbdas**3)*max(self.pumpfunc(lbdas))
        return res
    def __residuals__(self,p,y, x):
        # Residuals for scipy.optimize.leastsq.
        err = y-self.__func__(p,x)
        return err
    def getIout(self,lbda):
        """Evaluate the fitted transmission at wavelength(s) `lbda`."""
        return self.__func__(self.p,lbda)
    def plotres(self):
        """Return plot tuples: data, full fit, and bare ring response."""
        lbda1,lbda2 = min(self.lbdas),max(self.lbdas)
        x = arange(lbda1,lbda2,0.000001)
        plots = [(self.lbdas,self.amplitudes,"bo"),(x,self.getIout(x),"k-"),(x,self.ringResponse(self.p,x),"b--")] # (self.lbdas,self.Iouts,"ro"),
        #plot(plots)
        self.lbda_p = self.lbdas[self.amplitudes.argmin()]
        print self.lbda_p
        return plots
    # December 15, 2004 / Vol. 29, No. 24 / OPTICS LETTERS p 2861
    # Ultrahigh-quality-factor silicon-on-insulator microring resonator
    def computeQ(self):
        """Quality factor from the fitted r and tau (see reference above)."""
        A,r,tau,neff=self.p[0:4]
        return (2*pi*neff/self.lbda_p)*self.L/(-2*log(r*tau))
def main():
    """Run a joint-spectrum simulation sweep and save/plot each result.

    The nested loops sweep resonance number, pulse duration T (ps),
    self-coupling r and transmission tau; most sweeps are currently
    reduced to a single value.
    """
    # Load the pulse
    #pump = CustomPump("G2 Straight Transmission.csv")
    #pump.plotres()
    #pumpfunc = pump.getPulse
    wg = Waveguide(450,220)
    T = 100.*10**(-12)
    #for T in [100.,50.,25.,10.,5.]:
    N = 100 # 200# N = 50 Provides accurate number for r = 0.98 rings with 100ps pulses
    #for T in [1000.,500.,200.,100.,50.,25.,10.]:
    r = 0.93
    tau = 1.-0.0198
    radius = 10.
    coupling_length = 5.
    lbda0= 1.55
    res_number = 1 # resonance number (pump resonance is 0).
    for res_number in [1]: #arange(0,1):# [1,2,3,4]:
        for T in [5.0] : # ,0.75,1.,1.5,2.0,0.5,1.,5.,,50.,100.,500.,1000.,2000. #arange(10.,1000,10.): # [60.,70.,80.,90.,110.,120.,130.,140.,150.,160.,170.,180.,190.,210.,220.,230.,240.,250.,260.,270.,280.,290.]: #arange(10.,100.,10.): # arange(5,55,5): #[25.,50.,100.,200.,500.]: [1.0,2.0,5.0,10.0,20.0,50.0,100.0,200.0,500.0,1000.0,]
            for r in [0.9]: # [0.95,0.96,0.97,0.98,0.99]: # 0.85,0.86,0.87,0.88,0.89,0.90,0.91,0.92,0.93,0.94,0.95,0.96
                for tau in [0.997]: # 0.76,0.96,0.98
                    #for r2 in [0.9998,0.9997,0.9996,0.9995,0.9994]: #[1.0,0.9999,0.999,0.99]:
                    mySim =FWM_RingSimu(wg,length = 2*(radius*pi+coupling_length),pulseduration = T*10**(-12),N = N,r = r,tau = tau,pumppower = 3.*10**-3,pumpwl = lbda0) # 500
                    #mySim.pumpenvelop = pumpfunc
                    mySim.setRangeScanResonance(+res_number)
                    mySim.plotcavityresponse()
                    mySim.updatepumprange()
                    mySim.computeJS()
                    fname = mySim.save("Ring_pumpscan")
                    mySim.plotBiphoton(fname[:-3]+"png")
# -----------------------------------------------------------------------------#
# MISC FUNCTIONS II: Specific FWM applications
# -----------------------------------------------------------------------------#
def plot1Dgain():
    """Plot the CW FWM gain versus signal wavelength for a set of waveguides.

    Also dumps the gain curve of the last simulated waveguide to
    ``fwm_bandwidth_cw.csv``.
    """
    wgs = [
        #Waveguide(450,220),
        Waveguide(470,220)
        #Waveguide(500,220),
        #Waveguide(550,220),
        ]
    plots = []
    colors = ["r-","b-","g-"]
    i = 0
    lbda_s = arange(1.40,1.70,0.0001)
    for wg in wgs:
        simu = FWM_Simu(wg = wg,length = 0.0058,pumpwl = 1.5479)
        res = simu.toplotCWGain(lbda_s)
        plots.append((lbda_s,res,colors[i]))
        i += 1
    # Write the (last) gain curve to CSV.
    fw = open("fwm_bandwidth_cw.csv","w")
    # BUGFIX: the header previously lacked a trailing newline, so the first
    # data row was appended onto the header line.
    fw.write("Wavelength (um), FWM gain (a.u)\n")
    for i in arange(lbda_s.size):
        line = "%.5f,%.5f\n" % (lbda_s[i],res[i])
        fw.write(line)
    fw.close()
    plot(plots)
def plotnbpairsScaling():
    """Plot the integrated FWM gain as a function of waveguide length."""
    lbda_min, lbda_max = 1.542, 1.544
    wg = Waveguide(550,220)
    lbda_s = arange(1.5,1.6,0.0001)
    # Boolean mask kept for reference; the curve below integrates the full band.
    tointegrate = (lbda_s > lbda_min) * (lbda_s < lbda_max)
    lengths = arange(0,0.01,0.0001)
    totals = []
    for L in lengths:
        simu = FWM_Simu(wg = wg, length = L)
        # Gain per unit bandwidth scales as (L/2)^2 times the CW gain curve.
        gainperbandwidth = (L/2)**2 * simu.toplotCWGain(lbda_s = lbda_s)
        totals.append(gainperbandwidth.sum())
    plot([(lengths, totals, "r-")])
# Script entry point: run the joint-spectrum sweep. Alternative entry
# points (pump/ring fitting, scaling/gain plots) are kept commented out.
if __name__ == "__main__":
    #pump = CustomPump("G2 Straight Transmission.csv")
    #pump.plotres()
    #ring = RingPulsed(20,5,"G2 Ring Transmission.csv",pump.getPulse)
    #plot(ring.plotres()+pump.plotres())
    main()
    #plotnbpairsScaling()
    #plot1Dgain()
|
[
"noreply@github.com"
] |
damienbonneau.noreply@github.com
|
237921523213c4beab3bebd982ca877c4dbba96d
|
2884f45d89a5cd378ac44d236fc7c11ff01f2d8b
|
/main.py
|
2ff64ce7e8ed859549778e58b852b52894611199
|
[] |
no_license
|
UCSD-CSE-SPIS-2021/practice-Jodi-R
|
32ea94de9d3c845403c183fc4d153a8e93f2572a
|
849fe54f9cbb7ceb11c570a87cd87e133e878256
|
refs/heads/master
| 2023-07-04T23:42:38.973082
| 2021-08-02T23:08:50
| 2021-08-02T23:08:50
| 392,114,139
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 50
|
py
|
# Jodi Roe for CSE SPIS 2021
# Classic first program: emit a greeting on stdout.
message = 'Hello, World!'
print(message)
|
[
"88351397+jodiiroe@users.noreply.github.com"
] |
88351397+jodiiroe@users.noreply.github.com
|
f19537278b9f16af0b07993b1df771c449dc7e10
|
b70bf06a7b8289ee4508da0c28a49bc1d22ec4a2
|
/Reliancestore/reliancefresh/apps.py
|
d5e6486b2b5b9191760bc075b88be2bf934f17cf
|
[] |
no_license
|
kondlepumanikanta/djangobt6
|
7cde78e6b47806fc1634aff943549e6be2f89942
|
6fa5ef9fef1959b17b23204ac76d4881e495b431
|
refs/heads/master
| 2020-03-22T20:22:58.558692
| 2018-07-18T17:34:04
| 2018-07-18T17:34:04
| 139,739,615
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 166
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class ReliancefreshConfig(AppConfig):
    """Django application configuration for the ``reliancefresh`` app."""
    # App label used by Django's app registry.
    name = 'reliancefresh'
|
[
"manikantakondlepu2244@gmail.com"
] |
manikantakondlepu2244@gmail.com
|
7a01fcb135254742a291c46f01e581aad4cca8c0
|
54b09aac1d5dcb7f1310a9eaf63e81a09c4fb7f2
|
/getting-started-with-python/hello.py
|
133fd4d9df09fb7864ab09d5c01a6a4a5086f531
|
[] |
no_license
|
nikhildarocha/coursera_py4e
|
3f239d89ad26a1f148e2c70c7d2ed484db3df49d
|
b4ebe1bc5e7607b5780427df5dedcca5b8e03351
|
refs/heads/master
| 2023-04-05T01:05:51.525492
| 2021-04-14T09:50:30
| 2021-04-14T09:50:30
| 357,854,416
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 31
|
py
|
# Greet everyone on stdout.
greeting = 'Hello World Everyone!'
print(greeting)
|
[
"niks77883@gmail.com"
] |
niks77883@gmail.com
|
cd50ec0cada7b4aa014f574bfb9d1be7b604142f
|
95d7a2e316f6cc5fd0120086bd0b00a3d1dd7ac7
|
/simple_trainer/pipeline.py
|
55cddbdfed42e6f13ed1420b0e3c6bc498a7842e
|
[
"MIT"
] |
permissive
|
akshaybadola/simple_trainer
|
cf2012ada1889719f48812f0bf1dc8a3405885c1
|
f6a2c7739722b334c1f619220f40917d71b044ba
|
refs/heads/master
| 2022-11-15T10:20:48.864405
| 2022-06-24T20:21:41
| 2022-06-24T20:21:41
| 222,618,146
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,790
|
py
|
from typing import List, Dict, Callable, Union, Optional
import abc
import types
import logging
from functools import partial
from contextlib import ExitStack
from common_pyutil.functional import first, maybe_then
def partial_or_func_name(x: Callable, describe: bool = False):
    """Return a printable name for ``x``, a plain function or a partial.

    NOTE(review): relies on ``common_pyutil.functional.maybe_then`` —
    presumably it applies the handler matching the type of ``x``. For a
    ``partial`` the wrapped function's name is returned, optionally
    prefixed with "partial " when ``describe`` is True; for a plain
    function its ``__name__`` is returned. Confirm against maybe_then's
    documentation.
    """
    return maybe_then(x, [partial, types.FunctionType],
                      [lambda x: ("partial " if describe else "") + x.func.__name__,
                       lambda x: x.__name__])
class Hooks(abc.ABC):
    """A simple class implementation for flow based programming

    Hooks are named points in an execution pipeline at which functions can be
    added and removed dynamically. Hooks should be:

    1. Configurable
    2. Inspectable
    3. Modifiable

    Unlike a standard pipeline, hooks can be added and removed by user, and
    functions to the hook can be altered programmatically and interactively
    through the publicly exposed API.
    """

    def __init__(self, logger: logging.Logger):
        """Initialize with a logger.

        Args:
            logger: A logger for logging
        """
        self._hooks: Dict[str, List[Callable]] = {}
        self.logger = logger

    def __iter__(self):
        """Iterate over the hook names."""
        return self._hooks.keys().__iter__()

    def create_hook(self, hook_name: str, funcs: Optional[List[Callable]] = None):
        """Create a `hook` with given name

        Args:
            hook_name: Name of the hook to create
            funcs: Optional list of functions to initialize the hook.
                Omitting it creates an empty hook.

        Each function can be called with or without arguments. The arguments
        must be keyword arguments. The hook must process the kwargs as it
        receives. This provides for the most flexibility without inspecting the
        code. It's upto the functions on how they will process the keyword
        arguments.

        Example:
            from types import SimpleNamespace

            def some_function(**kwargs):
                processable_args = ["arg1", "arg2", "arg3"]
                args_dict = {arg: kwargs.get(arg, None) for arg in processable_args}
                args = SimpleNamespace(**args_dict)
                if any([args_dict[arg] is None for arg in processable_args]):
                    # maybe raise error or catch error and don't
                    # do anything with a warning
        """
        if hook_name in self._hooks:
            raise AttributeError(f"Hook {hook_name} already exists")
        # BUGFIX: the previous signature used a mutable default (funcs=[]),
        # which was shared by every hook created without an explicit list,
        # so all such hooks aliased one list. Create a fresh list instead.
        self._hooks[hook_name] = [] if funcs is None else funcs

    def delete_hook(self, hook_name: str):
        """Delete a named `hook` from Hooks

        Args:
            hook_name: Name of the hook to delete
        """
        if hook_name not in self._hooks:
            raise AttributeError(f"No such hook {hook_name}")
        else:
            self._hooks.pop(hook_name)

    @abc.abstractmethod
    def _prepare_function(self, func: Callable) -> Callable:
        """Prepare a function to be added to a hook.

        When any function is added to a hook, it's transformed by with this
        function. This has to be overridden. E.g., in the example below, each
        function added to any hook is called with `self` as the first argument.

        Example:
            class Pipeline(Hooks):
                def __init__(self):
                    pass

                def _prepare_function(self, func):
                    return partial(func, self)

        Or if you'd like to keep the :class:`Hooks` instance separate.

            class MyHooks(Hooks):
                def _prepare_function(self, func):
                    return func

            class Pipeline:
                def __init__(self):
                    self.hooks = MyHooks()
        """
        return func

    def check_func_args(self, func: Callable):
        """Validate that `func` can be called without positional arguments."""
        if isinstance(func, partial):
            # A partial must have all its positional parameters bound.
            n_args = func.func.__code__.co_argcount
            if n_args != len(func.args):
                raise AttributeError("Partial function must be fully specified")
        else:
            n_args = func.__code__.co_argcount
            if n_args:
                raise AttributeError("Function to the hook cannot take any arguments")

    def run_hook_with_contexts(self, hook_name: str, contexts: List, **kwargs):
        """Run a named hook with contexts

        Args:
            hook_name: Name of the hook
            contexts: contexts in which to run
            kwargs: Optional keyword arguments for hook
        """
        hook = self._get_hook(hook_name)
        if hook:
            with ExitStack() as stack:
                for con in contexts:
                    stack.enter_context(con)
                for func in hook:
                    func(**kwargs)

    def run_hook(self, hook_name: str):
        """Run a named hook.

        Args:
            hook_name: Name of the hook
        """
        hook = self._get_hook(hook_name)
        if hook:
            for func in hook:
                func()

    def run_hook_with_args(self, hook_name: str, **kwargs):
        """Run a named hook with arguments.

        Only keyword arguments are allowed. Therefore, for all the functions in
        the hook can accept an arbitrary number of arguments and the functions
        can check and choose the relevant arguments.

        Args:
            hook_name: Name of the hook
            kwargs: keyword arguments only for the hook
        """
        hook = self._get_hook(hook_name)
        if hook:
            for func in hook:
                func(**kwargs)

    def add_to_hook(self, hook_name: str, func: Callable, position: Union[int, str] = 0):
        """Add function :code:`func` to hook_name with name `hook_name`.

        Args:
            hook_name: Name of the hook
            func: A function with a single argument
            position: Where to insert the hook_name. Defaults to front of list.

        If `position` is not given then it's added to the front of the list.
        NOTE: adding to an unknown hook is a silent no-op (existing behaviour).
        """
        f_name = partial_or_func_name(func, True)
        func = self._prepare_function(func)
        self.check_func_args(func)
        if hook_name in self._hooks:
            self.logger.info(f"Adding {f_name} to {hook_name} at {position}")
            if position == "first":
                pos = 0
            elif position == "last":
                # BUGFIX: previously used len(hook_name) — the length of the
                # hook *name string* — instead of the number of functions
                # already registered, so "last" inserted at a wrong index.
                pos = len(self._hooks[hook_name])
            elif isinstance(position, int):
                pos = position
            else:
                raise ValueError(f"Unknown Value for position {position}")
            self._hooks[hook_name].insert(pos, func)

    def add_to_hook_at_end(self, hook_name: str, func: Callable):
        """Append `func` to the end of the named hook."""
        self.add_to_hook(hook_name, func, "last")

    def add_to_hook_at_beginning(self, hook_name: str, func: Callable):
        """Insert `func` at the front of the named hook."""
        self.add_to_hook(hook_name, func, "first")

    def add_to_hook_before(self, hook_name: str, func: Callable, before_func: str):
        """Add function :code:`func` to hook with given name, just before
        the function named `before_func`.

        Args:
            hook_name: Name of the hook
            func: A function with a single argument
            before_func: Name of the function to insert before
        """
        f_name = partial_or_func_name(func, True)
        func = self._prepare_function(func)
        self.check_func_args(func)
        if hook_name in self._hooks:
            self.logger.info(f"Adding {f_name} to {hook_name} before {before_func}")
            names = [partial_or_func_name(x) for x in self._hooks[hook_name]]
            if before_func in names:
                pos = names.index(before_func)
                self._hooks[hook_name].insert(pos, func)
            else:
                raise ValueError(f"No such func {before_func}")

    def add_to_hook_after(self, hook_name: str, func: Callable, after_func: str):
        """Add function :code:`func` to hook with given name, just after
        the function named `after_func`.

        Args:
            hook_name: Name of the hook_name
            func: A function with a single argument
            after_func: Name of the function to insert after
        """
        f_name = partial_or_func_name(func, True)
        func = self._prepare_function(func)
        self.check_func_args(func)
        if hook_name in self._hooks:
            self.logger.info(f"Adding {f_name} to {hook_name} after {after_func}")
            names = [partial_or_func_name(x) for x in self._hooks[hook_name]]
            if after_func in names:
                pos = names.index(after_func) + 1
                self._hooks[hook_name].insert(pos, func)
            else:
                raise ValueError(f"No such func {after_func}")

    def remove_from_hook(self, hook_name: str, function_name: str):
        """Remove from named hook the named function.

        Args:
            hook_name: The name of the hook_name
            function_name: The name of the function to remove

        If there are multiple functions with same name, remove only the first
        one from the list.
        """
        hook = self._get_hook(hook_name)
        if hook:
            func = first(hook, lambda x: partial_or_func_name(x) == function_name)
            self._hooks[hook_name].remove(func)

    def remove_from_hook_at(self, hook_name: str, position: int):
        """Remove from named hook the function at position.

        Args:
            hook_name: The name of the hook_name
            position: The position at which the function to remove
        """
        hook = self._get_hook(hook_name)
        if hook:
            hook.pop(position)

    def _get_hook(self, hook_name: str) -> Optional[List[Callable]]:
        """Get hook with given name if it exists, else None.

        Args:
            hook_name: Name of the hook
        """
        if hook_name in self.hooks:
            return self.hooks[hook_name]
        else:
            return None

    @property
    def hooks(self) -> Dict[str, List[Callable]]:
        """Return all the hooks
        """
        return self._hooks

    def describe_hook(self, hook_name: str) -> List[str]:
        """Describe the hook with given name

        Args:
            hook_name: Name of the hook

        For each function in the hook, if it's a regular function return a
        string representation of a tuple of:

        1. The function name
        2. The function annotations

        If it's a :class:`partial` function:

        1. The function name
        2. The function arguments
        3. The keyword arguments
        """
        hook = self._get_hook(hook_name)
        retval = []
        if hook:
            for x in hook:
                if isinstance(x, partial):
                    retval.append(f"{partial_or_func_name(x, True)}")
                else:
                    retval.append(f"{x.__name__}")
        return retval
|
[
"akshay.badola.cs@gmail.com"
] |
akshay.badola.cs@gmail.com
|
777367c186790c4562a46fd27243d541e08d8501
|
0596f538c3055c9f215c32cd364c716e716d8e47
|
/exercicios/ex029.py
|
b410b300366a19e68982e1f58f9eca2ab091c3bb
|
[
"MIT"
] |
permissive
|
RicardoAugusto-RCD/exercicios_python
|
6d2435a001966d180092374ff35a4fda232f5812
|
8a803f9cbc8b2ad0b5a6d61f0e7b6c2bc615b5ff
|
refs/heads/main
| 2023-07-10T18:15:24.837331
| 2021-08-18T12:27:00
| 2021-08-18T12:27:00
| 388,438,802
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 512
|
py
|
# Read a car's speed; above the 80 km/h limit the driver is fined
# R$7.00 for every km/h of excess speed.
velocidade = float(input('Digite uma velocidade em km/h: '))
if velocidade > 80:
    excesso = velocidade - 80
    multa = excesso * 7
    print('Você estava a {}km/h e o valor da multa será R${:.2f}'.format(velocidade, multa))
else:
    print('Você estava dentro da velocidade correta da via!')
|
[
"ricardoaugusto.rcd@gmail.com"
] |
ricardoaugusto.rcd@gmail.com
|
a17a3398d51c31dd07c12aac53b64feea4bb01a3
|
871e1e8e8e4a13d930ad25b24bdd9cd9fefa3c49
|
/accounts/views.py
|
f9e9c6428fed13fdd8754a44b995edc5383f24d9
|
[] |
no_license
|
namitgpta/Django-projects
|
6c9d7be5f8de358f7b7130716833ce0a31e6a54e
|
642438a8700809547b742e6427677fb410c67317
|
refs/heads/master
| 2023-04-16T15:57:35.024649
| 2021-05-02T17:40:38
| 2021-05-02T17:40:38
| 363,646,729
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,916
|
py
|
from django.shortcuts import render, redirect
from django.contrib import messages
from django.contrib.auth.models import User, auth
# Create your views here.
def login(request):
    """Authenticate a user from POSTed credentials.

    GET renders the login form; POST attempts authentication, redirecting
    home on success or back to the login page with a flash message on
    failure.
    """
    if request.method == 'POST':
        username = request.POST['username']
        password = request.POST['password']
        user = auth.authenticate(username=username, password=password)
        if user is not None:
            auth.login(request, user)
            return redirect('/')
        else:
            messages.info(request, 'invalid credentials')
            return redirect('login')
    else:
        return render(request, 'login.html')
def register(request):
    """Create a new user account from POSTed form data.

    Validates that the two passwords match and that neither the username
    nor the email is already taken; on success creates the User and sends
    the visitor to the login page. GET renders the registration form.
    """
    if request.method == 'POST':
        first_name = request.POST['first_name']
        last_name = request.POST['last_name']
        username = request.POST['username']
        password1 = request.POST['password1']
        password2 = request.POST['password2']
        email = request.POST['email']
        if password1 == password2:
            # Uniqueness checks before creating the account.
            if User.objects.filter(username=username).exists():
                messages.info(request, 'Username already taken')
                return redirect('register')
            elif User.objects.filter(email=email).exists():
                messages.info(request,'Email already taken')
                return redirect('register')
            else:
                user = User.objects.create_user(username=username, password=password1, email=email,
                                                first_name=first_name, last_name=last_name)
                user.save()
                print('User Created')
                return redirect('login')
        else:
            messages.info(request, 'password not matching....')
            return redirect('register')
    else:
        return render(request, 'register.html')
def logout(request):
    """Log the current user out and redirect to the home page."""
    auth.logout(request)
    return redirect('/')
|
[
"namitg677@gmail.com"
] |
namitg677@gmail.com
|
e7cc031b91f1dbc8e5774610579c7d6f5a97b643
|
e14b70a36e096ee24074b76253ac1334be66d903
|
/extranet/modules/oauthprovider/controllers/authorize.py
|
bf79ab060b0d595e21d02d8c7a86fecda8b7a4e4
|
[] |
no_license
|
lodi-g/extranet
|
e3df378222567f6831b4b8ee4545403bffb2462a
|
9252e13364574cbd0c6a9aa53de91a323115c776
|
refs/heads/master
| 2021-08-08T07:46:55.758088
| 2017-11-09T00:44:12
| 2017-11-09T01:16:34
| 110,174,987
| 0
| 0
| null | 2017-11-09T22:49:14
| 2017-11-09T22:49:14
| null |
UTF-8
|
Python
| false
| false
| 1,637
|
py
|
from flask import render_template, request, flash, session
from flask_login import login_required, current_user, login_fresh
from werkzeug.security import gen_salt
from extranet import usm
from extranet.modules.oauthprovider import bp
from extranet.connections.extranet import provider as extranet_provider
from extranet.connections.extranet import scopes as defined_scopes
from extranet.models.oauth import OauthApp, OauthToken
def render_authorize(*args, **kwargs):
    """Render the OAuth accept/deny dialog.

    Looks up the requesting OAuth application by ``client_id``, stores a
    fresh one-time token ("snitch") in the session to tie the rendered form
    to the eventual POST, and passes the app, request and known scopes to
    the template.
    """
    app_id = kwargs.get('client_id')
    app = OauthApp.query.filter_by(client_id=app_id).first()
    kwargs['app'] = app
    # Per-render CSRF-style nonce; checked again when the form is POSTed.
    session['oauthprovider.snitch'] = gen_salt(32)
    kwargs['snitch'] = session['oauthprovider.snitch']
    kwargs['request'] = request
    kwargs['defined_scopes'] = defined_scopes
    return render_template('authorize.html', **kwargs)
@bp.route('/authorize', methods=['GET', 'POST'])
@login_required
@extranet_provider.authorize_handler
def authorize(*args, **kwargs):
    """OAuth authorization endpoint.

    Returning True grants the authorization, False denies it (semantics of
    the flask-oauthlib ``authorize_handler`` decorator); returning a
    response object renders that page instead.
    """
    # Bypass accept/deny form if already accepted (has token).
    # NOTE(review): this matches any token owned by the user, regardless of
    # which client it was issued for -- confirm cross-client auto-approval
    # is intended.
    if OauthToken.query.filter_by(user_id=current_user.id).first() is not None:
        return True
    # Require a fresh login before showing the authorize/deny dialog.
    if not login_fresh():
        return usm.needs_refresh()
    # Render the accept/deny form on GET.
    if request.method == 'GET':
        return render_authorize(*args, **kwargs)
    # Verify POST legitimacy: the form must echo the session nonce set when
    # the dialog was rendered.
    if 'oauthprovider.snitch' not in session or session['oauthprovider.snitch'] != request.form.get('snitch'):
        flash('Something went wrong, please retry.')
        return render_authorize(*args, **kwargs)
    confirm = request.form.get('confirm', 'no')
    return confirm == 'yes'
|
[
"jordan.demaison@gmail.com"
] |
jordan.demaison@gmail.com
|
8a7df27444a69a45468f9ca79c391874c7e44342
|
f6fd01eaa74ace15ffc085065a51681565bf2740
|
/api_test/api/user.py
|
63fa52bd6a00572a8fbe028ee2e82684c1645a62
|
[] |
no_license
|
emmashen6786/api_test
|
d05c89a718b7441bb7f4d099792f873afa782056
|
51486a4c05548b410e360777c2e93d1c954bfa06
|
refs/heads/master
| 2020-05-15T03:10:40.762906
| 2019-04-19T02:25:08
| 2019-04-19T02:25:08
| 182,062,758
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,316
|
py
|
from rest_framework import parsers, renderers
from rest_framework.authtoken.models import Token
from rest_framework.authtoken.serializers import AuthTokenSerializer
from rest_framework.views import APIView
from api_test.serializers import TokenSerializer
from api_test.common.api_response import JsonResponse
class ObtainAuthToken(APIView):
    """Login endpoint: validate credentials and return the user's auth token.

    Throttling and permissions are disabled because this is the entry point
    that unauthenticated clients call to obtain a token in the first place.
    """
    throttle_classes = ()
    permission_classes = ()
    parser_classes = (parsers.FormParser, parsers.MultiPartParser, parsers.JSONParser,)
    renderer_classes = (renderers.JSONRenderer,)
    serializer_class = AuthTokenSerializer

    def post(self, request, *args, **kwargs):
        """Handle user login.

        Validates username/password via AuthTokenSerializer (raises a 400
        validation error on bad credentials), looks up the user's token and
        returns it in the project's JSON envelope.

        :param request: incoming DRF request carrying the credentials
        :return: JsonResponse with the serialized token and a photo path
        """
        serializer = self.serializer_class(data=request.data,
                                           context={"request": request})
        serializer.is_valid(raise_exception=True)
        user = serializer.validated_data["user"]
        # NOTE(review): assumes a Token row already exists for this user;
        # Token.DoesNotExist will propagate otherwise.  An earlier revision
        # used Token.objects.get_or_create(user=user) -- confirm which
        # behavior is wanted.
        data = TokenSerializer(Token.objects.get(user=user)).data
        # Static placeholder until per-user photos are stored.
        data["userphoto"] = '/file/userphoto.jpg'
        return JsonResponse(data=data, code="999999", msg="成功")


obtain_auth_token = ObtainAuthToken.as_view()
|
[
"shanshan.shen@dianrong.com"
] |
shanshan.shen@dianrong.com
|
ef14a9785ed07e3532880cdcce6fc0279460ecb9
|
6bea11a8b7368124f7b50f6a0775da838532ec98
|
/Dbase/Sql/cleardb.py
|
5af10cd91ee941e95e1e2345144cf5f47d584570
|
[] |
no_license
|
wenhanglei/PP4E-practice
|
aaab31a4e602902277a6cf49a63aaed04cf57d3c
|
b46c4029addd7a67fa0f625695f09c2e7a833546
|
refs/heads/master
| 2021-01-20T06:07:19.142580
| 2017-07-22T07:17:49
| 2017-07-22T07:17:49
| 89,846,018
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 390
|
py
|
"""
清除表中的数据
"""
import sys
if input('Are you sure?').lower() not in ('y', 'yes'):
sys.exit()
dbname = sys.argv[1] if len(sys.argv) > 1 else 'dbase1'
table = sys.argv[2] if len(sys.argv) > 2 else 'people'
from loaddb import login
conn, curs = login(dbname)
curs.execute('delete from ' + table)
#print(curs.rowcount, 'records deleted')
conn.commit()
|
[
"3188744608@qq.com"
] |
3188744608@qq.com
|
93ff846d9e696fd2a04801945c98c805d84766e0
|
08bc071c11338a2a1fd05e9c8bb1505d16492a14
|
/tests/test_basic.py
|
8a891581c7a26f98819983fea640ca23af02b154
|
[
"MIT"
] |
permissive
|
flingflingfling/flasky
|
f3a33b7423a47a497ea43e1dcff84de29477c0f4
|
7117023bf69180b8eacae9dde69c621668ddf11d
|
refs/heads/master
| 2021-06-04T06:04:30.094170
| 2016-07-17T11:29:58
| 2016-07-17T11:29:58
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 783
|
py
|
#coding:utf8
'''
this is a unittest demo,
it is just a simply testing program
have fun
'''
import unittest
from flask import current_app
from app import create_app, db
class BasicsTestCase(unittest.TestCase):
    """Smoke tests: the app factory builds a working 'testing' instance."""

    def setUp(self):  # create a fresh app with the test config and a clean db
        self.app = create_app('testing')
        self.app_context = self.app.app_context()
        self.app_context.push()
        db.create_all()

    def tearDown(self):  # drop everything created in setUp
        db.session.remove()
        db.drop_all()
        self.app_context.pop()

    def test_app_exists(self):  # the application instance was created
        self.assertFalse(current_app is None)

    def test_app_testing(self):  # the 'testing' config is actually active
        self.assertTrue(current_app.config['TESTING'])
|
[
"fly@purplemapledb.maple"
] |
fly@purplemapledb.maple
|
ab31f35f9772ff24371b4e4edd9c0ee518c224a7
|
3820d5b11619a2205426b0203f937495eea45611
|
/learnOne.py
|
68f21de50199a61a76112210598d18afb8fdde57
|
[] |
no_license
|
rwq9866/learnPython
|
1f490418ec51de2885a60b10f5d160d541712fb3
|
fbc9fc05bf47dccedfbe4b78ee130e81706f4ed2
|
refs/heads/master
| 2020-06-15T05:23:43.850012
| 2019-07-04T09:44:09
| 2019-07-04T09:44:09
| 195,214,159
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,531
|
py
|
# -*- coding: utf-8 -*-
# Python basics tutorial script: output/input, numbers, strings, lists,
# tuples, sets and dictionaries (comments translated to English).

# --- Output ---
print("hello world!!!") # print() appends a newline by default
print("hello python!!!", end="这里面想写啥写啥 空格什么的 怎么喜欢怎么来 ") # end= replaces the trailing newline

# --- Input ---
demo = input("请输入您的姓名,按enter键结束: ")
print(demo, ",欢迎来到Python的世界!")

# --- Numeric types (immutable) ---
# 1. integers, e.g. 1
# 2. floats, e.g. 1.1, 1.6e9 (1.6 * 10**9)
# 3. booleans: True (1), False (0)
# 4. complex numbers, e.g. 1 + 1j
# Arithmetic: division differs from Java; ** is exponentiation, e.g. 2 ** 3 == 8
demo = 10 / 3 # true division: always yields a float
print(demo) # 3.3333333333333335
demo = 10 // 3 # floor division
print(demo) # 3

# --- Strings (immutable) ---
# 1. concatenate with ',' (adds a space), with '+', or via formatting (below)
# NOTE: '+' needs str() for numbers, e.g. str(2) + "22"
demo = 'demo'
print(ord('p'), ord('y'), ord('t'), ord('h'), ord('o'), ord('n')) # 112 121 116 104 111 110  char -> code point
print(chr(112), chr(121), chr(116), chr(104), chr(111), chr(110)) # p y t h o n  code point -> char
print(len(demo)) # 4  string length
print(str(True) + "555") # True555  concatenation
print(1, "de") # 1 de
print(demo * 2) # demodemo
print(demo[0:2]) # de  slicing
print(demo[1]) # e  character at index
print(demo[1:]) # emo
print(r'demo\nceshi') # demo\nceshi  r'' disables escape processing
# 2. string formatting (more verbose than '+', but worth knowing)
# %s (string) %d (int) %f (float) %x (hex int); %s alone handles most cases
print('我今天(%s)心情还好,但是这个天气(%s)却不怎么滴!' % ('2019-7-4', '阴天'))
print('%03d' % 1) # 001  zero-pads to a minimum width of 3
print('%.1f' % 6.84888888) # 6.8  one decimal place (rounded)
print('我今天({0})心情还好,但是这个天气({1})却不怎么滴!'.format('2019-7-4', '阴天')) # str.format() alternative

# --- Lists (mutable, ordered) ---
# 1. slicing, concatenation and '*' repetition work just like strings
# 2. ordered; elements can be changed
# 3. [] is an empty list
demoList = [6, 6.6, True, 'demo', [6, 6.6, True]]
print(demoList[1:3]) # [6.6, True]
demoList.append('ceshi')
print(demoList) # append at the end [6, 6.6, True, 'demo', [6, 6.6, True], 'ceshi']
demoList.insert(1, "demo")
print(demoList) # insert at an index [6, 'demo', 6.6, True, 'demo', [6, 6.6, True], 'ceshi']
demo = demoList.pop(2) # pop removes and returns an element; with no arg it removes the last
print(demo) # 6.6
demoList[0] = 8
print(demoList) # replace an element by index

# --- Tuples (like lists, but immutable) ---
# 1. slicing, concatenation and '*' repetition work just like strings
# 2. ordered; elements cannot be changed
# 3. () is an empty tuple; a one-element tuple needs a trailing comma: (1,)
demoTuple = (6, 6.6, True, 'demo', [6, 6.6, True])

# --- Sets ---
# 1. create with {...} or set(iterable)
# 2. an empty set must be written set() ({} is a dict)
# 3. unordered, no duplicates
# 4. elements behave much like dict keys
# 5. hence sets support union and intersection
demoSet = {6, 6.66, True, 'demo'}
print(demoSet) # {'demo', True, 6, 6.66}
demoSet = set([6, 6.666, True, 'demo'])
print(demoSet) # {'demo', True, 6, 6.666}
# demoSet = set('222') # uncommon form; rarely useful
demoSet.add("ceshi") # add an element
print(demoSet) # {'demo', True, 6, 6.666, 'ceshi'}
demoSet.remove(6)
print(demoSet) # {'demo', True, 6.666, 'ceshi'}
demoSet1 = {6, 6.66, True, 'demo'}
print(demoSet & demoSet1) # intersection {True, 'demo'}
print(demoSet | demoSet1) # union {True, 'demo', 6.666, 6, 6.66, 'ceshi'}

# --- Dictionaries (very similar to Java's Map) ---
# 1. {} is an empty dict
# 2. test membership with: key in demoDictionary  (returns True/False)
demoDictionary = {"name":"muyou", "age":26, 'gender':'男'}
print(demoDictionary["name"]) # look up a value by key
print(demoDictionary.keys()) # all keys  dict_keys(['name', 'age', 'gender'])
print(demoDictionary.values()) # all values  dict_values(['muyou', 26, '男'])
print(demoDictionary.pop("gender")) # remove an entry and return its value
|
[
"841557177@qq.com"
] |
841557177@qq.com
|
195a6562afd3acd4081df6c6e6f645bbeae2f17a
|
2171efa295b695a485d0125ebc91158122f46493
|
/django_intro/venv/bin/easy_install-3.8
|
a35674f13453445504009d021be0014136107cc1
|
[] |
no_license
|
bennami/django-intro
|
a4b0fa7488a7abeb687e178f56b8f4543a8c8f73
|
97deb170908823717dfe63388288e8836dd88c98
|
refs/heads/master
| 2022-11-16T12:44:37.625401
| 2020-07-07T12:33:57
| 2020-07-07T12:33:57
| 277,811,325
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 270
|
8
|
#!/home/imane/PycharmProjects/django_intro/venv/bin/python
# -*- coding: utf-8 -*-
# Auto-generated setuptools console-script shim for easy_install.
# Strips a trailing "-script.pyw"/".exe" suffix from argv[0] (an artifact of
# the Windows script launcher) and delegates to setuptools' easy_install.
import re
import sys

from setuptools.command.easy_install import main

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
|
[
"benbenimane@hotmail.com"
] |
benbenimane@hotmail.com
|
c5ec95c66cfe71d29bb1d882b8fb66830103b339
|
8fb32802501815a06728c438d1485ded121e2fc8
|
/py/blender/TimerScript.py
|
8a0698dbdb5e731dd0c377ef28a2ee60ce3c8b87
|
[
"MIT"
] |
permissive
|
grimlock-/randomscripts
|
c5121d05a2dc80cffda410d9f3eb9245763c763d
|
b3ee95e0abdaf1320318d87b37f15b5ccee220f4
|
refs/heads/master
| 2022-02-28T10:47:51.617810
| 2019-08-07T05:22:02
| 2019-08-07T05:22:02
| 36,908,573
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,598
|
py
|
#This script is used to create a timer when making a video in the Blender VSE.
#When executed, the script grabs every marker on the timeline and groups them
#in pairs by name. Markers should be named in the pattern <sectionname>.Start
#and <sectionname>.End and there can be no more than two markers per section.
#Every section should have an associated text strip with the naming pattern
#<sectionname>.Timer
#WARNING: Each *.Start marker should be created before its associated *.End
#marker. Otherwise they will appear to the script in reverse order and the
#timer for that section will not work.
import bpy

scene = bpy.data.scenes['Scene']

# Build (section_name, start_frame, end_frame) tuples from paired markers.
marks = []
st = -1          # start frame of the section currently being collected
nm = ''          # name of the section currently being collected
for marker in scene.timeline_markers:
    i = marker.name.find('Start')
    if i != -1:
        # '<section>.Start' marker: remember where this section begins.
        st = marker.frame
        nm = marker.name
    else:
        i = marker.name.find('End')
        if i != -1:
            # '<section>.End' marker: close the pair.  nm is truncated to
            # the text before 'End' (includes the trailing '.').
            nm = marker.name[:i]
            marks.append((nm, st, marker.frame))
            # NOTE(review): st is reset to 0 here but initialized to -1
            # above -- confirm the inconsistency is intentional.
            st = 0
            nm = ''
        else:
            print('Unknown label: ' + marker.name)

for i in marks:
    print(i)

def frame_step(scene):
    """Frame-change handler: update the text strip of whichever section
    contains the current frame with the elapsed time in seconds."""
    for item in marks:
        if scene.frame_current >= item[1] and scene.frame_current < item[2]:
            # item[0] already ends with '.', so this forms '<section>.Timer'.
            obj = scene.sequence_editor.sequences_all[item[0] + 'Timer']
            fps = scene.render.fps / scene.render.fps_base # actual framerate
            cur_frame = scene.frame_current - item[1]
            obj.text = '{0:.3f}'.format(cur_frame/fps)
            break

# Run on every frame change in the viewport and before each rendered frame.
bpy.app.handlers.frame_change_pre.append(frame_step)
bpy.app.handlers.render_pre.append(frame_step)
|
[
"gtm.mason@gmail.com"
] |
gtm.mason@gmail.com
|
5aae57ca5da47c5876a5d1998f846871038a0fa0
|
009cca46aed9599d633441b044987ae78b60685a
|
/scripts/submitJLabJob_sample.py
|
51c99e1ea1f8794af4f950771c0051f69c129d25
|
[] |
no_license
|
cipriangal/QweakG4DD
|
0de40f6c2693021db44916e03d8d55703aa37387
|
5c8f55be4ba0ec3a3898a40c4e1ff8eb42c550ad
|
refs/heads/master
| 2021-01-23T10:29:53.163237
| 2018-06-25T19:03:24
| 2018-06-25T19:03:24
| 29,534,503
| 0
| 4
| null | 2017-03-27T21:06:14
| 2015-01-20T14:45:37
|
C++
|
UTF-8
|
Python
| false
| false
| 5,539
|
py
|
#!/usr/bin/python
from subprocess import call
import sys,os,time
def main():
    """Configure and (optionally) submit a batch of QweakSimG4 farm jobs.

    For each job index, writes a Geant4 macro and stages the executable and
    base macro into a per-job directory, then writes a single Auger XML
    request covering all jobs.  Submission only happens when submit==1.
    NOTE: this file is Python 2 (print statements).
    """
    #center, x,y,z=0,335,560
    _xP=0.
    _yP=335.0
    _zP=560.
    _Px=0.#deg
    _Py=0.
    _beamE=1160#MeV
    _email="ciprian@jlab.org"
    _source="/lustre/expphy/work/hallc/qweak/ciprian/simCodeG410/QweakG4DD"
    _directory="/lustre/expphy/volatile/hallc/qweak/ciprian/farmoutput/g41001p01/sample/moustaches/withShower/md3"
    _tracking=2 #0=primary only | 1=prim + opt photon | 2=no optical ph and 10x faster than 3=full
    _stpSize=-0.02
    _nEv=100000
    _nrStop=100
    _nrStart=0
    _pol="V"
    modTrj=0 ## 0:standard G4 propagation(wght sims) 1:debug print == big NONO! 2: modifyTraj
    submit=0   # 1 = actually jsub the XML request; 0 = dry run
    nDist=203
    sample=1   # 1 = sample primaries from input distributions
    #idRoot= _pol+'_sampled_%03dk'% (_nEv/1000)
    idRoot= _pol+'_sampled'+str(nDist)+'_%03dk'% (_nEv/1000)
    for nr in range(_nrStart,_nrStop): # repeat for nr jobs
        _idN= idRoot+'_%05d'% (nr)
        print _idN
        createMacFile(_directory,_idN,_xP,_yP,_zP,_Px,_Py,_tracking,_beamE,_nEv,nr,modTrj,sample,_pol,_stpSize)

        ##create input files
        # if sample==1:
        #     if _pol=="V":
        #         call("root -l -q -b ../rootScripts/samplePrimaryDist.C\\("+str(_nEv)+",1,"+str(nDist)+"\\)",shell=True)
        #     else:
        #         call("root -l -q -b ../rootScripts/samplePrimaryDist.C\\("+str(_nEv)+",-1,"+str(nDist)+"\\)",shell=True)
        #     call(["mv","positionMomentum.in",_directory+"/"+_idN+"/positionMomentum.in"])
        #     call(["mv","polarization.in",_directory+"/"+_idN+"/polarization.in"])

        # Stage the executable and base macro into the per-job directory.
        call(["cp",_source+"/build/QweakSimG4",_directory+"/"+_idN+"/QweakSimG4"])
        call(["cp",_source+"/myQweakCerenkovOnly.mac",_directory+"/"+_idN+"/myQweakCerenkovOnly.mac"])

    createXMLfile(_source,_directory,idRoot,_nrStart,_nrStop,_email,sample)
    if submit==1:
        print "submitting position sampled with id",_idN," between ",_nrStart,_nrStop
        call(["jsub","-xml",_source+"/scripts/jobs/"+idRoot+".xml"])
    else:
        print "NOT submitting position sampled with id",_idN," between ",_nrStart,_nrStop

    print "I am all done"
def createMacFile(directory,idname,
                  xPos,yPos,zPos,
                  Px,Py,tracking,
                  beamE,nEv,nr,modTrj,sample,pol,stpSize):
    """Write the per-job Geant4 macro <directory>/<idname>/myRun.mac.

    Creates the job's log directory, then emits beam position/direction,
    primary-generation mode (fixed vs. sampled), tracking flags, RNG seeds
    derived from wall-clock time and the job index, and the beamOn command.
    Always returns 0.
    """
    if not os.path.exists(directory+"/"+idname+"/log"):
        os.makedirs(directory+"/"+idname+"/log")
    f=open(directory+"/"+idname+"/myRun.mac",'w')
    f.write("/control/execute myQweakCerenkovOnly.mac\n")
    f.write("/PrimaryEvent/SetBeamPositionX "+str(xPos)+" cm\n")
    f.write("/PrimaryEvent/SetBeamPositionY "+str(yPos)+" cm\n")
    f.write("/PrimaryEvent/SetBeamPositionZ "+str(zPos)+" cm\n")
    f.write("/PrimaryEvent/SetBeamDirectionX "+str(Px)+" deg\n")
    f.write("/PrimaryEvent/SetBeamDirectionY "+str(Py)+" deg\n")
    if sample==1:
        # Sampled mode: positions/momenta come from input files.
        f.write("/PrimaryEvent/SetFixedPosMom false\n")
        f.write("/PrimaryEvent/SetPolarization f\n")
    else:
        f.write("/PrimaryEvent/SetFixedPosMom true\n")
        f.write("/PrimaryEvent/SetPolarization "+str(pol)+"\n")
    f.write("/PhysicsProcesses/settingFlag "+str(modTrj)+"\n")
    f.write("/EventGen/SetBeamEnergy "+str(beamE)+" MeV\n")
    f.write("/TrackingAction/TrackingFlag "+str(tracking)+"\n")
    f.write("/EventGen/SelectOctant 3\n")
    # Seeds mix wall-clock time with the job index so each job differs.
    seedA=int(time.time()/2000.)+ 100*nr+nr
    seedB=int(time.time()/300. ) +10000*nr+nr
    f.write("/Cerenkov/SetPbStepSize "+str(stpSize)+" mm\n");
    f.write("/HallC/GeometryUpdate\n");
    f.write("/random/setSeeds "+str(seedA)+" "+str(seedB)+"\n")
    f.write("/run/beamOn "+str(nEv)+"\n")
    f.close()
    return 0
def createXMLfile(source,writeDir,idRoot,nStart,nStop,email,sample):
    """Write the Auger batch-request XML covering jobs [nStart, nStop).

    One <Job> element per index: stages the executable and macros in, and
    routes the ROOT outputs and stdout/stderr logs back to the per-job
    directory.  Sampled runs additionally ship the primary-distribution
    input files.  Always returns 0.
    """
    if not os.path.exists(source+"/scripts/jobs"):
        os.makedirs(source+"/scripts/jobs")
    f=open(source+"/scripts/jobs/"+idRoot+".xml","w")
    f.write("<Request>\n")
    f.write("  <Email email=\""+email+"\" request=\"false\" job=\"true\"/>\n")
    f.write("  <Project name=\"qweak\"/>\n")
    # f.write("  <Track name=\"debug\"/>\n")
    f.write("  <Track name=\"simulation\"/>\n")
    f.write("  <Name name=\""+idRoot+"\"/>\n")
    f.write("  <OS name=\"centos7\"/>\n")
    f.write("  <Command><![CDATA[\n")
    f.write("QweakSimG4 myRun.mac\n")
    f.write("  ]]></Command>\n")
    f.write("  <Memory space=\"2000\" unit=\"MB\"/>\n")
    for nr in range(nStart,nStop): # repeat for nr jobs
        f.write("  <Job>\n")
        idName= writeDir+"/"+idRoot+'_%05d'%(nr)
        f.write("    <Input src=\""+idName+"/QweakSimG4\" dest=\"QweakSimG4\"/>\n")
        f.write("    <Input src=\""+idName+"/myQweakCerenkovOnly.mac\" dest=\"myQweakCerenkovOnly.mac\"/>\n")
        f.write("    <Input src=\""+idName+"/myRun.mac\" dest=\"myRun.mac\"/>\n")
        if sample==1:
            f.write("    <Input src=\""+idName+"/positionMomentum.in\" dest=\"positionMomentum.in\"/>\n")
            f.write("    <Input src=\""+idName+"/polarization.in\" dest=\"polarization.in\"/>\n")
        f.write("    <Output src=\"QwSim_0.root\" dest=\""+idName+"/QwSim_0.root\"/>\n")
        f.write("    <Output src=\"o_tuple.root\" dest=\""+idName+"/o_tuple.root\"/>\n")
        f.write("    <Stdout dest=\""+idName+"/log/log.out\"/>\n")
        f.write("    <Stderr dest=\""+idName+"/log/log.err\"/>\n")
        f.write("  </Job>\n\n")
    f.write("</Request>\n")
    f.close()
    return 0

if __name__ == '__main__':
    main()
|
[
"ciprian@jlab.org"
] |
ciprian@jlab.org
|
31abc275139c5e9004c2b45bc340bbde3de00afd
|
e5d4ce431548bea004fad8db462467bbce210492
|
/accounts/urls.py
|
a7a46f5cd6ff1d7bb8d1f15d4cf6e96535790b5f
|
[] |
no_license
|
emirmaydemir/blogprojesiCepMarketim
|
6f95e307c8cb870b1601d43179420ecd53a5d1c7
|
47635e00ae72355c170deb5c5cab4c8939fafe07
|
refs/heads/master
| 2023-02-05T10:51:54.476227
| 2020-12-22T11:34:48
| 2020-12-22T11:34:48
| 323,609,292
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 331
|
py
|
from django.urls import path
from .views import *
from post.views import post_index, post_detail
from django.urls import re_path
# URL namespace for reversing, e.g. reverse('accounts:login').
app_name='accounts'

urlpatterns = [
    # NOTE(review): these patterns are unanchored (no leading '^'), so any
    # URL *ending* in 'login/' etc. will match -- confirm this is intended.
    re_path(r'login/$',login_view,name='login'),
    re_path(r'register/$', register_view, name='register'),
    re_path(r'logout/$', logout_view, name='logout'),
]
|
[
"emirmaydemir@gmail.com"
] |
emirmaydemir@gmail.com
|
b00f6542c6e1995eda6a3909f6de3a865280c095
|
16147da7fb23a54e3604bcf046b418755d92e9fe
|
/src/run_treemask.py
|
603d2c4aa870f8133b26b76e934608503c022dc4
|
[] |
no_license
|
tcc7496/veggie_code
|
28861e0274231f220b7c72fd678731590d45b8c6
|
5fb3fb0a7f6c32d3c56774183dea731601c9b972
|
refs/heads/master
| 2023-08-05T02:06:49.283707
| 2021-09-17T16:47:42
| 2021-09-17T16:47:42
| 352,734,400
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 697
|
py
|
'''
A script to extract the tree mask from the plant species map
'''
#######################################
from tree_mask import *
import click


@click.command()
# BUG FIX: click.argument() does not accept a ``help`` parameter (only
# click.option() does); passing one raises TypeError at import time.
# Arguments are documented in the command docstring instead, per click
# convention.
@click.argument('file', type=click.Path(exists=True))
@click.argument('outfile', type=click.Path())
@click.option('-a', '--aoi', default=None, type=click.Path(), help='optional area of interest shapefile')
def main(file, outfile, aoi):
    """Extract the tree mask.

    FILE is the full filepath to the input plant species map.
    OUTFILE is the full filepath (including filename) of the output geotiff.
    """
    tree_mask(file, outfile, aoi)
#######################################

if __name__ == "__main__":
    ''' Main block '''
    main()
#######################################
|
[
"tcc78128@gmail.com"
] |
tcc78128@gmail.com
|
26eb004d0f44e7ffb5285a806f4ef704d73d851b
|
861327f7758b7ee3447584041e4918145b72d97f
|
/lmvelisa2.py
|
89404e350f0262c57a9b38306e4e10b1eec935da
|
[] |
no_license
|
tsamartino/ELISA
|
3b8df19b2fd9c82ac6bb4c25447ac04b266aebf5
|
799b409e284bdb2387d5260ce2d86981652ca9ed
|
refs/heads/master
| 2021-01-01T18:11:29.877640
| 2015-06-19T21:58:53
| 2015-06-19T21:58:53
| 37,539,860
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,440
|
py
|
import statistics
import os

# Input file: tab-separated plate-reader export for one ELISA plate.
source = "0633-1.txt"

#Open plate and print name: "15-" + the filename without its extension.
plate = open(source, 'r')
plate_name = "15-"
for char in source:
    if char == ".":
        break
    else:
        plate_name += char
print ("Plate name:", plate_name)
# Skip the three header lines of the export.
plate.readline()
plate.readline()
plate.readline()

#Creates dictionary sorted by row (first 8 data lines, rows A-H).
#To access A1 - row_dict['A'][0]
row_dict = {}
i = 0
for line in plate:
    if i < 8:
        list_of_values = line.split('\t')
        # Key on the row letter (first character of the line); keep the
        # 12 O.D. readings (columns 1-12 of the tab-split line).
        row_dict[line[0]] = list_of_values[1:13]
        i += 1
plate.close()

#Creates dictionary sorted by column (values converted to float).
#To access A1 - column_dict[1][0]
column_dict = {}
for number in range(1,13):
    column_list = []
    for item in sorted(row_dict):
        column_list.append(row_dict[item][number-1])
        # NOTE(review): this assignment is immediately overwritten by the
        # one below -- it looks like dead code left from a refactor.
        column_dict[number] = row_dict[item][number-1]
    column_dict[number] = [float(i) for i in column_list]

#Creates dictionary sorted by well number (column-major: wells 1-8 are
#column 1 rows A-H, and so on).
#To access A1 - well_dict[1]
well_dict = {}
well_number = 1
for item in column_dict:
    for x in range(0,8):
        well_dict[well_number] = float(column_dict[item][x])
        well_number += 1
#Establishes where controls are in the plate.
#Creates list for each type of control:
#  column `control_column` holds rows A-C negative, D-F positive, G-H buffer.
control_column = 9
#If no Agdia control is loaded, this value should == 0
agdia_well = 64
negative_list = column_dict[control_column][0:3]
positive_list = column_dict[control_column][3:6]
buffer_list = column_dict[control_column][6:8]
agdia_control = well_dict[agdia_well]
# Control wells excluded from the positive-sample report below.
positive_wells = [agdia_well, (((control_column-1)*8)+4), (((control_column-1)*8)+5), (((control_column-1)*8)+6)]

#Calculates mean and standard deviation for each control type.
negative_mean = statistics.mean(negative_list)
positive_mean = statistics.mean(positive_list)
buffer_mean = statistics.mean(buffer_list)
negative_stdev = statistics.stdev(negative_list)
positive_stdev = statistics.stdev(positive_list)
buffer_stdev = statistics.stdev(buffer_list)

#Calculates the cut-off value (3x the negative-control mean) and the
#signal:noise ratio from the control means.
cutoff = negative_mean * 3
sig_to_noise = positive_mean/negative_mean

#Print statistics for the plate.
print("""
The cut-off was set at %.4f
The signal-to-noise ratio is %.2f
""" % (cutoff, sig_to_noise))

#Determines if a well is negative or positive and prints positive wells
#with their O.D. value; control wells are skipped.
sample_size = 60
count = 0
for item in well_dict:
    if well_dict[item] >= cutoff:
        if item in positive_wells:
            continue
        else:
            count += 1
            print ("Well",item,"is POSITIVE with O.D. value of",well_dict[item])
print ("%s out of %s sub-samples tested positive for LMV" % (count, sample_size))

#Builds list with only relevant sample data and mean of the controls
#(row format: [label, O.D. reading, cutoff] -- the chart's data rows).
sample = [['Sub-sample', 'O.D. reading', 'Cut-off']]
for x in range(sample_size):
    sample.append([str(x+1), well_dict[x+1], cutoff])
sample.append(['Agdia control', well_dict[agdia_well], cutoff])
sample.append(['Negative control mean', negative_mean, cutoff])
sample.append(['Positive control mean', positive_mean, cutoff])
sample.append(['Buffer control mean', buffer_mean, cutoff])
#Generates an html report using Google Visualization Combo Chart
#(bars for O.D. values, line for the cutoff).
report = """
<html>
  <head>
    <script type="text/javascript" src="https://www.google.com/jsapi"></script>
    <script type="text/javascript">
      google.load("visualization", "1", {packages: ["corechart"]});
      google.setOnLoadCallback(drawVisualization);

      function drawVisualization() {
        // Plate data
        var dataTable = new google.visualization.DataTable();
        dataTable.addColumn({ type: 'string', id: 'Sub-sample'})
        dataTable.addColumn({ type: 'number', id: 'O.D. reading'})
        dataTable.addColumn({ type: 'number', id: 'Cut-off'})
        dataTable.addRows(%s)

        var options = {
          title : 'ELISA results for plate %s',
          vAxis: {title: "O.D. reading"},
          hAxis: {title: "Sub-sample"},
          seriesType: "bars",
          series: {1: {type: "line"}}
        };

        var chart = new google.visualization.ComboChart(document.getElementById('chart_div'));
        chart.draw(dataTable, options);
      }
    </script>
  </head>
  <body>
    <div id="chart_div" style="width: 900px; height: 500px;"></div>
  </body>
</html>
""" % (sample[1:], plate_name)

# Write the report next to the input file, as <source-stem>.html.
output_filename = ".".join([source.split(".")[0], "html"])
with open(output_filename, "w") as handle:
    handle.write(report)
#os.system("open %s" % output_filename)

# --- Experimental alternative chart (work in progress; the resulting HTML
# below is built but never written to a file). ---
import json

sorted_wells = [['Sub-sample', 'O.D. reading', 'Cut-off']]
sorted_wells += [[str(i), well_dict[i], cutoff] for i in sorted(well_dict.keys())]
encoded_wells = json.dumps(sorted_wells)

# NOTE(review): this template is formatted with `sample` (a Python repr, not
# valid JSON) even though `encoded_wells` was just built -- presumably
# `encoded_wells` was intended.  Confirm before using this block.
html_container = """
<!doctype html>
<html>
  <head>
    <title>Report</title>
    <script type="text/javascript" src="https://www.google.com/jsapi"></script>
    <script>
      var data = %s;
      google.load('visualization', '1.0', {'packages':['corechart']});
      function doneDrawingChart() {
        var table = new google.visualization.DataTable();
        table.addColumn('string', 'Well', 'Cutoff');
        table.addColumn('number', 'OD', 'Cutoff');
        table.addRows(data);
        var options = {'title':'Seeds and shit',
                       'width':800,
                       'height':700};
        var chart = new google.visualization.ColumnChart(document.getElementById("chart"));
        chart.draw(table, options);
        console.log("Finished drawing chart.", data);
      };
      google.setOnLoadCallback(doneDrawingChart);
    </script>
  </head>
  <body>
    Report results
    <div id="chart"></div>
    <script></script>
  </body>
</html>
""" % (sample)
|
[
"tsamartino@gmail.com"
] |
tsamartino@gmail.com
|
78a758b50b7c3ecb4bb6e5761d61565d2eb317a5
|
2c5b25d0b5d6ba66d013251f93ebf4c642fd787b
|
/wrong_answer_codes/Contiguous_Array/Contiguous Array_324757576.py
|
1c620fdc45f25037006caf70d00f3c54a4797b19
|
[] |
no_license
|
abhinay-b/Leetcode-Submissions
|
da8099ac54b5d36ae23db42580064d0f9d9bc63b
|
d034705813f3f908f555f1d1677b827af751bf42
|
refs/heads/master
| 2022-10-15T22:09:36.328967
| 2020-06-14T15:39:17
| 2020-06-14T15:39:17
| 259,984,100
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 787
|
py
|
class Solution:
    def findMaxLength(self, nums: List[int]) -> int:
        """Return the length of the longest contiguous subarray containing
        an equal number of 0s and 1s.

        BUG FIX: the previous sliding-window implementation produced wrong
        answers (this file sits under wrong_answer_codes/) because a
        shrinking window cannot recover balanced spans that begin earlier.
        The standard technique: treat 0 as -1 and keep a running sum; two
        positions with the same running sum bound a balanced subarray, so
        remembering the first index of each sum gives the answer in O(n)
        time and O(n) space.
        """
        first_seen = {0: -1}  # running sum -> earliest index it was seen at
        running = 0
        best = 0
        for idx, num in enumerate(nums):
            running += 1 if num else -1
            if running in first_seen:
                # Subarray (first_seen[running], idx] is balanced.
                best = max(best, idx - first_seen[running])
            else:
                first_seen[running] = idx
        return best
|
[
"abhinayb.sssihl@gmail.com"
] |
abhinayb.sssihl@gmail.com
|
7975a24774fc1e6eb1d6b45c1bbbc4badcec3213
|
8d6bde798a6103fe43c4ba4e231d4f90411d68f0
|
/day-34/quizzler-app/ui.py
|
bd77df45aa224b737faf1a7fcfa8c5428ac36f9a
|
[] |
no_license
|
lhserafim/python-100-days-of-code-monorepo
|
66dd3545e264224af84a1d3b7694681c44e391f1
|
de03d0979bb4c1c770cd79b5f01dda72ae2bb375
|
refs/heads/master
| 2023-05-30T10:28:06.206198
| 2021-06-14T14:56:33
| 2021-06-14T14:56:33
| 363,091,910
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,570
|
py
|
from tkinter import *
from quiz_brain import QuizBrain
THEME_COLOR = "#375362"
class QuizInterface:
# DICA. Estou atribuindo um tipo para o meu parâmetro. Isto é opcional mas ajuda no autocomplete.
# Isto é similar ao TypeScript
def __init__(self, quiz_brain: QuizBrain):
self.quiz = quiz_brain
self.window = Tk()
self.window.title("Quizzler")
self.window.config(padx=20, pady=20, bg=THEME_COLOR)
self.score_label = Label(text="Score: 0", fg="white", bg=THEME_COLOR)
self.score_label.grid(column=1, row=0)
self.canvas = Canvas(width=300, height=250, bg="white", highlightthickness=0)
self.question_text = self.canvas.create_text(150, 125,
text="Amazon acquired Twitch in August 2014 for",
fill=THEME_COLOR,
width=280, # width faz a quebra do texto
font=("Arial", 20, "italic"))
self.canvas.grid(column=0, row=1, columnspan=2, pady=50)
self.img_true = PhotoImage(file="images/true.png")
self.img_false = PhotoImage(file="images/false.png")
self.button_true = Button(image=self.img_true, command=self.true_pressed)
self.button_true.grid(column=0, row=2)
self.button_false = Button(image=self.img_false, command=self.false_pressed)
self.button_false.grid(column=1, row=2)
self.get_next_question()
self.window.mainloop()
def get_next_question(self):
self.canvas.config(bg="white")
if self.quiz.still_has_questions():
self.score_label.config(text=f"Score: {self.quiz.score}")
q_text = self.quiz.next_question()
self.canvas.itemconfig(self.question_text, text=q_text)
else:
self.canvas.itemconfig(self.question_text, text="End of the Game")
self.button_true.config(state="disabled")
self.button_false.config(state="disabled")
def true_pressed(self):
self.give_feedback(self.quiz.check_answer("True"))
def false_pressed(self):
self.give_feedback(self.quiz.check_answer("False"))
def give_feedback(self, is_right):
if is_right:
self.canvas.config(bg="green")
else:
self.canvas.config(bg="red")
# No caso de tkinter, não posso usar o timer, por causa do mainloop()
self.window.after(1000, self.get_next_question)
|
[
"a29061@somoseducacao.com.br"
] |
a29061@somoseducacao.com.br
|
13f14dc7b1dd99b30978cce555abb5bec03d63be
|
e98064e3b51cbfdef409bca9642bea32d772bc77
|
/hw2/2.1/src/plot.py
|
d2f2fc35bc953624d39c9e51cde21463209c2669
|
[] |
no_license
|
KUAN-HSUN-LI/SDML
|
b59b26ec1f71689d0bb16d6c34c05b59f3fc3004
|
be84492fed6e4712d9a86dc1cef481954aefe22f
|
refs/heads/master
| 2021-06-23T07:56:34.050666
| 2019-11-18T05:50:29
| 2019-11-18T05:50:29
| 210,079,111
| 1
| 0
| null | 2021-04-06T01:33:18
| 2019-09-22T02:25:29
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 704
|
py
|
"""Plot training/validation loss and F1-score curves from model/history.json."""
import json

import matplotlib.pyplot as plt

# BUG FIX: the original began with '%matplotlib inline', which is an
# IPython/Jupyter magic and a SyntaxError in a plain .py file, and it used
# json.loads without importing json.  Both are fixed here.

with open('model/history.json', 'r') as f:
    history = json.loads(f.read())

# Each history entry carries 'loss' and 'acc' (used here as the F1 score).
train_loss = [l['loss'] for l in history['train']]
valid_loss = [l['loss'] for l in history['valid']]
train_f1 = [l['acc'] for l in history['train']]
valid_f1 = [l['acc'] for l in history['valid']]

# Loss curves.
plt.figure(figsize=(7, 5))
plt.title('Loss')
plt.plot(train_loss, label='train')
plt.plot(valid_loss, label='valid')
plt.legend()
plt.show()

# F1 curves.
plt.figure(figsize=(7, 5))
plt.title('F1 Score')
plt.plot(train_f1, label='train')
plt.plot(valid_f1, label='valid')
plt.legend()
plt.show()

# Best validation F1 together with the epoch index it occurred at.
print('Best F1 score ', max([[l['acc'], idx] for idx, l in enumerate(history['valid'])]))
|
[
"b06209027.ntu.edu.tw"
] |
b06209027.ntu.edu.tw
|
5426dbbd4f55d07a5a4548122f515314529345e8
|
9a8fd017aa2ca45d423d09434a6574eb7d1fe734
|
/Natural_Language_Processing_544/Finite State Machine/french_count.py
|
cbb8eed0b6f051405f669c2ba1777bba8ffb7a9f
|
[] |
no_license
|
mzy-ray/AI_Programming
|
d29395f33c86f28afb366f3bb72b1966ef3d3a7f
|
9069e2e635815142f0379424f492e52cc370ab7e
|
refs/heads/master
| 2020-03-28T04:44:14.845900
| 2018-10-02T06:53:20
| 2018-10-02T06:53:20
| 147,734,010
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,553
|
py
|
import sys
from fst import FST
from fsmutils import composewords
# French words for the numbers needed to verbalize 0-999: the irregular
# 0-16, the tens 20-60 (70/80/90 are composed), and 100 ("cent").
kFRENCH_TRANS = {0: "zero", 1: "un", 2: "deux", 3: "trois", 4:
                 "quatre", 5: "cinq", 6: "six", 7: "sept", 8: "huit",
                 9: "neuf", 10: "dix", 11: "onze", 12: "douze", 13:
                 "treize", 14: "quatorze", 15: "quinze", 16: "seize",
                 20: "vingt", 30: "trente", 40: "quarante", 50:
                 "cinquante", 60: "soixante", 100: "cent"}

# Conjunction used in forms like "vingt et un" (21).
kFRENCH_AND = 'et'
def prepare_input(integer):
    """Turn an integer in [0, 1000) into a list of exactly three digit
    characters, zero-padded on the left (e.g. 7 -> ['0', '0', '7'])."""
    assert isinstance(integer, int) and 0 <= integer < 1000, \
        "Integer out of bounds"
    # zfill left-pads with '0' to a fixed width of three characters.
    padded = str(integer).zfill(3)
    return list(padded)
def french_count():
f = FST('french')
f.add_state('hundreds')
f.add_state('tens')
f.add_state('units')
f.add_state('tens-only')
f.add_state('units-only')
f.add_state('10s')
f.add_state('20s-60s')
f.add_state('70s')
f.add_state('80s')
f.add_state('90s')
f.add_state('end')
f.initial_state = 'hundreds'
f.set_final('end')
f.add_arc('hundreds', 'tens', [str(1)], [kFRENCH_TRANS[100]])
f.add_arc('hundreds', 'tens-only', [str(0)], [])
for ii in xrange(2, 10):
f.add_arc('hundreds', 'tens', [str(ii)], [kFRENCH_TRANS[ii] + " " + kFRENCH_TRANS[100]])
for ii in xrange(0, 10):
if ii == 0:
f.add_arc('tens', 'units', [str(0)], [])
f.add_arc('tens-only', 'units-only', [str(0)], [])
elif ii == 1:
f.add_arc('tens', '10s', [str(ii)], [])
f.add_arc('tens-only', '10s', [str(ii)], [])
elif ii <= 6:
f.add_arc('tens', '20s-60s', [str(ii)], [kFRENCH_TRANS[ii * 10]])
f.add_arc('tens-only', '20s-60s', [str(ii)], [kFRENCH_TRANS[ii * 10]])
elif ii == 7:
f.add_arc('tens', '70s', [str(ii)], [kFRENCH_TRANS[60]])
f.add_arc('tens-only', '70s', [str(ii)], [kFRENCH_TRANS[60]])
elif ii == 8:
f.add_arc('tens', '80s', [str(ii)], [kFRENCH_TRANS[4] + " " + kFRENCH_TRANS[20]])
f.add_arc('tens-only', '80s', [str(ii)], [kFRENCH_TRANS[4] + " " + kFRENCH_TRANS[20]])
else:
f.add_arc('tens', '90s', [str(ii)], [kFRENCH_TRANS[4] + " " + kFRENCH_TRANS[20]])
f.add_arc('tens-only', '90s', [str(ii)], [kFRENCH_TRANS[4] + " " + kFRENCH_TRANS[20]])
for ii in xrange(0, 10):
if ii == 0:
f.add_arc('10s', 'end', [str(ii)], [kFRENCH_TRANS[10]])
elif ii <= 6:
f.add_arc('10s', 'end', [str(ii)], [kFRENCH_TRANS[ii + 10]])
else:
f.add_arc('10s', 'end', [str(ii)], [kFRENCH_TRANS[10] + " " + kFRENCH_TRANS[ii]])
for ii in xrange(0, 10):
if ii == 0:
f.add_arc('20s-60s', 'end', [str(ii)], [])
elif ii == 1:
f.add_arc('20s-60s', 'end', [str(ii)], [kFRENCH_AND + " " + kFRENCH_TRANS[1]])
else:
f.add_arc('20s-60s', 'end', [str(ii)], [kFRENCH_TRANS[ii]])
for ii in xrange(0, 10):
if ii == 0:
f.add_arc('70s', 'end', [str(ii)], [kFRENCH_TRANS[10]])
elif ii == 1:
f.add_arc('70s', 'end', [str(ii)], [kFRENCH_AND + " " + kFRENCH_TRANS[11]])
elif ii <= 6:
f.add_arc('70s', 'end', [str(ii)], [kFRENCH_TRANS[ii + 10]])
else:
f.add_arc('70s', 'end', [str(ii)], [kFRENCH_TRANS[10] + " " + kFRENCH_TRANS[ii]])
for ii in xrange(0, 10):
if ii == 0:
f.add_arc('80s', 'end', [str(ii)], [])
else:
f.add_arc('80s', 'end', [str(ii)], [kFRENCH_TRANS[ii]])
for ii in xrange(0, 10):
if ii == 0:
f.add_arc('90s', 'end', [str(ii)], [kFRENCH_TRANS[10]])
elif ii <= 6:
f.add_arc('90s', 'end', [str(ii)], [kFRENCH_TRANS[ii + 10]])
else:
f.add_arc('90s', 'end', [str(ii)], [kFRENCH_TRANS[10] + " " + kFRENCH_TRANS[ii]])
for ii in xrange(0, 10):
if ii == 0:
f.add_arc('units', 'end', [str(ii)], [])
else:
f.add_arc('units', 'end', [str(ii)], [kFRENCH_TRANS[ii]])
for ii in xrange(0, 10):
f.add_arc('units-only', 'end', [str(ii)], [kFRENCH_TRANS[ii]])
return f
if __name__ == '__main__':
string_input = raw_input()
user_input = int(string_input)
f = french_count()
if string_input:
print user_input, '-->',
print " ".join(f.transduce(prepare_input(user_input)))
|
[
"mzy.ray@gmail.com"
] |
mzy.ray@gmail.com
|
3169f03ad1a82380f124de333e6a15857ecf1ae8
|
4fc21c3f8dca563ce8fe0975b5d60f68d882768d
|
/GodwillOnyewuchi/Phase 1/Python Basic 2/day 12 task/task10.py
|
a4924e40fbc8159a266fbfd0579729acab934db6
|
[
"MIT"
] |
permissive
|
Uche-Clare/python-challenge-solutions
|
17e53dbedbff2f33e242cf8011696b3059cd96e9
|
49ede6204ee0a82d5507a19fbc7590a1ae10f058
|
refs/heads/master
| 2022-11-13T15:06:52.846937
| 2020-07-10T20:59:37
| 2020-07-10T20:59:37
| 266,404,840
| 1
| 0
|
MIT
| 2020-05-23T19:24:56
| 2020-05-23T19:24:55
| null |
UTF-8
|
Python
| false
| false
| 290
|
py
|
# Python program to get numbers divisible by fifteen from a list using an anonymous function
def divisibleby15(lists):
newList = []
for i in lists:
if i % 15 == 0:
newList.append(i)
return newList
print(divisibleby15([23, 56, 12, 15, 45, 23, 70, 678, 90]))
|
[
"godwillonyewuchii@gmail.com"
] |
godwillonyewuchii@gmail.com
|
a4bcbc3ea13c6d7161096668057371a82bc97ec8
|
e7ea544475ebfa70ebdf5d5949bde9e23edc60ba
|
/gbp/scripts/common/buildpackage.py
|
e1edfb29587dfad1895660c095e2fe13141cba7b
|
[] |
no_license
|
dcoshea/git-buildpackage
|
80cb7d890222488663a09e3d790fc5e985f791b9
|
f4aa76bfcda1ded4649cd071b123ef8d7bf2344d
|
refs/heads/master
| 2020-05-26T21:05:37.574986
| 2017-02-19T13:17:11
| 2017-02-19T13:17:11
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,061
|
py
|
# vim: set fileencoding=utf-8 :
#
# (C) 2006-2011, 2016 Guido Guenther <agx@sigxcpu.org>
# (C) 2012 Intel Corporation <markus.lehtonen@linux.intel.com>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, please see
# <http://www.gnu.org/licenses/>
#
"""Common functionality for Debian and RPM buildpackage scripts"""
import os
import os.path
import pipes
import tempfile
import shutil
from gbp.command_wrappers import (CatenateTarArchive, CatenateZipArchive)
from gbp.git import GitRepositoryError
from gbp.errors import GbpError
import gbp.log
# when we want to reference the index in a treeish context we call it:
index_name = "INDEX"
# when we want to reference the working copy in treeish context we call it:
wc_name = "WC"
def sanitize_prefix(prefix):
"""
Sanitize the prefix used for generating source archives
>>> sanitize_prefix('')
'/'
>>> sanitize_prefix('foo/')
'foo/'
>>> sanitize_prefix('/foo/bar')
'foo/bar/'
"""
if prefix:
return prefix.strip('/') + '/'
return '/'
def git_archive_submodules(repo, treeish, output, prefix, comp_type, comp_level,
comp_opts, format='tar'):
"""
Create a source tree archive with submodules.
Concatenates the archives generated by git-archive into one and compresses
the end result.
Exception handling is left to the caller.
"""
prefix = sanitize_prefix(prefix)
tempdir = tempfile.mkdtemp()
main_archive = os.path.join(tempdir, "main.%s" % format)
submodule_archive = os.path.join(tempdir, "submodule.%s" % format)
try:
# generate main (tmp) archive
repo.archive(format=format, prefix=prefix,
output=main_archive, treeish=treeish)
# generate each submodule's archive and append it to the main archive
for (subdir, commit) in repo.get_submodules(treeish):
tarpath = [subdir, subdir[2:]][subdir.startswith("./")]
gbp.log.debug("Processing submodule %s (%s)" % (subdir, commit[0:8]))
repo.archive(format=format, prefix='%s%s/' % (prefix, tarpath),
output=submodule_archive, treeish=commit, cwd=subdir)
if format == 'tar':
CatenateTarArchive(main_archive)(submodule_archive)
elif format == 'zip':
CatenateZipArchive(main_archive)(submodule_archive)
# compress the output
if comp_type:
# Redirect through stdout directly to the correct output file in
# order to avoid determining the output filename of the compressor
try:
comp_level_opt = '-%d' % comp_level if comp_level is not None else ''
except TypeError:
raise GbpError("Invalid compression level '%s'" % comp_level)
ret = os.system("%s --stdout %s %s %s > %s" %
(comp_type, comp_level_opt, comp_opts, main_archive,
output))
if ret:
raise GbpError("Error creating %s: %d" % (output, ret))
else:
shutil.move(main_archive, output)
finally:
shutil.rmtree(tempdir)
def git_archive_single(treeish, output, prefix, comp_type, comp_level, comp_opts, format='tar'):
"""
Create an archive without submodules
Exception handling is left to the caller.
"""
prefix = sanitize_prefix(prefix)
pipe = pipes.Template()
pipe.prepend("git archive --format=%s --prefix=%s %s" % (format, prefix, treeish), '.-')
try:
comp_level_opt = '-%d' % comp_level if comp_level is not None else ''
except TypeError:
raise GbpError("Invalid compression level '%s'" % comp_level)
if comp_type:
pipe.append('%s -c %s %s' % (comp_type, comp_level_opt, comp_opts), '--')
ret = pipe.copy('', output)
if ret:
raise GbpError("Error creating %s: %d" % (output, ret))
# Functions to handle export-dir
def dump_tree(repo, export_dir, treeish, with_submodules, recursive=True):
"dump a tree to output_dir"
output_dir = os.path.dirname(export_dir)
prefix = sanitize_prefix(os.path.basename(export_dir))
if recursive:
paths = []
else:
paths = ["'%s'" % nam for _mod, typ, _sha, nam in
repo.list_tree(treeish) if typ == 'blob']
pipe = pipes.Template()
pipe.prepend('git archive --format=tar --prefix=%s %s -- %s' %
(prefix, treeish, ' '.join(paths)), '.-')
pipe.append('tar -C %s -xf -' % output_dir, '-.')
top = os.path.abspath(os.path.curdir)
try:
ret = pipe.copy('', '')
if ret:
raise GbpError("Error in dump_tree archive pipe")
if recursive and with_submodules:
if repo.has_submodules():
repo.update_submodules()
for (subdir, commit) in repo.get_submodules(treeish):
gbp.log.info("Processing submodule %s (%s)" % (subdir, commit[0:8]))
tarpath = [subdir, subdir[2:]][subdir.startswith("./")]
os.chdir(subdir)
pipe = pipes.Template()
pipe.prepend('git archive --format=tar --prefix=%s%s/ %s' %
(prefix, tarpath, commit), '.-')
pipe.append('tar -C %s -xf -' % output_dir, '-.')
ret = pipe.copy('', '')
os.chdir(top)
if ret:
raise GbpError("Error in dump_tree archive pipe in submodule %s" % subdir)
except OSError as err:
gbp.log.err("Error dumping tree to %s: %s" % (output_dir, err[0]))
return False
except (GitRepositoryError, GbpError) as err:
gbp.log.err(err)
return False
except Exception as e:
gbp.log.err("Error dumping tree to %s: %s" % (output_dir, e))
return False
finally:
os.chdir(top)
return True
def wc_index(repo):
"""Get path of the temporary index file used for exporting working copy"""
return os.path.join(repo.git_dir, "gbp_index")
def write_wc(repo, force=True):
"""write out the current working copy as a treeish object"""
index_file = wc_index(repo)
repo.add_files(repo.path, force=force, index_file=index_file)
tree = repo.write_tree(index_file=index_file)
return tree
def drop_index(repo):
"""drop our custom index"""
index_file = wc_index(repo)
if os.path.exists(index_file):
os.unlink(index_file)
|
[
"agx@sigxcpu.org"
] |
agx@sigxcpu.org
|
5dfe38fc03c0375b3b51d023a6dd2aa1cca6b25d
|
ac42f1d918bdbd229968cea0954ed75250acd55c
|
/admin/dashboard/openstack_dashboard/dashboards/physical/hosts/compute/tests.py
|
47aa906803025be9db313abb19823b19ec492fcc
|
[
"Apache-2.0"
] |
permissive
|
naanal/product
|
016e18fd2f35608a0d8b8e5d2f75b653bac7111a
|
bbaa4cd60d4f2cdda6ce4ba3d36312c1757deac7
|
refs/heads/master
| 2020-04-03T22:40:48.712243
| 2016-11-15T11:22:00
| 2016-11-15T11:22:00
| 57,004,514
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,056
|
py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
from mox3.mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
class EvacuateHostViewTest(test.BaseAdminViewTests):
@test.create_stubs({api.nova: ('hypervisor_list',
'hypervisor_stats',
'service_list')})
def test_index(self):
hypervisor = self.hypervisors.list().pop().hypervisor_hostname
services = [service for service in self.services.list()
if service.binary == 'nova-compute']
api.nova.service_list(IsA(http.HttpRequest),
binary='nova-compute').AndReturn(services)
self.mox.ReplayAll()
url = reverse('horizon:physical:hosts:compute:evacuate_host',
args=[hypervisor])
res = self.client.get(url)
self.assertTemplateUsed(res,
'physical/hosts/compute/evacuate_host.html')
@test.create_stubs({api.nova: ('hypervisor_list',
'hypervisor_stats',
'service_list',
'evacuate_host')})
def test_successful_post(self):
hypervisor = self.hypervisors.list().pop().hypervisor_hostname
services = [service for service in self.services.list()
if service.binary == 'nova-compute']
api.nova.service_list(IsA(http.HttpRequest),
binary='nova-compute').AndReturn(services)
api.nova.evacuate_host(IsA(http.HttpRequest),
services[1].host,
services[0].host,
False).AndReturn(True)
self.mox.ReplayAll()
url = reverse('horizon:physical:hosts:compute:evacuate_host',
args=[hypervisor])
form_data = {'current_host': services[1].host,
'target_host': services[0].host,
'on_shared_storage': False}
res = self.client.post(url, form_data)
dest_url = reverse('horizon:physical:hosts:index')
self.assertNoFormErrors(res)
self.assertMessageCount(success=1)
self.assertRedirectsNoFollow(res, dest_url)
@test.create_stubs({api.nova: ('hypervisor_list',
'hypervisor_stats',
'service_list',
'evacuate_host')})
def test_failing_nova_call_post(self):
hypervisor = self.hypervisors.list().pop().hypervisor_hostname
services = [service for service in self.services.list()
if service.binary == 'nova-compute']
api.nova.service_list(IsA(http.HttpRequest),
binary='nova-compute').AndReturn(services)
api.nova.evacuate_host(IsA(http.HttpRequest),
services[1].host,
services[0].host,
False).AndRaise(self.exceptions.nova)
self.mox.ReplayAll()
url = reverse('horizon:physical:hosts:compute:evacuate_host',
args=[hypervisor])
form_data = {'current_host': services[1].host,
'target_host': services[0].host,
'on_shared_storage': False}
res = self.client.post(url, form_data)
dest_url = reverse('horizon:physical:hosts:index')
self.assertMessageCount(error=1)
self.assertRedirectsNoFollow(res, dest_url)
class MigrateHostViewTest(test.BaseAdminViewTests):
def test_index(self):
disabled_services = [service for service in self.services.list()
if service.binary == 'nova-compute'
and service.status == 'disabled']
disabled_service = disabled_services[0]
self.mox.ReplayAll()
url = reverse('horizon:physical:hosts:compute:migrate_host',
args=[disabled_service.host])
res = self.client.get(url)
self.assertNoMessages()
self.assertTemplateUsed(res,
'physical/hosts/compute/migrate_host.html')
@test.create_stubs({api.nova: ('migrate_host',)})
def test_maintenance_host_cold_migration_succeed(self):
disabled_services = [service for service in self.services.list()
if service.binary == 'nova-compute'
and service.status == 'disabled']
disabled_service = disabled_services[0]
api.nova.migrate_host(
IsA(http.HttpRequest),
disabled_service.host,
live_migrate=False,
disk_over_commit=False,
block_migration=False
).AndReturn(True)
self.mox.ReplayAll()
url = reverse('horizon:physical:hosts:compute:migrate_host',
args=[disabled_service.host])
form_data = {'current_host': disabled_service.host,
'migrate_type': 'cold_migrate',
'disk_over_commit': False,
'block_migration': False}
res = self.client.post(url, form_data)
dest_url = reverse('horizon:physical:hosts:index')
self.assertNoFormErrors(res)
self.assertMessageCount(success=1)
self.assertRedirectsNoFollow(res, dest_url)
@test.create_stubs({api.nova: ('migrate_host',)})
def test_maintenance_host_live_migration_succeed(self):
disabled_services = [service for service in self.services.list()
if service.binary == 'nova-compute'
and service.status == 'disabled']
disabled_service = disabled_services[0]
api.nova.migrate_host(
IsA(http.HttpRequest),
disabled_service.host,
live_migrate=True,
disk_over_commit=False,
block_migration=True
).AndReturn(True)
self.mox.ReplayAll()
url = reverse('horizon:physical:hosts:compute:migrate_host',
args=[disabled_service.host])
form_data = {'current_host': disabled_service.host,
'migrate_type': 'live_migrate',
'disk_over_commit': False,
'block_migration': True}
res = self.client.post(url, form_data)
dest_url = reverse('horizon:physical:hosts:index')
self.assertNoFormErrors(res)
self.assertMessageCount(success=1)
self.assertRedirectsNoFollow(res, dest_url)
@test.create_stubs({api.nova: ('migrate_host',)})
def test_maintenance_host_migration_fails(self):
disabled_services = [service for service in self.services.list()
if service.binary == 'nova-compute'
and service.status == 'disabled']
disabled_service = disabled_services[0]
api.nova.migrate_host(
IsA(http.HttpRequest),
disabled_service.host,
live_migrate=True,
disk_over_commit=False,
block_migration=True
).AndRaise(self.exceptions.nova)
self.mox.ReplayAll()
url = reverse('horizon:physical:hosts:compute:migrate_host',
args=[disabled_service.host])
form_data = {'current_host': disabled_service.host,
'migrate_type': 'live_migrate',
'disk_over_commit': False,
'block_migration': True}
res = self.client.post(url, form_data)
dest_url = reverse('horizon:physical:hosts:index')
self.assertMessageCount(error=1)
self.assertRedirectsNoFollow(res, dest_url)
class DisableServiceViewTest(test.BaseAdminViewTests):
@test.create_stubs({api.nova: ('hypervisor_list',
'hypervisor_stats')})
def test_index(self):
hypervisor = self.hypervisors.list().pop().hypervisor_hostname
self.mox.ReplayAll()
url = reverse('horizon:physical:hosts:compute:disable_service',
args=[hypervisor])
res = self.client.get(url)
template = 'physical/hosts/compute/disable_service.html'
self.assertTemplateUsed(res, template)
@test.create_stubs({api.nova: ('hypervisor_list',
'hypervisor_stats',
'service_disable')})
def test_successful_post(self):
hypervisor = self.hypervisors.list().pop().hypervisor_hostname
services = [service for service in self.services.list()
if service.binary == 'nova-compute']
api.nova.service_disable(IsA(http.HttpRequest),
services[0].host,
'nova-compute',
reason='test disable').AndReturn(True)
self.mox.ReplayAll()
url = reverse('horizon:physical:hosts:compute:disable_service',
args=[hypervisor])
form_data = {'host': services[0].host,
'reason': 'test disable'}
res = self.client.post(url, form_data)
dest_url = reverse('horizon:physical:hosts:index')
self.assertNoFormErrors(res)
self.assertMessageCount(success=1)
self.assertRedirectsNoFollow(res, dest_url)
@test.create_stubs({api.nova: ('hypervisor_list',
'hypervisor_stats',
'service_disable')})
def test_failing_nova_call_post(self):
hypervisor = self.hypervisors.list().pop().hypervisor_hostname
services = [service for service in self.services.list()
if service.binary == 'nova-compute']
api.nova.service_disable(
IsA(http.HttpRequest), services[0].host, 'nova-compute',
reason='test disable').AndRaise(self.exceptions.nova)
self.mox.ReplayAll()
url = reverse('horizon:physical:hosts:compute:disable_service',
args=[hypervisor])
form_data = {'host': services[0].host,
'reason': 'test disable'}
res = self.client.post(url, form_data)
dest_url = reverse('horizon:physical:hosts:index')
self.assertMessageCount(error=1)
self.assertRedirectsNoFollow(res, dest_url)
|
[
"rajagopalx@gmail.com"
] |
rajagopalx@gmail.com
|
52b9f542a153e979406b22d48c38a86711d3273c
|
ee8ec2e94586e676440c85da0721c4ba7dcfc5bf
|
/fc_community/fcuser/views.py
|
e979ceb39a951fa5a594f6408e6afbeb381107cf
|
[] |
no_license
|
simbyungki/fc_django
|
acc97c48db082fd2d5a52833fa704bad241667dd
|
99332b0bfa2ec4d8bbf7e0c153926dbe07b4ae46
|
refs/heads/master
| 2023-01-02T15:47:42.352510
| 2020-11-02T10:02:46
| 2020-11-02T10:02:46
| 307,319,339
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,582
|
py
|
from django.http import HttpResponse
from django.shortcuts import render, redirect
from django.contrib.auth.hashers import make_password, check_password
from .models import Fcuser
from .forms import LoginForm
def index(request) :
res_data = {}
user_id = request.session.get('user')
if user_id :
fcuser = Fcuser.objects.get(pk=user_id)
res_data['user_id'] = fcuser
return render(request, 'fcuser/index.html', res_data)
def logout(request) :
if request.session.get('user') :
del(request.session['user'])
return redirect('/')
def login(request) :
if request.method == 'POST' :
form = LoginForm(request.POST)
if form.is_valid() :
request.session['user'] = form.user_id
return redirect('/')
else :
form = LoginForm()
return render(request, 'fcuser/login.html', {'form': form})
def register(request) :
if request.method == 'GET' :
return render(request, 'fcuser/register.html')
elif request.method == 'POST' :
username = request.POST.get('username', None)
useremail = request.POST.get('useremail', None)
password = request.POST.get('password', None)
re_password = request.POST.get('re-password', None)
res_data = {}
if not (username and useremail and password and re_password) :
res_data['error'] = '모든 값을 입력해주세요.'
elif password != re_password :
res_data['error'] = '비밀번호가 다릅니다.'
else :
fcuser = Fcuser(
username = username,
useremail = useremail,
password = make_password(password)
)
fcuser.save()
return render(request, 'fcuser/register.html', res_data)
|
[
"dung8524@gmail.com"
] |
dung8524@gmail.com
|
d4904493099383546f82db50de128bdb52234ea6
|
6e6e27192e3fe888af95a0f11bec5aebe06a74d8
|
/ArrayAndString/L209_minimum-size-subarray-sum.py
|
c657e576fd24315abd9ce7f7772c46aa5390fdef
|
[] |
no_license
|
lihujun101/LeetCode
|
af51a7eeec4b64f9d04b439285e2d0444c7fdf7e
|
96e847591aa6ea7ea285dbcfc1c9bcfc32026de5
|
refs/heads/master
| 2021-07-22T11:00:59.387428
| 2019-01-01T12:40:43
| 2019-01-01T12:40:43
| 146,878,231
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 875
|
py
|
class Solution:
def minSubArrayLen(self, s, nums):
"""
:type s: int
:type nums: List[int]
:rtype: int
"""
# O(n)的方法:
# 1、sum和s对比,sum滚动计算,向右加,向左减
if len(nums) == 0:
return 0
i, j = 0, 0
sum, count, min_count = 0, 0, float("inf")
while True:
if sum < s:
if j == len(nums):
break
sum += nums[j]
j += 1
elif sum >= s:
count = j - i
if min_count > count:
min_count = count
sum -= nums[i]
i += 1
return 0 if min_count == float("inf") else min_count
if __name__ == '__main__':
s = Solution()
print(s.minSubArrayLen(s=11, nums=[1,2,3,4,5]))
|
[
"lihujun101@163.com"
] |
lihujun101@163.com
|
c4b2fcaa8f6499cdca69575ead3662b305b1ccd5
|
4ed33dba672aa6aaef42698ef8437c872b078d37
|
/backend/home/migrations/0001_load_initial_data.py
|
e78b5b69ad3761f691200103468335142fc62434
|
[] |
no_license
|
crowdbotics-apps/flat-heart-27928
|
aecb93c66e39e94e01cef7fe9506effe994cde18
|
ce209de8910b1e9f006814b58a05aed1eeada32d
|
refs/heads/master
| 2023-05-26T14:51:41.045373
| 2021-06-11T20:01:34
| 2021-06-11T20:01:34
| 376,130,678
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 538
|
py
|
from django.db import migrations
def create_site(apps, schema_editor):
Site = apps.get_model("sites", "Site")
custom_domain = "flat-heart-27928.botics.co"
site_params = {
"name": "Flat Heart",
}
if custom_domain:
site_params["domain"] = custom_domain
Site.objects.update_or_create(defaults=site_params, id=1)
class Migration(migrations.Migration):
dependencies = [
("sites", "0002_alter_domain_unique"),
]
operations = [
migrations.RunPython(create_site),
]
|
[
"team@crowdbotics.com"
] |
team@crowdbotics.com
|
edcb724454b921fe8dc091a316470e10f89459df
|
6cea6b8cfeef78b433e296c38ef11f4637609f20
|
/src/collectors/ipmisensor/test/testipmisensor.py
|
66a79164c5d9b0f45141583e0676c31a4b5b8902
|
[
"MIT"
] |
permissive
|
philipcristiano/Diamond
|
b659d577ec054c06ab99308d6c2ba3163de84e1a
|
577270ea820af597458aa5d3325367608cd37845
|
refs/heads/master
| 2021-01-18T10:04:59.057835
| 2012-08-02T04:08:02
| 2012-08-02T04:08:02
| 3,140,864
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,392
|
py
|
#!/usr/bin/python
################################################################################
from test import *
from diamond.collector import Collector
from ipmisensor import IPMISensorCollector
################################################################################
class TestIPMISensorCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('IPMISensorCollector', {
'interval': 10,
'bin' : 'true',
'use_sudo' : False
})
self.collector = IPMISensorCollector(config, None)
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
with patch('subprocess.Popen.communicate', Mock(return_value =
( self.getFixture('ipmitool.out').getvalue() , '')
)):
self.collector.collect()
self.assertPublishedMany(publish_mock, {
'System.Temp' : 32.000000,
'CPU1.Vcore' : 1.080000,
'CPU2.Vcore' : 1.000000,
'CPU1.VTT' : 1.120000,
'CPU2.VTT' : 1.176000,
'CPU1.DIMM' : 1.512000,
'CPU2.DIMM' : 1.512000,
'+1_5V' : 1.512000,
'+1_8V' : 1.824000,
'+5V' : 4.992000,
'+12V' : 12.031000,
'+1_1V' : 1.112000,
'+3_3V' : 3.288000,
'+3_3VSB' : 3.240000,
'VBAT' : 3.240000,
'Fan1' : 4185.000000,
'Fan2' : 4185.000000,
'Fan3' : 4185.000000,
'Fan7' : 3915.000000,
'Fan8' : 3915.000000,
'Intrusion' : 0.000000,
'PS.Status' : 0.000000,
'P1-DIMM1A.Temp' : 41.000000,
'P1-DIMM1B.Temp' : 39.000000,
'P1-DIMM2A.Temp' : 38.000000,
'P1-DIMM2B.Temp' : 40.000000,
'P1-DIMM3A.Temp' : 37.000000,
'P1-DIMM3B.Temp' : 38.000000,
'P2-DIMM1A.Temp' : 39.000000,
'P2-DIMM1B.Temp' : 38.000000,
'P2-DIMM2A.Temp' : 39.000000,
'P2-DIMM2B.Temp' : 39.000000,
'P2-DIMM3A.Temp' : 39.000000,
'P2-DIMM3B.Temp' : 40.000000,
})
################################################################################
if __name__ == "__main__":
unittest.main()
|
[
"kormoc@gmail.com"
] |
kormoc@gmail.com
|
ce6667dc95fdefc8be193b41ae44902d4600a89a
|
7a9c01f7029e74c697100e244d26c72d0e283d47
|
/models/amenity.py
|
9adbf8d9f5418e8b43eeb584cccd1acbde12617c
|
[] |
no_license
|
toyugo/holbertonschool-AirBnB_clone
|
63321296ecee98b1a0cda39c7b155cc2ea5ececb
|
5edaeafb6516130f2027b505fe8b168f6f9de174
|
refs/heads/main
| 2023-03-21T06:32:18.728878
| 2021-03-04T13:08:56
| 2021-03-04T13:08:56
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 168
|
py
|
#!/usr/bin/python3
""" Module Amenity """
from models.base_model import BaseModel
class Amenity(BaseModel):
""" Class Amenity base en BaseModel """
name = ""
|
[
"you@example.com"
] |
you@example.com
|
a46afda8041485109144a60243600a990bd2b7d1
|
c0d5b7f8e48a26c6ddc63c76c43ab5b397c00028
|
/tests/columns/test_array.py
|
731e15ff8b962d66534e989094fe5f8cbef23a93
|
[
"MIT"
] |
permissive
|
aminalaee/piccolo
|
f6c5e5e1c128568f7ccb9ad1dfb4746acedae262
|
af8d2d45294dcd84f4f9b6028752aa45b699ec15
|
refs/heads/master
| 2023-07-14T09:44:04.160116
| 2021-07-11T22:56:27
| 2021-07-11T22:56:27
| 386,398,401
| 0
| 0
|
MIT
| 2021-07-15T19:32:50
| 2021-07-15T19:08:17
| null |
UTF-8
|
Python
| false
| false
| 2,199
|
py
|
from unittest import TestCase
from piccolo.table import Table
from piccolo.columns.column_types import Array, Integer
from tests.base import postgres_only
class MyTable(Table):
value = Array(base_column=Integer())
class TestArrayPostgres(TestCase):
"""
Make sure an Array column can be created.
"""
def setUp(self):
MyTable.create_table().run_sync()
def tearDown(self):
MyTable.alter().drop_table().run_sync()
def test_storage(self):
"""
Make sure data can be stored and retrieved.
"""
MyTable(value=[1, 2, 3]).save().run_sync()
row = MyTable.objects().first().run_sync()
self.assertEqual(row.value, [1, 2, 3])
@postgres_only
def test_index(self):
"""
Indexes should allow individual array elements to be queried.
"""
MyTable(value=[1, 2, 3]).save().run_sync()
self.assertEqual(
MyTable.select(MyTable.value[0]).first().run_sync(), {"value": 1}
)
@postgres_only
def test_all(self):
"""
Make sure rows can be retrieved where all items in an array match a
given value.
"""
MyTable(value=[1, 1, 1]).save().run_sync()
self.assertEqual(
MyTable.select(MyTable.value)
.where(MyTable.value.all(1))
.first()
.run_sync(),
{"value": [1, 1, 1]},
)
self.assertEqual(
MyTable.select(MyTable.value)
.where(MyTable.value.all(0))
.first()
.run_sync(),
None,
)
def test_any(self):
"""
Make sure rows can be retrieved where any items in an array match a
given value.
"""
MyTable(value=[1, 2, 3]).save().run_sync()
self.assertEqual(
MyTable.select(MyTable.value)
.where(MyTable.value.any(1))
.first()
.run_sync(),
{"value": [1, 2, 3]},
)
self.assertEqual(
MyTable.select(MyTable.value)
.where(MyTable.value.any(0))
.first()
.run_sync(),
None,
)
|
[
"dan@dantownsend.co.uk"
] |
dan@dantownsend.co.uk
|
1781619b0a8abd1d3cb474a67ee1c1d84f0bd6c4
|
080688b23a9114a41594a4483b07a8896f106102
|
/app.py
|
3373c706cfc92661c50d138bb12a87ac3122f194
|
[
"Apache-2.0"
] |
permissive
|
ashishqm/sept
|
3160e9498190a8b60d93fc604394ab4d9c9e67ee
|
0772c20417bb3d1331f3960016e4e6d861acf8ec
|
refs/heads/master
| 2020-03-27T21:23:48.242448
| 2018-09-03T02:16:25
| 2018-09-03T02:16:25
| 147,140,005
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,418
|
py
|
#!/usr/bin/env python
import urllib
import json
import os
from flask import Flask
from flask import request
from flask import make_response
# Flask app should start in global layout
app = Flask(__name__)
@app.route('/static_reply', methods=['POST'])
def static_reply():
req = request.get_json(silent=True, force=True)
print("Request:")
print(json.dumps(req, indent=4))
res = makeWebhookResult(req)
res = json.dumps(res, indent=4)
print(res)
r = make_response(res)
r.headers['Content-Type'] = 'application/json'
return r
def makeWebhookResult(req):
if req.get("result").get("action") != "interest":
return {}
result = req.get("result")
parameters = result.get("parameters")
name = parameters.get("bank-name")
bank = {'Federal Bank':'6.70%','Andhra Bank':'6.85%', 'Allahabad Bank':'6.75%', 'Axis Bank':'6.5%', 'Bandhan bank':'7.15%', 'Bank of Maharashtra':'6.50%', 'Bank of Baroda':'6.90%', 'Bank of India':'6.60%', 'Bharatiya Mahila Bank':'7.00%', 'Canara Bank':'6.50%', 'Central Bank of India':'6.60%', 'City Union Bank':'7.10%', 'Corporation Bank':'6.75%', 'Citi Bank':'5.25%', 'DBS Bank':'6.30%', 'Dena Bank':'6.80%', 'Deutsche Bank':'6.00%', 'Dhanalakshmi Bank':'6.60%', 'DHFL Bank':'7.75%', 'HDFC Bank':'5.75% to 6.75%', 'Post Office':'7.10%', 'Indian Overseas Bank':'6.75%', 'ICICI Bank':'6.25% to 6.9%', 'IDBI Bank':'6.65%', 'Indian Bank':'4.75%', 'Indusind Bank':'6.85%', 'J&K Bank':'6.75%', 'Karnataka Bank':'6.50 to 6.90%', 'Karur Vysya Bank':'6.75%', 'Kotak Mahindra Bank':'6.6%', 'Lakshmi Vilas Bank':'7.00%', 'Nainital Bank':'7.90%', 'Oriental Bank of Commerce':'6.85%', 'Punjab National Bank':'6.75%', 'Punjab and Sind Bank':'6.4% to 6.80%', 'Saraswat bank':'6.8%', 'South Indian Bank':'6% to 6.75%', 'State Bank of India':'6.75%', 'Syndicate Bank':'6.50%', 'Tamilnad Mercantile Bank Ltd':'6.90%', 'UCO bank':'6.75%', 'United Bank Of India':'6%', 'Vijaya Bank':'6.50%', 'Yes Bank':'7.10%'}
speech = "The interest rate of " + name + " is " + str(cost[name])
print("Response:")
print(speech)
return {
"speech": speech,
"displayText": speech,
#"data": {},
#"contextOut": [],
"source": "BankInterestRates"
}
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
print ("Starting app on port %d" %(port))
app.run(debug=True, port=port, host='0.0.0.0')
|
[
"noreply@github.com"
] |
ashishqm.noreply@github.com
|
e224dafd3e35959feffd1ace01599c5833048a0b
|
627d40980db4fbcec59249831e5f7d04f6dcc767
|
/For_Lopp.py
|
e06ef50d7c5a5e7cf1248c5b317ca962559352f9
|
[] |
no_license
|
Shantha-Kumar-A/yuvarepo
|
33a151c7241c2b3bd6a1c1d3fac14b015e73172b
|
574811bad1b573bd19912a27001a47bca0832a60
|
refs/heads/master
| 2022-06-11T07:37:50.471032
| 2020-05-09T10:07:43
| 2020-05-09T10:07:43
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 219
|
py
|
x = ['Yuvaraj','hi',298]
for i in x:
print(i)
x = 'Yuvaraj'
for i in x:
print(i)
for i in range(10):
print(i)
for i in range(11,20):
print(i)
for i in range(20,10,-1):
print(i)
|
[
"noreply@github.com"
] |
Shantha-Kumar-A.noreply@github.com
|
d1c7e21291310553bb972a82567ec40099308797
|
f777e7b52b4fe2839b1c487e1f1d901e67906705
|
/galeria/admin.py
|
2d9d0bb64375ca9d372a9be1e472be83545618e2
|
[] |
no_license
|
dxviidmg/LosArquetiposDeJung
|
4e438e08f1664532cf2dfb2c5d1dcda733e84ee6
|
ce8caf64464be20b64c9c7e5924028560552557b
|
refs/heads/master
| 2020-07-15T14:03:37.851172
| 2017-03-08T03:13:44
| 2017-03-08T03:13:44
| 66,895,997
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 81
|
py
|
# Register the gallery's Foto model so it can be managed from the
# Django admin site.
from django.contrib import admin
from .models import *
admin.site.register(Foto)
|
[
"david@david-Lenovo-U310"
] |
david@david-Lenovo-U310
|
74889d0b7d3a2f0c76254b7c7f76fed2298d71da
|
39d9bc0bdf069700aa44c3cc8cb4127f5770431d
|
/2nd Place/geopose/models/heads.py
|
3ee102a6931f81050fd427f0dffeea2d68386d05
|
[
"MIT"
] |
permissive
|
personx000/overhead-geopose-challenge
|
5189d7dd6e385b4e9c0b0b238168f66dd0a60033
|
f32b5763ae01627519c9080224b8be97ebe7f3e2
|
refs/heads/main
| 2023-08-30T08:01:04.820481
| 2021-11-05T13:45:52
| 2021-11-05T13:45:52
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 14,143
|
py
|
from typing import List
import torch
import torch.nn.functional as F
from pytorch_toolbelt.modules import (
ACT_RELU,
instantiate_activation_block,
GlobalAvgPool2d,
FPNFuseSum,
ResidualDeconvolutionUpsample2d,
conv1x1,
)
from torch import nn
from ..dataset import (
OUTPUT_VFLOW_DIRECTION,
tensor_vector2angle,
OUTPUT_MAGNITUDE_MASK,
OUTPUT_VFLOW_ANGLE,
OUTPUT_AGL_MASK,
OUTPUT_VFLOW_SCALE,
)
__all__ = ["SimpleAGLHead", "AGLHead"]
class RegressionHeadWithGSD(nn.Module):
    """Dense regression head that fuses the per-image GSD scalar.

    Upsamples the input 2x, runs a conv, concatenates a constant GSD plane
    (hence `embedding_size + 1` inputs on conv2), and regresses
    `out_channels` maps through a final ReLU (non-negative outputs).
    """
    def __init__(self, in_channels, embedding_size, out_channels, activation=ACT_RELU):
        super().__init__()
        # (in_channels // 4) * 4 rounds the channel count down to a multiple
        # of 4 so the residual-deconvolution upsampler can split it evenly.
        self.up = nn.Sequential(
            conv1x1(in_channels, (in_channels // 4) * 4), ResidualDeconvolutionUpsample2d((in_channels // 4) * 4)
        )
        self.conv1 = nn.Conv2d(in_channels // 4, embedding_size, kernel_size=3, padding=1)
        self.act1 = instantiate_activation_block(activation, inplace=True)
        # +1 input channel: the broadcast GSD plane is concatenated in forward()
        self.conv2 = nn.Conv2d(embedding_size + 1, embedding_size, kernel_size=3, padding=1)
        self.act2 = instantiate_activation_block(activation, inplace=True)
        self.conv3 = nn.Conv2d(embedding_size, out_channels, kernel_size=3, padding=1)
        # final activation is always ReLU regardless of `activation`
        self.act3 = instantiate_activation_block(ACT_RELU, inplace=True)
        for layer in [self.conv1, self.conv2, self.conv3]:
            torch.nn.init.normal_(layer.weight, mean=0.0, std=0.1)
            # torch.nn.init.zeros_(layer.bias)
            # biases deliberately start at 1 (not 0) — presumably to bias the
            # ReLU outputs away from dead zero at init; TODO confirm intent.
            torch.nn.init.ones_(layer.bias)
    def forward(self, x, gsd):
        # gsd is expected to be one scalar per batch element — TODO confirm.
        x = self.up(x)
        x = self.act1(self.conv1(x))
        # broadcast the scalar to a (B, 1, H, W) plane and append as a channel
        gsd = gsd.reshape(gsd.size(0), 1, 1, 1).expand((-1, -1, x.size(2), x.size(3)))
        x = torch.cat([x, gsd], dim=1)
        x = self.act2(self.conv2(x))
        x = self.act3(self.conv3(x))
        return x
class RegressionHead(nn.Module):
    """Dense regression head: 2x upsample then three 3x3 convs.

    Identical to RegressionHeadWithGSD except no GSD plane is injected;
    the final ReLU keeps the regressed maps non-negative.
    """
    def __init__(self, in_channels, embedding_size, out_channels, activation=ACT_RELU):
        super().__init__()
        # self.up = nn.Sequential(conv1x1(in_channels, in_channels * 4), nn.PixelShuffle(upscale_factor=2))
        # channel count rounded down to a multiple of 4 for the upsampler
        self.up = nn.Sequential(
            conv1x1(in_channels, (in_channels // 4) * 4), ResidualDeconvolutionUpsample2d((in_channels // 4) * 4)
        )
        self.conv1 = nn.Conv2d(in_channels // 4, embedding_size, kernel_size=3, padding=1)
        self.act1 = instantiate_activation_block(activation, inplace=True)
        self.conv2 = nn.Conv2d(embedding_size, embedding_size, kernel_size=3, padding=1)
        self.act2 = instantiate_activation_block(activation, inplace=True)
        self.conv3 = nn.Conv2d(embedding_size, out_channels, kernel_size=3, padding=1)
        # final activation is always ReLU regardless of `activation`
        self.act3 = instantiate_activation_block(ACT_RELU, inplace=True)
        for layer in [self.conv1, self.conv2, self.conv3]:
            torch.nn.init.normal_(layer.weight, mean=0.0, std=0.1)
            # torch.nn.init.zeros_(layer.bias)
            # biases start at 1 (mirrors RegressionHeadWithGSD) — TODO confirm intent
            torch.nn.init.ones_(layer.bias)
    def forward(self, x):
        x = self.up(x)
        x = self.act1(self.conv1(x))
        x = self.act2(self.conv2(x))
        x = self.act3(self.conv3(x))
        return x
class SimpleRegressionHead(nn.Module):
    """Minimal regression head: conv -> dropout -> activation -> conv.

    No upsampling and no final activation, so outputs are unbounded.
    """
    def __init__(self, in_channels, embedding_size, out_channels, activation=ACT_RELU, dropout_rate=0.0):
        super().__init__()
        self.conv1 = nn.Conv2d(in_channels, embedding_size, kernel_size=3, padding=1)
        self.drop = nn.Dropout2d(dropout_rate, inplace=False)
        self.act1 = instantiate_activation_block(activation, inplace=True)
        self.conv2 = nn.Conv2d(embedding_size, out_channels, kernel_size=3, padding=1)
    def forward(self, x):
        x = self.conv1(x)
        x = self.drop(x)
        x = self.act1(x)
        x = self.conv2(x)
        return x
class OnlyAGLHead(nn.Module):
    """Prediction head that outputs only the AGL (height) mask.

    Uses the finest decoder feature map and upsamples the prediction back
    to the input RGB resolution.
    """
    def __init__(
        self, encoder_channels: List[int], decoder_channels: List[int], embedding_size=64, dropout_rate=0.0, activation=ACT_RELU
    ):
        super().__init__()
        in_channels = decoder_channels[0]
        self.dropout = nn.Dropout2d(dropout_rate, inplace=False)
        self.height = RegressionHeadWithGSD(in_channels, embedding_size, 1, activation=activation)
    def forward(self, rgb, encoder_feature_maps, decoder_feature_maps, gsd, orientation_outputs):
        # Take the finest feature map from the decoder
        x = decoder_feature_maps[0]
        x = self.dropout(x)
        height = self.height(x, gsd)
        return {
            OUTPUT_AGL_MASK: F.interpolate(height, size=rgb.size()[2:], mode="bilinear", align_corners=True),
        }
class SimpleAGLHead(nn.Module):
    """Prediction head producing AGL mask, flow-magnitude mask, and scale.

    Height and magnitude come from separate regression heads on the finest
    decoder map; scale is the least-squares ratio computed by ScaleHead.
    """
    def __init__(
        self, encoder_channels: List[int], decoder_channels: List[int], embedding_size=64, dropout_rate=0.0, activation=ACT_RELU
    ):
        super().__init__()
        in_channels = decoder_channels[0]
        self.dropout = nn.Dropout2d(dropout_rate, inplace=False)
        self.height = RegressionHeadWithGSD(in_channels, embedding_size, 1, activation=activation)
        self.magnitude = RegressionHead(in_channels, embedding_size, 1, activation=activation)
        self.scale = ScaleHead()
    def forward(self, rgb, encoder_feature_maps, decoder_feature_maps, gsd, orientation_outputs):
        # Take the finest feature map from the decoder
        x = decoder_feature_maps[0]
        x = self.dropout(x)
        height = self.height(x, gsd)
        mag = self.magnitude(x)
        # scale is computed at head resolution, before the interpolation below
        scale = self.scale(mag, height)
        return {
            OUTPUT_AGL_MASK: F.interpolate(height, size=rgb.size()[2:], mode="bilinear", align_corners=True),
            OUTPUT_MAGNITUDE_MASK: F.interpolate(mag, size=rgb.size()[2:], mode="bilinear", align_corners=True),
            OUTPUT_VFLOW_SCALE: scale,
        }
class SquareRoot(nn.Module):
    """Element-wise square root wrapped as a module (for use in Sequential)."""
    def forward(self, x):
        return torch.sqrt(x)
class Exponent(nn.Module):
    """Element-wise exponential wrapped as a module (for use in Sequential)."""
    def forward(self, x):
        return torch.exp(x)
def instantiate_transformation(name):
    """Build the output-transformation module for *name*.

    Supported names: "sqrt", "exp", "identity". Raises KeyError for
    anything else.
    """
    if name == "sqrt":
        return SquareRoot()
    if name == "exp":
        return Exponent()
    if name == "identity":
        return nn.Identity()
    raise KeyError(name)
class AGLHead(nn.Module):
    """Configurable AGL head with N upsample blocks and a shared tail.

    Produces full-resolution height/magnitude maps (no final interpolate —
    the upsample blocks are expected to restore resolution) plus the
    least-squares scale between them.
    """
    def __init__(
        self,
        encoder_channels: List[int],
        decoder_channels: List[int],
        embedding_size=64,
        dropout_rate=0.0,
        activation=ACT_RELU,
        num_upsample_blocks=1,
        agl_activation=ACT_RELU,
        agl_transformation="identity",
    ):
        super().__init__()
        in_channels = decoder_channels[0]
        self.dropout = nn.Dropout2d(dropout_rate, inplace=False)
        self.scale = ScaleHead()
        upsample_blocks = []
        for i in range(num_upsample_blocks):
            # round down to an even channel count, then 2x upsample quarters it
            input_channels = (in_channels // 2) * 2
            upsampled_channels = input_channels // 4
            upsample_blocks += [
                conv1x1(in_channels, input_channels),
                ResidualDeconvolutionUpsample2d(input_channels, scale_factor=2),
                nn.Conv2d(upsampled_channels, upsampled_channels, kernel_size=3, padding=1, bias=False),
                nn.BatchNorm2d(upsampled_channels),
                instantiate_activation_block(activation, inplace=True),
            ]
            in_channels = upsampled_channels
        self.upsample = nn.Sequential(*upsample_blocks)
        # NOTE(review): if num_upsample_blocks == 0 the loop never runs and
        # `upsampled_channels` below is undefined (NameError) — confirm the
        # configs always use >= 1.
        # +1 input channel: the GSD plane concatenated in forward()
        self.height = nn.Sequential(
            nn.Conv2d(upsampled_channels + 1, upsampled_channels, kernel_size=3, padding=1),
            instantiate_activation_block(activation, inplace=True),
            nn.Conv2d(upsampled_channels, 1, kernel_size=3, padding=1),
            instantiate_activation_block(agl_activation, inplace=True),
            instantiate_transformation(agl_transformation),
        )
        self.magnitude = nn.Sequential(
            nn.Conv2d(upsampled_channels, upsampled_channels, kernel_size=3, padding=1),
            instantiate_activation_block(activation, inplace=True),
            nn.Conv2d(upsampled_channels, 1, kernel_size=3, padding=1),
            instantiate_activation_block(agl_activation, inplace=True),
            instantiate_transformation(agl_transformation),
        )
    def forward(self, rgb, encoder_feature_maps, decoder_feature_maps, gsd, orientation_outputs):
        # Take the finest feature map from the decoder
        x = decoder_feature_maps[0]
        x = self.upsample(x)
        # broadcast the per-sample GSD scalar to a (B, 1, H, W) plane
        gsd = gsd.reshape(gsd.size(0), 1, 1, 1).expand((-1, -1, x.size(2), x.size(3)))
        height = self.height(torch.cat([x, gsd], dim=1))
        mag = self.magnitude(x)
        scale = self.scale(mag, height)
        return {
            OUTPUT_AGL_MASK: height,
            OUTPUT_MAGNITUDE_MASK: mag,
            OUTPUT_VFLOW_SCALE: scale,
        }
class HyperColumnAGLHead(nn.Module):
    """AGL head that predicts from every decoder level and fuses by summation.

    One SimpleRegressionHead per decoder level for height (with the GSD
    plane concatenated) and one per level for magnitude; level predictions
    are summed via FPNFuseSum, ReLU'd, and upsampled to RGB resolution.
    """
    def __init__(
        self, encoder_channels: List[int], decoder_channels: List[int], embedding_size=64, dropout_rate=0.0, activation=ACT_RELU
    ):
        super().__init__()
        # +1 input channel per level: the broadcast GSD plane
        self.height = nn.ModuleList(
            [
                SimpleRegressionHead(
                    in_channels=in_channels + 1,
                    out_channels=1,
                    dropout_rate=dropout_rate,
                    embedding_size=embedding_size,
                    activation=activation,
                )
                for in_channels in decoder_channels
            ]
        )
        self.magnitude = nn.ModuleList(
            [
                SimpleRegressionHead(
                    in_channels=in_channels,
                    out_channels=1,
                    dropout_rate=dropout_rate,
                    embedding_size=embedding_size,
                    activation=activation,
                )
                for in_channels in decoder_channels
            ]
        )
        self.fuse = FPNFuseSum(mode="bilinear", align_corners=True)
        self.scale = ScaleHead()
    def forward(self, rgb, encoder_feature_maps, decoder_feature_maps, gsd, orientation_outputs):
        # per-level height predictions, each fed its feature map + GSD plane
        heights = [
            height_layer(
                torch.cat(
                    [feature_map, gsd.reshape(gsd.size(0), 1, 1, 1).expand((-1, -1, feature_map.size(2), feature_map.size(3)))],
                    dim=1,
                )
            )
            for (feature_map, height_layer) in zip(decoder_feature_maps, self.height)
        ]
        magnitude = [mag_layer(feature_map) for (feature_map, mag_layer) in zip(decoder_feature_maps, self.magnitude)]
        # ReLU after fusion keeps the summed maps non-negative
        height = F.relu(self.fuse(heights), inplace=True)
        mag = F.relu(self.fuse(magnitude), inplace=True)
        scale = self.scale(mag, height)
        return {
            OUTPUT_AGL_MASK: F.interpolate(height, size=rgb.size()[2:], mode="bilinear", align_corners=True),
            OUTPUT_MAGNITUDE_MASK: F.interpolate(mag, size=rgb.size()[2:], mode="bilinear", align_corners=True),
            OUTPUT_VFLOW_SCALE: scale,
        }
class BasicOrientationHead(nn.Module):
    """Predicts the vertical-flow direction from the deepest encoder map.

    Global-average-pools the last encoder feature map and regresses a 2-D
    direction vector; the angle is derived from that vector.
    """
    def __init__(self, encoder_channels: List[int], decoder_channels: List[int], dropout_rate=0.0, activation=ACT_RELU):
        super().__init__()
        in_channels = encoder_channels[-1]
        self.pool = GlobalAvgPool2d(flatten=True)
        self.orientation = nn.Sequential(
            nn.Dropout(p=dropout_rate, inplace=True),
            nn.Linear(in_channels, in_channels),
            instantiate_activation_block(activation, inplace=True),
            nn.Linear(in_channels, 2),
        )
    def forward(self, rgb, encoder_feature_maps, decoder_feature_maps, gsd):
        features = self.pool(encoder_feature_maps[-1])
        # direction is the raw (unnormalized) 2-vector output
        direction = self.orientation(features)
        angle = tensor_vector2angle(direction)
        return {OUTPUT_VFLOW_DIRECTION: direction, OUTPUT_VFLOW_ANGLE: angle}
class BasicOrientationScaleHead(nn.Module):
    """Predicts flow direction AND scale from the deepest encoder map.

    Convolutional branches run on the spatial map; pooling happens after
    normalization (direction) / ReLU (scale), unlike BasicOrientationHead
    which pools first.
    """
    def __init__(self, encoder_channels: List[int], decoder_channels: List[int], dropout_rate=0.0, activation=ACT_RELU):
        super().__init__()
        in_channels = encoder_channels[-1]
        self.pool = GlobalAvgPool2d(flatten=True)
        self.orientation = nn.Sequential(
            nn.Dropout2d(p=dropout_rate, inplace=False),
            nn.Conv2d(in_channels, in_channels, kernel_size=1),
            instantiate_activation_block(activation, inplace=True),
            # kernel_size=3 with no padding shrinks the map by 2 px per side
            nn.Conv2d(in_channels, 2, kernel_size=3),
        )
        # NOTE(review): this branch uses nn.Dropout while the orientation
        # branch uses nn.Dropout2d — possibly intentional, confirm.
        self.scale = nn.Sequential(
            nn.Dropout(p=dropout_rate, inplace=False),
            nn.Conv2d(in_channels, in_channels, kernel_size=1),
            instantiate_activation_block(activation, inplace=True),
            nn.Conv2d(in_channels, 1, kernel_size=3),
        )
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                torch.nn.init.kaiming_normal_(
                    m.weight, a=0.01, mode="fan_out", nonlinearity="relu" if activation == ACT_RELU else "leaky_relu"
                )
                torch.nn.init.constant_(m.bias, 0.0001)
    def forward(self, rgb, encoder_feature_maps, decoder_feature_maps, gsd):
        features = encoder_feature_maps[-1]
        # per-pixel unit vectors averaged into one direction per sample
        direction = self.pool(F.normalize(self.orientation(features)))
        # ReLU before pooling keeps the scale estimate non-negative
        scale = self.pool(F.relu(self.scale(features)))
        angle = tensor_vector2angle(direction)
        return {OUTPUT_VFLOW_DIRECTION: direction, OUTPUT_VFLOW_ANGLE: angle, OUTPUT_VFLOW_SCALE: scale}
class ScaleHead(nn.Module):
    """Per-sample least-squares scale between magnitude and height maps.

    Solves scale = (h . m) / (h . h + eps) for each batch element, where
    h and m are the flattened height and magnitude maps. Runs in fp32
    (autocast disabled) for numeric stability.
    """
    def __init__(self):
        super().__init__()

    @torch.cuda.amp.autocast(False)
    def forward(self, mag, height):
        m_flat = torch.flatten(mag, start_dim=1).float()
        h_flat = torch.flatten(height, start_dim=1).float()
        batch, length = m_flat.shape[0], m_flat.shape[1]
        # h . h per sample, regularized so an all-zero height cannot divide by zero
        hh = torch.bmm(
            h_flat.view(batch, 1, length),
            h_flat.view(batch, length, 1),
        )
        denom = torch.squeeze(hh) + 0.01
        # pseudo-inverse row of h, then project m onto it
        pinv = h_flat / denom.view(batch, 1)
        scale = torch.bmm(pinv.view(batch, 1, length), m_flat.view(batch, length, 1))
        return torch.squeeze(scale, dim=2)
|
[
"klwetstone@gmail.com"
] |
klwetstone@gmail.com
|
fb3c0ac7bfa0dcea0555b2ff32f1485a6926b6e0
|
033fc1b8735f5631c712c639abccea610277a0d7
|
/Configuration.py
|
07a486d50b58e37f3c7e0a25870d5c836fc56e51
|
[] |
no_license
|
MolotovCherry/Yandere-Simulator-Skin-Switcher
|
a7e70fb85b124eb90b4637ccac834d797afd16ea
|
6b17eeb1aa0cd2b2f70ad0f7553d6a599e7576ff
|
refs/heads/master
| 2022-11-25T09:02:01.167000
| 2020-07-30T09:09:42
| 2020-07-30T09:09:42
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,033
|
py
|
import os
import sys
from configparser import ConfigParser
def singleton(cls):
    """Class decorator caching a single shared instance on the class.

    The first call constructs the object (forwarding any arguments) and
    stores it as a class attribute; every later call returns that same
    cached instance.
    """
    def inner(*args, **kwargs):
        cached = getattr(cls, '__instance', None)
        if cached is None:
            cached = cls(*args, **kwargs)
            cls.__instance = cached
        return cached
    return inner
@singleton
class Configuration(ConfigParser):
    """Singleton ConfigParser bound to a config.ini next to the program.

    On construction it reads the file, creates it with defaults when
    missing, and repairs invalid or missing entries in place.
    """
    def __init__(self):
        super().__init__()
        # sys.frozen is set by freezers (e.g. PyInstaller): use the exe's
        # directory then; otherwise the directory of this source file.
        if hasattr(sys, 'frozen'):
            configFile = os.path.dirname(os.path.abspath(sys.executable))
        else:
            configFile = os.path.dirname(os.path.abspath(__file__))
        self.configFile = os.path.join(configFile, 'config.ini')
        # we use this for writing a new file, or repairing damaged entries
        self.defaults = {
            'default': {
                'style': 'dark',
                'gameroot': '',
                'currentskin': 'default'
            }
        }
        # per-option validation spec: a list means an allowed-values set,
        # a string names the expected value kind ('string'/'number'/'bool')
        self.validValues = {
            'default': {
                'style': ['light', 'dark'],
                'gameroot': 'string',
                'currentskin': 'string'
            }
        }
        # ConfigParser.read returns the list of files successfully parsed
        read = self.read(self.configFile)
        if not read:
            # config file doesn't exist, write defaults
            self.writeDefaults()
        else:
            if self.repairConfig():
                # save the fixed config
                self.saveConfig()
    def writeDefaults(self):
        """Populate every section from self.defaults and persist to disk."""
        for key, value in self.defaults.items():
            self[key] = value
        self.saveConfig()
    def saveConfig(self):
        """Write the current in-memory configuration to config.ini."""
        with open(self.configFile, 'w') as confile:
            self.write(confile)
    def repairConfig(self):
        """Validate and fix loaded options; return True if anything changed.

        NOTE(review): iterating self[k1].items() means an option present in
        the file but absent from validValues raises KeyError — confirm that
        unknown keys cannot occur, or guard the lookup.
        """
        # did we repair anything?
        dirtyConfig = False
        # verify integrity of options, and if not exist, then reset the option
        for k1, v1 in self.defaults.items():
            if k1 in self:
                for k2, v2 in self[k1].items():
                    if isinstance(self.validValues[k1][k2], list):
                        # make sure it is one of the allowed values
                        if self[k1][k2] not in self.validValues[k1][k2]:
                            self[k1][k2] = self.defaults[k1][k2]
                            dirtyConfig = True
                    elif isinstance(self.validValues[k1][k2], str):
                        # value can be anything, but it must be of a certain type
                        if self.validValues[k1][k2] == 'string':
                            # i don't care what the string is
                            if not isinstance(self[k1][k2], str):
                                self[k1][k2] = self.defaults[k1][k2]
                                dirtyConfig = True
                        elif self.validValues[k1][k2] == 'number':
                            # the number is represented in a string,
                            # so verify the string is essentially a number
                            try:
                                int(self[k1][k2])
                            except ValueError:
                                self[k1][k2] = self.defaults[k1][k2]
                                dirtyConfig = True
                        elif self.validValues[k1][k2] == 'bool':
                            # this is also a string
                            if self[k1][k2].lower() not in ('true', 'false'):
                                self[k1][k2] = self.defaults[k1][k2]
                                dirtyConfig = True
        # fill in any possible gaps with default values
        # in this way we'll never have a KeyError, no matter
        # if the real config file is damaged
        for k1, v1 in self.defaults.items():
            if k1 not in self:
                self[k1] = v1
                dirtyConfig = True
            else:
                for k2, v2 in v1.items():
                    if k2 not in self[k1]:
                        self[k1][k2] = v2
                        dirtyConfig = True
        return dirtyConfig
|
[
"cherryleafroad@gmail.com"
] |
cherryleafroad@gmail.com
|
08f80bf55016aa228f9cc5eb54a1e81918a05532
|
87645df4118bc0c7c63bb4cca5d38cc2967fa6ba
|
/assignment18.py
|
9842084f08b0fe1127e3b6f908fcfc5ddf2e51e6
|
[] |
no_license
|
manpreet01multani/acadview_assignments
|
e0559f140d95b293759b59b5284782b1950ed567
|
52fc9e97396147f50aea3b3025fa2975a75a1756
|
refs/heads/master
| 2020-03-19T11:22:13.006177
| 2018-06-24T11:15:09
| 2018-06-24T11:15:09
| 136,450,742
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 722
|
py
|
# 1. Tkinter phonebook demo: a scrollable Listbox of names; double-clicking
# an entry is meant to display the name and its phone number.
from tkinter import *
r=Tk()
# NOTE(review): the key 'reet' appears twice in this literal — the second
# value (345) silently overwrites the first (234).
d={"Meet":789,"jeet":456,"heer":786,"reet":234,"geet":987,"geen":675,"reet":345}
label=Label(r,text="Name",font='size,10')
label.pack(side="bottom")
# NOTE(review): `label` is rebound here, so the first Label object is no
# longer referenced by this variable (both remain packed, though).
label=Label(r,text="phone number")
label.pack(side="top")
def handleList(event):
    # Look up the active listbox entry and show its number.
    # NOTE(review): label1 and label2 are never defined anywhere in this
    # script, so a double-click raises NameError — likely the two Labels
    # above were meant to be named label1/label2.
    label3=mn.get(ACTIVE)
    print(label3)
    ph=d.get(label3)
    global label1,label2
    label1.config(text=label3)
    label2.config(text=ph)
mn=Listbox(r)
mn.config(selectmode=EXTENDED)
# attach a vertical scrollbar to the listbox (two-way binding)
scrollbar=Scrollbar(r)
scrollbar.config(command=mn.yview)
mn.config(yscrollcommand=scrollbar.set)
scrollbar.pack(side=RIGHT,fill=Y)
mn.pack(side=LEFT,expand=YES,fill=BOTH)
mn.bind('<Double-1>',handleList)
# populate the listbox with the names (keys) only
for k,v in d.items():
    mn.insert('end',k)
r.mainloop()
|
[
"manpreet01multani@gmail.com"
] |
manpreet01multani@gmail.com
|
0c1a13795be34e0ee6d38ececfad4eb66f28e0c7
|
c37cb658a5c5fb3dc1aafa959c86866dc1ed9e39
|
/第八章/2-sock_server_client.py
|
5e8660e8bdc147a7b1892295f041d469c57d1598
|
[] |
no_license
|
Dragonet-D/python3-study
|
6fe7a7f049dd8067593c865e58d125e12b1de55a
|
b057d75a2421798d27e2d15e0eee35d9eb8dbf27
|
refs/heads/master
| 2021-10-10T21:18:30.319066
| 2021-10-01T07:52:27
| 2021-10-01T07:52:27
| 98,364,215
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 230
|
py
|
import socket

# Minimal interactive TCP client: reads commands from stdin, sends each to a
# server expected on localhost:9999, and prints the reply.
client = socket.socket()
client.connect(('localhost', 9999))

while True:
    cmd = input('>>:').strip()
    if len(cmd) == 0: continue
    # BUG FIX: Python 3 sockets transport bytes, so send(str) raises
    # TypeError; encode the command (and decode the reply for display).
    client.send(cmd.encode())
    cmd_res = client.recv(1024)
    print(cmd_res.decode())

# NOTE: unreachable while the loop above runs forever; kept so the handle is
# closed if the loop is ever given an exit condition.
client.close()
|
[
"Dragonet-D@163.com"
] |
Dragonet-D@163.com
|
a1e1f711eb8af4ca585330198a42446ff9f904ef
|
8afeb5e41d1efbce2b8feb8f557f263120db91d3
|
/api/search_rest.py
|
736b8df13c0c8c9d8ca68368185325e599ceb723
|
[] |
no_license
|
panos1995/msccs-t23
|
7bbe9ed4c9172f3f72c2ce47e14db639a95b054b
|
228aaa71640f186ceb8e85c295c13d9df36611a5
|
refs/heads/master
| 2020-04-03T15:23:35.299920
| 2018-11-05T15:44:07
| 2018-11-05T15:44:07
| 155,361,035
| 0
| 0
| null | 2018-10-30T09:48:21
| 2018-10-30T09:48:21
| null |
UTF-8
|
Python
| false
| false
| 1,162
|
py
|
import datetime
import time
import tweepy
import pymongo
import sys
import json
from bson import json_util, ObjectId
def query_search(query):
    """Search recent English tweets matching *query* for up to ~2 seconds.

    Returns a tuple (result_list, id_list): result_list holds dicts of
    {"Postid": <id string>, "Text": <tweet text>} and id_list holds the
    id strings alone. Exits the process if the API cannot be constructed.
    """
    # SECURITY(review): credentials are hard-coded in source — move them to
    # environment variables or a secrets store before deploying.
    access_token = "IHpSjYd5AuCdDRZTaGiMOwHUJ"
    access_token_secret = "FNUvxez9N9vBzY72HiZcukHQqVqO0ZiV498qyaYDxaV5nKFSgu"
    auth = tweepy.AppAuthHandler(access_token, access_token_secret)
    api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)
    if (not api):
        print("Can't Authenticate")
        sys.exit(-1)
    time_started = time.time()
    result_list = []
    id_list = []
    # You can change the count and the time limit of the search.
    # Moreover we could use the Stream API for a truly real-time feed.
    for tweet in tweepy.Cursor(api.search, q=query, lang="en", count=10).items():
        # stop after ~2 seconds of collection
        if (time.time() > time_started + 2):
            return result_list, id_list
        result_list.append({"Postid": tweet._json["id_str"], "Text": tweet._json["text"]})
        id_list.append(tweet._json["id_str"])
    # BUG FIX: previously the function fell off the end (returning None) when
    # the cursor was exhausted before the time limit; always return the lists.
    return result_list, id_list
|
[
"noreply@github.com"
] |
panos1995.noreply@github.com
|
3f259779a113f38727e5e331c041593a3830edfe
|
caaf56727714f8c03be38710bc7d0434c3ec5b11
|
/tests/components/telegram/test_notify.py
|
7488db49d9ea58db8f78e93cab0842fa686ee119
|
[
"Apache-2.0"
] |
permissive
|
tchellomello/home-assistant
|
c8db86880619d7467901fd145f27e0f2f1a79acc
|
ed4ab403deaed9e8c95e0db728477fcb012bf4fa
|
refs/heads/dev
| 2023-01-27T23:48:17.550374
| 2020-09-18T01:18:55
| 2020-09-18T01:18:55
| 62,690,461
| 8
| 1
|
Apache-2.0
| 2023-01-13T06:02:03
| 2016-07-06T04:13:49
|
Python
|
UTF-8
|
Python
| false
| false
| 1,598
|
py
|
"""The tests for the telegram.notify platform."""
from os import path
from homeassistant import config as hass_config
import homeassistant.components.notify as notify
from homeassistant.components.telegram import DOMAIN
from homeassistant.const import SERVICE_RELOAD
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
async def test_reload_notify(hass):
    """Verify we can reload the notify service."""
    # Stub out the telegram_bot integration setup so no real bot is needed.
    with patch("homeassistant.components.telegram_bot.async_setup", return_value=True):
        assert await async_setup_component(
            hass,
            notify.DOMAIN,
            {
                notify.DOMAIN: [
                    {
                        "name": DOMAIN,
                        "platform": DOMAIN,
                        "chat_id": 1,
                    },
                ]
            },
        )
        await hass.async_block_till_done()
    # the telegram notify service should now be registered
    assert hass.services.has_service(notify.DOMAIN, DOMAIN)
    yaml_path = path.join(
        _get_fixtures_base_path(),
        "fixtures",
        "telegram/configuration.yaml",
    )
    # Point the reload at a fixture config that renames the service, then
    # trigger a reload of the integration.
    with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_RELOAD,
            {},
            blocking=True,
        )
        await hass.async_block_till_done()
    # the old service name is gone and the fixture's name took its place
    assert not hass.services.has_service(notify.DOMAIN, DOMAIN)
    assert hass.services.has_service(notify.DOMAIN, "telegram_reloaded")
def _get_fixtures_base_path():
return path.dirname(path.dirname(path.dirname(__file__)))
|
[
"noreply@github.com"
] |
tchellomello.noreply@github.com
|
6bc05f1c24acd83be18b9337a531c43c42f39d63
|
6e928e1651713f945c980bca6d6c02ac5dce249a
|
/task1/5.py
|
64b92c59d071daed1a062f5bbc9c61742d9564d9
|
[] |
no_license
|
Akzhan12/pp2
|
97334158b442383df32583ee6c0b9cab92a3ef45
|
56e33fd9119955ea8349172bf3f2cc5fbd814142
|
refs/heads/main
| 2023-06-28T08:30:11.068397
| 2021-07-29T08:34:43
| 2021-07-29T08:34:43
| 337,359,826
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 211
|
py
|
# Rotate a list of integers (first stdin line) by n positions (second line)
# and print the rotated list.
a = list(map(int,input().split()))
# Python's % with a positive modulus always yields a non-negative result,
# so n is normalized into [0, len(a)) here.
n = int(input()) % len(a)
if n < 0:
    # NOTE(review): unreachable — n can never be negative after the modulo
    # above; this branch (a left rotation) is dead code.
    n = abs(n)
    print(*a[n:],end = " ")
    print(*a[0:n])
else:
    # right rotation: last n elements first, then the rest.
    # NOTE(review): when n == 0, a[-0:] is the whole list and a[0:-0] is
    # empty, so the output gains a trailing space — TODO confirm acceptable.
    n = abs(n)
    print(*a[-n:],end = " ")
    print(*a[0:-n])
|
[
"noreply@github.com"
] |
Akzhan12.noreply@github.com
|
496d7306eb09257e9b34d99372293f4febf58eda
|
ae912cfda71c89db5cb9d01e87ffc54c1dcb328d
|
/jobs/migrations/0002_project_project_tech.py
|
a98c0c0da537a40a8c78c8ff8d2ce677e0df106b
|
[] |
no_license
|
harshitksrivastava/portfolio-project
|
af6aa973d04a7be02c4984c657904ffb8302ba2e
|
ff36b186091e5382b2fb0567e90313aad71224fe
|
refs/heads/master
| 2022-12-12T19:10:49.182860
| 2020-12-17T12:07:49
| 2020-12-17T12:07:49
| 199,324,804
| 0
| 0
| null | 2022-11-22T06:36:29
| 2019-07-28T18:46:15
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 477
|
py
|
# Generated by Django 3.0.8 on 2020-08-02 12:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add a `project_tech` foreign key from Project to Technology."""
    dependencies = [
        ('jobs', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='project',
            name='project_tech',
            # NOTE(review): default=None on a FK that is presumably
            # non-nullable will fail when applied to a table with existing
            # rows — confirm the table was empty at migration time.
            field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, to='jobs.Technology'),
        ),
    ]
|
[
"harsh0311@gmail.com"
] |
harsh0311@gmail.com
|
02d83f4fd8430f4c51e99d05bd82592703152010
|
6e870a237de0a55f5b04f0ef4f2689e7b6084287
|
/administrator/forms.py
|
63c3f42b05a45d84ff4d0619e68354b6d39f9110
|
[
"MIT"
] |
permissive
|
Arose114/e-learning-with-django
|
0acaba610632a31688249ae2dbdaab629444ae07
|
bb463204e1d3c4a7bca5b7d4836adb06a85403fb
|
refs/heads/main
| 2023-06-27T17:24:32.188585
| 2021-07-20T08:36:02
| 2021-07-20T08:36:02
| 375,014,280
| 0
| 0
|
MIT
| 2021-06-08T13:13:13
| 2021-06-08T13:13:13
| null |
UTF-8
|
Python
| false
| false
| 1,829
|
py
|
from account.forms import FormSettings
from student.models import *
from staff.models import Staff
from django import forms
from .models import Settings
class AddCourseForm(FormSettings):
    """Admin form for creating/editing a Course; exposes all model fields."""
    class Meta:
        model = Course
        fields = "__all__"
class AddDepartmentForm(FormSettings):
    """Admin form for creating/editing a Department; exposes all model fields."""
    class Meta:
        model = Department
        fields = "__all__"
class AddSessionForm(FormSettings):
    """Admin form for creating/editing a Session; exposes all model fields."""
    class Meta:
        model = Session
        fields = "__all__"
class AddStudentForm(FormSettings):
    """Admin form for adding a Student, restricted to non-general departments.

    The `admin` field (the linked user account) is managed elsewhere and
    therefore excluded.
    """
    # NOTE(review): the queryset filter is defined at import time; Django's
    # ModelChoiceField re-evaluates querysets per render, but the filter
    # itself is fixed — confirm is_general departments should never appear.
    department_list = Department.objects.filter(is_general=False)
    department = forms.ModelChoiceField(
        label="Choose Department", queryset=department_list, required=True)
    class Meta:
        model = Student
        exclude = ['admin']
        # https://stackoverflow.com/questions/22846048/django-form-as-p-datefield-not-showing-input-type-as-date
        widgets = {
            'dob': forms.DateInput(attrs={'type': 'date'})
        }
class AddStaffForm(FormSettings):
    """Admin form for adding Staff, restricted to non-general departments.

    The `admin` field (the linked user account) is managed elsewhere and
    therefore excluded.
    """
    department_list = Department.objects.filter(is_general=False)
    department = forms.ModelChoiceField(
        label="Choose Department", queryset=department_list, required=True)
    class Meta:
        model = Staff
        exclude = ['admin']
class SettingsForm(FormSettings):
    """Form for the site-wide Settings model (enforced singleton).

    Validation rejects creating a second Settings row: only the existing
    one may be updated.
    """
    def clean(self):
        # Then call the clean() method of the super class
        cleaned_data = super(SettingsForm, self).clean()
        # no primary key on the bound instance means this would CREATE a new
        # row — disallowed once a Settings row already exists
        if not self.instance.pk and Settings.objects.exists():
            # if not self.pk and Settings.objects.exists():
            raise forms.ValidationError("Update Site Settings Instead")
        # ... do some cross-fields validation for the subclass
        # Finally, return the cleaned_data
        return cleaned_data
    class Meta:
        model = Settings
        fields = "__all__"
|
[
"jobowonubi@gmail.com"
] |
jobowonubi@gmail.com
|
4e5784eb3aa014b7f5494320c32dd4b3d0a53444
|
2997a8d56f74e0cc4e949fb20ff7d17be75b2599
|
/百度无人驾驶比赛模型/scnn/scnn.py
|
62fcd8de6c3881491f31d3120f7f99a61881cd53
|
[] |
no_license
|
GuangyanZhang/Paddle-Paddle_SCNN-Deeplabv3-bisenet-icnet
|
039b3856ed67e177d351cbe83e73f352a4a97696
|
326439194bb24ec112c74d9de2b6196add5a76c0
|
refs/heads/master
| 2020-04-30T09:17:23.010395
| 2019-04-07T07:13:33
| 2019-04-07T07:13:33
| 176,742,644
| 18
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 23,219
|
py
|
import random
import cv2
import numpy as np
import paddle
import PIL.Image
import paddle.fluid as fluid
import time
import os
# Order CUDA devices by PCI bus (so indices match nvidia-smi), expose only
# GPU 0 to this process, and silence TensorFlow INFO/WARNING log output.
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2"
def resize_image(img, target_size1, target_size2):
    """Resize *img* to (target_size1 rows, target_size2 cols).

    Nearest-neighbor interpolation is used so label maps keep their
    discrete class values.
    """
    # cv2.resize takes (width, height), hence the swapped argument order
    return cv2.resize(img, (target_size2, target_size1), interpolation=cv2.INTER_NEAREST)
def batch_normalization(x, relu = False, name = ''):
    """Apply fluid batch norm to *x*, optionally fusing a ReLU activation."""
    act = 'relu' if relu else None
    return fluid.layers.batch_norm(x, act=act, name=name)
def get_wav_list(filename):
    """Parse a manifest file of "<path> <label>" lines.

    Each non-empty line holds a file path and a label separated by a space.
    Returns two parallel lists: (file_paths, labels). Lines missing the
    second token raise IndexError, as before.
    """
    dic_filelist = []
    list_wavmark = []
    # BUG FIX: use a context manager so the handle is closed even if a
    # malformed line raises while parsing (the old open/close leaked then).
    with open(filename, 'r') as txt_obj:
        for line in txt_obj.read().split('\n'):
            if (line != ''):
                txt_l = line.split(' ')
                dic_filelist.append(txt_l[0])
                list_wavmark.append(txt_l[1])
    return dic_filelist, list_wavmark
def get_data(Height, Width,train_image,label_image):
    """Load one (image, label) pair and resize both to (Height, Width).

    NOTE(review): the dataset root '/home/zhngqn/zgy_RUIMING/baidu_city' is
    hard-coded — consider making it configurable. Paths in train_image /
    label_image are presumably relative to that root; confirm.
    """
    # train_image = dic_filelist[num]
    # train_image = list_wavmark[num]
    train_data = cv2.imread('/home/zhngqn/zgy_RUIMING/baidu_city'+train_image)
    # train_data = PIL.Image.open(train_image)
    image = resize_image(train_data, Height, Width)
    # print('image:', image.shape)
    # train_data = fluid.layers.resize_bilinear(train_data, out_shape=[1024,512])
    label_image = cv2.imread('/home/zhngqn/zgy_RUIMING/baidu_city'+label_image)
    # print('label_image_before:', label_image.shape)
    label_image = resize_image(label_image, Height, Width)
    # print('label_image_after:', label_image.shape)
    # label_image = PIL.Image.open(label_image)
    # keep a single channel of the label image — presumably all three BGR
    # channels carry the same class index; TODO confirm.
    return image, label_image[:, :, 0]
"""
批量读取图片
"""
def get_random():
    """Pick a random sample index in [1, 21913] if it is present.

    NOTE(review): `dic_filelist` is built as a *list* by get_wav_list, so
    calling .keys() on it would raise AttributeError — this function looks
    like dead/stale code from an earlier dict-based design.
    NOTE(review): the return types are inconsistent — an int on success but
    a tuple ([-1], [-1]) on failure; callers cannot treat these uniformly.
    """
    num = random.randint(1, 21913)
    if (num in dic_filelist.keys()):
        return num
    else:
        return [-1], [-1]
def data_generator(Height, Width, batch_size,dic_filelist,list_wavmark):
    """Assemble one random training batch of images (X) and label maps (Y).

    X has shape (batch_size, Height, Width, 3) float32; Y has shape
    (batch_size, Height, Width) int64.
    """
    X = np.zeros((batch_size, Height, Width, 3), dtype=np.float32)  # batch_size train data
    Y = np.zeros((batch_size, Height, Width), dtype=np.int64)  # batch_size label data
    for i in range(batch_size):
        num = random.randint(1, 21913)
        # NOTE(review): this retries only once, and `num in dic_filelist`
        # tests membership among the list's *values* while dic_filelist[num]
        # below indexes by *position* — the check and the use disagree;
        # an out-of-range num would still raise IndexError. Confirm intent.
        if num in dic_filelist:
            pass
        else:
            num = random.randint(1, 21913)
        #ran_num = random.randint(0, 3999)  # generate a random number
        train_data, label_image = get_data(Height, Width,dic_filelist[num],list_wavmark[num])
        # print('train_data:', train_data.shape)
        # print('label_image:', label_image.shape)
        X[i, 0:len(train_data)] = train_data
        Y[i, 0:len(label_image)] = label_image
    # X = X[:, :, :, ::-1].astype(np.float32) / (255.0 / 2) - 1
    return X, Y
"""
conv2d + BN(TRUE OR FALSE) + RELU+pool
"""
def conv_layers(layers_name, data, num_filters, num_filter_size=3, stride=2, pool_stride=2, padding=1, bias_attr=True, act="relu", Use_BN=True):
    """conv2d -> optional batch norm -> ReLU -> 2x2 max pool.

    NOTE: the `act` parameter is accepted for interface compatibility but
    unused — the conv always runs without activation and an explicit ReLU
    follows the (optional) batch norm.
    """
    conv_out = fluid.layers.conv2d(
        input=data,
        num_filters=num_filters,
        filter_size=num_filter_size,
        stride=stride,
        padding=padding,
        bias_attr=bias_attr,
        act=None,
        name=layers_name + '_conv2d')
    normalized = fluid.layers.batch_norm(input=conv_out, name=layers_name + '_BN') if Use_BN else conv_out
    activated = fluid.layers.relu(normalized, name=layers_name + '_relu')
    return fluid.layers.pool2d(
        input=activated,
        pool_size=2,
        pool_type='max',
        pool_stride=pool_stride,
        global_pooling=False)
#return out_put
class ResNet():
    """Small ResNet-style backbone built from bottleneck blocks.

    2X
    4X
    8X
    16X
    32X
    output16 = 16X
    output32 = 32X
    layers = 22 (2+4*5)

    NOTE(review): the comments above describe downsampling factors; the
    `net` method below actually applies a stem plus three stride-2
    bottleneck blocks — confirm the factor annotations against the code.
    """
    def __init__(self, is_test=False):
        # is_test is forwarded to batch_norm (inference vs training mode)
        self.is_test = is_test
    def net(self, input):
        """Build the backbone graph and return the deepest feature map."""
        # if layers == 22:
        # depth = [1, 1, 1, 1]
        conv = self.conv_bn_layer(input=input, num_filters=64, filter_size=7, stride=1, act='relu', trainable=False)
        conv = fluid.layers.pool2d(input=conv, pool_size=3, pool_stride=1, pool_padding=1, pool_type='max')
        # conv2 = self.bottleneck_block(input=conv, num_filters=64, stride=2, trainable=False)#2 X
        ## output channels = num_filters * 2
        conv2 = self.bottleneck_block(input=conv, num_filters=64, stride=2, trainable=False)  # 42 X
        conv4 = self.bottleneck_block(input=conv2, num_filters=128, stride=2, trainable=False)  # 8 X
        conv8 = self.bottleneck_block(input=conv4, num_filters=128, stride=2, trainable=False)  # 16 X ,
        return conv8
    def conv_bn_layer(self,
                      input,
                      num_filters,
                      filter_size,
                      stride=1,
                      groups=1,
                      act=None,
                      trainable=True):
        """conv2d followed by batch norm; `trainable` freezes the params."""
        param_attr = fluid.ParamAttr(trainable=trainable)
        conv = fluid.layers.conv2d(
            input=input,
            num_filters=num_filters,
            filter_size=filter_size,
            stride=stride,
            padding=(filter_size - 1) // 2,  # may be wrong; padding might need redesign???
            groups=groups,
            act=None,
            bias_attr=True,
            param_attr=param_attr)
        return fluid.layers.batch_norm(input=conv, act=act, is_test=self.is_test, param_attr=param_attr)
    def shortcut(self, input, ch_out, stride, trainable=True):
        """Identity shortcut, or a 1x1 projection when shape must change."""
        ch_in = input.shape[1]
        if ch_in != ch_out or stride != 1:
            return self.conv_bn_layer(input, ch_out, 1, stride=stride, trainable=trainable)
        else:
            return input
    def bottleneck_block(self, input, num_filters, stride, trainable=True):
        """1x1 -> 3x3(stride) -> 1x1 bottleneck with residual add + ReLU.

        Output channel count is num_filters * 2.
        """
        conv0 = self.conv_bn_layer(
            input=input,
            num_filters=num_filters,
            filter_size=1,
            stride=1,
            act='relu',
            trainable=trainable)
        conv1 = self.conv_bn_layer(
            input=conv0,
            num_filters=num_filters,
            filter_size=3,
            stride=stride,
            act='relu',
            trainable=trainable)
        conv2 = self.conv_bn_layer(
            input=conv1,
            num_filters=num_filters * 2,
            filter_size=1,
            stride=1,
            act=None,
            trainable=trainable)
        short = self.shortcut(input, num_filters * 2, stride, trainable=trainable)
        # print('element_2:', [short, conv2])
        return fluid.layers.elementwise_add(x=conv2, y=short, act='relu')
"""每片之间的卷积"""
def SCNN(layers_name, data, num_filters, filter_size, padding, stride=1, bias_attr=True, act="relu"):
return fluid.layers.conv2d(input=data,
num_filters=num_filters,
filter_size=filter_size,
stride=stride,
padding=padding,
bias_attr=bias_attr,
act=act,
name=layers_name + '_conv2d')
# out_put = fluid.layers.relu(conv2d,name = layers_name + '_relu')
"""
由上到下,下到上的卷积
"""
def SCNN_D_U(input_data, C_size,H_size, W_size):  # H_size: number of row slices
    """SCNN downward + upward passes over horizontal slices.

    Transposes NCHW -> NHCW, slices the feature map into H_size rows, and
    propagates information top-to-bottom then bottom-to-top: each slice is
    updated by adding a convolution of the previous (already-updated)
    slice, with a ReLU fused into the add. Returns the slices concatenated
    back along the (transposed) height axis.
    """
    # print('input_data',input_data)
    x_transposed = fluid.layers.transpose(input_data, perm=[0, 2, 1, 3])  ## NCHW --> NHCW
    # print('x_transposed', x_transposed)
    axes1 = [1]
    layers_concat = list()
    layers_result_concat = list()
    """SCNN_D"""
    # lenth = range(0,H_size)
    # downward pass: top row stays raw; each following row receives a
    # convolved message from the row above it
    for i in range(0, H_size):
        result = fluid.layers.slice(input=x_transposed, axes=axes1, starts=[i], ends=[i + 1])  # take slice i
        # convolution step
        if i == 0:  # the first slice is kept as the raw values
            layers_concat.append(result)  # store the slice
            # scnn_covn2d_last = result
            # scnn_covn2d = SCNN_D('scnn_covn2d',result,num_filters =1,filter_size = [32,3],
            # padding=[0,1],stride=1,bias_attr = True,act = "relu")
        else:
            """片于片之间的卷积"""
            # convolve the previously updated slice to form the message
            scnn_covn2d = SCNN('scnn_covn_D', layers_concat[-1], num_filters=1, filter_size=[3, W_size],
                               padding=[1, 0], stride=1, bias_attr=True, act="relu")
            #print('scnn_covn2d_before:',scnn_covn2d)
            # scnn_covn2d = fluid.layers.reshape(scnn_covn2d,[0,32])  # collapse to one dimension
            #print('scnn_covn2d',scnn_covn2d)
            scnn_covn2d = fluid.layers.reshape(scnn_covn2d, [0, C_size],)  # collapse to one dimension
            #print('scnn_covn2d_after:',scnn_covn2d)
            # print('result_before:', result)
            result = fluid.layers.transpose(result, perm=[0, 2, 1, 3])
            # print('result_after:', result)
            # print("result_after:", result)
            # print("scnn_covn2d:", scnn_covn2d)
            scnn_covn2d = fluid.layers.elementwise_add(result, scnn_covn2d, axis=0,act='relu')  # broadcast over the trailing dims
            # scnn_covn2d = fluid.layers.elementwise_add(result, scnn_covn2d)
            scnn_covn2d = fluid.layers.transpose(scnn_covn2d, perm=[0, 2, 1, 3])
            # print('scnn_covn2d:', scnn_covn2d)
            # scnn_covn2d_last = scnn_covn2d  # update the saved previous slice
            layers_concat.append(scnn_covn2d)  # store the updated slice
    # layers_concat.append(scnn_covn2d)
    """SCNN_U"""
    # upward pass: mirror of the downward pass, starting at the bottom row
    # and consuming the downward pass's outputs
    for i in range(H_size - 1, -1, -1):
        # print(i)
        result = layers_concat[i]  # take slice i (from the downward pass)
        # convolution step
        if i == H_size - 1:  # the first (bottom) slice is kept as-is
            # print(i)
            layers_result_concat.append(result)  # store the slice
            # scnn_covn2d_last = result
            # scnn_covn2d = SCNN_D('scnn_covn2d',result,num_filters =1,filter_size = [32,3],
            # padding=[0,1],stride=1,bias_attr = True,act = "relu")
        else:
            """片于片之间的卷积"""
            # print(i)
            scnn_covn2d = SCNN('scnn_covn_U', layers_result_concat[-1], num_filters=1, filter_size=[3, W_size],
                               padding=[1, 0], stride=1, bias_attr=True, act="relu")
            # print('scnn_covn2d1',scnn_covn2d)
            scnn_covn2d = fluid.layers.reshape(scnn_covn2d, [0, C_size])  # collapse to one dimension
            result = fluid.layers.transpose(result, perm=[0, 2, 1, 3])
            scnn_covn2d = fluid.layers.elementwise_add(result, scnn_covn2d, axis=0,act='relu')  # broadcast over the trailing dims
            # scnn_covn2d = fluid.layers.elementwise_add(result, scnn_covn2d)
            scnn_covn2d = fluid.layers.transpose(scnn_covn2d, perm=[0, 2, 1, 3])
            # scnn_covn2d_last = scnn_covn2d  # update the saved previous slice
            layers_result_concat.append(scnn_covn2d)  # store the updated slice
    # NOTE(review): layers_result_concat is in bottom-to-top order here, so
    # the concat reassembles rows reversed relative to the input — confirm
    # this is intended.
    return fluid.layers.concat(input=layers_result_concat, axis=1)
# print('layers_concat11',out)
# scnn_covn2d = SCNN_D('scnn_covn2d',result,num_filters =1,filter_size = [32,3], padding=[0,1],stride=1,bias_attr = True,act = "relu")
# print('scnn_covn2d',scnn_covn2d)
"""
由左到右,右到左的卷积
"""
def SCNN_R_L(input_data, C_size,H_size, W_size): # W_size: number of column slices in the input
    """Spatial-CNN horizontal message passing: left-to-right, then right-to-left.

    Mirror of SCNN_D_U over the width axis: the map is split into W_size
    one-column slices and each slice is refined from its already-processed
    neighbour in both sweep directions.
    NOTE(review): assumes input_data is NCHW with C == C_size -- confirm.
    """
    x_transposed = fluid.layers.transpose(input_data, perm=[0, 3, 2, 1]) ## NCHW -->NWCH
    axes1 = [1]
    layers_concat = list()
    layers_result_concat = list()
    """SCNN_R"""
    # lenth = range(0,H_size)
    for i in range(0, W_size):
        result = fluid.layers.slice(input=x_transposed, axes=axes1, starts=[i], ends=[i + 1])  # take one slice
        # convolution step
        if i == 0:  # the first slice is used as-is (no processed neighbour yet)
            layers_concat.append(result)  # keep the processed slice
            # scnn_covn2d_last = result
            # scnn_covn2d = SCNN_D('scnn_covn2d',result,num_filters =1,filter_size = [32,3],
            # padding=[0,1],stride=1,bias_attr = True,act = "relu")
        else:
            """convolution between adjacent slices"""
            # if i == 2:
            #     print('SCNN_R:', layers_concat[-1])
            # else:
            #     pass
            scnn_covn2d = SCNN('scnn_covn_R', layers_concat[-1], num_filters=1, filter_size=[3, H_size],
                               padding=[1, 0], stride=1, bias_attr=True, act="relu")
            scnn_covn2d = fluid.layers.reshape(scnn_covn2d, [0, C_size])  # flatten three dims into one
            result = fluid.layers.transpose(result, perm=[0, 2, 1, 3])
            # print("result_after:", result)
            # print("scnn_covn2d:", scnn_covn2d)
            scnn_covn2d = fluid.layers.elementwise_add(result, scnn_covn2d, axis=0,act='relu')  # broadcast over the trailing dimensions
            # scnn_covn2d = fluid.layers.elementwise_add(result, scnn_covn2d)
            scnn_covn2d = fluid.layers.transpose(scnn_covn2d, perm=[0, 2, 1, 3])
            # scnn_covn2d_last = scnn_covn2d # remember the previous slice
            layers_concat.append(scnn_covn2d)  # keep the processed slice
    """SCNN_L"""
    for i in range(W_size - 1, -1, -1):
        # print(i)
        result = layers_concat[i]  # take one slice
        # convolution step
        if i == W_size - 1:  # the last slice starts the reverse pass unchanged
            # print(i)
            layers_result_concat.append(result)  # keep the processed slice
            # scnn_covn2d_last = result
            # scnn_covn2d = SCNN_D('scnn_covn2d',result,num_filters =1,filter_size = [32,3],
            # padding=[0,1],stride=1,bias_attr = True,act = "relu")
        else:
            """convolution between adjacent slices"""
            # print(i)
            scnn_covn2d = SCNN('scnn_covn_L', layers_result_concat[-1], num_filters=1, filter_size=[3, H_size],
                               padding=[1, 0], stride=1, bias_attr=True, act="relu")
            # print("scnn_covn2d_before:",scnn_covn2d)
            scnn_covn2d = fluid.layers.reshape(scnn_covn2d, [0, C_size])  # flatten three dims into one
            # print("scnn_covn2d_after:",scnn_covn2d)
            # print("result_before:",result)
            result = fluid.layers.transpose(result, perm=[0, 2, 1, 3])
            # print("result_after:", result)
            # print("scnn_covn2d:", scnn_covn2d)
            scnn_covn2d = fluid.layers.elementwise_add(result, scnn_covn2d, axis=0,act='relu')  # broadcast over the trailing dimensions
            # scnn_covn2d = fluid.layers.elementwise_add(result, scnn_covn2d)
            scnn_covn2d = fluid.layers.transpose(scnn_covn2d, perm=[0, 2, 1, 3])
            # print("scnn_covn2d:",scnn_covn2d)
            # scnn_covn2d_last = scnn_covn2d # remember the previous slice
            layers_result_concat.append(scnn_covn2d)  # keep the processed slice
    # Stitch the refined slices back together along the slice axis.
    return fluid.layers.concat(input=layers_result_concat, axis=1)
def SCNN_D_U_R_L(input_data,C_size,H_size ,W_size):
    """Run the vertical (down/up) then horizontal (right/left) SCNN passes
    and return the result transposed with perm=[0, 2, 3, 1]."""
    #input_data = conv_layers( 'layers1',input_image,num_filters = 32, stride=2, padding=1,bias_attr = True)
    #out_data_D_U = SCNN_D_U(input_data,H_size)
    #print('out_data_D_U',out_data_D_U)
    return fluid.layers.transpose(SCNN_R_L(SCNN_D_U(input_data,C_size,H_size,W_size),C_size,H_size,W_size), perm=[0, 2, 3,1])
    #print(out_transposed)
def output_layers(input_data,C_size, H_size,W_size,num_classes):
    """Full network head: ResNet backbone -> SCNN message passing ->
    upsampling back towards the input resolution, ending in a 1x1
    convolution that produces num_classes channels of per-pixel logits."""
    model = ResNet(is_test=False)
    # spatial_net = model.bottleneck_block1(inputs)
    end_points_8 = model.net(input_data)  # backbone features (name suggests 1/8 resolution -- confirm)
    output_dat = SCNN_D_U_R_L(end_points_8,C_size, H_size,W_size )
    net = batch_normalization(output_dat, relu=True, name='conv2d_transpose_bn1')
    net = fluid.layers.conv2d_transpose(input=net, num_filters=num_classes, output_size=[128, 256])
    net = batch_normalization(net, relu=True, name='conv2d_transpose_bn2')
    net = fluid.layers.conv2d_transpose(input=net, num_filters=num_classes, output_size=[256, 512])
    net = batch_normalization(net, relu=True, name='conv2d_transpose_bn3')
    net = fluid.layers.image_resize(net, out_shape=[512, 1024], resample='BILINEAR')
    # net = batch_normalization(net, relu=True, name='conv2d_transpose_bn4')
    net = fluid.layers.conv2d(net, num_classes, 1)  # 1x1 conv -> class logits
    # net = fluid.layers.conv2d_transpose(input=net, num_filters=num_classes, output_size=[512, 1024])
    # net = batch_normalization(net, relu=True, name='conv2d_transpose_bn4')
    # print('net',net)
    #net = fluid.layers.image_resize(net, out_shape=[512, 1024], resample='BILINEAR')
    return net
def save_model(exe, save_dir):
    """Persist all parameters of the default main program under `save_dir`.

    Fix: the previous version only saved when the directory already existed
    and silently did nothing otherwise, losing checkpoints on a fresh run.
    Now the directory is created on demand and parameters are always saved.
    """
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    fluid.io.save_params(exe, dirname=save_dir, main_program=fluid.default_main_program())
# global configure
learning_rate = 5e-4
batch_size = 1
num_classes = 9
Image_Height = 512
Image_Width = 1024
num_pass = 100  # number of outer training passes
C_size = 256  # channel count fed into the SCNN head
checkpoint_dir = '/home/zhngqn/zgy_RUIMING/code/ASR_v0.6_8k/version0906/SCNN/scnn_checkpoints'
# inference_dir = '/home/zhngqn/zgy_RUIMING/code/ASR_v0.6_8k/version0906/scnn_inference_model'
save_dir= '/home/zhngqn/zgy_RUIMING/code/ASR_v0.6_8k/version0906/SCNN/savepath'
# data layer
inputs = fluid.layers.data(name='img', shape=[3, Image_Height, Image_Width], dtype='float32')
# inputs = fluid.layers.image_resize(img, out_shape=[512,256], resample='BILINEAR') ## ???
label = fluid.layers.data(name='label', shape=[1], dtype='int64')
# infer
predict = output_layers(inputs, C_size, Image_Height // 8, Image_Width // 8,num_classes)
print(predict)
## reshape logits into [batch_zize*H*W, num_classes]
predict = fluid.layers.transpose(predict, [0, 2, 3, 1]) ## NCHW --> NHWC
predict_reshape = fluid.layers.reshape(x=predict, shape=[-1, num_classes])
predict_reshape = fluid.layers.softmax(predict_reshape)
print('predict_reshape:', predict_reshape)
# loss function: per-pixel cross entropy against the flattened label map
print('predict_reshape', predict_reshape)
print('label', label)
cost = fluid.layers.cross_entropy(predict_reshape, label,
                                  soft_label=False) ## as same as tf.sparse_softmax_cross_entopy_with_logits()
avg_cost = fluid.layers.mean(cost)
print('avg_cost:', avg_cost)
# accuracy
weight_decay = 0.00004
# acc_top1 = fluid.layers.accuracy(input=out, label=label, k=1)
acc = fluid.layers.accuracy(input=predict_reshape, label=label, k=1)
print('acc:', acc)
# get test program
test_program = fluid.default_main_program().clone(for_test=True)
# optimizer: SGD with momentum and L2 weight decay
optimizer = fluid.optimizer.Momentum(
    learning_rate,
    momentum=0.9,
    regularization=fluid.regularizer.L2DecayRegularizer(
        regularization_coeff=weight_decay), )
opts = optimizer.minimize(avg_cost)
# get test program
# test_program = fluid.default_main_program().clone(for_test=True)
# run in CPUPlace
use_cuda = True
place = fluid.CUDAPlace(0) if use_cuda else fluid.CPUPlace()
# define a executor
exe = fluid.Executor(place)
# parameters initialize globally
exe.run(fluid.default_startup_program())
# startup_prog = fluid.default_startup_program()
# exe.run(startup_prog)
# fluid.io.load_persistables(exe, checkpoint_dir, startup_prog)
prog = fluid.default_main_program()
# fluid.io.load_persistables(executor=exe, dirname=checkpoint_dir,
#                           main_program=None)
# Resume from an existing checkpoint when one is available.
if os.path.exists(checkpoint_dir + '/persistabels_model'):
    fluid.io.load_persistables(exe, checkpoint_dir + '/persistabels_model', main_program=prog)
else:
    pass
# define input data
feeder = fluid.DataFeeder(place=place, feed_list=[inputs, label])
# [inference_program, feed_target_names, fetch_targets]=fluid.io.load_inference_model(dirname=checkpoint_dir+'/', executor=exe)
filename = '/home/zhngqn/zgy_RUIMING/baidu_city/train.txt'
dic_filelist, list_wavmark = get_wav_list(filename)
# begin to train
for pass_id in range(num_pass):
    for batch_id in range(20000):
        start = time.time()
        # training process
        #num = random.randint(0, 21914)
        #print(dic_filelist[num],list_wavmark[num])
        train_data, train_label_data = data_generator(Image_Height, Image_Width, batch_size, dic_filelist,list_wavmark)
        end = time.time()
        # train_data transpose into NCHW
        train_data = np.transpose(train_data, (0, 3, 1, 2))
        # train_label_data = np.transpose(train_label_data, axes=[0, ])
        train_label_data = np.reshape(train_label_data, (-1, 1))
        # print('train_data, train_label:', [train_data.shape, train_label_data.shape])
        # train_data = np.random.uniform(0, 1, (batch_size, 3, Image_Height, Image_Width)).astype(np.float32)
        # train_label_data = np.zeros((5786640 * batch_size, 1)).astype(np.int64).reshape(5786640 * batch_size, 1)
        train_cost, train_acc = exe.run(program=fluid.default_main_program(),
                                        feed={'img': train_data,
                                              'label': train_label_data},
                                        fetch_list=[avg_cost.name, acc.name])
        # end = time.time()
        # np.size(train_label_data[train_label_data ==0])/np.size(train_label_data)
        # Fraction of pixels labelled 0 (presumably background -- confirm);
        # used below as a chance baseline to normalise the reported accuracy.
        base_acc = np.size(train_label_data[train_label_data ==0])/np.size(train_label_data)
        if batch_id % 10 == 0:
            print("Pass: %d, Batch: %d, Train_Cost: %0.5f, Train_Accuracy: %0.5f,time: %0.5f" %
                  (pass_id, batch_id, train_cost[0],(train_acc[0]-base_acc)/(1.0-base_acc) ,end-start))
        # save checkpoints
        # if pass_id % 10 == 0 and batch_id == 0:
        if batch_id % 50 == 0:
            fluid.io.save_persistables(executor=exe,
                                       dirname=checkpoint_dir + '/persistabels_model',
                                       main_program=fluid.default_main_program()
                                       )
            # fluid.io.save_inference_model(dirname=inference_dir,
            #                               feeded_var_names=['img'],
            #                               target_vars=[predict],
            #                               executor=exe,
            #                               main_program=None,
            #                               model_filename=None,
            #                               params_filename=None,
            #                               export_for_deployment=True)
            save_model(exe, save_dir)
|
[
"1174548879@qq.com"
] |
1174548879@qq.com
|
e418afb3c244e5d140f09bc3ff9dec3f541a546d
|
c864086ea38c3cfe4819fabcd71137db79a52f2f
|
/drugi.py
|
53ebf8955dcf91801d076323c4f94518bebc5a1a
|
[] |
no_license
|
kaczmarekmichal/nauka_pythona
|
acee5d99bd19e66d8d6fe19f71c5fd8b6d3b9b71
|
d9ac839fa5290d8f7b5cb7b31e6c66d199ae8b89
|
refs/heads/master
| 2020-04-10T12:18:59.731505
| 2019-01-19T10:47:40
| 2019-01-19T10:47:40
| 161,018,459
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 467
|
py
|
# String/number exercises: inspect and transform a simple car description.
marka = 'Peugeot'
marka2 = 'Audi'
ilosc_drzwi = 5
pojemnosc = 1.3
marka_up = marka.upper()
marka_low = marka.lower()
print(f"Samochod {marka} ma {ilosc_drzwi} drzwi")
print(marka_low)
print(f"pojemnosc po zmianach {pojemnosc * 2}")
marka_swap = marka.swapcase()
print(marka_swap)
# Classify by door count, then check the brand prefix.
print('duzy' if ilosc_drzwi > 3 else 'maly')
print('uwaga peugeot' if marka.startswith('Pe') else "na szczescie to nie francuz!")
|
[
"kaczmarekmichal@users.noreply.github.com"
] |
kaczmarekmichal@users.noreply.github.com
|
3f54caa0e55508ce17876cc1465946e0635ab1b6
|
caf34f9e5b3f9ea60a4481331004a17a278bfd55
|
/django_scrapy/quote/apps.py
|
bbcbdc2376edb935c31606c84860113c35a5518f
|
[] |
no_license
|
ArielLahiany/django_scrapy
|
a827cfa737263bd4aa1d52f3c39ac2c1180b7d6c
|
fa2b0376b5e1aa0da306612a915d5193b194ad9e
|
refs/heads/master
| 2023-03-26T10:50:52.569171
| 2021-03-24T21:20:12
| 2021-03-24T21:20:12
| 351,224,986
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 143
|
py
|
# Django modules
from django.apps import AppConfig
class QuoteConfig(AppConfig):
    """Django application configuration for the quote app."""
    # Dotted path Django uses to locate the app inside the project package.
    name = 'django_scrapy.quote'
    # Human-readable name (shown e.g. in the Django admin).
    verbose_name = "Quote"
|
[
"ariel.lahiany@gmail.com"
] |
ariel.lahiany@gmail.com
|
30d22e648e82216e843989a09b25df3c9431291e
|
747f759311d404af31c0f80029e88098193f6269
|
/addons/library/library_editor_supplier.py
|
d50451b7ea1eab1cf3f62f26950986f7861e6e54
|
[] |
no_license
|
sgeerish/sirr_production
|
9b0d0f7804a928c0c582ddb4ccb7fcc084469a18
|
1081f3a5ff8864a31b2dcd89406fac076a908e78
|
refs/heads/master
| 2020-05-19T07:21:37.047958
| 2013-09-15T13:03:36
| 2013-09-15T13:03:36
| 9,648,444
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 72
|
py
|
/home/openerp/production/extra-addons/library/library_editor_supplier.py
|
[
"geerish@omerp.net"
] |
geerish@omerp.net
|
b1571f62c847a20ecf7624a5be9945287afced54
|
704976ea552111c6a5af9cd7cb62b9d9abaf3996
|
/pypy/module/zlib/test/test_zlib.py
|
35fd7147de34051908c2d1acb58fc941e3703da9
|
[
"BSD-3-Clause"
] |
permissive
|
mesalock-linux/mesapy
|
4f02c5819ce7f2f6e249d34840f1aa097577645d
|
ed546d59a21b36feb93e2309d5c6b75aa0ad95c9
|
refs/heads/mesapy2.7
| 2023-08-16T21:33:02.239581
| 2019-08-13T10:29:43
| 2019-08-13T18:06:45
| 136,080,721
| 396
| 33
|
NOASSERTION
| 2020-04-01T03:05:18
| 2018-06-04T20:45:17
|
Python
|
UTF-8
|
Python
| false
| false
| 10,362
|
py
|
"""
Tests for the zlib module.
"""
import sys
try:
import zlib
except ImportError:
import py; py.test.skip("no zlib module on this host Python")
try:
from pypy.module.zlib import interp_zlib
except ImportError:
import py; py.test.skip("no zlib C library on this machine")
def test_unsigned_to_signed_32bit():
    # Reinterpret unsigned 32-bit values as signed two's complement:
    # values below 2**31 map to themselves, 2**31 wraps to -2**31,
    # and 2**32-1 wraps to -1.
    assert interp_zlib.unsigned_to_signed_32bit(123) == 123
    assert interp_zlib.unsigned_to_signed_32bit(2**31) == -2**31
    assert interp_zlib.unsigned_to_signed_32bit(2**32-1) == -1
    if sys.maxint > 2**32:
        # On 64-bit hosts only the low 32 bits should matter.
        from rpython.rlib.rarithmetic import r_uint
        assert interp_zlib.unsigned_to_signed_32bit(r_uint(sys.maxint)) == -1
        assert interp_zlib.unsigned_to_signed_32bit(r_uint(sys.maxint+1)) == 0
assert interp_zlib.unsigned_to_signed_32bit(r_uint(sys.maxint+1)) == 0
class AppTestZlib(object):
    """Application-level tests for PyPy's zlib module.

    NOTE(review): Python 2 only (long literals like -1L, buffer(),
    sys.maxint); this file will not compile under Python 3.
    """
    spaceconfig = dict(usemodules=['zlib'])
    def setup_class(cls):
        """
        Create a space with the zlib module and import it for use by the tests.
        Also create some compressed data with the bootstrap zlib module so that
        compression and decompression tests have a little real data to assert
        against.
        """
        cls.w_zlib = cls.space.getbuiltinmodule('zlib')
        expanded = 'some bytes which will be compressed'
        cls.w_expanded = cls.space.wrap(expanded)
        cls.w_compressed = cls.space.wrap(zlib.compress(expanded))
    def test_error(self):
        """
        zlib.error should be an exception class.
        """
        assert issubclass(self.zlib.error, Exception)
    def test_crc32(self):
        """
        When called with a string, zlib.crc32 should compute its CRC32 and
        return it as a signed 32 bit integer.  On 64-bit machines too
        (it is a bug in CPython < 2.6 to return unsigned values in this case).
        """
        assert self.zlib.crc32('') == 0
        assert self.zlib.crc32('\0') == -771559539
        assert self.zlib.crc32('hello, world.') == -936931198
    def test_crc32_start_value(self):
        """
        When called with a string and an integer, zlib.crc32 should compute the
        CRC32 of the string using the integer as the starting value.
        """
        assert self.zlib.crc32('', 42) == 42
        assert self.zlib.crc32('\0', 42) == 163128923
        assert self.zlib.crc32('hello, world.', 42) == 1090960721
        hello = 'hello, '
        hellocrc = self.zlib.crc32(hello)
        world = 'world.'
        helloworldcrc = self.zlib.crc32(world, hellocrc)
        assert helloworldcrc == self.zlib.crc32(hello + world)
    def test_crc32_negative_start(self):
        # A negative start value must be accepted and passed through.
        v = self.zlib.crc32('', -1)
        assert v == -1
    def test_crc32_negative_long_start(self):
        # Negative long start values are truncated to 32 bits.
        v = self.zlib.crc32('', -1L)
        assert v == -1
        assert self.zlib.crc32('foo', -99999999999999999999999) == 1611238463
    def test_crc32_long_start(self):
        # Start values wider than 32 bits are truncated to their low 32 bits.
        import sys
        v = self.zlib.crc32('', sys.maxint*2)
        assert v == -2
        assert self.zlib.crc32('foo', 99999999999999999999999) == 1635107045
    def test_adler32(self):
        """
        When called with a string, zlib.adler32() should compute its adler 32
        checksum and return it as a signed 32 bit integer.
        On 64-bit machines too
        (it is a bug in CPython < 2.6 to return unsigned values in this case).
        """
        assert self.zlib.adler32('') == 1
        assert self.zlib.adler32('\0') == 65537
        assert self.zlib.adler32('hello, world.') == 571147447
        assert self.zlib.adler32('x' * 23) == -2122904887
    def test_adler32_start_value(self):
        """
        When called with a string and an integer, zlib.adler32 should compute
        the adler 32 checksum of the string using the integer as the starting
        value.
        """
        assert self.zlib.adler32('', 42) == 42
        assert self.zlib.adler32('\0', 42) == 2752554
        assert self.zlib.adler32('hello, world.', 42) == 606078176
        assert self.zlib.adler32('x' * 23, 42) == -2061104398
        hello = 'hello, '
        hellosum = self.zlib.adler32(hello)
        world = 'world.'
        helloworldsum = self.zlib.adler32(world, hellosum)
        assert helloworldsum == self.zlib.adler32(hello + world)
        assert self.zlib.adler32('foo', -1) == 45547858
        assert self.zlib.adler32('foo', 99999999999999999999999) == -114818734
    def test_invalidLevel(self):
        """
        zlib.compressobj should raise ValueError when an out of bounds level is
        passed to it.
        """
        raises(ValueError, self.zlib.compressobj, -2)
        raises(ValueError, self.zlib.compressobj, 10)
    def test_compression(self):
        """
        zlib.compressobj should return an object which can be used to compress
        bytes.
        """
        compressor = self.zlib.compressobj()
        bytes = compressor.compress(self.expanded)
        raises(OverflowError, compressor.flush, 2**31)
        bytes += compressor.flush()
        assert bytes == self.compressed
    def test_decompression(self):
        """
        zlib.decompressobj should return an object which can be used to
        decompress bytes.
        """
        decompressor = self.zlib.decompressobj()
        bytes = decompressor.decompress(self.compressed)
        bytes += decompressor.flush()
        assert bytes == self.expanded
    def test_compress(self):
        """
        Test the zlib.compress() function.
        """
        bytes = self.zlib.compress(self.expanded)
        assert bytes == self.compressed
    def test_decompress(self):
        """
        Test the zlib.decompress() function.
        """
        bytes = self.zlib.decompress(self.compressed)
        assert bytes == self.expanded
    def test_decompress_invalid_input(self):
        """
        Try to feed garbage to zlib.decompress().
        """
        raises(self.zlib.error, self.zlib.decompress, self.compressed[:-2])
        raises(self.zlib.error, self.zlib.decompress, 'foobar')
    def test_bad_arguments(self):
        # Out-of-range or wrongly-typed arguments must raise, not crash.
        import zlib
        raises(ValueError, zlib.decompressobj().flush, 0)
        raises(ValueError, zlib.decompressobj().flush, -1)
        raises(TypeError, zlib.decompressobj().flush, None)
        raises(ValueError, zlib.decompressobj().decompress, b'abc', -1)
        raises(TypeError, zlib.decompressobj().decompress, b'abc', None)
        raises(TypeError, self.zlib.decompress, self.compressed, None)
        raises(OverflowError, self.zlib.decompress, self.compressed, 2**31)
    def test_empty_flush(self):
        # Flushing a fresh compressor yields a header; a fresh decompressor,
        # nothing.
        import zlib
        co = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
        assert co.flush()  # Returns a zlib header
        dco = zlib.decompressobj()
        assert dco.flush() == b""
    def test_decompress_incomplete_stream(self):
        import zlib
        # This is 'foo', deflated
        x = b'x\x9cK\xcb\xcf\x07\x00\x02\x82\x01E'
        # For the record
        assert zlib.decompress(x) == b'foo'
        raises(zlib.error, zlib.decompress, x[:-5])
        # Omitting the stream end works with decompressor objects
        # (see issue #8672).
        dco = zlib.decompressobj()
        y = dco.decompress(x[:-5])
        y += dco.flush()
        assert y == b'foo'
    def test_unused_data(self):
        """
        Try to feed too much data to zlib.decompress().
        It should show up in the unused_data attribute.
        """
        d = self.zlib.decompressobj()
        s = d.decompress(self.compressed + 'extrastuff', 0)
        assert s == self.expanded
        assert d.unused_data == 'extrastuff'
        assert d.flush() == ''
        assert d.unused_data == 'extrastuff'
        # try again with several decompression steps
        d = self.zlib.decompressobj()
        s1 = d.decompress(self.compressed[:10])
        assert d.unused_data == ''
        s2 = d.decompress(self.compressed[10:-3])
        assert d.unused_data == ''
        s3 = d.decompress(self.compressed[-3:] + 'spam' * 100)
        assert d.unused_data == 'spam' * 100
        assert s1 + s2 + s3 == self.expanded
        s4 = d.decompress('egg' * 50)
        assert d.unused_data == ('spam' * 100) + ('egg' * 50)
        assert s4 == ''
    def test_max_length(self):
        """
        Test the max_length argument of the decompress() method
        and the corresponding unconsumed_tail attribute.
        """
        d = self.zlib.decompressobj()
        data = self.compressed
        for i in range(0, 100, 10):
            s1 = d.decompress(data, 10)
            assert s1 == self.expanded[i:i+10]
            data = d.unconsumed_tail
        assert not data
    def test_max_length_large(self):
        # max_length may be a 64-bit value on recent 2.7 releases.
        import sys
        if sys.version_info < (2, 7, 13):
            skip("passing a potentially 64-bit int as max_length is not "
                 "supported before 2.7.13")
        d = self.zlib.decompressobj()
        assert d.decompress(self.compressed, sys.maxsize) == self.expanded
    def test_buffer(self):
        """
        We should be able to pass buffer objects instead of strings.
        """
        assert self.zlib.crc32(buffer('hello, world.')) == -936931198
        assert self.zlib.adler32(buffer('hello, world.')) == 571147447
        compressor = self.zlib.compressobj()
        bytes = compressor.compress(buffer(self.expanded))
        bytes += compressor.flush()
        assert bytes == self.compressed
        decompressor = self.zlib.decompressobj()
        bytes = decompressor.decompress(buffer(self.compressed))
        bytes += decompressor.flush()
        assert bytes == self.expanded
        bytes = self.zlib.compress(buffer(self.expanded))
        assert bytes == self.compressed
        bytes = self.zlib.decompress(buffer(self.compressed))
        assert bytes == self.expanded
    def test_flush_with_freed_input(self):
        # Issue #16411: decompressor accesses input to last decompress() call
        # in flush(), even if this object has been freed in the meanwhile.
        input1 = b'abcdefghijklmnopqrstuvwxyz'
        input2 = b'QWERTYUIOPASDFGHJKLZXCVBNM'
        data = self.zlib.compress(input1)
        dco = self.zlib.decompressobj()
        dco.decompress(data, 1)
        del data
        data = self.zlib.compress(input2)
        assert dco.flush(1) == input1[1:]
        assert dco.unused_data == b''
        assert dco.unconsumed_tail == b''
|
[
"mssun@mesalock-linux.org"
] |
mssun@mesalock-linux.org
|
0110676b8f2a531de2bd3d355df08326b97bf65d
|
3619115e1d476864885ca8d78bc17f19cba490f7
|
/Precis/Precis/Data/precis_formula.py
|
588e68dfeae38b9aad521418e8cfa28af2b866ae
|
[] |
no_license
|
ssaha6/Precis
|
bc9a1c975a5f024304770fc600a134e9f9890453
|
c2c59b1f126876a38f41036ecb2b0912e009dc89
|
refs/heads/master
| 2020-09-25T08:44:25.223012
| 2019-12-04T22:01:14
| 2019-12-04T22:01:14
| 225,965,202
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,288
|
py
|
from z3 import *
import re
class PrecisFormula:
    """Wrapper around a Z3 boolean expression that can render itself as a
    C#-compatible infix string and combine/simplify itself logically."""
    # The wrapped Z3 expression (BoolRef/ExprRef).
    formulaZ3 = None
    # Single-line string rendering of formulaZ3.
    formula = ""
    def __init__(self, varZ3):
        """Store the Z3 expression and a newline-free string form of it."""
        self.formulaZ3 = varZ3
        # str(expr) wraps long expressions over several lines; strip the
        # newlines so the regex-based infix conversion sees a single line.
        self.formula = str(varZ3).replace("\n", "")
    def toInfix(self):
        """Return self.formula converted from prefix notation (And/Or/Not)
        to C#-style infix notation (&&, ||, !)."""
        s = self.formula
        while True:
            s, flag = self.replace(s)
            if not flag:
                # ` and ~ are placeholders for '(' and ')' introduced by
                # replace(); restore them once no prefix operator remains.
                # NOTE(review): the chained .replace("&& ", "&& ") calls look
                # like no-ops (perhaps they once collapsed extra whitespace
                # from str(expr)); kept verbatim to preserve behaviour.
                replacePlacedHolderFormula = s.replace("`", "(").replace("~", ")") \
                    .replace("&& ", "&& ").replace("&& ", "&& ") \
                    .replace("&& ", "&& ") \
                    .replace("&& ", "&& ") \
                    .replace("&& ", "&& ").replace("&& ", "&& ") \
                    .replace("|| ", "|| ").replace("|| ", "|| ") \
                    .replace("|| ", "|| ").replace("|| ", "|| ").replace("|| ", "|| ")\
                    .replace("== ", "== ")\
                    .replace("!= ", "!= ")
                # Z3 prints Python-style booleans; C# wants lowercase.
                cSharpCompatibleFormula = replacePlacedHolderFormula.replace(
                    "False", "false").replace("True", "true")
                return cSharpCompatibleFormula
    # Acknowledgement: Neil Zhao
    def replace(self, s):
        """Rewrite innermost And/Or/Not applications in `s` into infix form,
        using ` and ~ as temporary parentheses.

        Returns (new_string, changed): `changed` tells the caller whether
        another rewriting pass is needed.
        """
        pattern = re.compile(r'((And|Or|Not)(\(([^,()]+(,[^,()]+)*)\)))')
        res = pattern.findall(s)
        for r in res:
            if r[1] == 'And':
                conjunct = r[2][1:-1]
                replacement = conjunct.replace(', ', ' && ')
                s = s.replace(r[0], '`{}~'.format(replacement))
            elif r[1] == 'Or':
                disjunct = r[2][1:-1]
                replacement = disjunct.replace(', ', ' || ')
                s = s.replace(r[0], '`!`{}~~'.format(negation)) if False else s.replace(r[0], '`{}~'.format(replacement))
            elif r[1] == 'Not':
                negation = r[2][1:-1]
                s = s.replace(r[0], '`!`{}~~'.format(negation))
            else:
                # The regex alternation only admits And/Or/Not.
                assert False, "unreachable operator: " + r[1]
        return s, len(res) > 0
    def precisAnd(self, other):
        """Conjoin with another Z3 expression; returns a new PrecisFormula.

        Fix: the original wrote `And(self.formulaZ3. other)` ('.' instead of
        ','), which read a non-existent attribute instead of conjoining.
        """
        return PrecisFormula(And(self.formulaZ3, other))
    def precisOr(self, other):
        """Disjoin with another Z3 expression; returns a new PrecisFormula.

        Fix: same '.'-for-',' typo as precisAnd.
        """
        return PrecisFormula(Or(self.formulaZ3, other))
    def precisSimplify(self):
        """Simplify the wrapped formula with a pipeline of Z3 tactics and
        return it as Or(And(...), ...); returns the string "true" when every
        subgoal simplifies away."""
        postcondition = self.formulaZ3
        set_option(max_args=10000000, max_lines=1000000,
                   max_depth=10000000, max_visited=1000000)
        set_option(html_mode=False)
        set_fpa_pretty(flag=False)
        g = Goal()
        g.add(postcondition)
        works = Repeat(Then(
            OrElse(Tactic('ctx-solver-simplify'), Tactic('skip')),
            OrElse(Tactic('unit-subsume-simplify'), Tactic('skip')),
            OrElse(Tactic('simplify'), Tactic('skip')),
            Repeat(OrElse(Tactic('cofactor-term-ite'), Tactic('skip'))),
            Repeat(OrElse(Tactic('split-clause'), Tactic('skip'))),
        ))
        result = works(g)
        # Drop empty subgoals. Fix: in Python 3 `filter(...)` returns a lazy,
        # always-truthy object, so the old `if not result:` check never fired;
        # materialize into a list first.
        result = [subgoal for subgoal in result if subgoal]
        if not result:
            return "true"
        completeConjunct = []
        for conjunction in result:
            completeDisjunct = [literal for literal in conjunction]
            completeConjunct.append(And(completeDisjunct))
        simplifiedPrecondition = Or(completeConjunct)
        return simplifiedPrecondition
|
[
"angello.astorga@gmail.com"
] |
angello.astorga@gmail.com
|
e5ab44dc776222c231274dd703bcd5aebdb8b110
|
f207586e34b37b13ee6012ea08f174e302fa0078
|
/mimo/util/decorate.py
|
cf41979d6dfcac6b024ecd468df4e0901d8627e7
|
[
"MIT"
] |
permissive
|
pnickl/mimo
|
92b7858108e077ff43082f15f635d1205120b143
|
81c4bbd2594e2136445009eae752ab8a1602a1cf
|
refs/heads/master
| 2022-12-24T02:10:34.838878
| 2020-08-04T19:24:21
| 2020-08-04T19:24:21
| 302,394,694
| 2
| 0
|
MIT
| 2020-10-08T16:07:26
| 2020-10-08T16:07:25
| null |
UTF-8
|
Python
| false
| false
| 1,796
|
py
|
def pass_obs_arg(f):
    """Decorator: default the `obs` argument from self.obs when absent,
    and normalise a single observation into a one-element list."""
    def wrapper(self, obs=None, **kwargs):
        if obs is None:
            # No observations supplied -- fall back to the object's own data.
            assert self.has_data()
            obs = list(self.obs)
        elif not isinstance(obs, list):
            obs = [obs]
        return f(self, obs, **kwargs)
    return wrapper
def pass_obs_and_labels_arg(f):
    """Decorator: default `obs` and `labels` from self.obs/self.labels when
    either is missing; otherwise normalise `obs` into a list."""
    def wrapper(self, obs=None, labels=None, **kwargs):
        if obs is None or labels is None:
            # Missing data -- fall back to the object's stored observations.
            assert self.has_data()
            obs = list(self.obs)
            labels = self.labels
        else:
            if not isinstance(obs, list):
                obs = [obs]
            if labels is None:
                # Unreachable in this branch (labels is not None here);
                # kept for parity with the original implementation.
                labels = [self.gating.likelihood.rvs(len(_obs)) for _obs in obs]
        return f(self, obs, labels, **kwargs)
    return wrapper
def pass_target_and_input_arg(f):
    """Decorator: default `y` and `x` from self.target/self.input when either
    is missing; otherwise normalise both into lists."""
    def wrapper(self, y=None, x=None, **kwargs):
        if y is None or x is None:
            # Missing data -- fall back to the object's stored target/input.
            assert self.has_data()
            y = list(self.target)
            x = list(self.input)
        else:
            if not isinstance(y, list):
                y = [y]
            if not isinstance(x, list):
                x = [x]
        return f(self, y, x, **kwargs)
    return wrapper
def pass_target_input_and_labels_arg(f):
    """Decorator: default `y`, `x` and `z` from self.target/self.input/
    self.labels; otherwise normalise `y`/`x` into lists and sample `z`
    from the gating distribution when not supplied.

    NOTE(review): the condition below parses as
    `y is None or (x is None and z is None)` -- the sibling decorators test
    each argument independently, so this precedence may be unintended;
    confirm before changing.
    """
    def wrapper(self, y=None, x=None, z=None, **kwargs):
        if y is None or x is None and z is None:
            assert self.has_data()
            y = [_y for _y in self.target]
            x = [_x for _x in self.input]
            z = self.labels
        else:
            y = y if isinstance(y, list) else [y]
            x = x if isinstance(x, list) else [x]
            # Sample labels from the gating likelihood when not supplied.
            z = [self.gating.likelihood.rvs(len(_y)) for _y in y]\
                if z is None else z
        return f(self, y, x, z, **kwargs)
    return wrapper
|
[
"abdulsamad@ias.informatik.tu-darmstadt.de"
] |
abdulsamad@ias.informatik.tu-darmstadt.de
|
42b7d6c70484231c6bb27005121f31b13a66df8b
|
5d87e9864168ef07f49693ebf94c83dedfdde4ed
|
/setup.py
|
829e7337c8b5f700daac0f6c36457913d1d6eb85
|
[
"BSD-3-Clause"
] |
permissive
|
cavestruz/spa
|
34b852b63932a296009b8ee58abd50efe1f3d35a
|
4f147694601e0b88f97794c8851ccdd615c73ea4
|
refs/heads/master
| 2022-11-05T23:04:52.604040
| 2020-06-19T16:47:22
| 2020-06-19T16:47:22
| 269,761,428
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 315
|
py
|
from setuptools import setup
# Packaging metadata for the `spa` package; install with `pip install .`.
setup(name='spa',
      version='0.1',
      description='Tests of spa measurements on 300 sims',
      url='http://github.com/cavestruz/spa',
      author='Camille Avestruz',
      author_email='cavestru@umich.edu',
      license='BSD3',
      packages=['spa'],
      zip_safe=False)
|
[
"cavestru@umich.edu"
] |
cavestru@umich.edu
|
0697999540cfd3dabcd8c47abe2e699e561d6cca
|
396a7d442f1b9d4b6734f90ce73fb35cd1f2293b
|
/src/ashild_grotan_ex/ex01/comp_to_loop.py
|
432a11967f2578cc9738e0891bdaa2d763854df7
|
[] |
no_license
|
ashildgrotan/INF200-2019-Exersices
|
46d40d814f39ce6900bb057ce9ca7275f47a6d2c
|
70dbac6d92d29f78c0aac8c72c7a9d1f8b42b72a
|
refs/heads/master
| 2020-07-21T06:11:59.676983
| 2019-11-13T19:12:01
| 2019-11-13T19:12:01
| 206,767,955
| 0
| 0
| null | 2019-11-03T13:41:10
| 2019-09-06T10:10:53
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 348
|
py
|
"""
ex_01, task B.
"""
def squares_by_comp(n):
    """Return squares of every k in range(n) with k % 3 == 1, via a comprehension."""
    return [value * value for value in range(n) if value % 3 == 1]
def squares_by_loop(n):
    """Squares of every k in [0, n) with k % 3 == 1, via an explicit loop."""
    collected = []
    k = 0
    while k < n:
        if k % 3 == 1:
            collected.append(k ** 2)
        k += 1
    return collected
if __name__ == "__main__":
    # Sanity check: both implementations must agree on the same input.
    comp_result = squares_by_comp(10)
    loop_result = squares_by_loop(10)
    if comp_result != loop_result:
        print("ERROR!")
|
[
"ashild.grotan@nmbu.no"
] |
ashild.grotan@nmbu.no
|
43078cfccfee9f2bbde2f0af3de46006b564a128
|
0725ed7ab6be91dfc0b16fef12a8871c08917465
|
/tree/is_bst.py
|
26ed670c86a2703f7550da0fa62852b62ed81d7b
|
[] |
no_license
|
siddhism/leetcode
|
8cb194156893fd6e9681ef50c84f0355d09e9026
|
877933424e6d2c590d6ac53db18bee951a3d9de4
|
refs/heads/master
| 2023-03-28T08:14:12.927995
| 2021-03-24T10:46:20
| 2021-03-24T10:46:20
| 212,151,205
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 716
|
py
|
# A binary tree node
import sys
class Node:
    """Binary-tree node holding integer data and left/right children."""

    # Constructor to create a new node
    def __init__(self, data):
        self.data = data
        self.left = None
        self.right = None


def is_bst(node, min_limit, max_limit):
    """Return True if the subtree rooted at `node` is a binary search tree
    whose node values all lie strictly between min_limit and max_limit."""
    if not node:
        return True
    if not (min_limit < node.data < max_limit):
        return False
    # Each child subtree must satisfy a bound tightened by this node's value.
    l_path = is_bst(node.left, min_limit, node.data)
    r_path = is_bst(node.right, node.data, max_limit)
    return l_path and r_path


# Driver program to test above function.
# Fixed for Python 3: print() is a function, and sys.maxsize replaces the
# removed sys.maxint.
root = Node(4)
root.left = Node(2)
root.right = Node(5)
root.left.left = Node(1)
root.left.right = Node(3)
if is_bst(root, -sys.maxsize, sys.maxsize):
    print("Is BST")
else:
    print("Not a BST")
|
[
"siddhesh@hackerearth.com"
] |
siddhesh@hackerearth.com
|
3a18512d569b9063b5ddbac6e2d4e18eff02ff34
|
dab13cee0dbdd13ee891e8f81378255e91e8cf34
|
/Blog/bin/django-admin.py
|
b6ae43ff66464d4bc2b6889378afad55c4348ed4
|
[] |
no_license
|
aaron-gl94/blog
|
1dd5f5664cca268cfb92e0b118d1ef4732224b1e
|
b955825c51782501745b7377a731d0c3de131d80
|
refs/heads/master
| 2021-01-20T18:36:04.147224
| 2016-07-29T15:35:02
| 2016-07-29T15:35:02
| 64,489,270
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 147
|
py
|
#!/home/syscorp/Backend/Blog/bin/python3
# Thin wrapper equivalent to `django-admin`: hands sys.argv over to
# Django's management command dispatcher.
from django.core import management
if __name__ == "__main__":
    management.execute_from_command_line()
|
[
"aaron_admb94@hotmail.com"
] |
aaron_admb94@hotmail.com
|
73a7c6f6c221eea8f7d32240e74cb094dffad3f5
|
755e8c18b6877cfc750d49f65a9b31a1380c3862
|
/main.py
|
d9c43cf8ae9297d4dc9d3e2bbb75cb11efa38514
|
[] |
no_license
|
nope2jope/sooth_seer
|
36b9382c69a64837e2ae57689dba395c3f0191e7
|
7bda629c33c1b778041a24bc3a2df041efe434f4
|
refs/heads/master
| 2023-07-15T12:14:23.576191
| 2021-09-01T16:48:51
| 2021-09-01T16:48:51
| 387,584,152
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,554
|
py
|
from sooth_sayer import croupier, deck_writer
from pprint import pprint
import os.path
from flask import Flask, render_template, redirect, url_for, request
from flask_bootstrap import Bootstrap
import os

app = Flask(__name__)
app.config['SECRET_KEY'] = os.environ['ENV_SECRET_KEY']
Bootstrap(app)

# Scrape and format the card info only on first run (csv acts as a cache).
if not os.path.exists('tarot_deck.csv'):
    deck_writer.write_deck()

# Deck data shared by every request.
tarot_deck = deck_writer.fetch_deck()


def _reading(spread, portents):
    """Render a tarot reading of `spread` cards labelled by `portents`.

    Shared by all card routes (they previously duplicated this body):
    draws the fortune, then renders the index template with the card
    count, the fortune, and the portent labels.
    """
    fortune = croupier.fortune_teller(deck=tarot_deck, spread=spread)
    return render_template("index.html", c=spread, f=fortune,
                           bool=True, p=portents)


@app.route('/', methods=['GET', 'POST'])
def home():
    """Landing page: no cards drawn yet."""
    return render_template("index.html", c=0, bool=False)


@app.route('/one-card')
def one_card():
    return _reading(1, ['Fortune'])


@app.route('/three-card')
def three_card():
    return _reading(3, ['Past', 'Present', 'Future'])


@app.route('/four-card')
def four_card():
    return _reading(4, ['Querent', 'Past', 'Present', 'Future'])


if __name__ == "__main__":
    app.run(host='0.0.0.0', port=5000)
|
[
"josephkisbye@gmail.com"
] |
josephkisbye@gmail.com
|
70bba7946ac7f7477975d7e1e539ba54fb5208fd
|
918becd643a9d0fec941f89c0da3eb5295fa7104
|
/i3/i3-quickterm/i3-quickterm~
|
2d013f3485864f0f1bbce8e99794609154fb5c29
|
[] |
no_license
|
xircon/Scripts-dots
|
2790dc6f7825302ee66d8cc58dd768b0ddc394a7
|
ef27d11e4163ddb223a2a4cb9ed47af71974cd81
|
refs/heads/master
| 2021-01-20T03:09:26.086417
| 2018-10-13T14:26:15
| 2018-10-13T14:26:15
| 89,503,757
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,845
|
#!/usr/bin/env python3
import argparse
import copy
import fcntl
import json
import os
import shlex
import subprocess
import sys
from contextlib import contextmanager, suppress
from pathlib import Path
import i3ipc
MARK_QT_PATTERN = 'quickterm_.*'
MARK_QT = 'quickterm_{}'
DEFAULT_CONF = {
'menu': 'rofi -dmenu -p "quickterm: " -no-custom -auto-select',
'term': 'tilix -title "{title}"',
'history': '{$HOME}/.cache/i3/i3-quickterm.order',
'ratio': 0.40,
'pos': 'top',
'shells': {
'haskell': 'ghci',
'js': 'node',
'python': 'python3 --no-banner',
'shell': '{$SHELL}',
}
}
def conf_path():
    """Absolute path of the quickterm JSON config, honouring XDG_CONFIG_DIR."""
    default_cfg_dir = os.environ['HOME'] + '/.config'
    base_dir = os.environ.get('XDG_CONFIG_DIR', default_cfg_dir)
    return '{}/i3/i3-quickterm.json'.format(base_dir)
def read_conf(fn):
    """Read the JSON config file at `fn`; return {} if missing or invalid.

    The empty dict makes the caller keep the DEFAULT_CONF values.
    Fixed: the exception variable was unused, and the catch-everything
    handler is narrowed to the expected failures (missing/unreadable file,
    malformed JSON) so programming errors are no longer silently hidden.
    """
    try:
        with open(fn, 'r') as f:
            return json.load(f)
    except (OSError, ValueError):
        # No config file or corrupt JSON: silently fall back to defaults.
        return {}
@contextmanager
def get_history_file(conf):
    """Yield the shell-order history file, opened 'a+' and exclusively
    locked, or yield None when history is disabled (conf['history'] is None).

    The fcntl lock serialises concurrent quickterm invocations; the lock is
    released and the file closed when the context exits.
    """
    if conf['history'] is None:
        yield None
        return
    # the history path may contain {$VAR} placeholders; expand like a command
    p = Path(expand_command(conf['history'])[0])
    os.makedirs(str(p.parent), exist_ok=True)
    f = open(str(p), 'a+')
    fcntl.lockf(f, fcntl.LOCK_EX)
    try:
        # 'a+' positions at EOF; rewind so the caller can read the history
        f.seek(0)
        yield f
    finally:
        fcntl.lockf(f, fcntl.LOCK_UN)
        f.close()
def expand_command(cmd, **rplc_map):
    """Split `cmd` into an argv list, substituting {$ENVVAR} fields from the
    environment and {keyword} fields from `rplc_map`."""
    substitutions = {}
    for key, value in os.environ.items():
        substitutions['$' + key] = value
    substitutions.update(rplc_map)
    return shlex.split(cmd.format(**substitutions))
def move_back(conn, selector):
    """Send the container matched by `selector` back to the scratchpad."""
    command = '{} floating enable, move scratchpad'.format(selector)
    conn.command(command)
def pop_it(conn, mark_name, pos='top', ratio=0.40):
    """Resize the window marked `mark_name` and show it on the current
    workspace.

    The window spans the full workspace width and `ratio` of its height,
    anchored at the top (pos='top', default) or bottom (pos='bottom').
    """
    ws, _ = get_current_workspace(conn)
    # workspace geometry in absolute screen coordinates
    wx, wy = ws['rect']['x'], ws['rect']['y']
    wwidth, wheight = ws['rect']['width'], ws['rect']['height']
    width = wwidth
    height = int(wheight*ratio)
    posx = wx
    if pos == 'bottom':
        # small gap so the window does not touch the screen edge
        margin = 6
        posy = wy + wheight - height - margin
    else: # pos == 'top'
        posy = wy
    conn.command('[con_mark={mark}],'
                 'resize set {width} px {height} px,'
                 'move absolute position {posx}px {posy}px,'
                 'move scratchpad,'
                 'scratchpad show'
                 ''.format(mark=mark_name, posx=posx, posy=posy,
                           width=width, height=height))
def get_current_workspace(conn):
    """Return (workspace reply, workspace tree node) for the focused
    workspace."""
    focused = [w for w in conn.get_workspaces() if w['focused']]
    ws = focused[0]
    matching = [c for c in conn.get_tree().descendents()
                if c.type == 'workspace' and c.name == ws['name']]
    return ws, matching[0]
def toggle_quickterm_select(conf, hist=None):
    """Hide a quickterm visible on the current workspace, or prompt the user
    (via the configured menu program) for a shell type and open that one.

    NOTE(review): the `hist` parameter is never read — it is immediately
    shadowed by the `with get_history_file(conf) as hist` below; confirm it
    can be dropped from the signature.
    """
    conn = i3ipc.Connection()
    ws, ws_tree = get_current_workspace(conn)
    # is there a quickterm opened in the current workspace?
    qt = ws_tree.find_marked(MARK_QT_PATTERN)
    if qt:
        qt = qt[0]
        move_back(conn, '[con_id={}]'.format(qt.id))
        return
    with get_history_file(conf) as hist:
        # compute the list from conf + (maybe) history
        hist_list = None
        if hist is not None:
            with suppress(Exception):
                hist_list = json.load(hist)
                # invalidate if different set from the configured shells
                if set(hist_list) != set(conf['shells'].keys()):
                    hist_list = None
        shells = hist_list or sorted(conf['shells'].keys())
        # feed the candidate shells to the menu program and read the choice
        proc = subprocess.Popen(expand_command(conf['menu']),
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE)
        for r in shells:
            proc.stdin.write((r + '\n').encode())
        stdout, _ = proc.communicate()
        shell = stdout.decode().strip()
        if shell not in conf['shells']:
            return
        if hist is not None:
            # put the selected shell on top
            shells = [shell] + [s for s in shells if s != shell]
            hist.truncate(0)
            json.dump(shells, hist)
    toggle_quickterm(conf, shell)
def term_title(shell):
    """Window title used to identify the quickterm terminal for `shell`."""
    return shell + ' - i3-quickterm'
def toggle_quickterm(conf, shell):
    """Toggle the quickterm for `shell`: spawn it if absent, hide it if
    visible, or pull it onto the current workspace if shown elsewhere."""
    conn = i3ipc.Connection()
    tree = conn.get_tree()
    shell_mark = MARK_QT.format(shell)
    qt = tree.find_marked(shell_mark)
    # does it exist already?
    if len(qt) == 0:
        # spawn a terminal that re-runs this script with -i so the new
        # process marks itself and launches the shell in place
        subprocess.call(expand_command(conf['term'], title=term_title(shell)) +
                        ['-e', sys.argv[0], '-i', shell])
    else:
        qt = qt[0]
        ws, ws_tree = get_current_workspace(conn)
        move_back(conn, '[con_id={}]'.format(qt.id))
        # NOTE(review): `ws` comes from get_workspaces() and is indexed as a
        # dict elsewhere; `ws.name` attribute access here relies on i3ipc's
        # dict-like reply objects — confirm.
        if qt.workspace().name != ws.name:
            pop_it(conn, shell_mark)
def launch_inplace(conf, shell):
    """Run inside the freshly spawned terminal: mark this window, park it in
    the scratchpad, pop it into position, then run the configured shell."""
    conn = i3ipc.Connection()
    shell_mark = MARK_QT.format(shell)
    conn.command('mark {}'.format(shell_mark))
    move_back(conn, '[con_mark={}]'.format(shell_mark))
    pop_it(conn, shell_mark)
    prog_cmd = expand_command(conf['shells'][shell])
    # blocks until the shell exits; the terminal then closes with it
    subprocess.call(prog_cmd)
if __name__ == '__main__':
    # CLI: optional SHELL positional; -i/--in-place means "we are inside the
    # spawned terminal, launch the shell here" (see toggle_quickterm).
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', '--in-place', dest='in_place',
                        action='store_true')
    parser.add_argument('shell', metavar='SHELL', nargs='?')
    args = parser.parse_args()
    # defaults overlaid with the user's JSON config (if any)
    conf = copy.deepcopy(DEFAULT_CONF)
    conf.update(read_conf(conf_path()))
    if args.shell is None:
        # no shell given: toggle/hide, or prompt the user via the menu
        toggle_quickterm_select(conf)
        sys.exit(0)
    if args.shell not in conf['shells']:
        print('unknown shell: {}'.format(args.shell), file=sys.stderr)
        sys.exit(1)
    if args.in_place:
        launch_inplace(conf, args.shell)
    else:
        toggle_quickterm(conf, args.shell)
|
[
"xirconuk@gmail.com"
] |
xirconuk@gmail.com
|
|
b9c5ca1798fcaffb1707909fd79abe2418769bda
|
04ac33f68827aeef7d5bc441d10979143828ef1a
|
/contactSpider.py
|
037682c5a672fc9a935a9454eaef442e24e5a338
|
[] |
no_license
|
samshultz/realtor_agent_spider
|
a06e99af15fc78902c5f44fcb91dd6d55490b14f
|
4550301a9e4733ad19bd6fd904e079037847bbf7
|
refs/heads/master
| 2021-07-05T04:28:17.703484
| 2017-09-30T02:22:34
| 2017-09-30T02:22:34
| 105,333,052
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,933
|
py
|
import scrapy
class ContactSpider(scrapy.Spider):
    """Crawls realtor.com's Los Angeles agent listing and yields one contact
    record (name, location, address parts) per agent profile page."""
    # name of the spider
    name = "contacts"
    # the url to start scraping from
    start_urls = [
        "https://www.realtor.com/realestateagents/Los-Angeles_CA"
    ]
    def parse(self, response):
        """Follow each agent-name link on the listing page."""
        # check the page for the name of the agent...
        for href in response.css("div[itemprop=name] a::attr(href)"):
            # ...click on it and call the parse_agent method on each one
            yield response.follow(href, self.parse_agent)
        # follow pagination links...
        # for href in response.css("a.next::attr(href)"):
        #     #...repeat this method (parse method) on each page
        #     yield response.follow(href, self.parse)
    def parse_agent(self, response):
        """Extract the agent's contact details from their profile page.

        NOTE(review): extract_first() can return None, in which case the
        .split(",") / .strip() calls in the yielded dict would raise
        AttributeError — confirm the selectors always match.
        """
        # get the element containing the address info and extract the text
        address = response.css("#modalcontactInfo span[itemprop=streetAddress]::text").extract_first()
        # check if the address is available...
        if address is not None:
            # ... if it is, get the city, state and zipcode from it (this info
            # is contained in the last three info in the address)
            city, state, zipcode = address.split(",")[-3:]
            # separate the address
            addr = ''.join(address.split(",")[:-3])
        else:
            # if the address is not available
            # set the city, state, addr and zipcode to empty string
            city, state, zipcode = "", "", ""
            addr = ""
        # return a dictionary of the extracted info
        yield {
            "name": response.css("#modalcontactInfo p.modal-agent-name::text").extract_first().split(",")[0],
            "location": response.css("#modalcontactInfo p.modal-agent-location::text").extract_first().strip(),
            "address": addr,
            "city": city,
            "state": state,
            "zipcode": zipcode,
        }
|
[
"taiwogabrielsamuel@gmail.com"
] |
taiwogabrielsamuel@gmail.com
|
fc5d1edb3647e18a663c8c43b897809c51abbf89
|
4c2a391f2f4d7361f2c7111b6d63edf67056f327
|
/model/oauth.py
|
4c650a7683108b8d5c4e420c7b90b52c00c2172a
|
[] |
no_license
|
niyoufa/tnd_server
|
6d69db32ceb5a6a14417b3e8b0f021fdc0e7e79c
|
59c9ac6769773573685be215b4674d77545fe127
|
refs/heads/master
| 2020-06-23T15:43:28.891619
| 2016-08-26T03:44:01
| 2016-08-26T03:44:01
| 66,613,944
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 408
|
py
|
# -*- coding: utf-8 -*-
"""
author : youfaNi
date : 2016-07-13
"""
from bson.son import SON
import renren.model.model as model
import renren.libs.mongolib as mongo
import renren.consts as consts
import renren.libs.utils as utils
class OauthModel(model.BaseModel,model.Singleton):
    """Singleton data-access model bound to the oauth_clients collection."""
    # Collection name; name-mangled to _OauthModel__name (private per class).
    __name = "renren.oauth_clients"
    def __init__(self):
        # Bind the base model to this model's collection name.
        model.BaseModel.__init__(self,OauthModel.__name)
|
[
"1061794187@qq.com"
] |
1061794187@qq.com
|
de2ccc054448a523050c34b2aa361df702f2bc2a
|
1bb13fc75aa56c00ad17a16d7c590060aa71b188
|
/src/test/test_orders.py
|
c26ae7182c118076d8e92b747ca0cc0bbcd37172
|
[
"MIT"
] |
permissive
|
TheEpicBigBoss/pytr
|
add4df656943b1179c7539d6b70ebd3be0255e2c
|
7673f049e7ea635af47d11d8450db18c27b43104
|
refs/heads/master
| 2023-01-24T13:29:49.839121
| 2020-12-04T15:08:51
| 2020-12-04T15:24:43
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,089
|
py
|
import asyncio
import pprint
import json
import logging
from py_tr import TradeRepublicApi
tr = TradeRepublicApi()
def save_to_file(file, response):
    """Serialise `response` as JSON into the path `file` (overwriting it)."""
    outfile = open(file, 'w')
    try:
        json.dump(response, outfile)
    finally:
        outfile.close()
async def find_best_warrants():
    """Fetch the portfolio, submit a market SELL for 1 AAPL on LSX, then
    print every websocket message received.

    WARNING: the market_order call below places a REAL order (the buy is
    commented out), and the receive loop at the end never terminates.
    """
    logging.info("async started")
    underlying_isin = "US0378331005" # AAPL
    # await tr.search_derivative(underlying_isin)
    # await tr.search("AAPL", asset_type="stock")
    # await tr.ticker("US0378331005", exchange="LSX")
    await tr.portfolio()
    # await tr.cash_available_for_order()
    # await tr.market_order('US0378331005', 'LSX', 'buy', 1, 'gfd', False) # BUY 1 AAPL
    await tr.market_order('US0378331005', 'LSX', 'sell', 1, 'gfd', False) # SELL 1 AAPL
    while True:
        subscription_id, subscription, response = await tr.recv()
        pprint.pprint(response)
        # save_to_file("portfolio.json", response)
        # with open('warrants_'+underlying_isin+'.json', 'w') as outfile:
        #     json.dump(response, outfile)
# Run the coroutine on the default event loop (blocks forever; see above).
asyncio.get_event_loop().run_until_complete(find_best_warrants())
|
[
"guilhermehott@gmail.com"
] |
guilhermehott@gmail.com
|
7c09a10f61384d6fed01557b413b08956ae5ed19
|
e0a2774cebdb4974e5a6c1363f3bc4e01cbe7915
|
/Chondokotha/apps.py
|
ded1f15d4575e672b78748aeb548e54a83e58f74
|
[] |
no_license
|
Julfikar-Haidar/Django-Vue-Apicall-advanced-search
|
cbc91970557848e41bbca127c12615a316ed5fe0
|
e5ff7180e43a7b9ef37847298395ebce5cee3a06
|
refs/heads/master
| 2021-09-27T08:01:08.041435
| 2020-03-18T06:22:08
| 2020-03-18T06:22:08
| 247,681,078
| 1
| 0
| null | 2021-09-22T18:45:47
| 2020-03-16T11:07:22
|
Python
|
UTF-8
|
Python
| false
| false
| 97
|
py
|
from django.apps import AppConfig
class ChondokothaConfig(AppConfig):
    """Django application configuration for the Chondokotha app."""
    name = 'Chondokotha'
|
[
"julfikar6262@gmail.com"
] |
julfikar6262@gmail.com
|
25116899b20f17da29b4c7bd6836e9e76144abd0
|
0ed5a3e86e3ed2a597a479db9f3dcd8208263393
|
/Project_Euler/P004_largest_palindromic_product.py
|
ead6e3325d6d350018a3029c85de25cb8a2600d5
|
[] |
no_license
|
justanotherguy-0/my_files
|
3eded569652e3eac963fad4fa971f400b682b1fc
|
9fb6960003224529b9433ea81eaddcc1bf37206a
|
refs/heads/master
| 2021-05-19T16:34:26.912930
| 2020-10-09T00:50:59
| 2020-10-09T00:50:59
| 252,029,729
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 622
|
py
|
# NOTE: these module-level values are unused leftovers (the function takes
# its own limit and uses local loop variables); kept for compatibility.
n=999999
nn=999

def max_poldr(n):
    """Return the largest palindrome <= n that is a product of two 3-digit
    factors (each in [101, 999]) — Project Euler problem 4.

    Returns the tuple (factor, ' times ', cofactor, ' is ', palindrome) with
    `factor` the larger of the two, matching the original output format, or
    None when no qualifying palindrome above 10000 exists.

    Fixed: removed the leftover debug print('tugfa'), the dead code after
    the return, and the float-division divisibility test (n/nn==int(n/nn)),
    replaced by an exact divmod check.
    """
    num = n
    while num > 10000:
        text = str(num)
        if text == text[::-1]:
            # Palindrome: look for a 3-digit divisor with a 3-digit
            # cofactor, scanning from the top so the first hit is the
            # largest factor (same order as the original search).
            for factor in range(999, 100, -1):
                cofactor, remainder = divmod(num, factor)
                if remainder == 0 and 101 <= cofactor <= 999:
                    return (factor, ' times ', cofactor, ' is ', num)
        num -= 1
    return None

print (max_poldr(999999))
|
[
"noreply@github.com"
] |
justanotherguy-0.noreply@github.com
|
883646eb7ce3edd5796ff15d1b6dfccf9f746abe
|
8662ed271890dfc310c3854359d4663bfb051529
|
/Q7_b.py
|
49a34b843f33d45a87bb233727108f2531daed5b
|
[] |
no_license
|
zahraDehghanian97/Football_Player_Classification
|
daf10145fafbd8d542b3a17f0364b6eebb9d2651
|
7bc8f9685cc00b630c8c95b0a7c98daa494bee5c
|
refs/heads/master
| 2020-09-07T21:09:54.848689
| 2019-11-11T06:15:40
| 2019-11-11T06:15:40
| 220,913,642
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,103
|
py
|
import csv
import matplotlib.pyplot as plt
import statistics
import numpy as np
# Raw log rows (header skipped) from the first-half positional data.
data = []
with open('first_half_logs.csv') as csv_file:
    csv_reader = csv.reader(csv_file, delimiter=',')
    line_count = 0
    for row in csv_reader:
        if line_count == 0:
            # skip the header row
            line_count += 1
        else:
            data.append(row)
            line_count += 1
# Bucket (x, y) positions by player id (column 1); 16 player slots.
players = []
for i in range(16):
    players.append([])
for d in data:
    temp = [float(d[2]), float(d[3])]
    players[int(d[1])].append(temp)
# Per-player mean position and 2x2 covariance (players with >1 sample only).
mean = []
cov = []
number =[]
for player in players :
    if len(player) > 1:
        mean.append([statistics.mean(np.array(player)[:,0]),statistics.mean(np.array(player)[:,1])])
        number.append(player)
        cov.append(np.cov(np.array(player)[:,0],np.array(player)[:,1]))
counter =0
N_bins = 100
# Sample each fitted Gaussian and draw a 2-D histogram per player.
for player in range(len(mean)) :
    x, y = np.random.multivariate_normal(mean[player], cov[player], 5000).T
    # plt.figure(counter)
    plt.figure(figsize=(1, 2))
    # NOTE(review): the `normed` keyword was removed from matplotlib's
    # hist2d in newer releases (use `density`); confirm the pinned version.
    plt.hist2d(x, y, bins=N_bins, normed=False, cmap='plasma')
    counter += 1
print(cov)
# Show the plot.
plt.show()
|
[
"z.dehghanian@aut.ac.ir"
] |
z.dehghanian@aut.ac.ir
|
59294bd41c7d4e0de58d33c5fd3f37a16597be89
|
34a95dd5bf5c1339d6a1ba55a5c8085e62152bac
|
/quantumlounge/http/api/users/users.py
|
e1620b646be3d09e44a8100b03250618a8a39e63
|
[] |
no_license
|
mrtopf/QuantumLounge
|
fa082fd34bad54a9ed3dfdcd358a57590d43bfcc
|
ce9854dc47e7a07c3b59a165c10ee8da61d05db4
|
refs/heads/master
| 2021-01-19T14:32:50.048076
| 2011-09-29T10:27:10
| 2011-09-29T10:27:10
| 838,102
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,397
|
py
|
from quantumlounge.framework import Handler, json, html, RESTfulHandler
import werkzeug
import simplejson
import quantumlounge.usermanager.errors as errors
class Users(RESTfulHandler):
"""handle a collection of Users
**Allowed Methods**: ALL
TODO: More documentation in Sphinx about it
"""
# TODO: Do this via the content type?
collection_name = "usermanager"
@json() # we assume JSON for now
def get(self, format='json'):
um = self.settings[self.collection_name]
ct = self.settings['content1']['tweet']
so = self.request.values.get("so","date") # sort order
sd = self.request.values.get("sd","down") # sort direction
try:
l = int(self.request.values.get("l","10")) # limit
except:
return self.error("wrong value for 'l'")
try:
o = int(self.request.values.get("o","0")) #offset
except:
return self.error("wrong value for 'o'")
content = um.index(
sort_on = so,
sort_order = sd,
limit = l,
offset = o
)
content = [c.json for c in content]
return content
@json()
def post(self):
"""Create a new item"""
f = self.request.form
print "POSTING", f
return "ok"
class User(RESTfulHandler):
"""single item"""
|
[
"mrtopf@gmail.com"
] |
mrtopf@gmail.com
|
8902f9f1b1e2cecf529e9c659f9b3c92b3b8ed33
|
7a60d48c403a98e0065d701602a9ec0eae136c35
|
/algorithmic_toolbox/PA4_divide_and_conquer/3_sorting.py
|
f0204b96a893a06eb391e4e8444b05b078c56b4d
|
[] |
no_license
|
themillipede/data-structures-and-algorithms
|
691a20ec4239f43c3f3f597b8f803097dc691d30
|
4a64a10d8f5e02921a83d37c48fd3871dd91ced8
|
refs/heads/master
| 2020-03-13T05:20:24.990413
| 2019-12-28T20:13:02
| 2019-12-28T20:13:02
| 130,981,788
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,283
|
py
|
# Uses python3
"""
3. Improving quick sort
Introduction: The goal in this problem is to redesign the randomized quick sort algorithm so that it works
fast even on sequences containing many equal elements.
Task: Replace the 2-way partition with a 3-way partition to enable the quick sort algorithm to efficiently
process sequences with few unique elements. That is, your new partition procedure should partition the
array into three parts: < x part, = x part, and > x part.
Input: The first line contains an integer n. The next line contains a sequence of n integers a_0, a_1, ..., a_(n-1).
Constraints: 1 <= n <= 10^5; 1 <= a_i <= 10^9 for all 0 <= i < n.
Output: The sequence sorted in in non-decreasing order.
"""
import sys
import random
#################################
# Quick sort with 2-way partition
#################################
def partition2(a, l, r):
pivot = a[l]
p_idx = l
for i in range(l + 1, r + 1):
if a[i] <= pivot:
p_idx += 1
a[i], a[p_idx] = a[p_idx], a[i]
a[l], a[p_idx] = a[p_idx], a[l]
return p_idx
def randomized_quick_sort2(a, l, r):
if l >= r:
return
k = random.randint(l, r)
a[l], a[k] = a[k], a[l]
m = partition2(a, l, r)
randomized_quick_sort2(a, l, m - 1)
randomized_quick_sort2(a, m + 1, r)
#################################
# Quick sort with 3-way partition
#################################
def partition3(a, l, r):
pivot = a[l]
p_idx = l
k = l # Will become the index of the largest number smaller than the pivot.
for i in range(l + 1, r + 1):
if a[i] <= pivot:
p_idx += 1
a[i], a[p_idx] = a[p_idx], a[i]
if a[p_idx] < pivot:
k += 1
a[k], a[p_idx] = a[p_idx], a[k]
a[l], a[p_idx] = a[p_idx], a[l]
return k, p_idx
def randomized_quick_sort3(a, l, r):
if l >= r:
return
k = random.randint(l, r)
a[l], a[k] = a[k], a[l]
m1, m2 = partition3(a, l, r)
randomized_quick_sort3(a, l, m1)
randomized_quick_sort3(a, m2 + 1, r)
if __name__ == '__main__':
input = sys.stdin.read()
n, *a = list(map(int, input.split()))
randomized_quick_sort3(a, 0, n - 1)
for x in a:
print(x, end=' ')
|
[
"katherine.e.millican@gmail.com"
] |
katherine.e.millican@gmail.com
|
da6bbc90bc3f5eb9cc318bf4a17e5c13343bd2a6
|
ed7e9c15fa0ddf5fa45e5c9392cd032dcc41c160
|
/miro_name_funktion.py
|
b5b52789bbb8f0dced89a7797deee3d25cf1b8b2
|
[] |
no_license
|
ZiggyStarProgrammer/kyh-practice
|
acefd043572a8d8ac03ea6e06544f485c32c2349
|
d253ddb1e39e99bf5528096541d4b978a7e7d289
|
refs/heads/master
| 2023-01-13T13:04:13.948295
| 2020-11-30T09:04:02
| 2020-11-30T09:04:02
| 291,671,537
| 0
| 0
| null | 2020-09-01T08:18:56
| 2020-08-31T09:25:50
|
Python
|
UTF-8
|
Python
| false
| false
| 148
|
py
|
import random
def hello(name):
rnd_age = random.randint(1, 150)
print(f"Hell {name} age {rnd_age}")
result = hello("Olof")
print(result)
|
[
"hugo.ojdal@gmail.com"
] |
hugo.ojdal@gmail.com
|
3987405f70f48d91c8ac18c9912585cb8b9c44d3
|
5ba345bc16519d892fb533451eeface7c76a7d48
|
/Classification/Logistic-Regression/LogisticRegression.py
|
33ac92c5f131dde88d715d277e16cca84ae2164e
|
[] |
no_license
|
sayands/machine-learning-projects
|
337fd2aeb63814b6c47c9b2597bfe1ce4399a1f1
|
8e516c0ac3a96a4058d063b86559ded9be654c35
|
refs/heads/master
| 2021-05-06T16:26:37.008873
| 2018-08-02T20:27:20
| 2018-08-02T20:27:20
| 113,749,745
| 1
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,653
|
py
|
#Logistic Regression
#Importing libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
#Importing the dataset
#  features: columns 2-3 (Age, EstimatedSalary); target: column 4 (Purchased)
dataset = pd.read_csv('Social_Network_Ads.csv')
X = dataset.iloc[:, 2:4].values
Y = dataset.iloc[:, 4].values
#Splitting the dataset into the Training Set and Test set (75/25, seeded)
from sklearn.model_selection import train_test_split
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.25, random_state = 0)
#Feature Scaling
#  scaler is fitted on the training data only, then applied to the test set
from sklearn.preprocessing import StandardScaler
sc_X = StandardScaler()
X_train = sc_X.fit_transform(X_train)
X_test = sc_X.transform(X_test)
#Fitting Logistic Regression To The Training Set
from sklearn.linear_model import LogisticRegression
classifier = LogisticRegression(random_state = 0)
classifier.fit(X_train,Y_train)
#Predicting The Test Set Results
y_pred = classifier.predict(X_test)
#Making The Confusion Matrix
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(Y_test, y_pred)
#Visualising The Training Set Results
#  the meshgrid covers the (scaled) feature plane at 0.01 resolution and is
#  coloured by the classifier's decision, showing the decision boundary
from matplotlib.colors import ListedColormap
X_set, y_set = X_train, Y_train
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01),
                     np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01))
plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
             alpha = 0.75, cmap = ListedColormap(('red', 'green')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
for i, j in enumerate(np.unique(y_set)):
    plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
                c = ListedColormap(('red', 'green'))(i), label = j)
plt.title('Logistic Regression (Training set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.show()
#Visualising The Test Set Results (same plot, held-out data)
from matplotlib.colors import ListedColormap
X_set, y_set = X_test, Y_test
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01),
                     np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01))
plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
             alpha = 0.75, cmap = ListedColormap(('red', 'green')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
for i, j in enumerate(np.unique(y_set)):
    plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
                c = ListedColormap(('red', 'green'))(i), label = j)
plt.title('Logistic Regression (Test set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.show()
|
[
"sayandsarkar.1997@gmail.com"
] |
sayandsarkar.1997@gmail.com
|
cc2703f83d745a4127e40e6b080d631591948f77
|
83aaf1e569e931cc2398301bb3238f834d8e7b67
|
/Teste/client.py
|
0fe88fcf9745b4b35514634f8754dda11516017c
|
[] |
no_license
|
Lucaspvo/CI061_RedesDeComputadoresII
|
e4e6f0e2d7988ff5e1a3864df14c653a45893d74
|
7e05c64f58128936af2bb5941449ea49e4327cf7
|
refs/heads/master
| 2021-01-12T04:11:33.957778
| 2016-12-28T23:21:06
| 2016-12-28T23:21:06
| 77,536,527
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 679
|
py
|
#!/usr/bin/python # This is client.py file
# -*- coding: utf-8 -*-
import socket # Import socket module
import sys
sys.path.append( 'Teste' )
from Transmition import *
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # Create a socket object
host = socket.gethostname() # Get local machine name
port = 12345 # Reserve a port for your service.
s.connect((host, port))
data = "Vou me comunicar com o servidor agora!"
Transmition.Send(s, data)
#s.recv(1480)
data = "Ja devia ter parado"
Transmition.Send(s, data)
string = Transmition.Recv(s)
# Fixed for Python 3: print is a function.
print(string)
# Fixed: `s.close` referenced the method without calling it, so the socket
# was never actually closed.
s.close() # Close the socket when done
|
[
"lucaspazello@gmail.com"
] |
lucaspazello@gmail.com
|
6744d1aea0d953c74743d2d88e6ac66ed6aa086c
|
bfdaaee87c9383d3192dc308ba3e7ec1365dbd25
|
/mobiliseclient.py
|
254be5eaed56419b5fc044ab7ac20966d5d81421
|
[
"Apache-2.0"
] |
permissive
|
benwatson528/data-warehouse-client
|
622f2de12ad78236b66d7c21a08dd0d59dd8b6d0
|
927354e1e33dee80992b3dee7b555dd23d1bddec
|
refs/heads/master
| 2023-01-04T23:58:13.641844
| 2020-10-29T15:58:43
| 2020-10-29T15:58:43
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,533
|
py
|
#
# All client code in a single file for event data access
#
import json
import requests
#
# Subset of the e-SC datatypes necessary to fetch event data
#
class EscEvent:
    """A single e-SC event: type, timestamp, plus data/metadata payloads."""

    def __init__(self):
        # Defaults for an empty event.
        self.eventType = ""
        self.timestamp = 0
        self.metadata = {}
        self.data = {}

    def parseDict(self, dict):
        """Populate this event from a decoded JSON dictionary
        (raises KeyError when a field is missing)."""
        for field in ('eventType', 'timestamp', 'data', 'metadata'):
            setattr(self, field, dict[field])
# Study object
class EscStudyObject:
    """Base object attached to a study (ids, folder, name, extra props)."""

    def __init__(self):
        self.id = 0
        self.studyId = 0
        self.folderId = ''
        self.externalId = ''
        self.name = ''
        self.additionalProperties = {}

    def parseDict(self, dict):
        """Populate this object from a decoded JSON dictionary."""
        self.id = dict['id']
        self.studyId = dict['studyId']
        self.folderId = dict['folderId']
        self.externalId = dict['externalId']
        self.name = dict['name']
        self.additionalProperties = dict['additionalProperties']

    def toDict(self):
        """Serialise back to a JSON-compatible dictionary."""
        return {
            'id': self.id,
            'studyId': self.studyId,
            'folderId': self.folderId,
            # Fixed: previously referenced the non-existent attribute
            # `self.extrnalId` (typo), raising AttributeError at runtime.
            'externalId': self.externalId,
            'name': self.name,
            'additionalProperties': self.additionalProperties
        }
# Person in a study
class EscPerson(EscStudyObject):
    """Study participant; an EscStudyObject tagged with an objectType."""
    def __init__(self):
        EscStudyObject.__init__(self)
        # Type discriminator used to identify the object kind.
        self.objectType = "EscPerson"
# JWT Token object
class EscJWT:
    """Holder for a JWT access token plus its refresh token and expiry."""

    def __init__(self):
        self.token = ''
        self.id = ''
        self.expiryTimestamp = 0
        self.refreshToken = ''

    # Create from JSON
    def parseDict(self, dict):
        """Fill the token fields from a decoded JSON dictionary."""
        for field in ('expiryTimestamp', 'token', 'id', 'refreshToken'):
            setattr(self, field, dict[field])

    # Write to JSON
    def toDict(self):
        """Serialise to a JSON-compatible dictionary."""
        fields = ("expiryTimestamp", "token", "id", "refreshToken")
        return {name: getattr(self, name) for name in fields}
# Project / Study object
class EscProject:
    """An e-SC project/study with its workflow- and data-folder links."""

    _FIELDS = ('id', 'name', 'description', 'workflowFolderId',
               'dataFolderId', 'creatorId', 'externalId', 'projectType')

    def __init__(self):
        for field in self._FIELDS:
            setattr(self, field, '')
        # NB: spelling preserved from the server-side constant.
        self.projectType = 'HEIRARCHICAL'

    def parseDict(self, dict):
        """Populate from a decoded JSON dictionary."""
        for field in self._FIELDS:
            setattr(self, field, dict[field])

    def toDict(self):
        """Serialise to a JSON-compatible dictionary."""
        return {field: getattr(self, field) for field in self._FIELDS}
# Base class for ServerObjects
class EscObject:
    """Common fields shared by server-side objects (folders, documents, ...)."""

    def __init__(self):
        self.id = ''
        self.name = ''
        self.description = ''
        self.creatorId = ''
        self.projectId = ''
        self.containerId = ''
        self.internalClassName = ''
        self.creationTime = 0

    def parseDict(self, dict):
        """Populate from a decoded JSON dictionary."""
        self.id = dict['id']
        self.name = dict['name']
        self.description = dict['description']
        self.creatorId = dict['creatorId']
        self.projectId = dict['projectId']
        self.containerId = dict['containerId']
        self.internalClassName = dict['internalClassName']
        self.creationTime = dict['creationTime']

    def toDict(self, dict):
        """Serialise to a JSON-compatible dictionary.

        The `dict` parameter is unused; it is kept only so the public
        signature stays unchanged for existing callers.
        """
        return {
            "id": self.id,
            "name": self.name,
            "description": self.description,
            "creatorId": self.creatorId,
            "projectId": self.projectId,
            # Fixed: the key was mistyped as "containerId:" (stray colon),
            # so round-tripped dictionaries had a corrupt key.
            "containerId": self.containerId,
            "internalClassName": self.internalClassName,
            "creationTime": self.creationTime
        }
# Folder object
class EscFolder(EscObject):
    """A server-side folder; carries no fields beyond the EscObject base."""
    def __init__(self):
        EscObject.__init__(self)
# Document object
class EscDocument(EscObject):
    """A stored document: base object fields plus version/transfer info."""

    def __init__(self):
        EscObject.__init__(self)
        self.currentVersionSize = 0
        self.currentVersionNumber = 0
        self.currentVersionHash = ''
        self.downloadPath = ''
        self.uploadPath = ''

    def parseDict(self, dict):
        """Populate from a decoded JSON dictionary.

        Fixed: previously called `self._EscObject__parseDict(dict)`, a
        name-mangled attribute that does not exist on EscObject (its method
        is the plain `parseDict`), so this always raised AttributeError.
        """
        EscObject.parseDict(self, dict)
        self.currentVersionSize = dict['currentVersionSize']
        self.currentVersionNumber = dict['currentVersionNumber']
        self.currentVersionHash = dict['currentVersionHash']
        self.downloadPath = dict['downloadPath']
        self.uploadPath = dict['uploadPath']

    def toDict(self):
        """Serialise to a JSON-compatible dictionary.

        Fixed: previously called the non-existent `self.__EscObject_toDict`
        (bad manual mangling) and never returned the assembled dictionary.
        """
        # Base serialisation first; its unused parameter takes None.
        data = EscObject.toDict(self, None)
        data['currentVersionSize'] = self.currentVersionSize
        data['currentVersionNumber'] = self.currentVersionNumber
        data['currentVersionHash'] = self.currentVersionHash
        data['downloadPath'] = self.downloadPath
        data['uploadPath'] = self.uploadPath
        return data
#
# Combined client object
#
class EscClient:
    def __init__(self, hostname, port, ssl):
        """Client for the e-SC REST API at hostname:port (ssl: use https)."""
        self.jwt = ""  # bearer token string; populated after issueToken()
        self.hostname = hostname
        self.port = port
        self.ssl = ssl
# Create a url
def __create_url(self, url):
if self.ssl==True:
return 'https://' + self.hostname + ':' + str(self.port) + url
else:
return 'http://' + self.hostname + ':' + str(self.port) + url
# Create a form body that can be POSTed using a dict of name:value pairs
def __create_form_body(self, body_dict):
count = 0;
body = '';
for key in body_dict:
if count > 0:
body = body + '&'
body = body + key + '=' + body_dict[key]
count=count+1;
return body
# Create the request headers
def __create_headers(self):
return {
'Authorization' : 'Bearer ' + self.jwt
}
    # Send a Form using the POST method
    def __post_form_retrieve_json(self, url, form_data, send_auth):
        """POST `form_data` to `url`; return the decoded JSON response.
        The Authorization header is attached only when send_auth is True."""
        if send_auth==True:
            r = requests.post(self.__create_url(url), form_data, headers=self.__create_headers())
            return r.json()
        else:
            r = requests.post(self.__create_url(url), form_data)
            return r.json()
    # Delete a resource
    def __delete_resource(self, url):
        """Authenticated DELETE of `url`; the response body is discarded."""
        requests.delete(self.__create_url(url), headers=self.__create_headers())
    # Post text and get text back
    def __post_text_retrieve_text(self, url, text_data):
        """POST UTF-8 plain text; return the response body as text."""
        headers = self.__create_headers()
        headers['content-type'] = 'text/plain'
        r = requests.post(self.__create_url(url), data=text_data.encode('utf-8'), headers=headers)
        return r.text
    def __post_json_retrieve_json(self, url, json_data):
        """POST an already-serialised JSON payload; return decoded JSON."""
        headers = self.__create_headers()
        headers['content-type'] = 'application/json'
        r = requests.post(self.__create_url(url), data=json_data, headers=headers)
        return r.json()
    def __retrieve_json(self, url):
        """Authenticated GET returning the decoded JSON response."""
        headers = self.__create_headers()
        r = requests.get(self.__create_url(url), headers=headers)
        return r.json()
    def __retrieve_text(self, url):
        """Authenticated GET returning the raw response text."""
        headers = self.__create_headers()
        r = requests.get(self.__create_url(url), headers=headers)
        return r.text
# =======================================================================================
#
# Implementation of standard e-SC client methods
#
# =======================================================================================
#
# Issue an access token using a username and password
#
def issueToken(self, username, password, label):
auth_details = {
"username": username,
"password": password,
"label": label
}
result = self._EscClient__post_form_retrieve_json("/api/public/rest/v1/tokens/issue", auth_details, False)
jwt = EscJWT()
jwt.parseDict(result)
return jwt
#
# Release an access token, which prevents its subsequent use
#
def releaseToken(self, id):
self._EscClient__delete_resource(id)
#
# Check whether a JWT is still valid
#
def validateToken(self, token):
return self._EscClient__post_text_retrieve_text("/api/public/rest/v1/tokens/validate", token)
#
# Returns a list of the projects that the authenticated user is permitted to view
#
def listProjects(self):
jsonData = self._EscClient__retrieve_json("/api/public/rest/v1/storage/projects");
results = {}
for i in range(0, len(jsonData)):
project = EscProject();
project.parseDict(jsonData[i])
results[i] = project
return results
#
# Return a folder object given its database id
#
def getFolder(self, id):
jsonData = self._EscClient__retrieve_json("/api/public/rest/v1/storage/folders/" + id)
folder = EscFolder();
folder.parseDict(jsonData)
return folder
#
# Access a person using their externally visible ID. i.e. the PatientID
#
def getPersonByExternalId(self, externalId):
jsonData = self._EscClient__retrieve_json("/api/public/rest/v1/catalog/peoplebyexternalid/" + externalId)
person = EscPerson()
person.parseDict(jsonData)
return person
#
# Access a study given its externally visible ID
#
def getProjectByStudyCode(self, studyCode):
jsonData = self._EscClient__retrieve_json("/api/public/rest/v1/catalog/studiesbyexternalcode/" + studyCode)
project = EscProject()
project.parseDict(jsonData)
return project
#
# Return the number of event objects contained in a study
#
def getEventCount(self, studyCode):
return int(self._EscClient__retrieve_text("/api/public/rest/v1/catalog/studiesbyid/" + studyCode + "/allevents/count"))
#
# Get a set of events
#
def queryEventsFromStudy(self, studyCode, startIndex, pageSize):
jsonData = self._EscClient__retrieve_json("/api/public/rest/v1/catalog/studiesbyid/" + studyCode + "/allevents/" + str(startIndex) + "/" + str(pageSize))
results = {}
for i in range(0, len(jsonData)):
evt = EscEvent()
evt.parseDict(json.loads(jsonData[i]))
results[i] = evt
return results
#
# Get the number of people in a study
#
def getNumberOfPeopleInStudy(self, projectId):
return int(self._EscClient__retrieve_text("/api/public/rest/v1/catalog/studiesbyid/" + str(projectId) + "/people/count"))
#
# Get a set of people from a study
#
def getPeople(self, projectId, startIndex, count):
jsonData = self._EscClient__retrieve_json("/api/public/rest/v1/catalog/studiesbyid/" + str(projectId) + "/people/list/" + str(startIndex) + "/" + str(count))
results = {}
for i in range(0, len(jsonData)):
person = EscPerson()
person.parseDict(jsonData[i])
results[i] = person
return results
|
[
"noreply@github.com"
] |
benwatson528.noreply@github.com
|
a3a464a3725d720d8633759081deeaade26c896e
|
2b08c18c5ac84dc170eefb05d69e24800d34983e
|
/venv/Lib/site-packages/django/middleware/common.py
|
56c625910901fce91163a3d3838946257ba5a88b
|
[] |
no_license
|
wottan32/website
|
fda48f2f9c177f2aaf008c7b9bd94fbb06cb1de4
|
db05b866badab8d046ea9eeb8c061d2e66312f98
|
refs/heads/main
| 2023-06-17T00:51:28.821850
| 2021-07-14T17:50:41
| 2021-07-14T17:50:41
| 385,640,116
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,372
|
py
|
import re
from urllib.parse import urlparse
from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.core.mail import mail_managers
from django.http import HttpResponsePermanentRedirect
from django.urls import is_valid_path
from django.utils.deprecation import MiddlewareMixin
from django.utils.http import escape_leading_slashes
class CommonMiddleware(MiddlewareMixin):
    """
    "Common" middleware for taking care of some basic operations:

        - Forbid access to User-Agents in settings.DISALLOWED_USER_AGENTS

        - URL rewriting: Based on the APPEND_SLASH and PREPEND_WWW settings,
          append missing slashes and/or prepends missing "www."s.

            - If APPEND_SLASH is set and the initial URL doesn't end with a
              slash, and it is not found in urlpatterns, form a new URL by
              appending a slash at the end. If this new URL is found in
              urlpatterns, return an HTTP redirect to this new URL; otherwise
              process the initial URL as usual.

    This behavior can be customized by subclassing CommonMiddleware and
    overriding the response_redirect_class attribute.
    """

    response_redirect_class = HttpResponsePermanentRedirect

    def process_request(self, request):
        """
        Check for denied User-Agents and rewrite the URL based on
        settings.APPEND_SLASH and settings.PREPEND_WWW
        """
        # Check for denied User-Agents.
        # Fix: META.get() was called with no key, raising TypeError on every
        # request; look up the User-Agent header as upstream Django does.
        user_agent = request.META.get('HTTP_USER_AGENT')
        if user_agent is not None:
            for user_agent_regex in settings.DISALLOWED_USER_AGENTS:
                if user_agent_regex.search(user_agent):
                    raise PermissionDenied('Forbidden user agent')

        # Check for a redirect based on settings.PREPEND_WWW
        host = request.get_host()
        must_prepend = settings.PREPEND_WWW and host and not host.startswith('www.')
        redirect_url = ('%s://www.%s' % (request.scheme, host)) if must_prepend else ''

        # Check if a slash should be appended
        if self.should_redirect_with_slash(request):
            path = self.get_full_path_with_slash(request)
        else:
            path = request.get_full_path()

        # Return a redirect if necessary
        if redirect_url or path != request.get_full_path():
            redirect_url += path
            return self.response_redirect_class(redirect_url)

    def should_redirect_with_slash(self, request):
        """
        Return True if settings.APPEND_SLASH is True and appending a slash to
        the request path turns an invalid path into a valid one.
        """
        if settings.APPEND_SLASH and not request.path_info.endswith('/'):
            urlconf = getattr(request, 'urlconf', None)
            if not is_valid_path(request.path_info, urlconf):
                match = is_valid_path('%s/' % request.path_info, urlconf)
                if match:
                    view = match.func
                    # Views can opt out with should_append_slash = False.
                    return getattr(view, 'should_append_slash', True)
        return False

    def get_full_path_with_slash(self, request):
        """
        Return the full path of the request with a trailing slash appended.

        Raise a RuntimeError if settings.DEBUG is True and request.method is
        POST, PUT, or PATCH.
        """
        new_path = request.get_full_path(force_append_slash=True)
        # Prevent construction of scheme relative urls.
        new_path = escape_leading_slashes(new_path)
        if settings.DEBUG and request.method in ('POST', 'PUT', 'PATCH'):
            raise RuntimeError(
                "You called this URL via %(method)s, but the URL doesn't end "
                "in a slash and you have APPEND_SLASH set. Django can't "
                "redirect to the slash URL while maintaining %(method)s data. "
                "Change your form to point to %(url)s (note the trailing "
                "slash), or set APPEND_SLASH=False in your Django settings." % {
                    'method': request.method,
                    'url': request.get_host() + new_path,
                }
            )
        return new_path

    def process_response(self, request, response):
        """
        When the status code of the response is 404, it may redirect to a path
        with an appended slash if should_redirect_with_slash() returns True.
        """
        # If the given URL is "Not Found", then check if we should redirect to
        # a path with a slash appended.
        if response.status_code == 404 and self.should_redirect_with_slash(request):
            return self.response_redirect_class(self.get_full_path_with_slash(request))

        # Add the Content-Length header to non-streaming responses if not
        # already set.
        if not response.streaming and not response.has_header('Content-Length'):
            response.headers['Content-Length'] = str(len(response.content))

        return response
class BrokenLinkEmailsMiddleware(MiddlewareMixin):

    def process_response(self, request, response):
        """Send broken link emails for relevant 404 NOT FOUND responses."""
        if response.status_code == 404 and not settings.DEBUG:
            domain = request.get_host()
            path = request.get_full_path()
            # Fix: all three META.get() calls below were missing their header
            # keys (TypeError at runtime); restored per upstream Django,
            # including its '<none>' placeholders for the email body.
            referer = request.META.get('HTTP_REFERER', '')

            if not self.is_ignorable_request(request, path, domain, referer):
                ua = request.META.get('HTTP_USER_AGENT', '<none>')
                ip = request.META.get('REMOTE_ADDR', '<none>')
                mail_managers(
                    "Broken %slink on %s" % (
                        ('INTERNAL ' if self.is_internal_request(domain, referer) else ''),
                        domain
                    ),
                    "Referrer: %s\nRequested URL: %s\nUser agent: %s\n"
                    "IP address: %s\n" % (referer, path, ua, ip),
                    fail_silently=True,
                )
        return response

    def is_internal_request(self, domain, referer):
        """
        Return True if the referring URL is the same domain as the current
        request.
        """
        # Different subdomains are treated as different domains.
        return bool(re.match("^https?://%s/" % re.escape(domain), referer))

    def is_ignorable_request(self, request, uri, domain, referer):
        """
        Return True if the given request *shouldn't* notify the site managers
        according to project settings or in situations outlined by the inline
        comments.
        """
        # The referer is empty.
        if not referer:
            return True

        # APPEND_SLASH is enabled and the referer is equal to the current URL
        # without a trailing slash indicating an internal redirect.
        if settings.APPEND_SLASH and uri.endswith('/') and referer == uri[:-1]:
            return True

        # A '?' in referer is identified as a search engine source.
        if not self.is_internal_request(domain, referer) and '?' in referer:
            return True

        # The referer is equal to the current URL, ignoring the scheme (assumed
        # to be a poorly implemented bot).
        parsed_referer = urlparse(referer)
        if parsed_referer.netloc in ['', domain] and parsed_referer.path == uri:
            return True

        return any(pattern.search(uri) for pattern in settings.IGNORABLE_404_URLS)
|
[
"mariotorreslagos@gmail.com"
] |
mariotorreslagos@gmail.com
|
9aefb0ae5bd605c4dae7ca200d14f1508eb9fb11
|
f0755c0ca52a0a278d75b76ee5d9b547d9668c0e
|
/atcoder.jp/abc084/abc084_d/Main.py
|
672f72253da43a227e962b8055a0caa9001017ec
|
[] |
no_license
|
nasama/procon
|
7b70c9a67732d7d92775c40535fd54c0a5e91e25
|
cd012065162650b8a5250a30a7acb1c853955b90
|
refs/heads/master
| 2022-07-28T12:37:21.113636
| 2020-05-19T14:11:30
| 2020-05-19T14:11:30
| 263,695,345
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 567
|
py
|
def primes(n):
    """Sieve of Eratosthenes: return a list f of length n+1 where
    f[i] == 1 iff i is prime (f[0] and f[1] are 0)."""
    flags = [1] * (n + 1)
    flags[0] = flags[1] = 0
    for p in range(2, int(n ** 0.5) + 1):
        if flags[p]:
            # Strike out every multiple of the prime p starting at 2p.
            for multiple in range(p * 2, n + 1, p):
                flags[multiple] = 0
    return flags
# Precompute a[i] == 1 when i is an odd prime and (i+1)/2 is also prime
# ("2017-like numbers", AtCoder ABC084 D), then answer range queries with a
# prefix-sum table.
max = 100001  # NOTE(review): shadows the builtin max(); unused below, but kept
prime = primes(max)
a = [0]*max
for i in range(max):
    if i % 2 == 0:
        continue
    if prime[i] and prime[(i+1)//2]:
        a[i] = 1
# s[i] = number of qualifying values below i.
s = [0]*(max+1)
for i in range(max):
    s[i+1] = s[i] + a[i]
# Each query (l, r) asks how many qualifying values lie in [l, r].
Q = int(input())
for i in range(Q):
    l,r = map(int, input().split())
    print(s[r+1]-s[l])
|
[
"g1620535@is.ocha.ac.jp"
] |
g1620535@is.ocha.ac.jp
|
515c690329c1f5626a96c8abcf86ef28a3639bdb
|
1a04f833bcc5598f78d0c9febb32cbf7e636dbf3
|
/console/django_scantron/results/urls.py
|
d2d6e9bfe1a743ff2c4087e4d06291153c7fb176
|
[
"Apache-2.0"
] |
permissive
|
opsdisk/scantron
|
be56c09912763a9dfd62121c686d781e5cd45713
|
aad5b0a8b5c863294703baf22cfb2a5b8f8619af
|
refs/heads/master
| 2022-02-04T20:01:44.494099
| 2022-01-17T23:14:42
| 2022-01-17T23:14:42
| 395,031,583
| 138
| 22
|
Apache-2.0
| 2022-01-17T23:27:32
| 2021-08-11T15:14:27
|
Python
|
UTF-8
|
Python
| false
| false
| 176
|
py
|
from django.conf.urls import url
from django_scantron.results import views
# Single route: results/<numeric id> -> download/view the scan file.
urlpatterns = [url(r"^results/(?P<id>\d+)$", views.retrieve_scan_file, name="retrieve_scan_file")]
|
[
"brennon.thomas@rackspace.com"
] |
brennon.thomas@rackspace.com
|
ef279fb67bd4928a2129abf2c49b78475fe31e9e
|
8803c894db96609c05dcc3cdabc68a702124f07d
|
/djangochat/urls.py
|
91e43f4f8b902dbafc15f0c8e17aa37a17d7b851
|
[] |
no_license
|
JonathaCnB/django-essential
|
4531d449100f6a7c3fd11bbb0920e7e33db88300
|
182e973a70497702f630ce5acd6352be30fb421f
|
refs/heads/main
| 2023-08-01T07:41:29.683967
| 2021-09-25T01:58:00
| 2021-09-25T01:58:00
| 398,795,534
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 396
|
py
|
from django.urls import path
from .views import check_view, get_messages_view, send_view, to_enter, view_room
# Chat routes: entry page, room existence check, message send/fetch, and the
# room view itself (kept last so fixed prefixes match first).
urlpatterns = [
    path("", to_enter, name="to_enter"),
    path("check-room/", check_view, name="checkview"),
    path("send/", send_view, name="send"),
    path("get_messages/<str:room>/", get_messages_view, name="get_messages"),
    path("<str:room>/", view_room, name="room"),
]
|
[
"jotacarlosftb@gmail.com"
] |
jotacarlosftb@gmail.com
|
d9917661d0f20a96f2d7b647b9a499c5ee7da8b6
|
fbd06dea5dc8b0e538ff7d5bda3429d2de6b461c
|
/api/handler.py
|
8e5eb097671cae3936314635042d2ea5af9bde75
|
[] |
no_license
|
LessioGuilherme/pa004_health_insurance_cross_sell
|
713a78a71768bc505a77eb9672b5e3111a132fec
|
91ace7fffe89b2e785f10f23f160b63d523e18ec
|
refs/heads/main
| 2023-03-27T21:28:35.754093
| 2021-03-29T22:25:03
| 2021-03-29T22:25:03
| 328,284,147
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,212
|
py
|
import os
import pickle
import pandas as pd
from flask import Flask, request, Response
from insurancevehicle.Insurancevehicle import Insurancevehicle
# NOTE(review): absolute, machine-specific Windows path -- presumably should
# come from configuration or an environment variable; confirm before deploy.
model = pickle.load( open('C:/Users/Guilherme/Repos/pa004_health_insurance_cross_sell/model/model_lgbm.pkl', 'rb'))

app = Flask (__name__)

@app.route( '/insurancevehicle/ranking', methods =['POST'] )
def insurance_vehicle_ranking():
    """POST endpoint: rank insurance leads from a JSON payload.

    Accepts either a single record (JSON object) or a list of records and
    returns the pipeline's ranking response; an empty payload yields an
    empty JSON body with status 200.
    """
    test_json = request.get_json()

    if test_json: # there is data
        if isinstance(test_json, dict): # unique example
            test_raw = pd.DataFrame(test_json, index=[0])

        else: # multiple example
            test_raw = pd.DataFrame(test_json, columns=test_json[0].keys())

        data = test_raw.copy()

        # Full scoring pipeline: clean -> engineer -> prepare -> rank.
        pipeline = Insurancevehicle()
        data = pipeline.data_cleaning(data)
        data = pipeline.feature_engineering(data)
        data = pipeline.data_preparation(data)
        df_response = pipeline.get_ranking(model, test_raw, data)

        return df_response

    else:
        return Response('{}', status=200, mimetype='application/json')

if __name__ == '__main__':
    # PORT env var overrides the default 5000; binds to localhost only.
    port = os.environ.get('PORT', 5000)
    app.run(host='127.0.0.1', port=port)
|
[
"guilhermelessio@gmail.com"
] |
guilhermelessio@gmail.com
|
ea71dcf4271de4375a1cd100421e6cb04179b2a8
|
ae1d96991a256b905ab8793ebc6063a9628cef02
|
/muddery/combat/normal_combat_handler.py
|
f572690ce4f9a5ce3b3ed3411737fa890fdf193b
|
[
"BSD-3-Clause"
] |
permissive
|
FWiner/muddery
|
bd2028e431dbeae16d6db9806cd2e9a7f4c5f22d
|
f6daa5fab6007e7c830e301718154fbc7b78b2bb
|
refs/heads/master
| 2020-07-31T23:02:54.165362
| 2019-09-04T13:29:59
| 2019-09-04T13:29:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,083
|
py
|
"""
Combat handler.
"""
from django.conf import settings
from muddery.utils import defines
from muddery.utils.builder import delete_object
from muddery.combat.base_combat_handler import BaseCombatHandler
class NormalCombatHandler(BaseCombatHandler):
    """
    This implements the normal combat handler.

    Extends BaseCombatHandler with: NPC auto-skill casting, experience and
    loot distribution to winners, and cleanup/killing of losers.
    """

    def start_combat(self):
        """
        Start a combat, make all NPCs to cast skills automatically.
        """
        super(NormalCombatHandler, self).start_combat()

        for character in self.characters.values():
            # Characters without an account are NPCs/monsters.
            if not character.account:
                # Monsters auto cast skills
                character.start_auto_combat_skill()

    def at_server_shutdown(self):
        """
        This hook is called whenever the server is shutting down fully
        (i.e. not for a restart).
        """
        for character in self.characters.values():
            # Stop auto cast skills
            character.stop_auto_combat_skill()

        super(NormalCombatHandler, self).at_server_shutdown()

    def show_combat(self, character):
        """
        Show combat information to a character.

        Args:
            character: (object) character

        Returns:
            None
        """
        super(NormalCombatHandler, self).show_combat(character)

        # send messages in order
        character.msg({"combat_commands": character.get_combat_commands()})

    def finish(self):
        """
        Finish a combat. Send results to players, and kill all failed characters.
        """
        for character in self.characters.values():
            # Stop auto cast skills
            character.stop_auto_combat_skill()

        super(NormalCombatHandler, self).finish()

    def set_combat_results(self, winners, losers):
        """
        Called when the character wins the combat.

        Args:
            winners: (List) all combat winners.
            losers: (List) all combat losers.

        Returns:
            None
        """
        super(NormalCombatHandler, self).set_combat_results(winners, losers)

        # add exp to winners
        # get total exp
        exp = 0
        for loser in losers:
            # NOTE(review): provide_exp receives the loser itself as the
            # "receiver" argument -- confirm against provide_exp's contract.
            exp += loser.provide_exp(loser)

        if exp:
            # give experience to the winner
            for character in winners:
                character.add_exp(exp, combat=True)

        for character in winners:
            # Only player characters receive loot and quest progress.
            if character.is_typeclass(settings.BASE_PLAYER_CHARACTER_TYPECLASS):
                # get object list
                loots = None
                for loser in losers:
                    obj_list = loser.loot_handler.get_obj_list(character)
                    if obj_list:
                        if not loots:
                            loots = obj_list
                        else:
                            loots.extend(obj_list)

                # give objects to winner
                if loots:
                    character.receive_objects(loots, combat=True)

                # call quest handler
                for loser in losers:
                    character.quest_handler.at_objective(defines.OBJECTIVE_KILL, loser.get_data_key())

        # losers are killed.
        for character in losers:
            character.die(winners)

    def _cleanup_character(self, character):
        """
        Remove character from handler and clean
        it of the back-reference and cmdset
        """
        super(NormalCombatHandler, self)._cleanup_character(character)

        if not character.is_typeclass(settings.BASE_PLAYER_CHARACTER_TYPECLASS):
            if character.is_temp:
                # Temporary NPCs are deleted outright; notify players in the
                # room so their view refreshes.
                # notify its location
                location = character.location
                delete_object(character.dbref)
                if location:
                    for content in location.contents:
                        if content.has_account:
                            content.show_location()
            else:
                if character.is_alive():
                    # Recover all hp.
                    character.db.hp = character.max_hp
|
[
"luyijun999@gmail.com"
] |
luyijun999@gmail.com
|
b77cbf626196e076ca7cd12c221754afd3fca90e
|
ff8847da0e311e097fb2662535e2235f090fb6c8
|
/examples/test_rotcube.py
|
cbba7d2ddd564fb217203105e88ee22bc80be276
|
[
"Apache-2.0"
] |
permissive
|
donghaozhang/SWC2VTK
|
222eda519fed7fcc9f668382db2ca4ccf812a136
|
285e4981a3f1f54b8f819f3490f2e451569ce541
|
refs/heads/master
| 2020-07-07T04:33:58.381671
| 2019-08-20T14:56:38
| 2019-08-20T14:56:38
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 719
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jun 11 14:49:53 2016
@author: nebula
"""
import numpy as np
from swc2vtk.vtkgenerator import VtkGenerator
if __name__ == '__main__':
    # Build a VTK file containing a chain of cuboids between consecutive
    # 3D points, with linearly growing start/end radii.
    filename = 'rot_cube.vtk'
    vtkgen = VtkGenerator()

    pos = np.array([[0, 0, 0], [0, 2, 0], [4, 2, 0], [5, 5, 5], [8, 8, 8], [6, 6, 6], [4, 6, 6], [4, 4, 6]])

    for i in range(pos.shape[0] - 1):
        # Fix: Python 2 print statement -> print() call, so the script also
        # runs under Python 3 (everything else was already compatible).
        print('{} to {}'.format(pos[i], pos[i + 1]))
        vtkgen.add_cuboid_p2p(pos[i], pos[i + 1], 0.2 * i + 0.3, 0.2 * i)

    vtkgen.write_vtk(filename)
|
[
"miyamoto@brain.imi.i.u-tokyo.ac.jp"
] |
miyamoto@brain.imi.i.u-tokyo.ac.jp
|
b1a2cc010714c4d2270279ddfa921fa64a88c943
|
54beda9490ef3f8f1e2c89fc995e1840385bc527
|
/app/models.py
|
c730a00fa9e78a9f2ea7755c3ada601bc6729217
|
[] |
no_license
|
strengthnotes/web
|
f2c7302a112222c8882b0804daef1c35ab59d2c6
|
ac3e1c1bc4b81417bc85b5c5419d93dc84885137
|
refs/heads/master
| 2021-12-28T06:25:15.104421
| 2017-03-12T21:44:44
| 2017-03-12T21:44:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,915
|
py
|
from werkzeug.security import generate_password_hash, check_password_hash
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from flask import current_app
from flask_login import UserMixin
from . import db, login_manager
class Role(db.Model):
    """A user role; one role is shared by many User rows via role_id."""
    __tablename__ = 'roles'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    # backref gives each User a .role attribute; lazy='dynamic' makes
    # role.users a query object rather than a loaded list.
    users = db.relationship('User', backref='role', lazy='dynamic')

    def __repr__(self):
        return '<Role %r>' % self.name
class User(UserMixin, db.Model):
    """Application user with hashed password storage and an
    email-confirmation token flow."""
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    email = db.Column(db.String(64), unique=True, index=True)
    username = db.Column(db.String(64), unique=True, index=True)
    role_id = db.Column(db.Integer, db.ForeignKey('roles.id'))
    password_hash = db.Column(db.String(128))
    confirmed = db.Column(db.Boolean, default=False)

    @property
    def password(self):
        """Write-only attribute: the plaintext password is never stored."""
        raise AttributeError('password is not a readable attribute')

    @password.setter
    def password(self, password):
        self.password_hash = generate_password_hash(password)

    def verify_password(self, password):
        """Return True if *password* matches the stored hash."""
        return check_password_hash(self.password_hash, password)

    def generate_confirmation_token(self, expiration=3600):
        """Return a signed token encoding this user's id (default 1 hour)."""
        s = Serializer(current_app.config['SECRET_KEY'], expiration)
        return s.dumps({'confirm': self.id})

    def confirm(self, token):
        """Validate *token* and mark this user confirmed.

        Returns False for an invalid/expired token or a token issued for a
        different user id; True (and stages the update) otherwise.
        """
        s = Serializer(current_app.config['SECRET_KEY'])
        try:
            data = s.loads(token)
        # Fix: the original bare ``except:`` also swallowed SystemExit and
        # KeyboardInterrupt; only deserialisation failures should be
        # treated as an invalid token.
        except Exception:
            return False
        if data.get('confirm') != self.id:
            return False
        self.confirmed = True
        db.session.add(self)
        return True

    def __repr__(self):
        return '<User %r>' % self.username
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: load a User by primary key for the session."""
    return User.query.get(int(user_id))
|
[
"jtaylo78@gmail.com"
] |
jtaylo78@gmail.com
|
fb95a962370d7b4bb6c6d781611394a5ad69f45a
|
e3fe234510d19c120d56f9a2876b7d508d306212
|
/17tensorflow/5_lm/ngram/ngram.py
|
6146628f947c8ebec2603563c38c067b7d61b32d
|
[
"Apache-2.0"
] |
permissive
|
KEVINYZY/python-tutorial
|
78b348fb2fa2eb1c8c55d016affb6a9534332997
|
ae43536908eb8af56c34865f52a6e8644edc4fa3
|
refs/heads/master
| 2020-03-30T02:11:03.394073
| 2019-12-03T00:52:10
| 2019-12-03T00:52:10
| 150,617,875
| 0
| 0
|
Apache-2.0
| 2018-09-27T16:39:29
| 2018-09-27T16:39:28
| null |
UTF-8
|
Python
| false
| false
| 3,057
|
py
|
# -*- coding: utf-8 -*-
# Author: XuMing <shibing624@126.com>
# Data: 17/11/29
# Brief:
"""读取语料 生成 n-gram 模型"""
from collections import Counter, defaultdict
from pprint import pprint
from random import random
import jieba
N = 2  # order of the n-gram model (contexts are N-1 tokens long)
START = '$$'  # token inserted at sentence starts
BREAK = '。!?'  # characters that end a sentence
IGNORE = '\n “”"《》〈〉()*'  # characters dropped during preprocessing
def process_segs(segments):
    """Return a token list built from *segments* (an iterable):

    - characters in IGNORE are dropped;
    - N-1 START tokens are placed at the beginning and after every
      sentence-ending character in BREAK.
    """
    tokens = [START] * (N - 1)
    for seg in segments:
        if seg in IGNORE:
            continue
        tokens.append(seg)
        if seg in BREAK:
            tokens.extend([START] * (N - 1))
    return tokens
def count_ngram(segments):
    """Count N-grams: return {context_tuple: Counter({word: occurrences})}."""
    counts = defaultdict(Counter)
    # Slide a window of size N over the token list; the first N-1 tokens
    # only ever appear as context.
    for end in range(N - 1, len(segments)):
        context = tuple(segments[end - N + 1:end])
        counts[context][segments[end]] += 1
    return counts
def to_prob(dct):
    """Return a new {context: Counter({word: probability})} mapping.

    Fix: the original did ``dct.copy()`` -- a *shallow* copy -- and then
    divided each count in place, so the caller's Counters were mutated and
    the raw counts destroyed.  Build fresh Counters instead and leave the
    input untouched.
    """
    prob_dct = defaultdict(Counter)
    for context, count in dct.items():
        total = sum(count.values())
        prob_dct[context] = Counter({word: c / total for word, c in count.items()})
    return prob_dct
def generate_word(prob_dct, context):
    """Sample a word from the conditional distribution prob_dct[context].

    Fix: floating-point rounding can leave the cumulative probability a
    hair below 1.0, in which case the original fell off the loop and
    returned None (crashing the caller on ``text + None``).  Fall back to
    the last candidate word instead.  Still returns None only when the
    context has no candidates at all.
    """
    threshold = random()
    cumulative = 0.0
    word = None
    for word, prob in prob_dct[context].items():
        cumulative += prob
        if cumulative > threshold:
            return word
    return word
def generate_sentences(m, prob_dct):
    """Generate *m* sentences by repeatedly sampling from *prob_dct*.

    A sentence accumulates words until one lands in BREAK, at which point
    it is stored and the context resets to the START tuple.
    """
    sentences = []
    text = ''
    context = tuple(START for _ in range(N - 1))
    produced = 0
    while produced < m:
        word = generate_word(prob_dct, context)
        text += word
        # Shift the context window left by one and append the new word.
        context = tuple((list(context) + [word])[1:])
        if word in BREAK:
            sentences.append(text)
            text = ''
            context = tuple(START for _ in range(N - 1))
            produced += 1
    return sentences
def main():
    """Build 2..5-gram models over the corpus, pickle each probability
    dict, and write 20 generated sentences per model."""
    # Fix: the helpers (process_segs, count_ngram, ...) read the
    # module-level N, so the loop must rebind the global -- the original's
    # loop-local N never reached them and every iteration silently built a
    # 2-gram model.
    global N
    for N in range(2, 6):
        print('\n*** reading corpus ***')
        with open('../../../data/tianlongbabu.txt', encoding="utf8") as f:
            corpus = f.read()

        print('*** cutting corpus ***')
        raw_segments = jieba.cut(corpus)

        print('*** processing segments ***')
        segments = process_segs(raw_segments)

        print('*** generating {}-gram count dict ***'.format(N))
        dct = count_ngram(segments)

        print('*** generating {}-gram probability dict ***'.format(N))
        prob_dct = to_prob(dct)
        # pprint(prob_dct)

        # Fix: pickle.dump requires a file object as its second argument;
        # the original pickle.dump(prob_dct) raised TypeError.
        import pickle
        with open('prob_dct_{}gram.pkl'.format(N), 'wb') as pf:
            pickle.dump(prob_dct, pf)

        print('*** generating sentences ***')
        with open('generated_{}gram.txt'.format(N), 'w', encoding="utf8") as f:
            f.write('\n'.join(generate_sentences(20, prob_dct)))
|
[
"507153809@qq.com"
] |
507153809@qq.com
|
bbf11f6525ae7e8077064cc40b92fa787c203531
|
5cc74872fc45943f27578f6bd647f77d1bd03f0b
|
/ppf/wsgi.py
|
1b26faa1eed919e95fb8543ab4cab990f745e940
|
[] |
no_license
|
johnnyliu1992/midterm-project
|
7aa00012612b00c2baea1f28431924c5b2d0ee9e
|
516ccceb415d120ea9f138ba7828e0556cafe964
|
refs/heads/master
| 2021-07-22T14:48:17.702329
| 2017-11-05T20:46:25
| 2017-11-05T20:46:25
| 109,615,230
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 384
|
py
|
"""
WSGI config for ppf project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Default the settings module so WSGI servers can import `application`
# without extra environment configuration (an existing value wins).
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ppf.settings")
application = get_wsgi_application()
|
[
"liujiannanself@gmail.com"
] |
liujiannanself@gmail.com
|
d1bbae8eca99d9390cad48e10dfc1865ab044bb6
|
3f51c53b0e4c07b09cf73971c269376d8d544c89
|
/Monthly_Expenditure.py
|
ce4410db348f4636f833baa36706e2319671bcfe
|
[] |
no_license
|
Veena-Wanjari/Monthly_Expenditure_Track
|
131f394ac03033e9e6f7b9e297a2e8fbb487c01e
|
6487096b47378c0947f8361084a81581468f45dd
|
refs/heads/master
| 2022-12-21T21:37:56.681610
| 2020-09-26T11:11:05
| 2020-09-26T11:11:05
| 298,792,370
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,800
|
py
|
import tkinter as tk
from tkinter import ttk
from tkcalendar import Calendar
from csv import DictWriter
import os
# Fixed-size main window for the expenditure tracker.
window = tk.Tk()
window.title("Monthly Expenditure")
window.geometry("450x550")
window.resizable(width = False, height = False)

#create Items Name label
name_label = ttk.Label(window, text = "Purchased items name:")
name_label.grid(row=0, column=0,padx = 14, pady = 14, sticky = tk.W)

#create Name Entry box
name_var = tk.StringVar()
name_entrybox = ttk.Entry(window, width = 16, textvariable = name_var)
name_entrybox.grid(row = 0, column = 1, padx = 14, pady = 14)
name_entrybox.focus()

#create Price Name label
price_label = ttk.Label(window, text = "Enter price:")
price_label.grid(row = 1, column = 0,padx = 14, pady = 14,sticky = tk.W)

#create Price Entry box
price_var = tk.DoubleVar()
price_entrybox = ttk.Entry(window, width = 16, textvariable = price_var)
price_entrybox.grid(row = 1, column = 1, padx = 14, pady = 14)
price_entrybox.focus()

#create Shop Name label
shop_name = ttk.Label(window, text = "Select Shop name:")
shop_name.grid(row = 3, column = 0,padx = 14, pady = 14,sticky = tk.W)

#create Combobox (read-only: user must pick one of the listed shops)
shop_var = tk.StringVar()
shop_combobox = ttk.Combobox(window, width = 14, textvariable = shop_var, state = 'readonly')
shop_combobox['values'] = ('Udaya Store', 'Woolworth', 'Big Apple', 'Sunrise Fresh')
shop_combobox.current(0)
shop_combobox.grid(row = 3, column = 1, padx = 14, pady = 14)

#create Calendar Label
calendar_label = ttk.Label(window, text = "Choose Date:")
calendar_label.grid(row = 4, column = 0,padx = 14, pady = 14,sticky = tk.W)

#Creating Calendar (defaults to 2020-08-27)
cal = Calendar(window, selectmode = "day", year = 2020, month = 8, day = 27)
cal.grid(row = 5, column = 1)

def Exit():
    # Close the application window.
    window.destroy()

def action():
    # Append the current form values as one row of Monthly_details.csv,
    # writing the header first when the file is empty, then clear the
    # entry boxes for the next record.
    item_name = name_var.get()
    item_price = price_var.get()
    shop_details = shop_var.get()
    calc_date = cal.get_date()
    #write to csv,
    with open("Monthly_details.csv", 'a', newline="") as f:
        dict_writer = DictWriter(f, fieldnames = ['Item Name', 'Item Price', 'Shop Name', 'Date'])
        if os.stat('Monthly_details.csv'). st_size == 0:
            dict_writer.writeheader()
        dict_writer.writerow({
            'Item Name' : item_name,
            'Item Price' : item_price,
            'Shop Name': shop_details,
            'Date' : calc_date,
            })
    name_entrybox.delete(0, tk.END)
    price_entrybox.delete(0, tk.END)

#creating Submit Button
submit_button = ttk.Button(window, text = 'Submit', command = action)
submit_button.grid(row = 7, column = 1, padx = 14, pady = 30)

exit_button = ttk.Button(window, text = 'EXIT', command = Exit)
exit_button.grid(row = 8, column = 1, padx = 14, pady = 10)

window.mainloop()
|
[
"veena.wanjari@gmail.com"
] |
veena.wanjari@gmail.com
|
b3bf37fd0d5fc6221eac493c999c50c201736d44
|
14dd622ef84b3f48c2d66d8ab873084634cfb6d4
|
/PythonLearning/Learning Matplotlib/TestMatplotlib2.py
|
34726ae3448e8c7248ac9e600a5fa700cc2f8a8e
|
[] |
no_license
|
ByronGe/Python-base-Learning
|
648cbbf1c7a8431dece3638dfb4de754623bc84e
|
7ade3250c4abc4b5e47e39080bf1ad8d53b04d78
|
refs/heads/master
| 2020-04-15T20:18:24.134950
| 2019-01-10T04:00:51
| 2019-01-10T04:00:51
| 164,987,206
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 578
|
py
|
import matplotlib.pyplot as plt
import numpy as np
# Plot a line y1 = 2x+1 and a dashed red parabola y2 = x^2 on shared axes.
x = np.linspace(-3,3,50)
y1 = 2*x+1
y2 = x**2
plt.plot(x,y1)
plt.plot(x,y2,color='red',linewidth=3,linestyle='--')
# Restrict the x ticks to six evenly spaced values in [-2, 2].
x_tick = np.linspace(-2,2,6)
plt.xlabel('i am xlabel')
plt.xticks(x_tick)
# Replace numeric y ticks with qualitative labels (LaTeX for the first).
plt.yticks([0,2,4,7,8],[r'$terrible$','bad','normal','good','really good'])
ax = plt.gca()
# Hide the top/right spines and move the bottom/left spines to the origin,
# giving centred "textbook" axes.
ax.spines['right'].set_color('none')
ax.spines['top'].set_color('none')
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
ax.spines['bottom'].set_position(('data',0))
ax.spines['left'].set_position(('data',0))
plt.show()
|
[
"2450894732@qq.com"
] |
2450894732@qq.com
|
a919599073e4f2964287ff10984860b610212766
|
e1d1d2f008e8284e109d851a87826d943c095dd0
|
/TicTacToe.py
|
9b33a0f59f733fa06cbcbeafffd3d06e46b488ae
|
[
"MIT"
] |
permissive
|
mayamau/Tic-Tac-Toe
|
2f8e82a20429e595225566180171a19898614361
|
f0488d0d82382f6202d77f8c6fc0c53e89bb89f3
|
refs/heads/master
| 2022-07-09T10:54:13.762065
| 2021-09-29T19:16:00
| 2021-09-29T19:16:00
| 123,813,512
| 0
| 0
|
MIT
| 2022-06-22T04:25:10
| 2018-03-04T17:50:17
|
Python
|
UTF-8
|
Python
| false
| false
| 13,935
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Feb 28 23:52:33 2018
@author: maya
"""
# import
from tkinter import *
import tkinter.messagebox
from random import randint
import time
import numpy as np
# functions
def restartGame(button):
    """Reset the game: X (the global `x`) moves first, the 3x3 board is
    cleared, and all nine grid buttons (1-indexed in *button*) are
    re-enabled with blank labels."""
    global currentPlayer
    global x
    global TTTArray
    currentPlayer = x
    TTTArray = np.zeros(shape=[3, 3])
    for s in range(1, 10):
        button[s].config(state=NORMAL, text=' ')
def popupfunc(button, resultString):
    """Show *resultString* in a modal-style Toplevel over the global root
    window and disable all nine grid buttons to end the game."""
    pop = Toplevel(root)
    pop.title("Result")
    frame0 = Frame(pop, bg=c_black, bd=0, relief=FLAT)
    frame0.pack()
    resultTxt = Label(frame0, text=resultString, foreground=c_white,
                      background=c_black, font=('Agency FB', 20))
    resultTxt.grid(row=0, column=0, padx=20, pady=20)
    okButton = Button(frame0, command=pop.destroy, text="OK", width=10, relief=FLAT, bd=0,
                      background=c_black, foreground=c_aqua1, activebackground=c_black, activeforeground=c_black, font=('Agency FB', 20))
    okButton.grid(row=1, column=0, padx=10, pady=0)
    # Freeze the board: the finished game's cells stay visible but inert.
    for s in range(1, 10):
        button[s].config(state=DISABLED, disabledforeground=c_aqua3)
#
def displaySelection(button, buttonNumber, numberOfPlayers):
    """Handle a click on grid button *buttonNumber* (1..9).

    Records the current player's mark on the board and button, checks for
    a result, and -- in single-player mode -- immediately computes and
    plays the AI's minimax reply.
    """
    global currentPlayer
    global o
    global x
    global TTTArray
    # Maps 1..9 button numbers to (row, col) board coordinates.
    positionMapping = {1: [0, 0], 2: [0, 1], 3: [0, 2], 4: [
        1, 0], 5: [1, 1], 6: [1, 2], 7: [2, 0], 8: [2, 1], 9: [2, 2]}
    currentR, currentC = positionMapping[buttonNumber]
    # currentPlayer == 1 is X; otherwise O.  Each branch marks the cell,
    # disables the button, and flips the turn.
    if currentPlayer == 1:
        button[buttonNumber].config(
            state=DISABLED, disabledforeground=c_aqua1, text="X")
        TTTArray[currentR, currentC] = currentPlayer
        currentPlayer = -1
    else:
        button[buttonNumber].config(
            state=DISABLED, disabledforeground=c_aqua1, text="O")
        TTTArray[currentR, currentC] = currentPlayer
        currentPlayer = 1
    resultScore, resultString = checkForWinner(
        TTTArray.copy(), positionMapping[buttonNumber].copy())
    if resultString != 'Continue':
        popupfunc(button, resultString)
    else:
        if numberOfPlayers == 1:
            # AI turn: full minimax with alpha-beta pruning.
            alpha, beta = -np.inf, np.inf
            bestMove, resultScore = minimax(TTTArray.copy(), currentPlayer,
                                            positionMapping[buttonNumber], alpha, beta)
            currentR, currentC = bestMove
            # Reverse lookup: board coordinates back to the button number.
            buttonNumber = [button for button, move in positionMapping.items() if move == [currentR, currentC]][0]
            if currentPlayer == 1:
                button[buttonNumber].config(
                    state=DISABLED, disabledforeground=c_aqua1, text="X")
                TTTArray[currentR, currentC] = currentPlayer
                currentPlayer = -1
            else:
                button[buttonNumber].config(
                    state=DISABLED, disabledforeground=c_aqua1, text="O")
                TTTArray[currentR, currentC] = currentPlayer
                currentPlayer = 1
            resultScore, resultString = checkForWinner(
                TTTArray.copy(), positionMapping[buttonNumber].copy())
            if resultString != 'Continue':
                popupfunc(button, resultString)
def checkForWinner(board, move):
    """Evaluate a 3x3 board after `move` = [row, col].

    Returns a (score, message) pair: (3, "X wins!"), (-3, "O wins!"),
    (0, "It's a draw!") when the board is full with no winner, or
    (999, "Continue") otherwise. Only the moved row, the moved column and
    the two diagonals are inspected, which suffices after a single move.
    """
    rowIdx, colIdx = move
    lineTotals = [board[rowIdx, :].sum(),
                  board[:, colIdx].sum(),
                  np.diag(board).sum(),
                  np.diag(np.fliplr(board)).sum()]
    if max(lineTotals) == 3:
        return 3, "X wins!"
    if min(lineTotals) == -3:
        return -3, "O wins!"
    # Board full (no zero cells left) and nobody won -> draw.
    if not (board == 0).any():
        return 0, "It's a draw!"
    return 999, "Continue"
def minimax(board, currentPlayer, move, alpha, beta):
    """Recursive minimax with alpha-beta pruning for tic-tac-toe.

    X (+1) maximizes, O (-1) minimizes. `move` is the move that produced
    this board (checkForWinner only needs the last move's row/column).
    Returns (bestMove, bestScore); at terminal positions it returns the
    sentinel [[-1, -1], resultScore] — callers only tuple-unpack the
    result, so this works, but [-1, -1] must never be used as an index.
    """
    if currentPlayer == 1:
        bestScore = -np.inf
        bestMove = [-1, -1]
    else:
        bestScore = +np.inf
        bestMove = [-1, -1]
    resultScore, resultString = checkForWinner(board, move)
    if resultScore in [3, -3, 0]:
        return [[-1, -1], resultScore]
    emptyCells = np.argwhere(board == 0)
    for emptyCell in emptyCells:
        # Try the candidate move, recurse for the opponent, then undo.
        board[emptyCell[0], emptyCell[1]] = currentPlayer
        move, score = minimax(board, -1 * currentPlayer,
                              emptyCell, alpha, beta)
        board[emptyCell[0], emptyCell[1]] = 0
        if currentPlayer == 1:
            if score > bestScore:
                bestScore = score
                bestMove = emptyCell
            alpha = max(alpha, bestScore)
            if beta <= alpha:
                break  # prune: the minimizer will never allow this line
        else:
            if score < bestScore:
                bestScore = score
                bestMove = emptyCell
            beta = min(beta, bestScore)
            if beta <= alpha:
                break  # prune: the maximizer will never allow this line
    return bestMove, bestScore
def showXOGrid(numberOfPlayers, frameToHide, headerTxt):
    """Hide the menu frame and build the 3x3 board plus QUIT/RESTART controls.

    `numberOfPlayers` selects single- or multiplayer; `frameToHide` is the
    menu frame to dismiss; `headerTxt` is the title Label to retitle.
    """
    frameToHide.pack_forget()
    if numberOfPlayers == 1:
        headerTxt.config(text="SINGLEPLAYER GAME")
    elif numberOfPlayers == 2:
        headerTxt.config(text="MULTIPLAYER GAME")
    b = [0] * 10
    # Per-cell grid padding quirks preserved from the original layout:
    # cells 2 and 8 get padx=2, cell 6 gets pady=2, all others 0.
    cellPadX = {2: 2, 8: 2}
    cellPadY = {6: 2}
    for cell in range(1, 10):
        # Bind `cell` as a default argument so each lambda keeps its own
        # button number.
        b[cell] = Button(frame2,
                         command=lambda cell=cell: displaySelection(b, cell, numberOfPlayers),
                         background=c_black, relief=FLAT, bd=0,
                         font=('Agency FB', 45), foreground=c_aqua1,
                         width=4, text=" ", activebackground=c_black)
        b[cell].grid(row=1 + (cell - 1) // 3, column=(cell - 1) % 3,
                     padx=cellPadX.get(cell, 0), pady=cellPadY.get(cell, 0))
    quitButton = Button(frame3, command=root.destroy, text="QUIT", width=10,
                        relief=FLAT, bd=0, background=c_black,
                        foreground=c_aqua1, activebackground=c_black,
                        activeforeground=c_black, font=('Agency FB', 20))
    quitButton.grid(row=0, column=0, padx=10, pady=0)
    restartButton = Button(frame3, command=lambda: restartGame(b), text="RESTART",
                           width=10, relief=FLAT, bd=0, background=c_black,
                           foreground=c_aqua1, activebackground=c_black,
                           activeforeground=c_black, font=('Agency FB', 20))
    restartButton.grid(row=0, column=1, padx=10, pady=0)
# create root window
root = Tk()
# UI params
# --game window title
gameWindowTitle = "Tic Tac Toe"
# --hex colors
c_black = "#030305"
c_white = "#FFFFFF"
c_aqua1 = "#91FCFF"
c_aqua2 = "#00E7ED"
c_aqua3 = "#007b80" # greyed out c_aqua1
# modify root window
root.title(gameWindowTitle)
root.configure(background=c_black)
# frames: frame0 = header, frame1 = game-type menu, frame2 = board grid,
# frame3 = quit/restart controls
frame0 = Frame(root, bg=c_black, bd=0, relief=FLAT)
frame0.pack(padx=0, pady=0)
frame1 = Frame(root, bg=c_aqua2, bd=0, relief=FLAT)
frame1.pack(padx=0, pady=0)
frame2 = Frame(root, bg=c_aqua2, bd=0, relief=FLAT)
frame2.pack(padx=70, pady=0)
frame3 = Frame(root, bg=c_black, bd=0, relief=FLAT)
frame3.pack(padx=0, pady=30)
# text
headerTxt = Label(frame0,
                  text='SELECT GAME TYPE:',
                  foreground=c_white,
                  background=c_black,
                  font=('Agency FB', 25))
headerTxt.grid(row=0,
               column=0,
               columnspan=3,
               padx=20,
               pady=20)
# menu buttons: b[0], b[2], b[4] are blank spacers; b[1]/b[3] pick the mode
b = [0] * 5
# empty button for spacing
b[0] = Button(frame1,
              text='\n',
              background=c_black,
              bd=0,
              font=('Agency FB', 15),
              foreground=c_aqua1,
              width=5,
              activebackground=c_black)
b[0].grid(row=1, column=0, padx=0, pady=0)
# SINGLEPLAYER button
b[1] = Button(frame1,
              text='SINGLEPLAYER\nGAME',
              command=lambda: showXOGrid(1, frame1, headerTxt),
              background=c_black,
              bd=0,
              font=('Agency FB', 15),
              foreground=c_aqua1,
              width=30,
              activebackground=c_aqua1,
              activeforeground=c_black,
              cursor="hand2")
b[1].grid(row=1, column=1, padx=0, pady=0)
# empty button for spacing
b[2] = Button(frame1,
              text='\n',
              background=c_black,
              bd=0,
              font=('Agency FB', 15),
              foreground=c_aqua1,
              width=5,
              activebackground=c_black)
b[2].grid(row=1, column=2, padx=0, pady=0)
# MULTIPLAYER button
b[3] = Button(frame1,
              text='MULTIPLAYER\nGAME',
              command=lambda: showXOGrid(2, frame1, headerTxt),
              background=c_black,
              bd=0,
              font=('Agency FB', 15),
              foreground=c_aqua1,
              width=30,
              activebackground=c_aqua1,
              activeforeground=c_black,
              cursor="hand2")
b[3].grid(row=1, column=3, padx=0, pady=0)
# empty button for spacing
b[4] = Button(frame1,
              text='\n',
              background=c_black,
              bd=0,
              font=('Agency FB', 15),
              foreground=c_aqua1,
              width=5,
              activebackground=c_black)
b[4].grid(row=1, column=4, padx=0, pady=0)
# text
# welcomeTxt = Label(frame1,text='MULTIPLAYER GAME',foreground=c_white,background=c_black,font=('Agency FB',25))
# welcomeTxt.grid(row=0,column=0,columnspan=3,padx=20,pady=20)
# #buttons
# b=[0 for x in range(0,10)]
# b[1] = Button(frame2,command=lambda: displaySelection(1),background=c_black,relief=FLAT,bd=0,font=('Agency FB',45),foreground=c_aqua1,width=4,text=" ",activebackground=c_black)
# b[1].grid(row=1,column=0,padx=0,pady=0)
# b[2] = Button(frame2,command=lambda: displaySelection(2), background=c_black,relief=FLAT,bd=0,font=('Agency FB',45),foreground=c_aqua1,width=4,text=" ",activebackground=c_black)
# b[2].grid(row=1,column=1,padx=2,pady=0)
# b[3] = Button(frame2,command=lambda: displaySelection(3), background=c_black,relief=FLAT,bd=0,font=('Agency FB',45),foreground=c_aqua1,width=4,text=" ",activebackground=c_black)
# b[3].grid(row=1,column=2,padx=0,pady=0)
# b[4] = Button(frame2,command=lambda: displaySelection(4), background=c_black,relief=FLAT,bd=0,font=('Agency FB',45),foreground=c_aqua1,width=4,text=" ",activebackground=c_black)
# b[4].grid(row=2,column=0,padx=0,pady=0)
# b[5] = Button(frame2,command=lambda: displaySelection(5), background=c_black,relief=FLAT,bd=0,font=('Agency FB',45),foreground=c_aqua1,width=4,text=" ",activebackground=c_black)
# b[5].grid(row=2,column=1,padx=0,pady=0)
# b[6] = Button(frame2,command=lambda: displaySelection(6), background=c_black,relief=FLAT,bd=0,font=('Agency FB',45),foreground=c_aqua1,width=4,text=" ",activebackground=c_black)
# b[6].grid(row=2,column=2,padx=0,pady=2)
# b[7] = Button(frame2,command=lambda: displaySelection(7), background=c_black,relief=FLAT,bd=0,font=('Agency FB',45),foreground=c_aqua1,width=4,text=" ",activebackground=c_black)
# b[7].grid(row=3,column=0,padx=0,pady=0)
# b[8] = Button(frame2,command=lambda: displaySelection(8), background=c_black,relief=FLAT,bd=0,font=('Agency FB',45),foreground=c_aqua1,width=4,text=" ",activebackground=c_black)
# b[8].grid(row=3,column=1,padx=2,pady=0)
# b[9] = Button(frame2,command=lambda: displaySelection(9), background=c_black,relief=FLAT,bd=0,font=('Agency FB',45),foreground=c_aqua1,width=4,text=" ",activebackground=c_black)
# b[9].grid(row=3,column=2,padx=0,pady=0)
# quitButton = Button(frame3,command=root.destroy,text="QUIT",width=10,relief=FLAT,bd=0,background=c_black, foreground=c_aqua1,activebackground=c_black,activeforeground=c_black,font=('Agency FB',20))
# quitButton.grid(row=0,column=0,padx=10,pady=0)
# restartButton = Button(frame3,command=restartGame,text="RESTART",width=10,relief=FLAT,bd=0,background=c_black, foreground=c_aqua1,activebackground=c_black,activeforeground=c_black,font=('Agency FB',20))
# restartButton.grid(row=0,column=1,padx=10,pady=0)
# variables
# Game state: X is the maximizing player (+1), O the minimizing one (-1).
# X always moves first on an empty 3x3 board (0 = empty cell).
x = 1
o = -1
currentPlayer = x
TTTArray = np.zeros(shape=[3, 3])
root.mainloop()
|
[
"mayamau@users.noreply.github.com"
] |
mayamau@users.noreply.github.com
|
0c28c8c4901757f77ea916c729c5f15e0fa6aaee
|
a9205fccfcb73dc1d113f7ed5650ac625082b593
|
/Repositorio.py
|
fa08d70fc2f808521f4e0f9ab0899e0733ec40ec
|
[] |
no_license
|
Fradyzz/CORPCITI
|
2b198e9bd9c0b6523832003d8557865e37d5c43c
|
0c937c39d5006694e9ad3c42a3018e29d3a9495b
|
refs/heads/master
| 2021-04-05T19:53:00.734812
| 2020-04-01T00:01:59
| 2020-04-01T00:01:59
| 248,594,743
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,426
|
py
|
import pandas as pd;
from pandas import ExcelWriter;
from openpyxl.writer.excel import ExcelWriter
class Repos():
    """In-memory repository of eventos, usuarios and admins with Excel export.

    Stored objects are expected to expose the attributes each method reads
    (events: codEvent, nomEvent, descrEvent, lugar, fecha, hora;
    users/admins: codigo, nick, contrase, ...).
    """
    # Kept for backward compatibility in case callers reference the
    # class-level attributes directly; instances use the __init__ lists.
    eventos = []
    usuarios = []
    admins = []

    def __init__(self, eventos=None, usuarios=None, admins=None):
        # None sentinels instead of mutable defaults: the original
        # `eventos=[]` defaults were shared across every instance.
        self.eventos = [] if eventos is None else eventos
        self.usuarios = [] if usuarios is None else usuarios
        self.admins = [] if admins is None else admins

    def loguinUser(self, user, contra):
        """Return True iff some registered user matches nick and password.

        Bugfix: the original returned False after inspecting only the
        FIRST user (the else belonged to the if inside the loop), so any
        user other than the first could never log in.
        """
        for i in self.usuarios:
            if i.nick == user and i.contrase == contra:
                return True
        return False

    def loguinAdmi(self, user, contra):
        """Return True iff some admin matches nick and password (same bugfix)."""
        for i in self.admins:
            if i.nick == user and i.contrase == contra:
                return True
        return False

    def insertaEvent(self, ob):
        self.eventos.append(ob)

    def insertaUser(self, ob):
        self.usuarios.append(ob)

    def insertaAdm(self, ob):
        self.admins.append(ob)

    def countEvent(self):
        # len() replaces the manual counting loop.
        return len(self.eventos)

    def countUser(self):
        return len(self.usuarios)

    def countAdm(self):
        return len(self.admins)

    def listataEvent(self, i):
        return self.eventos[i]

    def listataUser(self, i):
        return self.usuarios[i]

    def listataAdm(self, i):
        return self.admins[i]

    def editaEvent(self, cod, ob):
        # Replace every evento whose codEvent matches cod.
        for c, i in enumerate(self.eventos):
            if i.codEvent == cod:
                self.eventos[c] = ob

    def editaUser(self, cod, ob):
        for c, i in enumerate(self.usuarios):
            if i.codigo == cod:
                self.usuarios[c] = ob

    def editaAdm(self, cod, ob):
        for c, i in enumerate(self.admins):
            if i.codigo == cod:
                self.admins[c] = ob

    def buscaEvent(self, cod, iter):
        # NOTE(review): like the original, this returns the element at
        # index `iter` (not the matching element) once any match for cod
        # exists — kept as-is to preserve the caller contract.
        for i in self.eventos:
            if i.codEvent == cod:
                return self.eventos[iter]

    def buscaUser(self, cod, iter):
        for i in self.usuarios:
            if i.codigo == cod:
                return self.usuarios[iter]

    def buscaAdm(self, cod, iter):
        for i in self.admins:
            if i.codigo == cod:
                return self.admins[iter]

    def borraEvent(self, cod):
        # Rebuild the list instead of remove()-ing while iterating, which
        # skipped the element following each removal.
        self.eventos = [i for i in self.eventos if i.codEvent != cod]

    def borraUser(self, cod):
        self.usuarios = [i for i in self.usuarios if i.codigo != cod]

    def borraAdm(self, cod):
        self.admins = [i for i in self.admins if i.codigo != cod]

    def SaveExcelEvent(self):
        """Dump all eventos to RegistroEventos.xlsx."""
        data = {'Codigo': [i.codEvent for i in self.eventos],
                'Nombre': [i.nomEvent for i in self.eventos],
                'Descripción': [i.descrEvent for i in self.eventos],
                'Lugar': [i.lugar for i in self.eventos],
                'Fecha': [i.fecha for i in self.eventos],
                'Hora': [i.hora for i in self.eventos]}
        df = pd.DataFrame(data, columns=['Codigo', 'Nombre', 'Descripción', 'Lugar', 'Fecha', 'Hora'])
        # pd.ExcelWriter explicitly: the bare ExcelWriter name at module
        # level is shadowed by openpyxl's incompatible ExcelWriter import.
        # The context manager also replaces the deprecated writer.save().
        with pd.ExcelWriter('RegistroEventos.xlsx') as writer:
            df.to_excel(writer, index=False)

    def SaveExcelUser(self):
        """Dump all usuarios to RegistroUsuarios.xlsx.

        Bugfix: the original data keys 'Fecha de Naicimiento' (typo) and
        'Contraseña:' did not match the columns list, producing empty
        columns in the output file.
        """
        data = {'Codigo': [i.codigo for i in self.usuarios],
                'Cédula': [i.cedula for i in self.usuarios],
                'Nombre': [i.nombre for i in self.usuarios],
                'Apellido': [i.apellido for i in self.usuarios],
                'Fecha de Nacimiento': [i.fNacim for i in self.usuarios],
                'Nick': [i.nick for i in self.usuarios],
                'Teléfono': [i.telf for i in self.usuarios],
                'Email': [i.email for i in self.usuarios],
                'Contraseña': [i.contrase for i in self.usuarios]}
        df = pd.DataFrame(data, columns=['Codigo', 'Cédula', 'Nombre', 'Apellido', 'Fecha de Nacimiento',
                                         'Nick', 'Teléfono', 'Email', 'Contraseña'])
        with pd.ExcelWriter('RegistroUsuarios.xlsx') as writer:
            df.to_excel(writer, index=False)

    def SaveExcelAdmin(self):
        """Dump all admins to RegistroAdmins.xlsx.

        Bugfix: the original columns list included 'Fecha de Nacimiento'
        although no such data key exists for admins, and the data key
        'Contraseña:' did not match the 'Contraseña' column.
        """
        data = {'Codigo': [i.codigo for i in self.admins],
                'Cédula': [i.cedula for i in self.admins],
                'Nombre': [i.nombre for i in self.admins],
                'Apellido': [i.apellido for i in self.admins],
                'Nick': [i.nick for i in self.admins],
                'Teléfono': [i.telf for i in self.admins],
                'Email': [i.email for i in self.admins],
                'Contraseña': [i.contrase for i in self.admins]}
        df = pd.DataFrame(data, columns=['Codigo', 'Cédula', 'Nombre', 'Apellido',
                                         'Nick', 'Teléfono', 'Email', 'Contraseña'])
        with pd.ExcelWriter('RegistroAdmins.xlsx') as writer:
            df.to_excel(writer, index=False)
|
[
"noreply@github.com"
] |
Fradyzz.noreply@github.com
|
1e016af907e1c4eb4ab696ffded7dc703b9d20bf
|
6b359431d1fe4141b3ecdb52a23a99ede37b8044
|
/workouts/migrations/0001_initial.py
|
b279d5eaa1d12eaef436115f081f51bcdfae1b84
|
[] |
no_license
|
sergiga/fitness-backend
|
f6cdc48bd55a23da8eb671f7ffe84ac9b92c5e3e
|
a328c6c0b34a0c63e4e223ad873d1a61cf54fdeb
|
refs/heads/master
| 2022-12-24T18:03:52.293142
| 2019-12-29T14:05:23
| 2019-12-29T14:05:23
| 227,165,726
| 0
| 0
| null | 2022-12-08T03:17:25
| 2019-12-10T16:24:36
|
Python
|
UTF-8
|
Python
| false
| false
| 4,598
|
py
|
# Generated by Django 3.0 on 2019-12-08 17:19
import core.utils
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial schema for the workouts app (Django 3.0).
    # Creates Workout, WorkoutSet, ExerciseInSet, Training and
    # TrainingExercise; do not edit retroactively — add new migrations.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='ExerciseInSet',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('device_updated', models.DateTimeField(null=True)),
                ('reps', models.IntegerField()),
                ('rep_unit', models.IntegerField(choices=[(1, 'REPS'), (2, 'SECONDS')], default=core.utils.RepUnit['REPS'])),
                ('group', models.IntegerField()),
                ('group_order', models.IntegerField()),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Workout',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('device_updated', models.DateTimeField(null=True)),
                ('name', models.CharField(max_length=100)),
                ('level', models.IntegerField(choices=[(1, 'BEGINNER'), (2, 'INTERMEDIATE'), (3, 'ADVANCED')], default=core.utils.Level['INTERMEDIATE'])),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='WorkoutSet',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('device_updated', models.DateTimeField(null=True)),
                ('sets', models.IntegerField()),
                ('order', models.IntegerField()),
                ('workout', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workout_sets', to='workouts.Workout')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='TrainingExercise',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('device_updated', models.DateTimeField(null=True)),
                ('reps', models.IntegerField()),
                ('exercise_in_set', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='training_exercises', to='workouts.ExerciseInSet')),
                ('workout', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='training_exercises', to='workouts.Workout')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Training',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('device_updated', models.DateTimeField(null=True)),
                ('date', models.DateField()),
                ('start_time', models.DateTimeField(null=True)),
                ('end_time', models.DateTimeField(null=True)),
                ('exercise_in_set', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='trainings', to='workouts.ExerciseInSet')),
                ('workout', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='trainings', to='workouts.Workout')),
            ],
            options={
                'abstract': False,
            },
        ),
        # ExerciseInSet -> WorkoutSet FK added separately because
        # WorkoutSet is created after ExerciseInSet above.
        migrations.AddField(
            model_name='exerciseinset',
            name='workout_set',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='exercises_in_set', to='workouts.WorkoutSet'),
        ),
    ]
|
[
"srgarcia91@gmail.com"
] |
srgarcia91@gmail.com
|
730b513d27ee1bb307ed3b428f8a412c5135393b
|
8436711618438240714022e6868fcc3e315c75f0
|
/exam-python/shop_online/serializers.py
|
5e7119c2d34563fd25f7ddf527b9c5cc290a9657
|
[] |
no_license
|
MaximKhrenov/dip
|
5d01c8834cab1143a008f84492cddf4d215a8060
|
317072baceb6fdd17f982cd0cb2591d0d8abf44e
|
refs/heads/master
| 2021-06-16T00:40:10.948096
| 2019-02-28T14:37:39
| 2019-02-28T14:37:39
| 173,128,998
| 0
| 0
| null | 2021-04-20T17:53:20
| 2019-02-28T14:41:31
|
HTML
|
UTF-8
|
Python
| false
| false
| 2,855
|
py
|
from rest_framework import serializers
from shop_online.models import (Product, Category, User, Cart, Order, )
class ProductSerializers(serializers.ModelSerializer):
    """Expose every Product field (empty exclude = all model fields)."""
    class Meta:
        model = Product
        exclude = ()
class CategorySerializers(serializers.ModelSerializer):
    """Expose every Category field (empty exclude = all model fields)."""
    class Meta:
        model = Category
        exclude = ()
class UserSerializers(serializers.ModelSerializer):
    """User registration serializer.

    Hides Django auth internals from the API and write-protects the
    password, which is hashed via set_password() rather than stored raw.
    """
    password = serializers.CharField(write_only=True)

    class Meta:
        model = User
        exclude = (
            'is_superuser',
            'is_staff',
            'last_login',
            'date_joined',
            'is_active',
            'groups',
            'user_permissions',
        )

    def create(self, validated_data):
        # Renamed the misspelled 'validate_date' parameter to DRF's
        # conventional 'validated_data' (DRF passes it positionally,
        # so the rename is caller-compatible).
        user = User(
            email=validated_data['email'],
            username=validated_data['username'],
            first_name=validated_data['first_name'],
            last_name=validated_data['last_name'],
        )
        user.set_password(validated_data['password'])
        user.save()
        return user
class CartSerializers(serializers.ModelSerializer):
    """Cart line serializer.

    The client supplies title and count; user and the computed total
    price are read-only and filled in server-side.
    """
    user = serializers.CharField(read_only=True)
    count_products = serializers.IntegerField(write_only=True)
    price_product = serializers.IntegerField(read_only=True)

    class Meta:
        model = Cart
        # The original declared both `exclude = ()` and `fields`; DRF
        # forbids combining the two Meta options, and `fields` is the
        # option actually intended here.
        fields = ('user', 'title', 'count_products', 'price_product',)

    def create(self, validated_data):
        user = validated_data['user']
        count_products = validated_data['count_products']
        title = validated_data['title']
        # Look up the unit price by product title to compute the total.
        price = Product.objects.get(title_product=title)
        cart = Cart(
            user=user,
            title=title,
            price_product=price.price_product * count_products,
            count_products=count_products,
        )
        cart.save()
        return cart
class OrderSerializers(serializers.ModelSerializer):
    """Order serializer; the owning user is read-only (set server-side)."""
    user = serializers.CharField(read_only=True)

    class Meta:
        model = Order
        # The original declared both `exclude = ()` and `fields`; DRF
        # forbids combining the two Meta options — keep the intended
        # `fields`.
        fields = ('user', 'address', 'title', 'date_time',)

    def create(self, validated_data):
        user = validated_data['user']
        date_time = validated_data['date_time']
        address = validated_data['address']
        product = validated_data['title']
        order = Order(
            date_time=date_time,
            user=user,
            address=address,
            title=product
        )
        order.save()
        return order
class HistorySerializers(serializers.ModelSerializer):
    """Full read-out of Order records for the purchase-history endpoint."""
    class Meta:
        model = Order
        exclude = ()
class ProfileSerializers(serializers.ModelSerializer):
    """User profile serializer exposing all User model fields."""
    class Meta:
        model = User
        # DRF's ModelSerializer requires an explicit 'fields' or
        # 'exclude' option; empty exclude (= all fields) matches the
        # convention of the sibling serializers in this module.
        exclude = ()
|
[
"khrenoffmaxim@yandex.ru"
] |
khrenoffmaxim@yandex.ru
|
3ac86f71fb5162f00646cca9ac8f61ca39c0d2c3
|
e543f59a476a8c17ca660e727b5170dadaed81f3
|
/CrossValidation.py
|
a9721254488c6598096c14f88666d26c06139430
|
[] |
no_license
|
AbhinavJhanwar/Data-Processing-Techniques
|
f49768b4f2d77ee0870ab6d2bd63f3e3a9b5557b
|
77a7f27489caa4166e836a6e4247065aee5398d1
|
refs/heads/master
| 2021-06-13T21:05:57.210284
| 2021-03-19T09:09:21
| 2021-03-19T09:09:21
| 168,096,699
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,862
|
py
|
'''
Created on Aug 3, 2017

@author: abhinav.jhanwar

Demonstrates k-fold cross-validation: grid search over KNN's n_neighbors
for a classification dataset, then RMSE estimation for a linear
regression dataset.
'''

import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.linear_model import LinearRegression
from sklearn import metrics
from sklearn.model_selection import KFold, cross_val_score, StratifiedKFold
import csv

#CLASSIFICATION MODELS
url = "iris_data.csv"
# Read only the header row with csv to get column names; pandas then
# loads the full table.
with open(url) as csvFile:
    reader = csv.reader(csvFile)
    names = next(reader)
data = pd.read_csv(url)
feature_cols = names[0:-1] #names will be replaced by features directly taken from user selection
X = data[feature_cols]
y = data[names[-1]] #names replaced by target taken from user selection

#kf = KFold(n_splits=5, shuffle=False)
# print the contents of each training and testing set
# ^ - forces the field to be centered within the available space
# .format() - formats the string similar to %s or %n
# enumerate(sequence, start=0) - returns an enumerate object
#print('{:^61} {}'.format('Training set observations', 'Testing set observations'))
#for train_index, test_index in kf.split(range(1,26)):
#    print('{} {!s:^25}'.format(train_index, test_index))

# 10-fold cross-validation with K=5 for KNN (the n_neighbors parameter)
# k = 5 for KNeighborsClassifier
k_scores = []
max_score=0
for k in range(1,31):
    knn = KNeighborsClassifier(n_neighbors=k)
    # Use cross_val_score function
    # We are passing the entirety of X and y, not X_train or y_train, it takes care of splitting the data
    # cv=10 for 10 folds
    # scoring='accuracy' for evaluation metric - although they are many
    # use StratifiedKFold for imbalanced target class
    # NOTE(review): newer scikit-learn raises when random_state is given
    # without shuffle=True — confirm the pinned sklearn version or add
    # shuffle=True here.
    kfold = StratifiedKFold(n_splits=10, random_state=7)
    scores = cross_val_score(knn, X, y, scoring='accuracy', cv=kfold).mean()
    #print(scores)
    k_scores.append(scores)
    # use average accuracy as an estimate of out-of-sample accuracy
    # numpy array has a method mean()
    # Track the best mean accuracy seen so far (>= keeps the LARGEST k
    # among ties).
    if(k_scores[k-1]>=max_score):
        #print(k_scores[k-1])
        max_score = k_scores[k-1]
        #print(k)

#SIMILARLY GO FOR OTHER MODELS AND SELECT MODELS GIVING BEST SCORE

#REGRESSION MODELS
url = "Advertising.csv"
with open(url) as csvFile:
    reader = csv.reader(csvFile)
    names = next(reader)
data = pd.read_csv(url)
feature_cols = names[0:-1] #names will be replaced by features directly taken from user selection
X = data[feature_cols]
y = data[names[-1]] #names replaced by target taken from user selection
model = LinearRegression()
# store scores in scores object
# we can't use accuracy as our evaluation metric since that's only relevant for classification problems
# RMSE is not directly available so we will use MSE
# sklearn reports negated MSE for 'neg_mean_squared_error'; negate back,
# take the square root per fold, then average.
rmse_scores = np.sqrt(-cross_val_score(model, X, y, scoring='neg_mean_squared_error', cv=10)).mean()
print(rmse_scores)
|
[
"abhinav.jhanwar@accenture.com"
] |
abhinav.jhanwar@accenture.com
|
7fefa54d9f554c5233b70cab9dea453e0c81a844
|
23d2b9692d2723ec0f1f63497f36f5b9b59b0346
|
/2_Python_Data_Structures/Week_6/example_11.py
|
a43d19f289ff4d48bc9beb7bf41ccc2a1e07c3a2
|
[] |
no_license
|
lincolnjohnny/py4e
|
1ee331d9da7dab599dee1208de978a6c266a44af
|
a18deb696cde6d55f9266f389c0ccfd6eea2c540
|
refs/heads/master
| 2022-12-26T04:23:45.651991
| 2020-10-12T15:45:53
| 2020-10-12T15:45:53
| 300,396,585
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 372
|
py
|
# The top 10 most common words in a file.
from collections import Counter

# Count word occurrences; `with` guarantees the handle is closed (the
# original leaked the file opened via a bare open()).
counts = Counter()
with open('romeo.txt') as fhand:
    for line in fhand:
        counts.update(line.split())

# Sort (count, word) pairs descending — identical ordering to the
# original's (value, key) tuple sort, including word-descending
# tie-breaking on equal counts — and print the top 10.
for value, key in sorted(((v, k) for k, v in counts.items()), reverse=True)[:10]:
    print(key, value)
|
[
"lincolnjohnny@gmail.com"
] |
lincolnjohnny@gmail.com
|
b8c4e05691d3a6bcb98ea81c533c3e2d9b3ec506
|
638bfb68541bf88fe7ed09a89b67d7e7f4f2b13f
|
/informacion/migrations/0003_comentario_autor.py
|
bb5c6f8cb4c20c69ca64fe623b6a28ef8c0bd612
|
[] |
no_license
|
LuOfLuck/perfil-sholl
|
748420734050acdda18afc4402852f6b0a4df00d
|
58ae5c649b9590d58f5fed238b548efb12180bdb
|
refs/heads/master
| 2023-04-16T21:10:06.545946
| 2021-05-03T19:52:51
| 2021-05-03T19:52:51
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 599
|
py
|
# Generated by Django 2.2.3 on 2020-12-30 01:03
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: adds an optional 'autor' FK from
    # Comentario to the project's user model. blank/null allow existing
    # rows and anonymous comments.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('informacion', '0002_comentario'),
    ]
    operations = [
        migrations.AddField(
            model_name='comentario',
            name='autor',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
    ]
|
[
"lcas83539@gmail.com"
] |
lcas83539@gmail.com
|
c3b0f249410fb61263335e29c50e31b18b701255
|
30ff67c55ca0b87cf250cc9c526dde3b25d3f24e
|
/BMpro/QueAns/migrations/0001_initial.py
|
2b619dd79c39d7036c7017dcff30868996bf5053
|
[] |
no_license
|
nerajshrm/QueAnsDj
|
17ea43dad05e125e0c2df603157cacfadf953e93
|
e64cc56402e66fabffca160af1a6a17f7df7ac6b
|
refs/heads/master
| 2020-03-28T07:33:16.284237
| 2018-09-08T06:10:57
| 2018-09-08T06:10:57
| 147,909,312
| 0
| 0
| null | 2018-09-08T06:49:49
| 2018-09-08T06:49:48
| null |
UTF-8
|
Python
| false
| false
| 569
|
py
|
# Generated by Django 2.0.5 on 2018-09-07 06:42
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates QuesModel with a question
    # statement ('stmt') and a short question-type code ('qtype').
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='QuesModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('stmt', models.CharField(max_length=50)),
                ('qtype', models.CharField(max_length=5)),
            ],
        ),
    ]
|
[
"noreply@github.com"
] |
nerajshrm.noreply@github.com
|
f3a56eab63df2e25ca7185b2b359bdc948581b9a
|
f20f3ab827eab5ad6a3f97b35d10d7afe2f118d5
|
/__init__.py
|
e910486ed0e4b8b6f2fb6655c4441fbbf9959a91
|
[
"MIT"
] |
permissive
|
bradparks/Sprytile__blender_add_on_sprite_sheets_tile_maps
|
9adb618bbd0e1f4e9334b8f4e534cff6fa9cc9d7
|
421c7efe3ea9ebd7e0f8dca7fb797eca597964d2
|
refs/heads/master
| 2021-05-08T04:37:22.745456
| 2017-10-12T15:59:17
| 2017-10-12T15:59:17
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 22,891
|
py
|
# Blender add-on registration metadata; Blender reads this dict to list
# the add-on in preferences. 'blender' is the minimum supported version.
bl_info = {
    "name": "Sprytile Painter",
    "author": "Jeiel Aranal",
    "version": (0, 4, 24),
    "blender": (2, 7, 7),
    "description": "A utility for creating tile based low spec scenes with paint/map editor tools",
    "location": "View3D > UI panel > Sprytile",
    "wiki_url": "https://chemikhazi.github.io/Sprytile/",
    "tracker_url": "https://github.com/ChemiKhazi/Sprytile/issues",
    "category": "Paint"
}
# Put Sprytile directory is sys.path so modules can be loaded
import os
import sys
import inspect
# Resolve this file's directory at runtime so absolute imports of the
# bundled submodules work regardless of install location.
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0]))
if cmd_subfolder not in sys.path:
    sys.path.insert(0, cmd_subfolder)

# If "bpy" is already bound, Blender is RELOADING the add-on (e.g. F8):
# reload the submodules in place instead of importing them fresh.
locals_list = locals()
if "bpy" in locals_list:
    from importlib import reload
    reload(addon_updater_ops)
    reload(sprytile_gui)
    reload(sprytile_modal)
    reload(sprytile_panel)
    reload(sprytile_utils)
    reload(sprytile_uv)
    reload(tool_build)
    reload(tool_paint)
    reload(tool_fill)
    reload(tool_set_normal)
else:
    from . import sprytile_gui, sprytile_modal, sprytile_panel, sprytile_utils, sprytile_uv
    from sprytile_tools import *

import bpy
import bpy.utils.previews
from . import addon_updater_ops
from bpy.props import *
import rna_keymap_ui
class SprytileSceneSettings(bpy.types.PropertyGroup):
    def set_normal(self, value):
        # Setter for normal_mode: selecting the already-active axis
        # toggles the lock; selecting a new axis switches to it, locks,
        # and notifies the modal operator. Writes go through item access
        # (self[...]) to avoid re-triggering this setter.
        if "lock_normal" not in self.keys():
            self["lock_normal"] = False
        if self["lock_normal"] is True:
            return
        if self["normal_mode"] == value:
            self["lock_normal"] = not self["lock_normal"]
            return
        self["normal_mode"] = value
        self["lock_normal"] = True
        bpy.ops.sprytile.axis_update('INVOKE_REGION_WIN')
    def get_normal(self):
        # Getter for normal_mode; lazily initializes to 3 (the 'Z' item).
        if "normal_mode" not in self.keys():
            self["normal_mode"] = 3
        return self["normal_mode"]
normal_mode = EnumProperty(
items=[
("X", "X", "World X-Axis", 1),
("Y", "Y", "World Y-Axis", 2),
("Z", "Z", "World X-Axis", 3)
],
name="Normal Mode",
description="Normal to create the mesh on",
default='Z',
set=set_normal,
get=get_normal
)
lock_normal = BoolProperty(
name="Lock",
description="Lock normal used to create meshes",
default=False
)
snap_translate = BoolProperty(
name="Snap Translate",
description="Snap pixel translations to pixel grid",
default=True
)
paint_mode = EnumProperty(
items=[
("PAINT", "Paint", "Advanced UV paint tools", 1),
("MAKE_FACE", "Build", "Only create new faces", 3),
("SET_NORMAL", "Set Normal", "Select a normal to use for face creation", 2),
("FILL", "Fill", "Fill the work plane cursor", 4)
],
name="Sprytile Paint Mode",
description="Paint mode",
default='MAKE_FACE'
)
    def set_show_tools(self, value):
        # Setter for show_tools: when the tools UI is hidden, fall back
        # from the tools-only modes (SET_NORMAL=2, FILL=4) to Build (3).
        keys = self.keys()
        if "show_tools" not in keys:
            self["show_tools"] = False
        self["show_tools"] = value
        if value is False:
            if "paint_mode" not in keys:
                self["paint_mode"] = 3
            if self["paint_mode"] in {2, 4}:
                self["paint_mode"] = 3
    def get_show_tools(self):
        # Getter for show_tools; lazily initializes to False.
        if "show_tools" not in self.keys():
            self["show_tools"] = False
        return self["show_tools"]
show_tools = BoolProperty(
default=False,
set=set_show_tools,
get=get_show_tools
)
    def set_dummy(self, value):
        # Setter for the 4-way toggle vector backing the tool buttons.
        # Clicking the already-active toggle clears it (stops the modal
        # tool); clicking a new one selects that paint mode and starts
        # the modal operator.
        current_value = self.get_dummy_actual(True)
        value = list(value)
        # Drop toggles that are already on so a re-click reads as "off".
        for idx in range(len(value)):
            if current_value[idx] and current_value[idx] & value[idx]:
                value[idx] = False
        # UI position -> paint_mode id (see the paint_mode EnumProperty).
        mode_value_idx = [1, 3, 2, 4]

        def get_mode_value(arr_value):
            # First active toggle wins; -1 means nothing is selected.
            for i in range(len(arr_value)):
                if arr_value[i]:
                    return mode_value_idx[i]
            return -1
        run_modal = True
        paint_mode = get_mode_value(value)
        if paint_mode > 0:
            self["paint_mode"] = paint_mode
        else:
            # No mode selected: toggle the modal tool off if it was
            # running, otherwise (re)start it in the current mode.
            run_modal = False
            if "is_running" in self.keys():
                if self["is_running"]:
                    self["is_running"] = False
                else:
                    run_modal = True
        if run_modal:
            bpy.ops.sprytile.modal_tool('INVOKE_REGION_WIN')
    def get_dummy_actual(self, force_real):
        # Build the 4-element toggle vector for the current paint_mode.
        # All-False unless the modal tool is running or force_real is set.
        # NOTE(review): reads self["is_running"] without a presence guard
        # — appears to rely on get_dummy initializing it first; confirm.
        if "paint_mode" not in self.keys():
            self["paint_mode"] = 3
        out_value = [False, False, False, False]
        if self["is_running"] or force_real:
            # paint_mode id -> UI toggle position (inverse of set_dummy).
            index_value_lookup = 1, 3, 2, 4
            set_idx = index_value_lookup.index(self["paint_mode"])
            out_value[set_idx] = True
        return out_value
def get_dummy(self):
if "is_running" not in self.keys():
self["is_running"] = False
is_running = self["is_running"]
return self.get_dummy_actual(is_running)
set_paint_mode = BoolVectorProperty(
name="Set Paint Mode",
description="Set Sprytile Tool Mode",
size=4,
set=set_dummy,
get=get_dummy
)
world_pixels = IntProperty(
name="World Pixel Density",
description="How many pixels are displayed in one world unit",
subtype='PIXEL',
default=32,
min=8,
max=2048
)
paint_normal_vector = FloatVectorProperty(
name="Srpytile Last Paint Normal",
description="Last saved painting normal used by Sprytile",
subtype='DIRECTION',
default=(0.0, 0.0, 1.0)
)
paint_up_vector = FloatVectorProperty(
name="Sprytile Last Paint Up Vector",
description="Last saved painting up vector used by Sprytile",
subtype='DIRECTION',
default=(0.0, 1.0, 0.0)
)
uv_flip_x = BoolProperty(
name="Flip X",
default=False
)
uv_flip_y = BoolProperty(
name="Flip Y",
default=False
)
mesh_rotate = FloatProperty(
name="Grid Rotation",
description="Rotation of mesh creation",
subtype='ANGLE',
unit='ROTATION',
step=9000,
precision=0,
min=-6.28319,
max=6.28319,
default=0.0
)
cursor_snap = EnumProperty(
items=[
('VERTEX', "Vertex", "Snap cursor to nearest vertex", "SNAP_GRID", 1),
('GRID', "Grid", "Snap cursor to grid", "SNAP_VERTEX", 2)
],
name="Cursor snap mode",
description="Sprytile cursor snap mode"
)
cursor_flow = BoolProperty(
name="Cursor Flow",
description="Cursor automatically follows mesh building",
default=False
)
paint_align = EnumProperty(
items=[
('TOP_LEFT', "Top Left", "", 1),
('TOP', "Top", "", 2),
('TOP_RIGHT', "Top Right", "", 3),
('LEFT', "Left", "", 4),
('CENTER', "Center", "", 5),
('RIGHT', "Right", "", 6),
('BOTTOM_LEFT', "Bottom Left", "", 7),
('BOTTOM', "Bottom", "", 8),
('BOTTOM_RIGHT', "Bottom Right", "", 9),
],
name="Paint Align",
description="Paint alignment mode",
default='CENTER'
)
def set_align_toggle(self, value, row):
prev_value = self.get_align_toggle(row)
row_val = 0
if row == 'top':
row_val = 0
elif row == 'middle':
row_val = 3
elif row == 'bottom':
row_val = 6
else:
return
col_val = 0
if value[0] and prev_value[0] != value[0]:
col_val = 1
elif value[1] and prev_value[1] != value[1]:
col_val = 2
elif value[2] and prev_value[2] != value[2]:
col_val = 3
else:
return
self["paint_align"] = row_val + col_val
    # Row-specific wrappers used as the BoolVectorProperty setters below
    def set_align_top(self, value):
        self.set_align_toggle(value, "top")
    def set_align_middle(self, value):
        self.set_align_toggle(value, "middle")
    def set_align_bottom(self, value):
        self.set_align_toggle(value, "bottom")
def get_align_toggle(self, row):
if "paint_align" not in self.keys():
self["paint_align"] = 5
align = self["paint_align"]
if row == 'top':
return align == 1, align == 2, align == 3
if row == 'middle':
return align == 4, align == 5, align == 6
if row == 'bottom':
return align == 7, align == 8, align == 9
return False, False, False
    # Row-specific wrappers used as the BoolVectorProperty getters below
    def get_align_top(self):
        return self.get_align_toggle("top")
    def get_align_middle(self):
        return self.get_align_toggle("middle")
    def get_align_bottom(self):
        return self.get_align_toggle("bottom")
paint_align_top = BoolVectorProperty(
name="Align",
size=3,
set=set_align_top,
get=get_align_top
)
paint_align_middle = BoolVectorProperty(
name="Align",
size=3,
set=set_align_middle,
get=get_align_middle
)
paint_align_bottom = BoolVectorProperty(
name="Align",
size=3,
set=set_align_bottom,
get=get_align_bottom
)
paint_hinting = BoolProperty(
name="Hinting",
description="Selected edge is used as X axis for UV mapping."
)
paint_stretch_x = BoolProperty(
name="Stretch X",
description="Stretch face over X axis of tile"
)
paint_stretch_y = BoolProperty(
name="Stretch Y",
description="Stretch face over Y axis of tile"
)
paint_edge_snap = BoolProperty(
name="Stretch Edge Snap",
description="Snap UV vertices to edges of tile when stretching.",
default=True
)
edge_threshold = FloatProperty(
name="Threshold",
description="Ratio of UV tile near to edge to apply snap",
min=0.01,
max=0.5,
soft_min=0.01,
soft_max=0.5,
default=0.35
)
paint_uv_snap = BoolProperty(
name="UV Snap",
default=True,
description="Snap UV vertices to texture pixels"
)
is_running = BoolProperty(
name="Sprytile Running",
description="Exit Sprytile tool"
)
is_snapping = BoolProperty(
name="Is Cursor Snap",
description="Is cursor snapping currently activated"
)
has_selection = BoolProperty(
name="Has selection",
description="Is there a mesh element selected"
)
is_grid_translate = BoolProperty(
name="Is Grid Translate",
description="Grid translate operator is running"
)
show_extra = BoolProperty(
name="Extra UV Grid Settings",
default=False
)
show_overlay = BoolProperty(
name="Show Grid Overlay",
default=True
)
auto_merge = BoolProperty(
name="Auto Merge",
description="Automatically merge vertices when creating faces",
default=True
)
auto_join = BoolProperty(
name="Join Multi",
description="Join multi tile faces when possible",
default=False
)
    def set_reload(self, value):
        """Setter for auto_reload; starts the auto-reload modal when enabled."""
        self["auto_reload"] = value
        if value is True:
            bpy.ops.sprytile.reload_auto('INVOKE_REGION_WIN')
    def get_reload(self):
        # Seed the id-property on first read
        if "auto_reload" not in self.keys():
            self["auto_reload"] = False
        return self["auto_reload"]
    # Periodically reload image textures from disk while enabled
    auto_reload = BoolProperty(
        name="Auto",
        description="Automatically reload images every few seconds",
        default=False,
        set=set_reload,
        get=get_reload
    )
fill_lock_transform = BoolProperty(
name="Lock Transforms",
description="Filled faces keep current rotations",
default=False,
)
axis_plane_display = EnumProperty(
items=[
('OFF', "Off", "Always Off", "RADIOBUT_OFF", 1),
('ON', "On", "Always On", "RADIOBUT_ON", 2),
('MIDDLE_MOUSE', "View", "Only when changing view", "CAMERA_DATA", 3)
],
name="Work Plane Cursor",
description="Display mode of Work Plane Cursor",
default='MIDDLE_MOUSE'
)
axis_plane_settings = BoolProperty(
name="Axis Plane Settings",
description="Show Work Plane Cursor settings",
default=False
)
axis_plane_size = IntVectorProperty(
name="Plane Size",
description="Size of the Work Plane Cursor",
size=2,
default=(2, 2),
min=1,
soft_min=1
)
axis_plane_color = FloatVectorProperty(
name="Plane Color",
description="Color Work Plane Cursor is drawn with",
size=3,
default=(0.7, 0.7, 0.7),
subtype='COLOR'
)
class SprytileMaterialGridSettings(bpy.types.PropertyGroup):
    """Settings for one tile grid on a material: cell size, padding,
    margins, offset, UV rotation and the current tile selection."""
    mat_id = StringProperty(
        name="Material Id",
        description="Name of the material this grid references",
        default=""
    )
    # Unique id of this grid within the scene (-1 = unassigned)
    id = IntProperty(
        name="Grid ID",
        default=-1
    )
    name = StringProperty(
        name="Grid Name"
    )
    grid = IntVectorProperty(
        name="Size",
        description="Grid size, in pixels",
        min=1,
        size=2,
        subtype='XYZ',
        default=(32, 32)
    )
    def set_padding(self, value):
        """Setter for padding. Padding eats into the cell on both sides
        (hence the *2); changes that would shrink the usable cell below
        1px are rejected."""
        current_padding = self.get_padding()
        if "grid" not in self.keys():
            self["grid"] = (32, 32)
        padding_delta = [ (value[0] - current_padding[0]) * 2, (value[1] - current_padding[1]) * 2]
        new_grid = [self["grid"][0] - padding_delta[0], self["grid"][1] - padding_delta[1]]
        if new_grid[0] < 1 or new_grid[1] < 1:
            return
        self["grid"] = (new_grid[0], new_grid[1])
        self["padding"] = value
    def get_padding(self):
        # Seed the id-property on first read
        if "padding" not in self.keys():
            self["padding"] = (0, 0)
        return self["padding"]
    padding = IntVectorProperty(
        name="Padding",
        description="Cell padding, in pixels",
        min=0,
        size=2,
        subtype='XYZ',
        default=(0, 0),
        set=set_padding,
        get=get_padding
    )
    margin = IntVectorProperty(
        name="Margin",
        description="Spacing between tiles (top, right, bottom, left)",
        min=0,
        size=4,
        subtype='XYZ',
        default=(0, 0, 0, 0)
    )
    offset = IntVectorProperty(
        name="Offset",
        description="Offset of the grid",
        subtype='TRANSLATION',
        size=2,
        default=(0, 0)
    )
    rotate = FloatProperty(
        name="UV Rotation",
        description="Rotation of UV grid",
        subtype='ANGLE',
        unit='ROTATION',
        default=0.0
    )
    # Selected tile rectangle: (x, y, width, height) in grid cells
    tile_selection = IntVectorProperty(
        name="Tile Selection",
        size=4,
        default=(0, 0, 1, 1)
    )
class SprytileMaterialData(bpy.types.PropertyGroup):
    """Per-material Sprytile data: the referenced material id plus its
    collection of tile grids, and the expanded/collapsed UI state."""
    def expanded_default(self):
        # Seed the raw id-property on first access
        if 'is_expanded' not in self.keys():
            self['is_expanded'] = True
    def get_expanded(self):
        self.expanded_default()
        return self['is_expanded']
    def set_expanded(self, value):
        """Setter for is_expanded; rebuilds the grid list UI on change."""
        self.expanded_default()
        # Compare by value, not identity: the id-property stores an int
        # while `value` arrives as a bool, so the original
        # `self['is_expanded'] is not value` could misreport a change
        # (e.g. 1 is not True) and trigger spurious rebuilds.
        do_rebuild = self['is_expanded'] != value
        self['is_expanded'] = value
        if do_rebuild:
            bpy.ops.sprytile.build_grid_list()
    mat_id = StringProperty(
        name="Material Id",
        description="Name of the material this grid references",
        default=""
    )
    is_expanded = BoolProperty(
        default=True,
        get=get_expanded,
        set=set_expanded
    )
    grids = CollectionProperty(type=SprytileMaterialGridSettings)
class SprytileGridDisplay(bpy.types.PropertyGroup):
    """One row of the grid list UI: either a material header
    (grid_id == -1) or a grid entry."""
    mat_id = StringProperty(default="")
    grid_id = IntProperty(default=-1)
    def get_mat_name(self):
        # Resolve the display name from bpy.data; empty string when the
        # material reference is unset or no longer exists.
        if self.mat_id == "":
            return ""
        data_idx = bpy.data.materials.find(self.mat_id)
        if data_idx < 0:
            return ""
        return bpy.data.materials[self.mat_id].name
    def set_mat_name(self, value):
        # Rename the underlying material, then revalidate grids so
        # material references stay consistent.
        if self.mat_id == "":
            return
        data_idx = bpy.data.materials.find(self.mat_id)
        if data_idx < 0:
            return
        bpy.data.materials[self.mat_id].name = value
        bpy.ops.sprytile.validate_grids()
    # Editable material name shown in the list row
    mat_name = StringProperty(
        get=get_mat_name,
        set=set_mat_name
    )
class SprytileGridList(bpy.types.PropertyGroup):
    """Flattened material/grid list shown in the UI, plus the currently
    selected row index."""
    def get_idx(self):
        # Seed the id-property on first read
        if "idx" not in self.keys():
            self["idx"] = 0
        return self["idx"]
    def set_idx(self, value):
        """Setter for idx; skips material-header rows and pushes the
        selected grid id onto the active object."""
        # If the selected index is a material entry
        # Move to next entry
        list_size = len(self.display)
        while value < (list_size - 1) and self.display[value].mat_id != "":
            value += 1
        # Clamp into the valid range (0 when the list is empty)
        value = max(0, min(len(self.display)-1, value))
        self["idx"] = value
        if value < 0 or value >= len(self.display):
            return
        # Set the object grid id to target grid
        target_entry = self.display[value]
        if target_entry.grid_id != -1:
            bpy.context.object.sprytile_gridid = target_entry.grid_id
    display = bpy.props.CollectionProperty(type=SprytileGridDisplay)
    idx = IntProperty(
        default=0,
        get=get_idx,
        set=set_idx
    )
def setup_props():
    """Attach Sprytile property groups to the Scene and Object types."""
    bpy.types.Scene.sprytile_data = bpy.props.PointerProperty(type=SprytileSceneSettings)
    bpy.types.Scene.sprytile_mats = bpy.props.CollectionProperty(type=SprytileMaterialData)
    bpy.types.Scene.sprytile_list = bpy.props.PointerProperty(type=SprytileGridList)
    bpy.types.Scene.sprytile_ui = bpy.props.PointerProperty(type=sprytile_gui.SprytileGuiData)
    # Per-object: which grid this object was last painted with (-1 = none)
    bpy.types.Object.sprytile_gridid = IntProperty(
        name="Grid ID",
        description="Grid index used for object",
        default=-1
    )
def teardown_props():
    """Remove the properties added by setup_props (called on unregister)."""
    del bpy.types.Scene.sprytile_data
    del bpy.types.Scene.sprytile_mats
    del bpy.types.Scene.sprytile_list
    del bpy.types.Scene.sprytile_ui
    del bpy.types.Object.sprytile_gridid
class SprytileAddonPreferences(bpy.types.AddonPreferences):
    """Addon preferences: preview transparency plus addon-updater settings."""
    bl_idname = __package__
    preview_transparency = bpy.props.FloatProperty(
        name="Preview Alpha",
        description="Transparency level of build preview cursor",
        default=0.8,
        min=0,
        max=1
    )
    # addon updater preferences
    # NOTE(review): the "intrval" spelling appears intentional (these names
    # are read by the addon-updater module) -- confirm before renaming.
    auto_check_update = bpy.props.BoolProperty(
        name="Auto-check for Update",
        description="If enabled, auto-check for updates using an interval",
        default=False,
    )
    updater_intrval_months = bpy.props.IntProperty(
        name='Months',
        description="Number of months between checking for updates",
        default=0,
        min=0
    )
    updater_intrval_days = bpy.props.IntProperty(
        name='Days',
        description="Number of days between checking for updates",
        default=7,
        min=0,
    )
    updater_intrval_hours = bpy.props.IntProperty(
        name='Hours',
        description="Number of hours between checking for updates",
        default=0,
        min=0,
        max=23
    )
    updater_intrval_minutes = bpy.props.IntProperty(
        name='Minutes',
        description="Number of minutes between checking for updates",
        default=0,
        min=0,
        max=59
    )
    def draw(self, context):
        """Draw the preferences panel, including the editable Tile Mode
        shortcut (if bound) and the updater settings."""
        layout = self.layout
        layout.prop(self, "preview_transparency")
        # Look up the user-editable keymap entry for the modal tool
        kc = bpy.context.window_manager.keyconfigs.user
        km = kc.keymaps['Mesh']
        kmi_idx = km.keymap_items.find('sprytile.modal_tool')
        if kmi_idx >= 0:
            layout.label(text="Tile Mode Shortcut")
            col = layout.column()
            kmi = km.keymap_items[kmi_idx]
            km = km.active()
            col.context_pointer_set("keymap", km)
            rna_keymap_ui.draw_kmi([], kc, km, kmi, col, 0)
        addon_updater_ops.update_settings_ui(self, context)
def setup_keymap():
    """Register Sprytile keymaps: the activation shortcut and the modal map.

    Every created keymap item is tracked in SprytileModalTool.keymaps so
    teardown_keymap can remove them again on unregister.
    """
    km_array = sprytile_modal.SprytileModalTool.keymaps
    win_mgr = bpy.context.window_manager
    key_config = win_mgr.keyconfigs.addon
    # Ctrl+Shift+Space in mesh edit mode starts the modal tool
    keymap = key_config.keymaps.new(name='Mesh', space_type='EMPTY')
    km_array[keymap] = [
        keymap.keymap_items.new("sprytile.modal_tool", 'SPACE', 'PRESS', ctrl=True, shift=True)
    ]
    # Modal map: keys handled while the tool is running
    keymap = key_config.keymaps.new(name="Sprytile Paint Modal Map", space_type='EMPTY', region_type='WINDOW', modal=True)
    km_items = keymap.keymap_items
    km_array[keymap] = [
        km_items.new_modal('CANCEL', 'ESC', 'PRESS'),
        km_items.new_modal('SNAP', 'S', 'ANY'),
        km_items.new_modal('FOCUS', 'W', 'PRESS'),
        km_items.new_modal('ROTATE_LEFT', 'ONE', 'PRESS'),
        km_items.new_modal('ROTATE_RIGHT', 'TWO', 'PRESS'),
        km_items.new_modal('FLIP_X', 'THREE', 'PRESS'),
        km_items.new_modal('FLIP_Y', 'FOUR', 'PRESS')
    ]
    # Human-readable labels matching the modal entries above, in order
    sprytile_modal.SprytileModalTool.modal_values = [
        'Cancel',
        'Cursor Snap',
        'Cursor Focus',
        'Rotate Left',
        'Rotate Right',
        'Flip X',
        'Flip Y'
    ]
def teardown_keymap():
    """Remove all keymap items registered by Sprytile and clear the registry.

    Inverse of setup_keymap; called on addon unregister.
    """
    for keymap in sprytile_modal.SprytileModalTool.keymaps:
        # Iterate over a snapshot: removing entries from the collection
        # while iterating it directly can skip items and leave stale
        # keymap entries behind.
        for keymap_item in list(keymap.keymap_items):
            keymap.keymap_items.remove(keymap_item)
    sprytile_modal.SprytileModalTool.keymaps.clear()
def register():
    """Addon entry point: register the updater, toolbar icons, classes,
    scene/object properties and keymaps."""
    addon_updater_ops.register(bl_info)
    # Load the custom toolbar icons shipped in the addon's icons/ folder.
    # Name/file pairs are kept together instead of the original pair of
    # parallel tuples indexed by range(len(...)).
    sprytile_panel.icons = bpy.utils.previews.new()
    dirname = os.path.dirname(__file__)
    icon_info = (
        ('SPRYTILE_ICON_BUILD', 'icon-build.png'),
        ('SPRYTILE_ICON_PAINT', 'icon-paint.png'),
        ('SPRYTILE_ICON_FILL', 'icon-fill.png'),
        ('SPRYTILE_ICON_NORMAL', 'icon-setnormal.png')
    )
    for icon_name, icon_file in icon_info:
        icon_path = os.path.join(dirname, "icons", icon_file)
        sprytile_panel.icons.load(icon_name, icon_path, 'IMAGE')
    # Panel is registered explicitly before the bulk module registration
    bpy.utils.register_class(sprytile_panel.SprytilePanel)
    bpy.utils.register_module(__name__)
    setup_props()
    setup_keymap()
def unregister():
    """Addon teardown: unregister keymaps, properties, classes and icons,
    and drop the addon folder from sys.path again."""
    teardown_keymap()
    teardown_props()
    bpy.utils.unregister_class(sprytile_panel.SprytilePanel)
    bpy.utils.unregister_module(__name__)
    bpy.utils.previews.remove(sprytile_panel.icons)
    # Unregister self from sys.path as well
    cmd_subfolder = os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0]))
    sys.path.remove(cmd_subfolder)
# Allow running the script directly (e.g. from Blender's text editor)
if __name__ == "__main__":
    register()
|
[
"jeiel.aranal@gmail.com"
] |
jeiel.aranal@gmail.com
|
1f143695018dec07dee3da5d12049c169c34e0c8
|
65d6a5b8ed19ed65c9e6747fc737844320ad054b
|
/packages/python-ldap/Tests/newapi.py
|
bc7b7a3c3209dbef0a50ee4f7d1e6a9875b2cb76
|
[
"Python-2.0"
] |
permissive
|
mozilla/basket-lib
|
f3242c97508571516db9c86cfa4f54e469406aad
|
3978af377f5c132e30469cfc1af0e78fce356244
|
refs/heads/master
| 2023-09-03T19:49:52.871452
| 2015-12-03T21:51:42
| 2015-12-03T21:51:42
| 2,418,254
| 1
| 1
| null | 2022-01-12T19:47:40
| 2011-09-19T20:52:10
|
Python
|
UTF-8
|
Python
| false
| false
| 324
|
py
|
# Smoke test for python-ldap's "new API" against a local slapd instance
# listening on localhost:1390.
# NOTE(review): Python 2 syntax (print statement, `except Exc,e:`) -- this
# script will not run under Python 3.
import ldap
l=ldap.initialize('ldap://localhost:1390')
# Read the root DSE (base-scope search on the empty DN)
print l.search_s('',0)
# One-level search under the test suffix
l.search_s('dc=stroeder,dc=com',1)
try:
  # Searching a nonexistent base must raise NO_SUCH_OBJECT
  l.search_s('ou=not existent,dc=stroeder,dc=com',1)
except ldap.NO_SUCH_OBJECT,e:
  print str(e)
try:
  # A malformed filter string must raise FILTER_ERROR
  l.search_s('dc=stroeder,dc=com',2,'(objectclass')
except ldap.FILTER_ERROR,e:
  print str(e)
|
[
"abuchanan@mozilla.com"
] |
abuchanan@mozilla.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.