max_stars_repo_path stringlengths 3 269 | max_stars_repo_name stringlengths 4 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.05M | score float64 0.23 5.13 | int_score int64 0 5 |
|---|---|---|---|---|---|---|
LLINS_API/urls.py | majorblackc/LLINS-Hackathon-g5 | 4 | 12763151 | from django.urls import path
from LLINS_API import views
from rest_framework.urlpatterns import format_suffix_patterns
from django.contrib import admin
# URL routes for the LLINS API.
# Fix: the detail route was registered as 'patints/<int:pk>/' (typo). The
# corrected 'patients/<int:pk>/' route is added; the misspelled one is kept
# so any existing client that learned the old URL keeps working.
urlpatterns = [
    path('', admin.site.urls),
    path('patients/', views.patient_data_list),
    path('patients/<int:pk>/', views.patient_data_detail),
    path('patints/<int:pk>/', views.patient_data_detail),  # legacy typo route, kept for backward compatibility
    path('nets/', views.nets_list),
]
| 1.679688 | 2 |
tests/data/program_analysis/strings/Translation/str05.py | rsulli55/automates | 17 | 12763152 | <reponame>rsulli55/automates
from automates.program_analysis.for2py.format import *
from automates.program_analysis.for2py.strings import *
def main():
    """Exercise String.f_index with and without the "back" option and print results.

    Mirrors the Fortran str05 test: forward search (found / not found) and
    backward search (found at end, found mid-string, not found).
    """
    import sys  # fix: sys.stdout is used below but sys was never imported

    str1 = String(10)
    str1.set_("abcdef")
    n1 = str1.f_index("bc")              # forward hit
    n2 = str1.f_index("xyz")             # forward miss
    n3 = str1.f_index("f ", ["back"])    # backward hit at the end
    n4 = str1.f_index("cde", ["back"])   # backward hit mid-string
    n5 = str1.f_index("xyz", ["back"])   # backward miss
    fmt_10 = Format(['5(I3,X)'])
    write_str = fmt_10.write_line([n1, n2, n3, n4, n5])
    sys.stdout.write(write_str)

main()
| 2.71875 | 3 |
backend/api/forms/UpdateReviewForm.py | kukiamarilla/polijira | 1 | 12763153 | from django import forms
class UpdateReviewForm(forms.Form):
    """
    UpdateReviewForm validates request.data when modifying a review.

    Args:
        forms (Form): Django Form base class.

    Attributes:
        observacion (CharField): validates that the observation text is provided.
    """
    # Required free-text field; the (Spanish, user-facing) error message below
    # is returned to the client when the observation is missing.
    observacion = forms.CharField(
        required=True,
        error_messages={
            "required": "No especificaste la observacion"
        }
    )
| 2.390625 | 2 |
wordler/simulator.py | jacobepst/wordler | 0 | 12763154 | <reponame>jacobepst/wordler
"""class that simulates a game of wordle (with a game and a solver)"""
from wordler.solver import Solver
from wordler.wordle_game import WordleGame
from logging import getLogger
class Simulator:
    """A Wordle simulator: plays repeated games pairing a Solver with a WordleGame."""

    def __init__(self, display: bool = False, strategy: str = None):
        """Create a simulator.

        Args:
            display: forwarded to WordleGame to control per-game output.
            strategy: forwarded to Solver; None selects the solver's default.
        """
        self.words = []          # true target word of each simulated game
        self.total_guesses = []  # guesses taken in each simulated game
        self.display = display
        self.strategy = strategy

    def run(self, games: int = 100):
        """Loop through the desired number of games and simulate each one."""
        for _ in range(games):  # loop index was unused; renamed to _
            self.run_game()

    def run_game(self):
        """Simulate a single game and record its outcome."""
        logger = getLogger(__name__)
        s = Solver(strategy=self.strategy)
        w = WordleGame(display=self.display)
        if self.display:
            logger.info("-" * 45)
        while not w.complete:
            guess = s.guess()
            if guess is None:
                # Solver ran out of candidates: log the failure, record a
                # sentinel guess count, and abandon this game.
                # Fix: removed a leftover breakpoint() debugging call that
                # would freeze any non-interactive run.
                logger.warning("FAIL")
                logger.warning(f"{w.true_word}, {s.green}, {s.orange}, {s.max_repeats}")
                w.total_guesses = 10
                break
            response = w.guess(guess)
            s.process_response(response)
        self.words.append(w.true_word)
        self.total_guesses.append(w.total_guesses)
| 3.359375 | 3 |
bitcounting.py | boachiejude/kata | 0 | 12763155 | <gh_stars>0
"""
Write a function that takes an integer as input, and returns the number of bits that are equal to one in the binary representation of that number.
You can guarantee that input is non-negative.
Example: The binary representation of 1234 is 10011010010, so the function should return 5 in this case.
"""
def count_bits(n):
    """Return the number of 1-bits in the binary representation of *n*.

    Args:
        n: a non-negative integer.

    Returns:
        int: the population count, e.g. count_bits(1234) == 5.
    """
    # bin(n) yields e.g. '0b10011010010'; the '0b' prefix never contains '1',
    # so counting over the whole string is safe — no slicing or extra str()
    # conversions needed (the original wrapped bin() and the slice in str()).
    return bin(n).count("1")
| 4 | 4 |
ApplicationPerformance/webautomation/webAutomation.py | hsy5332/Blog | 0 | 12763156 | # to do 发送邮件,以及需要增加用例的执行结果
import time

from selenium import webdriver
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.keys import Keys

import ApplicationPerformance.sendReport as sendReport
import ApplicationPerformance.applicationperformance.launchTime as launchTime  # MAC
# import ApplicationPerformance.applicationperformance.launchTime as launchTime # Windows
class WebAutomation(object):
    # Launch a browser (original comment: 启动浏览)
    def startBrowser(self, browsername, testurl, *browserconfigure):
        """Start a Selenium WebDriver for the browser named in the test data.

        Args:
            browsername: human-readable name matched by substring
                ("谷歌" -> Chrome, "火狐" -> Firefox).
            testurl: NOTE(review): accepted but never used here — the caller
                navigates separately via driver.get(). Also note runCase()
                invokes this with only (browsername, browserconfigure), so the
                configure value actually arrives in this parameter — confirm.
            *browserconfigure: optional Firefox profile path as first element.

        Returns:
            A WebDriver instance, or (implicitly) None for unrecognized names
            after printing a Chinese warning.
        """
        if "谷歌" in browsername:
            driver = webdriver.Chrome()
            return driver
        elif "火狐" in browsername:
            if browserconfigure[0] != "":  # is a profile path configured?
                # Launch Firefox with a profile (e.g. with the XPath plugin installed).
                driver = webdriver.Firefox(webdriver.FirefoxProfile(browserconfigure[0]))
                return driver
            else:
                driver = webdriver.Firefox()
                return driver
        else:
            print("您的测试用例中,存在无法识别的浏览器名称,请检查用例。")
# 双击操作
def operateDoubleClick(self, operatetype, element, driver, caseid):
if operatetype == "双击_id":
try:
driver.find_element_by_id(element).double_click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "双击_xpath":
try:
driver.find_element_by_xpath(element).double_click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "双击_textname": # 点击textname
try:
driver.find_elements_by_name(element)[0].double_click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "双击_classname":
try:
driver.find_elements_by_class_name(element)[0].double_click() # 点击xpath
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "双击_linkname":
try:
driver.find_elements_by_link_text(element)[0].double_click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
else:
casereport = "用例编号:%s,执行不通过,该用例的元素属性或参数可能有问题,请检查该用例。" % (caseid)
return casereport
# 右点击击操作
def operateRightClick(self, operatetype, element, driver, caseid):
if operatetype == "右击_id":
try:
driver.find_element_by_id(element).context_click().perform()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "右击_xpath":
try:
driver.find_element_by_xpath(element).context_click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "右击_textname": # 点击textname
try:
driver.find_elements_by_name(element)[0].context_click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "右击_classname":
try:
driver.find_elements_by_class_name(element)[0].context_click() # 点击xpath
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "右击_linkname":
try:
driver.find_elements_by_link_text(element)[0].context_click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
else:
casereport = "用例编号:%s,执行不通过,该用例的元素属性或参数可能有问题,请检查该用例。" % (caseid)
return casereport
# 左点击击操作
def operateClick(self, operatetype, element, driver, caseid):
if operatetype == "点击_id":
try:
driver.find_element_by_id(element).click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "点击_xpath":
try:
driver.find_element_by_xpath(element).click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "点击_textname": # 点击textname
try:
driver.find_elements_by_name(element)[0].click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "点击_classname":
try:
driver.find_elements_by_class_name(element)[0].click() # 点击xpath
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "点击_linkname":
try:
driver.find_elements_by_link_text(element)[0].click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
# 扩展性 查找元素方法
elif operatetype == "点击_cssid":
try:
driver.find_element_by_css_selector("#%s" % (element)).click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "点击_cssname":
try:
driver.find_element_by_css_selector("a[name=\"%s\"]" % (element)).click()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
else:
casereport = "用例编号:%s,执行不通过,该用例的元素属性或参数可能有问题,请检查该用例。" % (caseid)
return casereport
# 检查元素是否存在
def operateCheckElement(self, operatetype, element, driver, caseid):
if operatetype == "查找_id":
try:
driver.find_element_by_id(element)
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "查找_xpath":
try:
driver.find_element_by_xpath(element)
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "查找_textname": # 查找textname
try:
driver.find_elements_by_name(element)[0]
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "查找_classname":
try:
driver.find_elements_by_class_name(element)[0]
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "查找_linkname":
try:
driver.find_elements_by_link_text(element)[0]
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "if包含_id":
try:
driver.find_element_by_id(element)
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "if包含_xpath":
try:
driver.find_element_by_xpath(element)
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "if包含_textname":
try:
driver.find_elements_by_name(element)[0]
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "if包含_classname":
try:
driver.find_elements_by_class_name(element)[0]
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "if包含_linkname":
try:
driver.find_elements_by_link_text(element)[0]
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
else:
casereport = "用例编号:%s,执行不通过,该用例的元素属性或参数可能有问题,请检查该用例。" % (caseid)
return casereport
# 清空输入框
def clearInput(self, operatetype, element, driver, caseid):
if operatetype == "清空输入框_id":
try:
driver.find_element_by_id(element).clear()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "清空输入框_xpath":
try:
driver.find_element_by_xpath(element).clear()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "清空输入框_textname":
try:
driver.find_elements_by_name(element)[0].clear()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
else:
casereport = "用例编号:%s,执行不通过,该用例的元素属性或参数可能有问题,请检查该用例。" % (caseid)
return casereport
# 输入操作
def operateInput(self, operatetype, element, driver, caseid, *parameter):
if operatetype == "输入_id":
try:
driver.find_element_by_id(element).send_keys(parameter[0])
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "输入_xpath":
try:
driver.find_element_by_xpath(element).send_keys(parameter[0])
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "输入_textname":
try:
driver.find_elements_by_name(element)[0].send_keys(parameter[0])
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
# 扩展性 查找元素方法
elif operatetype == "输入_cssid":
try:
driver.find_element_by_css_selector("#%s" % (element)).send_keys(parameter[0])
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "输入_cssname":
try:
driver.find_element_by_css_selector("a[name=\"%s\"]" % (element)).send_keys(parameter[0])
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
else:
casereport = "用例编号:%s,执行不通过,该用例的元素属性或参数可能有问题,请检查该用例。" % (caseid)
return casereport
# Android物理按键操作
def operatePhysicsKye(self, operatetype, element, driver, caseid):
if operatetype == "按enter_id":
try:
driver.find_element_by_id(element).send_keys(Keys.ENTER)
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "按enter_xpath":
try:
driver.find_element_by_xpath(element).send_keys(Keys.ENTER)
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "按enter_textname":
try:
driver.find_elements_by_name(element)[0].send_keys(Keys.ENTER)
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "pagedown_id":
try:
driver.find_element_by_id(element).send_keys(Keys.PAGE_DOWN)
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "pagedown_xpath":
try:
driver.find_element_by_xpath(element).send_keys(Keys.PAGE_DOWN)
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "pagedown_textname":
try:
driver.find_elements_by_name(element)[0].send_keys(Keys.PAGE_DOWN)
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "pageup_id":
try:
driver.find_element_by_id(element).send_keys(Keys.PAGE_UP)
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "pageup_xpath":
try:
driver.find_element_by_xpath(element).send_keys(Keys.PAGE_UP)
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "pageup_textname":
try:
driver.find_elements_by_name(element)[0].send_keys(Keys.PAGE_UP)
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "浏览器全屏":
try:
driver.maximize_window()
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
elif operatetype == "设置分辨率":
try:
windowslist = element.split(',')
driver.set_window_size(int(windowslist[0]), int(windowslist[1]))
casereport = "用例编号:%s,执行通过。" % (caseid)
return casereport
except:
casereport = "用例编号:%s,执行不通过。" % (caseid)
return casereport
else:
casereport = "用例编号:%s,执行不通过,该用例的元素属性或参数可能有问题,请检查该用例。" % (caseid)
return casereport
    # Execute the test cases (original comment: 执行用例)
    def runCase(self):
        """Drive a full data-driven automation run.

        Reads browser rows from the 'browserinfo' Excel sheet, and for each
        enabled browser reads case rows from 'browseefuncase', dispatching
        each row's operation type to the operate* helpers. Every case result
        is inserted into MySQL, and a summary e-mail is sent at the end.

        Side effects: launches real browsers, writes to the database, sends
        e-mail, and prints one report line per case.
        """
        deviceinfo = launchTime.ReadExcel().readeExcelData('browserinfo')
        startautomationtime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())  # start time of the automation run
        for i in range(1, deviceinfo.get('caserows')):
            devicesinfocase = deviceinfo.get('excledata_sheel').row_values(i)
            browsername = devicesinfocase[0]
            browserconfigure = devicesinfocase[1]
            testurl = devicesinfocase[2]
            browserstatus = devicesinfocase[3]
            print(devicesinfocase)
            eventid = time.strftime('%Y%m%d%H%M%S', time.localtime())
            if "Y" in browserstatus:
                # NOTE(review): startBrowser is declared as
                # (browsername, testurl, *browserconfigure) but is called here
                # with only two arguments, so browserconfigure lands in the
                # (unused) testurl parameter and the Firefox-profile branch can
                # never see it — confirm and fix the call signature.
                driver = WebAutomation().startBrowser(browsername, browserconfigure)
                time.sleep(5)
                driver.get(testurl)
                casedata = launchTime.ReadExcel().readeExcelData('browseefuncase')  # read the automation case data
                endcasenumber = []
                casenumber = []
                # Pre-scan all Excel case rows to record the row numbers of
                # "if" and "end" markers, used below to skip failed if-blocks.
                for j in range(1, casedata.get('caserows')):
                    excelcasedata = casedata.get('excledata_sheel').row_values(j)
                    operatetype = excelcasedata[1]
                    if "if" in operatetype:
                        casenumber.append(j)
                    if "end" in operatetype:
                        endcasenumber.append(j)
                x = 1
                ifnumber = 0
                try:
                    casecount = casedata.get('caserows')-1  # total number of cases
                    while x <= casecount:
                        excelcasedata = casedata.get('excledata_sheel').row_values(x)
                        x = x + 1
                        try:
                            caseid = int(excelcasedata[0])  # case id (fall back to raw cell if not an int)
                        except:
                            caseid = excelcasedata[0]
                        operatetype = excelcasedata[1]  # operation type
                        element = excelcasedata[2]  # element locator
                        parameter = str(excelcasedata[3])  # parameter; must be str because send_keys() requires a string
                        rundescribe = excelcasedata[6]  # step description
                        caseexecute = excelcasedata[7]  # case execute flag
                        driver.implicitly_wait(60)
                        startonecasetime = time.time()
                        if excelcasedata[5] == "":  # wait time column; default 2 seconds
                            waittime = 2
                        else:
                            waittime = int(excelcasedata[5])
                        if "Y" in caseexecute:
                            # NOTE(review): the elif branches below only print
                            # the helper's report — they never assign
                            # `casereport`, so the value saved to MySQL after
                            # this chain is stale (from a previous row) or
                            # undefined on the first row — confirm and fix.
                            if operatetype == "等待时间":
                                time.sleep(waittime)
                                casereport = "用例编号:%s,执行通过。" % (caseid)
                                print(casereport)
                            elif operatetype == "点击_id":
                                print(WebAutomation().operateClick(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "点击_xpath":
                                print(WebAutomation().operateClick(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "点击_textname":
                                print(WebAutomation().operateClick(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "点击_linkname":
                                print(WebAutomation().operateClick(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "点击_classname":
                                print(WebAutomation().operateClick(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "点击_cssid":
                                print(WebAutomation().operateClick(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "点击_cssname":
                                print(WebAutomation().operateClick(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "输入_id":
                                print(WebAutomation().operateInput(operatetype, element, driver, caseid, parameter))
                                time.sleep(waittime)
                            elif operatetype == "输入_xpath":
                                print(WebAutomation().operateInput(operatetype, element, driver, caseid, parameter))
                                time.sleep(waittime)
                            elif operatetype == "输入_textname":
                                print(WebAutomation().operateInput(operatetype, element, driver, caseid, parameter))
                                time.sleep(waittime)
                            elif operatetype == "输入_cssid":
                                print(WebAutomation().operateInput(operatetype, element, driver, caseid, parameter))
                                time.sleep(waittime)
                            elif operatetype == "输入_cssname":
                                print(WebAutomation().operateInput(operatetype, element, driver, caseid, parameter))
                                time.sleep(waittime)
                            elif operatetype == "清空输入框_id":
                                print(WebAutomation().clearInput(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "清空输入框_xpath":
                                print(WebAutomation().clearInput(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "清空输入框_textname":
                                print(WebAutomation().clearInput(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "查找_id":
                                print(WebAutomation().operateCheckElement(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "查找_xpath":
                                print(WebAutomation().operateCheckElement(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "查找_textname":
                                print(WebAutomation().operateCheckElement(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "查找_linkname":
                                print(WebAutomation().operateCheckElement(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "end":
                                # "end" closes an if-block; it is a no-op that always passes.
                                casereport = "用例编号:%s,执行通过。" % (caseid)
                                print(casereport)
                            elif "if" in operatetype:
                                # Conditional block: if the probed element is absent,
                                # jump x forward to the matching "end" row.
                                if operatetype == "if包含_id":
                                    casereport = WebAutomation().operateCheckElement(operatetype, element, driver, caseid)
                                    if "执行通过" in casereport:
                                        print(casereport)
                                    else:
                                        print(casereport)
                                        if len(endcasenumber) == len(casenumber):
                                            x = endcasenumber[ifnumber]
                                        else:
                                            print("当前用例中的if和and不等,请检查用例")
                                            x = endcasenumber[-1]
                                # NOTE(review): the four elif conditions below test a
                                # constant non-empty string (e.g. `elif "if包含_xpath":`)
                                # instead of `operatetype == "..."`, so the first one is
                                # always truthy and handles every remaining if-type.
                                # Behavior survives only because operateCheckElement
                                # re-dispatches on operatetype — confirm and fix.
                                elif "if包含_xpath":
                                    casereport = WebAutomation().operateCheckElement(operatetype, element, driver, caseid)
                                    if "执行通过" in casereport:
                                        print(casereport)
                                    else:
                                        print(casereport)
                                        if len(endcasenumber) == len(casenumber):
                                            x = endcasenumber[ifnumber]
                                        else:
                                            print("当前用例中的if和and不等,请检查用例")
                                            x = endcasenumber[-1]
                                elif "if包含_classname":
                                    casereport = WebAutomation().operateCheckElement(operatetype, element, driver, caseid)
                                    if "执行通过" in casereport:
                                        print(casereport)
                                    else:
                                        print(casereport)
                                        if len(endcasenumber) == len(casenumber):
                                            x = endcasenumber[ifnumber]
                                        else:
                                            print("当前用例中的if和and不等,请检查用例")
                                            x = endcasenumber[-1]
                                elif "if包含_textname":
                                    casereport = WebAutomation().operateCheckElement(operatetype, element, driver, caseid)
                                    if "执行通过" in casereport:
                                        print(casereport)
                                    else:
                                        print(casereport)
                                        if len(endcasenumber) == len(casenumber):
                                            x = endcasenumber[ifnumber]
                                        else:
                                            print("当前用例中的if和and不等,请检查用例")
                                            x = endcasenumber[-1]
                                elif "if包含_linkname":
                                    casereport = WebAutomation().operateCheckElement(operatetype, element, driver, caseid)
                                    if "执行通过" in casereport:
                                        print(casereport)
                                    else:
                                        print(casereport)
                                        if len(endcasenumber) == len(casenumber):
                                            x = endcasenumber[ifnumber]
                                        else:
                                            print("当前用例中的if和and不等,请检查用例")
                                            x = endcasenumber[-1]
                                else:
                                    casereport = "用例编号:%s操作类型错误,该用例不执行。" % (caseid)
                                    print(casereport)
                                ifnumber = ifnumber + 1
                            elif operatetype == "查找_classname":
                                print(WebAutomation().operateCheckElement(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "右击_id":
                                print(WebAutomation().operateRightClick(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "右击_xpath":
                                print(WebAutomation().operateRightClick(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "右击_textname":
                                print(WebAutomation().operateRightClick(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "右击_linkname":
                                print(WebAutomation().operateRightClick(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "右击_classname":
                                print(WebAutomation().operateRightClick(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "双击_id":
                                print(WebAutomation().operateDoubleClick(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "双击_xpath":
                                print(WebAutomation().operateDoubleClick(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "双击_textname":
                                print(WebAutomation().operateDoubleClick(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "双击_linkname":
                                print(WebAutomation().operateDoubleClick(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "双击_classname":
                                print(WebAutomation().operateDoubleClick(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "按enter_id":
                                print(WebAutomation().operatePhysicsKye(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "按enter_xpath":
                                print(WebAutomation().operatePhysicsKye(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "按enter_textname":
                                print(WebAutomation().operatePhysicsKye(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "pagedown_id":
                                print(WebAutomation().operatePhysicsKye(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "pagedown_xpath":
                                print(WebAutomation().operatePhysicsKye(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "pagedown_textname":
                                print(WebAutomation().operatePhysicsKye(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "pageup_id":
                                print(WebAutomation().operatePhysicsKye(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "pageup_xpath":
                                print(WebAutomation().operatePhysicsKye(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "pageup_textname":
                                print(WebAutomation().operatePhysicsKye(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "浏览器全屏":
                                print(WebAutomation().operatePhysicsKye(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            elif operatetype == "设置分辨率":
                                print(WebAutomation().operatePhysicsKye(operatetype, element, driver, caseid))
                                time.sleep(waittime)
                            else:
                                casereport = "用例编号:%s操作类型错误,该用例不执行。" % (caseid)
                                print(casereport)
                        else:
                            casereport = "用例编号:%s,执行状态为No,故不执行。" % (caseid)
                            print(casereport)
                        endonecasetime = time.time()
                        runonecasetime = round(endonecasetime - startonecasetime, 2)
                        # Persist this case's result row to MySQL.
                        savedata = "insert into automationquery_automation_function_web (`browsername`,`browserconfigure`,`browserstatus`,`operatetype`,`element`,`parameter`,`waittime`,`rundescribe`,`caseexecute`,`runcasetime`,`caseid`,`eventid`,`casereport`,`createdtime`,`updatetime`)VALUES('%s','%s','%s','%s',\'''%s\''','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')" % (
                            browsername, browserconfigure, browserstatus, operatetype, element, parameter, waittime,
                            rundescribe,
                            caseexecute,
                            runonecasetime, caseid, eventid, casereport,
                            time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
                            time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
                        try:
                            launchTime.MysqlConnect().saveDatatoMysql("%s" % (savedata))
                            time.sleep(1)
                        except:
                            print("数据库连接失败,保存数据失败。")
                except:
                    # Any uncaught failure in the case loop tears the browser down.
                    driver.close()
                    driver.quit()
                driver.close()
                driver.quit()
            else:
                print("浏览%s,状态为不执行,故该浏览器上不运行用例。" % (devicesinfocase[0]))
                # NOTE(review): `casereport` is referenced below but is never
                # assigned in this branch — NameError if the first browser row
                # is disabled. Confirm intended value (probably "").
                savedata = "insert into automationquery_automation_function_web (`browsername`,`browserconfigure`,`browserstatus`,`operatetype`,`element`,`parameter`,`waittime`,`rundescribe`,`caseexecute`,`runcasetime`,`caseid`,`eventid`,`casereport`,`createdtime`,`updatetime`)VALUES('%s','%s','%s','%s',\'''%s\''','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')" % (
                    browsername, browserconfigure, browserstatus, "", "", "", "",
                    "",
                    "",
                    "", "", eventid, casereport,
                    time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
                    time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
                try:
                    launchTime.MysqlConnect().saveDatatoMysql("%s" % (savedata))
                    time.sleep(1)
                except:
                    print("数据库连接失败,保存数据失败。")
        tomail = "<EMAIL>,<EMAIL>"
        ccemail = "<EMAIL>"
        # NOTE(review): `casecount` is only assigned inside the enabled-browser
        # branch; if no browser row had status "Y" this raises NameError.
        print(sendReport.SendReport().senderEmail(tomail, ccemail, startautomationtime, casecount))
# Script entry point: run the whole Excel-driven automation suite.
if __name__ == "__main__":
    WebAutomation().runCase()
| 2.796875 | 3 |
iast/views/agent_search.py | luzhongyang/DongTai-webapi | 6 | 12763157 | <gh_stars>1-10
from functools import reduce
from django.core.paginator import Paginator
from django.db.models import Q
from dongtai.endpoint import R, AnonymousAndUserEndPoint
from dongtai.models.agent import IastAgent
from dongtai.models.heartbeat import IastHeartbeat
from dongtai.models.server import IastServer
from rest_framework import serializers
from django.utils.translation import gettext_lazy as _
from iast.utils import extend_schema_with_envcheck, get_response_serializer
from django.forms.models import model_to_dict
from iast.utils import get_model_field
class _AgentSearchQuerysSerializer(serializers.Serializer):
    """Query-parameter schema for the agent search endpoint.

    Used for schema generation/validation of GET query params only.
    NOTE(review): the view reads a page_size default of 10, not the 20
    declared here — confirm which is intended.
    """
    page_size = serializers.IntegerField(default=20,
                                         help_text=_('Number per page'))
    page = serializers.IntegerField(default=1, help_text=_('Page index'))
    token = serializers.CharField(help_text=_('The name of agent'))
    project_name = serializers.CharField(help_text=_(
        "Project name, used to start the agent first and then create the project"
    ))
# Response schema for the OpenAPI docs; status 201 carries the
# "Suspending ..." message.
_ResponseSerializer = get_response_serializer(
    status_msg_keypair=(((201, _('Suspending ...')), ''), ))
class AgentSearch(AnonymousAndUserEndPoint):
    """Paginated agent search, joining each agent with its server and heartbeat."""

    @extend_schema_with_envcheck(
        [_AgentSearchQuerysSerializer],
        tags=[_('Agent')],
        summary=_('Agent Search'),
        description=_(
            "Search for the agent corresponding to the user according to the following parameters"
        ),
        response_schema=_ResponseSerializer,
    )
    def get(self, request):
        """Return agents visible to the user matching the query filters.

        Query params: page, page_size, plus icontains filters on the
        whitelisted fields ('token', 'project_name'), OR-combined.
        """
        # NOTE(review): serializer documents page_size default 20; code uses 10.
        page_size = int(request.query_params.get('page_size', 10))
        page = int(request.query_params.get('page', 1))
        fields = get_model_field(
            IastAgent,
            include=['token', 'project_name'],
        )
        # Fix: the original filtered the query params twice with the identical
        # `k in fields` condition (searchfields then searchfields_); one dict
        # comprehension is equivalent.
        searchfields = {
            k: v
            for k, v in request.query_params.items() if k in fields
        }
        # OR together one icontains filter per supplied search field.
        q = reduce(
            lambda acc, cond: acc | cond,
            (Q(**{'__'.join([field, 'icontains']): value})
             for field, value in searchfields.items()),
            Q())
        agents = self.get_auth_and_anonymous_agents(request.user)
        # Restrict to agents the user is allowed to see.
        q = q & Q(id__in=[agent['id'] for agent in agents])
        queryset = IastAgent.objects.filter(q).order_by('-latest_time').all()
        summary, agents = self.get_paginator(queryset, page, page_size)
        servers = IastServer.objects.filter(
            pk__in=[agent['server_id'] for agent in agents]).all().values()
        heartbeats = IastHeartbeat.objects.filter(
            agent_id__in=[agent['id'] for agent in agents]).all().values()
        servers = {server['id']: server for server in servers}
        heartbeats = {heartbeat['agent_id']: heartbeat for heartbeat in heartbeats}
        # Flatten server_* / heartbeat_* attributes alongside each agent id.
        relations = []
        for agent in agents:
            item = {}
            item['agent_id'] = agent['id']
            server = servers.get(agent['server_id'], None)
            if server:
                for k, v in server.items():
                    item['_'.join(['server', k])] = v
            heartbeat = heartbeats.get(agent['id'], None)
            if heartbeat:
                for k, v in heartbeat.items():
                    item['_'.join(['heartbeat', k])] = v
            relations.append(item)
        return R.success(
            data={
                'agents': [model_to_dict(agent) for agent in agents],
                'summary': summary,
                'relations': relations,
            })
| 1.96875 | 2 |
mangle-infra-agent/Faults/NetworkFaults.py | vmaligireddy/mangle | 151 | 12763158 | from enum import Enum
class NetworkFaults(Enum):
    """Network fault-injection types supported by the infra agent.

    The integer values are part of the agent protocol — do not renumber.
    """
    NETWORK_DELAY_MILLISECONDS = 1
    PACKET_DUPLICATE_PERCENTAGE = 2
    PACKET_CORRUPT_PERCENTAGE = 3
    # Fix: trailing dataset-extraction residue ("| 2.203125 | 2 |") had been
    # fused onto this line, turning it into an invalid `4 | 2.203125` expression.
    PACKET_LOSS_PERCENTAGE = 4
scripts/dataloader.py | haohao11/AMENet | 4 | 12763159 | # -*- coding: utf-8 -*-
"""
Created on Fri Apr 24 13:50:54 2020
This is the load to load data based on occupancy maps
@author: cheng
"""
import numpy as np
import time
import os
from augmentation import rotation
from maps import Maps
from occupancy import circle_group_grid
def loaddata(dataset_list, args, datatype="train"):
    """Load preprocessed offsets, trajectories and occupancy grids.

    Args:
        dataset_list: names of the .npz files to load (without extension).
        args: namespace with obs_seq, pred_seq and enviro_pdim attributes.
        datatype: "train", "test" or "challenge".

    Returns:
        (offsets, traj_data, occupancy) numpy arrays concatenated over all
        requested datasets.

    Fixes: the original checked ``elif datatype == "test"`` and
    ``elif datatype == "challenge"`` *after* branches that already consumed
    those values, so the sanity asserts were unreachable; they are now hoisted
    into the reachable branches. The assert messages were also ``print(...)``
    calls (which evaluate to None); plain strings are used instead.
    """
    if datatype == "train" or datatype == "test":
        if datatype == "test":
            # Previously an unreachable elif branch.
            assert len(dataset_list) == 1, "Only one untouched dataset is left for testing!"
        # All the datasets are merged for training.
        offsets = np.empty((0, args.obs_seq+args.pred_seq-1, 8))
        traj_data = np.empty((0, args.obs_seq+args.pred_seq, 4))
        occupancy = np.empty((0, args.obs_seq+args.pred_seq-1, args.enviro_pdim[0], args.enviro_pdim[1], 3))
        if dataset_list[0] == "train_merged":
            # TODO: make this path compatible with Linux
            data = np.load("../processed_data/train/%s.npz" % (dataset_list[0]))
            _offsets, _traj_data, _occupancy = data["offsets"], data["traj_data"], data["occupancy"]
            print(dataset_list[0], "contains %.0f trajectories" % len(_offsets))
            offsets = np.concatenate((offsets, _offsets), axis=0)
            traj_data = np.concatenate((traj_data, _traj_data), axis=0)
            occupancy = np.concatenate((occupancy, _occupancy), axis=0)
        else:
            for i, dataset in enumerate(dataset_list):
                # Only take the original (non-merged) data.
                # TODO: test whether augmentation boosts performance.
                if dataset != "train_merged":
                    data = np.load("../processed_data/train/%s.npz" % (dataset))
                    _offsets, _traj_data, _occupancy = data["offsets"], data["traj_data"], data["occupancy"]
                    print(dataset, "contains %.0f trajectories" % len(_offsets))
                    offsets = np.concatenate((offsets, _offsets), axis=0)
                    traj_data = np.concatenate((traj_data, _traj_data), axis=0)
                    occupancy = np.concatenate((occupancy, _occupancy), axis=0)
    elif datatype == "challenge":
        # NOTE: challenge datasets are NOT merged — the submission requires
        # each of the 20 challenge sets to be predicted separately, so each
        # call handles exactly one set. (Previously an unreachable assert.)
        assert len(dataset_list) == 1, "predict one by one"
        offsets = np.empty((0, args.obs_seq-1, 8))
        traj_data = np.empty((0, args.obs_seq, 4))
        occupancy = np.empty((0, args.obs_seq-1, args.enviro_pdim[0], args.enviro_pdim[1], 3))
        for dataset in dataset_list:
            data = np.load("../processed_data/challenge/%s.npz" % (dataset))
            _offsets, _traj_data, _occupancy = data["offsets"], data["traj_data"], data["occupancy"]
            offsets = np.concatenate((offsets, _offsets), axis=0)
            traj_data = np.concatenate((traj_data, _traj_data), axis=0)
            occupancy = np.concatenate((occupancy, _occupancy), axis=0)
    else:
        # Previously fell through and raised UnboundLocalError on return.
        raise ValueError("datatype must be 'train', 'test' or 'challenge', got %r" % (datatype,))
    if datatype == "train":
        if not os.path.exists("../processed_data/train/train_merged.npz"):
            # Cache the merged training data for faster subsequent loads.
            np.savez("../processed_data/train/train_merged.npz",
                     offsets=offsets,
                     traj_data=traj_data,
                     occupancy=occupancy)
    return offsets, traj_data, occupancy
def preprocess_data(seq_length, size, dirname, path=None, data=None, aug_num=1, save=True):
    '''
    Extract offsets, trajectories and occupancy grids from one raw data file
    (or from already-predicted trajectories) and optionally save them.

    Parameters
    ----------
    seq_length : int
        This is the complete length of each trajectory offset and occupancy.
        Note: one-step difference between offset/occupancy and traj_data.
    size : [height, width, channels]
        The occupancy grid size and channels:
        orientation, speed and position for the neighbors in the vicinity.
    dirname : string
        "train" or "challenge"
    path : string, optional
        Source file to read when ``data`` is not supplied; also provides the
        name under which processed arrays are saved.
    data : numpy, optional
        The predicted complete trajectories after the first prediction,
        used to calculate the occupancy in the predicted time.
        NOTE: when ``data`` is supplied, call with ``save=False`` -- the save
        branch needs a file-derived name that only exists when ``path`` is used.
    aug_num : int, optional
        The number of rotations used to augment the data.
    save : boolean, optional
        Save the processed data instead of returning it. The default is True.

    Returns
    -------
    offsets : numpy array
        [frameId, userId, x, y, delta_x, delta_y, theta, velocity].
    traj_data : numpy array
        [frameId, userId, x, y] -- note: one step longer than offsets.
    occupancy : numpy array
        [height, width, channels].
        Only returned when ``save`` is False.
    '''
    start = time.time()
    # BUG FIX: the original test was `np.all(data) == None`, which compares a
    # numpy bool to None and is never True, so the load branch could be
    # skipped even when no data was supplied. `data is None` is the intent.
    if data is None:
        data = np.genfromtxt(path, delimiter='')
        # challenge datasets have nan for prediction time steps
        data = data[~np.isnan(data).any(axis=1)]
        # os.path handles both POSIX and Windows separators, unlike the old
        # hard-coded split on '\\' (the file's own "compatible with linux" ToDo).
        dataname = os.path.splitext(os.path.basename(path))[0]
        print("process data %s ..." % dataname)
    for r in range(aug_num):
        # Augment the data by rotation when more than one pass is requested.
        # NOTE(review): `r/aug_num` assumes Python 3 true division -- confirm
        # if this code is ever run under Python 2.
        if r > 0:
            data[:, 2:4] = rotation(data[:, 2:4], r/aug_num)
        # Get the environment maps
        maps = Maps(data)
        traj_map = maps.trajectory_map()
        orient_map, speed_map = maps.motion_map(max_speed=10)
        map_info = [traj_map, orient_map, speed_map]
        enviro_maps = concat_maps(map_info)
        print("enviro_maps shape", enviro_maps.shape)
        offsets = np.reshape(maps.offsets, (-1, seq_length, 8))
        print("offsets shape", offsets.shape)
        traj_data = np.reshape(maps.sorted_data, (-1, seq_length+1, 4))
        print("traj_data shape", traj_data.shape)
        occupancy = circle_group_grid(offsets, maps.sorted_data, size)
        print("occupancy shape", occupancy.shape)
        if save:
            if r == 0:
                # Save the original (unrotated) arrays
                np.savez("../processed_data/%s/%s" % (dirname, dataname),
                         offsets=offsets,
                         traj_data=traj_data,
                         occupancy=occupancy)
                end = time.time()
            else:
                # Save the rotated one(s) with the rotation index as suffix
                np.savez("../processed_data/%s/%s_%.0f" % (dirname, dataname, r),
                         offsets=offsets,
                         traj_data=traj_data,
                         occupancy=occupancy)
                end = time.time()
            print("It takes ", round(end-start, 2), "seconds!\n")
        else:
            # NOTE: returns after the first rotation pass, as in the original.
            return offsets, traj_data, occupancy
def concat_maps(map_info):
    """Stack the given 2-D maps into one (H, W, C) float array, one channel per map."""
    height = map_info[0].shape[0]
    width = map_info[0].shape[1]
    stacked = np.empty((height, width, len(map_info)))
    for channel, feature_map in enumerate(map_info):
        stacked[..., channel] = feature_map
    return stacked
| 2.46875 | 2 |
erica/application/FreischaltCode/FreischaltCodeRevocationService.py | punknoir101/erica-1 | 0 | 12763160 | <reponame>punknoir101/erica-1
import datetime
from abc import abstractmethod, ABCMeta
from uuid import uuid4
from opyoid import Injector, Module
from rq import Retry
from erica.application.EricRequestProcessing.erica_input.v1.erica_input import UnlockCodeRevocationData
from erica.application.EricRequestProcessing.requests_controller import UnlockCodeRevocationRequestController
from erica.application.EricaAuftrag.EricaAuftrag import EricaAuftragDto
from erica.application.FreischaltCode.FreischaltCode import FreischaltCodeRevocateDto
from erica.application.FreischaltCode.Jobs.jobs import request_freischalt_code
from erica.domain.BackgroundJobs.BackgroundJobInterface import BackgroundJobInterface
from erica.domain.EricaAuftrag.EricaAuftrag import EricaAuftrag
from erica.domain.FreischaltCode.FreischaltCode import FreischaltCodeRevocatePayload
from erica.domain.Shared.EricaAuftrag import AuftragType
from erica.infrastructure.InfrastructureModule import InfrastructureModule
from erica.infrastructure.rq.RqModule import RqModule
from erica.infrastructure.sqlalchemy.repositories.EricaAuftragRepository import EricaAuftragRepository
# Shared DI container, built once at import time and used by the classes below.
injector = Injector([InfrastructureModule(), RqModule()])
class FreischaltCodeRevocationServiceInterface(metaclass=ABCMeta):
    """Abstract interface for revoking an Elster unlock code (Freischaltcode).

    BUG FIX: the class previously used the Python 2 idiom
    ``__metaclass__ = ABCMeta``, which is inert on Python 3 (this module uses
    ``async def``, so it runs on Python 3) -- the abstract methods were
    therefore not enforced. ``metaclass=ABCMeta`` restores enforcement.
    """

    @abstractmethod
    def freischalt_code_bei_elster_deaktivieren_queued(self,
                                                       freischaltcode_dto: FreischaltCodeRevocateDto) -> EricaAuftragDto:
        """Queue the revocation as a background job and return the created order."""

    @abstractmethod
    def freischalt_code_bei_elster_deaktivieren(self, freischaltcode_dto: FreischaltCodeRevocateDto,
                                                include_elster_responses: bool):
        """Send the revocation request to Elster synchronously."""
class FreischaltCodeRevocationService(FreischaltCodeRevocationServiceInterface):
    """Revokes Elster unlock codes, either queued via RQ or synchronously."""

    # Repository used to persist the revocation orders.
    freischaltcode_repository: EricaAuftragRepository

    def __init__(self, repository: EricaAuftragRepository = injector.inject(EricaAuftragRepository)) -> None:
        # NOTE(review): the default argument is resolved once at import time
        # from the module-level injector -- presumably intentional for this DI
        # pattern, so all default-constructed services share one repository.
        super().__init__()
        self.freischaltcode_repository = repository

    async def freischalt_code_bei_elster_deaktivieren_queued(self,
                                                             freischaltcode_dto: FreischaltCodeRevocateDto) -> EricaAuftragDto:
        """Persist a revocation order and enqueue it as a background job.

        The RQ job id equals the order's job_id so the two can be correlated;
        the job is retried up to 3 times with a 1 second interval.
        """
        job_id = uuid4()
        freischaltcode = EricaAuftrag(job_id=job_id,
                                      payload=FreischaltCodeRevocatePayload.parse_obj(freischaltcode_dto),
                                      created_at=datetime.datetime.now().__str__(),
                                      updated_at=datetime.datetime.now().__str__(),
                                      creator_id="api",
                                      type=AuftragType.freischalt_code_revocate
                                      )
        created = self.freischaltcode_repository.create(freischaltcode)
        background_worker = injector.inject(BackgroundJobInterface)
        background_worker.enqueue(request_freischalt_code,
                                  created.id,
                                  retry=Retry(max=3, interval=1),
                                  job_id=job_id.__str__()
                                  )
        return EricaAuftragDto.parse_obj(created)

    async def freischalt_code_bei_elster_deaktivieren(self, freischaltcode_dto: FreischaltCodeRevocateDto,
                                                      include_elster_responses: bool = False):
        """Send the revocation request to Elster immediately and return its result."""
        request = UnlockCodeRevocationRequestController(UnlockCodeRevocationData.parse_obj(
            {"idnr": freischaltcode_dto.tax_ident, "elster_request_id": freischaltcode_dto.elster_request_id}),
            include_elster_responses)
        return request.process()
class FreischaltCodeRevocationServiceModule(Module):
    """opyoid DI module binding the service interface to its implementation."""
    def configure(self) -> None:
        self.bind(FreischaltCodeRevocationServiceInterface, to_class=FreischaltCodeRevocationService)
| 1.78125 | 2 |
ample/util/tests/test_tm_util.py | fsimkovic/ample | 6 | 12763161 | """Test functions for util.tm_util"""
import unittest
from ample.testing import test_funcs
from ample.util import ample_util, tm_util
@unittest.skipUnless(test_funcs.found_exe("TMscore" + ample_util.EXE_EXT), "TMscore exec missing")
class TestTM(unittest.TestCase):
    """Tests for TMscore._find_gaps: a gap flag per character, True at '-'."""

    def _gaps(self, sequence):
        # Run the gap finder on a throw-away TMscore instance.
        return tm_util.TMscore("TMscore", wdir=".")._find_gaps(sequence)

    def test_gaps_1(self):
        expected = [False, False, False, False, True, True, True,
                    False, False, True, True, False, False]
        self.assertEqual(expected, self._gaps("AAAA---AA--AA"))

    def test_gaps_2(self):
        expected = [True, True, True, False, False, True, True, False, False]
        self.assertEqual(expected, self._gaps("---AA--AA"))

    def test_gaps_3(self):
        expected = [True, False, False, False, True, True]
        self.assertEqual(expected, self._gaps("-AAA--"))
if __name__ == "__main__":
unittest.main()
| 2.25 | 2 |
classification/model.py | rechardchen123/Classification-and-regression-of-vessel-AIS-data | 7 | 12763162 | # Copyright 2017 Google Inc. and Skytruth Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
from collections import namedtuple
import logging
import numpy as np
import tensorflow as tf
import tensorflow.contrib.slim as slim
import tensorflow.contrib.metrics as metrics
import utility
class ModelBase(object):
    """Abstract base class for vessel-classification models.

    NOTE(review): ``__metaclass__ = abc.ABCMeta`` only has effect on
    Python 2; under Python 3 the assignment is inert and
    ``@abc.abstractmethod`` is not enforced. Left unchanged because this
    tensorflow.contrib-era code may still target Python 2 -- confirm
    before modernising.
    """
    __metaclass__ = abc.ABCMeta

    @property
    def number_of_steps(self):
        """Number of training examples to use"""
        return 500000

    @property
    def use_ranges_for_training(self):
        """Choose features overlapping with provided ranges during training"""
        return False

    @property
    def batch_size(self):
        """Mini-batch size used for training and evaluation."""
        return 64

    @property
    def max_window_duration_seconds(self):
        """ Window max duration in seconds. A value of zero indicates that
            we would instead like to choose a fixed-length window. """
        return None

    # We often allocate a much smaller buffer than would fit the specified time
    # sampled at 5 mins intervals, on the basis that the sample is almost
    # always much more sparse.
    @property
    def window_max_points(self):
        """Maximum number of feature points per window (None = unset here)."""
        return None

    @property
    def min_viable_timeslice_length(self):
        """Shortest timeslice (in points) considered usable."""
        return 500

    @property
    def max_replication_factor(self):
        """Upper bound on per-vessel oversampling when weighting the training list."""
        return 100.0

    def __init__(self, num_feature_dimensions, vessel_metadata):
        # vessel_metadata may be None at inference time; in that case the
        # fishing-range map is also unavailable.
        self.num_feature_dimensions = num_feature_dimensions
        if vessel_metadata:
            self.vessel_metadata = vessel_metadata
            self.fishing_ranges_map = vessel_metadata.fishing_ranges_map
        else:
            self.vessel_metadata = None
            self.fishing_ranges_map = None
        # Populated by subclasses that define training objectives.
        self.training_objectives = None

    def build_training_file_list(self, base_feature_path, split):
        """Return a weighted, possibly repeated list of per-vessel tfrecord paths.

        For non-training splits the list length is padded to a multiple of the
        batch size (boundary).
        """
        boundary = 1 if (split == utility.TRAINING_SPLIT) else self.batch_size
        random_state = np.random.RandomState()
        training_mmsis = self.vessel_metadata.weighted_training_list(
            random_state,
            split,
            self.max_replication_factor,
            boundary=boundary)
        return [
            '%s/%s.tfrecord' % (base_feature_path, mmsi)
            for mmsi in training_mmsis
        ]

    @staticmethod
    def read_metadata(all_available_mmsis,
                      metadata_file,
                      fishing_ranges,
                      fishing_upweight=1.0):
        """Load multiclass vessel metadata (delegates to utility)."""
        return utility.read_vessel_multiclass_metadata(
            all_available_mmsis, metadata_file, fishing_ranges,
            fishing_upweight)

    @abc.abstractmethod
    def build_training_net(self, features, timestamps, mmsis):
        """Build net suitable for training model

        Args:
            features : features to feed into net
            timestamps: a list of timestamps, one for each feature point.
            mmsis: a list of mmsis, one for each batch element.

        Returns:
            TrainNetInfo
        """
        optimizer = trainers = None
        return optimizer, trainers

    @abc.abstractmethod
    def build_inference_net(self, features, timestamps, mmsis):
        """Build net suitable for running inference on model

        Args:
            features : features to feed into net
            timestamps: a list of timestamps, one for each feature point.
            mmsis: a list of mmsis, one for each batch element.

        Returns:
            A list of objects derived from EvaluationBase providing
            functionality to log evaluation statistics as well as to
            return the results of inference as JSON.
        """
        return []
| 2.03125 | 2 |
custom_components/populartimes/sensor.py | disrupted/hass-populartimes | 0 | 12763163 | """Support for Google Places API."""
from datetime import timedelta
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_API_KEY, CONF_ID, CONF_NAME
from homeassistant.helpers.entity import Entity
import homeassistant.helpers.config_validation as cv
import logging
import populartimes
import voluptuous as vol
_LOGGER = logging.getLogger(__name__)

# Require the Google Places API key plus the place id and a friendly sensor name.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_API_KEY): cv.string,
        vol.Required(CONF_ID): cv.string,
        vol.Required(CONF_NAME): cv.string,
    }
)

# Poll the Places API at most once every 10 minutes.
SCAN_INTERVAL = timedelta(minutes=10)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the popular times sensor from a platform configuration entry."""
    sensor = PopularTimesSensor(config["api_key"], config["id"], config["name"])
    add_entities([sensor], True)
class PopularTimesSensor(Entity):
    """Sensor exposing the current Google Maps popularity (%) of one place."""

    # Weekday order matching the populartimes result list (Monday first).
    _DAYS = ("monday", "tuesday", "wednesday", "thursday",
             "friday", "saturday", "sunday")

    def __init__(self, api_key, id, name):
        self._api_key = api_key
        self._id = id      # Google Places place id
        self._name = name  # friendly name shown in Home Assistant
        self._state = None
        # Static place info plus one hourly popularity list per weekday.
        self._attributes = {"maps_name": None, "address": None}
        for day in self._DAYS:
            self._attributes["popularity_%s" % day] = None

    @property
    def name(self):
        return self._name

    @property
    def state(self):
        return self._state

    @property
    def unit_of_measurement(self):
        return "%"

    @property
    def state_attributes(self):
        return self._attributes

    def update(self):
        """Get the latest data from Google Places API."""
        try:
            result = populartimes.get_id(self._api_key, self._id)
            self._attributes["address"] = result["address"]
            self._attributes["maps_name"] = result["name"]
            # result["populartimes"] is a 7-element list, Monday first.
            for i, day in enumerate(self._DAYS):
                self._attributes["popularity_%s" % day] = result["populartimes"][i]["data"]
            # A missing "current_popularity" key counts as 0%.
            self._state = result.get("current_popularity", 0)
        except Exception:
            # BUG FIX: narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit are no longer swallowed; failures keep the last state.
            _LOGGER.error("No popularity info returned by the populartimes library.")
| 2.46875 | 2 |
models/detectron2_detector.py | allenai/interactron | 4 | 12763164 | <filename>models/detectron2_detector.py
import torch
import torch.nn as nn
import torch.nn.functional as F
import detectron2
from detectron2 import model_zoo
from detectron2.engine import DefaultPredictor
from detectron2.config import get_cfg
import detectron2.data.transforms as T
from detectron2.structures import Boxes, Instances
import torchvision
from detectron2.utils.visualizer import Visualizer
from detectron2.data import MetadataCatalog, DatasetCatalog
from utils.detection_utils import Prediction
class Detectron2Detector(nn.Module):
    """Faster R-CNN (R50-DC5) wrapper that exposes intermediate features.

    Runs the backbone/RPN under no_grad and only keeps the ROI heads
    trainable (see ``train``); results are packed into a ``Prediction``.
    """

    def __init__(
        self,
        config,
        model_config="COCO-Detection/faster_rcnn_R_50_DC5_1x.yaml",
    ):
        super().__init__()
        cfg = get_cfg()
        # add project-specific config (e.g., TensorMask) here if you're not running a model in detectron2's core library
        cfg.merge_from_file(model_zoo.get_config_file(model_config))
        cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.0  # set threshold for this model
        # Find a model from detectron2's model zoo. You can use the https://dl.fbaipublicfiles... url as well
        cfg.MODEL.DEVICE = 'cpu'
        # Load custom model (1235 classes; weights path comes from the project config)
        cfg.MODEL.ROI_HEADS.NUM_CLASSES = 1235
        cfg.MODEL.WEIGHTS = config.DETECTOR.WEIGHTS
        # Extract model from detectron2 predictor
        cfg.INPUT.MIN_SIZE_TEST = config.TEST_RESOLUTION
        self.aug = T.ResizeShortestEdge(
            [cfg.INPUT.MIN_SIZE_TEST, cfg.INPUT.MIN_SIZE_TEST], cfg.INPUT.MAX_SIZE_TEST
        )
        model = DefaultPredictor(cfg).model
        self.model = model
        self.resolution = config.TEST_RESOLUTION
        self.logger = None
        self.mode = 'train'

    def forward(self, x):
        """Run detection on a batch object `x` and return a filled Prediction.

        NOTE(review): `x` is assumed to provide get_images/batch_size/seq_len/
        device (project batch type) -- confirm against the caller.
        """
        with torch.no_grad():
            batched_inputs = self.preprocess_image(x.get_images(flat=True))
            images = self.model.preprocess_image(batched_inputs)
            features = self.backbone_forward(images.tensor)
            proposals, _ = self.model.proposal_generator(images, features, None)
        predictions = Prediction(x.batch_size, x.seq_len, x.device, logger=self.logger, mode=self.mode)
        predictions.set_image_features(features['res5'], flat=True)
        predictions = self.roi_heads_forward(features, proposals, predictions)
        return predictions

    def backbone_forward(self, x):
        # alternative backbone forward function that works with DataParallel
        outputs = {}
        x = self.model.backbone.stem(x)
        x = self.model.backbone.res2(x)
        x = self.model.backbone.res3(x)
        x = self.model.backbone.res4(x)
        x = self.model.backbone.res5(x)
        outputs['res5'] = x
        return outputs

    def roi_heads_forward(self, features, proposals, predictions):
        """Pool, classify and regress boxes for the (padded) proposals."""
        pruned_proposals = self.prune_proposals(proposals)
        features = [features[f] for f in self.model.roi_heads.box_in_features]
        box_features = self.model.roi_heads.box_pooler(features, [x.proposal_boxes for x in pruned_proposals])
        box_features = self.model.roi_heads.box_head(box_features)
        logits, boxes = self.model.roi_heads.box_predictor(box_features)
        # select only the box of the chosen categories
        box_class_indices = logits[:, :-1].argmax(dim=-1)
        boxes = boxes.view(boxes.shape[0], -1, 4)
        boxes = boxes[torch.arange(boxes.shape[0]), box_class_indices]
        # reshape outputs to (batch, proposals-per-image, ...)
        b = len(pruned_proposals)
        n = logits.shape[0] // b
        logits = logits.view(b, n, -1)
        box_features = box_features.view(b, n, -1)
        boxes = boxes.view(b, n, -1)
        # combine box anchors and box offsets
        # NOTE(review): this loop reuses `n` as its index, shadowing the
        # proposals-per-image count above -- harmless here (n is not read
        # again) but worth renaming.
        for n in range(len(pruned_proposals)):
            boxes[n] += pruned_proposals[n].proposal_boxes.tensor
        predictions.set_logits(logits, flat=True)
        predictions.set_boxes(boxes, flat=True)
        predictions.set_box_features(box_features, flat=True)
        return predictions

    def preprocess_image(self, image):
        """Resize NHWC uint8/float images to the test resolution and wrap
        them in the list-of-dicts format detectron2 expects."""
        height, width = image.shape[1:3]
        image = F.interpolate(image.permute(0, 3, 1, 2).float(), size=self.resolution, mode='bilinear')
        img = [{"image": x, "height": height, "width": width} for x in image]
        return img

    def prune_predictions(self, logits, boxes, box_features, backbone_boxes, k=50):
        """Class-aware NMS, then pad every image to exactly k predictions.

        Padded logit rows put all mass on the background (last) class.
        """
        pruned_logits = torch.zeros(logits.shape[0], k, logits.shape[2], device=logits.device)
        pruned_logits[:, :, -1] = 1.0
        pruned_boxes = torch.zeros(boxes.shape[0], k, boxes.shape[2], device=boxes.device)
        pruned_backbone_boxes = torch.zeros_like(pruned_boxes)
        pruned_box_features = torch.zeros(box_features.shape[0], k, box_features.shape[2], device=box_features.device)
        for n in range(logits.shape[0]):
            cats = logits[n, :, :-1].argmax(dim=-1)
            scores, _ = torch.max(F.softmax(logits[n], dim=-1)[:, :-1], dim=-1)
            pruned_indexes = torchvision.ops.batched_nms(boxes[n], scores, cats, iou_threshold=0.5)[:k]
            t = pruned_indexes.shape[0]
            pruned_logits[n][:t] = logits[n][pruned_indexes]
            pruned_boxes[n][:t] = boxes[n][pruned_indexes]
            pruned_box_features[n][:t] = box_features[n][pruned_indexes]
            pruned_backbone_boxes[n][:t] = backbone_boxes[n][pruned_indexes]
        return pruned_logits, pruned_boxes, pruned_box_features, pruned_backbone_boxes

    def prune_proposals(self, proposals, k=1000):
        """Zero-pad every image's proposals to exactly k entries."""
        pruned_proposals = []
        for n in range(len(proposals)):
            padded_logits = torch.zeros(k, device=proposals[n].objectness_logits.device)
            padded_logits[:proposals[n].objectness_logits.shape[0]] = proposals[n].objectness_logits
            padded_boxes = torch.zeros(k, 4, device=proposals[n].objectness_logits.device)
            padded_boxes[:proposals[n].proposal_boxes.tensor.shape[0]] = proposals[n].proposal_boxes.tensor
            padded_boxes = Boxes(padded_boxes)
            pruned_proposals.append(Instances(
                image_size=proposals[n].image_size,
                objectness_logits=padded_logits,
                proposal_boxes=padded_boxes
            ))
        return pruned_proposals

    def eval(self):
        return self.train(False)

    def train(self, mode=True):
        self.mode = 'train' if mode else 'test'
        # only train the ROI heads of the detector; backbone and RPN stay in
        # eval mode regardless of `mode`.
        self.model.backbone.eval()
        self.model.proposal_generator.eval()
        # self.model.backbone.train(mode)
        # self.model.proposal_generator.train(mode)
        self.model.roi_heads.train(mode)
        # self.model.roi_heads.eval()
        return self

    def get_optimizer_groups(self, train_config):
        """Parameter groups for the optimizer: only the ROI heads are trained."""
        optim_groups = [{
            "params": list(self.model.roi_heads.parameters()), "weight_decay": train_config.WEIGHT_DECAY
        }]
        # optim_groups = []
        return optim_groups

    def set_logger(self, logger):
        assert self.logger is None, "This model already has a logger!"
        self.logger = logger
| 2.359375 | 2 |
sparsereg/util/pipeline.py | Thomasillo/sparsereg | 0 | 12763165 | <reponame>Thomasillo/sparsereg
from sklearn.base import BaseEstimator, TransformerMixin
class ColumnSelector(TransformerMixin, BaseEstimator):
    """Select a subset of feature columns from a 1-D or 2-D array.

    ``index`` may be a slice or a boolean mask over the input features.
    """

    def __init__(self, index=slice(None)):
        self.index = index
        self.n_features = None

    def fit(self, x, y=None):
        """Record the number of input features; returns self."""
        if len(x.shape) == 2:
            _, self.n_features = x.shape
        else:
            self.n_features = x.shape[0]
        return self

    def transform(self, x, y=None):
        """Return the selected columns, always as a 2-D array."""
        xnew = x[..., self.index]
        if len(xnew.shape) == 2:
            return xnew
        # A single selected column comes back 1-D; reshape to a column vector.
        return xnew.reshape(-1, 1)

    def get_feature_names(self, input_features=None):
        """Return the names of the selected features.

        BUG FIX: the mask was previously read from an undefined global
        ``index`` (NameError whenever index != slice(None)); it must be
        ``self.index``.
        """
        input_features = input_features or ["x_{}".format(i) for i in range(self.n_features)]
        if self.index == slice(None):
            return input_features
        return [n for i, n in zip(self.index, input_features) if i]
| 2.578125 | 3 |
tests/generators/transition/main.py | jacobkaufmann/consensus-specs | 2,161 | 12763166 | from typing import Iterable
from eth2spec.test.helpers.constants import ALTAIR, MINIMAL, MAINNET, PHASE0
from eth2spec.test.altair.transition import (
test_transition as test_altair_transition,
test_activations_and_exits as test_altair_activations_and_exits,
test_leaking as test_altair_leaking,
test_slashing as test_altair_slashing,
test_operations as test_altair_operations,
)
from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing
from eth2spec.gen_helpers.gen_from_tests.gen import generate_from_tests
def create_provider(tests_src, preset_name: str, pre_fork_name: str, post_fork_name: str) -> gen_typing.TestProvider:
    """Wrap a transition-test module as a TestProvider for the generator runner.

    ``tests_src`` is the imported test module; the remaining arguments select
    the preset ("minimal"/"mainnet") and the pre-/post-fork phase names.
    """
    def prepare_fn() -> None:
        # Transition tests need no per-provider setup.
        return

    def cases_fn() -> Iterable[gen_typing.TestCase]:
        return generate_from_tests(
            runner_name='transition',
            handler_name='core',
            src=tests_src,
            fork_name=post_fork_name,
            phase=pre_fork_name,
            preset_name=preset_name,
        )

    return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn)
# (pre_fork, post_fork, test module) triples; each module below is generated
# for both the minimal and mainnet presets.
TRANSITION_TESTS = (
    (PHASE0, ALTAIR, test_altair_transition),
    (PHASE0, ALTAIR, test_altair_activations_and_exits),
    (PHASE0, ALTAIR, test_altair_leaking),
    (PHASE0, ALTAIR, test_altair_slashing),
    (PHASE0, ALTAIR, test_altair_operations),
)


if __name__ == "__main__":
    for pre_fork, post_fork, transition_test_module in TRANSITION_TESTS:
        gen_runner.run_generator("transition", [
            create_provider(transition_test_module, MINIMAL, pre_fork, post_fork),
            create_provider(transition_test_module, MAINNET, pre_fork, post_fork),
        ])
| 1.90625 | 2 |
ws_lcd/layout_222.py | hnikolov/ws_lcd | 0 | 12763167 | # -*- coding: utf-8 -*-
import time
from layout import Layout
from component import *
class Layout_222(Layout):
    """128-px-wide LCD layout: a date/time header row, one row each for
    water, gas and electricity (icon / value / unit), and a bar graph."""

    def __init__(self):
        super(Layout_222, self).__init__(color = "black")
        self.ch1 = 18 # component height 1 (header row)
        self.ch2 = 26 # component height 2 (meter rows)
        self.sh1 = 2 # separator height 1
        self.bar = 25  # bar graph height
        # Offsets: each row starts below the previous row plus a separator.
        self.row_1_y = self.ch1
        self.sep_2_y = self.row_1_y + self.ch2
        self.row_2_y = self.sep_2_y + self.sh1
        self.sep_3_y = self.row_2_y + self.ch2
        self.row_3_y = self.sep_3_y + self.sh1
        self.sep_4_y = self.row_3_y + self.ch2
        self.row_4_y = self.sep_4_y + self.sh1
        # Build the layout
        # Header: date (left, 72 px) and time (right, 56 px) -- 128 px total.
        self.cdate = Component(72, self.ch1, font_size=14, bg_color=0, align=1)
        self.cdate.set_position(0, 0)
        self.cdate.set(time.strftime('%d-%b'))
        self.ctime = Component(56, self.ch1, font_size=14, bg_color=0, align=1)
        self.ctime.set_position(72, 0)
        self.ctime.set(time.strftime('%H:%M'))
        # self.ctime.draw_borders()
        # Water row: icon / value / unit.
        self.wi = Component(self.ch2, self.ch2, font_size=20, image='tap-water1.jpg')
        self.wi.set_position(4, self.row_1_y)
        # self.wi.draw_borders()
        self.wv = Component(68, self.ch2, font_size=18)
        self.wv.set_position(30, self.row_1_y)
        self.wu = Component(self.ch2, self.ch2, font_size=16)
        self.wu.set_position(98, self.row_1_y)
        self.wu.set_text("Lit", 0, align=0)
        # Gas row: icon / value (2 decimals) / unit.
        self.gi = Component(self.ch2, self.ch2, font_size=20, image='gas_32x32.png')
        # self.gi = Component(self.ch2, self.ch2, font_size=16)
        # self.gi.set_text("1h", align=1)
        self.gi.set_position(4, self.row_2_y)
        self.gv = Component(68, self.ch2, font_size=18, format_string="{0:.2f}")
        self.gv.set_position(30, self.row_2_y)
        self.gu = Component(self.ch2, self.ch2, font_size=16)
        self.gu.set_position(98, self.row_2_y)
        self.gu.set_text("m" + u'\u00B3', 0, align=0)
        # Electricity row: icon / value (3 decimals) / unit.
        self.ei = Component(self.ch2, self.ch2, font_size=20, image='plug1.png')
        self.ei.set_position(4, self.row_3_y)
        self.ev = Component(68, self.ch2, font_size=18, format_string="{0:.3f}")
        self.ev.set_position(30, self.row_3_y)
        self.eu = Component(self.ch2, self.ch2, font_size=16)
        self.eu.set_position(98, self.row_3_y)
        self.eu.set_text("kW", 0, align=0)
        # Bottom row: electricity usage bar graph.
        self.egraph = BarGraph(128, self.bar, bg_color=0)
        self.egraph.set_position(2, self.row_4_y)
        self.egraph.update()
        # --------------------------------------------------
        # Add components to the layout
        self.add([self.cdate, self.ctime])
        self.add([self.wi, self.wv, self.wu])
        self.add([self.gi, self.gv, self.gu])
        self.add([self.ei, self.ev, self.eu])
        self.add([self.egraph])
        self.clear_all()

    def clear_all(self):
        """Reset all meter values to zero and empty the bar graph."""
        self.wv.set(0)
        self.gv.set(0.0)
        self.ev.set(0.0)
        self.egraph.clear_bars()

    def set_date_time(self):
        """Refresh the header with the current date ('%d-%b') and time ('%H:%M')."""
        # tdate = time.strftime('%d-%b-%y')
        self.cdate.set(time.strftime('%d-%b'))
        self.ctime.set(time.strftime('%H:%M'))
self.ctime.set(time.strftime('%H:%M'))
if __name__ == '__main__':
    # Manual smoke test for the layout on real LCD hardware.
    # NOTE: Python 2 script (uses raw_input); each raw_input() pauses so the
    # display can be inspected.
    from lcd import LCD
    # Display Layout instance
    L2 = Layout_222()
    # Random values for test
    L2.wv.set(890)
    L2.gv.set(2.64)
    L2.ev.set(0.0)
    # LCD instance
    lcd = LCD(False)
    lcd.draw(L2)
    # Fill the first 18 bars with increasing heights.
    for i in range(18):
        L2.egraph.set_bar(i, i+1)
        L2.set_date_time()
        lcd.update(L2)
    L2.egraph.set_bar(23,12.0)
    # Animate a few meter increments with decreasing bar heights.
    for i in range(5):
        L2.wv.add(1)
        L2.gv.add(0.01)
        L2.ev.add(0.001)
        L2.set_date_time()
        L2.egraph.set_bar(18+i, 12 - (4 + i))
        lcd.update(L2)
    raw_input()
    L2.clear_all()
    lcd.draw(L2)
    # Repeat a 6-step ramp four times across the graph.
    idx = 0
    for j in range(4):
        for i in range(6):
            L2.wv.add(1)
            L2.gv.add(0.01)
            L2.set_date_time()
            L2.egraph.set_bar(idx, float(2.11*(i+1)))
            # print float(2.11*(i+1))
            lcd.update(L2)
            idx += 1
    raw_input()
    lcd.close()
| 2.828125 | 3 |
mlflow/entities/model_registry/__init__.py | PeterSulcs/mlflow | 10,351 | 12763168 | from mlflow.entities.model_registry.registered_model import RegisteredModel
from mlflow.entities.model_registry.model_version import ModelVersion
from mlflow.entities.model_registry.registered_model_tag import RegisteredModelTag
from mlflow.entities.model_registry.model_version_tag import ModelVersionTag
__all__ = [
"RegisteredModel",
"ModelVersion",
"RegisteredModelTag",
"ModelVersionTag",
]
| 1.242188 | 1 |
utils/pascal.py | Vious/LPG_BBox_Segmentation | 14 | 12763169 | import xml.etree.ElementTree as ET
def parse_xml(anno_path):
    """Parse a PASCAL VOC annotation XML file into a list of object labels.

    Parameters
    ----------
    anno_path : str
        Path to the VOC-style annotation XML.

    Returns
    -------
    list of [xmin, ymin, xmax, ymax, cls_id]
        One entry per recognised object, with 0-based pixel coordinates.
        Objects whose class name is not in the VOC class list are skipped.

    Raises
    ------
    RuntimeError
        If a bounding box lies outside the image dimensions.
    """
    CLASSES = ('background',
               'aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car',
               'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike',
               'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor')
    index_map = dict(zip(CLASSES, range(len(CLASSES))))
    tree = ET.parse(anno_path)
    root = tree.getroot()
    size = root.find('size')
    width = int(size.find('width').text)
    height = int(size.find('height').text)

    def validate_label(xmin, ymin, xmax, ymax, width, height):
        """Validate labels."""
        # Asserts are re-raised below as RuntimeError with the file path.
        assert 0 <= xmin < width, "xmin must in [0, {}), given {}".format(width, xmin)
        assert 0 <= ymin < height, "ymin must in [0, {}), given {}".format(height, ymin)
        assert xmin < xmax <= width, "xmax must in (xmin, {}], given {}".format(width, xmax)
        assert ymin < ymax <= height, "ymax must in (ymin, {}], given {}".format(height, ymax)

    label = []
    for obj in root.iter('object'):
        # NOTE: the unused `difficult` parse was removed; it was never used
        # and crashed on annotations without a <difficult> tag.
        cls_name = obj.find('name').text.strip().lower()
        if cls_name not in CLASSES:
            continue
        cls_id = index_map[cls_name]
        xml_box = obj.find('bndbox')
        # VOC coordinates are 1-based; convert to 0-based pixel indices.
        xmin = (int(xml_box.find('xmin').text) - 1)
        ymin = (int(xml_box.find('ymin').text) - 1)
        xmax = (int(xml_box.find('xmax').text) - 1)
        ymax = (int(xml_box.find('ymax').text) - 1)
        try:
            validate_label(xmin, ymin, xmax, ymax, width, height)
        except AssertionError as e:
            raise RuntimeError("Invalid label at {}, {}".format(anno_path, e))
        label.append([xmin, ymin, xmax, ymax, cls_id])
    return label
| 2.96875 | 3 |
Basic Data Type/4_lists.py | FaranakAlikhah/ADM-HW1 | 0 | 12763170 | #!/usr/bin/env python
# coding: utf-8
# # *section 2: Basic Data Type*
#
# ### writer : <NAME> 1954128
# ### 4. Lists :
#
# In[ ]:
if __name__ == '__main__':
    # HackerRank "Lists": apply N list commands read from stdin.
    N = int(input())
    items = []
    # Commands that map directly onto list methods; integer arguments follow
    # the command name. Unknown commands are silently ignored, as before.
    mutators = {"sort", "insert", "remove", "append", "pop", "reverse"}
    for _ in range(N):
        command, *args = input().split()
        if command == "print":
            print(items)
        elif command in mutators:
            # Dispatch to the matching list method instead of an if/elif chain.
            getattr(items, command)(*map(int, args))
#
| 3.96875 | 4 |
fuji_server/models/persistence_output.py | ignpelloz/fuji | 25 | 12763171 | <gh_stars>10-100
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from fuji_server.models.base_model_ import Model
from fuji_server import util
class PersistenceOutput(Model):
    """Result of the FUJI "persistent identifier" metric check.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    def __init__(self,
                 pid: str = None,
                 pid_scheme: str = None,
                 resolvable_status: bool = False,
                 resolved_url: str = None):  # noqa: E501
        """PersistenceOutput - a model defined in Swagger

        :param pid: The pid of this PersistenceOutput.  # noqa: E501
        :type pid: str
        :param pid_scheme: The pid_scheme of this PersistenceOutput.  # noqa: E501
        :type pid_scheme: str
        :param resolvable_status: The resolvable_status of this PersistenceOutput.  # noqa: E501
        :type resolvable_status: bool
        :param resolved_url: The resolved_url of this PersistenceOutput.  # noqa: E501
        :type resolved_url: str
        """
        # Maps attribute name -> declared type, used for (de)serialization.
        self.swagger_types = {'pid': str, 'pid_scheme': str, 'resolvable_status': bool, 'resolved_url': str}
        # Maps attribute name -> JSON key in the serialized representation.
        self.attribute_map = {
            'pid': 'pid',
            'pid_scheme': 'pid_scheme',
            'resolvable_status': 'resolvable_status',
            'resolved_url': 'resolved_url'
        }
        self._pid = pid
        self._pid_scheme = pid_scheme
        self._resolvable_status = resolvable_status
        self._resolved_url = resolved_url

    @classmethod
    def from_dict(cls, dikt) -> 'PersistenceOutput':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The Persistence_output of this PersistenceOutput.  # noqa: E501
        :rtype: PersistenceOutput
        """
        return util.deserialize_model(dikt, cls)

    @property
    def pid(self) -> str:
        """Gets the pid of this PersistenceOutput.

        :return: The pid of this PersistenceOutput.
        :rtype: str
        """
        return self._pid

    @pid.setter
    def pid(self, pid: str):
        """Sets the pid of this PersistenceOutput.

        :param pid: The pid of this PersistenceOutput.
        :type pid: str
        """
        self._pid = pid

    @property
    def pid_scheme(self) -> str:
        """Gets the pid_scheme of this PersistenceOutput.

        :return: The pid_scheme of this PersistenceOutput.
        :rtype: str
        """
        return self._pid_scheme

    @pid_scheme.setter
    def pid_scheme(self, pid_scheme: str):
        """Sets the pid_scheme of this PersistenceOutput.

        :param pid_scheme: The pid_scheme of this PersistenceOutput.
        :type pid_scheme: str
        """
        self._pid_scheme = pid_scheme

    @property
    def resolvable_status(self) -> bool:
        """Gets the resolvable_status of this PersistenceOutput.

        :return: The resolvable_status of this PersistenceOutput.
        :rtype: bool
        """
        return self._resolvable_status

    @resolvable_status.setter
    def resolvable_status(self, resolvable_status: bool):
        """Sets the resolvable_status of this PersistenceOutput.

        :param resolvable_status: The resolvable_status of this PersistenceOutput.
        :type resolvable_status: bool
        """
        self._resolvable_status = resolvable_status

    @property
    def resolved_url(self) -> str:
        """Gets the resolved_url of this PersistenceOutput.

        :return: The resolved_url of this PersistenceOutput.
        :rtype: str
        """
        return self._resolved_url

    @resolved_url.setter
    def resolved_url(self, resolved_url: str):
        """Sets the resolved_url of this PersistenceOutput.

        :param resolved_url: The resolved_url of this PersistenceOutput.
        :type resolved_url: str
        """
        self._resolved_url = resolved_url
| 2.03125 | 2 |
question_repo/apps/repo/validator.py | Quizas007/question_repo | 0 | 12763172 | <reponame>Quizas007/question_repo<filename>question_repo/apps/repo/validator.py
from django.core.exceptions import ValidationError
def valid_difficulty(n):
    """Reject difficulty values outside the inclusive range 1..5."""
    if not 1 <= n <= 5:
        raise ValidationError("难度介于1到5之间")
tests/issues/test_project_issue.py | mubashshirjamal/code | 1,582 | 12763173 | # encoding: UTF-8
from tests.base import TestCase
from vilya.models.issue import Issue
from vilya.models.project_issue import ProjectIssue
class TestProjectIssue(TestCase):
    def test_add_issue(self):
        """Creating a ProjectIssue stores title, description and project id."""
        p = ProjectIssue.add('test', 'test description', 'test', project=1)
        assert isinstance(p, ProjectIssue)
        assert p.title == 'test'
        assert p.description == 'test description'
        assert p.project_id == 1
        p.delete()
def test_get_issue(self):
p = ProjectIssue.add('test', 'test description', 'test', project=1)
r = ProjectIssue.get(p.project_id, issue_id=p.issue_id)
assert isinstance(r, ProjectIssue)
assert r.project_id == 1
r = ProjectIssue.get(p.project_id, number=p.number)
assert isinstance(r, ProjectIssue)
assert r.project_id == 1
r = Issue.get_cached_issue(p.issue_id)
assert isinstance(r, ProjectIssue)
assert r.title == 'test'
assert r.description == 'test description'
assert r.project_id == 1
p2 = ProjectIssue.add(
'test2', 'test2 description', 'test', project=1,
assignee='assignee')
p3 = ProjectIssue.add(
'test3', 'test3 description', 'test', project=1,
assignee='assignee')
p4 = ProjectIssue.add(
'test4', 'test4 description', 'test', project=1, assignee='test')
p5 = ProjectIssue.add(
'test5', 'test5 description', 'test1', project=2, assignee='test')
rs = ProjectIssue._gets_by_project_id(1)
assert len(rs) == 4
rs = ProjectIssue._get_issues_by_project_id(1)
assert all([isinstance(i, ProjectIssue) for i in rs])
assert len(rs) == 4
rs = ProjectIssue.gets_by_assignee_id(1, 'assignee')
assert all([isinstance(i, ProjectIssue) for i in rs])
assert len(rs) == 2
rs = ProjectIssue.gets_by_creator_id(1, 'test')
assert all([isinstance(i, ProjectIssue) for i in rs])
assert len(rs) == 4
for p in [p, p2, p3, p4, p5]:
p.delete()
def test_n_issue(self):
p1 = ProjectIssue.add(
'test1', 'test1 description', 'test', project=1,
assignee='assignee')
p1.close('test')
p2 = ProjectIssue.add(
'test2', 'test2 description', 'test', project=1,
assignee='assignee')
p2.close('test')
p3 = ProjectIssue.add(
'test3', 'test3 description', 'test', project=1,
assignee='assignee')
p4 = ProjectIssue.add(
'test4', 'test4 description', 'test', project=1,
assignee='test')
p5 = ProjectIssue.add(
'test5', 'test5 description', 'test1', project=2,
assignee='test')
count = ProjectIssue.get_count_by_project_id(1)
assert count == 4
count = ProjectIssue.get_count_by_project_id(1, 'open')
assert count == 2
count = ProjectIssue.get_count_by_project_id(1, 'closed')
assert count == 2
count = ProjectIssue.get_count_by_assignee_id(1, 'assignee')
assert count == 3
count = ProjectIssue.get_count_by_assignee_id(1, 'assignee', 'open')
assert count == 1
count = ProjectIssue.get_count_by_assignee_id(1, 'assignee', 'closed')
assert count == 2
count = ProjectIssue.get_count_by_creator_id(1, 'test')
assert count == 4
count = ProjectIssue.get_count_by_creator_id(1, 'test', 'open')
assert count == 2
count = ProjectIssue.get_count_by_creator_id(1, 'test', 'closed')
assert count == 2
r = ProjectIssue.get(p1.project_id, p1.issue_id)
assert isinstance(r, ProjectIssue)
assert r.n_closed_issues == 2
assert r.n_open_issues == 2
for p in [p1, p2, p3, p4, p5]:
p.delete()
def test_open_and_close_issue(self):
p1 = ProjectIssue.add('test1', 'test1 description', 'test', project=1)
p2 = ProjectIssue.add('test2', 'test2 description', 'test', project=1)
p3 = ProjectIssue.add('test3', 'test3 description', 'test', project=1)
count = ProjectIssue.get_count_by_project_id(1)
assert count == 3
p1.close('test')
count = ProjectIssue.get_count_by_project_id(1, 'open')
assert count == 2
p1.open()
count = ProjectIssue.get_count_by_project_id(1, 'open')
assert count == 3
for p in [p1, p2, p3]:
p.delete()
def test_add_tags(self):
target_id = project_id = 1
p = ProjectIssue.add(
'test', 'test description', 'test', project=project_id)
assert isinstance(p, ProjectIssue)
assert p.title == 'test'
assert p.description == 'test description'
assert p.project_id == 1
tags = ['tag1', 'tag2', 'tag3']
p.add_tags(tags, target_id)
assert len(p.tags) == len(tags)
tag_names = [t.name for t in p.tags]
assert set(tags) & set(tag_names) == set(tags)
p.delete()
def test_gets_by_issue_ids(self):
project_id = 1
p = ProjectIssue.add(
'test', 'test description', 'test', project=project_id)
assert isinstance(p, ProjectIssue)
assert p.title == 'test'
assert p.description == 'test description'
assert p.project_id == 1
project_issues = ProjectIssue._gets_by_issue_ids(
[p.issue_id], state=None)
assert len(project_issues) == 1
pissue = project_issues[0]
assert isinstance(pissue, ProjectIssue)
assert pissue.project_id == project_id
project_issues = ProjectIssue._gets_by_issue_ids(
[p.issue_id], state="open")
assert len(project_issues) == 1
pissue = project_issues[0]
assert isinstance(pissue, ProjectIssue)
assert pissue.project_id == project_id
project_issues = ProjectIssue._gets_by_issue_ids(
[p.issue_id], state="closed")
assert len(project_issues) == 0
pissue.close("test")
project_issues = ProjectIssue._gets_by_issue_ids(
[p.issue_id], state="open")
assert len(project_issues) == 0
project_issues = ProjectIssue._gets_by_issue_ids(
[p.issue_id], state="closed")
assert len(project_issues) == 1
pissue = project_issues[0]
assert isinstance(pissue, ProjectIssue)
assert pissue.project_id == project_id
p.delete()
def test_gets_by_project_ids(self):
p1 = ProjectIssue.add('test1', 'desp', 'test', project=1)
p2 = ProjectIssue.add('test2', 'desp', 'test2', project=2)
p3 = ProjectIssue.add('test3', 'desp', 'test3', project=2)
issues = ProjectIssue.gets_by_project_ids([1, 2])
assert len(issues), 3
for p in [p1, p2, p3]:
p.delete()
| 2.265625 | 2 |
slybot/slybot/tests/test_linkextractors.py | ruairif/portia | 8 | 12763174 | <reponame>ruairif/portia
from unittest import TestCase
from scrapy.http import TextResponse, HtmlResponse
from slybot.linkextractor import (
create_linkextractor_from_specs,
RssLinkExtractor,
SitemapLinkExtractor,
)
class Test_RegexLinkExtractor(TestCase):
    """Tests for the regex-based link extractor."""

    def test_default(self):
        # An empty value falls back to the extractor's default URL regex.
        specs = {"type": "regex", "value": ''}
        lextractor = create_linkextractor_from_specs(specs)
        text = "Hello http://www.example.com/path, more text https://aws.amazon.com/product?id=23#tre?"
        response = TextResponse(url='http://www.example.com/', body=text)
        links = list(lextractor.links_to_follow(response))
        self.assertEqual(len(links), 2)
        self.assertEqual(links[0].url, 'http://www.example.com/path')
        self.assertEqual(links[1].url, 'https://aws.amazon.com/product?id=23')

    def test_custom(self):
        # BUGFIX: raw string — `\w` in a plain literal is an invalid escape
        # sequence (DeprecationWarning on Python 3.6+, error in future
        # versions). The runtime value is unchanged.
        specs = {"type": "regex", "value": r'url: ((?:http|https)://www.example.com/[\w/]+)'}
        lextractor = create_linkextractor_from_specs(specs)
        text = "url: http://www.example.com/path, more text url: https://www.example.com/path2. And more text url: https://aws.amazon.com/product?id=23#tre"
        response = TextResponse(url='http://www.example.com/', body=text)
        links = list(lextractor.links_to_follow(response))
        self.assertEqual(len(links), 2)
        self.assertEqual(links[0].url, 'http://www.example.com/path')
        self.assertEqual(links[1].url, 'https://www.example.com/path2')

    def test_custom_withargs(self):
        # Same pattern, restricted to the plain-http scheme.
        specs = {"type": "regex", "value": r'url: ((?:http|https)://www.example.com/[\w/]+)', 'allowed_schemes': ['http']}
        lextractor = create_linkextractor_from_specs(specs)
        text = "url: http://www.example.com/path, more text url: https://www.example.com/path2. And more text url: https://aws.amazon.com/product?id=23#tre"
        response = TextResponse(url='http://www.example.com/', body=text)
        links = list(lextractor.links_to_follow(response))
        self.assertEqual(len(links), 1)
        self.assertEqual(links[0].url, 'http://www.example.com/path')
# Sample RSS 2.0 feed with a single <item>; used by the RSS/XPath tests.
xmlfeed = """<?xml version="1.0" encoding="UTF-8" ?>
<rss version="2.0">
<channel>
<title>RSS Title</title>
<description>This is an example of an RSS feed</description>
<link>http://www.someexamplerssdomain.com/main.html</link>
<lastBuildDate>Mon, 06 Sep 2010 00:01:00 +0000 </lastBuildDate>
<pubDate>Mon, 06 Sep 2009 16:20:00 +0000 </pubDate>
<ttl>1800</ttl>
<item>
<title>Example entry</title>
<description>Here is some text containing an interesting description.</description>
<link>http://www.wikipedia.org/</link>
<guid>unique string per item</guid>
<pubDate>Mon, 06 Sep 2009 16:20:00 +0000 </pubDate>
</item>
</channel>
</rss>"""

# Sample sitemap <urlset> with three <url> entries.
sitemapfeed = """
<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"
xmlns:image="http://www.sitemaps.org/schemas/sitemap-image/1.1"
xmlns:video="http://www.sitemaps.org/schemas/sitemap-video/1.1">
<url><loc>http://www.accommodationforstudents.com/</loc><changefreq>daily</changefreq><priority>1.00</priority></url>
<url><loc>http://www.accommodationforstudents.com/London.asp</loc><changefreq>daily</changefreq><priority>1.00</priority></url>
<url><loc>http://www.accommodationforstudents.com/createaccounts.asp</loc><changefreq>daily</changefreq><priority>0.85</priority></url>
</urlset>
"""

# Sample sitemap index pointing at one gzipped sub-sitemap.
sitemapindex = """
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap>
<loc>http://www.example.com/sitemap1.xml.gz</loc>
<lastmod>2004-10-01T18:23:17+00:00</lastmod>
</sitemap>
</sitemapindex>
"""

# Sample Atom feed with two feed-level links and one entry link.
atomfeed = """
<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
<title>Example Feed</title>
<subtitle>A subtitle.</subtitle>
<link href="http://example.org/feed/" rel="self" />
<link href="http://example.org/" />
<entry>
<title>Atom-Powered Robots Run Amok</title>
<link href="http://example.org/2003/12/13/atom03" />
<summary>Some text.</summary>
<author>
<name><NAME></name>
<email><EMAIL></email>
</author>
</entry>
</feed>
"""
class Test_XmlLinkExtractors(TestCase):
    """Tests for the XML-based extractors: RSS, generic XPath, sitemap, Atom."""

    def setUp(self):
        # One canned response per feed flavour, shared by the tests below.
        self.response = TextResponse(url='http://www.example.com/', body=xmlfeed)
        self.sitemap = TextResponse(url='http://www.example.com/sitemap.xml', body=sitemapfeed)
        self.sitemapindex = TextResponse(url='http://www.example.com/sitemap.xml', body=sitemapindex)
        self.atom = TextResponse(url='http://www.example.com/atom', body=atomfeed)

    def test_rss(self):
        specs = {"type": "rss", "value": ""}
        lextractor = create_linkextractor_from_specs(specs)
        links = list(lextractor.links_to_follow(self.response))
        self.assertEqual(len(links), 1)
        self.assertEqual(links[0].url, 'http://www.wikipedia.org/')

    def test_xml(self):
        # Generic XPath extraction over the same RSS body.
        specs = {"type": "xpath", "value": "//item/link/text()"}
        lextractor = create_linkextractor_from_specs(specs)
        links = list(lextractor.links_to_follow(self.response))
        self.assertEqual(len(links), 1)
        self.assertEqual(links[0].url, 'http://www.wikipedia.org/')

    def test_sitemap(self):
        # Handles both plain <urlset> sitemaps and <sitemapindex> files.
        specs = {"type": "sitemap", "value": ""}
        lextractor = create_linkextractor_from_specs(specs)
        links = list(lextractor.links_to_follow(self.sitemap))
        self.assertEqual(len(links), 3)
        self.assertEqual(links[0].url, 'http://www.accommodationforstudents.com/')
        links = list(lextractor.links_to_follow(self.sitemapindex))
        self.assertEqual(len(links), 1)
        self.assertEqual(links[0].url, 'http://www.example.com/sitemap1.xml.gz')

    def test_atom(self):
        specs = {"type": "atom", "value": ""}
        lextractor = create_linkextractor_from_specs(specs)
        links = list(lextractor.links_to_follow(self.atom))
        self.assertEqual(len(links), 3)
        self.assertEqual(links[0].url, 'http://example.org/feed/')

    def test_xml_remove_namespaces(self):
        # With namespaces stripped, a plain //link/@href XPath works on Atom.
        specs = {"type": "xpath", "value": "//link/@href", "remove_namespaces": True}
        lextractor = create_linkextractor_from_specs(specs)
        links = list(lextractor.links_to_follow(self.atom))
        self.assertEqual(len(links), 3)
        self.assertEqual(links[0].url, 'http://example.org/feed/')
# Comma-delimited feed: URLs live in column 1.
csvfeed = """
My feed
Product A,http://www.example.com/path,A
Product B,http://www.example.com/path2,B
"""

# Same feed with a pipe delimiter (exercises the `delimiter` spec option).
csvfeed2 = """
My feed
Product A|http://www.example.com/path|A
Product B|http://www.example.com/path2|B
"""

# Same feed with a header row that should be skipped by the extractor.
csvfeed3 = """
My feed
name,url,id
Product A,http://www.example.com/path,A
Product B,http://www.example.com/path2,B
"""
class Test_CsvLinkExtractor(TestCase):
    """Tests for the column (CSV) link extractor."""

    def _extract(self, body, **extra_specs):
        """Run a column-1 extractor over *body* and return the link list."""
        specs = dict({"type": "column", "value": 1}, **extra_specs)
        extractor = create_linkextractor_from_specs(specs)
        response = TextResponse(url='http://www.example.com/', body=body)
        return list(extractor.links_to_follow(response))

    def _assert_two_paths(self, links):
        """Assert the two expected product URLs were extracted, in order."""
        self.assertEqual(len(links), 2)
        self.assertEqual(links[0].url, 'http://www.example.com/path')
        self.assertEqual(links[1].url, 'http://www.example.com/path2')

    def test_simple(self):
        self._assert_two_paths(self._extract(csvfeed))

    def test_extra_params(self):
        self._assert_two_paths(self._extract(csvfeed2, delimiter="|"))

    def test_header(self):
        self._assert_two_paths(self._extract(csvfeed3))
# Minimal HTML body with a single anchor, for the HTML extractor test.
html = """
<a href="http://www.example.com/path">Click here</a>
"""
class Test_HtmlLinkExtractor(TestCase):
    """Tests for the HTML anchor link extractor."""

    def test_simple(self):
        extractor = create_linkextractor_from_specs({"type": "html", "value": None})
        response = HtmlResponse(url='http://www.example.com/', body=html)
        links = list(extractor.links_to_follow(response))
        self.assertEqual(len(links), 1)
        link = links[0]
        self.assertEqual(link.url, 'http://www.example.com/path')
        self.assertEqual(link.text, 'Click here')
| 2.8125 | 3 |
parkings/migrations/0027_parkingcheck_performer.py | klemmari1/parkkihubi | 12 | 12763175 | # Generated by Django 2.2.3 on 2019-07-18 06:34
from django.conf import settings
from django.db import migrations, models
def fill_parkingcheck_performer(apps, schema_editor):
    """Backfill ``ParkingCheck.performer`` for rows created before the field.

    Picks an existing superuser if available, then any user, and as a last
    resort creates a placeholder account.
    """
    parking_check_model = apps.get_model('parkings', 'ParkingCheck')
    items_to_process = parking_check_model.objects.filter(performer=None)
    if not items_to_process.exists():
        return

    # Find a user, or create one if none exists
    user_model = apps.get_model(settings.AUTH_USER_MODEL)
    user = user_model.objects.filter(is_superuser=True).first()
    if not user:
        user = user_model.objects.first()
    if not user:
        # BUGFIX: the source contained a redacted `<PASSWORD>` token, which
        # is not valid Python. password=None marks the placeholder account
        # with an unusable password, so nobody can log in as it.
        user = user_model.objects.create_user(
            username='dummy', email='', password=None)

    # Update the performer field to the found or created user
    items_to_process.update(performer=user)
class Migration(migrations.Migration):
    """Three-step migration: add a nullable ``performer`` FK, backfill it
    for existing rows, then tighten the column to non-nullable.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('parkings', '0026_fix_permit_regnum_length'),
    ]

    operations = [
        migrations.AddField(  # Add the field as nullable
            model_name='parkingcheck',
            name='performer',
            field=models.ForeignKey(
                to=settings.AUTH_USER_MODEL,
                on_delete=models.PROTECT,
                editable=False,
                null=True,
                blank=True,
                verbose_name='performer',
                help_text='User who performed the check')),
        migrations.RunPython(  # Fill the field for existing rows
            code=fill_parkingcheck_performer,
            reverse_code=migrations.RunPython.noop),
        migrations.AlterField(  # Set the field to non-nullable
            model_name='parkingcheck',
            name='performer',
            field=models.ForeignKey(
                to=settings.AUTH_USER_MODEL,
                on_delete=models.PROTECT,
                editable=False,
                verbose_name='performer',
                help_text='User who performed the check')),
    ]
| 2.140625 | 2 |
dysql/test/test_sql_in_list_templates.py | LaudateCorpus1/dy-sql | 1 | 12763176 | <reponame>LaudateCorpus1/dy-sql
"""
Copyright 2021 Adobe
All Rights Reserved.
NOTICE: Adobe permits you to use, modify, and distribute this file in accordance
with the terms of the Adobe license agreement accompanying it.
"""
# pylint: disable=too-many-public-methods
import pytest
import dysql
from dysql import QueryData, sqlquery
from dysql.test import \
_verify_query, _verify_query_args, _verify_query_params, mock_create_engine_fixture, setup_mock_engine
_ = mock_create_engine_fixture
@pytest.fixture(name="mock_engine", autouse=True)
def mock_engine_fixture(mock_create_engine):
    """Autouse fixture: a mocked SQLAlchemy engine whose execute() returns
    no rows, so tests can inspect the generated SQL without a database."""
    mock_engine = setup_mock_engine(mock_create_engine)
    mock_engine.connect().execution_options().execute.side_effect = lambda x, y: []
    return mock_engine
# --- Basic IN / NOT IN template expansion ------------------------------------

def test_list_in_numbers(mock_engine):
    # Each list element becomes its own bind parameter :in__column_a_<i>.
    _query(
        "SELECT * FROM table WHERE {in__column_a}",
        template_params={'in__column_a': [1, 2, 3, 4]}
    )
    _verify_query_params(
        mock_engine,
        "SELECT * FROM table WHERE column_a IN ( :in__column_a_0, :in__column_a_1, :in__column_a_2, :in__column_a_3 ) ",
        {
            'in__column_a_0': 1,
            'in__column_a_1': 2,
            'in__column_a_2': 3,
            'in__column_a_3': 4
        }
    )


def test_list_in__strings(mock_engine):
    _query(
        "SELECT * FROM table WHERE {in__column_a}",
        template_params={'in__column_a': ['a', 'b', 'c', 'd']}
    )
    _verify_query_params(
        mock_engine,
        "SELECT * FROM table WHERE column_a IN ( :in__column_a_0, :in__column_a_1, :in__column_a_2, :in__column_a_3 ) ",
        {
            'in__column_a_0': 'a',
            'in__column_a_1': 'b',
            'in__column_a_2': 'c',
            'in__column_a_3': 'd'
        })


def test_list_not_in_numbers(mock_engine):
    _query(
        "SELECT * FROM table WHERE {not_in__column_b}",
        template_params={'not_in__column_b': [1, 2, 3, 4]}
    )
    _verify_query_params(
        mock_engine,
        "SELECT * FROM table WHERE column_b NOT IN ( :not_in__column_b_0, :not_in__column_b_1, "
        ":not_in__column_b_2, :not_in__column_b_3 ) ",
        {
            'not_in__column_b_0': 1,
            'not_in__column_b_1': 2,
            'not_in__column_b_2': 3,
            'not_in__column_b_3': 4
        })


def test_list_not_in_strings(mock_engine):
    _query(
        "SELECT * FROM table WHERE {not_in__column_b}",
        template_params={'not_in__column_b': ['a', 'b', 'c', 'd']}
    )
    _verify_query_params(
        mock_engine,
        "SELECT * FROM table WHERE column_b NOT IN ( :not_in__column_b_0, :not_in__column_b_1, "
        ":not_in__column_b_2, :not_in__column_b_3 ) ",
        {
            'not_in__column_b_0': 'a',
            'not_in__column_b_1': 'b',
            'not_in__column_b_2': 'c',
            'not_in__column_b_3': 'd'
        })


def test_list_in_handles_empty(mock_engine):
    # An empty IN list degenerates to an always-false predicate.
    _query(
        "SELECT * FROM table WHERE {in__column_a}",
        template_params={'in__column_a': []}
    )
    _verify_query(mock_engine, "SELECT * FROM table WHERE 1 <> 1 ")


def test_list_in_handles_no_param():
    # NOTE(review): pytest.raises(match=...) treats the pattern as a regex,
    # so "['in__column_a']" is a character class — it matches loosely.
    # Consider re.escape for an exact-message match.
    with pytest.raises(dysql.query_utils.ListTemplateException, match="['in__column_a']"):
        _query("SELECT * FROM table WHERE {in__column_a}")
# --- Multiple list templates and whitespace handling --------------------------

def test_list_in_multiple_lists(mock_engine):
    _query("SELECT * FROM table WHERE {in__column_a} OR {in__column_b}", template_params={
        'in__column_a': ['first', 'second'],
        'in__column_b': [1, 2]})
    _verify_query(
        mock_engine,
        "SELECT * FROM table WHERE column_a IN ( :in__column_a_0, :in__column_a_1 ) "
        "OR column_b IN ( :in__column_b_0, :in__column_b_1 ) "
    )


def test_list_in_multiple_lists_one_empty(mock_engine):
    # Only the empty list collapses to the always-false predicate.
    _query("SELECT * FROM table WHERE {in__column_a} OR {in__column_b}", template_params={
        'in__column_a': ['first', 'second'],
        'in__column_b': []})
    _verify_query(
        mock_engine,
        "SELECT * FROM table WHERE column_a IN ( :in__column_a_0, :in__column_a_1 ) OR 1 <> 1 "
    )


def test_list_in_multiple_lists_one_missing():
    with pytest.raises(dysql.query_utils.ListTemplateException, match="['in__column_a']"):
        _query("SELECT * FROM table WHERE {in__column_a} OR {in__column_b} ",
               template_params={'in__column_b': [1, 2]})


def test_list_in_multiple_lists_all_missing():
    with pytest.raises(dysql.query_utils.ListTemplateException, match="['in__column_a','in__column_b']"):
        _query("SELECT * FROM table WHERE {in__column_a} OR {in__column_b} ")


def test_list_not_in_handles_empty(mock_engine):
    # An empty NOT IN list degenerates to an always-true predicate.
    _query(
        "SELECT * FROM table WHERE {not_in__column_b}",
        template_params={'not_in__column_b': []}
    )
    _verify_query(mock_engine, "SELECT * FROM table WHERE 1 = 1 ")


def test_list_not_in_handles_no_param():
    with pytest.raises(dysql.query_utils.ListTemplateException, match="['not_in__column_b']"):
        _query("SELECT * FROM table WHERE {not_in__column_b} ")


def test_list_gives_template_space_before(mock_engine):
    # The template inserts its own surrounding whitespace when missing.
    _query("SELECT * FROM table WHERE{in__space}", template_params={'in__space': [9, 8]})
    _verify_query(mock_engine, "SELECT * FROM table WHERE space IN ( :in__space_0, :in__space_1 ) ")


def test_list_gives_template_space_after(mock_engine):
    _query("SELECT * FROM table WHERE {in__space}AND other_condition = 1",
           template_params={'in__space': [9, 8]})
    _verify_query(
        mock_engine,
        "SELECT * FROM table WHERE space IN ( :in__space_0, :in__space_1 ) AND other_condition = 1"
    )


def test_list_gives_template_space_before_and_after(mock_engine):
    _query("SELECT * FROM table WHERE{in__space}AND other_condition = 1",
           template_params={'in__space': [9, 8]})
    _verify_query(
        mock_engine,
        "SELECT * FROM table WHERE space IN ( :in__space_0, :in__space_1 ) AND other_condition = 1"
    )


def test_in_contains_whitespace(mock_engine):
    # A bare template still renders with leading/trailing spaces.
    _query("{in__column_one}", template_params={'in__column_one': [1, 2]})
    _verify_query(mock_engine, " column_one IN ( :in__column_one_0, :in__column_one_1 ) ")
# --- Table-qualified templates ------------------------------------------------

def test_template_handles_table_qualifier(mock_engine):
    """
    When the table is specified with a dot separator, the table name is split
    out of the keyword and the bind-parameter names use an underscore instead:
    {in__table.column} -> table.column IN (:in__table_column_0, :in__table_column_1)
    :return:
    """
    _query(
        "SELECT * FROM table WHERE {in__table.column}",
        template_params={'in__table.column': [1, 2]}
    )
    _verify_query(
        mock_engine,
        "SELECT * FROM table WHERE table.column IN ( :in__table_column_0, :in__table_column_1 ) "
    )
    _verify_query_args(
        mock_engine, {
            'in__table_column_0': 1,
            'in__table_column_1': 2
        })


def test_template_handles_multiple_table_qualifier(mock_engine):
    # Qualified and unqualified templates can be mixed in one query.
    _query(
        "SELECT * FROM table WHERE {in__table.column} AND {not_in__other_column}",
        template_params={'in__table.column': [1, 2], 'not_in__other_column': ['a', 'b']}
    )
    _verify_query(
        mock_engine,
        "SELECT * FROM table WHERE table.column IN ( :in__table_column_0, :in__table_column_1 ) "
        "AND other_column NOT IN ( :not_in__other_column_0, :not_in__other_column_1 ) "
    )
    _verify_query_args(
        mock_engine, {
            'in__table_column_0': 1,
            'in__table_column_1': 2,
            'not_in__other_column_0': 'a',
            'not_in__other_column_1': 'b',
        })


def test_empty_in_contains_whitespace(mock_engine):
    _query("{in__column_one}", template_params={'in__column_one': []})
    _verify_query(mock_engine, " 1 <> 1 ")


def test_multiple_templates_same_column_diff_table(mock_engine):
    # Same column name on two different tables must yield distinct bind names.
    template_params = {'in__table.status': ['on', 'off', 'waiting'],
                       'in__other_table.status': ['success', 'partial_success']}
    expected_params_from_template = {
        'in__table_status_0': 'on',
        'in__table_status_1': 'off',
        'in__table_status_2': 'waiting',
        'in__other_table_status_0': 'success',
        'in__other_table_status_1': 'partial_success'
    }

    # writing each of these queries out to help see what we expect compared to
    # the query we actually sent
    _query("SELECT * FROM table WHERE {in__table.status} AND {in__other_table.status}",
           template_params=template_params)
    expected_query = "SELECT * FROM table WHERE table.status IN ( :in__table_status_0, :in__table_status_1, " \
                     ":in__table_status_2 ) AND other_table.status IN ( :in__other_table_status_0, " \
                     ":in__other_table_status_1 ) "

    connection = mock_engine.connect.return_value.execution_options.return_value
    execute_call = connection.execute
    execute_call.assert_called_once()
    assert execute_call.call_args[0][0].text == expected_query
    assert execute_call.call_args[0][1] == expected_params_from_template


@sqlquery()
def _query(query, query_params=None, template_params=None):
    # Thin helper: every test routes its SQL through the @sqlquery decorator.
    return QueryData(query, query_params=query_params, template_params=template_params)
| 2.265625 | 2 |
Find if there is a subarray 0 sum.py | Into-Y0u/Github-Baby | 2 | 12763177 | class Solution:
def hasZeroSumSubarray(self, nums: List[int]) -> bool:
vis = set()
x = 0
for n in nums :
x+=n
if x==0 or x in vis :
return 1
vis.add(x)
return 0
| 3 | 3 |
Ejercicio3/Gato.py | carlotamartin/Ejercicio-de-POO-entrega | 0 | 12763178 | <filename>Ejercicio3/Gato.py
from Mamifero import Mamifero
class Gato (Mamifero):
    """A cat: concrete ``Mamifero`` subclass with no state of its own."""

    def __init__ (self):
        # NOTE(review): overriding __init__ with `pass` skips
        # Mamifero.__init__ entirely — confirm that is intentional.
        pass
models/__init__.py | doslindos/ml_crapwrap | 0 | 12763179 | from numpy import save as npsave, load as npload, array as nparray, append as npappend, expand_dims as npexpand, prod as npprod, argmax, zeros as npzeros
from datetime import datetime
from json import dump as jsondump, load as jsonload
from pickle import dump as pkldump, load as pklload
from inspect import signature
from os import getcwd
from collections import deque
from sys import exit
from pathlib import Path
from sklearn import decomposition as skdecomposition, cluster as skcluster
from utils.functions import run_function
from utils.datasets import get_dataset_info
from utils.modules import fetch_model, get_module
from UI.GUI_utils import open_dirGUI
from .util.model_handling_functions import save_configuration, save_weights, save_sk_model, load_weights, load_sk_model, load_configuration, handle_init, create_prediction_file, map_params, select_weights, read_prediction_file
from .model_handler import ModelHandler
| 1.648438 | 2 |
build/lib/geonomics/utils/__init__.py | AnushaPB/geonomics-1 | 8 | 12763180 | <reponame>AnushaPB/geonomics-1<filename>build/lib/geonomics/utils/__init__.py<gh_stars>1-10
from . import io
from . import viz
from . import spatial
from . import _str_repr_
| 1.015625 | 1 |
tests/test_008.py | vutsalsinghal/python-terrascript | 0 | 12763181 | # https://www.terraform.io/docs/configuration/locals.html
import terrascript
import terrascript.aws
import terrascript.aws.d
from shared import assert_equals_json
def test():
    """Data (008)"""
    cfg = terrascript.Terrascript()
    provider = terrascript.aws.aws(version='~> 2.0', region='us-east-1')
    ami = terrascript.aws.d.aws_ami(
        'example',
        most_recent=True,
        owners=['self'],
        tags=dict(Name="app-server", Tested="true"),
    )
    cfg += provider
    cfg += ami
    # Compare the rendered configuration against the golden JSON file.
    assert_equals_json(cfg, 'test_008.tf.json')
payment_predictor/cluster_features.py | michaelmurdock/py_ml_projects | 0 | 12763182 | # cluster_features.py
#
# Based on snippets here:
# http://scikit-learn.org/dev/auto_examples/cluster/plot_cluster_iris.html#sphx-glr-auto-examples-cluster-plot-cluster-iris-py
from __future__ import print_function
import time
import datetime
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from sklearn.cluster import KMeans
from sklearn.cross_validation import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import accuracy_score
from sklearn.decomposition import PCA
def get_payment_data(csv_filename):
    """Load a payment dataset from *csv_filename*.

    The last column is treated as the label vector ``Y``; all preceding
    columns form the feature matrix ``X``.

    :param csv_filename: path to a CSV file with a header row
    :return: tuple ``(X, Y, feature_names)`` where ``feature_names`` lists
        every column name (including the label column)
    """
    df = pd.read_csv(csv_filename, header=0)

    # put the original column names in a python list
    feature_names = list(df.columns.values)

    # FIX: DataFrame.as_matrix() was removed in pandas 1.0; .values works
    # on both old and new pandas.
    numpy_array = df.values

    # FIX/generalization: use the last column as the label instead of the
    # hard-coded index 24 (and drop the py2-only `xrange` column loop), so
    # the loader works for any column count. For the original 25-column
    # dataset this is identical behaviour.
    Y = numpy_array[:, -1]
    X = numpy_array[:, :-1]
    return (X, Y, feature_names)
if __name__ == "__main__":
    # Load the full dataset; last column is the default-on-payment label.
    (X, Y, feature_names) = get_payment_data("default_on_payment.csv")
    print('Shape of the inputs: %s, shape of the labels: %s' % (str(X.shape), str(Y.shape)))

    # split into a training and testing set
    # Training instances: 22,500
    # Test instances: 7500
    X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.25, random_state=42)
    print('Train set inputs: %s' % (str(X_train.shape)))
    print('Test set inputs %s' % (str(X_test.shape)))
    print('Train set labels: %s' % (str(Y_train.shape)))
    print('Test set labels: %s' % (str(Y_test.shape)))

    # ---------------------------------------------------------------------------
    # Scaling: fit on the training split only, then apply to both splits.
    # ----------------------------------------------------------------------------
    scaler = StandardScaler()
    scaler.fit(X_train)
    X_train = scaler.transform(X_train)
    X_test = scaler.transform(X_test)

    # ---------------------------------------------------------------------------
    # PCA Transformation of Features (3 components, for 3-D visualisation)
    # ----------------------------------------------------------------------------
    pca = PCA(n_components=3)
    X_train_new = pca.fit_transform(X_train, y=None)

    fig = plt.figure(1)
    ax = Axes3D(fig, rect=[0, 0, .95, 1], elev=48, azim=134)
    #ax.scatter(X_train_new[:, 0], X_train_new[:, 1], X_train_new[:, 2],c=labels.astype(np.float))
    ax.scatter(X_train_new[:, 0], X_train_new[:, 1], X_train_new[:, 2])

    ax.w_xaxis.set_ticklabels([])
    ax.w_yaxis.set_ticklabels([])
    ax.w_zaxis.set_ticklabels([])
    ax.set_xlabel('PC0')
    ax.set_ylabel('PC1')
    ax.set_zlabel('PC2')
    plt.show()

    # ---------------------------------------------------------------------------
    # K-Means Clustering (2 clusters, matching the binary label)
    # ----------------------------------------------------------------------------
    num_clusters = 2
    classifier_KMC = KMeans(n_clusters = num_clusters, n_jobs=-1, random_state=1)

    start_time = time.time()
    classifier_KMC.fit(X_train, y=None)
    end_time = time.time()
    # NOTE(review): labels1, start_time/end_time and the accuracy variables
    # below are computed but never printed.
    labels1 = classifier_KMC.labels_

    # Classify the train and test set vectors
    train_labels = classifier_KMC.predict(X_train)
    test_labels = classifier_KMC.predict(X_test)

    # Returns 68.9% on training set
    # NOTE(review): K-Means cluster ids are arbitrary, so comparing them to
    # Y with accuracy_score is only meaningful up to label permutation.
    accuracy_KMC_train = accuracy_score(Y_train, train_labels)
    accuracy_KMC_test = accuracy_score(Y_test, test_labels)

    # Plotting: re-fit and colour the first two (scaled) features by cluster.
    fig = plt.figure(1)
    plt.clf()
    ax = Axes3D(fig, rect=[0, 0, .95, 1], elev=48, azim=134)

    plt.cla()
    classifier_KMC.fit(X_train)
    labels = classifier_KMC.labels_

    #ax.scatter(X_train[:, 1], X_train[:, 2], X_train[:, 3], X_train[:, 3],c=labels.astype(np.float))
    ax.scatter(X_train[:, 0], X_train[:, 1],c=labels.astype(np.float))

    ax.w_xaxis.set_ticklabels([])
    ax.w_yaxis.set_ticklabels([])
    ax.w_zaxis.set_ticklabels([])
    ax.set_xlabel('F0')
    ax.set_ylabel('F1')
    ax.set_zlabel('F2')
    plt.show()

    ## Plot the ground truth
    #fig = plt.figure(1)
    #plt.clf()
    #ax = Axes3D(fig, rect=[0, 0, .95, 1], elev=48, azim=134)
    #plt.cla()

    # predictions_KMC = classifier_KMC.predict(X_test)
src/ufdl/json/image_classification/_CategoriesModSpec.py | waikato-ufdl/ufdl-json-messages | 0 | 12763183 | from typing import List
from wai.json.object import StrictJSONObject
from wai.json.object.property import ArrayProperty, StringProperty, EnumProperty
class CategoriesModSpec(StrictJSONObject['CategoriesModSpec']):
    """
    A specification of which images to modify the categories
    for, and which categories to modify for those images.
    """
    # The method to use to modify the categories ("add" or "remove")
    method: str = EnumProperty(
        values=("add", "remove")
    )

    # The images to modify the categories for (at least one, all unique,
    # each a non-empty string)
    images: List[str] = ArrayProperty(
        element_property=StringProperty(min_length=1),
        min_elements=1,
        unique_elements=True
    )

    # The categories to add/remove from the images (at least one, all
    # unique, each a non-empty string)
    categories: List[str] = ArrayProperty(
        element_property=StringProperty(min_length=1),
        min_elements=1,
        unique_elements=True
    )
| 2.546875 | 3 |
pyfo/tests/unittests/models/hmm/run_hmm.py | bradleygramhansen/pyfo | 3 | 12763184 | <filename>pyfo/tests/unittests/models/hmm/run_hmm.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
Author: <NAME>
Time created: 11:07
Date created: 20/01/2018
License: MIT
'''
import tests.unittests.models.hmm.hmm as test
from ..inference.dhmc import DHMCSampler as dhmc
### model
# test.model.display_graph()
# Leapfrog step size is drawn from this range; trajectory length from
# n_step_range.
stepsize_range = [0.01,0.085]
n_step_range = [10, 40]

### inference
# Build the DHMC sampler around the compiled HMM model module.
dhmc_ = dhmc(test)

burn_in = 8000
n_sample = 50000
# stepsize_range = [0.03,0.15]
# n_step_range = [10, 20]
# stepsize_range = [0.01,0.05] # old parameters
# n_step_range = [5, 10]
n_chain = 3

# Run the chains; save_samples=True persists the draws to disk.
all_stats = dhmc_.sample(chain_num = n_chain, n_samples=n_sample,burn_in=burn_in,stepsize_range=stepsize_range,n_step_range=n_step_range, save_samples=True)
speclus4py/assembler.py | ml4py/speclus4py | 0 | 12763185 | import numpy as np
from numba import jit
import pyflann
from petsc4py import PETSc
from mpi4py import MPI
from speclus4py.types import DataObject, DataType, GraphType, OperatorType, OperatorContainer
@jit(nopython=True)
def get_global_index(x, y, ydim):
    """Flatten 2-D grid coordinates (x, y) into a row-major linear index."""
    return x * ydim + y
@jit(nopython=True)
def get_global_index_volumetric(x, y, z, xdim, ydim):
    """Flatten 3-D grid coordinates into a linear index (x varies fastest)."""
    return x + (y + z * ydim) * xdim
@jit(nopython=True)
def compute_gradient(v1, v2, sigma: float):
    """Gaussian similarity of two scalars: exp(-|v1 - v2|^2 / (2*sigma^2))."""
    # Renamed local from `abs`, which shadowed the builtin of the same name.
    diff = np.abs(v1 - v2)
    return np.exp(-diff * diff / (2. * sigma * sigma))
@jit(nopython=True)
def compute_gradient_norm(v1, v2, sigma: float):
    """Gaussian similarity of two vectors based on their Euclidean distance."""
    dist = np.linalg.norm(v1 - v2)
    return np.exp(-(dist * dist) / (2. * sigma * sigma))
class OperatorAssembler(DataObject, OperatorContainer):
    def __init__(self, comm=MPI.COMM_WORLD, verbose=False):
        # Initialise both bases explicitly (no cooperative super() chain):
        # DataObject carries the MPI communicator and verbosity,
        # OperatorContainer the operator state.
        DataObject.__init__(self, comm, verbose)
        OperatorContainer.__init__(self)

        # Default: build a directed similarity graph.
        self.__graph_type = GraphType.DIRECTED
    @property
    def graph_type(self) -> GraphType:
        """Graph orientation used when assembling the adjacency matrix."""
        return self.__graph_type
    @graph_type.setter
    def graph_type(self, t: GraphType):
        # No validation: the caller is trusted to pass a GraphType member.
        self.__graph_type = t
    def setSimilarityFunc(self, fn, params):
        """Register a custom similarity function ``fn(v1, v2, params)``.

        NOTE(review): this stores ``__similarity_measure_fn``/``_params`` on
        this subclass, while the assembly code reads the pair back through
        ``self.getSimilarityMeasure()`` (inherited from OperatorContainer) --
        confirm both are wired to the same storage.
        """
        self.__similarity_measure_fn = fn
        self.__similarity_measure_params = params
    def reset(self):
        """Drop any previously assembled matrices/vectors (delegates to base)."""
        OperatorContainer.reset(self)
    def __construct_adjacency_matrix_general_data(self):
        """Build the adjacency matrix for general (feature-vector) data.

        Uses FLANN approximate nearest-neighbour search to find each sample's
        ``self.connectivity`` neighbours, then delegates the edge insertion to
        the directed or undirected variant depending on ``self.graph_type``.
        """
        data = self.getData()[0]
        # determine dimension of a problem
        N = data.shape[0]
        # building index (FLANN - Fast Library for Approximate Nearest Neighbors)
        pyflann.set_distance_type('euclidean')
        flann = pyflann.FLANN()
        flann.build_index(data)
        # create matrix object
        self.mat_adj = PETSc.Mat()
        self.mat_adj.create(self.comm)
        self.mat_adj.setSizes([N, N])
        self.mat_adj.setType(self.mat_type)
        if self.graph_type == GraphType.DIRECTED:
            self.__construct_adjacency_matrix_general_data_directed_graph(flann)
        else:
            self.__construct_adjacency_matrix_general_data_undirected_graph(flann)
        # finalizing assembly of adjacency matrix
        self.mat_adj.assemble()
        del flann
    def __construct_adjacency_matrix_general_data_directed_graph(self, flann):
        """Insert directed k-NN edges: I -> J for each of I's nearest neighbours."""
        self.mat_adj.setPreallocationNNZ(self.connectivity)
        self.mat_adj.setFromOptions()
        self.mat_adj.setUp()
        # Get function for measuring similarity and its parameters
        sim_func, sim_func_params = self.getSimilarityMeasure()
        if sim_func is None:
            sim_func = compute_gradient_norm
        if sim_func_params == PETSc.DEFAULT:
            sim_func_params = 0.5
        data = self.getData()[0]
        # building adjacency matrix of similarity graph
        i_start, i_end = self.mat_adj.getOwnershipRange()
        for I in range(i_start, i_end):
            v1 = data[I]
            # find nearest neighbours to sample v1
            # sometimes self-adjoint vertex is included, thus finding n+1 nearest neighbours
            result, dist = flann.nn_index(v1, self.connectivity + 1)
            used_nn = 0
            for J in range(0, self.connectivity + 1):
                idx = result[0, J]
                if idx != I and used_nn < self.connectivity:
                    v2 = data[result[0, J]]
                    g = sim_func(v1, v2, sim_func_params)
                    if g > 0.:  # skip zero weights to keep the matrix sparse
                        self.mat_adj[I, idx] = g
                    used_nn += 1
                elif used_nn >= self.connectivity:
                    break
    def __construct_adjacency_matrix_general_data_undirected_graph(self, flann):
        """Insert symmetric k-NN edges (both I -> J and J -> I).

        NOTE(review): unlike the directed variant there is no NNZ
        preallocation here -- presumably because the symmetric inserts can
        exceed the per-row estimate; confirm whether preallocation is safe.
        """
        self.mat_adj.setFromOptions()
        self.mat_adj.setUp()
        # Get function for measuring similarity and its parameters
        sim_func, sim_func_params = self.getSimilarityMeasure()
        if sim_func is None:
            sim_func = compute_gradient_norm
        if sim_func_params == PETSc.DEFAULT:
            sim_func_params = 0.5
        data = self.getData()[0]
        # building adjacency matrix of similarity graph
        i_start, i_end = self.mat_adj.getOwnershipRange()
        for I in range(i_start, i_end):
            v1 = data[I]
            # find nearest neighbours to sample v1
            # sometimes self-adjoint vertex is included, thus finding n+1 nearest neighbours
            result, dist = flann.nn_index(v1, self.connectivity + 1)
            for J in range(0, self.connectivity + 1):
                idx = result[0, J]
                if idx != I:
                    v2 = data[result[0, J]]
                    g = sim_func(v1, v2, sim_func_params)
                    if g > 0.:
                        self.mat_adj[I, idx] = g
                        self.mat_adj[idx, I] = g
def __construct_adjacency_matrix_vol_img(self):
if self.connectivity != 6 and self.connectivity != 18 and self.connectivity != 26:
raise Exception('Connectivity (con) must be set to 6, 18, or 26')
# Get function for measuring similarity and its parameters
sim_func, sim_func_params = self.getSimilarityMeasure()
if sim_func is None:
sim_func = compute_gradient
if sim_func_params == PETSc.DEFAULT:
sim_func_params = 0.5
data = self.getData()[0]
# determine dimension of a problem
dims = data.GetDimensions()
dim_x = dims[0] - 1
dim_y = dims[1] - 1
dim_z = dims[2] - 1
N = dim_x * dim_y * dim_z
# create matrix object
self.mat_adj = PETSc.Mat()
self.mat_adj.create(self.comm)
self.mat_adj.setSizes([N, N])
self.mat_adj.setType(self.mat_type)
self.mat_adj.setPreallocationNNZ(self.connectivity)
self.mat_adj.setFromOptions()
self.mat_adj.setUp()
# compute local derivatives on structured non-uniform grid that is determined using sigma and
# connectivity of derivatives (6, 18, or 26)
data_scalars = data.GetCellData().GetScalars()
i_start, i_end = self.mat_adj.getOwnershipRange()
for I in range(i_start, i_end):
# determine (x, y, z)-coordinates
z = I // (dim_x * dim_y)
i = I - z * dim_x * dim_y
y = i // dim_x
x = i - y * dim_x
p1 = get_global_index_volumetric(x, y, z, dim_x, dim_y)
v1 = data_scalars.GetTuple1(p1) / 255.
if z > 0:
if self.connectivity > 6 and y > 0:
if self.connectivity == 26 and x > 0:
p2 = get_global_index_volumetric(x - 1, y - 1, z - 1, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
p2 = get_global_index_volumetric(x, y - 1, z - 1, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
if self.connectivity == 26 and x < dim_x - 1:
p2 = get_global_index_volumetric(x + 1, y - 1, z - 1, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
if self.connectivity > 6 and x > 0:
p2 = get_global_index_volumetric(x - 1, y, z - 1, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
p2 = get_global_index_volumetric(x, y, z - 1, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
if self.connectivity > 6 and x < dim_x - 1:
p2 = get_global_index_volumetric(x + 1, y, z - 1, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
if self.connectivity > 6 and y < dim_y - 1:
if self.connectivity == 26 and x > 0:
p2 = get_global_index_volumetric(x - 1, y + 1, z - 1, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
p2 = get_global_index_volumetric(x, y + 1, z - 1, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
if self.connectivity == 26 and x < dim_x - 1:
p2 = get_global_index_volumetric(x + 1, y + 1, z - 1, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
if y > 0:
if self.connectivity > 6 and x > 0:
p2 = get_global_index_volumetric(x - 1, y - 1, z, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
p2 = get_global_index_volumetric(x, y - 1, z, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
if self.connectivity > 6 and x < dim_x - 1:
p2 = get_global_index_volumetric(x + 1, y - 1, z, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
if x > 0:
p2 = get_global_index_volumetric(x - 1, y, z, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
if x < dim_x - 1:
p2 = get_global_index_volumetric(x + 1, y, z, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
if y < dim_y - 1:
if self.connectivity > 6 and x > 0:
p2 = get_global_index_volumetric(x - 1, y + 1, z, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
p2 = get_global_index_volumetric(x, y + 1, z, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
if self.connectivity > 6 and x < dim_x - 1:
p2 = get_global_index_volumetric(x + 1, y + 1, z, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
if z < dim_z - 1:
if self.connectivity > 6 and y > 0:
if self.connectivity == 26 and x > 0:
p2 = get_global_index_volumetric(x - 1, y - 1, z + 1, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
p2 = get_global_index_volumetric(x, y - 1, z + 1, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
if self.connectivity == 26 and x < dim_x - 1:
p2 = get_global_index_volumetric(x + 1, y - 1, z + 1, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
if self.connectivity > 6 and x > 0:
p2 = get_global_index_volumetric(x - 1, y, z + 1, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
p2 = get_global_index_volumetric(x, y, z + 1, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
if self.connectivity > 6 and x < dim_x - 1:
p2 = get_global_index_volumetric(x + 1, y, z + 1, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
if self.connectivity > 6 and y < dim_y - 1:
if self.connectivity == 26 and x > 0:
p2 = get_global_index_volumetric(x - 1, y + 1, z + 1, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
p2 = get_global_index_volumetric(x, y + 1, z + 1, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
if self.connectivity == 26 and x < dim_x - 1:
p2 = get_global_index_volumetric(x + 1, y + 1, z + 1, dim_x, dim_y)
v2 = data_scalars.GetTuple1(p2) / 255.
g = sim_func(v1, v2, sim_func_params)
self.mat_adj[p1, p2] = g
# finalizing assembly of adjacency matrix
self.mat_adj.assemble()
def __construct_adjacency_matrix_img(self):
if self.connectivity != 4 and self.connectivity != 8:
PETSc.Sys.Print('Connectivity (con) must be set to 4 or 8')
raise PETSc.Error(62)
rows = self.data.shape[0]
cols = self.data.shape[1]
N = rows * cols
# Get function for measuring similarity and its parameters
sim_func, sim_func_params = self.getSimilarityMeasure()
if sim_func is None:
if len(self.data.shape) == 3:
sim_func = compute_gradient_norm
else:
sim_func = compute_gradient
if sim_func_params == PETSc.DEFAULT:
sim_func_params = 0.5
data = self.getData()[0]
# create matrix object
self.mat_adj = PETSc.Mat()
self.mat_adj.create(self.comm)
self.mat_adj.setSizes([N, N])
self.mat_adj.setType(self.mat_type)
self.mat_adj.setPreallocationNNZ(self.connectivity)
self.mat_adj.setFromOptions()
self.mat_adj.setUp()
i_start, i_end = self.mat_adj.getOwnershipRange()
for I in range(i_start, i_end):
# determine (x, y) coordinates
x = I // cols
y = I - x * cols
p1 = I
v1 = self.data[x, y] / 255.
if x > 0:
if y > 0 and self.connectivity == 8:
p2 = get_global_index(x - 1, y - 1, cols)
v2 = data[x - 1, y - 1] / 255.
self.mat_adj[p1, p2] = sim_func(v1, v2, sim_func_params)
p2 = get_global_index(x - 1, y, cols)
v2 = data[x - 1, y] / 255.
self.mat_adj[p1, p2] = sim_func(v1, v2, sim_func_params)
if y < cols - 1 and self.connectivity == 8:
p2 = get_global_index(x - 1, y + 1, cols)
v2 = data[x - 1, y + 1] / 255.
self.mat_adj[p1, p2] = sim_func(v1, v2, sim_func_params)
if y > 0:
p2 = get_global_index(x, y - 1, cols)
v2 = data[x, y - 1] / 255.
self.mat_adj[p1, p2] = sim_func(v1, v2, sim_func_params)
if y < cols - 1:
p2 = get_global_index(x, y + 1, cols)
v2 = data[x, y + 1] / 255.
self.mat_adj[p1, p2] = sim_func(v1, v2, sim_func_params)
if x < rows - 1:
if y > 0 and self.connectivity == 8:
p2 = get_global_index(x + 1, y - 1, cols)
v2 = data[x + 1, y - 1] / 255.
self.mat_adj[p1, p2] = sim_func(v1, v2, sim_func_params)
p2 = get_global_index(x + 1, y, cols)
v2 = data[x + 1, y] / 255.
self.mat_adj[p1, p2] = sim_func(v1, v2, sim_func_params)
if y < cols - 1 and self.connectivity == 8:
p2 = get_global_index(x + 1, y + 1, cols)
v2 = data[x + 1, y + 1] / 255.
self.mat_adj[p1, p2] = sim_func(v1, v2, sim_func_params)
# finalizing assembly of adjacency matrix
self.mat_adj.assemble()
def assembly(self):
self.reset()
data_type = self.getData()[1]
if self.fn_similarity_params is not None and self.verbose:
if type(self.fn_similarity_params) == float:
str_params = ', param=%.2f' % self.fn_similarity_params
else:
str_params = ', params=['
str_params += ''.join('{}, '.format(k) for k in self.fn_similarity_params)
str_params = str_params[:-2] + ']'
else:
str_params = ''
if data_type == DataType.IMG:
if self.connectivity == PETSc.DEFAULT:
self.connectivity = 4
if self.verbose:
s = 'Construct operator (%s, GRAPH_%s) for image: connectivity=%d'
v = (self.operator_type.name, GraphType.UNDIRECTED.name, self.connectivity)
PETSc.Sys.Print(s % v + str_params)
self.__construct_adjacency_matrix_img()
elif data_type == DataType.VOL_IMG:
if self.connectivity == PETSc.DEFAULT:
self.connectivity = 6
if self.verbose:
s = 'Construct operator (%s, GRAPH_%s) for volumetric image: connectivity=%d'
v = (self.operator_type.name, self.graph_type.name, self.connectivity)
PETSc.Sys.Print(s % v + str_params)
self.__construct_adjacency_matrix_vol_img()
else:
if self.connectivity == PETSc.DEFAULT:
self.connectivity = 3
if self.verbose:
s = 'Construct operator (%s, GRAPH_%s) for general data: connectivity=%d'
v = (self.operator_type.name, self.graph_type.name, self.connectivity)
PETSc.Sys.Print(s % v + str_params)
self.__construct_adjacency_matrix_general_data()
# if data_type == DataType.IMG:
# if self.connectivity == PETSc.DEFAULT:
# self.connectivity = 4
#
# if self.verbose:
# PETSc.Sys.Print(
# 'Construct operator (%s) for image: connectivity=%d, sigma=%2g'
# % (self.operator_type.name, self.connectivity, self.sigma)
# )
#
# self.__construct_adjacency_matrix_img()
# elif data_type == DataType.VOL_IMG: # volumetric image
# if self.connectivity == PETSc.DEFAULT:
# self.connectivity = 6
#
# if self.verbose:
# if self.fn_similarity_params is not None:
# s = 'Construct operator (%s, GRAPH_ %s) for volumetric image: connectivity=%d, '
# v = (self.operator_type.name, self.graph_type.name, self.connectivity)
# sv = s % v
# if type(self.fn_similarity_params) == float:
# sp = 'param=%.2f' % self.fn_similarity_params
# else:
# sp = 'params=('
# sp += ''.join('{}, '.format(k) for k in self.fn_similarity_params)
# sp = sp[:-2] + ')'
# sv += sp
# else:
# s = 'Construct operator (%s, GRAPH_%s) for volumetric image: connectivity=%d params=None'
# v = (self.operator_type.name, self.graph_type.name, self.connectivity)
# sv = s % v
# PETSc.Sys.Print(sv)
#
# exit(-1)
#
# self.__construct_adjacency_matrix_vol_img()
# else:
# if self.connectivity == PETSc.DEFAULT:
# self.connectivity = 6
#
# if self.verbose:
# PETSc.Sys.Print(
# 'Construct operator (%s) for general data: connectivity=%d, params=%2g'
# % (self.operator_type.name, self.connectivity, self.__similarity_measure_params)
# )
#
# self.__construct_adjacency_matrix_general_data()
N = self.mat_adj.getSize()[0]
# compute degree matrix D_i = deg(v_i)
self.vec_diag = self.mat_adj.createVecLeft()
self.mat_adj.getRowSum(self.vec_diag)
if self.operator_type != OperatorType.MARKOV_1 or self.operator_type != OperatorType.MARKOV_2:
self.mat_op = PETSc.Mat().createAIJ((N, N), comm=self.comm)
self.mat_op.setPreallocationNNZ(self.connectivity + 1)
self.mat_op.setFromOptions()
self.mat_op.setUp()
self.mat_op.setDiagonal(self.vec_diag)
self.mat_op.assemble()
# L = D - A
self.mat_op.axpy(-1., self.mat_adj)
else: # P = D^-1 A (MARKOV_1) or Ng, Weiss (MARKOV_2)
self.mat_op = self.mat_adj.duplicate()
self.mat_op.setFromOptions()
self.mat_op.setType(self.mat_type)
self.mat_op.setUp()
self.mat_op.copy(self.mat_op)
if self.operator_type != OperatorType.LAPLACIAN_UNNORMALIZED:
tmp_vec = self.vec_diag.duplicate()
self.vec_diag.copy(tmp_vec)
if self.operator_type == OperatorType.LAPLACIAN_NORMALIZED or self.operator_type == OperatorType.MARKOV_2:
tmp_vec.sqrtabs()
tmp_vec.reciprocal()
self.mat_op.diagonalScale(tmp_vec, tmp_vec)
elif self.operator_type == OperatorType.MARKOV_1:
tmp_vec.reciprocal()
self.mat_op.diagonalScale(tmp_vec)
else: # L_rw
tmp_vec.reciprocal()
self.mat_op.diagonalScale(tmp_vec) # left diagonal scale
del tmp_vec
self.mat_op.assemble()
| 1.953125 | 2 |
wcics/auth/_cookies.py | CS-Center/CS-Center | 0 | 12763186 | <reponame>CS-Center/CS-Center
# -*- coding: utf-8 -*-
# File defines a single function, a set cookie that takes a configuration
# Use only if the app cannot be imported, otherwise use .cookies
def set_cookie(config, response, key, val):
    """Set cookie ``key`` = ``val`` on ``response`` using flags from ``config``.

    Use this variant when the Flask app object cannot be imported; it reads
    the Secure/HttpOnly/SameSite flags directly from the given configuration
    mapping instead of the application config.

    (Also strips dataset-dump residue that was fused onto the last line and
    broke the syntax.)
    """
    response.set_cookie(key, val,
                        secure=config['COOKIES_SECURE'],
                        httponly=config['COOKIES_HTTPONLY'],
                        samesite=config['COOKIES_SAMESITE'])
samples/python/winforms.py | MicroFocus/SilkAppDriver | 8 | 12763187 | <gh_stars>1-10
import unittest
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.select import Select
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
class InsuranceTests(unittest.TestCase):
    """Smoke test driving the WinForms sample app through SilkAppDriver."""

    @classmethod
    def setUpClass(cls):
        # BUGFIX: the remote URL contained the invalid IPv4 literal
        # "1192.168.3.11" (first octet > 255); corrected to 192.168.3.11.
        # Also dropped the redundant local alias ``driver``.
        cls.driver = webdriver.Remote(
            command_executor='http://192.168.3.11:8080',
            desired_capabilities={
                'appdriver-basestate': { 'executable' : 'C:\\tools\\sampleapplications\\dotnet\\4.0\\WindowsForms Sample Application\\SilkTest.WinForms.TestApp.exe', 'locator' : '//Window' },
                'appdriver-options': { 'closeOnQuit' : True }
            }
        )
        cls.wait = WebDriverWait(cls.driver, 10)

    def setUp(self):
        # Record a TrueLog trace for every test run (stopped in tearDownClass).
        self.driver.execute_script("appdriver:startTrueLog", "c:\\temp\\winforms.tlz")

    def test1(self):
        """Inspect the main window, toggle a check box and exit the app."""
        window = self.driver.find_element_by_xpath("//Window")
        accessibleRole = window.get_property("AccessibleRole")
        self.assertEqual(accessibleRole, "Default")
        text = window.get_property("Text")
        self.assertEqual(text, "Test Application")
        self.driver.find_element_by_xpath("//Menu[@caption='Control']").click()
        self.driver.find_element_by_xpath("//MenuItem[@caption='Check box']").click()
        checkBox = self.driver.find_element_by_xpath("//CheckBox[@automationId='chk_Check']")
        self.assertEqual(checkBox.get_property("State"), 2)
        checkBox.click()
        self.assertEqual(checkBox.get_property("State"), 1)
        self.driver.find_element_by_xpath("//PushButton[@automationId='btn_Exit']").click()

    @classmethod
    def tearDownClass(cls):
        cls.driver.execute_script("appdriver:stopTrueLog")
        cls.driver.quit()


if __name__ == "__main__":
    unittest.main()
| 2.609375 | 3 |
sql_queries.py | AlkaSaliss/aws-datawarehouse | 0 | 12763188 | <filename>sql_queries.py
import configparser

# CONFIG
# Cluster / IAM-role / S3 settings are read from the local dwh.cfg file.
config = configparser.ConfigParser()
config.read('dwh.cfg')
# DROP TABLES
# One DROP ... IF EXISTS per table so the ETL can be re-run from scratch.
staging_events_table_drop = "DROP TABLE IF EXISTS staging_events;"
staging_songs_table_drop = "DROP TABLE IF EXISTS staging_songs;"
songplay_table_drop = "DROP TABLE IF EXISTS songplays;"
user_table_drop = "DROP TABLE IF EXISTS users;"
song_table_drop = "DROP TABLE IF EXISTS songs;"
artist_table_drop = "DROP TABLE IF EXISTS artists;"
time_table_drop = "DROP TABLE IF EXISTS time;"
# CREATE TABLES
staging_events_table_create = ("""
CREATE TABLE IF NOT EXISTS staging_events (
artist VARCHAR,
auth VARCHAR,
firstName VARCHAR,
gender VARCHAR,
itemInSession INTEGER,
lastName VARCHAR,
length FLOAT,
level VARCHAR,
location VARCHAR,
method VARCHAR,
page VARCHAR,
registration FLOAT,
sessionId INTEGER,
song VARCHAR,
status INTEGER,
ts TIMESTAMP,
userAgent VARCHAR,
userId INTEGER
);
""")
staging_songs_table_create = ("""
CREATE TABLE IF NOT EXISTS staging_songs (
num_songs INTEGER,
artist_id VARCHAR,
artist_latitude FLOAT,
artist_longitude FLOAT,
artist_location VARCHAR,
artist_name VARCHAR,
song_id VARCHAR,
title VARCHAR,
duration FLOAT,
year SMALLINT
);
""")
songplay_table_create = ("""
CREATE TABLE IF NOT EXISTS songplays (
songplay_id BIGINT IDENTITY(0, 1) PRIMARY KEY,
start_time TIMESTAMP REFERENCES time NOT NULL SORTKEY DISTKEY,
user_id VARCHAR REFERENCES users NOT NULL,
level VARCHAR,
song_id VARCHAR REFERENCES songs NOT NULL,
artist_id VARCHAR REFERENCES artists NOT NULL,
session_id INTEGER,
location VARCHAR,
user_agent VARCHAR
);
""")
user_table_create = ("""
CREATE TABLE IF NOT EXISTS users (
user_id VARCHAR PRIMARY KEY SORTKEY,
first_name VARCHAR,
last_name VARCHAR,
gender VARCHAR,
level VARCHAR
);
""")
song_table_create = ("""
CREATE TABLE IF NOT EXISTS songs (
song_id VARCHAR PRIMARY KEY SORTKEY,
title VARCHAR,
artist_id VARCHAR REFERENCES artists NOT NULL,
year SMALLINT,
duration FLOAT
);
""")
artist_table_create = ("""
CREATE TABLE IF NOT EXISTS artists (
artist_id VARCHAR PRIMARY KEY SORTKEY,
name VARCHAR,
location VARCHAR,
latitude FLOAT,
longitude FLOAT
);
""")
time_table_create = ("""
CREATE TABLE IF NOT EXISTS time (
start_time TIMESTAMP PRIMARY KEY SORTKEY DISTKEY,
hour SMALLINT,
day SMALLINT,
week SMALLINT,
month SMALLINT,
year SMALLINT,
weekday SMALLINT
);
""")
# STAGING TABLES
staging_events_copy = ("""
COPY staging_events FROM {}
CREDENTIALS 'aws_iam_role={}'
REGION {}
JSON {}
TIMEFORMAT 'epochmillisecs';
""").format(config.get("S3", "LOG_DATA"), config.get("IAM_ROLE", "ARN"), config.get("CLUSTER", "REGION"), config.get("S3", "LOG_JSONPATH"))
staging_songs_copy = ("""
COPY staging_songs FROM {}
CREDENTIALS 'aws_iam_role={}'
REGION {}
JSON 'auto';
""").format(config.get("S3", "SONG_DATA"), config.get("IAM_ROLE", "ARN"), config.get("CLUSTER", "REGION"))
# FINAL TABLES
songplay_table_insert = ("""
INSERT INTO songplays (start_time, user_id, level, song_id, artist_id, session_id, location, user_agent)
SELECT ste.ts,
ste.userId,
ste.level,
sts.song_id,
sts.artist_id,
ste.sessionId,
ste.location,
ste.userAgent
FROM staging_events AS ste
JOIN staging_songs AS sts ON (ste.artist = sts.artist_name AND ste.song = sts.title)
WHERE ste.page = 'NextSong'
""")
user_table_insert = ("""
INSERT INTO users (user_id, first_name , last_name, gender, level)
SELECT DISTINCT userId,
firstName,
lastName,
gender,
level
FROM staging_events
WHERE userId IS NOT NULL
""")
song_table_insert = ("""
INSERT INTO songs (song_id, title, artist_id, year, duration)
SELECT DISTINCT song_id,
title,
artist_id,
year,
duration
FROM staging_songs
WHERE song_id IS NOT NULL
""")
artist_table_insert = ("""
INSERT INTO artists (artist_id, name, location, latitude, longitude)
SELECT DISTINCT artist_id,
artist_name,
artist_location,
artist_latitude,
artist_longitude
FROM staging_songs
WHERE artist_id IS NOT NULL
""")
time_table_insert = ("""
INSERT INTO time (start_time, hour, day, week, month, year, weekday)
SELECT DISTINCT ts,
EXTRACT(HOUR FROM ts),
EXTRACT(DAY FROM ts),
EXTRACT(WEEK FROM ts),
EXTRACT(MONTH FROM ts),
EXTRACT(YEAR FROM ts),
EXTRACT(DAYOFWEEK FROM ts)
FROM staging_events
WHERE ts IS NOT NULL
""")
# QUERY LISTS
# Execution order matters: referenced tables (users/songs/artists/time) are
# created and loaded before the songplays fact table; drops run before its
# referenced tables are removed.
create_table_queries = [staging_events_table_create, staging_songs_table_create,
                        user_table_create, artist_table_create, song_table_create, time_table_create, songplay_table_create]
drop_table_queries = [staging_events_table_drop, staging_songs_table_drop,
                      songplay_table_drop, user_table_drop, song_table_drop, artist_table_drop, time_table_drop]
copy_table_queries = [staging_events_copy, staging_songs_copy]
insert_table_queries = [user_table_insert, time_table_insert,
                        artist_table_insert, song_table_insert, songplay_table_insert]
| 2.53125 | 3 |
2020/day03.py | sree-cfa/adventOfCode | 0 | 12763189 | from util.inputReader import read_as_strings
def part1(slope_grid):
    """Count trees ('#') hit when moving right 1, down 2 through the grid.

    The grid wraps horizontally, so the column index is taken modulo the
    row width. Odd rows are skipped entirely (down-2 movement).
    """
    hits = 0
    for row, line in enumerate(slope_grid):
        if row % 2:
            continue
        col = (row // 2) % len(line)
        if line[col] == '#':
            hits += 1
    return hits
# Read the puzzle grid, solve part 1, and print the part 2 answer
# (hard-coded as the product of the per-slope tree counts).
grid = read_as_strings("../inputs/2020_03.txt")
print("part1:", part1(grid))
print("part2:", 278 * 90 * 88 * 98 * 45)
| 3.375 | 3 |
distracting_control/suite_test.py | TmacAaron/mydrq | 0 | 12763190 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for suite code."""
from absl.testing import absltest
from absl.testing import parameterized
import mock
import suite
DAVIS_PATH = '/tmp/davis'
class SuiteTest(parameterized.TestCase):
  """Checks that suite.load forwards its arguments for every difficulty."""

  @parameterized.named_parameters(
      ('none', None),
      ('easy', 'easy'),
      ('medium', 'medium'),
      ('hard', 'hard'))
  @mock.patch.object(suite, 'pixels')
  @mock.patch.object(suite, 'suite')
  def test_suite_load_with_difficulty(self, difficulty, mock_dm_suite,
                                      mock_pixels):
    """suite.load must delegate to dm_control's loader and wrap with pixels."""
    # NOTE: mock.patch decorators inject mocks bottom-up, which is why the
    # 'suite' mock precedes the 'pixels' mock in the parameter list.
    domain, task = 'cartpole', 'balance'
    suite.load(domain, task, difficulty,
               background_dataset_path=DAVIS_PATH)
    # The underlying dm_control suite receives the domain/task untouched.
    mock_dm_suite.load.assert_called_with(
        domain, task,
        environment_kwargs=None,
        task_kwargs=None,
        visualize_reward=False)
    # Observations are exposed as pixels rendered from camera 0.
    mock_pixels.Wrapper.assert_called_with(
        mock.ANY,
        observation_key='pixels',
        pixels_only=True,
        render_kwargs={'camera_id': 0})


if __name__ == '__main__':
  absltest.main()
| 1.976563 | 2 |
src/python_patterns/factory/rectangle.py | NostraDavid/python-patterns | 0 | 12763191 | <reponame>NostraDavid/python-patterns
from shape import IShape
class Rectangle(IShape):
    """Concrete IShape product created by the shape factory."""

    def draw(self):
        """Render the rectangle (demo implementation just prints a message)."""
        print("Inside Rectangle::draw() method.")
| 3.390625 | 3 |
python/petitBloc/ui/paramEditor.py | sol-ansano-kim/unitBlock | 24 | 12763192 | from Qt import QtWidgets
from Qt import QtCore
from Qt import QtGui
from . import const
from . import uiUtil
from .. import util
from .. import box
from .. import core
from functools import partial
import re
# Matches a leading "=" (with optional surrounding whitespace) that marks a
# user-typed expression, e.g. "= foo + 1".
ReEqual = re.compile("^\s*[=]\s*")
class ParamCreator(QtWidgets.QDialog):
    """Modal dialog asking the user for the type and name of a new parameter."""

    # Label -> Python type of the parameters that can be created.
    # NOTE(review): the combo ordering follows dict key order, which is only
    # insertion order on Python >= 3.7 -- confirm if a fixed order matters.
    ParamTypes = {"bool": bool, "int": int, "float": float, "str": str}

    def __init__(self, parent=None):
        super(ParamCreator, self).__init__(parent=parent)
        self.__type = None
        self.__name = None
        main_layout = QtWidgets.QVBoxLayout()
        param_layout = QtWidgets.QHBoxLayout()
        param_layout.addWidget(QtWidgets.QLabel("Type :"))
        self.__type_combo = QtWidgets.QComboBox()
        self.__type_combo.addItems(ParamCreator.ParamTypes.keys())
        param_layout.addWidget(self.__type_combo)
        param_layout.addWidget(QtWidgets.QLabel("Name :"))
        self.__name_line = QtWidgets.QLineEdit()
        param_layout.addWidget(self.__name_line)
        main_layout.addLayout(param_layout)
        button_layout = QtWidgets.QHBoxLayout()
        self.__add = QtWidgets.QPushButton("Add")
        self.__cancel = QtWidgets.QPushButton("Cancel")
        button_layout.addWidget(self.__add)
        button_layout.addWidget(self.__cancel)
        main_layout.addLayout(button_layout)
        self.__add.clicked.connect(self.accept)
        self.__cancel.clicked.connect(self.reject)
        self.__type_combo.currentIndexChanged.connect(self.__typeChanged)
        self.__name_line.editingFinished.connect(self.__nameChanged)
        self.__name_line.textEdited.connect(self.__nameCheck)
        self.setLayout(main_layout)

    def exec_(self):
        """Reset the dialog to its defaults, then run it modally."""
        self.__name = None
        self.__type_combo.setCurrentIndex(0)
        self.__typeChanged(0)
        self.__name_line.setText("")
        self.__nameCheck("")
        return super(ParamCreator, self).exec_()

    def __typeChanged(self, a):
        # Cache the Python type matching the selected combo entry.
        self.__type = ParamCreator.ParamTypes[self.__type_combo.itemText(a)]

    def __nameChanged(self):
        self.__name = str(self.__name_line.text())

    def __nameCheck(self, text):
        # "Add" is only enabled while the name field is non-empty.
        self.__add.setEnabled((True if text else False))

    def getType(self):
        """Return the Python type chosen in the dialog (None before exec_)."""
        return self.__type

    def getName(self):
        """Return the entered parameter name (None until editing finished)."""
        return self.__name
class ParamEnum(QtWidgets.QComboBox):
    """Combo box bound to an enum-style parameter.

    Selecting an entry writes the entry's index into the parameter and emits
    the ``Changed`` signal.
    """

    Changed = QtCore.Signal()

    def __init__(self, param, parent=None):
        super(ParamEnum, self).__init__(parent=parent)
        self.__param = param
        self.addItems(param.getLabels())
        self.setCurrentIndex(param.get())
        # Connect only after the initial index is set, so that construction
        # itself does not push a value or emit Changed.
        self.currentIndexChanged.connect(self.__onIndexChanged)

    def __onIndexChanged(self, index):
        # Push the UI choice into the parameter, then notify listeners.
        self.__param.set(index)
        self.Changed.emit()
class ParamStatus():
    # Display state of a parameter widget, plus the border colour per state.
    Value = 0            # plain value, no expression attached
    Expression = 1       # a valid expression drives the value
    ExpressionError = 2  # an expression is attached but is invalid
    NormalColor = "#1D1D1D"
    ExpressionColor = "#0A4646"
    ExpressionErrorColor = "#640A28"
class CheckBox(QtWidgets.QCheckBox):
    """Check box whose user-driven toggling can be frozen.

    While the internal flag is False, clicking re-asserts the current state
    instead of flipping it (used when an expression drives the value).
    """

    def __init__(self, checkable, parent=None):
        super(CheckBox, self).__init__(parent=parent)
        self.__checkable = checkable

    def nextCheckState(self):
        # Flip only when togglable; otherwise keep the current state.
        state = self.isChecked()
        self.setChecked((not state) if self.__checkable else state)

    def setCheckable(self, v):
        # Note: deliberately shadows QAbstractButton.setCheckable -- it only
        # updates the internal freeze flag.
        self.__checkable = v
class ParamCheck(QtWidgets.QWidget):
    """Bool-parameter editor: a check box that can alternatively be driven by
    a user expression entered through the context menu.

    The indicator border colour reflects the state (plain value / valid
    expression / broken expression, see ParamStatus).
    """

    Changed = QtCore.Signal()

    def __init__(self, param, parent=None):
        super(ParamCheck, self).__init__(parent=parent)
        self.__normal_style = "QCheckBox::indicator{ border: 1px solid #1D1D1D; }"
        # BUGFIX: the margin declaration was missing its colon ("margin 2px"),
        # invalid stylesheet syntax that Qt silently ignores.
        self.__exp_style = "QCheckBox::indicator{ margin: 2px; border: 3px solid %s; }"
        self.__current_state = ParamStatus.Value
        self.__param = param
        layout = QtWidgets.QHBoxLayout()
        self.setLayout(layout)
        self.__check_box = CheckBox(True, self)
        self.__check_box.toggled.connect(self.__toggled)
        self.__exp_line = QtWidgets.QLineEdit(self)
        self.__exp_line.hide()
        layout.addWidget(self.__check_box)
        layout.addWidget(self.__exp_line)
        self.setContextMenuPolicy(QtCore.Qt.DefaultContextMenu)
        # initialise the check state without emitting Changed
        self.__check_box.blockSignals(True)
        self.__check_box.setChecked(self.__param.get())
        self.__check_box.blockSignals(False)
        self.__exp_line.editingFinished.connect(self.__expFinished)
        self.refresh()

    def contextMenuEvent(self, evnt):
        """Offer 'Set Expression' / 'Delete Expression' on right click."""
        menu = QtWidgets.QMenu(self)
        set_action = menu.addAction("Set Expression")
        delete_action = menu.addAction("Delete Expression")
        menu.popup(self.mapToGlobal(evnt.pos()))
        set_action.triggered.connect(self.__startSetExpression)
        delete_action.triggered.connect(self.__deleteExpression)

    def __toggled(self, v):
        self.__param.set(v)
        self.Changed.emit()

    def __startSetExpression(self):
        # Swap the check box for a line edit pre-filled with the expression.
        self.__check_box.hide()
        self.__exp_line.show()
        if self.__param.hasExpression():
            self.__exp_line.setText(self.__param.getExpression())
        else:
            self.__exp_line.setText("= ")
        self.__exp_line.setFocus(QtCore.Qt.OtherFocusReason)

    def __deleteExpression(self):
        self.__check_box.show()
        self.__exp_line.hide()
        self.__param.setExpression(None)
        self.refresh()

    def refresh(self):
        """Re-derive the display state; the box is only user-togglable while
        no expression is attached."""
        if not self.__param.hasExpression():
            self.__current_state = ParamStatus.Value
            self.__check_box.setCheckable(True)
        elif self.__param.validExpression():
            self.__check_box.setCheckable(False)
            self.__current_state = ParamStatus.Expression
        else:
            self.__check_box.setCheckable(False)
            self.__current_state = ParamStatus.ExpressionError
        self.__setBackgroundColor()

    def __setBackgroundColor(self):
        # Colour the indicator border to reflect the expression state.
        if self.__current_state == ParamStatus.Value:
            s = self.__normal_style
        elif self.__current_state == ParamStatus.Expression:
            s = self.__exp_style % ParamStatus.ExpressionColor
        elif self.__current_state == ParamStatus.ExpressionError:
            s = self.__exp_style % ParamStatus.ExpressionErrorColor
        self.setStyleSheet(s)

    def __expFinished(self):
        txt = self.__exp_line.text()
        if txt:
            self.__param.setExpression(str(txt))
        else:
            self.__param.setExpression(None)
        self.Changed.emit()
        self.__check_box.setChecked(self.__param.get())
        self.__check_box.show()
        self.__exp_line.hide()
        self.refresh()
class ParamLine(QtWidgets.QLineEdit):
    """Line edit bound to a parameter.

    Besides plain values (str, or int/float depending on the construction
    flags), the field accepts expressions: any text matched by the
    module-level ``ReEqual`` regex (i.e. starting with ``=``) is stored as
    the parameter's expression. The background color reflects whether the
    field holds a plain value, a valid expression, or a broken expression.
    """
    Changed = QtCore.Signal()

    def __init__(self, param, parent=None, isInt=False, isFloat=False):
        """
        :param param: parameter object exposing get/set and the expression API
        :param isInt: restrict typed input to integers (plus expressions)
        :param isFloat: restrict typed input to floats (plus expressions)
        """
        super(ParamLine, self).__init__(parent=parent)
        self.__style = "QLineEdit{ background-color: %s; border: 1px solid #1D1D1D; }"
        self.__current_state = ParamStatus.Value
        self.__param = param
        if isInt:
            self.textEdited.connect(self.__intOnly)
            self.setAlignment(QtCore.Qt.AlignRight)
            self.editingFinished.connect(self.__intFinished)
        elif isFloat:
            self.textEdited.connect(self.__floatOnly)
            self.setAlignment(QtCore.Qt.AlignRight)
            self.editingFinished.connect(self.__floatFinished)
        else:
            self.editingFinished.connect(self.__strFinished)
        self.setContextMenuPolicy(QtCore.Qt.DefaultContextMenu)
        # Initial display must not fire editingFinished/textEdited handlers.
        self.blockSignals(True)
        self.setText(str(self.__param.get()))
        self.blockSignals(False)
        self.refresh()

    def contextMenuEvent(self, evnt):
        """Context menu offering set/delete of the parameter expression."""
        menu = QtWidgets.QMenu(self)
        set_action = menu.addAction("Set Expression")
        delete_action = menu.addAction("Delete Expression")
        menu.popup(self.mapToGlobal(evnt.pos()))
        set_action.triggered.connect(self.__startSetExpression)
        delete_action.triggered.connect(self.__deleteExpression)

    def __startSetExpression(self):
        # Pre-fill with the existing expression, or an "= " stub.
        if self.__param.hasExpression():
            self.setText(self.__param.getExpression())
        else:
            self.setText("= ")
        # Bug fix: QLineEdit has no setEditFocus(); use setFocus() so the
        # operator can type the expression right away.
        self.setFocus()

    def __deleteExpression(self):
        self.__param.setExpression(None)
        self.refresh()

    def refresh(self):
        """Recompute the state (value / expression / broken expression) and
        redisplay the parameter without emitting editing signals."""
        if not self.__param.hasExpression():
            self.__current_state = ParamStatus.Value
        elif self.__param.validExpression():
            self.__current_state = ParamStatus.Expression
        else:
            self.__current_state = ParamStatus.ExpressionError
        self.blockSignals(True)
        # While focused, show the raw expression for editing; otherwise the
        # evaluated value.
        if self.hasFocus() and self.__param.hasExpression():
            self.setText(str(self.__param.getExpression()))
        else:
            self.setText(str(self.__param.get()))
        self.blockSignals(False)
        self.__setBackgroundColor()

    def focusInEvent(self, evnt):
        super(ParamLine, self).focusInEvent(evnt)
        self.refresh()

    def focusOutEvent(self, evnt):
        super(ParamLine, self).focusOutEvent(evnt)
        self.refresh()

    def __setBackgroundColor(self):
        """Color the widget according to the current state."""
        if self.__current_state == ParamStatus.Value:
            s = self.__style % ParamStatus.NormalColor
        elif self.__current_state == ParamStatus.Expression:
            s = self.__style % ParamStatus.ExpressionColor
        elif self.__current_state == ParamStatus.ExpressionError:
            s = self.__style % ParamStatus.ExpressionErrorColor
        self.setStyleSheet(s)

    def __intOnly(self, txt):
        # Strip characters invalid in an integer, unless an expression is
        # being typed.
        if not ReEqual.search(txt):
            self.setText(Parameter.RegexInt.sub("", txt))

    def __floatOnly(self, txt):
        # Strip characters invalid in a float, unless an expression is
        # being typed.
        if not ReEqual.search(txt):
            self.setText(Parameter.RegexFloat.sub("", txt))

    def __intFinished(self):
        """Commit an integer edit: expressions are stored as-is; text that
        does not parse as int reverts to the parameter's value."""
        txt = str(self.text())
        if ReEqual.search(txt):
            if self.__param.setExpression(txt):
                self.Changed.emit()
            return
        try:
            int(txt)
        except ValueError:  # was a bare except; int(str) can only raise ValueError
            self.setText(str(self.__param.get()))
        else:
            self.__param.setExpression(None)
            if not self.__param.set(int(txt)):
                self.setText(str(self.__param.get()))
            self.Changed.emit()

    def __floatFinished(self):
        """Commit a float edit; same contract as __intFinished."""
        txt = str(self.text())
        if ReEqual.search(txt):
            if self.__param.setExpression(txt):
                self.Changed.emit()
            return
        try:
            float(txt)
        except ValueError:  # was a bare except; float(str) can only raise ValueError
            self.setText(str(self.__param.get()))
        else:
            self.__param.setExpression(None)
            if not self.__param.set(float(txt)):
                self.setText(str(self.__param.get()))
            self.Changed.emit()

    def __strFinished(self):
        """Commit a string edit (expression or plain value)."""
        txt = str(self.text())
        if ReEqual.search(txt):
            self.__param.setExpression(txt)
        else:
            self.__param.setExpression(None)
            self.__param.set(txt)
        self.Changed.emit()
class ColorPicker(QtCore.QObject):
    """Label + swatch-button pair bound to three numeric parameters (r, g, b).

    Clicking the swatch opens a QColorDialog; the chosen color is written
    back into the parameters and ``Changed`` is emitted on every refresh.
    """
    Changed = QtCore.Signal()

    def __init__(self, r, g, b):
        super(ColorPicker, self).__init__()
        self.__r, self.__g, self.__b = r, g, b
        self.__label = None
        self.__button = None
        self.__style = "QPushButton{ background-color: rgb(%s, %s, %s); border: 1px solid #1D1D1D; }"
        self.initialize()
        self.refresh()

    def widgets(self):
        """Widgets to place in the parameter grid, left to right."""
        return [self.__label, self.__button]

    def initialize(self):
        """Build the label and the clickable swatch button."""
        label = QtWidgets.QLabel("Color")
        label.setMinimumWidth(const.ParamLabelMinimumWidth)
        label.setMaximumWidth(const.ParamLabelMaximumWidth)
        button = QtWidgets.QPushButton()
        button.setFixedSize(18, 18)
        button.clicked.connect(self.__pickColor)
        self.__label = label
        self.__button = button

    def refresh(self):
        """Repaint the swatch from the current parameter values and notify."""
        rgb = (self.__r.get(), self.__g.get(), self.__b.get())
        self.__button.setStyleSheet(self.__style % rgb)
        self.Changed.emit()

    def __pickColor(self):
        """Open the color dialog and store a valid selection."""
        current = QtGui.QColor(self.__r.get(), self.__g.get(), self.__b.get())
        color = QtWidgets.QColorDialog.getColor(current, self.__button)
        if color.isValid():
            self.__r.set(color.red())
            self.__g.set(color.green())
            self.__b.set(color.blue())
            self.refresh()
class Parameter(QtCore.QObject):
    """Bridges one block parameter to the matching editor widget.

    Chooses the widget from the parameter's type class (bool -> ParamCheck,
    int/float/str -> ParamLine, core.PBEnum -> ParamEnum) and relays edit
    and delete signals upward.
    """
    # Shared input-cleanup regexes, also used by ParamLine filtering.
    RegexInt = re.compile("[^0-9-]")
    RegexFloat = re.compile("[^0-9-.]")
    ParameterEdited = QtCore.Signal()
    DeleteRequest = QtCore.Signal(object)

    def __init__(self, param, deletable=False):
        """
        :param param: the parameter object to edit
        :param deletable: when True, an extra remove button is created
        """
        super(Parameter, self).__init__()
        self.__label = None
        self.__param = param
        self.__val_edit = None
        self.__delete_button = None
        self.__need_to_refresh = True
        self.__deletable = deletable
        self.__initialize()

    def widgets(self):
        """Widgets to place in the grid: label, editor, optional delete button."""
        widgets = [self.__label, self.__val_edit]
        if self.__delete_button:
            widgets.append(self.__delete_button)
        return widgets

    def refresh(self):
        # Enum editors opt out of refresh (see __initialize).
        if self.__need_to_refresh:
            self.__val_edit.refresh()

    def __initialize(self):
        """Create the editor widget matching the parameter's type class.

        NOTE(review): an unhandled type class leaves ``__val_edit`` as None,
        which would break widgets()/refresh() later -- confirm all type
        classes are covered.
        """
        self.__label = QtWidgets.QLabel(self.__param.name())
        tc = self.__param.typeClass()
        if tc == bool:
            self.__val_edit = ParamCheck(self.__param)
            self.__val_edit.Changed.connect(self.__editedEmit)
        elif tc == int:
            self.__val_edit = ParamLine(self.__param, isInt=True)
            self.__val_edit.Changed.connect(self.__editedEmit)
        elif tc == float:
            self.__val_edit = ParamLine(self.__param, isFloat=True)
            self.__val_edit.Changed.connect(self.__editedEmit)
        elif tc == str:
            self.__val_edit = ParamLine(self.__param)
            self.__val_edit.Changed.connect(self.__editedEmit)
        elif tc == core.PBEnum:
            self.__val_edit = ParamEnum(self.__param)
            self.__val_edit.Changed.connect(self.__editedEmit)
            # Enum combos display fixed choices and never need a re-sync.
            self.__need_to_refresh = False
        if self.__deletable:
            self.__delete_button = QtWidgets.QPushButton()
            self.__delete_button.setObjectName("RemoveButton")
            self.__delete_button.setFixedSize(14, 14)
            self.__delete_button.setFocusPolicy(QtCore.Qt.NoFocus)
            self.__delete_button.clicked.connect(self.__deleteParam)

    def __deleteParam(self):
        self.DeleteRequest.emit(self.__param)

    def __editedEmit(self):
        self.ParameterEdited.emit()
class ParamEditor(QtWidgets.QWidget):
    """Panel that shows and edits the parameters of the selected block.

    Signals:
        BlockRenamed(bloc, new_name)  -- emitted after a successful rename
        DeleteRequest(bloc, param)    -- declared for external use
        NodeRefreshRequest(bloc)      -- emitted when a blank block's color changed
    """
    BlockRenamed = QtCore.Signal(object, str)
    DeleteRequest = QtCore.Signal(object, object)
    NodeRefreshRequest = QtCore.Signal(object)

    def __init__(self, parent=None):
        # NOTE(review): `parent` is accepted but not forwarded to QWidget.
        super(ParamEditor, self).__init__()
        self.__bloc = None
        self.__param_layout = None
        self.__block_type_label = None
        self.__name_label = None
        self.__block_name = None
        self.__add_param_button = None
        self.__param_creator = None
        self.__params = []
        self.__initialize()
        self.__refresh()

    def setBlock(self, bloc):
        """Switch the editor to another block (no-op when unchanged)."""
        if self.__bloc == bloc:
            return
        self.__bloc = bloc
        self.__refresh()

    def forceRefresh(self):
        """Rebuild the whole panel for the current block."""
        self.__refresh()

    def __initialize(self):
        """Build the static widget tree (scroll area, header, param grid)."""
        # scroll area
        main_layout = QtWidgets.QVBoxLayout()
        self.setLayout(main_layout)
        contents_widget = QtWidgets.QWidget()
        contents_layout = QtWidgets.QVBoxLayout()
        contents_layout.setAlignment(QtCore.Qt.AlignTop | QtCore.Qt.AlignLeft)
        contents_widget.setLayout(contents_layout)
        scroll_area = QtWidgets.QScrollArea()
        scroll_area.setSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
        scroll_area.setWidgetResizable(True)
        scroll_area.setWidget(contents_widget)
        main_layout.addWidget(scroll_area)
        # parameters
        self.__param_layout = QtWidgets.QGridLayout()
        self.__param_layout.setSpacing(10)
        self.__param_layout.setVerticalSpacing(5)
        self.__block_type_label = QtWidgets.QLabel()
        self.__block_type_label.setAlignment(QtCore.Qt.AlignCenter)
        name_layout = QtWidgets.QHBoxLayout()
        self.__block_name = QtWidgets.QLineEdit()
        self.__name_label = QtWidgets.QLabel("Name")
        self.__name_label.setMinimumWidth(const.ParamLabelMinimumWidth + 4)
        self.__name_label.setMaximumWidth(const.ParamLabelMaximumWidth)
        name_layout.addWidget(self.__name_label)
        name_layout.addWidget(self.__block_name)
        name_layout.setAlignment(QtCore.Qt.AlignLeft)
        name_layout.addStretch(10)
        self.__block_name.setMaximumWidth(const.ParamEditorBlockNameMaximumWidth)
        add_layout = QtWidgets.QHBoxLayout()
        self.__add_param_button = QtWidgets.QPushButton()
        self.__add_param_button.setObjectName("AddButton")
        self.__add_param_button.setFixedSize(18, 18)
        self.__add_param_button.setFocusPolicy(QtCore.Qt.NoFocus)
        add_layout.setAlignment(QtCore.Qt.AlignCenter)
        add_layout.addWidget(self.__add_param_button)
        contents_layout.addWidget(self.__block_type_label)
        contents_layout.addLayout(name_layout)
        contents_layout.addLayout(self.__param_layout)
        contents_layout.addLayout(add_layout)
        self.__add_param_button.hide()
        self.__param_creator = ParamCreator(self)
        self.__add_param_button.clicked.connect(self.__addParam)
        self.__block_name.editingFinished.connect(self.__renameBloc)
        self.__block_name.textEdited.connect(self.__nameCheck)

    def __nameCheck(self, txt):
        # Drop forbidden characters as the user types.
        self.__block_name.setText(util.ReForbiddenName.sub("", txt))

    def __renameBloc(self):
        """Validate and apply the name typed in the header line edit.

        Bug fix: the original referenced ``self.__block`` (a nonexistent
        attribute -- a typo for ``__bloc``) on both error paths and was
        missing the early returns, so it crashed or fell through.
        """
        if not self.__bloc:
            self.__block_name.setText("")
            return
        parent_box = self.__bloc.parent()
        if not parent_box:
            self.__block_name.setText(self.__bloc.name())
            return
        old_name = self.__bloc.name()
        new_name = self.__block_name.text()
        if old_name == new_name:
            return
        if not util.ValidateName(new_name):
            self.__block_name.setText(old_name)
            return
        uniq_name = parent_box.getUniqueName(self.__bloc, new_name)
        self.__bloc.rename(uniq_name)
        self.__block_name.setText(uniq_name)
        if old_name == uniq_name:
            return
        self.BlockRenamed.emit(self.__bloc, uniq_name)

    def __refresh(self):
        """Rebuild the header and the parameter grid for the current block."""
        self.__params = []
        self.__clearLayout(self.__param_layout)
        if self.__bloc is None:
            self.__block_type_label.setText("")
            self.__block_name.setText("")
            self.__block_type_label.hide()
            self.__name_label.hide()
            self.__block_name.hide()
            self.__add_param_button.hide()
        else:
            self.__block_type_label.show()
            self.__name_label.show()
            self.__block_name.show()
            self.__block_name.setText(self.__bloc.name())
            self.__block_type_label.setText("<{}>".format(self.__bloc.__class__.__name__))
            # Only expandable blocks may receive extra parameters.
            if self.__bloc.expandable():
                self.__add_param_button.show()
            else:
                self.__add_param_button.hide()
            # The scene context keeps its fixed name.
            if isinstance(self.__bloc, box.SceneContext):
                self.__block_name.setEnabled(False)
            else:
                self.__block_name.setEnabled(True)
            self.__build_params()

    def __build_params(self):
        """Fill the grid with one editor row per parameter.

        Blank blocks get a single color-picker row; regular blocks get one
        row per parameter (regular ones first, then deletable extras), with
        editors disabled for parameters driven by a connected input.
        """
        if self.__bloc is None:
            return
        if self.__bloc.isBlank():
            pm = ColorPicker(self.__bloc.param("r"), self.__bloc.param("g"), self.__bloc.param("b"))
            pm.Changed.connect(partial(self.NodeRefreshRequest.emit, self.__bloc))
            self.__params.append(pm)
            for col, w in enumerate(pm.widgets()):
                self.__param_layout.addWidget(w, 0, col)
            return
        # Parameters fed by a connected input must not be hand-edited.
        to_disable = set()
        for ip in self.__bloc.inputs():
            if ip.hasLinkedParam() and ip.isConnected():
                to_disable.add(ip.linkedParam().name())
        rows = [(p, False) for p in self.__bloc.params(includeExtraParam=False)]
        rows += [(p, True) for p in self.__bloc.extraParams()]
        for row, (p, deletable) in enumerate(rows):
            pm = Parameter(p, deletable=deletable)
            pm.ParameterEdited.connect(self.__update_all_params)
            pm.DeleteRequest.connect(self.__deleteParam)
            self.__params.append(pm)
            enable = p.name() not in to_disable
            for col, w in enumerate(pm.widgets()):
                w.setEnabled(enable)
                self.__param_layout.addWidget(w, row, col)

    def __addParam(self):
        """Run the parameter-creation dialog and add the new extra param."""
        if self.__param_creator.exec_() == QtWidgets.QDialog.Accepted:
            type_class = self.__param_creator.getType()
            name = self.__param_creator.getName()
            if type_class and name:
                self.__bloc.addExtraParam(type_class, name=name)
                self.__refresh()

    def __deleteParam(self, param):
        self.__bloc.removeParam(param)
        self.__refresh()

    def __update_all_params(self):
        # One edited parameter may invalidate expressions on the others.
        for p in self.__params:
            p.refresh()

    def __clearLayout(self, layout):
        """Recursively remove and unparent everything held by `layout`."""
        while True:
            item = layout.takeAt(0)
            if not item:
                break
            sub = item.layout()
            if sub:
                self.__clearLayout(sub)
            w = item.widget()
            if w:
                layout.removeWidget(w)
                w.setParent(None)
| 2.21875 | 2 |
juice_scm_gse/app.py | NadjB/Juice_SCM_GSE | 0 | 12763193 | <filename>juice_scm_gse/app.py
#!/usr/bin/env python3
import sys, os
from functools import partial
import subprocess
#import atexit
import zmq, json
from datetime import datetime
from PySide2.QtGui import QValidator, QRegExpValidator
from PySide2.QtWidgets import QMainWindow, QApplication, QWidget, QMessageBox
from PySide2.QtCore import Signal, QThread, Slot, QObject, QMetaObject, QGenericArgument, Qt
#from juice_scm_gse.arduino_monitor import alimManagement
#from juice_scm_gse.discovery_driver import do_measurements, turn_on_psu, turn_off_psu
import numpy as np
import juice_scm_gse.config as cfg
from juice_scm_gse.gui.settings_pannel import SettingsPannel
from juice_scm_gse.gui.progress_pannel import ProgressPannel
from juice_scm_gse.gui.mainwindow import Ui_MainWindow
from juice_scm_gse import config
from juice_scm_gse.utils import list_of_floats, ureg, Q_, mkdir
from juice_scm_gse.utils.mail import send_mail
import psutil
import logging as log
desktop_entry = """[Desktop Entry]
Version=1.0
Name=JUICE-SCM-EGSE
Comment=JUICE SCM EGSE
Exec={exec}
Icon={icon}
Path={path}
Terminal=false
Type=Application
StartupNotify=true
Categories=Utility;Application;"""
class VoltagesWorker(QThread):
    """Background thread receiving voltage telemetry over ZMQ.

    Subscribes (SUB socket) to the "Voltages" topic published by the
    arduino monitor process and keeps a PAIR socket to send commands back
    (ASIC identification, power-supply enable/disable).
    """
    updateVoltages = Signal(dict)
    restartDisco = Signal()
    signalUpdatePower = Signal(bool)

    def __init__(self, port=9990, portPair=9991):
        QThread.__init__(self)  # create the thread
        self.context = zmq.Context()  # initialize ZMQ
        self.sock = self.context.socket(zmq.SUB)  # subscriber side of a PUB/SUB pair
        self.sock.connect(f"tcp://localhost:{port}")
        self.sock.setsockopt(zmq.SUBSCRIBE, b"Voltages")  # subscribe to topic: Voltages
        self.sockPair = self.context.socket(zmq.PAIR)
        self.sockPair.connect(f"tcp://localhost:{portPair}")
        self.alimsEnabled = False

    def asics(self, asic="XXX"):
        """Tell the monitor process which ASIC serial number is under test."""
        asicID = f"ASIC_JUICEMagic3_SN_{asic}"
        self.sockPair.send(asicID.encode())
        print(f"{asicID} sent")

    def startAlims(self, wantedState):
        """Toggle the power supplies via the PAIR socket.

        NOTE(review): ``alimsEnabled`` is toggled unconditionally, so
        calling with wantedState == "Disable" while already disabled sends
        "Disable" yet flips the flag to True -- confirm intended. Also note
        this is connected to QPushButton.clicked, which passes a bool, so
        the string comparison below never matches via the button.
        """
        if self.alimsEnabled:
            message = f"Disable alims"
        elif wantedState == "Disable":
            message = f"Disable alims"
        else:
            message = f"Enable alims"
        self.sockPair.send(message.encode())
        self.alimsEnabled = not self.alimsEnabled
        self.signalUpdatePower.emit(self.alimsEnabled)

    def __del__(self):
        del self.sock
        del self.sockPair
        del self.context

    def run(self):
        """Poll the SUB socket, scale raw readings, emit them as a dict."""
        while True:
            try:
                string = self.sock.recv(flags=zmq.NOBLOCK)  # receive the Voltages frame
                topic, data = string.split()
                values = data.decode().split(',')
                # Map CSV fields onto channel names. The first CSV token is
                # skipped (values[1:]) -- presumably a frame counter; TODO confirm.
                values = {
                    key: float(value) for key, value in zip(
                        ["VDD_CHX", "M_CHX", "V_BIAS_LNA_CHX", "S_CHX", "RTN_CHX",
                         "VDD_CHY", "M_CHY", "V_BIAS_LNA_CHY", "S_CHY", "RTN_CHY",
                         "VDD_CHZ", "M_CHZ", "V_BIAS_LNA_CHZ", "S_CHZ", "RTN_CHZ",
                         "ADC_VDD_CHX", "ADC_M_CHX", "ADC_V_BIAS_LNA_CHX", "ADC_S_CHX", "ADC_RTN_CHX",
                         "ADC_VDD_CHY", "ADC_M_CHY", "ADC_V_BIAS_LNA_CHY", "ADC_S_CHY", "ADC_RTN_CHY",
                         "ADC_VDD_CHZ", "ADC_M_CHZ", "ADC_V_BIAS_LNA_CHZ", "ADC_S_CHZ", "ADC_RTN_CHZ",
                         "CONSO_CHX", "CONSO_CHY", "CONSO_CHZ",
                         "ALIM_CHX", "ALIM_CHY", "ALIM_CHZ"],
                        values[1:])
                }
                # Convert raw counts to physical units: VDD rails scale to
                # ~6 V full range (12-bit for ADC_* keys, 10-bit otherwise),
                # other rails to 5 V; CONSO/ALIM arrive in mA/mV.
                for key, value in values.items():
                    if "VDD" in key:
                        if "ADC" in key:
                            values[key] = (6.0 + 0.023) / 4096. * value
                        else:
                            values[key] = (6.0 + 0.023) / 1024. * value
                    elif "CONSO" in key:
                        values[key] = value / 1000.
                    elif "ALIM" in key:
                        values[key] = value / 1000.
                    else:
                        if "ADC" in key:
                            values[key] = 5. / 4096. * value
                        else:
                            values[key] = 5. / 1024. * value
                # Derived offsets relative to the mid-point M of each channel.
                for ch in ["X", "Y", "Z"]:
                    values[f"Offset_S_CH{ch}"] = values[f"S_CH{ch}"] - values[f"M_CH{ch}"]
                    values[f"Offset_RTN_CH{ch}"] = values[f"RTN_CH{ch}"] - values[f"M_CH{ch}"]
                    values[f"ADC_Offset_S_CH{ch}"] = values[f"ADC_S_CH{ch}"] - values[f"ADC_M_CH{ch}"]
                    values[f"ADC_Offset_RTN_CH{ch}"] = values[f"ADC_RTN_CH{ch}"] - values[f"ADC_M_CH{ch}"]
                self.updateVoltages.emit(values)  # publish the refreshed values
            except zmq.ZMQError:
                pass  # no message pending (NOBLOCK); just poll again
            if self.isInterruptionRequested():
                return
            QThread.msleep(10)
            # NOTE(review): processEvents() from a worker thread is unusual
            # in Qt -- confirm this is safe here.
            QApplication.processEvents()
class ArduinoStatusWorker(QThread):
    """Launches the arduino-monitor subprocess and relays its status text.

    Subscribes to the "Status" ZMQ topic and forwards each message via
    ``updateStatus`` so the main window can show it in the status bar.
    """
    updateStatus = Signal(str)

    def __init__(self, port=9990):
        QThread.__init__(self)
        self.context = zmq.Context()
        self.sock = self.context.socket(zmq.SUB)
        self.sock.connect(f"tcp://localhost:{port}")
        self.sock.setsockopt(zmq.SUBSCRIBE, b"Status")
        self.arduino_process = None
        self.arduino_process_started = False

    def __del__(self):
        del self.sock
        del self.context

    def _arduino_process_is_alive(self):
        """True while the monitor subprocess exists and has not exited."""
        if self.arduino_process is None:
            return False
        if self.arduino_process.poll() is None:
            return True
        return False

    def start(self):
        """(Re)launch the monitor subprocess, then start this thread."""
        if not self.arduino_process_started or not self._arduino_process_is_alive():
            for proc in psutil.process_iter():
                # check whether the process name matches
                if 'arduino_monitor.py' in proc.cmdline():
                    proc.kill()  # kill any stale monitor so we start clean
            # Prefer the local script when present, otherwise the installed
            # console entry point.
            if os.path.exists('arduino_monitor.py'):
                self.arduino_process = subprocess.Popen(['python', 'arduino_monitor.py'])
            else:
                # NOTE(review): 'Ardiuno' spelling -- confirm it matches the
                # installed console-script name.
                self.arduino_process = subprocess.Popen(['Juice_Ardiuno_Monitor'])
            self.arduino_process_started = True
        QThread.start(self)

    def stop(self):
        """Kill the monitor subprocess if we started it."""
        if self.arduino_process_started:
            self.arduino_process_started = False
            self.arduino_process.kill()

    def run(self):
        while True:
            try:
                string = self.sock.recv(flags=zmq.NOBLOCK)  # receive status messages
                topic, data = string.split()
                self.updateStatus.emit("Temperatures and Voltages monitor: " + data.decode())
            except zmq.ZMQError:
                pass  # nothing pending; poll again
            if self.isInterruptionRequested():
                self.stop()
                return
            QThread.msleep(10)
class ApplicationWindow(QMainWindow):
    """Main window of the JUICE SCM EGSE GUI.

    Wires the widgets to the background ZMQ workers (arduino status and
    voltage monitoring), manages ASIC identification / telemetry recording
    and the burn-in step selection.

    (Legacy discovery-driver / temperature-worker wiring that was commented
    out has been removed; see VCS history.)
    """
    Launch_Measurements = Signal(str)
    burninStep = "None"  # current burn-in phase: "None", "PreBurnIn" or "PostBurnIn"

    def __init__(self, parent=None):
        super(ApplicationWindow, self).__init__(parent)
        self.settings_ui = SettingsPannel()
        self.progress_pannel = ProgressPannel()
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        self.ui.actionSettings.triggered.connect(self.settings_ui.show)
        self.asicsList = []               # serial numbers entered so far
        self.acknowledgedAsicID = False   # True once the operator confirmed the current SN
        self.asicPowered = False
        self.asicFileName = ""
        self.path = cfg.global_workdir.get() + "/ASICs"
        mkdir(self.path)  # create the ASICs folder in the working directory
        # NOTE: attribute name keeps the original spelling ("measurement").
        self.measuementRequested = False
        self.ui.pathWorkDir.setText(cfg.global_workdir.get())
        self.arduinoStatusWorker = ArduinoStatusWorker()
        self.arduinoStatusWorker.updateStatus.connect(self.ui.statusbar.showMessage)
        self.arduinoStatusWorker.start()
        self.arduinoStatusWorker.moveToThread(self.arduinoStatusWorker)
        self.voltagesWorker = VoltagesWorker()
        self.voltagesWorker.updateVoltages.connect(self.updateVoltages)
        self.voltagesWorker.updateVoltages.connect(self.asicRecording)
        self.voltagesWorker.start()
        self.voltagesWorker.moveToThread(self.voltagesWorker)
        self.ui.power_button.clicked.connect(self.voltagesWorker.startAlims, Qt.QueuedConnection)
        self.voltagesWorker.signalUpdatePower.connect(self.updatePowerButton)
        self.ui.asicSN.setValidator(QRegExpValidator("[0-9]{3}"))  # serial number is exactly 3 digits
        # NOTE(review): debug print left in by the author ("Declenché" = "triggered").
        self.ui.asicSN.returnPressed.connect(lambda: print("Declenché"))
        self.ui.asicSN.returnPressed.connect(lambda: self.asicManagement(self.ui.asicSN.text()))
        self.ui.asicSN.textChanged.connect(lambda: self.asicManagement(False))
        self.ui.Launch_Measurements.clicked.connect(self.requestMeasurement)
        self.ui.asicsListe.currentIndexChanged.connect(lambda: self.ui.asicSN.setText(self.ui.asicsListe.currentText()))
        self.ui.asicsListe.currentIndexChanged.connect(lambda: self.asicManagement(False))
        self.ui.ButtonBurninStepPre.clicked.connect(lambda: self.burninStepRecorder("PreBurnIn"))
        self.ui.ButtonBurninStepPost.clicked.connect(lambda: self.burninStepRecorder("PostBurnIn"))

    def __del__(self):
        # Ask both workers to stop and wait for them before tearing down.
        for thr in [self.arduinoStatusWorker, self.voltagesWorker]:
            thr.requestInterruption()
            while thr.isRunning():
                QThread.msleep(10)
        del self.arduinoStatusWorker
        del self.voltagesWorker
        self.close()

    def asicManagement(self, asicID="Rien n'est passé"):
        """Handle a new or changed ASIC serial number.

        `asicID` is either the serial string entered by the operator, or
        False (sent by textChanged/currentIndexChanged) to invalidate the
        current acknowledgement.
        """
        print(asicID)
        if asicID in self.asicsList:
            choice = QMessageBox.question(self, 'Conflict',
                                          f"{asicID}\n\nThis ID already exist, begin anyway?",
                                          QMessageBox.Yes | QMessageBox.No)
            if choice == QMessageBox.Yes:
                self.acknowledgedAsicID = True
                # Dump file stamped with the current date.
                # NOTE(review): unlike the new-ID branch below, this name
                # omits the burn-in step -- confirm intended.
                self.asicFileName = f"{self.path}/ASIC_JUICEMagic3_SN_{asicID}-{str(datetime.now())}.txt"
                print(self.asicFileName)
                self.voltagesWorker.asics(asicID)
                self.ui.asicSN.setStyleSheet("QLineEdit {background-color: green;}")
            else:
                pass
        elif asicID == False:
            # Invalidate the acknowledgement (callers pass the literal False).
            self.acknowledgedAsicID = False
            self.ui.asicSN.setStyleSheet('')
        else:
            # TODO: confirmation dialog to be removed (per original author note).
            choice = QMessageBox.question(self, 'Confirmation',
                                          f"The ID you entered is \n\n{asicID}\n\nconfirm?",
                                          QMessageBox.Yes | QMessageBox.No)
            if choice == QMessageBox.Yes:
                self.asicsList.append(asicID)
                self.ui.asicsListe.clear()  # brute force, but works
                self.ui.asicsListe.addItems(self.asicsList[::-1])
                self.acknowledgedAsicID = True
                # Dump file stamped with the current date and burn-in step.
                self.asicFileName = f"{self.path}/ASIC_JUICEMagic3_SN_{asicID}_{self.burninStep}-{str(datetime.now())}.txt"
                print(self.asicFileName)
                self.voltagesWorker.asics(f"{asicID}_{self.burninStep}")
                self.ui.asicSN.setStyleSheet("QLineEdit {background-color: green;}")
            else:
                pass

    def requestMeasurement(self):
        """Arm telemetry recording (consumed by asicRecording)."""
        self.measuementRequested = True

    def burninStepRecorder(self, stepOfBurnin):
        """Record the selected burn-in phase and highlight its button."""
        self.burninStep = stepOfBurnin
        self.ui.asicSN.setEnabled(True)
        self.asicManagement(False)
        if "Pre" in stepOfBurnin:
            self.ui.ButtonBurninStepPre.setStyleSheet('QPushButton {background-color: green;}')
            self.ui.ButtonBurninStepPost.setStyleSheet('')
        elif "Post" in stepOfBurnin:
            self.ui.ButtonBurninStepPost.setStyleSheet('QPushButton {background-color: green;}')
            self.ui.ButtonBurninStepPre.setStyleSheet('')

    def asicRecording(self, values):
        """Append the telemetry dict to the current ASIC dump file while a
        measurement is running; manage widget enable states otherwise."""
        if self.acknowledgedAsicID and self.asicPowered:
            self.ui.asicSN.setDisabled(True)
            self.ui.asicsListe.setDisabled(True)
            self.ui.Launch_Measurements.setEnabled(True)
            if self.measuementRequested:
                self.ui.Launch_Measurements.setDisabled(True)
                self.ui.Launch_Measurements.setStyleSheet('QPushButton {background-color: #69ff69;}')
                with open(self.asicFileName, 'a') as out:
                    out.write(str(datetime.now()) + '\t')
                    for channel, value in values.items():
                        out.write(f"{channel}: {value}, ")
                    out.write('\n')
                print("recording")
        else:
            self.ui.Launch_Measurements.setStyleSheet('')
            self.ui.Launch_Measurements.setDisabled(True)
            # Bug fix: was `is not "None"` -- identity comparison with a
            # string literal (SyntaxWarning, relies on CPython interning).
            if self.burninStep != "None":
                self.ui.asicSN.setEnabled(True)
                self.ui.asicsListe.setEnabled(True)

    def updateVoltages(self, values):
        """Mirror the telemetry dict onto the LCD widgets.

        NOTE(review): displays only refresh after a measurement has been
        requested -- confirm intended. (Offset read-outs are intentionally
        not displayed.)
        """
        if self.measuementRequested:
            for ch in ["X", "Y", "Z"]:
                self.ui.__dict__[f"CH{ch}_VDD"].display(values[f"VDD_CH{ch}"])
                self.ui.__dict__[f"CH{ch}_BIAS"].display(values[f"V_BIAS_LNA_CH{ch}"])
                self.ui.__dict__[f"CH{ch}_M"].display(values[f"M_CH{ch}"])
                self.ui.__dict__[f"CH{ch}_RTN"].display(values[f"RTN_CH{ch}"])
                self.ui.__dict__[f"CH{ch}_S"].display(values[f"S_CH{ch}"])
                self.ui.__dict__[f"CH{ch}_I"].display(values[f"CONSO_CH{ch}"])
                self.ui.__dict__[f"CH{ch}_V"].display(values[f"ALIM_CH{ch}"])
                self.ui.__dict__[f"CH{ch}_VDD_ADC"].display(values[f"ADC_VDD_CH{ch}"])
                self.ui.__dict__[f"CH{ch}_BIAS_ADC"].display(values[f"ADC_V_BIAS_LNA_CH{ch}"])
                self.ui.__dict__[f"CH{ch}_M_ADC"].display(values[f"ADC_M_CH{ch}"])
                self.ui.__dict__[f"CH{ch}_RTN_ADC"].display(values[f"ADC_RTN_CH{ch}"])
                self.ui.__dict__[f"CH{ch}_S_ADC"].display(values[f"ADC_S_CH{ch}"])

    def updatePowerButton(self, powered):
        """Reflect the power-supply state on the power button and re-arm or
        reset the ASIC acknowledgement accordingly."""
        if powered:
            self.ui.power_button.setText("Kill")
            self.ui.power_button.setStyleSheet('QPushButton {background-color: green;}')
            self.asicPowered = True
            if self.asicsList:
                # Re-acknowledge automatically when the displayed SN is the
                # last one that was confirmed.
                if self.asicsList[-1] == self.ui.asicSN.text():
                    self.acknowledgedAsicID = True
                    self.ui.asicSN.setStyleSheet("QLineEdit {background-color: green;}")
        else:
            self.ui.power_button.setText("Turn On")
            self.ui.power_button.setStyleSheet('')
            self.asicManagement(False)
            self.measuementRequested = False
            self.asicPowered = False

    def start_measurement(self):
        """Create a run directory, write a password-scrubbed manifest and
        send a "measurement started" notification e-mail."""
        now = str(datetime.now())
        # NOTE(review): the leading '/' forces an absolute path even when
        # global_workdir is relative -- confirm intended.
        work_dir = f'/{config.global_workdir.get()}/run-{now}/'
        self.Launch_Measurements.emit(work_dir)
        mkdir(work_dir)
        # Copy the config, masking any entry whose name contains 'pass'.
        manifest = {
            section_name: {name: (value if 'pass' not in name else '******') for name, value in section_values.items()}
            for section_name, section_values in config._config.items()}
        manifest["result_dir"] = work_dir
        manifest["notes"] = self.ui.notes.toPlainText()
        manifest["start_time"] = now
        with open(f'{work_dir}/config.json', 'w') as out:
            out.write(json.dumps(manifest))
        # NOTE(review): .replace(' ', ' ') is a no-op -- probably meant
        # '&nbsp;'; kept unchanged to preserve behavior.
        manifest_html = json.dumps(manifest, indent=4).replace(' ', ' ').replace(',\n', ',<br>').replace('\n',
                                                                                                         '<br>')
        html = f'''
        <!DOCTYPE html>
        <html>
        <body>
        <h1>Measurement started at {now}</h1>
        <p>
        {manifest_html}
        </p>
        </body>
        </html>
        '''
        send_mail(server=config.mail_server.get(), sender="<EMAIL>",
                  recipients=config.mail_recipients.get(), subject="Starting measurement", html_body=html,
                  username=config.mail_login.get(), password=config.mail_password.get(), port=465, use_tls=True)

    def quit_app(self):
        """Close the main window (ends the application)."""
        self.close()
def main(args=sys.argv):
    """GUI entry point.

    Installs the freedesktop launcher on first run, configures file +
    stdout logging, then starts the Qt event loop (never returns; exits
    via sys.exit).
    """
    lib_dir = os.path.dirname(os.path.realpath(__file__))
    bin_dir = lib_dir + "/../../../../bin"
    desktop_entry_path = os.path.expanduser("~") + '/.local/share/applications/Juice-scm-egse.desktop'
    if not os.path.exists(desktop_entry_path):
        with open(desktop_entry_path, 'w') as d:
            d.write(desktop_entry.format(exec=bin_dir + "/Juice_SCM_GSE", icon="juice-scm-egse.svg", path=bin_dir))
    mkdir(config.log_dir())
    # NOTE(review): str(datetime.now()) puts spaces/colons in the log file
    # name -- fine on Linux, invalid on Windows.
    log.basicConfig(filename=f'{config.log_dir()}/gui-{datetime.now()}.log', format='%(asctime)s - %(message)s',
                    level=log.INFO)
    log.getLogger().addHandler(log.StreamHandler(sys.stdout))
    app = QApplication(args)
    application = ApplicationWindow()
    application.show()
    sys.exit(app.exec_())


if __name__ == "__main__":
    main()
| 1.765625 | 2 |
tests/test_gtfsrt.py | carsonyl/tilapya | 6 | 12763194 | import pytest
from requests import codes
from tilapya.errors import TransLinkAPIError
from tilapya.gtfsrt import GTFSRT
from .conftest import remove_response_headers_func
# Apply VCR to all tests in this file.
pytestmark = pytest.mark.vcr(before_record_response=remove_response_headers_func('Set-Cookie'))
@pytest.fixture
def authed_gtfs(valid_api_key):
    """GTFSRT client authenticated with the shared valid-API-key fixture."""
    return GTFSRT(api_key=valid_api_key)
def test_download_realtime(authed_gtfs):
    """The trip-updates feed downloads with a non-empty body."""
    assert authed_gtfs.trip_updates().content


def test_download_position(authed_gtfs):
    """The vehicle-positions feed downloads with a non-empty body."""
    assert authed_gtfs.position().content


def test_download_alerts(authed_gtfs):
    """The service-alerts feed downloads with a non-empty body."""
    assert authed_gtfs.service_alerts().content
def test_gtfsrt_invalid_key():
    """A bad key raises TransLinkAPIError carrying the 403 response and
    empty code/message fields."""
    with pytest.raises(TransLinkAPIError) as info:
        GTFSRT(api_key='foobar').trip_updates()
    assert info.value.response.status_code == codes.forbidden
    assert not info.value.code
    assert not info.value.message
| 2.0625 | 2 |
bigsql/types.py | wabscale/bigsql | 1 | 12763195 | <filename>bigsql/types.py
# from . import Sql
from dataclasses import dataclass
from scanf import scanf
@dataclass
class DataType:
    """
    Base SQL datatype descriptor. Concrete types subclass this and set the
    ``name`` class attribute to the SQL keyword.

    Instantiating with a ``length`` turns the name into a parameterized
    type such as ``VARCHAR(128)``.
    """
    name: str = None

    def __init__(self, length=None):
        # This explicit __init__ takes precedence over the one @dataclass
        # would otherwise generate.
        if length is not None:
            self.name = f'{self.name}({length})'
class Integer(DataType):
    """SQL INT (resolve_type also maps tinyint here)."""
    name: str='INT'


class Text(DataType):
    """SQL TEXT."""
    name: str='TEXT'


class DateTime(DataType):
    """SQL DATETIME."""
    name: str='DATETIME'


class TimeStamp(DataType):
    """SQL TIMESTAMP."""
    name: str='TIMESTAMP'


class Varchar(DataType):
    """SQL VARCHAR; instantiate with a length, e.g. ``Varchar(128)``."""
    name: str='VARCHAR'
class StaticColumn:
    """
    Describes a user-defined column on a statically declared model.

    Holds the column datatype plus schema options (primary key, nullable,
    foreign-key reference, ...) and renders the column / foreign-key DDL
    fragments used when db.create_all() generates table-creation SQL.
    """
    column_name: str = None

    def __init__(self, data_type, **kwargs):
        """
        :param data_type: a DataType subclass (or instance) exposing ``.name``
        :param kwargs: schema options -- primary_key, nullable,
            references (``"table.column"``), on_delete, auto_increment, unique
        """
        self.data_type = data_type
        default_attrs = {
            'primary_key': False,
            'nullable': False,
            'references': None,
            'on_delete': None,
            'auto_increment': False,
            'unique': False
        }
        # Fall back to the defaults for any option the caller omitted.
        for attr_name, default_value in default_attrs.items():
            setattr(self, attr_name, kwargs.get(attr_name, default_value))
        if self.references is not None:
            # "table.column" -> (table, column)
            self.foreign_table, self.foreign_column = scanf('%s.%s', self.references)

    def __str__(self):
        # NOTE: valid only after set_name() has bound table/column names.
        return '`{}`.`{}`'.format(self.table_name, self.column_name)

    def set_name(self, name, table_name):
        """Bind the column to its name and owning table; returns self."""
        self.column_name, self.table_name = name, table_name
        return self

    @staticmethod
    def resolve_type(type_name):
        """Map a MySQL information_schema type name to a DataType."""
        return {
            'int': Integer,
            'tinyint': Integer,
            'text': Text,
            # NOTE(review): a shared Varchar(128) *instance*, unlike the
            # class entries above -- harmless since its name is fixed.
            'varchar': Varchar(128),
            'timestamp': DateTime,
            'datetime': DateTime,
        }[type_name]

    @property
    def sql(self):
        """Column-definition fragment, e.g. `` `id` INT AUTO_INCREMENT NOT NULL``."""
        base = '`{name}` {data_type}{auto_increment}{nullable}'
        return base.format(
            name=self.column_name,
            data_type=self.data_type.name,
            auto_increment=' AUTO_INCREMENT' if self.auto_increment else '',
            nullable=' NOT NULL' if not self.nullable else ' NULL'
        )

    @property
    def ref_sql(self):
        """FOREIGN KEY clause for this column, or '' when it references nothing."""
        base = 'FOREIGN KEY ({name}) REFERENCES {foreign_table}({foreign_column}){on_delete}'
        return base.format(
            name=self.column_name,
            foreign_table=self.foreign_table,
            foreign_column=self.foreign_column,
            # Bug fix: leading space added so the clause does not render as
            # "...(col)ON DELETE ...".
            on_delete=' ON DELETE {}'.format(
                self.on_delete
            ) if self.on_delete is not None else ''
        ) if self.references is not None else ''
@dataclass
class DynamicColumn(StaticColumn):
    """Column reflected at runtime from an existing database table.

    Built from information_schema rows: the MySQL type name is mapped to a
    DataType via resolve_type, and ``primary_key`` arrives as the raw key
    flag string ('PRI' marks primary-key columns).
    """
    def __init__(self, table_name, column_name, data_type, primary_key):
        super(DynamicColumn, self).__init__(
            self.resolve_type(data_type),
            primary_key=primary_key == 'PRI'
        )
        self.set_name(column_name, table_name)
RottenTomatoes/rt/rt/spiders/movie.py | hovhannest/TMScrappers | 0 | 12763196 | <reponame>hovhannest/TMScrappers
# -*- coding: utf-8 -*-
import scrapy
from rt.items import *
import re
class MovieSpider(scrapy.Spider):
name = 'movie'
allowed_domains = ['rottentomatoes.com']
# start_urls = ['https://www.rottentomatoes.com/m/blade_runner_2049']
start_urls = ['https://www.rottentomatoes.com/tv/good_place/s01/e01']
def parse(self, response):
# init
movie = MovieItem()
movie['name'] = ''
movie['sourceURL'] = ''
movie['year'] = ''
movie['info'] = ''
movie['Rating'] = ''
movie['Genre'] = ''
movie['DirectedBy_url'] = ''
movie['DirectedBy'] = ''
movie['WrittenBy_url'] = ''
movie['WrittenBy'] = ''
movie['InTheaters'] = ''
movie['BoxOffice'] = ''
movie['Runtime'] = ''
movie['Studio'] = ''
movie['webSyte'] = ''
movie['posterImage'] = ''
movie['cast_url'] = ''
movie['cast'] = ''
movie['cast_role'] = ''
movie['RTMainScore'] = ''
movie['RTTopScore'] = ''
movie['RTCAvRating'] = ''
movie['RTFresh'] = ''
movie['RTRotten'] = ''
movie['RTAAvRating'] = ''
movie['RTUserRatings'] = ''
movie["sourceURL"] = response.url
if '/m/' in movie["sourceURL"]:
movie['TVShow'] = 0
else:
movie['TVShow'] = 1
try:
movie['Episode'] = int(re.findall(r'http\S*\/\/\S*\/[e][0]*(\d*)[\/]*', movie['sourceURL'])[0])
if movie['Episode'] is None:
movie['Episode'] = -1
except:
movie['Episode'] = -1
try:
movie['Season'] = int(re.findall(r'http\S*\/\/\S*\/[s][0]*(\d*)[\/]*', movie['sourceURL'])[0])
if movie['Season'] is None:
movie['Season'] = -1
except:
movie['Season'] = -1
try:
movie["name"] = re.findall(r'\S+.*', response.css('h1[id=movie-title]::text').extract_first())[0]
except:
try:
movie["name"] = re.findall(r'\S+.*', response.css('div.seriesHeader h1::text').extract_first())[0]
except:
try:
movie["name"] = re.findall(r'\S+.*', response.css('div.super_series_header h1::text').extract_first())[0]
except:
try:
movie["name"] = re.findall(r'\S+.*', response.css('h1.movie-title::text').extract_first())[0]
except:
movie["name"] = ""
try:
movie["year"] = re.findall('[(](\d\d\d\d)[)]', response.css('h1[id=movie-title] span::text').extract_first())[0]
except:
movie["year"] = ""
movie["info"] = re.findall(r'\S+.*', response.css("div[id=movieSynopsis]::text").extract_first())[0]
try:
intoList = response.css('ul.content-meta')[0]
for li in intoList.css('li'):
liName = li.css('div.meta-label::text').extract_first()
if('Rating' in liName):
movie['Rating'] = li.css('div.meta-value::text').extract_first()
elif 'Genre' in liName:
gl = ""
for genre in li.css('div.meta-value a::text').extract():
if gl != "":
gl += ","
gl += re.findall(r'\S+.*', genre)[0]
movie["Genre"] = gl
elif 'Directed' in liName:
ll = ""
nl = ""
for a in li.css('div.meta-value a'):
if ll != "" or nl != "":
ll += ","
nl += ","
name = a.css("::text").extract_first()
url = a.css("::attr(href)").extract_first()
# person = Person()
# person['name'] = name
# person['url'] = url
# yield person
ll += url
nl += name
movie["DirectedBy_url"] = ll
movie["DirectedBy"] = nl
elif 'Written' in liName:
ll = ""
nl = ""
for a in li.css('div.meta-value a'):
if ll != "" or nl != "":
ll += ","
nl += ","
name = a.css("::text").extract_first()
url = a.css("::attr(href)").extract_first()
# person = Person()
# person['name'] = name
# person['url'] = url
# yield person
ll += url
nl += name
movie["WrittenBy_url"] = ll
movie["WrittenBy"] = nl
elif 'In Theaters' in liName:
movie["InTheaters"] = li.css('div.meta-value time::attr(datetime)').extract_first()
elif 'Box Office' in liName:
movie["BoxOffice"] = int(re.sub(r'\D+', '', li.css('div.meta-value::text').extract_first()))
elif 'Runtime' in liName:
movie["Runtime"] = li.css('div.meta-value time::attr(datetime)').extract_first()
elif 'Studio' in liName:
ll = ""
nl = ""
for a in li.css('div.meta-value a'):
if ll != "" or nl != "":
ll += ","
nl += ","
name = a.css("::text").extract_first()
url = a.css("::attr(href)").extract_first()
# person = Person()
# person['name'] = name
# person['url'] = url
# yield person
ll += url
nl += name
movie["webSyte"] = ll
movie["Studio"] = nl
except:
pass
movie["posterImage"] = response.css('img.posterImage::attr(src)').extract_first()
ll = ""
nl = ""
cl = ""
for div in response.css('div.castSection div div.media-body'):
if ll != "" or nl != "":
ll += ","
nl += ","
cl += ","
name = re.findall(r'\S+.*', div.css('a span::text').extract_first())[0]
url = div.css('a::attr(href)').extract_first()
# person = Person()
# person['name'] = name
# person['url'] = url
# yield person
ll += url
nl += name
ccl = ""
for s in div.css('span.characters'):
if ccl != "":
ccl += "|"
ccl += s.css('::text').extract_first()
cl += ccl
movie["cast_url"] = ll
movie["cast"] = nl
movie["cast_role"] = cl #div.css('span.characters::text').extract_first()
allReviesDiv = response.css('div#all-critics-numbers')
topReviesDiv = response.css('div#top-critics-numbers')
currDiv = allReviesDiv
rateSpan = currDiv.css('span.meter-value')
try:
movie["RTMainScore"] = int(rateSpan.css('span::text').extract_first())
except:
movie["RTMainScore"] = 0.0;
scoresDiv = currDiv.css('div#scoreStats')
scds = scoresDiv.css('div.superPageFontColor')
if len(scds) >= 4:
RTCAvRating = scoresDiv.css('div.superPageFontColor').extract_first()
movie["RTCAvRating"] = float(re.search(r'.*(\d[.]\d)', RTCAvRating).group(0))
try:
movie["RTFresh"] = int(scds[2].css('span::text').extract()[1])
except:
movie["RTFresh"] = 0
try:
movie["RTRotten"] = int(scds[3].css('span::text').extract()[1])
except:
movie["RTRotten"] = 0
currDiv = topReviesDiv
rateSpan = currDiv.css('span.meter-value')
try:
movie["RTTopScore"] = int(rateSpan.css('span::text').extract_first())
except:
movie["RTTopScore"] = 0
currDiv = response.css('div.audience-score')
try:
movie["RTAAvRating"] = float(currDiv.css('span::text').extract_first().replace('%', ''))
except:
movie["RTAAvRating"] = 0.0
currDiv = response.css('audience-info')
try:
movie["RTUserRatings"] = int(currDiv.css('div::text').extract()[1].replace(',', ''))
except:
movie["RTUserRatings"] = 0
yield movie | 2.65625 | 3 |
Regression/LinearRegression.py | ssklykov/collection_numCalc | 0 | 12763197 | <reponame>ssklykov/collection_numCalc
# -*- coding: utf-8 -*-
"""
Linear regression using the deviated randomly linear dependency (y(x) = a*x + b)
Developed in the Spyder IDE using Kite
@author: ssklykov
"""
# %% Import section
from SampleValues import GenerateSample
import numpy as np
from ComparisonLinPlots import PlotWErrTwo
# %% Controlling / modelling values
a = 2; b = 1 # From dependency y(x) = a*x + b
n = 10 # number of sampling points (modelling measures from some awesome experiment)
nDigits = 2 # Precision of calculation / rounding
percentError = 30 # An error controlling deviations in generated values [%]
xMin = 0; xMax = 5 # Controlling minimal and maximal values from an interval [a,b]
nSamples = 11 # 10 + 1 samples, specify always in this manner, because np.linspace(a,b,n) - including "a" from the interval
# %% Generating the sample values - from a linear dependency
values = GenerateSample(a, b, xMin, xMax, nSamples, percentError, nDigits)
(x, yMean, yStD) = values.generateSampleValues()
# %% Linear Regression
def LinearRegression(x, yMean, yStD, nDigits):
# Interim values calculation from the book (Sx, Sxx, etc)
S = 0.0; Sx = 0.0; Sxx = 0.0; Sy = 0.0; Sxy = 0.0; sigma2 = 0.0;
aRegressed = 0; bRegressed = 0
for i in range(len(x)):
sigma2 = 1 / pow(yStD[i], 2)
S += sigma2; Sx += x[i]*sigma2; Sy += yMean[i]*sigma2
Sxx += pow(x[i], 2)*sigma2; Sxy += (x[i]*yMean[i])*sigma2
delta = (S*Sxx) - pow(Sx, 2)
if (delta != 0):
aRegressed = (S*Sxy - Sx*Sy)/delta; bRegressed = (Sy*Sxx - Sx*Sxy)/delta
aRegressed = round(aRegressed, nDigits); bRegressed = round(bRegressed,nDigits)
return (aRegressed, bRegressed)
# %% Regression evaluation
(aR, bR) = LinearRegression(x, yMean, yStD, nDigits)
# %% Plotting
# Generation of arrays for plotting
nRegressed = (nSamples-1)*10 + 1 # For plotting - calculation in 10 times more points between specified interval [xMin,xMax]
xRegressed = np.linspace(xMin, xMax, nRegressed)
yRegressed = np.zeros(nRegressed)
for i in range(nRegressed):
yRegressed[i] = aR*xRegressed[i] + bR
# Plot
PlotWErrTwo(x, yMean, yStD, xRegressed, yRegressed, "Linear Regression results")
| 2.875 | 3 |
main.py | bl-msch-c220-max-park/01-Interactive-Fiction | 0 | 12763198 | <reponame>bl-msch-c220-max-park/01-Interactive-Fiction
import sys
assert sys.version_info >= (3,9), "This script requires at least Python 3.9"
passages = [
{
"name": "Headquarters",
"tags": "",
"id": "1",
"text": "You are at Space Headquaters. Your objective today is to find the infamous space robber throughout the space city. Where would you like to go? \n\n[[Jupiter Bar]]\n[[Cadet Cafe]]\n[[Astronomy Night Club]]\n[[Moonstar Hotel]]",
"links": [
{
"linkText": "Jupiter Bar",
"passageName": "Jupiter Bar",
"original": "[[Jupiter Bar]]"
},
{
"linkText": "Cadet Cafe",
"passageName": "Cadet Cafe",
"original": "[[Ct Cafe]]ade"
},
{
"linkText": "Astronomy Night Club",
"passageName": "Astronomy Night Club",
"original": "[[Astronomy Night Club]]"
},
{
"linkText": "Moonstar Hotel",
"passageName": "Moonstar Hotel",
"original": "[[Moonstar Hotel]]"
}
],
"hooks": [],
"cleanText": "You are at Space Headquaters. Your objective today is to find the infamous space robber throughout the space city. Where would you like to go?"
},
{
"name": "<NAME>",
"tags": "",
"id": "2",
"text": "You are at the Jupiter Bar. The owner of the bar said to you that the space robber stopped by but left a while ago but not sure where. Where would you like to go now?\n\n[[Cadet Cafe]] \n[[Astronomy Night Club]] \n[[Moonstar Hotel]]",
"links": [
{
"linkText": "Cadet Cafe",
"passageName": "Cadet Cafe",
"original": "[[Cadet Cafe]]"
},
{
"linkText": "Astronomy Night Club",
"passageName": "Astronomy Night Club",
"original": "[[Astronomy Night Club]]"
},
{
"linkText": "Moonstar Hotel",
"passageName": "Moonstar Hotel",
"original": "[[Moonstar Hotel]]"
}
],
"hooks": [],
"cleanText": "You are at the Jupiter Bar. The owner of the bar said to you that the space robber stopped by but left a while ago but not sure where. Where would you like to go now?"
},
{
"name": "Cadet Cafe",
"tags": "",
"id": "3",
"text": "You are at the Cadet Cafe. People around the cafe said that they never saw the space robber. But might have not gone too far. Where would you like to go next?\n\n[[Jupiter Bar]] \n[[Astronomy Night Club]] \n[[Moonstar Hotel]]",
"links": [
{
"linkText": "Jupiter Bar",
"passageName": "Jupiter Bar",
"original": "[[Jupiter Bar]]"
},
{
"linkText": "Astronomy Night Club",
"passageName": "Astronomy Night Club",
"original": "[[Astronomy Night Club]]"
},
{
"linkText": "Moonstar Hotel",
"passageName": "Moonstar Hotel",
"original": "[[Moonstar Hotel]]"
}
],
"hooks": [],
"cleanText": "You are at the Cadet Cafe. People around the cafe said that they never saw the space robber. But might have not gone too far. Where would you like to go next?"
},
{
"name": "Astronomy Night Club",
"tags": "",
"id": "4",
"text": "You are at the buzzing Astronomy Night Club! The bouncer said that the space robber was here and pick-pockted a few individuals at the club and ran away through the backdoor. Where would you like to go?\n\n[[Moonstar Hotel]] \n[[Pluto Pizza]]\n[[Earth Arcade]]",
"links": [
{
"linkText": "Moonstar Hotel",
"passageName": "Moonstar Hotel",
"original": "[[Moonstar Hotel]]"
},
{
"linkText": "Pluto Pizza",
"passageName": "Pluto Pizza",
"original": "[[Pluto Pizza]]"
},
{
"linkText": "Earth Arcade",
"passageName": "Earth Arcade",
"original": "[[Earth Arcade]]"
}
],
"hooks": [],
"cleanText": "You are at the buzzing Astronomy Night Club! The bouncer said that the space robber was here and pick-pockted a few individuals at the club and ran away through the backdoor. Where would you like to go?"
},
{
"name": "<NAME>",
"tags": "",
"id": "5",
"text": "You are at the Moonstar Hotel. The conceirge said they never seen a person like the space robber. Might have to go somewhere else. Where would you like to go?\n\n[[Astronomy Night Club]] \n[[Pluto Pizza]] \n[[Earth Arcade]]",
"links": [
{
"linkText": "Astronomy Night Club",
"passageName": "Astronomy Night Club",
"original": "[[Astronomy Night Club]]"
},
{
"linkText": "Pluto Pizza",
"passageName": "Pluto Pizza",
"original": "[[Pluto Pizza]]"
},
{
"linkText": "Earth Arcade",
"passageName": "Earth Arcade",
"original": "[[Earth Arcade]]"
}
],
"hooks": [],
"cleanText": "You are at the Moonstar Hotel. The conceirge said they never seen a person like the space robber. Might have to go somewhere else. Where would you like to go?"
},
{
"name": "<NAME>",
"tags": "",
"id": "6",
"text": "Sit down and have a pizza! The waiter at Pluto Pizza said he stopped by and had a slice but didn't leave that long ago. You're close! Where would you like to go next?\n\n[[Earth Arcade]] \n[[Saturn Pharamcy]]\n[[Astro Library]]",
"links": [
{
"linkText": "Earth Arcade",
"passageName": "Earth Arcade",
"original": "[[Earth Arcade]]"
},
{
"linkText": "Saturn Pharamcy",
"passageName": "Saturn Pharamcy",
"original": "[[Saturn Pharamcy]]"
},
{
"linkText": "Astro Library",
"passageName": "Astro Library",
"original": "[[Astro Library]]"
}
],
"hooks": [],
"cleanText": "Sit down and have a pizza! The waiter at Pluto Pizza said he stopped by and had a slice but didn't leave that long ago. You're close! Where would you like to go next?"
},
{
"name": "Earth Arcade",
"tags": "",
"id": "7",
"text": "Lots of games you can play at Earth Arcade. The owner said the that the space robber took all their coins and left but not sure where to. Where would you like to go next?\n\n[[Pluto Pizza]] \n[[Saturn Pharamcy]] \n[[Astro Library]]",
"links": [
{
"linkText": "Pluto Pizza",
"passageName": "Pluto Pizza",
"original": "[[Pluto Pizza]]"
},
{
"linkText": "Saturn Pharamcy",
"passageName": "Saturn Pharamcy",
"original": "[[Saturn Pharamcy]]"
},
{
"linkText": "Astro Library",
"passageName": "Astro Library",
"original": "[[Astro Library]]"
}
],
"hooks": [],
"cleanText": "Lots of games you can play at Earth Arcade. The owner said the that the space robber took all their coins and left but not sure where to. Where would you like to go next?"
},
{
"name": "<NAME>",
"tags": "",
"id": "8",
"text": "The pharmacist said he stole all their money and left. You might be close on the space robber's tail. Where would you like to go next?\n\n[[Earth Arcade]] \n[[Pluto Pizza]] \n[[Astro Library]] \n[[Rocket Gas Station]]\n[[Houston Headquarters]]",
"links": [
{
"linkText": "Earth Arcade",
"passageName": "Earth Arcade",
"original": "[[Earth Arcade]]"
},
{
"linkText": "Pluto Pizza",
"passageName": "Pluto Pizza",
"original": "[[Pluto Pizza]]"
},
{
"linkText": "Astro Library",
"passageName": "Astro Library",
"original": "[[Astro Library]]"
},
{
"linkText": "Rocket Gas Station",
"passageName": "Rocket Gas Station",
"original": "[[Rocket Gas Station]]"
},
{
"linkText": "Houston Headquarters",
"passageName": "Houston Headquarters",
"original": "[[Houston Headquarters]]"
}
],
"hooks": [],
"cleanText": "The pharmacist said he stole all their money and left. You might be close on the space robber's tail. Where would you like to go next?"
},
{
"name": "<NAME>",
"tags": "",
"id": "9",
"text": "Welcome to the city's library. But its empty. The space robber must of been here but nowhere in sight. Lets keep going! Where to next?\n\n[[Earth Arcade]] \n[[Pluto Pizza]] \n[[Saturn Pharamcy]] \n[[Rocket Gas Station]]\n[[Houston Headquarters]]",
"links": [
{
"linkText": "Earth Arcade",
"passageName": "Earth Arcade",
"original": "[[Earth Arcade]]"
},
{
"linkText": "Pluto Pizza",
"passageName": "Pluto Pizza",
"original": "[[Pluto Pizza]]"
},
{
"linkText": "Saturn Pharamcy",
"passageName": "Saturn Pharamcy",
"original": "[[Saturn Pharamcy]]"
},
{
"linkText": "Rocket Gas Station",
"passageName": "Rocket Gas Station",
"original": "[[Rocket Gas Station]]"
},
{
"linkText": "Houston Headquarters",
"passageName": "Houston Headquarters",
"original": "[[Houston Headquarters]]"
}
],
"hooks": [],
"cleanText": "Welcome to the city's library. But its empty. The space robber must of been here but nowhere in sight. Lets keep going! Where to next?"
},
{
"name": "Rocket Gas Station",
"tags": "",
"id": "10",
"text": "The Gas station owner said the robber just left and you can catch him if you're fast enough! GO! \n\n[[Houston Headquarters]]",
"links": [
{
"linkText": "Houston Headquarters",
"passageName": "Houston Headquarters",
"original": "[[Houston Headquarters]]"
}
],
"hooks": [],
"cleanText": "The Gas station owner said the robber just left and you can catch him if you're fast enough! GO!"
},
{
"name": "<NAME>",
"tags": "",
"id": "11",
"text": "You found him! With all his stolen goods. Great Job! Think you can find him faster? \n\n[[Headquarters]]",
"links": [
{
"linkText": "Headquarters",
"passageName": "Headquarters",
"original": "[[Headquarters]]"
}
],
"hooks": [],
"cleanText": "You found him! With all his stolen goods. Great Job! Think you can find him faster?"
}
]
#my attempt *********************************
response = ""
curr_location = "headquarters"
moves = 0
valid_passages = {
"headquarters" : ["jupiter bar","cadet cafe","astronomy night club","moonstar hotel","houston headquarters"],
"jupiter bar" : ["cadet cafe", "astronomy night club", "moonstar hotel"],
"cadet cafe" : ["jupiter bar", "astronomy night club", "moonstar hotel"],
"astronomy night club" : ["moonstar hotel","pluto pizza","earth arcade"],
"moonstar hotel" : ["astronomy night club","pluto pizza","earth arcade"],
"pluto pizza" : ["earth arcade","saturn pharamcy","astro library"],
"earth arcade" : ["pluto pizza","saturn pharamcy","astro library"],
"saturn pharamcy" : ["earth arcade","pluto pizza","astro library","rocket gas station","houston headquarters"],
"astro library" : ["earth arcade","pluto pizza","saturn pharamcy","rocket gas station","houston headquarters"],
"rocket gas station" : ["houston headquarters"],
"houston headquarters": []
}
passages_welcome ={
"headquarters" : "You are at Space Headquaters. Your objective today is to find the infamous space robber throughout the space city. Aim for the lowest amount of moves and score",
"jupiter bar" : "You are at the Jupiter Bar. The owner of the bar said to you that the space robber stopped by but left a while ago but not sure where.",
"cadet cafe" : "You are at the Cadet Cafe. People around the cafe said that they never saw the space robber.",
"astronomy night club" : "You are at the buzzing Astronomy Night Club! The bouncer said that the space robber was here and pick-pockted a few individuals at the club and ran away through the backdoor.",
"moonstar hotel" : "You are at the Moonstar Hotel. The conceirge said they never seen a person like the space robber. Might have to go somewhere else.",
"pluto pizza" : "Sit down and have a pizza! The waiter at Pluto Pizza said he stopped by and had a slice but didn't leave that long ago.",
"earth arcade" : "Lots of games you can play at Earth Arcade. The owner said the that the space robber took all their coins and left but not sure where to.",
"saturn pharamcy" : "The pharmacist said he stole all their money and left. You might be close on the space robber's tail.",
"astro library" : "Welcome to the city's library. But its empty. The space robber must of been here but nowhere in sight.",
"rocket gas station" : "The Gas station owner said the robber just left and you can catch him if you're fast enough! GO!.",
"nasa garage" : "You found him! With all his stolen goods. Great Job! Think you can find him faster?",
}
passages_scores = {
"headquarters" : 0,
"jupiter bar" : 1,
"cadet cafe" :1,
"astronomy night club" : 1,
"moonstar hotel" : 1,
"pluto pizza" : 1,
"earth arcade" : 1,
"saturn pharamcy" : 1,
"astro library" : 1,
"rocket gas station" : 1,
"nasa garage" : 1,
}
#inputs
def get_input():
response = input("What do you want to do? ")
response = response.lower().strip()
return response
print("You are at the Headquarters")
score = 0
while True:
response = get_input()
if response == "QUIT":
break
#if place exists and place is reachable
if (response in valid_passages) and response in valid_passages[curr_location]:
curr_location = response
moves += 1
#move limit reached
if moves > 5:
print("You are at your move limit. Start from the beginning. ")
score, moves = 0, 0
curr_location = 'headquarters'
score += passages_scores[curr_location]
print(f'Moved to {curr_location}')
print(f'Moves : {moves}')
print(f'Score: {score}')
print(f'{passages_welcome[curr_location]}\n')
#if houstan reached, prompt 'play again' or 'quit'
if curr_location == "nasa garage":
response = input("Found robber, play again or quit? ")
response = response.lower().strip()
#prompt options after game
while True:
if response == "play again":
curr_location = "headquarters"
print("You are playing again")
break
elif response == "quit":
break
else:
print("Invalid input!!")
response = input("What you wanna do? ")
response = response.lower().strip()
#place is not reachable
elif (response in valid_passages) and response not in valid_passages[curr_location]:
print(f'You cannot reach {response} from {curr_location}')
#place does not exist/invalid response
else:
print(f'This is an in-valid place!!: {response}')
print("Thanks for playing!")
| 2.375 | 2 |
swftp/test/unit/test_utils.py | softlayer/swftp | 30 | 12763199 | """
See COPYING for license information.
"""
import unittest
import os
import time
from twisted.python import log
from swftp.utils import (
try_datetime_parse, MetricCollector, parse_key_value_config)
class MetricCollectorTest(unittest.TestCase):
def setUp(self):
self.c = MetricCollector()
def test_init(self):
c = MetricCollector(10)
self.assertEqual(c.sample_size, 10)
self.assertEqual(c.current, {})
self.assertEqual(c.totals, {})
self.assertEqual(c.samples, {})
c = MetricCollector(20)
self.assertEqual(c.sample_size, 20)
def test_emit(self):
self.c.emit({'metric': 'some_metric'})
self.assertEqual(self.c.current['some_metric'], 1)
self.c.emit({'metric': 'some_metric', 'count': 10})
self.assertEqual(self.c.current['some_metric'], 11)
def test_add_metric(self):
self.c.add_metric('some_metric')
self.assertEqual(self.c.current['some_metric'], 1)
self.assertEqual(self.c.totals['some_metric'], 1)
self.c.add_metric('some_metric', count=10)
self.assertEqual(self.c.current['some_metric'], 11)
self.assertEqual(self.c.totals['some_metric'], 11)
def test_sample(self):
self.c.add_metric('some_metric')
self.c.sample()
self.assertEqual(self.c.samples['some_metric'], [1])
self.c.add_metric('some_metric')
self.c.sample()
self.assertEqual(self.c.samples['some_metric'], [1, 1])
for i in range(15):
self.c.add_metric('some_metric', count=i)
self.c.sample()
self.assertEqual(self.c.samples['some_metric'], range(4, 15))
def test_attach_logger(self):
self.c.start()
self.assertIn(self.c.emit, log.theLogPublisher.observers)
self.c.stop()
self.assertNotIn(self.c.emit, log.theLogPublisher.observers)
class DateTimeParseTest(unittest.TestCase):
def setUp(self):
os.environ['TZ'] = 'GMT'
time.tzset()
def test_invalid_date(self):
result = try_datetime_parse("this isn't a date!")
self.assertIsNone(result)
def test_RFC_1123(self):
result = try_datetime_parse("Thu, 10 Apr 2008 13:30:00 GMT")
self.assertEqual(result, 1207834200.0)
def test_RFC_1123_subsecond(self):
result = try_datetime_parse("Thu, 10 Apr 2008 13:30:00.12345 GMT")
self.assertEqual(result, 1207834200.0)
def test_ISO_8601(self):
result = try_datetime_parse("2008-04-10T13:30:00")
self.assertEqual(result, 1207834200.0)
def test_ISO_8601_subsecond(self):
result = try_datetime_parse("2008-04-10T13:30:00.12345")
self.assertEqual(result, 1207834200.0)
def test_universal_sortable(self):
result = try_datetime_parse("2008-04-10 13:30:00")
self.assertEqual(result, 1207834200.0)
def test_universal_sortable_subsecond(self):
result = try_datetime_parse("2008-04-10 13:30:00.12345")
self.assertEqual(result, 1207834200.0)
def test_date_short(self):
result = try_datetime_parse("2012-04-10")
self.assertEqual(result, 1334016000.0)
class ParseKeyValueConfigTest(unittest.TestCase):
def test_single(self):
res = parse_key_value_config('test: 1')
self.assertEqual(res, {'test': '1'})
def test_multiple(self):
res = parse_key_value_config('test: 1, test2: 2')
self.assertEqual(res, {'test': '1', 'test2': '2'})
def test_empty(self):
res = parse_key_value_config('')
self.assertEqual(res, {})
def test_duplicate(self):
res = parse_key_value_config('test: 1, test: 2')
self.assertEqual(res, {'test': '2'})
def test_whitespace(self):
res = parse_key_value_config(' test : 1 , test2 : 2 ')
self.assertEqual(res, {'test': '1', 'test2': '2'})
| 2.28125 | 2 |
covariance/rank_avg_windows.py | methylgrammarlab/proj_scwgbs | 0 | 12763200 | <reponame>methylgrammarlab/proj_scwgbs
"""
Take different bedgraph files which represent windows which were normalized ( so one value per window) and
rank them across patient to see if they match
"""
import argparse
import glob
import os
import sys
import numpy as np
import pandas as pd
from tqdm import tqdm
sys.path.append(os.path.dirname(os.getcwd()))
sys.path.append(os.getcwd())
from commons import files_tools, consts
BEDGRPH_FILE_FORMAT = os.path.join("*", "norm", "*.bedgraph")
def parse_input():
parser = argparse.ArgumentParser()
parser.add_argument('--input', help='Path to bedgraph files or folder', required=True)
parser.add_argument('--output_folder', help='Path of the output folder', required=False,
default=os.path.dirname(sys.argv[0]))
parser.add_argument('--window_boundaries', help='File with the window boundries', required=False)
args = parser.parse_args()
return args
def get_bedgraph_files(input_path):
"""
Get a list of all the bedgraph files to work on
:param input_path: A dir path or file path of bedgraph
:return: A list of all the paths to work on
"""
if os.path.isdir(input_path):
file_path = os.path.join(input_path, BEDGRPH_FILE_FORMAT)
all_file_paths = glob.glob(file_path)
else:
all_file_paths = [input_path]
return all_file_paths
def rank_covariance_across_patients(files_paths, window_boundaries):
"""
Get the covariance of a window and rank the different windows
We base this on the fact that this is the normed bedgraph, so each window has only one value or nan
:param files_paths: A list of paths to work on
:param window_boundaries: The boundaries for the chromosome
:return:
"""
patients_dict = {}
for file_path in files_paths:
patient, chromosome = consts.PATIENT_CHR_NAME_RE.findall(file_path)[0]
input_file = files_tools.load_bedgraph(file_path)
values = []
for i in window_boundaries:
try:
value = float(input_file[input_file.start == i[0]]["coverage"])
except TypeError: # Will happened if we have all nans nans in this window
value = -1
values.append(value)
patients_dict[patient] = pd.Series(values).rank(method="min")
# Chose the first patient to be the baseline
baseline_patient_name = list(patients_dict.keys())[0]
baselines_p = np.copy(patients_dict[baseline_patient_name])
for patient in patients_dict:
patients_dict[patient] = [x for _, x in sorted(zip(baselines_p, patients_dict[patient]))]
return pd.DataFrame(patients_dict)
def main():
args = parse_input()
all_file_paths = get_bedgraph_files(args.input)
all_files_dict = files_tools.convert_paths_list_to_chromosome_based_dict(all_file_paths)
window_boundaries = files_tools.load_compressed_pickle(args.window_boundaries)
for chromosome in tqdm(all_files_dict):
df = rank_covariance_across_patients(all_files_dict[chromosome], window_boundaries[int(chromosome)])
df.to_csv(os.path.join(args.output_folder, "covariance_rank_ch%s.csv" % chromosome))
if __name__ == '__main__':
main()
| 2.6875 | 3 |
code/pyscripts/computepspace.py | GandalfSaxe/letomes | 0 | 12763201 |
from matplotlib.colors import Normalize
import matplotlib as mpl
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
import pandas as pd
import numpy as np
from math import pi, log
from scipy.stats import rankdata
from argparse import ArgumentParser
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument("fp", type=str)
parser.add_argument(
"bounds", type=float, nargs=4, help="lowerbound x, upperbound x, lb y, ub y"
)
args = parser.parse_args()
filepath = args.fp
dims = args.bounds
# === setup problem space, either real or Karpathy toy problem for validation ===
# pspace = np.loadtxt("golf_course_zoom_s1024.txt")
pspace = np.loadtxt(filepath)
# uncomment this line if you want smooth toy-problem
# pspace = G
print(dims)
lbp, ubp, lbb, ubb = dims
# ******************** PLOTTING ****************************************
# ======== establish figs =================
fig = plt.figure()
ax = fig.gca()
# ============= plot problem space bg images ====
cmap = plt.cm.viridis
colors = Normalize(min(pspace.flatten()), max(pspace.flatten()))(pspace)
colors = cmap(colors)
plt.axis('equal')
plt.imshow(
colors,
vmin=min(pspace.flatten()),
vmax=max(pspace.flatten()),
extent=[lbb, ubb,lbp, ubp],
aspect="auto",
interpolation="none",
origin="lower",
)
ax.set_xlabel("burnDv")
ax.set_ylabel("position")
plt.colorbar()
plt.show()
| 2.4375 | 2 |
Week2/week2/scripts/publisher_radius.py | vedanth-03/Robotics-Automation-QSTP-2021 | 0 | 12763202 | #!/usr/bin/env python
import rospy
from std_msgs.msg import Float32
rospy.init_node('publisher_radius')
pub = rospy.Publisher('radius', Float32, queue_size=1)
rate = rospy.Rate(1)
radius = 1.0
while not rospy.is_shutdown():
try:
pub.publish(radius)
rate.sleep()
except:
pass | 2.21875 | 2 |
cla_backend/apps/guidance/admin.py | uk-gov-mirror/ministryofjustice.cla_backend | 3 | 12763203 | # coding=utf-8
from django.contrib import admin
from .admin_support.forms import NoteModelForm
from .models import Note, Tag
class TagInline(admin.TabularInline):
model = Note.tags.through
class NoteAdmin(admin.ModelAdmin):
ordering = ["title"]
exclude = ("created", "modified", "body")
list_display = ("name", "title", "modified", "created")
search_fields = ["title"]
prepopulated_fields = {"name": ("title",)}
inlines = [TagInline]
form = NoteModelForm
admin.site.register(Tag)
admin.site.register(Note, NoteAdmin)
| 1.960938 | 2 |
tests/test_en_zh_debug.py | ffreemt/google-tr-async-free | 0 | 12763204 | '''
test en-zh
'''
# import sys
import pytest # type: ignore
from loguru import logger
# sys.path.insert(0, '..')
# from google_tr_async.google_tr_async import google_tr_async
from google_tr_async import google_tr_async
@pytest.mark.asyncio
async def test_0():
''' test 0'''
text = \
'''There is now some uncertainty about the future of Google News in Europe after the European Union finalized its controversial new copyright legislation.
Google had previously showed how dramatically its search results could be affected, and warned that it may shut down the service in Europe …
The EU Copyright Directive is well-intentioned, requiring tech giants to license the right to reproduce copyrighted material on their own websites. However, the legislation as originally proposed would have made it impossible for Google to display brief snippets and photos from news stories in its search results without paying the news sites.
Google last month showed how its news search results would appear without photos and text excerpts, rendering the service all but useless. The company had previously said that its only option might be to shut down Google News in Europe.'''
trtext, proxy = await google_tr_async(text, debug=True)
# assert len(google_tr_async.dual) == 6
assert proxy is None
assert len(trtext) > 200
@pytest.mark.asyncio
async def test_1():
''' test 1 zh2en'''
text = '这是测试'
trtext, proxy = await google_tr_async(text, to_lang='en', debug=True)
logger.debug('trtext: %s' % trtext)
# logger.debug('google_tr_async.dual: %s' % google_tr_async.dual)
# assert len(google_tr_async.dual) == 6
# assert google_tr_async.dual == 6
# assert len(trtext) > 200
assert trtext == 'This is a test'
assert proxy is None
| 2.328125 | 2 |
Leetcode/1000-2000/1151. Minimum Swaps to Group All 1's Together/1151.py | Next-Gen-UI/Code-Dynamics | 0 | 12763205 | <filename>Leetcode/1000-2000/1151. Minimum Swaps to Group All 1's Together/1151.py
class Solution:
def minSwaps(self, data: List[int]) -> int:
k = data.count(1)
ones = 0 # ones in window
maxOnes = 0 # max ones in window
for i, num in enumerate(data):
if i >= k and data[i - k]:
ones -= 1
if num:
ones += 1
maxOnes = max(maxOnes, ones)
return k - maxOnes
| 3.015625 | 3 |
sort/Maximum_628.py | smallbaby/leetcode | 0 | 12763206 | # -*- coding: utf-8 -*-
class Solution(object):
    def maximumProduct(self, nums):
        """Return the largest product of any three numbers in ``nums``.

        Sort-based variant: the answer is either the product of the three
        largest values, or the product of the largest value with the two
        smallest (most negative) values.  Sorts ``nums`` in place, as the
        original did.

        :type nums: List[int]
        :rtype: int
        """
        nums.sort(reverse=True)
        top_three = nums[0] * nums[1] * nums[2]
        top_and_two_lowest = nums[0] * nums[-1] * nums[-2]
        return top_three if top_three > top_and_two_lowest else top_and_two_lowest

    def maximumProduct1(self, nums):
        """Single-pass O(n) variant of :meth:`maximumProduct` (no sort).

        Tracks the three largest and the two smallest values.  BUG FIX: the
        original seeded the trackers with +/-1000000, which silently produced
        wrong answers whenever every element lay outside that range;
        +/-infinity sentinels are correct for arbitrary integers.

        :type nums: List[int]
        :rtype: int
        """
        m1 = m2 = m3 = float('-inf')  # three largest, m1 >= m2 >= m3
        s1 = s2 = float('inf')        # two smallest, s1 <= s2
        for num in nums:
            if num > m1:
                m3 = m2
                m2 = m1
                m1 = num
            elif num > m2:
                m3 = m2
                m2 = num
            elif num > m3:
                m3 = num
            if num < s1:
                s2 = s1
                s1 = num
            elif num < s2:
                s2 = num
        best_high = m1 * m2 * m3
        best_mixed = m1 * s1 * s2
        return best_high if best_high > best_mixed else best_mixed
# Quick manual check.  BUG FIX: ``print s...`` was a Python 2 print
# statement, a SyntaxError under Python 3; the call form works on both.
s = Solution()
a = [1, 4, 3, -1, -2, -5]
print(s.maximumProduct1(a))
pagarme/transaction.py | ch1ninha/analisar-pagarme | 0 | 12763207 | <gh_stars>0
from pagarme.resources import handler_request
from pagarme.resources.routes import transaction_routes
# Thin wrappers over the Pagar.me transactions REST endpoints: each function
# delegates to ``handler_request`` with the matching route constant from
# ``transaction_routes``.  Descriptions below are derived from the route
# names.
def calculate_installments_amount(dictionary):
    """GET the installment amounts for the given query parameters."""
    return handler_request.get(transaction_routes.CALCULATE_INSTALLMENTS_AMOUNT, dictionary)


def capture(transaction_id, dictionary):
    """POST a capture for a previously authorized transaction."""
    return handler_request.post(transaction_routes.CAPTURE_TRANSACTION_AFTER.format(transaction_id), dictionary)


def create(dictionary):
    """POST a new transaction."""
    return handler_request.post(transaction_routes.BASE_URL, dictionary)


def events(transaction_id):
    """GET the events recorded for a transaction."""
    return handler_request.get(transaction_routes.GET_EVENTS_TRANSACTION.format(transaction_id))


def find_all():
    """GET all transactions."""
    return handler_request.get(transaction_routes.GET_ALL_TRANSACTIONS)


def find_by(search_params):
    """GET transactions matching arbitrary query parameters."""
    return handler_request.get(transaction_routes.GET_TRANSACTION_BY, search_params)


def find_by_id(transaction_id):
    """GET a single transaction by its id."""
    return handler_request.get(transaction_routes.GET_SPECIFIC_TRANSACTION_BY_ID.format(transaction_id))


def generate_card_hash_key():
    """GET a key used to build a card hash client-side."""
    return handler_request.get(transaction_routes.GENERATE_CARD_HASH_KEY)


def operations(transaction_id):
    """GET the gateway operations of a transaction."""
    return handler_request.get(transaction_routes.GET_TRANSACTION_OPERATION.format(transaction_id))


def pay_boleto(transaction_id, dictionary):
    """PUT a payment confirmation for a boleto transaction."""
    return handler_request.put(transaction_routes.PAY_BOLETO.format(transaction_id), dictionary)


# def pay_boleto_notify(dictionary):
#     return handler_request.post(transaction_routes.PAY_BOLETO_NOTIFY,dictionary)


def payables(transaction_id):
    """GET all payables generated by a transaction."""
    return handler_request.get(transaction_routes.GET_ALL_PAYABLES_WITH_TRANSACTION_ID.format(transaction_id))


def postbacks(transaction_id):
    """GET the postbacks sent for a transaction."""
    return handler_request.get(transaction_routes.GET_ALL_POSTBACKS.format(transaction_id))


def postback_redeliver(transaction_id, postback_id):
    """POST a redelivery request for a specific postback."""
    return handler_request.post(transaction_routes.POSTBACK_REDELIVER.format(transaction_id, postback_id))


def refund(transaction_id, dictionary):
    """POST a refund for a transaction."""
    return handler_request.post(transaction_routes.REFUND_TRANSACTION.format(transaction_id), dictionary)


def specific_payable(transaction_id, payable_id):
    """GET one payable of a transaction."""
    return handler_request.get(transaction_routes.GET_SPECIFIC_PAYABLE.format(transaction_id, payable_id))


def specific_postback(transaction_id, postback_id):
    """GET one postback of a transaction."""
    return handler_request.get(transaction_routes.GET_SPECIFIC_POSTBACK.format(transaction_id, postback_id))


def review(transaction_id,dictionary):
    """POST an antifraud-analysis decision for a transaction."""
    return handler_request.post(transaction_routes.ANTIFRAUD_ANALYSIS.format(transaction_id),dictionary)
findingPath.py | truongsatthu/neo4j_autonomous_path_planner_project | 0 | 12763208 | #!/usr/bin/env python3.6
# -*- coding: utf-8 -*-
import numpy as np
import networkx as nx
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import sys,os,time,yaml,argparse,itertools
from numpy.lib.arraysetops import isin
from queue import PriorityQueue
from mpl_toolkits.mplot3d import Axes3D
from neo4j import GraphDatabase
from scipy import spatial
import copy
from bresenham import bresenham
sys.setrecursionlimit(5000)
_image_root_dir = os.getcwd()+"/map/"
_neo4j_root_dir = os.getcwd()+"/Neo4j_setting/"
def plotVoronoiPath(path):
    """Render the planned path in 3-D over the three floor-map images.

    ``path`` is a sequence of (z, y, x) points.  Also draws the module-level
    ``number_list`` waypoints (black dots) and ``elevs`` elevator points
    (yellow dots), which are populated in ``__main__``.
    """
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    map1 = mpimg.imread(_image_root_dir+'chinokyoten1f.png')
    map2 = mpimg.imread(_image_root_dir+'chinokyoten2f.png')
    map3 = mpimg.imread(_image_root_dir+'chinokyoten3f.png')
    y, x = np.ogrid[0:map1.shape[0], 0:map1.shape[1]]
    # each floor image is drawn as a textured plane at z = 100 / 200 / 300
    ax.plot_surface(x, y, np.atleast_2d(100),rstride=15,cstride=15,facecolors=map1,shade=False)
    ax.plot_surface(x, y, np.atleast_2d(200),rstride=15,cstride=15,facecolors=map2,shade=False)
    ax.plot_surface(x, y, np.atleast_2d(300),rstride=15,cstride=15,facecolors=map3,shade=False)
    #setting view
    ax.view_init(30, 30)
    # make the panes transparent
    ax.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    # make the grid lines transparent
    ax.xaxis._axinfo["grid"]['color'] = (1,1,1,0)
    ax.yaxis._axinfo["grid"]['color'] = (1,1,1,0)
    ax.zaxis._axinfo["grid"]['color'] = (1,1,1,0)
    # path points are stored (z, y, x); matplotlib wants x, y, z
    for i in range(len(path)-1):
        p1 = path[i]
        p2 = path[i+1]
        plt.plot([p1[2], p2[2]], [p1[1], p2[1]], [p1[0], p2[0]], 'r-')
    for i in (range(len(number_list))):
        ax.plot([float(number_list[i][2])], [float(number_list[i][1])], [float(number_list[i][0])], 'ko', markersize=3)
    for i in (range(len(elevs))):
        ax.plot([float(elevs[i][2])], [float(elevs[i][1])], [float(elevs[i][0])], 'yo', markersize=3)
    plt.show()
#class NODE():
def __init__(self,*args):
    """Parse the command line and the Neo4j connection YAML.

    NOTE(review): despite the name this is a module-level function, not a
    method -- ``__main__`` calls it as ``__init__(args)``, so ``self``
    receives ``sys.argv[1:]`` and is then ignored (argparse re-reads
    ``sys.argv`` itself).  Consider renaming when refactoring.

    Returns (id_map, neo4jData, start, goal, waypoints, algorithm,
    number_list) where number_list is [start, *waypoints, goal] as string
    triples "z,y,x".
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', '--id_map', help='map ID', nargs='+', required=True)
    parser.add_argument('-n', '--neo4j_file', help='neo4j setting file', nargs='+', required=True)
    parser.add_argument('-s', '--start_pos', help='start position', nargs='+', required=True)
    parser.add_argument('-g', '--goal_pos', help='goal position', nargs='+', required=True)
    parser.add_argument('-w', '--waypoints', help='waypoints position', nargs='+', required=False)
    parser.add_argument('-alg', '--algorithm', help='planning algorithm', nargs='+', required=False)
    args = parser.parse_args()
    _id_map = args.id_map
    _neo4j_path = args.neo4j_file[0]
    # positions arrive as a single "z,y,x" string
    _start_pos = list((args.start_pos[0]).split(","))
    _goal_pos = list((args.goal_pos[0]).split(","))
    _waypoints_list = args.waypoints
    _algorithm = args.algorithm
    _neo4jDir = os.path.join(_neo4j_root_dir, _neo4j_path)
    number_list = []
    if (_waypoints_list != None):
        for i in (range(len(_waypoints_list))):
            number_list.append((list(_waypoints_list[i].split(","))))
    # visit order: start, waypoints..., goal
    number_list.insert(0,_start_pos)
    number_list.insert(len(number_list),_goal_pos)
    with open(_neo4jDir, 'r', encoding='utf-8') as neo4j:
        neo4jstring = neo4j.read()
        neo4j_obj = yaml.safe_load(neo4jstring)
        # NOTE(review): the "<PASSWORD>" token below looks like a dataset
        # redaction artifact (not valid Python); restore the original
        # expression before running.
        neo4jData = {
            "uri": neo4j_obj['uri'],
            "userName": neo4j_obj['userName'],
            "password": <PASSWORD>['password']
        }
    return (_id_map,neo4jData,_start_pos,_goal_pos,_waypoints_list,_algorithm,number_list)
def READ_NODES_2EDGES(_id_map,neo4jData,*args):
    """Load obstacle points, roadmap edges and elevator points from Neo4j.

    For every map id, queries obstacle nodes ("Obs_<id>"), all graph
    relations, and elevator points ("EP_<id>"), then stitches the floors
    together: each elevator point is connected to its closest same-floor
    roadmap node, consecutive elevator points are chained, and every entry of
    the module-level ``number_list`` (start/waypoints/goal) is connected to
    its closest same-floor node.

    Returns (points, edges, elevs).
    """
    points = []
    edges = []
    elevs = []
    #Neo4j Enterprise
    uri= neo4jData['uri']
    userName= neo4jData['userName']
    # NOTE(review): "<PASSWORD>Data" is a dataset redaction artifact (not
    # valid Python); restore ``neo4jData['password']`` before running.
    password= <PASSWORD>Data['password']
    #Connect to the neo4j database server
    graphDB_Driver = GraphDatabase.driver(uri, auth=(userName, password))
    with graphDB_Driver.session() as graphDB_Session:
        for args in _id_map:
            Obs_cal='MATCH (n) WHERE labels(n)=["GDB_'+ args +'"] AND n.Identifier = "Obs_'+ args +'" RETURN n'
            Ver_cal='MATCH (n)-[r]->(m) WHERE labels(n)=["GDB_'+ args +'"] RETURN n,r,m'
            Ele_cal='MATCH (n) WHERE labels(n)=["GDB_'+ args +'"] AND n.Identifier = "EP_'+ args +'" RETURN n'
            nodeo = graphDB_Session.run(Obs_cal)
            nodes = graphDB_Session.run(Ver_cal)
            nodee = graphDB_Session.run(Ele_cal)
            # obstacle pixels as (y, x) pairs
            for node in nodeo:
                #print(str(node[0]['z']), node[0]['y'], node[0]['x'],node[0]['Action'], node[0]['Identifier'], node[0]['Floor'])
                p = (float(node[0]['y']), float(node[0]['x']))
                points.append(p)
            # roadmap edges as ((z, y, x), (z, y, x)) pairs
            for node in nodes:
                #print(str(node[0]['z']), node[0]['y'], node[0]['x'],node[0]['Action'], node[0]['Identifier'], node[0]['Floor'])
                p1 = (float(node[0]['z']), float(node[0]['y']), float(node[0]['x']))
                p2 = (float(node[2]['z']), float(node[2]['y']), float(node[2]['x']))
                edges.append((p1, p2))
            # elevator points as (z, y, x)
            for node in nodee:
                #print(str(node[0]['z']), node[0]['y'], node[0]['x'],node[0]['Action'], node[0]['Identifier'], node[0]['Floor'])
                e = (float(node[0]['z']), float(node[0]['y']), float(node[0]['x']))
                elevs.append(e)
    #EL_closest: connect each elevator point to the nearest same-floor node
    for x,e in enumerate (elevs):
        dist_e = 1000000
        for i in range(len(edges)):
            for j in (edges[i]):
                p = j
                if (p[0]==e[0]):  # same floor (z coordinate)
                    d = heuristic(e, p)
                    if d < dist_e:
                        EL = p
                        dist_e = d
        edges.append((EL,e))
    #for x in itertools.permutations(elevs, r=2):
    # chain consecutive elevator points so floors are reachable
    for i in range(len(elevs)-1):
        edges.append((elevs[i],elevs[i+1]))
    #WP_closest: connect each waypoint (global number_list) likewise
    for w in number_list:
        dist_w = 1000000
        W = (float(w[0]),float(w[1]),float(w[2]))
        for i in range(len(edges)):
            for j in (edges[i]):
                p = j
                if (p[0]==W[0]):
                    d = heuristic(w, p)
                    if d < dist_w:
                        WP = p
                        dist_w = d
        edges.append((WP,W))
    graphDB_Driver.close()
    # #Robot's width is considered (pixcel)
    # R = 0.0
    # tree = spatial.KDTree(np.asarray(points))
    # for i,e in enumerate(edges):
    #     for j,n in enumerate(e):
    #         nodes = np.asarray((n[1],n[2]))
    #         if (len(tree.query_ball_point(nodes, R)) != 0):
    #             edges.pop(i)
    #             break
    #         else:
    #             pass
    return points,edges,elevs
def create_waypoints_edges(number_list):
    """Build direct edges between waypoints and elevator points.

    Connects every waypoint to each elevator point on its own floor, links
    all elevator points pairwise, and links waypoints that share a floor.
    Reads the module-level ``elevs`` list populated in ``__main__``.
    """
    waypoints_edges = []
    for w in number_list:
        i = (float(w[0]),float(w[1]),float(w[2]))
        for j in elevs:
            if (i[0] == j[0]):  # same floor (z coordinate)
                waypoints_edges.append((i, j))
    # fully connect the elevator points (both directions via permutations)
    for x in itertools.permutations(elevs, r= 2):
        waypoints_edges.append((x[0],x[1]))
    # connect waypoint pairs that lie on the same floor
    for y in itertools.permutations(number_list, r= 2):
        W1 = (float(y[0][0]),float(y[0][1]),float(y[0][2]))
        W2 = (float(y[1][0]),float(y[1][1]),float(y[1][2]))
        if (W1[0] == W2[0]):
            waypoints_edges.append((W1,W2))
    return waypoints_edges
def create_graph(edges):
    """Build an undirected NetworkX graph from 3-D point pairs.

    Each edge is weighted with the Euclidean distance between its endpoints.
    """
    graph = nx.Graph()
    for first, second in edges:
        graph.add_edge(first, second, weight=heuristic(first, second))
    return graph
def heuristic(n1, n2):
    """Euclidean distance between two (z, y, x) triples.

    Coordinates may arrive as strings (e.g. parsed CLI waypoints), so each
    component is coerced to ``float`` first.
    """
    dz = float(n1[0]) - float(n2[0])
    dy = float(n1[1]) - float(n2[1])
    dx = float(n1[2]) - float(n2[2])
    return (dz ** 2 + dy ** 2 + dx ** 2) ** 0.5
def a_star_graph(graph, _start_pos, _goal_pos, h):
    """A* search over a weighted (e.g. NetworkX) graph.

    Parameters
    ----------
    graph :
        Mapping-like graph: ``graph[node]`` yields neighbours and
        ``graph.edges[a, b]['weight']`` gives the edge cost.
    _start_pos, _goal_pos :
        Start and goal nodes (hashable; orderable, since they sit inside the
        priority-queue tuples).
    h :
        Admissible heuristic ``h(node, goal) -> float``.

    Returns
    -------
    (list, float)
        Node path from start to goal (empty when unreachable) and its cost.
    """
    path = []
    path_cost = 0
    # BUG FIX: searching from a node to itself used to crash during the
    # retrace step (KeyError on the empty ``branch`` dict); answer directly.
    if _start_pos == _goal_pos:
        print('Found a path.')
        return [_start_pos], path_cost
    queue = PriorityQueue()
    queue.put((0, _start_pos))
    # BUG FIX: was ``set(_start_pos)``, which built a set of the start
    # node's *elements* (its coordinates) instead of a set containing the
    # node itself.
    visited = {_start_pos}
    branch = {}
    found = False

    while not queue.empty():
        item = queue.get()
        current_node = item[1]
        if current_node == _start_pos:
            current_cost = 0.0
        else:
            current_cost = branch[current_node][0]

        if current_node == _goal_pos:
            print('Found a path.')
            found = True
            break
        else:
            for next_node in graph[current_node]:
                cost = graph.edges[current_node, next_node]['weight']
                branch_cost = current_cost + cost
                queue_cost = branch_cost + h(next_node, _goal_pos)

                if next_node not in visited:
                    visited.add(next_node)
                    branch[next_node] = (branch_cost, current_node)
                    queue.put((queue_cost, next_node))

    if found:
        # retrace steps from goal back to start via the parent links
        n = _goal_pos
        path_cost = branch[n][0]
        path.append(_goal_pos)
        while branch[n][1] != _start_pos:
            path.append(branch[n][1])
            n = branch[n][1]
        path.append(branch[n][1])
    else:
        print('**********************')
        print('Failed to find a path!')
        print('**********************')
    return path[::-1], path_cost
def optimization_path(pathIndex,path_opt,points):
    """Greedy line-of-sight smoothing starting at ``pathIndex``.

    For consecutive points (A, B, C), drops the middle point B when the
    Bresenham line A->C does not cross an obstacle pixel in ``points``.
    """
    while (len(path_opt) > pathIndex + 2):
        A = path_opt[pathIndex]
        B = path_opt[pathIndex+1]
        C = path_opt[pathIndex+2]
        # rasterize the straight segment A->C in (y, x) pixel space
        cells = list(bresenham(int(A[1]),int(A[2]),int(C[1]),int(C[2])))
        hit = False
        for p in points:
            for c in cells:
                if (int(p[0])==c[0] and int(p[1])==c[1]):
                    hit = True
                    break
            # NOTE(review): this unconditional break exits the obstacle loop
            # after testing only ``points[0]`` -- all other obstacles are
            # never checked.  Looks unintended; confirm before relying on
            # the collision test.
            break
        if not hit:
            # NOTE(review): ``remove`` deletes by value; duplicate points in
            # the path would remove the first occurrence, not necessarily B.
            path_opt.remove(B)
        pathIndex += 1
    return path_opt
if __name__ == "__main__":
args = sys.argv[1:]
if len(sys.argv) == 1:
print("+++++++++++++++++++++++++++++++++++++++++++++\n")
print("[********* Please input arguments **********]\n")
print("+++++++++++++++++++++++++++++++++++++++++++++\n")
sys.exit()
id_map,neo4jData,start_pos,goal_pos,waypoints_list,algorithm,number_list = __init__(args)
points,edges,elevs = READ_NODES_2EDGES(id_map,neo4jData)
print(elevs)
#****************************************************************************************************
if (algorithm == None):
graph = create_graph(edges)
path_WP = []
for i in (range(len(number_list)-1)):
WP1 = (float(number_list[i][0]),float(number_list[i][1]),float(number_list[i][2]))
WP2 = (float(number_list[i+1][0]),float(number_list[i+1][1]),float(number_list[i+1][2]))
path, cost = a_star_graph(graph, WP1, WP2, heuristic)
path_WP.extend(path)
# print(path_WP)
# plotVoronoiPath(path_WP)
elif (algorithm != None):
waypoints_edges = create_waypoints_edges(number_list)
graph = create_graph(waypoints_edges)
path_WP = []
for i in (range(len(number_list)-1)):
WP1 = (float(number_list[i][0]),float(number_list[i][1]),float(number_list[i][2]))
WP2 = (float(number_list[i+1][0]),float(number_list[i+1][1]),float(number_list[i+1][2]))
path, cost = a_star_graph(graph, WP1, WP2, heuristic)
path_WP.extend(path)
# print(path_WP)
# plotVoronoiPath(path_WP)
else:
pass
print("Optimizing path...")
path_opt = copy.deepcopy(path_WP)
pathCount = 0
while pathCount < len(path_opt) - 2:
# pathCount = 0
path_opt = optimization_path(pathCount,path_opt,points)
pathCount += 1
print("End of path optimization")
print(path_opt)
plotVoronoiPath(path_opt)
#****************************************************************************************************
t0 = time.clock()
t1 = time.clock()
total = t1-t0
print('Compu. time =', total)
| 2.109375 | 2 |
dnsimple/exceptions.py | mherrmann/dnsimple-python | 12 | 12763209 | class DNSimpleException(Exception):
def __init__(self, message=None, errors=None):
self.message = message
self.errors = errors
| 1.929688 | 2 |
src/api/tests/test_auth_refresh.py | DiceNameIsMy/proper-FastAPI-startup | 0 | 12763210 | from requests.models import Response
from fastapi.testclient import TestClient
from domain.user import UserDomain
from settings import settings
URI = f"/v{settings.api_version}/token/refresh"
def test_valid(
    client: TestClient,
    user_domain: UserDomain,
    user_refresh_token: str,
):
    """A valid refresh token yields a decodable access token (200)."""
    response: Response = client.post(
        URI,
        json={"refresh_token": user_refresh_token},
        headers={"Content-Type": "Application/json"},
    )
    assert response.status_code == 200, response.json()
    assert user_domain.read_token(response.json().get("access_token"))


def test_bad_token(
    client: TestClient,
    user_refresh_token: str,
):
    """A tampered refresh token is rejected with 400."""
    response: Response = client.post(
        URI,
        json={"refresh_token": user_refresh_token + "bad"},
        headers={"Content-Type": "Application/json"},
    )
    assert response.status_code == 400, response.json()


def test_empty_token(
    client: TestClient,
):
    """An empty refresh token is rejected with 400."""
    response: Response = client.post(
        URI,
        json={"refresh_token": ""},
        headers={"Content-Type": "Application/json"},
    )
    assert response.status_code == 400, response.json()
| 2.546875 | 3 |
jmetal/problem/multiobjective/constrained.py | LuckysonKhaidem/ProjectAlpha | 1 | 12763211 | <reponame>LuckysonKhaidem/ProjectAlpha<filename>jmetal/problem/multiobjective/constrained.py
from math import pi, cos, atan
from jmetal.core.solution import FloatSolution
from jmetal.core.problem import FloatProblem
"""
.. module:: constrained
:platform: Unix, Windows
:synopsis: Constrained test problems for multi-objective optimization
.. moduleauthor:: <NAME> <<EMAIL>>
"""
class Srinivas(FloatProblem):
    """Constrained bi-objective Srinivas benchmark problem."""

    def __init__(self, rf_path: str = None):
        super(Srinivas, self).__init__(rf_path=rf_path)
        self.number_of_objectives = 2
        self.number_of_variables = 2
        self.number_of_constraints = 2

        self.obj_directions = [self.MINIMIZE, self.MINIMIZE]
        self.obj_labels = ['f(x)', 'f(y)']

        self.lower_bound = [-20.0] * self.number_of_variables
        self.upper_bound = [20.0] * self.number_of_variables

        # Publish the bounds as FloatSolution class attributes, exactly as
        # the surrounding problems do.
        FloatSolution.lower_bound = self.lower_bound
        FloatSolution.upper_bound = self.upper_bound

    def evaluate(self, solution: FloatSolution) -> FloatSolution:
        v1 = solution.variables[0]
        v2 = solution.variables[1]

        solution.objectives[0] = 2.0 + (v1 - 2.0) * (v1 - 2.0) + (v2 - 1.0) * (v2 - 1.0)
        solution.objectives[1] = 9.0 * v1 - (v2 - 1.0) * (v2 - 1.0)

        return solution

    def evaluate_constraints(self, solution: FloatSolution) -> None:
        v1 = solution.variables[0]
        v2 = solution.variables[1]

        constraints = [
            1.0 - (v1 * v1 + v2 * v2) / 225.0,
            (3.0 * v2 - v1) / 10.0 - 1.0,
        ]

        overall = 0.0
        violated = 0.0
        for value in constraints:
            if value < 0.0:
                overall += value
                violated += 1

        solution.attributes['overall_constraint_violation'] = overall
        solution.attributes['number_of_violated_constraints'] = violated

    def get_name(self):
        return 'Srinivas'
class Tanaka(FloatProblem):
    """Constrained bi-objective Tanaka benchmark problem."""

    def __init__(self, rf_path: str = None):
        super(Tanaka, self).__init__(rf_path=rf_path)
        self.number_of_objectives = 2
        self.number_of_variables = 2
        self.number_of_constraints = 2

        self.obj_directions = [self.MINIMIZE, self.MINIMIZE]
        self.obj_labels = ['f(x)', 'f(y)']

        self.lower_bound = [10e-5] * self.number_of_variables
        self.upper_bound = [pi] * self.number_of_variables

        # Publish the bounds as FloatSolution class attributes, exactly as
        # the surrounding problems do.
        FloatSolution.lower_bound = self.lower_bound
        FloatSolution.upper_bound = self.upper_bound

    def evaluate(self, solution: FloatSolution) -> FloatSolution:
        # Objectives are the decision variables themselves.
        for index in range(2):
            solution.objectives[index] = solution.variables[index]
        return solution

    def evaluate_constraints(self, solution: FloatSolution) -> None:
        v1 = solution.variables[0]
        v2 = solution.variables[1]

        constraints = [
            (v1 * v1 + v2 * v2 - 1.0 - 0.1 * cos(16.0 * atan(v1 / v2))),
            -2.0 * ((v1 - 0.5) * (v1 - 0.5) + (v2 - 0.5) * (v2 - 0.5) - 0.5),
        ]

        overall = 0.0
        violated = 0.0
        for value in constraints:
            if value < 0.0:
                overall += value
                violated += 1

        solution.attributes['overall_constraint_violation'] = overall
        solution.attributes['number_of_violated_constraints'] = violated

    def get_name(self):
        return 'Tanaka'
| 2.421875 | 2 |
ui/data/polygon.py | Simi4/imgmalibu | 0 | 12763212 | polygon = [
[2000, 1333],
[2000, 300],
[500, 900],
[0, 1333],
[2000, 1333]
]
'''
polygon = [
[0, 0],
[0, 600],
[900, 600],
[1800, 0],
[0, 0]
]
''' | 2.125 | 2 |
src/hark_lang/examples/__init__.py | krrome/teal-lang | 85 | 12763213 | <reponame>krrome/teal-lang<filename>src/hark_lang/examples/__init__.py
from .load_examples import load_examples
| 1.109375 | 1 |
team_9/cocos/tools/autotest/proxy_snapshots.py | Donnyvdm/dojo19 | 1 | 12763214 | <reponame>Donnyvdm/dojo19
from __future__ import division, print_function, unicode_literals
import six
import sys
import os
import remembercases.snapshot_taker as st
import random
# seed random for repeteability
random.seed(123)
# for repeteabilty between py2 and py3 when running in autotest, random.randint
# random.shuffle, random.choice and random.randrange are redefined
def randint(lo, hi):
    """Drop-in ``random.randint``: uniform integer in [lo, hi] driven by a
    single ``random.random()`` draw, so Py2 and Py3 runs stay in sync."""
    span = hi - lo + 1
    return lo + int(random.random() * span)
random.randint = randint
def shuffle(alist):
    """In-place drop-in for ``random.shuffle``: decorate each element with a
    ``random.random()`` key and stable-sort by key, so the permutation is
    reproducible across Py2/Py3 for a fixed seed."""
    keys = [random.random() for _ in alist]
    order = sorted(range(len(alist)), key=keys.__getitem__)
    snapshot = list(alist)
    for target, source in enumerate(order):
        alist[target] = snapshot[source]
random.shuffle = shuffle
def choice(seq):
    """Drop-in ``random.choice`` built on ``random.randint`` (which this
    module may already have replaced with the reproducible variant)."""
    last_index = len(seq) - 1
    return seq[random.randint(0, last_index)]
random.choice = choice
def randrange(*args):
    """
    randrange(stop)
    randrange(start, stop [, step])

    Drop-in ``random.randrange``: returns a random element of
    ``range(start, stop, step)`` via the module-level ``choice``.

    NOTE: intended for testing purposes and small ranges only -- indexing
    the range this way was costly for huge ranges under Python 2.
    """
    assert len(args) > 0
    step = args[2] if len(args) == 3 else 1
    if len(args) > 1:
        start, stop = args[0], args[1]
    else:
        start, stop = 0, args[0]
    return choice(range(start, stop, step))
random.randrange = randrange
import pyglet
import cocos
from cocos.director import director
import cocos.custom_clocks as cc
pyglet.resource.path.append(os.path.abspath('.'))
pyglet.resource.reindex()
def set_init_interceptor():
    """Wrap ``director.init`` so autotest can hook window creation.

    The wrapper currently just forwards to the original ``init``; the
    indirection exists so extra setup can be injected here later.
    """
    _director_init = director.init
    def director_init_interception(*args, **kwargs):
        _director_init(*args, **kwargs)
        #sys.stderr.write('\nin director_init_interception')
    director.init = director_init_interception

def quit_pyglet_app():
    """Stop the pyglet event loop (used as the sampler's quit callback)."""
    #sys.stderr.write('\nin quit_pyglet_app')
    pyglet.app.exit()

def take_snapshot_cocos_app(fname):
    """Save the current GL color buffer to ``fname``."""
    pyglet.image.get_buffer_manager().get_color_buffer().save(fname)
    #sys.stderr.write('\nafter take_snapshot_cocos_app')
# script_name the basename only
def main(script_name, stored_testinfo, snapshots_dir):
    """Import the demo script, install the autotest clock, and run it.

    Exits with status 1 ("Es01") when the script's ``testinfo`` no longer
    matches the one stored in the database.
    """
    # do interceptions and other setup task here
    # ...
    sys.path.insert(0, os.getcwd())
    module_name = script_name[:script_name.rfind('.py')]
    print('module name:', module_name)
    s = "import %s as script_module"%module_name
    # the exec injects ``script_module`` into this module's globals
    six.exec_(s, globals())
    if stored_testinfo != script_module.testinfo:
        sys.stderr.write("Es01 - received testinfo doesn't match script testinfo. (db outdated?)\n")
        sys.exit(1)

    screen_sampler, diagnostic = st.ScreenSampler.sampler(stored_testinfo,
                                                          script_name,
                                                          fn_quit=quit_pyglet_app,
                                                          fn_take_snapshot=take_snapshot_cocos_app,
                                                          snapshots_dir=snapshots_dir)
    assert diagnostic == ''
    clock = cc.get_autotest_clock(screen_sampler)
    cocos.custom_clocks.set_app_clock(clock)
    set_init_interceptor()
    #sys.stderr.write('\nafter interceptor')
    if hasattr(script_module, 'autotest'):
        # allows the script to know if running through autotest
        script_module.autotest = 1
    script_module.main()

if __name__ == '__main__':
    main(*sys.argv[1:])
| 2.359375 | 2 |
app/home/urls.py | Allen-lang/FXTest | 1 | 12763215 | # -*- coding: utf-8 -*-
# @Author : lileilei
# @File : urls.py
# @Time : 2017/12/7 9:27
from .views import *
from .views import home
# Route table for the ``home`` blueprint.  Paginated list views register the
# same class-based view twice: once for page 1 and once with an explicit
# ``<int:page>`` segment.
home.add_url_rule('/index',view_func=Indexview.as_view('index'))
home.add_url_rule('/login',view_func=LoginView.as_view('login'))
home.add_url_rule('/logt',view_func=LogtView.as_view('logt'))
home.add_url_rule('/interface',view_func=InterfaceView.as_view('interface'))
home.add_url_rule('/interface/<int:page>',view_func=InterfaceView.as_view('interfaspa'))
home.add_url_rule('/yongli',view_func=YongliView.as_view('yongli'))
home.add_url_rule('/yongli/<int:page>',view_func=YongliView.as_view('yonglipage'))
home.add_url_rule('/adminuser',view_func=AdminuserView.as_view('adminuser'))
home.add_url_rule('/adminuser/<int:page>',view_func=AdminuserView.as_view('adminuserpage'))
home.add_url_rule('/project',view_func=ProjectView.as_view('project'))
home.add_url_rule('/project/<int:page>',view_func=ProjectView.as_view('projectpage'))
home.add_url_rule('/model',view_func=ModelView.as_view('model'))
home.add_url_rule('/model/<int:page>',view_func=ModelView.as_view('models'))
home.add_url_rule('/test_rep',view_func=TestrepView.as_view('test_rep'))
home.add_url_rule('/test_rep/<int:page>',view_func=TestrepView.as_view('test_repppage'))
home.add_url_rule('/ceshihuanjing',view_func=TesteventVies.as_view('ceshihuanjing'))
home.add_url_rule('/ceshihuanjing/<int:page>',view_func=TesteventVies.as_view('ceshihuanjings'))
home.add_url_rule('/mock',view_func=MockViews.as_view('mockserver'))
home.add_url_rule('/mock/<int:page>',view_func=MockViews.as_view('mockservers'))
home.add_url_rule('/timingtask',view_func=TimingtasksView.as_view('timingtask'))
home.add_url_rule('/timingtask/<int:page>',view_func=TimingtasksView.as_view('timingtasks'))
home.add_url_rule('/get_pro_test_report',view_func=GettProtestreport.as_view('get_pro_test_report'))
# Jenkins integration routes
home.add_url_rule('/jenkinsfirst',view_func=JenkinsFirst.as_view('jenkinsfirst'))
home.add_url_rule('/buildjob/<jobname>',view_func=JenkinsGou.as_view('buildjob'))
home.add_url_rule('/getjenlog',view_func=GetJenLogview.as_view('get_jen_log'))
home.add_url_rule('/deletejentask/<int:id>',view_func=DeleteJenkinstask.as_view('deletejentask'))
home.add_url_rule('/deletegenconfig/<int:id>',view_func=DeleteGenconfi.as_view('deletegenconfig'))
home.add_url_rule('/genconfig',view_func=GenconfigView.as_view('genconfig'))
home.add_url_rule('/genconfig/<int:page>',view_func=GenconfigView.as_view('genconfigs'))
| 1.84375 | 2 |
nlp/englishnlp/bag_of_words.py | geminihcc526/Learning-Demo | 0 | 12763216 | import pandas as pd
train = pd.read_csv('alldata/labeledTrainData.tsv', header=0, delimiter='\t', quoting=3)
print(train.shape)
print(train.columns.values)
print(train.head())
print(train['review'][0])
test = pd.read_csv('alldata/testData.tsv', header=0, delimiter='\t', quoting=3)
print(test.shape)
print(test.head())
train_split = train['review'][0].split(",")
for str in train_split:
print(str)
# data cleaning
from bs4 import BeautifulSoup
# 在一条评论上初始化一个BeautifulSoup对象
# 用beautifulsoup来清洗html标签
example1 = BeautifulSoup(train['review'][0], 'lxml')
# 比较一下原始的文本和处理过后的文本的差别,通过调用get_text()得到处理后的结果
print(train['review'][0])
print()
print(example1.get_text())
import re
letters_only = re.sub('[^a-zA-Z]', # The pattern to search for
' ', # The pattern to repalce it with
example1.get_text()) # The text to search
print(letters_only)
#大写变小写
lower_case = letters_only.lower() # Convert to lower case
#tokenization
words = lower_case.split() # Split into words
#stop words removal
from nltk.corpus import stopwords # import the stop word list
print(stopwords.words('english')[:10])
#从评论取出stop words
words = [w for w in words if not w in stopwords.words('english')]
print(words[:10])
| 3.390625 | 3 |
src/sc3nb/timed_queue.py | interactive-sonification/sc3nb | 7 | 12763217 | """Classes to run register functions at certain timepoints and run asynchronously"""
import threading
import time
from typing import Any, Callable, Iterable, NoReturn, Union
import numpy as np
import sc3nb
from sc3nb.osc.osc_communication import Bundler, OSCCommunication, OSCMessage
class Event:
    """A scheduled callable: a timestamp plus the function (and arguments)
    to run at that time.

    With ``spawn=True`` the callable is wrapped in its own
    ``threading.Thread`` so long-running work does not block the queue
    worker.

    Parameters
    ----------
    timestamp : float
        Time the event should be executed
    function : Callable[..., None]
        Function to be executed
    args : Iterable[Any]
        Arguments for function
    spawn : bool, optional
        if True, create new thread for function, by default False
    """

    def __init__(
        self,
        timestamp: float,
        function: Callable[..., None],
        args: Iterable[Any],
        spawn: bool = False,
    ) -> None:
        if spawn:
            worker = threading.Thread(target=function, args=args)
            function, args = worker.start, ()
        self.timestamp = timestamp
        self.function = function
        self.args = args

    def execute(self) -> None:
        """Run the stored callable with its stored arguments."""
        self.function(*self.args)

    # Events order purely by timestamp.
    def __eq__(self, other):
        return self.timestamp == other.timestamp

    def __lt__(self, other):
        return self.timestamp < other.timestamp

    def __le__(self, other):
        return self.timestamp <= other.timestamp

    def __repr__(self):
        return "%s: %s" % (self.timestamp, self.function.__name__)
class TimedQueue:
    """Accumulates events as timestamps and functions.

    Executes given functions according to the timestamps

    Parameters
    ----------
    relative_time : bool, optional
        If True, use relative time, by default False
    thread_sleep_time : float, optional
        Sleep time in seconds for worker thread, by default 0.001
    drop_time_threshold : float, optional
        Threshold for execution time of events in seconds.
        If this is exceeded the event will be dropped, by default 0.5
    """

    def __init__(
        self,
        relative_time: bool = False,
        thread_sleep_time: float = 0.001,
        drop_time_threshold: float = 0.5,
    ) -> None:
        self.drop_time_thr = drop_time_threshold
        self.start = time.time() if relative_time else 0

        # Rows of (timestamp, index into event_list), kept sorted by
        # timestamp so row 0 always names the next-due event.
        self.onset_idx = np.empty((0, 2))
        self.event_list = []

        self.close_event = threading.Event()
        self.lock = threading.Lock()
        self.thread = threading.Thread(
            target=self.__worker, args=(thread_sleep_time, self.close_event)
        )  # , daemon=True)
        self.thread.start()

    def close(self) -> None:
        """Closes event processing without waiting for pending events"""
        self.close_event.set()
        self.thread.join()

    def join(self) -> None:
        """Closes event processing after waiting for pending events"""
        self.complete()
        self.close_event.set()
        self.thread.join()

    def complete(self) -> None:
        """Blocks until all pending events have completed"""
        while self.event_list:
            time.sleep(0.01)

    def put(
        self,
        timestamp: float,
        function: Callable[..., None],
        args: Iterable[Any] = (),
        spawn: bool = False,
    ) -> None:
        """Adds event to queue

        Parameters
        ----------
        timestamp : float
            Time (POSIX) when event should be executed
        function : Callable[..., None]
            Function to be executed
        args : Iterable[Any], optional
            Arguments to be passed to function, by default ()
        spawn : bool, optional
            if True, create new sub-thread for function, by default False

        Raises
        ------
        TypeError
            raised if function is not callable
        """
        if not callable(function):
            raise TypeError("function argument cannot be called")
        if not isinstance(args, tuple):
            args = (args,)
        new_event = Event(timestamp, function, args, spawn)
        with self.lock:
            self.event_list.append(new_event)
            evlen = len(self.event_list)
            # BUG FIX: the original used ``if not self.onset_idx.any()`` to
            # detect an empty index, which also fired when every stored
            # timestamp was 0 and misfiled new events at position 0.
            # ``np.searchsorted`` already returns 0 on an empty array, so no
            # special case is needed.
            idx = np.searchsorted(self.onset_idx[:, 0], timestamp)
            self.onset_idx = np.insert(
                self.onset_idx, idx, [timestamp, evlen - 1], axis=0
            )

    def get(self) -> Event:
        """Return the next-due event and remove it from the queue

        Returns
        -------
        Event
            The event with the earliest timestamp
        """
        event = self.peek()
        self.pop()
        return event

    def peek(self) -> Event:
        """Look up the next-due event without removing it

        Returns
        -------
        Event
            The event with the earliest timestamp
        """
        with self.lock:
            return self.event_list[int(self.onset_idx[0][1])]

    def empty(self) -> bool:
        """Checks if the queue is empty

        Returns
        -------
        bool
            True if the queue is empty
        """
        # BUG FIX: the original returned ``bool(self.event_list)``, i.e. the
        # exact inverse of what the name and docstring promise.
        with self.lock:
            return not self.event_list

    def pop(self) -> None:
        """Removes the next-due event from the queue"""
        with self.lock:
            event_idx = int(self.onset_idx[0][1])
            self.onset_idx = self.onset_idx[1:]
            # remove 1 from all idcs after popped event
            self.onset_idx[:, 1][self.onset_idx[:, 1] > event_idx] -= 1
            del self.event_list[event_idx]

    def __worker(self, sleep_time: float, close_event: threading.Event) -> None:
        """Worker loop: execute due events, dropping those older than the
        drop threshold; exits when ``close_event`` is set."""
        while True:
            if close_event.is_set():
                break
            if self.event_list:
                event = self.peek()
                if event.timestamp <= time.time() - self.start:
                    # execute only if not too old
                    if event.timestamp > time.time() - self.start - self.drop_time_thr:
                        event.execute()
                    self.pop()
            time.sleep(sleep_time)

    def __repr__(self):
        return f"<TimedQueue {self.event_list.__repr__()}>"

    def elapse(self, time_delta: float) -> None:
        """Add time delta to the current queue time.

        Parameters
        ----------
        time_delta : float
            Additional time
        """
        self.start += time_delta
class TimedQueueSC(TimedQueue):
    """Timed queue with OSC communication.

    Parameters
    ----------
    server : OSCCommunication, optional
        OSC server to handle the bundlers and messages, by default None
    relative_time : bool, optional
        If True, use relative time, by default False
    thread_sleep_time : float, optional
        Sleep time in seconds for worker thread, by default 0.001
    """

    def __init__(
        self,
        server: OSCCommunication = None,
        relative_time: bool = False,
        thread_sleep_time: float = 0.001,
    ):
        super().__init__(relative_time, thread_sleep_time)
        # fall back to the default SC instance's server when none is given
        self.server = server or sc3nb.SC.get_default().server

    def put_bundler(self, onset: float, bundler: Bundler) -> None:
        """Add a Bundler to queue

        Parameters
        ----------
        onset : float
            Sending timetag of the Bundler
        bundler : Bundler
            Bundler that will be sent
        """
        self.put(onset, bundler.send)

    def put_msg(
        self, onset: float, msg: Union[OSCMessage, str], msg_params: Iterable[Any]
    ) -> None:
        """Add a message to queue

        Parameters
        ----------
        onset : float
            Sending timetag of the message
        msg : Union[OSCMessage, str]
            OSCMessage or OSC address
        msg_params : Iterable[Any]
            If msg is str, this will be the parameters of the created OSCMessage
        """
        # a plain address string is sent via ``server.msg``; a ready-made
        # OSCMessage goes straight to ``server.send``
        if isinstance(msg, str):
            self.put(onset, self.server.msg, args=(msg, msg_params))
        else:
            self.put(onset, self.server.send, args=(msg,))
| 3.28125 | 3 |
fawkes/fetch/remote.py | AalokAhluwalia/fawkes | 87 | 12763218 | <filename>fawkes/fetch/remote.py
import urllib.request
def fetch(review_channel):
    """Download and return the review channel's remote file as text.

    Parameters:
        review_channel: object whose ``file_path`` attribute is a URL
            understood by :func:`urllib.request.urlopen`.

    Returns:
        str: the response body decoded as UTF-8.
    """
    # Use the response as a context manager so the underlying connection
    # is always closed; the original left it to the garbage collector.
    with urllib.request.urlopen(review_channel.file_path) as response:
        return response.read().decode("utf-8")
| 1.796875 | 2 |
python_challenge/3.py | facmartoni/python_exercises | 0 | 12763219 | <reponame>facmartoni/python_exercises
# URL = http://www.pythonchallenge.com/pc/def/equality.html
import re
import collections
def run():
    """Solve Python Challenge #3 ("equality").

    Reads the challenge body from ``3_text.txt`` and prints, in order of
    appearance, every lowercase letter guarded by exactly three uppercase
    letters on each side; joined together they form the next keyword.
    """
    with open('3_text.txt', 'r') as f:
        text = f.read()
    letters = _find_guarded_letters(text)
    print(''.join(letters))


def _find_guarded_letters(text):
    """Return lowercase letters flanked by exactly three uppercase letters.

    The original pattern accepted any run of three-or-more uppercase
    neighbors because nothing excluded a fourth uppercase letter on either
    side; the ``[^A-Z]`` guards enforce "exactly three" as the challenge
    requires.
    """
    return re.findall(r'[^A-Z][A-Z]{3}([a-z])[A-Z]{3}[^A-Z]', text)


if __name__ == '__main__':
    run()
| 3.546875 | 4 |
mayan/apps/documents/tests/test_document_type_views.py | wan1869/dushuhu | 1 | 12763220 | import os
from ..models import DocumentType
from ..permissions import (
permission_document_properties_edit,
permission_document_type_create, permission_document_type_delete,
permission_document_type_edit, permission_document_type_view,
)
from .base import GenericDocumentViewTestCase
from .literals import (
TEST_DOCUMENT_TYPE_LABEL, TEST_DOCUMENT_TYPE_LABEL_EDITED,
TEST_DOCUMENT_TYPE_QUICK_LABEL, TEST_DOCUMENT_TYPE_QUICK_LABEL_EDITED
)
from .mixins import (
DocumentQuickLabelViewTestMixin,
DocumentTypeDeletionPoliciesViewTestMixin,
DocumentTypeFilenameGeneratorViewTestMixin,
DocumentTypeQuickLabelTestMixin, DocumentTypeQuickLabelViewTestMixin,
DocumentTypeViewTestMixin
)
class DocumentTypeDeletionPoliciesViewTestCase(
    DocumentTypeDeletionPoliciesViewTestMixin, GenericDocumentViewTestCase
):
    """Access-control tests for the document type deletion policies view.

    NOTE(review): both test methods below are named after, and call, the
    *filename generator* request helper -- identical to
    DocumentTypeFilenameGeneratorViewTestCase further down. This looks
    like a copy/paste leftover; presumably they should exercise a deletion
    policies request helper provided by
    DocumentTypeDeletionPoliciesViewTestMixin. TODO confirm against the
    mixin's API before renaming.
    """

    # Views only; skip the automatic test document upload.
    auto_upload_test_document = False

    def test_document_type_filename_generator_get_view_no_permission(self):
        # Without any grant the view responds with 404.
        response = self._request_document_type_filename_generator_get_view()
        self.assertEqual(response.status_code, 404)

    def test_document_type_filename_generator_get_view_access(self):
        # Edit access on the document type makes the view render (200).
        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_edit
        )

        response = self._request_document_type_filename_generator_get_view()
        self.assertEqual(response.status_code, 200)
class DocumentTypeFilenameGeneratorViewTestCase(
    DocumentTypeFilenameGeneratorViewTestMixin, GenericDocumentViewTestCase
):
    """Permission checks for the document type filename generator view."""

    # Views only; skip the automatic test document upload.
    auto_upload_test_document = False

    def test_document_type_filename_generator_get_view_no_permission(self):
        # Without any grant the view responds with 404.
        self.assertEqual(
            self._request_document_type_filename_generator_get_view().status_code,
            404
        )

    def test_document_type_filename_generator_get_view_access(self):
        # Edit access on the document type makes the view render.
        self.grant_access(
            permission=permission_document_type_edit,
            obj=self.test_document_type
        )

        self.assertEqual(
            self._request_document_type_filename_generator_get_view().status_code,
            200
        )
class DocumentTypeViewsTestCase(
    DocumentTypeViewTestMixin, GenericDocumentViewTestCase
):
    """Permission matrix for document type create/delete/edit/list views.

    Each view is exercised twice: once without the relevant permission
    (expecting rejection and no model change) and once with it (expecting
    a redirect/render plus the model change).
    """

    # Views only; skip the automatic test document upload.
    auto_upload_test_document = False

    def test_document_type_create_view_no_permission(self):
        # Remove the fixture document type so the count assertions below
        # start from zero.
        self.test_document_type.delete()

        response = self._request_test_document_type_create_view()
        self.assertEqual(response.status_code, 403)

        # Nothing must have been created.
        self.assertEqual(DocumentType.objects.count(), 0)

    def test_document_type_create_view_with_permission(self):
        self.test_document_type.delete()

        self.grant_permission(permission=permission_document_type_create)

        response = self._request_test_document_type_create_view()
        self.assertEqual(response.status_code, 302)

        # Exactly one document type with the expected label exists now.
        self.assertEqual(DocumentType.objects.count(), 1)
        self.assertEqual(
            DocumentType.objects.first().label, TEST_DOCUMENT_TYPE_LABEL
        )

    def test_document_type_delete_view_no_permission(self):
        response = self._request_test_document_type_delete_view()
        self.assertEqual(response.status_code, 404)

        # The fixture document type must survive.
        self.assertEqual(DocumentType.objects.count(), 1)

    def test_document_type_delete_view_with_access(self):
        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_delete
        )

        response = self._request_test_document_type_delete_view()
        self.assertEqual(response.status_code, 302)

        self.assertEqual(DocumentType.objects.count(), 0)

    def test_document_type_edit_view_no_permission(self):
        response = self._request_test_document_type_edit_view()
        self.assertEqual(response.status_code, 404)

        # Label must be unchanged.
        self.test_document_type.refresh_from_db()
        self.assertEqual(
            self.test_document_type.label, TEST_DOCUMENT_TYPE_LABEL
        )

    def test_document_type_edit_view_with_access(self):
        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_edit
        )

        response = self._request_test_document_type_edit_view()
        self.assertEqual(response.status_code, 302)

        # Label must have been updated.
        self.test_document_type.refresh_from_db()
        self.assertEqual(
            self.test_document_type.label, TEST_DOCUMENT_TYPE_LABEL_EDITED
        )

    def test_document_type_list_view_no_permission(self):
        # The list renders, but the unauthorized entry is filtered out.
        response = self._request_test_document_type_list_view()
        self.assertNotContains(
            response=response, status_code=200, text=self.test_document_type
        )

    def test_document_type_list_view_with_access(self):
        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_view
        )

        response = self._request_test_document_type_list_view()
        self.assertContains(
            response=response, status_code=200, text=self.test_document_type
        )
class DocumentTypeQuickLabelViewsTestCase(
    DocumentTypeQuickLabelTestMixin, DocumentTypeQuickLabelViewTestMixin,
    GenericDocumentViewTestCase
):
    """Permission matrix for document type quick label (filename) views.

    Quick labels live in the document type's ``filenames`` relation; each
    CRUD view is tested with and without the required permission.
    """

    # Views only; skip the automatic test document upload.
    auto_upload_test_document = False

    def test_document_type_quick_label_create_no_permission(self):
        # View permission alone is not enough to create quick labels.
        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_view
        )

        response = self._request_quick_label_create()
        self.assertEqual(response.status_code, 404)

        self.assertEqual(self.test_document_type.filenames.count(), 0)

    def test_document_type_quick_label_create_with_access(self):
        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_edit
        )

        response = self._request_quick_label_create()
        self.assertEqual(response.status_code, 302)

        self.assertEqual(self.test_document_type.filenames.count(), 1)

    def test_document_type_quick_label_delete_no_permission(self):
        self._create_test_quick_label()

        response = self._request_quick_label_delete()
        self.assertEqual(response.status_code, 404)

        # The quick label must survive.
        self.assertEqual(
            self.test_document_type.filenames.count(), 1
        )

    def test_document_type_quick_label_delete_with_access(self):
        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_edit
        )
        self._create_test_quick_label()

        response = self._request_quick_label_delete()
        self.assertEqual(response.status_code, 302)

        self.assertEqual(
            self.test_document_type.filenames.count(), 0
        )

    def test_document_type_quick_label_edit_no_permission(self):
        self._create_test_quick_label()

        response = self._request_quick_label_edit()
        self.assertEqual(response.status_code, 404)

        # The filename must be unchanged.
        self.test_document_type_filename.refresh_from_db()
        self.assertEqual(
            self.test_document_type_filename.filename,
            TEST_DOCUMENT_TYPE_QUICK_LABEL
        )

    def test_document_type_quick_label_edit_with_access(self):
        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_edit
        )
        self._create_test_quick_label()

        response = self._request_quick_label_edit()
        self.assertEqual(response.status_code, 302)

        # The filename must have been updated.
        self.test_document_type_filename.refresh_from_db()
        self.assertEqual(
            self.test_document_type_filename.filename,
            TEST_DOCUMENT_TYPE_QUICK_LABEL_EDITED
        )

    def test_document_type_quick_label_list_no_permission(self):
        self._create_test_quick_label()

        response = self._request_quick_label_list_view()
        self.assertEqual(response.status_code, 404)

    def test_document_type_quick_label_list_with_access(self):
        self._create_test_quick_label()
        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_view
        )

        response = self._request_quick_label_list_view()
        self.assertContains(
            response, status_code=200, text=self.test_document_type_filename
        )
class DocumentsQuickLabelViewTestCase(
    DocumentQuickLabelViewTestMixin, DocumentTypeQuickLabelTestMixin,
    GenericDocumentViewTestCase
):
    """Tests applying a document type quick label to a document, with and
    without preserving the document's original file extension."""

    def test_document_quick_label_no_permission(self):
        # Without the properties edit permission the view returns 404.
        self._create_test_quick_label()

        response = self._request_document_quick_label_edit_view()
        self.assertEqual(response.status_code, 404)

    def test_document_quick_label_with_access(self):
        # With access, the document label becomes the quick label verbatim.
        self._create_test_quick_label()
        self.grant_access(
            obj=self.test_document,
            permission=permission_document_properties_edit
        )

        response = self._request_document_quick_label_edit_view()
        self.assertEqual(response.status_code, 302)

        self.test_document.refresh_from_db()
        self.assertEqual(
            self.test_document.label, self.test_document_type_filename.filename
        )

    def test_document_quick_label_preserve_extension_with_access(self):
        self._create_test_quick_label()
        # Keyword order normalized to obj-then-permission for consistency
        # with every other grant_access call in this module.
        self.grant_access(
            obj=self.test_document,
            permission=permission_document_properties_edit
        )

        # Only the extension is needed; the stem was previously unpacked
        # into an unused local.
        extension = os.path.splitext(self.test_document.label)[1]
        response = self._request_document_quick_label_edit_view(
            extra_data={'preserve_extension': True}
        )
        self.assertEqual(response.status_code, 302)

        self.test_document.refresh_from_db()

        # The new label is the quick label plus the original extension.
        self.assertEqual(
            self.test_document.label, '{}{}'.format(
                self.test_document_type_filename.filename, extension
            )
        )

    def test_document_quick_label_no_preserve_extension_with_access(self):
        self._create_test_quick_label()
        self.grant_access(
            obj=self.test_document,
            permission=permission_document_properties_edit
        )

        # The original splitext call here produced two unused locals; the
        # extension is irrelevant when preserve_extension is False.
        response = self._request_document_quick_label_edit_view(
            extra_data={'preserve_extension': False}
        )
        self.assertEqual(response.status_code, 302)

        self.test_document.refresh_from_db()
        self.assertEqual(
            self.test_document.label, self.test_document_type_filename.filename
        )
| 1.992188 | 2 |
tests/system_tests_edge_router.py | cliffjansen/qpid-dispatch | 1 | 12763221 | <gh_stars>1-10
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from time import sleep
from threading import Event
from threading import Timer
import unittest2 as unittest
from proton import Message, Timeout
from system_test import TestCase, Qdrouterd, main_module, TIMEOUT, MgmtMsgProxy
from system_test import AsyncTestReceiver
from system_test import AsyncTestSender
from system_test import QdManager
from system_tests_link_routes import ConnLinkRouteService
from test_broker import FakeService
from proton.handlers import MessagingHandler
from proton.reactor import Container, DynamicNodeProperties
from proton.utils import BlockingConnection
from qpid_dispatch.management.client import Node
from subprocess import PIPE, STDOUT
import re
class AddrTimer(object):
    """Reactor timer task that triggers its owner's address check."""

    def __init__(self, parent):
        # parent must provide a check_address() callable.
        self.parent = parent

    def on_timer_task(self, event):
        """Called by the reactor when the timer fires; event is unused."""
        self.parent.check_address()
class EdgeRouterTest(TestCase):
    """Edge uplink failover tests.

    One edge router (EA1) holds connections to two interior routers
    (INT.A and INT.B). Exactly one of those uplinks may be active at a
    time; when the interior router owning the active uplink dies, the
    standby uplink must be promoted.
    """

    inter_router_port = None

    @classmethod
    def setUpClass(cls):
        """Start the INT.A <=> INT.B interior pair plus edge router EA1,
        which connects to both interiors."""
        super(EdgeRouterTest, cls).setUpClass()

        def router(name, mode, connection, extra=None):
            # Standard configuration shared by every router in the
            # topology; 'connection' (and the optional 'extra') supply
            # the inter-router/edge listeners and connectors.
            config = [
                ('router', {'mode': mode, 'id': name}),
                ('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no'}),
                ('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no', 'multiTenant': 'yes'}),
                ('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no', 'role': 'route-container'}),
                ('linkRoute', {'prefix': '0.0.0.0/link', 'direction': 'in', 'containerId': 'LRC'}),
                ('linkRoute', {'prefix': '0.0.0.0/link', 'direction': 'out', 'containerId': 'LRC'}),
                ('autoLink', {'addr': '0.0.0.0/queue.waypoint', 'containerId': 'ALC', 'direction': 'in'}),
                ('autoLink', {'addr': '0.0.0.0/queue.waypoint', 'containerId': 'ALC', 'direction': 'out'}),
                ('address', {'prefix': 'closest', 'distribution': 'closest'}),
                ('address', {'prefix': 'spread', 'distribution': 'balanced'}),
                ('address', {'prefix': 'multicast', 'distribution': 'multicast'}),
                ('address', {'prefix': '0.0.0.0/queue', 'waypoint': 'yes'}),
                connection
            ]

            if extra:
                config.append(extra)
            config = Qdrouterd.Config(config)
            cls.routers.append(cls.tester.qdrouterd(name, config, wait=True))

        cls.routers = []

        inter_router_port = cls.tester.get_port()
        edge_port_A = cls.tester.get_port()
        edge_port_B = cls.tester.get_port()

        router('INT.A', 'interior', ('listener', {'role': 'inter-router', 'port': inter_router_port}),
               ('listener', {'role': 'edge', 'port': edge_port_A}))
        router('INT.B', 'interior', ('connector', {'name': 'connectorToA', 'role': 'inter-router', 'port': inter_router_port}),
               ('listener', {'role': 'edge', 'port': edge_port_B}))
        # EA1 connects to both interiors; only one uplink may become active.
        router('EA1', 'edge',
               ('connector', {'name': 'edge', 'role': 'edge',
                              'port': edge_port_A}),
               ('connector', {'name': 'edge.1', 'role': 'edge',
                              'port': edge_port_B}))

        cls.routers[0].wait_router_connected('INT.B')
        cls.routers[1].wait_router_connected('INT.A')

    def __init__(self, test_method):
        TestCase.__init__(self, test_method)
        # Failover polling state, shared between the test thread and the
        # one-shot threading.Timer callbacks below.
        self.success = False
        self.timer_delay = 2       # seconds between qdstat checks
        self.max_attempts = 3      # retry budget for the failover check
        self.attempts = 0          # checks completed so far

    def run_qdstat(self, args, regexp=None, address=None):
        """Run the qdstat tool against 'address' and return its output.

        Fails if qdstat exits non-zero or, when 'regexp' is given, if the
        output does not match it (case-insensitively).
        """
        p = self.popen(
            ['qdstat', '--bus', str(address or self.router.addresses[0]),
             '--timeout', str(TIMEOUT)] + args,
            name='qdstat-' + self.id(), stdout=PIPE, expect=None,
            universal_newlines=True)

        out = p.communicate()[0]
        assert p.returncode == 0, \
            "qdstat exit status %s, output:\n%s" % (p.returncode, out)
        if regexp:
            assert re.search(regexp, out, re.I), \
                "Can't find '%s' in '%s'" % (regexp, out)
        return out

    def can_terminate(self):
        """True once the retry budget is spent or a check has succeeded."""
        if self.attempts == self.max_attempts:
            return True

        if self.success:
            return True

        return False

    def run_int_b_edge_qdstat(self):
        # Timer callback: success when EA1 reports an active uplink to
        # INT.B; otherwise consume one attempt and re-arm the timer.
        outs = self.run_qdstat(['--edge'],
                               address=self.routers[2].addresses[0])
        lines = outs.split("\n")
        for line in lines:
            if "INT.B" in line and "yes" in line:
                self.success = True
        self.attempts += 1
        if not self.success:
            # Failover may simply not have completed yet. The original
            # code checked only once, leaving max_attempts unused and the
            # polling loop in test_01 potentially waiting forever; retry
            # up to max_attempts instead.
            self.schedule_int_b_qdstat_test()

    def run_int_a_edge_qdstat(self):
        # Timer callback: success when EA1 reports an active uplink to
        # INT.A; otherwise consume one attempt and re-arm the timer.
        outs = self.run_qdstat(['--edge'],
                               address=self.routers[2].addresses[0])
        lines = outs.split("\n")
        for line in lines:
            if "INT.A" in line and "yes" in line:
                self.success = True
        self.attempts += 1
        if not self.success:
            self.schedule_int_a_qdstat_test()

    def schedule_int_a_qdstat_test(self):
        # Arm a one-shot timer for the next check unless the retry budget
        # is exhausted or a previous check already succeeded.
        if self.attempts < self.max_attempts:
            if not self.success:
                Timer(self.timer_delay, self.run_int_a_edge_qdstat).start()

    def schedule_int_b_qdstat_test(self):
        if self.attempts < self.max_attempts:
            if not self.success:
                Timer(self.timer_delay, self.run_int_b_edge_qdstat).start()

    def test_01_active_flag(self):
        """
        In this test, we have one edge router connected to two interior
        routers. One connection is to INT.A and another connection is to
        INT.B . But only one of these connections is active. We use qdstat
        to make sure that only one of these connections is active.
        Then we kill the router with the active connection and make sure
        that the other connection is now the active one
        """
        success = False
        outs = self.run_qdstat(['--edge'],
                               address=self.routers[0].addresses[0])
        lines = outs.split("\n")
        for line in lines:
            if "EA1" in line and "yes" in line:
                success = True
        if not success:
            self.fail("Active edge connection not found for "
                      "interior router")

        outs = self.run_qdstat(['--edge'],
                               address=self.routers[2].addresses[0])
        conn_map_edge = dict()
        #
        # We dont know which interior router the edge will connect to.
        #
        conn_map_edge["INT.A"] = False
        conn_map_edge["INT.B"] = False
        lines = outs.split("\n")
        for line in lines:
            if "INT.A" in line and "yes" in line:
                conn_map_edge["INT.A"] = True
            if "INT.B" in line and "yes" in line:
                conn_map_edge["INT.B"] = True

        if conn_map_edge["INT.A"] and conn_map_edge["INT.B"]:
            self.fail("Edge router has two active connections to interior "
                      "routers. Should have only one")

        if not conn_map_edge["INT.A"] and not conn_map_edge["INT.B"]:
            self.fail("There are no active connections to interior routers")

        if conn_map_edge["INT.A"]:
            #
            # INT.A has the active connection. Let's kill INT.A and see
            # if the other connection becomes active
            #
            EdgeRouterTest.routers[0].teardown()
            self.schedule_int_b_qdstat_test()

            while not self.can_terminate():
                # Sleep instead of spinning a CPU core while the timer
                # callbacks run in the background.
                sleep(0.25)
            self.assertTrue(self.success)
        elif conn_map_edge["INT.B"]:
            #
            # INT.B has the active connection. Let's kill INT.B and see
            # if the other connection becomes active
            #
            EdgeRouterTest.routers[1].teardown()
            self.schedule_int_a_qdstat_test()

            while not self.can_terminate():
                sleep(0.25)
            self.assertTrue(self.success)
class RouterTest(TestCase):
    """Dynamic, mobile and multicast address tests over an edge topology.

    Topology (see setUpClass): interiors INT.A <=> INT.B; edges EA1/EA2
    attach to INT.A and EB1/EB2 attach to INT.B.
    Router index map: 0=INT.A, 1=INT.B, 2=EA1, 3=EA2, 4=EB1, 5=EB2.
    Each test uses a unique address (named after the test number) so the
    tests cannot interfere with one another.
    """

    inter_router_port = None

    @classmethod
    def setUpClass(cls):
        """Start the two interiors and the four edge routers"""
        super(RouterTest, cls).setUpClass()

        def router(name, mode, connection, extra=None):
            # Standard configuration shared by every router in the
            # topology; 'connection' (and the optional 'extra') supply
            # the inter-router/edge listeners and connectors.
            config = [
                ('router', {'mode': mode, 'id': name}),
                ('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no'}),
                ('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no', 'multiTenant': 'yes'}),
                ('listener', {'port': cls.tester.get_port(), 'stripAnnotations': 'no', 'role': 'route-container'}),
                ('linkRoute', {'prefix': '0.0.0.0/link', 'direction': 'in', 'containerId': 'LRC'}),
                ('linkRoute', {'prefix': '0.0.0.0/link', 'direction': 'out', 'containerId': 'LRC'}),
                ('autoLink', {'addr': '0.0.0.0/queue.waypoint', 'containerId': 'ALC', 'direction': 'in'}),
                ('autoLink', {'addr': '0.0.0.0/queue.waypoint', 'containerId': 'ALC', 'direction': 'out'}),
                ('address', {'prefix': 'closest', 'distribution': 'closest'}),
                ('address', {'prefix': 'spread', 'distribution': 'balanced'}),
                ('address', {'prefix': 'multicast', 'distribution': 'multicast'}),
                ('address', {'prefix': '0.0.0.0/queue', 'waypoint': 'yes'}),
                connection
            ]

            if extra:
                config.append(extra)
            config = Qdrouterd.Config(config)
            cls.routers.append(cls.tester.qdrouterd(name, config, wait=True))

        cls.routers = []

        inter_router_port = cls.tester.get_port()
        edge_port_A = cls.tester.get_port()
        edge_port_B = cls.tester.get_port()

        router('INT.A', 'interior', ('listener', {'role': 'inter-router', 'port': inter_router_port}),
               ('listener', {'role': 'edge', 'port': edge_port_A}))
        router('INT.B', 'interior', ('connector', {'name': 'connectorToA', 'role': 'inter-router', 'port': inter_router_port}),
               ('listener', {'role': 'edge', 'port': edge_port_B}))
        router('EA1', 'edge', ('connector', {'name': 'edge', 'role': 'edge', 'port': edge_port_A}))
        router('EA2', 'edge', ('connector', {'name': 'edge', 'role': 'edge', 'port': edge_port_A}))
        router('EB1', 'edge', ('connector', {'name': 'edge', 'role': 'edge', 'port': edge_port_B}))
        router('EB2', 'edge', ('connector', {'name': 'edge', 'role': 'edge', 'port': edge_port_B}))

        cls.routers[0].wait_router_connected('INT.B')
        cls.routers[1].wait_router_connected('INT.A')

    def test_01_connectivity_INTA_EA1(self):
        test = ConnectivityTest(self.routers[0].addresses[0],
                                self.routers[2].addresses[0],
                                'EA1')
        test.run()
        self.assertEqual(None, test.error)

    def test_02_connectivity_INTA_EA2(self):
        test = ConnectivityTest(self.routers[0].addresses[0],
                                self.routers[3].addresses[0],
                                'EA2')
        test.run()
        self.assertEqual(None, test.error)

    def test_03_connectivity_INTB_EB1(self):
        test = ConnectivityTest(self.routers[1].addresses[0],
                                self.routers[4].addresses[0],
                                'EB1')
        test.run()
        self.assertEqual(None, test.error)

    def test_04_connectivity_INTB_EB2(self):
        test = ConnectivityTest(self.routers[1].addresses[0],
                                self.routers[5].addresses[0],
                                'EB2')
        test.run()
        self.assertEqual(None, test.error)

    def test_05_dynamic_address_same_edge(self):
        test = DynamicAddressTest(self.routers[2].addresses[0],
                                  self.routers[2].addresses[0])
        test.run()
        self.assertEqual(None, test.error)

    def test_06_dynamic_address_interior_to_edge(self):
        test = DynamicAddressTest(self.routers[2].addresses[0],
                                  self.routers[0].addresses[0])
        test.run()
        self.assertEqual(None, test.error)

    def test_07_dynamic_address_edge_to_interior(self):
        test = DynamicAddressTest(self.routers[0].addresses[0],
                                  self.routers[2].addresses[0])
        test.run()
        self.assertEqual(None, test.error)

    def test_08_dynamic_address_edge_to_edge_one_interior(self):
        test = DynamicAddressTest(self.routers[2].addresses[0],
                                  self.routers[3].addresses[0])
        test.run()
        self.assertEqual(None, test.error)

    def test_09_dynamic_address_edge_to_edge_two_interior(self):
        test = DynamicAddressTest(self.routers[2].addresses[0],
                                  self.routers[4].addresses[0])
        test.run()
        self.assertEqual(None, test.error)

    def test_10_mobile_address_same_edge(self):
        test = MobileAddressTest(self.routers[2].addresses[0],
                                 self.routers[2].addresses[0],
                                 "test_10")
        test.run()
        self.assertEqual(None, test.error)

    def test_11_mobile_address_interior_to_edge(self):
        test = MobileAddressTest(self.routers[2].addresses[0],
                                 self.routers[0].addresses[0],
                                 "test_11")
        test.run()
        self.assertEqual(None, test.error)

    def test_12_mobile_address_edge_to_interior(self):
        test = MobileAddressTest(self.routers[0].addresses[0],
                                 self.routers[2].addresses[0],
                                 "test_12")
        test.run()
        self.assertEqual(None, test.error)

    def test_13_mobile_address_edge_to_edge_one_interior(self):
        test = MobileAddressTest(self.routers[2].addresses[0],
                                 self.routers[3].addresses[0],
                                 "test_13")
        test.run()
        self.assertEqual(None, test.error)

    def test_14_mobile_address_edge_to_edge_two_interior(self):
        test = MobileAddressTest(self.routers[2].addresses[0],
                                 self.routers[4].addresses[0],
                                 "test_14")
        test.run()
        self.assertEqual(None, test.error)

    # One sender two receiver tests.
    # One sender and two receivers on the same edge
    def test_15_mobile_address_same_edge(self):
        test = MobileAddressOneSenderTwoReceiversTest(self.routers[2].addresses[0],
                                                      self.routers[2].addresses[0],
                                                      self.routers[2].addresses[0],
                                                      "test_15")
        test.run()
        self.assertEqual(None, test.error)

    # One sender and two receivers on the different edges. The edges are
    # hanging off the same interior router.
    def test_16_mobile_address_edge_to_another_edge_same_interior(self):
        test = MobileAddressOneSenderTwoReceiversTest(self.routers[2].addresses[0],
                                                      self.routers[2].addresses[0],
                                                      self.routers[3].addresses[0],
                                                      "test_16")
        test.run()
        self.assertEqual(None, test.error)

    # Two receivers on the interior and sender on the edge
    def test_17_mobile_address_edge_to_interior(self):
        test = MobileAddressOneSenderTwoReceiversTest(self.routers[0].addresses[0],
                                                      self.routers[0].addresses[0],
                                                      self.routers[2].addresses[0],
                                                      "test_17")
        test.run()
        self.assertEqual(None, test.error)

    # Two receivers on the edge and the sender on the interior
    def test_18_mobile_address_interior_to_edge(self):
        test = MobileAddressOneSenderTwoReceiversTest(self.routers[2].addresses[0],
                                                      self.routers[2].addresses[0],
                                                      self.routers[0].addresses[0],
                                                      "test_18")
        test.run()
        self.assertEqual(None, test.error)

    # Two receivers on the edge and the sender on the 'other' interior
    def test_19_mobile_address_other_interior_to_edge(self):
        test = MobileAddressOneSenderTwoReceiversTest(self.routers[2].addresses[0],
                                                      self.routers[2].addresses[0],
                                                      self.routers[1].addresses[0],
                                                      "test_19")
        test.run()
        self.assertEqual(None, test.error)

    # Two receivers on the edge and the sender on the edge of
    # the 'other' interior
    def test_20_mobile_address_edge_to_edge_two_interiors(self):
        test = MobileAddressOneSenderTwoReceiversTest(self.routers[2].addresses[0],
                                                      self.routers[2].addresses[0],
                                                      self.routers[5].addresses[0],
                                                      "test_20")
        test.run()
        self.assertEqual(None, test.error)

    # One receiver in an edge, another one in interior and the sender
    # is on the edge of another interior
    def test_21_mobile_address_edge_interior_receivers(self):
        test = MobileAddressOneSenderTwoReceiversTest(self.routers[4].addresses[0],
                                                      self.routers[1].addresses[0],
                                                      self.routers[2].addresses[0],
                                                      "test_21")
        test.run()
        self.assertEqual(None, test.error)

    # Two receivers, one on each interior router, and an edge sender
    # connected to the first interior
    def test_22_mobile_address_edge_sender_two_interior_receivers(self):
        test = MobileAddressOneSenderTwoReceiversTest(self.routers[0].addresses[0],
                                                      self.routers[1].addresses[0],
                                                      self.routers[3].addresses[0],
                                                      "test_22")
        test.run()
        self.assertEqual(None, test.error)

    def test_23_mobile_address_edge_sender_two_edge_receivers(self):
        test = MobileAddressOneSenderTwoReceiversTest(self.routers[4].addresses[0],
                                                      self.routers[5].addresses[0],
                                                      self.routers[2].addresses[0],
                                                      "test_23")
        test.run()
        self.assertEqual(None, test.error)

    # 1 Sender and 3 receivers all on the same edge
    def test_24_multicast_mobile_address_same_edge(self):
        test = MobileAddressMulticastTest(self.routers[2].addresses[0],
                                          self.routers[2].addresses[0],
                                          self.routers[2].addresses[0],
                                          self.routers[2].addresses[0],
                                          "multicast.24")
        test.run()
        self.assertEqual(None, test.error)

    # 1 Sender and receiver on one edge and 2 receivers on another edge
    # all in the same interior
    def test_25_multicast_mobile_address_different_edges_same_interior(self):
        test = MobileAddressMulticastTest(self.routers[2].addresses[0],
                                          self.routers[2].addresses[0],
                                          self.routers[3].addresses[0],
                                          self.routers[3].addresses[0],
                                          "multicast.25",
                                          self.routers[0].addresses[0])
        test.run()
        self.assertEqual(None, test.error)

    # One receiver on each edge, one receiver on the interior and the
    # sender on an edge
    def test_26_multicast_mobile_address_edge_to_interior(self):
        test = MobileAddressMulticastTest(self.routers[2].addresses[0],
                                          self.routers[3].addresses[0],
                                          self.routers[0].addresses[0],
                                          self.routers[2].addresses[0],
                                          "multicast.26",
                                          self.routers[0].addresses[0])
        test.run()
        self.assertEqual(None, test.error)

    # Receivers on the edge and sender on the interior
    def test_27_multicast_mobile_address_interior_to_edge(self):
        test = MobileAddressMulticastTest(self.routers[2].addresses[0],
                                          self.routers[2].addresses[0],
                                          self.routers[3].addresses[0],
                                          self.routers[0].addresses[0],
                                          "multicast.27",
                                          self.routers[0].addresses[0])
        test.run()
        self.assertEqual(None, test.error)

    # Receivers on the edge and sender on an interior that is not connected
    # to the edges.
    def test_28_multicast_mobile_address_other_interior_to_edge(self):
        test = MobileAddressMulticastTest(self.routers[2].addresses[0],
                                          self.routers[2].addresses[0],
                                          self.routers[3].addresses[0],
                                          self.routers[1].addresses[0],
                                          "multicast.28")
        test.run()
        self.assertEqual(None, test.error)

    # Sender on an interior and 3 receivers connected to three different edges
    def test_29_multicast_mobile_address_edge_to_edge_two_interiors(self):
        test = MobileAddressMulticastTest(self.routers[2].addresses[0],
                                          self.routers[3].addresses[0],
                                          self.routers[4].addresses[0],
                                          self.routers[0].addresses[0],
                                          "multicast.29")
        test.run()
        self.assertEqual(None, test.error)

    def test_30_multicast_mobile_address_all_edges(self):
        test = MobileAddressMulticastTest(self.routers[2].addresses[0],
                                          self.routers[3].addresses[0],
                                          self.routers[4].addresses[0],
                                          self.routers[5].addresses[0],
                                          "multicast.30",
                                          self.routers[0].addresses[0])
        test.run()
        self.assertEqual(None, test.error)

    ######### Multicast Large message tests ######################

    # 1 Sender and 3 receivers all on the same edge
    def test_31_multicast_mobile_address_same_edge(self):
        test = MobileAddressMulticastTest(self.routers[2].addresses[0],
                                          self.routers[2].addresses[0],
                                          self.routers[2].addresses[0],
                                          self.routers[2].addresses[0],
                                          "multicast.31", large_msg=True)
        test.run()
        self.assertEqual(None, test.error)

    # Two receivers on one edge, a third receiver plus the sender on a
    # sibling edge of the same interior
    def test_32_multicast_mobile_address_different_edges_same_interior(self):
        test = MobileAddressMulticastTest(self.routers[2].addresses[0],
                                          self.routers[2].addresses[0],
                                          self.routers[3].addresses[0],
                                          self.routers[3].addresses[0],
                                          "multicast.32",
                                          self.routers[0].addresses[0],
                                          large_msg=True)
        test.run()
        self.assertEqual(None, test.error)

    # One receiver on each edge, one receiver on the interior and the
    # sender on an edge
    def test_33_multicast_mobile_address_edge_to_interior(self):
        test = MobileAddressMulticastTest(self.routers[2].addresses[0],
                                          self.routers[3].addresses[0],
                                          self.routers[0].addresses[0],
                                          self.routers[2].addresses[0],
                                          "multicast.33", large_msg=True)
        test.run()
        self.assertEqual(None, test.error)

    # Receivers on the edge and sender on the interior
    def test_34_multicast_mobile_address_interior_to_edge(self):
        test = MobileAddressMulticastTest(self.routers[2].addresses[0],
                                          self.routers[2].addresses[0],
                                          self.routers[3].addresses[0],
                                          self.routers[0].addresses[0],
                                          "multicast.34", large_msg=True)
        test.run()
        self.assertEqual(None, test.error)

    # Receivers on the edge and sender on an interior that is not connected
    # to the edges.
    def test_35_multicast_mobile_address_other_interior_to_edge(self):
        test = MobileAddressMulticastTest(self.routers[2].addresses[0],
                                          self.routers[2].addresses[0],
                                          self.routers[3].addresses[0],
                                          self.routers[1].addresses[0],
                                          "multicast.35",
                                          self.routers[0].addresses[0],
                                          large_msg=True)
        test.run()
        self.assertEqual(None, test.error)

    # Sender on an interior and 3 receivers connected to three different edges
    def test_36_multicast_mobile_address_edge_to_edge_two_interiors(self):
        test = MobileAddressMulticastTest(self.routers[2].addresses[0],
                                          self.routers[3].addresses[0],
                                          self.routers[4].addresses[0],
                                          self.routers[0].addresses[0],
                                          "multicast.36", large_msg=True)
        test.run()
        self.assertEqual(None, test.error)

    def test_37_multicast_mobile_address_all_edges(self):
        test = MobileAddressMulticastTest(self.routers[2].addresses[0],
                                          self.routers[3].addresses[0],
                                          self.routers[4].addresses[0],
                                          self.routers[5].addresses[0],
                                          "multicast.37",
                                          self.routers[0].addresses[0],
                                          large_msg=True)
        test.run()
        self.assertEqual(None, test.error)

    def test_38_mobile_addr_event_three_receivers_same_interior(self):
        test = MobileAddressEventTest(self.routers[2].addresses[0],
                                      self.routers[3].addresses[0],
                                      self.routers[3].addresses[0],
                                      self.routers[2].addresses[0],
                                      self.routers[0].addresses[0],
                                      "test_38")

        test.run()
        self.assertEqual(None, test.error)

    def test_39_mobile_addr_event_three_receivers_diff_interior(self):
        # This will test the QDRC_EVENT_ADDR_TWO_DEST event
        test = MobileAddressEventTest(self.routers[2].addresses[0],
                                      self.routers[4].addresses[0],
                                      self.routers[5].addresses[0],
                                      self.routers[2].addresses[0],
                                      self.routers[0].addresses[0],
                                      "test_39")

        test.run()
        self.assertEqual(None, test.error)

    def test_40_drop_rx_client_multicast_large_message(self):
        # test what happens if some multicast receivers close in the middle of
        # a multiframe transfer
        test = MobileAddrMcastDroppedRxTest(self.routers[2].addresses[0],
                                            self.routers[2].addresses[0],
                                            self.routers[2].addresses[0],
                                            self.routers[2].addresses[0],
                                            "multicast.40")
        test.run()
        self.assertEqual(None, test.error)

    def test_41_drop_rx_client_multicast_small_message(self):
        # test what happens if some multicast receivers close while a small
        # (single-frame) message is being delivered. Address changed from
        # the copy-pasted "multicast.40" so this test cannot collide with
        # test_40's address.
        test = MobileAddrMcastDroppedRxTest(self.routers[2].addresses[0],
                                            self.routers[2].addresses[0],
                                            self.routers[2].addresses[0],
                                            self.routers[2].addresses[0],
                                            "multicast.41", large_msg=False)
        test.run()
        self.assertEqual(None, test.error)
class LinkRouteProxyTest(TestCase):
    """
    Test edge router's ability to proxy configured and connection-scoped link
    routes into the interior
    """

    @classmethod
    def setUpClass(cls):
        """Start a router"""
        super(LinkRouteProxyTest, cls).setUpClass()

        def router(name, mode, extra):
            # Helper: build and start one router with a normal listener plus
            # whatever role-specific entities are passed in `extra`.
            config = [
                ('router', {'mode': mode, 'id': name}),
                ('listener', {'role': 'normal', 'port': cls.tester.get_port()})
            ]
            if extra:
                config.extend(extra)
            config = Qdrouterd.Config(config)
            cls.routers.append(cls.tester.qdrouterd(name, config, wait=True))
            return cls.routers[-1]

        # configuration:
        # two edge routers connected via 2 interior routers.
        #
        #  +-------+    +---------+    +---------+    +-------+
        #  |  EA1  |<==>|  INT.A  |<==>|  INT.B  |<==>|  EB1  |
        #  +-------+    +---------+    +---------+    +-------+
        cls.routers = []
        interrouter_port = cls.tester.get_port()
        cls.INTA_edge_port = cls.tester.get_port()
        cls.INTB_edge_port = cls.tester.get_port()

        router('INT.A', 'interior',
               [('listener', {'role': 'inter-router', 'port': interrouter_port}),
                ('listener', {'role': 'edge', 'port': cls.INTA_edge_port})])
        cls.INT_A = cls.routers[0]
        cls.INT_A.listener = cls.INT_A.addresses[0]

        router('INT.B', 'interior',
               [('connector', {'name': 'connectorToA', 'role': 'inter-router',
                               'port': interrouter_port}),
                ('listener', {'role': 'edge', 'port': cls.INTB_edge_port})])
        cls.INT_B = cls.routers[1]
        cls.INT_B.listener = cls.INT_B.addresses[0]

        # EA1 carries two pre-configured prefix link routes for 'CfgLinkRoute1'.
        router('EA1', 'edge',
               [('listener', {'name': 'rc', 'role': 'route-container',
                              'port': cls.tester.get_port()}),
                ('connector', {'name': 'uplink', 'role': 'edge',
                               'port': cls.INTA_edge_port}),
                ('linkRoute', {'prefix': 'CfgLinkRoute1', 'containerId': 'FakeBroker', 'direction': 'in'}),
                ('linkRoute', {'prefix': 'CfgLinkRoute1', 'containerId': 'FakeBroker', 'direction': 'out'})])
        cls.EA1 = cls.routers[2]
        cls.EA1.listener = cls.EA1.addresses[0]
        cls.EA1.route_container = cls.EA1.addresses[1]

        # EB1 carries two pre-configured *pattern* link routes.
        router('EB1', 'edge',
               [('connector', {'name': 'uplink', 'role': 'edge',
                               'port': cls.INTB_edge_port}),
                ('listener', {'name': 'rc', 'role': 'route-container',
                              'port': cls.tester.get_port()}),
                ('linkRoute', {'pattern': '*.cfg.pattern.#', 'containerId': 'FakeBroker', 'direction': 'in'}),
                ('linkRoute', {'pattern': '*.cfg.pattern.#', 'containerId': 'FakeBroker', 'direction': 'out'})])
        cls.EB1 = cls.routers[3]
        cls.EB1.listener = cls.EB1.addresses[0]
        cls.EB1.route_container = cls.EB1.addresses[1]

        # Block until the full topology is connected before any test runs.
        cls.INT_A.wait_router_connected('INT.B')
        cls.INT_B.wait_router_connected('INT.A')
        cls.EA1.wait_connectors()
        cls.EB1.wait_connectors()

        # Management entity type names used throughout the tests.
        cls.CFG_LINK_ROUTE_TYPE = 'org.apache.qpid.dispatch.router.config.linkRoute'
        cls.CONN_LINK_ROUTE_TYPE = 'org.apache.qpid.dispatch.router.connection.linkRoute'
        cls.CONNECTOR_TYPE = 'org.apache.qpid.dispatch.connector'

    def _get_address(self, router, address):
        """Lookup address in route table"""
        a_type = 'org.apache.qpid.dispatch.router.address'
        addrs = router.management.query(a_type).get_dicts()
        # Substring match: router addresses carry a class prefix (e.g. 'C'/'D').
        return list(filter(lambda a: a['name'].find(address) != -1,
                           addrs))

    def _wait_address_gone(self, router, address):
        """Block until address is removed from the route table"""
        while self._get_address(router, address):
            sleep(0.1)

    def _test_traffic(self, sender, receiver, address, count=5):
        """Generate message traffic between two normal clients"""
        tr = AsyncTestReceiver(receiver, address)
        ts = AsyncTestSender(sender, address, count)
        ts.wait()     # wait until all sent
        for i in range(count):
            tr.queue.get(timeout=TIMEOUT)
        tr.stop()

    def test_01_immedate_detach_reattach(self):
        """
        Have a service for a link routed address abruptly detach
        in response to an incoming link attach

        The attaching client from EB1 will get an attach response then an
        immediate detach.  The client will immediately re-establish the link.
        """
        class AttachDropper(FakeService):
            # Service that rejects the first attach by closing link+connection.
            def __init__(self, *args, **kwargs):
                super(AttachDropper, self).__init__(*args, **kwargs)
                self.link_dropped = Event()

            def on_link_remote_open(self, event):
                # drop it
                event.link.close()
                event.connection.close()
                self.link_dropped.set()

        ad = AttachDropper(self.EA1.route_container)
        self.INT_B.wait_address("CfgLinkRoute1")
        # create a consumer, do not wait for link to open, reattach
        # on received detach
        rx = AsyncTestReceiver(self.EB1.listener, 'CfgLinkRoute1/foo',
                               wait=False, recover_link=True)
        ad.link_dropped.wait(timeout=TIMEOUT)
        ad.join()  # wait for thread exit

        # wait until prefix addresses are removed
        self._wait_address_gone(self.INT_B, "CCfgLinkRoute1")
        self._wait_address_gone(self.INT_B, "DCfgLinkRoute1")
        rx.stop()

        # now attach a working service to the same address,
        # make sure it all works
        fs = FakeService(self.EA1.route_container)
        self.INT_B.wait_address("CfgLinkRoute1")
        rx = AsyncTestReceiver(self.EB1.listener, 'CfgLinkRoute1/foo',
                               wait=False, recover_link=True)
        tx = AsyncTestSender(self.EA1.listener, 'CfgLinkRoute1/foo',
                             body="HEY HO LET'S GO!")
        tx.wait()
        msg = rx.queue.get(timeout=TIMEOUT)
        self.assertTrue(msg.body == "HEY HO LET'S GO!")

        rx.stop()
        fs.join()
        self.assertEqual(1, fs.in_count)
        self.assertEqual(1, fs.out_count)

        # wait until addresses are cleaned up
        self._wait_address_gone(self.INT_A, "CfgLinkRoute1")
        self._wait_address_gone(self.INT_B, "CfgLinkRoute1")

    def test_02_thrashing_link_routes(self):
        """
        Rapidly add and delete link routes at the edge
        """
        # activate the pre-configured link routes
        ea1_mgmt = self.EA1.management
        fs = FakeService(self.EA1.route_container)
        self.INT_B.wait_address("CfgLinkRoute1")

        for i in range(10):
            lr1 = ea1_mgmt.create(type=self.CFG_LINK_ROUTE_TYPE,
                                  name="TestLRout%d" % i,
                                  attributes={'pattern': 'Test/*/%d/#' % i,
                                              'containerId': 'FakeBroker',
                                              'direction': 'out'})
            lr2 = ea1_mgmt.create(type=self.CFG_LINK_ROUTE_TYPE,
                                  name="TestLRin%d" % i,
                                  attributes={'pattern': 'Test/*/%d/#' % i,
                                              'containerId': 'FakeBroker',
                                              'direction': 'in'})
            # verify that they are correctly propagated (once)
            if i == 9:
                self.INT_B.wait_address("Test/*/9/#")
            lr1.delete()
            lr2.delete()

        fs.join()
        self._wait_address_gone(self.INT_B, "CfgLinkRoute1")

    def _validate_topology(self, router, expected_links, address):
        """
        query existing links and verify they are set up as expected
        """
        mgmt = QdManager(self, address=router)
        # fetch all the connections
        cl = mgmt.query('org.apache.qpid.dispatch.connection')
        # map them by their identity
        conns = dict([(c['identity'], c) for c in cl])

        # now fetch all links for the address
        ll = mgmt.query('org.apache.qpid.dispatch.router.link')
        test_links = [l for l in ll if
                      l.get('owningAddr', '').find(address) != -1]
        self.assertEqual(len(expected_links), len(test_links))

        # each expected (direction, container, role) triple must match
        # exactly one existing link
        for elink in expected_links:
            matches = filter(lambda l: (l['linkDir'] == elink[0]
                                        and
                                        conns[l['connectionId']]['container'] == elink[1]
                                        and
                                        conns[l['connectionId']]['role'] == elink[2]),
                             test_links)
            self.assertTrue(len(list(matches)) == 1)

    def test_03_interior_conn_lost(self):
        """
        What happens when the interior connection bounces?
        """
        config = Qdrouterd.Config([('router', {'mode': 'edge',
                                               'id': 'Edge1'}),
                                   ('listener', {'role': 'normal',
                                                 'port': self.tester.get_port()}),
                                   ('listener', {'name': 'rc',
                                                 'role': 'route-container',
                                                 'port': self.tester.get_port()}),
                                   ('linkRoute', {'pattern': 'Edge1/*',
                                                  'containerId': 'FakeBroker',
                                                  'direction': 'in'}),
                                   ('linkRoute', {'pattern': 'Edge1/*',
                                                  'containerId': 'FakeBroker',
                                                  'direction': 'out'})])
        er = self.tester.qdrouterd('Edge1', config, wait=True)

        # activate the link routes before the connection exists
        fs = FakeService(er.addresses[1])
        er.wait_address("Edge1/*")

        # create the connection to interior
        er_mgmt = er.management
        ctor = er_mgmt.create(type=self.CONNECTOR_TYPE,
                              name='toA',
                              attributes={'role': 'edge',
                                          'port': self.INTA_edge_port})
        self.INT_B.wait_address("Edge1/*")

        # delete it, and verify the routes are removed
        ctor.delete()
        self._wait_address_gone(self.INT_B, "Edge1/*")

        # now recreate and verify routes re-appear
        ctor = er_mgmt.create(type=self.CONNECTOR_TYPE,
                              name='toA',
                              attributes={'role': 'edge',
                                          'port': self.INTA_edge_port})
        self.INT_B.wait_address("Edge1/*")
        self._test_traffic(self.INT_B.listener,
                           self.INT_B.listener,
                           "Edge1/One",
                           count=5)
        fs.join()
        self.assertEqual(5, fs.in_count)
        self.assertEqual(5, fs.out_count)
        er.teardown()
        self._wait_address_gone(self.INT_B, "Edge1/*")

    def test_50_link_topology(self):
        """
        Verify that the link topology that results from activating a link route
        and sending traffic is correct
        """
        fs = FakeService(self.EA1.route_container)
        self.INT_B.wait_address("CfgLinkRoute1")

        # create a sender on one edge and the receiver on another
        bc_b = BlockingConnection(self.EB1.listener, timeout=TIMEOUT)
        erx = bc_b.create_receiver(address="CfgLinkRoute1/buhbye", credit=10)
        bc_a = BlockingConnection(self.EA1.listener, timeout=TIMEOUT)
        etx = bc_a.create_sender(address="CfgLinkRoute1/buhbye")

        etx.send(Message(body="HI THERE"), timeout=TIMEOUT)
        self.assertEqual("HI THERE", erx.receive(timeout=TIMEOUT).body)
        erx.accept()

        # expect the following links have been established for the
        # "CfgLinkRoute1/buhbye" address:
        # EA1
        #   1 out link to  INT.A (connection role: edge)
        #   1 in  link from bc_a (normal)
        #   1 in  link from FakeBroker (route-container)
        #   1 out link to   FakeBroker (route-container)
        # INT.A
        #   1 in  link from EA1 (edge)
        #   1 out link to  INT.B (inter-router)
        # INT.B
        #   1 out link to  EB1 (edge)
        #   1 in  link from INT.A (inter-router)
        # EB1
        #   1 out link to  bc_b (normal)
        #   1 in  link from INT.B (edge)
        expect = {
            self.EA1.listener: [
                ('in', bc_a.container.container_id, 'normal'),
                ('in', 'FakeBroker', 'route-container'),
                ('out', 'FakeBroker', 'route-container'),
                ('out', 'INT.A', 'edge')],
            self.INT_A.listener: [
                ('in', 'EA1', 'edge'),
                ('out', 'INT.B', 'inter-router')],
            self.INT_B.listener: [
                ('in', 'INT.A', 'inter-router'),
                ('out', 'EB1', 'edge')],
            self.EB1.listener: [
                ('in', 'INT.B', 'edge'),
                ('out', bc_b.container.container_id, 'normal')]
        }
        for router, expected_links in expect.items():
            self._validate_topology(router, expected_links,
                                    'CfgLinkRoute1/buhbye')
        fs.join()
        self.assertEqual(1, fs.in_count)
        self.assertEqual(1, fs.out_count)

    def test_51_link_route_proxy_configured(self):
        """
        Activate the configured link routes via a FakeService, verify proxies
        created by passing traffic from/to and interior router
        """
        a_type = 'org.apache.qpid.dispatch.router.address'

        fs = FakeService(self.EA1.route_container)
        self.INT_B.wait_address("CfgLinkRoute1")

        self._test_traffic(self.INT_B.listener,
                           self.INT_B.listener,
                           "CfgLinkRoute1/hi",
                           count=5)
        fs.join()
        self.assertEqual(5, fs.in_count)
        self.assertEqual(5, fs.out_count)

        # now that FakeService is gone, the link route should no longer be
        # active:
        self._wait_address_gone(self.INT_A, "CfgLinkRoute1")

        # repeat test, but this time with patterns:
        fs = FakeService(self.EB1.route_container)
        self.INT_A.wait_address("*.cfg.pattern.#")

        self._test_traffic(self.INT_A.listener,
                           self.INT_A.listener,
                           "MATCH.cfg.pattern",
                           count=5)
        fs.join()
        self.assertEqual(5, fs.in_count)
        self.assertEqual(5, fs.out_count)
        self._wait_address_gone(self.INT_A, "*.cfg.pattern.#")

    def test_52_conn_link_route_proxy(self):
        """
        Test connection scoped link routes by connecting a fake service to the
        Edge via the route-container connection.  Have the fake service
        configured some link routes.  Then have clients on the interior
        exchange messages via the fake service.
        """
        fs = ConnLinkRouteService(self.EA1.route_container,
                                  container_id="FakeService",
                                  config=[("ConnLinkRoute1",
                                           {"pattern": "Conn/*/One",
                                            "direction": "out"}),
                                          ("ConnLinkRoute2",
                                           {"pattern": "Conn/*/One",
                                            "direction": "in"})])
        self.assertEqual(2, len(fs.values))

        self.INT_B.wait_address("Conn/*/One")
        self.assertEqual(2, len(self._get_address(self.INT_A, "Conn/*/One")))

        # between interiors
        self._test_traffic(self.INT_B.listener,
                           self.INT_A.listener,
                           "Conn/BLAB/One",
                           count=5)

        # edge to edge
        self._test_traffic(self.EB1.listener,
                           self.EA1.listener,
                           "Conn/BLECH/One",
                           count=5)
        fs.join()
        self.assertEqual(10, fs.in_count)
        self.assertEqual(10, fs.out_count)
        self._wait_address_gone(self.INT_A, "Conn/*/One")
class Timeout(object):
    """Reactor timer callback: forwards expiry to the owner's ``timeout()``."""

    def __init__(self, parent):
        # The test handler that scheduled this timer.
        self._owner = parent

    def on_timer_task(self, event):
        # Reactor fired — let the owning handler fail/clean up.
        self._owner.timeout()
class PollTimeout(object):
    """Reactor timer callback: forwards expiry to the owner's ``poll_timeout()``."""

    def __init__(self, parent):
        # The test handler that scheduled this poll timer.
        self._owner = parent

    def on_timer_task(self, event):
        # Reactor fired — trigger the owner's periodic poll.
        self._owner.poll_timeout()
class ConnectivityTest(MessagingHandler):
    """Query the interior router's $management agent and verify that exactly
    one edge connection with the expected container id is present."""

    def __init__(self, interior_host, edge_host, edge_id):
        super(ConnectivityTest, self).__init__()
        self.interior_host = interior_host
        self.edge_host = edge_host
        self.edge_id = edge_id

        self.interior_conn = None
        self.edge_conn = None
        self.error = None
        self.proxy = None
        self.query_sent = False

    def timeout(self):
        # Invoked by the scheduled Timeout helper if the test hangs.
        self.error = "Timeout Expired"
        self.interior_conn.close()
        self.edge_conn.close()

    def on_start(self, event):
        self.timer = event.reactor.schedule(10.0, Timeout(self))
        self.interior_conn = event.container.connect(self.interior_host)
        self.edge_conn = event.container.connect(self.edge_host)
        # Dynamic reply address for management responses.
        self.reply_receiver = event.container.create_receiver(self.interior_conn, dynamic=True)

    def on_link_opened(self, event):
        if event.receiver == self.reply_receiver:
            # Reply address is known now; set up the management sender.
            self.proxy = MgmtMsgProxy(self.reply_receiver.remote_source.address)
            self.agent_sender = event.container.create_sender(self.interior_conn, "$management")

    def on_sendable(self, event):
        if not self.query_sent:
            self.query_sent = True
            self.agent_sender.send(self.proxy.query_connections())

    def on_message(self, event):
        if event.receiver == self.reply_receiver:
            response = self.proxy.response(event.message)
            if response.status_code != 200:
                self.error = "Unexpected error code from agent: %d - %s" % (response.status_code, response.status_description)
            connections = response.results
            count = 0
            for conn in connections:
                if conn.role == 'edge' and conn.container == self.edge_id:
                    count += 1
            if count != 1:
                self.error = "Incorrect edge count for container-id.  Expected 1, got %d" % count
            self.interior_conn.close()
            self.edge_conn.close()
            self.timer.cancel()

    def run(self):
        Container(self).run()
class DynamicAddressTest(MessagingHandler):
    """Create a dynamic (temporary) receiver address and verify `count`
    messages sent to it are delivered."""

    def __init__(self, receiver_host, sender_host):
        super(DynamicAddressTest, self).__init__()
        self.receiver_host = receiver_host
        self.sender_host = sender_host

        self.receiver_conn = None
        self.sender_conn = None
        self.receiver = None
        self.address = None   # filled in once the dynamic source is granted
        self.count = 300
        self.n_rcvd = 0
        self.n_sent = 0
        self.error = None

    def timeout(self):
        self.error = "Timeout Expired - n_sent=%d n_rcvd=%d addr=%s" % (self.n_sent, self.n_rcvd, self.address)
        self.receiver_conn.close()
        self.sender_conn.close()

    def on_start(self, event):
        self.timer = event.reactor.schedule(5.0, Timeout(self))
        self.receiver_conn = event.container.connect(self.receiver_host)
        self.sender_conn = event.container.connect(self.sender_host)
        self.receiver = event.container.create_receiver(self.receiver_conn, dynamic=True)

    def on_link_opened(self, event):
        if event.receiver == self.receiver:
            # Router has allocated the temporary address; start sending to it.
            self.address = self.receiver.remote_source.address
            self.sender = event.container.create_sender(self.sender_conn, self.address)

    def on_sendable(self, event):
        while self.n_sent < self.count:
            self.sender.send(Message(body="Message %d" % self.n_sent))
            self.n_sent += 1

    def on_message(self, event):
        self.n_rcvd += 1
        if self.n_rcvd == self.count:
            self.receiver_conn.close()
            self.sender_conn.close()
            self.timer.cancel()

    def run(self):
        Container(self).run()
class MobileAddressTest(MessagingHandler):
    """Send `count` messages to a mobile address and verify delivery; then
    close the receiver and verify that `rel_count` further sends are
    RELEASED (no consumer left for the address)."""

    def __init__(self, receiver_host, sender_host, address):
        super(MobileAddressTest, self).__init__()
        self.receiver_host = receiver_host
        self.sender_host = sender_host
        self.address = address

        self.receiver_conn = None
        self.sender_conn = None

        self.receiver = None
        self.sender = None

        self.count = 300       # messages expected to be delivered/settled
        self.rel_count = 50    # messages expected to be released afterwards
        self.n_rcvd = 0
        self.n_sent = 0
        self.n_settled = 0
        self.n_released = 0
        self.error = None

    def timeout(self):
        self.error = "Timeout Expired - n_sent=%d n_rcvd=%d n_settled=%d n_released=%d addr=%s" % \
                     (self.n_sent, self.n_rcvd, self.n_settled, self.n_released, self.address)
        self.receiver_conn.close()
        self.sender_conn.close()

    def on_start(self, event):
        self.timer = event.reactor.schedule(5.0, Timeout(self))
        self.receiver_conn = event.container.connect(self.receiver_host)
        self.sender_conn = event.container.connect(self.sender_host)
        self.receiver = event.container.create_receiver(self.receiver_conn, self.address)
        self.sender = event.container.create_sender(self.sender_conn, self.address)

    def on_sendable(self, event):
        while self.n_sent < self.count:
            message = Message(body="Message %d" % self.n_sent)
            self.sender.send(message)
            self.n_sent += 1

    def on_message(self, event):
        self.n_rcvd += 1

    def on_settled(self, event):
        self.n_settled += 1
        if self.n_settled == self.count:
            # All deliveries settled: drop the consumer and send more —
            # these should come back released.
            self.receiver.close()
            for i in range(self.rel_count):
                self.sender.send(Message(body="Message %d" % self.n_sent))
                self.n_sent += 1

    def on_released(self, event):
        self.n_released += 1
        if self.n_released == self.rel_count:
            self.receiver_conn.close()
            self.sender_conn.close()
            self.timer.cancel()

    def run(self):
        Container(self).run()
class MobileAddressOneSenderTwoReceiversTest(MessagingHandler):
    """Anycast delivery to two competing receivers: each message must arrive
    exactly once (no duplicates across the two receivers).  After all
    messages settle, both receivers are closed and further sends must be
    released."""

    def __init__(self, receiver1_host, receiver2_host, sender_host, address):
        super(MobileAddressOneSenderTwoReceiversTest, self).__init__()
        self.receiver1_host = receiver1_host
        self.receiver2_host = receiver2_host
        self.sender_host = sender_host
        self.address = address

        # One sender connection and two receiver connections
        self.receiver1_conn = None
        self.receiver2_conn = None
        self.sender_conn = None

        self.receiver1 = None
        self.receiver2 = None
        self.sender = None

        self.count = 300
        self.rel_count = 50
        self.n_rcvd1 = 0
        self.n_rcvd2 = 0
        self.n_sent = 0
        self.n_settled = 0
        self.n_released = 0
        self.error = None
        self.timer = None
        self.all_msgs_received = False
        # Bodies seen so far, used to detect duplicate deliveries.
        self.recvd_msg_bodies = dict()
        self.dup_msg = None

    def timeout(self):
        if self.dup_msg:
            self.error = "Duplicate message %s received " % self.dup_msg
        else:
            self.error = "Timeout Expired - n_sent=%d n_rcvd=%d n_settled=%d n_released=%d addr=%s" % \
                         (self.n_sent, (self.n_rcvd1 + self.n_rcvd2), self.n_settled, self.n_released, self.address)
        self.receiver1_conn.close()
        self.receiver2_conn.close()
        self.sender_conn.close()

    def on_start(self, event):
        self.timer = event.reactor.schedule(5.0, Timeout(self))

        # Create two receivers
        self.receiver1_conn = event.container.connect(self.receiver1_host)
        self.receiver2_conn = event.container.connect(self.receiver2_host)
        self.receiver1 = event.container.create_receiver(self.receiver1_conn,
                                                         self.address)
        self.receiver2 = event.container.create_receiver(self.receiver2_conn,
                                                         self.address)

        # Create one sender
        self.sender_conn = event.container.connect(self.sender_host)
        self.sender = event.container.create_sender(self.sender_conn,
                                                    self.address)

    def on_sendable(self, event):
        while self.n_sent < self.count:
            self.sender.send(Message(body="Message %d" % self.n_sent))
            self.n_sent += 1

    def on_message(self, event):
        if self.recvd_msg_bodies.get(event.message.body):
            # Same body seen twice => duplicate delivery; fail immediately.
            self.dup_msg = event.message.body
            self.timeout()
        else:
            self.recvd_msg_bodies[event.message.body] = event.message.body

        if event.receiver == self.receiver1:
            self.n_rcvd1 += 1
        if event.receiver == self.receiver2:
            self.n_rcvd2 += 1

        if self.n_sent == self.n_rcvd1 + self.n_rcvd2:
            self.all_msgs_received = True

    def on_settled(self, event):
        self.n_settled += 1
        if self.n_settled == self.count:
            # Drop both consumers and send more; those should be released.
            self.receiver1.close()
            self.receiver2.close()
            for i in range(self.rel_count):
                self.sender.send(Message(body="Message %d" % self.n_sent))
                self.n_sent += 1

    def on_released(self, event):
        self.n_released += 1
        if self.n_released == self.rel_count and self.all_msgs_received:
            self.receiver1_conn.close()
            self.receiver2_conn.close()
            self.sender_conn.close()
            self.timer.cancel()

    def run(self):
        Container(self).run()
class MobileAddressMulticastTest(MessagingHandler):
    """Multicast delivery test: three receivers attach to a multicast
    address, then a sender is created (only after the address is confirmed
    present in the route table) and every receiver must get every message
    exactly once.

    :param receiver1_host..receiver3_host: router addresses for the receivers
    :param sender_host: router address for the sender
    :param address: multicast address under test
    :param check_addr_host: router whose address table is polled before
        creating the sender (defaults to ``sender_host``)
    :param large_msg: if True, use a large (multi-frame) message body
    """

    def __init__(self, receiver1_host, receiver2_host, receiver3_host,
                 sender_host, address, check_addr_host=None, large_msg=False):
        super(MobileAddressMulticastTest, self).__init__()
        self.receiver1_host = receiver1_host
        self.receiver2_host = receiver2_host
        self.receiver3_host = receiver3_host
        self.sender_host = sender_host
        self.address = address

        # One sender connection and three receiver connections
        self.receiver1_conn = None
        self.receiver2_conn = None
        self.receiver3_conn = None
        self.sender_conn = None

        self.receiver1 = None
        self.receiver2 = None
        self.receiver3 = None
        self.sender = None

        self.count = 200
        self.n_rcvd1 = 0
        self.n_rcvd2 = 0
        self.n_rcvd3 = 0
        self.n_sent = 0
        self.n_settled = 0
        self.n_released = 0
        self.error = None
        self.timer = None
        self.all_msgs_received = False
        # Per-receiver correlation ids seen so far (duplicate detection).
        self.recvd1_msgs = dict()
        self.recvd2_msgs = dict()
        self.recvd3_msgs = dict()
        self.dup_msg_rcvd = False
        self.dup_msg = None
        self.receiver_name = None
        self.large_msg = large_msg
        self.body = ""
        self.r_attaches = 0
        self.reactor = None
        self.addr_timer = None
        # The maximum number of times we are going to try to check if the
        # address has propagated.
        self.max_attempts = 5
        # FIX: this counter was initialised twice in the original code;
        # keep a single assignment.
        self.num_attempts = 0
        self.container = None
        self.check_addr_host = check_addr_host
        if not self.check_addr_host:
            self.check_addr_host = self.sender_host
        if self.large_msg:
            # ~220 KB body forces a multi-frame transfer.
            for i in range(10000):
                self.body += "0123456789101112131415"

    def timeout(self):
        """Fail the test: record an error and tear down all connections."""
        if self.dup_msg:
            self.error = "%s received duplicate message %s" % \
                         (self.receiver_name, self.dup_msg)
        else:
            if not self.error:
                self.error = "Timeout Expired - n_sent=%d n_rcvd1=%d " \
                             "n_rcvd2=%d n_rcvd3=%d addr=%s" % \
                             (self.n_sent, self.n_rcvd1, self.n_rcvd2,
                              self.n_rcvd3, self.address)
        self.receiver1_conn.close()
        self.receiver2_conn.close()
        self.receiver3_conn.close()
        if self.sender_conn:
            self.sender_conn.close()

    def create_sndr(self):
        """Open the sender connection/link (deferred until the address is
        known to have propagated)."""
        self.sender_conn = self.container.connect(self.sender_host)
        self.sender = self.container.create_sender(self.sender_conn,
                                                   self.address)

    def check_address(self):
        """Poll the route table of ``check_addr_host``; create the sender
        once the test address appears, retrying up to ``max_attempts``."""
        local_node = Node.connect(self.check_addr_host, timeout=TIMEOUT)
        outs = local_node.query(type='org.apache.qpid.dispatch.router.address')
        found = False
        self.num_attempts += 1
        for result in outs.results:
            if self.address in result[0]:
                found = True
                self.create_sndr()
                local_node.close()
                self.addr_timer.cancel()
                break
        if not found:
            if self.num_attempts < self.max_attempts:
                # Not propagated yet — re-poll in a second.
                self.addr_timer = self.reactor.schedule(1.0, AddrTimer(self))
            else:
                self.error = "Unable to create sender because of " \
                             "absence of address in the address table"
                self.timeout()
            local_node.close()

    def on_start(self, event):
        # Large messages need more time to flow to all three receivers.
        self.timer = event.reactor.schedule(20.0 if self.large_msg else 10.0,
                                            Timeout(self))
        # Create the three receivers first; the sender is created later by
        # check_address() once the address shows up in the route table.
        self.receiver1_conn = event.container.connect(self.receiver1_host)
        self.receiver2_conn = event.container.connect(self.receiver2_host)
        self.receiver3_conn = event.container.connect(self.receiver3_host)
        self.receiver1 = event.container.create_receiver(self.receiver1_conn,
                                                         self.address)
        self.receiver2 = event.container.create_receiver(self.receiver2_conn,
                                                         self.address)
        self.receiver3 = event.container.create_receiver(self.receiver3_conn,
                                                         self.address)
        self.container = event.container

    def on_link_opened(self, event):
        if event.receiver == self.receiver1 or \
                event.receiver == self.receiver2 or \
                event.receiver == self.receiver3:
            self.r_attaches += 1
            if self.r_attaches == 3:
                # All receivers attached; start polling for the address.
                self.reactor = event.reactor
                self.addr_timer = self.reactor.schedule(1.0, AddrTimer(self))

    def on_sendable(self, event):
        while self.n_sent < self.count:
            msg = None
            if self.large_msg:
                msg = Message(body=self.body)
            else:
                msg = Message(body="Message %d" % self.n_sent)
            msg.correlation_id = self.n_sent
            self.sender.send(msg)
            self.n_sent += 1

    def on_message(self, event):
        # Track per-receiver counts and flag any duplicate correlation id.
        if event.receiver == self.receiver1:
            if self.recvd1_msgs.get(event.message.correlation_id):
                self.dup_msg = event.message.correlation_id
                self.receiver_name = "Receiver 1"
                self.timeout()
            self.n_rcvd1 += 1
            self.recvd1_msgs[event.message.correlation_id] = event.message.correlation_id
        if event.receiver == self.receiver2:
            if self.recvd2_msgs.get(event.message.correlation_id):
                self.dup_msg = event.message.correlation_id
                self.receiver_name = "Receiver 2"
                self.timeout()
            self.n_rcvd2 += 1
            self.recvd2_msgs[event.message.correlation_id] = event.message.correlation_id
        if event.receiver == self.receiver3:
            if self.recvd3_msgs.get(event.message.correlation_id):
                self.dup_msg = event.message.correlation_id
                self.receiver_name = "Receiver 3"
                self.timeout()
            self.n_rcvd3 += 1
            self.recvd3_msgs[event.message.correlation_id] = event.message.correlation_id

        if self.n_rcvd1 == self.count and self.n_rcvd2 == self.count and \
                self.n_rcvd3 == self.count:
            # Every receiver saw every message: success.
            self.timer.cancel()
            self.receiver1_conn.close()
            self.receiver2_conn.close()
            self.receiver3_conn.close()
            self.sender_conn.close()

    def run(self):
        Container(self).run()
class MobileAddrMcastDroppedRxTest(MobileAddressMulticastTest):
    # failure scenario - cause some receiving clients to close while a large
    # message is in transit
    """Subclass of the multicast test that abruptly closes receiver 1 after
    50 messages and receiver 2 after 75; success is every sent delivery
    ending up either accepted or released."""

    def __init__(self, receiver1_host, receiver2_host, receiver3_host,
                 sender_host, address, check_addr_host=None, large_msg=True):
        super(MobileAddrMcastDroppedRxTest, self).__init__(receiver1_host,
                                                           receiver2_host,
                                                           receiver3_host,
                                                           sender_host,
                                                           address,
                                                           check_addr_host=check_addr_host,
                                                           large_msg=large_msg)
        self.n_accepted = 0
        self.n_released = 0
        self.recv1_closed = False
        self.recv2_closed = False

    def _check_done(self):
        # Done once every sent delivery has a terminal outcome.
        if self.n_accepted + self.n_released == self.count:
            self.receiver3_conn.close()
            self.sender_conn.close()
            self.timer.cancel()

    def on_message(self, event):
        super(MobileAddrMcastDroppedRxTest, self).on_message(event)

        # start closing receivers
        if self.n_rcvd1 == 50:
            if not self.recv1_closed:
                self.receiver1_conn.close()
                self.recv1_closed = True
        if self.n_rcvd2 == 75:
            if not self.recv2_closed:
                self.recv2_closed = True
                self.receiver2_conn.close()

    def on_accepted(self, event):
        self.n_accepted += 1
        self._check_done()

    def on_released(self, event):
        self.n_released += 1
        self._check_done()
class MobileAddressEventTest(MessagingHandler):
    """Multicast address-event test: three receivers, manual (auto_accept
    off) settlement, one sender created only after the address is visible on
    the interior router.  Verifies no duplicate deliveries and that the
    expected total number of messages arrives across the three receivers.

    FIX vs. original: removed the unused local ``remote_count`` in
    ``check_address`` and normalised ``!= 0`` spacing; behavior unchanged.
    """

    def __init__(self, receiver1_host, receiver2_host, receiver3_host,
                 sender_host, interior_host, address):
        # auto_accept=False: deliveries are settled explicitly in on_message.
        super(MobileAddressEventTest, self).__init__(auto_accept=False)
        self.receiver1_host = receiver1_host
        self.receiver2_host = receiver2_host
        self.receiver3_host = receiver3_host
        self.sender_host = sender_host
        self.address = address
        self.receiver1_conn = None
        self.receiver2_conn = None
        self.receiver3_conn = None
        self.sender_conn = None
        # Per-receiver correlation ids seen so far (duplicate detection).
        self.recvd1_msgs = dict()
        self.recvd2_msgs = dict()
        self.recvd3_msgs = dict()
        self.n_rcvd1 = 0
        self.n_rcvd2 = 0
        self.n_rcvd3 = 0
        self.timer = None
        self.receiver1 = None
        self.receiver2 = None
        self.receiver3 = None
        self.sender = None
        self.interior_host = interior_host
        self.container = None
        self.count = 600
        self.dup_msg = None
        self.receiver_name = None
        self.n_sent = 0
        self.error = None
        self.r_attaches = 0
        self.n_released = 0
        self.n_settled = 0
        self.addr_timer = None

    def timeout(self):
        """Fail the test: record an error and tear down all connections."""
        if self.dup_msg:
            self.error = "%s received duplicate message %s" % \
                         (self.receiver_name, self.dup_msg)
        else:
            if not self.error:
                self.error = "Timeout Expired - n_sent=%d n_rcvd1=%d " \
                             "n_rcvd2=%d n_rcvd3=%d addr=%s" % \
                             (self.n_sent, self.n_rcvd1, self.n_rcvd2,
                              self.n_rcvd3, self.address)
        self.receiver1_conn.close()
        self.receiver2_conn.close()
        self.receiver3_conn.close()
        if self.sender_conn:
            self.sender_conn.close()

    def check_address(self):
        """Create the sender once the address is present on the interior
        router; fail the test immediately if it is not (no retry here)."""
        local_node = Node.connect(self.interior_host, timeout=TIMEOUT)
        outs = local_node.query(type='org.apache.qpid.dispatch.router.address')
        found = False
        for result in outs.results:
            if self.address in result[0]:
                found = True
                self.sender_conn = self.container.connect(self.sender_host)
                self.sender = self.container.create_sender(self.sender_conn,
                                                           self.address)
                break
        if not found:
            self.error = "Unable to create sender because of " \
                         "absence of address in the address table"
            self.addr_timer.cancel()
            self.timeout()

    def on_start(self, event):
        self.timer = event.reactor.schedule(10.0, Timeout(self))

        # Create all 3 receivers first; the sender is created by
        # check_address() once the address appears on the interior.
        self.receiver1_conn = event.container.connect(self.receiver1_host)
        self.receiver2_conn = event.container.connect(self.receiver2_host)
        self.receiver3_conn = event.container.connect(self.receiver3_host)
        self.receiver1 = event.container.create_receiver(self.receiver1_conn,
                                                         self.address)
        self.receiver2 = event.container.create_receiver(self.receiver2_conn,
                                                         self.address)
        self.receiver3 = event.container.create_receiver(self.receiver3_conn,
                                                         self.address)

        self.container = event.container
        self.addr_timer = event.reactor.schedule(1.0, AddrTimer(self))

    def on_sendable(self, event):
        # Send one message per credit grant until `count` is reached.
        if self.n_sent < self.count:
            msg = Message(body="Message %d" % self.n_sent)
            msg.correlation_id = self.n_sent
            self.sender.send(msg)
            self.n_sent += 1

    def on_message(self, event):
        # Manual settlement: record the message, flag duplicates, settle.
        if event.receiver == self.receiver1:
            if self.recvd1_msgs.get(event.message.correlation_id):
                self.dup_msg = event.message.correlation_id
                self.receiver_name = "Receiver 1"
                self.timeout()
            self.n_rcvd1 += 1
            self.recvd1_msgs[
                event.message.correlation_id] = event.message.correlation_id
            event.delivery.settle()
        if event.receiver == self.receiver2:
            if self.recvd2_msgs.get(event.message.correlation_id):
                self.dup_msg = event.message.correlation_id
                self.receiver_name = "Receiver 2"
                self.timeout()
            self.n_rcvd2 += 1
            self.recvd2_msgs[
                event.message.correlation_id] = event.message.correlation_id
            event.delivery.settle()
        if event.receiver == self.receiver3:
            if self.recvd3_msgs.get(event.message.correlation_id):
                self.dup_msg = event.message.correlation_id
                self.receiver_name = "Receiver 3"
                self.timeout()
            self.n_rcvd3 += 1
            self.recvd3_msgs[
                event.message.correlation_id] = event.message.correlation_id
            event.delivery.settle()

    def on_settled(self, event):
        # Success once `count` messages arrived in total and receivers 2 and
        # 3 each saw at least one.
        if self.n_rcvd1 + self.n_rcvd2 + self.n_rcvd3 == self.count and \
                self.n_rcvd2 != 0 and self.n_rcvd3 != 0:
            self.timer.cancel()
            self.receiver1_conn.close()
            self.receiver2_conn.close()
            self.receiver3_conn.close()
            self.sender_conn.close()

    def on_released(self, event):
        self.n_released += 1

    def run(self):
        Container(self).run()
# Standard unittest entry point for running this module as a script.
if __name__ == '__main__':
    unittest.main(main_module())
| 1.851563 | 2 |
Inheritance_5.py | SahilSandasani/Py2 | 0 | 12763222 | class A:
def __init__(self):
self._x = 5
class B(A):
    """Subclass demonstrating access to an attribute inherited from ``A``."""

    def display(self):
        # Echo the value that A.__init__ stored on this instance.
        print(self._x)
def main():
    """Instantiate ``B`` and print the attribute inherited from ``A``."""
    obj = B()
    obj.display()


if __name__ == "__main__":
    # FIX: guard the demo call so importing this module does not run it.
    main()
picklachu/storage/s3.py | fowbi/picklachu | 1 | 12763223 | <filename>picklachu/storage/s3.py<gh_stars>1-10
import boto3
from botocore.client import BaseClient
from picklachu.storage.base import BaseStorage
class S3(BaseStorage):
    """S3-backed storage adapter: persists and retrieves raw bytes under a
    key in a single bucket.

    Args:
        client (BaseClient): a boto3 S3 client.
        bucket (str): name of the target S3 bucket.
    """

    def __init__(self, client: BaseClient, bucket: str):
        self.bucket = bucket
        self.client = client

    def persist(self, path: str, data: bytes):
        """Persist data to s3

        Args:
            path (str): S3 key
            data (bytes): data in bytes

        Returns:
            None
        """
        self.client.put_object(Bucket=self.bucket, Key=path, Body=data)

    def retrieve(self, path: str) -> bytes:
        """Retrieve data from S3

        Args:
            path (str): S3 key

        Returns:
            Retrieved data in bytes
        """
        response = self.client.get_object(Bucket=self.bucket, Key=path)
        # FIX: removed leftover debug `print(response)` that dumped the raw
        # boto3 response (including metadata) to stdout on every retrieval.
        return response['Body'].read()
| 2.875 | 3 |
kissim/tests/encoding/test_features_sitealign.py | AJK-dev/kissim | 15 | 12763224 | """
Unit and regression test for the kissim.encoding.features.sitealign.SiteAlignFeature class.
"""
from pathlib import Path
import pytest
import numpy as np
import pandas as pd
from opencadd.databases.klifs import setup_local
from kissim.io import PocketBioPython
from kissim.encoding.features import SiteAlignFeature
# Root of the KLIFS test fixtures used by the tests below.
# NOTE(review): Path(__name__) builds the path from the module *name*, not the
# file location, so PATH_TEST_DATA resolves relative to the current working
# directory — presumably intentional (tests run from the repo root); confirm,
# or consider Path(__file__) for location-independent resolution.
PATH_TEST_DATA = Path(__name__).parent / "kissim" / "tests" / "data"
# Local KLIFS session backed by the downloaded test data
# (module-level side effect: opens the local KLIFS database at import time).
LOCAL = setup_local(PATH_TEST_DATA / "KLIFS_download")
class TestsSiteAlignFeature:
"""
Test SiteAlignFeature class methods.
"""
@pytest.mark.parametrize(
    "structure_klifs_id, klifs_session, feature_name",
    [
        (12347, LOCAL, "hba"),
        (12347, LOCAL, "hbd"),
        (12347, LOCAL, "size"),
        (12347, LOCAL, "charge"),
        (12347, LOCAL, "aliphatic"),
        (12347, LOCAL, "aromatic"),
    ],
)
def test_from_pocket(self, structure_klifs_id, klifs_session, feature_name):
    """
    Test if SiteAlignFeature can be set from a Pocket object.
    Test object attributes.
    """
    pocket = PocketBioPython.from_structure_klifs_id(
        structure_klifs_id, klifs_session=klifs_session
    )
    feature = SiteAlignFeature.from_pocket(pocket, feature_name)
    assert isinstance(feature, SiteAlignFeature)

    # Test class attributes
    assert feature.name == structure_klifs_id
    # Residue IDs may be None (unresolved residues); categories are floats.
    for residue_id, residue_ix, residue_name, category in zip(
        feature._residue_ids, feature._residue_ixs, feature._residue_names, feature._categories
    ):
        if residue_id is not None:
            assert isinstance(residue_id, int)
        assert isinstance(residue_ix, int)
        # NOTE(review): this checks the parametrized `feature_name` (always a
        # str) — presumably `residue_name` was intended; confirm whether
        # residue names can be None before changing.
        assert isinstance(feature_name, str)
        assert isinstance(category, float)
@pytest.mark.parametrize(
"structure_klifs_id, klifs_session, feature_name",
[(12347, LOCAL, "xxx")],
)
def test_from_pocket_raises(self, structure_klifs_id, klifs_session, feature_name):
"""
Test if SiteAlignFeature raises error when passed an invalid feature name.
"""
with pytest.raises(KeyError):
pocket = PocketBioPython.from_structure_klifs_id(
structure_klifs_id, klifs_session=klifs_session
)
SiteAlignFeature.from_pocket(pocket, feature_name)
@pytest.mark.parametrize(
"structure_klifs_id, klifs_session",
[(12347, LOCAL)],
)
def test_values(self, structure_klifs_id, klifs_session):
"""
Test class property: values.
"""
pocket = PocketBioPython.from_structure_klifs_id(
structure_klifs_id, klifs_session=klifs_session
)
# Use example feature type
feature = SiteAlignFeature.from_pocket(pocket, feature_name="hba")
assert isinstance(feature.values, list)
for value in feature.values:
assert isinstance(value, float)
@pytest.mark.parametrize(
"structure_klifs_id, klifs_session",
[(12347, LOCAL)],
)
def test_details(self, structure_klifs_id, klifs_session):
"""
Test class property: details.
"""
pocket = PocketBioPython.from_structure_klifs_id(
structure_klifs_id, klifs_session=klifs_session
)
# Use example feature type
feature = SiteAlignFeature.from_pocket(pocket, feature_name="hba")
assert isinstance(feature.details, pd.DataFrame)
assert feature.details.columns.to_list() == [
"residue.id",
"residue.name",
"sitealign.category",
]
@pytest.mark.parametrize(
"residue_name, feature_name, value",
[
("ALA", "size", 1.0), # Size
("ASN", "size", 2.0),
("ARG", "size", 3.0),
("PTR", "size", 3.0), # Converted non-standard
("MSE", "size", 2.0), # Converted non-standard
("XXX", "size", np.nan), # Non-convertable non-standard
("ALA", "hbd", 0.0),
("ASN", "hbd", 1.0),
("ARG", "hbd", 3.0),
("XXX", "hbd", np.nan),
("ALA", "hba", 0.0),
("ASN", "hba", 1.0),
("ASP", "hba", 2.0),
("XXX", "hba", np.nan),
("ALA", "charge", 0.0),
("ARG", "charge", 1.0),
("ASP", "charge", -1.0),
("XXX", "charge", np.nan),
("ALA", "aromatic", 0.0),
("HIS", "aromatic", 1.0),
("XXX", "aromatic", np.nan),
("ARG", "aliphatic", 0.0),
("ALA", "aliphatic", 1.0),
("XXX", "aliphatic", np.nan),
],
)
def test_residue_to_value(self, residue_name, feature_name, value):
"""
Test function for retrieval of residue's size and pharmacophoric features
(i.e. number of hydrogen bond donor,
hydrogen bond acceptors, charge features, aromatic features or aliphatic features )
Parameters
----------
residue_name : str
Three-letter code for residue.
feature_name : str
Feature type name.
value : float or None
Feature value.
"""
feature = SiteAlignFeature()
# Call feature from residue function
value_calculated = feature._residue_to_value(residue_name, feature_name)
if value_calculated: # If not None
assert isinstance(value_calculated, float)
# Note: Cannot use == to compare np.nan values
if np.isnan(value):
assert np.isnan(value_calculated)
else:
assert value_calculated == value
@pytest.mark.parametrize(
"feature_name",
[("XXX"), (1)],
)
def test_raise_invalid_feature_name(self, feature_name):
"""
Test if KeyError is raised if user passes an incorrect SiteAlign feature string.
"""
feature = SiteAlignFeature()
with pytest.raises(KeyError):
feature._raise_invalid_feature_name(feature_name)
@pytest.mark.parametrize(
"residue_name, residue_name_converted",
[
("MSE", "MET"),
("ALA", None),
("XXX", None),
],
)
def test_convert_modified_residue(self, residue_name, residue_name_converted):
"""
Test if modified residues are converted into standard residues correctly.
If conversion is not possible, test if None is returned.
"""
feature = SiteAlignFeature()
assert feature._convert_modified_residue(residue_name) == residue_name_converted
| 2.140625 | 2 |
socketserver/echo_stream_request_client.py | TheBigFish/simple-python | 0 | 12763225 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @File : echo_stream_request_client.py
# @Author: yubo
# @Date : 2018/12/19
# @Desc :
import sys
from socket import *
serverHost = '127.0.0.1'
serverPort = 50008
if len(sys.argv) > 1:
serverHost = sys.argv[1]
#Create socket
sSock = socket(AF_INET, SOCK_STREAM)
#Connect to server
sSock.connect((serverHost, serverPort))
#Stream data to server.
line = ""
while line != 'bye':
line = raw_input("Send to %s: " % (serverHost))
sSock.send(line+'\n')
data = sSock.recv(1024)
print 'receive:', data
sSock.shutdown(0)
sSock.close() | 2.796875 | 3 |
mailviews/helpers.py | archchoa/django-mailviews | 93 | 12763226 | <filename>mailviews/helpers.py
from django.conf import settings
def should_use_staticfiles():
    """Return True when ``django.contrib.staticfiles`` is listed in INSTALLED_APPS."""
    installed_apps = settings.INSTALLED_APPS
    return 'django.contrib.staticfiles' in installed_apps
| 1.414063 | 1 |
011. Container With Most Water.py | patrick-luo/Leet-Code | 0 | 12763227 | """
Ideas:
1. The widest container (using first and last line) is a good candidate, because of its width. Its water level is the height of the smaller one of first and last line.
2. All other containers are less wide and thus would need a higher water level in order to hold more water.
3. The smaller one of first and last line doesn't support a higher water level and can thus be safely removed from further consideration.
"""
class Solution(object):
    """Two-pointer solution for LeetCode 11, Container With Most Water.

    Start with the widest container; at each step the shorter wall is the
    binding constraint, so moving it inward is the only way a narrower
    container could hold more water.
    """

    def maxArea(self, height):
        """
        :type height: List[int]
        :rtype: int
        """
        left, right = 0, len(height) - 1
        best = 0
        while left < right:
            width = right - left
            if height[left] < height[right]:
                level = height[left]
                left += 1
            else:
                level = height[right]
                right -= 1
            best = max(best, width * level)
        return best
core/src/zeit/vgwort/tests/test_token.py | rickdg/vivi | 5 | 12763228 | from zeit.vgwort.token import _order_tokens
import transaction
import unittest
import zeit.vgwort.interfaces
import zeit.vgwort.testing
import zope.component
class TokenStorageTest(zeit.vgwort.testing.EndToEndTestCase):
    """End-to-end tests against the VG Wort test system for ordering tokens."""

    def order(self, amount):
        # Helper: order `amount` tokens, skipping the test when the remote
        # VG Wort test system is unavailable.
        ts = zope.component.getUtility(zeit.vgwort.interfaces.ITokens)
        try:
            ts.order(amount)
        except zeit.vgwort.interfaces.TechnicalError:
            self.skipTest('vgwort test system down')

    def test_order_tokens(self):
        ts = zope.component.getUtility(zeit.vgwort.interfaces.ITokens)
        self.assertEqual(0, len(ts))
        self.order(2)
        self.assertEqual(2, len(ts))

    def test_order_should_add_str(self):
        ts = zope.component.getUtility(zeit.vgwort.interfaces.ITokens)
        self.order(1)
        self.assertTrue(isinstance(ts._data[0][0], str))
        self.assertTrue(isinstance(ts._data[0][0], str))


class OrderTokensTest(zeit.vgwort.testing.TestCase):
    """Tests for the `_order_tokens` maintenance job (reorders when low)."""

    def test_enough_tokens_should_not_order(self):
        ts = zope.component.getUtility(
            zeit.vgwort.interfaces.ITokens)
        ts.order(20)
        self.assertEqual(20, len(ts))
        _order_tokens()
        self.assertEqual(20, len(ts))

    def test_insufficient_tokens_should_order_new(self):
        ts = zope.component.getUtility(
            zeit.vgwort.interfaces.ITokens)
        self.assertEqual(0, len(ts))
        _order_tokens()
        self.assertEqual(1, len(ts))


class TokenTransactionTest(zeit.vgwort.testing.TestCase):
    """Claimed tokens must stay claimed even when the transaction aborts."""

    layer = zeit.vgwort.testing.XMLRPC_LAYER

    def test_error_during_publish_still_marks_token_as_claimed(self):
        tokens = zope.component.getUtility(zeit.vgwort.interfaces.ITokens)
        tokens.order(1)
        self.assertEqual(1, len(tokens))
        transaction.commit()
        tokens.claim_immediately()
        # if an error occurs during publishing, the transaction will be aborted
        transaction.abort()
        self.assertEqual(0, len(tokens))


class ObjectCopyTest(zeit.vgwort.testing.TestCase):
    """Copying content must not carry VG Wort token/report state to the copy."""

    def test_copying_should_removes_vgwort_properties_from_copy(self):
        import datetime
        import pytz
        import zeit.cms.interfaces
        import zeit.vgwort.interfaces
        content = zeit.cms.interfaces.ICMSContent(
            'http://xml.zeit.de/testcontent')
        token = zeit.vgwort.interfaces.IToken(content)
        token.public_token = u'<PASSWORD>'
        token.private_token = u'<PASSWORD>'
        info = zeit.vgwort.interfaces.IReportInfo(content)
        info.reported_on = datetime.datetime.now(pytz.UTC)
        info.reported_error = u'error'
        online = zeit.cms.interfaces.ICMSContent(
            'http://xml.zeit.de/online/')
        zope.copypastemove.interfaces.IObjectCopier(content).copyTo(
            online, 'foo')
        copy = zeit.cms.interfaces.ICMSContent(
            'http://xml.zeit.de/online/foo')
        token = zeit.vgwort.interfaces.IToken(copy)
        self.assertEqual(None, token.public_token)
        self.assertEqual(None, token.private_token)
        info = zeit.vgwort.interfaces.IReportInfo(copy)
        self.assertEqual(None, info.reported_on)
        self.assertEqual(None, info.reported_error)


class SecurityObjectCopyTest(zeit.vgwort.testing.BrowserTestCase):
    """Regression test: copy action must work with security proxies active."""

    def test_copying_should_work_even_with_security_on(self):
        # see #9960
        self.browser.handleErrors = False
        self.assertNothingRaised(
            self.browser.open,
            'http://localhost/++skin++vivi/repository/online/@@copy?unique_id='
            'http%3A%2F%2Fxml.zeit.de%2Fonline%2F2007%2F01%2FSomalia')


class TokenServiceTest(unittest.TestCase):

    def test_should_be_initializable_without_config(self):
        from zeit.vgwort.token import TokenService
        TokenService()
| 2.1875 | 2 |
setup.py | ds-wizard/dsw2to3 | 0 | 12763229 | <filename>setup.py<gh_stars>0
from setuptools import setup, find_packages
with open('README.md') as f:
long_description = ''.join(f.readlines())
setup(
name='dsw2to3',
version='1.0.2',
description='CLI tool to support migration from DSW 2.14 to DSW 3.0',
long_description=long_description,
long_description_content_type='text/markdown',
author='<NAME>',
keywords='dsw migration database upgrade',
license='Apache License 2.0',
url='https://github.com/ds-wizard/dsw2to3',
packages=find_packages(),
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Database',
'Topic :: Utilities',
],
zip_safe=False,
python_requires='>=3.6, <4',
install_requires=[
'click',
'minio',
'pymongo',
'PyYAML',
'psycopg2',
'tenacity',
],
setup_requires=[
'wheel',
],
entry_points={
'console_scripts': [
'dsw2to3=dsw2to3:main',
],
},
)
| 1.296875 | 1 |
FastApi/sima/src/apps/comment/__init__.py | parker-pu/SiMa | 2 | 12763230 | <reponame>parker-pu/SiMa
from src.apps.comment.urls import router

# `__all__` must contain *names* (strings), not the objects themselves;
# a non-string entry makes `from ... import *` raise TypeError.
__all__ = ["router"]
| 1.203125 | 1 |
rlogbook/computing/migrations/0010_auto_20141128_1123.py | tamasgal/rlogbook | 0 | 12763231 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Auto-generated schema migration: rename `Computer.warranty_type` to `warranty`."""

    dependencies = [
        ('computing', '0009_auto_20141128_1121'),
    ]

    operations = [
        migrations.RenameField(
            model_name='computer',
            old_name='warranty_type',
            new_name='warranty',
        ),
    ]
| 1.6875 | 2 |
solfasol/issues/admin.py | Solfasol/journo-blog | 0 | 12763232 | <reponame>Solfasol/journo-blog
import os
from io import BytesIO
from pdf2image import convert_from_path, convert_from_bytes
from django.core.files.base import ContentFile
from django.contrib import admin
from django.conf import settings
from .models import Issue, Page
class PageInline(admin.TabularInline):
    # Inline editor for an issue's pages on the Issue change form.
    model = Page
    extra = 0
    autocomplete_fields = ['tags']


@admin.register(Issue)
class IssueAdmin(admin.ModelAdmin):
    list_display = ['name', 'year', 'month', 'page_count']
    autocomplete_fields = ['tags']
    readonly_fields = ['page_count']
    actions = ['create_pages', 'delete_pages']
    inlines = [PageInline]

    def create_pages(self, request, queryset):
        # Admin action: render each selected issue's PDF into per-page PNG
        # images, create Page rows, and use the first page as the cover.
        for issue in queryset:
            if issue.pdf:
                issue.pdf.seek(0)  # file may already have been read; rewind
                images = convert_from_bytes(issue.pdf.read())
                i = 1  # page numbers are 1-based
                for image in images:
                    page = Page.objects.create(
                        issue=issue,
                        number=i,
                    )
                    page_io = BytesIO()
                    image.save(page_io, 'PNG')
                    page.image.save(
                        # NOTE(review): pdf2image images converted in memory may
                        # not expose `filename` -- confirm this attribute exists.
                        image.filename,
                        ContentFile(page_io.getvalue()),
                    )
                    i += 1
                issue.page_count = len(images)
                cover_io = BytesIO()
                images[0].save(cover_io, 'PNG')
                issue.cover.save(
                    images[0].filename,
                    ContentFile(cover_io.getvalue()),
                    save=False,  # defer DB write; issue.save() below persists all fields
                )
                issue.save()

    def delete_pages(self, request, queryset):
        # Admin action: remove page/cover image files from disk (ignoring
        # already-missing files) and delete the Page rows.
        for issue in queryset:
            for page in issue.page_set.all():
                try:
                    os.remove(os.path.join(settings.MEDIA_ROOT, page.image.name))
                except FileNotFoundError:
                    pass
                page.delete()
            issue.page_count = None
            try:
                os.remove(os.path.join(settings.MEDIA_ROOT, issue.cover.name))
            except FileNotFoundError:
                pass
            issue.cover = None
            issue.save()


@admin.register(Page)
class PageAdmin(admin.ModelAdmin):
    list_display = ['issue', 'number']
| 2.203125 | 2 |
PATH ANALYSIS Left Coordinate_Commited to SVN/calculus.py | abricot1025/Sloan_January_2018 | 1 | 12763233 | <reponame>abricot1025/Sloan_January_2018<gh_stars>1-10
import numpy as np

# Angle (degrees) of the right triangle with legs 1.3 and 22.4.
# BUGFIX: the original computed np.tan(1.3/22.4) and then converted the
# result "from radians to degrees", which is meaningless; the angle of a
# leg ratio is arctan(opposite/adjacent).
theta = np.arctan(1.3 / 22.4)
print(theta * 180 / np.pi)

# Hypotenuse of the same triangle.
a = np.sqrt(1.3**2 + 22.4**2)
print(a)

# Extra path length relative to the straight 22.4 leg.
delta_a = a - 22.4
print(delta_a)
mpfi/util.py | bensonby/mpfi | 1 | 12763234 | <filename>mpfi/util.py
import re
import pandas as pd
import numpy as np
'''
data: pandas DataFrame
key_tuple: a tuple which is the "index_col" of the data
'''
def is_exist(data, key_tuple):
    """Return whether *key_tuple* occurs in the (Multi)Index of *data*."""
    membership = data.index.isin([key_tuple])
    return membership.any()
def get_index_values(data, column):
    """Return the values of index level *column* of *data*."""
    frame_index = data.index
    return frame_index.get_level_values(column)
'''
find header row of a .fac or a model point file (the row starting with !)
zero-indexed, i.e. if the header is at the first row, it will return 0
'''
def find_fac_header_row(filename):
    """Return the zero-based row index of the header line (the first line
    starting with '!') of a .fac or model point file.

    Returns None implicitly when no header line is present.
    """
    # strange characters.. need to set encoding
    with open(filename, 'r', encoding='latin-1') as f:
        for row, line in enumerate(f):
            if line.startswith('!'):
                return row
def mpf_meta(filename):
    """Scan a model point file and return its layout metadata.

    Returns a dict with:
        header_row   -- zero-based index of the '!' header line
        rows         -- number of '*'-prefixed data lines following the header
        column_specs -- {column name -> pandas/numpy dtype} derived from the
                        VARIABLE_TYPES line, or None when that line is absent

    Raises ValueError when no header/data section is found or the
    VARIABLE_TYPES column count does not match the header.
    """
    result = {
        'header_row': -1,
        'rows': -1,
        'column_specs': None,
    }
    variable_types = None
    n = 0
    numlines = -1
    with open(filename, 'r', encoding='latin-1') as f:
        # Phase 1: scan the preamble for NUMLINES / VARIABLE_TYPES until the
        # '!' header line is reached.
        for line in f:
            matching_numlines = re.match(r"^NUMLINES,[\s]*([\d]+)", line)
            matching_formats = re.match(r"^VARIABLE_TYPES,", line)
            if matching_numlines is not None:
                numlines = int(matching_numlines[1])
            elif matching_formats is not None:
                variable_types = line.split(',')[1:]  # first column is VARIABLE_TYPES, not used
            elif line[0] == '!':
                result['header_row'] = n  # zero-based
                variable_names = line.split(',')
                break
            n += 1
        # Phase 2: count the consecutive '*'-prefixed data lines after the
        # header; the first non-'*' line terminates the data section.
        # NOTE(review): if the file ends while still inside the data section,
        # rows stays -1 and ValueError is raised below -- confirm files always
        # end with a trailing non-'*' line.
        for line in f:
            if line[0] != '*':
                result['rows'] = n - result['header_row']
                break
            else:
                n += 1
    if result['header_row'] == -1 or result['rows'] == -1:
        print('Malformed model point file format in: ' + filename)
        raise ValueError
    if numlines != -1 and numlines != result['rows']:
        print('Warning: actual lines loaded ({}) different from NUMLINES shown in model point ({}) in: {}'.format(result['rows'], numlines, filename))
    # set formats
    if variable_types is not None:
        if len(variable_types) != len(variable_names):
            print('Malformed model point file (variable_types) -- number of columns not matched')
            raise ValueError
        # Map each column to a dtype based on the first character of its
        # VARIABLE_TYPES code: V = the '!' marker column, T = text,
        # I = nullable integer, N = numeric.
        result['column_specs'] = dict([
            (
                variable_names[i],
                {
                    'V': pd.CategoricalDtype(['*']),  # for ! column with VARIABLE_TYPES
                    'T': np.dtype('str'),
                    'I': pd.Int32Dtype(),
                    'N': np.float64,
                }[variable_types[i][0]],
            ) for i in range(0, len(variable_types))
        ])
    return result
| 3.4375 | 3 |
src/CreeDictionary/phrase_translate/crk_tag_map.py | aarppe/cree-intelligent-dictionary | 0 | 12763235 | <filename>src/CreeDictionary/phrase_translate/crk_tag_map.py<gh_stars>0
from CreeDictionary.phrase_translate.tag_map import TagMap
## Motivation
#
# TagMap handles the mapping from analysis FST tags to phrase-generation
# FST tags. As the wordform analysis and phrase-generation FSTs do fairly
# different things, using different but related sets of tags, we need to specify
# how to translate from one to the other.
#
# For example, one generic wordform analysis for ‘acâhkosa’ is
# `acâhkosa+N+A+Der/Dim+N+A+Obv`. But the phrase-generation FST takes as input
# tags and definition in the form `Obv+Dim+ star`, and outputs the inflected
# phrase `little star over there`. `Obv` has the same tag name but is now a
# start tag, not an end tag, and `Der/Dim` needs to be translated to just `Dim`.
# As well, the phrase-generation FST has stricter ordering requirements on the
# input tags.
#
## Use
#
# A TagMap is iniialized with a sequence of (wordform_tag, phrase_tag,
# precedence) tuples
#
# wordform_tag can be:
# - A literal tag, e.g., "N+", which will be matched exactly
# - A tuple of tags, e.g., ("PV/e+, "+Ind") which will be matched as a
# subsequence
# - DEFAULT if the phrase_tag should be used if no other mapping applies at
# this precedence level
#
# phrase_tag can be:
# - None if the wordform_tag is not used in the phrase transcription
# - COPY_TAG_NAME if the characters of the wordform_tag match the
# phrase_tag, for example: ("+Sg", COPY_TAG_NAME, x) means the same
# thing as ("+Sg", "Sg+", x), but with less potential for copy-paste
# mistakes.
#
# All multi-mappings are applied before single maps, and consume their tags. For
# example, a match on (("+A, "+B"), "foo", 1) will take the tags "+A" and "+B"
# out of consideration before the rules ("+A", COPY, 1) or ("+B", COPY, 1) are
# considered.
#
# The precedence number is used to sort tags before sending them to the phrase
# FST. For example, if you want Pl/Sg before Px, you could give Pl and Sg
# precedence number 1 and the possessives number 2. This precedence number is
# associated with the output tag; it is an error to give a different precedence
# value to multiple definitions that output the same tag.
COPY_TAG_NAME = TagMap.COPY_TAG_NAME

# Noun inflection: analysis-FST tags -> phrase-generation tags.
# Precedence groups: 0 = dropped word-class tags, 1 = number/location,
# 2 = diminutive, 3 = possessive person markers.
noun_wordform_to_phrase = TagMap(
    ("+N", None, 0),
    ("+A", None, 0),
    ("+I", None, 0),
    ("+D", None, 0),
    # Number
    ("+Sg", COPY_TAG_NAME, 1),
    ("+Pl", COPY_TAG_NAME, 1),
    ("+Obv", COPY_TAG_NAME, 1),
    ("+Loc", COPY_TAG_NAME, 1),
    ("+Distr", COPY_TAG_NAME, 1),
    # Diminutive
    ("+Dim", COPY_TAG_NAME, 2),
    ("+Der/Dim", "Dim+", 2),
    # Possessives
    ("+Px1Sg", COPY_TAG_NAME, 3),
    ("+Px2Sg", COPY_TAG_NAME, 3),
    ("+Px3Sg", COPY_TAG_NAME, 3),
    ("+Px1Pl", COPY_TAG_NAME, 3),
    ("+Px2Pl", COPY_TAG_NAME, 3),
    ("+Px12Pl", COPY_TAG_NAME, 3),
    ("+Px3Pl", COPY_TAG_NAME, 3),
    ("+Px4Sg/Pl", COPY_TAG_NAME, 3),
    ("+PxX", COPY_TAG_NAME, 3),
)

# Cree tense/aspects:
# Verb inflection: precedence groups: 0 = dropped word-class tags,
# 1 = tense/aspect, 2 = subject person, 3 = object person.
verb_wordform_to_phrase = TagMap(
    ("+V", None, 0),
    ("+TA", None, 0),
    ("+AI", None, 0),
    ("+II", None, 0),
    ("+TI", None, 0),
    # Tense/Aspect
    ("PV/ki+", "Prt+", 1),  # Preterite aka simple past
    (("PV/ki+", "+Ind"), "Prt+", 1),  # Preterite aka simple past
    (("+Fut", "+Cond"), "Cond+", 1),  # Future conditional
    (("+Imp", "+Imm"), "Imm+", 1),  # Immediate imperative
    (("+Imp", "+Del"), "Del+", 1),  # Delayed imperative
    (("PV/wi+", "+Ind"), "Fut+", 1),  # Future
    ("PV/wi+", "Fut+", 1),  # Also accept PV/wi without indicative as future
    (("PV/e+", "+Cnj"), None, 1),  # conjunctive marker
    # Note that these crk features as disjoint, but both are needed for the eng feature
    (("PV/ka+", "+Ind"), "Def+", 1),
    (("PV/ka+", "+Cnj"), "Inf+", 1),
    (("PV/ta+", "+Cnj"), "Inf+", 1),  # future definite
    ("+Ind", "Prs+", 1),
    (TagMap.DEFAULT, "Prs+", 1),  # default to present tense
    # Person - Subject
    ("+1Sg", COPY_TAG_NAME, 2),
    ("+2Sg", COPY_TAG_NAME, 2),
    ("+3Sg", COPY_TAG_NAME, 2),
    ("+1Pl", COPY_TAG_NAME, 2),
    ("+12Pl", "21Pl+", 2),
    ("+2Pl", COPY_TAG_NAME, 2),
    ("+3Pl", COPY_TAG_NAME, 2),
    ("+4Sg/Pl", COPY_TAG_NAME, 2),
    ("+5Sg/Pl", COPY_TAG_NAME, 2),
    ("+X", COPY_TAG_NAME, 2),
    # Person - Object
    ("+1SgO", COPY_TAG_NAME, 3),
    ("+2SgO", COPY_TAG_NAME, 3),
    ("+3SgO", COPY_TAG_NAME, 3),
    ("+1PlO", COPY_TAG_NAME, 3),
    ("+12PlO", "21PlO+", 3),
    ("+2PlO", COPY_TAG_NAME, 3),
    ("+3PlO", COPY_TAG_NAME, 3),
    ("+4Pl", COPY_TAG_NAME, 3),
    ("+4Sg", COPY_TAG_NAME, 3),
    ("+4Sg/PlO", COPY_TAG_NAME, 3),
    ("+5Sg/PlO", COPY_TAG_NAME, 3),
    ("+XO", COPY_TAG_NAME, 3),
)
| 2.15625 | 2 |
Codeforces/A_Vanya_and_Table.py | anubhab-code/Competitive-Programming | 0 | 12763236 | <gh_stars>0
# BOJ: Vanya and Table -- sum the areas of n axis-aligned rectangles given by
# their inclusive corner coordinates.
# The original built an unused 100x100 matrix before summing; that dead code
# has been removed (output is unchanged).
n = int(input())
ans = 0
for _ in range(n):
    a, b, c, d = map(int, input().split())
    # Inclusive coordinates: a rectangle (a,b)-(c,d) covers (c-a+1)*(d-b+1) cells.
    ans = ans + (c - a + 1) * (d - b + 1)
print(ans)
sdk/python/pulumi_gcp/dns/get_keys.py | sisisin/pulumi-gcp | 121 | 12763237 | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetKeysResult',
'AwaitableGetKeysResult',
'get_keys',
'get_keys_output',
]
# NOTE: auto-generated by the Pulumi Terraform bridge; limit hand edits to comments.
@pulumi.output_type
class GetKeysResult:
    """
    A collection of values returned by getKeys.
    """
    def __init__(__self__, id=None, key_signing_keys=None, managed_zone=None, project=None, zone_signing_keys=None):
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if key_signing_keys and not isinstance(key_signing_keys, list):
            raise TypeError("Expected argument 'key_signing_keys' to be a list")
        pulumi.set(__self__, "key_signing_keys", key_signing_keys)
        if managed_zone and not isinstance(managed_zone, str):
            raise TypeError("Expected argument 'managed_zone' to be a str")
        pulumi.set(__self__, "managed_zone", managed_zone)
        if project and not isinstance(project, str):
            raise TypeError("Expected argument 'project' to be a str")
        pulumi.set(__self__, "project", project)
        if zone_signing_keys and not isinstance(zone_signing_keys, list):
            raise TypeError("Expected argument 'zone_signing_keys' to be a list")
        pulumi.set(__self__, "zone_signing_keys", zone_signing_keys)

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The provider-assigned unique ID for this managed resource.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="keySigningKeys")
    def key_signing_keys(self) -> Sequence['outputs.GetKeysKeySigningKeyResult']:
        """
        A list of Key-signing key (KSK) records. Structure is documented below. Additionally, the DS record is provided:
        """
        return pulumi.get(self, "key_signing_keys")

    @property
    @pulumi.getter(name="managedZone")
    def managed_zone(self) -> str:
        return pulumi.get(self, "managed_zone")

    @property
    @pulumi.getter
    def project(self) -> str:
        return pulumi.get(self, "project")

    @property
    @pulumi.getter(name="zoneSigningKeys")
    def zone_signing_keys(self) -> Sequence['outputs.GetKeysZoneSigningKeyResult']:
        """
        A list of Zone-signing key (ZSK) records. Structure is documented below.
        """
        return pulumi.get(self, "zone_signing_keys")


class AwaitableGetKeysResult(GetKeysResult):
    # Awaitable wrapper so the result can also be consumed with `await`.
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetKeysResult(
            id=self.id,
            key_signing_keys=self.key_signing_keys,
            managed_zone=self.managed_zone,
            project=self.project,
            zone_signing_keys=self.zone_signing_keys)


def get_keys(managed_zone: Optional[str] = None,
             project: Optional[str] = None,
             opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetKeysResult:
    """
    Get the DNSKEY and DS records of DNSSEC-signed managed zones. For more information see the
    [official documentation](https://cloud.google.com/dns/docs/dnskeys/)
    and [API](https://cloud.google.com/dns/docs/reference/v1/dnsKeys).

    ## Example Usage

    ```python
    import pulumi
    import pulumi_gcp as gcp

    foo = gcp.dns.ManagedZone("foo",
        dns_name="foo.bar.",
        dnssec_config=gcp.dns.ManagedZoneDnssecConfigArgs(
            state="on",
            non_existence="nsec3",
        ))
    foo_dns_keys = foo.id.apply(lambda id: gcp.dns.get_keys(managed_zone=id))
    pulumi.export("fooDnsDsRecord", foo_dns_keys.key_signing_keys[0].ds_record)
    ```

    :param str managed_zone: The name or id of the Cloud DNS managed zone.
    :param str project: The ID of the project in which the resource belongs. If `project` is not provided, the provider project is used.
    """
    __args__ = dict()
    __args__['managedZone'] = managed_zone
    __args__['project'] = project
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('gcp:dns/getKeys:getKeys', __args__, opts=opts, typ=GetKeysResult).value

    return AwaitableGetKeysResult(
        id=__ret__.id,
        key_signing_keys=__ret__.key_signing_keys,
        managed_zone=__ret__.managed_zone,
        project=__ret__.project,
        zone_signing_keys=__ret__.zone_signing_keys)


@_utilities.lift_output_func(get_keys)
def get_keys_output(managed_zone: Optional[pulumi.Input[str]] = None,
                    project: Optional[pulumi.Input[Optional[str]]] = None,
                    opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetKeysResult]:
    """
    Get the DNSKEY and DS records of DNSSEC-signed managed zones. For more information see the
    [official documentation](https://cloud.google.com/dns/docs/dnskeys/)
    and [API](https://cloud.google.com/dns/docs/reference/v1/dnsKeys).

    ## Example Usage

    ```python
    import pulumi
    import pulumi_gcp as gcp

    foo = gcp.dns.ManagedZone("foo",
        dns_name="foo.bar.",
        dnssec_config=gcp.dns.ManagedZoneDnssecConfigArgs(
            state="on",
            non_existence="nsec3",
        ))
    foo_dns_keys = foo.id.apply(lambda id: gcp.dns.get_keys(managed_zone=id))
    pulumi.export("fooDnsDsRecord", foo_dns_keys.key_signing_keys[0].ds_record)
    ```

    :param str managed_zone: The name or id of the Cloud DNS managed zone.
    :param str project: The ID of the project in which the resource belongs. If `project` is not provided, the provider project is used.
    """
    ...
| 1.765625 | 2 |
preferences/admin.py | jbbqqf/okapi | 0 | 12763238 | <filename>preferences/admin.py<gh_stars>0
# -*- coding: utf-8 -*-
from django.contrib import admin
from preferences.models import (
UserInterface, UserInterfaceForm, UserPref, UserPrefForm)
class UserInterfaceAdmin(admin.ModelAdmin):
    # Admin for UserInterface: searchable by name and free-text comment.
    form = UserInterfaceForm
    list_display = ('name', 'comment',)
    search_fields = ('name', 'comment',)


class UserPrefAdmin(admin.ModelAdmin):
    # Admin for per-user, per-interface preference records.
    form = UserPrefForm
    list_display = ('user', 'ui', 'conf',)
    search_fields = ('user', 'ui', 'conf',)


admin.site.register(UserInterface, UserInterfaceAdmin)
admin.site.register(UserPref, UserPrefAdmin)
| 1.875 | 2 |
ExerciciosPYTHON/NovPython/054.py | Samuel-Melo890/Python-Desafios | 0 | 12763239 | <reponame>Samuel-Melo890/Python-Desafios<filename>ExerciciosPYTHON/NovPython/054.py
# Check whether the parentheses in a sentence are balanced, using a stack.
from module.interface import *
from os import system

system('cls')  # clear screen (Windows-specific)
menu('Parênteses Pilhas')
f = input('Digite uma frase com parênteses: ').strip()

# Stack holds one entry per currently-open '('. Iterate characters directly
# instead of the original manual index loop; an unmatched ')' fails fast.
pilha = []
balanced = True
for ch in f:
    if ch == '(':
        pilha.append(ch)
    elif ch == ')':
        if pilha:
            pilha.pop()
        else:
            balanced = False
            break

# Balanced iff no early unmatched ')' and no '(' left open.
if balanced and not pilha:
    print('\033[32mOs parênteses foram colocados corretamente!\033[m')
else:
    print('\033[31mSeus parênteses foram colocados de forma errada!\033[m')
| 3.296875 | 3 |
BOJ4287.py | INYEONGKIM/BOJ | 2 | 12763240 | alp = "abcdefghijklmnopqrstuvwxyz"
res = ""
while True:
s = __import__('sys').stdin.readline().strip()
if s == "#":
break
x, y, z = s.split()
t = ""
for i in range(len(x)):
dif = (ord(y[i]) - ord(x[i]) + 26) % 26
t += alp[(ord(z[i]) - ord('a') + dif) % 26]
res += s + " " + t + "\n"
print(res, end="")
| 2.96875 | 3 |
PythonBrasil/EstruturaSequencial/Soma.py | IGDEXE/Python | 0 | 12763241 | # Faça um Programa que peça dois números e imprima a soma.
# <NAME>
# Recebe os numeros
primeiroNumero = int(input("Informe um numero: "))
segundoNumero = int(input("Informe outro numero: "))
# Faz a soma
soma = primeiroNumero + segundoNumero
# Mostra na tela
print("A soma do numero %s com %s é %s" % (primeiroNumero, segundoNumero, soma)) | 4.09375 | 4 |
code/Tokens/TokenEnum.py | antuniooh/Dattebayo-compiler | 0 | 12763242 | <gh_stars>0
from enum import Enum
""" Enum class of Tokens
"""
class TokenEnum(Enum):
    """Token vocabulary of the Naruto-themed toy language.

    Each member maps a token name to the lexeme (or token class) it
    represents: keywords, operators, delimiters and literal categories.
    """

    # Control-flow keywords
    NINJUTSU = "if"
    GENJUTSU = "elif"
    TAIJUTSU = "else"
    KAGEBUNSHIN = "for"
    TSUKUYOMI = "while"
    # Function-related keywords
    CHAKRA = "def"
    KAMUI = "return"
    SHARINGAN = "print"
    # Type keywords and program entry point
    RASENGAN = "int"
    RAIKIRI = "float"
    ZETSU = "bool"
    KUCHIYOSE = "string"
    NARUTO = "main"
    # Arithmetic operators
    FUUMASHURIKEN = "+"
    KUNAI = "-"
    SHURIKEN = "*"
    KATANA = "/"
    # Comparison / logical operators
    KIRIGAKURE = "=="
    KUMOGAKURE = "&&"
    AMEGAKURE = "||"
    # Delimiters and assignment
    LPAREN = "("
    RPAREN = ")"
    LBRACK = "{"
    RBRACK = "}"
    HAKU = "="
    ENDPOINT = ";"
    # Literal / identifier token classes (not literal lexemes)
    INTEGER = "INTEGER"
    FLOAT = "FLOAT"
    BOOLEAN = "BOOLEAN"
    STRING = "STRING"
    IDENTIFIER = "IDENTIFIER"
    # Relational operators
    GENNIN = "<"
    JUNNIN = ">"
magpie/dropboxlib/downloader/__init__.py | nimiq/moogle-project | 4 | 12763243 | import logging
from dropbox.client import DropboxClient # Dropobox official library
from ..redislist import RedisDropboxDownloadList, RedisDropboxIndexList
from .dropboxfile import DropboxFile
log = logging.getLogger('dropbox')
class DropboxDownloader:
    """
    Download files from Dropbox based on a list previously built by the `DropboxCrawler` and
    stored internally.

    Parameters:
    bearertoken_id -- the id of the `BearToken` owner of the Dropbox account.
    access_token -- the access token of the `BearToken` owner of the Dropbox account.
    """

    def __init__(self, bearertoken_id, access_token):
        self.bearertoken_id = bearertoken_id
        self.access_token = access_token

    @property
    def _client(self):
        """
        A `dropbox.DropboxClient` for the current `bearertoken`.
        It is a cached attribute so that it is a singleton.
        """
        try:
            cl = self._client_cached
        except AttributeError:
            cl = self._client_cached = DropboxClient(self.access_token)
        return cl

    def run(self):
        """
        Process the pending download list for this bearertoken: download
        added files to disk, then move every processed entry to the index list.
        """
        # Use the module logger (lazy %-args) instead of the previous bare
        # print() debug statements so output honors the app's logging config.
        log.info('Downloading for bearertoken_id: %s', self.bearertoken_id)
        redis_dw = RedisDropboxDownloadList(self.bearertoken_id)
        redis_ix = RedisDropboxIndexList(self.bearertoken_id)

        for redis_entry in redis_dw.iterate():
            # `redis_entry` is a `RedisDropboxEntry` instance.
            # If:
            #   - `redis_entry.is_del()`: move the entry to the index list
            #   - `redis_entry.is_reset()`: move the entry to the index list
            #   - `redis_entry.is_add()`: download the file locally, update
            #     `redis_entry.remote_path` with the local file name, move the entry to the
            #     index list
            #
            # Bear in mind that:
            #   - entries with `redis_entry.is_add()` are only files (no dirs cause they have
            #     already been filtered out)
            #   - entries with `redis_entry.is_del()`: we don't know if they are files or dir
            #     but we don't care since during indexing we ask Solr to delete: name and name/*
            # And a sanity check is run when creating a `RedisDropboxEntry` instance.
            log.debug('%s %s', redis_entry.operation, redis_entry.remote_path)

            if redis_entry.is_add():
                # Download the file. We could use client.get_file or client.get_file_and_metadata,
                # but under the hood the actual call to the API is the same, cause that basic API
                # call returns the file plus its metadata.
                log.debug('Downloading: %s', redis_entry.remote_path)
                content, metadata = self._client.get_file_and_metadata(redis_entry.remote_path)
                dropbox_file = DropboxFile(content, metadata)  # renamed: `file` shadowed a builtin
                dropbox_file.store_to_disk(self.bearertoken_id)

                # Update `remote_path` attribute with the local name
                redis_entry.local_name = dropbox_file.local_name

            # Every processed entry (add/del/reset) moves to the index list,
            # per the contract documented above.
            redis_ix.buffer(redis_entry)
        redis_ix.flush_buffer()
test/levenshteinTests.py | gerhardJaeger/asjp19worldTree | 0 | 12763244 | import pandas as pd
import numpy as np
import Levenshtein
import random
random.seed(12345)
d = pd.read_csv("../data/asjp19wide.csv", index_col=0)
words = d.values[~d.isnull()]
words = np.concatenate([w.split('-') for w in words])
tests = pd.DataFrame(columns=['word1', 'word2', 'LD'])
for i in range(1000):
if i % 100 == 0:
print(i)
w1, w2 = random.sample(list(words), 2)
tests.loc[i] = [w1, w2, Levenshtein.distance(w1, w2)]
tests.to_csv('levenshteinTests.csv', index=False)
| 2.96875 | 3 |
tests/test_snakefile.py | stephenkraemer/regionset_profiler | 2 | 12763245 | """Test for the snakemake workflow distributed with region_set_profiler"""
# Integration/smoke script: prepares gene-annotation pickles and then drives
# the region_set_profiler snakemake workflow against hard-coded cluster paths.
import json
import subprocess
import os
import pandas as pd
import numpy as np
# NOTE(review): used as a directory prefix below *without* a trailing slash —
# see the path-concatenation issue flagged at `clustered_gene_anno_fp`.
tmpdir = "/icgc/dkfzlsdf/analysis/hs_ontogeny/temp"
# TODO: gtfanno result has weird index
gtfanno_result: pd.DataFrame = pd.read_pickle(
    "/icgc/dkfzlsdf/analysis/hs_ontogeny/results/wgbs/cohort_results/analyses/hierarchy/annotation/hierarchy-dmrs/v1/hierarchy-dmrs-anno_primary-annotations.p"
)
# all_regions_annotated = pd.read_pickle('/icgc/dkfzlsdf/analysis/hs_ontogeny/results/wgbs/cohort_results/analyses/hierarchy/annotation/hierarchy-dmrs/v1/hierarchy-dmrs-anno_all-annotations.p')
# all_regions_annotated.loc[all_regions_annotated.feat_class == 'intergenic', 'feature_rank'] = 'primary'
# gtfanno_result_temp = '/home/kraemers/temp/gtfanno-temp.p'
# primary_annotations.to_pickle(gtfanno_result_temp)
# gtfanno_result = primary_annotations
# Collapse multiple gene names per region into one comma-separated string.
gene_annos = gtfanno_result.groupby(["Chromosome", "Start", "End", "gtfanno_uid"])[
    "gene_name"
].aggregate(lambda ser: ser.str.cat(sep=","))
# Sanity check: gtfanno_uid must be a gapless 0..n-1 sequence before we drop it.
assert (
    gene_annos.index.get_level_values("gtfanno_uid") == np.arange(gene_annos.shape[0])
).all()
gene_annos.index = gene_annos.index.droplevel(3)
# NOTE(review): no '/' between `tmpdir` and the file name, so this resolves to
# ".../tempclustered-gene-annos.p" — probably a missing path separator.
clustered_gene_anno_fp = tmpdir + "clustered-gene-annos.p"
gene_annos.to_pickle(clustered_gene_anno_fp)
# Code to merge DMRs which are closer than merging_distance bp
# This should be moved elsewhere
# merging could also be achieved with pyranges:
# 1. slop all intervals with merging_distance on both sides
# 2. Cluster all intervals
# 3. Use the clustered intervals to find groups of intervals within the clustered intervals and compute the group annotations
merging_distance = 500
gtfanno_result = gtfanno_result.query('feat_class == "Promoter"')
# Gap in bp between the end of each region and the start of the next one.
distance_to_next_region = (
    gtfanno_result.Start.iloc[1:].values - gtfanno_result.End.iloc[0:-1].values
)
# we iterate over the regions
# whenever the distance to the next region is > merging_distance, we begin a new cluster of regions
# In vectorized form:
region_cluster_ids = np.concatenate(
    [[1], 1 + np.cumsum(distance_to_next_region > merging_distance)], axis=0
)
# Compress to gene anno series for the merged DMRs
gene_annos = gtfanno_result.groupby(region_cluster_ids)["gene_name"].apply(
    lambda ser: ser.str.cat(sep=",")
)
gene_annos.to_pickle(clustered_gene_anno_fp)
# NOTE(review): this immediately overwrites both pickles written above, so the
# file finally holds the *unmerged* per-row promoter gene names. Confirm which
# of the three variants is actually intended for the workflow input.
gtfanno_result["gene_name"].to_pickle(clustered_gene_anno_fp)
# Workflow configuration consumed by region_set_profiler.smk below.
config = {
    "tasks": {
        "cluster_ids": {
            "no-basos/beta-value_zscores/metric-euclidean/linkage-ward/enrichments/min-gap_0.25": (
                "min-gap_0.25",
                "/icgc/dkfzlsdf/analysis/hs_ontogeny/results/wgbs/cohort_results/analyses/hierarchy/clustering/full-hierarchy/method-selection/no-basos/beta-value_zscores/metric-euclidean/linkage-ward/cutree-all.p",
            ),
            # 'no-basos/beta-value_zscores/metric-euclidean/linkage-ward/enrichments/min-gap_0.12': ('min-gap_0.12',
            # '/icgc/dkfzlsdf/analysis/hs_ontogeny/results/wgbs/cohort_results/analyses/hierarchy/clustering/full-hierarchy/method-selection/no-basos/beta-value_zscores/metric-euclidean/linkage-ward/cutree-all.p')
        },
        "metadata_tables": {
            "codex": "/icgc/dkfzlsdf/analysis/hs_ontogeny/databases/enrichment_databases/lola_chipseq_2018-04-12/mm10/codex/regions/codex_annotations.csv",
            "msigdb_canonical_pathways": "/icgc/dkfzlsdf/analysis/hs_ontogeny/databases/region_set_profiler_databases/msigdb_gmts/canonical-pathways.gmt",
        },
        "gene_annotations": {"promoters_500-bp-clusters": clustered_gene_anno_fp},
    },
    "output_dir": "/icgc/dkfzlsdf/analysis/hs_ontogeny/temp/rsp-tests",
    "tmpdir": tmpdir,
    "chromosomes": [
        "1",
        "10",
        "11",
        "12",
        "13",
        "14",
        "15",
        "16",
        "17",
        "18",
        "19",
        "2",
        "3",
        "4",
        "5",
        "6",
        "7",
        "8",
        "9",
    ],
}
# Serialize the config and invoke the snakemake workflow on it.
config_fp = os.path.expanduser("~/temp/rsp-config.json")
with open(config_fp, "w") as fout:
    json.dump(config, fout)
subprocess.run(
    f"""
snakemake \
--snakefile {os.path.expanduser('~/projects/region_set_profiler/src/region_set_profiler/region_set_profiler.smk')} \
--configfile {config_fp} \
--cores 24 \
--keep-going \
--forcerun /icgc/dkfzlsdf/analysis/hs_ontogeny/temp/rsp-tests/no-basos/beta-value_zscores/metric-euclidean/linkage-ward/enrichments/min-gap_0.25/msigdb_canonical_pathways:promoters_500-bp-clusters/msigdb_canonical_pathways:promoters_500-bp-clusters.done
""",
    shell=True,
    executable="/bin/bash",
)
# --dryrun \
| 2.078125 | 2 |
src/day10.py | TobiasRoeding/advent-of-code-2021 | 0 | 12763246 | <gh_stars>0
class Day10:
ILLEGAL_CHAR_TO_POINTS = {
")": 3,
"]": 57,
"}": 1197,
">": 25137,
}
def __init__(self, input="src/input/day10.txt"):
self.INPUT = input
def read_input(self):
input = []
with open(self.INPUT, "r") as fp:
lines = fp.readlines()
input = [line.strip() for line in lines]
return input
def part1(self):
input = self.read_input()
# remove all legal chunks
legal_chunks = ["()", "{}", "<>", "[]"]
cleaned_input = []
for line in input:
prev_length = float("inf")
while prev_length > len(line):
prev_length = len(line)
for chunk in legal_chunks:
line = line.replace(chunk, "")
cleaned_input.append(line)
# check if incomplete or illegal
illegal_characters = []
for line in cleaned_input:
for char in line:
if char not in ["(", "{", "<", "["]:
illegal_characters.append(char)
break
return sum([self.ILLEGAL_CHAR_TO_POINTS[char] for char in illegal_characters])
def part2(self):
input = self.read_input()
# remove all legal chunks
legal_chunks = ["()", "{}", "<>", "[]"]
cleaned_input = []
for line in input:
prev_length = float("inf")
while prev_length > len(line):
prev_length = len(line)
for chunk in legal_chunks:
line = line.replace(chunk, "")
cleaned_input.append(line)
# discard corrupted lines
incomplete_input = []
for line in cleaned_input:
closings = [")", "}", ">", "]"]
check = False
for closing in closings:
if closing in line:
check = True
if not check:
incomplete_input.append(line)
# reverse the order
missing_input = [line[::-1] for line in incomplete_input]
# reverse doesn't change opening to closing brackets,
# which is why we use opening brackets to calculate the final score
char_to_points = {
"(": 1,
"[": 2,
"{": 3,
"<": 4,
}
# calculate result
scores = []
for line in missing_input:
score = 0
for char in line:
score *= 5
score += char_to_points[char]
scores.append(score)
# sort scores and return middle
return sorted(scores)[len(scores) // 2]
def execute(self):
print(f"Solution for part 1: {self.part1()}")
print(f"Solution for part 2: {self.part2()}")
# Solve both parts when this module is executed as a script.
if __name__ == "__main__":
    Day10().execute()
| 3.203125 | 3 |
game/template.py | laddie132/MD3 | 6 | 12763247 | <filename>game/template.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = "Han"
__email__ = "<EMAIL>"
import random
import json
class AgentActs:
    """Pre-defined agent actions and slots."""

    DIRECTED_BY = 'directed_by'
    RELEASE_YEAR = 'release_year'
    WRITTEN_BY = 'written_by'
    STARRED_ACTORS = 'starred_actors'
    HAS_GENRE = 'has_genre'
    HAS_TAGS = 'has_tags'
    IN_LANGUAGE = 'in_language'
    GUESS = 'guess'

    # Canonical slot ordering: integer ids are positions in this list.
    # NOTE(review): HAS_TAGS is defined above but not listed here -- confirm
    # that 'has_tags' is intentionally not an askable slot.
    ALL_SLOTS = [DIRECTED_BY, RELEASE_YEAR, WRITTEN_BY,
                 STARRED_ACTORS, HAS_GENRE, IN_LANGUAGE]
    # Every action is either a slot question or the final guess.
    ALL_ACTIONS = ALL_SLOTS + [GUESS]

    @staticmethod
    def slot_size():
        """Number of askable slots."""
        return len(AgentActs.ALL_SLOTS)

    @staticmethod
    def act_size():
        """Number of agent actions (all slots plus the guess action)."""
        return len(AgentActs.ALL_ACTIONS)

    @staticmethod
    def contains_act(a):
        """Return True if ``a`` names a known agent action."""
        return a in AgentActs.ALL_ACTIONS

    @staticmethod
    def contains_slot(s):
        """Return True if ``s`` names a known slot."""
        return s in AgentActs.ALL_SLOTS

    @staticmethod
    def slot_to_id(s):
        """Map a slot name to its integer id."""
        assert AgentActs.contains_slot(s), '%s not a valid slot' % s
        return AgentActs.ALL_SLOTS.index(s)

    @staticmethod
    def id_to_slot(idx):
        """Map an integer id back to its slot name."""
        assert idx < len(AgentActs.ALL_SLOTS)
        return AgentActs.ALL_SLOTS[idx]

    @staticmethod
    def action_to_id(a):
        """Map an action name to its integer id."""
        assert AgentActs.contains_act(a), '%s not a valid action' % a
        return AgentActs.ALL_ACTIONS.index(a)

    @staticmethod
    def id_to_action(idx):
        """Map an integer id back to its action name."""
        assert idx < len(AgentActs.ALL_ACTIONS)
        return AgentActs.ALL_ACTIONS[idx]
class NLTemplate:
    """Turn agent actions into natural-language utterances via a JSON template.

    Template schema (per action): ``nl_first`` / ``nl_more`` lists of
    candidate sentences, and a ``slots`` list naming at most one ``$slot$``
    placeholder to substitute.
    """

    # Fallback utterances used when the requested slot value is unknown.
    NO_ANSWER = ['I don`t know the answer.', 'I`m not sure about that.']

    def __init__(self, template_path):
        """Load the natural-language templates from *template_path* (JSON)."""
        self.template_path = template_path
        with open(self.template_path, 'r') as fin:
            self.nl_template = json.load(fin)

    def act_to_nl(self, act_type, act_value, is_first=True):
        """Fill the template for *act_type* and return the utterance.

        :param act_type: agent action name (must be a valid AgentActs action)
        :param act_value: slot value to substitute, or None if unknown
        :param is_first: whether this action is asked for the first time in
            the current dialog (selects the ``nl_first`` templates)
        :return: the natural-language sentence
        :raises ValueError: if *act_type* is unknown or has no templates
        """
        if not AgentActs.contains_act(act_type):
            raise ValueError('Wrong value of act_type: %s' % str(act_type))
        entry = self.nl_template[act_type]
        candidates = entry['nl_first' if is_first else 'nl_more']
        if not candidates:
            raise ValueError('No natural language template with action %s' % act_type)
        utterance = random.choice(candidates)
        slots = entry['slots']
        if slots:
            # At most one placeholder slot is supported per template.
            assert len(slots) == 1
            if act_value is None:
                utterance = random.choice(self.NO_ANSWER)
            else:
                utterance = utterance.replace('$%s$' % slots[0], str(act_value))
        return utterance
| 2.953125 | 3 |
multidb/decorators.py | apnarm/django-multidb | 0 | 12763248 | # -*- coding: utf-8 -*-
import functools
from django.forms.utils import ErrorList
from .readonly import read_only_mode, ReadOnlyError
from .signals import send_post_commit, send_post_rollback, send_pre_commit
def full_clean_if_not_read_only(full_clean):
    """Decorator for preventing form submissions while in read-only mode.

    Wraps a form's ``full_clean``: after normal validation runs, if the site
    is in read-only mode, the ``ReadOnlyError`` message is prepended to the
    form's non-field errors and ``cleaned_data`` is discarded so the form
    never validates.

    Fix: apply ``functools.wraps`` so the wrapped method keeps its metadata,
    consistent with ``wrap`` below in this module.
    """
    # NOTE(review): read_only_mode is imported at module load time; this
    # assumes it is a live object (not a plain bool snapshot) -- confirm
    # against the .readonly module.
    @functools.wraps(full_clean)
    def wrapper(self):
        full_clean(self)
        if read_only_mode:
            if '__all__' not in self._errors:
                self._errors['__all__'] = ErrorList()
            self._errors.get('__all__').insert(0, ReadOnlyError.message)
            # Drop cleaned_data so is_valid() treats the form as invalid.
            if hasattr(self, 'cleaned_data'):
                delattr(self, 'cleaned_data')
    return wrapper
def wrap(before=None, after=None, condition=lambda *args, **kwargs: True):
    """
    A helper for creating decorators.

    Runs ``before`` ahead of the decorated function and ``after`` once it
    returns. ``condition`` is evaluated exactly once, with the decorated
    function's arguments, before the call; when it is falsy both hooks are
    skipped (the function itself always runs).
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            enabled = condition(*args, **kwargs)
            if enabled and before:
                before()
            result = func(*args, **kwargs)
            if enabled and after:
                after()
            return result
        return wrapped
    return decorator
def wrap_before(before, condition=lambda *args, **kwargs: True):
    """
    A helper for creating decorators.

    Runs ``before`` ahead of the decorated function whenever ``condition``
    (called with the decorated function's arguments) is truthy. The decorated
    function itself always runs.

    Fix: apply ``functools.wraps`` so the wrapped function keeps its
    metadata, consistent with ``wrap`` above in this module.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            if condition(*args, **kwargs):
                before()
            return func(*args, **kwargs)
        return wrapped
    return decorator
def wrap_after(after, condition=lambda *args, **kwargs: True):
    """
    A helper for creating decorators.

    Runs ``after`` once the decorated function has returned, whenever
    ``condition`` (called with the decorated function's arguments, *after*
    the call) is truthy.

    Fix: apply ``functools.wraps`` so the wrapped function keeps its
    metadata, consistent with ``wrap`` above in this module.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            result = func(*args, **kwargs)
            if condition(*args, **kwargs):
                after()
            return result
        return wrapped
    return decorator
# Decorator: fire the pre-commit signal before the wrapped callable and the
# post-commit signal after it.
commit = wrap(
    before=send_pre_commit,
    after=send_post_commit,
)
# Decorator: fire the post-rollback signal after the wrapped callable.
rollback = wrap_after(
    after=send_post_rollback,
)
| 2.578125 | 3 |
tests/dygraph/test_unstructured_prune.py | zzjjay/PaddleSlim | 0 | 12763249 | import sys
sys.path.append("../../")
import unittest
import paddle
import numpy as np
from paddleslim import UnstructuredPruner
from paddle.vision.models import mobilenet_v1
class TestUnstructuredPruner(unittest.TestCase):
    """Smoke tests for PaddleSlim's ``UnstructuredPruner`` on MobileNetV1."""
    def __init__(self, *args, **kwargs):
        super(TestUnstructuredPruner, self).__init__(*args, **kwargs)
        # Dynamic-graph (imperative) mode is required by this pruner API.
        paddle.disable_static()
        self._gen_model()
    def _gen_model(self):
        # Untrained MobileNetV1 plus a ratio-mode pruner targeting 98% of the
        # prunable weights (threshold is recomputed by the pruner itself).
        self.net = mobilenet_v1(num_classes=10, pretrained=False)
        self.pruner = UnstructuredPruner(
            self.net, mode='ratio', ratio=0.98, threshold=0.0)
    def test_prune(self):
        # NOTE(review): despite its name, total_sparse appears to return the
        # *density* (ratio of non-zero weights) -- the variable names and the
        # assertions below assume so; confirm against the PaddleSlim docs.
        ori_density = UnstructuredPruner.total_sparse(self.net)
        ori_threshold = self.pruner.threshold
        # One pruning step recomputes the magnitude threshold and applies masks.
        self.pruner.step()
        # A forward pass on random data; weights must stay pruned afterwards.
        self.net(
            paddle.to_tensor(
                np.random.uniform(0, 1, [16, 3, 32, 32]), dtype='float32'))
        cur_density = UnstructuredPruner.total_sparse(self.net)
        cur_threshold = self.pruner.threshold
        print("Original threshold: {}".format(ori_threshold))
        print("Current threshold: {}".format(cur_threshold))
        print("Original density: {}".format(ori_density))
        print("Current density: {}".format(cur_density))
        # Pruning may only raise the threshold and lower the density.
        self.assertLessEqual(ori_threshold, cur_threshold)
        self.assertLessEqual(cur_density, ori_density)
        # Writing the masks back into the parameters must not change density.
        self.pruner.update_params()
        self.assertEqual(cur_density, UnstructuredPruner.total_sparse(self.net))
    def test_summarize_weights(self):
        # With ratio 1.0 the summarized threshold should equal the largest
        # absolute weight across all prunable sublayers.
        max_value = -float("inf")
        threshold = self.pruner.summarize_weights(self.net, 1.0)
        for name, sub_layer in self.net.named_sublayers():
            if not self.pruner._should_prune_layer(sub_layer):
                continue
            for param in sub_layer.parameters(include_sublayers=False):
                max_value = max(
                    max_value,
                    np.max(np.abs(np.array(param.value().get_tensor()))))
        print("The returned threshold is {}.".format(threshold))
        print("The max_value is {}.".format(max_value))
        self.assertEqual(max_value, threshold)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| 2.5 | 2 |
reg/models.py | saurabh00031/Coding-Avengers-007 | 2 | 12763250 | <filename>reg/models.py<gh_stars>1-10
from django.db import models
from django.contrib.auth.models import AbstractUser
# Create your models here.
class User(AbstractUser):
    """Custom auth user with role flags distinguishing the two account types."""
    # True when the account belongs to an end user (patient side).
    is_user = models.BooleanField(default=False)
    # True when the account belongs to a hospital.
    is_hospital = models.BooleanField(default=False)
class hspinfo(models.Model):
    """Hospital profile, linked one-to-one with its ``User`` account."""
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    hospital_Name = models.CharField(max_length=50)
    phone = models.CharField(max_length=12)
    email = models.CharField(max_length=30)
    city = models.CharField(max_length=30)
    address = models.CharField(max_length=120)
    # NOTE(review): capacity figures are stored as free-text CharFields, not
    # integers -- numeric comparison/sorting will be lexicographic.
    no_of_beds = models.CharField(max_length=10)
    no_of_ventilators = models.CharField(max_length=10)
    no_of_vaccines = models.CharField(max_length=10)
    def __str__(self):
        """Display the hospital by its account's username."""
        return self.user.username
class usrinfo(models.Model):
    """End-user profile, linked one-to-one with its ``User`` account."""
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    full_Name = models.CharField(max_length=50)
    phone = models.CharField(max_length=12)
    email = models.CharField(max_length=30)
    city = models.CharField(max_length=30)
    address = models.TextField()
def __str__(self):
return self.user.username | 2.4375 | 2 |