text
stringlengths 8
6.05M
|
|---|
from providers.google_rss import GoogleRSS

# The same two googleusercontent photos under every size/crop suffix variant
# (w####-h####[-p|-d], s0[-d], and bare filename) that the parser must handle.
url_list = ['https://lh4.googleusercontent.com/-s2WOC8Z-cu8/UtVS1nj0FfI/AAAAAAAACPs/RIhCZxCLuB4/w1200-h812-p/2013%2BLondon%2Bmeeting%2B098-sRGB%2Bsmall.jpg',
            'https://lh4.googleusercontent.com/-s2WOC8Z-cu8/UtVS1nj0FfI/AAAAAAAACPs/RIhCZxCLuB4/w1200-h812/2013%2BLondon%2Bmeeting%2B098-sRGB%2Bsmall.jpg',
            'https://lh4.googleusercontent.com/-s2WOC8Z-cu8/UtVS1nj0FfI/AAAAAAAACPs/RIhCZxCLuB4/w1200-h812-d/2013%2BLondon%2Bmeeting%2B098-sRGB%2Bsmall.jpg',
            'https://lh4.googleusercontent.com/-s2WOC8Z-cu8/UtVS1nj0FfI/AAAAAAAACPs/RIhCZxCLuB4/s0/2013%2BLondon%2Bmeeting%2B098-sRGB%2Bsmall.jpg',
            'https://lh4.googleusercontent.com/-s2WOC8Z-cu8/UtVS1nj0FfI/AAAAAAAACPs/RIhCZxCLuB4/s0-d/2013%2BLondon%2Bmeeting%2B098-sRGB%2Bsmall.jpg',
            'https://lh4.googleusercontent.com/-s2WOC8Z-cu8/UtVS1nj0FfI/AAAAAAAACPs/RIhCZxCLuB4/2013%2BLondon%2Bmeeting%2B098-sRGB%2Bsmall.jpg',
            'https://lh4.googleusercontent.com/-YaAT5PPovF8/UuoXyUqgV2I/AAAAAAABbC0/-MzlbFRFAek/w506-h750/20140128_153852.jpg',
            'https://lh4.googleusercontent.com/-YaAT5PPovF8/UuoXyUqgV2I/AAAAAAABbC0/-MzlbFRFAek/s0-d/20140128_153852.jpg']

# Print the parsed full-resolution URL for each sample so the variants can
# be eyeballed side by side.
for sample_url in url_list:
    print(GoogleRSS.parse_full_image_url(sample_url))
|
#!/usr/bin/python3
# This is the Python 3.x client-sided script for a Chat Server.
from socket import socket, AF_INET, SOCK_STREAM, error

HOST = 'localhost'
PORT = 1337
serverAddress = (HOST, PORT)

Client_Socket = socket(family=AF_INET, type=SOCK_STREAM)
try:
    try:
        Client_Socket.connect(serverAddress)
    except error:
        # Bug fix: only a failed connect reports "cannot connect"; before,
        # one broad handler printed this for ANY socket error, including
        # send failures in the middle of a chat session.
        print('Sorry cannot connect to the Chat Server :(')
    else:
        try:
            # Read the server's greeting, then register the user's name.
            recvMessage = Client_Socket.recv(1024)
            print(recvMessage.decode(encoding='ascii'))
            userName = input('Please enter your name: ')
            userName = userName.encode(encoding='ascii')
            Client_Socket.send('Name: '.encode(encoding='ascii') + userName)
            # Relay chat messages until the process is interrupted.
            while True:
                print('\n')
                message = input('{}: '.format(userName.decode(encoding='ascii')))
                Client_Socket.send(message.encode(encoding='ascii'))
        except error:
            print('Lost connection to the Chat Server :(')
finally:
    # Always release the socket, whatever happened above.
    Client_Socket.close()
|
import numpy as np

# Basic array creation from a Python list.
list_data = [1, 2, 3]
array = np.array(list_data)
print("배열 타입 :",array.dtype)
print("배열 크기 :",array.size)

array1 = np.arange(4)  # array holding 0..3
# Bug fix: np.float / np.str were deprecated aliases of the builtins and were
# removed in NumPy 1.24 — use the builtin types instead.
array2 = np.zeros((4,4), dtype=float)  # 4x4 array initialised to 0
array3 = np.ones((1,4), dtype=str)  # 1x4 array initialised to '1' (string dtype)
array4 = np.random.randint(0,10,(3,3))  # 3x3 array of random ints in 0..9
array5 = np.random.normal(0,1,(3,3))  # 3x3 array drawn from the standard normal (mean 0, std 1)
array4_5 = np.concatenate([array4,array5], axis=0)  # stack along rows; axis=1 would stack columns
array2_r = array2.reshape((2,8))  # reshape the 4x4 array into 2x8
array2_s = np.split(array2,[3],axis=1)  # split the array at column 3
array2_3 = array2 + np.array(array3, dtype=float)*3  # broadcast add: 4x4 + (1x4 cast to float, scaled by 3)
array4_m = array4 < 4  # boolean mask: True where the element is < 4
array4[array4_m] = -1  # set every masked (< 4) element to -1

### saving NumPy arrays ###
saveArr1 = np.random.randint(0,10,size=9).reshape((3,3))
saveArr2 = np.random.randint(0,10,size=9).reshape((3,3))
np.save("saved.npy",saveArr1)  # save a single array
loadArr1 = np.load("saved.npy")  # load it back
np.savez("saved.npz", saveArr1=saveArr1, saveArr2=saveArr2)  # save several arrays in one archive
result = np.load("saved.npz")  # load the archive
loadArr1 = result["saveArr1"]  # extract by the keyword name used at save time
loadArr2 = result["saveArr2"]

## sorting ##
seqArr = np.array([[5, 9, 10, 3, 1],[8, 3, 4, 2, 5]])
seqArr.sort(axis=0)  # sort each column in place
print(np.unique(seqArr))  # de-duplicated, sorted elements
|
import inspect
import json
import sys
from difflib import ndiff
from django.conf import settings
from django.contrib.gis.gdal import DataSource
from django.db import transaction
from organisations.models import (
DivisionGeography,
OrganisationDivision,
OrganisationDivisionSet,
)
from storage.shapefile import pre_process_layer
class DiffException(Exception):
    """Signal a mismatch between two name lists, keeping the diff for display."""

    def __init__(self, message, diff):
        # Keep the ndiff output so the caller can show what differed.
        super().__init__(message)
        self.diff = diff
class MapCreationNeededException(Exception):
    """Raised after printing a candidate name_map that must be saved to disk."""
class DivisionSetGeographyImporter:
    """Import division boundary geographies from a GDAL DataSource.

    Each boundary feature is matched by name (optionally translated through
    ``name_map``) to an OrganisationDivision in ``division_set``, and a
    DivisionGeography is saved per division inside a single transaction.
    """

    def __init__(
        self,
        data,
        division_set,
        name_column="name",
        name_map=None,
        srid=27700,
        source="unknown",
        stdout=None,
    ):
        """
        Args:
            data: a single-layer django.contrib.gis.gdal.DataSource.
            division_set: the target OrganisationDivisionSet.
            name_column: feature attribute holding the division name.
            name_map: optional {boundary name: legislation name} overrides.
                Bug fix: the default used to be a mutable ``{}`` shared
                across every instance; it is now None with a fresh dict
                created per instance (backward compatible).
            srid: spatial reference of the source data (27700 = British
                National Grid).
            source: provenance label stored on each DivisionGeography.
            stdout: stream for interactive output (defaults to sys.stdout).
        """
        if not isinstance(data, DataSource):
            error = "param 'data' must be an instance of django.contrib.gis.gdal.DataSource"
            raise TypeError(error)
        if len(data) != 1:
            raise ValueError("Expected 1 layer, found %i" % (len(data)))
        self.data = data[0]
        self.name_column = name_column
        self.source = source
        if not isinstance(division_set, OrganisationDivisionSet):
            error = "param 'division_set' must be an instance of organisations.models.OrganisationDivisionSet"
            raise TypeError(error)
        self.div_set = division_set
        if name_map is None:
            name_map = {}
        if not isinstance(name_map, dict):
            raise TypeError("param 'name_map' must be an instance of dict")
        self.name_map = name_map
        if not isinstance(srid, int):
            raise TypeError("param 'srid' must be an instance of int")
        self.srid = srid
        if not stdout:
            stdout = sys.stdout
        self.stdout = stdout

    def get_name(self, division):
        """Return the (possibly re-mapped) name of a boundary feature."""
        name = division[self.name_column].value
        if name in self.name_map:
            return self.name_map[name]
        return name

    def make_name_map(self, legislation_names, boundary_names):
        """Interactively build a {boundary name: legislation name} map.

        For every legislation name missing from the boundary data, the user
        picks the matching boundary name (or skips). Returns None when
        running under tests.
        """
        if settings.IN_TESTING:
            return None
        legislation_names = set(legislation_names)
        boundary_names = set(boundary_names)
        missing_from_leg = sorted(boundary_names - legislation_names)
        # Renamed from `map`, which shadowed the builtin.
        name_map = {}
        for name in sorted(legislation_names):
            if name not in boundary_names:
                self.stdout.write(
                    inspect.cleandoc(
                        f"""Legislation is expecting a division called
                        \t{name}
                        but that doesn't exist in the boundary data
                        Might it be one of these?
                        """
                    )
                )
                for i, missing_name in enumerate(missing_from_leg, start=1):
                    self.stdout.write(f"\t {i}. {missing_name}")
                match = None
                while not match:
                    match = input(
                        "Pick a number to match or enter 's' to skip: "
                    )
                    if match:
                        if match == "s":
                            break
                        # Robustness fix: re-prompt on non-numeric input
                        # instead of crashing the whole session.
                        try:
                            match = int(match)
                        except ValueError:
                            match = None
                if match != "s":
                    matched_name = missing_from_leg.pop(match - 1)
                    self.stdout.write(
                        f"Asserting that {name} is the same as {matched_name}"
                    )
                    name_map[matched_name] = name
        return name_map

    def check_names(self):
        """Verify boundary names match legislation names one-to-one.

        Raises ValueError on a count mismatch, MapCreationNeededException
        after printing a candidate name_map, or DiffException carrying the
        name diff. Returns True when everything matches.
        """
        legislation_names = sorted(
            [div.name for div in self.div_set.divisions.all()]
        )
        boundary_names = sorted([self.get_name(div) for div in self.data])
        if len(legislation_names) != len(boundary_names):
            raise ValueError(
                "Expected %i boundaries in input file, found %i"
                % (len(legislation_names), len(boundary_names))
            )
        if legislation_names != boundary_names:
            map_data = self.make_name_map(legislation_names, boundary_names)
            if map_data:
                self.stdout.write(
                    "\nYou need to save this file as `name_map.json`:"
                )
                self.stdout.write(json.dumps(map_data, indent=4))
                raise MapCreationNeededException()
            # create a 'diff' of the 2 lists
            # so we can work out what we need to fix
            diff = ndiff(legislation_names, boundary_names)
            raise DiffException("legislation_names != boundary_names", diff)
        return True

    def build_objects(self):
        """Build (unsaved) DivisionGeography objects for every feature."""
        div_geogs = []
        for feature in self.data:
            name = self.get_name(feature)
            division = OrganisationDivision.objects.get(
                divisionset=self.div_set, name=name
            )
            div_geogs.append(
                DivisionGeography(
                    division=division,
                    geography=feature.multipolygon,
                    source=self.source,
                )
            )
        return div_geogs

    @transaction.atomic
    def save_all(self, objects):
        """Persist all records atomically: either every geography saves or none."""
        for record in objects:
            record.save()

    def import_data(self):
        """Entry point: re-project the layer, validate names, then import."""
        self.data = pre_process_layer(self.data, self.srid)
        self.check_names()
        div_geogs = self.build_objects()
        self.save_all(div_geogs)
        return
|
# NOTE(review): this chunk reads like pasted notebook/course exercises —
# `df`, `X_new`, `sns`, `X_fertility` and `df_columns` are referenced but
# never defined in this chunk; confirm they come from earlier cells.
df.describe()
df.info()
df.head()
##KNN
# Import KNeighborsClassifier from sklearn.neighbors
from sklearn.neighbors import KNeighborsClassifier
# Create arrays for the features and the response variable
y = df['party'].values #.values makes both X and y plain NumPy arrays
X = df.drop('party', axis=1).values #drop the 'party' (target) column from the features
# Create a k-NN classifier with 6 neighbors
knn = KNeighborsClassifier(n_neighbors=6)
# Fit the classifier to the data
knn.fit(X,y)
# Predict labels on the training data itself
y_pred = knn.predict(X)
# Predict the label for the new observations X_new
new_prediction = knn.predict(X_new)
print("Prediction: {}".format(new_prediction))
##model performance
from sklearn import datasets
import matplotlib.pyplot as plt
# Load the digits dataset: digits
digits = datasets.load_digits()
# Print the keys and DESCR of the dataset
print(digits.keys())
print(digits['DESCR'])
# Print the shape of the images and data keys
print(digits.images.shape)
print(digits.data.shape)
# Display digit 1010
plt.imshow(digits.images[1010], cmap=plt.cm.gray_r, interpolation='nearest')
plt.show()
# Import necessary modules
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import train_test_split
# Create feature and target arrays
X = digits.data
y = digits.target
# Split into training and test set
# Hold out 20% for testing; random_state fixes the shuffle seed, and
# stratify=y keeps class proportions equal across the two splits.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state= 42, stratify= y)
# Create a k-NN classifier with 7 neighbors: knn
knn = KNeighborsClassifier(n_neighbors = 7)
# Fit the classifier to the training data
knn.fit(X_train,y_train)
# Print the accuracy
print(knn.score(X_test, y_test))
#Overfitting and underfitting
#Sweep k over 1..8 and record the model's accuracy on train and test data
# Setup arrays to store train and test accuracies
# NOTE(review): `np` is used here before the `import numpy as np` below.
neighbors = np.arange(1, 9)
train_accuracy = np.empty(len(neighbors))
test_accuracy = np.empty(len(neighbors))
# Loop over different values of k
for i, k in enumerate(neighbors):
    # Setup a k-NN Classifier with k neighbors: knn
    knn = KNeighborsClassifier(n_neighbors=k)
    # Fit the classifier to the training data
    knn.fit(X_train,y_train)
    #Compute accuracy on the training set
    train_accuracy[i] = knn.score(X_train, y_train)
    #Compute accuracy on the testing set
    test_accuracy[i] = knn.score(X_test, y_test)
# Generate plot
plt.title('k-NN: Varying Number of Neighbors')
plt.plot(neighbors, test_accuracy, label = 'Testing Accuracy')
plt.plot(neighbors, train_accuracy, label = 'Training Accuracy')
plt.legend()
plt.xlabel('Number of Neighbors')
plt.ylabel('Accuracy')
plt.show()
##regression
##preparing data
# Import numpy and pandas
import numpy as np
import pandas as pd
df = pd.read_csv("gapminder.csv")
# note: .values is an attribute, not a method (no parentheses) — Series.values
y = df['life'].values
X = df['fertility'].values
print("Dimensions of y before reshaping: {}".format(y.shape))
print("Dimensions of X before reshaping: {}".format(X.shape))
#Dimensions of y before reshaping: (139,)
# Reshape X and y
# reshape(-1,1) turns the 1-D arrays into single-column 2-D arrays,
# the shape scikit-learn expects for a feature/target matrix.
y = y.reshape(-1,1)
X = X.reshape(-1,1)
# Print the dimensions of X and y after reshaping
print("Dimensions of y after reshaping: {}".format(y.shape))
print("Dimensions of X after reshaping: {}".format(X.shape))
#Dimensions of y after reshaping: (139, 1)
sns.heatmap(df.corr(), square=True, cmap='RdYlGn')
#model score (OLC)
# Import LinearRegression
from sklearn.linear_model import LinearRegression
# Create the regressor: reg
reg = LinearRegression()
# Create the prediction space
# np.linspace produces evenly spaced values between min and max
prediction_space = np.linspace(min(X_fertility), max(X_fertility)).reshape(-1,1)
# Fit the model to the data
reg.fit(X_fertility, y)
# Compute predictions over the prediction space: y_pred
y_pred = reg.predict(prediction_space)
# Print R^2
print(reg.score(X_fertility, y))
# Plot regression line
plt.plot(prediction_space, y_pred, color='black', linewidth=3)
plt.show()
#Model prediction
# Import necessary modules
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split
#train/test split (30% held out)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.3, random_state=42)
#instantiate the regressor
reg_all = LinearRegression()
# Fit the regressor to the training data
reg_all.fit(X_train,y_train)
# Predict on the test data: y_pred
y_pred = reg_all.predict(X_test)
#compute R^2 and RMSE
print("R^2: {}".format(reg_all.score(X_test, y_test)))
rmse = np.sqrt(mean_squared_error(y_test, y_pred))
print("Root Mean Squared Error: {}".format(rmse))
##Cross validation
# Import the necessary modules
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import cross_val_score
# Create a linear regression object: reg
reg = LinearRegression()
# Compute 5-fold cross-validation scores: cv_scores
cv_scores = cross_val_score(reg,X, y, cv=5)
# Print the 5-fold cross-validation scores
print(cv_scores)
print("Average 5-Fold CV Score: {}".format(np.mean(cv_scores)))
##lasso regression
#you will find that the coefficients of some features are shrunk to 0, with only the most important ones remaining.
# Import Lasso
from sklearn.linear_model import Lasso
# Instantiate a lasso regressor: lasso
# NOTE(review): the `normalize` parameter was removed in scikit-learn 1.2 —
# on modern versions scale the inputs in a Pipeline with StandardScaler instead.
lasso = Lasso(alpha = 0.4, normalize = True)
# Fit the regressor to the data
lasso.fit(X,y)
# Compute and print the coefficients
lasso_coef = lasso.coef_
print(lasso_coef)
# Plot the coefficients
plt.plot(range(len(df_columns)), lasso_coef)
plt.xticks(range(len(df_columns)), df_columns.values, rotation=60)
plt.margins(0.02)
plt.show()
|
#In main program, three different methods are called to show the threefold model
#The service designed in this program is to generate a four_digit class id based on an input integer
#Both the first method and second method could generate the class id, however there's possibility
#that a fault could happen and results in the wrong class_id, which could be detected and solved
#in threefold dimension model
import sys
import second_method
import third_method
from collections import Counter
import os

if __name__ == "__main__":
    # The input base number used for generating the class_id (simulates the
    # input data). Read inside the main guard so importing this module no
    # longer crashes when no CLI argument is present.
    base_num = sys.argv[1]

    ############# First Method #############
    # running the first method in 1T/nH/1S model
    results = []
    #remotely running first method on ukko092 - 094 and save the results
    # Bug fix: range(92, 94) only covered nodes 092 and 093, although the
    # comment (and a majority vote over three replicas) expects 092-094.
    for i in range(92, 95):
        ukkonode = 'xgli@ukko0' + str(i) + '.hpc.cs.helsinki.fi '
        comm = 'ssh ' + ukkonode + 'python3 /cs/home/xgli/Distributed_System_Exercise_2016/big_exercise_3/first_method.py ' \
            + str(base_num) + ' > id.txt'
        os.system(comm)
        # Read the node's answer back; the context manager closes the handle
        # that the original code leaked on every iteration.
        with open('id.txt', 'r') as f:
            lines = f.read().split('\n')
        temp = lines[0].split()
        r = temp[0]
        results.append(r)
    print(results)
    #a simple decision algorithm: choose the result which appears the most times in results[]
    results_count = Counter(results)
    top = results_count.most_common(1)
    temp = top[0]
    print(results_count)
    print(top)
    print(temp)
    result_1 = temp[0]
    print('the class id generated by first_method is: ' + result_1)
    ########################################
    ############# Second Method ############
    result_2 = second_method.second_method(base_num)
    #detect whether there's an error by checking the length of class_id
    #if not call the method again
    while (len(str(result_2)) != 4):
        result_2 = second_method.second_method(base_num)
        print("An error has been detected in second_method!")
    print('The class id generated by second_method is: ' + str(result_2))
    ########################################
    ############# Third Method ############
    #simulate an error
    third_method.third_method(base_num)
    ########################################
|
import json

from serializer import Serializer


class YamlSerializer(Serializer):
    """Serializer converting between Python objects and JSON strings.

    Bug fixes: `json` was never imported (every call raised NameError), and
    the methods lacked `self`, so instance calls passed the instance as the
    payload. They are now static methods, which keeps class-level calls
    (`YamlSerializer.parse(obj)`) working and also fixes instance calls.

    NOTE(review): despite the Yaml* name this class (de)serializes JSON —
    confirm whether YAML was actually intended.
    """

    # function that parses py-obj to json-str
    @staticmethod
    def parse(obj):
        return json.dumps(obj)

    # function that unparses json-str to py-obj
    @staticmethod
    def unparse(str_data):
        return json.loads(str_data)
|
import logging
import sys
import inject
sys.path.insert(0,'../../../python')
from model.config import Config
logging.getLogger().setLevel(logging.INFO)
from autobahn.asyncio.wamp import ApplicationSession
from asyncio import coroutine
'''
python3 findUsersByIds d44e92c1-d277-4a45-81dc-a72a76f6ef8d
python3 findUsersByIds id1 id2 id3 ...
'''
# Register the shared Config (loaded from server-config.cfg) with the
# dependency injector so any component can request it via inject.instance.
def config_injector(binder):
    binder.bind(Config, Config('server-config.cfg'))


inject.configure(config_injector)
config = inject.instance(Config)
class WampMain(ApplicationSession):
    """WAMP session that looks up users by the ids given on the command line."""

    def __init__(self, config=None):
        logging.debug('instanciando WampMain')
        ApplicationSession.__init__(self, config)

    @coroutine
    def onJoin(self, details):
        """On joining the realm: collect the ids from argv and print each user
        returned by the remote 'users.findUsersByIds' procedure."""
        logging.info("********** USUARIOS **********")
        # At least one user id must follow the script name.
        if len(sys.argv) < 2:
            sys.exit("Error de parámetros")
        ids = []
        for i in range(1, len(sys.argv)):
            ids.append(sys.argv[i])
        users = yield from self.call('users.findUsersByIds', ids)
        for user in users:
            logging.info(user)
if __name__ == '__main__':
    from autobahn.asyncio.wamp import ApplicationRunner
    from autobahn.wamp.serializer import JsonSerializer

    # Connection parameters come from server-config.cfg via the injector.
    url = config.configs['server_url']
    realm = config.configs['server_realm']
    debug = config.configs['server_debug']
    # NOTE(review): this local name shadows the stdlib `json` module.
    json = JsonSerializer()
    runner = ApplicationRunner(url=url, realm=realm, debug=debug, debug_wamp=debug, debug_app=debug, serializers=[json])
    runner.run(WampMain)
|
from src.Algorithms.abstract_algorithm import AbstractAlgorithm


class DFSAlgorithm(AbstractAlgorithm):
    """Depth-first-search solver for a knight's path between two squares."""

    def __init__(self, knight, endpoint, board):
        super().__init__(knight, endpoint, board)
        self.__stack_path = []  # squares on the current DFS branch
        self.__found = False    # set once the endpoint has been reached

    def calculate_path(self):
        """Run the DFS from the knight's starting square."""
        self.__found = False
        self.__dfs_helper(self.initial_x, self.initial_y)

    def __get_path(self):
        # Hand the completed path to the board for colouring.
        self.board.colour_path(self.__stack_path)

    def __dfs_helper(self, x_coordinate, y_coordinate):
        """Recursive DFS step: visit (x, y), recurse into unvisited knight
        moves, and backtrack (pop) when the square leads nowhere."""
        self.visited[x_coordinate][y_coordinate] = True
        self.__stack_path.append((x_coordinate, y_coordinate))
        if x_coordinate == self.x_endpoint and y_coordinate == self.y_endpoint:
            self.__found = True
            self.__get_path()
            return
        for i in range(8):
            adjacent_x = x_coordinate + self.knight_movement[i][0]
            adjacent_y = y_coordinate + self.knight_movement[i][1]
            if self.is_inside_board(adjacent_x, adjacent_y) and not self.visited[adjacent_x][adjacent_y]:
                self.__dfs_helper(adjacent_x, adjacent_y)
                if self.__found:
                    # Bug fix: the original kept exploring (and popping path
                    # entries) after the endpoint was found, mutating the
                    # very list that had just been handed to colour_path.
                    return
        # Dead end: remove this square from the path and backtrack.
        self.__stack_path.pop()
|
from pdfminer.converter import PDFPageAggregator
from pdfminer.layout import LAParams
from pdfminer.pdfparser import PDFParser
# NOTE(review): importing PDFDocument from pdfminer.pdfinterp is the legacy
# pdfminer API; recent pdfminer.six releases moved it to pdfminer.pdfdocument
# and changed the setup calls below — this code is pinned to the old API.
from pdfminer.pdfinterp import PDFDocument
from pdfminer.pdfinterp import PDFResourceManager
from pdfminer.pdfinterp import PDFPageInterpreter
from pdfminer.pdfdevice import PDFDevice
import os

# Path of the purchase-order PDF to scrape.
path = './PrintOrder.pdf'
# import pdf file
file = open(path, 'rb')
parser = PDFParser(file)
doc = PDFDocument()
parser.set_document(doc)
doc.set_parser(parser)
doc.initialize()
resource = PDFResourceManager()
laparam = LAParams()
device = PDFPageAggregator(resource, laparams=laparam)
interpreter = PDFPageInterpreter(resource, device=device)

# Collect the text of every layout object on every page, one string per
# text box, in the order pdfminer produces them.
test = []
for page in doc.get_pages():
    interpreter.process_page(page)
    layout = device.get_result()
    for out in layout:
        if hasattr(out, "get_text"):
            test.append(out.get_text())
# pdf-green ['title', 'content', 'title', 'content'...]
# Split each text box into segments: a segment ends at a newline or a colon
# (ASCII or full-width), and empty segments are dropped.
grandContent = ''
grandList = []
for i in range(0, len(test)):
    tempList = []
    for chars in test[i]:
        if (chars == '\n' or chars == ':' or chars == '：'):
            # Bug fix: str.strip() returns a new string — the original
            # discarded the result and appended the untrimmed text.
            tempList.append(grandContent.strip())
            grandContent = ''
        else:
            grandContent += chars
    # Bug fix: the original removed items from tempList while iterating it,
    # which skips the element after each removal (consecutive empty
    # segments survived). Filter into a new list instead.
    tempList = [contents for contents in tempList if contents != '']
    grandList.append(tempList)
# data processing
# NOTE(review): every loop below initialises `str = ''` (shadowing the
# builtin) and then scans for that empty "keyword". With a zero-length
# keyword, `grandfather[i:i+0] == ''` is True immediately at i=0, so each
# loop just grabs text from the start of every page. These look like search
# keywords that were blanked out/redacted — restore them before relying on
# this extraction.
result = []
for grandfather in test:
    str = ''
    for i in range(0, len(grandfather)-len(str)):
        content = grandfather[i:i+len(str)]
        if (content == str):
            # Take the 12 characters following "<keyword>: ".
            resultTemp = grandfather[(i+2+len(str)):(i+2+len(str)+12)]
            result.append({content: resultTemp})
            break
for grandfather in test:
    str = ''
    for i in range(0, len(grandfather)-len(str)):
        content = grandfather[i:i+len(str)]
        if (content == str):
            # Same scan, but with a 10-character value window.
            resultTemp = grandfather[(i+2+len(str)):(i+2+len(str)+10)]
            result.append({content: resultTemp})
            break
for grandfather in test:
    str = ''
    for i in range(0, len(grandfather)-len(str)):
        content = grandfather[i:i+len(str)]
        if (content == str):
            TEMP = ''
            # Empty value (immediate newline): give up on this page.
            if (grandfather[i+len(str)+1] == '\n'):
                break
            else:
                # Collect characters up to the next newline, skipping
                # spaces and ASCII colons.
                for j in range(i+len(str), len(grandfather)):
                    if (grandfather[j] == '\n'):
                        break
                    if ((grandfather[j] == ' ') or (grandfather[j] == ':')):
                        continue
                    TEMP = TEMP + grandfather[j]
                result.append({content: TEMP})
                break
for grandfather in test:
    str = ''
    for i in range(0, len(grandfather)-len(str)):
        content = grandfather[i:i+len(str)]
        if (content == str):
            TEMP = ''
            if (grandfather[i+len(str)+1] == '\n'):
                break
            else:
                for j in range(i+len(str), len(grandfather)):
                    if (grandfather[j] == '\n'):
                        break
                    if ((grandfather[j] == ' ') or (grandfather[j] == ':')):
                        continue
                    TEMP = TEMP + grandfather[j]
                result.append({content: TEMP})
                break
for grandfather in test:
    str = ''
    for i in range(0, len(grandfather)-len(str)):
        content = grandfather[i:i+len(str)]
        if (content == str):
            TEMP = ''
            # Variant: re-slice the key without its final character and read
            # values one character ahead (grandfather[j+1]).
            content = grandfather[i:i+len(str)-1]
            if (grandfather[i+len(str)+1] == '\n'):
                break
            else:
                for j in range(i+len(str), len(grandfather)):
                    if (grandfather[j+1] == '\n'):
                        break
                    if ((grandfather[j] == ' ') or (grandfather[j] == ':')):
                        continue
                    TEMP = TEMP + grandfather[j+1]
                result.append({content: TEMP})
                break
for grandfather in test:
    str = ''
    for i in range(0, len(grandfather)-len(str)):
        content = grandfather[i:i+len(str)]
        if (content == str):
            TEMP = ''
            content = grandfather[i:i+len(str)-1]
            if (grandfather[i+len(str)+1] == '\n'):
                break
            else:
                for j in range(i+len(str), len(grandfather)):
                    if (grandfather[j+1] == '\n'):
                        break
                    if ((grandfather[j] == ' ') or (grandfather[j] == ':')):
                        continue
                    TEMP = TEMP + grandfather[j+1]
                result.append({content: TEMP})
                break
# Check result
print(result, '\n\n')
keywords = ''
result_table = []
# NOTE(review): `keywords` is blank here too (scrubbed, like the `str`
# keywords above), and the magic offsets (grandList[2], [14:-3], groups of
# three) are tied to the exact layout of PrintOrder.pdf — verify against a
# sample document before reuse.
grandFather = grandList[2]
temp01 = grandFather[14:-3]
# Three columns per table row; the first three entries are the header cells.
amount = int((len(temp01)-2)/3)
for x in range(0, amount-1):
    result_table.append([{temp01[0]: temp01[3*x+3]}, {temp01[1]: temp01[3*x+1+3]}, {temp01[2]:temp01[3*x+2+3]}])
# Trailing pair: the last two entries form a key/value (e.g. a total line).
result_table.append({temp01[-2]:temp01[-1]})
print(result_table, '\n\n')
keywords = ''
result_credit = []
for fff in range(0, len(grandList)-1):
    grandFather = grandList[fff]
    for i in range(0, len(grandFather)-1):
        father = grandFather[i]
        for j in range(0, len(father)-len(keywords)):
            if (father[j:j+len(keywords)] == keywords):
                # The five lists after the match appear to hold the
                # name/code/time/cost columns plus the reimbursement and
                # free-text cells — presumably; verify with a sample PDF.
                real_name = grandList[fff+1]
                real_code = grandList[fff+2]
                real_time = grandList[fff+3]
                real_cost = grandList[fff+4]
                real_reim = []
                real_content = []
                real_reim.append(grandList[fff+5][0])
                real_content.append(grandList[fff+6][0])
                Content = ''
                # Split the reimbursement cell on spaces; the remainder after
                # the last space becomes the row's free-text content.
                for contents in grandList[fff+5][1:]:
                    for chars in contents:
                        if (chars == ' '):
                            real_reim.append(Content)
                            Content = ''
                        else:
                            Content += chars
                    real_content.append(Content)
                    Content = ''
                for x in range(1, len(real_name)-1):
                    result_credit.append([{'': real_name[x]}, {'': real_code[x]}, {'': real_time[x]}, {'': real_cost[x]}, {'': real_reim[x]}, {'': real_content[x]}])
print(result_credit, '\n\n')
|
import re
import uuid
import boto3
from flask import current_app
# MIME types accepted for avatar uploads.
# Bug fix: browsers send JPEG files as "image/jpeg" (the registered type);
# the set only contained the non-standard "image/jpg", so every real JPEG
# upload was rejected. Both spellings are now accepted.
IMAGE_MIME_TYPES = set(['image/png', 'image/jpg', 'image/jpeg', 'image/gif'])
AVATAR_MAX_SIZE = 2 * 1024 * 1024  # 2MB
class AvatarException(Exception):
    """Raised for unacceptable avatar types or malformed avatar URLs."""
def allowed_avatar_type(mimetype):
    """Return True when *mimetype* is an accepted avatar image type.

    Raises:
        AvatarException: when the type is not in IMAGE_MIME_TYPES.
    """
    if mimetype in IMAGE_MIME_TYPES:
        return True
    raise AvatarException("Unacceptable file type: {0}".format(mimetype))
def extract_avatar_filename(url):
    """Return the '<user_id>.<token>.<ext>' filename ending an avatar URL.

    Raises:
        AvatarException: when the URL does not end in an avatar filename.
    """
    match = re.search(r'avatars/(\d+\.\w+\.\w+)$', url)
    if not match:
        raise AvatarException("Unable to extract avatar filename from %s" % url)
    return match.group(1)
def construct_avatar_url(filename):
    """Build the public S3 URL for an avatar *filename*."""
    bucket = current_app.config['S3_BUCKET']
    return "https://%s.s3.amazonaws.com/avatars/%s" % (bucket, filename)
def remove_avatar(url, user_id):
    """Delete the S3 object behind an avatar *url*, but only when the
    filename's leading id matches *user_id* (prevents deleting another
    user's avatar)."""
    bucket = current_app.config['S3_BUCKET']
    filename = extract_avatar_filename(url)
    owner = re.search(r'^(\d+)\.\w+\.\w+$', filename)
    if owner and owner.group(1) == str(user_id):
        boto3.resource('s3').Object(bucket, 'avatars/' + filename).delete()
def sign_avatar_upload(mimetype, user_id):
    """Create a presigned S3 POST for uploading an avatar.

    Returns a dict with the presigned POST fields ('data') and the final
    public URL ('url'), or None when *mimetype* is falsy. Raises
    AvatarException for unacceptable image types.
    """
    bucket = current_app.config['S3_BUCKET']
    if not (mimetype and allowed_avatar_type(mimetype)):
        return None
    # Key layout: avatars/<user_id>.<random-hex>.<extension>
    extension = mimetype.replace('image/', '')
    filename = "{0}.{1}.{2}".format(user_id, uuid.uuid4().hex, extension)
    key = "avatars/" + filename
    client = boto3.client('s3')
    presigned_post = client.generate_presigned_post(
        Bucket=bucket,
        Key=key,
        Fields={"acl": "public-read", "Content-Type": mimetype},
        Conditions=[
            {"acl": "public-read"},
            {"Content-Type": mimetype},
            ["content-length-range", 0, AVATAR_MAX_SIZE]
        ],
        ExpiresIn=3600
    )
    return {
        "data": presigned_post,
        "url": construct_avatar_url(filename)
    }
|
# Copy the contents of 1.txt into 2.txt line by line.
# Bug fix: use context managers so both handles are closed even if an
# exception occurs mid-copy — the original leaked them on any error.
with open('1.txt', 'r') as src, open('2.txt', 'w') as dst:
    for line in src:
        dst.write(line)
|
import pandas as pd
import matplotlib.pyplot as plt

# House-sales dataset pulled from a shared Google Drive link.
# NOTE(review): depends on that Drive file staying public — consider
# vendoring the CSV alongside the code.
house = pd.read_csv('https://drive.google.com/uc?export=download&id=1kgJseOaDUCG-p-IoLIKbnL23XHUZPEwm')
#%% numerical
house['bedrooms'].describe()
'''
box plot
'''
fig = plt.figure(figsize=(10,8))
house['bedrooms'].plot.box()
fig = plt.figure(figsize=(10,8))
house['bathrooms'].plot.box()
plt.boxplot(house['bathrooms'], whis=(0,100)) # 0 quantile = min, 100 quantile = max
'''
histogram
'''
house['bedrooms'].plot.hist(bins=20)
'''
Kernel Density Estimati
'''
house['bedrooms'].plot.kde() # too concentrated — the bandwidth (window size) needs increasing
house['bedrooms'].plot.kde(bw_method=2)
'''
묶어서 그래프 그리기
'''
house.groupby('yr_built')['price'].mean().plot() # x-axis: year built, y-axis: mean price
house[house['yr_renovated']>0].groupby('yr_renovated')['price'].mean().plot()
# for renovated houses only: relationship between renovation year and price
'''
scatter plot
'''
from pandas.plotting import scatter_matrix
scatter_matrix(house[['price', 'bedrooms','bathrooms']], figsize=(10,10))
# diagonal shows histograms, off-diagonal scatter plots; if two features
# carry duplicate information, drop one
plt.scatter(house['bedrooms'], house['price']) # check for outliers
'''
corr 확인
'''
corr = house[['price', 'bedrooms','bathrooms','sqft_living','sqft_lot', 'floors']].corr()
# sqft_living correlates most strongly with price (the output)
cax=plt.imshow(corr, vmin=-1, vmax=1, cmap=plt.cm.RdBu) # correlations lie in -1..1; cmap sets the colours
plt.colorbar(cax) # visualise the correlation matrix
#%% Categorical
freq = house['grade'].value_counts()
'''
bar plot
'''
house['waterfront'].value_counts().plot.bar()
'''
category 별로 box plot
'''
house.boxplot(column=['price'], by='waterfront')
#%% Linear Regression
import statsmodels.api as sm
from sklearn import datasets
# NOTE(review): load_boston was removed in scikit-learn 1.2 (ethical
# concerns with the dataset) — this section needs an older sklearn or a
# replacement dataset.
boston = datasets.load_boston()
X = boston.data
y = boston.target
X = sm.add_constant(X) # adds an all-ones intercept column to every row
model = sm.OLS(y, X) # ordinary least squares regression model
result = model.fit()
result.summary()
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-12-01 10:59
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Adds the ResultJSON result type, drops Option.executable, and tightens
    # the Attribute.dtype choices and JobDescription.executable FK.

    dependencies = [
        ('lhcbpr_api', '0002_auto_20160419_0727'),
    ]

    operations = [
        # New multi-table-inheritance child of JobResult storing raw JSON text.
        migrations.CreateModel(
            name='ResultJSON',
            fields=[
                ('jobresult_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='lhcbpr_api.JobResult')),
                ('data', models.TextField()),
            ],
            bases=('lhcbpr_api.jobresult',),
        ),
        migrations.RemoveField(
            model_name='option',
            name='executable',
        ),
        # NOTE(review): the b'...' bytestring choices are as generated under
        # Python 2 / Django 1.9; leave them untouched — editing a historical
        # migration changes its recorded state.
        migrations.AlterField(
            model_name='attribute',
            name='dtype',
            field=models.CharField(choices=[(b'String', b'String'), (b'JSON', b'JSON'), (b'Float', b'Float'), (b'Integer', b'Integer'), (b'DateTime', b'DateTime'), (b'File', b'File')], max_length=10),
        ),
        migrations.AlterField(
            model_name='jobdescription',
            name='executable',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='job_descriptions', to='lhcbpr_api.Executable'),
        ),
    ]
|
import camera

# Bug fixes: the original used the Python 2 `print len(...)` statement,
# which is a SyntaxError on Python 3, and rebound the name `camera`,
# shadowing the module it had just imported.
cam = camera.Camera(False)
while True:
    frame = cam.read()
    # Report the size of each captured frame buffer.
    print(len(frame.getvalue()))
|
from datetime import datetime
from requests.exceptions import ConnectionError
from django.test import TestCase
from fetcher.tools import TleParser
class TleParserTestCase(TestCase):
    """
    Test the behavior of the TleParser tool.

    Note: all assertions use assertEqual — the assertEquals alias was
    deprecated since Python 3.2 and removed in Python 3.12.
    """

    def test_canImportTlecatparser(self):
        """
        Test if TleParser exists
        """
        try:
            from fetcher.tools import TleParser
        except ImportError:
            self.fail('Cannot import TleParser')

    def test_defaultFormatIsCelesTrak(self):
        """
        Test if the default format is CelesTrak
        """
        parser = TleParser()
        self.assertEqual(TleParser.CELESTRAK, parser.format)

    def test_explodeWorksInSimpleCase(self):
        """
        Test if TleParser parses a standard three-line TLE correctly
        """
        lines = [
            'ISS (ZARYA)             ',
            '1 25544U 98067A   17236.53358279  .00001862  00000-0  35301-4 0  9994',
            '2 25544  51.6396  57.6070 0005086 172.6034 285.4459 15.54193317 72385',
        ]
        expected = {
            'line_0_full': 'ISS (ZARYA)',
            'name': 'ISS (ZARYA)',
            'line_1_full': '1 25544U 98067A   17236.53358279  .00001862  00000-0  35301-4 0  9994',
            'line_number': '1',
            'satellite_number': '25544',
            'classification': 'U',
            'international_designator_year': '98',
            'international_designator_number': '067',
            'international_designator_piece': 'A',
            'epoch_year': '17',
            'epoch_day': '236.53358279',
            'first_derivative_mean_motion': '0.00001862',
            'second_derivative_mean_motion': '0',
            'drag': 0.00035301,
            'set_number': '999',
            'first_checksum': '4',
            'line_2_full': '2 25544  51.6396  57.6070 0005086 172.6034 285.4459 15.54193317 72385',
            'inclination': '51.6396',
            'ascending_node': '57.6070',
            'eccentricity': '0.0005086',
            'perigee_argument': '172.6034',
            'mean_anomaly': '285.4459',
            'mean_motion': '15.54193317',
            'revolution_number': '7238',
            'second_checksum': '5',
        }
        parser = TleParser()
        data = parser.explode(lines)
        # Pass the field name as the assertion message so a failure
        # identifies which field was mis-parsed.
        for n in expected:
            self.assertEqual(expected[n], data[n], n)

    def test_explodeWorksWhenNoDrag(self):
        """
        Test if TleParser parses data correctly when there is no drag
        """
        lines = [
            'GOES 13                 ',
            '1 29155U 06018A   17236.28392374 -.00000277  00000-0  00000+0 0  9990',
            '2 29155   0.0320 231.9245 0003315 245.6473 242.4487  1.00264274 41246',
        ]
        expected = {
            'line_0_full': 'GOES 13',
            'name': 'GOES 13',
            'line_1_full': '1 29155U 06018A   17236.28392374 -.00000277  00000-0  00000+0 0  9990',
            'line_number': '1',
            'satellite_number': '29155',
            'classification': 'U',
            'international_designator_year': '06',
            'international_designator_number': '018',
            'international_designator_piece': 'A',
            'epoch_year': '17',
            'epoch_day': '236.28392374',
            'first_derivative_mean_motion': '0.00000277',
            'second_derivative_mean_motion': '0',
            'drag': 0,
            'set_number': '999',
            'first_checksum': '0',
            'line_2_full': '2 29155   0.0320 231.9245 0003315 245.6473 242.4487  1.00264274 41246',
            'inclination': '0.0320',
            'ascending_node': '231.9245',
            'eccentricity': '0.0003315',
            'perigee_argument': '245.6473',
            'mean_anomaly': '242.4487',
            'mean_motion': '1.00264274',
            'revolution_number': '4124',
            'second_checksum': '6',
        }
        parser = TleParser()
        data = parser.explode(lines)
        for n in expected:
            self.assertEqual(expected[n], data[n], n)

    def test_format_drag_positive(self):
        """
        Test if the drag is correctly decoded for a positive mantissa
        """
        value = "88849-3"
        expected = 0.0088849
        tle = TleParser()
        self.assertEqual(expected, tle.format_drag(value))

    def test_format_drag_negative(self):
        """
        Test if the drag is correctly decoded for a negative mantissa
        """
        value = "-62555-3"
        expected = 0.0062555
        tle = TleParser()
        self.assertEqual(expected, tle.format_drag(value))

    def test_format_drag_zero(self):
        """
        Test if the drag is correctly decoded when the field is all zeros
        """
        value = "00000+0"
        expected = 0
        tle = TleParser()
        self.assertEqual(expected, tle.format_drag(value))
|
import os, os.path as osp
from torch.utils.cpp_extension import load

# Directory containing this file; the C++ source sits alongside it.
dir_path = osp.dirname(osp.realpath(__file__))

# JIT-compile and load the LLTM C++ extension. The first call builds it
# with -O2; later calls reuse the cached build artefacts.
lltm_cpp = load(
    name="lltm_cpp",
    sources=[osp.join(dir_path, "lltm.cpp")],
    extra_cflags=['-O2'],
    verbose=True,
)

# Example usage
# from lltm.jit import lltm_cpp
|
import random
import logging
class AbstractSolver:
"""Base class for all maze solvers. Every solver implements its own algorithm to solve a maze by using
or overriding the base class methods."""
def __init__(self, seed=0):
self.log = logging.getLogger(__name__)
self.path = []
self.log.debug("generating a " + self.__class__.__name__)
self.random = random
if seed != 0:
self.random.seed(seed)
    # Abstract hook: intentionally has no body (returns None); concrete
    # solvers override it with their algorithm.
    def solve_maze(self, maze):
        """Solve a maze. The solving algorithm is decided by the instantiating class.
        This method has to be implemented by the concrete subclassing solver."""
# cleans the path of redundant moves
def __clean_path__(self):
i = 2
while i < len(self.path):
if self.path[i - 2] == self.path[i]:
self.__cleanup__(i - 2, i)
i = 2
else:
i += 1
def __cleanup__(self, i, j):
if i > 0 and j < len(self.path) - 1 and self.path[i - 1] == self.path[j + 1]:
self.__cleanup__(i - 1, j + 1)
else:
del self.path[(i + 1):(j + 1)]
def __is_junction__(self, cell):
self.log.debug("current cell: " + str(cell) + " is junction: " + str(cell.wall_count() < 2))
return cell.wall_count() < 2
def __isPath__(self, cell):
return cell.wall_count() == 2 and cell != self.maze.get_entrance()
def __isDeadEnd__(self, cell):
return cell.wall_count() == 3 or cell.wall_count() == 2 and cell == self.maze.get_entrance()
def __cameFromTop__(self):
previous = self.__getPrevious__()
current = self.path[-1]
return previous is not None and self.maze.get_top_neighbour(current) == previous \
or previous is None and self.maze.get_top_neighbour(current) is None \
and current.get_top().is_removed()
def __cameFromRight__(self):
previous = self.__getPrevious__()
return previous is not None and self.maze.get_right_neighbour(self.path[-1]) == previous
def __cameFromBottom__(self):
previous = self.__getPrevious__()
return previous is not None and self.maze.get_bottom_neighbour(self.path[-1]) == previous
def __cameFromLeft__(self):
previous = self.__getPrevious__()
current = self.path[-1]
return previous is not None and self.maze.get_left_neighbour(current) == previous \
or previous is None and self.maze.get_left_neighbour(current) is None \
and current.get_left().is_removed()
def __getPrevious__(self):
return self.path[-2] if len(self.path) > 1 else None
def __findNext__(self): # if there is only one way to go
current = self.path[-1]
if self.__cameFromBottom__():
if current.get_left().is_removed() and self.maze.get_left_neighbour(current) is not None:
self.__try_left__()
elif current.get_right().is_removed() and self.maze.get_right_neighbour(current) is not None:
self.__try_right__()
elif current.get_top().is_removed() and self.maze.get_top_neighbour(current) is not None:
self.__try_top__()
else:
self.log.error('No way out')
raise Exception('No way out')
elif self.__cameFromLeft__():
if current.get_bottom().is_removed() and self.maze.get_bottom_neighbour(current) is not None:
self.__try_bottom__()
elif current.get_right().is_removed() and self.maze.get_right_neighbour(current) is not None:
self.__try_right__()
elif current.get_top().is_removed() and self.maze.get_top_neighbour(current) is not None:
self.__try_top__()
else:
self.log.error('No way out')
raise Exception('No way out')
elif self.__cameFromRight__():
if current.get_bottom().is_removed() and self.maze.get_bottom_neighbour(current) is not None:
self.__try_bottom__()
elif current.get_left().is_removed() and self.maze.get_left_neighbour(current) is not None:
self.__try_left__()
elif current.get_top().is_removed() and self.maze.get_top_neighbour(current) is not None:
self.__try_top__()
else:
self.log.error('No way out')
raise Exception('No way out')
elif self.__cameFromTop__():
if current.get_bottom().is_removed() and self.maze.get_bottom_neighbour(current) is not None:
self.__try_bottom__()
elif current.get_left().is_removed() and self.maze.get_left_neighbour(current) is not None:
self.__try_left__()
elif current.get_right().is_removed() and self.maze.get_right_neighbour(current) is not None:
self.__try_right__()
else:
self.log.error('No way out')
raise Exception('No way out')
elif self.path[-1] == self.maze.get_entrance(): # We're at the entrance
self.__handle_junction__()
else:
self.log.error('Came from nowhere')
raise Exception('Came from nowhere')
def __choose_direction__(self, directions):
n = self.random.randint(0, len(directions) - 1)
directions[n]()
def __decide_next__(self):
current = self.path[-1]
if current == self.maze.get_exit():
return
self.log.debug("deciding next in: " + str(current))
if self.__is_junction__(current):
self.__handle_junction__()
self.log.debug('finished handling junction')
elif self.__isPath__(current):
self.__handle_path__()
elif self.__isDeadEnd__(current): # do nothing and go back
self.__handle_dead_end__()
else:
self.log.error('Invalid wall count')
raise Exception('Invalid wall count')
def __handle_junction__(self):
self.log.debug('handling junction...')
current = self.path[-1]
if self.__cameFromTop__():
self.log.debug('came from top...')
directions = [self.__try_bottom__, self.__try_left__, self.__try_right__]
elif self.__cameFromBottom__():
self.log.debug('came from bottom...')
directions = [self.__try_left__, self.__try_right__, self.__try_top__]
elif self.__cameFromLeft__():
self.log.debug('came from left...')
directions = [self.__try_bottom__, self.__try_right__, self.__try_top__]
elif self.__cameFromRight__():
self.log.debug('came from right...')
directions = [self.__try_bottom__, self.__try_left__, self.__try_top__]
elif self.path[-1] == self.maze.get_entrance(): # We're at the entrance
self.log.debug('we are at the entrance...')
directions = [self.__try_bottom__, self.__try_left__, self.__try_right__, self.__try_top__]
else:
self.log.error('Came from nowhere')
raise Exception('Came from nowhere')
while current == self.path[-1] and current != self.maze.get_exit():
self.__choose_direction__(directions)
def __handle_path__(self):
self.__findNext__()
def __handle_dead_end__(self):
current = self.path[-1]
[pathway] = list(filter(lambda wall: wall.is_removed() and self.maze.get_neighbour(current, wall) is not None,
current.get_walls()))
self.log.debug(self.maze.get_neighbour(current, pathway))
if pathway == current.get_top():
self.__try_top__()
elif pathway == current.get_bottom():
self.__try_bottom__()
elif pathway == current.get_left():
self.__try_left__()
elif pathway == current.get_right():
self.__try_right__()
else:
self.log.error('Came from nowhere')
raise Exception('Came from nowhere')
def __try_bottom__(self):
pass # to be implemented in subclass
def __try_left__(self):
pass # to be implemented in subclass
def __try_right__(self):
pass # to be implemented in subclass
def __try_top__(self):
pass # to be implemented in subclass
|
# -*- coding: utf-8 -*-
__all__ = ['public_method']
def public_method():
    """Print this function's name; exported via __all__."""
    # print(...) with a single argument behaves identically on Python 2
    # and 3; the original print statement was Python-2-only.
    print('public_method')
def private_method():
    """Print this function's name; deliberately absent from __all__."""
    # print(...) with a single argument behaves identically on Python 2
    # and 3; the original print statement was Python-2-only.
    print('private_method')
|
import exceptions
from tstorm.utils import release
from tstorm.utils import limit
class RangeError(Exception):
    """Raised when a release range string is malformed."""
    # Subclass the builtin Exception directly: on Python 2 it is the very
    # same object as exceptions.Exception, and this spelling also works
    # on Python 3 (where the `exceptions` module no longer exists).
    pass
class Range:
    """A release range such as "[1.0,2.0)".

    The first character is the inferior limit bracket, the last character is
    the superior limit bracket, and between them is a comma-separated
    min,max pair of releases (infinity handled by the Release class).
    """

    def __init__(self, range_value):
        """Parse *range_value*; raise RangeError on any malformed part."""
        self.sup = limit.Limit(range_value[len(range_value)-1])
        if not self.sup.is_sup():
            raise RangeError('Superior Limit is not well specified - %s' % self.sup)
        self.inf = limit.Limit(range_value[0])
        if not self.inf.is_inf():
            raise RangeError('Inferior Limit is not well specified - %s' % self.inf)
        if ',' not in range_value:
            raise RangeError('Range is not well specified - %s' % range_value)
        extreme = range_value[1:len(range_value)-1].strip().split(',')
        if len(extreme) != 2:
            # Previously this case fell through silently and caused an
            # AttributeError later (min_release/max_release never set);
            # fail fast with the intended error instead.
            raise RangeError('Range is not well specified - %s' % range_value)
        self.min_release = release.Release(extreme[0])
        self.max_release = release.Release(extreme[1])
        if not self.min_release.is_infinity() and not self.max_release.is_infinity():
            if not (self.min_release.is_lower(self.max_release) and \
                    self.max_release.is_greater(self.min_release)):
                # Fixed missing space: the message used to read "inthe range".
                msg = 'The couple Min,Max is not well specified in '
                msg += 'the range - ' + range_value
                raise RangeError(msg)

    def is_included(self, value):
        """Return True when *value* falls inside this range.

        The bracket type of each limit decides whether the comparison at
        that end is inclusive or strict.
        """
        if self.min_release.is_infinity() and self.max_release.is_infinity():
            return True
        if self.inf.is_extreme_included():
            lower_ok = self.min_release.is_lower_and_equal(value)
        else:
            lower_ok = self.min_release.is_lower(value)
        if self.sup.is_extreme_included():
            upper_ok = self.max_release.is_greater_and_equal(value)
        else:
            upper_ok = self.max_release.is_greater(value)
        return lower_ok and upper_ok
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2017-11-28 17:14:46
# @Author : swz
# @Email : js_swz2008@163.com
from bs4 import BeautifulSoup
import os
from download import http
import pymongo
import datetime
import re
import shutil
class mzitu():
    """Crawler for www.mzitu.com: walks every gallery linked from the
    archive page, downloads the images and records each gallery's metadata
    in MongoDB."""

    def __init__(self):
        # MongoDB connection and the collection storing gallery metadata.
        self.client = pymongo.MongoClient('localhost', 27017)
        self.mzitu = self.client['mzitu']
        self.meizitu_collection = self.mzitu['mzituinfo']
        self.title = ''      # title of the gallery currently being crawled
        self.url = ''        # URL of the gallery currently being crawled
        self.img_urls = []   # image URLs collected for the current gallery

    def all_url(self, url):
        """Entry point: iterate over every gallery on the archive page."""
        html = http.get(url, 5)
        all_a = BeautifulSoup(html.text, 'lxml').find('div', class_='all').find_all('a')
        if url == 'http://www.mzitu.com/all':
            del all_a[0]  # the first <a> on /all is not a gallery link
            self.mkdir('无主题')
        for a in all_a:
            title = a.get_text()
            self.title = title
            print(u'开始保存:', title)
            if not self.mkdir(title):  # skip galleries whose folder already exists
                print(u'已经跳过:', title)
                continue
            href = a['href']
            self.url = href
            # Only crawl galleries that are not already in the database.
            if self.meizitu_collection.find_one({'主题页面': href}):
                print(u'这个页面已经爬取过了')
            else:
                self.html(href)
        print(u'恭喜您下载完成啦!')
        # Move images saved outside any gallery folder into "无主题".
        self.mvfile("E:/", "E:/mzitu/无主题")

    def html(self, href):
        """Crawl one gallery: read its page count and fetch every page."""
        html = http.get(href, 5)
        # The second-to-last <span> of the pager holds the max page number.
        max_span = BeautifulSoup(html.text, 'lxml').find('div', class_='pagenavi').find_all('span')[-2].get_text()
        page_num = 0  # counter used to detect the final page
        for page in range(1, int(max_span) + 1):
            page_num = page_num + 1
            page_url = href + '/' + str(page)
            self.img(page_url, max_span, page_num)

    def img(self, page_url, max_span, page_num):
        """Fetch one page, save its image; on the last page also persist
        the gallery metadata to MongoDB."""
        img_html = http.get(page_url, 5)
        img_url = BeautifulSoup(img_html.text, 'lxml').find('div', class_='main-image').find('img')['src']
        self.img_urls.append(img_url)
        if int(max_span) == page_num:  # final page of the gallery
            self.save(img_url, page_url)
            post = {
                '标题': self.title,
                '主题页面': self.url,
                '图片地址': self.img_urls,
                '获取时间': datetime.datetime.now()
            }
            # NOTE(review): Collection.save() is deprecated in newer pymongo
            # (use insert_one/replace_one); kept as-is for compatibility with
            # the pymongo version this project pins -- confirm before upgrading.
            self.meizitu_collection.save(post)
            print(u'#############################插入数据库成功############################')
        else:
            self.save(img_url, page_url)

    def save(self, img_url, page_url):
        """Download one image into the current working directory.

        :param img_url: URL of the image
        :param page_url: URL of the page containing it (used as referer)
        :return: False when the image is missing, otherwise None
        """
        name = img_url[-9:-4]
        try:
            print(u'保存文件:', img_url)
            img = http.requestpic(img_url, page_url)
            # 'with' guarantees the handle is closed even if write() raises
            # (the original open/close pair leaked it on error).
            with open(name + '.jpg', 'ab') as f:
                f.write(img.content)
        except FileNotFoundError:
            print(u'图片不存在已跳过:', img_url)
            return False

    def mkdir(self, path):
        """Create E:/mzitu/<path> and chdir into it.

        :param path: directory name to create
        :return: True if created, False if it already existed
        """
        path = self.strip(path)  # drop characters illegal in Windows names
        isExists = os.path.exists(os.path.join("E:/mzitu", path))
        if not isExists:
            print(u'建了一个:', path, u'的文件夹!')
            os.makedirs(os.path.join("E:/mzitu", path))
            os.chdir(os.path.join("E:/mzitu", path))
            return True
        else:
            print(u'名字叫做:', path, u'的文件夹已经存在了!')
            return False

    def strip(self, path):
        """Return *path* with characters illegal in Windows folder names
        removed.

        :param path: raw folder name
        :return: sanitised folder name
        """
        return re.sub(r'[\\*|<>/:?"]', '', str(path))

    def mvfile(self, path, newpath):
        """Move every .jpg directly under *path* into *newpath*.

        :param path: directory to scan
        :param newpath: destination directory
        """
        for item in os.listdir(path):
            name = os.path.basename(item)
            full_path = os.path.join(path, name)
            if name.endswith('jpg'):
                shutil.move(full_path, newpath)  # move + rename
                print(u'###########', name, u'没有主题,已移动到无主题!###########')

    def rmdir(self, path):
        """Delete empty sub-directories of *path*.

        :param path: directory whose children are checked
        """
        for item in os.listdir(path):
            full_path = os.path.join(path, item)
            # Guard with isdir(): os.listdir() on a plain file raises
            # NotADirectoryError, which crashed the original cleanup.
            if os.path.isdir(full_path) and not os.listdir(full_path):
                shutil.rmtree(full_path, True)
                print(u'#########################', full_path, u'目录为空,已清理成功!########################')
# Guard the crawl behind __main__ so importing this module (e.g. for tests)
# does not immediately start crawling and deleting directories.
if __name__ == '__main__':
    Mzitu = mzitu()  # instantiate the crawler
    Mzitu.all_url('http://www.mzitu.com/all')  # entry point: crawl every gallery
    # Mzitu.all_url('http://www.mzitu.com/old')  # crawl the older galleries
    Mzitu.rmdir('E:/mzitu')  # clean up empty directories afterwards
|
#!/usr/bin/python
# Vivek Sinha
#
# CGI script: reports LDA monitor status for each grid cluster over SSH.

# CGI header must be emitted before any other output.
print("Content-Type: text/html")
print("")

import paramiko

# SECURITY: credentials are hard-coded in a world-readable script; move
# them to a protected config file or, better, use SSH key authentication.
USERNAME = 'vivek.sinha'
PASSWORD = 'Password@123'

# Template for one lda-monitor.sh invocation; each host runs it twice,
# once with -d and once with -r appended.
COMMAND_TEMPLATE = (
    '/opt/gridscripts/gridopstools/bin/lda-monitor.sh -q -d '
    '-s killed,failed,timedout,suspended '
    '-f /usr/local/grid_lda_deploy/monitor/lda.monitor.properties '
    '-b 25 -t 15 -c {cluster} -p lda -j summary -k dpbase {mode}'
)

# (label printed in the report, gateway host, cluster name)
# Note: the HKG1 entry previously reused "-c gold -p hkg1" by copy-paste;
# it now follows the same "-c <cluster> -p lda" pattern as the others.
CLUSTERS = [
    ('GOLD-BOX', 'glgw4006.grid.uh1.inmobi.com', 'gold'),
    ('Krypton', 'krgw4001.grid.uh1.inmobi.com', 'uh1'),
    ('UJ1', 'tzgw4001.grid.uj1.inmobi.com', 'uj1'),
    ('LHR1', 'ergw4001.grid.lhr1.inmobi.com', 'lhr1'),
    ('HKG1', 'opgw4001.grid.hkg1.inmobi.com', 'hkg1'),
]


def report(label, host, cluster):
    """SSH to *host*, run both monitor commands for *cluster* and print
    the combined stdout/stderr under *label*."""
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        ssh.connect(host, username=USERNAME, password=PASSWORD)
        command = ';'.join([
            COMMAND_TEMPLATE.format(cluster=cluster, mode='-d'),
            COMMAND_TEMPLATE.format(cluster=cluster, mode='-r'),
        ])
        stdin, stdout, stderr = ssh.exec_command(command)
        print(label)
        for line in stdout:
            print(line.strip())
        for line in stderr:
            print(line.strip())
    finally:
        # Always release the connection, even when connect/exec fails.
        ssh.close()
    print('<br />')


for label, host, cluster in CLUSTERS:
    report(label, host, cluster)
|
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Confirms presence of files generated by our targets we depend on.
If they exist, create a new file.
Note target's input files are explicitly NOT defined in the gyp file
so they can't easily be passed to this script as args.
"""
import os
import sys

# Name of the stamp file to create, e.g. deps_all_done_first_123.txt.
outfile = sys.argv[1]

# Our dependencies' input files are deliberately not declared in the gyp
# file, so probe for the files those targets generate; write the stamp
# only when all three exist.
if (os.path.exists("dep_1.txt") and
    os.path.exists("dep_2.txt") and
    os.path.exists("dep_3.txt")):
    # Context manager closes the handle promptly; the original leaked the
    # file object returned by open().
    with open(outfile, "w"):
        pass
|
import math


def hypotenuse(a, b):
    """Return the hypotenuse of a right triangle with legs *a* and *b*.

    Equivalent to (a**2 + b**2) ** 0.5, but math.hypot avoids intermediate
    overflow/underflow for extreme inputs.
    """
    return math.hypot(a, b)


if __name__ == "__main__":
    # Interactive part is guarded so importing this module does not block
    # waiting for stdin.
    a = float(input("Length of one side"))
    b = float(input("Length of second side"))
    c = hypotenuse(a, b)
    print("The length of Hypotenuse is", c)
|
import pygame
import pprint

# Initialise pygame and hide the mouse cursor (kiosk-style display).
pygame.init()
pygame.mouse.set_visible(0)

# Font files used throughout the UI.
fancyFont = 'fonts/Aladin-Regular.ttf'
plainFont = 'fonts/Arial.ttf'

# Shared RGB colours.
white = (255, 255, 255)
purple = (57,2,68)
pale = (209,203,211)
black = (51,51,51)
rain = (28,116,153)

# 800x480 window, the fonts at the sizes used by the clock/date/summary,
# and the static background image blitted behind everything.
screen = pygame.display.set_mode((800, 480))
timeFont = pygame.font.Font(fancyFont, 120)
dateFont = pygame.font.Font(fancyFont, 48)
summaryFont = pygame.font.Font(fancyFont, 37)
background = pygame.image.load('images/background.png')
class Display:
    """Renders the clock, date, weather summary, forecast tiles and action
    icons onto the module-level pygame `screen`.

    Coordinates throughout are absolute screen pixels for the fixed
    800x480 layout.
    """
    # Flipped on every tick() so the ':' separator blinks every other tick.
    ticktock = True
    # Warning flags/scores built while rendering the forecast; populated
    # by updateWeatherForecast().
    actions = None
    def __init__(self):
        # Paint the full background once, then push it to the display.
        screen.blit(background,(0,0))
        self.update();
    def tick(self):
        """Blink the ':' between hours and minutes (draw every other call)."""
        self.erase(201,35,27,86)
        self.ticktock = not self.ticktock
        if(self.ticktock):
            return
        theSecond = timeFont.render(':', True, purple)
        secondRect = theSecond.get_rect()
        secondRect.center = (210,79)
        screen.blit(theSecond, secondRect)
    def updateMinute(self,value):
        """Redraw the minutes portion of the clock with string *value*."""
        self.erase(225,25,125,119)
        theMinute = timeFont.render(value, True, purple)
        minuteRect = theMinute.get_rect()
        minuteRect.left = 225
        minuteRect.top = 16
        screen.blit(theMinute, minuteRect)
    def updateHour(self,value):
        """Redraw the hours portion of the clock with string *value*."""
        self.erase(70,25,139,119)
        theHour = timeFont.render(value, True, purple)
        hourRect = theHour.get_rect()
        hourRect.right = 198
        hourRect.top = 16
        screen.blit(theHour, hourRect)
    def updateWeatherSummary(self,summary):
        """Draw min/max temperatures; *summary* is (max, min) as strings."""
        self.erase(25,143,375,115)
        minTemp = summaryFont.render('min '+summary[1]+'°', True, pale)
        minRect = minTemp.get_rect()
        minRect.right = 198
        minRect.top = 174
        screen.blit(minTemp, minRect)
        maxTemp = summaryFont.render('max '+summary[0]+'°', True, pale)
        maxRect = maxTemp.get_rect()
        maxRect.left = 214
        maxRect.top = 174
        screen.blit(maxTemp, maxRect)
    def updateWeatherForecast(self,rawData,now):
        """Draw seven 3-hourly forecast tiles starting at the slot that
        covers *now*, accumulating action warnings as we go."""
        self.actions = {
            "uv":False,
            "wet":0,
            "cold":False,
        }
        self.erase(25,258,750,197)
        startIndex = self.findStartOfWeatherRange(rawData,now)
        finishIndex = startIndex + 7
        instance = 0
        for i in range(startIndex,finishIndex):
            fc = Forecast(rawData[i])
            # Tiles are 107px apart, starting at x=25.
            offset = 25 + (instance * 107)
            screen.blit(fc.render(), (offset,258))
            if(self.isRelevantToActions(rawData[i]["timestamp"][0].hour,now.hour)):
                self.updateActionValues(rawData[i])
            instance +=1
    def isRelevantToActions(self,hour,now):
        """Only forecasts later today between 09:00 and 18:00 count toward
        the action warnings."""
        if(hour > now and hour >= 9 and hour <= 18):
            return True
        return False
    def updateActionValues(self,data):
        # UV index above 5 is a flag
        if(data["U"][0] > 5):
            self.actions["uv"] = True
        # 30% chance of rain counts as one warning score
        if(data["Pp"][0] >= 30):
            self.actions["wet"] +=1
        # Anything 5 degrees or below is cold
        if(data["T"][0] <= 5):
            self.actions["cold"] = True
    def displayActions(self):
        """Show suncream/umbrella/hat icons for the accumulated warnings."""
        self.erase(405,143,370,116)
        # Any UV warning in the day gives an icon
        if(self.actions["uv"]):
            uvIcon = pygame.image.load('images/suncream.png')
            screen.blit(uvIcon,(405,143))
        # Rain score of 2 or more gives an icon
        if(self.actions["wet"] > 1):
            uvIcon = pygame.image.load('images/umbrella.png')
            screen.blit(uvIcon,(528,143))
        # Cold warning gives icon
        if(self.actions["cold"]):
            uvIcon = pygame.image.load('images/hat.png')
            screen.blit(uvIcon,(651,143))
    def findStartOfWeatherRange(self,rawData,now):
        """Return the index of the 3-hourly forecast slot covering *now*,
        or 0 when no slot matches."""
        # Round the current hour down to the nearest multiple of 3.
        startHour = now.strftime("%Y-%m-%d ") + str(now.hour - now.hour%3)
        for i in range(len(rawData)):
            forecastHour = rawData[i]["timestamp"][0].strftime("%Y-%m-%d %-H")
            if(forecastHour == startHour):
                return i
        return 0
    def erase(self,x,y,w,h):
        """Restore the background over the given rectangle and refresh."""
        screen.blit(background, (x, y), pygame.Rect(x, y, w, h))
        pygame.display.update()
    def updateDate(self,dayName,fullDate):
        """Redraw the day name and full date in the top-right panel."""
        self.erase(405,25,370,119)
        theDay = dateFont.render(dayName, True, purple)
        dayRect = theDay.get_rect()
        dayRect.center = (587,57)
        screen.blit(theDay, dayRect)
        theDate = dateFont.render(fullDate, True, purple)
        dateRect = theDate.get_rect()
        dateRect.center = (587,109)
        screen.blit(theDate, dateRect)
    def update(self):
        """Push all pending drawing to the physical display."""
        pygame.display.flip()
class Forecast:
    """Renders one 105x196 forecast tile: weather icon, time, rain chance
    and a temperature panel coloured by temperature."""
    # Raw forecast dict for this slot (keys observed: "W" icon code,
    # "Pp" rain %, "T" temperature, "timestamp").
    data = None
    # The tile surface drawn onto by the set* methods.
    tile = None
    font = None
    # Background colour for the temperature panel, keyed by the temperature
    # in °C as a string, from -10 (blue) through 0 (near-white) to 40 (red).
    temperatureColours = {
        "-10":(127,141,184),
        "-9":(139,152,191),
        "-8":(152,164,198),
        "-7":(162,172,203),
        "-6":(175,184,211),
        "-5":(187,194,217),
        "-4":(198,204,223),
        "-3":(211,215,230),
        "-2":(222,226,237),
        "-1":(234,237,243),
        "0":(245,246,250),
        "1":(255,249,232),
        "2":(255,245,217),
        "3":(255,241,202),
        "4":(255,237,186),
        "5":(255,234,172),
        "6":(255,229,155),
        "7":(255,226,141),
        "8":(255,222,125),
        "9":(255,218,109),
        "10":(255,215,101),
        "11":(255,210,98),
        "12":(255,204,95),
        "13":(255,199,92),
        "14":(255,194,89),
        "15":(255,189,86),
        "16":(255,184,83),
        "17":(255,179,80),
        "18":(255,173,77),
        "19":(255,168,74),
        "20":(255,164,71),
        "21":(255,158,68),
        "22":(255,153,66),
        "23":(255,148,62),
        "24":(255,143,59),
        "25":(255,138,57),
        "26":(255,131,53),
        "27":(254,125,51),
        "28":(250,116,51),
        "29":(248,109,51),
        "30":(243,98,51),
        "31":(239,87,51),
        "32":(235,78,51),
        "33":(231,68,51),
        "34":(227,58,51),
        "35":(222,46,51),
        "36":(218,36,51),
        "37":(214,25,51),
        "38":(209,13,51),
        "39":(205,3,51),
        "40":(195,0,49),
    }
    def __init__(self,data):
        self.data = data
        self.setTile()
        self.font = pygame.font.Font(plainFont, 20)
    #
    def setTile(self):
        """Create the transparent tile surface to draw on."""
        self.tile = pygame.Surface((105, 196), pygame.SRCALPHA, 32)
        self.tile = self.tile.convert_alpha()
    def setIcon(self):
        """Blit the weather-code icon for this slot onto the tile."""
        icon = pygame.image.load('images/weather_icons/'+str(self.data["W"][0])+'.png')
        self.tile.blit(icon,(15,40))
    def setTime(self):
        """Draw the slot's hour (HH:00) at the bottom of the tile."""
        theTime = self.font.render(self.data["timestamp"][0].strftime("%H:00"), True, black)
        timeRect = theTime.get_rect()
        timeRect.center = (51,170)
        self.tile.blit(theTime, timeRect)
    def setRainLikelihood(self):
        """Draw the rain probability; highlighted when above 30%."""
        chance = self.data["Pp"][0]
        if(chance <= 30):
            colour = black
        else:
            colour = rain
        theRain = self.font.render(str(chance)+'%', True, colour)
        rainRect = theRain.get_rect()
        rainRect .center = (51,24)
        self.tile.blit(theRain, rainRect )
    def setTemperature(self):
        """Draw the temperature on a panel coloured by its value."""
        t = self.data["T"][0]
        panel = pygame.Surface((51, 38))
        panel.fill(self.getTempColour(t))
        theTemp = self.font.render(str(t)+'°', True, black)
        fontRect = theTemp.get_rect()
        fontRect.center = (27,19)
        panel.blit(theTemp, fontRect )
        tempRect = panel.get_rect()
        tempRect.center = (52,130)
        self.tile.blit(panel, tempRect )
    def getTempColour(self,t):
        """Map temperature *t* to a panel colour, clamped to [-10, 40]."""
        # NOTE(review): the dict is keyed by integer strings, so this
        # assumes t is an int (a float like 2.5 would raise KeyError) --
        # TODO confirm the feed always supplies integers.
        if(t < -10):
            return self.temperatureColours["-10"]
        if(t > 40):
            return self.temperatureColours["40"]
        return self.temperatureColours[str(t)]
    def render(self):
        """Draw every element and return the finished tile surface."""
        self.setIcon()
        self.setTime()
        self.setRainLikelihood()
        self.setTemperature()
        return self.tile
|
# Each @app.route chain previously had no function beneath it, which is a
# SyntaxError (a decorator must be followed by a def). Stub view functions
# are attached to each route group so the module is importable; fill in
# the real handlers.

# Main Restaurant page
@app.route('/')
@app.route('/restaurants/')
def showRestaurants():
    """List all restaurants."""
    return "This page will show all my restaurants"


# Create New Restaurant
@app.route('/restaurant/new')
def newRestaurant():
    """Form for creating a new restaurant."""
    return "This page will be for making a new restaurant"


# Edit Restaurant
@app.route('/restaurant/<int:restaurant_id>/edit/')
def editRestaurant(restaurant_id):
    """Edit the restaurant with the given id."""
    return "This page will be for editing restaurant %s" % restaurant_id


# Delete Restaurant
@app.route('/restaurant/<int:restaurant_id>/delete/')
def deleteRestaurant(restaurant_id):
    """Delete the restaurant with the given id."""
    return "This page will be for deleting restaurant %s" % restaurant_id


# Show Restaurant Menu
@app.route('/restaurant/<int:restaurant_id>/menu/')
def showMenu(restaurant_id):
    """Show the menu for one restaurant."""
    return "This page is the menu for restaurant %s" % restaurant_id


# Create new menu item
@app.route('/restaurant/<int:restaurant_id>/menu/new/')
def newMenuItem(restaurant_id):
    """Form for adding a menu item to one restaurant."""
    return "This page is for making a new menu item for restaurant %s" % restaurant_id


# Edit Menu Item
@app.route('/restaurant/<int:restaurant_id>/menu/<int:menu_id>/edit/')
def editMenuItem(restaurant_id, menu_id):
    """Edit one menu item."""
    return "This page is for editing menu item %s" % menu_id


# Delete Menu Item
@app.route('/restaurant/<int:restaurant_id>/menu/<int:menu_id>/delete/')
def deleteMenuItem(restaurant_id, menu_id):
    """Delete one menu item."""
    return "This page is for deleting menu item %s" % menu_id
|
# coding=utf-8
def readAsDict(path):
    """Read a properties-style file into a dict of str -> str.

    Lines are 'key=value'; blank lines and '#' comments are skipped.
    A key without '=' maps to '' (never None). Only the first '=' splits,
    so values may themselves contain '='.

    Opens the file in text mode so the same code works on Python 2 and 3
    (the original opened in 'rb', which yields bytes on Python 3 and
    breaks every str operation below).
    """
    kv = {}
    with open(path) as f:
        for line in f:
            # strip() removes the trailing newline (and any \r) plus
            # surrounding whitespace in one call.
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            tokens = line.split('=', 1)
            kv[tokens[0].strip()] = tokens[1].strip() if len(tokens) == 2 else ''
    return kv
def readAsList(path):
    """Read a properties-style file into an ordered list of (key, value)
    str tuples (for callers that need to preserve file order).

    The second tuple element is '' (never None) when a line has no '='.
    Opens the file in text mode so the same code works on Python 2 and 3
    (the original opened in 'rb', which yields bytes on Python 3 and
    breaks every str operation below).
    """
    pairs = []
    with open(path) as f:
        for line in f:
            # strip() removes the trailing newline (and any \r) plus
            # surrounding whitespace in one call.
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            tokens = line.split('=', 1)
            pairs.append((tokens[0].strip(), tokens[1].strip() if len(tokens) == 2 else ''))
    return pairs
|
def factorial(n):
    """Return n! (the product 1 * 2 * ... * n), computed iteratively.

    For n < 1 the loop body never runs and the result is 1, matching the
    usual convention for 0!.
    """
    product = 1
    term = 1
    while term <= n:
        product *= term
        term += 1
    return product
# Demonstrate the helper by printing 10!, 11! and 12!.
for value in (10, 11, 12):
    print(factorial(value))

# File: exercise0805.py
# Author: Kaiching Chang
# Date: July, 2014
|
# -*- coding: utf-8 -*-
# List-comprehension examples. print() and range() are used so the script
# runs unchanged on both Python 2 and Python 3 (the original used the
# Python-2-only print statement and xrange).
print(list(range(1, 11)))

# Building the squares with an explicit loop...
squares = []
for x in range(1, 11):
    squares.append(x * x)
print(squares)

# ...and the same result with a list comprehension
print([x * x for x in range(1, 11)])
print([x * x for x in range(1, 11) if x % 2 == 0])

# Two nested loops in one comprehension
print([x + y for x in ['a', 'b', 'c'] for y in '123'])

# List every entry in the current directory
import os
print(os.listdir('.'))

L = ['HELLO', 'WORLD', 18, 'apple', None]
# Filter to strings only, then lowercase them
print([s.lower() for s in L if isinstance(s, str)])
|
"""
Model objects for the Nova mimic.
"""
from __future__ import absolute_import, division, unicode_literals
import re
import uuid
import attr
from random import randrange
from json import loads, dumps
from six.moves.urllib.parse import urlencode
from six import string_types
from six import text_type
from mimic.util.helper import (
seconds_to_timestamp,
random_string,
timestamp_to_seconds
)
from mimic.model.behaviors import (
BehaviorRegistryCollection, EventDescription, Criterion, regexp_predicate
)
from mimic.util.helper import json_from_request
from twisted.web.http import ACCEPTED, BAD_REQUEST, FORBIDDEN, NOT_FOUND, CONFLICT
from mimic.model.rackspace_images import RackspaceSavedImage
@attr.s
class LimitError(Exception):
    """
    Error to be raised when a limit has been exceeded.
    """
    # Nova-formatted message text surfaced to the API client.
    nova_message = attr.ib()
@attr.s
class BadRequestError(Exception):
    """
    Error to be raised when bad input has been received to Nova.
    """
    # Nova-formatted message text surfaced to the API client.
    nova_message = attr.ib()
def _nova_error_message(msg_type, message, status_code, request):
"""
Set the response code on the request, and return a JSON blob representing
a Nova error body, in the format Nova returns error messages.
:param str msg_type: What type of error this is - something like
"badRequest" or "itemNotFound" for Nova.
:param str message: The message to include in the body.
:param int status_code: The status code to set
:param request: the request to set the status code on
:return: dictionary representing the error body
"""
request.setResponseCode(status_code)
return {
msg_type: {
"message": message,
"code": status_code
}
}
def bad_request(message, request):
    """
    Build the JSON body for a Nova "badRequest" (400) error, setting the
    400 response code on *request* as a side effect.

    :param str message: The message to include in the bad request body.
    :param request: The request on which to set the response code.

    :return: dictionary representing the error body.
    """
    return _nova_error_message(
        msg_type="badRequest", message=message,
        status_code=BAD_REQUEST, request=request)
def not_found(message, request):
    """
    Build the JSON body for a Nova "itemNotFound" (404) error, setting the
    404 response code on *request* as a side effect.

    :param str message: The message to include in the bad request body.
    :param request: The request on which to set the response code.

    :return: dictionary representing the error body.
    """
    return _nova_error_message(
        msg_type="itemNotFound", message=message,
        status_code=NOT_FOUND, request=request)
def forbidden(message, request):
    """
    Build the JSON body for a Nova "forbidden" (403) error, setting the
    403 response code on *request* as a side effect.

    :param str message: The message to include in the bad request body.
    :param request: The request on which to set the response code.

    :return: dictionary representing the error body.
    """
    return _nova_error_message(
        msg_type="forbidden", message=message,
        status_code=FORBIDDEN, request=request)
def conflicting(message, request):
    """
    Build the JSON body for a Nova "conflictingRequest" (409) error, setting
    the 409 response code on *request* as a side effect.

    :param str message: The message to include in the bad request body.
    :param request: The request on which to set the response code.

    :return: dictionary representing the error body.
    """
    return _nova_error_message(
        msg_type="conflictingRequest", message=message,
        status_code=CONFLICT, request=request)
@attr.s
class Server(object):
"""
A :obj:`Server` is a representation of all the state associated with a nova
server. It can produce JSON-serializable objects for various pieces of
state that are required for API responses.
"""
admin_password = attr.ib()
collection = attr.ib()
creation_request_json = attr.ib()
creation_time = attr.ib()
disk_config = attr.ib()
flavor_ref = attr.ib()
image_ref = attr.ib()
key_name = attr.ib()
metadata = attr.ib()
private_ips = attr.ib()
public_ips = attr.ib()
server_id = attr.ib()
server_name = attr.ib()
status = attr.ib()
update_time = attr.ib()
max_metadata_items = attr.ib(validator=attr.validators.instance_of(int), default=40)
static_defaults = {
"OS-EXT-STS:power_state": 1,
"OS-EXT-STS:task_state": None,
"accessIPv4": "198.101.241.238", # TODO: same as public IP
"accessIPv6": "2001:4800:780e:0510:d87b:9cbc:ff04:513a",
"hostId": "33ccb6c82f3625748b6f2338f54d8e9df07cc583251e001355569056",
"progress": 100,
"user_id": "170454"
}
def addresses_json(self):
"""
Create a JSON-serializable data structure describing the public and
private IPs associated with this server.
"""
return {
"private": [addr.json() for addr in self.private_ips],
"public": [addr.json() for addr in self.public_ips]
}
def links_json(self, absolutize_url):
"""
Create a JSON-serializable data structure describing the links to this
server.
:param callable absolutize_url: see :obj:`default_create_behavior`.
"""
tenant_id = self.collection.tenant_id
server_id = self.server_id
return [
{
"href": absolutize_url("v2/{0}/servers/{1}"
.format(tenant_id, server_id)),
"rel": "self"
},
{
"href": absolutize_url("{0}/servers/{1}"
.format(tenant_id, server_id)),
"rel": "bookmark"
}
]
def brief_json(self, absolutize_url):
"""
Brief JSON-serializable version of this server, for the non-details
list servers request.
"""
return {
'name': self.server_name,
'links': self.links_json(absolutize_url),
'id': self.server_id
}
def detail_json(self, absolutize_url):
"""
Long-form JSON-serializable object representation of this server, as
returned by either a GET on this individual server or a member in the
list returned by the list-details request.
"""
template = self.static_defaults.copy()
tenant_id = self.collection.tenant_id
template.update({
"id": self.server_id,
"OS-DCF:diskConfig": self.disk_config,
"OS-EXT-STS:vm_state": self.status,
"addresses": self.addresses_json(),
"created": seconds_to_timestamp(self.creation_time),
"updated": seconds_to_timestamp(self.update_time),
"flavor": {
"id": self.flavor_ref,
"links": [{
"href": absolutize_url(
"{0}/flavors/{1}".format(tenant_id, self.flavor_ref)),
"rel": "bookmark"}],
},
"image": {
"id": self.image_ref,
"links": [{
"href": absolutize_url("{0}/images/{1}".format(
tenant_id, self.flavor_ref)),
"rel": "bookmark"
}]
}
if self.image_ref is not None else '',
"links": self.links_json(absolutize_url),
"key_name": self.key_name,
"metadata": self.metadata,
"name": self.server_name,
"tenant_id": tenant_id,
"status": self.status
})
return template
def creation_response_json(self, absolutize_url):
"""
A JSON-serializable object returned for the initial creation of this
server.
"""
return {
'server': {
"OS-DCF:diskConfig": self.disk_config,
"id": self.server_id,
"links": self.links_json(absolutize_url),
"adminPass": self.admin_password,
}
}
def set_metadata(self, metadata):
"""
Replace all metadata with given metadata
"""
self.metadata = metadata
self.update_time = self.collection.clock.seconds()
def set_metadata_item(self, key, value):
"""
Set a metadata item on the server.
All the response messages have been verified as of 2015-04-23 against
Rackspace Nova.
"""
if key not in self.metadata:
if len(self.metadata) == self.max_metadata_items:
raise LimitError(nova_message=(
"Maximum number of metadata items exceeds {0}"
.format(self.max_metadata_items)))
if not isinstance(value, string_types):
raise BadRequestError(nova_message=(
"Invalid metadata: The input is not a string or unicode"))
self.metadata[key] = value
self.update_time = self.collection.clock.seconds()
def update_status(self, status):
"""
Update status on the server. This will also update the `update_time`
of the server
"""
self.status = status
self.update_time = self.collection.clock.seconds()
@classmethod
def validate_metadata(cls, metadata, max_metadata_items=40):
"""
Validate the given metadata - this is the complete metadata dict.
All the response messages have been verified as of 2015-04-23 against
Rackspace Nova.
"""
if not isinstance(metadata, dict):
raise BadRequestError(nova_message="Malformed request body")
if len(metadata) > max_metadata_items:
raise LimitError(nova_message=(
"Maximum number of metadata items exceeds {0}"
.format(max_metadata_items)))
if not all(isinstance(v, string_types) for v in metadata.values()):
raise BadRequestError(nova_message=(
"Invalid metadata: The input is not a string or unicode"))
@classmethod
def from_creation_request_json(cls, collection, creation_json,
                               ipsegment=lambda: randrange(255),
                               max_metadata_items=40):
    """
    Create a :obj:`Server` from a JSON-serializable object that would be in
    the body of a create server request.

    :param collection: the regional collection the new server joins; its
        clock supplies the creation/update timestamps.
    :param dict creation_json: the parsed request body; must contain a
        ``'server'`` object with at least ``'name'``, ``'flavorRef'`` and
        ``'imageRef'`` keys.
    :param ipsegment: 0-argument callable yielding one IP octet; a hook so
        tests can make the generated addresses deterministic.
    :param int max_metadata_items: limit enforced on the metadata dict.
    :raises BadRequestError: for an invalid ``OS-DCF:diskConfig`` value or
        invalid metadata.
    """
    now = collection.clock.seconds()
    server_json = creation_json['server']
    # An absent or null diskConfig falls back to "AUTO".
    disk_config = server_json.get('OS-DCF:diskConfig', None) or "AUTO"
    if disk_config not in ["AUTO", "MANUAL"]:
        raise BadRequestError(nova_message=(
            "OS-DCF:diskConfig must be either 'MANUAL' or 'AUTO'."))
    metadata = server_json.get("metadata") or {}
    cls.validate_metadata(metadata, max_metadata_items)
    # Re-roll random 10.180.x.y addresses until one is unused by any other
    # server in this region.
    while True:
        private_ip = IPv4Address(
            address="10.180.{0}.{1}".format(ipsegment(), ipsegment()))
        if private_ip not in [addr for server in collection.servers
                              for addr in server.private_ips]:
            break
    self = cls(
        collection=collection,
        server_name=server_json['name'],
        # NOTE(review): both placeholders are {0}, so the same random number
        # appears twice in the ID — presumably intentional; confirm.
        server_id=('test-server{0}-id-{0}'
                   .format(str(randrange(9999999999)))),
        metadata=metadata,
        creation_time=now,
        update_time=now,
        private_ips=[private_ip],
        public_ips=[
            IPv4Address(address="198.101.241.{0}".format(ipsegment())),
            IPv6Address(address="2001:4800:780e:0510:d87b:9cbc:ff04:513a")
        ],
        key_name=None if 'key_name' not in server_json else server_json['key_name'],
        creation_request_json=creation_json,
        flavor_ref=server_json['flavorRef'],
        # NOTE(review): a request without 'imageRef' raises KeyError here
        # (only a falsy value becomes '') — confirm callers always send it.
        image_ref=server_json['imageRef'] or '',
        disk_config=disk_config,
        status="ACTIVE",
        admin_password=random_string(12),
        max_metadata_items=max_metadata_items
    )
    collection.servers.append(self)
    return self
@attr.s
class IPv4Address(object):
    """
    A single IPv4 address belonging to a server.
    """
    # Dotted-quad string, e.g. "10.180.3.7".
    address = attr.ib()

    def json(self):
        """
        Render this address as the Nova-style address JSON object.
        """
        rendered = {"addr": self.address, "version": 4}
        return rendered
@attr.s
class IPv6Address(object):
    """
    A single IPv6 address belonging to a server.
    """
    # Colon-separated hexadecimal string.
    address = attr.ib()

    def json(self):
        """
        Render this address as the Nova-style address JSON object.
        """
        rendered = {"addr": self.address, "version": 6}
        return rendered
# Event describing server-creation requests; the criteria and behavior
# creators below register themselves against it.
server_creation = EventDescription()
@server_creation.declare_criterion("server_name")
def server_name_criterion(value):
    """
    Build a :obj:`Criterion` that matches the regular expression string
    ``value`` against the ``"server_name"`` attribute.
    """
    predicate = regexp_predicate(value)
    return Criterion(name='server_name', predicate=predicate)
@server_creation.declare_criterion("metadata")
def metadata_criterion(value):
    """
    Build a :obj:`Criterion` which matches against server metadata.

    :param value: a dictionary, mapping a regular expression of a metadata
        key to a regular expression describing a metadata value.
    :type value: dict mapping unicode to unicode
    """
    def predicate(attribute):
        # Every key/value pattern pair must match for the criterion to hold.
        return all(
            re.compile(pattern).match(attribute.get(key, ""))
            for key, pattern in value.items())
    return Criterion(name='metadata', predicate=predicate)
@server_creation.declare_default_behavior
def default_create_behavior(collection, http, json, absolutize_url,
                            ipsegment=lambda: randrange(255), hook=None):
    """
    Default behavior in response to a server creation: make the server, add
    it to ``collection``, and return the ``ACCEPTED`` creation body.

    :param absolutize_url: a 1-argument callable mapping a region-relative
        path (the segments identifying an object within the compute
        service's URL hierarchy within a region) to an absolute URL.  The
        region's hierarchy begins before the version identifier — bookmark
        links omit the version and go straight to the tenant ID — so include
        'v2' first yourself when generating a versioned URL, and always pass
        the tenant ID as part of the input.  It is passed on every request
        so servers never memorize their full URLs, and Mimic therefore gives
        URLs appropriate to however it was accessed — intentionally
        supporting running tests against 'localhost' and later showing
        someone else the same state under a different hostname.
    :param ipsegment: a hook for IP generation so the IP addresses in tests
        are deterministic; normally a random number between 0 and 255.
    :param callable hook: an optional 1-argument callable invoked with the
        new :obj:`Server` after creating it but before generating the
        response, allowing a small tweak to the server's state.
    """
    created = Server.from_creation_request_json(collection, json, ipsegment)
    if hook is not None:
        hook(created)
    http.setResponseCode(ACCEPTED)
    return dumps(created.creation_response_json(absolutize_url))
def default_with_hook(function):
    """
    Convenience decorator for writing a slightly-customized version of
    :obj:`default_create_behavior`.

    :param Server function: a 1-argument function taking a :obj:`Server` and
        returning nothing.
    :return: a creation behavior with the same signature as
        :obj:`default_create_behavior`: it creates the server, adds it to
        the collection and returns a successful ``ACCEPTED`` response, but
        first mutates the new server's state via ``function``.
    """
    def hooked(collection, http, json, absolutize_url):
        return default_create_behavior(
            collection, http, json, absolutize_url, hook=function)
    return hooked
def _get_failure_behavior(parameters, create=False):
"""
Helper function to produce a failure to create function. Either creating
the server or not.
Takes three parameters:
``"code"``, an integer describing the HTTP response code, and
``"message"``, a string describing a textual message.
``"type"``, a string representing what type of error message it is
If ``type`` is "string", the message is just returned as the string body.
Otherwise, the following JSON body will be synthesized (as per the
canonical Nova error format):
```
{
<type>: {
"message": <message>,
"code": <code>
}
}
The default type is computeFault, the default code is 500, and the default
message is "The server has either erred or is incapable of performing the
requested operation".
"""
status_code = parameters.get("code", 500)
failure_type = parameters.get("type", "computeFault")
failure_message = parameters.get(
"message",
("The server has either erred or is incapable of performing the "
"requested operation"))
if failure_type == "string":
fail_body = failure_message
else:
fail_body = dumps({
failure_type: {
"message": failure_message,
"code": status_code
}
})
def _fail(collection, http, json, absolutize_url):
if create:
Server.from_creation_request_json(
collection, json, lambda: randrange(255))
http.setResponseCode(status_code)
return fail_body
return _fail
@server_creation.declare_behavior_creator("fail")
def create_fail_behavior(parameters):
    """
    Behavior creator for "fail": server creation fails outright and no
    server is created.

    Accepts the same three ``parameters`` as :func:`_get_failure_behavior`:
    ``"code"`` (integer HTTP response code, default 500), ``"message"``
    (textual message, default "The server has either erred or is incapable
    of performing the requested operation") and ``"type"`` (default
    computeFault).  When ``type`` is "string" the message is the raw body;
    otherwise the canonical Nova error JSON is synthesized — see
    :func:`_get_failure_behavior` for the exact format.
    """
    return _get_failure_behavior(parameters)
@server_creation.declare_behavior_creator("false-negative")
def create_success_report_failure_behavior(parameters):
    """
    Behavior creator for "false-negative": the response reports a failure,
    but the server is actually created.

    Accepts the same three ``parameters`` as :func:`_get_failure_behavior`:
    ``"code"`` (integer HTTP response code, default 500), ``"message"``
    (textual message, default "The server has either erred or is incapable
    of performing the requested operation") and ``"type"`` (default
    computeFault).  When ``type`` is "string" the message is the raw body;
    otherwise the canonical Nova error JSON is synthesized — see
    :func:`_get_failure_behavior` for the exact format.
    """
    return _get_failure_behavior(parameters, create=True)
@server_creation.declare_behavior_creator("build")
def create_building_behavior(parameters):
    """
    Behavior creator for "build".

    The created server starts in the ``"BUILD"`` status and flips to
    ``"ACTIVE"`` after the requested amount of time has elapsed.

    Takes one parameter: ``"duration"``, a Number giving the duration of the
    build process in seconds.
    """
    build_seconds = parameters["duration"]

    @default_with_hook
    def set_building(server):
        server.update_status(u"BUILD")
        server.collection.clock.callLater(
            build_seconds, server.update_status, u"ACTIVE")
    return set_building
@server_creation.declare_behavior_creator("error")
def create_error_status_behavior(parameters=None):
    """
    Behavior creator for "error": the created server lands in the
    ``"ERROR"`` state immediately.  Takes no parameters.
    """
    @default_with_hook
    def set_error(server):
        server.update_status(u"ERROR")
    return set_error
@server_creation.declare_behavior_creator("active-then-error")
def active_then_error(parameters):
    """
    Behavior creator simulating a server that comes up ``"ACTIVE"`` but
    later — for unknown reasons, presumably a hardware failure or similar
    operational issue — drops into ``"ERROR"``.

    Takes one parameter: ``"duration"``, a Number giving how long the server
    spends in the ``ACTIVE`` state.
    """
    active_seconds = parameters["duration"]

    @default_with_hook
    def fail_later(server):
        server.update_status(u"ACTIVE")
        server.collection.clock.callLater(
            active_seconds, server.update_status, u"ERROR")
    return fail_later
def metadata_to_creation_behavior(metadata):
    """
    Examine the metadata given to a server-creation request and derive a
    creation behavior from any magic key present; return None when no magic
    key is set (so the behavior registry decides instead).
    """
    if 'create_server_failure' in metadata:
        return create_fail_behavior(loads(metadata['create_server_failure']))
    if 'server_building' in metadata:
        build_params = {"duration": float(metadata['server_building'])}
        return create_building_behavior(build_params)
    if 'server_error' in metadata:
        return create_error_status_behavior()
    return None
@attr.s
class RegionalServerCollection(object):
    """
    A collection of servers, in a given region, for a given tenant.
    """
    tenant_id = attr.ib()
    region_name = attr.ib()
    clock = attr.ib()
    servers = attr.ib(default=attr.Factory(list))
    behavior_registry_collection = attr.ib(default=attr.Factory(
        lambda: BehaviorRegistryCollection()))

    def server_by_id(self, server_id):
        """
        Retrieve a :obj:`Server` object by its ID.

        Deleted servers are skipped, so a deleted ID behaves as not found.
        """
        for server in self.servers:
            if server.server_id == server_id and server.status != u"DELETED":
                return server

    def request_creation(self, creation_http_request, creation_json,
                         absolutize_url):
        """
        Request that a server be created.

        Magic metadata keys take precedence; otherwise the behavior registry
        (falling back to the default creation behavior) decides.
        """
        server = creation_json.get('server', {})
        metadata = server.get('metadata', {})
        behavior = metadata_to_creation_behavior(metadata)
        if behavior is None:
            registry = self.behavior_registry_collection.registry_by_event(
                server_creation)
            behavior = registry.behavior_for_attributes({
                "tenant_id": self.tenant_id,
                "server_name": creation_json["server"]["name"],
                "metadata": creation_json["server"].get("metadata", {})
            })
        return behavior(self, creation_http_request, creation_json,
                        absolutize_url)

    def request_read(self, http_get_request, server_id, absolutize_url):
        """
        Request the information / details for an individual server.

        Not found response verified against Rackspace Cloud Servers as of
        2015-04-30.
        """
        server = self.server_by_id(server_id)
        if server is None:
            return dumps(not_found("Instance could not be found",
                                   http_get_request))
        return dumps({"server": server.detail_json(absolutize_url)})

    def request_ips(self, http_get_ips_request, server_id):
        """
        Request the addresses JSON for a specific server.

        Not found response verified against Rackspace Cloud Servers as of
        2015-04-30.
        """
        # 200 is set up front; on a miss, ``not_found`` is expected to
        # override the response code — NOTE(review): confirm it does.
        http_get_ips_request.setResponseCode(200)
        server = self.server_by_id(server_id)
        if server is None:
            return dumps(not_found("Instance does not exist",
                                   http_get_ips_request))
        return dumps({"addresses": server.addresses_json()})

    def request_list(self, http_get_request, include_details, absolutize_url,
                     name=u"", limit=None, marker=None, changes_since=None):
        """
        Request the list JSON for all servers.

        :param str changes_since: ISO8601 formatted datetime. Based on
            http://docs.rackspace.com/servers/api/v2/cs-devguide/content/ChangesSince.html

        Note: only supports filtering by name right now, but will need to
        support more going forward.

        Pagination behavior verified against Rackspace Nova as of 2015-04-29.
        """
        to_be_listed = self.servers
        if changes_since is not None:
            since = timestamp_to_seconds(changes_since)
            to_be_listed = [s for s in to_be_listed if s.update_time >= since]
        # marker can be passed without limit, in which case the whole server
        # list, after the server that matches the marker, is returned
        if marker is not None:
            last_seen = [i for i, server in enumerate(to_be_listed)
                         if server.server_id == marker]
            if not last_seen:
                # Error response and body verified against Rackspace Nova as
                # of 2015-04-29
                return dumps(bad_request(
                    "marker [{0}] not found".format(marker),
                    http_get_request))
            else:
                last_seen = last_seen[0]
                to_be_listed = to_be_listed[last_seen + 1:]
        # A valid marker is an ID in the entire server list.  It does not
        # have to be for a server that matches the given name.
        to_be_listed = [server for server in to_be_listed
                        if name in server.server_name]
        # changes-since listings include deleted servers; plain listings
        # exclude them.
        if changes_since is None:
            to_be_listed = [s for s in to_be_listed if s.status != u"DELETED"]
        if limit is not None:
            try:
                limit = int(limit)
            except ValueError:
                return dumps(bad_request("limit param must be an integer",
                                         http_get_request))
            if limit < 0:
                return dumps(bad_request("limit param must be positive",
                                         http_get_request))
            to_be_listed = to_be_listed[:limit]
        result = {
            "servers": [
                server.brief_json(absolutize_url) if not include_details
                else server.detail_json(absolutize_url)
                for server in to_be_listed
            ]
        }
        # A server links blob is included only if limit is passed.  If
        # only the marker was provided, no server links blob is included.
        # Note that if limit=0, an empty server list is returned and no
        # server link blob is returned.
        if limit and len(to_be_listed) >= limit:
            query_params = {'limit': limit}
            query_params['marker'] = to_be_listed[-1].server_id
            if name:
                query_params['name'] = name
            path = "v2/{0}/servers{1}?{2}".format(
                self.tenant_id,
                "/detail" if include_details else "",
                urlencode(query_params))
            result["servers_links"] = [{
                "href": absolutize_url(path),
                "rel": "next"
            }]
        return dumps(result)

    def request_delete(self, http_delete_request, server_id):
        """
        Delete a server with the given ID.

        Not found response verified against Rackspace Cloud Servers as of
        2015-04-30.
        """
        server = self.server_by_id(server_id)
        if server is None:
            return dumps(not_found("Instance could not be found",
                                   http_delete_request))
        # Magic metadata can force the next N delete attempts to fail with
        # a 500; the counter is decremented and persisted back in metadata.
        if 'delete_server_failure' in server.metadata:
            srvfail = loads(server.metadata['delete_server_failure'])
            if srvfail['times']:
                srvfail['times'] -= 1
                server.metadata['delete_server_failure'] = dumps(srvfail)
                http_delete_request.setResponseCode(500)
                return b''
        http_delete_request.setResponseCode(204)
        # Soft-delete: the record stays so changes-since listings see it.
        server.update_status(u"DELETED")
        return b''

    def request_action(self, http_action_request, server_id, absolutize_url,
                       regional_image_collection, image_store):
        """
        Perform the requested action on the provided server.

        Supported actions: resize / confirmResize / revertResize, rescue /
        unrescue, reboot (HARD or SOFT), changePassword, rebuild and
        createImage; any other body yields a bad-request response.
        """
        server = self.server_by_id(server_id)
        if server is None:
            return dumps(not_found("Instance " + server_id + " could not be found",
                                   http_action_request))
        action_json = json_from_request(http_action_request)
        if 'resize' in action_json:
            flavor = action_json['resize'].get('flavorRef')
            if not flavor:
                return dumps(bad_request("Resize requests require 'flavorRef' attribute",
                                         http_action_request))
            server.status = 'VERIFY_RESIZE'
            # Remember the previous flavor so revertResize can restore it.
            server.oldFlavor = server.flavor_ref
            server.flavor_ref = flavor
            http_action_request.setResponseCode(202)
            return b''
        elif 'confirmResize' in action_json or 'revertResize' in action_json:
            if server.status == 'VERIFY_RESIZE' and 'confirmResize' in action_json:
                server.status = 'ACTIVE'
                http_action_request.setResponseCode(204)
                return b''
            elif server.status == 'VERIFY_RESIZE' and 'revertResize' in action_json:
                server.status = 'ACTIVE'
                server.flavor_ref = server.oldFlavor
                http_action_request.setResponseCode(202)
                return b''
            else:
                return dumps(conflicting(
                    "Cannot '" + list(action_json.keys())[0] + "' instance " +
                    server_id + " while it is in vm_state active",
                    http_action_request)
                )
        elif 'rescue' in action_json:
            if server.status != 'ACTIVE':
                return dumps(conflicting("Cannot 'rescue' instance " + server_id +
                                         " while it is in task state other than active",
                                         http_action_request))
            else:
                server.status = 'RESCUE'
                http_action_request.setResponseCode(200)
                password = random_string(12)
                return dumps({"adminPass": password})
        elif 'unrescue' in action_json:
            if server.status == 'RESCUE':
                server.status = 'ACTIVE'
                http_action_request.setResponseCode(200)
                return b''
            else:
                return dumps(conflicting("Cannot 'unrescue' instance " + server_id +
                                         " while it is in task state other than rescue",
                                         http_action_request))
        elif 'reboot' in action_json:
            reboot_type = action_json['reboot'].get('type')
            if not reboot_type:
                return dumps(bad_request("Missing argument 'type' for reboot",
                                         http_action_request))
            if reboot_type == 'HARD':
                server.status = 'HARD_REBOOT'
                http_action_request.setResponseCode(202)
                # A hard reboot takes 6 simulated seconds to come back.
                server.collection.clock.callLater(
                    6.0,
                    server.update_status,
                    u"ACTIVE")
                return b''
            elif reboot_type == 'SOFT':
                server.status = 'REBOOT'
                http_action_request.setResponseCode(202)
                # A soft reboot takes 3 simulated seconds to come back.
                server.collection.clock.callLater(
                    3.0,
                    server.update_status,
                    u"ACTIVE")
                return b''
            else:
                return dumps(bad_request("Argument 'type' for reboot is not HARD or SOFT",
                                         http_action_request))
        elif 'changePassword' in action_json:
            password = action_json['changePassword'].get('adminPass')
            if not password:
                return dumps(bad_request("No adminPass was specified",
                                         http_action_request))
            if server.status == 'ACTIVE':
                http_action_request.setResponseCode(202)
                return b''
            else:
                return dumps(conflicting("Cannot 'changePassword' instance " + server_id +
                                         " while it is in task state other than active",
                                         http_action_request))
        elif 'rebuild' in action_json:
            image_ref = action_json['rebuild'].get('imageRef')
            if not image_ref:
                return dumps(bad_request("Could not parse imageRef from request.", http_action_request))
            if server.status == 'ACTIVE':
                server.image_ref = image_ref
                server.status = 'REBUILD'
                http_action_request.setResponseCode(202)
                # Rebuild takes 5 simulated seconds before going ACTIVE.
                server.collection.clock.callLater(
                    5.0,
                    server.update_status,
                    u"ACTIVE")
                server_details = server.detail_json(absolutize_url)
                server_details['adminPass'] = 'password'
                return dumps({"server": server_details})
            else:
                return dumps(conflicting("Cannot 'rebuild' instance " + server_id +
                                         " while it is in task state other than active",
                                         http_action_request))
        elif 'createImage' in action_json:
            image_name = action_json['createImage'].get('name')
            # Fix: removed a stray no-op comparison
            # (``server == self.server_by_id(server_id)``) that discarded its
            # result; ``server`` is already bound and nullity-checked above.
            links = server.links_json(absolutize_url)
            server_id = server.server_id
            image_ref = server.image_ref
            image = image_store.get_image_by_id(image_ref)
            image_json = regional_image_collection.get_image(http_action_request,
                                                             image_ref, absolutize_url)
            image_dict = loads(image_json)
            flavor_classes = image_dict['image']['metadata']['flavor_classes']
            os_type = image_dict['image']['metadata']['os_type']
            os_distro = image_dict['image']['metadata']['org.openstack__1__os_distro']
            vm_mode = image_dict['image']['metadata']['vm_mode']
            disk_config = image_dict['image']['metadata']['auto_disk_config']
            image_id = text_type(uuid.uuid4())
            image_size = image.image_size
            minRam = image.minRam
            minDisk = image.minDisk
            saved_image = RackspaceSavedImage(image_id=image_id, tenant_id=self.tenant_id,
                                              image_size=image_size, name=image_name, minRam=minRam,
                                              minDisk=minDisk, links=links, server_id=server_id,
                                              flavor_classes=flavor_classes, os_type=os_type,
                                              os_distro=os_distro, vm_mode=vm_mode,
                                              disk_config=disk_config)
            image_store.add_image_to_store(saved_image)
            # NOTE(review): placeholder Location header — confirm whether a
            # real image URL should be generated here.
            http_action_request.setHeader(b"Location", b"www.someurl.com")
            http_action_request.setResponseCode(202)
            return b''
        else:
            return dumps(bad_request("There is no such action currently supported", http_action_request))
@attr.s
class GlobalServerCollections(object):
    """
    The set of all :obj:`RegionalServerCollection` objects owned by a given
    tenant — in other words, everything a single tenant owns globally in a
    Nova service.
    """
    tenant_id = attr.ib()
    clock = attr.ib()
    regional_collections = attr.ib(default=attr.Factory(dict))

    def collection_for_region(self, region_name):
        """
        Return the :obj:`RegionalServerCollection` for the region named
        ``region_name``, creating and caching it on first access.
        """
        existing = self.regional_collections.get(region_name)
        if existing is None:
            existing = RegionalServerCollection(tenant_id=self.tenant_id,
                                                region_name=region_name,
                                                clock=self.clock)
            self.regional_collections[region_name] = existing
        return existing
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
class Note(models.Model):
    """A user-owned note with a publishment (visibility) status."""
    PRIVATE = 'PV'
    PUBLIC = 'PL'
    URL = 'UR'
    PUBLISHMENT_STATUS_CHOICES = (
        (PRIVATE, 'Private'),  # Hidden; only the owner can view and edit.
        (PUBLIC, 'Public'),  # Anyone can view.
        (URL, 'URL'),  # Viewable by anyone who knows the URL.
    )
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    # Client-supplied string primary key.  NOTE(review): ``unique=True`` is
    # redundant alongside ``primary_key=True``.
    id = models.CharField(primary_key=True, unique=True, max_length=128)
    title = models.CharField(blank=True, max_length=1024)
    content = models.TextField(blank=True, max_length=524288)
    created_at = models.DateTimeField()
    updated_at = models.DateTimeField()
    is_markdown = models.BooleanField(default=False)
    # Soft-delete flag; rows are kept in the table.
    deleted = models.BooleanField(default=False)
    # Default 'PV' is the PRIVATE choice above.
    publishment_status = models.CharField(max_length=2, choices=PUBLISHMENT_STATUS_CHOICES, default='PV')

    def __str__(self):
        return str(self.title)
class Tag(models.Model):
    """A tag attached to a note."""
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    note = models.ForeignKey(Note, on_delete=models.CASCADE)
    # NOTE(review): ``name`` is the primary key, so each tag name can exist
    # only once globally (not per-note or per-user) — confirm intended.
    name = models.CharField(primary_key=True, unique=True, max_length=128)

    def __str__(self):
        return str(self.name)
|
from game import Game
import sys
def main():
    """Create the game, run its main loop, then terminate the process."""
    Game().run()
    sys.exit()


if __name__ == '__main__':
    main()
"""
Behaviour:
Game over:
-snake touches edge of screen
-snake touches itself
Snake movement:
-body trails its head
Snake:
-eats apple and grows by 1
Score:
-how many apples eaten
Menu screen:
-shows at the beginning
-disappears at any key press
Game over screen:
-displays on game over
-will go back to a new game on keypress
Key input:
-arrow keys and WASD change direction of snake
"""
"""
constants:
-colors
-dimensions
-size of cell
-speed(frame rate)
"""
# snake is a list of blocks
|
#!/usr/bin/env python
# Minimal CGI responder: emit a text/plain header followed by a greeting.
RESPONSE = 'Content-type: text/plain\n\nhello world from RSAL'
print(RESPONSE)
|
import cv2
import numpy as np
# HSV threshold ranges for board/checker segmentation.  The trailing
# comments preserve previously-tried alternative ranges kept for tuning.
lower_white = np.array([0, 0, 150])  # 0,0,200 5,100,100 , 0,0,200 - very sensitive
upper_white = np.array([180, 50, 255])  # 180,50,255 25,255,255 - 180, 255, 255
lower_pink = np.array([160, 100, 100])
upper_pink = np.array([179, 255, 255])
lower_dark_green = np.array([50, 60, 60])
upper_dark_green = np.array([80, 255, 255])
lower_yellow = np.array([20, 100, 100])  # lower_yellow = np.array([20, 100, 100])
upper_yellow = np.array([100, 255, 255])  # upper_yellow = np.array([50, 255, 255])
lower_blue = np.array([74, 100, 100])  # 100,50,50 100, 70, 70 84,100,100
upper_blue = np.array([114, 255, 255])  # 130,255,255 104,255,255
def check_edges(hsv_image):
    """
    Detect the eight yellow edge markers on the board.

    :param hsv_image: HSV image of the scene.
    :return: True when exactly eight yellow marker centroids are found,
        False otherwise.
    """
    yellow_mask = cv2.inRange(hsv_image, lower_yellow, upper_yellow)
    erode_kernel = np.ones((6, 6), np.uint8)
    dilate_kernel = np.ones((30, 30), np.uint8)
    # Erode to drop speckle noise, then dilate to merge each marker blob.
    eroded = cv2.erode(yellow_mask, erode_kernel, iterations=1)
    cleaned = cv2.dilate(eroded, dilate_kernel, iterations=1)
    # cv2.imshow('zolete', cleaned)
    # cv2.imshow('zoltemaksa', yellow_mask)
    # NOTE: the 3-value return is the OpenCV 3.x findContours signature.
    image, contours, hierarchy = cv2.findContours(cleaned, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    drawn = cv2.drawContours(image, contours, -1, (128, 255, 187), 3)
    centroids = []
    for contour in contours:
        moments = cv2.moments(contour)
        center_x = int(moments['m10'] / moments['m00'])
        center_y = int(moments['m01'] / moments['m00'])
        drawn = cv2.line(drawn, (center_x, center_y), (center_x, center_y), (128, 255, 187), 5)
        centroids.append([center_x, center_y])
    return len(centroids) == 8
def check_vertex_list(vertex_list):
    """
    Reorder the four detected corner points in place so they run top-left,
    top-right, bottom-right, bottom-left, comparing x-coordinates within
    the top pair (indices 0,1) and the bottom pair (indices 2,3).

    Returns the (possibly reordered) list.
    """
    top_in_order = vertex_list[0][0] <= vertex_list[1][0]
    top_swapped = vertex_list[0][0] >= vertex_list[1][0]
    bottom_in_order = vertex_list[2][0] >= vertex_list[3][0]
    bottom_swapped = vertex_list[2][0] <= vertex_list[3][0]
    if top_in_order and bottom_in_order:
        return vertex_list
    elif top_swapped and bottom_swapped:
        vertex_list[0], vertex_list[1] = vertex_list[1], vertex_list[0]
        vertex_list[2], vertex_list[3] = vertex_list[3], vertex_list[2]
        return vertex_list
    elif top_in_order and bottom_swapped:
        vertex_list[2], vertex_list[3] = vertex_list[3], vertex_list[2]
        return vertex_list
    elif top_swapped and bottom_in_order:
        vertex_list[0], vertex_list[1] = vertex_list[1], vertex_list[0]
        return vertex_list
def increase_board_area(vertex_list, margin=10):
    """
    Push each of the four corners diagonally outward by ``margin`` pixels so
    the warped board keeps a small border around the detected corners.

    :param vertex_list: corners in order top-left, top-right, bottom-right,
        bottom-left; mutated in place.
    :param int margin: outward offset in pixels (default 10, the original
        hard-coded value).
    :return: the mutated ``vertex_list``.
    """
    # Per-corner (dx, dy) signs in the order listed above.
    offsets = ((-margin, -margin), (margin, -margin),
               (margin, margin), (-margin, margin))
    for corner, (dx, dy) in zip(vertex_list, offsets):
        corner[0] += dx
        corner[1] += dy
    return vertex_list
def board_perspective_transform(source_image):
    """
    Locate the four blue corner markers of the board and warp the image so
    the board fills a 600x600 output.

    :param source_image: BGR image of the scene.
    :return: the warped 600x600 BGR image, or None when the four blue
        corner markers (or the eight yellow edge markers) are not all found.
    """
    # Image downscaling (disabled):
    # res = cv2.resize(image, None, fx=0.18, fy=0.18, interpolation=cv2.INTER_CUBIC)
    # cv2.imshow('obrazek', source_image)
    # cv2.waitKey(0)
    # Convert to HSV.
    hsv = cv2.cvtColor(source_image, cv2.COLOR_BGR2HSV)
    # cv2.imshow('obrazek', hsv)
    # cv2.waitKey(0)
    # Binary image of the pixels inside the blue threshold range.
    mask = cv2.inRange(hsv, lower_blue, upper_blue)
    #cv2.imshow('mask', mask)
    # cv2.waitKey(0)
    # Remove wrongly-detected isolated pixels: erode, then dilate back.
    kernel = np.ones((8, 8), np.uint8)
    erosion = cv2.erode(mask, kernel, iterations=1)
    #cv2.imshow('ed', erosion)
    # cv2.waitKey(0)
    dilation = cv2.dilate(erosion, kernel, iterations=1)
    # cv2.imshow('rogi', dilation)
    # cv2.waitKey(0)
    # Find the contours.
    # NOTE(review): the 3-value return is the OpenCV 3.x findContours API.
    image, contours, hierarchy = cv2.findContours(dilation, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    # Draw the contours.
    img2 = cv2.drawContours(image, contours, -1, (128, 255, 187), 3)
    # Compute the centers of mass of the marker rectangles.
    list_of_corners = []
    for i in contours:
        cnt = i
        M = cv2.moments(cnt)
        cx = int(M['m10'] / M['m00'])
        cy = int(M['m01'] / M['m00'])
        img2 = cv2.line(img2, (cx, cy), (cx, cy), (128, 255, 187), 5)
        list_of_corners.append([cx, cy])
    # cv2.imshow('obrazek', img2)
    # cv2.waitKey(0)
    if len(list_of_corners) != 4:
        print("Nie ma 4 rogow")
        return None
    else:
        if not check_edges(hsv):
            print('Nie ma 8 zoltych znacznikow')
            return None
        list_of_corners.reverse()
        # print('1. ' + str(list_of_points))
        checked_list_of_points = check_vertex_list(list_of_corners)
        # print('2. ' + str(checked_list_of_points))
        increased_list_of_points = increase_board_area(checked_list_of_points)
        pts1 = np.float32([increased_list_of_points[0], increased_list_of_points[1], increased_list_of_points[2],
                           increased_list_of_points[3]])
        pts2 = np.float32([[0, 0], [600, 0], [600, 600], [0, 600]])
        # Build the perspective-transform matrix.
        M = cv2.getPerspectiveTransform(pts1, pts2)
        # Apply the perspective transform.
        dst = cv2.warpPerspective(source_image, M, (600, 600))
        # cv2.imshow("obrazek", dst)
        # cv2.waitKey()
        return dst
def find_checkers(image):
    """
    Detect checker pieces as circles via the Hough transform.

    :param image: warped BGR board image.
    :return: the circle array from ``cv2.HoughCircles`` (rows of x, y,
        radius, rounded to uint16), or None when no circles are found.

    Side effect: shows the annotated detections in a 'Wykryte_pionki'
    window when any circle is found.
    """
    # Median blur improves circle detection.
    blurred_img = cv2.medianBlur(image, 7)
    gray_img = cv2.cvtColor(blurred_img, cv2.COLOR_BGR2GRAY)
    # Tuned Hough parameters; previously-tried values noted on the right.
    circles = cv2.HoughCircles(gray_img, cv2.HOUGH_GRADIENT, 1, 40, param1=35, param2=25, minRadius=29,
                               maxRadius=45)  # 1,40,35,22,25,35
    if circles is not None:
        circles = np.uint16(np.around(circles))
        index = 1
        img_circles = image.copy()
        for i in circles[0, :]:
            # print(str(index) + ': ' + str(i))
            index += 1
            # draw the outer circle
            cv2.circle(img_circles, (i[0], i[1]), i[2], (0, 255, 0), 2)
            # draw the center of the circle
            cv2.circle(img_circles, (i[0], i[1]), 2, (0, 0, 255), 3)
        small = cv2.resize(img_circles, (0, 0), fx=0.8, fy=0.8)
        cv2.imshow('Wykryte_pionki', small)
        # cv2.waitKey(0)
        # cv2.destroyAllWindows()
    return circles
def find_board_squares(origin=24, cell=69, rows=8, cols=8):
    """
    Compute the pixel bounds of every board square in the warped board
    image.

    The defaults (origin 24, cell 69, 8x8) match the 600x600 warp produced
    by ``board_perspective_transform``, preserving the original behavior.

    :param int origin: x and y offset of the board's top-left playable
        corner within the warped image.
    :param int cell: side length of one square, in pixels.
    :param int rows: number of square rows.
    :param int cols: number of square columns.
    :return: list of ``[[x1, y1], [x2, y2]]`` (top-left, bottom-right)
        boxes in row-major order (left-to-right, top-to-bottom).
    """
    return [
        [[origin + c * cell, origin + r * cell],
         [origin + (c + 1) * cell, origin + (r + 1) * cell]]
        for r in range(rows)
        for c in range(cols)
    ]
def find_colored_checkers(image, checkers, squares):
    """
    Classify each detected checker by color and map it onto the board.

    :param image: warped BGR board image.
    :param checkers: iterable of (x, y, radius) circle rows from
        :func:`find_checkers`.
    :param squares: square bounds from :func:`find_board_squares`.
    :return: a 64-element row-major list: 'n' for an empty square, else
        'WM' (white man), 'WK' (white king, pink marker), 'BK' (black
        king, green marker) or 'BM' (black man, the fallback).  Returns
        None when a crop around a checker is empty (degenerate coords).
    """
    board_set = ['n'] * 64
    hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
    kernel = np.ones((12, 12), np.uint8)
    kernel_damka = np.ones((16, 16), np.uint8)
    # NOTE(review): kernel_color is unused — the erosions that used it are
    # commented out below.
    kernel_color = np.ones((12, 12), np.uint8)
    mask_white = cv2.inRange(hsv, lower_white, upper_white)
    erosion_white = cv2.erode(mask_white, kernel, iterations=1)
    dilation_white = cv2.dilate(erosion_white, kernel, iterations=1)
    # cv2.imshow('w', mask_white)
    mask_pink = cv2.inRange(hsv, lower_pink, upper_pink)
    # erosion_pink = cv2.erode(mask_pink, kernel_color, iterations=1)
    dilation_pink = cv2.dilate(mask_pink, kernel_damka, iterations=1)
    # cv2.imshow('p', dilation_pink)
    mask_green = cv2.inRange(hsv, lower_dark_green, upper_dark_green)
    # erosion_green = cv2.erode(mask_green, kernel_color, iterations=1)
    dilation_green = cv2.dilate(mask_green, kernel_damka, iterations=1)
    # cv2.imshow('g', dilation_green)
    for checker in checkers:
        # 30x30 crop centered on the detected circle.
        x1 = checker[0] - 15
        y1 = checker[1] - 15
        x2 = checker[0] + 15
        y2 = checker[1] + 15
        crop_img_w = dilation_white[y1:y2, x1:x2]
        height_w, width_w = crop_img_w.shape
        if height_w == 0 or width_w == 0:
            return None
        crop_img_p = dilation_pink[y1:y2, x1:x2]
        height_p, width_p = crop_img_p.shape
        if height_p == 0 or width_p == 0:
            return None
        crop_img_g = dilation_green[y1:y2, x1:x2]
        height_g, width_g = crop_img_g.shape
        if height_g == 0 or width_g == 0:
            return None
        n_non_zero_w = cv2.countNonZero(crop_img_w)
        n_non_zero_p = cv2.countNonZero(crop_img_p)
        n_non_zero_g = cv2.countNonZero(crop_img_g)
        # A color wins when it covers more than 60% of the crop; otherwise
        # the piece defaults to a black man.
        if (n_non_zero_w / (height_w * width_w)) > 0.6:
            color = 'WM'
        elif (n_non_zero_p / (height_p * width_p)) > 0.6:
            color = 'WK'
        elif (n_non_zero_g / (height_g * width_g)) > 0.6:
            color = 'BK'
        else:
            color = 'BM'
        # Place the piece on the first square whose bounds contain its
        # center.
        for idx, square in enumerate(squares):
            if (checker[0] > square[0][0]) and (checker[0] < square[1][0]) \
                    and (checker[1] > square[0][1]) and (checker[1] < square[1][1]):
                position = idx
                board_set[position] = color
                break
    return board_set
def run_all(img):
    """
    Full detection pipeline: warp the board, detect the checkers, and
    classify them into a 64-entry board layout.

    :param img: raw BGR camera frame.
    :return: ``(board_list, warped_image)``; both are None when the corner
        markers cannot all be found or no checkers are detected.
    """
    warped = board_perspective_transform(img)
    if warped is None:
        # Not all board markers were found.
        return None, None
    detected_circles = find_checkers(warped)
    if detected_circles is None:
        print("There is no checkers")
        return None, None
    board_layout = find_colored_checkers(
        warped, detected_circles[0], find_board_squares())
    return board_layout, warped
# def check_if_was_move(source, list_of_eight_after_source):
# cnt = 0
# for x in list_of_eight_after_source:
# if x == source:
# cnt += 1
#
# if cnt > 4:
# return True
# else:
# return False
def choose_most_common_set(first_8_frames):
    """Return the most frequent board state among the sampled frames.

    Args:
        first_8_frames: non-empty list of board states (compared with ``==``).

    Returns:
        (state, count): the state appearing most often and its occurrence
        count. Ties resolve to the earliest such state, matching the
        original double-loop behavior.

    Raises:
        ValueError: if first_8_frames is empty.
    """
    if not first_8_frames:
        raise ValueError("first_8_frames must not be empty")
    # list.count replaces the hand-rolled nested loop; collections.Counter is
    # not applicable because the states are lists (unhashable).
    amounts = [first_8_frames.count(frame) for frame in first_8_frames]
    maximum = max(amounts)
    return first_8_frames[amounts.index(maximum)], maximum
# cap = cv2.VideoCapture(4)
#
# move_counter = 0
# counter = 0
# avoid_first_frame = 1
# prev = []
# list_of_eight_prev = []
#
# while cap.isOpened():
# ret, frame = cap.read()
#
# if ret:
# tab, img_transf = run_all(frame)
# # cv2.imshow('frame', frame)
# if tab is not None and img_transf is not None:
# # cv2.imshow('lol', img_transf)
# if avoid_first_frame == 1:
#
# prev = tab
# avoid_first_frame += 1
#
# else:
# if prev != tab:
# if counter == 0:
# prev = tab
# counter += 1
# else:
# list_of_eight_prev.append(tab)
# counter += 1
# else:
# if counter > 0:
# list_of_eight_prev.append(tab)
# counter += 1
#
# if counter == 8:
# if check_if_was_move(prev, list_of_eight_prev):
# move_counter += 1
# print(str(move_counter) + '------------------------RUCH--------------------------------------------------------------')
# counter = 0
# list_of_eight_prev = []
# prev = tab
# else:
# counter = 0
# list_of_eight_prev = []
# prev = tab
#
# if cv2.waitKey(1) & 0xFF == ord('q'):
# break
#
# else:
# break
#
# cap.release()
# cv2.destroyAllWindows()
|
'''
Created on Jul 26, 2013
@author: emma
'''
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.action_chains import ActionChains
from robot.libraries.BuiltIn import BuiltIn
class BillingInfo:
    """Robot Framework keyword library for the profile billing-info page.

    Created on Jul 26, 2013 by emma.
    """
    ROBOT_LIBRARY_SCOPE = 'GLOBAL'

    def __init__(self):
        # Reuse the session-wide WebDriverWrapper instance registered with Robot.
        self._webd_wrap = BuiltIn().get_library_instance('WebDriverWrapper')

    def confirm_page(self):
        ''' raises AssertionError if page is incorrect '''
        locator = (By.CLASS_NAME, 'l-section-header-2')
        self._webd_wrap.wait.until(EC.presence_of_element_located(locator))
        expected_prefix = self._webd_wrap._baseURL + '/profile/billing'
        current = self._webd_wrap._driver.current_url
        if not current.startswith(expected_prefix):
            raise AssertionError("Not on the billing info page.")

    def click_add_card(self):
        """Click the add-card link inside the section header."""
        self.confirm_page()
        header = self._webd_wrap._driver.find_element_by_class_name('l-section-header-2')
        header.find_element_by_xpath('a').click()

    def click_back_to_profile(self):
        """Click the back-to-profile link inside the section capital."""
        self.confirm_page()
        capital = self._webd_wrap._driver.find_element_by_class_name('l-section-capital')
        capital.find_element_by_xpath('a').click()
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2014 Noviat nv/sa (www.noviat.com). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import xlwt
import time
from datetime import datetime
from openerp.osv import orm
from openerp.report import report_sxw
from openerp.addons.report_xls.report_xls import report_xls
from openerp.addons.report_xls.utils import rowcol_to_cell, _render
from openerp.tools.translate import translate, _
from openerp import pooler
import logging
_logger = logging.getLogger(__name__)
_ir_translation_name = 'chart_dre_line.xls'
class chart_dre_line_xls_parser(report_sxw.rml_parse):
    """RML parser that feeds report data into the chart_dre_line XLS renderer."""

    def __init__(self, cr, uid, name, context):
        super(chart_dre_line_xls_parser, self).__init__(cr, uid, name, context=context)
        move_obj = self.pool.get('chart_dre_line')
        self.context = context
        # The model itself decides which columns appear and how the XLS
        # template is customised (hooks defined on chart_dre_line).
        wanted_list = move_obj._report_xls_fields(cr, uid, context)
        template_changes = move_obj._report_xls_template(cr, uid, context)
        self.localcontext.update({
            'datetime': datetime,
            'wanted_list': wanted_list,
            'template_changes': template_changes,
            '_': self._,
        })

    def _(self, src):
        # Translate *src* for the report; fall back to the untranslated
        # source string when no translation exists. Default language pt_BR.
        lang = self.context.get('lang', 'pt_BR')
        return translate(self.cr, _ir_translation_name, 'report', lang, src) or src
class chart_dre_line_xls(report_xls):
    """XLS renderer for the chart_dre_line model (monthly DRE report)."""

    def __init__(self, name, table, rml=False, parser=False, header=True, store=False):
        super(chart_dre_line_xls, self).__init__(name, table, rml, parser, header, store)
        # Cell Styles
        _xs = self.xls_styles
        # header row
        rh_cell_format = _xs['bold'] + _xs['fill'] + _xs['borders_all']
        self.rh_cell_style = xlwt.easyxf(rh_cell_format)
        self.rh_cell_style_center = xlwt.easyxf(rh_cell_format + _xs['center'])
        self.rh_cell_style_right = xlwt.easyxf(rh_cell_format + _xs['right'])
        # data lines
        aml_cell_format = _xs['borders_all']
        self.aml_cell_style = xlwt.easyxf(aml_cell_format)
        self.aml_cell_style_center = xlwt.easyxf(aml_cell_format + _xs['center'])
        self.aml_cell_style_date = xlwt.easyxf(
            aml_cell_format + _xs['left'], num_format_str=report_xls.date_format)
        self.aml_cell_style_decimal = xlwt.easyxf(
            aml_cell_format + _xs['right'], num_format_str=report_xls.decimal_format)
        # totals
        rt_cell_format = _xs['bold'] + _xs['fill'] + _xs['borders_all']
        self.rt_cell_style = xlwt.easyxf(rt_cell_format)
        self.rt_cell_style_right = xlwt.easyxf(rt_cell_format + _xs['right'])
        self.rt_cell_style_decimal = xlwt.easyxf(
            rt_cell_format + _xs['right'], num_format_str=report_xls.decimal_format)
        # XLS Template: column -> {section: [colspan, width, type, value, formula, style]}
        self.col_specs_template = {
            'name': {
                'header': [1, 42, 'text', _render(u"_('Descrição')")],
                'lines': [1, 0, 'text', _render("line.name or ''")],
                'totals': [1, 0, 'text', None]},
            'code': {
                'header': [1, 42, 'text', _render(u"_('Código')")],
                'lines': [1, 0, 'text', _render("line.code or ''")],
                'totals': [1, 0, 'text', None]},
            'period': {
                'header': [1, 12, 'text', _render(u"_('Período')")],
                'lines': [1, 0, 'text', _render("line.period_id.code or line.period_id.name")],
                'totals': [1, 0, 'text', None]},
            'parent': {
                'header': [1, 12, 'text', _render(u"_('Superior')")],
                'lines': [1, 0, 'text', _render("line.parent_id.code or line.parent_id.name")],
                'totals': [1, 0, 'text', None]},
            'account': {
                'header': [1, 36, 'text', _render(u"_('Conta')")],
                'lines': [1, 0, 'text', _render("line.account_id.code or line.account_id.name or ''")],
                'totals': [1, 0, 'text', None]},
            'type': {
                'header': [1, 36, 'text', _render(u"_('Tipo')")],
                'lines': [1, 0, 'text', _render("line.type")],
                'totals': [1, 0, 'text', None]},
            'value': {
                'header': [1, 18, 'text', _render(u"_('Valor')"), None, self.rh_cell_style_right],
                'lines': [1, 0, 'number', _render("line.value"), None, self.aml_cell_style_decimal],
                'totals': [1, 0, 'text', None]},
            'sum': {
                'header': [1, 18, 'text', _render(u"_('Soma')"), None, self.rh_cell_style_right],
                'lines': [1, 0, 'number', _render("line.sum"), None, self.aml_cell_style_decimal],
                'totals': [1, 0, 'text', None]},
        }

    def generate_xls_report(self, _p, _xs, data, objects, wb):
        """Render the worksheet: title, column headers, one row per DRE line."""
        wanted_list = _p.wanted_list
        self.col_specs_template.update(_p.template_changes)
        _ = _p._
        # Bug fix: the previous check `sum_pos = 'sum' in wanted_list and
        # wanted_list.index('sum')` evaluated to 0 (falsy) whenever 'sum' was
        # the FIRST wanted column, so the error below was raised even though
        # the column was present. A plain membership test is what was meant.
        if 'sum' not in wanted_list:
            raise orm.except_orm(
                _(u'Erro de Customização!'),
                _(u"A coluna 'Soma' é um campo calculado e sua presença é obrigatória!"))
        #report_name = objects[0]._description or objects[0]._name
        report_name = _(u"DRE do Mês")
        ws = wb.add_sheet(report_name[:31])  # xlwt caps sheet names at 31 chars
        ws.panes_frozen = True
        ws.remove_splits = True
        ws.portrait = 0  # Landscape
        ws.fit_width_to_pages = 1
        row_pos = 0
        # set print header/footer
        ws.header_str = self.xls_headers['standard']
        ws.footer_str = self.xls_footers['standard']
        # Title
        cell_style = xlwt.easyxf(_xs['xls_title'])
        c_specs = [
            ('report_name', 1, 0, 'text', report_name),
        ]
        row_data = self.xls_row_template(c_specs, ['report_name'])
        row_pos = self.xls_write_row(ws, row_pos, row_data, row_style=cell_style)
        row_pos += 1
        # Column headers
        c_specs = map(lambda x: self.render(
            x, self.col_specs_template, 'header', render_space={'_': _p._}), wanted_list)
        row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
        row_pos = self.xls_write_row(
            ws, row_pos, row_data, row_style=self.rh_cell_style, set_column_size=True)
        ws.set_horz_split_pos(row_pos)  # freeze header rows above the data
        # account move lines
        for line in objects:
            c_specs = map(lambda x: self.render(x, self.col_specs_template, 'lines'), wanted_list)
            row_data = self.xls_row_template(c_specs, [x[0] for x in c_specs])
            row_pos = self.xls_write_row(ws, row_pos, row_data, row_style=self.aml_cell_style)
# Instantiating report_xls registers the XLS renderer for the
# 'chart_dre_line' model under the given service name (import side effect).
chart_dre_line_xls('report.chart_dre_line.xls',
    'chart_dre_line',
    parser=chart_dre_line_xls_parser)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
#import dask.bag as db
#db.read_text('*.json').map(json.loads).pluck('name').frequencies().compute()
import z
import dask.dataframe as dd
import dask
import pandas as pd
import os
dask.config.set(scheduler='threads')
def getName(path):
    """Return the file name of *path* without its directory or extension."""
    base = os.path.basename(path)
    stem, _ext = os.path.splitext(base)
    return stem
dfd = None
def convertToDask(simple = False, astock=None):
    """Load the historical CSVs into the global dask dataframe *dfd*.

    simple: when True, skip the path/Change derivation and the zen refresh
            (used by historicalToCsv on the pre-trimmed "csv" directory).
    astock: restrict the load to a single ticker; None loads every CSV.
    Side effects: rebinds the global dfd and writes per-ticker indicator CSVs
    via createRollingData().
    """
    global dfd
    path = z.getPath(convertToDask.directory)
    whichone = "*"
    if astock:
        whichone = astock
    # include_path_column lets doone() recover the ticker name from the file path
    dfd = dd.read_csv('{}/{}.csv'.format(path, whichone), include_path_column = (not simple))
    if not simple:
        dfd = dfd.drop(['Adj Close'], axis=1)
        dfd['path'] = dfd['path'].map(lambda x: getName(x))
        # Change = intraday close/open ratio, rounded to 4 decimals
        dfd['Change'] = dfd.Close/dfd.Open
        dfd['Change'] = dfd['Change'].map(lambda x: round(x,4))
    print ("begin rolling")
    createRollingData()
    if not simple:
        import zen
        zen.regenerateHistorical()
# Default source directory; historicalToCsv() switches this to "csv".
convertToDask.directory = "historical"
def getModes():
    """Return the list of feature-column names, honouring any override."""
    # A truthy getModes.override short-circuits the default column list.
    return getModes.override or [
        'C3', 'C6', 'C12', 'C30', 'S30', 'C50', 'S12', 'A3', "P12",
        "Change", "Volume",
    ]
getModes.override = None
def threader():
    """Daemon worker loop: pull partition indices off *q* and process them.

    Runs forever; relies on daemon=True threads being killed when the main
    process exits. Each queue item is a [partition_index] list for doone().
    """
    global q
    while True:
        doone(q.get())
        q.task_done()
# Work queue shared between createRollingData() and the worker threads.
q = None
def createRollingData():
    """Write rolling-indicator CSVs for every partition of the global *dfd*.

    Spins up 7 daemon worker threads, queues one [index] item per dask
    partition, and blocks until all partitions have been processed.
    """
    global dfd, q
    import threading
    from queue import Queue
    q = Queue()
    for x in range(7):
        t = threading.Thread(target=threader)
        t.daemon = True
        t.start()
    for indx in range(dfd.npartitions):
        q.put([indx])
    q.join()
def doone(indx):
    """Worker body: compute one dask partition and write its indicator CSV.

    indx is a single-element list [partition_index] as queued by
    createRollingData(). Assumes each partition holds one ticker's rows in
    date order — TODO confirm against the CSV layout. Errors are traced and
    swallowed so one bad ticker does not kill the worker thread.
    """
    global dfd
    indx = indx[0]
    try:
        computed = dfd.get_partition(indx).compute()
        # 'path' carries the ticker name (set by convertToDask / historicalToCsv).
        name = computed.path[0]
        path = z.getPath("{}/{}.csv".format(createRollingData.dir, name))
        # Cn = close-to-(open n rows back) ratio, rounded to 4 decimals.
        computed['C3'] = (computed.Close/computed.Open.shift(3))\
            .map(lambda x: round(x,4))
        computed['C6'] = (computed.Close/computed.Open.shift(6))\
            .map(lambda x: round(x,4))
        computed['C12'] = (computed.Close/computed.Open.shift(12))\
            .map(lambda x: round(x,4))
        computed['C30'] = (computed.Close/computed.Open.shift(30))\
            .map(lambda x: round(x,4))
        # C50 uses Low vs High 50 rows back instead of Close/Open.
        computed['C50'] = (computed.Low/computed.High.shift(50))\
            .map(lambda x: round(x,4))
        computed['temp'] = computed.Close.map(lambda x: round(x**(.12),4))
        computed['P12'] = (computed.temp/computed.C12).map(lambda x: round(x,4))
        computed['S30'] = (computed.C30/(computed.C6*computed.Change)).map(lambda x: round(x,4))
        computed['S12'] = (computed.C12/computed.C3).map(lambda x: round(x,4))
        # A3 = 3-row rolling mean of Change; Volume smoothed over 5 rows.
        computed['A3'] = computed.Change.rolling(3).mean()\
            .map(lambda x: round(x,4))
        computed['Volume'] = computed.Volume.rolling(5).mean()\
            .map(lambda x: round(x,4))
        computed.to_csv(path)
        print("done indx : {}".format( indx ))
    except Exception as e:
        # NOTE(review): deliberate best-effort — failures are traced, not raised.
        z.trace(e)
        pass
# Output directory; historicalToCsv() switches this to "csvCalculated".
createRollingData.dir = "historicalCalculated"
#if __name__ == '__main__':
def historicalToCsv(astocka = None):
    """Trim full history down to the last *howmany* weeks and rebuild indicators.

    Copies rows on/after the cutoff date from historical/<stock>.csv into
    csv/<stock>.csv, then runs convertToDask(simple=True) so the rolling
    indicators are regenerated into csvCalculated/. With astocka=None the
    csv/ and csvCalculated/ directories are wiped and every stock rebuilt.
    """
    import csv
    stocks = [astocka]
    if not astocka:
        stocks = z.getStocks()
    # Keep the most recent 52 known trading dates; starti is the cutoff date.
    howmany = 52
    dates = z.getp("dates")
    starti = dates[-1 * howmany]
    # Redirect the dask pipeline at the trimmed data set.
    convertToDask.directory = "csv"
    createRollingData.dir = "csvCalculated"
    if not astocka:
        # Full rebuild: drop any previous trimmed output (best-effort).
        try:
            import shutil
            tpath = z.getPath("csv")
            shutil.rmtree(tpath)
            tpath = z.getPath("csvCalculated")
            shutil.rmtree(tpath)
        except:
            pass
    for astock in stocks:
        path = z.getPath("historical/{}.csv".format(astock))
        tpath = z.getPath("csv/{}.csv".format(astock))
        if os.path.exists(tpath):
            continue
        if not os.path.exists(path):
            # print ("need to download {}".format(astock))
            continue
        # if not os.path.exists(path):
        # continue
        with open(tpath, "w") as f:
            f.write("Date,Open,Close,High,Low,Volume,path\n")
            # Skip rows until the cutoff date is seen, then copy the rest.
            starting = False
            for row in csv.DictReader(open(path)):
                cdate = row['Date']
                if cdate == starti:
                    starting = True
                if starting:
                    f.write("{},{},{},{},{},{},{}\n".format(\
                        cdate,row['Open'],row['Close'],row['High'],row['Low'],row['Volume'],astock))
    convertToDask(simple=True, astock = astocka)
if __name__ == '__main__':
    # CLI entry point:
    #   "buy"     -> rebuild the trimmed csv/csvCalculated data set
    #   "history" -> rebuild the full historicalCalculated indicator set
    import sys
    try:
        if len(sys.argv) > 1:
            if sys.argv[1] == "buy":
                z.getStocks.devoverride = "ITOT"
                z.getStocks.extras = True
                historicalToCsv()
            if sys.argv[1] == "history":
                convertToDask.directory = "historical"
                createRollingData.dir = "historicalCalculated"
                convertToDask()
    except Exception as e:
        # Best-effort: trace the failure instead of dumping a stack trace.
        z.trace(e)
        pass
|
# Package exports for sigpy.learn: expose the `app` module plus every public
# name that sigpy.learn.util declares in its own __all__.
from sigpy.learn import app
from sigpy.learn import util
__all__ = ['app']
from sigpy.learn.util import *  # noqa
__all__.extend(util.__all__)
|
class Shell_sort:
    """Shell sort: in-place comparison sort over a shrinking gap sequence."""

    def sort(self, nums):
        """Sort *nums* in place using gapped exchanges (swap variant).

        :type nums: List[int] — the list to sort.
        """
        gap = len(nums) // 2
        while gap:
            for start in range(gap, len(nums)):
                pos = start
                # Bubble the element back through its gap-strided sublist.
                while pos >= gap and nums[pos] < nums[pos - gap]:
                    nums[pos - gap], nums[pos] = nums[pos], nums[pos - gap]
                    pos -= gap
            gap //= 2

    def sort_2(self, nums):
        """Sort *nums* in place using gapped insertion (shift variant).

        :type nums: List[int] — the list to sort.
        """
        gap = len(nums) // 2
        while gap:
            for start in range(gap, len(nums)):
                current = nums[start]
                pos = start
                # Shift larger gap-neighbours right, then drop current in place.
                while pos >= gap and current < nums[pos - gap]:
                    nums[pos] = nums[pos - gap]
                    pos -= gap
                nums[pos] = current
            gap //= 2
if __name__ == '__main__':
    # Quick manual check: sort a sample list in place and print it.
    print("start")
    a = [1, 7, 3, 5, 4, 0]
    s = Shell_sort()
    s.sort_2(a)
    print(a)
    print("end")
|
#!/usr/bin/env python3
# Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import logging
import monitored
import re
# WebIDL typed-array interface names mapped to their dart:typed_data classes.
# Merged into html_interface_renames below.
typed_array_renames = {
    'ArrayBuffer': 'ByteBuffer',
    'ArrayBufferView': 'TypedData',
    'DataView': 'ByteData',
    'Float32Array': 'Float32List',
    'Float64Array': 'Float64List',
    'Int8Array': 'Int8List',
    'Int16Array': 'Int16List',
    'Int32Array': 'Int32List',
    'Uint8Array': 'Uint8List',
    'Uint8ClampedArray': 'Uint8ClampedList',
    'Uint16Array': 'Uint16List',
    'Uint32Array': 'Uint32List',
}
# IDL interface name -> Dart class name. Names starting with '_' are hidden
# from the public dart:html API; the typed-array renames are merged in.
html_interface_renames = monitored.Dict(
    'htmlrenamer.html_interface_renames',
    dict(
        {
            'Attr':
                '_Attr',
            'BudgetService':
                '_BudgetService',
            'CDATASection':
                'CDataSection',
            'Clipboard':
                '_Clipboard',  # TODO(terry): Need to remove when ACX Clipboard is renamed to AcxClipboard.
            'Database':
                'SqlDatabase',  # Avoid conflict with Index DB's Database.
            'DatabaseSync':
                'SqlDatabaseSync',
            'DOMFileSystem':
                'FileSystem',
            'Entity':
                '_Entity',  # Not sure if we want to expose this yet, may conflict with other libs.
            'EntryCallback':
                '_EntryCallback',
            'EntriesCallback':
                '_EntriesCallback',
            'ErrorCallback':
                '_ErrorCallback',
            'FileCallback':
                '_FileCallback',
            'FileSystemCallback':
                '_FileSystemCallback',
            'FileWriterCallback':
                '_FileWriterCallback',
            'HTMLDocument':
                'HtmlDocument',
            'HTMLElement':
                'HtmlElement',
            'HTMLHtmlElement':
                'HtmlHtmlElement',
            'IDBFactory':
                'IdbFactory',  # Manual to avoid name conflicts.
            'Iterator':
                'DomIterator',
            'Key':
                'CryptoKey',
            'NamedNodeMap':
                '_NamedNodeMap',
            'NavigatorUserMediaErrorCallback':
                '_NavigatorUserMediaErrorCallback',
            'NavigatorUserMediaSuccessCallback':
                '_NavigatorUserMediaSuccessCallback',
            'NotificationPermissionCallback':
                '_NotificationPermissionCallback',
            'Position':
                'Geoposition',
            'PositionCallback':
                '_PositionCallback',
            'PositionErrorCallback':
                '_PositionErrorCallback',
            'Request':
                '_Request',
            'Report':
                '_Report',
            'RTCDTMFSender':
                'RtcDtmfSender',
            'RTCDTMFToneChangeEvent':
                'RtcDtmfToneChangeEvent',
            'RTCErrorCallback':
                '_RtcErrorCallback',
            'RTCSessionDescriptionCallback':
                '_RtcSessionDescriptionCallback',
            'SVGDocument':
                'SvgDocument',  # Manual to avoid name conflicts.
            'SVGElement':
                'SvgElement',  # Manual to avoid name conflicts.
            'SVGGradientElement':
                '_GradientElement',
            'SVGSVGElement':
                'SvgSvgElement',  # Manual to avoid name conflicts.
            'Stream':
                'FileStream',
            'StringCallback':
                '_StringCallback',
            'WebGL2RenderingContext':
                'RenderingContext2',
            'WebGL2RenderingContextBase':
                'RenderingContextBase2',
            'WindowTimers':
                '_WindowTimers',
            'XMLHttpRequest':
                'HttpRequest',
            'XMLHttpRequestUpload':
                'HttpRequestUpload',
            'XMLHttpRequestEventTarget':
                'HttpRequestEventTarget',
        },
        **typed_array_renames))
# Some callback interfaces are not just a simple callback functions. If the callback
# interface is in this list then the interface is exposed as a class.
_gen_callback_interfaces = ['NodeFilter']

def generateCallbackInterface(id):
    """Return True when callback interface *id* must be emitted as a class."""
    return id in _gen_callback_interfaces
# Interfaces that are suppressed, but need to still exist for Dartium and to
# properly wrap DOM objects if/when encountered.
_removed_html_interfaces = [
    'Bluetooth',
    'BluetoothAdvertisingData',
    'BluetoothCharacteristicProperties',
    'BluetoothDevice',
    'BluetoothRemoteGATTCharacteristic',
    'BluetoothRemoteGATTServer',
    'BluetoothRemoteGATTService',
    'BluetoothUUID',
    'Cache',  # TODO: Symbol conflicts with Angular: dartbug.com/20937
    'CanvasPath',
    'CDataSection',
    'CSSPrimitiveValue',
    'CSSUnknownRule',
    'CSSValue',
    'Counter',
    'DOMFileSystemSync',  # Workers
    'DatabaseCallback',  # WebSql was removed
    'DatabaseSync',  # Workers
    'DirectoryEntrySync',  # Workers
    'DirectoryReaderSync',  # Workers
    'DocumentType',
    'EntrySync',  # Workers
    'FileEntrySync',  # Workers
    'FileReaderSync',  # Workers
    'FileWriterSync',  # Workers
    'HTMLAllCollection',
    'HTMLAppletElement',
    'HTMLBaseFontElement',
    'HTMLDirectoryElement',
    'HTMLFontElement',
    'HTMLFrameElement',
    'HTMLFrameSetElement',
    'HTMLMarqueeElement',
    'IDBAny',
    'Mojo',
    'MojoHandle',
    'MojoInterfaceInterceptor',
    'MojoInterfaceRequestEvent',
    'MojoWatcher',
    'NFC',
    'Notation',
    'PagePopupController',
    'RGBColor',
    'RadioNodeList',  # Folded onto NodeList in dart2js.
    'Rect',
    'Response',  # TODO: Symbol conflicts with Angular: dartbug.com/20937
    'SQLTransactionSync',  # Workers
    'SQLTransactionSyncCallback',  # Workers
    'SVGAltGlyphDefElement',  # Webkit only.
    'SVGAltGlyphItemElement',  # Webkit only.
    'SVGAnimateColorElement',  # Deprecated. Use AnimateElement instead.
    'SVGColor',
    'SVGComponentTransferFunctionElement',  # Currently not supported anywhere.
    'SVGCursorElement',  # Webkit only.
    'SVGFEDropShadowElement',  # Webkit only for the following:
    'SVGFontElement',
    'SVGFontFaceElement',
    'SVGFontFaceFormatElement',
    'SVGFontFaceNameElement',
    'SVGFontFaceSrcElement',
    'SVGFontFaceUriElement',
    'SVGGlyphElement',
    'SVGGlyphRefElement',
    'SVGHKernElement',
    'SVGMPathElement',
    'SVGPaint',
    'SVGMissingGlyphElement',
    'SVGTRefElement',
    'SVGVKernElement',
    'SubtleCrypto',
    'USB',
    'USBAlternateInterface',
    'USBConfiguration',
    'USBConnectionEvent',
    'USBDevice',
    'USBEndpoint',
    'USBInTransferResult',
    'USBInterface',
    'USBIsochronousInTransferPacket',
    'USBIsochronousInTransferResult',
    'USBIsochronousOutTransferPacket',
    'USBIsochronousOutTransferResult',
    'USBOutTransferResult',
    'WebKitCSSFilterValue',
    'WebKitCSSMatrix',
    'WebKitCSSMixFunctionValue',
    'WebKitCSSTransformValue',
    'WebKitMediaSource',
    'WebKitNotification',
    'WebGLRenderingContextBase',
    'WebGL2RenderingContextBase',
    'WebKitSourceBuffer',
    'WebKitSourceBufferList',
    'WorkerLocation',  # Workers
    'WorkerNavigator',  # Workers
    'Worklet',  # Rendering Workers
    'XMLHttpRequestProgressEvent',
    # Obsolete event for NaCl.
    'ResourceProgressEvent',
]

# Suppressed interfaces are still renamed: a leading underscore hides them
# from the public API while keeping the wrapper class available.
for interface in _removed_html_interfaces:
    html_interface_renames[interface] = '_' + interface
# 'Interface.member' entries whose Dart-side API is wrapped to return a
# Future instead of taking success/error callbacks. The loop after
# convertedFutureMembers() below hides each raw binding with a leading '_'.
convert_to_future_members = monitored.Set(
    'htmlrenamer.converted_to_future_members', [
        'Database.changeVersion',
        'Database.readTransaction',
        'DataTransferItem.getAsString',
        'DirectoryEntry.getDirectory',
        'DirectoryEntry.getFile',
        'DirectoryEntry.removeRecursively',
        'Entry.copyTo',
        'Entry.getMetadata',
        'Entry.getParent',
        'Entry.moveTo',
        'Entry.remove',
        'FileEntry.createWriter',
        'FileEntry.file',
        'FontLoader.notifyWhenFontsReady',
        'MediaStreamTrack.getSources',
        'Notification.requestPermission',
        'SQLTransaction.executeSql',
        'StorageInfo.requestQuota',
        'StorageQuota.requestQuota',
        'Window.webkitRequestFileSystem',
        'Window.webkitResolveLocalFileSystemURL',
        'WorkerGlobalScope.webkitRequestFileSystem',
        'WorkerGlobalScope.webkitResolveLocalFileSystemURL',
    ])
# DDC Exposed Classes
# Maps interface -> operation -> list of applyExtension statements the code
# generator emits after the operation's Future completes.
ddc_extensions = monitored.Dict(
    'ddcextensions.ddc_extensions', {
        'DirectoryEntry': {
            'getDirectory': [''],
            'getFile': [
                'applyExtension(\'FileEntry\', value);',
                'applyExtension(\'webkitFileSystemFileEntry\', value);',
                'applyExtension(\'FileSystemFileEntry\', value);',
            ]
        },
        'Entry': {
            'getMetadata': [
                'applyExtension(\'Metadata\', value);',
            ],
            'getParent': [
                'applyExtension(\'Entry\', value);',
                'applyExtension(\'webkitFileSystemEntry\', value);',
                'applyExtension(\'FileSystemEntry\', value);',
            ]
        },
        'FileEntry': {
            'createWriter': ['applyExtension(\'FileWriter\', value);'],
            'file': ['applyExtension(\'File\', value);']
        },
        'SQLTransaction': {
            'executeSql': [
                # Fixed: a missing comma made these two adjacent string
                # literals concatenate into one list element; every other
                # entry keeps one applyExtension statement per element.
                'applyExtension(\'SQLResultSet\', resultSet);',
                'applyExtension(\'SQLResultSetRowList\', resultSet.rows);'
            ],
        },
        'Window': {
            'webkitRequestFileSystem': [
                'applyExtension(\'DOMFileSystem\', value);',
                'applyExtension(\'WebKitFileSystem\', value);',
                'applyExtension(\'webkitFileSystem\', value);',
                'applyExtension(\'FileSystem\', value);',
                'applyExtension(\'DirectoryEntry\', value.root);',
                'applyExtension(\'webkitFileSystemDirectoryEntry\', value.root);',
                'applyExtension(\'FileSystemDirectoryEntry\', value.root);',
            ]
        },
    })
# DDC Extension for this interface operation?
def GetDDC_Extension(interface, operationName):
    """Return the list of DDC applyExtension snippets registered for
    *interface*.*operationName*, or None when no entry exists."""
    if interface.id not in ddc_extensions:
        return None
    operations = ddc_extensions[interface.id]
    if operationName not in operations:
        return None
    return operations[operationName]
# Classes where we have customized constructors, but we need to keep the old
# constructor for dispatch purposes.
custom_html_constructors = monitored.Set(
    'htmlrenamer.custom_html_constructors',
    [
        'CompositionEvent',  # 45 Roll hide default constructor use Dart's custom
        'CustomEvent',  # 45 Roll hide default constructor use Dart's custom
        'Event',  # 45 Roll hide default constructor use Dart's custom
        'HashChangeEvent',  # 45 Roll hide default constructor use Dart's custom
        'HTMLAudioElement',
        'HTMLOptionElement',
        'KeyboardEvent',  # 45 Roll hide default constructor use Dart's custom
        'MessageEvent',  # 45 Roll hide default constructor use Dart's custom
        'MouseEvent',  # 45 Roll hide default constructor use Dart's custom
        'MutationObserver',
        'StorageEvent',  # 45 Roll hide default constructor use Dart's custom
        'UIEvent',  # 45 Roll hide default constructor use Dart's custom
        'WheelEvent',  # 45 Roll hide default constructor use Dart's custom
    ])
# Members from the standard dom that should not be exposed publicly in dart:html
# but need to be exposed internally to implement dart:html on top of a standard
# browser. They are exposed simply by placing an underscore in front of the
# name. Entries use the 'Interface.member' form.
private_html_members = monitored.Set(
    'htmlrenamer.private_html_members',
    [
        'AudioNode.connect',
        'Cache.add',
        'Cache.delete',
        'Cache.keys',
        'Cache.match',
        'Cache.matchAll',
        'Cache.put',
        'CanvasRenderingContext2D.arc',
        'CanvasRenderingContext2D.drawImage',
        'CanvasRenderingContext2D.getLineDash',
        'Crypto.getRandomValues',
        'CSSStyleDeclaration.getPropertyValue',
        'CSSStyleDeclaration.setProperty',
        'CSSStyleDeclaration.var',
        'CompositionEvent.initCompositionEvent',
        'CustomEvent.detail',
        'CustomEvent.initCustomEvent',
        'DataTransferItem.webkitGetAsEntry',
        'DeviceOrientationEvent.initDeviceOrientationEvent',
        'DirectoryEntry.createReader',
        'DirectoryReader.readEntries',
        'Document.createElement',
        'Document.createElementNS',
        'Document.createEvent',
        'Document.createNodeIterator',
        'Document.createTextNode',
        'Document.createTouch',
        'Document.createTouchList',
        'Document.createTreeWalker',
        'Document.querySelectorAll',
        'DocumentFragment.querySelectorAll',
        # Moved to HTMLDocument.
        'Document.body',
        'Document.caretRangeFromPoint',
        'Document.elementFromPoint',
        'Document.getCSSCanvasContext',
        'Document.head',
        'Document.lastModified',
        'Document.preferredStylesheetSet',
        'Document.referrer',
        'Document.selectedStylesheetSet',
        'Document.styleSheets',
        'Document.title',
        'Document.webkitCancelFullScreen',
        'Document.webkitExitFullscreen',
        # Not prefixed.
        'Document.webkitFullscreenElement',
        'Document.webkitFullscreenEnabled',
        'Document.webkitHidden',
        'Document.webkitIsFullScreen',
        'Document.webkitVisibilityState',
        # Not prefixed but requires custom implementation for cross-browser compatibility.
        'Document.visibilityState',
        'Element.animate',
        'Element.childElementCount',
        'Element.firstElementChild',
        'Element.getClientRects',
        'Element.getElementsByTagName',
        'Element.insertAdjacentHTML',
        'Element.scrollIntoView',
        'Element.getAttribute',
        'Element.getAttributeNS',
        'Element.hasAttribute',
        'Element.hasAttributeNS',
        'Element.removeAttribute',
        'Element.removeAttributeNS',
        'Element.setAttribute',
        'Element.setAttributeNS',
        'Element.innerHTML',
        'Element.querySelectorAll',
        # TODO(vsm): These have been converted from int to double in Chrome 36.
        # Special case them so we run on 34, 35, and 36.
        'Element.scrollLeft',
        'Element.scrollTop',
        'Element.scrollWidth',
        'Element.scrollHeight',
        'Event.initEvent',
        'EventTarget.addEventListener',
        'EventTarget.removeEventListener',
        'FileReader.result',
        'Geolocation.clearWatch',
        'Geolocation.getCurrentPosition',
        'Geolocation.watchPosition',
        'HashChangeEvent.initHashChangeEvent',
        'HTMLCanvasElement.toDataURL',
        'HTMLTableElement.createCaption',
        'HTMLTableElement.createTFoot',
        'HTMLTableElement.createTHead',
        'HTMLTableElement.createTBody',
        'HTMLTableElement.insertRow',
        'HTMLTableElement.rows',
        'HTMLTableElement.tBodies',
        'HTMLTableRowElement.cells',
        'HTMLTableRowElement.insertCell',
        'HTMLTableSectionElement.insertRow',
        'HTMLTableSectionElement.rows',
        'IDBCursor.delete',
        'IDBCursor.update',
        'IDBDatabase.createObjectStore',
        'IDBFactory.deleteDatabase',
        'IDBFactory.webkitGetDatabaseNames',
        'IDBFactory.open',
        'IDBIndex.count',
        'IDBIndex.get',
        'IDBIndex.getKey',
        'IDBIndex.openCursor',
        'IDBIndex.openKeyCursor',
        'IDBObjectStore.add',
        'IDBObjectStore.clear',
        'IDBObjectStore.count',
        'IDBObjectStore.createIndex',
        'IDBObjectStore.delete',
        'IDBObjectStore.get',
        'IDBObjectStore.openCursor',
        'IDBObjectStore.put',
        'KeyboardEvent.initKeyboardEvent',
        'KeyboardEvent.keyIdentifier',
        'MediaKeys.createSession',
        'MediaKeySession.update',
        'MessageEvent.initMessageEvent',
        'MessagePort.start',
        'MouseEvent.initMouseEvent',
        'MouseEvent.clientX',
        'MouseEvent.clientY',
        'MouseEvent.movementX',
        'MouseEvent.movementY',
        'MouseEvent.offsetX',
        'MouseEvent.offsetY',
        'MouseEvent.screenX',
        'MouseEvent.screenY',
        'MutationObserver.observe',
        'Navigator.getGamepads',
        'Node.attributes',
        'Node.localName',
        'Node.namespaceURI',
        'Node.removeChild',
        'Node.replaceChild',
        'ParentNode.childElementCount',
        'ParentNode.children',
        'ParentNode.firstElementChild',
        'ParentNode.lastElementChild',
        'ParentNode.querySelectorAll',
        'Range.getClientRects',
        'Screen.availHeight',
        'Screen.availLeft',
        'Screen.availTop',
        'Screen.availWidth',
        'ShadowRoot.resetStyleInheritance',
        'Storage.clear',
        'Storage.getItem',
        'Storage.key',
        'Storage.length',
        'Storage.removeItem',
        'Storage.setItem',
        'StorageEvent.initStorageEvent',
        'SubtleCrypto.encrypt',
        'SubtleCrypto.decrypt',
        'SubtleCrypto.sign',
        'SubtleCrypto.digest',
        'SubtleCrypto.importKey',
        'SubtleCrypto.unwrapKey',
        'ShadowRoot.applyAuthorStyles',
        'SpeechSynthesis.getVoices',
        'TextEvent.initTextEvent',
        # TODO(leafp): These have been converted from int to double in Chrome 37.
        # client, page, and screen were already special cased, adding radiusX/radiusY.
        # See impl_Touch.darttemplate for impedance matching code
        'Touch.clientX',
        'Touch.clientY',
        'Touch.pageX',
        'Touch.pageY',
        'Touch.screenX',
        'Touch.screenY',
        'Touch.radiusX',
        'Touch.radiusY',
        'UIEvent.initUIEvent',
        'UIEvent.layerX',
        'UIEvent.layerY',
        'UIEvent.pageX',
        'UIEvent.pageY',
        'UIEvent.which',
        'KeyboardEvent.charCode',
        'KeyboardEvent.keyCode',
        'KeyboardEvent.which',
        'WebGLRenderingContext.readPixels',
        'WebGL2RenderingContext.readPixels',
        'WheelEvent.initWebKitWheelEvent',
        'WheelEvent.deltaX',
        'WheelEvent.deltaY',
        'WorkerGlobalScope.webkitNotifications',
        'Window.getComputedStyle',
        'Window.clearInterval',
        'Window.clearTimeout',
        # TODO(tll): These have been converted from int to double in Chrome 39 for
        # subpixel precision.  Special case for backward compatibility.
        'Window.pageXOffset',
        'Window.pageYOffset',
        'Window.scrollX',
        'Window.scrollY',
        'WindowTimers.clearInterval',
        'WindowTimers.clearTimeout',
        'WindowTimers.setInterval',
        'WindowTimers.setTimeout',
        'Window.moveTo',
        'Window.requestAnimationFrame',
        'Window.setInterval',
        'Window.setTimeout',
    ])
# Members from the standard dom that exist in the dart:html library with
# identical functionality but with cleaner names. NOTE: this dict is further
# mutated by the convert_to_future_members loop below, which '_'-prefixes the
# Dart name of every Future-converted member.
renamed_html_members = monitored.Dict(
    'htmlrenamer.renamed_html_members',
    {
        'ConsoleBase.assert': 'assertCondition',
        'CSSKeyframesRule.insertRule': 'appendRule',
        'DirectoryEntry.getDirectory': '_getDirectory',
        'DirectoryEntry.getFile': '_getFile',
        'Document.createCDATASection': 'createCDataSection',
        'Document.defaultView': 'window',
        'Window.CSS': 'css',
        'Window.webkitNotifications': 'notifications',
        'Window.webkitRequestFileSystem': '_requestFileSystem',
        'Window.webkitResolveLocalFileSystemURL': 'resolveLocalFileSystemUrl',
        'Navigator.webkitGetUserMedia': '_getUserMedia',
        'Node.appendChild': 'append',
        'Node.cloneNode': 'clone',
        'Node.nextSibling': 'nextNode',
        'Node.parentElement': 'parent',
        'Node.previousSibling': 'previousNode',
        'Node.textContent': 'text',
        'SVGElement.className': '_svgClassName',
        'SVGStopElement.offset': 'gradientOffset',
        'URL.createObjectURL': 'createObjectUrl',
        'URL.revokeObjectURL': 'revokeObjectUrl',
        #'WorkerContext.webkitRequestFileSystem': '_requestFileSystem',
        #'WorkerContext.webkitRequestFileSystemSync': '_requestFileSystemSync',
        # OfflineAudioContext.suspend has an signature incompatible with shadowed
        # base class method AudioContext.suspend.
        'OfflineAudioContext.suspend': 'suspendFor',
    })
# Members that have multiple definitions whose types vary, so we rename
# them to make each overload distinct. Keys carry the full IDL signature.
renamed_overloads = monitored.Dict(
    'htmldartgenerator.renamed_overloads', {
        'AudioContext.createBuffer(ArrayBuffer buffer, boolean mixToMono)':
            'createBufferFromBuffer',
        'CSS.supports(DOMString conditionText)':
            'supportsCondition',
        'DataTransferItemList.add(File file)':
            'addFile',
        'DataTransferItemList.add(DOMString data, DOMString type)':
            'addData',
        'FormData.append(DOMString name, Blob value, DOMString filename)':
            'appendBlob',
        'RTCDataChannel.send(ArrayBuffer data)':
            'sendByteBuffer',
        'RTCDataChannel.send(ArrayBufferView data)':
            'sendTypedData',
        'RTCDataChannel.send(Blob data)':
            'sendBlob',
        'RTCDataChannel.send(DOMString data)':
            'sendString',
        'SourceBuffer.appendBuffer(ArrayBufferView data)':
            'appendTypedData',
        'URL.createObjectURL(MediaSource source)':
            'createObjectUrlFromSource',
        'URL.createObjectURL(WebKitMediaSource source)':
            '_createObjectUrlFromWebKitSource',
        'URL.createObjectURL(MediaStream stream)':
            'createObjectUrlFromStream',
        'URL.createObjectURL(Blob blob)':
            'createObjectUrlFromBlob',
        'WebSocket.send(ArrayBuffer data)':
            'sendByteBuffer',
        'WebSocket.send(ArrayBufferView data)':
            'sendTypedData',
        'WebSocket.send(DOMString data)':
            'sendString',
        'WebSocket.send(Blob data)':
            'sendBlob',
        'Window.setInterval(DOMString handler, long timeout, any arguments)':
            '_setInterval_String',
        'Window.setTimeout(DOMString handler, long timeout, any arguments)':
            '_setTimeout_String',
        'WindowTimers.setInterval(DOMString handler, long timeout, any arguments)':
            '_setInterval_String',
        'WindowTimers.setTimeout(DOMString handler, long timeout, any arguments)':
            '_setTimeout_String',
    })
# Members that have multiple definitions, but their types are identical (only
# number of arguments vary), so we do not rename them as a _raw method.
keep_overloaded_members = monitored.Set(
    'htmldartgenerator.keep_overloaded_members', [
        'CanvasRenderingContext2D.putImageData',
        'CanvasRenderingContext2D.webkitPutImageDataHD',
        'DataTransferItemList.add', 'Document.createElement',
        'Document.createElementNS', 'HTMLInputElement.setRangeText',
        'HTMLTextAreaElement.setRangeText', 'IDBDatabase.transaction',
        'RTCDataChannel.send', 'URL.createObjectURL', 'WebSocket.send',
        'XMLHttpRequest.send'
    ])
# Members that can be overloaded (kept as Dart overload sets after renaming).
overloaded_and_renamed = monitored.Set(
    'htmldartgenerator.overloaded_and_renamed', [
        'CanvasRenderingContext2D.clip',
        'CanvasRenderingContext2D.drawFocusIfNeeded',
        'CanvasRenderingContext2D.fill',
        'CanvasRenderingContext2D.isPointInPath',
        'CanvasRenderingContext2D.isPointInStroke',
        'CanvasRenderingContext2D.stroke',
        'Navigator.sendBeacon',
    ])
def convertedFutureMembers(member):
    """Return True if this member is exposed as a Future-returning wrapper."""
    return member in convert_to_future_members


# Every Future-converted member gets a private ('_'-prefixed) raw name; the
# public Future-based API is generated on top of it. If a rename already
# exists, prefix that; otherwise derive the name from the member part after
# the 'Interface.' prefix.
for member in convert_to_future_members:
    if member in renamed_html_members:
        renamed_html_members[member] = '_' + renamed_html_members[member]
    else:
        renamed_html_members[member] = '_' + member[member.find('.') + 1:]
# Members and classes from the dom that should be removed completely from
# dart:html. These could be expressed in the IDL instead but expressing this
# as a simple table instead is more concise.
# Syntax is: ClassName.(get\:|set\:|call\:|on\:)?MemberName
# Using get: and set: is optional and should only be used when a getter needs
# to be suppressed but not the setter, etc.
# Prepending ClassName with = will only match against direct class, not for
# subclasses.
# TODO(jacobr): cleanup and augment this list.
removed_html_members = monitored.Set(
    'htmlrenamer.removed_html_members',
    [
        'Attr.textContent',  # Not needed as it is the same as Node.textContent.
        'AudioContext.decodeAudioData',
        'AudioBufferSourceNode.looping',  # TODO(vsm): Use deprecated IDL annotation
        'CSSStyleDeclaration.getPropertyCSSValue',
        'HTMLCanvasElement.toBlob',
        'CanvasRenderingContext2D.clearShadow',
        'CanvasRenderingContext2D.drawImageFromRect',
        'CanvasRenderingContext2D.setAlpha',
        'CanvasRenderingContext2D.setCompositeOperation',
        'CanvasRenderingContext2D.setFillColor',
        'CanvasRenderingContext2D.setLineCap',
        'CanvasRenderingContext2D.setLineJoin',
        'CanvasRenderingContext2D.setLineWidth',
        'CanvasRenderingContext2D.setMiterLimit',
        'CanvasRenderingContext2D.setShadow',
        'CanvasRenderingContext2D.setStrokeColor',
        # Disable the webKit version, imageSmoothingEnabled is exposed.
        'CanvasRenderingContext2D.webkitImageSmoothingEnabled',
        'CharacterData.remove',
        'ChildNode.replaceWith',
        'CSSStyleDeclaration.__getter__',
        'CSSStyleDeclaration.__setter__',
        'Window.call:blur',
        'Window.call:focus',
        'Window.clientInformation',
        'Window.createImageBitmap',
        'Window.get:frames',
        'Window.get:length',
        'Window.openDatabase',
        'Window.on:beforeUnload',
        'Window.on:webkitTransitionEnd',
        'Window.pagePopupController',
        'Window.prompt',
        'Window.webkitCancelAnimationFrame',
        'Window.webkitCancelRequestAnimationFrame',
        'Window.webkitIndexedDB',
        'Window.webkitRequestAnimationFrame',
        'Document.alinkColor',
        'Document.all',
        'Document.append',
        'Document.applets',
        'Document.bgColor',
        'Document.clear',
        'Document.createAttribute',
        'Document.createAttributeNS',
        'Document.createComment',
        'Document.createExpression',
        'Document.createNSResolver',
        'Document.createProcessingInstruction',
        'Document.designMode',
        'Document.dir',
        'Document.evaluate',
        'Document.fgColor',
        'Document.get:URL',
        'Document.get:anchors',
        'Document.get:characterSet',
        'Document.get:compatMode',
        'Document.get:defaultCharset',
        'Document.get:doctype',
        'Document.get:documentURI',
        'Document.get:embeds',
        'Document.get:forms',
        'Document.get:inputEncoding',
        'Document.get:links',
        'Document.get:plugins',
        'Document.get:scripts',
        'Document.get:xmlEncoding',
        'Document.getElementsByTagNameNS',
        'Document.getOverrideStyle',
        'Document.getSelection',
        'Document.images',
        'Document.linkColor',
        'Document.location',
        'Document.on:wheel',
        'Document.open',
        'Document.prepend',
        'Document.register',
        'Document.set:domain',
        'Document.vlinkColor',
        'Document.webkitCurrentFullScreenElement',
        'Document.webkitFullScreenKeyboardInputAllowed',
        'Document.write',
        'Document.writeln',
        'Document.xmlStandalone',
        'Document.xmlVersion',
        'DocumentFragment.children',
        'DocumentType.*',
        'DOMException.code',
        'DOMException.ABORT_ERR',
        'DOMException.DATA_CLONE_ERR',
        'DOMException.DOMSTRING_SIZE_ERR',
        'DOMException.HIERARCHY_REQUEST_ERR',
        'DOMException.INDEX_SIZE_ERR',
        'DOMException.INUSE_ATTRIBUTE_ERR',
        'DOMException.INVALID_ACCESS_ERR',
        'DOMException.INVALID_CHARACTER_ERR',
        'DOMException.INVALID_MODIFICATION_ERR',
        'DOMException.INVALID_NODE_TYPE_ERR',
        'DOMException.INVALID_STATE_ERR',
        'DOMException.NAMESPACE_ERR',
        'DOMException.NETWORK_ERR',
        'DOMException.NOT_FOUND_ERR',
        'DOMException.NOT_SUPPORTED_ERR',
        'DOMException.NO_DATA_ALLOWED_ERR',
        'DOMException.NO_MODIFICATION_ALLOWED_ERR',
        'DOMException.QUOTA_EXCEEDED_ERR',
        'DOMException.SECURITY_ERR',
        'DOMException.SYNTAX_ERR',
        'DOMException.TIMEOUT_ERR',
        'DOMException.TYPE_MISMATCH_ERR',
        'DOMException.URL_MISMATCH_ERR',
        'DOMException.VALIDATION_ERR',
        'DOMException.WRONG_DOCUMENT_ERR',
        'Element.accessKey',
        'Element.append',
        'Element.children',
        'Element.dataset',
        'Element.get:classList',
        'Element.getAttributeNode',
        'Element.getAttributeNodeNS',
        'Element.getElementsByTagNameNS',
        'Element.innerText',
        # TODO(terry): All offset* attributes are in both HTMLElement and Element
        # (it's a Chrome bug with a FIXME note to correct - sometime).
        # Until corrected these Element attributes must be ignored.
        'Element.offsetParent',
        'Element.offsetTop',
        'Element.offsetLeft',
        'Element.offsetWidth',
        'Element.offsetHeight',
        'Element.outerText',
        'Element.prepend',
        'Element.removeAttributeNode',
        'Element.set:outerHTML',
        'Element.setApplyScroll',
        'Element.setAttributeNode',
        'Element.setAttributeNodeNS',
        'Element.setDistributeScroll',
        'Element.webkitCreateShadowRoot',
        'Element.webkitMatchesSelector',
        'Element.webkitPseudo',
        'Element.webkitShadowRoot',
        '=Event.returnValue',  # Only suppress on Event, allow for BeforeUnloadEvent.
        'Event.srcElement',
        'EventSource.URL',
        'FontFace.ready',
        'FontFaceSet.load',
        'FontFaceSet.ready',
        'HTMLAnchorElement.charset',
        'HTMLAnchorElement.coords',
        'HTMLAnchorElement.rev',
        'HTMLAnchorElement.shape',
        'HTMLAnchorElement.text',
        'HTMLAppletElement.*',
        'HTMLAreaElement.noHref',
        'HTMLBRElement.clear',
        'HTMLBaseFontElement.*',
        'HTMLBodyElement.aLink',
        'HTMLBodyElement.background',
        'HTMLBodyElement.bgColor',
        'HTMLBodyElement.link',
        'HTMLBodyElement.on:beforeUnload',
        'HTMLBodyElement.text',
        'HTMLBodyElement.vLink',
        'HTMLDListElement.compact',
        'HTMLDirectoryElement.*',
        'HTMLDivElement.align',
        'HTMLFontElement.*',
        'HTMLFormControlsCollection.__getter__',
        'HTMLFormElement.get:elements',
        'HTMLFrameElement.*',
        'HTMLFrameSetElement.*',
        'HTMLHRElement.align',
        'HTMLHRElement.noShade',
        'HTMLHRElement.size',
        'HTMLHRElement.width',
        'HTMLHeadElement.profile',
        'HTMLHeadingElement.align',
        'HTMLHtmlElement.manifest',
        'HTMLHtmlElement.version',
        'HTMLIFrameElement.align',
        'HTMLIFrameElement.frameBorder',
        'HTMLIFrameElement.longDesc',
        'HTMLIFrameElement.marginHeight',
        'HTMLIFrameElement.marginWidth',
        'HTMLIFrameElement.scrolling',
        'HTMLImageElement.align',
        'HTMLImageElement.hspace',
        'HTMLImageElement.longDesc',
        'HTMLImageElement.name',
        'HTMLImageElement.vspace',
        'HTMLInputElement.align',
        'HTMLLegendElement.align',
        'HTMLLinkElement.charset',
        'HTMLLinkElement.rev',
        'HTMLLinkElement.target',
        'HTMLMarqueeElement.*',
        'HTMLMenuElement.compact',
        'HTMLMetaElement.scheme',
        'HTMLOListElement.compact',
        'HTMLObjectElement.align',
        'HTMLObjectElement.archive',
        'HTMLObjectElement.border',
        'HTMLObjectElement.codeBase',
        'HTMLObjectElement.codeType',
        'HTMLObjectElement.declare',
        'HTMLObjectElement.hspace',
        'HTMLObjectElement.standby',
        'HTMLObjectElement.vspace',
        'HTMLOptionElement.text',
        'HTMLOptionsCollection.*',
        'HTMLParagraphElement.align',
        'HTMLParamElement.type',
        'HTMLParamElement.valueType',
        'HTMLPreElement.width',
        'HTMLScriptElement.text',
        'HTMLSelectElement.options',
        'HTMLSelectElement.selectedOptions',
        'HTMLTableCaptionElement.align',
        'HTMLTableCellElement.abbr',
        'HTMLTableCellElement.align',
        'HTMLTableCellElement.axis',
        'HTMLTableCellElement.bgColor',
        'HTMLTableCellElement.ch',
        'HTMLTableCellElement.chOff',
        'HTMLTableCellElement.height',
        'HTMLTableCellElement.noWrap',
        'HTMLTableCellElement.scope',
        'HTMLTableCellElement.vAlign',
        'HTMLTableCellElement.width',
        'HTMLTableColElement.align',
        'HTMLTableColElement.ch',
        'HTMLTableColElement.chOff',
        'HTMLTableColElement.vAlign',
        'HTMLTableColElement.width',
        'HTMLTableElement.align',
        'HTMLTableElement.bgColor',
        'HTMLTableElement.cellPadding',
        'HTMLTableElement.cellSpacing',
        'HTMLTableElement.frame',
        'HTMLTableElement.rules',
        'HTMLTableElement.summary',
        'HTMLTableElement.width',
        'HTMLTableRowElement.align',
        'HTMLTableRowElement.bgColor',
        'HTMLTableRowElement.ch',
        'HTMLTableRowElement.chOff',
        'HTMLTableRowElement.vAlign',
        'HTMLTableSectionElement.align',
        'HTMLTableSectionElement.ch',
        'HTMLTableSectionElement.chOff',
        'HTMLTableSectionElement.vAlign',
        'HTMLTitleElement.text',
        'HTMLUListElement.compact',
        'HTMLUListElement.type',
        'IDBDatabase.transaction',  # We do this in a template without the generated implementation at all.
        'Location.valueOf',
        'MessageEvent.data',
        'MouseEvent.webkitMovementX',
        'MouseEvent.webkitMovementY',
        'MouseEvent.x',
        'MouseEvent.y',
        'Navigator.bluetooth',
        'Navigator.isProtocolHandlerRegistered',
        'Navigator.unregisterProtocolHandler',
        'Navigator.usb',
        'Node.compareDocumentPosition',
        'Node.get:DOCUMENT_POSITION_CONTAINED_BY',
        'Node.get:DOCUMENT_POSITION_CONTAINS',
        'Node.get:DOCUMENT_POSITION_DISCONNECTED',
        'Node.get:DOCUMENT_POSITION_FOLLOWING',
        'Node.get:DOCUMENT_POSITION_IMPLEMENTATION_SPECIFIC',
        'Node.get:DOCUMENT_POSITION_PRECEDING',
        'Node.get:childNodes',
        'Node.get:prefix',
        'Node.hasAttributes',
        'Node.isDefaultNamespace',
        'Node.isEqualNode',
        'Node.isSameNode',
        'Node.isSupported',
        'Node.lookupNamespaceURI',
        'Node.lookupPrefix',
        'Node.normalize',
        'Node.set:nodeValue',
        'NodeFilter.acceptNode',
        'NodeIterator.expandEntityReferences',
        'NodeIterator.filter',
        'NodeList.item',
        'ParentNode.append',
        'ParentNode.prepend',
        'RTCPeerConnection.generateCertificate',
        'ShadowRoot.getElementsByTagNameNS',
        'SVGElement.getPresentationAttribute',
        'SVGElementInstance.on:wheel',
        'Touch.get:webkitRadiusX',
        'Touch.get:webkitRadiusY',
        'Touch.get:webkitForce',
        'Touch.get:webkitRotationAngle',
        'WheelEvent.wheelDelta',
        'WheelEvent.wheelDeltaX',
        'WheelEvent.wheelDeltaY',
        'Window.on:wheel',
        'WindowEventHandlers.on:beforeUnload',
        'WorkerGlobalScope.webkitIndexedDB',
        'XMLHttpRequest.open',
        # TODO(jacobr): should these be removed?
        'Document.close',
        'Document.hasFocus',
    ])
# Manual dart: library name lookup.
# Maps an interface name to the lowercase dart: library it belongs to
# (e.g. 'web_audio' -> dart:web_audio) when the prefix heuristics in
# GetLibraryName cannot determine it.
_library_names = monitored.Dict(
    'htmlrenamer._library_names', {
        'ANGLEInstancedArrays': 'web_gl',
        'CHROMIUMSubscribeUniform': 'web_gl',
        'Database': 'web_sql',
        'Navigator': 'html',
        'Window': 'html',
        'AnalyserNode': 'web_audio',
        'AudioBufferCallback': 'web_audio',
        'AudioBuffer': 'web_audio',
        'AudioBufferSourceNode': 'web_audio',
        'AudioContext': 'web_audio',
        'AudioDestinationNode': 'web_audio',
        'AudioElement': 'web_audio',
        'AudioListener': 'web_audio',
        'AudioNode': 'web_audio',
        'AudioParam': 'web_audio',
        'AudioParamMap': 'web_audio',
        'AudioProcessingEvent': 'web_audio',
        'AudioScheduledSourceNode': 'web_audio',
        'AudioSourceNode': 'web_audio',
        'AudioTrack': 'web_audio',
        'AudioTrackList': 'web_audio',
        'AudioWorkletGlobalScope': 'web_audio',
        'AudioWorkletNode': 'web_audio',
        'AudioWorkletProcessor': 'web_audio',
        'BaseAudioContext': 'web_audio',
        'BiquadFilterNode': 'web_audio',
        'ChannelMergerNode': 'web_audio',
        'ChannelSplitterNode': 'web_audio',
        'ConstantSourceNode': 'web_audio',
        'ConvolverNode': 'web_audio',
        'DelayNode': 'web_audio',
        'DynamicsCompressorNode': 'web_audio',
        'GainNode': 'web_audio',
        'IIRFilterNode': 'web_audio',
        'MediaElementAudioSourceNode': 'web_audio',
        'MediaStreamAudioDestinationNode': 'web_audio',
        'MediaStreamAudioSourceNode': 'web_audio',
        'OfflineAudioCompletionEvent': 'web_audio',
        'OfflineAudioContext': 'web_audio',
        'OscillatorNode': 'web_audio',
        'PannerNode': 'web_audio',
        'PeriodicWave': 'web_audio',
        'ScriptProcessorNode': 'web_audio',
        'StereoPannerNode': 'web_audio',
        'WaveShaperNode': 'web_audio',
        'WindowWebAudio': 'web_audio',
    })
# Maps an interface name to the CamelCase library id (e.g. 'WebAudio') used
# by GetLibraryId; companion table to _library_names above.
_library_ids = monitored.Dict(
    # Fix: the monitored registry key was copy-pasted as
    # 'htmlrenamer._library_names', colliding with the dict above.
    'htmlrenamer._library_ids', {
        'ANGLEInstancedArrays': 'WebGl',
        'CHROMIUMSubscribeUniform': 'WebGl',
        'Database': 'WebSql',
        'Navigator': 'Html',
        'Window': 'Html',
        'AnalyserNode': 'WebAudio',
        'AudioBufferCallback': 'WebAudio',
        'AudioBuffer': 'WebAudio',
        'AudioBufferSourceNode': 'WebAudio',
        'AudioContext': 'WebAudio',
        'AudioDestinationNode': 'WebAudio',
        'AudioListener': 'WebAudio',
        'AudioNode': 'WebAudio',
        'AudioParam': 'WebAudio',
        'AudioProcessingEvent': 'WebAudio',
        'AudioSourceNode': 'WebAudio',
        'BiquadFilterNode': 'WebAudio',
        'ChannelMergerNode': 'WebAudio',
        'ChannelSplitterNode': 'WebAudio',
        'ConvolverNode': 'WebAudio',
        'DelayNode': 'WebAudio',
        'DynamicsCompressorNode': 'WebAudio',
        'GainNode': 'WebAudio',
        'IIRFilterNode': 'WebAudio',
        'MediaElementAudioSourceNode': 'WebAudio',
        'MediaStreamAudioDestinationNode': 'WebAudio',
        'MediaStreamAudioSourceNode': 'WebAudio',
        'OfflineAudioCompletionEvent': 'WebAudio',
        'OfflineAudioContext': 'WebAudio',
        'OscillatorNode': 'WebAudio',
        'PannerNode': 'WebAudio',
        'PeriodicWave': 'WebAudio',
        'ScriptProcessorNode': 'WebAudio',
        'StereoPannerNode': 'WebAudio',
        'WaveShaperNode': 'WebAudio',
        'WindowWebAudio': 'WebAudio',
    })
class HtmlRenamer(object):
    """Computes dart:html names for IDL interfaces and members.

    Consults the module-level rename/suppression tables
    (html_interface_renames, renamed_html_members, private_html_members,
    removed_html_members, ...) together with the IDL database and the
    metadata annotations.
    """

    def __init__(self, database, metadata):
        self._database = database  # IDL interface database.
        self._metadata = metadata  # Suppression/annotation metadata.

    def RenameInterface(self, interface):
        """Return the Dart name for an interface, or None if it is removed."""
        if 'Callback' in interface.ext_attrs:
            if interface.id in _removed_html_interfaces:
                return None
        candidate = self.RenameInterfaceId(interface.id)
        if candidate:
            return candidate
        if interface.id.startswith('HTML'):
            # Drop the 'HTML' prefix only for types whose hierarchy reaches
            # Element or Document (i.e. real element/document types).
            if any(interface.id in ['Element', 'Document']
                   for interface in self._database.Hierarchy(interface)):
                return interface.id[len('HTML'):]
        return self._DartifyName(interface.javascript_binding_name)

    def RenameInterfaceId(self, interface_id):
        """Return the explicit rename for an interface id, or None."""
        if interface_id in html_interface_renames:
            return html_interface_renames[interface_id]
        return None

    def isPrivate(self, interface, member):
        # Truthy when the member is listed in private_html_members for this
        # interface or any ancestor.
        return self._FindMatch(interface, member, '', private_html_members)

    def RenameMember(self,
                     interface_name,
                     member_node,
                     member,
                     member_prefix='',
                     dartify_name=True):
        """
        Returns the name of the member in the HTML library or None if the member is
        suppressed in the HTML library
        """
        interface = self._database.GetInterface(interface_name)
        if not member:
            # Fall back to the implementation name from the IDL annotation.
            if 'ImplementedAs' in member_node.ext_attrs:
                member = member_node.ext_attrs['ImplementedAs']
        if self.ShouldSuppressMember(interface, member, member_prefix):
            return None
        if 'CheckSecurity' in member_node.ext_attrs:
            return None
        name = self._FindMatch(interface, member, member_prefix,
                               renamed_html_members)
        target_name = renamed_html_members[name] if name else member
        if self._FindMatch(interface, member, member_prefix,
                           private_html_members):
            if not target_name.startswith('_'):  # e.g. _svgClassName
                target_name = '_' + target_name
        # Un-renamed webkit-prefixed members lose the prefix and get a
        # lowercased first letter.
        if not name and target_name.startswith('webkit'):
            target_name = member[len('webkit'):]
            target_name = target_name[:1].lower() + target_name[1:]
        if dartify_name:
            target_name = self._DartifyMemberName(target_name)
        return target_name

    def ShouldSuppressMember(self, interface, member, member_prefix=''):
        """ Returns true if the member should be suppressed."""
        if self._FindMatch(interface, member, member_prefix,
                           removed_html_members):
            return True
        if interface.id in _removed_html_interfaces:
            return True
        metadata_member = member
        if member_prefix == 'on:':
            # Metadata records event handlers as 'on<lowercased name>'.
            metadata_member = 'on' + metadata_member.lower()
        if self._metadata.IsSuppressed(interface, metadata_member):
            return True
        return False

    def ShouldSuppressInterface(self, interface):
        """ Returns true if the interface should be suppressed."""
        # NOTE: implicitly returns None (falsy) when not suppressed.
        if interface.id in _removed_html_interfaces:
            return True

    def _FindMatch(self, interface, member, member_prefix, candidates):
        """Return the candidates key matching this member, or None.

        Tries '<Id>.<member>', '<Id>.<prefix><member>' and '<Id>.*' — first
        against '=<Id>' (direct class only), then up the hierarchy.
        """
        def find_match(interface_id):
            member_name = interface_id + '.' + member
            if member_name in candidates:
                return member_name
            member_name = interface_id + '.' + member_prefix + member
            if member_name in candidates:
                return member_name
            member_name = interface_id + '.*'
            if member_name in candidates:
                return member_name

        # Check direct matches first
        match = find_match('=%s' % interface.id)
        if match:
            return match
        for interface in self._database.Hierarchy(interface):
            match = find_match(interface.id)
            if match:
                return match

    def GetLibraryName(self, interface):
        """Return the lowercase dart: library name (e.g. 'web_gl')."""
        # Some types have attributes merged in from many other interfaces.
        if interface.id in _library_names:
            return _library_names[interface.id]
        # Support for IDL conditional has been removed from indexed db, web_sql,
        # svg and web_gl so we can no longer determine the library based on conditional.
        # Use interface prefix to do that. web_audio interfaces have no common prefix
        # - all audio interfaces added to _library_names/_library_ids.
        if interface.id.startswith("IDB"):
            return 'indexed_db'
        if interface.id.startswith("SQL"):
            return 'web_sql'
        if interface.id.startswith("SVG"):
            return 'svg'
        if interface.id.startswith("WebGL") or interface.id.startswith("OES") \
                or interface.id.startswith("EXT") \
                or interface.id == "WebGL":  # Name of the synthesized class for WebGL constants.
            return 'web_gl'
        if interface.id in typed_array_renames:
            return 'typed_data'
        return 'html'

    def GetLibraryId(self, interface):
        """Return the CamelCase library id (e.g. 'WebGl')."""
        # Some types have attributes merged in from many other interfaces.
        if interface.id in _library_ids:
            return _library_ids[interface.id]
        # Support for IDL conditional has been removed from indexed db, web_sql,
        # svg and web_gl so we can no longer determine the library based on conditional.
        # Use interface prefix to do that. web_audio interfaces have no common prefix
        # - all audio interfaces added to _library_names/_library_ids.
        if interface.id.startswith("IDB"):
            return 'IndexedDb'
        if interface.id.startswith("SQL"):
            return 'WebSql'
        if interface.id.startswith("SVG"):
            return 'Svg'
        if interface.id.startswith("WebGL") or interface.id.startswith("OES") \
                or interface.id.startswith("EXT") \
                or interface.id == 'WebGL':  # Name of the synthesized class for WebGL constants.
            return 'WebGl'
        if interface.id in typed_array_renames:
            return 'TypedData'
        return 'Html'

    def DartifyTypeName(self, type_name):
        """Converts a DOM name to a Dart-friendly class name. """
        if type_name in html_interface_renames:
            return html_interface_renames[type_name]
        return self._DartifyName(type_name)

    def _DartifyName(self, dart_name):
        # Strip off any standard prefixes.
        name = re.sub(r'^SVG', '', dart_name)
        name = re.sub(r'^IDB', '', name)
        # Don't Strip the synthesized class name WebGL contains all rendering/draw constants.
        if name != 'WebGL':
            name = re.sub(r'^WebGL', '', name)
        name = re.sub(r'^WebKit', '', name)
        return self._CamelCaseName(name)

    def _DartifyMemberName(self, member_name):
        # Strip off any OpenGL ES suffixes.
        name = re.sub(r'OES$', '', member_name)
        return self._CamelCaseName(name)

    def _CamelCaseName(self, name):

        def toLower(match):
            return match.group(1) + match.group(2).lower() + match.group(3)

        # We're looking for a sequence of letters which start with capital letter
        # then a series of caps and finishes with either the end of the string or
        # a capital letter.
        # The [0-9] check is for names such as 2D or 3D
        # The following test cases should match as:
        #   WebKitCSSFilterValue: WebKit(C)(SS)(F)ilterValue
        #   XPathNSResolver: (X)()(P)ath(N)(S)(R)esolver (no change)
        #   IFrameElement: (I)()(F)rameElement (no change)
        return re.sub(r'([A-Z])([A-Z]{2,})([A-Z]|$)', toLower, name)
|
class Solution:
    def zigzagLevelOrder(self, root: Optional[TreeNode]) -> List[List[int]]:
        """Return node values level by level, alternating the direction
        (left-to-right, then right-to-left, ...).

        Bug fix: the original appended ``reversed(temp)`` — a one-shot
        iterator, not a list — so the result did not match the declared
        ``List[List[int]]`` and could only be consumed once.
        """
        queue = collections.deque()
        if root:
            queue.append(root)
        result = []
        left_to_right = True
        while queue:
            level = []
            # Drain exactly the nodes of the current level.
            for _ in range(len(queue)):
                node = queue.popleft()
                level.append(node.val)
                if node.left:
                    queue.append(node.left)
                if node.right:
                    queue.append(node.right)
            result.append(level if left_to_right else level[::-1])
            left_to_right = not left_to_right
        return result
|
import random
import csv
'''rate=0.2'''
def add_data(rate, load, save):
    """Oversample the positive class of a CSV file by random duplication.

    Reads the CSV at *load* (first line is treated as a header; the LAST
    column is an integer label, >0 meaning positive), duplicates randomly
    chosen positive rows until positives make up at least *rate* of all
    lines (header included, matching the original behaviour), then writes
    the augmented rows to *save*.

    Fixes vs. the original: files are closed via context managers, the
    debug prints are removed, and an empty positive class raises a clear
    ValueError instead of crashing inside random.randint.
    """
    with open(load) as src:
        # Each row keeps the trailing '\n' in its last field, so joining
        # with ',' reproduces the original line exactly.
        rows = [line.split(',') for line in src]

    positives = []
    negatives = []
    for row in rows[1:]:  # skip the header row
        if int(row[-1]) > 0:
            positives.append(row)
        else:
            negatives.append(row)

    if not positives and rate > 0:
        raise ValueError('no positive samples to oversample')

    # Duplicate random positive rows until the target ratio is reached.
    while len(positives) / float(len(rows)) < rate:
        pick = positives[random.randint(0, len(positives) - 1)]
        rows.append(pick)
        positives.append(pick)

    with open(save, 'w') as out:
        for row in rows:
            out.write(','.join(row))
|
import RPi.GPIO as GPIO
import time
# Pin Constants
LED = 23

GPIO.setmode(GPIO.BCM)
GPIO.setup(LED, GPIO.OUT)
#GPIO.setup(24, GPIO.IN, pull_up_down = GPIO.PUD_UP)

on = True
try:
    # Toggle the LED once per second until Ctrl-C.
    while True:
        if on:
            GPIO.output(LED, 0)  # fix: was lowercase `led` -> NameError
            on = False
        else:
            GPIO.output(LED, 1)
            on = True
        time.sleep(1)
except KeyboardInterrupt:
    print('Program terminated.')
finally:
    # Always release the GPIO pins, even on error.
    GPIO.cleanup()
|
from datetime import datetime
def date_to_mins(date):
    """Convert an 'MM/DD/YYYY' string to minutes, counted so that
    January 1st maps to 1440 (i.e. day-of-year * 1440).

    Returns -1 when the string does not parse as a valid date.
    """
    try:
        parsed = datetime.strptime(date, '%m/%d/%Y')
    except ValueError:
        return -1
    # strptime yields midnight, so day-of-year fully determines the value.
    return parsed.timetuple().tm_yday * 1440.0
|
from socket import *
import time
import re
import sys
import os
# Minimal HTTP file server: serves files from FILE_FOLDER for GET requests.
tcpSerPort = 80
server_ip = '127.0.0.1'
if len(sys.argv) != 3:
    print(sys.argv)
    print("Usage: python3 this.py server_ip port_num")
else:
    server_ip = sys.argv[1]
    tcpSerPort = int(sys.argv[2])
# Report the EFFECTIVE settings (defaults or command-line overrides); the
# original message claimed "default" even after parsing argv, and had typos.
print("Using server_ip %s and port number %d" % (server_ip, tcpSerPort))

BUFFER_SIZE = 4096
FILE_FOLDER = "File/"
THIS_SERVER_IP_PORT_NUM = '127.0.0.1:%d' % tcpSerPort

# Using regular expression to parse certain fields from the request and response
httpres = re.compile(r'HTTP\/[1-9]\.[0-9] (?P<RESCODE>[0-9][0-9][0-9])[.|\s|\S]*Date\:\s(?P<DATE>[.|\s|\S]*GMT)\s[.|\s|\S]*')
http_request_rule_for_normal_GET = re.compile(r'[.|\s|\S]*GET \/(?P<GET>[a-z|A-Z|\.]*)[.|\s|\S]*')

with socket(AF_INET, SOCK_STREAM) as tcpSerSock:
    # Prepare a server socket
    tcpSerSock.bind((server_ip, tcpSerPort))
    # Listen to at most 5 connection at a time
    tcpSerSock.listen(5)
    while True:
        print("Ready to serve.")
        tcpCLISock, addr = tcpSerSock.accept()
        print("Received a connection from ", addr)
        message = tcpCLISock.recv(BUFFER_SIZE).decode()
        requested_item = None
        tmp_match = http_request_rule_for_normal_GET.match(message)
        if tmp_match:
            requested_item = tmp_match.group('GET')
            print(requested_item)
        if requested_item != '':
            try:
                with open(FILE_FOLDER + requested_item, "r") as f:
                    buffer_lines = f.readlines()
                # HTTP requires CRLF line endings; the header block must end
                # with an empty CRLF line (fix: was b"\n\n").
                response_header = b"HTTP/1.1 200 OK\r\n"
                response_header += b"Content-Type: text/html\r\n"
                response_header += b"Content-Disposition: attachment\r\n"
                response_header += ("Content-Length: %d\r\n" % os.path.getsize(FILE_FOLDER + requested_item)).encode()
                response_header += b"\r\n"
                # No bytes after the body: Content-Length must match exactly
                # (fix: the original appended a stray b"\n\n").
                response_body = b"".join(line.encode() for line in buffer_lines)
                tcpCLISock.sendall(response_header + response_body)
            except FileNotFoundError:
                print("No such file")
                tcpCLISock.sendall(b"HTTP/1.1 404 Not Found\r\n\r\n")
            except Exception as e:
                print(e)
                tcpCLISock.sendall(b"HTTP/1.1 404 Not Found\r\n\r\n")
            finally:
                tcpCLISock.close()
        else:
            tcpCLISock.close()
|
import dataclasses
import os
import typing
import requests
# Optional Qiita API token; unauthenticated requests still work but are
# subject to stricter rate limits.
access_token = os.getenv('QIITA_ACCESS_TOKEN', None)
headers = {}
if access_token:
    # Shared by every request helper below.
    headers.update({
        "Authorization": f"Bearer {access_token}"
    })
@dataclasses.dataclass
class User:
    """A Qiita user record as returned by the v2 REST API.

    Field names mirror the API's JSON keys so instances can be built with
    ``User(**json_dict)``.
    """
    description: str
    facebook_id: str
    followees_count: int
    followers_count: int
    github_login_name: str
    id: str
    items_count: int
    linkedin_id: str
    location: str
    name: str
    organization: str
    permanent_id: int
    profile_image_url: str
    twitter_screen_name: str
    website_url: str
@dataclasses.dataclass
class ItemTag:
    """A tag attached to an Item, as embedded in the API's item JSON."""
    name: str
    versions: typing.List[str]
@dataclasses.dataclass
class Item:
    """A Qiita article ('item') as returned by the v2 REST API.

    Field names mirror the API's JSON keys so instances can be built with
    ``Item(**json_dict)``; nested objects are coerced in ``__post_init__``.
    """
    rendered_body: str
    body: str
    coediting: bool
    comments_count: int
    created_at: str
    group: typing.Dict
    id: str
    likes_count: int
    private: bool
    reactions_count: int
    tags: typing.List[ItemTag]
    title: str
    updated_at: str
    url: str
    user: User
    page_views_count: int

    def __post_init__(self):
        # The API returns nested JSON dicts; coerce them into dataclasses so
        # attribute access works uniformly on fetched items.
        if isinstance(self.user, dict):
            self.user = User(**self.user)
        self.tags = [ItemTag(**tag) if isinstance(tag, dict) else tag for tag in self.tags]
@dataclasses.dataclass
class Tag:
    """A Qiita tag record as returned by the v2 REST API."""
    followers_count: int
    icon_url: str
    id: str
    items_count: int
def get_items(per_page=10):
    """Fetch the newest public items (articles) from the Qiita v2 API."""
    resp = requests.get(
        'https://qiita.com/api/v2/items',
        headers=headers,
        params={'per_page': per_page},
    )
    return [Item(**entry) for entry in resp.json()]
def get_user_items(user_name, per_page=10):
    """Fetch the given user's items from the Qiita v2 API."""
    url = f'https://qiita.com/api/v2/users/{user_name}/items'
    resp = requests.get(url, headers=headers, params={'per_page': per_page})
    return [Item(**entry) for entry in resp.json()]
def get_tag_items(tag_name, per_page=10):
    """Fetch items carrying the given tag from the Qiita v2 API."""
    url = f'https://qiita.com/api/v2/tags/{tag_name}/items'
    resp = requests.get(url, headers=headers, params={'per_page': per_page})
    return [Item(**entry) for entry in resp.json()]
def get_tag(tag_name):
    """Fetch a single tag record from the Qiita v2 API."""
    resp = requests.get(
        f'https://qiita.com/api/v2/tags/{tag_name}',
        headers=headers,
    )
    return Tag(**resp.json())
|
# -*- coding: utf-8 -*-
"""
##############################################################################
The calculation of weighted holistic invariant molecular (WHIM) descriptors.
You can get 70 molecular descriptors. You can freely use and distribute it.
If you have any problems, please contact us promptly!
Authors: Dongsheng Cao and Yizeng Liang, Qingsong Xu
Date: 2011.04.19
Email: oriental-cds@163.com
##############################################################################
"""
import numpy as np
import scipy
import scipy.linalg

from .AtomProperty import GetRelativeAtomicProperty
from .GeoOpt import _ReadCoordinates
# Module version tag.
Version = 1.0
#############################################################################
# Base name of the geometry file read via GeoOpt._ReadCoordinates.
_filename = 'temp'
def GetAtomCoordinateMatrix():
    """
    #################################################################
    Get the atom coordinate matrix (nAtom x 3) and the atom labels.

    Fix: uses numpy directly — scipy.zeros/scipy.matrix were deprecated
    NumPy aliases removed from modern SciPy.
    #################################################################
    """
    ChargeCoordinates = _ReadCoordinates(_filename)
    nAtom = len(ChargeCoordinates)
    CoordinateMatrix = np.zeros((nAtom, 3))
    AtomLabel = []
    for i, row in enumerate(ChargeCoordinates):
        # row = [label, x, y, z, ...]
        CoordinateMatrix[i, :] = [row[1], row[2], row[3]]
        AtomLabel.append(row[0])
    return np.asmatrix(CoordinateMatrix), AtomLabel
def XPreCenter(X):
    """
    #################################################################
    Center the data matrix X (subtract the column means).

    Fix: uses numpy directly — scipy.size/mean/matrix/ones were deprecated
    NumPy aliases removed from modern SciPy.
    #################################################################
    """
    nRows = np.size(X, axis=0)
    colMean = np.asmatrix(np.mean(X, axis=0))
    # ndarray * matrix broadcasts via matrix multiplication, giving an
    # nRows x nCols matrix of column means (same as the original code).
    return X - np.ones((nRows, 1)) * colMean
def GetPropertyMatrix(AtomLabel, proname='m'):
    """
    #################################################################
    Build the diagonal atomic-property weight matrix for the given
    property name (e.g. 'm' for relative atomic mass).

    Fix: uses numpy directly — scipy.matrix/scipy.diag were deprecated
    NumPy aliases removed from modern SciPy.
    #################################################################
    """
    values = [GetRelativeAtomicProperty(label, proname) for label in AtomLabel]
    return np.asmatrix(np.diag(values))
def GetSVDEig(CoordinateMatrix, AtomLabel, proname='u'):
    """
    #################################################################
    Singular values of the (weighted) covariance of the centered
    coordinates; 'u' means unweighted (identity weight matrix).

    Fix: uses numpy for eye/diag — the scipy top-level aliases were
    removed from modern SciPy. scipy.linalg.svd is still used.
    #################################################################
    """
    nAtom, kc = CoordinateMatrix.shape
    if proname == 'u':
        weight = np.asmatrix(np.eye(nAtom))
    else:
        weight = GetPropertyMatrix(AtomLabel, proname)
    S = XPreCenter(CoordinateMatrix)
    u, s, v = scipy.linalg.svd(S.T * weight * S / sum(np.diag(weight)))
    return s
def GetWHIM1(CoordinateMatrix, AtomLabel, proname='u'):
    """
    #################################################################
    WHIM descriptor --->L1u: first principal-axis eigenvalue.
    #################################################################
    """
    eig = GetSVDEig(CoordinateMatrix, AtomLabel, proname)
    return round(eig[0], 3)
def GetWHIM2(CoordinateMatrix, AtomLabel, proname='u'):
    """
    #################################################################
    WHIM descriptor --->L2u: second principal-axis eigenvalue.
    #################################################################
    """
    eig = GetSVDEig(CoordinateMatrix, AtomLabel, proname)
    return round(eig[1], 3)
def GetWHIM3(CoordinateMatrix, AtomLabel, proname='u'):
    """
    #################################################################
    WHIM descriptor --->L3u: third principal-axis eigenvalue.
    #################################################################
    """
    eig = GetSVDEig(CoordinateMatrix, AtomLabel, proname)
    return round(eig[2], 3)
def GetWHIM4(CoordinateMatrix, AtomLabel, proname='u'):
    """
    #################################################################
    WHIM descriptor --->Tu: total size (sum of the eigenvalues).
    #################################################################
    """
    eig = GetSVDEig(CoordinateMatrix, AtomLabel, proname)
    return round(sum(eig), 3)
def GetWHIM5(CoordinateMatrix, AtomLabel, proname='u'):
    """
    #################################################################
    WHIM descriptor --->Au: sum of the pairwise eigenvalue products.
    #################################################################
    """
    eig = GetSVDEig(CoordinateMatrix, AtomLabel, proname)
    pairSum = eig[0] * eig[1] + eig[0] * eig[2] + eig[1] * eig[2]
    return round(pairSum, 3)
def GetWHIM6(CoordinateMatrix, AtomLabel, proname='u'):
    """
    #################################################################
    WHIM descriptor --->Vu: V = A + T + L1*L2*L3.
    #################################################################
    """
    eig = GetSVDEig(CoordinateMatrix, AtomLabel, proname)
    pairSum = eig[0] * eig[1] + eig[0] * eig[2] + eig[1] * eig[2]
    total = sum(eig)
    volume = pairSum + total + eig[0] * eig[1] * eig[2]
    return round(volume, 3)
def GetWHIM7(CoordinateMatrix, AtomLabel, proname='u'):
    """
    #################################################################
    WHIM descriptor --->P1u: first eigenvalue proportion.
    #################################################################
    """
    eig = GetSVDEig(CoordinateMatrix, AtomLabel, proname)
    denom = eig[0] + eig[1] + eig[2]
    return round(eig[0] / denom, 3)
def GetWHIM8(CoordinateMatrix, AtomLabel, proname='u'):
    """
    #################################################################
    WHIM descriptor --->P2u: second eigenvalue proportion.
    #################################################################
    """
    eig = GetSVDEig(CoordinateMatrix, AtomLabel, proname)
    denom = eig[0] + eig[1] + eig[2]
    return round(eig[1] / denom, 3)
def GetWHIM9(CoordinateMatrix, AtomLabel, proname='u'):
    """
    #################################################################
    WHIM descriptor --->Ku: acentric factor,
    Ku = 3/4 * sum(|L_i / sum(L) - 1/3|).
    #################################################################
    """
    eig = GetSVDEig(CoordinateMatrix, AtomLabel, proname)
    total = sum(eig)
    acc = 0.0
    for val in eig:
        acc += abs(val / total - 1 / 3.0)
    return round(3.0 / 4 * acc, 3)
def GetWHIM10(CoordinateMatrix, AtomLabel, proname='u'):
    """
    #################################################################
    WHIM descriptor --->E1u: density along the 1st principal axis.

    Fix: uses numpy for eye/diag/power/matrix — the scipy top-level
    aliases were removed from modern SciPy.
    #################################################################
    """
    nAtom, kc = CoordinateMatrix.shape
    if proname == 'u':
        weight = np.asmatrix(np.eye(nAtom))
    else:
        weight = GetPropertyMatrix(AtomLabel, proname)
    S = XPreCenter(CoordinateMatrix)
    u, s, v = scipy.linalg.svd(S.T * weight * S / sum(np.diag(weight)))
    res = np.power(s[0], 2) * nAtom / sum(np.power(S * np.asmatrix(u[:, 0]).T, 4))
    return round(float(res.real), 3)
def GetWHIM11(CoordinateMatrix, AtomLabel, proname='u'):
    """
    #################################################################
    WHIM descriptor --->E2u: density along the 2nd principal axis.

    Fix: uses numpy for eye/diag/power/matrix — the scipy top-level
    aliases were removed from modern SciPy.
    #################################################################
    """
    nAtom, kc = CoordinateMatrix.shape
    if proname == 'u':
        weight = np.asmatrix(np.eye(nAtom))
    else:
        weight = GetPropertyMatrix(AtomLabel, proname)
    S = XPreCenter(CoordinateMatrix)
    u, s, v = scipy.linalg.svd(S.T * weight * S / sum(np.diag(weight)))
    res = np.power(s[1], 2) * nAtom / sum(np.power(S * np.asmatrix(u[:, 1]).T, 4))
    return round(float(res.real), 3)
def GetWHIM12(CoordinateMatrix, AtomLabel, proname='u'):
    """
    #################################################################
    WHIM descriptor --->E3u: density along the 3rd principal axis.

    Fix: uses numpy for eye/diag/power/matrix — the scipy top-level
    aliases were removed from modern SciPy.
    #################################################################
    """
    nAtom, kc = CoordinateMatrix.shape
    if proname == 'u':
        weight = np.asmatrix(np.eye(nAtom))
    else:
        weight = GetPropertyMatrix(AtomLabel, proname)
    S = XPreCenter(CoordinateMatrix)
    u, s, v = scipy.linalg.svd(S.T * weight * S / sum(np.diag(weight)))
    res = np.power(s[2], 2) * nAtom / sum(np.power(S * np.asmatrix(u[:, 2]).T, 4))
    return round(float(res.real), 3)
def GetWHIM13(CoordinateMatrix, AtomLabel, proname='u'):
    """
    #################################################################
    WHIM descriptor --->Du: total density, Du = E1u + E2u + E3u.
    #################################################################
    """
    e1 = GetWHIM10(CoordinateMatrix, AtomLabel, proname)
    e2 = GetWHIM11(CoordinateMatrix, AtomLabel, proname)
    e3 = GetWHIM12(CoordinateMatrix, AtomLabel, proname)
    return round(float(e1 + e2 + e3), 3)
def GetWHIM14(CoordinateMatrix, AtomLabel, proname='u'):
    """
    #################################################################
    WHIM descriptors
    --->P3u (share of the third singular value)
    #################################################################
    """
    eigvals = GetSVDEig(CoordinateMatrix, AtomLabel, proname)
    fraction = eigvals[2] / (eigvals[0] + eigvals[1] + eigvals[2])
    return round(fraction, 3)
###########################################################################
def GetWHIMUnweighted():
    """
    #################################################################
    Wrapper for the unweighted WHIM descriptors.
    #################################################################
    """
    coords, labels = GetAtomCoordinateMatrix()
    # Descriptor-name prefix paired with its calculator, in output order.
    calculators = (
        ('L1', GetWHIM1), ('L2', GetWHIM2), ('L3', GetWHIM3),
        ('T', GetWHIM4), ('A', GetWHIM5), ('V', GetWHIM6),
        ('P1', GetWHIM7), ('P2', GetWHIM8), ('K', GetWHIM9),
        ('E1', GetWHIM10), ('E2', GetWHIM11), ('E3', GetWHIM12),
        ('D', GetWHIM13), ('P3', GetWHIM14),
    )
    return {prefix + 'u': calc(coords, labels, proname='u')
            for prefix, calc in calculators}
def GetWHIMMass():
    """
    #################################################################
    Wrapper for the WHIM descriptors based on atomic mass.
    #################################################################
    """
    coords, labels = GetAtomCoordinateMatrix()
    # Descriptor-name prefix paired with its calculator, in output order.
    calculators = (
        ('L1', GetWHIM1), ('L2', GetWHIM2), ('L3', GetWHIM3),
        ('T', GetWHIM4), ('A', GetWHIM5), ('V', GetWHIM6),
        ('P1', GetWHIM7), ('P2', GetWHIM8), ('K', GetWHIM9),
        ('E1', GetWHIM10), ('E2', GetWHIM11), ('E3', GetWHIM12),
        ('D', GetWHIM13), ('P3', GetWHIM14),
    )
    return {prefix + 'm': calc(coords, labels, proname='m')
            for prefix, calc in calculators}
def GetWHIMSandersonElectronegativity():
    """
    #################################################################
    Wrapper for the WHIM descriptors based on Sanderson Electronegativity.
    #################################################################
    """
    coords, labels = GetAtomCoordinateMatrix()
    # Descriptor-name prefix paired with its calculator, in output order.
    calculators = (
        ('L1', GetWHIM1), ('L2', GetWHIM2), ('L3', GetWHIM3),
        ('T', GetWHIM4), ('A', GetWHIM5), ('V', GetWHIM6),
        ('P1', GetWHIM7), ('P2', GetWHIM8), ('K', GetWHIM9),
        ('E1', GetWHIM10), ('E2', GetWHIM11), ('E3', GetWHIM12),
        ('D', GetWHIM13), ('P3', GetWHIM14),
    )
    return {prefix + 'e': calc(coords, labels, proname='En')
            for prefix, calc in calculators}
def GetWHIMVDWVolume():
    """
    #################################################################
    Wrapper for the WHIM descriptors based on VDW Volume.
    #################################################################
    """
    coords, labels = GetAtomCoordinateMatrix()
    # Descriptor-name prefix paired with its calculator, in output order.
    calculators = (
        ('L1', GetWHIM1), ('L2', GetWHIM2), ('L3', GetWHIM3),
        ('T', GetWHIM4), ('A', GetWHIM5), ('V', GetWHIM6),
        ('P1', GetWHIM7), ('P2', GetWHIM8), ('K', GetWHIM9),
        ('E1', GetWHIM10), ('E2', GetWHIM11), ('E3', GetWHIM12),
        ('D', GetWHIM13), ('P3', GetWHIM14),
    )
    return {prefix + 'v': calc(coords, labels, proname='V')
            for prefix, calc in calculators}
def GetWHIMPolarizability():
    """
    #################################################################
    Wrapper for the WHIM descriptors based on Polarizability.
    #################################################################
    """
    coords, labels = GetAtomCoordinateMatrix()
    # Descriptor-name prefix paired with its calculator, in output order.
    calculators = (
        ('L1', GetWHIM1), ('L2', GetWHIM2), ('L3', GetWHIM3),
        ('T', GetWHIM4), ('A', GetWHIM5), ('V', GetWHIM6),
        ('P1', GetWHIM7), ('P2', GetWHIM8), ('K', GetWHIM9),
        ('E1', GetWHIM10), ('E2', GetWHIM11), ('E3', GetWHIM12),
        ('D', GetWHIM13), ('P3', GetWHIM14),
    )
    return {prefix + 'p': calc(coords, labels, proname='alapha')
            for prefix, calc in calculators}
def GetWHIM():
    """
    #################################################################
    Wrapper for the WHIM descriptors.
    #################################################################
    """
    coords, labels = GetAtomCoordinateMatrix()
    # Thirteen core descriptors computed per weighting scheme.
    core = (
        ('L1', GetWHIM1), ('L2', GetWHIM2), ('L3', GetWHIM3),
        ('T', GetWHIM4), ('A', GetWHIM5), ('V', GetWHIM6),
        ('P1', GetWHIM7), ('P2', GetWHIM8), ('K', GetWHIM9),
        ('E1', GetWHIM10), ('E2', GetWHIM11), ('E3', GetWHIM12),
        ('D', GetWHIM13),
    )
    # Key suffix paired with the weighting scheme name passed as proname.
    weights = (('u', 'u'), ('m', 'm'), ('e', 'En'), ('v', 'V'), ('p', 'alapha'))
    res = {}
    # Preserve the historical insertion order: P3p is emitted with the 'p'
    # group, the remaining P3 descriptors are appended at the very end.
    for suffix, proname in weights:
        for prefix, calc in core:
            res[prefix + suffix] = calc(coords, labels, proname=proname)
        if suffix == 'p':
            res['P3p'] = GetWHIM14(coords, labels, proname='alapha')
    for suffix, proname in weights[:4]:
        res['P3' + suffix] = GetWHIM14(coords, labels, proname=proname)
    return res
def _GetHTMLDoc():
    """
    #################################################################
    Write HTML documentation for this module.
    #################################################################
    """
    from pydoc import writedoc
    writedoc('whim')
#############################################################################
|
import xml.etree.ElementTree as xml
from xml.dom import minidom
import os
import sys
def custom_package_xml_generator(directory, packagename=None, version='45.0', filename='package.xml'):
    """Create a Salesforce package.xml manifest from a metadata directory.

    Scans the immediate children of *directory*; every child whose name is
    a known metadata folder contributes a ``<types>`` entry with a wildcard
    ``<members>``.  The pretty-printed manifest is written to
    ``directory/filename``.

    Parameters:
        directory: path whose sub-directories name the metadata to include.
        packagename: optional package ``<fullName>``.
        version: Salesforce API version for the ``<version>`` element.
        filename: name of the manifest file created inside *directory*.
    """
    METADATA_TYPE = {
        'applications':'CustomApplication', 'aura':'AuraDefinitionBundle', 'classes':'ApexClass', 'customPermissions':'CustomPermission',
        'flexipages':'FlexiPage', 'flows':'Flow', 'globalValueSets':'GlobalValueSet', 'labels':'CustomLabels', 'layouts':'Layout',
        'lwc': 'LightningComponentBundle', 'objects':'CustomObject', 'pages':'ApexPage', 'permissionsets':'PermissionSet', 'profiles':'Profile',
        'staticresources':'StaticResource', 'tabs':'CustomTab', 'triggers':'ApexTrigger', 'contentassets':'ContentAsset', 'pathAssistants':'PathAssistant',
        'quickActions':'QuickAction', 'remoteSiteSettings':'RemoteSiteSetting', 'workflows':'Workflow', 'dashboards':'Dashboard', 'reports':'Report',
        'cspTrustedSites':'CspTrustedSite',
    }
    # Non-implemented Metadata:
    # 'ApexComponent', 'CustomMetadata' (needs custom manipulation), 'CustomObjectTranslation',
    # 'DuplicateRule', 'FlowCategory', 'GlobalValueSetTranslation', 'MatchingRules',
    nested_mdt_object = ['ValidationRule', 'CompactLayout', 'ListView', 'SharingReason', 'RecordType']
    nested_mdt_workflow = ['WorkflowFieldUpdate', 'WorkflowKnowledgePublish', 'WorkflowTask', 'WorkflowAlert', 'WorkflowSend', 'WorkflowOutboundMessage', 'WorkflowRule']
    # Start our xml structure.
    root = xml.Element('Package')
    root.set('xmlns', 'http://soap.sforce.com/2006/04/metadata')

    def _add_type(type_name):
        """Append a <types> entry with a wildcard <members> for *type_name*."""
        etype = xml.SubElement(root, 'types')
        xml.SubElement(etype, 'name').text = str(type_name)
        xml.SubElement(etype, 'members').text = '*'

    for mdtype in os.listdir(directory):
        if mdtype in METADATA_TYPE:
            _add_type(METADATA_TYPE[mdtype])
        # Components nested inside object/workflow files need their own entries.
        if mdtype == 'objects':
            for nested in nested_mdt_object:
                _add_type(nested)
        if mdtype == 'workflows':
            for nested in nested_mdt_workflow:
                _add_type(nested)
        # Custom behavior for custom labels: individual labels are their own type.
        if mdtype == 'labels':
            _add_type('CustomLabel')
    # Add the final xml node package.api_version.
    eversion = xml.SubElement(root, 'version')
    eversion.text = str(version)
    # Package name, when requested.
    if packagename is not None:
        efname = xml.SubElement(root, 'fullName')
        efname.text = str(packagename)
    # Pretty format via minidom (ElementTree gained indent() only in 3.9).
    reparsed = minidom.parseString(xml.tostring(root))
    prettyxml = reparsed.toprettyxml(indent=' ', newl='\n', encoding='UTF-8')
    # Generate xml file from string.
    try:
        with open(os.path.join(directory, filename), "bw") as xml_file:
            xml_file.write(prettyxml)
    except IOError:
        # Best-effort: an unwritable target directory is silently skipped.
        pass
if __name__ == '__main__':
    # Usage: script.py [directory [packagename]]
    args = sys.argv[1:]
    if args:
        # Bug fix: a single directory argument used to raise IndexError
        # because args[1] was accessed unconditionally.
        custom_package_xml_generator(args[0], args[1] if len(args) > 1 else None)
    else:
        custom_package_xml_generator('src')
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 11 20:27:24 2016
@author: Srinivas
"""
def testing_data(x, *y=1):
print x, 'is also'
for i in y:
print i
|
#!/usr/bin/env python3
import dadi
import numpy as np
import matplotlib.pyplot as plt
def bottleneck(params, ns, pts):
    """
    Model 2
    -------
    Bottleneck followed by growth.
    Parameters
    ----------
    nu0: Relative size of pop after bottleneck.
    T: Time of bottleneck.
    F: Inbreeding coefficient (0 < F < 1).
    """
    nu0, T, F = params
    # Clamp the inbreeding coefficient into the open interval (0, 1).
    if F <= 0.0:
        F = 1e-4
    elif F >= 1.0:
        F = 1.0 - 1e-4
    grid = dadi.Numerics.default_grid(pts)
    phi = dadi.PhiManip.phi_1D(grid)
    # Exponential size trajectory from nu0 back to 1 over time T.
    size_at = lambda t: nu0 * (1.0 / nu0) ** (t / T)
    phi = dadi.Integration.one_pop(phi, grid, T, size_at)
    return dadi.Spectrum.from_phi_inbreeding(phi, ns, (grid,), (F,), (2,))
if __name__ == "__main__":
    theta = 10000
    pts_l = [70, 80, 90]
    func_ex = dadi.Numerics.make_extrap_log_func(bottleneck)
    # Compare the dadi expectation against SLiM simulation means for a
    # range of inbreeding coefficients.  The bottleneck time is rescaled
    # by (1+F) to match SLiM's inbreeding-adjusted generations.
    for F in (0.1, 0.25, 0.5, 0.75, 0.9):
        expected = theta / (1 + F) * func_ex([0.25, 0.2 * (1 + F), F], [50], pts_l)
        observed = dadi.Spectrum.from_file("SLiM_F{}_mean_bottleneck.fs".format(F))
        rmsd = np.sqrt(np.mean((expected - observed) ** 2))
        print("F={} RMSD: {}".format(F, rmsd))
        print("F={} RRMSD: {}\n".format(F, rmsd / np.sum(observed)))
        plt.plot(expected, '-ob')
        plt.plot(observed, '-og')
        plt.show()
|
# Sentinel "infinity" bounds used by the alpha-beta search in
# GameBoard.minimax / GameBoard.best_move.
inf = 999999
negInf = -999999
import os
class GameBoard():
    """A 6-row x 7-column Connect-Four board with minimax computer play.

    Cells hold ' ' (empty), 'X' (human) or 'O' (computer).  Row 0 is the
    top of the board; pieces stack upward from row 5.  ``self.position``
    remembers the last move as ``[row, col]``; the win checks scan the
    lines through that cell.

    Bug fix applied throughout: cell and boolean comparisons previously
    used ``is`` (identity), which only worked because CPython interns
    one-character strings; they now use ``==`` / plain truthiness.
    """
    def __init__(self):
        self.board = [[' ' for i in range(0, 7)] for j in range(0, 6)]
        self.position = []
    def show_board(self):
        """Clear the terminal and render the board with column labels."""
        # 'cls' on Windows, 'clear' everywhere else.
        os.system(['clear', 'cls'][os.name == 'nt'])
        print(self.board[0])
        print(self.board[1])
        print(self.board[2])
        print(self.board[3])
        print(self.board[4])
        print(self.board[5])
        print("-----------------------------------")
        print("| 0 || 1 || 2 || 3 || 4 || 5 || 6 |")
    def moves_left(self):
        """Return True while at least one cell is still empty."""
        for row in range(0, 6):
            for col in range(0, 7):
                if self.board[row][col] == ' ':
                    return True
        return False
    def is_horizontal(self):
        """Return True if the row of the last move holds four in a row."""
        x_count = 0
        o_count = 0
        row = self.position[0]
        for i in range(0, 7):
            if self.board[row][i] == 'X':
                o_count = 0
                x_count += 1
            elif self.board[row][i] == 'O':
                x_count = 0
                o_count += 1
            else:
                x_count = 0
                o_count = 0
            if x_count == 4 or o_count == 4:
                return True
        return False
    def is_vertical(self):
        """Return True if the column of the last move holds four in a row."""
        x_count = 0
        o_count = 0
        col = self.position[1]
        for i in range(5, -1, -1):
            if self.board[i][col] == 'X':
                o_count = 0
                x_count += 1
            elif self.board[i][col] == 'O':
                x_count = 0
                o_count += 1
            else:
                x_count = 0
                o_count = 0
            if x_count == 4 or o_count == 4:
                return True
        return False
    def is_LR_diagonal(self):
        """Return True if the '/' diagonal through the last move has four in a row."""
        x_count = 0
        o_count = 0
        row = self.position[0]
        col = self.position[1]
        # Walk down-left to the start of the diagonal...
        while row != 5 and col != 0:
            row += 1
            col -= 1
        # ...then scan up-right, counting consecutive marks.
        while row != -1 and col != 7:
            if self.board[row][col] == 'X':
                o_count = 0
                x_count += 1
            elif self.board[row][col] == 'O':
                x_count = 0
                o_count += 1
            else:
                x_count = 0
                o_count = 0
            if x_count == 4 or o_count == 4:
                return True
            row -= 1
            col += 1
        return False
    def is_RL_diagonal(self):
        """Return True if the '\\' diagonal through the last move has four in a row."""
        x_count = 0
        o_count = 0
        row = self.position[0]
        col = self.position[1]
        # Walk up-left to the start of the diagonal...
        while row != 0 and col != 0:
            row -= 1
            col -= 1
        # ...then scan down-right, counting consecutive marks.
        while row != 6 and col != 7:
            if self.board[row][col] == 'X':
                o_count = 0
                x_count += 1
            elif self.board[row][col] == 'O':
                x_count = 0
                o_count += 1
            else:
                x_count = 0
                o_count = 0
            if x_count == 4 or o_count == 4:
                return True
            row += 1
            col += 1
        return False
    def is_goal(self):
        """Return True when the last move completed four in a row anywhere."""
        return (self.is_horizontal() or self.is_vertical() or
                self.is_LR_diagonal() or self.is_RL_diagonal())
    def insert(self, char, col):
        """Drop *char* into *col*; return False when the column is full."""
        for i in range(5, -1, -1):
            if self.board[i][col] == ' ':
                self.board[i][col] = char
                self.position = [i, col]
                return True
        return False
    def possible_moves(self):
        """Return the [row, col] landing cell for every non-full column."""
        moves = []
        for col in range(0, 7):
            for row in range(5, -1, -1):
                if self.board[row][col] == ' ':
                    moves.append([row, col])
                    break
        return moves
    def check_streak(self, char, streak):
        """Count runs of *char* of length >= *streak* over the whole board."""
        count = 0
        for row in range(0, 6):
            for col in range(0, 7):
                if self.board[row][col] == char:
                    count += self.vertical_streak(row, col, streak)
                    count += self.horizontal_streak(row, col, streak)
                    count += self.diagonal_streak(row, col, streak)
        return count
    def vertical_streak(self, row, col, streak):
        """Return 1 if a downward run from (row, col) reaches *streak*, else 0."""
        consecutiveCount = 0
        for i in range(row, 6):
            if self.board[i][col] == self.board[row][col]:
                consecutiveCount += 1
            else:
                break
        return 1 if consecutiveCount >= streak else 0
    def horizontal_streak(self, row, col, streak):
        """Return 1 if a rightward run from (row, col) reaches *streak*, else 0."""
        consecutiveCount = 0
        for j in range(col, 7):
            if self.board[row][j] == self.board[row][col]:
                consecutiveCount += 1
            else:
                break
        return 1 if consecutiveCount >= streak else 0
    def diagonal_streak(self, row, col, streak):
        """Return how many (0-2) diagonal runs from (row, col) reach *streak*."""
        total = 0
        consecutiveCount = 0
        j = col
        # Down-right diagonal.
        for i in range(row, 6):
            if j > 6:
                break
            elif self.board[i][j] == self.board[row][col]:
                consecutiveCount += 1
            else:
                break
            j += 1
        if consecutiveCount >= streak:
            total += 1
        consecutiveCount = 0
        j = col
        # Up-right diagonal.
        for i in range(row, -1, -1):
            if j > 6:
                break
            elif self.board[i][j] == self.board[row][col]:
                consecutiveCount += 1
            else:
                break
            j += 1
        if consecutiveCount >= streak:
            total += 1
        return total
    def minimax(self, depth, is_human, alpha, beta):
        """Alpha-beta minimax evaluation of the current position.

        The human ('X') is the minimising player, the computer ('O') the
        maximising one.  Search is cut off at depth 5.
        """
        score = 0
        if is_human:
            # The previous (computer) move may already have won.
            if self.is_goal():
                return 10000 - depth
            score -= 100 * self.check_streak('X', 3)
            score -= 2 * self.check_streak('X', 2)
        else:
            if self.is_goal():
                return -10000 - depth
            score += 100 * self.check_streak('O', 3)
            score += 2 * self.check_streak('O', 2)
        if not self.moves_left():
            return score - depth
        if depth == 5:
            return score - depth
        moves = self.possible_moves()
        if is_human:
            v = inf
            for move in moves:
                row = move[0]
                col = move[1]
                if self.board[row][col] == ' ':
                    self.board[row][col] = 'X'
                    v = min(v, self.minimax(depth + 1, False, alpha, beta))
                    self.board[row][col] = ' '
                    if v <= alpha:
                        return v
                    beta = min(beta, v)
        else:
            v = negInf
            for move in moves:
                row = move[0]
                col = move[1]
                if self.board[row][col] == ' ':
                    self.board[row][col] = 'O'
                    v = max(v, self.minimax(depth + 1, True, alpha, beta))
                    self.board[row][col] = ' '
                    if v >= beta:
                        return v
                    alpha = max(alpha, v)
        # NOTE(review): adding the heuristic score to the subtree value is
        # unusual for minimax but preserved as original behaviour.
        return score + v
    def best_move(self):
        """Return the computer's best [row, col] move from the current position."""
        bestMove = []
        bestVal = -1000
        for move in self.possible_moves():
            row = move[0]
            col = move[1]
            if self.board[row][col] == ' ':
                self.board[row][col] = 'O'
                tempVal = self.minimax(1, True, negInf, inf)
                if tempVal > bestVal:
                    bestMove = move
                    bestVal = tempVal
                self.board[row][col] = ' '
        return bestMove
class Player():
    """Tracks whose turn it is: the human plays 'X', the computer 'O'."""
    def __init__(self):
        self.human = 'X'
        self.computer = 'O'
        # The human always moves first.
        self.turn = True
    def is_human(self):
        """Return True while it is the human player's turn."""
        return self.turn
    def switch(self):
        """Hand the turn to the other player."""
        self.turn = not self.turn
    def character(self):
        """Return the board mark of the player whose turn it is."""
        return self.human if self.turn else self.computer
def main():
    """Run one interactive game of Connect-Four against the computer."""
    print("--Welcome to Connect-Four!--")
    board = GameBoard()
    player = Player()
    board.show_board()
    while board.moves_left():
        if player.is_human():
            print('\n')
            col = int(input("Choose a column to input character (0-6): "))
            if not board.insert(player.character(), col):
                # Full column: same player tries again.
                print("Column Full! It is ", player.character(), "'s Turn!")
                continue
            print('\n')
            board.show_board()
            if board.is_goal():
                print("\n==You Win!!==\n")
                return
        else:
            print("\nComputer is thinking...\n")
            move = board.best_move()
            print("Computer Move - Column: ", move)
            board.insert(player.character(), move[1])
            board.show_board()
            if board.is_goal():
                print("\n--You Lose!!--\n")
                return
        player.switch()
    print("--It's a Tie!!--\n")
again = "y"
# Bug fix: compare strings with '==', not 'is' — identity comparison only
# worked by the accident of CPython interning one-character strings.
while again == "y":
    main()
    again = str(input("Play again? (y/n)"))
print("Goodbye!")
|
from django.db import models
from string import capwords
from datetime import datetime, timedelta
from django.utils.timezone import utc
from django.contrib.auth.models import User
from lok.utils import level_from_value as level_from_value
from lok.utils import value_from_level as value_from_level
import random
from random import Random
import logging
from imagekit.models import ImageSpecField
from imagekit.processors import ResizeToFill, Adjust, ResizeToFit, AddBorder
from lok.templatetags.lok_extras import macro
logger = logging.getLogger(__name__)
def valid_for_plot_pre_reqs(character, pre_reqs):
    """Return True when the character's plot state satisfies every pre-req.

    A pre-req value of 0 requires the plot to be unstarted (no record, or a
    record with value 0); a positive value must match the stored plot value
    exactly.  A missing CharacterPlot row for a positive pre-req fails.
    """
    if not pre_reqs:
        return True
    try:
        for pre_req in pre_reqs:
            if pre_req.value == 0:
                existing = CharacterPlot.objects.filter(character=character.pk, plot=pre_req.plot)
                if existing and CharacterPlot.objects.get(character=character.pk, plot=pre_req.plot).value != 0:
                    return False
            elif pre_req.value > 0:
                if CharacterPlot.objects.get(character=character.pk, plot=pre_req.plot).value != pre_req.value:
                    return False
    except CharacterPlot.DoesNotExist:
        return False
    return True
def valid_for_money_pre_req(character, pre_reqs):
    """Return True when the character can afford the (single) money pre-req."""
    return not pre_reqs or character.money >= pre_reqs[0].amount
def valid_for_location_known_pre_reqs(character, pre_reqs):
    """Return True when the character's location knowledge matches every pre-req.

    Each pre-req names a location and whether the character must (known=True)
    or must not (known=False) have it available.

    Bug fix: the try/except previously wrapped the whole loop, so the first
    unknown location aborted evaluation of the remaining pre-reqs, and the
    handler read the leaked loop variable.  The lookup is now tried per
    pre-req.
    """
    for pre_req in pre_reqs or []:
        try:
            CharacterLocationAvailable.objects.get(character=character.pk, location=pre_req.location)
        except CharacterLocationAvailable.DoesNotExist:
            # Location unknown to the character but required to be known.
            if pre_req.known:
                return False
        else:
            # Location is known but the pre-req demands it be unknown.
            if not pre_req.known:
                return False
    return True
def valid_for_stat_pre_reqs(character, pre_reqs, enforceMax):
    """Return True when the character's stats satisfy every stat pre-req.

    Equipment bonuses (character.stat_bonus) count toward the minimum but
    not toward the maximum check.  A missing CharacterStat row fails the
    whole check.

    NOTE(review): the maximum is only enforced when pre_req.minimum <= 0
    (the elif) — confirm that is intentional.

    Bug fix: the maximum branch previously referenced ``character_stat``
    without fetching it first, raising NameError (or silently reusing the
    stat from a previous iteration) whenever the first pre-req had no
    minimum; it now fetches the pre-req's own stat.
    """
    if pre_reqs:
        try:
            for pre_req in pre_reqs:
                if pre_req.minimum > 0:
                    character_stat = CharacterStat.objects.get(character=character.pk, stat=pre_req.stat)
                    level = level_from_value(character_stat.value)
                    level += character.stat_bonus(character_stat.stat)
                    if level < pre_req.minimum:
                        return False
                elif enforceMax:
                    character_stat = CharacterStat.objects.get(character=character.pk, stat=pre_req.stat)
                    if level_from_value(character_stat.value) > pre_req.maximum:
                        return False
        except CharacterStat.DoesNotExist:
            return False
    return True
def valid_for_item_pre_reqs(character, pre_reqs):
    """Return True when the character holds enough of every required item.

    A missing CharacterItem row for any required item fails the check.
    """
    if not pre_reqs:
        return True
    try:
        for pre_req in pre_reqs:
            held = CharacterItem.objects.get(character=character.pk, item=pre_req.item)
            if held.quantity < pre_req.minimum:
                return False
    except CharacterItem.DoesNotExist:
        return False
    return True
def valid_for_level_pre_reqs(character, pre_reqs):
    """Return True when the character meets the (single) level pre-req."""
    return not pre_reqs or character.level() >= pre_reqs[0].minimum
class Image(models.Model):
    """An uploadable image with auto-generated thumbnail and scaled renditions."""
    title = models.CharField(max_length=100)
    # Alt text for accessibility / fallback display.
    alt = models.CharField(max_length=500)
    # Optional attribution for externally sourced artwork.
    contributor = models.CharField(max_length=100,blank=True,null=True)
    contributor_link = models.CharField(max_length=200,blank=True,null=True)
    image = models.ImageField(upload_to='images')
    # Derived renditions produced by django-imagekit from 'image'.
    thumbnail = ImageSpecField([ResizeToFill(50, 50)], image_field='image', format='JPEG', options={'quality': 90})
    # Hrm... I wanted to add a colored border around this, but I can't seem to make it show as any color other than white. Can revisit later.
    scaled = ImageSpecField([ResizeToFit(400, 1000), AddBorder(0)], image_field='image', format='JPEG', options={'quality': 90})
    def __unicode__(self):
        return self.title
class Scenario(models.Model):
    """A playable quest or encounter, gated by a set of pre-requisite tables.

    Eligibility for a character is decided by valid_for(), which consults
    the Scenario*PreReq models keyed on this scenario.
    """
    title = models.CharField(max_length=100)
    portrait = models.ForeignKey(Image, null=True, blank=True)
    description = models.TextField(max_length=2000)
    # Selection weight; presumably used for weighted random choice elsewhere
    # — TODO confirm against the caller.
    weight = models.IntegerField(default=10000)
    TYPE_QUEST = 1
    TYPE_ENCOUNTER = 2
    TYPE_CHOICES = (
        (TYPE_QUEST, "Quest"),
        (TYPE_ENCOUNTER, "Encounter"),
    )
    type = models.IntegerField(choices=TYPE_CHOICES, default=TYPE_ENCOUNTER)
    def __unicode__(self):
        return self.title
    def valid_for(self, character):
        """Return True when *character* satisfies every pre-req of this scenario."""
        # Special case, for my own sanity/convenience: if you're in a special location, you only get the scenarios specific to that place.
        if character.location.type == Location.TYPE_NONE and not ScenarioLocationPreReq.objects.filter(scenario=self.pk):
            return False
        # Stat pre-reqs enforce both minimum and maximum here (enforceMax=True).
        pre_reqs = ScenarioStatPreReq.objects.filter(scenario=self.pk)
        if not valid_for_stat_pre_reqs(character,pre_reqs, True):
            return False
        pre_reqs = ScenarioItemPreReq.objects.filter(scenario=self.pk)
        if not valid_for_item_pre_reqs(character,pre_reqs):
            return False
        pre_reqs = ScenarioPlotPreReq.objects.filter(scenario=self.pk)
        if not valid_for_plot_pre_reqs(character,pre_reqs):
            return False
        pre_reqs = ScenarioLevelPreReq.objects.filter(scenario=self.pk)
        if not valid_for_level_pre_reqs(character,pre_reqs):
            return False
        # Location pre-reqs: exact current location, location type, and
        # whether particular locations are known to the character.
        if ScenarioLocationPreReq.objects.filter(scenario=self.pk) and ScenarioLocationPreReq.objects.get(scenario=self.pk).location != character.location:
            return False
        if ScenarioLocationTypePreReq.objects.filter(scenario=self.pk):
            if ScenarioLocationTypePreReq.objects.get(scenario=self.pk).type != character.location.type:
                return False
        pre_reqs = ScenarioLocationKnownPreReq.objects.filter(scenario=self.pk)
        if not valid_for_location_known_pre_reqs(character, pre_reqs):
            return False
        # Health pre-req: 'full' requires max health; otherwise requires injury.
        if ScenarioHealthPreReq.objects.filter(scenario=self.pk):
            health = ScenarioHealthPreReq.objects.get(scenario=self.pk)
            if health.full and character.current_health < character.max_health():
                return False
            elif not health.full and character.current_health == character.max_health():
                return False
        return True
class Battle(Scenario):
    """A combat Scenario against a single named enemy with an attack style."""
    ENEMY_SLASHING=1
    ENEMY_BASHING=2
    ENEMY_RANGED=3
    TYPE_ENEMY = (
        (ENEMY_SLASHING, "Slashing"),
        (ENEMY_BASHING, "Bashing"),
        (ENEMY_RANGED, "Ranged"),
    )
    enemy = models.IntegerField(choices=TYPE_ENEMY)
    name = models.CharField(max_length=100)
    strength = models.IntegerField()
    def __unicode__(self):
        return capwords(self.title)
class Choice(models.Model):
    """A selectable option within a Scenario, gated by its own pre-reqs."""
    scenario = models.ForeignKey(Scenario)
    title = models.CharField(max_length=100)
    description = models.TextField(max_length=1000)
    visible = models.BooleanField()
    def __unicode__(self):
        return self.title
    def valid_for(self, character):
        """Return True when *character* satisfies every pre-req of this choice."""
        # Stat maximums are NOT enforced for choices (enforceMax=False),
        # unlike Scenario.valid_for.
        pre_reqs = ChoiceStatPreReq.objects.filter(choice=self.pk)
        if not valid_for_stat_pre_reqs(character,pre_reqs, False):
            return False
        pre_reqs = ChoiceItemPreReq.objects.filter(choice=self.pk)
        if not valid_for_item_pre_reqs(character,pre_reqs):
            return False
        pre_reqs = ChoicePlotPreReq.objects.filter(choice=self.pk)
        if not valid_for_plot_pre_reqs(character,pre_reqs):
            return False
        pre_reqs = ChoiceMoneyPreReq.objects.filter(choice=self.pk)
        if not valid_for_money_pre_req(character, pre_reqs):
            return False
        return True
class Plot(models.Model):
    """A storyline (or achievement) whose per-character progress is tracked
    elsewhere (CharacterPlot)."""
    # Highest progress value a plot can reach — presumably mirrored by
    # CharacterPlot.value; TODO confirm.
    MAX_LEVEL = 2
    name = models.CharField(max_length=100)
    visible = models.BooleanField(default=False)
    achievement = models.BooleanField(default = False)
    description = models.TextField(max_length=2000)
    def __unicode__(self):
        return self.name
    class Meta:
        ordering = ['-id']
class PlotDescription(models.Model):
    """Alternate description text for a Plot at a particular progress value."""
    plot = models.ForeignKey(Plot)
    value = models.IntegerField()
    description = models.TextField(max_length=2000)
    def __unicode__(self):
        return self.plot.name + ": " + self.description
class Item(models.Model):
    """An ownable game item; base class for Equipment."""
    name = models.CharField(max_length=100)
    # Monetary value when sold.
    value = models.IntegerField(default=1)
    sellable = models.BooleanField(default=True)
    # Whether a character may hold more than one of this item.
    multiple = models.BooleanField(default=True)
    def __unicode__(self):
        return self.name
class Equipment(Item):
    """An equippable Item occupying one of the defined gear slots.

    Equipment is unique per character by default: __init__ forces
    ``multiple=False`` unless the caller says otherwise.
    """
    TYPE_SWORD = 1
    TYPE_BASHING = 2
    TYPE_BOW = 3
    TYPE_FEET = 4
    TYPE_CLOAK = 5
    TYPE_CLOTHES = 6
    TYPE_GLOVES = 7
    TYPE_RING = 8
    TYPE_NECK = 9
    TYPE_ARMOR = 10
    TYPE_CHOICES = (
        (TYPE_SWORD, "Sword"),
        (TYPE_BASHING, "Bashing"),
        (TYPE_BOW, "Bow"),
        (TYPE_FEET, "Feet"),
        (TYPE_CLOAK, "Cloak"),
        (TYPE_CLOTHES, "Clothes"),
        (TYPE_GLOVES, "Gloves"),
        (TYPE_RING, "Ring"),
        (TYPE_NECK, "Neck"),
        (TYPE_ARMOR, "Armor")
    )
    def __init__(self, *args, **kwargs):
        # Equipment defaults to non-stackable unless explicitly overridden.
        if 'multiple' not in kwargs:
            kwargs['multiple'] = False
        # Bug fix: super() was previously called as super(Item, self),
        # which starts MRO lookup AFTER Item and would silently skip any
        # Item.__init__; name the current class instead.
        super(Equipment, self).__init__(*args, **kwargs)
    type = models.IntegerField(choices=TYPE_CHOICES)
    def __unicode__(self):
        return self.name
class Stat(models.Model):
    """A named character statistic, categorized by type."""
    TYPE_SKILL = 1
    TYPE_FAME = 2
    TYPE_ESTEEM = 3
    TYPE_CHARACTERISTIC = 4
    TYPE_PROGRESS = 5
    TYPE_CHOICES = (
        (TYPE_SKILL, "Skill"),
        (TYPE_FAME, "Fame"),
        (TYPE_ESTEEM, "Esteem"),
        (TYPE_CHARACTERISTIC, "Characteristic"),
        (TYPE_PROGRESS, "Progress"),
    )
    type = models.IntegerField(choices=TYPE_CHOICES, default=TYPE_PROGRESS)
    name = models.CharField(max_length=50)
    def __unicode__(self):
        return self.name
class EquipmentStat(models.Model):
    """A flat stat bonus granted by a piece of Equipment (see get_stat_bonus)."""
    equipment = models.ForeignKey(Equipment)
    stat = models.ForeignKey(Stat)
    amount = models.IntegerField()
    def __unicode__(self):
        return str(self.amount) + " points of " + self.stat.name + " for " + self.equipment.name
class Location(models.Model):
    """A place on the map, categorized by terrain/settlement type."""
    TYPE_CITY = 1
    TYPE_COUNTRY = 2
    TYPE_CAVE = 3
    TYPE_NONE = 4
    TYPE_CHOICES = (
        (TYPE_CITY, "City"),
        (TYPE_COUNTRY, "Country"),
        (TYPE_CAVE, "Cave"),
        (TYPE_NONE, "None"),
    )
    name = models.CharField(max_length=100, unique=True)
    type = models.IntegerField(choices=TYPE_CHOICES)
    def __unicode__(self):
        return self.name
class ScenarioLocationPreReq(models.Model):
    """Ties a Scenario to a specific Location."""
    scenario = models.ForeignKey(Scenario)
    location = models.ForeignKey(Location)
    def __unicode__(self):
        return self.location.name
class ScenarioLocationTypePreReq(models.Model):
    """Ties a Scenario to a Location *type* rather than a specific place."""
    scenario = models.ForeignKey(Scenario)
    type = models.IntegerField(choices=Location.TYPE_CHOICES)
    def __unicode__(self):
        # Relies on TYPE_* values being 1-based and contiguous with the tuple order.
        return Location.TYPE_CHOICES[self.type-1][1]
class ScenarioLocationKnownPreReq(models.Model):
    """Requires a Location to be known (or unknown, if known=False) to the character."""
    scenario = models.ForeignKey(Scenario)
    location = models.ForeignKey(Location)
    known = models.BooleanField()
    def __unicode__(self):
        return self.location.name + " " + str(self.known)
class ScenarioStatPreReq(models.Model):
    """Stat range [minimum, maximum] required for a Scenario to be offered."""
    scenario = models.ForeignKey(Scenario)
    stat = models.ForeignKey(Stat)
    minimum = models.IntegerField(default=0)
    maximum = models.IntegerField(default=100)
    visible = models.BooleanField(default=True)
    def __unicode__(self):
        return str(self.stat)
class ScenarioItemPreReq(models.Model):
    """Minimum quantity of an Item required for a Scenario."""
    scenario = models.ForeignKey(Scenario)
    item = models.ForeignKey(Item)
    minimum = models.IntegerField(default=1)
    visible = models.BooleanField(default=True)
    def __unicode__(self):
        return str(self.minimum) + " " + self.item.name
class ScenarioLevelPreReq(models.Model):
    """Character level band [minimum, maximum] required for a Scenario."""
    scenario = models.ForeignKey(Scenario)
    minimum = models.IntegerField()
    maximum = models.IntegerField(default=1000)
    def __unicode__(self):
        return "Level " + str(self.minimum) + "-" + str(self.maximum)
class ScenarioHealthPreReq(models.Model):
    """Optionally requires the character to be at full health."""
    scenario = models.ForeignKey(Scenario)
    full = models.BooleanField(default=False)
    def __unicode__(self):
        return "Full health: " + str(self.full)
class ChoicePlotPreReq(models.Model):
    """Requires the character's plot progress to match `value` for a Choice."""
    choice = models.ForeignKey(Choice)
    plot = models.ForeignKey(Plot)
    value = models.IntegerField()
    def __unicode__(self):
        return self.plot.name + " " + str(self.value)
class ScenarioPlotPreReq(models.Model):
    """Requires the character's plot progress to match `value` for a Scenario."""
    scenario = models.ForeignKey(Scenario)
    plot = models.ForeignKey(Plot)
    value = models.IntegerField()
    def __unicode__(self):
        return self.plot.name + " " + str(self.value)
class ChoiceStatPreReq(models.Model):
    """Stat gate on a Choice; doubles as the skill-check difficulty band."""
    choice = models.ForeignKey(Choice)
    stat = models.ForeignKey(Stat)
    minimum = models.IntegerField(default=0)
    maximum = models.IntegerField(default=100)
    visible = models.BooleanField(default=True)
    def __unicode__(self):
        return str(self.stat)
    def odds(self, character):
        """Probability in (0, 1] that `character` passes this stat check.

        Scales linearly with the character's progress between `minimum`
        and `maximum`; at or above `maximum` success is guaranteed.
        Equipment bonuses (Character.stat_bonus) count toward the check.
        """
        try:
            value = CharacterStat.objects.get(character=character, stat = self.stat).level()
        except CharacterStat.DoesNotExist:
            value = 0
        value += character.stat_bonus(self.stat)
        #print "Have " + str(value) + ", need " + str(self.maximum)
        if value >= self.maximum:
            return 1
        # Our odds of success are our progress between minimum and maximum.
        odds = float(value - self.minimum + 1) / float(self.maximum - self.minimum + 1)
        return odds
    def challenge(self, character):
        """Roll once against odds(); True on success."""
        return random.random() < self.odds(character)
class ChoiceItemPreReq(models.Model):
    """Minimum quantity of an Item required for a Choice."""
    choice = models.ForeignKey(Choice)
    item = models.ForeignKey(Item)
    minimum = models.IntegerField(default=1)
    def __unicode__(self):
        return str(self.minimum) + " " + self.item.name
class ChoiceMoneyPreReq(models.Model):
    """Minimum money (royals) required for a Choice."""
    choice = models.ForeignKey(Choice)
    amount = models.IntegerField()
    def __unicode__(self):
        return str(self.amount)
class Result(models.Model):
    """One weighted success/failure outcome of a Choice; the *Outcome models
    attached to a Result describe its concrete effects."""
    SUCCESS = 1
    FAILURE = 2
    CHOICES = (
        (SUCCESS, "Success"),
        (FAILURE, "Failure"),
    )
    type = models.IntegerField(choices=CHOICES, default=SUCCESS)
    portrait = models.ForeignKey(Image, null=True, blank=True)
    # Relative weight when picking among a choice's results.
    weight = models.IntegerField(default=1)
    choice = models.ForeignKey(Choice)
    title = models.CharField(max_length=100)
    description = models.TextField(max_length=4000)
    def __unicode__(self):
        return self.title
class SetLocationOutcome(models.Model):
    """Moves the character to `location` when its Result fires.

    NOTE(review): the FK is named `choice` but points at Result -- a rename
    would need a migration, so it is only flagged here.
    """
    choice = models.ForeignKey(Result)
    location = models.ForeignKey(Location)
    def __unicode__(self):
        return self.location.name
class LearnLocationOutcome(models.Model):
    """Makes `location` known to the character (FK misnamed `choice`, see above pattern)."""
    choice = models.ForeignKey(Result)
    location = models.ForeignKey(Location)
    def __unicode__(self):
        return self.location.name
class MoneyOutcome(models.Model):
    """Grants (or, if negative, deducts) royals when its Result fires."""
    choice = models.ForeignKey(Result)
    amount = models.IntegerField()
    def __unicode__(self):
        return str(self.amount)
class StatOutcome(models.Model):
    """Adjusts a stat by `amount`, but never past level `maximum`."""
    choice = models.ForeignKey(Result)
    stat = models.ForeignKey(Stat)
    amount = models.IntegerField()
    maximum = models.IntegerField(default=100000)
    def __unicode__(self):
        return str(self.stat)
class ItemOutcome(models.Model):
    """Grants (or removes, if negative) `amount` copies of an Item."""
    result = models.ForeignKey(Result)
    item = models.ForeignKey(Item)
    amount = models.IntegerField(default=1)
    def __unicode__(self):
        return str(self.amount) + " " + self.item.name
class PlotOutcome(models.Model):
    """Sets the character's progress on a Plot to `value`."""
    result = models.ForeignKey(Result)
    plot = models.ForeignKey(Plot)
    value = models.IntegerField()
    def __unicode__(self):
        return str(self.value) + " " + self.plot.name
class HealthOutcome(models.Model):
    """Adjusts health by `amount` (negative = damage) when its Result fires."""
    result = models.ForeignKey(Result)
    amount = models.IntegerField()
    def __unicode__(self):
        return str(self.amount) + " health"
def get_stat_bonus(item, stat):
    """Total bonus to `stat` granted by a single piece of equipment.

    Sums the `amount` of every EquipmentStat row linking `item` to `stat`;
    returns 0 when there are none (including when `item` is None, since the
    filter then matches nothing).
    """
    rows = EquipmentStat.objects.filter(equipment=item, stat=stat)
    return sum(row.amount for row in rows)
class LocationRoute(models.Model):
    """A one-way travel link between two Locations."""
    origin = models.ForeignKey(Location, related_name='+')
    destination = models.ForeignKey(Location, related_name='+')
    def __unicode__(self):
        return self.origin.name + " -> " + self.destination.name
class RouteOption(models.Model):
    """Base for the ways a route can be traveled; subclasses add costs."""
    route = models.ForeignKey(LocationRoute)
    description = models.CharField(max_length=1000)
    def __unicode__(self):
        return self.route.__unicode__()
    def summary(self):
        """Player-facing text; subclasses append their cost details."""
        return self.description
class RouteFree(RouteOption):
    """A route option with no cost at all."""
    def __unicode__(self):
        return self.route.__unicode__()
class RouteItemFree(RouteOption):
    """A route option that requires possessing an item but does not consume it."""
    item = models.ForeignKey(Item)
    def __unicode__(self):
        return self.route.__unicode__() + " : " + self.item.name
    def summary(self):
        return self.description + " This uses your " + self.item.name + "."
class RouteItemCost(RouteOption):
    """A route option that consumes `amount` copies of an item."""
    item = models.ForeignKey(Item)
    amount = models.IntegerField()
    def __unicode__(self):
        return self.route.__unicode__() + " : " + str(self.amount) + " " + self.item.name
    def summary(self):
        return self.description + " This requires using " + str(self.amount) + " " + self.item.name + "s."
class RouteToll(RouteOption):
    """A route option costing a flat fee in royals."""
    amount = models.IntegerField()
    def __unicode__(self):
        return self.route.__unicode__() + " : " + str(self.amount) + " royals"
    def summary(self):
        return self.description + " The fee is " + str(self.amount) + " royals."
class Title(models.Model):
    """A gendered honorific; '#NAME#' in the raw strings is replaced with the
    character's name when rendered."""
    raw_title_male = models.CharField(max_length=100)
    raw_title_female = models.CharField(max_length=100)
    def __unicode__(self):
        return self.raw_title_male + "/" + self.raw_title_female
    def title(self, character):
        """Render the title for `character`, picking the variant by gender."""
        if character.gender == Character.GENDER_FEMALE:
            return self.raw_title_female.replace("#NAME#", character.name)
        else:
            return self.raw_title_male.replace("#NAME#", character.name)
class TitleOutcome(models.Model):
    """Awards a Title to the character when its Result fires."""
    result = models.ForeignKey(Result)
    title = models.ForeignKey(Title)
    def __unicode__(self):
        return self.title.__unicode__()
class Party(models.Model):
    """A group of Characters adventuring together.

    A Party has no fields of its own; membership lives on Character.party.
    """
    def leader(self):
        """Return the member treated as leader, or None for an empty party.

        BUG FIX: the sort key was previously ``a.max_party_size`` -- the bound
        method object itself, never called -- so members were ordered by
        arbitrary method identity (and would raise TypeError on Python 3).
        It is now actually invoked.
        NOTE(review): the ascending sort makes the member with the *smallest*
        max_party_size the leader; confirm that is the intended tie-break.
        """
        members = sorted(Character.objects.filter(party=self), key=lambda a: a.max_party_size())
        if not members:
            return None
        return members[0]
    def member(self, character):
        """True if `character` belongs to this party."""
        return Character.objects.filter(party=self,pk=character.id).exists()
    def members(self):
        """QuerySet of all Characters in this party."""
        return Character.objects.filter(party=self)
    def odds_against(self, battle):
        """Best member's odds against `battle`; the winning member is attached
        under the 'character' key of the returned dict."""
        odds_result = None
        for member in self.members():
            result = member.odds_against(battle)
            if not odds_result or result['odds'] > odds_result['odds']:
                result['character'] = member
                odds_result = result
        return odds_result
    def size(self):
        """Current member count."""
        return self.members().count()
    def max_size(self):
        """Capacity: the largest max_party_size among current members (min 1)."""
        size = 1
        for member in self.members():
            member_size = member.max_party_size()
            if member_size > size:
                size = member_size
        return size
    def __unicode__(self):
        if not self.leader():
            return "Empty party"
        return self.leader().title_name() + "'s party"
class Character(models.Model):
    """A player character: identity, stats, equipment slots, party membership,
    location, and the recharging action-point economy."""
    GENDER_FEMALE = 1
    GENDER_MALE = 2
    GENDER_CHOICES = (
        (GENDER_FEMALE, "Female"),
        (GENDER_MALE, "Male"),
    )
    CONTACT_YES = 2
    CONTACT_NO = 1
    CONTACT_CHOICES = (
        (CONTACT_YES, "Yes"),
        (CONTACT_NO, "No"),
    )
    # Action points: a full bar of 20, one point recharging per 15 minutes.
    MAX_ACTIONS = 20
    ACTION_RECHARGE_TIME_SECS = 900
    player = models.ForeignKey(User)
    party = models.ForeignKey(Party)
    contact = models.IntegerField(choices=CONTACT_CHOICES, default=CONTACT_YES)
    name = models.CharField(max_length=20,unique=True)
    created = models.DateTimeField(auto_now_add=True)
    money = models.IntegerField(default=0)
    gender = models.IntegerField(choices=GENDER_CHOICES)
    current_health = models.IntegerField()
    total_choices = models.IntegerField(default=0)
    actions = models.IntegerField(default=20)
    # When the next action point becomes available.
    refill_time = models.DateTimeField()
    recharge_delay_secs = models.IntegerField(default=900)
    location = models.ForeignKey(Location)
    # One slot per Equipment.TYPE_* value.
    sword = models.ForeignKey('Equipment', limit_choices_to={'type': Equipment.TYPE_SWORD}, null=True, blank=True, related_name='+')
    bashing = models.ForeignKey('Equipment', limit_choices_to={'type': Equipment.TYPE_BASHING}, null=True, blank=True, related_name='+')
    bow = models.ForeignKey('Equipment', limit_choices_to={'type': Equipment.TYPE_BOW}, null=True, blank=True, related_name='+')
    feet = models.ForeignKey('Equipment', limit_choices_to={'type': Equipment.TYPE_FEET}, null=True, blank=True, related_name='+')
    cloak = models.ForeignKey('Equipment', limit_choices_to={'type': Equipment.TYPE_CLOAK}, null=True, blank=True, related_name='+')
    clothing = models.ForeignKey('Equipment', limit_choices_to={'type': Equipment.TYPE_CLOTHES}, null=True, blank=True, related_name='+')
    gloves = models.ForeignKey('Equipment', limit_choices_to={'type': Equipment.TYPE_GLOVES}, null=True, blank=True, related_name='+')
    ring = models.ForeignKey('Equipment', limit_choices_to={'type': Equipment.TYPE_RING}, null=True, blank=True, related_name='+')
    neck = models.ForeignKey('Equipment', limit_choices_to={'type': Equipment.TYPE_NECK}, null=True, blank=True, related_name='+')
    armor = models.ForeignKey('Equipment', limit_choices_to={'type': Equipment.TYPE_ARMOR}, null=True, blank=True, related_name='+')
    active_title = models.ForeignKey(Title, null=True, blank=True)
    def __unicode__(self):
        return self.name
    def title_name(self):
        """Display name: the active Title rendered for this character, or the
        plain name when no title is active."""
        if self.active_title:
            return self.active_title.title(self)
        else:
            return self.name
    def max_party_size(self):
        """How many members this character allows a party to have."""
        # I'm gonna play around with this... may also expand based on land ownership, renown, etc.
        size = 1
        try:
            skill_value = CharacterStat.objects.get(character=self, stat__name="Persuasion").level()
        except CharacterStat.DoesNotExist:
            skill_value = 0
        skill_value += self.stat_bonus(Stat.objects.get(name="Persuasion"))
        # +1 per 20 points of Persuasion (up to +3)...
        if skill_value > 20:
            size += 1
        if skill_value > 40:
            size += 1
        if skill_value > 60:
            size += 1
        try:
            renown_value = CharacterStat.objects.get(character=self, stat__name="Renown").level()
        except CharacterStat.DoesNotExist:
            renown_value = 0
        # ... and +1 for notable Renown.
        if renown_value > 10:
            size += 1
        return size
    def stat_bonus(self, stat):
        """Total equipment bonus for `stat`, summed across all ten slots."""
        bonus = 0
        bonus += get_stat_bonus(self.sword, stat)
        bonus += get_stat_bonus(self.bashing, stat)
        bonus += get_stat_bonus(self.bow, stat)
        bonus += get_stat_bonus(self.feet, stat)
        bonus += get_stat_bonus(self.cloak, stat)
        bonus += get_stat_bonus(self.clothing, stat)
        bonus += get_stat_bonus(self.gloves, stat)
        bonus += get_stat_bonus(self.ring, stat)
        bonus += get_stat_bonus(self.neck, stat)
        bonus += get_stat_bonus(self.armor, stat)
        return bonus
    def rest(self):
        """Spend one action doing nothing (counts toward total_choices)."""
        self.update_actions()
        # Starting a spend from a full bar restarts the recharge clock.
        if self.actions == Character.MAX_ACTIONS:
            self.refill_time = datetime.utcnow().replace(tzinfo=utc) + timedelta(0, self.recharge_delay_secs)
        if self.actions > 0:
            self.actions -= 1
            self.save()
        self.total_choices += 1
        self.save()
    def update_actions(self):
        """Credit any action points whose refill time has elapsed."""
        while datetime.utcnow().replace(tzinfo=utc) > self.refill_time and self.actions < Character.MAX_ACTIONS:
            self.actions = self.actions + 1
            self.refill_time = self.refill_time + timedelta(0, self.recharge_delay_secs)
        self.save()
    def odds_against(self, battle):
        """Pick this character's best weapon against `battle` and return
        {'odds': float in [.05, .95], 'weapon': Equipment-or-None}."""
        sword_strength = 0
        bow_strength = 0
        bashing_strength = 0
        if self.sword:
            sword_strength = self.stat_bonus(Stat.objects.get(name="Swordfighting"))
            if CharacterStat.objects.filter(character=self, stat__name="Swordfighting"):
                sword_strength += level_from_value(CharacterStat.objects.get(character=self, stat__name="Swordfighting").value)
        if self.bow:
            bow_strength = self.stat_bonus(Stat.objects.get(name="Archery"))
            if CharacterStat.objects.filter(character=self, stat__name="Archery"):
                bow_strength += level_from_value(CharacterStat.objects.get(character=self, stat__name="Archery").value)
        # Unlike bows and swords, we can use bashing even without an item equipped.
        if self.bashing:
            # BUG FIX: this previously read self.bashing.amount, but Equipment
            # has no `amount` field (EquipmentStat does), so any equipped
            # bashing weapon raised AttributeError. Use the equipment stat
            # bonus, mirroring the sword/bow branches.
            bashing_strength += self.stat_bonus(Stat.objects.get(name="Bashing"))
        if CharacterStat.objects.filter(character=self, stat__name="Bashing"):
            bashing_strength += level_from_value(CharacterStat.objects.get(character=self, stat__name="Bashing").value)
        # Rock-paper-scissors adjustment against the enemy's style.
        if battle.enemy == Battle.ENEMY_RANGED:
            sword_strength = sword_strength - (5 + .1 * sword_strength)
            bashing_strength = bashing_strength + (5 + .1 * bashing_strength)
        elif battle.enemy == Battle.ENEMY_BASHING:
            sword_strength = sword_strength + (5 + .1 * sword_strength)
            bow_strength = bow_strength - (5 + .1 * bow_strength)
        else:
            bow_strength = bow_strength + (5 + .1 * bow_strength)
            bashing_strength = bashing_strength - (5 + .1 * bashing_strength)
        best_strength = 0
        weapon = None
        if bow_strength >= sword_strength and bow_strength >= bashing_strength:
            best_strength = bow_strength
            weapon = self.bow
        elif sword_strength >= bow_strength and sword_strength >= bashing_strength:
            # BUG FIX: the sword branch previously assigned bow_strength here.
            best_strength = sword_strength
            weapon = self.sword
        else:
            best_strength = bashing_strength
            weapon = self.bashing
        # Phew! Now, calculate the delta!
        odds = .5 + 0.05 * (best_strength - battle.strength)
        # Can play around with this, but I think there's always a 5% chance of success or failure. (Nice D20 odds.)
        if odds < .05:
            odds = .05
        elif odds > .95:
            odds = .95
        return {'odds': odds, 'weapon': weapon}
    def max_health(self):
        """Health cap: level of the best skill stat, with a floor of 10."""
        amount = 0
        stats = CharacterStat.objects.filter(character=self, stat__type=Stat.TYPE_SKILL).order_by('-value')
        if stats:
            best_stat = stats[0]
            amount = level_from_value(best_stat.value)
        if amount < 10:
            amount = 10
        return amount
    def level(self):
        """Character level = level of the best skill stat.
        NOTE(review): raises IndexError if the character has no skill stats."""
        best_stat = CharacterStat.objects.filter(character=self, stat__type=Stat.TYPE_SKILL).order_by('-value')[0]
        return level_from_value(best_stat.value)
    def update_with_result(self, result, pre_reqs, battle, block_death):
        """Apply every outcome attached to `result` to this character.

        Spends one action, applies battle weapon-skill gains, then each
        Stat/Money/Item/Health/Plot/Location/Title outcome, with battle-only
        dodge and armor mitigation on health loss. Returns the list of Change
        records describing what happened (for the player-facing summary).
        """
        changes = list()
        # Spending from a full bar restarts the recharge clock.
        if self.actions == Character.MAX_ACTIONS:
            self.refill_time = datetime.utcnow().replace(tzinfo=utc) + timedelta(0, self.recharge_delay_secs)
        self.actions = self.actions - 1
        if battle:
            # Increment the appropriate weapon skills. Always get 2pts in current weapon if it's a challenge, 1pt if it's easy (or someone else is fighting for us)
            odds = self.party.odds_against(battle)
            # NOTE(review): odds_against caps 'odds' at .95, so the > 1 branch
            # looks unreachable -- confirm whether >= 0.95 was intended.
            if odds['odds'] > 1 or odds['character'] != self:
                amount = 1
            else:
                amount = 2
            if odds['weapon'] == self.sword:
                stat = Stat.objects.get(name="Swordfighting")
            elif odds['weapon'] == self.bow:
                stat = Stat.objects.get(name="Archery")
            else:
                stat = Stat.objects.get(name="Bashing")
            if (not CharacterStat.objects.filter(character=self,stat=stat)):
                charstat = CharacterStat(character=self, stat=stat, value=0)
            else:
                charstat = CharacterStat.objects.get(character=self, stat=stat)
            change = Change(type=Change.TYPE_INCREMENT)
            change.name = stat.name
            change.old = charstat.value
            change.amount = amount
            charstat.value += amount
            change.new = charstat.value
            charstat.save()
            changes.append(change)
        stat_outcomes = StatOutcome.objects.filter(choice = result.pk)
        for outcome in stat_outcomes:
            stat, created = CharacterStat.objects.get_or_create(character=self, stat=outcome.stat)
            if (level_from_value(stat.value) < outcome.maximum):
                change = Change(type=Change.TYPE_INCREMENT)
                change.old = stat.value
                oldlevel = level_from_value(stat.value)
                oldvalue = stat.value
                change.name = stat.stat.name
                # If we succeeded using a maxed stat, it can only increase by 1 point.
                if (pre_reqs.filter(stat=outcome.stat) and pre_reqs.get(stat=outcome.stat).maximum <= oldlevel):
                    stat.value += 1
                else:
                    stat.value += outcome.amount
                if stat.value < 0:
                    stat.value = 0
                change.amount = stat.value - oldvalue
                newlevel = level_from_value(stat.value)
                stat.save()
                if oldlevel != newlevel:
                    change.type = Change.TYPE_LEVEL
                    change.old = oldlevel
                    change.new = newlevel
                    change.amount = newlevel - oldlevel
                    # ... and give them a free health point if they can use it. Note that we [currently] don't refill all health.
                    if self.current_health < self.max_health():
                        self.current_health += 1
                        self.save()
                else:
                    # Points still needed to reach the next level.
                    change.old = value_from_level(oldlevel + 1) - stat.value
                    change.new = oldlevel + 1
                changes.append(change)
        money_outcomes = MoneyOutcome.objects.filter(choice = result.pk)
        for outcome in money_outcomes:
            share = outcome.amount
            if battle and self.party.size() > 1:
                # Divide money among the team (with a 25% group bonus).
                share = (int)((outcome.amount * 1.25) / self.party.size())
                for player in self.party.members():
                    if player != self:
                        player.money += share
                        player.save()
                        notice = SocialMessage(to_character=player, description = "You received " + str(share) + " royals from " + self.title_name() + "'s battle with " + battle.title + ".")
                        notice.save()
            change = Change(type = Change.TYPE_MONEY)
            change.old = self.money
            self.money += share
            if outcome.amount == 1:
                change.name = "royal"
            else:
                change.name = "royals"
            # If we hit this test, we probably accidentally made the result amount bigger than the choice amount.
            if (self.money < 0):
                self.money = 0
            change.new = self.money
            change.amount = change.new - change.old
            changes.append(change)
        item_outcomes = ItemOutcome.objects.filter(result = result.pk)
        for outcome in item_outcomes:
            change = Change(type = Change.TYPE_ITEM)
            change.name = outcome.item.name
            item, created = CharacterItem.objects.get_or_create(character=self, item=outcome.item)
            change.old = item.quantity
            item.quantity += outcome.amount
            if item.quantity < 0:
                item.quantity = 0
            change.new = item.quantity
            change.amount = change.new - change.old
            item.save()
            changes.append(change)
        health_outcomes = HealthOutcome.objects.filter(result = result.pk)
        for outcome in health_outcomes:
            change = Change(type = Change.TYPE_HEALTH)
            change.name = "health"
            change.old = self.current_health
            # If this was a fight, we have two chances to mitigate damage done. First dodge, then absorb.
            if battle:
                dodge_chance = self.stat_bonus(Stat.objects.get(name="Dodging"))
                if CharacterStat.objects.filter(character=self, stat__name="Dodging"):
                    dodge_chance += level_from_value(CharacterStat.objects.get(character=self, stat__name="Dodging").value)
                # Hard cap at 80% dodge rate. Should probably eventually turn this to a soft cap that kicks in much earlier. We also check on stat increase, but need to keep a check here as well to prevent someone reaching 100% through use of equipment.
                if dodge_chance > 80:
                    dodge_chance = 80
                if random.random() * 100 < dodge_chance:
                    # They dodged it! No damage done. Let them know how lucky they are.
                    changes.append(Change(type=Change.TYPE_DODGE))
                    outcome.amount = 0
                # Next, let them absorb the blow if they have armor.
                armor_rating = self.stat_bonus(Stat.objects.get(name="Armor"))
                if (armor_rating > 40):
                    armor_rating = 40
                old_outcome = outcome.amount
                # BUG FIX: the multiplier was previously (1.0 - armor_rating * 2),
                # which goes hugely negative (flipping damage into healing). Each
                # armor point absorbs 2% of the blow, capped at 80% (rating 40),
                # matching the 80% dodge cap above.
                outcome.amount = int(outcome.amount * (1.0 - float(armor_rating) * 2 / 100.0))
                if old_outcome != outcome.amount:
                    changes.append(Change(type=Change.TYPE_ABSORBED, amount=old_outcome - outcome.amount))
                # See if they lucked out and gained an increase in dodge rating.
                if dodge_chance < self.level():
                    # Definitely play around with this! Right now I'm thinking the odds of a dodge increase will range between 1-5%, depending on the ratio between the character's stealth, deviousness and their level.
                    max_dodgy = 0
                    # BUG FIX: stealthy was previously unbound when the character
                    # had no Stealth stat, raising UnboundLocalError below.
                    stealthy = 0
                    if (CharacterStat.objects.filter(character=self, stat__name="Deviousness")):
                        max_dodgy = level_from_value(CharacterStat.objects.get(character=self,stat__name="Deviousness").value)
                    if (CharacterStat.objects.filter(character=self, stat__name="Stealth")):
                        stealthy = level_from_value(CharacterStat.objects.get(character=self,stat__name="Stealth").value)
                    if stealthy > max_dodgy:
                        max_dodgy = stealthy
                    # BUG FIX: max_dodgy/self.level() was integer division on
                    # Python 2, truncating the intended ratio to 0 or 1.
                    odds = 0.01 + (0.04 * (float(max_dodgy) / self.level()))
                    if random.random() < odds:
                        # Congrats! You just got better at dodging!
                        dodgechange = Change(type=Change.TYPE_INCREMENT)
                        dodgechange.name = "Dodging"
                        if not CharacterStat.objects.filter(character=self, stat__name="Dodging"):
                            dodgechange.old = 0
                            stat = CharacterStat(character=self, stat=Stat.objects.get(name="Dodging"), value=10)
                            stat.save()
                        else:
                            stat = CharacterStat.objects.get(character=self, stat__name="Dodging")
                            dodgechange.old = stat.value
                            stat.value += 10
                            stat.save()
                        dodgechange.amount = 10
                        dodgechange.new = stat.value
                        changes.append(dodgechange)
            self.current_health += outcome.amount
            if (self.current_health < 1 and block_death):
                self.current_health = 1
            elif (self.current_health < 0):
                self.current_health = 0
            elif (self.current_health > self.max_health()):
                self.current_health = self.max_health()
            if (self.current_health != change.old):
                change.new = self.current_health
                change.amount = change.new - change.old
            changes.append(change)
        plot_outcomes = PlotOutcome.objects.filter(result = result.pk)
        for outcome in plot_outcomes:
            change = Change(type = Change.TYPE_PLOT)
            change.name = outcome.plot.description
            plot, created = CharacterPlot.objects.get_or_create(character = self, plot = outcome.plot)
            change.new = outcome.value
            plot.value = outcome.value
            plot.save()
            # Only achievement plots are surfaced to the player.
            if plot.plot.achievement:
                changes.append(change)
        location_learn_outcomes = LearnLocationOutcome.objects.filter(choice = result.pk)
        for outcome in location_learn_outcomes:
            change = Change(type = Change.TYPE_LOCATION_LEARNED)
            change.name = outcome.location.name
            location, created = CharacterLocationAvailable.objects.get_or_create(character = self, location = outcome.location)
            location.save()
            changes.append(change)
        title_outcomes = TitleOutcome.objects.filter(result = result.pk)
        for title_outcome in title_outcomes:
            change = Change(type = Change.TYPE_TITLE)
            change.name = macro(title_outcome.title.title(self), self)
            title, created = CharacterTitle.objects.get_or_create(character = self, title = title_outcome.title)
            title.save()
            changes.append(change)
        if SetLocationOutcome.objects.filter(choice=result.pk):
            outcome = SetLocationOutcome.objects.get(choice=result.pk)
            change = Change(type = Change.TYPE_LOCATION_CHANGED)
            change.name = outcome.location.name
            self.location = outcome.location
            changes.append(change)
        self.total_choices = self.total_choices + 1
        self.save()
        return changes
class PartyInvite(models.Model):
    """A pending invitation for `to_character` to join `party`."""
    from_character = models.ForeignKey(Character, related_name="party_invite_from")
    to_character = models.ForeignKey(Character, related_name="party_invite_to")
    party = models.ForeignKey(Party)
    def __unicode__(self):
        return "Invite from " + self.from_character.name + " to " + self.to_character.name
class CharacterLocationAvailable(models.Model):
    """Marks a Location as known to a Character (see LearnLocationOutcome)."""
    character = models.ForeignKey(Character)
    location = models.ForeignKey(Location)
    def __unicode__(self):
        return self.location.name
class CharacterTitle(models.Model):
    """A Title a Character has earned (see TitleOutcome)."""
    character = models.ForeignKey(Character)
    title = models.ForeignKey(Title)
    def __unicode__(self):
        return self.title.title(self.character)
class CharacterStat(models.Model):
    """A Character's raw point total in one Stat; `value` is points, while
    level() converts to the player-facing level."""
    character = models.ForeignKey(Character)
    stat = models.ForeignKey(Stat)
    value = models.IntegerField(default=0)
    def level(self):
        return level_from_value(self.value)
    def __unicode__(self):
        return str(self.stat) + ":" + str(self.value) + ":" + str(self.level())
class CharacterPlot(models.Model):
    """A Character's current progress value in one Plot."""
    character = models.ForeignKey(Character)
    plot = models.ForeignKey(Plot)
    value = models.IntegerField(default=0)
    def __unicode__(self):
        return self.character.name + ":" + self.plot.name + ":" + str(self.value)
class CharacterItem(models.Model):
    """How many of one Item a Character is carrying."""
    character = models.ForeignKey(Character)
    item = models.ForeignKey(Item)
    quantity = models.IntegerField(default = 0)
    def __unicode__(self):
        return str(self.quantity) + " " + self.item.name
class Change(models.Model):
    """One delta applied to a character by a Result, used to build the
    player-facing summary (see Character.update_with_result)."""
    TYPE_INCREMENT = 1
    TYPE_LEVEL = 2
    TYPE_MONEY = 3
    TYPE_PLOT = 4
    TYPE_ITEM = 5
    TYPE_HEALTH = 6
    TYPE_NO_ACTIONS = 7
    TYPE_OUTCOME = 8
    TYPE_DODGE = 9
    TYPE_ABSORBED = 10
    TYPE_WEAPON = 11
    TYPE_ENEMY = 12
    TYPE_LOCATION_LEARNED = 13
    TYPE_LOCATION_CHANGED = 14
    TYPE_ALLY = 15
    TYPE_TITLE = 16
    TYPE_CHOICES = (
        (TYPE_INCREMENT, "Increment"),
        (TYPE_LEVEL, "Level"),
        (TYPE_MONEY, "Money"),
        (TYPE_PLOT, "Plot"),
        (TYPE_ITEM, "Item"),
        (TYPE_HEALTH, "Health"),
        (TYPE_NO_ACTIONS, "Insufficient Actions"),
        (TYPE_OUTCOME, "Outcome"),
        (TYPE_DODGE, "Dodge"),
        (TYPE_ABSORBED, "Absorbed"),
        (TYPE_WEAPON, "Weapon Used"),
        (TYPE_ENEMY, "Enemy"),
        (TYPE_LOCATION_LEARNED, "Learned Location"),
        (TYPE_LOCATION_CHANGED, "Moved To Location"),
        (TYPE_ALLY, "Ally"),
        (TYPE_TITLE, "Title"),
    )
    type = models.IntegerField(choices=TYPE_CHOICES, default=TYPE_INCREMENT)
    old = models.IntegerField()
    new = models.IntegerField()
    amount = models.IntegerField()
    name = models.CharField(max_length=100)
    def __unicode__(self):
        # BUG FIX: old/new are integers; concatenating them directly with str
        # raised TypeError. Convert explicitly.
        return self.name + " has changed from " + str(self.old) + " to " + str(self.new) + "."
class ItemLocation(models.Model):
    """Makes an Item available at a particular Location."""
    item = models.ForeignKey(Item)
    location = models.ForeignKey(Location)
    def __unicode__(self):
        return self.item.name + " in " + self.location.name
class SocialMessage(models.Model):
    """A one-way notification delivered to a character (e.g. loot shares)."""
    to_character = models.ForeignKey(Character)
    created = models.DateTimeField(auto_now_add=True)
    description = models.CharField(max_length=1000)
    def __unicode__(self):
        return "To " + self.to_character.name + ": " + self.description
|
import unittest
from export_workers.workers.send_email.email_listener import job_listener
import pika
# Module-level AMQP plumbing: connecting at import time means merely importing
# this test module requires a RabbitMQ broker listening on localhost:5672.
CONNECTION = pika.BlockingConnection(
    pika.ConnectionParameters(host="localhost", port=5672))
CHANNEL = CONNECTION.channel()
# Idempotently declare the queue the worker publishes its answers to.
CHANNEL.queue_declare(queue='answer_to_export')
class CreateFileWorker(unittest.TestCase):
    """Integration test for the send_email worker's listener (needs a live broker)."""
    def test_invalid_job_data(self):
        # Deliberately malformed payload: a raw string that is not valid JSON,
        # so the listener should publish a failure answer.
        message = "{'task_id': 787878787, " \
                  "'form_id': 1, 'group_id': [1], " \
                  "'export_format': xls'asdasd:a " \
                  "'email': 'taras.konchak1@gmail.com'}"
        def callback(channel, method, properties, job_data):
            # NOTE(review): job_data is the raw AMQP body; indexing with
            # ['message'] assumes it has been decoded to a dict upstream --
            # confirm against the consumer configuration.
            self.assertEqual(job_data['message'], b"Incorrect input data! Sending failed")
            # Stop the blocking consume loop once the answer arrives.
            CHANNEL.stop_consuming()
        # Drive the listener directly, then consume the answer it published.
        job_listener("channel", "method", "properties", message)
        CHANNEL.basic_consume(queue='answer_to_export', on_message_callback=callback, auto_ack=True)
        CHANNEL.start_consuming()
|
from flask import Flask, request
from flask_mysqldb import MySQL
app = Flask(__name__)
# MySQL connection settings for flask_mysqldb.
# NOTE(review): root with an empty password is fine for local development only;
# move these to environment variables/config before deploying.
app.config['MYSQL_HOST'] = 'localhost'
app.config['MYSQL_USER'] = 'root'
app.config['MYSQL_PASSWORD'] = ''
app.config['MYSQL_DB'] = 'sports_synthesis'
mysql = MySQL(app)
@app.route('/signup',methods=['POST'])
def signup():
    """Register a new user from a JSON payload.

    Expects: username, email, password, confirm_password, user_type.
    BUG FIX: the view previously returned None (an HTTP 500 in Flask) when the
    request body was not JSON; it now returns an explicit message.
    """
    if not request.is_json:
        return 'Request must be JSON'
    content = request.get_json()
    # Getting Details from JSON
    username = content['username']
    email = content['email']
    password = content['password']
    confirm_password = content['confirm_password']
    user_type = content['user_type']
    # MySql Database Connection
    cur = mysql.connection.cursor()
    if password == confirm_password:
        # SECURITY(review): the password is stored in plain text -- hash it
        # (e.g. werkzeug.security.generate_password_hash) before insert.
        cur.execute("INSERT INTO users(username, email, password, user_type_id) VALUES (%s, %s, %s, %s)", (username, email, password, user_type))
        mysql.connection.commit()
        cur.close()
        return 'Signed Up Successfully'
    else:
        return 'Password does not match'
@app.route('/login',methods=['POST'])
def login():
    """Check username/password for the user id given in the query string.

    BUG FIX: the view previously returned None (an HTTP 500 in Flask) when the
    request wasn't JSON, when no row matched the id, or when the credentials
    didn't match; all three paths now return an explicit message.
    """
    if not request.is_json:
        return 'Request must be JSON'
    content = request.get_json()
    id = request.args['id']
    username = content['username']
    password = content['password']
    cur = mysql.connection.cursor()
    cur.execute("""SELECT * FROM users WHERE id = %s""",(id,))
    data = cur.fetchone()
    # Guard: fetchone() returns None when the id doesn't exist; indexing it
    # would raise TypeError.
    if data is None:
        return "Login Failed"
    # SECURITY(review): plain-text password comparison -- switch to a hash
    # check once signup stores hashes.
    # NOTE(review): data[1]/data[3] assume the users table column order is
    # (id, username, ?, password, ...) -- confirm against the schema.
    if username == data[1] and password == data[3]:
        return "Login Successful"
    return "Login Failed"
# Run the Flask development server when executed directly.
if __name__ == '__main__':
    app.run()
|
import pytest
#import make_network
#def test_answer():
# assert func(3) == 5
|
from django.contrib.auth import get_user_model, authenticate, password_validation
from django.test import TestCase
from user.form import UserCreateForm
User = get_user_model()
# Shared fixture for the auth tests below. Plain-text password is fine in a
# test fixture; create_user hashes it on save.
DATA = {
    'first_name': 'test',
    'last_name': 'test',
    'email': 'test@example.com',
    'password': 'Testing@123',
}
class LogInTest(TestCase):
    """Sanity checks on a freshly created user and authentication."""
    def setUp(self):
        # A new user per test; TestCase rolls the DB back between tests.
        User.objects.create_user(**DATA)
    def test_1_user_exists(self):
        print('Testing for User Exist')
        user = User.objects.get(email=DATA['email'])
        self.assertTrue(user)
    def test_2_user_active(self):
        print('Testing for User Active')
        user = User.objects.get(email=DATA['email'])
        self.assertTrue(user.is_active)
    def test_3_invalid_credentials(self):
        # Despite the name, this asserts that the *valid* credentials authenticate.
        print('Testing for User Valid Credentials')
        form = authenticate(email=DATA['email'], password=DATA['password'])
        self.assertTrue(form)
class SignUp(TestCase):
    """Tests for password validation and user creation via UserCreateForm."""
    def test_1_password_validation(self):
        print('Testing for password Validation')
        # validate_password returns None when the password passes all validators.
        validate = password_validation.validate_password(DATA['password'])
        self.assertIsNone(validate)
    def test_2_user_creation_and_verification(self):
        print('Testing Create user')
        form = UserCreateForm(data=DATA)
        form.is_valid()
        form.create(DATA)
        self.assertTrue(form)
        user = User.objects.get(email=DATA['email'])
        self.assertTrue(user)
    def test_3_password_check(self):
        print('Testing Password/Confirm Password match')
        password = DATA['password']
        confirm_password = 'Testing@123'
        # BUG FIX: this previously did assertTrue('password matched') /
        # assertFalse('password mismatch') inside an if/else -- the first is a
        # tautology and the second can never pass, so nothing was tested.
        self.assertEqual(password, confirm_password)
|
import logging
from django.template import loader, Template, TemplateDoesNotExist, TemplateSyntaxError, TextNode
from vacuum.rules import registered_rules
class TemplateChecker(object):
    """Runs every registered vacuum rule over a template's node tree."""
    def check_template(self, template):
        """
        Checks the given template for badness.

        Accepts either a Template instance or a template name; names that
        fail to load are logged and skipped.
        """
        if not isinstance(template, Template):
            try:
                template = loader.get_template(template)
            # BUG FIX: was the Python-2-only ``except (...), e:`` form, which
            # is a SyntaxError on Python 3; ``as e`` works on 2.6+ and 3.
            except (TemplateSyntaxError, TemplateDoesNotExist) as e:
                logging.error("Couldn't load template: '%s' (%s)", template, e)
                return
        rules = [r(template) for r in registered_rules]
        for rule in rules:
            rule.process_ancestor_templates()
        # depth-first search of the template nodes
        #TODO should probably use deque, since we're doing popleft() a lot?
        nodes = template.nodelist
        self._recursive_check(nodes, [], rules)
    def _recursive_check(self, nodes, ancestors, rules):
        """Depth-first walk; every still-active rule visits each non-blank node."""
        for node in nodes:
            node.parent = ancestors[-1] if ancestors else None
            if isinstance(node, TextNode):
                if not node.s.strip():
                    # skip further processing for blank text nodes
                    continue
            children = []
            for child_nodelist in getattr(node, 'child_nodelists', []):
                children.extend(getattr(node, child_nodelist, []))
            valid = True
            for rule in rules:
                if rule.finished:
                    continue
                if rule.visit_node(node) is False:
                    valid = False
            # Don't descend into a node any rule has declared invalid.
            if valid and children:
                self._recursive_check(children, ancestors+[node], rules)
|
# Generated by Django 3.0.3 on 2020-05-19 09:00
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates WeatherBase plus two
    # multi-table-inheritance children (WeatherForecast, WeatherSummary)
    # that share WeatherBase via a OneToOne parent link.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='WeatherBase',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # airport identifier (3-letter IATA code)
                ('iata_code', models.CharField(max_length=3)),
                ('temperature', models.FloatField()),
                ('precipitation', models.FloatField()),
                ('visibility', models.FloatField()),
                ('wind_speed', models.FloatField()),
            ],
        ),
        migrations.CreateModel(
            name='WeatherForecast',
            fields=[
                ('weatherbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='flight_delay_prediction.WeatherBase')),
                # timestamp the forecast applies to
                ('dt', models.DateTimeField()),
            ],
            bases=('flight_delay_prediction.weatherbase',),
        ),
        migrations.CreateModel(
            name='WeatherSummary',
            fields=[
                ('weatherbase_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='flight_delay_prediction.WeatherBase')),
                # calendar month the summary aggregates over
                ('month', models.IntegerField()),
            ],
            bases=('flight_delay_prediction.weatherbase',),
        ),
    ]
|
import requests
import json
# JSON requires double-quoted strings; the original single-quoted payload
# raised json.JSONDecodeError on the very first line.
json_obj = json.loads('{"key": "value"}')
json_str = json.dumps(json_obj)

# The request target was previously undefined (NameError at the requests.get
# call); define it before use. TODO: replace with the real endpoint.
url = 'https://example.com/api'

# Headers asking the server for a JSON response.
headers = {
    # was misspelled 'Uer-Agent', so the user-agent header never took effect
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36',
    'Accept': 'application/json',
    'X-Request': 'JSON',
    'X-Requested-With': 'XMLHttpRequest'
}
r = requests.get(url=url, headers=headers)
|
import sys
sys.path.append('.')
from objp.util import dontwrap
class Simple:
    """Demo class whose methods are exposed to Objective-C through objp."""

    def hello_(self, name: str):
        """Greet *name*, then exercise the generated ObjC proxy."""
        print("Hello %s!" % name)
        print("Now, let's try a hello from ObjC...")
        from ObjCHello import ObjCHello
        bridge = ObjCHello()
        bridge.helloToName_(name)
        print("Oh, and also: the answer to life is %d" % bridge.answerToLife())
        print("Additionally, a dict of all answers supported by our awesome app: %r" % bridge.answersDict())

    def addNumbersA_andB_(self, a: int, b: int) -> int:
        """Return the sum of the two integers."""
        total = a + b
        return total

    def doubleNumbers_(self, numbers: list) -> list:
        """Return a new list with every element doubled."""
        doubled = []
        for value in numbers:
            doubled.append(value * 2)
        return doubled

    @dontwrap
    def foobar(self):
        """Intentionally excluded from the ObjC wrapper via @dontwrap."""
        print("This method shouldn't be wrapped by objp because we tell it so.")
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
__author__ = 'lish'
import random
def ProduceRandomStr(strlen):
    """
    Return a random string of ``strlen`` characters drawn from digits,
    ASCII letters, underscore and hyphen.

    :param strlen: number of characters to generate
    :return: the generated string
    """
    n_set = [chr(i) for i in range(48, 58)]        # '0'-'9'
    # upper bounds were 90/122, which silently excluded 'Z' and 'z'
    b_char_set = [chr(i) for i in range(65, 91)]   # 'A'-'Z'
    s_char_set = [chr(i) for i in range(97, 123)]  # 'a'-'z'
    o_char_set = [chr(95), chr(45)]                # '_' and '-'
    total_set = n_set + s_char_set + b_char_set + o_char_set
    # random.choice allows repeats, so strlen may exceed the alphabet size
    # (random.sample raised ValueError for strlen > 64 and never repeated
    # a character, which biased the output).
    return "".join(random.choice(total_set) for _ in range(strlen))
|
# Generated by Django 2.0.5 on 2018-06-03 15:26
import django.core.validators
from django.db import migrations, models
import re
class Migration(migrations.Migration):
    # Auto-generated migration: Russian verbose names for Dish ("Блюдо")
    # and a comma-separated-digits validator on the portion-size field.

    dependencies = [
        ('calculation', '0009_auto_20180603_1003'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='dish',
            options={'ordering': ['name'], 'verbose_name': 'Блюдо', 'verbose_name_plural': 'Блюда'},
        ),
        migrations.AlterField(
            model_name='dish',
            name='out',
            # regex accepts digits optionally separated by commas, e.g. "100,50"
            field=models.CharField(blank=True, max_length=255, null=True, validators=[django.core.validators.RegexValidator(re.compile('^\\d+(?:\\,\\d+)*\\Z'), code='invalid', message='Enter only digits separated by commas.')], verbose_name='выход порции'),
        ),
    ]
|
"""
find_element_by_id
find_element_by_name
find_element_by_xpath
find_element_by_link_text
find_element_by_partial_link_text
find_element_by_tag_name
find_element_by_class_name
find_element_by_css_selector
_________________________
from selenium.webdriver.common.by import By
example: button = browser.find_element(By.ID, "submit_button")
By.ID – поиск по уникальному атрибуту id элемента;
By.CSS_SELECTOR – поиск элементов с помощью правил на основе CSS;
By.XPATH – поиск элементов с помощью языка запросов XPath;
By.NAME – поиск по атрибуту name элемента;
By.TAG_NAME – поиск по названию тега;
By.CLASS_NAME – поиск по атрибуту class элемента;
By.LINK_TEXT – поиск ссылки с указанным текстом. Текст ссылки должен быть точным совпадением;
By.PARTIAL_LINK_TEXT – поиск ссылки по частичному совпадению текста.
"""
import time
from selenium import webdriver
from selenium.webdriver.common.by import By
try:
    # raw string so the backslashes in the Windows path are not treated as
    # escape sequences
    browser = webdriver.Chrome(r"f:\Дима\chromedriver.exe")
    link = "http://suninjuly.github.io/registration2.html"
    browser.get(link)
    # Fill in the required registration fields. Selenium-4 style locators:
    # the find_element_by_* helpers were removed in Selenium 4, and the
    # module already imports By for exactly this use.
    input1 = browser.find_element(By.CSS_SELECTOR, "input[placeholder='Input your first name']")
    input1.send_keys("Ivan")
    input2 = browser.find_element(By.CSS_SELECTOR, "input[placeholder='Input your last name']")
    input2.send_keys("Petrov")
    input3 = browser.find_element(By.CSS_SELECTOR, "input[placeholder='Input your email']")
    input3.send_keys("Smolensk@tut.by")
    # Submit the completed form
    button = browser.find_element(By.CSS_SELECTOR, "button.btn")
    button.click()
    # Wait for the confirmation page to load
    time.sleep(1)
    # The success message lives in the page's h1 element
    welcome_text_elt = browser.find_element(By.TAG_NAME, "h1")
    welcome_text = welcome_text_elt.text
    print(welcome_text)
    # Verify that registration actually succeeded
    assert "Congratulations! You have successfully registered!" == welcome_text
    print("Success!")
finally:
    # pause so the result can be inspected visually
    time.sleep(1)
    # always close the browser, even on failure
    browser.quit()
|
# Read n test cases; each case counts trees per species until a blank line.
n = int(input())
caso = 0
while caso != n:
    catalogo = {}
    total_arvores = 0
    # Problem caps: at most 10000 distinct species and 1000000 trees.
    # total_arvores was previously never incremented, so the tree cap was
    # dead code.
    while len(catalogo) <= 10000 and total_arvores < 1000000:
        especie = input()
        if len(especie) == 0:
            break
        # single lookup replaces the redundant not-in / in pair
        catalogo[especie] = catalogo.get(especie, 0) + 1
        total_arvores += 1
    for especie in catalogo:
        print(especie, ":", catalogo[especie])
    caso += 1
|
import time
from subset_sum import SubsetSum
# Benchmark SubsetSum over growing random instances.
# NOTE: Python 2 script (print statements).
s = SubsetSum()
print s
# Sizes n = 10**i * j, i.e. 1..5, 10..50, 100..500.
for i in range(0, 3):
    for j in range(1, 6):
        n = 10 ** i * j
        # write a random instance of size n with values in [0, 100]
        s.generar_problema_aleatorio("../in/subsetSum" + str(n) + ".txt", n, 0, 100)
        start = time.time()
        # resolver_problema(path, target, epsilon) — presumably target n*50
        # with tolerance 0.5; n*100 printed for reference. TODO confirm.
        print s.resolver_problema("../in/subsetSum" + str(n) + ".txt", n * 50, 0.5), n * 100
        end = time.time()
        print "tamanio", n, "tiempo", end - start
# One extra large instance (n = 1000), timed the same way.
n = 1000
s.generar_problema_aleatorio("../in/subsetSum" + str(n) + ".txt", n, 0, 100)
start = time.time()
print s.resolver_problema("../in/subsetSum" + str(n) + ".txt", n * 50, 0.5), n * 100
end = time.time()
print "tamanio", n, "tiempo", end - start
|
#!/usr/bin/python
def gcd(a, b):
    """Return the greatest common divisor of a and b (Euclid's algorithm)."""
    while b:
        a, b = b, a % b
    return a
# Read two integers from the user and report their GCD.
a = int(input("Enter the first number: "))
b = int(input("Enter the second number: "))
print("greatest common divisor: " + str(gcd(a, b)))
|
# This module contains the possible operations we can make on solutions and routes
import cvrp.utile as utile
import numpy as np
import cvrp.const as const
# Compute the demand of the route given
def route_demand(route):
    """Sum the demand of every customer on the route."""
    return sum(const.demand[customer] for customer in route)
# Compute the cost of the solution given
def cost_sol(solution, mode):
    """Total travel cost of a solution (a list of routes).

    mode == "Float" sums exact distances; mode == "Int" rounds each edge.
    Every route is closed with a last->first edge. Any other mode value
    silently contributes nothing — TODO confirm that is intended.
    """
    c = 0
    for r in solution:
        # Distances are floating numbers
        if mode == "Float":
            for i in range(len(r)-1):
                a = const.instance[r[i]]
                b = const.instance[r[i+1]]
                c += utile.distance(a, b)
            # close the loop: last customer back to the first
            c += utile.distance(const.instance[r[len(r)-1]],
                                const.instance[r[0]])
        # Distances are int
        elif mode == "Int":
            for i in range(len(r)-1):
                a = const.instance[r[i]]
                b = const.instance[r[i+1]]
                c += round(utile.distance(a, b))
            c += round(utile.distance(
                const.instance[r[len(r)-1]], const.instance[r[0]]))
    return c
# Verify if the solution is correct
# (ie if each route doesn't exceed his capacity)
def verification(sol):
    """Return True when no route's demand exceeds the vehicle capacity."""
    return all(route_demand(r) <= const.capacity for r in sol)
# Find the route, which contains the customer i
def find_route(i, routes):
    """Return the first route containing customer i, or None."""
    for route in routes:
        if i in route:
            return route
    return None
# Verify if the customer i is in the current solution
def is_in_route(i, routes):
    """Return True when customer i appears in any route of the solution."""
    return any(i in route for route in routes)
# Return a true copy of the sol given
def copy_sol(sol):
    """Return an independent two-level copy of the solution.

    The original round-tripped each route through numpy, which converted
    plain ints into numpy scalar types as a side effect; a plain list copy
    keeps the element types untouched and is cheaper.
    """
    return [list(route) for route in sol]
# Return the nearest route of the point given
def another_route(point, voisins, routes, fixedEdges, operator, mode):
    """Select a second route for a move involving ``point``.

    mode "RD" shuffles the neighbour list, "DE" keeps the given order.
    operator "CE" returns ((r1, r2), i, point); operator "EC" returns
    ((r1, r2), i). On failure the same route is returned twice with -1
    markers. NOTE(review): operator/mode semantics inferred from usage —
    confirm against the caller.
    """
    r1 = find_route(point, routes)
    # edges of `point` that must stay fixed (cannot be broken by the move)
    adja = utile.fixed_adjacents(point, fixedEdges)
    if mode == "RD":
        permut_voisins = utile.permut(voisins[point])
    elif mode == "DE":
        permut_voisins = voisins[point]
    if operator == "CE":
        for i in permut_voisins:
            r2 = find_route(i, routes)
            # we verify that the future demand on the route won't exceed his capacity
            if (r2 != r1 and i != 0) and (route_demand(r1)-const.demand[point]+const.demand[i] <= const.capacity) and (route_demand(r2)-const.demand[i]+const.demand[point] <= const.capacity):
                return ((r1, r2), i, point)
        # error case, we haven't found a second route, so no modifications
        return ((r1, r1), -1, -1)
    elif operator == "EC":
        for i in permut_voisins:
            r2 = find_route(i, routes)
            if r2 != r1 and i != 0 and len(adja) == 0 and route_demand(r2)+const.demand[point] <= const.capacity:
                return ((r1, r2), i)
        return (r1, r1), -1
# Normalize the solution given
def normalize_solution(sol):
    """Put every route and the whole solution into a canonical form.

    Routes whose second stop is larger than their last are reversed so a
    tour and its mirror compare equal, then the routes are sorted.
    NOTE(review): assumes each route starts at depot 0 and the reversal
    moves the depot to the end (hence the pop + re-insert) — confirm the
    route layout against callers.
    """
    for i in range(len(sol)):
        if sol[i][1] > sol[i][len(sol[i])-1]:
            # reverse the tour, drop the old closing stop, re-anchor at 0
            sol[i].reverse()
            sol[i].pop()
            sol[i].insert(0, 0)
    sol.sort()
    return sol
# Complete a partial solution by adding customers which haven't route.
# And verify if the solution given is correct
def complete(routes):
    """Repair a partial solution in place and return it.

    Prepends the depot (0) to every route, splits any route whose demand
    exceeds the capacity, adds a [0, p] route for every unrouted customer,
    and finally closes every route by appending the depot.
    """
    for i in range(len(routes)):
        routes[i].insert(0, 0)
    # Split overloaded routes until every route fits the capacity.
    while not verification(routes):
        for ri in routes:
            if route_demand(ri) > const.capacity:
                routes.remove(ri)
                d = 0
                i = 0
                nr1 = []
                # take customers into nr1 while capacity allows
                while i < len(ri) and d <= const.capacity:
                    nr1.append(ri[i])
                    i += 1
                    # NOTE(review): demand of ri[i] is added AFTER the
                    # increment — looks off by one and can IndexError when
                    # i == len(ri); confirm intended behavior.
                    d += const.demand[ri[i]]
                # the remainder becomes a new route anchored at the depot
                nr2 = [0] + ri[ri.index(ri[i-1]):]
                routes.append(nr1)
                routes.append(nr2)
    # Give every customer without a route its own depot round trip.
    for p in range(len(const.instance)):
        if not is_in_route(p, routes):
            routes.append([0, p])
    for i in range(len(routes)):
        routes[i].append(0)
    return routes
# Return all edges of the solution given
def all_edges(sol):
    """List every directed edge of each route, including the closing edge."""
    edges = []
    for route in sol:
        edges.extend(zip(route, route[1:]))
        edges.append((route[-1], route[0]))
    return edges
# Return the list of edges in common between two solutions
def common_edges(sol1, sol2):
    """Split the edges of two solutions into (shared, only-in-sol1, only-in-sol2).

    NOTE(review): utile.are_equal presumably treats (a, b) and (b, a) as
    the same edge — confirm; the dedup below checks both orientations.
    """
    E1 = all_edges(sol1)
    E2 = all_edges(sol2)
    E = []
    E_init = []
    E_final = []
    # shared edges, deduplicated in both orientations
    for i in E1:
        for j in E2:
            if utile.are_equal(i, j) and (i[0], i[1]) not in E and (i[1], i[0]) not in E:
                E.append(i)
    # edges present only in sol1
    for i in E1:
        if i not in E and (i[1], i[0]) not in E:
            E_init.append(i)
    # edges present only in sol2
    for j in E2:
        if j not in E and (j[1], j[0]) not in E:
            E_final.append(j)
    return E, E_init, E_final
|
import json
class ConfigrationReader(object):
    """Looks up service configuration entries and CloudFormation output ARNs.

    NOTE: Python 2 module (print statements). Class name typo kept for
    backward compatibility with existing callers.
    """
    def __init__(self, gitChangesFile, config, couldfomrationOutputs):
        # config: JSON with 'lambda' and 'emr' service lists;
        # couldfomrationOutputs: `describe-stacks`-style JSON whose first
        # stack's Outputs list maps OutputKey -> OutputValue.
        try:
            self.configuration = json.loads(open(config, "r").read())
            self.__outputFile = json.loads(open(couldfomrationOutputs, "r").read())
            self.output = self.__outputFile['Stacks'][0]['Outputs']
            self.gitFile = gitChangesFile
        except Exception as error:
            print "FATAL ERROR: %s" % str(error)
    def getConfiguration(self, resourceName):
        # Return the lambda or emr entry named resourceName (its 'arn'
        # field filled in from the stack outputs), or None.
        for each in self.configuration['lambda']:
            if each['serviceName'] == resourceName:
                each['arn'] = self.getARN(each['serviceName'])
                return each
        for each in self.configuration['emr']:
            if each['serviceName'] == resourceName:
                each['arn'] = self.getARN(each['serviceName'])
                return each
        return None
    def getARN(self, resourceName):
        # Return the stack output value whose key matches, or None.
        for each in self.output:
            if each['OutputKey'] == resourceName:
                return each['OutputValue']
        return None
    def getEMRJobConfiguration(self, emrJobName):
        # Return the job config named emrJobName, or None.
        # NOTE(review): both inner loops iterate eachEMR['oozieJobs']; the
        # second one looks like it was meant to scan a
        # 'datapipelineJobs' list instead — confirm before changing.
        for eachEMR in self.configuration['emr']:
            for eachOozieJob in eachEMR['oozieJobs']:
                if eachOozieJob['jobName'] == emrJobName:
                    return eachOozieJob
            for eachDatapipelineJob in eachEMR['oozieJobs']:
                if eachDatapipelineJob['jobName'] == emrJobName:
                    return eachDatapipelineJob
        return None
|
from setuptools import setup

setup(
    name='GameAI-Courswork',
    author='Ekrem Emre',
    description='',
    # 'sklearn' is a deprecated dummy package on PyPI that now refuses to
    # install; the real distribution name is 'scikit-learn'.
    install_requires=['pandas', 'scikit-learn', 'graphviz']
)
|
from initROOT import initROOT
import ROOT
from ROOT import gROOT, TCanvas, TF1,TFile,TTree
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab
from scipy.stats import norm
from scipy import stats
from array import array
from math import *
import cPickle as pickle
##################plot regession of two var###########
def plotReg(lt1,lt2,name='plot',xlabel=None,ylabel=None):
    """Scatter-plot lt2 against lt1, run a linear regression, save the plot.

    Returns [slope, intercept, r, p, std_err] from scipy's linregress,
    or None when the inputs are neither lists nor numpy arrays.
    NOTE: Python 2 module (print statements, cPickle, xrange).
    """
    print name
    if type(lt1) is list and type(lt2) is list :
        lt1=np.array(lt1)
        lt2=np.array(lt2)
    else :
        if type(lt1) is not np.ndarray and type(lt2) is not np.ndarray:
            return
    m, b, r_value, p_value, std_err = stats.linregress(lt1,lt2)
    print "m: "+str(m)
    print "b: "+str(b)
    print "r: "+str(r_value)
    print "p: "+str(p_value)
    print "e: "+str(std_err)
    plt.plot(lt1,lt2,'.')
    if xlabel is not None :
        plt.xlabel(xlabel)
    if ylabel is not None :
        plt.ylabel(ylabel)
    plt.title(name)
    plt.savefig("./figures/"+name,bbox_inches='tight')
    plt.clf()
    return [m,b,r_value,p_value,std_err]
##################plot regession of hist of varible###########
def plotHist(l,name,bins=100,xlabel=None,ylabel=None):
    """Histogram the values in l, save the figure, return (mean, std).

    Returns None when l is neither a list nor an ndarray.
    """
    if type(l) is not np.ndarray:
        if type(l)==list :
            l=np.array(l)
        else :
            return
    print name+" "+str(l.mean())+" "+str(l.std())
    plt.hist(l,bins)
    if xlabel is not None :
        plt.xlabel(xlabel)
    if ylabel is not None :
        plt.ylabel(ylabel)
    plt.title(name)
    plt.savefig("./figures/"+name,bbox_inches='tight')
    plt.clf()
    return l.mean(),l.std()
##################plot two var by binning on var###########
def plotSlice(pro,sel,name="",bins=61,start=-30,size=1,xlabel=None,ylabel=None):
    """Profile plot: mean of ``pro`` in equal-width bins of ``sel``.

    Error bars are 3 sigma on the mean; returns the list of bin means.
    NOTE(review): empty bins call mean() on an empty array, which yields
    nan (with a RuntimeWarning) — confirm this is acceptable downstream.
    """
    print name
    x=[]
    xBin=[]
    for i in xrange(bins):
        xBin.append([])
    # route each sample into its sel-bin; bin 0 and out-of-range dropped
    for i,p in enumerate(pro):
        b=int((sel[i]-start)/size)
        if b <bins and b > 0 :
            xBin[b].append(pro[i])
    proAve=[]
    yerr=[]
    for i in xrange(bins):
        num=len(xBin[i])
        npx=np.array(xBin[i])
        proAve.append(npx.mean())
        x.append(i*size+start)
        if num !=0 :
            # 3-sigma error on the mean
            error=3.0*npx.std()/sqrt(num)
            yerr.append(error)
        else :
            yerr.append(0)
    # plt.figure()
    plt.errorbar(x,proAve,yerr=yerr)
    if xlabel is not None :
        plt.xlabel(xlabel)
    if ylabel is not None :
        plt.ylabel(ylabel)
    plt.title(name)
    plt.savefig("./figures/"+name,bbox_inches='tight')
    plt.clf()
    # plt.show()
    return proAve
def plot2dHist(x,y,name='',norm=None,bins=40,xlabel=None,ylabel=None):
    """Draw a 2-D histogram of y (horizontal) vs x (vertical) and save it.

    ``norm`` is accepted for interface compatibility but currently unused.
    """
    plt.hist2d(y, x, bins=bins)
    if xlabel is not None :
        plt.xlabel(xlabel)
    # was 'if xlabel is not None' — a copy-paste bug that keyed the y-axis
    # label on the wrong argument
    if ylabel is not None :
        plt.ylabel(ylabel)
    plt.colorbar()
    plt.title(name)
    plt.savefig("./figures/"+name,bbox_inches='tight')
    plt.clf()
##############check if list###############
def checkList(l):
    """Coerce l to a numpy array: ndarrays pass through, lists convert.

    Returns None (after a warning) for anything else.
    """
    if type(l) is not np.ndarray:
        if type(l)==list :
            return np.array(l)
        else :
            # print() form is valid on Python 2 and 3 alike
            print("Opps not a list or np.ndarray")
            return
    else :
        return l
##############percent print###############
def printPercent(i,ent) :
    """Print progress (one decimal place) roughly every 10% of ``ent``.

    :param i: current iteration index
    :param ent: total number of iterations
    """
    # max(1, ...) guards the modulo below: for ent < 10 the old
    # int(.1*ent) was 0 and 'i % 0' raised ZeroDivisionError
    inval = max(1, int(.1*ent))
    if i%inval==0:
        # print() form is valid on Python 2 and 3 alike
        print(str(float(int(i*1000/ent))/10.0)+" %")
############## pickle file io###############
def readPickle(name):
    """Read every pickled object from file ``name`` into a list.

    NOTE(review): the file is opened in text mode 'r'; under Python 3
    pickle requires binary mode 'rb' — confirm before porting.
    """
    if type(name) is not str :
        print 'whats name?'
        return
    print "processing "+name
    data=[]
    infile = open(name, 'r')
    # keep unpickling until EOF; each load returns one stored object
    while True :
        try:
            p=pickle.load(infile)
            data.append(p)
        except EOFError :
            break
    infile.close()
    return data
def writePickle(name,arr):
    """Pickle each element of ``arr`` into file ``name`` (overwrites)."""
    print "Now creating "+name
    f=open(name,"wb")
    for c in arr :
        print str(c)+" is being written"
        pickle.dump(c,f)
    f.close()
##############cut###############
def cut (t,q):
    """Event selection: True when timing and charge cuts all pass.

    :param t: six time differences
    :param q: four pulse integrals
    Cuts: every |t| <= 30, each charge above lb = 40000, total charge
    at least 1e6 (units as stored upstream).
    """
    tot = sum(q)
    lb = 40000
    # any time difference outside the +/-30 window fails the event
    if any(abs(dt) > 30 for dt in t):
        return False
    # every per-channel charge must clear the lower bound; this also
    # rejects q[0] == 0 or q[2] == 0, which the original flagged with a
    # dead 'cond = False' assignment that was never read
    if not ((q[0] > lb and q[2] > lb) and (q[3] > lb and q[1] > lb)):
        return False
    if tot < 1000000:
        return False
    return True
##############Sets varibles###############
def getTots(q):
    """Return [sum of all charges, sum of the first two, sum of the rest]."""
    return [sum(q), sum(q[:2]), sum(q[2:])]
def getPara(pmt,i):
    """Extract charge, timing and log-ratio parameters for event i.

    Returns (q, t, lr): four pulse integrals, six channel-pair time
    differences, and six log10 charge ratios over the same pairs.
    NOTE(review): channel pairing inferred from the DeltaT calls —
    confirm numbering against the DAQ/pmt API.
    """
    t=[0,0,0,0,0,0]
    q=[0,0,0,0]
    lr=[0,0,0,0,0,0]
    # time differences between channel pairs
    t[0]=pmt.DeltaT(0,1)
    t[1]=pmt.DeltaT(2,3)
    t[2]=pmt.DeltaT(0,2)
    t[3]=pmt.DeltaT(0,3)
    t[4]=pmt.DeltaT(1,3)
    t[5]=pmt.DeltaT(1,2)
    # per-channel pulse integrals for event i
    for j in xrange(0,len(q)):
        q[j]=float(pmt.GetPulseIntegral(j,i))
    # log charge ratios over the same channel pairs as t
    lr[0]=log10(q[0]/q[1])
    lr[1]=log10(q[2]/q[3])
    lr[2]=log10(q[0]/q[2])
    lr[3]=log10(q[0]/q[3])
    lr[4]=log10(q[1]/q[3])
    lr[5]=log10(q[1]/q[2])
    return q,t,lr
##############print hist info plots###############
def printHist(dt=None,logRat=None, pulse=None):
    """Produce histogram (and regression) plots for each channel pair.

    NOTE(review): the loop iterates len(dt), so calling with dt=None
    raises TypeError despite the guard inside — confirm callers always
    pass dt.
    """
    print '#################print hist'
    name=['cell_1','cell_2','left','0_3cross','right ','1_2cross']
    if dt is not None :
        dt=checkList(dt)
    if logRat is not None :
        logRat=checkList(logRat)
    if pulse is not None :
        pulse=checkList(pulse)
    for i in xrange (0,len(dt)):
        if dt is not None:
            plotHist(dt[i],"Hist dt_"+name[i])
        if logRat is not None :
            plotHist(logRat[i],"Hist logRat_"+name[i])
        if logRat is not None and dt is not None :
            plotReg(logRat[i],dt[i],"Reg log_vs_dt_"+name[i])
        if pulse is not None :
            if i<len(pulse):
                print pulse[i].mean()
##############Corrections###############
def fixTime(t,mean,ptime=0) :
    """Apply calibration offsets ``mean`` and propagation time ``ptime``
    to the six time differences in ``t``, in place.

    Returns None early (with a warning) when t is not a list.
    """
    if type(t) is not list :
        # print() form is valid on Python 2 and 3 alike
        print("not at list")
        return
    t[0]-=mean[0]
    t[1]-=mean[1]
    t[2]+=(ptime-mean[2])
    t[3]+=(ptime-mean[2]-mean[1])
    t[4]+=(ptime-mean[2]-mean[1]+mean[0])
    t[5]+=mean[0]+ptime-mean[2]
    # alternative calibration keyed on mean[3], kept for reference:
    # t[2]+=(ptime-mean[3])
    # t[3]+=(ptime-mean[3]-mean[1])
    # t[4]+=(ptime-mean[3]-mean[1]+mean[0])
    # t[5]+=mean[0]+ptime-mean[3]
def fixGain(q,corr) :
    """Scale each charge in ``q`` by its per-channel correction, in place."""
    # range() replaces the Python-2-only xrange(); identical iteration
    for j in range(len(q)):
        q[j]=corr[j]*q[j]
|
# -*- coding: utf-8 -*-
'''
Created on Apr 23, 2021
@author: flba
'''
import pytest
from pages.gmail import PageGmail
from pages.yamm import Yamm
from pages.spreadsheet import GoogleSpreadSheet
from data.test_data import GMAIL_URL, SPREADSHEET_URL, SENDER_NAME, TEST_MAIL,\
TEST_MAIL_PASS, DRAFT_NAME, SPREADSHEET_NAME
from libs.commons import retry_until_func_passes, open_spreadheet
from libs.gmail_api_lib import GMailApi
class StepsYamm(PageGmail, GoogleSpreadSheet, Yamm):
    """BDD-style step definitions for the YAMM mail-merge UI tests.

    Mixes the page objects for Gmail, Google Spreadsheets and the YAMM
    add-on; fixtures below run around every test function.
    """
    @pytest.fixture(scope="function", autouse=True)
    def open_gmail(self):
        # Every test starts on Gmail in a maximized window.
        self.open_page(GMAIL_URL)
        self.maximize_window()
    @pytest.fixture(scope="function", autouse=True)
    def cleanup_after_test(self):
        # Teardown: drop the draft created by and_a_draft_message_is_saved.
        yield
        if hasattr(self, "draft_id"):
            self.and_draft_is_deleted()
    def given_login_into_google(self):
        # Interactive login steps are disabled — the browser session is
        # assumed to be pre-authenticated; only verify Gmail loaded.
        # self.set_username(TEST_MAIL)
        # self.click_next()
        # self.set_password(TEST_MAIL_PASS)
        # self.click_next()
        self.check_gmail_loaded()
    def and_a_draft_message_is_saved(self):
        # Create the merge draft through the Gmail API, not the UI.
        g_api = GMailApi()
        g_api.create_message("", "", "TestDraftAuto", "TestDraftAuto")
        self.draft_id = g_api.save_draft()
    def and_draft_is_deleted(self):
        GMailApi().delete_draft(self.draft_id)
    def and_populate_and_open_spreadsheets(self):
        # Reset rows 2-3 of the merge sheet to known recipient data, then
        # open the spreadsheet in a new tab.
        g_spreadheet = open_spreadheet(SPREADSHEET_NAME)
        g_spreadheet.delete_rows(2, 3)
        g_spreadheet.insert_row(["barbuflorinadrian@gmail.com", "Barbu Florin", "Mr", "Florin", "Barbu", "Flo", "Google", "71233412"], 2)
        g_spreadheet.insert_row([TEST_MAIL, "Florin Barbu", "Sir", "Barbu", "Florin", "B", "Apple", "32432423"], 3)
        self.open_new_tab()
        self.open_page(SPREADSHEET_URL)
    @retry_until_func_passes(20, 1)
    def and_open_yamm_mail_merge(self):
        # The add-ons menu loads asynchronously, hence the retry decorator.
        self.click_add_ons()
        self.click_yamm_addon()
        self.click_start_mail_merge()
    def then_mail_merge_is_started(self):
        # The YAMM dialog is nested three iframes deep.
        self.switch_to_first_iframe()
        self.switch_to_second_iframe()
        self.switch_to_third_iframe()
        self.click_continue()
        self.set_sender(SENDER_NAME)
        self.set_draft(DRAFT_NAME + "\n")
        self.uncheck_tracking_checkbox()
        self.click_send()
    def then_yamm_mail_is_present_in_inbox(self):
        self.open_last_yamm_mail()
        self.check_sender_correct()
    def and_mails_are_sent(self):
        self.is_mails_sent_message_present()
    def and_status_is_updated_in_the_spreadsheet(self):
        # YAMM writes EMAIL_SENT into the sheet's "Merge status" column.
        for row in open_spreadheet(SPREADSHEET_NAME).get_all_records():
            assert row["Merge status"] == "EMAIL_SENT"
    def then_mail_can_be_sent(self):
        self.click_send()
    def and_message_sent_popup_is_displayed(self):
        self.check_message_sent()
    def and_mail_contains_correct_data(self):
        # Verify the delivered mail via the Gmail API rather than the UI.
        g_api = GMailApi()
        messages = g_api.list_messages(query="subject: testdraftauto newer_than:1d")
        last_message = g_api.get_message(messages["messages"][0]["id"])
        assert last_message["snippet"] == DRAFT_NAME
        assert f"{SENDER_NAME} <{TEST_MAIL}>" in str(last_message["payload"]["headers"])
|
#_author_: Matas Kulikauskas
#imports:
import numpy as np
import matplotlib.pyplot as plt
from scipy.interpolate import interp1d
# Reproduce Figure 18.15: earthshine polarization degree vs Earth phase angle.
plt.title('Figure 18.15 (Earthshine)')
plt.xlabel('Earth phase angle (deg.)')
plt.ylabel('Polarization degree (%)')
# Series 1 data points (circles) with error bars.
vx1 = [49.1025641, 60.38461538, 71.66666667, 83.84615385, 95.76923077]
vy1 = [4.2, 6.311111111, 7.377777778, 7.6, 7.533333333]
yerr1 = [0.555555556, 0.666666667, 0.622222222, 0.533333333, 0.4]
plt.errorbar(vx1, vy1, yerr1, fmt='o', capsize=5)
# Series 2 (triangles).
vx2 = [48.97435897, 60.38461538, 71.66666667, 83.71794872, 95.76923077]
vy2 = [2.866666667, 4.333333333, 5.4, 6, 6.088888889]
yerr2 = [0.288888889, 0.555555556, 0.6, 0.46666666666666, 0.377777778]
plt.errorbar(vx2, vy2, yerr2, fmt='^', capsize=5)
# Series 3 (squares).
vx3 = [48.97435897, 60.51282051, 71.79487179, 83.84615385, 95.8974359]
vy3 = [2.2, 2.8, 3.866666667, 4.933333333, 5.377777778]
yerr3 = [0.33333333, 0.377777778, 0.355555555, 0.311111111, 0.222222222]
plt.errorbar(vx3, vy3, yerr3, fmt='s', capsize=5)
# Smooth cubic-spline interpolations through each series (the lx/ly lists
# duplicate the vx/vy values above).
lx1 = [49.1025641, 60.38461538, 71.66666667, 83.84615385, 95.76923077]
ly1 = [4.2, 6.311111111, 7.377777778, 7.6, 7.533333333]
xnewone = np.linspace(49.1025641, 95.76923077, num=100000,
                      endpoint=True)
f1 = interp1d(lx1, ly1, kind='cubic')
plt.plot(xnewone, f1(xnewone))
lx2 = [48.97435897, 60.38461538, 71.66666667, 83.71794872, 95.76923077]
ly2 = [2.866666667, 4.333333333, 5.4, 6, 6.088888889]
xnewtwo = np.linspace(48.97435897, 95.76923077, num=100000,
                      endpoint=True)
f2 = interp1d(lx2, ly2, kind='cubic')
plt.plot(xnewtwo, f2(xnewtwo))
lx3 = [48.97435897, 60.51282051, 71.79487179, 83.84615385, 95.8974359]
ly3 = [2.2, 2.8, 3.866666667, 4.933333333, 5.377777778]
xnewthree = np.linspace(48.97435897, 95.8974359, num=100000,
                        endpoint=True)
f3 = interp1d(lx3, ly3, kind='cubic')
plt.plot(xnewthree, f3(xnewthree))
# Export every series and its errors as CSV files.
# NOTE(review): single-letter names a..i shadow nothing critical here,
# but 'i' reuses a common loop-variable name — rename if the script grows.
a = np.asarray([vx1, vy1])
b = np.asarray([yerr1])
c = np.asarray([vx2, vy2])
d = np.asarray([yerr2])
e = np.asarray([vx3, vy3])
f = np.asarray([yerr3])
g = np.asarray([lx1, ly1])
h = np.asarray([lx2, ly2])
i = np.asarray([lx3, ly3])
np.savetxt("18_15_csv's/18_15_eb1.csv", a , delimiter=",", fmt='%f')
np.savetxt("18_15_csv's/18_15_yerr1.csv", b, delimiter=",", fmt='%f')
np.savetxt("18_15_csv's/18_15_eb2.csv", c , delimiter=",", fmt='%f')
np.savetxt("18_15_csv's/18_15_yerr2.csv", d, delimiter=",", fmt='%f')
np.savetxt("18_15_csv's/18_15_eb3.csv", e , delimiter=",", fmt='%f')
np.savetxt("18_15_csv's/18_15_yerr3.csv", f, delimiter=",", fmt='%f')
np.savetxt("18_15_csv's/18_15_l1.csv", g , delimiter=",", fmt='%f')
np.savetxt("18_15_csv's/18_15_l2.csv", h, delimiter=",", fmt='%f')
np.savetxt("18_15_csv's/18_15_l3.csv", i, delimiter=",", fmt='%f')
# Vertical marker at quadrature (phase angle 90 degrees).
plt.axvline(x=90)
plt.show()
|
from __future__ import print_function
from threading import Lock
import requests
import colorama
import sys
import os
try:
input = raw_input
except NameError:
pass
def clear():
    """Clear the terminal ('cls' on Windows, 'clear' elsewhere)."""
    command = 'cls' if os.name == 'nt' else 'clear'
    os.system(command)
def github_version():
    """Fetch the upstream version string, or 'error' on any failure."""
    url = "https://raw.githubusercontent.com/BitTheByte/YouTubeShop/master/version"
    try:
        return requests.get(url).text
    except Exception:
        return 'error'
def hotfix():
    """Download the upstream hotfix script; empty string on failure.

    SECURITY WARNING: the text returned here is passed to exec() at import
    time — remote code execution by design. Do not run this tool against
    an untrusted or potentially hijacked repository.
    """
    try:
        return requests.get("https://raw.githubusercontent.com/BitTheByte/YouTubeShop/master/lib/hotfix.py").text
    except Exception as e:
        return ''
# Startup: clear screen, enable ANSI colors, fetch upstream version.
clear()
colorama.init(autoreset=True)
print("YouTubeShop is loading..")
live_version = github_version()
# SECURITY: executes code downloaded from GitHub at startup (the "hotfix"
# mechanism). If the repository or connection is compromised this is
# arbitrary remote code execution.
exec(hotfix())
clear()
def banner():
    """Print the ASCII-art banner with local and upstream version numbers."""
    banner = """
    >>> ===================================================== <<<
    >>> <<<
    >>> __ _______ ____ _ _ ___ ____ <<<
    >>> \ \ / |_ _| / ___|| | | |/ _ \| _ \ <<<
    >>> \ V / | | \___ \| |_| | | | | |_) | <<<
    >>> | | | | ___) | _ | |_| | __/ <<<
    >>> |_| |_| |____/|_| |_|\___/|_| <<<
    >>> <<<
    >>> ===================================================== <<<
    >>> [DEV] : BitTheByte (Ahmed Ezzat) <<<
    >>> [GitHub] : https://www.github.com/bitthebyte <<<
    >>> +++++++++++++++++++++++++++++++++++++++++++++++++++++ <<<
    [!] Version::local - 12.8.3v
    [!] Version::github - {}
    """.format(live_version)
    print(banner)
# Single lock shared by all threads that append to the debug log.
lock = Lock()

def debug(t):
    """Append one line to py_debug.log; the lock serializes writers."""
    with lock:
        # context manager guarantees the handle is closed — the original
        # leaked one open file object per call
        with open("py_debug.log", 'a') as log_file:
            log_file.write(t + "\n")
def error(t):
    """Print message t as a red-tagged error line."""
    tag = colorama.Fore.RED + "[E] " + colorama.Fore.WHITE
    print(tag + t)
def info(t):
    """Print message t as a yellow-tagged info line."""
    tag = colorama.Fore.YELLOW + "[I] " + colorama.Fore.WHITE
    print(tag + t)
def ask_accounts_file():
    """Prompt until the user supplies an existing accounts-file path."""
    prompt = "{C0}[Q] {C1}Enter accounts[Email:Password] file path: ".format(
        C0=colorama.Fore.GREEN,
        C1=colorama.Fore.CYAN
    )
    while True:
        path = input(prompt)
        if os.path.isfile(path):
            return path
        error("Please check the file path again")
def ask_threads():
    """Prompt for a thread count; empty input falls back to the default 10."""
    while 1:
        threads = input("{C0}[Q] {C1}Set number of threads [{C3}Recommended: 10{C1}]: ".format(
            C0=colorama.Fore.GREEN,
            C3=colorama.Fore.RED,
            C1=colorama.Fore.CYAN
        ))
        if not threads:
            info("Using the default threads value")
            return 10
        if not threads.isdigit():
            # message previously read "Please enter a vaild intger"
            error("Please enter a valid integer")
        else:
            info("Threads = " + threads)
            return int(threads)
def ask_action_file():
    """Prompt until the user supplies an existing action-file path."""
    prompt = "{C0}[Q] {C1}Enter action file path: ".format(
        C0=colorama.Fore.GREEN,
        C1=colorama.Fore.CYAN
    )
    while True:
        path = input(prompt)
        if os.path.isfile(path):
            return path
        error("Please check the file path again")
def ask_action():
    """Ask the user to choose between like ('l') and subscribe ('s')."""
    while True:
        choice = input("{C0}[Q] {C1}Choose an option ({C3}l=like {C4}, {C3}s=subscribe{C1}): ".format(
            C0=colorama.Fore.GREEN,
            C1=colorama.Fore.CYAN,
            C3=colorama.Fore.LIGHTCYAN_EX,
            C4=colorama.Fore.WHITE
        )).lower()
        if 'like' in choice or choice == "l":
            info("Selected->Actions::Like")
            return "l"
        if 'subscribe' in choice or choice == "s":
            info("Selected->Actions::Subscribe")
            return "s"
        error("Please choose a valid option")
def read_acounts_file(path):
    """Yield (email, password) pairs from a file of Email:Password lines.

    Everything after the FIRST ':' belongs to the password, so passwords
    containing ':' survive intact. Undecodable bytes are ignored.
    """
    # 'with' closes the handle — the original leaked an open file object
    with open(path, "r", errors='ignore') as accounts:
        for line in accounts:
            email, _, password = line.strip().partition(":")
            yield (email, password)
def read_action_file(path):
    """Yield one stripped token per line of the action file."""
    # 'with' closes the handle — the original leaked an open file object
    with open(path, "r", errors='ignore') as actions:
        for line in actions:
            yield line.strip()
def show_status(login, failed, succ1, fail1):
    """Redraw the dashboard with the current login/action counters."""
    clear()
    banner()
    rows = [
        ("Successful logins", login),
        ("Failed logins", failed),
        ("Successful actions", succ1),
        ("Failed actions", fail1),
    ]
    screen_buffer = colorama.Fore.LIGHTBLACK_EX + "[!] Welcome to YoutubeShop dashboard\n"
    for label, value in rows:
        screen_buffer += "{C0}[{C1}*{C0}] {C2}{label}: {C3}{text}\n".format(
            C0=colorama.Fore.BLUE,
            C1=colorama.Fore.RED,
            C2=colorama.Fore.WHITE,
            C3=colorama.Fore.CYAN,
            label=label,
            text=value
        )
    print(screen_buffer)
|
import sys
import sqlite3
from PyQt5.QtWidgets import QApplication, QMainWindow, QTableWidget, \
QTableWidgetItem
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    # Appears to be generated by Qt Designer / pyuic5 — confirm before
    # hand-editing; regeneration would overwrite changes.
    def setupUi(self, MainWindow):
        """Build the widget tree: one table filling a 644x484 window."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(644, 484)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.tableWidget = QtWidgets.QTableWidget(self.centralwidget)
        self.tableWidget.setGeometry(QtCore.QRect(10, 10, 621, 421))
        self.tableWidget.setObjectName("tableWidget")
        self.tableWidget.setColumnCount(0)
        self.tableWidget.setRowCount(0)
        MainWindow.setCentralWidget(self.centralwidget)
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 644, 21))
        self.menubar.setObjectName("menubar")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Apply translatable strings (window title only)."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
class MainTable(QMainWindow, Ui_MainWindow):
    """Main window that displays the contents of coffee.db in a table."""
    def __init__(self):
        super().__init__()
        self.setupUi(self)
        self.update_table()
    def update_table(self):
        """Reload the coffee_info table from coffee.db into the widget."""
        con = sqlite3.connect('coffee.db')
        cur = con.cursor()
        # column names come from the table schema (index 1 of PRAGMA rows)
        head = [x[1] for x in cur.execute(
            """PRAGMA table_info(coffee_info)""").fetchall()]
        table_values = cur.execute(
            '''select * from coffee_info''').fetchall()
        con.close()
        self.tableWidget.setColumnCount(len(head))
        self.tableWidget.setHorizontalHeaderLabels(head)
        self.tableWidget.setRowCount(0)
        # fill the widget one row at a time, stringifying every cell
        for i, row in enumerate(table_values):
            self.tableWidget.setRowCount(self.tableWidget.rowCount() + 1)
            for j, elem in enumerate(row):
                self.tableWidget.setItem(i, j,
                                         QTableWidgetItem(str(elem)))
        self.tableWidget.resizeColumnsToContents()
if __name__ == '__main__':
    # Standard Qt bootstrap: create the app, show the window, run the loop.
    app = QApplication(sys.argv)
    ex = MainTable()
    ex.show()
    sys.exit(app.exec())
|
from sqlalchemy.orm import Session
from sqlalchemy.orm.scoping import instrument, makeprop
from sqlalchemy_auth import AuthException, ALLOW, DENY
class _BadgeContext:
    """
    Allows for `with session.switch_badge():` syntax.

    Captures the session's badge at construction time and restores it when
    the `with` block exits.
    """
    def __init__(self, session):
        self.session = session
        self.badge = session.badge

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.session.badge = self.badge
class AuthSession(Session):
    """
    A Session that carries an authorization badge.

    The badge gates inserts (`_save_impl`) and can be replaced temporarily
    via `switch_badge`. Baked queries are rejected because sqlalchemy_auth
    is not compatible with them. (The original docstring was truncated:
    "manages badge and .")
    """
    def __init__(self, badge=ALLOW, enable_baked_queries=False, *args, **kwargs):
        self.badge = badge
        kwargs['enable_baked_queries'] = enable_baked_queries
        super().__init__(*args, **kwargs)
        self._assert_no_baked_queries()

    def switch_badge(self, badge=ALLOW):
        """Swap in ``badge``; the returned context manager restores the old one."""
        context = _BadgeContext(self)
        self.badge = badge
        return context

    def _save_impl(self, state):
        """
        Inject data on Session.add()
        """
        # tested in auth_query_test.py:TestAuthBaseInserts.add()
        if self.badge is DENY:
            raise AuthException("Access is denied")
        if self.badge is not ALLOW:
            state.object.add_auth_insert_data(self.badge)
        super()._save_impl(state)

    def _assert_no_baked_queries(self):
        # truthiness check instead of the non-idiomatic '== True'
        if self.enable_baked_queries:
            raise AuthException('sqlalchemy_auth is not compatible with baked queries')
def instrument_scoped_session(scoped_session):
    """
    ScopedSession is unaware of badge and switch_badge; inform it.
    """
    scoped_session.badge = makeprop("badge")
    scoped_session.switch_badge = instrument("switch_badge")
|
#!/usr/bin/env python3
"""
Update the full files for each rarity.
"""
import glob
def main(rarity_file='rarities.yaml'):
    """
    Create full files for each rarity.

    Concatenates every ``*.dec`` file inside each ``<rarity>s/`` directory
    into a single ``<rarity>s.dec`` file next to it.

    NOTE: ``rarity_file`` is currently unused; kept with its default for
    backward compatibility with existing callers.
    """
    for rarity_dir in glob.glob('*s/'):
        lines = []
        for rare_file in glob.glob('{}*.dec'.format(rarity_dir)):
            with open(rare_file) as rfile:
                lines.extend(rfile.readlines())
        # skip rarities with no cards rather than writing an empty file
        if lines:
            with open('{}.dec'.format(rarity_dir[:-1]), 'w') as out_file:
                out_file.write(''.join(lines))

if __name__ == '__main__':
    main()
|
import asyncio
import logging
import json
from websockets.exceptions import InvalidState
_log = logging.getLogger(__name__)
async def keep_alive(websocket, ping_period=30):
    """Ping the websocket every ``ping_period`` seconds to keep it open.

    Stops (after logging the traceback) once the connection reaches an
    invalid state. Native coroutine: the old @asyncio.coroutine /
    yield-from form was removed in Python 3.11.
    """
    while True:
        await asyncio.sleep(ping_period)
        try:
            await websocket.ping()
        except InvalidState:
            _log.exception(
                '%s: Got exception when trying to keep connection alive, '
                'giving up.',
                websocket.name)
            return
def set_subscriptions(websocket, router, subscriptions, message, check_auth,
                      **kwargs):
    """Reconcile the client's subscriptions with the set in ``message.data``.

    Unsubscribes channels no longer requested, subscribes newly requested
    channels that pass ``check_auth``, and returns the resulting set.

    Fix: the auth-filtered set was previously computed but then ignored —
    every requested channel (authorized or not) was subscribed and returned.
    """
    desired = set(message.data)
    old_subscriptions = subscriptions - desired
    requested_subscriptions = desired - subscriptions
    # Keep only the requested channels this client may access.
    new_subscriptions = {channel for channel in requested_subscriptions
                         if check_auth(channel)}
    if old_subscriptions:
        _log.debug('%s: Unsubscribing from subscriptions %r...',
                   websocket.name, old_subscriptions)
        router.unsubscribe(websocket, *old_subscriptions)
    if new_subscriptions:
        _log.debug('%s: Subscribing to subscriptions %r...',
                   websocket.name, new_subscriptions)
        router.subscribe(websocket, *new_subscriptions)
    # The caller stores the returned set as the new subscription state.
    return (subscriptions - old_subscriptions) | new_subscriptions
def subscribe(websocket, router, subscriptions, message, check_auth, **kwargs):
    """Subscribe the websocket to each channel named in *message*.

    Channels must be non-None and pass ``check_auth``; failures are logged.
    The *subscriptions* set is updated in place.
    """
    channels = _get_channels_from_message(message, websocket)
    for channel in channels:
        if channel is not None and check_auth(channel):
            _log.debug('%s: Subscribing to %s', websocket.name, channel)
            router.subscribe(websocket, channel)
            subscriptions.add(channel)
        else:
            # Logger.warn is a deprecated alias; warning() is the real API.
            _log.warning('%s: Invalid channel or failed auth for %r',
                         websocket.name,
                         channel)
def unsubscribe(websocket, message, router, check_auth, subscriptions, **kwargs):
    """Unsubscribe the websocket from each channel named in *message*.

    Channels must be non-None and pass ``check_auth``; failures are logged.
    The *subscriptions* set is updated in place.
    """
    channels = _get_channels_from_message(message, websocket)
    for channel in channels:
        if channel is not None and check_auth(channel):
            _log.debug('%s: Unsubscribing from %s', websocket.name, channel)
            router.unsubscribe(websocket, channel)
            # discard(): dropping a channel we never tracked should not raise
            # (the original set.remove could KeyError here).
            subscriptions.discard(channel)
        else:
            # Logger.warn is a deprecated alias; warning() is the real API.
            _log.warning('%s: Invalid channel or failed auth for %r',
                         websocket.name,
                         channel)
def get_subscriptions(websocket, router, subscriptions, **kwargs):
    """Return the current subscriptions as a '#'-prefixed JSON list."""
    _log.debug('get_subscriptions, %r', subscriptions)
    payload = json.dumps(list(subscriptions))
    return '#' + payload
def _get_channels_from_message(message, websocket):
channels = message.data
if not isinstance(channels, (str, list)):
raise ChannelTypeError(
'Channel {0!r} is not a string or list'.format(channels))
if isinstance(channels, str):
channels = [channels]
return channels
class ChannelTypeError(TypeError):
    """Raised when a message's channel payload is neither str nor list."""
    pass
|
#Lauren Flanagan
#import the required libraries
import pandas as pd
import numpy
import numpy as np
import seaborn as sn
import matplotlib.pyplot as plt
import eli5
from sklearn.linear_model import LogisticRegression as logr
from sklearn.metrics import confusion_matrix, accuracy_score, precision_score, recall_score, f1_score
from sklearn.model_selection import KFold
from sklearn.preprocessing import MinMaxScaler
from eli5.sklearn import PermutationImportance
#***** assign values after train test split from average of corresponding heart disease true or false ********
# Read the complete heart-disease dataset from Excel.
data = pd.read_excel(r'../Dataset/heart_dataset_complete.xlsx')
# Drop features not used by the model.
data = data.drop(["age","fbs","trestbps","chol","restecg"],axis=1)
# Split the target from the rest of the data set, assign x and y
y_df = data.target.values.ravel()
x_df = data.drop(['target'], axis = 1)
# Make numpy arrays from the dataframes for sklearn/KFold indexing.
x = np.array(x_df)
y = np.array(y_df)
# One result column per logistic-regression trial (one per solver).
ls = ['LR1', 'LR2', 'LR3', 'LR4', 'LR5']
cmResults = pd.DataFrame(columns = ['LR1', 'LR2', 'LR3', 'LR4', 'LR5'])
clResults = pd.DataFrame(columns = ['LR1', 'LR2', 'LR3', 'LR4', 'LR5'])
solvers = ['newton-cg', 'lbfgs', 'liblinear', 'sag', 'saga']
# 5-fold split with shuffling.
# NOTE(review): newer scikit-learn requires shuffle as a keyword argument:
# KFold(5, shuffle=True) -- confirm against the installed version.
kf = KFold(5, True)
kf.get_n_splits(x)
# Train/evaluate one logistic regression per solver over the 5 folds.
for a in range (0,5):
    sol = solvers[a]#set solver type
    print("solver type is: ", sol)
    # NOTE(review): l is assigned once per solver from ls[b] with b reset to 0,
    # so every fold (and every solver) writes to column 'LR1'; the b=b+1 below
    # never feeds back into l. Confirm the intended column bookkeeping.
    b=0
    l=ls[b]
    # Enumerate splits
    for train_index, test_index in kf.split(x):
        X_train, X_test = x[train_index], x[test_index]
        Y_train, Y_test = y[train_index], y[test_index]
        # Normalize train data to [0, 1].
        # NOTE(review): the targets are also min-max scaled; for a 0/1 target
        # this is a no-op, otherwise it changes the labels -- confirm.
        norm = MinMaxScaler(feature_range=(0, 1))
        X_train = norm.fit_transform(X_train)
        Y_train = norm.fit_transform(Y_train.reshape(-1, 1))
        X_test = norm.fit_transform(X_test)
        Y_test = norm.fit_transform(Y_test.reshape(-1, 1))
        #build model
        model = logr(solver = sol) #solver : {‘newton-cg’, ‘lbfgs’, ‘liblinear’, ‘sag’, ‘saga’}
        model.fit(X_train, Y_train.ravel())
        Y_pred = model.predict(X_test)
        cm_LR= confusion_matrix(Y_test,Y_pred)
        # Separate to be stored in individual rows.
        LR_cm = (cm_LR[1][0], cm_LR[0][0], cm_LR[0][1], cm_LR[1][1]) #false negs, true neg, false pos, true pos
        cmResults[l] = LR_cm #store CM results
        # Record the classification report info for this fold.
        result_LR = (accuracy_score(Y_test,Y_pred), precision_score(Y_test,Y_pred), recall_score(Y_test,Y_pred), f1_score(Y_test,Y_pred))
        clResults[l] = result_LR
        b=b+1
    # Permutation importance of the last fold's model.
    perm = PermutationImportance(model, random_state = 1).fit(X_test, Y_test)
    eli5.show_weights(perm, feature_names = x_df.columns.tolist())
# Average the stored results over the 5 trials.
cmResults['AVG'] = cmResults.mean(axis=1)
clResults['AVG'] = clResults.mean(axis=1)
# Print average results of the confusion matrix and classification report.
print ("Average Confustion Matrix:")
print(cmResults['AVG'])
print ("average Classification results:")
print(clResults['AVG'])
# Plot the average confusion matrix as a heatmap.
ax= plt.subplot()
# Rows were stored as (FN, TN, FP, TP); rebuild the 2x2 matrix layout.
cm_average = [[round(cmResults['AVG'][1]), round(cmResults['AVG'][2])],[round(cmResults['AVG'][0]),round(cmResults['AVG'][3])]]
sn.heatmap(cm_average, annot=True, cmap="Blues")
# labels, title and ticks
ax.set_xlabel('Classifier Prediction')
ax.set_ylabel('True Value')
ax.set_title('Average Confusion Matrix')
ax.xaxis.set_ticklabels(['0', '1'])
ax.yaxis.set_ticklabels(['0', '1'])
# Work around the matplotlib 3.1.1 heatmap clipping bug by padding ylim.
b, t = plt.ylim() # discover the values for bottom and top
b += 0.5 # Add 0.5 to the bottom
t -= 0.5 # Subtract 0.5 from the top
plt.ylim(b, t) # update the ylim(bottom, top) values
plt.show()
|
from numpy import arange, linspace, mean
from scipy.stats import expon, zscore, norm
import matplotlib.pyplot as plt
from math import log10
# Draw the mean of 30 exponential samples `times` times, then standardize.
times = 10
l = 10
loc = 0
m = []
for i in arange(times):
    m.append(mean(expon(loc, l).rvs(size=30))) # mean of 30 draws from an exponential distribution (lambda = 10), appended to m
z = zscore(m) # standardize with z-scores (mean 0, std 1)
print(z)
# Repeat the same sampling for an arbitrary number of trials and plot it.
def test (times):
    """Plot histograms of `times` exponential sample means: raw values on the
    left, standardized z-scores (with the standard normal pdf) on the right."""
    t = times
    l = 10
    loc = 0
    m = []
    for i in arange(t):
        m.append(mean(expon(loc, l).rvs(size=30)))
    z = zscore(m)
    # Histogram bin count grows with the log of the number of trials.
    b = int(6 * log10(t))
    fig = plt.figure(figsize=(12, 6))
    ax1 = fig.add_subplot(1, 2, 1)
    ax1 = plt.hist(m, bins=b, facecolor='wheat')
    ax1 = plt.xlabel('m')
    ax1 = plt.ylabel('frequency')
    ax1 = plt.title(r'Histogram of Random Exponential ($\lambda = 10, size = $' + str(t) + ')')
    ax1 = plt.grid()
    x = linspace(-3, 3, 101)
    ax2 = fig.add_subplot(1, 2, 2)
    ax2 = plt.hist(z, bins=b, density=True, facecolor='skyblue')
    # Overlay the standard normal density for comparison (CLT check).
    ax2 = plt.plot(x, norm(0, 1).pdf(x), 'r--')
    ax2 = plt.xlabel('z')
    ax2 = plt.ylabel('density')
    ax2 = plt.title(r'Histogram of Random Exponential ($\lambda = 10, size = $' + str(t) + ')')
    ax2 = plt.grid()
    plt.show()
# Run the simulation (original comment said 30 runs; 1000 are requested here).
test(1000)
|
# -*- coding: utf-8 -*-
"""
Created on Fri Dec 11 20:33:43 2020
@author: Ayax
"""
import random
import array
import numpy
import pandas as pd
from deap import algorithms
from deap import base
from deap import creator
from deap import tools
# Load the distance matrix from a csv file.
matriz = pd.read_csv("Ejercicio1.csv", sep=',',header=None)
# Show the matrix.
print (matriz.values)
# Number of cities (index size).
NB_QUEENS = 4
caminos = matriz
# Distance travelled by the salesman.
# Objective function.
def evalPAV(individual):
    """Objective: total length of the tour encoded by *individual*.

    Sums the closing leg from the last city back to the first plus every
    consecutive leg. Returns a one-element tuple, as DEAP expects.
    """
    # Closing leg: last city back to the first.
    total = caminos[individual[-1]][individual[0]]
    # Remaining legs between consecutive cities.
    for origin, destination in zip(individual[:-1], individual[1:]):
        total += caminos[origin][destination]
    return total,
# Minimization: fitness weight of -1.
creator.create("FitnessMin", base.Fitness, weights=(-1.0,))
# An individual is an array of city indices.
creator.create("Individual", array.array, typecode='b', fitness=creator.FitnessMin)
# Genetic operators.
toolbox = base.Toolbox()
# Random permutation of the city indices.
toolbox.register("permutation", random.sample, range(NB_QUEENS), NB_QUEENS)
# Individual and population generation.
toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.permutation)
toolbox.register("population", tools.initRepeat, list, toolbox.individual)
# Objective function evaluation.
toolbox.register("evaluate", evalPAV)
toolbox.register("mate", tools.cxPartialyMatched)
toolbox.register("mutate", tools.mutShuffleIndexes, indpb=0.05)
toolbox.register("select", tools.selTournament, tournsize=3)
def main():
    """Run the GA (200 individuals, 100 generations) and return pop/stats/hof."""
    # Seed the RNG for reproducible runs.
    random.seed(64)
    # Initial population.
    pop = toolbox.population(n=200)
    # Best individual seen across the whole run.
    hof = tools.HallOfFame(1)
    # Basic per-generation fitness statistics.
    stats = tools.Statistics(lambda ind: ind.fitness.values)
    stats.register("Avg", numpy.mean)
    stats.register("Std", numpy.std)
    stats.register("Min", numpy.min)
    stats.register("Max", numpy.max)
    algorithms.eaSimple(pop, toolbox, cxpb=0.5, mutpb=0.2, ngen=100, stats=stats,
                        halloffame=hof, verbose=True)
    return pop, stats, hof
if __name__ == "__main__":
pop, log, hof = main()
print("Distancia mínima: ")
print(hof[0].fitness.values)
print("La mejor ruta:")
print(hof)
|
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import pytest
from pants.util.pip_requirement import PipRequirement
def test_parse_simple() -> None:
    """A plain name==version requirement parses into name, specs, and no URL."""
    req = PipRequirement.parse("Foo.bar==1.2.3")
    assert req.project_name == "Foo.bar"
    assert req.specs == [("==", "1.2.3")]
    assert req.url is None
@pytest.mark.parametrize(
    ("url", "expected_project", "expected_url"),
    (
        ("git+https://github.com/django/django.git@stable/2.1.x#egg=Django&a=b", "Django", ""),
        ("file:///here#egg=foo&a=b", "foo", ""),
        ("/path/to#egg=foo&a=b", "foo", "file:///path/to#egg=foo&a=b"),
    ),
)
def test_parse_old_style_vcs(url: str, expected_project: str, expected_url: str) -> None:
    """Old-style `#egg=` requirements parse into project name and URL."""
    req = PipRequirement.parse(url)
    assert req.project_name == expected_project
    assert req.specs == []
    # An empty expected_url means the requirement keeps the input URL.
    # The original `req.url == expected_url or url` parsed as
    # `(req.url == expected_url) or url`, which is always truthy, so the
    # assertion could never fail.
    assert req.url == (expected_url or url)
def test_parse_pep440_vcs() -> None:
    """A PEP 440 direct-reference (`name @ url`) VCS requirement parses."""
    req = PipRequirement.parse("Django@ git+https://github.com/django/django.git@stable/2.1.x")
    assert req.project_name == "Django"
    assert req.specs == []
    assert req.url == "git+https://github.com/django/django.git@stable/2.1.x"
def test_error() -> None:
    """Invalid requirement strings raise ValueError with a helpful message."""
    with pytest.raises(ValueError) as exc_info:
        PipRequirement.parse("not valid! === 3.1", description_of_origin="some origin")
    assert "Invalid requirement 'not valid! === 3.1' in some origin:" in str(exc_info.value)
    with pytest.raises(ValueError) as exc_info:
        # No egg= in the fragment.
        PipRequirement.parse("git+https://github.com/django/django.git@stable/2.1.x#a=b")
    assert "Invalid requirement 'git+https://github.com/django/django.git@stable/2.1.x#a=b'" in str(
        exc_info.value
    )
|
# Length unit conversion between metres, centimetres and millimetres.
value = float(input())
unit_from = input()
unit_to = input()
# Each supported (source, target) pair maps to its conversion; unsupported
# pairs -- including identical units -- leave the value unchanged, exactly
# like the original if/elif chain.
conversions = {
    ("m", "cm"): lambda v: v * 100,
    ("m", "mm"): lambda v: v * 1000,
    ("cm", "m"): lambda v: v / 100,
    ("cm", "mm"): lambda v: v * 10,
    ("mm", "m"): lambda v: v / 1000,
    ("mm", "cm"): lambda v: v / 10,
}
convert = conversions.get((unit_from, unit_to))
if convert is not None:
    value = convert(value)
print(f"{value:.3f}")
|
#!/usr/bin/python
import sys
import http_server
import proxy
import storage
import tiles
import wm_cache
_DEFAULT_PORT = 15000
def RunServer():
    """Start the tile/proxy HTTP server (Python 2 script).

    argv[1] is the path to the tile store, argv[2] the root of the hash-dir
    cache. Registers the cache and proxy handlers, then serves forever on
    port 15000.
    """
    tiles_path = sys.argv[1]
    hash_dir_root = sys.argv[2]
    server = http_server.MoreBaseHttpServer(_DEFAULT_PORT)
    # Local on-disk proxy cache, loaded before serving.
    proxy_cache = proxy.Cache('localcache')
    proxy_cache.Load()
    wmcache = storage.HashDirStorage(hash_dir_root)
    wmcache_handler = wm_cache.WMCacheHandler(wmcache)
    wmcache_handler.Register(server)
    tile_storage = tiles.TileStorage(tiles_path)
    host_handler = proxy.ProxyHandler(proxy_cache, tile_storage)
    host_handler.Register(server)
    print 'OK. Serving.'
    server.Serve()
def main():
    """Entry point: run the server until interrupted."""
    RunServer()
if __name__ == '__main__':
    main()
|
from django import template
register = template.Library()
@register.filter(name='chuyi')
def chuyi(value, arg):
    """Divide *value* by *arg* and render the result with two decimals."""
    return '%.2f' % (value / arg)
@register.filter(name='roundtwo')
def roundtwo(value):
    """Render *value* with two decimal places."""
    rendered = '%.2f' % value
    return rendered
|
from django.shortcuts import render
from django.http.response import HttpResponse
from .models import Members
from pages.views import index
# Create your views here.
# def index(req):
# return HttpResponse("<h1>hello</h1>")
# def index(req):
# return render(req, 'index.html')
def login(req):
    """Log a user in by checking userid/password against the Members table.

    On success, renders the index view with the user's name; on failure,
    re-renders the login page with an error message.
    NOTE(review): passwords are compared in plain text and every Members row
    is scanned per attempt — consider hashed passwords and a filtered query.
    """
    if req.method == 'POST':
        userid = req.POST.get('userid')
        userpw = req.POST.get('userpassword')
        res_data = {}
        members = Members.objects.all() # load every Members object
        for member in members:
            if userid == member.userid:
                if userpw == member.userpassword:
                    res_data['username'] = member.username
                    # res_data['pid'] = member.id
                    print('member pid : ', member.id)
                    print('로그인 성공')
                    #return render(req, 'index.html', res_data)
                    return index(req, res_data)
        # No matching id/password pair found: show the error message.
        res_data['res'] = '아이디가 존재하지 않거나 비밀번호가 틀렸습니다.'
        return render(req, 'login.html', res_data)
        # print(req.POST['userid'])
    return render(req, 'login.html')
def signup(req):
    """Create a Members row from the posted signup form.

    On POST, saves the new member and renders the login page; otherwise
    renders the signup form.
    NOTE(review): the password is stored in plain text — hash it (e.g. with
    Django's auth helpers) before saving.
    """
    if req.method == 'POST':
        userid = req.POST.get('userid')
        userpw = req.POST.get('userpassword')
        usergender = req.POST.get('usergender')
        username = req.POST.get('username')
        useremail = req.POST.get('useremail')
        userphone = req.POST.get('userphone')
        useraddress = req.POST.get('useraddress')
        member = Members(
            userid = userid,
            userpassword = userpw,
            username = username,
            usergender = usergender,
            useremail = useremail,
            userphone = userphone,
            useraddress = useraddress,
        )
        member.save()
        # res_data = {}
        # res_data['res'] = '회원가입을 축하드립니다!'
        print(req.POST['username'])
        return render(req, 'login.html')
    return render(req, 'signup.html')
|
# Generated by Django 2.2.1 on 2019-06-21 16:35
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Adds a 'cours' foreign key to the score model. default=0 assumes a
    # questi.cours row with pk 0 exists for pre-existing scores — TODO confirm.
    dependencies = [
        ('questi', '0019_auto_20190621_1833'),
    ]
    operations = [
        migrations.AddField(
            model_name='score',
            name='cours',
            field=models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='questi.cours'),
        ),
    ]
|
from django.urls import path
from . import views
# http://127.0.0.1:8000/about/
urlpatterns = [
    # NOTE(review): '' and 'intro/' both use name="intro"; reverse('intro')
    # resolves to whichever is registered last. Confirm the duplication is
    # intentional (e.g. give the root route its own name otherwise).
    path('', views.intro, name="intro"),
    path('intro/', views.intro, name="intro"),
    path('contact/', views.contact, name="contact"),
    path('faq/', views.faq, name="faq"),
    path('policy/', views.policy, name="policy"),
    path('terms/', views.terms, name="terms"),
]
|
# -*- coding: utf-8 -*-
"""Parsers and serializers for /decoder API endpoints."""
|
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) Qotto, 2019
"""BaseEventHandler
All event handler must be inherit from this class. Handle function was called by consumer on each received events.
For make an transaction in handle function return 'transaction' as string after end transaction otherwise return none.
"""
from typing import Union, Type
from tonga.models.handlers.base import BaseHandler
from tonga.models.records.event.event import BaseEvent
__all__ = [
'BaseEventHandler'
]
class BaseEventHandler(BaseHandler):
    """ Base class of all event handlers
    """
    @classmethod
    def handler_name(cls) -> str:
        """ Return the handler name, used by the serializer

        Raises:
            NotImplementedError: Abstract def

        Returns:
            str: name of the event this handler handles
        """
        raise NotImplementedError

    async def handle(self, event: Type[BaseEvent]) -> Union[str, None]:
        """ Called automatically by Tonga when an event with the matching name is received by the consumer

        Args:
            event (BaseEvent): Event received by the consumer

        Notes:
            If handle performs a transaction, return 'transaction' as a string once the transaction ends

        Raises:
            NotImplementedError: Abstract def

        Returns:
            None
        """
        raise NotImplementedError
|
import requests
from bs4 import BeautifulSoup
import urllib.request
#Added comments
def main_crawler(url):
    """Print every question link found on a Quora page.

    Fetches *url*, finds all anchors with class 'question_link', and prints
    each question's title and absolute URL.
    """
    source=requests.get(url)
    source=source.text
    soup=BeautifulSoup(source,"html.parser")
    for links in soup.findAll('a',{'class':'question_link'}):
        link=links.get('href')
        # Relative links are made absolute against quora.com.
        if(link[0]=='/'):
            link='https://www.quora.com'+str(link)
        title=links.string
        print('Q ',title)
        print(link)
        print('\n')
def input_quora():
    """Prompt for a question, build its Quora URL, and crawl the first
    related question page for links.

    Reads one line from stdin, converts spaces to hyphens (Quora's question
    slug format), fetches the question page, and passes the first
    'question_link' anchor found to main_crawler. Prints a message and
    returns if no related question is found (the original raised NameError
    on an undefined variable in that case).
    """
    query = input()
    # Spaces become hyphens in Quora question slugs.
    slug = query.replace(' ', '-')
    page_url = 'https://www.quora.com/' + slug
    page_source = requests.get(page_url).text
    soup = BeautifulSoup(page_source, "html.parser")
    first_link = None
    for anchor in soup.findAll('a', {'class': 'question_link'}):
        first_link = anchor.get('href')
        break
    if first_link is None:
        print('No related questions found.')
        return
    main_crawler('https://www.quora.com' + first_link)
input_quora()
|
"""
Edanur Demir
Utilities are defined in this code.
"""
import os
import csv
import torch
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.ticker as tick
from torchvision import datasets, transforms
from custom_eenet import CustomEENet
from eenet import EENet
plt.switch_backend("agg")
def load_dataset(args):
    """Build train/test DataLoaders plus random exit tags for args.dataset.

    Supported datasets: mnist, cifar10, svhn, imagenet, tiny-imagenet.
    NOTE(review): an unrecognized args.dataset falls through all branches and
    raises NameError on trainset below — consider an explicit error.
    Returns (train_loader, test_loader, exit_tags) where exit_tags holds one
    random exit index per training sample, batched like train_loader.
    """
    # Pin memory / extra workers only help when feeding a CUDA device.
    kwargs = {'num_workers': 2, 'pin_memory': True} if args.device == 'cuda' else {}
    if args.dataset == 'mnist':
        root = '../data/mnist'
        transform = transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize((0.1307,), (0.3081,))
            ])
        trainset = datasets.MNIST(root=root, train=True, download=True, transform=transform)
        testset = datasets.MNIST(root=root, train=False, download=True, transform=transform)
    elif args.dataset == 'cifar10':
        root = '../data/cifar10'
        # Random crop + flip augmentation on the training split only.
        trainset = datasets.CIFAR10(root=root, train=True, download=True,\
            transform=transforms.Compose([\
            transforms.RandomCrop(32, padding=4),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010))]))
        testset = datasets.CIFAR10(root=root, train=False, download=True,\
            transform=transforms.Compose([\
            transforms.ToTensor(),
            transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010))]))
    elif args.dataset == 'svhn':
        #def target_transform(target):
        #    return target[0]-1
        root = '../data/svhn'
        transform = transforms.Compose([\
            transforms.ToTensor(),
            transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])
        trainset = datasets.SVHN(root=root, split='train', download=True, transform=transform)
        #,target_transform=target_transform)
        testset = datasets.SVHN(root=root, split='test', download=True, transform=transform)
        #,target_transform=target_transform)
    elif args.dataset == 'imagenet':
        root = '../data/imagenet'
        trainset = datasets.ImageFolder(root=root+'/train', transform=transforms.Compose([\
            transforms.RandomResizedCrop(224),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])]))
        testset = datasets.ImageFolder(root=root+'/val', transform=transforms.Compose([\
            transforms.Resize(256),
            transforms.CenterCrop(224),
            transforms.ToTensor(),
            transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])]))
    elif args.dataset == 'tiny-imagenet':
        # Validation images ship flat; sort them into per-class folders first.
        create_val_img_folder()
        root = '../data/tiny-imagenet'
        trainset = datasets.ImageFolder(root=root+'/train', transform=transforms.Compose([\
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])]))
        testset = datasets.ImageFolder(root=root+'/val/images', transform=transforms.Compose([\
            transforms.ToTensor(),
            transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])]))
    #exit_tags = torch.randint(0, args.num_ee+1, )
    train_loader = torch.utils.data.DataLoader(trainset, batch_size=args.batch_size,\
        shuffle=True, **kwargs)
    test_loader = torch.utils.data.DataLoader(testset, batch_size=args.test_batch,\
        shuffle=False, **kwargs)
    # One random exit-block tag per training sample, batched to match.
    exit_tags = []
    for data, target in train_loader:
        exit_tags.append(torch.randint(0, args.num_ee+1, (len(target), 1)))
    return train_loader, test_loader, exit_tags
def create_val_img_folder(val_dir='../data/tiny-imagenet/val'):
    """Separate tiny-imagenet validation images into per-class sub folders.

    Reads ``val_annotations.txt`` (tab separated: image name, class id, ...)
    and moves each image from ``<val_dir>/images`` into
    ``<val_dir>/images/<class id>/``. Images already moved (or missing) are
    skipped.

    Args:
        val_dir: root of the validation split. The default matches the
            original hard-coded path, so existing callers are unaffected.
    """
    img_dir = os.path.join(val_dir, 'images')
    # 'with' guarantees the annotations file is closed even if a line raises
    # (the original used open/close and could leak the handle).
    val_img_dict = {}
    with open(os.path.join(val_dir, 'val_annotations.txt'), 'r') as file:
        for line in file:
            words = line.split('\t')
            val_img_dict[words[0]] = words[1]
    # Create folder if not present and move images into proper folders
    for img, folder in val_img_dict.items():
        newpath = os.path.join(img_dir, folder)
        os.makedirs(newpath, exist_ok=True)
        if os.path.exists(os.path.join(img_dir, img)):
            os.rename(os.path.join(img_dir, img), os.path.join(newpath, img))
def plot_history(args):
    """Plot training history charts from <results_dir>/history.csv.

    Produces three figures: losses vs epoch, accuracy/cost vs epoch, and
    accuracy vs flops. Duplicate epochs keep only the last record.
    """
    data = pd.read_csv(args.results_dir+'/history.csv')
    data = data.drop_duplicates(subset='epoch', keep="last")
    data = data.sort_values(by='epoch')
    # Loss curves (train/val/pred/cost) with their standard errors.
    title = 'loss of '+args.model+' on '+args.dataset
    xticks = data[['epoch']]
    yticks = data[['train_loss', 'train_loss_sem', 'val_loss', 'val_loss_sem',
                   'pred_loss', 'pred_loss_sem', 'cost_loss', 'cost_loss_sem']]
    labels = ('epochs', 'loss')
    filename = args.results_dir+'/loss_figure.png'
    plot_chart(title, xticks, yticks, labels, filename)
    # Validation accuracy and computational cost per epoch.
    title = 'val. accuracy and cost rate of '+args.model+' on '+args.dataset
    xticks = data[['epoch']]
    yticks = data[['acc', 'acc_sem', 'cost', 'cost_sem']]
    labels = ('epochs', 'percent')
    filename = args.results_dir+'/acc_cost_figure.png'
    plot_chart(title, xticks, yticks, labels, filename)
    # Accuracy as a function of flops (sorted by flop count).
    data = data.sort_values(by='flop')
    title = 'val. accuracy vs flops of '+args.model+' on '+args.dataset
    xticks = data[['flop', 'flop_sem']]
    yticks = data[['acc', 'acc_sem']]
    labels = ('flops', 'accuracy')
    filename = args.results_dir+'/acc_vs_flop_figure.png'
    plot_chart(title, xticks, yticks, labels, filename)
def plot_chart(title, xticks, yticks, labels, filename):
    """Draw one errorbar chart and save it to *filename*.

    Arguments are
    * title: title of the chart.
    * xticks: DataFrame with the x column and optionally a '<col>_sem' column.
    * yticks: DataFrame of y columns, each paired with a '<col>_sem' column.
    * labels: (xlabel, ylabel) tuple.
    * filename: output path for the saved figure.
    """
    _, axis = plt.subplots()
    axis.xaxis.set_major_formatter(tick.FuncFormatter(x_fmt))
    # Split x data into the tick values and (optional) their standard errors.
    xerr = None
    for key, value in xticks.items():
        if key.endswith('_sem'):
            xerr = value
        else: xtick = value
    # Integer-valued x axes (e.g. epochs) get integer tick locations.
    if all(float(x).is_integer() for x in xtick):
        axis.xaxis.set_major_locator(tick.MaxNLocator(integer=True))
    xlabel, ylabel = labels
    # NOTE(review): named min_x but holds the *mean* of the ticks; it only
    # selects the G/M/K unit suffix, so behavior may still be intended.
    min_x = np.mean(xtick)
    if min_x // 10**9 > 0:
        xlabel += ' (GMac)'
    elif min_x // 10**6 > 0:
        xlabel += ' (MMac)'
    elif min_x // 10**3 > 0:
        xlabel += ' (KMac)'
    # One errorbar series per non-sem y column.
    legend = []
    for key, value in yticks.items():
        if not key.endswith('_sem'):
            legend.append(key)
            ytick = value
            yerr = yticks[key+'_sem']
            plt.errorbar(xtick, ytick, xerr=xerr, yerr=yerr, capsize=3)
    plt.title(title)
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    plt.legend(legend, loc='best')
    plt.savefig(filename)
    plt.clf()
    print('The figure is plotted under \'{}\''.format(filename))
def display_examples(args, model, dataset):
    """display examples

    Arguments are
    * model: model object.
    * dataset: dataset loader object.

    Shows correctly predicted samples from the dataset, one 10x10 grid per
    early-exit block (rows = classes, columns = examples), saved as
    exitblock<i>.png under args.results_dir.
    """
    # images[exit_block][class] -> list of dataset indices (max 10 each).
    images = [[[] for j in range(10)] for i in range(args.num_ee+1)]
    model.eval()
    with torch.no_grad():
        for sample_idx, (data, target) in enumerate(dataset):
            data = data.view(-1, *args.input_shape)
            data, target = data.to(args.device), target.to(args.device).item()
            # The model returns (output, exit index, cost); the original
            # unpacked the exit index into the enumerate variable `idx` and
            # then appended that exit index, so the dataset lookup below
            # fetched the wrong samples. Keep the two indices separate and
            # store the sample index.
            output, exit_idx, _ = model(data)
            # get the index of the max log-probability
            pred = output.max(1, keepdim=True)[1].item()
            if pred == target and len(images[exit_idx][target]) < 10:
                images[exit_idx][target].append(sample_idx)
    for exit_idx in range(args.num_ee+1):
        fig, axarr = plt.subplots(10, 10)
        for class_id in range(10):
            for example in range(10):
                axarr[class_id, example].axis('off')
            for example in range(len(images[exit_idx][class_id])):
                axarr[class_id, example].imshow(
                    dataset[images[exit_idx][class_id][example]][0].view(args.input_shape[1:]))
        fig.savefig(args.results_dir+'/exitblock'+str(exit_idx)+'.png')
def save_model(args, model, epoch, best=False):
    """save model

    Arguments are
    * model: model object.
    * epoch: version number used in the saved filename.
    * best: when True, keep only this epoch's file, renamed to model.pt.

    Saves the trained model as <models_dir>/model.v<epoch>.pt.
    """
    # Create the folder if it does not exist. The original passed
    # args.model_dir (missing 's') to makedirs, raising AttributeError the
    # first time the directory was absent.
    if not os.path.exists(args.models_dir):
        os.makedirs(args.models_dir)
    filename = args.models_dir+'/model'
    if best is False:
        torch.save(model, filename+'.v'+str(epoch)+'.pt')
    else:
        # Prune every other checkpoint, then promote this one to model.pt.
        train_files = os.listdir(args.models_dir)
        for train_file in train_files:
            if not train_file.endswith('.v'+str(epoch)+'.pt'):
                os.remove(os.path.join(args.models_dir, train_file))
        os.rename(filename+'.v'+str(epoch)+'.pt', filename+'.pt')
def get_active_exit(args, epoch):
    """Return the early-exit index to train during *epoch*.

    Exits are cycled one per epoch from the last (args.num_ee) down to 0,
    wrapping every args.num_ee+1 epochs. (The original also computed an
    unused `quantize` value, removed here.)
    """
    sequential = (epoch-1) % (args.num_ee+1)
    return args.num_ee - sequential
def adaptive_learning_rate(args, optimizer, epoch):
    """adaptive learning rate

    Arguments are
    * optimizer: optimizer object.
    * epoch: the current epoch number when the function is called.

    Adjusts the learning rate following the ResNet paper schedules. Leaves
    the optimizer untouched for optimizer/dataset combinations without a
    schedule (the original fell through and raised UnboundLocalError there).
    """
    learning_rate = None
    # Default SGD: lr=? momentum=0, dampening=0, weight_decay=0, nesterov=False
    if args.optimizer == "SGD":
        # Assumed batch-size is 128. Converges within ~165 epochs.
        if args.dataset == "cifar10":
            learning_rate = 0.1
            # EENet-110 warms up at a lower rate for the first two epochs.
            if args.model[-3:] == "110" and epoch <= 2:
                learning_rate = 0.01
            # Divide by 10 every 100 epochs.
            if epoch % 100 == 0:
                learning_rate = 0.1 / (10**(epoch // 100))
        # Assumed batch-size is 256. Converges within ~150-160 epochs.
        elif args.dataset == "imagenet":
            learning_rate = 0.05 * 0.1**((epoch-1) // 30 + 1)
    # Default Adam: lr=0.001 betas=(0.9, 0.999), eps=1e-08, weight_decay=0, amsgrad=False
    # elif args.optimizer == "Adam":
    if learning_rate is None:
        return
    for param_group in optimizer.param_groups:
        param_group['lr'] = learning_rate
def print_validation(args, batch, exit_points=None):
    """print validation results

    Arguments are
    * batch: validation batch results (dict of per-batch metric lists).
    * exit_points: the number of samples exiting from each exit block.

    Prints averaged time/loss/accuracy; EENet models additionally get the
    average cost and the per-exit sample counts.
    """
    # print the validation results of epoch
    print(' Test avg time: {:.4f}msec; avg val_loss: {:.4f}; avg val_acc: {:.2f}%'
          .format(np.mean(batch['time'])*100.,
                  np.mean(batch['val_loss']),
                  np.mean(batch['acc'])))
    # detail print for EENet based models
    if exit_points is not None:
        print('\tavg val_cost: {:.2f}%; exits: <'.format(np.mean(batch['cost'])), end='')
        for i in range(args.num_ee+1):
            print('{:d},'.format(exit_points[i]), end='')
        print('>')
def save_history(args, record):
    """save a record to the history file"""
    # Keys are sorted so the columns always line up with the CSV header order.
    args.recorder.writerow([str(record[key]) for key in sorted(record)])
    args.hist_file.flush()
def close_history(args):
    """Close the history file, print the best record, return its epoch (1-based).

    The best record is the row with the lowest validation loss.
    """
    args.hist_file.close()
    args.hist_file = open(args.results_dir+'/history.csv', 'r', newline='')
    reader = csv.DictReader(args.hist_file)
    best_epoch = 0
    best = {}
    for epoch, record in enumerate(reader):
        # csv yields strings; compare losses numerically. The original
        # compared lexicographically, ranking e.g. '10.2' below '9.5'.
        if not best or float(record['val_loss']) < float(best['val_loss']):
            best = record
            best_epoch = epoch+1
    print('\nThe best avg val_loss: {}, avg val_cost: {}%, avg val_acc: {}%\n'
          .format(best['val_loss'], best['cost'], best['acc']))
    return best_epoch
def x_fmt(x_value, _):
    """x axis formatter: scale tick values to G/M/K units, one decimal."""
    for threshold, divisor in ((10**9, 10.**9), (10**6, 10.**6), (10**3, 10.**3)):
        if x_value // threshold > 0:
            return '{:.1f}'.format(x_value / divisor)
    return str(x_value)
|
import logging
log = logging.getLogger(__name__)
"""Classes that remember parameters during training, e.g.,
remember best model so far"""
class RememberBest():
    """Remember the epoch with the lowest value of a monitored channel and
    the model parameters at that epoch.

    Attributes:
    * chan_name: name of the monitored channel (e.g. a validation loss).
    * best_epoch: index of the best epoch seen so far.
    * lowest_val: lowest channel value seen so far.
    """
    def __init__(self, chan_name):
        self.chan_name = chan_name
        self.best_epoch = 0
        self.lowest_val = float('inf')

    def remember_epoch(self, monitor_chans, all_params):
        """Record the parameter values if the channel hit a new minimum."""
        # -1 due to doing one monitor at start of training
        # dict.values() is a view in Python 3; wrap in list() before indexing
        # (the original `.values()[0]` only worked on Python 2).
        i_epoch = len(list(monitor_chans.values())[0]) - 1
        current_val = monitor_chans[self.chan_name][-1]
        if current_val <= self.lowest_val:
            self.best_epoch = i_epoch
            self.lowest_val = current_val
            self.best_params = dict([(p, p.get_value()) for p in all_params])
            log.info("New best {:s}: {:5f}".format(self.chan_name, current_val))

    def reset_to_best_model(self, monitor_chans, all_params):
        """Truncate channels to the best epoch and restore the best params."""
        for key in monitor_chans:
            monitor_chans[key] = monitor_chans[key][:self.best_epoch+1]
        for p in all_params:
            p.set_value(self.best_params[p])
|
# -*- coding: utf-8 -*-
import logging
import os
import sys
import time
from pythonjsonlogger.jsonlogger import JsonFormatter
from sanic.log import DefaultFilter
import ujson
from jussi.typedefs import WebApp
# NOTE(review): %s is a platform-specific strftime extension (epoch seconds
# on glibc) -- confirm the intended fractional-seconds formatting.
LOG_DATETIME_FORMAT = r'%Y-%m-%dT%H:%M:%S.%s%Z'
# Force all timestamps to UTC for consistent log output.
os.environ['TZ'] = 'UTC'
time.tzset()
# JsonFormatter.converter = time.gmtime
# Log-record attributes allowed through to the JSON output.
SUPPORTED_LOG_MESSAGE_KEYS = (
    'levelname',
    'asctime',
    # 'created',
    # 'filename',
    # 'levelno',
    # 'module',
    'funcName',
    'lineno',
    'msecs',
    'message',
    'name',
    'timestamp',
    'severity'
    # 'pathname',
    # 'process',
    # 'processName',
    # 'relativeCreated',
    # 'thread',
    # 'threadName'
)
# Space-separated '%(key)' fields consumed by python-json-logger.
JSON_LOG_FORMAT = ' '.join(
    ['%({0:s})'.format(i) for i in SUPPORTED_LOG_MESSAGE_KEYS])
class CustomJsonFormatter(JsonFormatter):
    """JSON log formatter that renames asctime -> timestamp and
    levelname -> severity, and serializes records with ujson."""
    def add_fields(self, log_record, record, message_dict):
        super(
            CustomJsonFormatter,
            self).add_fields(
            log_record,
            record,
            message_dict)
        # Expose the formatted time under 'timestamp' instead of 'asctime'.
        if getattr(record, 'asctime', None):
            log_record['timestamp'] = record.asctime
        if 'asctime' in log_record:
            del log_record['asctime']
        # Expose the level name under 'severity' instead of 'levelname'.
        if getattr(record, 'levelname', None):
            log_record['severity'] = record.levelname
        if 'levelname' in log_record:
            del log_record['levelname']

    # pylint: disable=no-self-use
    def _jsonify_log_record(self, log_record):
        """Returns a json string of the log record."""
        return ujson.dumps(log_record)
# Logging configuration (logging.config.dictConfig schema, version 1).
LOGGING = {
    'version': 1,
    'filters': {
        # DefaultFilter selects records whose level number is in 'param'.
        'accessFilter': {
            '()': DefaultFilter,
            'param': [0, 10, 20]
        },
        'errorFilter': {
            '()': DefaultFilter,
            'param': [30, 40, 50]
        }
    },
    'formatters': {
        'simple': {
            '()': CustomJsonFormatter,
            'format': '%(asctime)s %(name) %(levelname) %(message)',
            'datefmt': LOG_DATETIME_FORMAT,
            'json_indent': None
        },
        'json_access': {
            '()': CustomJsonFormatter,
            'format':
            '%(asctime) %(name) %(levelname) %(host) ' +
            '%(request) %(message) %(status) %(byte)',
            'datefmt': LOG_DATETIME_FORMAT,
            'json_indent': None
        },
        'json_request': {
            '()': CustomJsonFormatter,
            'format': '%(asctime)s',
        },
        'json': {
            '()': CustomJsonFormatter,
            'format': JSON_LOG_FORMAT,
            'datefmt': LOG_DATETIME_FORMAT,
            'json_indent': None
        }
    },
    'handlers': {
        # All stream handlers write to stderr except the jussi ones, which
        # use StreamHandler's default stream.
        'internal': {
            'class': 'logging.StreamHandler',
            'filters': ['accessFilter'],
            'formatter': 'simple',
            'stream': sys.stderr
        },
        'accessStream': {
            'class': 'logging.StreamHandler',
            'filters': ['accessFilter'],
            'formatter': 'json_access',
            'stream': sys.stderr
        },
        'errorStream': {
            'class': 'logging.StreamHandler',
            'filters': ['errorFilter'],
            'formatter': 'simple',
            'stream': sys.stderr
        },
        'jussiStdOut': {
            'class': 'logging.StreamHandler',
            'formatter': 'json'
        },
        'jussiRequest': {
            'class': 'logging.StreamHandler',
            'formatter': 'json_request'
        }
    },
    'loggers': {
        'sanic': {
            'level': logging.INFO,
            'handlers': ['errorStream']
        },
        'network': {
            'level': logging.INFO,
            'handlers': []
        },
        'jussi': {
            'level': logging.INFO,
            'handlers': ['jussiStdOut']
        },
        'jussi_debug': {
            'level': logging.INFO,
            'handlers': ['jussiStdOut']
        },
        'jussi_request': {
            'level': logging.INFO,
            'handlers': ['jussiRequest']
        },
    }
}
def setup_logging(app: WebApp, log_level: str = None) -> WebApp:
    """Apply log levels from *log_level* or the LOG_LEVEL env var, attach the
    'jussi' logger to the app config, and return the app.

    NOTE(review): an invalid LOG_LEVEL env value raises AttributeError from
    getattr(logging, ...) -- confirm this fail-fast behavior is intended.
    """
    LOG_LEVEL = log_level or getattr(logging, os.environ.get('LOG_LEVEL', 'INFO'))
    LOGGING['loggers']['sanic']['level'] = LOG_LEVEL
    LOGGING['loggers']['network']['level'] = LOG_LEVEL
    LOGGING['loggers']['jussi']['level'] = LOG_LEVEL
    # Request logging can be tuned independently via REQUEST_LOG_LEVEL.
    LOGGING['loggers']['jussi_debug']['level'] = os.environ.get(
        'REQUEST_LOG_LEVEL', logging.INFO)
    LOGGING['loggers']['jussi_request']['level'] = LOG_LEVEL
    logger = logging.getLogger('jussi')
    logger.info('configuring jussi logger')
    app.config.logger = logger
    return app
|
import copy
import re
import nltk
import numpy as np
from HostileSet import *
from sklearn import feature_extraction
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
from scipy.stats import kstest
from nltk.corpus import stopwords
from nltk.sentiment.vader import SentimentIntensityAnalyzer
def tokenize_only(text):
    """Lower-cased word tokens of *text* that contain at least one letter.

    Tokenizes by sentence first, then by word, so punctuation becomes its own
    token; purely numeric/punctuation tokens are then filtered out.
    """
    tokens = [word.lower()
              for sent in nltk.sent_tokenize(text)
              for word in nltk.word_tokenize(sent)]
    return [tok for tok in tokens if re.search('[a-zA-Z]', tok)]
def fetch_tweets():
    """Read KashmirTwitterData.txt and return a dict of tweet id -> text.

    Each line is tab separated with the id in column 3 and the text in
    column 4; lines with fewer columns are skipped.
    """
    tweets = {}
    # 'with' guarantees the file is closed even if a line raises (the
    # original used open/close and could leak the handle on error).
    with open('KashmirTwitterData.txt', 'r') as handle:
        for line in handle:
            fields = line.split('\t')
            try:
                tweets[fields[3].strip()] = fields[4].strip()
            except IndexError:
                continue
    return tweets
def removeTfIdf():
    """Return tweets whose tf-idf cosine-distance profile marks them as
    outliers (Python 2 code: uses dict.iteritems and the module-global l).

    NOTE(review): uppr/lwr look swapped -- uppr is mean - 1.5*IQR (a lower
    bound) and lwr is mean + 1.5*IQR (an upper bound); the comparison still
    selects values outside [mean-1.5*IQR, mean+1.5*IQR], only the names
    mislead. Confirm the intended fence direction.
    """
    arr1=[]
    temp_sentences = []
    sentences = []
    l1={}
    newarr=[]
    nwarr=[]
    # Collect tweet texts from the global id->text dict.
    for key, value in l.iteritems():
        arr1.append(value)
    print(len(l))
    # Keep only tweets longer than 20 characters.
    for i in arr1:
        if len(i) > 20:
            sentences.append(i)
    #define vectorizer parameters
    tfidf_vectorizer = TfidfVectorizer(max_df=0.93, max_features=200000, min_df=0.07, stop_words='english', use_idf=True, tokenizer=tokenize_only, ngram_range=(1,5))
    tfidf_matrix = tfidf_vectorizer.fit_transform(sentences) #fit the vectorizer to articles
    terms = tfidf_vectorizer.get_feature_names()
    #get the distance between the articles
    dist = 1 - cosine_similarity(tfidf_matrix)
    # Mean distance of each tweet to all others.
    scores = []
    for i in range(len(sentences)):
        scores.append(sum(dist[i], 0.0) / (len(dist[i])))
    arr = copy.deepcopy(scores)
    arr = np.array(arr)
    iqr = np.percentile(arr, 75, interpolation= 'higher') - np.percentile(arr, 25, interpolation= 'lower')
    # Dump the raw score distribution for inspection.
    f = open("tf-idf distribution.txt", "w")
    n = len(scores)
    for i in range(n):
        f.write(str(scores[i]))
        f.write("\n")
    mean_dist = sum(scores)/n
    uppr=mean_dist - (1.5*iqr)
    lwr=mean_dist + (1.5*iqr)
    # Indices of tweets outside the IQR fences.
    for ele in range(len(arr)):
        if arr[ele]<uppr or arr[ele]>lwr:
            newarr.append(ele)
    for ele1 in newarr:
        nwarr.append(arr1[ele1])
    print(len(nwarr))
    # Keep only the tweets NOT flagged as outliers.
    for key, value in l.iteritems():
        if value in nwarr:
            pass;
        else:
            l1[key]=value
    print(len(l1))
    return l1
def hostilityfactor(x):
    """Return the fraction of hostile words among the kept tokens of x.

    ``x`` is a list of word tokens. Hostile words are counted against the
    module-global ``hostile`` set (uppercase match) BEFORE filtering. The
    list is then mutated in place (as in the original) to drop stopwords
    (module-global ``stop``, lowercase match) and empty tokens.

    Bug fixes vs. the original:
    - the original called ``x.remove()`` while iterating ``x``, which
      skips the element following each removal, so some stopwords and
      empty tokens survived; the list is rebuilt instead.
    - returns 0.0 when every token is filtered out, rather than raising
      ZeroDivisionError.
    """
    count = 0
    for token in x:
        if token.upper() in hostile:
            count += 1
    # Rebuild rather than remove-while-iterating; token.lower() == "" is
    # equivalent to token == "" (preserved from the original's two passes).
    kept = [token for token in x if token.lower() not in stop and token.lower() != ""]
    x[:] = kept  # preserve the in-place mutation visible to callers
    if not x:
        return 0.0
    return float(count) / float(len(x))
def Hostile():
    """Print the indices of the ~0.5% most hostile tweets in the module-global arr1.

    Scores every tweet with hostilityfactor() on its space-split tokens,
    then repeatedly extracts the index of the current maximum score.
    Fix: the original used Python-2-only statement prints (``print top``),
    inconsistent with the ``print(...)`` calls used elsewhere in this file;
    single-argument ``print(x)`` behaves identically on Python 2 and 3.
    """
    hostility_scores = []
    total = 0
    for tweet in arr1:
        tokens = tweet.split(' ')
        hostility_scores.append(hostilityfactor(tokens))
        total += 1
    top_n = total * 0.005  # keep roughly the top 0.5% of tweets
    print(top_n)
    top_indices = []
    # Take the current max score's index, then zero it out so the next
    # iteration finds the next-highest score.
    for _ in range(int(top_n) + 1):
        best = hostility_scores.index(max(hostility_scores))
        top_indices.append(best)
        hostility_scores[best] = 0.0
    print(top_indices)
if __name__=='__main__':
    # Pipeline: load tweets, drop TF-IDF outliers, print the most hostile
    # tweets, then run VADER sentiment over every tweet.
    l = fetch_tweets()
    # NOTE(review): l2 (the TF-IDF-filtered dict) is computed but never
    # used below; the sentiment pass runs on the UNfiltered set.
    l2 = removeTfIdf()
    arr1=[]
    for key, value in l.iteritems():  # Python 2 dict iteration
        arr1.append(value)
    stop = set(stopwords.words('english'))  # global consumed by hostilityfactor()
    Hostile()
    sid = SentimentIntensityAnalyzer()
    sentences = []
    sentences_string = ""  # NOTE(review): never used
    for i in arr1:
        sentences.append(i)
    sentiments = [None] * len(sentences)
    # VADER compound score in [-1, 1] for each tweet.
    for i in range(len(sentences)):
        sentiments[i] = sid.polarity_scores(sentences[i])["compound"]
    #most negative
    print(sentences[ sentiments.index(sorted(sentiments)[0])] , sorted(sentiments)[0])
    # NOTE(review): Python-2-only print statement below; the rest of this
    # block uses print(...) calls.
    print "-----------------------------------"
    #most positive
    print(sentences[ sentiments.index(sorted(sentiments)[len(sentiments)-1])], sorted(sentiments)[len(sentiments)-1])
    # NOTE(review): handle never closed; the last lines may not be flushed.
    f = open("sentiment distribution.txt", 'w')
    for i in range(len(sentiments)):
        f.write(str(sentiments[i]))
        f.write('\n')
|
import eons, esam
import logging
import pandas as pd
class Pandatum(esam.Datum):
    """A Datum that can round-trip its attributes through pandas structures."""

    def __init__(self, name=eons.INVALID_NAME()):
        # NOTE(review): 'name' is accepted but not forwarded to the base
        # class -- confirm whether esam.Datum.__init__ should receive it.
        super().__init__()

    def ToDataFrame(self):
        """Return this object's attributes as a DataFrame (attribute names as the index)."""
        return pd.DataFrame.from_dict(self.__dict__, orient='index')

    def FromDict(self, rhs):
        """Copy each key/value of 'rhs' onto this object, replacing spaces in keys with underscores."""
        for key, value in rhs.items():
            setattr(self, str(key).replace(' ', '_'), value)

    def FromDataFrame(self, dataFrame):
        """Populate attributes from the first record of 'dataFrame'.

        TODO: Throw error if more than 1 record in df.
        """
        dfDict = dataFrame.to_dict('records')[0]
        # Bug fix: iterating a dict directly yields only keys, so the
        # original 2-tuple unpacking raised at runtime; iterate items().
        for key, value in dfDict.items():
            setattr(self, key, value)
|
import numpy as np
from torch import tensor
from doa_math import tensor_angle
class ToleranceScore:
    """Accumulates angular-error ratios at several tolerance thresholds.

    CC/CX/XC/XX hold, per threshold, the accumulated ratios for the four
    combinations of class-snapped ('C') versus raw ('X') predictions and
    targets, as produced by angular_errors().
    """

    def __init__(self, thresholds, doa_classes):
        size = len(thresholds)
        self.CC = np.zeros(size)
        self.CX = np.zeros(size)
        self.XC = np.zeros(size)
        self.XX = np.zeros(size)
        self.thresholds = thresholds
        self.doa_classes = doa_classes

    def update(self, Yhat, Y):
        """Accumulate angular-error ratios for a batch of predictions Yhat vs. targets Y."""
        cc, cx, xc, xx = angular_errors(Yhat, Y, self.thresholds, self.doa_classes)
        # Bug fix: the accumulators are instance attributes; the original's
        # bare names (CC += cc, ...) raised NameError at runtime.
        self.CC += cc
        self.CX += cx
        self.XC += xc
        self.XX += xx

    def __repr__(self):
        return '\n'.join(["{}={}".format(p, self.__dict__[p]) for p in self.__dict__])
def ratios_less(X,thresholds):
return np.array([sum(X <= t) / float(len(X)) for t in thresholds])
def angular_errors(Yhat,Y,thresholds,doa_classes):
    """Compute cumulative angular-error ratios for raw and class-snapped DOAs.

    Returns (cc, cx, xc, xx): for each threshold, the fraction of samples
    whose angle error (via tensor_angle) is within it, for the four
    combinations of snapped ('c') vs. raw ('x') predictions and targets.

    NOTE(review): ``snap_all`` is not defined or imported anywhere in this
    file's visible scope -- confirm where it is supposed to come from.
    """
    Yhat = tensor(Yhat)
    Yhat_c = tensor(snap_all(Yhat,doa_classes))  # predictions snapped to DOA classes
    Y = tensor(Y)
    Y_c = tensor(snap_all(Y,doa_classes))  # targets snapped to DOA classes
    cc = ratios_less(tensor_angle(Yhat_c,Y_c),thresholds)
    cx = ratios_less(tensor_angle(Yhat_c,Y),thresholds)
    xc = ratios_less(tensor_angle(Yhat,Y_c),thresholds)
    xx = ratios_less(tensor_angle(Yhat,Y),thresholds)
    return cc,cx,xc,xx
'''
class SNRTestDatasets:
def __init__(self):
None
class SNRCurve:
def __init__(self,):
tolerance_scores = [ToleranceScore() for i in]
'''
|
# Demonstration of Python's bitwise operators on two small integers.
# Fix: the variables a and b were declared but every expression repeated
# the literals 4 and 11; use the variables (output is identical).
a = 4   # 4  = 0b0100
b = 11  # 11 = 0b1011
c = 0
c = a | b   # OR:  0b0100 | 0b1011 = 0b1111 = 15
print("Line1-Value of c is ", c)
c = a >> b  # right-shift 4 by 11 bits -> 0
print("Line2-Value of c is ", c)
c = a ^ b   # XOR: 0b0100 ^ 0b1011 = 0b1111 = 15
print("Line3-Value of c is ", c)
c = ~a      # NOT: ~4 = -5 (two's complement)
print("Line4-Value of c is ", c)
c = b & a   # AND: 0b1011 & 0b0100 = 0
print("Line5-Value of c is ", c)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
from girder.api import access
from girder.api.describe import Description
from girder.api.rest import Resource
from girder import logger
from girder.plugins.videospace import ImageFeatures
from .settings import PoTSetting
from girder.plugins.videospace import solr_documents_from_field
import json
import requests
import urllib
import numpy as np
class PoTImageSimilaritySearch(Resource):
    """Girder REST resource: similar-video search over a precomputed PoT
    similarity matrix.

    The matrix file is a CSV whose header row lists video names and whose
    body holds (upper-triangular) pairwise similarity scores. Python 2
    code (statement prints, urllib.unquote).
    """
    def __init__(self):
        self.resourceName = 'pot_similarity_search'
        self.route('GET', (), self.getPoTSimilaritySearch)
        # Load the similarity matrix once at resource construction.
        setting = PoTSetting()
        path_to_sim_mat = setting.get(setting.MATRIX_LOC_PROP)
        self.SOLR_ID_PREFIX = setting.get("IMAGE_SPACE_SOLR_PREFIX") + "/"
        print "Loading similarity matrix", path_to_sim_mat
        # Header row of the CSV (minus the first column) is the video list;
        # videos_to_idx maps video name -> row/column index in the matrix.
        self.videos_to_idx = None
        self.videos_list = None
        with open(path_to_sim_mat) as f:
            self.videos_list = f.readline().strip().split(",")[1:]
        self.videos_to_idx = dict([(x,i) for i,x in enumerate(self.videos_list)])
        # load data from formatted_similarity_calc.csv
        # skip header
        # skip first column so usecols=range(1 , num_videos),
        # paint only upper half filling_values=0)
        self.data = np.genfromtxt(path_to_sim_mat,
            delimiter=",", skip_header=1, usecols=range(1 , len(self.videos_list) + 1),
            filling_values=0)
        ## add matrix with it's transpose to fill lower half
        self.data = np.triu(self.data).T + np.triu(self.data)
        ## Setting diagonal to 0 so video is not evaluated against itself
        np.fill_diagonal(self.data, 0)
    @access.public
    def getPoTSimilaritySearch(self, params):
        # Thin public endpoint; all the work happens in _similarVideoSearch.
        return self._similarVideoSearch(params)
    getPoTSimilaritySearch.description = (
        Description('Searches for similar videos using PoT similarity')
        .param('url', 'URL of video file in solr index')
        .param('limit',
               'Number of videos to limit search to (defaults to 100)',
               required=False))
    def _similarVideoSearch(self, params):
        """Return the 'limit' most similar videos to the video named in params['url'].

        The video name is the last path segment of the (double-unquoted)
        URL; returns a solr-style {'numFound', 'docs'} dict, empty when
        the video is not in the matrix.
        """
        limit = params['limit'] if 'limit' in params else '100'
        limit = int(limit)
        # URL may be percent-encoded twice by the client, hence two unquotes.
        video_url = urllib.unquote(urllib.unquote(params["url"]))
        video_url = video_url.split("/")
        video_name = video_url[-1]
        if video_name not in self.videos_to_idx:
            print video_name, "Not found in computed matrix"
            return {
                'numFound': 0,
                'docs': []
            }
        video_idx = self.videos_to_idx[video_name]
        print video_name, video_idx
        #create copies of array so we don't disturb original array
        sim_score_sort = 0 + self.data[video_idx]
        videos_sorted = [] + self.videos_list
        #sort with similar index
        sim_score_sort, videos_sorted = (list(x) for x in zip(*sorted(zip(sim_score_sort, videos_sorted))))
        # Ascending sort, so the most similar videos are at the end.
        results = videos_sorted[-limit:]
        return {
            'numFound': len(results),
            'docs': solr_documents_from_field("id", [self.SOLR_ID_PREFIX + res_video_name for res_video_name in results ])
        }
|
import tensorflow as tf
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn import preprocessing
# Exercise (translated from the Korean below): read trees.csv and predict
# Volume for Girth 8.8, 10.5 and Height 63, 72 (TensorFlow / Keras).
# NOTE: uses tf.placeholder / tf.contrib -- requires TensorFlow 1.x.
'''
1. trees.csv를 읽어들여서 아래에 대해
Volume을 예측해 보세요.(텐서, 케라스)
Girth 8.8, 10.5
Height 63, 72
'''
data = np.loadtxt("../../../data/trees.csv", delimiter=",", skiprows=1, dtype=np.float32)
# Min-max scale all three columns (Girth, Height, Volume) to [0, 1].
total_scale = preprocessing.MinMaxScaler()
dataN = total_scale.fit_transform(data)
x_data = dataN[:, :-1]  # features: Girth, Height
y_data = dataN[:, -1:]  # label: Volume
print(x_data)
print(y_data)
print(x_data.shape)
print(y_data.shape)
# W = tf.Variable(tf.random_uniform([2, 1]))
# b = tf.Variable(tf.random_uniform([1]))
W = tf.get_variable(name="w1", shape=[2, 1], initializer=tf.contrib.layers.xavier_initializer())
b = tf.get_variable(name="b1", shape=[1], initializer=tf.contrib.layers.xavier_initializer())
X = tf.placeholder(dtype=tf.float32, shape=[None, 2])
# Y = tf.constant(y_data, dtype=tf.float32)
Y = tf.placeholder(dtype=tf.float32, shape=[None, 1])
# Linear regression: hypothesis, squared-error cost, Adam optimizer.
hx = tf.matmul(X, W) + b
cost = tf.reduce_mean(tf.square(hx - Y))
optimizer = tf.train.AdamOptimizer(0.1)
train = optimizer.minimize(cost)
sess = tf.Session()
init = tf.global_variables_initializer()
sess.run(init)
# Train for 1000 steps, logging the cost every 100 steps.
for i in np.arange(1000):
    _cost, _train = sess.run([cost, train], feed_dict={X: x_data, Y: y_data})
    if not i % 100:
        print(i, _cost)
print(sess.run(W))
print(sess.run(b))
# print(sess.run(hx, feed_dict={X: np.array([[8.8, 63], [10.5, 72]])}))
# Normalization: the scaler was fit on N x 3 data, so a placeholder is
# appended as the last column before transforming, then dropped again.
# NOTE(review): passing None inside a numeric row to transform() is
# fragile -- confirm it works with the installed scikit-learn version.
predict_data = total_scale.transform([[8.8, 63, None], [10.5, 72, None]])
predict_data = predict_data[:, :-1]
predict_result = sess.run(hx, feed_dict={X: predict_data})
print(predict_result)
# Denormalization: map the scaled prediction back to the original Volume scale.
label_scale = preprocessing.MinMaxScaler()
yN = label_scale.fit_transform(data[:, -1:])
print(label_scale.inverse_transform(predict_result))
|
import BayesianNetwork;
import edge;
class AveragedBayesianNetwork:
    """Model-averaged Bayesian network over a fixed topological ordering.

    Each call to addModel() learns one network structure via a greedy K3
    search and folds its edges into per-edge counts; an edge is 'learned'
    once it appears in at least 30% of the models considered so far.
    """

    def __init__(self, topologicalOrdering):
        self.topologicalOrdering = topologicalOrdering
        # Count, for every candidate edge (parent earlier in the ordering
        # than child), how many learned models contained it.
        self.edgeCounts = {}
        self.numModelsConsidered = 0
        for childIndex in range(len(topologicalOrdering)):
            for parentIndex in range(childIndex):
                self.edgeCounts[edge.Edge(topologicalOrdering[parentIndex],
                                          topologicalOrdering[childIndex])] = 0

    def incrementEdgeCount(self, startVertex, endVertex):
        """Increment the count of the tracked edge whose endpoints match by name."""
        for e in self.edgeCounts.keys():
            if e.startVertex.name == startVertex.name and e.endVertex.name == endVertex.name:
                self.edgeCounts[e] += 1

    def addModel(self, data):
        """Learn one model from 'data', fold its edges into the counts, and
        rebuild every vertex's parent list from the averaged edges."""
        self.numModelsConsidered += 1
        newModel = BayesianNetwork.BayesianNetwork(self.topologicalOrdering)
        self.learnEdgesByK3(data, newModel)
        for vert in self.topologicalOrdering:
            vert.parents = []
        # 'modelEdge' rather than 'edge' to avoid shadowing the imported module.
        for modelEdge in newModel.edges:
            self.incrementEdgeCount(modelEdge.startVertex, modelEdge.endVertex)
        learnedEdges = self.getLearnedEdges()
        for e in learnedEdges:
            e.endVertex.parents.append(e.startVertex)
        for v in self.topologicalOrdering:
            print("vertex " + v.name + " has parents ")
            for p in v.parents:
                # Bug fix: the original printed the literal string "p.name".
                print(p.name)

    def getLearnedEdges(self):
        """Return the edges present in at least 30% of the models considered so far."""
        learnedEdges = []
        print(self.numModelsConsidered)
        for candidate, numModelsWithEdge in self.edgeCounts.items():
            if numModelsWithEdge >= 0.3 * self.numModelsConsidered:
                learnedEdges.append(candidate)
        return learnedEdges

    def updateCPTNewModel(self, data):
        """Refresh every vertex's conditional probability tables from 'data'."""
        for vertex in self.topologicalOrdering:
            vertex.setupCPTs(data)

    def edgePresent(self, potentialEdge, model):
        """Return True if 'model' already contains an edge with the same endpoint names."""
        for e in model.getEdges():
            if (e.startVertex.name == potentialEdge.startVertex.name
                    and e.endVertex.name == potentialEdge.endVertex.name):
                return True
        return False

    def learnEdgesByK3(self, data, newModel):
        """Greedy K3 structure search.

        For each vertex (in topological order), repeatedly add the single
        parent edge that most improves the model score, stopping when no
        addition increases it.
        """
        for index in range(1, len(self.topologicalOrdering)):
            while True:
                currVertex = self.topologicalOrdering[index]
                self.updateCPTNewModel(data)
                currScore = newModel.scoreModel(data)
                maxScoreIncreasingAddition = None
                currMaxScore = currScore
                for parentIndex in range(index):
                    potentialParent = self.topologicalOrdering[parentIndex]
                    potentialEdgeAddition = edge.Edge(potentialParent, currVertex)
                    if not self.edgePresent(potentialEdgeAddition, newModel):
                        # Trial-add the edge, rescore, then roll it back.
                        newModel.addEdge(potentialParent, currVertex)
                        self.updateCPTNewModel(data)
                        modelScore = newModel.scoreModel(data)
                        if modelScore > currMaxScore:
                            currMaxScore = modelScore
                            maxScoreIncreasingAddition = potentialParent
                        newModel.removeEdge(potentialParent, currVertex)
                # Identity comparison instead of the original '== None'.
                if maxScoreIncreasingAddition is None:
                    break
                newModel.addEdge(maxScoreIncreasingAddition, currVertex)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.