text
stringlengths 8
6.05M
|
|---|
#!/usr/bin/env python
# ----------------------------------------------------------
# Controller File for FSX
# ----------------------------------------------------------
from PySimConnect import SimConnect, DataDefinition
import FSXdef
import time
import logging
# Import the config file (config.py); inside a py2exe bundle it lives one
# directory above the single Library.zip sys.path entry.
try:
    import config
except ImportError:
    # Bug fix: sys and os were used here without ever being imported,
    # so this fallback path raised NameError instead of finding config.py.
    import sys
    import os
    # Add one level up from the Library.zip directory to the path so the
    # retried import can succeed.
    sys.path.append(os.path.split(sys.path[0])[0])
    import config
class control_c(object):
    """Read and write data to FSX over two SimConnect links.

    One link ('s') carries data definitions/requests, the other ('sevent')
    carries events.  process() is meant to be polled in a loop: it services
    incoming data while connected and retries the connection (with a
    back-off) while it is not.
    """
    def __init__(self, variables, mod_data, sim):
        # variables: handed through to FSXdef.setup()/setup_events()
        # mod_data : object whose comp() runs after each receive cycle
        # sim      : mapping with 'mode', 'IP' and 'port' keys
        self.mod_data = mod_data
        self.connected = False          # True while both links are up
        self.desire_connect = True      # cleared only by quit()
        self.variables = variables
        self.last_connect_attempt = 0   # time.time() of the last connect()
        self.nodata = True              # True until a data frame arrives
        self.nodata_time = 0            # time.time() of the last data frame
        self.mode = config.modes[sim['mode']]
        #self.s = SimConnect('GlassServer', self.mode, True)
        #self.sevent = SimConnect('GlassServer Event', self.mode, False)
        #self.addr = '192.168.1.46'
        self.addr = sim['IP']
        #self.port = 1500
        self.port = int(sim['port'])
        #Add definition's
        #self.s.definition_0 = self.s.create_DataDefinition(2)
        #self.connect()
        time.sleep(0.01)
    def init_comm(self):
        # Before connecting, (re)create both SimConnect endpoints.
        self.s = SimConnect('GlassServer', self.mode, True)
        self.sevent = SimConnect('GlassServer Event', self.mode, False)
    def close_comm(self):
        # Shut down both sockets and mark the link as down.
        self.s.quit()
        self.sevent.quit()
        self.connected = False
        logging.info("GlassServer - SimConnect Comm Closed")
    def quit(self):
        # Permanent shutdown: clear the reconnect wish before closing so
        # process() stops retrying.
        self.desire_connect = False
        self.last_connect_attempt = time.time()
        self.close_comm()
    def connect(self):
        # Create both links and try to connect them; on success install the
        # data/event definitions and request the first data frame.
        self.init_comm()
        self.connected = True
        self.desire_connect = True
        logging.info('SimConnect Connect: %r : %r' , self.addr, self.port)
        if not self.s.connect(self.addr, self.port, True):
            self.connected = False
        if not self.sevent.connect(self.addr, self.port, False):
            self.connected = False
        self.last_connect_attempt = time.time()
        if self.connected == True:
            FSXdef.setup(self.s,self.variables)
            FSXdef.setup_events(self.sevent, self.variables)
            self.request_data()
            logging.info("Connection to FSX Succeded")
        else:
            logging.info("Connection to FSX Failed")
    def request_data(self):
        # One-shot request for definition_0 data; re-issued by
        # decode_input() every time a frame arrives.
        self.s.definition_0.request(4, DataDefinition.USER, DataDefinition.ONCE, interval = 0, flag = 0)
        #self.s.definition_0.request(4, DataDefinition.USER, DataDefinition.SIM_FRAME, interval = 1, flag = 0)
    def decode_input(self, data_in):
        # If the high-priority definition_0 frame is present, immediately
        # request the next one and reset the no-data watchdog.
        if self.s.definition_0.id in data_in: #Define ID is high priority data, if received then compute, and request another.
            #start_time = 0.0
            self.request_data()
            #self.comp() # Main computation loop
            self.nodata = False #Reset no-data boolean
            self.nodata_time = time.time()
        else:
            diff = time.time() - self.nodata_time
            #if diff > 5.0: #If no data for more than 5 seconds, stop socket.
            #self.s.client.go = False
            if diff > 2.0: #If no data for 2 seconds, flag it.
                self.nodata = True
                #Request more data from FSX (This was causing multiple requests, removed for now)
                # self.request_data()
                # self.nodata_time +=2 #Reset timer so request again in 2 seconds.
    def calc_status_message(self):
        # Human-readable connection state for display purposes.
        if self.connected:
            return ("Connected")
        elif self.desire_connect: #Not connected, but wanting to be connected
            return ("Connecting")
        else: #Disconnecting.
            return ("Disconnected")
    def process(self):
        # Main polling entry point.  While connected: verify both sockets
        # are still alive, receive, and run the computation step.  While
        # disconnected (but desired): retry with a 5s/10s back-off.
        if self.connected:
            if ((self.s.connected() == False) or (self.sevent.connected() == False)): #Problem with socket, socket has shutdown.
                self.close_comm() #Reset comm, to try a reconnect.
            else:
                self.decode_input(self.s.receive())
                self.mod_data.comp()
        elif self.desire_connect == True: #not connected
            if (time.time() - self.nodata_time) > 5.0: #Wait 5 sec to reconnect
                if (time.time() - self.last_connect_attempt) > 10.0: #Wait 10 sec between attempts.
                    self.connect()
        #Create Status message
        #self.status_message = self.calc_status_message()
        pass
|
def robot_reply(you):
    """Return the robot's reply for the given input string.

    Bug fix: the original tested `"" in you` first, which is True for every
    string, so the "hello" and "today" branches were unreachable, and the
    bare `elif "today":` never tested membership at all.
    """
    if "hello" in you:
        return "hello duy"
    elif "today" in you:
        return "february 16"
    else:
        # typo fixed: "Ican" -> "I can"
        return "I can hear you"

you = "hello"
robot_brain = robot_reply(you)
print(robot_brain)
|
# _*_coding:utf-8_*_
__author__ = "Alex Li"
from conf import settings
import os
import yaml
import json
try:
from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
from yaml import Loader, Dumper
def print_err(msg, quit=False):
    """Print *msg* in bold red; terminate the interpreter with it instead
    when *quit* is true."""
    colored = "\033[31;1mError: %s\033[0m" % msg
    if not quit:
        print(colored)
    else:
        exit(colored)
def yaml_parser(yml_filename):
    """
    Load a YAML file and return the parsed data.

    :param yml_filename: path of the YAML file to read
    :return: parsed data, or None when loading fails (the error is printed
             via print_err)
    """
    # yml_filename = "%s/%s.yml" % (settings.StateFileBaseDir,yml_filename)
    try:
        # Fixes: pass an explicit Loader — a bare yaml.load() is deprecated
        # and unsafe on untrusted input with old PyYAML — and use `with` so
        # the file handle is always closed (the original leaked it).
        with open(yml_filename, "r") as yaml_file:
            return yaml.load(yaml_file, Loader=Loader)
    except Exception as e:
        print_err(e)
def loadFiles(filepath):
    """Read a UTF-8 text file and return its lines (newlines preserved)."""
    # `with` guarantees the handle is closed; the original leaked it.
    with open(filepath, encoding="utf-8") as f:
        return f.readlines()
def jsonData(source_data):
    """
    Convert lines of "key=value, key=value" pairs into pretty-printed,
    UTF-8 encoded JSON bytes.

    Each line becomes one record; the record is keyed by the value of the
    line's FIRST pair.

    Fixes: the locals `dict` and `list` shadowed the builtins, and the
    enumerate() index was never used.
    """
    records = {}
    for line in source_data:
        record = {}
        fields = line.split(",")
        for field in fields:
            # [0]/[1] only: anything after a second '=' is ignored,
            # matching the original behaviour.
            tag_name = field.split("=")[0].strip()
            tag_value = field.split("=")[1].strip()
            record[tag_name] = tag_value
        records[fields[0].split("=")[1].strip()] = record
    print(records)  # debug output kept from the original
    json_data = json.dumps(
        records, sort_keys=True, indent=4, separators=(",", ": ")
    ).encode("utf-8")
    return json_data
def dumpFiles(filepath, source_data):
    """Write *source_data* (bytes) to a meta.json next to *filepath* and
    return the meta.json path."""
    # print(os.path.dirname(os.path.abspath(filepath)))
    # Fixes: os.path.join instead of a hard-coded "\\" (portable off
    # Windows) and `with` so the handle is closed (the original leaked it).
    target = os.path.join(os.path.dirname(os.path.abspath(filepath)), "meta.json")
    with open(target, "wb") as f:
        f.write(source_data)
    return target
def json_parser(filepath):
    """Turn the key=value text file at *filepath* into a meta.json file and
    return the meta.json path."""
    print(filepath)
    # print(settings.BASE_DIR + filepath)
    absolute = os.path.abspath(filepath)
    lines = loadFiles(absolute)
    encoded = jsonData(lines)
    return dumpFiles(filepath, encoded)
|
import tkinter as tk
from tkinter.filedialog import askdirectory #窗口
from tkinter import StringVar #窗口
import tkinter.messagebox
import json
import base64
import urllib3
import os
import time
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Module-level state shared by the GUI callbacks below.
a_labname=''      # label name confirmed via CheakLabel()
log_result=''     # last error payload from the label-list request
log_file=''       # file name of the most recent upload attempt
l_list=[]         # label names fetched by LabelList()
log_upresult=''   # last error payload from an upload request
a_path=''         # directory chosen via selectParh()
suc_file=0        # count of successful uploads
f_file=0          # count of failed uploads
temmm=0           # column counter for 已上传.txt (7 names per line)
def selectParh():
    # Ask the user for a directory, convert it to Windows-style
    # backslashes, show it in the path entry and remember it globally.
    path_=askdirectory()
    path_=path_.replace("/","\\")
    path.set(path_)
    global a_path
    a_path=path_
# def LabelsName():
# labname_=lab.get()
# labname.set(labname_)
# a_labname=labname_
# def cheakstring():
def LabelList():
    # Fetch every label of the EasyDL image-classification dataset (id
    # 33410) and cache the names in the global l_list.  On an API error the
    # response is logged and the user may retry (recursive call) or close
    # the window.
    access_token = '24.5bd9d33ad007169c01db493a4b76d7ac.2592000.1560521571.282335-16203984'
    url = 'https://aip.baidubce.com/rpc/2.0/easydl/label/list?access_token='+access_token
    http=urllib3.PoolManager()
    body={
        'type':'IMAGE_CLASSIFICATION',
        'dataset_id':33410
    }
    encoded_date=json.dumps(body).encode('utf-8')
    request=http.request('POST',
                         url,
                         body=encoded_date,
                         headers={'Content-Type':'application/json'})
    result=str(request.data,'utf-8')
    # SECURITY NOTE(review): eval() on an HTTP response executes arbitrary
    # code if the server misbehaves; json.loads() would be safer.
    result=eval(result)
    if 'error_code' in result:
        global log_result
        log_result=str(result)
        log()
        lerror=tk.messagebox.askretrycancel('错误','获取标签名发生错误详情请查看日志',parent=window)
        if (lerror):
            LabelList()
        else:
            window.destroy()
    else:
        result=list(result['results'])
        lenoflist=len(result)
        for i in range(lenoflist):
            result2=result[i]
            dictresult=eval(str(result2))
            l_list.append(str(dictresult['label_name']))
def UploadPic():
    # Upload every file in the chosen directory (a_path) to the EasyDL
    # dataset under the confirmed label, filling the success/failure list
    # boxes and log files as it goes.
    global suc_file
    global f_file
    global log_upresult
    global log_file
    # remove the label/path confirm buttons so they cannot change mid-upload
    blabel.destroy()
    bpath.destroy()
    access_token = '24.5bd9d33ad007169c01db493a4b76d7ac.2592000.1560521571.282335-16203984'
    url = 'https://aip.baidubce.com/rpc/2.0/easydl/dataset/addentity?access_token='+access_token
    http=urllib3.PoolManager()
    filename=os.listdir(a_path)
    for i in filename:
        # NOTE(review): the handle `f` is never closed
        f=open(a_path+'\\'+i,'rb')
        img=base64.b64encode(f.read())   # API expects base64 image content
        params=str(img,'utf-8')
        body={
            'type':'IMAGE_CLASSIFICATION',
            'dataset_id':33410,
            'entity_content':params,
            'entity_name':i,
            'labels':[{
                'label_name':a_labname,
            }]
        }
        encoded_date=json.dumps(body).encode('utf-8')
        request=http.request('POST',
                             url,
                             body=encoded_date,
                             headers={'Content-Type':'application/json'})
        result=str(request.data,'utf-8')
        # SECURITY NOTE(review): eval() on a network response — json.loads()
        # would be safer.
        result=eval(result)
        if 'error_code' in result:
            f_file=f_file+1
            log_upresult=str(result)
            log_file=i
            uplog()
            list2.insert(tk.END,i)
        else:
            log_file=i
            suc_file=suc_file+1
            list1.insert(tk.END,i)
            successlog()
        # refresh the UI after each file so progress is visible
        window.update()
        list1.update()
        list2.update()
    tk.messagebox.showinfo("成功","上传完毕")
def tqdmtest():
    # Shown when the fetched label list is empty: let the user retry
    # (recursive call) or close the window.
    lerror=tk.messagebox.askretrycancel('错误','获取标签名发生错误请检查日志',parent=window)
    if (lerror):
        tqdmtest()
    else:
        window.destroy()
def successlog():
    """Append the just-uploaded file name (global log_file) to 已上传.txt,
    seven names per line."""
    global temmm
    # `with` closes the handle even if a write fails (the original only
    # closed on the happy path).
    with open("已上传.txt", "a") as f:
        # start a new line on the very first entry and after every 7th
        if temmm == 7 or temmm == 0:
            f.write("\n")
            temmm = 0
        temmm = temmm + 1
        f.write(log_file + " ")
def uplog():
    """Append a timestamped upload-failure record (globals log_file /
    log_upresult) to log.txt."""
    # `with` replaces the manual close so the handle cannot leak on error.
    with open("log.txt", "a") as logfile:
        logfile.write(time.strftime('%Y.%m.%d %H:%M:%S', time.localtime(time.time())) + ":\n")
        logfile.write("上传失败:" + log_file + " " + log_upresult + "\n")
        logfile.write("\n")
    return
def log():
    """Append a timestamped label-list-failure record (global log_result)
    to log.txt."""
    # `with` replaces the manual close so the handle cannot leak on error.
    with open("log.txt", "a") as logfile:
        logfile.write(time.strftime('%Y.%m.%d %H:%M:%S', time.localtime(time.time())) + ":\n")
        logfile.write("获取标签名失败:" + log_result + "\n")
        logfile.write("\n")
    return
def CheakLabel():
    # Confirm the label name typed into the entry: remember it globally,
    # display it, and when it is a brand-new label (not in l_list) reset
    # the 已上传.txt record file.
    labname_=lab.get()
    labname.set(labname_)
    global a_labname
    a_labname=labname_
    lab1=tk.Label(labelframe,text='当前标签名为:'+str(a_labname))
    lab1.place(x=7,y=30)
    if a_labname in l_list:
        m=0  # existing label: nothing to do (placeholder statement)
    else:
        # new label: clear the "already uploaded" record file
        dele=open("已上传.txt","w")
        dele.write("\n")
        dele.close()
def on_closing():
    # Window-close handler: ask for confirmation before destroying.
    if (tk.messagebox.askokcancel("退出", "是否关闭窗口?")):
        window.destroy()
def click_upb():
    # Upload-button handler: refuse to start until both a directory and a
    # label name have been confirmed.
    if (a_path==''):
        tk.messagebox.askretrycancel('错误','请检查路径是否正确',parent=window)
    elif (a_labname==''):
        tk.messagebox.askretrycancel('错误','请检查标签名是否正确',parent=window)
    else:
        UploadPic()
# --- build the main window and widgets ---
window=tk.Tk()
window.geometry('520x600')
window.title("上传")
path=StringVar()
labname=StringVar()
filename=StringVar()
LabelList()   # fetch the existing labels before building the list box
window.protocol("WM_DELETE_WINDOW", on_closing)
ver=tk.Label(window,text='版本:1.3.0')
ver.place(x=15,y=500)
acc=tk.Label(window,text="当前access_token'24.5bd9d33ad007169c01db493a4b76d7ac.2592000.1560521571.282335-16203984'")
acc.place(x=15,y=530)
# label-name entry + confirm button
labelframe=tk.LabelFrame(window,text='标签名',labelanchor='nw')
labelframe.place(x=5,y=5,width=250,height=80)
lab=tk.Entry(labelframe)
lab.place(x=5,y=5,width=230)
blabel=tk.Button(labelframe,text=' 确 认 ',command=CheakLabel)
blabel.place(x=170,y=28)
# target-directory entry + browse button
pathframe=tk.LabelFrame(window,text='目标路径')
pathframe.place(x=5,y=90,width=250,height=80)
entpath=tk.Entry(pathframe,textvariable=path)
entpath.place(x=5,y=5,width=230)
bpath=tk.Button(pathframe,text='选择路径',command=selectParh)
bpath.place(x=170,y=28)
# list of labels already present in the dataset
haslabelframe=tk.LabelFrame(window,text='已有的标签名',labelanchor='ne')
haslabelframe.place(x=265,y=5,width=250,height=165)
sbhas=tk.Scrollbar(haslabelframe)
sbhas.pack(side='right',fill='y')
labellist=tk.Listbox(haslabelframe,yscrollcommand=sbhas.set)
if l_list != []:
    for listitem in l_list:
        labellist.insert(tk.END,str(listitem))
else:
    # empty list means LabelList() failed — offer retry/close
    tqdmtest()
labellist.place(x=3,y=0,width=227,height=144)
sbhas.config(command=labellist.yview)
# frame1: files uploaded successfully
frame1=tk.LabelFrame(window,text='已上传',labelanchor='nw')
frame1.place(x=5,y=220,width=250,height=200)
sbframe1=tk.Scrollbar(frame1)
sbframe1.pack(side='right',fill='y')
list1=tk.Listbox(frame1,yscrollcommand=sbframe1.set)
list1.place(x=6,y=0,width=224,height=179)
sbframe1.config(command=list1.yview)
# frame2: files whose upload failed
frame2=tk.LabelFrame(window,text='上传失败',labelanchor='ne')
frame2.place(x=265,y=220,width=250,height=200)
sbframe2=tk.Scrollbar(frame2)
sbframe2.pack(side='right',fill='y')
list2=tk.Listbox(frame2,yscrollcommand=sbframe2.set)
list2.place(x=6,y=0,width=224,height=179)
sbframe2.config(command=list2.yview)
bupload=tk.Button(window,text=' 确 认 上 传 ',command=click_upb)
bupload.place(x=160,y=180,width=200,height=35)
# btest=tk.Button(window,text='test',command=test)
# btest.place(x=500,y=450)
window.mainloop()
|
# coding = utf-8
class Person(object):
    """
    A person with a name, a favourite language and a fixed email address.
    """
    def __init__(self, name, lang="python"):
        self.name = name
        self.lang = lang
        self.email = "qiwsir@gmail.com"
    def getName(self):
        """Return the person's name."""
        return self.name
    def color(self, col):
        """Print which colour this person is."""
        # Bug fix: `print "..."` is a SyntaxError on Python 3; the
        # parenthesised single-argument form works on both 2 and 3.
        print("{0} is {1}".format(self.name, col))
# Demo of the Person class.
# Bug fix: every bare `print x` statement was a SyntaxError on Python 3;
# the single-argument print(...) form behaves identically on 2 and 3.
laoqi = Person("qiwsir")
name = laoqi.getName()
print(name)
cang = Person("canglaoshi")
cang_name = cang.getName()
print(cang_name)
cang.color("white")
laoqi.color("black")
print(cang.lang)
print(laoqi.lang)
print(cang.email)
print(laoqi.email)
|
# Demo of common str operations.
word = "Hello World"
print(word)
print(word[0])
len(word)  # value discarded — kept from the original demo
print(word.count('l'))
print(word.find('h'))       # -1: find is case-sensitive
print(word.index("World"))
print(word[0:3])
print(word[:-3])
# Bug fix: start/end were never defined, so this line raised NameError.
start, end = 6, 11
print(word[start:end])      # "World"
word.startswith('H')
word.endswith('L')
word.replace("Hello", "Goodbye")  # strings are immutable; result discarded
|
def karatusba(x, y):
    """Multiply two non-negative integers with the Karatsuba algorithm."""
    # Base case: a single-digit operand multiplies directly.
    if len(str(x)) == 1 or len(str(y)) == 1:
        return x * y
    half = max(len(str(x)), len(str(y))) // 2
    base = 10 ** half
    high_x, low_x = divmod(x, base)
    high_y, low_y = divmod(y, base)
    low = karatusba(low_x, low_y)
    cross = karatusba(high_x + low_x, high_y + low_y)
    high = karatusba(high_x, high_y)
    # x*y = high*base^2 + (cross - high - low)*base + low
    return high * base * base + (cross - high - low) * base + low
# Demo: multiply two big integers and show the product.
x = 3141592653589793238462643383279502884197169399375105820974944592
y = 271828182845904523536028747135266249775724709369995957496696762743432234
z = karatusba(x, y)
print(z)
|
from tfcgp.config import Config
from tfcgp.chromosome import Chromosome
import numpy as np
import tensorflow as tf
# Shared module-level fixture: one Config, loaded once, used by every test.
c = Config()
c.update("cfg/test.yaml")
def test_creation():
    # A random 5-input / 2-output chromosome should allocate num_nodes
    # program nodes plus one node per input, and one slot per output.
    ch = Chromosome(5, 2)
    ch.random(c)
    assert len(ch.nodes) == c.cfg["num_nodes"] + 5
    assert len(ch.outputs) == 2
def test_active():
    # Hand-wire a small graph and check that only the nodes reachable from
    # the outputs (plus the inputs they consume) are reported active.
    ch = Chromosome(5, 2)
    ch.random(c)
    ch.nodes[5].x = 0
    ch.nodes[5].y = 1
    ch.nodes[5].arity = 2
    ch.nodes[6].x = 3
    ch.nodes[7].x = 5
    ch.nodes[7].y = 6
    ch.nodes[7].arity = 1
    ch.outputs[0] = 7
    ch.outputs[1] = 7
    ch.set_active()
    print(ch.get_active())
    # node 6 is inactive: node 7 has arity 1, so only its x-input (5) counts
    assert ch.get_active() == [0, 1, 5, 7]
def test_tensor():
    # Smoke test: point both outputs at node 9 and make sure get_tensors()
    # runs without raising.
    ch = Chromosome(5, 2)
    ch.random(c)
    ch.outputs[0] = 9
    ch.outputs[1] = 9
    out = ch.get_tensors()
    print(out)
    assert True  # no crash is the pass condition
def test_visul():
    # Smoke test: visualize() should render the "test" graph without raising.
    ch = Chromosome(8, 8)
    ch.random(c)
    ch.visualize("test")
    assert True  # no crash is the pass condition
def test_run():
    # Wire the single output through node 2 (square of input 0) and check
    # that run() returns that node's stored param value.
    ch = Chromosome(2, 1)
    ch.random(c)
    ch.outputs[0] = 2
    ch.nodes[2].x = 0
    ch.nodes[2].y = 1
    ch.nodes[2].function = tf.square
    ch.nodes[2].arity = 1
    outv = ch.run()
    print(outv)
    assert outv == ch.nodes[2].param
def test_multiout_run():
    # With 10 outputs, run() should yield more than one output value.
    ch = Chromosome(10, 10)
    ch.random(c)
    outv = ch.run()
    print(outv)
    assert len(outv) > 1
|
def hello():
    """Print "hello world" five times."""
    for _ in range(5):
        print("hello world")
|
# Changed news to internship in this file
import graphene
from graphene_django.types import DjangoObjectType
# from bootcamp.news.models import News
from bootcamp.internship.models import Internship
from bootcamp.helpers import paginate_data
class InternshipType(DjangoObjectType):
    """DjangoObjectType to access the Internship model, with extra
    thread/liker count fields."""
    count_thread = graphene.Int()
    count_likers = graphene.Int()
    class Meta:
        # model = News
        model = Internship
    def resolve_count_thread(self, info, **kwargs):
        # number of replies in this internship's thread
        return self.get_thread().count()
    def resolve_count_likers(self, info, **kwargs):
        return self.get_likers().count()
    def resolve_count_attendees(self, info, **kwargs):
        # NOTE(review): no count_attendees field is declared on this type —
        # presumably auto-mapped or dead code; verify against the schema.
        return self.get_attendees().count()
    def resolve_get_thread(self, info, **kwargs):
        return self.get_thread()
    def resolve_get_likers(self, info, **kwargs):
        return self.get_likers()
    def resolve_get_attendees(self, info, **kwargs):
        return self.get_attendees()
class InternshipPaginatedType(graphene.ObjectType):
    """A paginated type generic object to provide pagination to the
    Internship graph."""
    page = graphene.Int()         # current page number
    pages = graphene.Int()        # total number of pages
    has_next = graphene.Boolean()
    has_prev = graphene.Boolean()
    # objects = graphene.List(NewsType)
    objects = graphene.List(InternshipType)
class InternshipQuery(object):
    """Root query fields for the Internship graph."""
    all_internship = graphene.List(InternshipType)
    paginated_internship = graphene.Field(InternshipPaginatedType, page=graphene.Int())
    internship = graphene.Field(InternshipType, uuid_id=graphene.String())

    def resolve_all_internship(self, info, **kwargs):
        """Return every top-level internship (replies excluded)."""
        return Internship.objects.filter(reply=False)

    def resolve_paginated_internship(self, info, page):
        """Resolver functions to query the objects and turn the queryset into
        the PaginatedType using the helper function."""
        page_size = 30
        qs = Internship.objects.filter(reply=False)
        return paginate_data(qs, page_size, page, InternshipPaginatedType)

    def resolve_internship(self, info, **kwargs):
        """Look a single internship up by uuid_id; None when absent."""
        uuid_id = kwargs.get("uuid_id")
        # Bug fix: the original ran `print("uuid_id" + uuid_id)` BEFORE the
        # None check, raising TypeError whenever the argument was missing.
        if uuid_id is not None:
            print("uuid_id" + uuid_id)
            return Internship.objects.get(uuid_id=uuid_id)
        return None
class InternshipMutation(graphene.Mutation):
    """Mutation to create internship objects in an effective way."""
    class Arguments:
        content = graphene.String()
        user = graphene.ID()
        parent = graphene.ID()
    # NOTE(review): these class-level fields duplicate the Arguments names;
    # in graphene class attributes on a Mutation are OUTPUT fields — confirm
    # the duplication is intended.
    content = graphene.String()
    user = graphene.ID()
    parent = graphene.ID()
    # news = graphene.Field(lambda: News)
    internship = graphene.Field(lambda: Internship)
    def mutate(self, **kwargs):
        # NOTE(review): incomplete stub — graphene normally passes
        # (root, info, **kwargs) and expects a mutation instance to be
        # returned; this only logs its inputs.
        print(kwargs)
|
import logging
from typing import Dict, List
_logger = logging.getLogger().getChild(__name__)
# each node should have the following property
# RTL
# port / wire mapping
# members
# when we create a new upper level instance
# the new instance will have direct wire connection with other instances
# because we have pushed some streams into the instance
# in this case, we should create new virtual edges and give them a special type
# basically the three wires connecting to a stream inside the upper instance will form such a virtual edge
# we need to record the pipeline level for such a special stream
# we need to replace the inner stream by almost full FIFOs (not relay station)
# and the grace period of the almost-full FIFO should match the external pipeline level
INBOUND_PORTS = (
    'if_din',
    'if_full_n',
    'if_write',
)
OUTBOUND_PORTS = (
    'if_dout',
    'if_empty_n',
    'if_read',
)


def get_updated_decl(
    config: Dict,
    inbound_external_streams: List[str],
    outbound_external_streams: List[str],
):
    """Get the updated external wire decl, internal wire decl
    and the new partition pins on the wrapper.

    TODO: add partition pins for ctrl signals
    TODO: add partition pins if any instance has top-level IO
    """
    io_decl = {}
    external_wire_decl = {}
    internal_wire_decl = {}

    def _record(stream_name, port_name):
        # look the physical wire up through the stream's port map, then
        # register its width under the three derived names
        wire = config['edges'][stream_name]['port_wire_map'][port_name]
        bit_width = config['wire_decl'][wire]
        io_decl[f'{wire}_PARTITION_PIN'] = bit_width
        external_wire_decl[f'{wire}_EXTERNAL'] = bit_width
        internal_wire_decl[f'{wire}_INTERNAL'] = bit_width

    # inbound streams first, then outbound, matching the declared port sets
    for streams, ports in (
        (inbound_external_streams, INBOUND_PORTS),
        (outbound_external_streams, OUTBOUND_PORTS),
    ):
        for stream_name in streams:
            for port_name in ports:
                _record(stream_name, port_name)

    return io_decl, external_wire_decl, internal_wire_decl
def get_internal_wires():
    """Get the wires between instances included into the wrapper"""
    # TODO: not implemented yet (stub)
def update_external_wires():
    """Remove the wires included into the wrapper from the external wire list"""
    # TODO: not implemented yet (stub)
def get_internal_instances():
    """Get the task/stream instances included into the wrapper"""
    # TODO: not implemented yet (stub)
def update_external_instances():
    """Remove the task/stream instances included into the wrapper from the external list

    Add the newly created instance
    """
    # TODO: not implemented yet (stub)
def create_wrapper(
    config: Dict,
    target_vertices: List[str],
    wrapper_name: str,
):
    """Create an upper node that includes the specified nodes"""
    # TODO: not implemented yet (stub). Planned steps:
    # get all internal streams
    # get all internal wires and external wires
    # generate the ctrl signal inside the wrapper
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from model.ServerClass import FtpServer
from conf.settings import *
import json
import struct
def entry():
    """Accept FTP client connections and service their commands until each
    client disconnects.

    Protocol: a 4-byte little-or-native-endian length header, then a JSON
    login dict of that length, then plain-text commands.
    """
    server = FtpServer(SERVER_IP_PORT)
    while True:  # accept loop
        conn, address = server.phone.accept()
        # --- login handshake ---
        cmd = conn.recv(4)
        if not cmd:  # peer closed the connection (Linux behaviour)
            break
        # Bug fix: the native format "l" unpacks 8 bytes on LP64 platforms
        # (64-bit Linux/macOS) and fails on this 4-byte read; "=l" forces
        # the standard 4-byte size, matching the recv(4) above.
        # NOTE(review): assumes the client packs the header as 4 bytes —
        # confirm against the client side.
        header_len = struct.unpack("=l", cmd)[0]
        date_dic = json.loads(conn.recv(header_len).decode(encoding))
        if not server.login(conn, date_dic):
            continue
        while True:  # command loop
            try:
                # 1. receive a command line
                res = conn.recv(1024)  # e.g. b'put 1.mp4'
                print(res)
                if not res:  # peer closed (Linux behaviour)
                    break
                # 2. parse it into verb + arguments
                cmd = res.decode('utf-8').split()  # ['put', '1.mp4']
                if cmd[0] == 'get':
                    server.get(conn, cmd)
                elif cmd[0] == 'put':
                    server.put(conn, cmd)
                elif cmd[0] == "dir":
                    server.dir(conn, cmd)
                elif cmd[0] == "cd":
                    server.cd(conn, cmd)
                else:
                    continue  # unknown verb: ignore
            except ConnectionResetError:  # Windows: peer reset
                break
        conn.close()
    server.phone.close()
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-08-02 11:24
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Add AlbumImageRelation.imported_at (auto_now_add) with a one-off
    datetime used only to back-fill rows that already exist."""
    dependencies = [
        ('album_creator', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='albumimagerelation',
            name='imported_at',
            field=models.DateTimeField(auto_now_add=True, default=datetime.datetime(2016, 8, 2, 11, 24, 2, 552204, tzinfo=utc), verbose_name='Image import datetime'),
            # preserve_default=False: the datetime above back-fills existing
            # rows during the migration and is not kept as the field default.
            preserve_default=False,
        ),
    ]
|
# -*- coding: utf-8 -*-
"""
**************************************************************************
* IMAGE PROCESSING (e-Yantra 2016)
* ================================
* This software is intended to teach image processing concepts
*
* MODULE: Task1C
* Filename: getCellVal.py
* Version: 1.0.0
* Date: October 13, 2016
*
* Author: Jayant Solanki, e-Yantra Project, Department of Computer Science
* and Engineering, Indian Institute of Technology Bombay.
*
* Software released under Creative Commons CC BY-NC-SA
*
* For legal information refer to:
* http://creativecommons.org/licenses/by-nc-sa/4.0/legalcode
*
*
* This software is made available on an “AS IS WHERE IS BASIS”.
* Licensee/end user indemnifies and will keep e-Yantra indemnified from
* any and all claim(s) that emanate from the use of the Software or
* breach of the terms of this agreement.
*
* e-Yantra - An MHRD project under National Mission on Education using
* ICT(NMEICT)
*
**************************************************************************
"""
# detectCellVal detects the numbers/operatorsm,
# perform respective expression evaluation
# and stores them into the grid_map
# detectCellVal(img,grid_map)
# Find the number/operators, perform the calculations and store the result into the grid_map
# Return the resultant grid_map
import cv2
import numpy as np
# comment here
def detectCellVal(img_rgb,grid_map):
    # Stub: intended to read each grid cell of img_rgb, evaluate the
    # number/operator expression found there and store the result in
    # grid_map.  Currently returns grid_map unchanged.
    #your code here
    return grid_map
|
# -*- coding:utf-8 -*-
"""最小代价字母树"""
"""设有n堆沙子排成一排,其编号为1,2,3,…,n(n≤100)。每堆沙子有一定的数量,如下表
13 7 8 16 21 4 18 现在要将n堆沙子归并成一堆
状态方程 F[i][j] = min{f[i][k]+f[k+1][j]} + s[j] - s[i - 1]
其中F[i][j]表示从i到j的代价,s[j]表示到堆j的和
[输入格式]
n {表示沙子的堆数, 2<=n<=100}
a1 a2 … an {表示每堆沙子的数量,1<=Ai<=100}
[输出格式]
x {表示最小的归并总代价 }
输入样例:
7
13 7 8 16 21 4 18
输出样例:
239"""
def min_num(num, dp, s):
    """Interval DP for the minimum total cost of merging adjacent piles.

    dp[i][j] holds the minimal cost of merging piles i..j (1-based) and
    s[j] the prefix sum of the first j piles; both are filled in place.
    Returns dp[1][n], the minimum total merge cost.
    """
    n = len(num)
    # prefix sums plus the base cases: dp[i][i] = 0 and
    # dp[i][i+1] = cost of merging one adjacent pair
    running = 0
    for idx, pile in enumerate(num):
        running += pile
        s[idx + 1] = running
        if idx < n - 1:
            dp[idx + 1][idx + 2] = pile + num[idx + 1]
        dp[idx + 1][idx + 1] = 0
    # widen the interval one pile at a time:
    # dp[i][j] = min over splits m of dp[i][m] + dp[m+1][j], plus the
    # cost of the final merge (sum of piles i..j)
    for width in range(1, n):
        for left in range(1, n - width + 1):
            right = left + width
            best = min(dp[left][split] + dp[split + 1][right]
                       for split in range(left, right))
            dp[left][right] = best + s[right] - s[left - 1]
    return dp[1][n]
def main():
    # Read the pile count and pile sizes from stdin, run the DP, then show
    # the minimum total merge cost and the full dp table (debug output).
    n = int(input("输入沙子的堆数:"))
    num = input("输入沙子的数量:")
    num = list(map(int, num.split()))
    dp = [[0]*(n+1) for i in range(n+1)]   # dp and s use 1-based indices
    s = [0]*(n+1)
    print("最小归并代价为:", min_num(num, dp, s))
    for i in range(n + 1):
        print(dp[i])
if __name__ == '__main__':
    main()
|
# coding: utf-8
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
class FtpSettingsSettings(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
FtpSettingsSettings - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'accept_timeout': 'int',
'allow_anon_access': 'bool',
'allow_anon_upload': 'bool',
'allow_dirlists': 'bool',
'allow_downloads': 'bool',
'allow_local_access': 'bool',
'allow_writes': 'bool',
'always_chdir_homedir': 'bool',
'anon_chown_username': 'str',
'anon_password_list': 'list[str]',
'anon_root_path': 'str',
'anon_umask': 'int',
'ascii_mode': 'str',
'chroot_exception_list': 'list[str]',
'chroot_local_mode': 'str',
'connect_timeout': 'int',
'data_timeout': 'int',
'denied_user_list': 'list[str]',
'dirlist_localtime': 'bool',
'dirlist_names': 'str',
'file_create_perm': 'int',
'limit_anon_passwords': 'bool',
'local_root_path': 'str',
'local_umask': 'int',
'server_to_server': 'bool',
'service': 'bool',
'session_support': 'bool',
'session_timeout': 'int',
'user_config_dir': 'str'
}
self.attribute_map = {
'accept_timeout': 'accept_timeout',
'allow_anon_access': 'allow_anon_access',
'allow_anon_upload': 'allow_anon_upload',
'allow_dirlists': 'allow_dirlists',
'allow_downloads': 'allow_downloads',
'allow_local_access': 'allow_local_access',
'allow_writes': 'allow_writes',
'always_chdir_homedir': 'always_chdir_homedir',
'anon_chown_username': 'anon_chown_username',
'anon_password_list': 'anon_password_list',
'anon_root_path': 'anon_root_path',
'anon_umask': 'anon_umask',
'ascii_mode': 'ascii_mode',
'chroot_exception_list': 'chroot_exception_list',
'chroot_local_mode': 'chroot_local_mode',
'connect_timeout': 'connect_timeout',
'data_timeout': 'data_timeout',
'denied_user_list': 'denied_user_list',
'dirlist_localtime': 'dirlist_localtime',
'dirlist_names': 'dirlist_names',
'file_create_perm': 'file_create_perm',
'limit_anon_passwords': 'limit_anon_passwords',
'local_root_path': 'local_root_path',
'local_umask': 'local_umask',
'server_to_server': 'server_to_server',
'service': 'service',
'session_support': 'session_support',
'session_timeout': 'session_timeout',
'user_config_dir': 'user_config_dir'
}
self._accept_timeout = None
self._allow_anon_access = None
self._allow_anon_upload = None
self._allow_dirlists = None
self._allow_downloads = None
self._allow_local_access = None
self._allow_writes = None
self._always_chdir_homedir = None
self._anon_chown_username = None
self._anon_password_list = None
self._anon_root_path = None
self._anon_umask = None
self._ascii_mode = None
self._chroot_exception_list = None
self._chroot_local_mode = None
self._connect_timeout = None
self._data_timeout = None
self._denied_user_list = None
self._dirlist_localtime = None
self._dirlist_names = None
self._file_create_perm = None
self._limit_anon_passwords = None
self._local_root_path = None
self._local_umask = None
self._server_to_server = None
self._service = None
self._session_support = None
self._session_timeout = None
self._user_config_dir = None
@property
def accept_timeout(self):
"""
Gets the accept_timeout of this FtpSettingsSettings.
The timeout, in seconds, for a remote client to establish a PASV style data connection.
:return: The accept_timeout of this FtpSettingsSettings.
:rtype: int
"""
return self._accept_timeout
@accept_timeout.setter
def accept_timeout(self, accept_timeout):
"""
Sets the accept_timeout of this FtpSettingsSettings.
The timeout, in seconds, for a remote client to establish a PASV style data connection.
:param accept_timeout: The accept_timeout of this FtpSettingsSettings.
:type: int
"""
self._accept_timeout = accept_timeout
@property
def allow_anon_access(self):
"""
Gets the allow_anon_access of this FtpSettingsSettings.
Controls whether anonymous logins are permitted or not.
:return: The allow_anon_access of this FtpSettingsSettings.
:rtype: bool
"""
return self._allow_anon_access
@allow_anon_access.setter
def allow_anon_access(self, allow_anon_access):
"""
Sets the allow_anon_access of this FtpSettingsSettings.
Controls whether anonymous logins are permitted or not.
:param allow_anon_access: The allow_anon_access of this FtpSettingsSettings.
:type: bool
"""
self._allow_anon_access = allow_anon_access
@property
def allow_anon_upload(self):
"""
Gets the allow_anon_upload of this FtpSettingsSettings.
Controls whether anonymous users will be permitted to upload files.
:return: The allow_anon_upload of this FtpSettingsSettings.
:rtype: bool
"""
return self._allow_anon_upload
@allow_anon_upload.setter
def allow_anon_upload(self, allow_anon_upload):
"""
Sets the allow_anon_upload of this FtpSettingsSettings.
Controls whether anonymous users will be permitted to upload files.
:param allow_anon_upload: The allow_anon_upload of this FtpSettingsSettings.
:type: bool
"""
self._allow_anon_upload = allow_anon_upload
@property
def allow_dirlists(self):
"""
Gets the allow_dirlists of this FtpSettingsSettings.
If set to false, all directory list commands will return a permission denied error.
:return: The allow_dirlists of this FtpSettingsSettings.
:rtype: bool
"""
return self._allow_dirlists
@allow_dirlists.setter
def allow_dirlists(self, allow_dirlists):
"""
Sets the allow_dirlists of this FtpSettingsSettings.
If set to false, all directory list commands will return a permission denied error.
:param allow_dirlists: The allow_dirlists of this FtpSettingsSettings.
:type: bool
"""
self._allow_dirlists = allow_dirlists
    # --- Simple generated accessor pairs. Each property is backed by the
    # --- corresponding self._<name> attribute and performs no validation.
    @property
    def allow_downloads(self):
        """
        Gets the allow_downloads of this FtpSettingsSettings.
        If set to false, all downloads requests will return a permission denied error.
        :return: The allow_downloads of this FtpSettingsSettings.
        :rtype: bool
        """
        return self._allow_downloads
    @allow_downloads.setter
    def allow_downloads(self, allow_downloads):
        """
        Sets the allow_downloads of this FtpSettingsSettings.
        If set to false, all downloads requests will return a permission denied error.
        :param allow_downloads: The allow_downloads of this FtpSettingsSettings.
        :type: bool
        """
        self._allow_downloads = allow_downloads
    @property
    def allow_local_access(self):
        """
        Gets the allow_local_access of this FtpSettingsSettings.
        Controls whether local logins are permitted or not.
        :return: The allow_local_access of this FtpSettingsSettings.
        :rtype: bool
        """
        return self._allow_local_access
    @allow_local_access.setter
    def allow_local_access(self, allow_local_access):
        """
        Sets the allow_local_access of this FtpSettingsSettings.
        Controls whether local logins are permitted or not.
        :param allow_local_access: The allow_local_access of this FtpSettingsSettings.
        :type: bool
        """
        self._allow_local_access = allow_local_access
    @property
    def allow_writes(self):
        """
        Gets the allow_writes of this FtpSettingsSettings.
        This controls whether any FTP commands which change the filesystem are allowed or not.
        :return: The allow_writes of this FtpSettingsSettings.
        :rtype: bool
        """
        return self._allow_writes
    @allow_writes.setter
    def allow_writes(self, allow_writes):
        """
        Sets the allow_writes of this FtpSettingsSettings.
        This controls whether any FTP commands which change the filesystem are allowed or not.
        :param allow_writes: The allow_writes of this FtpSettingsSettings.
        :type: bool
        """
        self._allow_writes = allow_writes
    @property
    def always_chdir_homedir(self):
        """
        Gets the always_chdir_homedir of this FtpSettingsSettings.
        This controls whether FTP will always initially change directories to the home directory of the user, regardless of whether it is chroot-ing.
        :return: The always_chdir_homedir of this FtpSettingsSettings.
        :rtype: bool
        """
        return self._always_chdir_homedir
    @always_chdir_homedir.setter
    def always_chdir_homedir(self, always_chdir_homedir):
        """
        Sets the always_chdir_homedir of this FtpSettingsSettings.
        This controls whether FTP will always initially change directories to the home directory of the user, regardless of whether it is chroot-ing.
        :param always_chdir_homedir: The always_chdir_homedir of this FtpSettingsSettings.
        :type: bool
        """
        self._always_chdir_homedir = always_chdir_homedir
    @property
    def anon_chown_username(self):
        """
        Gets the anon_chown_username of this FtpSettingsSettings.
        This is the name of the user who is given ownership of anonymously uploaded files.
        :return: The anon_chown_username of this FtpSettingsSettings.
        :rtype: str
        """
        return self._anon_chown_username
    @anon_chown_username.setter
    def anon_chown_username(self, anon_chown_username):
        """
        Sets the anon_chown_username of this FtpSettingsSettings.
        This is the name of the user who is given ownership of anonymously uploaded files.
        :param anon_chown_username: The anon_chown_username of this FtpSettingsSettings.
        :type: str
        """
        self._anon_chown_username = anon_chown_username
    @property
    def anon_password_list(self):
        """
        Gets the anon_password_list of this FtpSettingsSettings.
        A list of passwords for anonymous users.
        :return: The anon_password_list of this FtpSettingsSettings.
        :rtype: list[str]
        """
        return self._anon_password_list
    @anon_password_list.setter
    def anon_password_list(self, anon_password_list):
        """
        Sets the anon_password_list of this FtpSettingsSettings.
        A list of passwords for anonymous users.
        :param anon_password_list: The anon_password_list of this FtpSettingsSettings.
        :type: list[str]
        """
        self._anon_password_list = anon_password_list
    @property
    def anon_root_path(self):
        """
        Gets the anon_root_path of this FtpSettingsSettings.
        This option represents a directory in /ifs which vsftpd will try to change into after an anonymous login.
        :return: The anon_root_path of this FtpSettingsSettings.
        :rtype: str
        """
        return self._anon_root_path
    @anon_root_path.setter
    def anon_root_path(self, anon_root_path):
        """
        Sets the anon_root_path of this FtpSettingsSettings.
        This option represents a directory in /ifs which vsftpd will try to change into after an anonymous login.
        :param anon_root_path: The anon_root_path of this FtpSettingsSettings.
        :type: str
        """
        self._anon_root_path = anon_root_path
    @property
    def anon_umask(self):
        """
        Gets the anon_umask of this FtpSettingsSettings.
        The value that the umask for file creation is set to for anonymous users.
        :return: The anon_umask of this FtpSettingsSettings.
        :rtype: int
        """
        return self._anon_umask
    @anon_umask.setter
    def anon_umask(self, anon_umask):
        """
        Sets the anon_umask of this FtpSettingsSettings.
        The value that the umask for file creation is set to for anonymous users.
        :param anon_umask: The anon_umask of this FtpSettingsSettings.
        :type: int
        """
        self._anon_umask = anon_umask
@property
def ascii_mode(self):
"""
Gets the ascii_mode of this FtpSettingsSettings.
Controls whether ascii mode data transfers are honored for various types of requests.
:return: The ascii_mode of this FtpSettingsSettings.
:rtype: str
"""
return self._ascii_mode
@ascii_mode.setter
def ascii_mode(self, ascii_mode):
"""
Sets the ascii_mode of this FtpSettingsSettings.
Controls whether ascii mode data transfers are honored for various types of requests.
:param ascii_mode: The ascii_mode of this FtpSettingsSettings.
:type: str
"""
allowed_values = ["off", "upload", "download", "both"]
if ascii_mode not in allowed_values:
raise ValueError(
"Invalid value for `ascii_mode`, must be one of {0}"
.format(allowed_values)
)
self._ascii_mode = ascii_mode
    # Plain generated accessor pair backed by self._chroot_exception_list.
    @property
    def chroot_exception_list(self):
        """
        Gets the chroot_exception_list of this FtpSettingsSettings.
        A list of users that are not chrooted when logging in.
        :return: The chroot_exception_list of this FtpSettingsSettings.
        :rtype: list[str]
        """
        return self._chroot_exception_list
    @chroot_exception_list.setter
    def chroot_exception_list(self, chroot_exception_list):
        """
        Sets the chroot_exception_list of this FtpSettingsSettings.
        A list of users that are not chrooted when logging in.
        :param chroot_exception_list: The chroot_exception_list of this FtpSettingsSettings.
        :type: list[str]
        """
        self._chroot_exception_list = chroot_exception_list
@property
def chroot_local_mode(self):
"""
Gets the chroot_local_mode of this FtpSettingsSettings.
If set to 'all', all local users will be (by default) placed in a chroot() jail in their home directory after login. If set to 'all-with-exceptions', all local users except those listed in the chroot exception list (isi ftp chroot-exception-list) will be placed in a chroot() jail in their home directory after login. If set to 'none', no local users will be chrooted by default. If set to 'none-with-exceptions', only the local users listed in the chroot exception list (isi ftp chroot-exception-list) will be place in a chroot() jail in their home directory after login.
:return: The chroot_local_mode of this FtpSettingsSettings.
:rtype: str
"""
return self._chroot_local_mode
@chroot_local_mode.setter
def chroot_local_mode(self, chroot_local_mode):
"""
Sets the chroot_local_mode of this FtpSettingsSettings.
If set to 'all', all local users will be (by default) placed in a chroot() jail in their home directory after login. If set to 'all-with-exceptions', all local users except those listed in the chroot exception list (isi ftp chroot-exception-list) will be placed in a chroot() jail in their home directory after login. If set to 'none', no local users will be chrooted by default. If set to 'none-with-exceptions', only the local users listed in the chroot exception list (isi ftp chroot-exception-list) will be place in a chroot() jail in their home directory after login.
:param chroot_local_mode: The chroot_local_mode of this FtpSettingsSettings.
:type: str
"""
allowed_values = ["all", "none", "all-with-exceptions", "none-with-exceptions"]
if chroot_local_mode not in allowed_values:
raise ValueError(
"Invalid value for `chroot_local_mode`, must be one of {0}"
.format(allowed_values)
)
self._chroot_local_mode = chroot_local_mode
    # --- Plain generated accessor pairs; no validation. ---
    @property
    def connect_timeout(self):
        """
        Gets the connect_timeout of this FtpSettingsSettings.
        The timeout, in seconds, for a remote client to respond to our PORT style data connection.
        :return: The connect_timeout of this FtpSettingsSettings.
        :rtype: int
        """
        return self._connect_timeout
    @connect_timeout.setter
    def connect_timeout(self, connect_timeout):
        """
        Sets the connect_timeout of this FtpSettingsSettings.
        The timeout, in seconds, for a remote client to respond to our PORT style data connection.
        :param connect_timeout: The connect_timeout of this FtpSettingsSettings.
        :type: int
        """
        self._connect_timeout = connect_timeout
    @property
    def data_timeout(self):
        """
        Gets the data_timeout of this FtpSettingsSettings.
        The timeout, in seconds, which is roughly the maximum time we permit data transfers to stall for with no progress. If the timeout triggers, the remote client is kicked off.
        :return: The data_timeout of this FtpSettingsSettings.
        :rtype: int
        """
        return self._data_timeout
    @data_timeout.setter
    def data_timeout(self, data_timeout):
        """
        Sets the data_timeout of this FtpSettingsSettings.
        The timeout, in seconds, which is roughly the maximum time we permit data transfers to stall for with no progress. If the timeout triggers, the remote client is kicked off.
        :param data_timeout: The data_timeout of this FtpSettingsSettings.
        :type: int
        """
        self._data_timeout = data_timeout
    @property
    def denied_user_list(self):
        """
        Gets the denied_user_list of this FtpSettingsSettings.
        A list of users that will be denied access.
        :return: The denied_user_list of this FtpSettingsSettings.
        :rtype: list[str]
        """
        return self._denied_user_list
    @denied_user_list.setter
    def denied_user_list(self, denied_user_list):
        """
        Sets the denied_user_list of this FtpSettingsSettings.
        A list of users that will be denied access.
        :param denied_user_list: The denied_user_list of this FtpSettingsSettings.
        :type: list[str]
        """
        self._denied_user_list = denied_user_list
    @property
    def dirlist_localtime(self):
        """
        Gets the dirlist_localtime of this FtpSettingsSettings.
        If enabled, display directory listings with the time in your local time zone. The default is to display GMT. The times returned by the MDTM FTP command are also affected by this option.
        :return: The dirlist_localtime of this FtpSettingsSettings.
        :rtype: bool
        """
        return self._dirlist_localtime
    @dirlist_localtime.setter
    def dirlist_localtime(self, dirlist_localtime):
        """
        Sets the dirlist_localtime of this FtpSettingsSettings.
        If enabled, display directory listings with the time in your local time zone. The default is to display GMT. The times returned by the MDTM FTP command are also affected by this option.
        :param dirlist_localtime: The dirlist_localtime of this FtpSettingsSettings.
        :type: bool
        """
        self._dirlist_localtime = dirlist_localtime
@property
def dirlist_names(self):
"""
Gets the dirlist_names of this FtpSettingsSettings.
When set to 'hide', all user and group information in directory listings will be displayed as 'ftp'. When set to 'textual', textual names are shown in the user and group fields of directory listings. When set to 'numeric', numeric IDs are show in the user and group fields of directory listings.
:return: The dirlist_names of this FtpSettingsSettings.
:rtype: str
"""
return self._dirlist_names
@dirlist_names.setter
def dirlist_names(self, dirlist_names):
"""
Sets the dirlist_names of this FtpSettingsSettings.
When set to 'hide', all user and group information in directory listings will be displayed as 'ftp'. When set to 'textual', textual names are shown in the user and group fields of directory listings. When set to 'numeric', numeric IDs are show in the user and group fields of directory listings.
:param dirlist_names: The dirlist_names of this FtpSettingsSettings.
:type: str
"""
allowed_values = ["numeric", "textual", "hide"]
if dirlist_names not in allowed_values:
raise ValueError(
"Invalid value for `dirlist_names`, must be one of {0}"
.format(allowed_values)
)
self._dirlist_names = dirlist_names
    # --- Plain generated accessor pairs; no validation. ---
    @property
    def file_create_perm(self):
        """
        Gets the file_create_perm of this FtpSettingsSettings.
        The permissions with which uploaded files are created. Umasks are applied on top of this value.
        :return: The file_create_perm of this FtpSettingsSettings.
        :rtype: int
        """
        return self._file_create_perm
    @file_create_perm.setter
    def file_create_perm(self, file_create_perm):
        """
        Sets the file_create_perm of this FtpSettingsSettings.
        The permissions with which uploaded files are created. Umasks are applied on top of this value.
        :param file_create_perm: The file_create_perm of this FtpSettingsSettings.
        :type: int
        """
        self._file_create_perm = file_create_perm
    @property
    def limit_anon_passwords(self):
        """
        Gets the limit_anon_passwords of this FtpSettingsSettings.
        This field determines whether the anon_password_list is used.
        :return: The limit_anon_passwords of this FtpSettingsSettings.
        :rtype: bool
        """
        return self._limit_anon_passwords
    @limit_anon_passwords.setter
    def limit_anon_passwords(self, limit_anon_passwords):
        """
        Sets the limit_anon_passwords of this FtpSettingsSettings.
        This field determines whether the anon_password_list is used.
        :param limit_anon_passwords: The limit_anon_passwords of this FtpSettingsSettings.
        :type: bool
        """
        self._limit_anon_passwords = limit_anon_passwords
    @property
    def local_root_path(self):
        """
        Gets the local_root_path of this FtpSettingsSettings.
        This option represents a directory in /ifs which vsftpd will try to change into after a local login.
        :return: The local_root_path of this FtpSettingsSettings.
        :rtype: str
        """
        return self._local_root_path
    @local_root_path.setter
    def local_root_path(self, local_root_path):
        """
        Sets the local_root_path of this FtpSettingsSettings.
        This option represents a directory in /ifs which vsftpd will try to change into after a local login.
        :param local_root_path: The local_root_path of this FtpSettingsSettings.
        :type: str
        """
        self._local_root_path = local_root_path
    @property
    def local_umask(self):
        """
        Gets the local_umask of this FtpSettingsSettings.
        The value that the umask for file creation is set to for local users.
        :return: The local_umask of this FtpSettingsSettings.
        :rtype: int
        """
        return self._local_umask
    @local_umask.setter
    def local_umask(self, local_umask):
        """
        Sets the local_umask of this FtpSettingsSettings.
        The value that the umask for file creation is set to for local users.
        :param local_umask: The local_umask of this FtpSettingsSettings.
        :type: int
        """
        self._local_umask = local_umask
    @property
    def server_to_server(self):
        """
        Gets the server_to_server of this FtpSettingsSettings.
        If enabled, allow server-to-server (FXP) transfers.
        :return: The server_to_server of this FtpSettingsSettings.
        :rtype: bool
        """
        return self._server_to_server
    @server_to_server.setter
    def server_to_server(self, server_to_server):
        """
        Sets the server_to_server of this FtpSettingsSettings.
        If enabled, allow server-to-server (FXP) transfers.
        :param server_to_server: The server_to_server of this FtpSettingsSettings.
        :type: bool
        """
        self._server_to_server = server_to_server
    @property
    def service(self):
        """
        Gets the service of this FtpSettingsSettings.
        This field controls whether the FTP daemon is running.
        :return: The service of this FtpSettingsSettings.
        :rtype: bool
        """
        return self._service
    @service.setter
    def service(self, service):
        """
        Sets the service of this FtpSettingsSettings.
        This field controls whether the FTP daemon is running.
        :param service: The service of this FtpSettingsSettings.
        :type: bool
        """
        self._service = service
    @property
    def session_support(self):
        """
        Gets the session_support of this FtpSettingsSettings.
        If enabled, maintain login sessions for each user through Pluggable Authentication Modules (PAM). Disabling this option prevents the ability to do automatic home directory creation if that functionality were otherwise available.
        :return: The session_support of this FtpSettingsSettings.
        :rtype: bool
        """
        return self._session_support
    @session_support.setter
    def session_support(self, session_support):
        """
        Sets the session_support of this FtpSettingsSettings.
        If enabled, maintain login sessions for each user through Pluggable Authentication Modules (PAM). Disabling this option prevents the ability to do automatic home directory creation if that functionality were otherwise available.
        :param session_support: The session_support of this FtpSettingsSettings.
        :type: bool
        """
        self._session_support = session_support
    @property
    def session_timeout(self):
        """
        Gets the session_timeout of this FtpSettingsSettings.
        The timeout, in seconds, which is roughly the maximum time we permit data transfers to stall for with no progress. If the timeout triggers, the remote client is kicked off.
        :return: The session_timeout of this FtpSettingsSettings.
        :rtype: int
        """
        return self._session_timeout
    @session_timeout.setter
    def session_timeout(self, session_timeout):
        """
        Sets the session_timeout of this FtpSettingsSettings.
        The timeout, in seconds, which is roughly the maximum time we permit data transfers to stall for with no progress. If the timeout triggers, the remote client is kicked off.
        :param session_timeout: The session_timeout of this FtpSettingsSettings.
        :type: int
        """
        self._session_timeout = session_timeout
    @property
    def user_config_dir(self):
        """
        Gets the user_config_dir of this FtpSettingsSettings.
        Specifies the directory where per-user config overrides can be found.
        :return: The user_config_dir of this FtpSettingsSettings.
        :rtype: str
        """
        return self._user_config_dir
    @user_config_dir.setter
    def user_config_dir(self, user_config_dir):
        """
        Sets the user_config_dir of this FtpSettingsSettings.
        Specifies the directory where per-user config overrides can be found.
        :param user_config_dir: The user_config_dir of this FtpSettingsSettings.
        :type: str
        """
        self._user_config_dir = user_config_dir
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
else:
result[attr] = value
return result
    def to_str(self):
        """
        Returns the string representation of the model
        """
        # pformat is presumably imported at module top (pprint.pformat) --
        # the import is not visible in this chunk.
        return pformat(self.to_dict())
    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        # Delegate to __eq__ so the two operators stay consistent.
        return not self == other
|
from django.db import models
from django.contrib.auth.models import (
AbstractBaseUser , BaseUserManager
)
from django.utils.safestring import mark_safe
class UserManager(BaseUserManager):
    """Manager that creates users keyed by their email address."""

    def create_user(self, email, first_name=None, last_name=None, password=None, is_active=True, is_staff=False, is_admin=False):
        """Create and persist a user; email and password are mandatory."""
        if not email:
            raise ValueError("Users must have an Email Address")
        if not password:
            raise ValueError("Users must have a Password")
        user = self.model(
            email=self.normalize_email(email),
            first_name=first_name,
            last_name=last_name,
        )
        user.set_password(password)
        user.staff = is_staff
        user.admin = is_admin
        user.active = is_active
        user.save(using=self._db)
        return user

    def create_staffuser(self, email, first_name=None, last_name=None, password=None):
        """Create a user flagged as staff (non-superuser)."""
        return self.create_user(
            email,
            first_name=first_name,
            last_name=last_name,
            password=password,
            is_staff=True,
        )

    def create_superuser(self, email, first_name=None, last_name=None, password=None):
        """Create a user flagged as both staff and admin."""
        return self.create_user(
            email,
            first_name=first_name,
            last_name=last_name,
            password=password,
            is_staff=True,
            is_admin=True,
        )
class User(AbstractBaseUser):
    """Custom user model that authenticates with an email address."""
    email = models.EmailField(max_length=255, unique=True)
    first_name = models.CharField(max_length=255, blank=True, null=True)
    last_name = models.CharField(max_length=255, blank=True, null=True)
    active = models.BooleanField(default=True)  # can login
    staff = models.BooleanField(default=False)  # staff user non super
    admin = models.BooleanField(default=False)  # superuser
    timestamp = models.DateTimeField(auto_now_add=True)
    USERNAME_FIELD = 'email'  # Username
    REQUIRED_FIELDS = []  # add full name to user form
    # BUGFIX: this was misspelled `obejcts`, which left the default manager
    # `User.objects` undefined. The old name is kept as an alias so any
    # existing call sites keep working.
    objects = UserManager()
    obejcts = objects
    def __str__(self):
        return self.email
    def get_first_name(self):
        """Return the first name, falling back to the email address."""
        if self.first_name:
            return self.first_name
        return self.email
    def get_last_name(self):
        """Return the last name (may be None; the field is nullable)."""
        return self.last_name
    def has_perm(self, perm, obj=None):
        # Object-level permissions are not implemented: every user passes.
        return True
    def has_module_perms(self, app_label):
        # Module-level permissions are not implemented: every user passes.
        return True
    @property
    def is_staff(self):
        return self.staff
    @property
    def is_admin(self):
        return self.admin
    @property
    def is_active(self):
        return self.active
class UserProfile(models.Model):
    """Optional contact/profile data linked one-to-one to a User."""
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    phone = models.CharField(blank=True, max_length=20)
    address = models.CharField(blank=True, max_length=150)
    city = models.CharField(blank=True, max_length=20)
    country = models.CharField(blank=True, max_length=50)
    image = models.ImageField(blank=True, upload_to='images/users/')
    def __str__(self):
        # BUGFIX: first_name is nullable on User and __str__ must return a
        # string, so fall back to the email address.
        return self.user.first_name or self.user.email
    def user_name(self):
        """Return the user's full name, tolerating missing name parts."""
        # BUGFIX: the original `first + ' ' + last` raised TypeError when
        # either nullable name field was None.
        return ' '.join(p for p in (self.user.first_name, self.user.last_name) if p)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated migration: registers the unmanaged LogButton and
    # LogControl models (managed=False, so Django does not create or alter
    # the underlying log_button / log_control tables).
    dependencies = [
        ('occ_survey', '0035_merge'),
    ]
    operations = [
        migrations.CreateModel(
            name='LogButton',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('time', models.DateTimeField()),
                ('room', models.TextField(null=True, blank=True)),
                ('state', models.IntegerField(null=True, blank=True)),
                ('lux', models.IntegerField(null=True, blank=True)),
            ],
            options={
                'db_table': 'log_button',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='LogControl',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('time', models.DateTimeField()),
                ('name', models.TextField(null=True, blank=True)),
                ('ip', models.TextField(null=True, blank=True)),
                ('room', models.TextField(null=True, blank=True)),
                ('lux', models.IntegerField(null=True, blank=True)),
                ('td', models.IntegerField(null=True, db_column=b'TD', blank=True)),
                ('dt', models.IntegerField(null=True, blank=True)),
                ('action', models.TextField(null=True, blank=True)),
                ('occupied', models.IntegerField(null=True, blank=True)),
                ('dark', models.IntegerField(null=True, blank=True)),
                ('bright', models.IntegerField(null=True, blank=True)),
                ('lights', models.IntegerField(null=True, blank=True)),
                ('buttonontime', models.IntegerField(null=True, db_column=b'buttonOnTime', blank=True)),
                ('buttonofftime', models.IntegerField(null=True, db_column=b'buttonOffTime', blank=True)),
                ('trigger', models.TextField(null=True, blank=True)),
                ('mode', models.IntegerField(null=True, blank=True)),
                ('timetaken', models.FloatField(null=True, blank=True)),
            ],
            options={
                'db_table': 'log_control',
                'managed': False,
            },
        ),
    ]
|
# Solution of the challenge Ice Cream Parlor proposed on Hackerrank at https://www.hackerrank.com/challenges/icecream-parlor/problem
import random
import re
import sys
# We should build a double for-loop over the array arr. As soon as the unique pair of indices has been found,
# the indices should be translated by 1 and then returned by the function
def icecreamParlor(m, arr):
    """Return the 1-based indices of the pair in arr summing to m.

    The problem guarantees a unique solution; returns None if no pair
    exists. Runs in O(n) using a value->index map instead of the original
    O(n^2) double loop.
    """
    seen = {}  # price -> earliest 1-based index where it occurred
    for j, price in enumerate(arr, start=1):
        i = seen.get(m - price)
        if i is not None:
            return [i, j]
        # Keep only the first occurrence of each price.
        seen.setdefault(price, j)
    return None
if __name__ == '__main__':
    # Input format: t test cases; each gives the target m, the count n
    # (read but unused -- arr's own length is used), then the n prices.
    t = int(input())
    for t_itr in range(t):
        m = int(input())
        n = int(input())
        arr = list(map(int, input().rstrip().split()))
        result = icecreamParlor(m, arr)
        print(result)
|
#Process osm tiles to a mkgmap template file
#nice java -jar ../mkgmap-r3337/mkgmap.jar --max-jobs=4 --drive-on-left --mapname=63290001 --description="FOSM map (C) fosm, OpenStreetMap" --copyright-message="CC BY-SA 2.0" --route --add-pois-to-areas --add-pois-to-lines --road-name-pois --index --gmapsupp -c template.args
import tiles, os, sortosm, bz2
import mergeTiles
import xml.etree.ElementTree as ET
def ProcessSingleDataTile(x, y, zoom, mapIds, tileGroups):
    """Register one data tile: allocate a map id and queue its file path."""
    tilePath = "../{2}/{0}/{1}.osm.bz2".format(x, y, zoom)
    nextId = 63240000 + len(mapIds)
    mapIds.append(nextId)
    tileGroups.append((tilePath,))
    # Sorting/splitting used to happen inline here:
    #sortosm.SortOsm(fina, "sorted.osm.bz2")
    #cmd = "java -jar ../splitter-r412/splitter.jar --mapid={0} sorted.osm.bz2".format(mapId)
    #os.system(cmd)
def MergeArea(x, y, zoom, mapIds, tileGroups):
    """Collect the data tiles covered by tile (x, y, zoom) into one group.

    Appends the list of existing tile files to tileGroups and allocates a
    new map id, unless no data tile files exist in the area. Relies on the
    module-global dataTileZoom set in __main__.
    """
    lat, lon = tiles.num2deg(x, y, zoom)
    x2, y2 = tiles.deg2num(lat, lon, dataTileZoom)
    lat2, lon2 = tiles.num2deg(x+1, y+1, zoom)
    x3, y3 = tiles.deg2num(lat2, lon2, dataTileZoom)
    fiList = []
    for cx in range(x2, x3):
        for cy in range(y2, y3):
            fina = "../12/{0}/{1}.osm.bz2".format(cx, cy)
            if not os.path.isfile(fina):
                continue
            fiList.append(fina)
    if len(fiList) == 0:
        return
    tileGroups.append(fiList)
    mapId = 63240000+len(mapIds)
    mapIds.append(mapId)
    # Removed dead `if 0:` block: it referenced an undefined `out` variable
    # and ran splitter.jar; the merge/split work is done later in __main__.
def CalcFileSize(x, y, zoom, dataTileZoom):
    """Sum the on-disk sizes of the data tile files covered by (x, y, zoom)."""
    lat, lon = tiles.num2deg(x, y, zoom)
    xlo, ylo = tiles.deg2num(lat, lon, dataTileZoom)
    lat2, lon2 = tiles.num2deg(x+1, y+1, zoom)
    xhi, yhi = tiles.deg2num(lat2, lon2, dataTileZoom)
    totalSize = 0
    for cx in range(xlo, xhi):
        for cy in range(ylo, yhi):
            tilePath = "../12/{0}/{1}.osm.bz2".format(cx, cy)
            if os.path.isfile(tilePath):
                totalSize += os.path.getsize(tilePath)
    return totalSize
def ProcessAreaNextZoom(x, y, zoom, dataTileZoom, mapIds, tileGroups):
    # Recursively subdivide tile (x, y, zoom) into its zoom+1 children and
    # either merge each child (if its total data size fits) or recurse
    # further. Uses the module-global maxAllowedFileSize set in __main__.
    lat, lon = tiles.num2deg(x, y, zoom)
    x2, y2 = tiles.deg2num(lat, lon, zoom+1)
    lat2, lon2 = tiles.num2deg(x+1, y+1, zoom)
    x3, y3 = tiles.deg2num(lat2, lon2, zoom+1)
    for cx in range(x2, x3):
        for cy in range(y2, y3):
            totalFileSize = CalcFileSize(cx, cy, zoom+1, dataTileZoom)
            if totalFileSize <= maxAllowedFileSize or zoom >= dataTileZoom:
                if zoom >= dataTileZoom:
                    ProcessSingleDataTile(cx, cy, zoom+1, mapIds, tileGroups)
                else:
                    #Merge at this zoom
                    #print "Merge tiles", cx, cy, zoom+1, totalFileSize
                    MergeArea(cx, cy, zoom+1, mapIds, tileGroups)
            else:
                #Recursively consider higher zoom level
                ProcessAreaNextZoom(cx, cy, zoom+1, dataTileZoom, mapIds, tileGroups)
if __name__=="__main__":
#lats = (51.00434, 51.26630) #Exmoor
#lons = (-4.02825, -3.26607) #Exmoor
#minZoom = 8
#lats = (49.6676278, 61.1856247) #UK and Eire
#lons = (2.2851563, -14.765625) #UK and Eire
#minZoom = 6
lats = (-47.279229, -9.2756222) #Aus
lons = (107.7539063, 162.5976563) #Aus
minZoom = 6
count = 0
os.chdir("tmp")
dataTileZoom = 12
mapIds = []
tileGroups = []
maxAllowedFileSize = 10000000
#Plan work
print "Lat range", min(lats), max(lats)
print "Lon range", min(lons), max(lons)
tileBL = tiles.deg2num(min(lats), min(lons), minZoom)
tileTR = tiles.deg2num(max(lats), max(lons), minZoom)
print "Tile coordinates", minZoom, tileBL, tileTR
for x in range(tileBL[0], tileTR[0] + 1):
for y in range(tileTR[1], tileBL[1] + 1):
totalFileSize = CalcFileSize(x, y, minZoom, dataTileZoom)
if totalFileSize <= maxAllowedFileSize or minZoom >= dataTileZoom:
if minZoom >= dataTileZoom:
ProcessSingleDataTile(x, y, minZoom, mapIds, tileGroups)
else:
#Merge at this zoom
#print "Merge tiles", x, y, minZoom, totalFileSize
MergeArea(x, y, minZoom, mapIds, tileGroups)
else:
#Recusively consider higher zoom level
ProcessAreaNextZoom(x, y, minZoom, dataTileZoom, mapIds, tileGroups)
print "Numer of tile groups", len(tileGroups)
validMapIds = []
for i, (tg, tileId) in enumerate(zip(tileGroups, mapIds)):
print "Process tile group", i, "of", len(mapIds), ", num files:", len(tg)
empty = False
if len(tg) > 1:
out = bz2.BZ2File("tmp{0}.osm.bz2".format(tileId),"w")
countNodes, countWays, countRelations = mergeTiles.MergeFiles(tg, out, 0)
if countNodes + countWays + countRelations == 0:
#Everything is empty
empty = True
else:
#Check if empty
root = ET.fromstring(bz2.BZ2File(fina).read())
empty = True
for nd in root:
if nd.tag in ["node", "way", "relation"]:
empty = False
if not empty:
sortosm.SortOsm(fina, "tmp{0}.osm.bz2".format(tileId))
if empty: continue
validMapIds.append(tileId)
for tileId in validMapIds:
cmd = "java -jar ../splitter-r412/splitter.jar --mapid={0} tmp{0}.osm.bz2".format(tileId)
os.system(cmd)
template = "# family-id: 981\n"
template += "# product-id: 100\n\n"
template += "# Following is a list of map tiles. Add a suitable description\n"
template += "# for each one.\n\n"
for mapId in validMapIds:
if not os.path.isfile("{0}.osm.pbf".format(mapId)): continue
template += "mapname: {0}\n".format(mapId)
template += "# description: Tile\n"
template += "input-file: {0}.osm.pbf\n".format(mapId)
finaOut = open("template.args","wt")
finaOut.write(template)
finaOut.flush()
|
from typing import List
from .animal import Animal
class AnimalsRepository:
    """Data-access layer for Animal rows stored in the `animals` table."""

    def __init__(self, conn):
        # conn: a DB-API connection using %s-style parameter placeholders.
        self.table_name = "animals"
        self.conn = conn

    def find_animal(self, id_: str) -> Animal:
        """Fetch a single animal by id.

        NOTE(review): raises TypeError (result is None) when the id does not
        exist -- confirm whether callers expect an exception or None.
        """
        cur = self.conn.cursor()
        cur.execute(f"SELECT id, species FROM {self.table_name} WHERE id = %s;", (id_,))
        result = cur.fetchone()
        # Removed leftover debug print(result).
        return Animal(result[0], result[1])

    def list_animals(self) -> List[Animal]:
        """Return all animals in the table."""
        cur = self.conn.cursor()
        cur.execute(f"SELECT id, species FROM {self.table_name};")
        results = cur.fetchall()
        return [Animal(result[0], result[1]) for result in results]
|
#!/usr/bin/python
#
# Copyright (c) 2019 Opticks Team. All Rights Reserved.
#
# This file is part of Opticks
# (see https://bitbucket.org/simoncblyth/opticks).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
NB lldb.frame and frame are not the same
INTERACTIVE::
>>> from opticks.tools.evaluate import EV ; self = EV(lldb.frame.FindVariable("this"))
WITHIN BREAKPOINT FUNC::
self = EV(frame.FindVariable("this"))
::
(lldb) fr v
(CRandomEngine *) this = 0x0000000110025c70
(lldb) script
Python Interactive Interpreter. To exit, type 'quit()', 'exit()' or Ctrl-D.
>>> from opticks.tools.evaluate import EV ; self = EV(lldb.frame.FindVariable("this"))
>>> self.v.keys
['CLHEP::HepRandomEngine', 'm_g4', 'm_ctx', 'm_ok', 'm_mask', 'm_masked', 'm_path', 'm_alignlevel', 'm_seed', 'm_internal', 'm_skipdupe', 'm_locseq', 'm_curand', 'm_curand_index', 'm_curand_ni', 'm_curand_nv', 'm_current_record_flat_count', 'm_current_step_flat_count', 'm_offset', 'm_offset_count', 'm_flat', 'm_location', 'm_sequence', 'm_cursor', 'm_cursor_old']
>>> self.v(".m_ctx._step_id")
(int) _step_id = 0
>>> self.ev(".m_ctx._step_id")
0
>>> self.ev(".m_ctx")
OrderedDict([('_ok', 'ptr'), ('_pindex', 0), ('_print', True), ('_dbgrec', False), ('_dbgseq', False), ('_dbgzero', False), ('_photons_per_g4event', 10000), ('_steps_per_photon', 10), ('_gen', 4096), ('_record_max', 1), ('_bounce_max', 9), ('_ok_event_init', True), ('_event', 'ptr'), ('_event_id', 0), ('_event_total', 1), ('_event_track_count', 1), ('_track', 'ptr'), ('_process_manager', 'ptr'), ('_track_id', 0), ('_track_total', 1), ('_track_step_count', 1), ('_parent_id', -1), ('_optical', True), ('_pdg_encoding', 0), ('_primary_id', -2), ('_photon_id', 0), ('_reemtrack', False), ('_record_id', 0), ('_record_fraction', 0.0), ('_rejoin_count', 0), ('_primarystep_count', 1), ('_stage', OrderedDict()), ('_debug', False), ('_other', False), ('_dump', False), ('_dump_count', 0), ('_step', 'ptr'), ('_noZeroSteps', 0), ('_step_id', 0), ('_step_total', 1), ('_step_origin', OrderedDict([('dx', 11.291412353515625), ('dy', -34.645111083984375), ('dz', -449.8999938964844)]))])
>>> self.v(".m_ctx")
(CG4Ctx &) m_ctx = 0x0000000110025af0: {
_ok = 0x000000010c735c40
_pindex = 0
_print = true
_dbgrec = false
_dbgseq = false
_dbgzero = false
_photons_per_g4event = 10000
_steps_per_photon = 10
_gen = 4096
_record_max = 1
"""
from collections import OrderedDict
from opticks.tools.lldb_ import lldb
def rsplit(r):
    """Split r on newlines and strip surrounding whitespace from each line.

    Always returns a list (the original `map(lambda ...)` returned a lazy,
    single-use iterator on Python 3, which would break the repeated `in`
    membership tests done against SKIPS/NOT_CANONICAL).
    """
    return [line.strip() for line in r.split("\n")]
class EV(object):
    """Pairs a Value wrapper with an Evaluate instance for convenience."""
    def __init__(self, v=None):
        self.e = Evaluate()
        self.v = v
    def _set_v(self, v):
        # Accept an existing Value or None as-is; wrap anything else.
        # BUGFIX: the original tested `type(v) is None`, which is never true
        # (type(None) is NoneType), so None got wrapped as Value(None).
        if type(v) is Value or v is None:
            self._v = v
        else:
            self._v = Value(v)
    def _get_v(self):
        return self._v
    v = property(_get_v, _set_v)
    def v_(self, k):
        """Child lookup on the bound value; None when nothing is bound."""
        if self.v is None:
            return None
        return self.v(k)
    def ev(self, k):
        """Evaluate child k to a plain python object; None when unbound."""
        if self.v is None:
            return None
        return self.e(self.v(k))
class Value(object):
    """Thin wrapper around an lldb SBValue giving dict-like child access.

    `v.keys` lists child names, `v("name")` looks up a child member, and
    `v(".expr.path")` resolves an expression path; both return Value.
    """
    def __init__(self, v):
        self.v = v
    def _get_keys(self):
        count = self.v.GetNumChildren()
        return [self.v.GetChildAtIndex(idx).GetName() for idx in range(count)]
    keys = property(_get_keys)
    def __call__(self, k):
        # A leading '.' selects an expression path, otherwise a member name.
        if k[0] == ".":
            child = self.v.GetValueForExpressionPath(k)
        else:
            child = self.v.GetChildMemberWithName(k)
        return Value(child)
    def __repr__(self):
        return str(self.v)
    def __str__(self):
        return "\n".join(repr(self(name)) for name in self.keys)
class Evaluate(object):
    """
    Convert lldb SBValue instances into plain python values:
    scalars for atoms, OrderedDict for composites, "ptr"/"skp"
    placeholders for pointers and skipped types.

    NB : holds no "domain" state
    """
    SKIPS = rsplit(r"""
char **
""")
    NOT_CANONICAL = rsplit(r"""
std::__1::string
""")
    # canonical type for std::string is giant basic_string monstrosity, so dont use it for classify
    E_ATOM = "ATOM"
    E_SKIP = "SKIP"
    E_PTR = "PTR"
    E_COMP = "COMP"
    E_ENUM = "ENUM"

    @classmethod
    def classify(cls, v):
        """Return one of the E_* category constants for SBValue *v*."""
        tn = v.GetTypeName()
        t = v.GetType()
        pt = t.IsPointerType()
        lem = len(t.enum_members)
        if tn in cls.ATOMS:
            et = cls.E_ATOM
        elif tn in cls.SKIPS:
            et = cls.E_SKIP
        elif lem > 0:
            et = cls.E_ENUM
        elif pt:
            et = cls.E_PTR
        else:
            et = cls.E_COMP
        return et

    def __init__(self, error=None, opt=""):
        """
        :param error: lldb.SBError to reuse; a fresh one is created when None
        :param opt: debug-print flags -- "f" frame, "e" evaluate, "c" composite, "a" atom
        """
        if error is None:
            error = lldb.SBError()
        self.error = error
        self.opt = opt

    def __call__(self, v):
        """Accept either a Value wrapper or a raw SBValue."""
        if type(v) is Value:
            vv = v.v
        else:
            vv = v
        return self.evaluate(vv)

    def evaluate_frame(self, f):
        """Evaluate every variable of SBFrame *f* into an OrderedDict keyed by name."""
        ef = OrderedDict()
        vls = f.get_all_variables()
        for v in vls:
            k = v.GetName()
            e = self.evaluate(v)
            ef[k] = e
            if "f" in self.opt:
                te = type(e)
                print("(f) %(k)10s : %(e)15s : %(te)15s " % locals())
        return ef

    def evaluate(self, v):
        """Dispatch on classify(v); see class docstring for the result forms."""
        et = self.classify(v)
        k = v.GetName()
        nc = v.GetNumChildren()
        tn = v.GetTypeName()
        if "e" in self.opt:
            print("(e) %(k)10s : %(tn)15s : %(nc)4d : %(et)s " % locals())
        if et == self.E_ATOM:
            e = self.evaluate_atom(v)
        elif et == self.E_ENUM:
            e = v.GetValue()
        elif et == self.E_SKIP:
            e = "skp"
        elif et == self.E_PTR:
            e = "ptr"
        elif et == self.E_COMP:
            e = self.evaluate_comp(v)
        else:
            assert 0
        return e

    def evaluate_comp(self, o):
        """Recursively evaluate a composite SBValue into an OrderedDict."""
        eo = OrderedDict()
        nc = o.GetNumChildren()
        for i in range(nc):
            v = o.GetChildAtIndex(i)
            k = v.GetName()
            # BUGFIX: bind the result to a local ``e`` -- the debug branch
            # below referenced ``e`` which was undefined in the original
            # (NameError whenever "c" was in self.opt).
            e = self.evaluate(v)
            eo[k] = e
            if "c" in self.opt:
                te = type(e)
                print("(c) %(k)10s : %(e)15s : %(te)15s " % locals())
        return eo

    ATOMS = rsplit(r"""
bool
char
int
long
long long
unsigned char
unsigned int
unsigned long
unsigned long long
float
double
std::__1::string
const char *
""")

    def atom_typename(self, v):
        """Canonical type name, except for types listed in NOT_CANONICAL."""
        t = v.GetType()
        vtn = v.GetTypeName()
        if vtn in self.NOT_CANONICAL:
            tn = vtn
        else:
            ct = t.GetCanonicalType()
            ctn = ct.GetName()
            tn = ctn
        return tn

    def evaluate_atom(self, v):
        """
        :param v: SBValue
        :return: python equivalent or "?" if unhandled
        """
        nc = v.GetNumChildren()
        k = v.GetName()
        tn = self.atom_typename(v)
        sz = v.GetByteSize()
        d = v.GetData()
        error = self.error
        if tn == "unsigned int":
            assert sz == 4
            e = v.GetValueAsUnsigned()
        elif tn == "int":
            assert sz == 4
            e = v.GetValueAsSigned()
        elif tn == "long" or tn == "long long":
            assert sz == 8
            e = d.GetSignedInt64(error, 0)
        elif tn == "unsigned long" or tn == "unsigned long long":
            assert sz == 8
            e = d.GetUnsignedInt64(error, 0)
        elif tn == "float":
            assert sz == 4
            offset = 0
            e = d.GetFloat(error, offset)
        elif tn == "double":
            assert sz == 8
            offset = 0
            e = d.GetDouble(error, offset)
        elif tn == "bool":
            assert sz == 1
            offset = 0
            e = d.GetUnsignedInt8(error, offset)
            assert e == 0 or e == 1
            e = e == 1
        elif tn == "unsigned char":
            assert sz == 1
            offset = 0
            e = d.GetUnsignedInt8(error, offset)
        elif tn == "char":
            assert sz == 1
            offset = 0
            e = d.GetSignedInt8(error, offset)
        elif tn == "std::__1::string":
            # read via the summary and strip its surrounding quotes; raw
            # data reads proved unreliable (see historical notes below)
            s = v.GetSummary()
            e = s[1:-1]  # unquote
            #offset = 1
            #e = d.GetString(error, offset)
            # offset 1 avoids "\x16hello"
            # hmm kinda dodgy, the string is actually composite with one child
            # sometimes gives blanks
            #
            #e = v.GetFrame().EvaluateExpression("%s.c_str()" % k)
        elif tn == "const char *":
            tt = v.GetType().GetPointeeType()
            assert tt.GetName() == "const char"
            sz = tt.GetByteSize()
            assert sz == 1
            ptr = v.GetValueAsUnsigned()
            e = v.GetFrame().GetThread().GetProcess().ReadCStringFromMemory(ptr, 256, error)
        else:
            e = "?"
        te = str(type(e))
        fmt = "(a) %(k)10s : %(tn)30s : %(nc)4d : %(sz)4d : %(e)15s : %(te)10s : %(v)40s "
        if "a" in self.opt:
            print(fmt % locals())
        return e
if __name__ == '__main__':
    pass  # no standalone CLI behaviour; module is intended to be imported (e.g. from an lldb session)
|
# Generated by Django 2.2.6 on 2019-11-02 13:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration (Django 2.2.6): adds the ProgressQty and
    ShiftedQty per-site quantity tables and removes the Location model."""

    dependencies = [
        ('work', '0003_test'),
    ]

    operations = [
        # Quantities recorded against a work.Site.
        migrations.CreateModel(
            name='ProgressQty',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('ht', models.FloatField()),
                ('lt_3p', models.FloatField()),
                ('lt_1p', models.FloatField()),
                ('pole_lt_8m', models.IntegerField()),
                ('pole_ht_8m', models.IntegerField()),
                ('dtr_100', models.IntegerField()),
                ('dtr_63', models.IntegerField()),
                ('dtr_25', models.IntegerField()),
                ('remark', models.CharField(max_length=200)),
                ('status', models.CharField(max_length=200)),
                ('site', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='work.Site')),
            ],
        ),
        # Material quantities shifted to a work.Site.
        migrations.CreateModel(
            name='ShiftedQty',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('acsr', models.FloatField()),
                ('cable_3p', models.FloatField()),
                ('cable_1p', models.FloatField()),
                ('pole_8m', models.IntegerField()),
                ('pole_9m', models.IntegerField()),
                ('dtr_100', models.IntegerField()),
                ('dtr_63', models.IntegerField()),
                ('dtr_25', models.IntegerField()),
                ('remark', models.CharField(max_length=200)),
                ('site', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='work.Site')),
            ],
        ),
        migrations.DeleteModel(
            name='Location',
        ),
    ]
|
# Simple 5x5 battleship game: the player has 5 turns to hit a randomly
# placed single-cell ship; replays until the player declines.
from random import randint

play = True
while play:
    # Fresh 5x5 ocean of "O" cells for every game.
    board = []
    for x in range(5):
        board.append(["O"] * 5)

    def print_board(board):
        """Print the board, one row per line, cells separated by spaces."""
        for row in board:
            print(" ".join(row))

    print('You have 5 turns to hit my battleship.')
    print_board(board)

    def random_row(board):
        """Random valid row index for *board*."""
        return randint(0, len(board) - 1)

    def random_col(board):
        """Random valid column index for *board*."""
        return randint(0, len(board[0]) - 1)

    ship_row = random_row(board)
    ship_col = random_col(board)
    #print(ship_row, ship_col)

    turn = 0
    win = 0
    while turn < 5:
        print("Turn", turn + 1)
        guess_row = -1
        guess_col = -1
        try:
            guess_row = int(input("Guess Row: ")) - 1
            guess_col = int(input("Guess Col: ")) - 1
        except ValueError:
            # BUGFIX: was a bare ``except:`` which also swallowed
            # KeyboardInterrupt/SystemExit; only non-numeric input is expected.
            print('That is not a valid input.')
        if guess_row == ship_row and guess_col == ship_col:
            print("Congratulations! You sunk my battleship!")
            board[guess_row][guess_col] = "H"
            print_board(board)
            win = 1
            break  # removed redundant ``turn = 5`` -- break already exits the loop
        else:
            # Invalid input falls through with (-1, -1) and is treated as an
            # out-of-ocean shot, consuming a turn (original behaviour kept).
            if (guess_row < 0 or guess_row > 4) or (guess_col < 0 or guess_col > 4):
                print("That is not in the ocean. Please aim in the ocean. (Range 1-5)")
            elif board[guess_row][guess_col] == "X":
                print("You guessed that one already.")
            else:
                print("You missed my battleship!")
                board[guess_row][guess_col] = "X"
            turn += 1
            print_board(board)
            if turn == 5:
                if win == 0:
                    print("Game Over. You lose.")

    # Replay prompt; loop until a definitive y/n answer.
    again = input('Would you like to play again? (y/n) ')
    retry = 0
    while retry == 0:
        if again != 'y' and again != 'n':
            again = input('Invalid response. Please enter y or n. ')
        elif again == 'y':
            retry = 1
        elif again == 'n':
            retry = 1
            play = False
|
import socket
import sys
import struct
import fcntl
import array
import threading
import time
import json
import multiprocessing
import datetime
import servo.servo as servo
import peltier.peltier as peltier
# NOTE(review): this assignment shadows the ``servo`` module imported above
# (``import servo.servo as servo``) -- after it runs, ``servo.Servo`` inside
# server.run() would raise AttributeError. TODO: rename this list.
servo = ["", ""]
accel_gyro = ["", ""]
class server (threading.Thread):
    """UDP listener thread: binds to the eth0/wlan0 address and dispatches
    incoming JSON commands to the servo or peltier hardware helpers,
    selected by the thread's ``name``."""

    def __init__(self, threadID, name, port):
        threading.Thread.__init__(self)
        self.threadID = threadID
        self.name = name
        self.port = port
        peltier.Peltier.init()

    def run(self):
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        ip = 0
        interfaces = get_interfaces(sock)
        for interface in interfaces:
            # BUGFIX: was ``interface[0] is 'eth0' or 'wlan0'`` which is
            # always true ('wlan0' is truthy) and compared identity rather
            # than equality -- every interface's address was accepted.
            if interface[0] in ('eth0', 'wlan0'):
                ip = interface[1]
        server_address = (ip, self.port)
        sock.bind(server_address)
        print('%s %s %s \n' % (ip, self.port, self.name))
        servo_exec = None
        peltier_exec = None
        while True:
            data, addr = sock.recvfrom(1024)
            print(data)
            print(self.port)
            # JSON Info
            info = json.loads(data)
            print(datetime.datetime.now().time())
            # BUGFIX: the comparisons below used ``is`` / ``is not``
            # (object identity) on strings and ints, which is unreliable;
            # replaced with ``==`` / ``!=``.
            if self.name == 'servo':
                # handle servo data in separate thread
                # { 'timestamp': aabbccxxyyzz, 'angle': 300 }
                if servo_exec is None or not servo_exec.is_alive():
                    if info["angle"] != 0:
                        servo_exec = threading.Thread(target=servo.Servo.tilt, args=(info["angle"], "finger", ))
                    else:
                        servo_exec = threading.Thread(target=servo.Servo.stop, args=())
                    servo_exec.start()
            elif self.name == 'peltier':
                # { 'timestamp': aabbccxxyyzz, 'temperature': 5 }
                if peltier_exec is None or not peltier_exec.is_alive():
                    if info["temperature"] > 0:
                        peltier_exec = threading.Thread(target=peltier.Peltier.hot, args=())
                    elif info["temperature"] == 0:
                        peltier_exec = threading.Thread(target=peltier.Peltier.stop, args=())
                    else:
                        peltier_exec = threading.Thread(target=peltier.Peltier.cold, args=())
                    peltier_exec.start()
            elif self.name == 'servo_send':
                # NOTE(review): this branch has no body in the visible
                # source -- presumably unimplemented. TODO confirm.
                pass
def format_ip(ip):
    """Render a 4-byte packed IP (as a byte string) in dotted-quad form."""
    octets = (ip[0], ip[1], ip[2], ip[3])
    return ".".join(str(ord(octet)) for octet in octets)
def get_interfaces(sock):
    """Enumerate network interfaces via the SIOCGIFCONF ioctl (0x8912).

    Returns a list of (interface_name, dotted_ip) tuples.
    NOTE(review): Python 2 specific -- uses a str-initialised array and
    ``array.tostring``; the 32-byte record stride and the 20:24 address
    offset assume a particular ifreq layout -- TODO confirm on 64-bit
    platforms, where struct ifreq is larger.
    """
    total_bytes = 256 * 32;  # room for up to 256 records of 32 bytes each
    interface_info = array.array('B', '\0' * total_bytes)
    # The ioctl fills the buffer and returns the number of bytes written.
    output_bytes = struct.unpack('iL', fcntl.ioctl(
        sock.fileno(),
        0x8912,  # SIOCGIFCONF
        struct.pack('iL', total_bytes, interface_info.buffer_info()[0])
    ))[0]
    interfaces = []
    str_interface_info = interface_info.tostring()
    for i in range (0, output_bytes, 32):
        # NUL-terminated interface name at the record start; 4 address
        # bytes at offset 20.
        name = str_interface_info[i:i+32].split('\0', 1)[0]
        ip = str_interface_info[i+20:i+24]
        interfaces.append((
            name,
            format_ip(ip)
        ))
    return interfaces
# One listener instance per function/port.
servo_server = server(1, "servo", 3000)
peltier_server = server(2, "peltier", 3001)
servo_send_server = server(3, "servo_send", 3002)
# Daemon threads die with the main process.
servo_server.daemon = True
peltier_server.daemon = True
servo_send_server.daemon = True
# Running on separate thread
servo_server.start()
# NOTE(review): servo_send_server is created but never started -- TODO confirm intended.
# Running on main thread
peltier_server.run()
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-03-20 19:10
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='CustomUser',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(default='', max_length=100)),
('bio', models.TextField(default='')),
('position', models.CharField(default='', max_length=200)),
('role', models.CharField(default='', max_length=200)),
],
),
migrations.CreateModel(
name='FilledCategory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
],
),
migrations.CreateModel(
name='FilledSubcategory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_offered', models.BooleanField(default=False)),
('description', models.TextField(default='')),
('filled_category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='filled_subcategories', to='backend.FilledCategory')),
],
),
migrations.CreateModel(
name='PrivateLike',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
],
),
migrations.CreateModel(
name='PrivatePost',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(default='', max_length=500)),
('content', models.TextField(default='')),
('timestamp', models.DateTimeField(default=django.utils.timezone.now)),
('creator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='private_posts', to='backend.CustomUser')),
],
),
migrations.CreateModel(
name='PrivateResponse',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('timestamp', models.DateTimeField(default=django.utils.timezone.now)),
('content', models.TextField(default='')),
('creator', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='private_responses', to='backend.CustomUser')),
],
),
migrations.CreateModel(
name='ProfileCategory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(default='', max_length=200)),
],
),
migrations.CreateModel(
name='ProfileSubcategory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(default='', max_length=200)),
('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='subcategories', to='backend.ProfileCategory')),
],
),
migrations.CreateModel(
name='PublicLike',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
],
),
migrations.CreateModel(
name='PublicPost',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(default='', max_length=500)),
('content', models.TextField(default='')),
('timestamp', models.DateTimeField(default=django.utils.timezone.now)),
('is_anonymous', models.BooleanField(default=True)),
('creator_name', models.CharField(default='', max_length=80)),
('creator', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='public_posts', to='backend.CustomUser')),
],
),
migrations.CreateModel(
name='PublicResponse',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('timestamp', models.DateTimeField(default=django.utils.timezone.now)),
('content', models.TextField(default='')),
('is_anonymous', models.BooleanField(default=True)),
('creator_name', models.CharField(default='', max_length=80)),
('creator', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='public_responses', to='backend.CustomUser')),
],
),
migrations.CreateModel(
name='Tag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(default='', max_length=100)),
],
),
migrations.CreateModel(
name='UniFiles',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(default='', max_length=200)),
('description', models.TextField(default='')),
('uploaded_file', models.FileField(blank=True, null=True, upload_to='uni_files/')),
],
),
migrations.CreateModel(
name='UniKeyTerms',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('term', models.CharField(default='', max_length=200)),
('definition', models.TextField(default='')),
],
),
migrations.CreateModel(
name='University',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(default='', max_length=200)),
('description', models.TextField(default='')),
('state', models.CharField(default='', max_length=80)),
('city', models.CharField(default='', max_length=80)),
('population', models.IntegerField(default=0)),
('logo', models.FileField(blank=True, null=True, upload_to='uni_logos/')),
('is_staging', models.BooleanField(default=False)),
],
),
migrations.AddField(
model_name='unikeyterms',
name='university',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='key_terms', to='backend.University'),
),
migrations.AddField(
model_name='unifiles',
name='university',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='files', to='backend.University'),
),
migrations.AddField(
model_name='publicpost',
name='tags',
field=models.ManyToManyField(related_name='public_posts', to='backend.Tag'),
),
migrations.AddField(
model_name='publiclike',
name='post',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='likes', to='backend.PublicPost'),
),
migrations.AddField(
model_name='publiclike',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='public_post_likes', to='backend.CustomUser'),
),
migrations.AddField(
model_name='privatepost',
name='tags',
field=models.ManyToManyField(related_name='private_posts', to='backend.Tag'),
),
migrations.AddField(
model_name='privatelike',
name='post',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='likes', to='backend.PrivatePost'),
),
migrations.AddField(
model_name='privatelike',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='private_post_likes', to='backend.CustomUser'),
),
migrations.AddField(
model_name='filledcategory',
name='category',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='filled_forms', to='backend.ProfileCategory'),
),
migrations.AddField(
model_name='filledcategory',
name='university',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='form_categories', to='backend.University'),
),
migrations.AddField(
model_name='customuser',
name='university',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='members', to='backend.University'),
),
migrations.AddField(
model_name='customuser',
name='user',
field=models.OneToOneField(default=0, on_delete=django.db.models.deletion.CASCADE, related_name='customuser', to=settings.AUTH_USER_MODEL),
),
]
|
import pandas as pd
# NOTE(review): sklearn.cross_validation was removed in modern scikit-learn
# (train_test_split now lives in sklearn.model_selection) -- TODO confirm
# the pinned version.
from sklearn.cross_validation import train_test_split

# Both datasets are downloaded from S3 at import time.
df_train = pd.read_csv('https://s3-us-west-2.amazonaws.com/fbdataset/train.csv')
df_test = pd.read_csv('https://s3-us-west-2.amazonaws.com/fbdataset/test.csv')
class PredictionModel():
    """Windowed place prediction: the (x, y) plane is tiled with overlapping
    windows and one ModelStore (per-window classifier) is trained per window.

    :param df: training dataframe with at least x, y, row_id, place_id columns
    :param xsize/ysize: window extent along x and y
    :param xslide/yslide: window stride along x and y
    :param xcol/ycol: names of the coordinate columns
    """

    def __init__(self, df, xsize=1, ysize=0.5, xslide=0.5, yslide=0.25, xcol='x', ycol='y'):
        self.df = df
        self.xsize = xsize
        self.ysize = ysize
        self.xslide = xslide
        self.yslide = yslide
        self.xcol = xcol
        self.ycol = ycol
        self.xmax = self.df.x.max()
        self.ymax = self.df.y.max()
        self.windows = self.generate_windows()
        self.slices = self.slice_df()

    def frange(self, x, y, jump):
        """Float range generator; always yields the end point *y* last."""
        while x < y:
            yield x
            x += jump
        yield y

    def generate_windows(self):
        """Return [((x1, y1), (x2, y2)), ...] corner pairs for every window."""
        ranges = []
        result = []
        xmin, xmax = self.df.x.min(), self.df.x.max()
        ymin, ymax = self.df.y.min(), self.df.y.max()
        xranges = list(self.frange(xmin, xmax - self.xsize, self.xslide))
        yranges = list(self.frange(ymin, ymax - self.ysize, self.yslide))
        ylen = len(yranges)
        for x in xranges:
            subrange = [x] * ylen
            ranges.extend(zip(subrange, yranges))
        for x1, y1 in ranges:
            # (removed an unused duplicate x2/y2 computation here)
            result.append(((x1, y1), (x1 + self.xsize, y1 + self.ysize)))
        return result

    def slice_df(self):
        """Build one ModelStore per window."""
        slices = {}
        for window in self.windows:
            slices[window] = ModelStore(self.df, window, self.xcol, self.ycol)
        return slices

    def find_best_window(self, x, y):
        """Return the ((x1, y1), (x2, y2)) window containing point (x, y).

        BUGFIX: the original signature was ``find_best_window(self, df)`` but
        its body referenced undefined names ``x`` and ``y`` (guaranteed
        NameError), so the method was uncallable; it now takes the point
        coordinates directly.  The pdb.set_trace() debugging hook is replaced
        by plain assertions.
        """
        x1, y1 = self.find_x_window(x), self.find_y_window(y)
        x2, y2 = x1 + self.xsize, y1 + self.ysize
        assert x1 <= x <= x2, (x1, x, x2)
        assert y1 <= y <= y2, (y1, y, y2)
        return ((x1, y1), (x2, y2))

    def find_x_window(self, x):
        """Left edge of the window whose span is centred nearest below *x*."""
        xs = max(0, x - (self.xsize / 2.0))
        x0 = 0
        while x0 < xs:
            x0 += self.xslide
        if x0 >= self.xmax - self.xsize:
            x0 = self.xmax - self.xsize
        return x0

    def find_y_window(self, y):
        """Bottom edge of the window whose span is centred nearest below *y*."""
        ys = max(0, y - (self.ysize / 2.0))
        y0 = 0
        while y0 < ys:
            y0 += self.yslide
        if y0 >= self.ymax - self.ysize:
            y0 = self.ymax - self.ysize
        return y0

    def train(self):
        """Train every per-window model."""
        # .items() works on both Python 2 and 3 (original used iteritems)
        for window, model in self.slices.items():
            print(model.describe())
            model.train()

    def predict(self, df):
        """Predict a place_id for every row of *df*; returns {row_id: place_id}."""
        self.expected = df.sort_values('row_id')['place_id']
        result_set = {}
        df['x1'] = df.x.apply(self.find_x_window)
        df['x2'] = df.x1 + self.xsize
        df['y1'] = df.y.apply(self.find_y_window)
        df['y2'] = df.y1 + self.ysize
        for window, model in self.slices.items():
            (x1, y1), (x2, y2) = window
            wdf = df[(df.x1 == x1) & (df.x2 == x2) & (df.y1 == y1) & (df.y2 == y2)]
            res = model.predict(wdf)
            result_set.update(res)
        self.actual = [result_set[x] for x in sorted(result_set.keys())]
        return result_set

    def score(self):
        """Percentage of predictions matching the expected place_ids."""
        expect = pd.Series(self.expected)
        actual = pd.Series(self.actual)
        return (sum(expect == actual) / float(len(self.expected))) * 100
class ModelStore():
    """Holds the training slice and classifier for one (x, y) window."""

    def __init__(self, df, window, xcol, ycol):
        self.window = window
        (self.x1, self.y1), (self.x2, self.y2) = self.window
        # BUGFIX: take an owned copy -- the derived-column assignments below
        # previously wrote into a boolean-mask view of the caller's frame
        # (pandas SettingWithCopyWarning / silently lost writes).
        self.df = df[(df[xcol] >= self.x1) & (df[xcol] <= self.x2) & (df[ycol] >= self.y1) & (df[ycol] <= self.y2)].copy()
        self.unique_place_count = len(self.df.place_id.unique())
        self.model = None
        # Derived cyclic time features: hours = time/60, days = time/(60*24).
        self.df['hours'] = self.df.time / 60.0
        self.df['days'] = self.df.time / (60 * 24.0)
        self.df['hours_cycle'] = self.df.hours % 24
        self.df['days_cycle'] = self.df.days % 7
        self.total_count = len(self.df)

    def describe(self):
        """One-line summary: window, row count, distinct place count."""
        return '{}: {}, {}'.format(self.window, self.total_count, self.unique_place_count)

    def train(self):
        """Fit a RandomForest on x, y, accuracy and the cyclic time features."""
        from sklearn.ensemble import RandomForestClassifier
        self.model = RandomForestClassifier(n_estimators=5)  # x, y, accuracy, hours_cycle, days_cycle
        self.tdf = self.df.sort_values('row_id').set_index('row_id')
        self.train_df = self.tdf[['x', 'y', 'accuracy', 'hours_cycle', 'days_cycle']]
        self.values = self.tdf['place_id']
        self.model.fit(self.train_df, self.values)

    def predict(self, df):
        """Predict place_ids for the rows of *df*; returns {row_id: place_id}."""
        wdf = df.sort_values('row_id').set_index('row_id')
        wdf['hours'] = wdf.time / 60.0
        wdf['days'] = wdf.time / (60 * 24.0)
        wdf['hours_cycle'] = wdf.hours % 24
        wdf['days_cycle'] = wdf.days % 7
        wdf = wdf[['x', 'y', 'accuracy', 'hours_cycle', 'days_cycle']]
        return dict(zip(wdf.index, self.model.predict(wdf)))
def run():
    """End-to-end driver: split df_train 80/20, train the windowed model,
    predict on the held-out rows and print/return the accuracy score.

    NOTE(review): Python 2 print statements; relies on the module-level
    df_train downloaded at import time.
    """
    train, test = train_test_split(df_train, test_size = 0.2)
    pred_model = PredictionModel(df=train)
    print pred_model.slices
    pred_model.train()
    print pred_model.predict(test)
    score = pred_model.score()
    print score
    return score

# Executed on import.
run()
|
from .restrictions import *
from .to_sorted_ntriples import *
|
import RPi.GPIO as GPIO
import time
import os.path
import sys
frequencyHertz = 100  # PWM frequency
msPerCycle = 1000 / frequencyHertz  # one PWM cycle in milliseconds (10 ms)
# Servo position values fed into the duty-cycle computation in run_cycle
# (presumably pulse widths in ms -- TODO confirm against the servo spec).
leftPosition = 2
rightPosition = 2
positionList = [leftPosition, rightPosition]
# State/lock file (path under the web root -- presumably shared with a web UI).
tmpfile = "/var/www/html/data/servo_run"
def run_cycle(i):
    """Drive the servo on board pin 7 through *i* sweeps over positionList,
    holding each position for 2 seconds.

    BUGFIX: the loop variable shadowed the *i* parameter; also the PWM is
    now started once and subsequently adjusted via ChangeDutyCycle instead
    of being restarted for every position.
    """
    GPIO.setmode(GPIO.BOARD)
    GPIO.setup(7, GPIO.OUT)
    pwm = GPIO.PWM(7, frequencyHertz)
    started = False
    for _cycle in range(i):
        for position in positionList:
            dutyCyclePercentage = position * 100 / msPerCycle
            if not started:
                pwm.start(dutyCyclePercentage)
                started = True
            else:
                pwm.ChangeDutyCycle(dutyCyclePercentage)
            time.sleep(2)
    pwm.stop()
    GPIO.cleanup()
def write_file(file, n):
    """Overwrite *file* with the string representation of *n*."""
    with open(file, 'w') as handle:
        handle.write(str(n))
# Crude lock-file protocol: "0" means idle, anything else means a cycle is
# already running.  NOTE(review): not race-free -- no flock/atomic rename;
# two processes can both read "0" before either writes "1".
if (os.path.isfile(tmpfile)):
    print("File found")
    f = open(tmpfile, 'r+')
    if (f.read() == "0"):
        print("File contains 0, writing 1 and running cycle")
        # BUGFIX: after read() the file position sits at EOF, so the plain
        # write appended ("01") instead of replacing the contents; rewind
        # and truncate so the file really holds just "1".
        f.seek(0)
        f.truncate()
        f.write("1")
        f.close()
        run_cycle(2)
        print("Cycle done, writing 0")
        write_file(tmpfile, "0")
    else:
        print("File contains 1, stopping script")
        f.close()
else:
    print("File did not exist, writing 1 and starting cycle")
    write_file(tmpfile, "1")
    run_cycle(2)
    print("Cycle done, writing 0")
    write_file(tmpfile, "0")
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'main.ui'
#
# Created by: PyQt5 UI code generator 5.15.4
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_WinGetGui(object):
    """pyuic5-generated UI for the WinGetGui main window: a package table,
    a log text browser, action buttons and a show-mode radio group.

    Do not hand-edit -- regenerate from main.ui (see file header warning).
    """

    def setupUi(self, WinGetGui):
        """Build the widget tree and wire it to *WinGetGui* (a QMainWindow)."""
        WinGetGui.setObjectName("WinGetGui")
        WinGetGui.resize(1033, 653)
        self.centralwidget = QtWidgets.QWidget(WinGetGui)
        self.centralwidget.setObjectName("centralwidget")
        # Log/output area below the package table.
        self.textBrowser = QtWidgets.QTextBrowser(self.centralwidget)
        self.textBrowser.setGeometry(QtCore.QRect(10, 430, 781, 121))
        self.textBrowser.setObjectName("textBrowser")
        # Package table: Id / Name / Version / Latest columns.
        self.tableWidget = QtWidgets.QTableWidget(self.centralwidget)
        self.tableWidget.setGeometry(QtCore.QRect(10, 10, 1001, 411))
        self.tableWidget.setColumnCount(4)
        self.tableWidget.setObjectName("tableWidget")
        self.tableWidget.setRowCount(0)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(2, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(3, item)
        # Bottom row of action buttons.
        self.horizontalLayoutWidget = QtWidgets.QWidget(self.centralwidget)
        self.horizontalLayoutWidget.setGeometry(QtCore.QRect(10, 560, 1001, 51))
        self.horizontalLayoutWidget.setObjectName("horizontalLayoutWidget")
        self.horizontalLayout = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget)
        self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.pb_refresh = QtWidgets.QPushButton(self.horizontalLayoutWidget)
        self.pb_refresh.setObjectName("pb_refresh")
        self.horizontalLayout.addWidget(self.pb_refresh)
        self.pb_update = QtWidgets.QPushButton(self.horizontalLayoutWidget)
        self.pb_update.setObjectName("pb_update")
        self.horizontalLayout.addWidget(self.pb_update)
        self.pb_remove = QtWidgets.QPushButton(self.horizontalLayoutWidget)
        self.pb_remove.setObjectName("pb_remove")
        self.horizontalLayout.addWidget(self.pb_remove)
        self.pb_add_to_ignore = QtWidgets.QPushButton(self.horizontalLayoutWidget)
        self.pb_add_to_ignore.setObjectName("pb_add_to_ignore")
        self.horizontalLayout.addWidget(self.pb_add_to_ignore)
        # Show-mode radio group (updates / installed / ignore list).
        self.groupBox = QtWidgets.QGroupBox(self.centralwidget)
        self.groupBox.setGeometry(QtCore.QRect(810, 430, 201, 121))
        self.groupBox.setObjectName("groupBox")
        self.rb_updates = QtWidgets.QRadioButton(self.groupBox)
        self.rb_updates.setGeometry(QtCore.QRect(20, 30, 131, 21))
        self.rb_updates.setObjectName("rb_updates")
        self.rb_installed = QtWidgets.QRadioButton(self.groupBox)
        self.rb_installed.setGeometry(QtCore.QRect(20, 60, 131, 21))
        self.rb_installed.setObjectName("rb_installed")
        self.rb_ignore_list = QtWidgets.QRadioButton(self.groupBox)
        self.rb_ignore_list.setGeometry(QtCore.QRect(20, 90, 104, 21))
        self.rb_ignore_list.setObjectName("rb_ignore_list")
        WinGetGui.setCentralWidget(self.centralwidget)
        self.menubar = QtWidgets.QMenuBar(WinGetGui)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 1033, 27))
        self.menubar.setObjectName("menubar")
        WinGetGui.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(WinGetGui)
        self.statusbar.setObjectName("statusbar")
        WinGetGui.setStatusBar(self.statusbar)

        self.retranslateUi(WinGetGui)
        QtCore.QMetaObject.connectSlotsByName(WinGetGui)

    def retranslateUi(self, WinGetGui):
        """Assign all user-visible strings (translation hook)."""
        _translate = QtCore.QCoreApplication.translate
        WinGetGui.setWindowTitle(_translate("WinGetGui", "WinGetGui"))
        item = self.tableWidget.horizontalHeaderItem(0)
        item.setText(_translate("WinGetGui", "Id"))
        item = self.tableWidget.horizontalHeaderItem(1)
        item.setText(_translate("WinGetGui", "Name"))
        item = self.tableWidget.horizontalHeaderItem(2)
        item.setText(_translate("WinGetGui", "Version"))
        item = self.tableWidget.horizontalHeaderItem(3)
        item.setText(_translate("WinGetGui", "Latest"))
        self.pb_refresh.setText(_translate("WinGetGui", "Refresh"))
        self.pb_update.setText(_translate("WinGetGui", "Update Selected"))
        self.pb_remove.setText(_translate("WinGetGui", "Remove Selected"))
        self.pb_add_to_ignore.setText(_translate("WinGetGui", "Add to ignore list"))
        self.groupBox.setTitle(_translate("WinGetGui", "Show Mode"))
        # NOTE(review): "Avalible" is a typo for "Available" -- fix it in
        # main.ui and regenerate; editing it here would be lost on re-run.
        self.rb_updates.setText(_translate("WinGetGui", "Avalible Updates"))
        self.rb_installed.setText(_translate("WinGetGui", "All installed apps"))
        self.rb_ignore_list.setText(_translate("WinGetGui", "Ignore List"))
|
if __name__ == '__main__':
    # Micro-benchmark: compare plain brute force against simplify-then-
    # brute-force on a fixed puzzle matrix (requires the project-local
    # Graph module).
    import timeit

    # Setup executed once per timeit run.
    setup = '''
from Graph import Graph
t = [
[4, 4, 4, 4, 4],
[4, 2, 2, 2, 4],
[4, 2, 0, 4, 4],
[4, 2, 2, 4, 4],
[4, 4, 4, 6, 8],
[4, 4, 4, 4, 4],
[4, 4, 4, 4, 4],
[4, 4, 4, 4, 4],
[4, 4, 4, 4, 4],
[4, 4, 4, 4, 4],
[4, 4, 4, 4, 4]]
g = Graph()
g.from_matrix(t)
g.check_for_end()
'''
    testBrute = '''
g.brute_force()
g.reset()
'''
    testSimplify = '''
g.simplify()
g.brute_force()
g.reset()
'''
    its = 1
    repeats = 1
    # Converted from py2-only print statements to version-agnostic calls.
    print('Average of {} iterations, best of {}:'.format(its, repeats))
    print(' Brute force: {:0.6f}s/iteration'.format(
        min(timeit.repeat(testBrute, setup=setup, number=its, repeat=repeats))/its))
    print(' Simplify + brute force: {:0.6f}s/iteration'.format(
        min(timeit.repeat(testSimplify, setup=setup, number=its, repeat=repeats))/its))
|
from functools import wraps
def print_function_data(function):
    """Decorator: announce the wrapped function's name and docstring on
    every call, then delegate to it unchanged."""
    @wraps(function)
    def wrapper(*args, **kwargs):
        print(f"You are calling {function.__name__} function")
        print(f"{function.__doc__}")
        result = function(*args, **kwargs)
        return result
    return wrapper
@print_function_data
def add(a, b):
    '''This function takes numbers as arguments and return their sum'''
    total = a + b
    return total


print(add(4, 5))
|
# -*- coding: utf-8 -*-
# @Time : 2019/5/16 3:15 PM
# @Author : Shande
# @Email : seventhedog@163.com
# @File : auth.py
# @Software: PyCharm
from functools import wraps
from flask import session, jsonify
def is_login(view_func):
    """Decorator that checks the user's login state via the session:
    logged-in requests pass through, anonymous ones get a 403 JSON error."""
    @wraps(view_func)
    def wrapper(*args, **kwargs):
        if session.get("user_name"):
            return view_func(*args, **kwargs)
        return jsonify(errno=403, errmsg='用户未登录')
    return wrapper
|
# Print the first `lim` Fibonacci numbers, space separated (1 1 2 3 5 ...).
lim = int(input())
prev = 0
curr = 1
for _ in range(lim):  # was range(0, lim); the 0 start is the default
    print(curr, end=" ")
    # Advance the pair with tuple assignment -- the original used a
    # temporary named ``sum``, shadowing the builtin.
    prev, curr = curr, prev + curr
|
from rsf.proj import *
from decimal import Decimal
'''
This script and its data plot the cross-correlogram in Figure 4 of the paper "Urban Near-surface Seismic Monitoring using Distributed Acoustic Sensing" by Gang Fang, Yunyue Elita Li, Yumin Zhao and Eileen R. Martin.
To run this script, you need to install the Madagascar software (www.ahay.org).
'''
def wiggle(name, min1, max1, min2, max2, par):
    """Build an sfwiggle command string for a cross-correlogram panel.

    :param name: plot title
    :param min1/max1: time-lag axis bounds (sec)
    :param min2/max2: channel-number axis bounds
    :param par: extra parameter text appended verbatim
    NOTE(review): grid1/grid2 each appear twice in the command (n then y);
    presumably the later value wins -- TODO confirm intended.
    """
    return '''
wiggle title=%s grid1=n grid2=n
font=2 titlefat=2 labelfat=2 gridfat=3 labelsz=7 titlesz=7 zplot=0.8
label1='Time lag (sec)' label2='Channel Number'
screenratio=2.5 grid1=y grid2=y xll=3 yreverse=n
pclip=97 g1num0=0 g1num=2
min1=%g max1=%g min2=%g max2=%g
'''%(name, min1, max1, min2, max2) + par
def graph(name, min1, max1, min2, max2, par):
    """Build an sfgraph command string for overlaying a picked curve.

    Bug fix: the original interpolated the module globals ``mint``/``maxt``
    instead of the ``min1``/``max1`` parameters, so those arguments were
    silently ignored; the parameters are used now.

    `min1`/`max1` bound the (transposed) time-lag axis, `min2`/`max2` the
    channel axis; `par` is appended verbatim.
    """
    return '''
    graph title=%s
    transp=y screenratio=2.5 grid1=n grid2=n
    xll=3 yreverse=n
    min2=%g max2=%g min1=%d max1=%d
    wanttitle=n wantaxis=n plotfat=4
    ''' % (name, min1, max1, min2, max2) + par
# for cross-correlation at 2016/10/12
# Cross-correlation trace geometry; Decimal keeps ccot (the negative-lag
# origin, -ccnt/2*ccdt) exact.
ccnt = Decimal('3004')   # number of lag samples
ccdt = Decimal('0.02')   # lag sampling interval (s)
ccot = -1*ccnt/2*ccdt
mint = -1.5              # displayed time-lag window (s)
maxt = 1.5
minfch = 160             # channel range shown for the front array
maxfch = 185
minbch = 108             # channel range shown for the back array
maxbch = 130
# select array
trst_front = 300         # first trace selected, front array
trnum_front = 60         # number of traces, front array
trst_back = 204          # first trace selected, back array
trnum_back = 63          # number of traces, back array
trst_frontshow = 156     # o1 origins used when plotting picked time lags
trst_backshow = 107
# plot parameters for front and back array
ccwigparf = " label1='Time lag (sec)' label2='Channel Number' screenratio=2.5 grid1=y grid2=y xll=3 yreverse=y min1=%g max1=%g pclip=97 g1num0=0 g1num=2 "%(mint,maxt)
ccwigparb = " label1='Time lag (sec)' label2='Channel Number' screenratio=2.5 grid1=y grid2=y xll=3 yreverse=n min1=%g max1=%g pclip=97 g1num0=0 g1num=2 "%(mint,maxt)
# Plot Cross-correlation for 2016/10/12, front array
Plot('SDAS20161012_FCCputw','SDAS20161012_FCCput',
'''
wiggle title='2016/10/12' grid1=n grid2=n
font=2 titlefat=2 labelfat=2 gridfat=3 labelsz=7 titlesz=7 zplot=0.8
label1='Time lag (sec)' label2='Channel Number'
screenratio=2.5 grid1=y grid2=y xll=3 yreverse=y
pclip=97 g1num0=0 g1num=2
min1=%g max1=%g min2=%g max2=%g
'''%(mint, maxt, minfch, maxfch))
Plot('SDAS20161012_tlag_front', 'SDAS20161012_tlag_front',
'''
put d1=0.5 o1=%d |
smooth rect1=4 repeat=5 |
graph title='2016/10/12'
transp=y screenratio=2.5 grid1=n grid2=n
xll=3 plotcol=5 yreverse=y
min2=%g max2=%g min1=%d max1=%d
wanttitle=n wantaxis=n plotfat=4
'''%(trst_frontshow, mint, maxt, minfch, maxfch))
Plot('SDAS20161012_tlag_ref_front', 'SDAS20161012_tlag_ref_front',
'''
put d1=0.5 o1=%d |
graph title='2016/10/12'
transp=y screenratio=2.5 grid1=n grid2=n
xll=3 yreverse=y plotcol=3 dash=2
min2=%g max2=%g min1=%d max1=%d
wanttitle=n wantaxis=n plotfat=4
'''%(trst_frontshow, mint, maxt, minfch, maxfch))
Result('SDAS20161012_tlag_front_FCC', 'SDAS20161012_FCCputw SDAS20161012_tlag_front SDAS20161012_tlag_ref_front', 'Overlay')
# Plot Cross-correlation for 2016/10/12, back array
Plot('SDAS20161012_BCCputw', 'SDAS20161012_BCCput',
'''
wiggle title='2016/10/12' grid1=n grid2=n
font=2 titlefat=2 labelfat=2 gridfat=3 labelsz=7 titlesz=7 zplot=0.8
label1='Time lag (sec)' label2='Channel Number'
screenratio=2.5 grid1=y grid2=y xll=3 yreverse=n
pclip=97 g1num0=0 g1num=2
min1=%g max1=%g min2=%g max2=%g
'''%(mint, maxt, minbch, maxbch))
# Plot picked time lag
Plot('SDAS20161012_tlag_back','SDAS20161012_tlag_back',
'''
put d1=0.5 o1=%d |
smooth rect1=5 repeat=3 |
graph title='2016/10/12'
transp=y screenratio=2.5 grid1=n grid2=n
xll=3 plotcol=5 yreverse=n
min2=%g max2=%g min1=%d max1=%d
wanttitle=n wantaxis=n plotfat=4
'''%(trst_backshow, mint, maxt, minbch, maxbch))
# reference time lag
Plot('SDAS20161012_tlag_ref_back', 'SDAS20161012_tlag_ref_back',
'''
put d1=0.5 o1=%d |
graph title='2016/10/12'
transp=y screenratio=2.5 grid1=n grid2=n
xll=3 plotcol=3 dash=2 yreverse=n
min2=%g max2=%g min1=%d max1=%d
wanttitle=n wantaxis=n plotfat=4
'''%(trst_backshow, mint, maxt, minbch, maxbch))
Result('SDAS20161012_tlag_back_FCC', 'SDAS20161012_BCCputw SDAS20161012_tlag_back SDAS20161012_tlag_ref_back', 'Overlay')
# Plot Cross-correlation for 2016/11/21, front array
trst_frontshow = 156
trst_backshow = 138
Plot('SDAS20161121_FCCputw','SDAS20161121_FCCput',
     '''
     wiggle title='2016/11/21' grid1=n grid2=n
     font=2 titlefat=2 labelfat=2 gridfat=3 labelsz=7 titlesz=7 zplot=0.8
     label1='Time lag (sec)' label2='Channel Number'
     screenratio=2.5 grid1=y grid2=y xll=3 yreverse=y
     pclip=97 g1num0=0 g1num=2
     min1=%g max1=%g min2=%g max2=%g
     '''%(mint, maxt, minfch, maxfch))
# Picked time-lag curve (smoothed), front array
Plot('SDAS20161121_tlag_front', 'SDAS20161121_tlag_front',
     '''
     put d1=0.5 o1=%d |
     smooth rect1=4 repeat=5 |
     graph title='2016/11/21'
     transp=y screenratio=2.5 grid1=n grid2=n
     xll=3 plotcol=5 yreverse=y
     min2=%g max2=%g min1=%d max1=%d
     wanttitle=n wantaxis=n plotfat=4
     '''%(trst_frontshow, mint, maxt, minfch, maxfch))
# Reference time-lag curve (dashed), front array
Plot('SDAS20161121_tlag_ref_front', 'SDAS20161121_tlag_ref_front',
     '''
     put d1=0.5 o1=%d |
     graph title='2016/11/21'
     transp=y screenratio=2.5 grid1=n grid2=n
     xll=3 yreverse=y plotcol=3 dash=2
     min2=%g max2=%g min1=%d max1=%d
     wanttitle=n wantaxis=n plotfat=4
     '''%(trst_frontshow, mint, maxt, minfch, maxfch))
Result('SDAS20161121_tlag_front_FCC', 'SDAS20161121_FCCputw SDAS20161121_tlag_front SDAS20161121_tlag_ref_front', 'Overlay')
# Plot Cross-correlation for 2016/11/21, back array
Plot('SDAS20161121_BCCputw', 'SDAS20161121_BCCput',
     '''
     wiggle title='2016/11/21' grid1=n grid2=n
     font=2 titlefat=2 labelfat=2 gridfat=3 labelsz=7 titlesz=7 zplot=0.8
     label1='Time lag (sec)' label2='Channel Number'
     screenratio=2.5 grid1=y grid2=y xll=3 yreverse=n
     pclip=97 g1num0=0 g1num=2
     min1=%g max1=%g min2=%g max2=%g
     '''%(mint, maxt, minbch, maxbch))
# Plot picked time lag
# Fix: title read '2016/10/12' (copy-paste from the earlier panel); this is
# the 2016/11/21 figure.  (Title is hidden by wanttitle=n, but kept correct.)
Plot('SDAS20161121_tlag_back', 'SDAS20161121_tlag_back',
     '''
     put d1=-0.5 o1=%d |
     smooth rect1=5 repeat=3 |
     graph title='2016/11/21'
     transp=y screenratio=2.5 grid1=n grid2=n
     xll=3 plotcol=5 yreverse=n
     min2=%g max2=%g min1=%d max1=%d
     wanttitle=n wantaxis=n plotfat=4
     '''%(trst_backshow, mint, maxt, minbch, maxbch))
# reference time lag
# NOTE(review): d1=-0.5 here vs d1=0.5 on the 2016/10/12 panels, and
# yreverse=y below vs yreverse=n on the companion curve above — confirm
# both are intended for this array.
Plot('SDAS20161121_tlag_ref_back','SDAS20161121_tlag_ref_back',
     '''
     put d1=-0.5 o1=%d |
     graph title='2016/11/21'
     transp=y screenratio=2.5 grid1=n grid2=n
     xll=3 plotcol=3 dash=2 yreverse=y
     min2=%g max2=%g min1=%d max1=%d
     wanttitle=n wantaxis=n plotfat=4
     '''%(trst_backshow, mint, maxt, minbch, maxbch))
Result('SDAS20161121_tlag_back_FCC', 'SDAS20161121_BCCputw SDAS20161121_tlag_back SDAS20161121_tlag_ref_back', 'Overlay')
|
def minimumBribes(q):
    """Print (and return) the minimum number of bribes that produced queue `q`.

    `q` is a 1-indexed permutation stored in a 0-indexed list: person i+1
    started at index i.  Walking from the back, each displaced person is
    bubbled home at a cost of one bribe per position moved.  Prints
    "Too chaotic" and returns None if anyone moved more than two places
    forward (illegal).  Note: `q` is modified in place.

    Changes vs. original: removed the unused `check` variable and guarded
    the two-place lookup with ``x >= 2`` so ``q[x-2]`` can never wrap to
    ``q[-1]``.
    """
    bribes = 0
    for x in range(len(q) - 1, 0, -1):
        if q[x] == x + 1:
            continue  # person x+1 already home
        if q[x - 1] == x + 1:
            # Moved one place forward: one bribe, swap home.
            bribes += 1
            q[x - 1], q[x] = q[x], q[x - 1]
        elif x >= 2 and q[x - 2] == x + 1:
            # Moved two places forward: two bribes, rotate home.
            bribes += 2
            q[x - 2], q[x - 1], q[x] = q[x - 1], q[x], q[x - 2]
        else:
            # More than two places forward is impossible with legal bribes.
            print("Too chaotic")
            return
    print(bribes)
    return bribes
'''
2 1 5 3 4
2 5 1 3 4
5 1 2 3 7 8 6 4
1 2 5 3 7 8 6 4
'''
|
"""
2. Написать функцию square, принимающую 1 аргумент — сторону квадрата,
и возвращающую 3 значения (с помощью кортежа): периметр квадрата,
площадь квадрата и диагональ квадрата.
"""
def square(a):
    """Return the perimeter, area, and diagonal of a square with side `a`.

    Fix: the exercise statement above asks for three values returned as a
    tuple; the original returned one pre-formatted string instead.
    """
    return (a * 4, a ** 2, a * pow(2, 1 / 2))
print(square(10))
|
"""Encapsulates requests to challenge API"""
import requests
class ChallengeApi(object):
    """Performs requests to challenge API"""
    BASE_URL = 'http://challenge.curbside.com'
    SESSION_URL = BASE_URL + '/get-session'
    START_URL = BASE_URL + '/start'

    def get_session(self):
        """Retrieve a new session ID"""
        payload = self.__get_json(ChallengeApi.SESSION_URL)
        return payload['session']

    def get_start(self, session_id):
        """Initial GET request to API to begin the challenge."""
        start = self.__get_json(ChallengeApi.START_URL, session_id=session_id)
        return start['id'], start['next']

    def get_next(self, next_id, session_id):
        """GET request to retrieve node that has an id = next_id"""
        next_url = ChallengeApi.BASE_URL + '/' + next_id
        return self.__get_json(next_url, session_id=session_id)

    def __get_json(self, url, session_id=None):
        """Reusable method to create GET request to challenge API using session_id."""
        response = requests.get(url, headers={'Session': session_id})
        if response.status_code == 200:
            return response.json()
        raise Exception('Out of time!')
|
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import os
from jinja2 import Template
import requests
class MailGunTools(object):
    """Thin helper around the Mailgun HTTP API plus Jinja2 email templating."""

    def __init__(self, domain=None, api_key=None, root=None):
        # `configured` stays False unless both credentials are supplied;
        # send_email() refuses to run otherwise.
        self.configured = False
        if domain and api_key:
            self.domain = domain
            self.api_key = api_key
            self.root = root  # project root used to locate email templates
            self.configured = True

    def send_email(self, to, mime):
        """POST a prebuilt MIME message to Mailgun; returns the requests response."""
        if not self.configured:
            raise Exception('MailGunTools not configured.')
        return requests.post(
            'https://api.mailgun.net/v2/' + self.domain + '/messages.mime',
            auth=('api', self.api_key),
            data={"to": to},
            files={'message': mime.as_string()}
        )

    def create_email(self, template_name, data):
        """Render templates/emails/<template_name> with `data` into a MIME email.

        Fixes: the original wrapped the file read and the render in
        Python-2-only ``except Exception, e: raise e`` blocks that merely
        re-raised — removed (same behavior, valid Python 3) — and the
        template file is now closed via ``with``.
        """
        with open(os.path.join(self.root, 'templates/emails/') + template_name) as f:
            template = f.read()
        email = MIMEMultipart('alternative')
        email.attach(MIMEText(Template(template).render(data), 'html'))
        return email
|
import math
"""Point Class, used for targets and center points of FOV's"""
class Point:
    """A sky position (right ascension / declination) used both as a target
    and as a candidate FOV centre.

    Declination is stored with +90 added so coordinates span [0, 360)
    degrees in right ascension and [0, 180] degrees in declination, which
    keeps distancing simple.
    """

    def __init__(self, right_ascension, declination):
        self.right_ascension = right_ascension
        self.declination = declination + 90
        # radius -> list of groups (lists) of Points sharing one FOV
        self.points_in_fov = dict()

    def in_fov_at_point(self, radius, point):
        """True if `point` lies within a FOV of the given radius centred here."""
        return self.euclidean_distance(point) <= 2 * radius

    def euclidean_distance(self, point):
        """Plain Euclidean distance between self and `point` (no wrap-around)."""
        return math.sqrt((self.right_ascension - point.right_ascension) ** 2
                         + (self.declination - point.declination) ** 2)

    def add_target_if_fov(self, radius, point):
        """Add `point` to the FOV groupings for `radius` if it is in range.

        Bug fixes versus the original:
        * ``if radius is self.points_in_fov.keys()`` was always False
          (identity test against a fresh view object) — now uses ``in``,
          so the grouping branch can actually run;
        * the loop iterated ``list.append(...)`` (i.e. None) and would
          crash — it now iterates the existing groups;
        * ``checker`` is reset for every group (the original only reset it
          on one branch, inflating later comparisons).
        NOTE(review): the trailing ``+ [self]`` extends the group list with
        a bare Point rather than a group; kept for fidelity — confirm intent.
        """
        if not self.in_fov_at_point(radius, point):
            return False
        if radius in self.points_in_fov:
            to_add = []
            for group in self.points_in_fov[radius]:
                checker = [m for m in group if m.in_fov_at_point(radius, point)]
                if len(checker) == len(group):
                    group.append(point)  # fits with every member: join group
                else:
                    checker.append(point)  # start a new group from the subset
                    to_add.append(checker)
            self.points_in_fov[radius].extend(to_add + [self])
        else:
            self.points_in_fov[radius] = [[point, self]]
        return True

    def __eq__(self, other):
        """Points are equal when both stored coordinates match."""
        return (self.right_ascension == other.right_ascension
                and self.declination == other.declination)

    def __ne__(self, other):
        """Inverse of __eq__."""
        return (self.right_ascension != other.right_ascension
                or self.declination != other.declination)
"""FOVNode Class, gives node that contains given targets and is located at a center point"""
class FOVNode:
    """Graph node for one field of view: a centre point plus the targets it contains."""

    def __init__(self, center, targets):
        # Adjacency starts empty; edges are attached later by the graph builder.
        self.center = center
        self.targets = targets
        self.adjacency_list = []
|
import csv
import networkx as nx
from datetime import datetime, timedelta
def get_dubl(flights_number, time_in_city, multidigraph_check):
    """Find passengers with suspicious itineraries in new_result.csv.

    Returns names with more than `flights_number` flights whose consecutive
    connections all occur within `time_in_city` hours; when
    `multidigraph_check` is truthy, only names that flew the same leg more
    than once (parallel edges) are returned.

    CSV columns (0-based), inferred from the indexing below — confirm
    against the file producer: 0-2 name parts, 3 origin, 4 destination,
    5 date, 6 time.
    """
    counter = {}                 # name -> total flight count
    flights_for_every_name = {}  # name -> {departure "date time" -> row}
    with open(f'new_result.csv', "r", newline="") as file:
        reader = csv.reader(file)
        for row in reader:
            # Middle name may be empty; fall back to "first last".
            name = (row[0] + ' ' + row[1] + ' ' + row[2]).lower()
            if not row[2]:
                name = (row[0] + ' ' + row[1]).lower()
            counter[name] = counter.get(name, 0) + 1
            from_time = row[5]+' '+row[6]
            if flights_for_every_name.get(name):
                flights_info = flights_for_every_name[name]
                flights_info[from_time] = [r.lower() for r in row]
                flights_for_every_name[name] = flights_info
            else:
                flights_for_every_name[name] = {from_time: [r.lower() for r in row]}
    filtered_names = [el for el, count in counter.items() if count > flights_number]
    tic_filtered_names = []  # passed the time-in-city check
    mdg_filtered_names = []  # additionally flew a duplicated leg
    for filtered_name in filtered_names:
        # One multigraph per person: nodes are cities, edges are flights;
        # 'rad' offsets parallel edges (0.1 for the first, +0.1 per repeat).
        G = nx.MultiDiGraph()
        rd = flights_for_every_name[filtered_name]
        for number, row in enumerate(sorted(rd.keys())):
            if (rd[row][3], rd[row][4]) in [(edge[0], edge[1]) for edge in G.edges]:
                rad = [edge[2]['rad'] for edge in G.edges(data=True) if (edge[0], edge[1]) == (rd[row][3], rd[row][4])]
                current_rad = max(rad) + 0.1
                G.add_edge(rd[row][3], rd[row][4], rad=current_rad, label=number, from_time=row)
            else:
                G.add_edge(rd[row][3], rd[row][4], rad=0.1, label=number, from_time=row)
        edges_info = G.edges(data=True)
        sorted_edges_info = sorted(edges_info, key=lambda edge: edge[2]['from_time'])
        timedelta_flag = True
        for label, edge in enumerate(sorted_edges_info):
            if label < len(sorted_edges_info) - 1:
                # Only consecutive legs that actually connect (arrival city
                # equals the next departure city) are time-checked.
                if edge[1] == sorted_edges_info[label + 1][0]:
                    dt_fmt = '%Y-%m-%d %H:%M'
                    dt_s1 = edge[2]['from_time']
                    dt_s2 = sorted_edges_info[label + 1][2]['from_time']
                    dt1 = datetime.strptime(dt_s1, dt_fmt)
                    dt2 = datetime.strptime(dt_s2, dt_fmt)
                    delta = dt2 - dt1
                    if int(delta.total_seconds())//3600 > time_in_city:
                        timedelta_flag = False
        if timedelta_flag:
            tic_filtered_names.append(filtered_name.title())
        multidigraph_flag = False
        if multidigraph_check:
            # rad > 0.1 marks a parallel (repeated) leg.
            for edge in sorted_edges_info:
                if edge[2]['rad'] > 0.1:
                    multidigraph_flag = True
        if timedelta_flag and multidigraph_flag:
            mdg_filtered_names.append(filtered_name.title())
    if multidigraph_check:
        return mdg_filtered_names
    else:
        return tic_filtered_names
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that an Info.plist with CFBundleSignature works.
"""
import TestGyp
import sys
# Mac-only gyp regression test, so everything runs under the darwin guard.
if sys.platform == 'darwin':
  # Disabled upstream; exiting 0 makes the harness record a pass.
  print "This test is currently disabled: https://crbug.com/483696."
  sys.exit(0)

  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
  test.run_gyp('test.gyp', chdir='missing-cfbundlesignature')
  test.build('test.gyp', test.ALL, chdir='missing-cfbundlesignature')
  # With CFBundleSignature missing from Info.plist, each bundle's PkgInfo
  # must fall back to the default 'APPL????' signature bytes.
  test.built_file_must_match('mytarget.app/Contents/PkgInfo', 'APPL????',
                             chdir='missing-cfbundlesignature')
  test.built_file_must_match('myothertarget.app/Contents/PkgInfo', 'APPL????',
                             chdir='missing-cfbundlesignature')
  test.built_file_must_match('thirdtarget.app/Contents/PkgInfo', 'APPL????',
                             chdir='missing-cfbundlesignature')
  test.pass_test()
|
# Fix: the original named this variable `list`, shadowing the builtin.
names = ["yogesh", "vicky", 'vaibhav']
if "vaibhav" in names:
    print("yay,vaibhav is in the list")
a = [1, 2, 3]
if 2 in a:
    print("Yes 2 is present")
|
Python 3.9.0 (tags/v3.9.0:9cf6752, Oct 5 2020, 15:34:40) [MSC v.1927 64 bit (AMD64)] on win32
Type "help", "copyright", "credits" or "license()" for more information.
>>> import turtle
>>> t=turtle.Turtle()
>>> t.shape("turtle")
>>> t.pensize(5)
>>> t.color("red")
>>> t.fillcolor("blue")
>>> t.begin_fill()
>>> t.circle(100)
>>> t.end_fill()
>>> t.penup()
>>> t.goto(-100,300)
>>> t.goto(-200,-300)
>>> t.pendownd()
Traceback (most recent call last):
File "<pyshell#12>", line 1, in <module>
t.pendownd()
AttributeError: 'Turtle' object has no attribute 'pendownd'
>>> t.pendown()
>>> t.color("blue")
>>> t.fillcolor("red")
>>> t.begin_fill()
>>> t.circle(150)
>>> t.end_fill()
>>> t.penup()
>>> t.goto(200,-300)
>>> t.color("yellow")
>>> t.fillcolor("green")
>>> t.pendown()
>>> t.begin_fill()
>>> t.circle(150)
>>> t.end_fill()
>>>
|
from os import path
from game.base.enemy import Enemy
from game.constants import *
from game.entities.camera import Camera
from game.entities.blast import Blast
from game.util import *
class ButtaBomber(Enemy):
    # Kamikaze-style enemy: drifts in X/Y, rushes forward until near the
    # player, flashes between colors, and detonates into a Blast after
    # being hurt.
    NB_FRAMES = 1          # frames in the sprite sheet
    DEFAULT_SCALE = 10

    def __init__(
        self,
        app,
        scene,
        pos,
        color=ORANGE,
        scale=DEFAULT_SCALE,
        num=0,
        ai=None,
        **kwargs
    ):
        """
        :param app: our main App object
        :param scene: Current scene (probably Game)
        :param color: RGB tuple
        :param scale:
        """
        super().__init__(app, scene, position=pos, ai=ai, **kwargs)
        self.num = num
        self.frames = self.get_animation(color)
        size = self.frames[0].get_size()
        self.collision_size = self.size = vec3(*size, min(size))
        self.hp = 10
        self.speed = 100
        self.time = 0
        self.frame = 0
        self.damage = 3
        # drift slightly in X/Y plane
        self.velocity = (
            vec3(random.random() - 0.5, random.random() - 0.5, 0) * random.random() * 2
        )
        # NOTE(review): self.injured here is the bound generator method;
        # hurt() later overwrites the attribute with a bool, and injured()
        # itself assigns False.  The script list already holds the bound
        # method so behavior holds, but the shadowing is fragile — consider
        # renaming one of the two.
        self.scripts += [self.injured, self.approach]

    def get_animation(self, color="red"):
        # Palette-swapped sprite frames, memoized per color in app.cache.
        cache_id = ("buttabomber.gif:frames", color)
        if cache_id in self.app.cache:
            return self.app.cache[cache_id]
        color = pg_color(color)
        filename = path.join(SPRITES_DIR, "buttabomber.gif")
        # load an image if its not already in the cache, otherwise grab it
        image: pygame.SurfaceType = self.app.load_img(filename)
        brighter = color
        darker = pygame.Color("darkred")
        very_darker = pygame.Color("black")
        palette = [(1, 0, 1), (0, 0, 0), brighter, darker, very_darker]
        image.set_palette(palette)
        image.set_colorkey((1, 0, 1))  # index 0
        self.width = image.get_width() // self.NB_FRAMES
        self.height = image.get_height()
        frames = [
            image.subsurface((self.width * i, 0, self.width, self.height))
            for i in range(self.NB_FRAMES)
        ]
        self.app.cache[cache_id] = frames
        return frames

    # def fall(self):
    #     self.frames = self.get_animation(pygame.Color("gray"))
    #     self.velocity = -Y * 100
    #     self.life = 2  # remove in 2 seconds
    #     self.alive = False

    def blast(self):
        # Detonate: stop all behavior scripts, grey out, spawn a short-lived
        # Blast at our position, then remove ourselves from the scene.
        self.scripts.clear()
        self.frames = self.get_animation(GRAY)
        self.scene.add(
            Blast(
                self.app,
                self.scene,
                2,  # radius
                "white",
                1,  # damage
                200,  # spread
                position=self.position,
                velocity=self.velocity,
                life=0.2,
            ),
        )
        self.remove()

    def kill(self, damage, bullet, player):
        # NOTE(review): remove() is called before the alive check — if
        # remove() clears `alive`, callers always get False; confirm the
        # intended return contract.
        self.remove()
        if not self.alive:
            return False
        return True

    def hurt(self, damage, bullet, player):
        # Flag as injured (read by the injured() script), detonate, shift
        # position, then defer to the base class damage handling.
        self.injured = True
        self.blast()
        # NOTE(review): scalar added to a vec3 position — confirm the
        # intended offset (adds 100 on every component if glm broadcasts).
        self.position += 100
        # bullet.remove()
        return super().hurt(damage, bullet, player)

    def update(self, dt):
        # Advance the animation clock, then standard entity update.
        self.time += dt
        super().update(dt)

    def injured(self, script):
        # Behavior script: once hurt, lunge toward the player, flash
        # yellow/purple for a while, then blast.
        self.injured = False
        yield
        while self.alive:
            if self.injured:
                player = self.app.state.player
                if player and player.alive:
                    to_player = glm.normalize(player.position - self.position)
                    self.velocity = vec3(nrand(100), nrand(100), 0)
                    self.velocity += to_player * 100
                    # ppos = self.scene.player.position
                    # pvel = self.scene.player.velocity
                    # v = ppos - self.position
                    # d = glm.length(v)
                    # self.velocity = to_player * nrand(20)
                for x in range(100):
                    self.frames = self.get_animation("yellow")
                    yield script.sleep(0.1)
                    self.frames = self.get_animation("purple")
                    yield script.sleep(0.1)
                self.blast()
                return
            yield

    # def charge(self, script):
    #     """
    #     Behavior script: Charge towards player randomly
    #     """
    #     yield  # no call during entity ctor
    #     while True:
    #         # print('charge')
    #         player = self.app.state.player
    #         if player and player.alive:
    #             to_player = player.position - self.position
    #             if glm.length(to_player) < 3000:  # wihin range
    #                 to_player = player.position - self.position
    #                 self.velocity = glm.normalize(to_player) * 200
    #         yield

    def render(self, camera: Camera):
        # Cull once we have passed behind the camera.
        if self.position.z > camera.position.z:
            self.remove()
            return
        surf = self.frames[int(self.time + self.num) % self.NB_FRAMES]
        super(ButtaBomber, self).render(camera, surf)

    def __call__(self, script):
        # Default script: idle color flashing while not injured.
        self.activated = False
        yield
        while True:
            for color in ["darkred", "white"]:
                if not self.injured:
                    # if self.activated:
                    self.frames = self.get_animation(color)
                    yield script.sleep(0.25)
            yield

    def approach(self, script):
        # Behavior script: rush forward on Z until within range of the
        # player, then jitter the velocity and stop steering.
        yield
        self.velocity = Z * 4000
        while True:
            yield script.sleep(0.2)
            ppos = self.scene.player.position
            v = ppos - self.position
            d = glm.length(v)
            if d < 2500:
                self.velocity = vec3(
                    nrand(20), nrand(20), self.scene.player.velocity.z * nrand(1)
                )
                break
|
data_dir = '../data'
import os
import json
import pandas as pd
import itertools
import time
def get_valid_documents(thresh=100):
    """Scan every file under <data_dir>/read/ and count reads per document.

    Each line is "<user> <doc> <doc> ...".  Returns
    {doc_id: {'num': read_count, 'reader': [user_id, ...]}} restricted to
    documents read more than `thresh` times.

    Fixes: paths are built with os.path.join (the original concatenated
    ``dirpath + f``, which drops the separator for subdirectories) and
    files are closed via ``with``.
    """
    doc_read = {}
    path = "{}/read/".format(data_dir)
    for dirpath, subdirs, files in os.walk(path):
        for f in files:
            filename = os.path.join(dirpath, f)
            with open(filename, 'r') as file:
                for line in file.readlines():
                    words = line.strip().split(' ')
                    user = words[0]
                    for doc in words[1:]:
                        if doc not in doc_read:
                            doc_read[doc] = {'num': 1, 'reader': [user]}
                        else:
                            doc_read[doc]['num'] += 1
                            doc_read[doc]['reader'].append(user)
    # Per the original notes: total docs 505840; docs over thresh=100: 36340.
    doc_read_thresh = {key: {'num': doc_read[key]['num'],
                             'reader': doc_read[key]['reader']}
                       for key in doc_read if doc_read[key]['num'] > thresh}
    return doc_read_thresh
def get_user_list(valid_doc, thresh=100, etc_user_num=200):
    """Split readers of `valid_doc` into heavy users (>= thresh reads) and a
    capped sample of at most `etc_user_num` light users.

    Returns (heavy_docs, heavy_counts, light_docs, light_counts) where the
    *_docs dicts map user -> list of docs read and *_counts map user ->
    read count.
    """
    user_read_num = {}
    user_read_doc = {}
    for doc, info in valid_doc.items():
        for reader in info['reader']:
            if reader in user_read_doc:
                user_read_doc[reader].append(doc)
                user_read_num[reader] += 1
            else:
                user_read_doc[reader] = [doc]
                user_read_num[reader] = 1
    heavy_counts = {u: n for u, n in user_read_num.items() if n >= thresh}
    heavy_docs = {u: user_read_doc[u] for u in heavy_counts}
    light_counts = {u: n for u, n in user_read_num.items() if n < thresh}
    light_counts = {u: light_counts[u]
                    for i, u in enumerate(light_counts) if i < etc_user_num}
    light_docs = {u: user_read_doc[u] for u in light_counts}
    return heavy_docs, heavy_counts, light_docs, light_counts
def categorize_value(target, cat_num=100):
    """Bucket each value of `target` into `cat_num` equal-width integer bins.

    Values beyond the last bin edge land in the overflow bin `cat_num`.
    """
    values = list(target.values())
    hi, lo = max(values), min(values)
    width = int((hi - lo + 1) / cat_num)
    bucketed = {}
    for key, value in target.items():
        for bin_idx in range(cat_num):
            if lo + width * bin_idx <= value < lo + width * (bin_idx + 1):
                bucketed[key] = bin_idx
        if value >= lo + width * cat_num:
            bucketed[key] = cat_num  # overflow bin
    return bucketed
def doc_id2author_id(doc_id):
    """The author id is the part of the document id before the first '_'."""
    author_id, _, _ = doc_id.partition('_')
    return author_id
def get_y_views(user_id, doc_id, user_read_doc):
    """Number of times `user_id` read `doc_id` (reads may repeat in the list)."""
    return user_read_doc[user_id].count(doc_id)
def get_doc_meta_and_age(valid_doc, date_cat_num):
    """
    Read ../data/metadata.json and, for every document in `valid_doc`,
    collect {document(id): {'keyword_list': [...], 'mag_id': magazine_id}}
    plus an age dict of registration timestamps bucketed into
    `date_cat_num` categories via categorize_value.

    Fix: the metadata file is now closed via ``with`` (the original opened
    it and never closed the handle).
    """
    meta = {}
    age = {}
    with open('../data/metadata.json', 'r') as data:
        for line in data.readlines():
            line = json.loads(line.strip())
            if line['id'] in valid_doc:
                meta[line['id']] = {'keyword_list': line['keyword_list'],
                                    'mag_id': line['magazine_id']}
                age[line['id']] = line['reg_ts']  # unix timestamp
    age = categorize_value(age, date_cat_num)
    return meta, age
def get_following_list(user_read_num1, user_read_num2):
    """Map every known (heavy or light) user id to its following_list as
    recorded in ../data/users.json.

    Fix: users.json is now closed via ``with`` (the original leaked the
    file handle).
    """
    user_list = {key: user_read_num1[key] for key in user_read_num1}
    user_list.update(user_read_num2)
    result = {}
    with open('../data/users.json', 'r') as data:
        for line in data.readlines():
            line = json.loads(line)
            if line['id'] in user_list:
                result[line['id']] = line['following_list']
    return result
def check_is_followed(user_id, author_id, following_data):
    """1 if `user_id` follows `author_id` according to `following_data`, else 0."""
    followed = author_id in following_data.get(user_id, [])
    return int(followed)
def prepare_data(param):
    """Run the preprocessing pipeline and return every intermediate structure
    needed to build train/test rows.

    `param` supplies thresholds / bucket counts (see make_data).
    NOTE(review): param['user_thresh'] is never passed to get_user_list, so
    its default thresh=100 is used — confirm that is intended.
    """
    print("preparing valid documents...")
    valid_doc = get_valid_documents(thresh=param['doc_thresh'])
    print("preparing user information...")
    user_read_doc1, user_read_num1, user_read_doc2, user_read_num2 \
        = get_user_list(valid_doc, etc_user_num=param['etc_user_num'])
    # Raw per-document read counts -> bucketed popularity categories.
    pop = {key: valid_doc[key]['num'] for key in valid_doc}
    print("preparing popularity...")
    popularity = categorize_value(pop, cat_num=param['pop_cat_num'])
    print("preparing meta and age...")
    meta, age = get_doc_meta_and_age(valid_doc, param['date_cat_num'])
    following = get_following_list(user_read_num1, user_read_num2)
    print("preparing user read num categories...")
    # Merge heavy and light users, then bucket their read counts.
    tmp = {key: user_read_num1[key] for key in user_read_num1}
    tmp.update(user_read_num2)
    user_read = categorize_value(tmp, cat_num=param['user_read_cat_num'])
    return valid_doc, user_read_doc1, user_read_num1, user_read_doc2, user_read_num2,\
        popularity, meta, age, following, user_read
def __index(mapping, name=1, etc=1):
    """Assign an integer index to every key of `mapping`.

    With name=2, every key gets the same constant index `etc` (used to
    collapse all "etc" users into one shared bucket; the constant is
    adjustable via `etc`).  Otherwise keys are numbered in enumeration
    order.  (Renamed the first parameter: the original shadowed the
    builtin ``dict``.)
    """
    if name == 2:
        return {key: etc for key in mapping.keys()}
    return {entry: index for index, entry in enumerate(mapping)}
def get_index_data(valid_doc, user_read_num1, user_read_num2, meta, dummy):
    """Build string->int index maps for docs, users, authors, tags and
    magazines; returns the maps plus the shared "etc"-user index.
    """
    doc_indexed = __index(valid_doc)
    valid_user_idx = __index(user_read_num1)
    # Every light ("etc") user collapses into a single index one past the
    # last heavy user.
    etc_user_num = len(valid_user_idx)
    etc_user_idx = __index(user_read_num2, name=2, etc=etc_user_num)
    valid_user_idx.update(etc_user_idx)
    user_idx = valid_user_idx
    # author_idx
    authors = list(set([doc_id2author_id(doc_id) for doc_id in valid_doc]))
    author_indexed = {author: idx for idx, author in enumerate(authors)}
    # tags indx
    tags = []
    mags = []
    for doc_id in meta:
        tags += meta[doc_id]['keyword_list']
        # NOTE(review): deduplicating inside the loop is wasteful but gives
        # the same result as one final dedup.
        tags = list(set(tags))
        mags.append(meta[doc_id]['mag_id'])
    mags = list(set(mags))
    # add DUMMY_TAG and empty tag
    tags += ['', dummy['DUMMY_TAG']]
    mags.append(dummy['DUMMY_MAG_ID'])
    tag_indexed = {tag: idx for idx, tag in enumerate(tags)}
    mag_indexed = {mag: idx for idx, mag in enumerate(mags)}
    return doc_indexed, user_idx, author_indexed, tag_indexed, mag_indexed, etc_user_num
def data_to_index(doc_indexed, user_idx, author_indexed, tag_indexed, mag_indexed, etc_user_num, target_file='train'):
    """Rewrite ../data/<target>_data_raw.txt as ../data/<target>_data.txt
    with every string field replaced by its integer index.

    Fix: a single ``with open(..., 'w')`` replaces the original
    open-'w'-then-reopen-'a' sequence, and both files are now closed.
    """
    with open('../data/{}_data.txt'.format(target_file), 'w') as writefile, \
            open('../data/{}_data_raw.txt'.format(target_file), 'r') as f:
        for line in f.readlines():
            line = json.loads(line.strip())
            # Unknown users collapse into the shared "etc" bucket.
            line['user'] = user_idx.get(line['user'], etc_user_num)
            line['doc'] = doc_indexed[line['doc']]
            line['author'] = author_indexed[line['author']]
            line['tagA'] = tag_indexed[line['tagA']]
            line['tagB'] = tag_indexed[line['tagB']]
            line['tagC'] = tag_indexed[line['tagC']]
            line['magazine_id'] = mag_indexed[line['magazine_id']]
            writefile.write(json.dumps(line))
            writefile.write('\n')
def generate_data(user_id, doc_id, valid_doc, user_read_doc, user_read_num, popularity, \
                  meta, age, following, dataframe, dummy, state='train'):
    """Fill the shared row dict `dataframe` in place with one train/test row.

    author: from doc_id2author_id <- doc_id
    tags / magazine_id / age: from `meta`/`age`, or dummy placeholders when
    the document has no metadata
    is_followed: check_is_followed <- following, user_id, author_id
    pop: popularity
    y: get_y_views <- user_id, doc_id, user_read_doc
    is_followed, y, views are zeroed for test rows.

    Fix: because `dataframe` is reused across calls, a document with fewer
    than three keywords used to leave tagB/tagC holding values from the
    PREVIOUS row; all three tags are now reset ('' is a valid tag index
    entry) before filling.
    """
    author_id = doc_id2author_id(doc_id)
    dataframe['user'] = user_id
    dataframe['doc'] = doc_id
    dataframe['author'] = author_id
    tag_list = ['tagA', 'tagB', 'tagC']
    if doc_id in meta:
        for tag in tag_list:
            dataframe[tag] = ''  # reset stale carry-over from the last row
        for i, keyword in enumerate(meta[doc_id]['keyword_list']):
            if i >= 3:
                break
            dataframe[tag_list[i]] = keyword
        dataframe['magazine_id'] = meta[doc_id]['mag_id']
        dataframe['age'] = age[doc_id]
    else:
        # No metadata: use the dummy placeholders.
        dataframe['tagA'] = dummy['DUMMY_TAG']
        dataframe['tagB'] = dummy['DUMMY_TAG']
        dataframe['tagC'] = dummy['DUMMY_TAG']
        dataframe['magazine_id'] = dummy['DUMMY_MAG_ID']
        dataframe['age'] = dummy['DUMMY_DATE']
    dataframe['popularity'] = popularity[doc_id]
    if state == 'train':
        dataframe['y'] = get_y_views(user_id, doc_id, user_read_doc)
        dataframe['views'] = user_read_num[user_id]
        dataframe['is_followed'] = check_is_followed(user_id, author_id, following)
    else:
        # Test rows: labels unknown / filled downstream.
        dataframe['y'] = 0
        dataframe['views'] = 0
        dataframe['is_followed'] = 0
    return dataframe
def generate_train_data(valid_doc, user_read_doc1, user_read_doc2,\
                        user_read, popularity, meta, age, following, dataframe, dummy):
    """Create data to train: one JSON row per (user, unique doc) pair,
    written to ../data/train_data_raw.txt.

    Fixes: the two copy-pasted loops over the heavy/light user dicts are
    merged, and the output file is opened once via ``with`` (the original
    opened it twice and never closed it).
    """
    with open('../data/train_data_raw.txt', 'w') as writefile:
        for user_read_doc in (user_read_doc1, user_read_doc2):
            for user_id in user_read_doc:
                for doc_id in list(set(user_read_doc[user_id])):
                    d = generate_data(user_id, doc_id, valid_doc, user_read_doc,
                                      user_read, popularity, meta, age, following,
                                      dataframe, dummy, state='train')
                    writefile.write(json.dumps(d))
                    writefile.write('\n')
    return
def generate_test_data(valid_doc, user_read_doc1, user_read_doc2,\
                       user_read, popularity, meta, age, following, dataframe, dummy):
    """Create one candidate row per (dev user, unseen valid doc) pair,
    written to ../data/test_data_raw.txt.

    Fixes versus the original:
    * accepts ``dummy`` and forwards it to generate_data — make_data
      already passed it as a tenth argument, so both that call and the
      under-filled generate_data call used to raise TypeError;
    * the seen-document checks indexed the per-user dicts with ``doc_id``
      instead of ``user_id``;
    * input and output files are managed with ``with``.
    """
    with open('../data/predict/dev.users', 'r') as test_file, \
            open('../data/test_data_raw.txt', 'w') as writefile:
        tmp_read_doc = {key: user_read_doc1[key] for key in user_read_doc1}
        tmp_read_doc.update(user_read_doc2)
        for line in test_file.readlines():
            user_id = line.strip()
            for doc_id in valid_doc:
                # Skip documents this user has already read.
                if user_id in user_read_doc1 and doc_id in user_read_doc1[user_id]:
                    continue
                if user_id in user_read_doc2 and doc_id in user_read_doc2[user_id]:
                    continue
                d = generate_data(user_id, doc_id, valid_doc, tmp_read_doc,
                                  user_read, popularity, meta, age, following,
                                  dataframe, dummy, state='test')
                writefile.write(json.dumps(d))
                writefile.write('\n')
    return
def load_data(target='train', data_num=-1):
    """Load ../data/<target>_data.txt (one JSON row per line) into a DataFrame.

    When data_num > 0, returns early at each data_num-row boundary
    (original early-return behavior preserved).  Fix: the local that
    shadowed the builtin ``dict`` was renamed.
    NOTE(review): pd.concat inside the loop is quadratic; kept to preserve
    the exact incremental / early-return behavior.
    """
    df = pd.DataFrame()
    start_time = time.time()
    with open('../data/{}_data.txt'.format(target), 'r') as f:
        for i, line in enumerate(f.readlines()):
            if data_num != -1 and i % data_num == 0 and i != 0:
                print("Progress: {}".format(str(i)))
                return df
            if i % 10000 == 0:
                end_time = time.time()
                print("Progress: {} time: {}s".format(str(i), str(end_time - start_time)))
                start_time = time.time()
            row = json.loads(line.strip())
            tmp_df = pd.DataFrame({i: row}).transpose()
            df = pd.concat([df, tmp_df], axis=0)
    return df
def save_field_vocab_size(doc_indexed, user_idx, author_indexed, tag_indexed, \
                          mag_indexed, param, dataframe):
    """Write the vocabulary size of every feature field to
    ../data/field_vocab_size.txt as one JSON object (used later to size the
    per-field embeddings).  Mutates the shared `dataframe` dict in place.
    """
    # user, doc, author, tagA, tagB, tagC, is_followed, views, magazine_id,
    # popularity, age
    dataframe['user'] = len(user_idx)
    dataframe['doc'] = len(doc_indexed)
    dataframe['author'] = len(author_indexed)
    dataframe['tagA'] = len(tag_indexed)
    dataframe['tagB'] = len(tag_indexed)
    dataframe['tagC'] = len(tag_indexed)
    dataframe['magazine_id'] = len(mag_indexed)
    dataframe['is_followed'] = 2  # boolean field
    dataframe['views'] = param['user_read_cat_num']
    dataframe['popularity'] = param['pop_cat_num']
    dataframe['age'] = param['date_cat_num'] + 1
    with open('../data/field_vocab_size.txt', 'w') as f:
        f.write(json.dumps(dataframe))
    return
def get_field_vocab_size():
    """Read back the field -> vocab-size mapping written by save_field_vocab_size."""
    with open('../data/field_vocab_size.txt', 'r') as f:
        first_line = f.readlines()[0]
    return json.loads(first_line.strip())
def make_data(state='train'):
    """End-to-end driver: preprocess the corpus, generate raw train/test
    rows, convert them to integer indices, and (train only) record the
    per-field vocabulary sizes.
    """
    # Thresholds / bucket counts for the whole pipeline.
    param = {
        'user_thresh': 300,
        'doc_thresh': 5000,
        'pop_cat_num': 100,
        'date_cat_num': 100,
        'user_read_cat_num': 20,
        'etc_user_num': 200
    }
    # Placeholder values for documents that have no metadata.
    dummy = {
        'DUMMY_TAG': 'JAEICK',
        'DUMMY_DATE': param['date_cat_num']+1,
        'DUMMY_MAG_ID': -1
    }
    # One shared row dict, re-filled by generate_data for every row.
    dataframe = {'user': '', 'doc': '', 'author': '', 'tagA': '', 'tagB': '', 'tagC': '',\
                 'is_followed': 0, 'views': 0, 'magazine_id': '', 'popularity': 0, 'age': 0, 'y': 0}
    valid_doc, user_read_doc1, user_read_num1, user_read_doc2, user_read_num2,\
        popularity, meta, age, following, user_read = prepare_data(param)
    print("Indexing...")
    doc_indexed, user_idx, author_indexed, tag_indexed, mag_indexed, etc_user_num = \
        get_index_data(valid_doc, user_read_num1, user_read_num2, meta, dummy)
    if state == 'train':
        print("Generating train data...")
        generate_train_data(valid_doc, user_read_doc1, user_read_doc2,\
                            user_read, popularity, meta, age, following, dataframe, dummy)
        print("Converting string to index...")
        data_to_index(doc_indexed, user_idx, author_indexed, tag_indexed, \
                      mag_indexed, etc_user_num, target_file=state)
        print("Saving field vocab size...")
        save_field_vocab_size(doc_indexed, user_idx, author_indexed, tag_indexed, \
                              mag_indexed, param, dataframe)
    else:
        print("Generating test data...")
        # NOTE(review): confirm generate_test_data accepts this tenth
        # `dummy` argument — its declaration above took only 9 parameters,
        # in which case this call raises TypeError.
        generate_test_data(valid_doc, user_read_doc1, user_read_doc2,\
                           user_read, popularity, meta, age, following, dataframe, dummy)
        print("Converting string to index...")
        data_to_index(doc_indexed, user_idx, author_indexed, tag_indexed, \
                      mag_indexed, etc_user_num, target_file=state)
if __name__ == "__main__":
    make_data('train')
|
import subprocess
import json
import argparse
import sys
import os
import pathlib
from dotenv import load_dotenv
# Resolve an EC2 Name tag to its public IP (from a cached describe-addresses
# dump in addresses.txt) and open an ssh session via awssh.sh.
load_dotenv()
username = os.getenv("USERNAME")
currpath = pathlib.Path(__file__).parent.absolute()
parser = argparse.ArgumentParser(description='Take name tag and return public ip with ssh login.')
parser.add_argument('nametag',
                    metavar='nametag',
                    default=None,
                    action='store',
                    help='input valid aws name tag into the system')
args = parser.parse_args()
# Fix: use the value argparse parsed/validated instead of reaching back
# into sys.argv (the original assigned `args` and then ignored it).
nametag = args.nametag
# nametag = '0005-nagios-kpex-asia'
ip = ''
with open(os.path.join(currpath, 'addresses.txt')) as json_file:
    data = json.load(json_file)
# Iterate the address records directly instead of indexing by range(len(...)).
for address in data['Addresses']:
    try:
        tag = address['Tags'][0]
        if tag['Value'] == nametag and tag['Key'] == 'Name':
            ip = address['PublicIp']
            break
    except KeyError:
        pass  # record without Tags/PublicIp: skip (original behavior)
if ip:
    awssh_path = os.path.join(currpath, 'awssh.sh')
    # NOTE: shell=True with interpolated values — inputs come from local
    # config/CLI here, but prefer a list argv if this ever takes untrusted input.
    script = "{0} {1} {2}".format(awssh_path, username, ip)
    print('ssh {0}@{1}'.format(username, ip))
    subprocess.call(script, shell=True)
else:
    print('Host not found')
|
from lxml import html
from bs4 import BeautifulSoup
import requests
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
import time
import datetime
import Util
def td(feature):
    """Return the <td> text of the table row labeled *feature*, or '' on any failure."""
    try:
        row = soup.find(text=feature).parent.parent
        return row.find('td').contents[0]
    except Exception:
        return ''
def tdSpan(feature):
    """Return the <td><span> text of the row labeled *feature*, or '' on any failure."""
    try:
        row = soup.find(text=feature).parent.parent
        return row.find('td').find('span').contents[0]
    except Exception:
        return ''
def tdA(feature):
    """Return the <td><a> link text of the row labeled *feature*, or '' on any failure."""
    try:
        row = soup.find(text=feature).parent.parent
        return row.find('td').find('a').contents[0]
    except Exception:
        return ''
def tdSeasons(feature):
    """Return the <td><span> text for the SECOND occurrence of *feature*, or ''."""
    try:
        second_match = soup.findAll(text=feature)[1]
        return second_match.parent.parent.find('td').find('span').contents[0]
    except Exception:
        return ''
def tdProgress(feature):
    """Return the pct-data cell for *feature* inside #playerPerformance, or ''."""
    try:
        perf = soup.find(id='playerPerformance')
        return perf.find(text=feature).parent.parent.find(class_='pct-data').contents[0]
    except Exception:
        return ''
def tdStats(feature):
    """Return the header cell text for *feature* inside #playerStatsTab, or ''.

    Prefers the <a> link text; for rows living in a <thead> the second match
    of *feature* is used instead of the first.
    """
    try:
        tab = soup.find(id='playerStatsTab')
        header_cell = tab.find(text=feature).parent.parent.findAll('th')[0]
        link = header_cell.find('a')
        if link:
            return link.contents[0]
        if tab.find(text=feature).parent.parent.parent.name == 'thead':
            return tab.findAll(text=feature)[1].parent.parent.findAll('th')[0].contents[0]
        return header_cell.contents[0]
    except Exception:
        return ''
def getGeneralData(players):
    """Scrape the profile page of every player in *players* and save a dated CSV.

    The column order of each data row must match ``general_features`` exactly.
    """
    #settings
    url = 'https://www.ultimatetennisstatistics.com/playerProfile?playerId='
    driver = webdriver.Chrome(ChromeDriverManager().install())
    general_features = ['player_id', 'player_name', 'age', 'country', 'height', 'weight', 'favorite_hand', 'backhand', 'turned_Pro',
                        'seasons', 'is_active', 'prize_money', 'titles', 'grand_slams', 'masters', 'finals', 'current_rank', 'best_rank']
    data = []
    data.append(general_features)
    for index, row in players.iterrows():
        print('Getting general data for '+row.player_name+' number: '+str(index))
        player_id = row.player_id
        driver.get(url+str(player_id))
        global soup
        soup = BeautifulSoup(driver.page_source, 'lxml')
        # BUGFIX: Height/Weight were emitted in the wrong order — the header
        # lists 'height' before 'weight', so td('Height') must come first.
        data_player = [player_id, row.player_name, td('Age'), tdSpan('Country'), td('Height'), td('Weight'), td('Plays'),
                       td('Backhand'), td('Turned Pro'), tdSeasons('Seasons'), td('Active'), td('Prize Money'),
                       tdA('Titles'), tdA('Grand Slams'), tdA('Masters'), tdA('Tour Finals'), td('Current Rank'), td('Best Rank')]
        data.append(data_player)
    filename = 'players_general_' + str(datetime.datetime.today().__format__('%d-%m-%Y'))
    Util.saveDFtoCSV(data, filename)
def getPerformanceData(players):
    """Scrape per-player performance percentages and save them as a dated CSV.

    The performance tab loads asynchronously, so each player is retried until
    the first data cell is non-empty (or ~6 attempts have been made).
    """
    #settings
    url = 'https://www.ultimatetennisstatistics.com/playerProfile?playerId='
    driver = webdriver.Chrome(ChromeDriverManager().install())
    #performance header
    # Build the CSV header once from the first player's performance table.
    driver.get(url+str(players.iloc[0].player_id))
    button = driver.find_element_by_id("performancePill")
    driver.execute_script("arguments[0].click()", button)
    time.sleep(1)  # give the tab's JS time to populate the table
    sp = BeautifulSoup(driver.page_source, 'lxml')
    tds = sp.find(id='playerPerformance').findAll('td')
    header = []
    header.append('player_id')
    for td in tds:
        header.append(td.contents[0])
    #TODO tratar nomes do header
    data = []
    data.append(header)
    for index, row in players.iterrows():
        repeat = False
        counter = 0
        while(not(repeat)):
            print('Getting performance data for '+row.player_name+' number: '+str(index))
            player_id = row.player_id
            driver.get(url+str(player_id))
            button = driver.find_element_by_id("performancePill")
            driver.execute_script("arguments[0].click()", button)
            time.sleep(1)
            # tdProgress() reads the module-level `soup`, so rebind it here.
            global soup
            soup = BeautifulSoup(driver.page_source, 'lxml')
            rowToAppend = []
            rowToAppend.append(player_id)
            for attr in header[1:]:
                rowToAppend.append(tdProgress(attr))
            # Only keep the row if the first scraped value actually loaded.
            if (rowToAppend[1] != ''):
                repeat = True
                data.append(rowToAppend)
            # Give up after several attempts (row is dropped in that case).
            if (counter >= 5):
                repeat = True
            counter += 1
    filename = 'players_performance_' + str(datetime.datetime.today().__format__('%d-%m-%Y'))
    Util.saveDFtoCSV(data, filename)
def getStatisticsData(players):
    """Scrape per-player match statistics and save them as a dated CSV.

    Mirrors getPerformanceData: the statistics tab is clicked, the page is
    re-parsed, and each player is retried until data appears (max ~6 tries).
    """
    #settings
    url = 'https://www.ultimatetennisstatistics.com/playerProfile?playerId='
    driver = webdriver.Chrome(ChromeDriverManager().install())
    # statistics header
    # Build the CSV header once from the first player's statistics table.
    driver.get(url+str(players.iloc[0].player_id))
    button = driver.find_element_by_id("statisticsPill")
    driver.execute_script("arguments[0].click()", button)
    time.sleep(1)  # give the tab's JS time to populate the table
    sp = BeautifulSoup(driver.page_source, 'lxml')
    tds = sp.find(id='playerStatsTab').find(class_='tab-content').findAll('td')
    header = []
    header.append('player_id')
    for td in tds:
        header.append(td.contents[0])
    #TODO tratar nomes do header
    data = []
    data.append(header)
    for index, row in players.iterrows():
        repeat = False
        counter = 0
        while(not(repeat)):
            print('Getting stats data for '+row.player_name+' number: '+str(index))
            player_id = row.player_id
            driver.get(url+str(player_id))
            button = driver.find_element_by_id("statisticsPill")
            driver.execute_script("arguments[0].click()", button)
            time.sleep(1)
            # tdStats() reads the module-level `soup`, so rebind it here.
            global soup
            soup = BeautifulSoup(driver.page_source, 'lxml')
            rowToAppend = []
            rowToAppend.append(player_id)
            for attr in header[1:]:
                rowToAppend.append(str(tdStats(attr)))
            # Only keep the row if the first scraped value actually loaded.
            if (rowToAppend[1] != ''):
                repeat = True
                data.append(rowToAppend)
            # Give up after several attempts (row is dropped in that case).
            if (counter >= 5):
                repeat = True
            counter += 1
    filename = 'players_stats_' + str(datetime.datetime.today().__format__('%d-%m-%Y'))
    Util.saveDFtoCSV(data, filename)
|
import unittest
from katas.kyu_7.sum_up_the_random_string import sum_from_string
class SumFromStringTestCase(unittest.TestCase):
    """Checks that sum_from_string adds up every integer embedded in a string."""

    def test_equals(self):
        # 2015 + 6 = 2021
        self.assertEqual(sum_from_string(
            'In 2015, I want to know how much does iPhone 6+ cost?'), 2021)

    def test_equals_2(self):
        # 1 + 1 + 2 = 4
        self.assertEqual(sum_from_string('1+1=2'), 4)

    def test_equals_3(self):
        # only the trailing 2 is a digit
        self.assertEqual(sum_from_string('e=mc^2'), 2)
|
from aiohttp import web
from ClientsFactory import get_vk_api_client, get_loop, get_aiohttp_client
from Message import Message
# Shared route table and clients, created once at import time.
CALLBACK = "/callback"  # NOTE(review): unused below — the route decorator hard-codes the path
routes = web.RouteTableDef()
vk = get_vk_api_client()
event_loop = get_loop()
client = get_aiohttp_client()
@routes.post('/callback')
async def hello(request):
    """Handle VK callback POSTs: answer the confirmation challenge or dispatch a message."""
    data = await request.json()
    if data["type"] == "confirmation":
        # VK expects this exact confirmation string to register the callback URL.
        return web.Response(text="0d238b23")
    else:
        # BUGFIX: reuse the already-parsed body instead of calling
        # request.json() a second time (redundant re-parse of the payload).
        await Message(vk, event_loop, data["object"]["message"]).answer()
        return web.Response(text="ok")
@routes.get("/")
async def index(request):
    """Health-check endpoint: always responds 200 with body "ok"."""
    return web.Response(text="ok")
# Assemble the aiohttp application and serve it on all interfaces.
app = web.Application()
app.add_routes(routes)
web.run_app(app, port=5050, host="0.0.0.0", loop=event_loop)
|
from django.db import models
from django.utils.translation import gettext_lazy as _
from .choices import LoyalityPrograms
class Profile(models.Model):
    """User profile: name, gender, age, loyalty program and year in school."""

    # Single-letter stored value, human-readable label.
    GENDERS = [("M", "Male"), ("F", "Female"), ("U", "Undefined")]

    class YearInSchool(models.TextChoices):
        # Two-letter stored value, lazily-translated label.
        FRESHMAN = "FR", _("Freshman")
        SOPHOMORE = "SO", _("Sophomore")
        JUNIOR = "JR", _("Junior")
        SENIOR = "SR", _("Senior")
        GRADUATE = "GR", _("Graduate")

    name = models.CharField(max_length=30)
    gender = models.CharField(max_length=1, choices=GENDERS, default="U")
    age = models.IntegerField(default=0)
    # Choices come from the project-level LoyalityPrograms enum.
    loyality = models.CharField(max_length=8, choices=LoyalityPrograms.choices(), default=LoyalityPrograms.SILVER_CARD)
    year_in_school = models.CharField(
        max_length=2,
        choices=YearInSchool.choices,
        default=YearInSchool.FRESHMAN,
    )
|
from model.vehicle_handling.vehicle_movement_handler import vehicle_movement_handler
import global_variables as gv
from model.vehicle_handling import off_screen_handling
from model.direction import Dir
class Vehicle:
    """Base class for all vehicles: position, size, velocity, handling and health.

    Velocity is kept in three channels that the movement handler combines each
    frame: *input* (player/AI intent), *reaction* (knock-back from collisions)
    and *cur* (what is actually applied).
    """

    def __init__(self, index, movement_pattern, x, y, w, l, acceleration, max_speed, handling, max_handling,
                 health, input_x_vel=0, input_y_vel=0, input_direction=Dir.NONE, reaction_x_vel=0, reaction_y_vel=0,
                 reaction_direction=Dir.NONE, cur_x_vel=0, cur_y_vel=0, cur_direction=Dir.NONE,
                 friction_marker=gv.FRICTION_MARKER):
        self.__movement_pattern = movement_pattern
        self.__index = index
        self.__x = x
        self.__y = y
        self.__w = w
        self.__l = l
        self.__max_speed = max_speed
        self.__cur_x_vel = cur_x_vel  # current x velocity
        self.__cur_y_vel = cur_y_vel
        self.__cur_direction = cur_direction
        self.__handling = handling
        self.__max_handling = max_handling
        self.__input_x_vel = input_x_vel
        self.__input_y_vel = input_y_vel
        self.__input_direction = input_direction
        self.__acceleration = acceleration
        self.__health = health
        self.__reaction_x_vel = reaction_x_vel
        self.__reaction_y_vel = reaction_y_vel
        self.__reaction_direction = reaction_direction
        self.__acceleration_count = 0
        self.__handling_count = 0
        self.__friction_count = 0
        self.__friction_marker = friction_marker

    """ methods """
    def update_location(self, other_vehicles):
        """Delegate one movement step to the shared movement handler."""
        vehicle_movement_handler(self, other_vehicles)

    def is_alive(self):
        """Return True while the vehicle still has health left."""
        return self.health > 0

    def is_next_to_road(self):
        """Return the side of the road the vehicle is about to leave, or Dir.NONE.

        At full sideways speed only the raw velocity is considered; below it
        the handling margin is added to the look-ahead.
        """
        if abs(self.cur_x_vel) >= self.max_handling:
            if self.x + self.cur_x_vel < gv.ROAD_X_PLACEMENT:
                return Dir.WEST
            elif self.x + self.w + self.cur_x_vel > gv.ROAD_X_PLACEMENT + gv.ROAD_W:
                return Dir.EAST
        else:
            if self.x + self.cur_x_vel - self.handling < gv.ROAD_X_PLACEMENT:
                return Dir.WEST
            elif self.x + self.w + self.cur_x_vel + self.handling > gv.ROAD_X_PLACEMENT + gv.ROAD_W:
                return Dir.EAST
        return Dir.NONE

    """ getters """
    @property
    def index(self):
        return self.__index

    @property
    def movement_pattern(self):
        return self.__movement_pattern

    @property
    def x(self):
        return self.__x

    @property
    def y(self):
        return self.__y

    @property
    def w(self):
        return self.__w

    @property
    def l(self):
        return self.__l

    @property
    def cur_x_vel(self):
        return self.__cur_x_vel

    @property
    def cur_y_vel(self):
        return self.__cur_y_vel

    @property
    def acceleration(self):
        return self.__acceleration

    @property
    def max_speed(self):
        return self.__max_speed

    @property
    def handling(self):
        return self.__handling

    @property
    def max_handling(self):
        return self.__max_handling

    @property
    def health(self):
        return self.__health

    @property
    def cur_direction(self):
        return self.__cur_direction

    @property
    def reaction_x_vel(self):
        return self.__reaction_x_vel

    @property
    def reaction_y_vel(self):
        return self.__reaction_y_vel

    @property
    def input_direction(self):
        return self.__input_direction

    @property
    def input_x_vel(self):
        return self.__input_x_vel

    @property
    def input_y_vel(self):
        return self.__input_y_vel

    @property
    def acceleration_count(self):
        return self.__acceleration_count

    @property
    def handling_count(self):
        return self.__handling_count

    @property
    def friction_count(self):
        return self.__friction_count

    @property
    def friction_marker(self):
        return self.__friction_marker

    """ setters """
    @index.setter
    def index(self, index):
        self.__index = index

    @movement_pattern.setter
    def movement_pattern(self, movement_pattern):
        self.__movement_pattern = movement_pattern

    @x.setter
    def x(self, x):
        self.__x = x

    @y.setter
    def y(self, y):
        self.__y = y

    @cur_x_vel.setter
    def cur_x_vel(self, cur_x_vel):
        self.__cur_x_vel = cur_x_vel

    @cur_y_vel.setter
    def cur_y_vel(self, cur_y_vel):
        self.__cur_y_vel = cur_y_vel

    @max_speed.setter
    def max_speed(self, max_speed):
        self.__max_speed = max_speed

    @max_handling.setter
    def max_handling(self, max_handling):
        self.__max_handling = max_handling

    @health.setter
    def health(self, health):
        # Health is clamped at zero; it never goes negative.
        self.__health = health
        if self.__health < 0:
            self.__health = 0

    @cur_direction.setter
    def cur_direction(self, cur_direction):
        self.__cur_direction = cur_direction

    def x_input_against_x_reaction(self):
        """Cancel horizontal knock-back with opposing/aligned horizontal input.

        BUGFIX: three of the four clamp branches previously zeroed
        ``__reaction_y_vel`` — the *vertical* channel — which is clearly a
        copy-paste error from y_input_against_y_reaction; this method must
        only touch the x reaction channel (compare the symmetric y method).
        """
        if self.reaction_x_vel < 0 and self.input_x_vel < 0:
            self.__reaction_x_vel = self.reaction_x_vel - self.input_x_vel
            if self.__reaction_x_vel > 0:
                self.__reaction_x_vel = 0
        elif self.reaction_x_vel > 0 and self.input_x_vel > 0:
            self.__reaction_x_vel = self.reaction_x_vel - self.input_x_vel
            if self.__reaction_x_vel < 0:
                self.__reaction_x_vel = 0
        if self.reaction_x_vel > 0 and self.input_x_vel < 0:
            self.__reaction_x_vel = self.reaction_x_vel + self.input_x_vel
            if self.__reaction_x_vel < 0:
                self.__reaction_x_vel = 0
        elif self.reaction_x_vel < 0 and self.input_x_vel > 0:
            self.__reaction_x_vel = self.reaction_x_vel + self.input_x_vel
            if self.__reaction_x_vel > 0:
                self.__reaction_x_vel = 0

    @reaction_x_vel.setter
    def reaction_x_vel(self, reaction_x_vel):
        self.__reaction_x_vel = reaction_x_vel

    def y_input_against_y_reaction(self):
        """Cancel vertical knock-back with opposing/aligned vertical input."""
        if self.reaction_y_vel > 0 and self.input_y_vel < 0:
            self.__reaction_y_vel = self.reaction_y_vel + self.input_y_vel
            if self.__reaction_y_vel < 0:
                self.__reaction_y_vel = 0
        elif self.reaction_y_vel < 0 and self.input_y_vel > 0:
            self.__reaction_y_vel = self.reaction_y_vel + self.input_y_vel
            if self.__reaction_y_vel > 0:
                self.__reaction_y_vel = 0
        if self.reaction_y_vel < 0 and self.input_y_vel < 0:
            self.__reaction_y_vel = self.reaction_y_vel - self.input_y_vel
            if self.__reaction_y_vel > 0:
                self.__reaction_y_vel = 0
        elif self.reaction_y_vel > 0 and self.input_y_vel > 0:
            self.__reaction_y_vel = self.reaction_y_vel - self.input_y_vel
            if self.__reaction_y_vel < 0:
                self.__reaction_y_vel = 0

    @reaction_y_vel.setter
    def reaction_y_vel(self, reaction_y_vel):
        self.__reaction_y_vel = reaction_y_vel

    @input_direction.setter
    def input_direction(self, input_direction):
        self.__input_direction = input_direction

    @input_x_vel.setter
    def input_x_vel(self, input_x_vel):
        self.__input_x_vel = input_x_vel

    def acceleration_on_input_x_vel(self, acceleration):
        """Add *acceleration* to sideways input unless it would exceed max_handling."""
        if self.__input_x_vel + acceleration > self.max_handling:
            return
        elif self.__input_x_vel + acceleration < -1 * self.max_handling:
            return
        else:
            self.__input_x_vel += acceleration

    def friction_on_input_x_vel(self, input_x_vel):
        self.__input_x_vel = input_x_vel

    @input_y_vel.setter
    def input_y_vel(self, input_y_vel):
        self.__input_y_vel = input_y_vel

    def acceleration_on_input_y_vel(self, acceleration):
        """Add *acceleration* to forward input; reverse is capped at half max_speed."""
        if self.__input_y_vel + acceleration > int(round(self.max_speed / 2)):
            return
        elif self.__input_y_vel + acceleration < -1 * self.max_speed:
            return
        else:
            self.__input_y_vel += acceleration

    def off_road_on_input_y_vel(self, off_road_y):
        """Apply off-road drag to forward input, bounded by +/- max_speed."""
        if self.__input_y_vel + off_road_y > self.max_speed:
            return
        elif self.__input_y_vel + off_road_y < -1 * self.max_speed:
            return
        else:
            self.__input_y_vel += off_road_y

    def friction_on_input_y_vel(self, input_y_vel):
        self.__input_y_vel = input_y_vel

    @acceleration_count.setter
    def acceleration_count(self, acceleration_count):
        self.__acceleration_count = acceleration_count

    @handling_count.setter
    def handling_count(self, handling_count):
        self.__handling_count = handling_count

    @friction_count.setter
    def friction_count(self, friction_count):
        self.__friction_count = friction_count

    @friction_marker.setter
    def friction_marker(self, friction_marker):
        self.__friction_marker = friction_marker
class Player(Vehicle):
    """The player-controlled vehicle; adds a score on top of the Vehicle state."""

    def __init__(self, index, movement_pattern="player", x=400, y=400, w=gv.PLAYER_WIDTH, l=gv.PLAYER_LENGTH,
                 acceleration=gv.PLAYER_ACCELERATION, max_speed=gv.PLAYER_MAX_SPEED, handling=gv.PLAYER_HANDLING,
                 max_handling=gv.PLAYER_MAX_HANDLING, health=gv.PLAYER_STARTING_HEALTH):
        super().__init__(index, movement_pattern, x, y, w, l, acceleration, max_speed, handling, max_handling, health)
        self.score = 0

    def is_below_screen(self):
        """True once the player has dropped past the bottom edge of the screen."""
        return bool(off_screen_handling.check_if_below_screen(self))

    """ getters """
    @property
    def score(self):
        return self.__score

    """ setters """
    @score.setter
    def score(self, score):
        self.__score = score
class Enemy(Vehicle):
    """Computer-controlled vehicle.

    Spawn rules:
      * x given, y omitted  -> spawn just above the visible screen at that x
      * x and/or y given    -> spawn exactly where requested
      * neither given       -> fixed fallback spawn at (30, 30)
    """
    def __init__(self, index, movement_pattern="random", x=None, y=None, w=gv.ENEMY_WIDTH, l=gv.ENEMY_LENGTH,
                 acceleration=gv.ENEMY_ACCELERATION, max_speed=gv.ENEMY_MAX_SPEED, handling=gv.ENEMY_HANDLING,
                 max_handling=gv.ENEMY_MAX_HANDLING, health=gv.ENEMY_STARTING_HEALTH, input_x_vel=0, input_y_vel=0,
                 input_direction=Dir.NONE):
        if x is not None and y is None:
            # -l-1 places the enemy fully above the top edge so it scrolls in.
            super().__init__(index, movement_pattern, x, -l-1, w, l, acceleration, max_speed, handling, max_handling,
                             health, input_x_vel, input_y_vel, input_direction)
        elif x is not None or y is not None:
            super().__init__(index, movement_pattern, x, y, w, l, acceleration, max_speed, handling, max_handling,
                             health, input_x_vel, input_y_vel, input_direction)
        else:
            super().__init__(index, movement_pattern, 30, 30, w, l, acceleration, max_speed, handling, max_handling,
                             health, input_x_vel, input_y_vel, input_direction)

    """ methods """
    def check_to_despawn(self, vehicles):
        """Remove this enemy from *vehicles* once it has left the bottom of the screen."""
        if off_screen_handling.check_if_below_screen(self):
            off_screen_handling.despawn(self, vehicles)
|
"""This is a model"""
import psycopg2
def database(app):
    """Create the stack_over_flow schema (users, questions, answers).

    The *app* parameter is accepted for interface compatibility but is not
    used by this function.
    """
    con = psycopg2.connect(
        "dbname='stack_over_flow'\
        user='dennis' password='12345'\
        host='localhost'")
    try:
        cur = con.cursor()
        # create a table
        cur.execute(
            "CREATE TABLE IF NOT EXISTS\
            users(id serial PRIMARY KEY,\
            name varchar, email varchar UNIQUE,\
            password varchar);")
        cur.execute("""CREATE TABLE IF NOT EXISTS questions(
            id serial PRIMARY KEY, date_posted varchar, questions varchar, user_id INT ,
            FOREIGN KEY (user_id) REFERENCES users(id)
            );""")
        cur.execute("""CREATE TABLE IF NOT EXISTS answers(
            id serial PRIMARY KEY, date_posted varchar, answers varchar, user_id INT ,
            FOREIGN KEY (user_id) REFERENCES users(id)
            );""")
        print("Database has been connected")
        con.commit()
    finally:
        # BUGFIX: the connection was never closed, leaking it on every call.
        con.close()
|
# Generated by Django 2.2.6 on 2020-07-30 07:17
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial auto-generated migration.

    All models are declared with ``managed: False`` — the tables already
    exist in the database and Django only records their shape here.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Articles',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('title', models.CharField(max_length=50)),
                ('content', models.TextField()),
                ('created_at', models.DateTimeField()),
                ('updated_at', models.DateTimeField()),
            ],
            options={
                'db_table': 'articles',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='Pros',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=50)),
                ('userid', models.CharField(max_length=50)),
                ('password', models.CharField(max_length=500)),
            ],
            options={
                'db_table': 'pros',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='ProSurveys',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('text', models.CharField(max_length=200)),
                ('checked', models.IntegerField()),
                ('created_at', models.DateTimeField()),
                ('updated_at', models.DateTimeField()),
            ],
            options={
                'db_table': 'pro_surveys',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='Regions',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=50)),
            ],
            options={
                'db_table': 'regions',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='Students',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=50)),
                # 'class' is a reserved word in Python, hence the db_column mapping.
                ('class_field', models.BigIntegerField(db_column='class')),
                ('profile_img', models.TextField(blank=True, null=True)),
            ],
            options={
                'db_table': 'students',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='Surveys',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('body_temperature', models.FloatField()),
                ('danger', models.IntegerField()),
                ('body_check', models.IntegerField()),
                ('created_at', models.DateTimeField()),
                ('updated_at', models.DateTimeField()),
            ],
            options={
                'db_table': 'surveys',
                'managed': False,
            },
        ),
    ]
|
######-11-ud120:lesson12-4-######
"""
fraction = 0.
if (poi_messages !="NaN") or (all_messages !="NaN") :
fraction = float(poi_messages)/all_messages
else :
fraction = 0.
return fraction
"""
######-10-ud120:lesson12-3-######
"""
if from_emails:
ctr = 0
while not from_poi and ctr < len(from_emails):
if from_emails[ctr] in poi_email_list:
from_poi = True
ctr += 1
"""
######-9-ud120:lesson11-19-#######
"""
data = []
for name, from_person in [("sara", 1), ("chris", 2)]:
# print name,from_person
if name == "sara":
data.append(from_person)
elif name == "chris":
data.append(from_person)
print data
"""
"""
a = [1,2,3]
b = [9,6,1]
for i,j in zip(a,b):
print i+j
"""
######-8-ud120:lesson11-19-#######
"""
from nltk.stem.snowball import SnowballStemmer
stemmer = SnowballStemmer("english")
Str = " I am a good response responsitivity person"
print Str
"""
"""
Str = Str.replace("good","")
print Str
Str = ' '.join(Str.split())
print Str
"""
"""
Str = Str.replace("good ","")
print Str
Str = Str.replace("am ","").replace("a ","")
print Str
"""
######-7-ud120:lesson11-18-#######
"""
from nltk.stem.snowball import SnowballStemmer
stemmer = SnowballStemmer("english")
Str = " I am a good response responsitivity person"
print Str
# for x in Str.split( ):
# print x
singles = [stemmer.stem(word) for word in Str.split( )]
print singles
st = ' '.join(singles)
print st
"""
######-6-ud120:lesson10-#######
"""
from nltk.corpus import stopwords
sw = stopwords.words("english")
print len(sw)
# import nltk
# nltk.download('stopwords')
"""
|
def user_prompt():
    """Prompt the user for a number and echo it back.

    Exits the interpreter with the message 'NaN' when the input cannot be
    parsed as a float.
    """
    import sys

    ans = input('Enter a number: ')
    try:
        float(ans)
    except ValueError:
        # BUGFIX: was a bare `except:`, which also swallowed KeyboardInterrupt
        # and SystemExit; only a failed float conversion should trigger this.
        sys.exit('NaN')
    return 'Your number is {}'.format(ans)
|
# -*- coding: utf-8 -*-
import uuid
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
# Create your models here.
def max_words(words=400):
    """Return a Django field validator rejecting text with more than *words* words."""
    def validator(value):
        word_count = len(value.split())
        if word_count > words:
            raise ValidationError(
                'Presiahnutý limit slov: %(current_words)s/%(max_words)s',
                params={'current_words': word_count, 'max_words': words},
                code='max_words')
    return validator
@python_2_unicode_compatible
class Registracia(models.Model):
    """Application form: contact details, four essay answers and attachments."""
    meno = models.CharField(max_length=100 )
    priezvisko = models.CharField(max_length=100)
    email = models.EmailField(max_length=100)
    telefon = models.CharField(max_length=15)

    # Soft limit is advisory (UI); the hard limit is enforced by max_words().
    QUESTION_SOFT_LIMIT_WORDS = 200
    QUESTION_HARD_LIMIT_WORDS = 400
    uspech = models.TextField(validators=[max_words(QUESTION_HARD_LIMIT_WORDS)])
    smerovanie = models.TextField(validators=[max_words(QUESTION_HARD_LIMIT_WORDS)])
    okolie = models.TextField(validators=[max_words(QUESTION_HARD_LIMIT_WORDS)])
    ocakavanie = models.TextField(validators=[max_words(QUESTION_HARD_LIMIT_WORDS)])
    list = models.FileField(null=True, upload_to='uploads/list/')
    # Random token used to identify the registration without exposing its id.
    token = models.UUIDField(default=uuid.uuid4, editable=False)
    cv = models.FileField(upload_to='uploads/cv/', blank=True)
    ref = models.CharField(max_length=500)
    skola = models.CharField(max_length=500)
    STUPNE_STUDIA = (('1', 'Stredná škola',), ('2', 'Maturitný ročník',), ('3', 'Bakalárske štúdium',), ('4', 'Magisterské štúdium',), ('5', 'Absolvent (PhD.)',))
    stupen_studia = models.CharField(max_length=1, choices=STUPNE_STUDIA, null=True)
    # GDPR consents: personal data processing and newsletter subscription.
    udaje = models.BooleanField()
    novinky = models.BooleanField()
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    def __str__(self):
        return self.meno + ' ' + self.priezvisko
|
# -*- coding: utf-8 -*-
# Concatenate the rows of 0.csv .. 50.csv into all.csv, skipping the first
# two lines (header) of every input file.
base = "/Users/TomonotiHayshi/GitHub/My Research/Rakuten-fake-/"
with open(base + "all.csv", "w") as out:
    for file_idx in range(51):
        # BUGFIX: files are now closed deterministically via `with`, and the
        # inner line counter no longer reuses (shadows) the outer loop variable.
        with open(base + str(file_idx) + ".csv") as part:
            for line_idx, line in enumerate(part):
                if line_idx > 1:
                    out.write(line)
|
import argparse
import os
import subprocess
from copy import copy
import yaml
IGNORE_KEYS = ["repository", "code_path", "python_bin", "experiment_group"]
"""
Script that takes an experiment definition file and creates a new folder for it.
It then downloads the parser code at the given commit hash and passes the parameters
to the training scripts. Useful when running a lot of experiments on a compute cluster.
"""
def get_args():
    """Parse the command line: a required experiment-definition path plus --dry_run."""
    parser = argparse.ArgumentParser(
        description="Helper script to run experiments in a repeatable way.",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument("experiment", type=str, help="Path to the experiment definition.")
    # --dry_run only prints what would be executed/created.
    parser.add_argument("--dry_run", action="store_true")
    return parser.parse_args()
def dict_generator(indict):
    """Yield the (key, value) leaf pairs of a nested dict.

    A non-dict argument is yielded as-is (single bare value).
    """
    if not isinstance(indict, dict):
        yield indict
        return
    for key, value in indict.items():
        if isinstance(value, dict):
            yield from dict_generator(value)
        else:
            yield key, value
if __name__ == "__main__":
    args = get_args()
    # Close the definition file deterministically (was left open before).
    with open(args.experiment) as definition_file:
        experiment_definition = yaml.safe_load(definition_file)
    experiment = experiment_definition["experiment"]

    # Fill in the templates in the path values
    experiment_copy = copy(experiment)
    for k, v in experiment.items():
        if not isinstance(v, str):
            continue
        experiment[k] = experiment[k].format(**experiment_copy)
        resolved = experiment[k]
        # Make sure the paths are empty and create the directories.
        # BUGFIX: the suffix check previously tested the VALUE (`v`) instead of
        # the key, and the filesystem operations used the *unformatted*
        # template string — so directories were checked/created against paths
        # still containing `{placeholders}`.
        if k.endswith("_path"):
            if os.path.exists(resolved):
                raise Exception(f"Folder already exists: {resolved}.")
            if args.dry_run:
                print(f"Would create folder: {resolved}")
            else:
                os.makedirs(resolved, exist_ok=True)
        elif k.endswith("_file"):
            if os.path.exists(resolved):
                raise Exception(f"File already exists: {resolved}.")
            if args.dry_run:
                print(f"Would create folder: {os.path.dirname(resolved)}")
            else:
                os.makedirs(os.path.dirname(resolved), exist_ok=True)

    # Clone the code base and switch to the required commit
    cmd = (
        f"git clone {experiment['repository']} {experiment['code_path']} &&"
        f"cd {experiment['code_path']} &&"
        f"git checkout --quiet {experiment['commit']}"
    )
    if args.dry_run:
        print(f"Would run: {cmd}")
    else:
        subprocess.run(cmd, shell=True)

    # Get all the arguments we want to pass to the trainer
    training_args = {}
    for k, v in dict_generator(experiment_definition):
        if k not in IGNORE_KEYS:
            training_args[k] = v

    arguments = []
    for option, value in training_args.items():
        if isinstance(value, list):
            value = " ".join([str(v) for v in value])
        if isinstance(value, bool):
            value = str(value).lower()
        if value != "":
            arguments.append(f"--{option} {value}")
    arguments = " ".join(arguments)

    # Run the experiment
    cmd = (
        f"PYTHONPATH=$PYTHONPATH:{experiment['code_path']} {experiment['python_bin']} "
        f"parseridge/train.py {arguments}"
    )
    if args.dry_run:
        print(f"Would run: {cmd}")
    else:
        subprocess.run(cmd, cwd=experiment["code_path"], shell=True)
|
import pygame
class MyFace(pygame.sprite.Sprite):
    """Sprite that walks its rect around a square: right, down, left, up."""

    # Direction cycle as (dx, dy) unit steps, in traversal order.
    _CYCLE = [(1, 0), (0, 1), (-1, 0), (0, -1)]

    def __init__(self, image_file, location):
        pygame.sprite.Sprite.__init__(self)
        self.image = pygame.image.load(image_file)
        self.rect = self.image.get_rect()
        self.rect.left, self.rect.top = location
        # Start by moving right.
        self._x = 1
        self._y = 0

    def move(self):
        """Advance 100 px in the current direction, then rotate the direction."""
        self.rect = self.rect.move(self._x * 100, self._y * 100)
        self._next_x_y()

    def _next_x_y(self):
        """Step (dx, dy) to the next entry of the clockwise cycle."""
        try:
            idx = self._CYCLE.index((self._x, self._y))
        except ValueError:
            # Unknown state resets to the first direction, like the original
            # fall-through branch.
            idx = -1
        self._x, self._y = self._CYCLE[(idx + 1) % len(self._CYCLE)]
if __name__ == '__main__':
    # Walk a face sprite around the window in a clockwise square path.
    pygame.init()
    width = 640
    height = 480
    screen = pygame.display.set_mode([width, height])
    screen.fill([255, 255, 255])
    red_face = MyFace("red_face.jpg", [20, 20])
    screen.blit(red_face.image, red_face.rect)
    pygame.display.flip()
    running = True
    while running:
        for e in pygame.event.get():
            if e.type == pygame.QUIT:
                running = False
        # ~10 FPS: delay, clear, advance the sprite, redraw.
        pygame.time.delay(100)
        screen.fill([255, 255, 255])
        red_face.move()
        screen.blit(red_face.image, red_face.rect)
        pygame.display.flip()
    pygame.quit()
|
from django.contrib import admin
from .models import Job, JobBox

# Expose Job and JobBox in the Django admin with the default ModelAdmin.
admin.site.register(Job)
admin.site.register(JobBox)
# Register your models here.
|
import requests
import json
import time
import base64
import hmac
import hashlib
# hard stuff, check this:
# https://gist.github.com/jordanbaucke/5812039
class Bitfinex:
    """Minimal Bitfinex v1 private-API client (balances endpoint).

    See https://gist.github.com/jordanbaucke/5812039 for the signing scheme.
    """

    def __init__(self, conf):
        self.conf = conf
        self.apiKey = self.conf.bitfinex_api_key
        self.secret = self.conf.bitfinex_secret
        self.url = "https://api.bitfinex.com/v1"

    def fetchData(self):
        """Fetch wallet balances; return the parsed JSON, or None on failure."""
        key = "balances"
        try:
            # json data object
            data = {}
            data["request"] = "/v1/{:s}".format(key)
            data["nonce"] = str(time.time())
            data["options"] = {}
            dataj = json.dumps(data)
            # BUGFIX: the original called base64.b64encode(bytes(dataj), "utf-8"),
            # which passes "utf-8" as the *altchars* argument and corrupts the
            # payload; encode the JSON to bytes first instead.
            datab = base64.b64encode(dataj.encode("utf-8"))
            # encrypt using secret key (HMAC-SHA384 over the base64 payload)
            datae = hmac.new(self.secret.encode("utf-8"), datab, hashlib.sha384).hexdigest()
            # create header
            header = {}
            header["X-BFX-APIKEY"] = self.apiKey
            header["X-BFX-PAYLOAD"] = datab
            header["X-BFX-SIGNATURE"] = datae
            r = requests.get("{:s}/{:s}".format(self.url, key), data={}, headers=header)
        except requests.exceptions.ConnectionError:
            print("failed to resolve bitfinex.com")
            # BUGFIX: the original fell through and hit unbound `r`/`j`.
            return None
        try:
            return json.loads(r.text)
        except Exception as e:
            print("failed to fetch data from bitfinex.com")
            print(str(e))
            return None
if __name__ == "__main__":
    # ugly, but only for testing purpose
    import sys, os
    sys.path.append(os.path.dirname("../"))
    from config import Config

    # Modernized to Python 3 print functions, consistent with the client class.
    bitfinex = Bitfinex(Config)
    print("fetch wallet")
    wallets = bitfinex.fetchData()
    print(wallets)
|
# L1 = ['Hello', 'World', 18, 'Apple', None]
# L2 = [x.lower() for x in L1 if isinstance(x,str) ]
# print(L2)
#
#
# g = (x % 2 for x in range(10))
#
#
# def fac(n):
# if n == 1 or n == 0:
# return 1
# else:
# return n * fac(n - 1)
#
#
# def triangles():
# # res = [1]
# n = 0
# while True:
# res = [int(fac(n) / (fac(a) * fac(n - a))) for a in list(range(n + 1))]
# yield res
# n += 1
#
# n = 0
# results = []
# for t in triangles():
# results.append(t)
# n = n + 1
# if n == 10:
# break
#
# for t in results:
# print(t)
#
# if results == [
# [1],
# [1, 1],
# [1, 2, 1],
# [1, 3, 3, 1],
# [1, 4, 6, 4, 1],
# [1, 5, 10, 10, 5, 1],
# [1, 6, 15, 20, 15, 6, 1],
# [1, 7, 21, 35, 35, 21, 7, 1],
# [1, 8, 28, 56, 70, 56, 28, 8, 1],
# [1, 9, 36, 84, 126, 126, 84, 36, 9, 1]
# ]:
# print('测试通过!')
# else:
# print('测试失败!')
#
#
# def triangles():
# L = [1]
# while 1:
# yield L
# L = [0] + L + [0]
# L = [L[i] + L[i + 1] for i in range(len(L) - 1)]
# Three equivalent ways of printing the same value: str.format,
# concatenation and %-formatting.
names = 'gzx '
print('wo de mingzi {0}'.format(names))
print('wo de mingzi' + names)
print('%s' % names)
|
#coding:utf-8
#!/usr/bin/env python
class invite:
    """Invite-code codec: maps an account id to an obfuscated base-62 string.

    The id is subtracted from 916132832 (= 62**5), base-62 encoded, and each
    character is shifted down by its position + 1, wrapping within 0-9A-Za-z.
    """

    # Base-62 alphabet: digits, then uppercase, then lowercase letters.
    base = [str(x) for x in range(10)] \
        + [chr(x) for x in range(ord('A'), ord('A') + 26)] \
        + [chr(x) for x in range(ord('a'), ord('a') + 26)]

    @staticmethod
    def generateCode(accountid):
        """Generate the invite code for *accountid*."""
        encoded = invite.dec2ary62(916132832 - accountid)
        out = []
        for pos, ch in enumerate(encoded):
            shifted = ord(ch) - pos - 1
            # Re-wrap values that fall into the gaps of the 0-9A-Za-z alphabet.
            if ord('Z') < shifted < ord('a'):
                shifted = shifted - 6
            elif ord('9') < shifted < ord('A'):
                shifted = shifted - 7
            elif shifted < ord('0'):
                shifted = ord('z') + 1 - (ord('0') - shifted)
            out.append(chr(shifted))
        return ''.join(out)

    @staticmethod
    def reverseCode(inviteCode):
        """Recover the account id encoded in *inviteCode*."""
        original = []
        for pos, ch in enumerate(inviteCode):
            shifted = ord(ch) + pos + 1
            # Inverse of the wrapping performed by generateCode().
            if ord('Z') < shifted < ord('a'):
                shifted = shifted + 6
            elif ord('9') < shifted < ord('A'):
                shifted = shifted + 7
            elif shifted > ord('z'):
                shifted = (ord('0') - 1) + (shifted - ord('z'))
            original.append(chr(shifted))
        return 916132832 - invite.ary622dec(original)

    @staticmethod
    def dec2ary62(string_num):
        """Encode a decimal number (int or numeric string) in base 62."""
        num = int(string_num)
        digits = []
        while num != 0:
            num, rem = divmod(num, 62)
            digits.append(invite.base[rem])
        return ''.join(str(d) for d in reversed(digits))

    @staticmethod
    def ary622dec(string_num):
        """Decode a base-62 string (or list of characters) to an int."""
        total = 0
        for digit in string_num:
            total = total * 62 + invite.base.index(digit)
        return total
|
import os
import json
from itertools import product
# Save files live under $XDG_DATA_HOME/macht (defaults to ~/.local/share).
xdg_data_home = (os.environ.get('XDG_DATA_HOME') or
                 os.path.join(os.path.expanduser('~'), '.local', 'share'))
macht_data_dir = os.path.join(xdg_data_home, 'macht')
def grid_to_dict(grid):
    """Serialize a rectangular tile grid into a JSON-friendly dict.

    Empty cells are skipped; `base` is taken from the last tile seen (tiles
    share one base), or 0 when the grid is empty.
    """
    tiles = []
    tile_base = 0
    for row_idx, row in enumerate(grid):
        for col_idx, tile in enumerate(row):
            if tile:
                tile_base = tile.base
                tiles.append({'row': row_idx, 'column': col_idx,
                              'exponent': tile.exponent})
    return {'rows': len(grid), 'cols': len(grid[0]), 'base': tile_base,
            'tiles': tiles}
def write_to_file(score, grids, filename=None):
    """Write the score and serialized grids to a JSON save file.

    filename defaults to <macht_data_dir>/default_save.json; the parent
    directory is created when missing.
    """
    filename = filename or os.path.join(macht_data_dir, 'default_save.json')
    directory = os.path.dirname(filename)
    # BUG FIX: a bare filename yields dirname '' and os.makedirs('') raises;
    # only attempt to create a directory when there is one.
    if directory:
        try:
            os.makedirs(directory)
        except getattr(__builtins__, 'FileExistsError', OSError) as err:
            if err.errno != 17:  # py2: OSError but not file exists (EEXIST)
                raise
    contents = {'score': score,
                'grids': [grid_to_dict(grid) for grid in grids]}
    with open(filename, 'w') as save_file:
        json.dump(contents, save_file, indent=2)
def load_from_file(filename=None):
    """Read a saved game state; return {} when no save file exists."""
    path = filename or os.path.join(macht_data_dir, 'default_save.json')
    try:
        with open(path) as save_file:
            return json.load(save_file)
    except getattr(__builtins__, 'FileNotFoundError', IOError) as err:
        if err.errno != 2:  # py2: IOError but not file-not-found (ENOENT)
            raise
    return {}
|
import sys
import string
# Count adjacent word pairs (bigrams) in the input text; write pairs that
# occur more than once, scaled per 1000 bigrams, to the output file.
reu_path = "C:/Users/Daway Chou-Ren/Documents/REU/"
filename = open(reu_path + sys.argv[1], 'r')
output = open("C:/Users/Daway Chou-Ren/Documents/REU/federalistOutput/" + sys.argv[2], 'w')
# will be a dictionary with tuples as keys, integers as values
pairs = dict()
total_count = 0.0
# BUG FIX: the old code used the Python 2 `string.maketrans` / two-argument
# `str.translate` API, which does not exist on Python 3; build the
# punctuation-deleting table once (it is loop-invariant) with str.maketrans.
table = str.maketrans('', '', ".,?;:\"!$")
for line in filename:
    # strip punctuation and whitespace, normalize case, break into words
    line = line.translate(table)
    line = line.strip().lower()
    words = line.split(" ")
    # skip this line if there are fewer than two words on it
    if len(words) < 2:
        continue
    # iterate from 0 up to the length of the list minus one (so we don't
    # inadvertently grab elements from beyond the length of the list)
    for i in range(len(words) - 1):
        # find the pair; convert to tuple so we can use it as a dictionary key
        pair = tuple(words[i:i + 2])
        if pair in pairs:
            pairs[pair] += 1
        else:
            pairs[pair] = 1
        total_count += 1
# print out pairs
for key in sorted(pairs):
    count = pairs[key]
    if count > 1:  # only print pairs that occur more than once
        bigram = " ".join(key)
        output.write("%s: %.3f\n" % (bigram, pairs[key] / total_count * 1000))
output.write(str(total_count))
|
#!/usr/bin/python3
import sys
from datetime import datetime
import timeit
import math
# Shared state used by both puzzle parts (Advent of Code day 3).
result01 = 0        # answer to part 1
result02 = 0        # answer to part 2
squarenumber = 0    # side length of the (odd-sized) spiral grid
hsquarenumber = 0   # half the side length: index of the spiral centre
spiral = []         # unused by the current solution (kept for the old one)
spiral2 = []        # grid of running adjacent sums (part 2)
x = 0               # current column while walking the spiral
y = 0               # current row while walking the spiral
n = 0               # unused by the current solution
calc_adjsum = 0     # flag: 1 while part 2 should accumulate adjacent sums
#old solution for part01
#old solution for part01
def part01():
    """Part 1: Manhattan distance from spiral square `iinput` to the centre.

    Builds a table of centre distances for every square up to the ring that
    contains `iinput`, then stores the answer in the module global
    `result01`.
    """
    global result01
    global iinput
    # smallest odd side length whose outermost ring contains iinput
    squarenumber = math.floor(math.sqrt(iinput))+1
    squarenumber += (squarenumber+1) % 2
    distances = []
    distances.append(0)
    # innermost ring (side 3): distances alternate 1, 2 around the centre
    for i in range(4):
        distances.append(1)
        distances.append(2)
    # each outer ring of side i: along every one of the four sides, the
    # distance falls towards the edge midpoint, then rises to the corner
    for i in range(5, squarenumber+1, 2):
        for l in range(4):
            for j in range(i-2, math.floor(i/2), -1):
                distances.append(j)
            for k in range(math.floor(i/2), i):
                distances.append(k)
    # -1 compensates the off-by-one between the spiral's 1-based square
    # numbering and the 0-based table index
    result01 = distances[iinput]-1
def calculate_adjacent_sum():
    """Return the sum of the already-filled neighbours of cell (x, y).

    Reads the 8-neighbourhood of spiral2[y][x], bounds-checked against the
    grid edges.  Side effect: the first sum that exceeds `iinput` is stored
    in `result02` and `calc_adjsum` is cleared so part 2 stops accumulating.
    """
    global result02
    global spiral2
    global iinput
    global x
    global y
    global squarenumber
    adjacent_sum = 0
    global calc_adjsum
    # row above (y-1): centre plus the two diagonals
    if (y > 0):
        adjacent_sum += spiral2[y-1][x]
        if (x > 0):
            adjacent_sum += spiral2[y-1][x-1]
        if (x < squarenumber-1):
            adjacent_sum += spiral2[y-1][x+1]
    # row below (y+1): centre plus the two diagonals
    if (y < squarenumber-1):
        adjacent_sum += spiral2[y+1][x]
        if (x > 0):
            adjacent_sum += spiral2[y+1][x-1]
        if (x < squarenumber-1):
            adjacent_sum += spiral2[y+1][x+1]
    # same row: left and right neighbours
    if (x > 0):
        adjacent_sum += spiral2[y][x-1]
    if (x < squarenumber-1):
        adjacent_sum += spiral2[y][x+1]
    # part 2 answer: first adjacent sum strictly larger than the input
    if (adjacent_sum > iinput):
        result02 = adjacent_sum
        calc_adjsum = 0
    return adjacent_sum
def draw_number():
    """Write the adjacent-sum value into the current spiral cell (x, y).

    No-op unless the part-2 walk has set `calc_adjsum`.
    """
    global calc_adjsum, n, spiral2, x, y
    if calc_adjsum != 1:
        return
    spiral2[y][x] = calculate_adjacent_sum()
def part02():
    """Part 2: walk the spiral storing running adjacent sums.

    Starts at the centre of an odd-sized grid and spirals outward;
    draw_number() fills each visited cell with the sum of its already-filled
    neighbours, and the first value exceeding `iinput` lands in `result02`.
    """
    global x
    global y
    global calc_adjsum
    global spiral2
    global squarenumber
    # smallest odd grid comfortably large enough to contain the answer
    squarenumber = math.floor(math.sqrt(iinput))+1
    squarenumber += (squarenumber+1) % 2
    hsquarenumber = math.floor(squarenumber/2)
    # start at the centre cell, seeded with 1
    x = hsquarenumber
    y = hsquarenumber
    calc_adjsum = 1
    #spiral=[[0] * squarenumber for i in range(squarenumber)]
    spiral2=[[0] * squarenumber for i in range(squarenumber)]
    #draw_number()
    spiral2[y][x] = 1
    # walk each ring of side i: one step right, then up, left, down, right
    for i in range(3, squarenumber+1, 2):
        x += 1
        draw_number()
        for j in range(i-2):
            y -= 1
            draw_number()
        for k in range(i-1):
            x -= 1
            draw_number()
        for l in range(i-1):
            y += 1
            draw_number()
        for m in range(i-1):
            x += 1
            draw_number()
    # for s in range(squarenumber):
    #     for t in range(squarenumber):
    #         if (spiral[s][t] == iinput):
    #             result = math.fabs(hsquarenumber - s) + math.fabs(hsquarenumber - t)
def bench(part=0, filename=''):
    """Benchmark harness: time one puzzle part and print its result.

    part: 1 or 2 (any other value is a no-op).  filename: puzzle input file
    holding a single integer; '' leaves iinput at 0.
    NOTE(review): timeit's setup re-imports the `day03` module, so the timed
    part reads day03's own `iinput`; the assignment below only takes effect
    when this module *is* day03 — confirm.
    """
    global iinput
    global result01
    global result02
    iinput = 0
    if filename != '':
        with open(filename, 'r') as f:
            iinput = int(f.read())
    if part == 1:
        duration01 = timeit.timeit("part01()", setup="from day03 import part01", number=1)
        # duration printed in microseconds
        print(3, 1, result01, int(duration01 * 10 ** 6))
    elif part == 2:
        duration02 = timeit.timeit("part02()", setup="from day03 import part02", number=1)
        print(3, 2, result02, int(duration02 * 10 ** 6))
# Main
if __name__ == '__main__':
    # read the puzzle input (a single integer) and time both parts,
    # printing day, part, answer and duration in microseconds
    with open(sys.argv[1], 'r') as f:
        iinput = int(f.read())
    duration01 = timeit.timeit("part01()", setup="from __main__ import part01", number=1)
    print(3, 1, result01, int(duration01 * 10 ** 6))
    duration02 = timeit.timeit("part02()", setup="from __main__ import part02", number=1)
    print(3, 2, result02, int(duration02 * 10 ** 6))
|
import selectiveSearch as ss
import gtcfeat as gtc
import numpy as np
import cv2
# extract feature(default filter is hog)
def extractFeature(img):
    """Return the feature vector for `img` (HOG features via gtcfeat)."""
    return gtc.getFeat(img, algorithm='hog')
# Perform selective search and return candidates
def processing(cv_img, clf, nonFiltered):
    """Run selective search on `cv_img` and classify the candidate windows.

    cv_img: BGR image (numpy array).
    clf: first-stage classifier applied to HOG features.
    nonFiltered: second-stage classifier applied to raw grayscale pixels.

    Returns a list of [x1, x2, y1, y2, pred] entries for candidates that
    passed the first-stage classifier.
    """
    # perform selective search
    rect = ss.selective_search(cv_img, opt='f')
    candidates = set()
    for r in rect:
        x, y, w, h = r
        tmp = (x, y, w, h)
        # excluding same rectangle (with different segments)
        if tmp in candidates:
            continue
        # distorted rects: too small, too large, or too elongated
        if w < 30 or h < 30 or w > 70 or h > 70 or w / h > 1.7 or h / w > 1.7:
            continue
        candidates.add(tmp)
    ret = []
    for x, y, w, h in candidates:
        x1 = x
        x2 = x + w - 1
        y1 = y
        y2 = y + h - 1
        cropped = cv_img[y1:y2, x1:x2]
        feat = [extractFeature(cropped)]
        # NOTE: the old `np.reshape(feat, (-1, 1))` discarded its result and
        # was a no-op, so it has been removed; the unused `cnt` counter is
        # gone as well.
        pred = clf.predict(feat)
        if pred == 1:
            # second stage: classify the raw 50x50 grayscale patch
            cropped = cv2.resize(cropped, (50, 50))
            cropped = cv2.cvtColor(cropped, cv2.COLOR_BGR2GRAY)
            feat = [np.resize(cropped, -1)]
            pred = nonFiltered.predict(feat)
            ret.append([x1, x2, y1, y2, pred])
    return ret
|
from MyList import LinkedList
from employee import Employee
def initial_display():
    """Print the payroll program's main menu."""
    menu = (
        "***CS172 PAYROLL PROGRAM***",
        "a. Add New Employee",
        "b. Calculate Weekly Wages",
        "c. Display Payroll",
        "d. Update Employee Hourly Rate",
        "e. Remove Employee from Payroll",
        "f. Exit the Program",
    )
    for entry in menu:
        print(entry)
initial_display()
user_input = input("Enter your choice\n")
main_object = LinkedList()
# Menu loop: runs while the choice is one of a-f; 'f' reaches the else
# branch below and exits, any other input ends the loop immediately.
while user_input.lower() in ["a","b","c","d","e","f"]:
    if user_input.lower() == "a":
        # add a new employee with the given id and hourly rate
        id_input = input("Enter the New Employee ID:\n")
        hour_input = int(input("Enter the Hourly Rate:\n"))
        main_object.AddNewMethod(Employee(id_input,hour_input))
        initial_display()
        user_input = input("Enter your choice\n")
    elif user_input.lower() == "b":
        main_object.WeeklyWages()
        initial_display()
        user_input = input("Enter your choice\n")
    elif user_input.lower() == "c":
        main_object.DisplayPayroll()
        initial_display()
        user_input = input("Enter your choice\n")
    elif user_input.lower() == "d":
        # change the hourly rate of an existing employee
        id_input1 = input("Enter the Employee ID:\n")
        hour_input1 = int(input("Enter the new hourly rate:\n"))
        main_object.HourlyRate(id_input1,hour_input1)
        initial_display()
        user_input = input("Enter your choice\n")
    elif user_input.lower() == "e":
        id_remove = input("Enter the Employee ID to remove:\n")
        main_object.EmpRemove(id_remove)
        initial_display()
        user_input = input("Enter your choice\n")
    else:
        # only reachable for 'f' (exit)
        break
|
#program for infinite loop
# NOTE: Python 2 script. `i` is never incremented inside the loop, so for
# any n >= 1 this prints 1 forever — the infinite loop the title promises.
print 'Example of infinite loop'
n=input('Enter A Number:')
i=1
while (i<=n):
    print i,
|
#
# run_cythonOU.py
# cythonOU
#
# Created by nicain on 5/18/10.
# Copyright (c) 2010 __MyCompanyName__. All rights reserved.
#
################################################################################
# Preamble:
################################################################################
# Import necessary packages:
from subprocess import call as call
# Compile cython extension cythonOU.pyx:
call('python setup.py build_ext --inplace', shell=True)
# Import cython extension:
from cythonOU import OUProcess
################################################################################
# Call the main function, as an example:
################################################################################
# Settings: Ornstein-Uhlenbeck process parameters (mean, variance, time
# constant) and the simulation horizon / step size.
mu = 1
var = 5
tau = .5
tMax = 1
dt = .01
# Call the function:
t, X = OUProcess(mu, var, tau, tMax, dt)
# Print results (Python 2 print statement):
print t, X
|
import time
import sys
import socket
import base64
import json
from threading import Thread
SERVER_PORT=11117          # TCP port the server listens on
HOST_IP='127.0.0.1'        # address the server binds to
SERVER_IP='172.30.1.10'    # NOTE(review): unused in this module
BUF_SIZE=8192              # NOTE(review): unused in this module
class Shell(Thread):
    """Interactive console thread: lists connected clients and flags one or
    all of them for shutdown via their setReady() method."""

    def __init__(self, server):
        Thread.__init__(self)
        self.server = server

    def run(self):
        """Menu loop reading operator commands from stdin."""
        while True:
            print('select menu')
            print('1) : get clients')
            print('2) : shut down client')
            print('3) : shut down all client')
            ch = input()
            if ch == '1':
                # list currently connected clients
                client_list = self.server.getClientThreads()
                print(str(len(client_list)) + ' clients connected')
                for client in client_list:
                    print('client id : ' + str(client.id))
                    print('client name : ' + client.name)
            elif ch == '2':
                client_list = self.server.getClientThreads()
                if len(client_list) == 0:
                    print('there is no connected client')
                    continue
                print('1) : select client')
                print('2) : back before menu')
                ch = input()
                if ch == '1':
                    print('input client id : ')
                    ch = input()
                    client_list = self.server.getClientThreads()
                    # BUG FIX: the old loop printed 'there is no client id'
                    # once for every non-matching client, and afterwards fell
                    # through to the menu checks with `ch` holding the client
                    # id (printing a spurious 'incorect menu' message).  Now
                    # the not-found message is printed only when no client
                    # matched at all.
                    for client in client_list:
                        if client.id == int(ch):
                            client.setReady()
                            print('shutdown ' + ch + ' client pc')
                            break
                    else:
                        print('there is no client id ' + ch)
                elif ch == '2':
                    continue
                else:
                    print('you choose incorect menu num.')
                    continue
            elif ch == '3':
                # flag every connected client for shutdown
                client_list = self.server.getClientThreads()
                for client in client_list:
                    client.setReady()
                    print('shutdown ' + client.name + ' pc')
            else:
                print('you choose incorect menu num.')
                continue
'''
main server has clientThread list
if connect client, make thread(Sender class) and and thread list(clientThreads)
and wait another connection from client
'''
class Server (Thread):
    """Accept loop: listens on (ip, port) and spawns one Client thread per
    incoming TCP connection, tracking them in clientThreads."""

    def __init__(self, ip, port):
        Thread.__init__(self)
        self.ip=ip
        self.port=port
        self.clientThreads=[]    # live Client threads
        self.packets=[]
        self.clientNum=0         # monotonically increasing client id

    def run(self):
        """Bind, then accept connections forever, one Client thread each."""
        serverSocket=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
        serverSocket.bind((self.ip, self.port))
        while True:
            print('wait client connect...')
            serverSocket.listen(0)
            clientSocket, addr=serverSocket.accept()
            print('client connect!')
            self.clientNum=self.clientNum+1
            clientThread=Client(self, clientSocket, self.clientNum)
            self.clientThreads.append(clientThread)
            clientThread.start()

    def getClientThreads(self):
        """Return the list of live Client threads."""
        return self.clientThreads

    def getClientCount(self):
        # BUG FIX: `self` was missing from the signature, so any call raised
        # TypeError instead of returning the count.
        return len(self.clientThreads)
'''
class that communicate with C# client
'''
class Client (Thread):
    """Per-connection thread: reads the client's identification packet, then
    waits until flagged ready and sends it a shutdown packet."""

    def __init__(self, server, clientSocket, clientNum):
        Thread.__init__(self)
        self.client=self
        self.clientSocket=clientSocket
        self.id=clientNum          # id assigned by the Server accept loop
        self.name=''               # filled in from the 'Device' packet
        self.server=server
        self.packet = {'IF_CODE' : 'SHUTDOWN', 'Client' : 'BACKEND/TEST', 'Type' : 'server' }
        self.ready=False           # set by Shell via setReady()

    def run(self):
        """Read the device-info packet, then wait for the shutdown flag."""
        # block until the client sends something
        while True:
            data=self.clientSocket.recv(1024)
            if data:
                packet=data.decode('utf8')
                break
        # packets are delimited by '<EOP'.  FIX: loop variable renamed so the
        # builtin `str` is no longer shadowed, and the bare `except:` is
        # narrowed to the JSON decode error actually expected here.
        for part in packet.split('<EOP'):
            if part=='': break
            try:
                parse=json.loads(part)
            except ValueError:
                # not JSON: echo it and keep scanning
                print(part+'\n> ', end='')
                continue
            if parse['IF_CODE']=='Device':
                print('client name : ' + parse['Client'])
                self.name=parse['Client']
        # busy-wait until the shell marks this client for shutdown
        while True:
            if self.ready==True:
                self.networkWrite()
                self.server.getClientThreads().remove(self)
                self.clientSocket.close()
                return

    def networkWrite(self):
        """Send the shutdown packet, terminated by the '<EOP>' delimiter."""
        # FIX: locals renamed so the builtins `str`/`bytes` are not shadowed
        payload = json.dumps(self.packet) + '<EOP>'
        self.clientSocket.sendall(payload.encode('utf8'))

    def setReady(self):
        """Flag this client for shutdown (called from the Shell thread)."""
        self.ready=True
def main():
    # start the accept loop and the operator shell; both run as threads
    server = Server(HOST_IP, SERVER_PORT)
    server.start()
    shell = Shell(server)
    shell.start()
if __name__ == '__main__':
    main()
|
import unittest
from katas.kyu_6.find_the_parity_outlier import find_outlier
class FindOutlierTestCase(unittest.TestCase):
    """find_outlier should return the single value whose parity differs
    from the rest of the list."""

    def test_equals(self):
        # one odd number among evens
        self.assertEqual(find_outlier([2, 4, 0, 100, 4, 11, 2602, 36]), 11)
    def test_equals_2(self):
        # one even number among odds (including a negative odd)
        self.assertEqual(find_outlier([160, 3, 1719, 19, 11, 13, -21]), 160)
    def test_equals_3(self):
        self.assertEqual(find_outlier([2, 6, 8, 10, 3]), 3)
|
from tkinter import *
from random import randint
from math import sqrt
# ___Globals___
lit_x = []
lit_y = []
vec_v1 = ''
vec_v2 = ''
vec_v3 = ''
c1 = 'black'
c2 = 'black'
o_x = 960
o_y = 500
colors = ['green', 'blue', 'red', 'orange', 'violet', 'pink', 'indigo', 'yellow']
class Vector(object):
    """A 3-component vector.

    Supports + and - componentwise, * as the cross product, scalar
    multiplication via the reflected operand (e.g. ``2 * v``), and the
    dot product through the custom ``__dmul__`` method.
    """

    def __init__(self, x, y, z):
        self.x = x
        self.y = y
        self.z = z

    def __add__(self, b):
        return Vector(self.x + b.x, self.y + b.y, self.z + b.z)

    def __sub__(self, b):
        return Vector(self.x - b.x, self.y - b.y, self.z - b.z)

    def __mul__(self, b):
        # cross product
        return Vector((self.y * b.z) - (self.z * b.y),
                      (self.z * b.x) - (self.x * b.z),
                      (self.x * b.y) - (self.y * b.x))

    def __rmul__(self, b):
        # scalar * vector
        return Vector(self.x * b, self.y * b, self.z * b)

    def __dmul__(self, b):
        # dot product (returns a scalar)
        return (self.x * b.x) + (self.y * b.y) + (self.z * b.z)

    def return_coords(self):
        """Return the components as a plain [x, y, z] list."""
        return [self.x, self.y, self.z]
# Module-level working vectors shared by the VecFunc callbacks below.
v1 = Vector(0, 0, 0)
v2 = Vector(0, 0, 0)
v3 = Vector(0, 0, 0)
class VecFunc(object):
    """Button callbacks: operate on the module-level vectors v1/v2/v3 and
    draw them on the shared canvas (origin at (960, 500), 20 px per unit)."""

    @staticmethod
    def _to_canvas(vec):
        """Map a vector's (x, y) components to canvas pixel coordinates.

        FIX: the original special-cased negative components, but both
        branches of every `if v.* < 0 / else` pair computed the identical
        expression, so the dead conditionals were removed.
        """
        return 960 + vec.x * 20, 500 - vec.y * 20

    def _draw_v3(self):
        """Replace the drawn result vector with v3 (only when it lies in
        the z == 0 plane, i.e. is representable on the 2-D canvas)."""
        global vec_v3
        graph_x3, graph_y3 = self._to_canvas(v3)
        if v3.z == 0:
            canvas.delete(vec_v3)
            vec_v3 = canvas.create_line(960, 500, graph_x3, graph_y3, width=2)

    def make_vec(self):
        """Read both entry rows, rebuild v1/v2 and draw them in new colors."""
        global v1, v2, vec_v1, vec_v2, c1, c2
        v1 = Vector(float(entry_x1.get()), float(entry_y1.get()), float(entry_z1.get()))
        v2 = Vector(float(entry_x2.get()), float(entry_y2.get()), float(entry_z2.get()))
        graph_x1, graph_y1 = self._to_canvas(v1)
        graph_x2, graph_y2 = self._to_canvas(v2)
        if v1.z == 0:
            canvas.delete(vec_v1)
            c1 = colors[randint(0, 7)]
            m.set("(" + c1 + ')')
            vec_v1 = canvas.create_line(960, 500, graph_x1, graph_y1, width=2, fill=c1)
        if v2.z == 0:
            canvas.delete(vec_v2)
            c2 = colors[randint(0, 7)]
            n.set("(" + c2 + ')')
            vec_v2 = canvas.create_line(960, 500, graph_x2, graph_y2, width=2, fill=c2)

    def add(self):
        """v3 = v1 + v2; draw it and report the result."""
        global vec_v3, v1, v2, v3
        v3 = v1 + v2
        self._draw_v3()
        add.set(f'Addition:- vector 3 ({v3.x}, {v3.y}, {v3.z})')

    def sub(self):
        """v3 = v1 - v2; draw it and report the result."""
        global vec_v3, v1, v2, v3
        v3 = v1 - v2
        self._draw_v3()
        sub.set(f'Subtraction:- vector 3 ({v3.x}, {v3.y}, {v3.z})')

    def c_mul(self):
        """v3 = v1 x v2 (cross product); draw it and report the result."""
        global vec_v3, v1, v2, v3
        v3 = v1 * v2
        self._draw_v3()
        cross.set(f'Cross Product:- vector 3 ({v3.x}, {v3.y}, {v3.z})')

    def d_mul(self):
        """Dot product: v3 becomes a scalar, nothing is drawn."""
        global vec_v3, v1, v2, v3
        v3 = v1.__dmul__(v2)
        dot.set(f'Dot Product:- {v3}')

    def determin(self):
        """Report the magnitude of the cross product of v1 and v2."""
        global vec_v3, v1, v2, v3
        v3 = v1 * v2
        d = sqrt((v3.x ** 2) + (v3.y ** 2) + (v3.z ** 2))
        deter.set(f"Determinant:- {d}")
def reset():
    """Clear the canvas and result labels, zero the vectors, redraw axes."""
    global x_axis, y_axis, lit_x, lit_y, v1, v2, v3
    v1 = Vector(0, 0, 0)
    v2 = Vector(0, 0, 0)
    v3 = Vector(0, 0, 0)
    dot.set('Dot Product:- ')
    cross.set('Cross Product:- ')
    add.set('Addition:- ')
    sub.set("Subtraction:- ")
    deter.set('Determinant:- ')
    canvas.delete('all')
    # redraw the axes and their tick marks
    y_axis = canvas.create_line(960, 1000, 960, 0)
    x_axis = canvas.create_line(0, 500, 1920, 500)
    for tick in lit_x:
        canvas.create_line(953, tick, 967, tick)
    for tick in lit_y:
        canvas.create_line(tick, 493, tick, 507)
def exit_():
    """Close the application window, ending mainloop."""
    win.destroy()
# Precompute tick-mark positions: one every 20 px along each axis.
for i in range(0, 1001):
    if i % 20 == 0:
        lit_x.append(i)
for i in range(0, 1921):
    if i % 20 == 0:
        lit_y.append(i)
v = VecFunc()
# ___UI___
win = Tk()
win.wm_attributes('-fullscreen', True)
canvas = Canvas(win, height=1000, width=1920, bg='light blue')
canvas.pack()
# ___lines___
y_axis = canvas.create_line(960, 1000, 960, 0)
x_axis = canvas.create_line(0, 500, 1920, 500)
for i in lit_x:
    canvas.create_line(953, i, 967, i)
for i in lit_y:
    canvas.create_line(i, 493, i, 507)
# ___labels___
label_vec1 = Label(win, text='VECTOR 1:-')
label_vec2 = Label(win, text='VECTOR 2:-')
label_vec1.place(height=25, width=100, x=40, y=1010)
label_vec2.place(height=25, width=100, x=40, y=1040)
label_x1 = Label(win, text='X')
label_x1.place(height=25, width=10, x=130, y=1010)
label_y1 = Label(win, text='Y')
label_y1.place(height=25, width=10, x=200, y=1010)
label_z1 = Label(win, text='Z')
label_z1.place(height=25, width=10, x=270, y=1010)
label_x2 = Label(win, text='X')
label_x2.place(height=25, width=10, x=130, y=1040)
label_y2 = Label(win, text='Y')
label_y2.place(height=25, width=10, x=200, y=1040)
label_z2 = Label(win, text='Z')
label_z2.place(height=25, width=10, x=270, y=1040)
# StringVars showing which random color each drawn vector received
m = StringVar()
n = StringVar()
color_label1 = Label(win, textvariable=m)
color_label1.place(height=25, width=45, x=10, y=1010)
color_label2 = Label(win, textvariable=n)
color_label2.place(height=25, width=45, x=10, y=1040)
dot = StringVar()
dot.set('Dot Product:- ')
label_dot_product = Label(win, textvariable=dot)
label_dot_product.place(height=25, width=120, x=1600, y=1010)
cross = StringVar()
cross.set('Cross Product:- ')
label_cross_product = Label(win, textvariable=cross)
label_cross_product.place(height=25, width=240, x=1540, y=1040)
add = StringVar()
add.set('Addition:- ')
label_add = Label(win, textvariable=add)
label_add.place(height=25, width=190, x=1200, y=1010)
sub = StringVar()
sub.set('Subtraction:- ')
label_sub = Label(win, textvariable=sub)
label_sub.place(height=25, width=240, x=1180, y=1040)
deter = StringVar()
deter.set("Determinant:- ")
label_deter = Label(win, textvariable=deter)
label_deter.place(height=25, width=170, x=960, y=1030)
# ___entries___
entry_x1 = Entry(win)
entry_y1 = Entry(win)
entry_z1 = Entry(win)
entry_x1.place(height=20, width=40, x=150, y=1010)
entry_y1.place(height=20, width=40, x=220, y=1010)
entry_z1.place(height=20, width=40, x=290, y=1010)
entry_x2 = Entry(win)
entry_y2 = Entry(win)
entry_z2 = Entry(win)
entry_x2.place(height=20, width=40, x=150, y=1040)
entry_y2.place(height=20, width=40, x=220, y=1040)
entry_z2.place(height=20, width=40, x=290, y=1040)
# ___Buttons___
vector_fy = Button(win, text='Graph', command=v.make_vec)
vector_fy.place(height=25, width=65, x=360, y=1030)
vec_add = Button(win, text='Add', command=v.add)
vec_add.place(height=25, width=65, x=450, y=1030)
vec_sub = Button(win, text='Subtract', command=v.sub)
vec_sub.place(height=25, width=70, x=540, y=1030)
vec_cmul = Button(win, text='Cross Product', command=v.c_mul)
vec_cmul.place(height=25, width=85, x=640, y=1030)
vec_dmul = Button(win, text='Dot product', command=v.d_mul)
vec_dmul.place(height=25, width=85, x=750, y=1030)
vec_deter = Button(win, text='Determinant', command=v.determin)
vec_deter.place(height=25, width=90, x=860, y=1030)
# NOTE(review): this assignment shadows the reset() function defined above;
# it works only because command=reset captured the function first.
reset = Button(win, text='Reset', command=reset, bg='light green', fg='blue')
reset.place(height=25, width=65, x=1780, y=1010)
exit_but = Button(win, text='Exit', command=exit_, bg='red', fg='light green')
exit_but.place(height=25, width=65, x=1780, y=1050)
win.mainloop()
|
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import animation
import matplotlib.patches as patches
from LucasKanade import LucasKanade
# write your script here, we recommend the above libraries for making your animation
# Track a template through the first 50 frames of the car sequence with
# Lucas-Kanade, applying a drift-correction step against frame 0.
frames = np.load('../data/carseq.npy')
n = frames.shape[2]
rect = np.zeros((n, 4))
# initial template box — TODO confirm coordinate order against LucasKanade
rect[0, :] = [59, 116, 145, 151]
p_prev = np.zeros(2)
for i in range(50):
    print(i)
    # initial p_n: frame-to-frame motion estimate
    p_n = LucasKanade(frames[:, :, i], frames[:, :, i + 1], rect[i,:], p0=np.zeros(2))
    # update: re-estimate against the first frame to suppress drift
    p_star = LucasKanade(frames[:, :, 0], frames[:, :, i + 1], rect[0, :], p0=p_prev+p_n)
    # print('p_n', p_n)
    # print('p_star',p_star)
    # accept the drift-corrected estimate only when it stays close to the
    # frame-to-frame estimate; otherwise keep the previous box
    if np.linalg.norm(p_prev+p_n-p_star) < 0.2:
        rect[i+1,:] = rect[0, :] + np.concatenate((p_star, p_star))
        p_prev = p_star
        # print('p_prev',p_prev)
    else:
        rect[i + 1, :] = rect[i, :]
# show the final tracked box overlaid on the last processed frame
fig, ax = plt.subplots(1)
w = rect[i, 3] - rect[i, 1]
h = rect[i, 2] - rect[i, 0]
co = (rect[i, 0], rect[i, 1])
box = patches.Rectangle(co, h, w, edgecolor='r', facecolor='none')
ax.imshow(frames[:, :, i], cmap='gray')
ax.add_patch(box)
fig.show()
# for i in [1, 100, 200, 300,400]:
# fig, ax = plt.subplots(1)
# w = rect[i, 3] - rect[i, 1]
# h = rect[i, 2] - rect[i, 0]
# co = (rect[i, 0], rect[i, 1])
# box = patches.Rectangle(co, h, w, edgecolor='r', facecolor='none')
# ax.imshow(frames[:, :, i], cmap='gray')
# ax.add_patch(box)
# fig.savefig('../results/1-4-'+str(i)+'.png')
# plt.close(fig)
|
from typing import Optional
from config.model import DockConfig
from eddn.journal_v1.model import JournalV1 as EddnJournalV1
from summary.model import DockSummary, Station
class DockHandler:
    """Accumulates EDDN docking events into a DockSummary and decides when
    the summary should be autosaved."""

    def __init__(self, config: DockConfig, target: DockSummary) -> None:
        self.config = config
        self.journal = target
        # countdown until the next autosave; refilled from config each save
        self.save_counter = self.config.autosave_wait

    def update(self, journal_v1: EddnJournalV1) -> bool:
        """Updates the summary and returns true if it's time to save"""
        system = journal_v1.message.system_name
        station = journal_v1.message.station_name
        if dock_entry := self.get_dock_entry(system=system, station=station):
            # refresh an existing entry only when its station_type is missing
            if not dock_entry.station_type:
                self._set_dock_entry(
                    system=system, station=station, journal_v1=journal_v1
                )
                print(
                    f" Updating existing dock entry {system}/{station} ({journal_v1.message.station_type})"
                )
        else:
            self._set_dock_entry(system=system, station=station, journal_v1=journal_v1)
        # autosave countdown: signal a save once every autosave_wait updates
        if self.save_counter <= 0:
            self.save_counter = self.config.autosave_wait
            return True
        else:
            self.save_counter -= 1
            return False

    def _dock_details(self, journal_v1: EddnJournalV1) -> Station:
        """Build a Station record from the fields of a journal message."""
        return Station(
            dist_from_star_ls=journal_v1.message.dist_from_star_ls,
            market_id=journal_v1.message.market_id,
            star_pos=journal_v1.message.star_pos,
            station_allegiance=journal_v1.message.station_allegiance,
            station_name=journal_v1.message.station_name,
            station_type=journal_v1.message.station_type,
            system_address=journal_v1.message.system_address,
            system_name=journal_v1.message.system_name,
            timestamp=journal_v1.message.timestamp,
        )

    def get_dock_entry(self, system: str, station: str) -> Optional[Station]:
        """Return the stored Station for system/station, or None."""
        key = f"{system}/{station}"
        return self.journal.stations.get(key, None)

    def _set_dock_entry(
        self, system: str, station: str, journal_v1: EddnJournalV1
    ) -> None:
        """Create or overwrite the summary entry keyed by system/station."""
        key = f"{system}/{station}"
        self.journal.stations[key] = self._dock_details(journal_v1=journal_v1)
|
from django.conf.urls import url
from . import views
# URL routes for the participant-team API endpoints.
urlpatterns = [
    # invite another user into a participant team
    url(r'^participant_team/(?P<pk>[0-9]+)/invite$', views.invite_participant_to_team,
        name='invite_participant_to_team'),
    # challenges a given participant team has entered
    url(r'^participant_team/(?P<participant_team_pk>[0-9]+)/challenge$', views.get_participant_team_challenge_list,
        name='get_participant_team_challenge_list'),
    url(r'^remove_self_from_participant_team/(?P<participant_team_pk>[0-9]+)$',
        views.remove_self_from_participant_team,
        name='remove_self_from_participant_team'),
    # remove a specific participant from a team
    url(r'^participant_team/(?P<participant_team_pk>[0-9]+)/participant/(?P<participant_pk>[0-9]+)$',
        views.delete_participant_from_team, name='delete_participant_from_team'),
    url(r'^participant_teams/challenges/(?P<challenge_pk>[0-9]+)/user$',
        views.get_teams_and_corresponding_challenges_for_a_participant,
        name='get_teams_and_corresponding_challenges_for_a_participant'),
    # list / detail views for participant teams
    url(r'^participant_team$', views.participant_team_list,
        name='get_participant_team_list'),
    url(r'^participant_team/(?P<pk>[0-9]+)$',
        views.participant_team_detail, name='get_participant_team_details'),
]
|
#!/usr/bin/python
def my_function(fname):
    """Print *fname* with the suffix '1' appended."""
    suffix = "1"
    print(fname + suffix)

my_function("http://example.com/")
|
import sys
from PIL import Image
def flip(image):
    """Return a horizontally mirrored copy of a PIL image."""
    width, height = image.size
    mirrored = image.copy()
    src = image.load()
    dst = mirrored.load()
    # copy each pixel from the horizontally opposite column
    for row in range(height):
        for col in range(width):
            dst[col, row] = src[width - col - 1, row]
    return mirrored
# NOTE: Python 2 script (print statement below).  Usage: flip.py IMAGE
if len(sys.argv) <= 1:
    print "missing image filename"
    sys.exit(1)
filename = sys.argv[1]
img = Image.open(filename)
img = img.convert("L") #grayscale
img.show()
img = flip(img)
img.show()
|
#!/usr/bin/env python
import code
import readline
import rlcompleter
import os
from novaclient import client
import openstack_variables
osvars = openstack_variables.get()
# Nova API client built from the user's OpenStack environment variables.
nclient = client.Client(
    2.0,
    osvars['OS_USERNAME'],
    osvars['OS_PASSWORD'],
    osvars['OS_TENANT_NAME'],
    osvars['OS_AUTH_URL'],
    no_cache=True
)
print("""
#################################################
# Nova API access available through 'nclient.*' #
# example: nclient.flavors.list() #
#################################################
""")
# Interactive REPL with tab completion over the current namespace.
# NOTE(review): `vars` shadows the builtin of the same name.
vars = globals()
vars.update(locals())
readline.set_completer(rlcompleter.Completer(vars).complete)
readline.parse_and_bind("tab: complete")
shell = code.InteractiveConsole(vars)
shell.interact()
|
import sys
from PyQt5.QtWidgets import QApplication, QMainWindow
from main_window_layout import Ui_MainWindow
# Build and show the generated main window, then enter the Qt event loop.
app = QApplication(sys.argv)
window = QMainWindow()
ui = Ui_MainWindow()
ui.setupUi(window)
window.show()
sys.exit(app.exec_())
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import timeit
import jk_simpleipcb
# Path to the echo component binary launched as the remote process.
binFilePath = "../../SimpleIPCB/ExampleUTF8Echo/bin/Release/ExampleUTF8Echo.exe"
b = jk_simpleipcb.SimpleInterProcessCommunicationBridge()
b.launchComponentProcess("MyFancyComponent", binFilePath, "str", "str")
NUMBER_OF_REPEATS = 100000
# NOTE(review): `timeit.time` reaches into timeit's internal import of the
# `time` module; a direct `import time` would be clearer.
start = timeit.time.time()
for i in range(0, NUMBER_OF_REPEATS):
    b.send("MyFancyComponent", "echo-test", "The quick brown fox jumps over the lazy dog.")
end = timeit.time.time()
totalDuration = end - start
print("Total duration for " + str(NUMBER_OF_REPEATS) + " message exchanges: " + str(totalDuration))
durationperRequest = totalDuration/NUMBER_OF_REPEATS
print("Total duration of a single message exchange: " + str(durationperRequest*1000) + " ms")
numberOfRequestsPerSecond = 1/durationperRequest
print("Number of requests possible per second: " + str(int(numberOfRequestsPerSecond)))
|
# 9개의 서로 다른 자연수가 주어질 때,
# 이들 중 최댓값을 찾고
# 그 최댓값이 몇 번째 수인지를 구하는 프로그램을 작성하시오.
# Read nine natural numbers from stdin, then print the maximum value and
# its 1-based position in the input order.
arr = [int(input()) for _ in range(9)]
maxnum = max(arr)
print(maxnum)
print(arr.index(maxnum) + 1)
|
# coding: utf-8
# # Problem 8: Balancing Act
#
# "Balancing a seesaw with Python data structures."
# **Background.** Suppose there is a plank of wood pivoted to a point so that it acts as a seesaw. There are weights attached to it at different locations. This module deals with predicting the seesaw behaviour given the distributed weights and pivot point. Let's illustrate it with an example.
# Suppose there are 6 friends sitting on the see-saw. Let's "number" these friends starting at 1, 2, ..., up to 6. Let the weight of each of the friends be 56, 52, 48, 64, 52, 70 (from left to right). The position of the pivot on which the seesaw rests is denoted by the position at which 0 occurs. Such a system is illustrated by following list:
# In[1]:
# Example seesaw: six riders' weights, with the pivot marked by the 0 entry.
w = [56, 52, 48, 0, 64, 52, 70]
# One can use rotational concepts to calculate the moment generated by each friend. The equation for the moment of friend $i$ is given by $M_i$, where
#
# $M_i = F_i * r_i.$
#
# Here, $F_i$ is the force exerted by friend $i$ and $r_i$ is the distance of weight $i$ from the pivot.
#
# For the forces, assume they are the same as the weight. Thus, the force of weight $i=1$ is 56, of weight $i=2$ is 52, and weight $i=6$ is 70.
#
# For the distances, let $r_i$ measure the absolute difference between the index of weight $i$ in the list and the index of the pivot. Thus, in above example, the amount of rotational "force" generated by each of the friends would be calculated as follows:
# In[2]:
# Moment of each rider: weight times distance (in list positions) from the pivot.
Friend_1 = 56*3
Friend_2 = 52*2
Friend_3 = 48*1
Friend_4 = 64*1
Friend_5 = 52*2
Friend_6 = 70*3
# The seesaw will be **balanced** if the amount of rotational forces on each side of the pivot are equal. Therefore, moments generated by individual weights on each side needs to be added and compared to check if seesaw will be balanced or tilted. Please run the cell below to check the behaviour for the seesaw whose weights are given by `w` above.
# In[4]:
#Run this cell
# Compare the total moments on each side of the pivot to predict the tilt.
left_moment = Friend_1 + Friend_2 + Friend_3
right_moment = Friend_4 + Friend_5 + Friend_6
print('<---#### Rotation Force measures ####--->')
print('Left_moment: ', left_moment, 'Right_moment: ', right_moment)
if left_moment > right_moment:
    print("The seesaw tilts to the left")
elif right_moment > left_moment:
    print("The seesaw tilts to the right")
else:
    print("The seesaw stays balanced")
# Since the sum of rotational forces on the right side of the pivot is greater than that on the left side, the seesaw tilts towards the right.
# Here is one more example to make the calculation clear:
# In[5]:
# Second example layout: pivot at index 4.
W = [53, 76, 87, 54, 0, 76, 52, 67]
# In[6]:
#rotational force of all elements will be :
# (F_5 is the pivot itself and contributes no moment)
F_1 = 53 * 4
F_2 = 76 * 3
F_3 = 87 * 2
F_4 = 54 * 1
F_6 = 76 * 1
F_7 = 52 * 2
F_8 = 67 * 3
# In[7]:
# Compare the totals for the second example layout.
left_moment = F_1 + F_2 + F_3 + F_4
right_moment = F_6 + F_7 + F_8
print('<---#### Rotation Force measures ####--->')
print('Left_moment: ', left_moment, 'Right_moment: ', right_moment)
if left_moment > right_moment:
    print("The seesaw tilts to the left")
elif right_moment > left_moment:
    print("The seesaw tilts to the right")
else:
    print("The seesaw stays balanced")
# **Exercise 0** (2 points)
#
# For a given Python list that represents weights attached to a plank at the indexed positions and the pivot position (denoted by zero), write a function - `get_moment(weights, ordered_pos)` which returns the rotational force of an element with respect to the pivot. Your function should take as input two elements, the list of weights (`weights`) and the position of a target element in `weights` (i.e., a list index, `ordered_pos`). It should return the individual contribution of the weight `weights[ordered_pos]` to the rotational force.
#
# You can assume that the list of weights will always have exactly one pivot, i.e., one and only one element as 0, and all other elements will be greater than zero. If the pivot position is passed as an argument, its associated force will be 0 (Recall $M_i = F_i * r_i$ and $r_i$ will be 0).
# In[8]:
def get_moment(weights, ordered_pos):
    """Return the moment of weights[ordered_pos] about the pivot (the 0 entry)."""
    assert ordered_pos < len(weights)
    ### BEGIN SOLUTION
    lever_arm = abs(weights.index(0) - ordered_pos)
    return weights[ordered_pos] * lever_arm
# In[9]:
## Test cell: `single_moment`
def check_moment(w, p, v):
    """Assert that get_moment(w, p) equals the expected value v."""
    # NOTE(review): the format string has one placeholder but receives two
    # arguments; `w` is silently ignored.
    msg = "<-----Calculating moment for index {} in weights----->".format(p, w)
    print(msg)
    assert get_moment(w, p) == v
    print("Passed: Correct moment calculated")
check_moment([4, 3, 5, 10, 22, 0, 8, 12, 32], 1, 12)
check_moment([0, 3, 5, 10, 22, 13, 8, 12, 32], 4, 88)
check_moment([11, 3, 5, 10, 22, 13, 8, 12, 0],4, 88 )
print("\n(Passed!)")
# **Exercise 1** (3 points)
#
# Write a function `sum_moment(weights, side)` that returns the total rotational force around the pivot on a given side. Your function should accept two parameters, `weight` and `side`. The parameter `weight` represents the list of ordered weights and the parameter `side` is one of two values, either the string `'left'` or the string `'right'`, which represents whether we want the total sum of rotational forces to the left of the pivot or the right, respectively. Refer to the initial discussion on how to calculate the value.
# In[10]:
def sum_moment(weights, side):
    """Return the total moment on the given side ('left' or 'right') of the pivot."""
    ### BEGIN SOLUTION
    # orient the list so the requested side lies before the pivot
    ordered = weights if side == 'left' else weights[::-1]
    pivot_position = ordered.index(0)
    return sum(get_moment(ordered, pos) for pos in range(pivot_position))
# In[11]:
## Test cell: `net_moment`
def check_total_moment(w, s, v):
    """Assert that sum_moment(w, s) equals v, printing progress as we go."""
    print("<-----Calculating total moment on {} side in weights----->".format(s, w))
    assert sum_moment(w, s) == v
    print("Passed: Correct moment calculated")


check_total_moment([4, 3, 5, 10, 22, 0, 8, 12, 32], 'left', 89)
check_total_moment([4, 3, 5, 10, 22, 0, 8, 12, 32], 'right', 128)
check_total_moment([0, 3, 5, 10, 22, 13, 8, 12, 32], "right", 584)
check_total_moment([0, 3, 5, 10, 22, 13, 8, 12, 32], "left", 0)
check_total_moment([11, 3, 5, 10, 22, 13, 8, 12, 0], 'left', 344)
check_total_moment([11, 3, 5, 10, 22, 13, 8, 12, 0], 'right', 0)
print("\n(Passed!)")
# **Exercise 2** (2 point)
#
# Write a function `get_tilt(weights)`, which determines whether the seesaw tilts to the left, tilts to the right, or stays balanced. The function must only take the `weights` list as the input parameter. It should return one of three strings, `"left"`, `"right"`, or `"balanced"`, depending on the tilt of the seesaw.
# In[12]:
def get_tilt(weights):
    """Return 'left', 'right', or 'balanced' depending on which side's moment dominates."""
    ### BEGIN SOLUTION
    left = sum_moment(weights, "left")
    right = sum_moment(weights, "right")
    if left == right:
        return "balanced"
    return "left" if left > right else "right"
# In[13]:
## Test cell: `tilt_direction`
def check_tilt(w, v):
    """Assert that get_tilt(w) equals v, printing progress as we go."""
    print("<-----Finding tilt of the weights {}----->".format(w))
    assert get_tilt(w) == v
    print("Passed: Correct Tilt direction")


check_tilt([4, 3, 5, 10, 22, 0, 8, 12, 32], 'right')
check_tilt([4, 13, 5, 10, 22, 0, 8, 12, 32], 'left')
check_tilt([0, 13, 5, 10, 22, 11, 8, 12, 32], 'right')
check_tilt([4, 13, 5, 10, 22, 0, 15, 12, 30], 'balanced')
print("\n(Passed!)")
# **Exercise 3** (3 points)
#
# Knowing that a given list of weights is tilted towards one side, suppose we want to know the minimum weight that must be added to the opposite side to balance the seesaw.
#
# You can think of the problem in the following way. To balance the seesaw, you can add weights to different positions on the opposite end. If you were to add only a single weight at any position on the opposite side, what is the minimum it should be?
#
# If the seesaw is already balanced, your code must return 0 because no more weight needs to be added.
#
# > Hint: Think of how to maximize the moment for a given value of force.
# In[14]:
def add_minimum_weight(weights):
    """Return the minimum single weight that balances the plank.

    The weight is assumed to be placed at the far end of the lighter side
    (the longest lever arm, hence the minimum force needed).  Returns 0 if
    the plank is already balanced.
    """
    ### BEGIN SOLUTION
    tilt = get_tilt(weights)
    if tilt == "balanced":
        # Nothing to add.  Returning early also avoids a division by zero
        # when the pivot sits at an end of the plank (arm would be 0).
        return 0
    moment_left = sum_moment(weights, "left")
    moment_right = sum_moment(weights, "right")
    pivot_index = weights.index(0)
    if tilt == "left":
        # Add on the right: the farthest position is the last index.
        arm = len(weights) - pivot_index - 1
    else:
        # Add on the left: the farthest position is index 0.
        arm = pivot_index
    moment_diff = abs(moment_right - moment_left)
    return moment_diff / arm
# In[15]:
# Test cell: `minimum_weight`
import random
import numpy as np
def build_plank():
    """Build a random plank: length 3-20, one interior pivot (0), other weights in (1, 101].

    Random draws happen in the same order as before (length, pivot, then one
    draw per position), so results are reproducible under a fixed seed.
    """
    length = random.randint(3, 20)
    pivot = random.randint(1, length - 2)
    plank = [np.round(random.random() * 100 + 1, 2) for _ in range(length)]
    plank[pivot] = 0
    return plank
def test_minimum_weight():
    """Randomized check: adding add_minimum_weight() at the far end of the
    lighter side must balance the plank (up to floating-point error)."""
    plank = build_plank()
    print("<-----Finding minimum weight that can be added on {} to balance it----->".format(plank))
    extra = add_minimum_weight(plank)
    direction = get_tilt(plank)
    balanced = plank.copy()
    if direction == "left":
        balanced[len(balanced) - 1] += extra
    elif direction == "right":
        balanced[0] += extra
    residual = abs(sum_moment(balanced, "left") - sum_moment(balanced, "right"))
    assert residual <= 10e-10
    print("floating point error is {}".format(residual))
    print("Passed: Correct weight identified")


n_tests = 10
for t in range(n_tests):
    test_minimum_weight()
# **Fin!** This cell marks the end of this part. Don't forget to save, restart and rerun all cells, and submit it. When you are done, proceed to other parts.
|
# Solution of the challenge LongestWord proposed on Coderbyte at https://coderbyte.com/challenges
"""
After converting the input to a list, we should remove all the characters which are not letters. Therefore,
the words are stored into a list. Then, after storing each word length into a list, we should get the index of the
first word with maximal length and print it.
"""
def LongestWord(sen):
    """Return the first space-separated token of *sen* containing the most
    alphabetic characters (non-letters do not count toward length, but the
    token is returned as-is, punctuation included)."""
    tokens = sen.split(' ')
    letter_counts = [sum(ch.isalpha() for ch in tok) for tok in tokens]
    return tokens[letter_counts.index(max(letter_counts))]
# keep this function call here
# Coderbyte harness entry point: reads one line from stdin and prints the result.
print(LongestWord(input()))
|
import autodisc as ad
class IsFiniteBinaryObjectClassifier(ad.core.Classifier):
    """Binary classifier: True iff the observations contain finite segments
    (as determined by ad.helper.statistics.calc_is_segments_finite)."""

    @staticmethod
    def default_config():
        """Default config: segment radius r=1, tolerance tol=0.1."""
        cfg = ad.core.Classifier.default_config()
        cfg.r = 1
        cfg.tol = 0.1
        return cfg

    def __init__(self, config=None, **kwargs):
        super().__init__(config=config, **kwargs)
        # Populated by calc(); kept for later inspection.
        self.segmented_image = None
        self.finite_segments = None

    def calc(self, observations, statistics=None):
        """Run segment-finiteness analysis; cache intermediates and return the verdict."""
        self.finite_segments, (self.segmented_image, _), _ = ad.helper.statistics.calc_is_segments_finite(
            observations, tol=self.config.tol, r=self.config.r)
        return bool(self.finite_segments)
|
__author__ = 'Elisabetta Ronchieri'
import datetime
import time
import os
import unittest
import inspect
from tstorm.utils import config
from tstorm.commands import ping
from tstorm.commands import protocol
from tstorm.commands import ls
from tstorm.commands import mkdir
from tstorm.commands import cp
from tstorm.commands import rm
from tstorm.commands import rmdir
from tstorm.utils import cksm
from tstorm.utils import utils
class LoadsTest(unittest.TestCase):
    """StoRM client load tests (Python 2 syntax: `except E, err` / `print`).

    Each test issues one StoRM client command, asserts on its parsed output,
    and records PASSED/FAILED through the self.lfn log-file helper.  Several
    tests assume a specific precondition (file present/absent, directory
    present/absent) set up by the order in which the runner invokes them —
    NOTE(review): confirm against the suite driver.
    """
    def __init__(self, testname, tfn, ifn, dfn, bifn, uid, lfn):
        """testname: test method to run; tfn: test-settings file; ifn: local
        input file; dfn: remote destination file name; bifn: big input file;
        uid: unique-id provider; lfn: log-file helper."""
        super(LoadsTest, self).__init__(testname)
        self.tsets = config.TestSettings(tfn).get_test_sets()
        self.ifn = ifn
        self.dfn = dfn
        self.bifn = bifn
        self.id = uid.get_id()
        self.lfn = lfn
    def test_storm_get_transfer_protocols(self):
        """storm-gtp: endpoint must report PASS and exactly 6 transfer protocols."""
        stack_value = inspect.stack()[0]
        path = stack_value[1]
        method = stack_value[3]
        try:
            storm_protocol = protocol.StoRMGtp(self.tsets['general']['endpoint'])
            self.lfn.put_cmd(storm_protocol.get_command())
            protocol_result = storm_protocol.get_output()
            msg = 'storm gtp status'
            self.assert_(protocol_result['status'] == 'PASS',
                         '%s, %s - FAILED, %s, Test ID %s' %
                         (path, method, msg, self.id))
            msg = 'The number of supported protocols is not 6'
            self.assertEqual(len(protocol_result['transferProtocol']), 6,
                             '%s, %s - FAILED, %s, Test ID %s' %
                             (path, method, msg, self.id))
        except AssertionError, err:
            print err
            self.lfn.put_result('FAILED')
        else:
            self.lfn.put_result('PASSED')
        self.lfn.flush_file()
    def test_storm_ls_unexist_file(self):
        """storm-ls on a missing file must FAIL."""
        stack_value = inspect.stack()[0]
        path = stack_value[1]
        method = stack_value[3]
        try:
            storm_ls = ls.StoRMLs(self.tsets['general']['endpoint'],
                                  self.tsets['general']['accesspoint'], self.dfn)
            self.lfn.put_cmd(storm_ls.get_command())
            ls_result = storm_ls.get_output()
            msg = 'storm ls status'
            self.assert_(ls_result['status'] == 'FAILURE',
                         '%s, %s - FAILED, %s, Test ID %s' %
                         (path, method, msg, self.id))
        except AssertionError, err:
            print err
            self.lfn.put_result('FAILED')
        else:
            self.lfn.put_result('PASSED')
        self.lfn.flush_file()
    def test_storm_ls_unexist_dir(self):
        """storm-ls on the missing parent directory of dfn must FAIL."""
        # Only meaningful when dfn actually has a parent directory component.
        if '/' in self.dfn:
            stack_value = inspect.stack()[0]
            path = stack_value[1]
            method = stack_value[3]
            try:
                a=os.path.dirname(self.dfn)
                storm_ls = ls.StoRMLs(self.tsets['general']['endpoint'],
                                      self.tsets['general']['accesspoint'], a)
                self.lfn.put_cmd(storm_ls.get_command())
                ls_result = storm_ls.get_output()
                msg = 'storm ls status'
                self.assert_(ls_result['status'] == 'FAILURE',
                             '%s, %s - FAILED, %s, Test ID %s' %
                             (path, method, msg, self.id))
            except AssertionError, err:
                print err
                self.lfn.put_result('FAILED')
            else:
                self.lfn.put_result('PASSED')
            self.lfn.flush_file()
    def test_storm_ls_dir(self):
        """storm-ls on the (existing) parent directory of dfn must PASS."""
        if '/' in self.dfn:
            stack_value = inspect.stack()[0]
            path = stack_value[1]
            method = stack_value[3]
            try:
                a=os.path.dirname(self.dfn)
                storm_ls = ls.StoRMLs(self.tsets['general']['endpoint'],
                                      self.tsets['general']['accesspoint'], a)
                self.lfn.put_cmd(storm_ls.get_command())
                ls_result = storm_ls.get_output()
                msg = 'storm ls status'
                self.assert_(ls_result['status'] == 'PASS',
                             '%s, %s - FAILED, %s, Test ID %s' %
                             (path, method, msg, self.id))
            except AssertionError, err:
                print err
                self.lfn.put_result('FAILED')
            else:
                self.lfn.put_result('PASSED')
            self.lfn.flush_file()
    def test_storm_ls_file(self):
        """storm-ls on an existing file must PASS and report the expected checksum."""
        stack_value = inspect.stack()[0]
        path = stack_value[1]
        method = stack_value[3]
        try:
            storm_ls = ls.StoRMLs(self.tsets['general']['endpoint'],
                                  self.tsets['general']['accesspoint'], self.dfn)
            self.lfn.put_cmd(storm_ls.get_command())
            ls_result = storm_ls.get_output()
            msg = 'storm ls status'
            self.assert_(ls_result['status'] == 'PASS',
                         '%s, %s - FAILED, %s, Test ID %s' %
                         (path, method, msg, self.id))
            # Compare the server-side checksum with the local file's checksum.
            cksm_lf = cksm.CksmLf(self.ifn)
            cksm_result = cksm_lf.get_output()
            new_check_value = ls_result['checkSumValue'] + ' (' + ls_result['checkSumType'] + ')'
            msg = 'Wrong checksum'
            self.assert_(new_check_value == cksm_result['Checksum'],
                         '%s, %s - FAILED, %s, Test ID %s' %
                         (path, method, msg, self.id))
        except AssertionError, err:
            print err
            self.lfn.put_result('FAILED')
        else:
            self.lfn.put_result('PASSED')
        self.lfn.flush_file()
    def test_storm_ls_fake_file(self):
        """storm-ls variant expecting PASS (message string says 'lcg ls')."""
        stack_value = inspect.stack()[0]
        path = stack_value[1]
        method = stack_value[3]
        try:
            storm_ls = ls.StoRMLs(self.tsets['general']['endpoint'],
                                  self.tsets['general']['accesspoint'], self.dfn)
            self.lfn.put_cmd(storm_ls.get_command())
            ls_result = storm_ls.get_output()
            msg = 'lcg ls status'
            self.assert_(ls_result['status'] == 'PASS',
                         '%s, %s - FAILED, %s, Test ID %s' %
                         (path, method, msg, self.id))
        except AssertionError, err:
            print err
            self.lfn.put_result('FAILED')
        else:
            self.lfn.put_result('PASSED')
        self.lfn.flush_file()
    def test_storm_mkdir(self):
        """storm-mkdir each path component of dfn's parent; all must PASS."""
        if '/' in self.dfn:
            stack_value = inspect.stack()[0]
            path = stack_value[1]
            method = stack_value[3]
            try:
                a=os.path.dirname(self.dfn)
                storm_mkdir = mkdir.StoRMMkdir(self.tsets['general']['endpoint'],
                                               self.tsets['general']['accesspoint'], a)
                # Create the directory tree one component at a time, top-down.
                dtc=a.split('/')
                dtc=dtc[1:]
                y='/'
                for x in dtc:
                    if x != '':
                        self.lfn.put_cmd(storm_mkdir.get_command(y + x))
                        y = y + x + '/'
                mkdir_result = storm_mkdir.get_output()
                msg = 'storm mkdir status'
                for x in mkdir_result['status']:
                    self.assert_(x == 'PASS',
                                 '%s, %s - FAILED, %s, Test ID %s' %
                                 (path, method, msg, self.id))
            except AssertionError, err:
                print err
                self.lfn.put_result('FAILED')
            else:
                self.lfn.put_result('PASSED')
            self.lfn.flush_file()
    def test_storm_mkdir_exist_dir(self):
        """storm-mkdir on already-existing directories; statuses still expected PASS."""
        if '/' in self.dfn:
            stack_value = inspect.stack()[0]
            path = stack_value[1]
            method = stack_value[3]
            try:
                a=os.path.dirname(self.dfn)
                storm_mkdir = mkdir.StoRMMkdir(self.tsets['general']['endpoint'],
                                               self.tsets['general']['accesspoint'], a)
                dtc=a.split('/')
                dtc=dtc[1:]
                y='/'
                for x in dtc:
                    if x != '':
                        self.lfn.put_cmd(storm_mkdir.get_command(y + x))
                        y = y + x + '/'
                mkdir_result = storm_mkdir.get_output()
                msg = 'storm mkdir status'
                for x in mkdir_result['status']:
                    self.assert_(x == 'PASS',
                                 '%s, %s - FAILED, %s, Test ID %s' %
                                 (path, method, msg, self.id))
            except AssertionError, err:
                print err
                self.lfn.put_result('FAILED')
            else:
                self.lfn.put_result('PASSED')
            self.lfn.flush_file()
    def test_storm_rm_file(self):
        """storm-rm on an existing file must PASS."""
        stack_value = inspect.stack()[0]
        path = stack_value[1]
        method = stack_value[3]
        try:
            storm_rm = rm.StoRMRm(self.tsets['general']['endpoint'],
                                  self.tsets['general']['accesspoint'], self.dfn)
            self.lfn.put_cmd(storm_rm.get_command())
            rm_result = storm_rm.get_output()
            msg = 'storm rm status'
            self.assert_(rm_result['status'] == 'PASS',
                         '%s, %s - FAILED, %s, Test ID %s' %
                         (path, method, msg, self.id))
        except AssertionError, err:
            print err
            self.lfn.put_result('FAILED')
        else:
            self.lfn.put_result('PASSED')
        self.lfn.flush_file()
    def test_storm_rm_unexist_file(self):
        """storm-rm on a missing file must FAIL."""
        stack_value = inspect.stack()[0]
        path = stack_value[1]
        method = stack_value[3]
        try:
            storm_rm = rm.StoRMRm(self.tsets['general']['endpoint'],
                                  self.tsets['general']['accesspoint'], self.dfn)
            self.lfn.put_cmd(storm_rm.get_command())
            rm_result = storm_rm.get_output()
            # NOTE(review): message says 'rpm'; presumably meant 'rm'.
            msg = 'storm rpm status'
            self.assert_(rm_result['status'] == 'FAILURE',
                         '%s, %s - FAILED, %s, Test ID %s' %
                         (path, method, msg, self.id))
        except AssertionError, err:
            print err
            self.lfn.put_result('FAILED')
        else:
            self.lfn.put_result('PASSED')
        self.lfn.flush_file()
    def test_storm_rm_dir(self):
        """storm-rmdir each directory from dfn's parent up to '/'; all must PASS."""
        if '/' in self.dfn:
            stack_value = inspect.stack()[0]
            path = stack_value[1]
            method = stack_value[3]
            try:
                a=os.path.dirname(self.dfn)
                storm_rmdir = rmdir.StoRMRmdir(self.tsets['general']['endpoint'],
                                               self.tsets['general']['accesspoint'], a)
                # Remove the tree bottom-up (deepest directory first).
                y=a
                while y != '/':
                    self.lfn.put_cmd(storm_rmdir.get_command(y))
                    y=os.path.dirname(y)
                rmdir_result = storm_rmdir.get_output()
                msg = 'storm rm status'
                for x in rmdir_result['status']:
                    self.assert_(x == 'PASS',
                                 '%s, %s - FAILED, %s, Test ID %s' %
                                 (path, method, msg, self.id))
            except AssertionError, err:
                print err
                self.lfn.put_result('FAILED')
            else:
                self.lfn.put_result('PASSED')
            self.lfn.flush_file()
    def test_storm_rm_unexist_dir(self):
        """storm-rmdir on missing directories; every status must be FAILURE."""
        if '/' in self.dfn:
            stack_value = inspect.stack()[0]
            path = stack_value[1]
            method = stack_value[3]
            try:
                a=os.path.dirname(self.dfn)
                storm_rmdir = rmdir.StoRMRmdir(self.tsets['general']['endpoint'],
                                               self.tsets['general']['accesspoint'], a)
                y=a
                while y != '/':
                    self.lfn.put_cmd(storm_rmdir.get_command(y))
                    y=os.path.dirname(y)
                rmdir_result = storm_rmdir.get_output()
                msg = 'storm rm status'
                for x in rmdir_result['status']:
                    self.assert_(x == 'FAILURE',
                                 '%s, %s - FAILED, %s, Test ID %s' %
                                 (path, method, msg, self.id))
            except AssertionError, err:
                print err
                self.lfn.put_result('FAILED')
            else:
                self.lfn.put_result('PASSED')
            self.lfn.flush_file()
    def test_storm_prepare_to_get(self):
        """storm-ptg on an existing file must PASS."""
        stack_value = inspect.stack()[0]
        path = stack_value[1]
        method = stack_value[3]
        try:
            storm_ptg = cp.StoRMPtg(self.tsets['general']['endpoint'],
                                    self.tsets['general']['accesspoint'], self.dfn)
            self.lfn.put_cmd(storm_ptg.get_command())
            ptg_result = storm_ptg.get_output()
            msg = 'storm ptg status'
            self.assert_(ptg_result['status'] == 'PASS',
                         '%s, %s - FAILED, %s, Test ID %s' %
                         (path, method, msg, self.id))
        except AssertionError, err:
            print err
            self.lfn.put_result('FAILED')
        else:
            self.lfn.put_result('PASSED')
        self.lfn.flush_file()
    def test_storm_prepare_to_get_unexist_file(self):
        """storm-ptg on a missing file must FAIL."""
        stack_value = inspect.stack()[0]
        path = stack_value[1]
        method = stack_value[3]
        try:
            storm_ptg = cp.StoRMPtg(self.tsets['general']['endpoint'],
                                    self.tsets['general']['accesspoint'], self.dfn)
            self.lfn.put_cmd(storm_ptg.get_command())
            ptg_result = storm_ptg.get_output()
            msg = 'storm ptg status'
            self.assert_(ptg_result['status'] == 'FAILURE',
                         '%s, %s - FAILED, %s, Test ID %s' %
                         (path, method, msg, self.id))
        except AssertionError, err:
            print err
            self.lfn.put_result('FAILED')
        else:
            self.lfn.put_result('PASSED')
        self.lfn.flush_file()
    def test_storm_release_file(self):
        """storm-ptg then storm-rf (release) with the returned token; both must PASS."""
        stack_value = inspect.stack()[0]
        path = stack_value[1]
        method = stack_value[3]
        try:
            storm_ptg = cp.StoRMPtg(self.tsets['general']['endpoint'],
                                    self.tsets['general']['accesspoint'], self.dfn)
            self.lfn.put_cmd(storm_ptg.get_command())
            ptg_result = storm_ptg.get_output()
            msg = 'storm ptg status'
            self.assert_(ptg_result['status'] == 'PASS',
                         '%s, %s - FAILED, %s, Test ID %s' %
                         (path, method, msg, self.id))
            # Release the file using the request token returned by ptg.
            storm_rf = cp.StoRMRf(self.tsets['general']['endpoint'],
                                  self.tsets['general']['accesspoint'], self.dfn,
                                  ptg_result['requestToken'])
            self.lfn.put_cmd(storm_rf.get_command())
            rf_result = storm_rf.get_output()
            msg = 'storm rf status'
            self.assert_(rf_result['status'] == 'PASS',
                         '%s, %s - FAILED, %s, Test ID %s' %
                         (path, method, msg, self.id))
        except AssertionError, err:
            print err
            self.lfn.put_result('FAILED')
        else:
            self.lfn.put_result('PASSED')
        self.lfn.flush_file()
    def test_storm_prepare_to_put(self):
        """storm-ptp on a new file must PASS."""
        stack_value = inspect.stack()[0]
        path = stack_value[1]
        method = stack_value[3]
        try:
            storm_ptp = cp.StoRMPtp(self.tsets['general']['endpoint'],
                                    self.tsets['general']['accesspoint'], self.dfn)
            self.lfn.put_cmd(storm_ptp.get_command())
            ptp_result = storm_ptp.get_output()
            msg = 'storm ptp status'
            self.assert_(ptp_result['status'] == 'PASS',
                         '%s, %s - FAILED, %s, Test ID %s' %
                         (path, method, msg, self.id))
        except AssertionError, err:
            print err
            self.lfn.put_result('FAILED')
        else:
            self.lfn.put_result('PASSED')
        self.lfn.flush_file()
    def test_storm_prepare_to_put_exist_file(self):
        """storm-ptp on an already-existing file must FAIL."""
        stack_value = inspect.stack()[0]
        path = stack_value[1]
        method = stack_value[3]
        try:
            storm_ptp = cp.StoRMPtp(self.tsets['general']['endpoint'],
                                    self.tsets['general']['accesspoint'], self.dfn)
            self.lfn.put_cmd(storm_ptp.get_command())
            ptp_result = storm_ptp.get_output()
            msg = 'storm ptp status'
            self.assert_(ptp_result['status'] == 'FAILURE',
                         '%s, %s - FAILED, %s, Test ID %s' %
                         (path, method, msg, self.id))
        except AssertionError, err:
            print err
            self.lfn.put_result('FAILED')
        else:
            self.lfn.put_result('PASSED')
        self.lfn.flush_file()
    def test_storm_put_done(self):
        """storm-ptp then storm-pd (put-done) with the returned token; both must PASS."""
        stack_value = inspect.stack()[0]
        path = stack_value[1]
        method = stack_value[3]
        try:
            storm_ptp = cp.StoRMPtp(self.tsets['general']['endpoint'],
                                    self.tsets['general']['accesspoint'], self.dfn)
            self.lfn.put_cmd(storm_ptp.get_command())
            ptp_result = storm_ptp.get_output()
            msg = 'storm ptp status'
            self.assert_(ptp_result['status'] == 'PASS',
                         '%s, %s - FAILED, %s, Test ID %s' %
                         (path, method, msg, self.id))
            # Finalise the upload using the request token returned by ptp.
            storm_pd = cp.StoRMPd(self.tsets['general']['endpoint'],
                                  self.tsets['general']['accesspoint'], self.dfn,
                                  ptp_result['requestToken'])
            self.lfn.put_cmd(storm_pd.get_command())
            pd_result = storm_pd.get_output()
            msg = 'storm pd status'
            self.assert_(pd_result['status'] == 'PASS',
                         '%s, %s - FAILED, %s, Test ID %s' %
                         (path, method, msg, self.id))
        except AssertionError, err:
            print err
            self.lfn.put_result('FAILED')
        else:
            self.lfn.put_result('PASSED')
        self.lfn.flush_file()
|
import argparse
import datetime
import json
import math
from collections import defaultdict
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
import numpy as np
from steemapi.steemnoderpc import SteemNodeRPC
from steemutils import *
from arg_parse import *
def get_posts(rpc, beg_block, end_block):
    """
    Get all the posts in the blocks from beg_block up to and including
    end_block and loads them into a panda dataframe, return the dataframe.
    """
    print("Getting posts from block {} to {}".format(beg_block, end_block))
    posts = []
    for block in blocks(rpc, beg_block, end_block):
        if "transactions" not in block:
            continue
        for tx in block["transactions"]:
            for op in tx["operations"]:
                # Each operation is a [type, payload] pair.
                op_type = op[0]
                op_data = op[1]
                timestamp = pd.to_datetime(tx['expiration'])  # NOTE(review): unused
                # Top-level posts carry a title; replies have an empty one.
                if op_type == "comment" and op_data['title'] != '':
                    author = op_data['author']
                    permlink = op_data['permlink']
                    post = rpc.get_content(author, permlink)
                    # Count up/down votes from the vote percentages.
                    post['num_uvotes'] = len([v for v in post['active_votes'] if float(v['percent']) > 0.0])
                    post['num_dvotes'] = len([v for v in post['active_votes'] if float(v['percent']) < 0.0])
                    # Posts have a root_comment that matches the id
                    if post['root_comment'] != post['id']:
                        continue
                    posts.append(post)
    df = pd.DataFrame.from_records(posts)
    # Remove duplicates caused by blockchain entries for updates
    df = df.drop_duplicates('id')
    # Convert data to correct types
    df.cashout_time = pd.to_datetime(df.cashout_time)
    df.created = pd.to_datetime(df.created)
    df.last_payout = pd.to_datetime(df.last_payout)
    df.last_update = pd.to_datetime(df.last_update)
    # Payout fields look like "1.234 SBD"; keep just the numeric part.
    df.total_payout_value = df.total_payout_value.apply(
        lambda x: float(x.partition(' ')[0]))
    df.total_pending_payout_value = df.total_pending_payout_value.apply(
        lambda x: float(x.partition(' ')[0]))
    # Add some useful columns based on other columns
    # NOTE(review): Series.dt.weekday_name was removed in pandas 1.0
    # (replaced by .dt.day_name()) — confirm the pinned pandas version.
    df['day'] = df['created'].dt.weekday_name
    df['hour'] = df['created'].dt.hour
    return df
def create_plot1(data):
    """Three stacked charts of payout by posting hour — total, average, and
    post count — written to plot1.png."""
    print("Creating plot1")
    fig, (total_ax, mean_ax, count_ax) = plt.subplots(nrows=3)
    sns.barplot('hour', 'total_payout_value', palette='Blues', ci=None, data=data, ax=total_ax, estimator=np.sum)
    sns.barplot('hour', 'total_payout_value', palette='Blues', ci=None, data=data, ax=mean_ax, estimator=np.mean)
    sns.countplot('hour', palette='Blues', data=data, ax=count_ax)
    total_ax.set_ylabel('Total Payout Value')
    mean_ax.set_ylabel('Total Payout Value\nAverage')
    count_ax.set_ylabel('Num Posts')
    for axis in (total_ax, mean_ax, count_ax):
        axis.set_ylim(bottom=0)
    fig.savefig('plot1.png')
def create_plot2(data):
    """Two stacked charts of upvotes by posting hour — total and average —
    written to plot2.png."""
    print("Creating plot2")
    fig, (sum_ax, mean_ax) = plt.subplots(nrows=2)
    sns.barplot('hour', 'num_uvotes', palette='Greens', ci=None, data=data, ax=sum_ax, estimator=np.sum)
    sns.barplot('hour', 'num_uvotes', palette='Greens', ci=None, data=data, ax=mean_ax, estimator=np.mean)
    sum_ax.set_ylabel('Num Upvotes')
    mean_ax.set_ylabel('Num Upvotes\nAverage')
    for axis in (sum_ax, mean_ax):
        axis.set_ylim(bottom=0)
    fig.savefig('plot2.png')
def create_plot3(data):
    """Per-weekday factor plots of payout by hour: sum, mean, and count,
    written to plot3-sum.png, plot3-mean.png, plot3-count.png."""
    print("Creating plot3")
    days = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday',
            'Saturday', 'Sunday']
    variants = [
        (np.sum, 'Total Payout Value per Day', 'plot3-sum.png'),
        (np.mean, 'Total Payout Value Average per Day', 'plot3-mean.png'),
        (len, 'Num Posts per Day', 'plot3-count.png'),
    ]
    for estimator, title, fname in variants:
        plot = sns.factorplot('hour', 'total_payout_value', kind='bar', ci=None,
                              data=data, col='day', col_order=days,
                              col_wrap=3, estimator=estimator, palette='Blues')
        plot.fig.suptitle(title, size=16)
        plot.fig.subplots_adjust(top=.9)
        plot.savefig(fname)
def create_plot4(data):
    """Strip plot of individual post payouts per weekday, written to plot4.png."""
    print("Creating plot4")
    day_order = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday',
                 'Saturday', 'Sunday']
    fig, ax = plt.subplots(nrows=1)
    sns.stripplot(x='total_payout_value', y='day',
                  ax=ax, data=data, jitter=True, order=day_order)
    ax.set_xlim(left=0)
    fig.savefig('plot4.png')
def create_plot5(data):
    """Per-weekday factor plots of upvotes by hour: sum and mean, written to
    plot5-sum.png and plot5-mean.png."""
    print("Creating plot5")
    days = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday',
            'Saturday', 'Sunday']
    variants = [
        (np.sum, 'Upvotes per Day', 'plot5-sum.png'),
        (np.mean, 'Average upvotes per Day', 'plot5-mean.png'),
    ]
    for estimator, title, fname in variants:
        plot = sns.factorplot('hour', 'num_uvotes', kind='bar', ci=None,
                              data=data, col='day', col_order=days,
                              col_wrap=3, estimator=estimator, palette='Greens')
        plot.fig.suptitle(title, size=16)
        plot.fig.subplots_adjust(top=.9)
        plot.savefig(fname)
if __name__ == "__main__":
    # Parse the CLI arguments shared with the other steem analysis scripts.
    parent_parser = get_parent_parser()
    parser = argparse.ArgumentParser(parents=[parent_parser])
    args = parser.parse_args()
    # Connect to steem rpc server
    rpc = SteemNodeRPC(args.server, "", "")
    props = rpc.get_dynamic_global_properties()
    validate_parent_args(rpc, args, props)
    # Get all posts in the given blocks
    data = get_posts(rpc, args.beg_block, args.end_block)
    # Filter data
    # Remove posts that haven't paid out yet
    # (their 'created' field parses to the Unix epoch).
    data = data[data.created != pd.to_datetime('1970-01-01T00:00:00')]
    # Create plots
    create_plot1(data)
    create_plot2(data)
    create_plot3(data)
    create_plot4(data)
    create_plot5(data)
|
'''
author: Zitian(Daniel) Tong
date: 09:05 2019-05-19 2019
editor: PyCharm
email: danieltongubc@gmail.com
'''
from flask import request, render_template, Blueprint, redirect, url_for, session
from models.alert import Alert
from models.item import Item
from models.store import Store
from models.user import requires_login
alert_blueprint = Blueprint('alerts', __name__)
@alert_blueprint.route('/')
@requires_login
def index():
    """List the signed-in user's alerts."""
    user_alerts = Alert.find_many_by('user_email', session['email'])
    return render_template('alerts/index.html', alerts=user_alerts)
@alert_blueprint.route('/new', methods=['GET', 'POST'])
@requires_login
def create_alert():
    """Render the new-alert form; on POST, create the item and its alert."""
    if request.method == 'POST':
        # Read the submitted form fields (price limit validated first).
        alert_name = request.form['name']
        url = request.form['item_url']
        limit = float(request.form['price_limit'])
        # Resolve the store from the URL to get the scraping tag/query.
        store = Store.find_by_url(url)
        item = Item(url, store.tag_name, store.query)
        item.load_price()
        item.save_to_mongo()
        Alert(alert_name, item._id, limit, session['email']).save_to_mongo()
    return render_template('alerts/new_alert.html')
@alert_blueprint.route('/edit/<string:alert_id>', methods=['GET', 'POST'])
@requires_login
def edit_alerts(alert_id):
    """Edit an alert's price limit; redirect to the index after saving."""
    alert = Alert.find_by_id(alert_id)
    if request.method != 'POST':
        return render_template('alerts/edit_alerts.html', alert=alert)
    alert.price_limit = float(request.form['price_limit'])
    alert.save_to_mongo()
    return redirect(url_for('.index'))
@alert_blueprint.route('/delete/<string:alert_id>')
@requires_login
def delete_alerts(alert_id):
    """Delete an alert — only if it belongs to the signed-in user — then show the index."""
    alert = Alert.find_by_id(alert_id)
    owns_alert = alert.user_email == session['email']
    if owns_alert:
        alert.remove_from_mongo()
    return redirect(url_for('.index'))
|
from os import name
from django.urls import path
from. import views
# Namespace used when reversing URLs, e.g. "processer:detail".
app_name = "processer"
urlpatterns = [
    # List view (landing page).
    path("", views.Index.as_view(), name="index"),
    # Detail view for a single object.
    path("<int:pk>/", views.Detail.as_view(), name='detail'),
    # Frame-image endpoint (fetched via AJAX, per the view's name).
    path("<int:pk>/image/", views.getFrameViaAjax, name="ajax_image"),
]
|
import os
FILE = "Indicators.csv"
# NOTE(review): `indicator` is created but never populated below.
indicator = {}
ID = []
# Collect (indicator_id, indicator_name) pairs for rows whose third
# semicolon-separated column is "oui".
with open(FILE, 'r') as file:
    for line in file:
        colonne = line.rstrip().split(';')
        if colonne[2] == "oui":
            ID.append((colonne[0], colonne[1]))
print(ID)
# Download each selected indicator's JSON from the HDRO API via curl.
# NOTE(review): `id` and `name` are interpolated into a shell command without
# escaping and the request embeds a hard-coded session cookie — acceptable for
# a one-off personal script, unsafe if the CSV is untrusted.
for id, name in ID:
    command = f"curl \"http://ec2-54-174-131-205.compute-1.amazonaws.com/API/HDRO_API.php/indicator_id={id}\" -H \"User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:84.0) Gecko/20100101 Firefox/84.0\" -H \"Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8\" -H \"Accept-Language: en-US,en;q=0.5\" --compressed -H \"Prefer: safe\" -H \"Referer: http://ec2-54-174-131-205.compute-1.amazonaws.com/API/Information.php\" -H \"DNT: 1\" -H \"Connection: keep-alive\" -H \"Cookie: PHPSESSID=q79r1h6hif37jrpjubp6npsbg4\" -H \"Upgrade-Insecure-Requests: 1\" -H \"Cache-Control: max-age=0\" > '{name}'.json"
    os.system(command)
|
import os
import pytest
def take_screenshot(driver, name):
    """Save a screenshot from *driver* to screenshot/<name>, creating any
    intermediate directories first."""
    target_dir = os.path.join("screenshot", os.path.dirname(name))
    os.makedirs(target_dir, exist_ok=True)  # no error if it already exists
    driver.save_screenshot(os.path.join("screenshot", name))
@pytest.mark.skip
@pytest.mark.usefixtures("driver_init_screenshot")
class TestScreenshot:
    """Selenium smoke test that screenshots the Django admin login page.

    Skipped by default.  Relies on the `driver_init_screenshot` fixture to
    provide self.driver and self.browser — confirm in the project's conftest.
    """
    def test_screenshot_admin(self, live_server):
        """Open /admin/ on the live server, save a screenshot, verify the page title."""
        self.driver.get(f"{live_server.url}/admin/")
        take_screenshot(self.driver, "admin/" +
                        "admin_" + self.browser + ".png")
        assert "Log in | Django site admin" in self.driver.title
|
from classes import Datum
from classes import Person as P
# Demo: construct a Datum and a Person and exercise their display/printing.
x = Datum(-1.1, 0.08)
print(x)
paolo = P("Paolo")
paolo.display()
print(paolo)
|
import pandas as pd
import random
df = pd.read_excel(r'C:\Users\ruttu\Desktop\CurrencyData File.xlsx')
df.to_dict()
def qp(c):
    """Write 18 multiple-choice questions to the module-global `file`.

    Questions cover rows c..c+17 of the module-global DataFrame `df`
    ('Unnamed: 1' = currency name, 'Unnamed: 2' = symbol).  Each question
    shuffles the correct symbol together with three random distractor
    symbols.  Closes `file` when done.
    """
    file.write("Name :\n\n")
    file.write("Registration Number :\n\n")
    file.write("\t\t\t\t\tQuestion Paper\n")
    k = 1
    for j in range(c, c + 18):
        file.write(str(k))
        file.write(" What is the symbol of ")
        file.write(df['Unnamed: 1'][j])
        # Build a FRESH option pool for this question.  Previously the pools
        # were created once outside the loop and only grew, so every question
        # reused the first question's distractors and the shuffled pool could
        # omit the correct answer entirely.
        options = [df['Unnamed: 2'][j]]
        for _ in range(3):
            options.append(random.choice(list(df['Unnamed: 2'])))
        random.shuffle(options)
        for label, option in zip(("\na. ", "\nb. ", "\nc. ", "\nd. "), options):
            file.write(label)
            file.write(option)
        file.write("\n")
        k = k + 1
    file.close()
def a(c):
    """Write the answer key for rows c..c+17 of the module-global `df` to the
    module-global `answer` file, then close it."""
    answer.write("\t\t\t\t\tAnswer Sheet\n")
    for k, j in enumerate(range(c, c + 18), start=1):
        answer.write(str(k))
        answer.write(" ")
        answer.write(df['Unnamed: 2'][j])
        answer.write("\n")
    answer.close()
# Generate ten question/answer paper pairs.  Paper 1 starts at row 1; papers
# 2-10 start at successive multiples of 18, matching the original sequence
# 1, 18, 36, ..., 162.  (The globals `file`/`answer` are the handles qp()/a()
# write to and close.)
starts = [1] + [18 * i for i in range(1, 10)]
for paper_no, c in enumerate(starts, start=1):
    file = open('questionPaper[{}].txt'.format(paper_no), 'w')
    answer = open('answerpaper[{}].txt'.format(paper_no), 'w')
    qp(c)
    a(c)
|
#!/usr/bin/env python
# coding=utf-8
"""
The main entry point.
Invoke as `python_module_project' or `python -m python_module_project'.
"""
import sys
def main():
    """Run the package CLI and exit with its return code; exit 1 on Ctrl-C."""
    try:
        # Imported inside the try so a KeyboardInterrupt raised during
        # import is handled the same way as one raised while running.
        from .cli import main as cli_main
        sys.exit(cli_main())
    except KeyboardInterrupt:
        sys.exit(1)
if __name__ == "__main__":
    main()
|
# Credit to http://www.jesshamrick.com/2011/05/18/an-introduction-to-classes-and-inheritance-in-python/
# for exercises
class Pet(object):
    """A named pet of a given species."""

    def __init__(self, name, species):
        self.name, self.species = name, species

    def getName(self):
        """Return the pet's name."""
        return self.name

    def getSpecies(self):
        """Return the pet's species."""
        return self.species

    def __repr__(self):
        # repr() of the attributes, so strings appear quoted.
        return "%r %r" % (self.name, self.species)

    def __str__(self):
        return "%s is a %s." % (self.name, self.species)
class Dog(Pet):
    """A dog; additionally records whether it chases cats."""

    def __init__(self, name, chases_cats):
        super(Dog, self).__init__(name, "Dog")
        self.chases_cats = chases_cats

    def chasesCats(self):
        """Return whether this dog chases cats."""
        return self.chases_cats
class Cat(Pet):
    """A cat; additionally records whether it hates dogs."""

    def __init__(self, name, hates_dogs):
        super(Cat, self).__init__(name, "Cat")
        self.hates_dogs = hates_dogs

    def hatesDogs(self):
        """Return whether this cat hates dogs."""
        return self.hates_dogs
|
# Read a score in [0, 1] and print the corresponding letter grade.
score = input("Enter Score: ")
s = float(score)
if s > 1 or s < 0:
    print("Input Error")
elif s < 0.6:
    print("F")
elif s < 0.7:
    print("D")
elif s < 0.8:
    print("C")
elif s < 0.9:
    print("B")
else:
    # Covers 0.9 <= s <= 1.0.  The original final branch was `elif s < 1`,
    # so a score of exactly 1.0 printed nothing at all.
    print("A")
|
import numpy as np
''' read the files'''
def getlist(filename):
    """Read a comma-separated file and return its rows as lists of strings.

    Each line is stripped of trailing whitespace/newline before splitting.
    The original computed the stripped line but then split the RAW line
    (dead assignment), leaving a '\\n' on every row's last field.
    """
    rows = []
    with open(filename) as f:
        for line in f:
            rows.append(line.strip().split(','))
    return rows
''' modify the last term'''
def mdflt(lst):
    """Return a copy of *lst* where each row's last field is reduced to its
    first character (used to trim the trailing '\\n' off class labels).

    Assumes every row and its last field are non-empty — TODO confirm.
    """
    return [row[:-1] + [row[-1][0]] for row in lst]
'''change str into int or float if possible'''
def mknum(lst):
    """Convert every field to int if possible, else float, else leave it as-is."""
    def _convert(value):
        try:
            return int(value)
        except Exception:
            try:
                return float(value)
            except Exception:
                return value
    return [[_convert(field) for field in row] for row in lst]
'''find the mean'''
def findm(lst):
    """Per-row mean; rows whose mean cannot be computed (e.g. contain strings)
    yield 0."""
    means = []
    for row in lst:
        try:
            means.append(float(np.mean(row)))
        except Exception:
            means.append(0)
    return means
'''find the standard deviation'''
def findstd(lst):
    """Per-row (population) standard deviation; rows where it cannot be
    computed yield 0."""
    stds = []
    for row in lst:
        try:
            stds.append(float(np.std(row)))
        except Exception:
            stds.append(0)
    return stds
'''normalize the data'''
def nmlz(means, stds, lst):
    """Z-score numeric fields using per-column means/stds; non-numeric fields
    pass through untouched.  (Uses exact type() checks, so bools are NOT
    treated as numbers — preserved from the original.)"""
    normalized = [[] for _ in range(len(lst))]
    for row_out, row in zip(normalized, lst):
        for col, value in enumerate(row):
            if type(value) == int or type(value) == float:
                row_out.append((value - means[col]) / stds[col])
            else:
                row_out.append(value)
    return normalized
''' add number as final list'''
def addo(lst):
    """Transpose *lst* and append each output row's index as its final element."""
    height = len(lst)
    out = []
    for j in range(len(lst[0])):
        out.append([lst[i][j] for i in range(height)] + [j])
    return out
''' calculate the D2 distance'''
def caldis(slst,rlst):
    # Build a feature-major difference table between one test row (rlst)
    # and every source row in slst: outlst[i][j] is the distance
    # contribution of feature i for source row j.
    outlst = [[] for _ in range(len(rlst))]
    for i in range(len(rlst)):
        for j in range(len(slst)):
            if i == len(rlst)-1:
                #a = 0
                # last row of the table holds each source row's label
                # (its final field), not a distance
                outlst[-1].append(slst[j][-1])
            elif type(rlst[i]) == str:
                # categorical feature: 0 when equal, 1 otherwise
                if rlst[i] == slst[j][i]:
                    outlst[i].append(0)
                else:
                    outlst[i].append(1)
            else:
                # numeric feature: signed difference (squared in calsum)
                outlst[i].append(rlst[i]-slst[j][i])
    return outlst
''' calculate sum'''
def calsum(lst):
    """Collapse the feature-difference table from caldis() into one
    Euclidean distance per source row, paired (via addo) with each source
    row's label and its column index."""
    pairs = [[], []]
    for col in range(len(lst[0])):
        total = 0
        # all rows except the last are feature differences; the last row
        # carries the labels
        for feat in range(len(lst) - 1):
            diff = lst[feat][col]
            total += diff * diff
        pairs[0].append(float(np.sqrt(total)))
        pairs[1].append(lst[-1][col])
    return addo(pairs)
'''voting strategy'''
def votf(k, lst):
    # k-NN voting over the calsum() output: each row of *lst* is
    # [distance, label, index].  Returns the winning label as a str.
    ranklst = [[] for _ in range(len(lst[0]))]
    sortlst = []
    prelst = [[] for _ in range(len(lst))]
    outlst = []
    # transpose: ranklst[0] = distances, ranklst[1] = labels,
    # ranklst[2] = source-row indices
    for i in range(len(lst[0])):
        for j in range(len(lst)):
            ranklst[i].append(lst[j][i])
        # NOTE(review): recomputed on every pass of the outer loop; only
        # the final assignment matters (hoistable)
        sortlst = sorted(ranklst[0])
    # rebuild rows in ascending-distance order: prelst[n] holds the
    # [distance, label, index] of the n-th nearest source row.
    # NOTE(review): assumes distances are distinct — a tied distance
    # appends multiple triples into the same prelst[n].
    for n in range(len(sortlst)):
        for m in range(len(sortlst)):
            if n!=m:
                if sortlst[n] == sortlst[m]:
                    print('have same sum')
            if sortlst[n] == ranklst[0][m]:
                prelst[n].append(ranklst[0][m])
                prelst[n].append(ranklst[1][m])
                prelst[n].append(ranklst[2][m])
    if k == 1:
        # single neighbour: take its label directly
        outlst.append(prelst[0][1])
    elif k <= 0:
        print('error')
    else:
        # majority vote among the k nearest: '+' labels vs everything else
        posno = 0
        negno = 0
        for i in range(k):
            if prelst[i][1] == '+':
                posno += 1
            else:
                negno += 1
        if posno > negno:
            outlst.append('+')
        elif posno < negno:
            outlst.append('-')
        else:
            # tie: fall back to the nearest neighbour's label
            outlst.append(prelst[0][1])
    outlst = str(outlst[0])
    return outlst
'''calculate the accuracy'''
def acur(rlst, slst):
    """Return the percentage (0-100) of predicted labels in *rlst* that
    match the actual label (the last field) of the corresponding row in
    *slst*.  A length mismatch is reported but still scored over the
    comparable prefix."""
    if len(rlst) != len(slst):
        print('Input error!')
    totalnum = len(rlst)
    if totalnum == 0:
        # Bug fix: empty input previously raised ZeroDivisionError
        return 0
    right_value = 0
    # Bug fix: iterate only the overlap so a shorter slst no longer
    # raises IndexError after the warning above.
    for i in range(min(len(rlst), len(slst))):
        if rlst[i] == slst[i][-1]:
            right_value += 1
    return (right_value / totalnum) * 100
'''main function: k is the k-NN's k, and sourcename as well as file name are as their names'''
def kNNC(k,sourcename,filename):
    """k-NN classifier: label every row of *filename* using its *k*
    nearest neighbours from *sourcename*.  Both are comma-separated text
    files whose last field is the class label.

    NOTE(review): out_list is initialised to [[],[]] and then appended
    to, so the return value is [[], [], predictions, accuracy]; the two
    leading empty lists look unintentional.
    """
    # get source name
    source_list = getlist(sourcename)
    # read the data first
    read_list = getlist(filename)
    # get rid of '\n'
    source_list = mdflt(source_list)
    read_list = mdflt(read_list)
    # generate output list
    out_list = [[],[]]
    # initialize lists
    # source lists
    n = len(source_list[0])
    # source lists
    sflsts = [[] for _ in range(n)]
    nsflsts = []
    # other sources
    rmeans = []
    smeans = []
    rstds = []
    sstds = []
    numrl = []
    numsl = []
    results = []
    # loaded lists
    rflsts = [[] for _ in range(n)]
    nrflsts = []
    ### make the data to be its transpose
    # input loaded data into empty lists
    for i in range(len(read_list)):
        for j in range(n):
            rflsts[j].append(read_list[i][j])
    # input source data into empty lists
    for i in range(len(source_list)):
        for j in range(n):
            sflsts[j].append(source_list[i][j])
    # modify lists
    rflsts = mknum(rflsts)
    sflsts = mknum(sflsts)
    numrl = mknum(read_list)
    numsl = mknum(source_list)
    # calculate means and stds
    rmeans = findm(rflsts)
    smeans = findm(sflsts)
    rstds = findstd(rflsts)
    sstds = findstd(sflsts)
    # normalize the data
    # NOTE(review): each file is normalized with its own means/stds —
    # TODO confirm the test set shouldn't reuse the training statistics
    nrflsts = nmlz(rmeans, rstds, numrl)
    nsflsts = nmlz(smeans, sstds, numsl)
    # generate empty list for saving results
    showlst = []
    # kNN algorithm
    for i in range(len(nrflsts)):
        results = caldis(nsflsts, nrflsts[i])
        #results = modfstr(strno, results)
        results = calsum(results)
        results = votf(k, results)
        showlst.append(results)
    # calculate the accuracy
    accuracy = acur(showlst, read_list)
    # store the data into list
    out_list.append(showlst)
    out_list.append(accuracy)
    #####print out the results#####
    print('According to kNN algorithm, the labels in testing dataset should be:')
    for i in range(len(showlst)-1):
        print(showlst[i],end = ', ')
    print(showlst[-1])
    print('The accuracy of kNN algorithm is:', end = ' ')
    print(str(round(accuracy,4))+' %')
    return out_list
# actions when running .py file
if __name__ == '__main__':
    # Classify 'crx.testing.processed' using 20 nearest neighbours drawn
    # from 'crx.training.processed' (paths resolved against the CWD).
    kNNC(20,'crx.training.processed','crx.testing.processed')
|
import numpy as np
import matplotlib.pyplot as plt
#function that we differentiate
def func(t, y):
    """Right-hand side of the harmonic oscillator x'' = -x written as a
    first-order system: y = [x, x'], so the derivative is [x', -x]."""
    position, velocity = y[0], y[1]
    return np.array([velocity, -position])
class Euler:
    """Explicit (forward) Euler integrator for a first-order ODE system
    y' = func(t, y) on a fixed uniform time grid."""

    def __init__(self, func, x0, time_zero, time_end, step):
        self.func = func              # right-hand side f(t, y)
        self.x0 = x0                  # initial state vector
        self.time_zero = time_zero    # start of the integration interval
        self.time_end = time_end      # end of the interval (exclusive)
        self.step = step              # fixed step size h
        # uniform grid t0, t0 + h, ... strictly below time_end
        self.time = np.arange(self.time_zero, self.time_end, self.step)
        self.length_time = len(self.time)
        # one row of state per grid point; row 0 holds the initial state
        self.x_matrix = np.zeros((self.length_time, len(self.x0)))
        self.x_matrix[0] = np.array(self.x0)
        self.iter = 1                 # index of the next row to fill

    def start_symulation(self):
        """Run the Euler recursion y[n] = y[n-1] + h * f(t[n-1], y[n-1])
        over the whole grid and return (time grid, state matrix)."""
        while self.iter < self.length_time:
            prev = self.x_matrix[self.iter - 1]
            slope = self.func(self.time[self.iter - 1], prev)
            self.x_matrix[self.iter] = np.array(prev + self.step * slope)
            self.iter += 1
        return self.time, self.x_matrix

    def show_plots(self):
        """Plot both state components against time (top) and the phase
        portrait x vs. x' (bottom)."""
        plt.subplot(211)
        plt.plot(self.time, self.x_matrix[:, 0])
        plt.plot(self.time, self.x_matrix[:, 1])
        plt.subplot(212)
        plt.plot(self.x_matrix[:, 0], self.x_matrix[:, 1])
        return plt.show()
|
import os
import flask
from solr_feeder import config
from solr_feeder.endpoints import endpoint
__all__ = ['create_application']
# Create the Flask application
def create_application(run_mode=os.getenv('FLASK_ENV', 'production')):
    """Build and configure the Flask application for *run_mode*, which
    defaults to the FLASK_ENV environment variable (read once, at import
    time) and falls back to 'production'."""
    app = flask.Flask(__name__)
    # load the mode-specific configuration object, then wire up the routes
    app.config.from_object(config.CONFIGURATION[run_mode])
    endpoint.init_app(app)
    return app
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.