text stringlengths 8 6.05M |
|---|
import json
from datetime import date, datetime
from pathlib import Path
from typing import List
import attr
import cattr
converter = cattr.Converter()
@attr.s(auto_attribs=True, frozen=True)
class Row:
    # One record of the rolling-rate dataset.  `date` is kept as the raw
    # "YYYY-MM-DD" string: the module-level structure hook only fires for
    # fields annotated as datetime.date, not str.
    date: str
    areaCode: str
    areaName: str
    newCasesBySpecimenDateRollingRate: float
@attr.s(auto_attribs=True, frozen=True)
class Dataset:
    # Top-level payload: `length` is the record count reported by the source,
    # `body` holds the individual rows.
    length: int
    body: List[Row]
def read_dataset(filename: Path) -> Dataset:
    """Load the JSON file at *filename* and structure it into a Dataset."""
    with filename.open() as handle:
        payload = json.load(handle)
    return converter.structure(payload, Dataset)
# Teach the converter to parse "YYYY-MM-DD" strings into datetime.date
# objects wherever a field is annotated as `date`.
converter.register_structure_hook(
    date, lambda value, _: datetime.strptime(value, "%Y-%m-%d").date()
)
|
#!/usr/bin/env python
import rospy
from left import Left
from right import Right
#from start import Start
from enum import IntEnum
from ar_track_alvar_msgs.msg import AlvarMarkers
from ar_switch import Ar_Find
import math
# Marker id -> state name.
# FIX: the original literal for id 3 was an unterminated string (`3:"}`),
# a syntax error.  "Unknown" is a placeholder — NOTE(review): confirm the
# real state name for marker 3.
STATES = {-1: 'MidWall', 1: 'LeftWall', 4: 'RightWall', 2: 'Start', 3: 'Unknown'}


def ar_state(id):
    """Return the state name for AR marker *id* (raises KeyError if unmapped)."""
    return STATES[id]
def main():
    """Wall-following state machine driven by detected AR markers."""
    left = Left()
    right = Right()
    # start = Start()
    ar = Ar_Find()
    rospy.init_node("State_Machine")
    rate = rospy.Rate(10)
    current_state = "RightWall"
    rospy.loginfo("Buscando")
    while not rospy.is_shutdown():
        # Default to right-wall following; override when a marker is seen.
        current_state = "RightWall"
        try:
            # FIX: the original referenced an undefined `my_ar_find`;
            # `ar` is the Ar_Find instance created above.
            current_state = ar_state(ar.id)
        except (AttributeError, KeyError):
            # No marker detected yet, or an unmapped id: keep the default.
            pass
        if current_state == "RightWall":
            print("Right")  # FIX: Python 3 print
            right.rightw()
            ar.scan()
        elif current_state == "LeftWall":  # FIX: original compared "leftWall" (wrong case, never matched)
            print("Left")
            left.leftw()
            # NOTE(review): the original called undefined `start_light.look()`
            # here; removed — confirm whether a Start() behavior belongs here.
            ar.scan()
        rate.sleep()


if __name__ == '__main__':
    main()
|
# oef6: read two integers and print the square of their sum.
a = int(input("Give a number: "))
b = int(input("Give another number: "))
# (a + b) ** 2, written as a product
result = (a+b)*(a+b)
print("({} + {}) ^ 2 = {}".format(a, b, result))
import pandas as pd
# Quick pandas exploration of an NYC weather CSV.
df = pd.read_csv('nyc_weather_report.csv')
print(df)
print(df['Temperature'].max())  # max in temp column
print(df['EST'][df['Events']=='Rain'])  # dates on which event was rain
df.fillna(0, inplace = True)
# replaces the blank spaces (NaN) with 0 so the mean below ignores no rows
print(df['WindSpeedMPH'].mean())
|
#TODO
'''
Map plot
'''
from .taylor import TaylorDiagram
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np
from mpl_toolkits.basemap import Basemap
from .io import Raster
def layout(src, *arg, **kargs):
    '''
    Plot a Raster on a cylindrical Basemap with a colorbar.

    Inputs:
    -----------------------
    :src - geoPackage.io.Raster object
    :kargs - {'cmap': colormap (default cm.rainbow),
              'location': location of colorbar right/bottom,
              'remove_neg': bool; don't display negative values,
              'cb_label': label of colorbar,
              'extent': base map extent 'global'/'local',
              'save': save path,
              'figsize'/'dpi': forwarded to plt.figure,
              'vmin'/'vmax': forwarded to pcolormesh}
    Returns:
    -----------------------
    :map - the QuadMesh returned by pcolormesh
    '''
    cmap = kargs.get('cmap', cm.rainbow)
    loc = kargs.get('location', 'bottom')
    cb_label = kargs.get('cb_label', '')
    extent = kargs.get('extent', 'global')
    dst = kargs.get('save', None)
    remove = kargs.get('remove_neg', True)
    figkargs = {key: kargs[key] for key in kargs if key in ('figsize', 'dpi')}
    meshplotkargs = {key: kargs[key] for key in kargs if key in ('vmin', 'vmax')}
    if not isinstance(src, Raster):
        raise ValueError('expected geo.Package.io.Raster object, but get %s' % type(src))
    # Pixel-center lon/lat grids derived from the GDAL geotransform.
    ulx, xres, _, uly, _, yres = src.geotransform
    m, n = src.array.shape
    xmin = ulx + 0.5 * xres
    ymin = uly + 0.5 * yres
    xmax = xmin + ((src.layer.RasterXSize - 0.5) * xres)
    ymax = ymin + ((src.layer.RasterYSize - 0.5) * yres)
    lons = np.linspace(xmin, xmax, n)
    lats = np.linspace(ymin, ymax, m)
    x, y = np.meshgrid(lons, lats)
    # FIX: work on a float copy so masking negatives does not mutate the
    # caller's src.array in place (and integer arrays cannot hold NaN).
    data = src.array.astype(float, copy=True)
    if remove:
        data[data < 0] = np.nan
    if extent == 'global':
        map_extent = (-180, 180, -90, 90)
    else:
        map_extent = (xmin, xmax, ymin, ymax)
    fig = plt.figure(**figkargs)
    # FIX: llcrnrlat/urcrnrlat were swapped.  map_extent is
    # (xmin, xmax, ymin, ymax), so the lower-left latitude is index 2 and the
    # upper-right latitude index 3; the original passed them reversed, which
    # Basemap rejects (llcrnrlat must be < urcrnrlat).
    m = Basemap(projection='cyl', resolution='l',
                llcrnrlat=map_extent[2], urcrnrlat=map_extent[3],
                llcrnrlon=map_extent[0], urcrnrlon=map_extent[1])
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(-90, 91, 45), labels=[True, False, False, True],
                    dashes=[10, 10], linewidth=.5)
    m.drawmeridians(np.arange(-180, 180, 45), labels=[True, False, False, True],
                    dashes=[10, 10], linewidth=.5)
    # Project lon/lat into map coordinates (identity for 'cyl').
    x, y = m(x, y)
    map = m.pcolormesh(x, y, data, cmap=cmap, **meshplotkargs)
    cb = m.colorbar(location=loc, pad='10%')
    cb.set_label(cb_label)
    if dst is not None:
        fig.savefig(dst)
    return map
def taylorPlot(*args, **kargs):
    '''
    Draw a Taylor diagram of (stddev, correlation) samples.

    Inputs:
    ------------------------
    :args
        args[0] - data: list of [std, cc, name] triples, e.g.
        [
        [std, cc, name]
        ]
        args[1] - markers: one matplotlib marker per sample
        args[2] - colors: one color per sample
    :kargs
        refstd=1.0, fig=None, rect=111, label='_', srange=(0, 1.5), extend=False
        save - optional output path (saved at 144 dpi)
        figsize/dpi - forwarded to plt.figure
    Returns the TaylorDiagram instance.
    '''
    data= args[0]
    markers= args[1]
    colors= args[2]
    dst= kargs.get('save', None)
    # Split the kwargs between the diagram constructor and plt.figure.
    diakargs= {key: kargs[key] for key in kargs.keys() if key in ['refstd', 'fig', 'rect', 'label', 'srange', 'extend']}
    figkargs= {key: kargs[key] for key in kargs.keys() if key in ['figsize', 'dpi']}
    dia= TaylorDiagram(**diakargs)
    # NOTE(review): this figure is created AFTER the diagram; if TaylorDiagram
    # built its own figure the legend/colorbar below land on a different
    # figure — confirm this is intended.
    fig = plt.figure(**figkargs)
    # One point per (stddev, corrcoef) sample.
    for i, (stddev, corrcoef, name) in enumerate(data):
        dia.add_sample(stddev, corrcoef,
                       marker=markers[i], ms=10, ls='',
                       mfc=colors[i],mec=colors[i],
                       # mfc='k', mec='k',
                       label=str(name))
    # Iso-contours of the diagram metric, labeled in place.
    contours = dia.add_contours(levels=5, colors='0.5')
    plt.clabel(contours, inline=1, fontsize=10, fmt='%.2f')
    dia.add_grid() # Add grid
    dia._ax.axis[:].major_ticks.set_tick_out(True) # Put ticks outward
    # Add a figure legend and title
    fig.legend(dia.samplePoints,
               [ p.get_label() for p in dia.samplePoints ],
               numpoints=1, prop=dict(size='large'), loc='upper right')
    # Narrow horizontal colorbar across the top of the figure.
    cbar= fig.add_axes((0.5,0.9,.4,.01))
    cb = plt.colorbar(orientation='horizontal', mappable= plt.matplotlib.cm.ScalarMappable(cmap=plt.matplotlib.cm.rainbow), cax=cbar, fraction=0.70, shrink=0.7,ticks=[0,1,2,3,4])
    cb.set_label('Temperature')
    cb.ax.set_xticks(range(5))
    cb.ax.set_xticklabels(['cold','hot'])
    # fig.suptitle("Taylor diagram", size='x-large') # Figure title
    if dst is not None:
        fig.savefig(dst, dpi=144)
    return dia
|
#!/usr/bin/python
#\file follow_q_traj3.py
#\brief Following joint angle trajectory
# where target velocity is automatically decided with spline.
#\author Akihiko Yamaguchi, info@akihikoy.net
#\version 0.1
#\date Nov.22, 2017
import roslib; roslib.load_manifest('motoman_driver')
import rospy
import sensor_msgs.msg
import trajectory_msgs.msg
import copy
from cubic_hermite_spline import TCubicHermiteSpline
from follow_q_traj2 import ToROSTrajectory
'''Convert joint angle trajectory to joint velocity trajectory.'''
def QTrajToDQTraj(q_traj, t_traj):
    '''Convert a joint-angle trajectory into a joint-velocity trajectory.

    q_traj -- list of joint-angle vectors (one per key point)
    t_traj -- matching list of times
    Returns the list of joint-velocity vectors at each key time.
    '''
    dof = len(q_traj[0])
    # Fit one cubic Hermite spline per joint over (time, angle) key points.
    splines = [TCubicHermiteSpline() for _ in range(dof)]
    for d, spline in enumerate(splines):
        key_points = [[t, q[d]] for q, t in zip(q_traj, t_traj)]
        spline.Initialize(key_points, tan_method=spline.CARDINAL, c=0.0, m=0.0)
    # NOTE: full spline models are not strictly needed — only tangents at the
    # key points; computing tan_method directly would be more efficient.
    return [[splines[d].Evaluate(t, with_tan=True)[1] for d in range(dof)]
            for t in t_traj]
#Wait for subscribers (src: motoman_driver/move_to_joint.py)
def WaitForSubscribers(pub, timeout, num_subs=1):
    '''Block until *pub* has at least *num_subs* subscribers, *timeout*
    seconds elapse, or ROS shuts down.  Returns True when enough
    subscribers connected.  (src: motoman_driver/move_to_joint.py)'''
    deadline = rospy.Time.now() + rospy.Duration(timeout)
    poll = rospy.Rate(10)
    while (pub.get_num_connections() < num_subs
           and rospy.Time.now() < deadline
           and not rospy.is_shutdown()):
        poll.sleep()
    return pub.get_num_connections() >= num_subs
if __name__ == '__main__':
    # Publish a short 4-point joint trajectory with spline-derived velocities.
    rospy.init_node('motoman_test')
    pub_traj = rospy.Publisher('/joint_path_command',
                               trajectory_msgs.msg.JointTrajectory, queue_size=1)
    if not WaitForSubscribers(pub_traj, 3.0):
        print('WARNING: No subscribers of /joint_path_command')  # FIX: py3 print
    # FIX: the original built a default name list and then unconditionally
    # overwrote it with the ROS parameter; use it as the parameter's fallback.
    default_names = ['joint_'+jkey for jkey in ('s','l','e','u','r','b','t')]
    joint_names = rospy.get_param('controller_joint_names', default_names)
    # Key times (s) and joint-angle key points (7 DoF each).
    t_traj = [4.0, 6.0, 8.0, 12.0]
    q_traj = [[0.0]*7,
              [0.1, -0.3, 0.15, -0.7, 0.1, -0.3, 0.15],
              [0.21, -0.59, 0.30, -1.46, 0.35, -0.68, 0.31],
              [0.0]*7]
    dq_traj = QTrajToDQTraj(q_traj, t_traj)
    print(dq_traj)  # FIX: py3 print
    traj = ToROSTrajectory(joint_names, q_traj, t_traj, dq_traj)
    pub_traj.publish(traj)
    rospy.signal_shutdown('Done.')
|
from picamera import PiCamera
from time import sleep
from datetime import datetime
class Cameratest:
    """Thin convenience wrapper around picamera.PiCamera for preview,
    still capture, and H.264 recording."""

    def __init__(self):
        self.camera = PiCamera()
        self.resolution()  # apply the default 240x180 resolution

    def __del__(self):
        # Release the camera hardware with the wrapper.
        self.camera.close()

    def preview(self):
        # Windowed (non-fullscreen) preview at a fixed screen position.
        self.camera.start_preview(fullscreen=False, window=(2, 170, 320, 240))

    def stoppreview(self):
        self.camera.stop_preview()

    def capture(self, filename):
        self.camera.stop_preview()
        # save 'filename.jpg' in current directory
        self.camera.capture(str(filename) + '.jpg')

    def recordStart(self, filename):
        # save 'filename.h264' in current directory
        self.camera.start_recording(str(filename) + '.h264')

    def recordStop(self):
        self.camera.stop_recording()

    def rotate(self, degree=0):
        # Rotation degree: 0, 90, 180, 270
        # FIX: PiCamera's attribute is `rotation`; assigning to a
        # non-existent `camera.rotate` attribute had no effect on the camera.
        self.camera.rotation = degree

    def resolution(self, width=240, height=180):
        self.camera.resolution = (width, height)
|
# Heuristic Cost Function *****************************************************
def heuFunc1(root, v, h):
    """Heuristic cost: sum of (depth-1) over every node whose leaf equals *v*.

    root -- tree node exposing `.leaf` and `.children`, or None
    v    -- target symbol
    h    -- depth of *root* (1-based)
    """
    if root is None:  # FIX: identity comparison instead of `== None`
        return 0
    # (h-1) when this node holds the target symbol; the original's
    # redundant `val += 0` branches are dropped.
    val = (h - 1) if root.leaf == v else 0
    for child in root.children:  # recurse one level deeper
        val += heuFunc1(child, v, h + 1)
    return val
def heuFunc2(root, v, h):
    """Weighted heuristic cost over the tree rooted at *root*.

    Per node: 'diff'/'integrate' cost (h-1)+10, 'sin'/'cos' cost 2, the
    target symbol *v* costs (h-1)+1, anything else costs 2.  Branch order
    matters: operator/trig checks win over the target-symbol check.
    """
    if root is None:  # FIX: identity comparison instead of `== None`
        return 0
    x = root.leaf
    if x == 'diff' or x == 'integrate':
        val = (h - 1) + 10   # calculus operators are the most expensive
    elif x == 'sin' or x == 'cos':
        val = 2
    elif x == v:
        val = (h - 1) + 1    # target symbol: depth cost plus one
    else:
        val = 2
    for child in root.children:
        val += heuFunc2(child, v, h + 1)
    return val
|
#! /usr/bin/env python
# Thin launcher: delegate straight to the droneinfo package's node entry point.
import droneinfo.infonode
if __name__ == '__main__':
    droneinfo.infonode.main()
# Demo of common list operations, printing the list after each mutation.
numbers = [2, 5, 1, 3, 8, 7, 10, 9, 4, 6]
print(numbers)
print(len(numbers))        # number of elements
print(numbers[1])          # second element (0-based indexing)
print(numbers.index(5))    # position of the first 5
numbers.sort()             # ascending, in place
print(numbers)
numbers.reverse()          # now descending
print(numbers)
numbers.remove(5)          # drop the first occurrence of 5
print(numbers)
numbers.append(5)          # add 5 at the end
print(numbers)
numbers.pop()              # remove the last element (the 5 just appended)
print(numbers)
numbers.insert(1, 0)       # insert 0 at index 1
print(numbers)
numbers.extend(['a','b'])  # append both items (list becomes mixed-type)
print(numbers)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ˅
from structural_patterns.bridge.display_impl import DisplayImpl
# ˄
class TextDisplayImpl(DisplayImpl):
    # ˅
    # Bridge "implementor" that renders the display as a text frame:
    #   *....*
    #   :text:
    #   *....*
    # ˄
    def __init__(self, text):
        # A string to display
        self.__text = text
        # A number of characters in bytes
        self.__width = len(text)
        # ˅
        pass
        # ˄

    def impl_open(self):
        # ˅
        self.__print_line()  # top border of the frame
        # ˄

    def impl_write(self):
        # ˅
        print(f':{self.__text}:') # Enclose a text with ":" and display it.
        # ˄

    def impl_close(self):
        # ˅
        self.__print_line()  # bottom border of the frame
        # ˄

    def __print_line(self):
        # ˅
        print('*', end='') # Display "*" mark at the beginning of a frame.
        for _ in range(self.__width): # Display "." for the number of "width".
            print('.', end='')
        print('*') # Display "*" mark at the end of a frame.
        # ˄

    # ˅
    # ˄
    # ˅
    # ˄
|
import random
#author name as key , and index as value
def read_authors_dict():
    """Read data/authors.txt into {author name: index}.

    The first line is a header and is skipped; each remaining line is
    '|'-separated with the index in field 0 and the name in field 1.
    Only the first occurrence of a name is kept.
    """
    authors_dict = {}
    # FIX: use a context manager so the file is closed even on error.
    with open("data/authors.txt", "r") as infile:
        infile.readline()  # skip header
        for line in infile:
            fields = line.strip().split('|')
            if fields[1] not in authors_dict:
                authors_dict[fields[1]] = fields[0]
    return authors_dict
#interest name as key, and index as value
def read_interests_dict():
    """Read data/interests_dict.txt into {interest name: index}.

    Each line is '|'-separated with the index in field 0 and the interest
    name in field 1; only the first occurrence of a name is kept.
    """
    interests_dict = {}
    # FIX: use a context manager so the file is closed even on error.
    with open("data/interests_dict.txt", "r") as infile:
        for line in infile:
            fields = line.strip().split('|')
            if fields[1] not in interests_dict:
                interests_dict[fields[1]] = fields[0]
    return interests_dict
"""
#return labeled authors dict
# index as key and author name as value
def read_labeled_authors_info(authors_dict):
infile = open("data/labeled_authors_info.txt","r")
labeled_authors_dict = {}
for line in infile:
fields = line.strip().split("|")
if fields[0] not in authors_dict:
print fields[0]
raise SyntaxError
else:
if fields[0] not in labeled_authors_dict:
labeled_authors_dict[authors_dict[fields[0]]] = fields[0]
print labeled_authors_dict
infile.close()
return labeled_authors_dict
"""
#read author dict
def build_author_interest_dict(interests_dict, authors_dict):
    """Parse data/labeled_authors_info.txt into {author index: [interest indices]}.

    On each '|'-separated line, the interests are the non-empty fields that
    follow a field containing "Citations"; an empty field ends that section.
    An interest missing from *interests_dict* raises SyntaxError.
    """
    author_interest_dict = {}
    flag = 0  # 1 while the scanning cursor is inside the interests section
    # FIX: context manager — the original never closed the file.
    with open("data/labeled_authors_info.txt", "r") as infile:
        for line in infile:
            interests_list = []  # interests collected for this author
            fields = line.strip().split("|")
            for field in fields:
                if "Citations" in field:
                    flag = 1
                elif flag == 1:
                    if field != '':
                        if field in interests_dict:
                            interests_list.append(int(interests_dict[field]))
                        else:
                            raise SyntaxError
                    else:
                        flag = 0  # empty field ends the interests section
            # NOTE(review): this membership test compares the author *name*
            # against keys that are author *indices*, so it is effectively
            # always true — confirm whether de-duplication was intended.
            if fields[0] not in author_interest_dict:
                index = authors_dict[fields[0]]
                author_interest_dict[index] = interests_list
    print(len(author_interest_dict))  # FIX: Python 3 print call
    return author_interest_dict
#read latentRep for each labeled author, and extent it with interest list
def generate_M3L_data(author_interest_dict):
    """Join each labeled author's latent representation with their interest
    indices and write one M3L-format line per author.

    Output line format: "i1,i2,...,ik 1:f1 2:f2 ... m:fm".
    """
    # FIX: context managers close both files even on error (the input file
    # was never closed in the original).
    with open("data/2014/latentRep.txt", "r") as infile, \
         open("data/2014/m3l/M3L.dat", "w") as outfile:
        infile.readline()  # skip header
        for line in infile:
            fields = line.strip().split(" ")
            if fields[0] in author_interest_dict:
                interest_list = author_interest_dict[fields[0]]
                # comma-separated label indices, terminated by a space
                for index in interest_list[:-1]:
                    outfile.write(str(index) + ',')
                outfile.write(str(interest_list[-1]) + ' ')
                # 1-based feature index:value pairs
                for i in range(1, len(fields)):
                    outfile.write(str(i) + ':' + fields[i] + ' ')
                outfile.write('\n')
#random sample some traning data, and rest as testing data
def random_sample(size=200, total=2065):
    """Split M3L.dat into a random training file and a test file.

    size  -- number of line indices sampled for training (default 200,
             matching the original hard-coded value)
    total -- number of candidate line indices (default 2065)
    """
    # FIX: use a set for O(1) per-line membership (the original scanned a
    # list), and context managers so all three files are closed.
    train_indices = set(random.sample(range(total), size))
    with open("data/2014/m3l/M3L.dat", "r") as infile, \
         open("data/2014/m3l/train.txt", "w") as outfile, \
         open("data/2014/m3l/test.txt", "w") as outfile2:
        for index, line in enumerate(infile):
            if index in train_indices:
                outfile.write(line)
            else:
                outfile2.write(line)
if __name__ == '__main__':
    # Pipeline: load the lookup tables, join authors with their interests,
    # emit the M3L data file, then split it into train/test.
    authors_dict = read_authors_dict()
    interests_dict = read_interests_dict()
    author_interest_dict = build_author_interest_dict(interests_dict,authors_dict)
    generate_M3L_data(author_interest_dict)
    random_sample()
|
# For each of p test cases, read two strings and print YES when they share
# at least one character, NO otherwise.
p = int(input())
for i in range(p):
    a = input()
    b = input()
    count = 0
    # count characters of `a` that also occur in `b`
    for i in a:  # NOTE: reuses the outer loop variable `i`; harmless here
        if i in b:
            count+=1
    print('YES' if count>0 else 'NO')
|
import urllib2
from bs4 import BeautifulSoup
CAPRI_HOME = 'http://www.ebi.ac.uk/msd-srv/capri/capri.html'
if __name__ == "__main__":
    # Fetch the CAPRI home page and list links to the round pages.
    # NOTE: urllib2 is Python 2 only; under Python 3 this would need
    # urllib.request instead.
    req = urllib2.Request(CAPRI_HOME)
    response = urllib2.urlopen(req)
    home_page = response.read()
    soup = BeautifulSoup(home_page, 'html.parser')
    # anchors whose href mentions a CAPRI round
    rounds = soup.select("a[href*=round]")
    for link in rounds:
        # FIX: function-call print works on both Python 2 and 3 for one arg
        print(link.get('href'))
|
def about(name, age, city):
    """Format a one-line person summary (Russian locale strings)."""
    return f'{name}, {age} год(а), проживает в городе {city}'
# Interactive demo: collect the three fields and print the summary line.
name = input('Введите имя: ')
age = input('Введите возраст: ')
city = input('Введите город: ')
print(about(name,age,city))
# coding:utf-8
def bubble_sort(item):
    """Exchange sort, forward variant: pin the minimum of the remaining
    suffix into position i on each outer pass.  Sorts in place and
    returns the list."""
    n = len(item)
    for i in range(n - 1):
        for j in range(i + 1, n):
            if item[j] < item[i]:
                item[i], item[j] = item[j], item[i]
    return item
def bubble_sort2(item):
    """Classic bubble sort: each pass bubbles the largest remaining element
    to the right end.  Sorts in place and returns the list."""
    for unsorted_len in range(len(item) - 1, 0, -1):
        for j in range(unsorted_len):
            if item[j] > item[j + 1]:
                item[j], item[j + 1] = item[j + 1], item[j]
    return item
def select_sort(item):
    """Selection sort: swap the minimum of the unsorted tail to the front.
    Sorts in place and returns the list."""
    n = len(item)
    for i in range(n):
        # index of the first minimal element in item[i:]
        smallest = min(range(i, n), key=item.__getitem__)
        if smallest != i:
            item[i], item[smallest] = item[smallest], item[i]
    return item
def insert_sort(item):
    """Insertion sort via adjacent swaps; stops sinking as soon as the
    element is in order.  Sorts in place and returns the list."""
    for i in range(1, len(item)):
        j = i
        while j > 0 and item[j] < item[j - 1]:
            item[j], item[j - 1] = item[j - 1], item[j]
            j -= 1
    return item
def quick_sort(item, first, last):
    """In-place quicksort over item[first..last] (inclusive).

    Hoare-style hole partition around item[first]; note the left recursion
    includes index `low` (the pivot slot), which still terminates because
    the pivot is the maximum of that sub-range.
    """
    if first >= last:
        return
    mid_vlue = item[first]  # pivot value; its slot is now a "hole"
    low = first
    hight = last
    while low < hight:
        # scan from the right for an element <= pivot, move it into the hole
        while item[hight] > mid_vlue and low < hight:
            hight -= 1
        item[low] = item[hight]
        # scan from the left for an element >= pivot, move it into the hole
        while item[low] < mid_vlue and low < hight:
            low += 1
        item[hight] = item[low]
    # loop exit: low == hight — drop the pivot into its final position
    item[low] = mid_vlue
    quick_sort(item, first, low)
    quick_sort(item, low+1, last)
    return item
def shell_sort(item):
    """Shell sort with the fixed gap sequence 5, 3, 1 (in place).

    The final gap of 1 is a plain insertion sort, so the result is fully
    sorted regardless of the earlier gaps.
    """
    gaps = [5, 3, 1]
    for gap in gaps:
        # Gapped insertion sort: each element sinks within its gap-chain.
        for i in range(gap, len(item)):
            j = i
            # FIX: guard with j >= gap — the original used `j > 0`, which let
            # item[j - gap] wrap around to a negative index and compare
            # against elements at the END of the list.
            while j >= gap and item[j] < item[j - gap]:
                item[j], item[j - gap] = item[j - gap], item[j]
                j -= gap
    return item
def meiga_sort(item):
    """Merge sort: returns a NEW sorted list, leaving *item* unmodified
    (unlike the in-place sorts in this module)."""
    if len(item) <= 1:
        return item
    half = len(item) // 2
    left = meiga_sort(item[:half])
    right = meiga_sort(item[half:])
    merged = []
    i = j = 0
    # Merge: repeatedly take the smaller head of the two sorted halves.
    while i < len(left) and j < len(right):
        if left[i] < right[j]:
            merged.append(left[i])
            i += 1
        else:
            merged.append(right[j])
            j += 1
    # One tail is empty; append whatever remains of the other.
    merged.extend(left[i:])
    merged.extend(right[j:])
    return merged
def main():
    """Run every sorting routine on the same sample data.

    FIX: the original passed ONE shared list to every in-place sort, so all
    algorithms after the first received already-sorted input; each call now
    gets a fresh copy of the unsorted data.
    """
    item = [4, 7, 2, 1, 5, 3, 8, 6]
    print(bubble_sort(list(item)))
    print(bubble_sort2(list(item)))
    print(select_sort(list(item)))
    print(insert_sort(list(item)))
    print(quick_sort(list(item), 0, len(item)-1))
    print(shell_sort(list(item)))
    print(meiga_sort(item))  # returns a new list; `item` is not mutated


if __name__ == '__main__':
    main()
|
from typing import Dict, List, Callable, Union, Set
from overrides import overrides
from antu.io.vocabulary import Vocabulary
from antu.io.instance import Instance
from antu.io.datasets.dataset import Dataset
from antu.io.dataset_readers.dataset_reader import DatasetReader
from antu.utils.padding_function import shadow_padding
import random
from itertools import cycle
class DatasetSetting:
    """Pairs a dataset file path with whether it is a training split."""

    def __init__(self, file_path: str, is_train: bool):
        self.file_path = file_path  # where the split's raw data lives
        self.is_train = is_train    # True => contributes to the vocabulary
class SingleTaskDataset:
    """Holds the named splits (train/dev/test/...) of one task together with
    the vocabulary built from its training split(s)."""

    def __init__(
        self,
        vocabulary: Vocabulary,
        datasets_settings: Dict[str, DatasetSetting],
        reader: DatasetReader):
        self.vocabulary = vocabulary
        self.datasets_settings = datasets_settings  # split name -> setting
        self.datasets = dict()  # split name -> List[Instance]; filled by build_dataset
        self.reader = reader

    def build_dataset(
        self,
        counters: Dict[str, Dict[str, int]],
        min_count: Union[int, Dict[str, int]] = dict(),
        no_pad_namespace: Set[str] = set(),
        no_unk_namespace: Set[str] = set()) -> None:
        """Read every split, grow the vocabulary from the training splits'
        counts, then index all instances against the final vocabulary."""
        for name, setting in self.datasets_settings.items():
            self.datasets[name] = self.reader.read(setting.file_path)
            if setting.is_train:
                # only training data contributes to the vocabulary counts
                for ins in self.datasets[name]:
                    ins.count_vocab_items(counters)
        self.vocabulary.extend_from_counter(
            counters, min_count, no_pad_namespace, no_unk_namespace)
        for name in self.datasets:
            for ins in self.datasets[name]:
                ins.index_fields(self.vocabulary)

    def get_dataset(self, name: str) -> List[Instance]:
        """Return the instances of split *name*."""
        return self.datasets[name]

    def get_batches(
        self,
        name: str,
        size: int,
        ordered: bool=False,
        cmp: Callable[[Instance, Instance], int]=None,
        is_infinite: bool=False) -> List[List[int]]:
        """Yield (indexes, masks) padded batches of split *name*.

        NOTE: despite the annotation this is a generator.  `cmp` is used as
        a sort *key* function, not an old-style comparator.  When
        *is_infinite* is true, the cached batches are afterwards replayed
        forever in shuffled order.
        """
        #print(self.datasets[name])
        if ordered: self.datasets[name].sort(key=cmp)
        num = len(self.datasets[name]) # Number of Instances
        result = []
        for beg in range(0, num, size):
            ins_batch = self.datasets[name][beg: beg+size]
            idx_batch = [ins.index_fields(self.vocabulary) for ins in ins_batch]
            indexes, masks = shadow_padding(idx_batch, self.vocabulary)
            yield indexes, masks
            result.append((indexes, masks))  # cached for the infinite replay
        while is_infinite:
            random.shuffle(result)
            for indexes, masks in result:
                yield indexes, masks
    # def build_batches(self, )
|
from django.urls import path
from . import views
# URL routes for this app: four generic pages plus per-shelf (estanteria)
# and per-office (oficina) views.  Each route name matches its view so
# templates can reverse() them.
urlpatterns = [
    path('',views.page1,name="page1"),
    path('page2',views.page2,name="page2"),
    path('page3',views.page3,name="page3"),
    path('page4',views.page4,name="page4"),
    path('estanteria1',views.estanteria1,name="estanteria1"),
    path('estanteria2',views.estanteria2,name="estanteria2"),
    path('estanteria3',views.estanteria3,name="estanteria3"),
    path('estanteria4',views.estanteria4,name="estanteria4"),
    path('oficina1',views.oficina1,name="oficina1"),
    path('oficina2',views.oficina2,name="oficina2"),
    path('oficina3',views.oficina3,name="oficina3"),
    path('oficina4',views.oficina4,name="oficina4"),
]
|
"""
import sys
n=int(input())
stack=[]
result=[]
array=[]
for i in range(n):
num=int(sys.stdin.readline())
for j in range(1,n+2):
if len(stack)==0:
stack.append(j)
result.append('+')
print (" ",j,result)
continue
if stack[-1]>num :
stack.pop()
result.append('-')
elif stack[-1]==num:
temp=stack.pop()
result.append('-')
array.append(temp)
print (" ",j,result)
break
else:
if j in array:
continue
else:
stack.append(j)
result.append('+')
print (" ",j,result)
print (result)
print(array)
"""
# Stack-sequence problem (BOJ 1874 style): push 1..n in increasing order and
# pop whenever the next target number is on top.  Print the '+'/'-' operation
# sequence, or NO if the target sequence cannot be produced.
import sys
n=int(input())
s=[]        # working stack
op=[]       # record of operations: '+' push, '-' pop
count=1     # next value to push
temp=True   # flips to False once the sequence proves impossible
for i in range(n):
    num=int(sys.stdin.readline())
    # push until `num` has been pushed onto the stack
    while count<=num:
        s.append(count)
        op.append('+')
        count+=1
    if s[-1]==num:
        s.pop()
        op.append('-')
    else:
        # `num` is buried under a larger value: impossible
        temp=False
if temp==False:
    print('NO')
else:
    for i in op:
        print(i)
def authen(username, password):
    """Return True only for the single hard-coded credential pair."""
    return username == "coachcarl1000" and password == "pikachuPika"
|
import pytube
import converter_moviepy
import os
# remove from title illegal characters found
def clean_title(title):
    """Strip filename-illegal characters ('"' and '|') from *title*,
    print the cleaned value, and return it."""
    for bad in ('"', '|'):
        title = title.replace(bad, "")
    print("New title:" + title)
    return title
# check the resolutions available for the video
def check_available_resolutions(video):
    """Print and return the mp4 resolutions that *video* offers, checked
    against a fixed candidate list from 144p up to 1080p."""
    print("The available resolutions for this video are:")
    available = []
    for res in ("144p", "240p", "360p", "480p", "720p", "1080p"):
        matching = video.streams.filter(resolution=res, mime_type="video/mp4")
        if len(matching) > 0:
            available.append(res)
            print(res)
    return available
# download the mp4 video and audio and combine them together
def download_mp4(url, path):
    """Download a YouTube video as mp4 at a user-chosen resolution.

    Video and audio are fetched as separate streams, muxed via
    converter_moviepy, and the temporary parts are deleted afterwards.
    url  -- YouTube video URL
    path -- destination directory handed to the combiner
    """
    video = pytube.YouTube(url)
    title = video.title
    title = clean_title(title)
    available_res = check_available_resolutions(video)
    resolution = input("Choose one of the above resolutions:")
    if resolution in available_res:
        print(f'Downloading in {resolution}...')
        # get the proper video stream
        video_stream = video.streams.filter(resolution=resolution, mime_type="video/mp4").first()
        # get the proper audio stream
        audio_stream = video.streams.get_audio_only()
        # download video
        video_path = video_stream.download(filename=f'{title}_video')
        # download audio
        audio_path = audio_stream.download(filename=f'{title}_audio')
        # call the function to combine the downloaded video and audio
        converter_moviepy.combine_video_and_audio(video_path, audio_path, title, path)
        print("Deleting video and audio...")
        # delete downloaded video and audio parts
        os.remove(video_path)
        os.remove(audio_path)
    else:
        print("The inserted resolution is not valid. Closing.")
# download the mp4 video and convert it to mp3
def download_mp3(url, path):
    """Download a YouTube video's audio and convert it to mp3.

    url  -- YouTube video URL
    path -- destination directory handed to the converter
    """
    video = pytube.YouTube(url)
    title = video.title
    title = clean_title(title)
    print("Downloading audio...")
    # download mp4 (audio-only) file
    audio_path = video.streams.get_audio_only().download()
    # call the function to convert it to mp3
    converter_moviepy.convert_to_mp3(audio_path, title, path)
    # delete the intermediate mp4 file
    os.remove(audio_path)
|
# Project Euler Problem 6
# Find the difference between the sum of the squares of the first one
# hundred natural numbers and the square of the sum.
total = 0         # running sum of 1..100
square_total = 0  # running sum of squares 1^2..100^2
for n in range(1, 101):
    total += n
    square_total += n * n
# square of the sum minus sum of the squares
difference = total * total - square_total
print(difference)
# Correct
|
import cv2
import numpy as np
import time
# omni-lens with android_ros app param
Cx = 695
Cy = 350
SR = 120
LR = 230
imagelist = ['imageLists.txt', 'imageLists_step_2.txt', 'imageLists_step_5.txt', 'imageLists_step_10.txt']
# for loading raw datasets and panorama it to a general images
class MyDataloader:
    """Load the raw omni-lens frames listed in an index file and unwrap
    them into panoramic images via cv2.remap."""

    def __init__(self, root):
        # root: dataset directory containing the image-list files
        self.path = root
        self.raw_images = []     # grayscale source frames
        self.unwrap_images = []  # panoramic results, filled by unwrap_dataset()
        # reading lines for extract images
        file = self.path + '/' + imagelist[0]
        with open(file, 'r') as f:
            for line in f.readlines():
                image_path = self.path + '/' + line.strip('\n')
                # print(image_path)
                img_temp = cv2.imread(image_path, 0)  # flag 0 => grayscale
                self.raw_images.append(img_temp)
        # unwrape image parameters (remap lookup tables, built lazily)
        self.map_x, self.map_y = None, None

    def set_unwrap_param(self, img):
        # Build the polar->cartesian remap tables from the module constants
        # Cx/Cy/SR/LR.  NOTE(review): `img` is unused — confirm before removing.
        Hd = int(LR - SR)                # destination height: ring thickness
        Wd = int(np.pi * (SR + LR) + 1)  # destination width: mean circumference
        dst = np.zeros([Hd, Wd])
        map_x = np.zeros(dst.shape, np.float32)
        map_y = np.zeros(dst.shape, np.float32)
        start_time = time.time()
        # For every destination pixel compute its source (polar) coordinate.
        for j in range(dst.shape[0]):
            for i in range(dst.shape[1]):
                r = j / Hd * (LR - SR) + SR   # radius, between SR and LR
                theta = i / Wd * 2 * np.pi    # angle around the lens center
                Xs = Cx + r * np.sin(theta)
                Ys = Cy + r * np.cos(theta)
                map_x[j, i] = Xs
                map_y[j, i] = Ys
        print("--- %s seconds ---" % (time.time() - start_time))
        self.map_x, self.map_y = map_x, map_y

    def unwrap_dataset(self):
        # Build the lookup tables once, then remap every raw frame.
        self.set_unwrap_param(self.raw_images[0])
        for i in range(len(self.raw_images)):
            # img_temp = self.unwrap_image(self.raw_images[i])
            img_temp = cv2.remap(self.raw_images[i], self.map_x, self.map_y, cv2.INTER_LINEAR)
            self.unwrap_images.append(img_temp)
|
from mongo_connection import MongoConnection
#############################################
class User():
    """Mongo-backed user record with credential, profile, and reading-history
    helpers.  Accounts live in the 'users_data' collection; per-article
    history in 'user_history'."""

    def __init__(self, username, password="", first_name="", last_name=""):
        self.username = username
        # NOTE(review): the password is stored and compared in plain text —
        # confirm whether hashing is handled elsewhere.
        self.password = password
        self.first_name = first_name
        self.last_name = last_name
        self.full_name = first_name + " " + last_name
        # One connection per User instance.
        self.conn = MongoConnection()
        self.db = self.conn.connect_users_db()
        self.col = self.db['users_data']
    #########################################
    def authenticate(self):
        # True when a document matches both username and password exactly.
        col = self.col.find_one({
            'username': self.username,
            'password': self.password
        })
        return True if col else False
    #########################################
    def create_user(self):
        # Returns an error message on duplicate username, otherwise None
        # after inserting the new account.
        user = self.col.find_one({
            'username': self.username
        })
        if user:
            return 'Username already registred'
        self.col.insert({
            'username': self.username,
            'password': self.password,
            'full_name': self.full_name,
            'first_name': self.first_name,
            'last_name': self.last_name
        })
    #########################################
    def get_user(self):
        # Fetch the account document, omitting _id and the password field.
        user = self.col.find_one({
            'username': self.username
        }, {'_id': 0, 'password': 0})
        return user
    #########################################
    def add_to_history(self, time, article_id):
        # Upsert the (user, article) pair, refreshing its access time.
        collection = self.db['user_history']
        collection.update(
            {'username': self.username, 'doc_id': article_id},
            {'$set': {'time': time}},
            upsert=True
        )
    #########################################
    def get_user_history(self):
        # All history entries for this user, without Mongo _id fields.
        collection = self.db['user_history']
        history = collection.find({
            'username': self.username
        }, {'_id': 0})
        return list(history)
from marshmallow import (
fields,
Schema,
validate,
validates_schema,
ValidationError
)
from api.validators import email_not_existing, password_validate
class NewUserSchema(Schema):
    """Marshmallow validation schema for user sign-up payloads."""
    name = fields.Str(required=True)
    surname = fields.Str(required=True)
    # email must not already be registered (custom validator)
    email = fields.Email(required=True, validate=email_not_existing)
    password = fields.Str(validate=password_validate, required=True)
    phone = fields.Str(required=True)
    # `missing` supplies the default when the field is absent from the input
    role = fields.Str(missing='user', validate=validate.OneOf(choices=['user', 'delivery']))
    subscription = fields.Str(missing='flat', validate=validate.OneOf(choices=['flat', 'premium']))
    photo_url = fields.Url(required=False)

    @validates_schema
    def validate_photo(self, data, **kwargs):
        # Delivery accounts must supply a photo; `role` is always present
        # here thanks to its `missing` default.
        if data['role'] == 'delivery' and not data.get('photo_url'):
            raise ValidationError("Delivery must upload photo", field_name='photo_url')
|
import maya.cmds as cmds
'''
Goes through selected items and checks if translates, rotates, and scales are
identity, changing them to identity if they are not identity and not locked.
Created by Derek Ho
'''
def CheckItem(a, b):
    """Append attribute name *a* to list *b* when its value differs from the
    identity and the attribute is not locked; return the (new) list.

    Scale attributes are compared against 1.0; translate/rotate against 0.0.
    """
    # identity value depends on whether this is a scale channel
    identity = 1.0 if a.find("scale") != -1 else 0.0
    if (cmds.getAttr(a) != identity) and (cmds.getAttr(a, lock=True) == False):
        b = b + [a]
    return b
def SetItem(a):
    """Reset attribute *a* to its identity value (1.0 for scale channels,
    0.0 for translate/rotate)."""
    cmds.setAttr(a, 1.0 if a.find("scale") != -1 else 0.0)
def IdentityChecker():
    """Reset every unlocked, non-identity translate/rotate/scale attribute
    on the current Maya selection to its identity value."""
    selected = cmds.ls(selection=True)
    # Build the full attribute list: 9 TRS channels per selected node.
    # (Replaces the original's repeated `arr = arr + [..]` concatenations.)
    attrArr = []
    for node in selected:
        for channel in ('translate', 'rotate', 'scale'):
            for axis in 'XYZ':
                attrArr.append('{}.{}{}'.format(node, channel, axis))
    # Collect attributes that deviate from identity and are editable.
    toChange = []
    for attr in attrArr:
        toChange = CheckItem(attr, toChange)
    # print "toChange = ", toChange
    # Reset each offender to its identity value.
    for attr in toChange:
        SetItem(attr)
IdentityChecker()
|
#!/usr/bin/python
#-*- coding: utf-8 -*-
"""
This script creates a file containing all paths of the dataset.
"""
import logging
logger = logging.getLogger(__name__)
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
import argparse
import utils
if __name__ == "__main__":
    # CLI: walk `input_folder` and write every dataset image path to the
    # (optional) output file via utils.create_pathfile.
    argparser = argparse.ArgumentParser()
    argparser.add_argument('inputfolder', metavar='input_folder', help='Path to the folder containing images of the dataset.')
    argparser.add_argument('-o', '--output', help='Path to the file where paths are saved.', default=None)
    args = argparser.parse_args()
    utils.create_pathfile(args.inputfolder, args.output)
|
from django.db import models
# Create your models here.
class Product(models.Model):
    # Catalog item.  NOTE(review): an `address` TextField on a product looks
    # copied from Shops — confirm it is intentional.
    name = models.CharField(max_length=30)
    address = models.TextField()
    price = models.IntegerField()  # whole currency units

    def __str__(self):
        return self.name
class Shops(models.Model):
    # A shop stocking one product; deleting the product deletes the shop rows.
    name = models.CharField(max_length=30)
    address = models.TextField()
    product = models.ForeignKey(Product, on_delete=models.CASCADE)
    total = models.IntegerField(default=0)  # stock count of `product`

    def __str__(self):
        return self.name
|
"""
Models for Orders Service(adapted from Professor Rofrano's demo code)
All of the models are stored in this module
Models
------
Order - An order model used in the Online Shopping System
Attributes:
-----------
"""
import logging
import json
from flask_sqlalchemy import SQLAlchemy
from datetime import datetime
# Create the SQLAlchemy object to be initialized later in init_db()
db = SQLAlchemy()
class DataValidationError(Exception):
    """Raised when deserialization receives bad or incomplete data."""
    pass
class Order(db.Model):
    """
    An order placed by a customer, with its line items.

    Warning: since decimal is not supported by SQLite, we'll
    use float instead of Numeric as a workaround.
    """
    logger = logging.getLogger(__name__)
    app = None  # Flask app, bound in init_db()

    # Table Schema
    id = db.Column(db.Integer, primary_key=True)
    cust_id = db.Column(db.Integer)  # customer who placed the order
    created_on = db.Column(db.DateTime, server_default=db.func.now())
    updated_on = db.Column(db.DateTime, server_default=db.func.now(), server_onupdate=db.func.now())
    # Line items; passive_deletes defers to the DB's ON DELETE CASCADE.
    items = db.relationship('OrderItem', backref='order', lazy='dynamic', passive_deletes=True)

    def __repr__(self):
        # return '<Order %r>' % (self.name)
        return str(self.serialize())

    def save(self):
        # Add to the session only for new (id-less) orders; the commit also
        # flushes pending updates to an existing order.
        if not self.id:
            db.session.add(self)
        db.session.commit()

    def delete(self):
        """Delete this order (and, via cascade, its items) from the DB."""
        db.session.delete(self)
        db.session.commit()

    def serialize(self):
        """
        Serialize this Order (with nested items) into a dictionary.

        Warning: since decimal is not supported by SQLite, we'll
        use float as a workaround
        """
        return {
            "id": self.id,
            "cust_id": self.cust_id,
            "created_on": self.created_on,
            "items": [item.serialize() for item in self.items]
        }

    def deserialize(self, data):
        # Populate this order from a dict.  Returns (self, True) on success
        # or (error message, False) on bad input instead of raising.
        if not isinstance(data, dict):
            return "Invalid order: body of request contained bad or no data", False
        try:
            self.cust_id = data['cust_id']
            for item in data['items']:
                self.items.append(OrderItem(prod_id=item['prod_id'],
                                            prod_name = item['prod_name'],
                                            prod_qty = item['prod_qty'],
                                            prod_price = float(item['prod_price']),
                                            status = item['status']))
        except KeyError as error:
            return "Invalid order: missing " + error.args[0], False
        return self, True

    @staticmethod
    def init_db(app):
        """Bind the SQLAlchemy object to *app* and create all tables."""
        Order.logger.info('Initializing database')
        Order.app = app
        db.init_app(app)
        # This context is only used by nosetests so we
        # move it to test code
        # app.app_context().push()
        db.create_all()

    @staticmethod
    def all():
        """Return every Order in the database."""
        Order.logger.info('Processing all Orders')
        return Order.query.all()

    @staticmethod
    def find(order_id):
        """Return the Order with *order_id*, or None."""
        Order.logger.info('Processing lookup for id %s ...', order_id)
        return Order.query.get(order_id)

    @staticmethod
    def find_or_404(order_id):
        """Return the Order with *order_id*, or abort with HTTP 404."""
        Order.logger.info('Processing lookup or 404 for id %s ...', order_id)
        return Order.query.get_or_404(order_id)

    @staticmethod
    def find_by_name(name):
        # NOTE(review): filters on OrderItem.prod_name without joining
        # OrderItem, which yields a cross join in SQLAlchemy — confirm a
        # .join(OrderItem) was intended.
        Order.logger.info('Processing name query for %s ...', name)
        return Order.query.filter(OrderItem.prod_name == name)

    @staticmethod
    def find_by_order_item_id(id):
        """Return the OrderItem with primary key *id* (note: an item, not an Order)."""
        Order.logger.info('Processing name query for %s ...', id)
        return OrderItem.query.get(id)

    @staticmethod
    def find_by_cust_id(cust_id):
        """Return a query for all Orders belonging to customer *cust_id*."""
        Order.logger.info('Processing customer id query for %s ...', cust_id)
        return Order.query.filter(Order.cust_id == cust_id)

    @staticmethod
    def remove_all():
        """ Remove all orders from the database """
        # NOTE(review): with the commit commented out, the deletion is not
        # persisted until some later commit — confirm this is intentional.
        rows_deleted = Order.query.delete()
        # db.session.commit()
        Order.logger.info("Deleted %d rows", rows_deleted)
class DateTimeEncoder(json.JSONEncoder):
    """JSON encoder that serializes datetime objects as ISO-8601 strings."""

    def default(self, o):
        # Only datetimes get special treatment; everything else is deferred
        # to the base class, which raises TypeError for unknown types.
        if not isinstance(o, datetime):
            return json.JSONEncoder.default(self, o)
        return o.isoformat()
class OrderItem(db.Model):
    """A single product line belonging to an Order."""

    id = db.Column(db.Integer, primary_key=True)
    # Database-level cascade: items are removed together with their order.
    order_id = db.Column(db.Integer, db.ForeignKey('order.id', ondelete='CASCADE'))
    prod_id = db.Column(db.Integer)
    prod_name = db.Column(db.String(63))
    prod_qty = db.Column(db.Integer)
    prod_price = db.Column(db.Float)
    status = db.Column(db.String(63))
    created_on = db.Column(db.DateTime, server_default=db.func.now())
    updated_on = db.Column(db.DateTime, server_default=db.func.now(), server_onupdate=db.func.now())

    def serialize(self):
        """ Serializes an OrderItem into a dictionary """
        # Timestamps are run through DateTimeEncoder, so they come out as
        # JSON-quoted ISO strings (e.g. '"2018-01-01T00:00:00"').
        return {
            "id": self.id,
            "order_id": self.order_id,
            "prod_id": self.prod_id,
            "prod_name": self.prod_name,
            "prod_qty": self.prod_qty,
            "prod_price": self.prod_price,
            "status": self.status,
            "created_on": json.dumps(self.created_on, cls=DateTimeEncoder),
            "updated_on": json.dumps(self.updated_on, cls=DateTimeEncoder)
        }
|
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from .models import Profile, Reservation
# (code, human label) pairs for the account role choice fields below.
ROLES = (
    ('O', 'Owner'),
    ('R', 'Renter')
)
class SignUpForm(UserCreationForm):
    """User registration form with an extra owner/renter role choice."""

    role = forms.ChoiceField(choices=ROLES, help_text='Required. select either Owner/Renter')

    class Meta:
        model = User
        fields = ('username', 'password1', 'password2', 'email', 'role',)
class UserForm(forms.ModelForm):
    """Edit form for the built-in User's name and email."""

    class Meta:
        model = User
        fields = ('username','email')
class ProfileForm(forms.ModelForm):
    """Edit form for a Profile's role and available fund."""

    class Meta:
        model = Profile
        fields = ('role', 'fund')
class AdjustFundForm(forms.ModelForm):
    """Narrow form that only updates a Profile's fund amount."""

    class Meta:
        model = Profile
        fields = ('fund',)
class ReservationForm(forms.ModelForm):
    """Booking form: the user picks a date and a time slot."""

    class Meta:
        model = Reservation
        fields = ['date', 'time']
from rv.chunks import DrawnWaveformChunk
from rv.modules import Behavior as B
from rv.modules import Module
from rv.modules.base.generator import BaseGenerator
class Generator(BaseGenerator, Module):
    """Generator module that carries a user-drawn waveform chunk."""

    chnk = 1  # NOTE(review): chunk-count marker used by rv serialization -- confirm
    behaviors = {B.receives_notes, B.receives_modulator, B.sends_audio}

    class DrawnWaveform(DrawnWaveformChunk):
        chnm = 0  # chunk index this waveform occupies within the module

    def __init__(self, **kwargs):
        # Pop `samples` before the base __init__ sees the kwargs, then apply
        # it to the freshly created waveform chunk.
        samples = kwargs.pop("samples", None)
        super(Generator, self).__init__(**kwargs)
        self.drawn_waveform = self.DrawnWaveform()
        if samples is not None:
            self.drawn_waveform.samples = samples

    def specialized_iff_chunks(self):
        """Yield the waveform's IFF chunks first, then the base module's."""
        yield from self.drawn_waveform.chunks()
        yield from super(Generator, self).specialized_iff_chunks()

    def load_chunk(self, chunk):
        """Dispatch a parsed chunk to its loader (only chunk 0 is known here)."""
        if chunk.chnm == 0:
            self.load_drawn_waveform(chunk)

    def load_drawn_waveform(self, chunk):
        # Convert samples from unsigned to signed.
        # (y & 0x7F) - (y & 0x80) reinterprets each byte as a signed 8-bit value.
        self.drawn_waveform.samples = [
            (int(y) & ((1 << 7) - 1)) - (int(y) & (1 << 7)) for y in chunk.chdt
        ]
        # chff may be absent/zero in the file; treat that as format 1.
        self.drawn_waveform.format = self.drawn_waveform.Format(chunk.chff or 1)
        self.drawn_waveform.freq = chunk.chfr
|
#import sys
#input = sys.stdin.readline
def main():
    """Read N and list A; print the total increments needed so that the
    running maximum is never exceeded by a later, smaller element."""
    _ = int(input())  # element count -- consumed but not needed
    heights = list(map(int, input().split()))
    tallest = 0
    total = 0
    for h in heights:
        if h < tallest:
            # Raise this element up to the running maximum.
            total += tallest - h
        else:
            tallest = h
    print(total)


if __name__ == '__main__':
    main()
|
import unittest
def triangle(n: int) -> list[str]:
    """Return the first *n* rows of a star triangle.

    Row i (1-based) is the string "*" repeated i times; n <= 0 yields [].
    """
    # Comprehension replaces the loop-and-append version, which also
    # shadowed the builtin `sum` with its accumulator variable.
    return ["*" * i for i in range(1, n + 1)]
class Test(unittest.TestCase):
    """Unit tests for triangle()."""

    def test_input_n_equals_3(self):
        self.assertEqual(triangle(3), ["*", "**", "***"])

    def test_input_n_equals_4(self):
        self.assertEqual(triangle(4), ["*", "**", "***", "****"])

    def test_input_n_equals_6(self):
        self.assertEqual(triangle(6), ["*", "**", "***", "****", "*****", "******"])


if __name__ == "__main__":
    unittest.main()
|
import shutil, torch
def save_checkpoint(state, is_best, filename='checkpoint.pth.tar'):
    """Serialize *state* to *filename*; mirror it to model_best.pth.tar
    when this checkpoint is the best seen so far."""
    torch.save(state, filename)
    if not is_best:
        return
    shutil.copyfile(filename, 'model_best.pth.tar')
save_checkpoint({
'epoch': epoch + 1,
'arch': args.arch,
'state_dict': model.state_dict(),
'best_prec1': best_prec1,
'optimizer' : optimizer.state_dict(),
}, is_best)
'''
best_prec1 = <whatever>
while training:
prec1 = validate(val_loader, model, criterion)
is_best = prec1 > best_prec1
best_prec1 = max(prec1, best_prec1)
'''
|
#!/bin/env python
from argparse import ArgumentParser
import subprocess
def main() :
    """Build a DaVinci options file from command-line arguments and write it.

    Returns the name of the options file that was written.
    """
    from GaudiScriptBuilder.AppConfig import DaVinciScript

    argparser = ArgumentParser()
    argparser.add_argument('--datafile')
    argparser.add_argument('--linename')
    argparser.add_argument('--version')
    argparser.add_argument('--outputfile')
    argparser.add_argument('--L0List', default = '')
    argparser.add_argument('--HLT1List', default = '')
    argparser.add_argument('--HLT2List', default = '')
    argparser.add_argument('--strippingList', default = '')
    argparser.add_argument('--aliases', default = '{}')
    argparser.add_argument('--labXAliases', action = 'store_true', default = False)
    argparser.add_argument('--substitutions', default = '{}')
    argparser.add_argument('--optssuffix', default = 'settings')
    argparser.add_argument('--extraopts', default = '')
    argparser.add_argument('--extraoptsfile', default = '', nargs = '*')
    argparser.add_argument('--useTrackScaleState', default = 'True')
    argparser.add_argument('--datatype', default = None)
    argparser.add_argument('--diracversion', default = 'prod')
    argparser.add_argument('--force', action = 'store_true', default = False)
    argparser.add_argument('--mooreversion', default = 'latest')
    args = argparser.parse_args()

    # NOTE(review): eval() on command-line strings executes arbitrary code.
    # Acceptable for a trusted local tool; consider ast.literal_eval if these
    # options are only ever Python literals.
    opts = DaVinciScript(args.outputfile, args.version, args.linename, args.datafile,
                         L0List = args.L0List.split(),
                         HLT1List = args.HLT1List.split(),
                         HLT2List = args.HLT2List.split(),
                         strippingList = args.strippingList.split(),
                         aliases = eval(args.aliases),
                         labXAliases = args.labXAliases,
                         substitutions = eval(args.substitutions),
                         optssuffix = args.optssuffix,
                         extraopts = args.extraopts,
                         extraoptsfile = args.extraoptsfile,
                         useTrackScaleState = eval(args.useTrackScaleState),
                         datatype = args.datatype,
                         diracversion = args.diracversion,
                         force = args.force,
                         mooreversion = args.mooreversion)
    fname = opts.write()
    # Fix: print as a function so the script parses under Python 3 as well
    # as Python 2 (same output as the old `print 'Created', fname`).
    print('Created ' + fname)
    return fname


if __name__ == '__main__' :
    main()
|
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Nombre: stackedWidget.py
# Autor: Miguel Andres Garcia Niño
# Creado: 11 de Mayo 2018
# Modificado: 11 de Mayo 2018
# Copyright: (c) 2018 by Miguel Andres Garcia Niño, 2018
# License: Apache License 2.0
# ----------------------------------------------------------------------------
# NOTE(review): non-ASCII identifier; legal in Python 3 but easy to mistype.
__versión__ = "1.0"

"""
El módulo *stackedWidget* proporciona una pila de widgets donde solo está visible un
widget a la vez.
"""
from PyQt5.QtGui import QIcon
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import (QApplication, QDialog, QComboBox, QStackedWidget, QWidget,
QPushButton, QLabel, QVBoxLayout)
# ======================== CLASE Boton ===========================
class Boton(QWidget):
    """Page widget holding a single push button."""

    def __init__(self, parent=None):
        super(Boton, self).__init__(parent)
        self.initUI()

    def initUI(self):
        """Build the button inside a margin-less vertical layout."""
        layout = QVBoxLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        layout.addWidget(QPushButton("Esto es un botón (QPushButton)"))
        self.setLayout(layout)
# ====================== CLASE Etiqueta ==========================
class Etiqueta(QWidget):
    """Page widget holding a single centered label."""

    def __init__(self, parent=None):
        super(Etiqueta, self).__init__(parent)
        self.initUI()

    def initUI(self):
        """Build the label, centered, in a margin-less vertical layout."""
        label = QLabel("Esto es una etiqueta (QLabel)")
        layout = QVBoxLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        layout.addWidget(label)
        layout.setAlignment(label, Qt.AlignCenter)
        self.setLayout(layout)
# ====================== CLASE ventanaHija =========================
class stackedWidget(QDialog):
    """Fixed-size dialog demonstrating a QStackedWidget driven by a combo box."""

    def __init__(self, parent=None):
        super(stackedWidget, self).__init__(parent)

        self.setWindowTitle("Pila de widgets (QStackedWidget) por : ANDRES NIÑO")
        self.setWindowIcon(QIcon("icono.png"))
        self.setWindowFlags(Qt.WindowCloseButtonHint | Qt.MSWindowsFixedSizeDialogHint)
        self.setFixedSize(400, 400)

        self.initUI()

    def initUI(self):
        # ======================= WIDGETS ==========================
        self.comboBox = QComboBox(self)
        self.comboBox.addItems(["Boton", "Etiqueta"])
        self.comboBox.setGeometry(20, 20, 360, 24)

        # One stacked page per combo entry, added in the same order as the
        # combo items so the indexes line up.
        self.boton = Boton(self)
        self.etiqueta = Etiqueta(self)

        self.stackedWidget = QStackedWidget(self)
        self.stackedWidget.addWidget(self.boton)
        self.stackedWidget.addWidget(self.etiqueta)
        self.stackedWidget.setGeometry(20, 84, 360, 25)

        # ==================== QCOMBOBOX EVENT =====================
        self.comboBox.activated.connect(self.cambiarWidget)

    # ======================= FUNCTIONS ============================
    def cambiarWidget(self):
        # Index of the item selected in the combo box
        widget = self.comboBox.currentIndex()
        # Show the stacked page with the same index
        self.stackedWidget.setCurrentIndex(widget)
# ================================================================
if __name__ == '__main__':
    import sys

    # Standard Qt bootstrap: create the application, show the dialog,
    # and hand control to the event loop.
    aplicacion = QApplication(sys.argv)
    ventana = stackedWidget()
    ventana.show()
    sys.exit(aplicacion.exec_())
|
# Generated by Django 2.0.2 on 2018-02-28 06:44
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: make Article.picture optional with a static default image."""

    dependencies = [
        ('articles', '0010_auto_20180228_0941'),
    ]

    operations = [
        migrations.AlterField(
            model_name='article',
            name='picture',
            field=models.ImageField(blank=True, default='articles/static/img.png', upload_to='article_pictures'),
        ),
    ]
|
"""
NPDownloader.py
Created by Jonathon Scofield
Designed to download all comics from the 8-bit theater on nuklearpower.com
Possibly will expand to further download capability later
"""
import os
from bs4 import BeautifulSoup
import wget
import urllib
def oranizeFiles():
    """Return the download path for the current episode.

    Episodes are bucketed into folders of ten (0, 10, 20, ...). Relies on
    the module-level globals `epnum` and `pathname` set by the script below.
    """
    for bucket in range(0, 1230, 10):
        if epnum <= bucket + 10:
            return pathname + "/{0}/Episode{1}.jpg".format(bucket, epnum)
dirname = "8-bit Theater"
initialLink = 'http://www.nuklearpower.com/2001/03/02/episode-001-were-going-where/'
pathname = os.path.expanduser('~') + "/Documents/" + dirname

# Create the destination folder and one sub-folder per block of ten episodes.
if os.path.exists(pathname) == False:
    os.makedirs(pathname)
for i in range(0, 1230, 10):
    newdir = pathname + "/{}".format(i)
    if os.path.exists(newdir) == False:
        os.mkdir(newdir)

looper = True
epnum = 1
# Crawl page by page: download every <img> whose alt text mentions
# "Episode", then follow the rel="next" link until there is none.
while looper:
    content = urllib.request.urlopen(initialLink).read()
    looper = False #assume false until proven true
    soup = BeautifulSoup(content, features="html5lib")
    for image in soup.find_all('img'):
        check = image.get('alt')
        if "Episode" in check:
            ep = image.get('src')
            percentage = epnum / 1230 * 100
            print(" Comic Strip #" + str(epnum) + " " + str(round(percentage, 2)) + "%")
            dloc = oranizeFiles()
            epnum += 1
            wget.download(ep, dloc)
    for links in soup.find_all('a'):
        checknext = str(links.get('rel'))
        if "next" in checknext:
            looper = True
            initialLink = links.get('href')
print(" Series Complete 100%")
|
#!/bin/python3
# snakes and ladders DP solution
# Read an int count, then that many "start end" pairs; each pair is left as
# a lazy map(int, ...) iterator that the consumer unpacks exactly once.
inin = lambda: [ map(int, input().split()) for _ in range(int(input())) ]
def shortest():
    """Return the minimum number of die rolls to reach square 100, or -1.

    cost[i] is the best-known roll count for 0-based square i; 999 acts
    as "infinity" and None marks squares proven unreachable.
    """
    cost = [0] + [999] * 99
    # Merge ladders and snakes into a single start -> end map (0-based).
    jumps = { s - 1: e - 1 for s, e in inin() + inin() }
    i = 1
    while i < 100:
        # Candidate predecessors: up to 6 squares behind i that are
        # reachable and are not jump starts (you cannot stop on those).
        p = [ cost[j] for j in range(max(0, i-6), i)
              if cost[j] is not None and j not in jumps ]
        if not p:  # unreachable
            cost[i] = None
            i += 1
            continue
        cost[i] = min(min(p) + 1, cost[i])
        if i in jumps:
            j = jumps[i]
            # Fix: guard cost[j] being None -- `int < None` raises
            # TypeError on Python 3; a jump always beats "unreachable".
            if cost[j] is None or cost[i] < cost[j]:
                cost[j] = cost[i]
                # Re-process from the jump target if it lies behind us.
                i = min(i+1, j)
                continue
        i += 1
    return cost[99] if cost[99] else -1
# One test case per board: read the case count, then solve each in turn.
for _ in range(int(input())):
    print(shortest())
|
class Solution:
    """LeetCode 136: find the element that appears exactly once when every
    other element appears exactly twice."""

    def singleNumber(self, nums) -> int:
        """Hash-based approach: toggle membership so duplicates cancel.

        The second occurrence of a value removes its entry, leaving only
        the unique value in the dict at the end. O(n) time, O(n) space.
        """
        seen = {}
        for value in nums:
            if value in seen:
                seen.pop(value)
            else:
                seen[value] = 1
        return seen.popitem()[0]

    def singleNumber2(self, nums):
        """XOR approach: x ^ x == 0, so duplicates cancel in O(1) space.

        :type nums: List[int]
        :rtype: int
        """
        acc = 0
        for value in nums:
            acc ^= value
        return acc
# Quick manual check: the unique element of [4,1,2,1,2] is 4.
s = Solution()
res = s.singleNumber([4,1,2,1,2])
print(res)
|
import threading #import all librarisch
from rover_5 import Rover
from UltrasoneSensor import UltrasoneSensor
from KleurenSensor import KleurenSensor
from Camera import Camera
from servo import Arm
import time
import RPi.GPIO as GPIO
from multiprocessing.pool import ThreadPool
class Robot(object):
    """High-level mission logic for the rover (Python 2 source).

    Relies on module-level globals created in the __main__ block below:
    rover, USS, kleurensensor, threads t1..t7 and thread pools pool1/pool2.
    """

    def RijNaarBoter(self):
        """Drive forward until an obstacle (<= 15 cm) or a black line is seen."""
        # Sensor and drive threads run as daemons so they die with the program.
        t1.daemon = True
        t3.daemon = True
        t4.daemon = True
        t1.start()
        t3.start()
        t4.start()
        bool = True  # NOTE(review): shadows the builtin `bool`
        while bool:
            # Poll front distance and colour sensor synchronously via the pools.
            USS_voor_result = pool1.apply_async(USS.MeetAfstandVoor)
            return_USS_Voor = USS_voor_result.get()
            kleur_result = pool2.apply_async(kleurensensor.isBlack)
            return_kleur = kleur_result.get()
            print return_USS_Voor
            print return_kleur
            if return_USS_Voor <= 15 or return_kleur == True:
                rover.stopRover()
                bool = False

    def DraaiRobot(self):
        """Turn left in place, then actuate the arm servo."""
        rover.turnRover('left', 58, 6.4)
        rover.stopRover()
        arm = Arm()
        arm.updateAngle()

    def ZoekBoter(self, camera):
        """Rotate in small steps until the camera sees enough yellow."""
        while True:
            return_camera = camera.herkenKleur("geel")
            time.sleep(1)
            print return_camera
            # Threshold of 10 -- presumably a pixel/score count from the
            # camera helper; confirm against Camera.herkenKleur.
            if return_camera > 10:
                break
            else:
                rover.turnRover('left', 58, 0.1)
                rover.stopRover()
        rover.stopRover()

    def RijNaarBord(self):
        """Drive forward until an obstacle (<= 15 cm) or a black line is seen."""
        t7.daemon = True
        t7.start()
        bool = True  # NOTE(review): shadows the builtin `bool`
        while bool:
            USS_voor_result = pool1.apply_async(USS.MeetAfstandVoor)
            return_USS_Voor = USS_voor_result.get()
            kleur_result = pool2.apply_async(kleurensensor.isBlack)
            return_kleur = kleur_result.get()
            print return_USS_Voor
            print return_kleur
            if return_USS_Voor <= 15 or return_kleur == True:
                rover.stopRover()
                bool = False

    def ZoekBord(self, camera):
        """Rotate in small steps until the camera sees enough blue."""
        while True:
            return_camera = camera.herkenKleur("blauw")
            time.sleep(1)
            print return_camera
            if return_camera > 10:
                break
            else:
                rover.turnRover('left', 58, 0.1)
                rover.stopRover()
        rover.stopRover()
if __name__ == '__main__':
    GPIO.setmode(GPIO.BCM)

    # Hardware wrappers used as module-level globals by Robot's methods.
    rover = Rover()
    USS = UltrasoneSensor()
    kleurensensor = KleurenSensor()
    camera = Camera()
    robot = Robot()

    # Drive parameters handed to the rover worker threads.
    speed = 28
    direction = 'left'
    speed2 = 50
    t = 2

    # Pre-built worker threads; each Robot method starts the ones it needs.
    t1 = threading.Thread(target=USS.MeetAfstandVoor)
    t2 = threading.Thread(target=USS.MeetAfstandAchter)
    t3 = threading.Thread(target=kleurensensor.isBlack)
    t4 = threading.Thread(target=rover.goForward, args=(speed,))
    t5 = threading.Thread(target=rover.turnRover, args=(direction, speed2, t))
    t6 = threading.Thread(target=rover.goBackward, args=(speed,))
    t7 = threading.Thread(target=rover.goForward, args=(speed,))

    # Pools used to poll the sensors synchronously from the mission logic.
    pool1 = ThreadPool(processes=1)
    pool2 = ThreadPool(processes=2)
    pool3 = ThreadPool(processes=3)

    # Mission sequence: find the butter, drive to it, turn and grab,
    # find the plate, deliver.
    robot.ZoekBoter(camera)
    robot.RijNaarBoter()
    robot.DraaiRobot()
    robot.ZoekBord(camera)
    robot.RijNaarBord()
    GPIO.cleanup()
|
from flask import Blueprint, request, jsonify
from marvel_inventory.helpers import token_required
from marvel_inventory.models import User, Character, character_schema, characters_schema, db

# All routes below are mounted under the /api prefix.
api = Blueprint('api', __name__, url_prefix='/api')
@api.route('/characters', methods = ['POST'])
@token_required
def create_character(current_user_token):
    """Create a new character owned by the authenticated user."""
    payload = request.json
    character = Character(
        payload['name'],
        payload['description'],
        payload['comics_appeared_in'],
        payload['super_power'],
        user_token=current_user_token.token,
    )
    db.session.add(character)
    db.session.commit()
    return jsonify(character_schema.dump(character))
@api.route('/characters', methods = ['GET'])
@token_required
def get_characters(current_user_token):
    """Return every character owned by the authenticated user."""
    owned = Character.query.filter_by(user_token=current_user_token.token).all()
    return jsonify(characters_schema.dump(owned))
@api.route('/characters/<id>', methods = ['GET'])
@token_required
def get_character(current_user_token, id):
    """Return a single character looked up by primary key."""
    found = Character.query.get(id)
    return jsonify(character_schema.dump(found))
@api.route('/characters/<id>', methods = ['POST', 'PUT'])
@token_required
def update_character(current_user_token, id):
    """Overwrite every editable field of a character from the JSON body."""
    character = Character.query.get(id)
    payload = request.json
    for field in ('name', 'description', 'comics_appeared_in', 'super_power'):
        setattr(character, field, payload[field])
    character.user_token = current_user_token.token
    db.session.commit()
    return jsonify(character_schema.dump(character))
@api.route('/characters/<id>', methods = ['DELETE'])
@token_required
def delete_character(current_user_token, id):
    """Delete a character and echo back its serialized form."""
    doomed = Character.query.get(id)
    db.session.delete(doomed)
    db.session.commit()
    # Serialize after the delete: the in-memory object is still populated.
    return jsonify(character_schema.dump(doomed))
|
from django.db import models
from classrooms.models import ClassRoom
from workers.models import TeachingStaff
class Subject(models.Model):
    """A taught subject: unique name plus a short description."""

    name = models.CharField(max_length=20, unique=True)
    description = models.TextField(max_length=250)

    def __str__(self):
        return self.name
class SubjectClass(models.Model):
    """Join table: which subject is taught in which classroom."""

    subject = models.ForeignKey(Subject, on_delete=models.CASCADE)
    class_room = models.ForeignKey(ClassRoom, on_delete=models.CASCADE)

    class Meta:
        # A subject appears at most once per classroom.
        unique_together = ['subject', 'class_room']

    def __str__(self):
        return '{} - {}'.format(self.subject, self.class_room)
class SubjectClassTeacher(models.Model):
    """Join table: which teacher teaches which subject/classroom pairing."""

    teacher = models.ForeignKey(TeachingStaff, on_delete=models.CASCADE)
    subject_class = models.ForeignKey(SubjectClass, on_delete=models.CASCADE)

    class Meta:
        # A teacher is assigned to a given subject class at most once.
        unique_together = ['teacher', 'subject_class']
|
# Decide whether a plan of (time, x, y) waypoints is reachable when the
# traveler must move exactly one unit per time step, starting at (0, 0).
n = int(input())
prev_x = prev_y = prev_t = 0
for _ in range(n):
    t, x, y = map(int, input().split())
    # Manhattan distance to cover and the time available for this leg.
    dist = abs(x - prev_x) + abs(y - prev_y)
    span = t - prev_t
    # Reachable iff there is enough time and the slack has even parity
    # (every wasted back-and-forth costs exactly two steps).
    if dist > span or (span - dist) % 2 != 0:
        print("No")
        exit()
    prev_x, prev_y, prev_t = x, y, t
print("Yes")
|
from client_database_connection import mycursor
from config import node_id
import time

# Give the database connection a moment to come up before polling.
time.sleep(5)
def run():
    """Poll node_data every 5 s and launch node_free.py when this node is free."""
    import os  # local import keeps this fix self-contained

    while True:
        time.sleep(5)
        # Fix: the original referenced an undefined name `code_id`;
        # the value imported from config is `node_id`.
        # NOTE(review): string-built SQL is only acceptable because node_id
        # comes from local config, not user input; switch to a parameterized
        # query if that ever changes.
        sql = "SELECT node_free FROM node_data where node_id = " + node_id
        mycursor.execute(sql)
        check = mycursor.fetchone()
        # Fix: fetchone() returns a row tuple (or None), never a bare int,
        # so the old `check == 1` comparison was always False.
        if check is not None and check[0] == 1:
            os.system('python node_free.py')


run()
|
# Service configuration constants.
MONGODB_URL = "mongodb://localhost:27017/"  # local MongoDB instance
PORT = 5002  # port this service listens on
IS_DEBUG = False
REQUIREMENT_MANAGER_URL = "http://localhost:5003"  # companion service endpoint
|
from fractions import Fraction
# a Vrectangle is a rectangle defined by its top, left, bottom, right coordinates as
# proportions of height & width of a parent canvas (itself a Vrectangle or None if top-level aka root canvas)
# top, left, bottom, right are handled as Fraction
class Vrectangle:
    """A rectangle given by top/left/bottom/right proportions of a parent.

    Coordinates are stored as Fractions relative to `canvas`, itself a
    Vrectangle or None for the top-level (root) canvas.
    """

    def __init__(self, top=Fraction(0), left=Fraction(0),
                 bottom=Fraction(1), right=Fraction(1),
                 canvas=None):
        self.top = self._frac(top)
        self.left = self._frac(left)
        self.bottom = self._frac(bottom)
        self.right = self._frac(right)
        self.canvas = canvas
        # Reject degenerate (zero or negative area) rectangles up front.
        if self.height() <= 0 or self.width() <= 0:
            raise ValueError("height and width must be strictly positive")

    def __str__(self):
        prefix = f"{self.canvas} / " if self.canvas is not None else ""
        return prefix + f"[{self.top} {self.left}|{self.bottom} {self.right}]"

    def width(self):
        return self.right - self.left

    def height(self):
        return self.bottom - self.top

    def reframe(self, canvas=None):
        """Return this rectangle expressed in *canvas*'s coordinate scheme."""
        rooted = self._reframe_root()
        if canvas is None:
            return rooted
        # Work entirely in root coordinates, then rescale into the target.
        target = canvas._reframe_root()
        h = target.height()
        w = target.width()
        return Vrectangle((rooted.top - target.top) / h,
                          (rooted.left - target.left) / w,
                          (rooted.bottom - target.top) / h,
                          (rooted.right - target.left) / w,
                          canvas)

    def _reframe_once(self):
        # One hop: express self relative to the parent canvas's own canvas.
        parent = self.canvas
        if parent is None:
            return self
        h = parent.height()
        w = parent.width()
        return Vrectangle(parent.top + self.top * h,
                          parent.left + self.left * w,
                          parent.top + self.bottom * h,
                          parent.left + self.right * w,
                          parent.canvas)

    def _reframe_root(self):
        # Walk up hop by hop until expressed relative to the root canvas.
        rect = self
        while rect.canvas is not None:
            rect = rect._reframe_once()
        return rect

    def __eq__(self, other):
        # None stands for the whole root canvas.
        if other is None:
            other = Vrectangle(0, 0, 1, 1, None)
        if not isinstance(other, Vrectangle):
            return NotImplemented
        a = self._reframe_root()
        b = other._reframe_root()
        return (a.top, a.left, a.bottom, a.right) == (b.top, b.left, b.bottom, b.right)

    @staticmethod
    def _frac(value):
        return Fraction(value).limit_denominator()
|
from random import randint
class ss():
    """Toy neighbourhood scan over a 10x10 occupancy grid (0 = free)."""

    # Clockwise neighbour offsets, in the same order as the original
    # hand-unrolled checks in act().
    _OFFSETS = [(-1, -1), (0, -1), (1, -1), (1, 0),
                (1, 1), (0, 1), (-1, 1), (-1, 0)]

    def __init__(self):
        self.celloccupancy = [[0] * 10 for i in range(10)]

    def act(self):
        """Print every free neighbour offset of the fixed cell (5, 5)."""
        x = 5
        y = 5
        # Table-driven scan replaces eight near-identical if statements.
        availableCells = [[dx, dy] for dx, dy in self._OFFSETS
                          if self.celloccupancy[x + dx][y + dy] == 0]
        for cell in availableCells:
            print(cell)

    def act2(self):
        """Print 'k', then one random free neighbour offset of (5, 5), if any."""
        x = 5
        y = 5
        availableCells = []
        for i in [-1, 0, 1]:
            for j in [-1, 0, 1]:
                # Fix: boolean `and` instead of bitwise `&`; `&` always
                # evaluated both operands and only worked by accident here.
                if [i, j] != [0, 0] and self.celloccupancy[x + i][y + j] == 0:
                    availableCells.append([i, j])
        print('k')
        if len(availableCells) > 0:
            print(availableCells[randint(0, len(availableCells) - 1)])
def main():
    # Demo entry point: build a fresh grid and print one random free neighbour.
    x = ss()
    x.act2()


main()
"""gcon URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from main import views

# Route table: GitHub webhook setup/handling plus basic session views.
urlpatterns = [
    url(r'^$', views.index, name='index'),
    url(r'^repos/$', views.repos, name='repos'),
    # Todo: add regexp
    url(r'^hook/(?P<repo_name>.+)/$', views.create_hook, name='hook'),
    url(r'^handle/$', views.handle_hook, name='handle'),
    url(r'^logout/$', views.logout_user, name='logout')
]
|
from pymodm import connect, MongoModel, EmbeddedMongoModel, fields
from pymodm.queryset import QuerySet
from pymodm.manager import Manager

# NOTE(review): database credentials are hard-coded in source; move them to
# an environment variable or config file and rotate the exposed password.
connect('mongodb+srv://mah148:7C2BeZmfwzWmSgwW@bme547-gxtrh.mongodb.net/'
        'test?retryWrites=true', 'bme547-db')
class ImageQuerySet(QuerySet):
    """Custom queries shared by the Image and ProcessedImage managers."""

    def user(self, user_id):
        '''Return all images uploaded by a User'''
        return self.raw({'user': user_id})

    def userimage(self, user_id, filename, extension):
        '''Return user image indentified by filename and extension.'''
        return self.raw({'user': user_id, 'filename': filename,
                         'extension': extension}).first()

    def userprocessedimage(self, user_id, filename, extension, method):
        '''Return user image indentified by filename, extension and procedure.'''
        return self.raw({'user': user_id, 'filename': filename,
                         'extension': extension,
                         'procedureType': method}).first()

    def proc(self, user_id, method):
        '''Return all processed images that have undergone this method.'''
        return self.raw({'user': user_id, 'procedureType': method})


# Manager class exposing the custom query set on the models below.
ImageManager = Manager.from_queryset(ImageQuerySet)
class User(MongoModel):
    """An application user, keyed by a caller-supplied id string."""

    userID = fields.CharField(primary_key=True)
    created = fields.DateTimeField()

    class Meta:
        connection_alias = 'bme547-db'
class Image(MongoModel):
    """An uploaded image, stored base-64 encoded in `image`."""

    # Primary key is the concatenation user_id + filename + extension.
    name = fields.CharField(primary_key=True)
    filename = fields.CharField()
    extension = fields.CharField()
    image = fields.CharField()
    uploadedAt = fields.DateTimeField()
    user = fields.ReferenceField(User)
    objects = ImageManager()

    class Meta:
        connection_alias = 'bme547-db'
class ProcessedImage(MongoModel):
    """The result of running one or more procedures on an uploaded Image."""

    filename = fields.CharField()
    image = fields.CharField()
    # One entry per procedure applied, restricted to the supported set.
    procedureType = fields.ListField(
        fields.CharField(choices=('Hist',
                                  'Contrast',
                                  'Log',
                                  'Reverse')))
    processedAt = fields.DateTimeField()
    timeToProcess = fields.FloatField()  # seconds
    user = fields.ReferenceField(User)
    baseImage = fields.ReferenceField(Image)
    extension = fields.CharField()
    objects = ImageManager()

    class Meta:
        connection_alias = 'bme547-db'
def upload_image(user_id, filename, extension, image_str):
    """Upload a base-64 encoded image to the database.

    :param user_id: Unique User identifier.
    :type user_id: str
    :param filename: Image filename.
    :type filename: str
    :param extension: Image extension.
    :type extension: str
    :param image_str: Base-64 encoded image.
    :type image_str: str
    :return: Job completed message.
    :rtype: str
    """
    from datetime import datetime

    uploaded_at = datetime.now()
    record = Image(name=user_id + filename + extension,
                   filename=filename,
                   extension=extension,
                   image=image_str,
                   uploadedAt=uploaded_at,
                   user=user_id)
    record.save()
    return "Uploaded {} (userID: {}) at {}.".format(filename, user_id,
                                                    uploaded_at)
class UserExists(Exception):
    """Raised when registering a user id that already exists."""
    pass
def register_user(user_id):
    """Register a new User on the Database.

    :param user_id: Unique User Identifier.
    :type user_id: str
    :raises UserExists: Exception if User already exists.
    :return: Operation completed string.
    :rtype: str
    """
    # A DoesNotExist lookup failure is the happy path here.
    try:
        existing = User.objects.raw({'_id': user_id}).first()
    except User.DoesNotExist:
        existing = None
    if existing is not None:
        raise UserExists('User {} exists on the database.'.format(user_id))

    from datetime import datetime
    created = datetime.now()
    User(user_id, created=created).save()
    return 'User {} registered.'.format(user_id)
def get_uploaded_image(user_id, filename, extension):
    """Retrieve an uploaded image.

    :param user_id: Unique User identifier.
    :type user_id: str
    :param filename: Filename.
    :type filename: str
    :param extension: Image extension.
    :type extension: str
    :return: Dictionary of image data.
    :rtype: dict
    """
    record = Image.objects.userimage(user_id, filename, extension)
    return {
        'image': record.image,
        'extension': record.extension,
        'uploadedAt': record.uploadedAt,
    }
def process_image(img, method):
    """Perform image manipulation.

    :param img: Image array.
    :type img: np.array
    :param method: Manipulation method: 'Hist', 'Contrast', 'Log' or 'Reverse'.
    :type method: str
    :raises ValueError: Incorrect image method submitted.
    :return: Time taken in seconds, processed image.
    :rtype: float, np.array
    """
    import img_proc_server as im
    import datetime

    started = datetime.datetime.now()
    if method == 'Hist':
        result = im.equalize_hist(img)
        # Rescale equalized output to the 8-bit range -- presumably
        # equalize_hist returns values in [0, 1]; confirm in img_proc_server.
        result *= 255
    elif method == 'Contrast':
        result = im.contr_stretch_img(img)
    elif method == 'Log':
        result = im.log_correct_img(img)
    elif method == 'Reverse':
        result = im.reverse_img(img)
    else:
        raise ValueError('Invalid method: {}.'.format(method))
    elapsed = (datetime.datetime.now() - started).total_seconds()
    return elapsed, result
def save_processed_image(filename, proc_image_str, user_id, proceduretype,
                         processedat, timetoprocess, extension,
                         already_processed=False):
    """Save a processed image to the database.

    :param filename: Base image filename.
    :type filename: str
    :param proc_image_str: Base64 encoded processed image.
    :type proc_image_str: str
    :param user_id: User ID.
    :type user_id: str
    :param proceduretype: Procedure performed (str, or list of str).
    :type proceduretype: str
    :param processedat: Time image was processed.
    :type processedat: datetime object
    :param timetoprocess: Time taken to process image (s)
    :type timetoprocess: float
    :param extension: Image file extension.
    :type extension: str
    :param already_processed: If image has alrady been processed, defaults to
        False.  NOTE(review): currently unused by this function body.
    :type already_processed: bool, optional
    :return: Upload confirmation message.
    :rtype: str
    """
    import datetime
    # `time` stamps the confirmation message only; the stored record uses
    # the caller-supplied `processedat`.
    time = datetime.datetime.now()
    if not isinstance(proceduretype, list):
        proceduretype = [proceduretype]
    img = ProcessedImage(filename=filename,
                         image=proc_image_str,
                         user=user_id,
                         procedureType=proceduretype,
                         processedAt=processedat,
                         timeToProcess=timetoprocess,
                         extension=extension)
    img.save()
    out = "Uploaded {} (process:{}) (userID: {}) at {}.".format(filename,
                                                                proceduretype,
                                                                user_id,
                                                                time)
    return out
def get_average(user_id, method):
    """Get the average time in seconds the user's runs of *method* took.

    :param user_id: User ID.
    :type user_id: str
    :param method: Method type.
    :type method: str, list
    :return: Average time, or an explanatory string when the user has never
        used this operation.
    :rtype: float or str
    """
    import numpy as np

    method = method if isinstance(method, list) else [method]
    times = [p.timeToProcess for p in ProcessedImage.objects.proc(user_id, method)]
    average = np.mean(times)
    # An empty result set averages to NaN.
    if np.isnan(average):
        return 'User has not used this operation.'
    return average
|
import os
import sys
from subprocess import Popen, PIPE
from conans.util.files import decode_text
from conans.errors import ConanException
import six
class ConanRunner(object):
    """Executes shell commands, optionally echoing the command, teeing the
    output to a log file, and/or streaming it to an output stream."""

    def __init__(self, print_commands_to_output=False, generate_run_log_file=False, log_run_to_output=True):
        self._print_commands_to_output = print_commands_to_output
        self._generate_run_log_file = generate_run_log_file
        self._log_run_to_output = log_run_to_output

    def __call__(self, command, output, log_filepath=None, cwd=None):
        """
        @param command: Command to execute
        @param output: Instead of print to sys.stdout print to that stream. Could be None
        @param log_filepath: If specified, also log to a file
        @param cwd: Move to directory to execute
        """
        stream_output = output if output and hasattr(output, "write") else sys.stdout
        if not self._generate_run_log_file:
            log_filepath = None

        # Log the command call in output and logger
        call_message = "\n----Running------\n> %s\n-----------------\n" % command
        if self._print_commands_to_output and stream_output and self._log_run_to_output:
            stream_output.write(call_message)

        # No output has to be redirected to logs or buffer or omitted
        if output is True and not log_filepath and self._log_run_to_output:
            return self._simple_os_call(command, cwd)
        elif log_filepath:
            if stream_output:
                stream_output.write("Logging command output to file '%s'\n" % log_filepath)
            with open(log_filepath, "a+") as log_handler:
                if self._print_commands_to_output:
                    log_handler.write(call_message)
                return self._pipe_os_call(command, stream_output, log_handler, cwd)
        else:
            return self._pipe_os_call(command, stream_output, None, cwd)

    def _pipe_os_call(self, command, stream_output, log_handler, cwd):
        """Run via Popen with captured pipes; returns the exit code."""
        try:
            proc = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=cwd)
        except Exception as e:
            raise ConanException("Error while executing '%s'\n\t%s" % (command, str(e)))

        def get_stream_lines(the_stream):
            # Drain one pipe line by line, mirroring lines to the output
            # stream and to the log file when either is configured.
            while True:
                line = the_stream.readline()
                if not line:
                    break
                decoded_line = decode_text(line)
                if stream_output and self._log_run_to_output:
                    try:
                        stream_output.write(decoded_line)
                    except UnicodeEncodeError:  # be agressive on text encoding
                        decoded_line = decoded_line.encode("latin-1", "ignore").decode("latin-1",
                                                                                      "ignore")
                        stream_output.write(decoded_line)
                if log_handler:
                    # Write decoded in PY2 causes some ASCII encoding problems
                    # tried to open the log_handler binary but same result.
                    log_handler.write(line if six.PY2 else decoded_line)

        # NOTE(review): stdout is drained completely before stderr; a child
        # that fills the stderr pipe buffer meanwhile could stall -- confirm
        # this is acceptable for the commands being run.
        get_stream_lines(proc.stdout)
        get_stream_lines(proc.stderr)
        proc.communicate()
        ret = proc.returncode
        return ret

    def _simple_os_call(self, command, cwd):
        """Run via os.system, optionally from another working directory."""
        if not cwd:
            return os.system(command)
        else:
            try:
                old_dir = os.getcwd()
                os.chdir(cwd)
                result = os.system(command)
            except Exception as e:
                raise ConanException("Error while executing"
                                     " '%s'\n\t%s" % (command, str(e)))
            finally:
                # Always restore the original working directory.
                os.chdir(old_dir)
            return result
|
class Runner:
    """A race participant: name, bib number, and per-split times and ranks."""

    def __init__(self, firstname, lastname, bib):
        self.name = f"{lastname}, {firstname}"
        self.bib = int(bib)          # numeric bib used as the table key
        self.bibstring = bib         # original string form, used for display
        self.splits = dict.fromkeys(("S1", "S2", "F"), "")
        self.ranks = dict.fromkeys(("S1", "S2", "F"), "0")

    def print_runner(self):
        """Print one fixed-width result row: name, bib, then time/rank pairs."""
        columns = [self.name.ljust(20), self.bibstring.rjust(10)]
        for split in ("S1", "S2", "F"):
            columns.append(self.splits[split].rjust(10))
            columns.append(self.ranks[split].rjust(10))
        print("".join(columns))

    def add_time(self, split, time):
        """Record the time string for one split ("S1", "S2" or "F")."""
        self.splits[split] = time
class Table:
    """Results table keyed by bib number, with competition-style ranking."""

    def __init__(self):
        self.runners = dict()

    def add_runner(self, runner):
        """Index a runner by its integer bib."""
        self.runners[runner.bib] = runner

    def add_time(self, bib, split, time):
        """Forward a split time to the runner with this bib (bib may be a string)."""
        self.runners[int(bib)].add_time(split, time)

    def order_bibs(self):
        """Return (name, bib) pairs sorted alphabetically by runner name."""
        return sorted((runner.name, bib) for bib, runner in self.runners.items())

    def calculate_ranks_for_split(self, split):
        """Assign competition ranks ("1224"-style ties) for one split."""
        ordered = sorted(
            (runner.splits[split], bib) for bib, runner in self.runners.items()
        )
        rank = 1
        position = 1
        previous = None
        for time_value, bib in ordered:
            # Falsy check (not an `is None` test) mirrors how missing
            # (empty-string) times were handled originally.
            if not previous:
                previous = time_value
            if previous != time_value:
                rank = position
            self.runners[bib].ranks[split] = str(rank)
            position += 1
            previous = time_value

    def calculate_ranks(self):
        """Rank every split in order."""
        for split in ("S1", "S2", "F"):
            self.calculate_ranks_for_split(split)

    def print_header(self):
        print(
            "NAME BIB SPLIT1 RANK SPLIT2 RANK FINISH RANK"
        )

    def print_runners(self):
        for _, bib in self.order_bibs():
            self.runners[bib].print_runner()

    def print_table(self):
        self.print_header()
        self.print_runners()
        print("")
def solve(n):
    """Read n runners and 3*n timing records from stdin, then print the ranked table."""
    table = Table()
    for _ in range(n):
        table.add_runner(Runner(*input().split()))
    for _ in range(3 * n):
        table.add_time(*input().split())
    table.calculate_ranks()
    table.print_table()
if __name__ == "__main__":
    # Process datasets until a runner count of 0 terminates the input.
    while True:
        count = int(input())
        if not count:
            break
        solve(count)
|
import pygame
from pygame.locals import *
import random
class Node:
    """A board square in the linked list: position, star flag, colour and links."""

    def __init__(self, isStar, data, color):
        self.coord = data      # (x, y) coordinate of the square
        self.isStar = isStar   # True when the square is a star square
        self.color = color
        # Links start unset; presumably next = main track, enter/exit =
        # branch squares — confirm against the board-building code.
        self.next = None
        self.enter = None
        self.exit = None
# Linked List class
class LinkedList:
    """Minimal singly linked list of board Nodes."""

    def __init__(self):
        self.head = None

    def printList(self):
        """Print each node's coordinate from head to tail."""
        node = self.head
        while node:
            print(node.coord)
            node = node.next
if __name__=='__main__':
    # Board/dice demo: draws the board and rolls an animated die whenever
    # the "Roll" button is clicked.
    pygame.init()
    width,height=512,700
    screen=pygame.display.set_mode((width,height))
    bg=pygame.image.load("resources/Board512x512.png")
    # The six die faces, blitted semi-transparent during the roll animation.
    d1=pygame.image.load("resources/d1.jpg").convert()
    d2=pygame.image.load("resources/d2.jpg").convert()
    d3=pygame.image.load("resources/d3.jpg").convert()
    d4=pygame.image.load("resources/d4.jpg").convert()
    d5=pygame.image.load("resources/d5.jpg").convert()
    d6=pygame.image.load("resources/d6.jpg").convert()
    d1.set_alpha(128)
    d2.set_alpha(128)
    d3.set_alpha(128)
    d4.set_alpha(128)
    d5.set_alpha(128)
    d6.set_alpha(128)
    dicerollcoor=(350,556)  # where the die face is drawn
    btroll=pygame.image.load("resources/btRoll.png")
    btrollrec=pygame.Rect(btroll.get_rect())
    btrollcoor=(200,556)    # where the roll button is drawn
    btrollrec.top=btrollcoor[1]
    btrollrec.left=btrollcoor[0]
    lastroll=-1             # last rolled value; -1 until the first roll
    setroll=False           # True while a roll animation is pending
    while 1:
        # NOTE(review): fill() receives the integer 12 (a mapped-colour
        # value) — presumably meant as a dark clear colour; confirm.
        screen.fill(12)
        screen.blit(bg,(0,0))
        screen.blit(btroll,btrollcoor)
        if setroll:
            # Busy-wait animation: for 2 s, draw a random face roughly
            # every 100 ms (burns CPU; no clock tick here).
            tm=pygame.time.get_ticks()
            while pygame.time.get_ticks()<=tm+2000:
                tmn=(int)(pygame.time.get_ticks())
                if tmn%100==0:
                    num=random.randint(1,6)
                    lastroll=num
                    address="resources/d"+(str)(num)+".jpg"
                    dx=pygame.image.load(address)
                    screen.blit(dx,dicerollcoor)
                    pygame.display.flip()
            print(lastroll)
            setroll=False
        if lastroll>0 and lastroll<7:
            # Keep showing the final face between rolls.
            address="resources/d"+(str)(lastroll)+".jpg"
            dxt=pygame.image.load(address)
            screen.blit(dxt,dicerollcoor)
        pygame.display.flip()
        for event in pygame.event.get():
            if event.type==pygame.QUIT:
                pygame.quit()
                exit(0)
            if event.type==pygame.MOUSEBUTTONDOWN:
                mouse_pos=event.pos
                if btrollrec.collidepoint(mouse_pos) and setroll==False:
                    setroll=True
|
import tkinter
from buttons import Button as bt
from bridge import MouseMotionToController
from configuration import ConfigOfButton, ConfigOfCanvas
from controller.buttonController import ButtonController
from controller.canvasController import CanvasController
from controller.modeController import *
def initAllButtons(master):
    """Create the toolbox buttons, pack them, and register them with ButtonController.

    Returns the packed buttons in display order.
    """
    # Construction order is kept as-is; packing follows the list order below.
    btn_association = bt(master, ConfigOfButton.nameOfAssociationLine)
    btn_class = bt(master, ConfigOfButton.nameOfClass)
    btn_composition = bt(master, ConfigOfButton.nameOfCompositionLine)
    btn_generalization = bt(master, ConfigOfButton.nameOfGeneralizationLine)
    btn_select = bt(master, ConfigOfButton.nameOfSelect)
    btn_use_case = bt(master, ConfigOfButton.nameOfUseCase)
    buttons = [btn_select, btn_association, btn_generalization,
               btn_composition, btn_class, btn_use_case]
    for current in buttons:
        current.pack()
    # NOTE(review): method name is misspelled upstream ("knwo..."); kept for compatibility.
    ButtonController.knwoWhatAreAvailableButtons(buttons)
    return buttons
def initCanvasContainer(master):
    """Build the white drawing canvas, wire its mouse events, and publish it
    to CanvasController and MouseMotionToController."""
    canvas = tkinter.Canvas(master)
    canvas.configure(
        bg="white",
        height=ConfigOfCanvas.heightOfInitCanvas,
        width=ConfigOfCanvas.widthOfInitCanvas,
    )
    canvas.pack(side=tkinter.LEFT)
    addMouseListenerToCanvas(canvas)
    CanvasController.canvasContainer = canvas
    MouseMotionToController.canvasContainer = canvas
# Should be revised when there is mouse action added
def addMouseListenerToCanvas(canvasContainer):
    """Bind left-button press/drag/release events to the CanvasController handlers."""
    bindings = {
        "<Button-1>": CanvasController.handleClickOnCanvas,
        "<B1-Motion>": CanvasController.handlePressAndDragOnCanvas,
        "<ButtonRelease-1>": CanvasController.handleMouseReleaseOnCanvas,
    }
    for sequence, handler in bindings.items():
        canvasContainer.bind(sequence, handler)
# Should be revised when there is new mode added
def initModeControllers():
    """Instantiate one controller per drawing mode and register them on the canvas."""
    CanvasController.availableModes.extend([
        AssociationLinController(),
        ClassModeController(),
        CompositionLinController(),
        GeneralizationLinController(),
        SelectModeController(),
        UseCaseModeController(),
    ])
import datetime
from typing import List
import sqlalchemy as sa
import sqlalchemy.orm as orm
from pypi_org.data.modelbase import SqlAlchemyBase
from pypi_org.data.releases import Release
class Package(SqlAlchemyBase):
    """SQLAlchemy model for a PyPI package (table ``packages``).

    The primary key is the package name itself (a string id).
    """
    __tablename__ = 'packages'
    id = sa.Column(sa.String, primary_key=True)
    # `default` is the callable datetime.now, evaluated per inserted row.
    created_date = sa.Column(sa.DateTime, default=datetime.datetime.now, index=True)
    summary = sa.Column(sa.String, nullable=False)
    description = sa.Column(sa.String, nullable=True)
    home_page = sa.Column(sa.String)
    docs_url = sa.Column(sa.String)
    package_url = sa.Column(sa.String)
    author_name = sa.Column(sa.String)
    author_email = sa.Column(sa.String, index=True)
    license = sa.Column(sa.String, index=True)
    # releases relationship
    # One-to-many: newest release first (major, minor, build descending).
    releases: List[Release] = orm.relation("Release", order_by=[
        Release.major_ver.desc(),
        Release.minor_ver.desc(),
        Release.build_ver.desc(),
    ], back_populates='package')
    def __repr__(self):
        """Debug representation showing the package id."""
        return '<Package {}>'.format(self.id)
# p = Package() # one query
#
# print(p.id)
# print("All releases")
# for r in p.releases:
# print("{}.{}.{}".format(r.major_ver, r.minor_ver, r.build_ver))
|
# Sum numbers typed by the user until 0 is entered, then print the total.
total = 0
value = int(input("Digite um número"))
while value != 0:
    total += value
    value = int(input("Digite um número"))
print(total)
|
import math
import xml.sax.handler
import xml.sax
import pprint
#cambiar por vectores
#La clase Vector realiza algunas operaciones basias que ayudan a la implementacion de
#algunas de las funciones de distancia y size del juego
class Vector:
    """A 2-D vector with the small set of operations the game needs.

    All operations return new Vector instances; nothing mutates in place.
    """

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def getUnitario(self):
        """Return this vector scaled to length 1 (undefined for the zero vector)."""
        length = math.sqrt(self.x ** 2 + self.y ** 2)
        return Vector(self.x / length, self.y / length)

    def getMagnitud(self):
        """Return the Euclidean length of the vector."""
        return math.sqrt(self.x ** 2 + self.y ** 2)

    def resta(self, vec):
        """Return self - vec as a new Vector."""
        return Vector(self.x - vec.x, self.y - vec.y)

    def suma(self, vec):
        """Return self + vec as a new Vector."""
        return Vector(self.x + vec.x, self.y + vec.y)

    def mulCon(self, constante):
        """Return the vector scaled by the scalar `constante`."""
        return Vector(self.x * constante, self.y * constante)
#comprueba si hay coliciones entre dos objetos
def collision(obj1, obj2):
    """Return True when the axis-aligned bounding boxes of obj1 and obj2 overlap.

    Both objects must expose getPosicion() (top-left corner with .x/.y),
    get_width() and get_height().  Edge-touching boxes count as colliding.

    Fix: the original only tested whether obj2's edges fell inside obj1, so a
    box that fully contained the other was reported as *not* colliding.  The
    standard interval-overlap test below covers that case and agrees with the
    original in every case the original detected.
    """
    top1 = obj1.getPosicion().y
    bottom1 = top1 + obj1.get_height()
    left1 = obj1.getPosicion().x
    right1 = left1 + obj1.get_width()

    top2 = obj2.getPosicion().y
    bottom2 = top2 + obj2.get_height()
    left2 = obj2.getPosicion().x
    right2 = left2 + obj2.get_width()

    # Two closed intervals [a1, b1] and [a2, b2] overlap iff a1 <= b2 and a2 <= b1.
    overlap_x = left1 <= right2 and left2 <= right1
    overlap_y = top1 <= bottom2 and top2 <= bottom1
    return overlap_x and overlap_y
class Imagen:
    """A drawable sprite with its screen position and draw layer."""

    def __init__(self, img, pos):
        self.sprite = img
        self.layer = 0  # draw-order layer; stays 0 until the owner changes it
        self.x, self.y = pos.x, pos.y
#La clase radar interactual con los objetos en pantalla para obtener su posicion y al mismo tiempo
#realiza alguna de la mecanica para obtener cual objeto es mas cercano a la nave actual
class Radar:
    """Tracks the ships/projectiles in the world and manages target selection.

    Queries the world object ("mundo") for on-screen objects, supports cycling
    through enemy targets and locking onto the nearest enemy.
    """

    def __init__(self, mundo):
        self.mundo = mundo
        self.ships = []
        self.proj = []
        self.target = None       # currently locked enemy, or None
        self.oldTargets = []     # enemies already visited by toggleTarget()
        self.team = 0
        self.currentPos = None

    def setTeam(self, team):
        """Set which team this radar belongs to; same-team ships are not enemies."""
        self.team = team

    def detectShips(self):
        """Refresh the ship list and drop references to ships no longer present.

        Fix: the original removed elements from oldTargets while iterating it,
        which skips entries; a filtered copy is built instead.
        """
        self.ships = self.mundo.getShips()
        if self.target not in self.ships:
            self.target = None
        self.oldTargets = [obj for obj in self.oldTargets if obj in self.ships]

    def detectProjectiles(self):
        """Refresh the projectile list from the world."""
        self.proj = self.mundo.getProj()

    def getPosTarget(self):
        """Return the current target's centre position, or None when untargeted."""
        if self.target is not None:
            return self.target.getCentro()
        return None

    def toggleTarget(self):
        """Cycle the lock to an enemy not yet visited; restart the cycle when exhausted."""
        if self.target is not None:
            self.oldTargets += [self.target]
        self.target = None
        for enemy in self.getEnemies():
            if enemy not in self.oldTargets:
                self.target = enemy
        if self.target is None:
            if len(self.oldTargets) > 0:
                self.oldTargets = []
                self.toggleTarget()

    def setClosestTarget(self, pos):
        """Lock onto the enemy whose position is nearest to `pos`."""
        closest = None
        best = float("inf")  # was a 999999 sentinel; inf is always safe
        for enemy in self.getEnemies():
            d = self.getDistance(pos, enemy.posicion)
            if d < best:
                closest, best = enemy, d
        self.target = closest
        self.oldTargets = [self.target]

    def getEnemies(self):
        """Return all ships that are not on this radar's team."""
        return [ship for ship in self.ships if ship.team != self.team]

    def getDistance(self, pos1, pos2):
        """Euclidean distance between two points exposing .x/.y.

        Fix: the original summed the x-difference twice, ignoring the y axis.
        """
        dx = pos1.x - pos2.x
        dy = pos1.y - pos2.y
        return math.sqrt(dx * dx + dy * dy)
class Propulsor:
    """Generates thrust forces of fixed power along the four axis directions."""

    def __init__(self):
        self.potencia = 10  # thrust magnitude applied per impulse

    def _thrust(self, dx, dy):
        # Unit direction scaled by the engine power.
        return Vector(dx, dy).mulCon(self.potencia)

    def movRight(self):
        """Force pushing right (+x)."""
        return self._thrust(1, 0)

    def movLeft(self):
        """Force pushing left (-x)."""
        return self._thrust(-1, 0)

    def movUp(self):
        """Force pushing up (-y, screen coordinates)."""
        return self._thrust(0, -1)

    def movDown(self):
        """Force pushing down (+y, screen coordinates)."""
        return self._thrust(0, 1)
class Estabilizador:
    """Produces a damping force that opposes the ship's current velocity."""

    def __init__(self):
        self.velocidad = Vector(0, 0)
        self.factor = 0.20           # fraction of the speed converted to braking force
        self.velocidadMaxima = 100   # kept for compatibility; unused in this class

    def estabiliza(self):
        """Return a force opposite to the velocity, proportional to the speed."""
        speed = self.velocidad.getMagnitud()
        if speed > 0:
            return self.velocidad.getUnitario().mulCon(-1).mulCon(speed * self.factor)
        return Vector(0, 0)

    def update(self, velocidad):
        """Refresh the velocity sample used by estabiliza()."""
        self.velocidad = velocidad
#clase generica pa los objetos
#en general tods los objetos en el juego que parecen en pantalla deberan tener una velocidad aceleracion
#posicion y un estado de vivos o muertos
class Object:
    """Base class for on-screen game objects: anything drawn has a velocity,
    acceleration, position, mass, sprite list and alive flag."""
    def __init__(self):
        self.velocidad=None
        self.aceleracion= Vector(0,0)
        self.posicion= None
        self.masa = 1
        self.alive= None
        self.vista=0
        # These are the images loaded by the game
        self.imagenesDelJuego = None
        # These are images particular to this object
        self.imagen = []
        # `vista` above says which image of the array is the current one
    def Update(self):
        """Integrate one step: velocity += acceleration, then position += velocity;
        acceleration is reset each frame."""
        self.velocidad = self.velocidad.suma(self.aceleracion)
        self.aceleracion = Vector(0,0)
        self.posicion = self.posicion.suma(self.velocidad)
    def aplicarFuerza(self,fuerza):
        """Accumulate a force into the acceleration (F/m).

        NOTE(review): the whole accumulated acceleration is divided by the
        mass on *every* call, so with masa != 1 earlier forces would be
        scaled repeatedly — harmless while masa == 1, but confirm intent.
        """
        self.aceleracion = self.aceleracion.suma(fuerza);
        self.aceleracion = self.aceleracion.mulCon(1/self.masa)
    def getView(self):
        """Extension point: drawable views of this object (empty by default)."""
        return []
    def getCentro(self):
        """Return the centre of the current sprite as a Vector."""
        inc_x = self.imagen[self.vista].get_width() / 2
        inc_y = self.imagen[self.vista].get_height() / 2
        return Vector(self.posicion.x + inc_x , self.posicion.y + inc_y )
    def get_width(self):
        return self.imagen[self.vista].get_width()
    def get_height(self):
        return self.imagen[self.vista].get_height()
    def getPosicion(self):
        """Top-left position of the object (a Vector)."""
        return self.posicion
#saca las cosas el archivo de XML
#Algunos de los parametros de configuracion del juego se guardaran en archivos XML
#Esta clase toma el archivo lo parsea y responde con los datos necesarios para que la clase de
#configuracion los ponga donde se debe
class HelixConfHandler(xml.sax.handler.ContentHandler):
    """SAX handler that picks the window width/height out of helixconf.xml."""

    def __init__(self):
        self.width = 0
        self.height = 0

    def startElement(self, name, attributes):
        """Capture width/height (as attribute strings) from the <window> element."""
        if name != "window":
            return
        self.width = attributes.get('width', None)
        self.height = attributes.get('height', None)
from xml.sax import make_parser
from xml.sax.handler import feature_namespaces
def parsea():
    """Parse helixconf.xml and return [width, height] read from its <window> element."""
    handler = HelixConfHandler()
    parser = make_parser()
    # Namespaces are irrelevant for this config file.
    parser.setFeature(feature_namespaces, 0)
    parser.setContentHandler(handler)
    parser.parse("helixconf.xml")
    return [handler.width, handler.height]
# groceries.py
# csv-mgmt/read_teams.py
import operator
import pandas
import os
# NOTE(review): `operator` appears unused in this file — confirm before removing.
#csv_filepath = "products.csv"
# Resolve products.csv relative to this script's location.
csv_filepath = os.path.join(os.path.dirname(os.path.dirname(__file__)), "..", "groceries-exercise", "products.csv")
df = pandas.read_csv(csv_filepath)
# One dict per CSV row; later code reads the "name", "department" and "price" keys.
products = df.to_dict("records")
def to_usd(my_price):
    """
    Converts a numeric value to usd-formatted string, for printing and display purposes.

    Param: my_price (int or float) like 4000.444444

    Example: to_usd(4000.444444)

    Returns: $4,000.44
    """
    # Fix: the previous version wrapped the result in parentheses,
    # contradicting the documented "$4,000.44" format.
    return f"${my_price:,.2f}"
# Report: list all products alphabetically, then summarise the departments.
product_rows = sorted(products, key=lambda row: row["name"])

print("---------------")
print("THERE ARE", len(product_rows), "PRODUCTS")
print("---------------")

departments = []
for row in product_rows:
    # Collect each department the first time it is seen (order of appearance).
    if row["department"] not in departments:
        departments.append(row["department"])
    print("+", row["name"], to_usd(float(row["price"])))

print("---------------")
print("THERE ARE", len(departments), "DEPARTMENTS")
print("---------------")

for dept_name in sorted(departments):
    count = len([row for row in products if row["department"] == dept_name])
    label = "products" if count > 1 else "product"
    print(f"+ {dept_name.title()} ({count} {label})")
# TODO: write some Python code here to produce the desired output
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth import get_user_model
from optparse import make_option
import logging
logger = logging.getLogger('django.commands')
class Command(BaseCommand):
    """Management command that creates a superuser non-interactively.

    Both --email and --password are required.
    """
    help = "Create a super user"
    option_list = BaseCommand.option_list + (
        make_option('--password', action='store', dest='password',
                    help=('Password of the superuser'), ),
        make_option('--email', action='store', dest='email',
                    help=('Email of the superuser'), ),
    )

    def handle(self, *args, **options):
        """Create and save the user, or raise CommandError when arguments are missing."""
        logger.info('Creation of a superuser')
        # Fix: both options are required, so reject if EITHER is missing —
        # the previous `and` let half-specified calls through.  Also added
        # the missing space in the error message ("--emailand").
        if options['email'] is None or \
                options['password'] is None:
            raise CommandError('You must provide --email '
                               'and --password argument')
        UserModel = get_user_model()
        user = UserModel.objects.create(email=options['email'])
        user.set_password(options['password'])
        user.save()
        logger.info('SuperUser [{!s}] created'.format(user.pk))
|
/Users/matthewpeterson/anaconda3/lib/python3.7/functools.py |
def dimensoes(matriz):
    """Print the matrix dimensions as "<rows>X<cols>" ("0X0" for an empty matrix)."""
    if not matriz:
        print("0X0")
        return
    print("{}X{}".format(len(matriz), len(matriz[0])))
|
from math import gcd
def eratosthenes(N):
    """Return the list of all primes <= N via the sieve of Eratosthenes.

    Fixes/improvements: removed an unused `from collections import deque`
    import; guard against N < 1 (the original crashed indexing work[1]);
    start crossing out multiples at i*i since smaller multiples were
    already marked by smaller primes (output is unchanged).
    """
    if N < 1:
        return []
    is_prime = [True] * (N + 1)
    is_prime[0] = False
    is_prime[1] = False
    primes = []
    for i in range(N + 1):
        if is_prime[i]:
            primes.append(i)
            for j in range(i * i, N + 1, i):
                is_prime[j] = False
    return primes
# def gcd(a, b):
# while b != 0:
# a, b = b, a % b
# return a
def main():
    """Classify the input list A as "pairwise coprime", "setwise coprime"
    or "not coprime" (competitive-programming style, reads stdin)."""
    N = int( input())
    A = list( map( int, input().split()))
    # Overall GCD > 1 means every pair shares a factor: not coprime at all.
    g = A[0]
    for a in A:
        g = gcd(a, g)
    if g > 1:
        print("not coprime")
        return
    # Trial-divide each a by primes up to 10**3; any leftover factor > 1 is
    # itself prime (assumes A_i <= 10**6 per problem constraints — confirm).
    E = eratosthenes(10**3)
    EE = [0]*len(E)
    R = []
    for a in A:
        for i, e in enumerate(E):
            if a%e == 0:
                # A small prime seen in two different elements breaks
                # pairwise coprimality; with g == 1 that means setwise.
                if EE[i] > 0:
                    print("setwise coprime")
                    return
                EE[i] = 1
                while a%e == 0:
                    a //= e
        if a > 1:
            # Leftover large prime factor; duplicates are checked below.
            R.append(a)
    R.sort()
    for i in range(len(R)-1):
        if R[i] == R[i+1]:
            print("setwise coprime")
            return
    print("pairwise coprime")
if __name__ == '__main__':
    main()
|
#!/usr/bin/python3
# -*- encoding: utf-8 -*-
'''
@File : testserver1.py
@Time : 2018/11/20 09:07:20
@Author : BaiYang
@Version : 1.0
@Contact : yang01.bai@horizon.ai
@License : (C)Copyright 2017-2018, Liugroup-NLPR-CASIA
@Desc : 测试tcp的非阻塞模式1:使用非阻塞单任务为多个客户端服务。
'''
import random,os,time,sys
import socket
def main():
    """Serve multiple TCP clients from one single-threaded loop using
    non-blocking sockets: poll for new connections, then poll every
    connected client for data.
    """
    client_list = list()
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)  # allow address reuse
    server.bind(("", 9998))
    server.listen(128)
    server.setblocking(False)  # accept() raises instead of blocking
    while True:
        time.sleep(0.5)  # debug: slow the poll loop down
        try:
            new_sock, _ = server.accept()
        except Exception:
            # Non-blocking accept raises when no client is waiting.
            print("没有客户端链接......")
        else:
            print("有一个客户端链接(%s)......" % str(new_sock))
            new_sock.setblocking(False)
            client_list.append(new_sock)
        # Fix: iterate over a snapshot — the original removed closed clients
        # from client_list while iterating it, which skips the next client.
        for client in list(client_list):
            try:
                content = client.recv(0x1024)
            except Exception:
                # Non-blocking recv raises when this client sent nothing.
                print("---没有数据---")
            else:
                if content:
                    print(content.decode("utf-8"))  # handle received data
                else:
                    # An empty read means the peer closed the connection.
                    print("客户端(%s)关闭<<<" % str(client))
                    client.close()
                    client_list.remove(client)


if __name__ == "__main__":
    main()
from django.contrib import admin
from shostpost_app.models import GhostPost
# Register GhostPost with the default ModelAdmin so it appears in the admin site.
admin.site.register(GhostPost)
|
import codecs
import re
import itertools
#-------delete first line-------
'''
print "delete first line"
#delete first line and create new file such as new______.
with codecs.open('./resources/agency.txt', "r", "utf-8-sig") as fin:
data = fin.read().splitlines(True)
with codecs.open('./resources/agency.txt', "w", "utf-8-sig") as fout:
fout.writelines(data[1:])
with codecs.open('./resources/routes.txt', "r", "utf-8-sig") as fin:
data = fin.read().splitlines(True)
with codecs.open('./resources/routes.txt', "w", "utf-8-sig") as fout:
fout.writelines(data[1:])
with codecs.open('./resources/stop_times.txt', "r", "utf-8-sig") as fin:
data = fin.read().splitlines(True)
with codecs.open('./resources/stop_times.txt', "w", "utf-8-sig") as fout:
fout.writelines(data[1:])
with codecs.open('./resources/stops.txt', "r", "utf-8-sig") as fin:
data = fin.read().splitlines(True)
with codecs.open('./resources/stops.txt', "w", "utf-8-sig") as fout:
fout.writelines(data[1:])
with codecs.open('./resources/trips.txt', "r", "utf-8-sig") as fin:
data = fin.read().splitlines(True)
with codecs.open('./resources/trips.txt', "w", "utf-8-sig") as fout:
fout.writelines(data[1:])
print "end delete first line"
'''
#-------end delete first line-------
#-------agency-------
'''
print "agency start"
# agency.txt
# 0 - agency_id *
# 1 - agency_name *
# 2 - agency_url
# 3 - agency_timezone
# 4 - agency_lang
# 5 - agency_phone
# 6 - agency_fare_url
# new_agency.txt
# 0 - agency_id
# 1 - agency_name
# 3: egged, 5: dan, 15: metropoline, 16: superbus
agency = ['3','5','15','16']
inputFile = codecs.open('./resources/agency.txt', "r", "utf-8-sig")
outputFile = codecs.open('./resources/new_agency.txt', "wb", "utf-8-sig")
for line in inputFile:
words = line.split(",")
if words[0] in agency:
print words[0]+','+words[1]
outputFile.write(words[0]+','+words[1]+u'\r\n')
outputFile.close()
inputFile.close()
print "end agency"
'''
#-------end agency-------
#-------routes-------
'''
print "routes"
# routes.txt
# 0 - route_id *
# 1 - agency_id *
# 2 - route_short_name *
# 3 - route_long_name *
# 4 - route_desc *
# 5 - route_type *
# 6 - route_color
# new_routes.txt
# 0 - route_id
# 1 - agency_id
# 2 - route_short_name
# 3 - route_long_name
# 4 - route_desc
# 5 - route_type
# 6 - origin_city_name (new column)
agencyFile = codecs.open('./resources/new_agency.txt', "r", "utf-8-sig")
inputFile = codecs.open('./resources/routes.txt', "r", "utf-8-sig")
outputFile = codecs.open('./resources/new_routes.txt', "wb", "utf-8-sig")
agency = []
for line in agencyFile:
words = line.split(",")
agency.append(words[0])
for line in inputFile:
words = line.split(",")
route_long_name = re.split('<->',words[3])
origin_city = re.split('-',route_long_name[0])
if words[1] in agency:
print words[0]+','+words[1]+','+words[2]+','+words[3]+','+words[4]+','+words[5]+','+origin_city[-1]
outputFile.write(words[0]+','+words[1]+','+words[2]+','+words[3]+','+words[4]+','+words[5]+','+origin_city[-1]+u'\r\n')
agencyFile.close()
outputFile.close()
inputFile.close()
print "end routes"
'''
#-------end routes-------
#-------trips-------
'''
print "trips"
# trips.txt
# 0 - route_id *
# 1 - service_id
# 2 - trip_id *
# 3 - direction_id *
# 4 - shape_id
# new_trips.txt
# 0 - route_id
# 1 - trip_id
# 2 - direction_id
routesFile = codecs.open('./resources/new_routes.txt', "r", "utf-8-sig")
inputFile = codecs.open('./resources/trips.txt', "r", "utf-8-sig")
outputFile = codecs.open('./resources/new_trips.txt', "wb", "utf-8-sig")
routes = []
for line in routesFile:
words = line.split(",")
routes.append(words[0])
for line in inputFile:
words = line.split(",")
if words[0] in routes:
print words[0]+','+words[2]+','+words[3]
outputFile.write(words[0]+','+words[2]+','+words[3]+u'\r\n')
routesFile.close()
outputFile.close()
inputFile.close()
print "end trips"
'''
#-------end trips-------
#-------stop_times-------
'''
print "stop_times"
# stop_times.txt
# 0 - trip_id *
# 1 - arrival_time
# 2 - departure_time
# 3 - stop_id *
# 4 - stop_sequence *
# 5 - pickup_type
# 6 - drop_off_type
# new_trips.txt
# 0 - trip_id
# 1 - stop_id
# 2 - stop_sequence
tripsFile = codecs.open('./resources/new_trips.txt', "r", "utf-8-sig")
inputFile = codecs.open('./resources/stop_times.txt', "r", "utf-8-sig")
outputFile = codecs.open('./resources/new_stop_times.txt', "wb", "utf-8-sig")
trips = []
for line in tripsFile:
words = line.split(",")
trips.append(words[1])
for line in inputFile:
words = line.split(",")
if words[0] in trips:
print words[0]+','+words[3]+','+words[4]
outputFile.write(words[0]+','+words[3]+','+words[4]+u'\r\n')
tripsFile.close()
outputFile.close()
inputFile.close()
print "end stop_times"
'''
#-------end stop_times-------
#-------stops-------
'''
print "stops"
# stop_times.txt
# 0 - stop_id *
# 1 - stop_code
# 2 - stop_name *
# 3 - stop_desc
# 4 - stop_lat *
# 5 - stop_lon *
# 6 - location_type
# 7 - parent_station
# new_trips.txt
# 0 - stop_id
# 1 - stop_name
# 2 - stop_lat
# 2 - stop_lon
stop_timesFile = codecs.open('./resources/stop_times.txt', "r", "utf-8-sig")
inputFile = codecs.open('./resources/stops.txt', "r", "utf-8-sig")
outputFile = codecs.open('./resources/new_stops.txt', "wb", "utf-8-sig")
stop_times = []
for line in stop_timesFile:
words = line.split(",")
if words[1] not in words:
stop_times.append(words[1])
for line in inputFile:
words = line.split(",")
if words[0] in stop_times:
print words[0]+','+words[2]+','+words[4]+','+words[5]
outputFile.write(words[0]+','+words[2]+','+words[4]+','+words[5]+u'\r\n')
stop_timesFile.close()
outputFile.close()
inputFile.close()
print "end stops"
'''
#-------end stops-------
#-------split new_stop_times-------
'''
print "split new_stop_times"
inputFile = codecs.open('./resources/new_stop_times.txt', "r", "utf-8-sig")
readLine = inputFile.readlines()
for i in range(0,48):
outputFile = codecs.open('./resources/stop_times_directory/stop_times_'+str(i)+'.txt', "wb", "utf-8-sig")
for j in range(((i * 80000) + 1), ((i+1) * 80000)+1):
print j
outputFile.write(readLine[j-1])
print "end split new_stop_times"
'''
#-------end split new_stop_times------- |
'''
This is a implementation of Quantum State Tomography for Qubits,
using techniques of following papars.
'Iterative algorithm for reconstruction of entangled states(10.1103/PhysRevA.63.040303)'
'Diluted maximum-likelihood algorithm for quantum tomography(10.1103/PhysRevA.75.042108)'
'Qudit Quantum State Tomography(10.1103/PhysRevA.66.012303)'
'''
import numpy as np
from numpy import array, kron, trace, identity, sqrt, zeros, exp, pi, conjugate, random
from scipy.linalg import sqrtm
from datetime import datetime
from concurrent import futures
import os
from pathlib import Path
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import pickle
# Single-qubit SU(2) basis: identity plus the three Pauli matrices (I, X, Y, Z).
su2b = array([
    [[ 1, 0], [ 0, 1]],
    [[ 0, 1], [ 1, 0]],
    [[ 0,-1j], [ 1j, 0]],
    [[ 1, 0], [ 0, -1]]
    ]
)
# Filled in by makeSU2Bases(): n-qubit tensor products of the matrices above.
su2Bases = []
newbases = su2b.copy()
def makeSU2Bases(numberOfQubits):
    """Fill the module-level `su2Bases` with the n-fold tensor products of the
    single-qubit SU(2) basis, normalised by 2**numberOfQubits.

    NOTE(review): mutates the module globals `newbases` and `su2Bases`, so a
    second call would start from the previous result — confirm single use.
    """
    global newbases, su2Bases
    for _ in range(numberOfQubits-1):
        for i in range(len(newbases)):
            su2Bases.extend([kron(newbases[i], su2b[j]) for j in range(4)])
        newbases = su2Bases.copy()
        su2Bases = []
    su2Bases = array(newbases) / (2**numberOfQubits)
# Single-qubit measurement projectors (H/V/D/R/L — presumably photonic
# polarisation states; confirm against the experimental setup).
bH = array([[1,0],[0,0]])
bV = array([[0,0],[0,1]])
bD = array([[1/2,1/2],[1/2,1/2]])
bR = array([[1/2,1j/2],[-1j/2,1/2]])
bL = array([[1/2,-1j/2],[1j/2,1/2]])
# Measurement ordering used by makeBases(); cycleBases2 is cycleBases1 reversed.
initialBases = array([bH, bV, bR, bD])
cycleBases1 = array([bH, bV, bR, bD])
cycleBases2 = array([bD, bR, bV, bH])
def makeBases(numberOfQubits):
    """Build the 4**n tomography measurement bases plus display names.

    Each additional qubit alternates between the forward and reversed base
    cycle so that consecutive measurement settings differ minimally.

    Fix: for numberOfQubits == 1 the loop body never ran and `afterBases`
    was referenced unbound (NameError); it now falls back to the initial
    single-qubit bases.
    """
    afterBases = initialBases
    for _ in range(numberOfQubits - 1):
        expanded = []
        for i, base in enumerate(afterBases):
            cycle = cycleBases1 if i % 2 == 0 else cycleBases2
            expanded.extend(kron(base, cycleBase) for cycleBase in cycle)
        afterBases = expanded
    baseName = []
    for i in range(2**numberOfQubits):
        if i%4 == 0:
            # NOTE(review): width=4 and the %4 stride look hard-coded for the
            # 4-qubit case — confirm before using other qubit counts.
            baseName.append("|"+ str(np.binary_repr(i, width=4)) +">")
    return array(afterBases), baseName
def makeBMatrix(numberOfQubits, bases):
    """Build the basis-change matrix B with B[i][j] = Tr(bases[i] @ su2Bases[j])."""
    global su2Bases
    size = 4**numberOfQubits
    B = np.zeros((size, size))
    for row in range(size):
        for col in range(size):
            B[row][col] = np.trace(bases[row] @ su2Bases[col])
    return B
def makeMMatrix(numberOfQubits, bases):
    """Return the dual (M) matrices built from the inverse of the B matrix."""
    global su2Bases
    size = 4**numberOfQubits
    BInverse = np.linalg.inv(makeBMatrix(numberOfQubits, bases))
    M = [
        sum(BInverse[j][i] * su2Bases[j] for j in range(size))
        for i in range(size)
    ]
    return array(M)
def makeInitialDensityMatrix(numberOfQubits, dataList, bases, M):
    """Seed the iterative algorithm: linear-inversion estimate from the counts,
    projected onto a physical state via the Cholesky-based projection."""
    size = 4**numberOfQubits
    N = sum(np.trace(M[i]) * dataList[i] for i in range(size))
    linearEstimate = sum(dataList[i] * M[i] for i in range(size)) / N
    return choleskyDecomposition(numberOfQubits, linearEstimate)
""" cholesky decomposition """
def choleskyDecomposition(numberOfQubits, matrix):
    """Project `matrix` onto a physical (positive semi-definite, unit-trace)
    density matrix via a Cholesky-like factorisation matrix ~ L†L, returning
    L†L / Tr(L†L).

    Fixes: `np.complex` was removed in NumPy 1.24 — the builtin `complex`
    is used instead; the original's if/else around sqrt(s) had identical
    branches (and would raise TypeError comparing a complex `s` with 0),
    so it is collapsed into one call.
    """
    dim = 2**numberOfQubits
    L = np.zeros([dim, dim], dtype=complex)
    for i in range(dim-1, -1, -1):
        s = matrix[i][i]
        for k in range(dim-1, i, -1):
            s -= np.conjugate(L[k][i]) * L[k][i]
        # NOTE(review): s can drift slightly negative numerically, in which
        # case np.sqrt yields nan — same as the original behaviour.
        L[i][i] = np.sqrt(s)
        for j in range(i):
            t = matrix[i][j]
            for k in range(dim-1, i, -1):
                t -= (np.conjugate(L[k][i]) * L[k][j])
            if L[i][i] != 0:
                L[i][j] = t / np.conjugate(L[i][i])
            else:
                # Avoid division by zero on rank-deficient estimates.
                L[i][j] = t / 1e-9
    for i in range(dim):
        L[i][i] = np.real(L[i][i])
    return (np.conjugate(L).T @ L) / np.trace(np.conjugate(L).T @ L)
""" Get Experimental Data """
def getExperimentalData(pathOfExperimentalData):
    """
    getExperimentalData(pathOfExperimentalData(string)):

    Read whitespace-separated integer counts from the file at
    "pathOfExperimentalData".
    ----------------------------------------------------------------------------------------
    return:
        np.array of them.
    """
    counts = []
    with open(pathOfExperimentalData) as f:
        for line in f:
            counts.extend(int(token) for token in line.split())
    return array(counts)
def doIterativeAlgorithm(numberOfQubits, bases, listOfExperimentalDatas, MMatrix):
    """
    doIterativeAlgorithm():

    This function is to do iterative algorithm(10.1103/PhysRevA.63.040303) and diluted MLE algorithm(10.1103/PhysRevA.75.042108) to a set of datas given from a experiment.
    This recieve four variables (numberOfQubits, bases, listAsExperimentalDatas),
    and return most likely estimated density matrix (np.array) and total time of calculation(datetime.timedelta).

    First quantum state matrix for this algorithm is a identity matrix.

    --------------------------------------------------------------------------------------------------------------

    Return:

        most likely estimated density matrix(np.array),
    """
    """ Setting initial parameters """
    iter = 0
    # epsilon is the dilution parameter of the diluted MLE update; it is
    # shrunk whenever an update fails to increase the likelihood.
    epsilon = 1000
    endDiff = 1e-11
    diff = 100
    # TolFun = 10e-11
    # traceDistance = 100
    maxNumberOfIteration = 100000
    dataList = listOfExperimentalDatas
    totalCountOfData = sum(dataList)
    nDataList = dataList / totalCountOfData # nDataList is a list of normarized datas
    densityMatrix = makeInitialDensityMatrix(numberOfQubits, dataList, bases, MMatrix)
    # densityMatrix = identity(2 ** numberOfQubits)
    """ Start iteration """
    # while traceDistance > TolFun and iter <= maxNumberOfIteration:
    while diff > endDiff and iter <= maxNumberOfIteration and epsilon > 1e-6:
        # Born-rule probabilities of the current estimate for every basis.
        probList = [trace(base @ densityMatrix) for base in bases]
        nProbList = probList / sum(probList)
        # R operator of the MLE fixed-point iteration (skips zero-probability bases).
        rotationMatrix = sum([(nDataList[i] / probList[i]) * bases[i] for i in range(4 ** numberOfQubits) if probList[i] != 0])
        """ Normalization of Matrices for Measurement Bases """
        U = np.linalg.inv(sum(bases)) / sum(probList)
        # Diluted update operators (I + eps*R)/(1+eps) applied on both sides.
        rotationMatrixLeft = (identity(2 ** numberOfQubits) + epsilon * U @ rotationMatrix) / (1 + epsilon)
        rotationMatrixRight = (identity(2 ** numberOfQubits) + epsilon * rotationMatrix @ U) / (1 + epsilon)
        """ Calculation of updated density matrix """
        modifiedDensityMatrix = rotationMatrixLeft @ densityMatrix @ rotationMatrixRight / trace(rotationMatrixLeft @ densityMatrix @ rotationMatrixRight)
        eigValueArray, eigVectors = np.linalg.eig(densityMatrix - modifiedDensityMatrix)
        traceDistance = sum(np.absolute(eigValueArray)) / 2
        """ Update Likelihood Function, and Compared with older one """
        LikelihoodFunction = sum([nDataList[i] * np.log(nProbList[i]) for i in range(4 ** numberOfQubits) if nProbList[i] != 0])
        probList = [trace(base @ modifiedDensityMatrix) for base in bases]
        nProbList = probList / sum(probList)
        modifiedLikelihoodFunction = sum([nDataList[i] * np.log(nProbList[i]) for i in range(4 ** numberOfQubits) if nProbList[i] != 0])
        nowdiff = np.real(modifiedLikelihoodFunction - LikelihoodFunction)
        """ Show Progress of Calculation """
        progress = 100 * iter / maxNumberOfIteration
        if progress % 5 == 0:
            msg = "Progress of calculation: " + str(int(progress)) + "%"
            print(msg)
        """ Increment """
        iter += 1
        """ Check Increasing of Likelihood Function """
        if nowdiff < 0:
            # Likelihood decreased: dilute harder and retry without accepting.
            epsilon = epsilon * 0.1
            continue
        else:
            diff = nowdiff
        """ Update Density Matrix """
        densityMatrix = modifiedDensityMatrix.copy()
    """ Check That Max Iteration Number was appropriate """
    if iter >= maxNumberOfIteration:
        print("----------------------------------------------")
        print("Iteration time reached max iteration number.")
        print("The number of max iteration times is too small.")
        print("----------------------------------------------")
    """ Show the total number of iteration """
    endIterationTimes = iter
    print("Iteration was '" + str(endIterationTimes) + "' times.")
    return modifiedDensityMatrix, endIterationTimes
""" Calculate Fidelity """
def calculateFidelity(idealDensityMatrix, estimatedDensityMatrix):
"""
calculateFidelity(idealDensityMatrix, estimatedDensityMatrix):
"""
fidelity = np.real(trace(sqrtm(sqrtm(idealDensityMatrix) @ estimatedDensityMatrix @ sqrtm(idealDensityMatrix)))) ** 2
return fidelity
""" Iterative Simulation """
def doIterativeSimulation(numberOfQubits, bases, pathOfExperimentalData, idealDensityMatrix, resultDirectoryName, MMatrix, baseNames):
"""
doIterativeSimulation(numberOfQubits, bases, pathOfExperimentalData, idealDensityMatrix, resultDirectoryName, MMatrix)
"""
""" Get Experimental Data"""
listOfExperimentalData = getExperimentalData(pathOfExperimentalData)
""" Calculate """
estimatedDensityMatrix, totalIterationTime = doIterativeAlgorithm(numberOfQubits, bases, listOfExperimentalData, MMatrix)
fidelity = calculateFidelity(idealDensityMatrix, estimatedDensityMatrix)
""" Make File Name of result """
l = 0
r = len(pathOfExperimentalData)-1
for i in range(len(pathOfExperimentalData)):
if pathOfExperimentalData[len(pathOfExperimentalData)-1-i] == ".":
r = len(pathOfExperimentalData)-1-i
if pathOfExperimentalData[len(pathOfExperimentalData)-1-i] == "/" or pathOfExperimentalData[len(pathOfExperimentalData)-1-i] == "\\":
l = len(pathOfExperimentalData)-i
break
resultFileName = pathOfExperimentalData[l:r]
resultFilePath = '.\\result\\qubit\\iterative\\' + resultDirectoryName + '\\' + 'result' + '.txt'
resultIterationTimeFilePath = '.\\result\\qubit\\iterative\\' + resultDirectoryName + '\\' + 'resultIterationTime' + '.txt'
""" Save Result """
with open(resultFilePath, mode='a') as f:
f.writelines(str(fidelity) + '\n')
with open(resultIterationTimeFilePath, mode='a') as f:
f.writelines(str(totalIterationTime) + '\n')
""" Make 3D Plot """
plotResult(numberOfQubits, estimatedDensityMatrix, baseNames)
""" Poisson Distributed Simulation """
def doPoissonDistributedSimulation(numberOfQubits, bases, pathOfExperimentalData, idealDensityMatrix, resultDirectoryName, MMatrix):
"""
doPoissonDistributedSimulation(numberOfQubits, bases, pathOfExperimentalData, idealDensityMatrix, resultDirectoryName, MMatrix)
"""
""" Get Experimental Data"""
listOfExperimentalData = getExperimentalData(pathOfExperimentalData)
""" Calculate """
estimatedDensityMatrix, totalIterationTime = doIterativeAlgorithm(numberOfQubits, bases, random.poisson(listOfExperimentalData), MMatrix)
fidelity = calculateFidelity(idealDensityMatrix, estimatedDensityMatrix)
""" Make File Name of result """
l = 0
r = len(pathOfExperimentalData)-1
for i in range(len(pathOfExperimentalData)):
if pathOfExperimentalData[len(pathOfExperimentalData)-1-i] == ".":
r = len(pathOfExperimentalData)-1-i
if pathOfExperimentalData[len(pathOfExperimentalData)-1-i] == "/" or pathOfExperimentalData[len(pathOfExperimentalData)-1-i] == "\\":
l = len(pathOfExperimentalData)-i
break
resultFileName = pathOfExperimentalData[l:r]
resultFilePath = '.\\result\\qubit\\poisson\\' + resultDirectoryName + "\\" + resultFileName + '_result' + '.txt'
""" Save Result """
with open(resultFilePath, mode='a') as f:
f.write(str(fidelity) + '\n')
def plotResult(numberOfQubits, densityMatrix, baseNames):
    """
    Draw side-by-side 3D bar charts of the real and imaginary parts of
    densityMatrix and show them, then pickle the figure object to
    'firstplottest_plot.pkl' so it can be reopened interactively.

    baseNames -- tick labels placed on every 4th position of both axes.
    """
    """ Plot Setting """
    xedges, yedges = np.arange(2**numberOfQubits), np.arange(2**numberOfQubits)
    xpos, ypos = np.meshgrid(xedges, yedges)  # expand x/y coordinates to the 2D grid used by bar3d (step 1)
    zpos = 0  # every bar starts at z = 0
    dx = 1  # bar width along x
    dy = 1  # bar width along y
    dz = densityMatrix.ravel()  # bar heights = flattened matrix entries
    xpos = xpos.ravel()  # flatten for bar3d (step 2)
    ypos = ypos.ravel()  # flatten for bar3d (step 2)
    fig = plt.figure()  # create the drawing area
    ax1 = fig.add_subplot(121, projection="3d")  # left panel: 3D axes
    ax1.bar3d(xpos,ypos,zpos,dx,dy,np.real(dz), edgecolor='black')  # 3D histogram of the real part
    plt.title("Real Part")
    # plt.xlabel("X")
    plt.xticks(np.arange(0, 2**numberOfQubits, 4), labels=baseNames)
    # plt.ylabel("Y")
    plt.yticks(np.arange(0, 2**numberOfQubits, 4), labels=baseNames)
    # ax1.set_zlabel("Z")
    ax1.set_zlim(-0.1, 0.6)  # fixed z-range so plots of different states are comparable
    ax2 = fig.add_subplot(122, projection="3d")  # right panel: 3D axes
    ax2.bar3d(xpos,ypos,zpos,dx,dy,np.imag(dz), edgecolor='black')  # 3D histogram of the imaginary part
    plt.title("Imaginary Part")
    # plt.xlabel("X")
    plt.xticks(np.arange(0, 2**numberOfQubits, 4), labels=baseNames)
    # plt.ylabel("Y")
    plt.yticks(np.arange(0, 2**numberOfQubits, 4), labels=baseNames)
    # ax2.set_zlabel("Z")
    ax2.set_zlim(-0.1, 0.6)
    plt.show()
    # Persist the live figure object (not an image) for later re-inspection.
    with open('firstplottest'+'_plot.pkl', mode='wb') as f:
        pickle.dump(fig, f)
""" Get Number of Qubits """
def getNumberOfQubits():
"""
getNumberOfQubits()
"""
print("------------------------------------------------------------")
print("PLEASE ENTER NUMBER OF QUBITS")
print("------------------------------------------------------------")
print(">>")
numberOfQubits = int(input())
return numberOfQubits
""" Get Path of Experimental Data Directory """
def getExperimentalDataDirectoryPath():
"""
getExperimentalDataDirectoryPath()
"""
print("------------------------------------------------------------")
print("PLEASE ENTER PATH OF EXPERIMENTAL DATA DIRECTORY")
print("")
print("LIKE THIS >> .\\datadirectory")
print("------------------------------------------------------------")
print(">>")
return Path(input())
""" Get Paths of Experimental Data """
def getExperimentalDataPaths():
"""
getExperimentalDataPaths()
"""
print("------------------------------------------------------------")
print("PLEASE ENTER PATHS OF EXPERIMENTAL DATA")
print("")
print("IF THERE ARE MULTIPLE DATA FILE YOU WANT TO TOMOGRAPHY,")
print("ENTER ALL PATHS SEPARATED WITH SPACE.")
print("LIKE THIS >> .\\datadirectory\\ex1.txt .\\datadirectory\\ex2.txt ...")
print("------------------------------------------------------------")
print(">>")
paths = list(input().split())
return paths
""" Get Name of Result Directory AND FILE """
def getNameOfResultDirectory():
"""
getNameOfResultDirectory()
"""
print("------------------------------------------------------------")
print("PLEASE ENTER NAME OF RESULT DIRECTORY ")
print("")
print("THE RESULT DATA WILL SAVED AT ")
print("'.\\result\\qubit\\iterative(or poisson)\\{ YOUR ENTED DIRECTORY NAME }\\{ EXPERIMENTAL DATA FILE NAME }_result.txt'")
print("")
print("IF EMPTY, THE NAME OF RESULT DIRECTORY IS 'default'")
print("------------------------------------------------------------")
print(">>")
nameOfResultDirectory = input()
if nameOfResultDirectory == "":
nameOfResultDirectory = "default"
return nameOfResultDirectory
""" Whether Do Poisson Distributed Simulation """
def checkPoisson():
"""
checkPoisson()
"""
print("------------------------------------------------------------")
print("PLEASE ENTER ANSWER WHETHER DO POISSON DISTRIBUTED SIMULATION")
print("IF YOU DO, PLEASE ENTER 'yes'")
print("IF YOU ENTER ANOTHER WORD OR EMPTY, YOUR ANSWER IS REGARED AS 'no'")
print("------------------------------------------------------------")
print(">>")
answer = input()
if answer == "yes" or answer == "Yes" or answer == "YES":
print("YOUR ANSWER IS: 'yes'")
poissonPaths = getExperimentalDataPaths()
eachIterationTime = getEachIterationTime()
return True, poissonPaths*eachIterationTime
else:
print("YOUR ANSWER IS: 'no'")
return False, []
""" Get Each Iteration Time """
def getEachIterationTime():
"""
getEachIterationTime()
"""
print("------------------------------------------------------------")
print("PLEASE ENTER ITERATION TIME OF EACH POISSON SIMULATION")
print("------------------------------------------------------------")
print(">>")
eachIterationTime = input()
if eachIterationTime == "":
eachIterationTime = 0
else:
eachIterationTime = int(eachIterationTime)
return eachIterationTime
""" Get Number of Parallel Comuting """
def getNumberOfParallelComputing():
"""
getNumberOfParallelComputing()
"""
print("------------------------------------------------------------")
print("HOW MANY TIMES DO YOU WANT TO PARALLELIZE?")
print("IF THE NUMBER IS TOO LARGE, THE PARFORMANCE OF SIMULATION BECOME LOWER.")
print("THE NUMBER OF LOGICAL PROCESSOR OF YOUR COMPUTER IS >>")
print(os.cpu_count())
print("RECOMENDED NUMBER IS LESS THAN THE ABOVE NUMBER.")
print("------------------------------------------------------------")
print(">>")
n = input()
if n != '':
numberOfParallelComputing = int(n)
else:
numberOfParallelComputing = 1
return numberOfParallelComputing
if __name__ == "__main__":
""" Get Number of Qubits """
numberOfQubits = getNumberOfQubits()
""" Make SU2 Bases """
makeSU2Bases(numberOfQubits)
""" Get Path of Experimental Data Directory """
directoryPath = getExperimentalDataDirectoryPath()
paths = list(directoryPath.glob("*.txt"))
""" Get Name of Result Directory """
resultDirectoryName = getNameOfResultDirectory()
""" Check Poisson Distributed Simulation """
check, poissonPaths = checkPoisson()
""" Get Number of Parallel Computing """
numberOfParallelComputing = getNumberOfParallelComputing()
""" Make Bases """
basesOfQubits, baseNames = makeBases(numberOfQubits)
""" Make M Matrix """
M = makeMMatrix(numberOfQubits, basesOfQubits)
""" Make Ideal Density Matrix """
baseVecter = np.zeros([1, 2**numberOfQubits])
baseVecter[0][0] = 1 / sqrt(2)
baseVecter[0][2**numberOfQubits-1] = 1 / sqrt(2)
idealDensityMatrix = baseVecter.T @ baseVecter
# baseVecter[0][1] = 1 / 2
# baseVecter[0][2] = 1 / 2
# baseVecter[0][4] = 1 / 2
# baseVecter[0][8] = 1 / 2
# baseVecter = np.full([1, 2**numberOfQubits], 1/np.sqrt(2**numberOfQubits), dtype=np.complex)
# idealDensityMatrix = baseVecter.T @ baseVecter
# matrix = np.zeros([2**numberOfQubits, 2**numberOfQubits]) # (|0000>+|1111>)(<0000|+<1111|) + |0001><0001| + |0010><0010| + |0100><0100| + |1000><1000|
# baseVecter = np.zeros([1, 2**numberOfQubits])
# baseVecter[0][1] = 1
# matrix += baseVecter.T @ baseVecter
# baseVecter = np.zeros([1, 2**numberOfQubits])
# baseVecter[0][2] = 1
# matrix += baseVecter.T @ baseVecter
# baseVecter = np.zeros([1, 2**numberOfQubits])
# baseVecter[0][4] = 1
# matrix += baseVecter.T @ baseVecter
# baseVecter = np.zeros([1, 2**numberOfQubits])
# baseVecter[0][8] = 1
# matrix += baseVecter.T @ baseVecter
# idealDensityMatrix = baseVecter.T @ baseVecter
# baseVecter = np.zeros([1, 2**numberOfQubits])
# baseVecter[0][0] = 1
# baseVecter[0][2**numberOfQubits-1] = 1
# matrix += baseVecter.T @ baseVecter
# matrix = matrix/np.trace(matrix)
# idealDensityMatrix = matrix
""" Make Result Directory """
if not os.path.exists('.\\result\\qubit\\iterative\\' + resultDirectoryName):
os.makedirs('.\\result\\qubit\\iterative\\' + resultDirectoryName)
""" Start Tomography """
with futures.ProcessPoolExecutor(max_workers=numberOfParallelComputing) as executor:
for path in paths:
executor.submit(fn=doIterativeSimulation, numberOfQubits=numberOfQubits, bases=basesOfQubits, pathOfExperimentalData=str(path), idealDensityMatrix=idealDensityMatrix, resultDirectoryName=resultDirectoryName, MMatrix=M, baseNames=baseNames)
""" Start Poisson Distributed Simulation """
if check:
""" Make Result Directory for Poisson Distributed Simulation """
if not os.path.exists('.\\result\\qubit\\poisson\\' + resultDirectoryName):
os.makedirs('.\\result\\qubit\\poisson\\' + resultDirectoryName)
with futures.ProcessPoolExecutor(max_workers=numberOfParallelComputing) as executor:
for poissonPath in poissonPaths:
executor.submit(fn=doPoissonDistributedSimulation, numberOfQubits=numberOfQubits, bases=basesOfQubits, pathOfExperimentalData=poissonPath, idealDensityMatrix=idealDensityMatrix, resultDirectoryName=resultDirectoryName, MMatrix=M)
# if not os.path.exists('.\\result\\4qubit\\poisson\\benchmark'):
# os.makedirs('.\\result\\4qubit\\poisson\\benchmark')
# with open('benchmark'+str(numberOfQubits)+'qubits.txt', mode='a') as f:
# f.write("total time: " + str(end_time - start_time) + "\n")
|
# File locations used by the coffee-machine controller.
# (Trailing semicolons of the original removed -- they are noise in Python.)
conf = '/home/pi/coffee/temp.conf'        # temperature configuration file
killfile = '/var/www/html/killfile.tmp'   # flag file -- presumably a stop request from the web UI; TODO confirm
runfile = '/var/www/html/runfile.tmp'     # flag file -- presumably marks "currently running"; TODO confirm
##GPIO PINS
# NOTE(review): numbering scheme (BCM vs BOARD) is not visible here -- verify
# against the GPIO setup code before rewiring.
relais = 17       # relay output
statusLED = 27    # status LED output
button = 22       # push-button input
|
import requests

# Tellonym login helper: POST the credentials to the token-create endpoint
# and report the outcome on stdout.
url = "https://api.tellonym.me/tokens/create"
# Headers mimic the official iOS client so the API accepts the request.
headers = {
    "Host": "api.tellonym.me",
    "Content-Type": "application/json",
    "Accept": "application/json",
    "Connection": "keep-alive",
    "tellonym-client": "ios:2.65.0:488:14:iPhone13,3",
    "User-Agent": "Tellonym/488 CFNetwork/1206 Darwin/20.1.0",
    "Accept-Language": "en",
}
email = input("Username/Email: ")
password = input("Password: ")
# Request body expected by the endpoint (device fields are static).
data = {
    "activeExperimentId": 0,
    "password": password,
    "country": "US",
    "deviceName": "Soud’s iPhone",
    "deviceType": "ios",
    "lang": "en",
    "limit": 16,
    "email": email
}
req = requests.post(url, json=data, headers=headers)
# NOTE(review): substring checks on the raw body are kept for compatibility;
# parsing req.json() error codes would be more robust -- confirm the API's
# error schema before changing them.
if "WRONG_CREDENTIALS" in req.text:
    print("Login Failed, Try Again")
elif "PARAMETER_MISSING" in req.text:
    print("Missing Something, Try Again")
elif "accessToken" in req.text:
    print("Login Success")
    # requests parses the JSON body for us; no need for json.loads(req.text).
    token = req.json()["accessToken"]
else:
    print("Error !")
    print(req)
    print(req.text)
|
# Find all numbers disappeared in an array (LeetCode 448): values are
# expected to lie in 1..len(nums).  Presence of value v is recorded by
# negating the element at index v-1; indices left positive are the answer.
def find_num(nums):
    """Return the sorted list of values in 1..len(nums) missing from nums.

    Unlike the original, the caller's list is NOT mutated: the sign-marking
    is done on a private copy.
    """
    marks = list(nums)
    for value in nums:
        idx = abs(value) - 1  # abs() in case this slot was already negated
        if marks[idx] > 0:
            marks[idx] = -marks[idx]
    return [i + 1 for i, m in enumerate(marks) if m > 0]
if __name__ == '__main__':
    # Smoke test: 1 and 2 are present, 3 and 4 are missing -> prints [3, 4]
    print(find_num([1,1,2,2]))
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ˅
from behavioral_patterns.iterator.aggregate import Aggregate
from behavioral_patterns.iterator.book_shelf_iterator import BookShelfIterator
# ˄
class BookShelf(Aggregate):
    """Concrete Aggregate of the GoF Iterator pattern: a fixed-capacity shelf.

    Books are appended left to right into a pre-allocated list; iteration is
    delegated to BookShelfIterator.  (The code-generator marker comments and
    the redundant ``pass`` of the original were removed.)
    """

    def __init__(self, max_size):
        """Create an empty shelf that can hold at most *max_size* books."""
        self.__number_of_books = 0
        self.__books = [None] * max_size  # fixed-size backing store

    def iterator(self):
        """Return an Iterator over this shelf (Aggregate interface)."""
        return BookShelfIterator(book_shelf=self)

    def get_at(self, index):
        """Return the book at *index*; unfilled slots hold None."""
        return self.__books[index]

    def add(self, book):
        """Store *book* in the next free slot.

        Raises IndexError when the shelf is already full, since the backing
        list has fixed length max_size.
        """
        self.__books[self.__number_of_books] = book
        self.__number_of_books += 1

    @property
    def number_of_books(self):
        """Number of books currently stored on the shelf."""
        return self.__number_of_books
|
def readAllData(filename):
    """Read a whitespace-separated data file and return its rows as tuples.

    The first line is treated as a header and skipped; each remaining line
    becomes a tuple of its whitespace-separated fields, all kept as strings.
    The file is closed even if reading fails (the original leaked the handle
    on error and carried a block of dead debug prints).
    """
    with open(filename, 'r') as file:
        file.readline()  # discard the header line
        return [tuple(line.split()) for line in file]
def computeAverageForClasses(list_val):
    """Return {class label: mean value} for a list of (value, label, ...) rows.

    Each row is a tuple whose first field is a numeric string and whose
    second field is the class label; extra fields are ignored.

    Side effect: rebinds the module-level global ``set_lis`` to the set of
    labels, because countMisclassified() reads it afterwards -- kept for
    backward compatibility with that caller.
    """
    global set_lis
    set_lis = {row[1] for row in list_val}
    list_class = list(set_lis)
    # Single pass over the data instead of the original O(rows * classes) rescan.
    total = dict.fromkeys(list_class, 0.0)
    number = dict.fromkeys(list_class, 0)
    for row in list_val:
        total[row[1]] += float(row[0])
        number[row[1]] += 1
    result = {label: total[label] / number[label] for label in list_class}
    # Debug output: same values the original printed, as parallel lists.
    print(list_class)
    print([total[label] for label in list_class])
    print([number[label] for label in list_class])
    return result
def countMisclassified(avg_val, list_val):
    """Count nearest-mean misclassifications and log the offenders to files.

    avg_val  -- {class label: mean value}, as produced by computeAverageForClasses
    list_val -- list of (value, label) tuples

    Returns the binary YES/NO misclassification count.  Also writes
    "Misclassified.txt" (binary YES/NO pass) and "Misclassified_global.txt"
    (all-pairs pass over every class) in the working directory.

    NOTE(review): hard-codes the 'YES'/'NO' labels and reads the module
    global ``set_lis`` left behind by computeAverageForClasses -- this
    function only works when that one has run first; confirm callers.
    """
    yes_avg = avg_val['YES']
    no_avg = avg_val['NO']
    # class list comes from the global built by computeAverageForClasses
    list_class = list(set_lis)
    fo = open("Misclassified.txt", 'w')
    misclassified = 0
    for i in list_val:
        if i[1] == 'YES':
            # labelled YES but strictly closer to the NO mean
            if abs(float(i[0]) - yes_avg) > abs(float(i[0]) - no_avg):
                misclassified += 1
                fo.write(i[0])
                fo.write(", ")
                fo.write(i[1])
                fo.write('\n')
        else:
            # labelled non-YES but strictly closer to the YES mean
            if abs(float(i[0]) - no_avg) > abs(float(i[0]) - yes_avg):
                misclassified += 1
                fo.write(i[0])
                fo.write(", ")
                fo.write(i[1])
                fo.write('\n')
    fo.close()
    # Global pass: a row is logged once per *other* class whose mean is
    # strictly closer than its own class mean, so with >2 classes a row can
    # be written several times.
    f = open("Misclassified_global.txt", 'w')
    misclassified_global = 0
    for i in list_val:
        for j in range(0, len(list_class)):
            if i[1] == list_class[j]:
                for k in range(0, len(list_class)):
                    if j != k:
                        if abs(float(i[0]) - avg_val[list_class[j]]) > abs(float(i[0]) - avg_val[list_class[k]]):
                            misclassified_global += 1
                            f.write(i[0])
                            f.write(", ")
                            f.write(i[1])
                            f.write('\n')
    f.close()
    # print(misclassified_global)
    # return misclassified_global
    return misclassified
# Script driver: read data.txt (value/label rows), print the per-class means
# and the YES/NO misclassification count.
list_data = readAllData('data.txt')
# print('List of tuple:')
# print(list_data)
avg_val = computeAverageForClasses(list_data)
print("Average Values of the classes in a dictionary: ", avg_val)
mis = countMisclassified(avg_val, list_data)
print("Total misclassified values in the file: ", mis)
|
import os

# Extract the second CSV column of an input file as "eNB_<id>" rows and write
# them to <name>_NE_ID_only<ext> next to the input file.

# Keep asking until the answer is an existing file.
input_file = os.path.abspath(input('Enter the file name: '))
while not os.path.isfile(input_file):
    print('{} is not a valid file path...'.format(input_file))
    # BUG FIX: the original forgot to abspath the re-entered name.
    input_file = os.path.abspath(input('Enter the file name: '))

filename, extension = os.path.splitext(input_file)
out_file = '{}_NE_ID_only{}'.format(filename, extension)

# BUG FIX: the original printed the *output* file name here although it is
# the input file that gets read.
print('Reading {}...'.format(input_file))

with open(input_file) as in_fh, open(out_file, 'w') as out_fh:
    out_fh.write('ID\n')
    # Skip the first two (header) lines, then emit the second CSV column.
    for count, line in enumerate(in_fh, start=1):
        if count <= 2:
            continue
        temp_list = line.split(',')
        if len(temp_list) >= 2:
            out_fh.write('eNB_{}\n'.format(temp_list[1]))

print('Your file has been created successfully.\n'
      'RESULT FILE: {}\n'.format(out_file))
os.startfile(out_file)  # Windows-only: open the result with its default app
input('Enter any key to finish...')
|
class Solution(object):
    def findMin(self, nums):
        """
        Return the minimum element of a rotated sorted array with distinct
        values (LeetCode 153) via O(log n) binary search.

        :type nums: List[int]
        :rtype: int
        """
        # Not rotated (or length 1): the first element is already the minimum.
        if nums[0] <= nums[len(nums)-1]:
            return nums[0]

        def bins(vals, start, end):
            # Search for the pivot: the element smaller than both its left
            # neighbour and the first element of the array.
            if end < start:
                return
            if end == start:
                return vals[end]
            mid = (start + end) // 2
            if vals[mid] < vals[0] and vals[mid] < vals[mid-1]:
                return vals[mid]
            if vals[mid] >= vals[0]:
                # mid still lies in the left (larger) run -> pivot is right of it.
                return bins(vals, mid+1, end)
            return bins(vals, start, mid-1)

        # BUG FIX: the original computed the answer and then returned None.
        return bins(nums, 0, len(nums)-1)
from flask import Flask
from webapp.config import Config
# Module-level application object, configured from the Config class.
app = Flask(__name__)
app.config.from_object(Config)
# Imported last, for its side effect of registering the view functions on
# `app`; the late import breaks the circular dependency (webapp.routes
# imports `app` back from this module).
from webapp import routes
|
#!/bin/env python
import os
def distance_matrix(distance_matrix):
    """Print the pairwise distance matrix as an HTML table (Python 2, CGI-style stdout).

    distance_matrix -- dict of dicts: path -> path -> distance.  Only the last
    path component is shown in the row/column headers.
    """
    print "<br /><h1>DISTANCE MATRIX</h1><br />"
    print "<table class='table'>"
    # header row: one column per service
    print "<tr><th></th>"
    for distance1 in distance_matrix.iterkeys():
        print "<th>"+distance1.split("/")[-1]+"</th>"
    print "</tr>"
    # body: one row per service, iterating columns in the same key order
    for distance1 in distance_matrix.iterkeys():
        print "<tr>"
        print "<th>"+distance1.split("/")[-1]+"</th>"
        for distance2 in distance_matrix.iterkeys():
            print "<td>"+str(distance_matrix[distance1][distance2])+"</td>"
        print "</tr>"
    print "</table>"
def cluster(clusters):
    """Print each cluster as an HTML box listing its members' file names.

    clusters -- iterable of iterables of paths; only the last path component
    of each member is displayed.
    """
    print "<br /><h1>CLUSTERS</h1><br />"
    for _cluster in clusters:
        print "<div class='cluster_box'>"
        print "<br />".join([ x.split("/")[-1] for x in _cluster])
        print "</div>"
def tokens(token_weight):
    """Print a nested HTML table of token weights per service.

    token_weight -- dict: service path -> sequence of (token, weight) pairs.
    Rows are printed in reversed() order -- presumably the sequences arrive
    sorted ascending and the heaviest token should come first; confirm with
    the producer of this dict.
    """
    print "<br /><h1>TAGS</h1><br />"
    print "<table class='table'>"
    print "<tr><th>Service</th><th>Details</th></tr>"
    for tok_weight in token_weight.iterkeys():
        print "<tr>"
        print "<td>"+tok_weight.split("/")[-1]+"</td>"
        print "<td>"
        # inner table: one (token, weight) row per entry
        print "<table class='table' style='width:600px'>"
        print "<tr><th>Token</th><th>Weight</th></tr>"
        for token in reversed(token_weight[tok_weight]):
            print "<tr><td>"+token[0]+"</td><td>"+str(token[1])+"</td></tr>"
        print "</table>"
        print "</td>"
        print "</tr>"
    print "</table>"
def enriched_tag(enriched_tags):
print "<br /><h1>ENRICHED TAGS</h1><br/>"
print "<table class='table'>"
print "<tr><th>Service</th><th>Tags</th></tr>"
for enriched in enriched_tags.iterkeys():
print "<tr>"
print "<td>"+enriched.split("/")[-1]+"</td>"
print "<td><table class='table' style='width:600px'>"
print "<tr><th>Tag</th><th>Weight</th></tr>"
for tag in reversed(list(enriched_tags[enriched])):
print "<tr><td>"+tag[0]+"</td><td>"+str(tag[1])+"</td></tr>"
print "</td><tr>"
print "</table>"
print "</table>"
def header():
    """Emit the static HTML page header by cat-ing the template to stdout."""
    os.system("cat html/header.htm")
def footer():
    """Emit the closing body/html tags that match header()."""
    print "</body>"
    print "</html>"
|
from keras.models import Sequential
from keras.layers.core import Flatten, Dense, Dropout
from keras.layers.convolutional import Convolution2D, MaxPooling2D, ZeroPadding2D
from keras.optimizers import SGD
import cv2, numpy as np
from keras import backend as K
import json
import time
from keras.callbacks import TensorBoard
from keras.utils.data_utils import get_file
K.set_image_dim_ordering('th')  # Theano-style channels-first ordering (C, H, W)
CLASS_INDEX = None  # lazy module-level cache filled by decode_predictions()
CLASS_INDEX_PATH = 'https://s3.amazonaws.com/deep-learning-models/image-models/imagenet_class_index.json'
def _add_conv_stage(model, filters, n_convs, input_shape=None):
    """Append n_convs (ZeroPadding2D + 3x3 ReLU conv) pairs and one 2x2 max-pool."""
    for i in range(n_convs):
        if input_shape is not None and i == 0:
            model.add(ZeroPadding2D((1, 1), input_shape=input_shape))
        else:
            model.add(ZeroPadding2D((1, 1)))
        model.add(Convolution2D(filters, 3, 3, activation='relu'))
    model.add(MaxPooling2D((2, 2), strides=(2, 2)))

def VGG_19(weights_path=None):
    """Build the VGG-19 architecture (Keras 1 API, channels-first 3x224x224 input).

    weights_path -- optional path to an HDF5 weights file to load.
    Returns the uncompiled Sequential model.  The five convolutional stages
    of the original 40-line copy-paste are now data-driven.
    """
    model = Sequential()
    # (filters, number of conv layers) per stage -- the classic VGG-19 layout.
    for stage, (filters, n_convs) in enumerate(
            ((64, 2), (128, 2), (256, 4), (512, 4), (512, 4))):
        _add_conv_stage(model, filters, n_convs,
                        input_shape=(3, 224, 224) if stage == 0 else None)
    # Fully-connected classifier head.
    model.add(Flatten())
    model.add(Dense(4096, activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(4096, activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(1000, activation='softmax'))
    if weights_path:
        model.load_weights(weights_path)
    return model
def decode_predictions(preds):
    """Map a (batch, 1000) softmax array to ImageNet [class_id, name] entries.

    Downloads and caches the ImageNet class index on first use, stored in the
    module-level CLASS_INDEX.  Returns one entry per batch row (the argmax
    class of that row).
    """
    global CLASS_INDEX
    assert len(preds.shape) == 2 and preds.shape[1] == 1000
    if CLASS_INDEX is None:
        fpath = get_file('classes.json',
                         CLASS_INDEX_PATH,
                         cache_subdir='models')
        # BUG FIX: the original json.load(open(fpath)) leaked the file handle.
        with open(fpath) as f:
            CLASS_INDEX = json.load(f)
    return [CLASS_INDEX[str(i)] for i in np.argmax(preds, axis=-1)]
if __name__ == "__main__":
im = cv2.resize(cv2.imread('cock.jpg'), (224, 224)).astype(np.float32)
im[:,:,0] -= 103.939
im[:,:,1] -= 116.779
im[:,:,2] -= 123.68
im = im.transpose((2,0,1))
im = np.expand_dims(im, axis=0)
# Test pretrained model
model = VGG_19('vgg19_weights_tf.h5')
tensorboard = TensorBoard(log_dir='logs/{}'.format(time.time()))
tensorboard.set_model(model)
sgd = SGD(lr=0.1, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(optimizer=sgd, loss='categorical_crossentropy')
out = model.predict(im)
print (decode_predictions(out)) |
import unittest
from importlib import import_module
solution = import_module('main')
# Paired fixtures: in every expected output the leading and trailing digit of
# each word appear swapped relative to the input (e.g. "4Always0" ->
# "0Always4") -- presumably the contract of solution.main; confirm there.
test_cases = [
    "4Always0 5look8 4on9 7the2 4bright8 9side7 3of8 5life5",
    "5Nobody5 7expects3 5the4 6Spanish4 9inquisition0",
]
test_results = [
    "0Always4 8look5 9on4 2the7 8bright4 7side9 8of3 5life5",
    "5Nobody5 3expects7 4the5 4Spanish6 0inquisition9",
]
class TestSolution(unittest.TestCase):
    """Table-driven check of solution.main against the expected outputs."""

    def test_solution(self):
        for test, result in zip(test_cases, test_results):
            # subTest keeps the loop going after a failure and reports which
            # input broke -- the original stopped at the first mismatch
            # without naming it.
            with self.subTest(input=test):
                self.assertEqual(str(solution.main(test)), result)
if __name__ == '__main__':
    # Allows running this file directly with "python <file>"; test runners
    # import it instead.
    unittest.main()
|
from bs4 import BeautifulSoup
import requests
import json

# Scrape the full Digimon list table from digidb.io and dump it to digimon.json.
r = requests.get("http://digidb.io/digimon-list/")
soup = BeautifulSoup(r.content, 'html.parser')
dataTarget = soup.find_all('tr', class_='')

# Order of the <center> cells inside each table row -> output JSON keys.
STAT_KEYS = ('stage', 'type', 'attribute', 'memory', 'equip slots',
             'hp', 'sp', 'atk', 'def', 'int', 'spd')

dataJSON = []
for row in dataTarget:
    cells = row.find_all('center')
    record = {
        'no': row.b.text.replace('\xa0', ''),  # strip the non-breaking space padding
        'digimon': row.a.text,
        'image': row.img['src'],
    }
    # Indexing (not zip) is deliberate: a malformed row with missing cells
    # should fail loudly with IndexError, as the original did.
    for i, key in enumerate(STAT_KEYS):
        record[key] = cells[i].text
    dataJSON.append(record)

with open('digimon.json', 'w') as x:
    json.dump(dataJSON, x)
import argparse
import logging
import wandb
from action_recognition.utils.mq_tqdm import mp_tqdm, cmd_worker
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    parser = argparse.ArgumentParser()
    parser.add_argument("--train_group", default='', type=str)    # W&B group holding the finished training runs
    parser.add_argument("--group", default='', type=str)          # W&B group for inference (default: "inference_<train_group>")
    parser.add_argument("--process_cnt", default=1, type=int)     # parallel worker processes
    parser.add_argument("--prob_thresh", default=0.5, type=float) # forwarded to run_inference.py
    parser.add_argument("--random_split", default=False, action="store_true")
    args = parser.parse_args()
    # Collect finished runs of the training group, keyed by their split name.
    api = wandb.Api()
    group_runs = api.runs('donny/video_classification', {'group': args.train_group}, order='+config.split_by')
    ret = {}
    for run in group_runs:
        if run.state in ['finished']:
            ret[run.config['split_by']] = run.id
    cmd_format = 'pipenv run python run_inference.py --group {gp} --split {sp} --run_id {rid} --prob_thresh {pt}'
    if args.random_split:
        # Single command for the run trained on the random split (split id -1).
        cmds = [
            cmd_format.format(
                gp=f'inference_{args.train_group}' if not args.group else args.group,
                pt=args.prob_thresh, sp=-1, rid=ret['random'])
        ]
    else:
        # One command per non-random split; the numeric split id is the
        # suffix of the split name (e.g. "split_3" -> "3").
        cmds = [
            cmd_format.format(
                gp=f'inference_{args.train_group}' if not args.group else args.group,
                pt=args.prob_thresh, sp=split.split('_')[-1], rid=run_id)
            for split, run_id in ret.items() if split != 'random'
        ]
    logging.info('Running cmds:\n%s', '\n'.join(cmds))
    # Fan the shell commands out over process_cnt worker processes.
    mp_tqdm(cmd_worker, [{'cmd': c} for c in cmds], process_cnt=args.process_cnt)
|
import pathlib
# Project layout, anchored at the repository root (two levels above this file).
PATH_TO_ROOT = pathlib.Path(__file__).parent.parent
PATH_TO_INPUT = PATH_TO_ROOT.joinpath('input')
PATH_TO_DOMAINS = PATH_TO_INPUT.joinpath('domains.txt')
PATH_TO_QUERIES = PATH_TO_INPUT.joinpath('queries.txt')
PATH_TO_OUTPUT = PATH_TO_ROOT.joinpath('output')
PATH_TO_REPORT = PATH_TO_OUTPUT.joinpath('report')
# 'report' has no extension, so with_suffix appends one -> output/report.json
PATH_TO_REPORT_JSON = PATH_TO_REPORT.with_suffix('.json')
# Debug toggle for the importing modules; flipped by hand.
DEBUG = True
# DEBUG = False
|
from __future__ import division
import logging; _L = logging.getLogger('openaddr.render')
from .compat import standard_library
from glob import glob
from argparse import ArgumentParser
from itertools import combinations
from os.path import join, dirname, basename
from urllib.parse import urljoin
import json
from .compat import cairo
from . import SOURCES_DIR
from osgeo import ogr, osr
import requests
# Areas
WORLD, USA, EUROPE = 54029, 2163, 'Europe'
# WGS 84, http://spatialreference.org/ref/epsg/4326/
EPSG4326 = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
# US National Atlas Equal Area, http://spatialreference.org/ref/epsg/2163/
EPSG2163 = '+proj=laea +lat_0=45 +lon_0=-100 +x_0=0 +y_0=0 +a=6370997 +b=6370997 +units=m +no_defs'
# World Van der Grinten I, http://spatialreference.org/ref/esri/54029/
ESRI54029 = '+proj=vandg +lon_0=0 +x_0=0 +y_0=0 +R_A +ellps=WGS84 +datum=WGS84 +units=m +no_defs'
def make_context(width=960, resolution=1, area=WORLD):
    ''' Get Cairo surface, context, and drawing scale.

        The context is scaled and translated so that drawing happens in
        projected map coordinates.  Supported areas and their extents:

        WORLD  -- World Van der Grinten I (ESRI:54029),
                  (-18000000, 14050000) - (19500000, -7500000)
        USA    -- US National Atlas Equal Area (EPSG:2163),
                  (-2040000, 740000) - (2525000, -2130000)
        EUROPE -- World Van der Grinten I, European window,
                  (-2700000, 8700000) - (5800000, 3600000)
    '''
    # (left, top, right, bottom) per supported area.
    extents = {
        WORLD: (-18000000, 14050000, 19500000, -7500000),
        USA: (-2040000, 740000, 2525000, -2130000),
        EUROPE: (-2700000, 8700000, 5800000, 3600000),
    }
    if area not in extents:
        raise RuntimeError('Unknown area "{}"'.format(area))
    left, top, right, bottom = extents[area]
    aspect = (right - left) / (top - bottom)
    hsize = int(resolution * width)
    vsize = int(hsize / aspect)
    hscale = hsize / (right - left)
    vscale = (hsize / aspect) / (bottom - top)
    surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, hsize, vsize)
    context = cairo.Context(surface)
    context.scale(hscale, vscale)
    context.translate(-left, -top)
    return surface, context, hscale
def load_live_state():
    ''' Return the set of source names that are both cached and processed,
        according to the live state published at data.openaddresses.io.
    '''
    got = requests.get('http://data.openaddresses.io/state.json')
    # The first document's body is the (possibly relative) URL of the real
    # state table, so resolve it against the final response URL.
    got = requests.get(urljoin(got.url, got.json()))
    state_table = got.json()  # parse once instead of three times
    columns, rows = state_table[0], state_table[1:]
    state = [dict(zip(columns, row)) for row in rows]
    return {s['source'] for s in state if (s['cache'] and s['processed'])}
def load_fake_state(sources_dir):
    ''' Pretend that every source JSON file in the directory is a good source.

        Returns the set of their base file names.
    '''
    return {basename(path) for path in glob(join(sources_dir, '*.json'))}
def load_geoids(directory, good_sources):
    ''' Load a set of U.S. Census GEOIDs that should be rendered.

        Scans every *.json source in the directory and splits the GEOIDs it
        finds into (good, bad) sets according to whether the source file name
        is in good_sources.
    '''
    good_geoids, bad_geoids = set(), set()
    for path in glob(join(directory, '*.json')):
        with open(path) as file:
            data = json.load(file)
        census = data.get('coverage', {}).get('US Census', {})
        if 'geoid' in census:
            bucket = good_geoids if basename(path) in good_sources else bad_geoids
            bucket.add(census['geoid'])
    return good_geoids, bad_geoids
def load_iso3166s(directory, good_sources):
    ''' Load a set of ISO 3166 codes that should be rendered.

        Accepts either the "code" key or the older "alpha2" key ("code" wins
        when both are present, matching the original if/elif).  Codes are
        split into (good, bad) sets by whether the source file name is in
        good_sources.
    '''
    good_iso3166s, bad_iso3166s = set(), set()
    for path in glob(join(directory, '*.json')):
        with open(path) as file:
            data = json.load(file)
        iso = data.get('coverage', {}).get('ISO 3166', {})
        # One loop replaces the two copy-pasted branches of the original.
        for key in ('code', 'alpha2'):
            if key in iso:
                if basename(path) in good_sources:
                    good_iso3166s.add(iso[key])
                else:
                    bad_iso3166s.add(iso[key])
                break
    return good_iso3166s, bad_iso3166s
def load_geometries(directory, good_sources, area):
    ''' Load a set of GeoJSON geometries should be rendered.

        Reads the "coverage.geometry" GeoJSON of every *.json source in the
        directory, reprojects it from WGS84 into the map projection for the
        given area (EPSG:2163 for USA, ESRI:54029 otherwise), and splits the
        results into (good, bad) lists by whether the source file name is in
        good_sources.  Sources whose GeoJSON OGR cannot parse are skipped.
    '''
    good_geometries, bad_geometries = list(), list()
    osr.UseExceptions()
    # source (geographic) and target (map) spatial references
    sref_geo = osr.SpatialReference(); sref_geo.ImportFromProj4(EPSG4326)
    sref_map = osr.SpatialReference(); sref_map.ImportFromProj4(EPSG2163 if area == USA else ESRI54029)
    project = osr.CoordinateTransformation(sref_geo, sref_map)
    for path in glob(join(directory, '*.json')):
        with open(path) as file:
            data = json.load(file)
        if 'geometry' in data.get('coverage', {}):
            # round-trip through a JSON string because OGR parses text
            geojson = json.dumps(data['coverage']['geometry'])
            geometry = ogr.CreateGeometryFromJson(geojson)
            if not geometry:
                continue  # unparseable geometry: skip silently
            geometry.Transform(project)  # reproject in place
            if basename(path) in good_sources:
                good_geometries.append(geometry)
            else:
                bad_geometries.append(geometry)
    return good_geometries, bad_geometries
def stroke_features(ctx, features):
    ''' Outline the geometries of a sequence of OGR features on a Cairo context. '''
    return stroke_geometries(ctx, [f.GetGeometryRef() for f in features])
def stroke_geometries(ctx, geometries):
    ''' Outline a list of OGR (multi)polygon/(multi)linestring geometries on a
        Cairo context; other geometry types are skipped.

        Polygon rings are drawn closed (the path starts at the last point);
        linestrings are drawn open.
    '''
    for geometry in geometries:
        if geometry.GetGeometryType() in (ogr.wkbMultiPolygon, ogr.wkbMultiLineString):
            parts = geometry
        elif geometry.GetGeometryType() in (ogr.wkbPolygon, ogr.wkbLineString):
            parts = [geometry]
        else:
            continue
        for part in parts:
            # BUG FIX: compare the type code with ==, not "is" -- identity of
            # equal ints is a CPython small-int implementation detail.
            if part.GetGeometryType() == ogr.wkbPolygon:
                rings = part
            else:
                rings = [part]
            for ring in rings:
                points = ring.GetPoints()
                if geometry.GetGeometryType() in (ogr.wkbPolygon, ogr.wkbMultiPolygon):
                    draw_line(ctx, points[-1], points)
                else:
                    draw_line(ctx, points[0], points[1:])
                ctx.stroke()
def fill_features(ctx, features, muppx, rgb):
    ''' Fill the geometries of a sequence of OGR features on a Cairo context.

        muppx and rgb are forwarded to fill_geometries().
    '''
    return fill_geometries(ctx, [f.GetGeometryRef() for f in features], muppx, rgb)
def fill_geometries(ctx, geometries, muppx, rgb):
    ''' Fill polygons (or buffered points) with a solid RGB color.

        muppx is map units per reference pixel; bare points are buffered
        into a 2-pixel-radius circle so they stay visible at any scale.
    '''
    ctx.set_source_rgb(*rgb)
    for geometry in geometries:
        if geometry.GetGeometryType() == ogr.wkbMultiPolygon:
            parts = geometry
        elif geometry.GetGeometryType() == ogr.wkbPolygon:
            parts = [geometry]
        elif geometry.GetGeometryType() == ogr.wkbPoint:
            # Approximate a small circle with 3 segments per quadrant.
            buffer = geometry.Buffer(2 * muppx, 3)
            parts = [buffer]
        else:
            raise NotImplementedError()
        for part in parts:
            for ring in part:
                points = ring.GetPoints()
                # Close the ring by starting from its last point.
                draw_line(ctx, points[-1], points)
    ctx.fill()
def draw_line(ctx, start, points):
    ''' Move to start, then trace connected segments through points.
    '''
    ctx.move_to(*start)
    for vertex in points:
        ctx.line_to(*vertex)
# Command-line interface.  Defaults: whole world, 960 px wide, 1x resolution.
parser = ArgumentParser(description='Draw a map of worldwide address coverage.')
parser.set_defaults(resolution=1, width=960, area=WORLD)
parser.add_argument('--2x', dest='resolution', action='store_const', const=2,
                    help='Draw at double resolution.')
parser.add_argument('--1x', dest='resolution', action='store_const', const=1,
                    help='Draw at normal resolution.')
parser.add_argument('--width', dest='width', type=int,
                    help='Width in pixels.')
parser.add_argument('--use-state', dest='use_state', action='store_const',
                    const=True, default=False, help='Use live state from http://data.openaddresses.io/state.json.')
parser.add_argument('filename', help='Output PNG filename.')
parser.add_argument('--world', dest='area', action='store_const', const=WORLD,
                    help='Render the whole world.')
parser.add_argument('--usa', dest='area', action='store_const', const=USA,
                    help='Render the United States.')
parser.add_argument('--europe', dest='area', action='store_const', const=EUROPE,
                    help='Render Europe.')
def main():
    # Entry point: parse arguments, choose live or local coverage state,
    # and render the requested area to a PNG.
    args = parser.parse_args()
    good_sources = load_live_state() if args.use_state else load_fake_state(SOURCES_DIR)
    return render(SOURCES_DIR, good_sources, args.width, args.resolution,
                  args.filename, args.area)
def first_layer_list(datasource):
    ''' Return the features of the datasource's first layer as a list.

        Objects without a GetLayer method (e.g. None) yield an empty list.
    '''
    if hasattr(datasource, 'GetLayer'):
        return list(datasource.GetLayer(0))
    return []
def render(sources_dir, good_sources, width, resolution, filename, area=WORLD):
    ''' Render a worldwide address-coverage map to a PNG file.

        Resolution: 1 for 100%, 2 for 200%, etc.  Covered areas listed in
        good_sources are drawn green, known-but-unprocessed ones red;
        darker shades mark finer-grained (county/geometry) coverage.
    '''
    # Prepare output surface
    surface, context, scale = make_context(width, resolution, area)
    # Load data
    good_geoids, bad_geoids = load_geoids(sources_dir, good_sources)
    good_iso3166s, bad_iso3166s = load_iso3166s(sources_dir, good_sources)
    good_geometries, bad_geometries = load_geometries(sources_dir, good_sources, area)
    geodata = join(dirname(__file__), 'geodata')
    if area in (WORLD, EUROPE):
        # Natural Earth layers pre-projected to ESRI 54029.
        landarea_ds = ogr.Open(join(geodata, 'ne_50m_admin_0_countries-54029.shp'))
        coastline_ds = ogr.Open(join(geodata, 'ne_50m_coastline-54029.shp'))
        lakes_ds = ogr.Open(join(geodata, 'ne_50m_lakes-54029.shp'))
        countries_ds = ogr.Open(join(geodata, 'ne_50m_admin_0_countries-54029.shp'))
        countries_borders_ds = ogr.Open(join(geodata, 'ne_50m_admin_0_boundary_lines_land-54029.shp'))
        admin1s_ds = ogr.Open(join(geodata, 'ne_10m_admin_1_states_provinces-54029.shp'))
        us_state_ds = ogr.Open(join(geodata, 'cb_2013_us_state_20m-54029.shp'))
        us_county_ds = ogr.Open(join(geodata, 'cb_2013_us_county_20m-54029.shp'))
    elif area == USA:
        # US Census cartographic boundary files pre-projected to EPSG 2163.
        landarea_ds = ogr.Open(join(geodata, 'cb_2013_us_nation_20m-2163.shp'))
        coastline_ds = ogr.Open(join(geodata, 'cb_2013_us_nation_20m-2163.shp'))
        lakes_ds = None
        countries_ds = None
        countries_borders_ds = None
        admin1s_ds = None
        us_state_ds = ogr.Open(join(geodata, 'cb_2013_us_state_20m-2163.shp'))
        us_county_ds = ogr.Open(join(geodata, 'cb_2013_us_county_20m-2163.shp'))
    else:
        raise RuntimeError('Unknown area "{}"'.format(area))
    # Pick out features (first_layer_list tolerates the None datasources).
    landarea_features = first_layer_list(landarea_ds)
    coastline_features = first_layer_list(coastline_ds)
    lakes_features = [f for f in first_layer_list(lakes_ds) if f.GetField('scalerank') == 0]
    countries_features = first_layer_list(countries_ds)
    countries_borders_features = first_layer_list(countries_borders_ds)
    admin1s_features = first_layer_list(admin1s_ds)
    us_state_features = first_layer_list(us_state_ds)
    us_county_features = first_layer_list(us_county_ds)
    # Assign features to good or bad lists
    good_data_states = [f for f in us_state_features if f.GetFieldAsString('GEOID') in good_geoids]
    good_data_counties = [f for f in us_county_features if f.GetFieldAsString('GEOID') in good_geoids]
    bad_data_states = [f for f in us_state_features if f.GetFieldAsString('GEOID') in bad_geoids]
    bad_data_counties = [f for f in us_county_features if f.GetFieldAsString('GEOID') in bad_geoids]
    good_data_countries = [f for f in countries_features if f.GetFieldAsString('iso_a2') in good_iso3166s]
    good_data_admin1s = [f for f in admin1s_features if f.GetFieldAsString('iso_3166_2') in good_iso3166s]
    bad_data_countries = [f for f in countries_features if f.GetFieldAsString('iso_a2') in bad_iso3166s]
    bad_data_admin1s = [f for f in admin1s_features if f.GetFieldAsString('iso_3166_2') in bad_iso3166s]
    # Draw each border between neighboring states exactly once.
    state_borders = [s1.GetGeometryRef().Intersection(s2.GetGeometryRef())
                     for (s1, s2) in combinations(us_state_features, 2)
                     if s1.GetGeometryRef().Intersects(s2.GetGeometryRef())]
    # Set up some colors (0..1 RGB triples for cairo)
    silver = 0xdd/0xff, 0xdd/0xff, 0xdd/0xff
    white = 0xff/0xff, 0xff/0xff, 0xff/0xff
    black = 0, 0, 0
    light_red = 244/0xff, 109/0xff, 67/0xff
    dark_red = 215/0xff, 48/0xff, 39/0xff
    light_green = 0x74/0xff, 0xA5/0xff, 0x78/0xff
    dark_green = 0x1C/0xff, 0x89/0xff, 0x3F/0xff
    # Map units per reference pixel (http://www.w3.org/TR/css3-values/#reference-pixel)
    muppx = resolution / scale
    # Fill land area background
    fill_features(context, landarea_features, muppx, silver)
    # Fill populated countries
    fill_features(context, bad_data_countries, muppx, light_red)
    fill_features(context, good_data_countries, muppx, light_green)
    # Fill Admin-1 (ISO-3166-2) subdivisions
    fill_features(context, bad_data_admin1s, muppx, light_red)
    fill_features(context, good_data_admin1s, muppx, light_green)
    # Fill populated U.S. states
    fill_features(context, bad_data_states, muppx, light_red)
    fill_features(context, good_data_states, muppx, light_green)
    # Fill populated U.S. counties
    fill_features(context, bad_data_counties, muppx, dark_red)
    fill_features(context, good_data_counties, muppx, dark_green)
    # Fill other given geometries
    fill_geometries(context, bad_geometries, muppx, dark_red)
    fill_geometries(context, good_geometries, muppx, dark_green)
    # Outline countries and boundaries, fill lakes
    context.set_source_rgb(*black)
    context.set_line_width(.25 * muppx)
    stroke_geometries(context, state_borders)
    stroke_features(context, countries_borders_features)
    fill_features(context, lakes_features, muppx, white)
    context.set_source_rgb(*black)
    context.set_line_width(.5 * muppx)
    stroke_features(context, coastline_features)
    # Output
    surface.write_to_png(filename)
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    exit(main())
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-04-03 11:49
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated Django 1.11 migration: enforce uniqueness of
    # (limit, value) pairs on the transactionfeerange table.
    dependencies = [
        ('fees', '0006_auto_20180403_0937'),
    ]
    operations = [
        migrations.AlterUniqueTogether(
            name='transactionfeerange',
            unique_together=set([('limit', 'value')]),
        ),
    ]
|
# Copyright (c) 2022 Dell Inc. or its subsidiaries.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from PyPowerFlex import exceptions
from PyPowerFlex.objects import utility
import tests
class TestPowerFlexUtility(tests.PyPowerFlexTestCase):
    """Unit tests for the utility endpoints that query aggregated
    statistics across all storage pools and all volumes."""

    def setUp(self):
        super(TestPowerFlexUtility, self).setUp()
        self.client.initialize()
        # Canned HTTP responses: in Valid mode both querySelectedStatistics
        # endpoints succeed with an empty payload.
        self.MOCK_RESPONSES = {
            self.RESPONSE_MODE.Valid: {
                '/types/StoragePool/instances/action/querySelectedStatistics':
                    {},
                '/types/Volume/instances/action/querySelectedStatistics':
                    {},
            }
        }

    def test_get_statistics_for_all_storagepools(self):
        self.client.utility.get_statistics_for_all_storagepools()

    def test_get_statistics_for_all_storagepools_bad_status(self):
        # A failing HTTP status must surface as PowerFlexClientException.
        with self.http_response_mode(self.RESPONSE_MODE.BadStatus):
            self.assertRaises(exceptions.PowerFlexClientException,
                              self.client.utility.get_statistics_for_all_storagepools)

    def test_get_statistics_for_all_volumes(self):
        self.client.utility.get_statistics_for_all_volumes()

    def test_get_statistics_for_all_volumes_bad_status(self):
        with self.http_response_mode(self.RESPONSE_MODE.BadStatus):
            self.assertRaises(exceptions.PowerFlexClientException,
                              self.client.utility.get_statistics_for_all_volumes)
|
# *********************************************************************************************
# Program to update dynamodb with latest data from mta feed. It also cleans up stale entried from db
# Usage python dynamodata.py
# *********************************************************************************************
from boto3.dynamodb.conditions import Key, Attr
from collections import OrderedDict
from threading import Thread
import time, sys, boto3
# local packages
sys.path.append('./utils')
from mta_updates import MTAUpdates
import aws as aws
### YOUR CODE HERE ####
DYNAMO_TABLE_NAME = 'mtaData'
# dynamodb = boto3.resource('dynamodb')
dynamodb = aws.getResource('dynamodb','us-east-1')
try:
table = dynamodb.create_table(
AttributeDefinitions=[
{
'AttributeName': 'trip_id',
'AttributeType': 'S'
}
],
TableName=DYNAMO_TABLE_NAME,
KeySchema=[
{
'AttributeName': 'trip_id',
'KeyType': 'HASH'
}
],
ProvisionedThroughput={
'ReadCapacityUnits': 5,
'WriteCapacityUnits': 5
}
)
table.meta.client.get_waiter('table_exists').wait(TableName='users')
print "New Table Created.\n"
except Exception as e:
table = dynamodb.Table('mtaData')
print "Table Already Exists.\n"
def data_purge(table):
    # Background loop: every 60 seconds, delete trips whose timestamp is
    # older than two minutes.
    while True:
        time.sleep(60)
        print("purging...")
        # NOTE(review): timestamps are compared as *strings* ("LE" on a
        # str(time.time()) value); lexicographic order matches numeric
        # order only while the values have equal digit counts -- confirm.
        expiretime = str(time.time() - 120)
        response = table.scan(
            ScanFilter={"timestamp": {
                "AttributeValueList": [expiretime],
                "ComparisonOperator": "LE"}
            }
        )
        for item in response['Items']:
            table.delete_item(
                Key={'trip_id': item['trip_id']}
            )
def data_update(table):
    # Pull the latest MTA trip updates and upsert each one into DynamoDB,
    # then pause 30 seconds before the caller loops again.
    mta_updater = MTAUpdates()
    mta_updater.update()
    for key in mta_updater.trip_updates:
        # print(mta_updater.trip_updates[key])
        table.put_item(Item=mta_updater.trip_updates[key].to_json())
        print('put!')
    time.sleep(30)
# Run the purge loop on a daemon thread so it dies with the main thread,
# while the main thread keeps pushing fresh updates.
t2 = Thread(name='datapurge', target=data_purge, args=(table,))
t2.setDaemon(True)
try:
    t2.start()
    while True:
        data_update(table)
except KeyboardInterrupt:
    # BUG FIX: the bare "exit" expression was a no-op (the builtin was
    # referenced but never called); exit explicitly instead.
    sys.exit(0)
|
"""
2015-2016 Constantine Belev const.belev@ya.ru
"""
import numpy as np
import scipy as sp
from scipy import sparse, optimize
from lowrank_matrix import ManifoldElement
from approximator_api import AbstractApproximator
from manifold_functions import TangentVector, svd_retraction
from manifold_functions import riemannian_grad_partial, delta_on_sigma_set
from scipy.sparse import linalg, csc_matrix
from scipy.optimize import minimize_scalar
EPS = 1e-9
class MGDApproximator(AbstractApproximator):
    """Fixed-rank matrix approximation via Riemannian gradient descent
    with momentum (MGD) on the manifold of rank-r matrices.

    The solve is warm-started rank by rank: each rank in 1..r-1 gets a
    short run whose result seeds the next rank.

    NOTE(review): self.norm_bound, self.sigma_set and self.density are
    read here but appear to be populated by self.initialization() in the
    base class -- confirm.
    """
    def __init__(self):
        AbstractApproximator.__init__(self)
        self.target_matrix = None  # matrix to approximate
        self.density = None
        self.norm_bound = None     # scale used by the stopping criterion
        self.sigma_set = None      # index set of observed entries
        self.x = None              # current point on the manifold
        self.grad = None           # negative Riemannian gradient at x
        self.v = None              # momentum (velocity) tangent vector
        self.mu = None             # momentum coefficient

    def approximate(self, a, r, sigma_set=None, x0=None, mu=0.85, maxiter=900, eps=EPS):
        """Approximate matrix a with a rank-r manifold element."""
        return self._approximate(a, r, sigma_set=sigma_set, x0=x0, mu=mu, maxiter=maxiter, eps=eps)

    def _approximate(self, a, r, sigma_set=None, x0=None, mu=0.85, maxiter=900, eps=EPS):
        if a is None:
            raise ValueError("target matrix must be provided")
        self.target_matrix = a
        self.initialization(sigma_set)
        self.mu = mu
        # Rank continuation: brief solves at lower ranks warm-start the
        # final full-rank run.
        for rank in range(1, r):
            x0, it, err = self.mgd_approximate(r=rank, x0=x0,
                                               maxiter=50, eps=eps)
        return self.mgd_approximate(r=r, x0=x0, maxiter=maxiter, eps=eps)

    def mgd_approximate(self, r, x0=None, maxiter=100, eps=1e-9):
        """Run momentum gradient descent at fixed rank r.

        Returns (final point, iterations used, loss history).
        """
        self.init_condition(r, x0)
        error_history = []
        for it in range(maxiter):
            self.step()
            error_history.append(self.loss())
            print('it: %s, error: %s' % (it, error_history[-1]))
            # Converged once the loss drops below the relative tolerance.
            if error_history[-1] < self.norm_bound * eps:
                return self.x, it, error_history
        return self.x, maxiter, error_history

    def step(self):
        # One iteration: refresh the gradient, then move with momentum.
        self.mgd_grad()
        self.mgd_step()
        pass

    def init_condition(self, r, x0):
        # Start from x0 (or a random rank-r element of matching norm) and
        # reset gradient and momentum state.
        if x0 is None:
            x0 = ManifoldElement.rand(self.target_matrix.shape, r, norm=self.norm_bound)
        self.x = ManifoldElement(x0, r)
        self.grad = -TangentVector(self.x, riemannian_grad_partial(self.x, self.target_matrix,
                                                                   self.sigma_set, manifold_elems=True))
        self.v = TangentVector.zero(self.x)
        return None

    def mgd_grad(self):
        # Negative Riemannian gradient of the observed-entries loss.
        riemannian_grad = riemannian_grad_partial(self.x, self.target_matrix,
                                                  self.sigma_set, manifold_elems=True)
        self.grad = -TangentVector(self.x, riemannian_grad)
        return None

    def mgd_step(self):
        # Transport momentum to the current point, line-search a step size
        # along the gradient, blend, then retract back onto the manifold.
        self.v = self.mu * self.v.transport(self.x)
        alpha = minimize_scalar(lambda x: self.cost_func(x), bounds=(0., 10.), method='bounded')['x']
        if alpha is None:
            alpha = 1.
        self.v += (1. - self.mu) * alpha * self.grad
        self.x = svd_retraction(self.x + self.v.release(), self.x.r)
        # self.armijo_backtracking(lambda x: self.cost_raw(x), alpha)[0]
        return None

    def armijo_backtracking(self, func, alpha, maxiter=20):
        """
        Returns step and next point, minimizing given functional

        NOTE(review): this method references self.conj, which is never
        assigned in this class, and x_new is unbound when the loop body
        never runs; it appears unused (see the commented-out call in
        mgd_step) -- verify before enabling.

        Parameters
        ----------
        func : function
            function to minimize
        x : ManifoldElement
            initial point
        alpha : float
            estimated line search parameter
        direction : TangentVector
            direction to move from initial point
        conj_direction : TangentVector
            conjugated direction
        Returns
        -------
        x_new :
            next point (x + step * direction)
        step : float
            optimal step
        """
        scale = -0.0001 * alpha
        for i in range(maxiter):
            x_new = svd_retraction(self.x + (0.5 ** i * alpha) * self.grad.release(), self.x.r)
            bound = (0.5 ** i * scale) * self.grad.release().scalar_product(self.conj.release())
            if self.cost_raw(self.x) - self.cost_raw(x_new) >= bound:
                return x_new, 0.5 ** i * scale
        return x_new, 0.5 ** maxiter * scale

    def cost_raw(self, elem):
        """
        Compute function 0.5 *|| a[sigma] - elem[sigma] ||_F^2

        Parameters
        ----------
        a : np.ndarray or sp.sparse.spmatrix
            matrix to approximate
        elem : ManifoldElement
            approximation
        sigma_set : tuple of np.ndarrays
            index set of x indices and y indices
        Returns
        -------
        out: float
            cost function
        """
        return 0.5 * sp.sparse.linalg.norm(elem.evaluate(self.sigma_set) - self.target_matrix) ** 2

    def cost_func(self, param):
        # Loss after stepping by param along the gradient (plus momentum)
        # and retracting -- the objective of the 1-D line search above.
        retracted = svd_retraction(self.x + self.v.release() + param * self.grad.release(), self.x.r)
        return self.cost_raw(retracted)
|
import datetime
import unittest
from time import sleep
from selenium import webdriver
class EnvironmentSetup(unittest.TestCase):
    """Selenium smoke test: open the site in Chrome, then tear down cleanly."""

    def setUp(self):
        # Absolute driver paths for this machine; swap the comments to run
        # under Firefox instead of Chrome.
        chrome_path = r"C:\Users\Idur\PycharmProjects\RaptAutomation\Drivers\chromedriver.exe"
        #firefox_path = r"C:\Users\Idur\PycharmProjects\RaptAutomation\Drivers\geckodriver.exe"
        # Create Chrome Browser
        self.driver = webdriver.Chrome(executable_path=chrome_path)
        #self.driver = webdriver.Firefox(executable_path=firefox_path)
        self.driver.implicitly_wait(10)

    def test_browser(self):
        # Load the site and give it time to render.
        driver = self.driver
        driver.get("http://crimsoninnovative.com")
        #driver.get("http://google.com")
        sleep(10)

    def tearDown(self):
        # Idiom fix: compare with None using identity, not "!= None".
        if self.driver is not None:
            print("*******************")
            print("Environment-1 Destroyed")
            print("Run Completed at :" + str(datetime.datetime.now()))
            # self.driver.close()
            self.driver.quit()
|
from django.contrib import admin
from .models import *
# Expose the billing/payment models in the Django admin site.
admin.site.register(Tiers)
admin.site.register(CompteBancaire)
admin.site.register(Document)
admin.site.register(TypeDocument)
admin.site.register(ModePaiement)
admin.site.register(Paiement)
|
class Solution:
    def generateMatrix(self, n):
        """
        Build an n x n matrix filled with 1..n*n in clockwise spiral order.

        :type n: int
        :rtype: List[List[int]]
        """
        grid = [[0] * n for _ in range(n)]
        # Walk clockwise, turning right whenever the next cell is either
        # off-grid or already filled.
        deltas = [(0, 1), (1, 0), (0, -1), (-1, 0)]
        row = col = direction = 0
        for value in range(1, n * n + 1):
            grid[row][col] = value
            dr, dc = deltas[direction]
            nr, nc = row + dr, col + dc
            if not (0 <= nr < n and 0 <= nc < n) or grid[nr][nc]:
                direction = (direction + 1) % 4
                dr, dc = deltas[direction]
                nr, nc = row + dr, col + dc
            row, col = nr, nc
        return grid
# Quick manual check: prints [[1]].
print(Solution().generateMatrix(1))
def normalizer(data, mean, std):
    """
    Standardize features: subtract the mean, then divide by the
    standard deviation.

    data is an ndarray (anything supporting broadcast arithmetic works).
    """
    centered = data - mean
    return centered / std
"""Script para cythonize todos los .pyx"""
import sys, os, shutil, contextlib#, argparse
from Cython.Build import Cythonize, cythonize
#TO DO: incluir command line options
@contextlib.contextmanager
def redirect_sys_argv(*argv):
    """Context manager that temporarily replaces sys.argv with argv."""
    saved = sys.argv[:]
    sys.argv[:] = argv
    try:
        yield
    finally:
        # Restore the original argv even if the body raised.
        sys.argv[:] = saved
def get_pyx_files(path, abspath=True):
    """Return a list of the .pyx files found directly inside *path*.

    If abspath is true the entries are absolute paths, otherwise just
    the bare file names. Directories ending in .pyx are skipped.
    """
    path = os.path.normpath(os.path.abspath(path))
    found = []
    for name in os.listdir(path):
        if not name.endswith(".pyx"):
            continue
        candidate = os.path.join(path, name)
        if os.path.isfile(candidate):
            found.append(candidate if abspath else name)
    return found
def compile_dir(path):
    """Cythonize every .pyx file in *path* in-place, with annotation."""
    targets = get_pyx_files(path)
    print("De:", path)
    if not targets:
        print("Nada para compilar")
        return
    print("Se compilaran:", [os.path.basename(t) for t in targets])
    Cythonize.main(['-a', '-i'] + targets)
def compile_dir_with_numpy(path, cleanup=True):
    """Cythonize all .pyx files in *path* in-place, including NumPy headers.

    Runs a distutils build_ext --inplace with sys.argv temporarily
    swapped out, so it can be invoked from an arbitrary script.
    """
    from distutils.core import setup
    import numpy
    path = os.path.normpath(os.path.abspath(path))
    temp = os.path.join(path, ".temp_build")
    # Fake the argv that distutils' setup() expects to parse.
    with redirect_sys_argv(os.path.join(path, "make_virtual_script.py"), "build_ext", "--inplace", "-t", temp):
        setup(
            ext_modules=cythonize("./*.pyx", annotate=True),
            include_dirs=[numpy.get_include()]
        )
    # Drop the temporary build directory unless the caller wants it kept.
    if cleanup and os.path.exists(temp):
        shutil.rmtree(temp)
if __name__ == "__main__":
    # Pass "with_numpy" on the command line to build with NumPy headers.
    if "with_numpy" in sys.argv[1:]:
        compile_dir_with_numpy(os.getcwd())
    else:
        compile_dir(os.getcwd())
|
from geventwebsocket.handler import WebSocketHandler
from gevent.pywsgi import WSGIServer
import json
from flask import Flask, jsonify, request, abort, render_template, Response
import gevent
from redis import Redis
from index import JIRARedisIndex
from jira.client import JIRA
import module
app = Flask(__name__)
@module.loaded
def init():
    # Build the OAuth-authenticated JIRA client from the module options.
    module.g.client = JIRA(
        options={
            'server': module.opts['server']
        },
        oauth={
            'access_token': module.opts['oauth_access_token'],
            'access_token_secret': module.opts['oauth_access_token_secret'],
            'consumer_key': module.opts['oauth_consumer_key'],
            'key_cert': module.opts['oauth_key_cert'],
            'resilient': True,
        }
    )
    module.g.redis = Redis()
    module.g.index = JIRARedisIndex(module.g.client, redis_client=module.g.redis)
    # Serve the Flask app on a websocket-capable gevent server.
    module.g.http_server = WSGIServer((module.opts.get('listen_host', '0.0.0.0'),
                                       int(module.opts.get('listen_port', '8085'))), app,
                                      handler_class=WebSocketHandler)
    module.g.http_server.start()
    # NOTE(review): this stores the Connection *class*, not an instance;
    # its classmethods operate on the shared Connection.clients set.
    module.g.connections = Connection
@module.unloaded
def deinit():
    # Tear down: drop redis connections, stop the HTTP server, then
    # forget all websocket clients.
    module.g.redis.connection_pool.disconnect()
    module.g.http_server.stop()
    module.g.connections.stop_all()
@app.route('/search/key/<key>')
def search_key(key):
    """Search issues by key prefix.

    Query params: ?full=1 returns full documents; ?limit=N (clamped to
    1..50) caps the result count.
    """
    data_key = 'full' in request.args and module.g.index.data_key
    try:
        limit = max(1, min(50, int(request.args.get('limit', 50))))
    except (TypeError, ValueError):
        # BUG FIX: was a bare "except:" -- only a non-numeric ?limit
        # should fall back to the maximum, not every possible error.
        limit = 50
    return jsonify(results=module.g.index.search_by_key(key.lower(), data_key, limit=limit))
@app.route('/search/summary')
def search():
    """Full-text search over issue summaries; ?full=1 returns full documents."""
    wants_full = 'full' in request.args
    data_key = wants_full and module.g.index.data_key
    results = module.g.index.search(request.args.get('q', ''), limit=50, data_key=data_key)
    return jsonify(results=results)
@app.route('/issue/<key>')
def get_by(key):
    """Return a single issue document by key; 404 when unknown."""
    data_key = 'full' in request.args and module.g.index.data_key
    issue = module.g.index.get_by_key(key.upper(), data_key=data_key)
    if issue:
        return jsonify(**issue)
    abort(404)
class Connection(object):
    """State for one connected websocket client.

    Tracks which issue keys were already pushed (sent_ids) so that
    incremental queries only transmit new documents, plus the last
    single-issue request and last search string for live updates.
    """
    clients = set()  # all live connections, shared class-wide

    def __init__(self, ws):
        self.ws = ws
        self.sent_ids = set()           # issue keys already sent to this client
        self.last_requested_id = None   # key of the last single-issue fetch
        self.last_query = None          # last search string, replayed on updates
        self.clients.add(self)

    def stop(self):
        self.clients.discard(self)

    @classmethod
    def stop_all(cls):
        cls.clients.clear()

    def send(self, **kwargs):
        # Serialize keyword arguments as a single JSON message.
        self.ws.send(json.dumps(kwargs))

    def send_raw(self, raw):
        self.ws.send(raw)

    def did_send_key(self, key):
        return key in self.sent_ids

    def is_last_requested_key(self, key):
        return key == self.last_requested_id

    @classmethod
    def publish_updated_issue(cls, raw, small):
        # Push the small document to every client that has seen this key;
        # clients currently viewing the issue also get the raw document.
        key = raw['key']
        payload = None
        for client in cls.clients:
            if client.did_send_key(key):
                if payload is None:
                    # Serialize lazily, once, for all interested clients.
                    payload = json.dumps(dict(
                        c='update',
                        i=small
                    ))
                client.send_raw(payload)
            if client.is_last_requested_key(key):
                client.send(
                    c='updateraw',
                    i=raw
                )

    def query(self, query, full, seq=None):
        # Incremental search: keys already sent are returned as bare ids;
        # new hits are sent as full documents and recorded in sent_ids.
        data_key = full and module.g.index.data_key or module.g.index.smalldata_key
        ids = module.g.index.search(query, limit=50, data_key=data_key, return_id_list=True, autoboost=True)
        r = [id for id in ids if id not in self.sent_ids]
        datas = module.g.index._load_ids(r, None, data_key)
        data_dict = {v['key']: v for v in datas}
        r = []
        for id in ids:
            if id in self.sent_ids:
                r.append(id)
            elif id in data_dict:
                r.append(data_dict[id])
                self.sent_ids.add(id)
        self.last_query = query
        if seq is not None:
            # Echo the client's sequence number so it can match the reply.
            self.send(
                r=r,
                s=seq
            )
        else:
            return r

    @classmethod
    def trigger_update(cls):
        # Re-run every client's last query and push any new results.
        for client in cls.clients:
            if not client.last_query:
                continue
            r = client.query(client.last_query, False)
            client.send(
                i=r,
                q=client.last_query,
                c='updatesearch'
            )
@app.route('/ws')
def ws():
    # Websocket endpoint speaking a small JSON protocol:
    #   c='query' -> incremental search, c='get' -> fetch one issue;
    #   's' is a client sequence number echoed back in replies.
    ws = request.environ.get('wsgi.websocket')
    if not ws:
        return
    con = Connection(ws)
    try:
        while True:
            message = ws.receive()
            if not message:
                break
            data = json.loads(message)
            cmd = data.get('c')
            seq = data.get('s')
            if cmd == 'query':
                query = data.get('q')
                full = data.get('full')
                con.query(query, full, seq)
            elif cmd == 'get':
                key = data.get('key')
                full = data.get('full')
                data_key = full and module.g.index.data_key
                issue = module.g.index.get_by_key(key.upper(), data_key=data_key)
                # Remember the key so raw live updates can be pushed later.
                con.last_requested_id = key.upper()
                ws.send(json.dumps(dict(
                    r=issue,
                    s=seq
                )))
    finally:
        # Drop the connection from the shared registry on any exit path.
        con.stop()
    return Response("")
@app.route("/")
def idx():
    # Serve the single-page search UI.
    return render_template("index.html")
@app.route("/webhook", methods=["POST"])
def webhook():
    # JIRA webhook: re-index the changed issue on a greenlet so the
    # webhook request returns immediately.
    data = json.loads(request.data)
    key = data['issue']['key']
    gevent.spawn(update_issue, key)
    return Response("")
def update_issue(key):
    # Fetch the issue fresh from JIRA, re-index and boost it, then push
    # the change out to connected websocket clients.
    issue = module.g.client.issue(key, expand="renderedFields")
    module.g.index.index(issue)
    module.g.index.boost(issue.key)
    module.g.connections.publish_updated_issue(
        raw=issue.raw,
        small=module.g.index.jira_issue_to_smalldata(issue)
    )
    module.g.connections.trigger_update()
from django.test import TestCase
# Create your tests here.
from django.test import TestCase
# Create your tests here.
import uuid
import pytest
from django.contrib.auth.models import User
from django.urls import reverse
from django.utils.encoding import force_bytes
from django.utils.http import urlsafe_base64_encode
from .models import Heroes, Guide, Role, Profile, Runes, Neutrals
from .tokens import account_activation_token
@pytest.mark.django_db
def test_user_create():
    # Creating a user should leave exactly one User row.
    User.objects.create_user('rocky', 'bumbini@gmail.com', 'password')
    assert User.objects.count() == 1

@pytest.mark.django_db
def test_unauthorized(client):
    # Anonymous access to the admin redirects (to the login page).
    response = client.get('/admin/')
    assert response.status_code == 302

@pytest.mark.django_db
def test_superuser_view(admin_client):
    # The admin_client fixture is already logged in as a superuser.
    response = admin_client.get('/admin/')
    assert response.status_code == 200
@pytest.mark.django_db
def test_user_detail(client, django_user_model):
    # A logged-in user can view their own profile page.
    django_user_model.objects.create_user(username='someone', password='password')
    client.login(username='someone', password='password')
    url = reverse('profile')
    response = client.get(url)
    assert response.status_code == 200
@pytest.mark.django_db
@pytest.mark.parametrize("username, status_code", [
    ("someone", 200),
    ("someone2", 200)
])
def test_register(client, django_user_model, username, status_code):
    """Each parametrized user can log in and reach the profile page."""
    django_user_model.objects.create_user(username=username, password='password')
    client.login(username=username, password='password')
    url = reverse('profile')
    response = client.get(url)
    # BUG FIX: assert against the parametrized expectation instead of a
    # hard-coded 200, so the status_code parameter actually matters.
    assert response.status_code == status_code
@pytest.mark.django_db
def test_guide_add(client, django_user_model):
    # The guide-creation form requires an authenticated user.
    django_user_model.objects.create_user(username='someone', password='password')
    client.login(username='someone', password='password')
    url = reverse('guide_new')
    response = client.get(url)
    assert response.status_code == 200

@pytest.mark.django_db
def test_home_view(client):
    # The landing page is publicly accessible.
    response = client.get('/')
    assert response.status_code == 200
@pytest.mark.django_db
def test_5_fivexfive(client):
    # BUG FIX: renamed from test_5, which was shadowed (and therefore
    # never collected by pytest) by a later test_5 definition in this module.
    response = client.get('/fivexfive/')
    assert response.status_code == 200
@pytest.mark.django_db
def test_4_root(client):
    # BUG FIX: renamed from test_4, which was shadowed (and therefore
    # never collected by pytest) by a later test_4 definition in this module.
    response = client.get('/')
    assert response.status_code == 200
# Simple GET smoke tests for public pages.
# NOTE(review): beware duplicate test function names in this module --
# pytest only collects the *last* definition of a given name.
@pytest.mark.django_db
def test_3(client):
    response = client.get('/fivexfive/')
    assert response.status_code == 200

@pytest.mark.django_db
def test_0(client):
    response = client.get('/register/')
    assert response.status_code == 200

@pytest.mark.django_db
def test_10(client):
    response = client.get('/logout/')
    assert response.status_code == 200

@pytest.mark.django_db
def test_1(client):
    response = client.get('/login/')
    assert response.status_code == 200

@pytest.mark.django_db
def test_contact(client):
    response = client.get('/contact/')
    assert response.status_code == 200

@pytest.mark.django_db
def test_2(client):
    response = client.get('/login/')
    assert response.status_code == 200

@pytest.mark.django_db
def test_4(client):
    response = client.get('/articles/')
    assert response.status_code == 200

@pytest.mark.django_db
def test_5(client):
    response = client.get('/guides/')
    assert response.status_code == 200
@pytest.fixture
def test_password():
    # Shared password used by the user-factory fixtures below.
    return 'strong-test-pass'

@pytest.fixture
def create_user(db, django_user_model, test_password):
    # Factory fixture: builds a user with the shared password and a random
    # UUID username unless one is supplied.
    def make_user(**kwargs):
        kwargs['password'] = test_password
        if 'username' not in kwargs:
            kwargs['username'] = str(uuid.uuid4())
        return django_user_model.objects.create_user(**kwargs)
    return make_user

@pytest.fixture
def auto_login_user(db, client, create_user, test_password):
    # Factory fixture: logs the given (or freshly created) user in and
    # returns the (client, user) pair.
    def make_auto_login(user=None):
        if user is None:
            user = create_user()
        client.login(username=user.username, password=test_password)
        return client, user
    return make_auto_login
@pytest.mark.django_db
def test_auth_view(auto_login_user):
    # Registration page is reachable even when already logged in.
    client, user = auto_login_user()
    response = client.get('/register/')
    assert response.status_code == 200

@pytest.mark.django_db
def test_guideop_view(auto_login_user):
    # Smoke-test the guide view with a full query string of build options.
    client, user = auto_login_user()
    response = client.get(
        '/guides/cur_guide/?name=Terrorblade&role=Mid(2)&build_runes=bounty&build_opa=invis&build_jopa=arcane&build_skill=313134311224224&build_sum=1&build_sum2=17&build_sum3=31&build_sum4=39&build_sum5=45&build_items=refr&build_items2=mek&build_items3=guard&build_items4=refr&build_items5=guard')
    assert response.status_code == 200

@pytest.mark.django_db
def test_activate_view(auto_login_user):
    # The e-mail activation link (uid + token) should resolve successfully.
    client, user = auto_login_user()
    uid = urlsafe_base64_encode(force_bytes(user.pk))
    token = account_activation_token.make_token(user)
    url = reverse('activate', args=[uid, token])
    response = client.get(url)
    assert response.status_code == 200
|
import numpy as np
class Estimation:
    """Inverse-distance-weighting estimator over scattered (x, y, v) samples."""

    def __init__(self, datax, datay, dataz):
        self.x = datax
        self.y = datay
        self.v = dataz

    def estimate(self, x, y, sigma=0.0, p=-2):
        """
        Estimate the value at coordinate (x, y) from this instance's
        samples, using inverse-distance weights d**p smoothed by sigma.
        """
        dist = np.sqrt((x - self.x) ** 2 + (y - self.y) ** 2) ** (1 + sigma)
        if dist.min() == 0:
            # Exact hit on a sample point: return that sample's value.
            return self.v[dist.argmin()]
        weights = dist ** p
        return np.sum(self.v * weights / np.sum(weights))
def idw_mesh(points, n_points=50, sigma=0.6, map_points=True, xmin=0, xmax=2, ymin=0, ymax=2):
    """
    Generate an interpolated height-map mesh via Inverse Distance Weighting.

    :param points: (N, 3) array of sample points to interpolate
    :param n_points: grid resolution per axis (n_points x n_points cells)
    :param sigma: smoothing exponent applied to the distance
    :param map_points: append the original sample points when True
    :param xmin: grid x lower bound
    :param xmax: grid x upper bound
    :param ymin: grid y lower bound
    :param ymax: grid y upper bound
    :return: array of interpolated (x, y, z) rows
    """
    estimator = Estimation(points[:, 0], points[:, 1], points[:, 2])
    xs = np.linspace(xmin, xmax, n_points)
    ys = np.linspace(ymin, ymax, n_points)
    mesh = np.zeros((n_points ** 2, 3))
    for row, gx in enumerate(xs):
        for col, gy in enumerate(ys):
            mesh[row * n_points + col, :] = gx, gy, estimator.estimate(gx, gy, sigma=sigma)
    return np.vstack((mesh, points)) if map_points else mesh
|
DEBUG = True
import os
USERID = 0
# session
SECRET_KEY = os.urandom(24)
# Database connection settings (credentials intentionally left as None
# in this initial commit; fill in per deployment).
HOSTNAME = None
PORT = '3306'
DATABASE = 'alter'
USERNAME = None
PASSWORD = None
SQLALCHEMY_POOL_RECYCLE = 20
SQLALCHEMY_POOL_SIZE = 100
SQLALCHEMY_TRACK_MODIFICATIONS = True
# NOTE(review): DEBUG is assigned twice and SQLALCHEMY_TRACK_MODIFICATIONS
# is set again to False below -- the later assignments win.
DEBUG = True
# SQLALCHEMY_ECHO = True
DB_URI = 'mysql+pymysql://{}:{}@{}:{}/{}'.format(USERNAME, PASSWORD, HOSTNAME, PORT, DATABASE)
SQLALCHEMY_DATABASE_URI = DB_URI
SQLALCHEMY_TRACK_MODIFICATIONS = False
# user_id
MONGO_DBNAME = 'alter'
MONGO_URI = None  # MongoDB connection string (placeholder in initial commit)
URL_PATH = "E:/"
|
from model.models import Contact
import random
import string
import os.path
import jsonpickle
import getopt
import sys
# Parse command-line options: -n <count> contacts, -f <output file>.
try:
    opts, args = getopt.getopt(sys.argv[1:], "n:f:", ["Number_of_contacts", "file"])
except getopt.GetoptError as err:
    # BUG FIX: the getopt module has no usage() function, so the old
    # getopt.usage() call raised AttributeError instead of reporting
    # the real problem.  Print the parse error and exit.
    print(err)
    sys.exit(2)
n = 2
f = "data/contacts.json"
for o, a in opts:
    if o == "-n":
        n = int(a)
    elif o == "-f":
        f = a
def random_phone_number():
    """Return a fake number: '+' plus a known 3-digit prefix plus 7 digits."""
    prefixes = ['914', '983', '902', '999', '951', '924', '953']
    chosen = random.choice(prefixes)
    tail = "".join(random.choice(string.digits) for _ in range(7))
    return "+" + chosen + tail
def random_email(maxlen):
    """Return a random local part (letters/spaces, under maxlen chars)
    joined with a domain picked from a fixed list."""
    alphabet = string.ascii_letters + " "
    domains = ["quality-lab.ru", "@google.com", "@yandex.ru", "@rambler.ru", "@mail.ru"]
    local = "".join(random.choice(alphabet) for _ in range(random.randrange(maxlen)))
    return local + random.choice(domains)
def random_string(prefix, maxlen):
    """Return prefix followed by up to maxlen-1 random printable characters."""
    # NOTE(review): "" * 10 adds nothing; perhaps " " * 10 was intended.
    alphabet = string.ascii_letters + string.digits + string.punctuation + "" * 10
    tail = "".join(random.choice(alphabet) for _ in range(random.randrange(maxlen)))
    return prefix + tail
# Build five randomized Contact records and dump them as pretty JSON
# test data next to the project root (relative to this script's location).
testdata = [Contact(firstname=random_string("firstname", 20), middlename=random_string("middlename", 20),
                    lastname=random_string("lastname", 20), company=random_string("company", 10), addreswork=random_string("addreswork", 20),
                    homephone=random_phone_number(), mobilephone=random_phone_number(),
                    workphone=random_phone_number(), email=random_email(10), email2=random_email(10),
                    email3=random_email(8), bday="30", bmonth="August", byear="1994", address=random_string("address", 10), address2="vampilov",
                    secondaryphone=random_phone_number(), notes=random_string("notes", 50))
            for i in range(5)]
file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", f)
with open(file, "w") as out:
    jsonpickle.set_encoder_options("json", indent=2)
    out.write(jsonpickle.encode(testdata))
from turtle import *
# Draw a blue square (side 100) over a Dratini background image.
bgcolor("#16F4DC")
bgpic("Dratini.gif")
pencolor("#0624F8")
for number in range (4):
    forward(100)
    rt(90)
# Keep the window open until the user presses Enter.
input()
|
from django.shortcuts import render
from django.http import HttpResponse
from .models import Job
from .models import Email
from django.db.models import Q
from django.core.paginator import Paginator
from django.shortcuts import redirect
# Create your views here.
def index(request):
    # Landing page: show the first ten jobs, paginated four per page.
    job = Job.objects.all()[:10]
    paginator = Paginator(job, 4)
    page = request.GET.get('page')
    #?page=2
    job = paginator.get_page(page)
    #return HttpResponse("hello")
    context = {
        'job': job,
    }
    return render(request, "index.html", context)
def post(request):
    # Render the job-posting page.
    #return HttpResponse("hello from signup")
    return render(request, "post.html")
def summary(request, id):
    """Render the detail page for a single job posting.

    Returns 404 for an unknown id instead of crashing with an unhandled
    Job.DoesNotExist (500) as the bare .get() did.
    """
    from django.shortcuts import get_object_or_404
    job = get_object_or_404(Job, id=id)
    context = {
        'job': job,
    }
    return render(request, "summary.html", context)
def search(request):
    """Search jobs by title, location, or company via the ?q= parameter."""
    template = 'index.html'
    query = request.GET.get('q')
    if not query:
        return HttpResponse("Please enter a search term")
    # The unused "job = Job.objects.all()[:10]" queryset was removed.
    results = Job.objects.filter(
        Q(title__icontains=query) | Q(location__icontains=query) | Q(company__icontains=query))
    context = {
        'job': results,
    }
    return render(request, template, context)
def gpa(request):
    """List jobs whose summary mentions a GPA requirement.

    Fix: the original set `query = "gpa"` (always truthy), making the else
    branch unreachable, and fetched `Job.objects.all()[:10]` into an unused
    local. Both pieces of dead code are removed; behavior is unchanged.
    """
    results = Job.objects.filter(summary__icontains="gpa")
    context = {
        'job': results,
    }
    return render(request, "index.html", context)
def nogpa(request):
    """List jobs whose summary does NOT mention a GPA requirement.

    Fix: removed the unused `query` constant and the unused full
    `Job.objects.all()` queryset the original built and discarded.
    """
    results = Job.objects.exclude(summary__icontains="gpa")
    context = {
        'job': results,
    }
    return render(request, "index.html", context)
def contact(request):
    """Render the static contact page."""
    return render(request, "contact.html")
def donate(request):
    """Render the static donation page."""
    return render(request, "donate.html")
def addemail(request):
if(request.method == 'POST'):
email=request.POST['email']
emailobj=Email(email=email)
emailobj.save();
return redirect('/')
else:
return render(request,'index.html')
|
from .opencv_utils import show_image_and_wait_for_key, BrightnessProcessor, draw_segments
from .segmentation_aux import contained_segments_matrix, LineFinder, guess_segments_lines
from .processor import DisplayingProcessor, create_broadcast
import numpy
def create_default_filter_stack():
    """Build the standard segment-filter pipeline.

    The LineFinder broadcasts its detected line positions
    ("lines_topmiddlebottoms") to the NearLineFilter, which needs them to
    judge segment proximity to text lines.
    """
    line_finder = LineFinder()
    near_line = NearLineFilter()
    stack = [LargeFilter(), SmallFilter(), LargeAreaFilter(),
             ContainedFilter(), line_finder, near_line]
    line_finder.add_poshook(create_broadcast("lines_topmiddlebottoms", near_line))
    return stack
class Filter(DisplayingProcessor):
    """A filter processes given segments, returning only the desirable ones"""
    PARAMETERS = DisplayingProcessor.PARAMETERS

    def display(self, display_before=False):
        """shows the effect of this filter"""
        # self.image must be assigned externally before display() is called.
        try:
            copy = self.image.copy()
        except AttributeError:
            raise Exception("You need to set the Filter.image attribute for displaying")
        # Dim the image so the colored segment overlays stand out.
        copy = BrightnessProcessor(brightness=0.6).process(copy)
        # s: segments fed to the last _process() call; g: boolean keep-mask
        # recorded by _process().
        s, g = self._input, self.good_segments_indexes
        # Kept segments in green, rejected (True ^ g inverts the mask) in red.
        draw_segments(copy, s[g], (0, 255, 0))
        draw_segments(copy, s[True ^ g], (0, 0, 255))
        show_image_and_wait_for_key(copy, "segments filtered by " + self.__class__.__name__)

    def _good_segments(self, segments):
        # Subclasses override this to return a boolean mask selecting the
        # segments to keep.
        raise NotImplementedError

    def _process(self, segments):
        # Apply the subclass mask, remember it for display(), and fail loudly
        # if the filter rejected every segment.
        good = self._good_segments(segments)
        self.good_segments_indexes = good
        segments = segments[good]
        if not len(segments):
            raise Exception("0 segments after filter " + self.__class__.__name__)
        return segments
class LargeFilter(Filter):
    """desirable segments are larger than some width or height"""
    PARAMETERS = Filter.PARAMETERS + {"min_width": 4, "min_height": 8}

    def _good_segments(self, segments):
        # Columns 2 and 3 hold width and height (per the thresholds applied).
        good_width = segments[:, 2] >= self.min_width
        good_height = segments[:, 3] >= self.min_height
        # `&` is the explicit element-wise AND for boolean arrays; the
        # original used `*`, which relies on numpy's bool-arithmetic coercion.
        return good_width & good_height
class SmallFilter(Filter):
    """desirable segments are smaller than some width or height"""
    PARAMETERS = Filter.PARAMETERS + {"max_width": 30, "max_height": 50}

    def _good_segments(self, segments):
        # Columns 2 and 3 hold width and height (per the thresholds applied).
        good_width = segments[:, 2] <= self.max_width
        good_height = segments[:, 3] <= self.max_height
        # `&` is the explicit element-wise AND for boolean arrays; the
        # original used `*`, which relies on numpy's bool-arithmetic coercion.
        return good_width & good_height
class LargeAreaFilter(Filter):
    """desirable segments' area is larger than some"""
    PARAMETERS = Filter.PARAMETERS + {"min_area": 45}

    def _good_segments(self, segments):
        # Per-segment area = width (col 2) * height (col 3), compared
        # element-wise against the configured minimum.
        areas = segments[:, 2] * segments[:, 3]
        return areas >= self.min_area
class ContainedFilter(Filter):
    """desirable segments are not contained by any other"""

    def _good_segments(self, segments):
        # containment[i, j] is truthy when segment i is contained in segment j
        # (see contained_segments_matrix). Reducing over axis 1 flags segments
        # contained in at least one other; XOR with True inverts that flag so
        # only un-contained segments are kept.
        containment = contained_segments_matrix(segments)
        contained_in_any = numpy.max(containment, axis=1)
        return True ^ contained_in_any
class NearLineFilter(Filter):
    """desirable segments have their y near a line"""
    # Fix: in the original, this docstring appeared AFTER the PARAMETERS
    # assignment, making it a bare string statement that never became
    # __doc__; it is now the first statement of the class body.
    PARAMETERS = Filter.PARAMETERS + {"nearline_tolerance": 5.0}  # percentage distance stddev

    def _good_segments(self, segments):
        # guess_segments_lines assigns -1 to segments that are not close to
        # any detected text line; every other assignment is kept.
        # self.lines_topmiddlebottoms is broadcast in by the LineFinder stage.
        lines = guess_segments_lines(segments, self.lines_topmiddlebottoms,
                                     nearline_tolerance=self.nearline_tolerance)
        return lines != -1
|
import sys
import json
import time
import logging
import time
import random
from kafka import KafkaConsumer
from kafka import KafkaProducer
from kafka.errors import KafkaError
from opentracing.propagation import Format
from opentracing import child_of, follows_from
from jaeger_client import Config
from jaeger_client import constants as c
class CoreConnector():
    """Kafka producer (always) + optional consumer, with Jaeger tracing hooks."""
    # Class-level tracer handle; stays None until init_tracer() is called,
    # which disables all tracing paths in start_consuming().
    tracer = None

    def __init__(self, bootstrap_servers, topics=None, group_id=None):
        # The producer is always created; payloads are JSON-encoded to ASCII bytes.
        print(f"Connecting Producer to {bootstrap_servers}")
        self.producer = KafkaProducer(bootstrap_servers=bootstrap_servers,
                                      value_serializer=lambda m: json.dumps(m).encode('ascii')
                                      )
        # A consumer is created only when both topics and group_id are given;
        # otherwise self.consumer is never set.
        if topics is not None and group_id is not None:
            print(f"Connecting Consumer to {bootstrap_servers}")
            self.consumer = KafkaConsumer(*topics,
                                          group_id=group_id,
                                          #auto_offset_reset='earliest',
                                          bootstrap_servers=bootstrap_servers,
                                          value_deserializer=lambda m: json.loads(m.decode('ascii'))
                                          )
        # Quiet kafka-python's logging: only errors, routed to stdout.
        logger = logging.getLogger('kafka')
        logger.addHandler(logging.StreamHandler(sys.stdout))
        logger.setLevel(logging.ERROR)

    def publish_event(self, topic, event):
        """Fire-and-forget publish of a JSON-serializable `event` to `topic`."""
        self.producer.send(topic, event)

    def process_event(self, event, span_ctx):
        """Hook for subclasses: handle one consumed event. Default is a no-op."""
        return event

    def start_consuming(self):
        """Blocking consume loop: optionally extract tracing, then process each event."""
        span_ctx = None
        main_span = None
        for event in self.consumer:
            event = event.value
            if self.tracer is not None:
                event, span_ctx, main_span = self.extract_tracer(event)
            self.process_event(event, span_ctx)
            # main_span is non-None only when this event started a new trace;
            # it must be finished here after processing.
            if main_span is not None:
                main_span.finish()

    def extract_tracer(self, event):
        """Pull the Jaeger span context from `event`, starting a new trace if absent.

        Returns (event, span_ctx, main_span); main_span is None unless a fresh
        'workflow' root span was created here (caller is expected to finish it).
        Expects `event` to carry 'tracer_context' (and 'time'/'name' when a
        context is present).
        """
        print(event)
        carrier = {
            "uber-trace-id": event['tracer_context']
        }
        span_ctx = None if event['tracer_context'] is None else self.tracer.extract(Format.TEXT_MAP, carrier)
        main_span = None
        if span_ctx is None:
            # No upstream context: start a root span and stamp its trace id
            # plus the send time into the event so downstream services can
            # continue the same trace.
            main_span = self.tracer.start_span('workflow')
            span_ctx = main_span
            headers = {}
            self.tracer.inject(span_ctx, Format.TEXT_MAP, headers)
            event['tracer_context'] = headers['uber-trace-id']
            event['time'] = time.time()
        else:
            # Upstream context present: record the transport latency as a span
            # covering the interval from the event's send time to now.
            comm_span = self.tracer.start_span(f'communication-layer-{event["name"]}', child_of=span_ctx, start_time=event['time'])
            end = time.time()
            comm_span.finish(end)
            print(f"Added comlayer: start: {event['time']} end: {end}")
        return event, span_ctx, main_span

    def init_tracer(self, service, jaeger_reporting_host):
        """Create and install the Jaeger tracer (const sampler: trace everything)."""
        # Reset root-logger handlers so jaeger's logging config starts clean.
        logging.getLogger('').handlers = []
        logging.basicConfig(format='%(message)s', level=logging.DEBUG)
        config = Config(
            config={
                'sampler': {
                    'type': 'const',
                    'param': 1,
                },
                'local_agent': {
                    'reporting_host': jaeger_reporting_host,
                    'reporting_port': 5775,
                },
                'tags':{
                    c.JAEGER_IP_TAG_KEY: "127.0.0.1" # TODO: TempFix for ClockAdjustement issue of jaeger remove in the future!
                },
                'logging': True,
            },
            service_name=service
        )
        # this call also sets opentracing.tracer
        self.tracer = config.initialize_tracer()
        print("Init Tracer")
        return self.tracer

    def close_tracer(self):
        """Flush and shut down the Jaeger tracer."""
        self.tracer.close()
def SimulateRuntime(mean, sigma=0):
    """Sleep for a normally-distributed random duration and return it.

    Repeatedly draws from N(mean, sigma) until a strictly positive value
    comes up, sleeps that many seconds, and returns the value. With
    sigma == 0 the draw is exactly `mean`.

    Args:
        mean: mean of the distribution, in seconds.
        sigma: standard deviation, in seconds (default 0 = deterministic).

    Raises:
        ValueError: when mean <= 0 and sigma == 0 — no positive draw is
            possible, which in the original code caused an infinite loop.
    """
    if sigma == 0 and mean <= 0:
        raise ValueError("SimulateRuntime cannot produce a positive duration: "
                         "mean <= 0 with sigma == 0")
    duration = 0
    while duration <= 0:
        duration = random.normalvariate(mean, sigma)
    time.sleep(duration)
    return duration
"""
instabot example
Workflow:
Like last images with hashtags from file.
"""
import sys
import os
import time
import random
from tqdm import tqdm
sys.path.append(os.path.join(sys.path[0], '../'))
from instabot import Bot
def _print_usage():
    """Explain the expected command line (one hashtag per line in the file)."""
    print("USAGE: Pass a path to the file with hashtags."
          " (one line - one hashtag)")
    print("Example: python %s hashtags.txt" % sys.argv[0])


# Exactly one argument (the hashtag file path) is required.
if len(sys.argv) != 2:
    _print_usage()
    exit()

bot = Bot()

# Load the hashtag list; bail out if the file yields nothing.
hashtags = bot.read_list_from_file(sys.argv[1])
bot.logger.info("Hashtags: " + str(hashtags))
if not hashtags:
    exit()

bot.login()
for tag in hashtags:
    bot.like_hashtag(tag)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.