content stringlengths 5 1.05M |
|---|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
see http://www.aharef.info/2006/05/websites_as_graphs.htm
############################################################################
TITLE : Website as Graph
PROJECT : www
ENGINEER : Jeremiah D. Powell
PROGRAM : aharef Website grapher using Tkinter libs
FILE : DocumentParser.py
CREATED : 26-APR-2011 JDPOWELL
DESCRIPTION : parse a webpage, display as a graph
ASSUMPTIONS : Familiarity with HTTP, Python, urllib2, BeautifulSoup
############################################################################
RELEASE LICENSE
This file is available under copyright 2011 Jeremiah D. Powell.
For internal use only,
keep out of young children.
Copyrights to their respective owners.
Current version : $Ver:0.2$
Latest version : 0.1
Bugs, Comments : waveclaw@waveclaw.net
############################################################################
RELEVANT DOCUMENTS
(REFERENCES)
Name Comment
------------------------------- -------------------------------------
############################################################################
REVISION HISTORY
Date Version(Build) SCM Engineer Comment/Description
DD-MMM-YYYY Rel.Patch.Pnt Reason
----------- -------------- -------- -------- -------------------------
26-APR-2011 0.1/0.1(1) WWW00661 jdpowell Copied from other scripts
23-SEP-2011 0.4/0.6 SHELL001 jdpowell Split out modules
############################################################################
"""
from pyaharef.view.tree import Tree
class Site(Tree):
    """
    Implement drawing a Site.

    Subclasses the Tree view from pyaharef.view.tree; the actual drawing
    logic is not implemented yet (see TODO below).
    """
    #TODO: draw a site
|
from datetime import date

# Brazilian military-enlistment checker: compares the user's birth year
# against the conscription age of 18 and reports their status.
current_year = date.today().year
print('-+-' * 14)
print('{:^40}'.format('Exército do Python'))
print('Consulte a situação de teu alistamento!')
print('-+-' * 14)
birth_year = int(input('Ano de seu nascimento: '))
age = current_year - birth_year
if age > 18:
    print('Você se alistou ou deveria ter se alistado a {} ano(s) atrás!'.format(age - 18))
elif age == 18:
    print('Você deve se alistar este ano! Não atrase ou será multado.')
elif age < 18:
    print('Você deverá se alistar daqui {} ano(s)!'.format(18 - age))
    print('Fique atento ao calendário!')
print('Deixe seu feedback no nosso site!')
import pytest
from pathlib import Path
@pytest.fixture(scope="session")
def datadir():
    """Session-scoped fixture: path of the ``data`` directory next to this file."""
    return Path(__file__).parent.joinpath("data")
|
from pytpp.attributes._helper import IterableMeta
from pytpp.attributes.service_module import ServiceModuleAttributes
class CertificatePreEnrollmentAttributes(ServiceModuleAttributes, metaclass=IterableMeta):
    """Attribute container for the TPP 'Certificate Pre-Enrollment' service module."""
    # Name of the TPP configuration class these attributes belong to.
    __config_class__ = "Certificate Pre-Enrollment"
|
import os
import sys
from leapp.snactor.utils import find_project_basedir, make_class_name, make_name, requires_project
from leapp.utils.clicmd import command, command_arg, UsageError
@command('new-actor', help='Creates a new actor')
@command_arg('actor-name')
@requires_project
def cli(args):
    """Scaffold a new leapp actor inside the current project.

    Creates ``actors/<name>/`` with a ``tests/`` subdirectory and writes a
    templated ``actor.py``.

    :param args: parsed CLI arguments; ``args.actor_name`` is the new actor's name.
    :raises UsageError: if ``actor.py`` already exists for that actor.
    """
    actor_name = args.actor_name
    basedir = find_project_basedir('.')
    actor_dir = os.path.join(basedir, 'actors', actor_name.lower())
    actor_test_dir = os.path.join(actor_dir, 'tests')
    # One call builds the whole actors/<name>/tests chain; exist_ok makes
    # re-running against a partially created layout safe.
    os.makedirs(actor_test_dir, exist_ok=True)
    actor_path = os.path.join(actor_dir, 'actor.py')
    if os.path.exists(actor_path):
        raise UsageError("File already exists: {}".format(actor_path))
    with open(actor_path, 'w') as f:
        f.write('''from leapp.actors import Actor
class {actor_class}(Actor):
    name = '{actor_name}'
    description = 'For the actor {actor_name} has been no description provided.'
    consumes = ()
    produces = ()
    tags = ()
    def process(self):
        pass
'''.format(actor_class=make_class_name(actor_name), actor_name=make_name(actor_name)))
    sys.stdout.write("New actor {} has been created at {}\n".format(make_class_name(actor_name),
                                                                    os.path.realpath(actor_path)))
|
import subprocess
import numpy as np
if __name__ == "__main__":
    # Repeat training with fixed regularisation settings.  (An earlier
    # logspace threshold-sweep lived here as commented-out code; removed.)
    lasso = 1e-5
    group_lasso = 1e-4
    threshold = 1e-2
    repeats = 3
    for _ in range(repeats):
        cmd = f'python train.py --lasso {lasso} --grouplasso {group_lasso} --threshold {threshold} --sparse_nodes False'
        try:
            # With shell=True, pass the command as a single string: wrapping
            # it in a list is fragile (extra items become shell arguments
            # on POSIX rather than part of the command).
            print(subprocess.check_output(cmd, shell=True))
        except subprocess.CalledProcessError as err:
            print(err)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 11 02:01:42 2021
@author: zhao
"""
from typing import List, Union, Iterable
from itertools import zip_longest
import sacrebleu
from moverscore_v2 import word_mover_score
from collections import defaultdict
import numpy as np
def sentence_score(hypothesis: str, references: List[str], trace=0):
    """Return the MoverScore of one hypothesis against its references.

    Uses uniform IDF weights (1.0 for every token) and averages the
    word-mover scores over all references.

    :param hypothesis: candidate sentence.
    :param references: reference sentences to score against.
    :param trace: if > 0, print the inputs and the resulting score.
    """
    idf_dict_hyp = defaultdict(lambda: 1.)
    idf_dict_ref = defaultdict(lambda: 1.)
    # word_mover_score expects two aligned lists, so repeat the hypothesis
    # once per reference.
    hypothesis = [hypothesis] * len(references)
    scores = word_mover_score(references, hypothesis, idf_dict_ref, idf_dict_hyp,
                              stop_words=[], n_gram=1, remove_subwords=False)
    sentence_score = np.mean(scores)
    if trace > 0:
        print(hypothesis, references, sentence_score)
    return sentence_score
def corpus_score(sys_stream: List[str],
                 ref_streams: Union[str, List[Iterable[str]]], trace=0):
    """Average sentence-level MoverScore over a parallel corpus.

    :param sys_stream: system outputs, one per segment (or a single string).
    :param ref_streams: one list of references per reference set.
    :param trace: forwarded to sentence_score; if > 0, prints per-sentence detail.
    :raises EOFError: if the system and reference streams differ in length.
    """
    if isinstance(sys_stream, str):
        sys_stream = [sys_stream]
    if isinstance(ref_streams, str):
        ref_streams = [[ref_streams]]
    fhs = [sys_stream] + ref_streams
    corpus_score = 0
    for lines in zip_longest(*fhs):
        if None in lines:
            raise EOFError("Source and reference streams have different lengths!")
        hypo, *refs = lines
        # Bug fix: forward the caller's trace flag (it was hard-coded to 0,
        # making the parameter a no-op).
        corpus_score += sentence_score(hypo, refs, trace=trace)
    corpus_score /= len(sys_stream)
    return corpus_score
def test_corpus_score():
    """Print corpus-level BLEU and MoverScore for a tiny parallel set."""
    references = [['The dog bit the man.', 'It was not unexpected.', 'The man bit him first.'],
                  ['The dog had bit the man.', 'No one was surprised.', 'The man had bitten the dog.']]
    hypotheses = ['The dog bit the man.', "It wasn't surprising.", 'The man had just bitten him.']
    print(sacrebleu.corpus_bleu(hypotheses, references).score)
    print(corpus_score(hypotheses, references))
def test_sentence_score():
    """Print sentence-level BLEU and MoverScore for one hypothesis."""
    references = ['The dog bit the man.', 'The dog had bit the man.']
    hypothesis = 'The dog bit the man.'
    print(sacrebleu.sentence_bleu(hypothesis, references).score)
    print(sentence_score(hypothesis, references))
if __name__ == '__main__':
    # Smoke-test both scoring entry points when run as a script.
    test_sentence_score()
    test_corpus_score()
|
import numpy as np
import pygame as pg
from numba import njit
def main():
    """Top-level game loop for PyTracing Maze.

    Builds a random map, initialises audio and render buffers, then loops:
    handle events -> update agents -> path-trace the frame -> apply player
    movement, until the player quits.

    NOTE(review): this file was recovered with indentation stripped; the
    nesting below was reconstructed from control flow and should be
    confirmed against the upstream source.
    """
    #new map
    mr, mg, mb, maph, mapr, exitx, exity, mapt, maps, posx, posy, posz, size, rot, rot_v = new_map()
    #new game
    running, pause, fps_lock, score, maxscore = 1, 1, 60, 0, 0
    timer, autores, checker , move = 0, 1, 1, 0
    width, height, mod, rr, gg, bb, count = adjust_resol(300)
    minimap, mplayer = np.zeros((size, size, 3)), np.zeros([size, size])
    endmsg = ' Numba compiling, please wait... '
    # splash-screen gradient while numba JIT-compiles the renderer
    rr, gg, bb = np.linspace(0,0.8, width*height), np.linspace(0.5,.1, width*height), np.linspace(1,0.1, width*height)
    drawing(rr, gg, bb, height, width, 1, endmsg, 0, 10, minimap, score, False)
    pg.time.wait(200)
    clock = pg.time.Clock()
    pg.mouse.set_visible(False)
    pg.mixer.set_num_channels(4)# 0 ambient, 1 - run kill respawn 2 - shoot player 3 - shoot enemy
    ambient, runfx, shotfx, killfx, respawnfx, successfx, failfx, fr, fg, fb = sfx()
    pg.mixer.Channel(0).play(ambient, -1)
    endmsg = " Numba may need more compiling... "
    pg.mixer.Channel(1).play(respawnfx)
    ticks = pg.time.get_ticks()/100000
    # orbiting light source ("sun") position
    lx, ly, lz = size/2 + 1500*np.cos(ticks), size/2 + 1000*np.sin(ticks), 1000
    enx, eny, seenx, seeny, lock, run, shoot, sx, sy, sstart, et, count, health, sdir, sdir2, shoot2, sx2, sy2, sstart2, won, et, run, respawn = \
        new_game(width, height, mod, move, posx, posy, .99, rot, rot_v, mr, mg, mb, lx, ly, lz, maph, exitx, exity, mapr, mapt, maps,
                 rr, gg, bb, 0, 0, 0, 0, 0, 0, size, checker, 0, fb, fg, fr, pause, endmsg, 0, 10, minimap, score, -.5/61)
    while running:
        for event in pg.event.get():
            # ESC / window close: pause first; quit if already paused
            if event.type == pg.QUIT or (event.type == pg.KEYDOWN and event.key == pg.K_ESCAPE):
                if not pause:
                    pause = 1
                    pg.mixer.Channel(1).play(respawnfx)
                    endmsg = " Game paused. Current score: " + str(score) + ' '
                else:
                    endmsg = " Thanks for playing! Max score: " + str(maxscore) + ' '
                    pg.mixer.Channel(1).play(killfx)
                    running = False
            if sstart == None and(event.type == pg.MOUSEBUTTONDOWN or event.type == pg.MOUSEBUTTONUP):
                shoot = 1
            if event.type == pg.KEYDOWN:
                if event.key == ord('p') or (pause and event.key == pg.K_SPACE): # pause
                    if not pause:
                        pause = 1
                        pg.mixer.Channel(1).play(respawnfx)
                        endmsg = " Game paused. Current score: " + str(score)
                    elif (int(posx) != exitx or int(posy) != exity):
                        # unpause (and revive with 5 hp after a death)
                        if health == 0:
                            health = 5
                        pause = 0
                        pg.mixer.Channel(1).play(respawnfx)
                if pause and event.key == ord('n'): # new game
                    pause = 0
                    mr, mg, mb, maph, mapr, exitx, exity, mapt, maps, posx, posy, posz, size, rot, rot_v = new_map()
                    enx, eny, seenx, seeny, lock, run, shoot, sx, sy, sstart, et, count, health, sdir, sdir2, shoot2, sx2, sy2, sstart2, won, et, run, respawn = \
                        new_game(width, height, mod, move, posx, posy, .99, rot, rot_v, mr, mg, mb, lx, ly, lz, maph, exitx, exity, mapr, mapt, maps,
                                 rr, gg, bb, enx, eny, sx, sy, sx2, sy2, size, checker, count, fb, fg, fr, pause, endmsg, won, health, minimap, score, -.5/61)
                    mplayer, minimap = np.zeros([size, size]), np.zeros((size, size, 3))
                    pg.mixer.Channel(1).play(respawnfx)
                if event.key == ord('t'): # toggle auto resolution
                    autores = not(autores)
                if event.key == ord('r'): # toggle checkerboard rendering
                    checker = not(checker)
                if event.key == ord('f'): # toggle fullscreen
                    pg.display.toggle_fullscreen()
                if event.key == ord('q'): # change resolution or fps
                    if autores:
                        fps_lock = max(20, fps_lock - 10)
                    else:
                        if width > 100 :
                            width, height, mod, rr, gg, bb, count = adjust_resol(int(width*0.8))
                if event.key == ord('e'): # change resolution or fps
                    if autores:
                        fps_lock = min(120, fps_lock + 10)
                    else:
                        width, height, mod, rr, gg, bb, count = adjust_resol(int(width*1.1))
        if pause:
            # paused: greyscale frame at 30 fps
            clock.tick(30)
            drawing(gg, gg, gg, height, width, pause, endmsg, won, health, minimap, score)
        else:
            enx, eny, mplayer, et, shoot, sx, sy, sdir, shoot2, sx2, sy2, sdir2, seenx, seeny, lock = agents(enx, eny, maph, posx, posy, rot, et, shoot,
                                                                                                            sx, sy, sdir, shoot2, sx2, sy2, sdir2, mplayer,
                                                                                                            seenx, seeny, lock, size)
##            print( ' agents ok ')
            ticks = pg.time.get_ticks()/100000
            lx, ly, lz = size/2 + 1500*np.cos(ticks), size/2 + 1000*np.sin(ticks), 1000
            rr, gg, bb = super_fast(width, height, mod, move, posx, posy, posz, rot, rot_v, mr, mg, mb, lx, ly, lz,
                                    mplayer, exitx, exity, mapr, mapt, maps, rr, gg, bb, enx, eny, sx, sy, sx2, sy2,
                                    size, checker, count,fb, fg, fr)
##            print(' render ok')
            count += 1
            if enx != 0 and lock:
                endmsg = 'Pytracing Maze - Watch out! Score:'
            else:
                endmsg = 'Pytracing Maze - Find the exit! Score:'
            endmsg = endmsg + str(score)+' Res: '+ str(width) +'x' + str(height) + ' FPS: '+ str(int(clock.get_fps()))
            # flash the player dot on the minimap only while drawing
            minimap[int(posy)][int(posx)] = (50, 50, 255)
            drawing(rr, gg, bb, height, width, pause, endmsg, won, health, minimap, score)
            minimap[int(posy)][int(posx)] = (100, 100, 0)
            fpss = int(1000/(pg.time.get_ticks() - ticks*100000 +1e-16))
            if autores and count > 10: #auto adjust render resolution
                if fpss < fps_lock - 10 and width > 100:
                    width, height, mod, rr, gg, bb, count = adjust_resol(int(width*0.8))
                elif fpss > fps_lock + 15:
                    width, height, mod, rr, gg, bb, count = adjust_resol(int(width*1.1))
            if (int(posx) == exitx and int(posy) == exity):
                # player reached the exit cell
                endmsg, won = " You escaped safely! ", 1
                pg.mixer.Channel(1).play(successfx)
                animate(width, height, mod, move, posx, posy, .5, rot, rot_v, mr, mg, mb, lx, ly, lz,
                        mplayer, exitx, exity, mapr, mapt, maps, rr, gg, bb, enx, eny, sx, sy, sx2, sy2,
                        size, checker, count,fb, fg, fr, pause, endmsg, won, health, minimap, score, .5/61)
                pause = 1
                score += 1
            maxscore = max(score, maxscore)
            et = min(clock.tick()/500, 0.1)*(0.8+move)
            if shoot or sstart != None:
                # player's fireball: expires 500 ms after firing
                if sstart == None:
                    pg.mixer.Channel(2).play(shotfx)
                    if fpss < fps_lock and autores:
                        width, height, mod, rr, gg, bb, count = adjust_resol(int(width*0.8))
                    sstart = pg.time.get_ticks()
                elif pg.time.get_ticks() - sstart > 500:
                    shoot, sx, sy, sstart = 0, -1, -1, None
            if enx == 0:
                # no enemy on the map: reward a kill once, then wait for respawn
                if not respawn:
                    if shoot:
                        health = min(health+2, 20)
                    shoot2, sx2, sy2 = 0, -1, -1
                    pg.mixer.Channel(1).play(killfx)
                    run = 1
                    respawn = 1
            else:
                if respawn:
                    respawn = 0
                    pg.mixer.Channel(1).play(respawnfx)
                if shoot2 or sstart2 != None:
                    # enemy fireball lifecycle and hit test against the player
                    if run:
                        run = 0
                        pg.mixer.Channel(1).play(runfx)
                    if sstart2 == None:
                        pg.mixer.Channel(3).play(shotfx)
                        sstart2 = pg.time.get_ticks()
                    elif pg.time.get_ticks() - sstart2 > 500:
                        shoot2, sx2, sy2, sstart2 = 0, -1, -1, None
                    if (sx2 - posx)**2 + (sy2 - posy)**2 < 0.01:
                        health -= 1 + score/2
                        if health <= 0:
                            won, pause, health = -1, 1, 0
                            if score > 0:
                                score -= 1
                            endmsg = " You died! Current score: " + str(score) + ' '
                            pg.mixer.Channel(1).play(failfx)
                            animate(width, height, mod, move, posx, posy, .5, rot, rot_v, mr, mg, mb, lx, ly, lz,
                                    mplayer, exitx, exity, mapr, mapt, maps, rr, gg, bb, enx, eny, sx, sy, sx2, sy2,
                                    size, checker, count,fb, fg, fr, pause, endmsg, won, health, minimap, score, -.5/61)
                            pg.time.wait(500)
                            enx, eny, seenx, seeny, lock, won = 0, 0, 0, 0, 0, 0
                dtp = (enx-posx)**2 + (eny-posy)**2
                if dtp > 150 and not shoot2:
                    # enemy too far away: despawn beyond a larger radius
                    run = 1
                    if dtp > 300:
                        enx, eny, seenx, seeny, lock, run = 0, 0, 0, 0, 0, 0
            posx, posy, rot, rot_v, shoot, move = movement(pg.key.get_pressed(),posx, posy, rot, rot_v, maph, et, shoot, sstart, move)
            pg.mouse.set_pos([640, 360])
            mplayer = np.zeros([size, size])
        pg.display.update()
    pg.mixer.fadeout(600)
    print(endmsg)
    if health > 0 and (int(posx) != exitx or int(posy) != exity):
        animate(width, height, mod, move, posx, posy, .5, rot, rot_v, mr, mg, mb, lx, ly, lz,
                maph, exitx, exity, mapr, mapt, maps, rr, gg, bb, enx, eny, sx, sy, sx2, sy2,
                size, checker, count,fb, fg, fr, pause, endmsg, won, health, minimap, score, +.5/61)
    else:
        pg.time.wait(500)
    pg.quit()
def new_map():
    """Generate a random maze with colours, mirrors, spheres and an exit.

    Carves a random walk of empty cells from the start position until a
    cell far enough away is chosen as the exit.  Returns the colour maps
    (mr/mg/mb), height map (maph), mirror/sphere/texture maps, the exit
    cell, the start pose and the map size.
    """
    size = np.random.randint(30,80) # size of the map
    posx, posy, posz = np.random.randint(1, size -2)+0.5, np.random.randint(1, size -2)+0.5, 0.5
    x, y = int(posx), int(posy)
    rot, rot_v = (np.pi/4, 0)
    # per-cell wall colours
    mr = np.random.uniform(0,1, (size,size))
    mg = np.random.uniform(0,1, (size,size))
    mb = np.random.uniform(0,1, (size,size))
    mapr = np.random.choice([0, 0, 0, 0, 1], (size,size))   # 1-in-5 mirror cells
    maps = np.random.choice([0, 0, 0, 0, 1], (size,size))   # 1-in-5 sphere cells
    mapt = np.random.choice([0, 0, 0, 1, 2, 3], (size,size))  # texture style per cell
    maptemp = np.random.choice([0,0, 1], (size,size))
    maph = np.random.uniform(0.25, 0.99, (size,size))
    maph[np.where(maptemp == 0)] = 0
    # solid outer border, with no spheres on it
    maph[0,:], maph[size-1,:], maph[:,0], maph[:,size-1] = (1,1,1,1)
    maps[0,:], maps[size-1,:], maps[:,0], maps[:,size-1] = (0,0,0,0)
    maph[x][y], mapr[x][y] = (0, 0)
    count = 0
    while 1:
        # random walk: move one step in x or y, carving cells as we go
        testx, testy = (x, y)
        if np.random.uniform() > 0.5:
            testx = testx + np.random.choice([-1, 1])
        else:
            testy = testy + np.random.choice([-1, 1])
        if testx > 0 and testx < size -1 and testy > 0 and testy < size -1:
            # accept the step into an empty cell, or force through a wall
            # after several rejected attempts (count > 5)
            if maph[testx][testy] == 0 or count > 5:
                count = 0
                x, y = (testx, testy)
                maph[x][y], mapr[x][y] = (0, 0)
                dtx = np.sqrt((x-posx)**2 + (y-posy)**2)
                # far enough from the start (or very rarely anywhere): exit here
                if (dtx > size*.6 and np.random.uniform() > .99) or np.random.uniform() > .99999:
                    exitx, exity = (x, y)
                    break
            else:
                count = count+1
    return mr, mg, mb, maph, mapr, exitx, exity, mapt, maps, posx, posy, posz, size, rot, rot_v
def new_game(width, height, mod, move, posx, posy, posz, rot, rot_v, mr, mg, mb, lx, ly, lz, maph, exitx, exity, mapr, mapt, maps,
             rr, gg, bb, enx, eny, sx, sy, sx2, sy2, size, checker, count, fb, fg, fr, pause, endmsg, won, health, minimap, score, ani):
    """Play the spawn camera animation and return fresh per-round game state.

    All rendering parameters are forwarded to animate(); the meaning of each
    slot in the returned tuple is spelled out in the comment below and the
    tuple is unpacked by the caller in main().
    """
    animate(width, height, mod, move, posx, posy, posz, rot, rot_v, mr, mg, mb, lx, ly, lz, maph, exitx, exity, mapr, mapt, maps,
            rr, gg, bb, enx, eny, sx, sy, sx2, sy2, size, checker, count, fb, fg, fr, pause, endmsg, won, health, minimap, score, ani)
    #enx, eny, seenx, seeny, lock, run, shoot, sx, sy, sstart, et, count, health, sdir, sdir2, shoot2, sx2, sy2, sstart2, won, et, run, respawn
    return 0, 0, 0, 0, 0, 1, 0, -1, -1, None, 0.1, 0, 10, 0, 0, 0, -1, -1, None, 0, 0.1, 1, 1
def movement(pressed_keys,posx, posy, rot, rot_v, maph, et, shoot, sstart, move):
    """Apply mouse look and keyboard movement for one tick, with wall sliding.

    Returns the (possibly unchanged) position, the updated view angles, the
    shoot flag (space fires if no fireball is active) and the movement-bob
    accumulator ``move``, clipped to [0, 0.3].
    """
    x, y = (posx, posy)
    # mouse look relative to the screen centre (the cursor is re-centred
    # each frame by the caller)
    p_mouse = pg.mouse.get_pos()
    rot, rot_v = rot - np.clip((p_mouse[0]-640)/200, -0.2, .2), rot_v -(p_mouse[1]-360)/400
    rot_v = np.clip(rot_v, -1, 1)
    diag = 0
    if pressed_keys[pg.K_UP] or pressed_keys[ord('w')]:
        diag = 0.5
        x, y, move, diag = x + et*np.cos(rot), y + et*np.sin(rot), move + et/4, 1
    elif pressed_keys[pg.K_DOWN] or pressed_keys[ord('s')]:
        x, y, move, diag = x - et*np.cos(rot), y - et*np.sin(rot), move - et/2, 1
    if pressed_keys[pg.K_LEFT] or pressed_keys[ord('a')]:
        # halve strafe speed when also moving forward/backward
        et = et/(diag+1)
        x, y, move = x - et*np.sin(rot), y + et*np.cos(rot), move - et/2
    elif pressed_keys[pg.K_RIGHT] or pressed_keys[ord('d')]:
        et = et/(diag+1)
        x, y, move = x + et*np.sin(rot), y - et*np.cos(rot), move - et/2
    if x == posx and y == posy:
        move = move - et/2
    # collision: try the full step, then each axis alone (wall sliding)
    if maph[int(x-0.05)][int(y)] == 0 and maph[int(x+0.05)][int(y)] == 0 and maph[int(x)][int(y+0.05)] == 0:
        posx, posy = x, y
    elif maph[int(posx-0.05)][int(y)] == 0 and maph[int(posx+0.05)][int(y)] == 0 and maph[int(posx)][int(y+0.05)] == 0:
        posy = y
    elif maph[int(x-0.05)][int(posy)] == 0 and maph[int(x+0.05)][int(posy)] == 0 and maph[int(x)][int(posy+0.05)] == 0:
        posx = x
    else:
        move = move - et/2
    if not shoot and sstart == None and pressed_keys[pg.K_SPACE]:
        shoot = 1
    move = np.clip(move, 0, 0.3)
    return posx, posy, rot, rot_v, shoot, move
@njit(cache=True)
def lodev(x, y, z, cos, sin, sinz, maph, size):
    """Skip empty space along a ray with a Lodev-style DDA grid traversal.

    Advances (x, y, z) along direction (cos, sin, sinz) up to the first
    non-empty map cell, the map border, or the z-plane limit, then backs
    off half a step so the caller's fine-grained march can resume.
    """
    norm = np.sqrt(cos**2 + sin**2 + sinz**2)
    # tiny epsilon avoids division by zero for axis-aligned rays
    rayDirX, rayDirY, rayDirZ = cos/norm + 1e-16, sin/norm + 1e-16, sinz/norm + 1e-16
    mapX, mapY = int(x), int(y)
    deltaDistX, deltaDistY, deltaDistZ= abs(1/rayDirX), abs(1/rayDirY), abs(1/rayDirZ)
    if (rayDirX < 0):
        stepX, sideDistX = -1, (x - mapX) * deltaDistX
    else:
        stepX, sideDistX = 1, (mapX + 1.0 - x) * deltaDistX
    if (rayDirY < 0):
        stepY, sideDistY = -1, (y - mapY) * deltaDistY
    else:
        stepY, sideDistY = 1, (mapY + 1 - y) * deltaDistY
    if (rayDirZ < 0):
        sideDistZ = z*deltaDistZ;
    else:
        sideDistZ = (1-z)*deltaDistZ
    while (1):
        # step into whichever neighbouring cell boundary is closer
        if (sideDistX < sideDistY):
            sideDistX += deltaDistX; mapX += stepX
            dist = sideDistX; side = 0
            if mapX < 2 or mapX > size-2:
                break
        else:
            sideDistY += deltaDistY; mapY += stepY
            dist = sideDistY; side = 1
            if mapY < 2 or mapY > size-2:
                break
        if (maph[mapX][mapY] != 0):
            break
    # undo the overshoot into the hit cell
    if (side):
        dist = dist - deltaDistY
    else:
        dist = dist - deltaDistX
    # don't march past the floor/ceiling crossing
    if (dist > sideDistZ):
        dist = sideDistZ
    x = x + rayDirX*dist - cos/2
    y = y + rayDirY*dist - sin/2
    z = z + rayDirZ*dist - sinz/2
    return x, y, z
@njit(cache=True)
def super_fast(width, height, mod, move, posx, posy, posz, rot, rot_v, mr, mg, mb, lx, ly, lz,
               maph, exitx, exity, mapr, mapt, maps, pr, pg, pb, enx, eny, sx, sy, sx2, sy2,
               size, checker, count, fb, fg, fr):
    """Path-trace one frame into the flat colour buffers pr/pg/pb.

    Per pixel: march a primary ray (walls, spheres, mirrors, agents,
    fireballs), shade the hit, then march a shadow ray toward the light
    source (or an active fireball).  With ``checker`` set, only half the
    pixels are traced each frame (checkerboard pattern alternating with
    ``count``) and the gaps are interpolated at the end.

    NOTE(review): indentation reconstructed during review; nesting of the
    reflection branches inferred from context.
    """
    inverse = count%2
    garbage = not(count)  # first frame: buffers hold splash data, don't blend
    idx = 0
    for j in range(height): #vertical loop
        rot_j = rot_v + (1+move**1.5)*np.deg2rad(24 - j/mod)
        sinzo = (0.02/mod)*np.sin(rot_j)
        coszo = (0.02/mod)*np.sqrt(abs(np.cos(rot_j)))
        for i in range(width): #horizontal vision loop
            if (not(checker) or i == 0 or i == width -1 or j == 0 or j == height -1 or (inverse and i%2 == j%2) or (not(inverse) and i%2 != j%2)):
                rot_i = rot + (1+move**1.5)*np.deg2rad(i/mod - 30)
                x, y, z = (posx, posy, posz)
                sin, cos, sinz = coszo*np.sin(rot_i), coszo*np.cos(rot_i), sinzo
                modr = 1  # accumulated reflectance/shade multiplier
                cx, cy, c1r, c2r, c3r = 1, 1, 1, 1, 1
                shot, enem, mapv = 0, 0, 0
                dtp = np.random.uniform(0.002,0.01)
                # --- primary ray march ---
                for k in range(2000):
                    if (mapv == 0 or (sinz > 0 and (z > mapv or (mapv==6 and (z>0.4 or z <0.2)) or(z > 0.57 and mapv > 1)))): ## LoDev DDA for optimization
                        x, y, z = lodev(x, y, z, cos, sin, sinz, maph, size)
                    x += cos; y += sin; z += sinz
                    if (z > 1 or z < 0): # check ceiling and floor
                        break
                    mapv = maph[int(x)][int(y)]
                    if mapv > 1 and z < 0.58:
                        # agent cells: 2/8 = player, 3/9 = enemy (body parts)
                        if mapv == 2 or mapv == 8 or mapv == 3 or mapv == 9:
                            refx, refy, sh = enx, eny, .2
                            if mapv == 2 or mapv == 8:
                                refx, refy, sh = posx, posy, .8
                            if z> 0.45 and (x-refx)**2 + (y-refy)**2 + (z-0.5)**2 < 0.003 +abs(z-0.47)/30 :
                                break # head
                            if z < 0.45 and z > 0.28 and (x-refx)**2 + (y-refy)**2 < (z/10 - 0.02):
                                break # chest
                            if z < 0.28 and (x-refx)**2 + (y-refy)**2 + (z-0.15)**2 < 0.023 :
                                break #roller
                        if mapv > 5 and z < 0.4 and z > 0.2:
                            # fireball cells: < 12 enemy's, otherwise player's
                            if mapv < 12:
                                refx = sx2; refy = sy2
                            else:
                                refx = sx; refy = sy
                            if ((x-refx)**2 + (y-refy)**2 + (z-0.3)**2 < dtp):
                                shot = 1
                                break
                    if mapv > z and mapv < 2: # check walls
                        if maps[int(x)][int(y)]: # check spheres
                            if ((x-int(x)-0.5)**2 + (y-int(y)-0.5)**2 + (z-int(z)-0.5)**2 < 0.24):
                                if (mapr[int(x)][int(y)]): # spherical mirror
                                    if (modr == 1):
                                        cx, cy = int(x), int(y)
                                    modr = modr*0.7
                                    if (modr < 0.2):
                                        break
                                    if (mapv - z <= abs(sinz)): ## horizontal surface
                                        sinz = -sinz
                                    else:
                                        # reflect off the sphere normal
                                        nx = (x-int(x)-0.5)/0.5; ny = (y-int(y)-0.5)/0.5; nz =(z-int(z)-0.5)/0.5
                                        dot = 2*(cos*nx + sin*ny + sinz*nz)
                                        cos = (cos - nx*dot); sin = (sin - ny*dot); sinz = (sinz - nz*dot)
                                    x += cos; y += sin; z += sinz
                                else:
                                    break
                        elif mapr[int(x)][int(y)]: # check reflections
                            if modr == 1:
                                cx, cy = int(x), int(y)
                            modr = modr*0.7
                            if modr < 0.2:
                                break
                            if abs(z-maph[int(x)][int(y)]) < abs(sinz):
                                sinz = -sinz
                            elif maph[int(x+cos)][int(y-sin)] == maph[int(x)][int(y)]:
                                cos = -cos
                            else:
                                sin = -sin
                        else:
                            break
                # --- shade the hit point ---
                if z > 1: # ceiling
                    norm = np.sqrt(cos**2 + sin**2 + sinz**2)
                    rayDirX, rayDirY, rayDirZ = cos/norm + 1e-16, sin/norm + 1e-16, sinz/norm + 1e-16
                    deltaDistX, deltaDistY, deltaDistZ= abs(1/rayDirX), abs(1/rayDirY), abs(1/rayDirZ)
                    deltaDistZ = (lz-z)*deltaDistZ
                    x += deltaDistZ*rayDirX; y += deltaDistZ*rayDirY; z = lz
                    dtol = np.sqrt((x-lx)**2+(y-ly)**2)
                    if dtol < 50: #light source
                        shot = 1
                        c1, c2, c3 = 1, 1, 0.5
                    else:
                        # procedural sky stripes around the sun
                        angle = np.rad2deg(np.arctan((y-ly)/(x-lx)))/np.random.uniform(12,15)
                        sh = (0.8+ abs(angle - int(angle))/5)/(dtol/1000)
                        if sh > 1:
                            sh = 1
                        if int(angle)%2 == 1:
                            c1, c2, c3 = 0.8*(1-sh), 0.86*(1-sh/4), (1-sh/10)
                        else:
                            c1, c2, c3 = 0.8*(1-sh), 0.9*(1-sh/4), (1-sh/10)
                        if sx != -1:
                            c1, c2, c3 = 0.7*c1, 0.7*c2, 0.7*c3
                elif z < 0: # floor
                    z= 0
                    xx = int(3*x%1*100) + int(3*y%1*100)*100
                    if int(x) == exitx and int(y) == exity: #exit
                        c1, c2, c3 = fr[xx]/655, fr[xx]/555, fr[xx]/256
                    else:
                        sh = 0.3 + (x+y)/(3*size)
                        c1, c2, c3 = (1-sh/2)*fg[xx]/300, sh*fg[xx]/256, sh*fb[xx]/300
                elif mapv < 2: # walls
                    c1, c2, c3 = mr[int(x)][int(y)], mg[int(x)][int(y)], mg[int(x)][int(y)]
                    if mapt[int(x)][int(y)] > 1: # textured walls
                        if y%1 < 0.01 or y%1 > 0.99:
                            ww = int(3*x%1*100)
                        else:
                            ww = int(3*y%1*100)
                        if x%1 < 0.99 and x%1 > 0.01 and y%1 < 0.99 and y%1 > 0.01:
                            zz = int(3*x%1*100)
                        else:
                            zz = int(3*z%1*100)
                        xx = zz + ww*100
                        if maps[int(x)][int(y)]: # spheres get funky textures
                            c1, c2, c3 = c1*fr[xx+1]/455, c2*fg[xx-1]/455, c3*fb[xx]/755
                        else:
                            xx = fr[xx]/255
                            c1, c2, c3 = c1*xx, c2*xx, c3*xx
                    if mapt[int(x)][int(y)]%2 == 1: # gradient walls
                        c1, c2, c3 = c1*(2+z)/3, c2*(3-z)/3, c3*(2+z**2)/3
                    if mapv - z <= abs(sinz): # round coordinates
                        z = mapv
                    elif not maps[int(x)][int(y)]:
                        if int(x-cos) != int(x):
                            x = max(int(x-cos), int(x))
                            modr = modr*0.80
                        else:
                            y = max(int(y-sin), int(y))
                            modr = modr*0.9
                else: # agents
                    if shot: # fireball
                        sh = ((x-refx)**2 + (y-refy)**2 + (z-0.3)**2)/0.012
                        c1, c2, c3 = 1, 0.6*sh+0.2 , 0.2*sh+0.1
                    elif z> 0.45: # Head
                        c1, c2, c3 = (1-z)*(1-sh), (1-z)*sh, z*sh
                    elif z > 0.28: # Chest
                        c1, c2, c3 = 2*(z-0.28), (z-0.28)*(1-sh), (z-0.28)*sh
                    else: # Roller
                        c1, c2, c3 = refx%1*z*(1-sh), refy%1*0.2*sh, refy%1*z*sh
                if modr <= 0.7 and not shot: # tinted mirrors
                    c1r, c2r, c3r = mr[cx][cy], mg[cx][cy], mg[cx][cy]
                # --- shadow ray toward light / fireball ---
                if not shot and z < 1: # shadows
                    dtp = np.sqrt((x-posx)**2+(y-posy)**2+(z-posz)**2)
                    if dtp > 7:
                        modr = modr/np.log((dtp-6)/4+np.e)
                    if sx != -1 and maph[int(sx)][int(sy)] > 1: # fireball
                        shot, c3, limodr = 1, c3 * 0.9, 0.6
                        dtol = np.sqrt((x-sx)**2+(y-sy)**2+(z-0.35)**2)
                        cos, sin, sinz = .01*(sx-x)/dtol, .01*(sy-y)/dtol, .01*(0.35-z)/dtol
                    elif sx2 != -1 and maph[int(sx2)][int(sy2)] > 1: # fireball enemy
                        shot, c3, limodr = 1, c3 * 0.9, 0.6
                        dtol = np.sqrt((x-sx2)**2+(y-sy2)**2+(z-0.35)**2)
                        cos, sin, sinz = .01*(sx2-x)/dtol, .01*(sy2-y)/dtol, .01*(0.35-z)/dtol
                    else: # sun
                        limodr = 0.4
                        dtol = np.sqrt((x-lx)**2+(y-ly)**2+(z-lz)**2)
                        cos, sin, sinz = .01*(lx-x)/dtol, .01*(ly-y)/dtol, .01*(lz-z)/dtol
                    x += cos; y += sin; z += sinz # advance one step
                    mapv = maph[int(x)][int(y)]
                    if z < mapv and mapv < 1:# if already hit something apply dark shade
                        if maps[int(x)][int(y)]:
                            if ((x-int(x)-0.5)**2 + (y-int(y)-0.5)**2 + (z-int(z)-0.5)**2 < 0.24):
                                modr = modr*0.39
                        else:
                            modr = modr*0.39
                    for k in range(1000):
                        if (mapv == 0) or not shot and ((z > mapv) or (z > 0.57 and mapv > 1)): ## LoDev DDA for optimization
                            x, y, z = lodev(x, y, z, cos, sin, sinz, maph, size)
                        x += cos; y += sin; z += sinz
                        mapv = maph[int(x)][int(y)]
                        if shot:
                            if mapv > 5 or (sinz > 0 and z > 0.35) or (sinz < 0 and z < 0.35) or modr < limodr:
                                break
                        elif z >1 or modr < limodr:
                            break
                        # partial occlusion from agent bodies softens the shadow
                        if z < 0.58 and mapv > 1 and (mapv == 2 or mapv == 8 or mapv == 3 or mapv == 9):
                            refx, refy, sh = enx, eny, .2
                            if mapv == 2 or mapv == 8:
                                refx, refy, sh = posx, posy, .8
                            if z> 0.45 and (x-refx)**2 + (y-refy)**2 + (z-0.5)**2 < 0.003 +abs(z-0.47)/30:
                                modr = modr*0.67 # head
                            if z < 0.45 and z > 0.28 and (x-refx)**2 + (y-refy)**2 < (z/10 - 0.02):
                                modr = modr*0.67 # chest
                            if z < 0.28 and (x-refx)**2 + (y-refy)**2 + (z-0.15)**2 < 0.023 :
                                modr = modr*0.67 #roller
                        if mapv > 0 and z <= mapv and mapv < 2:
                            if maps[int(x)][int(y)]: # check spheres
                                if ((x-int(x)-0.5)**2 + (y-int(y)-0.5)**2 + (z-int(z)-0.5)**2 < 0.25):
                                    modr = modr*0.9
                            else:
                                modr = modr*0.9
                # temporal blend with the previous frame's pixel
                pr[idx] = (3*modr*np.sqrt(c1*c1r) + (1-garbage)*pr[idx])/(4-garbage)
                pg[idx] = (3*modr*np.sqrt(c2*c2r) + (1-garbage)*pg[idx])/(4-garbage)
                pb[idx] = (3*modr*np.sqrt(c3*c3r) + (1-garbage)*pb[idx])/(4-garbage)
            idx += 1
    if checker: # fill gaps
        idx = 0
        for j in range(height): #vertical loop
            for i in range(width): #horizontal vision loop
                if (i > 0 and i < width -1 and j > 0 and j < height -1 and ((inverse and i%2 != j%2) or (not(inverse) and i%2 == j%2))):
                    # interpolate horizontally or vertically where neighbours
                    # agree, otherwise average the 4-neighbourhood
                    if abs(pr[idx-1] - pr[idx+1]) < 0.05 and abs(pg[idx-1] - pg[idx+1]) < 0.05 and abs(pb[idx-1] - pb[idx+1]) < 0.05 :
                        pr[idx], pg[idx], pb[idx] = (pr[idx-1] + pr[idx+1])/2, (pg[idx-1] + pg[idx+1])/2, (pb[idx-1] + pb[idx+1])/2
                    elif abs(pr[idx-width] - pr[idx+width]) < 0.05 and abs(pg[idx-width] - pg[idx+width]) < 0.05 and abs(pb[idx-width] - pb[idx+width]) < 0.05 :
                        pr[idx], pg[idx], pb[idx] = (pr[idx-width] + pr[idx+width])/2, (pg[idx-width] + pg[idx+width])/2, (pb[idx-width] + pb[idx+width])/2
                    else:
                        pr[idx] = ((1-garbage)*pr[idx] + pr[idx-1] + pr[idx-width] + pr[idx+width] + pr[idx+1])/(5-garbage)
                        pg[idx] = ((1-garbage)*pg[idx] + pg[idx-1] + pg[idx-width] + pg[idx+width] + pg[idx+1])/(5-garbage)
                        pb[idx] = ((1-garbage)*pb[idx] + pb[idx-1] + pb[idx-width] + pb[idx+width] + pb[idx+1])/(5-garbage)
                idx += 1
    return pr, pg, pb
def adjust_resol(width):
    """Return render-buffer parameters for a given horizontal resolution.

    Height is 60% of the width, ``mod`` scales the field of view, and the
    three colour channels are seeded with uniform noise.  The trailing 0
    resets the caller's frame counter.
    """
    height = int(0.6*width)
    mod = width/64
    n_pixels = width * height
    rr, gg, bb = (np.random.uniform(0, 1, n_pixels) for _ in range(3))
    return width, height, mod, rr, gg, bb, 0
@njit(cache=True)
def agents(enx, eny, maph, posx, posy, rot, et, shoot, sx, sy, sdir, shoot2, sx2, sy2, sdir2, mplayer, seenx, seeny, lock, size):
    """Advance enemy AI and both fireballs one tick; stamp them into mplayer.

    Handles enemy respawn, line-of-sight search, chasing/wandering, enemy
    and player fireball motion, and fireball/enemy collision.  Returns the
    updated enemy position, the agent occupancy map (maph + markers), and
    all projectile state.
    """
    # respawn: occasionally place the enemy 5-8 cells from the player
    if enx == 0 and np.random.uniform(0,1) > 0.995:
        x, y = np.random.normal(posx, 5), np.random.normal(posy, 5)
        dtp = (x-posx)**2 + (y-posy)**2
        if x > 0 and x < size-1 and y > 0 and y < size-1:
            if maph[int(x)][int(y)] == 0 and dtp > 25 and dtp < 64:
                enx, eny, seenx, seeny, lock = x, y, x, y, 0
    else:
        # look for player: cast a ray from enemy toward the player
        if not lock or np.random.uniform(0,1) > 0.99:
            dtp = np.sqrt((enx-posx)**2 + (eny-posy)**2)
            cos, sin = (posx-enx)/dtp, (posy-eny)/dtp
            x, y = enx, eny
            for i in range(300):
                x += 0.04*cos; y += 0.04*sin
                if (maph[int(x+.05)][int(y+.05)] != 0 or maph[int(x-.05)][int(y-.05)] != 0 or
                        maph[int(x-.05)][int(y+.05)] != 0 or maph[int(x+.05)][int(y-.05)] != 0):
                    lock = 0
                    break
                if(int(x) == int(posx) and int(y) == int(posy)):
                    seenx, seeny, lock = posx, posy, 1
                    break
        # pick a new waypoint once the current one is reached
        if int(enx) == int(seenx) and int(eny) == int(seeny):
            if not lock:
                if shoot: #if the player is shooting go towards him
                    seenx, seeny = np.random.uniform(enx, posx), np.random.uniform(eny, posy)
                else:
                    seenx, seeny = np.random.normal(enx, 2), np.random.normal(eny, 2)
            else:
                seenx, seeny = np.random.normal(posx, 2), np.random.normal(posy, 2)
        # step toward the waypoint, trying a perpendicular slide if blocked
        dtp = np.sqrt((enx-seenx)**2 + (eny-seeny)**2)
        cos, sin = (seenx-enx)/dtp, (seeny-eny)/dtp
        x, y = enx + et*cos, eny + et*sin
        if maph[int(x)][int(y)] == 0:
            enx, eny = x, y
        else:
            if np.random.uniform(0,1) > 0.5:
                x, y = enx - et*sin, eny + et*cos
            else:
                x, y = enx + et*sin, eny - et*cos
            if maph[int(x)][int(y)] == 0:
                enx, eny = x, y
            else:
                seenx, seeny = enx+np.random.normal(0,3), eny+np.random.normal(0,3)
                lock = 0
    mplayer[int(enx)][int(eny)] = 3
    # Bug fix: the footprint test used the stale local `y` instead of `eny`
    # in two of the four corner checks.
    if (maph[int(enx+.1)][int(eny+.1)] == 0 and maph[int(enx-.1)][int(eny-.1)] == 0 and
            maph[int(enx-.1)][int(eny+.1)] == 0 and maph[int(enx+.1)][int(eny-.1)] == 0):
        mplayer[int(enx+0.1)][int(eny+0.1)], mplayer[int(enx+0.1)][int(eny-0.1)] = 3, 3
        mplayer[int(enx-0.1)][int(eny+0.1)], mplayer[int(enx-0.1)][int(eny-0.1)] = 3, 3
    mplayer[int(posx)][int(posy)] = 2
    # enemy fires when it has line of sight and no fireball in flight
    if lock and not shoot2:
        shoot2 = 1
        sdir2 = np.arctan((posy-eny)/(posx-enx)) + np.random.uniform(-.1,.1)
        # arctan is ambiguous by pi; flip if the shot would move away
        if abs(enx+np.cos(sdir2)-posx) > abs(enx-posx):
            sdir2 = sdir2 - np.pi
    if shoot2:
        if sx2 == -1:
            sx2, sy2 = enx + .5*np.cos(sdir2), eny + .5*np.sin(sdir2)
        sx2, sy2 = sx2 + 5*et*np.cos(sdir2), sy2 + 5*et*np.sin(sdir2)
        if sx2 > 0 and sx2 < size-1 and sy2 > 0 and sy2 < size-1:
            if (maph[int(sx2+.05)][int(sy2+.05)] != 0 or maph[int(sx2-.05)][int(sy2-.05)] != 0 or
                    maph[int(sx2-.05)][int(sy2+.05)] != 0 or maph[int(sx2+.05)][int(sy2-.05)] != 0):
                shoot2, sx2, sy2 = 0, -1, -1
            else:
                mplayer[int(sx2)][int(sy2)] += 6
        else:
            shoot2, sx2, sy2 = 0, -1, -1
    if shoot:
        if sx == -1:
            sdir = rot+np.random.uniform(-.1,.1)
            sx, sy = posx + .5*np.cos(sdir), posy + .5*np.sin(sdir)
        sx, sy = sx + 5*et*np.cos(sdir), sy + 5*et*np.sin(sdir)
        # Bug fix: the bounds check tested `sy < size-1` twice and never
        # checked sx against the far border (compare the sx2/sy2 check above).
        if sx > 0 and sx < size-1 and sy > 0 and sy < size-1:
            if enx != 0 and (sx - enx)**2 + (sy - eny)**2 < 0.02:
                enx, eny, seenx, seeny = 0, 0, 0, 0
            if (maph[int(sx+.05)][int(sy+.05)] != 0 or maph[int(sx-.05)][int(sy-.05)] != 0 or
                    maph[int(sx-.05)][int(sy+.05)] != 0 or maph[int(sx+.05)][int(sy-.05)] != 0):
                shoot, sx, sy = 0, -1, -1
            else:
                mplayer[int(sx)][int(sy)] += 12
        else:
            shoot, sx, sy = 0, -1, -1
    mplayer = maph + mplayer
    return(enx, eny, mplayer, et, shoot, sx, sy, sdir, shoot2, sx2, sy2, sdir2, seenx, seeny, lock)
def drawing(rr, gg, bb, height, width, pause, endmsg, won, health, minimap, score, nosplash=True):
    """Blit one frame: rendered buffers, health bar, minimap or pause overlay.

    Uses the module-level font/screen/surface objects created in the
    ``__main__`` block.  ``nosplash=False`` draws the startup splash figure.
    """
    global font, font2, screen, surfbg
    surfbg.fill(pg.Color("darkgrey"))
    # background health bar: grows and shifts red->green with health
    pg.draw.rect(surfbg, (200-int(health*10), 50+int(health*10), 0),(10,int(360-36*(10-health/2)),1260,int(72*(10-health/2))))
    # flat colour channels -> (height, width, 3) image -> pygame surface
    pixels = np.dstack((rr,gg,bb))
    pixels = np.reshape(pixels, (height,width,3))
    surf = pg.surfarray.make_surface((np.rot90(pixels*255)).astype('uint8'))
    surf = pg.transform.scale(surf, (1200, 720))
    if not nosplash or pause:
        # decorative figure: centred on the splash, at the mouse when paused
        px, py = 1100, 360
        if nosplash:
            px, py = pg.mouse.get_pos()
        for i in range(3):
            pg.draw.circle(surf, (50, 70+i*20, 160+i*40), [px+i*10,py-i*10], 50-i*15)
            pg.draw.circle(surf, (60+i*10, 100+i*20, 100+i*10), [px+i*10,py+280-i*1], 90-i*15)
            pg.draw.polygon(surf, (150+i*30, 34+i*10, 60+i*10), [[px-100+i*20,py+40+i*15],[px+100-i*20,py+40+i*15],[px+50-i*15,py+205-i*15],[px-50+i*15,py+205-i*15]])
    screen.blit(surfbg, (0, 0))
    screen.blit(surf, (40, 0))
    if pause:
        # drop-shadowed title plus menu text
        screen.blit(font2.render(" PyTracing Maze by FinFET ", 0, pg.Color("red")),(45,45))
        screen.blit(font2.render(" PyTracing Maze by FinFET ", 0, pg.Color("blue")),(55,55))
        screen.blit(font2.render(" PyTracing Maze by FinFET ", 0, pg.Color("white")),(50,50))
        screen.blit(font2.render(endmsg, 0, pg.Color("salmon"), (100, 34, 60)),(50,420))
        if nosplash:
            screen.blit(font2.render(" Press N for a new game ", 0, pg.Color("grey"), (45, 34, 100)),(50,560))
            screen.blit(font2.render(" Press ESC to leave ", 0, pg.Color("grey"), (13, 34, 139)),(50,630))
        if won == 1:
            screen.blit(font2.render(" Your current score is "+str(score) + ' ', 0, pg.Color("grey"), (80, 34, 80)),(50,490))
        if won == 0:
            screen.blit(font2.render(" Press P or Space to continue ", 0, pg.Color("grey"), (80, 34, 80)),(50,490))
    else:
        # in-game HUD: additive minimap in the top-right corner + status line
        size = len(minimap)
        surfmap = pg.surfarray.make_surface(np.flip(minimap).astype('uint8'))
        surfmap = pg.transform.scale(surfmap, (size*4, size*4))
        screen.blit(surfmap,(1280-size*4 - 65, 25), special_flags=pg.BLEND_ADD)
        fps = font.render(endmsg, 0, pg.Color("coral"))
        screen.blit(fps,(100,1))
    pg.display.update()
def animate(width, height, mod, move, posx, posy, posz, rot, rot_v, mr, mg, mb, lx, ly, lz,
            maph, exitx, exity, mapr, mapt, maps, pr, pg, pb, enx, eny, sx, sy, sx2, sy2,
            size, checker, count, fb, fg, fr, pause, endmsg, won, health, minimap, score, ani):
    """Play a 60-frame animation by re-tracing the scene with a moving z.

    Frame i is traced at ``posz + ani*i`` and drawn immediately, so ``ani``
    is the per-frame vertical camera increment and ``count`` the running
    frame counter.

    NOTE(review): the colour-plane parameter ``pg`` shadows the pygame
    module inside this function; only super_fast()/drawing() are called
    here so pygame is never needed directly, but renaming would be safer.
    """
    for i in range(60):
        rr, gg, bb = super_fast(width, height, mod, move, posx, posy, posz+ani*i, rot, rot_v, mr, mg, mb, lx, ly, lz,
                maph, exitx, exity, mapr, mapt, maps, pr, pg, pb, enx, eny, sx, sy, sx2, sy2,
                size, checker, count, fb, fg, fr)
        count += 1
        # nosplash defaults to True: animation frames use the in-game HUD path.
        drawing(rr, gg, bb, height, width, pause, endmsg, won, health, minimap, score)
def sfx():
    """Load all sound effects, the ambient track and the floor texture.

    Returns
    -------
    tuple
        (ambient, runfx, shotfx, killfx, respawnfx, successfx, failfx,
        fr, fg, fb) -- pygame Sound objects followed by the flattened
        per-channel floor-texture arrays.
    """
    ambient = pg.mixer.Sound('soundfx/HauntSilentPartner.mp3')
    ambient.set_volume(0.5)
    runfx = pg.mixer.Sound('soundfx/run.mp3')
    shotfx = pg.mixer.Sound('soundfx/slap.mp3')
    killfx = pg.mixer.Sound('soundfx/shutdown.mp3')
    respawnfx = pg.mixer.Sound('soundfx/respawn.mp3')
    successfx = pg.mixer.Sound('soundfx/success.mp3')
    failfx = pg.mixer.Sound('soundfx/fail.mp3')
    # The floor texture is also loaded here, despite the function's name:
    # split into R/G/B planes and flattened for the tracer.
    floor = pg.surfarray.array3d(pg.image.load('soundfx/textures.jpg'))
    fr, fg, fb = np.dsplit(floor,floor.shape[-1])
    fr, fg, fb = fr.flatten(), fg.flatten(), fb.flatten()
    return ambient, runfx, shotfx, killfx, respawnfx, successfx, failfx, fr, fg, fb
if __name__ == '__main__':
    # Initialise pygame plus the shared rendering globals used by drawing(),
    # then hand control to the game loop (main() is defined elsewhere in
    # this file).
    pg.init()
    font = pg.font.SysFont("Arial", 18)
    font2 = pg.font.SysFont("Impact", 48)
    screen = pg.display.set_mode((1280, 720))
    surfbg = pg.Surface((1280,720))
    main()
|
def database_insert(my_stmt):
    """Execute *my_stmt* against the local ``linuxquiztest`` MySQL database.

    Opens a connection, runs the statement on a cursor, commits, and always
    releases the cursor and connection -- the original version leaked the
    connection whenever ``execute()`` raised.

    Parameters
    ----------
    my_stmt : str
        A complete SQL statement (e.g. an INSERT) to execute.
    """
    try:
        cnx = mysql.connector.connect(user='root', password='nice_password',
                                      host='127.0.0.1', database='linuxquiztest')
    except mysql.connector.Error as err:
        if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
            print("Something is wrong with your user name or password")
        elif err.errno == errorcode.ER_BAD_DB_ERROR:
            print("Database does not exist")
        else:
            print(err)
    else:
        try:
            cursor = cnx.cursor()
            try:
                cursor.execute(my_stmt, params=None, multi=False)
                # Commit while the connection is still open so the statement
                # is actually persisted.
                cnx.commit()
            finally:
                cursor.close()
        finally:
            # Release the connection even when execute()/commit() fail.
            cnx.close()
|
from typing import List, Optional, Tuple
from spacy.tokens import Doc
from . import patterns
class Accents(object):
    """
    Normalises accents, using a same-length strategy.

    Parameters
    ----------
    accents : List[Tuple[str, str]]
        List of accentuated characters and their transcription.
    """

    def __init__(self, accents: Optional[List[Tuple[str, str]]]) -> None:
        if accents is None:
            accents = patterns.accents
        # Build both halves of the translation table explicitly: every
        # character in an accent group maps to the same replacement, so the
        # replacement is repeated once per character (same-length strategy).
        sources = []
        targets = []
        for accent_group, replacement in accents:
            sources.append(accent_group)
            targets.append(replacement * len(accent_group))
        self.translation_table = str.maketrans("".join(sources), "".join(targets))

    def __call__(self, doc: Doc) -> Doc:
        """
        Remove accents from spacy `NORM` attribute.

        Parameters
        ----------
        doc : Doc
            The spaCy `Doc` object.

        Returns
        -------
        Doc
            The document, with accents removed in `Token.norm_`.
        """
        table = self.translation_table
        for token in doc:
            token.norm_ = token.norm_.translate(table)
        return doc
|
#!/usr/bin/python3
'''
统计信息
'''
from data import load_all
from optparse import OptionParser
import sys,logging
from nltk.corpus import stopwords
from textblob import TextBlob
import nltk
from nltk import word_tokenize as wt
from nltk.stem import WordNetLemmatizer
from matplotlib import pyplot as plt
# Shared NLP resources: lemmatiser, stemmer and the NLTK stop-word set.
wnl = WordNetLemmatizer()
stemmer = nltk.stem.SnowballStemmer("english")
stop = set(stopwords.words())
# All New Concept English books, loaded once at import time.
corpus = load_all()
def clean_words(nce):
    """Tokenise *nce* and return the set of lemmatised, stemmed word forms.

    Curly quotes are normalised to ASCII apostrophes first so contractions
    tokenise consistently. Stemming is applied after lemmatisation to
    collapse remaining inflected variants.
    """
    nce = nce.replace('’', '\'')
    nce = nce.replace('‘', '\'')
    # Set comprehensions replace the original list-then-set round trips.
    lemmas = {wnl.lemmatize(word) for word in wt(nce)}
    return {stemmer.stem(word) for word in lemmas}
# Analyse text-related features of the corpus.
def text_rel():
    """Print per-book sentence counts, vocabulary sizes and shared-word counts.

    Book 1 keeps its stop words; later books have them removed before the
    vocabulary size is reported. Shared-word counts are computed on the
    un-stripped vocabularies.
    """
    # Sentence count per book.
    for indx, nce in enumerate(corpus):
        print("第%s册有%s个句子" % (indx + 1, len(nce)))
    print(50 * "=")
    # Vocabulary size per book.
    words = []
    for indx, nce in enumerate(corpus):
        nce = ' '.join(nce)
        nce = nce.lower()
        twords = clean_words(nce)
        # Stored before stop-word removal: the overlap pass below uses the
        # full vocabularies.
        words.append(twords)
        if indx == 0:
            print("第%s册有%s个单词" % (indx + 1, len(twords)))
        else:
            twords = twords - stop
            print("第%s册有%s个单词" % (indx + 1, len(twords)))
    print(50 * "=")
    # For each book, count words that also appear in any other book.
    for indx, twords in enumerate(words):
        other_words = set()
        for i, ano in enumerate(words):
            if i != indx:
                other_words |= ano
        # Set intersection replaces the original element-by-element count.
        count = len(twords & other_words)
        print("第%s册有%s个相同单词" % (indx + 1, count))
# Analyse sentence-length features.
def length_ana():
    """Plot a histogram of sentence lengths (in tokens) for each book.

    Two books share one figure, stacked as 2x1 subplots; all figures are
    shown together at the end.
    """
    for indx,nce in enumerate(corpus):
        result = []
        for sent in nce:
            tk = wt(sent)
            result.append(len(tk))
        print(result)
        # Books 1-2 go on figure 1, books 3-4 on figure 2, and so on.
        fi = int(indx/2)+1
        fig1 = plt.figure(fi)
        # "21x": 2 rows, 1 column, position alternating 1/2 per book.
        plt.subplot(int("21%s"%(indx%2+1)))
        plt.hist(result)
        plt.xlabel('new concept number %s'%(indx+1))
    plt.show()
if __name__ == '__main__':
    print(__doc__)
    parser = OptionParser()
    # -t/--task selects the analysis: "text" or "length".
    parser.add_option("-t", "--task", dest="task",help="测试方法")
    (options, args) = parser.parse_args()
    if options.task == "text":
        text_rel()
    elif options.task == "length":
        length_ana()
    else:
        # Unknown task: log (message is Chinese for "method error") and exit.
        logging.error("方法错误")
        sys.exit(1)
|
"""Multi-output models."""
from .chain import (
ClassifierChain,
MonteCarloClassifierChain,
ProbabilisticClassifierChain,
RegressorChain,
)
__all__ = [
"ClassifierChain",
"MonteCarloClassifierChain",
"ProbabilisticClassifierChain",
"RegressorChain",
]
|
# -*- coding: utf-8 -*-
import unittest
from unittest import TestCase
from sklearn.svm.classes import NuSVC
from tests.estimator.classifier.Classifier import Classifier
from tests.estimator.classifier.ExportedData import ExportedData
from tests.language.Java import Java
class NuSVCJavaTest(Java, Classifier, ExportedData, TestCase):
    """Java transpilation tests for sklearn's NuSVC classifier."""

    def setUp(self):
        super(NuSVCJavaTest, self).setUp()
        # Fixed random_state keeps the exported model deterministic.
        self.estimator = NuSVC(kernel='rbf', gamma=0.001, random_state=0)

    def tearDown(self):
        super(NuSVCJavaTest, self).tearDown()

    # The four combinations below would emit more Java source than can be
    # compiled in a test run, so each is skipped wholesale.
    @unittest.skip('The generated code would be too large.')
    def test_existing_features__binary_data__default(self):
        pass

    @unittest.skip('The generated code would be too large.')
    def test_existing_features__digits_data__default(self):
        pass

    @unittest.skip('The generated code would be too large.')
    def test_random_features__binary_data__default(self):
        pass

    @unittest.skip('The generated code would be too large.')
    def test_random_features__digits_data__default(self):
        pass
|
# Hyphens are not valid in Python identifiers, so the original line was a
# SyntaxError; a distribution named "pycounts-mr" is importable as the
# module name "pycounts_mr".
from pycounts_mr import pycounts_mr
|
#!/usr/bin/env python
"""Coding challenge to find all conflicts of a list of appointments."""
class Appointment:
    """
    An appointment.

    Parameters
    ----------
    name : str
        Describe what the appointment is about.
    start : int
    end : int
    """

    def __init__(self, name, start, end):
        # An appointment must not end before it starts.
        assert start <= end
        self.name = name
        self.start = start
        self.end = end

    def __lt__(self, other):
        """Order appointments by their start time."""
        return self.start < other.start

    def __str__(self):
        return self.name

    # repr and str are the same: just the appointment's name.
    __repr__ = __str__
def is_conflict(a, b):
    """
    Check if appointments a and b overlap.

    Parameters
    ----------
    a : Appointment
    b : Appointment

    Returns
    -------
    bool
        True if they overlap, otherwise False.
    """
    # Order the pair by start time, then the two overlap exactly when the
    # earlier one has not finished before the later one begins.
    earlier, later = (a, b) if a < b else (b, a)
    return earlier.end >= later.start
def get_conflicts(appointments):
    """
    Print all conflicts.

    Parameters
    ----------
    appointments : list
        List of Appointments
    """
    # Examine each unordered pair exactly once.
    for index, first in enumerate(appointments):
        remaining = appointments[index + 1:]
        for second in remaining:
            if is_conflict(first, second):
                print(f"{first} and {second} overlap.")
if __name__ == "__main__":
    """
    The following appointments get generated:
    A: ------
    B: ------
    C: ----
    D: ---
    """
    # Expected overlaps: A/B, A/C and B/C; D starts after everything else ends.
    appointments = []
    appointments.append(Appointment("A", 0, 10))
    appointments.append(Appointment("B", 5, 15))
    appointments.append(Appointment("C", 2, 8))
    appointments.append(Appointment("D", 18, 20))
    # Now you can apply some sweep line algorithm
    appointments = sorted(appointments)
    print(appointments)
    get_conflicts(appointments)
|
from __future__ import absolute_import, division, print_function
from mock import patch
from matplotlib import cm
from glue.core import Data, DataCollection
from ..subset_facet import SubsetFacet
# Patch target for the facet_subsets call made by the dialog's apply path.
patched_facet = patch('glue.dialogs.subset_facet.qt.subset_facet.facet_subsets')
class TestSubsetFacet(object):
    """Tests for the SubsetFacet dialog."""
    def setup_method(self, method):
        # Minimal fixture: one 3-point dataset plus one subset group.
        d = Data(x=[1, 2, 3])
        dc = DataCollection([d])
        self.collect = dc
        self.s = dc.new_subset_group()
    def test_limits(self):
        # Selecting a component populates vmin/vmax from the data range.
        s = SubsetFacet(self.collect)
        s.data = self.collect[0]
        s.component = self.collect[0].id['x']
        assert s.vmin == 1
        assert s.vmax == 3
    def test_get_set_cmap(self):
        # The dialog's default colormap is matplotlib's "cool".
        s = SubsetFacet(self.collect)
        assert s.cmap is cm.cool
    def test_apply(self):
        # _apply() delegates to facet_subsets with the dialog's settings.
        with patched_facet as p:
            s = SubsetFacet(self.collect)
            s.data = self.collect[0]
            s.component = self.collect[0].id['x']
            s._apply()
            p.assert_called_once_with(self.collect, s.component,
                                      lo=1, hi=3,
                                      steps=5, log=False)
|
"""
**********************************************************************************
© 2021 Arizona Board of Regents on behalf of the University of Arizona with rights
granted for USDOT OSADP distribution with the Apache 2.0 open source license.
**********************************************************************************
PushToServer.py
Created by: Niraj Vasant Altekar
University of Arizona
College of Engineering
This code was developed under the supervision of Professor Larry Head
in the Systems and Industrial Engineering Department.
**********************************************************************************
"""
# Import system libraries:
import os
import datetime
import json
import shutil
# Import 3rd party libraries:
import pysftp
# Import local modules
from V2XDataTransfer import V2XDataTransfer
from Logger import Logger
class PushToServer(V2XDataTransfer):
    """
    provides method for transfering the data from the intersection to the server.
    This class is intended to be deployed on intersections.
    """
    def __init__(self, server:dict, intersectionList:str, logger:Logger):
        """Store connection/config info and ensure remote directories exist.

        server : dict with "ip_address", "username" and "password" keys
        intersectionList : intersection configuration; only the first entry
            ("name" and "v2x-data_location") is used here
        logger : Logger used for progress messages
        """
        # Initialize the parent class
        super().__init__(server, intersectionList, logger)
        # Store the configuration information
        self.name = self.intersectionList[0]["name"]
        self.v2xDataLocation = self.intersectionList[0]["v2x-data_location"]
        self.serverIpAddress = server["ip_address"]
        self.serverUsername = server["username"]
        self.serverPassword = server["password"]
        # Disable host key verification.
        # NOTE(review): skipping host-key checks is a security risk; it is
        # only acceptable on a trusted network.
        self.cnopts = pysftp.CnOpts()
        self.cnopts.hostkeys = None
        # Create the directory structure on the remote machine if it does not exist:
        self.verify_or_create_remote_directory_structure()
    def verify_or_create_remote_directory_structure(self):
        """
        verifies if the correct directory structure is available on the server to store files
        pertaining to all data elements. The directory structure would look like the following:
        - RootDataDirectory
            - Intersection-1
                - msgCount
                - remoteBsm
                - spat
                - srm
                - ssm
        """
        # Remote data directory for this intersection, from the configuration.
        intersectionDirectory = self.serverDataDirectory + "/" + self.name
        try:
            # Establish an SFTP connection
            with pysftp.Connection(self.serverIpAddress, username=self.serverUsername, password=self.serverPassword, cnopts=self.cnopts) as sftp:
                self.logger.write("Logged in to server. IP address:" + self.serverIpAddress)
                # Ensure one subdirectory per data element (spat, srm, ...).
                for dataElement in self.dataElements:
                    dataElementDirectory = intersectionDirectory + "/" + dataElement
                    if not sftp.exists(dataElementDirectory):
                        # makedirs creates missing parents recursively.
                        sftp.makedirs(dataElementDirectory)
                        self.logger.write("Created remote path " + dataElementDirectory)
        except Exception as e:
            # If something is unsuccessful, print the message to the console
            print(e)
    def transfer_data(self):
        """
        from each of the data directories archived on the host machine, transfers the files to their
        respective remote paths using SFTP.

        Fixes over the original version: the per-directory chdir now uses an
        absolute path (the old relative chdir only worked for the first
        directory), the working directory is restored when a transfer fails,
        and only Exception subclasses are caught (never SystemExit or
        KeyboardInterrupt via a bare except).
        """
        # Formulate the archive directory path
        dataArchivePath = self.v2xDataLocation + "/archive"
        # Change the working directory to the archive directory
        os.chdir(dataArchivePath)
        # Get the list of all archived directories
        localArchivedDirectories = os.listdir()
        try:
            # Establish an SFTP connection
            with pysftp.Connection(self.serverIpAddress, username=self.serverUsername, password=self.serverPassword, cnopts=self.cnopts) as sftp:
                # For each archived directory on the host machine:
                for directory in localArchivedDirectories:
                    try:
                        # Absolute path: the working directory changes from
                        # iteration to iteration, so a relative chdir fails
                        # after the first directory.
                        os.chdir(os.path.join(dataArchivePath, directory))
                        # List all files available in the directory
                        dataFiles = os.listdir()
                        # Map each data element to its file (or None) in this directory.
                        for dataElement in self.dataElements:
                            filename = [file for file in dataFiles if dataElement in file]
                            if len(filename) > 0:
                                self.dataElementFiles[dataElement] = filename[0]
                            else: self.dataElementFiles[dataElement] = None
                        # Transfer every file that exists to its remote path.
                        for dataElement in self.dataElements:
                            if self.dataElementFiles[dataElement] is not None:
                                remotepath=self.serverDataDirectory + "/" + self.name + "/" + dataElement + "/" + self.dataElementFiles[dataElement]
                                sftp.put(self.dataElementFiles[dataElement],remotepath=remotepath)
                        # Reset the "dataElementFiles" dictionary
                        self.dataElementFiles = {"spat" : None, "srm": None, "remoteBsm": None, "ssm": None, "msgCount": None}
                        # Change working directory to original working directory:
                        os.chdir(self.workingDirectory)
                        # Remove the data directory from the host machine
                        shutil.rmtree((self.v2xDataLocation + "/archive/" + directory))
                    except Exception as e:
                        print("Failed to transfer data from " + directory+ " at:" + str(datetime.datetime.now()))
                        print(e)
                        # Restore the working directory so later iterations
                        # and callers are unaffected by a partial failure.
                        os.chdir(self.workingDirectory)
                # Else print on the console the success message
                print("Data transfer from " + self.name + " completed at: " + str(datetime.datetime.now()))
        # If SFTP connection can not be established with the remote machine, print the error message on the console
        except Exception: print("Failed to establish SFTP connection with server " + self.serverIpAddress + " at: " + str(datetime.datetime.now()))
if __name__ == "__main__":
    # Library module: no standalone behaviour when executed directly.
    pass
import unittest
from translator import french_to_english, english_to_french
class TestFrEn(unittest.TestCase):
    """French-to-English translation tests."""
    def test_french_to_english(self):
        # Calling with no argument should yield None rather than raising.
        self.assertIsNone(french_to_english())
        self.assertEqual(french_to_english("Bonjour"),"Hello")
class TestEnFr(unittest.TestCase):
    """English-to-French translation tests."""
    def test_english_to_french(self):
        # Calling with no argument should yield None rather than raising.
        self.assertIsNone(english_to_french())
        self.assertEqual(english_to_french("Hello"),"Bonjour")
# Run the suite only when executed directly; an unguarded unittest.main()
# would also fire (and call sys.exit) whenever this module is imported.
if __name__ == "__main__":
    unittest.main()
|
# coding=utf-8
from poco.sdk.interfaces.input import InputInterface
from poco.utils.simplerpc.utils import sync_wrapper
class StdInput(InputInterface):
    """Input proxy: forwards poco input actions to the remote RPC client."""
    def __init__(self, client):
        super(StdInput, self).__init__()
        self.client = client
    # Adjust the constructor's signature as needed
    # and update the corresponding call sites.
    @sync_wrapper
    def click(self, x, y):
        # All coordinate arguments are forwarded unchanged to the remote side.
        return self.client.call("Click", x, y)
    @sync_wrapper
    def swipe(self, x1, y1, x2, y2, duration):
        return self.client.call("Swipe", x1, y1, x2, y2, duration)
    @sync_wrapper
    def longClick(self, x, y, duration):
        return self.client.call("LongClick", x, y, duration)
    @sync_wrapper
    def keyevent(self, keycode):
        return self.client.call("KeyEvent", keycode)
    @sync_wrapper
    def scroll(self, direction='vertical', percent=1, duration=2.0):
        return self.client.call("Scroll", direction, percent, duration)
    @sync_wrapper
    def rclick(self, x, y):
        # Right click.
        return self.client.call("RClick", x, y)
    @sync_wrapper
    def double_click(self, x, y):
        return self.client.call("DoubleClick", x, y)
|
import logging
import argparse
import sys
import logging
from os import listdir
from os.path import isfile, join
import yaml
from get_logger import setup_logging
log = logging.getLogger(__name__)
setup_logging()
def get_all_config_files(folder):
    """Return the paths of all ``.yaml`` config files directly inside *folder*.

    Args:
        folder (string): path to the config folder

    Returns:
        list: full paths (``folder/name.yaml``) of the matching regular files
    """
    # A single comprehension replaces the original map/filter/lambda chain.
    return [join(folder, f) for f in listdir(folder)
            if isfile(join(folder, f)) and f.endswith(".yaml")]
def read_config_files(config_files):
    """reads all config files and returns dictionaries of devices and cables

    Args:
        config_files (list): list of config file paths

    Returns:
        config: dict{devices,cables}
    """
    devices = {}
    cables = []
    for path in config_files:
        log.debug("reading config file {}".format(path))
        with open(path) as stream:
            # The FullLoader parameter handles the conversion from YAML
            # scalar values to Python the dictionary format
            entries = yaml.load(stream, Loader=yaml.FullLoader)
        for section, content in entries.items():
            if section == "devices":
                devices.update(content)
            elif section == "cables":
                cables += content
    return {"devices": devices, "cables": cables}
def augment_lab_config(lab_config):
    """augment the configuration by
    - adding a "connected_to" attribute to each port which has a cable plugged in
    - adding a "cable" attribute referencing the cable on each such port

    Args:
        lab_config (dict): configuration with "devices" and "cables" keys

    Returns:
        dict: lab_config (mutated in place and returned)
    """
    for cable in lab_config["cables"]:
        # A cable has two symmetric endpoints; wiring them in a loop avoids
        # repeating the deep devices/ports lookup four times.
        for end, other in (("source", "destination"), ("destination", "source")):
            port = lab_config["devices"][cable[end]["device"]]["ports"][cable[end]["port"]]
            port["connected_to"] = cable[other]
            port["cable"] = cable
    return lab_config
def device_ordering(lab_config, device_types = ["server","tofino"], reverse=False):
    """Return device names grouped by type, each group sorted alphabetically.

    Args:
        lab_config (dict): lab configuration with a "devices" mapping
        device_types (list|str): type(s) selecting and ordering the groups
        reverse (bool): sort each group in descending order when True

    Returns:
        list: device names, one sorted group per requested type
    """
    if not isinstance(device_types, list):
        device_types = [device_types]
    ordering = []
    for wanted in device_types:
        matching = [name for name, dev in lab_config["devices"].items()
                    if dev["type"] == wanted]
        ordering.extend(sorted(matching, reverse=reverse))
    return ordering
def get_config(folder):
    """Locate, parse, merge and augment the lab configuration in *folder*."""
    log.info("searching config files in {}".format(folder))
    config_files = get_all_config_files(folder)
    log.debug("all config files: {}".format(config_files))
    config = read_config_files(config_files)
    log.info("lab configuration: {} devices and {} cables".format(len(config["devices"]), len(config["cables"])))
    # Wire up "connected_to"/"cable" attributes before handing the config out.
    return augment_lab_config(config)
from compas_fea.cad import rhino
import rhinoscriptsyntax as rs
# Author(s): Andrew Liew (github.com/andrewliew)
# Discretise
# Take the first object on the 'mesh_input' layer and discretise it onto the
# 'elset_mesh' layer with target edge length 0.05 and a 15 degree minimum angle.
guid = rs.ObjectsByLayer('mesh_input')[0]
rhino.discretise_mesh(mesh=guid, layer='elset_mesh', target=0.050, min_angle=15)
|
""""Remove duplicate column names from a file
reference: https://stackoverflow.com/questions/44778/how-would-you-make-a-comma-separated-string-from-a-list-of-strings
"""
import logging
import csv
from io import StringIO
from os.path import isfile
from datetime import datetime as dt
from tworaven_apps.utils.basic_err_check import BasicErrCheck
from tworaven_apps.utils.json_helper import json_dumps, json_loads
from tworaven_apps.utils.dict_helper import column_uniquify
from tworaven_apps.utils.basic_response import (ok_resp,
err_resp)
LOGGER = logging.getLogger(__name__)
class DuplicateColumnRemover(BasicErrCheck):
    """remove duplicate columns"""
    def __init__(self, source_path, rewrite=False):
        """Remove duplicate column names from a file

        source_path : path to a CSV file whose header row may contain
            duplicate column names
        rewrite : when True (and duplicates were found), rewrite the header
            row in place with uniquified names
        """
        self.source_path = source_path
        self.rewrite = rewrite
        self.orig_column_names = None   # header row as read from the file
        self.csv_dialect = None         # dialect detected by the csv reader
        self.updated_columns = None     # uniquified column names
        self.num_cols_renamed = 0       # how many columns had to be renamed
        self.success_msg = None
        self.column_change_needed = False
        # All work happens at construction time; callers check has_error()
        # on the finished instance.
        self.run_process()
    def run_process(self):
        """Run through steps"""
        if self.has_error():
            return
        if (not self.source_path) or (not isfile(self.source_path)):
            user_msg = f'File not found: {self.source_path}'
            self.add_err_msg(user_msg)
            return
        if not self.load_column_names():
            return
        if not self.format_column_names():
            return
        # Only touch the file when asked to AND a rename is actually needed.
        if self.rewrite and self.column_change_needed is True:
            self.rewrite_file_header()
    def format_column_names(self):
        """Format the list of column names"""
        if self.has_error():
            return False
        if not self.orig_column_names:
            user_msg = 'Original column names not retrieved'
            self.add_err_msg(user_msg)
            return False
        # Check for unique columns
        #
        col_info = column_uniquify(self.orig_column_names)
        if not col_info.success:
            self.add_err_msg(col_info.err_msg)
            return False
        self.updated_columns = col_info.result_obj['new_columns']
        self.num_cols_renamed = col_info.result_obj['num_cols_renamed']
        if self.num_cols_renamed == 0:  # Nothing to change!
            self.success_msg = 'All set. Column names are already unique'
            return True
        self.column_change_needed = True
        return True
    def load_column_names(self):
        """Load column names by reading 1st line of file"""
        if self.has_error():
            return False
        self.orig_column_names = None
        self.csv_dialect = None
        # Read in the header row
        #
        with open(self.source_path, newline='') as fh:
            reader = csv.reader(fh)
            # Keep the dialect so the rewritten header matches the original format.
            self.csv_dialect = reader.dialect
            #col_delimiter = reader.dialect.delimiter
            self.orig_column_names = next(reader)
        if not self.orig_column_names:
            self.add_err_msg('Failed to load original column names')
            return False
        return True
    def rewrite_file_header(self):
        """Add new header to the file

        NOTE(review): the return value (ok_resp) is ignored by run_process();
        also confirm BasicErrCheck provides add_error() -- other paths in
        this class report errors via add_err_msg().
        """
        if self.has_error():
            return
        if not self.column_change_needed:
            self.add_error('Column changes is not needed')
            return
        # ---------------------------------
        # Format a new first file line
        # ---------------------------------
        new_first_line = StringIO()
        writer = csv.writer(new_first_line, dialect=self.csv_dialect)
        writer.writerow(self.updated_columns)
        new_first_line_content = new_first_line.getvalue()  # \
        # + csv_dialect.lineterminator
        LOGGER.info('new_first_line_content: %s', new_first_line_content)
        # ---------------------------------
        # Replace original first line (ref: reddit)
        # ---------------------------------
        # The whole remainder of the file is read into memory, then written
        # back after the replacement header.
        with open(self.source_path, 'r+') as fh: #open in read / write mode
            # Read the file
            #
            fh.readline() #read the first line and throw it out
            file_data = fh.read() #read the rest
            #
            # Do some writing, e.g. new header row
            #
            fh.seek(0) #set the cursor to the top of the file
            fh.write(new_first_line_content) # write 1st line
            fh.write(file_data) #write the data back
            fh.truncate() #set the file size to the current size
        return ok_resp('All set. Columns updated')
    #column_uniquify
|
from utils import *
# Build the classic sudoku units: 9 rows, 9 columns and 9 3x3 squares.
row_units = [cross(r, cols) for r in rows]
column_units = [cross(rows, c) for c in cols]
square_units = [cross(rs, cs) for rs in ('ABC','DEF','GHI') for cs in ('123','456','789')]
unitlist = row_units + column_units + square_units
# Diagonal-sudoku variant: both main diagonals are units as well.
diagonal_units = [[rows[index] + cols[index] for index in range(0, len(rows))],[rows[index] + cols[-(index + 1)] for index in range(0, len(rows))]]
unitlist = unitlist + diagonal_units
# For each box: the units it belongs to, and its peers (all other boxes
# sharing at least one unit with it).
units = dict((s, [u for u in unitlist if s in u]) for s in boxes)
peers = dict((s, set(sum(units[s],[]))-set([s])) for s in boxes)
def naked_twins(values):
    """Apply the naked-twins strategy.

    In every unit, find two boxes sharing the same two-digit candidate
    string; those two digits can then be removed from every other box in
    that unit.
    """
    for unit in unitlist:
        unit_values = [values[box] for box in unit]
        # A naked twin: a 2-digit candidate string appearing in exactly two boxes.
        naked_twin_values = [value for value in unit_values if unit_values.count(value) == 2 and len(value) == 2]
        for box in unit:
            if values[box] in naked_twin_values:
                continue  # never strip digits from the twin boxes themselves
            for twin in naked_twin_values:
                for digit in twin:
                    values[box] = values[box].replace(digit, '')
    return values
def eliminate(values):
    """Remove every solved box's digit from all of that box's peers."""
    # Snapshot the solved boxes first; elimination mutates `values` in place.
    solved = [box for box in values if len(values[box]) == 1]
    for box in solved:
        digit = values[box]
        for peer in peers[box]:
            values[peer] = values[peer].replace(digit, '')
    return values
def only_choice(values):
    """Assign a digit to any box that is its unit's only candidate for it."""
    for unit in unitlist:
        for digit in cols:
            candidates = [box for box in unit if digit in values[box]]
            if len(candidates) == 1:
                # Exactly one box in this unit can hold the digit: fix it.
                values[candidates[0]] = digit
    return values
def reduce_puzzle(values):
    """Apply the constraint strategies repeatedly until no more progress.

    Returns the reduced values dict, or False when some box loses all of
    its candidates (i.e. this branch of the grid is contradictory).
    """
    stalled = False
    while not stalled:
        # Progress is measured by the number of solved boxes before/after a pass.
        solved_values_before = len([box for box in values.keys() if len(values[box]) == 1])
        values = eliminate(values)
        values = only_choice(values)
        values = naked_twins(values)
        solved_values_after = len([box for box in values.keys() if len(values[box]) == 1])
        stalled = solved_values_before == solved_values_after
        # A box with zero candidates means the grid is unsolvable from here.
        if len([box for box in values.keys() if len(values[box]) == 0]):
            return False
    return values
def search(values):
    """Depth-first search combined with constraint propagation.

    Returns the solved values dict, or False when this branch cannot be
    solved.
    """
    values = reduce_puzzle(values)
    if values is False:
        return False
    if all(len(values[s]) == 1 for s in boxes):
        return values
    # Branch on an unfilled box with the fewest candidates (MRV heuristic).
    n, s = min((len(values[s]), s) for s in boxes if len(values[s]) > 1)
    for value in values[s]:
        new_sudoku = values.copy()
        new_sudoku[s] = value
        attempt = search(new_sudoku)
        if attempt:
            return attempt
    # All candidate digits failed: report failure explicitly instead of
    # implicitly returning None, keeping the sentinel consistent with
    # reduce_puzzle (callers test `is False` / truthiness either way).
    return False
def solve(grid):
    """Solve *grid* (an 81-character string) and return the values dict."""
    return search(grid2values(grid))
if __name__ == "__main__":
    diag_sudoku_grid = '9.1....8.8.5.7..4.2.4....6...7......5..............83.3..6......9................'
    # Show the unsolved board, then solve and show the result.
    display(grid2values(diag_sudoku_grid))
    #display(val)
    #result = solve(values2grid(val))
    result = solve(diag_sudoku_grid)
    display(result)
    try:
        import PySudoku
        PySudoku.play(grid2values(diag_sudoku_grid), result, history)
    except SystemExit:
        pass
    except:
        # Best-effort visualisation only: any pygame problem is non-fatal.
        print('We could not visualize your board due to a pygame issue. Not a problem! It is not a requirement.')
|
actual_year = 2022
birth = 1974
my_name = "Adam Jurkiewicz"
print(f"Hi {my_name}, we'll try to compute your age ;-)")
# Age is approximated from years only; the birthday within the year is ignored.
age = actual_year - birth
if age >= 18:
    print(f"Oh, I see, {my_name} - you are an adult now.")
    print(f"You are {age} years old.")
else:
    print(f"You are young - {age} years old.")
# Naive gender guess based on the Polish convention that female first names
# end with "a"; "Barnaba" is handled as a known male exception.
if my_name.endswith("a"):
    print(f"I guess {my_name} - you are a woman.")
    if my_name.lower() == "barnaba":
        print(f"But your name: {my_name} is an exception - you are a man!")
else:
    print("You probably are a man...")
print("That is all - see you next time!")
####################################################################################
# The header list is obtained as a function result; we check whether the
# server announces itself as Apache.
import urllib.request as ureq
address = "https://abixedukacja.eu"
https_request = ureq.urlopen(address)
headers = https_request.headers.items()
print("#" * 30)
# NOTE(review): this assumes the "Server" header is always the second item
# of the response headers -- header order is not guaranteed; verify.
server_name = headers[1]
# At this point we see another compound type - the tuple.
# Documentation link: https://docs.python.org/3.8/library/stdtypes.html?highlight=tuple#tuple
print(f"Header is: {server_name} - type is: {type(server_name)} ")
if server_name[1] == "Apache":
    print("OK, this is Apache server")
else:
    print(f"Some strange server type: {server_name[1]}")
print("#" * 30)
# Another way: tuple membership also matches the header's value.
if "Apache" in server_name:
    print("OK, Apache server once again...")
else:
    print("Some exception.")
# -*- coding: utf-8 -*-
# @Time : 2018/12/14 9:11
# @Author : Xiao
import socket
import os
import struct
import json
# Address the file server listens on.
ip_port = ("127.0.0.1", 8083)
max_connect = 1  # listen() backlog: pending connections beyond this are refused
# Directories the server serves downloads from / stores uploads into.
send_path = r"/Users/yangyiyi/Documents/oldboy/module3/mySocket/socket_func/file_server/share"
recv_path = r"/Users/yangyiyi/Documents/oldboy/module3/mySocket/socket_func/file_server/share"
def send_file(conn, file_name):
    """Send a file from ``send_path`` over *conn*.

    Wire format: a 4-byte header length (struct "i"), a JSON header
    {file_size, file_name, md5}, then the raw file bytes.
    """
    file_path = os.path.join(send_path, file_name)
    file_size = os.path.getsize(file_path)
    header = {"file_size": file_size, "file_name": file_name, "md5": "123456"}
    header_bytes = bytes(json.dumps(header), encoding='utf-8')
    header_len_bytes = struct.pack("i", len(header_bytes))
    # sendall() guarantees the whole buffer goes out; plain send() may write
    # only part of it when the socket buffer is full, corrupting the stream.
    conn.sendall(header_len_bytes)
    conn.sendall(header_bytes)
    with open(file_path, "rb") as f:
        for line in f:
            conn.sendall(line)
def recv_file(conn, file_name):
    """Receive a file over *conn* and store it under ``recv_path``.

    Protocol: a 4-byte header length (struct "i"), a JSON header containing
    at least "file_size", then exactly file_size payload bytes.
    """
    file_name = os.path.basename(file_name)
    file_abspath = os.path.join(recv_path, file_name)
    header_len_bytes = conn.recv(4)  # 4-byte length of the JSON header
    header_len = struct.unpack("i", header_len_bytes)[0]
    # NOTE(review): recv(n) may return fewer than n bytes; small headers on a
    # loopback link usually arrive whole -- verify before remote deployment.
    header_str = conn.recv(header_len).decode("utf-8")
    # json.loads() lost its "encoding" keyword in Python 3.9; the payload is
    # already decoded above, so plain loads() is correct on every version.
    header = json.loads(header_str)
    file_size = header["file_size"]  # number of payload bytes to expect
    recv_size = 0
    with open(file_abspath, "wb") as f:
        while recv_size < file_size:  # keep reading until the payload is complete
            line = conn.recv(1024)
            if not line:
                break  # peer closed early; avoid spinning on b"" forever
            f.write(line)
            recv_size += len(line)
def file_fun(conn):
    """Serve one client: dispatch "get"/"put" commands until it disconnects."""
    methods = {"get": send_file, "put": recv_file}
    while True:
        header_len_bytes = conn.recv(4)  # 4-byte length of the JSON header
        if not header_len_bytes:
            break  # client closed the connection
        header_len = struct.unpack("i", header_len_bytes)[0]
        header_str = conn.recv(header_len).decode("utf-8")
        # json.loads() lost its "encoding" keyword in Python 3.9; the string
        # is already decoded, so plain loads() works on every version.
        header = json.loads(header_str)
        msg_size = header["msg_size"]  # size of the command message that follows
        rec = conn.recv(msg_size).decode("utf-8")
        print(rec)
        # Command format: "<get|put> <filename>"
        file_method = rec.split()[0].lower()
        file_name = rec.split()[1]
        methods[file_method](conn, file_name)
    conn.close()
def run():
    """Accept clients one at a time and serve their get/put file requests."""
    server_socket = socket.socket(family=socket.AF_INET, type=socket.SOCK_STREAM)
    server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server_socket.bind(ip_port)
    server_socket.listen(max_connect)
    print("starting....")
    try:
        while True:
            try:
                conn, client_addr = server_socket.accept()
                file_fun(conn)
            except Exception as e:
                # A failure while serving one client must not kill the server:
                # the original closed the *listening* socket here, which made
                # every subsequent accept() in the loop fail.
                print(e)
    finally:
        server_socket.close()
|
# coding: utf-8
# # Examples from the paper
#
# In this notebook, provide the codes used for illustration in the corresponding paper with all the supplementary code segments excluded from the paper due to space limitations.
# In[1]:
import scipy
import sklearn
import keras
import imblearn
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import smote_variants as sv
import imblearn.datasets as imb_datasets
import sklearn.datasets as sk_datasets
# Seed reused before every sampling step so the notebook is reproducible.
random_seed= 3
# In[2]:
# configuring pandas to print all columns
pd.set_option('display.max_colwidth', 1000)
pd.set_option('display.max_columns', 10)
pd.set_option('max_colwidth', 1000)
pd.set_option('expand_frame_repr', False)
# In[3]:
# printing package versions
print('numpy %s' % np.__version__)
print('imblearn %s' % imblearn.__version__)
print('scipy %s' % scipy.__version__)
print('sklearn %s' % sklearn.__version__)
print('keras %s' % keras.__version__)
print('smote_variants %s' % sv.__version__)
# defining some plotting functions
def plot(X, y, title, min_label, maj_label, filename):
    """Scatter-plot a binary dataset's first two features and save it.

    Minority-class points are red dots; majority-class points are black stars.
    """
    plt.figure(figsize=(4, 3))
    minority = y == min_label
    majority = y == maj_label
    plt.scatter(X[minority, 0], X[minority, 1], label='minority class', color='red', s=25)
    plt.scatter(X[majority, 0], X[majority, 1], label='majority class', color='black', marker='*', s=25)
    plt.xlabel('feature 0')
    plt.ylabel('feature 1')
    plt.title(title)
    plt.legend()
    plt.tight_layout()
    plt.savefig(filename)
    plt.show()
def plot_mc(X, y, title, label_0, label_1, label_2, filename):
    """Scatter-plot a three-class dataset's first two features and save it."""
    plt.figure(figsize=(4, 3))
    # One (label, marker-style) entry per class keeps the three scatter
    # calls from being written out by hand.
    class_specs = [
        (label_0, dict(label='class 0', color='red', s=25)),
        (label_1, dict(label='class 1', color='black', marker='*', s=25)),
        (label_2, dict(label='class 2', color='blue', marker='^', s=25)),
    ]
    for class_label, spec in class_specs:
        mask = y == class_label
        plt.scatter(X[mask, 0], X[mask, 1], **spec)
    plt.xlabel('feature 0')
    plt.ylabel('feature 1')
    plt.title(title)
    plt.legend()
    plt.tight_layout()
    plt.savefig(filename)
    plt.show()
# In[5]:
# setting the random seed
np.random.seed(random_seed)
# In[6]:
# sample code segment #0
# oversampling by OUPS and plotting
import smote_variants as sv
import imblearn.datasets as imb_datasets
# 'libras_move' is imbalanced; per the plot calls below, the minority class
# is labelled 1 and the majority class -1.
libras= imb_datasets.fetch_datasets()['libras_move']
X, y= sv.OUPS().sample(libras['data'], libras['target'])
# Plot the original data next to the OUPS-oversampled version.
plot(libras['data'], libras['target'], 'libras_move', 1, -1, 'libras_move.eps')
plot(X, y, 'libras_move oversampled by OUPS', 1, -1, 'libras_move_oups.eps')
# In[7]:
# setting the random seed
np.random.seed(random_seed)
# In[8]:
# sample code segment #1
# evaluating the performance of k neighbors classifier with oversampling
from sklearn.neighbors import KNeighborsClassifier
results= sv.cross_validate(dataset= libras, sampler= sv.OUPS(),
                            classifier= KNeighborsClassifier())
print(results.loc['auc'])
# In[9]:
# evaluating the performance of k neighbors classifier without oversampling
# (NoSMOTE is the do-nothing baseline sampler)
np.random.seed(random_seed)
results_wo= sv.cross_validate(dataset= libras, sampler= sv.NoSMOTE(),
                            classifier= KNeighborsClassifier())
# In[10]:
# printing the results
print(results_wo.loc['auc'])
|
import os
from . import TestTask
from .. import data
from .. import Structure, QeScfTask
from .. import QeWfnTask, Qe2BgwTask
# Note: The tests are redundant, because
# tests cannot be interdependent.
class TestQETasksMaker(TestTask):
    """Factory mixin constructing the Quantum ESPRESSO task chain.

    Every factory merges ``common_kwargs`` beneath the caller-supplied
    keyword arguments (caller values win) and fills in a default output
    location before instantiating the task.
    """

    _structure_fname = data.structure_GaAs

    # Common arguments for tasks.
    common_kwargs = {
        'prefix': 'GaAs',
        'pseudo_dir': data.pseudo_dir,
        'pseudos': data.pseudos_GaAs,
        'structure': Structure.from_file(_structure_fname),
        'ngkpt': [2, 2, 2],
        'kshift': [.5, .5, .5],
        'ecutwfc': 8.0,
        'nbnd': 9,
    }

    def get_scftask(self, **kwargs):
        """Construct a QeScfTask."""
        merged = dict(self.common_kwargs, **kwargs)
        merged.setdefault('dirname', os.path.join(self.tmpdir, 'Density'))
        return QeScfTask(**merged)

    def get_wfntask(self, scftask, **kwargs):
        """Construct a QeWfnTask reading the scf task's density."""
        merged = dict(self.common_kwargs, **kwargs)
        merged.setdefault('dirname', os.path.join(self.tmpdir, 'Wfn'))
        return QeWfnTask(
            charge_density_fname=scftask.charge_density_fname,
            data_file_fname=scftask.data_file_fname,
            **merged)

    def get_pw2bgwtask(self, wfntask, **kwargs):
        """Construct a Qe2BgwTask running in the wfn task's directory."""
        merged = dict(self.common_kwargs, **kwargs)
        merged.setdefault('wfn_fname', 'wfn.cplx')
        return Qe2BgwTask(dirname=wfntask.dirname, **merged)
class TestQETasks(TestQETasksMaker):
    """End-to-end execution tests for the QE task chain."""

    def _execute(self, *tasks):
        # Write, run and report each task in sequence, asserting completion.
        for task in tasks:
            task.write()
            task.run()
            task.report()
            self.assertCompleted(task)

    def test_scftask(self):
        """Test density calculation."""
        self._execute(self.get_scftask())

    def test_wfntask(self):
        """Test density and wavefunction calculation."""
        scftask = self.get_scftask()
        self._execute(scftask, self.get_wfntask(scftask))

    def test_pw2bgwtask(self):
        """Test density, wavefunction and pw2bgw conversion."""
        scftask = self.get_scftask()
        wfntask = self.get_wfntask(scftask)
        self._execute(scftask, wfntask, self.get_pw2bgwtask(wfntask))
|
import datetime
import json
import logging
import os

import allure
import pytest
import requests

from tests.utils import Utils
@allure.feature('测试部门功能')
class TestDepartment:
    """WeCom (企业微信) department-management API tests.

    All tests call the live qyapi.weixin.qq.com endpoints using the
    ``token`` pytest fixture for authentication.
    NOTE(review): ``allure`` is used as a decorator here but is not imported
    in this chunk's import block — confirm ``import allure`` exists.
    """

    @allure.testcase('测试用例:测试新增部门深度(字部门)')
    def test_create_depth(self, token):
        """Create a short chain of nested departments, each a child of the last."""
        parentid = 1
        for i in range(2):
            # Timestamp suffix keeps the department name unique across runs.
            data = {
                "name": "第九期_FrankWang_" + str(parentid)+ str(datetime.datetime.now().timestamp()),
                "parentid": parentid,
            }
            r = requests.post("https://qyapi.weixin.qq.com/cgi-bin/department/create",
                params={"access_token": token},
                json=data,
                # proxies={"https": "http://127.0.0.1:8080",
                # "http": "http://127.0.0.1:8080"},
                # verify=False
                ).json()
            logging.debug(r)
            # The next department is created under the one just returned.
            parentid = r["id"]
            assert r["errcode"]==0

    @allure.testcase('测试用例:测试新增部门')
    def test_create_name(self, token):
        """Create a single department under the root.

        NOTE(review): no assertion on ``errcode`` here, and the fixed name
        will collide on repeated runs — confirm whether that is intended.
        """
        data = {
            "name": "第九期_FrankWang",
            "parentid": 1,
        }
        logging.debug(data)
        r = requests.post("https://qyapi.weixin.qq.com/cgi-bin/department/create",
            params={"access_token": token},
            json=data
            ).json()
        logging.debug(r)

    @allure.testcase('测试用例:使用不同语言新增部门')
    @pytest.mark.parametrize("name", [
        "广州研发中心",
        "東京アニメーション研究所",
        "도쿄 애니메이션 연구소",
        "معهد طوكيو للرسوم المتحركة",
        "東京動漫研究所"
    ])
    def test_create_order(self, name, token):
        """Create departments with non-ASCII names (uniquified via Utils.udid)."""
        data = {
            "name": name+Utils.udid(),
            "parentid": 1,
            "order": 1,
        }
        r = requests.post("https://qyapi.weixin.qq.com/cgi-bin/department/create",
            params={"access_token": token},
            json=data
            ).json()
        # decrypt
        logging.debug(r)
        assert r["errcode"]==0

    @allure.testcase('测试用例:获取部门列表')
    def test_get(self, token):
        """Fetch and log the department list."""
        r = requests.get("https://qyapi.weixin.qq.com/cgi-bin/department/list",
            params={"access_token": token }
            ).json()
        logging.info(json.dumps(r, indent=2))

    @allure.testcase('测试用例:更新部门列表')
    def test_updata(self):
        # TODO: not implemented ("updata" is likely a typo for "update";
        # renaming would change the collected test id, so it is left as-is).
        pass

    @allure.testcase('测试用例:删除部门')
    def test_delete(self):
        # TODO: not implemented.
        pass
|
from django.apps import AppConfig
class QuickNotesConfig(AppConfig):
    """Django application configuration for the quick_notes app."""
    # Full Python path of the application this config belongs to.
    name = 'quick_notes'
|
from corc.cli.parsers.providers.oci.instance import (
start_instance_group as oci_start_instance,
)
from corc.cli.parsers.providers.ec2.instance import (
start_instance_group as ec2_start_instance,
)
def valid_instance_group(parser):
    """Attach the identity, OCI and EC2 start-instance argument groups."""
    for register in (instance_identity_group, oci_start_instance, ec2_start_instance):
        register(parser)
def instance_identity_group(parser):
    """Register the Instance Identity argument group on ``parser``.

    Adds ``--instance-id`` (default: empty string).
    """
    group = parser.add_argument_group(title="Instance Identity arguments")
    group.add_argument("--instance-id", default="")
|
import discord
from discord.ext import commands
import random
# Set the bot's presence on startup.
class Presence(commands.Cog):
    """Cog that sets the bot's streaming presence once it is ready."""

    def __init__(self, client):
        self.client = client

    @commands.Cog.listener()
    async def on_ready(self):
        # BUG FIX: the original passed ``discord.Streaming`` (an Activity
        # subclass, not a status value) as ``status``. change_presence
        # expects a discord.Status member; the streaming look comes from
        # the ``activity`` argument below.
        await self.client.change_presence(
            status=discord.Status.online,
            activity=discord.Streaming(
                name="l.help",
                url=f"https://www.twitch.tv/{random.choice(['jpvinnie', 'otomac'])}"))
def setup(client):
    # Entry point used by discord.py's extension loader (bot.load_extension).
    client.add_cog(Presence(client))
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import re
import subprocess
from sdk_update_common import *
import sys
import tempfile
"""Shim script for the SDK updater, to allow automatic updating.
The purpose of this script is to be a shim which automatically updates
sdk_tools (the bundle containing the updater scripts) whenever this script is
run.
When the sdk_tools bundle has been updated to the most recent version, this
script forwards its arguments to sdk_updater_main.py.
"""
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
SDK_UPDATE_MAIN = os.path.join(SCRIPT_DIR, 'sdk_update_main.py')
SDK_ROOT_DIR = os.path.dirname(SCRIPT_DIR)
NACLSDK_SHELL_SCRIPT = os.path.join(SDK_ROOT_DIR, 'naclsdk')
if sys.platform.startswith('win'):
NACLSDK_SHELL_SCRIPT += '.bat'
SDK_TOOLS_DIR = os.path.join(SDK_ROOT_DIR, 'sdk_tools')
SDK_TOOLS_UPDATE_DIR = os.path.join(SDK_ROOT_DIR, 'sdk_tools_update')
def MakeSdkUpdateMainCmd(args):
    """Returns a list of command line arguments to run sdk_update_main.

    Args:
      args: A list of arguments to pass to sdk_update_main.py
    Returns:
      A new list that can be passed to subprocess.call, subprocess.Popen, etc.
    """
    cmd = [sys.executable, SDK_UPDATE_MAIN]
    cmd.extend(args)
    return cmd
def UpdateSDKTools(args):
    """Run sdk_update_main to update the sdk_tools bundle.

    Args:
      args: The arguments to pass to sdk_update_main.py. We need to keep this
          to ensure sdk_update_main is called correctly; some parameters
          specify URLS or directories to use.
    Returns:
      True if the sdk_tools bundle was updated.
    """
    update_cmd = MakeSdkUpdateMainCmd(['--update-sdk-tools'] + args)
    proc = subprocess.Popen(update_cmd, stdout=subprocess.PIPE)
    output, _ = proc.communicate()
    if proc.returncode != 0:
        # Updating sdk_tools could fail for any number of reasons. Regardless,
        # it should be safe to try to run the user's command.
        return False
    # An up-to-date message in the output means nothing was updated.
    return 'sdk_tools is already up-to-date.' not in output
def RenameSdkToolsDirectory():
    """Rename sdk_tools_update to sdk_tools.

    The old sdk_tools directory is first parked in a temporary location so it
    can be restored if moving the new directory into place fails.
    """
    # FIX: create the temp dir before the try block; if mkdtemp itself raised
    # inside the try, the finally clause would hit an unbound 'tempdir'.
    tempdir = tempfile.mkdtemp()
    temp_sdktools = os.path.join(tempdir, 'sdk_tools')
    try:
        try:
            RenameDir(SDK_TOOLS_DIR, temp_sdktools)
        except Error:
            # The user is probably on Windows, and the directory is locked.
            sys.stderr.write('Cannot rename directory "%s". Make sure no programs are'
                ' viewing or accessing this directory and try again.\n' % (
                SDK_TOOLS_DIR,))
            sys.exit(1)
        try:
            RenameDir(SDK_TOOLS_UPDATE_DIR, SDK_TOOLS_DIR)
        except Error:
            # Failed for some reason, move the old dir back.
            try:
                RenameDir(temp_sdktools, SDK_TOOLS_DIR)
            except:
                # Not much to do here. sdk_tools won't exist, but sdk_tools_update
                # should. Hopefully running the batch script again will move
                # sdk_tools_update -> sdk_tools and it will work this time...
                # BUG FIX: the original message concatenation was missing the
                # spaces after "auto-updating." and before "try again".
                sys.stderr.write('Unable to restore directory "%s" while auto-updating. '
                    'Make sure no programs are viewing or accessing this directory and '
                    'try again.\n' % (SDK_TOOLS_DIR,))
                sys.exit(1)
    finally:
        RemoveDir(tempdir)
def main():
    """Update sdk_tools if possible, then forward the user's command."""
    args = sys.argv[1:]
    if not UpdateSDKTools(args):
        # No update happened; run the user's command directly.
        return subprocess.call(MakeSdkUpdateMainCmd(args))
    RenameSdkToolsDirectory()
    # Call the shell script, just in case this script was updated in the next
    # version of sdk_tools
    return subprocess.call([NACLSDK_SHELL_SCRIPT] + args)
if __name__ == '__main__':
    # Propagate the forwarded command's exit code to the caller.
    sys.exit(main())
|
#
# Copyright 2021 Jaroslav Chmurny
#
# This file is part of AWS Sandbox.
#
# AWS Sandbox is free software developed for educational purposes. It
# is licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicationlicationlicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from abc import ABC, abstractmethod
from typing import Dict, Sequence
from model import GameResult, GameStatistics, ResultType, Score, StandingsEntry
class Configuration(ABC):
    """Scoring rules: points awarded for a win/loss, per result type."""

    @abstractmethod
    def get_points_for_win(self, result_type: ResultType) -> int:
        """Return the points the winning team earns for ``result_type``."""
        pass

    @abstractmethod
    def get_points_for_loss(self, result_type: ResultType) -> int:
        """Return the points the losing team earns for ``result_type``."""
        pass
class _StandingsEntryCollector:
    """Accumulates a single team's game results into a standings entry."""

    def __init__(self, team: str, configuration: Configuration) -> None:
        self._team = team
        self._configuration = configuration
        # Win/loss tallies split by regulation vs. overtime.
        self._regulation_win_count = 0
        self._overtime_win_count = 0
        self._overtime_loss_count = 0
        self._regulation_loss_count = 0
        # Aggregate score and points.
        self._goals_against = 0
        self._goals_for = 0
        self._points = 0

    def add(self, game_result: GameResult) -> None:
        """Record one game result involving this collector's team."""
        if self._team == game_result.home_team:
            scored = game_result.home_team_goals
            conceded = game_result.visitor_team_goals
        elif self._team == game_result.visitor_team:
            scored = game_result.visitor_team_goals
            conceded = game_result.home_team_goals
        else:
            message = f'Unexpected game result ({game_result.home_team} vs {game_result.visitor_team}), collecting results for {self._team}.'
            raise ValueError(message)
        self._add(scored, conceded, game_result.type)

    def _add(self, goals_for: int, goals_against: int, result_type: ResultType) -> None:
        self._goals_against += goals_against
        self._goals_for += goals_for
        won = goals_for > goals_against
        in_regulation = result_type == ResultType.REGULATION
        if won:
            self._points += self._configuration.get_points_for_win(result_type)
            if in_regulation:
                self._regulation_win_count += 1
            else:
                self._overtime_win_count += 1
        else:
            self._points += self._configuration.get_points_for_loss(result_type)
            if in_regulation:
                self._regulation_loss_count += 1
            else:
                self._overtime_loss_count += 1

    @property
    def standings_entry(self) -> StandingsEntry:
        """The accumulated results as an immutable StandingsEntry."""
        statistics = GameStatistics(
            self._regulation_win_count,
            self._overtime_win_count,
            self._overtime_loss_count,
            self._regulation_loss_count
        )
        score = Score(self._goals_for, self._goals_against)
        return StandingsEntry(self._team, statistics, score, self._points)
class _StandingsCollector:
    """Dispatches game results to one per-team entry collector each."""

    def __init__(self, configuration: Configuration) -> None:
        self._teams: Dict[str, _StandingsEntryCollector] = {}
        self._configuration = configuration

    def add(self, game_result: GameResult) -> None:
        """Record the result for both teams involved in the game."""
        self._add(game_result.home_team, game_result)
        self._add(game_result.visitor_team, game_result)

    def _add(self, team: str, game_result: GameResult) -> None:
        # Lazily create the collector the first time a team is seen.
        collector = self._teams.get(team)
        if collector is None:
            collector = _StandingsEntryCollector(team, self._configuration)
            self._teams[team] = collector
        collector.add(game_result)

    @property
    def standings_entries(self) -> Sequence[StandingsEntry]:
        """Entries for all teams, in first-seen order."""
        return [collector.standings_entry for collector in self._teams.values()]
class StandingsCalculator:
    """Public facade: feed in game results, then compute sorted standings."""

    def __init__(self, configuration: Configuration) -> None:
        self._collector = _StandingsCollector(configuration)

    def add(self, game_result: GameResult) -> None:
        """Record one game result."""
        self._collector.add(game_result)

    def add_all(self, game_result: Sequence[GameResult]) -> None:
        """Record a sequence of game results."""
        for single_result in game_result:
            self.add(single_result)

    def calculate_standings(self) -> Sequence[StandingsEntry]:
        """Return all entries sorted best-first (relies on StandingsEntry ordering)."""
        return sorted(self._collector.standings_entries, reverse=True)
|
import matplotlib.pyplot as plt
import numpy as np
from _pyrobo import Quaternion
def plot(
    self,
    detached=False,
    axis_obj=None,
    rgb_xyz=("r", "g", "b"),
    xlim=(-2, 2),
    ylim=(-2, 2),
    zlim=(-2, 2),
    scale_factor=1.0,
    view_angle=None,
):
    """Plot the quaternion's rotated basis vectors in 3D.

    FIX: the default arguments were mutable lists (a classic Python
    pitfall); they are now tuples. They are only ever indexed, so this is
    fully backward compatible.

    Args:
        detached: draw on the caller-supplied ``axis_obj`` instead of an
            internally created 3D axes.
        axis_obj: externally provided matplotlib 3D axes (detached mode).
        rgb_xyz: colours for the x, y and z basis vectors.
        xlim/ylim/zlim: axis limits (attached mode only).
        scale_factor: length scaling applied to the basis vectors.
        view_angle: optional (elevation, azimuth) pair — TODO confirm units
            are degrees as matplotlib's view_init expects.
    """
    if detached:
        return __plot_detached(
            self, axis_obj, rgb_xyz=rgb_xyz, scale_factor=scale_factor
        )
    return __plot_attached(
        self,
        xlim=xlim,
        ylim=ylim,
        zlim=zlim,
        rgb_xyz=rgb_xyz,
        scale_factor=scale_factor,
        view_angle=view_angle,
    )
def __plot_attached(self, xlim, ylim, zlim, rgb_xyz, scale_factor, view_angle):
    """
    Plots the quaternion on internally provided matplotlib axes and shows
    the resulting figure (blocking, depending on the matplotlib backend).
    """
    axis_obj = plt.axes(projection="3d")
    __plot_axes(self, axis_obj, rgb_xyz, scale_factor)
    axis_obj.set_xlim3d(xlim[0], xlim[1])
    axis_obj.set_ylim3d(ylim[0], ylim[1])
    axis_obj.set_zlim3d(zlim[0], zlim[1])
    axis_obj.set_xlabel("x")
    axis_obj.set_ylabel("y")
    axis_obj.set_zlabel("z")
    if view_angle is not None:
        # view_angle is passed as (elevation, azimuth) to view_init.
        axis_obj.view_init(view_angle[0], view_angle[1])
    plt.show()
def __plot_detached(self, axis_obj, rgb_xyz, scale_factor):
    """
    Plots the axes on externally provided matplotlib axes (the caller owns
    figure setup, limits and display).
    """
    __plot_axes(self, axis_obj, rgb_xyz, scale_factor)
def __plot_axes(self, axis_obj, rgb_xyz, scale_factor):
    """
    Plot the 3 basis vectors in 3D space rotated by the quaternion.

    FIX: the original wrapped the entire body in ``except AttributeError``
    and re-raised "axis_obj is None", which masked *unrelated* attribute
    errors (e.g. from the quaternion multiplication). Now the None case is
    checked explicitly up front with the same historical message, and all
    other errors propagate unchanged.
    """
    if axis_obj is None:
        raise AttributeError("axis_obj is None")
    origin = np.array([0, 0, 0])
    x_axis = np.array([1, 0, 0])
    y_axis = np.array([0, 1, 0])
    z_axis = np.array([0, 0, 1])
    # rotate all the axes by the quaternion (scaled first)
    x_axis = self * (scale_factor * x_axis) + origin
    y_axis = self * (scale_factor * y_axis) + origin
    z_axis = self * (scale_factor * z_axis) + origin
    oX, oY, oZ = origin[0], origin[1], origin[2]
    # Draw each rotated basis vector from the origin in its own colour.
    for axis, color in ((x_axis, rgb_xyz[0]), (y_axis, rgb_xyz[1]), (z_axis, rgb_xyz[2])):
        axis_obj.plot([oX, axis[0]], [oY, axis[1]], [oZ, axis[2]], color)
# Bind the method to the quaternion class as a pure python implementation
# (monkey-patch: instances of the extension type Quaternion gain .plot()).
Quaternion.plot = plot
|
"""
editor.py: editor functions
"""
import time
import re
from keys import EditorKeys
from utils import getch, is_ctrl, ctrl_key, pexit, pprint, get_terminal_size, \
convert_rows_to_string, convert_string_to_rows
from syntax import syntax
def init():
    """Initialise all editor globals: cursor, viewport, buffer, search state."""
    global screen_rows, screen_cols, cx, cy, \
        file_loaded, file_rows, row_offset, column_offset, \
        file_name, file_type, status_message, status_message_time, \
        dirty, quit_times, last_match, direction
    cx, cy = 0, 0  # curr cursor location
    screen_rows, screen_cols = get_terminal_size()
    screen_rows -= 2  # status and message bar
    file_loaded = False
    file_type = None  # file extension; selects syntax highlighting rules
    file_rows = []  # buffer contents, one string per line
    row_offset = 0  # row coefficient
    column_offset = 0  # col coefficient
    file_name = None
    status_message = ""
    status_message_time = 0
    dirty = False  # indicate that file is modified
    quit_times = 3  # number of times to press quit for closing dirty files
    last_match = (0, -1)  # last match index (row, col)
    direction = 1  # default search direction
def reset_dirty():
    """Mark the buffer clean and restore the CTRL-q quit countdown."""
    global dirty, quit_times
    dirty, quit_times = False, 3
""" input """
def raw_read():
    """Read one keypress, decoding terminal escape sequences to EditorKeys.

    Returns an EditorKeys member for recognised special keys, otherwise the
    raw character (the original ESC is returned for unknown sequences).
    """
    c = getch()
    if c == chr(127):
        return EditorKeys.BACKSPACE
    if c == '\t':
        return EditorKeys.TAB_KEY
    if c != '\x1b':  # not an escape sequence: plain character
        return c
    c1 = getch()
    c2 = getch()
    if (not c1) or (not c2):
        return c
    if c1 == '[':
        if '0' <= c2 <= '9':
            # VT-style sequence: ESC [ <digit> ~
            c3 = getch()
            if not c3:
                return c
            if c3 == '~':
                vt_keys = {
                    '1': EditorKeys.HOME_KEY,
                    '3': EditorKeys.DEL_KEY,
                    '4': EditorKeys.END_KEY,
                    '5': EditorKeys.PAGE_UP,
                    '6': EditorKeys.PAGE_DOWN,
                    '7': EditorKeys.HOME_KEY,
                    '8': EditorKeys.END_KEY,
                }
                if c2 in vt_keys:
                    return vt_keys[c2]
        else:
            # xterm-style sequence: ESC [ <letter>
            letter_keys = {
                'A': EditorKeys.ARROW_UP,
                'B': EditorKeys.ARROW_DOWN,
                'C': EditorKeys.ARROW_RIGHT,
                'D': EditorKeys.ARROW_LEFT,
                'H': EditorKeys.HOME_KEY,
                'F': EditorKeys.END_KEY,
            }
            if c2 in letter_keys:
                return letter_keys[c2]
    elif c1 == 'O':
        # Alternate Home/End encoding: ESC O H / ESC O F
        if c2 == 'H':
            return EditorKeys.HOME_KEY
        if c2 == 'F':
            return EditorKeys.END_KEY
    return c
def read_key():
    """Read one keypress and dispatch it to the matching editor action."""
    global screen_rows, screen_cols, cx, cy, row_offset, file_rows, dirty, quit_times
    c = raw_read()
    if c == ctrl_key('q'):
        # Require repeated CTRL-q presses before discarding unsaved changes.
        if dirty and quit_times > 0:
            set_status_message("WARNING: File has unsaved changes. "
                "Press CTRL-q %d more time(s) to quit" % quit_times)
            quit_times -= 1
        else:
            pexit()
    elif c == ctrl_key('s'):
        save_file()
    elif c == ctrl_key('f'):
        search()
    elif c in ('\r', '\n'):
        insert_new_line()
    elif c in (EditorKeys.ARROW_UP,
            EditorKeys.ARROW_LEFT,
            EditorKeys.ARROW_RIGHT,
            EditorKeys.ARROW_DOWN):
        move_cursor(c)
    elif c in (EditorKeys.PAGE_UP,
            EditorKeys.PAGE_DOWN):
        # Page moves are implemented as a screenful of single-row moves.
        if c == EditorKeys.PAGE_UP:
            cy = row_offset
        times = screen_rows
        while times > 0:
            move_cursor(EditorKeys.ARROW_UP if c == EditorKeys.PAGE_UP
                else EditorKeys.ARROW_DOWN)
            times -= 1
    elif c == EditorKeys.HOME_KEY:
        cx = 0
    elif c == EditorKeys.END_KEY:
        if cy < len(file_rows):
            cx = len(file_rows[cy])
    elif c in (EditorKeys.BACKSPACE, EditorKeys.DEL_KEY, ctrl_key('h')):
        # DEL removes the char under the cursor: step right, then backspace.
        if c == EditorKeys.DEL_KEY:
            move_cursor(EditorKeys.ARROW_RIGHT)
        delete_char()
    elif c in (ctrl_key('l'), '\x1b'):
        # TODO
        pass
    elif c == EditorKeys.TAB_KEY:
        # Tabs are expanded to four spaces.
        for _ in range(4):
            insert_char(' ')
    else:
        insert_char(c)
""" screen """
def scroll_editor():
    """Clamp the row/column offsets so the cursor stays inside the viewport."""
    global cy, row_offset, screen_rows, \
        cx, column_offset, screen_cols
    # Cursor above the window: scroll up to it.
    if cy < row_offset:
        row_offset = cy
    # Cursor below the window: scroll down so it sits on the last row.
    if cy >= (row_offset + screen_rows):
        row_offset = cy - screen_rows + 1
    # Same logic horizontally.
    if cx < column_offset:
        column_offset = cx
    if cx >= (column_offset + screen_cols):
        column_offset = cx - screen_cols + 1
def refresh_screen():
    """Redraw the terminal: text rows, status bar, message bar, cursor."""
    scroll_editor()
    pprint("\x1b[?25l")  # hide cursor
    pprint("\x1b[2J")  # clear entire screen
    pprint("\x1b[H")  # reposition cursor
    draw_rows()
    draw_status_bar()
    draw_message_bar()
    update_cursor()
    pprint("\x1b[?25h")  # show cursor
def draw_rows():
    """Render the visible slice of the buffer, plus the welcome banner."""
    global screen_rows, screen_cols, file_loaded, file_rows, row_offset, column_offset, dirty
    welcome_message = "peditor -- welcome"
    for row in range(screen_rows):
        file_row = row + row_offset  # buffer line shown on this screen row
        if file_row < len(file_rows):
            # Show only the horizontally visible window of the line.
            render_row(file_rows[file_row][column_offset:column_offset+screen_cols])
        if row == screen_rows//3 and not file_loaded and not dirty:
            # Centre the welcome banner a third of the way down an empty editor.
            pad_string = " "*((screen_cols - len(welcome_message)) // 2)
            pprint(pad_string, welcome_message)
        pprint("\n")
def draw_status_bar():
    """Draw the inverted-colour status bar: name, line count, dirty flag, position."""
    global file_name, screen_cols, file_rows, cy, dirty, file_type
    pprint("\x1b[7m")  # invert colors
    s = file_name if file_name else "[No Name]"
    # Left side: truncated file name, line count, modified marker.
    left = "%s - %d lines %s" % (s[0:20],
        len(file_rows),
        "(modified)" if dirty else "")
    # Right side: current line / total lines.
    right = "%d/%d" % (cy + 1, len(file_rows))
    pad = " "*(screen_cols-len(left)-len(right))
    display_string = left + pad + right
    pprint(display_string[0:screen_cols])
    pprint("\x1b[m")  # restore colors
    pprint("\n")
def set_status_message(*args):
    """Set the message-bar text and remember when it was set (for expiry)."""
    global status_message, status_message_time
    status_message = " ".join(args)
    status_message_time = time.time()
def draw_message_bar():
    """Show the status message for up to five seconds after it was set."""
    global status_message, status_message_time, screen_cols
    pprint("\x1b[K")  # clear the line
    if (time.time() - status_message_time) < 5:
        pprint(status_message[0:screen_cols])
def prompt(message, callback):
    """Prompt for a line of input in the message bar.

    ``message`` must contain a ``%s`` placeholder showing the buffer as it is
    typed. ``callback`` (may be None) is invoked as callback(buffer, key) on
    every keypress, which lets the caller implement incremental behaviour
    (see search()). Returns the entered string, or None on CTRL-c.
    """
    buf = ""
    while True:
        set_status_message(message % buf)
        refresh_screen()
        c = raw_read()
        if c == EditorKeys.BACKSPACE:
            buf = buf[0:-1]
        elif c == ctrl_key('c'):
            # Cancelled: clear the bar, give the callback a last look, bail.
            set_status_message("")
            if callback:
                callback(buf, c)
            return None
        elif c in ('\r', '\n'):
            # Accept only non-empty input on Enter.
            if len(buf) != 0:
                set_status_message("")
                if callback:
                    callback(buf, c)
                return buf
        elif type(c) != EditorKeys and not is_ctrl(c) and ord(c) < 128:
            # Plain printable ASCII: append to the buffer.
            buf += c
        if callback:
            callback(buf, c)
""" syntax highlighting """
def render_row(row):
    """Print one visible line, colourising tokens when the file type is known."""
    global file_type
    # Split into identifier-like tokens and single-character separators.
    tokens = re.split(r'([^A-Za-z0-9_-]?)', row)
    comment = False
    # NOTE(review): ``string`` is never set to True anywhere in this function,
    # so the green string-highlighting branch is currently unreachable —
    # string literal support appears unfinished.
    string = False
    for token in tokens:
        if file_type and file_type in syntax.keys():
            if comment:
                # Everything after the comment marker stays comment-coloured.
                printf(token, color='red')
            elif string:
                printf(token, color='green')
            elif token == syntax[file_type]["comment"]:
                printf(token, color='red')
                comment = True
            elif token in syntax[file_type]["keywords"]:
                printf(token, color='yellow')
            elif token.isdigit():
                printf(token, color='blue')
            else:
                printf(token)
        else:
            # Unknown file type: no highlighting at all.
            pprint(token)
def printf(s, color=None):
    """Print ``s``, optionally wrapped in an ANSI foreground colour."""
    # Map colour names to ANSI 3x foreground codes; unknown names fall back
    # to code 0, matching the original behaviour.
    ansi_codes = {'red': 1, 'green': 2, 'yellow': 3, 'blue': 4}
    if color:
        code = ansi_codes.get(color, 0)
        pprint("%s%s%s" % ('\x1b[3%dm' % code, s, '\x1b[39m'))
    else:
        pprint(s)
""" search """
def search():
    """Interactive incremental search; restores the view when cancelled."""
    global cx, cy, row_offset, column_offset
    # save the values so a cancelled search returns to the same view
    tcx = cx
    tcy = cy
    t_row_offset = row_offset
    t_column_offset = column_offset
    query = prompt("Search: %s (CTRL-c to cancel)", search_callback)
    if not query:
        # restore
        cx = tcx
        cy = tcy
        row_offset = t_row_offset
        column_offset = t_column_offset
def search_callback(query, char):
    """Incremental-search driver invoked by prompt() on every keypress.

    Arrow keys set the search direction; any typed character restarts the
    search from the top; Enter/CTRL-c reset the saved state. State used:
    last_match[0] is the row to search, last_match[1] the starting column,
    and direction decides which part of the row is scanned.
    """
    global cx, cy, column_offset, row_offset, screen_cols, file_rows, last_match, direction
    if char in ('\r', '\n', ctrl_key('c')):
        last_match = (0, -1)
        direction = 1
        return
    elif char in (EditorKeys.ARROW_RIGHT, EditorKeys.ARROW_DOWN):
        direction = 1
    elif char in (EditorKeys.ARROW_LEFT, EditorKeys.ARROW_UP):
        direction = -1
    else:  # typed characters restart the search
        last_match = (0, -1)
        direction = 1
    if last_match == (0, -1):
        direction = 1
    curr = last_match[0]
    counter = 0
    while True:
        if counter == len(file_rows)-1:
            break
        # Wrap the row index at either end of the file.
        if curr == -1:
            curr = len(file_rows) - 1
        elif curr == len(file_rows):
            curr = 0
        row = file_rows[curr]
        off = 0
        if direction == 1:
            s = row[last_match[1]+1:]
            idx = s.lower().find(query.lower())
            off = last_match[1]+1  # idx is relative to the sliced row
        else:
            s = row[0:last_match[1]]
            idx = s.lower().rfind(query.lower())
        # BUG FIX: was ``idx > 0``. str.find/rfind return -1 for "not found";
        # 0 is a valid hit, so matches at column 0 were silently skipped.
        if idx >= 0:
            last_match = (curr, idx+off)
            cy = curr
            cx = last_match[1]
            # adjust offsets so the match is on screen
            if (cx - column_offset) > (screen_cols - 5):
                column_offset = cx
            row_offset = cy
            break
        else:
            curr += direction
            counter += 1
            # Reset the column so the next row is searched from its edge.
            last_match = (last_match[0], -1)
""" editor """
def insert_char_at_row(row, at, c):
    """Insert character ``c`` into line ``row`` at column ``at`` (clamped)."""
    global file_rows, dirty
    line = file_rows[row]
    # Out-of-range columns append at the end of the line.
    if not (0 <= at <= len(line)):
        at = len(line)
    file_rows[row] = line[:at] + c + line[at:]
    dirty = True
def insert_char(c):
    """Insert ``c`` at the cursor, materialising the trailing virtual line."""
    global cx, cy, file_rows
    if cy == len(file_rows):
        # Cursor sits one past the last line; create it first.
        file_rows.append("")
    insert_char_at_row(cy, cx, c)
    cx += 1
def delete_char_at_row(row, at):
    """Remove the character at column ``at`` of line ``row`` (ignore out-of-range)."""
    global file_rows, dirty
    line = file_rows[row]
    if 0 <= at < len(line):
        file_rows[row] = line[:at] + line[at+1:]
        dirty = True
def delete_char():
    """Delete the character left of the cursor, joining lines at column 0."""
    global cx, cy, file_rows
    if cy == len(file_rows):
        # Cursor on the virtual line past the end: nothing to delete.
        return
    if cy == 0 and cx == 0:
        # Start of the file: nothing to the left.
        return
    if cx > 0:
        delete_char_at_row(cy, cx - 1)
        cx -= 1
    else:
        # Column 0: merge the current line into the previous one and place
        # the cursor at the join point.
        cx = len(file_rows[cy-1])
        file_rows[cy-1] += file_rows[cy]
        delete_row(cy)
        cy -= 1
def delete_row(at):
    """Remove line ``at`` from the buffer (no-op when out of range)."""
    global file_rows, dirty
    if 0 <= at < len(file_rows):
        file_rows = file_rows[:at] + file_rows[at+1:]
        dirty = True
def insert_row(at, s):
    """Insert line ``s`` before index ``at`` (no-op when out of range)."""
    global file_rows, dirty
    if 0 <= at <= len(file_rows):
        file_rows = file_rows[:at] + [s] + file_rows[at:]
        dirty = True
def insert_new_line():
    """Split the current line at the cursor (Enter-key behaviour)."""
    global cx, cy, file_rows
    if cx == 0:
        # At the start of a line: just push an empty line above it.
        insert_row(cy, "")
    else:
        # Move the text right of the cursor onto a new following line.
        tail = file_rows[cy][cx:]
        insert_row(cy + 1, tail)
        file_rows[cy] = file_rows[cy][:cx]
    cx = 0
    cy += 1
""" cursor """
def move_cursor(c):
    """Move the cursor one step, wrapping across line ends where possible."""
    global cx, cy, screen_rows, screen_cols, row_offset, column_offset, file_rows
    # Current line, or None when the cursor is past the end of the buffer.
    row = None if cy >= len(file_rows) else file_rows[cy]
    if c == EditorKeys.ARROW_UP:
        if cy != 0:
            cy -= 1
    elif c == EditorKeys.ARROW_DOWN:
        # NOTE(review): DOWN stops at the last line (len-1) while RIGHT below
        # allows wrapping to cy == len(file_rows) — confirm the asymmetry is
        # intentional.
        if cy < len(file_rows) - 1:
            cy += 1
    elif c == EditorKeys.ARROW_LEFT:
        if cx != 0:
            cx -= 1
        elif cy > 0:
            # Wrap to the end of the previous line.
            cy -= 1
            cx = len(file_rows[cy])
    elif c == EditorKeys.ARROW_RIGHT:
        if row and cx < len(file_rows[cy]):
            cx += 1
        elif row and cy < len(file_rows):
            # Wrap to the start of the next line.
            cy += 1
            cx = 0
    # Clamp cx to the length of the (possibly different) current line.
    row = "" if cy >= len(file_rows) else file_rows[cy]
    if cx > len(row):
        cx = len(row)
def update_cursor():
    """Move the terminal cursor to the on-screen position of (cx, cy)."""
    global cx, cy, row_offset, column_offset
    # ANSI cursor positioning is 1-based and relative to the viewport offsets.
    pprint("\x1b[%d;%dH" % (cy - row_offset + 1, cx - column_offset + 1))
""" file handling """
def load_file(filename):
    """Load ``filename`` into the buffer; its extension selects highlighting."""
    global file_loaded, file_rows, file_name, file_type
    try:
        # NOTE(review): mode 'a+' creates the file when it does not exist, so
        # the IOError branch below only fires for e.g. permission errors —
        # confirm the error message ("file doesn't exist") matches intent.
        with open(filename, 'a+') as file:
            file.seek(0, 0)  # 'a+' positions at EOF; rewind before reading
            file_rows = convert_string_to_rows(file.read())
            file_loaded = True
            file_name = filename
            file_type = file_name.split(".")[-1]
    except IOError:
        pexit("error opening %s: file doesn't exist or cannot be opened.\n" % filename)
    reset_dirty()
def save_file():
    """Write the buffer to disk, prompting for a name if none is set."""
    global file_loaded, file_rows, file_name, file_type
    if not file_name:
        file_name = prompt("Save as: %s (CTRL-c to cancel)", None)
        if not file_name:
            # Prompt was cancelled: do not write anything.
            set_status_message("Save aborted")
            return
    try:
        with open(file_name, 'w+') as file:
            text = convert_rows_to_string(file_rows)
            file.write(text)
            file_loaded = True
            file_type = file_name.split(".")[-1]
            set_status_message("%d bytes written to disk." % len(text))
    except IOError as e:
        set_status_message("Error writing to %s\n - %s" % (file_name, str(e)))
    reset_dirty()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @FileName :functional.py
# @Author :Lowell
# @Time :2022/3/30 09:00
import copy
import operator
# Sentinel marking "wrapped object not initialised yet"; a unique object so
# that None remains a legitimate wrapped value.
empty = object()
def new_method_proxy(func):
    """Build a method that forwards ``func`` to the lazily-created wrapped object."""
    def inner(self, *args):
        wrapped = self._wrapped
        if wrapped is empty:
            # First access: let the subclass build the real object.
            self._setup()
            wrapped = self._wrapped
        return func(wrapped, *args)
    # Marker consumed by LazyObject.__getattribute__: this attribute should
    # not mask the wrapped object's own attribute.
    inner._mask_wrapped = False
    return inner
class LazyObject:
    """
    Base class for delaying instantiation of a wrapped object.

    Subclasses customise the instantiation via _setup(); when no custom
    behaviour is needed, use SimpleLazyObject instead.
    """

    # Avoid infinite recursion in __init__ (#19456).
    _wrapped = None

    def __init__(self):
        # Subclasses overriding __init__ will probably also need to override
        # __copy__ and __deepcopy__.
        self._wrapped = empty

    def __getattribute__(self, name):
        if name == "_wrapped":
            # Avoid recursion when fetching the wrapped object itself.
            return super().__getattribute__(name)
        value = super().__getattribute__(name)
        # If the attribute is a proxy method (see new_method_proxy), raise so
        # that __getattr__ is used and the wrapped object's attribute wins.
        if not getattr(value, "_mask_wrapped", True):
            raise AttributeError
        return value

    __getattr__ = new_method_proxy(getattr)

    def __setattr__(self, name, value):
        if name == "_wrapped":
            # Assign directly to __dict__ to avoid a __setattr__ loop.
            self.__dict__["_wrapped"] = value
        else:
            if self._wrapped is empty:
                self._setup()
            setattr(self._wrapped, name, value)

    def __delattr__(self, name):
        if name == "_wrapped":
            raise TypeError("can`t delete _wrapped.")
        if self._wrapped is empty:
            self._setup()
        delattr(self._wrapped, name)

    def _setup(self):
        """
        Must be implemented by subclasses to initialise the wrapped object.
        """
        raise NotImplementedError(
            "subclasses of LazyObject must provide a _setup() method"
        )

    def __reduce__(self):
        # Pickle as the wrapped object itself (see unpickle_lazyobject).
        if self._wrapped is empty:
            self._setup()
        return (unpickle_lazyobject, (self._wrapped,))

    def __copy__(self):
        if self._wrapped is empty:
            # Not initialised: copy the wrapper. Use type(self), not
            # self.__class__, because the latter is proxied.
            return type(self)()
        else:
            # Initialised: return a copy of the wrapped object.
            return copy.copy(self._wrapped)

    def __deepcopy__(self, memo):
        if self._wrapped is empty:
            # Use type(self), not self.__class__, because the latter is
            # proxied.
            result = type(self)()
            memo[id(self)] = result
            return result
        return copy.deepcopy(self._wrapped, memo)

    __bytes__ = new_method_proxy(bytes)
    __str__ = new_method_proxy(str)
    __bool__ = new_method_proxy(bool)
    __dir__ = new_method_proxy(dir)
    __class__ = property(new_method_proxy(operator.attrgetter("__class__")))
    __eq__ = new_method_proxy(operator.eq)
    __lt__ = new_method_proxy(operator.lt)
    __gt__ = new_method_proxy(operator.gt)
    __ne__ = new_method_proxy(operator.ne)
    __hash__ = new_method_proxy(hash)

    # Container protocol support (list/tuple/dict).
    __getitem__ = new_method_proxy(operator.getitem)
    __setitem__ = new_method_proxy(operator.setitem)
    __delitem__ = new_method_proxy(operator.delitem)
    __iter__ = new_method_proxy(iter)
    __len__ = new_method_proxy(len)
    __contains__ = new_method_proxy(operator.contains)
def unpickle_lazyobject(wrapped):
    """Pickle helper: a LazyObject unpickles straight to its wrapped object."""
    return wrapped
import numpy as np
import mahotas as mh
from laocoon import equalization as eq
class GFP_Pipeline:
"""
A class that represent the pipeline for GFP analysis.
Attributes
----------
dapi_coords : list
Coordinates of the cell "centers" in the DAPI channel. Used as a reference.
checked : list
Keeps track of which cells have already been counted in other channels.
coords : list
Coordinates of all the cell "centers" in the GFP channel.
count : int
The number of cells counted in the image.
Methods
-------
analyze_gfp_hist_eps(file, dapi_coords, checked)
Calculates the number of counted cells and their coordinates with histogram
equalization and Gaussian filter preprocessing and epsilon quality control.
analyze_gfp_hist(file)
Calculates the number of counted cells and their coordinates with histogram
equalization and Gaussian filter preprocessing.
analyze_gfp_eps(file, dapi_coords, checked)
Calculates the number of counted cells and their coordinates with Gaussian
filter preprocessing and epsilon quality control.
analyze_gfp(file)
Calculates the number of counted cells and their coordinates with Gaussian
filter preprocessing.
epsilon(rfp_coords, dapi_coords, checked)
Helper function for implementing epsilon quality control.
"""
def __init__(self, file, checked, dapi_coords, hist=True, epsilon=True):
    """
    Parameters
    ----------
    file : str
        The path to the image.
    checked : list
        Keeps track of which cells have already been counted in other channels.
    dapi_coords : list
        Coordinates of all the cell "centers" in the DAPI channel. Used as a reference.
    hist : boolean, optional
        Decides whether to perform histogram equalization preprocessing on the image
        (default is True).
    epsilon : boolean, optional
        Decides whether to perform epsilon value quality control on the image
        (default is True).
    """
    self.dapi_coords = dapi_coords
    self.checked = checked
    # BUG FIX: the original used three independent ``if`` statements with the
    # final ``else`` bound only to the third one, so whenever hist was True
    # the else branch also ran analyze_gfp and overwrote the intended
    # results. An elif chain selects exactly one analysis path.
    if hist and epsilon:
        self.coords, self.count, self.checked = self.analyze_gfp_hist_eps(file, dapi_coords, checked)
    elif hist:
        self.count, self.coords = self.analyze_gfp_hist(file)
    elif epsilon:
        self.coords, self.count, self.checked = self.analyze_gfp_eps(file, dapi_coords, checked)
    else:
        self.count, self.coords = self.analyze_gfp(file)
def analyze_gfp_hist_eps(self, file, dapi_coords, checked):
    """Count cells with histogram equalization + Gaussian preprocessing and
    epsilon quality control.

    Parameters
    ----------
    file : str
        The path to the image.
    dapi_coords : list
        Coordinates of all the cell "centers" in the DAPI channel (reference).
    checked : list
        Keeps track of which cells have already been counted in other channels.

    Returns
    -------
    list
        Coordinates of all the cell "centers" in the GFP channel.
    int
        The number of cells counted in the image.
    list
        Updated ``checked`` list.
    """
    img = mh.imread(file)
    imgg = mh.colors.rgb2gray(img)
    # NOTE(review): the grayscale result above is immediately overwritten —
    # hist_eq is applied to the original RGB image. Possibly this was meant
    # to be eq.hist_eq(imgg); confirm against the other channel pipelines.
    imgg = eq.hist_eq(img)
    # NOTE(review): sigma is 15.4 here but 15 in analyze_gfp_hist and 11.5
    # in the non-hist paths — confirm these differences are deliberate.
    imggf = mh.gaussian_filter(imgg,15.4).astype(np.uint8)
    rmax = mh.regmax(imggf)
    gfp_seeds, gfp_nuclei = mh.label(rmax)
    gfp_coords = mh.center_of_mass(imgg,labels=gfp_seeds)
    count, checked = self.epsilon(gfp_coords,dapi_coords,checked)
    return gfp_coords, count, checked
def analyze_gfp_hist(self, file):
    """Count cells with histogram equalization + Gaussian preprocessing
    (no epsilon quality control).

    Parameters
    ----------
    file : str
        The path to the image.

    Returns
    -------
    int
        The number of cells counted in the image.
    list
        Coordinates of all the cell "centers" in the GFP channel.
    """
    img = mh.imread(file)
    imgg = mh.colors.rgb2gray(img)
    # NOTE(review): as in analyze_gfp_hist_eps, the grayscale conversion is
    # discarded and hist_eq runs on the RGB image — confirm intended.
    imgg = eq.hist_eq(img)
    imggf = mh.gaussian_filter(imgg,15).astype(np.uint8)
    rmax = mh.regmax(imggf)
    gfp_seeds, gfp_nuclei = mh.label(rmax)
    gfp_coords = mh.center_of_mass(imgg,labels=gfp_seeds)
    return gfp_nuclei,gfp_coords
    def analyze_gfp_eps(self, file, dapi_coords, checked):
        """
        Calculates the number of counted cells and their coordinates with Gaussian
        filter preprocessing and epsilon quality control.

        Parameters
        ----------
        file : str
            The path to the image.
        dapi_coords : list
            Coordinates of all the cell "centers" in the DAPI channel. Used as a reference.
        checked : list
            Keeps track of which cells have already been counted in other channels.

        Returns
        -------
        list
            Coordinates of all the cell "centers" in the GFP channel.
        int
            The number of cells counted in the image.
        list
            Keeps track of which cells have already been counted in other channels.
        """
        img = mh.imread(file)
        imgg = mh.colors.rgb2gray(img)
        # Smooth before regional-maxima detection (sigma=11.5, empirical --
        # smaller than the 15.x used by the hist-equalized variants).
        imggf = mh.gaussian_filter(imgg,11.5).astype(np.uint8)
        rmax = mh.regmax(imggf)  # binary mask of regional maxima (cell seeds)
        gfp_seeds, gfp_nuclei = mh.label(rmax)  # labeled seed regions + count
        gfp_coords = mh.center_of_mass(imgg,labels=gfp_seeds)
        # Quality control: only keep detections near a DAPI reference center.
        count, checked = self.epsilon(gfp_coords,dapi_coords,checked)
        return gfp_coords, count, checked
def analyze_gfp(self, file):
"""
Calculates the number of counted cells and their coordinates with Gaussian
filter preprocessing.
Parameters
----------
file : str
The path to the image.
Returns
-------
int
The number of cells counted in the image.
list
Coordinates of all the cell "centers" in the GFP channel.
"""
img = mh.imread(file)
imgg = mh.colors.rgb2grey(img)
imggf = mh.gaussian_filter(imgg,11.5).astype(np.uint8)
rmax = mh.regmax(imggf)
gfp_seeds, gfp_nuclei = mh.label(rmax)
gfp_coords = mh.center_of_mass(imgg,labels=gfp_seeds)
return gfp_nuclei,gfp_coords
def epsilon(self, gfp_coords, dapi_coords, checked):
"""
Helper function for implementing epsilon quality control.
Parameters
----------
edu_coords : list
Coordinates of all the cell "centers" in the GFP channel.
dapi_coords : list
Coordinates of all the cell "centers" in the DAPI channel. Used as a reference.
checked : list
Keeps track of which cells have already been counted in other channels.
"""
gfp_count = 0
for i in range(len(gfp_coords)):
for j in range(len(dapi_coords)):
dist = (dapi_coords[j][0]-gfp_coords[i][0])*(dapi_coords[j][0]-gfp_coords[i][0])+(dapi_coords[j][1]-gfp_coords[i][1])*(dapi_coords[j][1]-gfp_coords[i][1])
if dist <= 510:
gfp_count += 1
checked[j] += 1
return gfp_count,checked
|
"""
Flask-PyMongo
-------------
MongoDB support for Flask applications.
Flask-PyMongo is pip-installable:
$ pip install Flask-PyMongo
Documentation for Flask-PyMongo is available on `ReadTheDocs
<http://flask-pymongo.readthedocs.io/en/latest/>`_.
Source code is hosted on `GitHub <https://github.com/dcrosta/flask-pymongo>`_.
Contributions are welcome!
"""
from setuptools import find_packages, setup
setup(
name="Flask-PyMongo",
url="http://flask-pymongo.readthedocs.org/",
download_url="https://github.com/dcrosta/flask-pymongo/tags",
license="BSD",
author="Dan Crosta",
author_email="dcrosta@late.am",
description="PyMongo support for Flask applications",
long_description=__doc__,
zip_safe=False,
platforms="any",
packages=find_packages(),
install_requires=[
"Flask>=0.12",
"PyMongo>=3.3",
"six",
],
classifiers=[
"Environment :: Web Environment",
"Framework :: Flask",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Software Development :: Libraries :: Python Modules"
],
setup_requires=["vcversioner"],
vcversioner={"version_module_paths": ["flask_pymongo/_version.py"]},
)
|
'''
python CricInfoHeadlinesScraper.py > headlines.txt
'''
from bs4 import BeautifulSoup
from urllib.request import urlopen
import re

# Collect article URLs from the cricinfo front page's headline feed.
urls = []
html = urlopen("http://www.espncricinfo.com/")
# Name the parser explicitly: omitting it makes bs4 guess, which emits a
# GuessedAtParserWarning and can behave differently across machines.
bsObj = BeautifulSoup(html.read(), "html.parser")
# Bug fix: the original iterated bsObj.find(...), which walks the CHILDREN of
# the first matching section; text nodes among them have no find_all and raise
# AttributeError. Iterate the matching sections themselves instead.
for section in bsObj.find_all("section", {"class": "col-three"}):
    for link in section.find_all('a', {"name": re.compile("&lpos=cricket:feed:16:coll:headlines:\\d")}):
        urls.append('http://espncricinfo.com/' + link.attrs['href'])
#print(urls)
# Visit each article: print its headline, then every body paragraph.
for url in urls:
    detailsHtml = urlopen(url)
    newbsObj = BeautifulSoup(detailsHtml.read(), "html.parser")
    print(newbsObj.find('header', {'class': 'article-header'}).text)
    print("\n")
    for paragraph in newbsObj.find("section", {"id": "article-feed"}).find_all("article", {"class": "article"})[0].find_all('p'):
        print(paragraph.text)
    print('-----------------------------------------------------------------\n\n')
|
import os, discord, asyncio, datetime, dateutil.parser, requests, re, json, youtube_dl, enum, io
from discord.ext import commands
from dateutil import tz
# Runtime configuration, mostly sourced from environment variables, plus a few
# hard-coded discord channel ids.
settings = {
    "AUTH_TOKEN": os.environ["DISCORD_TOKEN"],  # bot login token
    "server": int(os.environ["MAIN_SERVER"]),  # main guild id
    "assistant_category": int(os.environ["CHANNELS_CATEGORY"]),  # category holding the per-tag feed channels
    "admins": list(map(int, os.environ["ADMINS"].split(" "))),  # user ids allowed to use admin commands
    "requests": int(os.environ["REQUESTS_ID"]),  # channel where commands are accepted
    "tc_general": 455719906244034561,
    "vc_general": 455719906244034563,
    "tc_music": 456588364775161857,
    "vc_music": 456588393401548821,
    "danbooru_key": os.environ["DANBOORU_API_KEY"],
    "danbooru_user": os.environ["DANBOORU_USERNAME"]
}
# Mutable per-run state shared between the background poller and the commands.
session = {
    "force_refresh": False,  # set True to cut the poller's sleep loop short
    "slept": 0  # seconds slept so far in the current poll interval
}
assistant = commands.Bot(command_prefix=commands.when_mentioned_or("/"), description="I'm not your assistant!")
async def check_updates():
    """Background task: roughly once per hour, post new danbooru posts to each
    tag channel in the assistant category.

    Each channel's topic holds its danbooru tag; the most recent message in
    the channel holds the last post id already delivered.
    """
    await assistant.wait_until_ready()
    while not assistant.is_closed():
        print("Rediscovering channels")
        category = assistant.get_channel(settings["assistant_category"])
        channels = []
        for c in category.channels:
            if c.id != settings["requests"]:
                channels.append(c)
        print(f"Got {len(channels)} different channels")
        for channel in channels:
            tag = channel.topic.strip()
            print(f" Checking tag {tag}")
            history = await channel.history(limit=1).flatten()
            prev = 1  # danbooru post id to resume from; 1 == fetch everything
            if len(history) == 1:
                try:
                    prev = re.search(r"https:\/\/danbooru.donmai.us\/posts\/(\d+)", history[0].content).group(1)
                # Bug fix: re.search returns None when nothing matches, so
                # .group(1) raises AttributeError -- the old handler only
                # caught IndexError (and its message crashed on embeds[0]).
                except (AttributeError, IndexError):
                    print(f" Could not parse previous post id from \"{history[0].content}\"")
                    continue
            print(" Got previous post id: {}".format(prev))
            try:
                count = json.loads(requests.get(
                    f"https://danbooru.donmai.us/counts/posts.json?tags={tag}+id:>{prev}").text)["counts"]["posts"]
            except json.decoder.JSONDecodeError:
                print(" JSONDecodeError: Danbooru might be down")
                break
            print(f" Discovered {count} new posts")
            if count == 0:
                print(" Continuing...")
                continue
            posts = []
            baseurl = "https://danbooru.donmai.us/posts.json"
            # Ceiling division -- https://stackoverflow.com/a/17511341
            pagecount = -(-count // 200)
            for p in range(pagecount):
                data = f"tags={tag}+id:>{prev}&page={p + 1}&limit=200"
                url = f"{baseurl}?{data}&login={settings['danbooru_user']}&api_key={settings['danbooru_key']}"
                posts.extend(json.loads(requests.get(url).text))
            ids = []
            for p in posts:
                try:
                    ids.append(p["id"])
                # Narrowed from a bare except: a missing key (or a non-dict
                # entry) is the only failure this is meant to tolerate.
                except (KeyError, TypeError):
                    print(f" Could not retrieve id of: {p}")
                    continue
            ids = sorted(list(set(ids)), key=int)
            print(f" Got {len(ids)} new posts, outputting...")
            index = 0
            for i in ids:
                if index % 50 == 0:
                    print(f" {index} out of {len(ids)}")
                index += 1
                await channel.send(f"`[{datetime.datetime.utcnow().replace(tzinfo=tz.tzutc())}]`\n https://danbooru.donmai.us/posts/{i}")
            print(f" {len(ids)} out of {len(ids)}")
        # Sleep up to an hour in 15s slices so force_refresh can interrupt.
        session["slept"] = 0
        while session["slept"] < 3600:
            if session["slept"] % 60 == 0:
                print(f"Sleeping for {3600 - session['slept']} more seconds")
            if session["force_refresh"]:
                session["force_refresh"] = False
                break
            session["slept"] += 15
            await asyncio.sleep(15)
@assistant.event
async def on_ready():
    # Connection banner: log which account the bot signed in as.
    print("--------------")
    print(assistant.user)
    print("--------------")
@assistant.command()
async def add(ctx, tag: str = ""):
    """Admin command: create a feed channel for a danbooru tag.

    Only honored inside the requests channel; the new channel's topic is set
    to the tag and the channel is inserted alphabetically in the category.
    """
    # Guard clauses replace the original's five levels of nesting.
    if ctx.message.channel.id != settings["requests"]:
        return
    if ctx.message.author.id not in settings["admins"]:
        print("Denying add request for {}".format(ctx.message.author.id))
        await ctx.send("Insufficient permissions")
        return
    if not tag:
        await ctx.send("No tag present")
        return
    print("Attempting to add tag {}".format(tag))
    count = -1
    try:
        count = json.loads(requests.get("https://danbooru.donmai.us/counts/posts.json?tags={}".format(tag)).text)["counts"]["posts"]
    except json.decoder.JSONDecodeError:
        print(" JSONDecodeError: Danbooru might be down")
        await ctx.send("JSONDecodeError: Danbooru might be down")
        # Bug fix: the original fell through with count == -1 and then also
        # reported "No posts under tag", which is misleading. Stop here.
        return
    if count <= 0:
        await ctx.send("No posts under tag")
        print(" Empty tag \"{}\"".format(tag))
        return
    print(" Creating text channel for \"{}\"".format(tag))
    server = assistant.get_guild(settings["server"])
    # Strip characters discord does not allow in channel names.
    sanitise = ["'", "(", ")", "\\", "/", ":"]
    name = tag
    for char in sanitise:
        name = name.replace(char, "")
    category = assistant.get_channel(settings["assistant_category"])
    channels = []
    for c in category.channels:
        if c.id != settings["requests"]:
            channels.append(c.name)
    if name in channels:
        print(" Channel \"{}\" already exists".format(name))
        await ctx.send("Channel already exists!")
        return
    new_channel = await server.create_text_channel(name, category=category)
    # Position the new channel alphabetically among its siblings.
    channels.append(name)
    channels.sort()
    index = channels.index(name) + 1
    print(" Index: {}".format(index))
    # Each edit is attempted independently; a permission failure on one
    # should not prevent the others.
    try:
        await new_channel.edit(topic=tag)
    except discord.errors.Forbidden:
        print(" Permission error setting topic for \"{}\"".format(name))
    try:
        await new_channel.edit(nsfw=True)
    except discord.errors.Forbidden:
        print(" Permission error setting nsfw for \"{}\"".format(name))
    try:
        await new_channel.edit(position=index)
    except discord.errors.Forbidden:
        print(" Permission error setting index for \"{}\"".format(name))
    print(" Created text channel \"{}\" at index {}".format(name, index))
    await ctx.send("Channel \"{}\" created".format(name))
@assistant.command()
async def force_refresh(ctx):
    """Admin command: end the poller's current sleep interval early."""
    if ctx.message.channel.id != settings["requests"]:
        return
    if ctx.message.author.id not in settings["admins"]:
        print("Denying force_refresh request for {}".format(ctx.message.author.id))
        await ctx.send("Insufficient permissions")
        return
    print("Forcing refresh with {} seconds left".format(3600 - session["slept"]))
    session["force_refresh"] = True
    await ctx.send("Refreshing as soon as possible...")
@assistant.command()
async def poll(ctx):
    """Report how long until the next automatic update pass."""
    if ctx.message.channel.id != settings["requests"]:
        return
    remaining = 3600 - session["slept"]
    await ctx.send("Time to next update: {} seconds".format(remaining))
@assistant.command()
async def tsun(ctx, msg: str = ""):
    """Echo *msg* back, but only outside the feed category."""
    outside_feeds = ctx.message.channel.category_id != settings["assistant_category"]
    if outside_feeds:
        await ctx.send("Message: " + msg)
@assistant.command()
async def nullpo(ctx):
    """Classic call-and-response, only outside the feed category."""
    outside_feeds = ctx.message.channel.category_id != settings["assistant_category"]
    if outside_feeds:
        await ctx.send("Gah!")
@assistant.command(aliases=["Christina", "Kurisutina", "kurisutina"])
async def christina(ctx):
if ctx.message.channel.category_id != settings["assistant_category"]:
await ctx.send("There's no -tina!")
# Schedule the background poller, then hand control to the discord client.
assistant.loop.create_task(check_updates())
assistant.run(settings["AUTH_TOKEN"])
|
import numpy as np
import pytest
import anomaly_delays.main_functions
# The expected grids below correspond to the two loss flavors: the "Fixed"
# values appear to be log losses and the "Variable" values squared errors --
# see anomaly_delays.main_functions.calculate_loss for the authoritative math.
@pytest.mark.parametrize(
    "share_type, expected_values",
    (
        pytest.param(
            "Fixed",
            [
                [1.00000005e-07, 2.23143551e-01],
                [6.93147181e-01, 1.00000005e-07],
                [1.20397280e00, 5.10825624e-01],
            ],
            id="Fixed share",
        ),
        pytest.param(
            "Variable",
            [[0.0, 0.04], [0.25, 0.0], [0.49, 0.16]],
            id="Variable share",
        ),
    ),
)
def test_calculate_loss(share_type, expected_values):
    """calculate_loss maps (target, per-expert score) pairs to a loss grid."""
    # Three timesteps, binary targets, two experts' anomaly scores.
    target = np.array([[0], [1], [1]])
    score = np.array([[0, 0.2], [0.5, 1], [0.3, 0.6]])
    result = anomaly_delays.main_functions.calculate_loss(
        target, score, share_type=share_type
    )
    expected = np.array(expected_values)
    np.testing.assert_array_almost_equal(result, expected)
@pytest.mark.parametrize(
    "share_type, expected_values",
    (
        pytest.param("Fixed", 0.58, id="Fixed share"),
        pytest.param("Variable", 0.5667982661056511, id="Variable share"),
    ),
)
def test_calculate_score(share_type, expected_values):
    """calculate_score aggregates expert scores using normalized weights."""
    experts = np.array([0.1, 0.9])
    weights = np.array([0.4, 0.6])
    actual = anomaly_delays.main_functions.calculate_score(
        experts, weights, share_type=share_type
    )
    np.testing.assert_almost_equal(actual, expected_values)
@pytest.mark.parametrize(
    "share_type, alpha, delay_current, expected_values",
    (
        pytest.param(
            "Fixed", 0.1, 5, [0.52397018, 0.47602982], id="Fixed share"
        ),
        pytest.param(
            "Variable", 0.05, 2, [0.6922423, 0.3077577], id="Variable share"
        ),
    ),
)
def test_calculate_weights(share_type, alpha, delay_current, expected_values):
    """calculate_weights updates normalized weights from the experts' losses."""
    prior_weights = np.array([0.49, 0.51])
    losses = np.array([0.1, 0.9])
    actual = anomaly_delays.main_functions.calculate_weights(
        prior_weights,
        losses,
        share_type=share_type,
        alpha=alpha,
        delay_current=delay_current,
    )
    np.testing.assert_array_almost_equal(actual, np.array(expected_values))
# End-to-end check of share_delays: aggregated scores, share losses, expert
# losses and weight trajectories are all pinned for both share types.
@pytest.mark.parametrize(
    "share_type, alpha, delays, expected_score_share,"
    "expected_loss_share, expected_loss_experts, expected_weights_experts",
    (
        pytest.param(
            "Fixed",
            0.1,
            1,
            [0.5, 0.61],
            [0.69314718, 0.49429632],
            [[0.10536052, 2.30258509], [0.35667494, 1.60943791]],
            [[0.5, 0.5], [0.82, 0.18]],
            id="Fixed share",
        ),
        pytest.param(
            "Variable",
            0.3,
            1,
            [0.5, 0.63447463],
            [0.25, 0.13360879],
            [[0.01, 0.81], [0.09, 0.64]],
            [[0.5, 0.5], [0.87120567, 0.12879433]],
            id="Variable share",
        ),
    ),
)
def test_share_delays(
    share_type,
    alpha,
    delays,
    expected_score_share,
    expected_loss_share,
    expected_loss_experts,
    expected_weights_experts,
):
    """share_delays returns four arrays; each is compared to its fixture."""
    # Two timesteps, binary targets, two experts' scores per step.
    target = np.array([[0], [1]])
    score_experts = np.array([[0.1, 0.9], [0.7, 0.2]])
    (
        result_score_share,
        result_loss_share,
        result_loss_experts,
        result_weights_experts,
    ) = anomaly_delays.main_functions.share_delays(
        target,
        score_experts,
        share_type=share_type,
        alpha=alpha,
        delays=delays,
    )
    np.testing.assert_array_almost_equal(
        expected_score_share, result_score_share
    )
    np.testing.assert_array_almost_equal(
        expected_loss_share, result_loss_share
    )
    np.testing.assert_array_almost_equal(
        expected_loss_experts, result_loss_experts
    )
    np.testing.assert_array_almost_equal(
        expected_weights_experts, result_weights_experts
    )
|
#!/usr/bin/env python
"""
gen_python.py
Generate Python code from and ASDL schema.
TODO:
- What about Id? app_types?
"""
import sys
from asdl import asdl_ as asdl
from asdl import visitor
class GenClassesVisitor(visitor.AsdlVisitor):
    """Emits Python source for classes mirroring an ASDL schema.

    Simple sums become SimpleObj subclasses with one singleton per variant;
    compound sums become a tag namespace plus one subclass per constructor;
    products become a single CompoundObj subclass.
    """

    def VisitSimpleSum(self, sum, name, depth):
        # Enum-like type: one shared class, one pre-built instance per variant.
        self.Emit('class %s_e(py_meta.SimpleObj):' % name, depth)
        self.Emit(' ASDL_TYPE = TYPE_LOOKUP.ByTypeName(%r)' % name, depth)
        self.Emit('', depth)
        # Just use #define, since enums aren't namespaced.
        for i, variant in enumerate(sum.types):
            attr = '%s_e.%s = %s_e(%d, %r)' % (
                name, variant.name, name, i + 1, variant.name)
            self.Emit(attr, depth)
        self.Emit('', depth)

    def _GenClass(self, desc, name, super_name, depth, tag_num=None):
        """Emit one class with __slots__, ASDL_TYPE and an __init__ that
        fills in per-field defaults (optional and sequence fields)."""
        self.Emit('class %s(%s):' % (name, super_name), depth)
        if tag_num is not None:
            self.Emit(' tag = %d' % tag_num, depth)
        field_names = [f.name for f in desc.fields]
        quoted_fields = repr(tuple(field_names))
        # NOTE: FIELDS is a duplicate of __slots__, used for pretty printing and
        # oheap serialization. TODO: measure the effect of __slots__, and then get
        # rid of FIELDS? Or you can just make it an alias.
        # FIELDS = self.__slots__.
        self.Emit(' ASDL_TYPE = TYPE_LOOKUP.ByTypeName(%r)' % name, depth)
        self.Emit(' __slots__ = %s' % quoted_fields, depth)
        self.Emit('', depth)
        # TODO: leave out spids? Mark it as an attribute?
        args = ', '.join('%s=None' % f.name for f in desc.fields)
        self.Emit(' def __init__(self, %s):' % args, depth)
        for f in desc.fields:
            # This logic is like _MakeFieldDescriptors
            default = None
            if f.opt: # Maybe
                if f.type == 'int':
                    default = 'const.NO_INTEGER'
                elif f.type == 'string':
                    default = "''"
                else:
                    default = 'None'
            elif f.seq: # Array
                default = '[]'
            # PROBLEM: Optional ints can't be zero!
            # self.span_id = span_id or const.NO_INTEGER
            # I don't want to add if statements checking against None?
            # For now don't use optional ints. We don't need it.
            default_str = (' or %s' % default) if default else ''
            self.Emit(' self.%s = %s%s' % (f.name, f.name, default_str), depth)
        self.Emit('', depth)

    def VisitConstructor(self, cons, def_name, tag_num, depth):
        # Constructors with fields get a full class body; field-less ones
        # only need the tag and type lookup.
        if cons.fields:
            self._GenClass(cons, cons.name, def_name, depth, tag_num=tag_num)
        else:
            self.Emit("class %s(%s):" % (cons.name, def_name), depth)
            self.Emit(' ASDL_TYPE = TYPE_LOOKUP.ByTypeName(%r)' % cons.name, depth)
            self.Emit(' tag = %d' % tag_num, depth)
            self.Emit('', depth)

    def VisitCompoundSum(self, sum, name, depth):
        # define command_e
        self.Emit('class %s_e(object):' % name, depth)
        for i, variant in enumerate(sum.types):
            self.Emit(' %s = %d' % (variant.name, i + 1), depth)
        self.Emit('', depth)
        self.Emit('class %s(py_meta.CompoundObj):' % name, depth)
        self.Emit(' ASDL_TYPE = TYPE_LOOKUP.ByTypeName(%r)' % name, depth)
        self.Emit('', depth)
        # define command_t, and then make subclasses
        super_name = '%s' % name
        for i, t in enumerate(sum.types):
            tag_num = i + 1
            self.VisitConstructor(t, super_name, tag_num, depth)

    def VisitProduct(self, product, name, depth):
        self._GenClass(product, name, 'py_meta.CompoundObj', depth)

    def EmitFooter(self):
        # Nothing to append after the generated classes.
        pass
def main(argv):
    """Entry point.

    argv[1] is the path to the ASDL schema; argv[2] is an import statement
    (written verbatim into the output) that must define TYPE_LOOKUP.
    Generated code is written to stdout.
    """
    schema_path = argv[1]
    type_lookup_import = argv[2]
    with open(schema_path) as input_f:
        module = asdl.parse(input_f)
    f = sys.stdout
    # Preamble: runtime imports of the generated classes, then the
    # caller-supplied import that provides TYPE_LOOKUP.
    f.write("""\
from asdl import const  # For const.NO_INTEGER
from asdl import py_meta
%s
""" % type_lookup_import)
    v = GenClassesVisitor(f)
    v.VisitModule(module)
if __name__ == '__main__':
    try:
        main(sys.argv)
    except RuntimeError as e:
        # Bug fix: 'print >>sys.stderr, ...' is Python 2 syntax and a
        # SyntaxError under Python 3. Writing to stderr directly works on both.
        sys.stderr.write('FATAL: %s\n' % e)
        sys.exit(1)
|
# puzzle_definitions.py
import math
from puzzle_generator import PuzzleDefinitionBase
from math3d_triangle_mesh import TriangleMesh, Polyhedron
from math3d_triangle import Triangle
from math3d_vector import Vector
from math3d_transform import AffineTransform, LinearTransform
from math3d_sphere import Sphere
from math3d_cylinder import Cylinder
from math3d_point_cloud import PointCloud
from puzzle_generator import GeneratorMesh, ColoredMesh
class RubiksCube(PuzzleDefinitionBase):
    """Classic 3x3x3 cube: six face cuts at +/- 1/3 along each axis,
    each generating a quarter turn."""

    def __init__(self):
        super().__init__()

    def make_generator_mesh_list(self):
        # One cutting disk per face, a third of the way out along each axis
        # and oriented facing back toward the cube's interior.
        l_cut_disk = TriangleMesh.make_disk(Vector(-1.0 / 3.0, 0.0, 0.0), Vector(1.0, 0.0, 0.0), 4.0, 4)
        r_cut_disk = TriangleMesh.make_disk(Vector(1.0 / 3.0, 0.0, 0.0), Vector(-1.0, 0.0, 0.0), 4.0, 4)
        d_cut_disk = TriangleMesh.make_disk(Vector(0.0, -1.0 / 3.0, 0.0), Vector(0.0, 1.0, 0.0), 4.0, 4)
        u_cut_disk = TriangleMesh.make_disk(Vector(0.0, 1.0 / 3.0, 0.0), Vector(0.0, -1.0, 0.0), 4.0, 4)
        b_cut_disk = TriangleMesh.make_disk(Vector(0.0, 0.0, -1.0 / 3.0), Vector(0.0, 0.0, 1.0), 4.0, 4)
        f_cut_disk = TriangleMesh.make_disk(Vector(0.0, 0.0, 1.0 / 3.0), Vector(0.0, 0.0, -1.0), 4.0, 4)
        # Wrap each disk with its turn axis (quarter turn) and pick point.
        l_cut_disk = GeneratorMesh(mesh=l_cut_disk, axis=Vector(-1.0, 0.0, 0.0), angle=math.pi / 2.0, pick_point=Vector(-1.0, 0.0, 0.0))
        r_cut_disk = GeneratorMesh(mesh=r_cut_disk, axis=Vector(1.0, 0.0, 0.0), angle=math.pi / 2.0, pick_point=Vector(1.0, 0.0, 0.0))
        d_cut_disk = GeneratorMesh(mesh=d_cut_disk, axis=Vector(0.0, -1.0, 0.0), angle=math.pi / 2.0, pick_point=Vector(0.0, -1.0, 0.0))
        u_cut_disk = GeneratorMesh(mesh=u_cut_disk, axis=Vector(0.0, 1.0, 0.0), angle=math.pi / 2.0, pick_point=Vector(0.0, 1.0, 0.0))
        b_cut_disk = GeneratorMesh(mesh=b_cut_disk, axis=Vector(0.0, 0.0, -1.0), angle=math.pi / 2.0, pick_point=Vector(0.0, 0.0, -1.0))
        f_cut_disk = GeneratorMesh(mesh=f_cut_disk, axis=Vector(0.0, 0.0, 1.0), angle=math.pi / 2.0, pick_point=Vector(0.0, 0.0, 1.0))
        return [l_cut_disk, r_cut_disk, d_cut_disk, u_cut_disk, b_cut_disk, f_cut_disk]
class FisherCube(RubiksCube):
    """Fisher cube: a Rubik's cube with its cuts rotated 45 degrees about Y."""

    def __init__(self):
        super().__init__()

    def make_generator_mesh_list(self):
        mesh_list = super().make_generator_mesh_list()
        transform = LinearTransform().make_rotation(Vector(0.0, 1.0, 0.), math.pi / 4.0)
        # NOTE(review): the transform is applied to the whole list first, then
        # to each mesh's axis and pick point individually -- presumably
        # LinearTransform.__call__ maps over a list of meshes; confirm.
        mesh_list = transform(mesh_list)
        for mesh in mesh_list:
            mesh.axis = transform(mesh.axis)
            mesh.pick_point = transform(mesh.pick_point)
        return mesh_list
class FusedCube(RubiksCube):
    """Fused cube: a Rubik's cube that keeps only the R, U and F generators."""

    def __init__(self):
        super().__init__()

    def make_generator_mesh_list(self):
        mesh_list = super().make_generator_mesh_list()
        # Drop L (0), D (2) and B (4) from [l, r, d, u, b, f]; deleting from
        # the highest index down keeps the earlier indices valid.
        del mesh_list[4]
        del mesh_list[2]
        del mesh_list[0]
        return mesh_list

    def transform_meshes_for_more_cutting(self, mesh_list, generator_mesh_list, cut_pass):
        # After the earlier deletions the remaining generators are R, U, F.
        r_cut_disk = generator_mesh_list[0]
        u_cut_disk = generator_mesh_list[1]
        f_cut_disk = generator_mesh_list[2]
        # Schedule: passes 0-3 twist R, 3-6 twist U, 6-9 twist F, with the
        # boundary passes applying both twists to hand over cleanly.
        if cut_pass < 3:
            self.apply_generator(mesh_list, r_cut_disk)
        elif cut_pass == 3:
            self.apply_generator(mesh_list, r_cut_disk)
            self.apply_generator(mesh_list, u_cut_disk)
        elif 3 < cut_pass < 6:
            self.apply_generator(mesh_list, u_cut_disk)
        elif cut_pass == 6:
            self.apply_generator(mesh_list, u_cut_disk)
            self.apply_generator(mesh_list, f_cut_disk)
        elif 6 < cut_pass < 9:
            self.apply_generator(mesh_list, f_cut_disk)
        elif cut_pass == 9:
            self.apply_generator(mesh_list, f_cut_disk)
            # Final pass: signal that no further cutting passes are needed.
            return False
        return True
class CopterBase(PuzzleDefinitionBase):
    """Shared logic for helicopter-style puzzles: annotates each edge
    generator with the generator pairs used for its two 'jumbling' moves."""

    def __init__(self):
        super().__init__()

    def annotate_puzzle_data(self, puzzle_data):
        # The six face axes of the cube; every edge generator axis sits at
        # 45 degrees from exactly two of these.
        axis_list = [
            Vector(-1.0, 0.0, 0.0),
            Vector(1.0, 0.0, 0.0),
            Vector(0.0, -1.0, 0.0),
            Vector(0.0, 1.0, 0.0),
            Vector(0.0, 0.0, -1.0),
            Vector(0.0, 0.0, 1.0)
        ]
        generator_mesh_list = puzzle_data['generator_mesh_list']
        count = 12 #len(generator_mesh_list)
        for i in range(count):
            mesh_data = generator_mesh_list[i]
            generator_axis = Vector().from_dict(mesh_data['axis'])
            # Find the two face axes adjacent to this edge axis.
            adjacent_axis_list = []
            for axis in axis_list:
                if math.fabs(axis.angle_between(generator_axis) - math.pi / 4.0) < 1e-5:
                    adjacent_axis_list.append(axis)
            assert(len(adjacent_axis_list) == 2)
            special_case_data = {}
            # Rotating the edge axis a quarter turn about each adjacent face
            # axis yields the generators involved in the two special moves.
            gen_axis_a = generator_axis.rotated(adjacent_axis_list[0], math.pi / 2.0)
            gen_axis_b = generator_axis.rotated(adjacent_axis_list[1], math.pi / 2.0)
            special_case_data['special_move_a'] = {
                'generator_mesh_a': self._find_axis(generator_mesh_list, gen_axis_a),
                'generator_mesh_b': self._find_axis(generator_mesh_list, gen_axis_b)
            }
            gen_axis_a = generator_axis.rotated(adjacent_axis_list[0], -math.pi / 2.0)
            gen_axis_b = generator_axis.rotated(adjacent_axis_list[1], -math.pi / 2.0)
            special_case_data['special_move_b'] = {
                'generator_mesh_a': self._find_axis(generator_mesh_list, gen_axis_a),
                'generator_mesh_b': self._find_axis(generator_mesh_list, gen_axis_b)
            }
            mesh_data['special_case_data'] = special_case_data

    def _find_axis(self, generator_mesh_list, axis, eps=1e-7):
        # Index of the generator whose axis matches `axis` within eps.
        # NOTE(review): falls through and returns None when nothing matches --
        # callers appear to rely on a match always existing; confirm.
        for i, mesh_data in enumerate(generator_mesh_list):
            if (Vector().from_dict(mesh_data['axis']) - axis).length() < eps:
                return i
class CurvyCopter(CopterBase):
    """Curvy copter: twelve spherical edge cuts, one per cube edge,
    each a 180-degree turn about the edge axis."""

    def __init__(self):
        super().__init__()

    def calc_sphere_radius(self):
        # Radius chosen so each cut sphere passes through the midpoint of an
        # adjacent face edge.
        return (Vector(math.sqrt(2.0), math.sqrt(2.0), 0.0) - Vector(0.0, 1.0, 0.0)).length()

    def make_generator_mesh_list(self):
        radius = self.calc_sphere_radius()
        # One sphere centered beyond each of the twelve cube edges.
        sphere_list = [
            Sphere(Vector(-math.sqrt(2.0), -math.sqrt(2.0), 0.0), radius),
            Sphere(Vector(math.sqrt(2.0), -math.sqrt(2.0), 0.0), radius),
            Sphere(Vector(-math.sqrt(2.0), math.sqrt(2.0), 0.0), radius),
            Sphere(Vector(math.sqrt(2.0), math.sqrt(2.0), 0.0), radius),
            Sphere(Vector(-math.sqrt(2.0), 0.0, -math.sqrt(2.0)), radius),
            Sphere(Vector(math.sqrt(2.0), 0.0, -math.sqrt(2.0)), radius),
            Sphere(Vector(-math.sqrt(2.0), 0.0, math.sqrt(2.0)), radius),
            Sphere(Vector(math.sqrt(2.0), 0.0, math.sqrt(2.0)), radius),
            Sphere(Vector(0.0, -math.sqrt(2.0), -math.sqrt(2.0)), radius),
            Sphere(Vector(0.0, math.sqrt(2.0), -math.sqrt(2.0)), radius),
            Sphere(Vector(0.0, -math.sqrt(2.0), math.sqrt(2.0)), radius),
            Sphere(Vector(0.0, math.sqrt(2.0), math.sqrt(2.0)), radius)
        ]
        mesh_list = []
        for sphere in sphere_list:
            # Each generator is a half-turn (pi) about the edge direction.
            mesh = GeneratorMesh(mesh=sphere.make_mesh(subdivision_level=2), axis=sphere.center.normalized(), angle=math.pi, pick_point=sphere.center.resized(math.sqrt(2.0)))
            mesh_list.append(mesh)
        return mesh_list
class CurvyCopterPlus(CurvyCopter):
    """Curvy copter plus: identical to CurvyCopter but with deeper cuts."""

    def __init__(self):
        super().__init__()

    def calc_sphere_radius(self):
        # Slightly larger radius than CurvyCopter (reference point pushed past
        # the face midpoint), producing the deeper "plus" cuts.
        return (Vector(math.sqrt(2.0), math.sqrt(2.0), 0.0) - Vector(-0.2, 1.0, 0.0)).length()
class HelicopterCube(CopterBase):
    """Helicopter cube: twelve flat edge cuts, one half-turn per cube edge."""

    def __init__(self):
        super().__init__()

    def make_generator_mesh_list(self):
        # The twelve edge midpoints of the cube, by sign-permuting each of the
        # three base midpoints in the two free axes.
        point_list = [point for point in Vector(0.5, 0.5, 0.0).sign_permute(flip_z=False)]
        point_list += [point for point in Vector(0.5, 0.0, 0.5).sign_permute(flip_y=False)]
        point_list += [point for point in Vector(0.0, 0.5, 0.5).sign_permute(flip_x=False)]
        mesh_list = []
        for point in point_list:
            normal = point.normalized()
            # Flat cutting disk through the edge midpoint, facing inward.
            disk = TriangleMesh.make_disk(point, -normal, 4.0, 4)
            mesh = GeneratorMesh(mesh=disk, axis=normal, angle=math.pi, pick_point=point.resized(math.sqrt(2.0)))
            mesh_list.append(mesh)
        return mesh_list
class FlowerCopter(CurvyCopter):
    """Flower copter: curvy copter cuts plus eight spherical corner cuts
    with 120-degree turns."""

    def __init__(self):
        super().__init__()

    def make_generator_mesh_list(self):
        # Start with the twelve curvy-copter edge generators.
        mesh_list = super().make_generator_mesh_list()
        radius = math.sqrt(2.0)
        # Add a sphere cut at each of the eight cube corners.
        point_list = [point for point in Vector(1.0, 1.0, 1.0).sign_permute()]
        for point in point_list:
            sphere = Sphere(point, radius)
            mesh = GeneratorMesh(mesh=sphere.make_mesh(subdivision_level=2), axis=point.normalized(), angle=2.0 * math.pi / 3.0, pick_point=point)
            mesh_list.append(mesh)
        return mesh_list
class Megaminx(PuzzleDefinitionBase):
    """Megaminx: dodecahedron with one fifth-turn cut per face."""

    def __init__(self):
        super().__init__()

    def make_initial_mesh_list(self):
        mesh = TriangleMesh().make_polyhedron(Polyhedron.DODECAHEDRON)
        # Keep the face planes around: make_generator_mesh_list reads them.
        face_mesh_list, self.plane_list = self.make_face_meshes(mesh)
        return face_mesh_list

    def make_generator_mesh_list(self):
        mesh_list = []
        for plane in self.plane_list:
            # Cut disk sunk to 70% of the face-center distance, facing inward;
            # each turn is a fifth of a revolution.
            disk = TriangleMesh.make_disk(plane.center.scaled(0.7), -plane.unit_normal, 4.0, 4)
            mesh = GeneratorMesh(mesh=disk, axis=plane.unit_normal, angle=2.0 * math.pi / 5.0, pick_point=plane.center)
            mesh_list.append(mesh)
        return mesh_list
class DinoCube(PuzzleDefinitionBase):
    """Dino cube: one deep cut per cube corner, each a 120-degree turn."""

    def __init__(self):
        super().__init__()

    def make_generator_mesh_list(self):
        # Distance from the origin to the plane through the three vertices
        # adjacent to a corner (centroid of those vertices).
        cut_depth = ((Vector(1.0, -1.0, 1.0) + Vector(1.0, 1.0, -1.0) + Vector(-1.0, 1.0, 1.0)) / 3.0).length()
        return [
            GeneratorMesh(
                mesh=TriangleMesh.make_disk(corner.resized(cut_depth), -corner.normalized(), 4.0, 4),
                axis=corner.normalized(),
                angle=2.0 * math.pi / 3.0,
                pick_point=corner,
            )
            for corner in Vector(1.0, 1.0, 1.0).sign_permute()
        ]
class FlowerRexCube(PuzzleDefinitionBase):
    """Flower rex cube: eight shallow spherical corner cuts, 120-degree turns."""

    def __init__(self):
        super().__init__()

    def make_generator_mesh_list(self):
        mesh_list = []
        length = 3.0
        # Radius chosen so each sphere (centered out past a corner) reaches
        # the neighboring corners.
        radius = (Vector(1.0, 1.0, 1.0).resized(length) - Vector(-1.0, 1.0, 1.0)).length()
        for vector in Vector(1.0, 1.0, 1.0).sign_permute():
            mesh = GeneratorMesh(mesh=Sphere(vector.resized(length), radius).make_mesh(subdivision_level=2), axis=vector.normalized(), angle=2.0 * math.pi / 3.0, pick_point=vector)
            mesh_list.append(mesh)
        return mesh_list

    def min_mesh_area(self):
        # Discard cut slivers smaller than this area.
        return 0.05
class Skewb(PuzzleDefinitionBase):
    """Skewb: four cutting planes through the cube center, one per corner
    diagonal, each a 120-degree turn."""

    def __init__(self):
        super().__init__()

    def make_generator_mesh_list(self):
        # Every cut disk passes through the origin, normal to a corner
        # diagonal; sign_permute yields all eight corners (two per plane).
        return [
            GeneratorMesh(
                mesh=TriangleMesh.make_disk(Vector(0.0, 0.0, 0.0), -corner.normalized(), 4.0, 4),
                axis=corner.normalized(),
                angle=2.0 * math.pi / 3.0,
                pick_point=corner,
            )
            for corner in Vector(1.0, 1.0, 1.0).sign_permute()
        ]
class SquareOne(PuzzleDefinitionBase):
    """Square-1: a slanted middle half-turn cut plus upper and lower
    twelfth-turn layers. A bandaged (shape-shifting) puzzle."""

    def __init__(self):
        super().__init__()

    def bandages(self):
        # Moves can be blocked by piece shapes.
        return True

    def make_generator_mesh_list(self):
        mesh_list = []
        # Middle slice plane, offset 15 degrees from vertical (pi + pi/12).
        angle = math.pi + math.pi / 12.0
        normal = Vector(math.cos(angle), 0.0, math.sin(angle))
        mesh = GeneratorMesh(mesh=TriangleMesh.make_disk(Vector(0.0, 0.0, 0.0), normal, 4.0, 4), axis=-normal, angle=math.pi, pick_point=normal.resized(-2.0))
        mesh_list.append(mesh)
        # Upper and lower layers turn in twelfths of a revolution (pi/6).
        mesh = GeneratorMesh(mesh=TriangleMesh.make_disk(Vector(0.0, 0.2, 0.0), Vector(0.0, -1.0, 0.0), 4.0, 4), axis=Vector(0.0, 1.0, 0.0), angle=math.pi / 6.0, pick_point=Vector(0.0, 1.0, 0.0))
        mesh_list.append(mesh)
        mesh = GeneratorMesh(mesh=TriangleMesh.make_disk(Vector(0.0, -0.2, 0.0), Vector(0.0, 1.0, 0.0), 4.0, 4), axis=Vector(0.0, -1.0, 0.0), angle=math.pi / 6.0, pick_point=Vector(0.0, -1.0, 0.0))
        mesh_list.append(mesh)
        return mesh_list

    def transform_meshes_for_more_cutting(self, mesh_list, generator_mesh_list, cut_pass):
        u_cut_disk = generator_mesh_list[1]
        d_cut_disk = generator_mesh_list[2]
        # Passes 0-2 twist the layers progressively (1, 2 and 1 notches) so
        # the middle cut slices every piece; the final pass unwinds with 8
        # twists to restore the original orientation and stop cutting.
        if cut_pass == 0:
            self.apply_generator(mesh_list, u_cut_disk)
            self.apply_generator(mesh_list, d_cut_disk, inverse=True)
            return True
        elif cut_pass == 1:
            self.apply_generator(mesh_list, u_cut_disk)
            self.apply_generator(mesh_list, d_cut_disk, inverse=True)
            self.apply_generator(mesh_list, u_cut_disk)
            self.apply_generator(mesh_list, d_cut_disk, inverse=True)
            return True
        elif cut_pass == 2:
            self.apply_generator(mesh_list, u_cut_disk)
            self.apply_generator(mesh_list, d_cut_disk, inverse=True)
            return True
        for i in range(8):
            self.apply_generator(mesh_list, u_cut_disk)
            self.apply_generator(mesh_list, d_cut_disk, inverse=True)
        return False
class Bagua(PuzzleDefinitionBase):
    """Bagua cube: six face cuts at +/- 1/2 with eighth turns (pi/4);
    bandaged, so extra cut passes rotate layers to slice all pieces."""

    def __init__(self):
        super().__init__()

    def bandages(self):
        # Moves can be blocked by piece shapes.
        return True

    def make_generator_mesh_list(self):
        # Same layout as a Rubik's cube but cut at half depth and with
        # eighth-turn generators.
        l_cut_disk = TriangleMesh.make_disk(Vector(-1.0 / 2.0, 0.0, 0.0), Vector(1.0, 0.0, 0.0), 4.0, 4)
        r_cut_disk = TriangleMesh.make_disk(Vector(1.0 / 2.0, 0.0, 0.0), Vector(-1.0, 0.0, 0.0), 4.0, 4)
        d_cut_disk = TriangleMesh.make_disk(Vector(0.0, -1.0 / 2.0, 0.0), Vector(0.0, 1.0, 0.0), 4.0, 4)
        u_cut_disk = TriangleMesh.make_disk(Vector(0.0, 1.0 / 2.0, 0.0), Vector(0.0, -1.0, 0.0), 4.0, 4)
        b_cut_disk = TriangleMesh.make_disk(Vector(0.0, 0.0, -1.0 / 2.0), Vector(0.0, 0.0, 1.0), 4.0, 4)
        f_cut_disk = TriangleMesh.make_disk(Vector(0.0, 0.0, 1.0 / 2.0), Vector(0.0, 0.0, -1.0), 4.0, 4)
        l_cut_disk = GeneratorMesh(mesh=l_cut_disk, axis=Vector(-1.0, 0.0, 0.0), angle=math.pi / 4.0, pick_point=Vector(-1.0, 0.0, 0.0))
        r_cut_disk = GeneratorMesh(mesh=r_cut_disk, axis=Vector(1.0, 0.0, 0.0), angle=math.pi / 4.0, pick_point=Vector(1.0, 0.0, 0.0))
        d_cut_disk = GeneratorMesh(mesh=d_cut_disk, axis=Vector(0.0, -1.0, 0.0), angle=math.pi / 4.0, pick_point=Vector(0.0, -1.0, 0.0))
        u_cut_disk = GeneratorMesh(mesh=u_cut_disk, axis=Vector(0.0, 1.0, 0.0), angle=math.pi / 4.0, pick_point=Vector(0.0, 1.0, 0.0))
        b_cut_disk = GeneratorMesh(mesh=b_cut_disk, axis=Vector(0.0, 0.0, -1.0), angle=math.pi / 4.0, pick_point=Vector(0.0, 0.0, -1.0))
        f_cut_disk = GeneratorMesh(mesh=f_cut_disk, axis=Vector(0.0, 0.0, 1.0), angle=math.pi / 4.0, pick_point=Vector(0.0, 0.0, 1.0))
        return [l_cut_disk, r_cut_disk, d_cut_disk, u_cut_disk, b_cut_disk, f_cut_disk]

    def transform_meshes_for_more_cutting(self, mesh_list, generator_mesh_list, cut_pass):
        l_cut_disk = generator_mesh_list[0]
        r_cut_disk = generator_mesh_list[1]
        d_cut_disk = generator_mesh_list[2]
        u_cut_disk = generator_mesh_list[3]
        b_cut_disk = generator_mesh_list[4]
        f_cut_disk = generator_mesh_list[5]
        # Each pass twists one axis pair an eighth turn (undoing the previous
        # pair first), so the diagonal cuts reach all pieces; the final pass
        # unwinds the Z pair and stops.
        if cut_pass == 0:
            self.apply_generator(mesh_list, l_cut_disk)
            self.apply_generator(mesh_list, r_cut_disk, inverse=True)
            return True
        elif cut_pass == 1:
            self.apply_generator(mesh_list, l_cut_disk, inverse=True)
            self.apply_generator(mesh_list, r_cut_disk)
            self.apply_generator(mesh_list, u_cut_disk)
            self.apply_generator(mesh_list, d_cut_disk, inverse=True)
            return True
        elif cut_pass == 2:
            self.apply_generator(mesh_list, u_cut_disk, inverse=True)
            self.apply_generator(mesh_list, d_cut_disk)
            self.apply_generator(mesh_list, b_cut_disk)
            self.apply_generator(mesh_list, f_cut_disk, inverse=True)
            return True
        self.apply_generator(mesh_list, b_cut_disk, inverse=True)
        self.apply_generator(mesh_list, f_cut_disk)
        return False
class PentacleCube(RubiksCube):
    """Pentacle cube: Rubik's face cuts plus a spherical circle cut on each
    face with tenth turns (pi/10); bandaged, with a staged cutting schedule."""

    def __init__(self):
        super().__init__()

    def bandages(self):
        # Moves can be blocked by piece shapes.
        return True

    def make_generator_mesh_list(self):
        # Indices 0-5: the inherited Rubik's face cuts.
        mesh_list = super().make_generator_mesh_list()
        vector_list = [
            Vector(-1.0, 0.0, 0.0),
            Vector(1.0, 0.0, 0.0),
            Vector(0.0, -1.0, 0.0),
            Vector(0.0, 1.0, 0.0),
            Vector(0.0, 0.0, -1.0),
            Vector(0.0, 0.0, 1.0)
        ]
        # Indices 6-11: a unit sphere centered on each face, turned in tenths.
        for vector in vector_list:
            mesh = GeneratorMesh(mesh=Sphere(vector, 1.0).make_mesh(subdivision_level=2), axis=vector, angle=math.pi / 10.0, pick_point=vector.resized(1.5))
            mesh_list.append(mesh)
        return mesh_list

    def can_apply_cutmesh_for_pass(self, i, cut_mesh, cut_pass, generator_mesh_list):
        # Pass 0 applies all six circle cuts (i >= 6); later passes each
        # apply a single face cut while the schedule below reorients pieces.
        if cut_pass == 0 and i >= 6:
            return True
        if cut_pass == 1 and i == 0: # left
            return True
        if cut_pass == 2 and i == 3: # up
            return True
        if cut_pass == 3 and i == 1: # right
            return True
        if cut_pass == 4 and i == 2: # down
            return True
        if cut_pass == 5 and i == 0: # left
            return True
        if cut_pass == 6 and i == 5: # forward
            return True
        if cut_pass == 7 and i == 4: # back
            return True
        if cut_pass == 8 and i == 5: # forward
            return True
        return False

    def transform_meshes_for_more_cutting(self, mesh_list, generator_mesh_list, cut_pass):
        # Circle generators added by make_generator_mesh_list (indices 6-11).
        l_cut_circle = generator_mesh_list[6]
        r_cut_circle = generator_mesh_list[7]
        d_cut_circle = generator_mesh_list[8]
        u_cut_circle = generator_mesh_list[9]
        b_cut_circle = generator_mesh_list[10]
        f_cut_circle = generator_mesh_list[11]
        if cut_pass == 0:
            pass
        # Between face-cut passes, twist the circles so the star points get
        # sliced at every orientation.
        if cut_pass == 1 or cut_pass == 2 or cut_pass == 3 or cut_pass == 4:
            self.apply_generator(mesh_list, f_cut_circle)
            self.apply_generator(mesh_list, b_cut_circle, inverse=True)
            self.apply_generator(mesh_list, u_cut_circle, inverse=True)
            self.apply_generator(mesh_list, d_cut_circle)
            self.apply_generator(mesh_list, l_cut_circle)
            self.apply_generator(mesh_list, r_cut_circle, inverse=True)
        if cut_pass == 5:
            for i in range(6):
                self.apply_generator(mesh_list, l_cut_circle)
                self.apply_generator(mesh_list, r_cut_circle, inverse=True)
            for i in range(3):
                self.apply_generator(mesh_list, u_cut_circle)
                self.apply_generator(mesh_list, d_cut_circle, inverse=True)
        if cut_pass == 6 or cut_pass == 7:
            for i in range(2):
                self.apply_generator(mesh_list, l_cut_circle)
                self.apply_generator(mesh_list, r_cut_circle, inverse=True)
            for i in range(2):
                self.apply_generator(mesh_list, u_cut_circle, inverse=True)
                self.apply_generator(mesh_list, d_cut_circle)
        # Nine passes total (0 through 8).
        return True if cut_pass < 8 else False

    def min_mesh_area(self):
        # Discard cut slivers smaller than this area.
        return 0.05
class MixupCube(PuzzleDefinitionBase):
    """The Mixup Cube: a 3x3x3 whose cut planes sit at distance tan(pi/8)
    from center, which lets 45-degree slice moves exchange edge and center
    pieces."""

    def __init__(self):
        super().__init__()

    def can_apply_cutmesh_for_pass(self, i, cut_mesh, cut_pass, generator_mesh_list):
        # Only the six face cuts (indices 0-5) are needed to cut the puzzle;
        # the slice generators (indices 6-11) lie in the same two planes per
        # axis, so they would add no new cuts.
        return i < 6

    def make_generator_mesh_list(self):
        """Return six quarter-turn face generators followed by six 45-degree
        slice generators (two per axis, one for each turn direction)."""
        # Cut planes at +/- tan(pi/8): the depth at which a 45-degree slice
        # turn maps edges onto centers.
        q = math.tan(math.pi / 8.0)
        l_cut_disk = TriangleMesh.make_disk(Vector(-q, 0.0, 0.0), Vector(1.0, 0.0, 0.0), 4.0, 4)
        r_cut_disk = TriangleMesh.make_disk(Vector(q, 0.0, 0.0), Vector(-1.0, 0.0, 0.0), 4.0, 4)
        d_cut_disk = TriangleMesh.make_disk(Vector(0.0, -q, 0.0), Vector(0.0, 1.0, 0.0), 4.0, 4)
        u_cut_disk = TriangleMesh.make_disk(Vector(0.0, q, 0.0), Vector(0.0, -1.0, 0.0), 4.0, 4)
        b_cut_disk = TriangleMesh.make_disk(Vector(0.0, 0.0, -q), Vector(0.0, 0.0, 1.0), 4.0, 4)
        f_cut_disk = TriangleMesh.make_disk(Vector(0.0, 0.0, q), Vector(0.0, 0.0, -1.0), 4.0, 4)
        l_cut_disk = GeneratorMesh(mesh=l_cut_disk, axis=Vector(-1.0, 0.0, 0.0), angle=math.pi / 2.0, pick_point=Vector(-1.0, 0.0, 0.0))
        r_cut_disk = GeneratorMesh(mesh=r_cut_disk, axis=Vector(1.0, 0.0, 0.0), angle=math.pi / 2.0, pick_point=Vector(1.0, 0.0, 0.0))
        d_cut_disk = GeneratorMesh(mesh=d_cut_disk, axis=Vector(0.0, -1.0, 0.0), angle=math.pi / 2.0, pick_point=Vector(0.0, -1.0, 0.0))
        u_cut_disk = GeneratorMesh(mesh=u_cut_disk, axis=Vector(0.0, 1.0, 0.0), angle=math.pi / 2.0, pick_point=Vector(0.0, 1.0, 0.0))
        b_cut_disk = GeneratorMesh(mesh=b_cut_disk, axis=Vector(0.0, 0.0, -1.0), angle=math.pi / 2.0, pick_point=Vector(0.0, 0.0, -1.0))
        f_cut_disk = GeneratorMesh(mesh=f_cut_disk, axis=Vector(0.0, 0.0, 1.0), angle=math.pi / 2.0, pick_point=Vector(0.0, 0.0, 1.0))
        mesh_list = [l_cut_disk, r_cut_disk, d_cut_disk, u_cut_disk, b_cut_disk, f_cut_disk]
        # Middle slice per axis: the region between the two cut planes,
        # expressed as the union of two outward-facing disks.  Each slice
        # gets two generators (opposite axes) so it can turn either way in
        # 45-degree (pi/4) steps.
        center_slice = TriangleMesh.make_disk(Vector(-q, 0.0, 0.0), Vector(-1.0, 0.0, 0.0), 4.0, 4) + TriangleMesh.make_disk(Vector(q, 0.0, 0.0), Vector(1.0, 0.0, 0.0), 4.0, 4)
        mesh_list.append(GeneratorMesh(mesh=center_slice, axis=Vector(1.0, 0.0, 0.0), angle=math.pi / 4.0, pick_point=Vector(1.5, 0.0, 0.0)))
        mesh_list.append(GeneratorMesh(mesh=center_slice, axis=Vector(-1.0, 0.0, 0.0), angle=math.pi / 4.0, pick_point=Vector(-1.5, 0.0, 0.0)))
        center_slice = TriangleMesh.make_disk(Vector(0.0, -q, 0.0), Vector(0.0, -1.0, 0.0), 4.0, 4) + TriangleMesh.make_disk(Vector(0.0, q, 0.0), Vector(0.0, 1.0, 0.0), 4.0, 4)
        mesh_list.append(GeneratorMesh(mesh=center_slice, axis=Vector(0.0, 1.0, 0.0), angle=math.pi / 4.0, pick_point=Vector(0.0, 1.5, 0.0)))
        mesh_list.append(GeneratorMesh(mesh=center_slice, axis=Vector(0.0, -1.0, 0.0), angle=math.pi / 4.0, pick_point=Vector(0.0, -1.5, 0.0)))
        center_slice = TriangleMesh.make_disk(Vector(0.0, 0.0, -q), Vector(0.0, 0.0, -1.0), 4.0, 4) + TriangleMesh.make_disk(Vector(0.0, 0.0, q), Vector(0.0, 0.0, 1.0), 4.0, 4)
        mesh_list.append(GeneratorMesh(mesh=center_slice, axis=Vector(0.0, 0.0, 1.0), angle=math.pi / 4.0, pick_point=Vector(0.0, 0.0, 1.5)))
        mesh_list.append(GeneratorMesh(mesh=center_slice, axis=Vector(0.0, 0.0, -1.0), angle=math.pi / 4.0, pick_point=Vector(0.0, 0.0, -1.5)))
        return mesh_list
class Dogic(PuzzleDefinitionBase):
    """The Dogic: an icosahedron with 72-degree twists about each vertex,
    each available at two depths (a deep cut through the ring of neighboring
    vertices, and a shallow tip cut halfway between that ring and the
    vertex)."""

    def __init__(self):
        super().__init__()

    def make_initial_mesh_list(self):
        """Build the 20 icosahedron faces; the polyhedron mesh is kept on
        self for generator construction."""
        self.mesh = TriangleMesh().make_polyhedron(Polyhedron.ICOSAHEDRON)
        face_mesh_list, plane_list = self.make_face_meshes(self.mesh.clone())
        return face_mesh_list

    def can_apply_cutmesh_for_pass(self, i, cut_mesh, cut_pass, generator_mesh_list):
        # Only the odd-indexed (shallow tip) cuts participate in cutting;
        # the even-indexed deep cuts remain usable as move generators.
        return i % 2 != 0

    def make_generator_mesh_list(self):
        """For each of the 12 vertices, build two 72-degree generators: a
        deep cut through the plane of the five neighboring vertices and a
        shallow cut halfway between that plane and the vertex."""
        mesh_list = []
        for vertex in self.mesh.vertex_list:
            # Gather the ring of vertices adjacent to this vertex.
            point_cloud = PointCloud()
            for triangle in self.mesh.yield_triangles():
                for i in range(3):
                    if triangle[i] == vertex:
                        # NOTE(review): i + 1 / i + 2 can reach 3 and 4, so
                        # Triangle indexing is presumably modular -- confirm
                        # against the Triangle class.
                        point_cloud.add_point(triangle[i + 1])
                        point_cloud.add_point(triangle[i + 2])
                        break
            center = point_cloud.calc_center()
            normal = vertex.normalized()
            # Deep cut: plane through the neighbor ring's centroid.
            disk = TriangleMesh.make_disk(center, -normal, 4.0, 4)
            mesh_list.append(GeneratorMesh(mesh=disk, axis=normal, angle=2.0 * math.pi / 5.0, pick_point=vertex))
            # Shallow tip cut: halfway between ring centroid and vertex.
            disk = TriangleMesh.make_disk((center + vertex) / 2.0, -normal, 4.0, 4)
            mesh_list.append(GeneratorMesh(mesh=disk, axis=normal, angle=2.0 * math.pi / 5.0, pick_point=vertex * 1.2))
        return mesh_list
class Bubbloid4x4x5(PuzzleDefinitionBase):
    """The Bubbloid 4x4x5: a cube stretched along Y, cut by eight spherical
    120-degree corner turns."""

    def __init__(self):
        super().__init__()
        # Cut-geometry proportions; s is the Y stretch factor derived from
        # them.  NOTE(review): presumably chosen so the corner spheres meet
        # the stretched box correctly -- confirm against the puzzle spec.
        self.a = 2.0 - math.sqrt(2.0)
        self.b = 1.0 - self.a
        self.s = (2.0 * self.a + 3.0 * self.b) / (2.0 * self.a + 2.0 * self.b)

    def make_initial_mesh_list(self):
        """Return the base box faces stretched by factor s along Y."""
        mesh_list = super().make_initial_mesh_list()
        scale_transform = LinearTransform().make_non_uniform_scale(1.0, self.s, 1.0)
        mesh_list = [scale_transform(mesh) for mesh in mesh_list]
        return mesh_list

    def make_generator_mesh_list(self):
        """Return one spherical 120-degree generator per corner of the
        stretched box."""
        scale_transform = LinearTransform().make_non_uniform_scale(1.0, self.s, 1.0)
        radius = self.a + 2.0 * self.b
        mesh_list = []
        for vector in Vector(1.0, 1.0, 1.0).sign_permute():
            center = scale_transform(vector)
            mesh = GeneratorMesh(mesh=Sphere(center, radius).make_mesh(subdivision_level=2), axis=vector.normalized(), angle=2.0 * math.pi / 3.0, center=center, pick_point=center)
            mesh_list.append(mesh)
        return mesh_list

    def find_generator_with_axis(self, generator_mesh_list, axis):
        """Return the generator whose unit axis matches the given axis, or
        None when no generator matches."""
        unit_axis = axis.normalized()  # hoisted out of the loop
        for mesh in generator_mesh_list:
            if (mesh.axis - unit_axis).length() < 1e-6:
                return mesh
        return None

    def transform_meshes_for_more_cutting(self, mesh_list, generator_mesh_list, cut_pass):
        """Rotate the meshes between cutting passes.

        Corner generator k is applied on passes 2k, 2k+1 and 2k+2 (so
        consecutive corners overlap by one pass); passes 16-18 then run fixed
        twist sequences around the top and bottom corner rings.  Returns True
        while more passes remain (passes 0..18)."""
        for k in range(8):
            if 2 * k <= cut_pass <= 2 * k + 2:
                self.apply_generator(mesh_list, generator_mesh_list[k])
        if cut_pass == 16:
            self.apply_generator(mesh_list, self.find_generator_with_axis(generator_mesh_list, Vector(1.0, 1.0, 1.0)), inverse=True)
            self.apply_generator(mesh_list, self.find_generator_with_axis(generator_mesh_list, Vector(1.0, 1.0, -1.0)), inverse=True)
            self.apply_generator(mesh_list, self.find_generator_with_axis(generator_mesh_list, Vector(-1.0, 1.0, -1.0)), inverse=True)
            self.apply_generator(mesh_list, self.find_generator_with_axis(generator_mesh_list, Vector(-1.0, 1.0, 1.0)), inverse=True)
            self.apply_generator(mesh_list, self.find_generator_with_axis(generator_mesh_list, Vector(1.0, 1.0, 1.0)), inverse=True)
        elif cut_pass == 17:
            self.apply_generator(mesh_list, self.find_generator_with_axis(generator_mesh_list, Vector(1.0, 1.0, 1.0)))
            self.apply_generator(mesh_list, self.find_generator_with_axis(generator_mesh_list, Vector(-1.0, 1.0, 1.0)))
            self.apply_generator(mesh_list, self.find_generator_with_axis(generator_mesh_list, Vector(-1.0, 1.0, -1.0)))
            self.apply_generator(mesh_list, self.find_generator_with_axis(generator_mesh_list, Vector(1.0, 1.0, -1.0)))
            self.apply_generator(mesh_list, self.find_generator_with_axis(generator_mesh_list, Vector(1.0, 1.0, 1.0)))
            self.apply_generator(mesh_list, self.find_generator_with_axis(generator_mesh_list, Vector(1.0, -1.0, 1.0)))
            self.apply_generator(mesh_list, self.find_generator_with_axis(generator_mesh_list, Vector(1.0, -1.0, -1.0)))
            self.apply_generator(mesh_list, self.find_generator_with_axis(generator_mesh_list, Vector(-1.0, -1.0, -1.0)))
            self.apply_generator(mesh_list, self.find_generator_with_axis(generator_mesh_list, Vector(-1.0, -1.0, 1.0)))
            self.apply_generator(mesh_list, self.find_generator_with_axis(generator_mesh_list, Vector(1.0, -1.0, 1.0)))
        elif cut_pass == 18:
            self.apply_generator(mesh_list, self.find_generator_with_axis(generator_mesh_list, Vector(1.0, -1.0, 1.0)), inverse=True)
            self.apply_generator(mesh_list, self.find_generator_with_axis(generator_mesh_list, Vector(-1.0, -1.0, 1.0)), inverse=True)
            self.apply_generator(mesh_list, self.find_generator_with_axis(generator_mesh_list, Vector(-1.0, -1.0, -1.0)), inverse=True)
            self.apply_generator(mesh_list, self.find_generator_with_axis(generator_mesh_list, Vector(1.0, -1.0, -1.0)), inverse=True)
            self.apply_generator(mesh_list, self.find_generator_with_axis(generator_mesh_list, Vector(1.0, -1.0, 1.0)), inverse=True)
        return cut_pass < 18

    def can_apply_cutmesh_for_pass(self, i, cut_mesh, cut_pass, generator_mesh_list):
        """Decide which corner cuts act on each pass.

        Pass 0 cuts with all corners.  Passes 1..16 belong to corner
        generator (cut_pass - 1) // 2 and only cut with the three corners
        adjacent to it.  Passes 17/18 cut only with the bottom/top corner
        rings respectively."""
        if cut_pass == 0:
            return True
        root3 = math.sqrt(3.0)
        if 0 < cut_pass <= 16:
            # resized(sqrt(3)) maps a unit corner axis back onto the cube
            # corner (+/-1, +/-1, +/-1); adjacent corners differ in exactly
            # one coordinate, i.e. are exactly 2 units apart.
            vector = generator_mesh_list[(cut_pass - 1) // 2].axis.resized(root3)
            distance = (vector - cut_mesh.axis.resized(root3)).length()
            return math.fabs(distance - 2.0) < 1e-5
        if cut_pass == 17:
            return math.fabs(cut_mesh.axis.resized(root3).y + 1.0) < 1e-5
        if cut_pass == 18:
            return math.fabs(cut_mesh.axis.resized(root3).y - 1.0) < 1e-5
        return False
class Rubiks2x2(PuzzleDefinitionBase):
    """The 2x2x2 Rubik's cube (pocket cube)."""

    def __init__(self):
        super().__init__()

    def make_generator_mesh_list(self):
        """Return one quarter-turn generator per face.

        Every cut plane passes through the origin; the disk normal opposes
        the turn axis so the generator captures the half-space on the axis
        side."""
        axis_tuple_list = [
            (-1.0, 0.0, 0.0),  # left
            (1.0, 0.0, 0.0),   # right
            (0.0, -1.0, 0.0),  # down
            (0.0, 1.0, 0.0),   # up
            (0.0, 0.0, -1.0),  # back
            (0.0, 0.0, 1.0),   # front
        ]
        generator_list = []
        for x, y, z in axis_tuple_list:
            cut_disk = TriangleMesh.make_disk(Vector(0.0, 0.0, 0.0), Vector(0.0 - x, 0.0 - y, 0.0 - z), 4.0, 4)
            generator_list.append(GeneratorMesh(mesh=cut_disk, axis=Vector(x, y, z), angle=math.pi / 2.0, pick_point=Vector(x, y, z)))
        return generator_list
class Rubiks4x4(PuzzleDefinitionBase):
    """The 4x4x4 Rubik's cube (Rubik's Revenge)."""

    def __init__(self):
        super().__init__()

    def make_generator_mesh_list(self):
        """Return six outer-layer generators followed by six two-layer
        (half-cube) generators, in l, r, d, u, b, f order for each group."""
        axis_tuple_list = [
            (-1.0, 0.0, 0.0),  # left
            (1.0, 0.0, 0.0),   # right
            (0.0, -1.0, 0.0),  # down
            (0.0, 1.0, 0.0),   # up
            (0.0, 0.0, -1.0),  # back
            (0.0, 0.0, 1.0),   # front
        ]
        mesh_list = []
        # Outer face layers: cut plane halfway out along the axis, with the
        # disk normal facing inward so the outer slab is captured.
        for x, y, z in axis_tuple_list:
            cut_disk = TriangleMesh.make_disk(Vector(0.5 * x, 0.5 * y, 0.5 * z), Vector(0.0 - x, 0.0 - y, 0.0 - z), 4.0, 4)
            mesh_list.append(GeneratorMesh(mesh=cut_disk, axis=Vector(x, y, z), angle=math.pi / 2.0, pick_point=Vector(x, y, z)))
        # Two-layer turns: the halfway plane facing outward plus a disk
        # through the origin facing inward; picked slightly outside the cube.
        for x, y, z in axis_tuple_list:
            slab = TriangleMesh.make_disk(Vector(0.5 * x, 0.5 * y, 0.5 * z), Vector(x, y, z), 4.0, 4) + TriangleMesh.make_disk(Vector(0.0, 0.0, 0.0), Vector(0.0 - x, 0.0 - y, 0.0 - z), 4.0, 4)
            mesh_list.append(GeneratorMesh(mesh=slab, axis=Vector(x, y, z), angle=math.pi / 2.0, pick_point=Vector(1.2 * x, 1.2 * y, 1.2 * z)))
        return mesh_list
class Pyraminx(PuzzleDefinitionBase):
    """The Pyraminx: a tetrahedron with 120-degree twists at multiple depths
    parallel to each face."""

    def __init__(self):
        super().__init__()

    def make_initial_mesh_list(self):
        """Build the four faces of a tetrahedron scaled by 1.5, recording the
        face-to-opposite-vertex height needed to place the cut planes."""
        self.mesh = TriangleMesh().make_polyhedron(Polyhedron.TETRAHEDRON)
        self.mesh = LinearTransform().make_uniform_scale(1.5)(self.mesh)
        triangle = self.mesh.make_triangle(self.mesh.find_triangle((0, 1, 2), True, True))
        plane = triangle.calc_plane()
        # Height measured from face (0, 1, 2) to the opposite vertex 3;
        # negated because the vertex lies on the back side of the face plane.
        self.distance = -plane.point_distance(self.mesh.vertex_list[3])
        face_mesh_list, plane_list = self.make_face_meshes(self.mesh.clone())
        return face_mesh_list

    def make_generator_mesh_list(self):
        """For each face create three 120-degree generators: the face layer
        (plane at 1/3 height, turning about the face normal) and two
        opposite-vertex turns (the same 1/3 plane facing the other way, and
        a deeper 2/3 plane)."""
        mesh_list = []
        for triangle in self.mesh.yield_triangles():
            center = triangle.calc_center()
            plane = triangle.calc_plane()
            disk = TriangleMesh.make_disk(center - plane.unit_normal * self.distance / 3.0, -plane.unit_normal, 8.0, 4)
            mesh_list.append(GeneratorMesh(mesh=disk, axis=plane.unit_normal, angle=2.0 * math.pi / 3.0, pick_point=center))
            disk = TriangleMesh.make_disk(center - plane.unit_normal * self.distance / 3.0, plane.unit_normal, 8.0, 4)
            mesh_list.append(GeneratorMesh(mesh=disk, axis=-plane.unit_normal, angle=2.0 * math.pi / 3.0, pick_point=center - plane.unit_normal * self.distance))
            disk = TriangleMesh.make_disk(center - plane.unit_normal * 2.0 * self.distance / 3.0, plane.unit_normal, 8.0, 4)
            mesh_list.append(GeneratorMesh(mesh=disk, axis=-plane.unit_normal, angle=2.0 * math.pi / 3.0, pick_point=center - plane.unit_normal * self.distance * 1.1))
        return mesh_list

    def can_apply_cutmesh_for_pass(self, i, cut_mesh, cut_pass, generator_mesh_list):
        # The i % 3 == 1 generator lies in the same plane as the i % 3 == 0
        # generator (same center, flipped normal), so it adds no new cuts
        # and is skipped during cutting.
        return i % 3 != 1
class BauhiniaDodecahedron(PuzzleDefinitionBase):
    """The Bauhinia Dodecahedron: a dodecahedron with a spherical 120-degree
    turn centered on every vertex, each sphere's radius equal to one edge
    length."""

    def __init__(self):
        super().__init__()

    def make_initial_mesh_list(self):
        """Build the dodecahedron's faces, recording the vertices and the
        edge length needed to size the vertex-turn spheres."""
        base_mesh = TriangleMesh().make_polyhedron(Polyhedron.DODECAHEDRON)
        self.vertex_list = list(base_mesh.vertex_list)
        face_mesh_list, plane_list = self.make_face_meshes(base_mesh)
        # Measure one boundary edge of the first face to get the edge length.
        first_face = face_mesh_list[0]
        loop_list = first_face.find_boundary_loops()
        corner_a = first_face.vertex_list[loop_list[0][0]]
        corner_b = first_face.vertex_list[loop_list[0][1]]
        self.edge_length = (corner_a - corner_b).length()
        return face_mesh_list

    def make_generator_mesh_list(self):
        """Return one spherical 120-degree generator per vertex."""
        return [
            GeneratorMesh(mesh=Sphere(vertex, self.edge_length).make_mesh(subdivision_level=2), axis=vertex.normalized(), angle=2.0 * math.pi / 3.0, pick_point=vertex)
            for vertex in self.vertex_list
        ]
class SkewbUltimate(PuzzleDefinitionBase):
    """The Skewb Ultimate: a dodecahedron cut by the four skewb planes
    (through the origin, perpendicular to the cube diagonals), each turning
    by 120 degrees."""

    def __init__(self):
        super().__init__()

    def make_initial_mesh_list(self):
        """Build the twelve faces of a dodecahedron."""
        dodecahedron = TriangleMesh().make_polyhedron(Polyhedron.DODECAHEDRON)
        face_mesh_list, plane_list = self.make_face_meshes(dodecahedron)
        return face_mesh_list

    def make_generator_mesh_list(self):
        """Return one 120-degree generator per cube-diagonal direction."""
        generator_list = []
        for corner in Vector(1.0, 1.0, 1.0).sign_permute():
            unit = corner.normalized()
            cut_disk = TriangleMesh.make_disk(Vector(0.0, 0.0, 0.0), unit, 4.0, 4)
            # Turn axis opposes the disk normal; picked outside the solid.
            generator_list.append(GeneratorMesh(mesh=cut_disk, axis=-unit, angle=2.0 * math.pi / 3.0, pick_point=unit * -1.5))
        return generator_list
class Rubiks2x3x3(PuzzleDefinitionBase):
    """The 2x3x3 cuboid ("Domino"): full 3x3 layers about Y (quarter turns),
    half turns only about X and Z."""

    def __init__(self):
        super().__init__()

    def make_initial_mesh_list(self):
        """Return the base cube faces squashed to 2/3 height along Y."""
        base_mesh_list = super().make_initial_mesh_list()
        squash = LinearTransform().make_non_uniform_scale(1.0, 2.0 / 3.0, 1.0)
        return squash(base_mesh_list)

    def make_generator_mesh_list(self):
        """Return the six face generators in l, r, d, u, b, f order."""
        # Each row: (disk center, disk normal, turn axis, turn angle, pick point).
        # X and Z faces only allow half turns (pi); Y faces allow quarter turns.
        spec_list = [
            ((-1.0 / 3.0, 0.0, 0.0), (1.0, 0.0, 0.0), (-1.0, 0.0, 0.0), math.pi, (-1.0, 0.0, 0.0)),        # left
            ((1.0 / 3.0, 0.0, 0.0), (-1.0, 0.0, 0.0), (1.0, 0.0, 0.0), math.pi, (1.0, 0.0, 0.0)),          # right
            ((0.0, 0.0, 0.0), (0.0, 1.0, 0.0), (0.0, -1.0, 0.0), math.pi / 2.0, (0.0, -2.0 / 3.0, 0.0)),   # down
            ((0.0, 0.0, 0.0), (0.0, -1.0, 0.0), (0.0, 1.0, 0.0), math.pi / 2.0, (0.0, 2.0 / 3.0, 0.0)),    # up
            ((0.0, 0.0, -1.0 / 3.0), (0.0, 0.0, 1.0), (0.0, 0.0, -1.0), math.pi, (0.0, 0.0, -1.0)),        # back
            ((0.0, 0.0, 1.0 / 3.0), (0.0, 0.0, -1.0), (0.0, 0.0, 1.0), math.pi, (0.0, 0.0, 1.0)),          # front
        ]
        generator_list = []
        for center, normal, axis, angle, pick in spec_list:
            cut_disk = TriangleMesh.make_disk(Vector(*center), Vector(*normal), 4.0, 4)
            generator_list.append(GeneratorMesh(mesh=cut_disk, axis=Vector(*axis), angle=angle, pick_point=Vector(*pick)))
        return generator_list
class Rubiks2x2x3(PuzzleDefinitionBase):
    """The 2x2x3 cuboid ("Tower"): quarter turns of the three layers about Y,
    half turns only about X and Z."""

    def __init__(self):
        super().__init__()

    def make_initial_mesh_list(self):
        """Return the base cube faces squashed to 2/3 width along X and Z."""
        base_mesh_list = super().make_initial_mesh_list()
        squash = LinearTransform().make_non_uniform_scale(2.0 / 3.0, 1.0, 2.0 / 3.0)
        return squash(base_mesh_list)

    def make_generator_mesh_list(self):
        """Return the six face generators in l, r, d, u, b, f order."""
        # Each row: (disk center, disk normal, turn axis, turn angle, pick point).
        # X and Z faces cut through the origin and only allow half turns (pi);
        # Y faces cut at +/- 1/3 and allow quarter turns.
        spec_list = [
            ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0), (-1.0, 0.0, 0.0), math.pi, (-2.0 / 3.0, 0.0, 0.0)),         # left
            ((0.0, 0.0, 0.0), (-1.0, 0.0, 0.0), (1.0, 0.0, 0.0), math.pi, (2.0 / 3.0, 0.0, 0.0)),          # right
            ((0.0, -1.0 / 3.0, 0.0), (0.0, 1.0, 0.0), (0.0, -1.0, 0.0), math.pi / 2.0, (0.0, -1.0, 0.0)),  # down
            ((0.0, 1.0 / 3.0, 0.0), (0.0, -1.0, 0.0), (0.0, 1.0, 0.0), math.pi / 2.0, (0.0, 1.0, 0.0)),    # up
            ((0.0, 0.0, 0.0), (0.0, 0.0, 1.0), (0.0, 0.0, -1.0), math.pi, (0.0, 0.0, -2.0 / 3.0)),         # back
            ((0.0, 0.0, 0.0), (0.0, 0.0, -1.0), (0.0, 0.0, 1.0), math.pi, (0.0, 0.0, 2.0 / 3.0)),          # front
        ]
        generator_list = []
        for center, normal, axis, angle, pick in spec_list:
            cut_disk = TriangleMesh.make_disk(Vector(*center), Vector(*normal), 4.0, 4)
            generator_list.append(GeneratorMesh(mesh=cut_disk, axis=Vector(*axis), angle=angle, pick_point=Vector(*pick)))
        return generator_list
class Crazy2x3x3(Rubiks2x3x3): # TODO: What about the special move this puzzle has?
    """A 2x3x3 with a circle carved into the U and D faces by a central
    cylinder; the circle region is coupled to the U/D turns through capture
    trees."""

    def __init__(self):
        super().__init__()

    def make_generator_mesh_list(self):
        """Return the parent 2x3x3 generators plus two cylinder generators
        (one per turn direction about Y)."""
        mesh_list = super().make_generator_mesh_list()
        # Tall cylinder along Y, radius 0.7: carves the circle into the U
        # and D faces.  pick_point=None: presumably not directly pickable
        # by the user -- confirm against the picking code.
        cylinder = Cylinder(Vector(0.0, -3.0, 0.0), Vector(0.0, 3.0, 0.0), 0.7).make_mesh(subdivision_level=2)
        mesh_list.append(GeneratorMesh(mesh=cylinder, axis=Vector(0.0, 1.0, 0.0), angle=math.pi / 2.0, pick_point=None))
        mesh_list.append(GeneratorMesh(mesh=cylinder, axis=Vector(0.0, -1.0, 0.0), angle=math.pi / 2.0, pick_point=None))
        # Parent list order is [l, r, d, u, b, f]; index 6 is the +Y cylinder.
        # NOTE(review): capture_tree_root appears to be a small CSG expression
        # over generator-mesh indices that decides which pieces a turn
        # captures -- confirm against the engine's capture logic.
        u_cut_disk = mesh_list[3]
        # U turn captures its layer MINUS the cylinder region.
        u_cut_disk.capture_tree_root = {
            'op': 'subtract',
            'children': [
                {'mesh': 3},
                {'mesh': 6},
            ]
        }
        d_cut_disk = mesh_list[2]
        # D turn captures its layer PLUS the cylinder region.
        d_cut_disk.capture_tree_root = {
            'op': 'union',
            'children': [
                {'mesh': 2},
                {'mesh': 6}
            ]
        }
        return mesh_list
class Gem8(PuzzleDefinitionBase):
    """The Gem8 puzzle, built on a truncated tetrahedron: a deep 120-degree
    turn behind each hexagonal face and a shallow tip turn behind each
    triangular face."""

    def __init__(self):
        super().__init__()

    def make_initial_mesh_list(self):
        """Build the faces of a truncated tetrahedron at half scale; the face
        meshes and planes are kept for generator construction."""
        mesh = TriangleMesh().make_polyhedron(Polyhedron.TRUNCATED_TETRAHEDRON)
        transform = LinearTransform().make_uniform_scale(0.5)
        mesh = transform(mesh)
        self.face_mesh_list, self.plane_list = self.make_face_meshes(mesh)
        return self.face_mesh_list

    def make_generator_mesh_list(self):
        """Build the deep cuts (one per hexagonal face plane) and the tip
        cuts (one per triangular face)."""
        # The triangular faces are exactly the single-triangle face meshes.
        triangle_list = []
        for face_mesh in self.face_mesh_list:
            if len(face_mesh.triangle_list) == 1:
                triangle_list.append(face_mesh.make_triangle(0))
        mesh_list = []
        for plane in self.plane_list:
            # Locate the deep-cut disk center for this face plane: average
            # the midpoints of the triangle edges that do NOT lie in the
            # plane, over every triangular face having exactly two vertices
            # (i.e. a shared edge) in the plane.  Only hexagonal face planes
            # have such neighbors (3 triangles x 2 midpoints = 6 points);
            # triangular face planes accumulate nothing and are skipped.
            center = Vector(0.0, 0.0, 0.0)
            for triangle in triangle_list:
                count = sum([1 if plane.contains_point(triangle[i]) else 0 for i in range(3)])
                if count == 2:
                    for line_segment in triangle.yield_line_segments():
                        point = line_segment.lerp(0.5)
                        if not plane.contains_point(point):
                            center += point
            if center.length() > 0.0:
                center /= 6.0
                mesh = TriangleMesh.make_disk(center, -plane.unit_normal, 4.0, 4)
                mesh = GeneratorMesh(mesh=mesh, axis=plane.unit_normal, angle=2.0 * math.pi / 3.0, pick_point=center)
                mesh_list.append(mesh)
        # Shallow tip turns behind each triangular face.
        for triangle in triangle_list:
            plane = triangle.calc_plane()
            center = 5.0 * plane.center / 8.0 # This isn't exact, but close enough; we get a puzzle isomorphic to the correct puzzle.
            mesh = TriangleMesh.make_disk(center, -plane.unit_normal, 4.0, 4)
            mesh = GeneratorMesh(mesh=mesh, axis=plane.unit_normal, angle=2.0 * math.pi / 3.0, pick_point=plane.center)
            mesh_list.append(mesh)
        return mesh_list
class CubesOnDisk(PuzzleDefinitionBase):
    """Three small cubes arranged at 120-degree intervals on a disk.  The
    whole disk turns by 120 degrees about +/-Y, and each face of each cube
    turns like a single-layer cube move."""

    def __init__(self):
        super().__init__()
        # One rigid-body transform per cube; populated by make_initial_mesh_list.
        self.transform_list = []
        # Uniform scale applied to each cube.
        self.overall_scale = 0.5

    def make_initial_mesh_list(self):
        """Place three scaled cubes around the Y axis and return all of their
        face meshes (faces of cube 0, then cube 1, then cube 2)."""
        mesh = TriangleMesh().make_polyhedron(Polyhedron.HEXAHEDRON)
        mesh = LinearTransform().make_uniform_scale(self.overall_scale)(mesh)
        # Each cube is pushed out along +X, then rotated into place about +Y.
        translation = AffineTransform().make_translation(Vector(2.0 * self.overall_scale, 0.0, 0.0))
        rotation = AffineTransform().make_rotation(Vector(0.0, 1.0, 0.0), 2.0 * math.pi * (0.0 / 3.0))
        self.transform_list.append(rotation(translation))
        rotation = AffineTransform().make_rotation(Vector(0.0, 1.0, 0.0), 2.0 * math.pi * (1.0 / 3.0))
        self.transform_list.append(rotation(translation))
        rotation = AffineTransform().make_rotation(Vector(0.0, 1.0, 0.0), 2.0 * math.pi * (2.0 / 3.0))
        self.transform_list.append(rotation(translation))
        mesh_list = []
        for transform in self.transform_list:
            face_mesh_list, plane_list = self.make_face_meshes(transform(mesh))
            for face_mesh in face_mesh_list:
                mesh_list.append(face_mesh)
        return mesh_list

    def make_generator_mesh_list(self):
        """Return the two whole-disk rotations followed by six face-slab
        generators per cube."""
        mesh_list = []
        # Whole-disk 120-degree turns: one wide disk just above center
        # (turning about +Y) and one just below (turning about -Y).
        mesh = TriangleMesh.make_disk(Vector(0.0, self.overall_scale / 3.0, 0.0), Vector(0.0, -1.0, 0.0), 10.0, 6)
        mesh = GeneratorMesh(mesh=mesh, axis=Vector(0.0, 1.0, 0.0), angle=2.0 * math.pi / 3.0, pick_point=Vector(0.0, self.overall_scale, 0.0))
        mesh_list.append(mesh)
        mesh = TriangleMesh.make_disk(Vector(0.0, -self.overall_scale / 3.0, 0.0), Vector(0.0, 1.0, 0.0), 10.0, 6)
        mesh = GeneratorMesh(mesh=mesh, axis=Vector(0.0, -1.0, 0.0), angle=2.0 * math.pi / 3.0, pick_point=Vector(0.0, -self.overall_scale, 0.0))
        mesh_list.append(mesh)
        # Build thin box "slabs", one orientation per axis pair, each a third
        # of the cube thick, then enlarge slightly (scale 1.1) so a slab
        # fully contains the face layer it is meant to capture.
        base_mesh = TriangleMesh().make_polyhedron(Polyhedron.HEXAHEDRON)
        base_mesh = LinearTransform().make_uniform_scale(self.overall_scale)(base_mesh)
        lr_mesh = LinearTransform().make_non_uniform_scale(1.0 / 3.0, 1.0, 1.0)(base_mesh)
        ud_mesh = LinearTransform().make_non_uniform_scale(1.0, 1.0 / 3.0, 1.0)(base_mesh)
        fb_mesh = LinearTransform().make_non_uniform_scale(1.0, 1.0, 1.0 / 3.0)(base_mesh)
        scale = 1.1
        lr_mesh = LinearTransform().make_uniform_scale(scale)(lr_mesh)
        ud_mesh = LinearTransform().make_uniform_scale(scale)(ud_mesh)
        fb_mesh = LinearTransform().make_uniform_scale(scale)(fb_mesh)
        # Offset that centers each enlarged slab on its cube face.
        alpha = (self.overall_scale / 3.0) * (1.0 + scale)
        l_mesh = AffineTransform().make_translation(Vector(-alpha, 0.0, 0.0))(lr_mesh)
        r_mesh = AffineTransform().make_translation(Vector(alpha, 0.0, 0.0))(lr_mesh)
        d_mesh = AffineTransform().make_translation(Vector(0.0, -alpha, 0.0))(ud_mesh)
        u_mesh = AffineTransform().make_translation(Vector(0.0, alpha, 0.0))(ud_mesh)
        b_mesh = AffineTransform().make_translation(Vector(0.0, 0.0, -alpha))(fb_mesh)
        f_mesh = AffineTransform().make_translation(Vector(0.0, 0.0, alpha))(fb_mesh)
        # Replicate the six slabs onto each of the three cubes, transforming
        # mesh, rotation center, axis and pick point into the cube's frame.
        for transform in self.transform_list:
            mesh_list.append(GeneratorMesh(mesh=transform(l_mesh), center=transform(Vector(-self.overall_scale, 0.0, 0.0)), axis=transform.linear_transform(Vector(-1.0, 0.0, 0.0)), angle=math.pi / 2.0, pick_point=transform(Vector(-self.overall_scale, 0.0, 0.0))))
            mesh_list.append(GeneratorMesh(mesh=transform(r_mesh), center=transform(Vector(self.overall_scale, 0.0, 0.0)), axis=transform.linear_transform(Vector(1.0, 0.0, 0.0)), angle=math.pi / 2.0, pick_point=transform(Vector(self.overall_scale, 0.0, 0.0))))
            mesh_list.append(GeneratorMesh(mesh=transform(d_mesh), center=transform(Vector(0.0, -self.overall_scale, 0.0)), axis=transform.linear_transform(Vector(0.0, -1.0, 0.0)), angle=math.pi / 2.0, pick_point=transform(Vector(0.0, -self.overall_scale, 0.0))))
            mesh_list.append(GeneratorMesh(mesh=transform(u_mesh), center=transform(Vector(0.0, self.overall_scale, 0.0)), axis=transform.linear_transform(Vector(0.0, 1.0, 0.0)), angle=math.pi / 2.0, pick_point=transform(Vector(0.0, self.overall_scale, 0.0))))
            mesh_list.append(GeneratorMesh(mesh=transform(b_mesh), center=transform(Vector(0.0, 0.0, -self.overall_scale)), axis=transform.linear_transform(Vector(0.0, 0.0, -1.0)), angle=math.pi / 2.0, pick_point=transform(Vector(0.0, 0.0, -self.overall_scale))))
            mesh_list.append(GeneratorMesh(mesh=transform(f_mesh), center=transform(Vector(0.0, 0.0, self.overall_scale)), axis=transform.linear_transform(Vector(0.0, 0.0, 1.0)), angle=math.pi / 2.0, pick_point=transform(Vector(0.0, 0.0, self.overall_scale))))
        return mesh_list
class WormHoleBase(PuzzleDefinitionBase):
    """Common construction for the Worm Hole puzzles.  Each cube face is
    assembled from a pre-cut grid of quads (a central square, corner squares
    and edge strips) instead of being cut by generator meshes."""

    def __init__(self):
        super().__init__()
        # Face layout proportions: beta is the edge-strip width, alpha the
        # central-square size.  alpha + 4 * beta = 2, the full face width.
        self.beta = 2.0 / (math.sqrt(2.0) + 4.0)
        self.alpha = math.sqrt(2.0) * self.beta
    def make_initial_mesh_list(self):
        """Assemble the six pre-cut faces of the cube.

        A template face is built in the z = 0 plane from nine quads, then
        rigidly transformed onto each of the six cube faces and colored.
        Note the inner halves of the four edge strips are NOT part of this
        template; they come from make_other_faces below."""
        common_face_mesh_list = []
        # Central square, alpha x alpha.
        common_face_mesh_list.append(TriangleMesh().add_quad(
            Vector(-self.alpha / 2.0, -self.alpha / 2.0, 0.0),
            Vector(self.alpha / 2.0, -self.alpha / 2.0, 0.0),
            Vector(self.alpha / 2.0, self.alpha / 2.0, 0.0),
            Vector(-self.alpha / 2.0, self.alpha / 2.0, 0.0)
        ))
        # Four corner squares, 2*beta x 2*beta each.
        common_face_mesh_list.append(TriangleMesh().add_quad(
            Vector(-self.alpha / 2.0 - 2.0 * self.beta, -self.alpha / 2.0 - 2.0 * self.beta, 0.0),
            Vector(-self.alpha / 2.0, -self.alpha / 2.0 - 2.0 * self.beta, 0.0),
            Vector(-self.alpha / 2.0, -self.alpha / 2.0, 0.0),
            Vector(-self.alpha / 2.0 - 2.0 * self.beta, -self.alpha / 2.0, 0.0),
        ))
        common_face_mesh_list.append(TriangleMesh().add_quad(
            Vector(self.alpha / 2.0, -self.alpha / 2.0 - 2.0 * self.beta, 0.0),
            Vector(self.alpha / 2.0 + 2.0 * self.beta, -self.alpha / 2.0 - 2.0 * self.beta, 0.0),
            Vector(self.alpha / 2.0 + 2.0 * self.beta, -self.alpha / 2.0, 0.0),
            Vector(self.alpha / 2.0, -self.alpha / 2.0, 0.0),
        ))
        common_face_mesh_list.append(TriangleMesh().add_quad(
            Vector(self.alpha / 2.0, self.alpha / 2.0, 0.0),
            Vector(self.alpha / 2.0 + 2.0 * self.beta, self.alpha / 2.0, 0.0),
            Vector(self.alpha / 2.0 + 2.0 * self.beta, self.alpha / 2.0 + 2.0 * self.beta, 0.0),
            Vector(self.alpha / 2.0, self.alpha / 2.0 + 2.0 * self.beta, 0.0),
        ))
        common_face_mesh_list.append(TriangleMesh().add_quad(
            Vector(-self.alpha / 2.0 - 2.0 * self.beta, self.alpha / 2.0, 0.0),
            Vector(-self.alpha / 2.0, self.alpha / 2.0, 0.0),
            Vector(-self.alpha / 2.0, self.alpha / 2.0 + 2.0 * self.beta, 0.0),
            Vector(-self.alpha / 2.0 - 2.0 * self.beta, self.alpha / 2.0 + 2.0 * self.beta, 0.0),
        ))
        # Outer halves of the four edge strips (beta wide); the inner halves
        # are supplied separately by make_other_faces.
        common_face_mesh_list.append(TriangleMesh().add_quad(
            Vector(-self.alpha / 2.0, -self.alpha / 2.0 - 2.0 * self.beta, 0.0),
            Vector(self.alpha / 2.0, -self.alpha / 2.0 - 2.0 * self.beta, 0.0),
            Vector(self.alpha / 2.0, -self.alpha / 2.0 - self.beta, 0.0),
            Vector(-self.alpha / 2.0, -self.alpha / 2.0 - self.beta, 0.0),
        ))
        common_face_mesh_list.append(TriangleMesh().add_quad(
            Vector(self.alpha / 2.0 + self.beta, -self.alpha / 2.0, 0.0),
            Vector(self.alpha / 2.0 + 2.0 * self.beta, -self.alpha / 2.0, 0.0),
            Vector(self.alpha / 2.0 + 2.0 * self.beta, self.alpha / 2.0, 0.0),
            Vector(self.alpha / 2.0 + self.beta, self.alpha / 2.0, 0.0),
        ))
        common_face_mesh_list.append(TriangleMesh().add_quad(
            Vector(-self.alpha / 2.0, self.alpha / 2.0 + self.beta, 0.0),
            Vector(self.alpha / 2.0, self.alpha / 2.0 + self.beta, 0.0),
            Vector(self.alpha / 2.0, self.alpha / 2.0 + 2.0 * self.beta, 0.0),
            Vector(-self.alpha / 2.0, self.alpha / 2.0 + 2.0 * self.beta, 0.0),
        ))
        common_face_mesh_list.append(TriangleMesh().add_quad(
            Vector(-self.alpha / 2.0 - 2.0 * self.beta, -self.alpha / 2.0, 0.0),
            Vector(-self.alpha / 2.0 - self.beta, -self.alpha / 2.0, 0.0),
            Vector(-self.alpha / 2.0 - self.beta, self.alpha / 2.0, 0.0),
            Vector(-self.alpha / 2.0 - 2.0 * self.beta, self.alpha / 2.0, 0.0),
        ))
        # One rigid-body motion per cube face, mapping the z = 0 template
        # onto the face.
        transform_list = []
        transform_list.append(AffineTransform().make_rigid_body_motion(Vector(0.0, 1.0, 0.0), -math.pi / 2.0, Vector(-1.0, 0.0, 0.0))) # left
        transform_list.append(AffineTransform().make_rigid_body_motion(Vector(0.0, 1.0, 0.0), math.pi / 2.0, Vector(1.0, 0.0, 0.0))) # right
        transform_list.append(AffineTransform().make_rigid_body_motion(Vector(1.0, 0.0, 0.0), math.pi / 2.0, Vector(0.0, -1.0, 0.0))) # down
        transform_list.append(AffineTransform().make_rigid_body_motion(Vector(1.0, 0.0, 0.0), -math.pi / 2.0, Vector(0.0, 1.0, 0.0))) # up
        transform_list.append(AffineTransform().make_rigid_body_motion(Vector(1.0, 0.0, 0.0), math.pi, Vector(0.0, 0.0, -1.0))) # back
        transform_list.append(AffineTransform().make_rigid_body_motion(Vector(1.0, 0.0, 0.0), 0.0, Vector(0.0, 0.0, 1.0))) # front
        # RGB face colors, one per transform above (left..front order).
        color_list = [
            Vector(0.0, 0.0, 1.0),
            Vector(0.0, 1.0, 0.0),
            Vector(1.0, 1.0, 1.0),
            Vector(1.0, 1.0, 0.0),
            Vector(1.0, 0.5, 0.0),
            Vector(1.0, 0.0, 0.0)
        ]
        mesh_list = []
        for i, transform in enumerate(transform_list):
            for mesh in common_face_mesh_list:
                mesh = ColoredMesh(mesh=transform(mesh), color=color_list[i])
                mesh_list.append(mesh)
        return mesh_list
    def make_generator_mesh_list(self):
        """Return the generator (twist-region) meshes for this puzzle; this stub returns none.

        NOTE(review): a second ``make_generator_mesh_list`` defined later in this
        same class shadows this stub, so this definition is dead code -- confirm
        and remove one of the two.
        """
        return []
def make_other_faces(self, inset_only, rotated, color):
other_face_mesh_list = []
other_face_mesh_list.append(TriangleMesh().add_quad(
Vector(-self.alpha / 2.0 - self.beta, -self.alpha / 2.0, 0.0),
Vector(-self.alpha / 2.0, -self.alpha / 2.0, 0.0),
Vector(-self.alpha / 2.0, self.alpha / 2.0, 0.0),
Vector(-self.alpha / 2.0 - self.beta, self.alpha / 2.0, 0.0)
))
other_face_mesh_list.append(TriangleMesh().add_quad(
Vector(self.alpha / 2.0, -self.alpha / 2.0, 0.0),
Vector(self.alpha / 2.0 + self.beta, -self.alpha / 2.0, 0.0),
Vector(self.alpha / 2.0 + self.beta, self.alpha / 2.0, 0.0),
Vector(self.alpha / 2.0, self.alpha / 2.0, 0.0)
))
other_face_mesh_list = [ColoredMesh(mesh=mesh, color=color) for mesh in other_face_mesh_list]
if not inset_only:
other_face_mesh_list += [AffineTransform().make_translation(Vector(0.0, 0.0, -0.3))(mesh) for mesh in other_face_mesh_list]
else:
other_face_mesh_list = [AffineTransform().make_translation(Vector(0.0, 0.0, -0.3))(mesh) for mesh in other_face_mesh_list]
# These faces are used purely for bandaging.
other_face_mesh_list.append(ColoredMesh(mesh=TriangleMesh().add_quad(
Vector(-self.alpha / 2.0 - self.beta, 0.0, -0.3),
Vector(-self.alpha / 2.0, 0.0, -0.3),
Vector(-self.alpha / 2.0, 0.0, 0.0),
Vector(-self.alpha / 2.0 - self.beta, 0.0)
), color=Vector(0.5, 0.5, 0.5), alpha=0.0))
other_face_mesh_list.append(ColoredMesh(mesh=TriangleMesh().add_quad(
Vector(self.alpha / 2.0, 0.0, -0.3),
Vector(self.alpha / 2.0 + self.beta, 0.0, -0.3),
Vector(self.alpha / 2.0 + self.beta, 0.0, 0.0),
Vector(self.alpha / 2.0, 0.0, 0.0)
), color=Vector(0.5, 0.5, 0.5), alpha=0.0))
if rotated:
other_face_mesh_list = [LinearTransform().make_rotation(Vector(0.0, 0.0, 1.0), math.pi / 2.0)(mesh) for mesh in other_face_mesh_list]
return other_face_mesh_list
def can_apply_cutmesh_for_pass(self, i, cut_mesh, cut_pass, generator_mesh_list):
# Unlike most puzzles, this one is constructed entirely pre-cut.
return False
    def make_generator_mesh_list(self):
        """Build the generator meshes (twistable regions) for this puzzle.

        Produces, in order:
          * six quarter-turn face generators (indices 0-5), one disk per face;
          * six eighth-turn whole-axis generators (indices 6-11), each a pair of
            opposing disks, one generator per turn direction per axis;
          * three invisible convex-hull meshes (the last three entries), one per
            axis, used only by the capture trees below to restrict what the
            eighth-turn generators may grab.

        NOTE(review): this definition shadows an earlier
        ``make_generator_mesh_list`` in the same class; only this one is live.
        """
        mesh_list = []
        # Six quarter-turn face generators, one per face of the cube.
        mesh = TriangleMesh.make_disk(Vector(self.alpha / 2.0, 0.0, 0.0), Vector(-1.0, 0.0, 0.0), 4.0, 4)
        mesh = GeneratorMesh(mesh=mesh, axis=Vector(1.0, 0.0, 0.0), angle=math.pi / 2.0, pick_point=Vector(1.0, 0.0, 0.0))
        mesh_list.append(mesh)
        mesh = TriangleMesh.make_disk(Vector(-self.alpha / 2.0, 0.0, 0.0), Vector(1.0, 0.0, 0.0), 4.0, 4)
        mesh = GeneratorMesh(mesh=mesh, axis=Vector(-1.0, 0.0, 0.0), angle=math.pi / 2.0, pick_point=Vector(-1.0, 0.0, 0.0))
        mesh_list.append(mesh)
        mesh = TriangleMesh.make_disk(Vector(0.0, self.alpha / 2.0, 0.0), Vector(0.0, -1.0, 0.0), 4.0, 4)
        mesh = GeneratorMesh(mesh=mesh, axis=Vector(0.0, 1.0, 0.0), angle=math.pi / 2.0, pick_point=Vector(0.0, 1.0, 0.0))
        mesh_list.append(mesh)
        mesh = TriangleMesh.make_disk(Vector(0.0, -self.alpha / 2.0, 0.0), Vector(0.0, 1.0, 0.0), 4.0, 4)
        mesh = GeneratorMesh(mesh=mesh, axis=Vector(0.0, -1.0, 0.0), angle=math.pi / 2.0, pick_point=Vector(0.0, -1.0, 0.0))
        mesh_list.append(mesh)
        mesh = TriangleMesh.make_disk(Vector(0.0, 0.0, self.alpha / 2.0), Vector(0.0, 0.0, -1.0), 4.0, 4)
        mesh = GeneratorMesh(mesh=mesh, axis=Vector(0.0, 0.0, 1.0), angle=math.pi / 2.0, pick_point=Vector(0.0, 0.0, 1.0))
        mesh_list.append(mesh)
        mesh = TriangleMesh.make_disk(Vector(0.0, 0.0, -self.alpha / 2.0), Vector(0.0, 0.0, 1.0), 4.0, 4)
        mesh = GeneratorMesh(mesh=mesh, axis=Vector(0.0, 0.0, -1.0), angle=math.pi / 2.0, pick_point=Vector(0.0, 0.0, -1.0))
        mesh_list.append(mesh)
        # Six eighth-turn generators: the union of both opposing face disks per
        # axis, with positive and negative turn direction as separate generators.
        mesh = TriangleMesh.make_disk(Vector(self.alpha / 2.0, 0.0, 0.0), Vector(1.0, 0.0, 0.0), 4.0, 4) + TriangleMesh.make_disk(Vector(-self.alpha / 2.0, 0.0, 0.0), Vector(-1.0, 0.0, 0.0), 4.0, 4)
        mesh = GeneratorMesh(mesh=mesh, axis=Vector(1.0, 0.0, 0.0), angle=math.pi / 4.0, pick_point=Vector(1.5, 0.0, 0.0))
        mesh_list.append(mesh)
        mesh = TriangleMesh.make_disk(Vector(self.alpha / 2.0, 0.0, 0.0), Vector(1.0, 0.0, 0.0), 4.0, 4) + TriangleMesh.make_disk(Vector(-self.alpha / 2.0, 0.0, 0.0), Vector(-1.0, 0.0, 0.0), 4.0, 4)
        mesh = GeneratorMesh(mesh=mesh, axis=Vector(1.0, 0.0, 0.0), angle=-math.pi / 4.0, pick_point=Vector(-1.5, 0.0, 0.0))
        mesh_list.append(mesh)
        mesh = TriangleMesh.make_disk(Vector(0.0, self.alpha / 2.0, 0.0), Vector(0.0, 1.0, 0.0), 4.0, 4) + TriangleMesh.make_disk(Vector(0.0, -self.alpha / 2.0, 0.0), Vector(0.0, -1.0, 0.0), 4.0, 4)
        mesh = GeneratorMesh(mesh=mesh, axis=Vector(0.0, 1.0, 0.0), angle=math.pi / 4.0, pick_point=Vector(0.0, 1.5, 0.0))
        mesh_list.append(mesh)
        mesh = TriangleMesh.make_disk(Vector(0.0, self.alpha / 2.0, 0.0), Vector(0.0, 1.0, 0.0), 4.0, 4) + TriangleMesh.make_disk(Vector(0.0, -self.alpha / 2.0, 0.0), Vector(0.0, -1.0, 0.0), 4.0, 4)
        mesh = GeneratorMesh(mesh=mesh, axis=Vector(0.0, 1.0, 0.0), angle=-math.pi / 4.0, pick_point=Vector(0.0, -1.5, 0.0))
        mesh_list.append(mesh)
        mesh = TriangleMesh.make_disk(Vector(0.0, 0.0, self.alpha / 2.0), Vector(0.0, 0.0, 1.0), 4.0, 4) + TriangleMesh.make_disk(Vector(0.0, 0.0, -self.alpha / 2.0), Vector(0.0, 0.0, -1.0), 4.0, 4)
        mesh = GeneratorMesh(mesh=mesh, axis=Vector(0.0, 0.0, 1.0), angle=math.pi / 4.0, pick_point=Vector(0.0, 0.0, 1.5))
        mesh_list.append(mesh)
        mesh = TriangleMesh.make_disk(Vector(0.0, 0.0, self.alpha / 2.0), Vector(0.0, 0.0, 1.0), 4.0, 4) + TriangleMesh.make_disk(Vector(0.0, 0.0, -self.alpha / 2.0), Vector(0.0, 0.0, -1.0), 4.0, 4)
        mesh = GeneratorMesh(mesh=mesh, axis=Vector(0.0, 0.0, 1.0), angle=-math.pi / 4.0, pick_point=Vector(0.0, 0.0, -1.5))
        mesh_list.append(mesh)
        # Convex hull around the pieces that stick out along the x-axis; the
        # y/z hulls are rotated copies of it.
        point_cloud = PointCloud()
        point_cloud.add_point(Vector(-self.alpha / 2.0, -self.alpha / 2.0 - 2.0 * self.beta + 0.29, 0.71))
        point_cloud.add_point(Vector(-self.alpha / 2.0, -self.alpha / 2.0 - 2.0 * self.beta + 0.29, -0.71))
        point_cloud.add_point(Vector(self.alpha / 2.0, -self.alpha / 2.0 - 2.0 * self.beta + 0.29, 0.71))
        point_cloud.add_point(Vector(self.alpha / 2.0, -self.alpha / 2.0 - 2.0 * self.beta + 0.29, -0.71))
        point_cloud.add_point(Vector(self.alpha / 2.0, self.alpha / 2.0 + 2.0 * self.beta - 0.29, 0.71))
        point_cloud.add_point(Vector(self.alpha / 2.0, self.alpha / 2.0 + 2.0 * self.beta - 0.29, -0.71))
        point_cloud.add_point(Vector(-self.alpha / 2.0, self.alpha / 2.0 + 2.0 * self.beta - 0.29, 0.71))
        point_cloud.add_point(Vector(-self.alpha / 2.0, self.alpha / 2.0 + 2.0 * self.beta - 0.29, -0.71))
        x_mesh = point_cloud.find_convex_hull()
        y_mesh = LinearTransform().make_rotation(Vector(0.0, 0.0, 1.0), math.pi / 2.0)(x_mesh)
        z_mesh = LinearTransform().make_rotation(Vector(0.0, 1.0, 0.0), math.pi / 2.0)(x_mesh)
        # angle=0.0: these are capture-only meshes, never actual twists.
        x_mesh = GeneratorMesh(mesh=x_mesh, axis=Vector(1.0, 0.0, 0.0), angle=0.0, pick_point=None)
        y_mesh = GeneratorMesh(mesh=y_mesh, axis=Vector(0.0, 1.0, 0.0), angle=0.0, pick_point=None)
        z_mesh = GeneratorMesh(mesh=z_mesh, axis=Vector(0.0, 0.0, 1.0), angle=0.0, pick_point=None)
        mesh_list += [x_mesh, y_mesh, z_mesh]
        # Restrict each eighth-turn generator (indices 6..11) to capture its
        # disk region MINUS the hull of its axis (the last three meshes).
        for i in range(2):
            mesh_list[6 + i].capture_tree_root = {
                'op': 'subtract',
                'children': [
                    {'mesh': 6 + i},
                    {'mesh': len(mesh_list) - 3}
                ]
            }
            mesh_list[8 + i].capture_tree_root = {
                'op': 'subtract',
                'children': [
                    {'mesh': 8 + i},
                    {'mesh': len(mesh_list) - 2}
                ]
            }
            mesh_list[10 + i].capture_tree_root = {
                'op': 'subtract',
                'children': [
                    {'mesh': 10 + i},
                    {'mesh': len(mesh_list) - 1}
                ]
            }
        return mesh_list
class WormHoleII(WormHoleBase):
    """Worm Hole II: the worm-hole base puzzle with colored side strips
    (surface and/or inset) applied to all six faces, plus one hidden face
    used by the core logic."""

    def __init__(self):
        super().__init__()

    def bandages(self):
        # The inset strips constrain moves like a bandaged puzzle.
        return True

    def make_initial_mesh_list(self):
        """Build the base faces, then place each face's strip set via a table
        of (color, inset_only flag, placement transform) entries."""
        mesh_list = super().make_initial_mesh_list()
        face_table = [
            # red face
            (Vector(1.0, 0.0, 0.0), False, AffineTransform().make_translation(Vector(0.0, 0.0, 1.0))),
            # orange face
            (Vector(1.0, 0.5, 0.0), False, AffineTransform().make_rigid_body_motion(Vector(1.0, 0.0, 0.0), math.pi, Vector(0.0, 0.0, -1.0))),
            # green face
            (Vector(0.0, 1.0, 0.0), False, AffineTransform().make_rigid_body_motion(Vector(0.0, 1.0, 0.0), math.pi / 2.0, Vector(1.0, 0.0, 0.0))),
            # blue face
            (Vector(0.0, 0.0, 1.0), False, AffineTransform().make_rigid_body_motion(Vector(0.0, 1.0, 0.0), -math.pi / 2.0, Vector(-1.0, 0.0, 0.0))),
            # yellow face (inset strips only)
            (Vector(1.0, 1.0, 0.0), True, AffineTransform().make_rigid_body_motion(Vector(1.0, 0.0, 0.0), -math.pi / 2.0, Vector(0.0, 1.0, 0.0))),
            # white face (inset strips only)
            (Vector(1.0, 1.0, 1.0), True, AffineTransform().make_rigid_body_motion(Vector(1.0, 0.0, 0.0), math.pi / 2.0, Vector(0.0, -1.0, 0.0))),
        ]
        for color, inset_only, face_transform in face_table:
            strip_mesh_list = self.make_other_faces(inset_only, False, color) + self.make_other_faces(True, True, color)
            for strip_mesh in strip_mesh_list:
                mesh_list.append(face_transform(strip_mesh))
        # Hidden face (for core logic).
        hidden_mesh = TriangleMesh().add_triangle(Triangle(
            Vector(-self.alpha / 2.0 - 2.0 * self.beta, -self.alpha / 2.0 - 2.0 * self.beta, self.alpha / 2.0),
            Vector(-self.alpha / 2.0, -self.alpha / 2.0 - 2.0 * self.beta, self.alpha / 2.0 + 2.0 * self.beta),
            Vector(-self.alpha / 2.0 - 2.0 * self.beta, -self.alpha / 2.0, self.alpha / 2.0 + 2.0 * self.beta)
        ))
        mesh_list.append(ColoredMesh(mesh=hidden_mesh, color=Vector(0.0, 0.0, 0.0), alpha=0.0))
        return mesh_list
class LatchCube(RubiksCube):
    """Rubik's Cube variant whose edge stickers carry arrows (via custom face
    textures) restricting the direction a face may turn."""

    def __init__(self):
        super().__init__()

    def bandages(self):
        # The latch constraints behave like bandaging for the simulator.
        return True

    def make_texture_space_transform_for_plane(self, plane):
        """Return the transform mapping world space into the texture space of a face plane.

        The z-axis of the texture frame points along the face normal; a fixed
        perpendicular x-axis is chosen per axis so textures are oriented
        consistently on opposite faces.

        Raises:
            ValueError: if the plane normal is not one of the six axis directions.
            (Previously an unmatched normal left ``x_axis`` unbound and crashed
            with an opaque NameError.)
        """
        linear_transform = LinearTransform()
        z_axis = plane.unit_normal.clone()  # The z-axis always points away from the face.
        # Both signs of an axis share the same x-axis choice.
        if z_axis.is_vector(Vector(1.0, 0.0, 0.0)) or z_axis.is_vector(Vector(-1.0, 0.0, 0.0)):
            x_axis = linear_transform.y_axis
        elif z_axis.is_vector(Vector(0.0, 1.0, 0.0)) or z_axis.is_vector(Vector(0.0, -1.0, 0.0)):
            x_axis = linear_transform.z_axis
        elif z_axis.is_vector(Vector(0.0, 0.0, 1.0)) or z_axis.is_vector(Vector(0.0, 0.0, -1.0)):
            x_axis = linear_transform.x_axis
        else:
            raise ValueError('Plane normal is not axis-aligned.')
        y_axis = z_axis.cross(x_axis)  # Always make it a right-handed system.
        transform = AffineTransform(x_axis=x_axis, y_axis=y_axis, z_axis=z_axis, translation=plane.center)
        inverse_transform = transform.calc_inverse()
        return inverse_transform

    def annotate_puzzle_data(self, puzzle_data):
        """Attach the custom face texture paths and mark which stickers carry arrows.

        For every mesh, fit a plane to its vertices; edge stickers (those offset
        along one in-plane axis but centered on the other) get an 'arrow'
        annotation whose color depends on the face normal's sign.
        """
        puzzle_data['custom_texture_path_list'] = [
            'images/latch_cube/blue_face.png',
            'images/latch_cube/green_face.png',
            'images/latch_cube/black_face.png',
            'images/latch_cube/white_face.png',
            'images/latch_cube/red_face.png',
            'images/latch_cube/yellow_face.png'
        ]
        mesh_list = puzzle_data['mesh_list']
        for mesh_data in mesh_list:
            mesh = TriangleMesh().from_dict(mesh_data)
            center = mesh.calc_center()
            plane = PointCloud(point_list=mesh.vertex_list).fit_plane()
            # Orient the fitted normal to point away from the puzzle center.
            if plane.center.dot(plane.unit_normal) < 0.0:
                plane.unit_normal = -plane.unit_normal
            special_case_data = {'arrow': None}
            if Vector(1.0, 0.0, 0.0).is_vector(plane.unit_normal) or Vector(-1.0, 0.0, 0.0).is_vector(plane.unit_normal):
                # Edge stickers on +/-X faces: offset in y, centered in z.
                if (center.y > 1.0 / 3.0 or center.y < -1.0 / 3.0) and -1.0 / 3.0 < center.z < 1.0 / 3.0:
                    if Vector(1.0, 0.0, 0.0).is_vector(plane.unit_normal):
                        special_case_data['arrow'] = 'black'
                    elif Vector(-1.0, 0.0, 0.0).is_vector(plane.unit_normal):
                        special_case_data['arrow'] = 'white'
            elif Vector(0.0, 1.0, 0.0).is_vector(plane.unit_normal) or Vector(0.0, -1.0, 0.0).is_vector(plane.unit_normal):
                # Edge stickers on +/-Y faces: offset in z, centered in x.
                if (center.z > 1.0 / 3.0 or center.z < -1.0 / 3.0) and -1.0 / 3.0 < center.x < 1.0 / 3.0:
                    if Vector(0.0, 1.0, 0.0).is_vector(plane.unit_normal):
                        special_case_data['arrow'] = 'black'
                    elif Vector(0.0, -1.0, 0.0).is_vector(plane.unit_normal):
                        special_case_data['arrow'] = 'white'
            elif Vector(0.0, 0.0, 1.0).is_vector(plane.unit_normal) or Vector(0.0, 0.0, -1.0).is_vector(plane.unit_normal):
                # Edge stickers on +/-Z faces: offset in x, centered in y.
                if (center.x > 1.0 / 3.0 or center.x < -1.0 / 3.0) and -1.0 / 3.0 < center.y < 1.0 / 3.0:
                    if Vector(0.0, 0.0, 1.0).is_vector(plane.unit_normal):
                        special_case_data['arrow'] = 'black'
                    elif Vector(0.0, 0.0, -1.0).is_vector(plane.unit_normal):
                        special_case_data['arrow'] = 'white'
            mesh_data['special_case_data'] = special_case_data
class Rubiks3x3x5(RubiksCube):
    """A 3x3x5 cuboid: a Rubik's Cube stretched 5/3 along the y-axis with two
    extra outer-layer cuts per axis end."""

    def __init__(self):
        # Bug fix: this was a bare `pass`, silently skipping the base-class
        # initialization that every sibling puzzle class performs.
        super().__init__()

    def bandages(self):
        # Cuboid moves can be blocked mid-state, so treat the puzzle as bandaged.
        return True

    def make_initial_mesh_list(self):
        """Return the standard cube faces scaled non-uniformly (x1, y5/3, z1)."""
        mesh_list = super().make_initial_mesh_list()
        transform = LinearTransform().make_non_uniform_scale(1.0, 5.0 / 3.0, 1.0)
        mesh_list = transform(mesh_list)
        return mesh_list

    def make_generator_mesh_list(self):
        """Add six outer-layer cut generators on top of the standard 3x3 set.

        Each outer cut requires a minimum capture count of 21 pieces before it
        may be applied, which enforces the cuboid's bandaging behavior.
        """
        mesh_list = super().make_generator_mesh_list()
        l_cut_disk = TriangleMesh.make_disk(Vector(-1.0, 0.0, 0.0), Vector(1.0, 0.0, 0.0), 4.0, 4)
        r_cut_disk = TriangleMesh.make_disk(Vector(1.0, 0.0, 0.0), Vector(-1.0, 0.0, 0.0), 4.0, 4)
        d_cut_disk = TriangleMesh.make_disk(Vector(0.0, -1.0, 0.0), Vector(0.0, 1.0, 0.0), 4.0, 4)
        u_cut_disk = TriangleMesh.make_disk(Vector(0.0, 1.0, 0.0), Vector(0.0, -1.0, 0.0), 4.0, 4)
        b_cut_disk = TriangleMesh.make_disk(Vector(0.0, 0.0, -1.0), Vector(0.0, 0.0, 1.0), 4.0, 4)  # was `-1.`; normalized for consistency
        f_cut_disk = TriangleMesh.make_disk(Vector(0.0, 0.0, 1.0), Vector(0.0, 0.0, -1.0), 4.0, 4)
        l_cut_disk = GeneratorMesh(mesh=l_cut_disk, axis=Vector(-1.0, 0.0, 0.0), angle=math.pi / 2.0, pick_point=Vector(-5.0 / 3.0, 0.0, 0.0), min_capture_count=21)
        r_cut_disk = GeneratorMesh(mesh=r_cut_disk, axis=Vector(1.0, 0.0, 0.0), angle=math.pi / 2.0, pick_point=Vector(5.0 / 3.0, 0.0, 0.0), min_capture_count=21)
        d_cut_disk = GeneratorMesh(mesh=d_cut_disk, axis=Vector(0.0, -1.0, 0.0), angle=math.pi / 2.0, pick_point=Vector(0.0, -5.0 / 3.0, 0.0), min_capture_count=21)
        u_cut_disk = GeneratorMesh(mesh=u_cut_disk, axis=Vector(0.0, 1.0, 0.0), angle=math.pi / 2.0, pick_point=Vector(0.0, 5.0 / 3.0, 0.0), min_capture_count=21)
        b_cut_disk = GeneratorMesh(mesh=b_cut_disk, axis=Vector(0.0, 0.0, -1.0), angle=math.pi / 2.0, pick_point=Vector(0.0, 0.0, -5.0 / 3.0), min_capture_count=21)
        f_cut_disk = GeneratorMesh(mesh=f_cut_disk, axis=Vector(0.0, 0.0, 1.0), angle=math.pi / 2.0, pick_point=Vector(0.0, 0.0, 5.0 / 3.0), min_capture_count=21)
        mesh_list += [l_cut_disk, r_cut_disk, d_cut_disk, u_cut_disk, b_cut_disk, f_cut_disk]
        return mesh_list
class MultiCube(PuzzleDefinitionBase):
    """Multi-Cube puzzle: cube faces with a sunken inner diamond, twisted by
    corner generators at two depths (a shallow and a deep cut per diagonal).

    NOTE(review): mesh-list ordering matters here -- the capture trees in
    make_generator_mesh_list refer to meshes by index (index 16 is the core).
    """
    def __init__(self):
        super().__init__()
    def make_initial_mesh_list(self):
        """Build the outer face meshes (an octagon-ring pattern per face) plus
        a diamond recessed 0.25 behind each face."""
        # Outer ring: four corner triangles and four edge triangles around a
        # central diamond (at z = 0).
        base_mesh = TriangleMesh()
        base_mesh.add_triangle(Triangle(Vector(1.0, -1.0, 0.0), Vector(0.5, 0.0, 0.0), Vector(0.0, -0.5, 0.0)))
        base_mesh.add_triangle(Triangle(Vector(1.0, 1.0, 0.0), Vector(0.0, 0.5, 0.0), Vector(0.5, 0.0, 0.0)))
        base_mesh.add_triangle(Triangle(Vector(-1.0, 1.0, 0.0), Vector(-0.5, 0.0, 0.0), Vector(0.0, 0.5, 0.0)))
        base_mesh.add_triangle(Triangle(Vector(-1.0, -1.0, 0.0), Vector(0.0, -0.5, 0.0), Vector(-0.5, 0.0, 0.0)))
        base_mesh.add_triangle(Triangle(Vector(-1.0, -1.0, 0.0), Vector(1.0, -1.0, 0.0), Vector(0.0, -0.5, 0.0)))
        base_mesh.add_triangle(Triangle(Vector(1.0, -1.0, 0.0), Vector(1.0, 1.0, 0.0), Vector(0.5, 0.0, 0.0)))
        base_mesh.add_triangle(Triangle(Vector(1.0, 1.0, 0.0), Vector(-1.0, 1.0, 0.0), Vector(0.0, 0.5, 0.0)))
        base_mesh.add_triangle(Triangle(Vector(-1.0, 1.0, 0.0), Vector(-1.0, -1.0, 0.0), Vector(-0.5, 0.0, 0.0)))
        mesh_list = self.make_standard_cube_faces_using_base_mesh(base_mesh)
        # Sunken inner diamond, recessed to z = -0.25 behind each face plane.
        base_mesh = TriangleMesh()
        base_mesh.add_triangle(Triangle(Vector(0.0, 0.0, -0.25), Vector(0.5, 0.0, -0.25), Vector(0.0, 0.5, -0.25)))
        base_mesh.add_triangle(Triangle(Vector(0.0, 0.0, -0.25), Vector(0.0, 0.5, -0.25), Vector(-0.5, 0.0, -0.25)))
        base_mesh.add_triangle(Triangle(Vector(0.0, 0.0, -0.25), Vector(-0.5, 0.0, -0.25), Vector(0.0, -0.5, -0.25)))
        base_mesh.add_triangle(Triangle(Vector(0.0, 0.0, -0.25), Vector(0.0, -0.5, -0.25), Vector(0.5, 0.0, -0.25)))
        mesh_list += self.make_standard_cube_faces_using_base_mesh(base_mesh)
        return mesh_list
    def make_generator_mesh_list(self):
        """Create two corner generators per cube diagonal -- a shallow cut
        (offset 0.1) and a deep cut (offset 0.525) -- plus one hidden core
        mesh used by the capture trees."""
        mesh_list = []
        for i, vector in enumerate(Vector(1.0, 1.0, 1.0).sign_permute()):
            mesh = GeneratorMesh(mesh=TriangleMesh.make_disk(Vector(0.0, 0.0, 0.0), -vector.normalized(), 4.0, 4), axis=vector.normalized(), angle=2.0 * math.pi / 3.0, pick_point=vector)
            mesh_a = AffineTransform().make_translation(vector.normalized() * 0.1)(mesh)
            mesh_b = AffineTransform().make_translation(vector.normalized() * 0.525)(mesh)
            mesh_list.append(mesh_a)
            mesh_list.append(mesh_b)
            mesh_b.pick_point = None  # only the shallow cut is user-pickable
            # Capture set: (shallow-cut pieces outside the core) union
            # (deep-cut pieces inside the core).  Mesh index 16 is the core.
            mesh_a.capture_tree_root = {
                'op': 'union',
                'children': [
                    {
                        'op': 'subtract',
                        'children': [
                            {'mesh': i * 2},
                            {'mesh': 16}
                        ]
                    },
                    {
                        'op': 'intersection',
                        'children': [
                            {'mesh': 16},
                            {'mesh': i * 2 + 1}
                        ]
                    }
                ]
            }
        # Hidden, slightly shrunken hexahedron acting as the "core" region.
        self.core_mesh = GeneratorMesh(mesh=LinearTransform().make_uniform_scale(0.9)(TriangleMesh.make_polyhedron(Polyhedron.HEXAHEDRON)), pick_point=None)
        mesh_list.append(self.core_mesh)
        # TODO: Add center slice generators with offset pick-points.
        return mesh_list
    def can_apply_cutmesh_to_mesh(self, i, cut_mesh, cut_pass, mesh):
        """Decide whether cut mesh i may cut the given piece mesh.

        Even-indexed (shallow) cuts always apply; odd-indexed (deep) cuts
        apply only to pieces captured by the core; the core mesh itself
        (index 16) is never cut.
        """
        if i == 16:
            return False # Ignore the core mesh.
        if i % 2 == 0:
            return True
        if self.core_mesh.captures_mesh(mesh):
            return True
        return False
class SuperStar(PuzzleDefinitionBase):
    """Super-Star puzzle built on a stellated rhombic dodecahedron.

    The solid is assembled from eight octant frames; each frame contributes
    six triangles forming one stellated point region.
    """

    def __init__(self):
        super().__init__()
        # Seven frames are single rotations; the eighth is a compound rotation.
        rotation_spec_list = [
            ((1.0, 0.0, 0.0), 0.0),
            ((1.0, 0.0, 0.0), math.pi / 2.0),
            ((1.0, 0.0, 0.0), math.pi),
            ((1.0, 0.0, 0.0), -math.pi / 2.0),
            ((0.0, 1.0, 0.0), math.pi),
            ((0.0, 1.0, 0.0), -math.pi / 2.0),
            ((0.0, 0.0, 1.0), math.pi),
        ]
        self.frame_list = [LinearTransform().make_rotation(Vector(*axis), angle) for axis, angle in rotation_spec_list]
        self.frame_list.append(
            LinearTransform().make_rotation(Vector(1.0, 0.0, 0.0), math.pi / 2.0)(
                LinearTransform().make_rotation(Vector(0.0, 0.0, 1.0), math.pi)))

    def make_octant_triangles(self, frame):
        """Return the six triangles of one octant, fanned around its center."""
        x = frame.x_axis
        y = frame.y_axis
        z = frame.z_axis
        center = (x + y + z) / 2.0
        corner_pair_list = [
            (x, x + y),
            (x + y, y),
            (y, y + z),
            (y + z, z),
            (z, z + x),
            (z + x, x),
        ]
        return [Triangle(corner_a, corner_b, center) for corner_a, corner_b in corner_pair_list]

    def make_initial_mesh_list(self):
        """Make the stellated rhombic dodecahedron faces, cycling four colors."""
        mesh_list = []
        # TODO: Add support on any puzzle for multiple color schemes? This puzzle is a good candidate.
        color_list = [
            Vector(1.0, 0.0, 0.0),
            Vector(0.0, 1.0, 0.0),
            Vector(0.0, 0.5, 1.0),
            Vector(1.0, 1.0, 0.0)
        ]
        # Make a stellated rhombic dodecahedron.
        i = 0
        for frame in self.frame_list:
            for triangle in self.make_octant_triangles(frame):
                face_mesh = TriangleMesh()
                face_mesh.add_triangle(triangle)
                mesh_list.append(ColoredMesh(mesh=face_mesh, color=color_list[i]))
                i = (i + 1) % len(color_list)
        return mesh_list

    def make_generator_mesh_list(self):
        """Build the generators: twelve edge half-turns, six axis quarter-turns,
        and eight octant third-turns."""
        mesh_list = []
        frame = LinearTransform()
        # Half-turn generators about each edge direction (sum of two axes).
        axis_pair_list = [
            (frame.x_axis, frame.y_axis),
            (frame.x_axis, -frame.y_axis),
            (frame.x_axis, frame.z_axis),
            (frame.x_axis, -frame.z_axis),
            (-frame.x_axis, frame.y_axis),
            (-frame.x_axis, -frame.y_axis),
            (-frame.x_axis, frame.z_axis),
            (-frame.x_axis, -frame.z_axis),
            (frame.z_axis, frame.y_axis),
            (frame.z_axis, -frame.y_axis),
            (-frame.z_axis, frame.y_axis),
            (-frame.z_axis, -frame.y_axis)
        ]
        for axis_a, axis_b in axis_pair_list:
            axis_sum = axis_a + axis_b
            mesh_list.append(GeneratorMesh(mesh=TriangleMesh.make_disk(axis_sum / 2.0, -axis_sum.normalized(), 4.0, 4), pick_point=axis_sum, axis=axis_sum.normalized(), angle=math.pi))
        # Quarter-turn generators about each coordinate axis direction.
        axis_list = [
            frame.x_axis,
            -frame.x_axis,
            frame.y_axis,
            -frame.y_axis,
            frame.z_axis,
            -frame.z_axis
        ]
        length = ((frame.x_axis + frame.y_axis + frame.z_axis) / 2.0).dot(frame.x_axis)
        for axis in axis_list:
            mesh_list.append(GeneratorMesh(mesh=TriangleMesh.make_disk(axis * length, -axis, 4.0, 4), pick_point=axis, axis=axis, angle=math.pi / 2.0))
        # Third-turn generators about each octant center.
        for frame in self.frame_list:
            center = (frame.x_axis + frame.y_axis + frame.z_axis) / 2.0
            mesh_list.append(GeneratorMesh(mesh=TriangleMesh.make_disk(center * (2.0 / 3.0), -center.normalized(), 4.0, 4), pick_point=center, axis=center.normalized(), angle=2.0 * math.pi / 3.0))
        return mesh_list
class DreidelCube(RubiksCube):
    """Dreidel Cube: a Rubik's Cube with additional corner-sphere cuts that are
    applied in a twisted configuration and then untwisted."""

    def __init__(self):
        super().__init__()

    def make_generator_mesh_list(self):
        """Add one sixth-turn corner generator (a sphere region) per cube corner
        on top of the standard 3x3 face generators."""
        mesh_list = super().make_generator_mesh_list()
        radius = 2.0 * math.sqrt(2.0) / 3.0
        for center in Vector(1.0, 1.0, 1.0).sign_permute():
            sphere = Sphere(center, radius)
            mesh = GeneratorMesh(mesh=sphere.make_mesh(subdivision_level=2), axis=center.normalized(), angle=math.pi / 3.0, pick_point=center)
            mesh_list.append(mesh)
        return mesh_list

    def can_apply_cutmesh_for_pass(self, i, cut_mesh, cut_pass, generator_mesh_list):
        """Pass 0 applies every cut; pass 1 applies only the six face cuts (i < 6)."""
        if cut_pass == 0:
            return True
        if cut_pass == 1:
            # Simplified from `True if 0 <= i < 6 else False`.
            return 0 <= i < 6
        return False

    def transform_meshes_for_more_cutting(self, mesh_list, generator_mesh_list, cut_pass):
        """Twist all corner generators before pass 1 and untwist them after,
        so the face cuts land on the twisted geometry.

        Returns True while more cutting passes remain.
        """
        if cut_pass == 0:
            for i in range(6, len(generator_mesh_list)):
                self.apply_generator(mesh_list, generator_mesh_list[i])
            return True
        if cut_pass == 1:
            for i in range(6, len(generator_mesh_list)):
                self.apply_generator(mesh_list, generator_mesh_list[i], inverse=True)
            return False
class EitansStar(PuzzleDefinitionBase):
    """Eitan's Star: a vertex- and face-turning puzzle built on an icosahedron."""
    def __init__(self):
        super().__init__()
    def make_initial_mesh_list(self):
        """Create the icosahedron face meshes and derive the face-turn cut depth.

        The cut depth (self.length) is the distance from the origin to the
        centroid of edge midpoints gathered from the triangles adjacent to
        one face.
        """
        self.mesh = TriangleMesh().make_polyhedron(Polyhedron.ICOSAHEDRON)
        triangle = self.mesh.triangle_list[0]
        adjacent_triangles_list = self.mesh.find_adjacent_triangles(triangle)
        point_cloud = PointCloud()
        for triangle_info in adjacent_triangles_list:
            # triangle_info is (triangle indices, shared-vertex offset).
            triangle = triangle_info[0]
            triangle = self.mesh.make_triangle(triangle)
            # Midpoints of the two edges meeting at the shared vertex.
            # NOTE(review): assumes Triangle indexing wraps around
            # (triangle_info[1] + 1 may exceed 2) -- confirm Triangle's
            # __getitem__ is modular.
            point_cloud.add_point((triangle[triangle_info[1]] + triangle[triangle_info[1] + 1]) / 2.0)
            point_cloud.add_point((triangle[triangle_info[1]] + triangle[triangle_info[1] - 1]) / 2.0)
        # Distance from the origin to the cut plane of a face turn.
        self.length = point_cloud.calc_center().length()
        face_mesh_list, plane_list = self.make_face_meshes(self.mesh.clone())
        return face_mesh_list
    def make_generator_mesh_list(self):
        """Build one fifth-turn generator per icosahedron vertex (cut plane
        through the origin) and one third-turn generator per face (cut plane
        at depth self.length along the face normal)."""
        mesh_list = []
        for vertex in self.mesh.vertex_list:
            normal = vertex.normalized()
            mesh = TriangleMesh.make_disk(Vector(0.0, 0.0, 0.0), -normal, 5.0, 4)
            mesh = GeneratorMesh(mesh=mesh, axis=normal, angle=2.0 * math.pi / 5.0, pick_point=vertex)
            mesh_list.append(mesh)
        for triangle in self.mesh.triangle_list:
            triangle = self.mesh.make_triangle(triangle)
            plane = triangle.calc_plane()
            mesh = TriangleMesh.make_disk(plane.unit_normal * self.length, -plane.unit_normal, 5.0, 4)
            mesh = GeneratorMesh(mesh=mesh, axis=plane.unit_normal, angle=2.0 * math.pi / 3.0, pick_point=triangle.calc_center())
            mesh_list.append(mesh)
        return mesh_list
class Cubic4x6x8(PuzzleDefinitionBase):
    """A 4x6x8 cuboid: different layer counts per axis, realized by applying
    only a subset of the slice cuts on each axis."""

    def __init__(self):
        super().__init__()

    def bandages(self):
        # Cuboid moves can be blocked mid-state, so treat the puzzle as bandaged.
        return True

    def make_generator_mesh_list(self):
        """Create eight quarter-turn slice generators per axis: for each of
        four offsets along the axis, one generator on each side."""
        mesh_list = []
        for axis in [Vector(1.0, 0.0, 0.0), Vector(0.0, 1.0, 0.0), Vector(0.0, 0.0, 1.0)]:
            for j in range(4):
                # Positive side of the axis.
                center = axis * (float(j) / 5.0)
                mesh = TriangleMesh.make_disk(center, -axis, 4.0, 4)
                mesh = GeneratorMesh(mesh=mesh, axis=axis, angle=math.pi / 2.0, pick_point=axis + center)
                mesh_list.append(mesh)
                # Negative side of the axis.
                center = axis * (-float(j) / 5.0)
                mesh = TriangleMesh.make_disk(center, axis, 4.0, 4)
                mesh = GeneratorMesh(mesh=mesh, axis=-axis, angle=math.pi / 2.0, pick_point=-axis + center)
                mesh_list.append(mesh)
        return mesh_list

    def can_apply_cutmesh_for_pass(self, i, cut_mesh, cut_pass, generator_mesh_list):
        """Apply only the cuts each axis needs: 6 of 8 on the first axis,
        4 of 8 on the second, all 8 on the third.

        (Simplified from `True if ... else False` conditionals; the implicit
        `None` fall-through for i >= 24 is now an explicit False.)
        """
        if 0 <= i < 8:
            return i < 6
        if 8 <= i < 16:
            return i - 8 < 4
        if 16 <= i < 24:
            return i - 16 < 8
        return False
# TODO: Add conjoined 3x3 Rubkiks Cubes, a concave shape.
# TODO: Add constrained cube. It's just a 3x3 with logic needed in the simulator to handle the constraints. Some additional rendering
# would be needed to show which way a face can turn, though, which is the crux of implementing this puzzle. Maybe do this with texture switches. |
import math
import cmath
def conjugate(num: "int | float | complex"):
    """
    Return the complex conjugate of a number.

    int -> int
    float -> float
    complex -> complex

    Real numbers are returned unchanged; for a complex number the sign of
    the imaginary part is flipped.

    (Fixes: the previous annotation `(int, float, complex)` was a tuple,
    not a valid type hint; the conjugate is now computed with the built-in
    complex.conjugate() instead of `num - 2j*num.imag` arithmetic.)
    """
    # Real numbers are their own conjugate.
    if isinstance(num, (int, float)):
        return num
    # complex.conjugate() flips the imaginary part's sign directly.
    return num.conjugate()
#Matrix error exception, to be used for matrix errors specifically
class MatrixError(Exception):
    """Raised for invalid matrix construction or unsupported matrix operations."""
    pass
class Matrix:
"""
Matrix object that contains rows and columns of data.
The data can be accessed by indexing the data attribute:
Matrix.data[row][column]
Data can be int, float, or complex
Dimensions of matrix always need to be specified in object creation
Data is not necessary (it will default to a 0-Matrix)
Can create an identity matrix with dimensions given if no data is given
Parameters for object construction: (dimensions:tuple, data:list=[], identity:bool=False)
Example object creation:
A = Matrix((2, 3), [[1, 5+1j, 0], [1.12, -3, 43]]) Will create a 2x3 Matrix with that data
I = Matrix((4,4), identity=True) Will create a 4x4 identity matrix
O = Matrix((2,2)) Will create a 2x2 0-matrix [[0,0],[0,0]]
"""
#take dimensions as tuple, data as list (of lists), identity as bool (shortcut to get identity matrix)
def __init__(self, dimensions:tuple, data:list=[], identity:bool = False):
self.dims = dimensions
self.data = []
#if data is empty, add 0s in all entries
if not data:
for i in range(self.dims[0]):
self.data.append([0 for j in range(self.dims[1])])
#if there is data and request identity matrix, raise error
if data and identity:
raise MatrixError("Can't pass data and have an indentity matrix.")
#if want identity, and it's square matrix, make diagonal full of 1s
#(everything else is already 0)
if identity and self.dims[0] == self.dims[1]:
for i in range(self.dims[0]):
self.data[i][i] = 1
#identity but not square, raise error
elif identity:
raise MatrixError("Identity matrices need to be sqare.")
#not identity, fill in data from what is given
elif not identity:
for row in data:
#if all are int, float, complex, AND dimensions check out, continue
if all(isinstance(element, (int, float, complex)) for element in row) and (len(data),len(row)) == self.dims:
self.data = data
#if all are int, float, complex, but dimensions don't check out, raise error
elif all(isinstance(element, (int, float, complex)) for element in row):
raise MatrixError("The data provided does not fit the dimensions given.")
#if values aren't numbers, raise error
else:
raise MatrixError("Not all data values are numbers.")
else:
raise MatrixError("Something went wrong while creating this matrix object.")
def copy(self):
"""
Matrix -> Matrix
Will return a matrix with the same data as the input matrix.
"""
#empty matrix
copyMatrix = Matrix(self.dims)
#copy data
copyMatrix.data = self.data
return copyMatrix
def __add__(self, other):
"""
Matrix + Matrix -> Matrix
Will return a matrix with each data entry being equal to the sum of the corresponding data
from the summed matrices.
"""
if self.dims == other.dims:
result = Matrix(self.dims)
for i in range(result.dims[0]):
#add each corresponding entry
result.data[i] = [self.data[i][j] + other.data[i][j] for j in range(self.dims[1])]
return result
else:
raise MatrixError("Can't add matrices with different dimensions.")
def __sub__(self, other):
"""
Matrix - Matrix -> Matrix
Will return a matrix with each data entry being equal to the difference of the corresponding data
from the summed matrices.
"""
if self.dims == other.dims:
result = Matrix(self.dims)
for i in range(result.dims[0]):
#subtract each corresponding entry
result.data[i] = [self.data[i][j] - other.data[i][j] for j in range(self.dims[1])]
return result
else:
raise MatrixError("Can't subtract matrices with different dimensions.")
def __mul__(self, other):
"""
Matrix * int,float,complex -> Matrix
Matrix * Vector -> Vector
Matrix * Matrix -> Matrix
Scalars in the form of int, float, or complex will be multiplied to each
entry in the original matrix, and the new matrix will be returned.
MxN Matrix and Nx1 Vector multiplication will return an Mx1 Vector object
MxN Matrix and NxP Matrix multiplication will return an MxP Matrix object
"""
#scalar-matrix multiplication
if isinstance(other, (int, float, complex)):
result = Matrix(self.dims)
for i in range(self.dims[0]):
for j in range(self.dims[1]):
#multiply each entry by scalar
result.data[i][j] = self.data[i][j] * other
return result
#vector-matrix multiplication
elif isinstance(other, Vector) and self.dims[1] == other.dim:
result = Vector(self.dims[0])
for i in range(self.dims[0]):
result.data[i] = sum([self.data[i][j]*other.data[j] for j in range(other.dim)])
return result
#matrix-matrix multiplication
elif self.dims[1] == other.dims[0]:
result = Matrix((self.dims[0], other.dims[1]))
for i in range(self.dims[0]):
for j in range(other.dims[1]):
result.data[i][j] = sum([self.data[i][k]*other.data[k][j] for k in range(self.dims[1])])
return result
else:
raise MatrixError("Can't multiply matrices with these dimensions.")
def __truediv__(self, other):
"""
Matrix / int,float,complex -> Matrix
Scalars in the form of int, float, or complex will be used as divisors
to each entry in the original matrix, and the new matrix will be returned.
"""
if isinstance(other, (int, float, complex)):
result = Matrix(self.dims)
for i in range(self.dims[0]):
for j in range(self.dims[1]):
result.data[i][j] = self.data[i][j] / other
return result
else:
raise MatrixError("Can't divide matrices.")
def __pow__(self, power: int):
"""
Matrix ** int -> Matrix
Will return the result of multiplying a matrix a certain amount of times
by itself.
"""
result = self.copy()
if power > 0:
for i in range(power-1):
result = result * result
return result
elif power == 0 and self.dims[0] == self.dims[1]:
return IdentityMatrix(self.dims[0]).matrix
else:
raise MatrixError("Can't raise matrix to that power")
def __str__(self):
"""
Matrix -> str
Will return the Matrix's data in string form (as a list of lists)
"""
return str(self.data)
def __iter__(self):
"""
Matrix -> list
Will return the Matrix's data as a list of lists
"""
return iter(self.data)
def __eq__(self, other):
"""
Matrix == Matrix -> bool
Matrices are equal if their data is equal
"""
return self.data == other.data
def __ne__(self, other):
"""
Matrix != Matrix -> bool
Matrices are not equal if their data is not equal
"""
return self.data != other.data
def __abs__(self):
"""
abs(Matrix) -> int, float, complex
'Absolute Value' of a Matrix will be its determinant
"""
return self.det()
    def echelon(self):
        """
        Matrix -> Matrix
        Will perform elementary row operations to a Matrix until it is in
        echelon form (not necessarily reduced echelon form)
        General Echelon Form:
        [* * * * *]
        [0 * * * *]
        [0 0 * * *]
        [0 0 0 0 *]
        where * can be any number.

        Mutates self in place and returns self.  As side effects it publishes
        two module-level globals: `flips` (row-swap count, consumed by det())
        and `positions` (pivot [row, col] pairs, consumed by pivotPos()).
        NOTE(review): the globals make this non-reentrant and thread-unsafe;
        consider returning these values instead.
        """
        global flips
        flips = 0
        global positions
        positions = []
        i = 0
        j = 0
        m = self.dims[0]   # row count
        n = self.dims[1]   # column count
        while i < m and j < n:
            # Zero pivot: try to swap in a lower row with a non-zero entry in
            # this column; if none exists, the column has no pivot -- move on.
            if self.data[i][j] == 0:
                for row in range(i+1, m):
                    if self.data[row][j] != 0:
                        self.data[i], self.data[row] = self.data[row], self.data[i]
                        flips += 1
                        break
                else:
                    j += 1
                    continue
            # Eliminate every entry below the pivot by adding a scaled copy
            # of the pivot row.
            for row in range(i+1, m):
                constant = -1 * self.data[row][j] / self.data[i][j]
                multipliedRow = [constant*number for number in self.data[i]]
                self.data[row] = [a + b for a, b in zip(self.data[row], multipliedRow)]
            positions.append([i, j])
            i += 1
            j += 1
        return self
    def det(self):
        """
        Matrix -> int,float,complex
        Will return the determinant of a Matrix object.

        Computed as the product of the diagonal of the echelon form, with the
        sign corrected by the number of row swaps (read from the module-level
        global `flips` that echelon() sets).
        Raises MatrixError for non-square matrices.
        NOTE(review): copy() may share row storage with the original and
        echelon() mutates in place -- verify det() does not corrupt self.
        """
        if self.dims[0] != self.dims[1]:
            raise MatrixError("Can't get the determinant of a non-square matrix.")
        global flips
        echMatrix = self.copy().echelon()
        determinant = 1
        for i in range(self.dims[0]):
            determinant *= echMatrix.data[i][i]
        # Each row swap performed during elimination flips the sign.
        determinant *= (-1)**flips
        return determinant
    def pivotPos(self):
        """
        Matrix -> list
        Will return a list of pivot positions in a Matrix. Each pivot
        position is a list in the form [i,j], indicating there is a
        pivot in the ith row, jth column.

        The positions are produced as a side effect of echelon(), which
        stores them in the module-level global `positions`.
        NOTE(review): the echelon result itself is discarded here; only the
        global is used, which makes this non-reentrant.
        """
        echMatrix = self.copy().echelon()  # run elimination to populate `positions`
        global positions
        return positions
def reduced(self):
    """
    Matrix -> Matrix
    Return the reduced echelon form, computed with elementary row
    operations.

    General reduced echelon form:
        [1 0 0 * 0]
        [0 1 0 * 0]
        [0 0 1 * 0]
        [0 0 0 0 1]
    where * can be any number.

    NOTE: self.echelon() mutates this matrix in place, so calling
    reduced() also changes the original matrix.
    """
    echMatrix = self.echelon()
    pivotPositions = self.pivotPos()
    for i,j in pivotPositions:
        # Scale the pivot row so the pivot entry becomes 1.
        divConstant = echMatrix.data[i][j]
        echMatrix.data[i] = [value / divConstant for value in echMatrix.data[i]]
        # Clear every entry above the pivot.
        for row in range(0, i):
            multConstant = -1 * echMatrix.data[row][j]
            multipliedRow = [multConstant*number for number in echMatrix.data[i]]
            echMatrix.data[row] = [a + b for a, b in zip(echMatrix.data[row], multipliedRow)]
    return echMatrix
def transpose(self):
    """
    Matrix -> Matrix
    Return a new Matrix with rows and columns swapped.

    Example (2x3 -> 3x2):
        [1 2 3]      [1 4]
        [4 5 6]  ->  [2 5]
                     [3 6]
    """
    rows, cols = self.dims
    result = Matrix((cols, rows))
    # Row c of the transpose is column c of the original.
    result.data = [[self.data[r][c] for r in range(rows)] for c in range(cols)]
    return result
def inverse(self):
    """
    Matrix -> Matrix
    Return the inverse via Gauss-Jordan elimination: every row operation
    applied to a working copy of self is mirrored on an identity matrix,
    which becomes the inverse once the copy reaches reduced echelon form.

    Raises:
        MatrixError: if the matrix is not square or its determinant is 0.
    """
    if self.dims[0] != self.dims[1]:
        raise MatrixError("Can't get inverse of non-square matrix.")
    elif self.det() == 0:
        raise MatrixError("Can't get inverse of matrix with determinant 0.")
    matrixCopy = self.copy()
    matrixInv = Matrix(self.dims, identity=True)
    positions = []  # local pivot list (intentionally shadows the module-level global)
    i = 0
    j = 0
    m = matrixCopy.dims[0]
    n = matrixCopy.dims[1]
    # Forward elimination, mirroring each operation on matrixInv.
    while i < m and j < n:
        if matrixCopy.data[i][j] == 0:
            # Zero pivot: swap in a row from below (swap both matrices).
            for row in range(i+1, m):
                if matrixCopy.data[row][j] != 0:
                    matrixCopy.data[i], matrixCopy.data[row] = matrixCopy.data[row], matrixCopy.data[i]
                    matrixInv.data[i], matrixInv.data[row] = matrixInv.data[row], matrixInv.data[i]
                    break
            else:
                j += 1
                continue
        # Eliminate entries below the pivot in both matrices.
        for row in range(i+1, m):
            constant = -1 * matrixCopy.data[row][j] / matrixCopy.data[i][j]
            multipliedRow = [constant*number for number in matrixCopy.data[i]]
            matrixCopy.data[row] = [a + b for a, b in zip(matrixCopy.data[row], multipliedRow)]
            multipliedRow = [constant*number for number in matrixInv.data[i]]
            matrixInv.data[row] = [a + b for a, b in zip(matrixInv.data[row], multipliedRow)]
        positions.append([i, j])
        i += 1
        j += 1
    # Back substitution: scale pivots to 1 and clear entries above them.
    for i,j in positions:
        divConstant = matrixCopy.data[i][j]
        matrixCopy.data[i] = [value / divConstant for value in matrixCopy.data[i]]
        matrixInv.data[i] = [value / divConstant for value in matrixInv.data[i]]
        for row in range(0, i):
            multConstant = -1 * matrixCopy.data[row][j]
            multipliedRow = [multConstant*number for number in matrixCopy.data[i]]
            matrixCopy.data[row] = [a + b for a, b in zip(matrixCopy.data[row], multipliedRow)]
            multipliedRow = [multConstant*number for number in matrixInv.data[i]]
            matrixInv.data[row] = [a + b for a, b in zip(matrixInv.data[row], multipliedRow)]
    return matrixInv
def QR(self):
    """
    Matrix -> tuple
    Gram-Schmidt QR decomposition: return (Q, R) such that A = Q*R, where
    Q has orthonormal columns and R is upper triangular. Entries may be
    complex, hence the conjugate() calls in the inner products.
    """
    # Columns of A as Vector objects (rows of the transpose are columns of A).
    # NOTE(review): the vectors are built with length self.dims[1]; for a
    # non-square matrix the columns have length self.dims[0] -- confirm.
    a_i = [Vector(self.dims[1], col) for col in self.transpose()]
    u_i = []
    # Gram-Schmidt: subtract from each column its projections onto the
    # previously orthogonalised vectors.
    for i in range(len(a_i)):
        u_i.append(a_i[i])
        for j in range(0, i):
            u_i[i] = u_i[i] - a_i[i].project(u_i[j])
    e_i = [element.unit() for element in u_i]
    # Q: the orthonormal vectors as columns.
    Q = Matrix((e_i[0].dim, len(e_i)), [element.data for element in e_i]).transpose()
    R = Matrix((e_i[0].dim, len(e_i)))
    # R[i][j] = <a_j, e_i> for j >= i (upper triangle only).
    for i in range(e_i[0].dim):
        for j in range(i, len(e_i)):
            innerProduct = sum([a_i[j].data[k] * conjugate(e_i[i].data[k]) for k in range(e_i[i].dim)])
            R.data[i][j] = innerProduct
    return (Q, R)
def eigenvalues(self):
    """
    Matrix -> list
    Estimate the eigenvalues with the shifted QR algorithm. Unreduced 2x2
    blocks (nonzero subdiagonal) are resolved with the quadratic formula,
    which yields complex-conjugate eigenvalue pairs.
    """
    A_k = self.copy()
    # Deflation loop over trailing diagonal positions.
    for m in range(A_k.dims[0]-1, 0, -1):
        k = 0
        while k < 1000:  # hard iteration cap as a safety net
            sigma_k = A_k.data[m][m]  # shift taken from the trailing diagonal entry
            # QR step on the shifted matrix, then shift back.
            Q, R = (A_k - (Matrix((A_k.dims), identity=True)*sigma_k)).QR()
            A_kPrev = A_k
            A_k = (R*Q) + (Matrix((A_k.dims), identity=True)*sigma_k)
            # Converged when every diagonal entry changed by less than ~0.1%.
            stable = True
            for i in range(A_k.dims[0]):
                if A_k.data[i][i] == 0 or A_kPrev.data[i][i] == 0:
                    continue
                if abs(A_k.data[i][i] / A_kPrev.data[i][i]) < 0.999 or abs(A_k.data[i][i] / A_kPrev.data[i][i]) > 1.001:
                    stable = False
                    break
            if stable:
                break
            k += 1
    eValues = []
    i = 0
    while i < A_k.dims[0]-1:
        if abs(A_k.data[i+1][i] / A_k.data[i][i]) > 0.001:
            # Unreduced 2x2 block -> complex pair via the quadratic formula
            # on its characteristic polynomial.
            a = A_k.data[i][i]
            b = A_k.data[i][i+1]
            c = A_k.data[i+1][i]
            d = A_k.data[i+1][i+1]
            complexValA = (a + d + cmath.sqrt(a**2 + 2*a*d + d**2 + 4*b*c - 4*d*a))/2
            complexValB = (a + d - cmath.sqrt(a**2 + 2*a*d + d**2 + 4*b*c - 4*d*a))/2
            eValues.append(complexValA)
            eValues.append(complexValB)
            i += 2
        else:
            eValues.append(A_k.data[i][i])
            i += 1
    # If the last diagonal entry was not consumed by a 2x2 block, it is an
    # eigenvalue on its own.
    if len(eValues) < A_k.dims[0]:
        eValues.append(A_k.data[-1][-1])
    return eValues
class VectorError(Exception):
    """Raised for invalid Vector construction or vector operations."""
    pass
class Vector:
    """
    Vector object that stores its entries in the list attribute ``data``
    (accessed as ``Vector.data[index]``). Entries may be int, float, or
    complex.

    The dimension/length must always be given; ``data`` is optional and
    defaults to the zero vector.

    Example:
        v = Vector(3, [1, 5+1j, 0])   # 3-entry vector with that data
        w = Vector(2)                 # 2-entry zero vector [0, 0]
    """
    def __init__(self, dimension:int, data:list=None):
        """
        Create a vector of the given dimension.

        Raises:
            VectorError: if len(data) != dimension, or any entry is not a
                number.
        """
        # `data=None` replaces the original mutable default `[]`; passing
        # an explicit empty list still yields the zero vector, so callers
        # are unaffected.
        self.dim = dimension
        if not data:
            self.data = [0 for i in range(self.dim)]
        elif all(isinstance(element, (int, float, complex)) for element in data) and len(data) == self.dim:
            self.data = data
        elif all(isinstance(element, (int, float, complex)) for element in data):
            raise VectorError("The data provided does not fit the dimension given.")
        else:
            raise VectorError("Not all data values are numbers.")
    def copy(self):
        """
        Vector -> Vector
        Return an independent copy. The data list is copied (the original
        aliased it, so mutating the copy also mutated the source).
        """
        copyVector = Vector(self.dim)
        copyVector.data = list(self.data)
        return copyVector
    def __add__(self, other):
        """
        Vector + Vector -> Vector
        Entry-wise sum of two vectors of the same dimension.

        Raises:
            VectorError: if the dimensions differ.
        """
        if self.dim == other.dim:
            result = Vector(self.dim)
            result.data = [self.data[i] + other.data[i] for i in range(self.dim)]
            return result
        else:
            raise VectorError("Can't add vectors with different dimensions.")
    def __sub__(self, other):
        """
        Vector - Vector -> Vector
        Entry-wise difference of two vectors of the same dimension.

        Raises:
            VectorError: if the dimensions differ.
        """
        if self.dim == other.dim:
            result = Vector(self.dim)
            result.data = [self.data[i] - other.data[i] for i in range(self.dim)]
            return result
        else:
            # Fixed: the original message said "add".
            raise VectorError("Can't subtract vectors with different dimensions.")
    def __mul__(self, other):
        """
        Vector * int,float,complex -> Vector (scalar multiplication)
        Vector * Vector -> int, float, complex (dot product)

        Raises:
            VectorError: for a vector-vector product with mismatched
                dimensions.
        """
        if isinstance(other, (int, float, complex)):
            result = Vector(self.dim)
            result.data = [self.data[i] * other for i in range(self.dim)]
            return result
        elif self.dim == other.dim:
            result = sum([self.data[i] * other.data[i] for i in range(self.dim)])
            return result
        else:
            raise VectorError("Can't dot product vectors with these dimensions.")
    def __truediv__(self, other):
        """
        Vector / int,float,complex -> Vector
        Divide every entry by a scalar.

        Raises:
            VectorError: if `other` is not a scalar (vector / vector is
                undefined).
        """
        if isinstance(other, (int, float, complex)):
            result = Vector(self.dim)
            result.data = [self.data[i] / other for i in range(self.dim)]
            return result
        else:
            raise VectorError("Can't divide vectors.")
    def __str__(self):
        """
        Vector -> str
        Return the vector's data rendered as a list.
        """
        return str(self.data)
    def __iter__(self):
        """
        Vector -> iterator
        Iterate over the vector's entries.
        """
        return iter(self.data)
    def __eq__(self, other):
        """
        Vector == Vector -> bool
        Vectors are equal if their data is equal.
        """
        return self.data == other.data
    def __ne__(self, other):
        """
        Vector != Vector -> bool
        Vectors are not equal if their data is not equal.
        """
        return self.data != other.data
    def __abs__(self):
        """
        abs(Vector) -> int, float
        Return the magnitude of the vector.
        """
        return self.magnitude()
    def cross(self, other):
        """
        Vector.cross(Vector) -> Vector
        Return the cross product of two 3-dimensional vectors.

        Fixed: the original used 1-based indices (data[1]..data[3]), which
        raised IndexError on 3-entry data and computed wrong components.

        Raises:
            VectorError: if either vector is not 3-dimensional.
        """
        if self.dim != 3 or other.dim != 3:
            raise VectorError("Can't cross product vectors that are not 3-dimensional.")
        xValue = self.data[1]*other.data[2] - self.data[2]*other.data[1]
        yValue = self.data[2]*other.data[0] - self.data[0]*other.data[2]
        zValue = self.data[0]*other.data[1] - self.data[1]*other.data[0]
        return Vector(3, [xValue, yValue, zValue])
    def magnitude(self):
        """
        Vector -> int, float
        Return the Euclidean magnitude, rounded to 4 decimal places
        (|entry| handles complex entries).
        """
        return round(math.sqrt(sum([(abs(num))**2 for num in self.data])), 4)
    def project(self, other):
        """
        Vector1.project(Vector2) -> Vector
        Return the projection of Vector1 onto Vector2. Projecting onto a
        zero vector returns the zero vector. Complex data uses the
        conjugate inner product.

        Raises:
            VectorError: if the dimensions differ.
        """
        if self.dim != other.dim:
            raise VectorError("Can't project vector into another vector of different dimension.")
        if other.magnitude() == 0:
            return Vector(self.dim, data=[])
        if all(isinstance(num, (int, float)) for num in self.data + other.data):
            scalar = (self*other) / (other*other)
            data = [scalar*num for num in other.data]
            return Vector(self.dim, data)
        else:
            numerator = sum([self.data[i] * conjugate(other.data[i]) for i in range(self.dim)])
            denominator = sum([conjugate(other.data[i]) * other.data[i] for i in range(self.dim)])
            scalar = numerator / denominator
            data = [scalar*num for num in other.data]
            return Vector(self.dim, data)
    def unit(self):
        """
        Vector -> Vector
        Return this vector scaled to magnitude 1 (the zero vector is
        returned unchanged, since it cannot be normalised).
        """
        result = self.copy()
        if result.magnitude() == 0:
            return result
        return result / result.magnitude()
# Demo: exercise the Matrix and Vector classes.
A = Matrix((2, 2), [[2, 5], [6, -2.45]])
B = Matrix((2, 3), [[2, 0, -1.314], [2+7j, 6, -3-6j]])
print(f"A = {A}\nB = {B}\nA*B = {A*B}")
# NOTE(review): echelon() mutates B in place, so the "reduced" line below
# starts from the already-echelonised B -- confirm this is intended.
print(f"B in echelon form: {B.echelon()}")
print(f"B in reduced echelon form: {B.reduced()}")
print(f"Eigenvalues of A: {A.eigenvalues()}")
print(f"QR factorization of A (A = Q*R):\nQ = {A.QR()[0]}\nR = {A.QR()[1]}")
v = Vector(5, [2, 0, -3, 2+5j, 3])
print(f"v = {v}\nUnit Vector of v = {v.unit()}")
|
#!/usr/bin/env python3
"""
Daemon class to turn any program into an UNIX daemon process.
Inspired from "Launching a Daemon Process on Unix"
http://chimera.labs.oreilly.com/books/1230000000393/ch12.html#_discussion_209
"""
import os
import sys
import atexit
import signal
import codecs
import builtins
import datetime
__all__ = ['Daemon']
class Daemon:
    """Turn any program into a UNIX daemon process.

    Subclass and override run() (and optionally before_stop() /
    others_commands()), or pass a callable as `action`.
    """
    def __init__(self, pidfile, action=None, kargs=None,
                 stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'):
        """
        Args:
            pidfile: path of the PID file used to track the daemon.
            action: callable run by start(); defaults to self.run.
            kargs: keyword arguments passed to `action` (default: none).
            stdin/stdout/stderr: files the daemon's standard streams are
                redirected to.
        """
        self.pidfile = pidfile
        self.stdin = stdin
        self.stdout = stdout
        self.stderr = stderr
        if action:
            self.action = action
        else:
            self.action = self.run
        # `kargs=None` replaces the original mutable default `{}`, which
        # was shared between every Daemon instance.
        self.kargs = kargs if kargs is not None else {}

    def daemonize(self):
        """Detach from the controlling terminal and become a daemon.

        Classic double-fork: the first fork detaches from the parent, the
        second relinquishes session leadership so the process can never
        reacquire a controlling terminal.

        Raises:
            RuntimeError: if the daemon is already running or a fork fails.
        """
        if os.path.exists(self.pidfile):
            raise RuntimeError("Daemon already running.")
        # First fork (detaches from parent)
        try:
            if os.fork() > 0:  # > 0 means parent process
                raise SystemExit(0)  # Parent exit
        except OSError as e:
            raise RuntimeError("Fork #1 failed.") from e
        os.chdir('/')  # Root as working directory, so no mount point is pinned
        os.umask(0)    # Do not mask any permission bits on created files
        os.setsid()    # Create a new session / process group
        # Second fork (relinquish session leadership)
        try:
            if os.fork() > 0:
                raise SystemExit(0)
        except OSError as e:
            raise RuntimeError("Fork #2 failed.") from e
        # Flush I/O buffers before swapping the file descriptors
        sys.stdout.flush()
        sys.stderr.flush()
        # Force UTF-8 output so piping to a log file cannot fail on encoding
        if sys.stdout.encoding != 'UTF-8':
            sys.stdout = codecs.getwriter('utf-8')(sys.stdout.buffer, 'strict')
        if sys.stderr.encoding != 'UTF-8':
            sys.stderr = codecs.getwriter('utf-8')(sys.stderr.buffer, 'strict')
        # Replace file descriptors for stdin, stdout, stderr
        with open(self.stdin, 'rb', 0) as f:
            os.dup2(f.fileno(), sys.stdin.fileno())
        with open(self.stdout, 'ab', 0) as f:
            os.dup2(f.fileno(), sys.stdout.fileno())
        with open(self.stderr, 'ab', 0) as f:
            os.dup2(f.fileno(), sys.stderr.fileno())
        # Write PID file
        with open(self.pidfile, 'w') as f:
            f.write("{}\n".format(os.getpid()))
        # Arrange to have the PID file removed on exit/signal
        atexit.register(lambda: os.remove(self.pidfile))
        # Signal handler for termination (required so atexit handlers run)
        def sigterm_handler(signo, frame):
            self.before_stop()
            raise SystemExit(1)
        signal.signal(signal.SIGTERM, sigterm_handler)

    def start(self):
        """Daemonize, then run the configured action."""
        self.daemonize()
        if self.kargs:
            # Forward keyword arguments to the daemon's main function
            self.action(**self.kargs)
        else:
            self.action()

    def restart(self):
        """Stop the daemon, then start it again."""
        self.stop()
        self.start()

    def stop(self):
        """Send SIGTERM to the daemon and wait until its PID file is gone.

        Raises:
            SystemExit: if the daemon is not running.
        """
        import time  # local: only needed for the polling sleep below
        if os.path.exists(self.pidfile):
            with open(self.pidfile) as f:
                os.kill(int(f.read()), signal.SIGTERM)
            # os.kill is asynchronous; poll (with a sleep, so we don't
            # burn a whole CPU) until the PID file disappears.
            while os.path.exists(self.pidfile):
                time.sleep(0.1)
        else:
            print("Daemon is not running.", file=sys.stderr)
            raise SystemExit(0)

    def before_stop(self):
        """Run just before the daemon stops. Override to customize."""
        pass

    def run(self):
        """Main program of the daemon. Override to customize."""
        pass

    def commands(self):
        """Dispatch the basic start/stop/restart CLI commands."""
        if len(sys.argv) < 2:
            print("Usage: {} [start|stop|restart]".format(sys.argv[0]), file=sys.stderr)
            raise SystemExit(1)
        if sys.argv[1] == 'start':
            self.start()
        elif sys.argv[1] == 'stop':
            self.stop()
        elif sys.argv[1] == 'restart':
            self.restart()
        else:
            # Give subclasses a chance to handle their own commands first
            if not self.others_commands():
                print("Unknown command {!r}".format(sys.argv[1]), file=sys.stderr)
                raise SystemExit(1)

    def others_commands(self):
        """
        Override to add your own daemon commands.
        Must return True if there is at least one customized command.
        """
        return False
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author:Lyon
import threading
import time
def run(num):
    """Print a message about *num*, pause two seconds, then print a follow-up."""
    message = "I like num %d" % num
    print(message)
    time.sleep(2)
    print("When I'm done, I'm going to keep talking...")
def main():
    """Launch five worker threads one after another.

    join() is called immediately after start(), so each worker finishes
    before the next one begins (the workers run serially).
    """
    for worker_id in range(1, 6):
        worker = threading.Thread(target=run, args=(worker_id,))
        worker.start()
        worker.join()
if __name__ == '__main__':
    # Run main() in a daemon thread with an 8-second cap: if the workers
    # are still going after the timeout, the interpreter exits anyway.
    m = threading.Thread(target=main, args=[])
    # setDaemon() is deprecated (since Python 3.10); set the attribute.
    m.daemon = True
    m.start()
    m.join(timeout=8)
from backpack.core.derivatives.elu import ELUDerivatives
from backpack.core.derivatives.leakyrelu import LeakyReLUDerivatives
from backpack.core.derivatives.logsigmoid import LogSigmoidDerivatives
from backpack.core.derivatives.relu import ReLUDerivatives
from backpack.core.derivatives.selu import SELUDerivatives
from backpack.core.derivatives.sigmoid import SigmoidDerivatives
from backpack.core.derivatives.tanh import TanhDerivatives
from backpack.extensions.secondorder.diag_hessian.diag_h_base import DiagHBaseModule
class DiagHReLU(DiagHBaseModule):
    """Module extension that computes the Hessian diagonal for ``torch.nn.ReLU``."""
    def __init__(self):
        super().__init__(derivatives=ReLUDerivatives())
class DiagHSigmoid(DiagHBaseModule):
    """Module extension that computes the Hessian diagonal for ``torch.nn.Sigmoid``."""
    def __init__(self):
        super().__init__(derivatives=SigmoidDerivatives())
class DiagHTanh(DiagHBaseModule):
    """Module extension that computes the Hessian diagonal for ``torch.nn.Tanh``."""
    def __init__(self):
        super().__init__(derivatives=TanhDerivatives())
class DiagHLeakyReLU(DiagHBaseModule):
    """Module extension that computes the Hessian diagonal for ``torch.nn.LeakyReLU``."""
    def __init__(self):
        super().__init__(derivatives=LeakyReLUDerivatives())
class DiagHLogSigmoid(DiagHBaseModule):
    """Module extension that computes the Hessian diagonal for ``torch.nn.LogSigmoid``."""
    def __init__(self):
        super().__init__(derivatives=LogSigmoidDerivatives())
class DiagHELU(DiagHBaseModule):
    """Module extension that computes the Hessian diagonal for ``torch.nn.ELU``."""
    def __init__(self):
        # Delegate to the base module with ELU-specific derivatives.
        super().__init__(derivatives=ELUDerivatives())
class DiagHSELU(DiagHBaseModule):
    """Module extension that computes the Hessian diagonal for ``torch.nn.SELU``."""
    def __init__(self):
        # Delegate to the base module with SELU-specific derivatives.
        super().__init__(derivatives=SELUDerivatives())
|
#!/usr/bin/env python
#
# Copyright 2013 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
# This creates the tables used for distributions implemented using the
# ziggurat algorithm in `rand::distributions;`. They are
# (basically) the tables as used in the ZIGNOR variant (Doornik 2005).
# They are changed rarely, so the generated file should be checked in
# to git.
#
# It creates 3 tables: X as in the paper, F which is f(x_i), and
# F_DIFF which is f(x_i) - f(x_{i-1}). The latter two are just cached
# values which is not done in that paper (but is done in other
# variants). Note that the adZigR table is unnecessary because of
# algebra.
#
# It is designed to be compatible with Python 2 and 3.
from math import exp, sqrt, log, floor
import random
# The order should match the return value of `tables`
# The order should match the return value of `tables`
TABLE_NAMES = ['X', 'F']
# The actual length of the table is 1 more, to stop
# index-out-of-bounds errors. This should match the bitwise operation
# to find `i` in `ziggurat` in `libstd/rand/mod.rs`. Also the *_R and
# *_V constants below depend on this value.
TABLE_LEN = 256
# equivalent to `zigNorInit` in Doornik2005, but generalised to any
# distribution. r = dR, v = dV, f = probability density function,
# f_inv = inverse of f
def tables(r, v, f, f_inv, table_len=None):
    """Compute the ziggurat X and F tables for a distribution.

    Equivalent to `zigNorInit` in Doornik (2005), generalised to any
    distribution.

    Args:
        r: right-most x coordinate (dR in the paper).
        v: area of each ziggurat segment (dV in the paper).
        f: probability density function.
        f_inv: inverse of f.
        table_len: number of ziggurat segments. Defaults to the
            module-level TABLE_LEN, keeping the original call signature
            working unchanged.

    Returns:
        (xvec, fvec): the x_i coordinates and the cached f(x_i) values,
        each of length table_len + 1.
    """
    n = TABLE_LEN if table_len is None else table_len
    # compute the x_i
    xvec = [0]*(n+1)
    xvec[0] = v / f(r)
    xvec[1] = r
    for i in range(2, n):
        last = xvec[i-1]
        xvec[i] = f_inv(v / last + f(last))
    # cache the f's; xvec[n] stays 0, so fvec[n] = f(0)
    fvec = [f(x) for x in xvec]
    return xvec, fvec
# Distributions
# N(0, 1)
def norm_f(x):
    """Unnormalised standard-normal density: exp(-x^2 / 2)."""
    return exp(-(x * x) / 2.0)
def norm_f_inv(y):
    """Inverse of norm_f on x >= 0: sqrt(-2 * ln(y))."""
    ln_y = log(y)
    return sqrt(-2.0 * ln_y)
# Tail cut-off (dR) and segment area (dV) for N(0, 1), from Doornik (2005);
# both depend on TABLE_LEN = 256.
NORM_R = 3.6541528853610088
NORM_V = 0.00492867323399
NORM = tables(NORM_R, NORM_V,
              norm_f, norm_f_inv)
# Exp(1)
def exp_f(x):
    """Density of the Exp(1) distribution: e^(-x)."""
    neg_x = -x
    return exp(neg_x)
def exp_f_inv(y):
    """Inverse of exp_f: -ln(y)."""
    return -1.0 * log(y)
# Tail cut-off (dR) and segment area (dV) for Exp(1); both depend on
# TABLE_LEN = 256.
EXP_R = 7.69711747013104972
EXP_V = 0.0039496598225815571993
EXP = tables(EXP_R, EXP_V,
             exp_f, exp_f_inv)
# Output the tables/constants/types
def render_static(name, type, value):
    """Render one Rust `pub static` item as source text.

    Note the deliberate lack of a space after '=': callers pass `value`
    with its own leading whitespace/newline.
    """
    declaration = 'pub static %s: %s =%s;\n' % (name, type, value)
    return declaration
# static `name`: [`type`, .. `len(values)`] =
# [values[0], ..., values[3],
# values[4], ..., values[7],
# ... ];
def render_table(name, values):
    """Render `values` as a Rust `pub static [f64, .. N]` table.

    Values are written with 18 decimal places, four per row. The output
    format matches render_static's 'pub static NAME: TYPE =VALUE;\\n'.
    """
    # Four values on each row.
    rows = [', '.join('%.18f' % x for x in values[start:start + 4])
            for start in range(0, len(values), 4)]
    rendered = '\n [%s]' % ',\n '.join(rows)
    return 'pub static %s: [f64, .. %d] =%s;\n' % (name, len(values), rendered)
# Emit ziggurat_tables.rs: license header and ZigTable type alias, then the
# R constant and X/F tables for the normal and exponential distributions.
with open('ziggurat_tables.rs', 'w') as f:
    f.write('''// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Tables for distributions which are sampled using the ziggurat
// algorithm. Autogenerated by `ziggurat_tables.py`.
pub type ZigTable = &\'static [f64, .. %d];
''' % (TABLE_LEN + 1))
    # NOTE: the loop variable `tables` shadows the tables() function; it is
    # not used again afterwards, so this is harmless here.
    for name, tables, r in [('NORM', NORM, NORM_R),
                            ('EXP', EXP, EXP_R)]:
        f.write(render_static('ZIG_%s_R' % name, 'f64', ' %.18f' % r))
        for (tabname, table) in zip(TABLE_NAMES, tables):
            f.write(render_table('ZIG_%s_%s' % (name, tabname), table))
|
from copy import copy
from ..blast.BlastHsp import BlastHsp
class BlastHit:
    """One BLAST hit: a subject sequence together with its HSPs."""

    def __init__(self, subject_id, subject_length, hsps):
        """
        Args:
            subject_id: identifier of the subject sequence.
            subject_length: length of the subject sequence.
            hsps: list of BlastHsp objects for this hit.
        """
        self.subject_id = subject_id
        self.subject_length = subject_length
        self.hsps = hsps

    @classmethod
    def from_rec(cls, rec):
        """Build a BlastHit from a Biopython-style hit record."""
        hsps = [BlastHsp.from_rec(hsp) for hsp in rec.hsps]
        return cls(rec.hit_id, rec.length, hsps)

    def merge_hsps(self, max_space):
        """Return self.hsps extended with merged HSPs.

        Two HSPs are merged (hi + hj) when hj follows hi on both the query
        and the subject, on the same strand, with gaps of at most
        `max_space` on each. Merging repeats until no new pair qualifies;
        the original HSPs are kept alongside the merged ones.
        """
        hsps = copy(self.hsps)
        change = True
        # Index pairs already merged; a set gives O(1) membership checks
        # (the original used a list, which is O(n) per test).
        merged_idx = set()
        while change:
            change = False
            merged = []
            for i, hi in enumerate(hsps):
                for j, hj in enumerate(hsps):
                    if (i != j and (i, j) not in merged_idx
                            and hi.query_end <= hj.query_start
                            and hi.subject_end <= hj.subject_start
                            and hi.subject_strand == hj.subject_strand
                            and hj.query_start - hi.query_end + 1 <= max_space
                            and hj.subject_start - hi.subject_end + 1 <= max_space):
                        merged.append(hi + hj)
                        change = True
                        merged_idx.add((i, j))
            hsps = hsps + merged
        return hsps

    def get_best_hsp(self, query_range=(0, 0), min_overlap=1, positive_subject_strand_only=False):
        """Return the best HSP, optionally restricted to a query range.

        query_range == (0, 0) means "no restriction"; otherwise an HSP must
        overlap the range by at least `min_overlap` positions. Returns an
        empty BlastHsp() when nothing qualifies.
        """
        bhsp = BlastHsp()
        for hsp in self.hsps:
            if positive_subject_strand_only and hsp.subject_strand == "-":
                continue
            if query_range == (0, 0):
                if bhsp < hsp:
                    bhsp = hsp
            else:
                max_start = max(hsp.query_start, query_range[0])
                min_end = min(hsp.query_end, query_range[1])
                overlap_len = min_end - max_start + 1
                if bhsp < hsp and overlap_len >= min_overlap:
                    bhsp = hsp
        return bhsp

    def get_max_hsp(self, query_range=(0, 0), min_overlap=1, positive_subject_strand_only=False):
        """Return (query_start, query_end) spanning all qualifying HSPs.

        Returns (0, 0) when no HSP qualifies. Filtering rules match
        get_best_hsp().
        """
        query_start, query_end = 0, 0
        for hsp in self.hsps:
            if positive_subject_strand_only and hsp.subject_strand == "-":
                continue
            max_start = max(hsp.query_start, query_range[0])
            min_end = min(hsp.query_end, query_range[1])
            overlap_len = min_end - max_start + 1
            if query_range == (0, 0) or overlap_len >= min_overlap:
                if query_start == 0 and query_end == 0:
                    # First qualifying HSP initialises the span.
                    query_start = hsp.query_start
                    query_end = hsp.query_end
                else:
                    query_start = min(query_start, hsp.query_start)
                    query_end = max(query_end, hsp.query_end)
        return query_start, query_end

    def __str__(self):
        """Return 'subject_id, subject_length' followed by one HSP per line."""
        hsps = '\n'.join(list(str(i) for i in self.hsps))
        return "{}, {}\n".format(self.subject_id, self.subject_length) + hsps
|
/home/runner/.cache/pip/pool/8e/09/c7/ec221da008709d8392a64112be1ac9d72af94cbd99639a238d2cd1319a |
from bottle import (route, get, post, run, static_file, default_app,
jinja2_template as template)
# Expose app for wsgi servers (e.g. gunicorn: `gunicorn module:app`)
app = default_app()
# Routings
@get('/')
def home():
    """Render the home page template with a greeting message."""
    context = {'message': 'Hello bottle!'}
    return template('home', context)
@get('/robots.txt')
def robots_txt():
    """Serve robots.txt from the static directory at the site root."""
    return static_file('robots.txt', root='static')
@get('/static/<filepath:path>')
def static(filepath):
    """Serve any file below the static directory."""
    return static_file(filepath, root='static')
# Run server for development only: debug pages and auto-reload enabled,
# bound to localhost (use the wsgi `app` object in production).
if __name__ == '__main__':
    run(host='127.0.0.1', port=3000, debug=True, reloader=True)
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import webbrowser
import os
import time
import list_extractor as liex
from collections import Counter
import d_netflix_gui
from tkinter import *
import tkinter.ttk
from PIL import ImageTk, Image
import tkinter.messagebox
def on_entry_click1(event):
    """Remove the username placeholder when the entry is first clicked."""
    if e1.get() != 'Username...':
        return
    e1.delete(0, "end")    # drop the placeholder text
    e1.insert(0, '')       # leave the field empty for user input
    e1.config(fg = 'black')  # switch from placeholder grey to input black
def on_focusout1(event):
    """Restore the greyed-out username placeholder if the field is left empty."""
    if not e1.get():
        e1.insert(0, 'Username...')
        e1.config(fg = 'grey')
def on_entry_click2(event):
    """Remove the password placeholder when the entry is first clicked."""
    if e2.get() != 'Password...':
        return
    e2.delete(0, "end")    # drop the placeholder text
    e2.insert(0, '')       # leave the field empty for user input
    e2.config(fg = 'black')  # switch from placeholder grey to input black
def on_focusout2(event):
    """Restore the greyed-out password placeholder if the field is left empty."""
    if not e2.get():
        e2.insert(0, 'Password...')
        e2.config(fg = 'grey')
def choice():
    """Client main menu window: WATCH / HISTORY / STATS / EXIT buttons.

    Reads the module-level globals `client_id` and `client_pass` set by
    login(); each client's history lives in clients/<id>_<pass>.txt.
    """
    def stats(self):
        """Open the STATS window offering frequency and thorough statistics."""
        def thorough(self):
            # Thorough stats: total films watched plus the three favourites.
            global client_id
            global client_pass
            def seen(self):
                root4.destroy()
            root4=Tk()
            l1=Label(root4, text="Client id: "+client_id+"\t\t"+"Client Password: "+client_pass+'\t\t'+"DateTime: "+time.ctime(), fg="blue")
            l1.grid(row=0, column=0, padx=10, pady=10)
            f = open("clients/"+client_id+'_'+client_pass+'.txt', "r")
            ff = f.readlines()
            f.close()
            l2=Label(root4, text="Total "+str(len(ff))+" films have been watched so far.", fg="green yellow")
            l2.grid(row=1, column=0, padx=10, pady=10)
            # Each history line is "<datetime>\t <title>"; column 1 is the title.
            text = [x.split('\t') for x in ff]
            frequent = [x[1].replace('\n', '') for x in text]
            counter = Counter(frequent).most_common(3)
            l3=Label(root4, text=client_id+" favourate films:", fg="gold")
            l3.grid(row=2, column=0)
            count3=3
            for x in counter:
                l4=Label(root4, text=x[0])
                l4.grid(row=count3, column=0)
                count3+=1
            b1=Button(root4, text="CLOSE", fg="red", bg="black")
            b1.grid(row=count3, column=0)
            b1.bind("<Button-1>", seen)
            root4.bind("<Return>", seen)
        def frequency(self):
            # Watch-frequency view: how many times each film was watched.
            def seen(self):
                root3.destroy()
            root3=Tk()
            global client_id
            global client_pass
            file = open("clients/"+client_id+'_'+client_pass+'.txt', "r")
            file_text = file.readlines()
            file.close()
            text = [x.split('\t') for x in file_text]
            frequent = [x[1].replace('\n', '') for x in text]
            l1=Label(root3, text="Frequency\t Film:::")
            l1.grid(row=0, column=0)
            count2=1
            for x in Counter(frequent):
                l2=Label(root3, text=str(Counter(frequent)[x])+'\t\t'+x, fg="brown")
                l2.grid(row=count2, column=0)
                count2+=1
            b1=Button(root3, text="CLOSE", fg="red", bg="black")
            b1.bind("<Button-1>", seen)
            b1.grid(row=count2, column=0, columnspan=2)
            root3.bind("<Return>", seen)
        root2=Tk()
        root2.title("^_^FILM STATS^_^")
        root2.geometry("400x60")
        b1=Button(root2, text="WATCH FREQUENCY", fg="green", bg="CadetBlue1")
        b1.pack(fill=BOTH)
        b1.bind("<Button-1>", frequency)
        b2=Button(root2, text="THOROUGH STATS", fg="green", bg="CadetBlue1")
        b2.pack(fill=BOTH)
        b2.bind("<Button-1>", thorough)
    def history(self):
        """Show the client's full watch history, newest entries first."""
        global client_id
        global client_pass
        def seen(self):
            root2.destroy()
        file = open("clients/"+client_id+'_'+client_pass+'.txt', "r")
        file_text = file.readlines()
        file.close()
        file_text.reverse()
        root2=Tk()
        root2.title("HISTORY")
        l1=Label(root2, text="DateTime \tFilm:::")
        l1.grid(row=0, column=0)
        count=1
        for line in file_text:
            l2=Label(root2, text=line, fg="brown")
            l2.grid(row=count, column=0)
            count+=1
        b1 = Button(root2, text="CLOSE", fg="red", bg="black", relief="groove")
        b1.grid(row=count, column=0, columnspan=2)
        b1.bind("<Button-1>", seen)
        root2.bind("<Return>", seen)
    def watch(self):
        """Prompt for a film title; open it in the browser and log the view."""
        def see(self):
            global client_id
            global client_pass
            title=e1.get()
            root2.geometry("450x250")
            # Film catalogue: each cleaned row is (url, title, ...).
            file = open('dsap_92det.txt', "r")
            file_text = file.readlines()
            file.close()
            file_r_text = liex.cleaner(file_text)
            for line in file_r_text:
                if line[1]==title:
                    # Log the view in the client's own file and the global log.
                    file = open("clients/"+client_id+'_'+client_pass+'.txt', "a+")
                    file.write(time.ctime()+'\t '+title+'\n')
                    collect = open("collective.txt", "a+")
                    collect.write(time.ctime()+'\t '+title+'\n')
                    collect.close()
                    file.close()
                    webbrowser.open(line[0])
                    root2.destroy()
                    break
            else:
                # for/else: no matching title found -- re-open the prompt.
                tkinter.messagebox.showinfo("Film Not Present", title+" is not present")
                root2.destroy()
                watch(self)
        root2 = Tk()
        # NOTE(review): this titles the main-menu window, not the new
        # root2 window -- `root2.title("FILM TIME")` looks intended; confirm.
        root.title("FILM TIME")
        l1 = Label(root2, text="TITLE", padx=10, pady=10)
        l1.grid(row=0, column=0)
        e1 = Entry(root2, width=20)
        e1.grid(row=0, column=1, columnspan=2)
        e1.focus_set()
        b1 = Button(root2, text="Lit", fg="red", bd=1, padx=10, pady=10)
        b1.grid(row=1, column=0, rowspan=2, columnspan=2)
        b1.bind("<Button-1>", see)
        root2.bind("<Return>", see)
    root=Tk()
    root.title("CLIENT MAIN-MENU")
    def seen(self):
        # EXIT: close the menu and return to the greeter.
        root.destroy()
        d_netflix_gui.greet()
    img = ImageTk.PhotoImage(Image.open("watch1.png"))
    #b1 = Button(root, text="WATCH", bg="dark violet", fg="snow", cursor="mouse", relief="raised", command=watch)
    b1 = Button(root, image=img, cursor="mouse", relief="raised", padx=10, pady=20)
    b1.bind("<Button-1>", watch)
    b1.image=img  # keep a reference so Tk does not garbage-collect the image
    b1.grid(row=0, column=0)
    #b2 = Button(root, text="HISTORY", bg="dark violet", fg="snow", cursor="mouse", relief="raised")
    img = ImageTk.PhotoImage(Image.open("history1.png"))
    b2 = Button(root, image=img, cursor="mouse", relief="raised", padx=10, pady=20)
    b2.bind("<Button-1>", history)
    b2.image=img
    b2.grid(row=1, column=0)
    #b3 = Button(root, text="STATS", bg="dark violet", fg="snow", cursor="mouse", relief="raised")
    img = ImageTk.PhotoImage(Image.open("stats1.png"))
    b3 = Button(root, image=img, cursor="mouse", relief="raised", padx=10, pady=20)
    b3.bind("<Button-1>", stats)
    b3.image=img
    b3.grid(row=2, column=0)
    img = ImageTk.PhotoImage(Image.open("exit1.png"))
    #b4 = Button(root, text="EXIT CLIENT", bg="dark violet", fg="snow", cursor="mouse", relief="raised", command=turn_back)
    b4 = Button(root, image=img, cursor="mouse", relief="raised", padx=10, pady=20)
    b4.bind("<Button-1>", seen)
    b4.image=img
    b4.grid(row=3, column=0)
def login():
    """Client login window.

    Credentials are valid when a file named "<id>_<pass>.txt" exists in
    the clients/ directory; success destroys this window and opens the
    main menu via choice(). Stores the Entry widgets in module-level
    globals e1/e2 so the placeholder handlers can reach them.
    """
    root = Tk()
    root.title("Client Login")
    l1 = Label(root, text="NAME", fg="goldenrod", font ="Purisa")
    l1.grid(row=0, stick=W)
    l2 = Label(root, text="PASS", fg="goldenrod", font ="Purisa")
    l2.grid(row=1, stick=W, columnspan=1)
    global e1
    global e2
    e1 = Entry(root)
    e1.insert(0, 'Username...')
    e1.bind('<FocusIn>', on_entry_click1)
    e1.bind('<FocusOut>', on_focusout1)
    e1.config(fg = 'grey')
    e1.grid(row=0, column=1)
    e1.focus_set()
    e2 = Entry(root)
    e2.insert(0, 'Password...')
    e2.bind('<FocusIn>', on_entry_click2)
    e2.bind('<FocusOut>', on_focusout2)
    e2.config(fg = 'grey')
    e2.grid(row=1, column=1)
    e2.focus_set()
    def login2(self):
        # Validate: look for "<id>_<pass>.txt" among the client files.
        global client_id
        global client_pass
        client_id = e1.get()
        client_pass = e2.get()
        flag = 1
        for file in os.listdir("clients"):
            if file == client_id+'_'+client_pass+'.txt':
                l3=Label(root, text="Welcome "+client_id, fg="cyan", font ="Purisa")
                l3.grid(row=3)
                flag=0
                root.destroy()
                choice()
        if flag:
            l4=Label(root, text="Invalid credentials!", fg="gray1", font ="Purisa")
            l4.grid(row=3)
    b1 = Button(root, text="LOGIN", bg="RoyalBlue1", fg="red", cursor="man", relief="groove")
    b1.bind('<Button-1>', login2)
    root.bind('<Return>', login2)
    b1.grid(columnspan=2)
    logo=Label(root, text="DN", font=("Symbol", 20), fg="red4", borderwidth=5, relief="groove")
    logo.grid(row=0, column=2, rowspan=2, columnspan=2, ipadx=5, ipady=5, padx=13, pady=13)
    root.mainloop()
def start():
    """Entry point: open the client login window."""
    login()
# Launch the client GUI when run as a script.
if __name__ == "__main__":
    start()
|
from setuptools import setup, find_packages
from botrecon import __version__
# The PyPI long description comes straight from the README.
with open('README.md', 'r') as fp:
    LONG_DESCRIPTION = fp.read()

setup(
    name='botrecon',
    version=__version__,
    # NOTE: adjacent string literals concatenate with no separator; the
    # trailing space is required (the original rendered "withbotnets").
    description=('Botrecon uses machine learning to locate hosts infected with '
                 'botnets in your network.'),
    long_description_content_type='text/markdown',
    long_description=LONG_DESCRIPTION,
    packages=find_packages(),
    package_data={'botrecon': ['models/*']},  # ship the bundled ML models
    include_package_data=True,
    python_requires='>=3.6',
    install_requires=[
        'Click',
        'pandas',
        'numpy',
        'scikit-learn==0.24.1'
    ],
    entry_points="""
[console_scripts]
botrecon = botrecon.cli:botrecon
""",
)
|
from gammapy.utils.registry import Registry
from .background import *
from .core import *
from .map import *
from .safe import *
from .spectrum import *
# Registry of every dataset/background "maker" class shipped with Gammapy.
MAKER_REGISTRY = Registry(
    [
        ReflectedRegionsBackgroundMaker,
        AdaptiveRingBackgroundMaker,
        FoVBackgroundMaker,
        PhaseBackgroundMaker,
        RingBackgroundMaker,
        SpectrumDatasetMaker,
        MapDatasetMaker,
        SafeMaskMaker,
    ]
)
"""Registry of maker classes in Gammapy."""
# Public API: the registry, the Maker base class, and every registered maker.
__all__ = ["MAKER_REGISTRY", "Maker"]
__all__.extend(cls.__name__ for cls in MAKER_REGISTRY)
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from utils.integrands.volume import HyperrectangleVolumeIntegrand
from utils.flat_integrals import validate_known_integrand_flat
# Sweep dimension and enclosed-volume fraction, validating the flat Monte
# Carlo integral of a hyperrectangle indicator in each configuration.
dimensions = range(2, 10, 2)
volume_fractions = np.linspace(0.001, 0.999, 100).tolist()
n_batch = 10000

# Collect one-row frames in a list and concatenate once at the end:
# pd.concat inside the loop copies the accumulated frame every iteration
# (quadratic in the number of configurations).
result_frames = []
for d in dimensions:
    for volume_fraction in volume_fractions:
        f = HyperrectangleVolumeIntegrand(d, frac=volume_fraction)
        result = validate_known_integrand_flat(f, d, n_batch)
        result["volume_fraction"] = volume_fraction
        result_frames.append(result.as_dataframe())
results = pd.concat(result_frames, ignore_index=True)

# Uncertainty of the integral estimate vs. enclosed volume fraction.
fig, ax = plt.subplots()
ax.scatter(results["volume_fraction"], results["value_std"])
ax.set_xlabel("Volume fraction")
ax.set_ylabel("Integral uncertainty")
plt.show()
print(f"We found {100 * results['match'].mean()}% of matching tests")
|
from .fangraphs_stats_base import FangraphsStatsBase
class FangraphsFieldingStats(FangraphsStatsBase):
    """Column-id constants for the Fangraphs fielding stats table.

    Each short attribute maps a stat abbreviation to the string id of its
    table column; the long-form names (e.g. ``PUT_OUTS``) are aliases bound
    to the same id. Entries marked ``# ?`` have unconfirmed meanings -- keep
    the question marks until verified against the live Fangraphs table.
    """
    COMMON = 'c'
    LINE_BREAK = '-1'
    NAME = '0'
    TEAM = '1'
    SEASON = '2'
    POS = '3'
    POSITION = POS
    G = '4'
    GAMES = G
    GS = '5'
    GAMES_STARTED = GS
    INN = '6'
    INNINGS = INN
    PO = '7'
    PUT_OUTS = PO
    A = '8'
    ASSISTS = A
    E = '9'
    ERRORS = E
    FE = '10'
    FORCED_ERRORS = FE
    TE = '11'  # ?
    DP = '12'  # ?
    DPS = '13'  # ?
    DPT = '14'  # ?
    DPF = '15'  # ?
    SCP = '16'  # ?
    SB = '17'  # ?
    CS = '18'  # ?
    PB = '19'  # ?
    WP = '20'  # ?
    FP = '21'
    FIELDING_PCT = FP
    TZ = '22'  # ?
    RSB = '23'  # ?
    RGDP = '24'  # ?
    RARM = '25'  # ?
    RGFP = '26'  # ?
    RPM = '27'  # ?
    DRS = '28'  # ?
    BIZ = '29'  # ?
    PLAYS = '30'
    RZR = '31'  # ?
    OOZ = '32'  # ?
    TZL = '33'  # ?
    FSR = '34'  # ?
    ARM = '35'  # ?
    DPR = '36'  # ?
    RNGR = '37'  # ?
    ERRR = '38'  # ?
    UZR = '39'
    ULTIMATE_ZONE_RATING = UZR
    UZR_150 = '40'  # UZR/150
    ULTIMATE_ZONE_RATING_PER_150 = UZR_150
    CPP = '41'  # ?
    RPP = '42'  # ?
    DEF = '43'  # ?
    ZERO_PCT = '44'  # 0%
    MADE_ZERO_PCT = ZERO_PCT
    NUMBER_ZERO_PCT = '45'  # '# 0%'
    PLAYS_ZERO_PCT = NUMBER_ZERO_PCT
    ONE_TO_TEN_PCT = '46'  # 1-10%
    MADE_ONE_TO_TEN_PCT = ONE_TO_TEN_PCT
    NUMBER_ONE_TO_TEN_PCT = '47'  # '# 1-10%'
    PLAYS_ONE_TO_TEN_PCT = NUMBER_ONE_TO_TEN_PCT
    TEN_TO_FORTY_PCT = '48'  # 10-40%
    MADE_TEN_TO_FORTY_PCT = TEN_TO_FORTY_PCT
    NUMBER_TEN_TO_FORTY_PCT = '49'  # '# 10-40%'
    PLAYS_TEN_TO_FORTY_PCT = NUMBER_TEN_TO_FORTY_PCT
    FORTY_TO_SIXTY_PCT = '50'  # 40-60%
    MADE_FORTY_TO_SIXTY_PCT = FORTY_TO_SIXTY_PCT
    NUMBER_FORTY_TO_SIXTY_PCT = '51'  # '# 40-60%'
    PLAYS_FORTY_TO_SIXTY_PCT = NUMBER_FORTY_TO_SIXTY_PCT
    SIXTY_TO_NINETY_PCT = '52'  # 60-90%
    MADE_SIXTY_TO_NINETY_PCT = SIXTY_TO_NINETY_PCT
    NUMBER_SIXTY_TO_NINETY_PCT = '53'  # '# 60-90%'
    PLAYS_SIXTY_TO_NINETY_PCT = NUMBER_SIXTY_TO_NINETY_PCT
    NINETY_TO_ONE_HUNDRED_PCT = '54'  # 90-100%
    MADE_NINETY_TO_ONE_HUNDRED_PCT = NINETY_TO_ONE_HUNDRED_PCT
    NUMBER_NINETY_TO_ONE_HUNDRED_PCT = '55'  # '# 90-100%'
    PLAYS_NINETY_TO_ONE_HUNDRED_PCT = NUMBER_NINETY_TO_ONE_HUNDRED_PCT
    RSZ = '56'  # ?
    RCERA = '57'  # ?
    RTS = '58'  # ?
    FRM = '59'
    FRAMING = FRM
|
# MIT License
#
# Copyright (c) 2018 Jared Gillespie
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
class ProxyScrapeBaseException(Exception):
    """Base exception for all errors raised by the proxyscrape library."""
class CollectorAlreadyDefinedError(ProxyScrapeBaseException):
    """Raised when a collector with the same name is already defined."""
class CollectorNotFoundError(ProxyScrapeBaseException):
    """Raised when a requested collector cannot be found."""
class InvalidFilterOptionError(ProxyScrapeBaseException):
    """Raised when an invalid filter option is supplied."""
class InvalidHTMLError(ProxyScrapeBaseException):
    """Raised when retrieved HTML is invalid or cannot be parsed."""
class InvalidResourceError(ProxyScrapeBaseException):
    """Raised when an invalid resource is supplied."""
class InvalidResourceTypeError(ProxyScrapeBaseException):
    """Raised when an invalid resource type is supplied."""
class RequestNotOKError(ProxyScrapeBaseException):
    """Raised when an HTTP request completes with a non-OK status."""
class RequestFailedError(ProxyScrapeBaseException):
    """Raised when an HTTP request fails outright."""
class ResourceAlreadyDefinedError(ProxyScrapeBaseException):
    """Raised when a resource with the same name is already defined."""
class ResourceTypeAlreadyDefinedError(ProxyScrapeBaseException):
    """Raised when a resource type with the same name is already defined."""
|
# coding:utf-8
from unipath import Path
PROJECT_DIR = Path(__file__).parent
from decouple import config
import dj_database_url
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: don't run with debug turned on in production!
# DEBUG = config('DEBUG', default=False, cast=bool)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# DATABASES = {
# 'default': dj_database_url.config(
# default = config('DATABASE_URL'))
# }
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Local development database: SQLite file in the repository root.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# SECURITY WARNING: keep the secret key used in production secret!
# SECRET_KEY = config('SECRET_KEY')
# NOTE(review): a hard-coded secret key checked into source is a security
# risk -- restore the config('SECRET_KEY') lookup before deploying.
SECRET_KEY = '01asb8%!ma51=-%u4=q^or%!n^ol^1vpp+zy*@_uve2x*ayi@^'
ALLOWED_HOSTS = ['127.0.0.1']
# Custom project-specific settings (translated from: 自定义的变量)
REDIRECT_FIELD_NAME = 'next'
PAGINATOR_NUM = 5
AVATAR_IMAGE_WIDTH = 600
# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.humanize',
    'south',
    'depotwork.activities',
    'depotwork.articles',
    'depotwork.auth',
    'depotwork.core',
    'depotwork.feeds',
    'depotwork.messages',
    'depotwork.questions',
    'depotwork.search',
    # feature
    'depotwork.apps.asset',
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'depotwork.urls'
WSGI_APPLICATION = 'depotwork.wsgi.application'
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'zh-CN'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# USE_TZ = False
# LANGUAGES = (
# ('en', 'English'),
# ('pt-br', 'Portuguese'),
# ('es', 'Spanish'),
# ('zh-cn','Chinese')
# )
LOCALE_PATHS = (PROJECT_DIR.child('locale'), )
DATE_FORMAT = 'Y/m/d'
DATETIME_FORMAT = u'Y年m月d日 H:m:s'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_ROOT = PROJECT_DIR.parent.child('staticfile')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
    PROJECT_DIR.child('static'),
)
MEDIA_ROOT = PROJECT_DIR.parent.child('media')
MEDIA_URL = '/media/'
TEMPLATE_DIRS = (
    PROJECT_DIR.child('templates'),
)
LOGIN_URL = '/'
LOGIN_REDIRECT_URL = '/feeds/'
ALLOWED_SIGNUP_DOMAINS = ['*']
FILE_UPLOAD_TEMP_DIR = '/tmp/'
# 0o644: owner read/write, group and world read. The original bare-octal
# literal (0644) is Python 2-only syntax and a SyntaxError on Python 3;
# 0o644 has the identical value and is valid on Python 2.6+ as well.
FILE_UPLOAD_PERMISSIONS = 0o644
class Car:
    """A simple car described by its color and door count."""

    # Shared class attribute: every car has four wheels.
    number_of_wheels = 4

    def __init__(self, color, number_of_doors):
        """Store the car's color and its number of doors."""
        self.color = color
        self.number_of_doors = number_of_doors

    def honk(self):
        """Sound the horn."""
        print('Honk!')

    def print_characteristics(self):
        """Print the color and door count, space-separated, on one line."""
        print(self.color, self.number_of_doors)
#
# mazda = Car('red', 4)
# #
# subaru = Car('blue', 2)
|
#!/usr/bin/env python
import os

# Toggle code analysis of scanned files; the "deep" variant is presumably a
# slower, more thorough pass -- TODO confirm semantics against the consumers.
CODE_ANALYSIS = True
DEEP_CODE_ANALYSIS = False
# Timeout for config analysis; 0 looks like "no timeout" -- TODO confirm.
CONFIG_ANALYSIS_TIMEOUT = 0

# os.path.join is portable and, unlike the previous manual os.sep
# concatenation, stays correct when dirname(__file__) is empty (relative
# invocation), where the old code produced an absolute "/plugins" path.
_BASE_DIR = os.path.dirname(__file__)
PLUGINS_PATH = os.path.join(_BASE_DIR, "plugins")
DATABASE_PATH = os.path.join(_BASE_DIR, "files.sqlite")
|
def clamp(val, minVal, maxVal) -> float:
    """Clamp *val* into the inclusive range [minVal, maxVal]."""
    return min(maxVal, max(minVal, val))
def rescale(n, range1, range2):
    """Linearly map *n* from the interval range1 onto the interval range2.

    Each range is a (low, high) pair; n == range1[0] maps to range2[0] and
    n == range1[1] maps to range2[1].
    """
    src_lo, src_hi = range1
    dst_lo, dst_hi = range2
    # Same operation order as the classic delta formulation, so the float
    # result is bit-for-bit identical.
    return (dst_hi - dst_lo) * (n - src_lo) / (src_hi - src_lo) + dst_lo
import errno
import os
import socket
import subprocess
import tempfile
import time
import logging
from collections import namedtuple
# External modules
import paramiko
# Flintrock modules
from .util import get_subprocess_env
from .exceptions import SSHError
# Key pair as (public, private) key text. NOTE(review): the tuple typename
# 'KeyPair' differs from the variable name; left as-is for compatibility.
SSHKeyPair = namedtuple('KeyPair', ['public', 'private'])
logger = logging.getLogger('flintrock.ssh')
def generate_ssh_key_pair() -> SSHKeyPair:
    """
    Generate an SSH key pair that the cluster can use for intra-cluster
    communication.

    Shells out to the local ``ssh-keygen`` binary inside a temporary
    directory (removed before returning) and returns the key material as an
    ``SSHKeyPair`` of strings.
    """
    with tempfile.TemporaryDirectory() as tempdir:
        subprocess.check_call(
            [
                'ssh-keygen',
                '-q',
                '-t', 'rsa',
                '-N', '',  # empty passphrase
                '-f', os.path.join(tempdir, 'flintrock_rsa'),
                '-C', 'flintrock',
            ],
            env=get_subprocess_env(),
        )

        with open(file=os.path.join(tempdir, 'flintrock_rsa')) as private_key_file:
            private_key = private_key_file.read()

        with open(file=os.path.join(tempdir, 'flintrock_rsa.pub')) as public_key_file:
            public_key = public_key_file.read()

    # Use the module-level SSHKeyPair type the signature promises, instead of
    # re-declaring an anonymous namedtuple on every call as the original did
    # (which defeated isinstance checks against SSHKeyPair).
    return SSHKeyPair(public=public_key, private=private_key)
def get_ssh_client(
        *,
        user: str,
        host: str,
        identity_file: str,
        wait: bool=False,
        print_status: bool=None) -> paramiko.client.SSHClient:
    """
    Get an SSH client for the provided host, waiting as necessary for SSH to become
    available.

    Parameters:
        user: remote username to authenticate as.
        host: hostname or IP address to connect to.
        identity_file: path to the private key used for authentication.
        wait: if True, retry (up to 100 attempts, ~5s apart) until SSH comes
            up; if False, make a single attempt.
        print_status: log a line once the host is reachable; defaults to the
            value of ``wait``.

    Raises:
        SSHError: on SSH protocol failure, or when every attempt fails.
    """
    if print_status is None:
        print_status = wait
    client = paramiko.client.SSHClient()
    client.load_system_host_keys()
    # Auto-accept unknown host keys: freshly launched cluster nodes have no
    # prior known_hosts entry to verify against.
    client.set_missing_host_key_policy(paramiko.client.AutoAddPolicy())
    if wait:
        tries = 100
    else:
        tries = 1
    while tries > 0:
        try:
            tries -= 1
            client.connect(
                username=user,
                hostname=host,
                key_filename=identity_file,
                look_for_keys=False,
                timeout=3)
            if print_status:
                logger.info("[{h}] SSH online.".format(h=host))
            break
        # The next three handlers cover transient failures while the host
        # boots: log, sleep, and let the loop retry.
        except socket.timeout as e:
            logger.debug("[{h}] SSH timeout.".format(h=host))
            time.sleep(5)
        except paramiko.ssh_exception.NoValidConnectionsError as e:
            # Only "connection refused" is expected during boot; anything
            # else is a real problem and is re-raised.
            if any(error.errno != errno.ECONNREFUSED for error in e.errors.values()):
                raise
            logger.debug("[{h}] SSH exception: {e}".format(h=host, e=e))
            time.sleep(5)
        # We get this exception during startup with CentOS but not Amazon Linux,
        # for some reason.
        except paramiko.ssh_exception.AuthenticationException as e:
            logger.debug("[{h}] SSH AuthenticationException.".format(h=host))
            time.sleep(5)
        except paramiko.ssh_exception.SSHException as e:
            raise SSHError(
                host=host,
                message="SSH protocol error. Possible causes include using "
                        "the wrong key file or username.",
            ) from e
    else:
        # while/else: reached only when the loop exhausts without `break`,
        # i.e. every connection attempt failed.
        raise SSHError(
            host=host,
            message="Could not connect via SSH.")
    return client
def ssh_check_output(
        client: paramiko.client.SSHClient,
        command: str,
        timeout_seconds: int=None,
):
    """
    Run a command via the provided SSH client and return the output captured
    on stdout.

    Raise an exception if the command returns a non-zero code.

    Parameters:
        client: an already-connected paramiko SSH client.
        command: the shell command to execute remotely.
        timeout_seconds: per-read channel timeout; None blocks indefinitely.

    Raises:
        SSHError: if the remote command exits non-zero; the message contains
            the combined (order-lossy) stdout and stderr output.
    """
    stdin, stdout, stderr = client.exec_command(
        command,
        get_pty=True,
        timeout=timeout_seconds)
    # NOTE: Paramiko doesn't clearly document this, but we must read() before
    # calling recv_exit_status().
    # See: https://github.com/paramiko/paramiko/issues/448#issuecomment-159481997
    stdout_output = stdout.read().decode('utf8').rstrip('\n')
    stderr_output = stderr.read().decode('utf8').rstrip('\n')
    exit_status = stdout.channel.recv_exit_status()
    if exit_status:
        # TODO: Return a custom exception that includes the return code.
        # See: https://docs.python.org/3/library/subprocess.html#subprocess.check_output
        # NOTE: We are losing the output order here since output from stdout and stderr
        # may be interleaved.
        raise SSHError(
            host=client.get_transport().getpeername()[0],
            message=stdout_output + stderr_output)
    return stdout_output
def ssh(*, user: str, host: str, identity_file: str):
    """
    SSH into a host for interactive use.

    Spawns the system ``ssh`` binary in the foreground and returns when the
    interactive session ends.
    """
    target = '{u}@{h}'.format(u=user, h=host)
    argv = [
        'ssh',
        '-o', 'StrictHostKeyChecking=no',
        '-i', identity_file,
        target,
    ]
    subprocess.call(argv, env=get_subprocess_env())
|
import os

# Create the directory if it is missing. exist_ok=True makes this a single
# race-free call, removing the TOCTOU window between the original
# os.path.exists() check and os.makedirs().
os.makedirs('testdir', exist_ok=True)
|
"""auth module"""
|
# Copyright 2014-2015, Tresys Technology, LLC
#
# This file is part of SETools.
#
# SETools is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1 of
# the License, or (at your option) any later version.
#
# SETools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SETools. If not, see
# <http://www.gnu.org/licenses/>.
#
import logging
import re
from .descriptors import CriteriaSetDescriptor
from .mixins import MatchAlias, MatchName
from .query import PolicyQuery
from .util import match_regex_or_set
class TypeQuery(MatchAlias, MatchName, PolicyQuery):

    """
    Query SELinux policy types.

    Parameter:
    policy          The policy to query.

    Keyword Parameters/Class attributes:
    name            The type name to match.
    name_regex      If true, regular expression matching
                    will be used on the type names.
    alias           The alias name to match.
    alias_regex     If true, regular expression matching
                    will be used on the alias names.
    attrs           The attribute to match.
    attrs_equal     If true, only types whose attribute set
                    equals the criteria will match; otherwise
                    any intersection matches.
    attrs_regex     If true, regular expression matching is
                    used on the attribute names instead of
                    set logic.
    permissive      The permissive state to match; None means
                    the state is not matched.
    """

    attrs = CriteriaSetDescriptor("attrs_regex", "lookup_typeattr")
    attrs_regex = False
    attrs_equal = False
    _permissive = None

    @property
    def permissive(self):
        return self._permissive

    @permissive.setter
    def permissive(self, value):
        # Normalize to None or a plain bool.
        self._permissive = None if value is None else bool(value)

    def __init__(self, policy, **kwargs):
        super(TypeQuery, self).__init__(policy, **kwargs)
        self.log = logging.getLogger(__name__)

    def _type_matches(self, candidate):
        """Return True when *candidate* satisfies every active criterion."""
        # Criteria are checked in the same order as the original loop:
        # name, alias, attributes, then permissive state.
        if not self._match_name(candidate):
            return False
        if not self._match_alias(candidate):
            return False
        if self.attrs and not match_regex_or_set(
                set(candidate.attributes()),
                self.attrs,
                self.attrs_equal,
                self.attrs_regex):
            return False
        if self.permissive is not None and candidate.ispermissive != self.permissive:
            return False
        return True

    def results(self):
        """Generator which yields all matching types."""
        self.log.info("Generating type results from {0.policy}".format(self))
        self._match_name_debug(self.log)
        self._match_alias_debug(self.log)
        self.log.debug("Attrs: {0.attrs!r}, regex: {0.attrs_regex}, "
                       "eq: {0.attrs_equal}".format(self))
        self.log.debug("Permissive: {0.permissive}".format(self))

        for candidate in self.policy.types():
            if self._type_matches(candidate):
                yield candidate
|
# You may change this function parameters
def findMaxProfit(predictedSharePrices):
    """Return the best profit from one buy followed by one later sell.

    Single pass: track the cheapest price seen so far and the best
    sell-minus-buy difference. Returns 0 when no profitable transaction
    exists, and now also returns 0 for lists with fewer than two prices,
    where the original's index-based seeding raised IndexError.
    """
    # Guard: with fewer than two prices no transaction is possible.
    if len(predictedSharePrices) < 2:
        return 0
    maks = predictedSharePrices[1] - predictedSharePrices[0]
    minimum = predictedSharePrices[0]
    for i in predictedSharePrices:
        if i < minimum:
            minimum = i
        elif i - minimum > maks:
            maks = i - minimum
    return maks
def main():
    """Read "<numDays> <price1> ... <priceN>" from stdin and print the max profit."""
    line = input().split()
    numOfPredictedDay = int(line[0])  # declared day count; not used below
    predictedSharePrices = list(map(int, line[1:]))
    answer = findMaxProfit(predictedSharePrices)
    # Please do not remove the below line.
    print(answer)
# Do not print anything after this line
if __name__ == '__main__':
    main()
|
import matplotlib.pyplot as plt
import numpy as np
from structures import RingRoad
def extension_three(max_sigma_pct, granularity, show_fig=True):
    """
    Runs an experiment investigating the role of sensor noise on AV and system performance

    Args:
        max_sigma_pct: upper bound of the noise sweep, in percent (divided
            by 100 below before use).
        granularity: sweep step size, in percent.
        show_fig: if True, display each plot interactively in addition to
            saving it under ../outputs/.
    """
    # Convert percent arguments to fractions for the sweep.
    max_sigma_pct = max_sigma_pct / 100
    granularity = granularity / 100
    sigma_pcts = np.arange(0., max_sigma_pct, granularity)
    mean_system_vels = []  # mean velocity across all vehicles and all timesteps after AV activation
    variance_across_vehicles = []  # (mean is taken across 22 std devs -- one std dev from each vehicle)
    variance_across_timesteps = []  # (mean is taken across all timesteps -- one std dev from each timestep)
    print(f"Running {len(sigma_pcts)} simulations")
    counter = 0
    for sigma_pct in sigma_pcts:
        # Increment counter
        counter += 1
        if counter % 20 == 0:
            print(f'On simulation #{counter}')
        # Build env
        env = RingRoad(
            num_vehicles=22,  # The vechicles at index 0 is an A.V.
            ring_length=230.0,  # The road is a cicle.
            starting_noise=4.0,  # Uniformly add noise to starting positions.
            temporal_res=0.3,  # Set the size of simulation steps (seconds).
            av_activate=30,  # Set when the PID controller is activated.
            seed=286,  # Set a random seed.
            uncertain_avs=True,
            sigma_pct=sigma_pct
        )
        # Run each simulation for set number of time steps:
        total_time = 50  # In seconds.
        total_steps = int(np.ceil(total_time / env.dt))
        env.run(steps=total_steps)
        # Collect and system metrics for plotting:
        steps_after = range(env.av_activate, env.step)
        speeds_after = env.get_vehicle_vel_table(steps_after)
        # Store std dev and mean of each vehicle's velocity averaged across all vehicles after controller was activated
        mean_system_vels.append(speeds_after.mean(axis=0).mean())  # mean velocity across all vehicles and all timesteps after AV activation
        variance_across_vehicles.append(speeds_after.std(axis=0).mean())  # (mean is taken across 22 std devs -- one std dev from each vehicle)
        variance_across_timesteps.append(speeds_after.std(axis=1).mean())  # (mean is taken across all timesteps -- one std dev from each timestep)
    # Plot results
    # (x axis back in percent for readability)
    sigma_pcts = sigma_pcts * 100
    plt.plot(sigma_pcts, mean_system_vels)
    plt.xlabel("Sensor Noise\n(as a % of $\Delta_x$ and lead vehicle velocity measurements)")
    plt.ylabel("Mean Velocity (after AV activation)\n across All Vehicles")
    plt.title("System's Mean Velocity\n as Uncertainty Increases")
    plt.savefig("../outputs/ext3-meanvel")
    if show_fig:
        plt.show()
    plt.close()
    plt.plot(sigma_pcts, variance_across_vehicles)
    plt.xlabel("Sensor Noise\n(as a % of $\Delta_x$ and lead vehicle velocity measurements)")
    plt.ylabel("Velocity Std. Dev. (after AV activation)\n across All Vehicles")
    plt.title("Mean across Individual Vehicle Velocity Std Devs\n as Uncertainty Increases")
    plt.savefig("../outputs/ext3-stddev1")
    if show_fig:
        plt.show()
    plt.close()
    plt.plot(sigma_pcts, variance_across_timesteps)
    plt.xlabel("Sensor Noise\n(as a % of $\Delta_x$ and lead vehicle velocity measurements)")
    plt.ylabel("Velocity Std. Dev. (after AV activation)\n across All Timesteps")
    plt.title("Mean across Individual Timestep System Velocity Std Devs\n as Uncertainty Increases")
    plt.savefig("../outputs/ext3-stddev2")
    if show_fig:
        plt.show()
    plt.close()
    if not show_fig:
        print("\nExperiment complete. See the ../outputs/ directory for resulting visualizations and plots.\n"
              "To display the visualizations as the code runs, ensure the 'show_fig' argument is set to True")
if __name__ == '__main__':
    import warnings
    plt.style.use('seaborn-darkgrid')
    warnings.filterwarnings("ignore", category=UserWarning)
    # 4000% max noise at 100% steps -> 40 simulations (values converted to
    # fractions inside extension_three).
    extension_three(max_sigma_pct=4000, granularity=100, show_fig=False)
|
# Household intervention integration tests. Some simple runs of the system with fixed seeds.
import copy
from typing import Tuple, List
import numpy.random
from collections import Counter
import pytest
from household_contact_tracing.network import EdgeType, NodeType, Network
from household_contact_tracing.branching_process_controller import BranchingProcessController
import household_contact_tracing.branching_process_models as bpm
from household_contact_tracing.branching_process_model import BranchingProcessModel
@pytest.fixture
def household_params() -> dict:
    """A set of default parameters which are the base for running tests."""
    params = {'outside_household_infectivity_scaling': 0.7,
              'overdispersion': 0.32,
              'asymptomatic_prob': 0.2,
              'asymptomatic_relative_infectivity': 0.35,
              'infection_reporting_prob': 0,
              'reduce_contacts_by': 0.3,
              'starting_infections': 1,
              'symptom_reporting_delay': 1,
              'incubation_period_delay': 5,
              'household_pairwise_survival_prob': 0.2,
              'contact_tracing_success_prob': 0,
              'test_before_propagate_tracing': False}
    # Deep copy so each test can freely mutate its own parameter dict.
    return copy.deepcopy(params)
def count_network(network: "Network") -> Tuple[Counter, Counter]:
    """Count the types of nodes and types of edges in the network.

    Returns a ``(node_counts, edge_counts)`` pair of collections.Counter
    keyed by node type and edge type respectively.
    """
    # Feed Counter directly from the iterables; the intermediate list
    # comprehensions in the original were unnecessary (flake8-comprehensions
    # C4xx). The annotation is quoted so it is not evaluated at import time.
    node_counts = Counter(node.node_type() for node in network.all_nodes())
    edge_counts = Counter(network.edge_types())
    return node_counts, edge_counts
class TestSimpleHousehold:
    """The first implementation of the contact tracing model uses household level contact tracing.
    This means that if a case is detected in a household, all members of the household will
    trace their contacts. When an individual is traced, their entire household goes into
    intervention.
    """

    @staticmethod
    def run_simulation(params: dict, days=10) -> BranchingProcessModel:
        """Run the Household model for 10 days with the given params and return the
        model."""
        controller = BranchingProcessController(bpm.HouseholdLevelTracing(params))
        controller.set_graphic_displays(False)
        controller.run_simulation(days)
        return controller.model

    @staticmethod
    def nodes_isolating_correctly(network: Network) -> List[bool]:
        """Check whether everyone whose household is isolated chooses to isolate."""
        isolating_correctly = []
        for node in network.all_nodes():
            if node.household.isolated:
                if node.isolated:
                    isolating_correctly.append(True)
                else:
                    isolating_correctly.append(False)
            else:
                isolating_correctly.append(True)
        return isolating_correctly

    @staticmethod
    def check_second_level_isolation(network: Network):
        """Check whether all households with a contact tracing index of 2 are isolated."""
        for household in network.all_households:
            if household.contact_tracing_index == 2:
                if household.isolated is False:
                    return False
        return True

    def test_no_isolation_no_reporting(self, household_params):
        """The most basic functionality of the model is to simulate a individual-household
        branching process model of SARS-CoV-2. This includes asymptomatic individuals but
        there is, no symptom reporting or self-intervention.
        Because household transmission is based on intervention, there is no household transmission
        either.
        """
        numpy.random.seed(42)
        network = self.run_simulation(household_params).network
        node_counts, edge_counts = count_network(network)
        # There should be some symptomatic nodes and some asymptomatic but no others.
        assert node_counts[NodeType.symptomatic_will_not_report_infection] > 0
        assert node_counts[NodeType.asymptomatic] > 0
        assert len(node_counts) == 2
        # There is no reporting so there can be no tracing, so all edges have the default type
        assert edge_counts[EdgeType.default] > 0
        assert len(edge_counts) == 1

    def test_reporting_and_isolation(self, household_params):
        """The infection reporting probability is now set to a non-zero value.
        This means that some individuals will develop symptoms, and report them, which initiates
        creation and intervention of the other household members. When a nodes household is isolated
        all the nodes inside are isolated and will not make outside household contacts."""
        # 50% of symptomatic individuals will report their symptoms, and self-isolate
        household_params['infection_reporting_prob'] = 0.5
        household_params['self_isolation_duration'] = 10
        numpy.random.seed(42)
        network = self.run_simulation(household_params).network
        node_counts, edge_counts = count_network(network)
        # Some should be asymptomatic, some should isolating, some should not report infection and
        # some should intend to report but not yet be isolating.
        assert node_counts[NodeType.isolated] > 0
        assert node_counts[NodeType.asymptomatic] > 0
        assert node_counts[NodeType.symptomatic_will_not_report_infection] > 0
        assert node_counts[NodeType.symptomatic_will_report_infection] > 0
        assert len(node_counts) == 4
        # There is reporting but all tracing fails. Now household members are created, infections
        # can be spread within households.
        assert edge_counts[EdgeType.default] > 0
        assert edge_counts[EdgeType.within_house] > 0
        assert edge_counts[EdgeType.failed_contact_tracing] > 0
        assert len(edge_counts) == 3

    def test_basic_tracing(self, household_params):
        """Contact tracing is now activated. This works at a household level on symptom onset.
        When an infection is discovered in a household, contact tracing attempts are made to all
        connected Households. When a household is reached, only the traced node isolates.
        If a node in a traced household develops symptoms, the whole household is isolated and
        contact tracing is again propagated. Being performed upon symptom onset means that
        testing is not performed."""
        household_params['infection_reporting_prob'] = 0.5
        household_params['self_isolation_duration'] = 10
        household_params['contact_tracing_success_prob'] = 1
        household_params['quarantine_duration'] = 10
        numpy.random.seed(39)
        network = self.run_simulation(household_params).network
        node_counts, edge_counts = count_network(network)
        # As before there are 4 possible node states
        assert node_counts[NodeType.isolated] > 0
        assert node_counts[NodeType.asymptomatic] > 0
        assert node_counts[NodeType.symptomatic_will_not_report_infection] > 0
        assert node_counts[NodeType.symptomatic_will_report_infection] > 0
        assert len(node_counts) == 4
        # The between house edge type is a result of successful contact tracing.
        assert edge_counts[EdgeType.default] > 0
        assert edge_counts[EdgeType.within_house] > 0
        assert edge_counts[EdgeType.between_house] > 0
        assert len(edge_counts) == 3
        # No intervention should expire by day 10 so all whose household is isolated should
        # be isolating.
        assert all(self.nodes_isolating_correctly(network))

    def test_simple_testing(self, household_params):
        """Simulate an epidemic, with household level contact tracing and testing delays.
        The same contact tracing strategy as before, but a test is required before contact tracing.
        The test is assumed to be 100% accurate, but has a delay associated.
        We don't consider the node or edge types since these should be the same as the previous
        test but the nodes should have a testing delay associated with them.
        """
        household_params['infection_reporting_prob'] = 0.5
        household_params['self_isolation_duration'] = 10
        household_params['contact_tracing_success_prob'] = 1
        household_params['quarantine_duration'] = 10
        household_params['test_before_propagate_tracing'] = True
        numpy.random.seed(42)
        network = self.run_simulation(household_params).network
        assert network.node(1).testing_delay != 0
        # No intervention should expire by day 10 so all whose household is isolated should
        # be isolating.
        assert all(self.nodes_isolating_correctly(network))

    def test_app_tracing(self, household_params):
        """
        We assign a proportion of the population to have digital contact tracing applications
        installed. If there is a contact tracing attempt between two nodes who both have the app
        installed, then we assume that the contact tracing attempt succeeds with 100% probability,
        and there is no contact tracing delay applied so it is instantaneous.
        """
        household_params['infection_reporting_prob'] = 0.5
        household_params['self_isolation_duration'] = 10
        household_params['contact_tracing_success_prob'] = 1
        household_params['quarantine_duration'] = 10
        household_params['prob_has_trace_app'] = 0.7
        numpy.random.seed(39)
        network = self.run_simulation(household_params).network
        node_counts, edge_counts = count_network(network)
        assert node_counts[NodeType.isolated] > 0
        assert node_counts[NodeType.asymptomatic] > 0
        assert node_counts[NodeType.symptomatic_will_not_report_infection] > 0
        assert node_counts[NodeType.symptomatic_will_report_infection] > 0
        assert len(node_counts) == 4
        # The app_traced edge type is a result of app tracing.
        assert edge_counts[EdgeType.default] > 0
        assert edge_counts[EdgeType.within_house] > 0
        assert edge_counts[EdgeType.between_house] > 0
        assert edge_counts[EdgeType.app_traced] > 0
        assert len(edge_counts) == 4
        # No intervention should expire by day 10 so all whose household is isolated should
        # be isolating.
        assert all(self.nodes_isolating_correctly(network))

    def test_non_uptake_of_isolation(self, household_params):
        """A percentage of people now refuse to take up intervention when traced."""
        household_params['infection_reporting_prob'] = 0.5
        household_params['self_isolation_duration'] = 10
        household_params['contact_tracing_success_prob'] = 1
        household_params['quarantine_duration'] = 10
        household_params['prob_has_trace_app'] = 0.7
        household_params['node_will_uptake_isolation_prob'] = 0.5
        numpy.random.seed(42)
        network = self.run_simulation(household_params).network
        # Some will choose not to isolate even though their household was instructed to.
        assert not(all(self.nodes_isolating_correctly(network)))
        # People who are isolated still should not infect others.

    def test_imperfect_isolation(self, household_params):
        """We now assume that some nodes do isolate or quarantine, but do it badly. An individual
        doing perfect intervention/quarantine will reduce their outside household contacts by 100%,
        an individual who is imperfectly isolating/quarantining will reduce their contacts by less
        than 100%."""
        household_params['infection_reporting_prob'] = 0.5
        household_params['self_isolation_duration'] = 10
        household_params['contact_tracing_success_prob'] = 1
        household_params['quarantine_duration'] = 10
        household_params['prob_has_trace_app'] = 0.7
        # now, 50% of nodes will isolate, but will do it badly
        household_params['propensity_imperfect_quarantine'] = 0.5
        # a node doing imperfect intervention will reduce their outside household contacts by 75%
        household_params['global_contact_reduction_imperfect_quarantine'] = 0.75
        numpy.random.seed(42)
        model = self.run_simulation(household_params)
        network = model.network
        node_imperfect = [node.propensity_imperfect_isolation for node in network.all_nodes()]
        assert any(node_imperfect)
        node_contact_rate_reduction = \
            [model.infection.contact_rate_reduction.get_contact_rate_reduction(node) for node in network.all_nodes()]
        # People who are isolating
        assert 1 in node_contact_rate_reduction
        # People who are imperfectly isolating
        assert 0.75 in node_contact_rate_reduction
        # People who are asymptomatic and just social distancing
        assert 0.3 in node_contact_rate_reduction

    def test_two_step_tracing(self, household_params):
        """In two step tracing people are contact traced if they met an infected person. The
        contacts of the contact traced person are then traced as well."""
        household_params['infection_reporting_prob'] = 0.5
        household_params['contact_tracing_success_prob'] = 1
        # NOTE(review): this flag looks like it should be True for a test of
        # two-step tracing -- TODO confirm against bpm.HouseholdLevelTracing.
        household_params['do_2_step'] = False
        numpy.random.seed(42)
        model = self.run_simulation(household_params, 15)
        network = model.network
        hh_idxs = [household.contact_tracing_index > 1 for household in network.all_households]
        # Assert that there are some households with a contact tracing index > 1
        assert any(hh_idxs)
        # All of the second level households should be isolating.
        # Fix: the original called the check but never asserted its result,
        # so the test passed vacuously regardless of the outcome.
        assert self.check_second_level_isolation(network)
@pytest.fixture()
def individual_params():
    """Default parameter set for the individual-level tracing tests."""
    base_params = {
        'outside_household_infectivity_scaling': 0.7,
        'contact_tracing_success_prob': 0.7,
        'asymptomatic_prob': 0.2,
        'asymptomatic_relative_infectivity': 0.35,
        'infection_reporting_prob': 0.5,
        'test_delay': 2,
        'reduce_contacts_by': 0.3,
        'quarantine_duration': 10,
        'number_of_days_to_trace_backwards': 2,
        'number_of_days_to_trace_forwards': 5
    }
    # Deep copy so each test can freely mutate its own parameter dict.
    return copy.deepcopy(base_params)
class TestIndividualTracing:
    """Individual level tracing is less thorough model of contract tracing than the household
    model but easier to implement in the real world. With individual level tracing, when an
    individual is infected, their contacts are traced and their household members are quarantined.
    However, contacts of household members are not traced unless they develop symptoms."""
    @staticmethod
    def run_simulation(params: dict, days=10) -> BranchingProcessModel:
        """Run the IndividualTracing model for 10 days with the given params and return the
        model."""
        controller = BranchingProcessController(bpm.IndividualLevelTracing(params))
        controller.set_graphic_displays(False)
        controller.run_simulation(days)
        return controller.model
    def test_simple_individual_model(self, individual_params: dict):
        """Run a basic implementation of the individual level tracing model."""
        numpy.random.seed(42)
        model = self.run_simulation(individual_params)
        network = model.network
        node_counts, edge_counts = count_network(network)
        # Some should be asymptomatic, some should isolating, some should not report infection and
        # some should intend to report but not yet be isolating.
        assert node_counts[NodeType.isolated] > 0
        assert node_counts[NodeType.asymptomatic] > 0
        assert node_counts[NodeType.symptomatic_will_not_report_infection] > 0
        assert node_counts[NodeType.symptomatic_will_report_infection] > 0
        assert len(node_counts) == 4
        # There is reporting and the default is that tracing always succeeds. Infections
        # can be spread within households.
        assert edge_counts[EdgeType.default] > 0
        assert edge_counts[EdgeType.within_house] > 0
        assert edge_counts[EdgeType.between_house] > 0
        assert len(edge_counts) == 3
@pytest.fixture
def daily_testing_params():
    """Baseline parameter set for the daily-testing tracing tests.

    Returned as a deep copy so each test gets an independent dict.
    """
    base = {
        "outside_household_infectivity_scaling": 0.3,
        "contact_tracing_success_prob": 0.7,
        "asymptomatic_prob": 0.2,
        "asymptomatic_relative_infectivity": 0.35,
        "infection_reporting_prob": 0.5,
        "reduce_contacts_by": 0.5,
        "starting_infections": 5,
        "self_isolation_duration": 10,
        "lateral_flow_testing_duration": 14,
    }
    return copy.deepcopy(base)
class TestIndividualTracingDailyTesting:
"""The individual level tracing can be extended with daily testing of contacts. Instead of
traced contacts quarantining, they take daily tests."""
@staticmethod
def run_simulation(params: dict, days=10) -> BranchingProcessModel:
"""Run the IndividualTracingDailyTesting model for 10 days with the given params and
return the model."""
controller = BranchingProcessController(bpm.IndividualTracingDailyTesting(params))
controller.set_graphic_displays(False)
controller.run_simulation(days)
return controller.model
@staticmethod
def prob_positive_pcr(time_relative_to_symptom_onset):
"""This function controls the sensitivity of the pcr test and prevents people testing
positive as soon as they are infected."""
if time_relative_to_symptom_onset in [4, 5, 6]:
return 0.75
else:
return 0
@staticmethod
def prob_positive_lfa(time_relative_to_symptom_onset):
"""This function controls the sensitivity of the lfa test. A value of 0 is unrealistic,
but it makes it easier to see nodes being lfa tested since they won't move to
the intervention status due to lfa testing."""
return 0
def test_simple_individual_model(self, daily_testing_params: dict):
"""Run the daily testing with with "no lfa testing only quarantine" policy. This means
that household contacts are quarantined, and are not lateral flow tested. Only those traced
via a between household contact tracing are lateral flow tested
(if they are not already isolating).
"""
numpy.random.seed(40)
daily_testing_params["household_positive_policy"] = "only_quarantine"
model = self.run_simulation(daily_testing_params, 15)
network = model.network
|
from tkinter import *
from opcua import Server
import socket
# Returns the primary LAN-facing IP address of this machine
def get_ip():
    """Return the primary outbound IPv4 address of this host as a string.

    "Connecting" a UDP socket to a public address makes the OS pick the
    routed local address, which getsockname() then reports. No packets are
    actually sent for a SOCK_DGRAM connect.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        s.connect(("8.8.8.8", 80))
        return str(s.getsockname()[0])
    finally:
        # The original version leaked the socket; always release it.
        s.close()
# Starts the OPC-UA server
class Servidor():
    """Minimal OPC-UA server wrapper.

    On construction it binds an endpoint on this machine's primary IP
    (port 4840), registers a 'Flexsim' namespace with one writable variable
    ('Estado 01', initial value 2) and starts the server immediately.
    """
    def __init__(self):
        # Endpoint address: primary LAN IP, default OPC-UA port.
        self.ip_OPC = get_ip()
        self.porta_OPC = '4840'
        self.servidor = Server()
        self.servidor.name = 'Hub - OPC_UA'
        self.servidor.set_endpoint(f'opc.tcp://{self.ip_OPC}:{self.porta_OPC}')
        # Application namespace and object tree.
        self.spaceNome = 'Flexsim'
        self.space = self.servidor.register_namespace(self.spaceNome)
        self.objetos = self.servidor.get_objects_node()
        self.grupo_objetos = self.objetos.add_object(self.space, 'Grupo Objetos')
        # Writable variable clients may update; initial value 2.
        self.estado01 = self.grupo_objetos.add_variable(self.space, 'Estado 01', 2)
        self.estado01.set_writable()
        # NOTE: the server starts as a side effect of construction.
        self.servidor.start()
#tela = Tk()
'''
def get_valor_variavel(self, tela, variavel):
    w1['text'] = str(self.estado01.get_value())
    tela.after(500,self.get_valor_variavel)
try:
    w1 = Label(tela)
    w1.pack()
    teste()
    tela.mainloop()
finally:
    self.servidor.stop()
'''
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.common import exception
from heat.common.i18n import _
from heat.engine import attributes
from heat.engine import properties
from heat.engine import resource
from heat.engine import support
class Host(resource.Resource):
    """A resource to manage Blazar hosts.
    Host resource manages the physical hosts for the lease/reservation
    within OpenStack.
    # TODO(asmita): Based on an agreement with Blazar team, this resource
    class does not support updating host resource as currently Blazar does
    not support to delete existing extra_capability keys while updating host.
    Also, in near future, when Blazar team will come up with a new alternative
    API to resolve this issue, we will need to modify this class.
    """
    support_status = support.SupportStatus(version='12.0.0')
    # Template property keys.
    PROPERTIES = (
        NAME, EXTRA_CAPABILITY,
    ) = (
        'name', 'extra_capability',
    )
    # Resolvable attribute keys, mirroring Blazar's host representation.
    ATTRIBUTES = (
        HYPERVISOR_HOSTNAME, HYPERVISOR_TYPE, HYPERVISOR_VERSION,
        VCPUS, CPU_INFO, MEMORY_MB, LOCAL_GB,
        SERVICE_NAME, RESERVABLE, STATUS, TRUST_ID,
        EXTRA_CAPABILITY_ATTR, CREATED_AT, UPDATED_AT,
    ) = (
        'hypervisor_hostname', 'hypervisor_type', 'hypervisor_version',
        'vcpus', 'cpu_info', 'memory_mb', 'local_gb',
        'service_name', 'reservable', 'status', 'trust_id',
        'extra_capability', 'created_at', 'updated_at',
    )
    properties_schema = {
        NAME: properties.Schema(
            properties.Schema.STRING,
            _('The name of the host.'),
            required=True,
        ),
        EXTRA_CAPABILITY: properties.Schema(
            properties.Schema.MAP,
            _('The extra capability of the host.'),
        )
    }
    attributes_schema = {
        HYPERVISOR_HOSTNAME: attributes.Schema(
            _('The hypervisor name of the host.'),
            type=attributes.Schema.STRING,
        ),
        HYPERVISOR_TYPE: attributes.Schema(
            _('The hypervisor type the host.'),
            type=attributes.Schema.STRING,
        ),
        HYPERVISOR_VERSION: attributes.Schema(
            _('The hypervisor version of the host.'),
            type=attributes.Schema.INTEGER,
        ),
        VCPUS: attributes.Schema(
            _('The number of the VCPUs of the host.'),
            type=attributes.Schema.INTEGER,
        ),
        CPU_INFO: attributes.Schema(
            _('Information of the CPU of the host.'),
            type=attributes.Schema.MAP,
        ),
        MEMORY_MB: attributes.Schema(
            _('Megabytes of the memory of the host.'),
            type=attributes.Schema.INTEGER,
        ),
        LOCAL_GB: attributes.Schema(
            _('Gigabytes of the disk of the host.'),
            type=attributes.Schema.INTEGER,
        ),
        SERVICE_NAME: attributes.Schema(
            _('The compute service name of the host.'),
            type=attributes.Schema.STRING,
        ),
        RESERVABLE: attributes.Schema(
            _('The flag which represents whether the host is reservable '
              'or not.'),
            type=attributes.Schema.BOOLEAN,
        ),
        STATUS: attributes.Schema(
            _('The status of the host.'),
            type=attributes.Schema.STRING,
        ),
        TRUST_ID: attributes.Schema(
            _('The UUID of the trust of the host operator.'),
            type=attributes.Schema.STRING,
        ),
        EXTRA_CAPABILITY_ATTR: attributes.Schema(
            _('The extra capability of the host.'),
            type=attributes.Schema.MAP,
        ),
        CREATED_AT: attributes.Schema(
            _('The date and time when the host was created. '
              'The date and time format must be "CCYY-MM-DD hh:mm".'),
            type=attributes.Schema.STRING,
        ),
        UPDATED_AT: attributes.Schema(
            _('The date and time when the host was updated. '
              'The date and time format must be "CCYY-MM-DD hh:mm".'),
            type=attributes.Schema.STRING
        ),
    }
    default_client_name = 'blazar'
    entity = 'host'
    def _parse_extra_capability(self, args):
        """Flatten the extra_capability map into top-level create arguments.

        Blazar's host-create call takes extra capabilities as plain keyword
        arguments, so the nested map is merged into *args* (dropping a
        conflicting "name" key first) and the map key itself removed.
        """
        if self.NAME in args[self.EXTRA_CAPABILITY]:
            # Remove "name" key if present in the extra_capability property.
            del args[self.EXTRA_CAPABILITY][self.NAME]
        args.update(args[self.EXTRA_CAPABILITY])
        args.pop(self.EXTRA_CAPABILITY)
        return args
    def handle_create(self):
        """Create the Blazar host and record its ID as the resource ID."""
        # Drop unset properties so only explicit values reach the API.
        args = dict((k, v) for k, v in self.properties.items()
                    if v is not None)
        if self.EXTRA_CAPABILITY in args:
            args = self._parse_extra_capability(args)
        host = self.client_plugin().create_host(**args)
        self.resource_id_set(host['id'])
        return host['id']
    def _resolve_attribute(self, name):
        """Fetch attribute *name* from the live Blazar host.

        Returns None while the resource has no backing host yet; raises
        InvalidTemplateAttribute for unknown attribute names.
        """
        if self.resource_id is None:
            return
        host = self.client_plugin().get_host(self.resource_id)
        try:
            return host[name]
        except KeyError:
            raise exception.InvalidTemplateAttribute(resource=self.name,
                                                     key=name)
def resource_mapping():
    """Expose the Heat resource type name to implementing class mapping."""
    return {'OS::Blazar::Host': Host}
|
def test_app_created(app):
assert app
|
from flask import Flask
from custom_flask_graphql import GraphQLView
from schema import schema
from json_cleaning import JSonCleaning
def create_app(path='/graphql', **kwargs):
    """Application factory: build a Flask app exposing a GraphQL endpoint.

    Args:
        path: URL rule at which the GraphQL view is mounted.
        **kwargs: extra options forwarded to GraphQLView.as_view
            (e.g. graphiql=True).

    Returns:
        The configured Flask application.
    """
    # NOTE(review): backend is always None here; presumably GraphQLView falls
    # back to its default backend -- confirm.
    backend = None
    app = Flask(__name__, static_url_path='/static')
    # Development setting; should be disabled in production.
    app.debug = True
    app.add_url_rule(path, view_func=GraphQLView.as_view('graphql', schema=schema, backend=backend, context=JSonCleaning.key_map, **kwargs))
    print('Add URL rule: succeeded')
    return app
if __name__ == '__main__':
    # Run the development server with the GraphiQL in-browser IDE enabled.
    app = create_app(graphiql=True)
    app.run()
|
from utils.splunk_search_util import get_session_key, preprocess_search, run_search, get_search_response
def test_extend_index(eventgen_test_helper):
    """Test extendIndexes config"""
    # Generate the events, then verify both the main and the extended
    # indexes received the expected 12 events each.
    eventgen_test_helper("eventgen_extend_index.conf").get_events()
    key = get_session_key()
    main_job = run_search(key, preprocess_search('index=main sourcetype=cisco'))
    extended_job = run_search(key, preprocess_search('index=test_*'))
    main_events = get_search_response(key, main_job)
    extended_events = get_search_response(key, extended_job)
    assert len(main_events) == 12
    assert len(extended_events) == 12
|
# -*- coding: utf-8 -*-
# Django settings for the 'wordplease' project.
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'mn9_bc)i=#y$2mlf7m^f490zn8x%3drra@8kw1+^j6h^osaa^m'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'wordplease',
    'blogs',
    'posts',
    'users',
    'rest_framework'
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'wordplease.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'wordplease.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'es-es'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
# Placeholder image base URL used where a post has no image.
PH_IMAGES = 'http://lorempixel.com/500/500/sports/ExampleImage/'
# LOGIN URL
LOGIN_URL = '/login'
# REST FRAMEWORK
REST_FRAMEWORK = {
    'PAGINATE_BY': 10, # number of items per page
    'PAGINATE_BY_PARAM': 'page_size', # GET parameter that sets the page size
    'MAX_PAGINATE_BY': 10, # maximum number of items per page
}
|
import numba as nb
import numpy as np
# JIT-compiled to native code; nopython mode restricts the body to
# numba-supported operations.
@nb.jit(nopython=True)
def cob(result):
    """Expand a value sequence into paired coordinate lists.

    Appears to build staircase-style (step-plot) horizontal/vertical
    coordinate lists from `result` -- TODO confirm the intended shape:
    each iteration appends `x` twice to `vertical` (and the previous
    vertical value plus `x` to `horizontal`), which looks deliberate but
    is worth verifying against the plot it feeds.

    Returns:
        (horizontal, vertical) coordinate lists.
    """
    horizontal = [result[0]]
    vertical = [result[0]]
    for x in result[1:]:
        horizontal.append(vertical[-1])
        vertical.append(x)
        horizontal.append(x)
        vertical.append(x)
    return horizontal, vertical
# Example/warm-up invocation; the first call also triggers numba's JIT
# compilation of `cob` for float64 arrays.
results= np.linspace(0,10,1000)
cob(results)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import re
import config
def start(**kwargs):
    """Handle the /start command: greet the user and list the bot commands.

    Prefers the chat name, falls back to the username and finally to an
    anonymous placeholder. HTML tags are stripped from user-supplied names
    since the reply is sent with parse_mode=HTML.

    Returns:
        dict payload for the Telegram sendMessage call.
    """
    # .get() avoids a KeyError when the caller omits "name"/"username".
    if kwargs.get("name"):
        name = re.sub("<[^>]+>", "", kwargs["name"])
    elif kwargs.get("username"):
        name = re.sub("<[^>]+>", "", kwargs["username"])
    else:
        name = "Аноним"
    text = ["Привет <b>{name}</b>! Вот что я умею:".format(name=name)]
    # One help line per configured command.
    for k, v in config.TELEGRAM_COMMANDS.items():
        text.append("/{cmd} - {description}".format(cmd=k, description=v["description"]))
    return {"chat_id": kwargs["chat_id"], "text": "\n".join(text), "parse_mode": "HTML"}
|
from flask import flash, url_for, jsonify
from flask_login import current_user
from flask_mail import Message
from app import mail
from app import db, create_app
from app.models import User, Product
import requests
from bs4 import BeautifulSoup
import os
import json
import random
import re
import time
from datetime import datetime
def send_product_notification(product, conn):
    """Email the product's owner that the price dropped to the target.

    Args:
        product: Product row; author.email receives the mail, price/link
            fill the body.
        conn: open flask_mail connection used to send the message.
    """
    msg = Message('Your Amazon product is ready to be bought!', sender=os.environ.get('EMAIL_USER'), recipients=[product.author.email])
    msg.body = f'''The following Amazon product has gone down in price to {product.price}:
{product.link}
Go buy it before you miss it!
'''
    conn.send(msg)
def update_products():
    """Refresh every tracked product and notify owners of price drops.

    For each product: re-scrape its page; if the price is at or below the
    owner's optimal price, email them (at most once every 3 days); delete
    products tracked for 30+ days. All changes are committed in one go.
    """
    app = create_app()
    now = datetime.utcnow()
    with app.app_context():
        with mail.connect() as conn:
            for product in Product.query.all():
                data = update_product_data(product.link, product.optimal_price)
                # update_product_data returns False on a non-200 response;
                # previously that crashed here (False['price'] TypeError).
                # Skip the price update but keep the age-based cleanup.
                if data:
                    product.price = data['price']
                    product.available = data.get('availability', "")
                    # Throttle notifications to one every 3 days per product.
                    if not product.last_notification or (now - product.last_notification).days >= 3:
                        if product.price >= 0 and product.price <= product.optimal_price:
                            product.last_notification = now
                            send_product_notification(product, conn)
                # Stop tracking stale products after 30 days.
                if (now - product.date_added).days >= 30:
                    db.session.delete(product)
                # Be polite to Amazon: pause between requests.
                time.sleep(5)
            db.session.commit()
def update_product_data(url, optimal_price):
    """Re-scrape an Amazon product page for current price and availability.

    Args:
        url: product page URL.
        optimal_price: NOTE(review) -- accepted but never used in this
            function; confirm whether it can be dropped.

    Returns:
        dict with 'price' (-1 when not found) and optionally
        'availability'; or False when the request is not HTTP 200 (note
        the inconsistent return type -- callers must check for False).
    """
    # Rotate user agents to reduce the chance of being blocked.
    user_agents = [
        # "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:84.0) Gecko/20100101 Firefox/84.0",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:83.0) Gecko/20100101 Firefox/83.0",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0.2 Safari/605.1.15"
    ]
    user_agent = random.choice(user_agents)
    # Browser-like headers (including a Google referer) to look organic.
    headers = {"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9", "Accept-Encoding": "gzip, deflate, br", "Accept-Language": "es-ES,es;q=0.9,en;q=0.8", "Referer": "http://www.google.com/", "User-Agent": user_agent}
    source = requests.get(url, headers=headers, timeout=10)
    # TODO: Handle this
    if source.status_code != 200:
        return False
    soup = BeautifulSoup(source.text, "lxml")
    data = {}
    # Price: get the buybox text, strip non-breaking spaces, then pull the
    # first numeric token out of it.
    price_el = soup.find(id="price_inside_buybox")
    price_temp = price_el.get_text(strip=True) if price_el else None
    price_string = price_temp.replace(u'\xa0', u' ') if price_temp else None
    price_matches = re.findall(r"[-+]?\d*\.\d+|\d+", price_string) if price_string else None # Accepts negative numbers, just in case.
    price = float(price_matches[0]) if price_matches and len(price_matches) == 1 else None
    if price:
        data['price'] = price
    else:
        # Sentinel: price could not be parsed.
        data['price'] = -1
    availability_el_el = soup.find(id="availability")
    availability_el = availability_el_el.find("span") if availability_el_el else None
    availability = availability_el.get_text(strip=True) if availability_el else ""
    if availability:
        data['availability'] = availability
    return data
# Not always correct, but it's an educated guess.
def get_default_currency_code(url):
    """Guess the currency code for an Amazon product URL from its domain.

    Args:
        url: full product URL.

    Returns:
        ISO-style currency code for known marketplaces, 'USD' otherwise.
    """
    currency_codes = {
        'www.amazon.co.uk/': 'GBP',
        'www.amazon.com/': 'USD',
        'www.amazon.ca/': 'CAD',
        'www.amazon.com.au/': 'AUD',
        'www.amazon.com.mx/': 'MX'
    }
    # The previous regex (r"www.amazon.c[aom.uk]{1,5}/") could never match
    # 'www.amazon.com.mx/' ('x' is not in the character class) and could
    # raise KeyError for domains it matched but that are absent from the
    # table. Matching known domains directly avoids both problems.
    for domain, code in currency_codes.items():
        if domain in url:
            return code
    return 'USD'
def get_product_data(form_data):
    """Scrape an Amazon product page and persist a new Product for the user.

    Args:
        form_data: mapping with 'link', 'optimal_price' and 'user_id'.

    Returns:
        0 when the page request is not HTTP 200;
        1 when the product name or price could not be scraped;
        None on success (the Product has been committed to the database).
    """
    # TODO: Make sure form_data has link and optimal_price.
    # Rotate user agents to reduce the chance of being blocked.
    user_agents = [
        # "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:84.0) Gecko/20100101 Firefox/84.0",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:83.0) Gecko/20100101 Firefox/83.0",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0.2 Safari/605.1.15"
    ]
    user_agent = random.choice(user_agents)
    headers = {"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9", "Accept-Encoding": "gzip, deflate, br", "Accept-Language": "es-ES,es;q=0.9,en;q=0.8", "Referer": "http://www.google.com/", "User-Agent": user_agent}
    source = requests.get(form_data['link'], headers=headers, timeout=10)
    # TODO: Handle this
    if source.status_code != 200:
        return 0
    soup = BeautifulSoup(source.text, "lxml")
    data = {}
    name_el = soup.find(id="productTitle")
    name = name_el.get_text(strip=True) if name_el else None
    if name:
        data['name'] = name
    else:
        # Couldn't find name
        return 1
    seller_el = soup.find(id="sellerProfileTriggerId")
    seller = seller_el.get_text(strip=True) if seller_el else None
    if seller:
        data['seller'] = seller
    # Price: strip non-breaking spaces, then pull the single numeric token.
    price_el = soup.find(id="price_inside_buybox")
    price_temp = price_el.get_text(strip=True) if price_el else None
    price_string = price_temp.replace(u'\xa0', u' ') if price_temp else None
    price_matches = re.findall(r"[-+]?\d*\.\d+|\d+", price_string) if price_string else None
    price = float(price_matches[0]) if price_matches and len(price_matches) == 1 else None
    if price:
        data['price'] = price
    else:
        # Couldn't find prize
        return 1
    # shipping_el_el = soup.find(id="ourprice_shippingmessage")
    # shipping_el = shipping_el_el.find("span") if shipping_el_el else None
    # shipping = shipping_el.get_text(strip=True) if shipping_el else ""
    # Product image: the largest variant is the last key of the
    # data-a-dynamic-image JSON attribute.
    image_el = soup.find(id="landingImage")
    image_json = image_el['data-a-dynamic-image'] if image_el else None
    image_dict = json.loads(image_json) if image_json else None
    image_list = list(image_dict.keys()) if image_dict else None
    image = image_list[-1] if image_list else ""
    if image:
        data['image'] = image
    availability_el_el = soup.find(id="availability")
    availability_el = availability_el_el.find("span") if availability_el_el else None
    availability = availability_el.get_text(strip=True) if availability_el else ""
    if availability:
        data['availability'] = availability
    # Currency code from the locale switcher, falling back to a guess
    # based on the marketplace domain.
    currency_el_el = soup.find(id="icp-touch-link-cop")
    currency_el = currency_el_el.find("span", class_="icp-color-base") if currency_el_el else None
    currency = currency_el.get_text(strip=True) if currency_el else None
    currency_code = currency.split(' ', 1)[0] if currency else ""
    if currency_code:
        data['currency_code'] = currency_code
    data['link'] = form_data['link']
    data['optimal_price'] = float(form_data['optimal_price'])
    # Persist the product under the requesting user's account.
    app = create_app()
    with app.app_context():
        author = User.query.get(form_data['user_id'])
        product = Product(name=data.get('name', "Unknown product"), seller=data.get('seller', "Unknown seller"), currency_code=data.get('currency_code', get_default_currency_code(form_data['link'])), current_price=data.get('price', -1.0), optimal_price=data.get('optimal_price', -1.0), available=data.get('availability', ""), link=data.get('link', "amazon.com"), author=author)
        product.img = data.get('image', product.img)
        db.session.add(product)
        db.session.commit()
|
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
# noqa: F401
# noqa: F401
# noqa: F401
# noqa: F401
import traceback
# Disable insecure warnings
# NOTE(review): `requests` is presumably provided by CommonServerPython's
# wildcard import -- confirm.
requests.packages.urllib3.disable_warnings()
'''CONSTANTS'''
# Okta status string for a deactivated user.
DEPROVISIONED_STATUS = 'DEPROVISIONED'
USER_IS_DISABLED_MSG = 'Deactivation failed because the user is already disabled.'
USER_IS_DISABLED_ERROR = 'E0000007'
# Okta error codes treated as benign (command is skipped, not failed).
ERROR_CODES_TO_SKIP = [
    'E0000016',  # user is already enabled
    USER_IS_DISABLED_ERROR
]
'''CLIENT CLASS'''
class Client(BaseClient):
    """
    Okta IAM Client class that implements logic to authenticate with Okta.
    """
    def test(self):
        """Connectivity check: fetch the current user; raises on failure."""
        uri = 'users/me'
        self._http_request(method='GET', url_suffix=uri)
    def get_user(self, email):
        """Return the Okta user whose login equals *email*.

        Returns None when no user (or more than one user) matches.
        """
        uri = 'users'
        query_params = {
            'filter': encode_string_results(f'profile.login eq "{email}"')
        }
        res = self._http_request(
            method='GET',
            url_suffix=uri,
            params=query_params
        )
        if res and len(res) == 1:
            return res[0]
        return None
    def deactivate_user(self, user_id):
        """Deactivate the user identified by *user_id*."""
        uri = f'users/{user_id}/lifecycle/deactivate'
        self._http_request(
            method="POST",
            url_suffix=uri
        )
    def activate_user(self, user_id):
        """Activate the user identified by *user_id*."""
        uri = f'users/{user_id}/lifecycle/activate'
        self._http_request(
            method="POST",
            url_suffix=uri
        )
    def create_user(self, user_data):
        """Create (and immediately activate) a user with the given profile.

        Returns the created user object from Okta.
        """
        body = {
            'profile': user_data
        }
        uri = 'users'
        query_params = {
            'activate': 'true',
            'provider': 'true'
        }
        res = self._http_request(
            method='POST',
            url_suffix=uri,
            data=json.dumps(body),
            params=query_params
        )
        return res
    def update_user(self, user_id, user_data):
        """Update the user's profile and return the updated user object."""
        body = {
            'profile': user_data
        }
        uri = f'users/{user_id}'
        res = self._http_request(
            method='POST',
            url_suffix=uri,
            data=json.dumps(body)
        )
        return res
    def get_okta_fields(self):
        """Return {field_name: title} for all base and custom schema fields."""
        okta_fields = {}
        uri = 'meta/schemas/user/default'
        res = self._http_request(
            method='GET',
            url_suffix=uri
        )
        base_properties = res.get('definitions', {}).get('base', {}).get('properties', {})
        okta_fields.update({k: base_properties[k].get('title') for k in base_properties.keys()})
        custom_properties = res.get('definitions', {}).get('custom', {}).get('properties', {})
        okta_fields.update({k: custom_properties[k].get('title') for k in custom_properties.keys()})
        return okta_fields
'''HELPER FUNCTIONS'''
def merge(user_profile, full_user_data):
    """ Merges the user_profile and the full user data, such that existing attributes in user_profile will remain as
    they are, but attributes not provided will be added to it. Mutates and returns user_profile.
    Args:
        user_profile (dict): The user profile data, in Okta format.
        full_user_data (dict): The full user data retrieved from Okta.
    Return:
        (dict) The full user profile.
    """
    # .get with a default keeps this robust when 'profile' is absent
    # (previously .get('profile') returned None and .items() crashed);
    # setdefault preserves the "existing keys win" in-place semantics.
    for attribute, value in full_user_data.get('profile', {}).items():
        user_profile.setdefault(attribute, value)
    return user_profile
def handle_exception(user_profile, e, action):
    """ Handles failed responses from Okta API by setting the User Profile object with the results.
    Args:
        user_profile (IAMUserProfile): The User Profile object.
        e (DemistoException): The exception error that holds the response json.
        action (IAMActions): An enum represents the current action (get, update, create, etc).
    """
    error_code = e.res.get('errorCode')
    error_message = get_error_details(e.res)
    # Replace Okta's generic text with a clearer message for the
    # "already disabled" case.
    if error_code == USER_IS_DISABLED_ERROR:
        error_message = USER_IS_DISABLED_MSG
    # Benign errors (already enabled/disabled) are recorded as a skip;
    # everything else is recorded as a failure with full details.
    if error_code in ERROR_CODES_TO_SKIP:
        user_profile.set_result(action=action,
                                skip=True,
                                skip_reason=error_message)
    else:
        user_profile.set_result(action=action,
                                success=False,
                                error_code=error_code,
                                error_message=error_message,
                                details=e.res)
def get_error_details(res):
    """Build a human-readable error string from an Okta error response.

    Args:
        res (dict): The data retrieved from Okta.

    Returns:
        (str) The parsed error details.
    """
    message = f'{res.get("errorSummary")}. '
    # One numbered line per error cause, if any were supplied.
    cause_lines = [
        f'{idx}. {cause.get("errorSummary")}\n'
        for idx, cause in enumerate(res.get('errorCauses', []), 1)
    ]
    if cause_lines:
        message += 'Reason:\n' + ''.join(cause_lines)
    return message
'''COMMAND FUNCTIONS'''
def test_module(client):
    """Integration health check: raises on API failure, else reports 'ok'."""
    client.test()
    return_results('ok')
def get_mapping_fields_command(client):
    """Build the field-mapping scheme from Okta's user schema.

    Each Okta schema field (name + title) is added to the indicator type
    scheme so mappings can be edited in the UI.
    """
    okta_fields = client.get_okta_fields()
    incident_type_scheme = SchemeTypeMapping(type_name=IAMUserProfile.INDICATOR_TYPE)
    for field, description in okta_fields.items():
        incident_type_scheme.add_field(field, description)
    return GetMappingFieldsResponse([incident_type_scheme])
def get_user_command(client, args, mapper_in):
    """Implement iam-get-user: look up an Okta user by email.

    Args:
        client (Client): Okta API client.
        args (dict): command args; 'user-profile' holds the XSOAR user data.
        mapper_in (str): mapper translating Okta fields into the XSOAR profile.

    Returns:
        IAMUserProfile with the lookup outcome recorded.
    """
    user_profile = IAMUserProfile(user_profile=args.get('user-profile'))
    try:
        okta_user = client.get_user(user_profile.get_attribute('email'))
        if not okta_user:
            error_code, error_message = IAMErrors.USER_DOES_NOT_EXIST
            user_profile.set_result(action=IAMActions.GET_USER,
                                    success=False,
                                    error_code=error_code,
                                    error_message=error_message)
        else:
            user_profile.update_with_app_data(okta_user, mapper_in)
            user_profile.set_result(
                action=IAMActions.GET_USER,
                success=True,
                # A DEPROVISIONED Okta user is reported as inactive.
                active=False if okta_user.get('status') == DEPROVISIONED_STATUS else True,
                iden=okta_user.get('id'),
                email=okta_user.get('profile', {}).get('email'),
                username=okta_user.get('profile', {}).get('login'),
                details=okta_user
            )
    except DemistoException as e:
        handle_exception(user_profile, e, IAMActions.GET_USER)
    return user_profile
def enable_user_command(client, args, mapper_out, is_command_enabled, is_create_user_enabled, create_if_not_exists):
    """Implement iam-enable-user: activate an existing Okta user.

    When the user does not exist and create_if_not_exists is set, the call
    is delegated to create_user_command instead. A disabled command is
    recorded as a skip.

    Returns:
        IAMUserProfile with the outcome recorded.
    """
    user_profile = IAMUserProfile(user_profile=args.get('user-profile'))
    if not is_command_enabled:
        user_profile.set_result(action=IAMActions.ENABLE_USER,
                                skip=True,
                                skip_reason='Command is disabled.')
    else:
        try:
            okta_user = client.get_user(user_profile.get_attribute('email'))
            if not okta_user:
                if create_if_not_exists:
                    # Fall back to creating the user from the same args.
                    user_profile = create_user_command(client, args, mapper_out, is_create_user_enabled)
                else:
                    _, error_message = IAMErrors.USER_DOES_NOT_EXIST
                    user_profile.set_result(action=IAMActions.ENABLE_USER,
                                            skip=True,
                                            skip_reason=error_message)
            else:
                client.activate_user(okta_user.get('id'))
                user_profile.set_result(
                    action=IAMActions.ENABLE_USER,
                    success=True,
                    active=True,
                    iden=okta_user.get('id'),
                    email=okta_user.get('profile', {}).get('email'),
                    username=okta_user.get('profile', {}).get('login'),
                    details=okta_user
                )
        except DemistoException as e:
            handle_exception(user_profile, e, IAMActions.ENABLE_USER)
    return user_profile
def disable_user_command(client, args, is_command_enabled):
    """Implement iam-disable-user: deactivate an existing Okta user.

    Missing users and a disabled command are both recorded as skips.

    Returns:
        IAMUserProfile with the outcome recorded.
    """
    user_profile = IAMUserProfile(user_profile=args.get('user-profile'))
    if not is_command_enabled:
        user_profile.set_result(action=IAMActions.DISABLE_USER,
                                skip=True,
                                skip_reason='Command is disabled.')
    else:
        try:
            okta_user = client.get_user(user_profile.get_attribute('email'))
            if not okta_user:
                _, error_message = IAMErrors.USER_DOES_NOT_EXIST
                user_profile.set_result(action=IAMActions.DISABLE_USER,
                                        skip=True,
                                        skip_reason=error_message)
            else:
                client.deactivate_user(okta_user.get('id'))
                user_profile.set_result(
                    action=IAMActions.DISABLE_USER,
                    success=True,
                    active=False,
                    iden=okta_user.get('id'),
                    email=okta_user.get('profile', {}).get('email'),
                    username=okta_user.get('profile', {}).get('login'),
                    details=okta_user
                )
        except DemistoException as e:
            handle_exception(user_profile, e, IAMActions.DISABLE_USER)
    return user_profile
def create_user_command(client, args, mapper_out, is_command_enabled):
    """Implement iam-create-user: create a new Okta user from the profile.

    An already-existing user or a disabled command is recorded as a skip.

    Args:
        mapper_out (str): mapper translating the XSOAR profile into Okta fields.

    Returns:
        IAMUserProfile with the outcome recorded.
    """
    user_profile = IAMUserProfile(user_profile=args.get('user-profile'))
    if not is_command_enabled:
        user_profile.set_result(action=IAMActions.CREATE_USER,
                                skip=True,
                                skip_reason='Command is disabled.')
    else:
        try:
            okta_user = client.get_user(user_profile.get_attribute('email'))
            if okta_user:
                _, error_message = IAMErrors.USER_ALREADY_EXISTS
                user_profile.set_result(action=IAMActions.CREATE_USER,
                                        skip=True,
                                        skip_reason=error_message)
            else:
                okta_profile = user_profile.map_object(mapper_out)
                created_user = client.create_user(okta_profile)
                user_profile.set_result(
                    action=IAMActions.CREATE_USER,
                    success=True,
                    active=False if created_user.get('status') == DEPROVISIONED_STATUS else True,
                    iden=created_user.get('id'),
                    email=created_user.get('profile', {}).get('email'),
                    username=created_user.get('profile', {}).get('login'),
                    details=created_user
                )
        except DemistoException as e:
            handle_exception(user_profile, e, IAMActions.CREATE_USER)
    return user_profile
def update_user_command(client, args, mapper_out, is_command_enabled, is_create_user_enabled, create_if_not_exists):
    """Implement iam-update-user: update an existing Okta user's profile.

    The mapped profile is merged with the user's current Okta profile so
    unspecified attributes are preserved. When the user does not exist and
    create_if_not_exists is set, delegates to create_user_command.

    Returns:
        IAMUserProfile with the outcome recorded.
    """
    user_profile = IAMUserProfile(user_profile=args.get('user-profile'))
    if not is_command_enabled:
        user_profile.set_result(action=IAMActions.UPDATE_USER,
                                skip=True,
                                skip_reason='Command is disabled.')
    else:
        try:
            okta_user = client.get_user(user_profile.get_attribute('email'))
            if okta_user:
                user_id = okta_user.get('id')
                okta_profile = user_profile.map_object(mapper_out)
                # Keep attributes the mapper did not set.
                full_okta_profile = merge(okta_profile, okta_user)
                updated_user = client.update_user(user_id, full_okta_profile)
                user_profile.set_result(
                    action=IAMActions.UPDATE_USER,
                    success=True,
                    active=False if updated_user.get('status') == DEPROVISIONED_STATUS else True,
                    iden=updated_user.get('id'),
                    email=updated_user.get('profile', {}).get('email'),
                    username=updated_user.get('profile', {}).get('login'),
                    details=updated_user
                )
            else:
                if create_if_not_exists:
                    user_profile = create_user_command(client, args, mapper_out, is_create_user_enabled)
                else:
                    _, error_message = IAMErrors.USER_DOES_NOT_EXIST
                    user_profile.set_result(action=IAMActions.UPDATE_USER,
                                            skip=True,
                                            skip_reason=error_message)
        except DemistoException as e:
            handle_exception(user_profile, e, IAMActions.UPDATE_USER)
    return user_profile
def main():
    """Entry point: read integration params and route the invoked command.

    CRUD (iam-*) command failures are returned as results (not errors) so
    playbooks keep running; test-module/get-mapping-fields failures are
    returned as real errors.
    """
    user_profile = None
    params = demisto.params()
    base_url = urljoin(params['url'].strip('/'), '/api/v1/')
    token = params.get('apitoken')
    mapper_in = params.get('mapper-in')
    mapper_out = params.get('mapper-out')
    verify_certificate = not params.get('insecure', False)
    proxy = params.get('proxy', False)
    command = demisto.command()
    args = demisto.args()
    # Feature toggles for the individual CRUD commands.
    is_create_enabled = params.get("create-user-enabled")
    is_enable_disable_enabled = params.get("enable-disable-user-enabled")
    is_update_enabled = demisto.params().get("update-user-enabled")
    create_if_not_exists = demisto.params().get("create-if-not-exists")
    headers = {
        'Content-Type': 'application/json',
        'Accept': 'application/json',
        # Okta API-token scheme.
        'Authorization': f'SSWS {token}'
    }
    client = Client(
        base_url=base_url,
        verify=verify_certificate,
        proxy=proxy,
        headers=headers,
        ok_codes=(200,)
    )
    demisto.debug(f'Command being called is {command}')
    try:
        if command == 'iam-get-user':
            user_profile = get_user_command(client, args, mapper_in)
        elif command == 'iam-create-user':
            user_profile = create_user_command(client, args, mapper_out, is_create_enabled)
        elif command == 'iam-update-user':
            user_profile = update_user_command(client, args, mapper_out, is_update_enabled,
                                               is_create_enabled, create_if_not_exists)
        elif command == 'iam-disable-user':
            user_profile = disable_user_command(client, args, is_enable_disable_enabled)
        elif command == 'iam-enable-user':
            user_profile = enable_user_command(client, args, mapper_out, is_enable_disable_enabled,
                                               is_create_enabled, create_if_not_exists)
        if user_profile:
            return_results(user_profile)
    except Exception:
        # We don't want to return an error entry CRUD commands execution
        return_results(f'Failed to execute {command} command. Traceback: {traceback.format_exc()}')
    try:
        if command == 'test-module':
            test_module(client)
        elif command == 'get-mapping-fields':
            return_results(get_mapping_fields_command(client))
    except Exception:
        # For any other integration command exception, return an error
        return_error(f'Failed to execute {command} command. Traceback: {traceback.format_exc()}')
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
|
#!/usr/bin/env python
# Minimal UDP listener that prints received datagrams.
# Python 2 script (uses `print` statements) -- keep running under python2.
import socket
# Local address/port to bind to.
address=('127.0.0.1',22345)
s=socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
s.bind(address)
while 1:
    data,addr=s.recvfrom(2048)
    # Stop when an empty datagram arrives.
    if not data:
        break
    print "got data from",addr
    print data
s.close()
|
#!/usr/bin/python
import sys
import re
import os
#if len(sys.argv)<2:
# sys.exit(0)
import fileinput
from subprocess import Popen, PIPE
# NOTE(review): Python 2 script -- `print` statements and list-returning map()
# below. Reads an OpenGL header from stdin / files named on the command line.
data0=""
for line in fileinput.input():
    data0+=line
# Strip C block comments (/* ... */), including multi-line ones.
data0 = re.sub(r"/\*.*?\*/","",data0,flags=re.DOTALL)
# Re-join declarations wrapped across lines: drop newlines that occur while
# inside an unbalanced parenthesis pair.
closed=""
depth=0
for i in data0:
    if i=="\n" and depth!=0:
        continue
    if i=="(":
        depth+=1
    if i==")":
        depth-=1
    closed+=i
# Collapse runs of spaces/tabs into single spaces.
data0=re.sub(r"[ \t]+"," ",closed,flags=0)
# Feature-test #defines that could be prepended for the preprocessor run;
# currently disabled (overwritten with "" immediately below).
glDefines="""
#define GL_VERSION_1_1 1
#define GL_VERSION_1_2 1
#define GL_VERSION_1_3 1
#define GL_VERSION_1_4 1
#define GL_ARB_imaging 1
#define GL_ARB_multitexture 1
"""
glDefines=""
# Pipe the cleaned header through the g++ preprocessor.
data00 = Popen(["g++","-std=c++1y","-E","-"],stdout=PIPE,stdin=PIPE).communicate(glDefines+data0)[0]
# Every preprocessed line mentioning a PFN... function-pointer typedef.
data01 = re.compile(r"^.*PFN.*",flags=re.MULTILINE).findall(data00)
# Lower-cased function names pulled out of the PFN<NAME>PROC typedefs.
data02 = map(lambda x:re.sub(r"^typedef.*PFN([a-zA-Z0-9_]*)PROC.*",r"\1",x).lower(),data01)
# Debug dump; the unconditional exit(0) below makes everything after this
# point unreachable -- presumably a debugging leftover, confirm before reuse.
for i in data01:
    print i
exit(0)
# Build one big alternation regex out of all extracted function names.
reg = data02[0]
for i in data02[1:]:
    reg+="|"+i
comreg = re.compile(reg)
# Pair each lower-cased header line with its original-case form.
data03 = zip(data0.lower().split("\n"),data0.split("\n"))
data04=[]
for i in data03:
    if i[1].find("PFN")!=-1:
        continue
    if comreg.findall(i[0])!=[]:
        data04+=[i[1]]
# Strip export / calling-convention macros from the matched prototypes.
data05 = map(lambda x:x.replace("GLAPI ",""),map(lambda x:x.replace("APIENTRY",""),map(lambda x:x.replace("GLAPIENTRY",""),data04)))
# Normalise whitespace around '*' and ','.
data06 = map(lambda x:re.sub(r" ,",r",",re.sub(r", ",r",",re.sub(r"\* ","*",re.sub(r" \*","*",x)))),data05)
# Rewrite "ret glName(args);" prototypes into "ret,glName,args" CSV records.
data07 = map(lambda x:re.sub(r"([a-zA-Z0-9_* ]*?[a-zA-Z0-9_*]+) ? ?(gl[a-zA-Z0-9_]*) ?\( ?(.*) ?\);",r"\1,\2,\3",x),data06)
data07 = list(set(data07))
#for i in data07:
#    print i
#exit(0)
def genTypeName(string):
    """Split a C parameter declaration into a ``(type, identifier)`` pair.

    The identifier starts after the right-most space or ``*`` -- whichever
    comes later.  A trailing ``*`` is kept as part of the type; a separating
    space is dropped.  If neither character occurs, the type is empty and the
    whole string is returned as the identifier.
    """
    last_space = string.rfind(" ")
    last_star = string.rfind("*")
    if last_space > last_star:
        # "const GLenum name" -> the space separates type from identifier.
        return (string[:last_space], string[last_space + 1:])
    # "const GLchar*name" (or no separator at all) -> keep '*' with the type.
    return (string[:last_star + 1], string[last_star + 1:])
# Expand each "ret,name,args" record into "ret,name,type1,arg1,type2,arg2,..."
# by splitting every parameter declaration with genTypeName().
# NOTE(review): unreachable in the script's current state because of the
# unconditional exit(0) in the debug section above.
listOfFCE=[]
for i in data07:
    components = i.split(",")
    final = ""
    # Trim a single trailing space from the return type, if present.
    if components[0][-1]==" ":
        final+=components[0][:-1]
    else:
        final+=components[0]
    final+=","+components[1]
    # "void" parameter lists contribute no columns.
    for j in components[2:]:
        if j!="void":
            final+=","+genTypeName(j)[0]
            final+=","+genTypeName(j)[1]
    listOfFCE+=[final]
for i in listOfFCE:
    print i
exit(0)
|
# -*- coding: utf-8 -*-
__author__ = 'gvammer'
from django.core.management.base import NoArgsCommand
from django.conf import settings
from PManager.models import PM_User, PM_Project
from django.contrib.auth.models import User
from PManager.services.docker import blockchain_user_register_request, blockchain_user_getkey_request, blockchain_user_newproject_request
class Command(NoArgsCommand):
    """Maintenance command: refresh blockchain credentials for every user that
    already has a blockchain wallet, then re-register each of that user's
    blockchain-named projects.

    NOTE(review): Python 2 / legacy Django code (`NoArgsCommand`, `print`
    statements).
    """

    def handle_noargs(self, **options):
        # Only users whose wallet field is already populated.
        u = PM_User.objects.filter(blockchain_wallet__isnull=False)
        for us in u:
            result = blockchain_user_register_request(us.user.username)
            # The service appears to signal failure by embedding 'Error' in
            # its reply body -- confirm against the service contract.
            if result.find('Error') == -1:
                # Assumes the reply is "<key>\n\n<cert>[...]" -- TODO confirm.
                res = result.split("\n\n")
                us.blockchain_key = res[0]
                us.blockchain_cert = res[1]
                us.blockchain_wallet = blockchain_user_getkey_request(us.user.username)
                us.save()
                print us.blockchain_wallet
            # Projects are re-registered for every user, regardless of whether
            # the credential refresh above succeeded.
            p = PM_Project.objects.filter(author=us.user, blockchain_name__isnull=False)
            for pr in p:
                blockchain_user_newproject_request(us.user.username, pr.blockchain_name)
                print pr.blockchain_name
|
from .Resource import Resource |
#!/usr/bin/env python3
"""Advent of Code 2021, day 2 part 1: follow the submarine course in 'input1'."""


def final_position(lines):
    """Apply every course command and return ``(horizontal, depth)``.

    Args:
        lines: iterable of strings of the form ``"<command> <quantity>"`` where
            command is ``up``, ``down`` or ``forward``.

    Raises:
        ValueError: on an unrecognised command.
    """
    horizontal = 0
    depth = 0
    for line in lines:
        command, quantity = line.split()
        quantity = int(quantity)
        if command == 'up':
            depth -= quantity
        elif command == 'down':
            depth += quantity
        elif command == 'forward':
            horizontal += quantity
        else:
            # Bug fix: the original did `raise 'Error, unknown command!'` --
            # raising a plain string is itself a TypeError in Python 3
            # ("exceptions must derive from BaseException").
            raise ValueError(f'Error, unknown command: {command!r}')
    return horizontal, depth


if __name__ == '__main__':
    # `with` guarantees the input file is closed (the original leaked the handle).
    with open('input1') as f:
        data = f.read().splitlines()
    horizontal, depth = final_position(data)
    print(f'{horizontal} * {depth} = {horizontal * depth}')
|
from dataclasses import dataclass
from typing import Dict, Union, Optional
import torch
from torch import nn
from transformers import PreTrainedModel
from transformers.file_utils import ModelOutput
from .config import NEAConfig
from .modules.attention import Attention
from .modules.mean_over_time import MeanOverTime
@dataclass
class NEAModelOutput(ModelOutput):
    """
    Base class for outputs of NEA models.

    Args:
        loss (:obj:`torch.Tensor` of shape `(1,)`, `optional`, returned when :obj:`labels` is provided):
            Regression loss. Loss function used is dependent on what is specified in NEAConfig.
        logits (:obj:`torch.Tensor` of shape :obj:`(batch_size, 1)`):
            Regression scores between 0 and 1.
    """
    # Only populated when labels were passed to forward().
    loss: Optional[torch.Tensor] = None
    # Sigmoid-squashed scores, shape (batch_size, 1).
    logits: Optional[torch.Tensor] = None
class NEAPreTrainedModel(PreTrainedModel):
    """
    Abstract base class for all Neural Essay Assessor (NEA) models.

    Declares the config class and the weight-initialization scheme shared by
    the concrete NEA models. Do not use or instantiate this class directly;
    see the NEA model classes for usage.
    """

    config_class = NEAConfig
    base_model_prefix = "NEA"

    def _init_weights(self, module):
        """Initialize *module*'s parameters according to its layer type."""
        if isinstance(module, nn.Linear):
            nn.init.xavier_uniform_(module.weight)
            nn.init.zeros_(module.bias)
            return
        if isinstance(module, nn.Embedding):
            nn.init.uniform_(module.weight)
            return
        if isinstance(module, (nn.LSTM, nn.GRU, nn.RNN)):
            # Recurrent layers: Xavier for input-hidden weights, orthogonal
            # for hidden-hidden weights, zeros for biases.
            for param_name, param in module.named_parameters():
                if "weight_ih" in param_name:
                    nn.init.xavier_uniform_(param.data)
                elif "weight_hh" in param_name:
                    nn.init.orthogonal_(param.data)
                elif "bias" in param_name:
                    nn.init.zeros_(param.data)
class NEARegPoolingModel(NEAPreTrainedModel):
    """
    Neural Essay Assessor (NEA) model of the "Regression Pooling" type described
    in the original research code: embedding -> optional CNN -> optional RNN ->
    aggregation (mean-over-time or attention) -> linear -> sigmoid.

    This class inherits from :obj:`NEAPreTrainedModel` for weights initialization
    and utility functions from the transformers :obj:`PreTrainedModel` class.

    .. note::
        This is the optimal model type as described in the NEA paper. Only this
        model class will work with the model weights stored on Azure Blob Storage.

    Args:
        config (:class:`~NEAConfig`): Model configuration class with all parameters
            required for the model. Initializing with a config file does not load
            the weights associated with the model, only the configuration.
            Use the :obj:`.from_pretrained` method to load the model weights.

    Example::
        # 1. From default
        config = NEAConfig()
        model = NEARegPoolingModel(config)
        # 2. From pretrained
        config = NEAConfig.from_pretrained("https://sgnlp.blob.core.windows.net/models/nea/config.json")
        model = NEARegPoolingModel.from_pretrained(
            "https://sgnlp.blob.core.windows.net/models/nea/pytorch_model.bin", config=config)
    """

    def __init__(self, config):
        super().__init__(config)
        self.embedding_layer = self._init_embedding_layer(config)
        self.conv_layer = self._init_conv_layer(config)
        self.dropout = self._init_dropout(config)
        self.rec_layer = self._init_rec_layer(config)
        self.agg_layer = self._init_agg_layer(config)
        self.linear_layer = self._init_linear_layer(config)
        self.sigmoid = nn.Sigmoid()
        self.init_weights()
        self.loss_function = config.loss_function
        self.skip_init_bias = config.skip_init_bias

    def forward(
        self,
        input_ids: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        token_type_ids: Optional[torch.Tensor] = None,
        labels: Optional[torch.Tensor] = None
    ) -> NEAModelOutput:
        """Compute model output given input.

        Args:
            input_ids (:obj:`torch.Tensor`): input_ids of sequences, shape (batch_size, seq_len).
            attention_mask: unused; accepted for transformers API compatibility.
            token_type_ids: unused; accepted for transformers API compatibility.
            labels (:obj:`Optional[torch.Tensor]`): targets of shape (batch_size). Defaults to None.

        Returns:
            :obj:`NEAModelOutput`: logits of shape (batch_size, 1); loss included when labels given.
        """
        mask = self._generate_mask(input_ids)
        x = self.embedding_layer(input_ids)
        x = self.dropout(x)
        if self.conv_layer:
            # Bug fix: previously called the non-existent attribute `self.conv`,
            # raising AttributeError whenever cnn_output_dim > 0.
            # NOTE(review): nn.Conv1d expects (batch, channels, seq_len) while x
            # is (batch, seq_len, embedding_dim) here -- confirm whether a
            # transpose is required when the CNN path is enabled.
            x = self.conv_layer(x)
        if self.rec_layer:
            x, _ = self.rec_layer(x)
        x = self.dropout(x)
        self.agg_layer.init_mask(mask)
        x = self.agg_layer(x)
        x = self.linear_layer(x)
        logits = self.sigmoid(x)
        loss = None
        if labels is not None:
            if self.loss_function == "mse":
                loss_fct = torch.nn.MSELoss()
            elif self.loss_function == "mae":
                loss_fct = torch.nn.L1Loss()
            else:
                # Previously fell through to an UnboundLocalError; fail clearly.
                raise ValueError(f"Unsupported loss function: {self.loss_function}")
            loss = loss_fct(logits.view(-1), labels)
        return NEAModelOutput(loss=loss, logits=logits)

    def initialise_linear_bias(self, train_y: torch.Tensor) -> None:
        """Initialise the linear layer's bias to logit(mean(train_y)), per the NEA paper.

        Args:
            train_y (:obj:`torch.Tensor`): training targets, each in [0, 1].

        Raises:
            ValueError: if any element of ``train_y`` lies outside [0, 1].
        """
        if not self.skip_init_bias:
            input_checks = (train_y < 0) | (train_y > 1)
            if sum(input_checks) > 0:
                raise ValueError("Train y needs to be between 0 and 1")
            initial_mean_value = train_y.mean()
            # logit of the mean: the untrained sigmoid output starts near the
            # mean training score.
            initial_bias = torch.log(initial_mean_value) - torch.log(
                1 - initial_mean_value
            )
            self.linear_layer.bias.data.fill_(initial_bias)

    def load_pretrained_embedding(self, emb_matrix: torch.Tensor) -> None:
        """Load a pretrained embedding matrix into the embedding layer.

        Args:
            emb_matrix (:obj:`torch.Tensor`): tensor of embedding weights.

        Raises:
            ValueError: if ``emb_matrix`` does not match the layer's shape.
        """
        if self.embedding_layer.weight.shape != emb_matrix.shape:
            raise ValueError(
                "Dimensions of emb_matrix do not match embedding layer's dimensions"
            )
        self.embedding_layer.weight = nn.parameter.Parameter(emb_matrix)

    def _generate_mask(self, input_ids: torch.Tensor) -> torch.Tensor:
        """Count non-padding (non-zero) tokens per instance.

        Args:
            input_ids (torch.Tensor): shape (batch_size, seq_len).

        Returns:
            torch.Tensor: shape (batch_size, 1).
        """
        mask = (input_ids != 0).sum(axis=1)
        mask = torch.unsqueeze(mask, 1)
        return mask

    def _init_embedding_layer(self, config: Dict) -> nn.Embedding:
        """Build the embedding layer (index 0 is the padding token)."""
        return nn.Embedding(
            num_embeddings=config.vocab_size,
            embedding_dim=config.embedding_dim,
            padding_idx=0,
        )

    def _init_conv_layer(self, config: Dict) -> Union[None, nn.Conv1d]:
        """Build the convolution layer, or return None when cnn_output_dim == 0."""
        if config.cnn_output_dim > 0:
            layer = nn.Conv1d(
                in_channels=config.cnn_input_dim,
                out_channels=config.cnn_output_dim,
                kernel_size=config.cnn_kernel_size,
                padding=config.cnn_padding,
            )
        else:
            layer = None
        return layer

    def _init_dropout(self, config: Dict) -> nn.Dropout:
        """Build the dropout layer from config.dropout."""
        return nn.Dropout(p=config.dropout)

    def _init_rec_layer(self, config: Dict) -> Union[None, nn.LSTM, nn.GRU, nn.RNN]:
        """Build the recurrent layer (lstm/gru/rnn), or None when rec_output_dim == 0."""
        if config.rec_output_dim > 0:
            if config.rec_layer_type == "lstm":
                rec_layer = nn.LSTM
            elif config.rec_layer_type == "gru":
                rec_layer = nn.GRU
            elif config.rec_layer_type == "rnn":
                rec_layer = nn.RNN
            else:
                raise ValueError(f"Unsupported rec_layer_type: {config.rec_layer_type}")
            layer = rec_layer(
                input_size=config.rec_input_dim,
                hidden_size=config.rec_output_dim,
                num_layers=1,
                batch_first=True,
            )
        else:
            layer = None
        return layer

    def _init_agg_layer(self, config: Dict) -> Union[MeanOverTime, Attention]:
        """Build the aggregation layer: mean-over-time or attention.

        Raises:
            ValueError: for an unknown aggregation type (previously this fell
                through to an UnboundLocalError).
        """
        if config.aggregation == "mot":
            layer = MeanOverTime()
        elif config.aggregation in ["attsum", "attmean"]:
            layer = Attention(op=config.aggregation)
        else:
            raise ValueError(f"Unsupported aggregation: {config.aggregation}")
        return layer

    def _init_linear_layer(self, config: Dict) -> nn.Linear:
        """Build the final linear projection layer."""
        return nn.Linear(
            in_features=config.linear_input_dim, out_features=config.linear_output_dim
        )
class NEARegModel(NEAPreTrainedModel):
    """
    Neural Essay Assessor (NEA) model of the "Regression" type described in the
    original research code: embedding -> optional CNN -> optional RNN (last
    hidden state) -> linear -> sigmoid.

    This class inherits from :obj:`NEAPreTrainedModel` for weights initialization
    and utility functions from the transformers :obj:`PreTrainedModel` class.

    .. note::
        This model class will not work with the model weights stored on Azure Blob
        Storage. Refer to :obj:`NEARegPoolingModel` to use those pretrained weights.

    Args:
        config (:class:`~NEAConfig`): Model configuration class with all parameters
            required for the model. Initializing with a config file does not load
            the weights associated with the model, only the configuration.
            Use the :obj:`.from_pretrained` method to load the model weights.

    Example::
        config = NEAConfig()
        model = NEARegModel(config)
    """

    def __init__(self, config):
        super().__init__(config)
        self.embedding_layer = self._init_embedding_layer(config)
        self.conv_layer = self._init_conv_layer(config)
        self.dropout = self._init_dropout(config)
        self.rec_layer = self._init_rec_layer(config)
        self.linear_layer = self._init_linear_layer(config)
        self.sigmoid = nn.Sigmoid()
        self.init_weights()
        self.loss_function = config.loss_function
        self.skip_init_bias = config.skip_init_bias

    def forward(
        self,
        input_ids: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        token_type_ids: Optional[torch.Tensor] = None,
        labels: Optional[torch.Tensor] = None
    ) -> NEAModelOutput:
        """Compute model output given input.

        Args:
            input_ids (:obj:`torch.Tensor`): input_ids of sequences, shape (batch_size, seq_len).
            attention_mask: unused; accepted for transformers API compatibility.
            token_type_ids: unused; accepted for transformers API compatibility.
            labels (:obj:`Optional[torch.Tensor]`): targets of shape (batch_size). Defaults to None.

        Returns:
            :obj:`NEAModelOutput`: logits of shape (batch_size, 1); loss included when labels given.
        """
        mask = self._generate_mask(input_ids)
        x = self.embedding_layer(input_ids)
        x = self.dropout(x)
        if self.conv_layer:
            # Bug fix: previously called the non-existent attribute `self.conv`,
            # raising AttributeError whenever cnn_output_dim > 0.
            # NOTE(review): nn.Conv1d expects (batch, channels, seq_len) while x
            # is (batch, seq_len, embedding_dim) here -- confirm whether a
            # transpose is required when the CNN path is enabled.
            x = self.conv_layer(x)
        if self.rec_layer:
            x, _ = self.rec_layer(x)
            x = self._extract_last_hidden_state(x, mask)
        x = self.dropout(x)
        x = self.linear_layer(x)
        logits = self.sigmoid(x)
        loss = None
        if labels is not None:
            if self.loss_function == "mse":
                loss_fct = torch.nn.MSELoss()
            elif self.loss_function == "mae":
                # Bug fix: previously read `self.args.loss_function`, but no
                # `args` attribute exists on this class -- the sibling branch
                # (and __init__) use `self.loss_function`.
                loss_fct = torch.nn.L1Loss()
            else:
                raise ValueError(f"Unsupported loss function: {self.loss_function}")
            loss = loss_fct(logits.view(-1), labels)
        return NEAModelOutput(loss=loss, logits=logits)

    def initialise_linear_bias(self, train_y: torch.Tensor) -> None:
        """Initialise the linear layer's bias to logit(mean(train_y)), per the NEA paper.

        Args:
            train_y (:obj:`torch.Tensor`): training targets, each in [0, 1].

        Raises:
            ValueError: if any element of ``train_y`` lies outside [0, 1].
        """
        if not self.skip_init_bias:
            input_checks = (train_y < 0) | (train_y > 1)
            if sum(input_checks) > 0:
                raise ValueError("Train y needs to be between 0 and 1")
            initial_mean_value = train_y.mean()
            initial_bias = torch.log(initial_mean_value) - torch.log(
                1 - initial_mean_value
            )
            self.linear_layer.bias.data.fill_(initial_bias)

    def load_pretrained_embedding(self, emb_matrix: torch.Tensor) -> None:
        """Load a pretrained embedding matrix into the embedding layer.

        Args:
            emb_matrix (:obj:`torch.Tensor`): tensor of embedding weights.

        Raises:
            ValueError: if ``emb_matrix`` does not match the layer's shape.
        """
        if self.embedding_layer.weight.shape != emb_matrix.shape:
            raise ValueError(
                "Dimensions of emb_matrix do not match embedding layer's dimensions"
            )
        self.embedding_layer.weight = nn.parameter.Parameter(emb_matrix)

    def _generate_mask(self, input_ids: torch.Tensor) -> torch.Tensor:
        """Count non-padding (non-zero) tokens per instance.

        Args:
            input_ids (torch.Tensor): shape (batch_size, seq_len).

        Returns:
            torch.Tensor: shape (batch_size, 1).
        """
        mask = (input_ids != 0).sum(axis=1)
        mask = torch.unsqueeze(mask, 1)
        return mask

    def _extract_last_hidden_state(
        self, x: torch.Tensor, mask: torch.Tensor
    ) -> torch.Tensor:
        """Take each sequence's last non-padding hidden state from the RNN output.

        Args:
            x (torch.Tensor): recurrent-layer output, shape (batch_size, seq_len, rec_dim).
            mask (torch.Tensor): non-padding lengths, shape (batch_size, 1).

        Returns:
            torch.Tensor: shape (batch_size, rec_dim).
        """
        array = []
        for i in range(len(x)):
            # mask[i] is a length-1 tensor, so this keeps a (1, rec_dim) slice.
            array.append(x[i, mask[i] - 1, :])
        x = torch.cat(array)
        return x

    def _init_embedding_layer(self, config: Dict) -> nn.Embedding:
        """Build the embedding layer (index 0 is the padding token)."""
        return nn.Embedding(
            num_embeddings=config.vocab_size,
            embedding_dim=config.embedding_dim,
            padding_idx=0,
        )

    def _init_conv_layer(self, config: Dict) -> Union[None, nn.Conv1d]:
        """Build the convolution layer, or return None when cnn_output_dim == 0."""
        if config.cnn_output_dim > 0:
            layer = nn.Conv1d(
                in_channels=config.cnn_input_dim,
                out_channels=config.cnn_output_dim,
                kernel_size=config.cnn_kernel_size,
                padding=config.cnn_padding,
            )
        else:
            layer = None
        return layer

    def _init_dropout(self, config: Dict) -> nn.Dropout:
        """Build the dropout layer from config.dropout."""
        return nn.Dropout(p=config.dropout)

    def _init_rec_layer(self, config: Dict) -> Union[None, nn.LSTM, nn.GRU, nn.RNN]:
        """Build the recurrent layer (lstm/gru/rnn), or None when rec_output_dim == 0."""
        if config.rec_output_dim > 0:
            if config.rec_layer_type == "lstm":
                rec_layer = nn.LSTM
            elif config.rec_layer_type == "gru":
                rec_layer = nn.GRU
            elif config.rec_layer_type == "rnn":
                rec_layer = nn.RNN
            else:
                raise ValueError(f"Unsupported rec_layer_type: {config.rec_layer_type}")
            layer = rec_layer(
                input_size=config.rec_input_dim,
                hidden_size=config.rec_output_dim,
                num_layers=1,
                batch_first=True,
            )
        else:
            layer = None
        return layer

    def _init_linear_layer(self, config: Dict) -> nn.Linear:
        """Build the final linear projection layer."""
        return nn.Linear(
            in_features=config.linear_input_dim, out_features=config.linear_output_dim
        )
class NEABiRegModel(NEAPreTrainedModel):
    """
    Neural Essay Assessor (NEA) model of the "Bidirectional Regression" type
    described in the original research code: embedding -> optional CNN -> two
    independent recurrent layers (one over the time-reversed sequence) -> the
    two last hidden states concatenated -> linear -> sigmoid.

    This class inherits from :obj:`NEAPreTrainedModel` for weights initialization
    and utility functions from the transformers :obj:`PreTrainedModel` class.

    .. note::
        This model class will not work with the model weights stored on Azure Blob
        Storage. Refer to :obj:`NEARegPoolingModel` to use those pretrained weights.

    Args:
        config (:class:`~NEAConfig`): Model configuration class with all parameters
            required for the model. Initializing with a config file does not load
            the weights associated with the model, only the configuration.
            Use the :obj:`.from_pretrained` method to load the model weights.

    Example::
        config = NEAConfig()
        model = NEABiRegModel(config)
    """

    def __init__(self, config):
        super().__init__(config)
        self._validate_linear_input_dim(config)
        self.embedding_layer = self._init_embedding_layer(config)
        self.conv_layer = self._init_conv_layer(config)
        self.dropout = self._init_dropout(config)
        self.forward_rec_layer = self._init_rec_layer(config)
        self.backward_rec_layer = self._init_rec_layer(config)
        self.linear_layer = self._init_linear_layer(config)
        self.sigmoid = nn.Sigmoid()
        self.init_weights()
        self.loss_function = config.loss_function
        self.skip_init_bias = config.skip_init_bias

    def forward(
        self,
        input_ids: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        token_type_ids: Optional[torch.Tensor] = None,
        labels: Optional[torch.Tensor] = None
    ) -> NEAModelOutput:
        """Compute model output given input.

        Args:
            input_ids (:obj:`torch.Tensor`): input_ids of sequences, shape (batch_size, seq_len).
            attention_mask: unused; accepted for transformers API compatibility.
            token_type_ids: unused; accepted for transformers API compatibility.
            labels (:obj:`Optional[torch.Tensor]`): targets of shape (batch_size). Defaults to None.

        Returns:
            :obj:`NEAModelOutput`: logits of shape (batch_size, 1); loss included when labels given.
        """
        mask = self._generate_mask(input_ids)
        x = self.embedding_layer(input_ids)
        x = self.dropout(x)
        if self.conv_layer:
            # Bug fix: previously called the non-existent attribute `self.conv`,
            # raising AttributeError whenever cnn_output_dim > 0.
            # NOTE(review): nn.Conv1d expects (batch, channels, seq_len) while x
            # is (batch, seq_len, embedding_dim) here -- confirm whether a
            # transpose is required when the CNN path is enabled.
            x = self.conv_layer(x)
        if self.forward_rec_layer:
            x_forward, _ = self.forward_rec_layer(x)
            x_forward = self._extract_last_hidden_state(x_forward, mask, forward=True)
            x_forward = self.dropout(x_forward)
        if self.backward_rec_layer:
            # The backward pass runs over the time-reversed sequence.
            x_backward, _ = self.backward_rec_layer(torch.flip(x, [1]))
            x_backward = self._extract_last_hidden_state(
                x_backward, mask, forward=False
            )
            x_backward = self.dropout(x_backward)
        x = torch.cat((x_forward, x_backward), 1)
        x = self.linear_layer(x)
        logits = self.sigmoid(x)
        loss = None
        if labels is not None:
            if self.loss_function == "mse":
                loss_fct = torch.nn.MSELoss()
            elif self.loss_function == "mae":
                # Bug fix: previously read `self.args.loss_function`, but no
                # `args` attribute exists on this class -- the sibling branch
                # (and __init__) use `self.loss_function`.
                loss_fct = torch.nn.L1Loss()
            else:
                raise ValueError(f"Unsupported loss function: {self.loss_function}")
            loss = loss_fct(logits.view(-1), labels)
        return NEAModelOutput(loss=loss, logits=logits)

    def initialise_linear_bias(self, train_y: torch.Tensor) -> None:
        """Initialise the linear layer's bias to logit(mean(train_y)), per the NEA paper.

        Args:
            train_y (:obj:`torch.Tensor`): training targets, each in [0, 1].

        Raises:
            ValueError: if any element of ``train_y`` lies outside [0, 1].
        """
        if not self.skip_init_bias:
            input_checks = (train_y < 0) | (train_y > 1)
            if sum(input_checks) > 0:
                raise ValueError("Train y needs to be between 0 and 1")
            initial_mean_value = train_y.mean()
            initial_bias = torch.log(initial_mean_value) - torch.log(
                1 - initial_mean_value
            )
            self.linear_layer.bias.data.fill_(initial_bias)

    def load_pretrained_embedding(self, emb_matrix: torch.Tensor) -> None:
        """Load a pretrained embedding matrix into the embedding layer.

        Args:
            emb_matrix (:obj:`torch.Tensor`): tensor of embedding weights.

        Raises:
            ValueError: if ``emb_matrix`` does not match the layer's shape.
        """
        if self.embedding_layer.weight.shape != emb_matrix.shape:
            raise ValueError(
                "Dimensions of emb_matrix do not match embedding layer's dimensions"
            )
        self.embedding_layer.weight = nn.parameter.Parameter(emb_matrix)

    def _validate_linear_input_dim(self, config: Dict) -> None:
        """Ensure linear_input_dim is exactly 2 * rec_output_dim.

        The concatenation of forward and backward last hidden states doubles
        the recurrent output width, so the linear layer's input must match.

        Raises:
            ValueError: if the dimensions do not satisfy this relationship.
        """
        if config.linear_input_dim != 2 * config.rec_output_dim:
            raise ValueError(
                "linear_input_dim should be 2 times the value of rec_output_dim due to the bidirectional property of \
                    the recurrent layer."
            )

    def _generate_mask(self, input_ids: torch.Tensor) -> torch.Tensor:
        """Count non-padding (non-zero) tokens per instance.

        Args:
            input_ids (torch.Tensor): shape (batch_size, seq_len).

        Returns:
            torch.Tensor: shape (batch_size, 1).
        """
        mask = (input_ids != 0).sum(axis=1)
        mask = torch.unsqueeze(mask, 1)
        return mask

    def _extract_last_hidden_state(
        self, x: torch.Tensor, mask: torch.Tensor, forward: bool
    ) -> torch.Tensor:
        """Take each sequence's last hidden state from the RNN output.

        For the forward direction the state at index ``mask[i] - 1`` (last
        non-padding position) is used; for the backward direction the state at
        the final index of the reversed sequence is used.

        Args:
            x (torch.Tensor): recurrent-layer output, shape (batch_size, seq_len, rec_dim).
            mask (torch.Tensor): non-padding lengths, shape (batch_size, 1).
            forward (bool): True for the forward-direction output.

        Returns:
            torch.Tensor: shape (batch_size, rec_dim).
        """
        array = []
        if forward:
            for i in range(len(x)):
                array.append(x[i, mask[i] - 1, :])
            x = torch.cat(array)
        else:
            for i in range(len(x)):
                array.append(x[i, [-1], :])
            x = torch.cat(array)
        return x

    def _init_embedding_layer(self, config: Dict) -> nn.Embedding:
        """Build the embedding layer (index 0 is the padding token)."""
        return nn.Embedding(
            num_embeddings=config.vocab_size,
            embedding_dim=config.embedding_dim,
            padding_idx=0,
        )

    def _init_conv_layer(self, config: Dict) -> Union[None, nn.Conv1d]:
        """Build the convolution layer, or return None when cnn_output_dim == 0."""
        if config.cnn_output_dim > 0:
            layer = nn.Conv1d(
                in_channels=config.cnn_input_dim,
                out_channels=config.cnn_output_dim,
                kernel_size=config.cnn_kernel_size,
                padding=config.cnn_padding,
            )
        else:
            layer = None
        return layer

    def _init_dropout(self, config: Dict) -> nn.Dropout:
        """Build the dropout layer from config.dropout."""
        return nn.Dropout(p=config.dropout)

    def _init_rec_layer(self, config: Dict) -> Union[None, nn.LSTM, nn.GRU, nn.RNN]:
        """Build a recurrent layer (lstm/gru/rnn), or None when rec_output_dim == 0."""
        if config.rec_output_dim > 0:
            if config.rec_layer_type == "lstm":
                rec_layer = nn.LSTM
            elif config.rec_layer_type == "gru":
                rec_layer = nn.GRU
            elif config.rec_layer_type == "rnn":
                rec_layer = nn.RNN
            else:
                raise ValueError(f"Unsupported rec_layer_type: {config.rec_layer_type}")
            layer = rec_layer(
                input_size=config.rec_input_dim,
                hidden_size=config.rec_output_dim,
                num_layers=1,
                batch_first=True,
            )
        else:
            layer = None
        return layer

    def _init_linear_layer(self, config: Dict) -> nn.Linear:
        """Build the final linear projection layer."""
        return nn.Linear(
            in_features=config.linear_input_dim, out_features=config.linear_output_dim
        )
class NEABiRegPoolingModel(NEAPreTrainedModel):
    """
    Neural Essay Assessor (NEA) model of the "Bidirectional Regression Pooling"
    type described in the original research code: embedding -> optional CNN ->
    two independent recurrent layers (one over the time-reversed sequence) ->
    mean-over-time pooling of each direction -> concatenation -> linear -> sigmoid.

    This class inherits from :obj:`NEAPreTrainedModel` for weights initialization
    and utility functions from the transformers :obj:`PreTrainedModel` class.

    .. note::
        This model class will not work with the model weights stored on Azure Blob
        Storage. Refer to :obj:`NEARegPoolingModel` to use those pretrained weights.

    Args:
        config (:class:`~NEAConfig`): Model configuration class with all parameters
            required for the model. Initializing with a config file does not load
            the weights associated with the model, only the configuration.
            Use the :obj:`.from_pretrained` method to load the model weights.

    Example::
        config = NEAConfig()
        model = NEABiRegPoolingModel(config)
    """

    def __init__(self, config):
        super().__init__(config)
        self._validate_linear_input_dim(config)
        self.embedding_layer = self._init_embedding_layer(config)
        self.conv_layer = self._init_conv_layer(config)
        self.dropout = self._init_dropout(config)
        self.forward_rec_layer = self._init_rec_layer(config)
        self.backward_rec_layer = self._init_rec_layer(config)
        self.agg_layer = self._init_agg_layer(config)
        self.linear_layer = self._init_linear_layer(config)
        self.sigmoid = nn.Sigmoid()
        self.init_weights()
        self.loss_function = config.loss_function
        self.skip_init_bias = config.skip_init_bias

    def forward(
        self,
        input_ids: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        token_type_ids: Optional[torch.Tensor] = None,
        labels: Optional[torch.Tensor] = None
    ) -> NEAModelOutput:
        """Compute model output given input.

        Args:
            input_ids (:obj:`torch.Tensor`): input_ids of sequences, shape (batch_size, seq_len).
            attention_mask: unused; accepted for transformers API compatibility.
            token_type_ids: unused; accepted for transformers API compatibility.
            labels (:obj:`Optional[torch.Tensor]`): targets of shape (batch_size). Defaults to None.

        Returns:
            :obj:`NEAModelOutput`: logits of shape (batch_size, 1); loss included when labels given.
        """
        mask = self._generate_mask(input_ids)
        x = self.embedding_layer(input_ids)
        x = self.dropout(x)
        if self.conv_layer:
            # Bug fix: previously called the non-existent attribute `self.conv`,
            # raising AttributeError whenever cnn_output_dim > 0.
            # NOTE(review): nn.Conv1d expects (batch, channels, seq_len) while x
            # is (batch, seq_len, embedding_dim) here -- confirm whether a
            # transpose is required when the CNN path is enabled.
            x = self.conv_layer(x)
        self.agg_layer.init_mask(mask)
        if self.forward_rec_layer:
            x_forward, _ = self.forward_rec_layer(x)
            x_forward = self.dropout(x_forward)
            x_forward_mean = self.agg_layer(x_forward)
        if self.backward_rec_layer:
            # The backward pass runs over the time-reversed sequence.
            x_backward, _ = self.backward_rec_layer(torch.flip(x, [1]))
            x_backward = self.dropout(x_backward)
            x_backward_mean = self.agg_layer(x_backward)
        x = torch.cat((x_forward_mean, x_backward_mean), 1)
        x = self.linear_layer(x)
        logits = self.sigmoid(x)
        loss = None
        if labels is not None:
            if self.loss_function == "mse":
                loss_fct = torch.nn.MSELoss()
            elif self.loss_function == "mae":
                # Bug fix: previously read `self.args.loss_function`, but no
                # `args` attribute exists on this class -- the sibling branch
                # (and __init__) use `self.loss_function`.
                loss_fct = torch.nn.L1Loss()
            else:
                raise ValueError(f"Unsupported loss function: {self.loss_function}")
            loss = loss_fct(logits.view(-1), labels)
        return NEAModelOutput(loss=loss, logits=logits)

    def initialise_linear_bias(self, train_y: torch.Tensor) -> None:
        """Initialise the linear layer's bias to logit(mean(train_y)), per the NEA paper.

        Args:
            train_y (:obj:`torch.Tensor`): training targets, each in [0, 1].

        Raises:
            ValueError: if any element of ``train_y`` lies outside [0, 1].
        """
        if not self.skip_init_bias:
            input_checks = (train_y < 0) | (train_y > 1)
            if sum(input_checks) > 0:
                raise ValueError("Train y needs to be between 0 and 1")
            initial_mean_value = train_y.mean()
            initial_bias = torch.log(initial_mean_value) - torch.log(
                1 - initial_mean_value
            )
            self.linear_layer.bias.data.fill_(initial_bias)

    def load_pretrained_embedding(self, emb_matrix: torch.Tensor) -> None:
        """Load a pretrained embedding matrix into the embedding layer.

        Args:
            emb_matrix (:obj:`torch.Tensor`): tensor of embedding weights.

        Raises:
            ValueError: if ``emb_matrix`` does not match the layer's shape.
        """
        if self.embedding_layer.weight.shape != emb_matrix.shape:
            raise ValueError(
                "Dimensions of emb_matrix do not match embedding layer's dimensions"
            )
        self.embedding_layer.weight = nn.parameter.Parameter(emb_matrix)

    def _validate_linear_input_dim(self, config: Dict) -> None:
        """Ensure linear_input_dim is exactly 2 * rec_output_dim.

        The concatenation of the pooled forward and backward representations
        doubles the recurrent output width, so the linear layer's input must match.

        Raises:
            ValueError: if the dimensions do not satisfy this relationship.
        """
        if config.linear_input_dim != 2 * config.rec_output_dim:
            raise ValueError(
                "linear_input_dim should be 2 times the value of rec_output_dim due to the bidirectional property of \
                    the recurrent layer."
            )

    def _generate_mask(self, input_ids: torch.Tensor) -> torch.Tensor:
        """Count non-padding (non-zero) tokens per instance.

        Args:
            input_ids (torch.Tensor): shape (batch_size, seq_len).

        Returns:
            torch.Tensor: shape (batch_size, 1).
        """
        mask = (input_ids != 0).sum(axis=1)
        mask = torch.unsqueeze(mask, 1)
        return mask

    def _extract_last_hidden_state(
        self, x: torch.Tensor, mask: torch.Tensor, forward: bool
    ) -> torch.Tensor:
        """Take each sequence's last hidden state from the RNN output.

        NOTE(review): not referenced by this class's forward(), which pools with
        the aggregation layer instead; kept for parity with :obj:`NEABiRegModel`.

        Args:
            x (torch.Tensor): recurrent-layer output, shape (batch_size, seq_len, rec_dim).
            mask (torch.Tensor): non-padding lengths, shape (batch_size, 1).
            forward (bool): True for the forward-direction output.

        Returns:
            torch.Tensor: shape (batch_size, rec_dim).
        """
        array = []
        if forward:
            for i in range(len(x)):
                array.append(x[i, mask[i] - 1, :])
            x = torch.cat(array)
        else:
            for i in range(len(x)):
                array.append(x[i, [-1], :])
            x = torch.cat(array)
        return x

    def _init_embedding_layer(self, config: Dict) -> nn.Embedding:
        """Build the embedding layer (index 0 is the padding token)."""
        return nn.Embedding(
            num_embeddings=config.vocab_size,
            embedding_dim=config.embedding_dim,
            padding_idx=0,
        )

    def _init_conv_layer(self, config: Dict) -> Union[None, nn.Conv1d]:
        """Build the convolution layer, or return None when cnn_output_dim == 0."""
        if config.cnn_output_dim > 0:
            layer = nn.Conv1d(
                in_channels=config.cnn_input_dim,
                out_channels=config.cnn_output_dim,
                kernel_size=config.cnn_kernel_size,
                padding=config.cnn_padding,
            )
        else:
            layer = None
        return layer

    def _init_dropout(self, config: Dict) -> nn.Dropout:
        """Build the dropout layer from config.dropout."""
        return nn.Dropout(p=config.dropout)

    def _init_rec_layer(self, config: Dict) -> Union[None, nn.LSTM, nn.GRU, nn.RNN]:
        """Build a recurrent layer (lstm/gru/rnn), or None when rec_output_dim == 0."""
        if config.rec_output_dim > 0:
            if config.rec_layer_type == "lstm":
                rec_layer = nn.LSTM
            elif config.rec_layer_type == "gru":
                rec_layer = nn.GRU
            elif config.rec_layer_type == "rnn":
                rec_layer = nn.RNN
            else:
                raise ValueError(f"Unsupported rec_layer_type: {config.rec_layer_type}")
            layer = rec_layer(
                input_size=config.rec_input_dim,
                hidden_size=config.rec_output_dim,
                num_layers=1,
                batch_first=True,
            )
        else:
            layer = None
        return layer

    def _init_agg_layer(self, config: Dict) -> MeanOverTime:
        """Build the aggregation layer; only mean-over-time is supported here.

        Raises:
            ValueError: for an unknown aggregation type (previously this fell
                through to an UnboundLocalError).
        """
        if config.aggregation == "mot":
            layer = MeanOverTime()
        else:
            raise ValueError(f"Unsupported aggregation: {config.aggregation}")
        return layer

    def _init_linear_layer(self, config: Dict) -> nn.Linear:
        """Build the final linear projection layer."""
        return nn.Linear(
            in_features=config.linear_input_dim, out_features=config.linear_output_dim
        )
|
# ! /usr/bin/python
# -*- coding: utf-8 -*-
# =============================================================================
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import pytest
from nemo.backends.pytorch.tutorials import MSELoss, RealFunctionDataLayer, TaylorNet
from nemo.core import NeuralGraph, OperationMode
@pytest.mark.usefixtures("neural_factory")
class TestNeuralGraphImportExport:
    """
    Class testing Neural Graph configuration import/export.
    """

    @pytest.mark.unit
    def test_graph_simple_import_export(self, tmpdir):
        """
        Tests whether the Neural Module can instantiate a simple module by loading a configuration file.

        Args:
            tmpdir: Fixture which will provide a temporary directory.
        """
        # Build the modules the graph is composed of.
        data_layer = RealFunctionDataLayer(n=100, batch_size=1, name="tgio1_dl")
        taylor_net = TaylorNet(dim=4, name="tgio1_tn")
        mse_loss = MSELoss(name="tgio1_loss")
        # Wire them together inside a training-mode graph.
        with NeuralGraph(operation_mode=OperationMode.training) as g1:
            x, t = data_layer()
            p = taylor_net(x=x)
            _ = mse_loss(predictions=p, target=t)
        # Snapshot the graph before exporting it.
        serialized_g1 = g1.serialize()
        # Write the graph configuration into the temporary directory.
        tmp_file_name = str(tmpdir.mkdir("export").join("simple_graph.yml"))
        g1.export_to_config(tmp_file_name)
        # Re-import the configuration as a brand new graph.
        g2 = NeuralGraph.import_from_config(tmp_file_name, reuse_existing_modules=True)
        serialized_g2 = g2.serialize()
        # Round-tripping must not change the serialized form.
        assert serialized_g1 == serialized_g2
|
from .uasquad_question_answering import UaSquadDataset
from .utils import validate_txt, to_txt, to_json
|
import setuptools

# Package metadata for the jukebox project.
setuptools.setup(
    name="jukebox",
    version="0.0.1",
    author="Evgeny Petrov",
    packages=setuptools.find_packages(),
    # BUG FIX: this keyword was misspelled "install_reuires"; setuptools
    # ignores unknown keywords, so the runtime dependencies were never
    # actually declared or installed.
    install_requires=[
        "pygame",
        "spidev",
        "mfrc522",
    ],
)
|
import unittest
from ac2020.days.Day1 import Day1
class Day1Test(unittest.TestCase):
    """Unit tests for the Advent-of-Code 2020 Day 1 solver."""

    def _solve(self, puzzle_input):
        """Run both parts of Day1 against the given raw input string."""
        day = Day1()
        day._set_input(puzzle_input)
        return day.part1(), day.part2()

    def test_empty_input(self):
        part1, part2 = self._solve("")
        self.assertEqual("No result", part1)
        self.assertEqual("No result", part2)

    def test_correct_input(self):
        part1, part2 = self._solve("1721 979 366 299 675 1456")
        self.assertEqual("514579", part1)
        self.assertEqual("241861950", part2)

    def test_tricky_input(self):
        """
        1010 + 1010 could result in 2020, but an entry only can be used once.
        Same for 1010 + 505 + 505.
        """
        part1, part2 = self._solve("1010 505")
        self.assertEqual("No result", part1)
        self.assertEqual("No result", part2)

    def test_non_numeric_input(self):
        part1, part2 = self._solve("1 15 30 40 2019 asd 1975")
        self.assertEqual("No result", part1)
        self.assertEqual("No result", part2)
|
import demistomock as demisto
from CommonServerPython import *
import ast
# Disable insecure warnings: silences urllib3's warnings (e.g. the
# InsecureRequestWarning emitted for HTTPS requests made with verify=False).
requests.packages.urllib3.disable_warnings()
""" GLOBAL VARS """
APP_NAME = "ms-azure-sc"
SUB_ID_REQUIRING_CMD = (
"azure-sc-get-alert",
"azure-sc-list-alert",
"azure-sc-update-alert",
"azure-sc-list-location",
"azure-sc-update-atp",
"azure-sc-get-atp",
"azure-sc-update-aps",
"azure-sc-list-aps",
"azure-sc-get-aps",
"azure-sc-list-jit",
"azure-sc-get-jit",
"azure-sc-initiate-jit",
"azure-sc-delete-jit",
"azure-sc-list-storage",
)
# API Versions
SUBSCRIPTION_API_VERSION = "2015-01-01"
ALERT_API_VERSION = "2019-01-01"
LOCATION_API_VERSION = "2015-06-01-preview"
ATP_API_VERSION = "2017-08-01-preview"
APS_API_VERSION = "2017-08-01-preview"
IPP_API_VERSION = "2017-08-01-preview"
JIT_API_VERSION = "2015-06-01-preview"
STORAGE_API_VERSION = "2018-07-01"
SECURE_STORES_API_VERSION = "2020-01-01"
""" HELPER FUNCTIONS """
# Format ports in JIT access policy rule to (portNum, protocol, allowedAddress, maxDuration)
def format_jit_port_rule(ports):
    """Render a JIT policy's port rules as a comma-separated list of tuples."""
    formatted = []
    for port in ports:
        # str() decodes unicode items to plain ascii strings.
        number = str(port.get("number"))
        source = port.get("allowedSourceAddressPrefix")
        source = "any" if source == "*" else str(source)
        protocol = port.get("protocol")
        protocol = "any" if protocol == "*" else str(protocol)
        duration = str(port.get("maxRequestAccessDuration"))
        formatted.append(str((number, protocol, source, duration)))
    return ", ".join(formatted)
# Format ports in JIT access request to (portNum, allowedAddress, endTime, status)
def format_jit_port_request(ports):
    """Render a JIT access request's ports as a comma-separated list of tuples."""
    formatted = []
    for port in ports:
        # str() decodes unicode items to plain ascii strings.
        number = str(port.get("number"))
        source = port.get("allowedSourceAddressPrefix")
        source = "any" if source == "*" else str(source)
        status = str(port.get("status"))
        end_time = str(port.get("endTimeUtc"))
        formatted.append(str((number, source, end_time, status)))
    return ", ".join(formatted)
def normalize_context_key(string):
    """Normalize a context key: upper-case the first character, drop all spaces.

    Args:
        string (str): raw key name

    Returns:
        str: normalized key
    """
    capitalized = string[:1].upper() + string[1:]
    return capitalized.replace(" ", "")
class MsClient:
    """
    Microsoft Client enables authorized access to Azure Security Center.

    Thin wrapper around ``MicrosoftClient``: every method assembles the
    resource URL plus its pinned ``api-version`` parameter and delegates the
    HTTP call to ``self.ms_client.http_request``.
    """
    def __init__(self, tenant_id, auth_id, enc_key, app_name, server, verify, proxy, self_deployed, subscription_id,
                 ok_codes):
        # Subscription-scoped endpoints resolve against this base URL.
        base_url_with_subscription = f"{server}subscriptions/{subscription_id}/"
        self.ms_client = MicrosoftClient(
            tenant_id=tenant_id, auth_id=auth_id, enc_key=enc_key, app_name=app_name,
            base_url=base_url_with_subscription, verify=verify, proxy=proxy, self_deployed=self_deployed,
            ok_codes=ok_codes, scope="https://management.azure.com/.default")
        # Kept for calls that must bypass the subscription-scoped base URL
        # (see list_ipp / get_ipp / list_sc_subscriptions, which build full_url).
        self.server = server
        self.subscription_id = subscription_id

    def get_alert(self, resource_group_name, asc_location, alert_id):
        """Retrieve a single alert.

        Args:
            resource_group_name (str): ResourceGroupName (optional; omitted from the URL when falsy)
            asc_location (str): Azure Security Center location
            alert_id (str): Alert ID

        Returns:
            response body (dict)
        """
        cmd_url = f"/resourceGroups/{resource_group_name}" if resource_group_name else ""
        cmd_url += f"/providers/Microsoft.Security/locations/{asc_location}/alerts/{alert_id}"
        params = {'api-version': ALERT_API_VERSION}
        return self.ms_client.http_request(
            method="GET", url_suffix=cmd_url, params=params)

    def list_alerts(self, resource_group_name, asc_location, filter_query, select_query, expand_query):
        """Listing alerts

        Args:
            resource_group_name (str): ResourceGroupName
            asc_location (str): Azure Security Center location
            filter_query (str): what to filter
            select_query (str): what to select
            expand_query (str): what to expand

        Returns:
            dict: contains response body
        """
        if resource_group_name:
            cmd_url = f"/resourceGroups/{resource_group_name}/providers/Microsoft.Security"
            # ascLocation must be using with specifying resourceGroupName
            if asc_location:
                cmd_url += f"/locations/{asc_location}"
            cmd_url += "/alerts"
        else:
            cmd_url = "/providers/Microsoft.Security/alerts"
        params = {'api-version': ALERT_API_VERSION}
        # OData-style query options are only sent when supplied.
        if filter_query:
            params['$filter'] = filter_query
        if select_query:
            params['$select'] = select_query
        if expand_query:
            params['$expand'] = expand_query
        return self.ms_client.http_request(method="GET", url_suffix=cmd_url, params=params)

    def update_alert(self, resource_group_name, asc_location, alert_id, alert_update_action_type):
        """Apply an action (e.g. dismiss) to an alert. Returns nothing.

        Args:
            resource_group_name (str): Resource Name Group (optional; omitted from the URL when falsy)
            asc_location (str): Azure Security Center Location
            alert_id (str): Alert ID
            alert_update_action_type (str): What update type need to update

        Returns:
            dict: response body
        """
        cmd_url = f"/resourceGroups/{resource_group_name}" if resource_group_name else ""
        cmd_url += f"/providers/Microsoft.Security/locations/{asc_location}/alerts/{alert_id}/" \
                   f"{alert_update_action_type}"
        params = {"api-version": ALERT_API_VERSION}
        # Using resp_type=response to avoid parsing error.
        self.ms_client.http_request(method="POST", url_suffix=cmd_url, params=params, resp_type='response')

    def list_locations(self):
        """List the Azure Security Center locations of the subscription.

        Returns:
            dict: response body
        """
        cmd_url = "/providers/Microsoft.Security/locations"
        params = {"api-version": LOCATION_API_VERSION}
        return self.ms_client.http_request(method="GET", url_suffix=cmd_url, params=params)

    def update_atp(self, resource_group_name, storage_account, setting_name, is_enabled):
        """Enable/disable Advanced Threat Protection on a storage account.

        Args:
            resource_group_name (str): Resource Group Name
            storage_account (str): Storage Account
            setting_name (str): Setting Name
            is_enabled (str): true/false

        Returns:
            dict: response body
        """
        cmd_url = f"/resourceGroups/{resource_group_name}/providers/Microsoft.Storage/storageAccounts/" \
                  f"{storage_account}/providers/Microsoft.Security/advancedThreatProtectionSettings/{setting_name}"
        params = {"api-version": ATP_API_VERSION}
        data = {
            "id": f"/subscriptions/{self.subscription_id}/resourceGroups/{resource_group_name}/providers/"
                  f"Microsoft.Storage/storageAccounts/{storage_account}/providers/Microsoft.Security/"
                  f"advancedThreatProtectionSettings/{setting_name}",
            "name": setting_name,
            "type": "Microsoft.Security/advancedThreatProtectionSettings",
            "properties": {"isEnabled": is_enabled},
        }
        # NOTE(review): an earlier comment here claimed resp_type=response is
        # used to avoid a parsing error, but no resp_type is passed — the
        # parsed JSON body is returned. Confirm which is intended.
        return self.ms_client.http_request(method="PUT", url_suffix=cmd_url, json_data=data, params=params)

    def get_atp(self, resource_group_name, storage_account, setting_name):
        """Get the Advanced Threat Protection setting of a storage account.

        Args:
            resource_group_name (str): Resource Group Name
            storage_account (str): Storage Account
            setting_name (str): Setting Name

        Returns:
            dict: response body
        """
        cmd_url = f"/resourceGroups/{resource_group_name}/providers/Microsoft.Storage/storageAccounts" \
                  f"/{storage_account}/providers/Microsoft.Security/advancedThreatProtectionSettings/{setting_name}"
        params = {"api-version": ATP_API_VERSION}
        return self.ms_client.http_request(method="GET", url_suffix=cmd_url, params=params)

    def update_aps(self, setting_name, auto_provision):
        """Update an auto-provisioning setting.

        Args:
            setting_name (str): Setting name
            auto_provision (str): Auto provision setting (On/Off)

        Returns:
            dict: response body
        """
        cmd_url = f"/providers/Microsoft.Security/autoProvisioningSettings/{setting_name}"
        params = {"api-version": APS_API_VERSION}
        data = {"properties": {"autoProvision": auto_provision}}
        return self.ms_client.http_request(method="PUT", url_suffix=cmd_url, json_data=data, params=params)

    def list_aps(self):
        """List the subscription's auto-provisioning settings.

        Returns:
            dict: response body
        """
        cmd_url = "/providers/Microsoft.Security/autoProvisioningSettings"
        params = {"api-version": APS_API_VERSION}
        return self.ms_client.http_request(method="GET", url_suffix=cmd_url, params=params)

    def get_aps(self, setting_name):
        """Get a single auto-provisioning setting.

        Args:
            setting_name: Setting name

        Returns:
            dict: response body
        """
        cmd_url = f"/providers/Microsoft.Security/autoProvisioningSettings/{setting_name}"
        params = {"api-version": APS_API_VERSION}
        return self.ms_client.http_request(method="GET", url_suffix=cmd_url, params=params)

    def list_ipp(self, management_group=None):
        """List information protection policies.

        Args:
            management_group: Management group to pull (if needed)

        Returns:
            dict: response body
        """
        params = {"api-version": IPP_API_VERSION}
        cmd_url = "/providers/Microsoft.Security/informationProtectionPolicies"
        if management_group:
            # Management-group scope is not under the subscription base URL.
            full_url = f"{self.server}/providers/Microsoft.Management/managementGroups/{management_group}"
            full_url += cmd_url
            return self.ms_client.http_request(method="GET", full_url=full_url, url_suffix="", params=params)
        if not self.subscription_id:
            raise DemistoException("A subscription ID must be provided.")
        return self.ms_client.http_request(method="GET", url_suffix=cmd_url, params=params)

    def get_ipp(self, policy_name, management_group):
        """Get a single information protection policy.

        Args:
            policy_name (str): Policy name
            management_group (str): Management group

        Returns:
            dict: response body
        """
        params = {"api-version": IPP_API_VERSION}
        cmd_url = f"/providers/Microsoft.Security/informationProtectionPolicies/{policy_name}"
        if management_group:
            # Management-group scope is not under the subscription base URL.
            full_url = f"{self.server}/providers/Microsoft.Management/managementGroups/{management_group}"
            full_url += cmd_url
            return self.ms_client.http_request(method="GET", full_url=full_url, url_suffix="", params=params)
        return self.ms_client.http_request(method="GET", url_suffix=cmd_url, params=params)

    def list_jit(self, asc_location, resource_group_name):
        """List just-in-time network access policies.

        Args:
            asc_location: Machine location (optional; omitted from the URL when falsy)
            resource_group_name: Resource group name (optional; omitted from the URL when falsy)

        Returns:
            dict: response body
        """
        params = {"api-version": JIT_API_VERSION}
        cmd_url = f"/resourceGroups/{resource_group_name}" if resource_group_name else ""
        cmd_url += f"/providers/Microsoft.Security/locations/{asc_location}" if asc_location else ""
        cmd_url += "/providers/Microsoft.Security/jitNetworkAccessPolicies"
        return self.ms_client.http_request(method="GET", url_suffix=cmd_url, params=params)

    def get_jit(self, policy_name, asc_location, resource_group_name):
        """Get a single just-in-time network access policy.

        Args:
            policy_name: Policy name
            asc_location: Machine location
            resource_group_name: Resource name group

        Returns:
            dict: response body
        """
        cmd_url = f"/resourceGroups/{resource_group_name}/providers/Microsoft.Security/locations/{asc_location}/" \
                  f"jitNetworkAccessPolicies/{policy_name}"
        params = {"api-version": JIT_API_VERSION}
        return self.ms_client.http_request(method="GET", url_suffix=cmd_url, params=params)

    def initiate_jit(self, resource_group_name, asc_location, policy_name, vm_id, port, source_address, duration):
        """Starting new Just-in-time machine

        Args:
            resource_group_name: Resource group name
            asc_location: Machine location
            policy_name: Policy name
            vm_id: Virtual Machine ID
            port: ports to be used
            source_address: Source address
            duration: Time in

        Returns:
            requests.Response: raw response (expected status 202 Accepted)
        """
        cmd_url = f"/resourceGroups/{resource_group_name}/providers/Microsoft.Security/locations/{asc_location}/" \
                  f"jitNetworkAccessPolicies/{policy_name}/initiate"
        params = {"api-version": JIT_API_VERSION}
        # only supports init access for one vm and one port now
        data = {
            "virtualMachines": [
                {
                    "ID": vm_id,
                    "ports": [
                        {
                            "number": port,
                            "duration": duration,
                            "allowedSourceAddressPrefix": source_address,
                        }
                    ],
                }
            ]
        }
        # response code should be 202 Accepted
        return self.ms_client.http_request(method="POST", url_suffix=cmd_url, json_data=data, params=params,
                                           resp_type="response")

    def delete_jit(self, asc_location, resource_group_name, policy_name):
        """Delete a just-in-time network access policy. Returns nothing.

        Args:
            asc_location: Machine location
            resource_group_name: Resource group name
            policy_name: Policy name
        """
        cmd_url = f"/resourceGroups/{resource_group_name}/providers/Microsoft.Security/locations/{asc_location}/" \
                  f"jitNetworkAccessPolicies/{policy_name}"
        params = {"api-version": JIT_API_VERSION}
        # Using resp_type=text to avoid parsing error. response should be 204
        self.ms_client.http_request(method="DELETE", url_suffix=cmd_url, params=params, resp_type='text')

    def list_sc_storage(self):
        """List the subscription's storage accounts.

        Returns:
            dict: response body
        """
        cmd_url = "/providers/Microsoft.Storage/storageAccounts"
        params = {"api-version": STORAGE_API_VERSION}
        return self.ms_client.http_request(method="GET", url_suffix=cmd_url, params=params)

    def list_sc_subscriptions(self):
        """List the subscriptions visible to the tenant.

        Returns:
            dict: response body
        """
        # Not subscription-scoped; bypasses the base URL on purpose.
        full_url = f"{self.server}/subscriptions"
        params = {"api-version": SUBSCRIPTION_API_VERSION}
        return self.ms_client.http_request(method="GET", full_url=full_url, url_suffix="", params=params)

    def get_secure_scores(self, secure_score_name):
        """Get a secure score by name.

        Args:
            secure_score_name: Secure score name

        Returns:
            dict: response body
        """
        cmd_url = f"/providers/Microsoft.Security/secureScores/{secure_score_name}"
        params = {"api-version": SECURE_STORES_API_VERSION}
        return self.ms_client.http_request(method="GET", url_suffix=cmd_url, params=params)
""" FUNCTIONS """
""" Alert Start """
def get_alert_command(client: MsClient, args: dict):
    """Getting specified alert from API

    Builds up to three war-room entries (basic properties, extended
    properties, entities) and hands them to ``demisto.results``.

    Args:
        client: initialised ``MsClient``
        args (dict): dictionary containing commands args
            (resource_group_name, asc_location, alert_id)
    """
    resource_group_name = args.get("resource_group_name")
    asc_location = args.get("asc_location")
    alert_id = args.get("alert_id")
    alert = client.get_alert(resource_group_name, asc_location, alert_id)
    final_output = list()

    # Basic Property Table
    properties = alert.get("properties")
    if properties:
        basic_table_output = [
            {
                "DisplayName": properties.get("alertDisplayName"),
                "CompromisedEntity": properties.get("compromisedEntity"),
                "Description": properties.get("description"),
                "DetectedTime": properties.get("detectedTimeUtc"),
                "ReportedTime": properties.get("reportedTimeUtc"),
                "ReportedSeverity": properties.get("reportedSeverity"),
                "ConfidenceScore": properties.get("confidenceScore", "None"),
                "State": properties.get("state"),
                "ActionTaken": properties.get("actionTaken"),
                "CanBeInvestigated": properties.get("canBeInvestigated"),
                "RemediationSteps": properties.get("remediationSteps"),
                "VendorName": properties.get("vendorName"),
                "AssociatedResource": properties.get("associatedResource"),
                "AlertName": properties.get("alertName"),
                "InstanceID": properties.get("instanceId", "None"),
                "ID": alert.get("name"),
                "ExtendedProperties": properties.get("extendedProperties"),
                "Entities": properties.get("entities"),
                "SubscriptionID": properties.get("subscriptionId"),
            }
        ]
        md = tableToMarkdown(
            "Azure Security Center - Get Alert - Basic Property",
            basic_table_output,
            [
                "DisplayName",
                "CompromisedEntity",
                "Description",
                "DetectedTime",
                "ReportedTime",
                "ReportedSeverity",
                "ConfidenceScore",
                "State",
                "ActionTaken",
                "CanBeInvestigated",
                "RemediationSteps",
                "VendorName",
                "AssociatedResource",
                "AlertName",
                "InstanceID",
                "ID"],
            removeNull=True)
        ec = {"AzureSecurityCenter.Alert(val.ID && val.ID === obj.ID)": basic_table_output}
        basic_table_entry = {
            "Type": entryTypes["note"],
            "Contents": alert,
            "ContentsFormat": formats["json"],
            "ReadableContentsFormat": formats["markdown"],
            "HumanReadable": md,
            "EntryContext": ec,
        }
        final_output.append(basic_table_entry)

        # Extended Properties Table
        # (The original re-checked alert.get("properties") twice here;
        # `properties` is already known truthy inside this branch.)
        if isinstance(properties.get("extendedProperties"), dict):
            extended_properties = {
                normalize_context_key(key): value
                for key, value in properties["extendedProperties"].items()
            }
            extended_table_entry = {
                "Type": entryTypes["note"],
                "Contents": properties["extendedProperties"],
                "ContentsFormat": formats["json"],
                "ReadableContentsFormat": formats["markdown"],
                "HumanReadable": tableToMarkdown(
                    "Azure Security Center - Get Alert - Extended Property",
                    extended_properties,
                    removeNull=True,
                ),
            }
            final_output.append(extended_table_entry)

        # Entities Table
        entities = properties.get("entities")
        # BUG FIX: the original guarded this with isinstance(entities, dict),
        # but then iterated it and read entity["type"] — for a dict that
        # iterates plain string keys and raises. The alert "entities" payload
        # is a list of entity objects; guard on list instead.
        if entities and isinstance(entities, list):
            entities_table_output = list()
            for entity in entities:
                entities_table_output.append(
                    {
                        "Content": ast.literal_eval(str(entity)),
                        "Type": entity["type"],
                    }
                )
            md = tableToMarkdown(
                "Azure Security Center - Get Alert - Entity",
                entities_table_output,
                removeNull=True,
            )
            entities_table_entry = {
                "Type": entryTypes["note"],
                "Contents": entities,
                "ContentsFormat": formats["json"],
                "ReadableContentsFormat": formats["markdown"],
                "HumanReadable": md,
            }
            final_output.append(entities_table_entry)
    demisto.results(final_output)
def list_alerts_command(client: MsClient, args: dict):
    """Getting all alerts

    Args:
        client: initialised ``MsClient``
        args (dict): usually demisto.args()

    Returns:
        tuple: (markdown table, entry context, raw alerts list)
    """
    resource_group_name = args.get("resource_group_name")
    asc_location = args.get("asc_location")
    filter_query = args.get("filter")
    select_query = args.get("select")
    expand_query = args.get("expand")
    alerts = client.list_alerts(
        resource_group_name, asc_location, filter_query, select_query, expand_query
    ).get("value")

    outputs = list()
    for alert in alerts:
        properties = alert.get("properties")
        if not properties:
            continue
        outputs.append(
            {
                "DisplayName": properties.get("alertDisplayName"),
                "CompromisedEntity": properties.get("compromisedEntity"),
                "DetectedTime": properties.get("detectedTimeUtc"),
                "ReportedSeverity": properties.get("reportedSeverity"),
                "State": properties.get("state"),
                "ActionTaken": properties.get("actionTaken"),
                "Description": properties.get("description"),
                "ID": alert.get("name"),
            }
        )
    md = tableToMarkdown(
        "Azure Security Center - List Alerts",
        outputs,
        [
            "DisplayName",
            "CompromisedEntity",
            "DetectedTime",
            "ReportedSeverity",
            "State",
            "ActionTaken",
            "Description",
            "ID",
        ],
        removeNull=True,
    )
    ec = {"AzureSecurityCenter.Alert(val.ID && val.ID === obj.ID)": outputs}
    return md, ec, alerts
# There's a Microsoft API bug for reactivate alert -
# https://social.msdn.microsoft.com/Forums/windows/en-US/c2139e1b-b26c-4264-a558-fa4b180b70e7/issue-while-setting-security-alert-state-from-dismiss-to-active?forum=AzureSecurityCenter
def update_alert_command(client: MsClient, args: dict):
"""Update given alert
Args:
client: MsClient
args (dict): usually demisto.args()
"""
resource_group_name = args.get("resource_group_name")
asc_location = args.get("asc_location")
alert_id = args.get("alert_id")
alert_update_action_type = args.get("alert_update_action_type")
client.update_alert(resource_group_name, asc_location, alert_id, alert_update_action_type)
outputs = {"ID": alert_id, "ActionTaken": alert_update_action_type}
ec = {"AzureSecurityCenter.Alert(val.ID && val.ID === obj.ID)": outputs}
return f"Alert - {alert_id} has been set to {alert_update_action_type}.", ec, None
""" Alert End """
""" Location Start """
def list_locations_command(client: MsClient):
    """Getting all locations

    Returns:
        tuple: (markdown table, entry context, raw locations list) or a
            "No locations found" message when the API returns nothing.
    """
    locations = client.list_locations().get("value")
    if not locations:
        return "No locations found", None, None

    outputs = list()
    for location in locations:
        location_properties = location.get("properties")
        home_region_name = None
        if location_properties and location_properties.get("homeRegionName"):
            home_region_name = location_properties.get("homeRegionName")
        outputs.append(
            {
                "HomeRegionName": home_region_name,
                "Name": location.get("name"),
                "ID": location.get("id"),
            }
        )
    md = tableToMarkdown(
        "Azure Security Center - List Locations",
        outputs,
        ["HomeRegionName", "Name", "ID"],
        removeNull=True,
    )
    ec = {"AzureSecurityCenter.Location(val.ID && val.ID === obj.ID)": outputs}
    return md, ec, locations
""" Location End """
""" Advanced Threat Protection Start """
def update_atp_command(client: MsClient, args: dict):
    """Updating given Advanced Threat Protection setting (enable/disable)

    Args:
        client: initialised ``MsClient``
        args (dict): usually demisto.args()

    Returns:
        tuple: (markdown table, entry context, raw response)
    """
    resource_group_name = args.get("resource_group_name")
    setting_name = args.get("setting_name")
    is_enabled = args.get("is_enabled")
    storage_account = args.get("storage_account")
    response = client.update_atp(resource_group_name, storage_account, setting_name, is_enabled)
    outputs = {
        "ID": response.get("id"),
        "Name": response.get("name"),
        # BUG FIX: the API property is camelCase "isEnabled" (see
        # get_atp_command and the request body in MsClient.update_atp); the
        # original read the non-existent snake_case key "is_enabled" and
        # always produced None. Guard against a missing "properties" too.
        "IsEnabled": (response.get("properties") or {}).get("isEnabled"),
    }
    md = tableToMarkdown(
        "Azure Security Center - Update Advanced Threat Detection Setting",
        outputs,
        ["ID", "Name", "IsEnabled"],
        removeNull=True,
    )
    ec = {
        "AzureSecurityCenter.AdvancedThreatProtection(val.ID && val.ID === obj.ID)": outputs
    }
    return md, ec, response
def get_atp_command(client: MsClient, args: dict):
    """Get given Advanced Threat Protection settings

    Args:
        client: initialised ``MsClient``
        args (dict): usually demisto.args()

    Returns:
        tuple: (markdown table, entry context, raw response)
    """
    resource_group_name = args.get("resource_group_name")
    setting_name = args.get("setting_name")
    storage_account = args.get("storage_account")
    response = client.get_atp(resource_group_name, storage_account, setting_name)
    outputs = {
        "ID": response.get("id"),
        "Name": response.get("name"),
        # BUG FIX: the original used `value if value else None`, which
        # collapsed an explicit `isEnabled: false` to None; read the key
        # directly so False survives.
        "IsEnabled": (response.get("properties") or {}).get("isEnabled"),
    }
    md = tableToMarkdown(
        "Azure Security Center - Get Advanced Threat Detection Setting",
        outputs,
        ["ID", "Name", "IsEnabled"],
        removeNull=True,
    )
    ec = {
        "AzureSecurityCenter.AdvancedThreatProtection(val.ID && val.ID === obj.ID)": outputs
    }
    return md, ec, response
""" Advanced Threat Protection End """
""" Auto Provisioning Settings Start """
def update_aps_command(client: MsClient, args: dict):
    """Update an auto-provisioning setting.

    Args:
        client: initialised ``MsClient``
        args (dict): usually demisto.args()

    Returns:
        tuple: (markdown table, entry context, raw setting)
    """
    setting_name = args.get("setting_name")
    auto_provision = args.get("auto_provision")
    setting = client.update_aps(setting_name, auto_provision)
    outputs = [
        {
            "Name": setting.get("name"),
            # BUG FIX: the API property is camelCase "autoProvision" (see
            # list_aps_command and the request body in MsClient.update_aps);
            # the original read the non-existent key "auto_provision" and
            # always produced None.
            "AutoProvision": (setting.get("properties") or {}).get("autoProvision"),
            "ID": setting.get("id"),
        }
    ]
    md = tableToMarkdown(
        "Azure Security Center - Update Auto Provisioning Setting",
        outputs,
        ["Name", "AutoProvision", "ID"],
        removeNull=True,
    )
    ec = {
        "AzureSecurityCenter.AutoProvisioningSetting(val.ID && val.ID === obj.ID)": outputs
    }
    return md, ec, setting
def list_aps_command(client: MsClient):
    """List all auto-provisioning settings of the subscription.

    Returns:
        tuple: (markdown table, entry context, raw settings list)
    """
    settings = client.list_aps().get("value")
    outputs = []
    for setting in settings:
        setting_properties = setting.get("properties")
        if setting_properties and setting_properties.get("autoProvision"):
            auto_provision = setting_properties.get("autoProvision")
        else:
            auto_provision = None
        outputs.append(
            {
                "Name": setting.get("name"),
                "AutoProvision": auto_provision,
                "ID": setting.get("id"),
            }
        )
    md = tableToMarkdown(
        "Azure Security Center - List Auto Provisioning Settings",
        outputs,
        ["Name", "AutoProvision", "ID"],
        removeNull=True,
    )
    ec = {
        "AzureSecurityCenter.AutoProvisioningSetting(val.ID && val.ID === obj.ID)": outputs
    }
    return md, ec, settings
def get_aps_command(client: MsClient, args: dict):
    """Get a single auto-provisioning setting.

    Args:
        client: initialised ``MsClient``
        args (dict): usually demisto.args()

    Returns:
        tuple: (markdown table, entry context, raw setting)
    """
    setting_name = args.get("setting_name")
    setting = client.get_aps(setting_name)
    setting_properties = setting.get("properties")
    if setting_properties and setting_properties.get("autoProvision"):
        auto_provision = setting_properties.get("autoProvision")
    else:
        auto_provision = None
    outputs = [
        {
            "Name": setting.get("name"),
            "AutoProvision": auto_provision,
            "ID": setting["id"],
        }
    ]
    md = tableToMarkdown(
        "Azure Security Center - Get Auto Provisioning Setting",
        outputs,
        ["Name", "AutoProvision", "ID"],
        removeNull=True,
    )
    ec = {
        "AzureSecurityCenter.AutoProvisioningSetting(val.ID && val.ID === obj.ID)": outputs
    }
    return md, ec, setting
""" Auto Provisioning Settings End """
""" Information Protection Policies Start """
# Unsupported command. issue: issues/24583
def list_ipp_command(client: MsClient, args: dict):
    """Listing all information protection policies

    Args:
        client: initialised ``MsClient``
        args (dict): usually demisto.args()
    """
    management_group = args.get("management_group")
    policies = client.list_ipp(management_group).get("value")
    if not policies:
        demisto.results("No policies found")
        return

    outputs = list()
    for policy in policies:
        policy_properties = policy.get("properties")
        if policy_properties and policy_properties.get("labels"):
            label_names = ", ".join(
                label.get("displayName")
                for label in policy_properties["labels"].values()
            )
            information_type_names = ", ".join(
                it["displayName"]
                for it in policy_properties["informationTypes"].values()
            )
        else:
            label_names = ""
            information_type_names = ""
        if policy_properties and policy_properties.get("informationTypes"):
            information_types = policy_properties.get("informationTypes")
        else:
            information_types = None
        outputs.append(
            {
                "Name": policy.get("name"),
                "Labels": label_names,
                "InformationTypeNames": information_type_names,
                "InformationTypes": information_types,
                "ID": policy["id"],
            }
        )
    md = tableToMarkdown(
        "Azure Security Center - List Information Protection Policies",
        outputs,
        ["Name", "Labels", "InformationTypeNames", "ID"],
        removeNull=True,
    )
    ec = {
        "AzureSecurityCenter.InformationProtectionPolicy(val.ID && val.ID === obj.ID)": outputs
    }
    entry = {
        "Type": entryTypes["note"],
        "Contents": policies,
        "ContentsFormat": formats["json"],
        "ReadableContentsFormat": formats["markdown"],
        "HumanReadable": md,
        "EntryContext": ec,
    }
    demisto.results(entry)
# Unsupported command. issue: issues/24583
def get_ipp_command(client: MsClient, args: dict):
    """Getting a single information protection policy

    Args:
        client: initialised ``MsClient``
        args (dict): usually demisto.args()
    """
    policy_name = args.get("policy_name")
    management_group = args.get("management_group")
    policy = client.get_ipp(policy_name, management_group)
    properties = policy.get("properties")
    # BUG FIX: the original dereferenced properties.get("labels") before
    # checking that properties is not None, crashing on policies without a
    # "properties" object.
    labels = properties.get("labels") if properties else None
    if properties and isinstance(labels, dict):
        # Basic Property table
        labels = ", ".join(
            [
                (str(label.get("displayName")) + str(label.get("enabled")))
                for label in labels.values()
            ]
        )
        basic_table_output = [
            {"Name": policy.get("name"), "Labels": labels, "ID": policy.get("id")}
        ]
        md = tableToMarkdown(
            "Azure Security Center - Get Information Protection Policy - Basic Property",
            basic_table_output,
            ["Name", "Labels", "ID"],
            removeNull=True,
        )
        ec = {
            "AzureSecurityCenter.InformationProtectionPolicy(val.ID && val.ID === obj.ID)": basic_table_output
        }
        basic_table_entry = {
            "Type": entryTypes["note"],
            "Contents": policy,
            "ContentsFormat": formats["json"],
            "ReadableContentsFormat": formats["markdown"],
            "HumanReadable": md,
            "EntryContext": ec,
        }
        # Information Type table
        info_type_table_output = list()
        for information_type_data in properties.get("informationTypes").values():
            keywords = ", ".join(
                [(str(keyword.get("displayName")) + str(keyword.get("custom")) + str(keyword.get("canBeNumeric")))
                 for keyword in information_type_data.get("keywords", [])])
            info_type_table_output.append(
                {
                    # BUG FIX: the API key is camelCase "displayName"; the
                    # original read "displayname" and always got None.
                    "DisplayName": information_type_data.get("displayName"),
                    # BUG FIX: these three fields originally *called* the dict
                    # (e.g. information_type_data("enabled")), which raises
                    # TypeError; they must be .get() lookups.
                    "Enabled": information_type_data.get("enabled"),
                    "Custom": information_type_data.get("custom"),
                    "Keywords": keywords,
                    "RecommendedLabelID": information_type_data.get("recommendedLabelId"),
                }
            )
        md = tableToMarkdown(
            "Azure Security Center - Get Information Protection Policy - Information Types",
            info_type_table_output,
            ["DisplayName", "Enabled", "Custom", "Keywords", "RecommendedLabelID"],
            removeNull=True,
        )
        info_type_table_entry = {
            "Type": entryTypes["note"],
            "Contents": properties.get("informationTypes"),
            "ContentsFormat": formats["json"],
            "ReadableContentsFormat": formats["markdown"],
            "HumanReadable": md,
        }
        demisto.results([basic_table_entry, info_type_table_entry])
    else:
        demisto.results("No properties found in {}".format(management_group))
""" Information Protection Policies End """
""" Jit Network Access Policies Start """
def list_jit_command(client: MsClient, args: dict):
    """Lists all Just-in-time Virtual Machines.

    Args:
        client: authenticated MsClient wrapper for the Azure Security Center API.
        args (dict): usually demisto.args(); may contain ``asc_location`` and
            ``resource_group_name`` to narrow the listing scope.

    Returns:
        tuple: (human readable markdown, entry context dict, raw API response)
    """
    asc_location = args.get("asc_location")
    resource_group_name = args.get("resource_group_name")
    policies = client.list_jit(asc_location, resource_group_name)["value"]
    outputs = []
    for policy in policies:
        # summarize rules in (VMName: allowPort,...) format
        if policy.get("properties") and policy.get("properties").get("virtualMachines"):
            rules_data = policy["properties"]["virtualMachines"]
            rules_summary_array = []
            for rule in rules_data:
                ID = rule.get("id")
                # The VM name is the last segment of the ARM resource ID.
                if isinstance(ID, str):
                    vm_name = ID.split("/")[-1]
                else:
                    vm_name = None  # type: ignore
                # Default to an empty list: a rule without a "ports" key
                # previously raised TypeError (iterating None).
                vm_ports = [str(port.get("number")) for port in rule.get("ports", [])]
                rules_summary_array.append(
                    "({}: {})".format(vm_name, ", ".join(vm_ports))
                )
            rules = ", ".join(rules_summary_array)
            outputs.append(
                {
                    "Name": policy.get("name"),
                    "Rules": rules,
                    "Location": policy.get("location"),
                    "Kind": policy.get("kind"),
                    "ID": policy.get("id"),
                }
            )
    md = tableToMarkdown(
        "Azure Security Center - List JIT Access Policies",
        outputs,
        ["Name", "Rules", "Location", "Kind"],
        removeNull=True,
    )
    ec = {"AzureSecurityCenter.JITPolicy(val.ID && val.ID === obj.ID)": outputs}
    return md, ec, policies
# Unsupported command. issue: issues/24583
def get_jit_command(client: MsClient, args: dict):
    """Getting given Just-in-time machine.

    Args:
        client: authenticated MsClient wrapper for the Azure Security Center API.
        args (dict): usually demisto.args(); expects ``policy_name``,
            ``asc_location`` and ``resource_group_name``.

    Posts three war-room entries via demisto.results(): policy properties,
    per-VM port rules, and pending access requests.
    """
    policy_name = args.get("policy_name")
    asc_location = args.get("asc_location")
    resource_group_name = args.get("resource_group_name")
    policy = client.get_jit(policy_name, asc_location, resource_group_name)
    # Normalize once so a policy without "properties" cannot crash the lookups
    # below (the original code called properties.get(...) before verifying that
    # properties was a dict).
    properties = policy.get("properties") or {}
    # Property table
    property_table_output = [
        {
            "Name": policy.get("name"),
            "Kind": policy.get("kind"),
            # `or None` collapses missing/empty values to None, matching the
            # original "value if truthy else None" conditionals.
            "ProvisioningState": properties.get("provisioningState") or None,
            "Location": policy.get("location"),
            "Rules": properties.get("virtualMachines") or None,
            "Requests": properties.get("requests") or None,
            "ID": policy.get("id"),
        }
    ]
    md = tableToMarkdown(
        "Azure Security Center - Get JIT Access Policy - Properties",
        property_table_output,
        ["Name", "Kind", "ProvisioningState", "Location", "ID"],
        removeNull=True,
    )
    ec = {
        "AzureSecurityCenter.JITPolicy(val.ID && val.ID === obj.ID)": property_table_output
    }
    property_table_entry = {
        "Type": entryTypes["note"],
        "Contents": policy,
        "ContentsFormat": formats["json"],
        "ReadableContentsFormat": formats["markdown"],
        "HumanReadable": md,
        "EntryContext": ec,
    }
    # Rules table
    rules_table_output = list()
    virtual_machines = properties.get("virtualMachines")
    if virtual_machines:
        for rule in virtual_machines:
            rules_table_output.append(
                {
                    "VmID": rule.get("id"),
                    "Ports": format_jit_port_rule(rule.get("ports")),
                }
            )
    md = tableToMarkdown(
        "Azure Security Center - Get JIT Access Policy - Rules",
        rules_table_output,
        ["VmID", "Ports"],
        removeNull=True,
    )
    rules_table_entry = {
        "Type": entryTypes["note"],
        "Contents": virtual_machines,
        "ContentsFormat": formats["json"],
        "ReadableContentsFormat": formats["markdown"],
        "HumanReadable": md,
    }
    # Requests table
    requests_table_output = list()
    # `or []` guards against both a missing key and an explicit null value.
    for requestData in properties.get("requests") or []:
        vms = list()
        for vm in requestData.get("virtualMachines") or []:
            vm_name = vm["id"].split("/")[-1]
            vm_ports = format_jit_port_request(vm.get("ports"))
            vms.append("[{}: {}]".format(vm_name, vm_ports))
        requests_table_output.append(
            {
                "VirtualMachines": ", ".join(vms),
                # Requests without an explicit requestor come from the service.
                "Requestor": requestData.get("requestor") or "service-account",
                "StartTimeUtc": requestData.get("startTimeUtc"),
            }
        )
    md = tableToMarkdown(
        "Azure Security Center - Get JIT Access Policy - Requests",
        requests_table_output,
        ["VirtualMachines", "Requestor", "StartTimeUtc"],
        removeNull=True,
    )
    requests_table_entry = {
        "Type": entryTypes["note"],
        "Contents": properties.get("requests"),
        "ContentsFormat": formats["json"],
        "ReadableContentsFormat": formats["markdown"],
        "HumanReadable": md,
    }
    demisto.results([property_table_entry, rules_table_entry, requests_table_entry])
# Unsupported command. issue: issues/24583
def initiate_jit_command(client: MsClient, args: dict):
    """Requests Just-in-time access to a VM and posts the result to the war room.

    Args:
        client: authenticated MsClient wrapper for the Azure Security Center API.
        args (dict): usually demisto.args(); expects ``resource_group_name``,
            ``asc_location``, ``policy_name``, ``vmID``, ``port``,
            ``source_address`` and ``duration``.
    """
    resource_group = args.get("resource_group_name")
    location = args.get("asc_location")
    policy_name = args.get("policy_name")
    response = client.initiate_jit(
        resource_group,
        location,
        policy_name,
        args.get("vmID"),
        args.get("port"),
        args.get("source_address"),
        args.get("duration"),
    )
    policy_id = (f"/subscriptions/{client.subscription_id}/resourceGroups/{resource_group}/providers/"
                 f"Microsoft.Security/locations/{location}/jitNetworkAccessPolicies/{policy_name}")
    machines = response.get("virtualMachines")
    if machines:
        # Only the first VM / first port of the response is surfaced.
        first_vm = machines[0]
        first_port = first_vm.get("ports")[0]
        outputs = {
            "VmID": first_vm.get("id"),
            "PortNum": first_port.get("number"),
            "AllowedSourceAddress": first_port.get("allowedSourceAddressPrefix"),
            "EndTimeUtc": first_port.get("endTimeUtc"),
            "Status": first_port.get("status"),
            "Requestor": response.get("requestor"),
            "PolicyID": policy_id,
        }
        table_columns = ["VmID", "PortNum", "AllowedSourceAddress",
                         "EndTimeUtc", "Status", "Requestor"]
        md = tableToMarkdown(
            "Azure Security Center - Initiate JIT Access Request",
            outputs,
            table_columns,
            removeNull=True,
        )
        context_key = (f"AzureSecurityCenter.JITPolicy(val.ID && val.ID === obj.{policy_id})"
                       f".Initiate(val.endTimeUtc === obj.EndTimeUtc)")
        demisto.results(
            {
                "Type": entryTypes["note"],
                "Contents": response,
                "ContentsFormat": formats["json"],
                "ReadableContentsFormat": formats["markdown"],
                "HumanReadable": md,
                "EntryContext": {context_key: outputs},
            }
        )
# Unsupported command. issue: issues/24583
def delete_jit_command(client: MsClient, args: dict):
    """Deletes a Just-in-time machine.

    Args:
        client: authenticated MsClient wrapper for the Azure Security Center API.
        args (dict): usually demisto.args(); expects ``asc_location``,
            ``resource_group_name`` and ``policy_name``.

    Posts a text entry to the war room and records the deleted policy ID in
    the entry context.
    """
    asc_location = args.get("asc_location")
    resource_group_name = args.get("resource_group_name")
    policy_name = args.get("policy_name")
    client.delete_jit(asc_location, resource_group_name, policy_name)
    policy_id = f"/subscriptions/{client.subscription_id}/resourceGroups/{resource_group_name}/providers/" \
                f"Microsoft.Security/locations/{asc_location}/jitNetworkAccessPolicies/{policy_name}"
    outputs = {"ID": policy_id, "Action": "deleted"}
    ec = {"AzureSecurityCenter.JITPolicy(val.ID && val.ID === obj.ID)": outputs}
    demisto.results(
        {
            "Type": entryTypes["note"],
            # Fixed user-facing typo: "sucessfully" -> "successfully".
            "Contents": "Policy - {} has been deleted successfully.".format(policy_name),
            "ContentsFormat": formats["text"],
            "EntryContext": ec,
        }
    )
""" Jit Network Access Policies End """
""" Storage Start """
# Add this command to security center integration because ATP-related command requires storage account info
def list_sc_storage_command(client: MsClient):
    """Listing all Security Center Storages.

    Returns:
        tuple: (human readable markdown, entry context dict, raw API response)
    """
    accounts = client.list_sc_storage().get("value")
    outputs = []
    for account in accounts:
        # The resource group is the path segment right after "resourceGroups"
        # in the ARM resource ID.
        id_segments = account.get("id", "").split("/")
        rg_name = id_segments[id_segments.index("resourceGroups") + 1]
        outputs.append(
            {
                "Name": account.get("name"),
                "ResourceGroupName": rg_name,
                "Location": account.get("location"),
                "ID": account.get("id"),
            }
        )
    md = tableToMarkdown(
        "Azure Security Center - List Storage Accounts",
        outputs,
        ["Name", "ResourceGroupName", "Location"],
        removeNull=True,
    )
    ec = {"AzureSecurityCenter.Storage(val.ID && val.ID === obj.ID)": outputs}
    return md, ec, accounts
""" Storage End """
""" Subscriptions Start """
def list_sc_subscriptions_command(client: MsClient):
    """Listing Subscriptions for this application.

    Returns:
        tuple: (human readable markdown, entry context dict, raw API response)
    """
    subscriptions = client.list_sc_subscriptions().get("value")
    outputs = [
        {
            "Name": sub.get("displayName"),
            "State": sub.get("state"),
            "ID": sub.get("id"),
        }
        for sub in subscriptions
    ]
    md = tableToMarkdown(
        "Azure Security Center - Subscriptions",
        outputs,
        ["ID", "Name", "State"],
        removeNull=True,
    )
    ec = {"Azure.Subscription(val.ID && val.ID === obj.ID)": outputs}
    return md, ec, subscriptions
""" Subscriptions end """
""" Secure Score Start"""
def get_secure_scores_command(client: MsClient, args: dict):
    """Fetches a secure score by name (defaults to "ascScore").

    Args:
        client: authenticated MsClient wrapper for the Azure Security Center API.
        args (dict): usually demisto.args(); may contain ``secure_score_name``.

    Returns:
        tuple: (human readable markdown, entry context dict, raw API response)
    """
    score_name = args.get("secure_score_name", "ascScore")
    secure_score = client.get_secure_scores(score_name)
    score_properties = secure_score['properties']
    md = tableToMarkdown(
        "Azure Security Center - Secure Score",
        score_properties,
    )
    ec = {"Azure.Securescore(val.ID && val.ID === obj.ID)": score_properties}
    return md, ec, secure_score
""" Secure Scores End"""
def test_module(client: MsClient):
    """
    Performs basic GET request to check if the API is reachable and authentication is successful.
    Returns ok if successful.
    """
    # With a subscription configured, probe a subscription-scoped endpoint;
    # otherwise fall back to listing the accessible subscriptions.
    probe = client.list_locations if client.subscription_id else client.list_sc_subscriptions
    probe()
    demisto.results('ok')
def main():
    """Integration entry point.

    Reads the integration parameters, builds the authenticated MsClient and
    dispatches the currently executing command to its handler. Any exception
    is logged and surfaced to the user via return_error().
    """
    params: dict = demisto.params()
    server = params.get('server_url', '').rstrip('/') + '/'
    tenant = params.get('tenant_id')
    auth_and_token_url = params.get('auth_id', '')
    enc_key = params.get('enc_key')
    use_ssl = not params.get('unsecure', False)
    self_deployed: bool = params.get('self_deployed', False)
    proxy = params.get('proxy', False)
    # A subscription_id passed as a command argument wins over the configured default.
    subscription_id = demisto.args().get("subscription_id") or params.get("default_sub_id")
    ok_codes = (200, 201, 202, 204)
    try:
        if demisto.command() in SUB_ID_REQUIRING_CMD and not subscription_id:
            raise DemistoException("A subscription ID must be provided.")
        client = MsClient(tenant_id=tenant, auth_id=auth_and_token_url, enc_key=enc_key, app_name=APP_NAME, proxy=proxy,
                          server=server, verify=use_ssl, self_deployed=self_deployed, subscription_id=subscription_id,
                          ok_codes=ok_codes)
        if demisto.command() == "test-module":
            # If the command will fail, error will be thrown from the request itself
            test_module(client)
        elif demisto.command() == "azure-sc-get-alert":
            get_alert_command(client, demisto.args())
        elif demisto.command() == "azure-sc-list-alert":
            return_outputs(*list_alerts_command(client, demisto.args()))
        elif demisto.command() == "azure-sc-update-alert":
            return_outputs(*update_alert_command(client, demisto.args()))
        elif demisto.command() == "azure-sc-list-location":
            return_outputs(*list_locations_command(client))
        elif demisto.command() == "azure-sc-update-atp":
            return_outputs(*update_atp_command(client, demisto.args()))
        elif demisto.command() == "azure-sc-get-atp":
            return_outputs(*get_atp_command(client, demisto.args()))
        elif demisto.command() == "azure-sc-update-aps":
            return_outputs(*update_aps_command(client, demisto.args()))
        elif demisto.command() == "azure-sc-list-aps":
            return_outputs(*list_aps_command(client))
        elif demisto.command() == "azure-sc-get-aps":
            return_outputs(*get_aps_command(client, demisto.args()))
        # The handlers below post their entries via demisto.results() themselves,
        # so they are not wrapped in return_outputs().
        elif demisto.command() == "azure-sc-list-ipp":
            list_ipp_command(client, demisto.args())
        elif demisto.command() == "azure-sc-get-ipp":
            get_ipp_command(client, demisto.args())
        elif demisto.command() == "azure-sc-list-jit":
            return_outputs(*list_jit_command(client, demisto.args()))
        elif demisto.command() == "azure-sc-get-jit":
            get_jit_command(client, demisto.args())
        elif demisto.command() == "azure-sc-initiate-jit":
            initiate_jit_command(client, demisto.args())
        elif demisto.command() == "azure-sc-delete-jit":
            delete_jit_command(client, demisto.args())
        elif demisto.command() == "azure-sc-list-storage":
            return_outputs(*list_sc_storage_command(client))
        elif demisto.command() == "azure-list-subscriptions":
            return_outputs(*list_sc_subscriptions_command(client))
        elif demisto.command() == "azure-get-secure-score":
            return_outputs(*get_secure_scores_command(client, demisto.args()))
    except Exception as err:
        LOG(str(err))
        LOG.print_log()
        return_error(str(err))
from MicrosoftApiModule import * # noqa: E402
# Run when executed directly or inside the XSOAR builtins sandbox.
if __name__ in ('__main__', 'builtin', 'builtins'):
    main()
|
# Copyright (c) 2020 Club Raiders Project
# https://github.com/HausReport/ClubRaiders
#
# SPDX-License-Identifier: BSD-3-Clause
import logging
import multiprocessing
import os
import tempfile
import traceback
from datetime import date, timedelta
from datetime import datetime
import pause
import requests
from craid.eddb.loader import DataProducer
from craid.eddb.loader.MakeKeyFiles import loadKeys
from craid.eddb.loader.strategy.AWSLoader import LoadDataFromAWS
from craid.eddb.util.Singleton import Singleton
from craid.eddb.util.dataUpdate.CheckEddbFiles import eddbUpdateReadyForTemp
from craid.eddb.util.dataUpdate.CheckEddbFiles import oldestLocalEddbFile
from craid.eddb.util.dataUpdate.MakeHistoryFile import appendTodaysData, cleanHistoryFile, copyIntoSource
from craid.eddb.util.dataUpdate.MakeSmolFiles import deleteOldFiles, munchFile, unDeleteOldFiles
from craid.eddb.util.dataUpdate.UploadToAmazon import uploadToAWSFromTemp
class DailyUpdate(object, metaclass=Singleton):
    """Daily EDDB data refresh daemon (process-wide singleton via `Singleton`).

    run() loops forever: it checks whether the local EDDB dumps are stale,
    re-downloads and re-processes them (runUpdate()), uploads the results to
    AWS, and finally restarts the Heroku dynos so the rest of the app picks
    up the fresh data.
    """
    _instance = None
    # Return codes for runUpdate(): 0 means success, every other value names
    # the stage that failed.
    OKEY_DOKEY = 0
    ERROR_UPLOADING_TO_AWS = 7
    ERROR_UPDATING_HISTORY_FILE = 6
    ERROR_MAKING_KEY_FILES = 5
    ERROR_GETTING_DATA_ARRAYS = 4
    ERROR_DELETING_FILES = 3
    ERROR_CHECKING_TIMES = 2
    NOT_ALL_UPDATES_READY = 1
    # Guards the whole update cycle so only one process runs it at a time.
    lock = multiprocessing.Lock()
    # def __new__(cls):
    #     if cls._instance is None:
    #         print('-----------> CREATING THE SINGLETON <-----------------')
    #         cls._instance = super(DailyUpdate, cls).__new__(cls)
    #         logging.basicConfig(
    #             format='DMN - %(asctime)s %(levelname)-8s %(message)s',
    #             level=logging.INFO,
    #             datefmt='%Y-%m-%d %H:%M:%S')
    #         logging.info("Creating dailyupdate singleton")
    #         # Put any initialization here.
    #     else:
    #         logging.info("Reusing dailyupdate singleton")
    #     return cls._instance

    def run(self, key=None, reg=None, buck_key=None):
        """Main daemon loop; never returns in normal operation.

        Args:
            key: AWS access key ID, exported to AWS_ACCESS_KEY_ID.
            reg: AWS region, exported to AWS_DEFAULT_REGION.
            buck_key: AWS secret key, exported to AWS_SECRET_ACCESS_KEY.
        """
        # Export the AWS credentials so boto-based loaders pick them up.
        if key is not None:
            # logging.info(f"Setting key to ${key}")
            os.environ['AWS_ACCESS_KEY_ID'] = key
        else:
            logging.info("Key is none")
        if reg is not None:
            # logging.info(f"Setting reg to ${reg}")
            os.environ['AWS_DEFAULT_REGION'] = reg
        else:
            logging.info("Reg is none")
        if buck_key is not None:
            # logging.info(f"Setting sec to ${buck_key}")
            os.environ['AWS_SECRET_ACCESS_KEY'] = buck_key
        else:
            logging.info("buck_key is none")
        with DailyUpdate.lock:
            print('-----------> RUNNING THE SINGLETON: LOCK ACQUIRED <-----------------')
            logging.basicConfig(
                format='DMN - %(asctime)s %(levelname)-8s %(message)s',
                level=logging.INFO,
                datefmt='%Y-%m-%d %H:%M:%S')
            logging.info("Creating dailyupdate singleton")
            logging.info("Pausing 1 minute(s) for startup...")
            # pause.minutes(1)  # FIXME: uncomment for production
            while True:
                today = date.today()
                # lastnight = today 00:00; tonight = tomorrow 02:00 (next wake-up)
                lastnight = datetime.combine(today, datetime.min.time())
                tonight = lastnight + timedelta(days=1, hours=2)
                twoDaysAgo = lastnight + timedelta(days=-2, minutes=5)
                dt: datetime = oldestLocalEddbFile()
                retries = 0
                if dt < lastnight:
                    force = False
                    # case of one or more missing files
                    if dt < twoDaysAgo:
                        force = True
                    logging.info("Detected old or missing datafile.")
                    # Retry runUpdate() until it reports success (0).
                    returnValue = -1
                    while returnValue != 0:
                        # HERE for lock.acquire()
                        # DailyUpdate.lock.acquire()
                        # logging.info("Acquired lock.")
                        returnValue = self.runUpdate(forceDownload=force)
                        # logging.info("Releasing lock.")
                        # DailyUpdate.lock.release()
                        # HERE for lock.release()
                        if returnValue == DailyUpdate.OKEY_DOKEY:
                            logging.info("Successfully updated files.")
                        else:
                            # Translate the error code into a log message.
                            if returnValue == DailyUpdate.NOT_ALL_UPDATES_READY:
                                logging.info("Not all updates are ready.")
                            elif returnValue == DailyUpdate.ERROR_UPLOADING_TO_AWS:
                                logging.error(f"Error {returnValue} uploading to AWS.")
                            elif returnValue == DailyUpdate.ERROR_UPDATING_HISTORY_FILE:
                                logging.error(f"Error {returnValue} updating history file.")
                            elif returnValue == DailyUpdate.ERROR_MAKING_KEY_FILES:
                                logging.error(f"Error {returnValue} making key files.")
                            elif returnValue == DailyUpdate.ERROR_GETTING_DATA_ARRAYS:
                                logging.error(f"Error {returnValue} getting data arrays.")
                            elif returnValue == DailyUpdate.ERROR_DELETING_FILES:
                                logging.error(f"Error {returnValue} deleting files.")
                            elif returnValue == DailyUpdate.ERROR_CHECKING_TIMES:
                                logging.error(f"Error {returnValue} checking times.")
                            else:
                                logging.error(f"Error with unknown return value {returnValue}")
                            retries = retries + 1
                            if retries > 12:
                                # give up for the day
                                logging.warning("12 retries - giving up for the day.")
                                retries = 0
                                force = True
                                pause.until(tonight)
                            else:
                                logging.info("Pausing 30 minutes before retrying.")
                                pause.minutes(30)
                    logging.info("Restarting all dynos.")
                    self.restartAllDynos()  # This should kill off this process when running in production
                else:
                    logging.info("Detected no old or missing datafiles.")
                    logging.info("Pausing until midnight.")
                    pause.until(tonight)

    def runUpdate(self, forceDownload=False) -> int:
        """Run one complete update cycle.

        Args:
            forceDownload: skip the freshness check and download regardless.

        Returns:
            OKEY_DOKEY (0) on success, otherwise one of the class-level
            ERROR_* / NOT_ALL_UPDATES_READY codes naming the failed stage.
        """
        #
        # Check if EDDB's data is newer than ours
        #
        if forceDownload:
            logging.info("Forcing download - old or missing files.")
        else:
            try:
                allUpdatesReady = eddbUpdateReadyForTemp()
                if not allUpdatesReady:
                    return DailyUpdate.NOT_ALL_UPDATES_READY
            except Exception as e:
                traceback.print_exc()
                logging.error(str(e))
                return DailyUpdate.ERROR_CHECKING_TIMES
        #
        # Get rid of old files
        # NOTE: make copies and fall back to them in case of error?
        #
        try:
            deleteOldFiles()
        except Exception as e:
            traceback.print_exc()
            logging.error(str(e))
            # Roll the deletion back so we keep serving yesterday's data.
            unDeleteOldFiles()
            return DailyUpdate.ERROR_DELETING_FILES
        #
        # Make key files from new large data files
        #
        try:
            DataProducer.getDataArrays(writeKeyFiles=True, useEddb=True)
        except Exception as e:
            traceback.print_exc()
            logging.error(str(e))
            unDeleteOldFiles()  # NOTE: new
            return DailyUpdate.ERROR_GETTING_DATA_ARRAYS
        #
        # Make smol-.gz files from keys+large data files
        #
        try:
            club_faction_keys = loadKeys("factions-of-interest-keys")
            munchFile(club_faction_keys, 'factions.jsonl')
            club_system_keys = loadKeys('club-system-keys')
            munchFile(club_system_keys, 'systems_populated.jsonl')
            club_station_keys = loadKeys("club-station-keys")
            munchFile(club_station_keys, 'stations.jsonl')
        except Exception as e:
            traceback.print_exc()
            logging.error(str(e))
            unDeleteOldFiles()  # NOTE: new
            return DailyUpdate.ERROR_MAKING_KEY_FILES
        #
        # Get history from AWS, update & clean it
        #
        try:
            loader: LoadDataFromAWS = LoadDataFromAWS(forceWebDownload=True, useSmol=False)
            tmpDir = tempfile.gettempdir()
            fName = loader.download_file("history.jsonl", tmpDir)  # .gz is added in the function
            appendTodaysData(fName)
            cleanHistoryFile(fName)
            copyIntoSource(fName)  # FIXME: Not sure about this on production server
        except Exception as e:
            traceback.print_exc()
            logging.error(str(e))
            # ?????????????? unDeleteOldFiles() # NOTE: not sure about this
            return DailyUpdate.ERROR_UPDATING_HISTORY_FILE
        #
        # TODO: Test files for validity here
        #
        # factions should be more than 30 rows long
        # factioninstances should be more than 30 rows long
        # club factions should be more than 30 rows long
        # systems should be more than 30 rows long
        # if necessary
        # unDeleteOldFiles() #NOTE: new
        #
        # Upload to AWS
        #
        try:
            sNames = ['smol-factions.jsonl.gz',
                      'smol-systems_populated.jsonl.gz',
                      'smol-stations.jsonl.gz',
                      'history.jsonl.gz']
            shortName: str
            for shortName in sNames:
                retVal = uploadToAWSFromTemp(shortName)
                logging.info(f"Uploading {shortName} to AWS. Status is {retVal}")
        except Exception as e:
            traceback.print_exc()
            logging.error(str(e))
            return DailyUpdate.ERROR_UPLOADING_TO_AWS
        #
        # FIXME: check new files into github
        #
        return DailyUpdate.OKEY_DOKEY

    def restartAllDynos(self):
        """Restart all dynos of the Heroku app via the platform API
        (DELETE /apps/{app}/dynos) — expected to terminate this process
        too when running on Heroku (see run())."""
        app_name = os.getenv('HEROKU_APP_NAME')
        uname = os.getenv('HEROKU_CLI_USER')
        tok = os.getenv('HEROKU_CLI_TOKEN')
        # print(app_name)
        # print(uname)
        # print(tok)
        auth = (uname, tok)
        url = f"https://api.heroku.com/apps/{app_name}/dynos"
        logging.info(str(url))
        headers = {"Content-Type": "application/json",
                   "Accept" : "application/vnd.heroku+json; version=3"}
        req = requests.delete(url=url, auth=auth, headers=headers)
        logging.info(str(req))
        logging.info(req.content)
if __name__ == '__main__':
    #
    # Fire up logger
    #
    root_logger = logging.getLogger()
    root_logger.addHandler(logging.StreamHandler())
    root_logger.level = logging.INFO
    # Hand the AWS credentials through; run() re-exports them to os.environ.
    aws_key = os.getenv('AWS_ACCESS_KEY_ID')
    aws_region = os.getenv('AWS_DEFAULT_REGION')
    aws_secret = os.getenv('AWS_SECRET_ACCESS_KEY')
    updater = DailyUpdate()
    ret = updater.run(key=aws_key, reg=aws_region, buck_key=aws_secret)
    exit(0)
    #ret = dup.runUpdate()
    #exit(ret)
|
# coding: utf-8
def smoothing(dictionary, window_size, factor, to_smooth, significance_level=0.05, sigma_factor=1, bootstrap_reps=0, verbose=0):
    """
    Calculate kernel smoothed averages for sliding windows

    For every SNP, the statistic *to_smooth* of all SNPs on the same scaffold
    within +/- window_size*sigma_factor bp is combined into a Gaussian-kernel
    weighted average (see weighted_average()) and stored in
    dictionary['global'][factor][SNPid][to_smooth+'_smoothed'].
    If bootstrap_reps > 0, an empirical p-value (fraction of bootstrap window
    averages >= the observed average) is stored under to_smooth+'_smoothed_p'.
    Python 2 code; modifies *dictionary* in place.
    """
    from collections import defaultdict
    import numpy as np
    # Group SNP ids by scaffold (first field of the tab-separated 'chrom' line).
    scaff_dict = defaultdict(list)
    for key in dictionary['SNPids']:
        # print key
        # print dictionary['SNPids'][key]
        scaff = dictionary['SNPids'][key]['chrom'].split("\t")[0]
        # print scaff
        if not scaff_dict.has_key(scaff):
            scaff_dict[scaff]=[key]
        else:
            scaff_dict[scaff].append(key)
    # print scaff_dict
    print "\n### Kernel smoothing ###\nFactor: %s\nStatistic: %s\nWindow size: %s bp" %(factor, to_smooth, window_size, )
    if bootstrap_reps:
        print "determining p-value based on %s bootstrap replicates" %bootstrap_reps
    for sc in sorted(scaff_dict.keys()):
        if verbose:
            print "processing %s" %sc
        if sc:# == 'scaffold_12':# == 'scaffold_197':# == 'scaffold_1': #1246':
            # print scaff_dict[sc]
            for SNPID in scaff_dict[sc]: #sc]:
                # print "current SNP: %s" %dictionary['SNPids'][SNPID]['chrom']
                center = int(dictionary['SNPids'][SNPID]['chrom'].split("\t")[1]) #THIS IS THE POSITION OF THE WINDOW CENTER
                ##calculating per window average
                ran = [center-window_size*sigma_factor, center+window_size*sigma_factor] #THIS IS THE WINDOW RANGE
                # print ran
                # Collect statistic values and bp positions of all SNPs inside
                # the window on this scaffold.
                window_observations = []
                window_locations = []
                for ind in scaff_dict[sc]:
                    # print dictionary['SNPids'][ind]['chrom'].split("\t")[1]
                    if int(dictionary['SNPids'][ind]['chrom'].split("\t")[1]) >= ran[0] and int(dictionary['SNPids'][ind]['chrom'].split("\t")[1]) <= ran[1]:
                        # print "ok: %s" %ind
                        # print dictionary['global'][factor][ind]['avg_rank']
                        window_observations.append(dictionary['global'][factor][ind][to_smooth])
                        window_locations.append(dictionary['SNPids'][ind]['chrom'].split("\t")[1])
                w_average = weighted_average(center=center, observations=window_observations, locations=window_locations, window=window_size)
                dictionary['global'][factor][SNPID][to_smooth+'_smoothed'] = w_average
                # print "%s\t%s" %(dictionary['SNPids'][SNPID]['chrom'].split("\t")[0], center)
                # print window_observations
                # print w_average
                if bootstrap_reps:
                    count=0
                    for i in range(bootstrap_reps):
                        # Draw as many random SNPs (genome wide) as the real
                        # window holds, but keep the real window locations for
                        # the kernel weights.
                        bootstrap_observations = []
                        rand_scf_ids = []
                        rand_scf_ids = np.random.choice(dictionary['global'][factor].keys(), len(window_locations), replace=True)
                        # print "random SNP ids: "+str(rand_scf_ids)
                        for r in rand_scf_ids:
                            bootstrap_observations.append(dictionary['global'][factor][r][to_smooth])
                        # print "bootstrapped observations: "+str(bootstrap_observations)
                        bootstrap_w_average = weighted_average(center=center, observations=bootstrap_observations, locations=window_locations, window=window_size)
                        # print "bootstrap weighted average: %s vs %s" %(bootstrap_w_average, w_average)
                        if bootstrap_w_average >= w_average:
                            count+=1
                            # print "bootstrap weighted average: %s vs %s" %(bootstrap_w_average, w_average)
                    p_value=float(count)/bootstrap_reps
                    if verbose and p_value <= significance_level:
                        print "%s\t%s\t%s - p = %s" %(dictionary['SNPids'][SNPID]['chrom'].split("\t")[0], center, w_average, float(count)/bootstrap_reps)
                    dictionary['global'][factor][SNPID][to_smooth+'_smoothed_p'] = p_value
def write_stats(dictionary, out_columns, factor, prefix):
print "Wrting stats to %s" %(prefix+'_'+factor+'.txt')
OUT = open(prefix+'_'+factor+'.txt','w')
OUT.write("chrom\tbp\tSNPID\t"+"\t".join(out_columns)+'\n')
for SNPid in dictionary['global'][factor].keys():
temp_list = []
outstring = dictionary['SNPids'][SNPid]['chrom']+'\t'
for column in out_columns:
# print column
# print glob[fac][SNPid][column]
temp_list.append(str(dictionary['global'][factor][SNPid][column]))
outstring += "\t".join(temp_list)
OUT.write(outstring+'\n')
OUT.close()
def weighted_average(center, observations, locations, window):
    """calculates a Gaussian Kernel average

    center       -- bp position the window is centered on
    observations -- statistic values of the SNPs inside the window
    locations    -- bp positions of those SNPs (parallel to observations)
    window       -- kernel bandwidth in bp

    Returns sum(w_i * obs_i) / sum(w_i) with Gaussian weights w_i.
    """
    import numpy as np
    observations_weighted = []
    weights=[]
    for i in range(len(locations)):
        # NOTE(review): the denominator is (2*window)**2 == 4*window**2,
        # whereas a textbook Gaussian kernel uses 2*window**2 -- confirm this
        # is intentional.
        # NOTE(review): under Python 2 this division is integer division when
        # center/locations/window are all ints, which floors the exponent --
        # verify the inputs are floats or that this is intended.
        weight = np.exp((-1*(int(locations[i])-center)**2)/(2*window)**2)
        weights.append(weight)
        observations_weighted.append(observations[i]*weight)
    # print "weighted average: "+str(np.sum(observations_weighted)/(np.sum(weights)))
    return np.sum(observations_weighted)/(np.sum(weights))
# In[ ]:
def normalize (csv, norm_prefix="", normalize=True, boxplot=False, boxplots_prefix=""):
    """
    The function parses a csv file and outputs normalized values
    and boxplots if desired

    csv             -- path to a csv file; the first row is a header (first
                       cell ignored, rest are factor names), every following
                       row starts with a population name followed by one
                       value per factor ('NA' allowed).
    norm_prefix     -- output prefix for '<prefix>.csv'/'<prefix>.bayenv'
                       (required when normalize=True).
    normalize       -- write the normalized per-population means to the
                       output files.
    boxplot         -- generate one R script + SVG boxplot per factor
                       (requires Rscript on the PATH).
    boxplots_prefix -- output prefix for the R/SVG files (required when
                       boxplot=True).

    Returns (pops, IDs): sorted population names and sorted factor names.
    Python 2 code.  NOTE(review): the parameters 'csv' and 'normalize'
    shadow the stdlib module name and the function's own name.
    """
    from collections import defaultdict
    import numpy as np
    import subprocess
    if normalize and not norm_prefix:
        raise IOError("You have to specify a prefix for the output files containing the normalized data - use 'norm_prefix='")
    if boxplot and not boxplots_prefix:
        raise IOError("You have to specify a prefix for the boxplot files to be produced - use 'boxplots_prefix='")
    populations = []                 # population name of every data row (with repeats)
    columns = {}                     # factor name -> column values (floats or 'NA')
    indices = defaultdict(list)      # population name -> row indices
    normalized = defaultdict(list)   # factor name -> one normalized mean per population
    IDs = []
    pops = []
    INFILE = open(csv, 'r')
    headers = INFILE.readline().strip().split(",")
    # print headers
    headers.pop(0)  # drop the corner cell; the remaining cells are factor names
    IDs = sorted(headers)
    for env in headers:
        columns[env] = []
    for line in INFILE:
        line = line.strip()
        temp=line.split(",")
        populations.append(temp.pop(0))  # first cell is the population name
        # print population
        for i in range(len(temp)):
            # print temp[i]
            # print "%i:%s\n" %(i, header[i])
            # if not temp[i] == 'NA':
            try:
                columns[headers[i]].append(float(temp[i]))
            except ValueError:
                # Non-numeric cells (e.g. 'NA') are kept verbatim and
                # filtered out later.
                columns[headers[i]].append(temp[i])
    # print columns
    pops = list(sorted(set(populations)))
    # print pops
    #find indexes for each
    for pop in pops:
        # print "finding indices for %s" %pop
        for i in range(len(populations)):
            if pop == populations[i]:
                # print i
                indices[pop].append(i)
    # print indices
    # print "\nCalculating means\n"
    for env in headers:
        per_pop = {}
        temp_per_env_list = []  # all non-'NA' values of this factor (global)
        for value in columns[env]:
            if not value == 'NA':
                temp_per_env_list.append(value)
        # print "global: %s\n" %(temp_per_env_list)
        for pop in pops:
            # print pop
            # print "%s - should be %i" %(env, len(indices[pop]))
            per_pop_list = []
            for i in indices[pop]:
                if not columns[env][i] == 'NA':
                    per_pop_list.append(columns[env][i])
            # print per_pop_list
            # print "%s mean: %s" %(pop, np.mean(per_pop_list))
            # print "%s mean: %s" %(env, np.mean(columns[env]))
            # print "%s sd: %s" %(env, np.std(columns[env]))
            per_pop[pop] = per_pop_list
            # norm = (np.mean(per_pop_list) - np.mean(columns[env])) / np.std(columns[env])
            # print "%s: %s\n" %(pop, per_pop_list)
            # z-score of the population mean against the global distribution
            norm = (np.mean(per_pop_list) - np.mean(temp_per_env_list)) / np.std(temp_per_env_list)
            # print norm
            normalized[env].append(norm)
        if boxplot:
            # Write a small R script with one vector per population, then run
            # it through Rscript to render the SVG boxplot.
            print "Creating boxplot for %s\n" %env
            Rscript = boxplots_prefix+env+'.R'
            FH = open(Rscript, 'w')
            for pop in pops:
                # print per_pop[pop]
                vector = ""
                for v in per_pop[pop]:
                    vector+=repr(v)+','
                FH.write(pop+' <- c(%s)\n' %vector[:-1])
            FH.write("svg(filename = '"+boxplots_prefix+env+".svg')\n") #svg(filename = '$prefix-top-$cutoff.svg')
            FH.write("boxplot(%s, names = c('%s'), main = '%s')\n" %(", ".join(pops), "', '".join(pops), env))
            FH.write("dev.off()\n")
            FH.close()
            c = subprocess.Popen(['Rscript', Rscript], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            (output,err) = c.communicate()
            if err:
                print err
            # else:
            #     print output
    # print normalized
    if normalize:
        print "\nnormalizing %s environmental factors across %s populations\nwriting to:\n\t%s.bayenv\n\t%s.csv" %(len(IDs), len(pops), norm_prefix, norm_prefix)
        OUTCSV = open(norm_prefix+'.csv', 'w')
        OUTCSV.write(",%s\n" %",".join(pops))
        OUTBAYENV = open(norm_prefix+'.bayenv',"w")
        for env in sorted(columns.keys()):
            outstring = ""
            # IDs.append(env)
            for n in normalized[env]:
                outstring += repr(n)+"\t"
            # print outstring
            OUTBAYENV.write(outstring+"\n")
            OUTCSV.write("%s,%s\n" %(env, outstring.replace('\t',',')[:-1]))
        OUTBAYENV.close()
        OUTCSV.close()
    return pops, IDs
# In[ ]:
def split_for_Bayenv(infile, out_prefix):
    """
    This function takes a bayenv formatted multi-SNP file,
    splits it up into separate files (one SNP per file).

    Every SNP occupies two consecutive lines; SNP number k is written to
    '<out_prefix>-<k zero-padded to 7 digits>.txt' with each line
    terminated by a tab + newline (Bayenv's expected format).
    """
    SNPcount = 0
    temp = []
    # 'with' closes the handles deterministically (the input handle was
    # previously never closed); '//' keeps Python 2 behavior while also
    # being correct integer division under Python 3.
    with open(infile, 'r') as IN:
        for line in IN:
            temp.append(line.strip())
            SNPcount += 1
            if (SNPcount % 2 == 0):
                with open('%s-%07d.txt' % (out_prefix, SNPcount // 2), 'w') as OUT:
                    # print "%s\t\n" %"\n".join(temp)
                    OUT.write("%s\t\n" % "\t\n".join(temp))
                temp = []
# In[ ]:
def calculate_rank_stats(SNP_map, infiles, ids, prefix): #these options are currently not implemented, threshold = 0.01, window = 50e3, sigma_factor = 3, bootstrap_rep = 100):
    """
    The function will calculate rank statistics (averages, standard deviations, etc)
    across a number of Bayenv replicates

    SNP_map -- path to the SNP map; line i describes SNP i (tab separated,
               stored verbatim under 'chrom')
    infiles -- list of Bayenv output files, one per replicate; column
               factor_index+1 of each line holds the Bayes factor
    ids     -- list of environmental factor names (column order)
    prefix  -- output prefix; one '<prefix>_<factor>.txt' table is written
               per factor

    Returns a dict with keys:
      'global'   -- factor -> SNP index -> per-SNP rank statistics
      'extremes' -- factor -> replicate index -> count of SNPs for which that
                    replicate produced the most deviant rank
      'SNPids'   -- SNP index -> {'chrom': raw map line}
    Python 2 code.
    """
    #import
    from collections import defaultdict
    import os
    import numpy as np
    #define global variables
    global_dict = defaultdict(dict)  # SNP index -> {'chrom': raw map line}
    glob = defaultdict(dict) #holds all information for the SNPs
    reps = []
    extremes = {}
    rep_count = 0
    return_dict = {}
    #read in SNP_map
    SNPmap = open(SNP_map, 'r')
    i=0
    for line in SNPmap:
        global_dict[i]['chrom'] = line.strip()
        i += 1
    print "Total number of SNPs (according to the SNPmap): %i" %len(global_dict)
    #assess Bayenv files
    print "Number of Bayenv replicates: %i" %len(infiles)
    #assess environmental factors
    print "Number of environmental factors analysed: %i" %len(ids)
    #display settings
    # print "Sliding window settings:"
    # print "\tWindow size: %i bp" %window
    # print "\tSigma: %s" %sigma_factor
    #start processing
    print "parsing bayenv files"
    for bayenv_file in sorted(infiles):
        print "\nprocessing replicate %i:\n%s" %(rep_count, bayenv_file)
        reps.append(bayenv_file)
        fh = open(bayenv_file,'r')
        j=0 #this variable will hold the index of the current SNP
        sorting = defaultdict(dict) #This dictionary will contain the rank sorted SNPs
        for bf in fh:
            for factor_index in range(len(ids)):
                # print j
                factor = ids[factor_index]
                # print "processing factor %s" %factor
                if not glob.has_key(factor):
                    glob[factor] = defaultdict(dict)
                # Bucket SNP indices by their Bayes factor value so that equal
                # values (ties) end up in the same bucket.
                if not sorting[factor].has_key(float(bf.split('\t')[1+factor_index])):
                    sorting[factor][float(bf.split('\t')[1+factor_index])]=[j]
                else:
                    sorting[factor][float(bf.split('\t')[1+factor_index])].append(j)
            j += 1
        fh.close()
        for factor in sorting.keys(): #do sorting and add rank information to global dictionary
            # print factor
            rank = 0
            for r in sorted(sorting[factor].keys()):
                ## print "rank: %i" %rank
                ## print "bf: %f" %r
                ## print sorting[r]
                ## print len(sorting[r])
                for SNP in sorting[factor][r]:
                    ## print SNP
                    if not glob[factor].has_key(SNP):
                        glob[factor][SNP]['ranks'] = [rank]
                    else:
                        glob[factor][SNP]['ranks'].append(rank)
                # Tied SNPs share a rank; advance by the number of ties.
                rank += len(sorting[factor][r])
        rep_count += 1
    output_columns = ['avg_rank', 'med_rank', 'p20_rank', 'std_rank', 'var_rank', 'mad_rank', 'avg_rank_rel', 'med_rank_rel', 'p20_rank_rel','var_rank_rel', 'var_rank_weight', 'var_weighted_avg_rank', 'var_weighted_rel_avg_rank']
    print "\nSUMMARY:\n"
    for fac in sorted(glob.keys()):
        # print "%s: %i" %(fac, len(glob[fac]))
        extremes[fac] = defaultdict(int)
        variances = []
        # for SNPid in glob[fac].keys()[0:10]:
        #     print "%s: %s" %(SNPid, glob[fac][SNPid]['ranks'])
        for SNPid in glob[fac].keys():
            # print glob[fac][SNPid]['ranks']
            # print absolute_deviation_from_median(data=glob[fac][SNPid]['ranks'])
            # print find_max(absolute_deviation_from_median(data=glob[fac][SNPid]['ranks']))
            # Count, per replicate, how often it produced the most deviant rank.
            for maxi in find_max(absolute_deviation_from_median(data=glob[fac][SNPid]['ranks'])):
                extremes[fac][maxi] += 1
            # print extremes
            glob[fac][SNPid]['avg_rank'] = np.mean(glob[fac][SNPid]['ranks'])
            glob[fac][SNPid]['med_rank'] = np.median(glob[fac][SNPid]['ranks'])
            glob[fac][SNPid]['p20_rank'] = np.percentile(glob[fac][SNPid]['ranks'], 20)
            glob[fac][SNPid]['std_rank'] = np.std(glob[fac][SNPid]['ranks'])
            glob[fac][SNPid]['var_rank'] = np.var(glob[fac][SNPid]['ranks'])
            glob[fac][SNPid]['mad_rank'] = mad(data=glob[fac][SNPid]['ranks'])
            variances.append(glob[fac][SNPid]['var_rank'])
            # Relative ranks are scaled by the number of SNPs for this factor.
            glob[fac][SNPid]['avg_rank_rel'] = np.mean(glob[fac][SNPid]['ranks'])/len(glob[fac])
            glob[fac][SNPid]['med_rank_rel'] = np.median(glob[fac][SNPid]['ranks'])/len(glob[fac])
            glob[fac][SNPid]['p20_rank_rel'] = np.percentile(glob[fac][SNPid]['ranks'], 20)/len(glob[fac])
        #find maximum variance for the current factor
        max_var = max(variances)
        # NOTE(review): "Wrting" typo in this user-facing message.
        print "Wrting stats to %s" %(prefix+'_'+fac+'.txt')
        OUT = open(prefix+'_'+fac+'.txt','w')
        OUT.write("chrom\tbp\tSNPID\t"+"\t".join(output_columns)+'\n')
        for SNPid in glob[fac].keys(): #calculate relative and weighted variances
            glob[fac][SNPid]['var_rank_rel'] = glob[fac][SNPid]['var_rank'] / max_var
            glob[fac][SNPid]['var_rank_weight'] = 1-glob[fac][SNPid]['var_rank_rel']
            glob[fac][SNPid]['var_weighted_avg_rank'] = glob[fac][SNPid]['var_rank_weight'] * glob[fac][SNPid]['avg_rank']
            glob[fac][SNPid]['var_weighted_rel_avg_rank'] = glob[fac][SNPid]['var_rank_weight'] * glob[fac][SNPid]['avg_rank_rel']
            temp_list = []
            # outstring = str(SNPid)+','
            outstring = global_dict[SNPid]['chrom']+'\t'
            for column in output_columns:
                # print column
                # print glob[fac][SNPid][column]
                temp_list.append(str(glob[fac][SNPid][column]))
            outstring += "\t".join(temp_list)
            OUT.write(outstring+'\n')
        OUT.close()
        counts = [extremes[fac][i] for i in sorted(extremes[fac])]
        perc = [float(counts[i])/len(glob[fac])*100 for i in range(len(counts))]
        ex = find_max([extremes[fac][i] for i in sorted(extremes[fac])])
        print "factor %s\treplicate %s gave the most extreme ranks for %.2f %% of the SNPs" %(fac, ex[0], perc[ex[0]])
    return_dict['global'] = glob
    return_dict['extremes'] = extremes
    return_dict['SNPids'] = global_dict
    return return_dict
# In[ ]:
def mad(data):
    """Return the median absolute deviation (MAD) of *data*."""
    import numpy as np
    deviations = np.abs(np.asarray(data) - np.median(data))
    return np.median(deviations)
# In[ ]:
def absolute_deviation_from_median(data):
    """Return a list with the absolute deviation from the median for every value in *data*."""
    import numpy as np
    med = np.median(data)
    return [np.abs(value - med) for value in data]
# In[ ]:
def find_max(data):
    """Return the indices (in ascending order) at which *data* attains its maximum."""
    peak = max(data)
    return [index for index, value in enumerate(data) if value == peak]
# In[ ]:
def plot_pope(files_list, cutoff, num_replicates):
    """
    The function calls a shell script that configures and runs an
    R script to plot pope plots and extract the top SNPs

    files_list: iterable of (analysis_id, data_file) pairs -- TODO confirm
    cutoff: passed through to plot_pope.sh as the top-SNP cutoff
    num_replicates: passed through to plot_pope.sh
    """
    import subprocess
    for ID in sorted(files_list):
        print "processing %s:" %ID[0]
        print "data in file: %s" %ID[1]
        # plot_pope.sh <data file> <cutoff> <analysis id> <num replicates>
        c = subprocess.Popen(['sh','plot_pope.sh',ID[1],str(cutoff),ID[0],str(num_replicates)], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (output,err) = c.communicate()
        if err:
            # script wrote to stderr: surface it instead of its stdout
            print err
        else:
            print output
# __Define the function__ that excludes the most extreme replicate.
# In[ ]:
def exclude_extreme_rep(dictionary, ids, prefix, cutoff=0):
    """
    Calculate rank statistics while excluding the one most extreme replicate for every factor

    dictionary: aggregation result; expects keys 'global' (per-factor,
        per-SNP stats including the raw 'ranks' list), 'extremes'
        (per-factor: count of SNPs for which each replicate was most
        extreme) and 'SNPids' (per-SNP metadata including 'chrom').
    ids: factors to process.
    prefix: output prefix; writes <prefix>_<factor>_ex_rep_<rep>.txt per factor.
    cutoff: fraction (0-1); a replicate is only excluded when it was most
        extreme for more than cutoff*100 percent of the SNPs.
    """
    from collections import defaultdict
    import numpy as np
    output_columns = ['avg_rank', 'med_rank', 'std_rank', 'var_rank', 'mad_rank', 'avg_rank_rel', 'med_rank_rel', 'var_rank_rel', 'var_rank_weight', 'var_weighted_avg_rank', 'var_weighted_rel_avg_rank']
    glob = defaultdict(dict)
    for fac in sorted(ids):
        # counts[i]: number of SNPs for which replicate i produced the most extreme rank
        counts = [dictionary['extremes'][fac][i] for i in sorted(dictionary['extremes'][fac])]
        perc = [float(counts[i])/len(dictionary['global'][fac])*100 for i in range(len(counts))]
        # ex: replicate index(es) with the highest extreme count; only ex[0] is used
        ex = find_max([dictionary['extremes'][fac][i] for i in sorted(dictionary['extremes'][fac])])
        print "\nfactor %s\treplicate %s gave the most extreme ranks for %.2f %% of the SNPs" %(fac, ex[0], perc[ex[0]])
        if perc[ex[0]] > cutoff*100:
            print "will re-calculate stats without replicate %s" %ex[0]
        else:
            print "none of the replicates exceeds the %s threshold" %cutoff
            continue
        glob[fac] = defaultdict(dict)
        variances = []
        for SNPid in dictionary['global'][fac].keys():
            # copy the ranks and drop the most extreme replicate before recomputing stats
            temp_list = dictionary['global'][fac][SNPid]['ranks'][:]
            temp_list.pop(ex[0])
            glob[fac][SNPid]['avg_rank'] = np.mean(temp_list)
            glob[fac][SNPid]['med_rank'] = np.median(temp_list)
            glob[fac][SNPid]['std_rank'] = np.std(temp_list)
            glob[fac][SNPid]['var_rank'] = np.var(temp_list)
            glob[fac][SNPid]['mad_rank'] = mad(data=temp_list)
            variances.append(glob[fac][SNPid]['var_rank'])
            # *_rel values are normalised by the number of SNPs for this factor
            glob[fac][SNPid]['avg_rank_rel'] = np.mean(temp_list)/len(dictionary['global'][fac])
            glob[fac][SNPid]['med_rank_rel'] = np.median(temp_list)/len(dictionary['global'][fac])
        #find maximum variance for the current factor
        max_var = max(variances)
        print "Wrting stats to %s" %(prefix+'_'+fac+'_ex_rep_'+str(ex[0])+'.txt')
        OUT = open(prefix+'_'+fac+'_ex_rep_'+str(ex[0])+'.txt','w')
        OUT.write("chrom\tbp\tSNPID\t"+"\t".join(output_columns)+'\n')
        for SNPid in glob[fac].keys(): #calculate relative and weighted variances
            glob[fac][SNPid]['var_rank_rel'] = glob[fac][SNPid]['var_rank'] / max_var
            glob[fac][SNPid]['var_rank_weight'] = 1-glob[fac][SNPid]['var_rank_rel']
            glob[fac][SNPid]['var_weighted_avg_rank'] = glob[fac][SNPid]['var_rank_weight'] * glob[fac][SNPid]['avg_rank']
            glob[fac][SNPid]['var_weighted_rel_avg_rank'] = glob[fac][SNPid]['var_rank_weight'] * glob[fac][SNPid]['avg_rank_rel']
            temp_list = []
            outstring = dictionary['SNPids'][SNPid]['chrom']+'\t'
            for column in output_columns:
                temp_list.append(str(glob[fac][SNPid][column]))
            outstring += "\t".join(temp_list)
            OUT.write(outstring+'\n')
        OUT.close()
# In[ ]:
def parse_gff(gff):
    """
    parse gff file

    Returns {chromosome: {start_pos(str): gene, end_pos(str): gene}} for
    every 'CDS' feature line.  Positions are kept as strings, exactly as
    they appear in columns 4 and 5 of the file.
    """
    gff_dict = {}
    # 'with' guarantees the file handle is closed (the original leaked it);
    # split each line once instead of five times.
    with open(gff, 'r') as gff_fh:
        for line in gff_fh:
            fields = line.strip().split('\t')
            if fields[2] == 'CDS':
                # extract the gene id from the attribute column
                gene = fields[8].split(' ')[3].replace('"','').replace(';','') #This line needs to be adujsted to the gff format
                if fields[0] not in gff_dict:  # 'in' replaces Py2-only has_key()
                    gff_dict[fields[0]] = {}
                gff_dict[fields[0]][fields[3]] = gene
                gff_dict[fields[0]][fields[4]] = gene
    return gff_dict
# In[ ]:
def find_genes(rank_stats, gff, distance):
    """
    find genes up and downstream of SNPs

    rank_stats: list of tsv file paths; first line is a header, then one
        SNP per line with columns chrom, bp, ID, ...
    gff: dict as produced by parse_gff() -- {chrom: {position(str): gene}}
    distance: search radius around each SNP, in kb.
    Returns {analysis_id: {'columns': [...], 'genes': [[chrom, bp, ID, gene], ...]}}
    """
    from collections import defaultdict
    candidates = defaultdict(dict)
    return_dict = defaultdict(dict)
    for tsv in rank_stats:
        print "processing rank statistic file: %s" %tsv
        # analysis id = file name without directory and .tsv extension
        ID = tsv.split('/')[-1].replace('.tsv','')
        candidates[ID] = defaultdict(list)
        rank_stats_fh = open(tsv, 'r')
        rank_stats_fh.readline()  # skip the header line
        for SNP in [SNP.strip() for SNP in rank_stats_fh]:
            rank_elem = SNP.split('\t')
            if not candidates[ID].has_key(rank_elem[0]):
                candidates[ID][rank_elem[0]] = []
            # store (bp, SNP id) per chromosome
            candidates[ID][rank_elem[0]].append(rank_elem[1:3])
    for tsv in sorted(candidates.keys()):
        print "%s:" %tsv
        for chrom in sorted(candidates[tsv]):
            for hot in candidates[tsv][chrom]:
                gene_list = []
                temp = []
                nr_genes = []
                # window of +/- distance kb around the SNP position
                lower = int(hot[0])-(distance*1000)
                upper = int(hot[0])+(distance*1000)
                if not gff.has_key(chrom):
                    # no genes found on this chromosome
                    continue
                else:
                    for pos in gff[chrom].keys():
                        temp.append(int(pos))
                    for pos in sorted(temp):
                        if pos >= lower and pos <= upper:
                            gene_list.append(gff[chrom][str(pos)])
                        elif pos > upper:
                            # positions are sorted: nothing further can match
                            break
                nr_genes = list(set(gene_list))  # de-duplicate genes
                for unique_gene in nr_genes:
                    if not return_dict[tsv].has_key('genes'):
                        # first hit for this file: initialise the result record
                        return_dict[tsv]['columns'] = ['chrom','bp','ID','gene']
                        return_dict[tsv]['genes'] = []
                    return_dict[tsv]['genes'].append([chrom,hot[0],hot[1],unique_gene])
        if not return_dict.has_key(tsv):
            # no genes at all for this file: record an empty result
            return_dict[tsv]['genes'] = []
            return_dict[tsv]['columns'] = ['chrom','bp','ID','gene']
        print "identified %i gene(s)" %len(return_dict[tsv]['genes'])
    return return_dict
# In[ ]:
def annotate_genes(SNPs_to_genes, annotations, whitelist=[]):
    """
    fetch annotation for genes from file produced by Blast2GO

    Mutates SNPs_to_genes in place: extends each analysis' 'columns' with
    the annotation header (once) and appends the annotation fields to every
    gene record that has an entry in the annotation file.
    NOTE(review): mutable default `whitelist=[]` is shared across calls;
    harmless here because it is only rebound, never mutated.
    """
    from collections import defaultdict
    annotation = defaultdict(list)
    if whitelist:
        # validate that every requested analysis id exists
        for id in whitelist:
            if not SNPs_to_genes.has_key(id):
                raise IOError("You provide an analysis id %s that is not in the dictionary" %id)
    else:
        # no whitelist: process every analysis
        whitelist = SNPs_to_genes.keys()[:]
    anno_fh = open(annotations, 'r')
    header = anno_fh.readline().strip().split('\t')
    annotation['header'] = header[1:]  # first column is the gene id itself
    annotation['genes'] = defaultdict(list)
    for line in [line.strip() for line in anno_fh]:
        annotation['genes'][line.split('\t')[0]] = line.split('\t')[1:]
    for analysis_id in whitelist:
        print analysis_id
        if len(SNPs_to_genes[analysis_id]['genes']) > 0:
            print "adding annoation for %s" %analysis_id
            for index in range(len(SNPs_to_genes[analysis_id]['genes'])):
                # gene name is the last element of each record
                if annotation['genes'].has_key(SNPs_to_genes[analysis_id]['genes'][index][-1]):
                    if len(SNPs_to_genes[analysis_id]['columns']) == 4:
                        # first annotated gene for this analysis: extend the headers once
                        SNPs_to_genes[analysis_id]['columns'].extend(annotation['header'])
                    SNPs_to_genes[analysis_id]['genes'][index].extend(annotation['genes'][SNPs_to_genes[analysis_id]['genes'][index][-1]])
                elif len(SNPs_to_genes[analysis_id]['genes'][index]) == 4 and not annotation['genes'].has_key(SNPs_to_genes[analysis_id]['genes'][index][-1]):
                    print "no annoation found for %s" %SNPs_to_genes[analysis_id]['genes'][index][-1]
        else:
            print "nothing to annotate - 0 candidate genes identified for %s" %analysis_id
# In[ ]:
def write_candidates(SNPs_to_genes, whitelist=[], rename=[], out_dir='./'):
    """
    write out SNP to candidate genes text files (will be named *.genes.tsv)

    Annotated analyses (columns extended beyond the base 4 by
    annotate_genes()) are written to *.genes.annotated.tsv instead.
    NOTE(review): `rename` is length-validated but never used afterwards --
    TODO confirm whether renaming the output files was intended.
    NOTE(review): mutable default args are only rebound, never mutated.
    """
    if rename:
        if not len(rename) == len(whitelist):
            raise IOError("If you provide a list with new names it needs to be the same length as the whitelist")
    if whitelist:
        # validate that every requested analysis id exists
        for id in whitelist:
            if not SNPs_to_genes.has_key(id):
                raise IOError("You provide an analysis id %s that is not in the dictionary" %id)
    else:
        # no whitelist: write every analysis
        whitelist = SNPs_to_genes.keys()[:]
    for id in sorted(whitelist):
        print id
        if not len(SNPs_to_genes[id]['genes']) >= 1:
            print "0 candidate genes found"
            continue
        else:
            if len(SNPs_to_genes[id]['columns']) == 4:
                # plain candidates (chrom, bp, ID, gene)
                print "writing to: %s" %(out_dir+id+'.genes.tsv')
                out_fh = open(out_dir+id+'.genes.tsv','w')
            else:
                # columns were extended by annotate_genes()
                print "writing to: %s" %(out_dir+id+'.genes.annotated.tsv')
                out_fh = open(out_dir+id+'.genes.annotated.tsv','w')
            out_fh.write("%s\n" %"\t".join(SNPs_to_genes[id]['columns']))
            for gene in SNPs_to_genes[id]['genes']:
                out_fh.write("%s\n" %"\t".join(gene))
            out_fh.close()
# In[ ]:
|
# Copyright (c) 2019 Andrey Valyaev <dron.valyaev@gmail.com>
#
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
from django.test import TestCase
class IndexTest(TestCase):
    """Verify that the site root forwards visitors to the controllers page."""

    def testRedirectToControllers(self):
        # GET the root URL and confirm it redirects to /controllers/.
        root_response = self.client.get('/')
        self.assertRedirects(root_response, '/controllers/')
|
# Signal processing setup
SR = 16000        # sample rate -- presumably Hz, TODO confirm against the feature extractor
FFT_HOP = 256     # hop between analysis frames (samples)
FFT_SIZE = 512    # analysis window size (samples)
N_MELS = 96       # number of mel bands
# Machine learning setup
BATCH_SIZE = 1 # size of the batch during prediction
# Output labels
# MagnaTagATune tags, in the fixed index order referenced by MTT_LABELS_DICT_IDX
MTT_LABELS = ['guitar', 'classical', 'slow', 'techno', 'strings', 'drums', 'electronic', 'rock', 'fast', 'piano', 'ambient', 'beat', 'violin', 'vocal', 'synth', 'female', 'indian', 'opera', 'male', 'singing', 'vocals', 'no vocals', 'harpsichord', 'loud', 'quiet', 'flute', 'woman', 'male vocal', 'no vocal', 'pop', 'soft', 'sitar', 'solo', 'man', 'classic', 'choir', 'voice', 'new age', 'dance', 'male voice', 'female vocal', 'beats', 'harp', 'cello', 'no voice', 'weird', 'country', 'metal', 'female voice', 'choral']
# Million Song Dataset (last.fm) tags, in the fixed index order referenced by MSD_LABELS_DICT_IDX
MSD_LABELS = ['rock','pop','alternative','indie','electronic','female vocalists','dance','00s','alternative rock','jazz','beautiful','metal','chillout','male vocalists','classic rock','soul','indie rock','Mellow','electronica','80s','folk','90s','chill','instrumental','punk','oldies','blues','hard rock','ambient','acoustic','experimental','female vocalist','guitar','Hip-Hop','70s','party','country','easy listening','sexy','catchy','funk','electro','heavy metal','Progressive rock','60s','rnb','indie pop','sad','House','happy']
# Category -> indices into MTT_LABELS
MTT_LABELS_DICT_IDX = {'genre':[1,3,6,7,16,17,29,34,37,38,46,47],
                       'instrument':[0,4,5,9,12,14,22,25,31,35,42,43,49],
                       'mood': [2,8,23,24,30,45],
                       'style':[10,32],
                       'rhythm':[11,41],
                       'vocal':[13,15,18,19,20,21,26,27,28,33,36,39,40,44,48]
                       }
# Category -> indices into MSD_LABELS
MSD_LABELS_DICT_IDX = {'genre':[0,1, 2, 3, 4, 6,8,9,11,14,15,16,18,20,24,26,27,33,36,40,41,42,43,45,46,48],
                       'instrument':[32],
                       'mood': [12,17,22,35,37,38,47,49],
                       'style':[23,28,29,30],
                       'vocal':[5,13,31],
                       'eras':[7,19,21,25,34,44],
                       'other':[39]
                       }
# Same grouping as MTT_LABELS_DICT_IDX, but with the label strings themselves
MTT_LABELS_DICT_INV = {'genre':['classical','techno','electronic','rock','indian','opera','pop','classic','new age','dance','country','metal'],
                       'instrument':['guitar','strings','drums','piano','violin','synth','harpsichord','flute','sitar','choir','harp','cello','choral'],
                       'mood': ['slow','fast','loud','quiet','soft','weird'],
                       'style': ['ambient','solo'],
                       'rhythm':['beat','beats'],
                       'vocal':['vocal','female','male','singing','vocals','no vocals','woman','male vocal','no vocal','man','voice','male voice','female vocal','no voice','female voice']
                       }
# Same grouping as MSD_LABELS_DICT_IDX, but with the label strings themselves
MSD_LABELS_DICT_INV = {'genre':['rock','pop','alternative','indie','electronic','dance','alternative rock','jazz','beautiful','metal','classic rock','soul','indie rock','electronica','folk','punk','blues','hard rock','Hip-Hop','country','funk','electro','heavy metal','Progressive rock','rnb','indie pop','House'],
                       'instrument':['guitar'],
                       'mood': ['chillout','Mellow','chill','party','easy listening','sexy','sad','happy'],
                       'style':['instrumental','ambient','acoustic','experimental'],
                       'vocal':['female vocalists','male vocalists','female vocalist'],
                       'eras':['00s','80s','90s','oldies','70s','60s'],
                       'other':['catchy']
                       }
# MagnaTagATune tag -> category.  Valid categories are exactly the keys of
# MTT_LABELS_DICT_IDX / MTT_LABELS_DICT_INV: genre, instrument, mood, style,
# rhythm, vocal.
# BUG FIX: 'no voice' was mapped to the nonexistent category 'voice'; per
# MTT_LABELS_DICT_INV (which lists 'no voice' under 'vocal') it belongs to 'vocal'.
MTT_LABELS_DICT = {'guitar':'instrument',
                   'classical':'genre',
                   'slow':'mood',
                   'techno':'genre',
                   'strings':'instrument',
                   'drums':'instrument',
                   'electronic':'genre',
                   'rock':'genre',
                   'fast':'mood',
                   'piano':'instrument',
                   'ambient':'style',
                   'beat':'rhythm',
                   'violin':'instrument',
                   'vocal':'vocal',
                   'synth':'instrument',
                   'female':'vocal',
                   'indian':'genre',
                   'opera':'genre',
                   'male': 'vocal',
                   'singing':'vocal',
                   'vocals':'vocal',
                   'no vocals': 'vocal',
                   'harpsichord':'instrument',
                   'loud': 'mood',
                   'quiet':'mood',
                   'flute':'instrument',
                   'woman': 'vocal',
                   'male vocal': 'vocal',
                   'no vocal':'vocal',
                   'pop': 'genre',
                   'soft': 'mood',
                   'sitar': 'instrument',
                   'solo': 'style',
                   'man':'vocal',
                   'classic':'genre',
                   'choir': 'instrument',
                   'voice':'vocal',
                   'new age': 'genre',
                   'dance': 'genre',
                   'male voice': 'vocal',
                   'female vocal': 'vocal',
                   'beats': 'rhythm',
                   'harp': 'instrument',
                   'cello': 'instrument',
                   'no voice': 'vocal',
                   'weird': 'mood',
                   'country': 'genre',
                   'metal': 'genre',
                   'female voice': 'vocal',
                   'choral': 'instrument'}
# Million Song Dataset tag -> category.  Categories mirror the keys of
# MSD_LABELS_DICT_IDX / MSD_LABELS_DICT_INV: genre, instrument, mood, style,
# vocal, eras, other.  Capitalisation ('Mellow', 'Hip-Hop', ...) matches the
# raw last.fm tag strings in MSD_LABELS.
MSD_LABELS_DICT = {'rock':'genre',
                   'pop':'genre',
                   'alternative': 'genre',
                   'indie':'genre',
                   'electronic': 'genre',
                   'female vocalists': 'vocal',
                   'dance': 'genre',
                   '00s': 'eras',
                   'alternative rock': 'genre',
                   'jazz': 'genre',
                   'beautiful':'mood',
                   'metal': 'genre',
                   'chillout': 'mood',
                   'male vocalists': 'vocal',
                   'classic rock':'genre',
                   'soul': 'genre',
                   'indie rock': 'genre',
                   'Mellow': 'mood',
                   'electronica': 'genre',
                   '80s': 'eras',
                   'folk': 'genre',
                   '90s': 'eras',
                   'chill': 'mood',
                   'instrumental': 'style',
                   'punk': 'genre',
                   'oldies': 'eras',
                   'blues': 'genre',
                   'hard rock': 'genre',
                   'ambient': 'style',
                   'acoustic': 'style',
                   'experimental': 'style',
                   'female vocalist': 'vocal',
                   'guitar': 'instrument',
                   'Hip-Hop': 'genre',
                   '70s': 'eras',
                   'party': 'mood',
                   'country': 'genre',
                   'easy listening': 'mood',
                   'sexy': 'mood',
                   'catchy':'other',
                   'funk': 'genre',
                   'electro':'genre',
                   'heavy metal': 'genre',
                   'Progressive rock': 'genre',
                   '60s': 'eras',
                   'rnb':'genre',
                   'indie pop': 'genre',
                   'sad': 'mood',
                   'House':'genre',
                   'happy': 'mood'}
# Time Complexity: O(n) -- 2n-1 recursive calls in total.
# Space Complexity: O(log n) -- the range is halved each call, so the
# recursion depth is 1 + log2(n).
def binary_sum(S, start, stop):
    """Recursively sum S[start:stop] by splitting the range in half."""
    span = stop - start
    if span <= 0:
        # empty slice
        return 0
    if span == 1:
        # single element
        return S[start]
    middle = (start + stop) // 2
    # left half covers [start, middle), right half covers [middle, stop)
    return binary_sum(S, start, middle) + binary_sum(S, middle, stop)
if __name__ == "__main__":
    # Demo: sum the integers 1..10 (expected output: 55).
    values = list(range(1, 11))
    print(binary_sum(values, 0, len(values)))
from common.protocolmeta import protocols
from itertools import islice
def first(iterable):
    """Return the first item of *iterable*, or None if it is empty."""
    # next() with a default replaces the Python-2-only islice(...).next()
    # call and the StopIteration handling; works on Python 2.6+ and 3.
    return next(iter(iterable), None)
def im_service_compatible(to_service, from_service):
    '''
    Returns True if a buddy on to_service can be IMed from a connection to from_service.
    '''
    # protocols (from common.protocolmeta) maps a service name to metadata
    # whose .compatible collection lists the services it can message.
    return to_service in protocols[from_service].compatible
def choose_to(metacontact):
    """Return the contact in *metacontact* to IM: its first online contact."""
    return metacontact.first_online
def choose_from(contact, accounts, tofrom):
    '''
    Given a contact, returns the best connected account to IM it from.
    Checks the to/from history.
    Returns an account object, or None.
    '''
    # 1) Prefer whatever the to/from history remembers for this contact.
    remembered = lookup_tofrom_account(contact, accounts, tofrom)
    if remembered is not None:
        return remembered
    # 2) Otherwise prefer the connected account that owns the contact.
    for candidate in accounts:
        if candidate.connection is contact.protocol:
            return candidate
    # 3) Fall back to any *compatible* connected account (or None).
    return first(compatible_im_accounts(contact, accounts))
def lookup_tofrom_account(contact, connected_accounts, tofrom):
    '''
    Searches the to/from IM list for an entry matching contact, where
    the from account matching the entry must be in connected_accounts.
    Returns a matching from account, or None.
    '''
    target = (contact.name, contact.service)
    for buddy_name, buddy_service, from_name, from_service in tofrom:
        # Skip history entries that are not about this buddy.
        if (buddy_name, buddy_service) != target:
            continue
        # Entry matches: return the connected account it points at, if any.
        for account in connected_accounts:
            if account.name == from_name and account.service == from_service:
                return account
def compatible_im_accounts(contact, accounts):
    '''
    Given a Contact and a sequence of Accounts, yields the accounts
    which can send IMs to the Contact.
    '''
    buddy_service = contact.service
    for account in accounts:
        if im_service_compatible(buddy_service, account.protocol):
            yield account
|
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
def add_layer(inputs, in_size, out_size, activation_function=None):
    """Append a fully connected layer to the TF1 graph.

    inputs: 2-D tensor of shape [batch, in_size].
    in_size / out_size: input / output width of the layer.
    activation_function: optional activation applied to Wx + b.
    Returns the output tensor of shape [batch, out_size].
    """
    # BUG FIX: tf.Variable's second positional parameter is `trainable`, so
    # the original tf.Variable(init, 'W') set trainable='W' (truthy) instead
    # of naming the variable; the name must be passed as a keyword.
    Weights = tf.Variable(tf.random_normal([in_size, out_size]), name='W')
    # small positive bias init helps avoid dead relu units at the start
    biases = tf.Variable(tf.zeros([1, out_size]) + 0.1, name='b')
    Wx_plus_b = tf.matmul(inputs, Weights) + biases
    if activation_function is None:
        outputs = Wx_plus_b
    else:
        outputs = activation_function(Wx_plus_b)
    return outputs
# Training data: 300 points in [-1, 1], column vector of shape (300, 1).
x_data = np.linspace(-1, 1, 300)[:, np.newaxis];
noise = np.random.normal(0, 0.05, x_data.shape)
# Target: y = x^2 plus gaussian noise.
y_data = np.square(x_data) + noise
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ax.scatter(x_data, y_data)
plt.ion()  # interactive mode: show() does not block the training loop
plt.show()
xs = tf.placeholder(np.float32, [None, 1], 'x_input')
ys = tf.placeholder(np.float32, [None, 1], 'y_input')
# Network: 1 input -> 20 hidden units (relu) -> 1 linear output.
l1 = add_layer(xs, 1, 20, activation_function=tf.nn.relu)
prediction = add_layer(l1, 20, 1, activation_function=None)
# Mean squared error: sum over the output dimension, mean over the batch.
loss = tf.reduce_mean(tf.reduce_sum(tf.square(ys - prediction), reduction_indices=[1]))
train_step = tf.train.MomentumOptimizer(0.05,0.1).minimize(loss)
init = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init)
feed = {xs: x_data, ys: y_data}
for step in range(3000):
    sess.run(train_step, feed_dict=feed)
    print(step, sess.run(loss, feed_dict=feed))
    if step % 20 == 0:
        try:
            # Remove the previous prediction line before redrawing.
            ax.lines.remove(lines[0])
        except Exception:
            # First iteration: `lines` does not exist yet.
            pass
        prediction_val = sess.run(prediction, feed_dict=feed)
        lines = ax.plot(x_data, prediction_val, 'r-', lw=5)
        plt.pause(0.1)
plt.show(block=True)  # keep the final figure open when training finishes
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtGui import QIcon, QPainter, QBrush, QPen
from PyQt5.QtCore import pyqtSlot, Qt
from gui.custom_button import CustomButton
from common import *
# Minimum change of the scaled (0-180) coordinate before an update is
# pushed to the client handle (see XYPad.updatePressedLocation).
NEED_UPDATE_AMOUNT_X = 2
NEED_UPDATE_AMOUNT_Y = 2
class XYPadPanel(QtWidgets.QGroupBox):
    """Group box hosting an XYPad; exposes the pad's raw and scaled coordinates."""

    def __init__(self, parent):
        super(XYPadPanel, self).__init__()
        self.parent = parent
        grid = QtWidgets.QGridLayout()
        self.padFrame = XYPad(self)
        grid.addWidget(self.padFrame)
        self.setLayout(grid)

    def getX(self):
        """Raw x position of the pad cursor, in pixels."""
        return self.padFrame.x

    def getY(self):
        """Raw y position of the pad cursor, in pixels."""
        return self.padFrame.y

    def getXscaled(self):
        """x position rescaled to the 0-180 range."""
        return self.padFrame.x * 180 / self.padFrame.width

    def getYscaled(self):
        """y position rescaled to the 0-180 range."""
        return self.padFrame.y * 180 / self.padFrame.height
class XYPad(QtWidgets.QFrame):
    """A 200x200 touch-pad frame: tracks the pressed/dragged position and
    forwards scaled (0-180) coordinates to the application's client handle."""

    def __init__(self, parent):
        super(XYPad, self).__init__()
        # NOTE(review): self.height / self.width shadow QWidget.height() /
        # QWidget.width(); works here because only the attributes are used.
        self.height = 200
        self.width = 200
        self.parent = parent
        self.pressed = False
        # Last scaled values actually sent to the client (threshold applies).
        self.lastUpdatedXScaled = 0
        self.lastUpdatedYScaled = 0
        # Cursor starts at the pad centre (scaled value 90 of 0-180).
        self.x = 90
        self.y = 90
        self.xScaled = self.x * 180 / self.width
        self.yScaled = self.y * 180 / self.height
        self.setFixedSize(self.width, self.height)

    def mouseMoveEvent(self, event):
        # Dragging updates the cursor continuously.
        self.updatePressedLocation(event)

    def mousePressEvent(self, event):
        print("Mouse Pressed")
        self.updatePressedLocation(event)
        self.pressed = True

    def mouseReleaseEvent(self, event):
        print("Mouse Released")
        self.pressed = False

    def paintEvent(self, event):
        # Draw a black border and a small red dot at the cursor position.
        painter = QPainter(self)
        painter.setPen(QPen(Qt.black, 8, Qt.SolidLine))
        painter.drawRect(0, 0, self.width, self.height)
        painter.setBrush(QBrush(Qt.red, Qt.SolidPattern))
        painter.drawEllipse(self.x-2, self.y-2, 4, 4)

    def updatePressedLocation(self, event):
        """Store the event position and notify the client handle when a
        scaled coordinate moved by more than the update threshold."""
        x = event.x()
        y = event.y()
        # Ignore positions outside the pad area.
        if (x >= 0 and x < self.width and y >= 0 and y < self.height):
            xScaled = x * 180 / self.width
            yScaled = y * 180 / self.height
            # Only push updates larger than the per-axis threshold.
            if (abs(self.lastUpdatedXScaled - xScaled) > NEED_UPDATE_AMOUNT_X):
                self.parent.parent.clientHandle.updateX(xScaled)
                self.lastUpdatedXScaled = xScaled
            if (abs(self.lastUpdatedYScaled - yScaled) > NEED_UPDATE_AMOUNT_Y):
                self.parent.parent.clientHandle.updateY(yScaled)
                self.lastUpdatedYScaled = yScaled
            self.xScaled = xScaled
            self.yScaled = yScaled
            self.x = x
            self.y = y
            self.printLocation()
            self.repaint()

    def printLocation(self):
        # Debug output of the current raw and scaled coordinates.
        print("Pad x %d, y %d (scaled x %d, y %d)" % (self.x, self.y, self.xScaled, self.yScaled))
|
import pytest
from teams import schema, models
class DummyInfo:
    """Minimal stand-in for the info object passed to mutations; only .context is exposed."""

    def __init__(self, context):
        # context: the (test) request the mutation reads its user from.
        self.context = context
def test_mutate(rf, user_factory):
    """
    If the requesting user is a staff user, a new team should be
    created.
    """
    staff_user = user_factory(is_staff=True)
    request = rf.post("/")
    request.user = staff_user
    mutation = schema.CreateTeam()
    result = mutation.mutate(DummyInfo(request), year=2018)
    # Exactly one team exists, and the mutation returned it.
    created = models.Team.objects.get()
    assert result.team == created
def test_mutate_non_staff(rf, user_factory):
    """
    If the requesting user is not a staff user, an exception should be
    raised.
    """
    plain_user = user_factory()
    request = rf.post("/")
    request.user = plain_user
    mutation = schema.CreateTeam()
    with pytest.raises(Exception):
        mutation.mutate(DummyInfo(request), year=2018)
    # No team may have been created.
    assert not models.Team.objects.exists()
def test_mutate_non_unique(rf, team_factory, user_factory):
    """
    Attempting to create a team for a year that already has a team
    should throw an error.
    """
    duplicate_year = 2018
    team_factory(year=duplicate_year)
    staff_user = user_factory(is_staff=True)
    request = rf.post("/")
    request.user = staff_user
    mutation = schema.CreateTeam()
    with pytest.raises(Exception):
        mutation.mutate(DummyInfo(request), year=duplicate_year)
    # Only the pre-existing team remains.
    assert models.Team.objects.count() == 1
|
#!/usr/bin/python3
#
# Legal Stuff:
#
# This file is part of the Pop Icon Theme and is free software; you can
# redistribute it and/or modify it under the terms of the GNU Lesser General
# Public License as published by the Free Software Foundation; version 3.
#
# This file is part of the Pop Icon Theme and is distributed in the hope that
# it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, see <https://www.gnu.org/licenses/lgpl-3.0.txt>
#
#
# Thanks to the GNOME icon developers for the original version of this script
import os
import sys
import xml.sax
import subprocess
import argparse
INKSCAPE = '/usr/bin/inkscape'  # renderer binary (used via --shell)
OPTIPNG = '/usr/bin/optipng'    # optional PNG optimizer; skipped when absent
MAINDIR = '../../Pop'           # root of the rendered theme output
# icon context directories searched for SVG sheets
SOURCES = ('actions', 'apps', 'categories', 'devices', 'emblems', 'logos', 'mimetypes', 'places', 'preferences', 'status', 'stock')
# the resolution that non-hi-dpi icons are rendered at
DPI_1_TO_1 = 96
# DPI multipliers to render at
DPIS = [1, 2]
# shared long-lived inkscape --shell process, started lazily by main()
inkscape_process = None
def main(args, SRC):
    """Render every icon rect found in the SVG sheets under SRC to PNG.

    args: parsed argparse namespace; .svg optionally names a single sheet
        (without extension) to force-render, .filter optionally whitelists
        icon names within that sheet.
    SRC: directory containing the .svg source sheets.
    """
    def optimize_png(png_file):
        # Losslessly recompress the rendered PNG when optipng is installed.
        if os.path.exists(OPTIPNG):
            process = subprocess.Popen([OPTIPNG, '-quiet', '-o7', png_file])
            process.wait()

    def wait_for_prompt(process, command=None):
        # Send a command to the inkscape shell (when given) and block until
        # the next '>' prompt appears on its stdout.
        if command is not None:
            process.stdin.write((command+'\n').encode('utf-8'))
        # This is kinda ugly ...
        # Wait for just a '>', or '\n>' if some other char appeared first
        output = process.stdout.read(1)
        if output == b'>':
            return
        output += process.stdout.read(1)
        while output != b'\n>':
            # slide a two-byte window over the stream until '\n>' is seen
            output += process.stdout.read(1)
            output = output[1:]

    def start_inkscape():
        # Launch one long-lived inkscape process in --shell mode.
        process = subprocess.Popen([INKSCAPE, '--shell'], bufsize=0, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
        wait_for_prompt(process)
        return process

    def inkscape_render_rect(icon_file, rect, dpi, output_file):
        # Render a single rect id of icon_file at the given dpi, reusing the
        # shared inkscape shell process (started lazily on first use).
        global inkscape_process
        if inkscape_process is None:
            inkscape_process = start_inkscape()
        cmd = [icon_file,
               '--export-dpi', str(dpi),
               '-i', rect,
               '-e', output_file]
        wait_for_prompt(inkscape_process, ' '.join(cmd))
        optimize_png(output_file)

    class ContentHandler(xml.sax.ContentHandler):
        """SAX handler: finds 'Baseplate' layers, reads their context and
        icon-name text nodes plus export rects, and renders each rect."""
        # parser states
        ROOT = 0
        SVG = 1
        LAYER = 2
        OTHER = 3
        TEXT = 4

        def __init__(self, path, force=False, filter=None):
            # path: the SVG sheet being parsed
            # force: re-render even when the PNG is newer than the SVG
            # filter: optional whitelist of icon names
            self.stack = [self.ROOT]
            self.inside = [self.ROOT]
            self.path = path
            self.rects = []
            self.state = self.ROOT
            self.chars = ""
            self.force = force
            self.filter = filter

        def endDocument(self):
            pass

        def startElement(self, name, attrs):
            if self.inside[-1] == self.ROOT:
                if name == "svg":
                    self.stack.append(self.SVG)
                    self.inside.append(self.SVG)
                    return
            elif self.inside[-1] == self.SVG:
                # only layers whose label starts with 'Baseplate' are rendered
                if (name == "g" and ('inkscape:groupmode' in attrs) and ('inkscape:label' in attrs)
                        and attrs['inkscape:groupmode'] == 'layer' and attrs['inkscape:label'].startswith('Baseplate')):
                    self.stack.append(self.LAYER)
                    self.inside.append(self.LAYER)
                    self.context = None
                    self.icon_name = None
                    self.rects = []
                    return
            elif self.inside[-1] == self.LAYER:
                if name == "text" and ('inkscape:label' in attrs) and attrs['inkscape:label'] == 'context':
                    self.stack.append(self.TEXT)
                    self.inside.append(self.TEXT)
                    self.text='context'
                    self.chars = ""
                    return
                elif name == "text" and ('inkscape:label' in attrs) and attrs['inkscape:label'] == 'icon-name':
                    self.stack.append(self.TEXT)
                    self.inside.append(self.TEXT)
                    self.text='icon-name'
                    self.chars = ""
                    return
                elif name == "rect":
                    # export rectangle: remember its attrs for endElement
                    self.rects.append(attrs)
            self.stack.append(self.OTHER)

        def endElement(self, name):
            stacked = self.stack.pop()
            if self.inside[-1] == stacked:
                self.inside.pop()
            if stacked == self.TEXT and self.text is not None:
                assert self.text in ['context', 'icon-name']
                if self.text == 'context':
                    self.context = self.chars
                elif self.text == 'icon-name':
                    self.icon_name = self.chars
                self.text = None
            elif stacked == self.LAYER:
                # a Baseplate layer just closed: render its rects
                assert self.icon_name
                assert self.context
                if self.filter is not None and not self.icon_name in self.filter:
                    return
                print (self.context, self.icon_name)
                for rect in self.rects:
                    for dpi_factor in DPIS:
                        width = rect['width']
                        height = rect['height']
                        id = rect['id']
                        dpi = DPI_1_TO_1 * dpi_factor
                        # output dir is e.g. 16x16 or 16x16@2x for hi-dpi
                        size_str = "%sx%s" % (width, height)
                        if dpi_factor != 1:
                            size_str += "@%sx" % dpi_factor
                        dir = os.path.join(MAINDIR, size_str, self.context)
                        outfile = os.path.join(dir, self.icon_name+'.png')
                        if not os.path.exists(dir):
                            os.makedirs(dir)
                        # Do a time based check!
                        if self.force or not os.path.exists(outfile):
                            inkscape_render_rect(self.path, id, dpi, outfile)
                            sys.stdout.write('.')
                        else:
                            # only re-render when the SVG is newer than the PNG
                            stat_in = os.stat(self.path)
                            stat_out = os.stat(outfile)
                            if stat_in.st_mtime > stat_out.st_mtime:
                                inkscape_render_rect(self.path, id, dpi, outfile)
                                sys.stdout.write('.')
                            else:
                                sys.stdout.write('-')
                        sys.stdout.flush()
                sys.stdout.write('\n')
                sys.stdout.flush()

        def characters(self, chars):
            self.chars += chars.strip()

    if not args.svg:
        # no sheet named: render every SVG in SRC
        if not os.path.exists(MAINDIR):
            os.mkdir(MAINDIR)
        print ('')
        print ('Rendering from SVGs in', SRC)
        print ('')
        for file in os.listdir(SRC):
            if file[-4:] == '.svg':
                file = os.path.join(SRC, file)
                handler = ContentHandler(file)
                xml.sax.parse(open(file), handler)
        print ('')
    else:
        # single sheet requested: force-render it with the optional filter
        file = os.path.join(SRC, args.svg + '.svg')
        if os.path.exists(os.path.join(file)):
            handler = ContentHandler(file, True, filter=args.filter)
            xml.sax.parse(open(file), handler)
        else:
            # icon not in this directory, try the next one
            pass
# Command line: an optional single SVG sheet name and an icon-name filter.
parser = argparse.ArgumentParser(description='Render icons from SVG to PNG')
parser.add_argument('svg', type=str, nargs='?', metavar='SVG',
                    help="Optional SVG names (without extensions) to render. If not given, render all icons")
parser.add_argument('filter', type=str, nargs='?', metavar='FILTER',
                    help="Optional filter for the SVG file")
args = parser.parse_args()
# Render every source context directory in turn.
for source in SOURCES:
    SRC = os.path.join('.', source)
    main(args, SRC)
|
from django.test import TestCase
import numpy as np
import networkx as nx
from cinema.tests.data4tests import get_small_graph
from cinema.cinegraph.grapher import PersonNode
from cinema.cinegame import game_maker
class TestGameMaker(TestCase):
    """Exercise game_maker.make_game_by_iteration on a small fixed graph.

    NOTE(review): the expected nodes depend on the exact draw sequence of
    the seeded RandomState, so the order of the calls below is significant.
    """

    def setUp(self):
        # Fresh graph and deterministically seeded RNG for every test.
        self.g = get_small_graph()
        self.r = np.random.RandomState(42)

    def tearDown(self):
        pass

    def test_make_game_by_iteration(self):
        # select one third of the actors in the graph as candidates for start and end nodes
        candidates = [node for node in self.g.nodes if node.is_person and (node.id % 3 == 0)]
        # choose actors who have been in the same movie
        # (separation n corresponds to a path of 2n edges: person-movie-person per step)
        a, b = game_maker.make_game_by_iteration(self.g, candidates, 1, r=self.r)
        self.assertEqual(PersonNode(51), a)
        self.assertEqual(PersonNode(532290), b)
        self.assertEqual(2, nx.shortest_path_length(self.g, a, b))
        a, b = game_maker.make_game_by_iteration(self.g, candidates, 1, r=self.r)
        self.assertEqual(PersonNode(1512), a)
        self.assertEqual(PersonNode(147), b)
        self.assertEqual(2, nx.shortest_path_length(self.g, a, b))
        # choose actors who are two movies apart
        a, b = game_maker.make_game_by_iteration(self.g, candidates, 2, r=self.r)
        self.assertEqual(PersonNode(171513), a)
        self.assertEqual(PersonNode(1644), b)
        self.assertEqual(4, nx.shortest_path_length(self.g, a, b))
        a, b = game_maker.make_game_by_iteration(self.g, candidates, 2, r=self.r)
        self.assertEqual(PersonNode(378), a)
        self.assertEqual(PersonNode(1512), b)
        self.assertEqual(4, nx.shortest_path_length(self.g, a, b))
        a, b = game_maker.make_game_by_iteration(self.g, candidates, 2, r=self.r)
        self.assertEqual(PersonNode(770961), a)
        self.assertEqual(PersonNode(138), b)
        self.assertEqual(4, nx.shortest_path_length(self.g, a, b))
        # choose some actors that are three movies apart
        a, b = game_maker.make_game_by_iteration(self.g, candidates, 3, r=self.r)
        self.assertEqual(PersonNode(330687), a)
        self.assertEqual(PersonNode(1605114), b)
        self.assertEqual(6, nx.shortest_path_length(self.g, a, b))
        a, b = game_maker.make_game_by_iteration(self.g, candidates, 3, r=self.r)
        self.assertEqual(PersonNode(123), a)
        self.assertEqual(PersonNode(861915), b)
        self.assertEqual(6, nx.shortest_path_length(self.g, a, b))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.