# -*- coding: utf-8 -*-
dp = []
dp.append(0)  # dp[0] = 0 would raise an IndexError on an empty list
print(dp)
|
from setuptools import setup, find_packages

setup(
    name='cyberhead',
    version='1.0',
    url='https://github.com/TheCyberHead',
    license='MIT',
    author='CyberHead LLC',
    author_email='info@cyberhead.com',
    entry_points={"console_scripts": ["cyberhead = cyberhead.wrapper:cli"]},
    description='Modular Open Source Trading',
    packages=find_packages(),
    zip_safe=False,
)
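
# With this setup.py in place, an editable install is typically done with:
#   pip install -e .
# which also exposes the `cyberhead` console script declared in entry_points.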
|
a = '12345'
b = int(a)
print(b)
print(type(b))
c = int(a, base=8)
print(c)
d = int(a, base=6)
# print(d)
# functools.partial builds a new function with some arguments pre-filled
import functools
int2 = functools.partial(int, base=2)
f = int2('1001')
print(f)
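# A further illustrative example (not from the original): partial can also
# pre-fill positional arguments.
max_with_zero = functools.partial(max, 0)
print(max_with_zero(-5, 3))  # prints 3, since 0 is always among the candidates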
|
# Wind Turbine Allocation Game
# import libraries
import pygame as pg
import random
from settings import *
from sprites import *
from windspeed import *
import time
from os import path
###### Game Idea ########
# Regions
# - Implement 2-4 regions / on-shore and off-shore locations
# - Implement slots where wind turbines can be placed
# - Implement functionality as soon as wind turbine is placed, turbine interacts with
# wind in that region to generate power
# - Use simple collision and drag and drop system
# Turbines
# - Implement wind turbines in inventory
# - Begin with 5 turbines
# - Turbines have HP
# - More energy production leads to faster degradation of turbine
# - Have to implement placement of wind turbines
# Wind
# - Implement a function that generates a random wind speed for each region
# -   (use a Weibull distribution; a minimal sketch follows before the Game class)
# - Update every 5 seconds
# - Wind speed determines power/energy generation of wind turbines
# Energy
# - Calculate energy generated each second
# - Energy generation determined by number of turbines in region and wind speed
# Earth Satisfaction
# - Dependent on energy levels
# - Earth happy if energy generation rate > threshold level
# - Timer resets every time earth emotion changes
# - What should the threshold level be?
# Game Over condition
# - Initially: keep earth happy for a certain period of time
# - Ended up just requiring a certain amount of generated power
# Animations
# - Flowing wind turbines
# - Animated clouds
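# A minimal sketch of the Weibull wind-speed idea above; the shape and scale
# values here are illustrative placeholders, not the game's actual numbers.
# random.weibullvariate(alpha, beta) samples a Weibull distribution with
# scale alpha and shape beta.
def sample_wind_speed(scale=6.0, shape=2.0):
    return round(random.weibullvariate(scale, shape), 1)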
# Game class
class Game:
def __init__(self):
# initialize everything
pg.init()
pg.mixer.init()
self.screen = pg.display.set_mode((WIDTH, HEIGHT))
pg.display.set_caption('Wind Turbine Game')
self.clock = pg.time.Clock()
self.running = True
self.font_name = pg.font.match_font(FONT_NAME)
self.load_data()
def load_data(self):
        self.dir = path.dirname(__file__)  # the original passed the string '__file__', which yields ''
self.img_dir = path.join(self.dir, 'img')
self.snd_dir = path.join(self.dir, 'snd')
# Sounds
pg.mixer.music.load(path.join(self.snd_dir, 'wind.wav'))
def new(self):
# New game
# Power generated
self.power = 0
self.win = False
# Sprite groups
self.all_sprites = pg.sprite.LayeredUpdates()
self.turbines = pg.sprite.Group()
self.slots = pg.sprite.Group()
self.regions = pg.sprite.Group()
self.earth = EarthEmotion(self, 450,450)
# Wind speed initial values
self.offshore_region_1_windspeed = 0
self.offshore_region_2_windspeed = 0
self.onshore_region_1_windspeed = 0
self.onshore_region_2_windspeed = 0
# Keep track of time
self.wind_timer = 0
self.power_timer = 0
self.img_timer = 0
self.start_time = pg.time.get_ticks()
self.count_seconds = 0
# Background images
self.offshore_region_imgs = []
        self.offshore_region_imgs.append(pg.image.load(path.join(self.img_dir, 'offshore1.png')).convert())
        self.offshore_region_imgs.append(pg.image.load(path.join(self.img_dir, 'offshore2.png')).convert())
        self.offshore_region_imgs.append(pg.image.load(path.join(self.img_dir, 'offshore3.png')).convert())
        self.offshore_region_imgs.append(pg.image.load(path.join(self.img_dir, 'offshore4.png')).convert())
self.index = 0
self.offshore_region = self.offshore_region_imgs[self.index]
self.onshore_region = pg.image.load(path.join(self.img_dir, 'onshore.png')).convert()
# Spawn some turbines
x = 25
for i in range(6):
Turbines(self, x, 800)
x += 100
self.turbine_origin = []
for turbine in self.turbines:
self.turbine_origin.append([turbine.rect.x, turbine.rect.y])
# Create the grid under the map
self.grid = self.create_grid()
# Spawn turbine slots, where the grid is equal to turbine
for x in range(36):
for y in range(36):
if self.grid[x][y] == 'turbine':
TurbineSlot(self,(x*25),(y*25))
                if self.grid[x][y] in ('region1', 'region2', 'region3', 'region4'):  # the original `== 'region1' or "region2" ...` was always true
Regions(self, (x*25), (y*25))
# Draw Earth
self.run()
def run(self):
pg.mixer.music.play(loops=-1)
self.playing = True
while self.playing:
self.clock.tick(FPS)
self.events()
self.update()
self.draw()
pg.mixer.music.fadeout(500)
def get_mouse_position(self):
'''
Returns mouse position
'''
mouse_pos = pg.Vector2(pg.mouse.get_pos())
return mouse_pos
def update(self):
# Update game loop
# Set timer
count_time = pg.time.get_ticks() - self.start_time
# Count the time
self.count_minutes = int(count_time / 60000) % 60
if self.count_seconds < 61:
self.count_seconds = int(count_time / 1000) % 60
self.all_sprites.update()
# Get current mouse position
self.mouse_pos = self.get_mouse_position()
# Store the information under mouse
self.piece, self.x, self.y = self.get_square_under_mouse(self.grid)
# Check if turbine is colliding with slot
for turbine in self.turbines:
if pg.sprite.spritecollide(turbine, self.slots, False):
turbine.in_slot = True
else:
turbine.in_slot = False
# Update wind speeds
now = pg.time.get_ticks()
if now - self.wind_timer > 5000:
self.wind_timer = now
# Wind speed for first offshore region
self.offshore_region_1_windspeed = Wind_speed(WS, 6, 3)
# Wind speed for second offshore region
self.offshore_region_2_windspeed = Wind_speed(WS, 6, 3)
# Wind speed for first onshore region
self.onshore_region_1_windspeed = Wind_speed(WS, 6, 2)
            # Wind speed for second onshore region
self.onshore_region_2_windspeed = Wind_speed(WS, 6, 2)
if self.win:
self.playing = False
# Update power generation
power_now = pg.time.get_ticks()
if power_now - self.power_timer > 2000:
for turbine in self.turbines:
if turbine.in_slot:
if self.piece == 'region1':
self.power += Power_output(self.offshore_region_1_windspeed)
elif self.piece == 'region2':
self.power += Power_output(self.offshore_region_2_windspeed)
elif self.piece == 'region3':
self.power += Power_output(self.onshore_region_1_windspeed)
else:
self.power += Power_output(self.onshore_region_2_windspeed)
self.power_timer = power_now
if self.power > 10000:
self.earth.animating = True
self.win = True
# Game Over Condition
print(self.count_minutes)
if self.count_minutes > 0:
self.playing = False
def events(self):
# Events loop
for event in pg.event.get():
if event.type == pg.QUIT:
self.playing = False
self.running = False
if event.type == pg.MOUSEBUTTONDOWN:
for turbine in self.turbines:
if turbine.check_collision(self.mouse_pos):
turbine.click = True
break
if event.type == pg.MOUSEBUTTONUP:
for turbine in self.turbines:
turbine.click = False
def draw(self):
# Draw sprites
background = pg.image.load(path.join(self.img_dir, 'Background.png')).convert()
background_rect = background.get_rect()
offshore_region_rect = self.offshore_region.get_rect()
onshore_region_rect = self.onshore_region.get_rect()
# Set appropriate offsets to draw the background images
offshore_region_rect_offset1 = (offshore_region_rect[0] + 25, offshore_region_rect[1] + 100)
offshore_region_rect_offset2 = (offshore_region_rect[0] + 575, offshore_region_rect[1] + 100)
onshore_region_rect_offset1 = (onshore_region_rect[0] + 25, onshore_region_rect[1] + 475)
onshore_region_rect_offset2 = (onshore_region_rect[0] + 575, onshore_region_rect[1] + 475)
self.screen.blit(background, background_rect)
# Draw background for region 1
self.screen.blit(self.offshore_region, offshore_region_rect_offset1)
self.screen.blit(self.offshore_region, offshore_region_rect_offset2)
self.screen.blit(self.onshore_region, onshore_region_rect_offset1)
self.screen.blit(self.onshore_region, onshore_region_rect_offset2)
self.draw_text('Wind Speed: '+ str(self.offshore_region_1_windspeed) + 'm/s', 18, BLACK, WIDTH - 750, HEIGHT - 830)
self.draw_text('Wind Speed: '+ str(self.offshore_region_2_windspeed) + 'm/s', 18, BLACK, WIDTH - 200, HEIGHT - 830)
self.draw_text('Wind Speed: '+ str(self.onshore_region_1_windspeed) + 'm/s', 18, BLACK, WIDTH - 750, HEIGHT - 460)
self.draw_text('Wind Speed: '+ str(self.onshore_region_2_windspeed) + 'm/s', 18, BLACK, WIDTH - 200, HEIGHT - 460)
self.draw_text('Time: ' + str(self.count_minutes) + ' : '+ str(self.count_seconds), 18, BLACK, WIDTH / 2, HEIGHT - 850)
self.draw_text('Power generated: ' + str(self.power) + ' kW', 18, BLACK, WIDTH / 2, HEIGHT - 250)
self.all_sprites.draw(self.screen)
# self.draw_grid()
# Draw the rectangle around the square the mouse is above
        if self.x is not None:
rect = (BOARD_POS[0] + self.x * TILESIZE, BOARD_POS[1] + self.y * TILESIZE, TILESIZE, TILESIZE)
pg.draw.rect(self.screen, (255, 0, 0, 50), rect, 2)
for slot in self.slots:
if slot.rect.collidepoint(self.mouse_pos):
pg.draw.rect(self.screen, (127, 255, 0, 50), rect, 2)
pg.display.flip()
def create_grid(self):
'''
Creates a 36 x 36 grid with 25x25px per tile.
Appends region and slot markers to generate sprites upon starting the game.
'''
grid = []
for y in range(36):
grid.append([])
for x in range(36):
grid[y].append(None)
# regions
grid[1][2] = ('region1')
grid[23][2] = ('region2')
grid[1][17] = ('region3')
grid[23][17] = ('region4')
# Offshore 1 first slots
grid[2][13] = ('turbine')
# Offshore 1 second slots
grid[9][13] = ('turbine')
# Offshore 2 first slots
grid[24][13] = ('turbine')
# Offshore 2 second slots
grid[31][13] = ('turbine')
# Onshore 1 first slots
grid[2][28] = ('turbine')
# Onshore 1 second slots
grid[6][27] = ('turbine')
# Onshore 1 third slots
grid[10][29] = ('turbine')
# Onshore 2 first slots
grid[24][28] = ('turbine')
# Onshore 2 second slots
grid[28][27] = ('turbine')
# Onshore 2 third slots
grid[32][29] = ('turbine')
return grid
def get_square_under_mouse(self, grid):
'''
Takes in the 36 x 36 grid of the map and returns the x and y position
of the current tile under the mouse.
It also returns the string assigned to the tile, if any.
'''
mouse_pos = pg.Vector2(pg.mouse.get_pos())
x, y = [int(v // TILESIZE) for v in mouse_pos]
try:
if x >= 0 and y >= 0: return (grid[y][x], x, y)
except IndexError: pass
return None, None, None
def draw_grid(self):
'''
Draws the 36 x 36 grid on the map
'''
for x in range(0, WIDTH, TILESIZE):
pg.draw.line(self.screen, BLACK, (x, 0), (x, HEIGHT))
for y in range(0, HEIGHT, TILESIZE):
pg.draw.line(self.screen, BLACK, (0, y), (WIDTH, y))
def show_start_screen(self):
'''
Game start screen
'''
# Made this using procreate following tutorial by 'Art with Flo'
background = pg.image.load(path.join(self.img_dir, 'earth.png')).convert()
background_rect = background.get_rect()
self.screen.blit(background, background_rect)
# Print some information about how to play
self.draw_text('Generate 10,000kW of Power in under 1 Minute to Save the Earth!', 28, GREEN, WIDTH / 2, HEIGHT - 850)
        self.draw_text('Place Turbines in slots to generate power. More Wind = More Power!', 28, GREEN, WIDTH / 2, (HEIGHT - 100))
self.draw_text('Press any key to play', 28, GREEN, WIDTH / 2, HEIGHT - 150)
pg.display.flip()
self.wait_for_key()
def show_end_screen(self):
'''
Game over screen
'''
if not self.running:
return
if self.win:
self.screen.fill(BLACK)
self.draw_text('Game Over. You Saved Earth!', 22, WHITE, WIDTH / 2, HEIGHT / 2)
self.draw_text('Press any key to play again', 22, WHITE, WIDTH / 2, (HEIGHT + 50) / 2)
else:
self.screen.fill(BLACK)
self.draw_text('Game Over. Earth is Doomed!', 22, WHITE, WIDTH / 2, HEIGHT / 2)
self.draw_text('Press any key to play again', 22, WHITE, WIDTH / 2, (HEIGHT +50) / 2)
pg.display.flip()
self.wait_for_key()
def wait_for_key(self):
        '''
        Block until the player presses a key or quits the game.
        '''
pg.event.wait()
waiting = True
while waiting:
self.clock.tick(FPS)
for event in pg.event.get():
if event.type == pg.QUIT:
waiting = False
self.running = False
if event.type == pg.KEYUP:
waiting = False
def draw_text(self, text, size, color, x, y):
'''
Function for drawing any text on the game screen
'''
font = pg.font.Font(self.font_name, size)
text_surface = font.render(text, True, color)
text_rect = text_surface.get_rect()
text_rect.midtop = (x, y)
self.screen.blit(text_surface, text_rect)
g = Game()
g.show_start_screen()
while g.running:
g.new()
g.show_end_screen()
pg.quit()
|
import argparse
import numpy as np
import tensorflow as tf
import os
import CNN_recurrent
import helper
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
# use CPU for RPM to ensure determinism
config = tf.ConfigProto(allow_soft_placement=True, device_count={'GPU': 0})
sess = tf.Session(config=config)
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--path", default='BasketballPass')
parser.add_argument("--frame", type=int, default=100)
parser.add_argument("--f_P", type=int, default=6)
parser.add_argument("--b_P", type=int, default=6)
parser.add_argument("--mode", default='PSNR', choices=['PSNR', 'MS-SSIM'])
parser.add_argument("--l", type=int, default=1024, choices=[8, 16, 32, 64, 256, 512, 1024, 2048])
parser.add_argument("--entropy_coding", type=int, default=1)
parser.add_argument("--N", type=int, default=128, choices=[128])
parser.add_argument("--M", type=int, default=128, choices=[128])
args = parser.parse_args()
# Settings
I_level, Height, Width, batch_size, Channel, \
activation, GOP_size, GOP_num, \
path, path_com, path_bin, path_lat = helper.configure(args)
# Placeholder
prior_tensor = tf.placeholder(tf.float32, [batch_size, Height//16, Width//16, args.M]) # previous latent
latent_tensor = tf.placeholder(tf.float32, [batch_size, Height//16, Width//16, args.M]) # latent to compress
hidden_states = tf.placeholder(tf.float32, [2, batch_size, Height//16, Width//16, args.N]) # hidden states in RPM
c_prob, h_prob = tf.split(hidden_states, 2, axis=0)
# RPM network
prob_latent, c_prob_out, h_prob_out \
= CNN_recurrent.rec_prob(prior_tensor, args.N, Height, Width, c_prob[0], h_prob[0])
# estimate bpp
bits_est, sigma, mu = CNN_recurrent.bpp_est(latent_tensor, prob_latent, args.N)
hidden_states_out = tf.stack([c_prob_out, h_prob_out], axis = 0)
# calculates bits for I frames and bottlenecks
total_bits = 0
for g in range(GOP_num + 1):
I_index = g * GOP_size + 1
if I_index <= args.frame:
# I frame
total_bits += os.path.getsize(path_bin + 'f' + str(I_index).zfill(3) + '.bin') * 8
# if there exists forward P frame(s), I_index + 1 is encoded by the bottleneck
if args.f_P > 0 and I_index + 1 <= args.frame:
total_bits += os.path.getsize(path_bin + 'f' + str(I_index + 1).zfill(3) + '.bin') * 8
# if there exists backward P frame(s), I_index - 1 is encoded by the bottleneck
if args.b_P > 0 and I_index - 1 >= 1:
total_bits += os.path.getsize(path_bin + 'f' + str(I_index - 1).zfill(3) + '.bin') * 8
# start RPM
latents = ['mv', 'res'] # two kinds of latents
for lat in latents:
# load model
model_path = './model/RPM_' + args.mode + '_' + str(args.l) + '_' + lat
saver = tf.train.Saver(max_to_keep=None)
saver.restore(sess, save_path=model_path + '/model.ckpt')
# encode GOPs
for g in range(GOP_num):
# forward P frames (only if more than 2 P frames exist)
if args.f_P >= 2:
# load first prior
frame_index = g * GOP_size + 2
prior_value = np.load(path_lat + '/f' + str(frame_index).zfill(3) + '_' + lat + '.npy')
# init state
            h_state = np.zeros([2, batch_size, Height // 16, Width // 16, args.N], dtype=np.float32)  # np.float is removed in recent NumPy
for f in range(args.f_P - 1):
# load latent
frame_index = g * GOP_size + f + 3
latent_value = np.load(path_lat + '/f' + str(frame_index).zfill(3) + '_' + lat + '.npy')
# run RPM
bits_estimation, sigma_value, mu_value, h_state \
= sess.run([bits_est, sigma, mu, hidden_states_out],
feed_dict={prior_tensor: prior_value, latent_tensor: latent_value,
hidden_states: h_state})
if args.entropy_coding:
bits_value = helper.entropy_coding(frame_index, lat, path_bin, latent_value, sigma_value, mu_value)
total_bits += bits_value
print('Frame', frame_index, lat + '_bits =', bits_value)
else:
total_bits += bits_estimation
print('Frame', frame_index, lat + '_bits =', bits_estimation)
# the latent will be the prior for the next latent
prior_value = latent_value
# backward P frames (only if more than 2 P frames exist)
if args.b_P >= 2:
# load first prior
frame_index = (g + 1) * GOP_size
prior_value = np.load(path_lat + '/f' + str(frame_index).zfill(3) + '_' + lat + '.npy')
# init state
            h_state = np.zeros([2, batch_size, Height // 16, Width // 16, args.N], dtype=np.float32)
for f in range(args.b_P - 1):
# load latent
frame_index = (g + 1) * GOP_size - f - 1
latent_value = np.load(path_lat + '/f' + str(frame_index).zfill(3) + '_' + lat + '.npy')
# run RPM
bits_estimation, sigma_value, mu_value, h_state \
= sess.run([bits_est, sigma, mu, hidden_states_out],
feed_dict={prior_tensor: prior_value, latent_tensor: latent_value,
hidden_states: h_state})
if args.entropy_coding:
bits_value = helper.entropy_coding(frame_index, lat, path_bin, latent_value, sigma_value, mu_value)
total_bits += bits_value
print('Frame', frame_index, lat + '_bits =', bits_value)
else:
total_bits += bits_estimation
print('Frame', frame_index, lat + '_bits =', bits_estimation)
# the latent will be the prior for the next latent
prior_value = latent_value
# encode rest frames (only if more than 2 P frames exist)
rest_frame_num = args.frame - 1 - GOP_size * GOP_num
if rest_frame_num >= 2:
# load first prior
frame_index = GOP_num * GOP_size + 2
prior_value = np.load(path_lat + '/f' + str(frame_index).zfill(3) + '_' + lat + '.npy')
# init state
        h_state = np.zeros([2, batch_size, Height // 16, Width // 16, args.N], dtype=np.float32)
for f in range(rest_frame_num - 1):
# load latent
frame_index = GOP_num * GOP_size + f + 3
latent_value = np.load(path_lat + '/f' + str(frame_index).zfill(3) + '_' + lat + '.npy')
# run RPM
bits_estimation, sigma_value, mu_value, h_state \
= sess.run([bits_est, sigma, mu, hidden_states_out],
feed_dict={prior_tensor: prior_value, latent_tensor: latent_value,
hidden_states: h_state})
if args.entropy_coding:
bits_value = helper.entropy_coding(frame_index, lat, path_bin, latent_value, sigma_value, mu_value)
total_bits += bits_value
print('Frame', frame_index, lat + '_bits =', bits_value)
else:
total_bits += bits_estimation
print('Frame', frame_index, lat + '_bits =', bits_estimation)
# the latent will be the prior for the next latent
prior_value = latent_value
bpp_video = total_bits/args.frame/Height/Width
print('Average bpp:', bpp_video)
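# For a sense of scale (illustrative numbers only): BasketballPass is a
# 416x240 test sequence, so with the default 100 frames a total of ~5e6 bits
# would correspond to roughly 5e6 / (100 * 240 * 416) ≈ 0.50 bpp.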
|
from app.views import all_pages, show_page
def setup_routes(app):
app.router.add_get('/api/v1/list_pages', all_pages)
app.router.add_get('/api/v1/page/{page_id}', show_page)
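
# Hypothetical wiring sketch (aiohttp assumed from the add_get router API):
# from aiohttp import web
# app = web.Application()
# setup_routes(app)
# web.run_app(app, port=8080)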
|
import logging
import os
import json
from backend import LookupHotelInviumPlaces
API_KEY = os.environ.get('API_KEY')
LOGGING_LEVEL = os.environ.get('LOGGING_LEVEL')
def handler(event, context):
if LOGGING_LEVEL == 'DEBUG':
logging.getLogger().setLevel(logging.DEBUG)
else:
logging.getLogger().setLevel(logging.INFO)
logging.info('Received Address Lookup Request')
logging.info(json.dumps(event))
places = LookupHotelInviumPlaces()
places.initialise_places(api_key=API_KEY)
request = json.loads(event['body'])
address = request['address']
language = request.get('language', 'en')
area = request['area']
return {
"statusCode": 200,
"body": json.dumps(places.lookup_hotels(address=address, language=language, area=area)),
"headers": {
"Content-Type": "application/json",
"Access-Control-Allow-Origin": "*"
}
}
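
# Illustrative local invocation with a hand-built event; in production the
# event comes from API Gateway, and LookupHotelInviumPlaces is project-specific.
# if __name__ == '__main__':
#     fake_event = {'body': json.dumps({'address': '1 Main St', 'area': 'Sydney'})}
#     print(handler(fake_event, None))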
|
class Circle:
name = 'Circle'
def __init__(self, color, size):
self.color = color
self.size = size
class Triangle:
name = 'Triangle'
def __init__(self, color, size):
self.color = color
self.size = size
class Rectangle:
name = 'Rectangle'
def __init__(self, color, size):
self.color = color
self.size = size
class Star:
name = 'Star'
def __init__(self, color, size, points):
self.color = color
self.size = size
self.points = points
class Box:
def __init__(self, name, figures):
self.name = name
self.figures = figures
def put_in(self, figure):
self.figures.append([figure.name, figure.color, figure.size])
def print_info(self):
print(f'Figures: {self.figures}')
circle = Circle('pink', 2)
triangle = Triangle('magenta', 8)
rectangle = Rectangle('orange', 0.4)
star = Star('violet', 19, 9)
box_1 = Box('Box 1', [])
box_1.put_in(circle)
box_1.put_in(star)
box_1.put_in(rectangle)
box_1.print_info()
|
import unittest
from katas.kyu_8.squash_the_bugs import find_longest
class SquashTheBugsTestCase(unittest.TestCase):
def test_equals(self):
self.assertEqual(find_longest(
'The quick white fox jumped around the massive dog'
), 7
)
def test_equals_2(self):
self.assertEqual(find_longest('Take me to tinseltown with you'), 10)
def test_equals_3(self):
self.assertEqual(find_longest('Sausage chops'), 7)
def test_equals_4(self):
self.assertEqual(
find_longest('Wind your body and wiggle your belly'), 6
)
def test_equals_5(self):
self.assertEqual(find_longest('Lets all go on holiday'), 7)
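
# find_longest itself lives in katas.kyu_8.squash_the_bugs; a minimal
# implementation consistent with these tests would be:
# def find_longest(sentence):
#     return len(max(sentence.split(), key=len))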
|
def solution(s):
    # The key to this problem is the empty tokens: know the difference
    # between split() and split(' ').
    # split(' ') preserves the empty strings produced by consecutive spaces,
    # which is what we need here.
    # Building a separate list by hand could be needlessly slow.
    # answer = ' '.join([i.capitalize() for i in s.split(' ')])
sub = s.split(' ')
cap = []
for i in sub:
cap.append(i.capitalize())
answer = ' '.join(cap)
print(answer)
return answer
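
# Quick check of the split()/split(' ') distinction noted above:
solution("hello  world")  # prints "Hello  World"; the double space survives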
|
import logging
import numpy as np
from parser.argument_parser import training_arguments_parser
from parser import configs
from parser.constants import RNN_EXPT_DIRECTORY, ACTION_CHANNEL_LABELS_PATH
from parser.model import Model
from parser import utils
class ActionChannelModel(Model):
"""Model for predicting Action Channels from descriptions.
"""
def __init__(self, config, path, stem=True):
super(ActionChannelModel, self).__init__(config, path, stem)
def _create_label_maps(self):
"""Creates mapping from label keywords to ids by loading the mapping
from appropriate CSV file.
"""
self._load_label_maps(ACTION_CHANNEL_LABELS_PATH)
logging.info("Number of classes = %s", len(self.labels_map))
def _convert_to_one_hot(self, labels):
"""Converts the label keywords to one-hot vectors.
For example, a label "dummy" with id 1 is converted to the following
vector, assuming there are a total of 5 distinct label classes:
[0,1,0,0,0]
Args:
labels (`list` of `label.Label`): Labels.
Returns:
numpy.ndarray: 2D array, with rows representing one-hot vector for
labels.
"""
indices = []
for label in labels:
indices.append(self.labels_map[label.action_channel])
m, n = len(indices), len(self.labels_map)
indices = np.array(indices)
one_hot = np.zeros((m, n))
one_hot[np.arange(m), indices] = 1.
return one_hot
def main():
args = training_arguments_parser().parse_args()
experiment_name = args.experiment_name[0]
logging.basicConfig(level=getattr(logging, args.log_level.upper()),
format='%(levelname)s: %(asctime)s: %(message)s')
logging.info("Log Level: %s", args.log_level)
logging.info("Model: %s", args.model[0])
logging.info("Use Train Set: %s", args.use_train_set)
logging.info("Use Triggers API: %s", args.use_triggers_api)
logging.info("Use Actions API: %s", args.use_actions_api)
logging.info("Use Synthetic Recipes: %s", args.use_synthetic_recipes)
logging.info("Experiment Name: %s", experiment_name)
utils.create_experiment_directory(experiment_name)
path = RNN_EXPT_DIRECTORY + experiment_name + "/"
if args.model[0] == "ActionChannelModel":
model = ActionChannelModel(configs.PaperConfiguration, path, True)
model.load_train_dataset(
use_train_set=args.use_train_set,
use_triggers_api=args.use_triggers_api,
use_actions_api=args.use_actions_api,
use_synthetic_recipes=args.use_synthetic_recipes,
use_names_descriptions=True, load_vocab=False)
model.initialize_network()
model.train()
if __name__ == '__main__':
main()
|
"""
Contains business logic tasks for this order of the task factory.
Each task should be wrapped inside a task closure that accepts a **kwargs parameter
used for task initialization.
"""
def make_task_dict():
"""
Returns a task dictionary containing all tasks in this module.
"""
task_dict = {}
task_dict["split_string"] = split_string_closure
task_dict["filter_string_length"] = filter_strings_closure
return task_dict
def get_task(task_name, init_args):
"""
Accesses the task dictionary, returning the task corresponding to a given key,
wrapped in a closure containing the task and its arguments.
"""
tasks = make_task_dict()
return tasks[task_name](init_args)
def split_string_closure(init_args):
"""
A closure around the split_string function which is an endpoint in the task factory.
"""
    init_args = init_args  # unused for this task; kept for a uniform closure signature
async def split_string(string_map):
"""
Splits a string into a list and returns it.
"""
input_string = string_map["input"]
split_string = input_string.split()
return {"strings": split_string}
return split_string
def filter_strings_closure(init_args):
"""
A closure around the split_string function which is an endpoint in the task factory.
"""
word_length = init_args["word_length"]
comparison = init_args["comparison"]
async def filter_string_length(strings_map, word_length=word_length, comparison=comparison):
"""
Splits a string into a list and returns it.
"""
string_list = strings_map["strings"]
if comparison == "less":
filtered_strings = [string for string in string_list if len(string) < word_length]
elif comparison == "greater":
filtered_strings = [string for string in string_list if len(string) > word_length]
else:
filtered_strings = string_list
return {"strings": filtered_strings}
return filter_string_length
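
# Hypothetical driver showing the factory in use (asyncio assumed):
# import asyncio
# splitter = get_task("split_string", {})
# long_only = get_task("filter_string_length",
#                      {"word_length": 4, "comparison": "greater"})
# async def demo():
#     words = await splitter({"input": "the quick brown fox jumps"})
#     print(await long_only(words))  # {'strings': ['quick', 'brown', 'jumps']}
# asyncio.run(demo())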
|
# Generate a land/sea mask for the MRED domain
import netCDF3
import mm5_class
import mx.DateTime
mm5 = mm5_class.mm5('TERRAIN_DOMAIN1')
land = mm5.get_field('landmask', 0)
# field shape is (1, 143, 208)
data = land['values']
lats = mm5.get_field('latitdot',0)['values']
lons = mm5.get_field('longidot',0)['values']
nc = netCDF3.Dataset('LANDSEA_IMM5.nc', 'w')
nc.institution = "Iowa State University, Ames, IA, USA"
nc.source = "MM5 (2009): atmosphere: MM5v3.6.3 non-hydrostatic, split-explicit; sea ice: Noah; land: Noah"
nc.project_id = "MRED"
nc.table_id = "Table 2"
nc.realization = 1
nc.forcing_data = "CFS01"
# Optional
nc.Conventions = 'CF-1.0'
nc.contact = "Daryl Herzmann, akrherz@iastate.edu, 515-294-5978"
nc.history = "%s Generated" % (mx.DateTime.now().strftime("%d %B %Y"),)
nc.comment = "Runs processed on derecho@ISU, output processed on mred@ISU"
nc.title = "ISU MM5 model output prepared for MRED using CFS input"
nc.createDimension('y', 143)
nc.createDimension('x', 208)
lat = nc.createVariable('lat', 'd', ('y','x') )
lat.units = "degrees_north"
lat.long_name = "latitude"
lat.standard_name = "latitude"
lat.axis = "Y"
lat[:] = lats
lon = nc.createVariable('lon', 'd', ('y', 'x',) )
lon.units = "degrees_east"
lon.long_name = "longitude"
lon.standard_name = "longitude"
lon.axis = "X"
lon[:] = lons
lsea = nc.createVariable('landmask', 'd', ('y', 'x'))
lsea.long_name = "land mask"
lsea.standard_name = "land mask"
lsea[:] = data[0,:,:]
nc.close()
mm5.close()
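
# Sanity-check sketch (netCDF4 assumed available; variable names as above):
# import netCDF4
# out = netCDF4.Dataset('LANDSEA_IMM5.nc')
# print(out.variables['landmask'].shape)  # expected: (143, 208)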
|
from django.urls import path
from django.views.generic import TemplateView
from . import views
urlpatterns = [
path('hello/', views.hello, name='hello'),
path('morning/', views.morning, name='morning'),
path('article/<id>/', views.view_article, name='article'),
path('articles/<month>/<year>/', views.view_articles, name='articles'),
path('crud/', views.crud_ops, name='crud'),
path('email/', views.send_mass_email, name='send_email'),
path('static/', views.StaticView.as_view()),
path('dreamreals/', views.DreamView.as_view()),
path('connection/', TemplateView.as_view(template_name='login.html')),
path('login/', views.login, name='logged_in'),
]
|
#!/usr/bin/env python
import math
import sys

# power = int(sys.argv[1])
power = 4
summ = 0
for i in range(1, 11):
    an = math.pow(i, power)
    summ += an
    print("%2d %5d %5d" % (i, an, summ))
print(math.pow(10, power + 1) / power)
|
# -*- coding: utf-8 -*-
"""
Author: Li Zhen
Created: 2019/4/4 17:41
Description: predict cos from sin with an RNN
"""
import torch
import torch.nn as nn
from torch.nn import functional as F
from torch import optim
import numpy as np
from matplotlib import pyplot as plt
import matplotlib.animation
import math, random
# number of RNN time steps
TIME_STEP = 20
# RNN input dimension
INPUT_SIZE = 1
DEVICE = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# number of RNN hidden units
H_SIZE = 64
# number of RNN layers
RNN_SIZE = 2
# total number of training steps
EPOCHS = 1000
# hidden state of the RNN
h_state = None
steps = np.linspace(0, np.pi*2, 256, dtype=np.float32)
x_np = np.sin(steps)
y_np = np.cos(steps)
# plt.figure(1)
# plt.suptitle('Sin and Cos', fontsize='18')
# plt.plot(steps, y_np, 'r-', label='target(cos)')
# plt.plot(steps, x_np, 'b*', label='input(sin)')
# plt.legend(loc='best')
# plt.show()
class RNN(nn.Module):
def __init__(self):
super(RNN, self).__init__()
self.rnn = nn.RNN(input_size=INPUT_SIZE,
hidden_size=H_SIZE,
num_layers=RNN_SIZE,
batch_first=True,
dropout=0.5)
self.out = nn.Linear(H_SIZE, 1)
def forward(self, x, h_state):
"""
:param x: (batch, time_step, input_size)
:param h_state: (n_layers, batch, hidden_size)
        :return: (batch, time_step, 1)
"""
r_out, h_state = self.rnn(x, h_state)
outs = []
for time_step in range(r_out.size()[1]):
outs.append(self.out(r_out[:, time_step, :]))
# r_out = r_out.view(-1, 64)
# outs = self.out(r_out)
# return outs, h_state
return torch.stack(outs, dim=1), h_state
rnn = RNN().to(DEVICE)
optimizer = torch.optim.Adam(rnn.parameters())
criterion = nn.MSELoss()
rnn.train()
plt.figure(2)
for step in range(EPOCHS):
start, end = step * np.pi, (step + 1) * np.pi
steps = np.linspace(start, end, TIME_STEP, dtype=np.float32)
x_np = np.sin(steps)
y_np = np.cos(steps)
x = torch.from_numpy(x_np[np.newaxis, :, np.newaxis]).to(DEVICE)
y = torch.from_numpy(y_np[np.newaxis, :, np.newaxis]).to(DEVICE)
prediction, h_state = rnn(x, h_state)
# print(h_state.requires_grad)
    h_state = h_state.detach()  # detach to truncate backpropagation through time across iterations
# print(h_state.requires_grad)
loss = criterion(prediction, y)
optimizer.zero_grad()
loss.backward()
optimizer.step()
if (step + 1) % 20 == 0:
        print('Epoch: {}, Loss: {:.4f}'.format(step, loss.item()))
plt.cla()
plt.plot(steps, y_np.flatten(), 'r-')
plt.plot(steps, prediction.to('cpu').data.numpy().flatten(), 'b-')
plt.pause(0.1)
plt.draw()
|
import nltk
# nltk.download()
from nltk import word_tokenize
from nltk.util import ngrams
from collections import Counter
# text = "I need to write a program in NLTK that breaks a corpus (a large collection of txt files) into unigrams, bigrams, trigrams, fourgrams and fivegrams. I need to write a program in NLTK that breaks a corpus"
text = ' '.join(
["all the information [laughter]",
"all the information again",
"all the information all that",
"all the information and would need",
"all the information but",
"all the information for you and",
"all the information from the vehicle on",
"all the information from you but",
"all the information i gave you guys",
"all the information i had",
"all the information i have",
"all the information i have on here",
"all the information i your account",
"all the information in",
"all the information in for you",
"all the information is correct",
"all the information on it",
"all the information on it i just",
"all the information on the vehicle",
"all the information so they know all you know what i mean",
"all the information that i can",
"all the information that i had",
"all the information that they need and then",
"all the information that you how",
"all the information they you need he would have to give you",
"all the information uh",
"all the information when you get here",
"all the information you know",
"all the information you need",
"all the information you need on it",
"all the information you need will be on it",
"all the instructions on how to"])
token = nltk.word_tokenize(text)
bigrams = ngrams(token,2)
trigrams = ngrams(token,3)
fourgrams = ngrams(token,4)
fivegrams = ngrams(token,5)
print(Counter(ngrams(token,5)))
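# Counter supports most_common() for a quicker look at the dominant n-grams:
# print(Counter(ngrams(token, 5)).most_common(3))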
|
# importing Dataloader class from data_transformation
from data_transformation.data_loader import Dataloader
from training.model import Model
from application_logging.logger import Applog
import warnings
warnings.filterwarnings('ignore')
if __name__ == '__main__':
def load_data(func):
logg_data_transform = Applog("data_transformation/dataloading.log")
logger = logg_data_transform.write(logg_data_transform)
dataloader_obj = Dataloader("handwritten_digit_corpus","handwritten_digit_corpus",logger)
images = dataloader_obj.imageData()
X_train,X_test,y_train,y_test = dataloader_obj.getData(images)
func(X_train,X_test,y_train,y_test)
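    # Note: load_data calls func immediately and returns None, so applying it
    # as a decorator below runs the whole pipeline at definition time.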
@load_data
def training_model(X_train,X_test,y_train,y_test):
logg_training = Applog("training/training_model.log")
        logger = logg_training.write(logg_training)
        model_object = Model(X_train, X_test, y_train, y_test, logger)
model = model_object.ml_model()
model_object.training(model)
|
from typing import List
import random
def mergesort(nums: List[int]) -> List[int]:
if len(nums) <= 1:
return nums
center = len(nums) // 2
nums_l = nums[:center]
nums_r = nums[center:]
    mergesort(nums_l)  # the recursive calls sort the copies in place; the merge below writes back into nums
    mergesort(nums_r)
i = j = k = 0
while i < len(nums_l) and j < len(nums_r):
if nums_l[i] <= nums_r[j]:
nums[k] = nums_l[i]
i += 1
else:
nums[k] = nums_r[j]
j += 1
k += 1
while i < len(nums_l):
nums[k] = nums_l[i]
i += 1
k += 1
while j < len(nums_r):
nums[k] = nums_r[j]
j += 1
k += 1
return nums
if __name__ == "__main__":
    # nums = [5, 4, 1, 8, 7, 3, 2, 9]  # small fixed example (the random list below was overriding it)
    nums = [random.randint(0, 100) for _ in range(99)]
print(mergesort(nums))
|
#!/usr/bin/env python3
"""Check file for non-ascii lines."""
from sys import argv
path = argv[1]
print('Path:', path)
def isascii(string):
    try:
        string.encode('ascii')
        return True
    except UnicodeEncodeError:
        return False
def check_file():
num = 0
with open(path) as f:
        for n, line in enumerate(f):
            if isascii(line):
                continue
            print('Line {} is non-ascii:'.format(n + 1))
            print(line.strip('\n'))
            num += 1
print('Found {} non-ascii lines'.format(num))
check_file()
|
import random
import time
import copy
import sys
def cal_time(func):
    '''Decorator that reports a function's running time.'''
def wrapper(*args, **kwargs):
t1 = time.time()
result = func(*args, **kwargs)
t2 = time.time()
print("%s running time: %s secs." % (func.__name__, t2-t1))
return result
return wrapper
@cal_time
def bubble_sort(li):
"""
冒泡排序
time complexity: O(n^2)
以下改进版冒泡最好的情况是传进来的列表是有序的,那么只走一趟,比较和移动元素的次数分别是n-1和0,所以最好情况的时间复杂度是O(n)
平均情况下和最差情况都是O(n^2)
稳定性:因为每次比较后如果两个相邻元素相等我们并不会将他们交换,所以冒泡不会改变相同元素的下标,所以冒泡排序是一个稳定的排序。
待排序的记录序列中可能存在两个或两个以上关键字相等的记录。排序前的序列中Ri领先于Rj(即i<j).若在排序后的序列中Ri仍然领先于Rj,则称所用的方法是稳定的。
"""
for i in range(len(li)-1): #趟数
exchange = False
for j in range(len(li)-i-1):
if li[j] > li[j+1]:
li[j], li[j+1] = li[j+1], li[j]
exchange = True
if not exchange:
break
return li
def select_sort(li):
"""
选择排序
time complexity: O(n^2)
举个例子,序列5 8 5 2 9, 我们知道第一遍选择第1个元素5会和2交换,那么原序列中2个5的相对前后顺序就被破坏了,所以选择排序不是一个稳定的排序算法
"""
for i in range(len(li)-1):
min_loc = i
        for j in range(i+1, len(li)):  # unsorted region: from i+1 through len(li)-1
if li[j] < li[min_loc]:
min_loc = j
        li[min_loc], li[i] = li[i], li[min_loc]  # swap the minimum into place
def insert_sort(li):
"""
插入排序
time complexity: O(n^2)
平均情况O(n^2),最好情况O(n),最坏情况O(n^2)
插入排序是在一个已经有序的小序列的基础上,一次插入一个元素。当然,刚开始这个有序的小序列只有1个元素,就是第一个元素。
比较是从有序序列的末尾开始,也就是想要插入的元素和已经有序的最大者开始比起,如果比它大则直接插入在其后面,否则一直往前找直到找到它该插入的位置。
如果碰见一个和插入元素相等的,那么插入元素把想插入的元素放在相等元素的后面。所以,相等元素的前后顺序没有改变,从原无序序列出去的顺序就是排好序后的顺序,所以插入排序是稳定的。
优化:应用二分查找来寻找插入点(没什么卵用),下面的是顺序查找的
"""
for i in range(1, len(li)): # 这里的i作为刚抽到的牌,比如len(li)=10,这里循环就是1-9。第一个元素不需要参见排序,最后一个要参加排序,前面两种是最后一个不要排序。
tmp = li[i] # 把摸到的牌临时存起来
j = i - 1 # 手里所有牌中的最后一张牌的位置
while j >= 0 and li[j] > tmp: # 手里的牌向后挪动,找到新抓的牌应该插入的位置
li[j+1] = li[j]
j = j - 1
li[j+1] = tmp # 空的位置不是j,是j+1
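def insert_sort_bisect(li):
    """
    Sketch of the binary-search variant mentioned in the insert_sort notes
    (standard-library bisect assumed): locating the slot drops to O(log n),
    but shifting elements is still O(n) per insert, so the bound stays O(n^2).
    """
    import bisect
    for i in range(1, len(li)):
        tmp = li.pop(i)
        bisect.insort_right(li, tmp, 0, i)  # insort_right keeps equal elements stable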
#li = list(range(100))
#random.shuffle(li)  # shuffle the list
#bubble_sort(li)
#select_sort(li)
#insert_sort(li)
#print(li)
'''
Quick sort
'''
def partition(li, left, right):
"""
归位第一个元素
这个函数复杂度,其实就是两个指针从两侧往中间靠,时间复杂度O(n)
"""
tmp = li[left] # 把第一个元素存起来,这个位置就空了。而left和right作为指针往列表中间移动,直到left和right碰上
while left < right:
while left < right and li[right] >= tmp: # 先动右边,找比tmp小的数,移到左边那个空位。加=的时候保证等于的时候也不挪,省时间
right -= 1
li[left] = li[right] #这样右边有一个位置空了,再去动左边
while left < right and li[left] <= tmp:
left += 1
li[right] = li[left]
li[left] = tmp # 直到left和right碰上
return left
def _quick_sort(li, left, right):  # a recursive function can't take the timing decorator directly, hence this wrapper arrangement
    """
    Recursion
    """
if left < right:
        mid = partition(li, left, right)  # partition step, O(n)
        _quick_sort(li, left, mid-1)  # recursion roughly halves the range each time, O(log n) depth
_quick_sort(li, mid+1, right)
@cal_time
def quick_sort(li):
"""
快速排序
时间复杂度:O(nlogn)
最坏情况:比如 [9,8,7,6,5,4,3,2,1],每次partition的时候都没有分成两部分,这种最坏处情况复杂度是O(n^2)。
"""
_quick_sort(li, 0, len(li)-1)
# li = [5, 7, 4, 6, 3, 1, 2, 9, 3]
# quick_sort(li)
# print(li)
'''
Heap sort
'''
def sift(li, left, right):  # heapifying adjusts one (sub)tree at a time; left and right bound the tree
    i = left
    j = 2 * i + 1  # left child
    tmp = li[left]  # stash the top of the heap
while j <= right:
if j+1 <= right and li[j] < li[j+1]:
j = j + 1
if tmp < li[j]:
li[i] = li[j]
i = j
j = 2 * i + 1
else:
break
li[i] = tmp
@cal_time
def heap_sort(li):
n = len(li)
    for i in range(n//2-1, -1, -1):  # build the heap, adjusting from the last non-leaf node up to and including the root at index 0
        sift(li, i, n-1)  # finding each subtree's true right bound is fiddly, so just use the last index
    for i in range(n-1, -1, -1):  # pop the heap top one at a time
        li[0], li[i] = li[i], li[0]  # store the popped top in slot i instead of appending to a new list, saving space
        sift(li, 0, i-1)  # index i is no longer part of the heap
li = [6, 8, 1, 9, 3, 0, 7, 2, 4, 5]
heap_sort(li)
print(li)
'''
Built-in sort
'''
@cal_time
def sys_sort(li):
"""
原装的sort
系统的这个最快,因为它是C语言写的
"""
li.sort()
# li = [10,4,6,3,8,2,5,7]
# merge_sort(li,0,len(li)-1)
# bubble sort vs quick sort comparison
# li = list(range(10000))
# random.shuffle(li)
# li1 = copy.deepcopy(li)
# li2 = copy.deepcopy(li)
# bubble_sort(li1)
# quick_sort(li2)
# quick sort worst case
# sys.setrecursionlimit(1000000)  # raise the recursion depth limit
# lix = list(range(1000, 1, -1))
# quick_sort(lix)
# quick sort, heap sort, merge sort, built-in sort
# li = [random.randint(0, 100) for i in range(10000000)]
# li1 = copy.deepcopy(li)
# li2 = copy.deepcopy(li)
# li3 = copy.deepcopy(li)
#
# sys_sort(li1)
# heap_sort(li1)
# quick_sort(li2)
# merge_sort(li3)
|
"""Access Rules Classes."""
from fmcapi.api_objects.apiclasstemplate import APIClassTemplate
from fmcapi.api_objects.policy_services.accesspolicies import AccessPolicies
from fmcapi.api_objects.policy_services.intrusionpolicies import IntrusionPolicies
from fmcapi.api_objects.object_services.variablesets import VariableSets
from fmcapi.api_objects.object_services.securityzones import SecurityZones
from fmcapi.api_objects.object_services.vlantags import VlanTags
from fmcapi.api_objects.object_services.portobjectgroups import PortObjectGroups
from fmcapi.api_objects.object_services.protocolportobjects import ProtocolPortObjects
from fmcapi.api_objects.object_services.fqdns import FQDNS
from fmcapi.api_objects.object_services.networkgroups import NetworkGroups
from fmcapi.api_objects.object_services.networkaddresses import NetworkAddresses
from fmcapi.api_objects.policy_services.filepolicies import FilePolicies
from fmcapi.api_objects.object_services.isesecuritygrouptags import ISESecurityGroupTags
from fmcapi.api_objects.helper_functions import get_networkaddress_type
import logging
import sys
import warnings
class AccessRules(APIClassTemplate):
"""The AccessRules Object in the FMC."""
VALID_JSON_DATA = [
"id",
"name",
"type",
"action",
"enabled",
"sendEventsToFMC",
"logFiles",
"logBegin",
"logEnd",
"variableSet",
"originalSourceNetworks",
"vlanTags",
"sourceNetworks",
"destinationNetworks",
"sourcePorts",
"destinationPorts",
"ipsPolicy",
"urls",
"sourceZones",
"destinationZones",
"applications",
"filePolicy",
"sourceSecurityGroupTags",
"destinationSecurityGroupTags",
]
VALID_FOR_KWARGS = VALID_JSON_DATA + [
"acp_id",
"acp_name",
"insertBefore",
"insertAfter",
"section",
]
PREFIX_URL = "/policy/accesspolicies"
REQUIRED_FOR_POST = ["name", "acp_id"]
REQUIRED_FOR_GET = ["acp_id"]
VALID_FOR_ACTION = [
"ALLOW",
"TRUST",
"BLOCK",
"MONITOR",
"BLOCK_RESET",
"BLOCK_INTERACTIVE",
"BLOCK_RESET_INTERACTIVE",
]
    VALID_CHARACTERS_FOR_NAME = r"""[.\w\d_\- ]"""  # raw string: \w, \d, \- are not valid string escapes
@property
def URL_SUFFIX(self):
"""
Add the URL suffixes for categories, insertBefore and insertAfter.
NOTE: You must specify these at the time the object is initialized (created) for this feature
to work correctly. Example:
This works:
new_rule = AccessRules(fmc=fmc, acp_name='acp1', insertBefore=2)
This does not:
new_rule = AccessRules(fmc=fmc, acp_name='acp1')
new_rule.insertBefore = 2
"""
url = "?"
if "category" in self.__dict__:
url = f"{url}category={self.category}&"
if "insertBefore" in self.__dict__:
url = f"{url}insertBefore={self.insertBefore}&"
if "insertAfter" in self.__dict__:
url = f"{url}insertAfter={self.insertAfter}&"
if "insertBefore" in self.__dict__ and "insertAfter" in self.__dict__:
logging.warning("ACP rule has both insertBefore and insertAfter params")
if "section" in self.__dict__:
url = f"{url}section={self.section}&"
return url[:-1]
def __init__(self, fmc, **kwargs):
"""
Initialize AccessRules object.
Set self.type to "AccessRule", parse the kwargs, and set up the self.URL.
:param fmc: (object) FMC object
:param kwargs: Any other values passed during instantiation.
:return: None
"""
super().__init__(fmc, **kwargs)
logging.debug("In __init__() for AccessRules class.")
self.type = "AccessRule"
self.parse_kwargs(**kwargs)
self.URL = f"{self.URL}{self.URL_SUFFIX}"
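    # Hypothetical end-to-end usage sketch (connection details assumed, not
    # part of this file):
    #   import fmcapi
    #   with fmcapi.FMC(host='10.0.0.5', username='api', password='...') as fmc1:
    #       rule = AccessRules(fmc=fmc1, acp_name='acp1', name='my_rule', action='ALLOW')
    #       rule.source_zone(action='add', name='inside')
    #       rule.post()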
def format_data(self, filter_query=""):
"""
Gather all the data in preparation for sending to API in JSON format.
:param filter_query: (str) 'all' or 'kwargs'
:return: (dict) json_data
"""
json_data = super().format_data(filter_query=filter_query)
logging.debug("In format_data() for AccessRules class.")
if "sourceNetworks" in self.__dict__:
json_data["sourceNetworks"] = {"objects": self.sourceNetworks["objects"]}
json_data["sourceNetworks"]["literals"] = [
{"type": v, "value": k}
for k, v in self.sourceNetworks["literals"].items()
]
if "destinationNetworks" in self.__dict__:
json_data["destinationNetworks"] = {
"objects": self.destinationNetworks["objects"]
}
json_data["destinationNetworks"]["literals"] = [
{"type": v, "value": k}
for k, v in self.destinationNetworks["literals"].items()
]
if "action" in self.__dict__:
if self.action not in self.VALID_FOR_ACTION:
logging.warning(f"Action {self.action} is not a valid action.")
logging.warning(f"\tValid actions are: {self.VALID_FOR_ACTION}.")
return json_data
def parse_kwargs(self, **kwargs):
"""
Parse the kwargs and set self variables to match.
:return: None
"""
super().parse_kwargs(**kwargs)
logging.debug("In parse_kwargs() for AccessRules class.")
if "acp_id" in kwargs:
self.acp(id=kwargs["acp_id"])
if "acp_name" in kwargs:
self.acp(name=kwargs["acp_name"])
if "action" in kwargs:
if kwargs["action"] in self.VALID_FOR_ACTION:
self.action = kwargs["action"]
else:
logging.warning(f"Action {kwargs['action']} is not a valid action.")
logging.warning(f"\tValid actions are: {self.VALID_FOR_ACTION}.")
if "sourceNetworks" in kwargs:
self.sourceNetworks = {"objects": [], "literals": {}}
if kwargs["sourceNetworks"].get("objects"):
self.sourceNetworks["objects"] = kwargs["sourceNetworks"]["objects"]
if kwargs["sourceNetworks"].get("literals"):
for literal in kwargs["sourceNetworks"]["literals"]:
self.sourceNetworks["literals"][literal["value"]] = literal["type"]
if "destinationNetworks" in kwargs:
self.destinationNetworks = {"objects": [], "literals": {}}
if kwargs["destinationNetworks"].get("objects"):
self.destinationNetworks["objects"] = kwargs["destinationNetworks"][
"objects"
]
if kwargs["destinationNetworks"].get("literals"):
for literal in kwargs["destinationNetworks"]["literals"]:
self.destinationNetworks["literals"][literal["value"]] = literal[
"type"
]
# Check if suffix should be added to URL
# self.url_suffix()
def acp(self, name="", id=""):
"""
Associate an AccessPolicies object with this AccessRule object.
:param name: (str) Name of ACP.
:param id: (str) ID of ACP.
:return: None
"""
# either name or id of the ACP should be given
logging.debug("In acp() for AccessRules class.")
if id != "":
self.acp_id = id
self.URL = f"{self.fmc.configuration_url}{self.PREFIX_URL}/{id}/accessrules"
elif name != "":
acp1 = AccessPolicies(fmc=self.fmc)
acp1.get(name=name)
if "id" in acp1.__dict__:
self.acp_id = acp1.id
self.URL = f"{self.fmc.configuration_url}{self.PREFIX_URL}/{acp1.id}/accessrules"
else:
logging.warning(
f"Access Control Policy {name} not found. Cannot set up accessPolicy for AccessRules."
)
else:
logging.error("No accessPolicy name or ID was provided.")
def intrusion_policy(self, action, name=""):
"""
Add/remove name of ipsPolicy field of AccessRules object.
:param action: (str) 'set', or 'clear'
:param name: (str) Name of intrusion policy in FMC.
:return: None
"""
logging.debug("In intrusion_policy() for AccessRules class.")
if action == "clear":
if "ipsPolicy" in self.__dict__:
del self.ipsPolicy
logging.info("Intrusion Policy removed from this AccessRules object.")
elif action == "set":
ips = IntrusionPolicies(fmc=self.fmc, name=name)
ips.get()
self.ipsPolicy = {"name": ips.name, "id": ips.id, "type": ips.type}
logging.info(
f'Intrusion Policy set to "{name}" for this AccessRules object.'
)
def variable_set(self, action, name="Default-Set"):
"""
Add/remove name of variableSet field of AccessRules object.
:param action: (str) 'set', or 'clear'
:param name: (str) Name of variable set in FMC.
:return: None
"""
logging.debug("In variable_set() for AccessRules class.")
if action == "clear":
if "variableSet" in self.__dict__:
del self.variableSet
logging.info("Variable Set removed from this AccessRules object.")
elif action == "set":
vs = VariableSets(fmc=self.fmc)
vs.get(name=name)
self.variableSet = {"name": vs.name, "id": vs.id, "type": vs.type}
logging.info(f'VariableSet set to "{name}" for this AccessRules object.')
def file_policy(self, action, name="None"):
"""
Add/remove name of filePolicy field of AccessRules object.
:param action: (str) 'set', or 'clear'
:param name: (str) Name of file policy in FMC.
:return: None
"""
logging.debug("In file_policy() for ACPRule class.")
if action == "clear":
if "filePolicy" in self.__dict__:
del self.filePolicy
logging.info("file_policy removed from this AccessRules object.")
elif action == "set":
fp = FilePolicies(fmc=self.fmc)
fp.get(name=name)
self.filePolicy = {"name": fp.name, "id": fp.id, "type": fp.type}
logging.info(f'file_policy set to "{name}" for this AccessRules object.')
def vlan_tags(self, action, name=""):
"""
Add/modify name to vlanTags field of AccessRules object.
:param action: (str) 'add', 'remove', or 'clear'
:param name: (str) Name of VLAN tag in FMC.
:return: None
"""
logging.debug("In vlan_tags() for AccessRules class.")
if action == "add":
vlantag = VlanTags(fmc=self.fmc)
vlantag.get(name=name)
if "id" in vlantag.__dict__:
if "vlanTags" in self.__dict__:
new_vlan = {
"name": vlantag.name,
"id": vlantag.id,
"type": vlantag.type,
}
duplicate = False
for obj in self.vlanTags["objects"]:
if obj["name"] == new_vlan["name"]:
duplicate = True
break
if not duplicate:
self.vlanTags["objects"].append(new_vlan)
logging.info(
f'Adding "{name}" to vlanTags for this AccessRules.'
)
else:
self.vlanTags = {
"objects": [
{
"name": vlantag.name,
"id": vlantag.id,
"type": vlantag.type,
}
]
}
logging.info(f'Adding "{name}" to vlanTags for this AccessRules.')
else:
logging.warning(
f'VLAN Tag, "{name}", not found. Cannot add to AccessRules.'
)
elif action == "remove":
vlantag = VlanTags(fmc=self.fmc)
vlantag.get(name=name)
if "id" in vlantag.__dict__:
if "vlanTags" in self.__dict__:
objects = []
for obj in self.vlanTags["objects"]:
if obj["name"] != name:
objects.append(obj)
self.vlanTags["objects"] = objects
logging.info(
f'Removed "{name}" from vlanTags for this AccessRules.'
)
else:
logging.info(
"vlanTags doesn't exist for this AccessRules. Nothing to remove."
)
else:
logging.warning(
f"VLAN Tag, {name}, not found. Cannot remove from AccessRules."
)
elif action == "clear":
if "vlanTags" in self.__dict__:
del self.vlanTags
logging.info("All VLAN Tags removed from this AccessRules object.")
def source_zone(self, action, name=""):
"""
Add/modify name to sourceZones field of AccessRules object.
:param action: (str) 'add', 'remove', or 'clear'
:param name: (str) Name of Security Zone in FMC.
:return: None
"""
logging.debug("In source_zone() for AccessRules class.")
if action == "add":
sz = SecurityZones(fmc=self.fmc)
sz.get(name=name)
if "id" in sz.__dict__:
if "sourceZones" in self.__dict__:
new_zone = {"name": sz.name, "id": sz.id, "type": sz.type}
duplicate = False
for obj in self.sourceZones["objects"]:
if obj["name"] == new_zone["name"]:
duplicate = True
break
if not duplicate:
self.sourceZones["objects"].append(new_zone)
logging.info(
f'Adding "{name}" to sourceZones for this AccessRules.'
)
else:
self.sourceZones = {
"objects": [{"name": sz.name, "id": sz.id, "type": sz.type}]
}
logging.info(
f'Adding "{name}" to sourceZones for this AccessRules.'
)
else:
logging.warning(
                    f'Security Zone, "{name}", not found. Cannot add to AccessRules.'
)
elif action == "remove":
sz = SecurityZones(fmc=self.fmc)
sz.get(name=name)
if "id" in sz.__dict__:
if "sourceZones" in self.__dict__:
objects = []
for obj in self.sourceZones["objects"]:
if obj["name"] != name:
objects.append(obj)
self.sourceZones["objects"] = objects
logging.info(
f'Removed "{name}" from sourceZones for this AccessRules.'
)
else:
logging.info(
"sourceZones doesn't exist for this AccessRules. Nothing to remove."
)
else:
logging.warning(
f'Security Zone, "{name}", not found. Cannot remove from AccessRules.'
)
elif action == "clear":
if "sourceZones" in self.__dict__:
del self.sourceZones
logging.info("All Source Zones removed from this AccessRules object.")
def destination_zone(self, action, name=""):
"""
Add/modify name to destinationZones field of AccessRules object.
:param action: (str) 'add', 'remove', or 'clear'
:param name: (str) Name of Security Zone in FMC.
:return: None
"""
logging.debug("In destination_zone() for AccessRules class.")
if action == "add":
sz = SecurityZones(fmc=self.fmc)
sz.get(name=name)
if "id" in sz.__dict__:
if "destinationZones" in self.__dict__:
new_zone = {"name": sz.name, "id": sz.id, "type": sz.type}
duplicate = False
for obj in self.destinationZones["objects"]:
if obj["name"] == new_zone["name"]:
duplicate = True
break
if not duplicate:
self.destinationZones["objects"].append(new_zone)
logging.info(
f'Adding "{name}" to destinationZones for this AccessRules.'
)
else:
self.destinationZones = {
"objects": [{"name": sz.name, "id": sz.id, "type": sz.type}]
}
logging.info(
f'Adding "{name}" to destinationZones for this AccessRules.'
)
else:
logging.warning(
f'Security Zone, "{name}", not found. Cannot add to AccessRules.'
)
elif action == "remove":
sz = SecurityZones(fmc=self.fmc)
sz.get(name=name)
if "id" in sz.__dict__:
if "destinationZones" in self.__dict__:
objects = []
for obj in self.destinationZones["objects"]:
if obj["name"] != name:
objects.append(obj)
self.destinationZones["objects"] = objects
logging.info(
                        f'Removed "{name}" from destinationZones for this AccessRules.'
)
else:
logging.info(
"destinationZones doesn't exist for this AccessRules. Nothing to remove."
)
else:
logging.warning(
f"Security Zone, {name}, not found. Cannot remove from AccessRules."
)
elif action == "clear":
if "destinationZones" in self.__dict__:
del self.destinationZones
logging.info(
"All Destination Zones removed from this AccessRules object."
)
def source_port(self, action, name=""):
"""
Add/modify name to sourcePorts field of AccessRules object.
:param action: (str) 'add', 'remove', or 'clear'
:param name: (str) Name of Port in FMC.
:return: None
"""
logging.debug("In source_port() for AccessRules class.")
if action == "add":
pport_json = ProtocolPortObjects(fmc=self.fmc)
pport_json.get(name=name)
if "id" in pport_json.__dict__:
item = pport_json
else:
item = PortObjectGroups(fmc=self.fmc)
item.get(name=name)
if "id" in item.__dict__:
if "sourcePorts" in self.__dict__:
new_port = {"name": item.name, "id": item.id, "type": item.type}
duplicate = False
if "objects" not in self.sourcePorts:
self.__dict__["sourcePorts"]["objects"] = []
for obj in self.sourcePorts["objects"]:
if obj["name"] == new_port["name"]:
duplicate = True
break
if not duplicate:
self.sourcePorts["objects"].append(new_port)
logging.info(
f'Adding "{name}" to sourcePorts for this AccessRules.'
)
else:
self.sourcePorts = {
"objects": [
{"name": item.name, "id": item.id, "type": item.type}
]
}
logging.info(
f'Adding "{name}" to sourcePorts for this AccessRules.'
)
else:
logging.warning(
f'Protocol Port or Protocol Port Group: "{name}", '
f"not found. Cannot add to AccessRules."
)
elif action == "remove":
pport_json = ProtocolPortObjects(fmc=self.fmc)
pport_json.get(name=name)
if "id" in pport_json.__dict__:
item = pport_json
else:
item = PortObjectGroups(fmc=self.fmc)
item.get(name=name)
if "id" in item.__dict__:
if "sourcePorts" in self.__dict__:
objects = []
for obj in self.sourcePorts["objects"]:
if obj["name"] != name:
objects.append(obj)
self.sourcePorts["objects"] = objects
logging.info(
f'Removed "{name}" from sourcePorts for this AccessRules.'
)
else:
logging.info(
"sourcePorts doesn't exist for this AccessRules. Nothing to remove."
)
else:
logging.warning(
f'Protocol Port or Protocol Port Group: "{name}", '
f"not found. Cannot add to AccessRules."
)
elif action == "clear":
if "sourcePorts" in self.__dict__:
del self.sourcePorts
logging.info("All Source Ports removed from this AccessRules object.")
def destination_port(self, action, name=""):
"""
Add/modify name to destinationPorts field of AccessRules object.
:param action: (str) 'add', 'remove', or 'clear'
:param name: (str) Name of Port in FMC.
:return: None
"""
logging.debug("In destination_port() for AccessRules class.")
if action == "add":
pport_json = ProtocolPortObjects(fmc=self.fmc)
pport_json.get(name=name)
if "id" in pport_json.__dict__:
item = pport_json
else:
item = PortObjectGroups(fmc=self.fmc)
item.get(name=name)
if "id" in item.__dict__:
if "destinationPorts" in self.__dict__:
new_port = {"name": item.name, "id": item.id, "type": item.type}
duplicate = False
if "objects" not in self.destinationPorts:
self.__dict__["destinationPorts"]["objects"] = []
for obj in self.destinationPorts["objects"]:
if obj["name"] == new_port["name"]:
duplicate = True
break
if not duplicate:
self.destinationPorts["objects"].append(new_port)
logging.info(
f'Adding "{name}" to destinationPorts for this AccessRules.'
)
else:
self.destinationPorts = {
"objects": [
{"name": item.name, "id": item.id, "type": item.type}
]
}
logging.info(
f'Adding "{name}" to destinationPorts for this AccessRules.'
)
else:
logging.warning(
f'Protocol Port or Protocol Port Group: "{name}", '
f"not found. Cannot add to AccessRules."
)
elif action == "remove":
pport_json = ProtocolPortObjects(fmc=self.fmc)
pport_json.get(name=name)
if "id" in pport_json.__dict__:
item = pport_json
else:
item = PortObjectGroups(fmc=self.fmc)
item.get(name=name)
if "id" in item.__dict__:
if "destinationPorts" in self.__dict__:
objects = []
for obj in self.destinationPorts["objects"]:
if obj["name"] != name:
objects.append(obj)
self.destinationPorts["objects"] = objects
logging.info(
f'Removed "{name}" from destinationPorts for this AccessRules.'
)
else:
logging.info(
"destinationPorts doesn't exist for this AccessRules. Nothing to remove."
)
else:
logging.warning(
f'Protocol Port or Protocol Port Group: "{name}", '
f"not found. Cannot add to AccessRules."
)
elif action == "clear":
if "destinationPorts" in self.__dict__:
del self.destinationPorts
logging.info(
"All Destination Ports removed from this AccessRules object."
)
def source_network(self, action, name="", literal=None):
"""
Add/modify name/literal to sourceNetworks field of AccessRules object.
:param action: (str) the action to be done 'add', 'remove', 'clear'
:param name: (str) name of the object in question
:param literal: (dict) the literal in question {value:<>, type:<>}
:return: None
"""
        # note: a mutable default such as dict() would be dangerous here, hence literal=None
logging.debug("In source_network() for AccessRules class.")
if literal and name != "":
raise ValueError(
"Only one of literals or name (object name) should be set while creating a source network"
)
if not hasattr(self, "sourceNetworks"):
self.sourceNetworks = {"objects": [], "literals": {}}
if action == "add":
if literal:
type_ = get_networkaddress_type(literal)
self.sourceNetworks["literals"][literal] = type_
logging.info(
f'Adding literal "{literal}" of type "{type_}" to sourceNetworks for this AccessRules.'
)
else:
ipaddresses_json = NetworkAddresses(fmc=self.fmc).get()
networkgroup_json = NetworkGroups(fmc=self.fmc).get()
fqdns_json = FQDNS(fmc=self.fmc).get()
items = (
ipaddresses_json.get("items", [])
+ networkgroup_json.get("items", [])
+ fqdns_json.get("items", [])
)
new_net = None
for item in items:
if item["name"] == name:
new_net = {
"name": item["name"],
"id": item["id"],
"type": item["type"],
}
break
if new_net is None:
logging.warning(
f'Network "{name}" is not found in FMC. Cannot add to sourceNetworks.'
)
else:
if "sourceNetworks" in self.__dict__:
# thus either some objects are already present in sourceNetworks,
# or only literals are present in sourceNetworks
if "objects" in self.__dict__["sourceNetworks"]:
# some objects are already present
duplicate = False
# see if its a duplicate or not. If not, append to the list of
# existing objects in sourceNetworks
for obj in self.sourceNetworks["objects"]:
if obj["name"] == new_net["name"]:
duplicate = True
break
if not duplicate:
self.sourceNetworks["objects"].append(new_net)
logging.info(
f'Adding "{name}" to sourceNetworks for this AccessRules.'
)
else:
# this means no objects were present in sourceNetworks,
# and sourceNetworks contains literals only
self.sourceNetworks.update({"objects": [new_net]})
# So update the sourceNetworks dict which contained 'literals' key initially
# to have a 'objects' key as well
logging.info(
f'Adding "{name}" to sourceNetworks for this AccessRules.'
)
else:
# None of literals or objects are present in sourceNetworks,
# so initialize it with objects and update the provided object
self.sourceNetworks = {"objects": [new_net]}
logging.info(
f'Adding "{name}" to sourceNetworks for this AccessRules.'
)
elif action == "remove":
if "sourceNetworks" in self.__dict__:
if name != "":
# an object's name has been provided to be removed
objects = []
for obj in self.sourceNetworks["objects"]:
if obj["name"] != name:
objects.append(obj)
if len(objects) == 0:
# it was the last object which was deleted now
del self.sourceNetworks
logging.info(
f'Removed "{name}" from sourceNetworks for this AccessRules'
)
logging.info(
"All Source Networks removed from this AccessRules object."
)
else:
self.sourceNetworks["objects"] = objects
logging.info(
f'Removed "{name}" from sourceNetworks for this AccessRules.'
)
else:
# a literal value has been provided to be removed
type_ = self.sourceNetworks["literals"].get(literal)
if type_:
self.sourceNetworks["literals"].pop(literal)
logging.info(
f'Removed literal "{literal}" of type '
f'"{type_}" from sourceNetworks for this AccessRules.'
)
else:
logging.info(
f'Unable to removed literal "{literal}" from sourceNetworks as it was not found'
)
else:
logging.info(
"sourceNetworks doesn't exist for this AccessRules. Nothing to remove."
)
elif action == "clear":
if "sourceNetworks" in self.__dict__:
del self.sourceNetworks
logging.info(
"All Source Networks removed from this AccessRules object."
)
def destination_network(self, action, name="", literal=None):
"""
Add/modify name/literal to destinationNetworks field of AccessRules object.
:param action: (str) the action to be done 'add', 'remove', 'clear'
:param name: (str) name of the object in question
:param literal: (dict) the literal in question {value:<>, type:<>}
:return: None
"""
# using dict() as default value is dangerous here, any thoughts/workarounds on this?
logging.debug("In destination_network() for ACPRule class.")
if literal and name != "":
raise ValueError(
"Only one of literals or name (object name) should be set while creating a source network"
)
if not hasattr(self, "destinationNetworks"):
self.destinationNetworks = {"objects": [], "literals": {}}
if action == "add":
if literal:
type_ = get_networkaddress_type(literal)
self.destinationNetworks["literals"][literal] = type_
logging.info(
f'Adding literal "{literal}" of type "{type_}" '
f"to destinationNetworks for this AccessRules."
)
else:
ipaddresses_json = NetworkAddresses(fmc=self.fmc).get()
networkgroup_json = NetworkGroups(fmc=self.fmc).get()
if self.fmc.serverVersion >= "6.4":
fqdns_json = FQDNS(fmc=self.fmc).get()
else:
fqdns_json = {"items": []}
items = (
ipaddresses_json.get("items", [])
+ networkgroup_json.get("items", [])
+ fqdns_json.get("items", [])
)
new_net = None
for item in items:
if item["name"] == name:
new_net = {
"name": item["name"],
"id": item["id"],
"type": item["type"],
}
break
if new_net is None:
logging.warning(
f'Network "{name}" is not found in FMC. Cannot add to destinationNetworks.'
)
else:
if "destinationNetworks" in self.__dict__:
# thus either some objects are already present in destinationNetworks,
# or only literals are present in destinationNetworks
if "objects" in self.__dict__["destinationNetworks"]:
# some objects are already present
duplicate = False
for obj in self.destinationNetworks["objects"]:
if obj["name"] == new_net["name"]:
duplicate = True
break
if not duplicate:
self.destinationNetworks["objects"].append(new_net)
logging.info(
f'Adding "{name}" to destinationNetworks for this AccessRules.'
)
else:
# this means no objects were present in destinationNetworks,
# and destinationNetworks contains literals only
self.destinationNetworks.update({"objects": [new_net]})
# So update the destinationNetworks dict which contained 'literals' key initially
# to have a 'objects' key as well
logging.info(
f'Adding "{name}" to destinationNetworks for this AccessRules.'
)
else:
# None of literals or objects are present in destinationNetworks,
# so initialize it with objects and update the provided object
self.destinationNetworks = {"objects": [new_net]}
logging.info(
f'Adding "{name}" to destinationNetworks for this AccessRules.'
)
elif action == "remove":
if "destinationNetworks" in self.__dict__:
if name != "":
# an object's name has been provided to be removed
objects = []
for obj in self.destinationNetworks["objects"]:
if obj["name"] != name:
objects.append(obj)
if len(objects) == 0:
# it was the last object which was deleted now
del self.destinationNetworks
logging.info(
f'Removed "{name}" from destinationNetworks for this AccessRules'
)
logging.info(
"All Destination Networks removed from this AccessRules object."
)
else:
self.destinationNetworks["objects"] = objects
logging.info(
f'Removed "{name}" from destinationNetworks for this AccessRules.'
)
else:
# a literal value has been provided to be removed
type_ = self.destinationNetworks["literals"].get(literal)
if type_:
self.destinationNetworks["literals"].pop(literal)
logging.info(
f'Removed literal "{literal}" of '
f'type "{type_}" from destinationNetworks for this AccessRules.'
)
else:
logging.info(
f'Unable to removed literal "{literal}" '
f"from destinationNetworks as it was not found"
)
else:
logging.info(
"destinationNetworks doesn't exist for this AccessRules. Nothing to remove."
)
elif action == "clear":
if "destinationNetworks" in self.__dict__:
del self.destinationNetworks
logging.info(
"All Destination Networks removed from this AccessRules object."
)
def source_sgt(self, action, name="", literal=None):
"""
Add/modify name/literal to the sourceSecurityGroupTags field of AccessRules object.
:param action: (str) 'add', 'remove', or 'clear'
:param name: (str) Name of SGT in FMC.
:param literal: (dict) {value:<>, type:<>}
:return: None
"""
# using dict() as default value is dangerous here, any thoughts/workarounds on this?
logging.debug("In source_sgt() for ACPRule class.")
if literal and name != "":
raise ValueError(
"Only one of literals or name (object name) should be set while creating a source sgt"
)
if not hasattr(self, "sourceSecurityGroupTags"):
self.sourceSecurityGroupTags = {"objects": [], "literals": {}}
if action == "add":
if literal:
type_ = "ISESecurityGroupTag"
self.sourceSecurityGroupTags["literals"][literal] = type_
logging.info(
f'Adding literal "{literal}" of type "{type_}" '
f"to sourceSecurityGroupTags for this AccessRules."
)
else:
# Query FMC for all SGTs and iterate through them to see if our name matches 1 of them.
sgt = ISESecurityGroupTags(fmc=self.fmc)
sgt.get(name=name)
if "id" in sgt.__dict__:
item = sgt
else:
item = {}
new_sgt = None
if item.name == name:
new_sgt = {"name": item.name, "tag": item.tag, "type": item.type}
if new_sgt is None:
logging.warning(
f'SecurityGroupTag "{name}" is not found in FMC. '
f"Cannot add to sourceSecurityGroupTags."
)
else:
if "sourceSecurityGroupTags" in self.__dict__:
# thus either some objects are already present in sourceSecurityGroupTags,
# or only literals are present in sourceSecurityGroupTags
if "objects" in self.__dict__["sourceSecurityGroupTags"]:
# some objects are already present
duplicate = False
for obj in self.sourceSecurityGroupTags["objects"]:
if obj["name"] == new_sgt["name"]:
duplicate = True
break
if not duplicate:
self.sourceSecurityGroupTags["objects"].append(new_sgt)
logging.info(
f'Adding "{name}" to sourceSecurityGroupTags for this AccessRules.'
)
else:
# this means no objects were present in sourceSecurityGroupTags,
# and sourceSecurityGroupTags contains literals only
self.sourceSecurityGroupTags.update({"objects": [new_sgt]})
# So update the sourceSecurityGroupTags dict which contained 'literals' key initially
# to have a 'objects' key as well
logging.info(
f'Adding "{name}" to sourceSecurityGroupTags for this AccessRules.'
)
else:
# None of literals or objects are present in sourceSecurityGroupTags,
# so initialize it with objects and update the provided object
self.sourceSecurityGroupTags = {"objects": [new_sgt]}
logging.info(
f'Adding "{name}" to sourceSecurityGroupTags for this AccessRules.'
)
elif action == "remove":
if "sourceSecurityGroupTags" in self.__dict__:
if name != "":
# an object's name has been provided to be removed
objects = []
for obj in self.sourceSecurityGroupTags["objects"]:
if obj["name"] != name:
objects.append(obj)
if len(objects) == 0:
# it was the last object which was deleted now
del self.sourceSecurityGroupTags
logging.info(
f'Removed "{name}" from sourceSecurityGroupTags for this AccessRules'
)
logging.info(
"All source security group tags are removed from this AccessRules object."
)
else:
self.sourceSecurityGroupTags["objects"] = objects
logging.info(
f'Removed "{name}" from sourceSecurityGroupTags for this AccessRules.'
)
else:
# a literal value has been provided to be removed
type_ = self.sourceSecurityGroupTags["literals"].get(literal)
if type_:
self.sourceSecurityGroupTags["literals"].pop(literal)
logging.info(
f'Removed literal "{literal}" of '
f'type "{type_}" from sourceSecurityGroupTags for this AccessRules.'
)
else:
logging.info(
f'Unable to removed literal "{literal}" '
f"from sourceSecurityGroupTags as it was not found"
)
else:
logging.info(
"No sourceSecurityGroupTags exist for this AccessRules. Nothing to remove."
)
elif action == "clear":
if "sourceSecurityGroupTags" in self.__dict__:
del self.sourceSecurityGroupTags
logging.info(
"All source security group tags are removed from this AccessRules object."
)
def destination_sgt(self, action, name="", literal=None):
"""
Add/modify name/literal to the destinationSecurityGroupTags field of AccessRules object.
:param action: (str) 'add', 'remove', or 'clear'
:param name: (str) Name of SGT in FMC.
:param literal: (dict) {value:<>, type:<>}
:return: None
"""
        # Not implemented here; a full version would mirror source_sgt() against destinationSecurityGroupTags.
        pass
class ACPRule(AccessRules):
"""
Dispose of this Class after 20210101.
Use AccessRules() instead.
"""
def __init__(self, fmc, **kwargs):
warnings.resetwarnings()
warnings.warn("Deprecated: ACPRule() should be called via AccessRules().")
super().__init__(fmc, **kwargs)
class Bulk(object):
"""
Send many JSON objects in one API call.
This is specific to the AccessRules() method.
"""
MAX_SIZE_QTY = 1000
MAX_SIZE_IN_BYTES = 2048000
REQUIRED_FOR_POST = []
@property
def URL_SUFFIX(self):
"""
Add the URL suffixes for section, categories, insertBefore and insertAfter.
:return (str): url
"""
url = "?"
if "category" in self.__dict__:
url = f"{url}category={self.category}&"
if "insertBefore" in self.__dict__:
url = f"{url}insertBefore={self.insertBefore}&"
if "insertAfter" in self.__dict__:
url = f"{url}insertAfter={self.insertAfter}&"
if "insertBefore" in self.__dict__ and "insertAfter" in self.__dict__:
logging.warning("ACP rule has both insertBefore and insertAfter params")
if "section" in self.__dict__:
url = f"{url}section={self.section}&"
return url[:-1]
def __init__(self, fmc, url="", **kwargs):
"""
Initialize Bulk object.
:param fmc (object): FMC object
:param url (str): Base URL used for API action.
:param **kwargs: Pass any/all variables for self.
:return: None
"""
logging.debug("In __init__() for Bulk class.")
self.fmc = fmc
self.items = []
self.URL = url
self.parse_kwargs(**kwargs)
def parse_kwargs(self, **kwargs):
"""
Add/modify variables in self.
:return: None
"""
logging.debug("In parse_kwargs() for Bulk class.")
if "category" in kwargs:
self.category = kwargs["category"]
if "insertBefore" in kwargs:
self.insertBefore = kwargs["insertBefore"]
if "insertAfter" in kwargs:
self.insertAfter = kwargs["insertAfter"]
if "section" in kwargs:
self.section = kwargs["section"]
def add(self, item):
"""
:param item: (str) Add JSON string to list of items to send to FMC.
:return: None
"""
self.items.append(item)
logging.info(f"Adding {item} to bulk items list.")
def clear(self):
"""
Clear self.items -- Empty out list of JSON strings to send to FMC.
:return: None
"""
logging.info(f"Clearing bulk items list.")
self.items = []
def post(self):
"""
Send list of self.items to FMC as a bulk import.
:return: (str) requests response from FMC
"""
# Build URL
self.URL = f"{self.URL}{self.URL_SUFFIX}&bulk=true"
        # Break up the items into MAX_SIZE_QTY-sized chunks.
chunks = [
self.items[i * self.MAX_SIZE_QTY : (i + 1) * self.MAX_SIZE_QTY]
for i in range(
(len(self.items) + self.MAX_SIZE_QTY - 1) // self.MAX_SIZE_QTY
)
]
# Post the chunks
for item in chunks:
# I'm not sure what to do about the max bytes right now so I'll just throw a warning message.
            if sys.getsizeof(item, 0) > self.MAX_SIZE_IN_BYTES:
                logging.warning(
                    "This chunk of the post is too large. Please submit fewer items to be bulk posted."
                )
response = self.fmc.send_to_api(method="post", url=self.URL, json_data=item)
logging.info(f"Posting to bulk items.")
return response
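# A minimal usage sketch for Bulk(), hedged: the FMC connection details, the ACP id, and the
# exact AccessRules kwargs below are illustrative assumptions, not taken from this module.
#
#     with FMC(host="10.0.0.10", username="admin", password="***", autodeploy=False) as fmc:
#         rules_url = f"{fmc.configuration_url}/policy/accesspolicies/<acp_id>/accessrules"
#         bulk = Bulk(fmc=fmc, url=rules_url, category="MyCategory")
#         for i in range(3):
#             rule = AccessRules(fmc=fmc, acp_id="<acp_id>", name=f"bulk-rule-{i}", action="ALLOW")
#             bulk.add(rule.format_data())
#         bulk.post()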
|
#!/usr/bin/python
# coding: utf-8
import sys
import random
import os
from time import gmtime, strftime
# Pillow replaces the old standalone PIL modules (`import Image` etc.), which no longer resolve.
from PIL import Image, ImageDraw
allongement = 4
im1 = Image.open(str(sys.argv[1]))
im2 = Image.new("RGBA",(im1.size[0], im1.size[1]))
im3 = Image.new("RGBA",(im1.size[0], im1.size[1]))
im4 = Image.new("RGBA",(im1.size[0], im1.size[1]))
im5 = Image.new("RGBA",(im1.size[0], im1.size[1]))
im6 = Image.new("RGBA",(im1.size[0]*allongement, im1.size[1]))
Larg = im1.size[0]
Haut = im1.size[1]
loadfile = False
def randHaut():
    return random.randint(0, im1.size[1]//8)*8
# Cut boundaries: the top edge, 40 random multiples of 8, and the bottom edge
randomCoupeHauteur = [0] + [randHaut() for _ in range(40)] + [im1.size[1]]
randomCoupeHauteur.sort()
def Hacheur(haut, bas) :
n = 0
i = 0
    while n < im6.size[0]:
i+=1
loop = 0
proportions = [\
(2,2),(2,4),(2,5),(2,8),(2,16),(2,32),\
(4,4),(4,5),(4,8),(4,16),(4,32),(4,64),\
(8,3),(8,5),(8,8),(8,16),(8,32),\
(16,2),(16,3),(16,4),(16,5),(16,8),(16,16),(16,32),\
(32,3),(32,5),(32,8),(32,16),(32,32),\
(64,3),(64,4),(64,8),(64,16),\
(128,1),(128,2),(128,4),(128,8),\
(256,1),(256,2),(256,4),\
(512,1),(512,2),\
(768,1),(768,2),\
(1024,1),(1024,2),\
(2048,1),\
(3072,1)]
        choix_rnd = random.randint(0, len(proportions)-1)
        largeur = proportions[choix_rnd][0]
        repeat = proportions[choix_rnd][1]
        randomCopyPosi = random.randint(0, (im1.size[0]-largeur))
pixelSizeList = [1,1,1,1,1,1,1,1,1,16,32,64]
#pixelSizeList = [1,5,25,125]
pixelSizeIndex = random.randint(0,len(pixelSizeList)-1)
pixelSize = pixelSizeList[pixelSizeIndex]
hauteur = bas-haut
cropfinal = [largeur,hauteur]
if largeur % pixelSize != 0:
croop = int(largeur / pixelSize)
largeur = (croop + 1 ) * pixelSize
if hauteur % pixelSize != 0:
croop2 = int(hauteur / pixelSize)
hauteur = (croop2 + 1 ) * pixelSize
im2 = im1.crop((randomCopyPosi,haut,randomCopyPosi+largeur,haut+hauteur))
        im3 = im2.resize((im2.size[0]//pixelSize, im2.size[1]//pixelSize), Image.NEAREST)
im4 = im3.resize((im3.size[0]*pixelSize, im3.size[1]*pixelSize), Image.NEAREST)
im5 = im4.crop((0,0,cropfinal[0],cropfinal[1]))
while loop<repeat:
px1 = n
px2 = n + cropfinal[0]
'''
draw = ImageDraw.Draw(im5)
draw.line((0, 0, 4, 0), fill="rgb(255,255,255)")
draw.line((0, 0, 0, 4), fill="rgb(255,255,255)")
'''
im6.paste(im5, (px1, haut, px2, bas))
n = n + cropfinal[0]
loop = loop + 1
for j in range(len(randomCoupeHauteur)-1):
Hacheur(randomCoupeHauteur[j], randomCoupeHauteur[j+1])
scriptpy = str(sys.argv[1])
script = scriptpy[:-3]
n = "1.1"
#TODO : inclure version du script + taille finale image
im6.save(script+"."+n+"_"+strftime("%Y%m%d-%Hh%Mm%Ss", gmtime())+".png",'PNG', quality=100)
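# Usage sketch (assumed invocation): python <script>.py input.png
# Writes a PNG named after the input image plus the version string and a timestamp,
# containing the horizontally stretched glitch collage built above.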
|
import tensorflow as tf
import numpy as np
from tf_util.stat_util import approx_equal
from dsn.util.dsn_util import check_convergence
DTYPE = tf.float64
EPS = 1e-16
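# Assumed semantics, inferred from the tests below: check_convergence(cost_grads, cur_ind, lag, alpha)
# examines the `lag` gradient rows ending at cur_ind and declares convergence only when every
# parameter's mean gradient is statistically indistinguishable from zero at significance level alpha.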
def test_check_convergence():
np.random.seed(0)
array_len = 1000
converge_ind = 500
num_params = 10
cost_grads = np.zeros((array_len, num_params))
cost_grads[:converge_ind, :] = np.random.normal(
2.0, 1.0, (converge_ind, num_params)
)
cost_grads[converge_ind:, :] = np.random.normal(
0.0, 1.0, (converge_ind, num_params)
)
lag = 100
alpha = 0.05
fail_cur_inds = range(100, 501, 100)
pass_cur_inds = range(600, 1001, 100)
for cur_ind in fail_cur_inds:
assert not check_convergence(cost_grads, cur_ind, lag, alpha)
for cur_ind in pass_cur_inds:
assert check_convergence(cost_grads, cur_ind, lag, alpha)
# All nonzero but one
cost_grads = np.random.normal(-2.0, 1.0, (array_len, num_params))
cost_grads[:, 2] = np.random.normal(0.0, 1.0, (array_len,))
fail_cur_inds = range(100, 1001, 100)
for cur_ind in fail_cur_inds:
assert not check_convergence(cost_grads, cur_ind, lag, alpha)
# All zero but one
cost_grads = np.random.normal(0.0, 1.0, (array_len, num_params))
cost_grads[:, 2] = np.random.normal(-2.0, 1.0, (array_len,))
fail_cur_inds = range(100, 1001, 100)
for cur_ind in fail_cur_inds:
assert not check_convergence(cost_grads, cur_ind, lag, alpha)
if __name__ == "__main__":
test_check_convergence()
|
import unittest
from katas.beta.the_skeptical_kid_generator import alan_annoying_kid
class AlanAnnoyingKidTestCase(unittest.TestCase):
def test_equal_1(self):
self.assertEqual(alan_annoying_kid("Today I played football."),
"I don't think you played football today, I think y"
"ou didn't play at all!")
def test_equal_2(self):
self.assertEqual(alan_annoying_kid("Today I didn't play football."),
"I don't think you didn't play football today, I th"
"ink you did play it!")
def test_equal_3(self):
self.assertEqual(alan_annoying_kid(
"Today I didn't attempt to hardcode this Kata."
), "I don't think you didn't attempt to hardcode this Kata today, I"
" think you did attempt it!")
def test_equal_4(self):
self.assertEqual(alan_annoying_kid("Today I cleaned the kitchen."),
"I don't think you cleaned the kitchen today, I thi"
"nk you didn't clean at all!")
def test_equal_5(self):
self.assertEqual(alan_annoying_kid(
"Today I learned to code like a pro."
), "I don't think you learned to code like a pro today, I think you "
"didn't learn at all!")
|
def LS_X(string_input, X):
"""
Summary and Description of Function:
This function shifts all of the characters of a string by "X" places to the left.
The leftmost characters are deleted in replacement of "X" hashtags ("#") to the right.
Parameters:
string_input (string): The string inserted by the user.
X (int): The number of characters shifted in the string.
Returns:
string_LS (string): The manipulated string done by the LS_X function.
Example:
LS-4, ELECTRONICS (X = 4, string_input = ELECTRONICS): TRONICS####
"""
def RS_X(string_input, X):
"""
Summary and Description of Function:
This function shifts all of the characters of a string by "X" places to the right.
The rightmost characters are deleted in replacement of "X" hashtags ("#") to the left.
Parameters:
string_input (string): The string inserted by the user.
X (int): The number of characters shifted in the string.
Returns:
string_RS (string): The manipulated string done by the RS_X function.
Example:
RS-3, CHAIRS (X = 3, string_input = CHAIRS): ###CHA
"""
def LC_X(string_input, X):
"""
Summary and Description of Function:
This function circulates the leftmost characters to the right-hand side of the string by X characters.
Parameters:
string_input (string): The string inserted by the user.
X (int): The number of characters circulated in the string.
Returns:
string_LC (string): The manipulated string done by the LC_X function.
Example:
LC-2, NOTEBOOK (X = 2, string_input = NOTEBOOK): TEBOOKNO
"""
string_LC = ""
LC_one = string_input[:X] # take first X letters of string_input
LC_two = string_input[X:] # take everything but first X letters of string_input
string_LC = LC_two + LC_one # move LC_two in front of LC_one (simulating circulation to the left)
return string_LC
def RC_X(string_input, X):
"""
Summary and Description of Function:
This function circulates the rightmost characters to the left-hand side of the string by X characters.
Parameters:
string_input (string): The string inserted by the user.
X (int): The number of characters circulated in the string.
Returns:
string_RC (string): The manipulated string done by the RC_X function.
Example:
    RC-5, BLACKBOARD (X = 5, string_input = BLACKBOARD): BOARDBLACK
"""
string_RC = ""
RC_one = string_input[-X:] # take the last X letters of string_input
RC_two = string_input[:-X] # take everything but the last X letters of string_input
string_RC = RC_one + RC_two # move RC_one (the last X letters) in front of RC_two (simulating circulation to the right)
return string_RC
def MC_SLXD(string_input, starting_position, length, X, direction):
"""
Summary and Description of Function:
This function circulates X characters from starting position S with a length of L, in the direction D.
Parameters:
string_input (string): The string inserted by the user.
X (int): The number of characters circulated in the string.
length (int): Length of the string being circulated from S.
starting_position (int): Character starting position.
direction (string): Direction, right or left (R, L)
Returns:
string_MC (string): The manipulated string done by the MC_SLXD function.
Example:
MC_332R, BOXCUTTER (S = 3, string_input = BOXCUTTER, L = 3, X = 3, D = R): "NOT SURE HOW THIS ONE WORKS"
"""
def REV_SL(string_input, starting_position, length):
"""
Summary and Description of Function:
This function reverses the order of the characters starting at position S with a length L.
Parameters:
string_input (string): The string inserted by the user.
starting_position (int): Character starting position.
length (int): The number of characters in the string to be reversed.
Returns:
string_REV (string): The manipulated string done by the REV_SL function.
Example:
REV_24, PEOPLE (S = 2, L = 4, string_input = PEOPLE): PLPOEE
"""
def main():  # Define the main (the main menu)
    pass  # TODO: implement the interactive menu that dispatches to the functions above
if __name__ == '__main__':
main()
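# Expected behaviour, taken from the docstring examples above:
#     LS_X("ELECTRONICS", 4) -> "TRONICS####"
#     RS_X("CHAIRS", 3)      -> "###CHA"
#     LC_X("NOTEBOOK", 2)    -> "TEBOOKNO"
#     RC_X("BLACKBOARD", 5)  -> "BOARDBLACK"
#     REV_SL("PEOPLE", 2, 4) -> "PLPOEE"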
|
import random
import math
import numpy as np
import torch
import torch.nn.functional as F
import torch.optim as optim
import torch.nn as nn
#import os, json
#import matplotlib.pyplot as plt
from agent_dir.agent import Agent
from environment import Environment
from collections import namedtuple
use_cuda = torch.cuda.is_available()
class DQN(nn.Module):
'''
This architecture is the one from OpenAI Baseline, with small modification.
'''
def __init__(self, channels, num_actions):
super(DQN, self).__init__()
self.conv1 = nn.Conv2d(channels, 32, kernel_size=8, stride=4)
self.conv2 = nn.Conv2d(32, 64, kernel_size=4, stride=2)
self.conv3 = nn.Conv2d(64, 64, kernel_size=3, stride=1)
self.fc = nn.Linear(3136, 512)
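        # NOTE (assumption): 3136 = 64 * 7 * 7, the conv stack's flattened output for 84x84 inputs.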
self.head = nn.Linear(512, num_actions)
self.relu = nn.ReLU()
self.lrelu = nn.LeakyReLU(0.01)
def forward(self, x):
x = self.relu(self.conv1(x))
x = self.relu(self.conv2(x))
x = self.relu(self.conv3(x))
x = self.lrelu(self.fc(x.view(x.size(0), -1)))
q = self.head(x)
return q
class ReplayMemory(object):
def __init__(self, capacity):
self.capacity = capacity
self.memory = []
self.position = 0
self.Transition = namedtuple('Transition',
('state', 'action', 'next_state', 'reward'))
def push(self, *args):
if len(self.memory) < self.capacity:
self.memory.append(None)
self.memory[self.position] = self.Transition(*args)
self.position = (self.position + 1) % self.capacity
def sample(self, batch_size):
return random.sample(self.memory, batch_size)
def __len__(self):
return len(self.memory)
class AgentDQN(Agent):
def __init__(self, env, args):
self.env = env
self.input_channels = 4
self.num_actions = self.env.action_space.n
# TODO:
# Initialize your replay buffer
        self.device = 'cuda:0' if use_cuda else 'cpu'
# build target, online network
self.target_net = DQN(self.input_channels, self.num_actions)
self.target_net = self.target_net.to(self.device)
# self.target_net = self.target_net.cuda() if use_cuda else self.target_net
self.online_net = DQN(self.input_channels, self.num_actions)
self.online_net = self.online_net.to(self.device)
# self.online_net = self.online_net.cuda() if use_cuda else self.online_net
if args.test_dqn:
self.load('dqn')
# discounted reward
self.GAMMA = 0.99
# training hyperparameters
self.train_freq = 4 # frequency to train the online network
self.learning_start = 10000 # before we start to update our network, we wait a few steps first to fill the replay.
self.batch_size = 64
self.num_timesteps = 3000000 # total training steps
self.display_freq = 10 # frequency to display training progress
self.draw_freq = 50 # frequency to add point to plot the line
self.save_freq = 200000 # frequency to save the model
self.target_update_freq = 1000 # frequency to update target network
# optimizer
self.optimizer = optim.RMSprop(self.online_net.parameters(), lr=1e-4)
self.steps = 0 # num. of passed steps. this may be useful in controlling exploration
self.Transition = namedtuple('Transition',
('state', 'action', 'next_state', 'reward'))
self.memory = ReplayMemory(self.learning_start)
def save(self, save_path):
print('save model to', save_path)
torch.save(self.online_net.state_dict(), save_path + '_online.cpt')
torch.save(self.target_net.state_dict(), save_path + '_target.cpt')
def load(self, load_path):
print('load model from', load_path)
if use_cuda:
self.online_net.load_state_dict(torch.load(load_path + '_online.cpt'))
# self.target_net.load_state_dict(torch.load(load_path + '_target.cpt'))
else:
self.online_net.load_state_dict(torch.load(load_path + '_online.cpt', map_location=lambda storage, loc: storage))
# self.target_net.load_state_dict(torch.load(load_path + '_target.cpt', map_location=lambda storage, loc: storage))
def init_game_setting(self):
# we don't need init_game_setting in DQN
pass
def make_action(self, state, test=False):
# TODO:
# At first, you decide whether you want to explore the environemnt
# TODO:
# if explore, you randomly samples one action
# else, use your model to predict action
if test:
state = torch.from_numpy(state).permute(2,0,1).unsqueeze(0)
state = state.to(self.device)
with torch.no_grad():
action = self.online_net(state).max(1)[1]
return action.item()
else:
sample = random.random()
EPS_END = 0.05
EPS_START = 0.9
EPS_DECAY = 200
eps_threshold = EPS_END + (EPS_START - EPS_END) * math.exp(-1. * self.steps / EPS_DECAY)
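            # With these constants, eps_threshold decays from ~0.90 at step 0 to ~0.36 at step 200 and ~0.06 by step 1000.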
if sample > eps_threshold:
with torch.no_grad():
action = self.online_net(state).max(1)[1].view(1,1)
else:
action = torch.tensor([[random.randrange(self.num_actions)]],
dtype=torch.long).to(self.device)
# action = action.cuda() if use_cuda else action
return action
def update(self):
# TODO:
# To update model, we sample some stored experiences as training examples.
if len(self.memory) < self.batch_size:
return
transitions = self.memory.sample(self.batch_size)
batch = self.Transition(*zip(*transitions))
        # e.g. batch.next_state = [s1, s2, s3, s4, None] -> mask (T, T, T, T, F)
        non_final_mask = torch.tensor(tuple(map(lambda x: x is not None, batch.next_state)),
                                      device=self.device, dtype=torch.bool)
non_final_next_states = torch.cat([s for s in batch.next_state if s is not None])
state_batch = torch.cat(batch.state)
action_batch = torch.cat(batch.action)
reward_batch = torch.cat(batch.reward)
# TODO:
# Compute Q(s_t, a) with your model.
state_action_values = self.online_net(state_batch).gather(1 ,action_batch)
with torch.no_grad():
# TODO:
# Compute Q(s_{t+1}, a) for all next states.
# Since we do not want to backprop through the expected action values,
# use torch.no_grad() to stop the gradient from Q(s_{t+1}, a)
# next_state_values = torch.zeros(self.batch_size, device=self.device)
# next_state_values[non_final_mask] = \
# self.target_net(non_final_next_states).max(1)[0]
next_state_actions = self.online_net(non_final_next_states).max(1)[1].unsqueeze(1)
next_state_values = torch.zeros(self.batch_size, device=self.device)
next_state_values[non_final_mask] = \
self.target_net(non_final_next_states).gather(1, next_state_actions).squeeze(1)
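            # Double DQN: the online net selects the argmax action, the target net evaluates it,
            # which reduces the overestimation bias of a plain max over target-net Q-values.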
# TODO:
# Compute the expected Q values: rewards + gamma * max(Q(s_{t+1}, a))
# You should carefully deal with gamma * max(Q(s_{t+1}, a)) when it is the terminal state.
expected_state_action_values = (next_state_values * self.GAMMA) + reward_batch
# TODO:
# Compute temporal difference loss
loss = F.smooth_l1_loss(state_action_values, expected_state_action_values.unsqueeze(1))
self.optimizer.zero_grad()
loss.backward()
for param in self.online_net.parameters():
param.grad.data.clamp_(-1, 1)
self.optimizer.step()
return loss.item()
# def save_curve(self, x_values, y_values, title):
#
# tmp = {title:
# {
# 'x': x_values,
# 'y': y_values
# }
# }
#
# if os.path.isfile('./curve_param.json'):
# with open('curve_param.json', 'r') as f:
# file = json.load(f)
# file.update(tmp)
# with open('curve_param.json', 'w') as f:
# json.dump(file, f)
# else:
# with open('curve_param.json', 'w') as f:
# json.dump(tmp, f)
def train(self):
episodes_done_num = 0 # passed episodes
total_reward = 0 # compute average reward
loss = 0
x_values = []
y_values = []
while(True):
state = self.env.reset()
# State: (80,80,4) --> (1,4,80,80)
state = torch.from_numpy(state).permute(2,0,1).unsqueeze(0)
state = state.to(self.device)
# state = state.cuda() if use_cuda else state
done = False
while(not done):
# select and perform action
action = self.make_action(state)
next_state, reward, done, _ = self.env.step(action[0, 0].data.item())
total_reward += reward
# process new state
next_state = torch.from_numpy(next_state).permute(2,0,1).unsqueeze(0).to(self.device)
# next_state = next_state.cuda() if use_cuda else next_state
if done:
next_state = None
# TODO:
# store the transition in memory
reward = torch.tensor([reward]).to(self.device)
# reward = reward.cuda() if use_cuda else reward
self.memory.push(state, action, next_state, reward)
# move to the next state
state = next_state
# Perform one step of the optimization
if self.steps > self.learning_start and self.steps % self.train_freq == 0:
loss = self.update()
# update target network
if self.steps > self.learning_start and self.steps % self.target_update_freq == 0:
self.target_net.load_state_dict(self.online_net.state_dict())
# save the model
if self.steps % self.save_freq == 0:
self.save('dqn')
self.steps += 1
if episodes_done_num % self.draw_freq == 0:
x_values.append(self.steps)
y_values.append(total_reward/self.draw_freq)
if episodes_done_num % self.draw_freq == 0:
print('Episode: %d | Steps: %d/%d | Avg reward: %f | loss: %f '%
(episodes_done_num, self.steps, self.num_timesteps, total_reward / self.draw_freq, loss))
total_reward = 0
episodes_done_num += 1
if self.steps > self.num_timesteps:
break
self.save('dqn')
# self.save_curve(x_values, y_values, 'ddqn')
|
import pandas as pn
import math
from sklearn.metrics import roc_auc_score
data = pn.read_csv('./DATA/W03_03.csv', header=None)
X = data.loc[:, 1:]
y = data.loc[:, 0]
S1, S2, w1, w2, w1_past, w2_past = 0,0,0,0,0,0
j = 0
#un-regularized
# while j <= 10000:
# w1_grad, w1_past = w1, w1
# w2_grad, w2_past = w2, w2
# S1 = 0
# S2 = 0
# for i in range(0, len(y)):
# S1 += y[i] * X[1][i] * (1-1/(1+math.exp(-y[i] * (w1_grad * X[1][i] + w2_grad * X[2][i]))))
#
# for i in range(0, len(y)):
# S2 += y[i] * X[2][i] * (1-1/(1+math.exp(-y[i] * (w1_grad * X[1][i] + w2_grad * X[2][i]))))
#
# w1 = w1 + (0.1 * 1/len(y) * S1)
# w2 = w2 + (0.1 * 1/len(y) * S2)
#
# if math.sqrt((w1_past - w1) ** 2 + (w2_past - w2) ** 2) <= 0.00001:
# break
# j += 1
#regularized
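# The loop below is batch gradient ascent on the logistic log-likelihood:
# S_k = sum_i y_i * x_ik * sigma(-y_i * w.x_i), applied with learning rate 0.1.
# The extra `- 10 * 10 * w` term reads as an L2 penalty; its coefficient is kept as written in the source.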
while j <= 10000:
w1_grad, w1_past = w1, w1
w2_grad, w2_past = w2, w2
S1 = 0
S2 = 0
for i in range(0, len(y)):
S1 += y[i] * X[1][i] * (1-1/(1+math.exp(-y[i] * (w1_grad * X[1][i] + w2_grad * X[2][i]))))
for i in range(0, len(y)):
S2 += y[i] * X[2][i] * (1-1/(1+math.exp(-y[i] * (w1_grad * X[1][i] + w2_grad * X[2][i]))))
w1 = w1 + (0.1 * 1/len(y) * S1) - 10 * 10 * w1_grad
w2 = w2 + (0.1 * 1/len(y) * S2) - 10 * 10 * w2_grad
if math.sqrt((w1_past - w1) ** 2 + (w2_past - w2) ** 2) <= 0.00001:
break
j += 1
ax = [1 / (1 + math.exp(-w1*X[1][i] - w2*X[2][i])) for i in range(0, len(y))]
print(roc_auc_score(y, ax))
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.post_list, name='post_list'),
path('<int:key>/detail/', views.post_detail, name='post_detail'),
path('<int:key>/update/', views.post_update, name='post_update'),
path('create/', views.post_create, name='post_create'),
    path('<int:key>/delete/', views.post_delete, name='post_delete'),
]
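# The routes above assume view callables with these shapes (sketch only; the real views live in views.py):
#     def post_list(request): ...
#     def post_detail(request, key): ...   # `key` comes from the <int:key> converter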
|
from django.conf import settings
from confapp import conf
from pyforms.controls import ControlCheckBox
from pyforms_web.web.middleware import PyFormsMiddleware
from pyforms_web.widgets.django import ModelAdminWidget
from finance.models import Project
from .financeproject_form import FinanceProjectFormApp
class FinanceProjectListApp(ModelAdminWidget):
TITLE = 'Finance projects'
MODEL = Project
AUTHORIZED_GROUPS = ['superuser', settings.PROFILE_LAB_ADMIN]
LIST_DISPLAY = [
'code',
'name',
'start_date',
'end_date',
]
ADDFORM_CLASS = FinanceProjectFormApp
EDITFORM_CLASS = FinanceProjectFormApp
USE_DETAILS_TO_EDIT = False
# ORQUESTRA CONFIGURATION
# =========================================================================
LAYOUT_POSITION = conf.ORQUESTRA_HOME
# =========================================================================
def __init__(self, *args, **kwargs):
self._active = ControlCheckBox(
'Active',
default=True,
label_visible=False,
changed_event=self.populate_list,
field_style='text-align:right;',
)
super().__init__(*args, **kwargs)
def get_toolbar_buttons(self, has_add_permission=False):
return tuple(
(['_add_btn'] if has_add_permission else []) + [
'_active',
]
)
def get_queryset(self, request, qs):
if self._active.value:
qs = qs.active()
return qs
def has_update_permissions(self, obj):
if obj and obj.code == 'NO TRACK':
return False
else:
return True
def has_remove_permissions(self, obj):
"""Only superusers may delete these objects."""
user = PyFormsMiddleware.user()
return user.is_superuser
|
# To add a new cell, type '# %%'
# To add a new markdown cell, type '# %% [markdown]'
# %% [markdown]
# # Gathered Notebook
#
# This notebook was generated by an experimental feature called "Gather". The intent is that it contains only the code and cells required to produce the same results as the cell originally selected for gathering. Please note that the Python analysis is quite conservative, so if it is unsure whether a line of code is necessary for execution, it will err on the side of including it.
#
# **Please let us know if you are satisfied with what was gathered by [taking this survey](https://aka.ms/gathersurvey).**
# %%
import pandas as pd
import pandas_profiling
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
from sklearn.naive_bayes import GaussianNB
from sklearn.metrics import accuracy_score, confusion_matrix
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier
# %%
titanic_df = pd.read_csv("titanic_data.csv")
# %%
pandas_profiling.ProfileReport(titanic_df)
# %%
age_median = titanic_df['Age'].median(skipna=True)
titanic_df['Age'].fillna(age_median, inplace=True)
# %%
titanic_df['Embarked'].fillna("S", inplace=True)
# %%
fare_median = titanic_df['Fare'].median(skipna=True)
titanic_df['Fare'].fillna(fare_median,inplace=True)
# %%
titanic_df['TravelGroup']=titanic_df["SibSp"]+titanic_df["Parch"]
titanic_df['TravelAlone']=np.where(titanic_df['TravelGroup']>0, 0, 1)
# %%
titanic_df['TravelTotal'] = titanic_df['TravelGroup'] + 1
# %%
le = LabelEncoder()
pclass_cat = le.fit_transform(titanic_df.Pclass)
sex_cat = le.fit_transform(titanic_df.Sex)
embarked_cat = le.fit_transform(titanic_df.Embarked)
titanic_df['pclass_cat'] = pclass_cat
titanic_df['sex_cat'] = sex_cat
titanic_df['embarked_cat'] = embarked_cat
features = ['pclass_cat', 'sex_cat', 'Age', 'Fare', 'embarked_cat', 'TravelAlone', 'TravelTotal']
data = titanic_df.reindex(features+['Survived'],axis=1)
# %%
X = data.iloc[:, 0:7]
Y = data.iloc[:, 7]
# %%
X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=0.2)
# %%
gnb = GaussianNB().fit(X_train, y_train)
y_pred = gnb.predict(X_test)
NB_all_accuracy = accuracy_score(y_test,y_pred)
# %%
tree = DecisionTreeClassifier(criterion = 'entropy', min_samples_split = 2, random_state=5).fit(X_train, y_train)
y_pred = tree.predict(X_test)
tree_all_accuracy = accuracy_score(y_test, y_pred)
# %%
high_importance = list(zip(features, tree.feature_importances_))
high_importance = sorted(high_importance, key=lambda x: x[1], reverse=True)
high_importance = [tup[0] for tup in high_importance][:3]
X_train2 = X_train[high_importance]
X_test2 = X_test[high_importance]
# %%
tree2 = DecisionTreeClassifier(criterion = 'entropy', min_samples_split = 2, random_state=5).fit(X_train2, y_train)
y_pred2 = tree2.predict(X_test2)
tree_imp_accuracy = accuracy_score(y_test, y_pred2)
# %%
clf = RandomForestClassifier(n_jobs=2, random_state=0).fit(X_train, y_train)
y_pred = clf.predict(X_test)
RF_all_accuracy = accuracy_score(y_test,y_pred)
# %%
high_importance = list(zip(features, tree.feature_importances_))
high_importance = sorted(high_importance, key=lambda x: x[1], reverse=True)
high_importance = [tup[0] for tup in high_importance][:3]
X_train2 = X_train[high_importance]
X_test2 = X_test[high_importance]
# %%
clf2 = RandomForestClassifier(n_jobs=2, random_state=0).fit(X_train2, y_train)
y_pred2 = clf2.predict(X_test2)
RF_imp_accuracy = accuracy_score(y_test,y_pred2)
# %%
print('NB accuracy: {0:.2f}'.format(NB_all_accuracy))
print("Decision Tree:")
print('All attributes: {0:.2f}'.format(tree_all_accuracy))
print('High importance attributes: {0:.2f}'.format(tree_imp_accuracy))
print("Random Forsest:")
print('All attributes: {0:.2f}'.format(RF_all_accuracy))
print('High importance attributes: {0:.2f}'.format(RF_imp_accuracy))
|
import math
A = float(input("A= "))
B = float(input("B= "))
C = float(input("C= "))
D = pow(B, 2) - 4*A*C
if D < 0:
    print("No real roots (the discriminant is negative)")
else:
    smaller_root = (-B - math.sqrt(D)) / (2*A)
    larger_root = (-B + math.sqrt(D)) / (2*A)
    print("x1= ", smaller_root)
    print("x2= ", larger_root)
|
# Copyright (c) 2021 kamyu. All rights reserved.
#
# Google Code Jam 2021 Qualification Round - Problem C. Reversort Engineering
# https://codingcompetitions.withgoogle.com/codejam/round/000000000043580a/00000000006d12d7
#
# Time: O(N)
# Space: O(1)
#
def reverse(L, i, j):
while i < j:
L[i], L[j] = L[j], L[i]
i += 1
j -= 1
def reversort_engineering():
N, C = map(int, raw_input().strip().split())
if not (N-1 <= C <= (N+2)*(N-1)//2):
return "IMPOSSIBLE"
result = [0]*N
for i in xrange(N):
l = min(C-(N-1-i)+1, N-i) # greedy
C -= l
if l != N-i:
break
if i%2 == 0:
result[N-1-i//2] = i+1
else:
result[i//2] = i+1
if i%2 == 0:
k = i+1
for j in xrange((N-1-i//2+1)-(N-i), N-1-i//2+1):
result[j] = k
k += 1
reverse(result, (N-1-i//2+1)-(N-i), ((N-1-i//2+1)-(N-i))+l-1) # Space: O(1)
else:
k = i+1
for j in reversed(xrange(i//2, i//2+(N-i))):
result[j] = k
k += 1
reverse(result, (i//2+(N-i)-1)-l+1, i//2+(N-i)-1) # Space: O(1)
return " ".join(map(str, result))
for case in xrange(input()):
print 'Case #%d: %s' % (case+1, reversort_engineering())
|
class RegionCodeIsAbsentError(Exception):
pass
class WorksheetAbsentError(Exception):
pass
|
# -*- coding:utf-8 -*-
# author: will
import datetime
import time
from flask import request, jsonify, g
from app import db
from app.models import Banner, Article, UserBTN, UpdateTime
from utils.user_service.login import login_required, admin_required
from . import api_banner
# @api_banner.route('/uploadimage',methods=['POST'])
# def upload_image():
# res = json.loads(request.data)
# try:
# if not all([res['data'], res['type']]):
#         return jsonify(errno="-2", errmsg='invalid parameters')
# except Exception as e:
#     return jsonify(errno="-2", errmsg='invalid parameters')
# res = storage_by_bs64(res['data'],res['type'])
# if res:
# data = {
# 'errno': '0',
# 'url': res
# }
# else:
# data = {
# 'errno': '-2',
# 'msg': 'Fail'
# }
#
# return jsonify(data)
#
# @api_banner.route('/getbanneritem',methods=['POST'])
# def get_item():
# res = request.get_json()
# pid = res.get('pid',0)
# result = Banner_name.query.filter_by(pid=pid).all()
# if result:
# try:
# arr = []
# for item in result.items:
# data = {
# 'id':item.id,
# 'name': item.image,
# 'link': item.target_id
# }
# arr.append(data)
# except Exception as e:
# print(e)
#             return jsonify(errno="-2", errmsg='network error')
# data = {
# 'errno': '0',
# 'list': arr,
# }
# else:
# data = {
# 'errno': '0',
# 'list': {},
# }
# return jsonify(data)
# Create a new banner
@api_banner.route('/createbanner', methods=['POST'])
@login_required
@admin_required
def create_banner():
try:
res = request.get_json()
status = res.get('status')
sort = res.get('sort')
image = res.get('image')
link = res.get('link')
article_id = res.get('article_id')
group_id = res.get('group_id')
admin_id = g.user_id
        if not all([status, sort, image]):
            return jsonify(errno="-2", errmsg='missing required parameters')
        if link and article_id:
            return jsonify(errno="-2", errmsg='an article and an external link cannot both be set')
        try:
            status = int(status)
            sort = int(sort)
            # article_id is optional when an external link is used instead of an article
            article_id = int(article_id) if article_id else None
        except Exception as e:
            print(e)
            return jsonify(errno="-2", errmsg='invalid parameter type')
        if status not in [0, 1]:
            return jsonify(errno="-2", errmsg='invalid status parameter')
        if sort <= 0:
            return jsonify(errno="-2", errmsg='sort order must be greater than 0')
        if article_id:
            article = Article.query.get(article_id)
            if not article:
                return jsonify(errno="-2", errmsg='article does not exist')
        # group = Group.query.get('group_id')
        # if not group:
        #     return jsonify(errno="-2", errmsg='article group does not exist')
        results = Banner.query.all()
        count = len(results)
        if sort > count:
            return jsonify(errno="-2", errmsg='sort order out of range')
for item in results:
if item.sort >= sort:
item.sort += 1
data = Banner(
image=image,
link=link,
sort=sort,
status=status,
article_id=article_id,
group_id=group_id,
admin_id=admin_id
)
sort_obj = Banner.query.order_by(Banner.sort.asc()).first()
first_sort = sort_obj.sort
print(first_sort)
if sort <= first_sort and status == 1:
            # The first visible banner image changed; flag mini-program users for a notification
records = UserBTN.query.filter(UserBTN.btn_num == 4).all()
for record in records:
record.is_new = 1
record.is_send = 0
db.session.add(record)
            # Record the update time
today = datetime.datetime.today().date()
up_obj = UpdateTime.query.filter(UpdateTime.type == 5).filter(
UpdateTime.create_time.like(str(today) + "%")).first()
if not up_obj:
time_obj = UpdateTime()
time_obj.type = 5
db.session.add(time_obj)
try:
db.session.add(data)
db.session.commit()
data = {'errno': '0', 'msg': 'success'}
except Exception as e:
db.session.rollback()
print(e)
data = {'errno': '-2', 'msg': 'Fail'}
except Exception as e:
print(e)
        data = {'errno': '-2', 'msg': 'network error'}
return jsonify(data)
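# Example /createbanner request body (illustrative values only, not from the source):
#     {"status": 1, "sort": 1, "image": "https://cdn.example.com/banner.png",
#      "article_id": 12, "group_id": 3}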
# Update a banner
@api_banner.route('/changebanner', methods=['POST'])
@login_required
@admin_required
def change_banner():
res = request.get_json()
admin_id = g.user_id
print("请求参数:", request.data)
try:
        if not res['id']:
            return jsonify(errno="-2", errmsg='invalid parameters')
    except Exception as e:
        return jsonify(errno="-2", errmsg='invalid parameters')
link = res.get('link')
article_id = res.get('article_id')
if link and article_id:
return jsonify(errno="-2", errmsg='不能同时设置文章和外链')
sort = res.get('sort', 1)
resultsort = Banner.query.filter_by(sort=int(sort)).first()
results = Banner.query.filter_by(id=int(res['id'])).first()
if results:
if resultsort:
if int(results.sort) != int(sort):
resultsdata = Banner.query.all()
for item in resultsdata:
print((item.sort))
if item.sort >= int(res['sort']):
print((item.sort))
item.sort += 1
results.status = res.get('status')
results.image = res.get('image')
results.sort = res.get('sort', 1)
results.link = res.get('link')
results.article_id = res.get('article_id')
results.group_id = res.get('group_id')
results.admin_id = admin_id
sort_obj = Banner.query.order_by(Banner.sort.asc()).first()
first_sort = sort_obj.sort
print(first_sort)
        if int(sort) <= first_sort and int(res.get('status')) == 1:
            # The first visible banner image changed; flag mini-program users for a notification
records = UserBTN.query.filter(UserBTN.btn_num == 4).all()
for record in records:
record.is_new = 1
record.is_send = 0
db.session.add(record)
            # Record the update time
today = datetime.datetime.today().date()
up_obj = UpdateTime.query.filter(UpdateTime.type == 5).filter(
UpdateTime.create_time.like(str(today) + "%")).first()
if not up_obj:
time_obj = UpdateTime()
time_obj.type = 5
db.session.add(time_obj)
try:
db.session.commit()
data = {
'errno': '0',
'msg': 'success'
}
except Exception as e:
db.session.rollback()
print(e)
data = {
'errno': '-2',
'msg': 'Fail'
}
else:
data = {
'errno': '-2',
            'msg': 'banner does not exist'
}
return jsonify(data)
# Delete a banner
@api_banner.route('/deletebanner', methods=['POST'])
@login_required
@admin_required
def delete_banner():
res = request.get_json()
print("请求参数:", request.data)
try:
        if not res['id']:
            return jsonify(errno="-2", errmsg='invalid parameters')
    except Exception as e:
        return jsonify(errno="-2", errmsg='invalid parameters')
results = Banner.query.filter_by(id=int(res['id'])).first()
if results:
try:
db.session.delete(results)
db.session.commit()
data = {
'errno': '0',
'msg': 'success'
}
except Exception as e:
db.session.rollback()
print(e)
data = {
'errno': '-2',
'msg': 'Fail'
}
else:
data = {
'errno': '-2',
            'msg': 'banner does not exist'
}
return jsonify(data)
# Toggle a banner's status
@api_banner.route('/changebannertatus', methods=['POST'])
@login_required
@admin_required
def change_status():
# res = json.loads(request.data)
res = request.get_json()
admin_id = g.user_id
print("请求参数:", request.data)
try:
        if not all([res['id'], str(res['status'])]):
            return jsonify(errno="-2", errmsg='invalid parameters')
    except Exception as e:
        return jsonify(errno="-2", errmsg='invalid parameters')
results = Banner.query.filter_by(id=int(res['id'])).first()
if results:
if int(res['status']) == 1:
cstatus = 0
else:
banner = Banner.query.order_by(Banner.sort.asc()).first()
print(banner.sort)
if results.sort <= banner.sort:
                # The first visible banner image changed; flag mini-program users for a notification
records = UserBTN.query.filter(UserBTN.btn_num == 4).all()
for record in records:
record.is_new = 1
record.is_send = 0
db.session.add(record)
                # Record the update time
today = datetime.datetime.today().date()
up_obj = UpdateTime.query.filter(UpdateTime.type == 5).filter(
UpdateTime.create_time.like(str(today) + "%")).first()
if not up_obj:
time_obj = UpdateTime()
time_obj.type = 5
db.session.add(time_obj)
cstatus = 1
results.status = cstatus
results.admin_id = admin_id
# results.status = res['status']
try:
db.session.commit()
data = {
'errno': '0',
'msg': 'success'
}
except Exception as e:
db.session.rollback()
print(e)
data = {
'errno': '-2',
'msg': 'Fail'
}
else:
data = {
'errno': '-2',
            'msg': 'banner does not exist'
}
return jsonify(data)
# Get the banner list for the PC admin
@api_banner.route('/getpcbanner', methods=['POST'])
@login_required
@admin_required
def get_pcbannerlist():
# res = json.loads(request.data)
res = request.get_json()
print("请求参数:", request.data)
page = int(res.get('page', 1))
size = int(res.get('size', 10))
try:
total = Banner.query.count()
result = Banner.query.order_by(Banner.sort.asc()).paginate(page, size, False)
except Exception as e:
print(e)
return jsonify(errno="-2", errmsg='数据库错误')
if result:
try:
arr = []
print((666))
location = (page - 1) * size + 1
for item in result.items:
data = {
'id': item.id,
'image': item.image,
'link': item.link,
'sort': item.sort,
'location': location,
'status': str(item.status),
'article_id': item.article_id,
'group_id': item.group_id,
}
location += 1
if item.article_id:
print((item.article_id))
res = Article.query.filter_by(id=item.article_id).first()
data['title'] = res.title
arr.append(data)
except Exception as e:
print(e)
return jsonify(errno="-2", errmsg='网络错误')
data = {
'errno': '0',
'list': arr,
'total': total
}
else:
data = {
'errno': '0',
'list': {},
'total': total
}
return jsonify(data)
# Get banners for the mini program
@api_banner.route('/getbanner', methods=['POST'])
def get_banner():
# res = json.loads(request.data)
res = request.get_json()
print("请求参数:", request.data)
page = int(res.get('page', 1))
size = int(res.get('size', 5))
try:
# total = Banner.query.filter_by(status=1).count()
result = Banner.query.filter_by(status=1).order_by(Banner.sort.asc()).paginate(page, size, False)
except Exception as e:
print(e)
return jsonify(errno="-2", errmsg='数据库错误')
if result:
try:
arr = []
for item in result.items:
data = {
'id': item.id,
'image': item.image,
'article_id': item.article_id,
'group_id': item.group_id,
'link': item.link,
# 'target_name': item.target_name
}
if item.article_id:
res = Article.query.filter_by(id=item.article_id).first()
if res:
data['title'] = res.title
else:
data['title'] = ''
arr.append(data)
except Exception as e:
print(e)
return jsonify(errno="-2", errmsg='网络错误')
data = {
'status': '0',
'list': arr,
}
else:
data = {
'errno': '0',
'list': {},
}
start_time = time.time()
    # print('request time:', time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(start_time)))
    # print("banner data:", data)
return jsonify(data)
|
# Covered
# lists
# strings
# dictionaries
# tuples
###############################################################################
# Lists
###############################################################################
# Source: https://developers.google.com/edu/python/lists
list = ['larry', 'curly', 'moe']
# Thinking about big O, often helpful to know the number of elements
print(len(list))
list.append('shemp') ## append elem at end
list.insert(0, 'xxx') ## insert elem at index 0
list.extend(['yyy', 'zzz']) ## add list of elems at end
print(list) ## ['xxx', 'larry', 'curly', 'moe', 'shemp', 'yyy', 'zzz']
print(list.index('curly')) ## 2
list.remove('curly') ## search and remove that element
list.pop(1) ## removes and returns 'larry'
print(list) ## ['xxx', 'moe', 'shemp', 'yyy', 'zzz']
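###############################################################################
# Strings, Dictionaries, Tuples
###############################################################################
# Quick supplemental examples for the remaining topics listed in the header
# (standard Python behavior, added for completeness).
s = 'hello'
print(s.upper()) ## 'HELLO'
print(s[1:4]) ## 'ell'
d = {'a': 1, 'b': 2}
d['c'] = 3 ## add or update a key
print(d.get('z', 0)) ## 0 -- default when the key is missing
t = (1, 2, 3) ## tuples are fixed-size and immutable
x, y, z = t ## tuple unpacking
print(x + y + z) ## 6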
|
# Should use dedicated mayavi environment because of its odd requirements
import pandas as pd
import numpy as np
import os
import sys
from mayavi import mlab
# You can pass a set of folders to analyze, or else the script will do them all
if len(sys.argv) > 1:
folders = sys.argv[1:]
else:
folders = os.listdir(sys.path[0])
folders = [f for f in folders if os.path.isdir(f)]
# Don't touch folders that have no data files inside of them ...
def anydata(folder):
dirlist = os.listdir(folder)
for fn in dirlist:
if fn.endswith('mtrx'): return True
return False
folders = [f for f in folders if anydata(f)]
# Construct single dataframe from all the dataframes in the folders
# This happens to be less work at the moment
# Also allows me to write each plot type in its own loop so it can be copy pasted
dfs = []
for f in folders:
df_path = os.path.join(f, f + '.df')
dfs.append(pd.read_pickle(df_path))
print('Loaded {} into memory'.format(df_path))
df = pd.concat(dfs)
def frames_to_mp4(directory, prefix='Loop', outname='out'):
# Send command to create video with ffmpeg
#cmd = (r'cd "{}" & ffmpeg -framerate 10 -i {}%03d.png -c:v libx264 '
# '-r 15 -pix_fmt yuv420p -vf "scale=trunc(iw/2)*2:trunc(ih/2)*2" '
# '{}.mp4').format(directory, prefix, outname)
# Should be higher quality still compatible with outdated media players
cmd = (r'cd "{}" & ffmpeg -framerate 10 -i {}%03d.png -c:v libx264 '
'-pix_fmt yuv420p -crf 1 -vf "scale=trunc(iw/2)*2:trunc(ih/2)*2" '
'{}.mp4').format(directory, prefix, outname)
# Need elite player to see this one
#cmd = (r'cd "{}" & ffmpeg -framerate 10 -i {}%03d.png -c:v libx264 '
#' -crf 17 -vf "scale=trunc(iw/2)*2:trunc(ih/2)*2" '
#'{}.mp4').format(directory, prefix, outname)
os.system(cmd)
def fitplane(Z):
# Plane Regression -- basically took this from online somewhere
# probably I messed up the dimensions as usual
m, n = np.shape(Z)
X, Y = np.meshgrid(np.arange(m), np.arange(n))
XX = np.hstack((np.reshape(X, (m*n, 1)) , np.reshape(Y, (m*n, 1)) ) )
XX = np.hstack((np.ones((m*n, 1)), XX))
ZZ = np.reshape(Z, (m*n, 1))
theta = np.dot(np.dot(np.linalg.pinv(np.dot(XX.transpose(), XX)), XX.transpose()), ZZ)
plane = np.reshape(np.dot(XX, theta), (m, n))
return plane
def sweet3drender():
# Test of 3d render
imseries = df[df.type == 'xy'].iloc[0]
image = imseries['scan']
fig1 = mlab.figure(bgcolor=(1,1,1), size=(1200, 600))
# Probably messing up dimensions again.
h, w = np.shape(image)
sh = imseries['height'] * 1e9
sw = imseries['width'] * 1e9
x = np.linspace(0, sh, h)
y = np.linspace(0, sw, w)
# Use this if you just want the surface color to represent the height
mlab.surf(x, y, image, warp_scale=5)
mlab.view(elevation=70, distance='auto')
mlab.orientation_axes()
def rotatingvideo(imseries, column='scan', folder='', fnprefix='', fnsuffix='anim', color=None, nrotations=180, vmin=None, vmax=None, warpscale=3, overwrite=True):
# Make a sweet ass rotating 3d surface plot with mayavi
# Pass the pandas series of the scan you want to plot
# Don't do anything if file exists and overwrite = False
if not overwrite:
fp = os.path.join(folder, '{}_{:03d}_{}.png'.format(fnprefix, 0, fnsuffix))
if os.path.isfile(fp):
print('Starting file {} already found. Doing nothing'.format(fp))
return
image = imseries[column]
if not os.path.isdir(folder):
os.makedirs(folder)
fig1 = mlab.figure(bgcolor=(1,1,1), size=(1200, 600))
# I don't think this makes it any faster
#fig1.scene.off_screen_rendering = True
# Probably messing up dimensions again.
h, w = np.shape(image)
sh = imseries['height'] * 1e9
sw = imseries['width'] * 1e9
x = np.linspace(0, sh, h)
y = np.linspace(0, sw, w)
# Use this if you just want the surface color to represent the height
#mlab.surf(x, y, image, warp_scale=warpscale)
if color is None:
color = image
if (vmin is None) and (vmax is None):
vmin, vmax = np.percentile(color.flatten(), (0.1, 99.9))
# mesh allows coloring by a different array than the height
y, x = np.meshgrid(y, x)
print(np.shape(x))
print(np.shape(y))
print(np.shape(image))
print(np.shape(color))
mesh = mlab.mesh(x, y, warpscale*image, scalars=color, colormap='blue-red', vmin=vmin, vmax=vmax)
mlab.view(elevation=70, distance='auto')
#mlab.orientation_axes()
#mlab.axes(mesh, color=(.7, .7, .7), extent=mesh_extent,
#ranges=(0, 1, 0, 1, 0, 1), xlabel='', ylabel='',
#zlabel='Probability',
#x_axis_visibility=True, z_axis_visibility=True)
for i in range(nrotations):
fig1.scene.camera.azimuth(360. / nrotations)
fp = os.path.join(folder, '{}_{:03d}_{}.png'.format(fnprefix, i, fnsuffix))
fig1.scene.save_png(fp)
mlab.close()
if __name__ == '__main__':
'''
# Detailed movie
#folder = '31-May-2017'
#id = '23_1'
#id = '21_1'
#id = '5_1'
#id = '16_1'
folder = '01-Jun-2017'
#id = '20_1'
#id = '14_1'
id = '21_1'
interesting = df[(df.id == id) & (df.folder == folder)]
interestingZ = interesting.iloc[0]
interestingI = interesting.iloc[1]
Idata = interestingI['scan']
Zdata = interestingZ['scan'] # / 2 + interestingZ['scan2'] / 2
sourcefolder = interesting['folder'].iloc[0]
animdir = os.path.join(folder, 'animations', id)
#rotatingvideo(interestingZ, column='corrscan', folder=animdir, color=Idata, warpscale=3)
#frames_to_mp4(animdir, 'anim')
'''
# Make a few frames at warpscale 1 for all ~square topography measurements
# You can pass a folder to analyze, or else the script will do them all
if len(sys.argv) > 1:
folders = sys.argv[1:]
else:
folders = df['folder'].unique()
print('Making movies for folders:')
print('\n'.join(folders))
are_scans = df['type'] == 'xy'
in_folders = df['folder'].isin(folders)
for folder, folderdata in df[are_scans & in_folders].groupby('folder'):
for id, data in folderdata.groupby('id'):
print('Aspect ratio: {}'.format(data.aspect.iloc[0]))
if 0.5 < data.aspect.iloc[0] < 1.6:
Zseries = data[data['channel_name'] == 'Z'].iloc[0]
Idata = data[data['channel_name'] == 'I'].iloc[0]['scan']
vmin, vmax = np.percentile(Idata.flatten(), (0.1, 99.9))
# This gives each scan its own folder and is annoying
#animdir = os.path.join(folder, 'animations', '{}_warpscale_2'.format(id))
animdir = os.path.join(folder, '3D_rotations')
                rotatingvideo(Zseries, column='corrscan', fnprefix=id, fnsuffix='Forward', nrotations=12, folder=animdir, color=Idata, vmin=vmin, vmax=vmax, warpscale=30, overwrite=False)
#frames_to_mp4(animdir, 'Forward')
# Also write the reverse scan for comparison
#Idata_r = data[data['channel_name'] == 'I'].iloc[0]['scan2']
#rotatingvideo(Zseries, column='corrscan2', fnprefix=id, fnsuffix='Reverse', nrotations=27, folder=animdir, color=Idata_r, warpscale=2, overwrite=False)
|
from collections import defaultdict
# Duplicate reports only count once, so deduplicating them with a set up front is faster.
def solution(id_list, report, k):
answer = []
    stoper = defaultdict(int)  # times each ID has been reported
    reporter = defaultdict(list)  # IDs this user has reported
    # Tally who reported whom and how many times each ID was reported
    for i in report:
        p1, p2 = i.split()
        # Only count if this user has not already reported the same person
        if p2 not in reporter[p1]:
            reporter[p1].append(p2)
            stoper[p2] += 1
    # Count only the reported users who were reported k or more times
for i in id_list:
result = 0
for j in reporter[i]:
if stoper[j] >= k:
result += 1
answer.append(result)
return answer
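# A minimal sketch of the set-based variant suggested above: deduplicating
# `report` with set() up front removes the per-report membership test.
def solution_with_set(id_list, report, k):
    stoper = defaultdict(int)
    reporter = defaultdict(set)
    for entry in set(report):  # duplicate reports are ignored outright
        p1, p2 = entry.split()
        reporter[p1].add(p2)
        stoper[p2] += 1
    return [sum(1 for j in reporter[i] if stoper[j] >= k) for i in id_list]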
|
# -*- coding: utf-8 -*-
class TrieNode:
def __init__(self):
self.children = {}
self.leaf = False
class Trie:
def __init__(self):
self.root = TrieNode()
def insert(self, word):
current = self.root
for char in word:
if char not in current.children:
current.children[char] = TrieNode()
current = current.children[char]
current.leaf = True
class Solution:
def findWords(self, board, words):
trie = Trie()
for word in words:
trie.insert(word)
result = set()
visited = set()
for i in range(len(board)):
for j in range(len(board[0])):
self.startingHere(board, trie, visited, result, trie.root, "", i, j)
return list(result)
def startingHere(
self, board, trie, visited, result, current_node, current_word, i, j
):
if current_node.leaf:
result.add(current_word)
out_of_bounds = i < 0 or i >= len(board) or j < 0 or j >= len(board[0])
if out_of_bounds:
return
already_visited = (i, j) in visited
dead_end = board[i][j] not in current_node.children
if already_visited or dead_end:
return
current_node = current_node.children[board[i][j]]
current_word = current_word + board[i][j]
visited.add((i, j))
self.startingHere(
board, trie, visited, result, current_node, current_word, i - 1, j
)
self.startingHere(
board, trie, visited, result, current_node, current_word, i, j - 1
)
self.startingHere(
board, trie, visited, result, current_node, current_word, i + 1, j
)
self.startingHere(
board, trie, visited, result, current_node, current_word, i, j + 1
)
visited.remove((i, j))
if __name__ == "__main__":
solution = Solution()
expected = ["eat", "oath"]
result = solution.findWords(
[
["o", "a", "a", "n"],
["e", "t", "a", "e"],
["i", "h", "k", "r"],
["i", "f", "l", "v"],
],
["oath", "pea", "eat", "rain"],
)
assert sorted(expected) == sorted(result)
|
from _typeshed import Incomplete
def fast_gnp_random_graph(
n, p, seed: Incomplete | None = None, directed: bool = False
): ...
def gnp_random_graph(n, p, seed: Incomplete | None = None, directed: bool = False): ...
binomial_graph = gnp_random_graph
erdos_renyi_graph = gnp_random_graph
def dense_gnm_random_graph(n, m, seed: Incomplete | None = None): ...
def gnm_random_graph(n, m, seed: Incomplete | None = None, directed: bool = False): ...
def newman_watts_strogatz_graph(n, k, p, seed: Incomplete | None = None): ...
def watts_strogatz_graph(n, k, p, seed: Incomplete | None = None): ...
def connected_watts_strogatz_graph(
n, k, p, tries: int = 100, seed: Incomplete | None = None
): ...
def random_regular_graph(d, n, seed: Incomplete | None = None): ...
def barabasi_albert_graph(
n, m, seed: Incomplete | None = None, initial_graph: Incomplete | None = None
): ...
def dual_barabasi_albert_graph(
n,
m1,
m2,
p,
seed: Incomplete | None = None,
initial_graph: Incomplete | None = None,
): ...
def extended_barabasi_albert_graph(n, m, p, q, seed: Incomplete | None = None): ...
def powerlaw_cluster_graph(n, m, p, seed: Incomplete | None = None): ...
def random_lobster(n, p1, p2, seed: Incomplete | None = None): ...
def random_shell_graph(constructor, seed: Incomplete | None = None): ...
def random_powerlaw_tree(
n, gamma: int = 3, seed: Incomplete | None = None, tries: int = 100
): ...
def random_powerlaw_tree_sequence(
n, gamma: int = 3, seed: Incomplete | None = None, tries: int = 100
): ...
def random_kernel_graph(
n,
kernel_integral,
kernel_root: Incomplete | None = None,
seed: Incomplete | None = None,
): ...
|
from flask import jsonify
from psycopg2 import IntegrityError
from app.DAOs.BuildingDAO import BuildingDAO
ADD_BUILDING_KEYS = ["edificioid", "nomoficial", "blddenom", "codigoold", "bldtype", "attributes"]
def _buildBuildingResponse(building_tuple):
"""
Private Method to build building dictionary to be JSONified.
Uses :func:`~app.handlers.BuildingHandler.BuildingHandler._getDistinctFloorNumbersByBuildingID`
:param building_tuple: response tuple from SQL query
:returns Dict: Building information with keys:
.. code-block:: python
{'bid', 'bname', 'babbrev', 'numfloors', 'bcommonname',
'btype', 'photourl', 'distinctfloors'}
"""
response = {}
response['bid'] = building_tuple[0]
response['bname'] = building_tuple[1]
response['babbrev'] = building_tuple[2]
response['numfloors'] = building_tuple[3]
response['bcommonname'] = building_tuple[4]
response['btype'] = building_tuple[5]
response['photourl'] = building_tuple[6]
response['distinctfloors'] = _getDistinctFloorNumbersByBuildingID(
bid=building_tuple[0])
return response
def _buildCoreBuildingResponse(building_tuple):
"""
Private Method to build building dictionary to be JSONified.
:param building_tuple: response tuple from SQL query
:returns Dict: Building information with keys:
.. code-block:: python
{'bid', 'bname', 'babbrev'}
"""
# Note: currently using the getBuildingByID() method
response = {}
response['bid'] = building_tuple[0]
response['bname'] = building_tuple[1]
response['babbrev'] = building_tuple[2]
return response
def _getDistinctFloorNumbersByBuildingID(bid):
"""
    Private method to fetch the distinct floor numbers of a building.
    Uses :func:`~app.DAOs.BuildingDAO.BuildingDAO.getDistinctFloorNumbersByBuildingID`
    :param bid: building ID
    :returns List: distinct floor numbers for the given building
"""
floors = BuildingDAO().getDistinctFloorNumbersByBuildingID(bid=bid)
    floor_array = []
    if floors:
        for floor in floors:
            floor_array.append(floor[0])
    return floor_array
class BuildingHandler:
def addFullBuilding(self, json, uid):
"""
Handler method to verify that all necessary JSON keys are
present before creating a new building.
Uses :func:`~app.DAOs.BuildingDAO.BuildingDAO.addFullBuilding`
:param json: Contains the necessary keys to create a building from UPRM Portal data
["edificioid", "nomoficial", "blddenom", "codigoold", "bldtype", "attributes"]
:type json: JSON
:param uid: User ID
:type uid: int
:return: JSON
"""
if not isinstance(uid, int) or uid<0:
return jsonify(Error="Invalid uid: "+ str(uid))
for key in ADD_BUILDING_KEYS:
if key not in json:
return jsonify(Error='Missing key in JSON: ' + str(key)), 404
try:
building_results = BuildingDAO().addFullBuilding(building_json=json, uid=uid)
except ValueError as e:
return jsonify(Error=str(e)), 400
except KeyError as e:
return jsonify(Error=str(e)), 400
return jsonify(Result=str(building_results)), 201
def getAllBuildings(self, no_json=False):
"""
Return all Building entries in the database.
Uses :func:`~app.DAOs.BuildingDAO.BuildingDAO.getAllBuildings` as well as
:func:`~app.handlers.BuildingHandler._buildBuildingResponse`
:param no_json: states if the response should be returned as JSON or not. Default=False
:type no_json: bool
        :return JSON: containing all buildings. Error JSON otherwise.
"""
dao = BuildingDAO()
buildings = dao.getAllBuildings()
if not buildings:
return jsonify(Error='Could not find any buildings in system.'), 404
else:
building_list = []
for row in buildings:
building_list.append(
_buildBuildingResponse(building_tuple=row))
response = {"buildings": building_list}
if no_json:
return response
return jsonify(response)
def getAllBuildingsSegmented(self, offset, limit=20):
"""
Return all Building entries in the database, segmented.
Uses :func:`~app.DAOs.BuildingDAO.BuildingDAO.getAllBuildingsSegmented` as well as
:func:`~app.handlers.BuildingHandler._buildBuildingResponse`
:param offset: Number of results to skip from top of list.
:type offset: int
:param limit: Number of results to return. Default = 20.
:type limit: int
        :return JSON: containing the requested segment of buildings. Error JSON otherwise.
"""
dao = BuildingDAO()
buildings = dao.getAllBuildingsSegmented(offset=offset, limit=limit)
result = []
for row in buildings:
result.append(_buildBuildingResponse(row))
return jsonify({"buildings":result})
def getBuildingByID(self, bid, no_json=False):
"""
Return the building entry belonging to the specified bid.
Uses :func:`~app.DAOs.BuildingDAO.BuildingDAO.getBuildingByID` as well as
:func:`~app.handlers.BuildingHandler._buildBuildingResponse`
:param bid: building ID.
:type bid: int
:param no_json: states if the response should be returned as JSON or not. Default=False
:type no_json: bool
        :return JSON: containing building information. Error JSON otherwise.
"""
dao = BuildingDAO()
building = dao.getBuildingByID(bid=bid)
if not building:
return jsonify(Error='building does not exist: bid=' + str(bid)), 404
else:
response = _buildBuildingResponse(building_tuple=building)
if no_json:
return response
return jsonify(response)
def getCoreBuildingByID(self, bid, no_json=False):
"""
Return the building entry belonging to the specified bid.
Uses :func:`~app.DAOs.BuildingDAO.BuildingDAO.getBuildingByID` as well as
:func:`~app.handlers.BuildingHandler._buildCoreBuildingResponse`
:param bid: building ID.
:type bid: int
:param no_json: states if the response should be returned as JSON or not. Default=False
:type no_json: bool
        :return JSON: containing building information. Error JSON otherwise.
"""
dao = BuildingDAO()
building = dao.getBuildingByID(bid=bid)
if not building:
return jsonify(Error='building does not exist: bid=' + str(bid)), 404
else:
response = _buildCoreBuildingResponse(building_tuple=building)
if no_json:
return response
return jsonify(response)
def safeGetBuildingByID(self, bid):
"""
Return the building entry belonging to the specified bid.
Uses :func:`~app.handlers.BuildingHandler.getBuildingByID`
:param bid: building ID.
:type bid: int
        :return Dict: containing building information. Error JSON otherwise.
"""
building = self.getBuildingByID(bid=bid, no_json=True)
        # getBuildingByID returns a dict only when a building was found and no_json=True; otherwise it is a JSON error response.
if not isinstance(building, dict):
building = str(building)
return building
def getBuildingsByKeyword(self, offset, limit, keyword):
"""
        Returns a list of buildings that match a given search string.
Uses :func:`~app.DAOs.BuildingDAO.BuildingDAO.searchBuildingsByKeyword`
:param keyword: The keyword to search for.
:type keyword: str
:param offset: Number of results to skip from top of list.
:type offset: int
        :param limit: Number of results to return.
:type limit: int
:return JSON: A list of buildings that match the given keyword
"""
dao = BuildingDAO()
        result = []
        # Keep only alphanumeric characters from the search keyword
        keyword = "".join(filter(str.isalnum, keyword))
response = dao.searchBuildingsByKeyword(
keyword=keyword, offset=offset, limit=limit)
for building in response:
result.append(_buildBuildingResponse(
building_tuple=building))
return jsonify({"buildings":result})
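# A minimal sketch of wiring this handler into a Flask route (the blueprint
# name and URL rule here are hypothetical, not taken from this project):
# from flask import Blueprint
# buildings_bp = Blueprint('buildings', __name__)
# @buildings_bp.route('/buildings/<int:bid>', methods=['GET'])
# def get_building(bid):
#     return BuildingHandler().getBuildingByID(bid=bid)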
|
import os
import dotenv
def get_sql_connection_string():
dotenv.load_dotenv()
return "DRIVER=%(SQL_DRIVER)s;SERVER=%(SQL_SERVER)s;PORT=1433;DATABASE=%(SQL_DATABASE)s;UID=%(SQL_USERNAME)s;PWD={%(SQL_PASSWORD)s}" % os.environ
|
import requests
import time
import simplejson
import setting
import config
def get_proxy(retry=10):
count=0
proxyurl = 'http://:8081/dynamicIp/common/getDynamicIp.do'
for i in range(retry):
try:
r = requests.get(proxyurl, timeout=10)
print(r.text)
except Exception as e:
print(e)
count += 1
            print('Failed to fetch proxy, retrying: attempt ' + str(count))
time.sleep(1)
else:
js = r.json()
proxyServer = 'http://{0}:{1}'.format(js.get('ip'), js.get('port'))
proxies_random = {
'http': proxyServer
}
return proxies_random
# p=(print(get_proxy()) for i in range(5))
# # print(p)
# # for i in p:
# # i
# while 1:
# try:
# next(p)
# except Exception as e:
# print(e)
# break
def get_binhttp():
proxy=get_proxy()
print(proxy)
r = requests.get(
url='http://httpbin.org/ip',
proxies=proxy
)
print(r.text)
print(r.json())
get_binhttp()
|
# -*- coding: utf-8 -*-
'''
Created on 2016.6.14
@author: huke
'''
def mul(n):
if n <= 1:
return 1
else:
return n*mul(n-1)
if __name__ == '__main__':
    n = input('Enter a number to compute its factorial: ')
print(mul(int(n)))
|
#!/usr/bin/env python
#coding=utf-8
'''
Created on 2018年2月23日
@author: jacket
'''
import unittest
from src.tools.CommentTool import isContainCommentForCPP,openFile
fileText=openFile("/home/jacket/Server/src/main.cpp")
# print fileText
class Test(unittest.TestCase):
def test_case1(self):
print isContainCommentForCPP(fileText)
if __name__=='__main__':
unittest.main()
|
#!/usr/bin/env python
#
# Script by Steven Grove (@sigwo)
# www.sigwo.com
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Date: 09-01-13
#
# Scans subnet.
#
# make the output go to file and pull that file into a GUI, load new window if payload is
# started, make a well known port scanner and complete port scanner, make an IP range selector
#
import datetime
import socket
import sys
# Get address string and CIDR string from command line
xaddr = raw_input("\n" + "IP address: ")
xcidr = raw_input("CIDR notation, NO / mark!: ")
# raw_input returns a string, so compare the numeric value
if int(xcidr) < 16:
    xcidr = raw_input("Please try again. Subnets must be longer than /16. NO / mark!: ")
addr = xaddr.split('.')
cidr = int(xcidr)
# Initialize the netmask and calculate based on CIDR mask
mask = [0, 0, 0, 0]
for i in range(cidr):
mask[i/8] = mask[i/8] + (1 << (7 - i % 8))
# Initialize net and binary and netmask with addr to get network
net = []
for i in range(4):
net.append(int(addr[i]) & mask[i])
# Duplicate net into broad array, gather host bits, and generate broadcast
broad = list(net)
brange = 32 - cidr
for i in range(brange):
broad[3 - i/8] = broad[3 - i/8] + (1 << (i % 8))
# Timestamp for future use in storing results
timestart = datetime.datetime.now()
t1 = str(timestart)
# Convert net and broad to 32-bit integers so host addresses can be iterated
def ip_to_int(octets):
    return (octets[0] << 24) | (octets[1] << 16) | (octets[2] << 8) | octets[3]
def int_to_ip(n):
    return ".".join(str((n >> shift) & 0xFF) for shift in (24, 16, 8, 0))
anet = ip_to_int(net) + 1   # first usable host address
abroad = ip_to_int(broad)   # broadcast address (excluded from the scan)
# Start scanning the network. The original draft opened a raw ICMP socket,
# which needs root privileges and a hand-built echo packet; this version
# probes TCP port 80 instead, which a plain socket can do.
with open('results.csv', 'a') as f:
    f.write(t1 + "\n")
    while anet < abroad:
        ip = int_to_ip(anet)
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.settimeout(0.5)  # short timeout speeds things up
        try:
            s.connect((ip, 80))
            f.write(", %s,OPEN\n" % ip)
            print "!!!!!!!!FOUND ONE!!!!!!!!!"
            print '\a' # beeps on open port
        except socket.error:
            f.write(", %s,CLOSED\n" % ip)
            print "..CLOSED"
        finally:
            s.close()
        anet += 1
|
""" BayesianOptimization package from https://github.com/fmfn/BayesianOptimization """
from __future__ import print_function
from collections import OrderedDict
import numpy as np
try:
from bayes_opt import BayesianOptimization
bayes_opt_present = True
except Exception:
BayesianOptimization = None
bayes_opt_present = False
from kernel_tuner.strategies import minimize
supported_methods = ["poi", "ei", "ucb"]
def tune(runner, kernel_options, device_options, tuning_options):
""" Find the best performing kernel configuration in the parameter space
:params runner: A runner from kernel_tuner.runners
:type runner: kernel_tuner.runner
:param kernel_options: A dictionary with all options for the kernel.
:type kernel_options: kernel_tuner.interface.Options
:param device_options: A dictionary with all options for the device
on which the kernel should be tuned.
:type device_options: kernel_tuner.interface.Options
:param tuning_options: A dictionary with all options regarding the tuning
process.
:type tuning_options: kernel_tuner.interface.Options
:returns: A list of dictionaries for executed kernel configurations and their
        execution times, and a dictionary that contains information
about the hardware/software environment on which the tuning took place.
:rtype: list(dict()), dict()
"""
if not bayes_opt_present:
raise ImportError("Error: optional dependency Bayesian Optimization not installed")
init_points = tuning_options.strategy_options.get("popsize", 20)
n_iter = tuning_options.strategy_options.get("max_fevals", 100)
# defaults as used by Bayesian Optimization Python package
acq = tuning_options.strategy_options.get("method", "ucb")
kappa = tuning_options.strategy_options.get("kappa", 2.576)
xi = tuning_options.strategy_options.get("xi", 0.0)
tuning_options["scaling"] = True
results = []
# function to pass to the optimizer
def func(**kwargs):
args = [kwargs[key] for key in tuning_options.tune_params.keys()]
return -1.0 * minimize._cost_func(args, kernel_options, tuning_options, runner, results)
bounds, _, _ = minimize.get_bounds_x0_eps(tuning_options)
pbounds = OrderedDict(zip(tuning_options.tune_params.keys(), bounds))
verbose = 0
if tuning_options.verbose:
verbose = 2
optimizer = BayesianOptimization(f=func, pbounds=pbounds, verbose=verbose)
optimizer.maximize(init_points=init_points, n_iter=n_iter, acq=acq, kappa=kappa, xi=xi)
if tuning_options.verbose:
print(optimizer.max)
return results, runner.dev.get_environment()
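# A minimal sketch of selecting this strategy from user code (assuming the
# standard kernel_tuner entry point; option names mirror the .get() calls above):
# from kernel_tuner import tune_kernel
# results, env = tune_kernel(..., strategy="bayes_opt",
#                            strategy_options={"popsize": 20, "max_fevals": 100,
#                                              "method": "ucb", "kappa": 2.576,
#                                              "xi": 0.0})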
|
#!/usr/bin/env python3
from copy import deepcopy
from lib.pos import Pos
from lib.zone.zone_base import ZoneBase
# Circular dependency workaround for Python; can be a normal import for Java
import lib.zone.zone as zone
class ZoneFragment(ZoneBase):
"""A zone fragment where non-standard behavior occurs.
pos2 is being rewritten to be exclusive, not inclusive.
"""
def __init__(self, other, axis_order=None):
super().__init__(other)
if isinstance(other, ZoneFragment):
self.parent = other.parent
self.axis_order = deepcopy(other.axis_order)
elif isinstance(other, zone.Zone):
self.parent = other
self.axis_order = axis_order
else:
raise TypeError("Expected ZoneFragment to be initialized with a Zone or another ZoneFragment")
def split_axis(self, pos, axis):
"""Returns (lower_zone, upper_zone) for this split along some axis.
Either zone may have a size of 0.
"""
lower = ZoneFragment(self)
lower._size[axis] = pos[axis] - lower._pos[axis]
upper = ZoneFragment(self)
upper._size[axis] -= lower._size[axis]
upper._pos[axis] += lower._size[axis]
return (lower, upper)
def split_by_overlap(self, overlap):
"""Returns a list of fragments of this zone, split by an overlapping zone."""
# overlap is a ZoneBase that overlaps and doesn't extend beyond this ZoneFragment.
if not isinstance(overlap, ZoneBase):
raise TypeError("Expected overlap to be type ZoneBase.")
center_zone = ZoneFragment(self)
other_min = overlap.min_corner
other_max = overlap.max_corner + Pos([1]*len(other_min))
result = []
for axis in self.axis_order:
            if axis >= len(other_max):
                # Skip axes that don't apply to the overlap
                continue
work_zones = result
result = []
for work_zone in work_zones:
# Add zones split from existing split zones
lower, work_zone = work_zone.split_axis(other_min, axis)
work_zone, upper = work_zone.split_axis(other_max, axis)
if lower:
result.append(lower)
if work_zone:
result.append(work_zone)
if upper:
result.append(upper)
# Add zones split from center, but not the center (overlap) itself
lower, center_zone = center_zone.split_axis(other_min, axis)
center_zone, upper = center_zone.split_axis(other_max, axis)
if lower:
result.append(lower)
if upper:
result.append(upper)
return result
def merge(self, other):
"""Merge two ZoneFragments without changing their combined size/shape.
Returns the merged ZoneFragment or None.
"""
a_min = self.min_corner
b_min = other.min_corner
a_size = self.size()
b_size = other.size()
# Confirm the ZoneFragments can be merged without extending outside their bounds
different_axis = -1
for axis in range(len(a_min)):
if (
a_min[axis] == b_min[axis]
and a_size[axis] == b_size[axis]
):
# This axis matches, all good so far
continue
if different_axis == -1:
# First different axis we've found
different_axis = axis
else:
# Second different axis; no merging this time
return None
if different_axis == -1:
# Same zone
return ZoneFragment(self)
axis = different_axis
# Confirm the two zones are touching
if (
a_min[axis] + a_size[axis] != b_min[axis] and
b_min[axis] + b_size[axis] != a_min[axis]
):
# They are not touching.
return None
# Merging is possible, go for it.
result = ZoneFragment(self)
min_corner = result.min_corner
max_corner = result.max_corner
min_corner[axis] = min(self.min_corner[axis], other.min_corner[axis])
max_corner[axis] = max(self.max_corner[axis], other.max_corner[axis])
result.min_corner = min_corner
result.max_corner = max_corner
return result
########################################################################################################################
# Only needed for debug and statistics:
def __repr__(self):
return "ZoneFragment(parent={!r}, pos={!r}, size={!r}, axis_order={!r})".format(self.parent, self._pos, self._size, self.axis_order)
|
# INFLATE.PY
# Decompresses files compressed with deflate_not3
# Theoretically.
import heapq as hq
import sys
import bitstring as bs
import huff_functions as huff
import deflate_fns as defl
# -------------------------------------------------------------
# Function that takes care of buffer for reading individual bits from file
cur_byte = 0
bits_read = 0
# Returns the next n bits as an integer
def readbits(n):
global cur_byte
global bits_read
    read = 0
for i in range(0, n):
bit_getter = 1 << (7 - bits_read)
bit = bit_getter & cur_byte
if bit != 0:
bit = 1
read = read * 2
read = read + bit
bits_read = bits_read + 1
if bits_read == 8:
bits_read = 0
cur_byte = int.from_bytes(text.read(1), byteorder = "big")
return read
# -----------------------------------------------------------
# Read arguments from command line to determine which file to decompress and where to
if len(sys.argv) == 3:
inputname = sys.argv[1]
outputname = sys.argv[2]
elif len(sys.argv) == 2:
inputname = sys.argv[1]
outputname = sys.argv[1] + "_deflated"
else:
print("Please provide at least one argument")
sys.exit()
# Setup for lookahead and search buffers, and the dictionary "search" (which contains the locations of all the three-length strings encountered)
text = open(inputname, "rb")
output = open(outputname, "wb")
cur_byte = int.from_bytes(text.read(1), byteorder = "big")
# First read in btype (currently we are only sending one block & it is dynamically compressed, so it will always be a 3-bit 6)
btype = readbits(3)
print(btype)
clc_codelengths = {}
# Read in code lengths for clc tree, which are printed in this weird order
for i in [16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15]:
clc_codelengths[i] = readbits(3)
clc_codelengths_list = []
for i in range(0, 19):
clc_codelengths_list.append(clc_codelengths[i])
print(clc_codelengths_list)
# Construct canonical huffman code for code length codes
clc_canonical = huff.makecanonical(range(0, 19), clc_codelengths_list)
print(clc_canonical)
clc_canonical_tree = huff.makecanonicaltree(clc_canonical)
print(clc_canonical_tree)
# Use this code to decode code lengths for length/literal and distance trees
# 286 length/literal code lengths and 30 distance code lengths
# But some codes are followed by extra bits to specify position in a range
ll_codelengths_list = []
prev = -1
while len(ll_codelengths_list) < 286:
# Read bits and navigate in decoding tree until we reach a leaf node
leafreached = False
currentnode = clc_canonical_tree
while not leafreached:
if (not currentnode[1]) and (not currentnode[2]):
leafreached = True
else:
nextbit = bs.BitArray(uint = readbits(1), length = 1)
if not nextbit[0]:
currentnode = currentnode[1]
else:
currentnode = currentnode[2]
length_code = currentnode[0]
print(length_code)
if length_code < 16:
# Represent literally code lengths of 0-15
ll_codelengths_list.append(length_code)
prev = length_code
elif length_code == 16:
# 16 followed by 2 extra bits represents prev code repeated 3-6 times
extrabits = readbits(2)
numrepeats = 3 + extrabits
for i in range(0, numrepeats):
ll_codelengths_list.append(prev)
    elif length_code == 17:
        # 17 followed by 3 extra bits represents 0 repeated 3-10 times
        extrabits = readbits(3)
        numrepeats = 3 + extrabits
        for i in range(0, numrepeats):
            ll_codelengths_list.append(0)
        prev = 0
    elif length_code == 18:
        # 18 followed by 7 extra bits represents 0 repeated 11-138 times
        extrabits = readbits(7)
        numrepeats = 11 + extrabits
        for i in range(0, numrepeats):
            ll_codelengths_list.append(0)
        prev = 0
else:
print("error")
print(ll_codelengths_list)
dist_codelengths_list = []
prev = -1
while len(dist_codelengths_list) < 30:
# Read bits and navigate in decoding tree until we reach a leaf node
leafreached = False
currentnode = clc_canonical_tree
while not leafreached:
if (not currentnode[1]) and (not currentnode[2]):
leafreached = True
else:
nextbit = bs.BitArray(uint = readbits(1), length = 1)
if not nextbit[0]:
currentnode = currentnode[1]
else:
currentnode = currentnode[2]
length_code = currentnode[0]
print(length_code)
if length_code < 16:
# Represent literally code lengths of 0-15
dist_codelengths_list.append(length_code)
prev = length_code
elif length_code == 16:
# 16 followed by 2 extra bits represents prev code repeated 3-6 times
extrabits = readbits(2)
numrepeats = 3 + extrabits
for i in range(0, numrepeats):
dist_codelengths_list.append(prev)
    elif length_code == 17:
        # 17 followed by 3 extra bits represents 0 repeated 3-10 times
        extrabits = readbits(3)
        numrepeats = 3 + extrabits
        for i in range(0, numrepeats):
            dist_codelengths_list.append(0)
        prev = 0
    elif length_code == 18:
        # 18 followed by 7 extra bits represents 0 repeated 11-138 times
        extrabits = readbits(7)
        numrepeats = 11 + extrabits
        for i in range(0, numrepeats):
            dist_codelengths_list.append(0)
        prev = 0
else:
print("error")
print(dist_codelengths_list)
# Construct canonical huffman code and decoding tree for length/literal codes
ll_canonical = huff.makecanonical(range(0, 286), ll_codelengths_list)
ll_canonical_tree = huff.makecanonicaltree(ll_canonical)
# Construct canonical huffman code and decoding tree for distance codes
dist_canonical = huff.makecanonical(range(0, 30), dist_codelengths_list)
dist_canonical_tree = huff.makecanonicaltree(dist_canonical)
# Finally, DECODE DATA
# NOTE: Adapt this to multi-block structure
lls = []
distances = []
while True:
print(lls)
print(distances)
# Decode a length/literal value
leafreached = False
currentnode = ll_canonical_tree
while not leafreached:
if (not currentnode[1]) and (not currentnode[2]):
leafreached = True
else:
nextbit = bs.BitArray(uint = readbits(1), length = 1)
if not nextbit[0]:
currentnode = currentnode[1]
else:
currentnode = currentnode[2]
ll = currentnode[0]
print("Literal/coded length: " + str(ll))
# If value < 256, it's a literal; otherwise length
if ll < 256:
lls.append(ll)
else:
num_extrabits = defl.length_code_num_extrabits(ll)
print("Num extra bits for length: " + str(num_extrabits))
if num_extrabits != 0:
extrabits = readbits(num_extrabits)
else:
extrabits = -1
length = defl.length_decode(ll, extrabits)
lls.append(length)
print("Extra bits: " + str(extrabits))
print("Decoded length: " + str(length))
if length == 256:
break
# Decode a distance value
leafreached = False
currentnode = dist_canonical_tree
while not leafreached:
if (not currentnode[1]) and (not currentnode[2]):
leafreached = True
else:
nextbit = bs.BitArray(uint = readbits(1), length = 1)
if not nextbit[0]:
currentnode = currentnode[1]
else:
currentnode = currentnode[2]
dist_code = currentnode[0]
print("Coded distance: " + str(dist_code))
num_extrabits = defl.dist_code_num_extrabits(dist_code)
if num_extrabits != 0:
extrabits = readbits(num_extrabits)
else:
extrabits = -1
print("Distance extra bits: " + str(extrabits))
dist = defl.dist_decode(dist_code, extrabits)
print("Decoded distance: " + str(dist))
distances.append(dist)
print(extrabits)
print(lls)
print(distances)
|
# EXERCISE_5 WORK OF THE BOOK :
num = int(input("Enter the number: "))
print("The number is:", num)
print("Square of the number:", num * num)
|
# Generated by Django 2.1.4 on 2019-01-17 09:12
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('base', '0011_auto_20190117_0108'),
]
operations = [
migrations.RemoveField(
model_name='householdmembership',
name='created_by',
),
migrations.RemoveField(
model_name='householdmembership',
name='household',
),
migrations.RemoveField(
model_name='householdmembership',
name='member',
),
migrations.RemoveField(
model_name='householdmembership',
name='modified_by',
),
migrations.RemoveField(
model_name='householdmembership',
name='type',
),
migrations.RemoveField(
model_name='householdmembershiptype',
name='created_by',
),
migrations.RemoveField(
model_name='householdmembershiptype',
name='modified_by',
),
migrations.DeleteModel(
name='HouseholdMembership',
),
migrations.DeleteModel(
name='HouseholdMembershipType',
),
]
|
import mechanicalsoup
url = "http://olympus.realpython.org/login"
browser = mechanicalsoup.Browser()
page = browser.get(url)
html = page.soup
form = html.select("form")[0]
form.select("input")[0]["value"] = "zeus"
form.select("input")[1]["value"] = "ThunderDude"
profiles_page = browser.submit(form, page.url)
print(profiles_page.url)
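# The same login with mechanicalsoup's higher-level StatefulBrowser API
# (a sketch; it assumes the form's input names are "user" and "pwd"):
stateful = mechanicalsoup.StatefulBrowser()
stateful.open(url)
stateful.select_form("form")
stateful["user"] = "zeus"
stateful["pwd"] = "ThunderDude"
stateful.submit_selected()
print(stateful.get_url())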
|
import dill
# from aggregator import Data_Cleaner
def pickle_object(object_to_pickle, name):
'''
This function pickles the object with the name given.
'''
    with open(name, 'wb') as savefile:
        dill.dump(object_to_pickle, savefile, protocol=2)
def load_pickled_object(filename):
    '''
    Loads and returns the object pickled in the given file.
    '''
    # Pickles are binary, so the file must be opened in 'rb' mode
    with open(filename, 'rb') as savefile:
        return dill.load(savefile)
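# A minimal round-trip sketch of the two helpers above:
# pickle_object({'answer': 42}, 'example.pkl')
# assert load_pickled_object('example.pkl') == {'answer': 42}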
# section = 'sta132e-wd'
# path = '/Users/darrenreger/Documents/Final Cleaner-3/'
# all_data = Data_Cleaner(path=path)
# all_data.add_gps_coords(section)
# all_data.add_vehicles_to_section(section)
|
from django.contrib import admin
from visitations.models import Visitation
# Register your models here.
admin.site.register(Visitation)
|
'''
Given a .sam file extract the mapping of sequence to haplotype.
'''
import argparse
import re
haplotype = {}
def read_contigs(contig_file_name):
with open(contig_file_name, 'r') as contig_file:
for line in contig_file:
            found = re.search(r"(^seq[^\s]+)\s[^\s]+\s([^\s]+)", line)
if found:
haplotype[found.group(1)] = found.group(2)
return haplotype
def write_haplotypes(output_file_name):
    with open(output_file_name, 'w') as out_file:
        # Rename the loop variable so it doesn't shadow the module-level dict
        for sequence, hap in haplotype.items():
            out_file.write("{0}\t{1}\n".format(sequence, hap))
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("contigs")
parser.add_argument("mapping")
args = parser.parse_args()
print(args)
read_contigs(args.contigs)
write_haplotypes(args.mapping)
|
import sys
def min_heapify(heap, start, end):
root = start
left = 2 * root + 1
right = 2 * root + 2
if left < end and heap[root] > heap[left]:
root = left
if right < end and heap[root] > heap[right]:
root = right
if start != root:
heap[start], heap[root] = heap[root], heap[start]
min_heapify(heap, root, end)
def k_largest_numbers_with_heap(nums, k):
heap = nums[0:k]
    # build a min-heap over the first k elements
for i in range(int(k/2-1), -1, -1):
min_heapify(heap, i, k)
# select k largest numbers
for i in range(k, len(nums)):
if nums[i] > heap[0]:
nums[i], heap[0] = heap[0], nums[i]
min_heapify(heap, 0, k)
return heap
def partition_desc(nums, start, end):
i = start
j = end - 1
p = nums[end]
while i <= j:
while nums[i] > p:
i += 1
while nums[j] < p:
j -= 1
if i <= j:
nums[i], nums[j] = nums[j], nums[i]
i += 1
j -= 1
nums[i], nums[end] = nums[end], nums[i]
return i
def quick_select_desc(nums, start, end, k):
if k > 0 and k <= (end-start+1):
idx = partition_desc(nums, start, end)
if idx - start + 1 == k:
return idx
if idx - start + 1 > k:
return quick_select_desc(nums, start, idx - 1, k)
return quick_select_desc(nums, idx+1, end, k-(idx-start+1))
else:
return float('inf')
def k_largest_numbers_with_quick_select(nums, k):
idx = quick_select_desc(nums, 0, len(nums) - 1, k)
if idx < 0 or idx > len(nums)-1:
return []
return nums[:k]
def max_heapify(heap, start, end):
root = start
left = 2 * root + 1
right = 2 * root + 2
if left < end and heap[root] < heap[left]:
root = left
if right < end and heap[root] < heap[right]:
root = right
if start != root:
heap[root], heap[start] = heap[start], heap[root]
max_heapify(heap, root, end)
def k_smallest_numbers_with_heap(nums, k):
heap = nums[:k]
# build max heap
for i in range(int(k / 2) - 1, -1, -1):
max_heapify(heap, i, k)
for i in range(k, len(nums)):
if nums[i] < heap[0]:
heap[0], nums[i] = nums[i], heap[0]
max_heapify(heap, 0, k)
return heap
def partition_asce(nums, start, end):
i = start
j = end - 1
p = nums[end]
while i <= j:
while nums[i] < p:
i += 1
while nums[j] > p:
j -= 1
if i <= j:
nums[i], nums[j] = nums[j], nums[i]
i += 1
j -= 1
nums[i], nums[end] = nums[end], nums[i]
return i
def quick_select_asce(nums, start, end, k):
if k > 0 and k <= (end - start + 1):
idx = partition_asce(nums, start, end)
if idx - start + 1 == k:
return idx
if idx - start + 1 > k:
return quick_select_asce(nums, start, idx - 1, k)
return quick_select_asce(nums, idx+1, end, k-(idx-start+1))
return float('inf')
def k_smallest_numbers_with_quick_select(nums, k):
idx = quick_select_asce(nums, 0, len(nums) - 1, k)
if idx < 0 or idx > len(nums) - 1:
return []
return nums[:k]
if __name__ == "__main__":
nums = [1, 2, 3, 5, 6, 3, 2, 1, 5, 6, 87, 12, 4, 55, 32]
k = 5
h = k_largest_numbers_with_heap(nums, k)
print(h)
nums = [1, 2, 3, 5, 6, 3, 2, 1, 5, 6, 87, 12, 4, 55, 32]
h = k_largest_numbers_with_quick_select(nums, k)
print(nums)
print(h)
nums = [1, 2, 3, 5, 6, 3, 2, 1, 5, 6, 87, 12, 4, 55, 32]
k = 5
h = k_smallest_numbers_with_heap(nums, k)
print(h)
nums = [1, 2, 3, 5, 6, 3, 2, 1, 5, 6, 87, 12, 4, 55, 32]
h = k_smallest_numbers_with_quick_select(nums, k)
print(nums)
print(h)
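    # Cross-check against the standard library (order aside, the selections
    # should agree with heapq's nlargest/nsmallest):
    import heapq
    nums = [1, 2, 3, 5, 6, 3, 2, 1, 5, 6, 87, 12, 4, 55, 32]
    print(sorted(heapq.nlargest(k, nums)))
    print(sorted(heapq.nsmallest(k, nums)))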
|
from __future__ import absolute_import, unicode_literals
from datetime import timezone
import os
from celery import Celery
from celery.schedules import schedule
from django.conf import settings
# from celery.schedules import crontab
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'stock_tracker.settings')
app = Celery('stock_tracker')
app.conf.enable_utc = False
app.conf.update(timezone='Asia/Kolkata')
app.config_from_object(settings, namespace='CELERY')
app.conf.beat_schedule = {
# 'every-10-seconds' : {
# 'task': 'mainapp.tasks.update_stock',
# 'schedule': 10,
# 'args':(['RELIANCE.NS', 'BAJAJFINSV.NS'],)
# }
}
app.autodiscover_tasks()
@app.task(bind=True)
def debug_task(self):
    print(f'Request: {self.request!r}')
|
import os
input_path = os.path.join(os.path.dirname(__file__), 'input.txt')
with open(input_path) as file:
    original_polymer = file.read().strip()
def react(polymer):
    # A stack gives a correct single pass: a unit reacts with the top of the
    # stack when it is the same letter in the opposite case. (The original
    # in-place deletion desynchronized the loop index from the string.)
    stack = []
    for char in polymer:
        if stack and stack[-1] == char.swapcase():
            stack.pop()
        else:
            stack.append(char)
    return ''.join(stack)
part1 = react(original_polymer)
print "Part 1: Length of polymer after reactions: %s" % len(part1)
part2 = min(((len(react(part1.replace(agent, '').replace(agent.upper(), ''))), agent) for agent in
set(x.lower() for x in part1)),
key=lambda x: x[0])
print "Part 2: Minimum length is %d after removing %s/%s" % (part2[0], part2[1].upper(), part2[1])
from functools import reduce
# Bonus solution using reduce
def should_react(x, y):
return False if not x else x[-1].swapcase() == y
print "Part 1 solved using reduce: %d" % \
len(reduce((lambda x, y: x[:-1] if should_react(x, y) else x+y), original_polymer))
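# Sanity check with the well-known example polymer (a minimal sketch):
assert react('dabAcCaCBAcCcaDA') == 'dabCBAcaDA'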
|
import time
from django.core.management.base import BaseCommand
from django.db.models import Sum
from django.template.defaultfilters import filesizeformat
from ... import scan
from ... import models
class Command(BaseCommand):
help="Scan the filesystem for changes and update the cache database"
def handle(self, *args, **kwargs):
t1 = time.time()
scan.scan(progress=True)
t2 = time.time()
self.stderr.write("Scanned {} entries in {:.2f} seconds".format(
models.FSEntry.objects.count(),
t2-t1,
))
to_backup = models.FSEntry.objects.filter(obj__isnull=True)
self.stderr.write("Need to back up {} files and directories "
"totaling {}".format(
to_backup.count(),
filesizeformat(
to_backup.aggregate(size=Sum("st_size"))['size']
)
))
clean = models.FSEntry.objects.filter(obj__isnull=False)
self.stderr.write("{} files ({}) unchanged".format(
clean.count(),
filesizeformat(
clean.aggregate(size=Sum("st_size"))['size']
)
))
|
import urllib3
import json
from urllib3 import HTTPConnectionPool
query = {
"query": {
"bool": {
"must": [
{
"match_phrase": {
"send_dy": "7850858542"
}
},
{
"match_phrase": {
"send_tm": "20211009"
}
},
{
"match_phrase": {
"trc_no": "005007"
}
}
]
}
}
}
encoded_data = json.dumps(query).encode('utf8')
es_connection_pool = HTTPConnectionPool("172.22.235.69", port=9200, maxsize=100)
headers = urllib3.make_headers(basic_auth='elastic:Xjaqmffj12#')
headers['Content-Type'] = 'application/json'
response = es_connection_pool.request(
'GET',
'/_search',
body=encoded_data,
headers=headers
)
search_response_data = json.loads(response.data)
print(search_response_data)
print(search_response_data['took'])
# search_response_data['took'] is an int: the query time in milliseconds
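# Matched documents live under hits.hits in the standard Elasticsearch
# response shape; a minimal sketch of pulling them out:
for hit in search_response_data.get('hits', {}).get('hits', []):
    print(hit['_id'], hit['_source'])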
|
cols=['a','b','c']
res_cols=[1,2,3,4,5,6,7]
li=[]
mapping = {'surface': cols[0],'base': res_cols[6],'pos': res_cols[0],'pos1': res_cols[1]}
li.append(mapping)
mapping = {'surface': cols[1],'base': res_cols[5],'pos': res_cols[1],'pos1': res_cols[2]}
li.append(mapping)
print(li)
|
# coding: utf-8
from ansible.module_utils.basic import *
import os
def main():
module = AnsibleModule(argument_spec=dict(args=dict(required=True)))
args = module.params['args']
try:
res = os.popen('echo {0}'.format(args)).read().strip()
res_json = dict(echo=res, changed=False, stdout_lines=res.split('\n'))
module.exit_json(**res_json)
except Exception as e:
module.fail_json('error: %s' % e)
if __name__ == '__main__':
main()
"""
module.fail_json(msg="error happened")
# 注意 module.fail_json 会造成ansibel-playbook 停止工作
res_json = dict(echo=res, changed=True)
res_json 中的值会被输出到 stdout
1. 其中的值可能会被ansible 重写
2. 其中的值会被ansible 直接使用
3. 在里面定义自己的值可以在playbook中被使用于逻辑判断, 先register 再 when判断
"""
|
# -*- coding: UTF-8 -*-
'''
Created on 20171031
@author: leochechen
@Summary: One Worker per connected client; workers are implemented with Python threads.
'''
import os
import pickle
import argparse
import traceback
import threading
from threading import Thread
from operator import itemgetter
from protocol import Command
from ctf_local import CTFWorkerLocal, CTFGlobal, lock_self
from parse import VarMap, ParseHTMLTeamplate, convert_str
from control import IControl
from context import IContext
from collection import ICollection
from exptions import *
class Worker(Thread):
'''
    CTF server interaction thread; the abstract worker.
'''
def __init__(self, server, sock, addr):
super(Worker, self).__init__()
self.server, self.sock, (self.client, self.port) = server, sock, addr
        # Used to fetch this worker's context instances
self.get_varmap = None
self.get_ctx = None
self.get_collection = None
self.cmd = {}
self.env = {}
self.serial = ""
# project root
self.root = ""
# options
self.filename = ""
self.html = ""
self.opt = ""
# case pid
self.pid = ""
# exception
self.exitcode = 0
self.exception = None
self.exc_traceback = ''
self.server.log_info("ACCEPT:({0},{1}) connect now...".format(self.client, self.port))
@property
def log_directory(self):
return self.env['TESTCASE']['report']['log']
@property
def html_directory(self):
return self.log_directory
def run(self):
try:
CTFWorkerLocal.worker = self
# start work
cmd, data = self.recv_command()
if cmd == Command.CTF_START:
self.ctf_start(data)
else:
raise CTFTestServerError("CTF Server 启动出错,Code:{} Data:{}".format(cmd, convert_str(data)))
except Exception, e:
self.exitcode = 1
self.exception = e
self.exc_traceback = traceback.format_exc()
self.server.log_info("current thread {0}:{1}".format(threading.currentThread().getName(), self.exc_traceback))
self.send_command(Command.RECV_MESSAGE, "{0}:\n{1}".format(type(e).__name__, e))
finally:
self.send_command(Command.CTF_CLOSE, "")
self.sock.close()
self.server.log_info("ACCEPT:({0},{1}) close now...".format(self.client, self.port))
def ctf_start(self, data):
self.cmd, self.serial, self.env = pickle.loads(data['cmd']), data['serial'], data['env']
        # project root directory
self.root = os.path.dirname(self.cmd[0])
        # parse the command line
pargs = self.parse_command_line(self.cmd[1:], encode=self.env['LANGUAGE_ENCODE'],
decode=self.env['LANGUAGE_DECODE'])
        # parse the run mode
self.parse_opt(pargs)
        # get the configured pid
self.pid = pargs.pid
        # fetch the html files to run
for filename, html in self.parse_html(pargs):
self.run_html(filename, html)
def run_html(self, filename, html):
'''
        Run a single specified html file.
        :param filename: name of the html file
        :param html: string containing the full html
:return:
'''
try:
self.filename, self.html = filename, html
            # parse the html into the varmap data structure
ParseHTMLTeamplate.load_html(self.html)
            # get the context manager required at runtime
self.get_ctx = IContext(opt=self.opt, db=None)
            # get the exception-detection module for runtime steps
self.get_collection = ICollection()
IControl().start(self.pid)
        except Exception, ex:
            # Report the error to the client before re-raising; the original
            # order made send_command unreachable.
            self.send_command(Command.RECV_MESSAGE, str(ex))
            raise
def parse_command_line(self, args, encode, decode):
parser = argparse.ArgumentParser("ctf")
        # a ctf command must specify one or more html files
parser.add_argument('-html', help='which html will run', action='append', default=[], dest="html",
type=lambda bytestring: bytestring.decode(decode).encode(encode))
        # the six ctf run modes
parser.add_argument('-vid', help='run the specified case in specified xml', dest="vid", type=str)
parser.add_argument('-set', help='run the cases which have the same set value in specified xml', dest="set", type=str)
parser.add_argument('-lvl', help='run the cases which have the same level value in specified xml', dest='lvl', type=str)
parser.add_argument('-all', help='run the all cases in specified xml', dest="all", action='store_true')
parser.add_argument('-section', help="run the cases which is in the same section", dest="section", type=str)
parser.add_argument('-dir', help='run all xmls in the directory', dest="dir", action='store_true')
        # ctf extension commands
parser.add_argument('-pid', help="run the case which num is pid", dest="pid", type=int)
parser.add_argument('-case', help='print the cases in specified xml', dest="cases", action='store_true')
parser.add_argument('-version', help="print the ctf version and information about author", dest="version", action='store_true')
parser.add_argument('-serial', dest="serial", help="adb devices android mobile serial", type=str)
return parser.parse_args(args=args)
def parse_opt(self, pargs):
'''
        Parse the run mode from the parsed command line.
        :param pargs: parsed command-line namespace
:return:
'''
opts = [("v", "vid"),
("s", "set"),
("l", "lvl"),
("st", "section"),
("a", "all")]
reminder = "CTF现有运行方式 {}".format(",".join(["|".join(_) for _ in opts]))
_opt = filter(itemgetter(1), [("v", pargs.vid), ("s", pargs.set), ("l", pargs.lvl),
("st", pargs.section), ("a", pargs.all)])
if len(_opt) == 0:
raise EnvironmentError("CTF命令必须指定一种运行方式:{}".format(reminder))
elif len(_opt) > 1:
raise EnvironmentError("CTF命令中只能含有一种运行方式:{}".format(reminder))
elif len(_opt) == 1:
_opt = itemgetter(0)(_opt)
_opt = ("a", "") if _opt[0] == "a" \
else _opt
self.opt = _opt
def parse_html(self, pargs):
'''
        Resolve the html files to run from the command line.
        :param pargs: parsed command-line namespace
:return:
'''
def load_xml(f):
if '.html' not in f:
_file = f + ".html"
filename = f
else:
filename, suffix = os.path.splitext(f)
_file = f
self.send_command(Command.LOAD_HTML, {
'html': _file,
'kwargs': pargs.__dict__
})
data = self.wait_client()
return convert_str(filename), convert_str(data['html'])
if pargs.dir:
self.send_command(Command.LOAD_DIRECTORY, {
'directory': self.env['TESTCASE']['workspace']
})
data = self.wait_client()
return [load_xml(filename) for filename in data['xmls']]
elif pargs.html:
return [load_xml(filename) for filename in pargs.html]
else:
raise EnvironmentError("需指定一张或者多张html")
def send_command(self, cmd, params):
'''
        Send one command to the client.
        :param cmd: command
        :param params: payload data
:return:
'''
self.server.send_data(self.sock, {
'CTFCMD': cmd,
'CTFDATA': params
})
def recv_command(self):
'''
        Receive one command from the client.
:return:
'''
ret = self.server.recv_data(self.sock)
return ret['CTFCMD'], ret['CTFDATA']
def wait_client(self):
'''
        Commands the server can handle and answer while waiting for the client's result.
:return:
'''
while True:
cmd, data = self.recv_command()
if cmd == Command.SUCCESS:
return data
elif cmd == Command.ERROR:
raise Exception(data)
elif cmd == Command.RECV_MESSAGE:
self.get_ctx.alw(data)
elif cmd == Command.GET_VAR_RECORD:
content = self.get_ctx.Record[convert_str(data)]
self.send_command(Command.RECV_MESSAGE, content)
elif cmd == Command.GET_TESTCASE_INFO:
self.send_command(Command.RECV_MESSAGE, {
"attrs": self.get_ctx.Record.attributes,
"html": self.filename,
"env": self.env
})
elif cmd == Command.REG_CALLBACK:
self.get_collection.callback.register_from_client(data)
self.send_command(Command.RECV_MESSAGE, "")
elif cmd == Command.RECV_IMG_SRC:
self.get_collection.receive_img_src_from_client(data)
self.send_command(Command.RECV_MESSAGE, "")
|
#! /usr/bin/env python3
import numpy as np
import matplotlib.pyplot as plt
import sys
from scipy.optimize import minimize
from mpl_toolkits.mplot3d import Axes3D
from mpl_toolkits.mplot3d.art3d import Poly3DCollection
#GLOBAL CONSTANTS - conversion factors
degToRad = np.pi/180
radToDeg = 180.0/np.pi
PoundsToNewtons = 4.4482216282509
NewtonsTokN = 1.0/1000
inchesToM = 0.0254
mToInches = 39.3701
psiToN_msq = 6894.76 #psi to Newtons per meter squared
#ZYX Euler Angle Rotation Matrix function
#Left-handed rotation after Z,Y,X right-handed rotation
def zyx_euler_angles_to_mat(alpha, beta, gamma):
"""
Converts ZYX Euler angles (rotation about z, then resulting y, then resulting x)
into a rotation matrix.
Args:
alpha (float): angle in radians to rotate about z
beta (float): angle in radians to rotate about y
gamma (float): angle in radians to rotate about x
Returns:
3x3 numpy array that is a rotation matrix
"""
ca = np.cos(alpha)
sa = np.sin(alpha)
cb = np.cos(beta)
sb = np.sin(beta)
cg = np.cos(gamma)
sg = np.sin(gamma)
return np.array([[ca*cb, ca*sb*sg-sa*cg, ca*sb*cg+sa*sg],
[sa*cb, sa*sb*sg+ca*cg, sa*sb*cg-ca*sg],
[-sb , cb*sg , cb*cg ]])
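# Quick sanity check (a minimal sketch, not part of the original analysis):
# a proper rotation matrix is orthonormal with determinant 1.
_R_check = zyx_euler_angles_to_mat(0.3, -0.7, 1.1)
assert np.allclose(np.dot(_R_check, _R_check.T), np.eye(3))
assert np.isclose(np.linalg.det(_R_check), 1.0)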
class HydraulicCylinder():
def __init__(self, CylinderType):
#choice: hydraulic cylinder brand
assert CylinderType == "BAILEY" or CylinderType == "WOLVERINE", "The hydraulic cylinder brand is invalid."
if CylinderType == "BAILEY":
# Bailey Hydraulics (can be customized)
boreD = 1.5*inchesToM # m
rodD = 1.0*inchesToM # m
maxP = 5000.0*psiToN_msq # N/m^2
wallThickness = 0.25*inchesToM; # m
cylinderOD = boreD + wallThickness * 2; # m
self.retractLength = 10.0*inchesToM # m means the shortest length for the hydraulic cylinder
self.stroke = 4.0*inchesToM # m
#capability:
# max hydraulic force is 39.303282 kN. (max pushing load)
# max hydraulic force on the side of rod is 21.835157 kN. (max pulling load)
else: # choice == "WOLVERINE"
# WWSB1004-S bore x Stroke = 1 in. x 4 in.
boreD = 1.0*inchesToM # m
rodD = 0.625*inchesToM # m
maxP = 3000.0*psiToN_msq # N/m^2
cylinderOD = 1.5*inchesToM # m
self.retractLength = 10.0*inchesToM # m. means the shortest length for the hydraulic cylinder
self.stroke = 4.0*inchesToM # m
# capability:
# max hydraulic force is 10.480875 kN. (max pushing load)
# max hydraulic force on the side of rod is 6.386783 kN. (max pulling load)
# calculate the maximum force on both sides of double acting hydraulic cylinders
self.maxPushingForce = np.pi*boreD**2/4*maxP*NewtonsTokN # kN
self.maxPullingForce = np.pi*(boreD**2 - rodD**2)/4*maxP*NewtonsTokN # kN
print("\nmax hydraulic pushing force per cylinder is %f kN." % self.maxPushingForce)
print("max hydraulic pulling force per cylinder is %f kN.\n" % self.maxPullingForce)
class StewartPlatform():
def __init__(self,r_base, r_platform, seperationAngle, PlatformTwist, Cylinder):
#Store cylinder object and extract properties
self.Cylinder = Cylinder
self.lmin = Cylinder.retractLength #m, min length of actuators
self.lmax = Cylinder.retractLength + Cylinder.stroke #m, max length of actuators
self.Fmax = Cylinder.maxPushingForce #kN
self.Fmin = -Cylinder.maxPullingForce #kN
#Intrinsic for a stewart platform. Gets rid of magic numbers though
self.numActuators = 6
#Calculate actuator connection points on base (in base frame) (store as list of np vectors)
self.base_joint_pos = []
        for i in range(self.numActuators // 2):  # integer division for Python 3
theta1 = (120*i*degToRad + seperationAngle/2)
theta2 = (120*i*degToRad - seperationAngle/2)
vec1 = np.array([r_base*np.cos(theta1), r_base*np.sin(theta1), 0])
vec2 = np.array([r_base*np.cos(theta2), r_base*np.sin(theta2), 0])
self.base_joint_pos.append(vec1)
self.base_joint_pos.append(vec2)
#Calculate actuator connection points on platform (in platform frame) (store as list of np vectors)
self.platform_joint_pos = []
        for i in range(self.numActuators // 2):  # integer division for Python 3
theta1 = (120*i*degToRad - seperationAngle/2 + PlatformTwist)
theta2 = (120*i*degToRad + seperationAngle/2 - PlatformTwist)
vec1 = np.array([r_platform*np.cos(theta1), r_platform*np.sin(theta1), 0])
vec2 = np.array([r_platform*np.cos(theta2), r_platform*np.sin(theta2), 0])
self.platform_joint_pos.append(vec1)
self.platform_joint_pos.append(vec2)
#Initialize empty containers for other variables
self.Jacobian = np.zeros((self.numActuators, self.numActuators))
self.q = np.zeros(self.numActuators) #Actuator lengths (since they are prismatic)
self.tau = np.zeros(self.numActuators) #Forces exerted by the actuators
'''Computes inverse kinematics and updates relevant object variables.
Returns True if position is within range of joint limits, otherwise returns
False. '''
def InverseKinematics(self, platformPos_b, EulerAngles):
#Extract our Euler Angles
Alpha = EulerAngles[0]
Beta = EulerAngles[1]
Gamma = EulerAngles[2]
#Compute Rotation Matrix (Left handed rotation from platform frame to base frame)
R_p_b = zyx_euler_angles_to_mat(Alpha, Beta, Gamma)
#Calculate vector from base connection point to platform connection point for each actuator (in base frame)
feasible = True
self.b_l = []
for i in range(self.numActuators):
b_li = platformPos_b+np.dot(R_p_b,self.platform_joint_pos[i]) - self.base_joint_pos[i] #Find vector from bi to pi in base frame
self.b_l.append(b_li) #Store np arrays of position vectors in a list
self.q[i] = np.linalg.norm(b_li) #Store norm of vector
#Check joint limits. If joint limits are exceeded, return false
#and assign -1 to offending joint's position.
if self.q[i] > self.lmax or self.q[i] < self.lmin:
feasible = False
self.q[i] = -1
return feasible
'''Computes Jacobian and updates relevant object variables.
Returns True if position is within range of joint limits, otherwise returns
False. '''
def ComputeJacobian(self, platformPos_b, EulerAngles):
#Extract our Euler Angles
Alpha = EulerAngles[0]
Beta = EulerAngles[1]
Gamma = EulerAngles[2]
#Compute Rotation Matrix (rotates vector from platform frame to base frame)
R_p_b = zyx_euler_angles_to_mat(Alpha, Beta, Gamma)
#Update joint positions using Inverse Kinematics
feasible = self.InverseKinematics(platformPos_b, EulerAngles)
self.Jacobian2 = np.zeros((6,6))
if feasible:
#Update each row of the Jacobian matrix
for i in range(self.numActuators):
self.Jacobian[i,:] = (1.0/self.q[i]*
np.append(self.b_l[i], np.cross(np.dot(R_p_b,self.platform_joint_pos[i]),
(platformPos_b-self.base_joint_pos[i]))) )
#Check against alternate formulation
unit_vec = self.b_l[i]/np.linalg.norm(self.b_l[i])
self.Jacobian2[i,:] = np.append(unit_vec, np.cross(np.dot(R_p_b, self.platform_joint_pos[i]), unit_vec))
return True
else:
return False
'''Computes the workspace of the platform for a certain Euler
Angle orientation'''
def GeometricWorkspace(self, yRange, zRange, resolution, EulerAngles):
yMin, yMax = yRange
zMin, zMax = zRange
pY = np.array([])
pZ = np.array([])
for pos_y in np.arange(yMin, yMax + resolution, resolution):
for pos_z in np.arange(zMin, zMax + resolution, resolution):
# Platform position
platformPos = np.array([0, pos_y, pos_z])
#See if the position and orientation is feasible
feasible = self.InverseKinematics(platformPos, EulerAngles)
if feasible:
pY = np.append(pY, pos_y)
pZ = np.append(pZ, pos_z)
return pY, pZ
'''Calculate the max force and torque the platform can resist in a certain
configuration. Force direction is given, and constant torque term is given '''
def MaxLoad(self,platformPos_b, EulerAngles, appliedForceDirection_p, appliedTorque_p):
#Run a constrained optimization routine
# - Magnitude of actuator forces are design variables
# - norm of actuator torques is objective function
# - Actuator limits are the constraints
#Extract our Euler Angles
Alpha = EulerAngles[0]
Beta = EulerAngles[1]
Gamma = EulerAngles[2]
#Compute Rotation Matrix (rotates vector from platform frame to base frame)
R_p_b = zyx_euler_angles_to_mat(Alpha, Beta, Gamma)
#Initial condition
x0 = 0
#Constraints
def lowerActuatorLimit(x):
#Put together combined force and torque vector on the platform as seen in the base frame
forceOnPlatform_b = (np.append(np.dot(R_p_b,appliedForceDirection_p*-x),
np.dot(R_p_b,appliedTorque_p)) )
#Compute the needed actuator torques to support the force on platform
tau = self.ComputeTorquesUnderLoad(platformPos_b, EulerAngles, forceOnPlatform_b)
return tau - self.Fmin
def upperActuatorLimit(x):
#Put together combined force and torque vector on the platform as seen in the base frame
forceOnPlatform_b = (np.append(np.dot(R_p_b,appliedForceDirection_p*-x),
np.dot(R_p_b,appliedTorque_p)) )
tau = self.ComputeTorquesUnderLoad(platformPos_b, EulerAngles, forceOnPlatform_b)
return self.Fmax - tau
ineq_cons1 = {'type': 'ineq',
'fun' : lowerActuatorLimit}
ineq_cons2 = {'type': 'ineq',
'fun' : upperActuatorLimit}
#Define our objective function
def obj(x):
return -x
#forceOnPlatform_b = (np.append(np.dot(R_p_b,appliedForceDirection_p*-x),
# np.dot(R_p_b,appliedTorque_p)) )
#return -np.linalg.norm(self.ComputeTorquesUnderLoad(platformPos_b, EulerAngles, forceOnPlatform_b))
#Run optimization routine
res = minimize(obj, x0, method='SLSQP',constraints = [ineq_cons1,ineq_cons2],
options={'ftol': 1e-9, 'disp': False})
maxF = res.x[0]
forceOnPlatform_b = (np.append(np.dot(R_p_b,appliedForceDirection_p*-res.x[0]),
np.dot(R_p_b,appliedTorque_p)) )
tau = self.ComputeTorquesUnderLoad(platformPos_b, EulerAngles, forceOnPlatform_b)
return maxF, tau
'''Calculate the torques on all prismatic joints necessary to
withstand a certain force and torque applied at the platform's center.
INPUT: forceOnPlatform_b is a np array that either has three elements (xyz
forces) or six elements (xyz forces and torques).'''
def ComputeTorquesUnderLoad(self, platformPos, EulerAngles, forceOnPlatform_b):
# J.T * tau = F -> tau = inv(J.T) * F
# J (6, 6) matrix in base frame
#If we are just given the force on the platform, augment the vector
#to include the zero torque vector
if forceOnPlatform_b.shape[0] == 3:
forceOnPlatform_b = np.append(forceOnPlatform_b, np.array((0,0,0)) )
#Update Jacobian and ensure configuration is feasible
feasible = self.ComputeJacobian(platformPos, EulerAngles)
if feasible:
invJ_T = np.linalg.pinv(self.Jacobian.T)
# torques (push forces) on prismatic joints of all hydraulic cylinders
self.tau = np.dot(invJ_T, forceOnPlatform_b)
return self.tau
else:
return None
def WorkspaceUnderLoad(self, yRange, zRange, resolution, EulerAngles, appliedForceDirection, appliedTorque):
# need to first do geometric workspace
# the point should first satisfy the geometric constraints and then meet the needs of hydraulic forces
pYGeometric, pZGeometric = self.GeometricWorkspace(yRange, zRange, resolution, EulerAngles)
assert pYGeometric.shape[0] == pZGeometric.shape[0], "The shape of pYGeometric and the shape of pZGeometric are not the same."
#Stack points of interest into 2xn array
p = np.vstack((pYGeometric, pZGeometric))
MaxForces = []
for i in range(p.shape[1]):
platformPos = np.array([0, p[0,i], p[1,i]])
maxF, tau = self.MaxLoad(platformPos, EulerAngles, appliedForceDirection, appliedTorque)
MaxForces.append(maxF)
pY = p[0,:]
pZ = p[1,:]
return pY, pZ, MaxForces
if __name__ == '__main__':
########################################
#----------Problem Parameters----------#
########################################
#Plotting Parameters
PlotArrangement = 0
PlotWorkspace = 0
#Computing Parameters
ComputeLoadWorkspace = 0
#Type of hydraulic cylinder (BAILEY or WOLVERINE)
HydraulicType = "BAILEY"
#---Stewart Platform parameters----
r_base = 6.54*inchesToM #radius of base mounting circle (m)
r_platform = 6.54/2*inchesToM #radius of platform mounting circle (m)
s = 46.0*degToRad #Separation angle between actuators on triangle corners
PlatformTwist = 60.0*degToRad
###########################################################################
#----------Applied Force and Configuration Parameters----------------------
###########################################################################
#----(1) Determine max force magnitude in given direction with a given torque---
#Force direction angles
alpha = 0.0*degToRad #Rotation about z in platform frame
beta = (90.0+0.0)*degToRad #Rotation about x in platform frame
#Unit vector of applied force (in platform frame)
appliedForceDirection_p = np.array([np.cos(alpha)*np.cos(beta),np.sin(alpha)*np.cos(beta),-np.sin(beta)])
#Applied torque vector (in platform frame)
appliedTorque_p = np.array([0,0,600])*NewtonsTokN #Need to convert to kN.m to keep things consistent
#---(2) Determine actuator forces needed to balance a given WOB, TOB, and disturbance force/torque---
WOB = np.array([0,0,-10])
TOB = np.array([0,0,-0.6])
Disturbance = np.array([-1,0,0])
#----Configuration To Test------
#Translational difference between base frame and platform frame (T) (seen in base frame)
T = np.array([1.0, 0 , 12.5])*inchesToM #(x,y,z), m
#Euler angles between base frame and platform frame (z,y,x) (From base frame)
Alpha = 0.0*degToRad #Z rotation (twist)
Beta = 0.0*degToRad #Y rotation (tilt)
Gamma = 7.0*degToRad #X rotation (tilt)
EulerAngles = np.array([Alpha,Beta,Gamma])
###########################################################################
###########################################################################
#-----Workspace Analysis parameters-----
yRange = np.array([-5, 5])*inchesToM # m
zRange = np.array([9, 14.5])*inchesToM # m
resolution = 0.15*inchesToM # m
#############################################
##---------------TESTING-------------------##
#############################################
#Max force that can be applied in zero configuration
MAX_F = 235.3
#Define our cylinder and Stewart Platform objects
Cylinder = HydraulicCylinder(HydraulicType)
SP = StewartPlatform(r_base, r_platform, s, PlatformTwist, Cylinder)
#Determine if configuration defined by T and EulerAngles is feasible
configFeasible = SP.InverseKinematics(T,EulerAngles)
#Compute workspace for Constant platform orientation
pY, pZ = SP.GeometricWorkspace(yRange, zRange, resolution, EulerAngles)
#Determine actuator forces needed to balance a given WOB, TOB, and disturbance/disturbance-induced torque
R_p_b = zyx_euler_angles_to_mat(Alpha, Beta, Gamma)
forceOnPlatform_b = np.append(np.dot(R_p_b,(WOB+Disturbance)), np.dot(R_p_b,(TOB-np.array([0,Disturbance[0]*(2*inchesToM),0]))))
tau = SP.ComputeTorquesUnderLoad(T, EulerAngles, -forceOnPlatform_b)
#Compute the max Force magnitude (F) that the given configuration can balance.
#Compute the actuators forces needed to do this.
maxF, maxTau = SP.MaxLoad(T,EulerAngles,appliedForceDirection_p, appliedTorque_p)
#############################################
##--------------PRINT RESULTS--------------##
#############################################
np.set_printoptions(precision=2)
print "\n--------TEST-----------\n"
print("Base to platform displacement (x,y,z) (in):")
print(T*mToInches)
print("\nEuler Angles, (z,y,x) (deg):")
print(EulerAngles*radToDeg)
print("\nConfiguration feasible?")
print(configFeasible)
print("\nApplied force orientation (in platform frame):")
print("alpha (z rotation, deg)")
print(alpha*radToDeg)
print("beta (x rotation, deg)")
print(90-beta*radToDeg)
print("\nmaxF in configuration is (kN):")
print(np.array([maxF]))
print(np.array([maxF])/MAX_F) #Fraction of maxF at zero configuration
print("\nActuator forces to achieve maxF are (kN):")
print(maxTau)
#Print tau as the fraction of total available force in that direction
fractionTau = []
for force in maxTau:
if force > 0:
fractionTau.append(force/Cylinder.maxPushingForce)
elif force < 0:
fractionTau.append(force/Cylinder.maxPullingForce)
print(np.array(fractionTau))
print("\n--------------------\n")
print("WOB, TOB, Disturbance (kN): ")
print(WOB)
print(TOB)
print(Disturbance)
print("Total Force/Torque on platform (p_frame)")
print(forceOnPlatform_b)
print("Actuator Torques")
print(tau)
fractionTau = []
for force in tau:
if force > 0:
fractionTau.append(force/Cylinder.maxPushingForce)
elif force < 0:
fractionTau.append(force/Cylinder.maxPullingForce)
print(np.array(fractionTau))
#############################################
##---------ERROR CHECKING------------------##
#############################################
#Constraint - Moments must be balanced around the point where the force is applied
p_l_hat = []
R_p_b = zyx_euler_angles_to_mat(Alpha, Beta, Gamma)
for i in range(6):
b_li_hat = SP.b_l[i]/np.linalg.norm(SP.b_l[i]) #Convert to unit vector
p_li_hat = np.dot(R_p_b.T, b_li_hat) #Rotate into platform frame
p_l_hat.append(p_li_hat) #Add to our list
#Obtain actuator forces on platform (Force magnitude (xi) times force unit vectors (p_li_hat))
ActuatorForces = [np.dot(Fi,p_li_hat) for Fi,p_li_hat in zip(maxTau,p_l_hat)]
#Compute the moments about the point of applied force
Moments = [np.cross(pi,Li) for pi,Li in zip(SP.platform_joint_pos,ActuatorForces)]
#Return the sum of x moments (should be zero at equilibrium)
print ("\nThe sum of moments is: ")
print (sum(Moments))
#-------------------------------------------------------------------
if(ComputeLoadWorkspace):
#Compute the Load workspace
pY_F, pZ_F, MaxForces = SP.WorkspaceUnderLoad(yRange, zRange, resolution, EulerAngles, appliedForceDirection_p, appliedTorque_p)
#%%############################
##------PLOTTING-------------##
###############################
yPlotLimit = np.array([-5, 5])
zPlotLimit = np.array([9, 14.5])
#Plot the base actuator points
if(PlotArrangement):
plt.figure(1)
plt.clf()
B = np.array(SP.base_joint_pos)*mToInches
P = np.array(SP.platform_joint_pos)*mToInches
plt.scatter(B[:,0],B[:,1], linewidth = 3)
plt.scatter(P[:,0],P[:,1], linewidth = 3)
plt.xlabel('x (in)')
plt.ylabel('y (in)')
plt.title('Top view of Stewart Platform in Zero Configuration')
plt.legend(('base','platform',''),loc=10)
#Plot actuator connections
for i in range(SP.numActuators):
plt.plot((B[i,0],P[i,0]),(B[i,1],P[i,1]), 'r')
plt.grid(True)
plt.axis('square')
#Plot Constant Orientation Workspace
if(PlotWorkspace):
plt.figure(2)
plt.clf()
plt.title('Constant Orientation Workspace')
plt.scatter(pY*mToInches, pZ*mToInches, marker="o")
plt.xlim(yPlotLimit) # -10.5, 10.5
plt.ylim(zPlotLimit) #6, 14
plt.gca().set_aspect('equal', adjustable='box')
plt.ylabel('z (in)')
plt.xlabel('y (in)')
#%%
if(ComputeLoadWorkspace):
plt.figure(3)
plt.clf()
plt.title('Load Capacity of Constant Orientation Workspace')
sc1 = plt.scatter(pY_F*mToInches, pZ_F*mToInches, c = MaxForces, marker="o")
plt.title("Max Pushing Load")
plt.xlim(yPlotLimit) # yPlotLimit -10.5, 10.5
plt.ylim(zPlotLimit) # zPlotLimit 6, 14
plt.gca().set_aspect('equal', adjustable='box')
plt.xlabel('Y (in)')
plt.ylabel('Z (in)')
clb = plt.colorbar(sc1,fraction=0.03)
clb.ax.set_title('tau (kN)')
#%% Plot platform, base, actuator forces and applied force in 3d
if True:
SP.InverseKinematics(T,EulerAngles)
fig = plt.figure(4)
fig.clf()
ax = fig.add_subplot(2,1,1,projection = '3d')
B = np.array(SP.base_joint_pos).T*mToInches
R_p_b = zyx_euler_angles_to_mat(Alpha, Beta, Gamma)
#Rotate the P points to the base frame orientation
P = np.dot(R_p_b,np.array(SP.platform_joint_pos).T*mToInches)
#Add the offset to the platform frame points
T_in = T*mToInches
P[0,:] = P[0,:] + T_in[0]
P[1,:] = P[1,:] + T_in[1]
P[2,:] = P[2,:] + T_in[2]
#Plot Joint connections
ax.scatter(B[0,:],B[1,:],B[2,:], linewidth = 3)
ax.scatter(P[0,:],P[1,:],P[2,:], linewidth = 3)
#Plot leg lines in red
for i in range(6):
b_l = SP.b_l[i]*mToInches
E = B[:,i] + b_l
ax.plot((B[0,i],E[0]),(B[1,i],E[1]),(B[2,i],E[2]),'r')
#Plot retracted leg lines in blue
r = Cylinder.retractLength*mToInches
for i in range(6):
R = (P[:,i] - B[:,i])
R = R/np.linalg.norm(R)*r
R = B[:,i]+R
ax.plot((B[0,i],R[0]),(B[1,i],R[1]),(B[2,i],R[2]),'b', linewidth = 3.5)
#Plot line from center of base to center of platform
ax.plot((0,T_in[0]),(0,T_in[1]),(0,T_in[2]),'b--')
#Plot actuator force vectors
ActVecs = SP.b_l/np.linalg.norm(SP.b_l, axis=1, keepdims=True) #Unit vector along each leg
scale = 0.4
ActVecs[:,0] = ActVecs[:,0]*SP.tau*scale
ActVecs[:,1] = ActVecs[:,1]*SP.tau*scale
ActVecs[:,2] = ActVecs[:,2]*SP.tau*scale
for i in range(6):
ax.quiver(P[0,i],P[1,i],P[2,i],ActVecs[i,0],ActVecs[i,1],ActVecs[i,2],color='g')
#Reorder B and P for plotting
permutation = [1,0,3,2,5,4]
i = np.argsort(permutation)
B_re = np.hstack((B[:,i],B[:,1].reshape(3,1)))
P_re = np.hstack((P[:,i],P[:,1].reshape(3,1)))
#Plot base and platform perimeter
ax.plot(B_re[0,:],B_re[1,:],B_re[2,:], 'grey', linewidth = 3)
ax.plot(P_re[0,:],P_re[1,:],P_re[2,:], 'grey', linewidth = 3)
#Plot applied force vector (not to scale)
a = 3
u = np.dot(R_p_b,appliedForceDirection_p)*a
T_f = T_in-u
ax.quiver(T_f[0],T_f[1],T_f[2],u[0],u[1],u[2])
#Plot base and platform polygons
PolyCollections = []
PolyCollections.append(Poly3DCollection([B_re.T],facecolor = 'grey', lw = 2,alpha = 0.2))
PolyCollections.append(Poly3DCollection([P_re.T],facecolor = 'grey', lw = 2,alpha = 0.2))
ax.add_collection3d(PolyCollections[0])
ax.add_collection3d(PolyCollections[1])
#Set axis labels
ax.set_title("Stewart Platform")
ax.set_xlabel('X')
ax.set_ylabel('Y')
ax.set_zlabel('Z')
ax.axis('square')
ax.view_init(25,-15)
#-----------Second subplot (XZ Plane)----------------------------
ax = fig.add_subplot(2,1,2)
ax1 = 1 #Choose 0 for x, 1 for y
ax.scatter(B[ax1,:],B[2,:], linewidth = 3)
ax.scatter(P[ax1,:],P[2,:], linewidth = 3)
#Plot actuator lines
for i in range(6):
ax.plot((B[ax1,i],P[ax1,i]),(B[2,i],P[2,i]),'r')
#Plot retracted leg lengths
for i in range(6):
R = (P[:,i] - B[:,i])
R = R/np.linalg.norm(R)*r
R = B[:,i]+R
ax.plot((B[ax1,i],R[ax1]),(B[2,i],R[2]),'b', linewidth = 3.5)
#Plot joint connections
ax.plot(B_re[ax1,:],B_re[2,:], 'grey', linewidth = 3)
ax.plot(P_re[ax1,:],P_re[2,:], 'grey', linewidth = 3)
#Plot line from center of base to center of platform
ax.plot((0,T_in[ax1]),(0,T_in[2]),'b--')
#Plot actuator force vectors
for i in range(6):
ax.quiver(P[ax1,i],P[2,i],ActVecs[i,ax1],ActVecs[i,2],scale = 16,color='g')
#Plot applied force vector
ax.quiver(T_f[ax1],T_f[2],u[ax1],u[2],scale = 14)
if ax1 == 0:
ax.set_xlabel('X')
else:
ax.set_xlabel('Y')
ax.set_ylabel('Z')
ax.axis('square')
ax.axis([-7,7,-1,19.5])
#ax.view_init(0,90)
#plt.show()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
PATH = '/tmp'
def enumerate_path(path):
"""
Returns list of path to all files in dir
:param path: path name
:return: list of path to files
"""
path_collection = []
for dir_path, dir_names, file_names in os.walk(path):
for file_name in file_names:
full_path = os.path.join(dir_path, file_name)
path_collection.append(full_path)
return path_collection
def enumerate_files(path):
"""
Returns list of file from path
:param path: path name
:return: list of files from path
"""
file_collection = []
for dir_path, dir_names, file_names in os.walk(path):
for file_name in file_names:
file_collection.append(file_name)
return file_collection
def enumerate_dirs(path):
"""
Returns list of subdir from path
:param path: path name
:return: list of subdir from path
"""
subdir_collection = []
for dir_path, dir_names, file_names in os.walk(path):
for dir_name in dir_names:
subdir_collection.append(dir_name)
return subdir_collection
if __name__ == '__main__':
print('\nRecursive listing of all paths in a dir\n')
for path in enumerate_path(PATH):
print('*** %s ***' % path)
print('\nRecursive listing of all files in a dir\n')
for file_name in enumerate_files(PATH):
print('*** %s ***' % file_name)
print('\nRecursive listing of all dirs in a dir\n')
for dir_name in enumerate_dirs(PATH):
print('*** %s ***' % dir_name)
|
#Philip Brendel
#I pledge my honor that I have followed the Stevens Honor code
def bmiCalc():
weight = int(input("Please enter your weight (in pounds): "))
height = int(input("Please enter your height (in inches): "))
bmi = (weight * 720)/(height**2)
if bmi < 19:
print("Your BMI is ", bmi, "which is under what is considered healthy.")
elif bmi > 25:
print("Your BMI is ", bmi, "which is over what is considered healthy.")
else:
print("Your BMI is ", bmi, "which is in the healthy range.")
bmiCalc()
|
#!/usr/bin/env python
from sys import version_info
if version_info[0] < 3:
from urllib import quote
else:
from urllib.request import quote
from glob import glob
import json
import re
header = '''
Place for everything Pandas.
Lessons
-------
'''
format_item = '* [{name}]({url})'.format
bb_url = 'bitbucket.org/hrojas/learn-pandas/raw/master/{}'.format
def notebooks():
return glob('lessons/*Lesson.ipynb')
def lesson_name(filename):
with open(filename) as fo:
return json.load(fo)['metadata']['name']
def nb_url(filename):
# The double quote() call is intentional: the URL must be percent-encoded twice
raw_url = bb_url(quote(quote(filename)))
return 'http://nbviewer.ipython.org/urls/{}'.format(raw_url)
def write_readme(nblist, fo):
fo.write('{}\n'.format(header))
for nb in nblist:
name = lesson_name(nb)
url = nb_url(nb)
fo.write('{}\n'.format(format_item(name=name, url=url)))
def lesson_id(filename):
return int(re.search('[0-9]+', filename).group())
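# For example (assuming filenames such as 'lessons/03 - Lesson.ipynb'),
# lesson_id returns 3, so sorted(..., key=lesson_id) in main() orders the
# notebooks numerically rather than lexicographically.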
def main():
nblist = sorted(notebooks(), key=lesson_id)
with open('README.md', 'w') as fo:
write_readme(nblist, fo)
if __name__ == '__main__':
main()
|
#!/home/kiwitech/mysite/dbcon/bin/python3
from django.core import management
if __name__ == "__main__":
management.execute_from_command_line()
|
import flask
from flask import request, jsonify
import psutil
import json
from uptime import uptime
app = flask.Flask(__name__)
@app.route('/cpuinfo', methods=['GET'])
def cpuinfo():
return jsonify({'cpu_percent': psutil.cpu_percent(interval=None,percpu=False)})
@app.route('/cpuinfopercore', methods=['GET'])
def cpuinfopercore():
return jsonify({'cpu_percent': psutil.cpu_percent(interval=None,percpu=True)})
@app.route('/cpucount', methods=['GET'])
def cpucount():
return jsonify({'cpu_count':psutil.cpu_count()})
@app.route('/temp', methods=['GET'])
def temp():
return jsonify({'sensors_temperatures':psutil.sensors_temperatures(fahrenheit=False)})
@app.route('/netstat', methods=['GET'])
def netstats():
return jsonify({'net_stat':psutil.net_io_counters()})
@app.route('/uptime', methods=['GET'])
def currentuptime():
return jsonify({'uptime':uptime()})
@app.route('/memoryinfo', methods=['GET'])
def memoryinfo():
return dict(psutil.virtual_memory()._asdict())
if __name__ == '__main__':
app.run(host='0.0.0.0',port=8001)
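# Hypothetical client usage once the server is running (endpoints as defined
# above, host/port as assumed in app.run):
#   curl http://localhost:8001/cpuinfo   -> {"cpu_percent": ...}
# or from Python:
#   import requests
#   print(requests.get("http://localhost:8001/memoryinfo").json())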
|
#Li Xin
#Student number: 014696390
#xin.li@helsinki.fi
import socket
import sys
import time
if __name__ == "__main__":
#get port from the command-line arguments
port = sys.argv[1]
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
host = socket.gethostname()
s.bind((host, int(port)))
s.listen(5)
loop = True
while loop:
conn, addr = s.accept()
print('mouse start to recv')
msg = conn.recv(1024).decode("utf8")
print('mouse finish recv: ' + msg)
if msg == 'SEARCH':
conn.send(bytes('WOO', 'utf-8'))
conn.close()
else:
time.sleep(6)
conn.send(bytes('OUCH', 'utf-8'))
conn.close()
s.close()
loop = False
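# A matching client (illustrative sketch, not part of this script) would
# connect to the same host/port and send one of the two expected messages:
#   c = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#   c.connect((socket.gethostname(), int(port)))
#   c.send(bytes('SEARCH', 'utf-8'))
#   print(c.recv(1024).decode('utf8'))  # -> 'WOO'; any other message -> 'OUCH'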
|
def greet():
print("hello")
greet()
def greet_two(greeting):
print(greeting)
greet_two("dinesh")
|
#! /usr/bin/python
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
class HtmlMail(object):
"""This class sends HTML emails"""
def __init__(self, subject, sender, to, username, password,
smtp="smtp.gmail.com", port=587):
# Server attr
self.server = smtplib.SMTP(smtp, port)
self.username = username
self.password = password
# Mail attr
self.subject = subject
self.sender = sender
self.to = to
def send(self, message):
"""Send the html mail"""
msg = MIMEMultipart('alternative')
msg['Subject'] = self.subject
msg['From'] = self.sender
msg['To'] = self.to
msg.attach(MIMEText(message, 'html'))
self.server.starttls()
self.server.login(self.username, self.password)
self.server.sendmail(self.sender, self.to, msg.as_string())
self.server.quit()
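# Minimal usage sketch; the addresses and credentials below are placeholders,
# not values from the original module:
#   mailer = HtmlMail(subject="Report", sender="me@example.com",
#                     to="you@example.com", username="me@example.com",
#                     password="app-password")
#   mailer.send("<h1>Hello</h1><p>This is an <b>HTML</b> mail.</p>")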
|
# coding: utf-8
# Python script created by Lucas Hale
# Standard library imports
from typing import Optional, Union
# http://www.numpy.org/
import numpy as np
# https://github.com/usnistgov/atomman
import atomman as am
import atomman.unitconvert as uc
def sdvpn(ucell: am.System,
C: am.ElasticConstants,
burgers: Union[list, np.ndarray],
ξ_uvw: Union[list, np.ndarray],
slip_hkl: Union[list, np.ndarray],
gamma: am.defect.GammaSurface,
m: Union[list, np.ndarray] = [0,1,0],
n: Union[list, np.ndarray] = [0,0,1],
cutofflongrange: float = uc.set_in_units(1000, 'angstrom'),
tau: np.ndarray = np.zeros((3,3)),
alpha: list = [0.0],
beta: np.ndarray = np.zeros((3,3)),
cdiffelastic: bool = False,
cdiffsurface: bool = True,
cdiffstress: bool = False,
fullstress: bool = True,
halfwidth: float = uc.set_in_units(1, 'angstrom'),
normalizedisreg: bool = True,
xnum: Optional[int] = None,
xmax: Optional[float] = None,
xstep: Optional[float] = None,
xscale: bool = False,
min_method: str = 'Powell',
min_options: dict = {},
min_cycles: int = 10) -> dict:
"""
Solves a Peierls-Nabarro dislocation model.
Parameters
----------
ucell : atomman.System
The unit cell to use as the seed for the dislocation system. Note that
only box information is used and not atomic positions.
C : atomman.ElasticConstants
The elastic constants associated with the bulk crystal structure
for ucell.
burgers : array-like object
The dislocation's Burgers vector given as a Miller or
Miller-Bravais vector relative to ucell.
ξ_uvw : array-like object
The dislocation's line direction given as a Miller or
Miller-Bravais vector relative to ucell.
slip_hkl : array-like object
The dislocation's slip plane given as a Miller or Miller-Bravais
plane relative to ucell.
m : array-like object, optional
The m unit vector for the dislocation solution. m, n, and ξ
(dislocation line) should be right-hand orthogonal. Default value
is [0,1,0] (y-axis).
n : array-like object, optional
The n unit vector for the dislocation solution. m, n, and ξ
(dislocation line) should be right-hand orthogonal. Default value
is [0,0,1] (z-axis). n is normal to the dislocation slip plane.
cutofflongrange : float, optional
The cutoff distance to use for computing the long-range energy.
Default value is 1000 angstroms.
tau : numpy.ndarray, optional
A (3,3) array giving the stress tensor to apply to the system
using the stress energy term. Only the xy, yy, and yz components
are used. Default value is all zeros.
alpha : list of float, optional
The alpha coefficient(s) used by the nonlocal energy term. Default
value is [0.0].
beta : numpy.ndarray, optional
The (3,3) array of beta coefficient(s) used by the surface energy
term. Default value is all zeros.
cdiffelastic : bool, optional
Flag indicating if the dislocation density for the elastic energy
component is computed with central difference (True) or simply
neighboring values (False). Default value is False.
cdiffsurface : bool, optional
Flag indicating if the dislocation density for the surface energy
component is computed with central difference (True) or simply
neighboring values (False). Default value is True.
cdiffstress : bool, optional
Flag indicating if the dislocation density for the stress energy
component is computed with central difference (True) or simply
neighboring values (False). Only matters if fullstress is True.
Default value is False.
fullstress : bool, optional
Flag indicating which stress energy algorithm to use. Default
value is True.
halfwidth : float, optional
A dislocation halfwidth guess to use for generating the initial
disregistry guess. Does not have to be accurate, but the better the
guess the fewer minimization steps will likely be needed. Default
value is 1 Angstrom.
normalizedisreg : bool, optional
If True, the initial disregistry guess will be scaled such that it
will have a value of 0 at the minimum x and a value of burgers at the
maximum x. Default value is True. Note: the disregistry of end points
are fixed, thus True is usually preferential.
xnum : int, optional
The number of x value points to use for the solution. Two of xnum,
xmax, and xstep must be given.
xmax : float, optional
The maximum value of x to use. Note that the minimum x value will be
-xmax, thus the range of x will be twice xmax. Two of xnum, xmax, and
xstep must be given.
xstep : float, optional
The delta x value to use, i.e. the step size between the x values used.
Two of xnum, xmax, and xstep must be given.
xscale : bool, optional
Flag indicating if xmax and/or xstep values are to be taken as absolute
or relative to ucell's a lattice parameter. Default value is False,
i.e. the x parameters are absolute and not scaled.
min_method : str, optional
The scipy.optimize.minimize method to use. Default value is
'Powell'.
min_options : dict, optional
Any options to pass on to scipy.optimize.minimize. Default value
is {}.
min_cycles : int, optional
The number of minimization runs to perform on the system. Restarting
after obtaining a solution can help refine it further toward the best
pathway. Default value is 10.
Returns
-------
dict
Dictionary of results consisting of keys:
- **'SDVPN_solution'** (*atomman.defect.SDVPN*) - The SDVPN solution
object at the end of the run.
- **'minimization_energies'** (*list*) - The total energy values
measured after each minimization cycle.
- **'disregistry_profiles'** (*list*) - The disregistry profiles
obtained after each minimization cycle.
"""
# Solve Volterra dislocation
volterra = am.defect.solve_volterra_dislocation(C, burgers, ξ_uvw=ξ_uvw,
slip_hkl=slip_hkl, box=ucell.box,
m=m, n=n)
# Generate SDVPN object
pnsolution = am.defect.SDVPN(volterra=volterra, gamma=gamma,
tau=tau, alpha=alpha, beta=beta,
cutofflongrange=cutofflongrange,
fullstress=fullstress, cdiffelastic=cdiffelastic,
cdiffsurface=cdiffsurface, cdiffstress=cdiffstress,
min_method=min_method, min_options=min_options)
# Scale xmax and xstep by alat
if xscale is True:
if xmax is not None:
xmax *= ucell.box.a
if xstep is not None:
xstep *= ucell.box.a
# Generate initial disregistry guess
x, idisreg = am.defect.pn_arctan_disregistry(xmax=xmax, xstep=xstep, xnum=xnum,
burgers=pnsolution.burgers,
halfwidth=halfwidth,
normalize=normalizedisreg)
# Set up loop parameters
cycle = 0
disregistries = [idisreg]
minimization_energies = [pnsolution.total_energy(x, idisreg)]
# Run minimization for min_cycles
pnsolution.x = x
pnsolution.disregistry = idisreg
while cycle < min_cycles:
cycle += 1
pnsolution.solve()
disregistries.append(pnsolution.disregistry)
minimization_energies.append(pnsolution.total_energy())
# Initialize results dict
results_dict = {}
results_dict['SDVPN_solution'] = pnsolution
results_dict['minimization_energies'] = minimization_energies
results_dict['disregistry_profiles'] = disregistries
return results_dict
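# Hedged usage sketch: building ucell, C and gamma below is illustrative
# (the exact atomman calls depend on your data sources), but the keyword
# arguments match the signature documented above:
#   results = sdvpn(ucell, C, burgers=[0.5, 0.5, 0], ξ_uvw=[1, -1, 2],
#                   slip_hkl=[1, 1, 1], gamma=gamma,
#                   xmax=uc.set_in_units(20, 'angstrom'), xnum=100)
#   pn = results['SDVPN_solution']
#   energies = results['minimization_energies']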
|
from tensorflow.keras.layers import Dense, LSTM, BatchNormalization, Dropout
from tensorflow.keras import Sequential
import json
data_config = json.load(open("data_config.json"))
timesteps_x = data_config["input_timesteps"]
n_features = len(data_config["input_features"])
# Model definition
model = Sequential()
model.add(LSTM(64, activation="relu", input_shape=(timesteps_x, n_features), return_sequences=True))
model.add(Dropout(0.5))
model.add(LSTM(32, activation="relu", input_shape=(timesteps_x, n_features)))
model.add(Dropout(0.5))
model.add(Dense(1))
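# The original script stops at the model definition. A typical next step
# (assumed here, not part of the source) is to compile it for a
# single-value regression target:
model.compile(optimizer="adam", loss="mse")
model.summary()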
|
def pyramid(n):
if n == 0: return '\n'  #Per the note in the description below
output = ''
for x in reversed(range(n)):
if x != 0:
output += (x * ' ' + '/' + (((x+1) - n)*-1)*2 * ' ' + '\\' + '\n')
else:
output += (x * ' ' + '/' + (((x+1) - n)*-1)*2 * '_' + '\\' + '\n')
return output
'''
The task is very simple.
You must return pyramids. Given a number n, you return a pyramid with n floors.
For example, given n=4 you must return this pyramid:
/\
/ \
/ \
/______\
Another example: given n=6 you must return this pyramid:
/\
/ \
/ \
/ \
/ \
/__________\
Another example: given n=10 you must return this pyramid:
/\
/ \
/ \
/ \
/ \
/ \
/ \
/ \
/ \
/__________________\
Note: an extra line feed character is needed at the end of the string.
The n=0 case should therefore return "\n".
'''
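# Quick check against the examples from the description above:
if __name__ == '__main__':
    print(pyramid(4))
    print(pyramid(6))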
|
import functools
import time
import uuid
from concurrent.futures.thread import ThreadPoolExecutor
from wacryptolib.exceptions import KeyAlreadyExists, KeyDoesNotExist, OperationNotSupported
from wacryptolib.utilities import generate_uuid0
# SEE https://docs.pytest.org/en/stable/writing_plugins.html#assertion-rewriting and register_assert_rewrite()
def check_keystore_basic_get_set_api(keystore, readonly_keystore=None):
"""Test the workflow of getters/setters of the storage API, for uid-attached keys."""
import pytest
keychain_uid = generate_uuid0()
time.sleep(0.1) # Let UUID0 increase its base value
keychain_uid_separated_keys = generate_uuid0()
assert keychain_uid_separated_keys > keychain_uid
keychain_uid_unused = generate_uuid0()
key_algo = "abxz"
all_keystores = [keystore, readonly_keystore] if readonly_keystore else [keystore]
for _keystore in all_keystores:
with pytest.raises(KeyDoesNotExist, match="not found"):
_keystore.get_public_key(keychain_uid=keychain_uid, key_algo="abxz")
with pytest.raises(KeyDoesNotExist, match="not found"):
_keystore.get_private_key(keychain_uid=keychain_uid, key_algo="abxz")
try:
assert not _keystore.list_keypair_identifiers()
except OperationNotSupported:
pass
# Test the ONESHOT "keypair" API
keystore.set_keypair(
keychain_uid=keychain_uid, key_algo=key_algo, public_key=b"public_data", private_key=b"private_data"
)
for _keystore in all_keystores:
assert _keystore.get_public_key(keychain_uid=keychain_uid, key_algo=key_algo) == b"public_data"
assert _keystore.get_private_key(keychain_uid=keychain_uid, key_algo=key_algo) == b"private_data"
try:
assert _keystore.list_keypair_identifiers() == [
dict(keychain_uid=keychain_uid, key_algo=key_algo, private_key_present=True)
]
except OperationNotSupported:
pass
with pytest.raises(KeyAlreadyExists, match="Already existing"): # Even with same content, it gets rejected
keystore.set_keypair(
keychain_uid=keychain_uid, key_algo=key_algo, public_key=b"public_data", private_key=b"private_data"
)
with pytest.raises(KeyAlreadyExists, match="Already existing"):
keystore.set_keypair(
keychain_uid=keychain_uid, key_algo=key_algo, public_key=b"public_data2", private_key=b"private_data2"
)
# Test the "separated keys" API
with pytest.raises(KeyDoesNotExist, match="does not exist"): # IMPORTANT: public key MUST already exist
keystore.set_private_key(
keychain_uid=keychain_uid_separated_keys, key_algo=key_algo, private_key=b"separated_private_data"
)
keystore.set_public_key(
keychain_uid=keychain_uid_separated_keys, key_algo=key_algo, public_key=b"separated_public_data"
)
with pytest.raises(KeyAlreadyExists, match="Already existing"):
keystore.set_public_key(keychain_uid=keychain_uid, key_algo=key_algo, public_key=b"separated_public_data2")
for _keystore in all_keystores:
assert (
_keystore.get_public_key(keychain_uid=keychain_uid_separated_keys, key_algo=key_algo)
== b"separated_public_data"
)
with pytest.raises(KeyDoesNotExist, match="not found"):
_keystore.get_private_key(keychain_uid=keychain_uid_separated_keys, key_algo=key_algo)
try:
assert _keystore.list_keypair_identifiers() == [
dict(keychain_uid=keychain_uid, key_algo=key_algo, private_key_present=True),
dict(keychain_uid=keychain_uid_separated_keys, key_algo=key_algo, private_key_present=False),
]
except OperationNotSupported:
pass
keystore.set_private_key(
keychain_uid=keychain_uid_separated_keys, key_algo=key_algo, private_key=b"separated_private_data"
)
with pytest.raises(KeyAlreadyExists, match="Already existing"):
keystore.set_private_key(keychain_uid=keychain_uid, key_algo=key_algo, private_key=b"separated_private_data2")
for _keystore in all_keystores:
assert (
_keystore.get_public_key(keychain_uid=keychain_uid_separated_keys, key_algo=key_algo)
== b"separated_public_data"
)
assert (
_keystore.get_private_key(keychain_uid=keychain_uid_separated_keys, key_algo=key_algo)
== b"separated_private_data"
)
try:
assert _keystore.list_keypair_identifiers() == [
dict(keychain_uid=keychain_uid, key_algo=key_algo, private_key_present=True),
dict(keychain_uid=keychain_uid_separated_keys, key_algo=key_algo, private_key_present=True),
]
except OperationNotSupported:
pass
# Test miscellaneous "not found" cases when any part of identifiers change
for _keystore in all_keystores:
# Sanity check
assert _keystore.get_public_key(keychain_uid=keychain_uid, key_algo=key_algo) == b"public_data"
assert _keystore.get_private_key(keychain_uid=keychain_uid, key_algo=key_algo) == b"private_data"
with pytest.raises(KeyDoesNotExist, match="not found"):
_keystore.get_public_key(keychain_uid=keychain_uid, key_algo=key_algo + "_")
with pytest.raises(KeyDoesNotExist, match="not found"):
_keystore.get_private_key(keychain_uid=keychain_uid, key_algo=key_algo + "_")
with pytest.raises(KeyDoesNotExist, match="not found"):
_keystore.get_public_key(keychain_uid=keychain_uid_unused, key_algo=key_algo)
with pytest.raises(KeyDoesNotExist, match="not found"):
_keystore.get_private_key(keychain_uid=keychain_uid_unused, key_algo=key_algo)
return locals()
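# Hedged usage sketch: call the checker from a concrete test; the
# FilesystemKeystore class below is hypothetical and stands in for any real
# keystore implementation:
#   def test_keystore_get_set(tmp_path):
#       keystore = FilesystemKeystore(tmp_path)
#       check_keystore_basic_get_set_api(keystore)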
def check_keystore_free_keys_api(keystore):
"""Test the storage regarding the precreation of "free keys", and their subsequent attachment to uids."""
import pytest
keychain_uid = generate_uuid0()
keychain_uid_other = generate_uuid0()
# This blocks free key attachment to this uid+type
keystore.set_keypair(keychain_uid=keychain_uid, key_algo="type1", public_key=b"whatever1", private_key=b"whatever2")
keystore.add_free_keypair(key_algo="type1", public_key=b"public_data", private_key=b"private_data")
keystore.add_free_keypair(key_algo="type1", public_key=b"public_data2", private_key=b"private_data2")
keystore.add_free_keypair(
key_algo="type2", public_key=b"public_data_other_type", private_key=b"private_data_other_type"
)
assert keystore.get_free_keypairs_count("type1") == 2
assert keystore.get_free_keypairs_count("type2") == 1
assert keystore.get_free_keypairs_count("type3") == 0
with pytest.raises(KeyAlreadyExists, match="Already existing"):
keystore.attach_free_keypair_to_uuid(keychain_uid=keychain_uid, key_algo="type1")
with pytest.raises(KeyDoesNotExist, match="not found"):
keystore.get_public_key(keychain_uid=keychain_uid, key_algo="type2")
keystore.attach_free_keypair_to_uuid(keychain_uid=keychain_uid, key_algo="type2")
assert b"public_data" in keystore.get_public_key(keychain_uid=keychain_uid, key_algo="type2")
assert keystore.get_free_keypairs_count("type1") == 2
assert keystore.get_free_keypairs_count("type2") == 0
assert keystore.get_free_keypairs_count("type3") == 0
keystore.attach_free_keypair_to_uuid(keychain_uid=keychain_uid_other, key_algo="type1")
assert keystore.get_free_keypairs_count("type1") == 1
assert keystore.get_free_keypairs_count("type2") == 0
assert keystore.get_free_keypairs_count("type3") == 0
with pytest.raises(KeyDoesNotExist, match="No free keypair of type"):
keystore.attach_free_keypair_to_uuid(keychain_uid=keychain_uid_other, key_algo="type2")
with pytest.raises(KeyDoesNotExist, match="No free keypair of type"):
keystore.attach_free_keypair_to_uuid(keychain_uid=keychain_uid, key_algo="type3")
assert keystore.get_free_keypairs_count("type1") == 1
assert keystore.get_free_keypairs_count("type2") == 0
assert keystore.get_free_keypairs_count("type3") == 0
return locals()
def check_keystore_free_keys_concurrency(keystore):
"""Parallel tests to check the thread-safety of the storage regarding "free keys" booking."""
key_algo1 = "mytype1"
key_algo2 = "mytype2"
for i in range(77):
for key_algo in (key_algo1, key_algo2):
keystore.add_free_keypair(key_algo=key_algo, public_key=b"whatever1", private_key=b"whatever2")
def retrieve_free_keypair_for_index(idx, key_algo):
keychain_uid = uuid.UUID(int=idx)
try:
keystore.attach_free_keypair_to_uuid(keychain_uid=keychain_uid, key_algo=key_algo)
time.sleep(0.001)
public_key_content = keystore.get_public_key(keychain_uid=keychain_uid, key_algo=key_algo)
assert public_key_content == b"whatever1"
res = True
except KeyDoesNotExist:
res = False
return res
executor = ThreadPoolExecutor(max_workers=20)
for key_algo in (key_algo1, key_algo2):
results_gen = executor.map(functools.partial(retrieve_free_keypair_for_index, key_algo=key_algo), range(200))
results = list(results_gen)
assert results.count(True) == 77
assert results.count(False) == 123
assert keystore.get_free_keypairs_count(key_algo=key_algo1) == 0
assert keystore.get_free_keypairs_count(key_algo=key_algo2) == 0
return locals()
def check_sensor_state_machine(sensor, run_duration=0):
"""Check the proper start/stop/join behaviour of a sensor instance."""
import pytest
assert not sensor.is_running
sensor.join() # Does nothing
with pytest.raises(RuntimeError, match="already stopped"):
sensor.stop()
assert not sensor.is_running
sensor.start()
assert sensor.is_running
with pytest.raises(RuntimeError, match="already started"):
sensor.start()
with pytest.raises(RuntimeError, match="in-progress runner"):
sensor.join()
assert sensor.is_running
time.sleep(run_duration)
assert sensor.is_running
sensor.stop()
assert not sensor.is_running
with pytest.raises(RuntimeError, match="already stopped"):
sensor.stop()
assert not sensor.is_running
sensor.join()
sensor.join() # Does nothing
assert not sensor.is_running
|
from django.utils.translation import ugettext
from querystring_parser import parser
from utils.exceptions import CustomException
def jwt_response_payload_handler(token):
"""
Add any data you want to payload of response
:param token:
:return:
"""
return {
'token': token,
}
def pagination_util(request):
arguments = parser.parse(request.GET.urlencode())
try:
size = int(arguments.pop('size', 20))
index = int(arguments.pop('index', 0))
except ValueError:
raise CustomException(detail=ugettext('Size and index query param for pagination must be integer.'), code=400)
size = index + size
return index, size
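# Illustrative example (values assumed): for a request with query string
# ?index=40&size=20, pagination_util returns (40, 60), i.e. the bounds of a
# queryset[40:60] page slice.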
|
# Override Zinnia's default urlconf to filter listings by language/category
"""Urls for the Zinnia archives"""
from django.conf.urls import url
from django.conf.urls import include
from django.conf.urls import patterns
from zinnia.urls import _
from developer_portal.blog.views import MultiLangEntryDay
from developer_portal.blog.views import MultiLangEntryWeek
from developer_portal.blog.views import MultiLangEntryYear
from developer_portal.blog.views import MultiLangEntryMonth
from developer_portal.blog.views import MultiLangEntryToday
from developer_portal.blog.views import MultiLangEntryIndex
urlpatterns = patterns(
'',
url(_(r'^feeds/'), include('zinnia.urls.feeds')),
url(_(r'^tags/'), include('zinnia.urls.tags')),
url(_(r'^authors/'), include('zinnia.urls.authors')),
url(_(r'^categories/'), include('zinnia.urls.categories')),
url(_(r'^search/'), include('zinnia.urls.search')),
url(_(r'^random/'), include('zinnia.urls.random')),
url(_(r'^sitemap/'), include('zinnia.urls.sitemap')),
url(_(r'^trackback/'), include('zinnia.urls.trackback')),
url(_(r'^comments/'), include('zinnia.urls.comments')),
url(r'^', include('zinnia.urls.entries')),
url(r'^$',
MultiLangEntryIndex.as_view(),
name='entry_archive_index'),
url(_(r'^page/(?P<page>\d+)/$'),
MultiLangEntryIndex.as_view(),
name='entry_archive_index_paginated'),
url(r'^(?P<year>\d{4})/$',
MultiLangEntryYear.as_view(),
name='entry_archive_year'),
url(_(r'^(?P<year>\d{4})/page/(?P<page>\d+)/$'),
MultiLangEntryYear.as_view(),
name='entry_archive_year_paginated'),
url(_(r'^(?P<year>\d{4})/week/(?P<week>\d+)/$'),
MultiLangEntryWeek.as_view(),
name='entry_archive_week'),
url(_(r'^(?P<year>\d{4})/week/(?P<week>\d+)/page/(?P<page>\d+)/$'),
MultiLangEntryWeek.as_view(),
name='entry_archive_week_paginated'),
url(r'^(?P<year>\d{4})/(?P<month>\d{2})/$',
MultiLangEntryMonth.as_view(),
name='entry_archive_month'),
url(_(r'^(?P<year>\d{4})/(?P<month>\d{2})/page/(?P<page>\d+)/$'),
MultiLangEntryMonth.as_view(),
name='entry_archive_month_paginated'),
url(r'^(?P<year>\d{4})/(?P<month>\d{2})/(?P<day>\d{2})/$',
MultiLangEntryDay.as_view(),
name='entry_archive_day'),
url(_(r'^(?P<year>\d{4})/(?P<month>\d{2})/'
'(?P<day>\d{2})/page/(?P<page>\d+)/$'),
MultiLangEntryDay.as_view(),
name='entry_archive_day_paginated'),
url(_(r'^today/$'),
MultiLangEntryToday.as_view(),
name='entry_archive_today'),
url(_(r'^today/page/(?P<page>\d+)/$'),
MultiLangEntryToday.as_view(),
name='entry_archive_today_paginated'),
url(r'^', include('zinnia.urls.shortlink')),
url(r'^', include('zinnia.urls.quick_entry')),
url(r'^', include('zinnia.urls.capabilities')),
)
|
from __future__ import print_function
from flask import Flask, render_template, request, make_response, jsonify, send_file
import sys, os, re, random, logging, stat, time
import requests, json
import pandas as pd
from urllib import urlencode
from lxml import html
from config import *
app = Flask(__name__)
@app.route("/")
def main():
"""
Renders the home site and serves it on response to the
HTTP GET request of index.html
:return: html rendered home page embedded in a HTTP GET
response
"""
return render_template('index.html')
@app.route("/metadata")
def metadata():
"""
Rest API call that serves the metadata information for the
requested company. It makes use of a 3rd party API to
retrieve the company metadata.
:param: request.args
- symbol: Trading Symbol of the Company
:return: Dictionary containing the Company metadata
transformed to JSON format and embedded in the HTTP GET
response
"""
# 1. Extract arguments from HTTP GET request
arguments = request.args
# 2. Prepare the arguments to be passed to the edgaronline API
params = {
'primarysymbols': arguments['symbol'],
'appkey': API_APPKEY_EDGARONLINE
}
# 3. Made the API call to the edgaronline service
try:
r = requests.get(API_COMPANIES_EDGARONLINE, params=params)
except requests.exceptions.RequestException as e:
exc_info = sys.exc_info()
logger.error('Get Company Metadata: %s', str(e))
# 4. Parse the response
info = json.loads(r.text)
# 5. Handle the case where the requested trading symbol doesn't
# correspond to any company
if info['result']['totalrows'] == 0:
print("Company doesn't exist", file=sys.stderr)
return jsonify(status='NOK', company_data=[])
# 6. Extract the information from the response
info = info['result']['rows'][0]['values']
print("Company Metadata:", info, file=sys.stderr)
# 7. Prepare the response to be served to the UI
aux = {}
for elem in info:
aux[elem['field']] = elem['value']
print(elem['field'], elem['value'], file=sys.stderr)
return jsonify(status="OK", company_data=aux)
@app.route("/filings")
def filings():
"""
Rest API call that serves the list of filings corresponding
to the requested company. It retrieves the filing list from the
EDGAR system, scrapes the filing information from the html code
and returns the metadata of each filing.
:param: request.args
- symbol: Trading Symbol of the Company
- begin: Lookup start point
- count: number of filings to return
:return: An array of dictionaries containing the metadata info
of each of the requested filings, transformed to JSON format
and embedded in the HTTP GET response
"""
# 1. Extract arguments from HTTP GET request
params = request.args
filings = []
# 2. Prepare the params needed to perform the search for the
# filings on the EDGAR system
url_params = {
'action': 'getcompany',
'CIK': params['symbol'],
'start': params['begin'],
'count': params['count']
}
# 3. Perform the request to EDGAR system
try:
r = requests.get(SEC_URL_FILINGS, params=url_params)
except requests.exceptions.RequestException as e:
exc_info = sys.exc_info()
logger.error('Get Company Filings: %s', str(e))
# 3.1 Handle the cases where the requested company doesn't have
# any filings available
if int(r.headers['content-length']) < 2000:
print("Company doesn't exist or doesn't have filings", file=sys.stderr)
return jsonify(status="NOK", filing_list=filings)
# 4. Prepare to scrape the official URLs of each filing from the
# content of the returned html
tree = html.fromstring(r.content)
documents = tree.xpath('//a[@id="documentsbutton"]/@href')
# 5. Extract the metadata of each filing using Pandas
df = pd.read_html(r.content,attrs = {'class': 'tableFile2'})[0]
forms = df[0].tolist()
descriptions = df[2].tolist()
dates = df[3].tolist()
# 6. Prepare the information to be served to the UI
for i in range(1, len(forms)):
tmp = {
'form': forms[i],
'desc': descriptions[i],
'date':dates[i],
'link':SEC_URL + documents[i - 1]
}
filings.append(tmp)
return jsonify(status="OK", filing_list=filings)
if __name__ == "__main__":
app.debug = True
app.run(host='10.0.0.156')
|
# __init__.py
# Copyright (C) 2011-2014 Andrew Svetlov
# andrew.svetlov@gmail.com
#
# This module is part of BloggerTool and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from bloggertool.__version__ import __version__
__all__ = ['__version__']
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 16/4/24 上午10:15
# @Author : ZHZ
import pandas as pd
import datetime
item_store_feature = pd.read_csv("/Users/zhuohaizhen/PycharmProjects/Tianchi_Python/Data/OutputData/1_isf1.csv", index_col=0)
item_feature = pd.read_csv("/Users/zhuohaizhen/PycharmProjects/Tianchi_Python/Data/OutputData/1_if1.csv", index_col=0)
days_20141009 = datetime.datetime(2014, 10, 9)
num_days = 7
item_store_feature['days_20141009'] = item_store_feature['date'].\
map(lambda x: (datetime.datetime(x // 10000, x // 100 % 100, x % 100) - days_20141009).days)
item_feature['days_20141009'] = item_feature['date'].\
map(lambda x: (datetime.datetime(x // 10000, x // 100 % 100, x % 100) - days_20141009).days)
result_data = []
for i,j in item_feature.groupby(item_feature['item_id']):
j = j[j['days_20141009']<=444]
j = j[j['days_20141009']>=438]
temp = {}
temp['item_id'] = i
temp['store_code'] = 'all'
index = j.sort_values('date').tail(num_days)
per = float(num_days) / len(j)
print(per, len(j))
temp['target'] = index.qty_alipay_njhs.sum()*per*2
print(temp)
result_data.append(temp)
for i,j in item_store_feature.groupby([item_store_feature['item_id'],item_store_feature['store_code']]):
temp = {}
j = j[j['days_20141009']<=444]
j = j[j['days_20141009']>=438]
temp['item_id'] = i[0]
temp['store_code'] = i[1]
index = j.sort_values('date').tail(num_days)
if(len(j)==0):
continue
per = float(num_days) / len(j)
temp['target'] = index.qty_alipay_njhs.sum()*per*2
print(temp)
result_data.append(temp)
pd.DataFrame(result_data, columns=['item_id', 'store_code', 'target']).to_csv(
"/Users/zhuohaizhen/PycharmProjects/Tianchi_Python/Data/Last14Records.csv",
index=None, columns=None)
|
#!/usr/bin/python -tt
#
# Copyright (c) 2011 Intel, Inc.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; version 2 of the License
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
import shutil
import re
import tempfile
from mic import chroot, msger, rt_util
from mic.utils import misc, fs_related, errors, runner, cmdln
from mic.conf import configmgr
from mic.plugin import pluginmgr
from mic.utils.partitionedfs import PartitionedMount
import mic.imager.raw as raw
from mic.pluginbase import ImagerPlugin
class RawPlugin(ImagerPlugin):
name = 'raw'
@classmethod
@cmdln.option("--compress-disk-image", dest="compress_image", type='choice',
choices=("gz", "bz2"), default=None,
help="Same with --compress-image")
@cmdln.option("--compress-image", dest="compress_image", type='choice',
choices=("gz", "bz2"), default = None,
help="Compress all raw images before package")
def do_create(self, subcmd, opts, *args):
"""${cmd_name}: create raw image
Usage:
${name} ${cmd_name} <ksfile> [OPTS]
${cmd_option_list}
"""
if not args:
raise errors.Usage("need one argument as the path of ks file")
if len(args) != 1:
raise errors.Usage("Extra arguments given")
creatoropts = configmgr.create
ksconf = args[0]
if not os.path.exists(ksconf):
raise errors.CreatorError("Can't find the file: %s" % ksconf)
recording_pkgs = []
if len(creatoropts['record_pkgs']) > 0:
recording_pkgs = creatoropts['record_pkgs']
if creatoropts['release'] is not None:
if 'name' not in recording_pkgs:
recording_pkgs.append('name')
ksconf = misc.normalize_ksfile(ksconf,
creatoropts['release'],
creatoropts['arch'])
configmgr._ksconf = ksconf
# Called After setting the configmgr._ksconf as the creatoropts['name'] is reset there.
if creatoropts['release'] is not None:
creatoropts['outdir'] = "%s/%s/images/%s/" % (creatoropts['outdir'], creatoropts['release'], creatoropts['name'])
# try to find the pkgmgr
pkgmgr = None
for (key, pcls) in pluginmgr.get_plugins('backend').iteritems():
if key == creatoropts['pkgmgr']:
pkgmgr = pcls
break
if not pkgmgr:
pkgmgrs = pluginmgr.get_plugins('backend').keys()
raise errors.CreatorError("Can't find package manager: %s (availables: %s)" % (creatoropts['pkgmgr'], ', '.join(pkgmgrs)))
if creatoropts['runtime']:
rt_util.runmic_in_runtime(creatoropts['runtime'], creatoropts, ksconf, None)
creator = raw.RawImageCreator(creatoropts, pkgmgr, opts.compress_image)
if len(recording_pkgs) > 0:
creator._recording_pkgs = recording_pkgs
images = ["%s-%s.raw" % (creator.name, part['name'])
for part in creator.get_diskinfo()]
self.check_image_exists(creator.destdir,
creator.pack_to,
images,
creatoropts['release'])
try:
creator.check_depend_tools()
creator.mount(None, creatoropts["cachedir"])
creator.install()
creator.configure(creatoropts["repomd"])
creator.copy_kernel()
creator.unmount()
creator.package(creatoropts["outdir"])
if creatoropts['release'] is not None:
creator.release_output(ksconf, creatoropts['outdir'], creatoropts['release'])
creator.print_outimage_info()
except errors.CreatorError:
raise
finally:
creator.cleanup()
msger.info("Finished.")
return 0
@classmethod
def do_chroot(cls, target):
img = target
imgsize = misc.get_file_size(img) * 1024L * 1024L
partedcmd = fs_related.find_binary_path("parted")
disk = fs_related.SparseLoopbackDisk(img, imgsize)
imgmnt = misc.mkdtemp()
imgloop = PartitionedMount({'/dev/sdb':disk}, imgmnt, skipformat = True)
img_fstype = "ext3"
msger.info("Partition Table:")
partnum = []
for line in runner.outs([partedcmd, "-s", img, "print"]).splitlines():
# don't strip here, so the printed line keeps its original formatting
if "Number" in line:
msger.raw(line)
if line.strip() and line.strip()[0].isdigit():
partnum.append(line.strip()[0])
msger.raw(line)
rootpart = None
if len(partnum) > 1:
rootpart = msger.choice("please choose root partition", partnum)
# Check the partitions from raw disk.
# if choose root part, the mark it as mounted
if rootpart:
root_mounted = True
else:
root_mounted = False
partition_mounts = 0
for line in runner.outs([partedcmd,"-s",img,"unit","B","print"]).splitlines():
line = line.strip()
# Lines that start with number are the partitions,
# because parted can be translated we can't refer to any text lines.
if not line or not line[0].isdigit():
continue
# Some vars have an extra "," as list separator.
line = line.replace(",","")
# Example of parted output lines that are handled:
# Number Start End Size Type File system Flags
# 1 512B 3400000511B 3400000000B primary
# 2 3400531968B 3656384511B 255852544B primary linux-swap(v1)
# 3 3656384512B 3720347647B 63963136B primary fat16 boot, lba
partition_info = re.split("\s+",line)
size = partition_info[3].split("B")[0]
if len(partition_info) < 6 or partition_info[5] in ["boot"]:
# No filesystem can be found from partition line. Assuming
# btrfs, because that is the only MeeGo fs that parted does
# not recognize properly.
# TODO: Can we make a better assumption?
fstype = "btrfs"
elif partition_info[5] in ["ext2","ext3","ext4","btrfs"]:
fstype = partition_info[5]
elif partition_info[5] in ["fat16","fat32"]:
fstype = "vfat"
elif "swap" in partition_info[5]:
fstype = "swap"
else:
raise errors.CreatorError("Could not recognize partition fs type '%s'." % partition_info[5])
if rootpart and rootpart == line[0]:
mountpoint = '/'
elif not root_mounted and fstype in ["ext2","ext3","ext4","btrfs"]:
# TODO: Check that this is actually the valid root partition from /etc/fstab
mountpoint = "/"
root_mounted = True
elif fstype == "swap":
mountpoint = "swap"
else:
# TODO: Assign better mount points for the rest of the partitions.
partition_mounts += 1
mountpoint = "/media/partition_%d" % partition_mounts
if "boot" in partition_info:
boot = True
else:
boot = False
msger.verbose("Size: %s Bytes, fstype: %s, mountpoint: %s, boot: %s" % (size, fstype, mountpoint, boot))
# TODO: add_partition should take bytes as size parameter.
imgloop.add_partition(int(size)/1024/1024, "/dev/sdb", mountpoint, fstype = fstype, boot = boot)
try:
imgloop.mount()
except errors.MountError:
imgloop.cleanup()
raise
try:
envcmd = fs_related.find_binary_inchroot("env", imgmnt)
if envcmd:
cmdline = "%s HOME=/root /bin/bash" % envcmd
else:
cmdline = "/bin/bash"
chroot.chroot(imgmnt, None, cmdline)
except:
raise errors.CreatorError("Failed to chroot to %s." %img)
finally:
chroot.cleanup_after_chroot("img", imgloop, None, imgmnt)
@classmethod
def do_unpack(cls, srcimg):
srcimgsize = (misc.get_file_size(srcimg)) * 1024L * 1024L
srcmnt = misc.mkdtemp("srcmnt")
disk = fs_related.SparseLoopbackDisk(srcimg, srcimgsize)
srcloop = PartitionedMount({'/dev/sdb':disk}, srcmnt, skipformat = True)
srcloop.add_partition(srcimgsize/1024/1024, "/dev/sdb", "/", "ext3", boot=False)
try:
srcloop.mount()
except errors.MountError:
srcloop.cleanup()
raise
image = os.path.join(tempfile.mkdtemp(dir = "/var/tmp", prefix = "tmp"), "target.img")
args = ['dd', "if=%s" % srcloop.partitions[0]['device'], "of=%s" % image]
msger.info("`dd` image ...")
rc = runner.show(args)
srcloop.cleanup()
shutil.rmtree(os.path.dirname(srcmnt), ignore_errors = True)
if rc != 0:
raise errors.CreatorError("Failed to dd")
else:
return image
|
thislist =["apple", "banana", "grapes"]
print(thislist[0])
|
def crossover(chr1, chr2, index):
return [chr1[:index]+chr2[index:], chr2[:index]+chr1[index:]]
'''
In genetic algorithms, crossover is a genetic operator used to vary the programming
of chromosomes from one generation to the next.
The one-point crossover consists of swapping part of one chromosome with
another at a specific given point. For example, applying the crossover to
chromosomes 1011011001111 and 1011100100110 with cut point (index) 4 swaps
their tails from position 4 onward.
In this kata you have to implement a function crossover that receives two chromosomes
chromosome1, chromosome2 and a zero-based index and it has to return an array with the
crossover result on both chromosomes [chromosome1, chromosome2].
Example:
crossover('111000', '000110', 3) should return ['111110', '000000']
'''
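# Quick check against the example from the description above:
if __name__ == '__main__':
    assert crossover('111000', '000110', 3) == ['111110', '000000']
    print(crossover('111000', '000110', 3))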
|
from matplotlib import pyplot as plt
from utils import *
import numpy as np
import argparse
def load(path='./input.dat'):
"""
Load the sequential training data
Arg: path - The path of the training data
Ret: The 2-D array whose shape is [num_epoch, 2]
"""
string = open(path, 'r').readlines()
res = np.zeros([len(string), 2])
for i in range(len(string)):
for c in string[i]:
if c == '0':
res[i][0] += 1
elif c == '1':
res[i][1] += 1
return res
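# For example (illustrative), a line "0101" in input.dat contributes the row
# [2, 2]: two '0' outcomes and two '1' outcomes for that epoch.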
def draw(x_list, y_list):
"""
Draw the curve of whole bayesian movement
Arg: x_list - The list of x linear space whose shape is [num_epoch, 3, num_sample_points]
y_list - The list of PDF whose shape is [num_epoch, 3, num_sample_points]
"""
if len(x_list) != len(y_list):
print('invalid length...')
exit()
plt.figure(1)
for i in range(len(x_list)):
title = [' prior', ' likelihood', ' posterior']
for j in range(3):
plt.subplot(len(x_list), 3, j+1+i*3)
plt.plot(x_list[i][j], y_list[i][j])
max_prob_x = [x_list[i][j][np.argmax(y_list[i][j])]] * 100
max_prob_y = np.linspace(0, np.max(y_list[i][j]), 100)
plt.plot(max_prob_x, max_prob_y, linestyle='--')
plt.title('iter ' + str(i+1) + title[j])
plt.show()
if __name__ == '__main__':
# Parse the parameters
parser = argparse.ArgumentParser()
parser.add_argument('--a', type=int, default=2, dest='a', help='initial a of beta distribution')
parser.add_argument('--b', type=int, default=2, dest='b', help='initial b of beta distribution')
args = parser.parse_args()
a = args.a
b = args.b
# Train
x_list = []
y_list = []
training_data = load(path='./input.dat')
for i in range(len(training_data)):
_a, _b = training_data[i][1], training_data[i][0]
_x_list = []
_y_list = []
print('iter: ', i+1, end='\t')
print('prior: ', round(bataDistribution_maxProb(a, b), 5), end='\t\t')
x_curve, y_curve = bataDistribution_curve(a, b)
_x_list.append(x_curve)
_y_list.append(y_curve)
# print('likelihood: ', round(binomialDistribution_maxProb(_a+_b, _b), 5), end='\t')
print('likelihood: ', round(_a / (_a+_b), 5), end='\t')
x_curve, y_curve = binomialDistribution_curve(_a+_b, _b)
_x_list.append(x_curve)
_y_list.append(y_curve)
a += _a
b += _b
print('posterior: ', round(bataDistribution_maxProb(a, b), 5))
x_curve, y_curve = bataDistribution_curve(a, b)
_x_list.append(x_curve)
_y_list.append(y_curve)
x_list.append(_x_list)
y_list.append(_y_list)
draw(x_list, y_list)
|
#!/usr/bin/python
#
# Copyright (C) 2010 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Script for unittesting the daemon module"""
import unittest
import signal
import os
import socket
import time
import tempfile
import shutil
from ganeti import daemon
from ganeti import errors
from ganeti import constants
from ganeti import utils
import testutils
class TestMainloop(testutils.GanetiTestCase):
"""Test daemon.Mainloop"""
def setUp(self):
testutils.GanetiTestCase.setUp(self)
self.mainloop = daemon.Mainloop()
self.sendsig_events = []
self.onsignal_events = []
def _CancelEvent(self, handle):
self.mainloop.scheduler.cancel(handle)
def _SendSig(self, sig):
self.sendsig_events.append(sig)
os.kill(os.getpid(), sig)
def OnSignal(self, signum):
self.onsignal_events.append(signum)
def testRunAndTermBySched(self):
self.mainloop.scheduler.enter(0.1, 1, self._SendSig, [signal.SIGTERM])
self.mainloop.Run() # terminates by _SendSig being scheduled
self.assertEquals(self.sendsig_events, [signal.SIGTERM])
def testTerminatingSignals(self):
self.mainloop.scheduler.enter(0.1, 1, self._SendSig, [signal.SIGCHLD])
self.mainloop.scheduler.enter(0.2, 1, self._SendSig, [signal.SIGINT])
self.mainloop.Run()
self.assertEquals(self.sendsig_events, [signal.SIGCHLD, signal.SIGINT])
self.mainloop.scheduler.enter(0.1, 1, self._SendSig, [signal.SIGTERM])
self.mainloop.Run()
self.assertEquals(self.sendsig_events, [signal.SIGCHLD, signal.SIGINT,
signal.SIGTERM])
def testSchedulerCancel(self):
handle = self.mainloop.scheduler.enter(0.1, 1, self._SendSig,
[signal.SIGTERM])
self.mainloop.scheduler.cancel(handle)
self.mainloop.scheduler.enter(0.2, 1, self._SendSig, [signal.SIGCHLD])
self.mainloop.scheduler.enter(0.3, 1, self._SendSig, [signal.SIGTERM])
self.mainloop.Run()
self.assertEquals(self.sendsig_events, [signal.SIGCHLD, signal.SIGTERM])
def testRegisterSignal(self):
self.mainloop.RegisterSignal(self)
self.mainloop.scheduler.enter(0.1, 1, self._SendSig, [signal.SIGCHLD])
handle = self.mainloop.scheduler.enter(0.1, 1, self._SendSig,
[signal.SIGTERM])
self.mainloop.scheduler.cancel(handle)
self.mainloop.scheduler.enter(0.2, 1, self._SendSig, [signal.SIGCHLD])
self.mainloop.scheduler.enter(0.3, 1, self._SendSig, [signal.SIGTERM])
    # ...not delivered because they are scheduled after TERM
self.mainloop.scheduler.enter(0.4, 1, self._SendSig, [signal.SIGCHLD])
self.mainloop.scheduler.enter(0.5, 1, self._SendSig, [signal.SIGCHLD])
self.mainloop.Run()
self.assertEquals(self.sendsig_events,
[signal.SIGCHLD, signal.SIGCHLD, signal.SIGTERM])
self.assertEquals(self.onsignal_events, self.sendsig_events)
def testDeferredCancel(self):
self.mainloop.RegisterSignal(self)
now = time.time()
self.mainloop.scheduler.enterabs(now + 0.1, 1, self._SendSig,
[signal.SIGCHLD])
handle1 = self.mainloop.scheduler.enterabs(now + 0.3, 2, self._SendSig,
[signal.SIGCHLD])
handle2 = self.mainloop.scheduler.enterabs(now + 0.4, 2, self._SendSig,
[signal.SIGCHLD])
self.mainloop.scheduler.enterabs(now + 0.2, 1, self._CancelEvent,
[handle1])
self.mainloop.scheduler.enterabs(now + 0.2, 1, self._CancelEvent,
[handle2])
self.mainloop.scheduler.enter(0.5, 1, self._SendSig, [signal.SIGTERM])
self.mainloop.Run()
self.assertEquals(self.sendsig_events, [signal.SIGCHLD, signal.SIGTERM])
self.assertEquals(self.onsignal_events, self.sendsig_events)
def testReRun(self):
self.mainloop.RegisterSignal(self)
self.mainloop.scheduler.enter(0.1, 1, self._SendSig, [signal.SIGCHLD])
self.mainloop.scheduler.enter(0.2, 1, self._SendSig, [signal.SIGCHLD])
self.mainloop.scheduler.enter(0.3, 1, self._SendSig, [signal.SIGTERM])
self.mainloop.scheduler.enter(0.4, 1, self._SendSig, [signal.SIGCHLD])
self.mainloop.scheduler.enter(0.5, 1, self._SendSig, [signal.SIGCHLD])
self.mainloop.Run()
self.assertEquals(self.sendsig_events,
[signal.SIGCHLD, signal.SIGCHLD, signal.SIGTERM])
self.assertEquals(self.onsignal_events, self.sendsig_events)
self.mainloop.scheduler.enter(0.3, 1, self._SendSig, [signal.SIGTERM])
self.mainloop.Run()
self.assertEquals(self.sendsig_events,
[signal.SIGCHLD, signal.SIGCHLD, signal.SIGTERM,
signal.SIGCHLD, signal.SIGCHLD, signal.SIGTERM])
self.assertEquals(self.onsignal_events, self.sendsig_events)
def testPriority(self):
# for events at the same time, the highest priority one executes first
now = time.time()
self.mainloop.scheduler.enterabs(now + 0.1, 2, self._SendSig,
[signal.SIGCHLD])
self.mainloop.scheduler.enterabs(now + 0.1, 1, self._SendSig,
[signal.SIGTERM])
self.mainloop.Run()
self.assertEquals(self.sendsig_events, [signal.SIGTERM])
self.mainloop.scheduler.enter(0.2, 1, self._SendSig, [signal.SIGTERM])
self.mainloop.Run()
self.assertEquals(self.sendsig_events,
[signal.SIGTERM, signal.SIGCHLD, signal.SIGTERM])
class _MyAsyncUDPSocket(daemon.AsyncUDPSocket):
def __init__(self, family):
daemon.AsyncUDPSocket.__init__(self, family)
self.received = []
self.error_count = 0
def handle_datagram(self, payload, ip, port):
self.received.append((payload))
if payload == "terminate":
os.kill(os.getpid(), signal.SIGTERM)
elif payload == "error":
raise errors.GenericError("error")
def handle_error(self):
self.error_count += 1
raise
class _BaseAsyncUDPSocketTest:
"""Base class for AsyncUDPSocket tests"""
family = None
address = None
def setUp(self):
self.mainloop = daemon.Mainloop()
self.server = _MyAsyncUDPSocket(self.family)
self.client = _MyAsyncUDPSocket(self.family)
self.server.bind((self.address, 0))
self.port = self.server.getsockname()[1]
# Save utils.IgnoreSignals so we can do evil things to it...
self.saved_utils_ignoresignals = utils.IgnoreSignals
def tearDown(self):
self.server.close()
self.client.close()
# ...and restore it as well
utils.IgnoreSignals = self.saved_utils_ignoresignals
testutils.GanetiTestCase.tearDown(self)
def testNoDoubleBind(self):
self.assertRaises(socket.error, self.client.bind, (self.address, self.port))
def testAsyncClientServer(self):
self.client.enqueue_send(self.address, self.port, "p1")
self.client.enqueue_send(self.address, self.port, "p2")
self.client.enqueue_send(self.address, self.port, "terminate")
self.mainloop.Run()
self.assertEquals(self.server.received, ["p1", "p2", "terminate"])
def testSyncClientServer(self):
self.client.handle_write()
self.client.enqueue_send(self.address, self.port, "p1")
self.client.enqueue_send(self.address, self.port, "p2")
while self.client.writable():
self.client.handle_write()
self.server.process_next_packet()
self.assertEquals(self.server.received, ["p1"])
self.server.process_next_packet()
self.assertEquals(self.server.received, ["p1", "p2"])
self.client.enqueue_send(self.address, self.port, "p3")
while self.client.writable():
self.client.handle_write()
self.server.process_next_packet()
self.assertEquals(self.server.received, ["p1", "p2", "p3"])
def testErrorHandling(self):
self.client.enqueue_send(self.address, self.port, "p1")
self.client.enqueue_send(self.address, self.port, "p2")
self.client.enqueue_send(self.address, self.port, "error")
self.client.enqueue_send(self.address, self.port, "p3")
self.client.enqueue_send(self.address, self.port, "error")
self.client.enqueue_send(self.address, self.port, "terminate")
self.assertRaises(errors.GenericError, self.mainloop.Run)
self.assertEquals(self.server.received,
["p1", "p2", "error"])
self.assertEquals(self.server.error_count, 1)
self.assertRaises(errors.GenericError, self.mainloop.Run)
self.assertEquals(self.server.received,
["p1", "p2", "error", "p3", "error"])
self.assertEquals(self.server.error_count, 2)
self.mainloop.Run()
self.assertEquals(self.server.received,
["p1", "p2", "error", "p3", "error", "terminate"])
self.assertEquals(self.server.error_count, 2)
def testSignaledWhileReceiving(self):
utils.IgnoreSignals = lambda fn, *args, **kwargs: None
self.client.enqueue_send(self.address, self.port, "p1")
self.client.enqueue_send(self.address, self.port, "p2")
self.server.handle_read()
self.assertEquals(self.server.received, [])
self.client.enqueue_send(self.address, self.port, "terminate")
utils.IgnoreSignals = self.saved_utils_ignoresignals
self.mainloop.Run()
self.assertEquals(self.server.received, ["p1", "p2", "terminate"])
def testOversizedDatagram(self):
oversized_data = (constants.MAX_UDP_DATA_SIZE + 1) * "a"
self.assertRaises(errors.UdpDataSizeError, self.client.enqueue_send,
self.address, self.port, oversized_data)
class TestAsyncIP4UDPSocket(testutils.GanetiTestCase, _BaseAsyncUDPSocketTest):
"""Test IP4 daemon.AsyncUDPSocket"""
family = socket.AF_INET
address = "127.0.0.1"
def setUp(self):
testutils.GanetiTestCase.setUp(self)
_BaseAsyncUDPSocketTest.setUp(self)
def tearDown(self):
testutils.GanetiTestCase.tearDown(self)
_BaseAsyncUDPSocketTest.tearDown(self)
class TestAsyncIP6UDPSocket(testutils.GanetiTestCase, _BaseAsyncUDPSocketTest):
"""Test IP6 daemon.AsyncUDPSocket"""
family = socket.AF_INET6
address = "::1"
def setUp(self):
testutils.GanetiTestCase.setUp(self)
_BaseAsyncUDPSocketTest.setUp(self)
def tearDown(self):
testutils.GanetiTestCase.tearDown(self)
_BaseAsyncUDPSocketTest.tearDown(self)
class _MyAsyncStreamServer(daemon.AsyncStreamServer):
def __init__(self, family, address, handle_connection_fn):
daemon.AsyncStreamServer.__init__(self, family, address)
self.handle_connection_fn = handle_connection_fn
self.error_count = 0
self.expt_count = 0
def handle_connection(self, connected_socket, client_address):
self.handle_connection_fn(connected_socket, client_address)
def handle_error(self):
self.error_count += 1
self.close()
raise
def handle_expt(self):
self.expt_count += 1
self.close()
class _MyMessageStreamHandler(daemon.AsyncTerminatedMessageStream):
def __init__(self, connected_socket, client_address, terminator, family,
message_fn, client_id, unhandled_limit):
daemon.AsyncTerminatedMessageStream.__init__(self, connected_socket,
client_address,
terminator, family,
unhandled_limit)
self.message_fn = message_fn
self.client_id = client_id
self.error_count = 0
def handle_message(self, message, message_id):
self.message_fn(self, message, message_id)
def handle_error(self):
self.error_count += 1
raise
class TestAsyncStreamServerTCP(testutils.GanetiTestCase):
"""Test daemon.AsyncStreamServer with a TCP connection"""
family = socket.AF_INET
def setUp(self):
testutils.GanetiTestCase.setUp(self)
self.mainloop = daemon.Mainloop()
self.address = self.getAddress()
self.server = _MyAsyncStreamServer(self.family, self.address,
self.handle_connection)
self.client_handler = _MyMessageStreamHandler
self.unhandled_limit = None
self.terminator = "\3"
self.address = self.server.getsockname()
self.clients = []
self.connections = []
self.messages = {}
self.connect_terminate_count = 0
self.message_terminate_count = 0
self.next_client_id = 0
# Save utils.IgnoreSignals so we can do evil things to it...
self.saved_utils_ignoresignals = utils.IgnoreSignals
def tearDown(self):
for c in self.clients:
c.close()
for c in self.connections:
c.close()
self.server.close()
# ...and restore it as well
utils.IgnoreSignals = self.saved_utils_ignoresignals
testutils.GanetiTestCase.tearDown(self)
def getAddress(self):
return ("127.0.0.1", 0)
def countTerminate(self, name):
value = getattr(self, name)
if value is not None:
value -= 1
setattr(self, name, value)
if value <= 0:
os.kill(os.getpid(), signal.SIGTERM)
def handle_connection(self, connected_socket, client_address):
client_id = self.next_client_id
self.next_client_id += 1
client_handler = self.client_handler(connected_socket, client_address,
self.terminator, self.family,
self.handle_message,
client_id, self.unhandled_limit)
self.connections.append(client_handler)
self.countTerminate("connect_terminate_count")
def handle_message(self, handler, message, message_id):
self.messages.setdefault(handler.client_id, [])
# We should just check that the message_ids are monotonically increasing.
# If in the unit tests we never remove messages from the received queue,
# though, we can just require that the queue length is the same as the
# message id, before pushing the message to it. This forces a more
# restrictive check, but we can live with this for now.
self.assertEquals(len(self.messages[handler.client_id]), message_id)
self.messages[handler.client_id].append(message)
if message == "error":
raise errors.GenericError("error")
self.countTerminate("message_terminate_count")
def getClient(self):
client = socket.socket(self.family, socket.SOCK_STREAM)
client.connect(self.address)
self.clients.append(client)
return client
def testConnect(self):
self.getClient()
self.mainloop.Run()
self.assertEquals(len(self.connections), 1)
self.getClient()
self.mainloop.Run()
self.assertEquals(len(self.connections), 2)
self.connect_terminate_count = 4
self.getClient()
self.getClient()
self.getClient()
self.getClient()
self.mainloop.Run()
self.assertEquals(len(self.connections), 6)
def testBasicMessage(self):
self.connect_terminate_count = None
client = self.getClient()
client.send("ciao\3")
self.mainloop.Run()
self.assertEquals(len(self.connections), 1)
self.assertEquals(len(self.messages[0]), 1)
self.assertEquals(self.messages[0][0], "ciao")
def testDoubleMessage(self):
self.connect_terminate_count = None
client = self.getClient()
client.send("ciao\3")
self.mainloop.Run()
client.send("foobar\3")
self.mainloop.Run()
self.assertEquals(len(self.connections), 1)
self.assertEquals(len(self.messages[0]), 2)
self.assertEquals(self.messages[0][1], "foobar")
def testComposedMessage(self):
self.connect_terminate_count = None
self.message_terminate_count = 3
client = self.getClient()
client.send("one\3composed\3message\3")
self.mainloop.Run()
self.assertEquals(len(self.messages[0]), 3)
self.assertEquals(self.messages[0], ["one", "composed", "message"])
def testLongTerminator(self):
self.terminator = "\0\1\2"
self.connect_terminate_count = None
self.message_terminate_count = 3
client = self.getClient()
client.send("one\0\1\2composed\0\1\2message\0\1\2")
self.mainloop.Run()
self.assertEquals(len(self.messages[0]), 3)
self.assertEquals(self.messages[0], ["one", "composed", "message"])
def testErrorHandling(self):
self.connect_terminate_count = None
self.message_terminate_count = None
client = self.getClient()
client.send("one\3two\3error\3three\3")
self.assertRaises(errors.GenericError, self.mainloop.Run)
self.assertEquals(self.connections[0].error_count, 1)
self.assertEquals(self.messages[0], ["one", "two", "error"])
client.send("error\3")
self.assertRaises(errors.GenericError, self.mainloop.Run)
self.assertEquals(self.connections[0].error_count, 2)
self.assertEquals(self.messages[0], ["one", "two", "error", "three",
"error"])
def testDoubleClient(self):
self.connect_terminate_count = None
self.message_terminate_count = 2
client1 = self.getClient()
client2 = self.getClient()
client1.send("c1m1\3")
client2.send("c2m1\3")
self.mainloop.Run()
self.assertEquals(self.messages[0], ["c1m1"])
self.assertEquals(self.messages[1], ["c2m1"])
def testUnterminatedMessage(self):
self.connect_terminate_count = None
self.message_terminate_count = 3
client1 = self.getClient()
client2 = self.getClient()
client1.send("message\3unterminated")
client2.send("c2m1\3c2m2\3")
self.mainloop.Run()
self.assertEquals(self.messages[0], ["message"])
self.assertEquals(self.messages[1], ["c2m1", "c2m2"])
client1.send("message\3")
self.mainloop.Run()
self.assertEquals(self.messages[0], ["message", "unterminatedmessage"])
def testSignaledWhileAccepting(self):
utils.IgnoreSignals = lambda fn, *args, **kwargs: None
client1 = self.getClient()
self.server.handle_accept()
# When interrupted while accepting we don't have a connection, but we
# didn't crash either.
self.assertEquals(len(self.connections), 0)
utils.IgnoreSignals = self.saved_utils_ignoresignals
self.mainloop.Run()
self.assertEquals(len(self.connections), 1)
def testSendMessage(self):
self.connect_terminate_count = None
self.message_terminate_count = 3
client1 = self.getClient()
client2 = self.getClient()
client1.send("one\3composed\3message\3")
self.mainloop.Run()
self.assertEquals(self.messages[0], ["one", "composed", "message"])
self.assertFalse(self.connections[0].writable())
self.assertFalse(self.connections[1].writable())
self.connections[0].send_message("r0")
self.assert_(self.connections[0].writable())
self.assertFalse(self.connections[1].writable())
self.connections[0].send_message("r1")
self.connections[0].send_message("r2")
# We currently have no way to terminate the mainloop on write events, but
# let's assume handle_write will be called if writable() is True.
while self.connections[0].writable():
self.connections[0].handle_write()
client1.setblocking(0)
client2.setblocking(0)
self.assertEquals(client1.recv(4096), "r0\3r1\3r2\3")
self.assertRaises(socket.error, client2.recv, 4096)
def testLimitedUnhandledMessages(self):
self.connect_terminate_count = None
self.message_terminate_count = 3
self.unhandled_limit = 2
client1 = self.getClient()
client2 = self.getClient()
client1.send("one\3composed\3long\3message\3")
client2.send("c2one\3")
self.mainloop.Run()
self.assertEquals(self.messages[0], ["one", "composed"])
self.assertEquals(self.messages[1], ["c2one"])
self.assertFalse(self.connections[0].readable())
self.assert_(self.connections[1].readable())
self.connections[0].send_message("r0")
self.message_terminate_count = None
client1.send("another\3")
    # When we write replies, queued messages also get handled, but not the
    # ones still in the socket.
while self.connections[0].writable():
self.connections[0].handle_write()
self.assertFalse(self.connections[0].readable())
self.assertEquals(self.messages[0], ["one", "composed", "long"])
self.connections[0].send_message("r1")
self.connections[0].send_message("r2")
while self.connections[0].writable():
self.connections[0].handle_write()
self.assertEquals(self.messages[0], ["one", "composed", "long", "message"])
self.assert_(self.connections[0].readable())
def testLimitedUnhandledMessagesOne(self):
self.connect_terminate_count = None
self.message_terminate_count = 2
self.unhandled_limit = 1
client1 = self.getClient()
client2 = self.getClient()
client1.send("one\3composed\3message\3")
client2.send("c2one\3")
self.mainloop.Run()
self.assertEquals(self.messages[0], ["one"])
self.assertEquals(self.messages[1], ["c2one"])
self.assertFalse(self.connections[0].readable())
self.assertFalse(self.connections[1].readable())
self.connections[0].send_message("r0")
self.message_terminate_count = None
while self.connections[0].writable():
self.connections[0].handle_write()
self.assertFalse(self.connections[0].readable())
self.assertEquals(self.messages[0], ["one", "composed"])
self.connections[0].send_message("r2")
self.connections[0].send_message("r3")
while self.connections[0].writable():
self.connections[0].handle_write()
self.assertEquals(self.messages[0], ["one", "composed", "message"])
self.assert_(self.connections[0].readable())
class TestAsyncStreamServerUnixPath(TestAsyncStreamServerTCP):
"""Test daemon.AsyncStreamServer with a Unix path connection"""
family = socket.AF_UNIX
def getAddress(self):
self.tmpdir = tempfile.mkdtemp()
return os.path.join(self.tmpdir, "server.sock")
def tearDown(self):
shutil.rmtree(self.tmpdir)
TestAsyncStreamServerTCP.tearDown(self)
class TestAsyncStreamServerUnixAbstract(TestAsyncStreamServerTCP):
"""Test daemon.AsyncStreamServer with a Unix abstract connection"""
family = socket.AF_UNIX
def getAddress(self):
return "\0myabstractsocketaddress"
class TestAsyncAwaker(testutils.GanetiTestCase):
"""Test daemon.AsyncAwaker"""
family = socket.AF_INET
def setUp(self):
testutils.GanetiTestCase.setUp(self)
self.mainloop = daemon.Mainloop()
self.awaker = daemon.AsyncAwaker(signal_fn=self.handle_signal)
self.signal_count = 0
self.signal_terminate_count = 1
def tearDown(self):
self.awaker.close()
def handle_signal(self):
self.signal_count += 1
self.signal_terminate_count -= 1
if self.signal_terminate_count <= 0:
os.kill(os.getpid(), signal.SIGTERM)
def testBasicSignaling(self):
self.awaker.signal()
self.mainloop.Run()
self.assertEquals(self.signal_count, 1)
def testDoubleSignaling(self):
self.awaker.signal()
self.awaker.signal()
self.mainloop.Run()
# The second signal is never delivered
self.assertEquals(self.signal_count, 1)
def testReallyDoubleSignaling(self):
self.assert_(self.awaker.readable())
self.awaker.signal()
# Let's suppose two threads overlap, and both find need_signal True
self.awaker.need_signal = True
self.awaker.signal()
self.mainloop.Run()
# We still get only one signaling
self.assertEquals(self.signal_count, 1)
def testNoSignalFnArgument(self):
myawaker = daemon.AsyncAwaker()
self.assertRaises(socket.error, myawaker.handle_read)
myawaker.signal()
myawaker.handle_read()
self.assertRaises(socket.error, myawaker.handle_read)
myawaker.signal()
myawaker.signal()
myawaker.handle_read()
self.assertRaises(socket.error, myawaker.handle_read)
myawaker.close()
def testWrongSignalFnArgument(self):
self.assertRaises(AssertionError, daemon.AsyncAwaker, 1)
self.assertRaises(AssertionError, daemon.AsyncAwaker, "string")
self.assertRaises(AssertionError, daemon.AsyncAwaker, signal_fn=1)
self.assertRaises(AssertionError, daemon.AsyncAwaker, signal_fn="string")
if __name__ == "__main__":
testutils.GanetiTestProgram()
|
def get_bioc_TxDb_pkg(wildcards):
"""Get the package bioconductor package name for the the species in config.yaml"""
species = config["txdb"]["species"].capitalize()
Source = config["txdb"]["Source"]
build = config["txdb"]["build"]
version = config["txdb"]["version"]
if Source == "UCSC":
return "TxDb.{species}.UCSC.{build}.knownGene".format(species=species,build=build)
elif Source == "Ensembl":
return "EnsDb.{species}.{version}".format(species=species,version=version)
def get_bioc_pkg_txdb_path(wildcards):
    return "resources/bioconductor/lib/R/library/{pkg}".format(pkg=get_bioc_TxDb_pkg(wildcards))
def get_bioc_species_pkg(wildcards):
"""Get the Txdb bioconductor package name for the the species in config.yaml"""
species_letters = config["species"][0:2].capitalize()
return "org.{species}.eg.db".format(species=species_letters)
def get_bioc_pkg_species_path(wildcards):
return "resources/bioconductor/lib/R/library/{pkg}".format(pkg=get_bioc_species_pkg(wildcards))
|
# Generated by Django 3.0.7 on 2020-08-19 14:11
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('staff', '0003_auto_20200817_2050'),
]
operations = [
migrations.AlterField(
model_name='districtstaff',
name='empnumber',
field=models.IntegerField(null=True, unique=True, verbose_name='Employee No.'),
),
migrations.AlterField(
model_name='provincialstaff',
name='empnumber',
field=models.IntegerField(null=True, unique=True, verbose_name='Employee No.'),
),
migrations.AlterField(
model_name='staff',
name='comment',
field=models.CharField(default=' ', max_length=200, verbose_name='Comment'),
),
]
|
# flake8: noqa
from .evaluation_callback import EvaluationCallback
from .gradient_clipping_callback import GradientClippingCallback
from .learning_rate_finder_callback import LearningRateFinderCallback
from .lr_scheduler_callback import LRSchedulerCallback
from .partial_freeze_embeddings_callback import PartialFreezeEmbeddingsCallback
from .progress_bar_callback import ProgressBarCallback
from .save_model_callback import SaveModelCallback
from .simple_logger_callback import TrainSimpleLoggerCallback
|
from spack import *
import sys,os,re
sys.path.append(os.path.join(os.path.dirname(__file__), '../../common'))
from scrampackage import write_scram_toolfile
class LlvmLibToolfile(Package):
url = 'file://' + os.path.dirname(__file__) + '/../../common/junk.xml'
version('1.0', '68841b7dcbd130afd7d236afe8fd5b949f017615', expand=False)
depends_on('llvm')
def install(self, spec, prefix):
values = {}
values['VER'] = spec['llvm'].version
values['PFX'] = spec['llvm'].prefix
values['LIB'] = spec['llvm'].prefix.lib
# This is a toolfile to use llvm / clang as a library, not as a compiler.
fname = 'llvm.xml'
contents = str(""" <tool name="llvm" version="${VER}">
<lib name="clang"/>
<client>
<environment name="LLVM_BASE" default="${PFX}"/>
<environment name="LIBDIR" default="${LIB}"/>
<environment name="INCLUDE" default="${PFX}/include"/>
</client>
<flags LDFLAGS="-Wl,-undefined -Wl,suppress"/>
<flags CXXFLAGS="-D_DEBUG -D_GNU_SOURCE -D__STDC_CONSTANT_MACROS"/>
<flags CXXFLAGS="-D__STDC_FORMAT_MACROS -D__STDC_LIMIT_MACROS -O3 "/>
<flags CXXFLAGS="-fomit-frame-pointer -fPIC -Wno-enum-compare "/>
<flags CXXFLAGS="-Wno-strict-aliasing -fno-rtti"/>
</tool>""")
write_scram_toolfile(contents, values, fname, prefix)
fname = 'pyclang.xml'
contents = str("""<tool name="pyclang" version="${VER}">
<client>
<environment name="PYCLANG_BASE" default="${PFX}"/>
</client>
<use name="python"/>
</tool>""")
write_scram_toolfile(contents, values, fname, prefix)
|
from waveapi import events
from waveapi import model
from waveapi import robot
import waveapi.document as doc
import re
def OnParticipantsChanged(properties, context):
"""Invoked when any participants have been added/removed."""
added = properties['participantsAdded']
for p in added:
Notify(context)
def OnRobotAdded(properties, context):
"""Invoked when the robot has been added."""
root_wavelet = context.GetRootWavelet()
root_wavelet.CreateBlip().GetDocument().SetText("I'm alive!")
def OnBlipSubmit(properties, context):
"""Invoked whenever a blip is submitted"""
blip = context.GetBlipById(properties['blipId'])
contents = blip.GetDocument().GetText()
    p = re.compile(r'\(up:P00001\)')
proteinlist = p.finditer(contents)
blip.CreateChild().GetDocument().SetText("You submitted a blip!")
for protein in proteinlist:
strip_contents = contents.replace(protein.group(0), 'P00001')
blip.GetDocument().SetText(strip_contents)
def Notify(context):
root_wavelet = context.GetRootWavelet()
root_wavelet.CreateBlip().GetDocument().SetText("Hi everybody!")
if __name__ == '__main__':
myRobot = robot.Robot('resolver-bot',
image_url='http://resolver-bot.appspot.com/icon.png',
version='0.0.3',
profile_url='http://resolver-bot.appspot.com/')
myRobot.RegisterHandler(events.WAVELET_PARTICIPANTS_CHANGED, OnParticipantsChanged)
myRobot.RegisterHandler(events.WAVELET_SELF_ADDED, OnRobotAdded)
myRobot.RegisterHandler(events.BLIP_SUBMITTED, OnBlipSubmit)
myRobot.Run()
|
import pygame, sys
from config import Config
from snake import Snake
from apple import Apple
class Game():
def __init__(self):
pygame.init()
self.screen = pygame.display.set_mode((Config.WINDOW_WIDTH,Config.WINDOW_HEIGHT))
self.clock = pygame.time.Clock()
self.BASICFONT = pygame.font.Font('freesansbold.ttf',18)
pygame.display.set_caption('Snaky')
self.apple = Apple()
self.snake = Snake()
def drawGrid(self):
for x in range(0,Config.WINDOW_WIDTH,Config.CELLSIZE):
pygame.draw.line(self.screen, Config.DARKGREY, (x,0) , (x,Config.WINDOW_HEIGHT))
for y in range(0,Config.WINDOW_HEIGHT,Config.CELLSIZE):
pygame.draw.line(self.screen, Config.DARKGREY, (0,y) , (Config.WINDOW_WIDTH,y))
def drawSnake(self):
for coord in self.snake.snakeCoords:
x = coord['x'] * Config.CELLSIZE
y = coord['y'] * Config.CELLSIZE
snakeSegmentRect = pygame.Rect(x,y,Config.CELLSIZE,Config.CELLSIZE)
pygame.draw.rect(self.screen,Config.DARKGREEN,snakeSegmentRect)
snakeInnerSegmentRect = pygame.Rect(x+4,y+4,Config.CELLSIZE - 8,Config.CELLSIZE -8)
pygame.draw.rect(self.screen,Config.GREEN,snakeInnerSegmentRect)
def drawApple(self):
x = self.apple.x * Config.CELLSIZE
y = self.apple.y * Config.CELLSIZE
appleRect = pygame.Rect(x,y,Config.CELLSIZE,Config.CELLSIZE)
pygame.draw.rect(self.screen, Config.RED , appleRect)
def drawScore(self,score):
scoreSurf = self.BASICFONT.render('Score: %s' % (score) , True,Config.WHITE)
scoreRect = scoreSurf.get_rect()
scoreRect.topleft = (Config.WINDOW_WIDTH - 120, 10)
self.screen.blit(scoreSurf,scoreRect)
def draw(self):
self.screen.fill(Config.BG_COLOR)
self.drawGrid()
self.drawScore(len(self.snake.snakeCoords) -3)
self.drawApple()
self.drawSnake()
pygame.display.update()
self.clock.tick(Config.FPS)
def checkForKeyPress(self):
        if len(pygame.event.get(pygame.QUIT)) > 0:
            pygame.quit()
            sys.exit()
keyUpEvents = pygame.event.get(pygame.KEYUP)
if len(keyUpEvents) == 0:
return None
        if keyUpEvents[0].key == pygame.K_ESCAPE:
            pygame.quit()
            sys.exit()
return keyUpEvents[0].key
def handleKeyEvents(self,event):
if event.key == pygame.K_LEFT and self.snake.direction != self.snake.RIGHT:
self.snake.direction = self.snake.LEFT
elif event.key == pygame.K_RIGHT and self.snake.direction != self.snake.LEFT:
self.snake.direction = self.snake.RIGHT
elif event.key == pygame.K_UP and self.snake.direction != self.snake.DOWN:
self.snake.direction = self.snake.UP
elif event.key == pygame.K_DOWN and self.snake.direction != self.snake.UP:
self.snake.direction = self.snake.DOWN
        elif event.key == pygame.K_ESCAPE:
            pygame.quit()
            sys.exit()
def resetGame(self):
del self.snake
del self.apple
self.snake = Snake()
self.apple = Apple()
return True
def isGameOver(self):
if(self.snake.snakeCoords[self.snake.HEAD]['x'] == -1 or self.snake.snakeCoords[self.snake.HEAD]['x'] == Config.CELLWIDTH ):
return self.resetGame()
if(self.snake.snakeCoords[self.snake.HEAD]['y'] == -1 or self.snake.snakeCoords[self.snake.HEAD]['y'] == Config.CELLHIGHT ):
return self.resetGame()
for snakeBody in self.snake.snakeCoords[1:]:
if snakeBody['x'] == self.snake.snakeCoords[self.snake.HEAD]['x'] and snakeBody['y'] == self.snake.snakeCoords[self.snake.HEAD]['y']:
return self.resetGame()
def drawPressKeyMsg(self):
pressKeySurf = self.BASICFONT.render('Press a key to play', True, Config.DARKGREY)
pressKeyRect = pressKeySurf.get_rect()
pressKeyRect.topleft = (Config.WINDOW_WIDTH - 200, Config.WINDOW_HEIGHT - 50)
pressKeyExit = self.BASICFONT.render('Press ESC to exit', True, Config.DARKGREY)
        pressKeyExitRect = pressKeyExit.get_rect()
pressKeyExitRect.topleft = (Config.WINDOW_WIDTH - 200, Config.WINDOW_HEIGHT - 20)
self.screen.blit(pressKeySurf,pressKeyRect)
self.screen.blit(pressKeyExit, pressKeyExitRect)
def displayGameOver(self):
gameOverFont = pygame.font.Font('freesansbold.ttf',150)
gameSurf = gameOverFont.render('Game',True,Config.WHITE)
overSurf = gameOverFont.render('Over',True,Config.WHITE)
gameRect = gameSurf.get_rect()
overRect = overSurf.get_rect()
gameRect.midtop = (Config.WINDOW_WIDTH/2,10)
overRect.midtop = (Config.WINDOW_WIDTH/2 , gameRect.height +10 + 25)
self.screen.blit(gameSurf,gameRect)
self.screen.blit(overSurf,overRect)
self.drawPressKeyMsg()
pygame.display.update()
pygame.time.wait(500)
self.checkForKeyPress() #clear out any key press in the event queue
while True:
if self.checkForKeyPress():
pygame.event.get()
return
def run(self):
# self.showStartScreen()
while True:
self.gameloop()
self.displayGameOver()
def gameloop(self):
while True:
for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    pygame.quit()
                    sys.exit()
elif event.type == pygame.KEYDOWN:
self.handleKeyEvents(event)
self.snake.update(self.apple)
self.draw()
if self.isGameOver():
break
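
# Entry-point sketch (not in the original file): the game would typically be
# launched with
if __name__ == '__main__':
    Game().run()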
|
class Flight(object):
def __init__(self, **kwargs):
mandatory_fields = ["source", "destination", "start_date", "end_date", "price", "airway", "flight_id"]
for key, val in kwargs.iteritems():
setattr(self, key, val)
for key in mandatory_fields:
if not hasattr(self, key):
raise Exception("Expected key: %s while initializing Flight instance" % key)
def to_dict(self):
return {
"source": self.source,
"destination": self.destination,
"start_date": self.start_date,
"end_date": self.end_date,
"price": self.price,
"airway": self.airway,
"flight_id": self.flight_id,
}
    @classmethod
    def from_dict(cls, d):
        # Flight.__init__ only accepts keyword arguments, so unpack explicitly.
        return cls(
            source=d["source"],
            destination=d["destination"],
            start_date=d["start_date"],
            end_date=d["end_date"],
            price=d["price"],
            airway=d["airway"],
            flight_id=d["flight_id"],
        )
class Hotel(object):
def __init__(self, **kwargs):
mandatory_fields = ["destination", "start_date", "end_date", "price"]
for key, val in kwargs.iteritems():
setattr(self, key, val)
for key in mandatory_fields:
if not hasattr(self, key):
raise Exception("Expected key: %s while initializing Flight instance" % key)
|
from swa.items import *
from scrapy.spiders import Spider
from scrapy.http import FormRequest,Request
from scrapy.selector.lxmlsel import HtmlXPathSelector
from scrapy.selector import Selector
from scrapy.http import HtmlResponse
from datetime import datetime, timedelta
from dateutil.parser import parse as dateParse
import re
import itertools, collections
import logging
from urlparse import urljoin
import pytz
import sys
from helpers import *
class Util(object):
@classmethod
def parseFlight(_class, string, date, points = None):
""" General format:
Departing flight 123(/456) $0000 12:30AM depart 7:25AM arrive (Non/1/2)stop (Change planes in XXX)
[always] [flt1/2] [price] [departure] [arrival] [# stops] [connection]
"""
removeKeywords = ['Departing flight', 'depart', 'arrive', 'Change Planes in', 'stop', 'stops', 'Plane Change']
regex = '|'.join(removeKeywords)
infoList = filter(lambda el: el!="", re.sub(regex, "", string).split(' '))
stops = int(infoList[4]) if infoList[4] != 'Non' else 0
if stops == 0:
connecting_arpts = []
elif ( infoList[5] not in SWAFareSpider.cities):
connecting_arpts = []
else:
connecting_arpts = list(infoList[5].split('/'))
departureDT = dateParse("%s %s" % (date, infoList[2]) )
arrivalDT = dateParse("%s %s" % (date, infoList[3]) )
if ( arrivalDT < departureDT ): arrivalDT += timedelta(days=1)
flight = {
'flight': tuple(infoList[0].split('/')),
'price': int(infoList[1][1:].replace(",","")),
'depart': departureDT,
'arrive': arrivalDT,
'depart_date' : date,
'stops': stops,
'connecting_arpts': connecting_arpts,
'fare_validity_date': datetime.now(),
'points' : int(points.replace(",",""))
}
return flight
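# Worked example (hypothetical fare string): a call like
#   Util.parseFlight("Departing flight 123 $500 12:30AM depart "
#                    "7:25AM arrive Nonstop", "2015-06-01", points="7,500")
# strips the keywords down to ['123', '$500', '12:30AM', '7:25AM', 'Non']
# and yields flight=('123',), price=500, stops=0, points=7500, with no
# connecting airports.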
class SWAFareSpider(Spider):
"""A spider to scrape the Southwest site for fare pricing."""
FORMNAME = "buildItineraryForm"
name = "southwestFare"
start_urls = ['http://www.southwest.com/flight/search-flight.html']
cities = ['GSP', 'FNT', 'BOS', 'OAK', 'LIT', 'BOI', 'SAN', 'DCA', 'LBB', 'BWI',
'PIT', 'RIC', 'SAT', 'JAX', 'IAD', 'JAN', 'HRL', 'CHS', 'EYW', 'BNA',
'PHL', 'SNA', 'SFO', 'PHX', 'LAX', 'MAF', 'LAS', 'CRP', 'CMH', 'FLL',
'DEN', 'DTW', 'BUR', 'ROC', 'GEG', 'BUF', 'GRR', 'BDL', 'DSM', 'EWR',
'MHT', 'PBI', 'RNO', 'OKC', 'IND', 'ATL', 'ISP', 'SMF', 'BKG', 'PVD',
'SEA', 'ECP', 'ICT', 'MDW', 'RDU', 'PDX', 'CLE', 'SJU', 'AUS', 'CLT',
'SJC', 'ELP', 'OMA', 'MEM', 'TUS', 'ALB', 'TUL', 'ORF', 'MKE', 'MSY',
'MSP', 'CAK', 'TPA', 'DAL', 'DAY', 'ONT', 'STL', 'ABQ', 'HOU', 'SLC',
'MCO', 'RSW', 'BHM', 'MCI', 'PNS', 'LGA', 'AMA', 'SDF', 'PWM']
def __init__(self, fromCity=None, days=None, toCity=None, startDate=None, *args, **kwargs):
super(SWAFareSpider, self).__init__(**kwargs)
self.origin = fromCity
self.days = int(days)
self.daysSearched = 0
if startDate == None: #when scraping multiple days starting with today
self.currentDate = datetime.now() + timedelta(days=1)
self.currentDate = self.currentDate.replace(hour=0, minute=0, second=0, microsecond=0)
elif type(startDate) == str: #when calling from command line - uses timesinceMidnight
if '/' in startDate:
print("incorrect date format")
sys.exit(1)
self.currentDate = fromMsEpoch(int(startDate))
else: #when calling from runUserFares (already is correct datetime object)
self.currentDate = startDate
self.destination = toCity
@classmethod
def lookupCity(_class, cityCode):
if cityCode in _class.cities:
return cityCode
else:
raise Exception("Invalid city specified.")
def buildQuery(self):
"""Build the POST query string for searching flights."""
queryData = {}
queryData["twoWayTrip"] = "false"
queryData["adultPassengerCount"] = "1"
queryData["outboundTimeOfDay"] = "ANYTIME"
queryData["fareType"] = "POINTS"
queryData["originAirport"] = self.lookupCity(self.origin)
queryData["destinationAirport"] = self.lookupCity(self.destination)
queryData["outboundDateString"] = self.currentDate.strftime("%m/%d/%Y")
queryData["returnAirport"] = ""
return queryData
def parse(self, response):
queryData = self.buildQuery()
while (self.daysSearched < self.days):
yield FormRequest.from_response(response, formdata=queryData, formname=self.FORMNAME, callback=self.scrapeFlights,
dont_filter = True, meta = {'date' : self.currentDate})
self.daysSearched = self.daysSearched + 1
self.currentDate = self.currentDate + timedelta(days=1)
queryData["outboundDateString"] = self.currentDate.strftime("%m/%d/%Y")
def scrapeFlights(self, response):
"""Scrape the flights into a Fare() object."""
htmlSelector = Selector(response = response)
errors = htmlSelector.xpath("//ul[@id='errors']/li/text()").extract()
if (len(errors) > 0 ):
if "does not offer service" in errors[0]:
logging.warning(errors)
else:
logging.error(errors)
return
# Conveniently packaged flight info in string form for form submission
subpath = '//div[@class="productPricing"]//input/@title'
selectors = [
'//table[@id="faresOutbound"]//td[@class="price_column "]//div[@class="productPricing"]//input/@title' , # business select
'//table[@id="faresOutbound"]//td[@class="price_column"]//div[@class="productPricing"]//input[contains(@id,"B")]/@title', # anytime
'//table[@id="faresOutbound"]//td[@class="price_column"]//div[@class="productPricing"]//input[contains(@id,"C")]/@title' # wanna get away
]
points_path = '//div[@class="productPricing"]//label/text()'
points_selectors = [
'//table[@id="faresOutbound"]//td[@class="price_column "]' + points_path , # business select points
'//table[@id="faresOutbound"]//td[@class="price_column"]//div[@class="productPricing" and .//input[(contains(@id,"B")) and (@name="outboundTrip")]]//label/text()', # anytime
'//table[@id="faresOutbound"]//td[@class="price_column"]//div[@class="productPricing" and .//input[(contains(@id,"C")) and (@name="outboundTrip")]]//label/text()' # wanna get away
]
fareList = []
pointsList = []
for selector in selectors:
fareList.append( htmlSelector.xpath(selector).extract())
for selector in points_selectors:
pointsList.append(htmlSelector.xpath(selector).extract())
fareType = ["Business Select", "Anytime", "Wanna Get Away"] #assume this order is always descending price if available
fareTypeIndex = 0
        # verify data integrity when grabbing points
if not (len(fareList) == len(pointsList) and len(list(itertools.chain(*fareList))) == len(list(itertools.chain(*pointsList)))):
return
allFlights = []
for fareTypeIndex in range(3):
for flightIndex in range(len(fareList[fareTypeIndex])):
flightString = fareList[fareTypeIndex][flightIndex]
if ( flightString[0] == 'D' ):
logging.debug(flightString)
flightData = Util.parseFlight(flightString, response.meta['date'], pointsList[fareTypeIndex][flightIndex])
flight = Fare()
for key in flightData:
flight[key] = flightData[key]
flight['origin'] = self.origin
flight['destination'] = self.destination
flight['faretype'] = fareType[fareTypeIndex]
allFlights.append(flight)
for flightIndex in range(len(allFlights)):
flight = allFlights[flightIndex]
if flight in allFlights[flightIndex+1:]:
continue
else:
yield flight
|
# encoding: utf-8
"""Program do testowania naiwnego klasyfikatora bayesowskiego."""
import glob
import re
import sys
import NaiveBayes
from Task901 import train
from Task905 import classify
def getwords(docname):
"""Wyznacza zbiór cech (słów)."""
doc = open(docname).read()
splitter = re.compile('\\W*')
words = [s for s in splitter.split(doc)]
return set(words)
def category(name):
"""Zwraca liste etykiet kategorii."""
name = name.split("/")[0] # 1 powoduje list index out of range
idx_cat_time = name.split('-_')
idx = idx_cat_time[0]
cat_time = idx_cat_time[1].split('^')
cat = cat_time[0]
time = cat_time[1].split('.')[0]
cat_list = cat.split("_")
    # print(idx)
    # print(time)
    # print(cat_list)
return cat_list
def readFeatureCount():
    alreadyDoneFeatureCount = {}
try:
f = open("feature_count.txt", "r")
try:
allLines = f.readlines()
finally:
f.close()
except IOError:
pass
else:
for line in allLines:
sp = line.split(";")
args = (sp[0], sp[1])
alreadyDoneFeatureCount[args] = int(sp[2])
    return alreadyDoneFeatureCount
def readClassCount():
    alreadyDoneClassCount = {}
try:
f = open("class_count.txt", "r")
try:
allLines = f.readlines()
finally:
f.close()
except IOError:
pass
else:
for line in allLines:
sp = line.split(";")
alreadyDoneClassCount[sp[0]] = int(sp[1])
    return alreadyDoneClassCount
def cross_eval(directory, parts, verbose=False):
"""Dokonuje sprawdzenia krzyżowego."""
correct = 0
total = 0
testlist = []
trainlist = []
testlist.extend(glob.glob("recipiestest/*"))
trainlist.extend(glob.glob("recipies/*"))
classifier = NaiveBayes.NaiveBayes(getwords)
    classifier.feature_count = readFeatureCount() # load feature counts saved by an earlier training run
classifier.class_count = readClassCount()
if verbose:
print ("\tTraining classifier")
for doc in trainlist:
categories = category(doc)
for cat in categories:
train(classifier, doc, cat)
#--------------------------------------------------------------------------------------------
try:
        # Open the file (or create it) and write feature_count to it.
        resultsFile = open("feature_count.txt", "w")
try:
for feat in classifier.feature_count:
string = feat[0]+";"+feat[1]+";"+str(classifier.feature_count[(feat[0], feat[1])])+"\n"
resultsFile.write(string)
finally:
resultsFile.close()
except IOError:
pass
try:
        resultsFile2 = open("class_count.txt", "w") # write class_count to a file
try:
for cl in classifier.class_count:
string = cl+";"+str(classifier.class_count[cl])+"\n"
resultsFile2.write(string)
finally:
resultsFile2.close()
except IOError:
pass
#---------------------------------------------------------------------------------------------
if verbose:
print ("\tClassifying")
for doc in testlist:
bestcats = classify(classifier, doc)
if verbose:
print ("\t", doc, ":", bestcats, "-"),
cats_count = 2
correct_count = 0
for cat in category(doc):
for bestcat in bestcats:
if bestcat == cat:
correct_count += 1
print (correct_count, '/', cats_count)
correct += correct_count
total += cats_count
three_bests = get_three_bests(classifier)
    print(three_bests)
baseline(three_bests,testlist,verbose)
return float(correct)/float(total)
# return the three most frequent categories
def get_three_bests(classifier):
    # print(classifier.class_count)
classes_sorted = sorted(classifier.class_count, key=classifier.class_count.get, reverse=True)
return classes_sorted[:3]
# baseline
def baseline(three_bests,testlist,verbose):
correct = 0
total = 0
for doc in testlist:
        # if verbose:
        #     print("\t", doc, ":", three_bests, "-", end=" ")
cats_count = 2
correct_count = 0
for cat in category(doc):
for bestcat in three_bests:
if bestcat == cat:
correct_count += 1
        # print(correct_count, '/', cats_count)
correct += correct_count
total += cats_count
ACCURACY = float(correct)/float(total)
print "Base line accuracy:", ACCURACY
if __name__ == '__main__':
ACCURACY = cross_eval("mailbox", 10, True)
print ("Accuracy:", ACCURACY)
|
import os
import yaml
import codecs
from general_tools.file_utils import write_file
class RC:
def __init__(self, directory):
"""
:param string directory:
"""
        self.dir = directory
        # Validate the parameter before using it to build paths.
        if type(self.dir) is not str and type(self.dir) is not unicode:
            raise Exception('Missing string parameter: dir')
        manifest_file = os.path.join(directory, 'manifest.yaml')
        self.manifest = self.__read_yaml_file(manifest_file)
def __read_yaml_file(self, file):
"""
        Attempts to load a yaml file. If the file is missing or cannot be read, None is returned.
:param file: the yaml file to be loaded
:return: the yaml object or None
"""
if os.access(file, os.R_OK):
with codecs.open(file, 'r', encoding='utf-8') as stream:
try:
return yaml.load(stream)
except yaml.YAMLError as exc:
print(exc)
return None
@property
def path(self):
return self.dir
@property
def type(self):
return self.manifest['dublin_core']['type']
@property
def language(self):
return self.manifest['dublin_core']['language']
@property
def resource(self):
return self.manifest['dublin_core']
@property
def conforms_to(self):
if type(self.manifest['dublin_core']['conformsto']) is str:
return self.manifest['dublin_core']['conformsto'].replace('rc', '')
else:
return None
@property
def chunk_ext(self):
return {
'text/usx': 'usx',
'text/usfm': 'usfm',
'text/markdown': 'md'
}.get(self.manifest['dublin_core']['format'], 'txt')
def project(self, identifier=None):
"""
Retrieves a project from the RC.
You can exclude the parameter if the RC only has one project.
:param identifier:
:return:
"""
if identifier:
for p in self.manifest['projects']:
if p['identifier'] == identifier:
return p
else:
if len(self.manifest['projects']) == 1:
return self.manifest['projects'][0]
elif len(self.manifest['projects']) > 1:
raise Exception('Multiple projects found. Specify the project identifier.')
@property
def project_count(self):
return len(self.manifest['projects'])
@property
def project_ids(self):
identifiers = []
for p in self.manifest['projects']:
identifiers.append(p['identifier'])
return identifiers
def chapters(self, identifier=None):
"""
Returns an array of chapters in this resource container.
You can exclude the parameter if this RC only has one project.
:param identifier: The project identifier
:return array:
"""
p = self.project(identifier)
if p is None:
return []
else:
directory = os.path.join(self.dir, p['path'])
files = os.listdir(directory)
if 'config.yaml' in files:
files.remove('config.yaml')
return files
def chunks(self, project_identifier, chapter_identifier=None):
if chapter_identifier is None:
chapter_identifier = project_identifier
            project_identifier = None
p = self.project(project_identifier)
if p is None:
return []
directory = os.path.join(self.dir, p['path'], chapter_identifier)
return os.listdir(directory)
def read_chunk(self, project_identifier, chapter_identifier, chunk_identifier=None):
if chunk_identifier is None:
chunk_identifier = chapter_identifier
chapter_identifier = project_identifier
project_identifier = None
p = self.project(project_identifier)
if p is None:
return []
file_path = os.path.join(self.dir, p['path'], chapter_identifier, chunk_identifier + '.' + self.chunk_ext)
if os.access(file_path, os.R_OK):
contents = open(file_path).read()
else:
contents = None
return contents
def write_chunk(self, project_identifier, chapter_identifier, chunk_identifier, content=None):
if content is None:
content = chunk_identifier
chunk_identifier = chapter_identifier
chapter_identifier = project_identifier
project_identifier = None
p = self.project(project_identifier)
if p is None:
return
# We need to remove the chunk if no content is specified.
if content == '':
file_path = os.path.join(self.dir, p['path'], chapter_identifier, chunk_identifier + '.' + self.chunk_ext)
if os.access(file_path, os.R_OK):
os.remove(file_path)
else:
directory_path = os.path.join(self.dir, p['path'], chapter_identifier)
file_path = os.path.join(directory_path, chunk_identifier + '.' + self.chunk_ext)
if not os.path.isdir(directory_path):
os.makedirs(directory_path)
write_file(file_path, content)
def write_toc(self, project_identifier, content=None):
if content is None:
content = project_identifier
project_identifier = None
p = self.project(project_identifier)
if p is None:
return
file_path = os.path.join(self.dir, p['path'], 'toc.yaml')
if content == '':
if os.access(file_path, os.R_OK):
os.remove(file_path)
else:
write_file(file_path, yaml.dump(content, default_flow_style=False))
def write_config(self, project_identifier, content=None):
if content is None:
content = project_identifier
project_identifier = None
p = self.project(project_identifier)
if p is None:
return
file_path = os.path.join(self.dir, p['path'], 'config.yaml')
if content == '':
if os.access(file_path, os.R_OK):
os.remove(file_path)
else:
write_file(file_path, yaml.dump(content, default_flow_style=False))
def config(self, project_identifier=None):
p = self.project(project_identifier)
if p is None:
return None
file_path = os.path.join(self.dir, p['path'], 'config.yaml')
return self.__read_yaml_file(file_path)
def toc(self, project_identifier=None):
p = self.project(project_identifier)
if p is None:
return None
file_path = os.path.join(self.dir, p['path'], 'toc.yaml')
return self.__read_yaml_file(file_path)
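
# Illustrative usage sketch (hypothetical path): walk a resource container,
# printing each project's chapters and the chunk files they contain.
if __name__ == '__main__':
    rc = RC('/path/to/resource_container')
    print(rc.type)  # resource type from dublin_core
    for project_id in rc.project_ids:
        for chapter in rc.chapters(project_id):
            print(chapter, rc.chunks(project_id, chapter))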
|
# @Time : 2018-9-10
# @Author : zxh
import hashlib
import os
def cal_md5(filepath):
    with open(filepath, 'rb') as md5file:
        return hashlib.md5(md5file.read()).hexdigest()
def write_md5(filepath, md5):
with open(filepath, 'w') as f:
f.write(md5)
def read_md5(filepath):
if not os.path.isfile(filepath):
return ''
with open(filepath, 'r') as f:
return f.read(32)
def remove_file(filepath):
if os.path.isfile(filepath):
os.remove(filepath)
__all__ = ['cal_md5', 'write_md5', 'read_md5', 'remove_file']
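
# Illustrative usage sketch (hypothetical paths): cache a payload's MD5 in a
# side file and refresh it when the payload changes.
if __name__ == '__main__':
    current = cal_md5('payload.bin')
    if current != read_md5('payload.bin.md5'):
        write_md5('payload.bin.md5', current)
        print('payload changed, cached md5 updated')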
|
import zmq.green as zmq
from basesocket import BaseSocket
class Server(BaseSocket):
def __init__(self, host='0.0.0.0', port=12305):
context = zmq.Context()
self.receiver = context.socket(zmq.PULL)
self.receiver.bind('tcp://%s:%i' % (host, port))
self.sender = context.socket(zmq.PUB)
self.sender.bind('tcp://%s:%i' % (host, port + 1))
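
# Illustrative counterpart sketch (not part of the original module): a client
# for the Server above would typically PUSH work in and SUBSCRIBE for results;
# the socket types and ports below mirror the server's defaults.
def make_client(host='127.0.0.1', port=12305):
    context = zmq.Context()
    sender = context.socket(zmq.PUSH)        # pairs with the server's PULL
    sender.connect('tcp://%s:%i' % (host, port))
    receiver = context.socket(zmq.SUB)       # pairs with the server's PUB
    receiver.connect('tcp://%s:%i' % (host, port + 1))
    receiver.setsockopt(zmq.SUBSCRIBE, b'')  # receive all published topics
    return sender, receiver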
|
import json
import jwt
import time
import logging
import pre
from gmssl import sm2
jwt_secret = 'tuna and bacon are my favorite'
def register(username, pubkey):
# ensure user names are distinct
sql = "SELECT * FROM Users WHERE UserName='{}'".format(username)
if len(pre.select(sql)) != 0:
return json.dumps({
'status': '401',
'msg': 'user name duplicated'
})
    # store the username and public key in the database
sql = "INSERT INTO Users (UserName, Pubkey) VALUES ('{}', '{}')".format(username, pubkey)
pre.insert(sql)
logging.info('{} registered with pubkey {}'.format(username, pubkey))
return json.dumps({
'status': '200',
'msg': 'successfully registered'
})
def login(username, timeval, signature):
# verify username
sql = "SELECT * FROM Users WHERE UserName='{}'".format(username)
selected = pre.select(sql)
if len(selected) == 0:
return json.dumps({
'status': '401',
            'msg': 'user name does not exist'
})
# verify signature
pubkey = selected[0][1]
sm2_verify = sm2.CryptSM2(public_key=pubkey, private_key='')
if not sm2_verify.verify(signature, (username + timeval).encode('ascii')):
return json.dumps({
'status': '401',
'msg': 'wrong keys'
})
logging.info('{} logged in'.format(username))
claims = {
'username': username,
'pubkey': pubkey
}
    jwt_encoded = jwt.encode(claims, jwt_secret, algorithm='HS256')
    if isinstance(jwt_encoded, bytes):  # PyJWT < 2.0 returns bytes
        jwt_encoded = jwt_encoded.decode('ascii')
    return json.dumps({
        'status': '200',
        'msg': 'successfully logged in',
        'jwt': jwt_encoded
    })
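
# Illustrative sketch (not in the original module): a protected endpoint could
# validate the token issued by login() above using the same shared secret.
def verify_token(jwt_encoded):
    try:
        claims = jwt.decode(jwt_encoded, jwt_secret, algorithms=['HS256'])
        return claims['username']   # token is valid; identify the caller
    except jwt.InvalidTokenError:
        return None                 # tampered, malformed, or wrong key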
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Module with functions common to whole project."""
from django.views import i18n
from django.conf import settings
from django.http import HttpResponse, HttpResponseServerError, \
HttpResponseNotFound
from django.template import Context, RequestContext, loader
from django.utils.translation import ugettext as _
from django.shortcuts import redirect
def setlang(request, lng='en'):
    """Sets the session language; used when the language code arrives via GET."""
if lng in [jj[0] for jj in settings.LANGUAGES]:
if hasattr(request, 'session'):
request.session['django_language'] = lng
else:
return HttpResponse("No '.session' at " + request.path).set_cookie(
settings.LANGUAGE_COOKIE_NAME, lng)
else:
t = loader.get_template('404.html')
msg = _('No translation for language %s.') % lng
return HttpResponseNotFound(t.render(RequestContext(request, \
{'request_path': request.path, 'msg': msg})))
return i18n.set_language(request)
def page_under_develop(request):
"""
Customized handler500
"""
dev_page = ["/admindks/", "/admin/doc/", "/admindks/password_chage/",
"admindks/logout/"]
msg = _('Sorry, this page is not ready yet.')
if request.path in dev_page:
t = loader.get_template('404.html')
return HttpResponseNotFound(t.render(RequestContext(request, \
{'request_path': request.path, 'msg': msg})))
else:
t = loader.get_template('500.html')
return HttpResponseServerError(t.render(Context({})))
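
# Illustrative wiring sketch (hypothetical module path): in the project's
# urls.py these views could be hooked up along the lines of
#   handler500 = 'project.common.page_under_develop'
#   url(r'^setlang/(?P<lng>\w{2})/$', 'project.common.setlang'),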
|