repo_name stringlengths 6 97 | path stringlengths 3 341 | text stringlengths 8 1.02M |
|---|---|---|
NoOneZero/Neuro | test_1_lineral/frame.py | import pygame
class Frame:
    """Rectangular arena border with a cross-hair marking a goal position.

    The frame is drawn as four lines between ``start_pos`` and ``end_pos``
    (opposite corners); the goal is drawn as a full-height vertical and a
    full-width horizontal line crossing at ``goal_pos``.
    """

    def __init__(self, start_pos, end_pos, goal_pos,
                 frame_color=(255, 255, 0), goal_color=(255, 0, 20), width=5):
        # BUGFIX: color defaults are tuples, not lists -- list defaults are
        # created once at definition time and shared between all instances.
        # Callers may still pass their own list/tuple colors.
        self.start_pos = start_pos    # [x, y] of one frame corner
        self.end_pos = end_pos        # [x, y] of the opposite corner
        self.goal_pos = goal_pos      # [x, y] of the goal, absolute coordinates
        self.frame_color = frame_color
        self.goal_color = goal_color
        self.width = width            # line width in pixels

    def draw(self, display):
        """Draw the frame border and the goal cross-hair on *display*."""
        x0, y0 = self.start_pos[0], self.start_pos[1]
        x1, y1 = self.end_pos[0], self.end_pos[1]
        gx, gy = self.goal_pos[0], self.goal_pos[1]
        # Frame border: top, right, bottom, left edge.
        pygame.draw.line(display, color=self.frame_color, start_pos=[x0, y0], end_pos=[x1, y0], width=self.width)
        pygame.draw.line(display, color=self.frame_color, start_pos=[x1, y1], end_pos=[x1, y0], width=self.width)
        pygame.draw.line(display, color=self.frame_color, start_pos=[x0, y1], end_pos=[x1, y1], width=self.width)
        pygame.draw.line(display, color=self.frame_color, start_pos=[x0, y0], end_pos=[x0, y1], width=self.width)
        # Goal cross-hair spanning the whole frame.
        pygame.draw.line(display, color=self.goal_color, start_pos=[gx, y0], end_pos=[gx, y1], width=self.width)
        pygame.draw.line(display, color=self.goal_color, start_pos=[x0, gy], end_pos=[x1, gy], width=self.width)
|
NoOneZero/Neuro | else/character.py | <reponame>NoOneZero/Neuro
import config
import random
from common.web import Web
class Character:
    """One member of a genetic-algorithm population.

    Couples a controllable ``person`` with a neural net ``web`` and fitness
    accumulators.  Class-level state tracks the whole population so the
    static ``calculate_end_epoch`` pipeline can run selection, crossover
    and mutation across every character created so far.
    """

    iterator = 0               # total number of characters ever created
    web_father = None          # father webs chosen for crossover (per couple)
    web_mother = None          # mother webs chosen for crossover (per couple)
    characters_all = []        # every Character instance, in creation order
    characters_web_temp = []   # webs of the next generation being assembled
    max = 0                    # index (into characters_all) of the best-fitness character

    def __init__(self, person=None, web=None):
        self.person = person
        # Use the supplied net, or start from a fresh randomized one.
        if web is not None:
            self.web = web
        else:
            self.web = Web(randomize=1)
        self.fitnes = 0            # fitness accumulated during the current epoch
        self.fitnes_radical = 0    # secondary fitness channel (logged to the DB)
        Character.iterator += 1
        Character.characters_all.append(self)

    def set_web(self):
        pass

    def iteration_done(self):
        # Per-iteration hook; subclasses add their fitness contribution here.
        self.fitnes += 0
        self.fitnes_radical += 0

    def epoch_done(self):
        # Per-epoch hook; subclasses add their fitness contribution here.
        self.fitnes += 0
        self.fitnes_radical += 0

    @staticmethod
    def calculate_end_epoch():
        """Run the full end-of-epoch genetic pipeline over the population."""
        Character._calculate_max()
        Character._make_parents_roulette_unique()
        Character._make_who_not_die()
        Character._make_new_population()
        Character._make_mutation()
        Character._reset_position()
        Character._remake_the_same()

    @staticmethod
    def _calculate_max():
        """Store the index of the best-fitness character in ``Character.max``."""
        Character.max = 0
        for i in range(1, len(Character.characters_all)):
            if (Character.characters_all[i].fitnes
                    > Character.characters_all[Character.max].fitnes):
                Character.max = i

    @staticmethod
    def _tournament():
        pass

    @staticmethod
    def _make_parents_roulette_unique():
        """Pick father/mother webs by fitness-weighted roulette, uniquely.

        Every chosen character is removed from the candidate pool, so no
        character is selected twice within one selection round.
        """
        index_all = []
        fitnes_all = []
        for i in range(len(Character.characters_all)):
            index_all.append(i)
            fitnes_all.append(Character.characters_all[i].fitnes)
        index_father = []
        index_mother = []
        for i in range(config.NUMBER_OF_PARENTS_COUPLES):
            index_father.append(random.choices(index_all, fitnes_all, k=1)[0])
            # BUGFIX: locate the chosen value in the candidate pool
            # (``index_all``) -- the old code searched ``index_father`` and
            # popped the wrong pool position, desynchronizing the indices
            # from their weights.
            index = index_all.index(index_father[i])
            index_all.pop(index)
            fitnes_all.pop(index)
            index_mother.append(random.choices(index_all, fitnes_all, k=1)[0])
            index = index_all.index(index_mother[i])
            index_all.pop(index)
            fitnes_all.pop(index)
        Character.web_father = []
        Character.web_mother = []
        for i in range(config.NUMBER_OF_PARENTS_COUPLES):
            Character.web_father.append(Character.characters_all[index_father[i]].web)
            Character.web_mother.append(Character.characters_all[index_mother[i]].web)

    @staticmethod
    def _make_parents_roulette_not_unique():
        """Pick parent webs by roulette; a character may parent many couples.

        The father is taken out of the pool only while his mother is drawn
        (so a character never mates with itself) and then put back.
        """
        sum_value = 1
        for i in range(len(Character.characters_all)):
            sum_value += Character.characters_all[i].fitnes
        index_all = []
        fitnes_all = []
        for i in range(len(Character.characters_all)):
            index_all.append(i)
            fitnes_all.append(Character.characters_all[i].fitnes / sum_value)
        index_father = []
        index_mother = []
        for i in range(config.NUMBER_OF_PARENTS_COUPLES):
            index_father.append(random.choices(index_all, fitnes_all, k=1)[0])
            # BUGFIX: find the father's position in the candidate pool
            # (the old code searched ``index_father`` instead).
            index = index_all.index(index_father[i])
            to_append_index = index_all.pop(index)
            to_append_fitnes = fitnes_all.pop(index)
            index_mother.append(random.choices(index_all, fitnes_all, k=1)[0])
            index_all.append(to_append_index)
            fitnes_all.append(to_append_fitnes)
        Character.web_father = []
        Character.web_mother = []
        for i in range(config.NUMBER_OF_PARENTS_COUPLES):
            Character.web_father.append(Character.characters_all[index_father[i]].web)
            Character.web_mother.append(Character.characters_all[index_mother[i]].web)

    @staticmethod
    def _make_who_not_die():
        """Roulette-select the survivors whose webs seed the next generation."""
        index_all = []
        fitnes_all = []
        for i in range(len(Character.characters_all)):
            index_all.append(i)
            fitnes_all.append(Character.characters_all[i].fitnes)
        Character.characters_web_temp = []
        for i in range(config.COUNT_OF_ALIVE_AFTER_EPOCH):
            index = random.choices(index_all, fitnes_all, k=1)[0]
            Character.characters_web_temp.append(Character.characters_all[index].web)
            # BUGFIX: pop the index and its weight at the same pool position.
            # The old ``fitnes_all.remove(<value>)`` dropped the first equal
            # fitness value, which could belong to a different character and
            # misalign the weights.
            position = index_all.index(index)
            index_all.pop(position)
            fitnes_all.pop(position)

    @staticmethod
    def _make_new_population():
        """Fill ``characters_web_temp`` up to the population size via
        crossover, then hand the webs back to the live characters."""
        start = len(Character.characters_web_temp)
        for i in range(start, config.START_POPULATION):
            number = i % config.NUMBER_OF_PARENTS_COUPLES
            # Each crossover of couple ``number`` yields a pair of child webs.
            Character.characters_web_temp.extend(
                Character.web_father[number].cross_crossover_several(
                    Character.web_mother[number],
                    config.CROSSOVER_POINT_NUMBER,
                    return_couple=True))
        for i in range(len(Character.characters_all)):
            Character.characters_all[i].web = Character.characters_web_temp[i]

    @staticmethod
    def _make_mutation():
        # Mutate every web in place with the configured per-gene probability.
        for i in range(len(Character.characters_all)):
            Character.characters_all[i].web.make_mutation(config.MUTATION_PROBABILITY)

    @staticmethod
    def _reset_position():
        # Respawn every person and zero the per-epoch fitness.
        for i in range(len(Character.characters_all)):
            Character.characters_all[i].person.reset_position()
            Character.characters_all[i].fitnes = 0

    @staticmethod
    def _remake_the_same():
        """Re-randomize duplicated webs so the population stays diverse."""
        for i in range(len(Character.characters_all)):
            for j in range(i + 1, len(Character.characters_all)):
                if Character.characters_all[i].web == Character.characters_all[j].web:
                    Character.characters_all[j].web.randomize(randomize_probability=0.3)

    @staticmethod
    def save_to_db_fitnes():
        """Return a dict of per-character fitness columns for DB logging."""
        d = {}
        for i in range(len(Character.characters_all)):
            d["f_{}".format(i)] = [Character.characters_all[i].fitnes]
            d["fr_{}".format(i)] = [Character.characters_all[i].fitnes_radical]
        return d

    @staticmethod
    def save_to_db_last_web():
        """Return the best character's first axon line for DB logging."""
        d = {}
        d.update(Character.characters_all[Character.max].web.axon_line(0))
        return d
NoOneZero/Neuro | test_4_move_round/activityTest.py | <gh_stars>0
from common.activity import Activity
from common.dataWriter import DataWriter
from common.geneticAlgorithmParams import GeneticAlgorithmParams
from common.monitor import Monitor
from common.character import Character
from test_4_move_round.personTest import PersonTest
from test_4_move_round.characterTest import CharacterTest
from test_4_move_round.geneticAlgorithmParamsTest import GeneticAlgorithmParamsTest
import datetime
class ActivityTest(Activity):  # test 4 move round
    """Activity for test 4 ("move round").

    Wires the monitor, GA parameters, population and data writer together,
    then runs the nested epoch/iteration simulation loop.  Most per-step
    hooks are no-ops here and exist for subclasses to override.
    """

    def __init__(self, monitor: Monitor = None,
                 genetic_algorithm_params: GeneticAlgorithmParams = None,
                 data_writer: DataWriter = None):
        super(ActivityTest, self).__init__(monitor, genetic_algorithm_params, data_writer)
        # NOTE(review): init() runs again after super().__init__(); if the
        # base Activity already initializes these subsystems this performs a
        # double initialization -- confirm against common.activity.Activity.
        self.init(monitor, genetic_algorithm_params, data_writer)

    def init(self, monitor: Monitor = None,
             genetic_algorithm_params: GeneticAlgorithmParams = None,
             data_writer: DataWriter = None):
        """Initialize every subsystem; missing arguments get defaults."""
        self._system_init(monitor)
        self._simulation_params_init(genetic_algorithm_params)
        self._character_init()
        self._enemy_init()
        self._environment_init()
        self._data_writer_init(data_writer)

    def _system_init(self, monitor: Monitor = None) -> None:
        self.monitor = monitor or Monitor()

    def _simulation_params_init(self, genetic_algorithm_params: GeneticAlgorithmParams = None) -> None:
        # Counters and loop flags for the two nested loops in loop().
        self.epoch = 0
        self.iteration = 0
        self.is_epoch_work: bool = True
        self.is_iteration_work: bool = True
        self.start_time = datetime.datetime.now()
        self.genetic_algorithm_params = genetic_algorithm_params or GeneticAlgorithmParams()

    def _character_init(self) -> None:
        # NOTE(review): reaches into the private ``_start_population``
        # attribute of the params object -- verify a public getter exists
        # and prefer it for consistency.
        self.character = []
        for i in range(self.genetic_algorithm_params._start_population):
            self.character.append(Character())

    def _enemy_init(self) -> None: pass

    def _environment_init(self) -> None: pass

    def _data_writer_init(self, data_writer: DataWriter = None) -> None:
        self.data_writer = data_writer or DataWriter()

    def loop(self) -> None:
        """Outer epoch loop: run iterations, score, persist, evolve, repeat."""
        while self.is_epoch_work:
            self._run_iteration_cycle()
            self._calculate_fitness_function_epoch()
            self._save_character_data()
            self._create_new_population()
            self._check_loop_epoch_condition()

    def _run_iteration_cycle(self) -> None:
        """Inner iteration loop: one full simulation step per pass."""
        while self.is_iteration_work:
            self._control_input()
            self._move_environment()
            self._collide_environment()
            self._move_enemies()
            self._collide_enemies()
            self._kill_enemies()
            self._move_character()
            self._collide_character()
            self._kill_character()
            self._calculate_fitness_function_iteration()
            self._draw_all()
            self._write_data_on_screen()
            self._check_loop_iteration_condition()

    # Per-step hooks; this test only needs input handling and drawing.
    def _control_input(self): self.monitor.control_input()
    def _move_environment(self): pass
    def _collide_environment(self): pass
    def _move_enemies(self): pass
    def _collide_enemies(self): pass
    def _kill_enemies(self): pass
    def _move_character(self): pass
    def _collide_character(self): pass
    def _kill_character(self): pass
    def _calculate_fitness_function_iteration(self): pass

    def _draw_all(self): self.monitor.draw(environment=[], enemies=[], character=self.character)

    def _write_data_on_screen(self):
        self.monitor.write_data_on_screen("Test e:{}, i:{}".format(self.epoch, self.iteration))

    def _check_loop_iteration_condition(self):
        # Stop the inner loop after the configured number of iterations.
        self.iteration += 1
        if self.iteration >= self.genetic_algorithm_params.get_max_iteration():
            self.is_iteration_work = False

    def _calculate_fitness_function_epoch(self): pass
    def _save_character_data(self) -> None: pass
    def _create_new_population(self) -> None: pass

    def _check_loop_epoch_condition(self) -> None:
        self._count_epoch()
        self._check_stop_epoch_condition()

    def _count_epoch(self) -> None:
        self.epoch += 1

    def _check_stop_epoch_condition(self) -> None:
        # Stop after the configured number of epochs; otherwise re-arm the
        # inner iteration loop for the next epoch.
        if self.epoch >= self.genetic_algorithm_params.get_max_epoch():
            self.is_epoch_work = False
        else:
            self.is_iteration_work = True
            self.iteration = 0
|
NoOneZero/Neuro | test_4_move_round/geneticAlgorithmParamsTest.py | from common.geneticAlgorithmParams import GeneticAlgorithmParams
class GeneticAlgorithmParamsTest(GeneticAlgorithmParams):
    """GA hyper-parameters for test 4 ("move round")."""

    def __init__(self) -> None:
        super(GeneticAlgorithmParamsTest, self).__init__()
        self.max_iteration = 1000          # iterations per epoch
        self.max_epoch = 1000              # number of epochs
        self.start_population = 10         # total number of individuals
        # Survivors kept after an epoch.  BUGFIX: integer division -- the
        # old ``* 0.5`` produced a float, which breaks range()/indexing
        # whenever the value is used as a count.
        self.count_of_alive_after_epoch = self.start_population // 2
        self.mutation_power = 0.01         # per-gene mutation probability, in [0, 1]
        self.crossover_point_number = 2    # number of crossover points (usually 1, up to 10)
        self.number_of_parents_couples = self.start_population  # couples producing offspring
        self.web_layers = [6, 4]           # neural net layer sizes

    def get_max_epoch(self): return self.max_epoch
    def get_max_iteration(self): return self.max_iteration
    def get_start_population(self): return self.start_population
    def get_count_of_alive_after_epoch(self): return self.count_of_alive_after_epoch
    def get_mutation_power(self): return self.mutation_power
    def get_crossover_point_number(self): return self.crossover_point_number
    def get_number_of_parents_couples(self): return self.number_of_parents_couples
    def get_web_layers(self): return self.web_layers
|
NoOneZero/Neuro | test_4_move_round/characterTest.py | from common.web import Web
from common.character import Character
class CharacterTest(Character):
    """Character for test 4: adds an explicit per-test fitness accumulator."""

    # Population bookkeeping for this test.  BUGFIX: __init__ below updates
    # CharacterTest's own class attributes; the old code declared them here
    # but incremented/appended on the Character base class, leaving
    # ``CharacterTest.counter`` stuck at 0 and ``characters_all`` empty.
    counter = 0
    characters_all = []

    def __init__(self, person=None, web=None) -> None:
        super(CharacterTest, self).__init__(person, web)
        self.person = person
        # Keep the supplied net, or start from a fresh randomized one.
        self.web = web or Web(randomize=1)
        self.fitness = 0
        CharacterTest.counter += 1
        CharacterTest.characters_all.append(self)

    def add_fitness(self, value):
        """Accumulate *value* into this character's fitness."""
        self.fitness += value

    def reset_fitness(self):
        self.fitness = 0

    def get_fitness(self):
        return self.fitness

    def reset_person(self):
        pass
|
NoOneZero/Neuro | test_1_lineral/activityTest.py | from common.activity import Activity
from common.dataWriter import DataWriter
from common.geneticAlgorithmParams import GeneticAlgorithmParams
from common.monitor import Monitor
from common.web import Web
from test_1_lineral.personTest import PersonTest
from test_1_lineral.characterTest import CharacterTest
from test_1_lineral.geneticAlgorithmParamsTest import GeneticAlgorithmParamsTest
from test_1_lineral.frame import Frame
import random
class ActivityTest(Activity):  # test 1 lineral
    """Activity for test 1 ("lineral"): several framed arenas, each with a
    goal point the characters must learn to reach."""

    def __init__(self, monitor: Monitor = None,
                 genetic_algorithm_params: GeneticAlgorithmParams = None,
                 data_writer: DataWriter = None):
        # BUGFIX: the old signature used ``= GeneticAlgorithmParamsTest()``
        # as the default, which is evaluated once at definition time, so
        # every ActivityTest silently shared one mutable params object.
        if genetic_algorithm_params is None:
            genetic_algorithm_params = GeneticAlgorithmParamsTest()
        # Three arenas: top-left corners, bottom-right corners, and the
        # resulting width/height of each arena.
        self.start_pos = [[100, 100], [1200, 100], [2300, 100]]
        self.end_pos = [[1100, 1100], [2200, 1100], [3300, 1100]]
        self.distance = [[e[0] - s[0], e[1] - s[1]]
                         for s, e in zip(self.start_pos, self.end_pos)]
        # Goal of each arena, relative to its top-left corner and in
        # absolute screen coordinates.
        self.goal_relative = [[900, 800], [300, 750], [200, 400]]
        self.goal_absolute = [[g[0] + s[0], g[1] + s[1]]
                              for s, g in zip(self.start_pos, self.goal_relative)]
        super(ActivityTest, self).__init__(monitor, genetic_algorithm_params, data_writer)

    def _environment_init(self) -> None:
        """Create one Frame per arena."""
        self.environment = []
        for i in range(len(self.start_pos)):
            self.environment.append(Frame(start_pos=self.start_pos[i],
                                          end_pos=self.end_pos[i],
                                          goal_pos=self.goal_absolute[i]))

    def _character_init(self) -> None:
        """Create the population; each person spawns at a random spot.

        Dimension j maps to arena ``j // 2``, axis ``j % 2``.
        """
        self.character = []
        for i in range(self.genetic_algorithm_params.get_start_population()):
            position = []
            for j in range(self.genetic_algorithm_params.get_dimension()):
                position.append(random.randint(self.start_pos[j // 2][j % 2],
                                               self.end_pos[j // 2][j % 2]))
            web = Web(self.genetic_algorithm_params.get_web_layers(), randomize_power=1)
            person = PersonTest(color=[random.randint(10, 20),
                                       random.randint(0, 50),
                                       random.randint(0, 50)],
                                position=position,
                                size=10,
                                default_position_range=[self.start_pos, self.end_pos])
            self.character.append(CharacterTest(person=person, web=web))

    def _move_character(self):
        """Let every character think (via its web) and then move."""
        for ch in self.character:
            ch.calculate(self.start_pos, self.end_pos, self.goal_relative)
            ch.move()

    def _write_data_on_screen(self):
        """Show the GA hyper-parameters and the epoch/iteration counters."""
        self.monitor.write_data_on_screen(
            "layers: {}, popul: {}, elita: {}, alive_after_epoch: {} mutation: {:.3%} |||| e:{}, i:{}"
            .format(self.genetic_algorithm_params.get_web_layers(),
                    self.genetic_algorithm_params.get_start_population(),
                    self.genetic_algorithm_params.get_count_elitism(),
                    self.genetic_algorithm_params.get_count_of_alive_after_epoch(),
                    self.genetic_algorithm_params.get_mutation_probability(),
                    self.epoch, self.iteration))

    def _draw_all(self):
        self.monitor.draw(environment=self.environment, enemies=[], character=self.character)

    def _calculate_fitness_function_iteration(self):
        """Score every character against its arena's goal."""
        for ch in self.character:
            ch.calculate_fitness(self.goal_absolute,
                                 frame=[self.start_pos, self.end_pos],
                                 geneticAlgorithmParamsTest=self.genetic_algorithm_params)
|
NoOneZero/Neuro | cars/Beast.py | <reponame>NoOneZero/Neuro
import random
import pygame
import math
import config
class Beast:
    """A round creature steered by a neural net.

    It can spin and accelerate, dies on lasers/walls/enemies, accumulates
    fitness, and draws itself as concentric circles with an "eye" marking
    its heading.  Also reused (with ``only_circle=True``) for enemies.
    """

    def __init__(self, position = [10, 10], floor = 100, ceiling = 900, speed = [0, 0], size = 20, color = [200, 140, 70], only_circle = False):
        # NOTE(review): mutable default arguments; currently harmless
        # because set_params copies them element-wise, but fragile.
        self.set_params(position, floor, ceiling, speed, size, color ,only_circle)

    def set_params(self, position = [10, 10], floor = 100, ceiling = 900, speed = [0, 0], size = 20, color = [200, 140, 70], only_circle = False):
        """(Re)initialize the full state; also used by reset_position()."""
        self.position = [position[0], position[1]]  # copied so callers' lists are not aliased
        # Vertical bounds: floor is the smaller y, ceiling the larger y
        # (pygame's y axis grows downward) -- see possible_to_die_from_frame.
        self.floor = floor
        self.ceiling = ceiling
        self.angle = math.pi/2      # heading, radians
        self.angle_speed = 0
        self.angle_acceleration = 0
        self.speed = [speed[0], speed[1]]
        self.acceleration = [0, 0]
        self.color = [color[0], color[1], color[2]]
        self.color_big = [0,0,0]            # outline circle color (alive)
        self.color_eye = [250, 250, 250]    # eye color (alive)
        self.color_dead = [120,120,120]     # body color when dead
        self.color_dead_big = [80,80,80]    # outline color when dead
        self.color_dead_eye = [50,50,50]    # eye color when dead
        self.size = size
        self.size_small = self.size - 2     # inner circle radius
        self.size_eye = self.size / 3
        self.offset_eye = self.size * 0.6   # eye distance from center along heading
        self.rule_spin_acceleration = 0.004         # angular acceleration per spin command
        self.rule_acceleration_forward = 0.1        # forward thrust per command
        self.rule_acceleration_back = 0.02          # reverse thrust per command
        self.life = True
        self.only_circle = only_circle      # draw as a plain circle (enemies)
        self.life_time = 0                  # frames survived so far
        self.reset_position_value = [position[0], position[1]]  # respawn point
        self.reset_floor = floor
        self.reset_ceiling = ceiling
        self.dead_show_counter_standart = 25    # frames a corpse stays visible
        self.dead_show_counter = 25

    def calculate_move(self, neuro, distance):
        """Feed heading + sensor distances into *neuro*, apply its commands."""
        if self.life:
            y,x = math.sin(self.angle), math.cos(self.angle)
            result = neuro.calculate_all([x, y , *distance])
            # Each output neuron becomes a boolean movement command.
            result = self.__convert_to_bool(result)
            self.__rule(*result)

    def __convert_to_bool(self, result):
        # Threshold the net outputs at zero: positive -> command active.
        for i in range(len(result)):
            if result[i] > 0: result[i] = True
            else: result[i] = False
        return result

    def __rule(self, clockwise = False, counterclockwise = False, forward = False, back = False):
        """Translate boolean commands into angular/linear acceleration."""
        if clockwise: self.angle_acceleration += self.rule_spin_acceleration
        if counterclockwise: self.angle_acceleration -= self.rule_spin_acceleration
        if forward:
            self.acceleration[0] += self.rule_acceleration_forward * math.cos(self.angle)
            self.acceleration[1] += self.rule_acceleration_forward * math.sin(self.angle)
        if back:
            self.acceleration[0] -= self.rule_acceleration_back * math.cos(self.angle)
            self.acceleration[1] -= self.rule_acceleration_back * math.sin(self.angle)

    def possible_to_die_from_frame(self, lazer_of_death):
        """Die when the next step would cross a laser or the floor/ceiling."""
        if self.life:
            if self.position[0] + self.speed[0] - self.size <= lazer_of_death[0].position: self.__die()
            if self.position[0] + self.speed[0] + self.size >= lazer_of_death[1].position: self.__die()
            if self.position[1] + self.speed[1] + self.size >= self.ceiling: self.__die()
            if self.position[1] + self.speed[1] - self.size <= self.floor: self.__die()

    def possible_to_die_from_enemies(self, enemies):
        """Die when overlapping any enemy circle."""
        if self.life:
            for i in range(len(enemies)):
                if math.sqrt((enemies[i].position[0] - self.position[0])**2 +
                             (enemies[i].position[1] - self.position[1])**2) < self.size + enemies[i].size :
                    self.__die()

    def __die(self): self.life = False

    def get_life_time(self): return self.life_time

    def update_life_time(self):
        # While alive, count frames survived; once dead, count down the
        # frames the corpse remains drawn.
        if self.life:
            self.life_time += 1
            self.dead_show_counter = self.dead_show_counter_standart
        else:
            self.dead_show_counter -= 1

    def update_fitnes_walls(self, lazer_of_death):
        """Return the squared distance to the nearest wall/laser.

        Rewards staying near the middle of the playable area; 0 when dead.
        """
        fitnes_result = 0
        if self.life:
            fitnes_result = min(abs(self.position[0] - lazer_of_death[0].position),
                                abs(self.position[0] - lazer_of_death[1].position)) # todo + self.size
            fitnes_result = min(fitnes_result,
                                abs(self.position[1] + self.speed[1] + self.size - self.ceiling),
                                abs(self.position[1] + self.speed[1] - self.size - self.floor))
        return fitnes_result * fitnes_result

    def update_fitnes_enemies(self, enemies):
        """Return the edge-to-edge distance to the nearest enemy (0 when dead).

        NOTE(review): assumes ``enemies`` is non-empty while alive.
        """
        fitnes_result = 0
        if self.life:
            min_dist = math.sqrt((enemies[0].position[0] - self.position[0])**2 +(enemies[0].position[1] - self.position[1])**2) - self.size - enemies[0].size
            for i in range(1, len(enemies)):
                dist = math.sqrt((enemies[i].position[0] - self.position[0])**2 +(enemies[i].position[1] - self.position[1])**2) - self.size - enemies[i].size
                if dist < min_dist:
                    min_dist = dist
            fitnes_result = min_dist
        return fitnes_result

    def update(self, camera_move = None):
        """Integrate one physics step, then apply camera scrolling."""
        if self.life:
            self.angle += self.angle_speed
            self.angle_speed += self.angle_acceleration
            self.angle_speed *= 0.98    # angular friction
            self.angle_acceleration = 0
            # Bounce off ceiling/floor by reflecting the vertical speed.
            if self.position[1]+self.speed[1]+self.size>=self.ceiling: self.speed[1] = -abs(self.speed[1])
            if self.position[1]+self.speed[1]-self.size<=self.floor: self.speed[1] = abs(self.speed[1])
            for i in range(len(self.position)):
                self.position[i] = self.position[i] + self.speed[i]
                self.speed[i] = self.speed[i] + self.acceleration[i]
                self.speed[i] *= 0.993   # linear friction
                self.acceleration[i] = 0
        # Camera scroll shifts the beast (corpses included) with the world.
        # NOTE(review): source indentation was flattened; confirm this
        # block is meant to run for dead beasts too.
        if camera_move != None and len(camera_move) == 2:
            self.position[0] -= camera_move[0]
            self.position[1] -= camera_move[1]

    def draw(self, display):
        """Draw the beast (or its fading corpse) on *display*."""
        if self.dead_show_counter > 0:
            if self.only_circle:
                pygame.draw.circle(display, self.color, [int(self.position[0]), int(self.position[1])], int(self.size))
            else:
                if self.life:
                    pygame.draw.circle(display, self.color_big, [int(self.position[0]), int(self.position[1])], int(self.size))
                    if self.size_small > 0: pygame.draw.circle(display, self.color, [int(self.position[0]), int(self.position[1])], int(self.size_small))
                    pygame.draw.circle(display, self.color_eye, [int(self.position[0] + self.offset_eye * math.cos(self.angle)), int(self.position[1] + self.offset_eye * math.sin(self.angle))], int(self.size_eye))
                else:
                    pygame.draw.circle(display, self.color_dead_big, [int(self.position[0]), int(self.position[1])], int(self.size))
                    if self.size_small > 0: pygame.draw.circle(display, self.color_dead, [int(self.position[0]), int(self.position[1])], int(self.size_small))
                    pygame.draw.circle(display, self.color_dead_eye, [int(self.position[0] + self.offset_eye * math.cos(self.angle)), int(self.position[1] + self.offset_eye * math.sin(self.angle))], int(self.size_eye))

    def reset_position(self):
        """Respawn at the stored start point, jittered by the configured offset."""
        self.set_params(self.reset_position_value, self.reset_floor, self.reset_ceiling)
        # Product of two uniforms biases the jitter toward the spawn point.
        self.position[0] += random.uniform(-1, 1) * random.uniform(-1, 1) * config.CAR_BALL_POSITION_DEFAULT_OFFSET[0]
        self.position[1] += random.uniform(-1, 1) * random.uniform(-1, 1) * config.CAR_BALL_POSITION_DEFAULT_OFFSET[1]
NoOneZero/Neuro | cars/Scene_car_test.py | import pygame
import random
import config
from Scene import Scene
from cars.Beast import Beast
from cars.Lazer_of_death import Lazer_of_death
from cars.character_beast import Character_beast
class Scene_car_test(Scene):
    """Cars scenario: beasts must survive between two "lasers of death"
    while round enemies scroll past with the camera."""

    # Rolling history (last 20 epochs) of how many beasts survived.
    alive = []

    def __init__(self):
        super().__init__()

    def init_battlefield(self):
        """Create the camera, vertical bounds, lasers and scrolling enemies."""
        super(Scene_car_test, self).init_battlefield()
        self.camera_move = config.CAR_CAMERA_MOVE_SPEED
        self.floor = config.CAR_FLOOR_POSITION
        self.ceiling = config.CAR_CEILING_POSITION
        # Left laser at a fixed x; right laser mirrored from the screen edge.
        self.lazer_of_death = [Lazer_of_death(config.CAR_LAZER_OF_DEATH_POSITION), Lazer_of_death(self.display_size[0] - config.CAR_LAZER_OF_DEATH_POSITION_REVERSE)]
        self.enemies_number = config.CAR_ENEMIES_NUMBER
        self.enemies = []
        for i in range(self.enemies_number):
            # Enemies are plain circles spread to the right of the start area.
            self.enemies.append(
                Beast(position=[1000 + i * 300 + random.randrange(0, 500), random.randrange(self.floor, self.ceiling)],
                      size=random.randrange(50, 200), only_circle=True,
                      color=[random.randrange(1,100), random.randrange(100, 255), random.randrange(255)]))

    def init_character(self):
        """Create the beast population at the default spawn point."""
        super(Scene_car_test, self).init_character()
        self.ball_number_of_parts = config.START_POPULATION
        self.ball_position_default = config.CAR_BALL_POSITION_DEFAULT
        self.character = []
        for i in range(self.ball_number_of_parts):
            self.character.append(Character_beast(person= Beast([self.ball_position_default[0],
                                                                 self.ball_position_default[1]],
                                                                self.floor,
                                                                self.ceiling,
                                                                )))
        self.number_of_life = 0

    def rule(self):
        # Let every beast's net decide its next move.
        for i in range(len(self.character)):
            self.character[i].calculate_move(self.lazer_of_death, self.ceiling, self.floor, self.enemies)

    def update(self):
        """Advance one frame: enemies, lasers, beasts, then window caption."""
        self.__update_enemies()
        self.__update_lazer_of_death()
        self.__update_beast()
        self.__update_display()

    def __update_enemies(self):
        # Scroll enemies with the camera; when one leaves the left edge,
        # respawn it ahead (to the right) with a fresh size and color.
        for i in range(len(self.enemies)):
            self.enemies[i].update(self.camera_move)
            if self.enemies[i].position[0] + self.enemies[i].size < 0:
                self.enemies[i].position = [2000 + i * 300 + random.randrange(0, 500) + self.camera_move[0], random.randrange(self.floor, self.ceiling)]
                self.enemies[i].size = random.randint(20, 150)
                self.enemies[i].color=[random.randrange(128,256), random.randrange(0, 155), random.randrange(0, 125)]

    def __update_lazer_of_death(self):
        for i in range(len(self.lazer_of_death)):
            self.lazer_of_death[i].update()

    def __update_beast(self):
        # Step every character and refresh the alive counter.
        for i in range(len(self.character)):
            self.character[i].update(self.camera_move, self.lazer_of_death, self.enemies)
        self.number_of_life = Character_beast.how_many_alive()

    def __update_display(self):
        # Window caption: alive/total, iteration, epoch (labels in
        # Ukrainian) plus the recent survivor history.
        pygame.display.set_caption("Жив:{0:4}/{1}, Ітер:{2:5} Еп:{3:5} | Alive:"
                                   .format(self.number_of_life,
                                           config.START_POPULATION,
                                           self.iteration,
                                           self.epoch,
                                           )
                                   + str(Scene_car_test.alive))

    def exit_iteration(self):
        """End the iteration early once every beast is dead."""
        super().exit_iteration()
        self.number_of_life = Character_beast.how_many_alive()
        if self.number_of_life < 1:
            self.run_iteration = False

    def evolution_operation(self):
        """End-of-epoch step: reshuffle far-left enemies, grant survival
        bonuses, record the alive history, and evolve the population."""
        for i in range(len(self.enemies)):
            self.enemies[i].update(self.camera_move)
            # Respawn every enemy still left of x = 1500 for the next epoch.
            if self.enemies[i].position[0] + self.enemies[i].size < 1500:
                self.enemies[i].position = [2000 + i * 300 + random.randrange(0, 500) + self.camera_move[0], random.randrange(self.floor, self.ceiling)]
                self.enemies[i].size = random.randint(20, 150)
        for i in range(len(self.character)):
            self.character[i].epoch_done()
        Scene_car_test.alive.append(Character_beast.how_many_alive())
        if len(Scene_car_test.alive) > 20: Scene_car_test.alive.pop(0)
        Character_beast.calculate_end_epoch_custom()

    def _draw_all(self):
        """Draw everything back-to-front: enemies, beasts, frame, lasers."""
        self.draw_enemies()
        self.draw_characters()
        self.draw_frame()
        self.draw_lazer_of_death()

    def draw_enemies(self):
        for i in range(len(self.enemies)):
            self.enemies[i].draw(self.display)

    def draw_characters(self):
        for i in range(len(self.character)):
            # NOTE(review): ``i % 1 == 0`` is always true (every character
            # is drawn); presumably a leftover sampling stride -- confirm.
            if i % 1==0:
                self.character[i].draw(self.display)

    def draw_frame(self, frame_size = 4, color = [255, 0, 255]):
        # Floor and ceiling lines, drawn far beyond the screen so camera
        # scrolling never reveals their ends.
        pygame.draw.line(self.display, color, [-1000 - self.camera_move[0], self.floor - self.camera_move[1]], [self.display_size[0] * 1000 - self.camera_move[0], self.floor - self.camera_move[1]], frame_size)
        pygame.draw.line(self.display, color, [-1000 - self.camera_move[0], self.ceiling - self.camera_move[1]], [self.display_size[0] * 1000 - self.camera_move[0], self.ceiling - self.camera_move[1]], frame_size)

    def draw_lazer_of_death(self):
        for i in range(len(self.lazer_of_death)):
            self.lazer_of_death[i].draw(self.display)
NoOneZero/Neuro | cars/character_beast.py | <filename>cars/character_beast.py
from character import Character
import random
import config
import math
class Character_beast(Character):
def __init__(self, person = None, web = None):
super().__init__(person, web)
if Character_beast.iterator == 1: person.frame_color = [0, 0, 255]
# if Character_beast.iterator == 998: person.color = [0, 0, 255]
# if Character_beast.iterator == 999: person.color = [0, 255, 0]
# if Character_beast.iterator == 1000: person.color = [255, 0, 0]
def calculate_move(self, lazer_of_death, ceiling, floor, enemies):
lazer_distance = lazer_of_death[1].position - lazer_of_death[0].position
distance_to_left_lazer_normed = (self.person.position[0] - lazer_of_death[0].position) / lazer_distance
distance_to_right_lazer_normed = (lazer_of_death[1].position - self.person.position[0]) / lazer_distance
wall_distance = ceiling - floor
distance_to_up_wall_normed = (ceiling - self.person.position[1]) / wall_distance
distance_to_down_wall_normed = (self.person.position[1] - floor) / wall_distance
enemies_distances = {}
for i in range(len(enemies)):
enemies_distances[math.sqrt((self.person.position[0] - enemies[i].position[0] - enemies[i].size)**2 +
(self.person.position[1] - enemies[i].position[1] - enemies[i].size)**2)] = i
keys = sorted(enemies_distances)
enem_dis_and_angle = []
for i in range(config.CAR_COUNT_OF_IMPORTANT_ENEMIES_TO_NEURO):
this_key = enemies_distances[keys[-i]]
enem_dis_and_angle.append( math.sqrt((self.person.position[0] - enemies[this_key].position[0] - enemies[i].size)**2 +
(self.person.position[1] - enemies[this_key].position[1] - enemies[i].size)**2)
/ lazer_distance)
angle = math.atan2(self.person.position[1] - enemies[this_key].position[1],
self.person.position[0] - enemies[this_key].position[0])
# angle -= self.person.angle
y = math.sin(angle)
x = math.cos(angle)
enem_dis_and_angle.append(y)
enem_dis_and_angle.append(x)
self.person.calculate_move(self.web, [distance_to_left_lazer_normed,
distance_to_right_lazer_normed,
distance_to_up_wall_normed,
distance_to_down_wall_normed,
*enem_dis_and_angle
])
def update(self, camera_move, lazer_of_death, enemies):
self.person.possible_to_die_from_frame(lazer_of_death)
# self.person.possible_to_die_from_enemies(enemies)
self.person.update_life_time()
self.fitnes += self.person.update_fitnes_walls(lazer_of_death) * config.CAR_HOW_MANY_COST_BE_IN_MIDDLE
# self.fitnes += self.person.update_fitnes_enemies(enemies) * config.CAR_HOW_MANY_COST_BE_FAR_FROM_ENEMIES
self.person.update(camera_move)
def draw(self, display):
self.person.draw(display)
@staticmethod
def how_many_alive():
counter = 0
for i in range(len(Character_beast.characters_all)):
if Character_beast.characters_all[i].person.life:
counter += 1
return counter
def epoch_done(self):
if self.person.life:
self.fitnes += config.CAR_HOW_MANY_COST_ALIVE
@staticmethod
def calculate_end_epoch_custom():
Character_beast.wayOfLife_simple_crossover()
# super().calculate_end_epoch()
Character_beast._reset_position()
Character_beast._remake_the_same()
@staticmethod
def wayOfLife_copy_neuro_from_random_alive_to_dead():
life_beast = []
for i in range(len(Character_beast.characters_all)):
if Character_beast.characters_all[i].person.life:
life_beast.append(Character_beast.characters_all[i])
if len(life_beast) < 1:
life_beast.append(Character_beast.characters_all[0])
life_beast[0].person.neuro.randomize(1)
for i in range(len(Character_beast.characters_all)):
if not Character_beast.characters_all[i].person.life:
Character_beast.characters_all[i].web = random.choice(life_beast).web.new_randomize_deep_copy()
@staticmethod
def wayOfLife_copy_neuro_from_best_to_dead():
life_beast = Character_beast.characters_all[0]
for i in range(1, len(Character_beast.characters_all)):
if Character_beast.characters_all[i].fitnes > life_beast.fitnes:
life_beast = Character_beast.characters_all[i]
for i in range(len(Character_beast.characters_all)):
if not Character_beast.characters_all[i].person.life:
Character_beast.characters_all[i].web = life_beast.web.new_randomize_deep_copy()
@staticmethod
def wayOfLife_copy_neuro_from_best_to_all():
life_beast = 0
for i in range(1, len(Character_beast.characters_all)):
if Character_beast.characters_all[i].fitnes > Character_beast.characters_all[life_beast].fitnes:
life_beast = i
web = Character_beast.characters_all[life_beast].web.new_randomize_deep_copy(randomize_power= 0)
for i in range(len(Character_beast.characters_all)):
Character_beast.characters_all[i].web = web.new_randomize_deep_copy()
    @staticmethod
    def wayOfLife_simple_crossover():
        """Run one genetic-algorithm step: selection, survival, offspring, mutation."""
        Character_beast._make_parents_roulette_not_unique()
        Character_beast._make_who_not_die()
        Character_beast._make_new_population()
        Character_beast._make_mutation()
NoOneZero/Neuro | cars/Lazer_of_death.py | import random
import pygame
from cars.Beast import Beast
class Lazer_of_death:
    """A vertical 'laser of death' line that sweeps horizontally across the field."""

    def __init__(self, position=50):
        # Current x-coordinate of the laser line.
        self.position = position

    def update(self, add_position=0):
        """Shift the laser horizontally by *add_position* pixels."""
        self.position = self.position + add_position

    def draw(self, display):
        """Render the laser as a red vertical line spanning the screen height."""
        pygame.draw.line(display, [255, 0, 0], [self.position, 0], [self.position, 2000], 4)
NoOneZero/Neuro | test_1_lineral/geneticAlgorithmParamsTest.py | from common.geneticAlgorithmParams import GeneticAlgorithmParams
class GeneticAlgorithmParamsTest(GeneticAlgorithmParams):
    """Parameter set for the 'lineral' (test 1) genetic-algorithm experiment.

    Extends GeneticAlgorithmParams with experiment-specific knobs
    (dimension, goal changing, spin, fitness weighting). The commented
    lists below document the value grids the base class exposes.
    """
    # list_start_population = [16, 64, 256, 1024, 4096]
    #
    # list_count_elitism = [0, 0.001, 0.01, 0.1, 0.2]
    # list_count_of_alive_after_epoch = [0.03125, 0.0625, 0.125, 0.25, 0.5]
    # list_random_kid = [0, 0.001, 0.01, 0.1, 0.2]
    #
    # list_mutation_probability = [0, 0.0001, 0.001, 0.01, 0.1]
    #
    # list_type_selection = ["roulette", "tournament", "rank", "proportional"]
    # list_type_make_new_population = ["cross 1", "cross 2", "cross 3", "cross 4", "random", "lineral", "one father"]
    # list_type_breeding = ["simple", "outbreeding genotype", "outbreeding phenotype", "inbreeding genotype", "inbreeding phenotype", "adaptive+1 breeding type gen", "not family breeding"]
    # Allowed values for the experiment-specific parameters chosen in __init__.
    list_dimension = [1, 2, 4, 6]
    list_with_goal_changing = [0, 1]
    list_with_spin = [0, 1]
    list_out_data_for_one_dimension = [1, 2, 3]
    list_fitness_way_distance = [0, 1]  # fitness_distance += 1/(1+x)
    list_fitness_way_side = [0, 1]  # fitness_side_x or y += 1
    list_fitness_way_vector = [0, 1]  # fitness_vector to goal += from -1 to 1
    list_fitness_way_kill_all_unwanted = [0, 1]  # fitness_vector_speed += from -1 to 1 speed_absolute/dis_absolute (if speed_absolute>dis_absolute 0)
    def __init__(self) -> None:
        super(GeneticAlgorithmParamsTest, self).__init__()
        self._max_iteration = 1000  # number of iterations in one epoch
        self._max_epoch = 1000  # number of epochs
        self._start_population = 1000  # total population size (was GeneticAlgorithmParams.list_start_population[3])
        self._count_elitism = GeneticAlgorithmParams.list_count_elitism[0]
        self._count_of_alive_after_epoch = int(self._start_population * GeneticAlgorithmParams.list_count_of_alive_after_epoch[4])  # number of survivors once the previous round has finished
        self._random_kid = GeneticAlgorithmParams.list_random_kid[0]
        self._mutation_probability = GeneticAlgorithmParams.list_mutation_probability[2]  # probability of mutating a gene, in [0, 1]
        self._type_selection = GeneticAlgorithmParams.list_type_selection[0]
        self._type_make_new_population = GeneticAlgorithmParams.list_type_make_new_population[1]
        self._type_breeding = GeneticAlgorithmParams.list_type_breeding[0]
        self._dimension = GeneticAlgorithmParamsTest.list_dimension[1]
        self._with_goal_changing = GeneticAlgorithmParamsTest.list_with_goal_changing[0]
        self._with_spin = GeneticAlgorithmParamsTest.list_with_spin[0]
        self._out_data_for_one_dimension = GeneticAlgorithmParamsTest.list_out_data_for_one_dimension[0]
        self._fitness_way_distance = GeneticAlgorithmParamsTest.list_fitness_way_distance[1]
        self._fitness_way_side = GeneticAlgorithmParamsTest.list_fitness_way_side[1]
        self._fitness_way_vector = GeneticAlgorithmParamsTest.list_fitness_way_vector[1]
        self._fitness_way_kill_all_unwanted = GeneticAlgorithmParamsTest.list_fitness_way_kill_all_unwanted[1]
        # Network topology: dimension inputs, dimension * out_data outputs.
        self._web_layers = [self._dimension, self._dimension * self._out_data_for_one_dimension]
    # Simple accessors for the experiment-specific parameters.
    def get_dimension(self): return self._dimension
    def get_with_goal_changing(self): return self._with_goal_changing
    def get_with_spin(self): return self._with_spin
    def get_out_data_for_one_dimension(self): return self._out_data_for_one_dimension
    def get_fitness_way_distance(self): return self._fitness_way_distance
    def get_fitness_way_side(self): return self._fitness_way_side
    def get_fitness_way_vector(self): return self._fitness_way_vector
    def get_fitness_way_kill_all_unwanted(self): return self._fitness_way_kill_all_unwanted
|
NoOneZero/Neuro | main.py | <gh_stars>0
import common.activity as test
import test_1_lineral.activityTest as test1
import test_2_spin.activityTest as test2
import test_3_lineral_and_spin.activityTest as test3
import test_4_move_round.activityTest as test4
def main():
    """Entry point: run the selected experiment's activity loop."""
    # Swap in another testN module (or common test.Activity()) to run a
    # different experiment.
    activity = test1.ActivityTest()
    activity.loop()
if __name__ == '__main__':
    main()
|
NoOneZero/Neuro | test_4_move_round/personTest.py | <reponame>NoOneZero/Neuro<gh_stars>0
from common.person import Person
class PersonTest(Person):
    """Person subclass for the move-round (test 4) experiment; no overrides yet."""
    def __init__(self) -> None:
        super().__init__()
NoOneZero/Neuro | else/config.py | FILE_NAME = 1 # Імя файла для запису даних
SHOW_INFO = True
DISPLAY_POSITION = (50,30)
DISPLAY_SIZE = (1700, 1030)
DISPLAY_COLOR = (48, 189, 221)
FPS = 240
MAX_COUNTER = 3000  # number of iterations in one epoch
MAX_EPOCH = 1000  # number of epochs
START_POPULATION = 10  # total number of individuals
COUNT_OF_ALIVE_AFTER_EPOCH = 10  # number of survivors once the previous round has finished
MUTATION_PROBABILITY = 0.01  # probability of mutating a gene, in [0, 1]
CROSSOVER_POINT_NUMBER = 2  # number of crossover points; usually 1, can be up to 10
NUMBER_OF_PARENTS_COUPLES = START_POPULATION
WEB_LAYERS = [6,4]  # alternatives tried: [6+9, 6, 4], [6+3*3,4], [6,6,4]
#----------------- CAR ------------------
CAR_CAMERA_MOVE_SPEED = [2, 0]  # alternative: [0, 0]
CAR_FLOOR_POSITION = 100
CAR_CEILING_POSITION = 900
CAR_LAZER_OF_DEATH_POSITION = 50
CAR_LAZER_OF_DEATH_POSITION_REVERSE = 50
CAR_ENEMIES_NUMBER = 14
CAR_COUNT_OF_IMPORTANT_ENEMIES_TO_NEURO = 0
CAR_BALL_POSITION_DEFAULT = [800,500]
CAR_BALL_POSITION_DEFAULT_OFFSET = [200, 200]
CAR_HOW_MANY_COST_ALIVE = 10000
CAR_HOW_MANY_COST_BE_IN_MIDDLE = 0.0001
CAR_HOW_MANY_COST_BE_FAR_FROM_ENEMIES = 0.0001
#----------------------------------------
Shura1oplot/sqluploadergen | sqluploadergen.py | <filename>sqluploadergen.py
#!/usr/bin/env python3
import sys
import os
import tempfile
import shutil
import subprocess
import openpyxl
# Map SQL Server column type names to the .NET type names used in the
# generated C# uploader source.
MAP_TYPES = {
    "bigint": "Int64",
    "bit": "bool",
    "char": "string",
    "date": "DateTime",
    "datetime": "DateTime",
    "datetime2": "DateTime",
    "datetimeoffset": "DateTimeOffset",
    "decimal": "decimal",
    "float": "double",
    "int": "Int32",
    "money": "decimal",
    "nchar": "string",
    "ntext": "string",
    "numeric": "decimal",
    "nvarchar": "string",
    "real": "single",
    "smalldatetime": "DateTime",
    "smallint": "Int16",
    "smallmoney": "decimal",
    "text": "string",
    "time": "TimeSpan",
    "tinyint": "byte",
    "varchar": "string",
}
# Map .NET type names to the Helpers.* string-conversion methods that the
# generated Format.cs calls for each column.
MAP_CONVS = {
    "byte": "ConvertStringToByte",
    "Int16": "ConvertStringToShort",
    "Int32": "ConvertStringToInt",
    "Int64": "ConvertStringToLong",
    "single": "ConvertStringToFloat",
    "double": "ConvertStringToDouble",
    "decimal": "ConvertStringToDecimal",
    "string": "ConvertStringToString",
    "DateTime": "ConvertStringToDateTime",
    "TimeSpan": "ConvertStringToTimeSpan",
    # not implemented:
    # "DateTimeOffset": "",
}
def main(argv=sys.argv):
    """Generate the DB init script and per-table uploader executables.

    Reads the spec workbook named on the command line: the ``_spec_`` sheet
    holds connection settings; every other sheet describes one table (one
    row per column: name, SQL type, optional type parameters). Next to the
    spec file this emits ``1_init_db.sql``, builds one
    ``bcpstream_<table>.exe`` per table via ``dotnet publish``, copies the
    helper tools and writes the ``2_upload.bat`` driver script.

    Returns the process exit code: 0 on success, 1 on a spec error.
    """
    # When frozen (cx_Freeze), the assets live next to the executable,
    # not next to the script.
    if getattr(sys, "frozen", False):
        prog_name = os.path.basename(sys.executable)
        prog_dir = os.path.dirname(sys.executable)
    else:
        prog_name = os.path.basename(argv[0])
        prog_dir = os.path.dirname(argv[0])
    if len(argv) != 2:
        print(f"usage: {prog_name} specfile.xlsx", file=sys.stderr)
        return 1
    spec_file_name = argv[1]
    spec_file_dir = os.path.dirname(spec_file_name)
    # data_only=True makes formula cells yield their cached values.
    wb = openpyxl.load_workbook(spec_file_name, read_only=True, data_only=True)
    spec = wb["_spec_"]
    server = spec["B2"].value
    database = spec["B3"].value
    domain_auth = spec["B4"].value
    login = spec["B5"].value
    password = spec["B6"].value
    delimiter = spec["B7"].value
    if not server:
        print("error: 'Server' must be specified", file=sys.stderr)
        return 1
    if not database:
        print("error: 'Database' must be specified", file=sys.stderr)
        return 1
    if not isinstance(domain_auth, bool):
        print("error: 'Domain auth' must be TRUE or FALSE", file=sys.stderr)
        return 1
    if not domain_auth and not login:
        print("error: 'Login' must be specified for non-domain auth", file=sys.stderr)
        return 1
    if not domain_auth and not password:
        print("error: 'Password' must be specified for non-domain auth", file=sys.stderr)
        return 1
    # BUG FIX: this used `and`, so a multi-character *string* delimiter was
    # never rejected. The delimiter must be a string of exactly one char.
    if not isinstance(delimiter, str) or len(delimiter) != 1:
        print("error: 'Delimiter' must be one char", file=sys.stderr)
        return 1
    print(f"Server: {server}")
    print(f"Database: {database}")
    print(f"Domain auth: {domain_auth}")
    print(f"Login: {login}")
    print(f"Password: {password}")
    print(f"Delimiter: {delimiter}")
    conn_str = f"Server={server};Database={database};"
    if domain_auth:
        conn_str += "Trusted_Connection=True"
    else:
        conn_str += f"User ID={login};Password={password};"
    # Collect the table definitions: one sheet per table, one row per column.
    tables = {}
    for sheet_name in wb.sheetnames:
        if sheet_name == "_spec_":
            continue
        ws = wb[sheet_name]
        table = []
        for row in ws.iter_rows(min_row=2, values_only=True):
            col_name = row[0]
            col_type = row[1]
            # Type parameters are optional; short rows simply omit them.
            try:
                col_type_param1 = row[2]
            except IndexError:
                col_type_param1 = None
            try:
                col_type_param2 = row[3]
            except IndexError:
                col_type_param2 = None
            table.append((col_name, col_type, col_type_param1, col_type_param2))
        tables[sheet_name] = table
    # FIXME: check table definition
    # Emit the DROP/CREATE script for all tables.
    sql_file = os.path.join(spec_file_dir, "1_init_db.sql")
    with open(sql_file, "w") as fp:
        fp.write(f"USE [{database}]\nGO\n\n")
        for table_name, table_def in tables.items():
            fp.write(f"DROP TABLE IF EXISTS [{table_name}]\nGO\n\n")
            fp.write(f"CREATE TABLE [{table_name}] (\n")
            for i, (col_name, col_type, col_type_param1, col_type_param2) in enumerate(table_def):
                if i == 0:
                    fp.write("     ")
                else:
                    fp.write("    ,")
                fp.write(f"[{col_name}] [{col_type}]")
                if col_type_param1 is not None:
                    fp.write(f"({col_type_param1}")
                    if col_type_param2 is not None:
                        fp.write(f", {col_type_param2}")
                    fp.write(")")
                fp.write("\n")
            fp.write(")\nGO\n\n")
            fp.write(f"-- CREATE CLUSTERED COLUMNSTORE INDEX ix_columnstore "
                     f"ON {table_name}\nGO\n\n")
    # Build one self-contained uploader exe per table from the C# template.
    tmp_dir = tempfile.mkdtemp()
    for table_name, table_def in tables.items():
        shutil.copytree(
            os.path.join(prog_dir, "assets", "bcpstream_template"),
            os.path.join(tmp_dir, f"bcpstream_{table_name}"))
        Program_cs_file = os.path.join(tmp_dir, f"bcpstream_{table_name}", "Program.cs")
        # Use context managers so the template files are closed promptly
        # (the originals leaked the handles).
        with open(Program_cs_file, "r", encoding="utf-8") as fp:
            Program_cs_content = fp.read()
        Program_cs_content = Program_cs_content.replace("{{connection_string}}", conn_str)
        with open(Program_cs_file, "w", encoding="utf-8") as fp:
            fp.write(Program_cs_content)
        cs_columns = []
        for i, (col_name, col_type, _, _) in enumerate(table_def):
            cs_type = MAP_TYPES[col_type]
            # C# strings are reference types and already nullable.
            nullable = "?" if cs_type != "string" else ""
            cs_columns.append(
                f'    [Column("{col_name}")] public {cs_type}{nullable} '
                f'Column{i} {{ get; set; }}\n')
        cs_columns = "".join(cs_columns)
        cs_converters = []
        for i, (col_name, col_type, _, _) in enumerate(table_def):
            cs_type = MAP_TYPES[col_type]
            conv = MAP_CONVS[cs_type]
            cs_converters.append(
                f'            Column{i} = Helpers.{conv}(values[{i}]);\n')
        cs_converters = "".join(cs_converters)
        Format_cs_file = os.path.join(tmp_dir, f"bcpstream_{table_name}", "Format.cs")
        with open(Format_cs_file, "r", encoding="utf-8") as fp:
            Format_cs_content = fp.read()
        Format_cs_content = Format_cs_content.replace("{{table}}", table_name)
        Format_cs_content = Format_cs_content.replace("{{columns}}", cs_columns)
        Format_cs_content = Format_cs_content.replace("{{converters}}", cs_converters)
        Format_cs_content = Format_cs_content.replace("{{delimiter}}", delimiter)
        with open(Format_cs_file, "w", encoding="utf-8") as fp:
            fp.write(Format_cs_content)
        # check=True: abort instead of copying a stale/missing exe when the
        # dotnet build fails (the original ignored the exit status).
        subprocess.run(
            args=["dotnet", "publish", "--nologo", "--configuration", "Release",
                  "--self-contained", "false", "--runtime", "win-x64",
                  "/p:PublishSingleFile=true",
                  "/p:IncludeNativeLibrariesForSelfExtract=true"],
            cwd=os.path.join(tmp_dir, f"bcpstream_{table_name}"),
            check=True)
        shutil.copy(
            os.path.join(tmp_dir, f"bcpstream_{table_name}", "bin", "Release",
                         "net5.0", "win-x64", "publish", "bcpstream.exe"),
            os.path.join(spec_file_dir, f"bcpstream_{table_name}.exe"))
    shutil.rmtree(tmp_dir)
    shutil.copy(os.path.join(prog_dir, "assets", "tools", "etl.exe"),
                os.path.join(spec_file_dir, "etl.exe"))
    shutil.copy(os.path.join(prog_dir, "assets", "tools", "7za.exe"),
                os.path.join(spec_file_dir, "7za.exe"))
    # cp866 is the legacy OEM code page cmd.exe uses for batch files.
    with open(os.path.join(spec_file_dir, "2_upload.bat"),
              "w", encoding="cp866") as fp:
        fp.write("@ECHO OFF\n\n")
        for i, table_name in enumerate(tables, start=1):
            fp.write(f'7za e data.zip -so data.csv | etl decode "UTF-8" '
                     f'| etl selectrows 2 '
                     f'| bcpstream_{table_name} "{i}/{len(tables)}" 111111111\n')
    return 0
if __name__ == "__main__":
    # Propagate main()'s return value as the process exit code.
    sys.exit(main())
|
Shura1oplot/sqluploadergen | cx_setup.py | import sys
import os
from cx_Freeze import setup, Executable
# Refuse to build with any interpreter other than 3.8.
if sys.version_info[:2] != (3, 8):
    raise Exception("Python 3.8 required!")
# cx_Freeze build options: bundle the assets/ and example/ trees alongside
# the exe; exclude modules the program never imports.
build_exe_options = {
    "build_exe": os.path.abspath("build_exe"),
    "packages": [],
    "excludes": ["tkinter", "distutils"],
    "include_files": ["assets", "example"],
}
# Console (non-GUI) executable built from the main script.
target = Executable(
    script="sqluploadergen.py",
    base="Console",
)
setup(
    name="sqluploadergen",
    version="0.1.0",
    description="SQL Stream Uploader Generator",
    options={"build_exe": build_exe_options},
    executables=[target, ],
)
|
Shura1oplot/sqluploadergen | configure.py | # [SublimeLinter @python:3]
import sys
import os
from pathlib import Path
import platform
PROG_VERSION = "0.1.0"
# TCL_VERSION = "8.6"
# Default install locations; %...% variables are expanded at runtime.
SDK_BASE_DIR = "%ProgramFiles(x86)%\\Windows Kits\\10\\Include"
VC_DIR = "%ProgramFiles(x86)%\\Microsoft Visual Studio\\2019\\Community\\VC\\Auxiliary\\Build"
def main(argv=sys.argv):
    """Check the build environment and write config.bat for the build scripts."""
    # -- Check dependencies ------------------------------------------------
    if sys.version_info[:2] != (3, 8):
        print("Warning: python 3.8 is required to build exe!")
    uname = platform.uname()
    if (uname.system != "Windows"
            or uname.release != "10"
            or uname.machine != "AMD64"):
        raise Exception("Windows 10 x64 required!")
    vc_path = Path(os.path.expandvars(VC_DIR))
    for batch_name in ("vcvars32.bat", "vcvars64.bat"):
        if not (vc_path / batch_name).exists():
            raise Exception("Visual Studio 2019 Community required!")
    # -- Locate the newest installed Windows SDK ----------------------------
    sdk_dir = Path(os.path.expandvars(SDK_BASE_DIR))
    sdk_versions = sorted(
        (entry.name for entry in sdk_dir.iterdir() if entry.is_dir()),
        reverse=True,
    )
    if not sdk_versions:
        raise Exception("Windows Kits not found!")
    # -- Create config.bat ---------------------------------------------------
    with open("config.bat", "w", encoding="ascii") as fp:
        fp.write(f'@SET "VCVARS32={VC_DIR}\\vcvars32.bat"\n')
        fp.write(f'@SET "VCVARS64={VC_DIR}\\vcvars64.bat"\n')
        fp.write(f'@SET "SDK_VERSION={sdk_versions[0]}"\n')
        fp.write(f'@SET "SDK_DIR={SDK_BASE_DIR}\\%SDK_VERSION%"\n')
        fp.write(f'@SET "VERSION={PROG_VERSION}"\n')
    print("configure.py: done!")
if __name__ == "__main__":
    sys.exit(main())
|
ShashankSetty42/MAL-Telegram-Bot | server.py | from bot import telegram_chatbot
import mal_scraper
bot = telegram_chatbot("config.cfg")
# Build the info string for an anime name (None when there is no message).
def make_reply(msg):
    """Scrape MAL for *msg* and return the formatted info string."""
    if msg is None:
        return None
    return format_reply(mal_scraper.scrape_page(msg))
# Convert the dictionary returned by the MAL scraper into reply text.
def format_reply(msg):
    """Render each key/value pair of *msg* as a 'key : value' line."""
    return "".join(f"{key} : {value}\n" for key, value in msg.items())
# Long-poll Telegram for updates and answer each text message with scraped
# MyAnimeList info.
update_id = None  # offset of the last processed update
# NOTE(review): Telegram's getUpdates normally expects offset=update_id + 1;
# confirm telegram_chatbot.get_updates compensates, otherwise the last
# update is re-fetched forever.
while True:
    updates = bot.get_updates(offset=update_id)
    updates = updates["result"]
    if updates:
        for item in updates:
            update_id = item["update_id"]
            # BUG FIX: was a bare `except:` that swallowed every error
            # (including KeyboardInterrupt); only a missing/odd payload
            # should fall back to None.
            try:
                message = str(item["message"]["text"])
            except (KeyError, TypeError):
                message = None
            from_ = item["message"]["from"]["id"]  # user ID to reply back to
            reply = make_reply(message)
            bot.send_message(reply, from_)  # send the reply via bot.py
|
ShashankSetty42/MAL-Telegram-Bot | mal_scraper.py | <filename>mal_scraper.py
from bs4 import BeautifulSoup as soup
from urllib.request import urlopen as ureq
def scrape_page(anime_name):
    """Scrape MyAnimeList for *anime_name* and return a dict of its info.

    Searches MAL, follows the first search result and extracts title,
    cover-image URL, score, episode count, genres, episode duration and
    the MAL page URL. Raises IndexError when the search has no results.
    """
    # Dictionary to store the anime info.
    dict_data = {
        "Title": "",
        "Image": "",
        "Score": "",
        "Episodes": "",
        "Genre": "",
        "Duration": "",
        "MAL": "",
    }
    # URL-encode spaces to build the search URL.
    url = "https://myanimelist.net/search/all?q=" + anime_name.replace(" ", "%20")
    # Fetch and parse the search results page.
    uclient = ureq(url)
    page_html = uclient.read()
    uclient.close()
    page_soup = soup(page_html, "html.parser")
    # Take the first search result: its title and page link.
    containers = page_soup.findAll("div", {"class": "picSurround di-tc thumb"})
    # BUG FIX: the alt text was wrapped in a list before str(), so the
    # title rendered as "['Name']" instead of "Name".
    dict_data["Title"] = str(containers[0].a.img["alt"])
    url = str(containers[0].a["href"])
    dict_data["MAL"] = url
    # Navigate to the first result's own page and parse it.
    uclient = ureq(url)
    page_html = uclient.read()
    uclient.close()
    page_soup = soup(page_html, "html.parser")
    # Extract rating, cover image, episode count and duration.
    rating = page_soup.find("span", {"itemprop": "ratingValue"})
    dict_data["Score"] = str(rating.text)
    img_link = page_soup.find("meta", {"property": "og:image"})
    dict_data["Image"] = str(img_link["content"])
    info = page_soup.findAll("div", {"class": "spaceit"})
    dict_data["Episodes"] = str(info[0].text).replace(' ', '').replace('Episodes:', '').replace('\n', '')
    dict_data["Duration"] = str(info[5].text).replace('Duration:', '').replace('\n', '').replace(' ', '').replace('min.', 'min ').replace('perep.', 'per ep.')
    # Join the genre tags into one readable string.
    temp_string = page_soup.findAll("span", {"itemprop": "genre"})
    genre = ""
    for items in temp_string:
        genre += items.text + ", "
    # BUG FIX: [:-1] only removed the trailing space, leaving a dangling
    # comma; strip the whole ", " separator.
    dict_data["Genre"] = genre[:-2]
    return dict_data
|
silentwrath/Profile_REST_API | hellow.py | <gh_stars>0
print("helloo world")
# BUG FIX / NOTE(review): the JSON fragment below was pasted into this
# module by accident and made the file unparseable (SyntaxError on import).
# It is preserved as a comment so no information is lost; it belongs in a
# .json test-spec file.
# "180": {
#     "name": "ETM186",
#     "description": "check POST API call for enabling dptsa is working",
#     "tags": [
#         "RESTAPI",
#         "PP",
#         "Phase12"
#     ],
#     "suites": [
#         "sanity",
#         "smoke"
#     ]
# }
# }
|
bachya/simplisafe-python | tests/sensor/test_base.py | """Define base tests for Sensor objects."""
# pylint: disable=unused-argument
import aiohttp
import pytest
from simplipy import API
from simplipy.device import DeviceTypes
from tests.common import TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, TEST_SYSTEM_ID
@pytest.mark.asyncio
async def test_properties_base(aresponses, v2_server):
    """Test that base sensor properties are created properly."""
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        systems = await simplisafe.async_get_systems()
        system = systems[TEST_SYSTEM_ID]
        # Sensors are keyed by serial number in the v2_server fixture data.
        sensor = system.sensors["195"]
        assert sensor.name == "Garage Keypad"
        assert sensor.serial == "195"
        assert sensor.type == DeviceTypes.keypad
    # Every mocked request registered with aresponses must have been consumed.
    aresponses.assert_plan_strictly_followed()
|
bachya/simplisafe-python | tests/system/test_v3.py | <filename>tests/system/test_v3.py<gh_stars>10-100
"""Define tests for v3 System objects."""
# pylint: disable=protected-access,too-many-arguments,unused-argument
from datetime import datetime
import logging
import aiohttp
import pytest
import pytz
from simplipy import API
from simplipy.errors import (
EndpointUnavailableError,
InvalidCredentialsError,
PinError,
RequestError,
SimplipyError,
)
from simplipy.system import SystemStates
from simplipy.system.v3 import VOLUME_HIGH, VOLUME_MEDIUM
from tests.common import (
TEST_AUTHORIZATION_CODE,
TEST_CODE_VERIFIER,
TEST_SUBSCRIPTION_ID,
TEST_SYSTEM_ID,
TEST_SYSTEM_SERIAL_NO,
TEST_USER_ID,
)
@pytest.mark.asyncio
async def test_alarm_state(aresponses, v3_server):
    """Test that we can get the alarm state."""
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        systems = await simplisafe.async_get_systems()
        system = systems[TEST_SYSTEM_ID]
        # The v3_server fixture reports the system as disarmed.
        assert system.state == SystemStates.off
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_clear_notifications(aresponses, v3_server, v3_settings_response):
    """Test clearing the system's active notifications."""
    # DELETE on the messages endpoint acknowledges/clears notifications.
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/subscriptions/{TEST_SUBSCRIPTION_ID}/messages",
        "delete",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        systems = await simplisafe.async_get_systems()
        system = systems[TEST_SYSTEM_ID]
        await system.async_clear_notifications()
        assert system.notifications == []
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_get_last_event(aresponses, latest_event_response, v3_server):
    """Test getting the latest event."""
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/subscriptions/{TEST_SUBSCRIPTION_ID}/events",
        "get",
        response=aiohttp.web_response.json_response(latest_event_response, status=200),
    )
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        systems = await simplisafe.async_get_systems()
        system = systems[TEST_SYSTEM_ID]
        latest_event = await system.async_get_latest_event()
        # The canned fixture event ID proves the payload round-tripped.
        assert latest_event["eventId"] == 1234567890
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_get_pins(aresponses, v3_server, v3_settings_response):
    """Test getting PINs associated with a V3 system."""
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/normal",
        "get",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        systems = await simplisafe.async_get_systems()
        system = systems[TEST_SYSTEM_ID]
        pins = await system.async_get_pins()
        # The fixture defines the two reserved PINs plus two user PINs.
        assert len(pins) == 4
        assert pins["master"] == "1234"
        assert pins["duress"] == "9876"
        assert pins["Test 1"] == "3456"
        assert pins["Test 2"] == "5423"
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_async_get_systems(aresponses, v3_server):
    """Test the ability to get systems attached to a v3 account."""
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        systems = await simplisafe.async_get_systems()
        assert len(systems) == 1
        system = systems[TEST_SYSTEM_ID]
        assert system.serial == TEST_SYSTEM_SERIAL_NO
        assert system.system_id == TEST_SYSTEM_ID
        # The v3_server fixture defines 24 sensors for this system.
        assert len(system.sensors) == 24
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_empty_events(aresponses, events_response, v3_server):
    """Test that an empty events structure is handled correctly."""
    # Mutate the fixture so the API returns an empty events list.
    events_response["events"] = []
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/subscriptions/{TEST_SUBSCRIPTION_ID}/events",
        "get",
        response=aiohttp.web_response.json_response(events_response, status=200),
    )
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        systems = await simplisafe.async_get_systems()
        system = systems[TEST_SYSTEM_ID]
        # Test the events key existing, but being empty:
        with pytest.raises(SimplipyError):
            _ = await system.async_get_latest_event()
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_lock_state_update_bug(aresponses, caplog, v3_server, v3_state_response):
    """Test halting updates within a 15-second window from arming/disarming."""
    caplog.set_level(logging.INFO)
    v3_state_response["state"] = "AWAY"
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/state/away",
        "post",
        response=aiohttp.web_response.json_response(v3_state_response, status=200),
    )
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        systems = await simplisafe.async_get_systems()
        system = systems[TEST_SYSTEM_ID]
        await system.async_set_away()
        assert system.state == SystemStates.away
        # An update right after arming must be skipped (and logged), so no
        # extra request is registered with the mocked server for it.
        await system.async_update()
        assert any("Skipping system update" in e.message for e in caplog.records)
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_missing_events(aresponses, events_response, v3_server):
    """Test that an altogether-missing events structure is handled correctly."""
    # Remove the events key entirely from the fixture payload.
    events_response.pop("events")
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/subscriptions/{TEST_SUBSCRIPTION_ID}/events",
        "get",
        response=aiohttp.web_response.json_response(events_response, status=200),
    )
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        systems = await simplisafe.async_get_systems()
        system = systems[TEST_SYSTEM_ID]
        # Test the events key being missing entirely:
        with pytest.raises(SimplipyError):
            _ = await system.async_get_latest_event()
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_no_state_change_on_failure(aresponses, v3_server):
    """Test that the system doesn't change state on an error."""
    # Both the state change and the subsequent token refresh fail with 401.
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/state/away",
        "post",
        response=aresponses.Response(text="Unauthorized", status=401),
    )
    v3_server.add(
        "auth.simplisafe.com",
        "/oauth/token",
        "post",
        response=aresponses.Response(text="Unauthorized", status=401),
    )
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        # Manually set the expiration datetime to force a refresh token flow:
        simplisafe._access_token_expire_dt = datetime.utcnow()
        systems = await simplisafe.async_get_systems()
        system = systems[TEST_SYSTEM_ID]
        assert system.state == SystemStates.off
        with pytest.raises(InvalidCredentialsError):
            await system.async_set_away()
        # The failed request must not have flipped the cached state.
        assert system.state == SystemStates.off
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_properties(aresponses, v3_server, v3_settings_response):
    """Test that v3 system properties are available."""
    # The settings POST issued by async_set_properties below.
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/normal",
        "post",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        systems = await simplisafe.async_get_systems()
        system = systems[TEST_SYSTEM_ID]
        # Values below mirror the v3_server fixture's settings payload.
        assert system.alarm_duration == 240
        assert system.alarm_volume == VOLUME_HIGH
        assert system.battery_backup_power_level == 5293
        assert system.chime_volume == VOLUME_MEDIUM
        assert system.connection_type == "wifi"
        assert system.entry_delay_away == 30
        assert system.entry_delay_home == 30
        assert system.exit_delay_away == 60
        assert system.exit_delay_home == 0
        assert system.gsm_strength == -73
        assert system.light is True
        assert system.offline is False
        assert system.power_outage is False
        assert system.rf_jamming is False
        assert system.voice_prompt_volume == VOLUME_MEDIUM
        assert system.wall_power_level == 5933
        assert system.wifi_ssid == "MY_WIFI"
        assert system.wifi_strength == -49
        # Test "setting" various system properties by overriding their values, then
        # calling the update functions:
        system.settings_data["settings"]["normal"]["alarmDuration"] = 0
        system.settings_data["settings"]["normal"]["alarmVolume"] = 0
        system.settings_data["settings"]["normal"]["doorChime"] = 0
        system.settings_data["settings"]["normal"]["entryDelayAway"] = 0
        system.settings_data["settings"]["normal"]["entryDelayHome"] = 0
        system.settings_data["settings"]["normal"]["exitDelayAway"] = 0
        system.settings_data["settings"]["normal"]["exitDelayHome"] = 1000
        system.settings_data["settings"]["normal"]["light"] = False
        system.settings_data["settings"]["normal"]["voicePrompts"] = 0
        await system.async_set_properties(
            {
                "alarm_duration": 240,
                "alarm_volume": VOLUME_HIGH,
                "chime_volume": VOLUME_MEDIUM,
                "entry_delay_away": 30,
                "entry_delay_home": 30,
                "exit_delay_away": 60,
                "exit_delay_home": 0,
                "light": True,
                "voice_prompt_volume": VOLUME_MEDIUM,
            }
        )
        assert system.alarm_duration == 240
        assert system.alarm_volume == VOLUME_HIGH
        assert system.chime_volume == VOLUME_MEDIUM
        assert system.entry_delay_away == 30
        assert system.entry_delay_home == 30
        assert system.exit_delay_away == 60
        assert system.exit_delay_home == 0
        assert system.light is True
        assert system.voice_prompt_volume == VOLUME_MEDIUM
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_remove_nonexistent_pin(aresponses, v3_server, v3_settings_response):
    """Test throwing an error when removing a nonexistent PIN."""
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/normal",
        "get",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        systems = await simplisafe.async_get_systems()
        system = systems[TEST_SYSTEM_ID]
        # "0000" is not in the fixture's PIN list, so removal must fail.
        with pytest.raises(PinError) as err:
            await system.async_remove_pin("0000")
            assert "Refusing to delete nonexistent PIN" in str(err)
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_remove_pin(aresponses, v3_server, v3_settings_response):
    """Test removing a PIN in a V3 system."""
    # The removal flow fetches settings twice, POSTs the change, then
    # re-fetches; the mutated fixture below is served after the POST.
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/normal",
        "get",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/normal",
        "get",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    # Simulate the backend having cleared the second user PIN.
    v3_settings_response["settings"]["pins"]["users"][1]["pin"] = ""
    v3_settings_response["settings"]["pins"]["users"][1]["name"] = ""
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/pins",
        "post",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/normal",
        "get",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        systems = await simplisafe.async_get_systems()
        system = systems[TEST_SYSTEM_ID]
        latest_pins = await system.async_get_pins()
        assert len(latest_pins) == 4
        await system.async_remove_pin("Test 2")
        latest_pins = await system.async_get_pins()
        assert len(latest_pins) == 3
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_remove_reserved_pin(aresponses, v3_server, v3_settings_response):
    """Test throwing an error when removing a reserved PIN."""
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/normal",
        "get",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        systems = await simplisafe.async_get_systems()
        system = systems[TEST_SYSTEM_ID]
        # "master" is reserved and must never be deletable.
        with pytest.raises(PinError) as err:
            await system.async_remove_pin("master")
            assert "Refusing to delete reserved PIN" in str(err)
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_set_duplicate_pin(aresponses, v3_server, v3_settings_response):
    """Test throwing an error when setting a duplicate PIN."""
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/normal",
        "get",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/pins",
        "post",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    async with aiohttp.ClientSession() as session:
        # NOTE(review): the raises block also wraps the API setup, so an
        # unrelated PinError there would incorrectly pass this test —
        # consider narrowing it to the async_set_pin call.
        with pytest.raises(PinError) as err:
            simplisafe = await API.async_from_auth(
                TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
            )
            systems = await simplisafe.async_get_systems()
            system = systems[TEST_SYSTEM_ID]
            # "1234" already belongs to the master PIN in the fixture.
            await system.async_set_pin("whatever", "1234")
            assert "Refusing to create duplicate PIN" in str(err)
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_set_invalid_property(aresponses, v3_server, v3_settings_response):
    """Verify that async_set_properties rejects an unknown property name."""
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/normal",
        "post",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    async with aiohttp.ClientSession() as websession:
        api = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=websession
        )
        all_systems = await api.async_get_systems()
        target_system = all_systems[TEST_SYSTEM_ID]
        with pytest.raises(ValueError):
            await target_system.async_set_properties({"Fake": "News"})
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_set_max_user_pins(
    aresponses,
    subscriptions_response,
    v3_server,
    v3_settings_response,
):
    """Test throwing an error when setting too many user PINs.

    The settings fixture is pre-filled with four user PINs so that adding a
    fifth is rejected with a PinError.
    """
    # Occupy every user PIN slot in the mocked settings payload:
    v3_settings_response["settings"]["pins"]["users"] = [
        {
            "_id": "1271279d966212121124c6",
            "pin": "1234",
            "name": "Test 1",
        },
        {
            "_id": "1271279d966212121124c7",
            "pin": "5678",
            "name": "Test 2",
        },
        {
            "_id": "1271279d966212121124c8",
            "pin": "9012",
            "name": "Test 3",
        },
        {
            "_id": "1271279d966212121124c9",
            "pin": "3456",
            "name": "Test 4",
        },
    ]
    # GET performed by async_set_pin to inspect the current PINs:
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/normal",
        "get",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    # POST that would persist a new PIN. NOTE(review): validation should fail
    # before this request is made — confirm the strict-plan assertion below
    # tolerates the registration.
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/pins",
        "post",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    async with aiohttp.ClientSession() as session:
        with pytest.raises(PinError) as err:
            simplisafe = await API.async_from_auth(
                TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
            )
            systems = await simplisafe.async_get_systems()
            system = systems[TEST_SYSTEM_ID]
            # A fifth user PIN exceeds the maximum and must raise:
            await system.async_set_pin("whatever", "8121")
        assert "Refusing to create more than" in str(err)
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_set_pin(aresponses, v3_server, v3_settings_response):
    """Test setting a PIN in a V3 system.

    Four mocked calls are exercised in order: a settings GET for the first
    ``async_get_pins``, a second GET performed inside ``async_set_pin``, the
    POST that stores the PIN, and a final GET for the second
    ``async_get_pins``.
    """
    # GET backing the first async_get_pins call:
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/normal",
        "get",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    # GET performed by async_set_pin before storing the new PIN:
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/normal",
        "get",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    # Mutate the shared fixture so the remaining responses include the new
    # PIN; index 2 is presumably a spare user slot — TODO confirm against the
    # fixture data.
    v3_settings_response["settings"]["pins"]["users"][2]["pin"] = "1274"
    v3_settings_response["settings"]["pins"]["users"][2]["name"] = "whatever"
    # POST that persists the new PIN:
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/pins",
        "post",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    # GET backing the final async_get_pins call:
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/normal",
        "get",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        systems = await simplisafe.async_get_systems()
        system = systems[TEST_SYSTEM_ID]
        latest_pins = await system.async_get_pins()
        assert len(latest_pins) == 4
        await system.async_set_pin("whatever", "1274")
        # The mutated fixture now reports five PINs:
        latest_pins = await system.async_get_pins()
        assert len(latest_pins) == 5
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_set_pin_wrong_chars(aresponses, v3_server):
    """Verify that a PIN containing non-numeric characters is rejected."""
    async with aiohttp.ClientSession() as websession:
        with pytest.raises(PinError) as err:
            api = await API.async_from_auth(
                TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=websession
            )
            all_systems = await api.async_get_systems()
            target_system = all_systems[TEST_SYSTEM_ID]
            await target_system.async_set_pin("whatever", "abcd")
        assert "PINs can only contain numbers" in str(err)
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_set_pin_wrong_length(aresponses, v3_server):
    """Verify that a PIN of the wrong length is rejected."""
    async with aiohttp.ClientSession() as websession:
        with pytest.raises(PinError) as err:
            api = await API.async_from_auth(
                TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=websession
            )
            all_systems = await api.async_get_systems()
            target_system = all_systems[TEST_SYSTEM_ID]
            await target_system.async_set_pin("whatever", "1122334455")
        assert "digits long" in str(err)
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_set_states(aresponses, v3_server, v3_state_response):
    """Test the ability to set the state of the system.

    Each arm/disarm call POSTs to its own state endpoint; the shared state
    fixture is mutated before each registration so the mocked reply reflects
    the requested state.
    """
    # Arm away:
    v3_state_response["state"] = "AWAY"
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/state/away",
        "post",
        response=aiohttp.web_response.json_response(v3_state_response, status=200),
    )
    # Arm home:
    v3_state_response["state"] = "HOME"
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/state/home",
        "post",
        response=aiohttp.web_response.json_response(v3_state_response, status=200),
    )
    # Disarm:
    v3_state_response["state"] = "OFF"
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/state/off",
        "post",
        response=aiohttp.web_response.json_response(v3_state_response, status=200),
    )
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        systems = await simplisafe.async_get_systems()
        system = systems[TEST_SYSTEM_ID]
        # Each setter should leave the parsed state on the system object:
        await system.async_set_away()
        assert system.state == SystemStates.away
        await system.async_set_home()
        assert system.state == SystemStates.home
        await system.async_set_off()
        assert system.state == SystemStates.off
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_system_notifications(aresponses, v3_server):
    """Verify the notification attributes parsed from the default fixture."""
    async with aiohttp.ClientSession() as websession:
        api = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=websession
        )
        all_systems = await api.async_get_systems()
        target_system = all_systems[TEST_SYSTEM_ID]
        assert len(target_system.notifications) == 1
        note = target_system.notifications[0]
        # Check every parsed attribute against the fixture values:
        expected_attrs = {
            "notification_id": "xxxxxxxxxxxxxxxxxxxxxxxx",
            "text": "Power Outage - Backup battery in use.",
            "category": "error",
            "code": "2000",
            "received_dt": datetime(2020, 2, 16, 3, 20, 28, tzinfo=pytz.UTC),
            "link": "http://link.to.info",
            "link_label": "More Info",
        }
        for attr_name, expected_value in expected_attrs.items():
            assert getattr(note, attr_name) == expected_value
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_unavailable_endpoint(
    aresponses, unavailable_endpoint_response, v3_server
):
    """Verify that a plan-restricted endpoint raises EndpointUnavailableError."""
    # The settings endpoint answers 403 with the "unavailable" payload:
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/normal",
        "get",
        response=aiohttp.web_response.json_response(
            unavailable_endpoint_response, status=403
        ),
    )
    async with aiohttp.ClientSession() as websession:
        api = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=websession
        )
        all_systems = await api.async_get_systems()
        target_system = all_systems[TEST_SYSTEM_ID]
        with pytest.raises(EndpointUnavailableError):
            await target_system.async_update(
                include_subscription=False, include_devices=False
            )
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_update_system_data(
    aresponses,
    subscriptions_response,
    v3_sensors_response,
    v3_server,
    v3_settings_response,
):
    """Test getting updated data for a v3 system.

    ``async_update`` re-fetches the subscription, settings, and sensor
    payloads; one mocked response is registered for each.
    """
    # Subscription refresh:
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/users/{TEST_USER_ID}/subscriptions",
        "get",
        response=aiohttp.web_response.json_response(subscriptions_response, status=200),
    )
    # Settings refresh:
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/normal",
        "get",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    # Sensor refresh:
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/sensors",
        "get",
        response=aiohttp.web_response.json_response(v3_sensors_response, status=200),
    )
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        systems = await simplisafe.async_get_systems()
        system = systems[TEST_SYSTEM_ID]
        await system.async_update()
        # Spot-check core attributes and the sensor count after the update:
        assert system.serial == TEST_SYSTEM_SERIAL_NO
        assert system.system_id == TEST_SYSTEM_ID
        assert len(system.sensors) == 24
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_update_error(
    aresponses,
    subscriptions_response,
    v3_sensors_response,
    v3_server,
    v3_settings_response,
):
    """Test handling a generic error during update.

    The subscription and settings fetches succeed; the sensors fetch 500s,
    which should surface from ``async_update`` as a RequestError.
    """
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/users/{TEST_USER_ID}/subscriptions",
        "get",
        response=aiohttp.web_response.json_response(subscriptions_response, status=200),
    )
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/normal",
        "get",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    # The sensors endpoint fails with a server error:
    v3_server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/sensors",
        "get",
        response=aresponses.Response(text="Server Error", status=500),
    )
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE,
            TEST_CODE_VERIFIER,
            session=session,
            # Set so that our tests don't take too long:
            request_retries=1,
        )
        systems = await simplisafe.async_get_systems()
        system = systems[TEST_SYSTEM_ID]
        with pytest.raises(RequestError):
            await system.async_update()
    aresponses.assert_plan_strictly_followed()
|
bachya/simplisafe-python | simplipy/api.py | <reponame>bachya/simplisafe-python
"""Define functionality for interacting with the SimpliSafe API."""
from __future__ import annotations
import asyncio
from datetime import datetime, timedelta
from json.decoder import JSONDecodeError
import sys
from typing import TYPE_CHECKING, Any, Callable
from aiohttp import ClientSession
from aiohttp.client_exceptions import ClientResponseError
import backoff
from simplipy.const import DEFAULT_USER_AGENT, LOGGER
from simplipy.errors import (
EndpointUnavailableError,
InvalidCredentialsError,
RequestError,
)
from simplipy.system.v2 import SystemV2
from simplipy.system.v3 import SystemV3
from simplipy.util.auth import (
AUTH_URL_BASE,
AUTH_URL_HOSTNAME,
DEFAULT_CLIENT_ID,
DEFAULT_REDIRECT_URI,
)
from simplipy.websocket import WebsocketClient
API_URL_HOSTNAME = "api.simplisafe.com"
API_URL_BASE = f"https://{API_URL_HOSTNAME}/v1"
DEFAULT_EXPIRATION_PADDING = 300
DEFAULT_REQUEST_RETRIES = 4
DEFAULT_TIMEOUT = 10
def get_expiration_datetime(expires_in_seconds: int) -> datetime:
    """Return a UTC token-expiration timestamp for a lifetime in seconds.

    The lifetime is shortened by ``DEFAULT_EXPIRATION_PADDING`` so the token
    is treated as expired slightly before the server would actually reject it.
    """
    padded_lifetime = timedelta(
        seconds=expires_in_seconds - DEFAULT_EXPIRATION_PADDING
    )
    return datetime.utcnow() + padded_lifetime
class API:  # pylint: disable=too-many-instance-attributes
    """An API object to interact with the SimpliSafe cloud.

    Note that this class shouldn't be instantiated directly; instead, the
    :meth:`simplipy.api.API.async_from_auth` and
    :meth:`simplipy.api.API.async_from_refresh_token` methods should be used.

    :param session: The ``aiohttp`` ``ClientSession`` session used for all HTTP requests
    :type session: ``aiohttp.client.ClientSession``
    :param request_retries: The default number of request retries to use
    :type request_retries: ``int``
    """

    def __init__(
        self,
        *,
        session: ClientSession,
        request_retries: int = DEFAULT_REQUEST_RETRIES,
    ) -> None:
        """Initialize."""
        self._refresh_token_listeners: list[Callable[..., None]] = []
        self.session: ClientSession = session

        # These will get filled in after initial authentication:
        self._access_token_expire_dt: datetime | None = None
        self._backoff_refresh_lock = asyncio.Lock()
        self.access_token: str | None = None
        self.refresh_token: str | None = None
        self.subscription_data: dict[int, Any] = {}
        self.user_id: int | None = None
        self.websocket: WebsocketClient | None = None

        # Implement a version of the request coroutine, but with backoff/retry logic:
        self.request = backoff.on_exception(
            backoff.expo,
            ClientResponseError,
            jitter=backoff.random_jitter,
            logger=LOGGER,
            max_tries=request_retries,
            on_backoff=self._async_handle_on_backoff,
            on_giveup=self._async_handle_on_giveup,
        )(self._async_request)

    @classmethod
    async def async_from_auth(
        cls,
        authorization_code: str,
        code_verifier: str,
        *,
        session: ClientSession,
        request_retries: int = DEFAULT_REQUEST_RETRIES,
    ) -> API:
        """Get an authenticated API object from an Authorization Code and Code Verifier.

        :param authorization_code: The Authorization Code
        :type authorization_code: ``str``
        :param code_verifier: The Code Verifier
        :type code_verifier: ``str``
        :param session: The ``aiohttp`` ``ClientSession`` session used for all HTTP requests
        :type session: ``aiohttp.client.ClientSession``
        :param request_retries: The default number of request retries to use
        :type request_retries: ``int``
        :rtype: :meth:`simplipy.api.API`
        """
        api = cls(session=session, request_retries=request_retries)

        try:
            token_resp = await api._async_request(
                "post",
                "oauth/token",
                url_base=AUTH_URL_BASE,
                headers={"Host": AUTH_URL_HOSTNAME},
                json={
                    "grant_type": "authorization_code",
                    "client_id": DEFAULT_CLIENT_ID,
                    "code_verifier": code_verifier,
                    "code": authorization_code,
                    "redirect_uri": DEFAULT_REDIRECT_URI,
                },
            )
        except ClientResponseError as err:
            # 401/403 during the code exchange means bad credentials:
            if err.status in (401, 403):
                raise InvalidCredentialsError("Invalid credentials") from err
            raise RequestError(err) from err

        api._access_token_expire_dt = get_expiration_datetime(token_resp["expires_in"])
        api.access_token = token_resp["access_token"]
        api.refresh_token = token_resp["refresh_token"]
        await api._async_post_init()
        return api

    @classmethod
    async def async_from_refresh_token(
        cls,
        refresh_token: str,
        session: ClientSession,
        *,
        request_retries: int = DEFAULT_REQUEST_RETRIES,
    ) -> API:
        """Get an authenticated API object from a refresh token.

        :param refresh_token: The refresh token
        :type refresh_token: ``str``
        :param session: The ``aiohttp`` ``ClientSession`` session used for all HTTP requests
        :type session: ``aiohttp.client.ClientSession``
        :param request_retries: The default number of request retries to use
        :type request_retries: ``int``
        :rtype: :meth:`simplipy.api.API`
        """
        api = cls(session=session, request_retries=request_retries)
        api.refresh_token = refresh_token
        await api._async_refresh_access_token()
        await api._async_post_init()
        return api

    async def _async_handle_on_backoff(self, _: dict[str, Any]) -> None:
        """Handle a backoff retry (runs before each retried request)."""
        err_info = sys.exc_info()
        err: ClientResponseError = err_info[1].with_traceback(err_info[2])  # type: ignore

        if err.status in (401, 403):
            if TYPE_CHECKING:
                assert self._access_token_expire_dt

            # Since we might have multiple requests (each running their own retry
            # sequence) land here, we only refresh the access token if it hasn't
            # been refreshed within the expiration window (and we lock the attempt so
            # other requests can't try it at the same time):
            async with self._backoff_refresh_lock:
                if datetime.utcnow() < self._access_token_expire_dt:
                    return
                LOGGER.info("401 detected; attempting refresh token")
                await self._async_refresh_access_token()

    async def _async_handle_on_giveup(self, _: dict[str, Any]) -> None:
        """Handle a give up after retries are exhausted."""
        err_info = sys.exc_info()
        err = err_info[1].with_traceback(err_info[2])  # type: ignore
        raise RequestError(err) from err

    async def _async_post_init(self) -> None:
        """Perform some post-init actions (resolve user ID, create websocket)."""
        auth_check_resp = await self._async_request("get", "api/authCheck")
        self.user_id = auth_check_resp["userId"]
        self.websocket = WebsocketClient(self)

    async def _async_refresh_access_token(self) -> None:
        """Update access/refresh tokens from a refresh token."""
        try:
            token_resp = await self._async_request(
                "post",
                "oauth/token",
                url_base=AUTH_URL_BASE,
                headers={"Host": AUTH_URL_HOSTNAME},
                json={
                    "grant_type": "refresh_token",
                    "client_id": DEFAULT_CLIENT_ID,
                    "refresh_token": self.refresh_token,
                },
            )
        except ClientResponseError as err:
            if err.status in (401, 403):
                raise InvalidCredentialsError("Invalid refresh token") from err
            raise RequestError(err) from err

        self._access_token_expire_dt = get_expiration_datetime(token_resp["expires_in"])
        self.access_token = token_resp["access_token"]
        self.refresh_token = token_resp["refresh_token"]

        # Let interested parties (e.g., token storage) know about the new token:
        for callback in self._refresh_token_listeners:
            callback(self.refresh_token)

    async def _async_request(
        self, method: str, endpoint: str, url_base: str = API_URL_BASE, **kwargs: Any
    ) -> dict[str, Any]:
        """Execute an API request and return the decoded payload."""
        kwargs.setdefault("headers", {})
        kwargs["headers"].setdefault("Host", API_URL_HOSTNAME)
        kwargs["headers"]["Content-Type"] = "application/json; charset=utf-8"
        kwargs["headers"]["User-Agent"] = DEFAULT_USER_AGENT
        if self.access_token:
            kwargs["headers"]["Authorization"] = f"Bearer {self.access_token}"

        data: dict[str, Any] | str = {}
        async with self.session.request(
            method, f"{url_base}/{endpoint}", **kwargs
        ) as resp:
            try:
                data = await resp.json(content_type=None)
            except JSONDecodeError:
                # Non-JSON bodies are wrapped so callers always get a dict:
                message = await resp.text()
                data = {"error": message}

            if isinstance(data, str):
                # In some cases, the SimpliSafe API will return a quoted string
                # in its response body (e.g., "\"Unauthorized\""), which is
                # technically valid JSON. Additionally, SimpliSafe sets that
                # response's Content-Type header to application/json (#smh).
                # Together, these factors will allow a non-true-JSON payload to
                # escape the try/except above. So, if we get here, we use the
                # string value (with quotes removed) to raise an error:
                message = data.replace('"', "")
                data = {"error": message}

            LOGGER.debug("Data received from /%s: %s", endpoint, data)

            if data and data.get("type") == "NoRemoteManagement":
                raise EndpointUnavailableError(
                    f"Endpoint unavailable in plan: {endpoint}"
                ) from None

            resp.raise_for_status()

        return data

    def add_refresh_token_listener(
        self, callback: Callable[..., None]
    ) -> Callable[..., None]:
        """Add a listener that should be triggered when tokens are refreshed.

        Note that callbacks should expect to receive a refresh token as a parameter.

        :param callback: The method to call after receiving an event.
        :type callback: ``Callable[..., None]``
        """
        self._refresh_token_listeners.append(callback)

        def remove() -> None:
            """Remove the callback."""
            self._refresh_token_listeners.remove(callback)

        return remove

    async def async_get_systems(self) -> dict[int, SystemV2 | SystemV3]:
        """Get systems associated to the associated SimpliSafe account.

        In the dict that is returned, the keys are the subscription ID and the values
        are actual ``System`` objects.

        :rtype: ``Dict[int, simplipy.system.System]``
        """
        systems: dict[int, SystemV2 | SystemV3] = {}

        await self.async_update_subscription_data()

        for sid, subscription in self.subscription_data.items():
            # Was "if not subscription['activated'] != 0" — same condition,
            # stated directly:
            if subscription["activated"] == 0:
                LOGGER.info("Skipping inactive subscription: %s", sid)
                continue

            if not subscription["location"].get("system"):
                LOGGER.error("Skipping subscription with missing system data: %s", sid)
                continue

            system: SystemV2 | SystemV3
            version = subscription["location"]["system"]["version"]
            if version == 2:
                system = SystemV2(self, sid)
            else:
                system = SystemV3(self, sid)

            # Update the system, but don't include subscription data itself, since it
            # will already have been fetched when the API was first queried:
            await system.async_update(include_subscription=False)
            system.generate_device_objects()
            systems[sid] = system

        return systems

    async def async_update_subscription_data(self) -> None:
        """Get the latest subscription data, keyed by subscription ID."""
        subscription_resp = await self.request(
            "get", f"users/{self.user_id}/subscriptions", params={"activeOnly": "true"}
        )
        self.subscription_data = {
            subscription["sid"]: subscription
            for subscription in subscription_resp["subscriptions"]
        }
|
bachya/simplisafe-python | simplipy/device/sensor/v3.py | """Define a v3 (new) SimpliSafe sensor."""
from typing import cast
from simplipy.device import DeviceTypes, DeviceV3
class SensorV3(DeviceV3):
    """A V3 (new) sensor.

    Note that this class shouldn't be instantiated directly; it will be
    instantiated as appropriate via :meth:`simplipy.API.async_get_systems`.
    """

    # Device types whose status payload carries a "triggered" flag:
    _TRIGGERABLE_TYPES = (
        DeviceTypes.carbon_monoxide,
        DeviceTypes.entry,
        DeviceTypes.glass_break,
        DeviceTypes.leak,
        DeviceTypes.motion,
        DeviceTypes.smoke,
        DeviceTypes.temperature,
    )

    @property
    def trigger_instantly(self) -> bool:
        """Return whether the sensor will trigger instantly.

        :rtype: ``bool``
        """
        sensor_settings = self._system.sensor_data[self._serial]["setting"]
        return cast(bool, sensor_settings.get("instantTrigger", False))

    @property
    def triggered(self) -> bool:
        """Return whether the sensor has been triggered.

        :rtype: ``bool``
        """
        if self.type not in self._TRIGGERABLE_TYPES:
            return False
        sensor_status = self._system.sensor_data[self._serial]["status"]
        return cast(bool, sensor_status.get("triggered", False))

    @property
    def temperature(self) -> int:
        """Return the temperature of the sensor (as appropriate).

        If the sensor isn't a temperature sensor, an ``AttributeError`` will be raised.

        :rtype: ``int``
        """
        if self.type != DeviceTypes.temperature:
            raise AttributeError("Non-temperature sensor cannot have a temperature")
        sensor_status = self._system.sensor_data[self._serial]["status"]
        return cast(int, sensor_status["temperature"])
|
bachya/simplisafe-python | tests/system/test_base.py | <filename>tests/system/test_base.py
"""Define base tests for System objects."""
# pylint: disable=too-many-arguments,unused-argument
from datetime import datetime
import aiohttp
import pytest
from simplipy import API
from simplipy.system import SystemStates
from tests.common import (
TEST_ADDRESS,
TEST_AUTHORIZATION_CODE,
TEST_CODE_VERIFIER,
TEST_SUBSCRIPTION_ID,
TEST_SYSTEM_ID,
TEST_SYSTEM_SERIAL_NO,
TEST_USER_ID,
)
@pytest.mark.asyncio
async def test_deactivated_system(aresponses, server, subscriptions_response):
    """Test that API.async_get_systems doesn't return deactivated systems."""
    subscriptions_response["subscriptions"][0]["activated"] = 0
    # Key the users endpoint on TEST_USER_ID, consistent with the other tests
    # in this module (the API requests /v1/users/{user_id}/subscriptions):
    server.add(
        "api.simplisafe.com",
        f"/v1/users/{TEST_USER_ID}/subscriptions",
        "get",
        response=aiohttp.web_response.json_response(subscriptions_response, status=200),
    )
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        # A subscription with activated == 0 should be skipped entirely:
        systems = await simplisafe.async_get_systems()
        assert len(systems) == 0
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_get_events(aresponses, events_response, v2_server):
    """Verify that recent events can be fetched from a system."""
    v2_server.add(
        "api.simplisafe.com",
        f"/v1/subscriptions/{TEST_SYSTEM_ID}/events",
        "get",
        response=aiohttp.web_response.json_response(events_response, status=200),
    )
    async with aiohttp.ClientSession() as websession:
        api = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=websession
        )
        all_systems = await api.async_get_systems()
        target_system = all_systems[TEST_SYSTEM_ID]
        fetched_events = await target_system.async_get_events(datetime.now(), 2)
        assert len(fetched_events) == 2
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_missing_property(
    aresponses,
    caplog,
    server,
    subscriptions_response,
    v3_sensors_response,
    v3_settings_response,
):
    """Test that missing property data is handled correctly.

    ``isOffline`` is removed from the subscription payload; the ``offline``
    property should fall back to a default and a warning should be logged.
    """
    # Remove the backing field for the "offline" property:
    subscriptions_response["subscriptions"][0]["location"]["system"].pop("isOffline")
    server.add(
        "api.simplisafe.com",
        f"/v1/users/{TEST_USER_ID}/subscriptions",
        "get",
        response=aiohttp.web_response.json_response(subscriptions_response, status=200),
    )
    server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/normal",
        "get",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/sensors",
        "get",
        response=aiohttp.web_response.json_response(v3_sensors_response, status=200),
    )
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        systems = await simplisafe.async_get_systems()
        system = systems[TEST_SYSTEM_ID]
        # The missing field should default to False rather than raise:
        assert system.offline is False
        assert any(
            "SimpliSafe didn't return data for property: offline" in e.message
            for e in caplog.records
        )
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_missing_system_info(aresponses, caplog, server, subscriptions_response):
    """Test that a subscription with missing system data is handled correctly."""
    subscriptions_response["subscriptions"][0]["location"]["system"] = {}
    # Key the users endpoint on TEST_USER_ID, consistent with the other tests
    # in this module (the API requests /v1/users/{user_id}/subscriptions):
    server.add(
        "api.simplisafe.com",
        f"/v1/users/{TEST_USER_ID}/subscriptions",
        "get",
        response=aiohttp.web_response.json_response(subscriptions_response, status=200),
    )
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        await simplisafe.async_get_systems()
        # The empty system dict should be skipped with an error log:
        assert any(
            "Skipping subscription with missing system data" in e.message
            for e in caplog.records
        )
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_properties(aresponses, v2_server):
    """Verify the base system properties exposed by a V2 system."""
    async with aiohttp.ClientSession() as websession:
        api = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=websession
        )
        all_systems = await api.async_get_systems()
        target_system = all_systems[TEST_SYSTEM_ID]
        assert not target_system.alarm_going_off
        assert target_system.address == TEST_ADDRESS
        assert target_system.connection_type == "wifi"
        assert target_system.serial == TEST_SYSTEM_SERIAL_NO
        assert target_system.state == SystemStates.off
        assert target_system.system_id == TEST_SYSTEM_ID
        assert target_system.temperature == 67
        assert target_system.version == 2
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_unknown_sensor_type(aresponses, caplog, v2_server):
    """Verify that an unrecognized sensor type is logged rather than fatal."""
    async with aiohttp.ClientSession() as websession:
        api = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=websession
        )
        await api.async_get_systems()
        assert any(
            "Unknown device type" in record.message for record in caplog.records
        )
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_unknown_system_state(
    aresponses,
    caplog,
    server,
    subscriptions_response,
    v3_sensors_response,
    v3_settings_response,
):
    """Test that an unknown system state is logged.

    The subscription payload is given an unrecognized ``alarmState``; parsing
    should log the raw value rather than raise.
    """
    # Inject an alarm state the library doesn't know about:
    subscriptions_response["subscriptions"][0]["location"]["system"][
        "alarmState"
    ] = "NOT_REAL_STATE"
    server.add(
        "api.simplisafe.com",
        f"/v1/users/{TEST_USER_ID}/subscriptions",
        "get",
        response=aiohttp.web_response.json_response(subscriptions_response, status=200),
    )
    server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/settings/normal",
        "get",
        response=aiohttp.web_response.json_response(v3_settings_response, status=200),
    )
    server.add(
        "api.simplisafe.com",
        f"/v1/ss3/subscriptions/{TEST_SUBSCRIPTION_ID}/sensors",
        "get",
        response=aiohttp.web_response.json_response(v3_sensors_response, status=200),
    )
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        await simplisafe.async_get_systems()
        # Both the generic message and the raw state value should be logged:
        assert any("Unknown raw system state" in e.message for e in caplog.records)
        assert any("NOT_REAL_STATE" in e.message for e in caplog.records)
    aresponses.assert_plan_strictly_followed()
|
bachya/simplisafe-python | simplipy/device/sensor/v2.py | <filename>simplipy/device/sensor/v2.py
"""Define a v2 (old) SimpliSafe sensor."""
from typing import cast
from simplipy.device import Device, DeviceTypes
from simplipy.errors import SimplipyError
class SensorV2(Device):
    """A V2 (old) sensor.

    Note that this class shouldn't be instantiated directly; it will be
    instantiated as appropriate via :meth:`simplipy.API.async_get_systems`.
    """

    @property
    def data(self) -> int:
        """Return the sensor's current data flag (currently not understood).

        :rtype: ``int``
        """
        raw = self._system.sensor_data[self._serial]
        return cast(int, raw["sensorData"])

    @property
    def error(self) -> bool:
        """Return the sensor's error status.

        :rtype: ``bool``
        """
        raw = self._system.sensor_data[self._serial]
        return cast(bool, raw["error"])

    @property
    def low_battery(self) -> bool:
        """Return whether the sensor's battery is low.

        :rtype: ``bool``
        """
        battery_state = self._system.sensor_data[self._serial].get("battery", "ok")
        return cast(bool, battery_state != "ok")

    @property
    def settings(self) -> bool:
        """Return the sensor's settings.

        :rtype: ``bool``
        """
        raw = self._system.sensor_data[self._serial]
        return cast(bool, raw["setting"])

    @property
    def trigger_instantly(self) -> bool:
        """Return whether the sensor will trigger instantly.

        :rtype: ``bool``
        """
        raw = self._system.sensor_data[self._serial]
        return cast(bool, raw["instant"])

    @property
    def triggered(self) -> bool:
        """Return whether the sensor has been triggered.

        Only entry sensors report a triggered state in the V2 API.

        :rtype: ``bool``
        """
        if self.type != DeviceTypes.entry:
            raise SimplipyError(
                f"Cannot determine triggered state for sensor: {self.name}"
            )
        entry_status = self._system.sensor_data[self._serial].get(
            "entryStatus", "closed"
        )
        return cast(bool, entry_status == "open")
|
bachya/simplisafe-python | tests/test_api.py | """Define tests for the System object."""
# pylint: disable=protected-access,too-many-arguments
from datetime import datetime
from unittest.mock import Mock
import aiohttp
import pytest
from simplipy import API
from simplipy.errors import InvalidCredentialsError, RequestError
from .common import (
TEST_ACCESS_TOKEN,
TEST_AUTHORIZATION_CODE,
TEST_CODE_VERIFIER,
TEST_REFRESH_TOKEN,
TEST_SUBSCRIPTION_ID,
)
@pytest.mark.asyncio
async def test_401_bad_credentials(aresponses, invalid_authorization_code_response):
    """Verify a 401 during the token exchange raises InvalidCredentialsError."""
    aresponses.add(
        "auth.simplisafe.com",
        "/oauth/token",
        "post",
        response=aiohttp.web_response.json_response(
            invalid_authorization_code_response, status=401
        ),
    )
    async with aiohttp.ClientSession() as websession:
        with pytest.raises(InvalidCredentialsError):
            await API.async_from_auth(
                TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=websession
            )
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_401_refresh_token_failure(
    aresponses, invalid_refresh_token_response, server
):
    """Test that an error is raised when refresh token and reauth both fail.

    The first data request 401s, which triggers the token-refresh path; the
    refresh exchange then 403s, surfacing as InvalidCredentialsError.
    """
    # Initial data request fails with 401.
    # NOTE(review): this users path is keyed on TEST_SUBSCRIPTION_ID while
    # other tests use TEST_USER_ID — presumably both constants share a value;
    # confirm against tests/common.py.
    server.add(
        "api.simplisafe.com",
        f"/v1/users/{TEST_SUBSCRIPTION_ID}/subscriptions",
        "get",
        response=aresponses.Response(text="Unauthorized", status=401),
    )
    # The subsequent refresh-token exchange fails as well:
    server.add(
        "auth.simplisafe.com",
        "/oauth/token",
        "post",
        response=aiohttp.web_response.json_response(
            invalid_refresh_token_response, status=403
        ),
    )
    async with aiohttp.ClientSession() as session:
        with pytest.raises(InvalidCredentialsError):
            simplisafe = await API.async_from_auth(
                TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
            )
            # Manually set the expiration datetime to force a refresh token flow:
            simplisafe._access_token_expire_dt = datetime.utcnow()
            await simplisafe.async_get_systems()
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_401_refresh_token_success(
    api_token_response,
    aresponses,
    server,
    v2_settings_response,
    v2_subscriptions_response,
):
    """Test that a successful refresh token carries out the original request.

    The first subscriptions request 401s; the refresh exchange succeeds with
    new tokens, and the retried subscriptions/settings requests complete.
    """
    # Initial request fails with 401, triggering the refresh path.
    # NOTE(review): the users path is keyed on TEST_SUBSCRIPTION_ID while other
    # tests use TEST_USER_ID — presumably both share a value; confirm against
    # tests/common.py.
    server.add(
        "api.simplisafe.com",
        f"/v1/users/{TEST_SUBSCRIPTION_ID}/subscriptions",
        "get",
        response=aresponses.Response(text="Unauthorized", status=401),
    )
    # NOTE(review): "<PASSWORD>" looks like a dataset-redacted fixture value
    # (originally distinct new-token strings) — confirm against the upstream
    # repo before relying on these assertions.
    api_token_response["access_token"] = "<PASSWORD>"
    api_token_response["refresh_token"] = "<PASSWORD>"
    # The refresh exchange succeeds and returns the new tokens:
    server.add(
        "auth.simplisafe.com",
        "/oauth/token",
        "post",
        response=aiohttp.web_response.json_response(api_token_response, status=200),
    )
    # The retried original request, plus the follow-on settings fetch:
    server.add(
        "api.simplisafe.com",
        f"/v1/users/{TEST_SUBSCRIPTION_ID}/subscriptions",
        "get",
        response=aiohttp.web_response.json_response(
            v2_subscriptions_response, status=200
        ),
    )
    server.add(
        "api.simplisafe.com",
        f"/v1/subscriptions/{TEST_SUBSCRIPTION_ID}/settings",
        "get",
        response=aiohttp.web_response.json_response(v2_settings_response, status=200),
    )
    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        # Manually set the expiration datetime to force a refresh token flow:
        simplisafe._access_token_expire_dt = datetime.utcnow()
        # If this succeeds without throwing an exception, the retry is successful:
        await simplisafe.async_get_systems()
        # The API object should now hold the refreshed tokens:
        assert simplisafe.access_token == "<PASSWORD>"
        assert simplisafe.refresh_token == "<PASSWORD>"
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_403_bad_credentials(aresponses, invalid_authorization_code_response):
    """Test that an InvalidCredentialsError is raised with a 403.

    Mirrors test_401_bad_credentials, but with a 403 status from the OAuth
    token endpoint.
    """
    aresponses.add(
        "auth.simplisafe.com",
        "/oauth/token",
        "post",
        response=aiohttp.web_response.json_response(
            invalid_authorization_code_response, status=403
        ),
    )

    async with aiohttp.ClientSession() as session:
        with pytest.raises(InvalidCredentialsError):
            await API.async_from_auth(
                TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
            )

    # Consistency fix: every sibling test verifies the mocked plan was fully
    # consumed; this one was missing the assertion.
    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_client_async_from_authorization_code(
    api_token_response, aresponses, auth_check_response
):
    """Test creating a client from an authorization code.

    The auth-code exchange hits the token endpoint, then the client validates
    the credentials via the authCheck endpoint.
    """
    aresponses.add(
        "auth.simplisafe.com",
        "/oauth/token",
        "post",
        response=aiohttp.web_response.json_response(api_token_response, status=200),
    )
    aresponses.add(
        "api.simplisafe.com",
        "/v1/api/authCheck",
        "get",
        response=aiohttp.web_response.json_response(auth_check_response, status=200),
    )

    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        # The tokens from the mocked response must be stored on the client:
        assert simplisafe.access_token == TEST_ACCESS_TOKEN
        assert simplisafe.refresh_token == TEST_REFRESH_TOKEN

    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_client_async_from_refresh_token(
    api_token_response, aresponses, auth_check_response
):
    """Test creating a client from a refresh token.

    Same endpoints as the auth-code flow, but bootstrapped from a previously
    issued refresh token.
    """
    aresponses.add(
        "auth.simplisafe.com",
        "/oauth/token",
        "post",
        response=aiohttp.web_response.json_response(api_token_response, status=200),
    )
    aresponses.add(
        "api.simplisafe.com",
        "/v1/api/authCheck",
        "get",
        response=aiohttp.web_response.json_response(auth_check_response, status=200),
    )

    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_refresh_token(
            TEST_REFRESH_TOKEN, session=session
        )
        assert simplisafe.access_token == TEST_ACCESS_TOKEN
        assert simplisafe.refresh_token == TEST_REFRESH_TOKEN

    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_refresh_token_listener_callback(
    api_token_response,
    aresponses,
    caplog,
    server,
    v2_settings_response,
    v2_subscriptions_response,
):
    """Test that listener callbacks are executed correctly.

    A registered listener must be invoked (once, with the new refresh token)
    after a refresh; a listener whose remove-callable was invoked must not be.
    """
    import logging

    caplog.set_level(logging.DEBUG)

    # Both initial requests 401, forcing the refresh flow:
    server.add(
        "api.simplisafe.com",
        f"/v1/users/{TEST_SUBSCRIPTION_ID}/subscriptions",
        "get",
        response=aresponses.Response(text="Unauthorized", status=401),
    )
    server.add(
        "api.simplisafe.com",
        f"/v1/subscriptions/{TEST_SUBSCRIPTION_ID}/settings",
        "get",
        response=aresponses.Response(text="Unauthorized", status=401),
    )
    # NOTE(review): "<PASSWORD>" appears to be an anonymization placeholder;
    # the assertion below expects "aabbcc11" — these presumably matched in the
    # original source. Confirm against the upstream repo.
    api_token_response["access_token"] = "<PASSWORD>"
    api_token_response["refresh_token"] = "<PASSWORD>"
    server.add(
        "auth.simplisafe.com",
        "/oauth/token",
        "post",
        response=aiohttp.web_response.json_response(api_token_response, status=200),
    )
    server.add(
        "api.simplisafe.com",
        f"/v1/users/{TEST_SUBSCRIPTION_ID}/subscriptions",
        "get",
        response=aiohttp.web_response.json_response(
            v2_subscriptions_response, status=200
        ),
    )
    server.add(
        "api.simplisafe.com",
        f"/v1/subscriptions/{TEST_SUBSCRIPTION_ID}/settings",
        "get",
        response=aiohttp.web_response.json_response(v2_settings_response, status=200),
    )

    mock_listener_1 = Mock()
    mock_listener_2 = Mock()

    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        # Manually set the expiration datetime to force a refresh token flow:
        simplisafe._access_token_expire_dt = datetime.utcnow()
        # We'll hang onto one listener callback:
        simplisafe.add_refresh_token_listener(mock_listener_1)
        assert mock_listener_1.call_count == 0
        # ..and delete the a second one before ever using it:
        remove = simplisafe.add_refresh_token_listener(mock_listener_2)
        remove()
        await simplisafe.async_get_systems()
        # Only the still-registered listener fires, exactly once:
        mock_listener_1.assert_called_once_with("aabbcc11")
        assert mock_listener_1.call_count == 1
        assert mock_listener_2.call_count == 0
@pytest.mark.asyncio
async def test_request_error_failed_retry(aresponses, server):
    """Test that a RequestError that fails multiple times still raises.

    With request_retries=1, two consecutive 409 responses exhaust the retry
    budget and the error must propagate.
    """
    server.add(
        "api.simplisafe.com",
        f"/v1/users/{TEST_SUBSCRIPTION_ID}/subscriptions",
        "get",
        response=aresponses.Response(text="Conflict", status=409),
    )
    # The retry hits the same failure:
    server.add(
        "api.simplisafe.com",
        f"/v1/users/{TEST_SUBSCRIPTION_ID}/subscriptions",
        "get",
        response=aresponses.Response(text="Conflict", status=409),
    )

    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE,
            TEST_CODE_VERIFIER,
            session=session,
            # Set so that our tests don't take too long:
            request_retries=1,
        )
        with pytest.raises(RequestError):
            await simplisafe.async_get_systems()

    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_request_error_successful_retry(
    api_token_response,
    aresponses,
    server,
    v2_settings_response,
    v2_subscriptions_response,
):
    """Test that a RequestError can be successfully retried.

    Flow: a transient 409 on subscriptions, then the retry path succeeds
    (token endpoint + subscriptions + settings all return 200).
    """
    server.add(
        "api.simplisafe.com",
        f"/v1/users/{TEST_SUBSCRIPTION_ID}/subscriptions",
        "get",
        response=aresponses.Response(text="Conflict", status=409),
    )
    server.add(
        "auth.simplisafe.com",
        "/oauth/token",
        "post",
        response=aiohttp.web_response.json_response(api_token_response, status=200),
    )
    server.add(
        "api.simplisafe.com",
        f"/v1/users/{TEST_SUBSCRIPTION_ID}/subscriptions",
        "get",
        response=aiohttp.web_response.json_response(
            v2_subscriptions_response, status=200
        ),
    )
    server.add(
        "api.simplisafe.com",
        f"/v1/subscriptions/{TEST_SUBSCRIPTION_ID}/settings",
        "get",
        response=aiohttp.web_response.json_response(v2_settings_response, status=200),
    )

    async with aiohttp.ClientSession() as session:
        simplisafe = await API.async_from_auth(
            TEST_AUTHORIZATION_CODE, TEST_CODE_VERIFIER, session=session
        )
        # If this succeeds without throwing an exception, the retry is successful:
        await simplisafe.async_get_systems()

    aresponses.assert_plan_strictly_followed()
@pytest.mark.asyncio
async def test_string_response(aresponses):
    """Test that a quoted string response is handled correctly.

    The server sometimes returns a JSON-quoted bare string (e.g.
    '"Unauthorized"') instead of an object; the client must still map the 401
    to InvalidCredentialsError.
    """
    aresponses.add(
        "auth.simplisafe.com",
        "/oauth/token",
        "post",
        response=aresponses.Response(text='"Unauthorized"', status=401),
    )

    async with aiohttp.ClientSession() as session:
        with pytest.raises(InvalidCredentialsError):
            await API.async_from_auth(
                TEST_AUTHORIZATION_CODE,
                TEST_CODE_VERIFIER,
                session=session,
                # Set so that our tests don't take too long:
                request_retries=1,
            )

    aresponses.assert_plan_strictly_followed()
|
bachya/simplisafe-python | simplipy/system/__init__.py | """Define V2 and V3 SimpliSafe systems."""
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import TYPE_CHECKING, Any, Callable, Dict, List, cast
from simplipy.const import LOGGER
from simplipy.device import DeviceTypes
from simplipy.device.sensor.v2 import SensorV2
from simplipy.device.sensor.v3 import SensorV3
from simplipy.errors import PinError, SimplipyError
from simplipy.util.dt import utc_from_timestamp
from simplipy.util.string import convert_to_underscore
if TYPE_CHECKING:
from simplipy.api import API
VERSION_V2 = 2
VERSION_V3 = 3
EVENT_SYSTEM_NOTIFICATION = "system_notification"
CONF_DEFAULT = "default"
CONF_DURESS_PIN = "duress"
CONF_MASTER_PIN = "master"
DEFAULT_MAX_USER_PINS = 4
MAX_PIN_LENGTH = 4
RESERVED_PIN_LABELS = {CONF_DURESS_PIN, CONF_MASTER_PIN}
@dataclass(frozen=True)
class SystemNotification:
    """Define a representation of a system notification."""

    notification_id: str  # SimpliSafe's unique ID for the notification
    text: str  # Human-readable notification text
    category: str
    code: str
    timestamp: float  # Raw timestamp as delivered by the SimpliSafe API

    link: str | None = None  # Optional URL with more information
    link_label: str | None = None  # Display label for ``link``

    def __post_init__(self) -> None:
        """Run post-init initialization."""
        # The dataclass is frozen, so the derived UTC datetime has to be
        # attached via object.__setattr__ rather than normal assignment.
        # (Note: ``received_dt`` is not declared as a field, so it won't show
        # up in repr/eq/asdict.)
        object.__setattr__(self, "received_dt", utc_from_timestamp(self.timestamp))
class SystemStates(Enum):
    """States that the system can be in."""

    # Member *names* mirror SimpliSafe's raw "alarmState" strings (after
    # conversion to snake_case); System.async_update matches on name, so the
    # numeric values themselves are arbitrary.
    alarm = 1
    alarm_count = 2
    away = 3
    away_count = 4
    entry_delay = 5
    error = 6
    exit_delay = 7
    home = 8
    home_count = 9
    off = 10
    test = 11
    unknown = 99  # Fallback when the API reports an unrecognized state
def get_device_type_from_data(device_data: dict[str, Any]) -> DeviceTypes:
    """Return the ``DeviceTypes`` member matching a raw data payload.

    Unrecognized type codes are logged and mapped to ``DeviceTypes.unknown``
    rather than raising.
    """
    raw_type = device_data["type"]
    try:
        device_type = DeviceTypes(raw_type)
    except ValueError:
        LOGGER.error("Unknown device type: %s", raw_type)
        device_type = DeviceTypes.unknown
    return device_type
def guard_from_missing_data(default_value: Any = None) -> Callable:
    """Build a decorator that guards a property against missing API data.

    The wrapped accessor reads nested keys out of raw API payloads; when
    SimpliSafe omits a key, the accessor logs a warning and returns
    ``default_value`` instead of raising ``KeyError``.
    """

    def decorator(func: Callable) -> Callable:
        """Wrap a single-argument accessor with KeyError protection."""

        def wrapper(system: "System") -> Any:
            """Invoke the accessor, substituting the default on missing data."""
            try:
                result = func(system)
            except KeyError:
                LOGGER.warning(
                    "SimpliSafe didn't return data for property: %s", func.__name__
                )
                result = default_value
            return result

        return wrapper

    return decorator
class System:
    """Define a system.

    Note that this class shouldn't be instantiated directly; it will be instantiated as
    appropriate via :meth:`simplipy.API.async_get_systems`.

    :param api: A :meth:`simplipy.API` object
    :type api: :meth:`simplipy.API`
    :param sid: A subscription ID
    :type sid: ``int``
    """

    def __init__(self, api: "API", sid: int) -> None:
        """Initialize."""
        self._api = api
        self._sid = sid

        # These will get filled in after initial update:
        self._notifications: list[SystemNotification] = []
        self._state = SystemStates.unknown
        # Raw device payloads keyed by serial; populated by subclasses.
        self.sensor_data: dict[str, dict[str, Any]] = {}
        # Device objects keyed by serial; populated by generate_device_objects.
        self.sensors: dict[str, SensorV2 | SensorV3] = {}

    @property  # type: ignore
    @guard_from_missing_data()
    def address(self) -> str:
        """Return the street address of the system.

        :rtype: ``str``
        """
        return cast(str, self._api.subscription_data[self._sid]["location"]["street1"])

    @property  # type: ignore
    @guard_from_missing_data(False)
    def alarm_going_off(self) -> bool:
        """Return whether the alarm is going off.

        :rtype: ``bool``
        """
        return cast(
            bool,
            self._api.subscription_data[self._sid]["location"]["system"]["isAlarming"],
        )

    @property  # type: ignore
    @guard_from_missing_data()
    def connection_type(self) -> str:
        """Return the system's connection type (cell or WiFi).

        :rtype: ``str``
        """
        return cast(
            str,
            self._api.subscription_data[self._sid]["location"]["system"]["connType"],
        )

    @property
    def notifications(self) -> list[SystemNotification]:
        """Return the system's current messages/notifications.

        :rtype: ``List[:meth:`simplipy.system.SystemNotification`]``
        """
        return self._notifications

    @property  # type: ignore
    @guard_from_missing_data()
    def serial(self) -> str:
        """Return the system's serial number.

        :rtype: ``str``
        """
        return cast(
            str,
            self._api.subscription_data[self._sid]["location"]["system"]["serial"],
        )

    @property
    def state(self) -> SystemStates:
        """Return the current state of the system.

        :rtype: :meth:`simplipy.system.SystemStates`
        """
        return self._state

    @property  # type: ignore
    @guard_from_missing_data()
    def system_id(self) -> int:
        """Return the SimpliSafe identifier for this system.

        :rtype: ``int``
        """
        return self._sid

    @property  # type: ignore
    @guard_from_missing_data()
    def temperature(self) -> int:
        """Return the overall temperature measured by the system.

        :rtype: ``int``
        """
        return cast(
            int,
            self._api.subscription_data[self._sid]["location"]["system"]["temperature"],
        )

    @property  # type: ignore
    @guard_from_missing_data()
    def version(self) -> int:
        """Return the system version.

        :rtype: ``int``
        """
        return cast(
            int,
            self._api.subscription_data[self._sid]["location"]["system"]["version"],
        )

    # The following four methods define the subclass contract (V2/V3 systems
    # implement them differently):

    async def _async_set_state(self, value: SystemStates) -> None:
        """Raise if calling this undefined based method."""
        raise NotImplementedError()

    async def _async_set_updated_pins(self, pins: dict[str, Any]) -> None:
        """Post new PINs."""
        raise NotImplementedError()

    async def _async_update_device_data(self, cached: bool = False) -> None:
        """Update all device data."""
        raise NotImplementedError()

    async def _async_update_settings_data(self, cached: bool = True) -> None:
        """Update all settings data."""
        raise NotImplementedError()

    async def _async_update_subscription_data(self) -> None:
        """Update subscription data."""
        await self._api.async_update_subscription_data()

    async def async_clear_notifications(self) -> None:
        """Clear all active notifications.

        This will remove the notifications from SimpliSafe's cloud, meaning they will no
        longer visible in the SimpliSafe mobile and web apps.
        """
        if self._notifications:
            await self._api.request(
                "delete", f"subscriptions/{self.system_id}/messages"
            )
            self._notifications = []

    def generate_device_objects(self) -> None:
        """Generate device objects for this system."""
        raise NotImplementedError()

    async def async_get_events(
        self, from_datetime: datetime | None = None, num_events: int | None = None
    ) -> list[dict[str, Any]]:
        """Get events recorded by the base station.

        If no parameters are provided, this will return the most recent 50 events.

        :param from_datetime: The starting datetime (if desired)
        :type from_datetime: ``datetime.datetime``
        :param num_events: The number of events to return.
        :type num_events: ``int``
        :rtype: ``list``
        """
        params = {}
        if from_datetime:
            params["fromTimestamp"] = round(from_datetime.timestamp())
        if num_events:
            params["numEvents"] = num_events

        events_resp = await self._api.request(
            "get", f"subscriptions/{self.system_id}/events", params=params
        )

        return cast(List[Dict[str, Any]], events_resp.get("events", []))

    async def async_get_latest_event(self) -> dict:
        """Get the most recent system event.

        :rtype: ``dict``
        """
        events = await self.async_get_events(num_events=1)

        try:
            return events[0]
        except IndexError:
            raise SimplipyError("SimpliSafe didn't return any events") from None

    async def async_get_pins(self, cached: bool = True) -> dict[str, str]:
        """Return all of the set PINs, including master and duress.

        The ``cached`` parameter determines whether the SimpliSafe Cloud uses the last
        known values retrieved from the base station (``True``) or retrieves new data.

        :param cached: Whether to used cached data.
        :type cached: ``bool``
        :rtype: ``Dict[str, str]``
        """
        raise NotImplementedError()

    async def async_remove_pin(self, pin_or_label: str) -> None:
        """Remove a PIN by its value or label.

        :param pin_or_label: The PIN value or label to remove
        :type pin_or_label: ``str``
        """
        # Because SimpliSafe's API works by sending the entire payload of PINs, we
        # can't reasonably check a local cache for up-to-date PIN data; so, we fetch the
        # latest each time:
        latest_pins = await self.async_get_pins(cached=False)

        if pin_or_label in RESERVED_PIN_LABELS:
            raise PinError(f"Refusing to delete reserved PIN: {pin_or_label}")

        try:
            # Match against either the label (key) or the PIN value itself:
            label = next((k for k, v in latest_pins.items() if pin_or_label in (k, v)))
        except StopIteration:
            raise PinError(f"Cannot delete nonexistent PIN: {pin_or_label}") from None

        del latest_pins[label]

        await self._async_set_updated_pins(latest_pins)

    async def async_set_away(self) -> None:
        """Set the system in "Away" mode."""
        await self._async_set_state(SystemStates.away)

    async def async_set_home(self) -> None:
        """Set the system in "Home" mode."""
        await self._async_set_state(SystemStates.home)

    async def async_set_off(self) -> None:
        """Set the system in "Off" mode."""
        await self._async_set_state(SystemStates.off)

    async def async_set_pin(self, label: str, pin: str) -> None:
        """Set a PIN.

        :param label: The label to use for the PIN (shown in the SimpliSafe app)
        :type label: str
        :param pin: The pin value
        :type pin: str
        """
        # PINs must be exactly MAX_PIN_LENGTH numeric digits:
        if len(pin) != MAX_PIN_LENGTH:
            raise PinError(f"PINs must be {MAX_PIN_LENGTH} digits long")

        try:
            int(pin)
        except ValueError:
            raise PinError("PINs can only contain numbers") from None

        # Because SimpliSafe's API works by sending the entire payload of PINs, we
        # can't reasonably check a local cache for up-to-date PIN data; so, we fetch the
        # latest each time.
        latest_pins = await self.async_get_pins(cached=False)

        if pin in latest_pins.values():
            raise PinError(f"Refusing to create duplicate PIN: {pin}")

        max_pins = DEFAULT_MAX_USER_PINS + len(RESERVED_PIN_LABELS)
        if len(latest_pins) == max_pins and label not in RESERVED_PIN_LABELS:
            raise PinError(f"Refusing to create more than {max_pins} user PINs")

        latest_pins[label] = pin

        await self._async_set_updated_pins(latest_pins)

    async def async_update(
        self,
        *,
        include_subscription: bool = True,
        include_settings: bool = True,
        include_devices: bool = True,
        cached: bool = True,
    ) -> None:
        """Get the latest system data.

        The ``cached`` parameter determines whether the SimpliSafe Cloud uses the last
        known values retrieved from the base station (``True``) or retrieves new data.

        :param include_subscription: Whether system state/properties should be updated
        :type include_subscription: ``bool``
        :param include_settings: Whether system settings (like PINs) should be updated
        :type include_settings: ``bool``
        :param include_devices: whether sensors/locks/etc. should be updated
        :type include_devices: ``bool``
        :param cached: Whether to used cached data.
        :type cached: ``bool``
        """
        if include_subscription:
            await self._async_update_subscription_data()
        if include_settings:
            await self._async_update_settings_data(cached)
        if include_devices:
            await self._async_update_device_data(cached)

        # Create notifications:
        # NOTE(review): "link"/"linkLabel" are accessed with [] while
        # "messages" uses .get() — assumes every message carries those keys;
        # verify against the API payload.
        self._notifications = [
            SystemNotification(
                raw_message["id"],
                raw_message["text"],
                raw_message["category"],
                raw_message["code"],
                raw_message["timestamp"],
                link=raw_message["link"],
                link_label=raw_message["linkLabel"],
            )
            for raw_message in self._api.subscription_data[self._sid]["location"][
                "system"
            ].get("messages", [])
        ]

        # Set the current state (state names arrive camelCased; SystemStates
        # members are snake_cased, hence the conversion):
        raw_state = self._api.subscription_data[self._sid]["location"]["system"].get(
            "alarmState"
        )

        try:
            self._state = SystemStates[convert_to_underscore(raw_state)]
        except KeyError:
            LOGGER.error("Unknown raw system state: %s", raw_state)
            self._state = SystemStates.unknown
|
cbb2020/gcn | gcn/test.py | # from __future__ import division
# from __future__ import print_function
# Scratch/test script: everything except the load_data() call below is the
# original GCN training setup, kept commented out for reference.
# import time
# import tensorflow as tf
# from gcn.utils import *
# from gcn.models import GCN, MLP
from utils import load_data
# Set random seed
# seed = 123
# np.random.seed(seed)
# tf.set_random_seed(seed)
# Settings
# flags = tf.app.flags
# FLAGS = flags.FLAGS
# flags.DEFINE_string('dataset', 'cora', 'Dataset string.')  # 'cora', 'citeseer', 'pubmed'
# flags.DEFINE_string('model', 'gcn', 'Model string.')  # 'gcn', 'gcn_cheby', 'dense'
# flags.DEFINE_float('learning_rate', 0.01, 'Initial learning rate.')
# flags.DEFINE_integer('epochs', 200, 'Number of epochs to train.')
# flags.DEFINE_integer('hidden1', 16, 'Number of units in hidden layer 1.')
# flags.DEFINE_float('dropout', 0.5, 'Dropout rate (1 - keep probability).')
# flags.DEFINE_float('weight_decay', 5e-4, 'Weight for L2 loss on embedding matrix.')
# flags.DEFINE_integer('early_stopping', 10, 'Tolerance for early stopping (# of epochs).')
# flags.DEFINE_integer('max_degree', 3, 'Maximum Chebyshev polynomial degree.')
# Load data
# Smoke test: loading the cora dataset exercises the project's load_data().
adj, features, y_train, y_val, y_test, train_mask, val_mask, test_mask = load_data('cora')
# print(test_mask[:100])
# lst = [[1, 2], [3, 4], [5, 6]]
# idx = [0, 1]
# print(lst[idx, :])
|
CatTanker/cncnet-discord-bot | discord_utils.py | class DiscordParseException(Exception):
"""An exception that is thrown when parsing Discord's representation of a channel / role / user mention fails."""
def parse_discord_str(content_str: str, type_chars: str) -> int:
    """Parses Discord's representation of a channel / role / user mention into an ID.

    :param content_str: a Discord mention string, e.g. ``<#123456>``
    :param type_chars: the type prefix inside the brackets ('#', '@&' or '@!')
    :return: the numeric ID embedded in the mention
    :raises DiscordParseException: if the string is not a valid mention of the
        requested type, including a non-numeric ID part
    """
    if content_str.startswith('<') and content_str.endswith('>') and content_str[1:-1].startswith(type_chars):
        try:
            return int(content_str[(1 + len(type_chars)):-1])
        except ValueError:
            # Bug fix: a malformed ID (e.g. "<#abc>") used to leak a bare
            # ValueError; callers should only have to catch one exception type.
            pass
    raise DiscordParseException(f"{content_str} is not a valid Discord-formatted ID representation for '{type_chars}'")
def format_discord_str(discord_id: int, type_chars: str) -> str:
    """Renders an ID as Discord's mention markup for the given type prefix."""
    return "<" + type_chars + str(discord_id) + ">"
# Shortcut functions: thin wrappers binding the type prefix for each kind of
# Discord mention ('#' = channel, '@&' = role, '@!' = user).
def parse_channel(content_str: str) -> int:
    """Parses a Discord channel mention (``<#id>``) into an ID."""
    return parse_discord_str(content_str, type_chars='#')


def format_channel(discord_id: int) -> str:
    """Formats an ID as a Discord channel mention (``<#id>``)."""
    return format_discord_str(discord_id, type_chars='#')


def parse_role(content_str: str) -> int:
    """Parses a Discord role mention (``<@&id>``) into an ID."""
    return parse_discord_str(content_str, type_chars='@&')


def format_role(discord_id: int) -> str:
    """Formats an ID as a Discord role mention (``<@&id>``)."""
    return format_discord_str(discord_id, type_chars='@&')


def parse_user(content_str: str) -> int:
    """Parses a Discord user mention (``<@!id>``) into an ID."""
    return parse_discord_str(content_str, type_chars='@!')


def format_user(discord_id: int) -> str:
    """Formats an ID as a Discord user mention (``<@!id>``)."""
    return format_discord_str(discord_id, type_chars='@!')
|
CatTanker/cncnet-discord-bot | utils.py | <filename>utils.py
import asyncio
# https://phoolish-philomath.com/asynchronous-task-scheduling-in-python.html
async def run_periodically(wait_time, coro, *args):
    """
    Helper for schedule_task_periodically.

    Awaits ``coro(*args)`` forever, sleeping ``wait_time`` seconds after each
    invocation; intended to run inside a cancellable asyncio.Task.

    :param wait_time: seconds to wait between iterations of coro
    :param coro: the coroutine function that will be run
    :param args: any args that need to be provided to coro
    """
    while True:
        # Run first, then sleep — the first invocation happens immediately.
        await coro(*args)
        await asyncio.sleep(wait_time)
def schedule_task_periodically(wait_time, coro, event_loop, *args):
    """
    Schedule a coroutine to run periodically as an asyncio.Task.

    :param wait_time: interval (in seconds)
    :param coro: the coroutine that will be run
    :param event_loop: the event loop used
    :param args: any args needed to be provided to coro
    :return: an asyncio Task that has been scheduled to run
    """
    periodic = run_periodically(wait_time, coro, *args)
    return event_loop.create_task(periodic)
async def cancel_scheduled_task(task):
    """
    Gracefully cancels a task and waits for it to finish shutting down.

    :type task: asyncio.Task
    """
    task.cancel()
    try:
        # Awaiting the cancelled task lets it run its cleanup; the resulting
        # CancelledError is expected and swallowed.
        await task
    except asyncio.CancelledError:
        pass
CatTanker/cncnet-discord-bot | data_classes.py | <filename>data_classes.py
import json
import discord
from distutils.util import strtobool
from typing import List
from datetime import datetime
from dataclasses import dataclass
from dataclasses_json import dataclass_json
@dataclass_json
@dataclass
class BotConfig(object):
    """Persistent bot configuration, (de)serialized as JSON via dataclasses_json."""

    # Display info for the game this bot serves:
    game_name: str = 'CnCNet game'
    game_short_name: str = ''
    game_url: str = 'https://cncnet.org'
    game_icon_url: str = 'https://avatars0.githubusercontent.com/u/11489929?s=200&v=4'

    # Discord settings; channel IDs default to None (feature disabled):
    discord_token: str = ''
    discord_prefix: str = '!'
    # discord_announce_channel: int = None
    discord_list_channel: int = None
    discord_message_channel: int = None
    # discord_announce_message: str = "Hey people, a new game has been hosted!"

    # IRC settings:
    irc_host: str = 'irc.gamesurge.net'
    irc_port: int = 6667
    irc_name: str = 'discord_bot'
    irc_lobby_channel: str = ''
    irc_broadcast_channel: str = ''

    @staticmethod
    def read_from_file(json_path):
        """Load a BotConfig from a JSON file at ``json_path``."""
        with open(json_path) as json_file:
            return BotConfig.from_dict(json.load(json_file))

    def write_to_file(self, json_path):
        """Write this config as pretty-printed JSON to ``json_path``."""
        with open(json_path, 'w') as json_file:
            json.dump(self.to_dict(), json_file, indent=4)
@dataclass
class CnCNetGame(object):
    """Represents information about a game or mod on CnCNet."""

    name: str  # Display name of the game/mod
    icon_url: str  # Icon shown in Discord embeds
    site_url: str  # Homepage linked from embeds
class NotConfiguredException(Exception):
    """An exception that is thrown when the bot hasn't been configured."""

    def __init__(self, msg="Bot wasn't configured via the config file.", *args, **kwargs):
        """Initialize with a sensible default message.

        :param msg: the error message (fixed typo: "confgiured" -> "configured")
        """
        super().__init__(msg, *args, **kwargs)
class ParseException(Exception):
    """An exception that is thrown upon trying to parse incorrect CTCP GAME command message."""
@dataclass
class HostedGame(object):
    """Represents information about a game lobby hosted on CnCNet.

    NOTE(review): because a custom __init__ is defined, @dataclass does not
    generate one; the field declarations below serve as documentation/typing.
    """

    game: CnCNetGame
    protocol_version: str
    game_version: str
    max_players: int
    channel_name: str
    display_name: str
    is_locked: bool
    is_passworded: bool
    is_closed: bool
    is_loaded: bool
    is_ladder: bool
    players: List[str]
    map_name: str
    game_mode: str
    tunnel_address_and_port: str
    loaded_game_id: str

    def __init__(self, command_contents: str, game: CnCNetGame):
        """Build a HostedGame from a raw CTCP GAME payload and game metadata."""
        self.parse_message_string(command_contents)
        self.game: CnCNetGame = game

    def parse_message_string(self, command_contents: str):
        """Parses CTCP GAME command message that's broadcasted by XNA client when the game is hosted."""
        # Record when this broadcast was seen; used for stale-game cleanup.
        # (Not a declared dataclass field — set dynamically here.)
        self.timestamp = datetime.now()
        # Payload is 11 semicolon-separated fields; field 6 (index 5) packs
        # five one-character boolean flags.
        split: List[str] = command_contents.split(';')
        if (len(split) != 11):
            raise ParseException('The provided string has invalid amount of parameters')
        self.protocol_version: str = split[0]
        self.game_version: str = split[1]
        self.max_players: int = int(split[2])
        self.channel_name: str = split[3]
        self.display_name: str = split[4]
        # NOTE(review): distutils.util.strtobool is deprecated (PEP 632) and
        # removed with distutils in Python 3.12 — needs a replacement to run
        # on modern interpreters.
        self.is_locked: bool = bool(strtobool(split[5][0]))
        self.is_passworded: bool = bool(strtobool(split[5][1]))
        self.is_closed: bool = bool(strtobool(split[5][2]))
        self.is_loaded: bool = bool(strtobool(split[5][3]))
        self.is_ladder: bool = bool(strtobool(split[5][4]))
        self.players: List[str] = split[6].split(',')
        self.map_name: str = split[7]
        self.game_mode: str = split[8]
        self.tunnel_address_and_port: str = split[9]
        self.loaded_game_id: str = split[10]

    def get_embed(self, host: str = None) -> discord.Embed:
        """Returns hosted game information formatted as embed in form of discord.py Embed instance."""
        # Lock/key emoji suffixes signal locked/passworded lobbies at a glance.
        embed_title = self.display_name
        if self.is_locked:
            embed_title += "🔒"
        if self.is_passworded:
            embed_title += "🔑"
        embed: discord.Embed = discord.Embed(
            title=embed_title,
            # colour=Colour(0xd5d7da),
            description=f"[{self.game.name}]({self.game.site_url}) {self.game_version}"
        )
        embed.set_thumbnail(url=self.game.icon_url)
        if host:
            embed.set_author(name=host)
        # embed.set_footer(text="footer text", icon_url="https://cdn.discordapp.com/embed/avatars/0.png")
        embed.add_field(name="🎮 Game mode", value=self.game_mode, inline=True)
        embed.add_field(name="🗺 Map", value=self.map_name, inline=True)
        # embed.add_field(name="🔢 Version", value=self.game_version, inline=True)
        embed.add_field(name=f"🧍 Players ({len(self.players)} / {self.max_players})", value="\n".join(self.players), inline=True)
        # TODO write rest of the stuff
        return embed
@dataclass
class GameMessagePair(object):
    """A class which stores a hosted game and a corresponding Discord embed message."""

    game: HostedGame
    # None until (and unless) an embed has been posted to the list channel.
    message: discord.Message = None
|
CatTanker/cncnet-discord-bot | irc_client.py | import pydle
import logging
from data_classes import *
# Compose a pydle client class from the feature mixins this bot needs:
# RFC1459 core protocol, TLS, and CTCP (used for CnCNet GAME broadcasts).
Base = pydle.featurize(pydle.features.RFC1459Support, pydle.features.TLSSupport, pydle.features.CTCPSupport)


class IRCClient(Base):
    """A pydle IRC client that dispatches events to externally registered handlers."""

    def __init__(self, *args, **kwargs):
        """Initialize with an empty handler registry."""
        # Maps event names ("on_connect", "on_message", "on_ctcp_game_reply")
        # to registered coroutine functions.
        self.event_handlers = {}
        super().__init__(*args, **kwargs)

    def event_handler(self, f):
        """Register *f* (used as a decorator) under its function name.

        The coroutine's __name__ must match one of the events dispatched
        below; dispatch raises KeyError if an event fires unregistered.
        """
        self.event_handlers[f.__name__] = f
        return f

    async def on_connect(self):
        await super().on_connect()
        await self.event_handlers["on_connect"]()

    async def on_message(self, channel, sender, message):
        await super().on_message(channel, sender, message)
        await self.event_handlers["on_message"](channel, sender, message)

    async def on_ctcp_game_reply(self, sender, channel, contents):
        # CTCP "GAME" messages are broadcast by CnCNet clients when hosting.
        await self.event_handlers["on_ctcp_game_reply"](sender, channel, contents)
CatTanker/cncnet-discord-bot | discord_cncnet_bot.py | import discord
import os.path
import asyncio
import logging
import signal
from irc_client import IRCClient
from discord.ext import commands
from discord.ext.commands import has_permissions
from typing import *
from data_classes import *
from discord_utils import *
from utils import *
GAME_TIMEOUT = 35
class DiscordCnCNetBot(object):
def __init__(self, config_path: str = 'config.json', event_loop=None):
self.config_path = config_path
if os.path.isfile(self.config_path):
self.config = BotConfig.read_from_file(self.config_path)
else:
self.config = BotConfig()
if not self.config.discord_token:
logging.warning("No Discord token set")
self.event_loop = event_loop if event_loop else asyncio.new_event_loop()
self.hosted_games: Dict[str, GameMessagePair] = {}
self.irc_client = IRCClient(
nickname=self.config.irc_name,
eventloop=self.event_loop)
self.setup_irc_client()
self.discord_client = commands.Bot(
command_prefix=self.config.discord_prefix,
loop=self.event_loop)
self.setup_discord_client()
async def cleanup_obsolete_games(self):
to_remove = []
for sender in self.hosted_games:
if (datetime.now() - self.hosted_games[sender].game.timestamp).seconds > GAME_TIMEOUT:
to_remove.append(sender)
for sender in to_remove:
try:
await self.hosted_games[sender].message.delete()
self.hosted_games.pop(sender, None)
except:
pass
def setup_irc_client(self):
@self.irc_client.event_handler
async def on_connect():
await self.irc_client.join(self.config.irc_lobby_channel)
await self.irc_client.join(self.config.irc_broadcast_channel)
@self.irc_client.event_handler
async def on_message(channel, sender, message):
"""Forward IRC message to Discord channel."""
if sender == self.irc_client.nickname:
return
if self.config.discord_message_channel:
msg_channel = self.discord_client.get_channel(self.config.discord_message_channel)
await msg_channel.send(f"**`<{sender}>`** {message}")
@self.irc_client.event_handler
async def on_ctcp_game_reply(sender, channel, contents):
"""Handle CTCP GAME message broadcasted by clients when they host a game."""
logging.info("Received a CTCP GAME message")
try:
hosted_game = HostedGame(contents,
CnCNetGame(self.config.game_name, self.config.game_icon_url, self.config.game_url))
if hosted_game.is_closed:
if sender in self.hosted_games:
# if we have it in game list - remove the message and the game
if self.hosted_games[sender].message:
msg = self.hosted_games[sender].message
await msg.delete()
self.hosted_games.pop(sender, None)
else:
if sender in self.hosted_games:
# update the message if already listed
self.hosted_games[sender].game = hosted_game
if self.config.discord_list_channel:
list_id = self.config.discord_list_channel
try:
msg = self.hosted_games[sender].message
await msg.edit(embed=hosted_game.get_embed(host=sender))
except discord.errors.NotFound:
# if for some reason it wasn't found - send it
list_channel = self.discord_client.get_channel(list_id)
self.hosted_games[sender].message = await list_channel.send(
embed=hosted_game.get_embed(host=sender))
else:
# post a new message in the list channel and announce the game (if channels are set)
self.hosted_games[sender] = GameMessagePair(hosted_game)
if self.config.discord_list_channel:
list_id = self.config.discord_list_channel
list_channel = self.discord_client.get_channel(list_id)
self.hosted_games[sender].message = await list_channel.send(
embed=hosted_game.get_embed(host=sender))
# if self.config.discord_announce_channel:
# announce_id = self.config.discord_announce_channel
# announce_channel = self.discord_client.get_channel(announce_id)
# await announce_channel.send(self.config.discord_announce_message)
except Exception as e:
logging.warning(f"Got error when parsing game message: {e.message}")
def setup_discord_client(self):
    """Register Discord event handlers and the admin-only !config command."""
    @self.discord_client.event
    async def on_message(message):
        # Bridge Discord chat into the IRC lobby, ignoring our own messages
        # and anything outside the configured message channel.
        if (self.config.irc_lobby_channel and
            message.author != self.discord_client.user and
            message.channel.id == self.config.discord_message_channel):
            await self.irc_client.message(self.config.irc_lobby_channel, f"<{message.author}> {message.content}")
        # Overriding on_message suppresses command processing unless we
        # forward the message to the command handler explicitly.
        await self.discord_client.process_commands(message)
    @self.discord_client.command()
    @has_permissions(administrator=True)
    async def config(context, key, value):
        """Sets certain config variables via a chat command."""
        # if key == "discord_prefix":
        #     self.config.discord_prefix = value
        if key == "discord_message_channel":
            self.config.discord_message_channel = parse_channel(value)
        # elif key == "discord_announce_channel":
        #     self.config.discord_announce_channel = parse_channel(value)
        elif key == "discord_list_channel":
            self.config.discord_list_channel = parse_channel(value)
        elif key == "discord_announce_message":
            self.config.discord_announce_message = value
        else:
            # Unknown key: ignore silently rather than persisting anything.
            return
        # Persist the change so it survives restarts.
        self.config.write_to_file(self.config_path)
        response = f"The value for key `{key}` is now `{value}`. "
        await context.send(response)
def run(self):
    """Start the IRC and Discord clients and run the event loop until interrupted."""
    try:
        if not self.config.discord_token:
            raise NotConfiguredException(f"Discord token isn't set in {self.config_path}")
        self.event_loop.create_task(self.irc_client.connect(
            self.config.irc_host,
            self.config.irc_port,
            reconnect=False))
        self.event_loop.create_task(self.discord_client.start(
            self.config.discord_token))
        # Periodically purge games whose hosts stopped broadcasting.
        schedule_task_periodically(GAME_TIMEOUT, self.cleanup_obsolete_games, self.event_loop)
        logging.info(f"Running main loop")
        self.event_loop.run_forever()
    except KeyboardInterrupt:
        logging.info(f"Caught interrupt")
    finally:
        logging.info(f"Finishing and cleaning up")
        # NOTE(review): discord.py 2.x renamed Client.logout() to close(), and
        # asyncio.gather()'s `loop` keyword was removed in Python 3.10 -- this
        # cleanup only works on the older pinned versions; confirm before
        # upgrading either dependency.
        self.event_loop.run_until_complete(asyncio.gather(
            self.discord_client.logout(),
            self.irc_client.disconnect(),
            loop=self.event_loop))
        # Persist any config changes made at runtime via the !config command.
        self.config.write_to_file(self.config_path)
if __name__ == "__main__":
    # Restore default SIGINT handling so Ctrl+C raises KeyboardInterrupt,
    # which run() catches for a clean shutdown.
    signal.signal(signal.SIGINT, signal.default_int_handler)
    logging.basicConfig(level=logging.WARN)
    bot = DiscordCnCNetBot()
    bot.run()
|
jsravn/feed | nginx/fake_graceful_nginx.py | #!/usr/bin/env python3
import signal
import sys
import time
def sigquit_handler(sig, frame):
    """Simulate nginx's graceful shutdown on SIGQUIT: brief delay, then exit 0."""
    time.sleep(0.5)
    print('Received sigquit, doing graceful shutdown')
    sys.exit(0)
# Can't do anything in this handler - python libs are not thread safe, so not safe to call e.g. print.
def sighup_handler(sig, frame):
    """Record a config reload by overwriting the marker file.

    Relies on the module-global startup_marker_file_name, which is assigned
    at script startup before this handler can fire.
    """
    with open(startup_marker_file_name, 'w') as f:
        f.write('reloaded!')
print('Running {}'.format(str(sys.argv)))
# Mimic `nginx -v` / `nginx -t`: report and exit immediately.
if sys.argv[1] == '-v':
    print('Asked for version')
    sys.exit(0)
if sys.argv[1] == '-t':
    print('Asked for config validation')
    sys.exit(0)
# The parent golang process blocks SIGQUIT in subprocesses, for some reason.
# So we unblock it manually - same as what nginx does.
signal.pthread_sigmask(signal.SIG_UNBLOCK, {signal.SIGQUIT, signal.SIGHUP})
signal.signal(signal.SIGQUIT, sigquit_handler)
signal.signal(signal.SIGHUP, sighup_handler)
# NOTE(review): missing parentheses -- this bare attribute reference is a
# no-op. Presumably dead code (calling signal.pause() here would block before
# the startup marker is written below); confirm intent and remove or call it.
signal.pause
# Marker file lives next to the path given in argv[2].
startup_marker_file_name = str.join('/', sys.argv[2].split('/')[:-1]) + '/nginx-log'
with open(startup_marker_file_name, 'w') as f:
    f.write('started!')
# Stay alive for 5s to give the parent time to signal us; otherwise fail.
time.sleep(5)
print('Quit after 5 seconds of nada')
sys.exit(-1)
|
sebastiangithub94/coockietest | {{cookiecutter.repo_name}}/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/settings/base.py | <gh_stars>0
"""
Django settings for {{ cookiecutter.project_name }} project.
"""
from os import environ, getenv
from os.path import abspath, basename, dirname, join, normpath
from sys import path
########## PATH CONFIGURATION
# NOTE(review): this concatenates "../../../" onto the *string* before taking
# dirname, so BASE_DIR resolves to the settings package's parent, not three
# levels up -- presumably dirname(dirname(dirname(abspath(__file__)))) was
# intended; confirm whether BASE_DIR is used anywhere before changing.
BASE_DIR = dirname(dirname(__file__) + "../../../")
# Absolute filesystem path to the config directory:
CONFIG_ROOT = dirname(dirname(abspath(__file__)))
# Absolute filesystem path to the project directory:
PROJECT_ROOT = dirname(CONFIG_ROOT)
# Absolute filesystem path to the django repo directory:
DJANGO_ROOT = dirname(PROJECT_ROOT)
# Project name:
PROJECT_NAME = basename(PROJECT_ROOT).capitalize()
# Project folder:
PROJECT_FOLDER = basename(PROJECT_ROOT)
# Project domain:
PROJECT_DOMAIN = '%s.com' % PROJECT_NAME.lower()
# Add our project to our pythonpath, this way we don't need to type our project
# name in our dotted import paths:
path.append(CONFIG_ROOT)
########## END PATH CONFIGURATION
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
########## DEBUG CONFIGURATION
# https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = STAGING = False
########## END DEBUG CONFIGURATION
ADMINS = (
("""{{cookiecutter.author_name}}""", '{{cookiecutter.email}}'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'CONN_MAX_AGE': 0,
'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': '', # Or path to database file if using sqlite3.
'USER': '',
'PASSWORD': '',
'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
}
DATABASES['default']['ATOMIC_REQUESTS'] = True
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = '{{ cookiecutter.timezone }}'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = '{{cookiecutter.languages.strip().split(',')[0]}}'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = normpath(join(PROJECT_ROOT, 'media'))
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = normpath(join(PROJECT_ROOT, 'assets'))
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
normpath(join(PROJECT_ROOT, 'static')),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# List of callables that know how to import templates from various sources.
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': (normpath(join(PROJECT_ROOT, 'templates')),),
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.i18n',
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.template.context_processors.media',
'django.template.context_processors.csrf',
'django.template.context_processors.tz',
'sekizai.context_processors.sekizai',
'django.template.context_processors.static',
'cms.context_processors.cms_settings'
]
},
},
]
MIDDLEWARE_CLASSES = (
'cms.middleware.utils.ApphookReloadMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'cms.middleware.user.CurrentUserMiddleware',
'cms.middleware.page.CurrentPageMiddleware',
'cms.middleware.toolbar.ToolbarMiddleware',
'cms.middleware.language.LanguageCookieMiddleware'
)
ROOT_URLCONF = '{{ cookiecutter.project_name }}.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = '{{ cookiecutter.project_name }}.wsgi.application'
INSTALLED_APPS = (
# Django CMS admin style
'djangocms_admin_style',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'django.contrib.sites',
'django.contrib.sitemaps',
'django.contrib.staticfiles',
'django.contrib.messages',
# Django CMS
'cms',
'menus',
'sekizai',
'treebeard',
{% if cookiecutter.django_filer == "y" or cookiecutter.django_filer == "Y" %}
# Django filer
'filer',
'easy_thumbnails',
'cmsplugin_filer_image',
'cmsplugin_filer_file',
'cmsplugin_filer_folder',
'cmsplugin_filer_teaser',
'cmsplugin_filer_utils',
'cmsplugin_filer_video',
{% endif %}
{% if cookiecutter.extra_plugins == "y" or cookiecutter.extra_plugins == "Y" %}
'django_select2',
'djangocms_style',
'djangocms_inherit',
'djangocms_text_ckeditor',
{% endif %}
'reversion',
'compressor',
'core',
)
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'production_only': {
'()': 'django.utils.log.RequireDebugFalse',
},
'development_only': {
'()': 'django.utils.log.RequireDebugTrue',
},
},
'formatters': {
'verbose': {
'format': '[%(asctime)s] %(levelname)-8s [%(name)s:%(lineno)s] %(message)s',
'datefmt': '%m/%d/%Y %H:%M:%S',
},
'simple': {
'format': '%(levelname)-8s [%(name)s:%(lineno)s] %(message)s',
},
"rq_console": {
"format": "%(asctime)s %(message)s",
"datefmt": "%H:%M:%S",
},
},
'handlers': {
'null': {
'level': 'DEBUG',
'class': 'logging.NullHandler',
},
"rq_console": {
"level": "DEBUG",
"class": "rq.utils.ColorizingStreamHandler",
"formatter": "rq_console",
"exclude": ["%(asctime)s"],
},
'default': {
'level': 'DEBUG',
'class': '{{ cookiecutter.project_name }}.lib.colorstreamhandler.ColorStreamHandler',
},
'console_dev': {
'level': 'DEBUG',
'filters': ['development_only'],
'class': '{{ cookiecutter.project_name }}.lib.colorstreamhandler.ColorStreamHandler',
'formatter': 'simple',
},
'console_prod': {
'level': 'INFO',
'filters': ['production_only'],
'class': '{{ cookiecutter.project_name }}.lib.colorstreamhandler.ColorStreamHandler',
'formatter': 'simple',
},
'file_log': {
'level': 'DEBUG',
'filters': ['development_only'],
'class': 'logging.handlers.RotatingFileHandler',
'filename': join(DJANGO_ROOT, 'logs/log.log'),
'maxBytes': 1024 * 1024,
'backupCount': 3,
'formatter': 'verbose',
},
'file_sql': {
'level': 'DEBUG',
'filters': ['development_only'],
'class': 'logging.handlers.RotatingFileHandler',
'filename': join(DJANGO_ROOT, 'logs/sql.log'),
'maxBytes': 1024 * 1024,
'backupCount': 3,
'formatter': 'verbose',
},
'mail_admins': {
'level': 'ERROR',
'filters': ['production_only'],
'class': 'django.utils.log.AdminEmailHandler',
'include_html': True,
},
},
# Catch-all modules that use logging
# Writes to console and file on development, only to console on production
'root': {
'handlers': ['console_dev', 'console_prod', 'file_log'],
'level': 'DEBUG',
},
'loggers': {
# Email admins when 500 error occurs
'django.request': {
'handlers': ['mail_admins', 'console_dev'],
'level': 'ERROR',
'propagate': False,
},
'{{ cookiecutter.project_name }}': {
'handlers': ['file_log'],
'level': 'INFO',
'propagate': False,
},
"rq.worker": {
"handlers": ["rq_console"],
"level": "DEBUG"
},
}
}
LOCALE_PATHS = (normpath(join(PROJECT_ROOT, 'locale')),)
# Dummy gettext function
gettext = lambda s: s
# Django CMS configurations
CMS_TEMPLATES = (
('single_page.html', gettext('Single page')),
)
LANGUAGES = [
{% for language in cookiecutter.languages.strip().split(',') -%}
('{{ language|trim }}', gettext('{{ language|trim }}')),
{% endfor %}
]
CMS_LANGUAGES = {
## Customize this
'default': {
'public': True,
'hide_untranslated': False,
'redirect_on_fallback': True,
},
1: [{% for language in cookiecutter.languages.strip().split(',') %}
{
'public': True,
'code': '{{ language|trim }}',
'hide_untranslated': False,
'name': gettext('{{ language|trim }}'),
'redirect_on_fallback': True,
},
{% endfor %}
],
}
{% if cookiecutter.django_filer == "y" or cookiecutter.django_filer == "Y" %}
TEXT_SAVE_IMAGE_FUNCTION='cmsplugin_filer_image.integrations.ckeditor.create_image_plugin'
THUMBNAIL_PROCESSORS = (
'easy_thumbnails.processors.colorspace',
'easy_thumbnails.processors.autocrop',
'filer.thumbnail_processors.scale_and_crop_with_subject_location',
'easy_thumbnails.processors.filters'
)
MIGRATION_MODULES = {
'cmsplugin_filer_image': 'cmsplugin_filer_image.migrations',
'cmsplugin_filer_file': 'cmsplugin_filer_file.migrations',
'cmsplugin_filer_folder': 'cmsplugin_filer_folder.migrations',
'cmsplugin_filer_video': 'cmsplugin_filer_video.migrations',
'cmsplugin_filer_teaser': 'cmsplugin_filer_teaser.migrations'
}
{% endif %}
# Analytics
GOOGLE_ANALYTICS = environ.get('GOOGLE_ANALYTICS', '')
CMS_PERMISSION = True
CMS_PLACEHOLDER_CONF = {}
CACHE_ENGINES = {
'redis': {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': 'localhost:6379:0',
},
'dummy': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
CACHES = {
'redis': {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': 'localhost:6379:0',
}
}
CACHES['default'] = CACHE_ENGINES[getenv('CACHE', 'dummy')]
########## REDIS QUEUE CONFIGURATION
# https://github.com/ui/django-rq#support-for-django-redis-and-django-redis-cache
RQ_QUEUES = {
'default': {
'USE_REDIS_CACHE': 'redis'
},
'high': {
'USE_REDIS_CACHE': 'redis'
},
'low': {
'USE_REDIS_CACHE': 'redis'
}
}
RQ_SHOW_ADMIN_LINK = True
########## END REDIS QUEUE CONFIGURATION
|
sebastiangithub94/coockietest | {{cookiecutter.repo_name}}/{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/settings/production_example.py | from .base import *
ALLOWED_HOSTS = [
'{{ cookiecutter.domain_name }}',
]
{% if cookiecutter.heroku == "y" or cookiecutter.heroku == "Y" %}
# Parse database configuration from $DATABASE_URL
import dj_database_url
DATABASES['default'] = dj_database_url.config()
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
INSTALLED_APPS += (
'storages',
)
# Amazon S3 credentials
STATICFILES_STORAGE = DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
# Static files location
from S3 import CallingFormat
AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN
AWS_ACCESS_KEY_ID = environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = environ.get('AWS_STORAGE_BUCKET_NAME')
AWS_AUTO_CREATE_BUCKET = True
AWS_QUERYSTRING_AUTH = False
S3_URL = 'https://%s.s3.amazonaws.com/' % AWS_STORAGE_BUCKET_NAME
STATIC_URL = S3_URL
MEDIA_URL = S3_URL
{% endif %}
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = False
EMAIL_PORT = 25
DEFAULT_FROM_EMAIL = EMAIL_HOST_USER
# NOTE(review): this unconditional re-assignment runs *after* the heroku
# block above and clobbers `DATABASES['default'] = dj_database_url.config()`
# when heroku support is enabled -- confirm the ordering is intended.
DATABASES = {
    'default': {
        'CONN_MAX_AGE': 0,
        'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': '', # Or path to database file if using sqlite3.
        'USER': '',
        'PASSWORD': '',
        'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
        'PORT': '', # Set to empty string for default.
    }
} |
wang502/rabbitmq-demo | crawler/amqp/amqp.py | import pika
import os
import sys
# type denotes whether it is producer or consumer queue
class ConnectionManager:
    """Manages a blocking pika connection and channel for a single queue.

    `q_type` selects the port environment variable: "PRODUCER" reads
    RABBITMQ_PRODUCERS_PORT, anything else reads RABBITMQ_CONSUMERS_PORT.
    Host, vhost, and credentials always come from the RABBITMQ_* env vars.
    """

    def __init__(self, q_type, queue):
        self.connection = None
        self.channel = None
        self.queue = queue
        self.q_type = q_type
        if q_type == "PRODUCER":
            self.port = int(os.environ.get('RABBITMQ_PRODUCERS_PORT'))
        else:
            self.port = int(os.environ.get('RABBITMQ_CONSUMERS_PORT'))

    def get_connection(self):
        """Connect to RabbitMQ, retrying until a connection is established.

        Returns the established pika.BlockingConnection.
        """
        while not self.connection:
            try:
                self.connection = pika.BlockingConnection(
                    pika.ConnectionParameters(
                        host = os.environ.get('RABBITMQ_HOST'),
                        virtual_host = os.environ.get('RABBITMQ_VIRTUAL_HOST'),
                        credentials = pika.PlainCredentials(os.environ.get('RABBITMQ_USER'), os.environ.get('RABBITMQ_PASSWORD')),
                        port = self.port
                    )
                )
            except Exception as e:
                # BUG FIX: the original printed self.host, an attribute that is
                # never set, so the error path itself raised AttributeError
                # instead of reporting the connection failure.
                print("Error connecting to %s port %d: %s" % (os.environ.get('RABBITMQ_HOST'), self.port, e))
        return self.connection

    def close_connection(self):
        """Close the underlying connection."""
        self.connection.close()

    def get_channel(self):
        """Return a channel, (re)connecting and creating one if needed."""
        if not self.connection:
            self.connection = None
            self.channel = None
            self.get_connection()
        if not self.channel:
            self.channel = self.connection.channel()
        return self.channel

    def init_queue(self):
        """Declare the durable queue this manager is bound to."""
        chan = self.get_channel()
        chan.queue_declare(queue=self.queue, durable=True)
|
wang502/rabbitmq-demo | routing/subscribe.py | <gh_stars>0
from amqp.amqp import *
import sys
q_type = "CONSUMER"
exchange = "routing_demo"
ex_type = "direct"
# queue - routing_key pairs
dic = {"ERROR":"errors", "DEBUG": "debugs", "INFO": "infos"}
conn = ConnectionManager(q_type, exchange, ex_type)
conn.get_connection()
conn.init_queue(dic)
print(" [*] Waiting for Messages. To exit press CTRL+C")
def worker(ch, method, properties, body):
    """Consumer callback: print the delivered message body.

    BUG FIX: the original used `print body`, which is Python 2 statement
    syntax and a SyntaxError under Python 3; the function call form behaves
    identically on both.
    """
    print(body)
conn.consume("ERROR", worker)
|
wang502/rabbitmq-demo | crawler/crawl.py | <reponame>wang502/rabbitmq-demo
# author: <NAME>
# email: <EMAIL>
# Part of RabbitMQ-Demo project
# crawl.py
# Consume all url messages in message broker, and parse the corresponding web page
##################################################################################
import pika
from bs4 import BeautifulSoup
import httplib2
import os
from amqp.amqp import *
# set up connection and channel to RabbitMQ server
conn = ConnectionManager('CONSUMER', 'crawler')
conn.get_connection()
conn.init_queue()
chan = conn.get_channel()
# consume the url messages and crawl
def crawl(ch, method, properties, body):
    """Consumer callback: fetch the AngelList company page at URL `body` and
    scrape its investor links.

    Prints the scraped {"name": company, "investors": {name: href}} result.

    BUG FIX: the original began with `d = {}` followed by `print(d["a"])`,
    leftover debug code that raised KeyError on every delivered message
    before any crawling happened; both lines are removed.
    """
    print(" [*] URL to consume: %r" % body)
    # Some sites refuse requests without a browser-looking User-Agent.
    user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
    headers = {'User-Agent': user_agent}
    http = httplib2.Http()
    status, response = http.request(body, 'GET', None, headers)
    soup = BeautifulSoup(response)
    investors = {}
    # The last URL path segment is the company slug.
    company = body.split('/')[-1]
    for a in soup.find_all("a", {"class": "startup-link"}):
        name = a.get_text()
        if name != "":
            investors[name] = a.get('href')
    c = {"name": company, "investors": investors}
    print(" [*] result: " + str(c) + "\n")
chan.basic_consume(crawl,
queue='crawler'
)
print(" [*] Waiting for URL to crawl. To exit press CTRL+C")
chan.start_consuming()
|
wang502/rabbitmq-demo | routing/amqp/amqp.py | <reponame>wang502/rabbitmq-demo
import pika
import os
import sys
import logging
class ConnectionManager:
    """Manages a pika connection plus an exchange/queue routing topology.

    `q_type` selects the port environment variable ("PRODUCER" reads
    RABBITMQ_PRODUCERS_PORT, anything else RABBITMQ_CONSUMERS_PORT);
    `exchange`/`ex_type` describe the exchange declared by init_queue().
    """

    def __init__(self, q_type, exchange, ex_type):
        self.q_type = q_type
        self.ex_type = ex_type
        self.exchange = exchange
        self.connection = None
        self.channel = None
        if q_type == "PRODUCER":
            self.port = int(os.environ.get('RABBITMQ_PRODUCERS_PORT'))
        else:
            self.port = int(os.environ.get('RABBITMQ_CONSUMERS_PORT'))

    def get_connection(self):
        """Connect to RabbitMQ, retrying until a connection is established."""
        while not self.connection:
            try:
                self.connection = pika.BlockingConnection(
                    pika.ConnectionParameters(
                        host = os.environ.get('RABBITMQ_HOST'),
                        virtual_host = os.environ.get('RABBITMQ_VIRTUAL_HOST'),
                        credentials = pika.PlainCredentials(os.environ.get('RABBITMQ_USER'), os.environ.get('RABBITMQ_PASSWORD')),
                        port = self.port)
                )
            except Exception as e:
                logging.debug(e)

    def get_channel(self):
        """Return a channel, reusing the cached one when available.

        BUG FIX: the original checked `self.channel` but never assigned it,
        so the cache was dead and every call opened a brand-new channel;
        the channel is now cached on first use, matching the sibling
        crawler ConnectionManager.
        """
        if not self.channel:
            self.get_connection()
            self.channel = self.connection.channel()
        return self.channel

    def init_queue(self, d):
        """Declare the exchange and queues, binding each queue to its key.

        `d` maps queue name -> routing key.
        """
        chan = self.get_channel()
        chan.exchange_declare(exchange = self.exchange,
                              type = self.ex_type,
                              durable = False,
                              auto_delete = True)
        for queue in d.keys():
            chan.queue_declare(queue=queue,
                               durable = False,
                               auto_delete = True)
        for q, key in d.items():
            chan.queue_bind(routing_key=key,
                            queue = q,
                            exchange = self.exchange)

    def publish(self, exchange, routing_key, body):
        """Publish one message to `exchange` with `routing_key`."""
        chan = self.get_channel()
        chan.basic_publish(exchange=exchange,
                           routing_key=routing_key,
                           body=body)

    def consume(self, queue, worker):
        """Block and deliver messages from `queue` to the `worker` callback."""
        chan = self.get_channel()
        chan.basic_consume(worker,
                           queue = queue)
        chan.start_consuming()
|
wang502/rabbitmq-demo | routing/publish.py | <filename>routing/publish.py<gh_stars>0
from amqp.amqp import *
import sys
q_type = "PRODUCER"
exchange = "routing_demo"
ex_type = "direct"
# queue - routing_key pairs
dic = {"ERROR":"errors", "DEBUG": "debugs", "INFO": "infos"}
conn = ConnectionManager(q_type, exchange, ex_type)
conn.get_connection()
conn.init_queue(dic)
routing_key = sys.argv[1]
message = sys.argv[2]
conn.publish(exchange, routing_key, message)
|
wang502/rabbitmq-demo | HelloWorld/send.py | <reponame>wang502/rabbitmq-demo
import pika
import os
#from amqp import *
import json
from amqp.amqp import *
conn = ConnectionManager("PRODUCER", "hello")
conn.get_connection()
conn.init_queue()
chan = conn.get_channel()
chan.basic_publish(exchange='',
routing_key=conn.queue,
body=json.dumps({"word":"hello world!"})
)
chan.close()
print(" [X] Sent 'Hello World!'")
|
wang502/rabbitmq-demo | HelloWorld/receive.py | <filename>HelloWorld/receive.py
import pika
import os
from amqp import *
import json
conn = ConnectionManager("CONSUMER", 'hello')
conn.get_connection()
conn.init_queue()
chan = conn.get_channel()
def callback(ch, method, properties, body):
    """Consumer callback: decode the JSON message and print its 'word' field."""
    payload = json.loads(body)
    word = payload.get("word")
    print(" [X] Received %r" % (word))
chan.basic_consume(callback,
queue=conn.queue,
no_ack=True
)
print(" [*] Waiting for messages. To exit press CTRL+C")
chan.start_consuming()
|
wang502/rabbitmq-demo | crawler/publish.py | <filename>crawler/publish.py
# author: <NAME>
# email: <EMAIL>
# Part of RabbitMQ-Demo project
# /crawler/publish.py
# Publish list of urls to message broker
################################################
import pika
import os
import sys
from amqp.amqp import *
# set up connection and channel to RabbitMQ server
conn = ConnectionManager('PRODUCER', 'crawler')
conn.get_connection()
conn.init_queue()
chan = conn.get_channel()
BASE_URL = 'https://angel.co/'
names = []
with open('names.txt') as fp:
for line in fp:
names.append(line.split('. ')[1][:-1])
for name in names:
url = BASE_URL + name
chan.basic_publish(exchange='', routing_key=conn.queue, body=url, properties=pika.BasicProperties(
delivery_mode = 2, # make message persistent
))
print("url: %r sent to task queue" % url)
chan.close()
|
joostboonzajerflaes/unofficial_beam_redis | unofficial_beam_redis/io/redisio.py | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""This module implements IO classes to read write data on Redis.
Write to Redis:
-----------------
:class:`WriteToRedis` is a ``PTransform`` that writes key and values to a
configured sink, and the write is conducted through a redis pipeline.
The ptransform works by getting the first and second elements from the input,
this means that inputs like `[k,v]` or `(k,v)` are valid.
Example usage::
pipeline | WriteToRedis(host='localhost',
port=6379,
batch_size=100)
No backward compatibility guarantees. Everything in this module is experimental.
"""
from __future__ import absolute_import
import logging
import pickle
from past.builtins import unicode
import apache_beam as beam
from apache_beam.io import iobase
from apache_beam.transforms import DoFn
from apache_beam.transforms import PTransform
from apache_beam.transforms import Reshuffle
from apache_beam.utils.annotations import experimental
from apache_beam.options.value_provider import ValueProvider
from apache_beam.options.value_provider import StaticValueProvider
import redis
__all__ = ['WriteToRedis']
@experimental()
class WriteToRedis(beam.PTransform):
    """WriteToRedis is a ``PTransform`` that writes a ``PCollection`` of
    key, value tuple or 2-element array into a redis server.
    """

    def __init__(self, host=None, port=None, command=None, batch_size=100):
        """
        Args:
          host (str, ValueProvider): The redis host
          port (int, ValueProvider): The redis port
          command (str): Optional redis command run per (key, value) pair
            instead of SET; used as-is by the write DoFn.
          batch_size(int, ValueProvider): Number of key, values pairs to write at once

        Returns:
          :class:`~apache_beam.transforms.ptransform.PTransform`

        Raises:
          TypeError: if host, port or batch_size have the wrong type.
        """
        # BUG FIX: the original wrote `raise TypeError('fmt') % (args)`,
        # applying `%` to the exception *instance*, which itself raised an
        # unrelated TypeError; the format string must be interpolated before
        # raising. Fixed in all three checks below.
        if not isinstance(host, (str, unicode, ValueProvider)):
            raise TypeError(
                '%s: host must be string, or ValueProvider; got %r instead'
                % (self.__class__.__name__, type(host)))
        if not isinstance(port, (int, ValueProvider)):
            raise TypeError(
                '%s: port must be int, or ValueProvider; got %r instead'
                % (self.__class__.__name__, type(port)))
        # BUG FIX: the original repeated the `port` check here, so batch_size
        # was never validated.
        if not isinstance(batch_size, (int, ValueProvider)):
            raise TypeError(
                '%s: batch_size must be int, or ValueProvider; got %r instead'
                % (self.__class__.__name__, type(batch_size)))
        if isinstance(host, (str, unicode)):
            host = StaticValueProvider(str, host)
        if isinstance(port, int):
            port = StaticValueProvider(int, port)
        # NOTE: command is deliberately NOT wrapped in a StaticValueProvider:
        # _WriteRedisFn._flush passes it to redis verbatim (no .get()). The
        # original's `isinstance(command, int)` wrap was dead code and has
        # been removed.
        if isinstance(batch_size, int):
            batch_size = StaticValueProvider(int, batch_size)
        self._host = host
        self._port = port
        self._command = command
        self._batch_size = batch_size

    def expand(self, pcoll):
        """Reshuffle for even distribution, then batch-write via _WriteRedisFn."""
        return pcoll \
               | Reshuffle() \
               | beam.ParDo(_WriteRedisFn(self._host,
                                          self._port,
                                          self._command,
                                          self._batch_size))
class _WriteRedisFn(DoFn):
    """DoFn that buffers incoming (key, value) elements and flushes them to
    Redis in batches of `batch_size`."""

    def __init__(self, host, port, command, batch_size):
        # host/port/batch_size are ValueProviders (see WriteToRedis.__init__);
        # command may be a plain string or None.
        self.host = host
        self.port = port
        self.command = command
        self.batch_size = batch_size
        self.batch_counter = 0
        self.batch = list()

    def finish_bundle(self):
        # Flush whatever remains buffered at the end of the bundle.
        self._flush()

    def process(self, element, *args, **kwargs):
        self.batch.append(element)
        self.batch_counter += 1
        if self.batch_counter == self.batch_size.get():
            self._flush()

    def _flush(self):
        """Write the buffered batch to Redis and reset the buffer."""
        if self.batch_counter == 0:
            return
        with _RedisSink(self.host.get(), self.port.get()) as sink:
            if not self.command:
                sink.write(self.batch)
            else:
                # NOTE(review): command is used directly (no .get()), so it is
                # expected to be a plain string, not a ValueProvider -- confirm
                # against WriteToRedis.__init__.
                sink.execute_command(self.command, self.batch)
        self.batch_counter = 0
        self.batch = list()
class _RedisSink(object):
    """Thin context-managed wrapper around a StrictRedis client that writes
    (key, value) pairs through a pipeline."""

    def __init__(self, host, port):
        self.host = host
        self.port = port
        self.client = None

    def _create_client(self):
        """Lazily construct the underlying StrictRedis client."""
        if self.client is None:
            self.client = redis.StrictRedis(host=self.host, port=self.port)

    def write(self, elements):
        """SET every (key, value) pair in a single pipelined round trip."""
        self._create_client()
        with self.client.pipeline() as pipe:
            for key, value in elements:
                pipe.set(key, value)
            pipe.execute()

    def execute_command(self, command, elements):
        """Run `command key value` for every pair in one pipelined round trip."""
        self._create_client()
        with self.client.pipeline() as pipe:
            for key, value in elements:
                pipe.execute_command(command, key, value)
            pipe.execute()

    def __enter__(self):
        self._create_client()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Release the client's connection pool on exit, even after errors.
        if self.client is not None:
            self.client.close()
|
joostboonzajerflaes/unofficial_beam_redis | setup.py | <reponame>joostboonzajerflaes/unofficial_beam_redis<gh_stars>1-10
from distutils.core import setup
# Package metadata for PyPI; distutils is deprecated (removed in Python 3.12)
# -- consider migrating to setuptools when touching this file.
setup(
    name = 'unofficial_beam_redis',
    packages = ['unofficial_beam_redis', 'unofficial_beam_redis.io'],
    version = '0.6',
    # BUG FIX: license string was misspelled 'Aapache 2.0'.
    license='Apache 2.0',
    description = 'An unofficial apache beam sink for redis',
    author = 'unofficial_beam_redis',
    author_email = '<EMAIL>',
    url = 'https://github.com/atejeda/unofficial_beam_redis',
    download_url = 'https://github.com/atejeda/unofficial_beam_redis/archive/v_01.tar.gz',
    keywords = ['Apache', 'Beam', 'Redis', 'Sink'],
    install_requires=[
        'redis',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Build Tools',
        'License :: OSI Approved :: Apache Software License', # 2.0
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
    ],
)
|
marceloBarrand/fase4-gr17 | blog/urls.py | <gh_stars>1-10
from django.conf.urls import url
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
url(r'^$', views.post_list, name='post_list'),
url(r'^login/$', auth_views.login, {'template_name': 'blog/login.html'}, name='login'),
url(r'^post/(?P<pk>\d+)/$', views.post_detail, name='post_detail'),
url(r'^post/(?P<pk>\d+)/edit/$', views.post_edit, name='post_edit'),
url(r'^post/new/$', views.post_new, name='post_new'),
]
|
kokose1234/ChatApp | Tools/packet_type_maker.py | from os import walk
from os.path import join
from posixpath import splitext
from pathlib import Path
def save_packet_header(names, packet_type):
    """Generate the C# `{packet_type}Header` enum from proto message names.

    Filters `names` to those starting with `packet_type` ("Server" or
    "Client"), assigns each member its CRC32 of the UTF-8 name as a stable
    wire value, and writes the enum source file under
    ../src/ChatApp.Common/Net/Packet/Header/.
    """
    from io import StringIO
    import binascii
    import shutil
    output_str = StringIO()
    # '!' is the placeholder for the packet type in the enum template.
    output_str.write('namespace ChatApp.Common.Net.Packet.Header;\n\npublic enum !Header : uint\n{\n    NullPacket = 0,'.replace('!', packet_type))
    for name in filter(lambda x: x.startswith(packet_type), names):
        crc = binascii.crc32(name.encode('utf8'))
        output_str.write(f'\n    {name} = {crc},')
    output_str.write('\n}')
    with open(f'../src/ChatApp.Common/Net/Packet/Header/{packet_type}Header.cs', 'w') as fd:
        # Rewind before streaming the buffer out to the file.
        output_str.seek(0)
        shutil.copyfileobj(output_str, fd)
proto_path = '../src/ChatApp.Common/Protos'
file_list = []
for root, dirs, files in walk(proto_path):
for file in files:
file_list.append(join(root, file))
file_names = [Path(splitext(f)[0]).stem for f in file_list]
save_packet_header(file_names, 'Server')
save_packet_header(file_names, 'Client')
|
fuaad001/personal-gallery | gallery/models.py | from django.db import models
# Create your models here.
class Location(models.Model):
    """A place a photo was taken."""
    location_name = models.CharField(max_length = 25)

    def __str__(self):
        return self.location_name

    def save_location(self):
        """Persist this location."""
        self.save()

    # NOTE(review): the two methods below take the id as the first positional
    # argument instead of `self`; they only work when called on the class
    # (e.g. Location.delete_location(3)) -- consider @staticmethod.
    def delete_location(location_id):
        Location.objects.filter(id = location_id).delete()

    def update_location(location_id, location):
        Location.objects.filter(id = location_id).update(location_name = location)
class Category(models.Model):
    """A category an image can belong to (used by Image.search_image)."""
    category_name = models.CharField(max_length = 50)

    def __str__(self):
        return self.category_name

    def save_category(self):
        """Persist this category."""
        self.save()

    # NOTE(review): these take the id as the first positional argument instead
    # of `self`; they only work when called on the class -- consider
    # @staticmethod.
    def delete_category(category_id):
        Category.objects.filter(id = category_id).delete()

    def update_category(category_id, category):
        Category.objects.filter(id = category_id).update(category_name = category)
class Photographer(models.Model):
    """Contact details for the person who took an image."""

    names = models.CharField(max_length = 50)
    email = models.EmailField(blank = True)
    # Instagram handle (optional).
    ig = models.CharField(max_length = 20, blank = True)
    phone_number = models.CharField(max_length = 10,blank =True)

    def __str__(self):
        return self.names

    def save_photographer(self):
        """Persist this photographer."""
        self.save()

    # NOTE(review): called without an instance; the argument is the row id.
    def delete_photographer(photographer_id):
        Photographer.objects.filter(id = photographer_id).delete()
class Image(models.Model):
    """A gallery image with its location, category and photographer."""

    image_path = models.ImageField(upload_to = 'images/')
    name = models.CharField(max_length = 50)
    description = models.TextField(blank = True)
    # NOTE(review): ForeignKey without on_delete implies Django < 2.0.
    location = models.ForeignKey(Location, blank=True)
    category = models.ForeignKey(Category, blank=True)
    photographer = models.ForeignKey(Photographer)

    def __str__(self):
        return self.name

    def save_image(self):
        """Persist this image."""
        self.save()

    # NOTE(review): the next three methods take no `self`; they are invoked as
    # Image.delete_image(id) etc., so the first argument is the row id.
    def delete_image(image_id):
        Image.objects.filter(id = image_id).delete()

    def update_image(image_id, path):
        # Direct DB update; in-memory instances are not refreshed.
        Image.objects.filter(id = image_id).update(image_path = path)

    def get_image_by_id(image_id):
        """Return the single image with primary key *image_id* (raises DoesNotExist)."""
        image = Image.objects.get(pk = image_id)
        return image

    @classmethod
    def search_image(cls, search_category):
        """Case-insensitive substring search over related category names."""
        images = cls.objects.filter(category__category_name__icontains=search_category)
        return images

    @classmethod
    def filter_by_location(cls):
        """All images ordered by their location foreign key."""
        images = cls.objects.order_by('location')
        return images

    class Meta:
        ordering = ['name']
|
fuaad001/personal-gallery | gallery/views.py | from django.shortcuts import render, redirect
from django.http import HttpResponse, Http404
from .models import Image, Location, Category, Photographer
# Create your views here.
def index(request):
    '''
    view function to display landing page
    '''
    all_images = Image.objects.all()
    context = {"images": all_images}
    return render(request, 'index.html', context)
def search_page(request):
    '''
    view function to open search page and display searched images
    '''
    search_term = request.GET.get("search")
    if 'search' in request.GET and search_term:
        images = Image.search_image(search_term)
        message = f"{search_term}"
        context = {"message": message, "images": images}
        return render(request, 'search.html', context)

    # No (or empty) search term supplied.
    message = "You haven't searched for any term"
    return render(request, 'search.html',{"message":message})
def sortby_locations(request):
    '''
    view function to display images sorted by Location
    '''
    sorted_images = Image.filter_by_location()
    return render(request, 'location.html', {"images": sorted_images})
def single_image(request, image_id):
    '''
    view function to display a single image and its details
    '''
    requested_image = Image.get_image_by_id(image_id)
    context = {"image": requested_image}
    return render(request, 'single_image.html', context)
|
fuaad001/personal-gallery | gallery/tests.py | from django.test import TestCase
from .models import Location, Category, Photographer, Image
# Create your tests here.
class LocationTestClass(TestCase):
    """Unit tests for the Location model."""

    def setUp(self):
        self.loc = Location(location_name = 'Mombasa, Kenya')

    # Testing instance
    def test_instance(self):
        self.assertTrue(isinstance(self.loc, Location))

    def test_save_location(self):
        self.loc.save_location()
        locations = Location.objects.all()
        self.assertTrue(len(locations) > 0)

    def test_delete_location(self):
        self.loc.save_location()
        Location.delete_location(self.loc.id)
        locations = Location.objects.all()
        self.assertEqual(len(locations), 0)

    def test_update_location(self):
        # Bug fix: the row must exist before updating (the original never
        # saved, so self.loc.id was None), and it must be re-fetched because
        # queryset.update() bypasses the in-memory instance.
        self.loc.save_location()
        Location.update_location(self.loc.id, 'london')
        updated = Location.objects.get(id = self.loc.id)
        self.assertEqual(updated.location_name, 'london')
class CategoryTestClass(TestCase):
    """Unit tests for the Category model."""

    def setUp(self):
        self.cat = Category(category_name = 'official')

    # Testing instance
    def test_instance(self):
        self.assertTrue(isinstance(self.cat, Category))

    def test_save_category(self):
        self.cat.save_category()
        categories = Category.objects.all()
        self.assertTrue(len(categories) > 0)

    def test_delete_category(self):
        self.cat.save_category()
        Category.delete_category(self.cat.id)
        categories = Category.objects.all()
        self.assertEqual(len(categories), 0)

    def test_update_category(self):
        # Bug fix: the original updated to 'official' but asserted 'joking'
        # (always failing), and never saved the row first.  Save, update to a
        # new name, re-fetch, and assert the new name.
        self.cat.save_category()
        Category.update_category(self.cat.id, 'casual')
        updated = Category.objects.get(id = self.cat.id)
        self.assertEqual(updated.category_name, 'casual')
class PhotographerTestClass(TestCase):
    """Unit tests for the Photographer model."""

    def setUp(self):
        self.pho = Photographer(names = '<NAME>', email = '<EMAIL>', ig = 'fatmafuaad', phone_number = '0712345678')

    def test_instance(self):
        """The fixture is a Photographer instance."""
        self.assertTrue(isinstance(self.pho, Photographer))

    def test_save_photographer(self):
        """Saving makes the photographer queryable."""
        self.pho.save_photographer()
        stored = Photographer.objects.all()
        self.assertTrue(len(stored) > 0)

    def test_delete_photographer(self):
        """Deleting by id removes the saved row."""
        self.pho.save_photographer()
        Photographer.delete_photographer(self.pho.id)
        stored = Photographer.objects.all()
        self.assertEqual(len(stored), 0)
class ImageTestClass(TestCase):
    """Unit tests for the Image model."""

    def setUp(self):
        self.loc = Location(location_name = 'Mombasa, Kenya')
        self.loc.save_location()
        self.cat = Category(category_name = 'official')
        self.cat.save_category()
        self.pho = Photographer(names = '<NAME>', email = '<EMAIL>', ig = 'fatmafuaad', phone_number = '0712345678')
        self.pho.save_photographer()
        self.img = Image(image_path = 'fuaad.png', name = 'passport photo', description = 'photo fo passports', location = self.loc, category = self.cat, photographer = self.pho)

    def tearDown(self):
        Location.objects.all().delete()
        Category.objects.all().delete()
        Photographer.objects.all().delete()
        Image.objects.all().delete()

    # Testing instance
    def test_instance(self):
        self.assertTrue(isinstance(self.img, Image))

    def test_save_image(self):
        self.img.save_image()
        images = Image.objects.all()
        self.assertTrue(len(images) > 0)

    def test_delete_image(self):
        self.img.save_image()
        Image.delete_image(self.img.id)
        images = Image.objects.all()
        self.assertEqual(len(images), 0)

    def test_get_image_by_id(self):
        self.img.save_image()
        image = Image.get_image_by_id(self.img.id)
        self.assertEqual(self.img, image)

    def test_search_image(self):
        # Bug fix: search_image expects a category-name string (the original
        # passed a Category object), and assertEqual needs two arguments
        # (the original's one-argument call raised TypeError).
        self.img.save_image()
        images = Image.search_image(self.cat.category_name)
        self.assertEqual(len(images), 1)

    def test_filter_by_location(self):
        # Bug fix: filter_by_location takes no arguments, and the original
        # assertEqual was called with a single argument.
        self.img.save_image()
        images = Image.filter_by_location()
        self.assertEqual(len(images), 1)

    def test_update_image(self):
        # Bug fix: the row must be saved before updating and re-fetched after,
        # since queryset.update() bypasses the in-memory instance.
        self.img.save_image()
        Image.update_image(self.img.id, 'fatma.png')
        updated = Image.objects.get(id = self.img.id)
        self.assertEqual(updated.image_path, 'fatma.png')
|
Zrachel/ERNIE | ernie/client.py | <reponame>Zrachel/ERNIE
#coding=utf-8
import requests
import json
import time
start_time = time.time()
texta = "now suppose you have such a system"
textb = "such thin"
json_data = {'line':[texta, textb]}
r = requests.post('http://10.255.124.15:8808/prob_paddle', json = json_data)
print(json.loads(r.text)['prob'])
print("--- %s seconds ---" % (time.time() - start_time))
|
Zrachel/ERNIE | ernie/finetune/classifier.py | # Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model for classifier."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
import time
import logging
import numpy as np
from scipy.stats import pearsonr, spearmanr
from six.moves import xrange
import paddle.fluid as fluid
from model.ernie import ErnieModel
log = logging.getLogger(__name__)
def create_model(args,
                 pyreader_name,
                 ernie_config,
                 is_prediction=False,
                 task_name="",
                 is_classify=False,
                 is_regression=False,
                 ernie_version="1.0"):
    """Build the ERNIE fine-tuning graph for classification or regression.

    Returns (data_loader, probs, feed_names) when is_prediction is True,
    otherwise (data_loader, graph_vars dict with loss/probs/labels/...).
    Exactly one of is_classify / is_regression must be True.
    """
    # Placeholders use fixed names ('eval_placeholder_*') so a saved inference
    # program can later be fed by name.
    src_ids = fluid.layers.data(name='eval_placeholder_0', shape=[-1, args.max_seq_len, 1], dtype='int64')
    sent_ids = fluid.layers.data(name='eval_placeholder_1', shape=[-1, args.max_seq_len, 1], dtype='int64')
    pos_ids = fluid.layers.data(name='eval_placeholder_2', shape=[-1, args.max_seq_len, 1], dtype='int64')
    input_mask = fluid.layers.data(name='eval_placeholder_3', shape=[-1, args.max_seq_len, 1], dtype='float32')
    task_ids = fluid.layers.data(name='eval_placeholder_4', shape=[-1, args.max_seq_len, 1], dtype='int64')
    qids = fluid.layers.data(name='eval_placeholder_5', shape=[-1, 1], dtype='int64')

    # Label dtype depends on the task head.
    if is_classify:
        labels = fluid.layers.data(name='6', shape=[-1, 1], dtype='int64')
    elif is_regression:
        labels = fluid.layers.data(name='6', shape=[-1, 1], dtype='float32')

    pyreader = fluid.io.DataLoader.from_generator(feed_list=[src_ids, sent_ids, pos_ids, task_ids, input_mask, labels, qids],
                                                  capacity=70,
                                                  iterable=False)

    ernie = ErnieModel(
        src_ids=src_ids,
        position_ids=pos_ids,
        sentence_ids=sent_ids,
        task_ids=task_ids,
        input_mask=input_mask,
        config=ernie_config,
        use_fp16=args.use_fp16)

    print( src_ids.name, pos_ids.name, sent_ids.name, task_ids.name, input_mask.name)
    # Pooled [CLS] representation -> dropout -> linear classifier head.
    cls_feats = ernie.get_pooled_output()
    cls_feats = fluid.layers.dropout(
        x=cls_feats,
        dropout_prob=0.1,
        dropout_implementation="upscale_in_train")
    logits = fluid.layers.fc(
        input=cls_feats,
        size=args.num_labels,
        param_attr=fluid.ParamAttr(
            name="_cls_out_w",
            initializer=fluid.initializer.TruncatedNormal(scale=0.02)),
        bias_attr=fluid.ParamAttr(
            name="_cls_out_b",
            initializer=fluid.initializer.Constant(0.)))

    assert is_classify != is_regression, 'is_classify or is_regression must be true and only one of them can be true'
    if is_prediction:
        if is_classify:
            probs = fluid.layers.softmax(logits)
        else:
            probs = logits
        feed_targets_name = [
            src_ids.name, sent_ids.name, pos_ids.name, input_mask.name
        ]
        # ERNIE 2.0 additionally feeds task ids.
        if ernie_version == "2.0":
            feed_targets_name += [task_ids.name]
        return pyreader, probs, feed_targets_name

    num_seqs = fluid.layers.create_tensor(dtype='int64')
    if is_classify:
        ce_loss, probs = fluid.layers.softmax_with_cross_entropy(
            logits=logits, label=labels, return_softmax=True)
        loss = fluid.layers.mean(x=ce_loss)
        accuracy = fluid.layers.accuracy(
            input=probs, label=labels, total=num_seqs)
        graph_vars = {
            "loss": loss,
            "probs": probs,
            "accuracy": accuracy,
            "labels": labels,
            "num_seqs": num_seqs,
            "qids": qids
        }
    elif is_regression:
        cost = fluid.layers.square_error_cost(input=logits, label=labels)
        loss = fluid.layers.mean(x=cost)
        graph_vars = {
            "loss": loss,
            "probs": logits,
            "labels": labels,
            "num_seqs": num_seqs,
            "qids": qids
        }
    else:
        raise ValueError(
            'unsupported fine tune mode. only supported classify/regression')

    return pyreader, graph_vars
def evaluate_mrr(preds):
    """Mean reciprocal rank over (qid, score, label) triples.

    *preds* must be grouped by qid and sorted by descending score within each
    group; only the first relevant item (label != 0) per query contributes.
    """
    reciprocal_sum = 0.0
    query_count = 0.0
    current_qid = None
    position = 0.0
    found_relevant = False
    for qid, _score, label in preds:
        if qid != current_qid:
            # New query group: reset per-query state.
            current_qid = qid
            query_count += 1
            position = 0.0
            found_relevant = False
        position += 1
        if label != 0 and not found_relevant:
            reciprocal_sum += 1.0 / position
            found_relevant = True
    return reciprocal_sum / query_count
def evaluate_map(preds):
    """Mean average precision over (qid, score, label) triples.

    *preds* must be grouped by qid and sorted by descending score within each
    group; label != 0 marks a relevant item.  Fixes: uses built-in ``range``
    instead of the Python-2 ``xrange`` shim from six, identity comparison
    with None, and the misspelled helper name.
    """
    def single_map(st, en):
        # Average precision for the single query occupying preds[st:en].
        total_p = 0.0
        correct_num = 0.0
        for index in range(st, en):
            if int(preds[index][2]) != 0:
                correct_num += 1
                total_p += correct_num / (index - st + 1)
        if int(correct_num) == 0:
            return 0.0
        return total_p / correct_num

    last_qid = None
    total_map = 0.0
    qnum = 0.0
    st = 0
    for i in range(len(preds)):
        qid = preds[i][0]
        if qid != last_qid:
            qnum += 1
            if last_qid is not None:
                total_map += single_map(st, i)
            st = i
            last_qid = qid
    total_map += single_map(st, len(preds))
    return total_map / qnum
def evaluate_classify(exe,
                      test_program,
                      test_pyreader,
                      graph_vars,
                      eval_phase,
                      use_multi_gpu_test=False,
                      metric='simple_accuracy',
                      is_classify=False,
                      is_regression=False):
    """Evaluate a classification model and return a formatted summary string.

    For eval_phase == "train" a single fetch is performed and a metrics dict
    is returned instead; otherwise *test_pyreader* is fully consumed.
    """
    train_fetch_list = [
        graph_vars["loss"].name, graph_vars["accuracy"].name,
        graph_vars["num_seqs"].name
    ]

    if eval_phase == "train":
        if "learning_rate" in graph_vars:
            train_fetch_list.append(graph_vars["learning_rate"].name)
        outputs = exe.run(fetch_list=train_fetch_list)
        ret = {"loss": np.mean(outputs[0]), "accuracy": np.mean(outputs[1])}
        if "learning_rate" in graph_vars:
            ret["learning_rate"] = float(outputs[3][0])
        return ret

    test_pyreader.start()
    total_cost, total_acc, total_num_seqs, total_label_pos_num, total_pred_pos_num, total_correct_num = 0.0, 0.0, 0.0, 0.0, 0.0, 0.0
    qids, labels, scores, preds = [], [], [], []
    time_begin = time.time()

    fetch_list = [
        graph_vars["loss"].name, graph_vars["accuracy"].name,
        graph_vars["probs"].name, graph_vars["labels"].name,
        graph_vars["num_seqs"].name, graph_vars["qids"].name
    ]
    while True:
        try:
            if use_multi_gpu_test:
                np_loss, np_acc, np_probs, np_labels, np_num_seqs, np_qids = exe.run(
                    fetch_list=fetch_list)
            else:
                np_loss, np_acc, np_probs, np_labels, np_num_seqs, np_qids = exe.run(
                    program=test_program, fetch_list=fetch_list)
            # Weight batch statistics by the number of sequences in the batch.
            total_cost += np.sum(np_loss * np_num_seqs)
            total_acc += np.sum(np_acc * np_num_seqs)
            total_num_seqs += np.sum(np_num_seqs)
            labels.extend(np_labels.reshape((-1)).tolist())
            if np_qids is None:
                np_qids = np.array([])
            qids.extend(np_qids.reshape(-1).tolist())
            # Column 1 is used as the ranking score (positive-class probability).
            scores.extend(np_probs[:, 1].reshape(-1).tolist())
            np_preds = np.argmax(np_probs, axis=1).astype(np.float32)
            preds.extend(np_preds)
            total_label_pos_num += np.sum(np_labels)
            total_pred_pos_num += np.sum(np_preds)
            total_correct_num += np.sum(np.dot(np_preds, np_labels))
        except fluid.core.EOFException:
            # Data loader exhausted: reset and stop.
            test_pyreader.reset()
            break
    time_end = time.time()

    cost = total_cost / total_num_seqs
    elapsed_time = time_end - time_begin

    evaluate_info = ""
    if metric == 'acc_and_f1':
        ret = acc_and_f1(preds, labels)
        evaluate_info = "[%s evaluation] ave loss: %f, ave_acc: %f, f1: %f, data_num: %d, elapsed time: %f s" \
            % (eval_phase, cost, ret['acc'], ret['f1'], total_num_seqs, elapsed_time)
    elif metric == 'matthews_corrcoef':
        ret = matthews_corrcoef(preds, labels)
        evaluate_info = "[%s evaluation] ave loss: %f, matthews_corrcoef: %f, data_num: %d, elapsed time: %f s" \
            % (eval_phase, cost, ret, total_num_seqs, elapsed_time)
    elif metric == 'pearson_and_spearman':
        ret = pearson_and_spearman(scores, labels)
        # Bug fix: pearson_and_spearman returns the key 'spearmanr' (not
        # 'spearman'); the original lookup raised KeyError on this branch.
        evaluate_info = "[%s evaluation] ave loss: %f, pearson:%f, spearman:%f, corr:%f, data_num: %d, elapsed time: %f s" \
            % (eval_phase, cost, ret['pearson'], ret['spearmanr'], ret['corr'], total_num_seqs, elapsed_time)
    elif metric == 'simple_accuracy':
        ret = simple_accuracy(preds, labels)
        evaluate_info = "[%s evaluation] ave loss: %f, acc:%f, data_num: %d, elapsed time: %f s" \
            % (eval_phase, cost, ret, total_num_seqs, elapsed_time)
    elif metric == "acc_and_f1_and_mrr":
        ret_a = acc_and_f1(preds, labels)
        # Sort by qid, then descending score, as evaluate_mrr requires.
        preds = sorted(
            zip(qids, scores, labels), key=lambda elem: (elem[0], -elem[1]))
        ret_b = evaluate_mrr(preds)
        evaluate_info = "[%s evaluation] ave loss: %f, acc: %f, f1: %f, mrr: %f, data_num: %d, elapsed time: %f s" \
            % (eval_phase, cost, ret_a['acc'], ret_a['f1'], ret_b, total_num_seqs, elapsed_time)
    else:
        raise ValueError('unsupported metric {}'.format(metric))
    return evaluate_info
def evaluate_regression(exe,
                        test_program,
                        test_pyreader,
                        graph_vars,
                        eval_phase,
                        use_multi_gpu_test=False,
                        metric='pearson_and_spearman'):
    """Evaluate a regression model; returns a summary string (dict in train phase)."""
    if eval_phase == "train":
        train_fetch_list = [graph_vars["loss"].name]
        if "learning_rate" in graph_vars:
            train_fetch_list.append(graph_vars["learning_rate"].name)
        outputs = exe.run(fetch_list=train_fetch_list)
        ret = {"loss": np.mean(outputs[0])}
        if "learning_rate" in graph_vars:
            ret["learning_rate"] = float(outputs[1][0])
        return ret

    test_pyreader.start()
    total_cost, total_num_seqs = 0.0, 0.0
    qids, labels, scores = [], [], []

    fetch_list = [
        graph_vars["loss"].name, graph_vars["probs"].name,
        graph_vars["labels"].name, graph_vars["qids"].name
    ]

    time_begin = time.time()
    while True:
        try:
            if use_multi_gpu_test:
                np_loss, np_probs, np_labels, np_qids = exe.run(
                    fetch_list=fetch_list)
            else:
                np_loss, np_probs, np_labels, np_qids = exe.run(
                    program=test_program, fetch_list=fetch_list)
            labels.extend(np_labels.reshape((-1)).tolist())
            if np_qids is None:
                np_qids = np.array([])
            qids.extend(np_qids.reshape(-1).tolist())
            scores.extend(np_probs.reshape(-1).tolist())
        except fluid.core.EOFException:
            # Data loader exhausted: reset and stop.
            test_pyreader.reset()
            break
    time_end = time.time()
    elapsed_time = time_end - time_begin

    if metric == 'pearson_and_spearman':
        ret = pearson_and_spearman(scores, labels)
        # NOTE: the loss slot of the report is a hard-coded 0.0 here.
        evaluate_info = "[%s evaluation] ave loss: %f, pearson:%f, spearman:%f, corr:%f, elapsed time: %f s" \
            % (eval_phase, 0.0, ret['pearson'], ret['spearmanr'], ret['corr'], elapsed_time)
    else:
        raise ValueError('unsupported metric {}'.format(metric))
    return evaluate_info
def evaluate(exe,
             test_program,
             test_pyreader,
             graph_vars,
             eval_phase,
             use_multi_gpu_test=False,
             metric='simple_accuracy',
             is_classify=False,
             is_regression=False):
    """Dispatch to the classification or regression evaluator."""
    evaluator = evaluate_classify if is_classify else evaluate_regression
    return evaluator(exe,
                     test_program,
                     test_pyreader,
                     graph_vars,
                     eval_phase,
                     use_multi_gpu_test=use_multi_gpu_test,
                     metric=metric)
def matthews_corrcoef(preds, labels):
    """Matthews correlation coefficient for binary {0, 1} predictions.

    Returns 0.0 when any confusion-matrix margin is empty; the original
    formula divided by zero there and produced NaN (plus a numpy warning).
    """
    preds = np.array(preds)
    labels = np.array(labels)
    tp = np.sum((labels == 1) & (preds == 1))
    tn = np.sum((labels == 0) & (preds == 0))
    fp = np.sum((labels == 0) & (preds == 1))
    fn = np.sum((labels == 1) & (preds == 0))

    # float() also avoids int64 overflow in the four-way product.
    denom = np.sqrt(float(tp + fp) * float(tp + fn) * float(tn + fp) * float(tn + fn))
    if denom == 0:
        return 0.0
    return ((tp * tn) - (fp * fn)) / denom
def f1_score(preds, labels):
    """Binary F1 score for {0, 1} predictions.

    Returns 0.0 when precision or recall is undefined (no predicted or no
    actual positives); the original computed 0/0 and returned NaN there.
    """
    preds = np.array(preds)
    labels = np.array(labels)
    tp = np.sum((labels == 1) & (preds == 1))
    fp = np.sum((labels == 0) & (preds == 1))
    fn = np.sum((labels == 1) & (preds == 0))
    # Guard the two zero-division cases explicitly.
    p = tp / (tp + fp) if tp + fp > 0 else 0.0
    r = tp / (tp + fn) if tp + fn > 0 else 0.0
    f1 = (2 * p * r) / (p + r + 1e-8)
    return f1
def pearson_and_spearman(preds, labels):
    """Pearson and Spearman correlations between predicted scores and labels.

    The Spearman value is exposed under both 'spearmanr' and 'spearman':
    different callers in this file historically read different key names,
    and the missing 'spearman' key caused a KeyError in evaluate_classify.
    """
    preds = np.array(preds)
    labels = np.array(labels)
    pearson_corr = pearsonr(preds, labels)[0]
    spearman_corr = spearmanr(preds, labels)[0]
    return {
        "pearson": pearson_corr,
        "spearmanr": spearman_corr,
        "spearman": spearman_corr,  # backward-compatible alias
        "corr": (pearson_corr + spearman_corr) / 2,
    }
def acc_and_f1(preds, labels):
    """Accuracy, F1, and their arithmetic mean for binary predictions."""
    pred_arr = np.array(preds)
    label_arr = np.array(labels)
    acc = simple_accuracy(pred_arr, label_arr)
    f1 = f1_score(pred_arr, label_arr)
    return {"acc": acc, "f1": f1, "acc_and_f1": (acc + f1) / 2}
def simple_accuracy(preds, labels):
    """Fraction of positions where predictions match labels."""
    pred_arr = np.array(preds)
    label_arr = np.array(labels)
    return np.mean(pred_arr == label_arr)
def predict(exe,
            test_program,
            test_pyreader,
            graph_vars,
            dev_count=1,
            is_classify=False,
            is_regression=False):
    """Drain *test_pyreader* and return (qids, predictions, raw probabilities).

    Classification yields argmax class ids; regression yields raw scores.
    """
    test_pyreader.start()
    qids, scores, probs = [], [], []
    preds = []

    fetch_list = [graph_vars["probs"].name, graph_vars["qids"].name]

    while True:
        try:
            if dev_count == 1:
                np_probs, np_qids = exe.run(program=test_program,
                                            fetch_list=fetch_list)
            else:
                # Multi-device: run whatever parallel program exe was given.
                np_probs, np_qids = exe.run(fetch_list=fetch_list)

            if np_qids is None:
                np_qids = np.array([])
            qids.extend(np_qids.reshape(-1).tolist())
            if is_classify:
                np_preds = np.argmax(np_probs, axis=1).astype(np.float32)
                preds.extend(np_preds)
            elif is_regression:
                preds.extend(np_probs.reshape(-1))
            probs.append(np_probs)
        except fluid.core.EOFException:
            # Data loader exhausted: reset and stop.
            test_pyreader.reset()
            break

    probs = np.concatenate(probs, axis=0).reshape([len(preds), -1])
    return qids, preds, probs
def make_all_inputs(args):
shapes=[[-1, args.max_seq_len, 1], [-1, args.max_seq_len, 1], [-1, args.max_seq_len, 1],
[-1, args.max_seq_len, 1], [-1, args.max_seq_len, 1]]
dtypes=['int64', 'int64', 'int64', 'float32', 'int64']
lod_levels=[0, 0, 0, 0, 0]
#names = ['read_file_0.tmp_0', 'read_file_0.tmp_1', 'read_file_0.tmp_2', 'read_file_0.tmp_3', 'read_file_0.tmp_4', 'read_file_0.tmp_5']
names = ['eval_placeholder_0', 'eval_placeholder_1', 'eval_placeholder_2', 'eval_placeholder_3', 'eval_placeholder_4']
var_list = []
for i in range(len(names)):
var_list.append(fluid.layers.data(name=names[i], shape=shapes[i], dtype=dtypes[i], lod_level=lod_levels[i]))
return var_list
def create_model_predict(args, ernie_config, is_prediction=False):
    """Build an inference-only ERNIE classifier fed through named placeholders.

    Returns (probs, graph_vars) when is_prediction is True.
    NOTE(review): when is_prediction is False the final ``return graph_vars``
    references a name defined only inside the is_prediction branch and would
    raise NameError; callers in this file always pass is_prediction=True.
    """
    (src_ids, sent_ids, pos_ids, input_mask, task_ids) = make_all_inputs(args)

    ernie = ErnieModel(
        src_ids=src_ids,
        position_ids=pos_ids,
        sentence_ids=sent_ids,
        task_ids=task_ids,
        input_mask=input_mask,
        config=ernie_config,
        use_fp16=args.use_fp16)

    # Pooled [CLS] representation -> dropout -> linear classifier head.
    cls_feats = ernie.get_pooled_output()
    cls_feats = fluid.layers.dropout(
        x=cls_feats,
        dropout_prob=0.1,
        dropout_implementation="upscale_in_train")
    logits = fluid.layers.fc(
        input=cls_feats,
        size=args.num_labels,
        param_attr=fluid.ParamAttr(
            name="_cls_out_w",
            initializer=fluid.initializer.TruncatedNormal(scale=0.02)),
        bias_attr=fluid.ParamAttr(
            name="_cls_out_b", initializer=fluid.initializer.Constant(0.)))

    if is_prediction:
        probs = fluid.layers.softmax(logits)
        feed_targets_name = [
            src_ids.name, pos_ids.name, sent_ids.name, input_mask.name
        ]
        graph_vars = {
            "probs": probs,
        }
        # Keep fetch targets alive after program pruning.
        for k, v in graph_vars.items():
            v.persistable = True
        return probs, graph_vars

    return graph_vars
|
Zrachel/ERNIE | ernie/server.py | <reponame>Zrachel/ERNIE
# coding=utf-8
import reader.task_reader as task_reader
from model.ernie import ErnieConfig
from finetune.classifier import create_model_predict, evaluate
from optimization import optimization
from utils.args import print_arguments
from utils.init import init_pretraining_params, init_checkpoint
import paddle.fluid as fluid
#from finetune_args import parser
import datetime, csv
import os, re
import numpy as np
import sys
from datetime import datetime
import pdb
from flask import Flask, request, Response
import json
app = Flask(__name__)
@app.route('/')
def hello_world():
    """Trivial health-check endpoint for the Flask service."""
    return 'Hello, World!'
# --- One-time service initialisation (runs at import time) ---
place = fluid.CUDAPlace(int(os.getenv('FLAGS_selected_gpus', '0')))
exe = fluid.Executor(place)
startup_prog = fluid.Program()
#args = parser.parse_args()

class args:
    # Hard-coded inference configuration, replacing the CLI parser above.
    ernie_config_path="data/ERNIE_Large_en_stable-2.0.0/ernie_config.json"
    use_cuda=True
    do_train=False
    do_val=False
    do_test=True
    batch_size=32
    test_set="data/chnsenticorp/test.tsv"
    vocab_path="data/ERNIE_Large_en_stable-2.0.0/vocab.txt"
    init_checkpoint="checkpoints_cls2_LEN40_APPEND5_SEGMENT_force_context/step_12000" # 8808
    max_seq_len=256
    num_labels=5
    use_fp16=False
    label_map_config=None
    do_lower_case=True
    in_tokens=False
    random_seed=1

ernie_config = ErnieConfig(args.ernie_config_path)
ernie_config.print_config()

# Build the inference program once; it is reused by every request.
test_prog = fluid.Program()
with fluid.program_guard(test_prog, startup_prog):
    with fluid.unique_name.guard():
        _, graph_vars = create_model_predict(
            args,
            ernie_config=ernie_config,
            is_prediction=True)

# NOTE(review): presumably the class-id labels; predict_duanSegment.py
# documents the last class as "do not break" -- confirm.
LABELMODE = [str(x) for x in range(args.num_labels - 1)]

test_prog = test_prog.clone(for_test=True)
exe.run(startup_prog)

init_checkpoint(
    exe,
    args.init_checkpoint,
    main_program=startup_prog,
    use_fp16=args.use_fp16)

reader = task_reader.ClassifyReader(
    vocab_path=args.vocab_path,
    label_map_config=args.label_map_config,
    max_seq_len=args.max_seq_len,
    do_lower_case=args.do_lower_case,
    in_tokens=args.in_tokens,
    random_seed=args.random_seed)

#print(datetime.datetime.now())
#print(datetime.datetime.now())
def rm_space(line):
    # Remove whitespace between adjacent CJK characters (Python 2 `ur` literal).
    line = re.sub(ur'(?<=[\u4e00-\u9fa5])\s+(?=[\u4e00-\u9fa5])', '', line)
    return line
class Example:
    """One classification example with an optional second text and dummy label.

    Bug fix: the original defined __init__ twice; the second definition
    silently replaced the first, so the one-argument call used by
    predict_oneline raised TypeError.  Merged into a single constructor.
    The one-text form intentionally skips rm_space, matching the original
    single-argument __init__.
    """
    def __init__(self, texta, textb=None):
        if textb is None:
            self._fields = ["text_a", "label"]
            self.text_a = texta
        else:
            self._fields = ['text_a', 'text_b', 'label']
            self.text_a = rm_space(texta)
            self.text_b = rm_space(textb)
        self.label = 1
class Sample:
    """Maps prepared feed values onto the model's eval placeholder names."""
    def __init__(self, values):
        # Placeholder order matches the feed order produced by the reader
        # (note placeholder 4 precedes 3).
        self.names = ['eval_placeholder_0', 'eval_placeholder_1', 'eval_placeholder_2', 'eval_placeholder_4', 'eval_placeholder_3']
        self.values = values

    def gen(self):
        """Return a feed dict pairing the first five values with their names."""
        return {name: value for name, value in zip(self.names, self.values[:5])}
def predict_oneline(line):
    """Run one (texta, textb) pair through the model; return the probability vector."""
    texta, textb = line
    if textb == None:
        example = Example(texta)
    else:
        # Log request timing and inputs to stderr (Python 2 print syntax).
        dt_string = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]
        print >> sys.stderr, dt_string
        print >> sys.stderr, "texta:", texta
        print >> sys.stderr, "textb:", textb
        example = Example(texta, textb)
    data_gen = reader._prepare_one_sample(example, 1)
    sample = Sample(data_gen)
    sample = sample.gen()
    #print("Final test result:")
    fetch_list = [graph_vars["probs"].name]
    # use_program_cache avoids re-preparing the program on every request.
    np_probs = exe.run(program=test_prog, feed = sample, fetch_list=fetch_list, use_program_cache=True)
    np_probs = np_probs[0][0]
    #if texta.startswith(u'\u6211\u4eec') and len(texta) < 5:
    #    print(texta.encode('utf8'))
    #    np_probs[0:-1] = 0
    #    np_probs[-1] = 1.0
    #print(datetime.datetime.now())
    #print(np_probs)
    print >> sys.stderr, np_probs
    dt_string = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]
    print >> sys.stderr, dt_string
    return np_probs
@app.route('/prob_paddle', methods = ['POST'])
def prob():
    """HTTP endpoint: expects JSON {"line": [texta, textb]}; returns {"prob": [...]}."""
    #print request
    #print request.json
    line = request.json['line']
    probability = predict_oneline(line)
    res = {'prob': probability.tolist()}
    return Response(json.dumps(res), mimetype = 'application/json')
class FLAGS:
    # Segmentation hyper-parameters used by sentence_prediction below.
    max_wait_words = 40  # force a break after this many unbroken words
    waitn = 3            # look-ahead window size (in words)
    textb_none = True    # if True, feed a single concatenated text_a only
def get_periods_position(sent):
    """Separate '|' break markers from *sent*.

    Returns (break positions in the marker-free word stream, the marker-free
    sentence).  Python 2: the input is decoded from utf8 bytes.
    """
    sent = sent.strip().lower().decode('utf8')
    # Drop characters that are not meaningful for segmentation.
    sent = re.sub(r", |\"|- |:","",sent)
    # NOTE(review): pattern dots are unescaped, so this matches more than the
    # literal "ph.d." -- presumably intended as a literal replacement.
    sent = re.sub("ph.d.", "phd", sent)
    #tknzr = TweetTokenizer()
    def find_periods_position(sent):
        # Tokenize
        #words = tknzr.tokenize(sent)
        words = sent.split()
        periods_pos = []
        rm_periods = [words[0]]
        for i in range(1, len(words)):
            if words[i] == "|":
                # Index is adjusted by the number of markers already removed.
                periods_pos.append(i - len(periods_pos))
            else:
                rm_periods.append(words[i])
        return periods_pos, rm_periods
    periods_pos, rm_periods = find_periods_position(sent)
    #print("input:")
    #print " ".join(rm_periods)
    return periods_pos, " ".join(rm_periods)
def sentence_prediction(sent, labelmode):
    """Streaming sentence-break prediction over the words of *sent*.

    Emits segmented sentences to f_finalout / f_nsent and returns the full
    marked-up output.  NOTE(review): indentation was reconstructed from a
    whitespace-stripped copy -- verify nesting against the original file.
    """
    def gen_sent(wordlist):
        # NOTE(review): the immediate return makes everything after it dead
        # code; the CJK-aware joining below is never executed.
        return "".join(wordlist)
        line = wordlist[0]
        for i in range(1, len(wordlist)):
            if wordlist[i][0] >= 'a' and wordlist[i][0] <= 'z' or \
                wordlist[i][0] >= '0' and wordlist[i][0] <= '9':
                line += " " + wordlist[i]
            else:
                line += wordlist[i]
        return "".join(line)
    words = sent.split()
    endid = 1
    output = [words[0]]
    prob_break = [[] for _ in range(len(words))]# prob_break[t]: prob of break before t
    last_break_pos = 0
    cur_sent = [words[0]]
    nsent = 0
    while endid < len(words):
        # Context window never reaches back past the last committed break.
        startid = max(endid - FLAGS.max_wait_words, last_break_pos)
        if FLAGS.textb_none == False:
            texta = "".join(words[startid : endid])
            textb = "".join(words[endid : min(endid+FLAGS.waitn, len(words))])
        else:
            texta = gen_sent(words[startid : endid + FLAGS.waitn])
            textb = None
        prob = predict_oneline(texta.encode('utf8'), textb.encode('utf8'))
        # Scatter the waitn+1 class probabilities onto candidate break points.
        for i in range(FLAGS.waitn + 1):
            if endid - i + FLAGS.waitn > 0 and endid - i + FLAGS.waitn < len(words):
                prob_break[endid + FLAGS.waitn - i].append(prob[i])
        detect_pos = endid
        if detect_pos > 0 and np.mean(prob_break[detect_pos]) > 0.4:
            # Averaged break probability exceeded the threshold: commit a break.
            break_symbol = "|"
            output.append(break_symbol)
            cur_sent.append(break_symbol)
            last_break_pos = detect_pos
            print >> f_finalout, " ".join([w.encode('utf8') for w in cur_sent])
            nsent += 1
            cur_sent = []
            cur_sent.append(words[detect_pos])
            output.append(words[detect_pos])
            for i in range(FLAGS.waitn + 1):
                if detect_pos + i < len(words):
                    prob_break[detect_pos + i] = []
        else:
            output.append(words[detect_pos])
            cur_sent.append(words[detect_pos])
            if len(cur_sent) > FLAGS.max_wait_words:
                # Forced break: pick the most probable position in the window.
                maxpos = -1
                maxvalue = -1
                for i in range(FLAGS.max_wait_words):
                    if np.mean(prob_break[detect_pos - i]) > maxvalue:
                        maxpos = detect_pos - i
                        maxvalue = np.mean(prob_break[detect_pos - i])
                cur_sent = cur_sent[:maxpos - (detect_pos - FLAGS.max_wait_words)]
                output = output[:maxpos - (detect_pos - FLAGS.max_wait_words) + last_break_pos + nsent ] + ["|"]
                # NOTE: no break_symbol added here
                print >> f_finalout, " ".join([w.encode('utf8') for w in cur_sent] + ["|"])
                nsent += 1
                endid = maxpos
                last_break_pos = maxpos
                cur_sent = []
                cur_sent.append(words[endid])
                output.append(words[endid])
                for i in range(FLAGS.waitn + 1):
                    if detect_pos + i < len(words):
                        prob_break[endid + i] = []
        endid += 1
    # Flush the trailing partial sentence.
    if last_break_pos + 2 < len(words):
        output.append("|")
        cur_sent.append("|")
    if len(cur_sent) > 0:
        print >> f_finalout, " ".join([w.encode('utf8') for w in cur_sent])
        nsent += 1
    print("prediction:")
    print(" ".join(output).encode('utf8'))
    print >> f_nsent, str(nsent)
    return " ".join(output).encode('utf8')
def test_dataset(filename): # calculate p/r of a dataset
    """Evaluate break precision/recall/F-score plus latency over a file.

    NOTE(review): indentation was reconstructed from a whitespace-stripped
    copy; in particular whether each line is scored individually or lines are
    accumulated until a blank line could not be confirmed -- verify against
    the original file.
    """
    ngram = 5
    def fscore(p,r):
        # Harmonic mean of precision and recall (NOTE(review): appears unused).
        return 2*p*r/(p+r)
    n_correct_prediction = 0
    n_predict_pos = 0
    n_gt_pos = 0
    lineid = 0
    onesent = ""
    latency = []
    with open(filename) as f:
        for line in f:
            line = line.strip()
            if len(line) > 0:
                onesent += line + " | "
            onesent = onesent.strip()
            # Ground-truth break positions vs. predicted ones.
            gt_pos, sent = get_periods_position(onesent)
            predict_sent = sentence_prediction(sent, LABELMODE)
            predict_pos, _ = get_periods_position(predict_sent)
            for pos in gt_pos:
                if pos in predict_pos:
                    n_correct_prediction += 1
            n_predict_pos += len(predict_pos)
            n_gt_pos += len(gt_pos)
            onesent = ""
            # Cumulative precision/recall after this sample.
            precision = n_correct_prediction * 1.0 / n_predict_pos
            recall = n_correct_prediction * 1.0 / n_gt_pos
            if precision * recall == 0:
                f = 0
            else:
                # WARNING: rebinds `f` (the open file) to the F-score value.
                f = 2 * precision * recall / (precision + recall)
            print("------------------")
            print("Precision:%.2f\tRecall:%.2f\tF-score:%.2f" % (precision, recall, f))
            # Latency = gap between consecutive predicted break positions.
            for i in range(1, len(predict_pos)):
                latency.append(predict_pos[i] - predict_pos[i-1])
            if len(latency) > 0:
                avglatency = sum(latency) * 1.0 / len(latency)
                print("Latency: Avg:%.2f\tMax: %d" % (avglatency, max(latency)))
            print("------------------\n")
    precision = n_correct_prediction * 1.0 / n_predict_pos
    recall = n_correct_prediction * 1.0 / n_gt_pos
    print("Precision\tRecall\tF-score")
    print("%.2f\t%.2f\t%.2f" % (precision, recall, f))
# Output streams opened in append mode so repeated runs accumulate results.
f_finalout = open(os.path.join(args.init_checkpoint, "final.out"), "a")
f_nsent= open(os.path.join(args.init_checkpoint, "final.nsent"), "a")

if __name__ == "__main__":
    #test_dataset('data/source/test.zh')
    # app.run blocks until the server is stopped; the files close afterwards.
    app.run(host = '10.255.124.15', port = 8811)
    f_finalout.close()
    f_nsent.close()
    # Commented-out smoke test; the Chinese string means
    # "run after downloading the model and task data".
    #predict_oneline("在下载完模型和任务数据后运行")
|
Zrachel/ERNIE | ernie/predict_duanSegment.py | # coding=utf-8
import reader.task_reader as task_reader
from model.ernie import ErnieConfig
from finetune.classifier import create_model_predict
from finetune.classifier import evaluate
from optimization import optimization
from utils.args import print_arguments
from utils.init import init_pretraining_params
from utils.init import init_checkpoint
import paddle.fluid as fluid
from finetune_args import parser
import datetime
import csv
import os
import re
import sys
import numpy as np
import pdb
# --- One-time setup: build the inference program and restore the checkpoint ---
place = fluid.CUDAPlace(int(os.getenv('FLAGS_selected_gpus', '0')))
exe = fluid.Executor(place)
startup_prog = fluid.Program()

args = parser.parse_args()
ernie_config = ErnieConfig(args.ernie_config_path)
ernie_config.print_config()

test_prog = fluid.Program()
with fluid.program_guard(test_prog, startup_prog):
    with fluid.unique_name.guard():
        _, graph_vars = create_model_predict(
            args,
            ernie_config=ernie_config,
            is_prediction=True)

LABELMODE = ["0", '1', '2', '3', '4'] # the last class denotes not to duanju
# Punctuation/break markers recognised by get_periods_position.
SYMBOLS = [',', '.', '?', '|']

test_prog = test_prog.clone(for_test=True)
exe.run(startup_prog)

init_checkpoint(
    exe,
    args.init_checkpoint,
    main_program=startup_prog,
    use_fp16=args.use_fp16)

reader = task_reader.ClassifyReader(
    vocab_path=args.vocab_path,
    label_map_config=args.label_map_config,
    max_seq_len=args.max_seq_len,
    do_lower_case=args.do_lower_case,
    in_tokens=args.in_tokens,
    random_seed=args.random_seed)

print(datetime.datetime.now())
class Example(object):
    """One classification example with an optional second text and dummy label."""

    def __init__(self, texta, textb=None):
        """Store text_a (and text_b when given); label is a fixed placeholder."""
        self.text_a = texta
        self.label = 1
        if textb is None:
            self._fields = ["text_a", "label"]
        else:
            self._fields = ['text_a', 'text_b', 'label']
            self.text_b = textb
class Sample(object):
    """Maps prepared feed values onto the model's eval placeholder names."""

    def __init__(self, values):
        """Remember the raw feed values; names follow the reader's feed order."""
        #self.names = ['read_file_0.tmp_0', 'read_file_0.tmp_1', 'read_file_0.tmp_2', 'read_file_0.tmp_3']
        # Note placeholder 4 precedes 3 in the feed order.
        self.names = ['eval_placeholder_0', 'eval_placeholder_1', 'eval_placeholder_2', 'eval_placeholder_4', 'eval_placeholder_3']
        self.values = values

    def gen(self):
        """Return a feed dict pairing the first five values with their names."""
        return {name: value for name, value in zip(self.names, self.values[:5])}
def predict_oneline(line):
    """Run one (texta, textb) pair through the model; return the probability vector."""
    texta, textb = line
    if textb is None:
        example = Example(texta)
    else:
        example = Example(texta, textb)
    data_gen = reader._prepare_one_sample(example, 1)
    sample = Sample(data_gen)
    sample = sample.gen()
    #print("Final test result:")
    fetch_list = [graph_vars["probs"].name]
    # use_program_cache avoids re-preparing the program on every call.
    np_probs = exe.run(program=test_prog, feed=sample, fetch_list=fetch_list, use_program_cache=True)
    np_probs = np_probs[0][0]
    #print(datetime.datetime.now())
    #print(np_probs)
    return np_probs
class FLAGS(object):
    """Static decoding configuration for the streaming segmenter."""

    max_wait_words = 20   # hard cap on words buffered before a forced break
    #waitn = 3
    #appendn = 0
    #textb_none = True
    waitn = 0             # words to wait before deciding (unused in this path)
    appendn = 3           # look-ahead words used as text_b / tail context
    textb_none = False    # True -> single-segment input, no text_b
def get_periods_position(sent):
    """Separate break symbols from the words of `sent`.

    Returns (positions, symbols, cleaned) where positions[i] is the word
    index in the cleaned (symbol-free) text right before which symbols[i]
    occurred, and cleaned is the symbol-free text re-joined with spaces.
    """
    sent = sent.strip().lower()
    sent = re.sub(r"\"|- |:", "", sent)
    sent = re.sub("ph.d.", "phd", sent)
    tokens = sent.split()
    positions = []
    symbols = []
    kept = [tokens[0]]
    for idx in range(1, len(tokens)):
        tok = tokens[idx]
        if tok in SYMBOLS:
            # Subtract symbols already seen so the index refers to the
            # cleaned word sequence, not the raw token sequence.
            positions.append(idx - len(positions))
            symbols.append(tok)
        else:
            kept.append(tok)
    return positions, symbols, " ".join(kept)
def sentence_prediction(sent, labelmode):
    """Stream through `sent` word by word, inserting break symbols online.

    NOTE(review): the source lost its indentation; the nesting below is a
    best-effort reconstruction — verify against the original repository.
    `labelmode` is accepted but never used.  Python 2 constructs
    (`print >>`, `ur''`) are preserved as-is.
    """
    def gen_sent(wordlist):
        """Join words with no separator.

        NOTE(review): the immediate return makes everything after it dead
        code (a smarter spacing join that was apparently disabled).
        """
        return "".join(wordlist)
        line = wordlist[0]
        for i in range(1, len(wordlist)):
            if wordlist[i][0] >= 'a' and wordlist[i][0] <= 'z' or \
                    wordlist[i][0] >= '0' and wordlist[i][0] <= '9':
                line += " " + wordlist[i]
            else:
                line += wordlist[i]
        return "".join(line)
    words = sent.split()
    def rm_space(line):
        """Drop whitespace between adjacent CJK characters."""
        line = re.sub(ur'(?<=[\u4e00-\u9fa5])\s+(?=[\u4e00-\u9fa5])', '', line)
        return line
    endid = 1
    output = [words[0]]
    prob_break = [[] for _ in range(len(words))]  # prob_break[t]: prob of break before t
    max_symbol = [","] * len(words)
    last_break_pos = 0
    cur_sent = [words[0]]
    nsent = 0
    history = ""  # already-broken prefix carried as left context
    while endid < len(words):
        startid = max(endid - FLAGS.max_wait_words, last_break_pos)
        if FLAGS.textb_none == False:
            texta = history + rm_space(" ".join(words[startid: endid]))
            textb = rm_space(" ".join(words[endid: min(endid + FLAGS.appendn, len(words))]))
        else:
            texta = history + gen_sent(words[startid: endid + FLAGS.appendn])
            textb = None
        # NOTE(review): when FLAGS.textb_none is True, textb is None and
        # textb.encode('utf8') below raises AttributeError.
        prob = predict_oneline([texta.encode('utf8'), textb.encode('utf8')])
        prob_break[endid] = 1 - prob[-1]  # last class means "no break here"
        sys.stderr.write("TA: " + texta.encode('utf8'))
        if FLAGS.textb_none == False:
            sys.stderr.write("\tTB: " + textb.encode('utf8'))
        #sys.stderr.write("\t".join([str(x) for x in [len(history), len(texta)]]))
        sys.stderr.write("\t[" + " ".join([str(x) for x in prob]) + "]")
        #sys.stderr.write("\t" + str(prob_break[endid]))
        sys.stderr.write("\n")
        #if len(texta.split()) > 50:
        #    prob[1] *= 5
        #    prob[2] *= 2
        max_symbol[endid] = SYMBOLS[prob[0:-1].argmax()]
        detect_pos = endid
        #if endid > startid and prob_break[detect_pos] > 0.6:
        #    break_symbol = max_symbol[detect_pos]
        #    if break_symbol in [",", "|"]:
        #        history = texta
        #        if break_symbol == ",":
        #            history += ","
        #    else:
        #        history = ""
        THRES_LEN = 25  # max words of left context kept in `history`
        if endid - startid > 0 and prob_break[detect_pos] > 0.6:
            # Confident break predicted immediately before `detect_pos`.
            break_symbol = max_symbol[detect_pos]
            if break_symbol in [",", "|"] or break_symbol == '.' and len(texta.split()) < THRES_LEN:
                history = texta + " "
                if break_symbol in [",", '.']:
                    history += ", "
            else:
                #if break_symbol in ['?', '.']:
                if break_symbol == '?':
                    history = ""
                elif len(texta.split()) >= THRES_LEN:
                    history = texta + " "
                    # Trim history from the left one comma-chunk at a time.
                    while len(history.split()) >= THRES_LEN:
                        first_douhao = history.find(",")
                        if first_douhao == -1:
                            break
                        history = history[first_douhao + 1:]
                    if break_symbol in [",", '.']:
                        history += ", "
            output.append(break_symbol)
            cur_sent.append(break_symbol)
            last_break_pos = detect_pos
            print >> f_finalout, " ".join([w.encode('utf8') for w in cur_sent])
            nsent += 1
            cur_sent = []
            if detect_pos < len(words):
                prob_break[detect_pos] = 0
                cur_sent.append(words[detect_pos])
                output.append(words[detect_pos])
        else:
            # No confident break: keep buffering the current word.
            output.append(words[detect_pos])
            cur_sent.append(words[detect_pos])
            if len(cur_sent) > FLAGS.max_wait_words:
                # Buffer full: force a break at the most probable past slot.
                maxpos = -1
                maxvalue = -1
                for i in range(FLAGS.max_wait_words):
                    if np.mean(prob_break[detect_pos - i]) > maxvalue:
                        maxpos = detect_pos - i
                        maxvalue = np.mean(prob_break[detect_pos - i])
                break_symbol = max_symbol[maxpos]
                if break_symbol in [",", "|"] or break_symbol == '.' and len(texta.split()) < THRES_LEN:
                    # Strip the (endid - maxpos) trailing words off texta.
                    zishu = 0
                    while zishu != endid - maxpos:
                        if texta[-1] == " ":
                            texta = texta[:-1]
                            continue
                        while len(texta) and (texta[-1] != " "):
                            texta = texta[:-1]
                        zishu += 1
                    # NOTE(review): final [:-1] assumed to drop the trailing
                    # separator space — placement reconstructed.
                    texta = texta[:-1]
                    history = texta + " "
                    #history = texta[:len(texta) - (endid - maxpos)]
                    if break_symbol in [",", '.']:
                        history += ", "
                else:
                    history = ""
                cur_sent = cur_sent[:maxpos - startid]
                output = output[:maxpos - startid + last_break_pos + nsent] + [break_symbol]
                print >> f_finalout, " ".join([w.encode('utf8') for w in cur_sent + [break_symbol]])
                nsent += 1
                endid = maxpos  # rewind decoding to the forced break
                last_break_pos = maxpos
                cur_sent = []
                if endid < len(words):
                    prob_break[endid] = 0
                    cur_sent.append(words[endid])
                    output.append(words[endid])
        endid += 1
    if last_break_pos + 2 < len(words):
        output.append("。")
        cur_sent.append("。")
    if len(cur_sent) > 0:
        # NOTE(review): unlike earlier writes, this one does not .encode() —
        # inconsistent, may matter for non-ASCII residue in Python 2.
        print >> f_finalout, " ".join(cur_sent)
        nsent += 1
    print("prediction:")
    print(" ".join(output))
    print >> f_nsent, str(nsent)
    return " ".join(output)
def test_dataset(filename): # calculate p/r of a dataset
    """Evaluate break/punctuation precision, recall, F over a dataset file.

    Paragraphs in `filename` are separated by blank lines; each paragraph is
    stripped of gold symbols, re-predicted, and compared position-by-position.
    NOTE(review): indentation was lost in the source; nesting reconstructed.
    """
    ngram = 5  # NOTE(review): unused
    def fscore(p, r):
        """Harmonic mean of p and r (unused — F is recomputed inline below)."""
        return 2 * p * r / (p + r)
    # "duan" counters: break position matched; "classify": symbol also matched.
    n_correct_duanAll = 0
    n_correct_duanBiaodian = 0
    n_correct_classifyAll = 0
    n_correct_classifyBiaodian = 0
    n_correct_classifySegment = 0
    n_predict_pos = 0
    n_gt_pos = 0
    n_predict_biaodian = 0
    n_gt_biaodian = 0
    n_predict_segment = 0
    n_gt_segment = 0
    lineid = 0  # NOTE(review): unused
    latency = []
    onesent = ""
    with open(filename) as f:
        for line in f:
            line = line.strip()
            if line != "":
                #onesent = line
                onesent += line + " "  # accumulate the paragraph
            else:
                onesent = onesent.strip()
                gt_pos, gt_symbols, sent = get_periods_position(onesent)
                predict_sent = sentence_prediction(sent, LABELMODE)
                predict_pos, predict_symbols, _ = get_periods_position(predict_sent)
                for i in range(len(gt_pos)):
                    try:
                        # if the i-th segment in gt_pos can be found in predict_pos
                        j = predict_pos.index(gt_pos[i])
                        n_correct_duanAll += 1
                        if gt_symbols[i] != "|" and predict_symbols[j] != "|": # both predict and gt is biaodian
                            n_correct_duanBiaodian += 1
                        if gt_symbols[i] == predict_symbols[j]:
                            n_correct_classifyAll += 1
                            if gt_symbols[i] != "|": # belongs to biaodian
                                n_correct_classifyBiaodian += 1
                            else:
                                n_correct_classifySegment += 1
                    # NOTE(review): bare except also swallows unrelated errors;
                    # ValueError from .index() is the expected one.
                    except: # index j not find
                        pass
                n_predict_pos += len(predict_pos)
                n_gt_pos += len(gt_pos)
                n_predict_biaodian += sum([x != "|" for x in predict_symbols])
                n_gt_biaodian += sum([x != "|" for x in gt_symbols])
                n_predict_segment += sum([x == "|" for x in predict_symbols])
                n_gt_segment += sum([x == "|" for x in gt_symbols])
                onesent = ""
                # Cumulative statistics printed after every paragraph.
                print("---------Duan All---------")
                print("right:%d\tall_out:%d\tall_ans:%s" \
                    % (n_correct_duanAll, n_predict_pos, n_gt_pos))
                precision = n_correct_duanAll * 1.0 / n_predict_pos
                recall = n_correct_duanAll * 1.0 / n_gt_pos
                if precision * recall == 0:
                    f = 0
                else:
                    f = 2 * precision * recall / (precision + recall)
                print("Precision:%.4f\tRecall:%.4f\tF-score:%.4f" % (precision, recall, f))
                print("---------Duan Biaodian---------")
                print("right:%d\tall_out:%d\tall_ans:%s" \
                    % (n_correct_duanBiaodian, n_predict_biaodian, n_gt_biaodian))
                precision = n_correct_duanBiaodian * 1.0 / n_predict_biaodian
                recall = n_correct_duanBiaodian * 1.0 / n_gt_biaodian
                if precision * recall == 0:
                    f = 0
                else:
                    f = 2 * precision * recall / (precision + recall)
                print("Precision:%.4f\tRecall:%.4f\tF-score:%.4f" % (precision, recall, f))
                print("---------Classify All---------")
                precision = n_correct_classifyAll * 1.0 / n_predict_pos
                recall = n_correct_classifyAll * 1.0 / n_gt_pos
                if precision * recall == 0:
                    f = 0
                else:
                    f = 2 * precision * recall / (precision + recall)
                print("Precision:%.4f\tRecall:%.4f\tF-score:%.4f" % (precision, recall, f))
                print("---------Classify Biaodian---------")
                print("right:%d\tall_out:%d\tall_ans:%s" % \
                    (n_correct_classifyBiaodian, n_predict_biaodian, n_gt_biaodian))
                precision = n_correct_classifyBiaodian * 1.0 / n_predict_biaodian
                recall = n_correct_classifyBiaodian * 1.0 / n_gt_biaodian
                if precision * recall == 0:
                    f = 0
                else:
                    f = 2 * precision * recall / (precision + recall)
                print("Precision:%.4f\tRecall:%.4f\tF-score:%.4f" % (precision, recall, f))
                print("---------Classify Segment---------")
                print("right:%d\tall_out:%d\tall_ans:%s" % \
                    (n_correct_classifySegment, n_predict_segment, n_gt_segment))
                precision = n_correct_classifySegment * 1.0 / n_predict_segment
                recall = n_correct_classifySegment * 1.0 / n_gt_segment
                if precision * recall == 0:
                    f = 0
                else:
                    f = 2 * precision * recall / (precision + recall)
                print("Precision:%.4f\tRecall:%.4f\tF-score:%.4f" % (precision, recall, f))
                # Latency = gap between consecutive predicted break positions.
                for i in range(1, len(predict_pos)):
                    latency.append(predict_pos[i] - predict_pos[i - 1])
                if len(latency) > 0:
                    avglatency = sum(latency) * 1.0 / len(latency)
                    print("Latency: Avg:%.2f\tMax: %d" % (avglatency, max(latency)))
                print("------------------\n")
                onesent = ""
    # Final summary.  NOTE(review): the F printed here is the stale value
    # left over from the last per-paragraph Classify Segment computation,
    # not recomputed from this precision/recall pair.
    precision = n_correct_classifySegment * 1.0 / n_predict_pos
    recall = n_correct_classifySegment * 1.0 / n_gt_pos
    print("Precision\tRecall\tF-score")
    print("%.4f\t%.4f\t%.4f" % (precision, recall, f))
# Output streams for predicted sentences and per-document sentence counts,
# written alongside the checkpoint being evaluated.
f_finalout = open(os.path.join(args.init_checkpoint, "final.out"), "w")
f_nsent = open(os.path.join(args.init_checkpoint, "final.nsent"), "w")
if __name__ == "__main__":
    test_dataset('data/testset/GTC2019/src.transcript.tok')
    f_finalout.close()
    f_nsent.close()
#predict_oneline(["I have", "a dream"])
|
Zrachel/ERNIE | ernie/script/plot_fscore_duanall.py | <gh_stars>0
import matplotlib.pyplot as plt
import os
import sys
import pdb
def plot_all(dir, id, fileout):
    """Plot metric row `id` from step_<N> eval files under `dir` to `fileout`.

    Scans steps 20000, 22000, ... and collects the value on line `id` of
    each existing step file; missing steps are simply skipped.
    """
    steps = []
    values = []
    stepstart = 20000
    for i in range(1000000):
        step = stepstart + 2000 * i
        #print step
        filename = dir + "/" + "step_" + str(step)
        if not os.path.exists(filename):
            continue
        with open(filename, 'r') as fin:
            rows = [ln.strip() for ln in fin.readlines()]
        steps.append(step)
        values.append(float(rows[id]))
    #pdb.set_trace()
    plt.plot(steps, values, 'go-', linewidth=2)
    plt.savefig(fileout)
    plt.close()
#dir="checkpoints_5cls_segment_withRegen_translation"
# Checkpoint run whose eval logs are plotted; row indexes 1-3 presumably map
# to break/classify/punctuation F-scores — confirm against the eval log format.
dir="checkpoints_cls2_LEN40_APPEND5_SEGMENT_force_context"
plot_all("log/"+dir, 1, "duanall.png")
plot_all("log/"+dir, 2, "clsall.png")
plot_all("log/"+dir, 3, "clsbiaodian.png")
|
eL1x/timetable-optimizer | src/Classwork.py |
class Classwork:
    """One scheduled class: subject name, weekday, start slot, id, headcount."""

    def __init__(self, classwork_name, random_day, random_hour, id, num_of_students=0):
        self._classwork_name = classwork_name
        self._day = random_day
        self._hour = random_hour
        self._id = id
        self._num_of_students = num_of_students

    @property
    def classwork_name(self):
        """Subject name this class belongs to."""
        return self._classwork_name

    @property
    def day(self):
        """Weekday index (0-based)."""
        return self._day

    @property
    def hour(self):
        """Starting time slot index."""
        return self._hour

    @hour.setter
    def hour(self, value):
        self._hour = value

    @property
    def id(self):
        """Term identifier within the subject."""
        return self._id

    @property
    def num_of_students(self):
        """Current enrollment of this term."""
        return self._num_of_students

    @num_of_students.setter
    def num_of_students(self, value):
        self._num_of_students = value
eL1x/timetable-optimizer | src/TimetablePlotter.py | <filename>src/TimetablePlotter.py<gh_stars>0
import matplotlib.pyplot as plt
from src.configuration import *
# Axis labels and per-day block colors (index = weekday, 0..4).
days = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday']
colors = ['pink', 'lightgreen', 'lightblue', 'wheat', 'salmon']
day_label = 'Timetable'   # figure title
fig_width = 10            # inches
fig_height = 6            # inches
def convert_hour(hour):
    """Map a slot index to a clock time expressed in fractional hours."""
    offset = hour * DIFFERENCE_BETWEEN_STARTING_CLASSES_IN_HOURS
    return STARTING_HOUR + offset
def convert_hour_to_text(hour):
    """Map a slot index to an (hours, minutes) pair for display."""
    total_minutes = hour * DIFFERENCE_BETWEEN_STARTING_CLASSES
    return (STARTING_HOUR + total_minutes // MINUTES_IN_HOUR,
            total_minutes % MINUTES_IN_HOUR)
def plot_timetable(student):
    """Render the student's weekly timetable as colored blocks on a grid.

    Each Classwork in `student.timetable` becomes one rectangle whose column
    is its weekday and whose vertical extent is its start/end clock time.
    """
    fig = plt.figure(figsize=(fig_width, fig_height))
    for classwork in student.timetable:
        classwork_name = classwork.classwork_name
        # 0.52 / 0.96 are layout offsets leaving a small gutter between
        # day columns; int(day) still floors back to the weekday index.
        day = classwork.day + 0.52
        start_hour = convert_hour(classwork.hour)
        end_hour = start_hour + DURATION_OF_ONE_CLASSWORK_IN_HOURS
        plt.fill_between([day, day + 0.96], [start_hour, start_hour], [end_hour, end_hour],
                         color=colors[int(day)], edgecolor='k', linewidth=0.5)
        timetable_hour = convert_hour_to_text(classwork.hour)
        # Start time in the block's top-left corner.
        plt.text(day + 0.02, start_hour + 0.05, '{0}:{1:0>2}'.format(int(timetable_hour[0]), int(timetable_hour[1])),
                 va='top', fontsize=7)
        # Subject name centered in the block.
        plt.text(day + 0.48, (start_hour + end_hour) * 0.5, classwork_name, ha='center', va='center', fontsize=11)
    ax = fig.add_subplot(111)
    ax.yaxis.grid()
    ax.set_xlim(0.5, len(days) + 0.5)
    ax.set_ylim(20, 8)  # inverted: morning at the top
    ax.set_xticks(range(1, len(days) + 1))
    ax.set_xticklabels(days)
    ax.set_ylabel('Time')
    # Mirror the axes on the top/right edge of the figure.
    ax2 = ax.twiny().twinx()
    ax2.set_xlim(ax.get_xlim())
    ax2.set_ylim(ax.get_ylim())
    ax2.set_xticks(ax.get_xticks())
    ax2.set_xticklabels(days)
    ax2.set_ylabel('Time')
    plt.title(day_label, y=1.07)
    plt.show()
|
eL1x/timetable-optimizer | src/Student.py | import random
import src.configuration as conf
class Student:
    """A student with a timetable of Classwork terms and an overlap fitness."""

    def __init__(self, id, first_name='Jan', last_name='Kowalski'):
        self.id = id
        self.first_name = first_name
        self.last_name = last_name
        self.timetable = []
        self.fitness = 0

    def generate_random_timetable(self, subjects):
        """Pick one random non-full term per subject, updating term occupancy."""
        for subject in subjects:
            while True:
                term = random.choice(subject.terms)
                if term.num_of_students < conf.MAX_NUM_OF_STUDENTS:
                    break
            self.timetable.append(term)
            term.num_of_students += 1

    def calculate_student_fitness(self):
        """Total overlap minutes across all days plus the busy-days penalty."""
        overlap_total = 0
        for day in conf.DAYS_SPACE:
            overlap_total += self.calculate_day_overlapping(
                self.get_classwork_from_one_day(day))
        total = overlap_total + self.penalty_for_not_free_days()
        self.fitness = total
        return total

    def calculate_not_free_days(self):
        """Number of distinct weekdays that have at least one class."""
        return len({classwork.day for classwork in self.timetable})

    def penalty_for_not_free_days(self):
        """Linear penalty for each busy day beyond the allowed count."""
        excess = self.calculate_not_free_days() - conf.NOT_FREE_DAYS_ALLOWED
        return max(excess, 0) * conf.PENALTY_FOR_NOT_FREE_DAYS

    def get_classwork_from_one_day(self, day):
        """All classes on `day`, ordered by starting slot."""
        return sorted((cw for cw in self.timetable if cw.day == day),
                      key=lambda cw: cw.hour)

    def calculate_day_overlapping(self, day_classwork):
        """Sum of overlap minutes over every pair of classes in one day."""
        overlapped_in_minutes = []
        for index in range(len(day_classwork)):
            self.calculate_difference_for_one_classwork(
                day_classwork, overlapped_in_minutes, index)
        return sum(overlapped_in_minutes)

    def calculate_difference_for_one_classwork(self, day_classwork, overlapped_in_minutes, classwork_index):
        """Append overlap minutes between class `classwork_index` and later ones."""
        base_hour = day_classwork[classwork_index].hour
        for other in range(classwork_index + 1, len(day_classwork)):
            slot_difference = abs(day_classwork[other].hour - base_hour)
            if slot_difference < conf.TIME_SLOTS:
                overlapped_in_minutes.append(
                    (conf.SLOTS_DURING_ONE_CLASSWORK - slot_difference) *
                    conf.DIFFERENCE_BETWEEN_STARTING_CLASSES)
|
eL1x/timetable-optimizer | src/BeeAlgorithm.py | <gh_stars>0
import sys
import os.path
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import random
from copy import deepcopy
import src.configuration as conf
from src.Bee import Bee
from src.Classwork import Classwork
class BeeAlgorithm:
    """Bees-algorithm search over candidate timetables (Bee solutions).

    Lower fitness is better (fewer overlap minutes / penalties).
    NOTE(review): source indentation was lost; nesting reconstructed —
    in particular the post-search summary print is assumed to run after
    the generation loop, not inside it.
    """

    def __init__(self, num_of_bees, num_of_sites, num_of_elite_sites, patch_size, num_of_elite_bees, num_of_other_bees, max_gens):
        self.num_of_bees = num_of_bees
        self.num_of_sites = num_of_sites
        self.num_of_elite_sites = num_of_elite_sites
        self.patch_size = patch_size            # neighbourhood radius (in slots)
        self.num_of_elite_bees = num_of_elite_bees
        self.num_of_other_bees = num_of_other_bees
        self.max_gens = max_gens
        self.population = []
        self.fitness = []        # best-so-far fitness per generation
        self.training = []       # best-of-current-population per generation
        self.returned_best = None
        print('num of students')
        print(conf.NUM_OF_STUDENTS)

    def search(self):
        """Run the search; returns (and stores) the best Bee found."""
        best = None
        self.population = self.generate_population()
        for gen in range(self.max_gens):
            if best and best.fitness == 0:
                break  # perfect timetable found
            print('Gen : ' + str(gen))
            best = self.choose_best_solution(best)
            self.update_population()
            self.fitness.append(best.fitness)
        # Summary: per-term enrollment of the best solution.
        print("Num of students: ")
        for index, subject in enumerate(best.subjects):
            print("Zajęcia #", index)
            for term in subject.terms:
                print(term.num_of_students)
        self.returned_best = best
        return best

    def generate_population(self):
        # Each Bee self-randomizes in its constructor.
        return [Bee(conf.NUM_OF_STUDENTS, conf.NAMES_OF_SUBJECTS) for _ in range(self.num_of_bees)]

    def choose_best_solution(self, best):
        """Evaluate and sort the population; keep the overall best (deep copy)."""
        for bee in self.population:
            bee.calculate_bee_fitness()
        self.population = sorted(self.population, key=lambda x: x.fitness)
        self.training.append(self.population[0].fitness)
        if not best or self.population[0].fitness < best.fitness:
            best = deepcopy(self.population[0])
        print(best.fitness)
        return best

    def generate_next_gen(self):
        """One recruited bee per selected site; elite sites get more recruits."""
        next_gen = []
        for index, bee in enumerate(self.population[:self.num_of_sites]):
            neigh_size = self.num_of_elite_bees if index < self.num_of_elite_sites else self.num_of_other_bees
            next_gen.append(self.search_neigh(bee, neigh_size, self.patch_size))
        return next_gen

    def update_population(self):
        """Replace population with site winners plus fresh scout bees."""
        next_gen = self.generate_next_gen()
        scouts = self.create_scout_bees(self.num_of_bees - self.num_of_sites)
        self.population = next_gen + scouts
        # Shrink the neighbourhood over time (factor 1 = no shrink).
        self.patch_size = self.patch_size * conf.PATCH_SIZE_DECREASE_FACTOR

    def search_neigh(self, parent, neigh_size, patch_size):
        """Best of `neigh_size` perturbed copies of `parent`."""
        neigh = []
        for _ in range(neigh_size):
            neigh.append(self.create_neigh_bee(parent, patch_size))
        for bee in neigh:
            bee.calculate_bee_fitness()
        return sorted(neigh, key=lambda x: x.fitness)[0]

    def create_neigh_bee(self, parent, patch_size):
        """Deep-copy parent and jitter every term's hour within patch_size."""
        new_bee = deepcopy(parent)
        for subject_index, subject in enumerate(parent.subjects):
            new_bee = self.change_term_hour(new_bee, subject, patch_size, subject_index)
        return new_bee

    def create_scout_bees(self, num_of_scouts):
        """Brand-new random solutions for global exploration."""
        scouts_population = []
        for _ in range(num_of_scouts):
            scouts_population.append(Bee(conf.NUM_OF_STUDENTS, conf.NAMES_OF_SUBJECTS))
        return scouts_population

    def change_term_hour(self, bee, subject, patch_size, subject_index):
        """Move every term of `subject` to a nearby hour and sync students."""
        for term_index, term in enumerate(subject.terms):
            new_hour = self.choose_new_hour(term, patch_size)
            bee.subjects[subject_index].terms[term_index] = Classwork(term.classwork_name, term.day, new_hour, term.id, term.num_of_students)
            bee = self.update_students(bee, term, new_hour)
        return bee

    def choose_new_hour(self, term, patch_size):
        """Random hour within +/- patch_size of the term, clamped to the grid."""
        random_number = random.random()
        new_hour = round(term.hour + random_number * patch_size if random_number < 0.5 else term.hour - random_number * patch_size)
        new_hour = min(new_hour, conf.HOURS_SPACE[-1])
        new_hour = max(new_hour, conf.HOURS_SPACE[0])
        return new_hour

    def update_students(self, bee, term, new_hour):
        """Propagate a term's new hour into every student's timetable copy."""
        for student in bee.students:
            self.update_student_classwork(student, term, new_hour)
        return bee

    def update_student_classwork(self, student, term, new_hour):
        # Match on (subject name, term id) — students hold Classwork references.
        for classwork in student.timetable:
            if classwork.classwork_name == term.classwork_name and classwork.id == term.id:
                classwork.hour = new_hour

    def get_fitness(self):
        """Best-so-far fitness trace (one value per generation)."""
        return self.fitness

    def get_training_process(self):
        """Best-of-population fitness trace (one value per generation)."""
        return self.training
eL1x/timetable-optimizer | tests/test_Student.py | <filename>tests/test_Student.py<gh_stars>0
import sys
import os.path
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import pytest
from src.Student import Student
from src.Subject import Subject
from src.Classwork import Classwork
sub = Subject('First', 1)
@pytest.fixture
def classworks_with_overlapping():
    """Three same-day classes at slots 5, 7 and 20 — the first two collide."""
    return [Classwork('Test', random_day=1, random_hour=h, id=h) for h in [5, 7, 20]]
@pytest.fixture
def classworks_without_overlapping():
    """Three same-day classes spaced widely enough never to overlap."""
    return [Classwork('Test', random_day=1, random_hour=h, id=h) for h in [5, 15, 30]]
@pytest.fixture
def multiple_classworks_with_overlapping():
    """The overlapping 5/7/20 pattern repeated on every weekday."""
    return [Classwork('Test', random_day=d, random_hour=h, id=h)
            for d in range(5) for h in [5, 7, 20]]
@pytest.fixture
def two_classes_starts_the_same_time():
    """Two distinct classes sharing day 1 and hour 10 (fully overlapping)."""
    return [Classwork('Test', random_day=1, random_hour=10, id=3),
            Classwork('Test', random_day=1, random_hour=10, id=4)]
def test_calculate_day_overlapping_with_no_overlapping(classworks_without_overlapping):
    """Widely spaced classes contribute zero overlap minutes."""
    student = Student(id=1)
    assert student.calculate_day_overlapping(classworks_without_overlapping) == 0
def test_calculate_day_overlapping_with_overlapping(classworks_with_overlapping):
    """Slots 5 and 7 overlap by 4 slots -> 4 * 15 = 60 minutes."""
    student = Student(id=1)
    assert student.calculate_day_overlapping(classworks_with_overlapping) == 60
def test_calculate_student_fitness_with_overlapping(classworks_with_overlapping):
    """Single-day overlap feeds straight into the student fitness."""
    student = Student(id=1)
    student.timetable = classworks_with_overlapping
    assert student.calculate_student_fitness() == 60
def test_calculate_student_fitness_with_multiple_overlapping(multiple_classworks_with_overlapping):
    """Five overlapping days: 5 * 60 overlap minutes + not-free-days penalty."""
    student = Student(id=1)
    student.timetable = multiple_classworks_with_overlapping
    assert student.calculate_student_fitness() == 300
def test_with_two_classes_starts_the_same_time():
    """Identical start times overlap for the full class duration (90 min)."""
    student = Student(id=1)
    student.timetable = [Classwork('Test', random_day=1, random_hour=18, id=3),
                         Classwork('Test', random_day=1, random_hour=18, id=4)]
    assert student.calculate_student_fitness() == 90
|
# --- class/slot geometry ---
DURATION_OF_ONE_CLASSWORK = 90                  # minutes
DURATION_OF_ONE_CLASSWORK_IN_HOURS = 1.5
DIFFERENCE_BETWEEN_STARTING_CLASSES = 15        # minutes between adjacent start slots
SLOTS_DURING_ONE_CLASSWORK = 6                  # 90 / 15 slots per class
# --- swarm sizing ---
NUM_OF_STUDENTS = 50
NUM_OF_BEES = 200
NUM_OF_ELITE_BEES = 100                          # recruits per elite site
NUM_OF_OTHER_BEES = 50                           # recruits per non-elite site
NUM_OF_SITES = 50                                # sites explored each generation
# NOTE(review): elite sites (100) exceed total sites (50), so every selected
# site is treated as elite in BeeAlgorithm — confirm this is intended.
NUM_OF_ELITE_SITES = 100
PATCH_SIZE = 2                                   # initial neighbourhood radius (slots)
MAX_GENS = 300
MAX_NUM_OF_STUDENTS = 20                         # per-term enrollment cap
DIFFERENCE_BETWEEN_STARTING_CLASSES_IN_HOURS = 0.25
STARTING_HOUR = 8                                # first slot of the day (8:00)
PATCH_SIZE_DECREASE_FACTOR = 1                   # 1 -> the patch never shrinks
NAMES_OF_SUBJECTS = ['Sieci komputerowe', 'Systemy pomiarowe', 'Teoria sterowania', 'Badania operacyjne',
                     'Technika mikroprocesorowa', 'Podstawy robotyki', 'Teoria optymalizacji']
DAYS_SPACE = range(5)                            # Monday..Friday
HOURS_SPACE = range(45)                          # 45 quarter-hour start slots
TIME_SLOTS = DURATION_OF_ONE_CLASSWORK // DIFFERENCE_BETWEEN_STARTING_CLASSES
MINUTES_IN_HOUR = 60
SPARE_TERMS = 2                                  # extra terms beyond bare capacity
PENALTY_FOR_NOT_FREE_DAYS = 5                    # per busy day over the allowance
NOT_FREE_DAYS_ALLOWED = 4
ALLOWED_FITNESS = 50
ADDITIONAL_STUDENTS = 3
NUM_OF_TERMS = 5
eL1x/timetable-optimizer | src/main.py | import sys
import os.path
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import src.configuration as conf
import pickle
from src.BeeAlgorithm import BeeAlgorithm
# Run the bee search for every (student count, subject count) combination
# and pickle each finished optimizer for later analysis.
names = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J']
for num_of_students in [60, 80, 100]:
    for index in [5, 6, 7]:
        # Mutates the shared configuration module before constructing bees.
        conf.NAMES_OF_SUBJECTS = names[:index]
        conf.NUM_OF_STUDENTS = num_of_students
        bee_algorithm = BeeAlgorithm(conf.NUM_OF_BEES, conf.NUM_OF_SITES, conf.NUM_OF_ELITE_SITES, conf.PATCH_SIZE,
                                     conf.NUM_OF_ELITE_BEES, conf.NUM_OF_OTHER_BEES, conf.MAX_GENS)
        bee_algorithm.search()
        # File name encodes the experiment, e.g. "80_6".
        with open('{}_{}'.format(num_of_students, index), 'wb') as f:
            pickle.dump(bee_algorithm, f, pickle.HIGHEST_PROTOCOL)
|
eL1x/timetable-optimizer | src/bees_algorithm.py | from src.Bee import Bee
class Population:
    """Thin container for a fixed-size swarm of Bee solutions."""

    def __init__(self, num_of_bees, num_of_students, names_of_subjects):
        self.num_of_bees = num_of_bees
        self.bees = [Bee(num_of_students, names_of_subjects)
                     for _ in range(num_of_bees)]

    def generate_random_population(self, search_space):
        """Re-randomize every bee (delegates to Bee.generate_random_solution)."""
        for bee in self.bees:
            bee.generate_random_solution(search_space)

    def calculate_population_fitness(self):
        """Stub: population-level fitness evaluation is not implemented yet."""
        for bee in self.bees:
            pass
|
eL1x/timetable-optimizer | src/startingPopulationGenerator.py | <filename>src/startingPopulationGenerator.py<gh_stars>0
import sys
import os.path
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import src.configuration as conf
from src.Bee import Bee
import pickle
# Pre-generate one shared random swarm so repeated experiments can start
# from an identical initial population.
starting_population = [Bee(conf.NUM_OF_STUDENTS, conf.NAMES_OF_SUBJECTS) for _ in range(conf.NUM_OF_BEES)]
with open('startingPopulation.pickle', 'wb') as f:
    pickle.dump(starting_population, f)
eL1x/timetable-optimizer | src/Bee.py | <gh_stars>0
from src.Student import Student
from src.Subject import Subject
from src.configuration import MAX_NUM_OF_STUDENTS, SPARE_TERMS
import random
class Bee:
    """One candidate solution: random terms per subject plus per-student timetables."""

    def __init__(self, num_of_students, names_of_subjects):
        self.students = [Student(id) for id in range(num_of_students)]
        # Enough terms to host everyone, plus some slack.
        self.num_of_terms = num_of_students // MAX_NUM_OF_STUDENTS + SPARE_TERMS
        self.subjects = [Subject(subject_name, position, self.num_of_terms)
                         for position, subject_name in enumerate(names_of_subjects)]
        self.fitness = 0
        for subject in self.subjects:
            subject.generate_random_terms()
        for student in self.students:
            student.generate_random_timetable(self.subjects)

    def calculate_bee_fitness(self):
        """Cache the sum of all students' fitness values (lower is better)."""
        self.fitness = sum(student.calculate_student_fitness()
                           for student in self.students)

    def get_random_student(self):
        """A uniformly random student of this bee."""
        return random.choice(self.students)

    def get_student_with_max_fitness(self):
        """The student with the worst (highest) cached fitness, or None if empty."""
        worst = None
        for student in self.students:
            if worst is None or student.fitness > worst.fitness:
                worst = student
        return worst
|
eL1x/timetable-optimizer | src/Plotter.py | import matplotlib.pyplot as plt
class Plotter:
    """Renders the optimizer's cost history with matplotlib."""

    def plot_cost_function(self, cost_function):
        """Show cost value versus iteration number for a finished run."""
        plt.plot(cost_function)
        plt.title('Optimization process')
        plt.xlabel('Number of iterations')
        plt.ylabel('Value of cost function')
        plt.show()
|
eL1x/timetable-optimizer | src/Subject.py | import random
from src.Classwork import Classwork
import src.configuration as conf
class Subject:
    """A course with a fixed number of alternative terms (Classwork slots)."""

    def __init__(self, name, id, num_of_terms):
        self.id = id
        self.name = name
        self.num_of_terms = num_of_terms
        self.terms = []

    def generate_random_terms(self):
        """Append `num_of_terms` Classwork slots at uniformly random day/hour.

        Bugfix: the previous ``random.randrange(max(space))`` could never pick
        the last element of DAYS_SPACE / HOURS_SPACE (randrange's stop is
        exclusive and ``max(range(n)) == n - 1``), so the final weekday and
        the final hour slot were silently unreachable.  ``random.choice``
        samples the full configured space.
        """
        for index in range(self.num_of_terms):
            random_day = random.choice(conf.DAYS_SPACE)
            random_hour = random.choice(conf.HOURS_SPACE)
            self.terms.append(Classwork(self.name, random_day, random_hour, index))
|
rhasoff/SA-Flask-WTForms | app.py | from flask import Flask, render_template
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, \
SubmitField
from wtforms.validators import ValidationError, DataRequired, \
Email, EqualTo, Length
# Templates are served from the project root ('.') rather than templates/.
app = Flask(__name__, template_folder='.')
# NOTE(review): hard-coded secret key — acceptable for a demo, never production.
app.config['SECRET_KEY']='LongAndRandomSecretKey'
class CreateUserForm(FlaskForm):
    """Signup form: username, email, password + confirmation, marketing opt-in."""
    username = StringField(label=('Username'), validators=[DataRequired(), Length(max=64)])
    email = StringField(label=('Email'), validators=[DataRequired(), Email(), Length(max=120)])
    password = PasswordField(label=('Password'), validators=[DataRequired(), Length(min=8, message='Password should be at least %(min)d characters long')])
    confirm_password = PasswordField(
        label=('Confirm Password'), validators=[DataRequired(message='*Required'),
        EqualTo('password', message='Both password fields must be equal!')])
    # Typo fix in the user-facing label: "merketting" -> "marketing".
    receive_emails = BooleanField(label=('Receive marketing emails.'))
    submit = SubmitField(label=('Submit'))

    def validate_username(self, username):
        """Inline WTForms validator: reject special characters in the username."""
        excluded_chars = " *?!'^+%&/()=}][{$#"
        for char in self.username.data:
            if char in excluded_chars:
                raise ValidationError(f"Character {char} is not allowed in username.")
class GreetUserForm(FlaskForm):
    """One-field form asking for the visitor's name."""
    # Message fix: the validator text was missing the space before
    # "characters" ("%(max)dcharacters").
    username = StringField(
        label=('Enter Your Name:'),
        validators=[DataRequired(),
                    Length(min=5, max=64,
                           message='Name length must be between %(min)d and %(max)d characters')])
    submit = SubmitField(label=('Submit'))
@app.route('/', methods=('GET', 'POST'))
def index():
    """Render the signup form; greet the user by name on a valid POST."""
    form = CreateUserForm()
    if not form.validate_on_submit():
        return render_template('index.html', form=form)
    return f"""<h1> Welcome {form.username.data} </h1>"""
|
Rubtsowa/numba | numba/cuda/decorators.py | from __future__ import print_function, absolute_import, division
from numba import config, sigutils, types
from warnings import warn
from .compiler import (compile_kernel, compile_device, declare_device_function,
AutoJitCUDAKernel, compile_device_template)
from .simulator.kernel import FakeCUDAKernel
def jitdevice(func, link=None, debug=None, inline=False):
    """Wrapper for device-jit.

    Bugfix: `link` previously defaulted to a shared mutable list (``[]``);
    ``None`` is the safe sentinel and is equally falsy for the check below.
    ``link`` is rejected outright because device functions cannot link PTX.
    """
    debug = config.CUDA_DEBUGINFO_DEFAULT if debug is None else debug
    if link:
        raise ValueError("link keyword invalid for device function")
    return compile_device_template(func, debug=debug, inline=inline)
def jit(func_or_sig=None, argtypes=None, device=False, inline=False, bind=True,
        link=None, debug=None, **kws):
    """
    JIT compile a python function conforming to the CUDA Python specification.
    If a signature is supplied, then a function is returned that takes a
    function to compile. If

    :param func_or_sig: A function to JIT compile, or a signature of a function
       to compile. If a function is supplied, then an :class:`AutoJitCUDAKernel`
       is returned. If a signature is supplied, then a function which takes a
       function to compile and returns an :class:`AutoJitCUDAKernel` is
       returned.

       .. note:: A kernel cannot have any return value.
    :type func_or_sig: function or numba.typing.Signature
    :param device: Indicates whether this is a device function.
    :type device: bool
    :param bind: Force binding to CUDA context immediately
    :type bind: bool
    :param link: A list of files containing PTX source to link with the function
    :type link: list
    :param debug: If True, check for exceptions thrown when executing the
       kernel. Since this degrades performance, this should only be used for
       debugging purposes.  Defaults to False.  (The default value can be
       overriden by setting environment variable ``NUMBA_CUDA_DEBUGINFO=1``.)
    :param fastmath: If true, enables flush-to-zero and fused-multiply-add,
       disables precise division and square root. This parameter has no effect
       on device function, whose fastmath setting depends on the kernel function
       from which they are called.
    :param max_registers: Limit the kernel to using at most this number of
       registers per thread. Useful for increasing occupancy.
    """
    # Bugfix: `link` previously defaulted to a shared mutable list ([]).
    # Normalize the None sentinel here so downstream callees still get a list.
    link = [] if link is None else link
    debug = config.CUDA_DEBUGINFO_DEFAULT if debug is None else debug

    if link and config.ENABLE_CUDASIM:
        raise NotImplementedError('Cannot link PTX in the simulator')

    if 'boundscheck' in kws:
        raise NotImplementedError("bounds checking is not supported for CUDA")

    fastmath = kws.get('fastmath', False)
    if argtypes is None and not sigutils.is_signature(func_or_sig):
        if func_or_sig is None:
            # Bare @jit() with options: return a decorator.
            if config.ENABLE_CUDASIM:
                def autojitwrapper(func):
                    return FakeCUDAKernel(func, device=device, fastmath=fastmath,
                                          debug=debug)
            else:
                def autojitwrapper(func):
                    return jit(func, device=device, bind=bind, debug=debug,
                               **kws)

            return autojitwrapper
        # func_or_sig is a function
        else:
            if config.ENABLE_CUDASIM:
                return FakeCUDAKernel(func_or_sig, device=device,
                                      fastmath=fastmath, debug=debug)
            elif device:
                return jitdevice(func_or_sig, debug=debug, **kws)
            else:
                targetoptions = kws.copy()
                targetoptions['debug'] = debug
                return AutoJitCUDAKernel(func_or_sig, bind=bind,
                                         targetoptions=targetoptions)

    else:
        # An explicit signature was given: return a specializing decorator.
        if config.ENABLE_CUDASIM:
            def jitwrapper(func):
                return FakeCUDAKernel(func, device=device, fastmath=fastmath,
                                      debug=debug)
            return jitwrapper

        restype, argtypes = convert_types(func_or_sig, argtypes)

        if restype and not device and restype != types.void:
            raise TypeError("CUDA kernel must have void return type.")

        def kernel_jit(func):
            kernel = compile_kernel(func, argtypes, link=link, debug=debug,
                                    inline=inline, fastmath=fastmath)

            # Force compilation for the current context
            if bind:
                kernel.bind()

            return kernel

        def device_jit(func):
            return compile_device(func, restype, argtypes, inline=inline,
                                  debug=debug)

        if device:
            return device_jit
        else:
            return kernel_jit
def autojit(*args, **kwargs):
    """Deprecated alias for :func:`jit`; warns and forwards all arguments."""
    warn('autojit is deprecated and will be removed in a future release. Use jit instead.')
    return jit(*args, **kwargs)
def declare_device(name, restype=None, argtypes=None):
    """Insert a declaration for an externally defined device function."""
    rtype, atypes = convert_types(restype, argtypes)
    return declare_device_function(name, rtype, atypes)
def convert_types(restype, argtypes):
    """Normalize a (restype, argtypes) pair.

    A full signature (object or type string) may be passed as `restype`,
    in which case `argtypes` must be None and both parts are extracted
    from the signature.
    """
    if sigutils.is_signature(restype):
        assert argtypes is None
        argtypes, restype = sigutils.normalize_signature(restype)
    return restype, argtypes
|
jdbrice/root-io | rootio/TDirectory.py | <reponame>jdbrice/root-io<filename>rootio/TDirectory.py
from rootio import TBuffer
class TDirectory (object) :
    """In-memory stand-in for a ROOT TDirectory record.

    Holds the key list (`fKeys`) of one directory inside a ROOT file and
    supports recursive lookup by slash-separated path.
    NOTE(review): source indentation was lost; nesting below is reconstructed
    (see GetKey in particular).
    """

    def __init__(self, file, dirname, cycle) :
        self.fFile = file           # owning file reader
        self._typename = "TDirectory"
        self.dir_name = dirname
        self.dir_cycle = cycle
        self.fKeys = []             # TKey dicts, filled by ReadKeys

    def to_json( self ) :
        """Dict of the streamed TDirectory fields (for JSON serialization)."""
        obj = {
            'fSeekKeys' : self['fSeekKeys'],
            'dir_name' : self['dir_name'],
            'dir_cycle' : self['dir_cycle'],
            'fDatimeM' : self['fDatimeM'],
            'fNbytesName' : self['fNbytesName'],
            'fTitle' : self['fTitle'],
            'fDatimeC' : self['fDatimeC'],
            'fSeekParent' : self['fSeekParent'],
            'fKeys' : self['fKeys'],
            'fSeekDir' : self['fSeekDir'],
            'fNbytesKeys' : self['fNbytesKeys'],
            # "_typename": self._typename,
            # "dir_name" : self.dir_name,
            # "dir_cycle" : self.dir_cycle,
            # "fKeys" : self.fKeys
        }
        return obj

    def __getitem__(self, key):
        # Attribute access via subscript, mirroring the JS object style.
        return getattr(self, key)

    def __setitem__(self, key, value) :
        object.__setattr__( self, key, value )

    def list_keys(self, prefix) :
        """Recursively print every key under this directory as [class]: path."""
        for k in self.fKeys :
            fqn=prefix + "/" + k['fName']
            print( "[%s]: " %( k['fClassName'] ) + fqn )
            if "TDirectory" == k['fClassName'] :
                tdir = self.fFile.ReadObject( fqn )
                tdir.list_keys( prefix=fqn )

    def GetKey(self, keyname, cycle ) :
        """Find a key by (possibly slash-separated) name and cycle, or None."""
        # Direct match in this directory's key list.
        for i in range( 0, len(self.fKeys) ) :
            if self.fKeys[i]['fName'] == keyname and self.fKeys[i]['fCycle'] == cycle :
                return self.fKeys[i]
        # Otherwise walk the path from the longest directory prefix down.
        pos = keyname.rfind( '/' )
        while pos > 0 :
            dirname = keyname[0:pos]
            subname = keyname[pos+1:]
            dirkey = self.GetKey( dirname, 1 )
            if None != dirkey and "fClassName" in dirkey and "TDirectory" in dirkey['fClassName'] :
                tdir = self.ReadObject( dirname )
                if None != tdir :
                    return tdir.GetKey( subname, cycle )
            pos = keyname.rfind( '/', 0, pos-1 )
        return None
        # NOTE(review): the source contained a second `return None`;
        # reconstructed here as dead code after the one above — confirm
        # against the original file.
        return None

    #TODO : add second part of impl
    def ReadKeys(self, objbuf ) :
        """Stream this directory from `objbuf`, then read its key list.

        Appends the directory to the owning file's fDirectories on success.
        """
        objbuf.ClassStreamer( self, 'TDirectory' )
        if self.fSeekKeys <= 0 or self.fNbytesKeys <= 0 :
            return None
        file = self.fFile
        blob = file.ReadBuffer([ self.fSeekKeys, self.fNbytesKeys ] )
        if None == blob :
            return None
        buf = TBuffer( blob, 0, file, None )
        # First TKey is the header key of the key list itself.
        buf.ReadTKey()
        nkeys = buf.ntoi4()
        for i in range(0, nkeys) :
            self.fKeys.append( buf.ReadTKey() )
        file.fDirectories.append( self )
|
jdbrice/root-io | rootio/ROOT.py | <reponame>jdbrice/root-io
# -*- coding: utf-8 -*-
# @Author: jdb
# @Date: 2017-06-14 17:52:34
# @Last Modified by: Daniel
# @Last Modified time: 2017-09-21 09:23:26
import logging
from . import UnZip
import json
from rootio.StreamerDict import Streamers
from rootio.IOData import IOData
def BIT( n ) :
    """Return an integer with only bit *n* set (i.e. 1 << n)."""
    return 1 << n
class ROOT(object):
    """Static helper namespace for the ROOT I/O machinery (a port of the
    JSROOT JavaScript implementation): maps type names to IOData type ids
    and turns TStreamerElement descriptions into per-member reader
    callables."""

    logger = logging.getLogger( "ROOT" )

    @staticmethod
    def AddClassMethods( classname, streamer ) :
        """Attach convenience methods for *classname* to *streamer*.

        Currently a stub: passes the streamer through unchanged (None in,
        None out)."""
        if streamer is None :
            return None
        ROOT.logger.debug( "AddClassMethods : Missing Impl" )
        return streamer

    @staticmethod
    def GetArrayKind( type_name ) :
        """Classify *type_name*: 0 for string-like types, the IOData element
        type id for TArray* classes, -1 for everything else."""
        ROOT.logger.debug( "GetArrayKind( %s )", type_name )
        if type_name in ( "TString", "string" ) :
            return 0
        if type_name in Streamers.CustomStreamers and Streamers.CustomStreamers[ type_name ] == 'TString' :
            return 0
        if len( type_name ) < 7 or 'TArray' not in type_name :
            return -1
        # key is the TArray class name, value is the element type id
        array_types = {
            "TArrayI" : IOData.kInt,
            "TArrayD" : IOData.kDouble,
            "TArrayF" : IOData.kFloat,
            "TArrayS" : IOData.kShort,
            "TArrayC" : IOData.kChar,
            "TArrayL" : IOData.kLong,
            "TArrayL64" : IOData.kLong64,
        }
        return array_types.get( type_name, -1 )

    @staticmethod
    def CreateMemberSimpleStreamer( name, code ) :
        """Return a reader closure that stores one value (struct-style format
        *code*) under key *name*; binding happens at creation time."""
        def streamer_func( buf, obj ) :
            obj[name] = buf.ntox( code )
        return streamer_func

    @staticmethod
    def CreateMember (element, file) :
        """Create the member-reader entry for one TStreamerElement.

        Returns a dict describing the member; its 'func' entry is a callable
        (buf, obj) that reads the member's bytes from *buf* into *obj*.
        """
        ROOT.logger.debug( "CreateMember( element=%s, file=%s )", element, file )
        found = False
        member = {
            "name": element['fName'],
            "type": element['fType'],
            "fArrayLength": element['fArrayLength'],
            "fArrayDim": element['fArrayDim'],
            "fMaxIndex": element['fMaxIndex']
        }
        if element['fTypeName'] == "BASE" :
            if ROOT.GetArrayKind( member['name'] ) > 0 :
                # workaround for arrays as base class: create an 'fArray'
                # member which is read like any other data member
                member['name'] = 'fArray'
                member['type'] = IOData.kAny
            else :
                # create streamer for the base class
                member['type'] = IOData.kBase
        t = member['type']
        # simple scalar types map straight to a struct format code
        simple = {
            IOData.kShort: "h",
            IOData.kInt: "i",
            IOData.kCounter: "i",
            IOData.kLong: "u",
            IOData.kLong64: "u",
            IOData.kDouble: "d",
            IOData.kFloat: "f",
            IOData.kLegacyChar: "B",
            IOData.kUChar: "B",
            IOData.kUShort: "H",
            IOData.kBits: "I",
            IOData.kUInt: "I",
            IOData.kULong64: "U",
            IOData.kULong: "U"
        }
        if t == IOData.kBase :
            found = True
            member['base'] = element['fBaseVersion']   # indicate base class
            member['basename'] = element['fName']      # keep class name
            def func(buf, obj) :
                buf.ClassStreamer( obj, member['basename'] )
            member['func'] = func
        if member['type'] in simple :
            found = True
            member['func'] = ROOT.CreateMemberSimpleStreamer( member['name'], simple[ member['type'] ] )
            return member
        if t == IOData.kBool :
            found = True
            def func( buf, obj ) :
                obj[member['name']] = True if buf.ntou1() != 0 else False
            member['func'] = func
        # element types that may appear as fixed-size (kOffsetL) or
        # counted-pointer (kOffsetP) arrays
        memberL = [
            (IOData.kBool),
            (IOData.kInt),
            (IOData.kCounter),
            (IOData.kDouble),
            (IOData.kUChar),
            (IOData.kShort),
            (IOData.kUShort),
            (IOData.kBits),
            (IOData.kUInt),
            (IOData.kULong),
            (IOData.kULong64),
            (IOData.kLong),
            (IOData.kLong64),
            (IOData.kFloat)
        ]
        if (t - IOData.kOffsetL) in memberL :
            found = True
            if element['fArrayDim'] < 2 :
                member['arrlength'] = element['fArrayLength']
                def func( buf, obj ) :
                    # BUG FIX: was ROOT.getLogger (no such attribute)
                    logging.getLogger("memberL").info( "member %s", member )
                    obj[member['name']] = buf.ReadFastArray( member['arrlength'], member['type'] - IOData.kOffsetL )
                member[ 'func' ] = func
            else :
                member['arrlength'] = element['fMaxIndex'][ element['fArrayDim'] - 1 ]
                member['minus1'] = True
                def rnda( buf, obj ) :
                    def rfa( buf1, handle ) :
                        # BUG FIX: was ROOT.getLogger (no such attribute)
                        logging.getLogger("memberL").info( "member %s", member )
                        return buf1.ReadFastArray( handle['arrlength'], handle['type'] - IOData.kOffsetL )
                    obj[member['name']] = buf.ReadNdimArray( member, rfa )
                member['func'] = rnda
        if t == IOData.kOffsetL+IOData.kChar :
            found = True
            if element['fArrayDim'] < 2 :
                member['arrlength'] = element['fArrayLength']
                def func( buf, obj ) :
                    obj[member['name']] = buf.ReadFastString(member['arrlength'])
                member['func'] = func
            else :
                member['minus1'] = True   # one dimension is used for char*
                member['arrlength'] = element['fMaxIndex'][ element['fArrayDim']-1 ]
                def rnda( buf, obj ) :
                    def rfs( buf1, handle ) :
                        return buf1.ReadFastString( handle['arrlength'])
                    obj[ member['name'] ] = buf.ReadNdimArray( member, rfs )
                # BUG FIX: the reader was built but never assigned
                member['func'] = rnda
        if (t - IOData.kOffsetP) in memberL :
            found = True
            member['cntname'] = element['fCountName']
            def func( buf, obj ) :
                # a 1-byte flag says whether the counted array is present
                v = buf.ntou1()
                if 1 == v :
                    obj[ member['name'] ] = buf.ReadFastArray( obj[ member['cntname'] ], member['type'] - IOData.kOffsetP )
                else :
                    obj[ member['name'] ] = []
            member['func'] = func
        if t == (IOData.kOffsetP+IOData.kChar) :
            found = True
            member['cntname'] = element['fCountName']
            def func( buf, obj ) :
                v = buf.ntou1()
                if 1 == v :
                    obj[member['name']] = buf.ReadFastString(obj[member['cntname']])
                else :
                    obj[member['name']] = None
            member['func'] = func
        if t == IOData.kDouble32 or t == (IOData.kOffsetL+IOData.kDouble32) or t == (IOData.kOffsetP+IOData.kDouble32):
            found = True
            member['double32'] = True
            # SKIP - need to fill in
        if t == IOData.kAnyP or t == IOData.kObjectP :
            found = True
            def func( buf, obj ) :
                def roa( buf1, handle ) :
                    return buf1.ReadObjectAny()
                obj[ member['name'] ] = buf.ReadNdimArray( member, roa )
            member['func'] = func
        if t == IOData.kAny or t == IOData.kAnyp or t == IOData.kObjectp or t == IOData.kObject:
            found = True
            classname = element[ 'fName' ] if "BASE" == element['fTypeName'] else element['fTypeName']
            if classname[-1] == "*" :
                classname = classname[ 0 : -1 ]
            arr_kind = ROOT.GetArrayKind( classname )
            if arr_kind > 0 :
                member['arrkind'] = arr_kind
                def func( buf, obj ) :
                    obj[ member['name']] = buf.ReadFastArray( buf.ntou4(), member['arrkind'] )
                member['func'] = func
            elif arr_kind == 0 :
                def func( buf, obj ) :
                    obj[ member['name'] ] = buf.ReadTString()
                member['func'] = func
            else :
                member['classname'] = classname
                if element['fArrayLength'] > 1 :
                    def func( buf, obj ) :
                        def rcs( buf1, handle ) :
                            return buf1.ClassStreamer( {}, handle['classname'] )
                        obj[ member['name'] ] = buf.ReadNdimArray( member, rcs )
                    member['func'] = func
                else :
                    def func( buf, obj ) :
                        obj[ member['name'] ] = buf.ClassStreamer( {}, member['classname'] )
                    member['func'] = func
        # Skip - need to fill in
        if t == IOData.kTString:
            found = True
            def func( buf, obj ) :
                # BUG FIX: the value was stored into member['name'] (clobbering
                # the member's own name) instead of into the target object
                obj[ member['name'] ] = buf.ReadTString()
            member['func'] = func
        if not found :
            ROOT.logger.error( "Not FOUND : %d", t )
        return member

    @staticmethod
    def GetTypeId( typename, recurse = True ) :
        """Map a C++/ROOT type name to its IOData type id, or -1 if unknown.

        When *recurse* is true, string-valued custom streamer aliases are
        followed one level."""
        # attribute access used for consistency with the rest of this module
        type_ids = {
            "bool": IOData.kBool,
            "Bool_t": IOData.kBool,
            "char": IOData.kChar,
            "signed char": IOData.kChar,
            "Char_t": IOData.kChar,
            "Color_t": IOData.kShort,
            "Style_t": IOData.kShort,
            "Width_t": IOData.kShort,
            "short": IOData.kShort,
            "Short_t": IOData.kShort,
            "int": IOData.kInt,
            "EErrorType": IOData.kInt,
            "Int_t": IOData.kInt,
            "long": IOData.kLong,
            "Long_t": IOData.kLong,
            "float": IOData.kFloat,
            "Float_t": IOData.kFloat,
            "double": IOData.kDouble,
            "Double_t": IOData.kDouble,
            "unsigned char": IOData.kUChar,
            "UChar_t": IOData.kUChar,
            "unsigned short": IOData.kUShort,
            "UShort_t": IOData.kUShort,
            "unsigned": IOData.kUInt,
            "unsigned int": IOData.kUInt,
            "UInt_t": IOData.kUInt,
            "unsigned long": IOData.kULong,
            "ULong_t": IOData.kULong,
            "int64_t": IOData.kLong64,
            "long long": IOData.kLong64,
            "Long64_t": IOData.kLong64,
            "uint64_t": IOData.kULong64,
            "unsigned long long": IOData.kULong64,
            "ULong64_t": IOData.kULong64,
            "Double32_t": IOData.kDouble32,
            "Float16_t": IOData.kFloat16,
            "char*": IOData.kCharStar,
            "const char*": IOData.kCharStar,
            "const Char_t*": IOData.kCharStar,
        }
        if typename in type_ids :
            return type_ids[ typename ]
        if not recurse :
            return -1
        if typename in Streamers.CustomStreamers :
            replace = Streamers.CustomStreamers[ typename ]
            if type( replace ) == str :
                # BUG FIX: was `true`, which is undefined in Python
                return ROOT.GetTypeId( replace, True )
        return -1
|
jdbrice/root-io | rootio/ttree/TTree.py |
from box import Box
class TTree(object):
    """Minimal TTree wrapper exposing a JSROOT-style ``draw`` entry point."""

    def __init__(self):
        pass

    def draw(self, args):
        """Process this tree with a draw selector.

        *args* may be a plain expression string or a dict/Box with keys
        'expr' and optionally 'branch'.  Returns whatever ``Process``
        returns, or None when no selector could be built.
        """
        if isinstance(args, str):
            args = { "expr" : args }
        args = Box(args)
        if "expr" not in args or args['expr'] is None:
            args['expr'] = ""
        # NOTE(review): TDrawSelector is neither defined nor imported in this
        # module, so this raises NameError at runtime — import it from
        # wherever it is implemented. TODO
        selector = TDrawSelector()
        if "branch" in args:
            # keep `== False` semantics: a return of 0/None must not disable
            # the selector, only an explicit False does
            if selector.draw_only_branch(self, args.branch, args.expr, args) == False:
                selector = None
        if selector is None:
            # NOTE(review): self.logger is never initialized on TTree — confirm
            self.logger.error("BAD, no selector" )
            return
        return self.Process(selector, args)
|
jdbrice/root-io | rootio/ttree/TDrawVariable.py | <gh_stars>1-10
class TDrawVariable(object) :
    """Placeholder for a TTree::Draw expression variable; not yet implemented."""
    def __init__(self):
        # no state yet; to be populated when draw-expression parsing is added
        pass
jdbrice/root-io | rootio/Histogram.py | import logging
import matplotlib.pyplot as plt
import numpy as np
import copy
import sys
from IPython.display import display
class Histogram(object) :
    """ndarray-backed view of a streamed ROOT histogram (TH1/TH2).

    Built from the dict produced by the ROOT I/O layer (expects 'fXaxis',
    'fYaxis', 'fZaxis' and the flat 'fArray' of bin contents, including
    under/overflow bins).
    """

    def __init__(self, hist_obj=None) :
        # NOTE(review): logger name "rootio.TFile" looks copy-pasted from
        # TFile — confirm intent before renaming.
        self.logger = logging.getLogger( "rootio.TFile" )
        self.edges = {}     # per-axis bin-edge arrays, keyed 'x'/'y'/'z'
        self.centers = {}   # per-axis bin centers
        self.widths = {}    # per-axis bin widths
        self.n_bins = {}    # per-axis bin counts
        self.mean = {}      # per-axis weighted means (only 1D 'x' computed)
        self.vals = None    # nd array of bin contents (no under/overflow)
        if hist_obj is not None :
            self.build( hist_obj )

    def clone(self) :
        """Return a deep copy of this histogram."""
        nh = Histogram()
        nh.edges = copy.deepcopy( self.edges )
        nh.centers = copy.deepcopy( self.centers )
        nh.widths = copy.deepcopy( self.widths )
        nh.n_bins = copy.deepcopy( self.n_bins )
        nh.mean = copy.deepcopy( self.mean )
        nh.vals = copy.deepcopy( self.vals )
        nh.n_dim = self.n_dim
        return nh

    def __add__(self, other) :
        """Bin-by-bin sum; assumes identical binning.  Returns None when the
        shapes differ."""
        if self.vals.shape == other.vals.shape :
            nh = self.clone()
            for i in np.arange( 0, len(self.vals) ) :
                nh.vals[i] = self.vals[i]+other.vals[i]
            return nh
        return None

    @staticmethod
    def add( a, b, scale_a=1.0, scale_b=1.0 ) :
        """Return scale_a*a + scale_b*b as a new histogram (inputs untouched)."""
        ac = a.clone()
        bc = b.clone()
        ac.scale(scale_a)
        bc.scale(scale_b)
        nh = ac+bc
        return nh

    def scale(self, factor=1.0) :
        """Multiply all bin contents in place."""
        self.vals = self.vals * factor

    def build( self, root_hist_obj ) :
        """Populate axes, values and means from a streamed ROOT hist dict."""
        for axis in ["x", "y", "z" ] :
            self.make_axis( root_hist_obj, axis=axis )
        # an axis counts toward dimensionality only if it has >1 edge
        self.n_dim = sum( len(self.edges[k]) > 1 for k in self.edges )
        self.calc_ndarray( root_hist_obj )
        for axis in ["x", "y", "z" ] :
            self.mean[axis] = self.make_mean( axis = axis )

    def make_axis( self, h_obj, **kwargs ) :
        """Fill edges/centers/widths/n_bins for one axis ('x' by default)."""
        axis = kwargs.get( "axis", "x" )
        self.edges[axis] = self.make_edges( h_obj, axis=axis )
        self.centers[axis] = self.make_centers( axis=axis )
        self.widths[axis] = self.make_widths( axis=axis )
        self.n_bins[axis] = len( self.centers[axis] )

    def make_edges(self, h_obj, **kwargs) :
        """Build the bin-edge array for one axis from fXaxis/fYaxis/fZaxis.

        Uses explicit fXbins when present, otherwise derives uniform edges
        from fNbins/fXmin/fXmax.  Returns an empty array for absent or
        single-bin axes.
        """
        axis = kwargs.get( "axis", "x" )
        try :
            ax = h_obj[ 'f' + axis.upper() + "axis" ]
        except KeyError :
            # BUG FIX: the original swallowed the KeyError and then used the
            # never-assigned `bins`, raising NameError; treat as "no axis".
            return np.array([])
        nbins = ax.get("fNbins", 0)
        bmin = ax.get("fXmin", 0)
        bmax = ax.get("fXmax", 0)
        bins = ax.get("fXbins", [])
        if len(bins) <= 1 :
            # 1-bin (or degenerate) axes are treated as absent
            if nbins <= 1 :
                return np.array([])
            bin_edges = np.zeros( nbins+1 )
            bw = (bmax - bmin) / nbins
            self.logger.debug("n=%d, (%f, %f), w=%f", nbins, bmin, bmax, bw )
            for i in np.arange( 0, nbins ) :
                bin_edges[ i ] = bw * i + bmin
            bin_edges[ nbins ] = bmax
            return bin_edges
        return np.array(bins)

    def get_edges( self, **kwargs ) :
        """Return edges for an axis: place='left' (default), 'right', 'center'."""
        axis = kwargs.get( "axis", "x" )
        place = kwargs.get( "place", "left" )
        if "right" == place :
            return self.edges[axis][1:]
        if "center" == place :
            return self.centers[axis]
        return self.edges[axis][:-1]

    def make_mean( self, **kwargs ) :
        """Weighted mean of bin centers (1D, x axis only); None otherwise.

        NOTE(review): raises ZeroDivisionError for an all-empty histogram —
        confirm whether callers guard against that.
        """
        axis = kwargs.get( "axis", "x" )
        if 1 == self.n_dim and 'x' == axis:
            nsum = 0
            n = 0
            for c, v in zip( self.centers[axis], self.vals ) :
                n = n + v
                nsum = nsum + c * v
            return float( nsum ) / float(n)

    def make_centers( self, **kwargs ) :
        """Midpoints of consecutive edges for one axis (empty if <2 edges)."""
        axis = kwargs.get( "axis", "x" )
        bins = self.edges[ axis ]   # may raise KeyError if axis unknown
        if len(bins) < 2 :
            return np.array([])
        return (bins[:-1] + bins[1:]) / 2.0

    def make_widths( self, **kwargs ) :
        """Widths of consecutive edge pairs for one axis (empty if <2 edges)."""
        axis = kwargs.get( "axis", "x" )
        bins = self.edges[axis]
        if len(bins) < 2 :
            return np.array([])
        return np.diff(bins)

    def calc_ndarray(self, root_hist_obj) :
        """Copy bin contents out of the flat fArray into self.vals
        (dropping the under/overflow bins)."""
        if 1 == self.n_dim :
            self.vals = np.empty( shape=(self.n_bins['x']) )
            for x in np.arange( 0, self.n_bins['x'] ) :
                self.vals[x] = self.value_at_index( root_hist_obj ,x+1 )
        if 2 == self.n_dim :
            self.vals = np.empty( shape=(self.n_bins['y'], self.n_bins['x']) )
            for x in np.arange( 0, self.n_bins['x'] ) :
                for y in np.arange( 0, self.n_bins['y'] ) :
                    self.vals[y][x] = self.value_at_index( root_hist_obj ,x+1, y+1 )

    def value_at_index( self, h_obj, x, y = None, z = None, **kwargs ) :
        """Return the fArray entry at ROOT bin index (x[,y]); indices include
        the underflow bin (index 0)."""
        values = h_obj[ 'fArray' ]
        if 1 == self.n_dim or y is None :
            return values[x]
        if 2 == self.n_dim and x is not None and y is not None:
            # row stride includes the 2 under/overflow bins per row
            w = self.n_bins["x"] + 2
            return values[ x + y * w ]
        return None

    def draw_1d(self, scale=1.0, opt='', **kwargs) :
        """Draw as a 1D matplotlib hist (opt='') or errorbar plot (opt!='')."""
        use_bins = kwargs.get( 'bins', self.edges['x'] )
        kwargs.pop( 'bins', None )
        if '' == opt:
            return plt.hist( self.centers['x'], bins=use_bins, weights=self.vals*scale, **kwargs )
        else:
            return plt.errorbar( self.centers['x'], self.vals, **kwargs )

    def draw_2d( self, **kwargs ) :
        """Draw as a matplotlib hist2d; empty bins are blanked with NaN."""
        x_bins = self.centers['x']
        y_bins = self.centers['y']
        vx = np.empty( shape=( len(x_bins) * len(y_bins) ) )
        vy = np.empty( shape=( len(x_bins) * len(y_bins) ) )
        vw = np.empty( shape=( len(x_bins) * len(y_bins) ) )
        i = 0
        ix = 0
        for x in x_bins :
            iy = 0
            for y in y_bins :
                vx[i] = x
                vy[i] = y
                vw[i] = self.vals[iy][ix]
                if 0 == vw[i] :
                    vw[i] = float('nan')   # blank empty bins in the plot
                i = i + 1
                iy = iy + 1
            ix = ix + 1
        return plt.hist2d( vx, vy, weights=vw, bins=[x_bins, y_bins], **kwargs )

    def load_js(self, **kwargs) :
        """Ask the notebook frontend to load JSROOT."""
        # NOTE(review): _jsCode is built but never passed to display() —
        # looks unfinished; confirm whether the requirejs config is needed.
        _jsCode = """
        requirejs.config({{
        paths: {{
        'JSRootCore' : 'https://root.cern.ch/js/notebook//scripts/JSRootCore',
        }}
        }});
        """
        display( {"application/vnd.rootjs_load.v0+json" : kwargs.get('data', None)}, raw=True )

    def draw_js( self, **kwargs ) :
        """Render via JSROOT in a notebook; kwargs: data (JSON), opts (draw options)."""
        jsDivId = "root_pad"
        jsonContent = kwargs.get( 'data', None )
        jsDrawOptions = kwargs.get( 'opts', "" )
        _jsCode =f"""
        require(['JSRootCore'],
        function(Core) {{
        var obj = Core.JSONR_unref({jsonContent});
        Core.draw("{jsDivId}", obj, "{jsDrawOptions}");
        }}
        );
        """
        display( {"application/vnd.rootjs_exec.v0+json" : kwargs.get('data', None), "application/javascript" : _jsCode}, metadata={ "application/vnd.rootjs_exec.v0+json" : { "id" : jsDivId } }, raw=True )

    def draw(self, scale=1.0, **kwargs) :
        """Dispatch to the dimension-appropriate draw method."""
        if 1 == self.n_dim :
            return self.draw_1d(scale=scale, **kwargs)
        if 2 == self.n_dim :
            return self.draw_2d(**kwargs)
        if 3 == self.n_dim :
            # NOTE(review): draw_3d is not implemented anywhere in this class;
            # a 3D histogram raises AttributeError here.
            return self.draw_3d(**kwargs)
|
jdbrice/root-io | rootio/StreamerDict.py | <reponame>jdbrice/root-io
import rootio.CustomStreamers as CustomStreamers
import rootio.DirectStreamers as DirectStreamers
from box import Box
# Does Not work!!!
# Maps ROOT class names to their streamer implementations.
# NOTE: the original literal listed "TStreamerString" and
# "TStreamerObjectPointer" twice; in a Python dict literal the later entry
# silently wins, so the earlier CustomStreamers.TStreamerString bindings were
# dead code.  The dead duplicates have been removed (behavior unchanged).
StreamerDict = {
    "CustomStreamers" : {
        "TList" : CustomStreamers.TList,
        "TObject" : CustomStreamers.TObject,
        # member-list form: each entry describes one member reader
        "TNamed" : [
            { "basename" : "TObject", "base": 1, "func" : CustomStreamers.TNamed_TObject },
            { "name" : "fName", "func" : CustomStreamers.TNamed_fName },
            { "name" : "fTitle", "func" : CustomStreamers.TNamed_fTitle },
        ],
        "TStreamerInfo" : CustomStreamers.TStreamerInfo,
        "TObjArray" : CustomStreamers.TObjArray,
        "TStreamerBase" : CustomStreamers.TStreamerBase,
        "TStreamerElement" : CustomStreamers.TStreamerElement,
        "TStreamerObject" : CustomStreamers.TStreamerObject,
        "TStreamerBasicType" : CustomStreamers.TStreamerObject,
        "TStreamerObjectAny" : CustomStreamers.TStreamerObject,
        "TStreamerString" : CustomStreamers.TStreamerObject,
        "TStreamerObjectPointer" : CustomStreamers.TStreamerObject,
        "TStreamerBasicPointer" : CustomStreamers.TStreamerBasicPointer,
        "TStreamerLoop" : CustomStreamers.TStreamerBasicPointer,
        "TStreamerSTL" : CustomStreamers.TStreamerSTL,
        "TObjString" : [
            { "basename" : "TObject", "base" : 1, "func" : CustomStreamers.TObjString_TObject },
            { "name" : "fString", "func" : CustomStreamers.TObjString_fString }
        ]
    },
    "DirectStreamers" : {
        "TKey" : DirectStreamers.TKey,
        "TDatime" : DirectStreamers.TDatime,
        "TDirectory" : DirectStreamers.TDirectory
    },
}
# Box gives attribute-style access (Streamers.CustomStreamers[...])
Streamers = Box( StreamerDict )
jdbrice/root-io | rootio/DirectStreamers.py | <filename>rootio/DirectStreamers.py
import logging
# from pdb import set_trace as bp
def TKey( buf, key ) :
    """Direct streamer for a TKey header: fills *key* from *buf*.

    Keys with fVersion > 1000 ("big" files) store 64-bit seek addresses.
    """
    log = logging.getLogger("DirectStreamers.TKey")
    log.debug( "( buf=%s, obj=%s )", buf, key )
    key['fNbytes'] = buf.ntoi4()
    key['fVersion'] = buf.ntoi2()
    key['fObjlen'] = buf.ntou4()
    key['fDatime'] = buf.ClassStreamer({}, 'TDatime')
    key['fKeylen'] = buf.ntou2()
    key['fCycle'] = buf.ntou2()
    wide = key['fVersion'] > 1000
    if wide :
        key['fSeekKey'] = buf.ntou8()
        buf.shift(8)  # skip seekPdir
    else :
        key['fSeekKey'] = buf.ntou4()
        buf.shift(4)  # skip seekPdir
    key['fClassName'] = buf.ReadTString()
    key['fName'] = buf.ReadTString()
    key['fTitle'] = buf.ReadTString()
    log.debug( "TKey( buf=%s, key=%s )", buf, key )
def TDatime( buf, key ) :
    """Direct streamer for TDatime: a single packed 32-bit date/time word."""
    log = logging.getLogger("DirectStreamers.TDatime")
    log.debug( "( buf=%s, obj=%s )", buf, key )
    key['fDatime'] = buf.ntou4()
    log.debug( "AFTER( buf=%s, obj=%s )", buf, key )
def TDirectory( buf, obj ) :
    """Direct streamer for a TDirectory header.

    Directories with version > 1000 store 64-bit seek positions.
    """
    log = logging.getLogger("DirectStreamers.TDirectory")
    log.debug( "( buf=%s, obj=%s )", buf, obj )
    version = buf.ntou2()
    obj['fDatimeC'] = buf.ClassStreamer({}, 'TDatime')
    obj['fDatimeM'] = buf.ClassStreamer({}, 'TDatime')
    obj['fNbytesKeys'] = buf.ntou4()
    obj['fNbytesName'] = buf.ntou4()
    wide = version > 1000
    obj['fSeekDir'] = buf.ntou8() if wide else buf.ntou4()
    obj['fSeekParent'] = buf.ntou8() if wide else buf.ntou4()
    obj['fSeekKeys'] = buf.ntou8() if wide else buf.ntou4()
    log.debug( "( buf=%s, obj=%s )", buf, obj )
|
jdbrice/root-io | rootio/CustomStreamers.py |
import sys
import logging
from rootio.IOData import IOData, BIT
import math
import json
def TList( buf, obj ) :
    """Custom streamer for TList: reads the list name, then each contained
    object and its option string.  Versions <= 3 produce an empty list."""
    log = logging.getLogger("CustomStreamers.TList")
    log.debug( "TList( buf=%s, obj=%s )", buf, obj )
    if '_typename' not in obj :
        obj['_typename'] = "TList"
    if buf.last_read_version > 3 :
        buf.ClassStreamer(obj, "TObject")
        obj['name'] = buf.ReadTString()
        count = buf.ntou4()
        obj["arr"] = [None] * count
        obj["opt"] = [None] * count
        log.debug( "TList length=%d", count )
        for idx in range(count) :
            log.debug( "Reading object %d", idx )
            obj['arr'][idx] = buf.ReadObjectAny()
            obj['opt'][idx] = buf.ReadTString()
    else :
        obj['name'] = ""
        obj['arr'] = []
        obj['opt'] = []
    log.debug( "TList result ( buf=%s, obj=%s )", buf, obj )
def TObject( buf, obj ) :
    """Custom streamer for TObject: unique id and bit flags; referenced
    objects carry an extra 16-bit process id which is skipped."""
    log = logging.getLogger( "CustomStreamers.TObject" )
    log.debug( "( buf=%s, obj=%s )", buf, obj )
    obj['fUniqueID'] = buf.ntou4()
    obj['fBits'] = buf.ntou4()
    if obj['fBits'] & IOData.kIsReferenced :
        buf.ntou2()   # skip the pid word
    log.debug( "TObject( buf=%s, obj=%s )", buf, obj )
def TStreamerInfo( buf, obj ) :
    """Custom streamer for TStreamerInfo: TNamed base, checksum, class
    version, then the element list."""
    log = logging.getLogger( "CustomStreamers.TStreamerInfo" )
    log.debug( "( buf=%s, obj=%s )", buf, obj )
    buf.ClassStreamer( obj, "TNamed" )
    obj['fCheckSum'] = buf.ntou4()
    obj['fClassVersion'] = buf.ntou4()
    obj['fElements'] = buf.ReadObjectAny()
def TNamed_TObject( buf, obj ) :
    """TNamed member reader: stream the TObject base (tagging the type)."""
    logging.getLogger( "CustomStreamers.TNamed_TObject" ).debug( "( buf=%s, obj=%s )", buf, obj )
    if '_typename' not in obj :
        obj['_typename'] = "TNamed"
    buf.ClassStreamer(obj, "TObject")


def TNamed_fName( buf, obj ) :
    """TNamed member reader: the fName string."""
    logging.getLogger( "CustomStreamers.TNamed_fName" ).debug( "( buf=%s, obj=%s )", buf, obj )
    obj['fName'] = buf.ReadTString()


def TNamed_fTitle( buf, obj ) :
    """TNamed member reader: the fTitle string."""
    logging.getLogger( "CustomStreamers.TNamed_fTitle" ).debug( "( buf=%s, obj=%s )", buf, obj )
    obj['fTitle'] = buf.ReadTString()
def TObjArray( buf, obj ) :
    """Custom streamer for TObjArray: optional TObject base and name
    (version-dependent), element count, lower bound, then the objects."""
    logging.getLogger( "CustomStreamers.TObjArray" ).debug( "( buf=%s, obj=%s )", buf, obj )
    if '_typename' not in obj :
        obj['_typename'] = "TObjArray"
    obj['name'] = ""
    version = buf.last_read_version
    if version > 2 :
        buf.ClassStreamer(obj, "TObject")
    if version > 1 :
        obj['name'] = buf.ReadTString()
    count = buf.ntou4()
    obj['arr'] = [None] * count
    obj['fLast'] = count - 1
    obj['fLowerBound'] = buf.ntou4()
    for idx in range(count) :
        obj['arr'][idx] = buf.ReadObjectAny()
def TStreamerBase( buf, obj ) :
    """Custom streamer for TStreamerBase: TStreamerElement base plus, for
    version > 2, the base-class version."""
    log = logging.getLogger( "CustomStreamers.TStreamerBase" )
    log.debug( "( buf=%s, obj=%s )", buf, obj )
    log.debug( "obj=%s", obj )
    version = buf.last_read_version
    buf.ClassStreamer( obj, "TStreamerElement" )
    if version > 2 :
        obj['fBaseVersion'] = buf.ntou4()


def TStreamerBasicPointer( buf, obj ) :
    """Custom streamer for TStreamerBasicPointer/TStreamerLoop: element base
    plus the counter's version, name and class (version > 1 only)."""
    logging.getLogger( "CustomStreamers.TStreamerBasicPointer" ).debug( "( buf=%s, obj=%s )", buf, obj )
    if buf.last_read_version <= 1 :
        return
    buf.ClassStreamer( obj, "TStreamerElement" )
    obj['fCountVersion'] = buf.ntou4()
    obj['fCountName'] = buf.ReadTString()
    obj['fCountClass'] = buf.ReadTString()


def TStreamerString( buf, obj ) :
    """Custom streamer for TStreamerString: just the element base
    (version > 1 only)."""
    logging.getLogger( "CustomStreamers.TStreamerString" ).debug( "( buf=%s, obj=%s )", buf, obj )
    if buf.last_read_version > 1 :
        buf.ClassStreamer(obj, "TStreamerElement")
def parse_range( val ) :
    """Parse a range-limit string from a streamer element title.

    Plain numbers are converted with float(); strings containing "pi" are
    matched against a small table of multiples ("2pi", "pi/2", ...) with an
    optional leading minus sign, defaulting to pi itself.  None yields 0.
    """
    if val is None :
        return 0
    if "pi" not in val :
        return float(val)
    val = val.strip()
    sign = 1
    if val.startswith("-") :
        sign = -1
        val = val[1:]
    multiples = {
        "2pi" : math.pi * 2,
        "2*pi" : math.pi * 2,
        "twopi" : math.pi * 2,
        "pi/2" : math.pi / 2.0,
        "pi/4" : math.pi / 4.0,
    }
    return sign * multiples.get(val, math.pi)
def TStreamerElement( buf, element ) :
    """Custom streamer for TStreamerElement: TNamed base, type/size/array
    geometry, type name, and (for ranged Double32_t-style members) the
    [min,max,nbits] range parsed from the element title."""
    logging.getLogger( "CustomStreamers.TStreamerElement" ).debug( "( buf=%s, obj=%s )", buf, element )
    ver = buf.last_read_version
    buf.ClassStreamer(element, "TNamed")
    element['fType'] = buf.ntou4()
    element['fSize'] = buf.ntou4()
    element['fArrayLength'] = buf.ntou4()
    element['fArrayDim'] = buf.ntou4()
    element['fMaxIndex'] = buf.ReadFastArray( buf.ntou4() if ver == 1 else 5, IOData.kUInt )
    element['fTypeName'] = buf.ReadTString()
    # ROOT streams bool as unsigned char; normalize the type id
    if (element['fType'] == IOData.kUChar) and ((element['fTypeName'] == "Bool_t") or (element['fTypeName'] == "bool")) :
        element['fType'] = IOData.kBool
    element['fXmin'] = element['fXmax'] = element['fFactor'] = 0
    if (ver == 3) :
        element['fXmin'] = buf.ntod()
        element['fXmax'] = buf.ntod()
        element['fFactor'] = buf.ntod()
    elif (ver > 3) and (element['fBits'] & BIT(6)) : # kHasRange
        p1 = element['fTitle'].find("[")
        if p1 >= 0 and element['fType'] > IOData.kOffsetP :
            p1 = element['fTitle'].find( "[", p1+1 )
        p2 = element['fTitle'].find("]", p1+1)
        logging.getLogger( "CustomStreamers.TStreamerElement" ).debug( "(p1=%d, p2=%d)", p1, p2 )
        if p1 >= 0 and p2 >= p1+2 :
            # NOTE(review): `ROOT` is not imported in this module, so this
            # call raises NameError at runtime; ParseAsArray needs to be
            # imported or reimplemented here. TODO
            # BUG FIX: the slice was [p1 : p2-p1+1], a mistranslation of the
            # JS substr(start, length) idiom; Python slices take an end index.
            arr = ROOT.ParseAsArray( element['fTitle'][ p1 : p2+1 ] )
            # BUG FIX: was `nbit = 32`, leaving `nbits` undefined below
            nbits = 32
            if len( arr ) == 3 :       # BUG FIX: was length(arr) / isNAN(...)
                nbits = int(arr[2])    # int() cannot be NaN; just range-check
                if nbits < 2 or nbits > 32 :
                    nbits = 32
            element['fXmin'] = parse_range( arr[0] )
            element['fXmax'] = parse_range( arr[1] )
            bigint = (1<<nbits) if (nbits < 32) else 0xffffffff
            if element['fXmin'] < element['fXmax'] :
                element['fFactor'] = bigint / (element['fXmax'] - element['fXmin'])
            elif nbits<15 :
                element['fXmin'] = nbits
def TStreamerObject( buf, obj ) :
    """Custom streamer for TStreamerObject (and friends): just the
    TStreamerElement base, for version > 1."""
    logging.getLogger( "CustomStreamers.TStreamerObject" ).debug( "( buf=%s, obj=%s )", buf, obj )
    if buf.last_read_version <= 1 :
        return
    buf.ClassStreamer( obj, "TStreamerElement")
def TStreamerSTL( buf, obj ) :
    """Custom streamer for TStreamerSTL: element base, STL container type
    and contained type, with the set/multimap mixups corrected."""
    buf.ClassStreamer( obj, "TStreamerElement" )
    obj['fSTLtype'] = buf.ntou4()
    obj['fCtype'] = buf.ntou4()
    stl = obj['fSTLtype']
    tname = obj['fTypeName']
    # if I believe the original source, these are not typos
    if stl == IOData.kSTLmultimap and (tname.startswith("set") or tname.startswith("std::set")) :
        obj['fSTLtype'] = IOData.kSTLset
    if stl == IOData.kSTLset and (tname.startswith("multimap") or tname.startswith("std::multimap")) :
        obj['fSTLtype'] = IOData.kSTLmultimap
def TObjString_TObject( buf, obj ) :
    """TObjString member reader: stream the TObject base, tagging the type."""
    if '_typename' not in obj :
        obj['_typename'] = 'TObjString'
    buf.ClassStreamer( obj, 'TObject' )


def TObjString_fString( buf, obj ) :
    """TObjString member reader: the wrapped string value."""
    obj['fString'] = buf.ReadTString()
|
jdbrice/root-io | rootio/ttree/TSelector.py | <reponame>jdbrice/root-io<filename>rootio/ttree/TSelector.py
class TSelector(object) :
    """Base selector used when processing a TTree: tracks which branches to
    read and the member names their values are stored under."""

    def __init__(self) :
        self.branches = []          # list of branches to read
        self.names = []             # member name in tgtobj for each branch
        self.directs = []           # read only the branch itself, no children
        self.break_execution = 0
        self.tgtobj = {}

    def add_branch(self, branch, name, direct):
        """Register *branch*; derive a name when none is given (the branch
        string itself, or a generated "brN").  Returns the branch index."""
        if name is None:
            if type(branch) is str:
                name = branch
            else:
                name = "br" + str(len(self.branches))
        self.branches.append(branch)
        self.names.append(name)
        self.directs.append(direct)
        return len(self.branches) - 1

    def index_ob_branch(self, branch):
        """Index of *branch* in the registered list (ValueError if absent)."""
        return self.branches.index(branch)

    def name_of_branch(self, index):
        """Member name associated with the branch at *index*."""
        return self.names[index]
jdbrice/root-io | rootio/TBuffer.py | # -*- coding: utf-8 -*-
# @Author: jdb
# @Date: 2017-06-14 17:36:08
# @Last Modified by: Daniel
# @Last Modified time: 2017-09-26 17:47:21
from rootio.ROOT import ROOT as ROOT
from rootio.StreamerDict import Streamers
from rootio.IOData import IOData
import struct
import logging
import json
import sys
class TBuffer(object):
# arr should be a byte array
def __init__( self, arr, pos, file, length=None ):
self.logger = logging.getLogger( "rootio.TBuffer" )
self.logger.debug( "Creating TBuffer[ len(arr)=%d, pos=%d, file=%s ] %s", len(arr), pos, file.fURL, self )
self._typename = "TBuffer"
self.arr = arr;
self.o = pos
self.fFile = file
self.length = length if None != length else 0
self.length = len(arr) if None != arr else 0
#self.ClearObjectMap()
self.fTagOffset = 0
self.last_read_version = 0
self.fObjectMap = {}
self.fDisplacement = 0
self.ClearObjectMap()
def to_json(self):
obj = {
# "arr" : self.arr,
"pos" : self.o,
# "tFile" : self.file,
"length" : self.length,
"fTagOffset" : self.fTagOffset,
"last_read_version" : self.last_read_version,
"fObjectMap" : self.fObjectMap,
"fDisplacement" : self.fDisplacement
}
return obj
def dump_state( self ) :
m = {
"_typename" : self._typename,
"len(arr)" : len(self.arr),
"pos" : self.o
}
return m
def locate( self, pos ):
self.o = pos
def shift( self, cnt ):
self.o = self.o + cnt
def remain( self ):
return self.length - self.o
def GetMappedObject( self, tag ):
return self.fObjectMap[ tag ] if tag in self.fObjectMap else None
def MapObject( self, tag, obj ):
if None == obj :
return
self.fObjectMap[tag] = obj
def MapClass(self, tag, classname ):
self.fClassMap[tag] = classname
def GetMappedClass(self, tag):
if tag in self.fClassMap :
return self.fClassMap[tag]
return -1
def ClearObjectMap(self):
self.fObjectMap = {}
self.fClassMap = {}
self.fObjectMap[0] = None
self.fDisplacement = 0
    def ReadVersion(self):
        """Read a class-version record: optional byte count (flagged by
        kByteCountMask), a 16-bit version, and for version <= 0 an optional
        checksum looked up against the file's streamer infos.

        Returns a dict with keys 'val', 'off' and possibly 'bytecount' /
        'checksum'; also updates self.last_read_version /
        self.last_read_checksum.
        """
        ver = {}
        bytecount = self.ntou4()
        if bytecount & IOData.kByteCountMask :
            # the stored count excludes the mask and the 2 version bytes
            ver['bytecount'] = bytecount - IOData.kByteCountMask - 2
        else :
            # no byte count present: rewind and re-read as the version word
            self.shift( -4 )
        self.last_read_version = ver['val'] = self.ntoi2()
        self.last_read_checksum = 0
        ver['off'] = self.o
        # NOTE(review): when the mask branch was not taken, 'bytecount' is
        # missing from ver and a non-positive version raises KeyError here —
        # confirm whether that combination can occur in real files.
        if ver['val'] <= 0 and ver['bytecount'] and ver['bytecount'] >= 4 :
            ver['checksum'] = self.ntou4()
            if None == self.fFile.FindSinfoChecksum( ver['checksum'] ) :
                # unknown checksum: put the 4 bytes back
                self.shift( -4 )
            else :
                self.last_read_checksum = ver['checksum']
        return ver
def CheckByteCount(self, ver, where ):
if 'bytecount' in ver and None != ver['bytecount'] and 'off' in ver and (ver['off'] + ver['bytecount'] != self.o ) and None != where:
self.logger.error( "Mismatch in %s, bytecount expected = %s, got = %s", where, ver['bytecount'], (self.o - ver['off']) )
self.shift( ver['bytecount'] )
return False
return True
def ReadString(self):
return ReadFastString(-1)
    def ReadTString(self) :
        """Read a ROOT TString: a 1-byte length (the escape value 255 means
        the real length follows as 4 bytes), then the character data."""
        self.logger.debug( "ReadTString()" )
        self.logger.debug( "state = %s", self.dump_state() )
        l = self.ntou1()
        if 255 == l :
            # long-string escape: actual length is stored in the next 4 bytes
            l = self.ntou4()
        if 0 == l :
            self.logger.debug( "TString length is 0" )
            return ""
        self.logger.debug( "TString shift=%d", l )
        pos = self.o
        self.shift( l )
        # codeAt/substring are defined elsewhere in this class (not visible
        # in this view); presumably byte-at and byte-range-decode helpers —
        # TODO confirm their semantics.
        if 0 == self.codeAt( pos ) :
            self.logger.debug( "TString is empty " )
            return ''
        tstring = self.substring( pos, pos + l )
        self.logger.debug( "TString = %s", tstring )
        return tstring
def ReadFastString(self, n) :
"""
Reads a string of n chars or if n < 0 then it reads until it gets 0
"""
self.logger.debug( "ReadFastString( %d )", n )
res = ""
closed = False
i = 0
while ( n < 0 or i < n ) :
code = self.ntou1()
if 0 == code :
closed = True;
if n < 0 :
break
if False == closed :
res += chr( code )
self.logger.debug( "String=%s", res )
return res
def ntox( self, code ) :
lens = {
"B" : 1,
"H" : 2,
"I" : 4,
"b" : 1,
"h" : 2,
"i" : 4,
"f" : 4,
"d" : 8,
}
try :
fc = ">" + code;
l = lens[ code ]
v = struct.unpack( fc, self.arr[ self.o : self.o + l ] )
self.o += l
return v[0]
except KeyError :
pass
if "U" == code :
return self.ntou8()
if "u" == code :
return self.ntoi8()
return None
# def ntot(self, n, type) :
# v = struct.unpack( type, self.arr[ self.o : self.o + n ] )[0]
# self.o = self.o + n
# return v
def ntou1(self) :
l = 1
v = struct.unpack( '>B', self.arr[ self.o : self.o + l ] )[0]
self.o += l
return v
def ntou2(self) :
l = 2
v = struct.unpack( '>H', self.arr[ self.o:self.o+l ] )[0]
self.o += l
return v
def ntou4(self) :
l = 4
v = struct.unpack( '>I', self.arr[ self.o:self.o+l ] )[0]
self.o += l
return v
def ntou8(self) :
high = self.ntou4()
low = self.ntou4()
return high * 0x100000000 + low;
def ntoi1(self) :
v = struct.unpack( '>b', self.arr[ self.o ] )[0]
self.o = self.o + 1
return v
def ntoi2(self) :
l = 2
v = struct.unpack( '>h', self.arr[ self.o:self.o+l ] )[0]
self.o += l
return v
def ntoi4(self) :
l = 4
v = struct.unpack( '>i', self.arr[ self.o:self.o+l ] )[0]
self.o += l
return v
def ntoi8(self) :
high = self.ntou4()
low = self.ntou4()
if high < 0x80000000 :
return high * 0x100000000 + low;
return -1 - ((~high) * 0x100000000 + ~low)
def ntof( self ) :
l = 4
v = struct.unpack( '>f', self.arr[ self.o : self.o + l ] )[0]
self.o += l
return v
def ntod( self ) :
l = 8
v = struct.unpack( '>d', self.arr[ self.o : self.o + l ] )[0]
self.o += l
return v
    def ReadFastArray( self, n, array_type ) :
        """Read *n* consecutive values of *array_type* (an IOData.k* code) into a list.

        NOTE(review): the ``kChar`` test in the second-to-last numeric branch
        below is dead code — kChar is already matched two branches earlier —
        so that branch only ever fires for kBool; presumably one of the two
        was meant to be kUChar (TODO confirm against the JSROOT original).
        Reading chars/bools as 2-byte values also looks suspicious — verify.
        """
        self.logger.debug( "ReadFastArray( n=%d, array_type=%s)", n, array_type )
        i = 0
        # o/view are saved but deliberately never restored (see the comment
        # at the bottom of the method)
        o = self.o
        view = self.arr
        array = [None] * n
        func = None
        if IOData.kDouble == array_type :
            func = self.ntod
        elif IOData.kFloat == array_type :
            func = self.ntof
        elif IOData.kLong == array_type or IOData.kLong64 == array_type :
            func = self.ntoi8
        elif IOData.kULong == array_type or IOData.kULong64 == array_type :
            func = self.ntou8
        elif IOData.kInt == array_type or IOData.kCounter == array_type :
            func = self.ntoi4
        elif IOData.kBits == array_type or IOData.kUInt == array_type :
            func = self.ntou4
        elif IOData.kShort == array_type :
            func = self.ntoi2
        elif IOData.kUShort == array_type :
            func = self.ntou2
        elif IOData.kChar == array_type :
            func = self.ntoi2
        elif IOData.kChar == array_type or IOData.kBool == array_type :
            # NOTE(review): kChar here is unreachable (handled above)
            func = self.ntou2
        elif IOData.kTString == array_type :
            func = self.ReadTString
        elif IOData.kDouble32 == array_type or IOData.kFloat16== array_type :
            self.logger.error( "Should not be used with FastArray" )
        else :
            func = self.ntou4
        if None != func :
            for i in range( 0, n ) :
                array[i] = func()
        else :
            self.logger.error( "FUNC Should not be NONE" )
        self.logger.debug( "ReadFastArray() = %s", array )
        # self.o = o not a mistake - dont uncomment
        return array
def ReadNdimArray( self, handle, func ) :
n_dim = handle['fArrayDim']
max_i = handle['fMaxIndex']
if n_dim < 1 and handle['fArrayLength'] > 0 :
n_dim = 1
max_i = [ handle['fArrayLength'] ]
if 'minus1' in handle and None != handle['minus1'] :
n_dim -= 1
if n_dim < 1 :
return func( self, handle )
if 1 == n_dim :
res = [None] * max_i[0]
for i in range( 0, max_i[0] ) :
res[i] = func( self, handle )
if 2 == n_dim :
res = [None] * max_i[0]
for i in range( 0, max_i[0] ) :
res1[None] * max_i[1]
for j in range( 0, max_i[1] ) :
res1[j] = func( self, handle )
res[i] = res1
else :
indx = [0] * n_dim
arr = [ [] ] * n_dim
while indx[0] < max_i[0]:
k = n_dim - 1
arr[k].append( func( self, handle ) )
indx[ k ] += 1
while ndx[k] == max_i[k] and k > 0 :
indx[k] = 0
arr[k - 1].append( arr[k] )
arr[k] = [ ]
k -= 1
indx[ k ] += 1
return res
def can_extract( self, place ) :
for n in range( 0, len(place), 2 ) :
if place[n] + place[n+1] > self.length :
return False
return True
def extract( self, place ) :
if None == self.arr or False == self.can_extract( place ) :
return None
if 2 == len(place) :
return self.arr[ place[0] : place[0] + place[1] ]
res = []
for n in range( 0, len(place), 2 ) :
res[ n/2 ] = self.arr[ place[n] : place[n] + place[n+1] ]
return res
def codeAt(self, pos ) :
if (sys.version_info > (3, 0)):
return struct.unpack( 'B', bytes( [self.arr[ pos ]] ) )[0]
else :
return struct.unpack( 'B', self.arr[ pos ] )[0]
def substring( self, beg, end ) :
res = ""
# TODO : check here
for n in range( beg, end ) :
res += chr( self.codeAt( n ) )
return res
    def ReadTKey( self, key = None ) :
        """Stream a TKey record from the buffer into *key* (a dict) and return it."""
        if None == key :
            key = {}
        self.ClassStreamer( key, 'TKey' )
        # quote-stripping of fName, ported from JSROOT but not yet enabled:
        # name = key.fName.replace( /['"]/g,'' )
        # if name != key.fName :
        #     key.fRealName = key.fName;
        #     key.fName = name;
        return key;
    def ReadClass( self ) :
        """Read the class descriptor that precedes a streamed object.

        Returns a dict containing either 'objtag' (a reference to an object
        that was already read and mapped) or 'name' (the class name, -1 when
        it could not be resolved).
        """
        self.logger.debug( "ReadClass" )
        self.logger.debug( "state = %s", self.dump_state() )
        class_info = { 'name': -1 }
        tag = 0
        bcount = self.ntou4()
        start_pos = self.o
        self.logger.debug( "bcount=%d, start_pos=%d", bcount,start_pos )
        if not ( bcount & IOData.kByteCountMask ) or ( bcount == IOData.kNewClassTag ) :
            # no byte count present: the word we read is already the tag
            self.logger.debug( "ReadClass.A" )
            tag = bcount
            bcount = 0
        else :
            self.logger.debug( "ReadClass.B" )
            tag = self.ntou4()
        if not (tag & IOData.kClassMask) :
            # plain object reference
            self.logger.debug( "ReadClass.C" )
            class_info['objtag'] = tag + self.fDisplacement
            return class_info
        if tag == IOData.kNewClassTag :
            # first occurrence: class name is stored inline; remember it
            self.logger.debug( "ReadClass.D" )
            class_info['name'] = self.ReadFastString( -1 )
            index = self.fTagOffset + start_pos + IOData.kMapOffset
            if self.GetMappedClass( index ) == -1 :
                self.MapClass( index, class_info['name'] )
                self.logger.debug( "ReadClass.E" )
        else :
            # back-reference to a previously mapped class
            self.logger.debug( "ReadClass.F" )
            clTag = (tag & ~IOData.kClassMask) + self.fDisplacement
            class_info['name'] = self.GetMappedClass( clTag )
            if -1 == class_info['name'] :
                self.logger.debug( "ReadClass.G" )
                self.logger.warn( "Could not find class with tag %s",clTag )
        self.logger.debug( "class_info=%s", class_info )
        return class_info
    def ReadObjectAny( self ) :
        """Read one object of any class from the buffer.

        Resolves back-references through the object map; otherwise streams a
        TString, a fast array, or a full object depending on the class kind.
        """
        self.logger.debug( "ReadObjectAny" )
        self.logger.debug( "state = %s", self.dump_state() )
        objtag = self.fTagOffset + self.o + IOData.kMapOffset
        clRef = self.ReadClass()
        self.logger.debug( "clRef = %s", clRef )
        if 'objtag' in clRef :
            # back-reference to an object that was already streamed
            return self.GetMappedObject( clRef['objtag'] )
        if 'name' in clRef and clRef['name'] == -1 :
            return None
        array_kind = ROOT.GetArrayKind( clRef['name'] )
        obj = None
        if 0 == array_kind :
            obj = self.ReadTString()
        elif array_kind > 0 :
            # TArray-like payload: element count followed by the elements
            obj = self.ReadFastArray( self.ntou4(), array_kind )
            self.MapObject( objtag, obj )
        else :
            # generic object: map it BEFORE streaming so self-references resolve
            obj = {}
            self.MapObject( objtag, obj )
            self.ClassStreamer( obj, clRef['name'] )
        return obj
def ClassStreamer( self, obj, classname ) :
self.logger.debug( "ClassStreamer(%s, %s)", obj, classname )
if "TBranchElement" == classname :
pass
# if "_typename" in obj :
try :
self.logger.debug( "obj._typename=%s", obj['_typename'] )
except KeyError as ke:
obj['_typename'] = classname
# if False == hasattr(obj, '_typename' ) or None == obj['_typename'] :
# if '_typename' not in obj :
DirectStreamers = Streamers.DirectStreamers
ds = DirectStreamers[classname] if classname in DirectStreamers else None
if None != ds :
self.logger.debug( 'Calling DirectStreamer["%s"]', classname )
ds( self, obj )
return obj
# No DirectStreamer for this type
# TODO
ver = self.ReadVersion()
self.logger.debug( "[%s] ver: %s", classname , ver )
streamer = self.fFile.GetStreamer( classname, ver )
if None != streamer :
for n in range( 0, len( streamer ) ) :
if 'func' in streamer[n] and callable( streamer[n]['func'] ) :
streamer[n]['func']( self, obj )
# self.logger.info( "%s", json.dumps( obj, indent=4 ) )
else :
self.logger.info( "hmm, should be callable for classname=%s, obj=%s", classname, obj )
else :
self.logger.debug( "ClassStreamer not implemented yet for ", classname )
# TODO: Add Methods
self.logger.debug( "streamer: \n %s", streamer )
self.CheckByteCount( ver, classname )
return obj
|
jdbrice/root-io | rootio/TFile.py | import rootio.ROOT as ROOT
from rootio import TBuffer
from rootio import TDirectory
from rootio.StreamerDict import Streamers
from rootio.IOData import IOData
import os
import logging
from . import UnZip
import box
import json
class TFile (object) :
    def __init__(self, url) :
        """Open the local ROOT file at *url* and read its header, keys and
        streamer infos.  Raises Exception when *url* is not an existing file.
        """
        self.logger = logging.getLogger( "rootio.TFile" )
        # self.logger.debug( "Creating TFile[ url=%s ]", url )
        self._typename = "TFile"
        self.fEND = 0
        self.fFullURL = url
        self.fURL = url
        self.fAcceptRanges = True
        # NOTE(review): fUseStampPar is reassigned to False a few lines down;
        # this string value is dead — confirm which one is intended.
        self.fUseStampPar = "stamp = "
        self.fFileContent = None
        self.fMaxRanges = 200
        self.fDirectories = []
        self.fKeys = []
        self.fSeekInfo = 0
        self.fNbytesInfo = 0
        self.fTagOffset = 0
        self.fStreamerInfos = None
        self.fFileName = ""
        self.fStreamers = {}
        self.fBasicTypes = {}
        # TLocalFile parts
        self.fUseStampPar = False;
        if os.path.isfile( url ):
            # TODO open file
            file = open( url, "rb")
            self.fLocalFile = file
            self.fEND = os.stat( url ).st_size
            self.fFullURL = file.name;
            self.fURL = file.name;
            self.fFileName = file.name;
        else :
            raise Exception( "File DNE" )
        self.ReadKeys()
    def to_json( self ) :
        """Return a plain-dict snapshot of the file header/state for JSON output.

        NOTE(review): several attributes (fBEGIN, fCompress, fDatimeC, ...)
        only exist after ReadKeys() has streamed the header successfully.
        """
        obj = {
            "_typename": self._typename,
            "fAcceptRanges": self.fAcceptRanges,
            "fBEGIN": self.fBEGIN,
            "fBasicTypes": self.fBasicTypes,
            "fCompress": self.fCompress,
            "fDatimeC" : self.fDatimeC,
            "fDatimeM" : self.fDatimeM,
            "fDirectories" : self.fDirectories,
            "fEND" : self.fEND,
            "fFileContent" : self.fFileContent,
            "fFileName" : self.fFileName,
            "fFullURL" : self.fFullURL,
            "fKeys" : self.fKeys,
            "fMaxRanges" : self.fMaxRanges,
            "fNbytesFree" : self.fNbytesFree,
            "fNbytesInfo" : self.fNbytesInfo,
            "fNbytesKeys" : self.fNbytesKeys,
            "fNbytesName" : self.fNbytesName,
            "fSeekDir" : self.fSeekDir,
            "fSeekFree" : self.fSeekFree,
            "fSeekInfo" : self.fSeekInfo,
            "fSeekKeys" : self.fSeekKeys,
            "fSeekParent" : self.fSeekParent,
            "fStreamerInfos" : self.fStreamerInfos,
            "fStreamers" : self.fStreamers,
            "fTagOffset" : self.fTagOffset,
            "fTitle" : self.fTitle,
            "fURL" : self.fURL,
            "fUnits" : self.fUnits,
            "fUseStampPar" : self.fUseStampPar,
            "fVersion" : self.fVersion,
            # "dict" : self.__dict__.keys()
        }
        return obj
    def list_keys(self) :
        """Print every key as '[ClassName]: name', recursing into TDirectory keys.

        NOTE(review): ReadObject may return None for an unreadable directory,
        which would raise AttributeError on the recursive call — confirm.
        """
        for k in self.fKeys :
            fqn = k['fName']
            print( "[%s]: " %( k['fClassName'] ) + fqn )
            if "TDirectory" == k['fClassName'] :
                tdir = self.ReadObject( fqn )
                tdir.list_keys( prefix=fqn )
    def ReadBuffer( self, place ) :
        """Read place[1] bytes starting at byte offset place[0] from the local file."""
        # self.logger.debug( "ReadBuffer( %s )", place )
        self.fLocalFile.seek( place[0] )
        return self.fLocalFile.read( place[1] )
def GetDir(self, dirname, cycle ):
if None == cycle and type(dirname) is str :
pos = s.rfind( ';' )
if pos > 0 :
cycle = dirname[ pos+1: ]
dirname = dirname[ 0:pos ]
for j in range( 0, len(self.fDirectories) ) :
tdir = self.fDirectories[j]
if tdir.dir_name != dirname :
continue
return tdir
return None
    def GetKey(self, keyname, cycle ) :
        """Find the TKey dict for *keyname*/*cycle*, descending into
        sub-directories when the name contains '/' separators.  Returns None
        when no matching key exists."""
        for i in range( 0, len(self.fKeys) ) :
            if 'fName' in self.fKeys[i] and self.fKeys[i]['fName'] == keyname and 'fCycle' in self.fKeys[i] and self.fKeys[i]['fCycle'] == cycle :
                return self.fKeys[i]
        # look for directories
        pos = keyname.rfind( '/' )
        while pos > 0 :
            dirname = keyname[0:pos]
            subname = keyname[pos+1:]
            tdir = self.GetDir( dirname, 1 )
            if None != tdir :
                return tdir.GetKey( subname, cycle )
            dirkey = self.GetKey( dirname, 1 )
            if None != dirkey and "fClassName" in dirkey and "TDirectory" in dirkey['fClassName'] :
                tdir = self.ReadObject( dirname )
                if None != tdir :
                    return tdir.GetKey( subname, cycle )
            # try a shorter directory prefix
            pos = keyname.rfind( '/', 0, pos-1 )
        return None
#TODO : add second part of impl
    def ReadObjBuffer(self, key ) :
        """Read the data payload for *key* and wrap it in a TBuffer,
        decompressing it first when fObjlen exceeds the stored size."""
        # self.logger.debug( "ReadObjBuffer( %s )", key )
        blob1 = self.ReadBuffer( [key['fSeekKey'] + key['fKeylen'], key['fNbytes'] - key['fKeylen']] )
        if None == blob1 :
            return None
        buf = None
        if key['fObjlen'] <= (key['fNbytes'] - key['fKeylen']) :
            # stored uncompressed
            buf = TBuffer( blob1, 0, self, None )
        else :
            # self.logger.debug( "UNZIPPING obj buffer" )
            objbuf = UnZip.R__unzip(blob1, key['fObjlen'])
            if None == objbuf :
                return None
            buf = TBuffer( objbuf, 0, self, None )
        buf.fTagOffset = key['fKeylen']
        return buf
    def AddReadTree(self, obj ) :
        """Stub: register *obj* for tree reading (TTree support not implemented yet)."""
        # self.logger.debug( "AddReadTree( %s )", obj )
        pass
    def Get( self, obj_name, cycle=1 ) :
        """Read *obj_name* and wrap TH1/TH2/TH3 objects in a Histogram.

        NOTE(review): any successfully-read object that is NOT a histogram
        currently falls through to ``return None`` — presumably it should be
        returned as-is; confirm before relying on this for other classes.
        """
        obj = self.ReadObject( obj_name, cycle )
        if None == obj :
            return None
        try :
            from rootio.Histogram import Histogram
            if "TH1" in obj['_typename'] or "TH2" in obj['_typename'] or "TH3" in obj['_typename'] :
                return Histogram( obj )
        except KeyError as ke :
            self.logger.error( ke )
        return None
    def ReadObject(self, obj_name, cycle = 1) :
        """Read the named object from the file.

        Accepts 'name;cycle' suffixes and leading '/' separators.  Returns a
        dict for plain objects, a TDirectory for directory keys, the cached
        StreamerInfo list for the special 'StreamerInfo' key, or None.
        """
        # self.logger.debug( "ReadObject( obj_name=%s, cycle=%d )", obj_name, cycle )
        # if type( cycle ) === function :
        if callable( cycle ) :
            cycle = 1
        pos = obj_name.rfind( ';' )
        if pos > 0 :
            # NOTE(review): only a single digit of the cycle suffix is parsed
            cycle = int( obj_name[pos+1 : pos+2 ] )
            obj_name = obj_name[ 0 : pos ]
        if cycle < 0 :
            cycle = 1
        while ( len(obj_name) > 0 and obj_name[0] == "/" ) :
            obj_name = obj_name[ 1: ]
        key = self.GetKey( obj_name, cycle )
        if None == key :
            return None
        if "StreamerInfo" == obj_name and "TList" == key['fClassName'] :
            return self.fStreamerInfos
        isdir = False
        if "TDirectory" == key['fClassName'] or "TDirectoryFile" == key['fClassName'] :
            isdir = True
            tdir = self.GetDir( obj_name, cycle )
            if None != tdir :
                return tdir
        buf = self.ReadObjBuffer( key )
        if None == buf :
            return None
        if isdir :
            tdir = TDirectory( self, obj_name, cycle )
            tdir.fTitle = key['fTitle']
            tdir.ReadKeys( buf )
            return tdir
        obj = {}
        buf.MapObject( 1, obj )
        buf.ClassStreamer( obj, key['fClassName'] )
        if "TF1" == key['fClassName'] :
            return self.ReadFormulas( obj, -1 )
        #TODO : add Tree support
        return obj
    def ReadFormulas(self, tf1, cnt ) :
        """Stub: read TF1 formulas (not implemented yet; returns None)."""
        # self.logger.debug( "ReadFormulas( ... )" )
        pass
        # TODO :add
def ExtractStreamerInfos( self, buf ) :
# self.logger.debug( "ExtractStreamerInfos( buf=%s )", buf )
if None == buf :
return
lst = {}
buf.MapObject( 1, lst )
buf.ClassStreamer( lst, 'TList' )
lst['_typename'] = "TStreamerInfoList"
self.fStreamerInfos = lst
# self.logger.debug( "fStreamerInfos = \n %s", json.dumps(lst, indent=4) )
# TODO : add to ROOT
# ROOT.addStreamerInfos( lst )
for k in range( 0, len(lst['arr']) ) :
# self.logger.info( "LOOP %d", k )
# self.logger.info( json.dumps( self, indent=4, sort_keys=True ) )
si = lst['arr'][k]
if 'fElements' not in si or None == si['fElements'] :
continue
for l in range( 0, len(si['fElements']['arr']) ) :
elem = si['fElements']['arr'][l]
if 'fTypeName' not in elem or None == elem['fTypeName'] or 'fType' not in elem or None == elem['fType'] :
continue
typ = elem['fType']
typename = elem['fTypeName']
if typ >= 60 :
if IOData.kStreamer == typ and "TStreamerSTL" == elem['_typename'] and None != elem['fSTLtype'] and None != elem['fCtype'] and elem['fCtype'] < 20 :
prefix = IOData.StlNames[ elem['fSTLtype'] ] if None != IOData.StlNames and None != IOData.StlNames[ elem['fSTLtype'] ] else "undef" + "<"
if 0 == typename.find( prefix ) and ">" == typename[ -1 ] :
typ = elem['fCtype']
#TODO trim string
typename = typename[ len(prefix) : len(typename) - len(prefix) - 1 ].strip()
if IOData.kSTLmap == elem['fSTLtype'] or IOData.kSTLmultimap == elem['fSTLtype'] :
if typename.find(',')>0 :
typename = typename[ 0: typename.find( ',' ) ].strip()
else :
continue
if typ > 60 :
continue
else :
if typ > 20 and "*" == typename[ -1 ] :
typename = typename[ 0 : -1 ]
typ = typ % 20
kind = ROOT.ROOT.GetTypeId( typename )
if kind == typ :
continue
if IOData.kBits == typ and IOData.kUInt == kind :
continue
if IOData.kCounter and IOData.kInt == kind :
continue
if None != typename and None != typ :
# self.logger.debug( "Extract basic data type %s %s", typ, typename )
self.fBasicTypes[ typename ] = typ
# self.logger.info( "after extracting streamer info:" )
# self.logger.info( json.dumps( self, indent=4, sort_keys=True ) )
def __getitem__(self, key):
return getattr(self, key)
def __setitem__(self, key, value) :
object.__setattr__( self, key, value )
    def ReadKeys( self ) :
        """Read the ROOT file header, the key directory, and the streamer infos.

        Populates fVersion/fBEGIN/fEND/... from the 'root' header, streams the
        top TDirectory into self, fills self.fKeys, and finally extracts the
        StreamerInfo record.  Returns None early on any structural problem.
        """
        blob = self.ReadBuffer( [0, 1024] )
        if None == blob :
            return None
        buf = TBuffer( blob, 0, self, None )
        ftype = buf.substring( 0, 4 )
        # self.logger.debug( "fType=%s", ftype )
        if ftype != 'root' :
            # self.logger.debug("NOT A ROOT FILE")
            return
        buf.shift( 4 )
        self.fVersion = buf.ntou4()
        self.fBEGIN = buf.ntou4()
        if self.fVersion < 1000000 : # small size
            self.fEND = buf.ntou4()
            self.fSeekFree = buf.ntou4()
            self.fNbytesFree = buf.ntou4()
            nfree = buf.ntoi4()
            self.fNbytesName = buf.ntou4()
            self.fUnits = buf.ntou1()
            self.fCompress = buf.ntou4()
            self.fSeekInfo = buf.ntou4()
            self.fNbytesInfo = buf.ntou4()
        else :
            # "big" files store 64-bit offsets
            self.fEND = buf.ntou8()
            self.fSeekFree = buf.ntou8()
            self.fNbytesFree = buf.ntou8()
            nfree = buf.ntou4()
            self.fNbytesName = buf.ntou4()
            self.fUnits = buf.ntou1()
            self.fCompress = buf.ntou4()
            self.fSeekInfo = buf.ntou8()
            self.fNbytesInfo = buf.ntou4()
        # self.logger.debug("File Header:")
        # self.logger.debug( "self.fVersion = %d", self.fVersion)
        # self.logger.debug( "self.fBEGIN = %d", self.fBEGIN)
        # self.logger.debug( "self.fEND = %d", self.fEND )
        # self.logger.debug( "self.fSeekFree = %d", self.fSeekFree )
        # self.logger.debug( "self.fNbytesFree = %d", self.fNbytesFree )
        # self.logger.debug( "self.fNbytesName = %d", self.fNbytesName )
        # self.logger.debug( "self.fUnits = %d", self.fUnits )
        # self.logger.debug( "self.fCompress = %d", self.fCompress )
        # self.logger.debug( "self.fSeekInfo = %d", self.fSeekInfo )
        # self.logger.debug( "self.fNbytesInfo = %d", self.fNbytesInfo )
        # self.logger.debug( "" )
        if None == self.fSeekInfo or None == self.fNbytesInfo :
            return None
        if 0 == self.fNbytesName or self.fNbytesName > 100000 :
            # self.logger.debug( "Init : cannot read directory info for file :", self.fURL )
            return None
        # size of the directory record that follows the name
        nbytes = self.fNbytesName + 22;
        nbytes += 4; # fDatimeC.Sizeof();
        nbytes += 4; # fDatimeM.Sizeof();
        nbytes += 18; # fUUID.Sizeof();
        if self.fVersion >= 40000 :
            nbytes += 12;
        blob3 = self.ReadBuffer( [self.fBEGIN, max( 300, nbytes )] )
        buf3 = TBuffer( blob3, 0, self, None )
        self.fTitle = buf3.ReadTKey()['fTitle']
        # self.logger.debug( "self.fTitle = %s", self.fTitle )
        buf3.locate( self.fNbytesName )
        # stream the top-level directory record directly into this TFile
        buf3.ClassStreamer( self, 'TDirectory' )
        # self.logger.info( "file now:" )
        # self.logger.info( json.dumps(self, indent=4, sort_keys=True) )
        if False == hasattr( self, 'fSeekKeys' ) or 0 == self.fSeekKeys :
            # self.logger.debug( "Empty key list in", self.fURL )
            return None
        blob4 = self.ReadBuffer( [self.fSeekKeys, self.fNbytesKeys] )
        buf4 = TBuffer( blob4, 0, self, None )
        buf4.ReadTKey()
        nkeys = buf4.ntoi4()
        for i in range( 0, nkeys ) :
            k = buf4.ReadTKey()
            # self.logger.debug( "Adding Key : %s %s, %s ", k['fClassName'], k['fName'], k['fTitle'] )
            self.fKeys.append( k )
        blob5 = self.ReadBuffer( [self.fSeekInfo, self.fNbytesInfo] )
        buf5 = TBuffer( blob5, 0, self, None )
        si_key = buf5.ReadTKey()
        if None == si_key :
            # self.logger.debug( "No info?" )
            return None
        self.fKeys.append( si_key )
        # self.logger.debug( "StreamerInfo:", si_key )
        buf6 = self.ReadObjBuffer( si_key )
        if None != buf6 :
            self.ExtractStreamerInfos( buf6 )
def GetStreamer(self, classname, ver, s_i = None ):
self.logger.debug( "GetStreamer(classname=%s, ver=%s, s_i=%s )", classname, ver, s_i )
if 'TQObject' == classname or 'TBasket' == classname :
return None
fullname = classname
streamer = None
if "TH1" == classname :
self.logger.debug("TH1")
if None != ver and ( 'checksum' in ver or 'val' in ver ) :
fullname += "$chksum" + str(ver['checksum']) if 'checksum' in ver else "$ver" + str(ver['val'])
self.logger.debug( "Looking for streamer : %s",fullname )
streamer = self.fStreamers[ fullname ] if fullname in self.fStreamers else None
if None != streamer :
self.logger.debug( "Found Streamer, just trust me" )
return streamer
self.logger.debug( "Looking for custom streamer named %s", classname)
CustomStreamers = Streamers.CustomStreamers
custom = CustomStreamers[ classname ] if classname in CustomStreamers else None
if None != custom :
self.logger.debug("Found custom streamer for %s", classname )
if type( custom ) == str :
return self.GetStreamer( custom, ver, s_i )
if True == callable( custom ) :
streamer = [ { 'typename' : classname, 'func': custom } ]
return ROOT.ROOT.AddClassMethods( classname, streamer )
streamer = []
if box.BoxList == type( custom ) :
if 'name' not in custom and 'func' not in custom :
return custom
streamer.append( custom )
# check element in streamer infos, one can have special cases
if None == s_i :
s_i = self.FindStreamerInfo(classname, ver['val'], ver['checksum'] if 'checksum' in ver else None);
if None == s_i :
# delete this.fStreamers[fullname];
if fullname in self.fStreamers :
self.logger.debug( "s_i is None but % in Streamers", fullname )
if 'nowarning' not in ver or ver['nowarning'] == None :
self.logger.debug("Not found streamer for %s, ver=%s, checksum=%s, fullname=%s", classname, ver['val'], ver['checksum'] if 'checksum' in ver else None, fullname)
return None
# for each entry in streamer info produce member function
try :
self.logger.debug( "s_i = %s", s_i )
for obj in s_i['fElements']['arr'] :
# obj = s_i['fElements']['arr'][s]
streamer.append( ROOT.ROOT.CreateMember( obj, self ) )
self.logger.debug( "Appending streamer for obj=%s", obj )
except KeyError :
self.logger.debug( "No fElements.arr" )
self.logger.debug( "fStreamers[%s] = %s", fullname, streamer )
self.logger.debug( "fStreamers[%s] = SET", fullname )
self.fStreamers[fullname] = streamer;
return ROOT.ROOT.AddClassMethods(classname, streamer);
    def FindStreamerInfo( self, clname, clversion, clchecksum = None ) :
        """Return the cached streamer info matching *clname* and either
        *clchecksum* (preferred) or *clversion*; None when not found."""
        if None == self.fStreamerInfos :
            return None
        for si in self.fStreamerInfos['arr'] :
            # checksum match wins regardless of name
            if clchecksum != None and si['fCheckSum'] == clchecksum :
                return si
            if si['fName'] != clname :
                continue
            # this means that if it as not found by checksum it should have been None
            # if checksum was given it should match
            if clchecksum != None :
                continue
            if clversion != None and si['fClassVersion'] != clversion :
                continue
            return si
return None |
jdbrice/root-io | rootio/ROOTJS.py | <filename>rootio/ROOTJS.py<gh_stars>1-10
from IPython.display import display
import logging
import json
class ROOTJS(object) :
    """Render ROOT objects in Jupyter notebooks through the JSROOT javascript
    library, by emitting display payloads with custom MIME types that the
    notebook front end executes."""
    LOAD_MIMETYPE = "application/vnd.rootjs_load.v0+json"
    EXEC_MIMETYPE = "application/vnd.rootjs_exec.v0+json"
    JS_MIMETYPE = "application/javascript"
    HTML_MIMETYPE = "text/html"
    # shared state: running counter for generated div ids and the id of the
    # div the next draw() call should target
    canvas_index = 0
    active_div_id = ""
    # def __init__( self ) :
    # self.logger = logging.getLogger( "rootio.ROOTJS" )
    @staticmethod
    def load( ) :
        """Configure requirejs so the notebook can resolve the JSRootCore module."""
        _jsCode = """
        requirejs.config({
            paths: {
                'JSRootCore' : 'https://root.cern.ch/js/notebook//scripts/JSRootCore',
            }
        });
        """
        obj = {
            ROOTJS.LOAD_MIMETYPE : _jsCode,
            ROOTJS.JS_MIMETYPE : _jsCode
        }
        display( obj, raw=True )
    @staticmethod
    def canvas( **kwargs ) :
        """Create a display div ('div', 'width'/'w', 'height'/'h' kwargs) and
        remember its id as the active drawing target."""
        jsDivId = "root_pad_" + str( ROOTJS.canvas_index )
        ROOTJS.canvas_index = ROOTJS.canvas_index+1
        # if they passed in a div id then use that instead
        ROOTJS.active_div_id = kwargs.get( 'div', jsDivId )
        obj = {
            ROOTJS.EXEC_MIMETYPE : kwargs.get('data', None),
            ROOTJS.JS_MIMETYPE : "",
            ROOTJS.HTML_MIMETYPE : ""
        }
        metadata = {
            ROOTJS.EXEC_MIMETYPE : {
                "id" : ROOTJS.active_div_id,
                "width" : kwargs.get( 'width', kwargs.get( 'w', 500 ) ),
                "height" : kwargs.get( 'height', kwargs.get( 'h', 500 ) )
            }
        }
        display( obj, metadata=metadata, raw=True )
    @staticmethod
    def draw( **kwargs ) :
        """Draw the JSON object in kwargs['data'] (dict or JSON string) into the
        active canvas (or kwargs['div']) with JSROOT draw options kwargs['opts']."""
        # create a default canvas if needed
        if "" == ROOTJS.active_div_id and None == kwargs.get( 'div', None ):
            ROOTJS.canvas()
        jsDivId = ROOTJS.active_div_id
        # if they passed in a div id then use that instead
        div_id = kwargs.get( 'div', jsDivId )
        jsonContent = kwargs.get( 'data', None )
        if isinstance( jsonContent, dict ) :
            jsonContent = json.dumps( jsonContent )
        jsDrawOptions = kwargs.get( 'opts', "" )
        _jsCode =f"""
        require(['JSRootCore'],
            function(Core) {{
                var obj = Core.JSONR_unref({jsonContent});
                Core.draw("{div_id}", obj, "{jsDrawOptions}");
            }}
        );
        """
        obj = {
            ROOTJS.EXEC_MIMETYPE : kwargs.get('data', None),
            ROOTJS.JS_MIMETYPE : _jsCode
        }
        metadata = {
            ROOTJS.EXEC_MIMETYPE : {
                "id" : div_id,
                "width" : kwargs.get( 'width', 500 ),
                "height" : kwargs.get( 'height', 500 )
            }
        }
display( obj, metadata=metadata, raw=True ) |
jdbrice/root-io | rootio/ttree/TDrawSelector.py | <reponame>jdbrice/root-io
from rootio.ttree.TSelector import TSelector
from rootio.ttree.TDrawVariable import TDrawVariable
import logging
class TDrawSelector(TSelector):
    """Selector implementing TTree::Draw-style expression drawing/dumping.

    Port of JSROOT's TDrawSelector.  The original still contained JavaScript
    leftovers (``null``, a ``?:`` ternary, an undefined ``callback``) that
    made the module fail to even parse; they are fixed below without changing
    the intended behavior.
    """
    def __init__(self):
        self.logger = logging.getLogger("TDrawSelector")
        self.ndim = 0
        self.vars = [] # array of expression variables
        self.cut = None # cut variable (was JS 'null')
        self.hist = None # was JS 'null'
        # NOTE(review): the original assigned an undefined name 'callback';
        # default to None until the caller installs one.
        self.histo_callback = None
        self.histo_drawopt = ""
        self.hist_name = "$htemp"
        self.hist_title = "Result of TTree::Draw"
        self.hist_args = [] # arguments for histogram creation
        self.arr_limit = 1000 # number of accumulated items before create histogram
        self.htype = "F"
        self.monitoring = 0
        # fix: read in draw_only_branch before it was ever assigned
        self.dump_values = False
        self.globals = {} # object with global parameters, which could be used in any draw expression
        self.last_progress = 0
        self.aver_diff = 0
    def draw_only_branch(self, tree, branch, expr, args):
        """Configure the selector to dump (or draw) a single *branch* of *tree*."""
        self.ndim = 1
        if 0 == expr.find("dump"):
            expr = ":" + expr
        expr = self.parse_parameters(tree, args, expr)
        try:
            self.monitoring = args.monitoring
        except:
            pass
        if "dump" in args:
            self.dump_values = True
            args.reallocate_objects = True
        if True == self.dump_values:
            self.hist = []
            self.leaf = args['leaf']
            self.copy_fields = False
            try:
                if None != args.branch.fLeaves and len(args.branch.fLeaves.arr) > 1 and 'leaf' not in args:
                    self.copy_fields = True
                if None != args.branch.fBranches and len(args.branch.fBranches.arr) > 0 and 'leaf' not in args:
                    self.copy_fields = True
            except:
                pass
            self.add_branch(branch, "br0", args.direct_branch)
            self.process_action = "dump"
            return True
        # NOTE(review): the author marked the path below as unreachable
        print("DONT GET HERE")
        self.vars.append(TDrawVariable(self.globals))
        if False == self.vars[0].parse(tree, self, expr, branch, args.direct_branch):
            return False
        # fix: the original used a JavaScript ?: ternary here (SyntaxError)
        self.hist_title = "drawing branch '" + branch.fName + (("' expr:'" + expr) if expr else "") + "' from " + tree['fName']
        self.cut = TDrawVariable(self.globals)
        return True
    def parse_parameters(self, tree, args, expr):
        """Strip trailing ';name[:value]' parameters from *expr*, recording
        recognized ones ('dump') on *args*; return the stripped expression."""
        if None == expr or str != type(expr):
            return ""
        pos = expr.rfind(";")
        while pos >= 0:
            parname = expr[pos+1:]
            expr = expr[0:pos]
            pos = expr.rfind(";")
            separ = parname.find(":")
            if separ > 0:
                parvalue = parname[separ+1:]
                parname = parname[0:separ]
            if "dump" == parname:
                args.dump = True
        return expr
|
jdbrice/root-io | rootio/IOData.py | <filename>rootio/IOData.py
from box import Box
def BIT( n ) :
    """Return an int with only bit *n* set (equivalent to 1 << n)."""
    return 2 ** n
# Constant table mirroring ROOT's streamer member-type codes (TStreamerInfo)
# and STL container ids; wrapped in a Box for attribute-style access (IOData.kInt).
io_data = {
    "kBase": 0, "kOffsetL": 20, "kOffsetP": 40,
    "kChar": 1, "kShort": 2, "kInt": 3, "kLong": 4, "kFloat": 5, "kCounter": 6, "kCharStar": 7, "kDouble": 8, "kDouble32": 9, "kLegacyChar ": 10,
    "kUChar": 11, "kUShort": 12, "kUInt": 13, "kULong": 14, "kBits": 15, "kLong64": 16, "kULong64": 17, "kBool": 18, "kFloat16": 19,
    "kObject": 61, "kAny": 62, "kObjectp": 63, "kObjectP": 64, "kTString": 65,
    "kTObject": 66, "kTNamed": 67, "kAnyp": 68, "kAnyP": 69, "kAnyPnoVT": 70, "kSTLp": 71,
    "kSkip": 100, "kSkipL": 120, "kSkipP": 140, "kConv": 200, "kConvL": 220, "kConvP": 240,
    "kSTL": 300, "kSTLstring": 365, "kStreamer": 500, "kStreamLoop": 501,
    "kMapOffset": 2,
    "kByteCountMask": 0x40000000,
    "kNewClassTag": 0xFFFFFFFF,
    "kClassMask": 0x80000000,
    "Mode": "array", # could be string or array, enable usage of ArrayBuffer in http requests
    "NativeArray": True,
    "TypeNames" : ["BASE", "char", "short", "int", "long", "float", "int", "const char*", "double", "Double32_t", "char", "unsigned char", "unsigned short", "unsigned", "unsigned long", "unsigned", "Long64_t", "ULong64_t", "bool", "Float16_t"],
    "kNotSTL": 0, "kSTLvector": 1, "kSTLlist": 2, "kSTLdeque": 3, "kSTLmap": 4, "kSTLmultimap": 5,
    "kSTLset": 6, "kSTLmultiset": 7, "kSTLbitset": 8, "kSTLforwardlist": 9,
    "kSTLunorderedset" : 10, "kSTLunorderedmultiset" : 11, "kSTLunorderedmap" : 12,
    "kSTLunorderedmultimap" : 13, "kSTLend" : 14,
    # names of STL containers
    "StlNames" : [ "", "vector", "list", "deque", "map", "multimap", "set", "multiset", "bitset"],
    "kStreamedMemberWise": BIT(14),
    "kSplitCollectionOfPointers": 100,
    "kIsReferenced": BIT(4),
    "kHasUUID": BIT(5),
    # "GetArrayKind" : GetArrayKind.__func__,
    # "GetTypeId" : GetTypeId.__func__,
    # "CreateMember" : CreateMember.__func__,
}
IOData = Box( io_data ) |
jdbrice/root-io | rootio/UnZip.py | <gh_stars>1-10
import struct
import zlib
import gzip
import logging
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
def getChar( arr, curr ) :
    """Return the byte at *curr* as a single-character bytes object."""
    (ch,) = struct.unpack_from( 'c', arr, curr )
    return ch
def getCode( arr, curr ) :
    """Return the byte at *curr* as a signed integer."""
    (code,) = struct.unpack_from( 'b', arr, curr )
    return code
def R__unzip( arr, tgtsize, src_shift = 0 ) :
    """Decompress a ROOT compressed buffer into *tgtsize* bytes.

    A ROOT zipped payload is a sequence of blocks, each with a 9-byte header:
    two magic chars ('ZL' = zlib, 'CS' = old zlib, 'XZ' = LZMA, unsupported
    here), a method byte (8 = deflate), a 3-byte little-endian compressed
    size and a 3-byte little-endian uncompressed size.

    Fix: multi-block buffers are now concatenated — previously every block
    overwrote ``tgtbuf``, so any payload split into more than one block came
    back corrupted (only the last block survived).

    Returns the decompressed bytes, or None on any error.
    """
    logger = logging.getLogger("R__unzip")
    logger.debug( "R__unzip( len(arr)=%d, tgtsize=%d, src_shift=%d )", len(arr), tgtsize, src_shift )
    totallen = len(arr)
    curr = src_shift
    fullres = 0
    tgtbuf = b""
    headersize = 9
    while fullres < tgtsize :
        logger.debug( "curr=%d", curr )
        if curr + headersize >= totallen :
            logger.debug( "Error in R__unxip : header size exceeds buffer size" )
            return None
        magic = arr[ curr : curr + 2 ]
        method = struct.unpack_from( 'b', arr, curr + 2 )[0]
        logger.debug( "%s", magic )
        if b'ZL' == magic and 8 == method :
            off = 2  # skip the 2-byte zlib stream header (raw inflate below)
        elif b'CS' == magic and 8 == method :
            off = 0
        else :
            # includes the b'XZ' (LZMA) magic, which is not supported
            logger.debug( "ZLIB format not supported" )
            return None
        # 3-byte little-endian compressed size, plus the header itself
        b0, b1, b2 = struct.unpack_from( '3B', arr, curr + 3 )
        srcsize = headersize + ( b0 | ( b1 << 8 ) | ( b2 << 16 ) )
        # -15 => raw deflate; bytes after the stream end are ignored by zlib
        chunk = zlib.decompress( arr[ curr + headersize + off : ], -zlib.MAX_WBITS )
        tgtbuf += chunk
        fullres += len( chunk )
        curr += srcsize
    if fullres != tgtsize :
        logger.debug( "R__unzip: failed to unzip data. Expects %d, got %d", tgtsize, fullres )
        return None
    return tgtbuf
return tgtbuf |
jdbrice/root-io | tests/__init__.py | from __future__ import absolute_import
import unittest
import doctest
import sys
import os
def main():
    """Run the package test suite (suite construction is currently disabled)."""
    verbosity = 1 + sys.argv.count('-v')
    runner = unittest.TextTestRunner(verbosity=verbosity)
    # suite = all_tests_suite()
    # raise SystemExit(not runner.run(suite).wasSuccessful())
if __name__ == '__main__':
    # allow running this file directly from a source checkout: put the
    # repository root on sys.path before the package import in main() runs
    import os
    import sys
    sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
main() |
jdbrice/root-io | rootio/__init__.py | <reponame>jdbrice/root-io
version = "0.0.1"
from .TBuffer import TBuffer
from .TFile import TFile
from .TDirectory import TDirectory
from .Histogram import Histogram
from .ROOTJS import ROOTJS
|
jdbrice/root-io | tests/read_file_object.py | # -*- coding: utf-8 -*-
# @Author: jdb
# @Date: 2017-06-14 17:36:12
# @Last Modified by: Daniel
# @Last Modified time: 2017-09-21 11:11:23
import rootio.TBuffer as TBuffer
from rootio.TFile import TFile
import json
import rootio.make_json_serializable
import logging
import sys
# configure root logging before any TFile machinery emits records
logging.basicConfig(filename='example.log',level=logging.INFO, filemode="w")
# usage: read_file_object.py <file.root> [object-name]
if ( len(sys.argv) < 2 ) :
    exit()
tfile = TFile( sys.argv[1] )
# tfile.ReadKeys()
# logging.info( "TFile after reading Keys" )
# logging.info( json.dumps( tfile, indent=4, sort_keys=True ) )
tfile.list_keys()
# logging.info( json.dumps(tfile.fkeys, indent=4) )
if len(sys.argv) >= 3 :
    # optionally read a named object and dump it as JSON
    print("READING OBJECT", sys.argv[2] )
    obj = tfile.ReadObject( sys.argv[2] )
    print( "hello danny" )
    print( "%s" % (json.dumps(obj, indent=True) ))
    # hist = tfile.Get( sys.argv[2] )
    # print "hist ndim =", hist.n_dim
|
xebialabs-community/xlr-logreport-plugin | src/main/resources/restapi/logger/getLogConfig.py | # Copyright 2020 XEBIALABS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import org.slf4j.LoggerFactory as LoggerFactory
import ch.qos.logback.classic.Level as logLevels
import json
def getLogLevel( loggerName="console" ):
    """Return a list of {"logger": name, "level": level-string} dicts for every
    logger currently known to the logback context.

    Loggers with no explicitly configured level are reported with "".
    """
    audit = LoggerFactory.getLogger("logmanager")
    # Touch the named logger so it exists in the context before we list.
    LoggerFactory.getLogger(loggerName)
    context = LoggerFactory.getILoggerFactory()
    audit.info("===================")
    levels = []
    for item in context.getLoggerList():
        level = item.getLevel()
        text = level.toString() if level is not None else ""
        audit.info("%s = %s" % (item.getName(), text))
        levels.append({"logger": item.getName(), "level": text})
    audit.info("===================")
    return levels
def setLogLevel( loggerName="console", logLevel = "DEBUG"):
    """Set the level of the logger named `loggerName` to `logLevel`.

    The level string is upper-cased and resolved via logback's Level.toLevel.
    Does nothing if no logger with that name exists in the context.
    """
    # Fixed: the original referenced `myLogger`, a module-level global that is
    # only defined further down the script; bind it locally so this function
    # is self-contained.  Also dropped the unused `loggerMap` local.
    myLogger = LoggerFactory.getLogger("logmanager")
    logLevel = logLevel.upper()
    loggerContext = LoggerFactory.getILoggerFactory()
    loggerList = loggerContext.getLoggerList()
    for loggerItem in loggerList:
        if( loggerItem.getName() == loggerName ):
            myLogger.info("Setting %s to %s" % (loggerName, logLevel))
            loggerItem.setLevel( logLevels.toLevel( logLevel ) )
            myLogger.info("%s = %s" % (loggerName, logLevel))
            return
# --- request handler body ---------------------------------------------------
# `request` and `response` are injected by the XL Release scripting API;
# this runs top to bottom on every call to the endpoint.
myLogger = LoggerFactory.getLogger("logmanager")
verb = "GET"
if (request):
    if (request.query):
        if (request.query['verb']):
            verb = request.query['verb']
# verb=SET: change one logger's level before reporting all levels.
if( verb == "SET"):
    loggerName = request.query['logger']
    logLevel = request.query['level']
    myLogger.info("Setting %s to %s" % (loggerName, logLevel))
    setLogLevel(loggerName, logLevel)
loggerMap = getLogLevel()
#loggerMap = {}
myLogger.debug("%s" % json.dumps(loggerMap, indent=4, sort_keys=True))
response.entity = {"status": "OK", "data":loggerMap }
|
xebialabs-community/xlr-logreport-plugin | src/main/resources/restapi/logger/generatelogger2.py | # Copyright 2020 XEBIALABS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import org.slf4j.LoggerFactory;
import ch.qos.logback.classic.Logger as Logger
import ch.qos.logback.classic.LoggerContext as LoggerContext
import ch.qos.logback.classic.encoder.PatternLayoutEncoder as PatternLayoutEncoder
import ch.qos.logback.classic.sift.MDCBasedDiscriminator as MDCBasedDiscriminator
import ch.qos.logback.classic.sift.SiftingAppender as SiftingAppender
import ch.qos.logback.classic.spi.ILoggingEvent as ILoggingEvent
import ch.qos.logback.core.Appender as Appender
import ch.qos.logback.core.Context as Context
import ch.qos.logback.core.joran.spi.JoranException as JoranException
import ch.qos.logback.core.rolling.RollingFileAppender as RollingFileAppender
import ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP as SizeAndTimeBasedFNATP
import ch.qos.logback.core.rolling.TimeBasedRollingPolicy as TimeBasedRollingPolicy
import ch.qos.logback.core.sift.AppenderFactory as AppenderFactory
def buildAppender( context, discriminatingValue):
    """Create and start a RollingFileAppender writing to
    `<filename>-<discriminatingValue>.txt` with a time-based rolling policy.

    Translated from the original Java-flavoured snippet, which was not valid
    Python/Jython (``new X()``, generic declarations, semicolons) and could
    never have executed.
    """
    ple = PatternLayoutEncoder()
    ple.setPattern("%date [%thread] [%file:%line] %msg%n")
    ple.setContext(context)
    ple.start()
    logFileAppender = RollingFileAppender()
    logFileAppender.setContext(context)
    logFileAppender.setName("File-" + discriminatingValue)
    logFileAppender.setEncoder(ple)
    # NOTE(review): `filename` is a module-level global set under __main__;
    # confirm it is defined before this is called from putNewAppender().
    logFileAppender.setFile(filename + "-" + discriminatingValue + ".txt")
    # NOTE(review): the original declared SizeAndTimeBasedRollingPolicy and
    # FileSize, neither of which is imported; TimeBasedRollingPolicy is the
    # closest imported equivalent.  The original 512kb/1gb size caps are
    # dropped until FileSize is imported -- confirm desired limits.
    logFilePolicy = TimeBasedRollingPolicy()
    logFilePolicy.setContext(context)
    logFilePolicy.setParent(logFileAppender)
    logFilePolicy.setFileNamePattern(filename + "-" + discriminatingValue + ".log")
    logFilePolicy.setMaxHistory(5)
    logFilePolicy.start()
    logFileAppender.setRollingPolicy(logFilePolicy)
    logFileAppender.start()  # the original called start() twice; once is enough
    return logFileAppender
def putNewAppender(file, log):
    """Attach a started SiftingAppender (keyed on the MDC entry
    ``logFileName``) to the logger named like `log`, and return that logger.

    Translated from the original Java-flavoured snippet, which was not valid
    Python/Jython (``SiftingAppender sa = ...``, lowercase ``false``).
    """
    # NOTE(review): the module imports `org.slf4j.LoggerFactory` without an
    # alias; confirm the bare name `LoggerFactory` resolves under Jython.
    lc = LoggerFactory.getILoggerFactory()
    logger = lc.getLogger(log.getName())
    sa = SiftingAppender()
    sa.setName("SIFT")
    sa.setContext(lc)
    discriminator = MDCBasedDiscriminator()
    discriminator.setKey("logFileName")
    discriminator.setDefaultValue("head0")
    discriminator.start()
    sa.setDiscriminator(discriminator)
    # NOTE(review): logback expects an AppenderFactory here, but the original
    # passed the result of buildAppender(lc, discriminator) -- and with the
    # discriminator, not its value.  Preserved as-is; confirm intent.
    sa.setAppenderFactory(buildAppender(lc, discriminator))
    sa.start()
    logger.addAppender(sa)
    logger.setAdditive(False)  # was Java `false`, a NameError in Python
    return logger
def tail(f, n, offset=None):
    """Reads n lines from file path *f* with an offset of *offset* lines.

    Returns a tuple ``(lines, has_more)`` where `has_more` is `True` if there
    are more lines in the file before the returned window.

    Bug fixes vs. the original:
      * the file is opened once and always closed -- the original re-opened it
        on every retry without closing, and crashed with NameError when
        ``open()`` itself raised inside the try block (``fo`` was unbound);
      * opened in binary mode, because Python 3 text-mode files reject
        end-relative seeks;
      * the line-length estimate stays an ``int`` -- the original drifted to
        float via ``*= 1.3``, which ``seek()`` rejects on Python 3.
    """
    avg_line_length = 74
    to_read = n + (offset or 0)
    with open(f, "rb") as fo:
        while True:
            try:
                # Jump back far enough (we hope) to cover to_read lines.
                fo.seek(-(avg_line_length * to_read), 2)
            except IOError:
                # File is smaller than the estimate: read from the start.
                fo.seek(0)
            pos = fo.tell()
            lines = fo.read().decode("utf-8", "replace").splitlines()
            if len(lines) >= to_read or pos == 0:
                return (lines[-to_read:offset and -offset or None],
                        len(lines) > to_read or pos > 0)
            # Estimate was too small: grow it and retry.
            avg_line_length = int(avg_line_length * 1.3) + 1
if __name__ == "__main__":
    # Smoke test: show the last 10 lines of the container log.
    filename = "container.log"
    tail(filename,10)
# `response` is injected by the XL Release scripting API when this file runs
# as an endpoint (it is undefined when executed directly under __main__).
response.entity = {"status": "OK"}
|
xebialabs-community/xlr-logreport-plugin | src/main/resources/restapi/logger/getLogAppenders.py | <filename>src/main/resources/restapi/logger/getLogAppenders.py
# Copyright 2020 XEBIALABS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# https://stackoverflow.com/questions/16910955/programmatically-configure-logback-appender?noredirect=1
#
import ch.qos.logback.core.Appender as LogAppender
import ch.qos.logback.core.util.COWArrayList as COWArrayList
import ch.qos.logback.classic.encoder.PatternLayoutEncoder as PatternLayoutEncoder
import ch.qos.logback.core.FileAppender as FileAppender
import org.slf4j.LoggerFactory as LoggerFactory
import ch.qos.logback.classic.Level as logLevels
import json
def getLogAppenders( loggerName="console" ):
    """Return one {"name": ..., "appender": "NA"} entry per distinct appender
    attached to any logger in the logback context.

    The "appender" value is a placeholder ("NA") kept for payload
    compatibility with the existing UI.
    """
    loggerMap = []
    myLogger = LoggerFactory.getLogger("logmanager")
    loggerContext = LoggerFactory.getILoggerFactory()
    myLogger.error("===================")
    seen = {}
    for logger in loggerContext.getLoggerList():
        appenderList = logger.iteratorForAppenders()
        while appenderList.hasNext():
            appender = appenderList.next()
            # Fixed: log through the audit logger -- the original logged
            # through the logger currently being inspected.
            myLogger.error("Logger %s" % appender.getName())
            if appender.getName() not in seen:
                # Fixed: the original never populated its dedupe map, so the
                # same appender could be reported once per logger.
                seen[appender.getName()] = True
                loggerMap.append({"name": appender.getName(), "appender": "NA"})
                myLogger.error("Appender %s: %s" % (appender.getName(), "NA"))
    myLogger.error("===================")
    return loggerMap
def createLogAppender( name, file ):
    """Create a started FileAppender writing to `file`, attach it to the
    logger called `name`, and return that logger.
    """
    lc = LoggerFactory.getILoggerFactory()
    ple = PatternLayoutEncoder()
    ple.setPattern("%date %level [%thread] %logger{10} [%file:%line] %msg%n")
    ple.setContext(lc)
    ple.start()
    fileAppender = FileAppender()
    fileAppender.setFile(file)
    fileAppender.setEncoder(ple)
    fileAppender.setContext(lc)
    fileAppender.start()
    # Fixed: the original called LoggerFactory.getLogger(string) -- `string`
    # is a module-level global that only exists on the verb=create path; the
    # `name` parameter was silently ignored.
    logger = LoggerFactory.getLogger(name)
    logger.addAppender(fileAppender)
    #logger.setLevel(logLevels.DEBUG)
    # set to true if root should log too
    logger.setAdditive(True)
    return logger
# --- request handler body ---------------------------------------------------
# `request` and `response` are injected by the XL Release scripting API.
myLogger = LoggerFactory.getLogger("logmanager")
verb = "GET"
if (request):
    if (request.query):
        if (request.query['verb']):
            verb = request.query['verb']
# verb=create: attach a new file appender (writing to `file`) to logger `string`.
if( verb == "create"):
    string = request.query['string']
    file = request.query['file']
    myLogger.info("Setting %s to %s" % (string, file))
    createLogAppender(string, file)
loggerMap = getLogAppenders()
myLogger.error("%s" % json.dumps(loggerMap, indent=4, sort_keys=True))
response.entity = {"status": "OK", "data":loggerMap }
|
ahmedbilal/whoishiring | whoishiring/whoishiring.py | <reponame>ahmedbilal/whoishiring
import argparse
import re
import webbrowser
from urllib.parse import urljoin
import bs4
import requests
def extract_comment(element):
    """Return the prettified HTML of a top-level comment, or None.

    Replies are skipped: in HN's comment tree only top-level comments carry a
    zero-width indentation image.
    """
    if not isinstance(element, bs4.element.Tag):
        return None
    if element.find("img", width="0") is None:
        return None
    span = element.find("span", class_="commtext")
    return span.prettify() if span else None
def write_html(comments, output_file_path):
    """Write `comments` to `output_file_path` as a minimal HTML page, each
    comment followed by an <hr> separator."""
    parts = ["<html><head><title>Hackernews Filtered Job Posts</title></head><body>"]
    for comment in comments:
        parts.append(comment + "<hr>")
    parts.append("</body></html>")
    with open(output_file_path, "w") as f:
        f.writelines(parts)
def main():
    """CLI entry point: scrape an HN "Who is hiring?" thread and filter posts.

    Follows "More" links until the last page, keeps only top-level comments
    matching every --keyword regex (case-insensitive), writes them to an HTML
    file and opens it in the default browser.
    """
    argparser = argparse.ArgumentParser()
    argparser.add_argument(
        "--url", required=True,
        help="URL of whoishiring post. For example, URL of Who is Hiring? (September 2020) is "
        "https://news.ycombinator.com/item?id=24342498",
    )
    argparser.add_argument("--output", default="hackernews.html", help="Output file path. Default is hackernews.html")
    argparser.add_argument("--keyword", nargs="+", required=True, help="Keyword examples: remote python")
    args = argparser.parse_args()
    url, output_file_path, keywords = args.url, args.output, args.keyword
    # One compiled case-insensitive pattern per keyword; a comment must match all.
    filters = [re.compile(keyword, re.IGNORECASE) for keyword in keywords]
    filtered_comments = []
    while url:  # paginate until there is no "More" link
        request = requests.get(url)
        soup = bs4.BeautifulSoup(request.content, "html.parser")
        comments_in_html = list(soup.find(name="table", class_="comment-tree").children)
        # Drop non-comment rows (extract_comment returns None for replies etc.).
        comments = filter(
            lambda x: x is not None,
            [extract_comment(comment) for comment in comments_in_html]
        )
        filtered_comments += [
            comment for comment in comments
            if all([_filter.search(comment) for _filter in filters])
        ]
        more_link = soup.find(name="a", class_="morelink")
        url = urljoin("https://news.ycombinator.com/", more_link.get("href")) if more_link else None
    write_html(filtered_comments, output_file_path)
    webbrowser.open_new_tab(output_file_path)
webbrowser.open_new_tab(output_file_path)
|
itsMagondu/IoTNeuralNetworks | noisefilter/apps/filter/models.py | from __future__ import unicode_literals
from django.db import models
class Data(models.Model):
    """A single sensor reading together with filter outputs and error."""
    reading = models.FloatField(null=True, blank=True)      # current raw reading
    prevreading = models.FloatField(null=True, blank=True)  # previous raw reading
    output = models.FloatField(null=True, blank=True)       # filtered output
    error = models.FloatField(null=True, blank=True)        # estimation error
    truevalue = models.FloatField(null=True, blank=True)    # known ground truth
    kalmanvalue = models.FloatField(null=True, blank=True)  # Kalman estimate
    added = models.DateTimeField(auto_now_add=True)         # row creation time
    active = models.BooleanField(default=True)              # soft-delete flag
class KalmanConfiguration(models.Model):
    """Tunable parameters for a Kalman-filter run."""
    base_value = models.FloatField(null=True, blank=True)        # true value used to synthesise data
    iterations = models.FloatField(null=True, blank=True)        # number of filter steps
    initial_guess = models.FloatField(null=True, blank=True)     # starting estimate
    posteri_estimate = models.FloatField(null=True, blank=True)  # initial error estimate
    added = models.DateTimeField(auto_now_add=True)
    active = models.BooleanField(default=True)
class ANNConfiguration(models.Model):
    """Tunable parameters for a neural-network run."""
    layers = models.IntegerField(null=True, blank=True)  # hidden-layer size
    activation = models.CharField(max_length=20, default='', null=True, blank=True)  # "tanh" or "logistic"
    # Fitting info
    learning_rate = models.IntegerField(null=True, blank=True)
    epochs = models.IntegerField(null=True, blank=True)
    added = models.DateTimeField(auto_now_add=True)
    active = models.BooleanField(default=True)
class TrainingExample(models.Model):
    """A single (noisy input, expected output) training pair."""
    dataoutput = models.FloatField(null=True, blank=True)  # expected (clean) value
    datainput = models.FloatField(null=True, blank=True)   # noisy reading
class AnnResult(models.Model):
    """Outcome of one neural-network experiment run."""
    prediction = models.FloatField(null=True, blank=True)           # network output
    epochs = models.IntegerField(null=True, blank=True)             # training epochs used
    seconds = models.IntegerField(null=True, blank=True)            # wall-clock run time
    hidden_layer_size = models.IntegerField(null=True, blank=True)  # hidden-layer width
    truevalue = models.FloatField(null=True, blank=True)            # ground truth
    function = models.CharField(max_length=20, default='',null=True, blank=True)  # activation used
    added = models.DateTimeField(auto_now_add=True)
class KalmanResult(models.Model):
    """Outcome of one Kalman-filter experiment run."""
    prediction = models.FloatField(null=True, blank=True)      # final estimate
    iterations = models.IntegerField(null=True, blank=True)    # filter steps used
    seconds = models.IntegerField(null=True, blank=True)       # wall-clock run time
    initial_guess = models.IntegerField(null=True, blank=True) # starting estimate
    truevalue = models.FloatField(null=True, blank=True)       # ground truth
    added = models.DateTimeField(auto_now_add=True)
|
itsMagondu/IoTNeuralNetworks | noisefilter/noisefilter/__init__.py | <reponame>itsMagondu/IoTNeuralNetworks
""" noisefilter """
|
itsMagondu/IoTNeuralNetworks | noisefilter/apps/filter/kalmanfilter.py | # Kalman filter in Python adopted from http://scipy-cookbook.readthedocs.io/items/KalmanFiltering.html
import numpy as np
import matplotlib.pyplot as plt
import time
class KalmanFilter:
    """One-dimensional Kalman filter, adapted from the SciPy cookbook.

    Estimates a constant true value `base_value` from noisy observations.
    If `data` is not supplied, synthetic Gaussian observations are generated
    around `base_value`.
    """

    def __init__(self, base_value=24, iterations=200, initial_guess=20.0, posteri_estimate=4.0, data=None, plot=False):
        # Fixed: `data=[]` was a mutable default argument; `None` now means
        # "synthesise observations" (an empty list still behaves the same).
        self.n_iter = iterations          # how many filter steps to run
        sz = (self.n_iter,)               # size of the state arrays
        self.x = base_value               # true value (used to synthesise data)
        if data is None or len(data) == 0:
            self.z = np.random.normal(self.x, 1, size=sz)  # observations (normal about x)
        else:
            self.z = data
        self.Q = 1e-5                     # process variance
        # allocate space for arrays
        self.xhat = np.zeros(sz)          # a posteriori estimate of x
        self.P = np.zeros(sz)             # a posteriori error estimate
        self.xhatminus = np.zeros(sz)     # a priori estimate of x
        self.Pminus = np.zeros(sz)        # a priori error estimate
        self.K = np.zeros(sz)             # gain or blending factor
        self.R = 2                        # measurement variance
        # initial guesses
        self.xhat[0] = initial_guess      # initial estimate
        self.P[0] = posteri_estimate      # initial estimate of the error made
        self.plot = plot

    def filter(self):
        """Run the filter over self.z.

        Returns ``(observations, estimates, true_value, plot_or_None)``.
        """
        start = time.time()
        for k in range(1, self.n_iter):
            # time update
            self.xhatminus[k] = self.xhat[k-1]
            self.Pminus[k] = self.P[k-1]+self.Q
            # measurement update
            self.K[k] = self.Pminus[k]/(self.Pminus[k]+self.R)
            self.xhat[k] = self.xhatminus[k]+self.K[k]*(self.z[k]-self.xhatminus[k])
            self.P[k] = (1-self.K[k])*self.Pminus[k]
        end = time.time()
        # Fixed: `end` was computed but a second time.time() call was printed;
        # also converted the Python-2-only print statements to function form
        # (identical output, and the module now parses under Python 3).
        print("Took %s seconds" % (end - start))
        print("Noisy data: ")
        print(self.z)
        print("Estimates:")
        print(self.xhat)
        print("Truth Value:")
        print(self.x)
        if self.plot:
            figure = self.plot_results()  # renamed: the original shadowed the module-level `plt`
        else:
            figure = None
        return self.z, self.xhat, self.x, figure

    def plot_results(self):
        """Plot observations, estimates and the true value; return the pyplot module."""
        plt.rcParams['figure.figsize'] = (10, 8)
        plt.figure()
        plt.plot(self.z, 'k+', label='noisy measurements')
        plt.plot(self.xhat, 'b-', label='a posteri estimate')
        plt.axhline(self.x, color='g', label='truth value')
        plt.legend()
        plt.title('Estimate vs. iteration step', fontweight='bold')
        plt.xlabel('Iteration')
        plt.ylabel('Temperature')
        return plt
|
itsMagondu/IoTNeuralNetworks | noisefilter/apps/filter/urls.py | """urlconf for the base application"""
from django.conf.urls import url
from .views import *
# Routes for the filter app: the two demo views plus the landing page.
urlpatterns = [
    url(r'^ann/$', ANNView.as_view(), name='ann'),
    url(r'^kalman/$', KalmanView.as_view(), name='kalman'),
    url(r'^$', index, name='index'),
]
itsMagondu/IoTNeuralNetworks | noisefilter/apps/filter/neuralnetwork.py | <filename>noisefilter/apps/filter/neuralnetwork.py<gh_stars>0
from __future__ import division
import numpy as np
def tanh(x):
    """Hyperbolic-tangent activation (element-wise)."""
    y = np.tanh(x)
    return y
def tanh_deriv(x):
    """Derivative of tanh: 1 - tanh(x)^2."""
    t = np.tanh(x)
    return 1.0 - t * t
def logistic(x):
    """Logistic sigmoid: 1 / (1 + e^{-x})."""
    denom = 1 + np.exp(-x)
    return 1 / denom
def logistic_derivative(x):
    """Derivative of the logistic sigmoid: s(x) * (1 - s(x)).

    Evaluates the sigmoid once -- the original computed it twice per call.
    (Inlined rather than calling logistic() so the result is identical while
    halving the exp() work.)
    """
    s = 1 / (1 + np.exp(-x))
    return s * (1 - s)
def _scale_to_binary(e, minV, maxV):
result = ((e-minV)/(maxV-minV))*(1-0)+0
return result
def rescale_from_binary(e, minV, maxV):
    """Inverse of _scale_to_binary: map `e` from [0, 1] back onto [minV, maxV]."""
    span = maxV - minV
    return e * span + minV
class NeuralNetwork:
    """Minimal fully-connected feed-forward network trained with on-line
    (single-sample) backpropagation.

    Weights are seeded deterministically (np.random.seed(0)) so repeated runs
    with the same data are reproducible.
    """

    def __init__(self, layers, activation='tanh'):
        """
        :param layers: A list containing the number of units in each layer.
            Should be at least two values.
        :param activation: The activation function to be used. Can be
            "logistic" or "tanh".
        :raises ValueError: for an unknown activation name.  (The original
            silently left self.activation unset and crashed later in fit().)
        """
        np.random.seed(0)  # deterministic init and sample-selection order
        if activation == 'logistic':
            self.activation = logistic
            self.activation_deriv = logistic_derivative
        elif activation == 'tanh':
            self.activation = tanh
            self.activation_deriv = tanh_deriv
        else:
            raise ValueError("activation must be 'logistic' or 'tanh', got %r" % (activation,))
        self.weights = []
        # Hidden layers carry an extra bias unit; the output layer does not.
        # Weights are uniform in [-2, 2).
        for i in range(1, len(layers) - 1):
            self.weights.append((2*np.random.random((layers[i - 1] + 1, layers[i]
                                + 1))-1)*2.0)
        self.weights.append((2*np.random.random((layers[i] + 1, layers[i +
                            1]))-1)*2.0)

    def fit(self, X, y, learning_rate=2, epochs=50000):
        """Train with stochastic backprop: `epochs` single-sample updates.

        :param X: training inputs, one row per sample (a bias column is
            appended internally).
        :param y: training targets, one per sample.
        """
        X = np.atleast_2d(X)
        # Append a bias column of ones to every sample.
        temp = np.ones([X.shape[0], X.shape[1]+1])
        temp[:, 0:-1] = X
        X = temp
        y = np.array(y)
        for k in range(epochs):
            i = np.random.randint(X.shape[0])  # pick one random sample
            a = [X[i]]
            # Forward pass: record activations layer by layer.
            for l in range(len(self.weights)):
                a.append(self.activation(np.dot(a[l], self.weights[l])))
            error = y[i] - a[-1]
            deltas = [error * self.activation_deriv(a[-1])]
            # Backward pass: from the second-to-last layer toward the input.
            for l in range(len(a) - 2, 0, -1):
                deltas.append(deltas[-1].dot(self.weights[l].T)*self.activation_deriv(a[l]))
            deltas.reverse()
            for i in range(len(self.weights)):
                layer = np.atleast_2d(a[i])
                delta = np.atleast_2d(deltas[i])
                self.weights[i] += learning_rate * layer.T.dot(delta)

    def predict(self, x):
        """Forward-propagate one sample (bias appended) and return the output
        layer's activations as a numpy array."""
        x = np.array(x)
        temp = np.ones(x.shape[0]+1)
        temp[0:-1] = x
        a = temp
        for l in range(0, len(self.weights)):
            a = self.activation(np.dot(a, self.weights[l]))
        return a
|
itsMagondu/IoTNeuralNetworks | noisefilter/apps/filter/views.py | <gh_stars>0
import numpy as np
import time
import json
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from django.http import HttpResponseRedirect, HttpResponse, JsonResponse
from django.views.generic.base import TemplateView
from django.core import serializers
from django.shortcuts import render
from .models import (Data, KalmanConfiguration, ANNConfiguration, TrainingExample,
AnnResult, KalmanResult)
import neuralnetwork
from .kalmanfilter import KalmanFilter
from timing_decorator import time_usage
def index(request):
    """Render the landing page for the filter app."""
    return render(request, 'filter/home.html')
class KalmanView(TemplateView):
    """Run the Kalman filter over stored TrainingExample data and report
    the estimates.

    GET params: base_value, iterations, predict (initial guess), estimate
    (initial error estimate), test (record a KalmanResult when truthy),
    format ("html" to render the template, anything else for JSON).
    """
    template_name = "filter/kalman.html"

    @time_usage
    def get(self, request, *args, **kwargs):
        base_value = request.GET.get('base_value', 22)
        iterations = request.GET.get('iterations', 100)
        predict = request.GET.get('predict', 20)
        error_estimate = request.GET.get('estimate', 4)
        # NOTE(review): any non-empty string (even "false") is truthy here, so
        # any ?test=... value enables result recording -- confirm intent.
        test = request.GET.get('test', False)
        dataformat = request.GET.get('format', 'html')
        base_value = checkIfInt(base_value)
        if not base_value:
            return JsonResponse({'error': 'Incorrect base value. Send as integer'})
        iterations = checkIfInt(iterations)
        if not iterations:
            return JsonResponse({'error': 'Incorrect iterations value. Send as integer'})
        predict = checkIfInt(predict)
        if not predict:
            return JsonResponse({'error': 'Incorrect predict value. Send as integer'})
        # Fixed: the original passed the raw ?estimate= query string straight
        # into KalmanFilter, which fails when the string is assigned into a
        # numpy array.
        error_estimate = checkIfInt(error_estimate)
        if not error_estimate:
            return JsonResponse({'error': 'Incorrect estimate value. Send as integer'})
        # (Removed unused examples_list/correct_list locals and the unused
        # KalmanResult.objects.all() query from the original.)
        data = TrainingExample.objects.all().values_list('datainput', flat=True)
        start = time.time()  # wall-clock timing for the whole run
        iteration_data = data
        k = KalmanFilter(base_value, iterations, predict, error_estimate, iteration_data)
        noise, estimate, truth, plt = k.filter()
        result = estimate[len(estimate)-1]  # final (best) estimate
        seconds = time.time() - start
        if test:
            KalmanResult.objects.create(prediction=result,
                                        iterations=iterations,
                                        initial_guess=predict,
                                        seconds=seconds,
                                        truevalue=base_value)
        args = {}
        args['base_value'] = base_value
        args['predictions'] = list(estimate)
        args['noisy_data'] = list(iteration_data)
        if dataformat == 'html':
            return render(request, self.template_name, args)
        else:
            return JsonResponse(args)
class ANNView(TemplateView):
    """Train the NeuralNetwork on stored TrainingExample rows and report a
    prediction plus the history of past experiment results.

    GET params: layers (hidden-layer size), base_value, lrate, function
    ("tanh"/"logistic"), epochs, test (record an AnnResult when truthy),
    format ("html" to render the template, anything else for JSON).
    """
    template_name = "filter/ann.html"
    # Scaling bounds used to map readings onto [0, 1] for the network.
    minValue = 0
    maxValue = 100

    @time_usage
    def get(self, request, *args, **kwargs):
        hidden_layer = request.GET.get('layers', 3)
        base_value = request.GET.get('base_value', 20)  # NOTE(review): should come from config, not the query string
        learning_rate = request.GET.get('lrate', 2)
        function = request.GET.get('function', 'tanh')
        epochs = request.GET.get('epochs', 50000)
        # NOTE(review): any non-empty string (even "false") is truthy here.
        test = request.GET.get('test', False)
        dataformat = request.GET.get('format', 'html')
        epochs = checkIfInt(epochs)
        if not epochs:
            return JsonResponse({'error': 'Incorrect epoch value. Send as integer'})
        hidden_layer = checkIfInt(hidden_layer)
        if not hidden_layer:
            return JsonResponse({'error': 'Incorrect layer value. Send as integer'})
        base_value = checkIfInt(base_value)
        if not base_value:
            return JsonResponse({'error': 'Incorrect base value. Send as integer'})
        # Fixed: the original passed the raw ?lrate= string straight to fit(),
        # where `learning_rate * array` then misbehaves.
        learning_rate = checkIfInt(learning_rate)
        if not learning_rate:
            return JsonResponse({'error': 'Incorrect lrate value. Send as integer'})
        examples = TrainingExample.objects.all()
        examples_list = examples.values_list('datainput', flat=True)
        correct_list = examples.values_list('dataoutput', flat=True)
        layers = [1, hidden_layer, 1]
        y = correct_list[:99]  # first 99 rows train; row 99 is held out
        n = neuralnetwork.NeuralNetwork(layers, function)
        predict = examples_list[99]
        scaled_x = []
        scaled_y = []
        start = time.time()  # wall-clock timing, including scaling
        # Scale inputs and targets onto [0, 1] before training.
        for index, item in enumerate(examples_list[:99]):
            scaled_x.append([self.scale_to_binary(item)])
            scaled_y.append(self.scale_to_binary(y[index]))
        predict = self.scale_to_binary(predict)
        scaled_y = np.array(scaled_y)
        scaled_x = np.array(scaled_x)
        n.fit(scaled_x, scaled_y, learning_rate, epochs)
        prediction = n.predict([predict])
        result = self.scale_from_binary(prediction[0])
        seconds = time.time() - start
        if test:
            AnnResult.objects.create(prediction=result,
                                     epochs=epochs,
                                     hidden_layer_size=hidden_layer,
                                     seconds=seconds,
                                     truevalue=correct_list[99],
                                     function=function,)
        results = AnnResult.objects.all()
        def_epoch = 50000  # baseline epochs when comparing layer sizes
        def_layers = 3     # baseline layers when comparing epoch counts
        epoch_tests = []
        layer_tests = []
        epoch_predictions = []
        layer_predictions = []
        for item in results:
            if item.epochs == def_epoch:  # this row was a layer-size experiment
                layer_tests.append(item.hidden_layer_size)
                layer_predictions.append(item.prediction)
            if item.hidden_layer_size == def_layers:  # an epoch-count experiment
                epoch_tests.append(item.epochs)
                epoch_predictions.append(item.prediction)
        args = {}
        args['epoch_tests'] = epoch_tests
        args['epoch_predictions'] = epoch_predictions
        args['layer_tests'] = layer_tests
        args['layer_predictions'] = layer_predictions
        args['correct_answer'] = correct_list[99]
        if dataformat == 'html':
            return render(request, self.template_name, args)
        else:
            return JsonResponse(args)

    def scale_to_binary(self, value):
        """Map a reading onto [0, 1] using the class bounds."""
        return neuralnetwork._scale_to_binary(value, self.minValue, self.maxValue)

    def scale_from_binary(self, value):
        """Map a network output from [0, 1] back onto a reading."""
        return neuralnetwork.rescale_from_binary(value, self.minValue, self.maxValue)
class DashboardView(TemplateView):
    """Static dashboard page (no extra context)."""
    template_name = "dashboard.html"
class TrainingExamples(TemplateView):
    """Generate or clear synthetic TrainingExample rows.

    NOTE(review): this class is used as a plain helper, not a view -- its
    __init__ signature is incompatible with TemplateView.as_view(), so the
    TemplateView base class looks accidental.  Kept to preserve the public
    interface; confirm before removing.
    """

    def __init__(self, base_value, error_range, values):
        self.base_value = base_value
        self.error_range = error_range
        self.lower_bound = base_value - error_range
        self.upper_bound = base_value + error_range
        self.values = values  # how many records to generate

    def generate(self):
        """Insert `values` random readings from [lower_bound, upper_bound)."""
        np.random.seed(0)  # reproducible data set
        for i in range(self.values):  # was xrange (Python-2-only builtin)
            num = np.random.randint(self.lower_bound, self.upper_bound)
            TrainingExample.objects.create(datainput=num)

    def clear(self):
        """Delete every TrainingExample row."""
        TrainingExample.objects.all().delete()
def runTest():
    """Smoke-test the NeuralNetwork on a small hard-coded data set.

    Trains on pairs of readings (scaled onto [0, 1]) and prints predictions
    for a few unseen pairs.  Print statements converted to function form:
    output is identical and the function also parses under Python 3.
    """
    nn = neuralnetwork.NeuralNetwork([2, 4, 1], 'tanh')
    minValue = 0
    maxValue = 100
    x_array = [[20, 24], [22, 23], [21, 23], [19, 22],[24, 27],[25, 29],[23, 25],[22, 24],[27, 29]]
    # NOTE(review): y_array has 10 entries for 9 training inputs; the last
    # target is never used -- confirm which row was meant to be dropped.
    y_array = [22, 23, 22, 20, 25, 26, 27, 24, 23, 28]
    predict_array = [[19, 20], [26, 28], [24, 26],[21,24]]
    print("Network Input:")
    print(x_array)
    print("Network Output:")
    print(y_array)
    # Scale everything onto [0, 1] before training.
    scaled_x_array = []
    scaled_y_array = []
    scaled_predict_array = []
    for item in x_array:
        scaled_x_array.append([neuralnetwork._scale_to_binary(i, minValue, maxValue) for i in item])
    for item in y_array:
        scaled_y_array.append(neuralnetwork._scale_to_binary(item, minValue, maxValue))
    for item in predict_array:
        scaled_predict_array.append([neuralnetwork._scale_to_binary(i, minValue, maxValue) for i in item])
    X = np.array(scaled_x_array)  # training inputs
    y = np.array(scaled_y_array)  # training targets
    nn.fit(X, y)
    for count, scaled in enumerate(scaled_predict_array):
        result = nn.predict(scaled)
        result = neuralnetwork.rescale_from_binary(result, minValue, maxValue)
        print("\nInput values:")
        print(predict_array[count])
        print("Prediction:")
        print(result[0])
'''
So what is needed? I need to show some live simulations
1. Show the training of ANN
2. Show the prediction using a Kalman Filter
3. Show the predictions over time on each
4. Show power and memory usage of each over time
5. Show the accuracy of each
6. How does kalman behave when looking for trends? Non-linear data
7. Show the effects of various parameters on each filter.
So much to do. So help me God.
'''
def checkIfInt(text):
    """Return int(text), or None when `text` is not an integer literal.

    Catches only the errors int() actually raises for bad input; the
    original bare `except` also swallowed unrelated exceptions such as
    KeyboardInterrupt.
    """
    try:
        return int(text)
    except (TypeError, ValueError):
        return None
|
itsMagondu/IoTNeuralNetworks | noisefilter/apps/filter/admin.py | from django.contrib import admin
from .models import (Data, KalmanConfiguration, ANNConfiguration, TrainingExample,
AnnResult, KalmanResult)
# Register your models here.
class DataAdmin(admin.ModelAdmin):
    """Admin for raw sensor Data rows."""
    fields = ['reading', 'prevreading', 'output', 'error', 'truevalue', 'kalmanvalue']
    list_display = ['id', 'reading', 'prevreading', 'output', 'error', 'truevalue', 'kalmanvalue']
    # Fixed: Django's ModelAdmin option is `search_fields`; the original
    # singular `search_field` was silently ignored (no search box appeared).
    search_fields = ['reading']
class KalmanAdmin(admin.ModelAdmin):
    """Admin for KalmanConfiguration rows."""
    fields = ['base_value', 'iterations', 'initial_guess', 'posteri_estimate', 'active']
    list_display = ['id', 'base_value', 'iterations', 'initial_guess', 'posteri_estimate', 'added', 'active']
    # Fixed: the option is `search_fields`, not `search_field`.
    search_fields = ['base_value']
class ANNAdmin(admin.ModelAdmin):
    """Admin for ANNConfiguration rows."""
    fields = ['layers', 'activation', 'learning_rate', 'epochs']
    list_display = ['id', 'layers', 'activation', 'learning_rate', 'epochs']
    # Fixed: the option is `search_fields`, not `search_field`.
    search_fields = ['layers']
class TrainingAdmin(admin.ModelAdmin):
    """Admin for TrainingExample rows."""
    fields = ['datainput', 'dataoutput']
    list_display = ['id', 'datainput', 'dataoutput']
    # Fixed: the option is `search_fields`, not `search_field`.
    search_fields = ['datainput']
class AnnResultAdmin(admin.ModelAdmin):
    """Admin for AnnResult experiment records."""
    fields = ['prediction', 'epochs', 'seconds', 'hidden_layer_size', 'truevalue', 'function']
    list_display = ['id', 'prediction', 'epochs', 'seconds', 'added', 'hidden_layer_size', 'truevalue', 'function']
    # Fixed: the option is `search_fields`, not `search_field`.
    search_fields = ['prediction', 'function']
class KalmanResultAdmin(admin.ModelAdmin):
    """Admin for KalmanResult experiment records."""
    fields = ['prediction', 'iterations', 'seconds', 'initial_guess', 'truevalue']
    list_display = ['id', 'prediction', 'iterations', 'seconds', 'added', 'initial_guess', 'truevalue']
    # Fixed: the option is `search_fields`, not `search_field`.
    search_fields = ['prediction']
# Register every model with its customised admin class.
admin.site.register(Data, DataAdmin)
admin.site.register(ANNConfiguration, ANNAdmin)
admin.site.register(KalmanConfiguration, KalmanAdmin)
admin.site.register(TrainingExample, TrainingAdmin)
admin.site.register(AnnResult, AnnResultAdmin)
admin.site.register(KalmanResult, KalmanResultAdmin)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.